diff --git a/.clang-format b/.clang-format index 63ebecbce1..a5d54596e2 100644 --- a/.clang-format +++ b/.clang-format @@ -9,7 +9,7 @@ Language: ObjC BasedOnStyle: Google BinPackParameters: false BinPackArguments: false -ColumnLimit: 100 +ColumnLimit: 80 ObjCBlockIndentWidth: 2 AllowAllParametersOfDeclarationOnNextLine: true AlignOperands: false diff --git a/.gitignore b/.gitignore index a86b405fab..2ff4979337 100644 --- a/.gitignore +++ b/.gitignore @@ -25,6 +25,7 @@ *_proto.xml *_proto_cpp.xml *~ +\#*# .*.sw? .cache .cipd diff --git a/.gn b/.gn index b9948d2fcd..e628c3abba 100644 --- a/.gn +++ b/.gn @@ -12,8 +12,8 @@ import("//build/dotfile_settings.gni") buildconfig = "//build/config/BUILDCONFIG.gn" # The python interpreter to use by default. On Windows, this will look -# for python3.exe and python3.bat. -script_executable = "python3" +# for vpython3.exe and vpython3.bat. +script_executable = "vpython3" # The secondary source root is a parallel directory tree where # GN build files are placed when they can not be placed directly @@ -27,16 +27,19 @@ secondary_source = "//build/secondary/" no_check_targets = [ "//third_party/icu/*", - # TODO(crbug.com/1151236) Remove once fixed. - "//base/allocator/partition_allocator:partition_alloc", + # TODO: crbug/326607005 - GTEST_HAS_ABSL is broken + "//third_party/googletest:gmock", + "//third_party/googletest:gtest", ] -# These are the list of GN files that run exec_script. This whitelist exists +# These are the list of GN files that run exec_script. This allowlist exists # to force additional review for new uses of exec_script, which is strongly # discouraged except for gypi_to_gn calls. -exec_script_whitelist = build_dotfile_settings.exec_script_whitelist + +exec_script_allowlist = build_dotfile_settings.exec_script_allowlist + [ "//build_overrides/build.gni" ] +export_compile_commands = [ "*" ] + default_args = { # Webrtc does not support component builds because we are not using the # template "component" but we rely directly on "rtc_static_library" and @@ -49,7 +52,7 @@ default_args = { mac_sdk_min = "10.12" - ios_deployment_target = "12.0" + ios_deployment_target = "14.0" # The SDK API level, in contrast, is set by build/android/AndroidManifest.xml. android32_ndk_api_level = 21 @@ -74,4 +77,17 @@ default_args = { # Chromium fix resolves the problem. fuchsia_sdk_readelf_exec = "//third_party/llvm-build/Release+Asserts/bin/llvm-readelf" + + # WebRTC doesn't use jni_zero's multiplexing. Since this causes an error + # let's temporarily disable it. + enable_jni_multiplexing = false + + # TODO(b/42223878): use_fuzztest_wrapper adds a dependency to //base so + # let's temporarly disable it. + use_fuzztest_wrapper = false + + # Enable Rust in WebRTC + enable_rust = true + enable_rust_cxx = true + enable_chromium_prelude = true } diff --git a/.rustfmt.toml b/.rustfmt.toml new file mode 100644 index 0000000000..0be5c8f05d --- /dev/null +++ b/.rustfmt.toml @@ -0,0 +1,26 @@ +# This file defines the Rust style for automatic reformatting. +# See also https://rust-lang.github.io/rustfmt + +# Rust language edition to be used by the parser. +edition = "2021" + +# Version of the formatting rules to use. +style_edition = "2021" + +# Line endings will be converted to \n. +newline_style = "Unix" + +wrap_comments = true + +# The "Default" setting has a heuristic which splits lines too aggresively. +# We are willing to revisit this setting in future versions of rustfmt. 
+# Bugs: +# * https://github.com/rust-lang/rustfmt/issues/3119 +# * https://github.com/rust-lang/rustfmt/issues/3120 +use_small_heuristics = "Max" + +# Third party code is formatted upstream. +ignore = [ + "third_party/rust/**/crate", + "third_party/rust/chromium_crates_io/vendor", +] diff --git a/.style.yapf b/.style.yapf index c34341d425..557fa7bf84 100644 --- a/.style.yapf +++ b/.style.yapf @@ -1,4 +1,2 @@ [style] based_on_style = pep8 -indent_width = 2 -column_limit = 80 \ No newline at end of file diff --git a/.vpython b/.vpython deleted file mode 100644 index d226875f02..0000000000 --- a/.vpython +++ /dev/null @@ -1,76 +0,0 @@ -# This is a vpython "spec" file. -# -# It describes patterns for python wheel dependencies of the python scripts in -# the chromium repo, particularly for dependencies that have compiled components -# (since pure-python dependencies can be easily vendored into third_party). -# -# When vpython is invoked, it finds this file and builds a python VirtualEnv, -# containing all of the dependencies described in this file, fetching them from -# CIPD (the "Chrome Infrastructure Package Deployer" service). Unlike `pip`, -# this never requires the end-user machine to have a working python extension -# compilation environment. All of these packages are built using: -# https://chromium.googlesource.com/infra/infra/+/main/infra/tools/dockerbuild/ -# -# All python scripts in the repo share this same spec, to avoid dependency -# fragmentation. -# -# If you have depot_tools installed in your $PATH, you can invoke python scripts -# in this repo by running them as you normally would run them, except -# substituting `vpython` instead of `python` on the command line, e.g.: -# vpython path/to/script.py some --arguments -# -# Read more about `vpython` and how to modify this file here: -# https://chromium.googlesource.com/infra/infra/+/main/doc/users/vpython.md - -python_version: "2.7" - -# Used by: -# third_party/catapult -wheel: < - name: "infra/python/wheels/psutil/${platform}_${py_python}_${py_abi}" - version: "version:5.2.2" -> - -# Used by tools_webrtc/perf/process_perf_results.py. -wheel: < - name: "infra/python/wheels/httplib2-py2_py3" - version: "version:0.10.3" -> - -# Used by: -# build/toolchain/win -wheel: < - name: "infra/python/wheels/pypiwin32/${vpython_platform}" - version: "version:219" - match_tag: < - platform: "win32" - > - match_tag: < - platform: "win_amd64" - > -> - -wheel: < - name: "infra/python/wheels/six-py2_py3" - version: "version:1.15.0" -> -wheel: < - name: "infra/python/wheels/pbr-py2_py3" - version: "version:3.0.0" -> -wheel: < - name: "infra/python/wheels/funcsigs-py2_py3" - version: "version:1.0.2" -> -wheel: < - name: "infra/python/wheels/mock-py2_py3" - version: "version:2.0.0" -> -wheel: < - name: "infra/python/wheels/protobuf-py2_py3" - version: "version:3.13.0" -> -wheel: < - name: "infra/python/wheels/requests-py2_py3" - version: "version:2.13.0" -> diff --git a/.vpython3 b/.vpython3 index 3f571df261..6d52ebcba6 100644 --- a/.vpython3 +++ b/.vpython3 @@ -22,24 +22,24 @@ # Read more about `vpython` and how to modify this file here: # https://chromium.googlesource.com/infra/infra/+/main/doc/users/vpython.md -python_version: "3.8" +python_version: "3.11" # Used by: # third_party/catapult wheel: < name: "infra/python/wheels/psutil/${vpython_platform}" - version: "version:5.8.0.chromium.2" + version: "version:5.8.0.chromium.3" > # Used by tools_webrtc/perf/process_perf_results.py. 
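With .gn now pointing script_executable at vpython3 and the spec above rolled from Python 3.8 to 3.11, every wheel pinned in this file is resolved from CIPD into the virtualenv that repo scripts run under. A quick sanity check of that environment, run as `vpython3 check_env.py` (the filename is arbitrary and the script is only a sketch):

import sys

# The spec above pins the interpreter at 3.11; anything older means the
# script is running outside the vpython3 virtualenv.
assert sys.version_info[:2] >= (3, 11), sys.version

# psutil and httplib2 are two of the wheels pinned in .vpython3; importing
# them confirms the CIPD-backed wheels were materialized.
import httplib2
import psutil

print("python", sys.version.split()[0])
print("psutil", psutil.__version__)
print("httplib2", httplib2.__version__)
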
wheel: < name: "infra/python/wheels/httplib2-py3" - version: "version:0.19.1" + version: "version:0.22.0" > wheel: < - name: "infra/python/wheels/pyparsing-py2_py3" - version: "version:2.4.7" + name: "infra/python/wheels/pyparsing-py3" + version: "version:3.1.1" > @@ -47,7 +47,7 @@ wheel: < # build/toolchain/win wheel: < name: "infra/python/wheels/pywin32/${vpython_platform}" - version: "version:300" + version: "version:306" match_tag: < platform: "win32" > @@ -59,48 +59,48 @@ wheel: < # GRPC used by iOS test. wheel: < name: "infra/python/wheels/grpcio/${vpython_platform}" - version: "version:1.44.0" + version: "version:1.57.0" > wheel: < name: "infra/python/wheels/six-py2_py3" - version: "version:1.15.0" + version: "version:1.16.0" > wheel: < name: "infra/python/wheels/pbr-py2_py3" - version: "version:3.0.0" + version: "version:5.9.0" > wheel: < name: "infra/python/wheels/funcsigs-py2_py3" version: "version:1.0.2" > wheel: < - name: "infra/python/wheels/mock-py2_py3" - version: "version:2.0.0" + name: "infra/python/wheels/mock-py3" + version: "version:4.0.3" > wheel: < name: "infra/python/wheels/protobuf-py3" - version: "version:3.20.0" + version: "version:6.30.2" > wheel: < name: "infra/python/wheels/requests-py3" version: "version:2.31.0" > wheel: < - name: "infra/python/wheels/idna-py2_py3" - version: "version:2.8" + name: "infra/python/wheels/idna-py3" + version: "version:3.4" > wheel: < - name: "infra/python/wheels/urllib3-py2_py3" - version: "version:1.26.6" + name: "infra/python/wheels/urllib3-py3" + version: "version:2.1.0" > wheel: < - name: "infra/python/wheels/certifi-py2_py3" - version: "version:2020.11.8" + name: "infra/python/wheels/certifi-py3" + version: "version:2023.11.17" > wheel: < name: "infra/python/wheels/charset_normalizer-py3" - version: "version:2.0.4" + version: "version:3.3.2" > wheel: < name: "infra/python/wheels/brotli/${vpython_platform}" diff --git a/AUTHORS b/AUTHORS index eb393ef057..0599f49da1 100644 --- a/AUTHORS +++ b/AUTHORS @@ -17,10 +17,14 @@ Akshay Shah Alexander Brauckmann Alexandre Gouaillard Alex Henrie +Andrei Volykhin Andrew MacDonald Andrey Efremov Andrew Johnson +Andy Tamilo Anil Kumar +Anna Lemehova +Anton Barkov Ben Strong Berthold Herrmann Bob Withers @@ -33,6 +37,7 @@ Christophe Dumez Chris Tserng Cody Barnes Colin Plumb +Corby Hoback Cyril Lashkevich CZ Theng Danail Kirov @@ -40,6 +45,7 @@ Dave Cowart David Porter David Sanders Dax Booysen +Denis Genestier Dennis Angelo Dharmesh Chauhan Di Wu @@ -49,11 +55,16 @@ Eike Rathke Eric Rescorla, RTFM Inc. Filip Hlasek Frederik Riedel, Frogg GmbH +Gao Chun Giji Gangadharan Graham Yoakum +Guillaume Petit Gustavo Garcia Hans Knoechel +Helmut Januschka Hugues Ekra +Hyungjoo Na +Ilya Katsnelson Jake Hilton James H. Brown Jan Grulich @@ -65,10 +76,12 @@ Jie Mao Jiwon Kim Johnny Wong Jose Antonio Olivera Ortega +Karim Hammache Keiichi Enomoto Kiran Thind Korniltsev Anatoly Kyutae Lee +lauren n. 
liberda Lennart Grahl Luke Weber Maksim Khobat @@ -76,7 +89,6 @@ Mallikarjuna Rao V Manish Jethani Martin Storsjo Matthias Liebig -Maksim Sisov Maxim Pavlov Maxim Potapov Michael Iedema @@ -96,6 +108,7 @@ Olivier Crête Pali Rohar Paul Kapustin Peng Yu +Pete Makeev Philipp Hancke Piasy Xu Rafael Lopez Diez @@ -103,7 +116,9 @@ Ralph Giles Raman Budny Ramprakash Jelari Riku Voipio +Rishit Bansal Robert Bares +Robert Mader Robert Mader Robert Nagy Ryan Yoakum @@ -113,12 +128,16 @@ Satender Saroha Saul Kravitz Sergio Garcia Murillo Shaofan Qi +Shigemasa Watanabe Shuhai Peng +Shunbo Li +Shunbo Li Seija Silviu Caragea Stefan Gula Stephan Hartmann Steve Reid +Tao chen Takaaki Suzuki Tarun Chawla Todd Wong @@ -127,16 +146,20 @@ Trevor Hayes Uladzislau Susha Vicken Simonian Victor Costan +Vinzenz Feenstra Vladimir Beloborodov Xiaohong Xu Xiaolei Yu Xinchao Tian Yaowen Guo +Youfa Yura Yaroshevich Yuriy Pavlyshak Yusuke Suzuki Pengfei Han Yingying Ma +Hailin Zhao +Fizz Fang # END individuals section. # BEGIN organizations section. @@ -154,6 +177,7 @@ Google Inc. <*@google.com> Highfive, Inc. <*@highfive.com> Hopin Ltd. <*@hopin.to> HyperConnect Inc. <*@hpcnt.com> +Igalia S.L. <*@igalia.com> Intel Corporation <*@intel.com> LG Electronics, Inc. <*@lge.com> Life On Air Inc. <*@lifeonair.com> @@ -181,6 +205,7 @@ The WebRTC Authors <*@webrtc.org> Threema GmbH <*@threema.ch> Tuple, LLC <*@tuple.app> Twilio, Inc. <*@twilio.com> +Twitch Interactive, Inc. <*@justin.tv> Vewd Software AS <*@vewd.com> Videona Socialmedia <*@videona.com> Videxio AS <*@videxio.com> diff --git a/BUILD.gn b/BUILD.gn index 6a3042678a..ca8d8faa61 100644 --- a/BUILD.gn +++ b/BUILD.gn @@ -30,6 +30,7 @@ if (rtc_enable_protobuf) { if (is_android) { import("//build/config/android/config.gni") import("//build/config/android/rules.gni") + import("//third_party/jni_zero/jni_zero.gni") } if (!build_with_chromium) { @@ -46,6 +47,7 @@ if (!build_with_chromium) { } if (rtc_include_tests) { deps += [ + ":rtc_p2p_unittests", ":rtc_unittests", ":video_engine_tests", ":voip_unittests", @@ -78,6 +80,9 @@ if (!build_with_chromium) { "video:sv_loopback", "video:video_loopback", ] + if (use_libfuzzer) { + deps += [ "test/fuzzers" ] + } if (!is_asan) { # Do not build :webrtc_lib_link_test because lld complains on some OS # (e.g. when target_os = "mac") when is_asan=true. For more details, @@ -134,14 +139,10 @@ config("library_impl_config") { # Contains the defines and includes in common.gypi that are duplicated both as # target_defaults and direct_dependent_settings. config("common_inherited_config") { - defines = [] + defines = [ "PROTOBUF_ENABLE_DEBUG_LOGGING_MAY_LEAK_PII=0" ] cflags = [] ldflags = [] - if (rtc_jni_generator_legacy_symbols) { - defines += [ "RTC_JNI_GENERATOR_LEGACY_SYMBOLS" ] - } - if (rtc_objc_prefix != "") { defines += [ "RTC_OBJC_TYPE_PREFIX=${rtc_objc_prefix}" ] } @@ -173,16 +174,18 @@ config("common_inherited_config") { defines += [ "RTC_ENABLE_WIN_WGC" ] } - # Some tests need to declare their own trace event handlers. If this define is - # not set, the first time TRACE_EVENT_* is called it will store the return - # value for the current handler in an static variable, so that subsequent - # changes to the handler for that TRACE_EVENT_* will be ignored. - # So when tests are included, we set this define, making it possible to use - # different event handlers in different tests. 
- if (rtc_include_tests) { - defines += [ "WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1" ] - } else { - defines += [ "WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=0" ] + if (!rtc_use_perfetto) { + # Some tests need to declare their own trace event handlers. If this define is + # not set, the first time TRACE_EVENT_* is called it will store the return + # value for the current handler in an static variable, so that subsequent + # changes to the handler for that TRACE_EVENT_* will be ignored. + # So when tests are included, we set this define, making it possible to use + # different event handlers in different tests. + if (rtc_include_tests) { + defines += [ "WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1" ] + } else { + defines += [ "WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=0" ] + } } if (build_with_chromium) { defines += [ "WEBRTC_CHROMIUM_BUILD" ] @@ -248,6 +251,10 @@ config("common_inherited_config") { if (is_ubsan) { cflags += [ "-fsanitize=float-cast-overflow" ] } + + if (rtc_allow_deprecated_namespaces) { + defines += [ "WEBRTC_ALLOW_DEPRECATED_NAMESPACES" ] + } } # TODO(bugs.webrtc.org/9693): Remove the possibility to suppress this warning @@ -268,6 +275,33 @@ config("rtc_prod_config") { } } +group("tracing") { + all_dependent_configs = [ "//third_party/perfetto/gn:public_config" ] + if (rtc_use_perfetto) { + if (build_with_chromium) { + public_deps = # no-presubmit-check TODO(webrtc:8603) + [ "//third_party/perfetto:libperfetto" ] + } else { + public_deps = [ # no-presubmit-check TODO(webrtc:8603) + ":webrtc_libperfetto", + "//third_party/perfetto/include/perfetto/tracing", + ] + } + } else { + public_deps = # no-presubmit-check TODO(webrtc:8603) + [ "//third_party/perfetto/include/perfetto/tracing" ] + } +} + +if (rtc_use_perfetto) { + rtc_library("webrtc_libperfetto") { + deps = [ + "//third_party/perfetto/src/tracing:client_api_without_backends", + "//third_party/perfetto/src/tracing:platform_impl", + ] + } +} + config("common_config") { cflags = [] cflags_c = [] @@ -301,6 +335,10 @@ config("common_config") { defines += [ "RTC_ENABLE_VP9" ] } + if (rtc_use_h265) { + defines += [ "RTC_ENABLE_H265" ] + } + if (rtc_include_dav1d_in_internal_decoder_factory) { defines += [ "RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY" ] } @@ -321,10 +359,6 @@ config("common_config") { defines += [ "WEBRTC_ABSL_MUTEX" ] } - if (rtc_enable_libevent) { - defines += [ "WEBRTC_ENABLE_LIBEVENT" ] - } - if (rtc_disable_logging) { defines += [ "RTC_DISABLE_LOGGING" ] } @@ -337,10 +371,6 @@ config("common_config") { defines += [ "RTC_DISABLE_METRICS" ] } - if (rtc_exclude_transient_suppressor) { - defines += [ "WEBRTC_EXCLUDE_TRANSIENT_SUPPRESSOR" ] - } - if (rtc_exclude_audio_processing_module) { defines += [ "WEBRTC_EXCLUDE_AUDIO_PROCESSING_MODULE" ] } @@ -390,19 +420,11 @@ config("common_config") { } if (is_clang) { - cflags += [ "-Wc++11-narrowing" ] - - if (!is_fuchsia) { - # Compiling with the Fuchsia SDK results in Wundef errors - # TODO(bugs.fuchsia.dev/100722): Remove from (!is_fuchsia) branch when - # Fuchsia build errors are fixed. - cflags += [ "-Wundef" ] - } - - if (!is_nacl) { - # Flags NaCl (Clang 3.7) do not recognize. 
- cflags += [ "-Wunused-lambda-capture" ] - } + cflags += [ + "-Wc++11-narrowing", + "-Wundef", + "-Wunused-lambda-capture", + ] } if (is_win && !is_clang) { @@ -462,7 +484,7 @@ config("common_config") { ] } - if (use_fuzzing_engine && optimize_for_fuzzing) { + if (use_fuzzing_engine) { # Used in Chromium's overrides to disable logging defines += [ "WEBRTC_UNSAFE_FUZZER_MODE" ] } @@ -473,12 +495,28 @@ config("common_config") { "/U_UNICODE", ] } + + if (rtc_use_perfetto) { + defines += [ "RTC_USE_PERFETTO" ] + } } config("common_objc") { frameworks = [ "Foundation.framework" ] } +if (!rtc_build_ssl) { + config("external_ssl_library") { + if (rtc_ssl_root != "") { + include_dirs = [ rtc_ssl_root ] + } + libs = [ + "crypto", + "ssl", + ] + } +} + if (!build_with_chromium) { # Target to build all the WebRTC production code. rtc_static_library("webrtc") { @@ -495,9 +533,11 @@ if (!build_with_chromium) { deps = [ "api:create_peerconnection_factory", + "api:enable_media", "api:libjingle_peerconnection_api", "api:rtc_error", "api:transport_api", + "api/audio_codecs:opus_audio_decoder_factory", "api/crypto", "api/rtc_event_log:rtc_event_log_factory", "api/task_queue", @@ -521,7 +561,6 @@ if (!build_with_chromium) { "media", "modules", "modules/video_capture:video_capture_internal_impl", - "p2p:rtc_p2p", "pc:libjingle_peerconnection", "pc:rtc_pc", "sdk", @@ -540,14 +579,6 @@ if (!build_with_chromium) { "api/video:video_frame", "api/video:video_rtp_headers", ] - } else { - deps += [ - "api", - "logging", - "p2p", - "pc", - "stats", - ] } if (rtc_enable_protobuf) { @@ -583,25 +614,38 @@ if (use_libfuzzer || use_afl) { } if (rtc_include_tests && !build_with_chromium) { + rtc_unittests_resources = [ "resources/reference_video_640x360_30fps.y4m" ] + + if (is_ios) { + bundle_data("rtc_unittests_bundle_data") { + testonly = true + sources = rtc_unittests_resources + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + } + } + rtc_test("rtc_unittests") { testonly = true deps = [ "api:compile_all_headers", "api:rtc_api_unittests", - "api/audio/test:audio_api_unittests", + "api/audio:audio_api_unittests", "api/audio_codecs/test:audio_codecs_api_unittests", "api/numerics:numerics_unittests", "api/task_queue:pending_task_safety_flag_unittests", "api/test/metrics:metrics_unittests", "api/transport:stun_unittest", + "api/transport/rtp:corruption_detection_message_unittest", "api/video/test:rtc_api_video_unittests", + "api/video_codecs:libaom_av1_encoder_factory_test", + "api/video_codecs:simple_encoder_wrapper_unittests", "api/video_codecs/test:video_codecs_api_unittests", "api/voip:compile_all_headers", "call:fake_network_pipe_unittests", - "p2p:libstunprober_unittests", - "p2p:rtc_p2p_unittests", "rtc_base:async_dns_resolver_unittests", + "rtc_base:async_packet_socket_unittest", + "rtc_base:async_udp_socket_unittest", "rtc_base:callback_list_unittests", "rtc_base:rtc_base_approved_unittests", "rtc_base:rtc_base_unittests", @@ -623,8 +667,17 @@ if (rtc_include_tests && !build_with_chromium) { "test/network:network_emulation_unittests", ] + data = rtc_unittests_resources + if (rtc_enable_protobuf) { - deps += [ "logging:rtc_event_log_tests" ] + deps += [ + "api/test/network_emulation:network_config_schedule_proto", + "logging:rtc_event_log_tests", + ] + } + + if (is_ios) { + deps += [ ":rtc_unittests_bundle_data" ] } if (is_android) { @@ -634,12 +687,20 @@ if (rtc_include_tests && !build_with_chromium) { deps += [ "sdk/android:native_unittests", "sdk/android:native_unittests_java", - 
"//testing/android/native_test:native_test_support", ] shard_timeout = 900 } } + rtc_test("rtc_p2p_unittests") { + testonly = true + + deps = [ + "p2p:rtc_p2p_unittests", + "test:test_main", + ] + } + if (rtc_enable_google_benchmarks) { rtc_test("benchmarks") { testonly = true @@ -651,19 +712,6 @@ if (rtc_include_tests && !build_with_chromium) { } # TODO(pbos): Rename test suite, this is no longer "just" for video targets. - video_engine_tests_resources = [ - "resources/foreman_cif_short.yuv", - "resources/voice_engine/audio_long16.pcm", - ] - - if (is_ios) { - bundle_data("video_engine_tests_bundle_data") { - testonly = true - sources = video_engine_tests_resources - outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] - } - } - rtc_test("video_engine_tests") { testonly = true deps = [ @@ -679,40 +727,16 @@ if (rtc_include_tests && !build_with_chromium) { "video:video_tests", "video/adaptation:video_adaptation_tests", ] - data = video_engine_tests_resources + + data_deps = [ "resources:video_engine_tests_data" ] + if (is_android) { use_default_launcher = false - deps += [ - "//build/android/gtest_apk:native_test_instrumentation_test_runner_java", - "//testing/android/native_test:native_test_java", - "//testing/android/native_test:native_test_support", - ] + deps += [ "//build/android/gtest_apk:native_test_instrumentation_test_runner_java" ] shard_timeout = 900 } if (is_ios) { - deps += [ ":video_engine_tests_bundle_data" ] - } - } - - webrtc_perf_tests_resources = [ - "resources/ConferenceMotion_1280_720_50.yuv", - "resources/audio_coding/speech_mono_16kHz.pcm", - "resources/audio_coding/speech_mono_32_48kHz.pcm", - "resources/audio_coding/testfile32kHz.pcm", - "resources/difficult_photo_1850_1110.yuv", - "resources/foreman_cif.yuv", - "resources/paris_qcif.yuv", - "resources/photo_1850_1110.yuv", - "resources/presentation_1850_1110.yuv", - "resources/voice_engine/audio_long16.pcm", - "resources/web_screenshot_1850_1110.yuv", - ] - - if (is_ios) { - bundle_data("webrtc_perf_tests_bundle_data") { - testonly = true - sources = webrtc_perf_tests_resources - outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + deps += [ "resources:video_engine_tests_bundle_data" ] } } @@ -728,18 +752,15 @@ if (rtc_include_tests && !build_with_chromium) { "video:video_pc_full_stack_tests", ] - data = webrtc_perf_tests_resources + data_deps = [ "resources:webrtc_perf_tests_data" ] + if (is_android) { use_default_launcher = false - deps += [ - "//build/android/gtest_apk:native_test_instrumentation_test_runner_java", - "//testing/android/native_test:native_test_java", - "//testing/android/native_test:native_test_support", - ] + deps += [ "//build/android/gtest_apk:native_test_instrumentation_test_runner_java" ] shard_timeout = 4500 } if (is_ios) { - deps += [ ":webrtc_perf_tests_bundle_data" ] + deps += [ "resources:webrtc_perf_tests_bundle_data" ] } } @@ -747,7 +768,6 @@ if (rtc_include_tests && !build_with_chromium) { testonly = true deps = [ "rtc_base:rtc_base_nonparallel_tests" ] if (is_android) { - deps += [ "//testing/android/native_test:native_test_support" ] shard_timeout = 900 } } @@ -795,13 +815,10 @@ rtc_static_library("dcsctp") { group("poison_audio_codecs") { } -group("poison_default_task_queue") { -} - group("poison_default_echo_detector") { } -group("poison_rtc_json") { +group("poison_environment_construction") { } group("poison_software_video_codecs") { diff --git a/DEPS b/DEPS index 84db0c0c56..61027df618 100644 --- a/DEPS +++ b/DEPS @@ -10,13 +10,24 @@ vars = { # chromium 
waterfalls. More info at: crbug.com/570091. 'checkout_configuration': 'default', 'checkout_instrumented_libraries': 'checkout_linux and checkout_configuration == "default"', - 'chromium_revision': '6ac79291669656814b2c66e66ea296caac6652fd', + 'chromium_revision': '92c0179e11451c30349504abb941d3615938ca29', # Fetch the prebuilt binaries for llvm-cov and llvm-profdata. Needed to # process the raw profiles produced by instrumented targets (built with # the gn arg 'use_clang_coverage'). 'checkout_clang_coverage_tools': False, + # Fetch clangd into the same bin/ directory as our clang binary. + 'checkout_clangd': False, + + # Fetch clang-tidy into the same bin/ directory as our clang binary. + 'checkout_clang_tidy': False, + + # Fetch libraries required to compile and run fuzzer tests. + 'checkout_fuzzer': False, + + 'chromium_git': 'https://chromium.googlesource.com', + # Keep the Chromium default of generating location tags. 'generate_location_tags': True, @@ -25,14 +36,14 @@ vars = { # By default, download the fuchsia sdk from the public sdk directory. 'fuchsia_sdk_cipd_prefix': 'fuchsia/sdk/core/', - 'fuchsia_version': 'version:14.20230826.1.1', + 'fuchsia_version': 'version:27.20250424.2.1', # By default, download the fuchsia images from the fuchsia GCS bucket. 'fuchsia_images_bucket': 'fuchsia', 'checkout_fuchsia': False, # Since the images are hundreds of MB, default to only downloading the image # most commonly useful for developers. Bots and developers that need to use # other images can override this with additional images. - 'checkout_fuchsia_boot_images': "terminal.qemu-x64", + 'checkout_fuchsia_boot_images': "terminal.x64", 'checkout_fuchsia_product_bundles': '"{checkout_fuchsia_boot_images}" != ""', # Fetch configuration files required for the 'use_remoteexec' gn arg @@ -40,40 +51,52 @@ vars = { # RBE instance to use for running remote builds 'rbe_instance': 'projects/rbe-webrtc-developer/instances/default_instance', # reclient CIPD package version - 'reclient_version': 're_client_version:0.113.0.8b45b89-gomaip', + 'reclient_version': 're_client_version:0.177.1.e58c0145-gomaip', + # siso CIPD package version. + 'siso_version': 'git_revision:70e1167e0e6dad10c8388cace8fd9d9376c43316', + + # ninja CIPD package. + 'ninja_package': 'infra/3pp/tools/ninja/', # ninja CIPD package version # https://chrome-infra-packages.appspot.com/p/infra/3pp/tools/ninja - 'ninja_version': 'version:2@1.11.1.chromium.6', + 'ninja_version': 'version:3@1.12.1.chromium.4', + + # condition to allowlist deps for non-git-source processing. + 'non_git_source': 'True', + + # This can be overridden, e.g. with custom_vars, to build clang from HEAD + # instead of downloading the prebuilt pinned revision. + 'llvm_force_head_revision': False, } deps = { # TODO(kjellander): Move this to be Android-only. 'src/base': - 'https://chromium.googlesource.com/chromium/src/base@609cafa975c8a29d3b2f686c9a42530a556835fe', + 'https://chromium.googlesource.com/chromium/src/base@86c814633cf284bc8057a539bc722e2a672afe2f', 'src/build': - 'https://chromium.googlesource.com/chromium/src/build@115a7079919c25462a7fd8c1d22900378bbc6585', + 'https://chromium.googlesource.com/chromium/src/build@88030b320338e0706b6b93336c4b35e6bbaf467e', 'src/buildtools': - 'https://chromium.googlesource.com/chromium/src/buildtools@b2043d4f435131d0a1bdd5342c17753ef9236572', + 'https://chromium.googlesource.com/chromium/src/buildtools@0f32cb9025766951122d4ed19aba87a94ded3f43', # Gradle 6.6.1. Used for testing Android Studio project generation for WebRTC. 
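The checkout_clangd, checkout_clang_tidy and checkout_fuzzer variables introduced above are ordinary gclient vars: they default to False and are meant to be flipped per checkout rather than edited in DEPS. A minimal .gclient sketch of how that usually looks (the solution URL is the standard WebRTC mirror; treat the exact layout as an assumption):

solutions = [
  {
    "name": "src",
    "url": "https://webrtc.googlesource.com/src.git",
    "deps_file": "DEPS",
    "managed": False,
    "custom_deps": {},
    "custom_vars": {
        # Fetch clangd and clang-tidy into the same bin/ as the pinned clang.
        "checkout_clangd": True,
        "checkout_clang_tidy": True,
        # Pull the libraries required to compile and run fuzzer tests.
        "checkout_fuzzer": True,
    },
  },
]

A subsequent `gclient sync` re-evaluates the conditions in this file with those overrides applied.
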
'src/examples/androidtests/third_party/gradle': { 'url': 'https://chromium.googlesource.com/external/github.com/gradle/gradle.git@f2d1fb54a951d8b11d25748e4711bec8d128d7e3', 'condition': 'checkout_android', }, 'src/ios': { - 'url': 'https://chromium.googlesource.com/chromium/src/ios@17864bdc8fb2f78060ea4109d61a9144f64f4d67', + 'url': 'https://chromium.googlesource.com/chromium/src/ios@058aa981a69171da048bdcf82d89c64fcd43d16b', 'condition': 'checkout_ios', }, 'src/testing': - 'https://chromium.googlesource.com/chromium/src/testing@ff8dee88bc0b49f8337cee6e82151c245a63b98c', + 'https://chromium.googlesource.com/chromium/src/testing@a89c37d36bf80c05963727e28b9916835ae88d3a', 'src/third_party': - 'https://chromium.googlesource.com/chromium/src/third_party@ee6367daea550c5845a6079cec5fd6555f39144f', + 'https://chromium.googlesource.com/chromium/src/third_party@8062e0e102496ff14a8c58b586f014527424953d', 'src/buildtools/linux64': { 'packages': [ { 'package': 'gn/gn/linux-${{arch}}', - 'version': 'git_revision:cc56a0f98bb34accd5323316e0292575ff17a5d4', + 'version': 'git_revision:85cc21e94af590a267c1c7a47020d9b420f8a033', } ], 'dep_type': 'cipd', @@ -83,7 +106,7 @@ deps = { 'packages': [ { 'package': 'gn/gn/mac-${{arch}}', - 'version': 'git_revision:cc56a0f98bb34accd5323316e0292575ff17a5d4', + 'version': 'git_revision:85cc21e94af590a267c1c7a47020d9b420f8a033', } ], 'dep_type': 'cipd', @@ -93,7 +116,7 @@ deps = { 'packages': [ { 'package': 'gn/gn/windows-amd64', - 'version': 'git_revision:cc56a0f98bb34accd5323316e0292575ff17a5d4', + 'version': 'git_revision:85cc21e94af590a267c1c7a47020d9b420f8a033', } ], 'dep_type': 'cipd', @@ -112,35 +135,251 @@ deps = { 'condition': 'not (host_os == "linux" and host_cpu == "arm64")', }, + 'src/third_party/llvm-build/Release+Asserts': { + 'dep_type': 'gcs', + 'bucket': 'chromium-browser-clang', + 'condition': 'not llvm_force_head_revision', + 'objects': [ + { + # The Android libclang_rt.builtins libraries are currently only included in the Linux clang package. 
+ 'object_name': 'Linux_x64/clang-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': '17d9277d32a87f856c6d0a5ee3f662015d423d876315d7736ca7b05b2b6f047e', + 'size_bytes': 54610924, + 'generation': 1743178947242029, + 'condition': '(host_os == "linux" or checkout_android) and non_git_source', + }, + { + 'object_name': 'Linux_x64/clang-tidy-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': '426b5f0daf964c3e1a9b9e7626311e8544a3e4768c8ef80d31464894b0fead68', + 'size_bytes': 13573888, + 'generation': 1743178947383618, + 'condition': 'host_os == "linux" and checkout_clang_tidy and non_git_source', + }, + { + 'object_name': 'Linux_x64/clangd-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': 'e419c7db3d30a163fdd8692cfeef26a1fb22db039f2f6ab42b930edfc4e5fd43', + 'size_bytes': 13829552, + 'generation': 1743178947404834, + 'condition': 'host_os == "linux" and checkout_clangd and non_git_source', + }, + { + 'object_name': 'Linux_x64/llvm-code-coverage-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': '935be59242458aa5849133c5a47b4ae11f6d8cae92401188e7db070dcae11326', + 'size_bytes': 2303256, + 'generation': 1743178947639495, + 'condition': 'host_os == "linux" and checkout_clang_coverage_tools and non_git_source', + }, + { + 'object_name': 'Linux_x64/llvmobjdump-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': '3617952c541889b46806f66223bb4e8342e4c714938e7c83c7b9ca27b8e94cec', + 'size_bytes': 5699348, + 'generation': 1743178947497618, + 'condition': '((checkout_linux or checkout_mac or checkout_android) and host_os == "linux") and non_git_source', + }, + { + 'object_name': 'Mac/clang-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': '690ae86dbd1c3689713db944059e7249760bdc30d4e06fdee79851780d62b255', + 'size_bytes': 51731764, + 'generation': 1743178948995628, + 'condition': 'host_os == "mac" and host_cpu == "x64"', + }, + { + 'object_name': 'Mac/clang-mac-runtime-library-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': '6e5b19f3ef4f2a84b2137f64c66cf7dc559c399ee070b69c26791ad4530dc84a', + 'size_bytes': 980732, + 'generation': 1743178955913911, + 'condition': 'checkout_mac and not host_os == "mac"', + }, + { + 'object_name': 'Mac/clang-tidy-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': '05c89e445986aece87cf514b0e877c6cb1b847ac32c8fe24baf5386fa2874956', + 'size_bytes': 13595068, + 'generation': 1743178949025853, + 'condition': 'host_os == "mac" and host_cpu == "x64" and checkout_clang_tidy', + }, + { + 'object_name': 'Mac/clangd-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': '27945ba4a22b9e59b0c28dd299919a1da3a18a2d76e10f30a5e0e72242af6b55', + 'size_bytes': 15018860, + 'generation': 1743178949033402, + 'condition': 'host_os == "mac" and host_cpu == "x64" and checkout_clangd', + }, + { + 'object_name': 'Mac/llvm-code-coverage-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': '3eb87f93194f7fe3177d4796c8d6376421811538747d1e65c825654ec4ecceea', + 'size_bytes': 2257340, + 'generation': 1743178949227666, + 'condition': 'host_os == "mac" and host_cpu == "x64" and checkout_clang_coverage_tools', + }, + { + 'object_name': 'Mac_arm64/clang-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': 'd4478bb335f9555fe6b0c888a4c0f5d48695081b02ce662cfe7a125f3f501eca', + 'size_bytes': 43908296, + 'generation': 1743178957140254, + 'condition': 'host_os == "mac" and host_cpu == "arm64"', + }, + { + 'object_name': 'Mac_arm64/clang-tidy-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': 
'06bd6ba46d4c0fc51e018d53c41f1ab81f7222f99dde95a10d57cb7f6c340eb7', + 'size_bytes': 11774556, + 'generation': 1743178957285392, + 'condition': 'host_os == "mac" and host_cpu == "arm64" and checkout_clang_tidy', + }, + { + 'object_name': 'Mac_arm64/clangd-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': '1cb1d20b1d9bcf5bfab05f333c1cd21ff723a463f8b3a84c3b1f79758fc8d834', + 'size_bytes': 12058388, + 'generation': 1743178957310593, + 'condition': 'host_os == "mac" and host_cpu == "arm64" and checkout_clangd', + }, + { + 'object_name': 'Mac_arm64/llvm-code-coverage-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': '932bd7fb9dd4b8da2a59880f71f9f9fb4c66b071a4e481b19bbc340b9336dd11', + 'size_bytes': 1978748, + 'generation': 1743178957577801, + 'condition': 'host_os == "mac" and host_cpu == "arm64" and checkout_clang_coverage_tools', + }, + { + 'object_name': 'Win/clang-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': '6f486adfb406b0fcb9d3b83485ed4fca3467a3565d67baf0d9fd822721b780a1', + 'size_bytes': 46895260, + 'generation': 1743178966458891, + 'condition': 'host_os == "win"', + }, + { + 'object_name': 'Win/clang-tidy-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': '3fd9808e08070f84920d22ad537e1f2445bcf5773c134ddef28b14cfe46a67e1', + 'size_bytes': 13459292, + 'generation': 1743178966633313, + 'condition': 'host_os == "win" and checkout_clang_tidy', + }, + { + 'object_name': 'Win/clang-win-runtime-library-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': 'bcf2e1a93afb20f384dbbcc484815989cb0aa2e593f2235251498c97d7f22493', + 'size_bytes': 2477288, + 'generation': 1743178973852867, + 'condition': 'checkout_win and not host_os == "win"', + }, + { + 'object_name': 'Win/clangd-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': '12978a7e84b777aa10857e13f1e5e999061091126891631bf29df486f4bb2770', + 'size_bytes': 13891176, + 'generation': 1743178966674825, + 'condition': 'host_os == "win" and checkout_clangd', + }, + { + 'object_name': 'Win/llvm-code-coverage-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': '14842204c68030e276bb054bd5058a561be025d1ee1a0909943ea4f8c31cd715', + 'size_bytes': 2367868, + 'generation': 1743178966817841, + 'condition': 'host_os == "win" and checkout_clang_coverage_tools', + }, + { + 'object_name': 'Win/llvmobjdump-llvmorg-21-init-6681-g5b36835d-1.tar.xz', + 'sha256sum': '6c505137fcc3879c3f0ad3562ec1f530a4b83f725d2a91363d1cfdcc3536252e', + 'size_bytes': 5673896, + 'generation': 1743178966726617, + 'condition': '(checkout_linux or checkout_mac or checkout_android) and host_os == "win"', + }, + ] + }, + + # Update prebuilt Rust toolchain. 
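The prebuilt Rust toolchain below is fetched the same way as the clang packages above: each gcs object pairs a bucket path with the sha256sum, size_bytes and generation of the expected archive. A standalone sketch of cross-checking a downloaded archive against those fields, reusing the Linux_x64 clang entry above (the local path is a placeholder):

import hashlib
import os

# Values copied from the Linux_x64 clang object above; the path assumes the
# archive was downloaded into the current directory.
archive = "clang-llvmorg-21-init-6681-g5b36835d-1.tar.xz"
expected_sha256 = "17d9277d32a87f856c6d0a5ee3f662015d423d876315d7736ca7b05b2b6f047e"
expected_size = 54610924

digest = hashlib.sha256()
with open(archive, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(archive) == expected_size, "size_bytes mismatch"
assert digest.hexdigest() == expected_sha256, "sha256sum mismatch"
print("archive matches the DEPS entry")
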
+ 'src/third_party/rust-toolchain': { + 'dep_type': 'gcs', + 'bucket': 'chromium-browser-clang', + 'objects': [ + { + 'object_name': 'Linux_x64/rust-toolchain-9fcc9cf4a202aadfe1f44722b39c83536eba3dba-2-llvmorg-21-init-1655-g7b473dfe.tar.xz', + 'sha256sum': '4bf96a6d0d9bdff23475e556d925bb6846036859ea3868c2c74caa8e5fab42df', + 'size_bytes': 116749200, + 'generation': 1739484481010800, + 'condition': 'host_os == "linux" and non_git_source', + }, + { + 'object_name': 'Mac/rust-toolchain-9fcc9cf4a202aadfe1f44722b39c83536eba3dba-2-llvmorg-21-init-1655-g7b473dfe.tar.xz', + 'sha256sum': '6d950bab0d0cdf7ae201344b55291def8a6b2bd868fb477133ce5532ab6c1e55', + 'size_bytes': 109928956, + 'generation': 1739484482836636, + 'condition': 'host_os == "mac" and host_cpu == "x64"', + }, + { + 'object_name': 'Mac_arm64/rust-toolchain-9fcc9cf4a202aadfe1f44722b39c83536eba3dba-2-llvmorg-21-init-1655-g7b473dfe.tar.xz', + 'sha256sum': 'fd1620c4cced85861f5838e177fc8e1cbe1973a365143ed7de32a19c9e9cb725', + 'size_bytes': 98784068, + 'generation': 1739484484763552, + 'condition': 'host_os == "mac" and host_cpu == "arm64"', + }, + { + 'object_name': 'Win/rust-toolchain-9fcc9cf4a202aadfe1f44722b39c83536eba3dba-2-llvmorg-21-init-1655-g7b473dfe.tar.xz', + 'sha256sum': 'e06ac15f6cdab4cd2ac259785adf93da5275e44060c794ba8ff5bd5b4c29ff28', + 'size_bytes': 178946936, + 'generation': 1739484486536378, + 'condition': 'host_os == "win"', + }, + ], + }, + 'src/third_party/clang-format/script': - 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/clang/tools/clang-format.git@e5337933f2951cacd3aeacd238ce4578163ca0b9', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/clang/tools/clang-format.git@37f6e68a107df43b7d7e044fd36a13cbae3413f2', + 'src/third_party/compiler-rt/src': + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/compiler-rt.git@57213f125d03209892fed26189feb3b736e96735', 'src/third_party/libc++/src': - 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git@84fb809dd6dae36d556dc0bb702c6cc2ce9d4b80', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git@917609c669e43edc850eeb192a342434a54e1dfd', 'src/third_party/libc++abi/src': - 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git@3d83ca7bd2ab81f042bafe6996da08c9cd57c119', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git@f2a7f2987f9dcdf8b04c2d8cd4dcb186641a7c3e', + 'src/third_party/llvm-libc/src': + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libc.git@912274164f0877ca917c06e8484ad3be1784833a', 'src/third_party/libunwind/src': - 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@76e621a89787516da745489245d8b65a48ad60d8', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@81e2cb40a70de2b6978e6d8658891ded9a77f7e3', + + 'src/third_party/test_fonts/test_fonts': { + 'dep_type': 'gcs', + 'condition': 'non_git_source', + 'bucket': 'chromium-fonts', + 'objects': [ + { + 'object_name': 'f26f29c9d3bfae588207bbc9762de8d142e58935c62a86f67332819b15203b35', + 'sha256sum': 'f26f29c9d3bfae588207bbc9762de8d142e58935c62a86f67332819b15203b35', + 'size_bytes': 32750602, + 'generation': 1717109450425063, + }, + ], + }, 'src/third_party/ninja': { 'packages': [ { - 'package': 'infra/3pp/tools/ninja/${{platform}}', + 'package': Var('ninja_package') + '${{platform}}', 'version': 
Var('ninja_version'), } ], + 'condition': 'non_git_source', + 'dep_type': 'cipd', + }, + + 'src/third_party/siso/cipd': { + 'packages': [ + { + 'package': 'infra/build/siso/${{platform}}', + 'version': Var('siso_version'), + } + ], + 'condition': 'non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_system_sdk': { + 'src/third_party/android_system_sdk/cipd': { 'packages': [ { 'package': 'chromium/third_party/android_system_sdk/public', - 'version': '4QeolYaSKWBtVTgzJU4tHUfzA9OJTDM8YUcD426IctwC', + 'version': 'Pfb3HDUW_uRir_VVTCYkGhf6bnPPF55NUJO2WXOxIe0C', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, + 'src/tools/resultdb': { 'packages': [ { @@ -151,34 +390,101 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_build_tools/aapt2': { + 'src/third_party/android_build_tools/aapt2/cipd': { 'packages': [ { 'package': 'chromium/third_party/android_build_tools/aapt2', - 'version': 'STY0BXlZxsEhudnlXQFed-B5UpwehcoM0sYqor6qRqsC', + 'version': '_lNsOL_GGlXLOIMGtrbMOqNd7TQHabaP1q8SlvUpFbMC', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_build_tools/bundletool': { + 'src/third_party/android_build_tools/bundletool/cipd': { 'packages': [ { 'package': 'chromium/third_party/android_build_tools/bundletool', - 'version': '2RPwohwtc6on0_96oFxokeEvnC1LbLrGuyCAw00k62AC', + 'version': 'zV93G9_1s5h6x7c2qdcibr0uuQ_5Q2QgcxhkUs9-tOsC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_build_tools/dagger_compiler/cipd': { + 'packages': [ + { + 'package': 'chromium/third_party/android_build_tools/dagger_compiler', + 'version': 'AC0DoTEXQf40KFt7hyCNSEJPrT9Rprw9zsZxNKdw7BQC', + }, + ], + 'condition': 'checkout_android and non_git_source', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_build_tools/error_prone/cipd': { + 'packages': [ + { + 'package': 'chromium/third_party/android_build_tools/error_prone', + 'version': '-IrkxgAkj3RxGNBPp6b4r9OWZC6_dTbI-jC3c3UPgq8C', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_build_tools/error_prone_javac/cipd': { + 'packages': [ + { + 'package': 'chromium/third_party/android_build_tools/error_prone_javac', + 'version': '7EcHxlEXEaLRWEyHIAxf0ouPjkmN1Od6jkutuo0sfBIC', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, + 'src/third_party/android_build_tools/lint/cipd': { + 'packages': [ + { + 'package': 'chromium/third_party/android_build_tools/lint', + 'version': 'gx4NrAApWUVG5HG3WJRIIzZrTssD6H5uGbKe0g77mucC', + }, + ], + 'condition': 'checkout_android and non_git_source', + 'dep_type': 'cipd', + }, + + # TODO(webrtc:42223878): This is only needed for //base. 
+ 'src/third_party/android_build_tools/nullaway/cipd': { + 'packages': [ + { + 'package': 'chromium/third_party/android_build_tools/nullaway', + 'version': 'F6la8NsEkr27_Sm2MswVDBDB7UXVqRtaCMS5YWEQ9dQC', + }, + ], + 'condition': 'checkout_android and non_git_source', + 'dep_type': 'cipd', + }, + + 'src/third_party/aosp_dalvik/cipd': { + 'packages': [ + { + 'package': 'chromium/third_party/aosp_dalvik/linux-amd64', + 'version': 'version:2@13.0.0_r24.cr1', + }, + ], + 'condition': 'checkout_android and non_git_source', + 'dep_type': 'cipd', + }, + 'src/third_party/boringssl/src': - 'https://boringssl.googlesource.com/boringssl.git@b8e012e1ff736cc794273af4a7db521e6b18bcd5', + 'https://boringssl.googlesource.com/boringssl.git@34492c89a8e381e0e856a686cc71b1eb5bd728db', 'src/third_party/breakpad/breakpad': - 'https://chromium.googlesource.com/breakpad/breakpad.git@8988364bcddd9b194b0bf931c10bc125987330ed', + 'https://chromium.googlesource.com/breakpad/breakpad.git@232a723f5096ab02d53d87931efa485fa77d3b03', 'src/third_party/catapult': - 'https://chromium.googlesource.com/catapult.git@b8c4f2d99ac66fe47cb8cceec0dd1a1da5d1b51e', + 'https://chromium.googlesource.com/catapult.git@000f47cfa393d7f9557025a252862e2a61a60d44', 'src/third_party/ced/src': { 'url': 'https://chromium.googlesource.com/external/github.com/google/compact_enc_det.git@ba412eaaacd3186085babcd901679a48863c7dd5', }, @@ -189,54 +495,49 @@ deps = { 'condition': 'checkout_android', }, 'src/third_party/crc32c/src': - 'https://chromium.googlesource.com/external/github.com/google/crc32c.git@fa5ade41ee480003d9c5af6f43567ba22e4e17e6', + 'https://chromium.googlesource.com/external/github.com/google/crc32c.git@d3d60ac6e0f16780bcfcc825385e1d338801a558', 'src/third_party/depot_tools': - 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@427f0f43ad0ceb08399561ab9cc60e45931059d3', + 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@fa8fc854e1766b86f10c9a15902cf3cc23adaac2', 'src/third_party/ffmpeg': - 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@0ba37733400593b162e5ae9ff26b384cff49c250', + 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@01f23648c6b84de6c0f717fa4e1816f53b9ee72e', 'src/third_party/flatbuffers/src': - 'https://chromium.googlesource.com/external/github.com/google/flatbuffers.git@28861d1d7d5ec6ce34d4bbdc10bec4aace341167', + 'https://chromium.googlesource.com/external/github.com/google/flatbuffers.git@8db59321d9f02cdffa30126654059c7d02f70c32', 'src/third_party/grpc/src': { - 'url': 'https://chromium.googlesource.com/external/github.com/grpc/grpc.git@822dab21d9995c5cf942476b35ca12a1aa9d2737', + 'url': 'https://chromium.googlesource.com/external/github.com/grpc/grpc.git@957c9f95224b1e1318c0ecb98d0e7584ea5ccff2', }, # Used for embedded builds. CrOS & Linux use the system version. 
'src/third_party/fontconfig/src': { - 'url': 'https://chromium.googlesource.com/external/fontconfig.git@2fb3419a92156569bc1ec707401258c922cd0d99', + 'url': 'https://chromium.googlesource.com/external/fontconfig.git@14d466b30a8ab4a9d789977ed94f2c30e7209267', 'condition': 'checkout_linux', }, 'src/third_party/freetype/src': - 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@dd1ced4ee37b375686a1e0fb6e3a6966b195f4ab', + 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@1da283b8ae6d6b94f34a5c4b8c1227adc9dbb1d8', 'src/third_party/harfbuzz-ng/src': - 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@db700b5670d9475cc8ed4880cc9447b232c5e432', + 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@9f83bbbe64654b45ba5bb06927ff36c2e7588495', 'src/third_party/google_benchmark/src': { - 'url': 'https://chromium.googlesource.com/external/github.com/google/benchmark.git@b177433f3ee2513b1075140c723d73ab8901790f', + 'url': 'https://chromium.googlesource.com/external/github.com/google/benchmark.git@761305ec3b33abf30e08d50eb829e19a802581cc', }, # WebRTC-only dependency (not present in Chromium). 'src/third_party/gtest-parallel': - 'https://chromium.googlesource.com/external/github.com/google/gtest-parallel@f4d65b555894b301699c7c3c52906f72ea052e83', - 'src/third_party/google-truth': { - 'packages': [ - { - 'package': 'chromium/third_party/google-truth', - 'version': 'u8oovXxp24lStqX4d54htRovta-75Sy2w7ijg1TL07gC', - }, - ], + 'https://chromium.googlesource.com/external/github.com/google/gtest-parallel@96f4f904922f9bf66689e749c40f314845baaac8', + 'src/third_party/google-truth/src': { + 'url': 'https://chromium.googlesource.com/external/github.com/google/truth.git@33387149b465f82712a817e6744847fe136949b3', 'condition': 'checkout_android', - 'dep_type': 'cipd', }, 'src/third_party/googletest/src': - 'https://chromium.googlesource.com/external/github.com/google/googletest.git@af29db7ec28d6df1c7f0f745186884091e602e07', + 'https://chromium.googlesource.com/external/github.com/google/googletest.git@cd430b47a54841ec45d64d2377d7cabaf0eba610', 'src/third_party/icu': { - 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@985b9a6f70e13f3db741fed121e4dcc3046ad494', + 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@4c8cc4b365a505ce35be1e0bd488476c5f79805d', }, - 'src/third_party/jdk': { + 'src/third_party/jdk/current': { 'packages': [ { - 'package': 'chromium/third_party/jdk', - 'version': '0yjD6s5XYtcGAQoObIys7xs2ThkudwxJwS-2ZNP0SFEC', + 'package': 'chromium/third_party/jdk/linux-amd64', + 'version': '2iiuF-nKDH3moTImx2op4WTRetbfhzKoZhH7Xo44zGsC', }, - ], - 'condition': 'host_os == "linux" and checkout_android', + ], + # Needed on Linux for use on chromium_presubmit (for checkstyle). + 'condition': '(checkout_android or checkout_linux) and non_git_source', 'dep_type': 'cipd', }, # Deprecated - only use for tools which are broken real JDK. 
@@ -255,14 +556,14 @@ deps = { 'src/third_party/jsoncpp/source': 'https://chromium.googlesource.com/external/github.com/open-source-parsers/jsoncpp.git@42e892d96e47b1f6e29844cc705e148ec4856448', # from svn 248 'src/third_party/junit/src': { - 'url': 'https://chromium.googlesource.com/external/junit.git@05fe2a64f59127c02135be22f416e91260d6ede6', + 'url': 'https://chromium.googlesource.com/external/junit.git@0eb5ce72848d730da5bd6d42902fdd6a8a42055d', 'condition': 'checkout_android', }, - 'src/third_party/kotlin_stdlib': { + 'src/third_party/kotlin_stdlib/cipd': { 'packages': [ { 'package': 'chromium/third_party/kotlin_stdlib', - 'version': '6cGkpHi3fSRhpRfq2b1mjmzfFmShvtQe6gy4g2nFQd0C', + 'version': 'WkqHVVoDtiOfLxiPT-eO3zyieJRoIRrvs7XqgcIXBYIC', }, ], 'condition': 'checkout_android', @@ -273,54 +574,65 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/kotlinc', - 'version': '6Hdj5fkzcomS1cNTWnXoeTZj0wvCG4zdyLtZ23eK-U4C', + 'version': 'YASKBvUSO-m2borC3V_hGLsRTyOSnHl2J0BaCmINZkUC', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - # Used for building libFuzzers (only supports Linux). 'src/third_party/libFuzzer/src': - 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/compiler-rt/lib/fuzzer.git@26cc39e59b2bf5cbc20486296248a842c536878d', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/compiler-rt/lib/fuzzer.git@e31b99917861f891308269c36a32363b120126bb', + 'src/third_party/fuzztest/src': + 'https://chromium.googlesource.com/external/github.com/google/fuzztest.git@b10387fdbbca18192f85eaa5323a59f44bf9c468', 'src/third_party/libjpeg_turbo': - 'https://chromium.googlesource.com/chromium/deps/libjpeg_turbo.git@30bdb85e302ecfc52593636b2f44af438e05e784', + 'https://chromium.googlesource.com/chromium/deps/libjpeg_turbo.git@e14cbfaa85529d47f9f55b0f104a579c1061f9ad', 'src/third_party/libsrtp': - 'https://chromium.googlesource.com/chromium/deps/libsrtp.git@5b7c744eb8310250ccc534f3f86a2015b3887a0a', + 'https://chromium.googlesource.com/chromium/deps/libsrtp.git@a52756acb1c5e133089c798736dd171567df11f5', 'src/third_party/dav1d/libdav1d': - 'https://chromium.googlesource.com/external/github.com/videolan/dav1d.git@f8ae94eca0f53502a2cddd29a263c1edea4822a0', + 'https://chromium.googlesource.com/external/github.com/videolan/dav1d.git@8d956180934f16244bdb58b39175824775125e55', 'src/third_party/libaom/source/libaom': - 'https://aomedia.googlesource.com/aom.git@5f8db64abce68a3698fb732697ae50880bc9cac4', + 'https://aomedia.googlesource.com/aom.git@a23a4799ec2d7dd6e436c7b64a34553773014ed7', 'src/third_party/libunwindstack': { - 'url': 'https://chromium.googlesource.com/chromium/src/third_party/libunwindstack.git@4dbfa0e8c844c8e243b297bc185e54a99ff94f9e', + 'url': 'https://chromium.googlesource.com/chromium/src/third_party/libunwindstack.git@0d758dd57f42564acecdd7a1e7ac5c8521c1b01a', 'condition': 'checkout_android', }, 'src/third_party/perfetto': - 'https://android.googlesource.com/platform/external/perfetto.git@00427277dd1728c836d92f78006c60430c04d6bc', + Var('chromium_git') + '/external/github.com/google/perfetto.git' + '@' + 'a54dd38d60593129ae56d400f1a72860670abea4', + 'src/third_party/protobuf-javascript/src': + Var('chromium_git') + '/external/github.com/protocolbuffers/protobuf-javascript' + '@' + 'eb785a9363664a402b6336dfe96aad27fb33ffa8', 'src/third_party/libvpx/source/libvpx': - 'https://chromium.googlesource.com/webm/libvpx.git@24c0dcc8513b8c1ba4ffbf934a399f89de646ffe', + 
'https://chromium.googlesource.com/webm/libvpx.git@ff1d193f4b9dfa9b2ced51efbb6ec7a69e58e88c', 'src/third_party/libyuv': - 'https://chromium.googlesource.com/libyuv/libyuv.git@04821d1e7d60845525e8db55c7bcd41ef5be9406', + 'https://chromium.googlesource.com/libyuv/libyuv.git@1e40e34573c3861480d107cd4a4ce290df79951f', 'src/third_party/lss': { - 'url': 'https://chromium.googlesource.com/linux-syscall-support.git@ce877209e11aa69dcfffbd53ef90ea1d07136521', + 'url': 'https://chromium.googlesource.com/linux-syscall-support.git@ed31caa60f20a4f6569883b2d752ef7522de51e0', 'condition': 'checkout_android or checkout_linux', }, 'src/third_party/mockito/src': { 'url': 'https://chromium.googlesource.com/external/mockito/mockito.git@04a2a289a4222f80ad20717c25144981210d2eac', 'condition': 'checkout_android', }, + 'src/third_party/instrumented_libs': { + 'url': Var('chromium_git') + '/chromium/third_party/instrumented_libraries.git' + '@' + '69015643b3f68dbd438c010439c59adc52cac808', + 'condition': 'checkout_instrumented_libraries', + }, # Used by boringssl. 'src/third_party/nasm': { - 'url': 'https://chromium.googlesource.com/chromium/deps/nasm.git@7fc833e889d1afda72c06220e5bed8fb43b2e5ce' + 'url': 'https://chromium.googlesource.com/chromium/deps/nasm.git@9f916e90e6fc34ec302573f6ce147e43e33d68ca' }, 'src/third_party/openh264/src': - 'https://chromium.googlesource.com/external/github.com/cisco/openh264@09a4f3ec842a8932341b195c5b01e141c8a16eb7', - 'src/third_party/r8': { + 'https://chromium.googlesource.com/external/github.com/cisco/openh264@652bdb7719f30b52b08e506645a7322ff1b2cc6f', + + 'src/third_party/re2/src': + 'https://chromium.googlesource.com/external/github.com/google/re2.git@c84a140c93352cdabbfb547c531be34515b12228', + + 'src/third_party/r8/cipd': { 'packages': [ { 'package': 'chromium/third_party/r8', - 'version': 'TBaeKaSTY2ttKx2JSFuWiQ8Na80KHZwLEgSAvT1DBJ0C', + 'version': 'bA3htCoEd_EArHekDGQSNpmBzQrcby2ioG6SFyl3AtwC', }, ], 'condition': 'checkout_android', @@ -329,11 +641,11 @@ deps = { # This duplication is intentional, so we avoid updating the r8.jar used by # dexing unless necessary, since each update invalidates all incremental # dexing and unnecessarily slows down all bots. 
- 'src/third_party/r8/d8': { + 'src/third_party/r8/d8/cipd': { 'packages': [ { 'package': 'chromium/third_party/r8', - 'version': 'vw5kLlW3-suSlCKSO9OQpFWpR8oDnvQ8k1RgKNUapQYC', + 'version': '6qLey8EBp9ivhThnqVPWy2ZDGpsf5Y29EsTbi_rZ1pMC', }, ], 'condition': 'checkout_android', @@ -344,40 +656,7 @@ deps = { 'condition': 'checkout_android', }, 'src/tools': - 'https://chromium.googlesource.com/chromium/src/tools@3e78ed797e9e5308cb90f319c7330a6d44dac2c7', - - 'src/third_party/accessibility_test_framework': { - 'packages': [ - { - 'package': 'chromium/third_party/accessibility-test-framework', - 'version': 'b5ec1e56e58e56bc1a0c77d43111c37f9b512c8a', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/byte_buddy': { - 'packages': [ - { - 'package': 'chromium/third_party/byte_buddy', - 'version': 'c9b53316603fc2d997c899c7ca1707f809b918cd', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/byte_buddy/android_sdk_build_tools_25_0_2': { - 'packages': [ - { - 'package': 'chromium/third_party/android_sdk/public/build-tools', - 'version': 'kwIs2vdfTm93yEP8LG5aSnchN4BVEdVxbqQtF4XpPdkC', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, + 'https://chromium.googlesource.com/chromium/src/tools@ffcbc837bbb14d80d09147c2af5302ff6bd4bd69', 'src/third_party/espresso': { 'packages': [ @@ -390,11 +669,11 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/hamcrest': { + 'src/third_party/hamcrest/cipd': { 'packages': [ { 'package': 'chromium/third_party/hamcrest', - 'version': '37eccfc658fe79695d6abb6dd497463c4372032f', + 'version': 'dBioOAmFJjqAr_DY7dipbXdVfAxUQwjOBNibMPtX8lQC', }, ], 'condition': 'checkout_android', @@ -405,29 +684,29 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_toolchain/android_toolchain', - 'version': 'R_8suM8m0oHbZ1awdxGXvKEFpAOETscbfZxkkMthyk8C', + 'version': 'KXOia11cm9lVdUdPlbGLu8sCz6Y4ey_HV2s8_8qeqhgC', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/androidx': { + 'src/third_party/androidx/cipd': { 'packages': [ { 'package': 'chromium/third_party/androidx', - 'version': '2n47PFweHFzGxPWjh9RANTrGhmSDWowZ-YhkOV4j11MC', + 'version': 'cxEyTzVT7uuNifSPlUiuKlb4tSo35_YQjBbMVmgZeCwC', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_build_tools/manifest_merger': { + 'src/third_party/android_build_tools/manifest_merger/cipd': { 'packages': [ { 'package': 'chromium/third_party/android_build_tools/manifest_merger', - 'version': 'kkbYOGsVRXhtxBiXuTufY0puTnG5QAfyxvFTBHFWL08C', + 'version': 'HhXknpcPosMUDHZQD-8Ogve-HaG4HmJ4p082zKKAcnMC', }, ], 'condition': 'checkout_android', @@ -437,76 +716,53 @@ deps = { 'src/third_party/android_sdk/public': { 'packages': [ { - 'package': 'chromium/third_party/android_sdk/public/build-tools/34.0.0', - 'version': 'YK9Rzw3fDzMHVzatNN6VlyoD_81amLZpN1AbmkdOd6AC', + 'package': 'chromium/third_party/android_sdk/public/build-tools/36.0.0', + 'version': 'y3EsZLg4bxPmpW0oYsAHylywNyMnIwPS3kh1VbQLAFAC', }, { 'package': 'chromium/third_party/android_sdk/public/emulator', 'version': '9lGp8nTUCRRWGMnI_96HcKfzjnxEJKUcfvfwmA3wXNkC', }, - { - 'package': 'chromium/third_party/android_sdk/public/patcher', - 'version': 'I6FNMhrXlpB-E1lOhMlvld7xt9lBVNOO83KIluXDyA0C', - }, { 'package': 'chromium/third_party/android_sdk/public/platform-tools', - 'version': 'HWVsGs2HCKgSVv41FsOcsfJbNcB0UFiNrF6Tc4yRArYC', - }, - { - 'package': 
'chromium/third_party/android_sdk/public/platforms/android-34', - 'version': 'u-bhWbTME6u-DjypTgr3ZikCyeAeU6txkR9ET6Uudc8C', - }, - { - 'package': 'chromium/third_party/android_sdk/public/platforms/android-tiramisuprivacysandbox', - 'version': 'YWMYkzyxGBgVsty0GhXL1oxbY0pGXQIgFc0Rh7ZMRPYC', + 'version': 'mjFmRj7k_XR9yj60pYbr9mG38FyEbU5oWdU56bZQ5cwC' }, { - 'package': 'chromium/third_party/android_sdk/public/sources/android-31', - 'version': '_a_BcnANjPYw5mSKlNHa7GFY8yc1kdqj2rmQgac7yUcC', + 'package': 'chromium/third_party/android_sdk/public/platforms/android-36', + 'version': '_YHemUrK49JrE7Mctdf5DDNOHu1VKBx_PTcWnZ-cbOAC', }, { 'package': 'chromium/third_party/android_sdk/public/cmdline-tools', - 'version': 'Sy00LuyBIUJdRGYKwg0zjWH8eAIUvgnnNiPkI8etaZYC', + 'version': 'gekOVsZjseS1w9BXAT3FsoW__ByGDJYS9DgqesiwKYoC', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/icu4j': { + 'src/third_party/icu4j/cipd': { 'packages': [ { 'package': 'chromium/third_party/icu4j', - 'version': 'e87e5bed2b4935913ee26a3ebd0b723ee2344354', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/objenesis': { - 'packages': [ - { - 'package': 'chromium/third_party/objenesis', - 'version': 'tknDblENYi8IaJYyD6tUahUyHYZlzJ_Y74_QZSz4DpIC', + 'version': '8dV7WRVX0tTaNNqkLEnCA_dMofr2MJXFK400E7gOFygC', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/robolectric': { + 'src/third_party/robolectric/cipd': { 'packages': [ { 'package': 'chromium/third_party/robolectric', - 'version': 'hzetqh1qFI32FOgQroZvGcGdomrgVBJ6WKRnl1KFw6EC', + 'version': 'G3VkWqTv1YWDvC6zCrL34iQREzrzdBmSL4GMboAIiAEC', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/sqlite4java': { + 'src/third_party/sqlite4java/cipd': { 'packages': [ { 'package': 'chromium/third_party/sqlite4java', @@ -517,30 +773,40 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/turbine': { + 'src/third_party/turbine/cipd': { 'packages': [ { 'package': 'chromium/third_party/turbine', - 'version': 'ZlMS4BOYyYmbU8BuBDGyW7QrkvZ_-pTkm4lH4jKjTi4C', + 'version': 'scfGptWnO9bwzbg-jr0mcnVO3NG5KQJvlAQd_JSD5QUC', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, + 'src/third_party/zstd/src': { + 'url': Var('chromium_git') + '/external/github.com/facebook/zstd.git' + '@' + 'd654fca78690fa15cceb8058ac47454d914a0e63', + 'condition': 'checkout_android', + }, + 'src/tools/luci-go': { 'packages': [ + { + 'package': 'infra/tools/luci/cas/${{platform}}', + 'version': 'git_revision:fd48b8efe637ea5f431fc371f1617b215b7ba1a2', + }, { 'package': 'infra/tools/luci/isolate/${{platform}}', - 'version': 'git_revision:fe3cfd422b1012c2c8cf00d65cdb11aa2c26cd66', + 'version': 'git_revision:fd48b8efe637ea5f431fc371f1617b215b7ba1a2', }, { 'package': 'infra/tools/luci/swarming/${{platform}}', - 'version': 'git_revision:fe3cfd422b1012c2c8cf00d65cdb11aa2c26cd66', - }, + 'version': 'git_revision:fd48b8efe637ea5f431fc371f1617b215b7ba1a2', + } ], 'dep_type': 'cipd', }, + 'src/third_party/pipewire/linux-amd64': { 'packages': [ { @@ -557,1821 +823,1260 @@ deps = { 'dep_type': 'cipd', }, - # Everything coming after this is automatically updated by the auto-roller. 
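The generated section that follows is maintained by the android_deps auto-roller, and its entries are gated on 'checkout_android and non_git_source'. Because DEPS is plain Python syntax, it is straightforward to list which deps a condition like non_git_source actually gates; a rough sketch that stubs only Var() and is no substitute for gclient's full evaluation:

# Sketch: list the deps in this file that are gated on non_git_source.
# gclient evaluates DEPS with helpers such as Var(), so a minimal Var()
# stub is enough to read the dicts back out of this file.
scope = {}
scope["Var"] = lambda name: scope["vars"][name]
exec(open("DEPS").read(), scope)

for path, spec in sorted(scope["deps"].items()):
    condition = spec.get("condition", "") if isinstance(spec, dict) else ""
    if "non_git_source" in condition:
        print(path, "->", condition)
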
- # === ANDROID_DEPS Generated Code Start === - # Generated by //third_party/android_deps/fetch_all.py - 'src/third_party/android_deps/libs/android_arch_core_common': { + 'src/third_party/android_deps/autorolled/cipd': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/android_arch_core_common', - 'version': 'version:2@1.1.1.cr1', + 'package': 'chromium/third_party/android_deps/autorolled', + 'version': 'yZGuDQ9pDcy816oWVPUC2zMwLtFUfGt4W2PmIYEWTcAC', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/android_arch_core_runtime': { + # Everything coming after this is automatically updated by the auto-roller. + # === ANDROID_DEPS Generated Code Start === + # Generated by //third_party/android_deps/fetch_all.py + 'src/third_party/android_deps/cipd/libs/com_android_support_support_annotations': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/android_arch_core_runtime', - 'version': 'version:2@1.1.1.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_annotations', + 'version': 'version:2@28.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/android_arch_lifecycle_common': { + 'src/third_party/android_deps/cipd/libs/com_android_tools_common': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/android_arch_lifecycle_common', - 'version': 'version:2@1.1.1.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_android_tools_common', + 'version': 'version:2@30.2.0-beta01.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/android_arch_lifecycle_common_java8': { + 'src/third_party/android_deps/cipd/libs/com_android_tools_layoutlib_layoutlib_api': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/android_arch_lifecycle_common_java8', - 'version': 'version:2@1.1.1.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_android_tools_layoutlib_layoutlib_api', + 'version': 'version:2@30.2.0-beta01.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/android_arch_lifecycle_livedata': { + 'src/third_party/android_deps/cipd/libs/com_android_tools_sdk_common': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/android_arch_lifecycle_livedata', - 'version': 'version:2@1.1.1.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_android_tools_sdk_common', + 'version': 'version:2@30.2.0-beta01.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/android_arch_lifecycle_livedata_core': { + 'src/third_party/android_deps/cipd/libs/com_google_android_apps_common_testing_accessibility_framework_accessibility_test_framework': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/android_arch_lifecycle_livedata_core', - 'version': 'version:2@1.1.1.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_apps_common_testing_accessibility_framework_accessibility_test_framework', + 'version': 'version:2@4.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and 
non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/android_arch_lifecycle_runtime': { + 'src/third_party/android_deps/cipd/libs/com_google_android_datatransport_transport_api': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/android_arch_lifecycle_runtime', - 'version': 'version:2@1.1.1.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_datatransport_transport_api', + 'version': 'version:2@4.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/android_arch_lifecycle_viewmodel': { + 'src/third_party/android_deps/cipd/libs/com_google_android_datatransport_transport_backend_cct': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/android_arch_lifecycle_viewmodel', - 'version': 'version:2@1.1.1.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_datatransport_transport_backend_cct', + 'version': 'version:2@4.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_animated_vector_drawable': { + 'src/third_party/android_deps/cipd/libs/com_google_android_datatransport_transport_runtime': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_animated_vector_drawable', - 'version': 'version:2@28.0.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_datatransport_transport_runtime', + 'version': 'version:2@4.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_appcompat_v7': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_auth': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_appcompat_v7', - 'version': 'version:2@28.0.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_auth', + 'version': 'version:2@21.3.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_asynclayoutinflater': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_auth_api_phone': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_asynclayoutinflater', - 'version': 'version:2@28.0.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_auth_api_phone', + 'version': 'version:2@18.1.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_cardview_v7': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_auth_base': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_cardview_v7', - 'version': 'version:2@28.0.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_auth_base', + 'version': 'version:2@18.1.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_collections': { 
+ 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_auth_blockstore': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_collections', - 'version': 'version:2@28.0.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_auth_blockstore', + 'version': 'version:2@16.4.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_coordinatorlayout': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_base': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_coordinatorlayout', - 'version': 'version:2@28.0.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_base', + 'version': 'version:2@18.5.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_cursoradapter': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_basement': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_cursoradapter', - 'version': 'version:2@28.0.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_basement', + 'version': 'version:2@18.5.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_customview': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_cast': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_customview', - 'version': 'version:2@28.0.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_cast', + 'version': 'version:2@22.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_design': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_cast_framework': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_design', - 'version': 'version:2@28.0.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_cast_framework', + 'version': 'version:2@22.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_documentfile': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_clearcut': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_documentfile', - 'version': 'version:2@28.0.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_clearcut', + 'version': 'version:2@17.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_drawerlayout': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_cloud_messaging': { 'packages': [ { - 'package': 
'chromium/third_party/android_deps/libs/com_android_support_drawerlayout', - 'version': 'version:2@28.0.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_cloud_messaging', + 'version': 'version:2@17.2.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_interpolator': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_fido': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_interpolator', - 'version': 'version:2@28.0.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_fido', + 'version': 'version:2@21.1.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_loader': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_flags': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_loader', - 'version': 'version:2@28.0.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_flags', + 'version': 'version:2@18.1.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_localbroadcastmanager': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_gcm': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_localbroadcastmanager', - 'version': 'version:2@28.0.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_gcm', + 'version': 'version:2@17.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_multidex': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_identity_credentials': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_multidex', - 'version': 'version:2@1.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_support_print': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_print', - 'version': 'version:2@28.0.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_identity_credentials', + 'version': 'version:2@16.0.0-alpha05.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_android_support_recyclerview_v7': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_recyclerview_v7', - 'version': 'version:2@28.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_support_slidingpanelayout': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_slidingpanelayout', - 'version': 'version:2@28.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 
'src/third_party/android_deps/libs/com_android_support_support_annotations': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_annotations', - 'version': 'version:2@28.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_support_support_compat': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_compat', - 'version': 'version:2@28.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_support_support_core_ui': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_core_ui', - 'version': 'version:2@28.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_support_support_core_utils': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_core_utils', - 'version': 'version:2@28.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_support_support_fragment': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_fragment', - 'version': 'version:2@28.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_support_support_media_compat': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_media_compat', - 'version': 'version:2@28.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_support_support_v4': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_v4', - 'version': 'version:2@28.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_support_support_vector_drawable': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_vector_drawable', - 'version': 'version:2@28.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_support_swiperefreshlayout': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_swiperefreshlayout', - 'version': 'version:2@28.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_support_transition': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_transition', - 'version': 'version:2@28.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_support_versionedparcelable': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_versionedparcelable', - 'version': 'version:2@28.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_support_viewpager': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_support_viewpager', - 'version': 'version:2@28.0.0.cr1', - }, - ], - 
'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_tools_common': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_tools_common', - 'version': 'version:2@30.2.0-beta01.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_tools_layoutlib_layoutlib_api': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_tools_layoutlib_layoutlib_api', - 'version': 'version:2@30.2.0-beta01.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_android_tools_sdk_common': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_android_tools_sdk_common', - 'version': 'version:2@30.2.0-beta01.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_github_ben_manes_caffeine_caffeine': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_github_ben_manes_caffeine_caffeine', - 'version': 'version:2@2.8.8.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_github_kevinstern_software_and_algorithms': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_github_kevinstern_software_and_algorithms', - 'version': 'version:2@1.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_android_annotations': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_annotations', - 'version': 'version:2@4.1.1.4.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_android_apps_common_testing_accessibility_framework_accessibility_test_framework': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_apps_common_testing_accessibility_framework_accessibility_test_framework', - 'version': 'version:2@4.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_android_datatransport_transport_api': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_datatransport_transport_api', - 'version': 'version:2@2.2.1.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_auth': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_auth', - 'version': 'version:2@20.1.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_auth_api_phone': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_auth_api_phone', - 'version': 'version:2@18.0.1.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_auth_base': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_auth_base', - 'version': 'version:2@18.0.2.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - 
}, - - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_base': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_base', - 'version': 'version:2@18.0.1.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_basement': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_basement', - 'version': 'version:2@18.1.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_cast': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_cast', - 'version': 'version:2@17.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_cast_framework': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_cast_framework', - 'version': 'version:2@17.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_clearcut': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_clearcut', - 'version': 'version:2@17.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_cloud_messaging': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_cloud_messaging', - 'version': 'version:2@16.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_flags': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_flags', - 'version': 'version:2@17.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_gcm': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_gcm', - 'version': 'version:2@17.0.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_iid': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_iid': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_iid', 'version': 'version:2@17.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_instantapps': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_instantapps': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_instantapps', - 'version': 'version:2@18.0.1.cr1', + 'version': 'version:2@18.1.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 
'src/third_party/android_deps/libs/com_google_android_gms_play_services_location': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_location': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_location', - 'version': 'version:2@19.0.1.cr1', + 'version': 'version:2@21.3.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_phenotype': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_phenotype': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_phenotype', 'version': 'version:2@17.0.0.cr1', }, ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_places_placereport': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_places_placereport', - 'version': 'version:2@17.0.0.cr1', - }, - ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_stats': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_stats': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_stats', - 'version': 'version:2@17.0.0.cr1', + 'version': 'version:2@17.1.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_tasks': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_tasks': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_tasks', - 'version': 'version:2@18.0.2.cr1', + 'version': 'version:2@18.2.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_vision': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_vision': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_vision', 'version': 'version:2@20.1.3.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_android_gms_play_services_vision_common': { + 'src/third_party/android_deps/cipd/libs/com_google_android_gms_play_services_vision_common': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_vision_common', 'version': 'version:2@19.1.3.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_android_material_material': { + 'src/third_party/android_deps/cipd/libs/com_google_android_libraries_identity_googleid_googleid': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_material_material', - 'version': 'version:2@1.7.0-alpha02.cr1', + 'package': 
'chromium/third_party/android_deps/libs/com_google_android_libraries_identity_googleid_googleid', + 'version': 'version:2@1.1.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_android_play_core_common': { + 'src/third_party/android_deps/cipd/libs/com_google_android_material_material': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_play_core_common', - 'version': 'version:2@2.0.2.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_material_material', + 'version': 'version:2@1.13.0-alpha05.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_android_play_feature_delivery': { + 'src/third_party/android_deps/cipd/libs/com_google_android_play_core_common': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_android_play_feature_delivery', - 'version': 'version:2@2.0.1.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_play_core_common', + 'version': 'version:2@2.0.3.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_auto_auto_common': { + 'src/third_party/android_deps/cipd/libs/com_google_android_play_feature_delivery': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_auto_auto_common', - 'version': 'version:2@1.2.1.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_android_play_feature_delivery', + 'version': 'version:2@2.1.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_auto_service_auto_service': { + 'src/third_party/android_deps/cipd/libs/com_google_ar_impress': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_auto_service_auto_service', - 'version': 'version:2@1.0-rc6.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_ar_impress', + 'version': 'version:2@0.0.2.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_auto_service_auto_service_annotations': { + 'src/third_party/android_deps/cipd/libs/com_google_auto_service_auto_service_annotations': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_auto_service_auto_service_annotations', 'version': 'version:2@1.0-rc6.cr1', }, ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_auto_value_auto_value_annotations': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_auto_value_auto_value_annotations', - 'version': 'version:2@1.10.1.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_code_findbugs_jsr305': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_code_findbugs_jsr305', - 'version': 'version:2@3.0.2.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_code_gson_gson': { - 'packages': [ - { - 'package': 
'chromium/third_party/android_deps/libs/com_google_code_gson_gson', - 'version': 'version:2@2.9.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_dagger_dagger': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_dagger_dagger', - 'version': 'version:2@2.30.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_dagger_dagger_compiler': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_dagger_dagger_compiler', - 'version': 'version:2@2.30.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_dagger_dagger_producers': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_dagger_dagger_producers', - 'version': 'version:2@2.30.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_dagger_dagger_spi': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_dagger_dagger_spi', - 'version': 'version:2@2.30.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_errorprone_error_prone_annotation': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_annotation', - 'version': 'version:2@2.11.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_errorprone_error_prone_annotations': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_annotations', - 'version': 'version:2@2.18.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_errorprone_error_prone_check_api': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_check_api', - 'version': 'version:2@2.11.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_errorprone_error_prone_core': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_core', - 'version': 'version:2@2.11.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_errorprone_error_prone_type_annotations': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_type_annotations', - 'version': 'version:2@2.11.0.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_errorprone_javac': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_javac', - 'version': 'version:2@9+181-r4173-1.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/com_google_errorprone_javac_shaded': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_javac_shaded', - 'version': 'version:2@9-dev-r4023-3.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 
'src/third_party/android_deps/libs/com_google_firebase_firebase_annotations': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_annotations', - 'version': 'version:2@16.0.0.cr1', - }, - ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_firebase_firebase_common': { + 'src/third_party/android_deps/cipd/libs/com_google_auto_value_auto_value_annotations': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_common', - 'version': 'version:2@19.5.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_auto_value_auto_value_annotations', + 'version': 'version:2@1.11.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_firebase_firebase_components': { + 'src/third_party/android_deps/cipd/libs/com_google_code_findbugs_jsr305': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_components', - 'version': 'version:2@16.1.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_code_findbugs_jsr305', + 'version': 'version:2@3.0.2.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_firebase_firebase_encoders': { + 'src/third_party/android_deps/cipd/libs/com_google_code_gson_gson': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_encoders', - 'version': 'version:2@16.1.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_code_gson_gson', + 'version': 'version:2@2.8.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_firebase_firebase_encoders_json': { + 'src/third_party/android_deps/cipd/libs/com_google_dagger_dagger': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_encoders_json', - 'version': 'version:2@17.1.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_dagger_dagger', + 'version': 'version:2@2.52.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_firebase_firebase_iid': { + 'src/third_party/android_deps/cipd/libs/com_google_errorprone_error_prone_annotations': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_iid', - 'version': 'version:2@21.0.1.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_annotations', + 'version': 'version:2@2.30.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_firebase_firebase_iid_interop': { + 'src/third_party/android_deps/cipd/libs/com_google_firebase_firebase_annotations': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_iid_interop', - 'version': 'version:2@17.0.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_annotations', + 'version': 'version:2@16.2.0.cr1', 
}, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_firebase_firebase_installations': { + 'src/third_party/android_deps/cipd/libs/com_google_firebase_firebase_common': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_installations', - 'version': 'version:2@16.3.5.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_common', + 'version': 'version:2@21.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_firebase_firebase_installations_interop': { + 'src/third_party/android_deps/cipd/libs/com_google_firebase_firebase_common_ktx': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_installations_interop', - 'version': 'version:2@16.0.1.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_common_ktx', + 'version': 'version:2@21.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_firebase_firebase_measurement_connector': { + 'src/third_party/android_deps/cipd/libs/com_google_firebase_firebase_components': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_measurement_connector', + 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_components', 'version': 'version:2@18.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_firebase_firebase_messaging': { + 'src/third_party/android_deps/cipd/libs/com_google_firebase_firebase_datatransport': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_messaging', - 'version': 'version:2@21.0.1.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_datatransport', + 'version': 'version:2@19.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_googlejavaformat_google_java_format': { + 'src/third_party/android_deps/cipd/libs/com_google_firebase_firebase_encoders': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_googlejavaformat_google_java_format', - 'version': 'version:2@1.5.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_encoders', + 'version': 'version:2@17.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_guava_failureaccess': { + 'src/third_party/android_deps/cipd/libs/com_google_firebase_firebase_encoders_json': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_guava_failureaccess', - 'version': 'version:2@1.0.1.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_encoders_json', + 'version': 'version:2@18.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 
'src/third_party/android_deps/libs/com_google_guava_guava': { + 'src/third_party/android_deps/cipd/libs/com_google_firebase_firebase_encoders_proto': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_guava_guava', - 'version': 'version:2@31.1-jre.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_encoders_proto', + 'version': 'version:2@16.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_guava_guava_android': { + 'src/third_party/android_deps/cipd/libs/com_google_firebase_firebase_iid': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_guava_guava_android', - 'version': 'version:2@31.1-android.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_iid', + 'version': 'version:2@21.1.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_guava_listenablefuture': { + 'src/third_party/android_deps/cipd/libs/com_google_firebase_firebase_iid_interop': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_guava_listenablefuture', - 'version': 'version:2@1.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_iid_interop', + 'version': 'version:2@17.1.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_j2objc_j2objc_annotations': { + 'src/third_party/android_deps/cipd/libs/com_google_firebase_firebase_installations': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_j2objc_j2objc_annotations', - 'version': 'version:2@1.3.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_installations', + 'version': 'version:2@17.2.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_protobuf_protobuf_java': { + 'src/third_party/android_deps/cipd/libs/com_google_firebase_firebase_installations_interop': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_protobuf_protobuf_java', - 'version': 'version:2@3.19.2.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_installations_interop', + 'version': 'version:2@17.1.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_google_protobuf_protobuf_javalite': { + 'src/third_party/android_deps/cipd/libs/com_google_firebase_firebase_measurement_connector': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_google_protobuf_protobuf_javalite', - 'version': 'version:2@3.21.1.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_measurement_connector', + 'version': 'version:2@20.0.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_googlecode_java_diff_utils_diffutils': { + 'src/third_party/android_deps/cipd/libs/com_google_firebase_firebase_messaging': { 'packages': [ { - 'package': 
'chromium/third_party/android_deps/libs/com_googlecode_java_diff_utils_diffutils', - 'version': 'version:2@1.3.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_firebase_firebase_messaging', + 'version': 'version:2@24.1.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_squareup_javapoet': { + 'src/third_party/android_deps/cipd/libs/com_google_guava_failureaccess': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_squareup_javapoet', - 'version': 'version:2@1.13.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_guava_failureaccess', + 'version': 'version:2@1.0.2.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_squareup_javawriter': { + 'src/third_party/android_deps/cipd/libs/com_google_guava_guava': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_squareup_javawriter', - 'version': 'version:2@2.1.1.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_guava_guava', + 'version': 'version:2@33.3.1-jre.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_squareup_okio_okio_jvm': { + 'src/third_party/android_deps/cipd/libs/com_google_guava_guava_android': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_squareup_okio_okio_jvm', - 'version': 'version:2@3.3.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_guava_guava_android', + 'version': 'version:2@33.3.1-android.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/com_squareup_wire_wire_runtime_jvm': { + 'src/third_party/android_deps/cipd/libs/com_google_j2objc_j2objc_annotations': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/com_squareup_wire_wire_runtime_jvm', - 'version': 'version:2@4.7.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_j2objc_j2objc_annotations', + 'version': 'version:2@3.0.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/io_github_java_diff_utils_java_diff_utils': { + 'src/third_party/android_deps/cipd/libs/com_google_protobuf_protobuf_javalite': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/io_github_java_diff_utils_java_diff_utils', - 'version': 'version:2@4.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_protobuf_protobuf_javalite', + 'version': 'version:2@4.28.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/io_grpc_grpc_api': { + 'src/third_party/android_deps/cipd/libs/com_google_testparameterinjector_test_parameter_injector': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/io_grpc_grpc_api', - 'version': 'version:2@1.49.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_google_testparameterinjector_test_parameter_injector', + 'version': 'version:2@1.18.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 
'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/io_grpc_grpc_binder': { + 'src/third_party/android_deps/cipd/libs/com_googlecode_java_diff_utils_diffutils': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/io_grpc_grpc_binder', - 'version': 'version:2@1.49.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_googlecode_java_diff_utils_diffutils', + 'version': 'version:2@1.3.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/io_grpc_grpc_context': { + 'src/third_party/android_deps/cipd/libs/com_squareup_javapoet': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/io_grpc_grpc_context', - 'version': 'version:2@1.49.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_squareup_javapoet', + 'version': 'version:2@1.13.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/io_grpc_grpc_core': { + 'src/third_party/android_deps/cipd/libs/com_squareup_moshi_moshi': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/io_grpc_grpc_core', - 'version': 'version:2@1.49.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_squareup_moshi_moshi', + 'version': 'version:2@1.15.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/io_grpc_grpc_protobuf_lite': { + 'src/third_party/android_deps/cipd/libs/com_squareup_moshi_moshi_adapters': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/io_grpc_grpc_protobuf_lite', - 'version': 'version:2@1.49.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_squareup_moshi_moshi_adapters', + 'version': 'version:2@1.15.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/io_grpc_grpc_stub': { + 'src/third_party/android_deps/cipd/libs/com_squareup_okio_okio_jvm': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/io_grpc_grpc_stub', - 'version': 'version:2@1.49.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_squareup_okio_okio_jvm', + 'version': 'version:2@3.9.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/io_perfmark_perfmark_api': { + 'src/third_party/android_deps/cipd/libs/com_squareup_wire_wire_runtime_jvm': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/io_perfmark_perfmark_api', - 'version': 'version:2@0.25.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/com_squareup_wire_wire_runtime_jvm', + 'version': 'version:2@5.2.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/javax_annotation_javax_annotation_api': { + 'src/third_party/android_deps/cipd/libs/jakarta_inject_jakarta_inject_api': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/javax_annotation_javax_annotation_api', - 'version': 'version:2@1.3.2.cr1', + 'package': 'chromium/third_party/android_deps/libs/jakarta_inject_jakarta_inject_api', + 'version': 'version:2@2.0.1.cr1', }, ], - 'condition': 'checkout_android', + 
'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/javax_annotation_jsr250_api': { + 'src/third_party/android_deps/cipd/libs/javax_annotation_javax_annotation_api': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/javax_annotation_jsr250_api', - 'version': 'version:2@1.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/javax_annotation_javax_annotation_api', + 'version': 'version:2@1.3.2.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/javax_inject_javax_inject': { + 'src/third_party/android_deps/cipd/libs/javax_inject_javax_inject': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/javax_inject_javax_inject', 'version': 'version:2@1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/net_bytebuddy_byte_buddy': { + 'src/third_party/android_deps/cipd/libs/net_bytebuddy_byte_buddy': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/net_bytebuddy_byte_buddy', - 'version': 'version:2@1.14.5.cr1', + 'version': 'version:2@1.14.12.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/net_bytebuddy_byte_buddy_agent': { + 'src/third_party/android_deps/cipd/libs/net_bytebuddy_byte_buddy_agent': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/net_bytebuddy_byte_buddy_agent', - 'version': 'version:2@1.14.5.cr1', + 'version': 'version:2@1.14.12.cr1', }, ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/net_ltgt_gradle_incap_incap': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/net_ltgt_gradle_incap_incap', - 'version': 'version:2@0.2.cr1', - }, - ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_bouncycastle_bcprov_jdk18on': { + 'src/third_party/android_deps/cipd/libs/org_bouncycastle_bcprov_jdk18on': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_bouncycastle_bcprov_jdk18on', - 'version': 'version:2@1.72.cr1', + 'version': 'version:2@1.78.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_ccil_cowan_tagsoup_tagsoup': { + 'src/third_party/android_deps/cipd/libs/org_ccil_cowan_tagsoup_tagsoup': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_ccil_cowan_tagsoup_tagsoup', 'version': 'version:2@1.2.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_checkerframework_checker_compat_qual': { + 'src/third_party/android_deps/cipd/libs/org_checkerframework_checker_compat_qual': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_checker_compat_qual', 'version': 'version:2@2.5.5.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_checkerframework_checker_qual': { + 
'src/third_party/android_deps/cipd/libs/org_checkerframework_checker_qual': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_checker_qual', - 'version': 'version:2@3.25.0.cr1', + 'version': 'version:2@3.43.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_checkerframework_checker_util': { + 'src/third_party/android_deps/cipd/libs/org_checkerframework_checker_util': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_checker_util', 'version': 'version:2@3.25.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_checkerframework_dataflow_errorprone': { + 'src/third_party/android_deps/cipd/libs/org_codehaus_mojo_animal_sniffer_annotations': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_dataflow_errorprone', - 'version': 'version:2@3.15.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/org_codehaus_mojo_animal_sniffer_annotations', + 'version': 'version:2@1.17.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_codehaus_mojo_animal_sniffer_annotations': { + 'src/third_party/android_deps/cipd/libs/org_conscrypt_conscrypt_openjdk_uber': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_codehaus_mojo_animal_sniffer_annotations', - 'version': 'version:2@1.21.cr1', + 'package': 'chromium/third_party/android_deps/libs/org_conscrypt_conscrypt_openjdk_uber', + 'version': 'version:2@2.5.2.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_conscrypt_conscrypt_openjdk_uber': { + 'src/third_party/android_deps/cipd/libs/org_jetbrains_kotlin_kotlin_android_extensions_runtime': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_conscrypt_conscrypt_openjdk_uber', - 'version': 'version:2@2.5.2.cr1', + 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_android_extensions_runtime', + 'version': 'version:2@1.9.22.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_eclipse_jgit_org_eclipse_jgit': { + 'src/third_party/android_deps/cipd/libs/org_jetbrains_kotlin_kotlin_parcelize_runtime': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_eclipse_jgit_org_eclipse_jgit', - 'version': 'version:2@4.4.1.201607150455-r.cr1', + 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_parcelize_runtime', + 'version': 'version:2@1.9.22.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_hamcrest_hamcrest': { + 'src/third_party/android_deps/cipd/libs/org_jetbrains_kotlinx_atomicfu_jvm': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_hamcrest_hamcrest', - 'version': 'version:2@2.2.cr1', + 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_atomicfu_jvm', + 'version': 'version:2@0.23.2.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 
'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_stdlib_jdk7': { + 'src/third_party/android_deps/cipd/libs/org_jetbrains_kotlinx_kotlinx_coroutines_android': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_stdlib_jdk7', - 'version': 'version:2@1.8.20.cr1', + 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_android', + 'version': 'version:2@1.8.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_stdlib_jdk8': { + 'src/third_party/android_deps/cipd/libs/org_jetbrains_kotlinx_kotlinx_coroutines_core_jvm': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_stdlib_jdk8', - 'version': 'version:2@1.8.20.cr1', + 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_core_jvm', + 'version': 'version:2@1.10.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_android': { + 'src/third_party/android_deps/cipd/libs/org_jetbrains_kotlinx_kotlinx_coroutines_guava': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_android', - 'version': 'version:2@1.6.4.cr1', + 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_guava', + 'version': 'version:2@1.8.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_core_jvm': { + 'src/third_party/android_deps/cipd/libs/org_jetbrains_kotlinx_kotlinx_coroutines_play_services': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_core_jvm', - 'version': 'version:2@1.6.4.cr1', + 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_play_services', + 'version': 'version:2@1.10.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_guava': { + 'src/third_party/android_deps/cipd/libs/org_jetbrains_kotlinx_kotlinx_coroutines_test_jvm': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_guava', - 'version': 'version:2@1.6.4.cr1', + 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_coroutines_test_jvm', + 'version': 'version:2@1.7.3.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_metadata_jvm': { + 'src/third_party/android_deps/cipd/libs/org_jetbrains_kotlinx_kotlinx_serialization_core_jvm': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_metadata_jvm', - 'version': 'version:2@0.1.0.cr1', + 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_serialization_core_jvm', + 'version': 'version:2@1.7.2.cr1', }, ], - 'condition': 'checkout_android', + 
'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_jsoup_jsoup': { + 'src/third_party/android_deps/cipd/libs/org_jsoup_jsoup': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_jsoup_jsoup', 'version': 'version:2@1.15.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/cipd/libs/org_jspecify_jspecify': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_jspecify_jspecify', + 'version': 'version:2@1.0.0.cr1', + }, + ], + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_mockito_mockito_android': { + 'src/third_party/android_deps/cipd/libs/org_mockito_mockito_android': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_mockito_mockito_android', - 'version': 'version:2@5.4.0.cr1', + 'version': 'version:2@5.11.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_mockito_mockito_core': { + 'src/third_party/android_deps/cipd/libs/org_mockito_mockito_core': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_mockito_mockito_core', - 'version': 'version:2@5.4.0.cr1', + 'version': 'version:2@5.11.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_mockito_mockito_subclass': { + 'src/third_party/android_deps/cipd/libs/org_mockito_mockito_subclass': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_mockito_mockito_subclass', - 'version': 'version:2@5.4.0.cr1', + 'version': 'version:2@5.11.0.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_objenesis_objenesis': { + 'src/third_party/android_deps/cipd/libs/org_objenesis_objenesis': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_objenesis_objenesis', 'version': 'version:2@3.3.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_ow2_asm_asm': { + 'src/third_party/android_deps/cipd/libs/org_ow2_asm_asm': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm', - 'version': 'version:2@9.5.cr1', + 'version': 'version:2@9.7.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_ow2_asm_asm_analysis': { + 'src/third_party/android_deps/cipd/libs/org_ow2_asm_asm_analysis': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm_analysis', - 'version': 'version:2@9.5.cr1', + 'version': 'version:2@9.7.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_ow2_asm_asm_commons': { + 'src/third_party/android_deps/cipd/libs/org_ow2_asm_asm_commons': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm_commons', - 'version': 'version:2@9.5.cr1', + 'version': 'version:2@9.7.1.cr1', }, ], - 'condition': 'checkout_android', + 
'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_ow2_asm_asm_tree': { + 'src/third_party/android_deps/cipd/libs/org_ow2_asm_asm_tree': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm_tree', - 'version': 'version:2@9.5.cr1', + 'version': 'version:2@9.7.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_ow2_asm_asm_util': { + 'src/third_party/android_deps/cipd/libs/org_ow2_asm_asm_util': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm_util', - 'version': 'version:2@9.5.cr1', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/org_pcollections_pcollections': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/org_pcollections_pcollections', - 'version': 'version:2@3.1.4.cr1', + 'version': 'version:2@9.7.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_robolectric_annotations': { + 'src/third_party/android_deps/cipd/libs/org_robolectric_annotations': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_robolectric_annotations', - 'version': 'version:2@4.10.3.cr1', + 'version': 'version:2@4.14.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_robolectric_junit': { + 'src/third_party/android_deps/cipd/libs/org_robolectric_junit': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_robolectric_junit', - 'version': 'version:2@4.10.3.cr1', + 'version': 'version:2@4.14.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_robolectric_nativeruntime': { + 'src/third_party/android_deps/cipd/libs/org_robolectric_nativeruntime': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_robolectric_nativeruntime', - 'version': 'version:2@4.10.3.cr1', + 'version': 'version:2@4.14.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_robolectric_nativeruntime_dist_compat': { + 'src/third_party/android_deps/cipd/libs/org_robolectric_nativeruntime_dist_compat': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_robolectric_nativeruntime_dist_compat', - 'version': 'version:2@1.0.1.cr1', + 'version': 'version:2@1.0.16.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_robolectric_pluginapi': { + 'src/third_party/android_deps/cipd/libs/org_robolectric_pluginapi': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_robolectric_pluginapi', - 'version': 'version:2@4.10.3.cr1', + 'version': 'version:2@4.14.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_robolectric_plugins_maven_dependency_resolver': { + 'src/third_party/android_deps/cipd/libs/org_robolectric_plugins_maven_dependency_resolver': { 
'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_robolectric_plugins_maven_dependency_resolver', - 'version': 'version:2@4.10.3.cr1', + 'version': 'version:2@4.14.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_robolectric_resources': { + 'src/third_party/android_deps/cipd/libs/org_robolectric_resources': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_robolectric_resources', - 'version': 'version:2@4.10.3.cr1', + 'version': 'version:2@4.14.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_robolectric_robolectric': { + 'src/third_party/android_deps/cipd/libs/org_robolectric_robolectric': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_robolectric_robolectric', - 'version': 'version:2@4.10.3.cr1', + 'version': 'version:2@4.14.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_robolectric_sandbox': { + 'src/third_party/android_deps/cipd/libs/org_robolectric_sandbox': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_robolectric_sandbox', - 'version': 'version:2@4.10.3.cr1', + 'version': 'version:2@4.14.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_robolectric_shadowapi': { + 'src/third_party/android_deps/cipd/libs/org_robolectric_shadowapi': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadowapi', - 'version': 'version:2@4.10.3.cr1', + 'version': 'version:2@4.14.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_robolectric_shadows_framework': { + 'src/third_party/android_deps/cipd/libs/org_robolectric_shadows_framework': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_framework', - 'version': 'version:2@4.10.3.cr1', + 'version': 'version:2@4.14.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_robolectric_shadows_playservices': { + 'src/third_party/android_deps/cipd/libs/org_robolectric_utils': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_playservices', - 'version': 'version:2@4.10.3.cr1', + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_utils', + 'version': 'version:2@4.14.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_robolectric_utils': { + 'src/third_party/android_deps/cipd/libs/org_robolectric_utils_reflector': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_robolectric_utils', - 'version': 'version:2@4.10.3.cr1', + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_utils_reflector', + 'version': 'version:2@4.14.1.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, - 
'src/third_party/android_deps/libs/org_robolectric_utils_reflector': { + 'src/third_party/android_deps/cipd/libs/org_yaml_snakeyaml': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_robolectric_utils_reflector', - 'version': 'version:2@4.10.3.cr1', + 'package': 'chromium/third_party/android_deps/libs/org_yaml_snakeyaml', + 'version': 'version:2@2.3.cr1', }, ], - 'condition': 'checkout_android', + 'condition': 'checkout_android and non_git_source', 'dep_type': 'cipd', }, @@ -2506,6 +2211,20 @@ hooks = [ 'action': ['python3', 'src/tools/clang/scripts/update.py', '--package=coverage_tools'], }, + { + # This is also supposed to support the same set of platforms as 'clang' + # above. LLVM ToT support isn't provided at the moment. + 'name': 'clangd', + 'pattern': '.', + 'condition': 'checkout_clangd', + 'action': ['vpython3', 'src/tools/clang/scripts/update.py', + '--package=clangd'], + }, + { + 'name': 'rust', + 'pattern': '.', + 'action': ['python3', 'src/tools/rust/update_rust.py'], + }, { # Update LASTCHANGE. 'name': 'lastchange', @@ -2540,60 +2259,6 @@ hooks = [ '-o', 'src/tools/clang/dsymutil/bin/dsymutil', ], }, - # Pull clang-format binaries using checked-in hashes. - { - 'name': 'clang_format_win', - 'pattern': '.', - 'condition': 'host_os == "win"', - 'action': [ 'python3', - 'src/third_party/depot_tools/download_from_google_storage.py', - '--no_resume', - '--platform=win32', - '--no_auth', - '--bucket', 'chromium-clang-format', - '-s', 'src/buildtools/win/clang-format.exe.sha1', - ], - }, - { - 'name': 'clang_format_mac_x64', - 'pattern': '.', - 'condition': 'host_os == "mac" and host_cpu == "x64"', - 'action': [ 'python3', - 'src/third_party/depot_tools/download_from_google_storage.py', - '--no_resume', - '--platform=darwin', - '--no_auth', - '--bucket', 'chromium-clang-format', - '-s', 'src/buildtools/mac/clang-format.x64.sha1', - '-o', 'src/buildtools/mac/clang-format', - ], - }, - { - 'name': 'clang_format_mac_arm64', - 'pattern': '.', - 'condition': 'host_os == "mac" and host_cpu == "arm64"', - 'action': [ 'python3', - 'src/third_party/depot_tools/download_from_google_storage.py', - '--no_resume', - '--no_auth', - '--bucket', 'chromium-clang-format', - '-s', 'src/buildtools/mac/clang-format.arm64.sha1', - '-o', 'src/buildtools/mac/clang-format', - ], - }, - { - 'name': 'clang_format_linux', - 'pattern': '.', - 'condition': 'host_os == "linux"', - 'action': [ 'python3', - 'src/third_party/depot_tools/download_from_google_storage.py', - '--no_resume', - '--platform=linux*', - '--no_auth', - '--bucket', 'chromium-clang-format', - '-s', 'src/buildtools/linux64/clang-format.sha1', - ], - }, # Pull rc binaries using checked-in hashes. 
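The 'clangd' and 'rust' hooks added above, like the clang-format download hooks removed here, follow the usual gclient hook shape: a dict with 'name', 'pattern', an optional 'condition' evaluated against DEPS variables, and an 'action' command line run at sync time. The following is only a rough, self-contained sketch of that mechanism, not gclient's actual implementation; the eval-based condition handling and the example variable value are simplifications for illustration.

import subprocess

def run_hooks(hooks, variables):
    """Run every hook whose optional 'condition' holds for `variables`."""
    for hook in hooks:
        condition = hook.get('condition')
        # gclient conditions are Python-like expressions over DEPS/custom
        # variables; using eval() here is purely illustrative.
        if condition and not eval(condition, {'__builtins__': {}}, dict(variables)):
            continue
        print('Running hook:', hook['name'])
        subprocess.check_call(hook['action'])

example_hooks = [{
    'name': 'clangd',
    'pattern': '.',
    'condition': 'checkout_clangd',
    'action': ['vpython3', 'src/tools/clang/scripts/update.py',
               '--package=clangd'],
}]

# Would invoke the update script only when checkout_clangd is set:
# run_hooks(example_hooks, {'checkout_clangd': True})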
{ 'name': 'rc_win', @@ -2631,41 +2296,6 @@ hooks = [ '-s', 'src/build/toolchain/win/rc/linux64/rc.sha1', ], }, - { - 'name': 'test_fonts', - 'pattern': '.', - 'action': [ 'download_from_google_storage', - '--no_resume', - '--extract', - '--no_auth', - '--bucket', 'chromium-fonts', - '-s', 'src/third_party/test_fonts/test_fonts.tar.gz.sha1', - ], - }, - { - 'name': 'msan_chained_origins_focal', - 'pattern': '.', - 'condition': 'checkout_instrumented_libraries', - 'action': [ 'python3', - 'src/third_party/depot_tools/download_from_google_storage.py', - '--no_resume', - '--no_auth', - '--bucket', 'chromium-instrumented-libraries', - '-s', 'src/third_party/instrumented_libraries/binaries/msan-chained-origins-focal.tgz.sha1', - ], - }, - { - 'name': 'msan_no_origins_focal', - 'pattern': '.', - 'condition': 'checkout_instrumented_libraries', - 'action': [ 'python3', - 'src/third_party/depot_tools/download_from_google_storage.py', - '--no_resume', - '--no_auth', - '--bucket', 'chromium-instrumented-libraries', - '-s', 'src/third_party/instrumented_libraries/binaries/msan-no-origins-focal.tgz.sha1', - ], - }, { # Download test resources, i.e. video and audio files from Google Storage. 'pattern': '.', @@ -2699,11 +2329,11 @@ hooks = [ }, # Download remote exec cfg files { - 'name': 'fetch_reclient_cfgs', + 'name': 'configure_reclient_cfgs', 'pattern': '.', 'condition': 'download_remoteexec_cfg', 'action': ['python3', - 'src/buildtools/reclient_cfgs/fetch_reclient_cfgs.py', + 'src/buildtools/reclient_cfgs/configure_reclient_cfgs.py', '--rbe_instance', Var('rbe_instance'), '--reproxy_cfg_template', @@ -2711,9 +2341,33 @@ hooks = [ '--quiet', ], }, + # Configure Siso for developer builds. + { + 'name': 'configure_siso', + 'pattern': '.', + 'action': ['python3', + 'src/build/config/siso/configure_siso.py', + '--rbe_instance', + Var('rbe_instance'), + ], + }, + { + # Ensure we remove any file from disk that is no longer needed (e.g. after + # hooks to native GCS deps migration). + 'name': 'remove_stale_files', + 'pattern': '.', + 'action': [ + 'python3', + 'src/tools/remove_stale_files.py', + 'src/third_party/test_fonts/test_fonts.tar.gz', # Remove after 20240901 + ], + }, ] -recursedeps = [] +recursedeps = [ + 'src/buildtools', + 'src/third_party/instrumented_libs', +] # Define rules for which include paths are allowed in our source. include_rules = [ @@ -2739,25 +2393,29 @@ include_rules = [ "+absl/algorithm/container.h", "+absl/base/attributes.h", "+absl/base/config.h", - "+absl/base/const_init.h", + "+absl/base/nullability.h", "+absl/base/macros.h", "+absl/cleanup/cleanup.h", - "+absl/container/inlined_vector.h", + "+absl/container", + "-absl/container/fixed_array.h", "+absl/functional/any_invocable.h", "+absl/functional/bind_front.h", "+absl/memory/memory.h", - "+absl/meta/type_traits.h", "+absl/numeric/bits.h", "+absl/strings/ascii.h", "+absl/strings/escaping.h", "+absl/strings/match.h", + "+absl/strings/str_cat.h", # note - allowed for single argument version only "+absl/strings/str_replace.h", "+absl/strings/string_view.h", - "+absl/types/optional.h", - "+absl/types/variant.h", # Abseil flags are allowed in tests and tools. 
"+absl/flags", + + # Perfetto should be used through rtc_base/trace_event.h + '-third_party/perfetto', + '-perfetto', + '-protos/perfetto', ] specific_include_rules = { diff --git a/OWNERS_INFRA b/OWNERS_INFRA index 7172570152..c7d672b8ee 100644 --- a/OWNERS_INFRA +++ b/OWNERS_INFRA @@ -10,6 +10,8 @@ per-file .vpython3=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org per-file AUTHORS=* per-file DEPS=* per-file pylintrc=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org +per-file .rustfmt.toml=boivie@webrtc.org,mbonadei@webrtc.org,jleconte@webrtc.org +per-file pylintrc_old_style=mbonadei@webrtc.org,jansson@webrtc.org,jleconte@webrtc.org per-file WATCHLISTS=* per-file native-api.md=mbonadei@webrtc.org per-file ....lua=titovartem@webrtc.org diff --git a/PRESUBMIT.py b/PRESUBMIT.py index 4fbee0e32d..96fa8abd9d 100755 --- a/PRESUBMIT.py +++ b/PRESUBMIT.py @@ -47,6 +47,15 @@ 'voice_engine', ] +PYLINT_OLD_STYLE = [ + "PRESUBMIT.py", + "tools_webrtc/autoroller/roll_deps.py", + "tools_webrtc/android/build_aar.py", + "tools_webrtc/ios/build_ios_libs.py", + "tools_webrtc/mb/mb.py", + "tools_webrtc/mb/mb_unittest.py", +] + # These filters will always be removed, even if the caller specifies a filter # set, as they are problematic or broken in some way. # @@ -54,12 +63,15 @@ # - build/c++11 : Rvalue ref checks are unreliable (false positives), # include file and feature blocklists are # google3-specific. +# - readability/todo : WebRTC puts bug links, not usernames, in TODOs. +# The new TODO style also doesn't match with this check. # - runtime/references : Mutable references are not banned by the Google # C++ style guide anymore (starting from May 2020). # - whitespace/operators: Same as above (doesn't seem sufficient to eliminate # all move-related errors). DISABLED_LINT_FILTERS = [ '-build/c++11', + '-readability/todo', '-runtime/references', '-whitespace/operators', ] @@ -121,50 +133,43 @@ FILE_PATH_RE = re.compile(r'"(?P(\w|\/)+)(?P\.\w+)"') -def FindSrcDirPath(starting_dir): - """Returns the abs path to the src/ dir of the project.""" - src_dir = starting_dir - while os.path.basename(src_dir) != 'src': - src_dir = os.path.normpath(os.path.join(src_dir, os.pardir)) - return src_dir - - @contextmanager def _AddToPath(*paths): - original_sys_path = sys.path - sys.path.extend(paths) - try: - yield - finally: - # Restore sys.path to what it was before. - sys.path = original_sys_path + original_sys_path = sys.path + sys.path.extend(paths) + try: + yield + finally: + # Restore sys.path to what it was before. 
+ sys.path = original_sys_path def VerifyNativeApiHeadersListIsValid(input_api, output_api): - """Ensures the list of native API header directories is up to date.""" - non_existing_paths = [] - native_api_full_paths = [ - input_api.os_path.join(input_api.PresubmitLocalPath(), *path.split('/')) - for path in API_DIRS - ] - for path in native_api_full_paths: - if not os.path.isdir(path): - non_existing_paths.append(path) - if non_existing_paths: - return [ - output_api.PresubmitError( - 'Directories to native API headers have changed which has made ' - 'the list in PRESUBMIT.py outdated.\nPlease update it to the ' - 'current location of our native APIs.', non_existing_paths) + """Ensures the list of native API header directories is up to date.""" + non_existing_paths = [] + native_api_full_paths = [ + input_api.os_path.join(input_api.PresubmitLocalPath(), + *path.split('/')) for path in API_DIRS ] - return [] + for path in native_api_full_paths: + if not os.path.isdir(path): + non_existing_paths.append(path) + if non_existing_paths: + return [ + output_api.PresubmitError( + 'Directories to native API headers have changed which has made ' + 'the list in PRESUBMIT.py outdated.\nPlease update it to the ' + 'current location of our native APIs.', non_existing_paths) + ] + return [] API_CHANGE_MSG = """ You seem to be changing native API header files. Please make sure that you: 1. Make compatible changes that don't break existing clients. Usually this is done by keeping the existing method signatures unchanged. - 2. Mark the old stuff as deprecated (use the ABSL_DEPRECATED macro). + 2. Mark the old stuff as deprecated (use the [[deprecated]] attribute or + the ABSL_DEPRECATE_AND_INLINE macro). 3. Create a timeline and plan for when the deprecated stuff will be removed. (The amount of time we give users to change their code should be informed by how much work it is for them. If they just @@ -181,592 +186,579 @@ def VerifyNativeApiHeadersListIsValid(input_api, output_api): def CheckNativeApiHeaderChanges(input_api, output_api): - """Checks to remind proper changing of native APIs.""" - files = [] - source_file_filter = lambda x: input_api.FilterSourceFile( - x, files_to_check=[r'.+\.(gn|gni|h)$']) - for f in input_api.AffectedSourceFiles(source_file_filter): - for path in API_DIRS: - dn = os.path.dirname(f.LocalPath()) - if path == 'api': - # Special case: Subdirectories included. - if dn == 'api' or dn.startswith('api/'): - files.append(f.LocalPath()) - else: - # Normal case: Subdirectories not included. - if dn == path: - files.append(f.LocalPath()) - - if files: - return [output_api.PresubmitNotifyResult(API_CHANGE_MSG, files)] - return [] + """Checks to remind proper changing of native APIs.""" + files = set() + source_file_filter = lambda x: input_api.FilterSourceFile( + x, files_to_check=[r'.+\.(gn|gni|h)$']) + for f in input_api.AffectedSourceFiles(source_file_filter): + for path in API_DIRS: + dn = os.path.dirname(f.LocalPath()) + if path == 'api': + # Special case: Subdirectories included. + if dn == 'api' or dn.startswith('api/'): + files.add(f.LocalPath()) + else: + # Normal case: Subdirectories not included. 
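As an aside, the directory matching used by CheckNativeApiHeaderChanges (the 'api' directory is special-cased so its subdirectories count, everything else requires an exact directory match) can be exercised in isolation. The API_DIRS entries and file paths below are placeholders for illustration, not the real list.

import os

API_DIRS = ('api', 'modules/audio_coding/include')  # placeholder entries

def touches_native_api(path):
    dn = os.path.dirname(path)
    for api_dir in API_DIRS:
        if api_dir == 'api':
            # Special case: subdirectories of api/ are included.
            if dn == 'api' or dn.startswith('api/'):
                return True
        elif dn == api_dir:
            # Normal case: only files directly in the directory count.
            return True
    return False

print(touches_native_api('api/units/time_delta.h'))               # True
print(touches_native_api('modules/audio_coding/include/acm.h'))   # True
print(touches_native_api('modules/audio_coding/internal/acm.h'))  # False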
+ if dn == path: + files.add(f.LocalPath()) + + if files: + return [output_api.PresubmitNotifyResult(API_CHANGE_MSG, list(files))] + return [] def CheckNoIOStreamInHeaders(input_api, output_api, source_file_filter): - """Checks to make sure no .h files include .""" - files = [] - pattern = input_api.re.compile(r'^#include\s*', - input_api.re.MULTILINE) - file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter( - x)) - for f in input_api.AffectedSourceFiles(file_filter): - if not f.LocalPath().endswith('.h'): - continue - contents = input_api.ReadFile(f) - if pattern.search(contents): - files.append(f) - - if len(files) > 0: - return [ - output_api.PresubmitError( - 'Do not #include in header files, since it inserts ' - 'static initialization into every file including the header. ' - 'Instead, #include . See http://crbug.com/94794', files) - ] - return [] + """Checks to make sure no .h files include .""" + files = set() + pattern = input_api.re.compile(r'^#include\s*', + input_api.re.MULTILINE) + file_filter = lambda x: (input_api.FilterSourceFile(x) and + source_file_filter(x)) + for f in input_api.AffectedSourceFiles(file_filter): + if not f.LocalPath().endswith('.h'): + continue + contents = input_api.ReadFile(f) + if pattern.search(contents): + files.add(f) + + if len(files) > 0: + return [ + output_api.PresubmitError( + 'Do not #include in header files, since it inserts ' + 'static initialization into every file including the header. ' + 'Instead, #include . See http://crbug.com/94794', + list(files)) + ] + return [] def CheckNoPragmaOnce(input_api, output_api, source_file_filter): - """Make sure that banned functions are not used.""" - files = [] - pattern = input_api.re.compile(r'^#pragma\s+once', input_api.re.MULTILINE) - file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter( - x)) - for f in input_api.AffectedSourceFiles(file_filter): - if not f.LocalPath().endswith('.h'): - continue - contents = input_api.ReadFile(f) - if pattern.search(contents): - files.append(f) - - if files: - return [ - output_api.PresubmitError( - 'Do not use #pragma once in header files.\n' - 'See http://www.chromium.org/developers/coding-style' - '#TOC-File-headers', files) - ] - return [] + """Make sure that banned functions are not used.""" + files = set() + pattern = input_api.re.compile(r'^#pragma\s+once', input_api.re.MULTILINE) + file_filter = lambda x: (input_api.FilterSourceFile(x) and + source_file_filter(x)) + for f in input_api.AffectedSourceFiles(file_filter): + if not f.LocalPath().endswith('.h'): + continue + contents = input_api.ReadFile(f) + if pattern.search(contents): + files.add(f) + + if files: + return [ + output_api.PresubmitError( + 'Do not use #pragma once in header files.\n' + 'See http://www.chromium.org/developers/coding-style' + '#TOC-File-headers', list(files)) + ] + return [] def CheckNoFRIEND_TEST(# pylint: disable=invalid-name input_api, output_api, source_file_filter): - """Make sure that gtest's FRIEND_TEST() macro is not used, the + """Make sure that gtest's FRIEND_TEST() macro is not used, the FRIEND_TEST_ALL_PREFIXES() macro from testsupport/gtest_prod_util.h should be used instead since that allows for FLAKY_, FAILS_ and DISABLED_ prefixes.""" - problems = [] + problems = [] - file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h')) and - source_file_filter(f)) - for f in input_api.AffectedFiles(file_filter=file_filter): - for line_num, line in f.ChangedContents(): - if 'FRIEND_TEST(' in line: - problems.append(' 
%s:%d' % (f.LocalPath(), line_num)) + file_filter = lambda f: (f.LocalPath().endswith( + ('.cc', '.h')) and source_file_filter(f)) + for f in input_api.AffectedFiles(file_filter=file_filter): + for line_num, line in f.ChangedContents(): + if 'FRIEND_TEST(' in line: + problems.append(' %s:%d' % (f.LocalPath(), line_num)) - if not problems: - return [] - return [ - output_api.PresubmitPromptWarning( - 'WebRTC\'s code should not use gtest\'s FRIEND_TEST() macro. ' - 'Include testsupport/gtest_prod_util.h and use ' - 'FRIEND_TEST_ALL_PREFIXES() instead.\n' + '\n'.join(problems)) - ] + if not problems: + return [] + return [ + output_api.PresubmitPromptWarning( + 'WebRTC\'s code should not use gtest\'s FRIEND_TEST() macro. ' + 'Include testsupport/gtest_prod_util.h and use ' + 'FRIEND_TEST_ALL_PREFIXES() instead.\n' + '\n'.join(problems)) + ] def IsLintDisabled(disabled_paths, file_path): - """ Checks if a file is disabled for lint check.""" - for path in disabled_paths: - if file_path == path or os.path.dirname(file_path).startswith(path): - return True - return False + """ Checks if a file is disabled for lint check.""" + for path in disabled_paths: + if file_path == path or os.path.dirname(file_path).startswith(path): + return True + return False def CheckApprovedFilesLintClean(input_api, output_api, source_file_filter=None): - """Checks that all new or non-exempt .cc and .h files pass cpplint.py. + """Checks that all new or non-exempt .cc and .h files pass cpplint.py. This check is based on CheckChangeLintsClean in depot_tools/presubmit_canned_checks.py but has less filters and only checks added files.""" - result = [] - - # Initialize cpplint. - import cpplint - # Access to a protected member _XX of a client class - # pylint: disable=W0212 - cpplint._cpplint_state.ResetErrorCounts() - - lint_filters = cpplint._Filters() - lint_filters.extend(DISABLED_LINT_FILTERS) - cpplint._SetFilters(','.join(lint_filters)) - - # Create a platform independent exempt list for cpplint. - disabled_paths = [ - input_api.os_path.join(*path.split('/')) for path in CPPLINT_EXCEPTIONS - ] - - # Use the strictest verbosity level for cpplint.py (level 1) which is the - # default when running cpplint.py from command line. To make it possible to - # work with not-yet-converted code, we're only applying it to new (or - # moved/renamed) files and files not listed in CPPLINT_EXCEPTIONS. - verbosity_level = 1 - files = [] - for f in input_api.AffectedSourceFiles(source_file_filter): - # Note that moved/renamed files also count as added. - if f.Action() == 'A' or not IsLintDisabled(disabled_paths, f.LocalPath()): - files.append(f.AbsoluteLocalPath()) - - for file_name in files: - cpplint.ProcessFile(file_name, verbosity_level) - - if cpplint._cpplint_state.error_count > 0: - if input_api.is_committing: - res_type = output_api.PresubmitError - else: - res_type = output_api.PresubmitPromptWarning - result = [res_type('Changelist failed cpplint.py check.')] + result = [] - return result + # Initialize cpplint. + import cpplint + # Access to a protected member _XX of a client class + # pylint: disable=W0212 + cpplint._cpplint_state.ResetErrorCounts() + lint_filters = cpplint._Filters() + lint_filters.extend(DISABLED_LINT_FILTERS) + cpplint._SetFilters(','.join(lint_filters)) -def CheckNoSourcesAbove(input_api, gn_files, output_api): - # Disallow referencing source files with paths above the GN file location. 
- source_pattern = input_api.re.compile(r' +sources \+?= \[(.*?)\]', - re.MULTILINE | re.DOTALL) - file_pattern = input_api.re.compile(r'"((\.\./.*?)|(//.*?))"') - violating_gn_files = set() - violating_source_entries = [] - for gn_file in gn_files: - contents = input_api.ReadFile(gn_file) - for source_block_match in source_pattern.finditer(contents): - # Find all source list entries starting with ../ in the source block - # (exclude overrides entries). - for file_list_match in file_pattern.finditer(source_block_match.group(1)): - source_file = file_list_match.group(1) - if 'overrides/' not in source_file: - violating_source_entries.append(source_file) - violating_gn_files.add(gn_file) - if violating_gn_files: - return [ - output_api.PresubmitError( - 'Referencing source files above the directory of the GN file ' - 'is not allowed. Please introduce new GN targets in the proper ' - 'location instead.\n' - 'Invalid source entries:\n' - '%s\n' - 'Violating GN files:' % '\n'.join(violating_source_entries), - items=violating_gn_files) + # Create a platform independent exempt list for cpplint. + disabled_paths = [ + input_api.os_path.join(*path.split('/')) for path in CPPLINT_EXCEPTIONS ] - return [] - - -def CheckAbseilDependencies(input_api, gn_files, output_api): - """Checks that Abseil dependencies are declared in `absl_deps`.""" - absl_re = re.compile(r'third_party/abseil-cpp', re.MULTILINE | re.DOTALL) - target_types_to_check = [ - 'rtc_library', - 'rtc_source_set', - 'rtc_static_library', - 'webrtc_fuzzer_test', - ] - error_msg = ('Abseil dependencies in target "%s" (file: %s) ' - 'should be moved to the "absl_deps" parameter.') - errors = [] - - # pylint: disable=too-many-nested-blocks - for gn_file in gn_files: - gn_file_content = input_api.ReadFile(gn_file) - for target_match in TARGET_RE.finditer(gn_file_content): - target_type = target_match.group('target_type') - target_name = target_match.group('target_name') - target_contents = target_match.group('target_contents') - if target_type in target_types_to_check: - for deps_match in DEPS_RE.finditer(target_contents): - deps = deps_match.group('deps').splitlines() - for dep in deps: - if re.search(absl_re, dep): - errors.append( - output_api.PresubmitError(error_msg % - (target_name, gn_file.LocalPath()))) - break # no need to warn more than once per target - return errors + + # Use the strictest verbosity level for cpplint.py (level 1) which is the + # default when running cpplint.py from command line. To make it possible to + # work with not-yet-converted code, we're only applying it to new (or + # moved/renamed) files and files not listed in CPPLINT_EXCEPTIONS. + verbosity_level = 1 + files = set() + for f in input_api.AffectedSourceFiles(source_file_filter): + # Note that moved/renamed files also count as added. + if f.Action() == 'A' or not IsLintDisabled(disabled_paths, + f.LocalPath()): + files.add(f.AbsoluteLocalPath()) + + for file_name in files: + cpplint.ProcessFile(file_name, verbosity_level) + + if cpplint._cpplint_state.error_count > 0: + if input_api.is_committing: + res_type = output_api.PresubmitError + else: + res_type = output_api.PresubmitPromptWarning + result = [res_type('Changelist failed cpplint.py check.')] + + return result + + +def CheckNoSourcesAbove(input_api, gn_files, output_api): + # Disallow referencing source files with paths above the GN file location. 
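The two regular expressions defined next can be exercised on their own. A minimal sketch against a toy GN snippet (target and file names invented) shows that only entries starting with ../ or an absolute // path are candidates for flagging:

import re

source_pattern = re.compile(r' +sources \+?= \[(.*?)\]', re.MULTILINE | re.DOTALL)
file_pattern = re.compile(r'"((\.\./.*?)|(//.*?))"')

gn_snippet = '''
rtc_library("foo") {
  sources = [
    "../bar/baz.cc",
    "qux.cc",
  ]
}
'''

for block in source_pattern.finditer(gn_snippet):
    for entry in file_pattern.finditer(block.group(1)):
        print('flagged:', entry.group(1))  # prints only ../bar/baz.cc here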
+ source_pattern = input_api.re.compile(r' +sources \+?= \[(.*?)\]', + re.MULTILINE | re.DOTALL) + file_pattern = input_api.re.compile(r'"((\.\./.*?)|(//.*?))"') + violating_gn_files = set() + violating_source_entries = set() + for gn_file in gn_files: + contents = input_api.ReadFile(gn_file) + for source_block_match in source_pattern.finditer(contents): + # Find all source list entries starting with ../ in the source block + # (exclude overrides entries). + for file_list_match in file_pattern.finditer( + source_block_match.group(1)): + source_file = file_list_match.group(1) + if 'overrides/' not in source_file: + violating_source_entries.add(source_file) + violating_gn_files.add(gn_file) + if violating_gn_files: + return [ + output_api.PresubmitError( + 'Referencing source files above the directory of the GN file ' + 'is not allowed. Please introduce new GN targets in the proper ' + 'location instead.\n' + 'Invalid source entries:\n' + '%s\n' + 'Violating GN files:' % '\n'.join(violating_source_entries), + items=list(violating_gn_files)) + ] + return [] def CheckNoMixingSources(input_api, gn_files, output_api): - """Disallow mixing C, C++ and Obj-C/Obj-C++ in the same target. + """Disallow mixing C, C++ and Obj-C/Obj-C++ in the same target. See bugs.webrtc.org/7743 for more context. """ - - def _MoreThanOneSourceUsed(*sources_lists): - sources_used = 0 - for source_list in sources_lists: - if len(source_list) > 0: - sources_used += 1 - return sources_used > 1 - - errors = defaultdict(lambda: []) - for gn_file in gn_files: - gn_file_content = input_api.ReadFile(gn_file) - for target_match in TARGET_RE.finditer(gn_file_content): - # list_of_sources is a list of tuples of the form - # (c_files, cc_files, objc_files) that keeps track of all the - # sources defined in a target. A GN target can have more that - # on definition of sources (since it supports if/else statements). - # E.g.: - # rtc_static_library("foo") { - # if (is_win) { - # sources = [ "foo.cc" ] - # } else { - # sources = [ "foo.mm" ] - # } - # } - # This is allowed and the presubmit check should support this case. - list_of_sources = [] - c_files = [] - cc_files = [] - objc_files = [] - target_name = target_match.group('target_name') - target_contents = target_match.group('target_contents') - for sources_match in SOURCES_RE.finditer(target_contents): - if '+=' not in sources_match.group(0): - if c_files or cc_files or objc_files: + def _MoreThanOneSourceUsed(*sources_lists): + sources_used = 0 + for source_list in sources_lists: + if len(source_list) > 0: + sources_used += 1 + return sources_used > 1 + + errors = defaultdict(lambda: []) + for gn_file in gn_files: + gn_file_content = input_api.ReadFile(gn_file) + for target_match in TARGET_RE.finditer(gn_file_content): + # list_of_sources is a list of tuples of the form + # (c_files, cc_files, objc_files) that keeps track of all the + # sources defined in a target. A GN target can have more that + # on definition of sources (since it supports if/else statements). + # E.g.: + # rtc_static_library("foo") { + # if (is_win) { + # sources = [ "foo.cc" ] + # } else { + # sources = [ "foo.mm" ] + # } + # } + # This is allowed and the presubmit check should support this case. 
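In other words, each (c_files, cc_files, objc_files) tuple is judged separately, and a target is only flagged when a single tuple mixes more than one kind of source. A tiny standalone illustration of that rule (file names invented):

def more_than_one_source_kind(c_files, cc_files, objc_files):
    # Mirrors _MoreThanOneSourceUsed: count how many of the three lists are
    # non-empty and flag the target when more than one of them is.
    return sum(1 for lst in (c_files, cc_files, objc_files) if lst) > 1

print(more_than_one_source_kind(['a.c'], ['b.cc'], []))     # True  -> flagged
print(more_than_one_source_kind([], ['b.cc', 'c.cc'], []))  # False -> fine
print(more_than_one_source_kind([], [], ['d.m', 'e.mm']))   # False -> fine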
+ list_of_sources = [] + c_files = [] + cc_files = [] + objc_files = [] + target_name = target_match.group('target_name') + target_contents = target_match.group('target_contents') + for sources_match in SOURCES_RE.finditer(target_contents): + if '+=' not in sources_match.group(0): + if c_files or cc_files or objc_files: + list_of_sources.append((c_files, cc_files, objc_files)) + c_files = [] + cc_files = [] + objc_files = [] + for file_match in FILE_PATH_RE.finditer( + sources_match.group(1)): + file_path = file_match.group('file_path') + extension = file_match.group('extension') + if extension == '.c': + c_files.append(file_path + extension) + if extension == '.cc': + cc_files.append(file_path + extension) + if extension in ['.m', '.mm']: + objc_files.append(file_path + extension) list_of_sources.append((c_files, cc_files, objc_files)) - c_files = [] - cc_files = [] - objc_files = [] - for file_match in FILE_PATH_RE.finditer(sources_match.group(1)): - file_path = file_match.group('file_path') - extension = file_match.group('extension') - if extension == '.c': - c_files.append(file_path + extension) - if extension == '.cc': - cc_files.append(file_path + extension) - if extension in ['.m', '.mm']: - objc_files.append(file_path + extension) - list_of_sources.append((c_files, cc_files, objc_files)) - for c_files_list, cc_files_list, objc_files_list in list_of_sources: - if _MoreThanOneSourceUsed(c_files_list, cc_files_list, objc_files_list): - all_sources = sorted(c_files_list + cc_files_list + objc_files_list) - errors[gn_file.LocalPath()].append((target_name, all_sources)) - if errors: - return [ - output_api.PresubmitError( - 'GN targets cannot mix .c, .cc and .m (or .mm) source files.\n' - 'Please create a separate target for each collection of ' - 'sources.\n' - 'Mixed sources: \n' - '%s\n' - 'Violating GN files:\n%s\n' % - (json.dumps(errors, indent=2), '\n'.join(list(errors.keys())))) - ] - return [] + for c_files_list, cc_files_list, objc_files_list in list_of_sources: + if _MoreThanOneSourceUsed(c_files_list, cc_files_list, + objc_files_list): + all_sources = sorted(c_files_list + cc_files_list + + objc_files_list) + errors[gn_file.LocalPath()].append( + (target_name, all_sources)) + if errors: + return [ + output_api.PresubmitError( + 'GN targets cannot mix .c, .cc and .m (or .mm) source files.\n' + 'Please create a separate target for each collection of ' + 'sources.\n' + 'Mixed sources: \n' + '%s\n' + 'Violating GN files:\n%s\n' % + (json.dumps(errors, indent=2), '\n'.join(list(errors.keys())))) + ] + return [] def CheckNoPackageBoundaryViolations(input_api, gn_files, output_api): - cwd = input_api.PresubmitLocalPath() - with _AddToPath( - input_api.os_path.join(cwd, 'tools_webrtc', 'presubmit_checks_lib')): - from check_package_boundaries import CheckPackageBoundaries - build_files = [os.path.join(cwd, gn_file.LocalPath()) for gn_file in gn_files] - errors = CheckPackageBoundaries(cwd, build_files)[:5] - if errors: - return [ - output_api.PresubmitError( - 'There are package boundary violations in the following GN ' - 'files:', - long_text='\n\n'.join(str(err) for err in errors)) + cwd = input_api.PresubmitLocalPath() + with _AddToPath( + input_api.os_path.join(cwd, 'tools_webrtc', + 'presubmit_checks_lib')): + from check_package_boundaries import CheckPackageBoundaries + build_files = [ + os.path.join(cwd, gn_file.LocalPath()) for gn_file in gn_files ] - return [] + errors = CheckPackageBoundaries(cwd, build_files)[:5] + if errors: + return [ + output_api.PresubmitError( + 
'There are package boundary violations in the following GN ' + 'files:', + long_text='\n\n'.join(str(err) for err in errors)) + ] + return [] def _ReportFileAndLine(filename, line_num): - """Default error formatter for _FindNewViolationsOfRule.""" - return '%s (line %s)' % (filename, line_num) + """Default error formatter for _FindNewViolationsOfRule.""" + return '%s (line %s)' % (filename, line_num) def CheckNoWarningSuppressionFlagsAreAdded(gn_files, input_api, output_api, error_formatter=_ReportFileAndLine): - """Ensure warning suppression flags are not added without a reason.""" - msg = ('Usage of //build/config/clang:extra_warnings is discouraged ' - 'in WebRTC.\n' - 'If you are not adding this code (e.g. you are just moving ' - 'existing code) or you want to add an exception,\n' - 'you can add a comment on the line that causes the problem:\n\n' - '"-Wno-odr" # no-presubmit-check TODO(bugs.webrtc.org/BUG_ID)\n' - '\n' - 'Affected files:\n') - errors = [] # 2-element tuples with (file, line number) - clang_warn_re = input_api.re.compile(r'//build/config/clang:extra_warnings') - # pylint: disable-next=fixme - no_presubmit_re = input_api.re.compile( - r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)') - for f in gn_files: - for line_num, line in f.ChangedContents(): - if clang_warn_re.search(line) and not no_presubmit_re.search(line): - errors.append(error_formatter(f.LocalPath(), line_num)) - if errors: - return [output_api.PresubmitError(msg, errors)] - return [] + """Ensure warning suppression flags are not added without a reason.""" + msg = ('Usage of //build/config/clang:extra_warnings is discouraged ' + 'in WebRTC.\n' + 'If you are not adding this code (e.g. you are just moving ' + 'existing code) or you want to add an exception,\n' + 'you can add a comment on the line that causes the problem:\n\n' + '"-Wno-odr" # no-presubmit-check TODO(bugs.webrtc.org/BUG_ID)\n' + '\n' + 'Affected files:\n') + errors = [] # 2-element tuples with (file, line number) + clang_warn_re = input_api.re.compile( + r'//build/config/clang:extra_warnings') + # pylint: disable-next=fixme + no_presubmit_re = input_api.re.compile( + r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)') + for f in gn_files: + for line_num, line in f.ChangedContents(): + if clang_warn_re.search(line) and not no_presubmit_re.search(line): + errors.append(error_formatter(f.LocalPath(), line_num)) + if errors: + return [output_api.PresubmitError(msg, errors)] + return [] def CheckNoTestCaseUsageIsAdded(input_api, output_api, source_file_filter, error_formatter=_ReportFileAndLine): - error_msg = ('Usage of legacy GoogleTest API detected!\nPlease use the ' - 'new API: https://github.com/google/googletest/blob/master/' - 'googletest/docs/primer.md#beware-of-the-nomenclature.\n' - 'Affected files:\n') - errors = [] # 2-element tuples with (file, line number) - test_case_re = input_api.re.compile(r'TEST_CASE') - file_filter = lambda f: (source_file_filter(f) and f.LocalPath().endswith( - '.cc')) - for f in input_api.AffectedSourceFiles(file_filter): - for line_num, line in f.ChangedContents(): - if test_case_re.search(line): - errors.append(error_formatter(f.LocalPath(), line_num)) - if errors: - return [output_api.PresubmitError(error_msg, errors)] - return [] + error_msg = ('Usage of legacy GoogleTest API detected!\nPlease use the ' + 'new API: https://github.com/google/googletest/blob/master/' + 'googletest/docs/primer.md#beware-of-the-nomenclature.\n' + 'Affected files:\n') + errors = [] # 2-element tuples with (file, line 
number) + test_case_re = input_api.re.compile(r'TEST_CASE') + file_filter = lambda f: (source_file_filter(f) and f.LocalPath().endswith( + '.cc')) + for f in input_api.AffectedSourceFiles(file_filter): + for line_num, line in f.ChangedContents(): + if test_case_re.search(line): + errors.append(error_formatter(f.LocalPath(), line_num)) + if errors: + return [output_api.PresubmitError(error_msg, errors)] + return [] def CheckNoStreamUsageIsAdded(input_api, output_api, source_file_filter, error_formatter=_ReportFileAndLine): - """Make sure that no more dependencies on stringstream are added.""" - error_msg = ('Usage of , and in WebRTC is ' - 'deprecated.\n' - 'This includes the following types:\n' - 'std::istringstream, std::ostringstream, std::wistringstream, ' - 'std::wostringstream,\n' - 'std::wstringstream, std::ostream, std::wostream, std::istream,' - 'std::wistream,\n' - 'std::iostream, std::wiostream.\n' - 'If you are not adding this code (e.g. you are just moving ' - 'existing code),\n' - 'you can add a comment on the line that causes the problem:\n\n' - '#include // no-presubmit-check TODO(webrtc:8982)\n' - 'std::ostream& F() { // no-presubmit-check TODO(webrtc:8982)\n' - '\n' - 'If you are adding new code, consider using ' - 'rtc::SimpleStringBuilder\n' - '(in rtc_base/strings/string_builder.h).\n' - 'Affected files:\n') - errors = [] # 2-element tuples with (file, line number) - include_re = input_api.re.compile(r'#include <(i|o|s)stream>') - usage_re = input_api.re.compile(r'std::(w|i|o|io|wi|wo|wio)(string)*stream') - no_presubmit_re = input_api.re.compile( - r'// no-presubmit-check TODO\(webrtc:8982\)') - file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter( - x)) - - def _IsException(file_path): - is_test = any( - file_path.endswith(x) - for x in ['_test.cc', '_tests.cc', '_unittest.cc', '_unittests.cc']) - return (file_path.startswith('examples') or file_path.startswith('test') - or is_test) - - for f in input_api.AffectedSourceFiles(file_filter): - # Usage of stringstream is allowed under examples/ and in tests. - if f.LocalPath() == 'PRESUBMIT.py' or _IsException(f.LocalPath()): - continue - for line_num, line in f.ChangedContents(): - if ((include_re.search(line) or usage_re.search(line)) - and not no_presubmit_re.search(line)): - errors.append(error_formatter(f.LocalPath(), line_num)) - if errors: - return [output_api.PresubmitError(error_msg, errors)] - return [] + """Make sure that no more dependencies on stringstream are added.""" + error_msg = ( + 'Usage of , and in WebRTC is ' + 'deprecated.\n' + 'This includes the following types:\n' + 'std::istringstream, std::ostringstream, std::wistringstream, ' + 'std::wostringstream,\n' + 'std::wstringstream, std::ostream, std::wostream, std::istream,' + 'std::wistream,\n' + 'std::iostream, std::wiostream.\n' + 'If you are not adding this code (e.g. 
you are just moving ' + 'existing code),\n' + 'you can add a comment on the line that causes the problem:\n\n' + '#include // no-presubmit-check TODO(webrtc:8982)\n' + 'std::ostream& F() { // no-presubmit-check TODO(webrtc:8982)\n' + '\n' + 'If you are adding new code, consider using ' + 'webrtc::SimpleStringBuilder\n' + '(in rtc_base/strings/string_builder.h).\n' + 'Affected files:\n') + errors = [] # 2-element tuples with (file, line number) + include_re = input_api.re.compile(r'#include <(i|o|s)stream>') + usage_re = input_api.re.compile( + r'std::(w|i|o|io|wi|wo|wio)(string)*stream') + no_presubmit_re = input_api.re.compile( + r'// no-presubmit-check TODO\(webrtc:8982\)') + file_filter = lambda x: (input_api.FilterSourceFile(x) and + source_file_filter(x)) + + def _IsException(file_path): + is_test = any( + file_path.endswith(x) for x in + ['_test.cc', '_tests.cc', '_unittest.cc', '_unittests.cc']) + return (file_path.startswith('examples') + or file_path.startswith('test') or is_test) + + for f in input_api.AffectedSourceFiles(file_filter): + # Usage of stringstream is allowed under examples/ and in tests. + if f.LocalPath() == 'PRESUBMIT.py' or _IsException(f.LocalPath()): + continue + for line_num, line in f.ChangedContents(): + if ((include_re.search(line) or usage_re.search(line)) + and not no_presubmit_re.search(line)): + errors.append(error_formatter(f.LocalPath(), line_num)) + if errors: + return [output_api.PresubmitError(error_msg, errors)] + return [] def CheckPublicDepsIsNotUsed(gn_files, input_api, output_api): - """Checks that public_deps is not used without a good reason.""" - result = [] - no_presubmit_check_re = input_api.re.compile( - r'# no-presubmit-check TODO\(webrtc:\d+\)') - error_msg = ('public_deps is not recommended in WebRTC BUILD.gn files ' - 'because it doesn\'t map well to downstream build systems.\n' - 'Used in: %s (line %d).\n' - 'If you are not adding this code (e.g. you are just moving ' - 'existing code) or you have a good reason, you can add this ' - 'comment (verbatim) on the line that causes the problem:\n\n' - 'public_deps = [ # no-presubmit-check TODO(webrtc:8603)\n') - for affected_file in gn_files: - for (line_number, affected_line) in affected_file.ChangedContents(): - if 'public_deps' in affected_line: - surpressed = no_presubmit_check_re.search(affected_line) - if not surpressed: - result.append( - output_api.PresubmitError( - error_msg % (affected_file.LocalPath(), line_number))) - return result + """Checks that public_deps is not used without a good reason.""" + result = [] + no_presubmit_check_re = input_api.re.compile( + r'# no-presubmit-check TODO\(webrtc:\d+\)') + error_msg = ('public_deps is not recommended in WebRTC BUILD.gn files ' + 'because it doesn\'t map well to downstream build systems.\n' + 'Used in: %s (line %d).\n' + 'If you are not adding this code (e.g. 
you are just moving ' + 'existing code) or you have a good reason, you can add this ' + 'comment (verbatim) on the line that causes the problem:\n\n' + 'public_deps = [ # no-presubmit-check TODO(webrtc:8603)\n') + for affected_file in gn_files: + for (line_number, affected_line) in affected_file.ChangedContents(): + if 'public_deps' in affected_line: + surpressed = no_presubmit_check_re.search(affected_line) + if not surpressed: + result.append( + output_api.PresubmitError( + error_msg % + (affected_file.LocalPath(), line_number))) + return result def CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api): - result = [] - error_msg = ('check_includes overrides are not allowed since it can cause ' - 'incorrect dependencies to form. It effectively means that your ' - 'module can include any .h file without depending on its ' - 'corresponding target. There are some exceptional cases when ' - 'this is allowed: if so, get approval from a .gn owner in the ' - 'root OWNERS file.\n' - 'Used in: %s (line %d).') - # pylint: disable-next=fixme - no_presubmit_re = input_api.re.compile( - r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)') - for affected_file in gn_files: - for (line_number, affected_line) in affected_file.ChangedContents(): - if ('check_includes' in affected_line - and not no_presubmit_re.search(affected_line)): - result.append( - output_api.PresubmitError(error_msg % - (affected_file.LocalPath(), line_number))) - return result + result = [] + error_msg = ( + 'check_includes overrides are not allowed since it can cause ' + 'incorrect dependencies to form. It effectively means that your ' + 'module can include any .h file without depending on its ' + 'corresponding target. There are some exceptional cases when ' + 'this is allowed: if so, get approval from a .gn owner in the ' + 'root OWNERS file.\n' + 'Used in: %s (line %d).') + # pylint: disable-next=fixme + no_presubmit_re = input_api.re.compile( + r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)') + for affected_file in gn_files: + for (line_number, affected_line) in affected_file.ChangedContents(): + if ('check_includes' in affected_line + and not no_presubmit_re.search(affected_line)): + result.append( + output_api.PresubmitError( + error_msg % (affected_file.LocalPath(), line_number))) + return result def CheckGnChanges(input_api, output_api): - file_filter = lambda x: (input_api.FilterSourceFile( - x, - files_to_check=(r'.+\.(gn|gni)$', ), - files_to_skip=(r'.*/presubmit_checks_lib/testdata/.*', ))) - - gn_files = [] - for f in input_api.AffectedSourceFiles(file_filter): - gn_files.append(f) - - result = [] - if gn_files: - result.extend(CheckNoSourcesAbove(input_api, gn_files, output_api)) - result.extend(CheckNoMixingSources(input_api, gn_files, output_api)) - result.extend(CheckAbseilDependencies(input_api, gn_files, output_api)) - result.extend( - CheckNoPackageBoundaryViolations(input_api, gn_files, output_api)) - result.extend(CheckPublicDepsIsNotUsed(gn_files, input_api, output_api)) - result.extend(CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api)) - result.extend( - CheckNoWarningSuppressionFlagsAreAdded(gn_files, input_api, output_api)) - return result + file_filter = lambda x: (input_api.FilterSourceFile( + x, + files_to_check=(r'.+\.(gn|gni)$', ), + files_to_skip=(r'.*/presubmit_checks_lib/testdata/.*', ))) + + gn_files = set() + for f in input_api.AffectedSourceFiles(file_filter): + gn_files.add(f) + + result = [] + if gn_files: + result.extend(CheckNoSourcesAbove(input_api, gn_files, 
output_api)) + result.extend(CheckNoMixingSources(input_api, gn_files, output_api)) + result.extend( + CheckNoPackageBoundaryViolations(input_api, gn_files, output_api)) + result.extend(CheckPublicDepsIsNotUsed(gn_files, input_api, + output_api)) + result.extend( + CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api)) + result.extend( + CheckNoWarningSuppressionFlagsAreAdded(gn_files, input_api, + output_api)) + return result def CheckGnGen(input_api, output_api): - """Runs `gn gen --check` with default args to detect mismatches between + """Runs `gn gen --check` with default args to detect mismatches between #includes and dependencies in the BUILD.gn files, as well as general build errors. """ - with _AddToPath( - input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools_webrtc', - 'presubmit_checks_lib')): - from build_helpers import RunGnCheck - errors = RunGnCheck(FindSrcDirPath(input_api.PresubmitLocalPath()))[:5] - if errors: - return [ - output_api.PresubmitPromptWarning( - 'Some #includes do not match the build dependency graph. ' - 'Please run:\n' - ' gn gen --check ', - long_text='\n\n'.join(errors)) - ] - return [] + with _AddToPath( + input_api.os_path.join(input_api.PresubmitLocalPath(), + 'tools_webrtc', 'presubmit_checks_lib')): + from build_helpers import run_gn_check + errors = run_gn_check(input_api.change.RepositoryRoot())[:5] + if errors: + return [ + output_api.PresubmitPromptWarning( + 'Some #includes do not match the build dependency graph. ' + 'Please run:\n' + ' gn gen --check ', + long_text='\n\n'.join(errors)) + ] + return [] def CheckUnwantedDependencies(input_api, output_api, source_file_filter): - """Runs checkdeps on #include statements added in this + """Runs checkdeps on #include statements added in this change. Breaking - rules is an error, breaking ! rules is a warning. """ - # Copied from Chromium's src/PRESUBMIT.py. - - # We need to wait until we have an input_api object and use this - # roundabout construct to import checkdeps because this file is - # eval-ed and thus doesn't have __file__. - src_path = FindSrcDirPath(input_api.PresubmitLocalPath()) - checkdeps_path = input_api.os_path.join(src_path, 'buildtools', 'checkdeps') - if not os.path.exists(checkdeps_path): - return [ - output_api.PresubmitError( - 'Cannot find checkdeps at %s\nHave you run "gclient sync" to ' - 'download all the DEPS entries?' % checkdeps_path) - ] - with _AddToPath(checkdeps_path): - import checkdeps - from cpp_checker import CppChecker - from rules import Rule - - added_includes = [] - for f in input_api.AffectedFiles(file_filter=source_file_filter): - if not CppChecker.IsCppFile(f.LocalPath()): - continue - - changed_lines = [line for _, line in f.ChangedContents()] - added_includes.append([f.LocalPath(), changed_lines]) - - deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath()) - - error_descriptions = [] - warning_descriptions = [] - for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes( - added_includes): - description_with_path = '%s\n %s' % (path, rule_description) - if rule_type == Rule.DISALLOW: - error_descriptions.append(description_with_path) - else: - warning_descriptions.append(description_with_path) + # Copied from Chromium's src/PRESUBMIT.py. + + # We need to wait until we have an input_api object and use this + # roundabout construct to import checkdeps because this file is + # eval-ed and thus doesn't have __file__. 
+ repo_root = input_api.change.RepositoryRoot() + checkdeps_path = input_api.os_path.join(repo_root, 'buildtools', + 'checkdeps') + if not os.path.exists(checkdeps_path): + return [ + output_api.PresubmitError( + 'Cannot find checkdeps at %s\nHave you run "gclient sync" to ' + 'download all the DEPS entries?' % checkdeps_path) + ] + with _AddToPath(checkdeps_path): + import checkdeps + from cpp_checker import CppChecker + from rules import Rule + + added_includes = [] + for f in input_api.AffectedFiles(file_filter=source_file_filter): + if not CppChecker.IsCppFile(f.LocalPath()): + continue + + changed_lines = [line for _, line in f.ChangedContents()] + added_includes.append([f.LocalPath(), changed_lines]) + + deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath()) + + error_descriptions = [] + warning_descriptions = [] + for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes( + added_includes): + description_with_path = '%s\n %s' % (path, rule_description) + if rule_type == Rule.DISALLOW: + error_descriptions.append(description_with_path) + else: + warning_descriptions.append(description_with_path) - results = [] - if error_descriptions: - results.append( - output_api.PresubmitError( - 'You added one or more #includes that violate checkdeps rules.' - '\nCheck that the DEPS files in these locations contain valid ' - 'rules.\nSee ' - 'https://cs.chromium.org/chromium/src/buildtools/checkdeps/ ' - 'for more details about checkdeps.', error_descriptions)) - if warning_descriptions: - results.append( - output_api.PresubmitPromptOrNotify( - 'You added one or more #includes of files that are temporarily' - '\nallowed but being removed. Can you avoid introducing the\n' - '#include? See relevant DEPS file(s) for details and contacts.' - '\nSee ' - 'https://cs.chromium.org/chromium/src/buildtools/checkdeps/ ' - 'for more details about checkdeps.', warning_descriptions)) - return results + results = [] + if error_descriptions: + results.append( + output_api.PresubmitError( + 'You added one or more #includes that violate checkdeps rules.' + '\nCheck that the DEPS files in these locations contain valid ' + 'rules.\nSee ' + 'https://cs.chromium.org/chromium/src/buildtools/checkdeps/ ' + 'for more details about checkdeps.', error_descriptions)) + if warning_descriptions: + results.append( + output_api.PresubmitPromptOrNotify( + 'You added one or more #includes of files that are temporarily' + '\nallowed but being removed. Can you avoid introducing the\n' + '#include? See relevant DEPS file(s) for details and contacts.' + '\nSee ' + 'https://cs.chromium.org/chromium/src/buildtools/checkdeps/ ' + 'for more details about checkdeps.', warning_descriptions)) + return results def CheckCommitMessageBugEntry(input_api, output_api): - """Check that bug entries are well-formed in commit message.""" - bogus_bug_msg = ( - 'Bogus Bug entry: %s. Please specify the issue tracker prefix and the ' - 'issue number, separated by a colon, e.g. webrtc:123 or chromium:12345.') - results = [] - for bug in input_api.change.BugsFromDescription(): - bug = bug.strip() - if bug.lower() == 'none': - continue - if 'b/' not in bug and ':' not in bug: - try: - if int(bug) > 100000: - # Rough indicator for current chromium bugs. - prefix_guess = 'chromium' - else: - prefix_guess = 'webrtc' - results.append('Bug entry requires issue tracker prefix, e.g. 
%s:%s' % - (prefix_guess, bug)) - except ValueError: - results.append(bogus_bug_msg % bug) - elif not (re.match(r'\w+:\d+', bug) or re.match(r'b/\d+', bug)): - results.append(bogus_bug_msg % bug) - return [output_api.PresubmitError(r) for r in results] + """Check that bug entries are well-formed in commit message.""" + bogus_bug_msg = ( + 'Bogus Bug entry: %s. Please specify the issue tracker prefix and the ' + 'issue number, separated by a colon, e.g. webrtc:123 or chromium:12345.' + ) + results = [] + for bug in input_api.change.BugsFromDescription(): + bug = bug.strip() + if bug.lower() == 'none': + continue + if 'b/' not in bug and ':' not in bug: + try: + if int(bug) > 100000: + # Rough indicator for current chromium bugs. + prefix_guess = 'chromium' + else: + prefix_guess = 'webrtc' + results.append( + 'Bug entry requires issue tracker prefix, e.g. %s:%s' % + (prefix_guess, bug)) + except ValueError: + results.append(bogus_bug_msg % bug) + elif not (re.match(r'\w+:\d+', bug) or re.match(r'b/\d+', bug)): + results.append(bogus_bug_msg % bug) + return [output_api.PresubmitError(r) for r in results] def CheckChangeHasBugField(input_api, output_api): - """Requires that the changelist is associated with a bug. + """Requires that the changelist is associated with a bug. This check is stricter than the one in depot_tools/presubmit_canned_checks.py since it fails the presubmit if the bug field is missing or doesn't contain @@ -775,277 +767,299 @@ def CheckChangeHasBugField(input_api, output_api): This supports both 'BUG=' and 'Bug:' since we are in the process of migrating to Gerrit and it encourages the usage of 'Bug:'. """ - if input_api.change.BugsFromDescription(): - return [] - return [ - output_api.PresubmitError( - 'The "Bug: [bug number]" footer is mandatory. Please create a ' - 'bug and reference it using either of:\n' - ' * https://bugs.webrtc.org - reference it using Bug: ' - 'webrtc:XXXX\n' - ' * https://crbug.com - reference it using Bug: chromium:XXXXXX') - ] + if input_api.change.BugsFromDescription(): + return [] + return [ + output_api.PresubmitError( + 'The "Bug: [bug number]" footer is mandatory. 
Please create a ' + 'bug and reference it using either of:\n' + ' * https://bugs.webrtc.org - reference it using Bug: ' + 'webrtc:XXXX\n' + ' * https://crbug.com - reference it using Bug: chromium:XXXXXX') + ] def CheckJSONParseErrors(input_api, output_api, source_file_filter): - """Check that JSON files do not contain syntax errors.""" + """Check that JSON files do not contain syntax errors.""" + def FilterFile(affected_file): + return (input_api.os_path.splitext(affected_file.LocalPath())[1] + == '.json' and source_file_filter(affected_file)) + + def GetJSONParseError(input_api, filename): + try: + contents = input_api.ReadFile(filename) + input_api.json.loads(contents) + except ValueError as e: + return e + return None + + results = [] + for affected_file in input_api.AffectedFiles(file_filter=FilterFile, + include_deletes=False): + parse_error = GetJSONParseError(input_api, + affected_file.AbsoluteLocalPath()) + if parse_error: + results.append( + output_api.PresubmitError( + '%s could not be parsed: %s' % + (affected_file.LocalPath(), parse_error))) + return results - def FilterFile(affected_file): - return (input_api.os_path.splitext(affected_file.LocalPath())[1] == '.json' - and source_file_filter(affected_file)) - def GetJSONParseError(input_api, filename): - try: - contents = input_api.ReadFile(filename) - input_api.json.loads(contents) - except ValueError as e: - return e - return None - - results = [] - for affected_file in input_api.AffectedFiles(file_filter=FilterFile, - include_deletes=False): - parse_error = GetJSONParseError(input_api, - affected_file.AbsoluteLocalPath()) - if parse_error: - results.append( - output_api.PresubmitError('%s could not be parsed: %s' % - (affected_file.LocalPath(), parse_error))) - return results +def RunPythonTests(input_api, output_api): + def Join(*args): + return input_api.os_path.join(input_api.PresubmitLocalPath(), *args) + + excluded_files = [ + # These tests should be run manually after webrtc_dashboard_upload + # target has been built. + 'catapult_uploader_test.py', + 'process_perf_results_test.py', + ] + test_directories = [ + input_api.PresubmitLocalPath(), + Join('rtc_tools', 'py_event_log_analyzer'), + ] + [ + root for root, _, files in os.walk(Join('tools_webrtc')) if any( + f.endswith('_test.py') and f not in excluded_files for f in files) + ] -def RunPythonTests(input_api, output_api): - def Join(*args): - return input_api.os_path.join(input_api.PresubmitLocalPath(), *args) - - excluded_files = [ - # These tests should be run manually after webrtc_dashboard_upload target - # has been built. 
- 'catapult_uploader_test.py', - 'process_perf_results_test.py', - ] - - test_directories = [ - input_api.PresubmitLocalPath(), - Join('rtc_tools', 'py_event_log_analyzer'), - ] + [ - root for root, _, files in os.walk(Join('tools_webrtc')) if any( - f.endswith('_test.py') and f not in excluded_files for f in files) - ] - - tests = [] - - for directory in test_directories: - tests.extend( - input_api.canned_checks.GetUnitTestsInDirectory( - input_api, - output_api, - directory, - files_to_check=[r'.+_test\.py$'], - run_on_python2=False)) - return input_api.RunTests(tests, parallel=True) + tests = [] + + for directory in test_directories: + tests.extend( + input_api.canned_checks.GetUnitTestsInDirectory( + input_api, + output_api, + directory, + files_to_check=[r'.+_test\.py$'], + run_on_python2=False)) + return input_api.RunTests(tests, parallel=True) def CheckUsageOfGoogleProtobufNamespace(input_api, output_api, source_file_filter): - """Checks that the namespace google::protobuf has not been used.""" - files = [] - pattern = input_api.re.compile(r'google::protobuf') - proto_utils_path = os.path.join('rtc_base', 'protobuf_utils.h') - file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter( - x)) - for f in input_api.AffectedSourceFiles(file_filter): - if f.LocalPath() in [proto_utils_path, 'PRESUBMIT.py']: - continue - contents = input_api.ReadFile(f) - if pattern.search(contents): - files.append(f) - - if files: - return [ - output_api.PresubmitError( - 'Please avoid to use namespace `google::protobuf` directly.\n' - 'Add a using directive in `%s` and include that header instead.' % - proto_utils_path, files) - ] - return [] + """Checks that the namespace google::protobuf has not been used.""" + files = set() + pattern = input_api.re.compile(r'google::protobuf') + proto_utils_path = os.path.join('rtc_base', 'protobuf_utils.h') + file_filter = lambda x: (input_api.FilterSourceFile(x) and + source_file_filter(x)) + for f in input_api.AffectedSourceFiles(file_filter): + if f.LocalPath() in [proto_utils_path, 'PRESUBMIT.py']: + continue + contents = input_api.ReadFile(f) + if pattern.search(contents): + files.add(f) + + if files: + return [ + output_api.PresubmitError( + 'Please avoid to use namespace `google::protobuf` directly.\n' + 'Add a using directive in `%s` and include that header instead.' + % proto_utils_path, list(files)) + ] + return [] def _LicenseHeader(input_api): - """Returns the license header regexp.""" - # Accept any year number from 2003 to the current year - current_year = int(input_api.time.strftime('%Y')) - allowed_years = (str(s) for s in reversed(range(2003, current_year + 1))) - years_re = '(' + '|'.join(allowed_years) + ')' - license_header = ( - r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. ' - r'All [Rr]ights [Rr]eserved\.\n' - r'.*?\n' - r'.*? Use of this source code is governed by a BSD-style license\n' - r'.*? that can be found in the LICENSE file in the root of the source\n' - r'.*? tree\. An additional intellectual property rights grant can be ' - r'found\n' - r'.*? in the file PATENTS\. All contributing project authors may\n' - r'.*? 
be found in the AUTHORS file in the root of the source tree\.\n' - ) % { - 'year': years_re, - } - return license_header + """Returns the license header regexp.""" + # Accept any year number from 2003 to the current year + current_year = int(input_api.time.strftime('%Y')) + allowed_years = (str(s) for s in reversed(range(2003, current_year + 1))) + years_re = '(' + '|'.join(allowed_years) + ')' + license_header = ( + r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. ' + r'All [Rr]ights [Rr]eserved\.\n' + r'.*?\n' + r'.*? Use of this source code is governed by a BSD-style license\n' + r'.*? that can be found in the LICENSE file in the root of the source\n' + r'.*? tree\. An additional intellectual property rights grant can be ' + r'found\n' + r'.*? in the file PATENTS\. All contributing project authors may\n' + r'.*? be found in the AUTHORS file in the root of the source tree\.\n' + ) % { + 'year': years_re, + } + return license_header def CommonChecks(input_api, output_api): - """Checks common to both upload and commit.""" - results = [] - # Filter out files that are in objc or ios dirs from being cpplint-ed since - # they do not follow C++ lint rules. - exception_list = input_api.DEFAULT_FILES_TO_SKIP + ( - r".*\bobjc[\\\/].*", - r".*objc\.[hcm]+$", - ) - source_file_filter = lambda x: input_api.FilterSourceFile( - x, None, exception_list) - results.extend( - CheckApprovedFilesLintClean(input_api, output_api, source_file_filter)) - results.extend( - input_api.canned_checks.CheckLicense(input_api, output_api, - _LicenseHeader(input_api))) - - # TODO(bugs.webrtc.org/12114): Delete this filter and run pylint on - # all python files. This is a temporary solution. - python_file_filter = lambda f: (f.LocalPath().endswith('.py') and - source_file_filter(f)) - python_changed_files = [ - f.LocalPath() - for f in input_api.AffectedFiles(include_deletes=False, - file_filter=python_file_filter) - ] - - results.extend( - input_api.canned_checks.RunPylint( - input_api, - output_api, - files_to_check=python_changed_files, - files_to_skip=( - r'^base[\\\/].*\.py$', - r'^build[\\\/].*\.py$', - r'^buildtools[\\\/].*\.py$', - r'^infra[\\\/].*\.py$', - r'^ios[\\\/].*\.py$', - r'^out.*[\\\/].*\.py$', - r'^testing[\\\/].*\.py$', - r'^third_party[\\\/].*\.py$', - r'^tools[\\\/].*\.py$', - r'^xcodebuild.*[\\\/].*\.py$', - ), - pylintrc='pylintrc', - version='2.7')) - - # TODO(bugs.webrtc.org/13606): talk/ is no more, so make below checks simpler? - # WebRTC can't use the presubmit_canned_checks.PanProjectChecks function - # since we need to have different license checks - # in talk/ and webrtc/directories. - # Instead, hand-picked checks are included below. - - # .m and .mm files are ObjC files. For simplicity we will consider - # .h files in ObjC subdirectories ObjC headers. - objc_filter_list = (r'.+\.m$', r'.+\.mm$', r'.+objc\/.+\.h$') - # Skip long-lines check for DEPS and GN files. - build_file_filter_list = (r'.+\.gn$', r'.+\.gni$', 'DEPS') - # Also we will skip most checks for third_party directory. 
- third_party_filter_list = (r'(^|.*[\\\/])third_party[\\\/].+', ) - eighty_char_sources = lambda x: input_api.FilterSourceFile( - x, - files_to_skip=build_file_filter_list + objc_filter_list + - third_party_filter_list) - hundred_char_sources = lambda x: input_api.FilterSourceFile( - x, files_to_check=objc_filter_list) - non_third_party_sources = lambda x: input_api.FilterSourceFile( - x, files_to_skip=third_party_filter_list) - - results.extend( - input_api.canned_checks.CheckLongLines( - input_api, - output_api, - maxlen=80, - source_file_filter=eighty_char_sources)) - results.extend( - input_api.canned_checks.CheckLongLines( - input_api, - output_api, - maxlen=100, - source_file_filter=hundred_char_sources)) - results.extend( - input_api.canned_checks.CheckChangeHasNoTabs( - input_api, output_api, source_file_filter=non_third_party_sources)) - results.extend( - input_api.canned_checks.CheckChangeHasNoStrayWhitespace( - input_api, output_api, source_file_filter=non_third_party_sources)) - results.extend( - input_api.canned_checks.CheckAuthorizedAuthor( - input_api, - output_api, - bot_allowlist=[ - 'chromium-webrtc-autoroll@webrtc-ci.iam.gserviceaccount.com', - 'webrtc-version-updater@webrtc-ci.iam.gserviceaccount.com', - ])) - results.extend( - input_api.canned_checks.CheckChangeTodoHasOwner( - input_api, output_api, source_file_filter=non_third_party_sources)) - results.extend( - input_api.canned_checks.CheckPatchFormatted(input_api, output_api)) - results.extend(CheckNativeApiHeaderChanges(input_api, output_api)) - results.extend( - CheckNoIOStreamInHeaders(input_api, - output_api, - source_file_filter=non_third_party_sources)) - results.extend( - CheckNoPragmaOnce(input_api, - output_api, - source_file_filter=non_third_party_sources)) - results.extend( - CheckNoFRIEND_TEST(input_api, - output_api, - source_file_filter=non_third_party_sources)) - results.extend(CheckGnChanges(input_api, output_api)) - results.extend( - CheckUnwantedDependencies(input_api, - output_api, - source_file_filter=non_third_party_sources)) - results.extend( - CheckJSONParseErrors(input_api, + """Checks common to both upload and commit.""" + results = [] + # Filter out files that are in objc or ios dirs from being cpplint-ed since + # they do not follow C++ lint rules. + exception_list = input_api.DEFAULT_FILES_TO_SKIP + ( + r".*\bobjc[\\\/].*", + r".*objc\.[hcm]+$", + ) + source_file_filter = lambda x: input_api.FilterSourceFile( + x, None, exception_list) + results.extend( + CheckApprovedFilesLintClean(input_api, output_api, source_file_filter)) + results.extend( + input_api.canned_checks.CheckLicense(input_api, output_api, + _LicenseHeader(input_api))) + + # TODO(bugs.webrtc.org/12114): Delete this filter and run pylint on + # all python files. This is a temporary solution. 
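# Illustrative note; not part of this change. FilterSourceFile is assumed
# to apply these skip patterns to each file's local path with re.match,
# so on hypothetical paths the objc entries in exception_list above
# behave like this:
#
#   import re
#   objc_skip = (r".*\bobjc[\\\/].*", r".*objc\.[hcm]+$")
#   assert any(re.match(p, 'sdk/objc/api/RTCFoo.mm') for p in objc_skip)
#   assert not any(re.match(p, 'pc/peer_connection.cc') for p in objc_skip)
#
# End of illustrative note.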
+ python_file_filter = lambda f: (f.LocalPath().endswith('.py') and + source_file_filter(f)) + python_changed_files = [ + f.LocalPath() + for f in input_api.AffectedFiles(include_deletes=False, + file_filter=python_file_filter) + ] + pylint_new_style = [ + f for f in python_changed_files if f not in PYLINT_OLD_STYLE + ] + pylint_old_style = [ + f for f in python_changed_files if f in PYLINT_OLD_STYLE + ] + if pylint_new_style: + results.extend( + input_api.canned_checks.RunPylint( + input_api, + output_api, + files_to_check=pylint_new_style, + files_to_skip=( + r'^base[\\\/].*\.py$', + r'^build[\\\/].*\.py$', + r'^buildtools[\\\/].*\.py$', + r'^infra[\\\/].*\.py$', + r'^ios[\\\/].*\.py$', + r'^out.*[\\\/].*\.py$', + r'^testing[\\\/].*\.py$', + r'^third_party[\\\/].*\.py$', + r'^tools[\\\/].*\.py$', + r'^xcodebuild.*[\\\/].*\.py$', + ), + pylintrc='pylintrc', + version='2.7')) + + if pylint_old_style: + results.extend( + input_api.canned_checks.RunPylint(input_api, + output_api, + files_to_check=pylint_old_style, + pylintrc='pylintrc_old_style', + version='2.7')) + # TODO(bugs.webrtc.org/13606): talk/ is no more, so make below checks + # simpler. WebRTC can't use the presubmit_canned_checks.PanProjectChecks + # function since we need to have different license checks in talk/ and + # webrtc/directories. Instead, hand-picked checks are included below. + + # .m and .mm files are ObjC files. For simplicity we will consider + # .h files in ObjC subdirectories ObjC headers. + objc_filter_list = (r'.+\.m$', r'.+\.mm$', r'.+objc\/.+\.h$') + # Skip long-lines check for DEPS and GN files. + build_file_filter_list = (r'.+\.gn$', r'.+\.gni$', 'DEPS') + # Also we will skip most checks for third_party directory. + third_party_filter_list = (r'(^|.*[\\\/])third_party[\\\/].+', ) + eighty_char_sources = lambda x: input_api.FilterSourceFile( + x, + files_to_skip=build_file_filter_list + objc_filter_list + + third_party_filter_list) + hundred_char_sources = lambda x: input_api.FilterSourceFile( + x, files_to_check=objc_filter_list) + non_third_party_sources = lambda x: input_api.FilterSourceFile( + x, files_to_skip=third_party_filter_list) + + results.extend( + input_api.canned_checks.CheckLongLines( + input_api, + output_api, + maxlen=80, + source_file_filter=eighty_char_sources)) + results.extend( + input_api.canned_checks.CheckLongLines( + input_api, + output_api, + maxlen=100, + source_file_filter=hundred_char_sources)) + results.extend( + input_api.canned_checks.CheckChangeHasNoTabs( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend( + input_api.canned_checks.CheckChangeHasNoStrayWhitespace( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend( + input_api.canned_checks.CheckAuthorizedAuthor( + input_api, + output_api, + bot_allowlist=[ + 'chromium-webrtc-autoroll@webrtc-ci.iam.gserviceaccount.com', + 'webrtc-version-updater@webrtc-ci.iam.gserviceaccount.com', + ])) + results.extend( + input_api.canned_checks.CheckChangeTodoHasOwner( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend( + input_api.canned_checks.CheckPatchFormatted(input_api, output_api)) + results.extend(CheckNativeApiHeaderChanges(input_api, output_api)) + results.extend( + CheckNoIOStreamInHeaders(input_api, + output_api, + source_file_filter=non_third_party_sources)) + results.extend( + CheckNoPragmaOnce(input_api, + output_api, + source_file_filter=non_third_party_sources)) + results.extend( + CheckNoFRIEND_TEST(input_api, 
output_api, source_file_filter=non_third_party_sources)) - results.extend(RunPythonTests(input_api, output_api)) - results.extend( - CheckUsageOfGoogleProtobufNamespace( - input_api, output_api, source_file_filter=non_third_party_sources)) - results.extend( - CheckOrphanHeaders(input_api, - output_api, - source_file_filter=non_third_party_sources)) - results.extend( - CheckNewlineAtTheEndOfProtoFiles( - input_api, output_api, source_file_filter=non_third_party_sources)) - results.extend( - CheckNoStreamUsageIsAdded(input_api, output_api, non_third_party_sources)) - results.extend( - CheckNoTestCaseUsageIsAdded(input_api, output_api, + results.extend(CheckGnChanges(input_api, output_api)) + results.extend( + CheckUnwantedDependencies(input_api, + output_api, + source_file_filter=non_third_party_sources)) + results.extend( + CheckJSONParseErrors(input_api, + output_api, + source_file_filter=non_third_party_sources)) + results.extend(RunPythonTests(input_api, output_api)) + results.extend( + CheckUsageOfGoogleProtobufNamespace( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend( + CheckOrphanHeaders(input_api, + output_api, + source_file_filter=non_third_party_sources)) + results.extend( + CheckNewlineAtTheEndOfProtoFiles( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend( + CheckLFNewline(input_api, output_api, non_third_party_sources)) + results.extend( + CheckNoStreamUsageIsAdded(input_api, output_api, + non_third_party_sources)) + results.extend( + CheckNoTestCaseUsageIsAdded(input_api, output_api, + non_third_party_sources)) + results.extend(CheckAddedDepsHaveTargetApprovals(input_api, output_api)) + results.extend(CheckApiDepsFileIsUpToDate(input_api, output_api)) + results.extend( + CheckAbslMemoryInclude(input_api, output_api, non_third_party_sources)) + results.extend( + CheckAssertUsage(input_api, output_api, non_third_party_sources)) + results.extend( + CheckBannedAbslMakeUnique(input_api, output_api, non_third_party_sources)) - results.extend(CheckAddedDepsHaveTargetApprovals(input_api, output_api)) - results.extend(CheckApiDepsFileIsUpToDate(input_api, output_api)) - results.extend( - CheckAbslMemoryInclude(input_api, output_api, non_third_party_sources)) - results.extend( - CheckAssertUsage(input_api, output_api, non_third_party_sources)) - results.extend( - CheckBannedAbslMakeUnique(input_api, output_api, non_third_party_sources)) - results.extend( - CheckObjcApiSymbols(input_api, output_api, non_third_party_sources)) - return results + results.extend( + CheckBannedAbslOptional(input_api, output_api, + non_third_party_sources)) + results.extend( + CheckObjcApiSymbols(input_api, output_api, non_third_party_sources)) + results.extend( + CheckConditionalIncludes(input_api, output_api, + non_third_party_sources)) + return results def CheckApiDepsFileIsUpToDate(input_api, output_api): - """Check that 'include_rules' in api/DEPS is up to date. + """Check that 'include_rules' in api/DEPS is up to date. The file api/DEPS must be kept up to date in order to avoid to avoid to include internal header from WebRTC's api/ headers. @@ -1054,378 +1068,452 @@ def CheckApiDepsFileIsUpToDate(input_api, output_api): rule for each root level directory. More focused allow rules can be added to 'specific_include_rules'. 
""" - results = [] - api_deps = os.path.join(input_api.PresubmitLocalPath(), 'api', 'DEPS') - with open(api_deps) as f: - deps_content = _ParseDeps(f.read()) - - include_rules = deps_content.get('include_rules', []) - dirs_to_skip = set(['api', 'docs']) - - # Only check top level directories affected by the current CL. - dirs_to_check = set() - for f in input_api.AffectedFiles(): - path_tokens = [t for t in f.LocalPath().split(os.sep) if t] - if len(path_tokens) > 1: - if (path_tokens[0] not in dirs_to_skip and os.path.isdir( - os.path.join(input_api.PresubmitLocalPath(), path_tokens[0]))): - dirs_to_check.add(path_tokens[0]) - - missing_include_rules = set() - for p in dirs_to_check: - rule = '-%s' % p - if rule not in include_rules: - missing_include_rules.add(rule) - - if missing_include_rules: - error_msg = [ - 'include_rules = [\n', - ' ...\n', - ] + results = [] + api_deps = os.path.join(input_api.PresubmitLocalPath(), 'api', 'DEPS') + with open(api_deps) as f: + deps_content = _ParseDeps(f.read()) + + include_rules = deps_content.get('include_rules', []) + dirs_to_skip = set(['api', 'docs']) + + # Only check top level directories affected by the current CL. + dirs_to_check = set() + for f in input_api.AffectedFiles(): + path_tokens = [t for t in f.LocalPath().split(os.sep) if t] + if len(path_tokens) > 1: + if (path_tokens[0] not in dirs_to_skip and os.path.isdir( + os.path.join(input_api.PresubmitLocalPath(), + path_tokens[0]))): + dirs_to_check.add(path_tokens[0]) + + missing_include_rules = set() + for p in dirs_to_check: + rule = '-%s' % p + if rule not in include_rules: + missing_include_rules.add(rule) + + if missing_include_rules: + error_msg = [ + 'include_rules = [\n', + ' ...\n', + ] + + for r in sorted(missing_include_rules): + error_msg.append(' "%s",\n' % str(r)) + + error_msg.append(' ...\n') + error_msg.append(']\n') - for r in sorted(missing_include_rules): - error_msg.append(' "%s",\n' % str(r)) + results.append( + output_api.PresubmitError( + 'New root level directory detected! WebRTC api/ headers should ' + 'not #include headers from \n' + 'the new directory, so please update "include_rules" in file\n' + '"%s". Example:\n%s\n' % (api_deps, ''.join(error_msg)))) - error_msg.append(' ...\n') - error_msg.append(']\n') + return results - results.append( - output_api.PresubmitError( - 'New root level directory detected! WebRTC api/ headers should ' - 'not #include headers from \n' - 'the new directory, so please update "include_rules" in file\n' - '"%s". 
Example:\n%s\n' % (api_deps, ''.join(error_msg)))) - return results +def CheckBannedAbslMakeUnique(input_api, output_api, source_file_filter): + file_filter = lambda f: (f.LocalPath().endswith( + ('.cc', '.h')) and source_file_filter(f)) + + files = set() + for f in input_api.AffectedFiles(include_deletes=False, + file_filter=file_filter): + for _, line in f.ChangedContents(): + if 'absl::make_unique' in line: + files.add(f) + break + + if files: + return [ + output_api.PresubmitError( + 'Please use std::make_unique instead of absl::make_unique.\n' + 'Affected files:', list(files)) + ] + return [] -def CheckBannedAbslMakeUnique(input_api, output_api, source_file_filter): - file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h')) and - source_file_filter(f)) - - files = [] - for f in input_api.AffectedFiles(include_deletes=False, - file_filter=file_filter): - for _, line in f.ChangedContents(): - if 'absl::make_unique' in line: - files.append(f) - break - - if files: - return [ - output_api.PresubmitError( - 'Please use std::make_unique instead of absl::make_unique.\n' - 'Affected files:', files) - ] - return [] +def CheckBannedAbslOptional(input_api, output_api, source_file_filter): + absl_optional = re.compile(r'absl::(optional|make_optional|nullopt)', + re.MULTILINE) + absl_optional_include = re.compile(r'^#include\s*"absl/types/optional\.h"', + input_api.re.MULTILINE) + file_filter = lambda f: (f.LocalPath().endswith( + ('.cc', '.h')) and source_file_filter(f)) + + files = set() + for f in input_api.AffectedFiles(include_deletes=False, + file_filter=file_filter): + for _, line in f.ChangedContents(): + if absl_optional.search(line) or absl_optional_include.search( + line): + files.add(f.LocalPath()) + break + + if files: + return [ + output_api.PresubmitError( + 'Please use std::optional instead of absl::optional.\n' + 'Affected files:', list(files)) + ] + return [] + + +def CheckConditionalIncludes(input_api, output_api, source_file_filter): + conditional_includes = { + '': '"rtc_base/ip_address.h"', + '': '"rtc_base/net_helpers.h"', + } + file_filter = lambda f: (f.LocalPath().endswith( + ('.cc', '.h')) and source_file_filter(f)) + results = [] + for key, value in conditional_includes.items(): + include_regex = re.compile('^#include ' + key + + '((?!IWYU pragma|no-presubmit-check).)*$') + files = set() + for f in input_api.AffectedFiles(include_deletes=False, + file_filter=file_filter): + for _, line in f.ChangedContents(): + if include_regex.search(line): + files.add(f.LocalPath()) + break + + if files: + results.append( + output_api.PresubmitError( + 'Please include ' + value + ' instead of ' + key + + '.\nAffected files:', list(files))) + return results def CheckObjcApiSymbols(input_api, output_api, source_file_filter): - rtc_objc_export = re.compile(r'RTC_OBJC_EXPORT(.|\n){26}', - re.MULTILINE | re.DOTALL) - file_filter = lambda f: (f.LocalPath().endswith(('.h')) and - source_file_filter(f)) - - files = [] - file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter( - x)) - for f in input_api.AffectedSourceFiles(file_filter): - if not f.LocalPath().endswith('.h') or not 'sdk/objc' in f.LocalPath(): - continue - if f.LocalPath().endswith('sdk/objc/base/RTCMacros.h'): - continue - contents = input_api.ReadFile(f) - for match in rtc_objc_export.finditer(contents): - export_block = match.group(0) - if 'RTC_OBJC_TYPE' not in export_block: - files.append(f.LocalPath()) - - if len(files) > 0: - return [ - output_api.PresubmitError( - 'RTC_OBJC_EXPORT types must 
be wrapped into an RTC_OBJC_TYPE() ' + - 'macro.\n\n' + 'For example:\n' + - 'RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RtcFoo)\n\n' + - 'RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE(RtcFoo)\n\n' + - 'Please fix the following files:', files) - ] - return [] + rtc_objc_export = re.compile(r'RTC_OBJC_EXPORT(.|\n){26}', + re.MULTILINE | re.DOTALL) + file_filter = lambda f: (f.LocalPath().endswith( + ('.h')) and source_file_filter(f)) + + files = set() + file_filter = lambda x: (input_api.FilterSourceFile(x) and + source_file_filter(x)) + for f in input_api.AffectedSourceFiles(file_filter): + if not f.LocalPath().endswith('.h') or not 'sdk/objc' in f.LocalPath(): + continue + if f.LocalPath().endswith('sdk/objc/base/RTCMacros.h'): + continue + contents = input_api.ReadFile(f) + for match in rtc_objc_export.finditer(contents): + export_block = match.group(0) + if 'RTC_OBJC_TYPE' not in export_block: + files.add(f.LocalPath()) + + if len(files) > 0: + return [ + output_api.PresubmitError( + 'RTC_OBJC_EXPORT types must be wrapped into an RTC_OBJC_TYPE() ' + + 'macro.\n\n' + 'For example:\n' + + 'RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RtcFoo)\n\n' + + 'RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE(RtcFoo)\n\n' + + 'Please fix the following files:', list(files)) + ] + return [] def CheckAssertUsage(input_api, output_api, source_file_filter): - pattern = input_api.re.compile(r'\bassert\(') - file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h', '.m', '.mm')) - and source_file_filter(f)) - - files = [] - for f in input_api.AffectedFiles(include_deletes=False, - file_filter=file_filter): - for _, line in f.ChangedContents(): - if pattern.search(line): - files.append(f.LocalPath()) - break - - if len(files) > 0: - return [ - output_api.PresubmitError( - 'Usage of assert() has been detected in the following files, ' - 'please use RTC_DCHECK() instead.\n Files:', files) - ] - return [] + pattern = input_api.re.compile(r'\bassert\(') + file_filter = lambda f: (f.LocalPath().endswith( + ('.cc', '.h', '.m', '.mm')) and source_file_filter(f)) + + files = set() + for f in input_api.AffectedFiles(include_deletes=False, + file_filter=file_filter): + for _, line in f.ChangedContents(): + if pattern.search(line): + files.add(f.LocalPath()) + break + + if len(files) > 0: + return [ + output_api.PresubmitError( + 'Usage of assert() has been detected in the following files, ' + 'please use RTC_DCHECK() instead.\n Files:', list(files)) + ] + return [] def CheckAbslMemoryInclude(input_api, output_api, source_file_filter): - pattern = input_api.re.compile(r'^#include\s*"absl/memory/memory.h"', - input_api.re.MULTILINE) - file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h')) and - source_file_filter(f)) - - files = [] - for f in input_api.AffectedFiles(include_deletes=False, - file_filter=file_filter): - contents = input_api.ReadFile(f) - if pattern.search(contents): - continue - for _, line in f.ChangedContents(): - if 'absl::WrapUnique' in line: - files.append(f) - break - - if len(files) > 0: - return [ - output_api.PresubmitError( - 'Please include "absl/memory/memory.h" header for ' - 'absl::WrapUnique.\nThis header may or may not be included ' - 'transitively depending on the C++ standard version.', files) - ] - return [] + pattern = input_api.re.compile(r'^#include\s*"absl/memory/memory.h"', + input_api.re.MULTILINE) + file_filter = lambda f: (f.LocalPath().endswith( + ('.cc', '.h')) and source_file_filter(f)) + + files = set() + for f in input_api.AffectedFiles(include_deletes=False, + 
file_filter=file_filter): + contents = input_api.ReadFile(f) + if pattern.search(contents): + continue + for _, line in f.ChangedContents(): + if 'absl::WrapUnique' in line: + files.add(f) + break + + if len(files) > 0: + return [ + output_api.PresubmitError( + 'Please include "absl/memory/memory.h" header for ' + 'absl::WrapUnique.\nThis header may or may not be included ' + 'transitively depending on the C++ standard version.', + list(files)) + ] + return [] def CheckChangeOnUpload(input_api, output_api): - results = [] - results.extend(CommonChecks(input_api, output_api)) - results.extend(CheckGnGen(input_api, output_api)) - results.extend(input_api.canned_checks.CheckGNFormatted( - input_api, output_api)) - return results + results = [] + results.extend(CommonChecks(input_api, output_api)) + results.extend(CheckGnGen(input_api, output_api)) + results.extend( + input_api.canned_checks.CheckGNFormatted(input_api, output_api)) + return results def CheckChangeOnCommit(input_api, output_api): - results = [] - results.extend(CommonChecks(input_api, output_api)) - results.extend(VerifyNativeApiHeadersListIsValid(input_api, output_api)) - results.extend(input_api.canned_checks.CheckOwners(input_api, output_api)) - results.extend( - input_api.canned_checks.CheckChangeWasUploaded(input_api, output_api)) - results.extend( - input_api.canned_checks.CheckChangeHasDescription(input_api, output_api)) - results.extend(CheckChangeHasBugField(input_api, output_api)) - results.extend(CheckCommitMessageBugEntry(input_api, output_api)) - results.extend( - input_api.canned_checks.CheckTreeIsOpen( - input_api, - output_api, - json_url='http://webrtc-status.appspot.com/current?format=json')) - return results + results = [] + results.extend(CommonChecks(input_api, output_api)) + results.extend(VerifyNativeApiHeadersListIsValid(input_api, output_api)) + results.extend(input_api.canned_checks.CheckOwners(input_api, output_api)) + results.extend( + input_api.canned_checks.CheckChangeWasUploaded(input_api, output_api)) + results.extend( + input_api.canned_checks.CheckChangeHasDescription( + input_api, output_api)) + results.extend(CheckChangeHasBugField(input_api, output_api)) + results.extend(CheckCommitMessageBugEntry(input_api, output_api)) + results.extend( + input_api.canned_checks.CheckTreeIsOpen( + input_api, + output_api, + json_url='http://webrtc-status.appspot.com/current?format=json')) + return results def CheckOrphanHeaders(input_api, output_api, source_file_filter): - # We need to wait until we have an input_api object and use this - # roundabout construct to import prebubmit_checks_lib because this file is - # eval-ed and thus doesn't have __file__. 
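# Illustrative note; not part of this change, using a hypothetical path:
# if a CL adds rtc_base/foo.h but the BUILD.gn file located for that path
# has no target listing the header under `sources`, this check reports an
# error of the form:
#   rtc_base/foo.h should be listed in rtc_base/BUILD.gn.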
- error_msg = """{} should be listed in {}.""" - results = [] - exempt_paths = [re.escape(os.path.join('tools_webrtc', 'ios', 'SDK'))] - - with _AddToPath( - input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools_webrtc', - 'presubmit_checks_lib')): - from check_orphan_headers import GetBuildGnPathFromFilePath - from check_orphan_headers import IsHeaderInBuildGn - - file_filter = lambda x: input_api.FilterSourceFile( - x, files_to_skip=exempt_paths) and source_file_filter(x) - for f in input_api.AffectedSourceFiles(file_filter): - if f.LocalPath().endswith('.h'): - file_path = os.path.abspath(f.LocalPath()) - root_dir = os.getcwd() - gn_file_path = GetBuildGnPathFromFilePath(file_path, os.path.exists, - root_dir) - in_build_gn = IsHeaderInBuildGn(file_path, gn_file_path) - if not in_build_gn: - results.append( - output_api.PresubmitError( - error_msg.format(f.LocalPath(), os.path.relpath(gn_file_path)))) - return results + # We need to wait until we have an input_api object and use this + # roundabout construct to import prebubmit_checks_lib because this file is + # eval-ed and thus doesn't have __file__. + error_msg = """{} should be listed in {}.""" + results = [] + exempt_paths = [re.escape(os.path.join('tools_webrtc', 'ios', 'SDK'))] + + with _AddToPath( + input_api.os_path.join(input_api.PresubmitLocalPath(), + 'tools_webrtc', 'presubmit_checks_lib')): + from check_orphan_headers import GetBuildGnPathFromFilePath + from check_orphan_headers import IsHeaderInBuildGn + + file_filter = lambda x: input_api.FilterSourceFile( + x, files_to_skip=exempt_paths) and source_file_filter(x) + for f in input_api.AffectedSourceFiles(file_filter): + if f.LocalPath().endswith('.h'): + file_path = os.path.abspath(f.LocalPath()) + root_dir = os.getcwd() + gn_file_path = GetBuildGnPathFromFilePath(file_path, + os.path.exists, root_dir) + in_build_gn = IsHeaderInBuildGn(file_path, gn_file_path) + if not in_build_gn: + results.append( + output_api.PresubmitError( + error_msg.format(f.LocalPath(), + os.path.relpath(gn_file_path)))) + return results def CheckNewlineAtTheEndOfProtoFiles(input_api, output_api, source_file_filter): - """Checks that all .proto files are terminated with a newline.""" - error_msg = 'File {} must end with exactly one newline.' - results = [] - file_filter = lambda x: input_api.FilterSourceFile( - x, files_to_check=(r'.+\.proto$', )) and source_file_filter(x) - for f in input_api.AffectedSourceFiles(file_filter): - file_path = f.LocalPath() - with open(file_path) as f: - lines = f.readlines() - if len(lines) > 0 and not lines[-1].endswith('\n'): - results.append(output_api.PresubmitError(error_msg.format(file_path))) - return results - + """Checks that all .proto files are terminated with a newline.""" + error_msg = 'File {} must end with exactly one newline.' + results = [] + file_filter = lambda x: input_api.FilterSourceFile( + x, files_to_check=(r'.+\.proto$', )) and source_file_filter(x) + for f in input_api.AffectedSourceFiles(file_filter): + file_path = f.LocalPath() + with open(file_path) as f: + lines = f.readlines() + if len(lines) > 0 and not lines[-1].endswith('\n'): + results.append( + output_api.PresubmitError(error_msg.format(file_path))) + return results + + +def CheckLFNewline(input_api, output_api, source_file_filter): + """Checks that all files have LF newlines.""" + error_msg = 'File {} must use LF newlines.' 
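# Illustrative note; not part of this change: the file is read in binary
# mode below because Python's text mode applies universal newlines, which
# translates '\r\n' to '\n' on read and would hide the CRLF sequences this
# check looks for. Hypothetical example:
#
#   with open(path, 'rb') as fb:
#       has_crlf = b'\r\n' in fb.read()   # CRLF still visible here
#   with open(path, 'r') as ft:
#       '\r\n' in ft.read()               # False: already normalized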
+ results = [] + file_filter = lambda x: input_api.FilterSourceFile( + x, files_to_check=(r'.+', )) and source_file_filter(x) + for f in input_api.AffectedSourceFiles(file_filter): + file_path = f.LocalPath() + with open(file_path, 'rb') as f: + if b'\r\n' in f.read(): + results.append( + output_api.PresubmitError(error_msg.format(file_path))) + return results def _ExtractAddRulesFromParsedDeps(parsed_deps): - """Extract the rules that add dependencies from a parsed DEPS file. + """Extract the rules that add dependencies from a parsed DEPS file. Args: parsed_deps: the locals dictionary from evaluating the DEPS file.""" - add_rules = set() - add_rules.update([ - rule[1:] for rule in parsed_deps.get('include_rules', []) - if rule.startswith('+') or rule.startswith('!') - ]) - for _, rules in parsed_deps.get('specific_include_rules', {}).items(): + add_rules = set() add_rules.update([ - rule[1:] for rule in rules + rule[1:] for rule in parsed_deps.get('include_rules', []) if rule.startswith('+') or rule.startswith('!') ]) - return add_rules + for _, rules in parsed_deps.get('specific_include_rules', {}).items(): + add_rules.update([ + rule[1:] for rule in rules + if rule.startswith('+') or rule.startswith('!') + ]) + return add_rules def _ParseDeps(contents): - """Simple helper for parsing DEPS files.""" + """Simple helper for parsing DEPS files.""" - # Stubs for handling special syntax in the root DEPS file. - class VarImpl: - def __init__(self, local_scope): - self._local_scope = local_scope + # Stubs for handling special syntax in the root DEPS file. + class VarImpl: + def __init__(self, local_scope): + self._local_scope = local_scope - def Lookup(self, var_name): - """Implements the Var syntax.""" - try: - return self._local_scope['vars'][var_name] - except KeyError as var_not_defined: - raise Exception('Var is not defined: %s' % - var_name) from var_not_defined + def Lookup(self, var_name): + """Implements the Var syntax.""" + try: + return self._local_scope['vars'][var_name] + except KeyError as var_not_defined: + raise Exception('Var is not defined: %s' % + var_name) from var_not_defined - local_scope = {} - global_scope = { - 'Var': VarImpl(local_scope).Lookup, - } - exec(contents, global_scope, local_scope) - return local_scope + local_scope = {} + global_scope = { + 'Var': VarImpl(local_scope).Lookup, + } + exec(contents, global_scope, local_scope) + return local_scope def _CalculateAddedDeps(os_path, old_contents, new_contents): - """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns + """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns a set of DEPS entries that we should look up. For a directory (rather than a specific filename) we fake a path to a specific filename by adding /DEPS. This is chosen as a file that will seldom or never be subject to per-file include_rules. """ - # We ignore deps entries on auto-generated directories. - auto_generated_dirs = ['grit', 'jni'] + # We ignore deps entries on auto-generated directories. 
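# Illustrative example; not part of this change, with hypothetical DEPS
# contents: if the old include_rules were ['+base'] and the new ones are
# ['+base', '+net', '+net/foo.h'], the added rules are 'net' and
# 'net/foo.h', and this helper returns {'net/DEPS', 'net/foo.h'} (with
# '/' as the path separator): a plain directory gets a fake /DEPS file
# name appended, while a rule naming a specific .h file is kept as-is.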
+ auto_generated_dirs = ['grit', 'jni'] - old_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(old_contents)) - new_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(new_contents)) + old_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(old_contents)) + new_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(new_contents)) - added_deps = new_deps.difference(old_deps) + added_deps = new_deps.difference(old_deps) - results = set() - for added_dep in added_deps: - if added_dep.split('/')[0] in auto_generated_dirs: - continue - # Assume that a rule that ends in .h is a rule for a specific file. - if added_dep.endswith('.h'): - results.add(added_dep) - else: - results.add(os_path.join(added_dep, 'DEPS')) - return results + results = set() + for added_dep in added_deps: + if added_dep.split('/')[0] in auto_generated_dirs: + continue + # Assume that a rule that ends in .h is a rule for a specific file. + if added_dep.endswith('.h'): + results.add(added_dep) + else: + results.add(os_path.join(added_dep, 'DEPS')) + return results def CheckAddedDepsHaveTargetApprovals(input_api, output_api): - """When a dependency prefixed with + is added to a DEPS file, we + """When a dependency prefixed with + is added to a DEPS file, we want to make sure that the change is reviewed by an OWNER of the target file or directory, to avoid layering violations from being introduced. This check verifies that this happens. """ - virtual_depended_on_files = set() - - file_filter = lambda f: not input_api.re.match( - r"^third_party[\\\/](WebKit|blink)[\\\/].*", f.LocalPath()) - for f in input_api.AffectedFiles(include_deletes=False, - file_filter=file_filter): - filename = input_api.os_path.basename(f.LocalPath()) - if filename == 'DEPS': - virtual_depended_on_files.update( - _CalculateAddedDeps(input_api.os_path, '\n'.join(f.OldContents()), - '\n'.join(f.NewContents()))) - - if not virtual_depended_on_files: - return [] + virtual_depended_on_files = set() + + file_filter = lambda f: not input_api.re.match( + r"^third_party[\\\/](WebKit|blink)[\\\/].*", f.LocalPath()) + for f in input_api.AffectedFiles(include_deletes=False, + file_filter=file_filter): + filename = input_api.os_path.basename(f.LocalPath()) + if filename == 'DEPS': + virtual_depended_on_files.update( + _CalculateAddedDeps(input_api.os_path, + '\n'.join(f.OldContents()), + '\n'.join(f.NewContents()))) + + if not virtual_depended_on_files: + return [] - if input_api.is_committing: - if input_api.tbr: - return [ - output_api.PresubmitNotifyResult( - '--tbr was specified, skipping OWNERS check for DEPS ' - 'additions') - ] - if input_api.dry_run: - return [ - output_api.PresubmitNotifyResult( - 'This is a dry run, skipping OWNERS check for DEPS ' - 'additions') - ] - if not input_api.change.issue: - return [ - output_api.PresubmitError( - "DEPS approval by OWNERS check failed: this change has " - "no change number, so we can't check it for approvals.") - ] - output = output_api.PresubmitError - else: - output = output_api.PresubmitNotifyResult - - owner_email, reviewers = ( - input_api.canned_checks.GetCodereviewOwnerAndReviewers( - input_api, None, approval_needed=input_api.is_committing)) - - owner_email = owner_email or input_api.change.author_email - - approval_status = input_api.owners_client.GetFilesApprovalStatus( - virtual_depended_on_files, reviewers.union([owner_email]), []) - missing_files = [ - f for f in virtual_depended_on_files - if approval_status[f] != input_api.owners_client.APPROVED - ] - - # We strip the /DEPS part that was added by - # 
_FilesToCheckForIncomingDeps to fake a path to a file in a - # directory. - def StripDeps(path): - start_deps = path.rfind('/DEPS') - if start_deps != -1: - return path[:start_deps] - return path - - unapproved_dependencies = [ - "'+%s'," % StripDeps(path) for path in missing_files - ] - - if unapproved_dependencies: - output_list = [ - output('You need LGTM from owners of depends-on paths in DEPS that ' - ' were modified in this CL:\n %s' % - '\n '.join(sorted(unapproved_dependencies))) + if input_api.is_committing: + if input_api.tbr: + return [ + output_api.PresubmitNotifyResult( + '--tbr was specified, skipping OWNERS check for DEPS ' + 'additions') + ] + if input_api.dry_run: + return [ + output_api.PresubmitNotifyResult( + 'This is a dry run, skipping OWNERS check for DEPS ' + 'additions') + ] + if not input_api.change.issue: + return [ + output_api.PresubmitError( + "DEPS approval by OWNERS check failed: this change has " + "no change number, so we can't check it for approvals.") + ] + output = output_api.PresubmitError + else: + output = output_api.PresubmitNotifyResult + + owner_email, reviewers = ( + input_api.canned_checks.GetCodereviewOwnerAndReviewers( + input_api, None, approval_needed=input_api.is_committing)) + + owner_email = owner_email or input_api.change.author_email + + approval_status = input_api.owners_client.GetFilesApprovalStatus( + virtual_depended_on_files, reviewers.union([owner_email]), []) + missing_files = [ + f for f in virtual_depended_on_files + if approval_status[f] != input_api.owners_client.APPROVED + ] + + # We strip the /DEPS part that was added by + # _FilesToCheckForIncomingDeps to fake a path to a file in a + # directory. + def StripDeps(path): + start_deps = path.rfind('/DEPS') + if start_deps != -1: + return path[:start_deps] + return path + + unapproved_dependencies = [ + "'+%s'," % StripDeps(path) for path in missing_files ] - suggested_owners = input_api.owners_client.SuggestOwners( - missing_files, exclude=[owner_email]) - output_list.append( - output('Suggested missing target path OWNERS:\n %s' % - '\n '.join(suggested_owners or []))) - return output_list - - return [] + + if unapproved_dependencies: + output_list = [ + output( + 'You need LGTM from owners of depends-on paths in DEPS that ' + ' were modified in this CL:\n %s' % + '\n '.join(sorted(unapproved_dependencies))) + ] + suggested_owners = input_api.owners_client.SuggestOwners( + missing_files, exclude=[owner_email]) + output_list.append( + output('Suggested missing target path OWNERS:\n %s' % + '\n '.join(suggested_owners or []))) + return output_list + + return [] diff --git a/README.chromium b/README.chromium index 333e79e707..ea1874d82a 100644 --- a/README.chromium +++ b/README.chromium @@ -5,6 +5,7 @@ CPEPrefix: cpe:/a:webrtc_project:webrtc:90 License: BSD License File: LICENSE Shipped: yes +Security Critical: yes Description: WebRTC provides real time voice and video processing diff --git a/api/BUILD.gn b/api/BUILD.gn index f2d10ec296..7a3591881f 100644 --- a/api/BUILD.gn +++ b/api/BUILD.gn @@ -26,40 +26,76 @@ rtc_source_set("call_api") { sources = [ "call/audio_sink.h" ] } -rtc_source_set("callfactory_api") { +rtc_source_set("enable_media") { visibility = [ "*" ] - sources = [ "call/call_factory_interface.h" ] + sources = [ + "enable_media.cc", + "enable_media.h", + ] deps = [ - "../call:rtp_interfaces", + ":libjingle_peerconnection_api", + ":scoped_refptr", + "../call", + "../call:call_interfaces", + "../media:media_engine", + "../media:rtc_audio_video", + 
"../pc:media_factory", + "../rtc_base/system:rtc_export", + "environment", + "//third_party/abseil-cpp/absl/base:nullability", + ] +} + +rtc_source_set("enable_media_with_defaults") { + visibility = [ "*" ] + allow_poison = [ + "audio_codecs", + "environment_construction", + "software_video_codecs", + ] + sources = [ + "enable_media_with_defaults.cc", + "enable_media_with_defaults.h", + ] + deps = [ + ":enable_media", + ":libjingle_peerconnection_api", + ":scoped_refptr", "../rtc_base/system:rtc_export", + "audio:builtin_audio_processing_builder", + "audio_codecs:builtin_audio_decoder_factory", + "audio_codecs:builtin_audio_encoder_factory", + "task_queue:default_task_queue_factory", + "video_codecs:builtin_video_decoder_factory", + "video_codecs:builtin_video_encoder_factory", ] } if (!build_with_chromium) { rtc_library("create_peerconnection_factory") { visibility = [ "*" ] - allow_poison = [ "default_task_queue" ] + allow_poison = [ "environment_construction" ] sources = [ "create_peerconnection_factory.cc", "create_peerconnection_factory.h", ] deps = [ - ":callfactory_api", + ":enable_media", + ":field_trials_view", ":libjingle_peerconnection_api", ":scoped_refptr", "../api/rtc_event_log:rtc_event_log_factory", - "../media:rtc_audio_video", - "../media:rtc_media_base", - "../modules/audio_device:audio_device_api", - "../modules/audio_processing:api", "../pc:peer_connection_factory", "../pc:webrtc_sdp", + "../rtc_base:socket_server", "../rtc_base:threading", + "../rtc_base/system:rtc_export", "../stats:rtc_stats", + "audio:audio_device", "audio:audio_mixer_api", + "audio:audio_processing", + "audio:builtin_audio_processing_builder", "audio_codecs:audio_codecs_api", - "task_queue:default_task_queue_factory", - "transport:field_trial_based_config", "video_codecs:video_codecs_api", ] } @@ -84,10 +120,11 @@ rtc_library("rtp_headers") { ] deps = [ ":array_view", + "../rtc_base:checks", + "../rtc_base/system:rtc_export", "units:timestamp", "video:video_rtp_headers", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("rtp_packet_info") { @@ -107,13 +144,11 @@ rtc_library("rtp_packet_info") { "units:time_delta", "units:timestamp", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("video_track_source_constraints") { visibility = [ "*" ] sources = [ "video_track_source_constraints.h" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("media_stream_interface") { @@ -127,21 +162,20 @@ rtc_library("media_stream_interface") { deps = [ ":audio_options_api", ":make_ref_counted", + ":ref_count", + ":ref_count", ":rtp_parameters", ":scoped_refptr", ":sequence_checker", ":video_track_source_constraints", - "../modules/audio_processing:audio_processing_statistics", "../rtc_base:checks", - "../rtc_base:refcount", + "../rtc_base:macromagic", "../rtc_base/system:no_unique_address", "../rtc_base/system:rtc_export", + "audio:audio_processing_statistics", "video:recordable_encoded_frame", "video:video_frame", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -153,22 +187,29 @@ rtc_library("candidate") { "candidate.h", ] deps = [ + "../p2p:p2p_constants", "../rtc_base:checks", + "../rtc_base:crc32", + "../rtc_base:crypto_random", "../rtc_base:ip_address", "../rtc_base:logging", "../rtc_base:network_constants", "../rtc_base:socket_address", - "../rtc_base:ssl", "../rtc_base:stringutils", 
"../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_source_set("turn_customizer") { visibility = [ "*" ] sources = [ "turn_customizer.h" ] - deps = [ "transport:stun_types" ] + deps = [ + "../p2p:port_interface", + "transport:stun_types", + ] } rtc_source_set("ice_transport_interface") { @@ -178,9 +219,9 @@ rtc_source_set("ice_transport_interface") { deps = [ ":async_dns_resolver", ":packet_socket_factory", + ":ref_count", ":rtc_error", ":scoped_refptr", - "../rtc_base:refcount", "rtc_event_log:rtc_event_log", ] } @@ -194,13 +235,13 @@ rtc_library("dtls_transport_interface") { ] deps = [ ":ice_transport_interface", + ":ref_count", ":rtc_error", ":scoped_refptr", - "../rtc_base:refcount", "../rtc_base:ssl", "../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/base:core_headers", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("dtmf_sender_interface") { @@ -209,7 +250,7 @@ rtc_library("dtmf_sender_interface") { sources = [ "dtmf_sender_interface.h" ] deps = [ ":media_stream_interface", - "../rtc_base:refcount", + ":ref_count", ] } @@ -225,23 +266,22 @@ rtc_library("rtp_sender_interface") { ":dtmf_sender_interface", ":frame_transformer_interface", ":media_stream_interface", + ":ref_count", ":rtc_error", ":rtp_parameters", ":scoped_refptr", "../rtc_base:checks", - "../rtc_base:refcount", "../rtc_base/system:rtc_export", "crypto:frame_encryptor_interface", "video_codecs:video_codecs_api", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable" ] } rtc_library("libjingle_peerconnection_api") { visibility = [ "*" ] cflags = [] sources = [ - "crypto_params.h", "data_channel_interface.cc", "data_channel_interface.h", "jsep.cc", @@ -277,8 +317,8 @@ rtc_library("libjingle_peerconnection_api") { ":array_view", ":async_dns_resolver", ":audio_options_api", - ":callfactory_api", ":candidate", + ":data_channel_event_observer_interface", ":dtls_transport_interface", ":fec_controller_api", ":field_trials", @@ -291,6 +331,7 @@ rtc_library("libjingle_peerconnection_api") { ":network_state_predictor_api", ":packet_socket_factory", ":priority", + ":ref_count", ":rtc_error", ":rtc_stats_api", ":rtp_packet_info", @@ -301,67 +342,82 @@ rtc_library("libjingle_peerconnection_api") { ":sequence_checker", ":turn_customizer", "../call:rtp_interfaces", - "../p2p:rtc_p2p", + "../media:media_engine", + "../p2p:connection", + "../p2p:port", + "../p2p:port_allocator", + "../pc:media_factory", "../rtc_base:copy_on_write_buffer", "../rtc_base:logging", + "../rtc_base:macromagic", "../rtc_base:network", "../rtc_base:network_constants", - "../rtc_base:refcount", "../rtc_base:rtc_certificate_generator", + "../rtc_base:socket_factory", "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:stringutils", "adaptation:resource_adaptation_api", + "audio:audio_device", + "audio:audio_frame_processor", "audio:audio_mixer_api", + "audio:audio_processing", "audio_codecs:audio_codecs_api", "crypto:frame_decryptor_interface", "crypto:frame_encryptor_interface", "crypto:options", "metronome", "neteq:neteq_api", - "rtc_event_log", + "rtc_event_log:rtc_event_log_factory_interface", "task_queue", + "transport:bandwidth_estimation_settings", "transport:bitrate_settings", "transport:enums", "transport:network_control", 
"transport:sctp_transport_factory_interface", "transport/rtp:rtp_source", "units:data_rate", + "units:time_delta", "units:timestamp", "video:encoded_image", "video:video_bitrate_allocator_factory", "video:video_frame", "video:video_rtp_headers", "video_codecs:video_codecs_api", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", # Basically, don't add stuff here. You might break sensitive downstream # targets like pnacl. API should not depend on anything outside of this # file, really. All these should arguably go away in time. - "../media:rtc_media_base", "../media:rtc_media_config", - "../modules/audio_processing:audio_processing_statistics", "../rtc_base:checks", "../rtc_base:ip_address", "../rtc_base:socket_address", "../rtc_base:threading", "../rtc_base/system:rtc_export", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_source_set("frame_transformer_interface") { visibility = [ "*" ] - sources = [ "frame_transformer_interface.h" ] + sources = [ + "frame_transformer_interface.cc", + "frame_transformer_interface.h", + ] deps = [ + ":array_view", ":make_ref_counted", + ":ref_count", ":scoped_refptr", "../rtc_base:refcount", + "../rtc_base/system:rtc_export", + "units:time_delta", + "units:timestamp", "video:encoded_frame", "video:video_frame_metadata", ] @@ -378,26 +434,31 @@ rtc_library("rtc_error") { "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/meta:type_traits", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:str_format", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ +} + +rtc_source_set("rtc_error_matchers") { + testonly = true + sources = [ "test/rtc_error_matchers.h" ] + deps = [ + ":rtc_error", + "../test:test_support", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } rtc_source_set("packet_socket_factory") { visibility = [ "*" ] - sources = [ - "async_resolver_factory.h", - "packet_socket_factory.h", - ] + sources = [ "packet_socket_factory.h" ] deps = [ ":async_dns_resolver", - ":wrapping_async_dns_resolver", "../rtc_base:async_packet_socket", - "../rtc_base:async_resolver_interface", - "../rtc_base:proxy_info", "../rtc_base:socket_address", + "../rtc_base:ssl", "../rtc_base/system:rtc_export", ] } @@ -409,41 +470,30 @@ rtc_source_set("async_dns_resolver") { "../rtc_base:checks", "../rtc_base:socket_address", "../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable" ] } -rtc_source_set("wrapping_async_dns_resolver") { - visibility = [ - ":*", - "../p2p:rtc_p2p", - ] - sources = [ - "wrapping_async_dns_resolver.cc", - "wrapping_async_dns_resolver.h", - ] - deps = [ - ":async_dns_resolver", - ":sequence_checker", - "../rtc_base:async_resolver_interface", - "../rtc_base:checks", - "../rtc_base:macromagic", - "../rtc_base:socket_address", - "../rtc_base:threading", - 
"../rtc_base/third_party/sigslot", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] +rtc_source_set("ref_count") { + visibility = [ "*" ] + sources = [ "ref_count.h" ] } rtc_source_set("scoped_refptr") { visibility = [ "*" ] sources = [ "scoped_refptr.h" ] + deps = [ "//third_party/abseil-cpp/absl/base:nullability" ] } rtc_source_set("make_ref_counted") { visibility = [ "*" ] sources = [ "make_ref_counted.h" ] - deps = [ "../rtc_base:refcount" ] + deps = [ + ":ref_count", + ":scoped_refptr", + "../rtc_base:refcount", + "//third_party/abseil-cpp/absl/base:nullability", + ] } rtc_source_set("video_quality_analyzer_api") { @@ -453,23 +503,22 @@ rtc_source_set("video_quality_analyzer_api") { deps = [ ":array_view", + ":rtc_stats_api", + ":scoped_refptr", ":stats_observer_interface", "../rtc_base:checks", "video:encoded_image", "video:video_frame", "video:video_rtp_headers", "video_codecs:video_codecs_api", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } rtc_source_set("track_id_stream_info_map") { visibility = [ "*" ] sources = [ "test/track_id_stream_info_map.h" ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + deps = [ "//third_party/abseil-cpp/absl/strings:string_view" ] } rtc_source_set("rtp_transceiver_direction") { @@ -480,6 +529,10 @@ rtc_source_set("rtp_transceiver_direction") { rtc_source_set("priority") { visibility = [ "*" ] sources = [ "priority.h" ] + deps = [ + "../rtc_base:checks", + "../rtc_base:strong_alias", + ] } rtc_library("rtp_parameters") { @@ -494,16 +547,16 @@ rtc_library("rtp_parameters") { ":array_view", ":priority", ":rtp_transceiver_direction", + "../media:media_constants", "../rtc_base:checks", "../rtc_base:stringutils", "../rtc_base/system:rtc_export", "video:resolution", "video_codecs:scalability_mode", - ] - absl_deps = [ + "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:str_format", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -524,23 +577,31 @@ rtc_source_set("audio_quality_analyzer_api") { ] } +rtc_library("rtp_packet_sender") { + visibility = [ "*" ] + sources = [ "rtp_packet_sender.h" ] +} + rtc_source_set("stats_observer_interface") { visibility = [ "*" ] testonly = true sources = [ "test/stats_observer_interface.h" ] - deps = [ ":rtc_stats_api" ] - - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + deps = [ + ":rtc_stats_api", + ":scoped_refptr", + "//third_party/abseil-cpp/absl/strings:string_view", + ] } rtc_source_set("peer_network_dependencies") { visibility = [ "*" ] sources = [ "test/peer_network_dependencies.h" ] deps = [ - ":packet_socket_factory", "../rtc_base:network", + "../rtc_base:socket_factory", "../rtc_base:threading", + "//third_party/abseil-cpp/absl/base:nullability", ] } @@ -552,7 +613,6 @@ rtc_source_set("peer_connection_quality_test_fixture_api") { deps = [ ":array_view", ":audio_quality_analyzer_api", - ":callfactory_api", ":fec_controller_api", ":frame_generator_api", ":function_view", @@ -560,15 +620,12 @@ rtc_source_set("peer_connection_quality_test_fixture_api") { ":media_stream_interface", ":network_state_predictor_api", ":packet_socket_factory", - ":peer_network_dependencies", ":rtp_parameters", ":simulated_network_api", ":stats_observer_interface", 
":track_id_stream_info_map", ":video_quality_analyzer_api", "../media:media_constants", - "../media:rtc_media_base", - "../modules/audio_processing:api", "../rtc_base:checks", "../rtc_base:network", "../rtc_base:rtc_certificate_generator", @@ -577,6 +634,7 @@ rtc_source_set("peer_connection_quality_test_fixture_api") { "../rtc_base:threading", "../test:fileutils", "audio:audio_mixer_api", + "audio:audio_processing", "rtc_event_log", "task_queue", "test/pclf:media_configuration", @@ -587,12 +645,9 @@ rtc_source_set("peer_connection_quality_test_fixture_api") { "units:time_delta", "video:video_frame", "video_codecs:video_codecs_api", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -605,9 +660,9 @@ rtc_source_set("frame_generator_api") { deps = [ ":scoped_refptr", + "../rtc_base:checks", "video:video_frame", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } if (rtc_include_tests) { @@ -620,6 +675,7 @@ if (rtc_include_tests) { "test/create_network_emulation_manager.h", ] deps = [ + ":field_trials_view", ":network_emulation_manager_api", "../test/network:emulated_network", ] @@ -709,8 +765,11 @@ rtc_library("create_frame_generator") { "../rtc_base:checks", "../system_wrappers", "../test:frame_generator_impl", + "environment", + "environment:environment_factory", + "//third_party/abseil-cpp/absl/base:nullability", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("create_peer_connection_quality_test_frame_generator") { @@ -724,16 +783,26 @@ rtc_library("create_peer_connection_quality_test_frame_generator") { ":create_frame_generator", ":frame_generator_api", "../rtc_base:checks", + "../system_wrappers", "../test:fileutils", "test/pclf:media_configuration", + "units:time_delta", + ] +} + +rtc_source_set("data_channel_event_observer_interface") { + visibility = [ "*" ] + sources = [ "data_channel_event_observer_interface.h" ] + deps = [ + ":array_view", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("libjingle_logging_api") { visibility = [ "*" ] sources = [ "rtc_event_log_output.h" ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings" ] + deps = [ "//third_party/abseil-cpp/absl/strings:string_view" ] } rtc_library("rtc_event_log_output_file") { @@ -749,6 +818,7 @@ rtc_library("rtc_event_log_output_file") { "../rtc_base:logging", "../rtc_base/system:file_wrapper", "rtc_event_log", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -756,6 +826,7 @@ rtc_source_set("rtc_stats_api") { visibility = [ "*" ] cflags = [] sources = [ + "stats/attribute.h", "stats/rtc_stats.h", "stats/rtc_stats_collector_callback.h", "stats/rtc_stats_report.h", @@ -764,6 +835,7 @@ rtc_source_set("rtc_stats_api") { deps = [ ":make_ref_counted", + ":ref_count", ":scoped_refptr", "../api:refcountedbase", "../rtc_base:checks", @@ -771,8 +843,6 @@ rtc_source_set("rtc_stats_api") { "../rtc_base/system:rtc_export", "units:timestamp", ] - - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("audio_options_api") { @@ -787,7 +857,6 @@ rtc_library("audio_options_api") { "../rtc_base:stringutils", "../rtc_base/system:rtc_export", ] - absl_deps = [ 
"//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("transport_api") { @@ -819,8 +888,10 @@ rtc_source_set("simulated_network_api") { deps = [ "../rtc_base:macromagic", "../rtc_base:random", + "transport:ecn_marking", + "units:data_rate", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } # TODO(srte): Move to network_emulation sub directory. @@ -832,19 +903,21 @@ rtc_source_set("network_emulation_manager_api") { ] deps = [ ":array_view", + ":field_trials_view", ":packet_socket_factory", ":peer_network_dependencies", ":simulated_network_api", ":time_controller", - "../call:simulated_network", "../rtc_base:checks", + "../rtc_base:ip_address", "../rtc_base:network", "../rtc_base:network_constants", - "../rtc_base:threading", + "../rtc_base:socket_address", + "../test/network:simulated_network", "test/network_emulation", "units:data_rate", - "units:data_size", - "units:timestamp", + "//third_party/abseil-cpp/absl/base:nullability", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -856,12 +929,14 @@ rtc_source_set("time_controller") { ] deps = [ + "../rtc_base:socket_server", "../rtc_base:threading", "../rtc_base/synchronization:yield_policy", "../system_wrappers", "task_queue", "units:time_delta", "units:timestamp", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -874,6 +949,7 @@ rtc_source_set("fec_controller_api") { deps = [ "../modules:module_fec_api", + "environment", "video:video_frame_type", ] } @@ -881,6 +957,7 @@ rtc_source_set("fec_controller_api") { rtc_source_set("network_state_predictor_api") { visibility = [ "*" ] sources = [ "network_state_predictor.h" ] + deps = [ "transport:bandwidth_usage" ] } rtc_source_set("array_view") { @@ -896,6 +973,7 @@ rtc_source_set("refcountedbase") { visibility = [ "*" ] sources = [ "ref_counted_base.h" ] deps = [ + ":ref_count", "../rtc_base:macromagic", "../rtc_base:refcount", ] @@ -913,7 +991,13 @@ rtc_library("ice_transport_factory") { ":make_ref_counted", ":packet_socket_factory", ":scoped_refptr", - "../p2p:rtc_p2p", + ":sequence_checker", + "../p2p:connection", + "../p2p:ice_transport_internal", + "../p2p:p2p_constants", + "../p2p:p2p_transport_channel", + "../p2p:port_allocator", + "../rtc_base:macromagic", "../rtc_base:threading", "../rtc_base/system:rtc_export", "rtc_event_log:rtc_event_log", @@ -926,6 +1010,7 @@ rtc_library("neteq_simulator_api") { "test/neteq_simulator.cc", "test/neteq_simulator.h", ] + deps = [ "neteq:neteq_api" ] } rtc_source_set("function_view") { @@ -941,6 +1026,7 @@ rtc_source_set("sequence_checker") { "../rtc_base:checks", "../rtc_base:macromagic", "../rtc_base/synchronization:sequence_checker_internal", + "task_queue:task_queue", ] } @@ -955,9 +1041,12 @@ if (rtc_include_tests) { ] deps = [ + ":scoped_refptr", "../modules/audio_processing", - "../modules/audio_processing:api", "../modules/audio_processing:audioproc_f_impl", + "audio:audio_processing", + "audio:builtin_audio_processing_builder", + "//third_party/abseil-cpp/absl/base:nullability", ] } @@ -973,12 +1062,9 @@ if (rtc_include_tests) { "../modules/audio_coding:neteq_test_factory", "../rtc_base:checks", "neteq:neteq_api", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } } @@ -1016,25 +1102,6 @@ if (rtc_include_tests) { 
"../rtc_base:stringutils", "video:video_frame_type", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } - - rtc_library("video_codec_stats_api") { - visibility = [ "*" ] - testonly = true - sources = [ - "test/video_codec_stats.cc", - "test/video_codec_stats.h", - ] - deps = [ - "../api/numerics:numerics", - "../api/units:data_rate", - "../api/units:data_size", - "../api/units:frequency", - "test/metrics:metric", - "test/metrics:metrics_logger", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("videocodec_test_fixture_api") { @@ -1043,25 +1110,11 @@ if (rtc_include_tests) { sources = [ "test/videocodec_test_fixture.h" ] deps = [ ":videocodec_test_stats_api", + "../modules/video_coding:codec_globals_headers", "../modules/video_coding:video_codec_interface", - "video_codecs:video_codecs_api", - ] - } - - rtc_library("video_codec_tester_api") { - visibility = [ "*" ] - testonly = true - sources = [ "test/video_codec_tester.h" ] - deps = [ - ":video_codec_stats_api", - "../modules/video_coding/svc:scalability_mode_util", "video:encoded_image", - "video:resolution", "video:video_frame", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/types:optional", + "video_codecs:video_codecs_api", ] } @@ -1080,19 +1133,6 @@ if (rtc_include_tests) { ] } - rtc_library("create_video_codec_tester_api") { - visibility = [ "*" ] - testonly = true - sources = [ - "test/create_video_codec_tester.cc", - "test/create_video_codec_tester.h", - ] - deps = [ - ":video_codec_tester_api", - "../modules/video_coding:video_codec_tester", - ] - } - rtc_source_set("mock_audio_mixer") { visibility = [ "*" ] testonly = true @@ -1100,6 +1140,7 @@ if (rtc_include_tests) { deps = [ "../test:test_support", + "audio:audio_frame_api", "audio:audio_mixer_api", ] } @@ -1113,7 +1154,6 @@ if (rtc_include_tests) { "../api:media_stream_interface", "../test:test_support", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("mock_data_channel") { @@ -1123,7 +1163,12 @@ if (rtc_include_tests) { deps = [ ":libjingle_peerconnection_api", + ":priority", + ":rtc_error", + ":scoped_refptr", + "../rtc_base:refcount", "../test:test_support", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] } @@ -1135,6 +1180,9 @@ if (rtc_include_tests) { deps = [ ":dtmf_sender_interface", ":libjingle_peerconnection_api", + ":make_ref_counted", + ":scoped_refptr", + "../rtc_base:refcount", "../test:test_support", ] } @@ -1154,6 +1202,9 @@ if (rtc_include_tests) { testonly = true sources = [ "test/mock_frame_encryptor.h" ] deps = [ + ":array_view", + ":rtp_parameters", + # For api/crypto/frame_encryptor_interface.h ":libjingle_peerconnection_api", "../test:test_support", @@ -1166,12 +1217,25 @@ if (rtc_include_tests) { testonly = true sources = [ "test/mock_frame_decryptor.h" ] deps = [ + ":array_view", ":libjingle_peerconnection_api", + ":rtp_parameters", "../test:test_support", "crypto:frame_decryptor_interface", ] } + rtc_library("mock_frame_transformer") { + visibility = [ "*" ] + testonly = true + sources = [ "test/mock_frame_transformer.h" ] + deps = [ + ":frame_transformer_interface", + ":scoped_refptr", + "../test:test_support", + ] + } + rtc_library("mock_encoder_selector") { visibility = [ "*" ] testonly = true @@ -1180,6 +1244,8 @@ if (rtc_include_tests) { ":libjingle_peerconnection_api", "../api/video_codecs:video_codecs_api", "../test:test_support", + "units:data_rate", + 
"video:render_resolution", ] } @@ -1194,6 +1260,7 @@ if (rtc_include_tests) { ":array_view", ":libjingle_peerconnection_api", ":make_ref_counted", + ":ref_count", ":rtp_parameters", "../rtc_base:checks", "../rtc_base:refcount", @@ -1224,7 +1291,10 @@ if (rtc_include_tests) { sources = [ "test/mock_media_stream_interface.h" ] deps = [ + ":audio_options_api", ":media_stream_interface", + ":scoped_refptr", + "../rtc_base:refcount", "../test:test_support", ] } @@ -1235,7 +1305,10 @@ if (rtc_include_tests) { sources = [ "test/mock_packet_socket_factory.h" ] deps = [ + ":async_dns_resolver", ":packet_socket_factory", + "../rtc_base:async_packet_socket", + "../rtc_base:socket_address", "../test:test_support", ] } @@ -1246,10 +1319,25 @@ if (rtc_include_tests) { sources = [ "test/mock_peerconnectioninterface.h" ] deps = [ + ":candidate", + ":data_channel_event_observer_interface", + ":dtls_transport_interface", + ":libjingle_logging_api", ":libjingle_peerconnection_api", + ":make_ref_counted", + ":media_stream_interface", + ":ref_count", + ":rtc_error", + ":rtc_stats_api", + ":rtp_parameters", "../api:scoped_refptr", "../rtc_base:refcount", + "../rtc_base:threading", "../test:test_support", + "adaptation:resource_adaptation_api", + "transport:bandwidth_estimation_settings", + "transport:bitrate_settings", + "transport:network_control", ] } @@ -1259,8 +1347,15 @@ if (rtc_include_tests) { sources = [ "test/mock_peer_connection_factory_interface.h" ] deps = [ + ":audio_options_api", ":libjingle_peerconnection_api", + ":media_stream_interface", + ":rtc_error", + ":rtp_parameters", + ":scoped_refptr", + "../rtc_base:refcount", "../test:test_support", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -1269,18 +1364,34 @@ if (rtc_include_tests) { testonly = true sources = [ "test/mock_session_description_interface.h" ] deps = [ + ":candidate", ":libjingle_peerconnection_api", "../test:test_support", ] } + rtc_source_set("mock_transformable_frame") { + visibility = [ "*" ] + testonly = true + sources = [ "test/mock_transformable_frame.h" ] + deps = [ + ":array_view", + ":frame_transformer_interface", + "../test:test_support", + "units:time_delta", + "units:timestamp", + ] + } + rtc_source_set("mock_async_dns_resolver") { visibility = [ "*" ] testonly = true sources = [ "test/mock_async_dns_resolver.h" ] deps = [ ":async_dns_resolver", + "../rtc_base:socket_address", "../test:test_support", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] } @@ -1294,10 +1405,23 @@ if (rtc_include_tests) { ] deps = [ + ":array_view", + ":dtls_transport_interface", + ":frame_transformer_interface", ":libjingle_peerconnection_api", + ":make_ref_counted", + ":media_stream_interface", + ":rtc_error", + ":rtp_parameters", ":rtp_sender_interface", + ":rtp_transceiver_direction", + ":scoped_refptr", "../api/crypto:frame_decryptor_interface", + "../rtc_base:refcount", "../test:test_support", + "crypto:frame_encryptor_interface", + "transport/rtp:rtp_source", + "video_codecs:video_codecs_api", ] } @@ -1307,8 +1431,11 @@ if (rtc_include_tests) { sources = [ "test/mock_transformable_audio_frame.h" ] deps = [ + ":array_view", ":frame_transformer_interface", + "../api/units:timestamp", "../test:test_support", + "units:time_delta", ] } @@ -1318,8 +1445,12 @@ if (rtc_include_tests) { sources = [ "test/mock_transformable_video_frame.h" ] deps = [ + ":array_view", ":frame_transformer_interface", "../test:test_support", + "units:time_delta", + "units:timestamp", + "video:video_frame_metadata", ] } @@ -1331,6 +1462,7 
@@ if (rtc_include_tests) { deps = [ "../api/video:video_bitrate_allocator", "../test:test_support", + "video:video_bitrate_allocation", ] } @@ -1342,6 +1474,9 @@ if (rtc_include_tests) { deps = [ "../api/video:video_bitrate_allocator_factory", "../test:test_support", + "environment", + "video:video_bitrate_allocator", + "video_codecs:video_codecs_api", ] } @@ -1356,6 +1491,7 @@ if (rtc_include_tests) { deps = [ "../api/video_codecs:video_codecs_api", "../test:test_support", + "environment", ] } @@ -1367,6 +1503,8 @@ if (rtc_include_tests) { deps = [ "../api/video_codecs:video_codecs_api", "../test:test_support", + "video:encoded_image", + "video:video_frame", ] } @@ -1376,8 +1514,12 @@ if (rtc_include_tests) { sources = [ "test/mock_video_encoder.h" ] deps = [ + ":fec_controller_api", "../api/video_codecs:video_codecs_api", "../test:test_support", + "video:encoded_image", + "video:video_frame", + "video:video_frame_type", ] } @@ -1387,10 +1529,12 @@ if (rtc_include_tests) { sources = [ "test/mock_video_track.h" ] deps = [ + ":ref_count", "../api:media_stream_interface", "../api:scoped_refptr", "../rtc_base:refcount", "../test:test_support", + "video:video_frame", ] } @@ -1403,12 +1547,20 @@ if (rtc_include_tests) { ] deps = [ - ":callfactory_api", + ":enable_media_with_defaults", + ":libjingle_peerconnection_api", ":time_controller", "../call", "../call:call_interfaces", - "../call:rtp_interfaces", + "../media:media_engine", + "../pc:media_factory", + "../rtc_base:checks", + "../system_wrappers", "../test/time_controller", + "environment", + "environment:environment_factory", + "units:timestamp", + "//third_party/abseil-cpp/absl/base:nullability", ] } @@ -1417,8 +1569,10 @@ if (rtc_include_tests) { sources = [ "array_view_unittest.cc", + "candidate_unittest.cc", "field_trials_unittest.cc", "function_view_unittest.cc", + "jsep_unittest.cc", "rtc_error_unittest.cc", "rtc_event_log_output_file_unittest.cc", "rtp_packet_info_unittest.cc", @@ -1426,12 +1580,12 @@ if (rtc_include_tests) { "rtp_parameters_unittest.cc", "scoped_refptr_unittest.cc", "sequence_checker_unittest.cc", - "test/create_time_controller_unittest.cc", "test/peerconnection_quality_test_fixture_unittest.cc", ] deps = [ ":array_view", + ":candidate", ":create_time_controller", ":field_trials", ":field_trials_view", @@ -1440,25 +1594,35 @@ if (rtc_include_tests) { ":peer_connection_quality_test_fixture_api", ":rtc_error", ":rtc_event_log_output_file", + ":rtp_headers", + ":rtp_headers", ":rtp_packet_info", ":rtp_parameters", ":scoped_refptr", ":sequence_checker", ":time_controller", + "../p2p:p2p_constants", "../rtc_base:buffer", "../rtc_base:checks", "../rtc_base:gunit_helpers", + "../rtc_base:logging", + "../rtc_base:macromagic", "../rtc_base:platform_thread", "../rtc_base:rtc_event", - "../rtc_base:rtc_task_queue", + "../rtc_base:socket_address", + "../rtc_base:ssl", "../rtc_base:task_queue_for_test", "../rtc_base/containers:flat_set", + "../rtc_base/synchronization:sequence_checker_internal", "../rtc_base/task_utils:repeating_task", + "../system_wrappers", "../system_wrappers:field_trial", "../test:field_trial", "../test:fileutils", "../test:rtc_expect_death", "../test:test_support", + "audio_codecs/opus:unittests", + "environment:environment_unittests", "task_queue:task_queue_default_factory_unittests", "test/pclf:media_configuration", "test/video:video_frame_writer", @@ -1468,12 +1632,17 @@ if (rtc_include_tests) { "units:units_unittests", "video:frame_buffer_unittest", "video:rtp_video_frame_assembler_unittests", + 
"video:video_frame", "video:video_frame_metadata_unittest", - ] - absl_deps = [ + "//testing/gtest", + "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] + + if (rtc_use_h265) { + deps += [ "video:rtp_video_frame_h265_assembler_unittests" ] + } } rtc_library("compile_all_headers") { @@ -1498,6 +1667,7 @@ if (rtc_include_tests) { ":mock_rtp", ":mock_session_description_interface", ":mock_transformable_audio_frame", + ":mock_transformable_frame", ":mock_transformable_video_frame", ":mock_video_bitrate_allocator", ":mock_video_bitrate_allocator_factory", @@ -1524,18 +1694,19 @@ rtc_source_set("field_trials_registry") { "../rtc_base:logging", "../rtc_base/containers:flat_set", "../rtc_base/system:rtc_export", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } rtc_source_set("field_trials_view") { visibility = [ "*" ] sources = [ "field_trials_view.h" ] - deps = [ "../rtc_base/system:rtc_export" ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + deps = [ + "../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", + ] } rtc_source_set("webrtc_key_value_config") { @@ -1555,8 +1726,8 @@ rtc_library("field_trials") { "../rtc_base:checks", "../rtc_base/containers:flat_map", "../system_wrappers:field_trial", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("frame_transformer_factory") { @@ -1567,10 +1738,12 @@ rtc_library("frame_transformer_factory") { ] deps = [ ":frame_transformer_interface", + ":ref_count", ":scoped_refptr", "../audio:audio", "../modules/rtp_rtcp", - "../rtc_base:refcount", + "../rtc_base:checks", + "../rtc_base/system:rtc_export", "video:encoded_frame", "video:video_frame_metadata", ] diff --git a/api/DEPS b/api/DEPS index bcfd705741..0efc076c3e 100644 --- a/api/DEPS +++ b/api/DEPS @@ -49,6 +49,7 @@ specific_include_rules = { ], ".*\.h": [ + "+rtc_base/arraysize.h", "+rtc_base/checks.h", "+rtc_base/system/rtc_export.h", "+rtc_base/system/rtc_export_template.h", @@ -74,14 +75,25 @@ specific_include_rules = { "+rtc_base/socket_address.h", ], + "audio_device_defines\.h": [ + "+rtc_base/strings/string_builder.h", + ], + + "audio_format\.h": [ + "+rtc_base/strings/string_builder.h", + ], + "candidate\.h": [ "+rtc_base/network_constants.h", "+rtc_base/socket_address.h", ], + "create_peerconnection_factory\.h": [ + "+rtc_base/thread.h", + ], + "data_channel_interface\.h": [ "+rtc_base/copy_on_write_buffer.h", - "+rtc_base/ref_count.h", ], "data_channel_transport_interface\.h": [ @@ -89,38 +101,21 @@ specific_include_rules = { ], "dtls_transport_interface\.h": [ - "+rtc_base/ref_count.h", "+rtc_base/ssl_certificate.h", ], - "dtmf_sender_interface\.h": [ - "+rtc_base/ref_count.h", - ], - "fec_controller\.h": [ "+modules/include/module_fec_types.h", ], - "frame_transformer_interface\.h": [ - "+rtc_base/ref_count.h", - ], - - "ice_transport_interface\.h": [ - "+rtc_base/ref_count.h", - ], - - "jsep\.h": [ - "+rtc_base/ref_count.h", - ], - - "media_stream_interface\.h": [ - "+modules/audio_processing/include/audio_processing_statistics.h", - "+rtc_base/ref_count.h", - ], - "packet_socket_factory\.h": [ - "+rtc_base/proxy_info.h", 
"+rtc_base/async_packet_socket.h", + "+rtc_base/socket_address.h", + "+rtc_base/ssl_certificate.h", + ], + + "turn_customizer\.h": [ + "+p2p/base/port_interface.h", ], "peer_connection_interface\.h": [ @@ -132,10 +127,10 @@ specific_include_rules = { "+rtc_base/network.h", "+rtc_base/network_constants.h", "+rtc_base/network_monitor_factory.h", - "+rtc_base/ref_count.h", "+rtc_base/rtc_certificate.h", "+rtc_base/rtc_certificate_generator.h", "+rtc_base/socket_address.h", + "+rtc_base/socket_factory.h", "+rtc_base/ssl_certificate.h", "+rtc_base/ssl_stream_adapter.h", "+rtc_base/thread.h", @@ -148,100 +143,32 @@ specific_include_rules = { ], "ref_counted_base\.h": [ - "+rtc_base/ref_count.h", "+rtc_base/ref_counter.h", ], "rtc_error\.h": [ "+rtc_base/logging.h", + "+absl/strings/has_absl_stringify.h", + "+absl/strings/str_format.h", ], "rtc_event_log_output_file.h": [ # For private member and constructor. "+rtc_base/system/file_wrapper.h", ], - "rtp_receiver_interface\.h": [ - "+rtc_base/ref_count.h", - ], - - "rtp_sender_interface\.h": [ - "+rtc_base/ref_count.h", - ], - - "rtp_transceiver_interface\.h": [ - "+rtc_base/ref_count.h", - ], - - "sctp_transport_interface\.h": [ - "+rtc_base/ref_count.h", - ], - - "set_local_description_observer_interface\.h": [ - "+rtc_base/ref_count.h", - ], - "set_remote_description_observer_interface\.h": [ - "+rtc_base/ref_count.h", - ], "legacy_stats_types\.h": [ - "+rtc_base/ref_count.h", + "+rtc_base/thread_annotations.h", "+rtc_base/thread_checker.h", ], - "uma_metrics\.h": [ - "+rtc_base/ref_count.h", - ], - - "audio_mixer\.h": [ - "+rtc_base/ref_count.h", - ], - "audio_decoder\.h": [ "+rtc_base/buffer.h", ], - "audio_decoder_factory\.h": [ - "+rtc_base/ref_count.h", - ], - "audio_encoder\.h": [ "+rtc_base/buffer.h", ], - "audio_encoder_factory\.h": [ - "+rtc_base/ref_count.h", - ], - - "frame_decryptor_interface\.h": [ - "+rtc_base/ref_count.h", - ], - - "frame_encryptor_interface\.h": [ - "+rtc_base/ref_count.h", - ], - - "rtc_stats_collector_callback\.h": [ - "+rtc_base/ref_count.h", - ], - - "rtc_stats_report\.h": [ - "+rtc_base/ref_count.h", - ], - - "audioproc_float\.h": [ - "+modules/audio_processing/include/audio_processing.h", - ], - - "echo_detector_creator\.h": [ - "+modules/audio_processing/include/audio_processing.h", - ], - - "fake_metronome\.h": [ - "+rtc_base/synchronization/mutex.h", - "+rtc_base/task_queue.h", - "+rtc_base/task_utils/repeating_task.h", - "+rtc_base/thread_annotations.h", - ], - "make_ref_counted\.h": [ "+rtc_base/ref_counted_object.h", ], @@ -260,6 +187,11 @@ specific_include_rules = { "notifier\.h": [ "+rtc_base/system/no_unique_address.h", + "+rtc_base/thread_annotations.h", + ], + + "priority\.h": [ + "+rtc_base/strong_alias.h", ], "simulated_network\.h": [ @@ -279,8 +211,8 @@ specific_include_rules = { "+modules/video_coding/include/video_codec_interface.h" ], - "video_encoder_config\.h": [ - "+rtc_base/ref_count.h", + "rtp_parameters\.h": [ + "+absl/strings/str_format.h", ], "sequence_checker\.h": [ @@ -288,18 +220,23 @@ specific_include_rules = { "+rtc_base/thread_annotations.h", ], - "wrapping_async_dns_resolver\.h": [ - "+rtc_base/async_resolver.h", - "+rtc_base/async_resolver_interface.h", - "+rtc_base/socket_address.h", - "+rtc_base/third_party/sigslot/sigslot.h", - "+rtc_base/thread_annotations.h", - ], - "video_encoder_factory_template.*\.h": [ "+modules/video_coding", ], + "video_encoder_factory_interface\.h": [ + "+rtc_base/numerics", + ], + + "video_encoder_interface\.h": [ + "+rtc_base/numerics", + ], + 
+ "simple_encoder_wrapper\.h": [ + "+common_video", + "+modules", + ], + "video_decoder_factory_template.*\.h": [ "+modules/video_coding", ], @@ -316,6 +253,10 @@ specific_include_rules = { "+rtc_base/containers/flat_set.h", ], + "ice_transport_factory\.h": [ + "+p2p/base/port_allocator.h", + ], + # .cc files in api/ should not be restricted in what they can #include, # so we re-add all the top-level directories here. (That's because .h # files leak their #includes to whoever's #including them, but .cc files diff --git a/api/README.md b/api/README.md index 7153cb57c4..cf6d73a855 100644 --- a/api/README.md +++ b/api/README.md @@ -13,8 +13,9 @@ Mostly, just follow the regular [style guide](/g3doc/style-guide.md), but: mountain of technical debt that we’re trying to shrink. * `.cc` files in `api/`, on the other hand, are free to `#include` headers outside `api/`. +* Avoid structs in api, prefer classes. -That is, the preferred way for `api/` code to access non-`api/` code is to call +The preferred way for `api/` code to access non-`api/` code is to call it from a `.cc` file, so that users of our API headers won’t transitively `#include` non-public headers. @@ -25,3 +26,12 @@ usual [rules](/g3doc/style-guide.md#forward-declarations) still apply, though. `.cc` files in `api/` should preferably be kept reasonably small. If a substantial implementation is needed, consider putting it with our non-public code, and just call it from the `api/` `.cc` file. + +Avoid defining api with structs as it makes harder for the api to evolve. +Your struct may gain invariant, or change how it represents data. +Evolving struct from the api is particular challenging as it is designed to be +used in other code bases and thus needs to be updated independetly from its usage. +Class with accessors and setters makes such migration safer. +See [Google C++ style guide](https://google.github.io/styleguide/cppguide.html#Structs_vs._Classes) for more. + +If you need to evolve existent struct in api, prefer first to convert it into a class. diff --git a/api/adaptation/BUILD.gn b/api/adaptation/BUILD.gn index 839ad2c24b..fe66065a36 100644 --- a/api/adaptation/BUILD.gn +++ b/api/adaptation/BUILD.gn @@ -15,6 +15,7 @@ rtc_source_set("resource_adaptation_api") { "resource.h", ] deps = [ + "..:ref_count", "../../api:scoped_refptr", "../../rtc_base:checks", "../../rtc_base:refcount", diff --git a/api/adaptation/DEPS b/api/adaptation/DEPS deleted file mode 100644 index 734e152497..0000000000 --- a/api/adaptation/DEPS +++ /dev/null @@ -1,7 +0,0 @@ -specific_include_rules = { - "resource\.h": [ - # ref_count.h is a public_deps of rtc_base:refcount. Necessary because of - # rtc::RefCountInterface. - "+rtc_base/ref_count.h", - ], -} \ No newline at end of file diff --git a/api/adaptation/resource.h b/api/adaptation/resource.h index 7d7c70b3eb..bd71b7424c 100644 --- a/api/adaptation/resource.h +++ b/api/adaptation/resource.h @@ -13,8 +13,8 @@ #include +#include "api/ref_count.h" #include "api/scoped_refptr.h" -#include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -35,9 +35,8 @@ class RTC_EXPORT ResourceListener { public: virtual ~ResourceListener(); - virtual void OnResourceUsageStateMeasured( - rtc::scoped_refptr resource, - ResourceUsageState usage_state) = 0; + virtual void OnResourceUsageStateMeasured(scoped_refptr resource, + ResourceUsageState usage_state) = 0; }; // A Resource monitors an implementation-specific resource. 
It may report @@ -50,7 +49,7 @@ class RTC_EXPORT ResourceListener { // The Resource is reference counted to prevent use-after-free when posting // between task queues. As such, the implementation MUST NOT make any // assumptions about which task queue Resource is destructed on. -class RTC_EXPORT Resource : public rtc::RefCountInterface { +class RTC_EXPORT Resource : public RefCountInterface { public: Resource(); // Destruction may happen on any task queue. diff --git a/api/array_view.h b/api/array_view.h index 7e01959b01..db85b47991 100644 --- a/api/array_view.h +++ b/api/array_view.h @@ -13,15 +13,16 @@ #include #include +#include #include #include #include "rtc_base/checks.h" #include "rtc_base/type_traits.h" -namespace rtc { +namespace webrtc { -// tl;dr: rtc::ArrayView is the same thing as gsl::span from the Guideline +// tl;dr: webrtc::ArrayView is the same thing as gsl::span from the Guideline // Support Library. // // Many functions read from or write to arrays. The obvious way to do this is @@ -36,8 +37,8 @@ namespace rtc { // } // // This is flexible, since it doesn't matter how the array is stored (C array, -// std::vector, rtc::Buffer, ...), but it's error-prone because the caller has -// to correctly specify the array length: +// std::vector, webrtc::Buffer, ...), but it's error-prone because the caller +// has to correctly specify the array length: // // Contains17(arr, arraysize(arr)); // C array // Contains17(arr.data(), arr.size()); // std::vector @@ -47,11 +48,11 @@ namespace rtc { // It's also kind of messy to have two separate arguments for what is // conceptually a single thing. // -// Enter rtc::ArrayView. It contains a T pointer (to an array it doesn't +// Enter webrtc::ArrayView. It contains a T pointer (to an array it doesn't // own) and a count, and supports the basic things you'd expect, such as // indexing and iteration. It allows us to write our function like this: // -// bool Contains17(rtc::ArrayView arr) { +// bool Contains17(webrtc::ArrayView arr) { // for (auto e : arr) { // if (e == 17) // return true; @@ -64,7 +65,7 @@ namespace rtc { // // Contains17(arr); // C array // Contains17(arr); // std::vector -// Contains17(rtc::ArrayView(arr, size)); // pointer + size +// Contains17(webrtc::ArrayView(arr, size)); // pointer + size // Contains17(nullptr); // nullptr -> empty ArrayView // ... // @@ -96,7 +97,7 @@ class ArrayViewBase { static_assert(Size > 0, "ArrayView size must be variable or non-negative"); public: - ArrayViewBase(T* data, size_t size) : data_(data) {} + ArrayViewBase(T* data, size_t /* size */) : data_(data) {} static constexpr size_t size() { return Size; } static constexpr bool empty() { return false; } @@ -113,7 +114,7 @@ class ArrayViewBase { template class ArrayViewBase { public: - explicit ArrayViewBase(T* data, size_t size) {} + explicit ArrayViewBase(T* /* data */, size_t /* size */) {} static constexpr size_t size() { return 0; } static constexpr bool empty() { return true; } @@ -238,8 +239,8 @@ class ArrayView final : public array_view_internal::ArrayViewBase { // ArrayView to ArrayView or ArrayView, // std::vector to ArrayView or ArrayView, // const std::vector to ArrayView, - // rtc::Buffer to ArrayView or ArrayView, and - // const rtc::Buffer to ArrayView. + // webrtc::Buffer to ArrayView or ArrayView, and + // const webrtc::Buffer to ArrayView. 
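A compilable version of the conversions the comment above enumerates, using the `webrtc::` spelling this change migrates `ArrayView` to:

#include <vector>

#include "api/array_view.h"

bool Contains17(webrtc::ArrayView<const int> arr) {
  for (int e : arr) {
    if (e == 17)
      return true;
  }
  return false;
}

void Demo() {
  int c_arr[] = {4, 8, 15, 16, 23, 42};
  std::vector<int> vec = {17, 0};
  Contains17(c_arr);  // C array.
  Contains17(vec);    // std::vector.
  Contains17(webrtc::ArrayView<const int>(vec.data(), vec.size()));  // ptr+size.
  Contains17(nullptr);  // nullptr becomes an empty view.
}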
template < typename U, typename std::enable_if reinterpret_array_view(ArrayView view) { return ArrayView(reinterpret_cast(view.data()), view.size()); } +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +template +using ArrayView = ::webrtc::ArrayView; +using ::webrtc::MakeArrayView; +using ::webrtc::reinterpret_array_view; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // API_ARRAY_VIEW_H_ diff --git a/api/array_view_unittest.cc b/api/array_view_unittest.cc index 97267df006..445f4189b1 100644 --- a/api/array_view_unittest.cc +++ b/api/array_view_unittest.cc @@ -10,18 +10,20 @@ #include "api/array_view.h" -#include +#include + #include +#include #include #include #include #include "rtc_base/buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/gunit.h" #include "test/gmock.h" +#include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { @@ -34,7 +36,7 @@ size_t Call(ArrayView av) { } template -void CallFixed(ArrayView av) {} +void CallFixed(ArrayView /* av */) {} } // namespace @@ -298,11 +300,11 @@ TEST(ArrayViewTest, TestStdArray) { constexpr size_t size = 5; std::array arr{}; // Fixed size view. - rtc::ArrayView arr_view_fixed(arr); + ArrayView arr_view_fixed(arr); EXPECT_EQ(arr.data(), arr_view_fixed.data()); static_assert(size == arr_view_fixed.size(), ""); // Variable size view. - rtc::ArrayView arr_view(arr); + ArrayView arr_view(arr); EXPECT_EQ(arr.data(), arr_view.data()); EXPECT_EQ(size, arr_view.size()); } @@ -311,17 +313,17 @@ TEST(ArrayViewTest, TestConstStdArray) { constexpr size_t size = 5; constexpr std::array constexpr_arr{}; - rtc::ArrayView constexpr_arr_view(constexpr_arr); + ArrayView constexpr_arr_view(constexpr_arr); EXPECT_EQ(constexpr_arr.data(), constexpr_arr_view.data()); static_assert(constexpr_arr.size() == constexpr_arr_view.size(), ""); const std::array const_arr{}; - rtc::ArrayView const_arr_view(const_arr); + ArrayView const_arr_view(const_arr); EXPECT_EQ(const_arr.data(), const_arr_view.data()); static_assert(const_arr.size() == const_arr_view.size(), ""); std::array non_const_arr{}; - rtc::ArrayView non_const_arr_view(non_const_arr); + ArrayView non_const_arr_view(non_const_arr); EXPECT_EQ(non_const_arr.data(), non_const_arr_view.data()); static_assert(non_const_arr.size() == non_const_arr_view.size(), ""); } @@ -352,7 +354,7 @@ TEST(ArrayViewTest, TestStdVector) { } TEST(ArrayViewTest, TestRtcBuffer) { - rtc::Buffer b = "so buffer"; + Buffer b = "so buffer"; EXPECT_EQ(10u, Call(b)); EXPECT_EQ(10u, Call(b)); // Call(b); // Compile error, because incompatible types. @@ -364,7 +366,7 @@ TEST(ArrayViewTest, TestRtcBuffer) { EXPECT_EQ(10u, y.size()); EXPECT_EQ(b.data(), y.data()); // ArrayView d = b; // Compile error, because incompatible types. - const rtc::Buffer cb = "very const"; + const Buffer cb = "very const"; EXPECT_EQ(11u, Call(cb)); // Call(cb); // Compile error, because can't drop const. 
ArrayView z = cb; @@ -627,4 +629,4 @@ TEST(ArrayViewTest, TestReinterpretCastVariableSize) { EXPECT_EQ(uint8_av[1], 2); EXPECT_EQ(uint8_av[2], 3); } -} // namespace rtc +} // namespace webrtc diff --git a/api/async_dns_resolver.h b/api/async_dns_resolver.h index db02cd185c..11fdf67d3b 100644 --- a/api/async_dns_resolver.h +++ b/api/async_dns_resolver.h @@ -11,11 +11,9 @@ #ifndef API_ASYNC_DNS_RESOLVER_H_ #define API_ASYNC_DNS_RESOLVER_H_ -#include #include #include "absl/functional/any_invocable.h" -#include "rtc_base/checks.h" #include "rtc_base/socket_address.h" #include "rtc_base/system/rtc_export.h" @@ -47,8 +45,7 @@ class AsyncDnsResolverResult { // If the address was successfully resolved, sets `addr` to a copy of the // address from `Start` with the IP address set to the top most resolved // address of `family` (`addr` will have both hostname and the resolved ip). - virtual bool GetResolvedAddress(int family, - rtc::SocketAddress* addr) const = 0; + virtual bool GetResolvedAddress(int family, SocketAddress* addr) const = 0; // Returns error from resolver. virtual int GetError() const = 0; }; @@ -63,10 +60,10 @@ class RTC_EXPORT AsyncDnsResolverInterface { virtual ~AsyncDnsResolverInterface() = default; // Start address resolution of the hostname in `addr`. - virtual void Start(const rtc::SocketAddress& addr, + virtual void Start(const SocketAddress& addr, absl::AnyInvocable callback) = 0; // Start address resolution of the hostname in `addr` matching `family`. - virtual void Start(const rtc::SocketAddress& addr, + virtual void Start(const SocketAddress& addr, int family, absl::AnyInvocable callback) = 0; virtual const AsyncDnsResolverResult& result() const = 0; @@ -83,14 +80,14 @@ class AsyncDnsResolverFactoryInterface { // will be called when resolution is finished. // The callback will be called on the sequence that the caller runs on. virtual std::unique_ptr CreateAndResolve( - const rtc::SocketAddress& addr, + const SocketAddress& addr, absl::AnyInvocable callback) = 0; // Creates an AsyncDnsResolver and starts resolving the name to an address // matching the specified family. The callback will be called when resolution // is finished. The callback will be called on the sequence that the caller // runs on. virtual std::unique_ptr CreateAndResolve( - const rtc::SocketAddress& addr, + const SocketAddress& addr, int family, absl::AnyInvocable callback) = 0; // Creates an AsyncDnsResolver and does not start it. diff --git a/api/async_resolver_factory.h b/api/async_resolver_factory.h deleted file mode 100644 index ffa958268d..0000000000 --- a/api/async_resolver_factory.h +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_ASYNC_RESOLVER_FACTORY_H_ -#define API_ASYNC_RESOLVER_FACTORY_H_ - -#include "rtc_base/async_resolver_interface.h" - -namespace webrtc { - -// An abstract factory for creating AsyncResolverInterfaces. This allows -// client applications to provide WebRTC with their own mechanism for -// performing DNS resolution. -// TODO(bugs.webrtc.org/12598): Deprecate and remove. 
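To show how the callback-based resolver above replaces the deleted AsyncResolverFactory, a minimal sketch. The factory instance and hostname are placeholders supplied by the embedder, and the callback is assumed to run asynchronously on the calling sequence, as the interface comments describe.

#include <memory>

#include "api/async_dns_resolver.h"
#include "rtc_base/socket_address.h"

class NameResolution {
 public:
  explicit NameResolution(webrtc::AsyncDnsResolverFactoryInterface& factory) {
    webrtc::SocketAddress address("stun.example.org", 3478);
    // CreateAndResolve() starts the lookup; the callback fires later on this
    // sequence, so the resolver object must stay alive until then.
    resolver_ = factory.CreateAndResolve(address, [this]() {
      if (resolver_->result().GetError() == 0) {
        // Inspect resolver_->result().GetResolvedAddress(...) here.
      }
    });
  }

 private:
  std::unique_ptr<webrtc::AsyncDnsResolverInterface> resolver_;
};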
-class AsyncResolverFactory { - public: - AsyncResolverFactory() = default; - virtual ~AsyncResolverFactory() = default; - - // The caller should call Destroy on the returned object to delete it. - virtual rtc::AsyncResolverInterface* Create() = 0; -}; - -} // namespace webrtc - -#endif // API_ASYNC_RESOLVER_FACTORY_H_ diff --git a/api/audio/BUILD.gn b/api/audio/BUILD.gn index 4832751b5f..13e8d1f82c 100644 --- a/api/audio/BUILD.gn +++ b/api/audio/BUILD.gn @@ -8,16 +8,33 @@ import("../../webrtc.gni") +rtc_source_set("audio_device") { + visibility = [ "*" ] + sources = [ + "audio_device.h", + "audio_device_defines.h", + ] + deps = [ + "..:ref_count", + "..:scoped_refptr", + "../../rtc_base:checks", + "../../rtc_base:stringutils", + "../task_queue", + ] +} + rtc_library("audio_frame_api") { visibility = [ "*" ] sources = [ "audio_frame.cc", "audio_frame.h", + "audio_view.h", "channel_layout.cc", "channel_layout.h", ] deps = [ + "..:array_view", "..:rtp_packet_info", "../../rtc_base:checks", "../../rtc_base:logging", @@ -38,39 +55,77 @@ rtc_source_set("audio_mixer_api") { deps = [ ":audio_frame_api", "..:make_ref_counted", + "..:ref_count", "../../rtc_base:refcount", ] } -rtc_library("aec3_config") { +rtc_source_set("audio_processing") { visibility = [ "*" ] sources = [ - "echo_canceller3_config.cc", - "echo_canceller3_config.h", + "audio_processing.cc", + "audio_processing.h", ] deps = [ + ":aec3_config", + ":audio_processing_statistics", + ":echo_control", + "..:array_view", + "..:ref_count", + "..:scoped_refptr", "../../rtc_base:checks", - "../../rtc_base:safe_minmax", + "../../rtc_base:macromagic", + "../../rtc_base:stringutils", + "../../rtc_base/system:arch", + "../../rtc_base/system:file_wrapper", "../../rtc_base/system:rtc_export", + "../environment", + "../task_queue", + "//third_party/abseil-cpp/absl/base:nullability", + "//third_party/abseil-cpp/absl/strings:string_view", ] } -rtc_library("aec3_config_json") { +rtc_library("builtin_audio_processing_builder") { visibility = [ "*" ] - allow_poison = [ "rtc_json" ] + configs += [ "../../modules/audio_processing:apm_debug_dump" ] sources = [ - "echo_canceller3_config_json.cc", - "echo_canceller3_config_json.h", + "builtin_audio_processing_builder.cc", + "builtin_audio_processing_builder.h", ] deps = [ - ":aec3_config", - "../../rtc_base:checks", + ":audio_processing", + ":echo_control", + "..:make_ref_counted", + "..:scoped_refptr", + "../../modules/audio_processing", "../../rtc_base:logging", - "../../rtc_base:rtc_json", - "../../rtc_base:stringutils", + "../../rtc_base/system:rtc_export", + "../environment", + "//third_party/abseil-cpp/absl/base:nullability", + ] +} + +rtc_source_set("audio_processing_statistics") { + visibility = [ "*" ] + sources = [ + "audio_processing_statistics.cc", + "audio_processing_statistics.h", + ] + deps = [ "../../rtc_base/system:rtc_export" ] +} + +rtc_library("aec3_config") { + visibility = [ "*" ] + sources = [ + "echo_canceller3_config.cc", + "echo_canceller3_config.h", + ] + deps = [ + "../../rtc_base:checks", + "../../rtc_base:safe_minmax", "../../rtc_base/system:rtc_export", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("aec3_factory") { @@ -86,13 +141,19 @@ rtc_library("aec3_factory") { ":echo_control", "../../modules/audio_processing/aec3", "../../rtc_base/system:rtc_export", + "../environment", + "//third_party/abseil-cpp/absl/base:nullability", ] } rtc_source_set("echo_control") { visibility = [ "*" ] sources = [ "echo_control.h" ] - deps = [ "../../rtc_base:checks" 
] + deps = [ + "../../rtc_base:checks", + "../environment", + "//third_party/abseil-cpp/absl/base:nullability", + ] } rtc_source_set("echo_detector_creator") { @@ -103,9 +164,30 @@ rtc_source_set("echo_detector_creator") { "echo_detector_creator.h", ] deps = [ + ":audio_processing", "..:make_ref_counted", "../../api:scoped_refptr", - "../../modules/audio_processing:api", "../../modules/audio_processing:residual_echo_detector", ] } + +if (rtc_include_tests) { + rtc_library("audio_api_unittests") { + testonly = true + sources = [ + "audio_processing_unittest.cc", + "builtin_audio_processing_builder_unittest.cc", + ] + deps = [ + ":audio_processing", + ":builtin_audio_processing_builder", + "..:make_ref_counted", + "..:scoped_refptr", + "../../modules/audio_processing:mocks", + "../../test:test_support", + "../environment", + "../environment:environment_factory", + "test:audio_api_unittests", + ] + } +} diff --git a/api/audio/audio_device.h b/api/audio/audio_device.h new file mode 100644 index 0000000000..b22b021ec4 --- /dev/null +++ b/api/audio/audio_device.h @@ -0,0 +1,199 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_AUDIO_DEVICE_H_ +#define API_AUDIO_AUDIO_DEVICE_H_ + +#include +#include +#include "api/audio/audio_device_defines.h" +#include "api/ref_count.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_factory.h" + +namespace webrtc { + +class AudioDeviceModuleForTest; + +class AudioDeviceModule : public webrtc::RefCountInterface { + public: + enum AudioLayer { + kPlatformDefaultAudio = 0, + kWindowsCoreAudio, + kWindowsCoreAudio2, + kLinuxAlsaAudio, + kLinuxPulseAudio, + kAndroidJavaAudio, + kAndroidOpenSLESAudio, + kAndroidJavaInputAndOpenSLESOutputAudio, + kAndroidAAudioAudio, + kAndroidJavaInputAndAAudioOutputAudio, + kDummyAudio, + }; + + enum WindowsDeviceType { + kDefaultCommunicationDevice = -1, + kDefaultDevice = -2 + }; + +// Only supported on iOS. +#if defined(WEBRTC_IOS) + enum MutedSpeechEvent { kMutedSpeechStarted, kMutedSpeechEnded }; + typedef void (^MutedSpeechEventHandler)(MutedSpeechEvent event); +#endif // WEBRTC_IOS + + struct Stats { + // The fields below correspond to similarly-named fields in the WebRTC stats + // spec. https://w3c.github.io/webrtc-stats/#playoutstats-dict* + double synthesized_samples_duration_s = 0; + uint64_t synthesized_samples_events = 0; + double total_samples_duration_s = 0; + double total_playout_delay_s = 0; + uint64_t total_samples_count = 0; + }; + + public: + // Creates a default ADM for usage in production code. + static scoped_refptr Create( + AudioLayer audio_layer, TaskQueueFactory* task_queue_factory); + // Creates an ADM with support for extra test methods. Don't use this factory + // in production code. 
+ static scoped_refptr CreateForTest( + AudioLayer audio_layer, TaskQueueFactory* task_queue_factory); + + // Retrieve the currently utilized audio layer + virtual int32_t ActiveAudioLayer(AudioLayer* audioLayer) const = 0; + + // Full-duplex transportation of PCM audio + virtual int32_t RegisterAudioCallback(AudioTransport* audioCallback) = 0; + + // Main initialization and termination + virtual int32_t Init() = 0; + virtual int32_t Terminate() = 0; + virtual bool Initialized() const = 0; + + // Device enumeration + virtual int16_t PlayoutDevices() = 0; + virtual int16_t RecordingDevices() = 0; + virtual int32_t PlayoutDeviceName(uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) = 0; + virtual int32_t RecordingDeviceName(uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) = 0; + + // Device selection + virtual int32_t SetPlayoutDevice(uint16_t index) = 0; + virtual int32_t SetPlayoutDevice(WindowsDeviceType device) = 0; + virtual int32_t SetRecordingDevice(uint16_t index) = 0; + virtual int32_t SetRecordingDevice(WindowsDeviceType device) = 0; + + // Audio transport initialization + virtual int32_t PlayoutIsAvailable(bool* available) = 0; + virtual int32_t InitPlayout() = 0; + virtual bool PlayoutIsInitialized() const = 0; + virtual int32_t RecordingIsAvailable(bool* available) = 0; + virtual int32_t InitRecording() = 0; + virtual bool RecordingIsInitialized() const = 0; + + // Audio transport control + virtual int32_t StartPlayout() = 0; + virtual int32_t StopPlayout() = 0; + virtual bool Playing() const = 0; + virtual int32_t StartRecording() = 0; + virtual int32_t StopRecording() = 0; + virtual bool Recording() const = 0; + + // Audio mixer initialization + virtual int32_t InitSpeaker() = 0; + virtual bool SpeakerIsInitialized() const = 0; + virtual int32_t InitMicrophone() = 0; + virtual bool MicrophoneIsInitialized() const = 0; + + // Speaker volume controls + virtual int32_t SpeakerVolumeIsAvailable(bool* available) = 0; + virtual int32_t SetSpeakerVolume(uint32_t volume) = 0; + virtual int32_t SpeakerVolume(uint32_t* volume) const = 0; + virtual int32_t MaxSpeakerVolume(uint32_t* maxVolume) const = 0; + virtual int32_t MinSpeakerVolume(uint32_t* minVolume) const = 0; + + // Microphone volume controls + virtual int32_t MicrophoneVolumeIsAvailable(bool* available) = 0; + virtual int32_t SetMicrophoneVolume(uint32_t volume) = 0; + virtual int32_t MicrophoneVolume(uint32_t* volume) const = 0; + virtual int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const = 0; + virtual int32_t MinMicrophoneVolume(uint32_t* minVolume) const = 0; + + // Speaker mute control + virtual int32_t SpeakerMuteIsAvailable(bool* available) = 0; + virtual int32_t SetSpeakerMute(bool enable) = 0; + virtual int32_t SpeakerMute(bool* enabled) const = 0; + + // Microphone mute control + virtual int32_t MicrophoneMuteIsAvailable(bool* available) = 0; + virtual int32_t SetMicrophoneMute(bool enable) = 0; + virtual int32_t MicrophoneMute(bool* enabled) const = 0; + + // Stereo support + virtual int32_t StereoPlayoutIsAvailable(bool* available) const = 0; + virtual int32_t SetStereoPlayout(bool enable) = 0; + virtual int32_t StereoPlayout(bool* enabled) const = 0; + virtual int32_t StereoRecordingIsAvailable(bool* available) const = 0; + virtual int32_t SetStereoRecording(bool enable) = 0; + virtual int32_t StereoRecording(bool* enabled) const = 0; + + // Playout delay + virtual int32_t PlayoutDelay(uint16_t* delayMS) const = 0; + + // Only supported on 
Android. + virtual bool BuiltInAECIsAvailable() const = 0; + virtual bool BuiltInAGCIsAvailable() const = 0; + virtual bool BuiltInNSIsAvailable() const = 0; + + // Enables the built-in audio effects. Only supported on Android. + virtual int32_t EnableBuiltInAEC(bool enable) = 0; + virtual int32_t EnableBuiltInAGC(bool enable) = 0; + virtual int32_t EnableBuiltInNS(bool enable) = 0; + + // Play underrun count. Only supported on Android. + // TODO(alexnarest): Make it abstract after upstream projects support it. + virtual int32_t GetPlayoutUnderrunCount() const { return -1; } + + // Used to generate RTC stats. If not implemented, RTCAudioPlayoutStats will + // not be present in the stats. + virtual std::optional GetStats() const { return std::nullopt; } + +// Only supported on iOS. +#if defined(WEBRTC_IOS) + virtual int GetPlayoutAudioParameters(AudioParameters* params) const = 0; + virtual int GetRecordAudioParameters(AudioParameters* params) const = 0; +#endif // WEBRTC_IOS + + protected: + ~AudioDeviceModule() override {} +}; + +// Extends the default ADM interface with some extra test methods. +// Intended for usage in tests only and requires a unique factory method. +class AudioDeviceModuleForTest : public AudioDeviceModule { + public: + // Triggers internal restart sequences of audio streaming. Can be used by + // tests to emulate events corresponding to e.g. removal of an active audio + // device or other actions which causes the stream to be disconnected. + virtual int RestartPlayoutInternally() = 0; + virtual int RestartRecordingInternally() = 0; + + virtual int SetPlayoutSampleRate(uint32_t sample_rate) = 0; + virtual int SetRecordingSampleRate(uint32_t sample_rate) = 0; +}; + +} // namespace webrtc + +#endif // API_AUDIO_AUDIO_DEVICE_H_ diff --git a/api/audio/audio_device_defines.h b/api/audio/audio_device_defines.h new file mode 100644 index 0000000000..63eca2a422 --- /dev/null +++ b/api/audio/audio_device_defines.h @@ -0,0 +1,178 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
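A minimal usage sketch for the AudioDeviceModule interface defined above (error handling omitted; each call returns 0 on success). `CreateDefaultTaskQueueFactory()` from api/task_queue/ and the `webrtc::` spelling of `scoped_refptr` are assumptions here.

#include <memory>

#include "api/audio/audio_device.h"
#include "api/task_queue/default_task_queue_factory.h"

void PlayoutSketch() {
  std::unique_ptr<webrtc::TaskQueueFactory> task_queue_factory =
      webrtc::CreateDefaultTaskQueueFactory();
  webrtc::scoped_refptr<webrtc::AudioDeviceModule> adm =
      webrtc::AudioDeviceModule::Create(
          webrtc::AudioDeviceModule::kPlatformDefaultAudio,
          task_queue_factory.get());
  adm->Init();
  adm->SetPlayoutDevice(0);  // First enumerated playout device.
  adm->InitPlayout();
  adm->StartPlayout();
  // RegisterAudioCallback() supplies/consumes PCM through AudioTransport.
  adm->StopPlayout();
  adm->Terminate();
}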
+ */ + +#ifndef API_AUDIO_AUDIO_DEVICE_DEFINES_H_ +#define API_AUDIO_AUDIO_DEVICE_DEFINES_H_ + +#include + +#include +#include +#include + +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { + +static const int kAdmMaxDeviceNameSize = 128; +static const int kAdmMaxFileNameSize = 512; +static const int kAdmMaxGuidSize = 128; + +static const int kAdmMinPlayoutBufferSizeMs = 10; +static const int kAdmMaxPlayoutBufferSizeMs = 250; + +// ---------------------------------------------------------------------------- +// AudioTransport +// ---------------------------------------------------------------------------- + +class AudioTransport { + public: + // TODO(bugs.webrtc.org/13620) Deprecate this function + virtual int32_t RecordedDataIsAvailable(const void* audioSamples, + size_t nSamples, + size_t nBytesPerSample, + size_t nChannels, + uint32_t samplesPerSec, + uint32_t totalDelayMS, + int32_t clockDrift, + uint32_t currentMicLevel, + bool keyPressed, + uint32_t& newMicLevel) = 0; // NOLINT + + virtual int32_t RecordedDataIsAvailable( + const void* audioSamples, + size_t nSamples, + size_t nBytesPerSample, + size_t nChannels, + uint32_t samplesPerSec, + uint32_t totalDelayMS, + int32_t clockDrift, + uint32_t currentMicLevel, + bool keyPressed, + uint32_t& newMicLevel, + std::optional /* estimatedCaptureTimeNS */) { // NOLINT + // TODO(webrtc:13620) Make the default behaver of the new API to behave as + // the old API. This can be pure virtual if all uses of the old API is + // removed. + return RecordedDataIsAvailable( + audioSamples, nSamples, nBytesPerSample, nChannels, samplesPerSec, + totalDelayMS, clockDrift, currentMicLevel, keyPressed, newMicLevel); + } + + // Implementation has to setup safe values for all specified out parameters. + virtual int32_t NeedMorePlayData(size_t nSamples, + size_t nBytesPerSample, + size_t nChannels, + uint32_t samplesPerSec, + void* audioSamples, + size_t& nSamplesOut, // NOLINT + int64_t* elapsed_time_ms, + int64_t* ntp_time_ms) = 0; // NOLINT + + // Method to pull mixed render audio data from all active VoE channels. + // The data will not be passed as reference for audio processing internally. + virtual void PullRenderData(int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + void* audio_data, + int64_t* elapsed_time_ms, + int64_t* ntp_time_ms) = 0; + + protected: + virtual ~AudioTransport() {} +}; + +// Helper class for storage of fundamental audio parameters such as sample rate, +// number of channels, native buffer size etc. +// Note that one audio frame can contain more than one channel sample and each +// sample is assumed to be a 16-bit PCM sample. Hence, one audio frame in +// stereo contains 2 * (16/8) = 4 bytes of data. +class AudioParameters { + public: + // This implementation does only support 16-bit PCM samples. 
+ static const size_t kBitsPerSample = 16; + AudioParameters() + : sample_rate_(0), + channels_(0), + frames_per_buffer_(0), + frames_per_10ms_buffer_(0) {} + AudioParameters(int sample_rate, size_t channels, size_t frames_per_buffer) + : sample_rate_(sample_rate), + channels_(channels), + frames_per_buffer_(frames_per_buffer), + frames_per_10ms_buffer_(static_cast(sample_rate / 100)) {} + void reset(int sample_rate, size_t channels, size_t frames_per_buffer) { + sample_rate_ = sample_rate; + channels_ = channels; + frames_per_buffer_ = frames_per_buffer; + frames_per_10ms_buffer_ = static_cast(sample_rate / 100); + } + size_t bits_per_sample() const { return kBitsPerSample; } + void reset(int sample_rate, size_t channels, double buffer_duration) { + reset(sample_rate, channels, + static_cast(sample_rate * buffer_duration + 0.5)); + } + void reset(int sample_rate, size_t channels) { + reset(sample_rate, channels, static_cast(0)); + } + int sample_rate() const { return sample_rate_; } + size_t channels() const { return channels_; } + size_t frames_per_buffer() const { return frames_per_buffer_; } + size_t frames_per_10ms_buffer() const { return frames_per_10ms_buffer_; } + size_t GetBytesPerFrame() const { return channels_ * kBitsPerSample / 8; } + size_t GetBytesPerBuffer() const { + return frames_per_buffer_ * GetBytesPerFrame(); + } + // The WebRTC audio device buffer (ADB) only requires that the sample rate + // and number of channels are configured. Hence, to be "valid", only these + // two attributes must be set. + bool is_valid() const { return ((sample_rate_ > 0) && (channels_ > 0)); } + // Most platforms also require that a native buffer size is defined. + // An audio parameter instance is considered to be "complete" if it is both + // "valid" (can be used by the ADB) and also has a native frame size. 
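  // For example, a 48 kHz stereo stream configured with a 480-frame native
  // buffer,
  //   AudioParameters params(48000, 2, 480);
  // yields frames_per_10ms_buffer() == 480, GetBytesPerFrame() == 4 and
  // GetBytesPerBuffer() == 1920, and the instance is both "valid" and
  // "complete".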
+ bool is_complete() const { return (is_valid() && (frames_per_buffer_ > 0)); } + size_t GetBytesPer10msBuffer() const { + return frames_per_10ms_buffer_ * GetBytesPerFrame(); + } + double GetBufferSizeInMilliseconds() const { + if (sample_rate_ == 0) + return 0.0; + return frames_per_buffer_ / (sample_rate_ / 1000.0); + } + double GetBufferSizeInSeconds() const { + if (sample_rate_ == 0) + return 0.0; + return static_cast(frames_per_buffer_) / (sample_rate_); + } + std::string ToString() const { + char ss_buf[1024]; + SimpleStringBuilder ss(ss_buf); + ss << "AudioParameters: "; + ss << "sample_rate=" << sample_rate() << ", channels=" << channels(); + ss << ", frames_per_buffer=" << frames_per_buffer(); + ss << ", frames_per_10ms_buffer=" << frames_per_10ms_buffer(); + ss << ", bytes_per_frame=" << GetBytesPerFrame(); + ss << ", bytes_per_buffer=" << GetBytesPerBuffer(); + ss << ", bytes_per_10ms_buffer=" << GetBytesPer10msBuffer(); + ss << ", size_in_ms=" << GetBufferSizeInMilliseconds(); + return ss.str(); + } + + private: + int sample_rate_; + size_t channels_; + size_t frames_per_buffer_; + size_t frames_per_10ms_buffer_; +}; + +} // namespace webrtc + +#endif // API_AUDIO_AUDIO_DEVICE_DEFINES_H_ diff --git a/api/audio/audio_frame.cc b/api/audio/audio_frame.cc index 3e12006386..f578b719ff 100644 --- a/api/audio/audio_frame.cc +++ b/api/audio/audio_frame.cc @@ -12,6 +12,13 @@ #include +#include +#include + +#include "api/array_view.h" +#include "api/audio/audio_view.h" +#include "api/audio/channel_layout.h" +#include "api/rtp_packet_infos.h" #include "rtc_base/checks.h" #include "rtc_base/time_utils.h" @@ -22,6 +29,20 @@ AudioFrame::AudioFrame() { static_assert(sizeof(data_) == kMaxDataSizeBytes, "kMaxDataSizeBytes"); } +AudioFrame::AudioFrame(int sample_rate_hz, + size_t num_channels, + ChannelLayout layout /*= CHANNEL_LAYOUT_UNSUPPORTED*/) + : samples_per_channel_(SampleRateToDefaultChannelSize(sample_rate_hz)), + sample_rate_hz_(sample_rate_hz), + num_channels_(num_channels), + channel_layout_(layout == CHANNEL_LAYOUT_UNSUPPORTED + ? 
GuessChannelLayout(num_channels) + : layout) { + RTC_DCHECK_LE(num_channels_, kMaxConcurrentChannels); + RTC_DCHECK_GT(sample_rate_hz_, 0); + RTC_DCHECK_GT(samples_per_channel_, 0u); +} + void AudioFrame::Reset() { ResetWithoutMuting(); muted_ = true; @@ -41,7 +62,7 @@ void AudioFrame::ResetWithoutMuting() { vad_activity_ = kVadUnknown; profile_timestamp_ms_ = 0; packet_infos_ = RtpPacketInfos(); - absolute_capture_timestamp_ms_ = absl::nullopt; + absolute_capture_timestamp_ms_ = std::nullopt; } void AudioFrame::UpdateFrame(uint32_t timestamp, @@ -51,6 +72,7 @@ void AudioFrame::UpdateFrame(uint32_t timestamp, SpeechType speech_type, VADActivity vad_activity, size_t num_channels) { + RTC_CHECK_LE(num_channels, kMaxConcurrentChannels); timestamp_ = timestamp; samples_per_channel_ = samples_per_channel; sample_rate_hz_ = sample_rate_hz; @@ -63,9 +85,9 @@ void AudioFrame::UpdateFrame(uint32_t timestamp, } const size_t length = samples_per_channel * num_channels; - RTC_CHECK_LE(length, kMaxDataSizeSamples); + RTC_CHECK_LE(length, data_.size()); if (data != nullptr) { - memcpy(data_, data, sizeof(int16_t) * length); + memcpy(data_.data(), data, sizeof(int16_t) * length); muted_ = false; } else { muted_ = true; @@ -76,6 +98,16 @@ void AudioFrame::CopyFrom(const AudioFrame& src) { if (this == &src) return; + if (muted_ && !src.muted()) { + // TODO: bugs.webrtc.org/5647 - Since the default value for `muted_` is + // false and `data_` may still be uninitialized (because we don't initialize + // data_ as part of construction), we clear the full buffer here before + // copying over new values. If we don't, msan might complain in some tests. + // Consider locking down construction, avoiding the default constructor and + // prefering construction that initializes all state. + ClearSamples(data_); + } + timestamp_ = src.timestamp_; elapsed_time_ms_ = src.elapsed_time_ms_; ntp_time_ms_ = src.ntp_time_ms_; @@ -89,16 +121,15 @@ void AudioFrame::CopyFrom(const AudioFrame& src) { channel_layout_ = src.channel_layout_; absolute_capture_timestamp_ms_ = src.absolute_capture_timestamp_ms(); - const size_t length = samples_per_channel_ * num_channels_; - RTC_CHECK_LE(length, kMaxDataSizeSamples); - if (!src.muted()) { - memcpy(data_, src.data(), sizeof(int16_t) * length); - muted_ = false; + auto data = src.data_view(); + RTC_CHECK_LE(data.size(), data_.size()); + if (!muted_ && !data.empty()) { + memcpy(&data_[0], &data[0], sizeof(int16_t) * data.size()); } } void AudioFrame::UpdateProfileTimeStamp() { - profile_timestamp_ms_ = rtc::TimeMillis(); + profile_timestamp_ms_ = TimeMillis(); } int64_t AudioFrame::ElapsedProfileTimeMs() const { @@ -106,21 +137,60 @@ int64_t AudioFrame::ElapsedProfileTimeMs() const { // Profiling has not been activated. return -1; } - return rtc::TimeSince(profile_timestamp_ms_); + return TimeSince(profile_timestamp_ms_); } const int16_t* AudioFrame::data() const { - return muted_ ? empty_data() : data_; + return muted_ ? zeroed_data().begin() : data_.data(); +} + +InterleavedView AudioFrame::data_view() const { + // If you get a nullptr from `data_view()`, it's likely because the + // samples_per_channel_ and/or num_channels_ members haven't been properly + // set. Since `data_view()` returns an InterleavedView<> (which internally + // uses webrtc::ArrayView<>), we inherit the behavior in InterleavedView when + // the view size is 0 that ArrayView<>::data() returns nullptr. 
So, even when + // an AudioFrame is muted and we want to return `zeroed_data()`, if + // samples_per_channel_ or num_channels_ is 0, the view will point to + // nullptr. + return InterleavedView(muted_ ? &zeroed_data()[0] : &data_[0], + samples_per_channel_, num_channels_); } -// TODO(henrik.lundin) Can we skip zeroing the buffer? -// See https://bugs.chromium.org/p/webrtc/issues/detail?id=5647. int16_t* AudioFrame::mutable_data() { + // TODO: bugs.webrtc.org/5647 - Can we skip zeroing the buffer? + // Consider instead if we should rather zero the buffer when `muted_` is set + // to `true`. if (muted_) { - memset(data_, 0, kMaxDataSizeBytes); + ClearSamples(data_); muted_ = false; } - return data_; + return &data_[0]; +} + +InterleavedView AudioFrame::mutable_data(size_t samples_per_channel, + size_t num_channels) { + const size_t total_samples = samples_per_channel * num_channels; + RTC_CHECK_LE(total_samples, data_.size()); + RTC_CHECK_LE(num_channels, kMaxConcurrentChannels); + // Sanity check for valid argument values during development. + // If `samples_per_channel` is < `num_channels` but larger than 0, + // then chances are the order of arguments is incorrect. + RTC_DCHECK((samples_per_channel == 0 && num_channels == 0) || + num_channels <= samples_per_channel) + << "samples_per_channel=" << samples_per_channel + << "num_channels=" << num_channels; + + // TODO: bugs.webrtc.org/5647 - Can we skip zeroing the buffer? + // Consider instead if we should rather zero the whole buffer when `muted_` is + // set to `true`. + if (muted_) { + ClearSamples(data_, total_samples); + muted_ = false; + } + samples_per_channel_ = samples_per_channel; + num_channels_ = num_channels; + return InterleavedView(&data_[0], samples_per_channel, num_channels); } void AudioFrame::Mute() { @@ -131,10 +201,35 @@ bool AudioFrame::muted() const { return muted_; } +void AudioFrame::SetLayoutAndNumChannels(ChannelLayout layout, + size_t num_channels) { + channel_layout_ = layout; + num_channels_ = num_channels; +#if RTC_DCHECK_IS_ON + // Do a sanity check that the layout and num_channels match. + // If this lookup yield 0u, then the layout is likely CHANNEL_LAYOUT_DISCRETE. + auto expected_num_channels = ChannelLayoutToChannelCount(layout); + if (expected_num_channels) { // If expected_num_channels is 0 + RTC_DCHECK_EQ(expected_num_channels, num_channels_); + } +#endif + RTC_CHECK_LE(samples_per_channel_ * num_channels_, data_.size()); +} + +void AudioFrame::SetSampleRateAndChannelSize(int sample_rate) { + sample_rate_hz_ = sample_rate; + // We could call `AudioProcessing::GetFrameSize()` here, but that requires + // adding a dependency on the ":audio_processing" build target, which can + // complicate the dependency tree. Some refactoring is probably in order to + // get some consistency around this since there are many places across the + // code that assume this default buffer size. 
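  // For example, SetSampleRateAndChannelSize(48000) sets samples_per_channel_
  // to 480, i.e. 48000 samples/s divided by 100 ten-millisecond buffers/s.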
+ samples_per_channel_ = SampleRateToDefaultChannelSize(sample_rate_hz_); +} + // static -const int16_t* AudioFrame::empty_data() { +ArrayView AudioFrame::zeroed_data() { static int16_t* null_data = new int16_t[kMaxDataSizeSamples](); - return &null_data[0]; + return ArrayView(null_data, kMaxDataSizeSamples); } } // namespace webrtc diff --git a/api/audio/audio_frame.h b/api/audio/audio_frame.h index d5dcb5f788..456bd2885c 100644 --- a/api/audio/audio_frame.h +++ b/api/audio/audio_frame.h @@ -14,11 +14,34 @@ #include #include +#include +#include + +#include "api/array_view.h" +#include "api/audio/audio_view.h" #include "api/audio/channel_layout.h" #include "api/rtp_packet_infos.h" +#include "rtc_base/checks.h" namespace webrtc { +// Default webrtc buffer size in milliseconds. +constexpr size_t kDefaultAudioBufferLengthMs = 10u; + +// Default total number of audio buffers per second based on the default length. +constexpr size_t kDefaultAudioBuffersPerSec = + 1000u / kDefaultAudioBufferLengthMs; + +// Returns the number of samples a buffer needs to hold for ~10ms of a single +// audio channel at a given sample rate. +// See also `AudioProcessing::GetFrameSize()`. +inline size_t SampleRateToDefaultChannelSize(size_t sample_rate) { + // Basic sanity check. 192kHz is the highest supported input sample rate. + RTC_DCHECK_LE(sample_rate, 192000); + return sample_rate / kDefaultAudioBuffersPerSec; +} +///////////////////////////////////////////////////////////////////// + /* This class holds up to 120 ms of super-wideband (32 kHz) stereo audio. It * allows for adding and subtracting frames while keeping track of the resulting * states. @@ -57,6 +80,15 @@ class AudioFrame { AudioFrame(); + // Construct an audio frame with frame length properties and channel + // information. `samples_per_channel()` will be initialized to a 10ms buffer + // size and if `layout` is not specified (default value of + // CHANNEL_LAYOUT_UNSUPPORTED is set), then the channel layout is derived + // (guessed) from `num_channels`. + AudioFrame(int sample_rate_hz, + size_t num_channels, + ChannelLayout layout = CHANNEL_LAYOUT_UNSUPPORTED); + AudioFrame(const AudioFrame&) = delete; AudioFrame& operator=(const AudioFrame&) = delete; @@ -68,6 +100,7 @@ class AudioFrame { // ResetWithoutMuting() to skip this wasteful zeroing. void ResetWithoutMuting(); + // TODO: b/335805780 - Accept InterleavedView. void UpdateFrame(uint32_t timestamp, const int16_t* data, size_t samples_per_channel, @@ -90,20 +123,40 @@ class AudioFrame { int64_t ElapsedProfileTimeMs() const; // data() returns a zeroed static buffer if the frame is muted. - // mutable_frame() always returns a non-static buffer; the first call to - // mutable_frame() zeros the non-static buffer and marks the frame unmuted. + // TODO: b/335805780 - Return InterleavedView. const int16_t* data() const; + + // Returns a read-only view of all the valid samples held by the AudioFrame. + // For a muted AudioFrame, the samples will all be 0. + InterleavedView data_view() const; + + // mutable_frame() always returns a non-static buffer; the first call to + // mutable_frame() zeros the buffer and marks the frame as unmuted. + // TODO: b/335805780 - Return an InterleavedView. int16_t* mutable_data(); + // Grants write access to the audio buffer. 
The size of the returned writable + // view is determined by the `samples_per_channel` and `num_channels` + // dimensions which the function checks for correctness and stores in the + // internal member variables; `samples_per_channel()` and `num_channels()` + // respectively. + // If the state is currently muted, the returned view will be zeroed out. + InterleavedView mutable_data(size_t samples_per_channel, + size_t num_channels); + // Prefer to mute frames using AudioFrameOperations::Mute. void Mute(); // Frame is muted by default. bool muted() const; - size_t max_16bit_samples() const { return kMaxDataSizeSamples; } + size_t max_16bit_samples() const { return data_.size(); } size_t samples_per_channel() const { return samples_per_channel_; } size_t num_channels() const { return num_channels_; } + ChannelLayout channel_layout() const { return channel_layout_; } + // Sets the `channel_layout` property as well as `num_channels`. + void SetLayoutAndNumChannels(ChannelLayout layout, size_t num_channels); + int sample_rate_hz() const { return sample_rate_hz_; } void set_absolute_capture_timestamp_ms( @@ -111,10 +164,14 @@ class AudioFrame { absolute_capture_timestamp_ms_ = absolute_capture_time_stamp_ms; } - absl::optional absolute_capture_timestamp_ms() const { + std::optional absolute_capture_timestamp_ms() const { return absolute_capture_timestamp_ms_; } + // Sets the sample_rate_hz and samples_per_channel properties based on a + // given sample rate and calculates a default 10ms samples_per_channel value. + void SetSampleRateAndChannelSize(int sample_rate); + // RTP timestamp of the first sample in the AudioFrame. uint32_t timestamp_ = 0; // Time since the first frame in milliseconds. @@ -126,14 +183,13 @@ class AudioFrame { size_t samples_per_channel_ = 0; int sample_rate_hz_ = 0; size_t num_channels_ = 0; - ChannelLayout channel_layout_ = CHANNEL_LAYOUT_NONE; SpeechType speech_type_ = kUndefined; VADActivity vad_activity_ = kVadUnknown; // Monotonically increasing timestamp intended for profiling of audio frames. // Typically used for measuring elapsed time between two different points in // the audio path. No lock is used to save resources and we are thread safe // by design. - // TODO(nisse@webrtc.org): consider using absl::optional. + // TODO(nisse@webrtc.org): consider using std::optional. int64_t profile_timestamp_ms_ = 0; // Information about packets used to assemble this audio frame. This is needed @@ -154,18 +210,19 @@ class AudioFrame { private: // A permanently zeroed out buffer to represent muted frames. This is a - // header-only class, so the only way to avoid creating a separate empty + // header-only class, so the only way to avoid creating a separate zeroed // buffer per translation unit is to wrap a static in an inline function. - static const int16_t* empty_data(); + static ArrayView zeroed_data(); - int16_t data_[kMaxDataSizeSamples]; + std::array data_; bool muted_ = true; + ChannelLayout channel_layout_ = CHANNEL_LAYOUT_NONE; // Absolute capture timestamp when this audio frame was originally captured. // This is only valid for audio frames captured on this machine. The absolute // capture timestamp of a received frame is found in `packet_infos_`. - // This timestamp MUST be based on the same clock as rtc::TimeMillis(). - absl::optional absolute_capture_timestamp_ms_; + // This timestamp MUST be based on the same clock as webrtc::TimeMillis(). 
+ std::optional absolute_capture_timestamp_ms_; }; } // namespace webrtc diff --git a/api/audio/audio_mixer.h b/api/audio/audio_mixer.h index 3483df22bc..baf9f55acb 100644 --- a/api/audio/audio_mixer.h +++ b/api/audio/audio_mixer.h @@ -11,17 +11,17 @@ #ifndef API_AUDIO_AUDIO_MIXER_H_ #define API_AUDIO_AUDIO_MIXER_H_ -#include +#include #include "api/audio/audio_frame.h" -#include "rtc_base/ref_count.h" +#include "api/ref_count.h" namespace webrtc { // WORK IN PROGRESS // This class is under development and is not yet intended for for use outside // of WebRtc/Libjingle. -class AudioMixer : public rtc::RefCountInterface { +class AudioMixer : public RefCountInterface { public: // A callback class that all mixer participants must inherit from/implement. class Source { diff --git a/modules/audio_processing/include/audio_processing.cc b/api/audio/audio_processing.cc similarity index 91% rename from modules/audio_processing/include/audio_processing.cc rename to api/audio/audio_processing.cc index 13ddcc588a..2941001c1d 100644 --- a/modules/audio_processing/include/audio_processing.cc +++ b/api/audio/audio_processing.cc @@ -8,10 +8,17 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/audio_processing/include/audio_processing.h" +#include "api/audio/audio_processing.h" +#include +#include +#include + +#include "absl/base/nullability.h" +#include "api/environment/environment.h" +#include "api/scoped_refptr.h" +#include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/system/arch.h" namespace webrtc { namespace { @@ -51,7 +58,7 @@ std::string GainController1ModeToString(const Agc1Config::Mode& mode) { constexpr int AudioProcessing::kNativeSampleRatesHz[]; void CustomProcessing::SetRuntimeSetting( - AudioProcessing::RuntimeSetting setting) {} + AudioProcessing::RuntimeSetting /* setting */) {} bool Agc1Config::operator==(const Agc1Config& rhs) const { const auto& analog_lhs = analog_gain_controller; @@ -121,7 +128,7 @@ operator==(const AudioProcessing::Config::CaptureLevelAdjustment:: std::string AudioProcessing::Config::ToString() const { char buf[2048]; - rtc::SimpleStringBuilder builder(buf); + SimpleStringBuilder builder(buf); builder << "AudioProcessing::Config{ " "pipeline: { " "maximum_internal_processing_rate: " @@ -207,4 +214,25 @@ std::string AudioProcessing::Config::ToString() const { return builder.str(); } +absl_nonnull std::unique_ptr +CustomAudioProcessing( + absl_nonnull scoped_refptr audio_processing) { + class Builder : public AudioProcessingBuilderInterface { + public: + explicit Builder(absl_nonnull scoped_refptr ap) + : ap_(std::move(ap)) {} + + absl_nullable scoped_refptr Build( + const Environment& /*env*/) override { + return std::move(ap_); + } + + private: + absl_nonnull scoped_refptr ap_; + }; + + RTC_CHECK(audio_processing); + return std::make_unique(std::move(audio_processing)); +} + } // namespace webrtc diff --git a/api/audio/audio_processing.h b/api/audio/audio_processing.h new file mode 100644 index 0000000000..6a56874fe7 --- /dev/null +++ b/api/audio/audio_processing.h @@ -0,0 +1,897 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_AUDIO_PROCESSING_H_ +#define API_AUDIO_AUDIO_PROCESSING_H_ + +// MSVC++ requires this to be set before any other includes to get M_PI. +#ifndef _USE_MATH_DEFINES +#define _USE_MATH_DEFINES +#endif + +#include +#include // size_t +#include // FILE +#include + +#include +#include +#include +#include +#include + +#include "absl/base/nullability.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/audio/audio_processing_statistics.h" +#include "api/audio/echo_control.h" +#include "api/environment/environment.h" +#include "api/ref_count.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" +#include "rtc_base/arraysize.h" +#include "rtc_base/checks.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +class AecDump; +class AudioBuffer; + +class StreamConfig; +class ProcessingConfig; + +class EchoDetector; + +// The Audio Processing Module (APM) provides a collection of voice processing +// components designed for real-time communications software. +// +// APM operates on two audio streams on a frame-by-frame basis. Frames of the +// primary stream, on which all processing is applied, are passed to +// `ProcessStream()`. Frames of the reverse direction stream are passed to +// `ProcessReverseStream()`. On the client-side, this will typically be the +// near-end (capture) and far-end (render) streams, respectively. APM should be +// placed in the signal chain as close to the audio hardware abstraction layer +// (HAL) as possible. +// +// On the server-side, the reverse stream will normally not be used, with +// processing occurring on each incoming stream. +// +// Component interfaces follow a similar pattern and are accessed through +// corresponding getters in APM. All components are disabled at create-time, +// with default settings that are recommended for most situations. New settings +// can be applied without enabling a component. Enabling a component triggers +// memory allocation and initialization to allow it to start processing the +// streams. +// +// Thread safety is provided with the following assumptions to reduce locking +// overhead: +// 1. The stream getters and setters are called from the same thread as +// ProcessStream(). More precisely, stream functions are never called +// concurrently with ProcessStream(). +// 2. Parameter getters are never called concurrently with the corresponding +// setter. +// +// APM accepts only linear PCM audio data in chunks of ~10 ms (see +// AudioProcessing::GetFrameSize() for details) and sample rates ranging from +// 8000 Hz to 384000 Hz. The int16 interfaces use interleaved data, while the +// float interfaces use deinterleaved data. +// +// Usage example, omitting error checking: +// +// AudioProcessing::Config config; +// config.echo_canceller.enabled = true; +// config.echo_canceller.mobile_mode = false; +// +// config.gain_controller1.enabled = true; +// config.gain_controller1.mode = +// AudioProcessing::Config::GainController1::kAdaptiveAnalog; +// config.gain_controller1.analog_level_minimum = 0; +// config.gain_controller1.analog_level_maximum = 255; +// +// config.gain_controller2.enabled = true; +// +// config.high_pass_filter.enabled = true; +// +// scoped_refptr apm = +// BuiltinAudioProcessingBuilder(config).Build(CreateEnvironment()); +// +// // Start a voice call... +// +// // ... Render frame arrives bound for the audio HAL ... +// apm->ProcessReverseStream(render_frame); +// +// // ... 
Capture frame arrives from the audio HAL ... +// // Call required set_stream_ functions. +// apm->set_stream_delay_ms(delay_ms); +// apm->set_stream_analog_level(analog_level); +// +// apm->ProcessStream(capture_frame); +// +// // Call required stream_ functions. +// analog_level = apm->recommended_stream_analog_level(); +// has_voice = apm->stream_has_voice(); +// +// // Repeat render and capture processing for the duration of the call... +// // Start a new call... +// apm->Initialize(); +// +// // Close the application... +// apm.reset(); +// +class RTC_EXPORT AudioProcessing : public RefCountInterface { + public: + // The struct below constitutes the new parameter scheme for the audio + // processing. It is being introduced gradually and until it is fully + // introduced, it is prone to change. + // TODO(peah): Remove this comment once the new config scheme is fully rolled + // out. + // + // The parameters and behavior of the audio processing module are controlled + // by changing the default values in the AudioProcessing::Config struct. + // The config is applied by passing the struct to the ApplyConfig method. + // + // This config is intended to be used during setup, and to enable/disable + // top-level processing effects. Use during processing may cause undesired + // submodule resets, affecting the audio quality. Use the RuntimeSetting + // construct for runtime configuration. + struct RTC_EXPORT Config { + // Sets the properties of the audio processing pipeline. + struct RTC_EXPORT Pipeline { + // Ways to downmix a multi-channel track to mono. + enum class DownmixMethod { + kAverageChannels, // Average across channels. + kUseFirstChannel // Use the first channel. + }; + + // Maximum allowed processing rate used internally. May only be set to + // 32000 or 48000 and any differing values will be treated as 48000. + int maximum_internal_processing_rate = 48000; + // Allow multi-channel processing of render audio. + bool multi_channel_render = false; + // Allow multi-channel processing of capture audio when AEC3 is active + // or a custom AEC is injected.. + bool multi_channel_capture = false; + // Indicates how to downmix multi-channel capture audio to mono (when + // needed). + DownmixMethod capture_downmix_method = DownmixMethod::kAverageChannels; + } pipeline; + + // Enabled the pre-amplifier. It amplifies the capture signal + // before any other processing is done. + // TODO(webrtc:5298): Deprecate and use the pre-gain functionality in + // capture_level_adjustment instead. + struct PreAmplifier { + bool enabled = false; + float fixed_gain_factor = 1.0f; + } pre_amplifier; + + // Functionality for general level adjustment in the capture pipeline. This + // should not be used together with the legacy PreAmplifier functionality. + struct CaptureLevelAdjustment { + bool operator==(const CaptureLevelAdjustment& rhs) const; + bool operator!=(const CaptureLevelAdjustment& rhs) const { + return !(*this == rhs); + } + bool enabled = false; + // The `pre_gain_factor` scales the signal before any processing is done. + float pre_gain_factor = 1.0f; + // The `post_gain_factor` scales the signal after all processing is done. + float post_gain_factor = 1.0f; + struct AnalogMicGainEmulation { + bool operator==(const AnalogMicGainEmulation& rhs) const; + bool operator!=(const AnalogMicGainEmulation& rhs) const { + return !(*this == rhs); + } + bool enabled = false; + // Initial analog gain level to use for the emulated analog gain. Must + // be in the range [0...255]. 
+ int initial_level = 255; + } analog_mic_gain_emulation; + } capture_level_adjustment; + + struct HighPassFilter { + bool enabled = false; + bool apply_in_full_band = true; + } high_pass_filter; + + struct EchoCanceller { + bool enabled = false; + bool mobile_mode = false; + bool export_linear_aec_output = false; + // Enforce the highpass filter to be on (has no effect for the mobile + // mode). + bool enforce_high_pass_filtering = true; + } echo_canceller; + + // Enables background noise suppression. + struct NoiseSuppression { + bool enabled = false; + enum Level { kLow, kModerate, kHigh, kVeryHigh }; + Level level = kModerate; + bool analyze_linear_aec_output_when_available = false; + } noise_suppression; + + // TODO(bugs.webrtc.org/357281131): Deprecated. Stop using and remove. + // Enables transient suppression. + struct TransientSuppression { + bool enabled = false; + } transient_suppression; + + // Enables automatic gain control (AGC) functionality. + // The automatic gain control (AGC) component brings the signal to an + // appropriate range. This is done by applying a digital gain directly and, + // in the analog mode, prescribing an analog gain to be applied at the audio + // HAL. + // Recommended to be enabled on the client-side. + struct RTC_EXPORT GainController1 { + bool operator==(const GainController1& rhs) const; + bool operator!=(const GainController1& rhs) const { + return !(*this == rhs); + } + + bool enabled = false; + enum Mode { + // Adaptive mode intended for use if an analog volume control is + // available on the capture device. It will require the user to provide + // coupling between the OS mixer controls and AGC through the + // stream_analog_level() functions. + // It consists of an analog gain prescription for the audio device and a + // digital compression stage. + kAdaptiveAnalog, + // Adaptive mode intended for situations in which an analog volume + // control is unavailable. It operates in a similar fashion to the + // adaptive analog mode, but with scaling instead applied in the digital + // domain. As with the analog mode, it additionally uses a digital + // compression stage. + kAdaptiveDigital, + // Fixed mode which enables only the digital compression stage also used + // by the two adaptive modes. + // It is distinguished from the adaptive modes by considering only a + // short time-window of the input signal. It applies a fixed gain + // through most of the input level range, and compresses (gradually + // reduces gain with increasing level) the input signal at higher + // levels. This mode is preferred on embedded devices where the capture + // signal level is predictable, so that a known gain can be applied. + kFixedDigital + }; + Mode mode = kAdaptiveAnalog; + // Sets the target peak level (or envelope) of the AGC in dBFs (decibels + // from digital full-scale). The convention is to use positive values. For + // instance, passing in a value of 3 corresponds to -3 dBFs, or a target + // level 3 dB below full-scale. Limited to [0, 31]. + int target_level_dbfs = 3; + // Sets the maximum gain the digital compression stage may apply, in dB. A + // higher number corresponds to greater compression, while a value of 0 + // will leave the signal uncompressed. Limited to [0, 90]. + // For updates after APM setup, use a RuntimeSetting instead. + int compression_gain_db = 9; + // When enabled, the compression stage will hard limit the signal to the + // target level. Otherwise, the signal will be compressed but not limited + // above the target level. 
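      // For example, with the default target_level_dbfs of 3, an enabled
      // limiter caps the compressed output at roughly -3 dBFS, whereas a
      // disabled limiter lets peaks exceed that target.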
+ bool enable_limiter = true; + + // Enables the analog gain controller functionality. + struct AnalogGainController { + bool enabled = true; + // TODO(bugs.webrtc.org/7494): Deprecated. Stop using and remove. + int startup_min_volume = 0; + // Lowest analog microphone level that will be applied in response to + // clipping. + int clipped_level_min = 70; + // If true, an adaptive digital gain is applied. + bool enable_digital_adaptive = true; + // Amount the microphone level is lowered with every clipping event. + // Limited to (0, 255]. + int clipped_level_step = 15; + // Proportion of clipped samples required to declare a clipping event. + // Limited to (0.f, 1.f). + float clipped_ratio_threshold = 0.1f; + // Time in frames to wait after a clipping event before checking again. + // Limited to values higher than 0. + int clipped_wait_frames = 300; + + // Enables clipping prediction functionality. + struct ClippingPredictor { + bool enabled = false; + enum Mode { + // Clipping event prediction mode with fixed step estimation. + kClippingEventPrediction, + // Clipped peak estimation mode with adaptive step estimation. + kAdaptiveStepClippingPeakPrediction, + // Clipped peak estimation mode with fixed step estimation. + kFixedStepClippingPeakPrediction, + }; + Mode mode = kClippingEventPrediction; + // Number of frames in the sliding analysis window. + int window_length = 5; + // Number of frames in the sliding reference window. + int reference_window_length = 5; + // Reference window delay (unit: number of frames). + int reference_window_delay = 5; + // Clipping prediction threshold (dBFS). + float clipping_threshold = -1.0f; + // Crest factor drop threshold (dB). + float crest_factor_margin = 3.0f; + // If true, the recommended clipped level step is used to modify the + // analog gain. Otherwise, the predictor runs without affecting the + // analog gain. + bool use_predicted_step = true; + } clipping_predictor; + } analog_gain_controller; + } gain_controller1; + + // Parameters for AGC2, an Automatic Gain Control (AGC) sub-module which + // replaces the AGC sub-module parametrized by `gain_controller1`. + // AGC2 brings the captured audio signal to the desired level by combining + // three different controllers (namely, input volume controller, adapative + // digital controller and fixed digital controller) and a limiter. + // TODO(bugs.webrtc.org:7494): Name `GainController` when AGC1 removed. + struct RTC_EXPORT GainController2 { + bool operator==(const GainController2& rhs) const; + bool operator!=(const GainController2& rhs) const { + return !(*this == rhs); + } + + // AGC2 must be created if and only if `enabled` is true. + bool enabled = false; + + // Parameters for the input volume controller, which adjusts the input + // volume applied when the audio is captured (e.g., microphone volume on + // a soundcard, input volume on HAL). + struct InputVolumeController { + bool operator==(const InputVolumeController& rhs) const; + bool operator!=(const InputVolumeController& rhs) const { + return !(*this == rhs); + } + bool enabled = false; + } input_volume_controller; + + // Parameters for the adaptive digital controller, which adjusts and + // applies a digital gain after echo cancellation and after noise + // suppression. 
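      // A minimal configuration sketch (illustrative): enable AGC2 with its
      // adaptive digital controller, leaving the input volume controller at
      // its default (disabled):
      //   AudioProcessing::Config config;
      //   config.gain_controller2.enabled = true;
      //   config.gain_controller2.adaptive_digital.enabled = true;
      //   apm->ApplyConfig(config);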
+ struct RTC_EXPORT AdaptiveDigital { + bool operator==(const AdaptiveDigital& rhs) const; + bool operator!=(const AdaptiveDigital& rhs) const { + return !(*this == rhs); + } + bool enabled = false; + float headroom_db = 5.0f; + float max_gain_db = 50.0f; + float initial_gain_db = 15.0f; + float max_gain_change_db_per_second = 6.0f; + float max_output_noise_level_dbfs = -50.0f; + } adaptive_digital; + + // Parameters for the fixed digital controller, which applies a fixed + // digital gain after the adaptive digital controller and before the + // limiter. + struct FixedDigital { + // By setting `gain_db` to a value greater than zero, the limiter can be + // turned into a compressor that first applies a fixed gain. + float gain_db = 0.0f; + } fixed_digital; + } gain_controller2; + + std::string ToString() const; + }; + + // Specifies the properties of a setting to be passed to AudioProcessing at + // runtime. + class RuntimeSetting { + public: + enum class Type { + kNotSpecified, + kCapturePreGain, + kCaptureCompressionGain, + kCaptureFixedPostGain, + kPlayoutVolumeChange, + kCustomRenderProcessingRuntimeSetting, + kPlayoutAudioDeviceChange, + kCapturePostGain, + kCaptureOutputUsed + }; + + // Play-out audio device properties. + struct PlayoutAudioDeviceInfo { + int id; // Identifies the audio device. + int max_volume; // Maximum play-out volume. + }; + + RuntimeSetting() : type_(Type::kNotSpecified), value_(0.0f) {} + ~RuntimeSetting() = default; + + static RuntimeSetting CreateCapturePreGain(float gain) { + return {Type::kCapturePreGain, gain}; + } + + static RuntimeSetting CreateCapturePostGain(float gain) { + return {Type::kCapturePostGain, gain}; + } + + // Corresponds to Config::GainController1::compression_gain_db, but for + // runtime configuration. + static RuntimeSetting CreateCompressionGainDb(int gain_db) { + RTC_DCHECK_GE(gain_db, 0); + RTC_DCHECK_LE(gain_db, 90); + return {Type::kCaptureCompressionGain, static_cast(gain_db)}; + } + + // Corresponds to Config::GainController2::fixed_digital::gain_db, but for + // runtime configuration. + static RuntimeSetting CreateCaptureFixedPostGain(float gain_db) { + RTC_DCHECK_GE(gain_db, 0.0f); + RTC_DCHECK_LE(gain_db, 90.0f); + return {Type::kCaptureFixedPostGain, gain_db}; + } + + // Creates a runtime setting to notify play-out (aka render) audio device + // changes. + static RuntimeSetting CreatePlayoutAudioDeviceChange( + PlayoutAudioDeviceInfo audio_device) { + return {Type::kPlayoutAudioDeviceChange, audio_device}; + } + + // Creates a runtime setting to notify play-out (aka render) volume changes. + // `volume` is the unnormalized volume, the maximum of which + static RuntimeSetting CreatePlayoutVolumeChange(int volume) { + return {Type::kPlayoutVolumeChange, volume}; + } + + static RuntimeSetting CreateCustomRenderSetting(float payload) { + return {Type::kCustomRenderProcessingRuntimeSetting, payload}; + } + + static RuntimeSetting CreateCaptureOutputUsedSetting( + bool capture_output_used) { + return {Type::kCaptureOutputUsed, capture_output_used}; + } + + Type type() const { return type_; } + // Getters do not return a value but instead modify the argument to protect + // from implicit casting. 
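    // For example (illustrative):
    //   RuntimeSetting setting = RuntimeSetting::CreateCapturePreGain(2.0f);
    //   float gain = 0.0f;
    //   setting.GetFloat(&gain);  // gain == 2.0f, type() == kCapturePreGain.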
+ void GetFloat(float* value) const { + RTC_DCHECK(value); + *value = value_.float_value; + } + void GetInt(int* value) const { + RTC_DCHECK(value); + *value = value_.int_value; + } + void GetBool(bool* value) const { + RTC_DCHECK(value); + *value = value_.bool_value; + } + void GetPlayoutAudioDeviceInfo(PlayoutAudioDeviceInfo* value) const { + RTC_DCHECK(value); + *value = value_.playout_audio_device_info; + } + + private: + RuntimeSetting(Type id, float value) : type_(id), value_(value) {} + RuntimeSetting(Type id, int value) : type_(id), value_(value) {} + RuntimeSetting(Type id, PlayoutAudioDeviceInfo value) + : type_(id), value_(value) {} + Type type_; + union U { + U() {} + U(int value) : int_value(value) {} + U(float value) : float_value(value) {} + U(PlayoutAudioDeviceInfo value) : playout_audio_device_info(value) {} + float float_value; + int int_value; + bool bool_value; + PlayoutAudioDeviceInfo playout_audio_device_info; + } value_; + }; + + ~AudioProcessing() override {} + + // Initializes internal states, while retaining all user settings. This + // should be called before beginning to process a new audio stream. However, + // it is not necessary to call before processing the first stream after + // creation. + // + // It is also not necessary to call if the audio parameters (sample + // rate and number of channels) have changed. Passing updated parameters + // directly to `ProcessStream()` and `ProcessReverseStream()` is permissible. + // If the parameters are known at init-time though, they may be provided. + // TODO(webrtc:5298): Change to return void. + virtual int Initialize() = 0; + + // The int16 interfaces require: + // - only `NativeRate`s be used + // - that the input, output and reverse rates must match + // - that `processing_config.output_stream()` matches + // `processing_config.input_stream()`. + // + // The float interfaces accept arbitrary rates and support differing input and + // output layouts, but the output must have either one channel or the same + // number of channels as the input. + virtual int Initialize(const ProcessingConfig& processing_config) = 0; + + // TODO(peah): This method is a temporary solution used to take control + // over the parameters in the audio processing module and is likely to change. + virtual void ApplyConfig(const Config& config) = 0; + + // TODO(ajm): Only intended for internal use. Make private and friend the + // necessary classes? + virtual int proc_sample_rate_hz() const = 0; + virtual int proc_split_sample_rate_hz() const = 0; + virtual size_t num_input_channels() const = 0; + virtual size_t num_proc_channels() const = 0; + virtual size_t num_output_channels() const = 0; + virtual size_t num_reverse_channels() const = 0; + + // Set to true when the output of AudioProcessing will be muted or in some + // other way not used. Ideally, the captured audio would still be processed, + // but some components may change behavior based on this information. + // Default false. This method takes a lock. To achieve this in a lock-less + // manner the PostRuntimeSetting can instead be used. + virtual void set_output_will_be_muted(bool muted) = 0; + + // Enqueues a runtime setting. + virtual void SetRuntimeSetting(RuntimeSetting setting) = 0; + + // Enqueues a runtime setting. Returns a bool indicating whether the + // enqueueing was successfull. 
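  // For example (illustrative):
  //   bool posted = apm->PostRuntimeSetting(
  //       AudioProcessing::RuntimeSetting::CreateCapturePostGain(1.5f));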
+ virtual bool PostRuntimeSetting(RuntimeSetting setting) = 0; + + // Accepts and produces a ~10 ms frame of interleaved 16 bit integer audio as + // specified in `input_config` and `output_config`. `src` and `dest` may use + // the same memory, if desired. + virtual int ProcessStream(const int16_t* const src, + const StreamConfig& input_config, + const StreamConfig& output_config, + int16_t* const dest) = 0; + + // Accepts deinterleaved float audio with the range [-1, 1]. Each element of + // `src` points to a channel buffer, arranged according to `input_stream`. At + // output, the channels will be arranged according to `output_stream` in + // `dest`. + // + // The output must have one channel or as many channels as the input. `src` + // and `dest` may use the same memory, if desired. + virtual int ProcessStream(const float* const* src, + const StreamConfig& input_config, + const StreamConfig& output_config, + float* const* dest) = 0; + + // Accepts and produces a ~10 ms frame of interleaved 16 bit integer audio for + // the reverse direction audio stream as specified in `input_config` and + // `output_config`. `src` and `dest` may use the same memory, if desired. + virtual int ProcessReverseStream(const int16_t* const src, + const StreamConfig& input_config, + const StreamConfig& output_config, + int16_t* const dest) = 0; + + // Accepts deinterleaved float audio with the range [-1, 1]. Each element of + // `data` points to a channel buffer, arranged according to `reverse_config`. + virtual int ProcessReverseStream(const float* const* src, + const StreamConfig& input_config, + const StreamConfig& output_config, + float* const* dest) = 0; + + // Accepts deinterleaved float audio with the range [-1, 1]. Each element + // of `data` points to a channel buffer, arranged according to + // `reverse_config`. + virtual int AnalyzeReverseStream(const float* const* data, + const StreamConfig& reverse_config) = 0; + + // Returns the most recently produced ~10 ms of the linear AEC output at a + // rate of 16 kHz. If there is more than one capture channel, a mono + // representation of the input is returned. Returns true/false to indicate + // whether an output returned. + virtual bool GetLinearAecOutput( + ArrayView> linear_output) const = 0; + + // This must be called prior to ProcessStream() if and only if adaptive analog + // gain control is enabled, to pass the current analog level from the audio + // HAL. Must be within the range [0, 255]. + virtual void set_stream_analog_level(int level) = 0; + + // When an analog mode is set, this should be called after + // `set_stream_analog_level()` and `ProcessStream()` to obtain the recommended + // new analog level for the audio HAL. It is the user's responsibility to + // apply this level. + virtual int recommended_stream_analog_level() const = 0; + + // This must be called if and only if echo processing is enabled. + // + // Sets the `delay` in ms between ProcessReverseStream() receiving a far-end + // frame and ProcessStream() receiving a near-end frame containing the + // corresponding echo. On the client-side this can be expressed as + // delay = (t_render - t_analyze) + (t_process - t_capture) + // where, + // - t_analyze is the time a frame is passed to ProcessReverseStream() and + // t_render is the time the first sample of the same frame is rendered by + // the audio hardware. + // - t_capture is the time the first sample of a frame is captured by the + // audio hardware and t_process is the time the same frame is passed to + // ProcessStream(). 
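  // For example, if a render frame is passed to ProcessReverseStream() 40 ms
  // before its first sample is played out, and a capture frame reaches
  // ProcessStream() 20 ms after its first sample was captured, then
  // delay = 40 + 20 = 60 ms.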
+ virtual int set_stream_delay_ms(int delay) = 0; + virtual int stream_delay_ms() const = 0; + + // Call to signal that a key press occurred (true) or did not occur (false) + // with this chunk of audio. + virtual void set_stream_key_pressed(bool key_pressed) = 0; + + // Creates and attaches an webrtc::AecDump for recording debugging + // information. + // The `worker_queue` may not be null and must outlive the created + // AecDump instance. |max_log_size_bytes == -1| means the log size + // will be unlimited. `handle` may not be null. The AecDump takes + // responsibility for `handle` and closes it in the destructor. A + // return value of true indicates that the file has been + // sucessfully opened, while a value of false indicates that + // opening the file failed. + virtual bool CreateAndAttachAecDump(absl::string_view file_name, + int64_t max_log_size_bytes, + TaskQueueBase* absl_nonnull + worker_queue) = 0; + virtual bool CreateAndAttachAecDump(FILE* absl_nonnull handle, + int64_t max_log_size_bytes, + TaskQueueBase* absl_nonnull + worker_queue) = 0; + + // TODO(webrtc:5298) Deprecated variant. + // Attaches provided webrtc::AecDump for recording debugging + // information. Log file and maximum file size logic is supposed to + // be handled by implementing instance of AecDump. Calling this + // method when another AecDump is attached resets the active AecDump + // with a new one. This causes the d-tor of the earlier AecDump to + // be called. The d-tor call may block until all pending logging + // tasks are completed. + virtual void AttachAecDump(std::unique_ptr aec_dump) = 0; + + // If no AecDump is attached, this has no effect. If an AecDump is + // attached, it's destructor is called. The d-tor may block until + // all pending logging tasks are completed. + virtual void DetachAecDump() = 0; + + // Get audio processing statistics. + virtual AudioProcessingStats GetStatistics() = 0; + // TODO(webrtc:5298) Deprecated variant. The `has_remote_tracks` argument + // should be set if there are active remote tracks (this would usually be true + // during a call). If there are no remote tracks some of the stats will not be + // set by AudioProcessing, because they only make sense if there is at least + // one remote track. + virtual AudioProcessingStats GetStatistics(bool has_remote_tracks) = 0; + + // Returns the last applied configuration. + virtual AudioProcessing::Config GetConfig() const = 0; + + enum Error { + // Fatal errors. + kNoError = 0, + kUnspecifiedError = -1, + kCreationFailedError = -2, + kUnsupportedComponentError = -3, + kUnsupportedFunctionError = -4, + kNullPointerError = -5, + kBadParameterError = -6, + kBadSampleRateError = -7, + kBadDataLengthError = -8, + kBadNumberChannelsError = -9, + kFileError = -10, + kStreamParameterNotSetError = -11, + kNotEnabledError = -12, + + // Warnings are non-fatal. + // This results when a set_stream_ parameter is out of range. Processing + // will continue, but the parameter may have been truncated. + kBadStreamParameterWarning = -13 + }; + + // Native rates supported by the integer interfaces. + enum NativeRate { + kSampleRate8kHz = 8000, + kSampleRate16kHz = 16000, + kSampleRate32kHz = 32000, + kSampleRate48kHz = 48000 + }; + + // TODO(kwiberg): We currently need to support a compiler (Visual C++) that + // complains if we don't explicitly state the size of the array here. Remove + // the size when that's no longer the case. 
+ static constexpr int kNativeSampleRatesHz[4] = { + kSampleRate8kHz, kSampleRate16kHz, kSampleRate32kHz, kSampleRate48kHz}; + static constexpr size_t kNumNativeSampleRates = + arraysize(kNativeSampleRatesHz); + static constexpr int kMaxNativeSampleRateHz = + kNativeSampleRatesHz[kNumNativeSampleRates - 1]; + + // APM processes audio in chunks of about 10 ms. See GetFrameSize() for + // details. + static constexpr int kChunkSizeMs = 10; + + // Returns floor(sample_rate_hz/100): the number of samples per channel used + // as input and output to the audio processing module in calls to + // ProcessStream, ProcessReverseStream, AnalyzeReverseStream, and + // GetLinearAecOutput. + // + // This is exactly 10 ms for sample rates divisible by 100. For example: + // - 48000 Hz (480 samples per channel), + // - 44100 Hz (441 samples per channel), + // - 16000 Hz (160 samples per channel). + // + // Sample rates not divisible by 100 are received/produced in frames of + // approximately 10 ms. For example: + // - 22050 Hz (220 samples per channel, or ~9.98 ms per frame), + // - 11025 Hz (110 samples per channel, or ~9.98 ms per frame). + // These nondivisible sample rates yield lower audio quality compared to + // multiples of 100. Internal resampling to 10 ms frames causes a simulated + // clock drift effect which impacts the performance of (for example) echo + // cancellation. + static int GetFrameSize(int sample_rate_hz) { return sample_rate_hz / 100; } +}; + +class AudioProcessingBuilderInterface { + public: + virtual ~AudioProcessingBuilderInterface() = default; + + virtual absl_nullable scoped_refptr Build( + const Environment& env) = 0; +}; + +// Returns builder that returns the `audio_processing` ignoring the extra +// construction parameter `env`. +// nullptr `audio_processing` is not supported as in some scenarios that imply +// no audio processing, while in others - default builtin audio processing. +// Callers should be explicit which of these two behaviors they want. +absl_nonnull std::unique_ptr +CustomAudioProcessing( + absl_nonnull scoped_refptr audio_processing); + +// Experimental interface for a custom analysis submodule. +class CustomAudioAnalyzer { + public: + // (Re-) Initializes the submodule. + virtual void Initialize(int sample_rate_hz, int num_channels) = 0; + // Analyzes the given capture or render signal. + virtual void Analyze(const AudioBuffer* audio) = 0; + // Returns a string representation of the module state. + virtual std::string ToString() const = 0; + + virtual ~CustomAudioAnalyzer() {} +}; + +// Interface for a custom processing submodule. +class CustomProcessing { + public: + // (Re-)Initializes the submodule. + virtual void Initialize(int sample_rate_hz, int num_channels) = 0; + // Processes the given capture or render signal. + virtual void Process(AudioBuffer* audio) = 0; + // Returns a string representation of the module state. + virtual std::string ToString() const = 0; + // Handles RuntimeSettings. TODO(webrtc:9262): make pure virtual + // after updating dependencies. + virtual void SetRuntimeSetting(AudioProcessing::RuntimeSetting setting); + + virtual ~CustomProcessing() {} +}; + +class StreamConfig { + public: + // sample_rate_hz: The sampling rate of the stream. + // num_channels: The number of audio channels in the stream. 
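  // For example, StreamConfig(48000, 2) describes a stereo 48 kHz stream with
  // num_frames() == 480 (one ~10 ms chunk per channel) and
  // num_samples() == 960.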
+ StreamConfig(int sample_rate_hz = 0, + size_t num_channels = 0) // NOLINT(runtime/explicit) + : sample_rate_hz_(sample_rate_hz), + num_channels_(num_channels), + num_frames_(calculate_frames(sample_rate_hz)) {} + + void set_sample_rate_hz(int value) { + sample_rate_hz_ = value; + num_frames_ = calculate_frames(value); + } + void set_num_channels(size_t value) { num_channels_ = value; } + + int sample_rate_hz() const { return sample_rate_hz_; } + + // The number of channels in the stream. + size_t num_channels() const { return num_channels_; } + + size_t num_frames() const { return num_frames_; } + size_t num_samples() const { return num_channels_ * num_frames_; } + + bool operator==(const StreamConfig& other) const { + return sample_rate_hz_ == other.sample_rate_hz_ && + num_channels_ == other.num_channels_; + } + + bool operator!=(const StreamConfig& other) const { return !(*this == other); } + + private: + static size_t calculate_frames(int sample_rate_hz) { + return static_cast(AudioProcessing::GetFrameSize(sample_rate_hz)); + } + + int sample_rate_hz_; + size_t num_channels_; + size_t num_frames_; +}; + +class ProcessingConfig { + public: + enum StreamName { + kInputStream, + kOutputStream, + kReverseInputStream, + kReverseOutputStream, + kNumStreamNames, + }; + + const StreamConfig& input_stream() const { + return streams[StreamName::kInputStream]; + } + const StreamConfig& output_stream() const { + return streams[StreamName::kOutputStream]; + } + const StreamConfig& reverse_input_stream() const { + return streams[StreamName::kReverseInputStream]; + } + const StreamConfig& reverse_output_stream() const { + return streams[StreamName::kReverseOutputStream]; + } + + StreamConfig& input_stream() { return streams[StreamName::kInputStream]; } + StreamConfig& output_stream() { return streams[StreamName::kOutputStream]; } + StreamConfig& reverse_input_stream() { + return streams[StreamName::kReverseInputStream]; + } + StreamConfig& reverse_output_stream() { + return streams[StreamName::kReverseOutputStream]; + } + + bool operator==(const ProcessingConfig& other) const { + for (int i = 0; i < StreamName::kNumStreamNames; ++i) { + if (this->streams[i] != other.streams[i]) { + return false; + } + } + return true; + } + + bool operator!=(const ProcessingConfig& other) const { + return !(*this == other); + } + + StreamConfig streams[StreamName::kNumStreamNames]; +}; + +// Interface for an echo detector submodule. +class EchoDetector : public RefCountInterface { + public: + // (Re-)Initializes the submodule. + virtual void Initialize(int capture_sample_rate_hz, + int num_capture_channels, + int render_sample_rate_hz, + int num_render_channels) = 0; + + // Analysis (not changing) of the first channel of the render signal. + virtual void AnalyzeRenderAudio(ArrayView render_audio) = 0; + + // Analysis (not changing) of the capture signal. + virtual void AnalyzeCaptureAudio(ArrayView capture_audio) = 0; + + struct Metrics { + std::optional echo_likelihood; + std::optional echo_likelihood_recent_max; + }; + + // Collect current metrics from the echo detector. 
+ virtual Metrics GetMetrics() const = 0; +}; + +} // namespace webrtc + +#endif // API_AUDIO_AUDIO_PROCESSING_H_ diff --git a/modules/audio_processing/include/audio_processing_statistics.cc b/api/audio/audio_processing_statistics.cc similarity index 89% rename from modules/audio_processing/include/audio_processing_statistics.cc rename to api/audio/audio_processing_statistics.cc index 7139ee502e..90da7e8874 100644 --- a/modules/audio_processing/include/audio_processing_statistics.cc +++ b/api/audio/audio_processing_statistics.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/audio_processing/include/audio_processing_statistics.h" +#include "api/audio/audio_processing_statistics.h" namespace webrtc { diff --git a/api/audio/audio_processing_statistics.h b/api/audio/audio_processing_statistics.h new file mode 100644 index 0000000000..d6f8b6e61c --- /dev/null +++ b/api/audio/audio_processing_statistics.h @@ -0,0 +1,68 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_AUDIO_PROCESSING_STATISTICS_H_ +#define API_AUDIO_AUDIO_PROCESSING_STATISTICS_H_ + +#include + +#include + +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { +// This version of the stats uses Optionals, it will replace the regular +// AudioProcessingStatistics struct. +struct RTC_EXPORT AudioProcessingStats { + AudioProcessingStats(); + AudioProcessingStats(const AudioProcessingStats& other); + ~AudioProcessingStats(); + + // Deprecated. + // TODO(bugs.webrtc.org/11226): Remove. + // True if voice is detected in the last capture frame, after processing. + // It is conservative in flagging audio as speech, with low likelihood of + // incorrectly flagging a frame as voice. + // Only reported if voice detection is enabled in AudioProcessing::Config. + std::optional voice_detected; + + // AEC Statistics. + // ERL = 10log_10(P_far / P_echo) + std::optional echo_return_loss; + // ERLE = 10log_10(P_echo / P_out) + std::optional echo_return_loss_enhancement; + // Fraction of time that the AEC linear filter is divergent, in a 1-second + // non-overlapped aggregation window. + std::optional divergent_filter_fraction; + + // The delay metrics consists of the delay median and standard deviation. It + // also consists of the fraction of delay estimates that can make the echo + // cancellation perform poorly. The values are aggregated until the first + // call to `GetStatistics()` and afterwards aggregated and updated every + // second. Note that if there are several clients pulling metrics from + // `GetStatistics()` during a session the first call from any of them will + // change to one second aggregation window for all. + std::optional delay_median_ms; + std::optional delay_standard_deviation_ms; + + // Residual echo detector likelihood. + std::optional residual_echo_likelihood; + // Maximum residual echo likelihood from the last time period. + std::optional residual_echo_likelihood_recent_max; + + // The instantaneous delay estimate produced in the AEC. The unit is in + // milliseconds and the value is the instantaneous value at the time of the + // call to `GetStatistics()`. 
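Since every field of AudioProcessingStats is optional, callers have to check for presence before use. A minimal sketch, not part of the patch; printf is only a stand-in for whatever logging the caller actually uses, and the double-valued fields follow the existing API.

// Illustrative sketch (not part of the patch): consuming the optional fields
// of AudioProcessingStats.
#include <cstdio>

#include "api/audio/audio_processing_statistics.h"

void LogEchoMetrics(const webrtc::AudioProcessingStats& stats) {
  // Each field may be unset, e.g. when the corresponding submodule is
  // disabled, so check before dereferencing.
  if (stats.echo_return_loss.has_value()) {
    std::printf("ERL: %.2f dB\n", *stats.echo_return_loss);
  }
  if (stats.residual_echo_likelihood.has_value()) {
    std::printf("residual echo likelihood: %.2f\n",
                *stats.residual_echo_likelihood);
  }
}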
+ std::optional delay_ms; +}; + +} // namespace webrtc + +#endif // API_AUDIO_AUDIO_PROCESSING_STATISTICS_H_ diff --git a/api/audio/audio_processing_unittest.cc b/api/audio/audio_processing_unittest.cc new file mode 100644 index 0000000000..347d8b1c49 --- /dev/null +++ b/api/audio/audio_processing_unittest.cc @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio/audio_processing.h" + +#include + +#include "api/environment/environment_factory.h" +#include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" +#include "modules/audio_processing/include/mock_audio_processing.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { + +using ::testing::_; +using ::testing::NotNull; + +TEST(CustomAudioProcessingTest, ReturnsPassedAudioProcessing) { + scoped_refptr ap = + make_ref_counted(); + + std::unique_ptr builder = + CustomAudioProcessing(ap); + + ASSERT_THAT(builder, NotNull()); + EXPECT_EQ(builder->Build(CreateEnvironment()), ap); +} + +#if GTEST_HAS_DEATH_TEST +TEST(CustomAudioProcessingTest, NullptrAudioProcessingIsUnsupported) { +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wnonnull" + EXPECT_DEATH(CustomAudioProcessing(nullptr), _); +#pragma clang diagnostic pop +} +#endif + +} // namespace webrtc diff --git a/api/audio/audio_view.h b/api/audio/audio_view.h new file mode 100644 index 0000000000..719d60896c --- /dev/null +++ b/api/audio/audio_view.h @@ -0,0 +1,272 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_AUDIO_VIEW_H_ +#define API_AUDIO_AUDIO_VIEW_H_ + +#include +#include + +#include "api/array_view.h" +#include "api/audio/channel_layout.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +// This file contains 3 types of view classes: +// +// * MonoView<>: A single channel contiguous buffer of samples. +// +// * InterleavedView<>: Channel samples are interleaved (side-by-side) in +// the buffer. A single channel InterleavedView<> is the same thing as a +// MonoView<> +// +// * DeinterleavedView<>: Each channel's samples are contiguous within the +// buffer. Channels can be enumerated and accessing the individual channel +// data is done via MonoView<>. +// +// The views are comparable to and built on webrtc::ArrayView<> but add +// audio specific properties for the dimensions of the buffer and the above +// specialized [de]interleaved support. +// +// There are also a few generic utility functions that can simplify +// generic code for supporting more than one type of view. + +// MonoView<> represents a view over a single contiguous, audio buffer. This +// can be either an single channel (mono) interleaved buffer (e.g. AudioFrame), +// or a de-interleaved channel (e.g. from AudioBuffer). 
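The three view types described above can be illustrated with one small sketch, not part of the patch; buffer contents and sizes are arbitrary example values.

// Illustrative sketch (not part of the patch): the same 2-channel,
// 3-samples-per-channel signal seen through the view types described above.
#include <cstdint>

#include "api/audio/audio_view.h"

void ViewTypesOverview() {
  int16_t interleaved_buf[6] = {1, 11, 2, 22, 3, 33};  // L R L R L R
  webrtc::InterleavedView<int16_t> interleaved(
      interleaved_buf, /*samples_per_channel=*/3, /*num_channels=*/2);

  int16_t planar_buf[6] = {1, 2, 3, 11, 22, 33};  // L L L R R R
  webrtc::DeinterleavedView<int16_t> deinterleaved(
      planar_buf, /*samples_per_channel=*/3, /*num_channels=*/2);

  // Each channel of a DeinterleavedView is itself a MonoView.
  webrtc::MonoView<int16_t> right_channel = deinterleaved[1];
  (void)interleaved;
  (void)right_channel;
}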
+template +using MonoView = ArrayView; + +// InterleavedView<> is a view over an interleaved audio buffer (e.g. from +// AudioFrame). +template +class InterleavedView { + public: + using value_type = T; + + InterleavedView() = default; + + template + InterleavedView(U* data, size_t samples_per_channel, size_t num_channels) + : num_channels_(num_channels), + samples_per_channel_(samples_per_channel), + data_(data, num_channels * samples_per_channel) { + RTC_DCHECK_LE(num_channels_, kMaxConcurrentChannels); + RTC_DCHECK(num_channels_ == 0u || samples_per_channel_ != 0u); + } + + // Construct an InterleavedView from a C-style array. Samples per channels + // is calculated based on the array size / num_channels. + template + InterleavedView(U (&array)[N], // NOLINT + size_t num_channels) + : InterleavedView(array, N / num_channels, num_channels) { + RTC_DCHECK_EQ(N % num_channels, 0u); + } + + template + InterleavedView(const InterleavedView& other) + : num_channels_(other.num_channels()), + samples_per_channel_(other.samples_per_channel()), + data_(other.data()) {} + + size_t num_channels() const { return num_channels_; } + size_t samples_per_channel() const { return samples_per_channel_; } + ArrayView data() const { return data_; } + bool empty() const { return data_.empty(); } + size_t size() const { return data_.size(); } + + MonoView AsMono() const { + RTC_DCHECK_EQ(num_channels(), 1u); + RTC_DCHECK_EQ(data_.size(), samples_per_channel_); + return data_; + } + + // A simple wrapper around memcpy that includes checks for properties. + // TODO(tommi): Consider if this can be utility function for both interleaved + // and deinterleaved views. + template + void CopyFrom(const InterleavedView& source) { + static_assert(sizeof(T) == sizeof(U), ""); + RTC_DCHECK_EQ(num_channels(), source.num_channels()); + RTC_DCHECK_EQ(samples_per_channel(), source.samples_per_channel()); + RTC_DCHECK_GE(data_.size(), source.data().size()); + const auto data = source.data(); + memcpy(&data_[0], &data[0], data.size() * sizeof(U)); + } + + T& operator[](size_t idx) const { return data_[idx]; } + T* begin() const { return data_.begin(); } + T* end() const { return data_.end(); } + const T* cbegin() const { return data_.cbegin(); } + const T* cend() const { return data_.cend(); } + std::reverse_iterator rbegin() const { return data_.rbegin(); } + std::reverse_iterator rend() const { return data_.rend(); } + std::reverse_iterator crbegin() const { return data_.crbegin(); } + std::reverse_iterator crend() const { return data_.crend(); } + + private: + // TODO(tommi): Consider having these both be stored as uint16_t to + // save a few bytes per view. Use `dchecked_cast` to support size_t during + // construction. + size_t num_channels_ = 0u; + size_t samples_per_channel_ = 0u; + ArrayView data_; +}; + +template +class DeinterleavedView { + public: + using value_type = T; + + DeinterleavedView() = default; + + template + DeinterleavedView(U* data, size_t samples_per_channel, size_t num_channels) + : num_channels_(num_channels), + samples_per_channel_(samples_per_channel), + data_(data, num_channels * samples_per_channel_) {} + + template + DeinterleavedView(const DeinterleavedView& other) + : num_channels_(other.num_channels()), + samples_per_channel_(other.samples_per_channel()), + data_(other.data()) {} + + // Returns a deinterleaved channel where `idx` is the zero based index, + // in the range [0 .. num_channels()-1]. 
+ MonoView operator[](size_t idx) const { + RTC_DCHECK_LT(idx, num_channels_); + return MonoView(&data_[idx * samples_per_channel_], + samples_per_channel_); + } + + size_t num_channels() const { return num_channels_; } + size_t samples_per_channel() const { return samples_per_channel_; } + ArrayView data() const { return data_; } + bool empty() const { return data_.empty(); } + size_t size() const { return data_.size(); } + + // Returns the first (and possibly only) channel. + MonoView AsMono() const { + RTC_DCHECK_GE(num_channels(), 1u); + return (*this)[0]; + } + + private: + // TODO(tommi): Consider having these be stored as uint16_t to save a few + // bytes per view. Use `dchecked_cast` to support size_t during construction. + size_t num_channels_ = 0u; + size_t samples_per_channel_ = 0u; + ArrayView data_; +}; + +template +constexpr size_t NumChannels(const MonoView& /* view */) { + return 1u; +} + +template +size_t NumChannels(const InterleavedView& view) { + return view.num_channels(); +} + +template +size_t NumChannels(const DeinterleavedView& view) { + return view.num_channels(); +} + +template +constexpr bool IsMono(const MonoView& /* view */) { + return true; +} + +template +constexpr bool IsInterleavedView(const MonoView& /* view */) { + return true; +} + +template +constexpr bool IsInterleavedView(const InterleavedView& /* view */) { + return true; +} + +template +constexpr bool IsInterleavedView(const DeinterleavedView& /* view */) { + return false; +} + +template +bool IsMono(const InterleavedView& view) { + return NumChannels(view) == 1u; +} + +template +bool IsMono(const DeinterleavedView& view) { + return NumChannels(view) == 1u; +} + +template +size_t SamplesPerChannel(const MonoView& view) { + return view.size(); +} + +template +size_t SamplesPerChannel(const InterleavedView& view) { + return view.samples_per_channel(); +} + +template +size_t SamplesPerChannel(const DeinterleavedView& view) { + return view.samples_per_channel(); +} +// A simple wrapper around memcpy that includes checks for properties. +// The parameter order is the same as for memcpy(), first destination then +// source. +template +void CopySamples(D& destination, const S& source) { + static_assert( + sizeof(typename D::value_type) == sizeof(typename S::value_type), ""); + // Here we'd really like to do + // static_assert(IsInterleavedView(destination) == IsInterleavedView(source), + // ""); + // but the compiler doesn't like it inside this template function for + // some reason. The following check is an approximation but unfortunately + // means that copying between a MonoView and single channel interleaved or + // deinterleaved views wouldn't work. + // static_assert(sizeof(destination) == sizeof(source), + // "Incompatible view types"); + RTC_DCHECK_EQ(NumChannels(destination), NumChannels(source)); + RTC_DCHECK_EQ(SamplesPerChannel(destination), SamplesPerChannel(source)); + RTC_DCHECK_GE(destination.size(), source.size()); + memcpy(&destination[0], &source[0], + source.size() * sizeof(typename S::value_type)); +} + +// Sets all the samples in a view to 0. This template function is a simple +// wrapper around `memset()` but adds the benefit of automatically calculating +// the byte size from the number of samples and sample type. +template +void ClearSamples(T& view) { + memset(&view[0], 0, view.size() * sizeof(typename T::value_type)); +} + +// Same as `ClearSamples()` above but allows for clearing only the first +// `sample_count` number of samples. 
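These free functions are what make it possible to write a single template over any of the view types. An illustrative sketch, not part of the patch; the helper name is made up for the example.

// Illustrative sketch (not part of the patch): generic code written against
// the free functions above works for any of the view types.
#include <cstddef>
#include <cstdint>

#include "api/audio/audio_view.h"

template <typename View>
size_t TotalSampleCount(const View& view) {
  // NumChannels() and SamplesPerChannel() dispatch to the overload matching
  // the concrete view type (MonoView, InterleavedView or DeinterleavedView).
  return webrtc::NumChannels(view) * webrtc::SamplesPerChannel(view);
}

void ClearFirstHalf() {
  int16_t buf[960] = {};
  webrtc::InterleavedView<int16_t> stereo(buf, /*samples_per_channel=*/480,
                                          /*num_channels=*/2);
  // Clear only the first half of the 960 total samples using the partial
  // ClearSamples() overload above.
  webrtc::ClearSamples(stereo, TotalSampleCount(stereo) / 2);
}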
+template +void ClearSamples(T& view, size_t sample_count) { + RTC_DCHECK_LE(sample_count, view.size()); + memset(&view[0], 0, sample_count * sizeof(typename T::value_type)); +} + +} // namespace webrtc + +#endif // API_AUDIO_AUDIO_VIEW_H_ diff --git a/modules/audio_processing/audio_processing_builder_impl.cc b/api/audio/builtin_audio_processing_builder.cc similarity index 50% rename from modules/audio_processing/audio_processing_builder_impl.cc rename to api/audio/builtin_audio_processing_builder.cc index a246448c26..50c534138e 100644 --- a/modules/audio_processing/audio_processing_builder_impl.cc +++ b/api/audio/builtin_audio_processing_builder.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -7,28 +7,25 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ +#include "api/audio/builtin_audio_processing_builder.h" -#include +#include +#include "absl/base/nullability.h" +#include "api/audio/audio_processing.h" +#include "api/environment/environment.h" #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" #include "modules/audio_processing/audio_processing_impl.h" -#include "modules/audio_processing/include/audio_processing.h" namespace webrtc { -AudioProcessingBuilder::AudioProcessingBuilder() = default; -AudioProcessingBuilder::~AudioProcessingBuilder() = default; - -rtc::scoped_refptr AudioProcessingBuilder::Create() { -#ifdef WEBRTC_EXCLUDE_AUDIO_PROCESSING_MODULE - // Return a null pointer when the APM is excluded from the build. - return nullptr; -#else // WEBRTC_EXCLUDE_AUDIO_PROCESSING_MODULE - return rtc::make_ref_counted( - config_, std::move(capture_post_processing_), +absl_nullable scoped_refptr +BuiltinAudioProcessingBuilder::Build(const Environment& env) { + return make_ref_counted( + env, config_, std::move(capture_post_processing_), std::move(render_pre_processing_), std::move(echo_control_factory_), std::move(echo_detector_), std::move(capture_analyzer_)); -#endif } } // namespace webrtc diff --git a/modules/audio_processing/test/audio_processing_builder_for_testing.h b/api/audio/builtin_audio_processing_builder.h similarity index 57% rename from modules/audio_processing/test/audio_processing_builder_for_testing.h rename to api/audio/builtin_audio_processing_builder.h index e73706c1b6..eec0a06aa5 100644 --- a/modules/audio_processing/test/audio_processing_builder_for_testing.h +++ b/api/audio/builtin_audio_processing_builder.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,65 +8,69 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef MODULES_AUDIO_PROCESSING_TEST_AUDIO_PROCESSING_BUILDER_FOR_TESTING_H_ -#define MODULES_AUDIO_PROCESSING_TEST_AUDIO_PROCESSING_BUILDER_FOR_TESTING_H_ +#ifndef API_AUDIO_BUILTIN_AUDIO_PROCESSING_BUILDER_H_ +#define API_AUDIO_BUILTIN_AUDIO_PROCESSING_BUILDER_H_ -#include #include #include -#include -#include "modules/audio_processing/include/audio_processing.h" +#include "absl/base/nullability.h" +#include "api/audio/audio_processing.h" +#include "api/audio/echo_control.h" +#include "api/environment/environment.h" +#include "api/scoped_refptr.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { -// Facilitates building of AudioProcessingImp for the tests. -class AudioProcessingBuilderForTesting { +class RTC_EXPORT BuiltinAudioProcessingBuilder + : public AudioProcessingBuilderInterface { public: - AudioProcessingBuilderForTesting(); - AudioProcessingBuilderForTesting(const AudioProcessingBuilderForTesting&) = - delete; - AudioProcessingBuilderForTesting& operator=( - const AudioProcessingBuilderForTesting&) = delete; - ~AudioProcessingBuilderForTesting(); + BuiltinAudioProcessingBuilder() = default; + explicit BuiltinAudioProcessingBuilder(const AudioProcessing::Config& config) + : config_(config) {} + BuiltinAudioProcessingBuilder(const BuiltinAudioProcessingBuilder&) = delete; + BuiltinAudioProcessingBuilder& operator=( + const BuiltinAudioProcessingBuilder&) = delete; + ~BuiltinAudioProcessingBuilder() override = default; // Sets the APM configuration. - AudioProcessingBuilderForTesting& SetConfig( + BuiltinAudioProcessingBuilder& SetConfig( const AudioProcessing::Config& config) { config_ = config; return *this; } // Sets the echo controller factory to inject when APM is created. - AudioProcessingBuilderForTesting& SetEchoControlFactory( + BuiltinAudioProcessingBuilder& SetEchoControlFactory( std::unique_ptr echo_control_factory) { echo_control_factory_ = std::move(echo_control_factory); return *this; } // Sets the capture post-processing sub-module to inject when APM is created. - AudioProcessingBuilderForTesting& SetCapturePostProcessing( + BuiltinAudioProcessingBuilder& SetCapturePostProcessing( std::unique_ptr capture_post_processing) { capture_post_processing_ = std::move(capture_post_processing); return *this; } // Sets the render pre-processing sub-module to inject when APM is created. - AudioProcessingBuilderForTesting& SetRenderPreProcessing( + BuiltinAudioProcessingBuilder& SetRenderPreProcessing( std::unique_ptr render_pre_processing) { render_pre_processing_ = std::move(render_pre_processing); return *this; } // Sets the echo detector to inject when APM is created. - AudioProcessingBuilderForTesting& SetEchoDetector( - rtc::scoped_refptr echo_detector) { + BuiltinAudioProcessingBuilder& SetEchoDetector( + scoped_refptr echo_detector) { echo_detector_ = std::move(echo_detector); return *this; } // Sets the capture analyzer sub-module to inject when APM is created. - AudioProcessingBuilderForTesting& SetCaptureAnalyzer( + BuiltinAudioProcessingBuilder& SetCaptureAnalyzer( std::unique_ptr capture_analyzer) { capture_analyzer_ = std::move(capture_analyzer); return *this; @@ -74,22 +78,19 @@ class AudioProcessingBuilderForTesting { // Creates an APM instance with the specified config or the default one if // unspecified. Injects the specified components transferring the ownership - // to the newly created APM instance - i.e., except for the config, the - // builder is reset to its initial state. 
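Usage-wise, the new builder mirrors the old one but takes the Environment at Build() time. A minimal sketch, not part of the patch, reusing the Environment factory and EchoCanceller3Factory types that appear elsewhere in this change; the enabled flag is just an example tweak.

// Illustrative sketch (not part of the patch): building an APM with an
// injected AEC3 factory.
#include <memory>

#include "api/audio/audio_processing.h"
#include "api/audio/builtin_audio_processing_builder.h"
#include "api/audio/echo_canceller3_factory.h"
#include "api/environment/environment_factory.h"
#include "api/scoped_refptr.h"

webrtc::scoped_refptr<webrtc::AudioProcessing> BuildApm() {
  webrtc::AudioProcessing::Config config;
  config.gain_controller1.enabled = true;
  // Build() may return nullptr, e.g. when APM is excluded from the build.
  return webrtc::BuiltinAudioProcessingBuilder(config)
      .SetEchoControlFactory(std::make_unique<webrtc::EchoCanceller3Factory>())
      .Build(webrtc::CreateEnvironment());
}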
- rtc::scoped_refptr Create(); + // to the newly created APM instance. + absl_nullable scoped_refptr Build( + const Environment& env) override; private: - // Transfers the ownership to a non-testing builder. - void TransferOwnershipsToBuilder(AudioProcessingBuilder* builder); - AudioProcessing::Config config_; std::unique_ptr echo_control_factory_; std::unique_ptr capture_post_processing_; std::unique_ptr render_pre_processing_; - rtc::scoped_refptr echo_detector_; + scoped_refptr echo_detector_; std::unique_ptr capture_analyzer_; }; } // namespace webrtc -#endif // MODULES_AUDIO_PROCESSING_TEST_AUDIO_PROCESSING_BUILDER_FOR_TESTING_H_ +#endif // API_AUDIO_BUILTIN_AUDIO_PROCESSING_BUILDER_H_ diff --git a/api/audio/builtin_audio_processing_builder_unittest.cc b/api/audio/builtin_audio_processing_builder_unittest.cc new file mode 100644 index 0000000000..4a3b953527 --- /dev/null +++ b/api/audio/builtin_audio_processing_builder_unittest.cc @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio/builtin_audio_processing_builder.h" + +#include "api/audio/audio_processing.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/scoped_refptr.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { + +using ::testing::NotNull; + +TEST(BuiltinAudioProcessingBuilderTest, CreatesWithDefaults) { + EXPECT_THAT(BuiltinAudioProcessingBuilder().Build(CreateEnvironment()), + NotNull()); +} + +TEST(BuiltinAudioProcessingBuilderTest, CreatesWithConfig) { + const Environment env = CreateEnvironment(); + AudioProcessing::Config config; + // Change a field to make config different to default one. + config.gain_controller1.enabled = !config.gain_controller1.enabled; + + scoped_refptr ap1 = + BuiltinAudioProcessingBuilder(config).Build(env); + ASSERT_THAT(ap1, NotNull()); + EXPECT_EQ(ap1->GetConfig().gain_controller1.enabled, + config.gain_controller1.enabled); + + scoped_refptr ap2 = + BuiltinAudioProcessingBuilder().SetConfig(config).Build(env); + ASSERT_THAT(ap2, NotNull()); + EXPECT_EQ(ap2->GetConfig().gain_controller1.enabled, + config.gain_controller1.enabled); +} + +} // namespace webrtc diff --git a/api/audio/echo_canceller3_config.cc b/api/audio/echo_canceller3_config.cc index 0224c712b4..973e9a7b56 100644 --- a/api/audio/echo_canceller3_config.cc +++ b/api/audio/echo_canceller3_config.cc @@ -11,6 +11,7 @@ #include #include +#include #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_minmax.h" @@ -18,7 +19,7 @@ namespace webrtc { namespace { bool Limit(float* value, float min, float max) { - float clamped = rtc::SafeClamp(*value, min, max); + float clamped = SafeClamp(*value, min, max); clamped = std::isfinite(clamped) ? 
clamped : min; bool res = *value == clamped; *value = clamped; @@ -26,14 +27,14 @@ bool Limit(float* value, float min, float max) { } bool Limit(size_t* value, size_t min, size_t max) { - size_t clamped = rtc::SafeClamp(*value, min, max); + size_t clamped = SafeClamp(*value, min, max); bool res = *value == clamped; *value = clamped; return res; } bool Limit(int* value, int min, int max) { - int clamped = rtc::SafeClamp(*value, min, max); + int clamped = SafeClamp(*value, min, max); bool res = *value == clamped; *value = clamped; return res; @@ -275,4 +276,20 @@ bool EchoCanceller3Config::Validate(EchoCanceller3Config* config) { return res; } + +EchoCanceller3Config EchoCanceller3Config::CreateDefaultMultichannelConfig() { + EchoCanceller3Config cfg; + // Use shorter and more rapidly adapting coarse filter to compensate for + // the increased number of total filter parameters to adapt. + cfg.filter.coarse.length_blocks = 11; + cfg.filter.coarse.rate = 0.95f; + cfg.filter.coarse_initial.length_blocks = 11; + cfg.filter.coarse_initial.rate = 0.95f; + + // Use more conservative suppressor behavior for non-nearend speech. + cfg.suppressor.normal_tuning.max_dec_factor_lf = 0.35f; + cfg.suppressor.normal_tuning.max_inc_factor = 1.5f; + return cfg; +} + } // namespace webrtc diff --git a/api/audio/echo_canceller3_config.h b/api/audio/echo_canceller3_config.h index 4b1c7fbc47..66681deb97 100644 --- a/api/audio/echo_canceller3_config.h +++ b/api/audio/echo_canceller3_config.h @@ -23,6 +23,9 @@ struct RTC_EXPORT EchoCanceller3Config { // ranges. Returns true if and only of the config did not need to be changed. static bool Validate(EchoCanceller3Config* config); + // Produces a default configuration for multichannel. + static EchoCanceller3Config CreateDefaultMultichannelConfig(); + EchoCanceller3Config(); EchoCanceller3Config(const EchoCanceller3Config& e); EchoCanceller3Config& operator=(const EchoCanceller3Config& other); diff --git a/api/audio/echo_canceller3_factory.cc b/api/audio/echo_canceller3_factory.cc index 284b117bea..6e3a54a783 100644 --- a/api/audio/echo_canceller3_factory.cc +++ b/api/audio/echo_canceller3_factory.cc @@ -10,23 +10,34 @@ #include "api/audio/echo_canceller3_factory.h" #include +#include +#include "absl/base/nullability.h" +#include "api/audio/echo_canceller3_config.h" +#include "api/audio/echo_control.h" +#include "api/environment/environment.h" #include "modules/audio_processing/aec3/echo_canceller3.h" namespace webrtc { EchoCanceller3Factory::EchoCanceller3Factory() {} -EchoCanceller3Factory::EchoCanceller3Factory(const EchoCanceller3Config& config) - : config_(config) {} +EchoCanceller3Factory::EchoCanceller3Factory(const EchoCanceller3Config config) + : config_(config), multichannel_config_(std::nullopt) {} -std::unique_ptr EchoCanceller3Factory::Create( +EchoCanceller3Factory::EchoCanceller3Factory( + const EchoCanceller3Config config, + std::optional multichannel_config) + : config_(config), multichannel_config_(multichannel_config) {} + +absl_nonnull std::unique_ptr EchoCanceller3Factory::Create( + const Environment& env, int sample_rate_hz, int num_render_channels, int num_capture_channels) { - return std::make_unique( - config_, /*multichannel_config=*/absl::nullopt, sample_rate_hz, - num_render_channels, num_capture_channels); + return std::make_unique(env, config_, multichannel_config_, + sample_rate_hz, num_render_channels, + num_capture_channels); } } // namespace webrtc diff --git a/api/audio/echo_canceller3_factory.h b/api/audio/echo_canceller3_factory.h 
index 8b5380057b..e26f298930 100644 --- a/api/audio/echo_canceller3_factory.h +++ b/api/audio/echo_canceller3_factory.h @@ -12,9 +12,12 @@ #define API_AUDIO_ECHO_CANCELLER3_FACTORY_H_ #include +#include +#include "absl/base/nullability.h" #include "api/audio/echo_canceller3_config.h" #include "api/audio/echo_control.h" +#include "api/environment/environment.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -26,15 +29,24 @@ class RTC_EXPORT EchoCanceller3Factory : public EchoControlFactory { // Factory producing EchoCanceller3 instances with the specified // configuration. - explicit EchoCanceller3Factory(const EchoCanceller3Config& config); + explicit EchoCanceller3Factory(const EchoCanceller3Config config); + + // Factory producing EchoCanceller3 instances with the specified + // configuration and multichannel configuration. + EchoCanceller3Factory( + const EchoCanceller3Config config, + std::optional multichannel_config); // Creates an EchoCanceller3 with a specified channel count and sampling rate. - std::unique_ptr Create(int sample_rate_hz, - int num_render_channels, - int num_capture_channels) override; + absl_nonnull std::unique_ptr Create( + const Environment& env, + int sample_rate_hz, + int num_render_channels, + int num_capture_channels) override; private: const EchoCanceller3Config config_; + const std::optional multichannel_config_; }; } // namespace webrtc diff --git a/api/audio/echo_control.h b/api/audio/echo_control.h index 74fbc27b12..d1a1faad5d 100644 --- a/api/audio/echo_control.h +++ b/api/audio/echo_control.h @@ -13,7 +13,8 @@ #include -#include "rtc_base/checks.h" +#include "absl/base/nullability.h" +#include "api/environment/environment.h" namespace webrtc { @@ -53,7 +54,7 @@ class EchoControl { // resulting output is anyway not used, for instance when the endpoint is // muted. // TODO(b/177830919): Make pure virtual. - virtual void SetCaptureOutputUsage(bool capture_output_used) {} + virtual void SetCaptureOutputUsage(bool /* capture_output_used */) {} // Returns wheter the signal is altered. virtual bool ActiveProcessing() const = 0; @@ -64,11 +65,13 @@ class EchoControl { // Interface for a factory that creates EchoControllers. 
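An illustrative sketch, not part of the patch, wiring the new two-argument EchoCanceller3Factory constructor together with CreateDefaultMultichannelConfig() added above; the mono config is left at its defaults for the example.

// Illustrative sketch (not part of the patch): an AEC3 factory with a
// separate multichannel tuning.
#include <memory>
#include <optional>

#include "api/audio/echo_canceller3_config.h"
#include "api/audio/echo_canceller3_factory.h"
#include "api/audio/echo_control.h"

std::unique_ptr<webrtc::EchoControlFactory> MakeAec3Factory() {
  webrtc::EchoCanceller3Config mono_config;  // Default tuning.
  // Tuning applied when more than one channel is processed.
  std::optional<webrtc::EchoCanceller3Config> multichannel_config =
      webrtc::EchoCanceller3Config::CreateDefaultMultichannelConfig();
  return std::make_unique<webrtc::EchoCanceller3Factory>(mono_config,
                                                         multichannel_config);
}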
class EchoControlFactory { public: - virtual std::unique_ptr Create(int sample_rate_hz, - int num_render_channels, - int num_capture_channels) = 0; - virtual ~EchoControlFactory() = default; + + virtual absl_nonnull std::unique_ptr Create( + const Environment& env, + int sample_rate_hz, + int num_render_channels, + int num_capture_channels) = 0; }; } // namespace webrtc diff --git a/api/audio/echo_detector_creator.cc b/api/audio/echo_detector_creator.cc index 15b7c51dca..bb807ed4b3 100644 --- a/api/audio/echo_detector_creator.cc +++ b/api/audio/echo_detector_creator.cc @@ -9,13 +9,15 @@ */ #include "api/audio/echo_detector_creator.h" +#include "api/audio/audio_processing.h" #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" #include "modules/audio_processing/residual_echo_detector.h" namespace webrtc { -rtc::scoped_refptr CreateEchoDetector() { - return rtc::make_ref_counted(); +scoped_refptr CreateEchoDetector() { + return make_ref_counted(); } } // namespace webrtc diff --git a/api/audio/echo_detector_creator.h b/api/audio/echo_detector_creator.h index 5ba171de97..8f260ae0da 100644 --- a/api/audio/echo_detector_creator.h +++ b/api/audio/echo_detector_creator.h @@ -11,15 +11,15 @@ #ifndef API_AUDIO_ECHO_DETECTOR_CREATOR_H_ #define API_AUDIO_ECHO_DETECTOR_CREATOR_H_ +#include "api/audio/audio_processing.h" #include "api/scoped_refptr.h" -#include "modules/audio_processing/include/audio_processing.h" namespace webrtc { // Returns an instance of the WebRTC implementation of a residual echo detector. -// It can be provided to the webrtc::AudioProcessingBuilder to obtain the +// It can be provided to the webrtc::BuiltinAudioProcessingBuilder to obtain the // usual residual echo metrics. -rtc::scoped_refptr CreateEchoDetector(); +scoped_refptr CreateEchoDetector(); } // namespace webrtc diff --git a/api/audio/test/BUILD.gn b/api/audio/test/BUILD.gn index dfe8c32f80..b1060faae3 100644 --- a/api/audio/test/BUILD.gn +++ b/api/audio/test/BUILD.gn @@ -17,13 +17,15 @@ if (rtc_include_tests) { testonly = true sources = [ "audio_frame_unittest.cc", - "echo_canceller3_config_json_unittest.cc", + "audio_view_unittest.cc", "echo_canceller3_config_unittest.cc", ] deps = [ "..:aec3_config", - "..:aec3_config_json", "..:audio_frame_api", + "../..:array_view", + "../../../modules/audio_processing:aec3_config_json", + "../../../rtc_base:checks", "../../../test:test_support", ] } diff --git a/api/audio/test/audio_frame_unittest.cc b/api/audio/test/audio_frame_unittest.cc index dbf45ceabc..4397716a45 100644 --- a/api/audio/test/audio_frame_unittest.cc +++ b/api/audio/test/audio_frame_unittest.cc @@ -13,16 +13,37 @@ #include #include // memcmp +#include "api/audio/audio_view.h" +#include "api/audio/channel_layout.h" +#include "rtc_base/checks.h" #include "test/gtest.h" namespace webrtc { namespace { +bool AllSamplesAre(int16_t sample, InterleavedView samples) { + for (const auto s : samples) { + if (s != sample) { + return false; + } + } + return true; +} + bool AllSamplesAre(int16_t sample, const AudioFrame& frame) { - const int16_t* frame_data = frame.data(); - for (size_t i = 0; i < frame.max_16bit_samples(); i++) { - if (frame_data[i] != sample) { + return AllSamplesAre(sample, frame.data_view()); +} + +// Checks the values of samples in the AudioFrame buffer, regardless of whether +// they're valid or not, and disregard the `muted()` state of the frame. +// I.e. 
use `max_16bit_samples()` instead of `data_view().size()` +bool AllBufferSamplesAre(int16_t sample, const AudioFrame& frame) { + auto view = frame.data_view(); + RTC_DCHECK(!view.empty()); + const int16_t* data = &view.data()[0]; + for (size_t i = 0; i < frame.max_16bit_samples(); ++i) { + if (data[i] != sample) { return false; } } @@ -38,29 +59,47 @@ constexpr size_t kSamplesPerChannel = kSampleRateHz / 100; } // namespace -TEST(AudioFrameTest, FrameStartsMuted) { +TEST(AudioFrameTest, FrameStartsZeroedAndMuted) { AudioFrame frame; EXPECT_TRUE(frame.muted()); + EXPECT_TRUE(frame.data_view().empty()); EXPECT_TRUE(AllSamplesAre(0, frame)); } +// TODO: b/335805780 - Delete test when `mutable_data()` returns ArrayView. +TEST(AudioFrameTest, UnmutedFrameIsInitiallyZeroedLegacy) { + AudioFrame frame(kSampleRateHz, kNumChannelsMono, CHANNEL_LAYOUT_NONE); + frame.mutable_data(); + EXPECT_FALSE(frame.muted()); + EXPECT_TRUE(AllSamplesAre(0, frame)); + EXPECT_TRUE(AllBufferSamplesAre(0, frame)); +} + TEST(AudioFrameTest, UnmutedFrameIsInitiallyZeroed) { AudioFrame frame; - frame.mutable_data(); + auto data = frame.mutable_data(kSamplesPerChannel, kNumChannelsMono); EXPECT_FALSE(frame.muted()); + EXPECT_TRUE(IsMono(data)); + EXPECT_EQ(frame.data_view().size(), kSamplesPerChannel); + EXPECT_EQ(SamplesPerChannel(data), kSamplesPerChannel); EXPECT_TRUE(AllSamplesAre(0, frame)); } TEST(AudioFrameTest, MutedFrameBufferIsZeroed) { AudioFrame frame; - int16_t* frame_data = frame.mutable_data(); + int16_t* frame_data = + frame.mutable_data(kSamplesPerChannel, kNumChannelsMono).begin(); + EXPECT_FALSE(frame.muted()); + // Fill the reserved buffer with non-zero data. for (size_t i = 0; i < frame.max_16bit_samples(); i++) { frame_data[i] = 17; } ASSERT_TRUE(AllSamplesAre(17, frame)); + ASSERT_TRUE(AllBufferSamplesAre(17, frame)); frame.Mute(); EXPECT_TRUE(frame.muted()); EXPECT_TRUE(AllSamplesAre(0, frame)); + ASSERT_TRUE(AllBufferSamplesAre(0, frame)); } TEST(AudioFrameTest, UpdateFrameMono) { @@ -95,11 +134,17 @@ TEST(AudioFrameTest, UpdateFrameMultiChannel) { EXPECT_EQ(kSamplesPerChannel, frame.samples_per_channel()); EXPECT_EQ(kNumChannelsStereo, frame.num_channels()); EXPECT_EQ(CHANNEL_LAYOUT_STEREO, frame.channel_layout()); + EXPECT_TRUE(frame.muted()); - frame.UpdateFrame(kTimestamp, nullptr /* data */, kSamplesPerChannel, + // Initialize the frame with valid `kNumChannels5_1` data to make sure we + // get an unmuted frame with valid samples. 
+ int16_t samples[kSamplesPerChannel * kNumChannels5_1] = {17}; + frame.UpdateFrame(kTimestamp, samples /* data */, kSamplesPerChannel, kSampleRateHz, AudioFrame::kPLC, AudioFrame::kVadActive, kNumChannels5_1); + EXPECT_FALSE(frame.muted()); EXPECT_EQ(kSamplesPerChannel, frame.samples_per_channel()); + EXPECT_EQ(kSamplesPerChannel * kNumChannels5_1, frame.data_view().size()); EXPECT_EQ(kNumChannels5_1, frame.num_channels()); EXPECT_EQ(CHANNEL_LAYOUT_5_1, frame.channel_layout()); } @@ -121,6 +166,7 @@ TEST(AudioFrameTest, CopyFrom) { EXPECT_EQ(frame2.vad_activity_, frame1.vad_activity_); EXPECT_EQ(frame2.num_channels_, frame1.num_channels_); + EXPECT_EQ(frame2.data_view().size(), frame1.data_view().size()); EXPECT_EQ(frame2.muted(), frame1.muted()); EXPECT_EQ(0, memcmp(frame2.data(), frame1.data(), sizeof(samples))); diff --git a/api/audio/test/audio_view_unittest.cc b/api/audio/test/audio_view_unittest.cc new file mode 100644 index 0000000000..1d3f5f17b5 --- /dev/null +++ b/api/audio/test/audio_view_unittest.cc @@ -0,0 +1,193 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio/audio_view.h" + +#include +#include +#include + +#include "api/array_view.h" +#include "test/gtest.h" + +namespace webrtc { + +namespace { + +constexpr const float kFloatStepIncrease = 0.5f; +constexpr const int16_t kIntStepIncrease = 1; + +template +void Increment(float& t) { + t += kFloatStepIncrease; +} + +template +void Increment(int16_t& t) { + t += kIntStepIncrease; +} + +// Fills a given buffer with monotonically increasing values. +template +void FillBuffer(ArrayView buffer) { + T value = {}; + for (T& t : buffer) { + Increment(value); + t = value; + } +} + +} // namespace + +TEST(AudioViewTest, MonoView) { + const size_t kArraySize = 100u; + int16_t arr[kArraySize]; + FillBuffer(ArrayView(arr)); + + MonoView mono(arr); + MonoView const_mono(arr); + EXPECT_EQ(mono.size(), kArraySize); + EXPECT_EQ(const_mono.size(), kArraySize); + EXPECT_EQ(&mono[0], &const_mono[0]); + EXPECT_EQ(mono[0], arr[0]); + + EXPECT_EQ(1u, NumChannels(mono)); + EXPECT_EQ(1u, NumChannels(const_mono)); + EXPECT_EQ(100u, SamplesPerChannel(mono)); + EXPECT_TRUE(IsMono(mono)); + EXPECT_TRUE(IsMono(const_mono)); +} + +TEST(AudioViewTest, InterleavedView) { + const size_t kArraySize = 100u; + int16_t arr[kArraySize]; + FillBuffer(ArrayView(arr)); + + InterleavedView interleaved(arr, kArraySize, 1); + EXPECT_EQ(NumChannels(interleaved), 1u); + EXPECT_TRUE(IsMono(interleaved)); + EXPECT_EQ(SamplesPerChannel(interleaved), kArraySize); + EXPECT_EQ(interleaved.AsMono().size(), kArraySize); + EXPECT_EQ(&interleaved.AsMono()[0], &arr[0]); + EXPECT_EQ(interleaved.AsMono(), interleaved.data()); + + // Basic iterator test. 
+ int i = 0; + for (auto s : interleaved) { + EXPECT_EQ(s, arr[i++]); + } + + interleaved = InterleavedView(arr, kArraySize / 2, 2); + InterleavedView const_interleaved(arr, 50, 2); + EXPECT_EQ(NumChannels(interleaved), 2u); + EXPECT_EQ(NumChannels(const_interleaved), 2u); + EXPECT_EQ(&const_interleaved[0], &interleaved[0]); + EXPECT_TRUE(!IsMono(interleaved)); + EXPECT_TRUE(!IsMono(const_interleaved)); + EXPECT_EQ(SamplesPerChannel(interleaved), 50u); + EXPECT_EQ(SamplesPerChannel(const_interleaved), 50u); + + interleaved = InterleavedView(arr, 4); + EXPECT_EQ(NumChannels(interleaved), 4u); + InterleavedView const_interleaved2(interleaved); + EXPECT_EQ(NumChannels(const_interleaved2), 4u); + EXPECT_EQ(SamplesPerChannel(interleaved), 25u); + + const_interleaved2 = interleaved; + EXPECT_EQ(NumChannels(const_interleaved2), 4u); + EXPECT_EQ(&const_interleaved2[0], &interleaved[0]); +} + +TEST(AudioViewTest, DeinterleavedView) { + const size_t kArraySize = 100u; + int16_t arr[kArraySize] = {}; + DeinterleavedView di(arr, 10, 10); + DeinterleavedView const_di(arr, 10, 10); + EXPECT_EQ(NumChannels(di), 10u); + EXPECT_EQ(SamplesPerChannel(di), 10u); + EXPECT_TRUE(!IsMono(di)); + EXPECT_EQ(const_di[5][1], di[5][1]); // Spot check. + // For deinterleaved views, although they may hold multiple channels, + // the AsMono() method is still available and returns the first channel + // in the view. + auto mono_ch = di.AsMono(); + EXPECT_EQ(NumChannels(mono_ch), 1u); + EXPECT_EQ(SamplesPerChannel(mono_ch), 10u); + EXPECT_EQ(di[0], mono_ch); // first channel should be same as mono. + + di = DeinterleavedView(arr, 50, 2); + // Test assignment. + const_di = di; + EXPECT_EQ(&di.AsMono()[0], &const_di.AsMono()[0]); + + // Access the second channel in the deinterleaved view. + // The start of the second channel should be directly after the first channel. + // The memory width of each channel is held by the `stride()` member which + // by default is the same value as samples per channel. + mono_ch = di[1]; + EXPECT_EQ(SamplesPerChannel(mono_ch), 50u); + EXPECT_EQ(&mono_ch[0], &arr[di.samples_per_channel()]); +} + +TEST(AudioViewTest, CopySamples) { + const size_t kArraySize = 100u; + int16_t source_arr[kArraySize] = {}; + int16_t dest_arr[kArraySize] = {}; + FillBuffer(ArrayView(source_arr)); + + InterleavedView source(source_arr, 2); + InterleavedView destination(dest_arr, 2); + + static_assert(IsInterleavedView(source) == IsInterleavedView(destination), + ""); + + // Values in `dest_arr` should all be 0, none of the values in `source_arr` + // should be 0. 
+ for (size_t i = 0; i < kArraySize; ++i) { + ASSERT_EQ(dest_arr[i], 0); + ASSERT_NE(source_arr[i], 0); + } + + CopySamples(destination, source); + for (size_t i = 0; i < kArraySize; ++i) { + ASSERT_EQ(dest_arr[i], source_arr[i]) << "i == " << i; + } +} + +TEST(AudioViewTest, ClearSamples) { + std::array samples = {}; + FillBuffer(ArrayView(samples)); + ASSERT_NE(samples[0], 0); + ClearSamples(samples); + for (const auto s : samples) { + ASSERT_EQ(s, 0); + } + + std::array samples_f = {}; + FillBuffer(ArrayView(samples_f)); + ASSERT_NE(samples_f[0], 0.0); + ClearSamples(samples_f); + for (const auto s : samples_f) { + ASSERT_EQ(s, 0.0); + } + + // Clear only half of the buffer + FillBuffer(ArrayView(samples)); + const auto half_way = samples.size() / 2; + ClearSamples(samples, half_way); + for (size_t i = 0u; i < samples.size(); ++i) { + if (i < half_way) { + ASSERT_EQ(samples[i], 0); + } else { + ASSERT_NE(samples[i], 0); + } + } +} +} // namespace webrtc diff --git a/api/audio/test/echo_canceller3_config_unittest.cc b/api/audio/test/echo_canceller3_config_unittest.cc index 91312a0f40..da0255806e 100644 --- a/api/audio/test/echo_canceller3_config_unittest.cc +++ b/api/audio/test/echo_canceller3_config_unittest.cc @@ -10,7 +10,7 @@ #include "api/audio/echo_canceller3_config.h" -#include "api/audio/echo_canceller3_config_json.h" +#include "modules/audio_processing/test/echo_canceller3_config_json.h" #include "test/gtest.h" namespace webrtc { diff --git a/api/audio_codecs/BUILD.gn b/api/audio_codecs/BUILD.gn index 82ed31a5da..492a44aa2f 100644 --- a/api/audio_codecs/BUILD.gn +++ b/api/audio_codecs/BUILD.gn @@ -32,20 +32,23 @@ rtc_library("audio_codecs_api") { "..:array_view", "..:bitrate_allocation", "..:make_ref_counted", + "..:ref_count", "..:scoped_refptr", - "../../api:field_trials_view", + "../../api:rtp_parameters", "../../rtc_base:buffer", "../../rtc_base:checks", "../../rtc_base:event_tracer", "../../rtc_base:refcount", "../../rtc_base:sanitizer", + "../../rtc_base:stringutils", "../../rtc_base/system:rtc_export", + "../environment", + "../units:data_rate", "../units:time_delta", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/base:nullability", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -64,12 +67,6 @@ rtc_library("builtin_audio_decoder_factory") { "g722:audio_decoder_g722", ] defines = [] - if (rtc_include_ilbc) { - deps += [ "ilbc:audio_decoder_ilbc" ] - defines += [ "WEBRTC_USE_BUILTIN_ILBC=1" ] - } else { - defines += [ "WEBRTC_USE_BUILTIN_ILBC=0" ] - } if (rtc_include_opus) { deps += [ "opus:audio_decoder_multiopus", @@ -90,20 +87,16 @@ rtc_library("builtin_audio_encoder_factory") { ] deps = [ ":audio_codecs_api", + "..:field_trials_view", "..:scoped_refptr", "L16:audio_encoder_L16", "g711:audio_encoder_g711", "g722:audio_encoder_g722", ] defines = [] - if (rtc_include_ilbc) { - deps += [ "ilbc:audio_encoder_ilbc" ] - defines += [ "WEBRTC_USE_BUILTIN_ILBC=1" ] - } else { - defines += [ "WEBRTC_USE_BUILTIN_ILBC=0" ] - } if (rtc_include_opus) { deps += [ + "..:field_trials_view", "opus:audio_encoder_multiopus", "opus:audio_encoder_opus", ] @@ -137,6 +130,7 @@ rtc_library("opus_audio_encoder_factory") { ] deps = [ ":audio_codecs_api", + "..:field_trials_view", "..:scoped_refptr", "opus:audio_encoder_multiopus", "opus:audio_encoder_opus", diff --git a/api/audio_codecs/L16/BUILD.gn b/api/audio_codecs/L16/BUILD.gn index 
41e9eb42d8..8ce7122cec 100644 --- a/api/audio_codecs/L16/BUILD.gn +++ b/api/audio_codecs/L16/BUILD.gn @@ -23,14 +23,12 @@ rtc_library("audio_encoder_L16") { "..:audio_codecs_api", "../../../api:field_trials_view", "../../../modules/audio_coding:pcm16b", + "../../../rtc_base:checks", "../../../rtc_base:safe_conversions", "../../../rtc_base:safe_minmax", "../../../rtc_base:stringutils", "../../../rtc_base/system:rtc_export", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -47,9 +45,6 @@ rtc_library("audio_decoder_L16") { "../../../modules/audio_coding:pcm16b", "../../../rtc_base:safe_conversions", "../../../rtc_base/system:rtc_export", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } diff --git a/api/audio_codecs/L16/audio_decoder_L16.cc b/api/audio_codecs/L16/audio_decoder_L16.cc index a03abe26f7..e6e1b243b1 100644 --- a/api/audio_codecs/L16/audio_decoder_L16.cc +++ b/api/audio_codecs/L16/audio_decoder_L16.cc @@ -11,23 +11,29 @@ #include "api/audio_codecs/L16/audio_decoder_L16.h" #include +#include +#include #include "absl/strings/match.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" #include "modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h" #include "modules/audio_coding/codecs/pcm16b/pcm16b_common.h" #include "rtc_base/numerics/safe_conversions.h" namespace webrtc { -absl::optional AudioDecoderL16::SdpToConfig( +std::optional AudioDecoderL16::SdpToConfig( const SdpAudioFormat& format) { Config config; config.sample_rate_hz = format.clockrate_hz; - config.num_channels = rtc::checked_cast(format.num_channels); + config.num_channels = checked_cast(format.num_channels); if (absl::EqualsIgnoreCase(format.name, "L16") && config.IsOk()) { return config; } - return absl::nullopt; + return std::nullopt; } void AudioDecoderL16::AppendSupportedDecoders( @@ -37,8 +43,8 @@ void AudioDecoderL16::AppendSupportedDecoders( std::unique_ptr AudioDecoderL16::MakeAudioDecoder( const Config& config, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { + std::optional /*codec_pair_id*/, + const FieldTrialsView* /* field_trials */) { if (!config.IsOk()) { return nullptr; } diff --git a/api/audio_codecs/L16/audio_decoder_L16.h b/api/audio_codecs/L16/audio_decoder_L16.h index 5a01b7dc01..339527f964 100644 --- a/api/audio_codecs/L16/audio_decoder_L16.h +++ b/api/audio_codecs/L16/audio_decoder_L16.h @@ -12,9 +12,9 @@ #define API_AUDIO_CODECS_L16_AUDIO_DECODER_L16_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" @@ -36,11 +36,11 @@ struct RTC_EXPORT AudioDecoderL16 { int sample_rate_hz = 8000; int num_channels = 1; }; - static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static std::optional SdpToConfig(const SdpAudioFormat& audio_format); static void AppendSupportedDecoders(std::vector* specs); static std::unique_ptr MakeAudioDecoder( const Config& config, - absl::optional codec_pair_id = absl::nullopt, + std::optional codec_pair_id = std::nullopt, const FieldTrialsView* field_trials = nullptr); }; diff --git a/api/audio_codecs/L16/audio_encoder_L16.cc b/api/audio_codecs/L16/audio_encoder_L16.cc index 20259b9ad8..458f680ec4 100644 --- 
a/api/audio_codecs/L16/audio_encoder_L16.cc +++ b/api/audio_codecs/L16/audio_encoder_L16.cc @@ -10,37 +10,47 @@ #include "api/audio_codecs/L16/audio_encoder_L16.h" +#include + +#include #include +#include +#include #include "absl/strings/match.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" #include "modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h" #include "modules/audio_coding/codecs/pcm16b/pcm16b_common.h" +#include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/numerics/safe_minmax.h" #include "rtc_base/string_to_number.h" namespace webrtc { -absl::optional AudioEncoderL16::SdpToConfig( +std::optional AudioEncoderL16::SdpToConfig( const SdpAudioFormat& format) { - if (!rtc::IsValueInRangeForNumericType(format.num_channels)) { + if (!IsValueInRangeForNumericType(format.num_channels)) { RTC_DCHECK_NOTREACHED(); - return absl::nullopt; + return std::nullopt; } Config config; config.sample_rate_hz = format.clockrate_hz; - config.num_channels = rtc::dchecked_cast(format.num_channels); + config.num_channels = dchecked_cast(format.num_channels); auto ptime_iter = format.parameters.find("ptime"); if (ptime_iter != format.parameters.end()) { - const auto ptime = rtc::StringToNumber(ptime_iter->second); + const auto ptime = StringToNumber(ptime_iter->second); if (ptime && *ptime > 0) { - config.frame_size_ms = rtc::SafeClamp(10 * (*ptime / 10), 10, 60); + config.frame_size_ms = SafeClamp(10 * (*ptime / 10), 10, 60); } } if (absl::EqualsIgnoreCase(format.name, "L16") && config.IsOk()) { return config; } - return absl::nullopt; + return std::nullopt; } void AudioEncoderL16::AppendSupportedEncoders( @@ -51,16 +61,15 @@ void AudioEncoderL16::AppendSupportedEncoders( AudioCodecInfo AudioEncoderL16::QueryAudioEncoder( const AudioEncoderL16::Config& config) { RTC_DCHECK(config.IsOk()); - return {config.sample_rate_hz, - rtc::dchecked_cast(config.num_channels), + return {config.sample_rate_hz, dchecked_cast(config.num_channels), config.sample_rate_hz * config.num_channels * 16}; } std::unique_ptr AudioEncoderL16::MakeAudioEncoder( const AudioEncoderL16::Config& config, int payload_type, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { + std::optional /*codec_pair_id*/, + const FieldTrialsView* /* field_trials */) { AudioEncoderPcm16B::Config c; c.sample_rate_hz = config.sample_rate_hz; c.num_channels = config.num_channels; diff --git a/api/audio_codecs/L16/audio_encoder_L16.h b/api/audio_codecs/L16/audio_encoder_L16.h index 47509849de..a104a6306f 100644 --- a/api/audio_codecs/L16/audio_encoder_L16.h +++ b/api/audio_codecs/L16/audio_encoder_L16.h @@ -12,9 +12,9 @@ #define API_AUDIO_CODECS_L16_AUDIO_ENCODER_L16_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_format.h" @@ -39,13 +39,13 @@ struct RTC_EXPORT AudioEncoderL16 { int num_channels = 1; int frame_size_ms = 10; }; - static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static std::optional SdpToConfig(const SdpAudioFormat& audio_format); static void AppendSupportedEncoders(std::vector* specs); static AudioCodecInfo QueryAudioEncoder(const Config& config); static std::unique_ptr MakeAudioEncoder( const Config& config, int payload_type, - absl::optional codec_pair_id = absl::nullopt, + 
std::optional codec_pair_id = std::nullopt, const FieldTrialsView* field_trials = nullptr); }; diff --git a/api/audio_codecs/audio_decoder.cc b/api/audio_codecs/audio_decoder.cc index 0a131f15bc..83d09bb1e5 100644 --- a/api/audio_codecs/audio_decoder.cc +++ b/api/audio_codecs/audio_decoder.cc @@ -10,10 +10,15 @@ #include "api/audio_codecs/audio_decoder.h" +#include +#include #include +#include #include +#include #include "api/array_view.h" +#include "rtc_base/buffer.h" #include "rtc_base/checks.h" #include "rtc_base/sanitizer.h" #include "rtc_base/trace_event.h" @@ -24,7 +29,7 @@ namespace { class OldStyleEncodedFrame final : public AudioDecoder::EncodedAudioFrame { public: - OldStyleEncodedFrame(AudioDecoder* decoder, rtc::Buffer&& payload) + OldStyleEncodedFrame(AudioDecoder* decoder, Buffer&& payload) : decoder_(decoder), payload_(std::move(payload)) {} size_t Duration() const override { @@ -32,20 +37,20 @@ class OldStyleEncodedFrame final : public AudioDecoder::EncodedAudioFrame { return ret < 0 ? 0 : static_cast(ret); } - absl::optional Decode( - rtc::ArrayView decoded) const override { + std::optional Decode( + ArrayView decoded) const override { auto speech_type = AudioDecoder::kSpeech; const int ret = decoder_->Decode( payload_.data(), payload_.size(), decoder_->SampleRateHz(), decoded.size() * sizeof(int16_t), decoded.data(), &speech_type); - return ret < 0 ? absl::nullopt - : absl::optional( + return ret < 0 ? std::nullopt + : std::optional( {static_cast(ret), speech_type}); } private: AudioDecoder* const decoder_; - const rtc::Buffer payload_; + const Buffer payload_; }; } // namespace @@ -69,7 +74,7 @@ AudioDecoder::ParseResult& AudioDecoder::ParseResult::operator=( ParseResult&& b) = default; std::vector AudioDecoder::ParsePayload( - rtc::Buffer&& payload, + Buffer&& payload, uint32_t timestamp) { std::vector results; std::unique_ptr frame( @@ -85,7 +90,7 @@ int AudioDecoder::Decode(const uint8_t* encoded, int16_t* decoded, SpeechType* speech_type) { TRACE_EVENT0("webrtc", "AudioDecoder::Decode"); - rtc::MsanCheckInitialized(rtc::MakeArrayView(encoded, encoded_len)); + MsanCheckInitialized(MakeArrayView(encoded, encoded_len)); int duration = PacketDuration(encoded, encoded_len); if (duration >= 0 && duration * Channels() * sizeof(int16_t) > max_decoded_bytes) { @@ -102,7 +107,7 @@ int AudioDecoder::DecodeRedundant(const uint8_t* encoded, int16_t* decoded, SpeechType* speech_type) { TRACE_EVENT0("webrtc", "AudioDecoder::DecodeRedundant"); - rtc::MsanCheckInitialized(rtc::MakeArrayView(encoded, encoded_len)); + MsanCheckInitialized(MakeArrayView(encoded, encoded_len)); int duration = PacketDurationRedundant(encoded, encoded_len); if (duration >= 0 && duration * Channels() * sizeof(int16_t) > max_decoded_bytes) { @@ -125,30 +130,31 @@ bool AudioDecoder::HasDecodePlc() const { return false; } -size_t AudioDecoder::DecodePlc(size_t num_frames, int16_t* decoded) { +size_t AudioDecoder::DecodePlc(size_t /* num_frames */, + int16_t* /* decoded */) { return 0; } // TODO(bugs.webrtc.org/9676): Remove default implementation. 
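For reference, the ptime handling in AudioEncoderL16::SdpToConfig() earlier in this change rounds the SDP "ptime" value down to a multiple of 10 ms and clamps it to [10, 60] ms. A minimal sketch, not part of the patch, assuming SafeClamp is reachable as webrtc::SafeClamp now that the unqualified calls above live in namespace webrtc.

// Illustrative sketch (not part of the patch): the frame-size clamping used by
// the L16 encoder config.
#include "rtc_base/numerics/safe_minmax.h"

int FrameSizeMsFromPtime(int ptime) {
  // ptime = 23 -> 20 ms, ptime = 5 -> 10 ms, ptime = 120 -> 60 ms.
  return webrtc::SafeClamp(10 * (ptime / 10), 10, 60);
}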
void AudioDecoder::GeneratePlc(size_t /*requested_samples_per_channel*/, - rtc::BufferT* /*concealment_audio*/) {} + BufferT* /*concealment_audio*/) {} int AudioDecoder::ErrorCode() { return 0; } -int AudioDecoder::PacketDuration(const uint8_t* encoded, - size_t encoded_len) const { +int AudioDecoder::PacketDuration(const uint8_t* /* encoded */, + size_t /* encoded_len */) const { return kNotImplemented; } -int AudioDecoder::PacketDurationRedundant(const uint8_t* encoded, - size_t encoded_len) const { +int AudioDecoder::PacketDurationRedundant(const uint8_t* /* encoded */, + size_t /* encoded_len */) const { return kNotImplemented; } -bool AudioDecoder::PacketHasFec(const uint8_t* encoded, - size_t encoded_len) const { +bool AudioDecoder::PacketHasFec(const uint8_t* /* encoded */, + size_t /* encoded_len */) const { return false; } diff --git a/api/audio_codecs/audio_decoder.h b/api/audio_codecs/audio_decoder.h index 41138741bb..d2d5e7b30c 100644 --- a/api/audio_codecs/audio_decoder.h +++ b/api/audio_codecs/audio_decoder.h @@ -15,9 +15,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "rtc_base/buffer.h" @@ -57,12 +57,12 @@ class AudioDecoder { // Decodes this frame of audio and writes the result in `decoded`. // `decoded` must be large enough to store as many samples as indicated by a - // call to Duration() . On success, returns an absl::optional containing the + // call to Duration() . On success, returns an std::optional containing the // total number of samples across all channels, as well as whether the // decoder produced comfort noise or speech. On failure, returns an empty - // absl::optional. Decode may be called at most once per frame object. - virtual absl::optional Decode( - rtc::ArrayView decoded) const = 0; + // std::optional. Decode may be called at most once per frame object. + virtual std::optional Decode( + ArrayView decoded) const = 0; }; struct ParseResult { @@ -90,7 +90,7 @@ class AudioDecoder { // this call. The decoder is free to swap or move the data from the `payload` // buffer. `timestamp` is the input timestamp, in samples, corresponding to // the start of the payload. - virtual std::vector ParsePayload(rtc::Buffer&& payload, + virtual std::vector ParsePayload(Buffer&& payload, uint32_t timestamp); // TODO(bugs.webrtc.org/10098): The Decode and DecodeRedundant methods are @@ -140,7 +140,7 @@ class AudioDecoder { // implementations must provide their own, which can be a simple as a no-op. // TODO(bugs.webrtc.org/9676): Remove default implementation. virtual void GeneratePlc(size_t requested_samples_per_channel, - rtc::BufferT* concealment_audio); + BufferT* concealment_audio); // Resets the decoder state (empty buffers etc.). virtual void Reset() = 0; diff --git a/api/audio_codecs/audio_decoder_factory.h b/api/audio_codecs/audio_decoder_factory.h index 2811f6704b..775afafe33 100644 --- a/api/audio_codecs/audio_decoder_factory.h +++ b/api/audio_codecs/audio_decoder_factory.h @@ -12,18 +12,20 @@ #define API_AUDIO_CODECS_AUDIO_DECODER_FACTORY_H_ #include +#include #include -#include "absl/types/optional.h" +#include "absl/base/nullability.h" #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" -#include "rtc_base/ref_count.h" +#include "api/environment/environment.h" +#include "api/ref_count.h" namespace webrtc { // A factory that creates AudioDecoders. 
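Call sites migrate from MakeAudioDecoder(format, codec_pair_id) to Create(env, format, codec_pair_id). An illustrative sketch, not part of the patch; the Opus format parameters are arbitrary, and obtaining the factory from CreateBuiltinAudioDecoderFactory() is only one assumed option.

// Illustrative sketch (not part of the patch): creating a decoder through the
// renamed factory method with the new Environment parameter.
#include <memory>
#include <optional>

#include "api/audio_codecs/audio_decoder.h"
#include "api/audio_codecs/audio_decoder_factory.h"
#include "api/audio_codecs/audio_format.h"
#include "api/environment/environment_factory.h"
#include "api/scoped_refptr.h"

std::unique_ptr<webrtc::AudioDecoder> CreateOpusDecoder(
    webrtc::scoped_refptr<webrtc::AudioDecoderFactory> factory) {
  const webrtc::SdpAudioFormat format(
      "opus", 48000, 2, {{"minptime", "10"}, {"useinbandfec", "1"}});
  // Returns nullptr if the factory does not support the format.
  return factory->Create(webrtc::CreateEnvironment(), format,
                         /*codec_pair_id=*/std::nullopt);
}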
-class AudioDecoderFactory : public rtc::RefCountInterface { +class AudioDecoderFactory : public RefCountInterface { public: virtual std::vector GetSupportedDecoders() = 0; @@ -31,21 +33,22 @@ class AudioDecoderFactory : public rtc::RefCountInterface { // Create a new decoder instance. The `codec_pair_id` argument is used to link // encoders and decoders that talk to the same remote entity: if a - // AudioEncoderFactory::MakeAudioEncoder() and a - // AudioDecoderFactory::MakeAudioDecoder() call receive non-null IDs that - // compare equal, the factory implementations may assume that the encoder and - // decoder form a pair. (The intended use case for this is to set up - // communication between the AudioEncoder and AudioDecoder instances, which is - // needed for some codecs with built-in bandwidth adaptation.) + // AudioEncoderFactory::Create() and a AudioDecoderFactory::Create() call + // receive non-null IDs that compare equal, the factory implementations may + // assume that the encoder and decoder form a pair. (The intended use case for + // this is to set up communication between the AudioEncoder and AudioDecoder + // instances, which is needed for some codecs with built-in bandwidth + // adaptation.) // // Returns null if the format isn't supported. // // Note: Implementations need to be robust against combinations other than // one encoder, one decoder getting the same ID; such decoders must still // work. - virtual std::unique_ptr MakeAudioDecoder( + virtual absl_nullable std::unique_ptr Create( + const Environment& env, const SdpAudioFormat& format, - absl::optional codec_pair_id) = 0; + std::optional codec_pair_id) = 0; }; } // namespace webrtc diff --git a/api/audio_codecs/audio_decoder_factory_template.h b/api/audio_codecs/audio_decoder_factory_template.h index 7ea0c91372..e55d43b0f1 100644 --- a/api/audio_codecs/audio_decoder_factory_template.h +++ b/api/audio_codecs/audio_decoder_factory_template.h @@ -12,10 +12,17 @@ #define API_AUDIO_CODECS_AUDIO_DECODER_FACTORY_TEMPLATE_H_ #include +#include +#include +#include #include +#include "absl/base/nullability.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_decoder_factory.h" -#include "api/field_trials_view.h" +#include "api/audio_codecs/audio_format.h" +#include "api/environment/environment.h" #include "api/make_ref_counted.h" #include "api/scoped_refptr.h" @@ -29,16 +36,53 @@ struct Helper; // Base case: 0 template parameters. template <> struct Helper<> { - static void AppendSupportedDecoders(std::vector* specs) {} - static bool IsSupportedDecoder(const SdpAudioFormat& format) { return false; } - static std::unique_ptr MakeAudioDecoder( - const SdpAudioFormat& format, - absl::optional codec_pair_id, - const FieldTrialsView* field_trials) { + static void AppendSupportedDecoders( + std::vector* /* specs */) {} + static bool IsSupportedDecoder(const SdpAudioFormat& /* format */) { + return false; + } + + static absl_nullable std::unique_ptr MakeAudioDecoder( + const Environment& /* env */, + const SdpAudioFormat& /* format */, + std::optional /* codec_pair_id */) { return nullptr; } }; +// Use ranked overloads (abseil.io/tips/229) for dispatching. 
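// Rank1 derives from Rank0, so when CreateDecoder() below is called with
// Rank1{}, overload resolution prefers the Rank1 overload, i.e. the trait's
// Environment-aware MakeAudioDecoder(env, config, codec_pair_id). If the
// trait only provides the legacy MakeAudioDecoder(config, codec_pair_id)
// signature, SFINAE removes the Rank1 overload and the Rank0 fallback is
// used instead.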
+struct Rank0 {}; +struct Rank1 : Rank0 {}; + +template (), + std::declval(), + std::declval>())), + std::unique_ptr>>> +absl_nullable std::unique_ptr CreateDecoder( + Rank1, + const Environment& env, + const typename Trait::Config& config, + std::optional codec_pair_id) { + return Trait::MakeAudioDecoder(env, config, codec_pair_id); +} + +template (), + std::declval>())), + std::unique_ptr>>> +absl_nullable std::unique_ptr CreateDecoder( + Rank0, + const Environment& /* env */, + const typename Trait::Config& config, + std::optional codec_pair_id) { + return Trait::MakeAudioDecoder(config, codec_pair_id); +} + // Inductive case: Called with n + 1 template parameters; calls subroutines // with n template parameters. template @@ -50,29 +94,26 @@ struct Helper { static bool IsSupportedDecoder(const SdpAudioFormat& format) { auto opt_config = T::SdpToConfig(format); static_assert(std::is_same>::value, + std::optional>::value, "T::SdpToConfig() must return a value of type " - "absl::optional"); + "std::optional"); return opt_config ? true : Helper::IsSupportedDecoder(format); } - static std::unique_ptr MakeAudioDecoder( + + static absl_nullable std::unique_ptr MakeAudioDecoder( + const Environment& env, const SdpAudioFormat& format, - absl::optional codec_pair_id, - const FieldTrialsView* field_trials) { + std::optional codec_pair_id) { auto opt_config = T::SdpToConfig(format); - return opt_config ? T::MakeAudioDecoder(*opt_config, codec_pair_id) - : Helper::MakeAudioDecoder(format, codec_pair_id, - field_trials); + return opt_config.has_value() + ? CreateDecoder(Rank1{}, env, *opt_config, codec_pair_id) + : Helper::MakeAudioDecoder(env, format, codec_pair_id); } }; template class AudioDecoderFactoryT : public AudioDecoderFactory { public: - explicit AudioDecoderFactoryT(const FieldTrialsView* field_trials) { - field_trials_ = field_trials; - } - std::vector GetSupportedDecoders() override { std::vector specs; Helper::AppendSupportedDecoders(&specs); @@ -83,14 +124,12 @@ class AudioDecoderFactoryT : public AudioDecoderFactory { return Helper::IsSupportedDecoder(format); } - std::unique_ptr MakeAudioDecoder( + absl_nullable std::unique_ptr Create( + const Environment& env, const SdpAudioFormat& format, - absl::optional codec_pair_id) override { - return Helper::MakeAudioDecoder(format, codec_pair_id, - field_trials_); + std::optional codec_pair_id) override { + return Helper::MakeAudioDecoder(env, format, codec_pair_id); } - - const FieldTrialsView* field_trials_; }; } // namespace audio_decoder_factory_template_impl @@ -103,17 +142,22 @@ class AudioDecoderFactoryT : public AudioDecoderFactory { // // Converts `audio_format` to a ConfigType instance. Returns an empty // // optional if `audio_format` doesn't correctly specify a decoder of our // // type. -// absl::optional SdpToConfig(const SdpAudioFormat& audio_format); +// std::optional SdpToConfig(const SdpAudioFormat& audio_format); // // // Appends zero or more AudioCodecSpecs to the list that will be returned // // by AudioDecoderFactory::GetSupportedDecoders(). // void AppendSupportedDecoders(std::vector* specs); // // // Creates an AudioDecoder for the specified format. Used to implement -// // AudioDecoderFactory::MakeAudioDecoder(). +// // AudioDecoderFactory::Create(). 
+// std::unique_ptr MakeAudioDecoder( +// const Environment& env, +// const ConfigType& config, +// std::optional codec_pair_id); +// or // std::unique_ptr MakeAudioDecoder( // const ConfigType& config, -// absl::optional codec_pair_id); +// std::optional codec_pair_id); // // ConfigType should be a type that encapsulates all the settings needed to // create an AudioDecoder. T::Config (where T is the decoder struct) should @@ -126,8 +170,7 @@ class AudioDecoderFactoryT : public AudioDecoderFactory { // TODO(kwiberg): Point at CreateBuiltinAudioDecoderFactory() for an example of // how it is used. template -rtc::scoped_refptr CreateAudioDecoderFactory( - const FieldTrialsView* field_trials = nullptr) { +scoped_refptr CreateAudioDecoderFactory() { // There's no technical reason we couldn't allow zero template parameters, // but such a factory couldn't create any decoders, and callers can do this // by mistake by simply forgetting the <> altogether. So we forbid it in @@ -135,9 +178,8 @@ rtc::scoped_refptr CreateAudioDecoderFactory( static_assert(sizeof...(Ts) >= 1, "Caller must give at least one template parameter"); - return rtc::make_ref_counted< - audio_decoder_factory_template_impl::AudioDecoderFactoryT>( - field_trials); + return make_ref_counted< + audio_decoder_factory_template_impl::AudioDecoderFactoryT>(); } } // namespace webrtc diff --git a/api/audio_codecs/audio_encoder.cc b/api/audio_codecs/audio_encoder.cc index 31bb8739f7..377db93c5e 100644 --- a/api/audio_codecs/audio_encoder.cc +++ b/api/audio_codecs/audio_encoder.cc @@ -10,6 +10,15 @@ #include "api/audio_codecs/audio_encoder.h" +#include +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/call/bitrate_allocation.h" +#include "rtc_base/buffer.h" #include "rtc_base/checks.h" #include "rtc_base/trace_event.h" @@ -32,10 +41,9 @@ int AudioEncoder::RtpTimestampRateHz() const { return SampleRateHz(); } -AudioEncoder::EncodedInfo AudioEncoder::Encode( - uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) { +AudioEncoder::EncodedInfo AudioEncoder::Encode(uint32_t rtp_timestamp, + ArrayView audio, + Buffer* encoded) { TRACE_EVENT0("webrtc", "AudioEncoder::Encode"); RTC_CHECK_EQ(audio.size(), static_cast(NumChannels() * SampleRateHz() / 100)); @@ -58,53 +66,54 @@ bool AudioEncoder::GetDtx() const { return false; } -bool AudioEncoder::SetApplication(Application application) { +bool AudioEncoder::SetApplication(Application /* application */) { return false; } -void AudioEncoder::SetMaxPlaybackRate(int frequency_hz) {} +void AudioEncoder::SetMaxPlaybackRate(int /* frequency_hz */) {} -void AudioEncoder::SetTargetBitrate(int target_bps) {} +void AudioEncoder::SetTargetBitrate(int /* target_bps */) {} -rtc::ArrayView> +ArrayView> AudioEncoder::ReclaimContainedEncoders() { return nullptr; } -bool AudioEncoder::EnableAudioNetworkAdaptor(const std::string& config_string, - RtcEventLog* event_log) { +bool AudioEncoder::EnableAudioNetworkAdaptor( + const std::string& /* config_string */, + RtcEventLog* /* event_log */) { return false; } void AudioEncoder::DisableAudioNetworkAdaptor() {} void AudioEncoder::OnReceivedUplinkPacketLossFraction( - float uplink_packet_loss_fraction) {} + float /* uplink_packet_loss_fraction */) {} void AudioEncoder::OnReceivedUplinkRecoverablePacketLossFraction( - float uplink_recoverable_packet_loss_fraction) { + float /* uplink_recoverable_packet_loss_fraction */) { RTC_DCHECK_NOTREACHED(); } void AudioEncoder::OnReceivedTargetAudioBitrate(int 
target_audio_bitrate_bps) { - OnReceivedUplinkBandwidth(target_audio_bitrate_bps, absl::nullopt); + OnReceivedUplinkBandwidth(target_audio_bitrate_bps, std::nullopt); } void AudioEncoder::OnReceivedUplinkBandwidth( - int target_audio_bitrate_bps, - absl::optional bwe_period_ms) {} + int /* target_audio_bitrate_bps */, + std::optional /* bwe_period_ms */) {} void AudioEncoder::OnReceivedUplinkAllocation(BitrateAllocationUpdate update) { OnReceivedUplinkBandwidth(update.target_bitrate.bps(), update.bwe_period.ms()); } -void AudioEncoder::OnReceivedRtt(int rtt_ms) {} +void AudioEncoder::OnReceivedRtt(int /* rtt_ms */) {} -void AudioEncoder::OnReceivedOverhead(size_t overhead_bytes_per_packet) {} +void AudioEncoder::OnReceivedOverhead(size_t /* overhead_bytes_per_packet */) {} -void AudioEncoder::SetReceiverFrameLengthRange(int min_frame_length_ms, - int max_frame_length_ms) {} +void AudioEncoder::SetReceiverFrameLengthRange(int /* min_frame_length_ms */, + int /* max_frame_length_ms */) {} ANAStats AudioEncoder::GetANAStats() const { return ANAStats(); diff --git a/api/audio_codecs/audio_encoder.h b/api/audio_codecs/audio_encoder.h index 7f5a34214f..56ea287319 100644 --- a/api/audio_codecs/audio_encoder.h +++ b/api/audio_codecs/audio_encoder.h @@ -11,15 +11,19 @@ #ifndef API_AUDIO_CODECS_AUDIO_ENCODER_H_ #define API_AUDIO_CODECS_AUDIO_ENCODER_H_ +#include +#include + #include +#include #include #include #include #include "absl/base/attributes.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/call/bitrate_allocation.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "rtc_base/buffer.h" @@ -35,30 +39,30 @@ struct ANAStats { // Number of actions taken by the ANA bitrate controller since the start of // the call. If this value is not set, it indicates that the bitrate // controller is disabled. - absl::optional bitrate_action_counter; + std::optional bitrate_action_counter; // Number of actions taken by the ANA channel controller since the start of // the call. If this value is not set, it indicates that the channel // controller is disabled. - absl::optional channel_action_counter; + std::optional channel_action_counter; // Number of actions taken by the ANA DTX controller since the start of the // call. If this value is not set, it indicates that the DTX controller is // disabled. - absl::optional dtx_action_counter; + std::optional dtx_action_counter; // Number of actions taken by the ANA FEC controller since the start of the // call. If this value is not set, it indicates that the FEC controller is // disabled. - absl::optional fec_action_counter; + std::optional fec_action_counter; // Number of times the ANA frame length controller decided to increase the // frame length since the start of the call. If this value is not set, it // indicates that the frame length controller is disabled. - absl::optional frame_length_increase_counter; + std::optional frame_length_increase_counter; // Number of times the ANA frame length controller decided to decrease the // frame length since the start of the call. If this value is not set, it // indicates that the frame length controller is disabled. - absl::optional frame_length_decrease_counter; + std::optional frame_length_decrease_counter; // The uplink packet loss fractions as set by the ANA FEC controller. If this // value is not set, it indicates that the ANA FEC controller is not active. 
- absl::optional uplink_packet_loss_fraction; + std::optional uplink_packet_loss_fraction; }; // This is the interface class for encoders in AudioCoding module. Each codec @@ -76,7 +80,6 @@ class AudioEncoder { kPcmA = 3, kPcmU = 4, kG722 = 5, - kIlbc = 6, // Number of histogram bins in the UMA logging of codec types. The // total number of different codecs that are logged cannot exceed this @@ -148,8 +151,8 @@ class AudioEncoder { // EncodeImpl() which does the actual work, and then checks some // postconditions. EncodedInfo Encode(uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded); + ArrayView audio, + Buffer* encoded); // Resets the encoder to its starting state, discarding any input that has // been fed to the encoder but not yet emitted in a packet. @@ -194,8 +197,7 @@ class AudioEncoder { // not call any methods on this encoder afterwards, except for the // destructor. The default implementation just returns an empty array. // NOTE: This method is subject to change. Do not call or override it. - virtual rtc::ArrayView> - ReclaimContainedEncoders(); + virtual ArrayView> ReclaimContainedEncoders(); // Enables audio network adaptor. Returns true if successful. virtual bool EnableAudioNetworkAdaptor(const std::string& config_string, @@ -219,7 +221,7 @@ class AudioEncoder { // Provides target audio bitrate and corresponding probing interval of // the bandwidth estimator to this encoder to allow it to adapt. virtual void OnReceivedUplinkBandwidth(int target_audio_bitrate_bps, - absl::optional bwe_period_ms); + std::optional bwe_period_ms); // Provides target audio bitrate and corresponding probing interval of // the bandwidth estimator to this encoder to allow it to adapt. @@ -240,12 +242,19 @@ class AudioEncoder { // Get statistics related to audio network adaptation. virtual ANAStats GetANAStats() const; - // The range of frame lengths that are supported or nullopt if there's no sch - // information. This is used to calculated the full bitrate range, including - // overhead. - virtual absl::optional> GetFrameLengthRange() + // The range of frame lengths that are supported or nullopt if there's no such + // information. This is used together with the bitrate range to calculate the + // full bitrate range, including overhead. + virtual std::optional> GetFrameLengthRange() const = 0; + // The range of payload bitrates that are supported. This is used together + // with the frame length range to calculate the full bitrate range, including + // overhead. + virtual std::optional> GetBitrateRange() const { + return std::nullopt; + } + // The maximum number of audio channels supported by WebRTC encoders. static constexpr int kMaxNumberOfChannels = 24; @@ -253,8 +262,8 @@ class AudioEncoder { // Subclasses implement this to perform the actual encoding. Called by // Encode(). 
virtual EncodedInfo EncodeImpl(uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) = 0; + ArrayView audio, + Buffer* encoded) = 0; }; } // namespace webrtc #endif // API_AUDIO_CODECS_AUDIO_ENCODER_H_ diff --git a/api/audio_codecs/audio_encoder_factory.h b/api/audio_codecs/audio_encoder_factory.h index 6128b1b6f3..df595fa524 100644 --- a/api/audio_codecs/audio_encoder_factory.h +++ b/api/audio_codecs/audio_encoder_factory.h @@ -12,49 +12,56 @@ #define API_AUDIO_CODECS_AUDIO_ENCODER_FACTORY_H_ #include +#include #include -#include "absl/types/optional.h" +#include "absl/base/nullability.h" #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_format.h" -#include "rtc_base/ref_count.h" +#include "api/environment/environment.h" +#include "api/ref_count.h" namespace webrtc { // A factory that creates AudioEncoders. -class AudioEncoderFactory : public rtc::RefCountInterface { +class AudioEncoderFactory : public RefCountInterface { public: + struct Options { + // The encoder will tags its payloads with the specified payload type. + // TODO(ossu): Try to avoid audio encoders having to know their payload + // type. + int payload_type = -1; + + // Links encoders and decoders that talk to the same remote entity: if + // a AudioEncoderFactory::Create() and a AudioDecoderFactory::Create() call + // receive non-null IDs that compare equal, the factory implementations may + // assume that the encoder and decoder form a pair. (The intended use case + // for this is to set up communication between the AudioEncoder and + // AudioDecoder instances, which is needed for some codecs with built-in + // bandwidth adaptation.) + // + // Note: Implementations need to be robust against combinations other than + // one encoder, one decoder getting the same ID; such encoders must still + // work. + std::optional codec_pair_id; + }; + // Returns a prioritized list of audio codecs, to use for signaling etc. virtual std::vector GetSupportedEncoders() = 0; // Returns information about how this format would be encoded, provided it's // supported. More format and format variations may be supported than those // returned by GetSupportedEncoders(). - virtual absl::optional QueryAudioEncoder( + virtual std::optional QueryAudioEncoder( const SdpAudioFormat& format) = 0; - // Creates an AudioEncoder for the specified format. The encoder will tags its - // payloads with the specified payload type. The `codec_pair_id` argument is - // used to link encoders and decoders that talk to the same remote entity: if - // a AudioEncoderFactory::MakeAudioEncoder() and a - // AudioDecoderFactory::MakeAudioDecoder() call receive non-null IDs that - // compare equal, the factory implementations may assume that the encoder and - // decoder form a pair. (The intended use case for this is to set up - // communication between the AudioEncoder and AudioDecoder instances, which is - // needed for some codecs with built-in bandwidth adaptation.) - // + // Creates an AudioEncoder for the specified format. // Returns null if the format isn't supported. - // - // Note: Implementations need to be robust against combinations other than - // one encoder, one decoder getting the same ID; such encoders must still - // work. - // - // TODO(ossu): Try to avoid audio encoders having to know their payload type. 
- virtual std::unique_ptr MakeAudioEncoder( - int payload_type, + virtual absl_nullable std::unique_ptr Create( + const Environment& env, const SdpAudioFormat& format, - absl::optional codec_pair_id) = 0; + Options options) = 0; }; } // namespace webrtc diff --git a/api/audio_codecs/audio_encoder_factory_template.h b/api/audio_codecs/audio_encoder_factory_template.h index 8a70ba2268..845194f955 100644 --- a/api/audio_codecs/audio_encoder_factory_template.h +++ b/api/audio_codecs/audio_encoder_factory_template.h @@ -12,10 +12,17 @@ #define API_AUDIO_CODECS_AUDIO_ENCODER_FACTORY_TEMPLATE_H_ #include +#include +#include +#include #include +#include "absl/base/nullability.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_encoder_factory.h" -#include "api/field_trials_view.h" +#include "api/audio_codecs/audio_format.h" +#include "api/environment/environment.h" #include "api/make_ref_counted.h" #include "api/scoped_refptr.h" @@ -29,20 +36,55 @@ struct Helper; // Base case: 0 template parameters. template <> struct Helper<> { - static void AppendSupportedEncoders(std::vector* specs) {} - static absl::optional QueryAudioEncoder( - const SdpAudioFormat& format) { - return absl::nullopt; + static void AppendSupportedEncoders( + std::vector* /* specs */) {} + static std::optional QueryAudioEncoder( + const SdpAudioFormat& /* format */) { + return std::nullopt; } - static std::unique_ptr MakeAudioEncoder( - int payload_type, - const SdpAudioFormat& format, - absl::optional codec_pair_id, - const FieldTrialsView* field_trials) { + static absl_nullable std::unique_ptr CreateAudioEncoder( + const Environment& /* env */, + const SdpAudioFormat& /* format */, + const AudioEncoderFactory::Options& /* options */) { return nullptr; } }; +// Use ranked overloads (abseil.io/tips/229) for dispatching. +struct Rank0 {}; +struct Rank1 : Rank0 {}; + +template (), + std::declval(), + std::declval())), + std::unique_ptr>>> +absl_nullable std::unique_ptr CreateEncoder( + Rank1, + const Environment& env, + const typename Trait::Config& config, + const AudioEncoderFactory::Options& options) { + return Trait::MakeAudioEncoder(env, config, options); +} + +template (), + int{}, + std::declval>())), + std::unique_ptr>>> +absl_nullable std::unique_ptr CreateEncoder( + Rank0, + const Environment& /* env */, + const typename Trait::Config& config, + const AudioEncoderFactory::Options& options) { + return Trait::MakeAudioEncoder(config, options.payload_type, + options.codec_pair_id); +} + // Inductive case: Called with n + 1 template parameters; calls subroutines // with n template parameters. template @@ -51,59 +93,49 @@ struct Helper { T::AppendSupportedEncoders(specs); Helper::AppendSupportedEncoders(specs); } - static absl::optional QueryAudioEncoder( + static std::optional QueryAudioEncoder( const SdpAudioFormat& format) { auto opt_config = T::SdpToConfig(format); static_assert(std::is_same>::value, + std::optional>::value, "T::SdpToConfig() must return a value of type " - "absl::optional"); - return opt_config ? absl::optional( + "std::optional"); + return opt_config ? 
std::optional( T::QueryAudioEncoder(*opt_config)) : Helper::QueryAudioEncoder(format); } - static std::unique_ptr MakeAudioEncoder( - int payload_type, + + static absl_nullable std::unique_ptr CreateAudioEncoder( + const Environment& env, const SdpAudioFormat& format, - absl::optional codec_pair_id, - const FieldTrialsView* field_trials) { - auto opt_config = T::SdpToConfig(format); - if (opt_config) { - return T::MakeAudioEncoder(*opt_config, payload_type, codec_pair_id); - } else { - return Helper::MakeAudioEncoder(payload_type, format, - codec_pair_id, field_trials); + const AudioEncoderFactory::Options& options) { + if (auto opt_config = T::SdpToConfig(format); opt_config.has_value()) { + return CreateEncoder(Rank1{}, env, *opt_config, options); } + return Helper::CreateAudioEncoder(env, format, options); } }; template class AudioEncoderFactoryT : public AudioEncoderFactory { public: - explicit AudioEncoderFactoryT(const FieldTrialsView* field_trials) { - field_trials_ = field_trials; - } - std::vector GetSupportedEncoders() override { std::vector specs; Helper::AppendSupportedEncoders(&specs); return specs; } - absl::optional QueryAudioEncoder( + std::optional QueryAudioEncoder( const SdpAudioFormat& format) override { return Helper::QueryAudioEncoder(format); } - std::unique_ptr MakeAudioEncoder( - int payload_type, + absl_nullable std::unique_ptr Create( + const Environment& env, const SdpAudioFormat& format, - absl::optional codec_pair_id) override { - return Helper::MakeAudioEncoder(payload_type, format, codec_pair_id, - field_trials_); + Options options) override { + return Helper::CreateAudioEncoder(env, format, options); } - - const FieldTrialsView* field_trials_; }; } // namespace audio_encoder_factory_template_impl @@ -116,7 +148,7 @@ class AudioEncoderFactoryT : public AudioEncoderFactory { // // Converts `audio_format` to a ConfigType instance. Returns an empty // // optional if `audio_format` doesn't correctly specify an encoder of our // // type. -// absl::optional SdpToConfig(const SdpAudioFormat& audio_format); +// std::optional SdpToConfig(const SdpAudioFormat& audio_format); // // // Appends zero or more AudioCodecSpecs to the list that will be returned // // by AudioEncoderFactory::GetSupportedEncoders(). @@ -127,15 +159,21 @@ class AudioEncoderFactoryT : public AudioEncoderFactory { // AudioCodecInfo QueryAudioEncoder(const ConfigType& config); // // // Creates an AudioEncoder for the specified format. Used to implement -// // AudioEncoderFactory::MakeAudioEncoder(). -// std::unique_ptr MakeAudioEncoder( +// // AudioEncoderFactory::Create. +// std::unique_ptr MakeAudioEncoder( +// const Environment& env, +// const ConfigType& config, +// const AudioEncoderFactory::Options& options); +// or +// std::unique_ptr MakeAudioEncoder( // const ConfigType& config, // int payload_type, -// absl::optional codec_pair_id); +// std::optional codec_pair_id); // // ConfigType should be a type that encapsulates all the settings needed to // create an AudioEncoder. T::Config (where T is the encoder struct) should // either be the config type, or an alias for it. +// When both MakeAudioEncoder signatures are present, 1st one is preferred. // // Whenever it tries to do something, the new factory will try each of the // encoders in the order they were specified in the template argument list, @@ -144,8 +182,7 @@ class AudioEncoderFactoryT : public AudioEncoderFactory { // TODO(kwiberg): Point at CreateBuiltinAudioEncoderFactory() for an example of // how it is used. 
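// As a minimal sketch (for a hypothetical codec "foo"; the names below are
// illustrative only, not an existing WebRTC codec), a conforming trait and
// its use could look like:
//
//   struct FooEncoder {
//     struct Config {
//       bool IsOk() const { return true; }
//     };
//     static std::optional<Config> SdpToConfig(const SdpAudioFormat& format) {
//       if (absl::EqualsIgnoreCase(format.name, "foo") &&
//           format.clockrate_hz == 8000 && format.num_channels == 1) {
//         return Config();
//       }
//       return std::nullopt;
//     }
//     static void AppendSupportedEncoders(std::vector<AudioCodecSpec>* specs) {
//       specs->push_back({{"foo", 8000, 1}, {8000, 1, 64000}});
//     }
//     static AudioCodecInfo QueryAudioEncoder(const Config& /* config */) {
//       return {8000, 1, 64000};
//     }
//     static std::unique_ptr<AudioEncoder> MakeAudioEncoder(
//         const Environment& env,
//         const Config& config,
//         const AudioEncoderFactory::Options& options);
//   };
//
//   scoped_refptr<AudioEncoderFactory> factory =
//       CreateAudioEncoderFactory<FooEncoder>();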
template -rtc::scoped_refptr CreateAudioEncoderFactory( - const FieldTrialsView* field_trials = nullptr) { +scoped_refptr CreateAudioEncoderFactory() { // There's no technical reason we couldn't allow zero template parameters, // but such a factory couldn't create any encoders, and callers can do this // by mistake by simply forgetting the <> altogether. So we forbid it in @@ -153,9 +190,8 @@ rtc::scoped_refptr CreateAudioEncoderFactory( static_assert(sizeof...(Ts) >= 1, "Caller must give at least one template parameter"); - return rtc::make_ref_counted< - audio_encoder_factory_template_impl::AudioEncoderFactoryT>( - field_trials); + return make_ref_counted< + audio_encoder_factory_template_impl::AudioEncoderFactoryT>(); } } // namespace webrtc diff --git a/api/audio_codecs/audio_format.cc b/api/audio_codecs/audio_format.cc index 2a529a49ee..c83e631e84 100644 --- a/api/audio_codecs/audio_format.cc +++ b/api/audio_codecs/audio_format.cc @@ -10,9 +10,13 @@ #include "api/audio_codecs/audio_format.h" +#include #include #include "absl/strings/match.h" +#include "absl/strings/string_view.h" +#include "api/rtp_parameters.h" +#include "rtc_base/checks.h" namespace webrtc { @@ -27,7 +31,7 @@ SdpAudioFormat::SdpAudioFormat(absl::string_view name, SdpAudioFormat::SdpAudioFormat(absl::string_view name, int clockrate_hz, size_t num_channels, - const Parameters& param) + const CodecParameterMap& param) : name(name), clockrate_hz(clockrate_hz), num_channels(num_channels), @@ -36,7 +40,7 @@ SdpAudioFormat::SdpAudioFormat(absl::string_view name, SdpAudioFormat::SdpAudioFormat(absl::string_view name, int clockrate_hz, size_t num_channels, - Parameters&& param) + CodecParameterMap&& param) : name(name), clockrate_hz(clockrate_hz), num_channels(num_channels), diff --git a/api/audio_codecs/audio_format.h b/api/audio_codecs/audio_format.h index 0cf67799b8..a5d4a92065 100644 --- a/api/audio_codecs/audio_format.h +++ b/api/audio_codecs/audio_format.h @@ -17,14 +17,17 @@ #include #include "absl/strings/string_view.h" +#include "api/rtp_parameters.h" #include "rtc_base/checks.h" -#include "rtc_base/system/rtc_export.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/system/rtc_export.h" // IWYU pragma: private namespace webrtc { // SDP specification for a single audio codec. struct RTC_EXPORT SdpAudioFormat { - using Parameters = std::map; + using Parameters [[deprecated("Use webrtc::CodecParameterMap")]] = + std::map; SdpAudioFormat(const SdpAudioFormat&); SdpAudioFormat(SdpAudioFormat&&); @@ -32,11 +35,11 @@ struct RTC_EXPORT SdpAudioFormat { SdpAudioFormat(absl::string_view name, int clockrate_hz, size_t num_channels, - const Parameters& param); + const CodecParameterMap& param); SdpAudioFormat(absl::string_view name, int clockrate_hz, size_t num_channels, - Parameters&& param); + CodecParameterMap&& param); ~SdpAudioFormat(); // Returns true if this format is compatible with `o`. 
In SDP terminology: @@ -52,10 +55,27 @@ struct RTC_EXPORT SdpAudioFormat { return !(a == b); } + template + friend void AbslStringify(Sink& sink, const SdpAudioFormat& saf) { + StringBuilder sb("{"); + bool first = true; + for (const auto& [key, value] : saf.parameters) { + if (!first) { + sb << ", "; + } + first = false; + sb << key << ": " << value; + } + sb << "}"; + absl::Format( + &sink, "{name: %s, clockrate_hz: %d, num_channels: %d, parameters: %v}", + saf.name, saf.clockrate_hz, saf.num_channels, sb.Release()); + } + std::string name; int clockrate_hz; size_t num_channels; - Parameters parameters; + CodecParameterMap parameters; }; // Information about how an audio format is treated by the codec implementation. @@ -103,6 +123,17 @@ struct AudioCodecInfo { return min_bitrate_bps == max_bitrate_bps; } + template + friend void AbslStringify(Sink& sink, const AudioCodecInfo& aci) { + absl::Format(&sink, + "{sample_rate_hz: %d, num_channels: %d, default_bitrate_bps: " + "%d, min_bitrate_bps: %d, max_bitrate_bps: %d, " + "allow_comfort_noise: %v, supports_network_adaption: %v}", + aci.sample_rate_hz, aci.num_channels, aci.default_bitrate_bps, + aci.min_bitrate_bps, aci.max_bitrate_bps, + aci.allow_comfort_noise, aci.supports_network_adaption); + } + int sample_rate_hz; size_t num_channels; int default_bitrate_bps; @@ -124,6 +155,11 @@ struct AudioCodecSpec { bool operator!=(const AudioCodecSpec& b) const { return !(*this == b); } + template + friend void AbslStringify(Sink& sink, const AudioCodecSpec& acs) { + absl::Format(&sink, "{format: %v, info: %v}", acs.format, acs.info); + } + SdpAudioFormat format; AudioCodecInfo info; }; diff --git a/api/audio_codecs/builtin_audio_decoder_factory.cc b/api/audio_codecs/builtin_audio_decoder_factory.cc index 881113d985..c8d5800587 100644 --- a/api/audio_codecs/builtin_audio_decoder_factory.cc +++ b/api/audio_codecs/builtin_audio_decoder_factory.cc @@ -11,15 +11,18 @@ #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include +#include #include #include "api/audio_codecs/L16/audio_decoder_L16.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_decoder_factory_template.h" +#include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/g711/audio_decoder_g711.h" #include "api/audio_codecs/g722/audio_decoder_g722.h" -#if WEBRTC_USE_BUILTIN_ILBC -#include "api/audio_codecs/ilbc/audio_decoder_ilbc.h" // nogncheck -#endif +#include "api/scoped_refptr.h" #if WEBRTC_USE_BUILTIN_OPUS #include "api/audio_codecs/opus/audio_decoder_multi_channel_opus.h" #include "api/audio_codecs/opus/audio_decoder_opus.h" // nogncheck @@ -33,36 +36,30 @@ namespace { template struct NotAdvertised { using Config = typename T::Config; - static absl::optional SdpToConfig( - const SdpAudioFormat& audio_format) { + static std::optional SdpToConfig(const SdpAudioFormat& audio_format) { return T::SdpToConfig(audio_format); } - static void AppendSupportedDecoders(std::vector* specs) { + static void AppendSupportedDecoders( + std::vector* /* specs */) { // Don't advertise support for anything. 
} static std::unique_ptr MakeAudioDecoder( const Config& config, - absl::optional codec_pair_id = absl::nullopt) { + std::optional codec_pair_id = std::nullopt) { return T::MakeAudioDecoder(config, codec_pair_id); } }; } // namespace -rtc::scoped_refptr CreateBuiltinAudioDecoderFactory() { +scoped_refptr CreateBuiltinAudioDecoderFactory() { return CreateAudioDecoderFactory< #if WEBRTC_USE_BUILTIN_OPUS AudioDecoderOpus, NotAdvertised, #endif - AudioDecoderG722, - -#if WEBRTC_USE_BUILTIN_ILBC - AudioDecoderIlbc, -#endif - - AudioDecoderG711, NotAdvertised>(); + AudioDecoderG722, AudioDecoderG711, NotAdvertised>(); } } // namespace webrtc diff --git a/api/audio_codecs/builtin_audio_decoder_factory.h b/api/audio_codecs/builtin_audio_decoder_factory.h index 72e1e3d96e..41f7c125b8 100644 --- a/api/audio_codecs/builtin_audio_decoder_factory.h +++ b/api/audio_codecs/builtin_audio_decoder_factory.h @@ -21,7 +21,7 @@ namespace webrtc { // only need a subset of the codecs, consider using // CreateAudioDecoderFactory<...codecs listed here...>() or // CreateOpusAudioDecoderFactory() instead. -rtc::scoped_refptr CreateBuiltinAudioDecoderFactory(); +scoped_refptr CreateBuiltinAudioDecoderFactory(); } // namespace webrtc diff --git a/api/audio_codecs/builtin_audio_encoder_factory.cc b/api/audio_codecs/builtin_audio_encoder_factory.cc index 4546a2eaee..4d63804da9 100644 --- a/api/audio_codecs/builtin_audio_encoder_factory.cc +++ b/api/audio_codecs/builtin_audio_encoder_factory.cc @@ -11,15 +11,19 @@ #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include +#include #include #include "api/audio_codecs/L16/audio_encoder_L16.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_encoder_factory.h" #include "api/audio_codecs/audio_encoder_factory_template.h" +#include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/g711/audio_encoder_g711.h" #include "api/audio_codecs/g722/audio_encoder_g722.h" -#if WEBRTC_USE_BUILTIN_ILBC -#include "api/audio_codecs/ilbc/audio_encoder_ilbc.h" // nogncheck -#endif +#include "api/field_trials_view.h" +#include "api/scoped_refptr.h" #if WEBRTC_USE_BUILTIN_OPUS #include "api/audio_codecs/opus/audio_encoder_multi_channel_opus.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" // nogncheck @@ -33,11 +37,11 @@ namespace { template struct NotAdvertised { using Config = typename T::Config; - static absl::optional SdpToConfig( - const SdpAudioFormat& audio_format) { + static std::optional SdpToConfig(const SdpAudioFormat& audio_format) { return T::SdpToConfig(audio_format); } - static void AppendSupportedEncoders(std::vector* specs) { + static void AppendSupportedEncoders( + std::vector* /* specs */) { // Don't advertise support for anything. 
} static AudioCodecInfo QueryAudioEncoder(const Config& config) { @@ -46,7 +50,7 @@ struct NotAdvertised { static std::unique_ptr MakeAudioEncoder( const Config& config, int payload_type, - absl::optional codec_pair_id = absl::nullopt, + std::optional codec_pair_id = std::nullopt, const FieldTrialsView* field_trials = nullptr) { return T::MakeAudioEncoder(config, payload_type, codec_pair_id, field_trials); @@ -55,20 +59,14 @@ struct NotAdvertised { } // namespace -rtc::scoped_refptr CreateBuiltinAudioEncoderFactory() { +scoped_refptr CreateBuiltinAudioEncoderFactory() { return CreateAudioEncoderFactory< #if WEBRTC_USE_BUILTIN_OPUS AudioEncoderOpus, NotAdvertised, #endif - AudioEncoderG722, - -#if WEBRTC_USE_BUILTIN_ILBC - AudioEncoderIlbc, -#endif - - AudioEncoderG711, NotAdvertised>(); + AudioEncoderG722, AudioEncoderG711, NotAdvertised>(); } } // namespace webrtc diff --git a/api/audio_codecs/builtin_audio_encoder_factory.h b/api/audio_codecs/builtin_audio_encoder_factory.h index f833de10f1..8c0a4ab452 100644 --- a/api/audio_codecs/builtin_audio_encoder_factory.h +++ b/api/audio_codecs/builtin_audio_encoder_factory.h @@ -21,7 +21,7 @@ namespace webrtc { // only need a subset of the codecs, consider using // CreateAudioEncoderFactory<...codecs listed here...>() or // CreateOpusAudioEncoderFactory() instead. -rtc::scoped_refptr CreateBuiltinAudioEncoderFactory(); +scoped_refptr CreateBuiltinAudioEncoderFactory(); } // namespace webrtc diff --git a/api/audio_codecs/g711/BUILD.gn b/api/audio_codecs/g711/BUILD.gn index b2ff324f12..6c00e040fe 100644 --- a/api/audio_codecs/g711/BUILD.gn +++ b/api/audio_codecs/g711/BUILD.gn @@ -23,14 +23,12 @@ rtc_library("audio_encoder_g711") { "..:audio_codecs_api", "../../../api:field_trials_view", "../../../modules/audio_coding:g711", + "../../../rtc_base:checks", "../../../rtc_base:safe_conversions", "../../../rtc_base:safe_minmax", "../../../rtc_base:stringutils", "../../../rtc_base/system:rtc_export", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -45,11 +43,9 @@ rtc_library("audio_decoder_g711") { "..:audio_codecs_api", "../../../api:field_trials_view", "../../../modules/audio_coding:g711", + "../../../rtc_base:checks", "../../../rtc_base:safe_conversions", "../../../rtc_base/system:rtc_export", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } diff --git a/api/audio_codecs/g711/audio_decoder_g711.cc b/api/audio_codecs/g711/audio_decoder_g711.cc index 838f7e9624..400a27a17a 100644 --- a/api/audio_codecs/g711/audio_decoder_g711.cc +++ b/api/audio_codecs/g711/audio_decoder_g711.cc @@ -10,16 +10,23 @@ #include "api/audio_codecs/g711/audio_decoder_g711.h" +#include #include +#include #include #include "absl/strings/match.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" #include "modules/audio_coding/codecs/g711/audio_decoder_pcm.h" +#include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" namespace webrtc { -absl::optional AudioDecoderG711::SdpToConfig( +std::optional AudioDecoderG711::SdpToConfig( const SdpAudioFormat& format) { const bool is_pcmu = absl::EqualsIgnoreCase(format.name, "PCMU"); const bool is_pcma = absl::EqualsIgnoreCase(format.name, "PCMA"); @@ -27,14 +34,14 @@ absl::optional AudioDecoderG711::SdpToConfig( (is_pcmu || is_pcma)) { Config config; config.type = 
is_pcmu ? Config::Type::kPcmU : Config::Type::kPcmA; - config.num_channels = rtc::dchecked_cast(format.num_channels); + config.num_channels = dchecked_cast(format.num_channels); if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); - return absl::nullopt; + return std::nullopt; } return config; } else { - return absl::nullopt; + return std::nullopt; } } @@ -47,8 +54,8 @@ void AudioDecoderG711::AppendSupportedDecoders( std::unique_ptr AudioDecoderG711::MakeAudioDecoder( const Config& config, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { + std::optional /*codec_pair_id*/, + const FieldTrialsView* /* field_trials */) { if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); return nullptr; diff --git a/api/audio_codecs/g711/audio_decoder_g711.h b/api/audio_codecs/g711/audio_decoder_g711.h index 0f7a98d345..155c13d40d 100644 --- a/api/audio_codecs/g711/audio_decoder_g711.h +++ b/api/audio_codecs/g711/audio_decoder_g711.h @@ -12,9 +12,9 @@ #define API_AUDIO_CODECS_G711_AUDIO_DECODER_G711_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" @@ -36,11 +36,11 @@ struct RTC_EXPORT AudioDecoderG711 { Type type; int num_channels; }; - static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static std::optional SdpToConfig(const SdpAudioFormat& audio_format); static void AppendSupportedDecoders(std::vector* specs); static std::unique_ptr MakeAudioDecoder( const Config& config, - absl::optional codec_pair_id = absl::nullopt, + std::optional codec_pair_id = std::nullopt, const FieldTrialsView* field_trials = nullptr); }; diff --git a/api/audio_codecs/g711/audio_encoder_g711.cc b/api/audio_codecs/g711/audio_encoder_g711.cc index 1dca3b80d3..f430bbc862 100644 --- a/api/audio_codecs/g711/audio_encoder_g711.cc +++ b/api/audio_codecs/g711/audio_encoder_g711.cc @@ -10,18 +10,28 @@ #include "api/audio_codecs/g711/audio_encoder_g711.h" +#include + +#include +#include #include +#include #include #include "absl/strings/match.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" #include "modules/audio_coding/codecs/g711/audio_encoder_pcm.h" +#include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/numerics/safe_minmax.h" #include "rtc_base/string_to_number.h" namespace webrtc { -absl::optional AudioEncoderG711::SdpToConfig( +std::optional AudioEncoderG711::SdpToConfig( const SdpAudioFormat& format) { const bool is_pcmu = absl::EqualsIgnoreCase(format.name, "PCMU"); const bool is_pcma = absl::EqualsIgnoreCase(format.name, "PCMA"); @@ -29,22 +39,22 @@ absl::optional AudioEncoderG711::SdpToConfig( (is_pcmu || is_pcma)) { Config config; config.type = is_pcmu ? 
Config::Type::kPcmU : Config::Type::kPcmA; - config.num_channels = rtc::dchecked_cast(format.num_channels); + config.num_channels = dchecked_cast(format.num_channels); config.frame_size_ms = 20; auto ptime_iter = format.parameters.find("ptime"); if (ptime_iter != format.parameters.end()) { - const auto ptime = rtc::StringToNumber(ptime_iter->second); + const auto ptime = StringToNumber(ptime_iter->second); if (ptime && *ptime > 0) { - config.frame_size_ms = rtc::SafeClamp(10 * (*ptime / 10), 10, 60); + config.frame_size_ms = SafeClamp(10 * (*ptime / 10), 10, 60); } } if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); - return absl::nullopt; + return std::nullopt; } return config; } else { - return absl::nullopt; + return std::nullopt; } } @@ -57,15 +67,15 @@ void AudioEncoderG711::AppendSupportedEncoders( AudioCodecInfo AudioEncoderG711::QueryAudioEncoder(const Config& config) { RTC_DCHECK(config.IsOk()); - return {8000, rtc::dchecked_cast(config.num_channels), + return {8000, dchecked_cast(config.num_channels), 64000 * config.num_channels}; } std::unique_ptr AudioEncoderG711::MakeAudioEncoder( const Config& config, int payload_type, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { + std::optional /*codec_pair_id*/, + const FieldTrialsView* /* field_trials */) { if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); return nullptr; diff --git a/api/audio_codecs/g711/audio_encoder_g711.h b/api/audio_codecs/g711/audio_encoder_g711.h index 4b3eb845e0..db39a987c2 100644 --- a/api/audio_codecs/g711/audio_encoder_g711.h +++ b/api/audio_codecs/g711/audio_encoder_g711.h @@ -12,9 +12,9 @@ #define API_AUDIO_CODECS_G711_AUDIO_ENCODER_G711_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_format.h" @@ -38,14 +38,14 @@ struct RTC_EXPORT AudioEncoderG711 { int num_channels = 1; int frame_size_ms = 20; }; - static absl::optional SdpToConfig( + static std::optional SdpToConfig( const SdpAudioFormat& audio_format); static void AppendSupportedEncoders(std::vector* specs); static AudioCodecInfo QueryAudioEncoder(const Config& config); static std::unique_ptr MakeAudioEncoder( const Config& config, int payload_type, - absl::optional codec_pair_id = absl::nullopt, + std::optional codec_pair_id = std::nullopt, const FieldTrialsView* field_trials = nullptr); }; diff --git a/api/audio_codecs/g722/BUILD.gn b/api/audio_codecs/g722/BUILD.gn index af13ac3de3..a3ee0d4e0c 100644 --- a/api/audio_codecs/g722/BUILD.gn +++ b/api/audio_codecs/g722/BUILD.gn @@ -30,14 +30,12 @@ rtc_library("audio_encoder_g722") { "..:audio_codecs_api", "../../../api:field_trials_view", "../../../modules/audio_coding:g722", + "../../../rtc_base:checks", "../../../rtc_base:safe_conversions", "../../../rtc_base:safe_minmax", "../../../rtc_base:stringutils", "../../../rtc_base/system:rtc_export", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -52,11 +50,9 @@ rtc_library("audio_decoder_g722") { "..:audio_codecs_api", "../../../api:field_trials_view", "../../../modules/audio_coding:g722", + "../../../rtc_base:checks", "../../../rtc_base:safe_conversions", "../../../rtc_base/system:rtc_export", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } diff --git a/api/audio_codecs/g722/audio_decoder_g722.cc b/api/audio_codecs/g722/audio_decoder_g722.cc index 
ed7163471a..f948a21da0 100644 --- a/api/audio_codecs/g722/audio_decoder_g722.cc +++ b/api/audio_codecs/g722/audio_decoder_g722.cc @@ -11,22 +11,28 @@ #include "api/audio_codecs/g722/audio_decoder_g722.h" #include +#include #include #include "absl/strings/match.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" #include "modules/audio_coding/codecs/g722/audio_decoder_g722.h" +#include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" namespace webrtc { -absl::optional AudioDecoderG722::SdpToConfig( +std::optional AudioDecoderG722::SdpToConfig( const SdpAudioFormat& format) { if (absl::EqualsIgnoreCase(format.name, "G722") && format.clockrate_hz == 8000 && (format.num_channels == 1 || format.num_channels == 2)) { - return Config{rtc::dchecked_cast(format.num_channels)}; + return Config{dchecked_cast(format.num_channels)}; } - return absl::nullopt; + return std::nullopt; } void AudioDecoderG722::AppendSupportedDecoders( @@ -36,8 +42,8 @@ void AudioDecoderG722::AppendSupportedDecoders( std::unique_ptr AudioDecoderG722::MakeAudioDecoder( Config config, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { + std::optional /*codec_pair_id*/, + const FieldTrialsView* /* field_trials */) { if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); return nullptr; diff --git a/api/audio_codecs/g722/audio_decoder_g722.h b/api/audio_codecs/g722/audio_decoder_g722.h index 6f7b253039..185986638d 100644 --- a/api/audio_codecs/g722/audio_decoder_g722.h +++ b/api/audio_codecs/g722/audio_decoder_g722.h @@ -12,9 +12,9 @@ #define API_AUDIO_CODECS_G722_AUDIO_DECODER_G722_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" @@ -30,11 +30,11 @@ struct RTC_EXPORT AudioDecoderG722 { bool IsOk() const { return num_channels == 1 || num_channels == 2; } int num_channels; }; - static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static std::optional SdpToConfig(const SdpAudioFormat& audio_format); static void AppendSupportedDecoders(std::vector* specs); static std::unique_ptr MakeAudioDecoder( Config config, - absl::optional codec_pair_id = absl::nullopt, + std::optional codec_pair_id = std::nullopt, const FieldTrialsView* field_trials = nullptr); }; diff --git a/api/audio_codecs/g722/audio_encoder_g722.cc b/api/audio_codecs/g722/audio_encoder_g722.cc index 56a6c4da6a..ed86d164da 100644 --- a/api/audio_codecs/g722/audio_encoder_g722.cc +++ b/api/audio_codecs/g722/audio_encoder_g722.cc @@ -10,37 +10,47 @@ #include "api/audio_codecs/g722/audio_encoder_g722.h" +#include + +#include #include +#include #include #include "absl/strings/match.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/audio_codecs/g722/audio_encoder_g722_config.h" +#include "api/field_trials_view.h" #include "modules/audio_coding/codecs/g722/audio_encoder_g722.h" +#include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/numerics/safe_minmax.h" #include "rtc_base/string_to_number.h" namespace webrtc { -absl::optional AudioEncoderG722::SdpToConfig( +std::optional AudioEncoderG722::SdpToConfig( const SdpAudioFormat& format) { if (!absl::EqualsIgnoreCase(format.name, "g722") || 
format.clockrate_hz != 8000) { - return absl::nullopt; + return std::nullopt; } AudioEncoderG722Config config; - config.num_channels = rtc::checked_cast(format.num_channels); + config.num_channels = checked_cast(format.num_channels); auto ptime_iter = format.parameters.find("ptime"); if (ptime_iter != format.parameters.end()) { - auto ptime = rtc::StringToNumber(ptime_iter->second); + auto ptime = StringToNumber(ptime_iter->second); if (ptime && *ptime > 0) { const int whole_packets = *ptime / 10; - config.frame_size_ms = rtc::SafeClamp(whole_packets * 10, 10, 60); + config.frame_size_ms = SafeClamp(whole_packets * 10, 10, 60); } } if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); - return absl::nullopt; + return std::nullopt; } return config; } @@ -55,15 +65,15 @@ void AudioEncoderG722::AppendSupportedEncoders( AudioCodecInfo AudioEncoderG722::QueryAudioEncoder( const AudioEncoderG722Config& config) { RTC_DCHECK(config.IsOk()); - return {16000, rtc::dchecked_cast(config.num_channels), + return {16000, dchecked_cast(config.num_channels), 64000 * config.num_channels}; } std::unique_ptr AudioEncoderG722::MakeAudioEncoder( const AudioEncoderG722Config& config, int payload_type, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { + std::optional /*codec_pair_id*/, + const FieldTrialsView* /* field_trials */) { if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); return nullptr; diff --git a/api/audio_codecs/g722/audio_encoder_g722.h b/api/audio_codecs/g722/audio_encoder_g722.h index 78ceddd1e9..0997d8bb19 100644 --- a/api/audio_codecs/g722/audio_encoder_g722.h +++ b/api/audio_codecs/g722/audio_encoder_g722.h @@ -12,9 +12,9 @@ #define API_AUDIO_CODECS_G722_AUDIO_ENCODER_G722_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_format.h" @@ -28,14 +28,14 @@ namespace webrtc { // CreateAudioEncoderFactory<...>(). struct RTC_EXPORT AudioEncoderG722 { using Config = AudioEncoderG722Config; - static absl::optional SdpToConfig( + static std::optional SdpToConfig( const SdpAudioFormat& audio_format); static void AppendSupportedEncoders(std::vector* specs); static AudioCodecInfo QueryAudioEncoder(const AudioEncoderG722Config& config); static std::unique_ptr MakeAudioEncoder( const AudioEncoderG722Config& config, int payload_type, - absl::optional codec_pair_id = absl::nullopt, + std::optional codec_pair_id = std::nullopt, const FieldTrialsView* field_trials = nullptr); }; diff --git a/api/audio_codecs/ilbc/BUILD.gn b/api/audio_codecs/ilbc/BUILD.gn deleted file mode 100644 index 22cf48220f..0000000000 --- a/api/audio_codecs/ilbc/BUILD.gn +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. 
- -import("../../../webrtc.gni") -if (is_android) { - import("//build/config/android/config.gni") - import("//build/config/android/rules.gni") -} - -rtc_source_set("audio_encoder_ilbc_config") { - visibility = [ "*" ] - sources = [ "audio_encoder_ilbc_config.h" ] -} - -rtc_library("audio_encoder_ilbc") { - visibility = [ "*" ] - poisonous = [ "audio_codecs" ] - sources = [ - "audio_encoder_ilbc.cc", - "audio_encoder_ilbc.h", - ] - deps = [ - ":audio_encoder_ilbc_config", - "..:audio_codecs_api", - "../../../api:field_trials_view", - "../../../modules/audio_coding:ilbc", - "../../../rtc_base:safe_conversions", - "../../../rtc_base:safe_minmax", - "../../../rtc_base:stringutils", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - -rtc_library("audio_decoder_ilbc") { - visibility = [ "*" ] - poisonous = [ "audio_codecs" ] - sources = [ - "audio_decoder_ilbc.cc", - "audio_decoder_ilbc.h", - ] - deps = [ - "..:audio_codecs_api", - "../../../api:field_trials_view", - "../../../modules/audio_coding:ilbc", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} diff --git a/api/audio_codecs/ilbc/audio_decoder_ilbc.cc b/api/audio_codecs/ilbc/audio_decoder_ilbc.cc deleted file mode 100644 index c58316903a..0000000000 --- a/api/audio_codecs/ilbc/audio_decoder_ilbc.cc +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/audio_codecs/ilbc/audio_decoder_ilbc.h" - -#include -#include - -#include "absl/strings/match.h" -#include "modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h" - -namespace webrtc { - -absl::optional AudioDecoderIlbc::SdpToConfig( - const SdpAudioFormat& format) { - if (absl::EqualsIgnoreCase(format.name, "ILBC") && - format.clockrate_hz == 8000 && format.num_channels == 1) { - return Config(); - } - return absl::nullopt; -} - -void AudioDecoderIlbc::AppendSupportedDecoders( - std::vector* specs) { - specs->push_back({{"ILBC", 8000, 1}, {8000, 1, 13300}}); -} - -std::unique_ptr AudioDecoderIlbc::MakeAudioDecoder( - Config config, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { - return std::make_unique(); -} - -} // namespace webrtc diff --git a/api/audio_codecs/ilbc/audio_decoder_ilbc.h b/api/audio_codecs/ilbc/audio_decoder_ilbc.h deleted file mode 100644 index 60566c88df..0000000000 --- a/api/audio_codecs/ilbc/audio_decoder_ilbc.h +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef API_AUDIO_CODECS_ILBC_AUDIO_DECODER_ILBC_H_ -#define API_AUDIO_CODECS_ILBC_AUDIO_DECODER_ILBC_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_codec_pair_id.h" -#include "api/audio_codecs/audio_decoder.h" -#include "api/audio_codecs/audio_format.h" -#include "api/field_trials_view.h" - -namespace webrtc { - -// ILBC decoder API for use as a template parameter to -// CreateAudioDecoderFactory<...>(). -struct AudioDecoderIlbc { - struct Config {}; // Empty---no config values needed! - static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); - static void AppendSupportedDecoders(std::vector* specs); - static std::unique_ptr MakeAudioDecoder( - Config config, - absl::optional codec_pair_id = absl::nullopt, - const FieldTrialsView* field_trials = nullptr); -}; - -} // namespace webrtc - -#endif // API_AUDIO_CODECS_ILBC_AUDIO_DECODER_ILBC_H_ diff --git a/api/audio_codecs/ilbc/audio_encoder_ilbc.cc b/api/audio_codecs/ilbc/audio_encoder_ilbc.cc deleted file mode 100644 index b497948491..0000000000 --- a/api/audio_codecs/ilbc/audio_encoder_ilbc.cc +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/audio_codecs/ilbc/audio_encoder_ilbc.h" - -#include -#include - -#include "absl/strings/match.h" -#include "modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h" -#include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/numerics/safe_minmax.h" -#include "rtc_base/string_to_number.h" - -namespace webrtc { -namespace { -int GetIlbcBitrate(int ptime) { - switch (ptime) { - case 20: - case 40: - // 38 bytes per frame of 20 ms => 15200 bits/s. - return 15200; - case 30: - case 60: - // 50 bytes per frame of 30 ms => (approx) 13333 bits/s. 
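// (Arithmetic check: 38 bytes * 8 bits / 0.020 s = 15200 bit/s, and
// 50 bytes * 8 bits / 0.030 s = 13333.3 bit/s, hence "approx".)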
- return 13333; - default: - RTC_CHECK_NOTREACHED(); - } -} -} // namespace - -absl::optional AudioEncoderIlbc::SdpToConfig( - const SdpAudioFormat& format) { - if (!absl::EqualsIgnoreCase(format.name.c_str(), "ILBC") || - format.clockrate_hz != 8000 || format.num_channels != 1) { - return absl::nullopt; - } - - AudioEncoderIlbcConfig config; - auto ptime_iter = format.parameters.find("ptime"); - if (ptime_iter != format.parameters.end()) { - auto ptime = rtc::StringToNumber(ptime_iter->second); - if (ptime && *ptime > 0) { - const int whole_packets = *ptime / 10; - config.frame_size_ms = rtc::SafeClamp(whole_packets * 10, 20, 60); - } - } - if (!config.IsOk()) { - RTC_DCHECK_NOTREACHED(); - return absl::nullopt; - } - return config; -} - -void AudioEncoderIlbc::AppendSupportedEncoders( - std::vector* specs) { - const SdpAudioFormat fmt = {"ILBC", 8000, 1}; - const AudioCodecInfo info = QueryAudioEncoder(*SdpToConfig(fmt)); - specs->push_back({fmt, info}); -} - -AudioCodecInfo AudioEncoderIlbc::QueryAudioEncoder( - const AudioEncoderIlbcConfig& config) { - RTC_DCHECK(config.IsOk()); - return {8000, 1, GetIlbcBitrate(config.frame_size_ms)}; -} - -std::unique_ptr AudioEncoderIlbc::MakeAudioEncoder( - const AudioEncoderIlbcConfig& config, - int payload_type, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { - if (!config.IsOk()) { - RTC_DCHECK_NOTREACHED(); - return nullptr; - } - return std::make_unique(config, payload_type); -} - -} // namespace webrtc diff --git a/api/audio_codecs/ilbc/audio_encoder_ilbc.h b/api/audio_codecs/ilbc/audio_encoder_ilbc.h deleted file mode 100644 index a5306841ce..0000000000 --- a/api/audio_codecs/ilbc/audio_encoder_ilbc.h +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_ -#define API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_codec_pair_id.h" -#include "api/audio_codecs/audio_encoder.h" -#include "api/audio_codecs/audio_format.h" -#include "api/audio_codecs/ilbc/audio_encoder_ilbc_config.h" -#include "api/field_trials_view.h" - -namespace webrtc { - -// ILBC encoder API for use as a template parameter to -// CreateAudioEncoderFactory<...>(). 
-struct AudioEncoderIlbc { - using Config = AudioEncoderIlbcConfig; - static absl::optional SdpToConfig( - const SdpAudioFormat& audio_format); - static void AppendSupportedEncoders(std::vector* specs); - static AudioCodecInfo QueryAudioEncoder(const AudioEncoderIlbcConfig& config); - static std::unique_ptr MakeAudioEncoder( - const AudioEncoderIlbcConfig& config, - int payload_type, - absl::optional codec_pair_id = absl::nullopt, - const FieldTrialsView* field_trials = nullptr); -}; - -} // namespace webrtc - -#endif // API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_ diff --git a/api/audio_codecs/ilbc/audio_encoder_ilbc_config.h b/api/audio_codecs/ilbc/audio_encoder_ilbc_config.h deleted file mode 100644 index 4d82f9901c..0000000000 --- a/api/audio_codecs/ilbc/audio_encoder_ilbc_config.h +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_CONFIG_H_ -#define API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_CONFIG_H_ - -namespace webrtc { - -struct AudioEncoderIlbcConfig { - bool IsOk() const { - return (frame_size_ms == 20 || frame_size_ms == 30 || frame_size_ms == 40 || - frame_size_ms == 60); - } - int frame_size_ms = 30; // Valid values are 20, 30, 40, and 60 ms. - // Note that frame size 40 ms produces encodings with two 20 ms frames in - // them, and frame size 60 ms consists of two 30 ms frames. -}; - -} // namespace webrtc - -#endif // API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_CONFIG_H_ diff --git a/api/audio_codecs/opus/BUILD.gn b/api/audio_codecs/opus/BUILD.gn index eb90a0b9ac..2035a791be 100644 --- a/api/audio_codecs/opus/BUILD.gn +++ b/api/audio_codecs/opus/BUILD.gn @@ -21,7 +21,6 @@ rtc_library("audio_encoder_opus_config") { "audio_encoder_opus_config.h", ] deps = [ "../../../rtc_base/system:rtc_export" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] defines = [] if (rtc_opus_variable_complexity) { defines += [ "WEBRTC_OPUS_VARIABLE_COMPLEXITY=1" ] @@ -44,13 +43,10 @@ rtc_library("audio_encoder_opus") { deps = [ ":audio_encoder_opus_config", "..:audio_codecs_api", - "../../../api:field_trials_view", "../../../modules/audio_coding:webrtc_opus", + "../../../rtc_base:checks", "../../../rtc_base/system:rtc_export", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "../../environment", ] } @@ -63,13 +59,12 @@ rtc_library("audio_decoder_opus") { ] deps = [ "..:audio_codecs_api", - "../../../api:field_trials_view", + "../..:field_trials_view", "../../../modules/audio_coding:webrtc_opus", + "../../../rtc_base:checks", "../../../rtc_base/system:rtc_export", - ] - absl_deps = [ + "../../environment", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -85,7 +80,6 @@ rtc_library("audio_encoder_multiopus") { "../../../rtc_base/system:rtc_export", "../opus:audio_encoder_opus_config", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("audio_decoder_multiopus") { @@ -101,10 +95,21 @@ rtc_library("audio_decoder_multiopus") { "../../../api:field_trials_view", "../../../modules/audio_coding:webrtc_multiopus", 
"../../../rtc_base/system:rtc_export", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("unittests") { + visibility = [ "*" ] + testonly = true + sources = [ "audio_decoder_opus_unittest.cc" ] + deps = [ + ":audio_decoder_opus", + "..:audio_codecs_api", + "../../../api/environment", + "../../../api/environment:environment_factory", + "../../../test:explicit_key_value_config", + "../../../test:test_support", ] } diff --git a/api/audio_codecs/opus/audio_decoder_multi_channel_opus.cc b/api/audio_codecs/opus/audio_decoder_multi_channel_opus.cc index 0fb4e05511..5c904019ec 100644 --- a/api/audio_codecs/opus/audio_decoder_multi_channel_opus.cc +++ b/api/audio_codecs/opus/audio_decoder_multi_channel_opus.cc @@ -11,16 +11,20 @@ #include "api/audio_codecs/opus/audio_decoder_multi_channel_opus.h" #include +#include #include #include -#include "absl/memory/memory.h" -#include "absl/strings/match.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h" +#include "api/field_trials_view.h" #include "modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.h" namespace webrtc { -absl::optional +std::optional AudioDecoderMultiChannelOpus::SdpToConfig(const SdpAudioFormat& format) { return AudioDecoderMultiChannelOpusImpl::SdpToConfig(format); } @@ -64,8 +68,8 @@ void AudioDecoderMultiChannelOpus::AppendSupportedDecoders( std::unique_ptr AudioDecoderMultiChannelOpus::MakeAudioDecoder( AudioDecoderMultiChannelOpusConfig config, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { + std::optional /*codec_pair_id*/, + const FieldTrialsView* /* field_trials */) { return AudioDecoderMultiChannelOpusImpl::MakeAudioDecoder(config); } } // namespace webrtc diff --git a/api/audio_codecs/opus/audio_decoder_multi_channel_opus.h b/api/audio_codecs/opus/audio_decoder_multi_channel_opus.h index eafd6c6939..d9fc693fbb 100644 --- a/api/audio_codecs/opus/audio_decoder_multi_channel_opus.h +++ b/api/audio_codecs/opus/audio_decoder_multi_channel_opus.h @@ -12,9 +12,9 @@ #define API_AUDIO_CODECS_OPUS_AUDIO_DECODER_MULTI_CHANNEL_OPUS_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" @@ -28,12 +28,12 @@ namespace webrtc { // CreateAudioDecoderFactory<...>(). 
struct RTC_EXPORT AudioDecoderMultiChannelOpus { using Config = AudioDecoderMultiChannelOpusConfig; - static absl::optional SdpToConfig( + static std::optional SdpToConfig( const SdpAudioFormat& audio_format); static void AppendSupportedDecoders(std::vector* specs); static std::unique_ptr MakeAudioDecoder( AudioDecoderMultiChannelOpusConfig config, - absl::optional codec_pair_id = absl::nullopt, + std::optional codec_pair_id = std::nullopt, const FieldTrialsView* field_trials = nullptr); }; diff --git a/api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h b/api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h index f97c5c3193..a24e28e036 100644 --- a/api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h +++ b/api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h @@ -11,6 +11,7 @@ #ifndef API_AUDIO_CODECS_OPUS_AUDIO_DECODER_MULTI_CHANNEL_OPUS_CONFIG_H_ #define API_AUDIO_CODECS_OPUS_AUDIO_DECODER_MULTI_CHANNEL_OPUS_CONFIG_H_ +#include #include #include "api/audio_codecs/audio_decoder.h" diff --git a/api/audio_codecs/opus/audio_decoder_opus.cc b/api/audio_codecs/opus/audio_decoder_opus.cc index efc9a73546..bd6ed18d58 100644 --- a/api/audio_codecs/opus/audio_decoder_opus.cc +++ b/api/audio_codecs/opus/audio_decoder_opus.cc @@ -10,14 +10,30 @@ #include "api/audio_codecs/opus/audio_decoder_opus.h" +#include #include +#include +#include #include #include #include "absl/strings/match.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" #include "modules/audio_coding/codecs/opus/audio_decoder_opus.h" +#include "rtc_base/checks.h" namespace webrtc { +namespace { + +int GetDefaultNumChannels(const FieldTrialsView& field_trials) { + return field_trials.IsEnabled("WebRTC-Audio-OpusDecodeStereoByDefault") ? 2 + : 1; +} + +} // namespace bool AudioDecoderOpus::Config::IsOk() const { if (sample_rate_hz != 16000 && sample_rate_hz != 48000) { @@ -25,40 +41,37 @@ bool AudioDecoderOpus::Config::IsOk() const { // well; we can add support for them when needed.) return false; } - if (num_channels != 1 && num_channels != 2) { - return false; - } - return true; + return !num_channels.has_value() || *num_channels == 1 || *num_channels == 2; } -absl::optional AudioDecoderOpus::SdpToConfig( +std::optional AudioDecoderOpus::SdpToConfig( const SdpAudioFormat& format) { - const auto num_channels = [&]() -> absl::optional { - auto stereo = format.parameters.find("stereo"); - if (stereo != format.parameters.end()) { - if (stereo->second == "0") { - return 1; - } else if (stereo->second == "1") { - return 2; - } else { - return absl::nullopt; // Bad stereo parameter. - } - } - return 1; // Default to mono. - }(); - if (absl::EqualsIgnoreCase(format.name, "opus") && - format.clockrate_hz == 48000 && format.num_channels == 2 && - num_channels) { - Config config; - config.num_channels = *num_channels; - if (!config.IsOk()) { - RTC_DCHECK_NOTREACHED(); - return absl::nullopt; + if (!absl::EqualsIgnoreCase(format.name, "opus") || + format.clockrate_hz != 48000 || format.num_channels != 2) { + return std::nullopt; + } + + Config config; + + // Parse the "stereo" codec parameter. If set, it overrides the default number + // of channels. 
+ const auto stereo_param = format.parameters.find("stereo"); + if (stereo_param != format.parameters.end()) { + if (stereo_param->second == "0") { + config.num_channels = 1; + } else if (stereo_param->second == "1") { + config.num_channels = 2; + } else { + // Malformed stereo parameter. + return std::nullopt; } - return config; - } else { - return absl::nullopt; } + + if (!config.IsOk()) { + RTC_DCHECK_NOTREACHED(); + return std::nullopt; + } + return config; } void AudioDecoderOpus::AppendSupportedDecoders( @@ -72,15 +85,16 @@ void AudioDecoderOpus::AppendSupportedDecoders( } std::unique_ptr AudioDecoderOpus::MakeAudioDecoder( - Config config, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { + const Environment& env, + Config config) { if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); return nullptr; } - return std::make_unique(config.num_channels, - config.sample_rate_hz); + return std::make_unique( + env.field_trials(), + config.num_channels.value_or(GetDefaultNumChannels(env.field_trials())), + config.sample_rate_hz); } } // namespace webrtc diff --git a/api/audio_codecs/opus/audio_decoder_opus.h b/api/audio_codecs/opus/audio_decoder_opus.h index 138c0377df..50cf80de71 100644 --- a/api/audio_codecs/opus/audio_decoder_opus.h +++ b/api/audio_codecs/opus/audio_decoder_opus.h @@ -12,13 +12,13 @@ #define API_AUDIO_CODECS_OPUS_AUDIO_DECODER_OPUS_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" -#include "api/field_trials_view.h" +#include "api/environment/environment.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -29,14 +29,19 @@ struct RTC_EXPORT AudioDecoderOpus { struct Config { bool IsOk() const; // Checks if the values are currently OK. int sample_rate_hz = 48000; - int num_channels = 1; + std::optional num_channels; }; - static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static std::optional SdpToConfig(const SdpAudioFormat& audio_format); static void AppendSupportedDecoders(std::vector* specs); + + static std::unique_ptr MakeAudioDecoder(const Environment& env, + Config config); static std::unique_ptr MakeAudioDecoder( + const Environment& env, Config config, - absl::optional codec_pair_id = absl::nullopt, - const FieldTrialsView* field_trials = nullptr); + std::optional /*codec_pair_id*/) { + return MakeAudioDecoder(env, config); + } }; } // namespace webrtc diff --git a/api/audio_codecs/opus/audio_decoder_opus_unittest.cc b/api/audio_codecs/opus/audio_decoder_opus_unittest.cc new file mode 100644 index 0000000000..fdcb28c4b5 --- /dev/null +++ b/api/audio_codecs/opus/audio_decoder_opus_unittest.cc @@ -0,0 +1,112 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/audio_codecs/opus/audio_decoder_opus.h" + +#include +#include +#include + +#include "api/audio_codecs/audio_format.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "test/explicit_key_value_config.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using test::ExplicitKeyValueConfig; +using ::testing::Field; +using ::testing::Optional; +using Config = AudioDecoderOpus::Config; + +enum class StereoParam { kUnset, kMono, kStereo }; + +SdpAudioFormat GetSdpAudioFormat(StereoParam param) { + SdpAudioFormat format("opus", 48000, 2); + switch (param) { + case StereoParam::kUnset: + // Do nothing. + break; + case StereoParam::kMono: + format.parameters.emplace("stereo", "0"); + break; + case StereoParam::kStereo: + format.parameters.emplace("stereo", "1"); + break; + } + return format; +} + +constexpr int kDefaultNumChannels = 1; +constexpr int kAlternativeNumChannels = 2; + +TEST(AudioDecoderOpusTest, SdpToConfigDoesNotSetNumChannels) { + const std::optional config = + AudioDecoderOpus::SdpToConfig(GetSdpAudioFormat(StereoParam::kUnset)); + + EXPECT_THAT(config, Optional(Field(&Config::num_channels, std::nullopt))); +} + +TEST(AudioDecoderOpusTest, SdpToConfigForcesMono) { + const std::optional config = + AudioDecoderOpus::SdpToConfig(GetSdpAudioFormat(StereoParam::kMono)); + + EXPECT_THAT(config, Optional(Field(&Config::num_channels, 1))); +} + +TEST(AudioDecoderOpusTest, SdpToConfigForcesStereo) { + const std::optional config = + AudioDecoderOpus::SdpToConfig(GetSdpAudioFormat(StereoParam::kStereo)); + + EXPECT_THAT(config, Optional(Field(&Config::num_channels, 2))); +} + +TEST(AudioDecoderOpusTest, MakeAudioDecoderForcesDefaultNumChannels) { + const Environment env = CreateEnvironment(); + auto decoder = AudioDecoderOpus::MakeAudioDecoder( + env, /*config=*/{.num_channels = std::nullopt}); + + EXPECT_EQ(decoder->Channels(), static_cast(kDefaultNumChannels)); +} + +TEST(AudioDecoderOpusTest, MakeAudioDecoderCannotForceDefaultNumChannels) { + const Environment env = CreateEnvironment(); + auto decoder = AudioDecoderOpus::MakeAudioDecoder( + env, /*config=*/{.num_channels = kAlternativeNumChannels}); + + EXPECT_EQ(decoder->Channels(), static_cast(kAlternativeNumChannels)); +} + +TEST(AudioDecoderOpusTest, MakeAudioDecoderForcesStereo) { + const Environment env = + CreateEnvironment(std::make_unique( + "WebRTC-Audio-OpusDecodeStereoByDefault/Enabled/")); + auto decoder = AudioDecoderOpus::MakeAudioDecoder( + env, + /*config=*/{.num_channels = std::nullopt}); + + EXPECT_EQ(decoder->Channels(), 2u); +} + +TEST(AudioDecoderOpusTest, MakeAudioDecoderCannotForceStereo) { + const Environment env = + CreateEnvironment(std::make_unique( + "WebRTC-Audio-OpusDecodeStereoByDefault/Enabled/")); + auto decoder = + AudioDecoderOpus::MakeAudioDecoder(env, /*config=*/{.num_channels = 1}); + + EXPECT_EQ(decoder->Channels(), 1u); +} + +} // namespace +} // namespace webrtc diff --git a/api/audio_codecs/opus/audio_encoder_multi_channel_opus.cc b/api/audio_codecs/opus/audio_encoder_multi_channel_opus.cc index 14f480b1ec..dc749bb202 100644 --- a/api/audio_codecs/opus/audio_encoder_multi_channel_opus.cc +++ b/api/audio_codecs/opus/audio_encoder_multi_channel_opus.cc @@ -10,13 +10,21 @@ #include "api/audio_codecs/opus/audio_encoder_multi_channel_opus.h" +#include +#include #include +#include +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include 
"api/audio_codecs/audio_format.h" +#include "api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.h" +#include "api/field_trials_view.h" #include "modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.h" namespace webrtc { -absl::optional +std::optional AudioEncoderMultiChannelOpus::SdpToConfig(const SdpAudioFormat& format) { return AudioEncoderMultiChannelOpusImpl::SdpToConfig(format); } @@ -66,8 +74,8 @@ AudioCodecInfo AudioEncoderMultiChannelOpus::QueryAudioEncoder( std::unique_ptr AudioEncoderMultiChannelOpus::MakeAudioEncoder( const AudioEncoderMultiChannelOpusConfig& config, int payload_type, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { + std::optional /*codec_pair_id*/, + const FieldTrialsView* /* field_trials */) { return AudioEncoderMultiChannelOpusImpl::MakeAudioEncoder(config, payload_type); } diff --git a/api/audio_codecs/opus/audio_encoder_multi_channel_opus.h b/api/audio_codecs/opus/audio_encoder_multi_channel_opus.h index c1c4db3577..923e5c02cc 100644 --- a/api/audio_codecs/opus/audio_encoder_multi_channel_opus.h +++ b/api/audio_codecs/opus/audio_encoder_multi_channel_opus.h @@ -12,9 +12,9 @@ #define API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_MULTI_CHANNEL_OPUS_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_format.h" @@ -28,13 +28,13 @@ namespace webrtc { // CreateAudioEncoderFactory<...>(). struct RTC_EXPORT AudioEncoderMultiChannelOpus { using Config = AudioEncoderMultiChannelOpusConfig; - static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static std::optional SdpToConfig(const SdpAudioFormat& audio_format); static void AppendSupportedEncoders(std::vector* specs); static AudioCodecInfo QueryAudioEncoder(const Config& config); static std::unique_ptr MakeAudioEncoder( const Config& config, int payload_type, - absl::optional codec_pair_id = absl::nullopt, + std::optional codec_pair_id = std::nullopt, const FieldTrialsView* field_trials = nullptr); }; diff --git a/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.cc b/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.cc index e159bd77cf..d8367b45c9 100644 --- a/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.cc +++ b/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.cc @@ -10,6 +10,9 @@ #include "api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.h" +#include +#include + namespace webrtc { namespace { diff --git a/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.h b/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.h index 9b51246c15..8de807396c 100644 --- a/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.h +++ b/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.h @@ -15,8 +15,6 @@ #include -#include "absl/types/optional.h" -#include "api/audio_codecs/opus/audio_encoder_opus_config.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { diff --git a/api/audio_codecs/opus/audio_encoder_opus.cc b/api/audio_codecs/opus/audio_encoder_opus.cc index 5b6322da4c..95238250d1 100644 --- a/api/audio_codecs/opus/audio_encoder_opus.cc +++ b/api/audio_codecs/opus/audio_encoder_opus.cc @@ -10,11 +10,21 @@ #include "api/audio_codecs/opus/audio_encoder_opus.h" +#include +#include +#include + +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_encoder_factory.h" +#include 
"api/audio_codecs/audio_format.h" +#include "api/audio_codecs/opus/audio_encoder_opus_config.h" +#include "api/environment/environment.h" #include "modules/audio_coding/codecs/opus/audio_encoder_opus.h" +#include "rtc_base/checks.h" namespace webrtc { -absl::optional AudioEncoderOpus::SdpToConfig( +std::optional AudioEncoderOpus::SdpToConfig( const SdpAudioFormat& format) { return AudioEncoderOpusImpl::SdpToConfig(format); } @@ -30,15 +40,15 @@ AudioCodecInfo AudioEncoderOpus::QueryAudioEncoder( } std::unique_ptr AudioEncoderOpus::MakeAudioEncoder( + const Environment& env, const AudioEncoderOpusConfig& config, - int payload_type, - absl::optional /*codec_pair_id*/, - const FieldTrialsView* field_trials) { + const AudioEncoderFactory::Options& options) { if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); return nullptr; } - return AudioEncoderOpusImpl::MakeAudioEncoder(config, payload_type); + return std::make_unique(env, config, + options.payload_type); } } // namespace webrtc diff --git a/api/audio_codecs/opus/audio_encoder_opus.h b/api/audio_codecs/opus/audio_encoder_opus.h index df93ae5303..1a7a4ca204 100644 --- a/api/audio_codecs/opus/audio_encoder_opus.h +++ b/api/audio_codecs/opus/audio_encoder_opus.h @@ -12,14 +12,14 @@ #define API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_OPUS_H_ #include +#include #include -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_encoder_factory.h" #include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/opus/audio_encoder_opus_config.h" -#include "api/field_trials_view.h" +#include "api/environment/environment.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -28,15 +28,14 @@ namespace webrtc { // CreateAudioEncoderFactory<...>(). struct RTC_EXPORT AudioEncoderOpus { using Config = AudioEncoderOpusConfig; - static absl::optional SdpToConfig( + static std::optional SdpToConfig( const SdpAudioFormat& audio_format); static void AppendSupportedEncoders(std::vector* specs); static AudioCodecInfo QueryAudioEncoder(const AudioEncoderOpusConfig& config); static std::unique_ptr MakeAudioEncoder( + const Environment& env, const AudioEncoderOpusConfig& config, - int payload_type, - absl::optional codec_pair_id = absl::nullopt, - const FieldTrialsView* field_trials = nullptr); + const AudioEncoderFactory::Options& options); }; } // namespace webrtc diff --git a/api/audio_codecs/opus/audio_encoder_opus_config.h b/api/audio_codecs/opus/audio_encoder_opus_config.h index d5d7256c70..1fe5c18d76 100644 --- a/api/audio_codecs/opus/audio_encoder_opus_config.h +++ b/api/audio_codecs/opus/audio_encoder_opus_config.h @@ -13,9 +13,9 @@ #include +#include #include -#include "absl/types/optional.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -43,7 +43,7 @@ struct RTC_EXPORT AudioEncoderOpusConfig { // NOTE: This member must always be set. // TODO(kwiberg): Turn it into just an int. 
- absl::optional bitrate_bps; + std::optional bitrate_bps; bool fec_enabled; bool cbr_enabled; diff --git a/api/audio_codecs/opus_audio_decoder_factory.cc b/api/audio_codecs/opus_audio_decoder_factory.cc index ed68f2584e..c0a8aa251f 100644 --- a/api/audio_codecs/opus_audio_decoder_factory.cc +++ b/api/audio_codecs/opus_audio_decoder_factory.cc @@ -11,11 +11,17 @@ #include "api/audio_codecs/opus_audio_decoder_factory.h" #include +#include #include +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_decoder_factory_template.h" +#include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/opus/audio_decoder_multi_channel_opus.h" #include "api/audio_codecs/opus/audio_decoder_opus.h" +#include "api/scoped_refptr.h" namespace webrtc { @@ -25,23 +31,23 @@ namespace { template struct NotAdvertised { using Config = typename T::Config; - static absl::optional SdpToConfig( - const SdpAudioFormat& audio_format) { + static std::optional SdpToConfig(const SdpAudioFormat& audio_format) { return T::SdpToConfig(audio_format); } - static void AppendSupportedDecoders(std::vector* specs) { + static void AppendSupportedDecoders( + std::vector* /* specs */) { // Don't advertise support for anything. } static std::unique_ptr MakeAudioDecoder( const Config& config, - absl::optional codec_pair_id = absl::nullopt) { + std::optional codec_pair_id = std::nullopt) { return T::MakeAudioDecoder(config, codec_pair_id); } }; } // namespace -rtc::scoped_refptr CreateOpusAudioDecoderFactory() { +scoped_refptr CreateOpusAudioDecoderFactory() { return CreateAudioDecoderFactory< AudioDecoderOpus, NotAdvertised>(); } diff --git a/api/audio_codecs/opus_audio_decoder_factory.h b/api/audio_codecs/opus_audio_decoder_factory.h index b4f497f8ff..90917fbef5 100644 --- a/api/audio_codecs/opus_audio_decoder_factory.h +++ b/api/audio_codecs/opus_audio_decoder_factory.h @@ -19,7 +19,7 @@ namespace webrtc { // Creates a new factory that can create only Opus audio decoders. Works like // CreateAudioDecoderFactory(), but is easier to use and is // not inline because it isn't a template. 
-rtc::scoped_refptr CreateOpusAudioDecoderFactory(); +scoped_refptr CreateOpusAudioDecoderFactory(); } // namespace webrtc diff --git a/api/audio_codecs/opus_audio_encoder_factory.cc b/api/audio_codecs/opus_audio_encoder_factory.cc index 8c286f21e1..411d0cab0f 100644 --- a/api/audio_codecs/opus_audio_encoder_factory.cc +++ b/api/audio_codecs/opus_audio_encoder_factory.cc @@ -11,11 +11,18 @@ #include "api/audio_codecs/opus_audio_encoder_factory.h" #include +#include #include +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_encoder_factory.h" #include "api/audio_codecs/audio_encoder_factory_template.h" +#include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/opus/audio_encoder_multi_channel_opus.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" +#include "api/field_trials_view.h" +#include "api/scoped_refptr.h" namespace webrtc { namespace { @@ -24,11 +31,11 @@ namespace { template struct NotAdvertised { using Config = typename T::Config; - static absl::optional SdpToConfig( - const SdpAudioFormat& audio_format) { + static std::optional SdpToConfig(const SdpAudioFormat& audio_format) { return T::SdpToConfig(audio_format); } - static void AppendSupportedEncoders(std::vector* specs) { + static void AppendSupportedEncoders( + std::vector* /* specs */) { // Don't advertise support for anything. } static AudioCodecInfo QueryAudioEncoder(const Config& config) { @@ -37,7 +44,7 @@ struct NotAdvertised { static std::unique_ptr MakeAudioEncoder( const Config& config, int payload_type, - absl::optional codec_pair_id = absl::nullopt, + std::optional codec_pair_id = std::nullopt, const FieldTrialsView* field_trials = nullptr) { return T::MakeAudioEncoder(config, payload_type, codec_pair_id, field_trials); @@ -46,7 +53,7 @@ struct NotAdvertised { } // namespace -rtc::scoped_refptr CreateOpusAudioEncoderFactory() { +scoped_refptr CreateOpusAudioEncoderFactory() { return CreateAudioEncoderFactory< AudioEncoderOpus, NotAdvertised>(); } diff --git a/api/audio_codecs/opus_audio_encoder_factory.h b/api/audio_codecs/opus_audio_encoder_factory.h index 8c1683b6f5..90c82c266d 100644 --- a/api/audio_codecs/opus_audio_encoder_factory.h +++ b/api/audio_codecs/opus_audio_encoder_factory.h @@ -19,7 +19,7 @@ namespace webrtc { // Creates a new factory that can create only Opus audio encoders. Works like // CreateAudioEncoderFactory(), but is easier to use and is // not inline because it isn't a template. 
-rtc::scoped_refptr CreateOpusAudioEncoderFactory(); +scoped_refptr CreateOpusAudioEncoderFactory(); } // namespace webrtc diff --git a/api/audio_codecs/test/BUILD.gn b/api/audio_codecs/test/BUILD.gn index 89f5fef1ea..40714b196e 100644 --- a/api/audio_codecs/test/BUILD.gn +++ b/api/audio_codecs/test/BUILD.gn @@ -21,17 +21,18 @@ if (rtc_include_tests) { ] deps = [ "..:audio_codecs_api", + "../..:make_ref_counted", + "../..:scoped_refptr", "../../../test:audio_codec_mocks", - "../../../test:scoped_key_value_config", "../../../test:test_support", + "../../environment", + "../../environment:environment_factory", "../L16:audio_decoder_L16", "../L16:audio_encoder_L16", "../g711:audio_decoder_g711", "../g711:audio_encoder_g711", "../g722:audio_decoder_g722", "../g722:audio_encoder_g722", - "../ilbc:audio_decoder_ilbc", - "../ilbc:audio_encoder_ilbc", "../opus:audio_decoder_opus", "../opus:audio_encoder_opus", ] diff --git a/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc b/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc index 0b18cf934a..50768b930a 100644 --- a/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc +++ b/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc @@ -11,21 +11,35 @@ #include "api/audio_codecs/audio_decoder_factory_template.h" #include +#include +#include +#include #include "api/audio_codecs/L16/audio_decoder_L16.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/g711/audio_decoder_g711.h" #include "api/audio_codecs/g722/audio_decoder_g722.h" -#include "api/audio_codecs/ilbc/audio_decoder_ilbc.h" #include "api/audio_codecs/opus/audio_decoder_opus.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/mock_audio_decoder.h" -#include "test/scoped_key_value_config.h" namespace webrtc { - namespace { +using ::testing::NiceMock; +using ::testing::NotNull; +using ::testing::Pointer; +using ::testing::Property; +using ::testing::Return; + struct BogusParams { static SdpAudioFormat AudioFormat() { return {"bogus", 8000, 1}; } static AudioCodecInfo CodecInfo() { return {8000, 1, 12345}; } @@ -44,13 +58,12 @@ struct AudioDecoderFakeApi { SdpAudioFormat audio_format; }; - static absl::optional SdpToConfig( - const SdpAudioFormat& audio_format) { + static std::optional SdpToConfig(const SdpAudioFormat& audio_format) { if (Params::AudioFormat() == audio_format) { Config config = {audio_format}; return config; } else { - return absl::nullopt; + return std::nullopt; } } @@ -64,7 +77,7 @@ struct AudioDecoderFakeApi { static std::unique_ptr MakeAudioDecoder( const Config&, - absl::optional /*codec_pair_id*/ = absl::nullopt) { + std::optional /*codec_pair_id*/ = std::nullopt) { auto dec = std::make_unique>(); EXPECT_CALL(*dec, SampleRateHz()) .WillOnce(::testing::Return(Params::CodecInfo().sample_rate_hz)); @@ -73,35 +86,100 @@ struct AudioDecoderFakeApi { } }; -} // namespace +// Trait to pass as template parameter to `CreateAudioDecoderFactory` with +// all the functions except the functions to create the audio decoder. 
+struct BaseAudioDecoderApi { + struct Config {}; + + static SdpAudioFormat AudioFormat() { return {"fake", 16'000, 2, {}}; } + + static std::optional SdpToConfig( + const SdpAudioFormat& /* audio_format */) { + return Config(); + } + + static void AppendSupportedDecoders(std::vector* specs) { + specs->push_back({.format = AudioFormat(), .info = {16'000, 2, 23456}}); + } +}; + +struct TraitWithTwoMakeAudioDecoders : BaseAudioDecoderApi { + // Create decoders with different sample rates depending on whether they are + // created through one or the other `MakeAudioDecoder` overload, so that a + // test may detect which method was used. + static constexpr int kRateWithoutEnv = 10'000; + static constexpr int kRateWithEnv = 20'000; + + static std::unique_ptr MakeAudioDecoder( + const Config& /* config */, + std::optional /* codec_pair_id */) { + auto decoder = std::make_unique>(); + ON_CALL(*decoder, SampleRateHz).WillByDefault(Return(kRateWithoutEnv)); + return decoder; + } + + static std::unique_ptr MakeAudioDecoder( + const Environment& /* env */, + const Config& /* config */, + std::optional /* codec_pair_id */) { + auto decoder = std::make_unique>(); + ON_CALL(*decoder, SampleRateHz).WillByDefault(Return(kRateWithEnv)); + return decoder; + } +}; + +TEST(AudioDecoderFactoryTemplateTest, + PrefersToPassEnvironmentToMakeAudioDecoder) { + const Environment env = CreateEnvironment(); + auto factory = CreateAudioDecoderFactory(); + + EXPECT_THAT(factory->Create(env, BaseAudioDecoderApi::AudioFormat(), {}), + Pointer(Property(&AudioDecoder::SampleRateHz, + TraitWithTwoMakeAudioDecoders::kRateWithEnv))); +} + +struct AudioDecoderApiWithV1Make : BaseAudioDecoderApi { + static std::unique_ptr MakeAudioDecoder( + const Config& /* config */, + std::optional /* codec_pair_id */) { + return std::make_unique>(); + } +}; + +TEST(AudioDecoderFactoryTemplateTest, + CanUseMakeAudioDecoderWithoutPassingEnvironment) { + const Environment env = CreateEnvironment(); + auto factory = CreateAudioDecoderFactory(); + EXPECT_THAT(factory->Create(env, BaseAudioDecoderApi::AudioFormat(), {}), + NotNull()); +} TEST(AudioDecoderFactoryTemplateTest, NoDecoderTypes) { - test::ScopedKeyValueConfig field_trials; - rtc::scoped_refptr factory( - rtc::make_ref_counted< - audio_decoder_factory_template_impl::AudioDecoderFactoryT<>>( - &field_trials)); + const Environment env = CreateEnvironment(); + scoped_refptr factory( + make_ref_counted< + audio_decoder_factory_template_impl::AudioDecoderFactoryT<>>()); EXPECT_THAT(factory->GetSupportedDecoders(), ::testing::IsEmpty()); EXPECT_FALSE(factory->IsSupportedDecoder({"foo", 8000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioDecoder({"bar", 16000, 1}, absl::nullopt)); + EXPECT_EQ(nullptr, factory->Create(env, {"bar", 16000, 1}, std::nullopt)); } TEST(AudioDecoderFactoryTemplateTest, OneDecoderType) { + const Environment env = CreateEnvironment(); auto factory = CreateAudioDecoderFactory>(); EXPECT_THAT(factory->GetSupportedDecoders(), ::testing::ElementsAre( AudioCodecSpec{{"bogus", 8000, 1}, {8000, 1, 12345}})); EXPECT_FALSE(factory->IsSupportedDecoder({"foo", 8000, 1})); EXPECT_TRUE(factory->IsSupportedDecoder({"bogus", 8000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioDecoder({"bar", 16000, 1}, absl::nullopt)); - auto dec = factory->MakeAudioDecoder({"bogus", 8000, 1}, absl::nullopt); + EXPECT_EQ(nullptr, factory->Create(env, {"bar", 16000, 1}, std::nullopt)); + auto dec = factory->Create(env, {"bogus", 8000, 1}, std::nullopt); ASSERT_NE(nullptr, dec); EXPECT_EQ(8000, dec->SampleRateHz()); }
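The tests above show the factory template's new contract: callers pass an Environment to Create(env, format, codec_pair_id) instead of calling MakeAudioDecoder(), and when a codec trait provides both MakeAudioDecoder overloads the Environment-taking one is preferred. A minimal usage sketch follows for reference; it is illustrative and not part of this CL. It assumes only headers touched in this patch, and the function name CreateOpusDecoderSketch is invented for the example; the trait, factory call, SDP format, and field-trial string all come from the patch itself.

#include <memory>
#include <optional>

#include "api/audio_codecs/audio_decoder.h"
#include "api/audio_codecs/audio_decoder_factory_template.h"
#include "api/audio_codecs/audio_format.h"
#include "api/audio_codecs/opus/audio_decoder_opus.h"
#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"

// Builds a decoder factory from the AudioDecoderOpus trait and asks it for a
// 48 kHz stereo Opus decoder via the Environment-based Create() path that this
// patch introduces. When the SDP has no "stereo" parameter, AudioDecoderOpus
// leaves Config::num_channels unset and the channel count falls back to the
// "WebRTC-Audio-OpusDecodeStereoByDefault" field trial (mono if it is not
// enabled), as exercised by audio_decoder_opus_unittest.cc above.
std::unique_ptr<webrtc::AudioDecoder> CreateOpusDecoderSketch() {
  const webrtc::Environment env = webrtc::CreateEnvironment();
  auto factory = webrtc::CreateAudioDecoderFactory<webrtc::AudioDecoderOpus>();
  return factory->Create(env, webrtc::SdpAudioFormat("opus", 48000, 2),
                         /*codec_pair_id=*/std::nullopt);
}

When only Opus is needed, the non-template helper CreateOpusAudioDecoderFactory() from api/audio_codecs/opus_audio_decoder_factory.h builds the same factory without instantiating the template in the caller, which is exactly why that helper exists per its header comment.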
TEST(AudioDecoderFactoryTemplateTest, TwoDecoderTypes) { + const Environment env = CreateEnvironment(); auto factory = CreateAudioDecoderFactory, AudioDecoderFakeApi>(); EXPECT_THAT(factory->GetSupportedDecoders(), @@ -113,20 +191,19 @@ TEST(AudioDecoderFactoryTemplateTest, TwoDecoderTypes) { EXPECT_TRUE(factory->IsSupportedDecoder({"bogus", 8000, 1})); EXPECT_TRUE( factory->IsSupportedDecoder({"sham", 16000, 2, {{"param", "value"}}})); - EXPECT_EQ(nullptr, - factory->MakeAudioDecoder({"bar", 16000, 1}, absl::nullopt)); - auto dec1 = factory->MakeAudioDecoder({"bogus", 8000, 1}, absl::nullopt); + EXPECT_EQ(nullptr, factory->Create(env, {"bar", 16000, 1}, std::nullopt)); + auto dec1 = factory->Create(env, {"bogus", 8000, 1}, std::nullopt); ASSERT_NE(nullptr, dec1); EXPECT_EQ(8000, dec1->SampleRateHz()); - EXPECT_EQ(nullptr, - factory->MakeAudioDecoder({"sham", 16000, 2}, absl::nullopt)); - auto dec2 = factory->MakeAudioDecoder( - {"sham", 16000, 2, {{"param", "value"}}}, absl::nullopt); + EXPECT_EQ(nullptr, factory->Create(env, {"sham", 16000, 2}, std::nullopt)); + auto dec2 = factory->Create(env, {"sham", 16000, 2, {{"param", "value"}}}, + std::nullopt); ASSERT_NE(nullptr, dec2); EXPECT_EQ(16000, dec2->SampleRateHz()); } TEST(AudioDecoderFactoryTemplateTest, G711) { + const Environment env = CreateEnvironment(); auto factory = CreateAudioDecoderFactory(); EXPECT_THAT(factory->GetSupportedDecoders(), ::testing::ElementsAre( @@ -135,52 +212,38 @@ TEST(AudioDecoderFactoryTemplateTest, G711) { EXPECT_FALSE(factory->IsSupportedDecoder({"G711", 8000, 1})); EXPECT_TRUE(factory->IsSupportedDecoder({"PCMU", 8000, 1})); EXPECT_TRUE(factory->IsSupportedDecoder({"pcma", 8000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioDecoder({"pcmu", 16000, 1}, absl::nullopt)); - auto dec1 = factory->MakeAudioDecoder({"pcmu", 8000, 1}, absl::nullopt); + EXPECT_EQ(nullptr, factory->Create(env, {"pcmu", 16000, 1}, std::nullopt)); + auto dec1 = factory->Create(env, {"pcmu", 8000, 1}, std::nullopt); ASSERT_NE(nullptr, dec1); EXPECT_EQ(8000, dec1->SampleRateHz()); - auto dec2 = factory->MakeAudioDecoder({"PCMA", 8000, 1}, absl::nullopt); + auto dec2 = factory->Create(env, {"PCMA", 8000, 1}, std::nullopt); ASSERT_NE(nullptr, dec2); EXPECT_EQ(8000, dec2->SampleRateHz()); } TEST(AudioDecoderFactoryTemplateTest, G722) { + const Environment env = CreateEnvironment(); auto factory = CreateAudioDecoderFactory(); EXPECT_THAT(factory->GetSupportedDecoders(), ::testing::ElementsAre( AudioCodecSpec{{"G722", 8000, 1}, {16000, 1, 64000}})); EXPECT_FALSE(factory->IsSupportedDecoder({"foo", 8000, 1})); EXPECT_TRUE(factory->IsSupportedDecoder({"G722", 8000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioDecoder({"bar", 16000, 1}, absl::nullopt)); - auto dec1 = factory->MakeAudioDecoder({"G722", 8000, 1}, absl::nullopt); + EXPECT_EQ(nullptr, factory->Create(env, {"bar", 16000, 1}, std::nullopt)); + auto dec1 = factory->Create(env, {"G722", 8000, 1}, std::nullopt); ASSERT_NE(nullptr, dec1); EXPECT_EQ(16000, dec1->SampleRateHz()); EXPECT_EQ(1u, dec1->Channels()); - auto dec2 = factory->MakeAudioDecoder({"G722", 8000, 2}, absl::nullopt); + auto dec2 = factory->Create(env, {"G722", 8000, 2}, std::nullopt); ASSERT_NE(nullptr, dec2); EXPECT_EQ(16000, dec2->SampleRateHz()); EXPECT_EQ(2u, dec2->Channels()); - auto dec3 = factory->MakeAudioDecoder({"G722", 8000, 3}, absl::nullopt); + auto dec3 = factory->Create(env, {"G722", 8000, 3}, std::nullopt); ASSERT_EQ(nullptr, dec3); } -TEST(AudioDecoderFactoryTemplateTest, Ilbc) { - auto factory = 
CreateAudioDecoderFactory(); - EXPECT_THAT(factory->GetSupportedDecoders(), - ::testing::ElementsAre( - AudioCodecSpec{{"ILBC", 8000, 1}, {8000, 1, 13300}})); - EXPECT_FALSE(factory->IsSupportedDecoder({"foo", 8000, 1})); - EXPECT_TRUE(factory->IsSupportedDecoder({"ilbc", 8000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioDecoder({"bar", 8000, 1}, absl::nullopt)); - auto dec = factory->MakeAudioDecoder({"ilbc", 8000, 1}, absl::nullopt); - ASSERT_NE(nullptr, dec); - EXPECT_EQ(8000, dec->SampleRateHz()); -} - TEST(AudioDecoderFactoryTemplateTest, L16) { + const Environment env = CreateEnvironment(); auto factory = CreateAudioDecoderFactory(); EXPECT_THAT( factory->GetSupportedDecoders(), @@ -194,14 +257,14 @@ TEST(AudioDecoderFactoryTemplateTest, L16) { EXPECT_FALSE(factory->IsSupportedDecoder({"foo", 8000, 1})); EXPECT_TRUE(factory->IsSupportedDecoder({"L16", 48000, 1})); EXPECT_FALSE(factory->IsSupportedDecoder({"L16", 96000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioDecoder({"L16", 8000, 0}, absl::nullopt)); - auto dec = factory->MakeAudioDecoder({"L16", 48000, 2}, absl::nullopt); + EXPECT_EQ(nullptr, factory->Create(env, {"L16", 8000, 0}, std::nullopt)); + auto dec = factory->Create(env, {"L16", 48000, 2}, std::nullopt); ASSERT_NE(nullptr, dec); EXPECT_EQ(48000, dec->SampleRateHz()); } TEST(AudioDecoderFactoryTemplateTest, Opus) { + const Environment env = CreateEnvironment(); auto factory = CreateAudioDecoderFactory(); AudioCodecInfo opus_info{48000, 1, 64000, 6000, 510000}; opus_info.allow_comfort_noise = false; @@ -212,11 +275,11 @@ TEST(AudioDecoderFactoryTemplateTest, Opus) { ::testing::ElementsAre(AudioCodecSpec{opus_format, opus_info})); EXPECT_FALSE(factory->IsSupportedDecoder({"opus", 48000, 1})); EXPECT_TRUE(factory->IsSupportedDecoder({"opus", 48000, 2})); - EXPECT_EQ(nullptr, - factory->MakeAudioDecoder({"bar", 16000, 1}, absl::nullopt)); - auto dec = factory->MakeAudioDecoder({"opus", 48000, 2}, absl::nullopt); + EXPECT_EQ(nullptr, factory->Create(env, {"bar", 16000, 1}, std::nullopt)); + auto dec = factory->Create(env, {"opus", 48000, 2}, std::nullopt); ASSERT_NE(nullptr, dec); EXPECT_EQ(48000, dec->SampleRateHz()); } +} // namespace } // namespace webrtc diff --git a/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc b/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc index dbba387724..fc26198db4 100644 --- a/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc +++ b/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc @@ -11,21 +11,35 @@ #include "api/audio_codecs/audio_encoder_factory_template.h" #include +#include +#include +#include #include "api/audio_codecs/L16/audio_encoder_L16.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/g711/audio_encoder_g711.h" #include "api/audio_codecs/g722/audio_encoder_g722.h" -#include "api/audio_codecs/ilbc/audio_encoder_ilbc.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/mock_audio_encoder.h" -#include "test/scoped_key_value_config.h" namespace webrtc { - namespace { +using ::testing::IsNull; +using ::testing::NiceMock; +using ::testing::Pointer; +using ::testing::Property; 
+using ::testing::Return; + struct BogusParams { static SdpAudioFormat AudioFormat() { return {"bogus", 8000, 1}; } static AudioCodecInfo CodecInfo() { return {8000, 1, 12345}; } @@ -44,13 +58,12 @@ struct AudioEncoderFakeApi { SdpAudioFormat audio_format; }; - static absl::optional SdpToConfig( - const SdpAudioFormat& audio_format) { + static std::optional SdpToConfig(const SdpAudioFormat& audio_format) { if (Params::AudioFormat() == audio_format) { Config config = {audio_format}; return config; } else { - return absl::nullopt; + return std::nullopt; } } @@ -64,8 +77,8 @@ struct AudioEncoderFakeApi { static std::unique_ptr MakeAudioEncoder( const Config&, - int payload_type, - absl::optional /*codec_pair_id*/ = absl::nullopt) { + int /* payload_type */, + std::optional /*codec_pair_id*/ = std::nullopt) { auto enc = std::make_unique>(); EXPECT_CALL(*enc, SampleRateHz()) .WillOnce(::testing::Return(Params::CodecInfo().sample_rate_hz)); @@ -73,36 +86,130 @@ struct AudioEncoderFakeApi { } }; -} // namespace +// Trait to pass as template parameter to `CreateAudioEncoderFactory` with +// all the functions except the functions to create the audio encoder. +struct BaseAudioEncoderApi { + // Create Encoders with different sample rates depending if it is created + // through V1 or V2 method so that a test may detect which method was used. + static constexpr int kV1SameRate = 10'000; + static constexpr int kV2SameRate = 20'000; + + struct Config {}; + + static SdpAudioFormat AudioFormat() { return {"fake", 16'000, 2, {}}; } + static AudioCodecInfo CodecInfo() { return {16'000, 2, 23456}; } + + static std::optional SdpToConfig( + const SdpAudioFormat& /* audio_format */) { + return Config(); + } + + static void AppendSupportedEncoders(std::vector* specs) { + specs->push_back({AudioFormat(), CodecInfo()}); + } + + static AudioCodecInfo QueryAudioEncoder(const Config&) { return CodecInfo(); } +}; + +struct AudioEncoderApiWithV1Make : BaseAudioEncoderApi { + static std::unique_ptr MakeAudioEncoder( + const Config&, + int /* payload_type */, + std::optional /* codec_pair_id */) { + auto encoder = std::make_unique>(); + ON_CALL(*encoder, SampleRateHz).WillByDefault(Return(kV1SameRate)); + return encoder; + } +}; + +struct AudioEncoderApiWithV2Make : BaseAudioEncoderApi { + static std::unique_ptr MakeAudioEncoder( + const Environment& /* env */, + const Config& /* config */, + const AudioEncoderFactory::Options& /* options */) { + auto encoder = std::make_unique>(); + ON_CALL(*encoder, SampleRateHz).WillByDefault(Return(kV2SameRate)); + return encoder; + } +}; + +struct AudioEncoderApiWithBothV1AndV2Make : BaseAudioEncoderApi { + static std::unique_ptr MakeAudioEncoder( + const Config&, + int /* payload_type */, + std::optional /* codec_pair_id */) { + auto encoder = std::make_unique>(); + ON_CALL(*encoder, SampleRateHz).WillByDefault(Return(kV1SameRate)); + return encoder; + } + + static std::unique_ptr MakeAudioEncoder( + const Environment& /* env */, + const Config& /* config */, + const AudioEncoderFactory::Options& /* options */) { + auto encoder = std::make_unique>(); + ON_CALL(*encoder, SampleRateHz).WillByDefault(Return(kV2SameRate)); + return encoder; + } +}; + +TEST(AudioEncoderFactoryTemplateTest, + UsesV1MakeAudioEncoderWhenV2IsNotAvailable) { + const Environment env = CreateEnvironment(); + auto factory = CreateAudioEncoderFactory(); + + EXPECT_THAT(factory->Create(env, BaseAudioEncoderApi::AudioFormat(), {}), + Pointer(Property(&AudioEncoder::SampleRateHz, + 
BaseAudioEncoderApi::kV1SameRate))); +} + +TEST(AudioEncoderFactoryTemplateTest, + PreferV2MakeAudioEncoderWhenBothAreAvailable) { + const Environment env = CreateEnvironment(); + auto factory = + CreateAudioEncoderFactory(); + + EXPECT_THAT(factory->Create(env, BaseAudioEncoderApi::AudioFormat(), {}), + Pointer(Property(&AudioEncoder::SampleRateHz, + BaseAudioEncoderApi::kV2SameRate))); +} + +TEST(AudioEncoderFactoryTemplateTest, CanUseTraitWithOnlyV2MakeAudioEncoder) { + const Environment env = CreateEnvironment(); + auto factory = CreateAudioEncoderFactory(); + EXPECT_THAT(factory->Create(env, BaseAudioEncoderApi::AudioFormat(), {}), + Pointer(Property(&AudioEncoder::SampleRateHz, + BaseAudioEncoderApi::kV2SameRate))); +} TEST(AudioEncoderFactoryTemplateTest, NoEncoderTypes) { - test::ScopedKeyValueConfig field_trials; - rtc::scoped_refptr factory( - rtc::make_ref_counted< - audio_encoder_factory_template_impl::AudioEncoderFactoryT<>>( - &field_trials)); + const Environment env = CreateEnvironment(); + scoped_refptr factory( + make_ref_counted< + audio_encoder_factory_template_impl::AudioEncoderFactoryT<>>()); EXPECT_THAT(factory->GetSupportedEncoders(), ::testing::IsEmpty()); - EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioEncoder(17, {"bar", 16000, 1}, absl::nullopt)); + EXPECT_EQ(std::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1})); + + EXPECT_THAT(factory->Create(env, {"bar", 16000, 1}, {}), IsNull()); } TEST(AudioEncoderFactoryTemplateTest, OneEncoderType) { + const Environment env = CreateEnvironment(); auto factory = CreateAudioEncoderFactory>(); EXPECT_THAT(factory->GetSupportedEncoders(), ::testing::ElementsAre( AudioCodecSpec{{"bogus", 8000, 1}, {8000, 1, 12345}})); - EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1})); + EXPECT_EQ(std::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1})); EXPECT_EQ(AudioCodecInfo(8000, 1, 12345), factory->QueryAudioEncoder({"bogus", 8000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioEncoder(17, {"bar", 16000, 1}, absl::nullopt)); - auto enc = factory->MakeAudioEncoder(17, {"bogus", 8000, 1}, absl::nullopt); - ASSERT_NE(nullptr, enc); - EXPECT_EQ(8000, enc->SampleRateHz()); + + EXPECT_THAT(factory->Create(env, {"bar", 16000, 1}, {}), IsNull()); + EXPECT_THAT(factory->Create(env, {"bogus", 8000, 1}, {}), + Pointer(Property(&AudioEncoder::SampleRateHz, 8000))); } TEST(AudioEncoderFactoryTemplateTest, TwoEncoderTypes) { + const Environment env = CreateEnvironment(); auto factory = CreateAudioEncoderFactory, AudioEncoderFakeApi>(); EXPECT_THAT(factory->GetSupportedEncoders(), @@ -110,75 +217,57 @@ TEST(AudioEncoderFactoryTemplateTest, TwoEncoderTypes) { AudioCodecSpec{{"bogus", 8000, 1}, {8000, 1, 12345}}, AudioCodecSpec{{"sham", 16000, 2, {{"param", "value"}}}, {16000, 2, 23456}})); - EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1})); + EXPECT_EQ(std::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1})); EXPECT_EQ(AudioCodecInfo(8000, 1, 12345), factory->QueryAudioEncoder({"bogus", 8000, 1})); EXPECT_EQ( AudioCodecInfo(16000, 2, 23456), factory->QueryAudioEncoder({"sham", 16000, 2, {{"param", "value"}}})); - EXPECT_EQ(nullptr, - factory->MakeAudioEncoder(17, {"bar", 16000, 1}, absl::nullopt)); - auto enc1 = factory->MakeAudioEncoder(17, {"bogus", 8000, 1}, absl::nullopt); - ASSERT_NE(nullptr, enc1); - EXPECT_EQ(8000, enc1->SampleRateHz()); - EXPECT_EQ(nullptr, - factory->MakeAudioEncoder(17, {"sham", 16000, 2}, absl::nullopt)); 
- auto enc2 = factory->MakeAudioEncoder( - 17, {"sham", 16000, 2, {{"param", "value"}}}, absl::nullopt); - ASSERT_NE(nullptr, enc2); - EXPECT_EQ(16000, enc2->SampleRateHz()); + + EXPECT_THAT(factory->Create(env, {"bar", 16000, 1}, {}), IsNull()); + EXPECT_THAT(factory->Create(env, {"bogus", 8000, 1}, {}), + Pointer(Property(&AudioEncoder::SampleRateHz, 8000))); + EXPECT_THAT(factory->Create(env, {"sham", 16000, 2}, {}), IsNull()); + EXPECT_THAT( + factory->Create(env, {"sham", 16000, 2, {{"param", "value"}}}, {}), + Pointer(Property(&AudioEncoder::SampleRateHz, 16000))); } TEST(AudioEncoderFactoryTemplateTest, G711) { + const Environment env = CreateEnvironment(); auto factory = CreateAudioEncoderFactory(); EXPECT_THAT(factory->GetSupportedEncoders(), ::testing::ElementsAre( AudioCodecSpec{{"PCMU", 8000, 1}, {8000, 1, 64000}}, AudioCodecSpec{{"PCMA", 8000, 1}, {8000, 1, 64000}})); - EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"PCMA", 16000, 1})); + EXPECT_EQ(std::nullopt, factory->QueryAudioEncoder({"PCMA", 16000, 1})); EXPECT_EQ(AudioCodecInfo(8000, 1, 64000), factory->QueryAudioEncoder({"PCMA", 8000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioEncoder(17, {"PCMU", 16000, 1}, absl::nullopt)); - auto enc1 = factory->MakeAudioEncoder(17, {"PCMU", 8000, 1}, absl::nullopt); - ASSERT_NE(nullptr, enc1); - EXPECT_EQ(8000, enc1->SampleRateHz()); - auto enc2 = factory->MakeAudioEncoder(17, {"PCMA", 8000, 1}, absl::nullopt); - ASSERT_NE(nullptr, enc2); - EXPECT_EQ(8000, enc2->SampleRateHz()); + + EXPECT_THAT(factory->Create(env, {"PCMU", 16000, 1}, {}), IsNull()); + EXPECT_THAT(factory->Create(env, {"PCMU", 8000, 1}, {}), + Pointer(Property(&AudioEncoder::SampleRateHz, 8000))); + EXPECT_THAT(factory->Create(env, {"PCMA", 8000, 1}, {}), + Pointer(Property(&AudioEncoder::SampleRateHz, 8000))); } TEST(AudioEncoderFactoryTemplateTest, G722) { + const Environment env = CreateEnvironment(); auto factory = CreateAudioEncoderFactory(); EXPECT_THAT(factory->GetSupportedEncoders(), ::testing::ElementsAre( AudioCodecSpec{{"G722", 8000, 1}, {16000, 1, 64000}})); - EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1})); + EXPECT_EQ(std::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1})); EXPECT_EQ(AudioCodecInfo(16000, 1, 64000), factory->QueryAudioEncoder({"G722", 8000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioEncoder(17, {"bar", 16000, 1}, absl::nullopt)); - auto enc = factory->MakeAudioEncoder(17, {"G722", 8000, 1}, absl::nullopt); - ASSERT_NE(nullptr, enc); - EXPECT_EQ(16000, enc->SampleRateHz()); -} -TEST(AudioEncoderFactoryTemplateTest, Ilbc) { - auto factory = CreateAudioEncoderFactory(); - EXPECT_THAT(factory->GetSupportedEncoders(), - ::testing::ElementsAre( - AudioCodecSpec{{"ILBC", 8000, 1}, {8000, 1, 13333}})); - EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1})); - EXPECT_EQ(AudioCodecInfo(8000, 1, 13333), - factory->QueryAudioEncoder({"ilbc", 8000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioEncoder(17, {"bar", 8000, 1}, absl::nullopt)); - auto enc = factory->MakeAudioEncoder(17, {"ilbc", 8000, 1}, absl::nullopt); - ASSERT_NE(nullptr, enc); - EXPECT_EQ(8000, enc->SampleRateHz()); + EXPECT_THAT(factory->Create(env, {"bar", 16000, 1}, {}), IsNull()); + EXPECT_THAT(factory->Create(env, {"G722", 8000, 1}, {}), + Pointer(Property(&AudioEncoder::SampleRateHz, 16000))); } TEST(AudioEncoderFactoryTemplateTest, L16) { + const Environment env = CreateEnvironment(); auto factory = CreateAudioEncoderFactory(); EXPECT_THAT( 
factory->GetSupportedEncoders(), @@ -189,17 +278,17 @@ TEST(AudioEncoderFactoryTemplateTest, L16) { AudioCodecSpec{{"L16", 8000, 2}, {8000, 2, 8000 * 16 * 2}}, AudioCodecSpec{{"L16", 16000, 2}, {16000, 2, 16000 * 16 * 2}}, AudioCodecSpec{{"L16", 32000, 2}, {32000, 2, 32000 * 16 * 2}})); - EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"L16", 8000, 0})); + EXPECT_EQ(std::nullopt, factory->QueryAudioEncoder({"L16", 8000, 0})); EXPECT_EQ(AudioCodecInfo(48000, 1, 48000 * 16), factory->QueryAudioEncoder({"L16", 48000, 1})); - EXPECT_EQ(nullptr, - factory->MakeAudioEncoder(17, {"L16", 8000, 0}, absl::nullopt)); - auto enc = factory->MakeAudioEncoder(17, {"L16", 48000, 2}, absl::nullopt); - ASSERT_NE(nullptr, enc); - EXPECT_EQ(48000, enc->SampleRateHz()); + + EXPECT_THAT(factory->Create(env, {"L16", 8000, 0}, {}), IsNull()); + EXPECT_THAT(factory->Create(env, {"L16", 48000, 2}, {}), + Pointer(Property(&AudioEncoder::SampleRateHz, 48000))); } TEST(AudioEncoderFactoryTemplateTest, Opus) { + const Environment env = CreateEnvironment(); auto factory = CreateAudioEncoderFactory(); AudioCodecInfo info = {48000, 1, 32000, 6000, 510000}; info.allow_comfort_noise = false; @@ -209,16 +298,17 @@ TEST(AudioEncoderFactoryTemplateTest, Opus) { ::testing::ElementsAre(AudioCodecSpec{ {"opus", 48000, 2, {{"minptime", "10"}, {"useinbandfec", "1"}}}, info})); - EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1})); + EXPECT_EQ(std::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1})); EXPECT_EQ( info, factory->QueryAudioEncoder( {"opus", 48000, 2, {{"minptime", "10"}, {"useinbandfec", "1"}}})); - EXPECT_EQ(nullptr, - factory->MakeAudioEncoder(17, {"bar", 16000, 1}, absl::nullopt)); - auto enc = factory->MakeAudioEncoder(17, {"opus", 48000, 2}, absl::nullopt); - ASSERT_NE(nullptr, enc); - EXPECT_EQ(48000, enc->SampleRateHz()); + + EXPECT_THAT(factory->Create(env, {"bar", 16000, 1}, {.payload_type = 17}), + IsNull()); + EXPECT_THAT(factory->Create(env, {"opus", 48000, 2}, {.payload_type = 17}), + Pointer(Property(&AudioEncoder::SampleRateHz, 48000))); } +} // namespace } // namespace webrtc diff --git a/api/audio_options.cc b/api/audio_options.cc index a3f2b6e887..46913b8a4c 100644 --- a/api/audio_options.cc +++ b/api/audio_options.cc @@ -10,23 +10,26 @@ #include "api/audio_options.h" +#include +#include + #include "api/array_view.h" #include "rtc_base/strings/string_builder.h" -namespace cricket { +namespace webrtc { namespace { template -void ToStringIfSet(rtc::SimpleStringBuilder* result, +void ToStringIfSet(SimpleStringBuilder* result, const char* key, - const absl::optional& val) { + const std::optional& val) { if (val) { (*result) << key << ": " << *val << ", "; } } template -void SetFrom(absl::optional* s, const absl::optional& o) { +void SetFrom(std::optional* s, const std::optional& o) { if (o) { *s = o; } @@ -78,7 +81,7 @@ bool AudioOptions::operator==(const AudioOptions& o) const { std::string AudioOptions::ToString() const { char buffer[1024]; - rtc::SimpleStringBuilder result(buffer); + SimpleStringBuilder result(buffer); result << "AudioOptions {"; ToStringIfSet(&result, "aec", echo_cancellation); #if defined(WEBRTC_IOS) @@ -101,4 +104,4 @@ std::string AudioOptions::ToString() const { return result.str(); } -} // namespace cricket +} // namespace webrtc diff --git a/api/audio_options.h b/api/audio_options.h index 3ab3b3c98c..723ee0ff3e 100644 --- a/api/audio_options.h +++ b/api/audio_options.h @@ -11,14 +11,12 @@ #ifndef API_AUDIO_OPTIONS_H_ #define API_AUDIO_OPTIONS_H_ 
-#include - +#include #include -#include "absl/types/optional.h" #include "rtc_base/system/rtc_export.h" -namespace cricket { +namespace webrtc { // Options that can be applied to a VoiceMediaChannel or a VoiceMediaEngine. // Used to be flags, but that makes it hard to selectively apply options. @@ -36,40 +34,48 @@ struct RTC_EXPORT AudioOptions { // Audio processing that attempts to filter away the output signal from // later inbound pickup. - absl::optional echo_cancellation; + std::optional echo_cancellation; #if defined(WEBRTC_IOS) // Forces software echo cancellation on iOS. This is a temporary workaround // (until Apple fixes the bug) for a device with non-functioning AEC. May // improve performance on that particular device, but will cause unpredictable // behavior in all other cases. See http://bugs.webrtc.org/8682. - absl::optional ios_force_software_aec_HACK; + std::optional ios_force_software_aec_HACK; #endif // Audio processing to adjust the sensitivity of the local mic dynamically. - absl::optional auto_gain_control; + std::optional auto_gain_control; // Audio processing to filter out background noise. - absl::optional noise_suppression; + std::optional noise_suppression; // Audio processing to remove background noise of lower frequencies. - absl::optional highpass_filter; + std::optional highpass_filter; // Audio processing to swap the left and right channels. - absl::optional stereo_swapping; + std::optional stereo_swapping; // Audio receiver jitter buffer (NetEq) max capacity in number of packets. - absl::optional audio_jitter_buffer_max_packets; + std::optional audio_jitter_buffer_max_packets; // Audio receiver jitter buffer (NetEq) fast accelerate mode. - absl::optional audio_jitter_buffer_fast_accelerate; + std::optional audio_jitter_buffer_fast_accelerate; // Audio receiver jitter buffer (NetEq) minimum target delay in milliseconds. - absl::optional audio_jitter_buffer_min_delay_ms; + std::optional audio_jitter_buffer_min_delay_ms; // Enable audio network adaptor. // TODO(webrtc:11717): Remove this API in favor of adaptivePtime in // RtpEncodingParameters. - absl::optional audio_network_adaptor; + std::optional audio_network_adaptor; // Config string for audio network adaptor. - absl::optional audio_network_adaptor_config; + std::optional audio_network_adaptor_config; // Pre-initialize the ADM for recording when starting to send. Default to // true. // TODO(webrtc:13566): Remove this option. See issue for details. - absl::optional init_recording_on_send; + std::optional init_recording_on_send; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::AudioOptions; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // API_AUDIO_OPTIONS_H_ diff --git a/api/call/call_factory_interface.h b/api/call/call_factory_interface.h deleted file mode 100644 index 6051409cc3..0000000000 --- a/api/call/call_factory_interface.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef API_CALL_CALL_FACTORY_INTERFACE_H_ -#define API_CALL_CALL_FACTORY_INTERFACE_H_ - -#include - -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// These classes are not part of the API, and are treated as opaque pointers. -class Call; -struct CallConfig; - -// This interface exists to allow webrtc to be optionally built without media -// support (i.e., if only being used for data channels). PeerConnectionFactory -// is constructed with a CallFactoryInterface, which may or may not be null. -class CallFactoryInterface { - public: - virtual ~CallFactoryInterface() {} - - virtual Call* CreateCall(const CallConfig& config) = 0; -}; - -RTC_EXPORT std::unique_ptr CreateCallFactory(); - -} // namespace webrtc - -#endif // API_CALL_CALL_FACTORY_INTERFACE_H_ diff --git a/api/call/transport.cc b/api/call/transport.cc index bcadc762de..0a9dd5bcc7 100644 --- a/api/call/transport.cc +++ b/api/call/transport.cc @@ -10,8 +10,6 @@ #include "api/call/transport.h" -#include - namespace webrtc { PacketOptions::PacketOptions() = default; diff --git a/api/call/transport.h b/api/call/transport.h index 6c6cbb8941..b2bc18ef10 100644 --- a/api/call/transport.h +++ b/api/call/transport.h @@ -11,12 +11,9 @@ #ifndef API_CALL_TRANSPORT_H_ #define API_CALL_TRANSPORT_H_ -#include #include #include "api/array_view.h" -#include "api/ref_counted_base.h" -#include "api/scoped_refptr.h" namespace webrtc { @@ -27,16 +24,14 @@ struct PacketOptions { PacketOptions(const PacketOptions&); ~PacketOptions(); - // A 16 bits positive id. Negative ids are invalid and should be interpreted + // Negative ids are invalid and should be interpreted // as packet_id not being set. - int packet_id = -1; - // Additional data bound to the RTP packet for use in application code, - // outside of WebRTC. - rtc::scoped_refptr additional_data; - // Whether this is a retransmission of an earlier packet. - bool is_retransmit = false; + int64_t packet_id = -1; + // Whether this is an audio or video packet, excluding retransmissions. + bool is_media = true; bool included_in_feedback = false; bool included_in_allocation = false; + bool send_as_ect1 = false; // Whether this packet can be part of a packet batch at lower levels. bool batchable = false; // Whether this packet is the last of a batch. @@ -45,29 +40,9 @@ struct PacketOptions { class Transport { public: - // New style functions. Default implementations are to accomodate - // subclasses that haven't been converted to new style yet. - // TODO(bugs.webrtc.org/14870): Deprecate and remove old functions. -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wdeprecated-declarations" - virtual bool SendRtp(rtc::ArrayView packet, - const PacketOptions& options) { - return SendRtp(packet.data(), packet.size(), options); - } - virtual bool SendRtcp(rtc::ArrayView packet) { - return SendRtcp(packet.data(), packet.size()); - } -#pragma clang diagnostic pop - // Old style functions. 
- [[deprecated("Use ArrayView version")]] virtual bool - SendRtp(const uint8_t* packet, size_t length, const PacketOptions& options) { - return SendRtp(rtc::MakeArrayView(packet, length), options); - } - [[deprecated("Use ArrayView version")]] virtual bool SendRtcp( - const uint8_t* packet, - size_t length) { - return SendRtcp(rtc::MakeArrayView(packet, length)); - } + virtual bool SendRtp(ArrayView packet, + const PacketOptions& options) = 0; + virtual bool SendRtcp(ArrayView packet) = 0; protected: virtual ~Transport() {} diff --git a/api/candidate.cc b/api/candidate.cc index 90cb326823..30a6c1b87b 100644 --- a/api/candidate.cc +++ b/api/candidate.cc @@ -10,35 +10,62 @@ #include "api/candidate.h" -#include "rtc_base/helpers.h" +#include // IWYU pragma: keep +#include +#include + +#include "absl/strings/str_cat.h" +#include "absl/strings/string_view.h" +#include "p2p/base/p2p_constants.h" +#include "rtc_base/checks.h" +#include "rtc_base/crc32.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/ip_address.h" -#include "rtc_base/logging.h" +#include "rtc_base/network_constants.h" +#include "rtc_base/socket_address.h" #include "rtc_base/strings/string_builder.h" -namespace cricket { +using webrtc::IceCandidateType; + +namespace webrtc { +absl::string_view IceCandidateTypeToString(IceCandidateType type) { + switch (type) { + case IceCandidateType::kHost: + return "host"; + case IceCandidateType::kSrflx: + return "srflx"; + case IceCandidateType::kPrflx: + return "prflx"; + case IceCandidateType::kRelay: + return "relay"; + } +} +} // namespace webrtc + +namespace webrtc { Candidate::Candidate() - : id_(rtc::CreateRandomString(8)), - component_(0), + : id_(CreateRandomString(8)), + component_(ICE_CANDIDATE_COMPONENT_DEFAULT), priority_(0), - network_type_(rtc::ADAPTER_TYPE_UNKNOWN), - underlying_type_for_vpn_(rtc::ADAPTER_TYPE_UNKNOWN), + network_type_(webrtc::ADAPTER_TYPE_UNKNOWN), + underlying_type_for_vpn_(webrtc::ADAPTER_TYPE_UNKNOWN), generation_(0), network_id_(0), network_cost_(0) {} Candidate::Candidate(int component, absl::string_view protocol, - const rtc::SocketAddress& address, + const SocketAddress& address, uint32_t priority, absl::string_view username, absl::string_view password, - absl::string_view type, + IceCandidateType type, uint32_t generation, absl::string_view foundation, - uint16_t network_id, - uint16_t network_cost) - : id_(rtc::CreateRandomString(8)), + uint16_t network_id /*= 0*/, + uint16_t network_cost /*= 0*/) + : id_(CreateRandomString(8)), component_(component), protocol_(protocol), address_(address), @@ -46,8 +73,8 @@ Candidate::Candidate(int component, username_(username), password_(password), type_(type), - network_type_(rtc::ADAPTER_TYPE_UNKNOWN), - underlying_type_for_vpn_(rtc::ADAPTER_TYPE_UNKNOWN), + network_type_(webrtc::ADAPTER_TYPE_UNKNOWN), + underlying_type_for_vpn_(webrtc::ADAPTER_TYPE_UNKNOWN), generation_(generation), foundation_(foundation), network_id_(network_id), @@ -57,6 +84,27 @@ Candidate::Candidate(const Candidate&) = default; Candidate::~Candidate() = default; +void Candidate::generate_id() { + id_ = CreateRandomString(8); +} + +bool Candidate::is_local() const { + return type_ == IceCandidateType::kHost; +} +bool Candidate::is_stun() const { + return type_ == IceCandidateType::kSrflx; +} +bool Candidate::is_prflx() const { + return type_ == IceCandidateType::kPrflx; +} +bool Candidate::is_relay() const { + return type_ == IceCandidateType::kRelay; +} + +absl::string_view Candidate::type_name() const { + return 
webrtc::IceCandidateTypeToString(type_); +} + bool Candidate::IsEquivalent(const Candidate& c) const { // We ignore the network name, since that is just debug information, and // the priority and the network cost, since they should be the same if the @@ -75,15 +123,16 @@ bool Candidate::MatchesForRemoval(const Candidate& c) const { } std::string Candidate::ToStringInternal(bool sensitive) const { - rtc::StringBuilder ost; + StringBuilder ost; std::string address = sensitive ? address_.ToSensitiveString() : address_.ToString(); std::string related_address = sensitive ? related_address_.ToSensitiveString() : related_address_.ToString(); ost << "Cand[" << transport_name_ << ":" << foundation_ << ":" << component_ - << ":" << protocol_ << ":" << priority_ << ":" << address << ":" << type_ - << ":" << related_address << ":" << username_ << ":" << password_ << ":" - << network_id_ << ":" << network_cost_ << ":" << generation_ << "]"; + << ":" << protocol_ << ":" << priority_ << ":" << address << ":" + << type_name() << ":" << related_address << ":" << username_ << ":" + << password_ << ":" << network_id_ << ":" << network_cost_ << ":" + << generation_ << "]"; return ost.Release(); } @@ -108,7 +157,7 @@ uint32_t Candidate::GetPriority(uint32_t type_preference, // local preference = (NIC Type << 8 | Addr_Pref) + relay preference. // The relay preference is based on the number of TURN servers, the // first TURN server gets the highest preference. - int addr_pref = IPAddressPrecedence(address_.ipaddr()); + int addr_pref = webrtc::IPAddressPrecedence(address_.ipaddr()); int local_preference = ((network_adapter_preference << 8) | addr_pref) + relay_preference; @@ -147,32 +196,72 @@ bool Candidate::operator!=(const Candidate& o) const { } Candidate Candidate::ToSanitizedCopy(bool use_hostname_address, - bool filter_related_address) const { + bool filter_related_address, + bool filter_ufrag) const { Candidate copy(*this); if (use_hostname_address) { - rtc::IPAddress ip; + IPAddress ip; if (address().hostname().empty()) { // IP needs to be redacted, but no hostname available. - rtc::SocketAddress redacted_addr("redacted-ip.invalid", address().port()); + SocketAddress redacted_addr("redacted-ip.invalid", address().port()); copy.set_address(redacted_addr); - } else if (IPFromString(address().hostname(), &ip)) { + } else if (webrtc::IPFromString(address().hostname(), &ip)) { // The hostname is an IP literal, and needs to be redacted too. - rtc::SocketAddress redacted_addr("redacted-literal.invalid", - address().port()); + SocketAddress redacted_addr("redacted-literal.invalid", address().port()); copy.set_address(redacted_addr); } else { - rtc::SocketAddress hostname_only_addr(address().hostname(), - address().port()); + SocketAddress hostname_only_addr(address().hostname(), address().port()); copy.set_address(hostname_only_addr); } } if (filter_related_address) { copy.set_related_address( - rtc::EmptySocketAddressWithFamily(copy.address().family())); + webrtc::EmptySocketAddressWithFamily(copy.address().family())); } + if (filter_ufrag) { + copy.set_username(""); + } + return copy; } +void Candidate::ComputeFoundation(const SocketAddress& base_address, + uint64_t tie_breaker) { + // https://www.rfc-editor.org/rfc/rfc5245#section-4.1.1.3 + // The foundation is an identifier, scoped within a session. Two candidates + // MUST have the same foundation ID when all of the following are true: + // + // o they are of the same type. + // o their bases have the same IP address (the ports can be different). 
+ // o for reflexive and relayed candidates, the STUN or TURN servers used to + // obtain them have the same IP address. + // o they were obtained using the same transport protocol (TCP, UDP, etc.). + // + // Similarly, two candidates MUST have different foundations if their + // types are different, their bases have different IP addresses, the STUN or + // TURN servers used to obtain them have different IP addresses, or their + // transport protocols are different. + + StringBuilder sb; + sb << type_name() << base_address.ipaddr().ToString() << protocol_ + << relay_protocol_; + + // https://www.rfc-editor.org/rfc/rfc5245#section-5.2 + // [...] it is possible for both agents to mistakenly believe they are + // controlled or controlling. To resolve this, each agent MUST select a random + // number, called the tie-breaker, uniformly distributed between 0 and (2**64) + // - 1 (that is, a 64-bit positive integer). This number is used in + // connectivity checks to detect and repair this case [...] + sb << absl::StrCat(tie_breaker); + foundation_ = absl::StrCat(webrtc::ComputeCrc32(sb.Release())); +} + +void Candidate::ComputePrflxFoundation() { + RTC_DCHECK(is_prflx()); + RTC_DCHECK(!id_.empty()); + foundation_ = absl::StrCat(webrtc::ComputeCrc32(id_)); +} + void Candidate::Assign(std::string& s, absl::string_view view) { // Assigning via a temporary object, like s = std::string(view), results in // binary size bloat. To avoid that, extract pointer and size from the @@ -180,4 +269,4 @@ void Candidate::Assign(std::string& s, absl::string_view view) { s.assign(view.data(), view.size()); } -} // namespace cricket +} // namespace webrtc diff --git a/api/candidate.h b/api/candidate.h index 15cd48c7b4..b02d0ac3a3 100644 --- a/api/candidate.h +++ b/api/candidate.h @@ -11,10 +11,9 @@ #ifndef API_CANDIDATE_H_ #define API_CANDIDATE_H_ -#include +#include #include -#include #include #include "absl/strings/string_view.h" @@ -23,27 +22,37 @@ #include "rtc_base/socket_address.h" #include "rtc_base/system/rtc_export.h" -namespace cricket { +namespace webrtc { + +enum class IceCandidateType : int { kHost, kSrflx, kPrflx, kRelay }; +RTC_EXPORT absl::string_view IceCandidateTypeToString(IceCandidateType); + +// TODO(tommi): Remove. No usage in WebRTC now, remove once downstream projects +// don't have reliance. +[[deprecated("Use IceCandidateType")]] static constexpr char LOCAL_PORT_TYPE[] = + "local"; +[[deprecated("Use IceCandidateType")]] static constexpr char STUN_PORT_TYPE[] = + "stun"; +[[deprecated("Use IceCandidateType")]] static constexpr char PRFLX_PORT_TYPE[] = + "prflx"; +[[deprecated("Use IceCandidateType")]] static constexpr char RELAY_PORT_TYPE[] = + "relay"; // TURN servers are limited to 32 in accordance with // https://w3c.github.io/webrtc-pc/#dom-rtcconfiguration-iceservers static constexpr size_t kMaxTurnServers = 32; // Candidate for ICE based connection discovery. -// TODO(phoglund): remove things in here that are not needed in the public API. - class RTC_EXPORT Candidate { public: Candidate(); - // TODO(pthatcher): Match the ordering and param list as per RFC 5245 - // candidate-attribute syntax. 
http://tools.ietf.org/html/rfc5245#section-15.1 Candidate(int component, absl::string_view protocol, - const rtc::SocketAddress& address, + const SocketAddress& address, uint32_t priority, absl::string_view username, absl::string_view password, - absl::string_view type, + IceCandidateType type, uint32_t generation, absl::string_view foundation, uint16_t network_id = 0, @@ -51,48 +60,37 @@ class RTC_EXPORT Candidate { Candidate(const Candidate&); ~Candidate(); + // 8 character long randomized ID string for logging purposes. const std::string& id() const { return id_; } - void set_id(absl::string_view id) { Assign(id_, id); } + // Generates a new, 8 character long, id. + void generate_id(); int component() const { return component_; } void set_component(int component) { component_ = component; } const std::string& protocol() const { return protocol_; } + + // Valid protocol values are: + // UDP_PROTOCOL_NAME, TCP_PROTOCOL_NAME, SSLTCP_PROTOCOL_NAME, + // TLS_PROTOCOL_NAME. void set_protocol(absl::string_view protocol) { Assign(protocol_, protocol); } // The protocol used to talk to relay. const std::string& relay_protocol() const { return relay_protocol_; } + + // Valid protocol values are: + // UDP_PROTOCOL_NAME, TCP_PROTOCOL_NAME, SSLTCP_PROTOCOL_NAME, + // TLS_PROTOCOL_NAME. void set_relay_protocol(absl::string_view protocol) { Assign(relay_protocol_, protocol); } - const rtc::SocketAddress& address() const { return address_; } - void set_address(const rtc::SocketAddress& address) { address_ = address; } + const SocketAddress& address() const { return address_; } + void set_address(const SocketAddress& address) { address_ = address; } uint32_t priority() const { return priority_; } void set_priority(const uint32_t priority) { priority_ = priority; } - // TODO(pthatcher): Remove once Chromium's jingle/glue/utils.cc - // doesn't use it. - // Maps old preference (which was 0.0-1.0) to match priority (which - // is 0-2^32-1) to to match RFC 5245, section 4.1.2.1. Also see - // https://docs.google.com/a/google.com/document/d/ - // 1iNQDiwDKMh0NQOrCqbj3DKKRT0Dn5_5UJYhmZO-t7Uc/edit - float preference() const { - // The preference value is clamped to two decimal precision. - return static_cast(((priority_ >> 24) * 100 / 127) / 100.0); - } - - // TODO(pthatcher): Remove once Chromium's jingle/glue/utils.cc - // doesn't use it. - void set_preference(float preference) { - // Limiting priority to UINT_MAX when value exceeds uint32_t max. - // This can happen for e.g. when preference = 3. - uint64_t prio_val = static_cast(preference * 127) << 24; - priority_ = static_cast( - std::min(prio_val, static_cast(UINT_MAX))); - } - // TODO(honghaiz): Change to usernameFragment or ufrag. const std::string& username() const { return username_; } void set_username(absl::string_view username) { Assign(username_, username); } @@ -100,23 +98,58 @@ class RTC_EXPORT Candidate { const std::string& password() const { return password_; } void set_password(absl::string_view password) { Assign(password_, password); } - const std::string& type() const { return type_; } - void set_type(absl::string_view type) { Assign(type_, type); } + IceCandidateType type() const { return type_; } + + // Returns the name of the candidate type as specified in + // https://datatracker.ietf.org/doc/html/rfc5245#section-15.1 + absl::string_view type_name() const; + + // Setting the type requires a constant string (e.g. + // webrtc::LOCAL_PORT_TYPE). 
The type should really be an enum rather than a + // string, but until we make that change the lifetime attribute helps us lock + // things down. See also the `Port` class. + void set_type(IceCandidateType type) { type_ = type; } + + // Simple checkers for checking the candidate type without dependency on the + // IceCandidateType enum. The `is_local()` and `is_stun()` names are legacy + // names and should now more accurately be `is_host()` and `is_srflx()`. + bool is_local() const; + bool is_stun() const; + bool is_prflx() const; + bool is_relay() const; + + // Returns the type preference, a value between 0-126 inclusive, with 0 being + // the lowest preference value, as described in RFC 5245. + // https://datatracker.ietf.org/doc/html/rfc5245#section-4.1.2.1 + int type_preference() const { + // From https://datatracker.ietf.org/doc/html/rfc5245#section-4.1.4 : + // It is RECOMMENDED that default candidates be chosen based on the + // likelihood of those candidates to work with the peer that is being + // contacted. + // I.e. it is recommended that relayed > reflexive > host. + if (is_local()) + return 1; // Host. + if (is_stun()) + return 2; // Reflexive. + if (is_relay()) + return 3; // Relayed. + return 0; // Unknown, lowest preference. + } const std::string& network_name() const { return network_name_; } void set_network_name(absl::string_view network_name) { Assign(network_name_, network_name); } - rtc::AdapterType network_type() const { return network_type_; } - void set_network_type(rtc::AdapterType network_type) { + AdapterType network_type() const { return network_type_; } + void set_network_type(AdapterType network_type) { network_type_ = network_type; } - rtc::AdapterType underlying_type_for_vpn() const { + AdapterType underlying_type_for_vpn() const { return underlying_type_for_vpn_; } - void set_underlying_type_for_vpn(rtc::AdapterType network_type) { + void set_underlying_type_for_vpn(AdapterType network_type) { underlying_type_for_vpn_ = network_type; } @@ -126,9 +159,10 @@ class RTC_EXPORT Candidate { // `network_cost` measures the cost/penalty of using this candidate. A network // cost of 0 indicates this candidate can be used freely. A value of - // rtc::kNetworkCostMax indicates it should be used only as the last resort. + // webrtc::kNetworkCostMax indicates it should be used only as the last + // resort. void set_network_cost(uint16_t network_cost) { - RTC_DCHECK_LE(network_cost, rtc::kNetworkCostMax); + RTC_DCHECK_LE(network_cost, webrtc::kNetworkCostMax); network_cost_ = network_cost; } uint16_t network_cost() const { return network_cost_; } @@ -137,13 +171,21 @@ class RTC_EXPORT Candidate { uint16_t network_id() const { return network_id_; } void set_network_id(uint16_t network_id) { network_id_ = network_id; } + // From RFC 5245, section-7.2.1.3: + // The foundation of the candidate is set to an arbitrary value, different + // from the foundation for all other remote candidates. + // Note: Use ComputeFoundation to populate this value. const std::string& foundation() const { return foundation_; } + + // TODO(tommi): Deprecate in favor of ComputeFoundation. + // For situations where serializing/deserializing a candidate is needed, + // the constructor can be used to inject a value for the foundation. 
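// A minimal usage sketch of the enum-based Candidate API above. The helper
// name and the concrete address, component and tie-breaker values are made up
// for illustration; only the Candidate/SocketAddress calls visible in this
// patch are assumed.
Candidate MakeServerReflexiveCandidate() {
  Candidate c;  // Default construction assigns a random 8-character id.
  c.set_component(1);  // RTP component.
  c.set_protocol("udp");
  c.set_address(SocketAddress("203.0.113.7", 50000));
  c.set_type(IceCandidateType::kSrflx);
  RTC_DCHECK(c.is_stun());                // Legacy name for "is srflx".
  RTC_DCHECK_EQ(c.type_preference(), 2);  // Reflexive, per the ordering above.
  // The foundation is derived from type, base address, protocols and a
  // session-wide tie-breaker.
  c.ComputeFoundation(c.address(), /*tie_breaker=*/1);
  return c;
}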
void set_foundation(absl::string_view foundation) { Assign(foundation_, foundation); } - const rtc::SocketAddress& related_address() const { return related_address_; } - void set_related_address(const rtc::SocketAddress& related_address) { + const SocketAddress& related_address() const { return related_address_; } + void set_related_address(const SocketAddress& related_address) { related_address_ = related_address; } const std::string& tcptype() const { return tcptype_; } @@ -186,8 +228,35 @@ class RTC_EXPORT Candidate { // to the wildcard address (i.e. 0.0.0.0 for IPv4 and :: for IPv6). Note that // setting both booleans to false returns an identical copy to the original // candidate. + // The username fragment may be filtered, e.g. for prflx candidates before + // any remote ice parameters have been set. + [[deprecated("Use variant with filter_ufrag")]] Candidate ToSanitizedCopy( + bool use_hostname_address, + bool filter_related_address) const { + return ToSanitizedCopy(use_hostname_address, filter_related_address, false); + } Candidate ToSanitizedCopy(bool use_hostname_address, - bool filter_related_address) const; + bool filter_related_address, + bool filter_ufrag) const; + + // Computes and populates the `foundation()` field. + // Foundation: An arbitrary string that is the same for two candidates + // that have the same type, base IP address, protocol (UDP, TCP, + // etc.), and STUN or TURN server. If any of these are different, + // then the foundation will be different. Two candidate pairs with + // the same foundation pairs are likely to have similar network + // characteristics. Foundations are used in the frozen algorithm. + // A session wide (peerconnection) tie-breaker is applied to the foundation, + // adds additional randomness and must be the same for all candidates. + void ComputeFoundation(const SocketAddress& base_address, + uint64_t tie_breaker); + + // https://www.rfc-editor.org/rfc/rfc5245#section-7.2.1.3 + // Call to populate the foundation field for a new peer reflexive remote + // candidate. The type of the candidate must be "prflx". + // The foundation of the candidate is set to an arbitrary value, different + // from the foundation for all other remote candidates. + void ComputePrflxFoundation(); private: // TODO(bugs.webrtc.org/13220): With C++17, we get a std::string assignment @@ -200,17 +269,17 @@ class RTC_EXPORT Candidate { int component_; std::string protocol_; std::string relay_protocol_; - rtc::SocketAddress address_; + SocketAddress address_; uint32_t priority_; std::string username_; std::string password_; - std::string type_; + IceCandidateType type_ = IceCandidateType::kHost; std::string network_name_; - rtc::AdapterType network_type_; - rtc::AdapterType underlying_type_for_vpn_; + AdapterType network_type_; + AdapterType underlying_type_for_vpn_; uint32_t generation_; std::string foundation_; - rtc::SocketAddress related_address_; + SocketAddress related_address_; std::string tcptype_; std::string transport_name_; uint16_t network_id_; @@ -218,6 +287,19 @@ class RTC_EXPORT Candidate { std::string url_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::Candidate; +using ::webrtc::kMaxTurnServers; +using ::webrtc::LOCAL_PORT_TYPE; +using ::webrtc::PRFLX_PORT_TYPE; +using ::webrtc::RELAY_PORT_TYPE; +using ::webrtc::STUN_PORT_TYPE; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // API_CANDIDATE_H_ diff --git a/api/candidate_unittest.cc b/api/candidate_unittest.cc new file mode 100644 index 0000000000..2eb9707d37 --- /dev/null +++ b/api/candidate_unittest.cc @@ -0,0 +1,103 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/candidate.h" + +#include + +#include "p2p/base/p2p_constants.h" +#include "rtc_base/socket_address.h" +#include "test/gtest.h" + +using webrtc::IceCandidateType; + +namespace webrtc { + +TEST(CandidateTest, Id) { + Candidate c; + EXPECT_EQ(c.id().size(), 8u); + std::string current_id = c.id(); + // Generate a new ID. + c.generate_id(); + EXPECT_EQ(c.id().size(), 8u); + EXPECT_NE(current_id, c.id()); +} + +TEST(CandidateTest, Component) { + Candidate c; + EXPECT_EQ(c.component(), ICE_CANDIDATE_COMPONENT_DEFAULT); + c.set_component(ICE_CANDIDATE_COMPONENT_RTCP); + EXPECT_EQ(c.component(), ICE_CANDIDATE_COMPONENT_RTCP); +} + +TEST(CandidateTest, TypeName) { + Candidate c; + // The `type_name()` property defaults to "host". + EXPECT_EQ(c.type_name(), "host"); + EXPECT_EQ(c.type(), IceCandidateType::kHost); + + c.set_type(IceCandidateType::kSrflx); + EXPECT_EQ(c.type_name(), "srflx"); + EXPECT_EQ(c.type(), IceCandidateType::kSrflx); + + c.set_type(IceCandidateType::kPrflx); + EXPECT_EQ(c.type_name(), "prflx"); + EXPECT_EQ(c.type(), IceCandidateType::kPrflx); + + c.set_type(IceCandidateType::kRelay); + EXPECT_EQ(c.type_name(), "relay"); + EXPECT_EQ(c.type(), IceCandidateType::kRelay); +} + +TEST(CandidateTest, Foundation) { + Candidate c; + EXPECT_TRUE(c.foundation().empty()); + c.set_protocol("udp"); + c.set_relay_protocol("udp"); + + SocketAddress address("99.99.98.1", 1024); + c.set_address(address); + c.ComputeFoundation(c.address(), 1); + std::string foundation1 = c.foundation(); + EXPECT_FALSE(foundation1.empty()); + + // Change the tiebreaker. + c.ComputeFoundation(c.address(), 2); + std::string foundation2 = c.foundation(); + EXPECT_NE(foundation1, foundation2); + + // Provide a different base address. + address.SetIP("100.100.100.1"); + c.ComputeFoundation(address, 1); // Same tiebreaker as for foundation1. + foundation2 = c.foundation(); + EXPECT_NE(foundation1, foundation2); + + // Consistency check (just in case the algorithm ever changes to random!). + c.ComputeFoundation(c.address(), 1); + foundation2 = c.foundation(); + EXPECT_EQ(foundation1, foundation2); + + // Changing the protocol should affect the foundation. + auto prev_protocol = c.protocol(); + c.set_protocol("tcp"); + ASSERT_NE(prev_protocol, c.protocol()); + c.ComputeFoundation(c.address(), 1); + EXPECT_NE(foundation1, c.foundation()); + c.set_protocol(prev_protocol); + + // Changing the relay protocol should affect the foundation. 
+ prev_protocol = c.relay_protocol(); + c.set_relay_protocol("tcp"); + ASSERT_NE(prev_protocol, c.relay_protocol()); + c.ComputeFoundation(c.address(), 1); + EXPECT_NE(foundation1, c.foundation()); +} + +} // namespace webrtc diff --git a/api/create_peerconnection_factory.cc b/api/create_peerconnection_factory.cc index b7f9eb7f30..ededc00816 100644 --- a/api/create_peerconnection_factory.cc +++ b/api/create_peerconnection_factory.cc @@ -13,117 +13,66 @@ #include #include -#include "api/call/call_factory_interface.h" +#include "api/audio/audio_device.h" +#include "api/audio/audio_mixer.h" +#include "api/audio/audio_processing.h" +#include "api/audio/builtin_audio_processing_builder.h" +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/enable_media.h" +#include "api/field_trials_view.h" #include "api/peer_connection_interface.h" #include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/scoped_refptr.h" -#include "api/task_queue/default_task_queue_factory.h" -#include "api/transport/field_trial_based_config.h" -#include "media/base/media_engine.h" -#include "media/engine/webrtc_media_engine.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" #include "rtc_base/thread.h" namespace webrtc { -rtc::scoped_refptr CreatePeerConnectionFactory( - rtc::Thread* network_thread, - rtc::Thread* worker_thread, - rtc::Thread* signaling_thread, - rtc::scoped_refptr default_adm, - rtc::scoped_refptr audio_encoder_factory, - rtc::scoped_refptr audio_decoder_factory, +scoped_refptr CreatePeerConnectionFactory( + Thread* network_thread, + Thread* worker_thread, + Thread* signaling_thread, + scoped_refptr default_adm, + scoped_refptr audio_encoder_factory, + scoped_refptr audio_decoder_factory, std::unique_ptr video_encoder_factory, std::unique_ptr video_decoder_factory, - rtc::scoped_refptr audio_mixer, - rtc::scoped_refptr audio_processing, - AudioFrameProcessor* audio_frame_processor, - std::unique_ptr owned_audio_frame_processor, + scoped_refptr audio_mixer, + scoped_refptr audio_processing, + std::unique_ptr audio_frame_processor, std::unique_ptr field_trials) { - if (!field_trials) { - field_trials = std::make_unique(); - } - PeerConnectionFactoryDependencies dependencies; dependencies.network_thread = network_thread; dependencies.worker_thread = worker_thread; dependencies.signaling_thread = signaling_thread; - dependencies.task_queue_factory = - CreateDefaultTaskQueueFactory(field_trials.get()); - dependencies.call_factory = CreateCallFactory(); - dependencies.event_log_factory = std::make_unique( - dependencies.task_queue_factory.get()); + dependencies.event_log_factory = std::make_unique(); dependencies.trials = std::move(field_trials); if (network_thread) { - // TODO(bugs.webrtc.org/13145): Add an rtc::SocketFactory* argument. + // TODO(bugs.webrtc.org/13145): Add an webrtc::SocketFactory* argument. 
dependencies.socket_factory = network_thread->socketserver(); } - cricket::MediaEngineDependencies media_dependencies; - media_dependencies.task_queue_factory = dependencies.task_queue_factory.get(); - media_dependencies.adm = std::move(default_adm); - media_dependencies.audio_encoder_factory = std::move(audio_encoder_factory); - media_dependencies.audio_decoder_factory = std::move(audio_decoder_factory); - if (audio_frame_processor) { - media_dependencies.audio_frame_processor = audio_frame_processor; - } else if (owned_audio_frame_processor) { - media_dependencies.owned_audio_frame_processor = - std::move(owned_audio_frame_processor); - } - if (audio_processing) { - media_dependencies.audio_processing = std::move(audio_processing); + dependencies.adm = std::move(default_adm); + dependencies.audio_encoder_factory = std::move(audio_encoder_factory); + dependencies.audio_decoder_factory = std::move(audio_decoder_factory); + dependencies.audio_frame_processor = std::move(audio_frame_processor); + if (audio_processing != nullptr) { + dependencies.audio_processing_builder = + CustomAudioProcessing(std::move(audio_processing)); } else { - media_dependencies.audio_processing = AudioProcessingBuilder().Create(); +#ifndef WEBRTC_EXCLUDE_AUDIO_PROCESSING_MODULE + dependencies.audio_processing_builder = + std::make_unique(); +#endif } - media_dependencies.audio_mixer = std::move(audio_mixer); - media_dependencies.video_encoder_factory = std::move(video_encoder_factory); - media_dependencies.video_decoder_factory = std::move(video_decoder_factory); - media_dependencies.trials = dependencies.trials.get(); - dependencies.media_engine = - cricket::CreateMediaEngine(std::move(media_dependencies)); + dependencies.audio_mixer = std::move(audio_mixer); + dependencies.video_encoder_factory = std::move(video_encoder_factory); + dependencies.video_decoder_factory = std::move(video_decoder_factory); + EnableMedia(dependencies); return CreateModularPeerConnectionFactory(std::move(dependencies)); } -rtc::scoped_refptr CreatePeerConnectionFactory( - rtc::Thread* network_thread, - rtc::Thread* worker_thread, - rtc::Thread* signaling_thread, - rtc::scoped_refptr default_adm, - rtc::scoped_refptr audio_encoder_factory, - rtc::scoped_refptr audio_decoder_factory, - std::unique_ptr video_encoder_factory, - std::unique_ptr video_decoder_factory, - rtc::scoped_refptr audio_mixer, - rtc::scoped_refptr audio_processing, - AudioFrameProcessor* audio_frame_processor) { - return CreatePeerConnectionFactory( - network_thread, worker_thread, signaling_thread, default_adm, - audio_encoder_factory, audio_decoder_factory, - std::move(video_encoder_factory), std::move(video_decoder_factory), - audio_mixer, audio_processing, audio_frame_processor, nullptr, nullptr); -} - -rtc::scoped_refptr CreatePeerConnectionFactory( - rtc::Thread* network_thread, - rtc::Thread* worker_thread, - rtc::Thread* signaling_thread, - rtc::scoped_refptr default_adm, - rtc::scoped_refptr audio_encoder_factory, - rtc::scoped_refptr audio_decoder_factory, - std::unique_ptr video_encoder_factory, - std::unique_ptr video_decoder_factory, - rtc::scoped_refptr audio_mixer, - rtc::scoped_refptr audio_processing, - std::unique_ptr owned_audio_frame_processor, - std::unique_ptr field_trials) { - return CreatePeerConnectionFactory( - network_thread, worker_thread, signaling_thread, default_adm, - audio_encoder_factory, audio_decoder_factory, - std::move(video_encoder_factory), std::move(video_decoder_factory), - audio_mixer, audio_processing, nullptr, - 
std::move(owned_audio_frame_processor), std::move(field_trials)); -} - } // namespace webrtc diff --git a/api/create_peerconnection_factory.h b/api/create_peerconnection_factory.h index f8f52a0869..68311008ae 100644 --- a/api/create_peerconnection_factory.h +++ b/api/create_peerconnection_factory.h @@ -10,63 +10,42 @@ #ifndef API_CREATE_PEERCONNECTION_FACTORY_H_ #define API_CREATE_PEERCONNECTION_FACTORY_H_ +// IWYU pragma: no_include "rtc_base/thread.h" #include +#include "api/audio/audio_device.h" #include "api/audio/audio_mixer.h" +#include "api/audio/audio_processing.h" #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_encoder_factory.h" +#include "api/field_trials_view.h" #include "api/peer_connection_interface.h" #include "api/scoped_refptr.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" - -namespace rtc { -// TODO(bugs.webrtc.org/9987): Move rtc::Thread to api/ or expose a better -// type. At the moment, rtc::Thread is not part of api/ so it cannot be -// included in order to avoid to leak internal types. -class Thread; -} // namespace rtc +#include "rtc_base/system/rtc_export.h" +#include "rtc_base/thread.h" namespace webrtc { - -class AudioDeviceModule; class AudioFrameProcessor; -class AudioProcessing; // Create a new instance of PeerConnectionFactoryInterface with optional video // codec factories. These video factories represents all video codecs, i.e. no // extra internal video codecs will be added. -// TODO(bugs.webrtc.org/15111): -// Remove the method with the raw AudioFrameProcessor pointer in the -// follow-up. -RTC_EXPORT rtc::scoped_refptr -CreatePeerConnectionFactory( - rtc::Thread* network_thread, - rtc::Thread* worker_thread, - rtc::Thread* signaling_thread, - rtc::scoped_refptr default_adm, - rtc::scoped_refptr audio_encoder_factory, - rtc::scoped_refptr audio_decoder_factory, - std::unique_ptr video_encoder_factory, - std::unique_ptr video_decoder_factory, - rtc::scoped_refptr audio_mixer, - rtc::scoped_refptr audio_processing, - AudioFrameProcessor* audio_frame_processor = nullptr); - -RTC_EXPORT rtc::scoped_refptr +RTC_EXPORT scoped_refptr CreatePeerConnectionFactory( - rtc::Thread* network_thread, - rtc::Thread* worker_thread, - rtc::Thread* signaling_thread, - rtc::scoped_refptr default_adm, - rtc::scoped_refptr audio_encoder_factory, - rtc::scoped_refptr audio_decoder_factory, + Thread* network_thread, + Thread* worker_thread, + Thread* signaling_thread, + scoped_refptr default_adm, + scoped_refptr audio_encoder_factory, + scoped_refptr audio_decoder_factory, std::unique_ptr video_encoder_factory, std::unique_ptr video_decoder_factory, - rtc::scoped_refptr audio_mixer, - rtc::scoped_refptr audio_processing, - std::unique_ptr owned_audio_frame_processor, + scoped_refptr audio_mixer, + scoped_refptr audio_processing, + std::unique_ptr audio_frame_processor = nullptr, std::unique_ptr field_trials = nullptr); } // namespace webrtc diff --git a/api/crypto/BUILD.gn b/api/crypto/BUILD.gn index 8d041ea059..2970f341b1 100644 --- a/api/crypto/BUILD.gn +++ b/api/crypto/BUILD.gn @@ -23,7 +23,8 @@ rtc_library("options") { "crypto_options.h", ] deps = [ - "../../rtc_base:ssl", + "../../rtc_base:checks", + "../../rtc_base:ssl_adapter", "../../rtc_base/system:rtc_export", ] } @@ -33,6 +34,7 @@ rtc_source_set("frame_decryptor_interface") { sources = [ "frame_decryptor_interface.h" ] deps = [ "..:array_view", + "..:ref_count", "..:rtp_parameters", "../../rtc_base:refcount", ] @@ -43,6 
+45,7 @@ rtc_source_set("frame_encryptor_interface") { sources = [ "frame_encryptor_interface.h" ] deps = [ "..:array_view", + "..:ref_count", "..:rtp_parameters", "../../rtc_base:refcount", ] diff --git a/api/crypto/crypto_options.cc b/api/crypto/crypto_options.cc index 22c5dd464b..2923833c2f 100644 --- a/api/crypto/crypto_options.cc +++ b/api/crypto/crypto_options.cc @@ -10,6 +10,9 @@ #include "api/crypto/crypto_options.h" +#include + +#include "rtc_base/checks.h" #include "rtc_base/ssl_stream_adapter.h" namespace webrtc { @@ -38,18 +41,18 @@ std::vector CryptoOptions::GetSupportedDtlsSrtpCryptoSuites() const { // As the cipher suite is potentially insecure, it will only be used if // enabled by both peers. if (srtp.enable_aes128_sha1_32_crypto_cipher) { - crypto_suites.push_back(rtc::kSrtpAes128CmSha1_32); + crypto_suites.push_back(kSrtpAes128CmSha1_32); } if (srtp.enable_aes128_sha1_80_crypto_cipher) { - crypto_suites.push_back(rtc::kSrtpAes128CmSha1_80); + crypto_suites.push_back(kSrtpAes128CmSha1_80); } // Note: GCM cipher suites are not the top choice since they increase the // packet size. In order to negotiate them the other side must not support // kSrtpAes128CmSha1_80. if (srtp.enable_gcm_crypto_suites) { - crypto_suites.push_back(rtc::kSrtpAeadAes256Gcm); - crypto_suites.push_back(rtc::kSrtpAeadAes128Gcm); + crypto_suites.push_back(kSrtpAeadAes256Gcm); + crypto_suites.push_back(kSrtpAeadAes128Gcm); } RTC_CHECK(!crypto_suites.empty()); return crypto_suites; diff --git a/api/crypto/crypto_options.h b/api/crypto/crypto_options.h index 317995134a..a937490787 100644 --- a/api/crypto/crypto_options.h +++ b/api/crypto/crypto_options.h @@ -41,7 +41,7 @@ struct RTC_EXPORT CryptoOptions { struct Srtp { // Enable GCM crypto suites from RFC 7714 for SRTP. GCM will only be used // if both sides enable it. - bool enable_gcm_crypto_suites = false; + bool enable_gcm_crypto_suites = true; // If set to true, the (potentially insecure) crypto cipher // kSrtpAes128CmSha1_32 will be included in the list of supported ciphers @@ -53,9 +53,10 @@ struct RTC_EXPORT CryptoOptions { // purposes. bool enable_aes128_sha1_80_crypto_cipher = true; - // If set to true, encrypted RTP header extensions as defined in RFC 6904 - // will be negotiated. They will only be used if both peers support them. - bool enable_encrypted_rtp_header_extensions = false; + // This feature enables encrypting RTP header extensions using RFC 6904, if + // requested. For this to work the Chromium field trial + // `kWebRtcEncryptedRtpHeaderExtensions` must be enabled. + bool enable_encrypted_rtp_header_extensions = true; } srtp; // Options to be used when the FrameEncryptor / FrameDecryptor APIs are used. diff --git a/api/crypto/frame_decryptor_interface.h b/api/crypto/frame_decryptor_interface.h index 2f6bdac4b4..7ed165fc84 100644 --- a/api/crypto/frame_decryptor_interface.h +++ b/api/crypto/frame_decryptor_interface.h @@ -11,11 +11,13 @@ #ifndef API_CRYPTO_FRAME_DECRYPTOR_INTERFACE_H_ #define API_CRYPTO_FRAME_DECRYPTOR_INTERFACE_H_ +#include +#include #include #include "api/array_view.h" #include "api/media_types.h" -#include "rtc_base/ref_count.h" +#include "api/ref_count.h" namespace webrtc { @@ -27,7 +29,7 @@ namespace webrtc { // without it. You may assume that this interface will have the same lifetime // as the RTPReceiver it is attached to. It must only be attached to one // RTPReceiver. Additional data may be null. 
-class FrameDecryptorInterface : public rtc::RefCountInterface { +class FrameDecryptorInterface : public RefCountInterface { public: // The Status enum represents all possible states that can be // returned when attempting to decrypt a frame. kRecoverable indicates that @@ -58,16 +60,16 @@ class FrameDecryptorInterface : public rtc::RefCountInterface { // kRecoverable should be returned if the failure was due to something other // than a decryption failure. kFailedToDecrypt should be returned in all other // cases. - virtual Result Decrypt(cricket::MediaType media_type, + virtual Result Decrypt(webrtc::MediaType media_type, const std::vector& csrcs, - rtc::ArrayView additional_data, - rtc::ArrayView encrypted_frame, - rtc::ArrayView frame) = 0; + ArrayView additional_data, + ArrayView encrypted_frame, + ArrayView frame) = 0; // Returns the total required length in bytes for the output of the // decryption. This can be larger than the actual number of bytes you need but // must never be smaller as it informs the size of the frame buffer. - virtual size_t GetMaxPlaintextByteSize(cricket::MediaType media_type, + virtual size_t GetMaxPlaintextByteSize(webrtc::MediaType media_type, size_t encrypted_frame_size) = 0; }; diff --git a/api/crypto/frame_encryptor_interface.h b/api/crypto/frame_encryptor_interface.h index 1452b80189..9a2b1709d8 100644 --- a/api/crypto/frame_encryptor_interface.h +++ b/api/crypto/frame_encryptor_interface.h @@ -11,9 +11,12 @@ #ifndef API_CRYPTO_FRAME_ENCRYPTOR_INTERFACE_H_ #define API_CRYPTO_FRAME_ENCRYPTOR_INTERFACE_H_ +#include +#include + #include "api/array_view.h" #include "api/media_types.h" -#include "rtc_base/ref_count.h" +#include "api/ref_count.h" namespace webrtc { @@ -24,7 +27,7 @@ namespace webrtc { // addition to the standard SRTP mechanism and is not intended to be used // without it. Implementations of this interface will have the same lifetime as // the RTPSenders it is attached to. Additional data may be null. -class FrameEncryptorInterface : public rtc::RefCountInterface { +class FrameEncryptorInterface : public RefCountInterface { public: ~FrameEncryptorInterface() override {} @@ -35,17 +38,17 @@ class FrameEncryptorInterface : public rtc::RefCountInterface { // must set bytes_written to the number of bytes you wrote in the // encrypted_frame. 0 must be returned if successful all other numbers can be // selected by the implementer to represent error codes. - virtual int Encrypt(cricket::MediaType media_type, + virtual int Encrypt(webrtc::MediaType media_type, uint32_t ssrc, - rtc::ArrayView additional_data, - rtc::ArrayView frame, - rtc::ArrayView encrypted_frame, + ArrayView additional_data, + ArrayView frame, + ArrayView encrypted_frame, size_t* bytes_written) = 0; // Returns the total required length in bytes for the output of the // encryption. This can be larger than the actual number of bytes you need but // must never be smaller as it informs the size of the encrypted_frame buffer. - virtual size_t GetMaxCiphertextByteSize(cricket::MediaType media_type, + virtual size_t GetMaxCiphertextByteSize(webrtc::MediaType media_type, size_t frame_size) = 0; }; diff --git a/api/crypto_params.h b/api/crypto_params.h deleted file mode 100644 index 34906ea0ef..0000000000 --- a/api/crypto_params.h +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_CRYPTO_PARAMS_H_ -#define API_CRYPTO_PARAMS_H_ - -#include - -#include "absl/strings/string_view.h" - -namespace cricket { - -// Parameters for SRTP negotiation, as described in RFC 4568. -// TODO(benwright) - Rename to SrtpCryptoParams as these only apply to SRTP and -// not generic crypto parameters for WebRTC. -struct CryptoParams { - CryptoParams() : tag(0) {} - CryptoParams(int t, - absl::string_view cs, - absl::string_view kp, - absl::string_view sp) - : tag(t), crypto_suite(cs), key_params(kp), session_params(sp) {} - - bool Matches(const CryptoParams& params) const { - return (tag == params.tag && crypto_suite == params.crypto_suite); - } - - int tag; - std::string crypto_suite; - std::string key_params; - std::string session_params; -}; - -} // namespace cricket - -#endif // API_CRYPTO_PARAMS_H_ diff --git a/api/data_channel_event_observer_interface.h b/api/data_channel_event_observer_interface.h new file mode 100644 index 0000000000..f9b59020f4 --- /dev/null +++ b/api/data_channel_event_observer_interface.h @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2025 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_DATA_CHANNEL_EVENT_OBSERVER_INTERFACE_H_ +#define API_DATA_CHANNEL_EVENT_OBSERVER_INTERFACE_H_ + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" + +namespace webrtc { + +// TODO: issues.chromium.org/407785197 - Maybe update the observer to also +// notify on controll messages as well. +// TODO: issues.chromium.org/407785197 - Remove comment below when DataChannel +// logging has been launched. +// NOTE: This class is still under development and may change without notice. +class DataChannelEventObserverInterface { + public: + virtual ~DataChannelEventObserverInterface() = default; + + class Message { + public: + enum class Direction { kSend, kReceive }; + enum class DataType { kString, kBinary }; + + // When `direction` is `kSend` the timestamp represent when the message was + // handed over to the transport, if `direction` is `kReceive` then it + // represent when the message was received from the transport. 
+ int64_t unix_timestamp_ms() const { return unix_timestamp_; } + void set_unix_timestamp_ms(int64_t timestamp) { + unix_timestamp_ = timestamp; + } + + int datachannel_id() const { return datachannel_id_; } + void set_datachannel_id(int id) { datachannel_id_ = id; } + + absl::string_view label() const { return label_; } + void set_label(absl::string_view label) { label_ = std::string(label); } + + Direction direction() const { return direction_; } + void set_direction(Direction direction) { direction_ = direction; } + + DataType data_type() const { return data_type_; } + void set_data_type(DataType type) { data_type_ = type; } + + const std::vector& data() const { return data_; } + void set_data(ArrayView d) { + data_.assign(d.begin(), d.end()); + } + + private: + int64_t unix_timestamp_; + int datachannel_id_; + std::string label_; + Direction direction_; + DataType data_type_; + std::vector data_; + }; + + virtual void OnMessage(const Message& message) = 0; +}; + +} // namespace webrtc + +#endif // API_DATA_CHANNEL_EVENT_OBSERVER_INTERFACE_H_ diff --git a/api/data_channel_interface.cc b/api/data_channel_interface.cc index 970f53b4bd..2c4f0e6cdf 100644 --- a/api/data_channel_interface.cc +++ b/api/data_channel_interface.cc @@ -10,6 +10,13 @@ #include "api/data_channel_interface.h" +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "api/priority.h" +#include "api/rtc_error.h" #include "rtc_base/checks.h" namespace webrtc { @@ -18,20 +25,12 @@ bool DataChannelInterface::ordered() const { return false; } -uint16_t DataChannelInterface::maxRetransmitTime() const { - return 0; -} - -uint16_t DataChannelInterface::maxRetransmits() const { - return 0; +std::optional DataChannelInterface::maxRetransmitsOpt() const { + return std::nullopt; } -absl::optional DataChannelInterface::maxRetransmitsOpt() const { - return absl::nullopt; -} - -absl::optional DataChannelInterface::maxPacketLifeTime() const { - return absl::nullopt; +std::optional DataChannelInterface::maxPacketLifeTime() const { + return std::nullopt; } std::string DataChannelInterface::protocol() const { @@ -42,20 +41,24 @@ bool DataChannelInterface::negotiated() const { return false; } +PriorityValue DataChannelInterface::priority() const { + return PriorityValue(Priority::kLow); +} + uint64_t DataChannelInterface::MaxSendQueueSize() { return 16 * 1024 * 1024; // 16 MiB } // TODO(tommi): Remove method once downstream implementations have been removed. -bool DataChannelInterface::Send(const DataBuffer& buffer) { +bool DataChannelInterface::Send(const DataBuffer& /* buffer */) { RTC_DCHECK_NOTREACHED(); return false; } // TODO(tommi): Remove implementation once method is pure virtual. 
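// A minimal sketch of how the DataChannelEventObserverInterface added above
// might be implemented. The class name and the choice of logging sink are
// invented for illustration and are not part of this change; only the
// Message accessors declared in the new header are assumed.
class LoggingDataChannelEventObserver
    : public DataChannelEventObserverInterface {
 public:
  void OnMessage(const Message& message) override {
    using Direction = Message::Direction;
    const char* direction =
        message.direction() == Direction::kSend ? "send" : "receive";
    // Payloads are reported by size only; string payloads could additionally
    // be decoded from message.data() when data_type() is kString.
    RTC_LOG(LS_INFO) << "DataChannel " << message.datachannel_id() << " ("
                     << message.label() << ") " << direction << " "
                     << message.data().size() << " bytes at "
                     << message.unix_timestamp_ms();
  }
};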
void DataChannelInterface::SendAsync( - DataBuffer buffer, - absl::AnyInvocable on_complete) { + DataBuffer /* buffer */, + absl::AnyInvocable /* on_complete */) { RTC_DCHECK_NOTREACHED(); } diff --git a/api/data_channel_interface.h b/api/data_channel_interface.h index bf27c6c4f3..4a8299a0b7 100644 --- a/api/data_channel_interface.h +++ b/api/data_channel_interface.h @@ -17,21 +17,21 @@ #include #include +#include #include #include "absl/functional/any_invocable.h" -#include "absl/types/optional.h" #include "api/priority.h" +#include "api/ref_count.h" #include "api/rtc_error.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { // C++ version of: https://www.w3.org/TR/webrtc/#idl-def-rtcdatachannelinit -// TODO(deadbeef): Use absl::optional for the "-1 if unset" things. +// TODO(deadbeef): Use std::optional for the "-1 if unset" things. struct DataChannelInit { // Deprecated. Reliability is assumed, and channel will be unreliable if // maxRetransmitTime or MaxRetransmits is set. @@ -46,13 +46,13 @@ struct DataChannelInit { // Cannot be set along with `maxRetransmits`. // This is called `maxPacketLifeTime` in the WebRTC JS API. // Negative values are ignored, and positive values are clamped to [0-65535] - absl::optional maxRetransmitTime; + std::optional maxRetransmitTime; // The max number of retransmissions. // // Cannot be set along with `maxRetransmitTime`. // Negative values are ignored, and positive values are clamped to [0-65535] - absl::optional maxRetransmits; + std::optional maxRetransmits; // This is set by the application and opaque to the WebRTC implementation. std::string protocol; @@ -67,21 +67,21 @@ struct DataChannelInit { int id = -1; // https://w3c.github.io/webrtc-priority/#new-rtcdatachannelinit-member - absl::optional priority; + std::optional priority; }; // At the JavaScript level, data can be passed in as a string or a blob, so // this structure's `binary` flag tells whether the data should be interpreted // as binary or text. struct DataBuffer { - DataBuffer(const rtc::CopyOnWriteBuffer& data, bool binary) + DataBuffer(const CopyOnWriteBuffer& data, bool binary) : data(data), binary(binary) {} // For convenience for unit tests. explicit DataBuffer(const std::string& text) : data(text.data(), text.length()), binary(false) {} size_t size() const { return data.size(); } - rtc::CopyOnWriteBuffer data; + CopyOnWriteBuffer data; // Indicates if the received data contains UTF-8 or binary data. // Note that the upper layers are left to verify the UTF-8 encoding. // TODO(jiayl): prefer to use an enum instead of a bool. @@ -99,7 +99,7 @@ class DataChannelObserver { // A data buffer was successfully received. virtual void OnMessage(const DataBuffer& buffer) = 0; // The data channel's buffered_amount has changed. - virtual void OnBufferedAmountChange(uint64_t sent_data_size) {} + virtual void OnBufferedAmountChange(uint64_t /* sent_data_size */) {} // Override this to get callbacks directly on the network thread. 
// An implementation that does that must not block the network thread @@ -116,7 +116,7 @@ class DataChannelObserver { virtual ~DataChannelObserver() = default; }; -class RTC_EXPORT DataChannelInterface : public rtc::RefCountInterface { +class RTC_EXPORT DataChannelInterface : public RefCountInterface { public: // C++ version of: https://www.w3.org/TR/webrtc/#idl-def-rtcdatachannelstate // Unlikely to change, but keep in sync with DataChannel.java:State and @@ -160,11 +160,8 @@ class RTC_EXPORT DataChannelInterface : public rtc::RefCountInterface { // implemented these APIs. They should all just return the values the // DataChannel was created with. virtual bool ordered() const; - // TODO(hta): Deprecate and remove the following two functions. - virtual uint16_t maxRetransmitTime() const; - virtual uint16_t maxRetransmits() const; - virtual absl::optional maxRetransmitsOpt() const; - virtual absl::optional maxPacketLifeTime() const; + virtual std::optional maxRetransmitsOpt() const; + virtual std::optional maxPacketLifeTime() const; virtual std::string protocol() const; virtual bool negotiated() const; @@ -172,7 +169,7 @@ class RTC_EXPORT DataChannelInterface : public rtc::RefCountInterface { // If negotiated in-band, this ID will be populated once the DTLS role is // determined, and until then this will return -1. virtual int id() const = 0; - virtual Priority priority() const { return Priority::kLow; } + virtual PriorityValue priority() const; virtual DataState state() const = 0; // When state is kClosed, and the DataChannel was not closed using // the closing procedure, returns the error information about the closing. diff --git a/api/dtls_transport_interface.cc b/api/dtls_transport_interface.cc index faebc0972f..4e672e2201 100644 --- a/api/dtls_transport_interface.cc +++ b/api/dtls_transport_interface.cc @@ -10,6 +10,12 @@ #include "api/dtls_transport_interface.h" +#include +#include +#include + +#include "rtc_base/ssl_certificate.h" + namespace webrtc { DtlsTransportInformation::DtlsTransportInformation() @@ -20,11 +26,11 @@ DtlsTransportInformation::DtlsTransportInformation(DtlsTransportState state) DtlsTransportInformation::DtlsTransportInformation( DtlsTransportState state, - absl::optional role, - absl::optional tls_version, - absl::optional ssl_cipher_suite, - absl::optional srtp_cipher_suite, - std::unique_ptr remote_ssl_certificates) + std::optional role, + std::optional tls_version, + std::optional ssl_cipher_suite, + std::optional srtp_cipher_suite, + std::unique_ptr remote_ssl_certificates) : state_(state), role_(role), tls_version_(tls_version), @@ -35,12 +41,12 @@ DtlsTransportInformation::DtlsTransportInformation( // Deprecated version DtlsTransportInformation::DtlsTransportInformation( DtlsTransportState state, - absl::optional tls_version, - absl::optional ssl_cipher_suite, - absl::optional srtp_cipher_suite, - std::unique_ptr remote_ssl_certificates) + std::optional tls_version, + std::optional ssl_cipher_suite, + std::optional srtp_cipher_suite, + std::unique_ptr remote_ssl_certificates) : state_(state), - role_(absl::nullopt), + role_(std::nullopt), tls_version_(tls_version), ssl_cipher_suite_(ssl_cipher_suite), srtp_cipher_suite_(srtp_cipher_suite), diff --git a/api/dtls_transport_interface.h b/api/dtls_transport_interface.h index 7b0151249c..10fc174643 100644 --- a/api/dtls_transport_interface.h +++ b/api/dtls_transport_interface.h @@ -12,13 +12,13 @@ #define API_DTLS_TRANSPORT_INTERFACE_H_ #include -#include +#include -#include "absl/types/optional.h" +#include 
"absl/base/attributes.h" #include "api/ice_transport_interface.h" +#include "api/ref_count.h" #include "api/rtc_error.h" #include "api/scoped_refptr.h" -#include "rtc_base/ref_count.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/system/rtc_export.h" @@ -49,18 +49,18 @@ class RTC_EXPORT DtlsTransportInformation { explicit DtlsTransportInformation(DtlsTransportState state); DtlsTransportInformation( DtlsTransportState state, - absl::optional role, - absl::optional tls_version, - absl::optional ssl_cipher_suite, - absl::optional srtp_cipher_suite, - std::unique_ptr remote_ssl_certificates); + std::optional role, + std::optional tls_version, + std::optional ssl_cipher_suite, + std::optional srtp_cipher_suite, + std::unique_ptr remote_ssl_certificates); ABSL_DEPRECATED("Use version with role parameter") DtlsTransportInformation( DtlsTransportState state, - absl::optional tls_version, - absl::optional ssl_cipher_suite, - absl::optional srtp_cipher_suite, - std::unique_ptr remote_ssl_certificates); + std::optional tls_version, + std::optional ssl_cipher_suite, + std::optional srtp_cipher_suite, + std::unique_ptr remote_ssl_certificates); // Copy and assign DtlsTransportInformation(const DtlsTransportInformation& c); @@ -71,22 +71,22 @@ class RTC_EXPORT DtlsTransportInformation { default; DtlsTransportState state() const { return state_; } - absl::optional role() const { return role_; } - absl::optional tls_version() const { return tls_version_; } - absl::optional ssl_cipher_suite() const { return ssl_cipher_suite_; } - absl::optional srtp_cipher_suite() const { return srtp_cipher_suite_; } + std::optional role() const { return role_; } + std::optional tls_version() const { return tls_version_; } + std::optional ssl_cipher_suite() const { return ssl_cipher_suite_; } + std::optional srtp_cipher_suite() const { return srtp_cipher_suite_; } // The accessor returns a temporary pointer, it does not release ownership. - const rtc::SSLCertChain* remote_ssl_certificates() const { + const SSLCertChain* remote_ssl_certificates() const { return remote_ssl_certificates_.get(); } private: DtlsTransportState state_; - absl::optional role_; - absl::optional tls_version_; - absl::optional ssl_cipher_suite_; - absl::optional srtp_cipher_suite_; - std::unique_ptr remote_ssl_certificates_; + std::optional role_; + std::optional tls_version_; + std::optional ssl_cipher_suite_; + std::optional srtp_cipher_suite_; + std::unique_ptr remote_ssl_certificates_; }; class DtlsTransportObserverInterface { @@ -107,10 +107,10 @@ class DtlsTransportObserverInterface { // accessed on that thread, except for functions explicitly marked otherwise. // References can be held by other threads, and destruction can therefore // be initiated by other threads. -class DtlsTransportInterface : public rtc::RefCountInterface { +class DtlsTransportInterface : public webrtc::RefCountInterface { public: // Returns a pointer to the ICE transport that is owned by the DTLS transport. - virtual rtc::scoped_refptr ice_transport() = 0; + virtual scoped_refptr ice_transport() = 0; // Returns information on the state of the DtlsTransport. // This function can be called from other threads. 
virtual DtlsTransportInformation Information() = 0; diff --git a/api/dtmf_sender_interface.h b/api/dtmf_sender_interface.h index d63e66bbf7..16ce665e53 100644 --- a/api/dtmf_sender_interface.h +++ b/api/dtmf_sender_interface.h @@ -13,8 +13,7 @@ #include -#include "api/media_stream_interface.h" -#include "rtc_base/ref_count.h" +#include "api/ref_count.h" namespace webrtc { @@ -28,12 +27,12 @@ class DtmfSenderObserverInterface { // tones. // The callback includes the state of the tone buffer at the time when // the tone finished playing. - virtual void OnToneChange(const std::string& tone, - const std::string& tone_buffer) {} + virtual void OnToneChange(const std::string& /* tone */, + const std::string& /* tone_buffer */) {} // DEPRECATED: Older API without tone buffer. // TODO(bugs.webrtc.org/9725): Remove old API and default implementation // when old callers are gone. - virtual void OnToneChange(const std::string& tone) {} + virtual void OnToneChange(const std::string& /* tone */) {} protected: virtual ~DtmfSenderObserverInterface() = default; @@ -42,7 +41,7 @@ class DtmfSenderObserverInterface { // The interface of native implementation of the RTCDTMFSender defined by the // WebRTC W3C Editor's Draft. // See: https://www.w3.org/TR/webrtc/#peer-to-peer-dtmf -class DtmfSenderInterface : public rtc::RefCountInterface { +class DtmfSenderInterface : public webrtc::RefCountInterface { public: // Provides the spec compliant default 2 second delay for the ',' character. static const int kDtmfDefaultCommaDelayMs = 2000; @@ -92,7 +91,7 @@ class DtmfSenderInterface : public rtc::RefCountInterface { virtual bool InsertDtmf(const std::string& tones, int duration, int inter_tone_gap, - int comma_delay) { + int /* comma_delay */) { // TODO(bugs.webrtc.org/165700): Remove once downstream implementations // override this signature rather than the 3-parameter one. return InsertDtmf(tones, duration, inter_tone_gap); diff --git a/api/enable_media.cc b/api/enable_media.cc new file mode 100644 index 0000000000..7d6d0cc3df --- /dev/null +++ b/api/enable_media.cc @@ -0,0 +1,75 @@ +/* + * Copyright 2023 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/enable_media.h" + +#include +#include + +#include "absl/base/nullability.h" +#include "api/environment/environment.h" +#include "api/peer_connection_interface.h" +#include "api/scoped_refptr.h" +#include "call/call.h" +#include "call/call_config.h" +#include "media/base/media_engine.h" +#include "media/engine/webrtc_video_engine.h" +#include "media/engine/webrtc_voice_engine.h" +#include "pc/media_factory.h" + +namespace webrtc { +namespace { + +class MediaFactoryImpl : public MediaFactory { + public: + MediaFactoryImpl() = default; + MediaFactoryImpl(const MediaFactoryImpl&) = delete; + MediaFactoryImpl& operator=(const MediaFactoryImpl&) = delete; + ~MediaFactoryImpl() override = default; + + std::unique_ptr CreateCall(CallConfig config) override { + return webrtc::Call::Create(std::move(config)); + } + + std::unique_ptr CreateMediaEngine( + const Environment& env, + PeerConnectionFactoryDependencies& deps) override { + absl_nullable scoped_refptr audio_processing = + deps.audio_processing_builder != nullptr + ? 
std::move(deps.audio_processing_builder)->Build(env) +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + : std::move(deps.audio_processing); +#pragma clang diagnostic pop + + auto audio_engine = std::make_unique( + env, std::move(deps.adm), std::move(deps.audio_encoder_factory), + std::move(deps.audio_decoder_factory), std::move(deps.audio_mixer), + std::move(audio_processing), std::move(deps.audio_frame_processor)); + auto video_engine = std::make_unique( + std::move(deps.video_encoder_factory), + std::move(deps.video_decoder_factory), env.field_trials()); + return std::make_unique(std::move(audio_engine), + std::move(video_engine)); + } +}; + +} // namespace + +void EnableMedia(PeerConnectionFactoryDependencies& deps) { + if (deps.media_factory != nullptr) { + // Do nothing if media is already enabled. Overwriting media_factory can be + // harmful when a different (e.g. test-only) implementation is used. + return; + } + deps.media_factory = std::make_unique(); +} + +} // namespace webrtc diff --git a/api/enable_media.h b/api/enable_media.h new file mode 100644 index 0000000000..85183963cf --- /dev/null +++ b/api/enable_media.h @@ -0,0 +1,27 @@ +/* + * Copyright 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ENABLE_MEDIA_H_ +#define API_ENABLE_MEDIA_H_ + +#include "api/peer_connection_interface.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Enables media support for a PeerConnectionFactory created from `deps`. +// This function is located in its own build target to allow webrtc users that +// do not need any media to avoid linking media-specific code and thus to reduce +// binary size. +RTC_EXPORT void EnableMedia(PeerConnectionFactoryDependencies& deps); + +} // namespace webrtc + +#endif // API_ENABLE_MEDIA_H_ diff --git a/api/enable_media_with_defaults.cc b/api/enable_media_with_defaults.cc new file mode 100644 index 0000000000..2ad88849d8 --- /dev/null +++ b/api/enable_media_with_defaults.cc @@ -0,0 +1,54 @@ +/* + * Copyright 2023 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree.
+ */ + +#include "api/enable_media_with_defaults.h" + +#include + +#include "api/audio/builtin_audio_processing_builder.h" +#include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/enable_media.h" +#include "api/peer_connection_interface.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "api/video_codecs/builtin_video_decoder_factory.h" +#include "api/video_codecs/builtin_video_encoder_factory.h" + +namespace webrtc { + +void EnableMediaWithDefaults(PeerConnectionFactoryDependencies& deps) { + if (deps.task_queue_factory == nullptr) { + deps.task_queue_factory = CreateDefaultTaskQueueFactory(); + } + if (deps.audio_encoder_factory == nullptr) { + deps.audio_encoder_factory = CreateBuiltinAudioEncoderFactory(); + } + if (deps.audio_decoder_factory == nullptr) { + deps.audio_decoder_factory = CreateBuiltinAudioDecoderFactory(); + } +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + if (deps.audio_processing == nullptr && +#pragma clang diagnostic pop + deps.audio_processing_builder == nullptr) { + deps.audio_processing_builder = + std::make_unique(); + } + if (deps.video_encoder_factory == nullptr) { + deps.video_encoder_factory = CreateBuiltinVideoEncoderFactory(); + } + if (deps.video_decoder_factory == nullptr) { + deps.video_decoder_factory = CreateBuiltinVideoDecoderFactory(); + } + EnableMedia(deps); +} + +} // namespace webrtc diff --git a/api/enable_media_with_defaults.h b/api/enable_media_with_defaults.h new file mode 100644 index 0000000000..1b13a98fc9 --- /dev/null +++ b/api/enable_media_with_defaults.h @@ -0,0 +1,28 @@ +/* + * Copyright 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ENABLE_MEDIA_WITH_DEFAULTS_H_ +#define API_ENABLE_MEDIA_WITH_DEFAULTS_H_ + +#include "api/peer_connection_interface.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Fills unset media related dependencies in `deps` and enables media support +// for a PeerConnectionFactory created from `deps`. +// This function is located in its own build target as it pulls additional +// dependencies compared to `EnableMedia`, and thus may add extra binary size. +RTC_EXPORT void EnableMediaWithDefaults( + PeerConnectionFactoryDependencies& deps); + +} // namespace webrtc + +#endif // API_ENABLE_MEDIA_WITH_DEFAULTS_H_ diff --git a/api/environment/BUILD.gn b/api/environment/BUILD.gn new file mode 100644 index 0000000000..1a21672e5c --- /dev/null +++ b/api/environment/BUILD.gn @@ -0,0 +1,67 @@ +# Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
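As a usage sketch for the two entry points above (EnableMedia and EnableMediaWithDefaults): the helper below is hypothetical and assumes the conventional flow of filling PeerConnectionFactoryDependencies and then creating the factory via CreateModularPeerConnectionFactory().

#include <utility>

#include "api/enable_media_with_defaults.h"
#include "api/peer_connection_interface.h"
#include "api/scoped_refptr.h"

// Hypothetical helper, not part of this change.
webrtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
BuildFactoryWithDefaultMedia() {
  webrtc::PeerConnectionFactoryDependencies deps;
  // ... fill threads, networking and other non-media dependencies here ...

  // Fills any unset codec factories, audio processing and task queue factory,
  // then calls EnableMedia(deps) to install the media factory.
  webrtc::EnableMediaWithDefaults(deps);

  // A data-channel-only application would skip this call (and EnableMedia)
  // entirely to keep media-specific code out of the binary.
  return webrtc::CreateModularPeerConnectionFactory(std::move(deps));
}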
+ +import("../../webrtc.gni") + +rtc_source_set("environment") { + visibility = [ "*" ] + sources = [ "environment.h" ] + deps = [ + "..:field_trials_view", + "..:refcountedbase", + "..:scoped_refptr", + "../../rtc_base/system:rtc_export", + "../../system_wrappers", + "../rtc_event_log", + "../task_queue", + "//third_party/abseil-cpp/absl/base:nullability", + ] +} + +rtc_library("environment_factory") { + visibility = [ "*" ] + poisonous = [ "environment_construction" ] + sources = [ + "environment_factory.cc", + "environment_factory.h", + ] + deps = [ + ":environment", + "..:field_trials_view", + "..:make_ref_counted", + "..:refcountedbase", + "..:scoped_refptr", + "../../rtc_base:checks", + "../../rtc_base/system:rtc_export", + "../../system_wrappers", + "../rtc_event_log", + "../task_queue:default_task_queue_factory", + "../task_queue:task_queue", + "../transport:field_trial_based_config", + "//third_party/abseil-cpp/absl/base:nullability", + ] +} + +if (rtc_include_tests) { + rtc_library("environment_unittests") { + testonly = true + sources = [ "environment_unittest.cc" ] + deps = [ + ":environment", + ":environment_factory", + "..:field_trials_view", + "../../system_wrappers", + "../../test:test_support", + "../rtc_event_log", + "../task_queue", + "../units:timestamp", + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/strings:string_view", + ] + } +} diff --git a/api/environment/DEPS b/api/environment/DEPS new file mode 100644 index 0000000000..7050f40f70 --- /dev/null +++ b/api/environment/DEPS @@ -0,0 +1,3 @@ +include_rules = [ + "+system_wrappers/include/clock.h", +] diff --git a/api/environment/OWNERS b/api/environment/OWNERS new file mode 100644 index 0000000000..a8af6b5b26 --- /dev/null +++ b/api/environment/OWNERS @@ -0,0 +1,15 @@ +# Environment has a limited visibility for stronger control what utilities are +# exposed through it. +# Utilities exposed through environemnt +# - should be helpful for various WebRTC sub components. +# - should be thread safe. +# - should have a default implementation. +# - should provide functionality different to existing utilities in the +# environemnt. +# - should need at most one instance per peer connection. +set noparent +include ../../OWNERS_INFRA + +danilchap@webrtc.org +hta@webrtc.org +mbonadei@webrtc.org diff --git a/api/environment/environment.h b/api/environment/environment.h new file mode 100644 index 0000000000..60f064c6f1 --- /dev/null +++ b/api/environment/environment.h @@ -0,0 +1,143 @@ +/* + * Copyright 2023 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This header file provides wrapper for common WebRTC utilities. +// Different application may need different implementations of these utilities, +// Moreover, single application may need to use WebRTC for multiple purposes, +// and thus would need to provide different utilities implementations for +// different peer connections. +// The main purpose of the `Environment` class below is to propagate references +// to those utilities to all WebRTC classes that need them. 
+ +#ifndef API_ENVIRONMENT_ENVIRONMENT_H_ +#define API_ENVIRONMENT_ENVIRONMENT_H_ + +#include + +#include "absl/base/nullability.h" +#include "api/field_trials_view.h" +#include "api/ref_counted_base.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_factory.h" +#include "rtc_base/system/rtc_export.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { +// Contains references to WebRTC utilities. Object of this class should be +// passed as a construction parameter and saved by value in each class that +// needs it. Most classes shouldn't create a new instance of the `Environment`, +// but instead should use a propagated copy. +// Usually Environment should be the first parameter in a constructor or a +// factory, and the first member in the class. Keeping Environment as the first +// member in the class ensures utilities (e.g. clock) are still valid during +// destruction of other members. +// +// Example: +// class PeerConnection { +// public: +// PeerConnection(const Environment& env, ...) +// : env_(env), +// log_duration_on_destruction_(&env_.clock()), +// rtp_manager_(env_, ...), +// ... +// +// const FieldTrialsView& trials() const { return env_.field_trials(); } +// +// scoped_refptr AddTransceiver(...) { +// return make_ref_counted(env_, ...); +// } +// +// private: +// const Environment env_; +// Stats log_duration_on_destruction_; +// RtpTransmissionManager rtp_manager_; +// }; +// This class is thread safe. +class RTC_EXPORT Environment final { + public: + // Default constructor is deleted in favor of creating this object using + // `EnvironmentFactory`. To create the default environment use + // `EnvironmentFactory().Create()` or `CreateEnvironment()`. + Environment() = delete; + + Environment(const Environment&) = default; + Environment(Environment&&) = default; + Environment& operator=(const Environment&) = default; + Environment& operator=(Environment&&) = default; + + ~Environment() = default; + + // Provides means to alter behavior, mostly for A/B testing new features. + // See ../../g3doc/field-trials.md + const FieldTrialsView& field_trials() const; + + // Provides an interface to query current time. + // See ../../g3doc/implementation_basics.md#time + Clock& clock() const; + + // Provides a factory for task queues, WebRTC threading primitives. + // See ../../g3doc/implementation_basics.md#threads + TaskQueueFactory& task_queue_factory() const; + + // Provides an interface for collecting structured logs. + // See ../../logging/g3doc/rtc_event_log.md + RtcEventLog& event_log() const; + + private: + friend class EnvironmentFactory; + Environment(scoped_refptr storage, + const FieldTrialsView* absl_nonnull field_trials, + Clock* absl_nonnull clock, + TaskQueueFactory* absl_nonnull task_queue_factory, + RtcEventLog* absl_nonnull event_log) + : storage_(std::move(storage)), + field_trials_(field_trials), + clock_(clock), + task_queue_factory_(task_queue_factory), + event_log_(event_log) {} + + // Container that keeps ownership of the utilities below. + // Defining this as a RefCountedBase allows `Environment` to share this + // storage with another `Environment`, in particular allows `Environment` to + // be copyable. It is up to the `EnvironmentFactory` to provide an object that + // ensures references to utilties below are valid while object in the + // `storage_` is alive. 
+ scoped_refptr storage_; + + const FieldTrialsView* absl_nonnull field_trials_; + Clock* absl_nonnull clock_; + TaskQueueFactory* absl_nonnull task_queue_factory_; + RtcEventLog* absl_nonnull event_log_; +}; + +//------------------------------------------------------------------------------ +// Implementation details follow +//------------------------------------------------------------------------------ + +inline const FieldTrialsView& Environment::field_trials() const { + return *field_trials_; +} + +inline Clock& Environment::clock() const { + return *clock_; +} + +inline TaskQueueFactory& Environment::task_queue_factory() const { + return *task_queue_factory_; +} + +inline RtcEventLog& Environment::event_log() const { + return *event_log_; +} + +} // namespace webrtc + +#endif // API_ENVIRONMENT_ENVIRONMENT_H_ diff --git a/api/environment/environment_factory.cc b/api/environment/environment_factory.cc new file mode 100644 index 0000000000..692ecc09c2 --- /dev/null +++ b/api/environment/environment_factory.cc @@ -0,0 +1,128 @@ +/* + * Copyright 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/environment/environment_factory.h" + +#include +#include + +#include "absl/base/nullability.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" +#include "api/make_ref_counted.h" +#include "api/ref_counted_base.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/transport/field_trial_based_config.h" +#include "rtc_base/checks.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { +namespace { + +template +void Store(absl_nonnull std::unique_ptr value, + scoped_refptr& leaf) { + class StorageNode : public RefCountedBase { + public: + StorageNode(scoped_refptr parent, + absl_nonnull std::unique_ptr value) + : parent_(std::move(parent)), value_(std::move(value)) {} + + StorageNode(const StorageNode&) = delete; + StorageNode& operator=(const StorageNode&) = delete; + + ~StorageNode() override = default; + + private: + scoped_refptr parent_; + absl_nonnull std::unique_ptr value_; + }; + + // Utilities provided with ownership form a tree: + // Root is nullptr, each node keeps an ownership of one utility. + // Each child node has a link to the parent, but parent is unaware of its + // children. Each `EnvironmentFactory` and `Environment` keep a reference to a + // 'leaf_' - node with the last provided utility. This way `Environment` keeps + // ownership of a single branch of the storage tree with each used utiltity + // owned by one of the nodes on that branch. 
+ leaf = make_ref_counted(std::move(leaf), std::move(value)); +} + +} // namespace + +EnvironmentFactory::EnvironmentFactory(const Environment& env) + : leaf_(env.storage_), + field_trials_(env.field_trials_), + clock_(env.clock_), + task_queue_factory_(env.task_queue_factory_), + event_log_(env.event_log_) {} + +void EnvironmentFactory::Set( + absl_nullable std::unique_ptr utility) { + if (utility != nullptr) { + field_trials_ = utility.get(); + Store(std::move(utility), leaf_); + } +} + +void EnvironmentFactory::Set(absl_nullable std::unique_ptr utility) { + if (utility != nullptr) { + clock_ = utility.get(); + Store(std::move(utility), leaf_); + } +} + +void EnvironmentFactory::Set( + absl_nullable std::unique_ptr utility) { + if (utility != nullptr) { + task_queue_factory_ = utility.get(); + Store(std::move(utility), leaf_); + } +} + +void EnvironmentFactory::Set( + absl_nullable std::unique_ptr utility) { + if (utility != nullptr) { + event_log_ = utility.get(); + Store(std::move(utility), leaf_); + } +} + +Environment EnvironmentFactory::CreateWithDefaults() && { + if (field_trials_ == nullptr) { + Set(std::make_unique()); + } + if (clock_ == nullptr) { + Set(Clock::GetRealTimeClock()); + } + if (task_queue_factory_ == nullptr) { + Set(CreateDefaultTaskQueueFactory(field_trials_)); + } + if (event_log_ == nullptr) { + Set(std::make_unique()); + } + + RTC_DCHECK(field_trials_ != nullptr); + RTC_DCHECK(clock_ != nullptr); + RTC_DCHECK(task_queue_factory_ != nullptr); + RTC_DCHECK(event_log_ != nullptr); + return Environment(std::move(leaf_), // + field_trials_, clock_, task_queue_factory_, event_log_); +} + +Environment EnvironmentFactory::Create() const { + // Create a temporary copy to avoid mutating `this` with default utilities. + return EnvironmentFactory(*this).CreateWithDefaults(); +} + +} // namespace webrtc diff --git a/api/environment/environment_factory.h b/api/environment/environment_factory.h new file mode 100644 index 0000000000..3023ea4238 --- /dev/null +++ b/api/environment/environment_factory.h @@ -0,0 +1,144 @@ +/* + * Copyright 2023 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ENVIRONMENT_ENVIRONMENT_FACTORY_H_ +#define API_ENVIRONMENT_ENVIRONMENT_FACTORY_H_ + +#include +#include + +#include "absl/base/nullability.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" +#include "api/ref_counted_base.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_factory.h" +#include "rtc_base/system/rtc_export.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { +// Constructs `Environment`. +// Individual utilities are provided using one of the `Set` functions. +// `Set` functions do nothing when nullptr value is passed. +// Creates default implementations for utilities that are not provided. 
+// +// Examples: +// Environment default_env = EnvironmentFactory().Create(); +// +// EnvironmentFactory factory; +// factory.Set(std::make_unique()); +// factory.Set(std::make_unique()); +// Environment custom_env = factory.Create(); +// +class RTC_EXPORT EnvironmentFactory final { + public: + EnvironmentFactory() = default; + explicit EnvironmentFactory(const Environment& env); + + EnvironmentFactory(const EnvironmentFactory&) = default; + EnvironmentFactory(EnvironmentFactory&&) = default; + EnvironmentFactory& operator=(const EnvironmentFactory&) = default; + EnvironmentFactory& operator=(EnvironmentFactory&&) = default; + + ~EnvironmentFactory() = default; + + void Set(absl_nullable std::unique_ptr utility); + void Set(absl_nullable std::unique_ptr utility); + void Set(absl_nullable std::unique_ptr utility); + void Set(absl_nullable std::unique_ptr utility); + + void Set(const FieldTrialsView* absl_nullable utility); + void Set(Clock* absl_nullable utility); + void Set(TaskQueueFactory* absl_nullable utility); + void Set(RtcEventLog* absl_nullable utility); + + Environment Create() const; + + private: + Environment CreateWithDefaults() &&; + + scoped_refptr leaf_; + + const FieldTrialsView* absl_nullable field_trials_ = nullptr; + Clock* absl_nullable clock_ = nullptr; + TaskQueueFactory* absl_nullable task_queue_factory_ = nullptr; + RtcEventLog* absl_nullable event_log_ = nullptr; +}; + +// Helper for concise way to create an environment. +// `Environment env = CreateEnvironment(utility1, utility2)` is a shortcut to +// `EnvironmentFactory factory; +// factory.Set(utility1); +// factory.Set(utility2); +// Environment env = factory.Create();` +// +// Examples: +// Environment default_env = CreateEnvironment(); +// Environment custom_env = +// CreateEnvironment(std::make_unique(), +// std::make_unique()); +template +Environment CreateEnvironment(Utilities&&... utilities); + +//------------------------------------------------------------------------------ +// Implementation details follow +//------------------------------------------------------------------------------ + +inline void EnvironmentFactory::Set( + const FieldTrialsView* absl_nullable utility) { + if (utility != nullptr) { + field_trials_ = utility; + } +} + +inline void EnvironmentFactory::Set(Clock* absl_nullable utility) { + if (utility != nullptr) { + clock_ = utility; + } +} + +inline void EnvironmentFactory::Set(TaskQueueFactory* absl_nullable utility) { + if (utility != nullptr) { + task_queue_factory_ = utility; + } +} + +inline void EnvironmentFactory::Set(RtcEventLog* absl_nullable utility) { + if (utility != nullptr) { + event_log_ = utility; + } +} + +namespace webrtc_create_environment_internal { + +inline void Set(EnvironmentFactory& /* factory */) {} + +template +void Set(EnvironmentFactory& factory, + FirstUtility&& first, + Utilities&&... utilities) { + factory.Set(std::forward(first)); + Set(factory, std::forward(utilities)...); +} + +} // namespace webrtc_create_environment_internal + +template +Environment CreateEnvironment(Utilities&&... 
utilities) { + EnvironmentFactory factory; + webrtc_create_environment_internal::Set( + factory, std::forward(utilities)...); + return factory.Create(); +} + +} // namespace webrtc + +#endif // API_ENVIRONMENT_ENVIRONMENT_FACTORY_H_ diff --git a/api/environment/environment_unittest.cc b/api/environment/environment_unittest.cc new file mode 100644 index 0000000000..12dd40d70a --- /dev/null +++ b/api/environment/environment_unittest.cc @@ -0,0 +1,280 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/environment/environment.h" + +#include +#include +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "absl/strings/string_view.h" +#include "api/environment/environment_factory.h" +#include "api/field_trials_view.h" +#include "api/rtc_event_log/rtc_event.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/units/timestamp.h" +#include "system_wrappers/include/clock.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::ElementsAre; +using ::testing::IsEmpty; +using ::testing::Not; +using ::testing::NotNull; +using ::testing::Ref; + +class FakeEvent : public RtcEvent { + public: + Type GetType() const override { return RtcEvent::Type::FakeEvent; } + bool IsConfigEvent() const override { return false; } +}; + +class FakeFieldTrials : public FieldTrialsView { + public: + explicit FakeFieldTrials(absl::AnyInvocable on_destroyed = nullptr) + : on_destroyed_(std::move(on_destroyed)) {} + ~FakeFieldTrials() override { + if (on_destroyed_ != nullptr) { + std::move(on_destroyed_)(); + } + } + + std::string Lookup(absl::string_view /* key */) const override { + return "fake"; + } + + private: + absl::AnyInvocable on_destroyed_; +}; + +class FakeTaskQueueFactory : public TaskQueueFactory { + public: + explicit FakeTaskQueueFactory( + absl::AnyInvocable on_destroyed = nullptr) + : on_destroyed_(std::move(on_destroyed)) {} + ~FakeTaskQueueFactory() override { + if (on_destroyed_ != nullptr) { + std::move(on_destroyed_)(); + } + } + + std::unique_ptr CreateTaskQueue( + absl::string_view /* name */, + Priority /* priority */) const override { + return nullptr; + } + + private: + absl::AnyInvocable on_destroyed_; +}; + +TEST(EnvironmentTest, DefaultEnvironmentHasAllUtilities) { + Environment env = EnvironmentFactory().Create(); + + // Try to use each utility, expect no crashes. 
+ env.clock().CurrentTime(); + EXPECT_THAT(env.task_queue_factory().CreateTaskQueue( + "test", TaskQueueFactory::Priority::NORMAL), + NotNull()); + env.event_log().Log(std::make_unique()); + env.field_trials().Lookup("WebRTC-Debugging-RtpDump"); +} + +TEST(EnvironmentTest, UsesProvidedUtilitiesWithOwnership) { + auto owned_field_trials = std::make_unique(); + auto owned_task_queue_factory = std::make_unique(); + auto owned_clock = std::make_unique(Timestamp::Zero()); + auto owned_event_log = std::make_unique(); + + FieldTrialsView& field_trials = *owned_field_trials; + TaskQueueFactory& task_queue_factory = *owned_task_queue_factory; + Clock& clock = *owned_clock; + RtcEventLog& event_log = *owned_event_log; + + Environment env = CreateEnvironment( + std::move(owned_field_trials), std::move(owned_clock), + std::move(owned_task_queue_factory), std::move(owned_event_log)); + + EXPECT_THAT(env.field_trials(), Ref(field_trials)); + EXPECT_THAT(env.task_queue_factory(), Ref(task_queue_factory)); + EXPECT_THAT(env.clock(), Ref(clock)); + EXPECT_THAT(env.event_log(), Ref(event_log)); +} + +TEST(EnvironmentTest, UsesProvidedUtilitiesWithoutOwnership) { + FakeFieldTrials field_trials; + FakeTaskQueueFactory task_queue_factory; + SimulatedClock clock(Timestamp::Zero()); + RtcEventLogNull event_log; + + Environment env = + CreateEnvironment(&field_trials, &clock, &task_queue_factory, &event_log); + + EXPECT_THAT(env.field_trials(), Ref(field_trials)); + EXPECT_THAT(env.task_queue_factory(), Ref(task_queue_factory)); + EXPECT_THAT(env.clock(), Ref(clock)); + EXPECT_THAT(env.event_log(), Ref(event_log)); +} + +TEST(EnvironmentTest, UsesLastProvidedUtility) { + auto owned_field_trials1 = std::make_unique(); + auto owned_field_trials2 = std::make_unique(); + FieldTrialsView& field_trials2 = *owned_field_trials2; + + Environment env = CreateEnvironment(std::move(owned_field_trials1), + std::move(owned_field_trials2)); + + EXPECT_THAT(env.field_trials(), Ref(field_trials2)); +} + +// Utilities can be provided from different sources, and when some source +// choose not to provide an utility, it is usually expressed with nullptr. +// When utility is not provided, it is natural to use previously set one. +// E.g. Both PeerConnectionFactoryDependencies and PeerConnectionDependencies +// provide field trials. When PeerConnectionDependencies::trials == nullptr, +// then trials from the PeerConnectionFactoryDependencies should be used. +// With nullptr accepted and ignored this can be expressed by +// `Environemt env = CreateEnvironment(pcf_deps.trials, pc_deps.trials);` +// That would use pc_deps.trials when not nullptr, pcf_deps.trials when +// pc_deps.trials is nullptr, but pcf_deps.trials is not, and default field +// trials when both are nullptr. +TEST(EnvironmentTest, IgnoresProvidedNullptrUtility) { + auto owned_field_trials = std::make_unique(); + std::unique_ptr null_field_trials = nullptr; + FieldTrialsView& field_trials = *owned_field_trials; + + Environment env = CreateEnvironment(std::move(owned_field_trials), + std::move(null_field_trials)); + + EXPECT_THAT(env.field_trials(), Ref(field_trials)); +} + +TEST(EnvironmentTest, KeepsUtilityAliveWhileEnvironmentIsAlive) { + bool utility_destroyed = false; + auto field_trials = std::make_unique( + /*on_destroyed=*/[&] { utility_destroyed = true; }); + + // Wrap Environment into optional to have explicit control when it is deleted. 
+ std::optional env = CreateEnvironment(std::move(field_trials)); + + EXPECT_FALSE(utility_destroyed); + env = std::nullopt; + EXPECT_TRUE(utility_destroyed); +} + +TEST(EnvironmentTest, KeepsUtilityAliveWhileCopyOfEnvironmentIsAlive) { + bool utility_destroyed = false; + auto field_trials = std::make_unique( + /*on_destroyed=*/[&] { utility_destroyed = true; }); + + std::optional env1 = CreateEnvironment(std::move(field_trials)); + std::optional env2 = env1; + + EXPECT_FALSE(utility_destroyed); + env1 = std::nullopt; + EXPECT_FALSE(utility_destroyed); + env2 = std::nullopt; + EXPECT_TRUE(utility_destroyed); +} + +TEST(EnvironmentTest, FactoryCanBeReusedToCreateDifferentEnvironments) { + auto owned_task_queue_factory = std::make_unique(); + auto owned_field_trials1 = std::make_unique(); + auto owned_field_trials2 = std::make_unique(); + TaskQueueFactory& task_queue_factory = *owned_task_queue_factory; + FieldTrialsView& field_trials1 = *owned_field_trials1; + FieldTrialsView& field_trials2 = *owned_field_trials2; + + EnvironmentFactory factory; + factory.Set(std::move(owned_task_queue_factory)); + factory.Set(std::move(owned_field_trials1)); + Environment env1 = factory.Create(); + factory.Set(std::move(owned_field_trials2)); + Environment env2 = factory.Create(); + + // Environments share the same custom task queue factory. + EXPECT_THAT(env1.task_queue_factory(), Ref(task_queue_factory)); + EXPECT_THAT(env2.task_queue_factory(), Ref(task_queue_factory)); + + // Environments have different field trials. + EXPECT_THAT(env1.field_trials(), Ref(field_trials1)); + EXPECT_THAT(env2.field_trials(), Ref(field_trials2)); +} + +TEST(EnvironmentTest, FactoryCanCreateNewEnvironmentFromExistingOne) { + Environment env1 = + CreateEnvironment(std::make_unique()); + EnvironmentFactory factory(env1); + factory.Set(std::make_unique()); + Environment env2 = factory.Create(); + + // Environments share the same default clock. + EXPECT_THAT(env2.clock(), Ref(env1.clock())); + + // Environments share the same custom task queue factory. + EXPECT_THAT(env2.task_queue_factory(), Ref(env1.task_queue_factory())); + + // Environments have different field trials. + EXPECT_THAT(env2.field_trials(), Not(Ref(env1.field_trials()))); +} + +TEST(EnvironmentTest, KeepsOwnershipsWhenCreateNewEnvironmentFromExistingOne) { + bool utility1_destroyed = false; + bool utility2_destroyed = false; + std::optional env1 = + CreateEnvironment(std::make_unique( + /*on_destroyed=*/[&] { utility1_destroyed = true; })); + + std::optional factory = EnvironmentFactory(*env1); + + // Destroy env1, check utility1 it was using is still alive. + env1 = std::nullopt; + EXPECT_FALSE(utility1_destroyed); + + factory->Set(std::make_unique( + /*on_destroyed=*/[&] { utility2_destroyed = true; })); + std::optional env2 = factory->Create(); + + // Destroy the factory, check all utilities used by env2 are alive. + factory = std::nullopt; + EXPECT_FALSE(utility1_destroyed); + EXPECT_FALSE(utility2_destroyed); + + // Once last Environment object is deleted, utilties should be deleted too. 
+ env2 = std::nullopt; + EXPECT_TRUE(utility1_destroyed); + EXPECT_TRUE(utility2_destroyed); +} + +TEST(EnvironmentTest, DestroysUtilitiesInReverseProvidedOrder) { + std::vector destroyed; + auto field_trials = std::make_unique( + /*on_destroyed=*/[&] { destroyed.push_back("field_trials"); }); + auto task_queue_factory = std::make_unique( + /*on_destroyed=*/[&] { destroyed.push_back("task_queue_factory"); }); + + std::optional env = + CreateEnvironment(std::move(field_trials), std::move(task_queue_factory)); + + ASSERT_THAT(destroyed, IsEmpty()); + env = std::nullopt; + EXPECT_THAT(destroyed, ElementsAre("task_queue_factory", "field_trials")); +} + +} // namespace +} // namespace webrtc diff --git a/api/fec_controller.h b/api/fec_controller.h index a9be656d6e..a6cf769523 100644 --- a/api/fec_controller.h +++ b/api/fec_controller.h @@ -11,9 +11,12 @@ #ifndef API_FEC_CONTROLLER_H_ #define API_FEC_CONTROLLER_H_ +#include +#include #include #include +#include "api/environment/environment.h" #include "api/video/video_frame_type.h" #include "modules/include/module_fec_types.h" @@ -34,6 +37,7 @@ class VCMProtectionCallback { // 'retransmission_mode' is either a value of enum RetransmissionMode, or // computed with bitwise operators on values of enum RetransmissionMode. virtual void SetRetransmissionMode(int retransmission_mode) = 0; + protected: virtual ~VCMProtectionCallback() {} }; @@ -87,8 +91,10 @@ class FecController { class FecControllerFactoryInterface { public: - virtual std::unique_ptr CreateFecController() = 0; virtual ~FecControllerFactoryInterface() = default; + + virtual std::unique_ptr CreateFecController( + const Environment& env) = 0; }; } // namespace webrtc diff --git a/api/field_trials.cc b/api/field_trials.cc index 4bd11271dc..4aca69d781 100644 --- a/api/field_trials.cc +++ b/api/field_trials.cc @@ -11,44 +11,41 @@ #include "api/field_trials.h" #include +#include +#include +#include +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" +#include "rtc_base/containers/flat_map.h" #include "system_wrappers/include/field_trial.h" namespace { -// This part is copied from system_wrappers/field_trial.cc. -webrtc::flat_map InsertIntoMap(const std::string& s) { - std::string::size_type field_start = 0; +webrtc::flat_map InsertIntoMap(absl::string_view s) { webrtc::flat_map key_value_map; - while (field_start < s.size()) { - std::string::size_type separator_pos = s.find('/', field_start); - RTC_CHECK_NE(separator_pos, std::string::npos) + while (!s.empty()) { + absl::string_view::size_type separator_pos = s.find('/'); + RTC_CHECK_NE(separator_pos, absl::string_view::npos) << "Missing separator '/' after field trial key."; - RTC_CHECK_GT(separator_pos, field_start) - << "Field trial key cannot be empty."; - std::string key = s.substr(field_start, separator_pos - field_start); - field_start = separator_pos + 1; + RTC_CHECK_GT(separator_pos, 0) << "Field trial key cannot be empty."; + absl::string_view key = s.substr(0, separator_pos); + s.remove_prefix(separator_pos + 1); - RTC_CHECK_LT(field_start, s.size()) + RTC_CHECK(!s.empty()) << "Missing value after field trial key. 
String ended."; - separator_pos = s.find('/', field_start); - RTC_CHECK_NE(separator_pos, std::string::npos) + separator_pos = s.find('/'); + RTC_CHECK_NE(separator_pos, absl::string_view::npos) << "Missing terminating '/' in field trial string."; - RTC_CHECK_GT(separator_pos, field_start) - << "Field trial value cannot be empty."; - std::string value = s.substr(field_start, separator_pos - field_start); - field_start = separator_pos + 1; + RTC_CHECK_GT(separator_pos, 0) << "Field trial value cannot be empty."; + absl::string_view value = s.substr(0, separator_pos); + s.remove_prefix(separator_pos + 1); // If a key is specified multiple times, only the value linked to the first // key is stored. note: This will crash in debug build when calling // InitFieldTrialsFromString(). key_value_map.emplace(key, value); } - // This check is technically redundant due to earlier checks. - // We nevertheless keep the check to make it clear that the entire - // string has been processed, and without indexing past the end. - RTC_CHECK_EQ(field_start, s.size()); return key_value_map; } @@ -62,7 +59,7 @@ std::atomic instance_created_{false}; namespace webrtc { -FieldTrials::FieldTrials(const std::string& s) +FieldTrials::FieldTrials(absl::string_view s) : uses_global_(true), field_trial_string_(s), previous_field_trial_string_(webrtc::field_trial::GetFieldTrialString()), @@ -73,11 +70,11 @@ FieldTrials::FieldTrials(const std::string& s) << "Only one instance may be instanciated at any given time!"; } -std::unique_ptr FieldTrials::CreateNoGlobal(const std::string& s) { +std::unique_ptr FieldTrials::CreateNoGlobal(absl::string_view s) { return std::unique_ptr(new FieldTrials(s, true)); } -FieldTrials::FieldTrials(const std::string& s, bool) +FieldTrials::FieldTrials(absl::string_view s, bool) : uses_global_(false), previous_field_trial_string_(nullptr), key_value_map_(InsertIntoMap(s)) {} @@ -91,7 +88,7 @@ FieldTrials::~FieldTrials() { } std::string FieldTrials::GetValue(absl::string_view key) const { - auto it = key_value_map_.find(std::string(key)); + auto it = key_value_map_.find(key); if (it != key_value_map_.end()) return it->second; @@ -99,7 +96,7 @@ std::string FieldTrials::GetValue(absl::string_view key) const { // a mix between FieldTrials and the global string continue to work // TODO(bugs.webrtc.org/10335): Remove the global string! if (uses_global_) { - return field_trial::FindFullName(std::string(key)); + return field_trial::FindFullName(key); } return ""; } diff --git a/api/field_trials.h b/api/field_trials.h index bf7a7cc625..693c1afe8c 100644 --- a/api/field_trials.h +++ b/api/field_trials.h @@ -36,15 +36,15 @@ namespace webrtc { // (unless using CreateNoGlobal): class FieldTrials : public FieldTrialsRegistry { public: - explicit FieldTrials(const std::string& s); + explicit FieldTrials(absl::string_view s); ~FieldTrials(); // Create a FieldTrials object that is not reading/writing from // global variable (i.e can not be used for all parts of webrtc). 
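For reference, the parser above expects the "Key/Value/" format with every key and value terminated by '/'; a minimal hedged usage of the CreateNoGlobal() factory described in the comment above could look like this (trial names are placeholders):

#include <memory>

#include "api/field_trials.h"

// Hypothetical usage, not part of this change.
void FieldTrialsSketch() {
  // An empty key or value, or a missing trailing '/', trips one of the
  // RTC_CHECKs in InsertIntoMap().
  std::unique_ptr<webrtc::FieldTrials> trials =
      webrtc::FieldTrials::CreateNoGlobal(
          "WebRTC-ExampleTrial/Enabled/WebRTC-OtherTrial/Disabled/");

  // IsEnabled() is a convenience wrapper around Lookup().
  bool enabled = trials->IsEnabled("WebRTC-ExampleTrial");
  (void)enabled;
}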
- static std::unique_ptr CreateNoGlobal(const std::string& s); + static std::unique_ptr CreateNoGlobal(absl::string_view s); private: - explicit FieldTrials(const std::string& s, bool); + explicit FieldTrials(absl::string_view s, bool); std::string GetValue(absl::string_view key) const override; diff --git a/api/field_trials_registry.cc b/api/field_trials_registry.cc index 61d31512ce..11259cc6af 100644 --- a/api/field_trials_registry.cc +++ b/api/field_trials_registry.cc @@ -11,12 +11,13 @@ #include -#include "absl/algorithm/container.h" #include "absl/strings/string_view.h" +// IWYU pragma: begin_keep +#include "absl/algorithm/container.h" #include "experiments/registered_field_trials.h" #include "rtc_base/checks.h" -#include "rtc_base/containers/flat_set.h" #include "rtc_base/logging.h" +// IWYU pragma: end_keep namespace webrtc { diff --git a/api/field_trials_unittest.cc b/api/field_trials_unittest.cc index 804b52a818..8144f11c38 100644 --- a/api/field_trials_unittest.cc +++ b/api/field_trials_unittest.cc @@ -11,9 +11,7 @@ #include "api/field_trials.h" #include -#include -#include "absl/strings/string_view.h" #include "api/transport/field_trial_based_config.h" #include "rtc_base/containers/flat_set.h" #include "system_wrappers/include/field_trial.h" diff --git a/api/frame_transformer_factory.cc b/api/frame_transformer_factory.cc index 841ab0f941..a619f0df53 100644 --- a/api/frame_transformer_factory.cc +++ b/api/frame_transformer_factory.cc @@ -10,8 +10,13 @@ #include "api/frame_transformer_factory.h" +#include + +#include "api/frame_transformer_interface.h" +#include "audio/channel_receive_frame_transformer_delegate.h" #include "audio/channel_send_frame_transformer_delegate.h" #include "modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h" +#include "rtc_base/checks.h" namespace webrtc { @@ -27,13 +32,16 @@ std::unique_ptr CreateVideoReceiverFrame() { std::unique_ptr CloneAudioFrame( TransformableAudioFrameInterface* original) { - // At the moment, only making sender frames is supported. + if (original->GetDirection() == + TransformableAudioFrameInterface::Direction::kReceiver) + return CloneReceiverAudioFrame(original); return CloneSenderAudioFrame(original); } std::unique_ptr CloneVideoFrame( TransformableVideoFrameInterface* original) { - // At the moment, only making sender frames from receiver frames is supported. + // At the moment, only making sender frames from receiver frames is + // supported. return CloneSenderVideoFrame(original); } diff --git a/api/frame_transformer_factory.h b/api/frame_transformer_factory.h index a73ff62956..fda6117314 100644 --- a/api/frame_transformer_factory.h +++ b/api/frame_transformer_factory.h @@ -12,12 +12,9 @@ #define API_FRAME_TRANSFORMER_FACTORY_H_ #include -#include #include "api/frame_transformer_interface.h" -#include "api/scoped_refptr.h" -#include "api/video/encoded_frame.h" -#include "api/video/video_frame_metadata.h" +#include "rtc_base/system/rtc_export.h" // This file contains EXPERIMENTAL functions to create video frames from // either an old video frame or directly from parameters. diff --git a/api/frame_transformer_interface.cc b/api/frame_transformer_interface.cc new file mode 100644 index 0000000000..88d4d198fb --- /dev/null +++ b/api/frame_transformer_interface.cc @@ -0,0 +1,26 @@ +/* + * Copyright 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/frame_transformer_interface.h" + +namespace webrtc { + +TransformableFrameInterface::TransformableFrameInterface( + TransformableFrameInterface::Passkey) {} + +TransformableVideoFrameInterface::TransformableVideoFrameInterface( + TransformableFrameInterface::Passkey passkey) + : TransformableFrameInterface(passkey) {} + +TransformableAudioFrameInterface::TransformableAudioFrameInterface( + TransformableFrameInterface::Passkey passkey) + : TransformableFrameInterface(passkey) {} + +} // namespace webrtc diff --git a/api/frame_transformer_interface.h b/api/frame_transformer_interface.h index bd9ea2d67a..873bf6dce3 100644 --- a/api/frame_transformer_interface.h +++ b/api/frame_transformer_interface.h @@ -11,37 +11,62 @@ #ifndef API_FRAME_TRANSFORMER_INTERFACE_H_ #define API_FRAME_TRANSFORMER_INTERFACE_H_ +#include #include -#include +#include +#include +#include "api/array_view.h" +#include "api/ref_count.h" #include "api/scoped_refptr.h" -#include "api/video/encoded_frame.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/video_frame_metadata.h" -#include "rtc_base/ref_count.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { // Owns the frame payload data. class TransformableFrameInterface { public: + // Only a known list of internal implementations of transformable frames are + // permitted to allow internal downcasting. This is enforced via the + // internally-constructable Passkey. + // TODO: bugs.webrtc.org/339815768 - Remove this passkey once the + // downcasts are removed. + class Passkey; + RTC_EXPORT explicit TransformableFrameInterface(Passkey); + + TransformableFrameInterface(TransformableFrameInterface&&) = default; + TransformableFrameInterface& operator=(TransformableFrameInterface&&) = + default; + virtual ~TransformableFrameInterface() = default; // Returns the frame payload data. The data is valid until the next non-const // method call. - virtual rtc::ArrayView GetData() const = 0; + virtual ArrayView GetData() const = 0; // Copies `data` into the owned frame payload data. - virtual void SetData(rtc::ArrayView data) = 0; + virtual void SetData(ArrayView data) = 0; virtual uint8_t GetPayloadType() const = 0; virtual uint32_t GetSsrc() const = 0; virtual uint32_t GetTimestamp() const = 0; virtual void SetRTPTimestamp(uint32_t timestamp) = 0; + // TODO(https://bugs.webrtc.org/373365537): Remove this once its usage is + // removed from blink. + [[deprecated( + "Use GetPresentationTimestamp instead")]] virtual std::optional + GetCaptureTimeIdentifier() const { + return std::nullopt; + } + // TODO(https://bugs.webrtc.org/14878): Change this to pure virtual after it // is implemented everywhere. - virtual absl::optional GetCaptureTimeIdentifier() const { - return absl::nullopt; + virtual std::optional GetPresentationTimestamp() const { + return std::nullopt; } enum class Direction { @@ -53,10 +78,31 @@ class TransformableFrameInterface { // sender frames to allow received frames to be directly re-transmitted on // other PeerConnectionss. virtual Direction GetDirection() const { return Direction::kUnknown; } + virtual std::string GetMimeType() const = 0; + + // Timestamp at which the packet has been first seen on the network interface. + // Only defined for received frames. 
+ virtual std::optional ReceiveTime() const = 0; + + // Timestamp at which the frame was captured in the capturer system. + // The timestamp is expressed in the capturer system's clock relative to the + // NTP epoch (January 1st 1970 00:00 UTC) + // Accessible only if the absolute capture timestamp header extension is + // enabled. + virtual std::optional CaptureTime() const = 0; + + // Offset between the sender system's clock and the capturer system's clock. + // Can be used to express the capture time in the local system's clock as + // long as the local system can determine the offset between its local clock + // and the sender system's clock. + // Accessible only if the absolute capture timestamp header extension is + // enabled. + virtual std::optional SenderCaptureTimeOffset() const = 0; }; class TransformableVideoFrameInterface : public TransformableFrameInterface { public: + RTC_EXPORT explicit TransformableVideoFrameInterface(Passkey passkey); virtual ~TransformableVideoFrameInterface() = default; virtual bool IsKeyFrame() const = 0; @@ -68,59 +114,97 @@ class TransformableVideoFrameInterface : public TransformableFrameInterface { // Extends the TransformableFrameInterface to expose audio-specific information. class TransformableAudioFrameInterface : public TransformableFrameInterface { public: + RTC_EXPORT explicit TransformableAudioFrameInterface(Passkey passkey); virtual ~TransformableAudioFrameInterface() = default; - virtual rtc::ArrayView GetContributingSources() const = 0; + virtual ArrayView GetContributingSources() const = 0; - // TODO(crbug.com/1453226): Change this to pure virtual after it - // is implemented everywhere. - virtual const absl::optional SequenceNumber() const { - return absl::nullopt; - } + virtual const std::optional SequenceNumber() const = 0; - // TODO(crbug.com/1456628): Change this to pure virtual after it - // is implemented everywhere. - virtual absl::optional AbsoluteCaptureTimestamp() const { - return absl::nullopt; - } + // TODO(crbug.com/391114797): Delete this function. + virtual std::optional AbsoluteCaptureTimestamp() const = 0; enum class FrameType { kEmptyFrame, kAudioFrameSpeech, kAudioFrameCN }; // TODO(crbug.com/1456628): Change this to pure virtual after it // is implemented everywhere. virtual FrameType Type() const { return FrameType::kEmptyFrame; } + + // Audio level in -dBov. Values range from 0 to 127, representing 0 to -127 + // dBov. 127 represents digital silence. Only present on remote frames if + // the audio level header extension was included. + virtual std::optional AudioLevel() const = 0; }; // Objects implement this interface to be notified with the transformed frame. -class TransformedFrameCallback : public rtc::RefCountInterface { +class TransformedFrameCallback : public RefCountInterface { public: virtual void OnTransformedFrame( std::unique_ptr frame) = 0; + // Request to no longer be called on each frame, instead having frames be + // sent directly to OnTransformedFrame without additional work. + // TODO(crbug.com/1502781): Make pure virtual once all mocks have + // implementations. + virtual void StartShortCircuiting() {} + protected: ~TransformedFrameCallback() override = default; }; // Transforms encoded frames. The transformed frame is sent in a callback using // the TransformedFrameCallback interface (see above). 
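As a hedged illustration of the interface declared just below, a trivial pass-through transformer might forward frames to the registered callback unchanged; the class name is hypothetical and thread-safety concerns are deliberately omitted. Such an object would typically be created with webrtc::make_ref_counted<>() since the interface is ref-counted.

#include <memory>
#include <utility>

#include "api/frame_transformer_interface.h"
#include "api/scoped_refptr.h"

namespace example {  // Illustrative only, not part of this change.

// Forwards every frame, unmodified, to the registered callback.
class PassThroughTransformer : public webrtc::FrameTransformerInterface {
 public:
  void Transform(std::unique_ptr<webrtc::TransformableFrameInterface> frame)
      override {
    if (callback_) {
      callback_->OnTransformedFrame(std::move(frame));
    }
  }

  void RegisterTransformedFrameCallback(
      webrtc::scoped_refptr<webrtc::TransformedFrameCallback> callback)
      override {
    callback_ = std::move(callback);
  }

  void UnregisterTransformedFrameCallback() override { callback_ = nullptr; }

 private:
  webrtc::scoped_refptr<webrtc::TransformedFrameCallback> callback_;
};

}  // namespace example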
-class FrameTransformerInterface : public rtc::RefCountInterface { +class FrameTransformerInterface : public RefCountInterface { public: // Transforms `frame` using the implementing class' processing logic. virtual void Transform( std::unique_ptr transformable_frame) = 0; virtual void RegisterTransformedFrameCallback( - rtc::scoped_refptr) {} + scoped_refptr) {} virtual void RegisterTransformedFrameSinkCallback( - rtc::scoped_refptr, - uint32_t ssrc) {} + scoped_refptr, + uint32_t /* ssrc */) {} virtual void UnregisterTransformedFrameCallback() {} - virtual void UnregisterTransformedFrameSinkCallback(uint32_t ssrc) {} + virtual void UnregisterTransformedFrameSinkCallback(uint32_t /* ssrc */) {} protected: ~FrameTransformerInterface() override = default; }; +// An interface implemented by classes that can host a transform. +// Currently this is implemented by the RTCRtpSender and RTCRtpReceiver. +class FrameTransformerHost { + public: + virtual ~FrameTransformerHost() {} + virtual void SetFrameTransformer( + scoped_refptr frame_transformer) = 0; + // TODO: bugs.webrtc.org/15929 - To be added: + // virtual AddIncomingMediaType(RtpCodec codec) = 0; + // virtual AddOutgoingMediaType(RtpCodec codec) = 0; +}; + +//------------------------------------------------------------------------------ +// Implementation details follow +//------------------------------------------------------------------------------ +class TransformableFrameInterface::Passkey { + public: + ~Passkey() = default; + + private: + // Explicit list of allowed internal implmentations of + // TransformableFrameInterface. + friend class TransformableOutgoingAudioFrame; + friend class TransformableIncomingAudioFrame; + friend class TransformableVideoSenderFrame; + friend class TransformableVideoReceiverFrame; + + friend class MockTransformableFrame; + friend class MockTransformableAudioFrame; + friend class MockTransformableVideoFrame; + Passkey() = default; +}; + } // namespace webrtc #endif // API_FRAME_TRANSFORMER_INTERFACE_H_ diff --git a/api/function_view.h b/api/function_view.h index 5ae1bd6cfe..f19a1d389d 100644 --- a/api/function_view.h +++ b/api/function_view.h @@ -11,6 +11,7 @@ #ifndef API_FUNCTION_VIEW_H_ #define API_FUNCTION_VIEW_H_ +#include #include #include @@ -28,7 +29,7 @@ // // Example use: // -// void SomeFunction(rtc::FunctionView index_transform); +// void SomeFunction(webrtc::FunctionView index_transform); // ... // SomeFunction([](int i) { return 2 * i + 1; }); // @@ -36,7 +37,7 @@ // copyable, so it's probably cheaper to pass it by value than by const // reference. -namespace rtc { +namespace webrtc { template class FunctionView; // Undefined. @@ -85,7 +86,7 @@ class FunctionView final { typename std::enable_if::type>::value>::type* = nullptr> - FunctionView(F&& f) : call_(nullptr) {} + FunctionView(F&& /* f */) : call_(nullptr) {} // Default constructor. Creates an empty FunctionView. FunctionView() : call_(nullptr) {} @@ -125,6 +126,14 @@ class FunctionView final { RetT (*call_)(VoidUnion, ArgT...); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::FunctionView; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // API_FUNCTION_VIEW_H_ diff --git a/api/function_view_unittest.cc b/api/function_view_unittest.cc index 156ea5c22d..8c5f176d23 100644 --- a/api/function_view_unittest.cc +++ b/api/function_view_unittest.cc @@ -15,11 +15,11 @@ #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { -int CallWith33(rtc::FunctionView fv) { +int CallWith33(FunctionView fv) { return fv ? fv(33) : -1; } @@ -40,7 +40,7 @@ TEST(FunctionViewTest, ImplicitConversion) { TEST(FunctionViewTest, IntIntLambdaWithoutState) { auto f = [](int x) { return x + 1; }; EXPECT_EQ(18, f(17)); - rtc::FunctionView fv(f); + FunctionView fv(f); EXPECT_TRUE(fv); EXPECT_EQ(18, fv(17)); } @@ -48,7 +48,7 @@ TEST(FunctionViewTest, IntIntLambdaWithoutState) { TEST(FunctionViewTest, IntVoidLambdaWithState) { int x = 13; auto f = [x]() mutable { return ++x; }; - rtc::FunctionView fv(f); + FunctionView fv(f); EXPECT_TRUE(fv); EXPECT_EQ(14, f()); EXPECT_EQ(15, fv()); @@ -57,30 +57,30 @@ TEST(FunctionViewTest, IntVoidLambdaWithState) { } TEST(FunctionViewTest, IntIntFunction) { - rtc::FunctionView fv(Add33); + FunctionView fv(Add33); EXPECT_TRUE(fv); EXPECT_EQ(50, fv(17)); } TEST(FunctionViewTest, IntIntFunctionPointer) { - rtc::FunctionView fv(&Add33); + FunctionView fv(&Add33); EXPECT_TRUE(fv); EXPECT_EQ(50, fv(17)); } TEST(FunctionViewTest, Null) { // These two call constructors that statically construct null FunctionViews. - EXPECT_FALSE(rtc::FunctionView()); - EXPECT_FALSE(rtc::FunctionView(nullptr)); + EXPECT_FALSE(FunctionView()); + EXPECT_FALSE(FunctionView(nullptr)); // This calls the constructor for function pointers. - EXPECT_FALSE(rtc::FunctionView(reinterpret_cast(0))); + EXPECT_FALSE(FunctionView(reinterpret_cast(0))); } // Ensure that FunctionView handles move-only arguments and return values. 
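// Illustrative sketch mirroring the "Example use" comment in function_view.h:
// FunctionView is now spelled webrtc::FunctionView, with the rtc:: name kept
// only behind WEBRTC_ALLOW_DEPRECATED_NAMESPACES. `ApplyToSeven` and `UseIt`
// are hypothetical helpers, not part of the API.
#include "api/function_view.h"

int ApplyToSeven(webrtc::FunctionView<int(int)> index_transform) {
  // FunctionView is cheap to copy, so it is taken by value.
  return index_transform ? index_transform(7) : -1;
}

int UseIt() {
  // Any callable with a compatible signature binds implicitly.
  return ApplyToSeven([](int i) { return 2 * i + 1; });  // Returns 15.
}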
TEST(FunctionViewTest, UniquePtrPassthrough) { auto f = [](std::unique_ptr x) { return x; }; - rtc::FunctionView(std::unique_ptr)> fv(f); + FunctionView(std::unique_ptr)> fv(f); std::unique_ptr x(new int); int* x_addr = x.get(); auto y = fv(std::move(x)); @@ -89,25 +89,25 @@ TEST(FunctionViewTest, UniquePtrPassthrough) { TEST(FunctionViewTest, CopyConstructor) { auto f17 = [] { return 17; }; - rtc::FunctionView fv1(f17); - rtc::FunctionView fv2(fv1); + FunctionView fv1(f17); + FunctionView fv2(fv1); EXPECT_EQ(17, fv1()); EXPECT_EQ(17, fv2()); } TEST(FunctionViewTest, MoveConstructorIsCopy) { auto f17 = [] { return 17; }; - rtc::FunctionView fv1(f17); - rtc::FunctionView fv2(std::move(fv1)); // NOLINT + FunctionView fv1(f17); + FunctionView fv2(std::move(fv1)); // NOLINT EXPECT_EQ(17, fv1()); EXPECT_EQ(17, fv2()); } TEST(FunctionViewTest, CopyAssignment) { auto f17 = [] { return 17; }; - rtc::FunctionView fv1(f17); + FunctionView fv1(f17); auto f23 = [] { return 23; }; - rtc::FunctionView fv2(f23); + FunctionView fv2(f23); EXPECT_EQ(17, fv1()); EXPECT_EQ(23, fv2()); fv2 = fv1; @@ -117,9 +117,9 @@ TEST(FunctionViewTest, CopyAssignment) { TEST(FunctionViewTest, MoveAssignmentIsCopy) { auto f17 = [] { return 17; }; - rtc::FunctionView fv1(f17); + FunctionView fv1(f17); auto f23 = [] { return 23; }; - rtc::FunctionView fv2(f23); + FunctionView fv2(f23); EXPECT_EQ(17, fv1()); EXPECT_EQ(23, fv2()); fv2 = std::move(fv1); // NOLINT @@ -129,9 +129,9 @@ TEST(FunctionViewTest, MoveAssignmentIsCopy) { TEST(FunctionViewTest, Swap) { auto f17 = [] { return 17; }; - rtc::FunctionView fv1(f17); + FunctionView fv1(f17); auto f23 = [] { return 23; }; - rtc::FunctionView fv2(f23); + FunctionView fv2(f23); EXPECT_EQ(17, fv1()); EXPECT_EQ(23, fv2()); using std::swap; @@ -145,8 +145,8 @@ TEST(FunctionViewTest, Swap) { // the old one). TEST(FunctionViewTest, CopyConstructorChaining) { auto f17 = [] { return 17; }; - rtc::FunctionView fv1(f17); - rtc::FunctionView fv2(fv1); + FunctionView fv1(f17); + FunctionView fv2(fv1); EXPECT_EQ(17, fv1()); EXPECT_EQ(17, fv2()); auto f23 = [] { return 23; }; @@ -159,8 +159,8 @@ TEST(FunctionViewTest, CopyConstructorChaining) { // copy (as opposed to making the second FunctionView point to the first one). 
TEST(FunctionViewTest, CopyAssignmentChaining) { auto f17 = [] { return 17; }; - rtc::FunctionView fv1(f17); - rtc::FunctionView fv2; + FunctionView fv1(f17); + FunctionView fv2; EXPECT_TRUE(fv1); EXPECT_EQ(17, fv1()); EXPECT_FALSE(fv2); @@ -173,4 +173,4 @@ TEST(FunctionViewTest, CopyAssignmentChaining) { EXPECT_EQ(17, fv2()); } -} // namespace rtc +} // namespace webrtc diff --git a/api/ice_transport_factory.cc b/api/ice_transport_factory.cc index e88ac183fa..3cd2deb1a7 100644 --- a/api/ice_transport_factory.cc +++ b/api/ice_transport_factory.cc @@ -13,12 +13,15 @@ #include #include +#include "api/ice_transport_interface.h" #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/p2p_transport_channel.h" #include "p2p/base/port_allocator.h" -#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -30,38 +33,37 @@ namespace { class IceTransportWithTransportChannel : public IceTransportInterface { public: IceTransportWithTransportChannel( - std::unique_ptr internal) + std::unique_ptr internal) : internal_(std::move(internal)) {} ~IceTransportWithTransportChannel() override { RTC_DCHECK_RUN_ON(&thread_checker_); } - cricket::IceTransportInternal* internal() override { + IceTransportInternal* internal() override { RTC_DCHECK_RUN_ON(&thread_checker_); return internal_.get(); } private: const SequenceChecker thread_checker_{}; - const std::unique_ptr internal_ + const std::unique_ptr internal_ RTC_GUARDED_BY(thread_checker_); }; } // namespace -rtc::scoped_refptr CreateIceTransport( - cricket::PortAllocator* port_allocator) { +scoped_refptr CreateIceTransport( + PortAllocator* port_allocator) { IceTransportInit init; init.set_port_allocator(port_allocator); return CreateIceTransport(std::move(init)); } -rtc::scoped_refptr CreateIceTransport( - IceTransportInit init) { - return rtc::make_ref_counted( - cricket::P2PTransportChannel::Create( - "", cricket::ICE_CANDIDATE_COMPONENT_RTP, std::move(init))); +scoped_refptr CreateIceTransport(IceTransportInit init) { + return make_ref_counted( + P2PTransportChannel::Create("", ICE_CANDIDATE_COMPONENT_RTP, + std::move(init))); } } // namespace webrtc diff --git a/api/ice_transport_factory.h b/api/ice_transport_factory.h index 2268ea5e12..cdd9db1a2f 100644 --- a/api/ice_transport_factory.h +++ b/api/ice_transport_factory.h @@ -13,12 +13,9 @@ #include "api/ice_transport_interface.h" #include "api/scoped_refptr.h" +#include "p2p/base/port_allocator.h" #include "rtc_base/system/rtc_export.h" -namespace cricket { -class PortAllocator; -} // namespace cricket - namespace webrtc { // Static factory for an IceTransport object that can be created @@ -28,8 +25,8 @@ namespace webrtc { // The PortAllocator must outlive the created IceTransportInterface object. // TODO(steveanton): Remove in favor of the overload that takes // IceTransportInit. -RTC_EXPORT rtc::scoped_refptr CreateIceTransport( - cricket::PortAllocator* port_allocator); +RTC_EXPORT scoped_refptr CreateIceTransport( + PortAllocator* port_allocator); // Static factory for an IceTransport object that can be created // without using a webrtc::PeerConnection. @@ -39,7 +36,7 @@ RTC_EXPORT rtc::scoped_refptr CreateIceTransport( // IceTransportInterface object. // `init.async_resolver_factory()` and `init.event_log()` are optional, but if // provided must outlive the created IceTransportInterface object. 
-RTC_EXPORT rtc::scoped_refptr CreateIceTransport( +RTC_EXPORT scoped_refptr CreateIceTransport( IceTransportInit); } // namespace webrtc diff --git a/api/ice_transport_interface.h b/api/ice_transport_interface.h index 431f3330a5..d859ed6aa8 100644 --- a/api/ice_transport_interface.h +++ b/api/ice_transport_interface.h @@ -14,33 +14,29 @@ #include #include "api/async_dns_resolver.h" -#include "api/async_resolver_factory.h" -#include "api/rtc_error.h" +#include "api/ref_count.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/scoped_refptr.h" -#include "rtc_base/ref_count.h" - -namespace cricket { -class IceTransportInternal; -class PortAllocator; -class IceControllerFactoryInterface; -class ActiveIceControllerFactoryInterface; -} // namespace cricket namespace webrtc { + +class ActiveIceControllerFactoryInterface; class FieldTrialsView; +class IceControllerFactoryInterface; +class IceTransportInternal; +class PortAllocator; // An ICE transport, as represented to the outside world. // This object is refcounted, and is therefore alive until the // last holder has released it. -class IceTransportInterface : public rtc::RefCountInterface { +class IceTransportInterface : public RefCountInterface { public: // Accessor for the internal representation of an ICE transport. // The returned object can only be safely used on the signalling thread. // TODO(crbug.com/907849): Add API calls for the functions that have to // be exposed to clients, and stop allowing access to the - // cricket::IceTransportInternal API. - virtual cricket::IceTransportInternal* internal() = 0; + // webrtc::IceTransportInternal API. + virtual IceTransportInternal* internal() = 0; }; struct IceTransportInit final { @@ -51,8 +47,8 @@ struct IceTransportInit final { IceTransportInit& operator=(const IceTransportInit&) = delete; IceTransportInit& operator=(IceTransportInit&&) = default; - cricket::PortAllocator* port_allocator() { return port_allocator_; } - void set_port_allocator(cricket::PortAllocator* port_allocator) { + PortAllocator* port_allocator() { return port_allocator_; } + void set_port_allocator(PortAllocator* port_allocator) { port_allocator_ = port_allocator; } @@ -61,28 +57,17 @@ struct IceTransportInit final { } void set_async_dns_resolver_factory( AsyncDnsResolverFactoryInterface* async_dns_resolver_factory) { - RTC_DCHECK(!async_resolver_factory_); async_dns_resolver_factory_ = async_dns_resolver_factory; } - [[deprecated("Use async_dns_resolver_factory")]] AsyncResolverFactory* - async_resolver_factory() { - return async_resolver_factory_; - } - ABSL_DEPRECATED("bugs.webrtc.org/12598") - void set_async_resolver_factory( - AsyncResolverFactory* async_resolver_factory) { - RTC_DCHECK(!async_dns_resolver_factory_); - async_resolver_factory_ = async_resolver_factory; - } RtcEventLog* event_log() { return event_log_; } void set_event_log(RtcEventLog* event_log) { event_log_ = event_log; } void set_ice_controller_factory( - cricket::IceControllerFactoryInterface* ice_controller_factory) { + IceControllerFactoryInterface* ice_controller_factory) { ice_controller_factory_ = ice_controller_factory; } - cricket::IceControllerFactoryInterface* ice_controller_factory() { + IceControllerFactoryInterface* ice_controller_factory() { return ice_controller_factory_; } @@ -98,12 +83,10 @@ struct IceTransportInit final { // 2. If not, a default active ICE controller is used, wrapping over the // supplied or the default legacy ICE controller. 
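// Illustrative sketch, not from this CL: creating a standalone ICE transport
// via the IceTransportInit overload declared above. `MakeIceTransport` is a
// hypothetical helper; it assumes PortAllocator is reachable as
// webrtc::PortAllocator after this migration and that the allocator is owned
// by the caller and outlives the returned transport.
#include <utility>
#include "api/ice_transport_factory.h"
#include "api/ice_transport_interface.h"
#include "api/scoped_refptr.h"

webrtc::scoped_refptr<webrtc::IceTransportInterface> MakeIceTransport(
    webrtc::PortAllocator* allocator) {
  webrtc::IceTransportInit init;
  init.set_port_allocator(allocator);
  return webrtc::CreateIceTransport(std::move(init));
}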
void set_active_ice_controller_factory( - cricket::ActiveIceControllerFactoryInterface* - active_ice_controller_factory) { + ActiveIceControllerFactoryInterface* active_ice_controller_factory) { active_ice_controller_factory_ = active_ice_controller_factory; } - cricket::ActiveIceControllerFactoryInterface* - active_ice_controller_factory() { + ActiveIceControllerFactoryInterface* active_ice_controller_factory() { return active_ice_controller_factory_; } @@ -113,14 +96,11 @@ struct IceTransportInit final { } private: - cricket::PortAllocator* port_allocator_ = nullptr; + PortAllocator* port_allocator_ = nullptr; AsyncDnsResolverFactoryInterface* async_dns_resolver_factory_ = nullptr; - // For backwards compatibility. Only one resolver factory can be set. - AsyncResolverFactory* async_resolver_factory_ = nullptr; RtcEventLog* event_log_ = nullptr; - cricket::IceControllerFactoryInterface* ice_controller_factory_ = nullptr; - cricket::ActiveIceControllerFactoryInterface* active_ice_controller_factory_ = - nullptr; + IceControllerFactoryInterface* ice_controller_factory_ = nullptr; + ActiveIceControllerFactoryInterface* active_ice_controller_factory_ = nullptr; const FieldTrialsView* field_trials_ = nullptr; // TODO(https://crbug.com/webrtc/12657): Redesign to have const members. }; @@ -141,7 +121,7 @@ class IceTransportFactory { // requires the returned transport to be constructed and destroyed on the // network thread and an ICE transport factory that intends to work with a // peer connection should offer transports compatible with these assumptions. - virtual rtc::scoped_refptr CreateIceTransport( + virtual scoped_refptr CreateIceTransport( const std::string& transport_name, int component, IceTransportInit init) = 0; diff --git a/api/jsep.cc b/api/jsep.cc index 5fdc8905c7..65e22442f8 100644 --- a/api/jsep.cc +++ b/api/jsep.cc @@ -10,6 +10,13 @@ #include "api/jsep.h" +#include +#include +#include +#include + +#include "api/candidate.h" + namespace webrtc { std::string IceCandidateInterface::server_url() const { @@ -17,7 +24,7 @@ std::string IceCandidateInterface::server_url() const { } size_t SessionDescriptionInterface::RemoveCandidates( - const std::vector& candidates) { + const std::vector& /* candidates */) { return 0; } @@ -40,7 +47,7 @@ const char* SdpTypeToString(SdpType type) { return ""; } -absl::optional SdpTypeFromString(const std::string& type_str) { +std::optional SdpTypeFromString(const std::string& type_str) { if (type_str == SessionDescriptionInterface::kOffer) { return SdpType::kOffer; } else if (type_str == SessionDescriptionInterface::kPrAnswer) { @@ -50,7 +57,7 @@ absl::optional SdpTypeFromString(const std::string& type_str) { } else if (type_str == SessionDescriptionInterface::kRollback) { return SdpType::kRollback; } else { - return absl::nullopt; + return std::nullopt; } } diff --git a/api/jsep.h b/api/jsep.h index d2aa57c784..c5674e90cd 100644 --- a/api/jsep.h +++ b/api/jsep.h @@ -9,13 +9,13 @@ */ // This file contains declarations of interfaces that wrap SDP-related -// constructs; session descriptions and ICE candidates. The inner "cricket::" +// constructs; session descriptions and ICE candidates. The inner "webrtc::" // objects shouldn't be accessed directly; the intention is that an application // using the PeerConnection API only creates these objects from strings, and // them passes them into the PeerConnection. // // Though in the future, we're planning to provide an SDP parsing API, with a -// structure more friendly than cricket::SessionDescription. 
+// structure more friendly than webrtc::SessionDescription. #ifndef API_JSEP_H_ #define API_JSEP_H_ @@ -23,21 +23,19 @@ #include #include +#include #include #include -#include "absl/types/optional.h" +#include "api/candidate.h" +#include "api/ref_count.h" #include "api/rtc_error.h" -#include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" -namespace cricket { -class Candidate; -class SessionDescription; -} // namespace cricket - namespace webrtc { +class SessionDescription; + struct SdpParseError { public: // The sdp line that causes the error. @@ -62,7 +60,7 @@ class RTC_EXPORT IceCandidateInterface { // is associated with. Needed when an endpoint doesn't support MIDs. virtual int sdp_mline_index() const = 0; // Only for use internally. - virtual const cricket::Candidate& candidate() const = 0; + virtual const Candidate& candidate() const = 0; // The URL of the ICE server which this candidate was gathered from. // TODO(zhihuang): Remove the default implementation once the subclasses // implement this method. @@ -83,7 +81,7 @@ RTC_EXPORT IceCandidateInterface* CreateIceCandidate(const std::string& sdp_mid, RTC_EXPORT std::unique_ptr CreateIceCandidate( const std::string& sdp_mid, int sdp_mline_index, - const cricket::Candidate& candidate); + const Candidate& candidate); // This class represents a collection of candidates for a specific m= section. // Used in SessionDescriptionInterface. @@ -117,7 +115,8 @@ RTC_EXPORT const char* SdpTypeToString(SdpType type); // Returns the SdpType from its string form. The string form can be one of the // constants defined in SessionDescriptionInterface. Passing in any other string // results in nullopt. -absl::optional SdpTypeFromString(const std::string& type_str); +RTC_EXPORT std::optional SdpTypeFromString( + const std::string& type_str); // Class representation of an SDP session description. // @@ -143,8 +142,8 @@ class RTC_EXPORT SessionDescriptionInterface { } // Only for use internally. - virtual cricket::SessionDescription* description() = 0; - virtual const cricket::SessionDescription* description() const = 0; + virtual SessionDescription* description() = 0; + virtual const SessionDescription* description() const = 0; // Get the session id and session version, which are defined based on // RFC 4566 for the SDP o= line. @@ -173,8 +172,7 @@ class RTC_EXPORT SessionDescriptionInterface { // Removes the candidates from the description, if found. // // Returns the number of candidates removed. - virtual size_t RemoveCandidates( - const std::vector& candidates); + virtual size_t RemoveCandidates(const std::vector& candidates); // Returns the number of m= sections in the session description. virtual size_t number_of_mediasections() const = 0; @@ -186,17 +184,32 @@ class RTC_EXPORT SessionDescriptionInterface { // Serializes the description to SDP. virtual bool ToString(std::string* out) const = 0; + template + friend void AbslStringify(Sink& sink, const SessionDescriptionInterface& p) { + sink.Append("\n--- BEGIN SDP "); + sink.Append(SdpTypeToString(p.GetType())); + sink.Append(" ---\n"); + std::string temp; + if (p.ToString(&temp)) { + sink.Append(temp); + } else { + sink.Append("Error in ToString\n"); + } + sink.Append("--- END SDP ---\n"); + } }; // Creates a SessionDescriptionInterface based on the SDP string and the type. // Returns null if the sdp string can't be parsed or the type is unsupported. // `error` may be null. -// TODO(steveanton): This function is deprecated. 
Please use the functions below -// which take an SdpType enum instead. Remove this once it is no longer used. -RTC_EXPORT SessionDescriptionInterface* CreateSessionDescription( - const std::string& type, - const std::string& sdp, - SdpParseError* error); +// TODO(https://issues.webrtc.org/360909068): This function is deprecated. +// Please use the functions below which take an SdpType enum instead. Remove +// this once it is no longer used. +[[deprecated("Use version with SdpType argument")]] RTC_EXPORT + SessionDescriptionInterface* + CreateSessionDescription(const std::string& type, + const std::string& sdp, + SdpParseError* error); // Creates a SessionDescriptionInterface based on the SDP string and the type. // Returns null if the SDP string cannot be parsed. @@ -215,11 +228,11 @@ std::unique_ptr CreateSessionDescription( SdpType type, const std::string& session_id, const std::string& session_version, - std::unique_ptr description); + std::unique_ptr description); // CreateOffer and CreateAnswer callback interface. class RTC_EXPORT CreateSessionDescriptionObserver - : public rtc::RefCountInterface { + : public webrtc::RefCountInterface { public: // This callback transfers the ownership of the `desc`. // TODO(deadbeef): Make this take an std::unique_ptr<> to avoid confusion @@ -238,7 +251,8 @@ class RTC_EXPORT CreateSessionDescriptionObserver }; // SetLocalDescription and SetRemoteDescription callback interface. -class RTC_EXPORT SetSessionDescriptionObserver : public rtc::RefCountInterface { +class RTC_EXPORT SetSessionDescriptionObserver + : public webrtc::RefCountInterface { public: virtual void OnSuccess() = 0; // See description in CreateSessionDescriptionObserver for OnFailure. diff --git a/api/jsep_ice_candidate.cc b/api/jsep_ice_candidate.cc index e18e6e28af..9642dbe86b 100644 --- a/api/jsep_ice_candidate.cc +++ b/api/jsep_ice_candidate.cc @@ -10,11 +10,15 @@ #include "api/jsep_ice_candidate.h" +#include #include +#include #include #include "absl/algorithm/container.h" #include "absl/memory/memory.h" +#include "api/candidate.h" +#include "api/jsep.h" namespace webrtc { @@ -26,7 +30,7 @@ int JsepIceCandidate::sdp_mline_index() const { return sdp_mline_index_; } -const cricket::Candidate& JsepIceCandidate::candidate() const { +const Candidate& JsepIceCandidate::candidate() const { return candidate_; } @@ -61,7 +65,7 @@ bool JsepCandidateCollection::HasCandidate( }); } -size_t JsepCandidateCollection::remove(const cricket::Candidate& candidate) { +size_t JsepCandidateCollection::remove(const Candidate& candidate) { auto iter = absl::c_find_if( candidates_, [&](const std::unique_ptr& c) { return candidate.MatchesForRemoval(c->candidate()); diff --git a/api/jsep_ice_candidate.h b/api/jsep_ice_candidate.h index 8f47a102e7..63880818ec 100644 --- a/api/jsep_ice_candidate.h +++ b/api/jsep_ice_candidate.h @@ -32,19 +32,17 @@ class RTC_EXPORT JsepIceCandidate : public IceCandidateInterface { JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index); JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index, - const cricket::Candidate& candidate); + const Candidate& candidate); JsepIceCandidate(const JsepIceCandidate&) = delete; JsepIceCandidate& operator=(const JsepIceCandidate&) = delete; ~JsepIceCandidate() override; // `err` may be null. 
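// Illustrative sketch, assuming the long-standing string-parsing overload of
// CreateIceCandidate keeps its (sdp_mid, sdp_mline_index, sdp, error) shape:
// parsing a candidate line received over signaling. `ParseCandidate` is a
// hypothetical helper and "0" is a placeholder mid.
#include <memory>
#include <string>
#include "api/jsep.h"

std::unique_ptr<webrtc::IceCandidateInterface> ParseCandidate(
    const std::string& candidate_sdp) {
  webrtc::SdpParseError error;
  std::unique_ptr<webrtc::IceCandidateInterface> candidate(
      webrtc::CreateIceCandidate("0", /*sdp_mline_index=*/0, candidate_sdp,
                                 &error));
  // On failure `candidate` is null and `error.description` says why.
  return candidate;
}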
bool Initialize(const std::string& sdp, SdpParseError* err); - void SetCandidate(const cricket::Candidate& candidate) { - candidate_ = candidate; - } + void SetCandidate(const Candidate& candidate) { candidate_ = candidate; } std::string sdp_mid() const override; int sdp_mline_index() const override; - const cricket::Candidate& candidate() const override; + const Candidate& candidate() const override; std::string server_url() const override; @@ -53,7 +51,7 @@ class RTC_EXPORT JsepIceCandidate : public IceCandidateInterface { private: std::string sdp_mid_; int sdp_mline_index_; - cricket::Candidate candidate_; + Candidate candidate_; }; // Implementation of IceCandidateCollection which stores JsepIceCandidates. @@ -79,7 +77,7 @@ class JsepCandidateCollection : public IceCandidateCollection { // Removes the candidate that has a matching address and protocol. // // Returns the number of candidates that were removed. - size_t remove(const cricket::Candidate& candidate); + size_t remove(const Candidate& candidate); private: std::vector> candidates_; diff --git a/api/jsep_session_description.h b/api/jsep_session_description.h index c51abd57d4..2b25bb1cf9 100644 --- a/api/jsep_session_description.h +++ b/api/jsep_session_description.h @@ -14,6 +14,7 @@ #ifndef API_JSEP_SESSION_DESCRIPTION_H_ #define API_JSEP_SESSION_DESCRIPTION_H_ +#include #include #include #include @@ -23,39 +24,34 @@ #include "api/jsep.h" #include "api/jsep_ice_candidate.h" -namespace cricket { -class SessionDescription; -} - namespace webrtc { +class SessionDescription; + // Implementation of SessionDescriptionInterface. class JsepSessionDescription : public SessionDescriptionInterface { public: explicit JsepSessionDescription(SdpType type); // TODO(steveanton): Remove this once callers have switched to SdpType. explicit JsepSessionDescription(const std::string& type); - JsepSessionDescription( - SdpType type, - std::unique_ptr description, - absl::string_view session_id, - absl::string_view session_version); + JsepSessionDescription(SdpType type, + std::unique_ptr description, + absl::string_view session_id, + absl::string_view session_version); virtual ~JsepSessionDescription(); JsepSessionDescription(const JsepSessionDescription&) = delete; JsepSessionDescription& operator=(const JsepSessionDescription&) = delete; // Takes ownership of `description`. - bool Initialize(std::unique_ptr description, + bool Initialize(std::unique_ptr description, const std::string& session_id, const std::string& session_version); virtual std::unique_ptr Clone() const; - virtual cricket::SessionDescription* description() { - return description_.get(); - } - virtual const cricket::SessionDescription* description() const { + virtual SessionDescription* description() { return description_.get(); } + virtual const SessionDescription* description() const { return description_.get(); } virtual std::string session_id() const { return session_id_; } @@ -64,15 +60,14 @@ class JsepSessionDescription : public SessionDescriptionInterface { virtual std::string type() const { return SdpTypeToString(type_); } // Allows changing the type. Used for testing. 
virtual bool AddCandidate(const IceCandidateInterface* candidate); - virtual size_t RemoveCandidates( - const std::vector& candidates); + virtual size_t RemoveCandidates(const std::vector& candidates); virtual size_t number_of_mediasections() const; virtual const IceCandidateCollection* candidates( size_t mediasection_index) const; virtual bool ToString(std::string* out) const; private: - std::unique_ptr description_; + std::unique_ptr description_; std::string session_id_; std::string session_version_; SdpType type_; @@ -80,7 +75,7 @@ class JsepSessionDescription : public SessionDescriptionInterface { bool GetMediasectionIndex(const IceCandidateInterface* candidate, size_t* index); - int GetMediasectionIndex(const cricket::Candidate& candidate); + int GetMediasectionIndex(const Candidate& candidate); }; } // namespace webrtc diff --git a/api/jsep_unittest.cc b/api/jsep_unittest.cc new file mode 100644 index 0000000000..9e5777c1b7 --- /dev/null +++ b/api/jsep_unittest.cc @@ -0,0 +1,67 @@ +/* + * Copyright 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/jsep.h" + +#include +#include + +#include "absl/strings/str_cat.h" +#include "rtc_base/logging.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { + +using ::testing::HasSubstr; + +TEST(JsepTest, AbslStringifySdp) { + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=group:BUNDLE 0 1\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=audio 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:1\r\n" + "a=rtpmap:111 H264/90000\r\n" + "a=fmtp:111 " + "level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42e01f\r\n"; + + std::unique_ptr some_sdp = + CreateSessionDescription(SdpType::kOffer, sdp); + // Verify that sending the SDP to the log compiles. + RTC_LOG(LS_VERBOSE) << "The SDP is " << *some_sdp; + // Since create/stringify mangles order of fields, we only test + // some substrings. + EXPECT_THAT(absl::StrCat(*some_sdp), HasSubstr("a=rtpmap:111 opus/48000")); + EXPECT_THAT( + absl::StrCat(*some_sdp), + HasSubstr( + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n")); +} + +} // namespace webrtc diff --git a/api/legacy_stats_types.cc b/api/legacy_stats_types.cc index 01eee2ce98..64e8d42409 100644 --- a/api/legacy_stats_types.cc +++ b/api/legacy_stats_types.cc @@ -12,12 +12,15 @@ #include +#include +#include #include #include "absl/algorithm/container.h" +#include "absl/strings/str_cat.h" #include "api/make_ref_counted.h" +#include "api/sequence_checker.h" #include "rtc_base/checks.h" -#include "rtc_base/string_encode.h" // TODO(tommi): Could we have a static map of value name -> expected type // and use this to RTC_DCHECK on correct usage (somewhat strongly typed values)? 
@@ -101,7 +104,7 @@ class TypedIntId : public StatsReport::IdBase { std::string ToString() const override { return std::string(InternalTypeToString(type_)) + kSeparator + - rtc::ToString(id_); + absl::StrCat(id_); } protected: @@ -167,7 +170,7 @@ class ComponentId : public StatsReport::IdBase { std::string ret(prefix); ret += content_name_; ret += '-'; - ret += rtc::ToString(component_); + ret += absl::StrCat(component_); return ret; } @@ -192,7 +195,7 @@ class CandidatePairId : public ComponentId { std::string ToString() const override { std::string ret(ComponentId::ToString("Conn-")); ret += '-'; - ret += rtc::ToString(index_); + ret += absl::StrCat(index_); return ret; } @@ -669,11 +672,11 @@ const char* StatsReport::Value::display_name() const { std::string StatsReport::Value::ToString() const { switch (type_) { case kInt: - return rtc::ToString(value_.int_); + return absl::StrCat(value_.int_); case kInt64: - return rtc::ToString(value_.int64_); + return absl::StrCat(value_.int64_); case kFloat: - return rtc::ToString(value_.float_); + return absl::StrCat(value_.float_); case kStaticString: return std::string(value_.static_string_); case kString: @@ -695,17 +698,17 @@ StatsReport::~StatsReport() = default; // static StatsReport::Id StatsReport::NewBandwidthEstimationId() { - return rtc::make_ref_counted(); + return make_ref_counted(); } // static StatsReport::Id StatsReport::NewTypedId(StatsType type, const std::string& id) { - return rtc::make_ref_counted(type, id); + return make_ref_counted(type, id); } // static StatsReport::Id StatsReport::NewTypedIntId(StatsType type, int id) { - return rtc::make_ref_counted(type, id); + return make_ref_counted(type, id); } // static @@ -713,25 +716,25 @@ StatsReport::Id StatsReport::NewIdWithDirection( StatsType type, const std::string& id, StatsReport::Direction direction) { - return rtc::make_ref_counted(type, id, direction); + return make_ref_counted(type, id, direction); } // static StatsReport::Id StatsReport::NewCandidateId(bool local, const std::string& id) { - return rtc::make_ref_counted(local, id); + return make_ref_counted(local, id); } // static StatsReport::Id StatsReport::NewComponentId(const std::string& content_name, int component) { - return rtc::make_ref_counted(content_name, component); + return make_ref_counted(content_name, component); } // static StatsReport::Id StatsReport::NewCandidatePairId(const std::string& content_name, int component, int index) { - return rtc::make_ref_counted(content_name, component, index); + return make_ref_counted(content_name, component, index); } const char* StatsReport::TypeToString() const { diff --git a/api/legacy_stats_types.h b/api/legacy_stats_types.h index e49cb6d6dd..8d94541ed8 100644 --- a/api/legacy_stats_types.h +++ b/api/legacy_stats_types.h @@ -14,16 +14,20 @@ #ifndef API_LEGACY_STATS_TYPES_H_ #define API_LEGACY_STATS_TYPES_H_ -#include +#include +#include + #include #include #include #include +#include "api/ref_count.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" -#include "rtc_base/ref_count.h" +#include "rtc_base/checks.h" #include "rtc_base/system/rtc_export.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -240,7 +244,7 @@ class RTC_EXPORT StatsReport { kStatsValueNameLocalCandidateRelayProtocol, }; - class RTC_EXPORT IdBase : public rtc::RefCountInterface { + class RTC_EXPORT IdBase : public webrtc::RefCountInterface { public: ~IdBase() override; StatsType type() const; @@ -248,7 +252,7 @@ class RTC_EXPORT StatsReport { // Users of IdBase 
will be using the Id typedef, which is compatible with // this Equals() function. It simply calls the protected (and overridden) // Equals() method. - bool Equals(const rtc::scoped_refptr& other) const { + bool Equals(const scoped_refptr& other) const { return Equals(*other.get()); } @@ -264,7 +268,7 @@ class RTC_EXPORT StatsReport { static const char kSeparator = '_'; }; - typedef rtc::scoped_refptr Id; + typedef scoped_refptr Id; struct RTC_EXPORT Value { enum Type { @@ -368,7 +372,7 @@ class RTC_EXPORT StatsReport { } value_; }; - typedef rtc::scoped_refptr ValuePtr; + typedef scoped_refptr ValuePtr; typedef std::map Values; // Ownership of `id` is passed to `this`. diff --git a/api/make_ref_counted.h b/api/make_ref_counted.h index e958da9774..b7781e9bed 100644 --- a/api/make_ref_counted.h +++ b/api/make_ref_counted.h @@ -13,9 +13,12 @@ #include #include +#include "absl/base/nullability.h" +#include "api/ref_count.h" +#include "api/scoped_refptr.h" #include "rtc_base/ref_counted_object.h" -namespace rtc { +namespace webrtc { namespace webrtc_make_ref_counted_internal { // Determines if the given class has AddRef and Release methods. @@ -53,8 +56,8 @@ class HasAddRefAndRelease { // auto p = scoped_refptr(new RefCountedObject("bar", 123)); // // If the class does not inherit from RefCountInterface, but does have -// AddRef/Release methods (so a T* is convertible to rtc::scoped_refptr), this -// is equivalent to just +// AddRef/Release methods (so a T* is convertible to webrtc::scoped_refptr), +// this is equivalent to just // // auto p = scoped_refptr(new Foo("bar", 123)); // @@ -83,7 +86,7 @@ template < typename std::enable_if && std::is_abstract_v, T>::type* = nullptr> -scoped_refptr make_ref_counted(Args&&... args) { +absl_nonnull scoped_refptr make_ref_counted(Args&&... args) { return scoped_refptr(new RefCountedObject(std::forward(args)...)); } @@ -96,7 +99,7 @@ template < !std::is_convertible_v && webrtc_make_ref_counted_internal::HasAddRefAndRelease::value, T>::type* = nullptr> -scoped_refptr make_ref_counted(Args&&... args) { +absl_nonnull scoped_refptr make_ref_counted(Args&&... args) { return scoped_refptr(new T(std::forward(args)...)); } @@ -110,11 +113,20 @@ template < !webrtc_make_ref_counted_internal::HasAddRefAndRelease::value, T>::type* = nullptr> -scoped_refptr> make_ref_counted(Args&&... args) { +absl_nonnull scoped_refptr> make_ref_counted( + Args&&... args) { return scoped_refptr>( new FinalRefCountedObject(std::forward(args)...)); } +} // namespace webrtc + +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +// Backwards compatibe alias. +// TODO: bugs.webrtc.org/42225969 - deprecate and remove. 
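// Illustrative sketch: make_ref_counted now lives in namespace webrtc and
// returns a non-null scoped_refptr. `Counter` and `MakeCounter` are
// hypothetical, shown only to demonstrate the call.
#include "api/make_ref_counted.h"
#include "api/ref_count.h"
#include "api/scoped_refptr.h"

class Counter : public webrtc::RefCountInterface {
 public:
  explicit Counter(int start) : value_(start) {}
  int value() const { return value_; }

 private:
  const int value_;
};

webrtc::scoped_refptr<Counter> MakeCounter() {
  // Wraps the abstract Counter in a RefCountedObject under the hood.
  return webrtc::make_ref_counted<Counter>(41);
}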
+using ::webrtc::make_ref_counted; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // API_MAKE_REF_COUNTED_H_ diff --git a/api/media_stream_interface.cc b/api/media_stream_interface.cc index 6b0a6a9297..a092855bb5 100644 --- a/api/media_stream_interface.cc +++ b/api/media_stream_interface.cc @@ -10,29 +10,29 @@ #include "api/media_stream_interface.h" +#include "api/audio_options.h" #include "api/media_types.h" +#include "api/scoped_refptr.h" namespace webrtc { -const char* const MediaStreamTrackInterface::kVideoKind = - cricket::kMediaTypeVideo; -const char* const MediaStreamTrackInterface::kAudioKind = - cricket::kMediaTypeAudio; +const char* const MediaStreamTrackInterface::kVideoKind = kMediaTypeVideo; +const char* const MediaStreamTrackInterface::kAudioKind = kMediaTypeAudio; VideoTrackInterface::ContentHint VideoTrackInterface::content_hint() const { return ContentHint::kNone; } -bool AudioTrackInterface::GetSignalLevel(int* level) { +bool AudioTrackInterface::GetSignalLevel(int* /* level */) { return false; } -rtc::scoped_refptr +scoped_refptr AudioTrackInterface::GetAudioProcessor() { return nullptr; } -const cricket::AudioOptions AudioSourceInterface::options() const { +const AudioOptions AudioSourceInterface::options() const { return {}; } diff --git a/api/media_stream_interface.h b/api/media_stream_interface.h index 9d336739e4..c3b60f7034 100644 --- a/api/media_stream_interface.h +++ b/api/media_stream_interface.h @@ -17,20 +17,22 @@ #define API_MEDIA_STREAM_INTERFACE_H_ #include +#include +#include #include #include -#include "absl/types/optional.h" +#include "api/audio/audio_processing_statistics.h" #include "api/audio_options.h" +#include "api/ref_count.h" #include "api/scoped_refptr.h" #include "api/video/recordable_encoded_frame.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "api/video_track_source_constraints.h" -#include "modules/audio_processing/include/audio_processing_statistics.h" -#include "rtc_base/ref_count.h" +#include "rtc_base/checks.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -54,7 +56,7 @@ class NotifierInterface { // Base class for sources. A MediaStreamTrack has an underlying source that // provides media. A source can be shared by multiple tracks. -class RTC_EXPORT MediaSourceInterface : public rtc::RefCountInterface, +class RTC_EXPORT MediaSourceInterface : public webrtc::RefCountInterface, public NotifierInterface { public: enum SourceState { kInitializing, kLive, kEnded, kMuted }; @@ -69,7 +71,7 @@ class RTC_EXPORT MediaSourceInterface : public rtc::RefCountInterface, // C++ version of MediaStreamTrack. // See: https://www.w3.org/TR/mediacapture-streams/#mediastreamtrack -class RTC_EXPORT MediaStreamTrackInterface : public rtc::RefCountInterface, +class RTC_EXPORT MediaStreamTrackInterface : public webrtc::RefCountInterface, public NotifierInterface { public: enum TrackState { @@ -104,12 +106,12 @@ class RTC_EXPORT MediaStreamTrackInterface : public rtc::RefCountInterface, // VideoTrackSourceInterface is a reference counted source used for // VideoTracks. The same source can be used by multiple VideoTracks. // VideoTrackSourceInterface is designed to be invoked on the signaling thread -// except for rtc::VideoSourceInterface methods that will be invoked -// on the worker thread via a VideoTrack. A custom implementation of a source -// can inherit AdaptedVideoTrackSource instead of directly implementing this -// interface. 
+// except for webrtc::VideoSourceInterface methods that will be +// invoked on the worker thread via a VideoTrack. A custom implementation of a +// source can inherit AdaptedVideoTrackSource instead of directly implementing +// this interface. class VideoTrackSourceInterface : public MediaSourceInterface, - public rtc::VideoSourceInterface { + public VideoSourceInterface { public: struct Stats { // Original size of captured frame, before video adaptation. @@ -129,7 +131,7 @@ class VideoTrackSourceInterface : public MediaSourceInterface, // depending on video codec. // TODO(perkj): Remove this once denoising is done by the source, and not by // the encoder. - virtual absl::optional needs_denoising() const = 0; + virtual std::optional needs_denoising() const = 0; // Returns false if no stats are available, e.g, for a remote source, or a // source which has not seen its first frame yet. @@ -148,32 +150,31 @@ class VideoTrackSourceInterface : public MediaSourceInterface, // a key frame to be generated from the source. The sink will be // invoked from a decoder queue. virtual void AddEncodedSink( - rtc::VideoSinkInterface* sink) = 0; + VideoSinkInterface* sink) = 0; // Removes an encoded video sink from the source. virtual void RemoveEncodedSink( - rtc::VideoSinkInterface* sink) = 0; + VideoSinkInterface* sink) = 0; // Notify about constraints set on the source. The information eventually gets // routed to attached sinks via VideoSinkInterface<>::OnConstraintsChanged. // The call is expected to happen on the network thread. // TODO(crbug/1255737): make pure virtual once downstream project adapts. virtual void ProcessConstraints( - const webrtc::VideoTrackSourceConstraints& constraints) {} + const webrtc::VideoTrackSourceConstraints& /* constraints */) {} protected: ~VideoTrackSourceInterface() override = default; }; // VideoTrackInterface is designed to be invoked on the signaling thread except -// for rtc::VideoSourceInterface methods that must be invoked +// for webrtc::VideoSourceInterface methods that must be invoked // on the worker thread. // PeerConnectionFactory::CreateVideoTrack can be used for creating a VideoTrack // that ensures thread safety and that all methods are called on the right // thread. -class RTC_EXPORT VideoTrackInterface - : public MediaStreamTrackInterface, - public rtc::VideoSourceInterface { +class RTC_EXPORT VideoTrackInterface : public MediaStreamTrackInterface, + public VideoSourceInterface { public: // Video track content hint, used to override the source is_screencast // property. @@ -182,14 +183,14 @@ class RTC_EXPORT VideoTrackInterface // Register a video sink for this track. Used to connect the track to the // underlying video engine. - void AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override {} - void RemoveSink(rtc::VideoSinkInterface* sink) override {} + void AddOrUpdateSink(VideoSinkInterface* /* sink */, + const VideoSinkWants& /* wants */) override {} + void RemoveSink(VideoSinkInterface* /* sink */) override {} virtual VideoTrackSourceInterface* GetSource() const = 0; virtual ContentHint content_hint() const; - virtual void set_content_hint(ContentHint hint) {} + virtual void set_content_hint(ContentHint /* hint */) {} protected: ~VideoTrackInterface() override = default; @@ -198,24 +199,25 @@ class RTC_EXPORT VideoTrackInterface // Interface for receiving audio data from a AudioTrack. 
class AudioTrackSinkInterface { public: - virtual void OnData(const void* audio_data, - int bits_per_sample, - int sample_rate, - size_t number_of_channels, - size_t number_of_frames) { + virtual void OnData(const void* /* audio_data */, + int /* bits_per_sample */, + int /* sample_rate */, + size_t /* number_of_channels */, + size_t /* number_of_frames */) { RTC_DCHECK_NOTREACHED() << "This method must be overridden, or not used."; } // In this method, `absolute_capture_timestamp_ms`, when available, is // supposed to deliver the timestamp when this audio frame was originally // captured. This timestamp MUST be based on the same clock as - // rtc::TimeMillis(). - virtual void OnData(const void* audio_data, - int bits_per_sample, - int sample_rate, - size_t number_of_channels, - size_t number_of_frames, - absl::optional absolute_capture_timestamp_ms) { + // webrtc::TimeMillis(). + virtual void OnData( + const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + std::optional /* absolute_capture_timestamp_ms */) { // TODO(bugs.webrtc.org/10739): Deprecate the old OnData and make this one // pure virtual. return OnData(audio_data, bits_per_sample, sample_rate, number_of_channels, @@ -249,25 +251,25 @@ class RTC_EXPORT AudioSourceInterface : public MediaSourceInterface { // Sets the volume of the source. `volume` is in the range of [0, 10]. // TODO(tommi): This method should be on the track and ideally volume should // be applied in the track in a way that does not affect clones of the track. - virtual void SetVolume(double volume) {} + virtual void SetVolume(double /* volume */) {} // Registers/unregisters observers to the audio source. - virtual void RegisterAudioObserver(AudioObserver* observer) {} - virtual void UnregisterAudioObserver(AudioObserver* observer) {} + virtual void RegisterAudioObserver(AudioObserver* /* observer */) {} + virtual void UnregisterAudioObserver(AudioObserver* /* observer */) {} // TODO(tommi): Make pure virtual. - virtual void AddSink(AudioTrackSinkInterface* sink) {} - virtual void RemoveSink(AudioTrackSinkInterface* sink) {} + virtual void AddSink(AudioTrackSinkInterface* /* sink */) {} + virtual void RemoveSink(AudioTrackSinkInterface* /* sink */) {} // Returns options for the AudioSource. // (for some of the settings this approach is broken, e.g. setting // audio network adaptation on the source is the wrong layer of abstraction). - virtual const cricket::AudioOptions options() const; + virtual const AudioOptions options() const; }; // Interface of the audio processor used by the audio track to collect // statistics. -class AudioProcessorInterface : public rtc::RefCountInterface { +class AudioProcessorInterface : public webrtc::RefCountInterface { public: struct AudioProcessorStatistics { bool typing_noise_detected = false; @@ -304,14 +306,14 @@ class RTC_EXPORT AudioTrackInterface : public MediaStreamTrackInterface { // Get the audio processor used by the audio track. Return null if the track // does not have any processor. // TODO(deadbeef): Make the interface pure virtual. - virtual rtc::scoped_refptr GetAudioProcessor(); + virtual scoped_refptr GetAudioProcessor(); protected: ~AudioTrackInterface() override = default; }; -typedef std::vector > AudioTrackVector; -typedef std::vector > VideoTrackVector; +typedef std::vector > AudioTrackVector; +typedef std::vector > VideoTrackVector; // C++ version of https://www.w3.org/TR/mediacapture-streams/#mediastream. 
// @@ -321,51 +323,34 @@ typedef std::vector > VideoTrackVector; // must be pushed down. // // Thus, this interface acts as simply a container for tracks. -class MediaStreamInterface : public rtc::RefCountInterface, +class MediaStreamInterface : public webrtc::RefCountInterface, public NotifierInterface { public: virtual std::string id() const = 0; virtual AudioTrackVector GetAudioTracks() = 0; virtual VideoTrackVector GetVideoTracks() = 0; - virtual rtc::scoped_refptr FindAudioTrack( + virtual scoped_refptr FindAudioTrack( const std::string& track_id) = 0; - virtual rtc::scoped_refptr FindVideoTrack( + virtual scoped_refptr FindVideoTrack( const std::string& track_id) = 0; // Takes ownership of added tracks. // Note: Default implementations are for avoiding link time errors in // implementations that mock this API. // TODO(bugs.webrtc.org/13980): Remove default implementations. - virtual bool AddTrack(rtc::scoped_refptr track) { + virtual bool AddTrack(scoped_refptr /* track */) { RTC_CHECK_NOTREACHED(); } - virtual bool AddTrack(rtc::scoped_refptr track) { + virtual bool AddTrack(scoped_refptr /* track */) { RTC_CHECK_NOTREACHED(); } - virtual bool RemoveTrack(rtc::scoped_refptr track) { + virtual bool RemoveTrack(scoped_refptr /* track */) { RTC_CHECK_NOTREACHED(); } - virtual bool RemoveTrack(rtc::scoped_refptr track) { + virtual bool RemoveTrack(scoped_refptr /* track */) { RTC_CHECK_NOTREACHED(); } - // Deprecated: Should use scoped_refptr versions rather than pointers. - [[deprecated("Pass a scoped_refptr")]] virtual bool AddTrack( - AudioTrackInterface* track) { - return AddTrack(rtc::scoped_refptr(track)); - } - [[deprecated("Pass a scoped_refptr")]] virtual bool AddTrack( - VideoTrackInterface* track) { - return AddTrack(rtc::scoped_refptr(track)); - } - [[deprecated("Pass a scoped_refptr")]] virtual bool RemoveTrack( - AudioTrackInterface* track) { - return RemoveTrack(rtc::scoped_refptr(track)); - } - [[deprecated("Pass a scoped_refptr")]] virtual bool RemoveTrack( - VideoTrackInterface* track) { - return RemoveTrack(rtc::scoped_refptr(track)); - } protected: ~MediaStreamInterface() override = default; diff --git a/api/media_types.cc b/api/media_types.cc index 5c7d55b876..9cd45324b4 100644 --- a/api/media_types.cc +++ b/api/media_types.cc @@ -10,9 +10,11 @@ #include "api/media_types.h" +#include + #include "rtc_base/checks.h" -namespace cricket { +namespace webrtc { const char kMediaTypeVideo[] = "video"; const char kMediaTypeAudio[] = "audio"; @@ -20,13 +22,13 @@ const char kMediaTypeData[] = "data"; std::string MediaTypeToString(MediaType type) { switch (type) { - case MEDIA_TYPE_AUDIO: + case MediaType::AUDIO: return kMediaTypeAudio; - case MEDIA_TYPE_VIDEO: + case MediaType::VIDEO: return kMediaTypeVideo; - case MEDIA_TYPE_DATA: + case MediaType::DATA: return kMediaTypeData; - case MEDIA_TYPE_UNSUPPORTED: + default: // Unsupported media stores the m= differently. RTC_DCHECK_NOTREACHED(); return ""; @@ -34,4 +36,4 @@ std::string MediaTypeToString(MediaType type) { RTC_CHECK_NOTREACHED(); } -} // namespace cricket +} // namespace webrtc diff --git a/api/media_types.h b/api/media_types.h index b2ff08c0c3..c2d4149161 100644 --- a/api/media_types.h +++ b/api/media_types.h @@ -13,32 +13,60 @@ #include +#include "absl/base/macros.h" #include "rtc_base/system/rtc_export.h" -// The cricket and webrtc have separate definitions for what a media type is. -// They're not compatible. Watch out for this. 
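// Illustrative sketch of the consolidated webrtc::MediaType scoped enum
// declared just below: MediaTypeToString() yields "audio"/"video"/"data" and
// the AbslStringify hook lets absl::StrCat() and logging print the same text.
// `DescribeMediaType` is a hypothetical helper.
#include <string>
#include "absl/strings/str_cat.h"
#include "api/media_types.h"

std::string DescribeMediaType(webrtc::MediaType type) {
  return absl::StrCat("media type: ", webrtc::MediaTypeToString(type),
                      " (stringified: ", type, ")");
}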
- -namespace cricket { +namespace webrtc { -enum MediaType { - MEDIA_TYPE_AUDIO, - MEDIA_TYPE_VIDEO, - MEDIA_TYPE_DATA, - MEDIA_TYPE_UNSUPPORTED +enum class MediaType { + AUDIO, + VIDEO, + DATA, + UNSUPPORTED, + ANY, + // Backwards compatibility values for webrtc::MediaType users + // TODO: https://issues.webrtc.org/42222911 - remove + MEDIA_TYPE_AUDIO [[deprecated("Use AUDIO")]] = AUDIO, + MEDIA_TYPE_VIDEO [[deprecated("Use VIDEO")]] = VIDEO, + MEDIA_TYPE_DATA [[deprecated("Use DATA")]] = DATA, + MEDIA_TYPE_UNSUPPORTED [[deprecated("Use UNSUPPORTED")]] = UNSUPPORTED, }; +RTC_EXPORT std::string MediaTypeToString(MediaType type); + +template +void AbslStringify(Sink& sink, MediaType type) { + sink.Append(MediaTypeToString(type)); +} + extern const char kMediaTypeAudio[]; extern const char kMediaTypeVideo[]; extern const char kMediaTypeData[]; -RTC_EXPORT std::string MediaTypeToString(MediaType type); +} // namespace webrtc -} // namespace cricket +// The cricket and webrtc have separate definitions for what a media type is. +// They used to be incompatible, but now cricket is defined in terms of the +// webrtc definition. -namespace webrtc { +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { -enum class MediaType { ANY, AUDIO, VIDEO, DATA }; +using MediaType ABSL_DEPRECATE_AND_INLINE() = webrtc::MediaType; +using webrtc::kMediaTypeAudio; +using webrtc::kMediaTypeData; +using webrtc::kMediaTypeVideo; +using webrtc::MediaTypeToString; -} // namespace webrtc +// Backwards compatibility values for cricket::MediaType users +// TODO: https://issues.webrtc.org/42222911 - remove +[[deprecated]] constexpr MediaType MEDIA_TYPE_AUDIO = webrtc::MediaType::AUDIO; +[[deprecated]] constexpr MediaType MEDIA_TYPE_VIDEO = webrtc::MediaType::VIDEO; +[[deprecated]] constexpr MediaType MEDIA_TYPE_DATA = webrtc::MediaType::DATA; +[[deprecated]] constexpr MediaType MEDIA_TYPE_UNSUPPORTED = + webrtc::MediaType::UNSUPPORTED; + +} // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // API_MEDIA_TYPES_H_ diff --git a/api/metronome/BUILD.gn b/api/metronome/BUILD.gn index 3d3d876df0..7e77b70a82 100644 --- a/api/metronome/BUILD.gn +++ b/api/metronome/BUILD.gn @@ -13,7 +13,7 @@ rtc_source_set("metronome") { sources = [ "metronome.h" ] deps = [ "../../rtc_base/system:rtc_export", - "../task_queue", "../units:time_delta", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] } diff --git a/api/metronome/metronome.h b/api/metronome/metronome.h index a312b1c862..fb079762c2 100644 --- a/api/metronome/metronome.h +++ b/api/metronome/metronome.h @@ -11,7 +11,7 @@ #ifndef API_METRONOME_METRONOME_H_ #define API_METRONOME_METRONOME_H_ -#include "api/task_queue/task_queue_base.h" +#include "absl/functional/any_invocable.h" #include "api/units/time_delta.h" #include "rtc_base/system/rtc_export.h" @@ -36,7 +36,8 @@ class RTC_EXPORT Metronome { // executed on the same sequence as they were requested on. There are no // features for cancellation. When that's needed, use e.g. ScopedTaskSafety // from the client. - virtual void RequestCallOnNextTick(absl::AnyInvocable callback) {} + virtual void RequestCallOnNextTick( + absl::AnyInvocable /* callback */) {} // Returns the current tick period of the metronome. 
virtual TimeDelta TickPeriod() const = 0; diff --git a/api/metronome/test/BUILD.gn b/api/metronome/test/BUILD.gn index f415d98a0b..1b87fe8625 100644 --- a/api/metronome/test/BUILD.gn +++ b/api/metronome/test/BUILD.gn @@ -16,15 +16,8 @@ rtc_library("fake_metronome") { ] deps = [ "..:metronome", - "../..:priority", - "../..:sequence_checker", - "../../../rtc_base:macromagic", - "../../../rtc_base:rtc_event", - "../../../rtc_base:rtc_task_queue", - "../../../rtc_base/synchronization:mutex", - "../../../rtc_base/task_utils:repeating_task", - "../../../test:test_support", "../../task_queue", "../../units:time_delta", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] } diff --git a/api/metronome/test/fake_metronome.cc b/api/metronome/test/fake_metronome.cc index 025f7ce5a6..563bd7ef98 100644 --- a/api/metronome/test/fake_metronome.cc +++ b/api/metronome/test/fake_metronome.cc @@ -10,16 +10,13 @@ #include "api/metronome/test/fake_metronome.h" +#include #include #include -#include "api/priority.h" -#include "api/sequence_checker.h" +#include "absl/functional/any_invocable.h" #include "api/task_queue/task_queue_base.h" -#include "api/task_queue/task_queue_factory.h" #include "api/units/time_delta.h" -#include "rtc_base/event.h" -#include "rtc_base/task_utils/repeating_task.h" namespace webrtc::test { @@ -49,6 +46,10 @@ void ForcedTickMetronome::Tick() { FakeMetronome::FakeMetronome(TimeDelta tick_period) : tick_period_(tick_period) {} +void FakeMetronome::SetTickPeriod(TimeDelta tick_period) { + tick_period_ = tick_period; +} + void FakeMetronome::RequestCallOnNextTick( absl::AnyInvocable callback) { TaskQueueBase* current = TaskQueueBase::Current(); diff --git a/api/metronome/test/fake_metronome.h b/api/metronome/test/fake_metronome.h index 73c938e9cd..9702062cf6 100644 --- a/api/metronome/test/fake_metronome.h +++ b/api/metronome/test/fake_metronome.h @@ -11,18 +11,12 @@ #ifndef API_METRONOME_TEST_FAKE_METRONOME_H_ #define API_METRONOME_TEST_FAKE_METRONOME_H_ -#include -#include +#include #include +#include "absl/functional/any_invocable.h" #include "api/metronome/metronome.h" -#include "api/task_queue/task_queue_base.h" -#include "api/task_queue/task_queue_factory.h" #include "api/units/time_delta.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_queue.h" -#include "rtc_base/task_utils/repeating_task.h" -#include "rtc_base/thread_annotations.h" namespace webrtc::test { @@ -48,19 +42,18 @@ class ForcedTickMetronome : public Metronome { // FakeMetronome is a metronome that ticks based on a repeating task at the // `tick_period` provided in the constructor. It is designed for use with // simulated task queues for unit tests. -// -// `Stop()` must be called before destruction, as it cancels the metronome tick -// on the proper task queue. class FakeMetronome : public Metronome { public: explicit FakeMetronome(TimeDelta tick_period); + void SetTickPeriod(TimeDelta tick_period); + // Metronome implementation. 
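// Illustrative sketch (test-only): exercising code that schedules per-tick
// work using FakeMetronome. `SketchFakeMetronomeUsage` is hypothetical; per
// the class comment it is assumed to run on a (simulated) TaskQueue, since
// RequestCallOnNextTick posts back to the current task queue.
#include "api/metronome/test/fake_metronome.h"
#include "api/units/time_delta.h"

void SketchFakeMetronomeUsage() {
  webrtc::test::FakeMetronome metronome(webrtc::TimeDelta::Millis(16));
  metronome.RequestCallOnNextTick([] { /* per-tick work goes here */ });
  // The tick period can now be changed after construction.
  metronome.SetTickPeriod(webrtc::TimeDelta::Millis(33));
}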
void RequestCallOnNextTick(absl::AnyInvocable callback) override; TimeDelta TickPeriod() const override; private: - const TimeDelta tick_period_; + TimeDelta tick_period_; std::vector> callbacks_; }; diff --git a/api/neteq/BUILD.gn b/api/neteq/BUILD.gn index 504fa059bb..9e79a877fd 100644 --- a/api/neteq/BUILD.gn +++ b/api/neteq/BUILD.gn @@ -17,14 +17,31 @@ rtc_source_set("neteq_api") { ] deps = [ + "..:array_view", "..:rtp_headers", "..:rtp_packet_info", "..:scoped_refptr", "../../rtc_base:stringutils", - "../../system_wrappers:system_wrappers", "../audio_codecs:audio_codecs_api", + "../environment", + "../units:timestamp", + ] +} + +rtc_source_set("default_neteq_factory") { + visibility = [ "*" ] + sources = [ + "default_neteq_factory.cc", + "default_neteq_factory.h", + ] + deps = [ + ":default_neteq_controller_factory", + ":neteq_api", + "..:scoped_refptr", + "../../modules/audio_coding:neteq", + "../audio_codecs:audio_codecs_api", + "../environment", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("custom_neteq_factory") { @@ -39,8 +56,8 @@ rtc_source_set("custom_neteq_factory") { ":neteq_controller_api", "..:scoped_refptr", "../../modules/audio_coding:neteq", - "../../system_wrappers:system_wrappers", "../audio_codecs:audio_codecs_api", + "../environment", ] } @@ -54,9 +71,8 @@ rtc_source_set("neteq_controller_api") { deps = [ ":neteq_api", ":tick_timer", - "../../system_wrappers:system_wrappers", + "../environment", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("default_neteq_controller_factory") { @@ -69,6 +85,7 @@ rtc_source_set("default_neteq_controller_factory") { deps = [ ":neteq_controller_api", "../../modules/audio_coding:neteq", + "../environment", ] } @@ -78,9 +95,7 @@ rtc_source_set("tick_timer") { "tick_timer.cc", "tick_timer.h", ] - deps = [ - "../../rtc_base:checks", - ] + deps = [ "../../rtc_base:checks" ] } rtc_source_set("tick_timer_unittest") { diff --git a/api/neteq/custom_neteq_factory.cc b/api/neteq/custom_neteq_factory.cc index b2df5df9ff..45dd32d806 100644 --- a/api/neteq/custom_neteq_factory.cc +++ b/api/neteq/custom_neteq_factory.cc @@ -10,8 +10,14 @@ #include "api/neteq/custom_neteq_factory.h" +#include #include +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/environment/environment.h" +#include "api/neteq/neteq.h" +#include "api/neteq/neteq_controller_factory.h" +#include "api/scoped_refptr.h" #include "modules/audio_coding/neteq/neteq_impl.h" namespace webrtc { @@ -22,12 +28,12 @@ CustomNetEqFactory::CustomNetEqFactory( CustomNetEqFactory::~CustomNetEqFactory() = default; -std::unique_ptr CustomNetEqFactory::CreateNetEq( +std::unique_ptr CustomNetEqFactory::Create( + const Environment& env, const NetEq::Config& config, - const rtc::scoped_refptr& decoder_factory, - Clock* clock) const { + scoped_refptr decoder_factory) const { return std::make_unique( - config, NetEqImpl::Dependencies(config, clock, decoder_factory, + config, NetEqImpl::Dependencies(env, config, std::move(decoder_factory), *controller_factory_)); } diff --git a/api/neteq/custom_neteq_factory.h b/api/neteq/custom_neteq_factory.h index d080f68e8e..b77eb06894 100644 --- a/api/neteq/custom_neteq_factory.h +++ b/api/neteq/custom_neteq_factory.h @@ -14,10 +14,11 @@ #include #include "api/audio_codecs/audio_decoder_factory.h" +#include "api/environment/environment.h" +#include "api/neteq/neteq.h" #include "api/neteq/neteq_controller_factory.h" #include "api/neteq/neteq_factory.h" #include 
"api/scoped_refptr.h" -#include "system_wrappers/include/clock.h" namespace webrtc { @@ -31,10 +32,10 @@ class CustomNetEqFactory : public NetEqFactory { CustomNetEqFactory(const CustomNetEqFactory&) = delete; CustomNetEqFactory& operator=(const CustomNetEqFactory&) = delete; - std::unique_ptr CreateNetEq( + std::unique_ptr Create( + const Environment& env, const NetEq::Config& config, - const rtc::scoped_refptr& decoder_factory, - Clock* clock) const override; + scoped_refptr decoder_factory) const override; private: std::unique_ptr controller_factory_; diff --git a/api/neteq/default_neteq_controller_factory.cc b/api/neteq/default_neteq_controller_factory.cc index 4e0a0df108..81461c0817 100644 --- a/api/neteq/default_neteq_controller_factory.cc +++ b/api/neteq/default_neteq_controller_factory.cc @@ -10,6 +10,10 @@ #include "api/neteq/default_neteq_controller_factory.h" +#include + +#include "api/environment/environment.h" +#include "api/neteq/neteq_controller.h" #include "modules/audio_coding/neteq/decision_logic.h" namespace webrtc { @@ -17,10 +21,10 @@ namespace webrtc { DefaultNetEqControllerFactory::DefaultNetEqControllerFactory() = default; DefaultNetEqControllerFactory::~DefaultNetEqControllerFactory() = default; -std::unique_ptr -DefaultNetEqControllerFactory::CreateNetEqController( +std::unique_ptr DefaultNetEqControllerFactory::Create( + const Environment& env, const NetEqController::Config& config) const { - return std::make_unique(config); + return std::make_unique(env, config); } } // namespace webrtc diff --git a/api/neteq/default_neteq_controller_factory.h b/api/neteq/default_neteq_controller_factory.h index 611afc2586..e0c6926f2f 100644 --- a/api/neteq/default_neteq_controller_factory.h +++ b/api/neteq/default_neteq_controller_factory.h @@ -13,6 +13,8 @@ #include +#include "api/environment/environment.h" +#include "api/neteq/neteq_controller.h" #include "api/neteq/neteq_controller_factory.h" namespace webrtc { @@ -26,7 +28,8 @@ class DefaultNetEqControllerFactory : public NetEqControllerFactory { DefaultNetEqControllerFactory& operator=( const DefaultNetEqControllerFactory&) = delete; - std::unique_ptr CreateNetEqController( + std::unique_ptr Create( + const Environment& env, const NetEqController::Config& config) const override; }; diff --git a/modules/audio_coding/neteq/default_neteq_factory.cc b/api/neteq/default_neteq_factory.cc similarity index 63% rename from modules/audio_coding/neteq/default_neteq_factory.cc rename to api/neteq/default_neteq_factory.cc index 487450fe0f..f245814ab1 100644 --- a/modules/audio_coding/neteq/default_neteq_factory.cc +++ b/api/neteq/default_neteq_factory.cc @@ -8,10 +8,15 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/audio_coding/neteq/default_neteq_factory.h" +#include "api/neteq/default_neteq_factory.h" +#include #include +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/environment/environment.h" +#include "api/neteq/neteq.h" +#include "api/scoped_refptr.h" #include "modules/audio_coding/neteq/neteq_impl.h" namespace webrtc { @@ -19,12 +24,12 @@ namespace webrtc { DefaultNetEqFactory::DefaultNetEqFactory() = default; DefaultNetEqFactory::~DefaultNetEqFactory() = default; -std::unique_ptr DefaultNetEqFactory::CreateNetEq( +std::unique_ptr DefaultNetEqFactory::Create( + const Environment& env, const NetEq::Config& config, - const rtc::scoped_refptr& decoder_factory, - Clock* clock) const { + scoped_refptr decoder_factory) const { return std::make_unique( - config, NetEqImpl::Dependencies(config, clock, decoder_factory, + config, NetEqImpl::Dependencies(env, config, std::move(decoder_factory), controller_factory_)); } diff --git a/modules/audio_coding/neteq/default_neteq_factory.h b/api/neteq/default_neteq_factory.h similarity index 73% rename from modules/audio_coding/neteq/default_neteq_factory.h rename to api/neteq/default_neteq_factory.h index 24d2bae419..444d0ee3ed 100644 --- a/modules/audio_coding/neteq/default_neteq_factory.h +++ b/api/neteq/default_neteq_factory.h @@ -8,16 +8,17 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_AUDIO_CODING_NETEQ_DEFAULT_NETEQ_FACTORY_H_ -#define MODULES_AUDIO_CODING_NETEQ_DEFAULT_NETEQ_FACTORY_H_ +#ifndef API_NETEQ_DEFAULT_NETEQ_FACTORY_H_ +#define API_NETEQ_DEFAULT_NETEQ_FACTORY_H_ #include #include "api/audio_codecs/audio_decoder_factory.h" +#include "api/environment/environment.h" #include "api/neteq/default_neteq_controller_factory.h" +#include "api/neteq/neteq.h" #include "api/neteq/neteq_factory.h" #include "api/scoped_refptr.h" -#include "system_wrappers/include/clock.h" namespace webrtc { @@ -28,14 +29,14 @@ class DefaultNetEqFactory : public NetEqFactory { DefaultNetEqFactory(const DefaultNetEqFactory&) = delete; DefaultNetEqFactory& operator=(const DefaultNetEqFactory&) = delete; - std::unique_ptr CreateNetEq( + std::unique_ptr Create( + const Environment& env, const NetEq::Config& config, - const rtc::scoped_refptr& decoder_factory, - Clock* clock) const override; + scoped_refptr decoder_factory) const override; private: const DefaultNetEqControllerFactory controller_factory_; }; } // namespace webrtc -#endif // MODULES_AUDIO_CODING_NETEQ_DEFAULT_NETEQ_FACTORY_H_ +#endif // API_NETEQ_DEFAULT_NETEQ_FACTORY_H_ diff --git a/api/neteq/neteq.cc b/api/neteq/neteq.cc index 155ddf2cf3..df903a84bf 100644 --- a/api/neteq/neteq.cc +++ b/api/neteq/neteq.cc @@ -10,6 +10,8 @@ #include "api/neteq/neteq.h" +#include + #include "rtc_base/strings/string_builder.h" namespace webrtc { @@ -23,9 +25,8 @@ NetEq::Config& NetEq::Config::operator=(Config&&) = default; std::string NetEq::Config::ToString() const { char buf[1024]; - rtc::SimpleStringBuilder ss(buf); - ss << "sample_rate_hz=" << sample_rate_hz << ", enable_post_decode_vad=" - << (enable_post_decode_vad ? "true" : "false") + SimpleStringBuilder ss(buf); + ss << "sample_rate_hz=" << sample_rate_hz << ", max_packets_in_buffer=" << max_packets_in_buffer << ", min_delay_ms=" << min_delay_ms << ", enable_fast_accelerate=" << (enable_fast_accelerate ? 
"true" : "false") diff --git a/api/neteq/neteq.h b/api/neteq/neteq.h index 43e0e09784..67a1a83433 100644 --- a/api/neteq/neteq.h +++ b/api/neteq/neteq.h @@ -12,24 +12,24 @@ #define API_NETEQ_NETEQ_H_ #include // Provide access to size_t. +#include #include +#include #include #include -#include "absl/types/optional.h" +#include "api/array_view.h" #include "api/audio_codecs/audio_codec_pair_id.h" -#include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" #include "api/rtp_headers.h" -#include "api/scoped_refptr.h" +#include "api/rtp_packet_info.h" +#include "api/units/timestamp.h" namespace webrtc { // Forward declarations. class AudioFrame; -class AudioDecoderFactory; -class Clock; struct NetEqNetworkStatistics { uint16_t current_buffer_size_ms; // Current jitter buffer size in ms. @@ -93,6 +93,7 @@ struct NetEqLifetimeStatistics { int32_t total_interruption_duration_ms = 0; // Total number of comfort noise samples generated during DTX. uint64_t generated_noise_samples = 0; + uint64_t total_processing_delay_us = 0; }; // Metrics that describe the operations performed in NetEq, and the internal @@ -130,14 +131,13 @@ class NetEq { std::string ToString() const; int sample_rate_hz = 48000; // Initial value. Will change with input data. - bool enable_post_decode_vad = false; size_t max_packets_in_buffer = 200; int max_delay_ms = 0; int min_delay_ms = 0; bool enable_fast_accelerate = false; bool enable_muted_state = false; bool enable_rtx_handling = false; - absl::optional codec_pair_id; + std::optional codec_pair_id; bool for_test_no_time_stretching = false; // Use only for testing. }; @@ -177,6 +177,7 @@ class NetEq { // Return type for GetDecoderFormat. struct DecoderFormat { + int payload_type; int sample_rate_hz; int num_channels; SdpAudioFormat sdp_format; @@ -184,10 +185,28 @@ class NetEq { virtual ~NetEq() {} + virtual int InsertPacket(const RTPHeader& rtp_header, + ArrayView payload) { + return InsertPacket(rtp_header, payload, + /*receive_time=*/Timestamp::MinusInfinity()); + } + + // TODO: webrtc:343501093 - removed unused method. + virtual int InsertPacket(const RTPHeader& rtp_header, + ArrayView payload, + Timestamp receive_time) { + return InsertPacket(rtp_header, payload, + RtpPacketInfo(rtp_header, receive_time)); + } + // Inserts a new packet into NetEq. // Returns 0 on success, -1 on failure. + // TODO: webrtc:343501093 - Make this method pure virtual. virtual int InsertPacket(const RTPHeader& rtp_header, - rtc::ArrayView payload) = 0; + ArrayView payload, + const RtpPacketInfo& /* rtp_packet_info */) { + return InsertPacket(rtp_header, payload); + } // Lets NetEq know that a packet arrived with an empty payload. This typically // happens when empty packets are used for probing the network channel, and @@ -197,24 +216,23 @@ class NetEq { // Instructs NetEq to deliver 10 ms of audio data. The data is written to // `audio_frame`. All data in `audio_frame` is wiped; `data_`, `speech_type_`, - // `num_channels_`, `sample_rate_hz_`, `samples_per_channel_`, and - // `vad_activity_` are updated upon success. If an error is returned, some - // fields may not have been updated, or may contain inconsistent values. - // If muted state is enabled (through Config::enable_muted_state), `muted` - // may be set to true after a prolonged expand period. When this happens, the - // `data_` in `audio_frame` is not written, but should be interpreted as being - // all zeros. 
For testing purposes, an override can be supplied in the - // `action_override` argument, which will cause NetEq to take this action - // next, instead of the action it would normally choose. An optional output - // argument for fetching the current sample rate can be provided, which - // will return the same value as last_output_sample_rate_hz() but will avoid - // additional synchronization. + // `num_channels_`, `sample_rate_hz_` and `samples_per_channel_` are updated + // upon success. If an error is returned, some fields may not have been + // updated, or may contain inconsistent values. If muted state is enabled + // (through Config::enable_muted_state), `muted` may be set to true after a + // prolonged expand period. When this happens, the `data_` in `audio_frame` + // is not written, but should be interpreted as being all zeros. For testing + // purposes, an override can be supplied in the `action_override` argument, + // which will cause NetEq to take this action next, instead of the action it + // would normally choose. An optional output argument for fetching the current + // sample rate can be provided, which will return the same value as + // last_output_sample_rate_hz() but will avoid additional synchronization. // Returns kOK on success, or kFail in case of an error. virtual int GetAudio( AudioFrame* audio_frame, - bool* muted, + bool* muted = nullptr, int* current_sample_rate_hz = nullptr, - absl::optional action_override = absl::nullopt) = 0; + std::optional action_override = std::nullopt) = 0; // Replaces the current set of decoders with the given one. virtual void SetCodecs(const std::map& codecs) = 0; @@ -278,16 +296,9 @@ class NetEq { // statistics are never reset. virtual NetEqOperationsAndState GetOperationsAndState() const = 0; - // Enables post-decode VAD. When enabled, GetAudio() will return - // kOutputVADPassive when the signal contains no speech. - virtual void EnableVad() = 0; - - // Disables post-decode VAD. - virtual void DisableVad() = 0; - // Returns the RTP timestamp for the last sample delivered by GetAudio(). // The return value will be empty if no valid timestamp is available. - virtual absl::optional GetPlayoutTimestamp() const = 0; + virtual std::optional GetPlayoutTimestamp() const = 0; // Returns the sample rate in Hz of the audio produced in the last GetAudio // call. If GetAudio has not been called yet, the configured sample rate @@ -296,8 +307,16 @@ class NetEq { // Returns the decoder info for the given payload type. Returns empty if no // such payload type was registered. - virtual absl::optional GetDecoderFormat( - int payload_type) const = 0; + [[deprecated( + "Use GetCurrentDecoderFormat")]] virtual std::optional + GetDecoderFormat(int /* payload_type */) const { + return std::nullopt; + } + + // Returns info for the most recently used decoder. + virtual std::optional GetCurrentDecoderFormat() const { + return std::nullopt; + } // Flushes both the packet buffer and the sync buffer. 
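Putting the relocated factory and the new packet/audio entry points together, a sketch of the updated call pattern; CreateEnvironment() and CreateBuiltinAudioDecoderFactory() come from headers outside this diff and are assumptions, as are the helper names.

#include <cstdint>
#include <memory>
#include <optional>

#include "api/array_view.h"
#include "api/audio/audio_frame.h"
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/environment/environment_factory.h"
#include "api/neteq/default_neteq_factory.h"
#include "api/neteq/neteq.h"
#include "api/rtp_headers.h"
#include "api/rtp_packet_info.h"
#include "api/units/timestamp.h"

std::unique_ptr<webrtc::NetEq> SketchCreateNetEq() {
  // Environment now carries the clock and field trials; the Clock* parameter
  // of the old CreateNetEq() is gone.
  const webrtc::Environment env = webrtc::CreateEnvironment();
  webrtc::NetEq::Config config;
  config.sample_rate_hz = 48000;
  webrtc::DefaultNetEqFactory factory;
  return factory.Create(env, config,
                        webrtc::CreateBuiltinAudioDecoderFactory());
}

void SketchInsertAndPull(webrtc::NetEq& neteq,
                         const webrtc::RTPHeader& header,
                         webrtc::ArrayView<const uint8_t> payload,
                         webrtc::Timestamp receive_time) {
  // The receive time travels inside RtpPacketInfo instead of being read from
  // an injected clock.
  neteq.InsertPacket(header, payload,
                     webrtc::RtpPacketInfo(header, receive_time));

  webrtc::AudioFrame frame;
  // `muted` and `action_override` are optional arguments now.
  neteq.GetAudio(&frame);

  // Introspection moves from per-payload-type lookup to "current decoder".
  if (std::optional<webrtc::NetEq::DecoderFormat> format =
          neteq.GetCurrentDecoderFormat()) {
    // format->payload_type, format->sample_rate_hz, format->num_channels ...
  }
}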
virtual void FlushBuffers() = 0; diff --git a/api/neteq/neteq_controller.h b/api/neteq/neteq_controller.h index 6f42e83b68..b764263cc9 100644 --- a/api/neteq/neteq_controller.h +++ b/api/neteq/neteq_controller.h @@ -13,13 +13,10 @@ #include #include -#include -#include +#include -#include "absl/types/optional.h" #include "api/neteq/neteq.h" #include "api/neteq/tick_timer.h" -#include "system_wrappers/include/clock.h" namespace webrtc { @@ -67,7 +64,6 @@ class NetEqController { int max_packets_in_buffer; int base_min_delay_ms; TickTimer* tick_timer; - webrtc::Clock* clock = nullptr; }; struct PacketInfo { @@ -88,7 +84,7 @@ class NetEqController { uint32_t target_timestamp; int16_t expand_mutefactor; size_t last_packet_samples; - absl::optional next_packet; + std::optional next_packet; NetEq::Mode last_mode; bool play_dtmf; size_t generated_noise_samples; @@ -163,9 +159,9 @@ class NetEqController { // Notify the NetEqController that a packet has arrived. Returns the relative // arrival delay, if it can be computed. - virtual absl::optional PacketArrived(int fs_hz, - bool should_update_stats, - const PacketArrivedInfo& info) = 0; + virtual std::optional PacketArrived(int fs_hz, + bool should_update_stats, + const PacketArrivedInfo& info) = 0; // Notify the NetEqController that we are currently in muted state. // TODO(bugs.webrtc.org/14270): Make pure virtual when downstream is updated. diff --git a/api/neteq/neteq_controller_factory.h b/api/neteq/neteq_controller_factory.h index 9aba8a21a7..28c1f0b155 100644 --- a/api/neteq/neteq_controller_factory.h +++ b/api/neteq/neteq_controller_factory.h @@ -13,6 +13,7 @@ #include +#include "api/environment/environment.h" #include "api/neteq/neteq_controller.h" namespace webrtc { @@ -24,7 +25,8 @@ class NetEqControllerFactory { virtual ~NetEqControllerFactory() = default; // Creates a new NetEqController object, with parameters set in `config`. - virtual std::unique_ptr CreateNetEqController( + virtual std::unique_ptr Create( + const Environment& env, const NetEqController::Config& config) const = 0; }; diff --git a/api/neteq/neteq_factory.h b/api/neteq/neteq_factory.h index 526a1282f5..98165c9841 100644 --- a/api/neteq/neteq_factory.h +++ b/api/neteq/neteq_factory.h @@ -14,8 +14,9 @@ #include #include "api/audio_codecs/audio_decoder_factory.h" +#include "api/environment/environment.h" #include "api/neteq/neteq.h" -#include "system_wrappers/include/clock.h" +#include "api/scoped_refptr.h" namespace webrtc { @@ -27,10 +28,10 @@ class NetEqFactory { // Creates a new NetEq object, with parameters set in `config`. The `config` // object will only have to be valid for the duration of the call to this // method. 
- virtual std::unique_ptr CreateNetEq( + virtual std::unique_ptr Create( + const Environment& env, const NetEq::Config& config, - const rtc::scoped_refptr& decoder_factory, - Clock* clock) const = 0; + scoped_refptr decoder_factory) const = 0; }; } // namespace webrtc diff --git a/api/neteq/tick_timer.cc b/api/neteq/tick_timer.cc index 8f60bf48bf..a00514a801 100644 --- a/api/neteq/tick_timer.cc +++ b/api/neteq/tick_timer.cc @@ -10,6 +10,8 @@ #include "api/neteq/tick_timer.h" +#include + namespace webrtc { TickTimer::Stopwatch::Stopwatch(const TickTimer& ticktimer) diff --git a/api/neteq/tick_timer_unittest.cc b/api/neteq/tick_timer_unittest.cc index 863c0117f4..25d3a8abf1 100644 --- a/api/neteq/tick_timer_unittest.cc +++ b/api/neteq/tick_timer_unittest.cc @@ -10,9 +10,9 @@ #include "api/neteq/tick_timer.h" +#include #include -#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { diff --git a/api/network_state_predictor.h b/api/network_state_predictor.h index 9cf5ab6f02..d3063ceafd 100644 --- a/api/network_state_predictor.h +++ b/api/network_state_predictor.h @@ -11,17 +11,12 @@ #ifndef API_NETWORK_STATE_PREDICTOR_H_ #define API_NETWORK_STATE_PREDICTOR_H_ +#include #include -#include -namespace webrtc { +#include "api/transport/bandwidth_usage.h" -enum class BandwidthUsage { - kBwNormal = 0, - kBwUnderusing = 1, - kBwOverusing = 2, - kLast -}; +namespace webrtc { // TODO(yinwa): work in progress. API in class NetworkStatePredictor should not // be used by other users until this comment is removed. diff --git a/api/notifier.h b/api/notifier.h index 64d5b22ad5..d1070a96e7 100644 --- a/api/notifier.h +++ b/api/notifier.h @@ -17,6 +17,7 @@ #include "api/sequence_checker.h" #include "rtc_base/checks.h" #include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { diff --git a/api/numerics/BUILD.gn b/api/numerics/BUILD.gn index 408dc5b9f1..4cc5489d5a 100644 --- a/api/numerics/BUILD.gn +++ b/api/numerics/BUILD.gn @@ -21,8 +21,8 @@ rtc_library("numerics") { "../../rtc_base:rtc_numerics", "../../rtc_base:timeutils", "../units:timestamp", + "//third_party/abseil-cpp/absl/algorithm:container", ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } if (rtc_include_tests) { @@ -35,7 +35,7 @@ if (rtc_include_tests) { deps = [ ":numerics", "../../test:test_support", + "//third_party/abseil-cpp/absl/algorithm:container", ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } } diff --git a/api/numerics/samples_stats_counter.cc b/api/numerics/samples_stats_counter.cc index 4eb0cde299..0c4dba98c9 100644 --- a/api/numerics/samples_stats_counter.cc +++ b/api/numerics/samples_stats_counter.cc @@ -12,8 +12,11 @@ #include #include +#include #include "absl/algorithm/container.h" +#include "api/units/timestamp.h" +#include "rtc_base/checks.h" #include "rtc_base/time_utils.h" namespace webrtc { @@ -32,7 +35,7 @@ SamplesStatsCounter& SamplesStatsCounter::operator=(SamplesStatsCounter&&) = default; void SamplesStatsCounter::AddSample(double value) { - AddSample(StatsSample{value, Timestamp::Micros(rtc::TimeMicros())}); + AddSample(StatsSample{value, Timestamp::Micros(TimeMicros())}); } void SamplesStatsCounter::AddSample(StatsSample sample) { diff --git a/api/numerics/samples_stats_counter.h b/api/numerics/samples_stats_counter.h index c4eabffa2f..d993effe27 100644 --- a/api/numerics/samples_stats_counter.h +++ b/api/numerics/samples_stats_counter.h @@ -11,6 +11,9 @@ #ifndef API_NUMERICS_SAMPLES_STATS_COUNTER_H_ #define 
API_NUMERICS_SAMPLES_STATS_COUNTER_H_ +#include +#include + #include #include #include @@ -100,7 +103,7 @@ class SamplesStatsCounter { // guarantees of order, so samples can be in different order comparing to in // which they were added into counter. Also return value will be invalidate // after call to any non const method. - rtc::ArrayView GetTimedSamples() const { return samples_; } + ArrayView GetTimedSamples() const { return samples_; } std::vector GetSamples() const { std::vector out; out.reserve(samples_.size()); diff --git a/api/packet_socket_factory.h b/api/packet_socket_factory.h index 29d2606b9b..602eeaa9ed 100644 --- a/api/packet_socket_factory.h +++ b/api/packet_socket_factory.h @@ -11,20 +11,18 @@ #ifndef API_PACKET_SOCKET_FACTORY_H_ #define API_PACKET_SOCKET_FACTORY_H_ +#include #include #include #include #include "api/async_dns_resolver.h" -#include "api/wrapping_async_dns_resolver.h" #include "rtc_base/async_packet_socket.h" -#include "rtc_base/proxy_info.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/ssl_certificate.h" #include "rtc_base/system/rtc_export.h" -namespace rtc { - -class SSLCertificateVerifier; -class AsyncResolverInterface; +namespace webrtc { struct PacketSocketTcpOptions { PacketSocketTcpOptions() = default; @@ -68,33 +66,25 @@ class RTC_EXPORT PacketSocketFactory { virtual AsyncPacketSocket* CreateClientTcpSocket( const SocketAddress& local_address, const SocketAddress& remote_address, - const ProxyInfo& proxy_info, - const std::string& user_agent, const PacketSocketTcpOptions& tcp_options) = 0; - // The AsyncResolverInterface is deprecated; users are encouraged - // to switch to the AsyncDnsResolverInterface. - // TODO(bugs.webrtc.org/12598): Remove once all downstream users - // are converted. - virtual AsyncResolverInterface* CreateAsyncResolver() { - // Default implementation, so that downstream users can remove this - // immediately after changing to CreateAsyncDnsResolver - RTC_DCHECK_NOTREACHED(); - return nullptr; - } - - virtual std::unique_ptr - CreateAsyncDnsResolver() { - // Default implementation, to aid in transition to AsyncDnsResolverInterface - return std::make_unique( - CreateAsyncResolver()); - } + virtual std::unique_ptr + CreateAsyncDnsResolver() = 0; private: PacketSocketFactory(const PacketSocketFactory&) = delete; PacketSocketFactory& operator=(const PacketSocketFactory&) = delete; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::PacketSocketFactory; +using ::webrtc::PacketSocketTcpOptions; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // API_PACKET_SOCKET_FACTORY_H_ diff --git a/api/peer_connection_interface.cc b/api/peer_connection_interface.cc index 050b61da95..947ecc62a6 100644 --- a/api/peer_connection_interface.cc +++ b/api/peer_connection_interface.cc @@ -10,7 +10,7 @@ #include "api/peer_connection_interface.h" -#include +#include "pc/media_factory.h" namespace webrtc { @@ -58,56 +58,15 @@ PeerConnectionDependencies::~PeerConnectionDependencies() = default; PeerConnectionFactoryDependencies::PeerConnectionFactoryDependencies() = default; +// TODO: bugs.webrtc.org/369904700 - remove pragma once `audio_processing` +// is removed from PeerConnectionFactoryDependencies. 
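Referring back to the PacketSocketFactory changes above: call sites lose the proxy/user-agent arguments and the resolver factory method becomes pure virtual. A rough sketch (the helper names are illustrative):

#include <memory>

#include "api/async_dns_resolver.h"
#include "api/packet_socket_factory.h"
#include "rtc_base/async_packet_socket.h"
#include "rtc_base/socket_address.h"

webrtc::AsyncPacketSocket* SketchClientTcpSocket(
    webrtc::PacketSocketFactory& factory,
    const webrtc::SocketAddress& local_address,
    const webrtc::SocketAddress& remote_address) {
  // ProxyInfo and user-agent parameters are gone; TLS-related knobs live in
  // PacketSocketTcpOptions.
  webrtc::PacketSocketTcpOptions tcp_options;
  return factory.CreateClientTcpSocket(local_address, remote_address,
                                       tcp_options);
}

std::unique_ptr<webrtc::AsyncDnsResolverInterface> SketchResolver(
    webrtc::PacketSocketFactory& factory) {
  // Pure virtual now; the deprecated AsyncResolverInterface bridge is removed.
  return factory.CreateAsyncDnsResolver();
}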
+#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" PeerConnectionFactoryDependencies::PeerConnectionFactoryDependencies( PeerConnectionFactoryDependencies&&) = default; +#pragma clang diagnostic pop PeerConnectionFactoryDependencies::~PeerConnectionFactoryDependencies() = default; -rtc::scoped_refptr -PeerConnectionFactoryInterface::CreatePeerConnection( - const PeerConnectionInterface::RTCConfiguration& configuration, - std::unique_ptr allocator, - std::unique_ptr cert_generator, - PeerConnectionObserver* observer) { - PeerConnectionDependencies dependencies(observer); - dependencies.allocator = std::move(allocator); - dependencies.cert_generator = std::move(cert_generator); - auto result = - CreatePeerConnectionOrError(configuration, std::move(dependencies)); - if (!result.ok()) { - return nullptr; - } - return result.MoveValue(); -} - -rtc::scoped_refptr -PeerConnectionFactoryInterface::CreatePeerConnection( - const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies dependencies) { - auto result = - CreatePeerConnectionOrError(configuration, std::move(dependencies)); - if (!result.ok()) { - return nullptr; - } - return result.MoveValue(); -} - -RTCErrorOr> -PeerConnectionFactoryInterface::CreatePeerConnectionOrError( - const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies dependencies) { - return RTCError(RTCErrorType::INTERNAL_ERROR); -} - -RtpCapabilities PeerConnectionFactoryInterface::GetRtpSenderCapabilities( - cricket::MediaType kind) const { - return {}; -} - -RtpCapabilities PeerConnectionFactoryInterface::GetRtpReceiverCapabilities( - cricket::MediaType kind) const { - return {}; -} - } // namespace webrtc diff --git a/api/peer_connection_interface.h b/api/peer_connection_interface.h index 37dcfbbc27..de071d5b04 100644 --- a/api/peer_connection_interface.h +++ b/api/peer_connection_interface.h @@ -66,28 +66,30 @@ #ifndef API_PEER_CONNECTION_INTERFACE_H_ #define API_PEER_CONNECTION_INTERFACE_H_ +// IWYU pragma: no_include "pc/media_factory.h" #include #include #include #include +#include #include #include #include "absl/base/attributes.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/adaptation/resource.h" #include "api/async_dns_resolver.h" -#include "api/async_resolver_factory.h" +#include "api/audio/audio_device.h" #include "api/audio/audio_mixer.h" +#include "api/audio/audio_processing.h" #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_encoder_factory.h" #include "api/audio_options.h" -#include "api/call/call_factory_interface.h" #include "api/candidate.h" #include "api/crypto/crypto_options.h" +#include "api/data_channel_event_observer_interface.h" #include "api/data_channel_interface.h" #include "api/dtls_transport_interface.h" #include "api/fec_controller.h" @@ -114,39 +116,45 @@ #include "api/set_remote_description_observer_interface.h" #include "api/stats/rtc_stats_collector_callback.h" #include "api/task_queue/task_queue_factory.h" +#include "api/transport/bandwidth_estimation_settings.h" #include "api/transport/bitrate_settings.h" #include "api/transport/enums.h" #include "api/transport/network_control.h" #include "api/transport/sctp_transport_factory_interface.h" #include "api/turn_customizer.h" #include "api/video/video_bitrate_allocator_factory.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" #include 
"call/rtp_transport_controller_send_factory_interface.h" #include "media/base/media_config.h" -#include "media/base/media_engine.h" // TODO(bugs.webrtc.org/7447): We plan to provide a way to let applications // inject a PacketSocketFactory and/or NetworkManager, and not expose // PortAllocator in the PeerConnection api. +#include "api/audio/audio_frame_processor.h" +#include "api/ref_count.h" +#include "api/units/time_delta.h" +#include "p2p/base/port.h" #include "p2p/base/port_allocator.h" +#include "rtc_base/checks.h" #include "rtc_base/network.h" #include "rtc_base/network_constants.h" #include "rtc_base/network_monitor_factory.h" -#include "rtc_base/ref_count.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/rtc_certificate_generator.h" -#include "rtc_base/socket_address.h" +#include "rtc_base/socket_factory.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/thread.h" -namespace rtc { -class Thread; -} // namespace rtc - namespace webrtc { +// IWYU pragma: begin_keep +// MediaFactory class definition is not part of the api. +class MediaFactory; +// IWYU pragma: end_keep // MediaStream container interface. -class StreamCollectionInterface : public rtc::RefCountInterface { +class StreamCollectionInterface : public webrtc::RefCountInterface { public: // TODO(ronghuawu): Update the function names to c++ style, e.g. find -> Find. virtual size_t count() = 0; @@ -160,7 +168,7 @@ class StreamCollectionInterface : public rtc::RefCountInterface { ~StreamCollectionInterface() override = default; }; -class StatsObserver : public rtc::RefCountInterface { +class StatsObserver : public webrtc::RefCountInterface { public: virtual void OnComplete(const StatsReports& reports) = 0; @@ -175,7 +183,7 @@ enum class SdpSemantics { kUnifiedPlan, }; -class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { +class RTC_EXPORT PeerConnectionInterface : public webrtc::RefCountInterface { public: // See https://w3c.github.io/webrtc-pc/#dom-rtcsignalingstate enum SignalingState { @@ -340,6 +348,13 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { bool dscp() const { return media_config.enable_dscp; } void set_dscp(bool enable) { media_config.enable_dscp = enable; } + bool stats_timestamp_with_environment_clock() const { + return media_config.stats_timestamp_with_environment_clock; + } + void set_stats_timestamp_with_environment_clock(bool enable) { + media_config.stats_timestamp_with_environment_clock = enable; + } + bool cpu_adaptation() const { return media_config.video.enable_cpu_adaptation; } @@ -415,14 +430,14 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { IceTransportsType type = kAll; BundlePolicy bundle_policy = kBundlePolicyBalanced; RtcpMuxPolicy rtcp_mux_policy = kRtcpMuxPolicyRequire; - std::vector> certificates; + std::vector> certificates; int ice_candidate_pool_size = 0; ////////////////////////////////////////////////////////////////////////// // The below fields correspond to constraints from the deprecated // constraints interface for constructing a PeerConnection. // - // absl::optional fields can be "missing", in which case the implementation + // std::optional fields can be "missing", in which case the implementation // default will be used. 
////////////////////////////////////////////////////////////////////////// @@ -437,7 +452,7 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // and delaying ICE completion. // // Can be set to INT_MAX to effectively disable the limit. - int max_ipv6_networks = cricket::kDefaultMaxIPv6Networks; + int max_ipv6_networks = kDefaultMaxIPv6Networks; // Exclude link-local network interfaces // from consideration for gathering ICE candidates. @@ -446,16 +461,7 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // Minimum bitrate at which screencast video tracks will be encoded at. // This means adding padding bits up to this bitrate, which can help // when switching from a static scene to one with motion. - absl::optional screencast_min_bitrate; - -#if defined(WEBRTC_FUCHSIA) - // TODO(bugs.webrtc.org/11066): Remove entirely once Fuchsia does not use. - // TODO(bugs.webrtc.org/9891) - Move to crypto_options - // Can be used to disable DTLS-SRTP. This should never be done, but can be - // useful for testing purposes, for example in setting up a loopback call - // with a single PeerConnection. - absl::optional enable_dtls_srtp; -#endif + std::optional screencast_min_bitrate; ///////////////////////////////////////////////// // The below fields are not part of the standard. @@ -510,7 +516,7 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // Implementation defined settings. A public member only for the benefit of // the implementation. Applications must not access it directly, and should // instead use provided accessor methods, e.g., set_cpu_adaptation. - struct cricket::MediaConfig media_config; + struct MediaConfig media_config; // If set to true, only one preferred TURN allocation will be used per // network interface. UDP is preferred over TCP and IPv6 over IPv4. This @@ -579,28 +585,28 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // 3) ice_check_min_interval defines the minimal interval (equivalently the // maximum rate) that overrides the above two intervals when either of them // is less. - absl::optional ice_check_interval_strong_connectivity; - absl::optional ice_check_interval_weak_connectivity; - absl::optional ice_check_min_interval; + std::optional ice_check_interval_strong_connectivity; + std::optional ice_check_interval_weak_connectivity; + std::optional ice_check_min_interval; // The min time period for which a candidate pair must wait for response to // connectivity checks before it becomes unwritable. This parameter // overrides the default value in the ICE implementation if set. - absl::optional ice_unwritable_timeout; + std::optional ice_unwritable_timeout; // The min number of connectivity checks that a candidate pair must sent // without receiving response before it becomes unwritable. This parameter // overrides the default value in the ICE implementation if set. - absl::optional ice_unwritable_min_checks; + std::optional ice_unwritable_min_checks; // The min time period for which a candidate pair must wait for response to // connectivity checks it becomes inactive. This parameter overrides the // default value in the ICE implementation if set. - absl::optional ice_inactive_timeout; + std::optional ice_inactive_timeout; // The interval in milliseconds at which STUN candidates will resend STUN // binding requests to keep NAT bindings open. 
- absl::optional stun_candidate_keepalive_interval; + std::optional stun_candidate_keepalive_interval; // Optional TurnCustomizer. // With this class one can modify outgoing TURN messages. @@ -612,7 +618,7 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // A candidate pair on a preferred network has a higher precedence in ICE // than one on an un-preferred network, regardless of priority or network // cost. - absl::optional network_preference; + std::optional network_preference; // Configure the SDP semantics used by this PeerConnection. By default, this // is Unified Plan which is compliant to the WebRTC 1.0 specification. It is @@ -646,7 +652,7 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // Defines advanced optional cryptographic settings related to SRTP and // frame encryption for native WebRTC. Setting this will overwrite any // settings set in PeerConnectionFactory (which is deprecated). - absl::optional crypto_options; + std::optional crypto_options; // Configure if we should include the SDP attribute extmap-allow-mixed in // our offer on session level. @@ -661,16 +667,13 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // Added to be able to control rollout of this feature. bool enable_implicit_rollback = false; - // Whether network condition based codec switching is allowed. - absl::optional allow_codec_switching; - // The delay before doing a usage histogram report for long-lived // PeerConnections. Used for testing only. - absl::optional report_usage_pattern_delay_ms; + std::optional report_usage_pattern_delay_ms; // The ping interval (ms) when the connection is stable and writable. This // parameter overrides the default value in the ICE implementation if set. - absl::optional stable_writable_connection_ping_interval_ms; + std::optional stable_writable_connection_ping_interval_ms; // Whether this PeerConnection will avoid VPNs (kAvoidVpn), prefer VPNs // (kPreferVpn), only work over VPN (kOnlyUseVpn) or only work over non-VPN @@ -681,12 +684,12 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // List of address/length subnets that should be treated like // VPN (in case webrtc fails to auto detect them). - std::vector vpn_list; + std::vector vpn_list; PortAllocatorConfig port_allocator_config; // The burst interval of the pacer, see TaskQueuePacedSender constructor. - absl::optional pacer_burst_interval; + std::optional pacer_burst_interval; // // Don't forget to update operator== if adding something. @@ -758,12 +761,12 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // Accessor methods to active local streams. // This method is not supported with kUnifiedPlan semantics. Please use // GetSenders() instead. - virtual rtc::scoped_refptr local_streams() = 0; + virtual scoped_refptr local_streams() = 0; // Accessor methods to remote streams. // This method is not supported with kUnifiedPlan semantics. Please use // GetReceivers() instead. - virtual rtc::scoped_refptr remote_streams() = 0; + virtual scoped_refptr remote_streams() = 0; // Add a new MediaStream to be sent on this PeerConnection. // Note that a SessionDescription negotiation is needed before the @@ -795,8 +798,8 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // - INVALID_PARAMETER: `track` is null, has a kind other than audio or video, // or a sender already exists for the track. // - INVALID_STATE: The PeerConnection is closed. 
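The optional configuration fields above migrate from absl::optional to std::optional; call sites are otherwise unchanged. A small sketch with illustrative values:

#include "api/peer_connection_interface.h"

webrtc::PeerConnectionInterface::RTCConfiguration SketchConfig() {
  webrtc::PeerConnectionInterface::RTCConfiguration config;
  config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
  // Leaving these unset keeps the implementation defaults, exactly as before.
  config.screencast_min_bitrate = 300000;
  config.ice_check_min_interval = 25;
  config.stun_candidate_keepalive_interval = 15000;
  return config;
}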
- virtual RTCErrorOr> AddTrack( - rtc::scoped_refptr track, + virtual RTCErrorOr> AddTrack( + scoped_refptr track, const std::vector& stream_ids) = 0; // Add a new MediaStreamTrack as above, but with an additional parameter, @@ -804,8 +807,8 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // similar to init_send_encodings in RtpTransceiverInit. // Note that a new transceiver will always be created. // - virtual RTCErrorOr> AddTrack( - rtc::scoped_refptr track, + virtual RTCErrorOr> AddTrack( + scoped_refptr track, const std::vector& stream_ids, const std::vector& init_send_encodings) = 0; @@ -824,7 +827,7 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // TODO(bugs.webrtc.org/9534): Rename to RemoveTrack once the other signature // is removed; remove default implementation once upstream is updated. virtual RTCError RemoveTrackOrError( - rtc::scoped_refptr sender) { + scoped_refptr /* sender */) { RTC_CHECK_NOTREACHED(); return RTCError(); } @@ -853,22 +856,21 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // the track. // Errors: // - INVALID_PARAMETER: `track` is null. - virtual RTCErrorOr> - AddTransceiver(rtc::scoped_refptr track) = 0; - virtual RTCErrorOr> - AddTransceiver(rtc::scoped_refptr track, - const RtpTransceiverInit& init) = 0; - - // Adds a transceiver with the given kind. Can either be MEDIA_TYPE_AUDIO or - // MEDIA_TYPE_VIDEO. - // Errors: - // - INVALID_PARAMETER: `media_type` is not MEDIA_TYPE_AUDIO or - // MEDIA_TYPE_VIDEO. - virtual RTCErrorOr> - AddTransceiver(cricket::MediaType media_type) = 0; - virtual RTCErrorOr> - AddTransceiver(cricket::MediaType media_type, - const RtpTransceiverInit& init) = 0; + virtual RTCErrorOr> AddTransceiver( + scoped_refptr track) = 0; + virtual RTCErrorOr> AddTransceiver( + scoped_refptr track, + const RtpTransceiverInit& init) = 0; + + // Adds a transceiver with the given kind. Can either be + // webrtc::MediaType::AUDIO or webrtc::MediaType::VIDEO. Errors: + // - INVALID_PARAMETER: `media_type` is not webrtc::MediaType::AUDIO or + // webrtc::MediaType::VIDEO. + virtual RTCErrorOr> AddTransceiver( + webrtc::MediaType media_type) = 0; + virtual RTCErrorOr> AddTransceiver( + webrtc::MediaType media_type, + const RtpTransceiverInit& init) = 0; // Creates a sender without a track. Can be used for "early media"/"warmup" // use cases, where the application may want to negotiate video attributes @@ -884,7 +886,7 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // // This method is not supported with kUnifiedPlan semantics. Please use // AddTransceiver instead. - virtual rtc::scoped_refptr CreateSender( + virtual scoped_refptr CreateSender( const std::string& kind, const std::string& stream_id) = 0; @@ -894,8 +896,7 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // // If Unified Plan semantics are specified, gets the RtpSender for each // RtpTransceiver. - virtual std::vector> GetSenders() - const = 0; + virtual std::vector> GetSenders() const = 0; // If Plan B semantics are specified, gets all RtpReceivers created when a // remote description is applied. All receivers of a specific media type share @@ -905,7 +906,7 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // // If Unified Plan semantics are specified, gets the RtpReceiver for each // RtpTransceiver. 
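With cricket::MediaType replaced by webrtc::MediaType, an AddTransceiver() call now reads as below; the connection reference and the helper name are assumptions.

#include "api/media_types.h"
#include "api/peer_connection_interface.h"
#include "api/rtp_transceiver_interface.h"
#include "api/scoped_refptr.h"

void SketchAddAudioTransceiver(webrtc::PeerConnectionInterface& pc) {
  webrtc::RtpTransceiverInit init;
  init.direction = webrtc::RtpTransceiverDirection::kSendOnly;
  auto result = pc.AddTransceiver(webrtc::MediaType::AUDIO, init);
  if (result.ok()) {
    webrtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver =
        result.MoveValue();
  }
}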
- virtual std::vector> GetReceivers() + virtual std::vector> GetReceivers() const = 0; // Get all RtpTransceivers, created either through AddTransceiver, AddTrack or @@ -913,8 +914,8 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // // Note: This method is only available when Unified Plan is enabled (see // RTCConfiguration). - virtual std::vector> - GetTransceivers() const = 0; + virtual std::vector> GetTransceivers() + const = 0; // The legacy non-compliant GetStats() API. This correspond to the // callback-based version of getStats() in JavaScript. The returned metrics @@ -939,21 +940,19 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // version of getStats() in JavaScript. Implementation status is described in // api/stats/rtcstats_objects.h. For more details on stats, see spec: // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-getstats - // TODO(hbos): Takes shared ownership, use rtc::scoped_refptr<> instead. This - // requires stop overriding the current version in third party or making third - // party calls explicit to avoid ambiguity during switch. Make the future - // version abstract as soon as third party projects implement it. + // TODO(hbos): Takes shared ownership, use webrtc::scoped_refptr<> instead. + // This requires stop overriding the current version in third party or making + // third party calls explicit to avoid ambiguity during switch. Make the + // future version abstract as soon as third party projects implement it. virtual void GetStats(RTCStatsCollectorCallback* callback) = 0; // Spec-compliant getStats() performing the stats selection algorithm with the // sender. https://w3c.github.io/webrtc-pc/#dom-rtcrtpsender-getstats - virtual void GetStats( - rtc::scoped_refptr selector, - rtc::scoped_refptr callback) = 0; + virtual void GetStats(scoped_refptr selector, + scoped_refptr callback) = 0; // Spec-compliant getStats() performing the stats selection algorithm with the // receiver. https://w3c.github.io/webrtc-pc/#dom-rtcrtpreceiver-getstats - virtual void GetStats( - rtc::scoped_refptr selector, - rtc::scoped_refptr callback) = 0; + virtual void GetStats(scoped_refptr selector, + scoped_refptr callback) = 0; // Clear cached stats in the RTCStatsCollector. virtual void ClearStatsCache() {} @@ -964,15 +963,15 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // Also, calling CreateDataChannel is the only way to get a data "m=" section // in SDP, so it should be done before CreateOffer is called, if the // application plans to use data channels. - virtual RTCErrorOr> - CreateDataChannelOrError(const std::string& label, - const DataChannelInit* config) { + virtual RTCErrorOr> + CreateDataChannelOrError(const std::string& /* label */, + const DataChannelInit* /* config */) { return RTCError(RTCErrorType::INTERNAL_ERROR, "dummy function called"); } // TODO(crbug.com/788659): Remove "virtual" below and default implementation // above once mock in Chrome is fixed. ABSL_DEPRECATED("Use CreateDataChannelOrError") - virtual rtc::scoped_refptr CreateDataChannel( + virtual scoped_refptr CreateDataChannel( const std::string& label, const DataChannelInit* config) { auto result = CreateDataChannelOrError(label, config); @@ -1011,8 +1010,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // for negotiation and subsequent CreateOffer() calls will act as if // RTCOfferAnswerOptions::ice_restart is true. 
// https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-restartice - // TODO(hbos): Remove default implementation when downstream projects - // implement this. virtual void RestartIce() = 0; // Create a new offer. @@ -1037,15 +1034,15 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // The observer is invoked as soon as the operation completes, which could be // before or after the SetLocalDescription() method has exited. virtual void SetLocalDescription( - std::unique_ptr desc, - rtc::scoped_refptr observer) {} + std::unique_ptr /* desc */, + scoped_refptr /* observer */) {} // Creates an offer or answer (depending on current signaling state) and sets // it as the local session description. // // The observer is invoked as soon as the operation completes, which could be // before or after the SetLocalDescription() method has exited. virtual void SetLocalDescription( - rtc::scoped_refptr observer) {} + scoped_refptr /* observer */) {} // Like SetLocalDescription() above, but the observer is invoked with a delay // after the operation completes. This helps avoid recursive calls by the // observer but also makes it possible for states to change in-between the @@ -1055,7 +1052,8 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // ones taking SetLocalDescriptionObserverInterface as argument. virtual void SetLocalDescription(SetSessionDescriptionObserver* observer, SessionDescriptionInterface* desc) = 0; - virtual void SetLocalDescription(SetSessionDescriptionObserver* observer) {} + virtual void SetLocalDescription( + SetSessionDescriptionObserver* /* observer */) {} // Sets the remote session description. // @@ -1066,7 +1064,7 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // before or after the SetRemoteDescription() method has exited. virtual void SetRemoteDescription( std::unique_ptr desc, - rtc::scoped_refptr observer) = 0; + scoped_refptr observer) = 0; // Like SetRemoteDescription() above, but the observer is invoked with a delay // after the operation completes. This helps avoid recursive calls by the // observer but also makes it possible for states to change in-between the @@ -1074,8 +1072,9 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // for synchronizing peer connection states to the application. // TODO(https://crbug.com/webrtc/11798): Delete this method in favor of the // ones taking SetRemoteDescriptionObserverInterface as argument. - virtual void SetRemoteDescription(SetSessionDescriptionObserver* observer, - SessionDescriptionInterface* desc) {} + virtual void SetRemoteDescription( + SetSessionDescriptionObserver* /* observer */, + SessionDescriptionInterface* /* desc */) {} // According to spec, we must only fire "negotiationneeded" if the Operations // Chain is empty. This method takes care of validating an event previously @@ -1083,9 +1082,7 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // sure that even if there was a delay (e.g. due to a PostTask) between the // event being generated and the time of firing, the Operations Chain is empty // and the event is still valid to be fired. 
- virtual bool ShouldFireNegotiationNeededEvent(uint32_t event_id) { - return true; - } + virtual bool ShouldFireNegotiationNeededEvent(uint32_t event_id) = 0; virtual PeerConnectionInterface::RTCConfiguration GetConfiguration() = 0; @@ -1122,17 +1119,18 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { virtual bool AddIceCandidate(const IceCandidateInterface* candidate) = 0; // TODO(hbos): Remove default implementation once implemented by downstream // projects. - virtual void AddIceCandidate(std::unique_ptr candidate, - std::function callback) {} + virtual void AddIceCandidate( + std::unique_ptr /* candidate */, + std::function /* callback */) {} // Removes a group of remote candidates from the ICE agent. Needed mainly for // continual gathering, to avoid an ever-growing list of candidates as // networks come and go. Note that the candidates' transport_name must be set // to the MID of the m= section that generated the candidate. // TODO(bugs.webrtc.org/8395): Use IceCandidateInterface instead of - // cricket::Candidate, which would avoid the transport_name oddity. + // webrtc::Candidate, which would avoid the transport_name oddity. virtual bool RemoveIceCandidates( - const std::vector& candidates) = 0; + const std::vector& candidates) = 0; // SetBitrate limits the bandwidth allocated for all RTP streams sent by // this PeerConnection. Other limitations might affect these limits and @@ -1142,29 +1140,35 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // to the provided value. virtual RTCError SetBitrate(const BitrateSettings& bitrate) = 0; + // Allows an application to reconfigure bandwidth estimation. + // The method can be called both before and after estimation has started. + // Estimation starts when the first RTP packet is sent. + // Estimation will be restarted if already started. + virtual void ReconfigureBandwidthEstimation( + const BandwidthEstimationSettings& settings) = 0; + // Enable/disable playout of received audio streams. Enabled by default. Note // that even if playout is enabled, streams will only be played out if the // appropriate SDP is also applied. Setting `playout` to false will stop // playout of the underlying audio device but starts a task which will poll // for audio data every 10ms to ensure that audio processing happens and the // audio statistics are updated. - virtual void SetAudioPlayout(bool playout) {} + virtual void SetAudioPlayout(bool playout) = 0; // Enable/disable recording of transmitted audio streams. Enabled by default. // Note that even if recording is enabled, streams will only be recorded if // the appropriate SDP is also applied. - virtual void SetAudioRecording(bool recording) {} + virtual void SetAudioRecording(bool recording) = 0; // Looks up the DtlsTransport associated with a MID value. // In the Javascript API, DtlsTransport is a property of a sender, but // because the PeerConnection owns the DtlsTransport in this implementation, // it is better to look them up on the PeerConnection. - virtual rtc::scoped_refptr LookupDtlsTransportByMid( + virtual scoped_refptr LookupDtlsTransportByMid( const std::string& mid) = 0; // Returns the SCTP transport, if any. - virtual rtc::scoped_refptr GetSctpTransport() - const = 0; + virtual scoped_refptr GetSctpTransport() const = 0; // Returns the current SignalingState. 
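A sketch of the new ReconfigureBandwidthEstimation() hook next to the existing SetBitrate(); the allow_probe_without_media field is an assumption about api/transport/bandwidth_estimation_settings.h, which is not part of this diff.

#include "api/peer_connection_interface.h"
#include "api/transport/bandwidth_estimation_settings.h"
#include "api/transport/bitrate_settings.h"

void SketchBweControls(webrtc::PeerConnectionInterface& pc) {
  webrtc::BandwidthEstimationSettings settings;
  settings.allow_probe_without_media = true;  // Assumed field name.
  // Valid before or after the first RTP packet; a running estimator restarts.
  pc.ReconfigureBandwidthEstimation(settings);

  webrtc::BitrateSettings bitrate;
  bitrate.max_bitrate_bps = 2'000'000;
  pc.SetBitrate(bitrate);
}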
virtual SignalingState signaling_state() = 0; @@ -1185,28 +1189,25 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // Returns the current state of canTrickleIceCandidates per // https://w3c.github.io/webrtc-pc/#attributes-1 - virtual absl::optional can_trickle_ice_candidates() { - // TODO(crbug.com/708484): Remove default implementation. - return absl::nullopt; - } + virtual std::optional can_trickle_ice_candidates() = 0; // When a resource is overused, the PeerConnection will try to reduce the load // on the sysem, for example by reducing the resolution or frame rate of // encoded streams. The Resource API allows injecting platform-specific usage // measurements. The conditions to trigger kOveruse or kUnderuse are up to the // implementation. - // TODO(hbos): Make pure virtual when implemented by downstream projects. - virtual void AddAdaptationResource(rtc::scoped_refptr resource) {} + virtual void AddAdaptationResource(scoped_refptr resource) = 0; // Start RtcEventLog using an existing output-sink. Takes ownership of - // `output` and passes it on to Call, which will take the ownership. If the - // operation fails the output will be closed and deallocated. The event log - // will send serialized events to the output object every `output_period_ms`. - // Applications using the event log should generally make their own trade-off - // regarding the output period. A long period is generally more efficient, - // with potential drawbacks being more bursty thread usage, and more events - // lost in case the application crashes. If the `output_period_ms` argument is - // omitted, webrtc selects a default deemed to be workable in most cases. + // `output` and passes it on to Call, which will take the ownership. If + // the operation fails the output will be closed and deallocated. The + // event log will send serialized events to the output object every + // `output_period_ms`. Applications using the event log should generally + // make their own trade-off regarding the output period. A long period is + // generally more efficient, with potential drawbacks being more bursty + // thread usage, and more events lost in case the application crashes. If + // the `output_period_ms` argument is omitted, webrtc selects a default + // deemed to be workable in most cases. virtual bool StartRtcEventLog(std::unique_ptr output, int64_t output_period_ms) = 0; virtual bool StartRtcEventLog(std::unique_ptr output) = 0; @@ -1214,6 +1215,9 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // Stops logging the RtcEventLog. virtual void StopRtcEventLog() = 0; + virtual void SetDataChannelEventObserver( + std::unique_ptr observer) = 0; + // Terminates all media, closes the transports, and in general releases any // resources used by the PeerConnection. This is an irreversible operation. // @@ -1227,8 +1231,11 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // // Also the only thread on which it's safe to use SessionDescriptionInterface // pointers. - // TODO(deadbeef): Make pure virtual when all subclasses implement it. - virtual rtc::Thread* signaling_thread() const { return nullptr; } + virtual Thread* signaling_thread() const = 0; + + // NetworkController instance being used by this PeerConnection, to be used + // to identify instances when using a custom NetworkControllerFactory. + virtual NetworkControllerInterface* GetNetworkController() = 0; protected: // Dtor protected as objects shouldn't be deleted via this interface. 
@@ -1246,15 +1253,15 @@ class PeerConnectionObserver { PeerConnectionInterface::SignalingState new_state) = 0; // Triggered when media is received on a new stream from remote peer. - virtual void OnAddStream(rtc::scoped_refptr stream) {} + virtual void OnAddStream(scoped_refptr /* stream */) {} // Triggered when a remote peer closes a stream. - virtual void OnRemoveStream(rtc::scoped_refptr stream) { - } + virtual void OnRemoveStream( + scoped_refptr /* stream */) {} // Triggered when a remote peer opens a data channel. virtual void OnDataChannel( - rtc::scoped_refptr data_channel) = 0; + scoped_refptr data_channel) = 0; // Triggered when renegotiation is needed. For example, an ICE restart // has begun. @@ -1268,7 +1275,7 @@ class PeerConnectionObserver { // PeerConnection::ShouldFireNegotiationNeededEvent() returns true since it is // possible for the event to become invalidated by operations subsequently // chained. - virtual void OnNegotiationNeededEvent(uint32_t event_id) {} + virtual void OnNegotiationNeededEvent(uint32_t /* event_id */) {} // Called any time the legacy IceConnectionState changes. // @@ -1279,15 +1286,15 @@ class PeerConnectionObserver { // // TODO(jonasolsson): deprecate and remove this. virtual void OnIceConnectionChange( - PeerConnectionInterface::IceConnectionState new_state) {} + PeerConnectionInterface::IceConnectionState /* new_state */) {} // Called any time the standards-compliant IceConnectionState changes. virtual void OnStandardizedIceConnectionChange( - PeerConnectionInterface::IceConnectionState new_state) {} + PeerConnectionInterface::IceConnectionState /* new_state */) {} // Called any time the PeerConnectionState changes. virtual void OnConnectionChange( - PeerConnectionInterface::PeerConnectionState new_state) {} + PeerConnectionInterface::PeerConnectionState /* new_state */) {} // Called any time the IceGatheringState changes. virtual void OnIceGatheringChange( @@ -1298,24 +1305,24 @@ class PeerConnectionObserver { // Gathering of an ICE candidate failed. // See https://w3c.github.io/webrtc-pc/#event-icecandidateerror - virtual void OnIceCandidateError(const std::string& address, - int port, - const std::string& url, - int error_code, - const std::string& error_text) {} + virtual void OnIceCandidateError(const std::string& /* address */, + int /* port */, + const std::string& /* url */, + int /* error_code */, + const std::string& /* error_text */) {} // Ice candidates have been removed. // TODO(honghaiz): Make this a pure virtual method when all its subclasses // implement it. virtual void OnIceCandidatesRemoved( - const std::vector& candidates) {} + const std::vector& /* candidates */) {} // Called when the ICE connection receiving status changes. - virtual void OnIceConnectionReceivingChange(bool receiving) {} + virtual void OnIceConnectionReceivingChange(bool /* receiving */) {} // Called when the selected candidate pair for the ICE connection changes. virtual void OnIceSelectedCandidatePairChanged( - const cricket::CandidatePairChangeEvent& event) {} + const CandidatePairChangeEvent& /* event */) {} // This is called when a receiver and its track are created. // TODO(zhihuang): Make this pure virtual when all subclasses implement it. @@ -1323,8 +1330,8 @@ class PeerConnectionObserver { // Plan users should prefer OnTrack, OnAddTrack is only called as backwards // compatibility (and is called in the exact same situations as OnTrack). 
virtual void OnAddTrack( - rtc::scoped_refptr receiver, - const std::vector>& streams) {} + scoped_refptr /* receiver */, + const std::vector>& /* streams */) {} // This is called when signaling indicates a transceiver will be receiving // media from the remote endpoint. This is fired during a call to @@ -1336,7 +1343,7 @@ class PeerConnectionObserver { // RTCSessionDescription" algorithm: // https://w3c.github.io/webrtc-pc/#set-description virtual void OnTrack( - rtc::scoped_refptr transceiver) {} + scoped_refptr /* transceiver */) {} // Called when signaling indicates that media will no longer be received on a // track. @@ -1347,7 +1354,7 @@ class PeerConnectionObserver { // https://w3c.github.io/webrtc-pc/#process-remote-track-removal // TODO(hbos,deadbeef): Make pure virtual when all subclasses implement it. virtual void OnRemoveTrack( - rtc::scoped_refptr receiver) {} + scoped_refptr /* receiver */) {} // Called when an interesting usage is detected by WebRTC. // An appropriate action is to add information about the context of the @@ -1355,7 +1362,7 @@ class PeerConnectionObserver { // log function. // The heuristics for defining what constitutes "interesting" are // implementation-defined. - virtual void OnInterestingUsage(int usage_pattern) {} + virtual void OnInterestingUsage(int /* usage_pattern */) {} }; // PeerConnectionDependencies holds all of PeerConnections dependencies. @@ -1381,20 +1388,19 @@ struct RTC_EXPORT PeerConnectionDependencies final { // TODO(bugs.webrtc.org/7447): remove port allocator once downstream is // updated. The recommended way to inject networking components is to pass a // PacketSocketFactory when creating the PeerConnectionFactory. - std::unique_ptr allocator; + std::unique_ptr allocator; // Factory for creating resolvers that look up hostnames in DNS std::unique_ptr async_dns_resolver_factory; - // Deprecated - use async_dns_resolver_factory - // Deprecation is in abeyance until Chromium is updated. - // TODO(crbug.com/1475925): Deprecate once Chromium is updated - // [[deprecated("Use async_dns_resolver_factory")]] - std::unique_ptr async_resolver_factory; std::unique_ptr ice_transport_factory; - std::unique_ptr cert_generator; - std::unique_ptr tls_cert_verifier; + std::unique_ptr cert_generator; + std::unique_ptr tls_cert_verifier; std::unique_ptr video_bitrate_allocator_factory; + // Optional network controller factory to use. + // Overrides that set in PeerConnectionFactoryDependencies. + std::unique_ptr network_controller_factory; + // Optional field trials to use. // Overrides those from PeerConnectionFactoryDependencies. std::unique_ptr trials; @@ -1420,16 +1426,14 @@ struct RTC_EXPORT PeerConnectionFactoryDependencies final { ~PeerConnectionFactoryDependencies(); // Optional dependencies - rtc::Thread* network_thread = nullptr; - rtc::Thread* worker_thread = nullptr; - rtc::Thread* signaling_thread = nullptr; - rtc::SocketFactory* socket_factory = nullptr; + Thread* network_thread = nullptr; + Thread* worker_thread = nullptr; + Thread* signaling_thread = nullptr; + SocketFactory* socket_factory = nullptr; // The `packet_socket_factory` will only be used if CreatePeerConnection is // called without a `port_allocator`. 
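For observers, mainly the parameter spellings change (webrtc::scoped_refptr, webrtc::CandidatePairChangeEvent). A minimal concrete observer might look like the following; the class name is illustrative and only a subset of the callbacks is overridden.

#include "api/peer_connection_interface.h"
#include "api/scoped_refptr.h"

class SketchObserver : public webrtc::PeerConnectionObserver {
 public:
  void OnSignalingChange(
      webrtc::PeerConnectionInterface::SignalingState new_state) override {}
  void OnDataChannel(webrtc::scoped_refptr<webrtc::DataChannelInterface>
                         data_channel) override {}
  void OnRenegotiationNeeded() override {}
  void OnIceGatheringChange(
      webrtc::PeerConnectionInterface::IceGatheringState new_state) override {}
  void OnIceCandidate(
      const webrtc::IceCandidateInterface* candidate) override {}
};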
- std::unique_ptr packet_socket_factory; + std::unique_ptr packet_socket_factory; std::unique_ptr task_queue_factory; - std::unique_ptr media_engine; - std::unique_ptr call_factory; std::unique_ptr event_log_factory; std::unique_ptr fec_controller_factory; std::unique_ptr @@ -1438,16 +1442,41 @@ struct RTC_EXPORT PeerConnectionFactoryDependencies final { // The `network_manager` will only be used if CreatePeerConnection is called // without a `port_allocator`, causing the default allocator and network // manager to be used. - std::unique_ptr network_manager; + std::unique_ptr network_manager; // The `network_monitor_factory` will only be used if CreatePeerConnection is // called without a `port_allocator`, and the above `network_manager` is null. - std::unique_ptr network_monitor_factory; + std::unique_ptr network_monitor_factory; std::unique_ptr neteq_factory; std::unique_ptr sctp_factory; std::unique_ptr trials; std::unique_ptr transport_controller_send_factory; - std::unique_ptr metronome; + // Metronome used for decoding, must be called on the worker thread. + std::unique_ptr decode_metronome; + // Metronome used for encoding, must be called on the worker thread. + // TODO(b/304158952): Consider merging into a single metronome for all codec + // usage. + std::unique_ptr encode_metronome; + + // Media specific dependencies. Unused when `media_factory == nullptr`. + scoped_refptr adm; + scoped_refptr audio_encoder_factory; + scoped_refptr audio_decoder_factory; + scoped_refptr audio_mixer; + // TODO: bugs.webrtc.org/369904700 - Delete `audio_processing` in favor + // of `audio_processing_builder`. + [[deprecated]] scoped_refptr audio_processing; + std::unique_ptr audio_processing_builder; + std::unique_ptr audio_frame_processor; + std::unique_ptr video_encoder_factory; + std::unique_ptr video_decoder_factory; + + // The `media_factory` member allows webrtc to be optionally built without + // media support (i.e., if only being used for data channels). + // By default media is disabled. To enable media call + // `EnableMedia(PeerConnectionFactoryDependencies&)`. Definition of the + // `MediaFactory` interface is a webrtc implementation detail. + std::unique_ptr media_factory; }; // PeerConnectionFactoryInterface is the factory interface used for creating @@ -1464,7 +1493,7 @@ struct RTC_EXPORT PeerConnectionFactoryDependencies final { // CreatePeerConnectionFactory method which accepts threads as input, and use // the CreatePeerConnection version that takes a PortAllocator as an argument. class RTC_EXPORT PeerConnectionFactoryInterface - : public rtc::RefCountInterface { + : public webrtc::RefCountInterface { public: class Options { public: @@ -1485,15 +1514,15 @@ class RTC_EXPORT PeerConnectionFactoryInterface // Sets the network types to ignore. For instance, calling this with // ADAPTER_TYPE_ETHERNET | ADAPTER_TYPE_LOOPBACK will ignore Ethernet and // loopback interfaces. - int network_ignore_mask = rtc::kDefaultNetworkIgnoreMask; + int network_ignore_mask = kDefaultNetworkIgnoreMask; // Sets the maximum supported protocol version. The highest version // supported by both ends will be used for the connection, i.e. if one // party supports DTLS 1.0 and the other DTLS 1.2, DTLS 1.0 will be used. - rtc::SSLProtocolVersion ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12; + SSLProtocolVersion ssl_max_version = SSL_PROTOCOL_DTLS_12; // Sets crypto related options, e.g. enabled cipher suites.
- CryptoOptions crypto_options = CryptoOptions::NoGcm(); + CryptoOptions crypto_options = {}; }; // Set the options to be used for subsequently created PeerConnections. @@ -1501,70 +1530,46 @@ class RTC_EXPORT PeerConnectionFactoryInterface // The preferred way to create a new peer connection. Simply provide the // configuration and a PeerConnectionDependencies structure. - // TODO(benwright): Make pure virtual once downstream mock PC factory classes - // are updated. - virtual RTCErrorOr> + virtual RTCErrorOr> CreatePeerConnectionOrError( const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies dependencies); - // Deprecated creator - does not return an error code on error. - // TODO(bugs.webrtc.org:12238): Deprecate and remove. - ABSL_DEPRECATED("Use CreatePeerConnectionOrError") - virtual rtc::scoped_refptr CreatePeerConnection( - const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies dependencies); - - // Deprecated; `allocator` and `cert_generator` may be null, in which case - // default implementations will be used. - // - // `observer` must not be null. - // - // Note that this method does not take ownership of `observer`; it's the - // responsibility of the caller to delete it. It can be safely deleted after - // Close has been called on the returned PeerConnection, which ensures no - // more observer callbacks will be invoked. - ABSL_DEPRECATED("Use CreatePeerConnectionOrError") - virtual rtc::scoped_refptr CreatePeerConnection( - const PeerConnectionInterface::RTCConfiguration& configuration, - std::unique_ptr allocator, - std::unique_ptr cert_generator, - PeerConnectionObserver* observer); + PeerConnectionDependencies dependencies) = 0; // Returns the capabilities of an RTP sender of type `kind`. - // If for some reason you pass in MEDIA_TYPE_DATA, returns an empty structure. - // TODO(orphis): Make pure virtual when all subclasses implement it. + // If for some reason you pass in webrtc::MediaType::DATA, returns an empty + // structure. virtual RtpCapabilities GetRtpSenderCapabilities( - cricket::MediaType kind) const; + webrtc::MediaType kind) const = 0; // Returns the capabilities of an RTP receiver of type `kind`. - // If for some reason you pass in MEDIA_TYPE_DATA, returns an empty structure. - // TODO(orphis): Make pure virtual when all subclasses implement it. + // If for some reason you pass in webrtc::MediaType::DATA, returns an empty + // structure. virtual RtpCapabilities GetRtpReceiverCapabilities( - cricket::MediaType kind) const; + webrtc::MediaType kind) const = 0; - virtual rtc::scoped_refptr CreateLocalMediaStream( + virtual scoped_refptr CreateLocalMediaStream( const std::string& stream_id) = 0; // Creates an AudioSourceInterface. // `options` decides audio processing settings. - virtual rtc::scoped_refptr CreateAudioSource( - const cricket::AudioOptions& options) = 0; + virtual scoped_refptr CreateAudioSource( + const AudioOptions& options) = 0; // Creates a new local VideoTrack. The same `source` can be used in several // tracks. 
- virtual rtc::scoped_refptr CreateVideoTrack( - rtc::scoped_refptr source, + virtual scoped_refptr CreateVideoTrack( + scoped_refptr source, absl::string_view label) = 0; ABSL_DEPRECATED("Use version with scoped_refptr") - virtual rtc::scoped_refptr CreateVideoTrack( + virtual scoped_refptr CreateVideoTrack( const std::string& label, VideoTrackSourceInterface* source) { - return CreateVideoTrack( - rtc::scoped_refptr(source), label); + return CreateVideoTrack(scoped_refptr(source), + label); } // Creates an new AudioTrack. At the moment `source` can be null. - virtual rtc::scoped_refptr CreateAudioTrack( + virtual scoped_refptr CreateAudioTrack( const std::string& label, AudioSourceInterface* source) = 0; @@ -1577,7 +1582,7 @@ class RTC_EXPORT PeerConnectionFactoryInterface // StopAecDump function is called. // TODO(webrtc:6463): Delete default implementation when downstream mocks // classes are updated. - virtual bool StartAecDump(FILE* file, int64_t max_size_bytes) { + virtual bool StartAecDump(FILE* /* file */, int64_t /* max_size_bytes */) { return false; } @@ -1606,8 +1611,8 @@ class RTC_EXPORT PeerConnectionFactoryInterface // If `network_thread` or `worker_thread` are null, the PeerConnectionFactory // will create the necessary thread internally. If `signaling_thread` is null, // the PeerConnectionFactory will use the thread on which this method is called -// as the signaling thread, wrapping it in an rtc::Thread object if needed. -RTC_EXPORT rtc::scoped_refptr +// as the signaling thread, wrapping it in an webrtc::Thread object if needed. +RTC_EXPORT scoped_refptr CreateModularPeerConnectionFactory( PeerConnectionFactoryDependencies dependencies); diff --git a/api/priority.h b/api/priority.h index 4953e453a3..2735c2c282 100644 --- a/api/priority.h +++ b/api/priority.h @@ -11,6 +11,11 @@ #ifndef API_PRIORITY_H_ #define API_PRIORITY_H_ +#include + +#include "rtc_base/checks.h" +#include "rtc_base/strong_alias.h" + namespace webrtc { // GENERATED_JAVA_ENUM_PACKAGE: org.webrtc @@ -21,6 +26,31 @@ enum class Priority { kHigh, }; +class PriorityValue + : public webrtc::StrongAlias { + public: + explicit PriorityValue(Priority priority) { + switch (priority) { + case Priority::kVeryLow: + value_ = 128; + break; + case Priority::kLow: + value_ = 256; + break; + case Priority::kMedium: + value_ = 512; + break; + case Priority::kHigh: + value_ = 1024; + break; + default: + RTC_CHECK_NOTREACHED(); + } + } + + explicit PriorityValue(uint16_t priority) : StrongAlias(priority) {} +}; + } // namespace webrtc #endif // API_PRIORITY_H_ diff --git a/api/ref_count.h b/api/ref_count.h new file mode 100644 index 0000000000..b3fb7630c8 --- /dev/null +++ b/api/ref_count.h @@ -0,0 +1,67 @@ +/* + * Copyright 2011 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef API_REF_COUNT_H_ +#define API_REF_COUNT_H_ + +namespace webrtc { + +// Refcounted objects should implement the following informal interface: +// +// void AddRef() const ; +// RefCountReleaseStatus Release() const; +// +// You may access members of a reference-counted object, including the AddRef() +// and Release() methods, only if you already own a reference to it, or if +// you're borrowing someone else's reference. (A newly created object is a +// special case: the reference count is zero on construction, and the code that +// creates the object should immediately call AddRef(), bringing the reference +// count from zero to one, e.g., by constructing an webrtc::scoped_refptr). +// +// AddRef() creates a new reference to the object. +// +// Release() releases a reference to the object; the caller now has one less +// reference than before the call. Returns kDroppedLastRef if the number of +// references dropped to zero because of this (in which case the object destroys +// itself). Otherwise, returns kOtherRefsRemained, to signal that at the precise +// time the caller's reference was dropped, other references still remained (but +// if other threads own references, this may of course have changed by the time +// Release() returns). +// +// The caller of Release() must treat it in the same way as a delete operation: +// Regardless of the return value from Release(), the caller mustn't access the +// object. The object might still be alive, due to references held by other +// users of the object, but the object can go away at any time, e.g., as the +// result of another thread calling Release(). +// +// Calling AddRef() and Release() manually is discouraged. It's recommended to +// use webrtc::scoped_refptr to manage all pointers to reference counted +// objects. Note that webrtc::scoped_refptr depends on compile-time duck-typing; +// formally implementing the below RefCountInterface is not required. + +enum class RefCountReleaseStatus { kDroppedLastRef, kOtherRefsRemained }; + +// Interfaces where refcounting is part of the public api should +// inherit this abstract interface. The implementation of these +// methods is usually provided by the RefCountedObject template class, +// applied as a leaf in the inheritance tree. +class RefCountInterface { + public: + virtual void AddRef() const = 0; + virtual RefCountReleaseStatus Release() const = 0; + + // Non-public destructor, because Release() has exclusive responsibility for + // destroying the object. + protected: + virtual ~RefCountInterface() {} +}; + +} // namespace webrtc + +#endif // API_REF_COUNT_H_ diff --git a/api/ref_counted_base.h b/api/ref_counted_base.h index f20228b740..8862ebdcad 100644 --- a/api/ref_counted_base.h +++ b/api/ref_counted_base.h @@ -12,9 +12,10 @@ #include +#include "api/ref_count.h" #include "rtc_base/ref_counter.h" -namespace rtc { +namespace webrtc { class RefCountedBase { public: @@ -48,11 +49,11 @@ class RefCountedBase { // vtable. // // To use: -// struct MyInt : public rtc::RefCountedNonVirtual { +// struct MyInt : public webrtc::RefCountedNonVirtual { // int foo_ = 0; // }; // -// rtc::scoped_refptr my_int(new MyInt()); +// webrtc::scoped_refptr my_int(new MyInt()); // // sizeof(MyInt) on a 32 bit system would then be 8, int + refcount and no // vtable generated. @@ -72,7 +73,7 @@ class RefCountedNonVirtual { // so the virtual attribute(s) can be removed. // 2) The virtual methods are a part of the design of the class. 
In this // case you can consider using `RefCountedBase` instead or alternatively - // use `rtc::RefCountedObject`. + // use `webrtc::RefCountedObject`. static_assert(!std::is_polymorphic::value, "T has virtual methods. RefCountedBase is a better fit."); const auto status = ref_count_.DecRef(); @@ -93,6 +94,16 @@ class RefCountedNonVirtual { mutable webrtc::webrtc_impl::RefCounter ref_count_{0}; }; +} // namespace webrtc + +// Backwards compatible aliases. +// TODO: https://issues.webrtc.org/42225969 - deprecate and remove. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using RefCountedBase = webrtc::RefCountedBase; +template +using RefCountedNonVirtual = webrtc::RefCountedNonVirtual; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // API_REF_COUNTED_BASE_H_ diff --git a/api/rtc_error.cc b/api/rtc_error.cc index 0aa4304386..eb4f90118b 100644 --- a/api/rtc_error.cc +++ b/api/rtc_error.cc @@ -11,6 +11,7 @@ #include "api/rtc_error.h" #include +#include #include "absl/strings/string_view.h" diff --git a/api/rtc_error.h b/api/rtc_error.h index 7adf30eacf..2ed33c41c8 100644 --- a/api/rtc_error.h +++ b/api/rtc_error.h @@ -11,14 +11,15 @@ #ifndef API_RTC_ERROR_H_ #define API_RTC_ERROR_H_ -#ifdef WEBRTC_UNIT_TEST -#include -#endif // WEBRTC_UNIT_TEST +#include + +#include #include +#include #include // For std::move. +#include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/system/rtc_export.h" @@ -96,6 +97,24 @@ enum class RTCErrorDetailType { HARDWARE_ENCODER_ERROR, }; +// Outputs the error as a friendly string. Update this method when adding a new +// error type. +// +// Only intended to be used for logging/diagnostics. The returned char* points +// to literal strings that live for the whole duration of the program. +RTC_EXPORT absl::string_view ToString(RTCErrorType error); +RTC_EXPORT absl::string_view ToString(RTCErrorDetailType error); + +template +void AbslStringify(Sink& sink, RTCErrorType error) { + sink.Append(ToString(error)); +} + +template +void AbslStringify(Sink& sink, RTCErrorDetailType error_detail) { + sink.Append(ToString(error_detail)); +} + // Roughly corresponds to RTCError in the web api. Holds an error type, a // message, and possibly additional information specific to that error. // @@ -138,7 +157,7 @@ class RTC_EXPORT RTCError { RTCErrorDetailType error_detail() const { return error_detail_; } void set_error_detail(RTCErrorDetailType detail) { error_detail_ = detail; } - absl::optional sctp_cause_code() const { return sctp_cause_code_; } + std::optional sctp_cause_code() const { return sctp_cause_code_; } void set_sctp_cause_code(uint16_t cause_code) { sctp_cause_code_ = cause_code; } @@ -147,35 +166,23 @@ class RTC_EXPORT RTCError { // error occurred. bool ok() const { return type_ == RTCErrorType::NONE; } + template + friend void AbslStringify(Sink& sink, const RTCError& error) { + sink.Append(ToString(error.type_)); + if (!error.message_.empty()) { + sink.Append(" with message: \""); + sink.Append(error.message_); + sink.Append("\""); + } + } + private: RTCErrorType type_ = RTCErrorType::NONE; std::string message_; RTCErrorDetailType error_detail_ = RTCErrorDetailType::NONE; - absl::optional sctp_cause_code_; + std::optional sctp_cause_code_; }; -// Outputs the error as a friendly string. Update this method when adding a new -// error type. -// -// Only intended to be used for logging/diagnostics.
The returned char* points -// to literal string that lives for the whole duration of the program. -RTC_EXPORT absl::string_view ToString(RTCErrorType error); -RTC_EXPORT absl::string_view ToString(RTCErrorDetailType error); - -#ifdef WEBRTC_UNIT_TEST -inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) - std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) - RTCErrorType error) { - return stream << ToString(error); -} - -inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) - std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) - RTCErrorDetailType error) { - return stream << ToString(error); -} -#endif // WEBRTC_UNIT_TEST - // Helper macro that can be used by implementations to create an error with a // message and log it. `message` should be a string literal or movable // std::string. @@ -322,9 +329,22 @@ class RTCErrorOr { return std::move(*value_); } + template + friend void AbslStringify(Sink& sink, const RTCErrorOr& error_or) { + if (error_or.ok()) { + sink.Append("OK"); + if constexpr (std::is_convertible_v) { + sink.Append(" with value: "); + sink.Append(absl::StrCat(error_or.value())); + } + } else { + sink.Append(absl::StrCat(error_or.error())); + } + } + private: RTCError error_; - absl::optional value_; + std::optional value_; }; } // namespace webrtc diff --git a/api/rtc_error_unittest.cc b/api/rtc_error_unittest.cc index 29dd002b14..860f3ead05 100644 --- a/api/rtc_error_unittest.cc +++ b/api/rtc_error_unittest.cc @@ -10,8 +10,12 @@ #include "api/rtc_error.h" +#include #include +#include "absl/strings/str_cat.h" +#include "absl/strings/string_view.h" +#include "rtc_base/checks.h" #include "test/gtest.h" namespace webrtc { @@ -144,6 +148,11 @@ TEST(RTCErrorTest, SetMessage) { EXPECT_STREQ(e.message(), "string"); } +TEST(RTCErrorTest, Stringify) { + RTCError e(RTCErrorType::INVALID_PARAMETER, "foo"); + EXPECT_EQ(absl::StrCat(e), "INVALID_PARAMETER with message: \"foo\""); +} + // Test that the default constructor creates an "INTERNAL_ERROR". TEST(RTCErrorOrTest, DefaultConstructor) { RTCErrorOr e; @@ -209,6 +218,26 @@ TEST(RTCErrorOrTest, MoveValue) { EXPECT_EQ(value.value, 88); } +TEST(RTCErrorOrTest, StringifyWithUnprintableValue) { + RTCErrorOr e(MoveOnlyInt(1337)); + EXPECT_EQ(absl::StrCat(e), "OK"); +} + +TEST(RTCErrorOrTest, StringifyWithStringValue) { + RTCErrorOr e("foo"); + EXPECT_EQ(absl::StrCat(e), "OK with value: foo"); +} + +TEST(RTCErrorOrTest, StringifyWithPrintableValue) { + RTCErrorOr e(1337); + EXPECT_EQ(absl::StrCat(e), "OK with value: 1337"); +} + +TEST(RTCErrorOrTest, StringifyWithError) { + RTCErrorOr e({RTCErrorType::SYNTAX_ERROR, "message"}); + EXPECT_EQ(absl::StrCat(e), "SYNTAX_ERROR with message: \"message\""); +} + // Death tests. // Disabled on Android because death tests misbehave on Android, see // base/test/gtest_util.h. 
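The AbslStringify hooks added to RTCError and RTCErrorOr above make both types usable directly with absl::StrCat and the logging macros, as the new unit tests demonstrate. A minimal usage sketch, not part of this patch; ParseBitrate, LogBitrate, and the example namespace are illustrative names only:

#include "absl/strings/numbers.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/string_view.h"
#include "api/rtc_error.h"
#include "rtc_base/logging.h"

namespace example {

// Hypothetical helper: turns a string into a positive bitrate or an error.
webrtc::RTCErrorOr<int> ParseBitrate(absl::string_view text) {
  int value = 0;
  if (!absl::SimpleAtoi(text, &value) || value <= 0) {
    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
                            "bitrate must be a positive integer");
  }
  return value;
}

void LogBitrate(absl::string_view text) {
  webrtc::RTCErrorOr<int> result = ParseBitrate(text);
  // With AbslStringify, both the OK case ("OK with value: 1337") and the
  // error case ("INVALID_PARAMETER with message: ...") stringify directly.
  RTC_LOG(LS_INFO) << absl::StrCat(result);
}

}  // namespace example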
diff --git a/api/rtc_event_log/BUILD.gn b/api/rtc_event_log/BUILD.gn index 158dc06a7b..fcf5e7e87a 100644 --- a/api/rtc_event_log/BUILD.gn +++ b/api/rtc_event_log/BUILD.gn @@ -15,14 +15,21 @@ rtc_library("rtc_event_log") { "rtc_event.h", "rtc_event_log.cc", "rtc_event_log.h", - "rtc_event_log_factory_interface.h", ] deps = [ "..:libjingle_logging_api", - "../../rtc_base:checks", "../../rtc_base:timeutils", - "../task_queue", + ] +} + +rtc_source_set("rtc_event_log_factory_interface") { + visibility = [ "*" ] + sources = [ "rtc_event_log_factory_interface.h" ] + deps = [ + ":rtc_event_log", + "../environment", + "//third_party/abseil-cpp/absl/base:nullability", ] } @@ -35,10 +42,12 @@ rtc_library("rtc_event_log_factory") { deps = [ ":rtc_event_log", - "../../rtc_base:checks", + ":rtc_event_log_factory_interface", + "..:field_trials_view", "../../rtc_base/system:rtc_export", - "../../system_wrappers:field_trial", + "../environment", "../task_queue", + "//third_party/abseil-cpp/absl/base:nullability", ] if (rtc_enable_protobuf) { diff --git a/api/rtc_event_log/rtc_event.cc b/api/rtc_event_log/rtc_event.cc index 631188b915..8a98fc8183 100644 --- a/api/rtc_event_log/rtc_event.cc +++ b/api/rtc_event_log/rtc_event.cc @@ -14,6 +14,6 @@ namespace webrtc { -RtcEvent::RtcEvent() : timestamp_us_(rtc::TimeMillis() * 1000) {} +RtcEvent::RtcEvent() : timestamp_us_(TimeMillis() * 1000) {} } // namespace webrtc diff --git a/api/rtc_event_log/rtc_event_log.cc b/api/rtc_event_log/rtc_event_log.cc index 56189c0ff7..9437a23408 100644 --- a/api/rtc_event_log/rtc_event_log.cc +++ b/api/rtc_event_log/rtc_event_log.cc @@ -10,6 +10,11 @@ #include "api/rtc_event_log/rtc_event_log.h" +#include +#include + +#include "api/rtc_event_log_output.h" + namespace webrtc { bool RtcEventLogNull::StartLogging( diff --git a/api/rtc_event_log/rtc_event_log.h b/api/rtc_event_log/rtc_event_log.h index 7b42cdc028..55dee7d36e 100644 --- a/api/rtc_event_log/rtc_event_log.h +++ b/api/rtc_event_log/rtc_event_log.h @@ -18,7 +18,6 @@ #include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log_output.h" -#include "api/task_queue/task_queue_factory.h" namespace webrtc { @@ -61,7 +60,7 @@ class RtcEventLogNull final : public RtcEventLog { bool StartLogging(std::unique_ptr output, int64_t output_period_ms) override; void StopLogging() override {} - void Log(std::unique_ptr event) override {} + void Log(std::unique_ptr /* event */) override {} }; } // namespace webrtc diff --git a/api/rtc_event_log/rtc_event_log_factory.cc b/api/rtc_event_log/rtc_event_log_factory.cc index a3cb68cf54..6a8ac33866 100644 --- a/api/rtc_event_log/rtc_event_log_factory.cc +++ b/api/rtc_event_log/rtc_event_log_factory.cc @@ -11,10 +11,11 @@ #include "api/rtc_event_log/rtc_event_log_factory.h" #include -#include -#include "rtc_base/checks.h" -#include "system_wrappers/include/field_trial.h" +#include "absl/base/nullability.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" +#include "api/rtc_event_log/rtc_event_log.h" #ifdef WEBRTC_ENABLE_RTC_EVENT_LOG #include "logging/rtc_event_log/rtc_event_log_impl.h" @@ -22,27 +23,16 @@ namespace webrtc { -RtcEventLogFactory::RtcEventLogFactory(TaskQueueFactory* task_queue_factory) - : task_queue_factory_(task_queue_factory) { - RTC_DCHECK(task_queue_factory_); -} - -std::unique_ptr RtcEventLogFactory::Create( - RtcEventLog::EncodingType encoding_type) const { -#ifdef WEBRTC_ENABLE_RTC_EVENT_LOG - if (field_trial::IsEnabled("WebRTC-RtcEventLogKillSwitch")) { +absl_nonnull std::unique_ptr 
RtcEventLogFactory::Create( + const Environment& env) const { +#ifndef WEBRTC_ENABLE_RTC_EVENT_LOG + return std::make_unique(); +#else + if (env.field_trials().IsEnabled("WebRTC-RtcEventLogKillSwitch")) { return std::make_unique(); } - return std::make_unique( - RtcEventLogImpl::CreateEncoder(encoding_type), task_queue_factory_); -#else - return std::make_unique(); + return std::make_unique(env); #endif } -std::unique_ptr RtcEventLogFactory::CreateRtcEventLog( - RtcEventLog::EncodingType encoding_type) { - return Create(encoding_type); -} - } // namespace webrtc diff --git a/api/rtc_event_log/rtc_event_log_factory.h b/api/rtc_event_log/rtc_event_log_factory.h index fd1db3c728..c00821bccd 100644 --- a/api/rtc_event_log/rtc_event_log_factory.h +++ b/api/rtc_event_log/rtc_event_log_factory.h @@ -13,6 +13,8 @@ #include +#include "absl/base/nullability.h" +#include "api/environment/environment.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/rtc_event_log/rtc_event_log_factory_interface.h" #include "api/task_queue/task_queue_factory.h" @@ -22,16 +24,15 @@ namespace webrtc { class RTC_EXPORT RtcEventLogFactory : public RtcEventLogFactoryInterface { public: - explicit RtcEventLogFactory(TaskQueueFactory* task_queue_factory); - ~RtcEventLogFactory() override {} + RtcEventLogFactory() = default; - std::unique_ptr Create( - RtcEventLog::EncodingType encoding_type) const override; - std::unique_ptr CreateRtcEventLog( - RtcEventLog::EncodingType encoding_type) override; + [[deprecated("Use default constructor")]] // + explicit RtcEventLogFactory(TaskQueueFactory* /* task_queue_factory */) {} - private: - TaskQueueFactory* const task_queue_factory_; + ~RtcEventLogFactory() override = default; + + absl_nonnull std::unique_ptr Create( + const Environment& env) const override; }; } // namespace webrtc diff --git a/api/rtc_event_log/rtc_event_log_factory_interface.h b/api/rtc_event_log/rtc_event_log_factory_interface.h index a6f4dee92f..e9cd1e5626 100644 --- a/api/rtc_event_log/rtc_event_log_factory_interface.h +++ b/api/rtc_event_log/rtc_event_log_factory_interface.h @@ -13,6 +13,8 @@ #include +#include "absl/base/nullability.h" +#include "api/environment/environment.h" #include "api/rtc_event_log/rtc_event_log.h" namespace webrtc { @@ -24,10 +26,8 @@ class RtcEventLogFactoryInterface { public: virtual ~RtcEventLogFactoryInterface() = default; - virtual std::unique_ptr Create( - RtcEventLog::EncodingType encoding_type) const = 0; - [[deprecated]] virtual std::unique_ptr CreateRtcEventLog( - RtcEventLog::EncodingType encoding_type) = 0; + virtual absl_nonnull std::unique_ptr Create( + const Environment& env) const = 0; }; } // namespace webrtc diff --git a/api/rtc_event_log_output.h b/api/rtc_event_log_output.h index f1f84a5f3a..37062d39d6 100644 --- a/api/rtc_event_log_output.h +++ b/api/rtc_event_log_output.h @@ -11,8 +11,6 @@ #ifndef API_RTC_EVENT_LOG_OUTPUT_H_ #define API_RTC_EVENT_LOG_OUTPUT_H_ -#include - #include "absl/strings/string_view.h" namespace webrtc { diff --git a/api/rtc_event_log_output_file.cc b/api/rtc_event_log_output_file.cc index e1d4c7c711..b31e39a3e7 100644 --- a/api/rtc_event_log_output_file.cc +++ b/api/rtc_event_log_output_file.cc @@ -10,12 +10,17 @@ #include "api/rtc_event_log_output_file.h" +#include +#include #include +#include #include +#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event_log.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/system/file_wrapper.h" namespace webrtc { diff --git 
a/api/rtc_event_log_output_file.h b/api/rtc_event_log_output_file.h index c9ae0a8ede..1f94b0d20a 100644 --- a/api/rtc_event_log_output_file.h +++ b/api/rtc_event_log_output_file.h @@ -16,6 +16,7 @@ #include +#include "absl/strings/string_view.h" #include "api/rtc_event_log_output.h" #include "rtc_base/system/file_wrapper.h" diff --git a/api/rtc_event_log_output_file_unittest.cc b/api/rtc_event_log_output_file_unittest.cc index 0aff57fbbc..b1441c7cf7 100644 --- a/api/rtc_event_log_output_file_unittest.cc +++ b/api/rtc_event_log_output_file_unittest.cc @@ -10,7 +10,11 @@ #include "api/rtc_event_log_output_file.h" +#include + +#include #include +#include #include #include #include @@ -34,7 +38,8 @@ class RtcEventLogOutputFileTest : public ::testing::Test { protected: std::string GetOutputFilePath() const { auto test_info = ::testing::UnitTest::GetInstance()->current_test_info(); - return test::OutputPath() + test_info->test_case_name() + test_info->name(); + return test::OutputPathWithRandomDirectory() + test_info->test_case_name() + + test_info->name(); } std::string GetOutputFileContents() const { diff --git a/api/rtp_headers.cc b/api/rtp_headers.cc index 0573e54684..c272ccae06 100644 --- a/api/rtp_headers.cc +++ b/api/rtp_headers.cc @@ -10,8 +10,20 @@ #include "api/rtp_headers.h" +#include "api/video/video_content_type.h" +#include "api/video/video_rotation.h" +#include "rtc_base/checks.h" + namespace webrtc { +AudioLevel::AudioLevel() : voice_activity_(false), audio_level_(0) {} + +AudioLevel::AudioLevel(bool voice_activity, int audio_level) + : voice_activity_(voice_activity), audio_level_(audio_level) { + RTC_CHECK_GE(audio_level, 0); + RTC_CHECK_LE(audio_level, 127); +} + RTPHeaderExtension::RTPHeaderExtension() : hasTransmissionTimeOffset(false), transmissionTimeOffset(0), @@ -19,9 +31,6 @@ RTPHeaderExtension::RTPHeaderExtension() absoluteSendTime(0), hasTransportSequenceNumber(false), transportSequenceNumber(0), - hasAudioLevel(false), - voiceActivity(false), - audioLevel(0), hasVideoRotation(false), videoRotation(kVideoRotation_0), hasVideoContentType(false), diff --git a/api/rtp_headers.h b/api/rtp_headers.h index 5d4d4190d5..6ba3292fca 100644 --- a/api/rtp_headers.h +++ b/api/rtp_headers.h @@ -14,15 +14,16 @@ #include #include +#include #include -#include "absl/types/optional.h" -#include "api/array_view.h" #include "api/units/timestamp.h" #include "api/video/color_space.h" #include "api/video/video_content_type.h" #include "api/video/video_rotation.h" #include "api/video/video_timing.h" +#include "rtc_base/checks.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -74,7 +75,30 @@ struct AbsoluteCaptureTime { // system’s NTP clock: // // Capture NTP Clock = Sender NTP Clock + Capture Clock Offset - absl::optional estimated_capture_clock_offset; + std::optional estimated_capture_clock_offset; +}; + +// The audio level extension is used to indicate the voice activity and the +// audio level of the payload in the RTP stream. See: +// https://tools.ietf.org/html/rfc6464#section-3. +class AudioLevel { + public: + AudioLevel(); + AudioLevel(bool voice_activity, int audio_level); + AudioLevel(const AudioLevel& other) = default; + AudioLevel& operator=(const AudioLevel& other) = default; + + // Flag indicating whether the encoder believes the audio packet contains + // voice activity. + bool voice_activity() const { return voice_activity_; } + + // Audio level in -dBov. Values range from 0 to 127, representing 0 to -127 + // dBov. 127 represents digital silence. 
+ int level() const { return audio_level_; } + + private: + bool voice_activity_; + int audio_level_; }; inline bool operator==(const AbsoluteCaptureTime& lhs, @@ -107,16 +131,18 @@ struct RTPHeaderExtension { int32_t transmissionTimeOffset; bool hasAbsoluteSendTime; uint32_t absoluteSendTime; - absl::optional absolute_capture_time; + std::optional absolute_capture_time; bool hasTransportSequenceNumber; uint16_t transportSequenceNumber; - absl::optional feedback_request; + std::optional feedback_request; // Audio Level includes both level in dBov and voiced/unvoiced bit. See: // https://tools.ietf.org/html/rfc6464#section-3 - bool hasAudioLevel; - bool voiceActivity; - uint8_t audioLevel; + std::optional audio_level() const { return audio_level_; } + + void set_audio_level(std::optional audio_level) { + audio_level_ = audio_level; + } // For Coordination of Video Orientation. See // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/ @@ -124,7 +150,7 @@ struct RTPHeaderExtension { bool hasVideoRotation; VideoRotation videoRotation; - // TODO(ilnik): Refactor this and one above to be absl::optional() and remove + // TODO(ilnik): Refactor this and one above to be std::optional() and remove // a corresponding bool flag. bool hasVideoContentType; VideoContentType videoContentType; @@ -143,7 +169,10 @@ struct RTPHeaderExtension { // https://tools.ietf.org/html/rfc8843 std::string mid; - absl::optional color_space; + std::optional color_space; + + private: + std::optional audio_level_; }; enum { kRtpCsrcSize = 15 }; // RFC 3550 page 13 diff --git a/api/rtp_packet_info.cc b/api/rtp_packet_info.cc index cba274ec38..90cd27515c 100644 --- a/api/rtp_packet_info.cc +++ b/api/rtp_packet_info.cc @@ -10,8 +10,15 @@ #include "api/rtp_packet_info.h" +#include + #include +#include #include +#include + +#include "api/rtp_headers.h" +#include "api/units/timestamp.h" namespace webrtc { @@ -37,8 +44,8 @@ RtpPacketInfo::RtpPacketInfo(const RTPHeader& rtp_header, csrcs_.assign(&rtp_header.arrOfCSRCs[0], &rtp_header.arrOfCSRCs[csrcs_count]); - if (extension.hasAudioLevel) { - audio_level_ = extension.audioLevel; + if (extension.audio_level()) { + audio_level_ = extension.audio_level()->level(); } absolute_capture_time_ = extension.absolute_capture_time; diff --git a/api/rtp_packet_info.h b/api/rtp_packet_info.h index 8df12a36cf..8e8236d8f0 100644 --- a/api/rtp_packet_info.h +++ b/api/rtp_packet_info.h @@ -12,10 +12,10 @@ #define API_RTP_PACKET_INFO_H_ #include +#include #include #include -#include "absl/types/optional.h" #include "api/rtp_headers.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -56,26 +56,26 @@ class RTC_EXPORT RtpPacketInfo { Timestamp receive_time() const { return receive_time_; } void set_receive_time(Timestamp value) { receive_time_ = value; } - absl::optional audio_level() const { return audio_level_; } - RtpPacketInfo& set_audio_level(absl::optional value) { + std::optional audio_level() const { return audio_level_; } + RtpPacketInfo& set_audio_level(std::optional value) { audio_level_ = value; return *this; } - const absl::optional& absolute_capture_time() const { + const std::optional& absolute_capture_time() const { return absolute_capture_time_; } RtpPacketInfo& set_absolute_capture_time( - const absl::optional& value) { + const std::optional& value) { absolute_capture_time_ = value; return *this; } - const absl::optional& local_capture_clock_offset() const { + const std::optional& local_capture_clock_offset() const { return 
local_capture_clock_offset_; } RtpPacketInfo& set_local_capture_clock_offset( - absl::optional value) { + std::optional value) { local_capture_clock_offset_ = value; return *this; } @@ -92,18 +92,18 @@ class RTC_EXPORT RtpPacketInfo { // Fields from the Audio Level header extension: // https://tools.ietf.org/html/rfc6464#section-3 - absl::optional audio_level_; + std::optional audio_level_; // Fields from the Absolute Capture Time header extension: // http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time - absl::optional absolute_capture_time_; + std::optional absolute_capture_time_; // Clock offset between the local clock and the capturer's clock. // Do not confuse with `AbsoluteCaptureTime::estimated_capture_clock_offset` // which instead represents the clock offset between a remote sender and the // capturer. The following holds: // Capture's NTP Clock = Local NTP Clock + Local-Capture Clock Offset - absl::optional local_capture_clock_offset_; + std::optional local_capture_clock_offset_; }; bool operator==(const RtpPacketInfo& lhs, const RtpPacketInfo& rhs); diff --git a/api/rtp_packet_info_unittest.cc b/api/rtp_packet_info_unittest.cc index d35edf75db..9aea11d24c 100644 --- a/api/rtp_packet_info_unittest.cc +++ b/api/rtp_packet_info_unittest.cc @@ -8,9 +8,15 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "api/rtp_packet_infos.h" +#include "api/rtp_packet_info.h" + +#include +#include +#include + +#include "api/rtp_headers.h" #include "api/units/time_delta.h" -#include "test/gmock.h" +#include "api/units/timestamp.h" #include "test/gtest.h" namespace webrtc { @@ -128,7 +134,7 @@ TEST(RtpPacketInfoTest, ReceiveTimeMs) { } TEST(RtpPacketInfoTest, AudioLevel) { - constexpr absl::optional kValue = 31; + constexpr std::optional kValue = 31; RtpPacketInfo lhs; RtpPacketInfo rhs; @@ -157,7 +163,7 @@ TEST(RtpPacketInfoTest, AudioLevel) { } TEST(RtpPacketInfoTest, AbsoluteCaptureTime) { - constexpr absl::optional kValue = AbsoluteCaptureTime{ + constexpr std::optional kValue = AbsoluteCaptureTime{ .absolute_capture_timestamp = 12, .estimated_capture_clock_offset = 34}; RtpPacketInfo lhs; @@ -207,7 +213,7 @@ TEST(RtpPacketInfoTest, LocalCaptureClockOffset) { EXPECT_FALSE(lhs != rhs); rhs = RtpPacketInfo(); - EXPECT_EQ(rhs.local_capture_clock_offset(), absl::nullopt); + EXPECT_EQ(rhs.local_capture_clock_offset(), std::nullopt); rhs = RtpPacketInfo(/*ssrc=*/{}, /*csrcs=*/{}, /*rtp_timestamp=*/{}, /*receive_time=*/Timestamp::Zero()); diff --git a/api/rtp_packet_infos.h b/api/rtp_packet_infos.h index 7445729fbb..e5e106c649 100644 --- a/api/rtp_packet_infos.h +++ b/api/rtp_packet_infos.h @@ -11,7 +11,6 @@ #ifndef API_RTP_PACKET_INFOS_H_ #define API_RTP_PACKET_INFOS_H_ -#include #include #include @@ -80,24 +79,24 @@ class RTC_EXPORT RtpPacketInfos { size_type size() const { return entries().size(); } private: - class Data final : public rtc::RefCountedNonVirtual { + class Data final : public RefCountedNonVirtual { public: - static rtc::scoped_refptr Create(const vector_type& entries) { + static scoped_refptr Create(const vector_type& entries) { // Performance optimization for the empty case. if (entries.empty()) { return nullptr; } - return rtc::make_ref_counted(entries); + return make_ref_counted(entries); } - static rtc::scoped_refptr Create(vector_type&& entries) { + static scoped_refptr Create(vector_type&& entries) { // Performance optimization for the empty case. 
if (entries.empty()) { return nullptr; } - return rtc::make_ref_counted(std::move(entries)); + return make_ref_counted(std::move(entries)); } const vector_type& entries() const { return entries_; } @@ -123,7 +122,7 @@ class RTC_EXPORT RtpPacketInfos { } } - rtc::scoped_refptr data_; + scoped_refptr data_; }; } // namespace webrtc diff --git a/api/rtp_packet_infos_unittest.cc b/api/rtp_packet_infos_unittest.cc index a90cfa03e2..544bb93b38 100644 --- a/api/rtp_packet_infos_unittest.cc +++ b/api/rtp_packet_infos_unittest.cc @@ -10,6 +10,11 @@ #include "api/rtp_packet_infos.h" +#include + +#include "api/rtp_headers.h" +#include "api/rtp_packet_info.h" +#include "api/units/timestamp.h" #include "test/gmock.h" #include "test/gtest.h" diff --git a/modules/rtp_rtcp/include/rtp_packet_sender.h b/api/rtp_packet_sender.h similarity index 75% rename from modules/rtp_rtcp/include/rtp_packet_sender.h rename to api/rtp_packet_sender.h index ebc65298a5..92220a1fbf 100644 --- a/modules/rtp_rtcp/include/rtp_packet_sender.h +++ b/api/rtp_packet_sender.h @@ -8,17 +8,17 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_RTP_RTCP_INCLUDE_RTP_PACKET_SENDER_H_ -#define MODULES_RTP_RTCP_INCLUDE_RTP_PACKET_SENDER_H_ +#ifndef API_RTP_PACKET_SENDER_H_ +#define API_RTP_PACKET_SENDER_H_ +#include #include #include -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" - namespace webrtc { +class RtpPacketToSend; + class RtpPacketSender { public: virtual ~RtpPacketSender() = default; @@ -32,9 +32,9 @@ class RtpPacketSender { // Clear any pending packets with the given SSRC from the queue. // TODO(crbug.com/1395081): Make pure virtual when downstream code has been // updated. - virtual void RemovePacketsForSsrc(uint32_t ssrc) {} + virtual void RemovePacketsForSsrc(uint32_t /* ssrc */) {} }; } // namespace webrtc -#endif // MODULES_RTP_RTCP_INCLUDE_RTP_PACKET_SENDER_H_ +#endif // API_RTP_PACKET_SENDER_H_ diff --git a/api/rtp_parameters.cc b/api/rtp_parameters.cc index 54132bcdbb..bb2f500fd4 100644 --- a/api/rtp_parameters.cc +++ b/api/rtp_parameters.cc @@ -10,11 +10,16 @@ #include "api/rtp_parameters.h" #include +#include #include #include -#include +#include +#include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/rtp_transceiver_direction.h" +#include "media/base/media_constants.h" +#include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" namespace webrtc { @@ -47,6 +52,13 @@ RtcpFeedback::~RtcpFeedback() = default; RtpCodec::RtpCodec() = default; RtpCodec::RtpCodec(const RtpCodec&) = default; RtpCodec::~RtpCodec() = default; +bool RtpCodec::IsResiliencyCodec() const { + return name == kRtxCodecName || name == kRedCodecName || + name == kUlpfecCodecName || name == kFlexfecCodecName; +} +bool RtpCodec::IsMediaCodec() const { + return !IsResiliencyCodec() && name != kComfortNoiseCodecName; +} RtpCodecCapability::RtpCodecCapability() = default; RtpCodecCapability::~RtpCodecCapability() = default; @@ -63,6 +75,15 @@ RtpHeaderExtensionCapability::RtpHeaderExtensionCapability( int preferred_id, RtpTransceiverDirection direction) : uri(uri), preferred_id(preferred_id), direction(direction) {} +RtpHeaderExtensionCapability::RtpHeaderExtensionCapability( + absl::string_view uri, + int preferred_id, + bool preferred_encrypt, + RtpTransceiverDirection direction) + : uri(uri), + preferred_id(preferred_id), + preferred_encrypt(preferred_encrypt), + direction(direction) {} 
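The four-argument RtpHeaderExtensionCapability constructor defined above lets callers state an encryption preference alongside the URI, ID, and direction. A hedged sketch of client-side use, not part of this patch; the function name and the chosen URI/ID are illustrative:

#include "api/rtp_parameters.h"
#include "api/rtp_transceiver_direction.h"

// Illustrative: prefer an encrypted audio-level extension in both directions.
webrtc::RtpHeaderExtensionCapability MakeEncryptedAudioLevelPreference() {
  return webrtc::RtpHeaderExtensionCapability(
      "urn:ietf:params:rtp-hdrext:ssrc-audio-level", /*preferred_id=*/1,
      /*preferred_encrypt=*/true,
      webrtc::RtpTransceiverDirection::kSendRecv);
}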
RtpHeaderExtensionCapability::~RtpHeaderExtensionCapability() = default; RtpExtension::RtpExtension() = default; @@ -106,7 +127,7 @@ RtpParameters::~RtpParameters() = default; std::string RtpExtension::ToString() const { char buf[256]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); sb << "{uri: " << uri; sb << ", id: " << id; if (encrypt) { @@ -136,6 +157,7 @@ constexpr char RtpExtension::kRidUri[]; constexpr char RtpExtension::kRepairedRidUri[]; constexpr char RtpExtension::kVideoFrameTrackingIdUri[]; constexpr char RtpExtension::kCsrcAudioLevelsUri[]; +constexpr char RtpExtension::kCorruptionDetectionUri[]; constexpr int RtpExtension::kMinId; constexpr int RtpExtension::kMaxId; @@ -171,7 +193,8 @@ bool RtpExtension::IsSupportedForVideo(absl::string_view uri) { uri == webrtc::RtpExtension::kRidUri || uri == webrtc::RtpExtension::kRepairedRidUri || uri == webrtc::RtpExtension::kVideoLayersAllocationUri || - uri == webrtc::RtpExtension::kVideoFrameTrackingIdUri; + uri == webrtc::RtpExtension::kVideoFrameTrackingIdUri || + uri == webrtc::RtpExtension::kCorruptionDetectionUri; } bool RtpExtension::IsEncryptionSupported(absl::string_view uri) { diff --git a/api/rtp_parameters.h b/api/rtp_parameters.h index 09473a6ce9..57d77f2fe4 100644 --- a/api/rtp_parameters.h +++ b/api/rtp_parameters.h @@ -14,12 +14,13 @@ #include #include +#include #include #include #include "absl/container/inlined_vector.h" +#include "absl/strings/str_format.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/media_types.h" #include "api/priority.h" #include "api/rtp_transceiver_direction.h" @@ -29,6 +30,8 @@ namespace webrtc { +using CodecParameterMap = std::map; + // These structures are intended to mirror those defined by: // http://draft.ortc.org/#rtcrtpdictionaries* // Contains everything specified as of 2017 Jan 24. @@ -107,7 +110,7 @@ struct RTC_EXPORT RtcpFeedback { // 1. It's an enum instead of a string. // 2. Generic NACK feedback is represented by a GENERIC_NACK message type, // rather than an unset "parameter" value. - absl::optional message_type; + std::optional message_type; // Constructors for convenience. RtcpFeedback(); @@ -134,17 +137,17 @@ struct RTC_EXPORT RtpCodec { std::string name; // The media type of this codec. Equivalent to MIME top-level type. - cricket::MediaType kind = cricket::MEDIA_TYPE_AUDIO; + webrtc::MediaType kind = webrtc::MediaType::AUDIO; // If unset, the implementation default is used. - absl::optional clock_rate; + std::optional clock_rate; // The number of audio channels used. Unset for video codecs. If unset for // audio, the implementation default is used. // TODO(deadbeef): The "implementation default" part isn't fully implemented. // Only defaults to 1, even though some codecs (such as opus) should really // default to 2. - absl::optional num_channels; + std::optional num_channels; // Feedback mechanisms to be used for this codec. // TODO(deadbeef): Not implemented with PeerConnection senders/receivers. @@ -165,6 +168,8 @@ struct RTC_EXPORT RtpCodec { parameters == o.parameters; } bool operator!=(const RtpCodec& o) const { return !(*this == o); } + bool IsResiliencyCodec() const; + bool IsMediaCodec() const; }; // RtpCodecCapability is to RtpCodecParameters as RtpCapabilities is to @@ -176,7 +181,7 @@ struct RTC_EXPORT RtpCodecCapability : public RtpCodec { // Default payload type for this codec. Mainly needed for codecs that have // statically assigned payload types. 
- absl::optional preferred_payload_type; + std::optional preferred_payload_type; // List of scalability modes supported by the video codec. absl::InlinedVector scalability_modes; @@ -187,6 +192,16 @@ struct RTC_EXPORT RtpCodecCapability : public RtpCodec { scalability_modes == o.scalability_modes; } bool operator!=(const RtpCodecCapability& o) const { return !(*this == o); } + + template + friend void AbslStringify(Sink& sink, const RtpCodecCapability& cap) { + if (cap.kind == webrtc::MediaType::AUDIO) { + absl::Format(&sink, "[audio/%s/%d/%d]", cap.name, + cap.clock_rate.value_or(0), cap.num_channels.value_or(1)); + } else { + absl::Format(&sink, "[video/%s]", cap.name); + } + } }; // Used in RtpCapabilities and RtpTransceiverInterface's header extensions query @@ -198,17 +213,17 @@ struct RTC_EXPORT RtpCodecCapability : public RtpCodec { // RtpHeaderExtensionParameters. // // Note that ORTC includes a "kind" field, but we omit this because it's -// redundant; if you call "RtpReceiver::GetCapabilities(MEDIA_TYPE_AUDIO)", -// you know you're getting audio capabilities. +// redundant; if you call +// "RtpReceiver::GetCapabilities(webrtc::MediaType::AUDIO)", you know you're +// getting audio capabilities. struct RTC_EXPORT RtpHeaderExtensionCapability { // URI of this extension, as defined in RFC8285. std::string uri; // Preferred value of ID that goes in the packet. - absl::optional preferred_id; + std::optional preferred_id; // If true, it's preferred that the value in the header is encrypted. - // TODO(deadbeef): Not implemented. bool preferred_encrypt = false; // The direction of the extension. The kStopped value is only used with @@ -223,6 +238,10 @@ struct RTC_EXPORT RtpHeaderExtensionCapability { RtpHeaderExtensionCapability(absl::string_view uri, int preferred_id, RtpTransceiverDirection direction); + RtpHeaderExtensionCapability(absl::string_view uri, + int preferred_id, + bool preferred_encrypt, + RtpTransceiverDirection direction); ~RtpHeaderExtensionCapability(); bool operator==(const RtpHeaderExtensionCapability& o) const { @@ -373,6 +392,10 @@ struct RTC_EXPORT RtpExtension { static constexpr char kCsrcAudioLevelsUri[] = "urn:ietf:params:rtp-hdrext:csrc-audio-level"; + // Header extension for automatic corruption detection. + static constexpr char kCorruptionDetectionUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/corruption-detection"; + // Inclusive min and max IDs for two-byte header extensions and one-byte // header extensions, per RFC8285 Section 4.2-4.3. static constexpr int kMinId = 1; @@ -384,12 +407,21 @@ struct RTC_EXPORT RtpExtension { std::string uri; int id = 0; bool encrypt = false; + + template + friend void AbslStringify(Sink& sink, const RtpExtension& extension) { + if (extension.encrypt) { + absl::Format(&sink, "[%d %s (encrypted)]", extension.id, extension.uri); + } else { + absl::Format(&sink, "[%d %s]", extension.id, extension.uri); + } + } }; struct RTC_EXPORT RtpFecParameters { // If unset, a value is chosen by the implementation. // Works just like RtpEncodingParameters::ssrc. - absl::optional ssrc; + std::optional ssrc; FecMechanism mechanism = FecMechanism::RED; @@ -409,7 +441,7 @@ struct RTC_EXPORT RtpFecParameters { struct RTC_EXPORT RtpRtxParameters { // If unset, a value is chosen by the implementation. // Works just like RtpEncodingParameters::ssrc. - absl::optional ssrc; + std::optional ssrc; // Constructors for convenience. 
RtpRtxParameters(); @@ -432,7 +464,7 @@ struct RTC_EXPORT RtpEncodingParameters { // may change due to an SSRC conflict, in which case the conflict is handled // internally without any event. Another way of looking at this is that an // unset SSRC acts as a "wildcard" SSRC. - absl::optional ssrc; + std::optional ssrc; // The relative bitrate priority of this encoding. Currently this is // implemented for the entire rtp sender by using the value of the first @@ -467,42 +499,35 @@ struct RTC_EXPORT RtpEncodingParameters { // bandwidth for the entire bandwidth estimator (audio and video). This is // just always how "b=AS" was handled, but it's not correct and should be // fixed. - absl::optional max_bitrate_bps; + std::optional max_bitrate_bps; // Specifies the minimum bitrate in bps for video. - absl::optional min_bitrate_bps; + std::optional min_bitrate_bps; // Specifies the maximum framerate in fps for video. - absl::optional max_framerate; + std::optional max_framerate; // Specifies the number of temporal layers for video (if the feature is // supported by the codec implementation). // Screencast support is experimental. - absl::optional num_temporal_layers; + std::optional num_temporal_layers; // For video, scale the resolution down by this factor. - absl::optional scale_resolution_down_by; + std::optional scale_resolution_down_by; // https://w3c.github.io/webrtc-svc/#rtcrtpencodingparameters - absl::optional scalability_mode; + std::optional scalability_mode; - // Requested encode resolution. - // - // This field provides an alternative to `scale_resolution_down_by` - // that is not dependent on the video source. - // - // When setting requested_resolution it is not necessary to adapt the - // video source using OnOutputFormatRequest, since the VideoStreamEncoder - // will apply downscaling if necessary. requested_resolution will also be - // propagated to the video source, this allows downscaling earlier in the - // pipeline which can be beneficial if the source is consumed by multiple - // encoders, but is not strictly necessary. + // This is an alternative API to `scale_resolution_down_by` but expressed in + // absolute terms (max width and max height) as opposed to relative terms (a + // scaling factor that is relative to the input frame size). // - // The `requested_resolution` is subject to resource adaptation. + // If both `scale_resolution_down_by` and `scale_resolution_down_to` are + // specified, the "scale by" value is ignored. // - // It is an error to set both `requested_resolution` and - // `scale_resolution_down_by`. - absl::optional requested_resolution; + // See spec: + // https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-scaleresolutiondownto + std::optional scale_resolution_down_to; // For an RtpSender, set to true to cause this encoding to be encoded and // sent, and false for it not to be encoded and sent. This allows control @@ -522,7 +547,7 @@ struct RTC_EXPORT RtpEncodingParameters { bool adaptive_ptime = false; // Allow changing the used codec for this encoding. 
- absl::optional codec; + std::optional codec; bool operator==(const RtpEncodingParameters& o) const { return ssrc == o.ssrc && bitrate_priority == o.bitrate_priority && @@ -534,7 +559,8 @@ struct RTC_EXPORT RtpEncodingParameters { scale_resolution_down_by == o.scale_resolution_down_by && active == o.active && rid == o.rid && adaptive_ptime == o.adaptive_ptime && - requested_resolution == o.requested_resolution && codec == o.codec; + scale_resolution_down_to == o.scale_resolution_down_to && + codec == o.codec; } bool operator!=(const RtpEncodingParameters& o) const { return !(*this == o); @@ -589,7 +615,7 @@ struct RtcpParameters final { // The SSRC to be used in the "SSRC of packet sender" field. If not set, one // will be chosen by the implementation. // TODO(deadbeef): Not implemented. - absl::optional ssrc; + std::optional ssrc; // The Canonical Name (CNAME) used by RTCP (e.g. in SDES messages). // @@ -645,7 +671,7 @@ struct RTC_EXPORT RtpParameters { // When bandwidth is constrained and the RtpSender needs to choose between // degrading resolution or degrading framerate, degradationPreference // indicates which is preferred. Only for video tracks. - absl::optional degradation_preference; + std::optional degradation_preference; bool operator==(const RtpParameters& o) const { return mid == o.mid && codecs == o.codecs && diff --git a/api/rtp_parameters_unittest.cc b/api/rtp_parameters_unittest.cc index 234c3c9b6d..0dc3cb5bd6 100644 --- a/api/rtp_parameters_unittest.cc +++ b/api/rtp_parameters_unittest.cc @@ -10,6 +10,8 @@ #include "api/rtp_parameters.h" +#include + #include "test/gtest.h" namespace webrtc { diff --git a/api/rtp_receiver_interface.cc b/api/rtp_receiver_interface.cc index bc9aef5aef..7d300f745b 100644 --- a/api/rtp_receiver_interface.cc +++ b/api/rtp_receiver_interface.cc @@ -10,14 +10,24 @@ #include "api/rtp_receiver_interface.h" +#include +#include + +#include "api/crypto/frame_decryptor_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/frame_transformer_interface.h" +#include "api/media_stream_interface.h" +#include "api/scoped_refptr.h" +#include "api/transport/rtp/rtp_source.h" + namespace webrtc { std::vector RtpReceiverInterface::stream_ids() const { return {}; } -std::vector> -RtpReceiverInterface::streams() const { +std::vector> RtpReceiverInterface::streams() + const { return {}; } @@ -26,19 +36,19 @@ std::vector RtpReceiverInterface::GetSources() const { } void RtpReceiverInterface::SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) {} + scoped_refptr /* frame_decryptor */) {} -rtc::scoped_refptr -RtpReceiverInterface::GetFrameDecryptor() const { +scoped_refptr RtpReceiverInterface::GetFrameDecryptor() + const { return nullptr; } -rtc::scoped_refptr -RtpReceiverInterface::dtls_transport() const { +scoped_refptr RtpReceiverInterface::dtls_transport() + const { return nullptr; } -void RtpReceiverInterface::SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) {} +void RtpReceiverInterface::SetFrameTransformer( + scoped_refptr /* frame_transformer */) {} } // namespace webrtc diff --git a/api/rtp_receiver_interface.h b/api/rtp_receiver_interface.h index e4ec9b5986..29df678e18 100644 --- a/api/rtp_receiver_interface.h +++ b/api/rtp_receiver_interface.h @@ -14,7 +14,9 @@ #ifndef API_RTP_RECEIVER_INTERFACE_H_ #define API_RTP_RECEIVER_INTERFACE_H_ +#include #include +#include #include #include "api/crypto/frame_decryptor_interface.h" @@ -22,10 +24,10 @@ #include "api/frame_transformer_interface.h" #include 
"api/media_stream_interface.h" #include "api/media_types.h" +#include "api/ref_count.h" #include "api/rtp_parameters.h" #include "api/scoped_refptr.h" #include "api/transport/rtp/rtp_source.h" -#include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -38,21 +40,22 @@ class RtpReceiverObserverInterface { // In the future, it's likely that an RtpReceiver will only call // OnFirstPacketReceived when a packet is received specifically for its // SSRC/mid. - virtual void OnFirstPacketReceived(cricket::MediaType media_type) = 0; + virtual void OnFirstPacketReceived(webrtc::MediaType media_type) = 0; protected: virtual ~RtpReceiverObserverInterface() {} }; -class RTC_EXPORT RtpReceiverInterface : public rtc::RefCountInterface { +class RTC_EXPORT RtpReceiverInterface : public webrtc::RefCountInterface, + public FrameTransformerHost { public: - virtual rtc::scoped_refptr track() const = 0; + virtual scoped_refptr track() const = 0; // The dtlsTransport attribute exposes the DTLS transport on which the // media is received. It may be null. // https://w3c.github.io/webrtc-pc/#dom-rtcrtpreceiver-transport // TODO(https://bugs.webrtc.org/907849) remove default implementation - virtual rtc::scoped_refptr dtls_transport() const; + virtual scoped_refptr dtls_transport() const; // The list of streams that `track` is associated with. This is the same as // the [[AssociatedRemoteMediaStreams]] internal slot in the spec. @@ -62,10 +65,10 @@ class RTC_EXPORT RtpReceiverInterface : public rtc::RefCountInterface { // stream_ids() as soon as downstream projects are no longer dependent on // stream objects. virtual std::vector stream_ids() const; - virtual std::vector> streams() const; + virtual std::vector> streams() const; // Audio or video receiver? - virtual cricket::MediaType media_type() const = 0; + virtual webrtc::MediaType media_type() const = 0; // Not to be confused with "mid", this is a field we can temporarily use // to uniquely identify a receiver until we implement Unified Plan SDP. @@ -77,7 +80,9 @@ class RTC_EXPORT RtpReceiverInterface : public rtc::RefCountInterface { virtual RtpParameters GetParameters() const = 0; // TODO(dinosaurav): Delete SetParameters entirely after rolling to Chromium. // Currently, doesn't support changing any parameters. - virtual bool SetParameters(const RtpParameters& parameters) { return false; } + virtual bool SetParameters(const RtpParameters& /* parameters */) { + return false; + } // Does not take ownership of observer. // Must call SetObserver(nullptr) before the observer is destroyed. @@ -88,7 +93,7 @@ class RTC_EXPORT RtpReceiverInterface : public rtc::RefCountInterface { // positive value including 0.0 measured in seconds. `nullopt` means default // value must be used. virtual void SetJitterBufferMinimumDelay( - absl::optional delay_seconds) = 0; + std::optional delay_seconds) = 0; // TODO(zhihuang): Remove the default implementation once the subclasses // implement this. Currently, the only relevant subclass is the @@ -101,18 +106,27 @@ class RTC_EXPORT RtpReceiverInterface : public rtc::RefCountInterface { // enabled or not. // TODO(bugs.webrtc.org/12772): Remove. virtual void SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor); + scoped_refptr frame_decryptor); // Returns a pointer to the frame decryptor set previously by the // user. This can be used to update the state of the object. // TODO(bugs.webrtc.org/12772): Remove. 
- virtual rtc::scoped_refptr GetFrameDecryptor() const; + virtual scoped_refptr GetFrameDecryptor() const; // Sets a frame transformer between the depacketizer and the decoder to enable // client code to transform received frames according to their own processing // logic. + // TODO: bugs.webrtc.org/15929 - add [[deprecated("Use SetFrameTransformer")]] + // when usage in Chrome is removed virtual void SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer); + scoped_refptr frame_transformer) { + SetFrameTransformer(std::move(frame_transformer)); + } + + // Default implementation of SetFrameTransformer. + // TODO: bugs.webrtc.org/15929 - Make pure virtual. + void SetFrameTransformer( + scoped_refptr frame_transformer) override; protected: ~RtpReceiverInterface() override = default; diff --git a/api/rtp_sender_interface.cc b/api/rtp_sender_interface.cc index f1ca5c2203..1ec869617a 100644 --- a/api/rtp_sender_interface.cc +++ b/api/rtp_sender_interface.cc @@ -10,12 +10,14 @@ #include "api/rtp_sender_interface.h" +#include "api/rtp_parameters.h" #include "rtc_base/checks.h" namespace webrtc { -void RtpSenderInterface::SetParametersAsync(const RtpParameters& parameters, - SetParametersCallback callback) { +void RtpSenderInterface::SetParametersAsync( + const RtpParameters& /* parameters */, + SetParametersCallback /* callback */) { RTC_DCHECK_NOTREACHED() << "Default implementation called"; } diff --git a/api/rtp_sender_interface.h b/api/rtp_sender_interface.h index 41d35bc287..478e15d570 100644 --- a/api/rtp_sender_interface.h +++ b/api/rtp_sender_interface.h @@ -14,8 +14,10 @@ #ifndef API_RTP_SENDER_INTERFACE_H_ #define API_RTP_SENDER_INTERFACE_H_ +#include #include #include +#include #include #include "absl/functional/any_invocable.h" @@ -25,37 +27,48 @@ #include "api/frame_transformer_interface.h" #include "api/media_stream_interface.h" #include "api/media_types.h" +#include "api/ref_count.h" #include "api/rtc_error.h" #include "api/rtp_parameters.h" #include "api/scoped_refptr.h" #include "api/video_codecs/video_encoder_factory.h" -#include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { +class RtpSenderObserverInterface { + public: + // The observer is called when the first media packet is sent for the observed + // sender. It is called immediately if the first packet was already sent. + virtual void OnFirstPacketSent(webrtc::MediaType media_type) = 0; + + protected: + virtual ~RtpSenderObserverInterface() {} +}; + using SetParametersCallback = absl::AnyInvocable; -class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface { +class RTC_EXPORT RtpSenderInterface : public webrtc::RefCountInterface, + public FrameTransformerHost { public: // Returns true if successful in setting the track. // Fails if an audio track is set on a video RtpSender, or vice-versa. virtual bool SetTrack(MediaStreamTrackInterface* track) = 0; - virtual rtc::scoped_refptr track() const = 0; + virtual scoped_refptr track() const = 0; // The dtlsTransport attribute exposes the DTLS transport on which the // media is sent. It may be null. // https://w3c.github.io/webrtc-pc/#dom-rtcrtpsender-transport - virtual rtc::scoped_refptr dtls_transport() const = 0; + virtual scoped_refptr dtls_transport() const = 0; // Returns primary SSRC used by this sender for sending media. // Returns 0 if not yet determined. - // TODO(deadbeef): Change to absl::optional. + // TODO(deadbeef): Change to std::optional. // TODO(deadbeef): Remove? 
With GetParameters this should be redundant. virtual uint32_t ssrc() const = 0; // Audio or video sender? - virtual cricket::MediaType media_type() const = 0; + virtual webrtc::MediaType media_type() const = 0; // Not to be confused with "mid", this is a field we can temporarily use // to uniquely identify a receiver until we implement Unified Plan SDP. @@ -85,23 +98,32 @@ class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface { virtual void SetParametersAsync(const RtpParameters& parameters, SetParametersCallback callback); + // Sets an observer which gets a callback when the first media packet is sent + // for this sender. + // Does not take ownership of observer. + // Must call SetObserver(nullptr) before the observer is destroyed. + virtual void SetObserver(RtpSenderObserverInterface* /* observer */) {} + // Returns null for a video sender. - virtual rtc::scoped_refptr GetDtmfSender() const = 0; + virtual scoped_refptr GetDtmfSender() const = 0; // Sets a user defined frame encryptor that will encrypt the entire frame // before it is sent across the network. This will encrypt the entire frame // using the user provided encryption mechanism regardless of whether SRTP is // enabled or not. virtual void SetFrameEncryptor( - rtc::scoped_refptr frame_encryptor) = 0; + scoped_refptr frame_encryptor) = 0; // Returns a pointer to the frame encryptor set previously by the // user. This can be used to update the state of the object. - virtual rtc::scoped_refptr GetFrameEncryptor() - const = 0; + virtual scoped_refptr GetFrameEncryptor() const = 0; + // TODO: bugs.webrtc.org/15929 - add [[deprecated("Use SetFrameTransformer")]] + // when usage in Chrome is removed virtual void SetEncoderToPacketizerFrameTransformer( - rtc::scoped_refptr frame_transformer) = 0; + scoped_refptr frame_transformer) { + SetFrameTransformer(std::move(frame_transformer)); + } // Sets a user defined encoder selector. // Overrides selector that is (optionally) provided by VideoEncoderFactory. @@ -109,6 +131,11 @@ class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface { std::unique_ptr encoder_selector) = 0; + // Default implementation of SetFrameTransformer. 
+ // TODO: bugs.webrtc.org/15929 - remove when all implementations are good + void SetFrameTransformer(scoped_refptr + /* frame_transformer */) override {} + protected: ~RtpSenderInterface() override = default; }; diff --git a/api/rtp_transceiver_interface.cc b/api/rtp_transceiver_interface.cc index 7267b286be..1a9429b8d8 100644 --- a/api/rtp_transceiver_interface.cc +++ b/api/rtp_transceiver_interface.cc @@ -10,6 +10,10 @@ #include "api/rtp_transceiver_interface.h" +#include + +#include "api/rtc_error.h" +#include "api/rtp_transceiver_direction.h" #include "rtc_base/checks.h" namespace webrtc { @@ -20,9 +24,9 @@ RtpTransceiverInit::RtpTransceiverInit(const RtpTransceiverInit& rhs) = default; RtpTransceiverInit::~RtpTransceiverInit() = default; -absl::optional +std::optional RtpTransceiverInterface::fired_direction() const { - return absl::nullopt; + return std::nullopt; } bool RtpTransceiverInterface::stopping() const { @@ -52,7 +56,7 @@ void RtpTransceiverInterface::SetDirection( } RTCError RtpTransceiverInterface::SetDirectionWithError( - RtpTransceiverDirection new_direction) { + RtpTransceiverDirection /* new_direction */) { RTC_DCHECK_NOTREACHED() << "Default implementation called"; return RTCError::OK(); } diff --git a/api/rtp_transceiver_interface.h b/api/rtp_transceiver_interface.h index 7d0d1a18bf..e35c5b729f 100644 --- a/api/rtp_transceiver_interface.h +++ b/api/rtp_transceiver_interface.h @@ -11,19 +11,20 @@ #ifndef API_RTP_TRANSCEIVER_INTERFACE_H_ #define API_RTP_TRANSCEIVER_INTERFACE_H_ +#include #include #include #include "absl/base/attributes.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/media_types.h" +#include "api/ref_count.h" +#include "api/rtc_error.h" #include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" #include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_direction.h" #include "api/scoped_refptr.h" -#include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -58,29 +59,29 @@ struct RTC_EXPORT RtpTransceiverInit final { // // WebRTC specification for RTCRtpTransceiver, the JavaScript analog: // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver -class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { +class RTC_EXPORT RtpTransceiverInterface : public webrtc::RefCountInterface { public: // Media type of the transceiver. Any sender(s)/receiver(s) will have this // type as well. - virtual cricket::MediaType media_type() const = 0; + virtual webrtc::MediaType media_type() const = 0; // The mid attribute is the mid negotiated and present in the local and // remote descriptions. Before negotiation is complete, the mid value may be // null. After rollbacks, the value may change from a non-null value to null. // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-mid - virtual absl::optional mid() const = 0; + virtual std::optional mid() const = 0; // The sender attribute exposes the RtpSender corresponding to the RTP media // that may be sent with the transceiver's mid. The sender is always present, // regardless of the direction of media. // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-sender - virtual rtc::scoped_refptr sender() const = 0; + virtual scoped_refptr sender() const = 0; // The receiver attribute exposes the RtpReceiver corresponding to the RTP // media that may be received with the transceiver's mid. The receiver is // always present, regardless of the direction of media. 
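Both RtpReceiverInterface and RtpSenderInterface in this patch gain a generic SetFrameTransformer() (via FrameTransformerHost) while the older per-direction setters become non-pure defaults that forward to it, so existing callers keep compiling during the migration. A minimal sketch of that deprecate-by-forwarding shape; Host, Sender and Transformer are illustrative placeholders, and std::shared_ptr stands in for scoped_refptr.

#include <iostream>
#include <memory>
#include <utility>

struct Transformer {};  // stand-in for a frame transformer implementation

// Stand-in for the new, generic entry point (the FrameTransformerHost role).
class Host {
 public:
  virtual ~Host() = default;

  // New name; eventually meant to be the only override point.
  virtual void SetFrameTransformer(std::shared_ptr<Transformer> t) = 0;

  // Old name kept as a non-pure default that forwards, so legacy call sites
  // keep working while implementations migrate.
  virtual void SetEncoderToPacketizerFrameTransformer(
      std::shared_ptr<Transformer> t) {
    SetFrameTransformer(std::move(t));
  }
};

class Sender : public Host {
 public:
  void SetFrameTransformer(std::shared_ptr<Transformer> t) override {
    installed_ = std::move(t);
    std::cout << "transformer installed\n";
  }

 private:
  std::shared_ptr<Transformer> installed_;
};

int main() {
  Sender sender;
  // Legacy call site: still works, routed through the new method.
  sender.SetEncoderToPacketizerFrameTransformer(std::make_shared<Transformer>());
  // New call site.
  sender.SetFrameTransformer(std::make_shared<Transformer>());
}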
// https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-receiver - virtual rtc::scoped_refptr receiver() const = 0; + virtual scoped_refptr receiver() const = 0; // The stopped attribute indicates that the sender of this transceiver will no // longer send, and that the receiver will no longer receive. It is true if @@ -118,14 +119,14 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { // for this transceiver. If this transceiver has never been represented in an // offer/answer exchange, or if the transceiver is stopped, the value is null. // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-currentdirection - virtual absl::optional current_direction() const = 0; + virtual std::optional current_direction() const = 0; // An internal slot designating for which direction the relevant // PeerConnection events have been fired. This is to ensure that events like // OnAddTrack only get fired once even if the same session description is // applied again. // Exposed in the public interface for use by Chromium. - virtual absl::optional fired_direction() const; + virtual std::optional fired_direction() const; // Initiates a stop of the transceiver. // The stop is complete when stopped() returns true. @@ -145,7 +146,7 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { // by WebRTC for this transceiver. // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-setcodecpreferences virtual RTCError SetCodecPreferences( - rtc::ArrayView codecs) = 0; + ArrayView codecs) = 0; virtual std::vector codec_preferences() const = 0; // Returns the set of header extensions that was set @@ -165,7 +166,7 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { // so that it negotiates use of header extensions which are not kStopped. // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface virtual webrtc::RTCError SetHeaderExtensionsToNegotiate( - rtc::ArrayView header_extensions) = 0; + ArrayView header_extensions) = 0; protected: ~RtpTransceiverInterface() override = default; diff --git a/api/scoped_refptr.h b/api/scoped_refptr.h index e145509127..f533670444 100644 --- a/api/scoped_refptr.h +++ b/api/scoped_refptr.h @@ -22,13 +22,13 @@ // }; // // void some_function() { -// scoped_refptr foo = new MyFoo(); +// scoped_refptr foo = make_ref_counted(); // foo->Method(param); // // `foo` is released when this function returns // } // // void some_other_function() { -// scoped_refptr foo = new MyFoo(); +// scoped_refptr foo = make_ref_counted(); // ... // foo = nullptr; // explicitly releases `foo` // ... 
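The scoped_refptr.h comments above now construct objects through make_ref_counted() rather than raw new. Below is a self-contained sketch of the same intrusive ref-counting shape; RefCounted, Ptr and MakeRefCounted are simplified stand-ins for the WebRTC classes and only illustrate why the factory is preferred: the smart pointer takes its reference immediately and releases it when it goes out of scope.

#include <cstdio>
#include <utility>

// Minimal intrusive ref count, standing in for a RefCountInterface base.
class RefCounted {
 public:
  void AddRef() const { ++ref_count_; }
  void Release() const {
    if (--ref_count_ == 0) delete this;
  }

 protected:
  virtual ~RefCounted() = default;

 private:
  mutable int ref_count_ = 0;
};

// Very small scoped_refptr-like smart pointer.
template <typename T>
class Ptr {
 public:
  Ptr() = default;
  explicit Ptr(T* p) : ptr_(p) {
    if (ptr_) ptr_->AddRef();
  }
  Ptr(const Ptr& o) : Ptr(o.ptr_) {}
  Ptr(Ptr&& o) noexcept : ptr_(std::exchange(o.ptr_, nullptr)) {}
  ~Ptr() {
    if (ptr_) ptr_->Release();
  }
  Ptr& operator=(Ptr o) noexcept {
    std::swap(ptr_, o.ptr_);
    return *this;
  }
  T* operator->() const { return ptr_; }

 private:
  T* ptr_ = nullptr;
};

// Factory in the spirit of make_ref_counted(): hides the raw `new` so the
// caller never holds an unowned pointer.
template <typename T, typename... Args>
Ptr<T> MakeRefCounted(Args&&... args) {
  return Ptr<T>(new T(std::forward<Args>(args)...));
}

class MyFoo : public RefCounted {
 public:
  void Method() const { std::puts("MyFoo::Method"); }
};

int main() {
  Ptr<MyFoo> foo = MakeRefCounted<MyFoo>();
  foo->Method();
}  // `foo` releases its reference here and MyFoo is destroyed.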
@@ -41,7 +41,7 @@ // references between the two objects, like so: // // { -// scoped_refptr a = new MyFoo(); +// scoped_refptr a = make_ref_counted(); // scoped_refptr b; // // b.swap(a); @@ -52,7 +52,7 @@ // object, simply use the assignment operator: // // { -// scoped_refptr a = new MyFoo(); +// scoped_refptr a = make_ref_counted(); // scoped_refptr b; // // b = a; @@ -63,20 +63,22 @@ #ifndef API_SCOPED_REFPTR_H_ #define API_SCOPED_REFPTR_H_ -#include +#include #include -namespace rtc { +#include "absl/base/nullability.h" + +namespace webrtc { template -class scoped_refptr { +class ABSL_NULLABILITY_COMPATIBLE scoped_refptr { public: - typedef T element_type; + using element_type = T; scoped_refptr() : ptr_(nullptr) {} scoped_refptr(std::nullptr_t) : ptr_(nullptr) {} // NOLINT(runtime/explicit) - explicit scoped_refptr(T* p) : ptr_(p) { + explicit scoped_refptr(T* absl_nullable p) : ptr_(p) { if (ptr_) ptr_->AddRef(); } @@ -119,7 +121,7 @@ class scoped_refptr { return retVal; } - scoped_refptr& operator=(T* p) { + scoped_refptr& operator=(T* absl_nullable p) { // AddRef first so that self assignment should work if (p) p->AddRef(); @@ -149,7 +151,7 @@ class scoped_refptr { return *this; } - void swap(T** pp) noexcept { + void swap(T** absl_nonnull pp) noexcept { T* p = ptr_; ptr_ = *pp; *pp = p; @@ -162,61 +164,67 @@ class scoped_refptr { }; template -bool operator==(const rtc::scoped_refptr& a, - const rtc::scoped_refptr& b) { +bool operator==(const scoped_refptr& a, const scoped_refptr& b) { return a.get() == b.get(); } template -bool operator!=(const rtc::scoped_refptr& a, - const rtc::scoped_refptr& b) { +bool operator!=(const scoped_refptr& a, const scoped_refptr& b) { return !(a == b); } template -bool operator==(const rtc::scoped_refptr& a, std::nullptr_t) { +bool operator==(const scoped_refptr& a, std::nullptr_t) { return a.get() == nullptr; } template -bool operator!=(const rtc::scoped_refptr& a, std::nullptr_t) { +bool operator!=(const scoped_refptr& a, std::nullptr_t) { return !(a == nullptr); } template -bool operator==(std::nullptr_t, const rtc::scoped_refptr& a) { +bool operator==(std::nullptr_t, const scoped_refptr& a) { return a.get() == nullptr; } template -bool operator!=(std::nullptr_t, const rtc::scoped_refptr& a) { +bool operator!=(std::nullptr_t, const scoped_refptr& a) { return !(a == nullptr); } // Comparison with raw pointer. template -bool operator==(const rtc::scoped_refptr& a, const U* b) { +bool operator==(const scoped_refptr& a, const U* b) { return a.get() == b; } template -bool operator!=(const rtc::scoped_refptr& a, const U* b) { +bool operator!=(const scoped_refptr& a, const U* b) { return !(a == b); } template -bool operator==(const T* a, const rtc::scoped_refptr& b) { +bool operator==(const T* a, const scoped_refptr& b) { return a == b.get(); } template -bool operator!=(const T* a, const rtc::scoped_refptr& b) { +bool operator!=(const T* a, const scoped_refptr& b) { return !(a == b); } // Ordered comparison, needed for use as a std::map key. template -bool operator<(const rtc::scoped_refptr& a, const rtc::scoped_refptr& b) { +bool operator<(const scoped_refptr& a, const scoped_refptr& b) { return a.get() < b.get(); } +} // namespace webrtc + +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +// Backwards compatible alias. +// TODO: bugs.webrtc.org/42225969 - Deprecate and remove. 
+using ::webrtc::scoped_refptr; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // API_SCOPED_REFPTR_H_ diff --git a/api/scoped_refptr_unittest.cc b/api/scoped_refptr_unittest.cc index 22b61209cd..50a72b7c81 100644 --- a/api/scoped_refptr_unittest.cc +++ b/api/scoped_refptr_unittest.cc @@ -10,12 +10,13 @@ #include "api/scoped_refptr.h" +#include #include #include #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { struct FunctionsCalled { @@ -108,4 +109,4 @@ TEST(ScopedRefptrTest, MovableDuringVectorReallocation) { } } // namespace -} // namespace rtc +} // namespace webrtc diff --git a/api/sctp_transport_interface.cc b/api/sctp_transport_interface.cc index 8a0a866e85..429b7b9e78 100644 --- a/api/sctp_transport_interface.cc +++ b/api/sctp_transport_interface.cc @@ -10,8 +10,12 @@ #include "api/sctp_transport_interface.h" +#include #include +#include "api/dtls_transport_interface.h" +#include "api/scoped_refptr.h" + namespace webrtc { SctpTransportInformation::SctpTransportInformation(SctpTransportState state) @@ -19,9 +23,9 @@ SctpTransportInformation::SctpTransportInformation(SctpTransportState state) SctpTransportInformation::SctpTransportInformation( SctpTransportState state, - rtc::scoped_refptr dtls_transport, - absl::optional max_message_size, - absl::optional max_channels) + scoped_refptr dtls_transport, + std::optional max_message_size, + std::optional max_channels) : state_(state), dtls_transport_(std::move(dtls_transport)), max_message_size_(max_message_size), diff --git a/api/sctp_transport_interface.h b/api/sctp_transport_interface.h index 7080889fcf..a9fd115937 100644 --- a/api/sctp_transport_interface.h +++ b/api/sctp_transport_interface.h @@ -11,11 +11,12 @@ #ifndef API_SCTP_TRANSPORT_INTERFACE_H_ #define API_SCTP_TRANSPORT_INTERFACE_H_ -#include "absl/types/optional.h" +#include + #include "api/dtls_transport_interface.h" -#include "api/rtc_error.h" +#include "api/ref_count.h" #include "api/scoped_refptr.h" -#include "rtc_base/ref_count.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -38,25 +39,24 @@ class RTC_EXPORT SctpTransportInformation { SctpTransportInformation() = default; SctpTransportInformation(const SctpTransportInformation&) = default; explicit SctpTransportInformation(SctpTransportState state); - SctpTransportInformation( - SctpTransportState state, - rtc::scoped_refptr dtls_transport, - absl::optional max_message_size, - absl::optional max_channels); + SctpTransportInformation(SctpTransportState state, + scoped_refptr dtls_transport, + std::optional max_message_size, + std::optional max_channels); ~SctpTransportInformation(); // The DTLS transport that supports this SCTP transport. 
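Just above, scoped_refptr is re-homed from namespace rtc to namespace webrtc, with an rtc:: alias kept behind WEBRTC_ALLOW_DEPRECATED_NAMESPACES so downstream code can migrate incrementally. A tiny generic sketch of that compatibility-alias pattern; all names below are placeholders, not the actual WebRTC flag or types.

#include <iostream>

namespace newns {
// The type now lives in its new home.
struct Widget {
  int value = 42;
};
}  // namespace newns

// Keep old call sites compiling while they migrate, gated by a macro so the
// alias can later be removed in a single step.
#define ALLOW_DEPRECATED_NAMESPACES 1
#if ALLOW_DEPRECATED_NAMESPACES
namespace oldns {
using ::newns::Widget;  // backwards-compatible alias
}  // namespace oldns
#endif

int main() {
  newns::Widget a;  // new spelling
  oldns::Widget b;  // legacy spelling, same type
  std::cout << a.value + b.value << "\n";
}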
- rtc::scoped_refptr dtls_transport() const { + scoped_refptr dtls_transport() const { return dtls_transport_; } SctpTransportState state() const { return state_; } - absl::optional MaxMessageSize() const { return max_message_size_; } - absl::optional MaxChannels() const { return max_channels_; } + std::optional MaxMessageSize() const { return max_message_size_; } + std::optional MaxChannels() const { return max_channels_; } private: - SctpTransportState state_; - rtc::scoped_refptr dtls_transport_; - absl::optional max_message_size_; - absl::optional max_channels_; + SctpTransportState state_ = SctpTransportState::kNew; + scoped_refptr dtls_transport_; + std::optional max_message_size_; + std::optional max_channels_; }; class SctpTransportObserverInterface { @@ -75,10 +75,10 @@ class SctpTransportObserverInterface { // accessed on that thread, except for functions explicitly marked otherwise. // References can be held by other threads, and destruction can therefore // be initiated by other threads. -class SctpTransportInterface : public rtc::RefCountInterface { +class SctpTransportInterface : public webrtc::RefCountInterface { public: // This function can be called from other threads. - virtual rtc::scoped_refptr dtls_transport() const = 0; + virtual scoped_refptr dtls_transport() const = 0; // Returns information on the state of the SctpTransport. // This function can be called from other threads. virtual SctpTransportInformation Information() const = 0; @@ -87,6 +87,26 @@ class SctpTransportInterface : public rtc::RefCountInterface { virtual void UnregisterObserver() = 0; }; +// The size of the SCTP association send buffer. 256kB, the usrsctp default. +constexpr int kSctpSendBufferSize = 256 * 1024; + +// SCTP options negotiated in the SDP. +struct SctpOptions { + // https://www.rfc-editor.org/rfc/rfc8841.html#name-sctp-port + // `local_port` and `remote_port` are passed along the wire and the + // listener and connector must be using the same port. They are not related + // to the ports at the IP level. If set to -1 we default to + // kSctpDefaultPort. + // TODO(bugs.webrtc.org/402429107): make these optional. + int local_port = -1; + int remote_port = -1; + + // https://www.rfc-editor.org/rfc/rfc8841.html#name-max-message-size + // `max_message_size` sets the maxium message size on the connection. + // It must be smaller than or equal to kSctpSendBufferSize. + int max_message_size = kSctpSendBufferSize; +}; + } // namespace webrtc #endif // API_SCTP_TRANSPORT_INTERFACE_H_ diff --git a/api/sequence_checker.h b/api/sequence_checker.h index 33e0f3c074..0d2d83806a 100644 --- a/api/sequence_checker.h +++ b/api/sequence_checker.h @@ -10,6 +10,7 @@ #ifndef API_SEQUENCE_CHECKER_H_ #define API_SEQUENCE_CHECKER_H_ +#include "api/task_queue/task_queue_base.h" #include "rtc_base/checks.h" #include "rtc_base/synchronization/sequence_checker_internal.h" #include "rtc_base/thread_annotations.h" @@ -46,8 +47,22 @@ class RTC_LOCKABLE SequenceChecker public: enum InitialState : bool { kDetached = false, kAttached = true }; + // TODO(tommi): We could maybe join these two ctors and have fewer factory + // functions. At the moment they're separate to minimize code changes when + // we added the second ctor as well as avoiding to have unnecessary code at + // the SequenceChecker which much only run for the SequenceCheckerImpl + // implementation. 
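The SctpOptions struct added to sctp_transport_interface.h above carries the SDP-negotiated SCTP ports and a max-message-size that must stay within kSctpSendBufferSize. Below is a standalone sketch of how a caller might resolve the -1 defaults and clamp a negotiated value; ValidateAndClamp is a hypothetical helper and the kSctpDefaultPort value is an assumption of this sketch, not part of the patch.

#include <algorithm>
#include <iostream>

// Mirrors the constant and struct introduced above (values copied from the diff).
constexpr int kSctpSendBufferSize = 256 * 1024;
constexpr int kSctpDefaultPort = 5000;  // assumed default for this sketch

struct SctpOptions {
  int local_port = -1;   // -1 means "use the default port"
  int remote_port = -1;
  int max_message_size = kSctpSendBufferSize;
};

// Hypothetical helper: resolve the -1 defaults and keep max_message_size
// within the send-buffer limit that the comment above requires.
SctpOptions ValidateAndClamp(SctpOptions o) {
  if (o.local_port < 0) o.local_port = kSctpDefaultPort;
  if (o.remote_port < 0) o.remote_port = kSctpDefaultPort;
  o.max_message_size = std::clamp(o.max_message_size, 1, kSctpSendBufferSize);
  return o;
}

int main() {
  SctpOptions negotiated;
  negotiated.max_message_size = 1 << 20;  // peer asked for 1 MiB
  SctpOptions usable = ValidateAndClamp(negotiated);
  std::cout << usable.local_port << " " << usable.max_message_size << "\n";
  // Prints: 5000 262144
}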
+ // In theory we could have something like: + // + // SequenceChecker(InitialState initial_state = kAttached, + // TaskQueueBase* attached_queue = TaskQueueBase::Current()); + // + // But the problem with that is having the call to `Current()` exist for + // `SequenceCheckerDoNothing`. explicit SequenceChecker(InitialState initial_state = kAttached) : Impl(initial_state) {} + explicit SequenceChecker(TaskQueueBase* attached_queue) + : Impl(attached_queue) {} // Returns true if sequence checker is attached to the current sequence. bool IsCurrent() const { return Impl::IsCurrent(); } @@ -85,14 +100,14 @@ class RTC_LOCKABLE SequenceChecker // public: // class Encoder { // public: -// rtc::TaskQueueBase& Queue() { return encoder_queue_; } +// webrtc::TaskQueueBase& Queue() { return encoder_queue_; } // void Encode() { // RTC_DCHECK_RUN_ON(&encoder_queue_); // DoSomething(var_); // } // // private: -// rtc::TaskQueueBase& encoder_queue_; +// webrtc::TaskQueueBase& encoder_queue_; // Frame var_ RTC_GUARDED_BY(encoder_queue_); // }; // @@ -100,12 +115,12 @@ class RTC_LOCKABLE SequenceChecker // // Will fail at runtime when DCHECK is enabled: // // encoder_->Encode(); // // Will work: -// rtc::scoped_refptr encoder = encoder_; +// webrtc::scoped_refptr encoder = encoder_; // encoder_->Queue().PostTask([encoder] { encoder->Encode(); }); // } // // private: -// rtc::scoped_refptr encoder_; +// webrtc::scoped_refptr encoder_; // } // Document if a function expected to be called from same thread/task queue. @@ -114,13 +129,13 @@ class RTC_LOCKABLE SequenceChecker // Checks current code is running on the desired sequence. // -// First statement validates it is running on the sequence `x`. -// Second statement annotates for the thread safety analyzer the check was done. +// First statement annotates for the thread safety analyzer the check was done. +// Second statement validates it is running on the sequence `x`. // Such annotation has to be attached to a function, and that function has to be // called. Thus current implementation creates a noop lambda and calls it. 
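sequence_checker.h above adds a constructor that attaches the checker to an explicitly supplied TaskQueueBase* rather than to whichever sequence constructs it, plus the existing detach-and-reattach-on-first-use behaviour. Below is a self-contained sketch of those semantics using std::thread::id; ThreadChecker is an illustrative stand-in, not webrtc::SequenceChecker.

#include <cassert>
#include <mutex>
#include <thread>

// Stand-in for a sequence checker: remembers which thread it is attached to,
// or nothing if detached, and answers IsCurrent() accordingly.
class ThreadChecker {
 public:
  // Default: attach to the constructing thread.
  ThreadChecker() : attached_(std::this_thread::get_id()) {}
  // New-style: attach to an explicitly supplied thread (for example, the
  // worker the object will later live on).
  explicit ThreadChecker(std::thread::id attach_to) : attached_(attach_to) {}

  bool IsCurrent() const {
    std::lock_guard<std::mutex> lock(mutex_);
    if (attached_ == std::thread::id()) {
      // Detached: the first caller wins and becomes the attached thread.
      attached_ = std::this_thread::get_id();
      return true;
    }
    return attached_ == std::this_thread::get_id();
  }

  void Detach() {
    std::lock_guard<std::mutex> lock(mutex_);
    attached_ = std::thread::id();
  }

 private:
  mutable std::mutex mutex_;
  mutable std::thread::id attached_;
};

int main() {
  std::thread::id worker_id;
  std::thread worker([&] { worker_id = std::this_thread::get_id(); });
  worker.join();

  // Attached to a thread other than the current one: not current here.
  ThreadChecker checker(worker_id);
  assert(!checker.IsCurrent());

  // Detach, then re-attach to the main thread on first use.
  checker.Detach();
  assert(checker.IsCurrent());
  return 0;
}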
#define RTC_DCHECK_RUN_ON(x) \ + []() RTC_ASSERT_EXCLUSIVE_LOCK(x) {}(); \ RTC_DCHECK((x)->IsCurrent()) \ - << webrtc::webrtc_sequence_checker_internal::ExpectationToString(x); \ - []() RTC_ASSERT_EXCLUSIVE_LOCK(x) {}() + << webrtc::webrtc_sequence_checker_internal::ExpectationToString(x); #endif // API_SEQUENCE_CHECKER_H_ diff --git a/api/sequence_checker_unittest.cc b/api/sequence_checker_unittest.cc index f117926d73..41eef3a7a7 100644 --- a/api/sequence_checker_unittest.cc +++ b/api/sequence_checker_unittest.cc @@ -10,14 +10,18 @@ #include "api/sequence_checker.h" +#include #include -#include +#include "absl/functional/any_invocable.h" #include "api/function_view.h" #include "api/units/time_delta.h" +#include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/platform_thread.h" +#include "rtc_base/synchronization/sequence_checker_internal.h" #include "rtc_base/task_queue_for_test.h" +#include "rtc_base/thread_annotations.h" #include "test/gmock.h" #include "test/gtest.h" @@ -43,9 +47,9 @@ class CompileTimeTestForGuardedBy { ::webrtc::SequenceChecker sequence_checker_; }; -void RunOnDifferentThread(rtc::FunctionView run) { - rtc::Event thread_has_run_event; - rtc::PlatformThread::SpawnJoinable( +void RunOnDifferentThread(FunctionView run) { + Event thread_has_run_event; + PlatformThread::SpawnJoinable( [&] { run(); thread_has_run_event.Set(); @@ -83,6 +87,13 @@ TEST(SequenceCheckerTest, DetachFromThreadAndUseOnTaskQueue) { queue.SendTask([&] { EXPECT_TRUE(sequence_checker.IsCurrent()); }); } +TEST(SequenceCheckerTest, InitializeForDifferentTaskQueue) { + TaskQueueForTest queue; + SequenceChecker sequence_checker(queue.Get()); + EXPECT_EQ(sequence_checker.IsCurrent(), !RTC_DCHECK_IS_ON); + queue.SendTask([&] { EXPECT_TRUE(sequence_checker.IsCurrent()); }); +} + TEST(SequenceCheckerTest, DetachFromTaskQueueAndUseOnThread) { TaskQueueForTest queue; queue.SendTask([] { @@ -137,13 +148,13 @@ TEST(SequenceCheckerTest, ExpectationToString) { SequenceChecker sequence_checker(SequenceChecker::kDetached); - rtc::Event blocker; + Event blocker; queue1.PostTask([&blocker, &sequence_checker]() { (void)sequence_checker.IsCurrent(); blocker.Set(); }); - blocker.Wait(rtc::Event::kForever); + blocker.Wait(Event::kForever); #if RTC_DCHECK_IS_ON @@ -166,13 +177,13 @@ TEST(SequenceCheckerTest, InitiallyDetached) { SequenceChecker sequence_checker(SequenceChecker::kDetached); - rtc::Event blocker; + Event blocker; queue1.PostTask([&blocker, &sequence_checker]() { EXPECT_TRUE(sequence_checker.IsCurrent()); blocker.Set(); }); - blocker.Wait(rtc::Event::kForever); + blocker.Wait(Event::kForever); #if RTC_DCHECK_IS_ON EXPECT_FALSE(sequence_checker.IsCurrent()); diff --git a/api/set_local_description_observer_interface.h b/api/set_local_description_observer_interface.h index 8e7b6258d3..40f85b944e 100644 --- a/api/set_local_description_observer_interface.h +++ b/api/set_local_description_observer_interface.h @@ -11,15 +11,15 @@ #ifndef API_SET_LOCAL_DESCRIPTION_OBSERVER_INTERFACE_H_ #define API_SET_LOCAL_DESCRIPTION_OBSERVER_INTERFACE_H_ +#include "api/ref_count.h" #include "api/rtc_error.h" -#include "rtc_base/ref_count.h" namespace webrtc { // OnSetLocalDescriptionComplete() invokes as soon as // PeerConnectionInterface::SetLocalDescription() operation completes, allowing // the observer to examine the effects of the operation without delay. 
-class SetLocalDescriptionObserverInterface : public rtc::RefCountInterface { +class SetLocalDescriptionObserverInterface : public webrtc::RefCountInterface { public: // On success, `error.ok()` is true. virtual void OnSetLocalDescriptionComplete(RTCError error) = 0; diff --git a/api/set_remote_description_observer_interface.h b/api/set_remote_description_observer_interface.h index d1c075309f..c1625410df 100644 --- a/api/set_remote_description_observer_interface.h +++ b/api/set_remote_description_observer_interface.h @@ -11,8 +11,8 @@ #ifndef API_SET_REMOTE_DESCRIPTION_OBSERVER_INTERFACE_H_ #define API_SET_REMOTE_DESCRIPTION_OBSERVER_INTERFACE_H_ +#include "api/ref_count.h" #include "api/rtc_error.h" -#include "rtc_base/ref_count.h" namespace webrtc { @@ -20,7 +20,7 @@ namespace webrtc { // callback is invoked such that the state of the peer connection can be // examined to accurately reflect the effects of the SetRemoteDescription // operation. -class SetRemoteDescriptionObserverInterface : public rtc::RefCountInterface { +class SetRemoteDescriptionObserverInterface : public webrtc::RefCountInterface { public: // On success, `error.ok()` is true. virtual void OnSetRemoteDescriptionComplete(RTCError error) = 0; diff --git a/api/stats/attribute.h b/api/stats/attribute.h new file mode 100644 index 0000000000..931a3d0952 --- /dev/null +++ b/api/stats/attribute.h @@ -0,0 +1,94 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_STATS_ATTRIBUTE_H_ +#define API_STATS_ATTRIBUTE_H_ + +#include +#include +#include +#include +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// A light-weight wrapper of an RTCStats attribute, i.e. an individual metric of +// type std::optional. +class RTC_EXPORT Attribute { + public: + // All supported attribute types. 
+ typedef std::variant*, + const std::optional*, + const std::optional*, + const std::optional*, + const std::optional*, + const std::optional*, + const std::optional*, + const std::optional>*, + const std::optional>*, + const std::optional>*, + const std::optional>*, + const std::optional>*, + const std::optional>*, + const std::optional>*, + const std::optional>*, + const std::optional>*> + StatVariant; + + template + Attribute(const char* name, const std::optional* attribute) + : name_(name), attribute_(attribute) {} + + const char* name() const; + const StatVariant& as_variant() const; + + bool has_value() const; + template + bool holds_alternative() const { + return std::holds_alternative*>(attribute_); + } + template + const std::optional& as_optional() const { + RTC_CHECK(holds_alternative()); + return *std::get*>(attribute_); + } + template + const T& get() const { + RTC_CHECK(holds_alternative()); + RTC_CHECK(has_value()); + return std::get*>(attribute_)->value(); + } + + bool is_sequence() const; + bool is_string() const; + std::string ToString() const; + + bool operator==(const Attribute& other) const; + bool operator!=(const Attribute& other) const; + + private: + const char* name_; + StatVariant attribute_; +}; + +struct RTC_EXPORT AttributeInit { + AttributeInit(const char* name, const Attribute::StatVariant& variant); + + const char* name; + Attribute::StatVariant variant; +}; + +} // namespace webrtc + +#endif // API_STATS_ATTRIBUTE_H_ diff --git a/api/stats/rtc_stats.h b/api/stats/rtc_stats.h index 6cc39a309f..b8070877f4 100644 --- a/api/stats/rtc_stats.h +++ b/api/stats/rtc_stats.h @@ -14,22 +14,18 @@ #include #include -#include #include +#include #include -#include #include -#include "absl/types/optional.h" +#include "api/stats/attribute.h" #include "api/units/timestamp.h" #include "rtc_base/checks.h" #include "rtc_base/system/rtc_export.h" -#include "rtc_base/system/rtc_export_template.h" namespace webrtc { -class RTCStatsMemberInterface; - // Abstract base class for RTCStats-derived dictionaries, see // https://w3c.github.io/webrtc-stats/. // @@ -40,8 +36,8 @@ class RTCStatsMemberInterface; // Use the `WEBRTC_RTCSTATS_IMPL` macro when implementing subclasses, see macro // for details. // -// Derived classes list their dictionary members, RTCStatsMember, as public -// fields, allowing the following: +// Derived classes list their dictionary attributes, std::optional, as +// public fields, allowing the following: // // RTCFooStats foo("fooId", Timestamp::Micros(GetCurrentTime())); // foo.bar = 42; @@ -49,38 +45,51 @@ class RTCStatsMemberInterface; // foo.baz->push_back("hello world"); // uint32_t x = *foo.bar; // -// Pointers to all the members are available with `Members`, allowing iteration: +// Pointers to all the attributes are available with `Attributes()`, allowing +// iteration: // -// for (const RTCStatsMemberInterface* member : foo.Members()) { -// printf("%s = %s\n", member->name(), member->ValueToString().c_str()); +// for (const auto& attribute : foo.Attributes()) { +// printf("%s = %s\n", attribute.name(), attribute.ToString().c_str()); // } class RTC_EXPORT RTCStats { public: RTCStats(const std::string& id, Timestamp timestamp) : id_(id), timestamp_(timestamp) {} - - virtual ~RTCStats() {} + RTCStats(const RTCStats& other); + virtual ~RTCStats(); virtual std::unique_ptr copy() const = 0; const std::string& id() const { return id_; } // Time relative to the UNIX epoch (Jan 1, 1970, UTC), in microseconds. 
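The new api/stats/attribute.h above views every stats field as a const pointer to std::optional<T> stored in a std::variant, which is what lets Attributes() expose heterogeneous metrics through a single loop. Below is a standalone sketch of that mechanism with only three alternative types; Field and InboundStats are simplified stand-ins, not webrtc::Attribute or RTCStats.

#include <cstdint>
#include <iostream>
#include <optional>
#include <string>
#include <type_traits>
#include <variant>
#include <vector>

// A named view onto one std::optional field, in the spirit of Attribute.
struct Field {
  using Variant = std::variant<const std::optional<uint32_t>*,
                               const std::optional<double>*,
                               const std::optional<std::string>*>;
  const char* name;
  Variant value;

  bool has_value() const {
    return std::visit([](const auto* f) { return f->has_value(); }, value);
  }
  std::string ToString() const {
    return std::visit(
        [](const auto* f) -> std::string {
          if (!f->has_value()) return "(unset)";
          if constexpr (std::is_same_v<decltype(**f), const std::string&>)
            return **f;
          else
            return std::to_string(**f);
        },
        value);
  }
};

// A stats-like object exposing its fields through Field views.
struct InboundStats {
  std::optional<uint32_t> packets_received;
  std::optional<double> jitter;
  std::optional<std::string> kind;

  std::vector<Field> Attributes() const {
    return {{"packetsReceived", &packets_received},
            {"jitter", &jitter},
            {"kind", &kind}};
  }
};

int main() {
  InboundStats stats;
  stats.packets_received = 1200;
  stats.kind = "video";
  for (const Field& f : stats.Attributes())
    std::cout << f.name << " = " << f.ToString() << "\n";
}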
Timestamp timestamp() const { return timestamp_; } + void set_timestamp(Timestamp timestamp) { timestamp_ = timestamp; } // Returns the static member variable `kType` of the implementing class. virtual const char* type() const = 0; - // Returns a vector of pointers to all the `RTCStatsMemberInterface` members - // of this class. This allows for iteration of members. For a given class, - // `Members` always returns the same members in the same order. - std::vector Members() const; + // Returns all attributes of this stats object, i.e. a list of its individual + // metrics as viewed via the Attribute wrapper. + std::vector Attributes() const; + template + Attribute GetAttribute(const std::optional& stat) const { + for (const auto& attribute : Attributes()) { + if (!attribute.holds_alternative()) { + continue; + } + if (std::get*>(attribute.as_variant()) == &stat) { + return attribute; + } + } + RTC_CHECK_NOTREACHED(); + } // Checks if the two stats objects are of the same type and have the same - // member values. Timestamps are not compared. These operators are exposed for - // testing. + // attribute values. Timestamps are not compared. These operators are exposed + // for testing. bool operator==(const RTCStats& other) const; bool operator!=(const RTCStats& other) const; // Creates a JSON readable string representation of the stats - // object, listing all of its members (names and values). + // object, listing all of its attributes (names and values). std::string ToJson() const; // Downcasts the stats object to an `RTCStats` subclass `T`. DCHECKs that the @@ -92,14 +101,10 @@ class RTC_EXPORT RTCStats { } protected: - // Gets a vector of all members of this `RTCStats` object, including members - // derived from parent classes. `additional_capacity` is how many more members - // shall be reserved in the vector (so that subclasses can allocate a vector - // with room for both parent and child members without it having to resize). - virtual std::vector - MembersOfThisObjectAndAncestors(size_t additional_capacity) const; + virtual std::vector AttributesImpl( + size_t additional_capacity) const; - std::string const id_; + std::string id_; Timestamp timestamp_; }; @@ -109,9 +114,8 @@ class RTC_EXPORT RTCStats { // // These macros declare (in _DECL) and define (in _IMPL) the static `kType` and // overrides methods as required by subclasses of `RTCStats`: `copy`, `type` and -// `MembersOfThisObjectAndAncestors`. The |...| argument is a list of addresses -// to each member defined in the implementing class. The list must have at least -// one member. +// `AttributesImpl`. The |...| argument is a list of addresses to each attribute +// defined in the implementing class. The list must have at least one attribute. // // (Since class names need to be known to implement these methods this cannot be // part of the base `RTCStats`. 
While these methods could be implemented using @@ -129,8 +133,8 @@ class RTC_EXPORT RTCStats { // // RTCFooStats(const std::string& id, Timestamp timestamp); // -// RTCStatsMember foo; -// RTCStatsMember bar; +// std::optional foo; +// std::optional bar; // }; // // rtcfoostats.cc: @@ -144,247 +148,50 @@ class RTC_EXPORT RTCStats { // bar("bar") { // } // -#define WEBRTC_RTCSTATS_DECL() \ - protected: \ - std::vector \ - MembersOfThisObjectAndAncestors(size_t local_var_additional_capacity) \ - const override; \ - \ - public: \ - static const char kType[]; \ - \ - std::unique_ptr copy() const override; \ - const char* type() const override - -#define WEBRTC_RTCSTATS_IMPL(this_class, parent_class, type_str, ...) \ - const char this_class::kType[] = type_str; \ - \ - std::unique_ptr this_class::copy() const { \ - return std::make_unique(*this); \ - } \ - \ - const char* this_class::type() const { \ - return this_class::kType; \ - } \ - \ - std::vector \ - this_class::MembersOfThisObjectAndAncestors( \ - size_t local_var_additional_capacity) const { \ - const webrtc::RTCStatsMemberInterface* local_var_members[] = { \ - __VA_ARGS__}; \ - size_t local_var_members_count = \ - sizeof(local_var_members) / sizeof(local_var_members[0]); \ - std::vector \ - local_var_members_vec = parent_class::MembersOfThisObjectAndAncestors( \ - local_var_members_count + local_var_additional_capacity); \ - RTC_DCHECK_GE( \ - local_var_members_vec.capacity() - local_var_members_vec.size(), \ - local_var_members_count + local_var_additional_capacity); \ - local_var_members_vec.insert(local_var_members_vec.end(), \ - &local_var_members[0], \ - &local_var_members[local_var_members_count]); \ - return local_var_members_vec; \ - } - -// A version of WEBRTC_RTCSTATS_IMPL() where "..." is omitted, used to avoid a -// compile error on windows. This is used if the stats dictionary does not -// declare any members of its own (but perhaps its parent dictionary does). -#define WEBRTC_RTCSTATS_IMPL_NO_MEMBERS(this_class, parent_class, type_str) \ - const char this_class::kType[] = type_str; \ +#define WEBRTC_RTCSTATS_DECL(SelfT) \ + protected: \ + std::vector AttributesImpl(size_t additional_capacity) \ + const override; \ \ - std::unique_ptr this_class::copy() const { \ - return std::make_unique(*this); \ - } \ + public: \ + static const char kType[]; \ \ - const char* this_class::type() const { \ - return this_class::kType; \ + template \ + friend void AbslStringify(Sink& sink, const SelfT& stats) { \ + sink.Append(stats.ToJson()); \ } \ \ - std::vector \ - this_class::MembersOfThisObjectAndAncestors( \ - size_t local_var_additional_capacity) const { \ - return parent_class::MembersOfThisObjectAndAncestors(0); \ - } - -// Interface for `RTCStats` members, which have a name and a value of a type -// defined in a subclass. Only the types listed in `Type` are supported, these -// are implemented by `RTCStatsMember`. The value of a member may be -// undefined, the value can only be read if `is_defined`. -class RTCStatsMemberInterface { - public: - // Member value types. 
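The reworked WEBRTC_RTCSTATS_DECL/IMPL machinery keeps the earlier pattern of each stats class appending its own fields to whatever its parent reports, passing additional_capacity down so the vector is reserved once for the whole chain. A macro-free standalone sketch of that aggregation follows; the class names and fields are illustrative only.

#include <cstddef>
#include <cstdint>
#include <iostream>
#include <optional>
#include <string>
#include <vector>

// Name/value pair standing in for the Attribute view; kept trivial here.
struct NamedValue {
  std::string name;
  std::string value;
};

class StatsBase {
 public:
  virtual ~StatsBase() = default;
  std::vector<NamedValue> Attributes() const { return AttributesImpl(0); }

 protected:
  // Each level appends its own fields; `additional_capacity` lets the most
  // derived class size the vector once for the whole chain.
  virtual std::vector<NamedValue> AttributesImpl(
      std::size_t additional_capacity) const {
    std::vector<NamedValue> out;
    out.reserve(1 + additional_capacity);
    out.push_back({"id", id_});
    return out;
  }
  std::string id_ = "stats-1";
};

class RtpStreamStats : public StatsBase {
 public:
  std::optional<uint32_t> ssrc;

 protected:
  std::vector<NamedValue> AttributesImpl(
      std::size_t additional_capacity) const override {
    std::vector<NamedValue> out =
        StatsBase::AttributesImpl(1 + additional_capacity);
    out.push_back({"ssrc", ssrc ? std::to_string(*ssrc) : "(unset)"});
    return out;
  }
};

class InboundRtpStreamStats : public RtpStreamStats {
 public:
  std::optional<uint64_t> packets_received;

 protected:
  std::vector<NamedValue> AttributesImpl(
      std::size_t additional_capacity) const override {
    std::vector<NamedValue> out =
        RtpStreamStats::AttributesImpl(1 + additional_capacity);
    out.push_back(
        {"packetsReceived",
         packets_received ? std::to_string(*packets_received) : "(unset)"});
    return out;
  }
};

int main() {
  InboundRtpStreamStats stats;
  stats.ssrc = 1234;
  stats.packets_received = 42;
  for (const NamedValue& a : stats.Attributes())
    std::cout << a.name << " = " << a.value << "\n";
}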
- enum Type { - kBool, // bool - kInt32, // int32_t - kUint32, // uint32_t - kInt64, // int64_t - kUint64, // uint64_t - kDouble, // double - kString, // std::string - - kSequenceBool, // std::vector - kSequenceInt32, // std::vector - kSequenceUint32, // std::vector - kSequenceInt64, // std::vector - kSequenceUint64, // std::vector - kSequenceDouble, // std::vector - kSequenceString, // std::vector - - kMapStringUint64, // std::map - kMapStringDouble, // std::map - }; - - virtual ~RTCStatsMemberInterface() {} - - const char* name() const { return name_; } - virtual Type type() const = 0; - virtual bool is_sequence() const = 0; - virtual bool is_string() const = 0; - virtual bool is_defined() const = 0; - // Type and value comparator. The names are not compared. These operators are - // exposed for testing. - bool operator==(const RTCStatsMemberInterface& other) const { - return IsEqual(other); - } - bool operator!=(const RTCStatsMemberInterface& other) const { - return !(*this == other); - } - virtual std::string ValueToString() const = 0; - // This is the same as ValueToString except for kInt64 and kUint64 types, - // where the value is represented as a double instead of as an integer. - // Since JSON stores numbers as floating point numbers, very large integers - // cannot be accurately represented, so we prefer to display them as doubles - // instead. - virtual std::string ValueToJson() const = 0; - - template - const T& cast_to() const { - RTC_DCHECK_EQ(type(), T::StaticType()); - return static_cast(*this); - } - - protected: - explicit RTCStatsMemberInterface(const char* name) : name_(name) {} - - virtual bool IsEqual(const RTCStatsMemberInterface& other) const = 0; - - const char* const name_; -}; - -// Template implementation of `RTCStatsMemberInterface`. -// The supported types are the ones described by -// `RTCStatsMemberInterface::Type`. -template -class RTCStatsMember : public RTCStatsMemberInterface { - public: - explicit RTCStatsMember(const char* name) - : RTCStatsMemberInterface(name), value_() {} - RTCStatsMember(const char* name, const T& value) - : RTCStatsMemberInterface(name), value_(value) {} - RTCStatsMember(const char* name, T&& value) - : RTCStatsMemberInterface(name), value_(std::move(value)) {} - explicit RTCStatsMember(const RTCStatsMember& other) - : RTCStatsMemberInterface(other.name_), value_(other.value_) {} - explicit RTCStatsMember(RTCStatsMember&& other) - : RTCStatsMemberInterface(other.name_), value_(std::move(other.value_)) {} - - static Type StaticType(); - Type type() const override { return StaticType(); } - bool is_sequence() const override; - bool is_string() const override; - bool is_defined() const override { return value_.has_value(); } - std::string ValueToString() const override; - std::string ValueToJson() const override; - - template - inline T ValueOrDefault(U default_value) const { - return value_.value_or(default_value); - } - - // Assignment operators. - T& operator=(const T& value) { - value_ = value; - return value_.value(); - } - T& operator=(const T&& value) { - value_ = std::move(value); - return value_.value(); - } - - // Getter methods that look the same as absl::optional. Please prefer these - // in order to unblock replacing RTCStatsMember with absl::optional in - // the future (https://crbug.com/webrtc/15164). 
- bool has_value() const { return value_.has_value(); } - const T& value() const { return value_.value(); } - T& value() { return value_.value(); } - T& operator*() { - RTC_DCHECK(value_); - return *value_; - } - const T& operator*() const { - RTC_DCHECK(value_); - return *value_; - } - T* operator->() { - RTC_DCHECK(value_); - return &(*value_); - } - const T* operator->() const { - RTC_DCHECK(value_); - return &(*value_); - } + std::unique_ptr copy() const override; \ + const char* type() const override - protected: - bool IsEqual(const RTCStatsMemberInterface& other) const override { - if (type() != other.type()) - return false; - const RTCStatsMember& other_t = - static_cast&>(other); - return value_ == other_t.value_; +#define WEBRTC_RTCSTATS_IMPL(this_class, parent_class, type_str, ...) \ + const char this_class::kType[] = type_str; \ + \ + std::unique_ptr this_class::copy() const { \ + return std::make_unique(*this); \ + } \ + \ + const char* this_class::type() const { \ + return this_class::kType; \ + } \ + \ + std::vector this_class::AttributesImpl( \ + size_t additional_capacity) const { \ + webrtc::AttributeInit attribute_inits[] = {__VA_ARGS__}; \ + size_t attribute_inits_size = \ + sizeof(attribute_inits) / sizeof(attribute_inits[0]); \ + std::vector attributes = parent_class::AttributesImpl( \ + attribute_inits_size + additional_capacity); \ + for (size_t i = 0; i < attribute_inits_size; ++i) { \ + attributes.push_back(std::visit( \ + [&](const auto* field) { \ + return Attribute(attribute_inits[i].name, field); \ + }, \ + attribute_inits[i].variant)); \ + } \ + return attributes; \ } - private: - absl::optional value_; -}; - -namespace rtc_stats_internal { - -typedef std::map MapStringUint64; -typedef std::map MapStringDouble; - -} // namespace rtc_stats_internal - -#define WEBRTC_DECLARE_RTCSTATSMEMBER(T) \ - template <> \ - RTC_EXPORT RTCStatsMemberInterface::Type RTCStatsMember::StaticType(); \ - template <> \ - RTC_EXPORT bool RTCStatsMember::is_sequence() const; \ - template <> \ - RTC_EXPORT bool RTCStatsMember::is_string() const; \ - template <> \ - RTC_EXPORT std::string RTCStatsMember::ValueToString() const; \ - template <> \ - RTC_EXPORT std::string RTCStatsMember::ValueToJson() const; \ - extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) \ - RTCStatsMember - -WEBRTC_DECLARE_RTCSTATSMEMBER(bool); -WEBRTC_DECLARE_RTCSTATSMEMBER(int32_t); -WEBRTC_DECLARE_RTCSTATSMEMBER(uint32_t); -WEBRTC_DECLARE_RTCSTATSMEMBER(int64_t); -WEBRTC_DECLARE_RTCSTATSMEMBER(uint64_t); -WEBRTC_DECLARE_RTCSTATSMEMBER(double); -WEBRTC_DECLARE_RTCSTATSMEMBER(std::string); -WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector); -WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector); -WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector); -WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector); -WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector); -WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector); -WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector); -WEBRTC_DECLARE_RTCSTATSMEMBER(rtc_stats_internal::MapStringUint64); -WEBRTC_DECLARE_RTCSTATSMEMBER(rtc_stats_internal::MapStringDouble); - } // namespace webrtc #endif // API_STATS_RTC_STATS_H_ diff --git a/api/stats/rtc_stats_collector_callback.h b/api/stats/rtc_stats_collector_callback.h index 506cc63e6f..90819a9140 100644 --- a/api/stats/rtc_stats_collector_callback.h +++ b/api/stats/rtc_stats_collector_callback.h @@ -11,18 +11,18 @@ #ifndef API_STATS_RTC_STATS_COLLECTOR_CALLBACK_H_ #define API_STATS_RTC_STATS_COLLECTOR_CALLBACK_H_ +#include "api/ref_count.h" #include "api/scoped_refptr.h" #include 
"api/stats/rtc_stats_report.h" -#include "rtc_base/ref_count.h" namespace webrtc { -class RTCStatsCollectorCallback : public rtc::RefCountInterface { +class RTCStatsCollectorCallback : public RefCountInterface { public: ~RTCStatsCollectorCallback() override = default; virtual void OnStatsDelivered( - const rtc::scoped_refptr& report) = 0; + const scoped_refptr& report) = 0; }; } // namespace webrtc diff --git a/api/stats/rtc_stats_report.h b/api/stats/rtc_stats_report.h index 1dce4d89b2..fdeec278b4 100644 --- a/api/stats/rtc_stats_report.h +++ b/api/stats/rtc_stats_report.h @@ -24,9 +24,6 @@ #include "api/scoped_refptr.h" #include "api/stats/rtc_stats.h" #include "api/units/timestamp.h" -// TODO(tommi): Remove this include after fixing iwyu issue in chromium. -// See: third_party/blink/renderer/platform/peerconnection/rtc_stats.cc -#include "rtc_base/ref_counted_object.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -34,7 +31,7 @@ namespace webrtc { // A collection of stats. // This is accessible as a map from `RTCStats::id` to `RTCStats`. class RTC_EXPORT RTCStatsReport final - : public rtc::RefCountedNonVirtual { + : public RefCountedNonVirtual { public: typedef std::map> StatsMap; @@ -52,20 +49,20 @@ class RTC_EXPORT RTCStatsReport final private: friend class RTCStatsReport; - ConstIterator(const rtc::scoped_refptr& report, + ConstIterator(const scoped_refptr& report, StatsMap::const_iterator it); // Reference report to make sure it is kept alive. - rtc::scoped_refptr report_; + scoped_refptr report_; StatsMap::const_iterator it_; }; - static rtc::scoped_refptr Create(Timestamp timestamp); + static scoped_refptr Create(Timestamp timestamp); explicit RTCStatsReport(Timestamp timestamp); RTCStatsReport(const RTCStatsReport& other) = delete; - rtc::scoped_refptr Copy() const; + scoped_refptr Copy() const; Timestamp timestamp() const { return timestamp_; } void AddStats(std::unique_ptr stats); @@ -101,7 +98,7 @@ class RTC_EXPORT RTCStatsReport final // if there is no object with `id`. std::unique_ptr Take(const std::string& id); // Takes ownership of all the stats in `other`, leaving it empty. - void TakeMembersFrom(rtc::scoped_refptr other); + void TakeMembersFrom(scoped_refptr other); // Stats iterators. Stats are ordered lexicographically on `RTCStats::id`. 
ConstIterator begin() const; @@ -124,7 +121,7 @@ class RTC_EXPORT RTCStatsReport final std::string ToJson() const; protected: - friend class rtc::RefCountedNonVirtual; + friend class RefCountedNonVirtual; ~RTCStatsReport() = default; private: diff --git a/api/stats/rtcstats_objects.h b/api/stats/rtcstats_objects.h index 8bb1ff2d05..82acb45368 100644 --- a/api/stats/rtcstats_objects.h +++ b/api/stats/rtcstats_objects.h @@ -15,10 +15,11 @@ #include #include +#include #include -#include #include "api/stats/rtc_stats.h" +#include "api/units/timestamp.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -26,124 +27,114 @@ namespace webrtc { // https://w3c.github.io/webrtc-stats/#certificatestats-dict* class RTC_EXPORT RTCCertificateStats final : public RTCStats { public: - WEBRTC_RTCSTATS_DECL(); - + WEBRTC_RTCSTATS_DECL(RTCCertificateStats); RTCCertificateStats(std::string id, Timestamp timestamp); - RTCCertificateStats(const RTCCertificateStats& other); ~RTCCertificateStats() override; - RTCStatsMember fingerprint; - RTCStatsMember fingerprint_algorithm; - RTCStatsMember base64_certificate; - RTCStatsMember issuer_certificate_id; + std::optional fingerprint; + std::optional fingerprint_algorithm; + std::optional base64_certificate; + std::optional issuer_certificate_id; }; // https://w3c.github.io/webrtc-stats/#codec-dict* class RTC_EXPORT RTCCodecStats final : public RTCStats { public: - WEBRTC_RTCSTATS_DECL(); - + WEBRTC_RTCSTATS_DECL(RTCCodecStats); RTCCodecStats(std::string id, Timestamp timestamp); - RTCCodecStats(const RTCCodecStats& other); ~RTCCodecStats() override; - RTCStatsMember transport_id; - RTCStatsMember payload_type; - RTCStatsMember mime_type; - RTCStatsMember clock_rate; - RTCStatsMember channels; - RTCStatsMember sdp_fmtp_line; + std::optional transport_id; + std::optional payload_type; + std::optional mime_type; + std::optional clock_rate; + std::optional channels; + std::optional sdp_fmtp_line; }; // https://w3c.github.io/webrtc-stats/#dcstats-dict* class RTC_EXPORT RTCDataChannelStats final : public RTCStats { public: - WEBRTC_RTCSTATS_DECL(); - + WEBRTC_RTCSTATS_DECL(RTCDataChannelStats); RTCDataChannelStats(std::string id, Timestamp timestamp); - RTCDataChannelStats(const RTCDataChannelStats& other); ~RTCDataChannelStats() override; - RTCStatsMember label; - RTCStatsMember protocol; - RTCStatsMember data_channel_identifier; - RTCStatsMember state; - RTCStatsMember messages_sent; - RTCStatsMember bytes_sent; - RTCStatsMember messages_received; - RTCStatsMember bytes_received; + std::optional label; + std::optional protocol; + std::optional data_channel_identifier; + std::optional state; + std::optional messages_sent; + std::optional bytes_sent; + std::optional messages_received; + std::optional bytes_received; }; // https://w3c.github.io/webrtc-stats/#candidatepair-dict* class RTC_EXPORT RTCIceCandidatePairStats final : public RTCStats { public: - WEBRTC_RTCSTATS_DECL(); - + WEBRTC_RTCSTATS_DECL(RTCIceCandidatePairStats); RTCIceCandidatePairStats(std::string id, Timestamp timestamp); - RTCIceCandidatePairStats(const RTCIceCandidatePairStats& other); ~RTCIceCandidatePairStats() override; - RTCStatsMember transport_id; - RTCStatsMember local_candidate_id; - RTCStatsMember remote_candidate_id; - RTCStatsMember state; + std::optional transport_id; + std::optional local_candidate_id; + std::optional remote_candidate_id; + std::optional state; // Obsolete: priority - RTCStatsMember priority; - RTCStatsMember nominated; + std::optional priority; + std::optional 
nominated; // `writable` does not exist in the spec and old comments suggest it used to // exist but was incorrectly implemented. // TODO(https://crbug.com/webrtc/14171): Standardize and/or modify // implementation. - RTCStatsMember writable; - RTCStatsMember packets_sent; - RTCStatsMember packets_received; - RTCStatsMember bytes_sent; - RTCStatsMember bytes_received; - RTCStatsMember total_round_trip_time; - RTCStatsMember current_round_trip_time; - RTCStatsMember available_outgoing_bitrate; - RTCStatsMember available_incoming_bitrate; - RTCStatsMember requests_received; - RTCStatsMember requests_sent; - RTCStatsMember responses_received; - RTCStatsMember responses_sent; - RTCStatsMember consent_requests_sent; - RTCStatsMember packets_discarded_on_send; - RTCStatsMember bytes_discarded_on_send; - RTCStatsMember last_packet_received_timestamp; - RTCStatsMember last_packet_sent_timestamp; + std::optional writable; + std::optional packets_sent; + std::optional packets_received; + std::optional bytes_sent; + std::optional bytes_received; + std::optional total_round_trip_time; + std::optional current_round_trip_time; + std::optional available_outgoing_bitrate; + std::optional available_incoming_bitrate; + std::optional requests_received; + std::optional requests_sent; + std::optional responses_received; + std::optional responses_sent; + std::optional consent_requests_sent; + std::optional packets_discarded_on_send; + std::optional bytes_discarded_on_send; + std::optional last_packet_received_timestamp; + std::optional last_packet_sent_timestamp; }; // https://w3c.github.io/webrtc-stats/#icecandidate-dict* class RTC_EXPORT RTCIceCandidateStats : public RTCStats { public: - WEBRTC_RTCSTATS_DECL(); - - RTCIceCandidateStats(const RTCIceCandidateStats& other); + WEBRTC_RTCSTATS_DECL(RTCIceCandidateStats); ~RTCIceCandidateStats() override; - RTCStatsMember transport_id; + std::optional transport_id; // Obsolete: is_remote - RTCStatsMember is_remote; - RTCStatsMember network_type; - RTCStatsMember ip; - RTCStatsMember address; - RTCStatsMember port; - RTCStatsMember protocol; - RTCStatsMember relay_protocol; - RTCStatsMember candidate_type; - RTCStatsMember priority; - RTCStatsMember url; - RTCStatsMember foundation; - RTCStatsMember related_address; - RTCStatsMember related_port; - RTCStatsMember username_fragment; - RTCStatsMember tcp_type; + std::optional is_remote; + std::optional network_type; + std::optional ip; + std::optional address; + std::optional port; + std::optional protocol; + std::optional relay_protocol; + std::optional candidate_type; + std::optional priority; + std::optional url; + std::optional foundation; + std::optional related_address; + std::optional related_port; + std::optional username_fragment; + std::optional tcp_type; // The following metrics are NOT exposed to JavaScript. We should consider // standardizing or removing them. 
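With the candidate and candidate-pair fields above now plain std::optional, consumers check has_value() or use value_or() instead of the removed is_defined()/ValueToString() helpers. A short standalone sketch of reading such fields defensively; CandidatePairView mirrors only a few RTCIceCandidatePairStats fields, and the average-RTT formula (totalRoundTripTime / responsesReceived) follows the webrtc-stats definitions.

#include <cstdint>
#include <iostream>
#include <optional>

// Stand-in for a few RTCIceCandidatePairStats fields after the migration.
struct CandidatePairView {
  std::optional<double> total_round_trip_time;    // seconds
  std::optional<double> current_round_trip_time;  // seconds
  std::optional<uint64_t> responses_received;
};

// Average RTT = totalRoundTripTime / responsesReceived, but only when both
// counters exist and at least one response was measured.
std::optional<double> AverageRttSeconds(const CandidatePairView& pair) {
  if (!pair.total_round_trip_time.has_value() ||
      !pair.responses_received.has_value() || *pair.responses_received == 0) {
    return std::nullopt;
  }
  return *pair.total_round_trip_time /
         static_cast<double>(*pair.responses_received);
}

int main() {
  CandidatePairView pair;
  pair.total_round_trip_time = 0.250;
  pair.responses_received = 5;

  std::cout << "avg rtt: " << AverageRttSeconds(pair).value_or(-1.0) << " s\n";
  // currentRoundTripTime may simply be absent; value_or supplies a fallback.
  std::cout << "cur rtt: " << pair.current_round_trip_time.value_or(0.0)
            << " s\n";
}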
- RTCStatsMember vpn; - RTCStatsMember network_adapter_type; + std::optional vpn; + std::optional network_adapter_type; protected: RTCIceCandidateStats(std::string id, Timestamp timestamp, bool is_remote); @@ -174,28 +165,24 @@ class RTC_EXPORT RTCRemoteIceCandidateStats final // https://w3c.github.io/webrtc-stats/#pcstats-dict* class RTC_EXPORT RTCPeerConnectionStats final : public RTCStats { public: - WEBRTC_RTCSTATS_DECL(); - + WEBRTC_RTCSTATS_DECL(RTCPeerConnectionStats); RTCPeerConnectionStats(std::string id, Timestamp timestamp); - RTCPeerConnectionStats(const RTCPeerConnectionStats& other); ~RTCPeerConnectionStats() override; - RTCStatsMember data_channels_opened; - RTCStatsMember data_channels_closed; + std::optional data_channels_opened; + std::optional data_channels_closed; }; // https://w3c.github.io/webrtc-stats/#streamstats-dict* class RTC_EXPORT RTCRtpStreamStats : public RTCStats { public: - WEBRTC_RTCSTATS_DECL(); - - RTCRtpStreamStats(const RTCRtpStreamStats& other); + WEBRTC_RTCSTATS_DECL(RTCRtpStreamStats); ~RTCRtpStreamStats() override; - RTCStatsMember ssrc; - RTCStatsMember kind; - RTCStatsMember transport_id; - RTCStatsMember codec_id; + std::optional ssrc; + std::optional kind; + std::optional transport_id; + std::optional codec_id; protected: RTCRtpStreamStats(std::string id, Timestamp timestamp); @@ -204,13 +191,11 @@ class RTC_EXPORT RTCRtpStreamStats : public RTCStats { // https://www.w3.org/TR/webrtc-stats/#receivedrtpstats-dict* class RTC_EXPORT RTCReceivedRtpStreamStats : public RTCRtpStreamStats { public: - WEBRTC_RTCSTATS_DECL(); - - RTCReceivedRtpStreamStats(const RTCReceivedRtpStreamStats& other); + WEBRTC_RTCSTATS_DECL(RTCReceivedRtpStreamStats); ~RTCReceivedRtpStreamStats() override; - RTCStatsMember jitter; - RTCStatsMember packets_lost; // Signed per RFC 3550 + std::optional jitter; + std::optional packets_lost; // Signed per RFC 3550 protected: RTCReceivedRtpStreamStats(std::string id, Timestamp timestamp); @@ -219,13 +204,11 @@ class RTC_EXPORT RTCReceivedRtpStreamStats : public RTCRtpStreamStats { // https://www.w3.org/TR/webrtc-stats/#sentrtpstats-dict* class RTC_EXPORT RTCSentRtpStreamStats : public RTCRtpStreamStats { public: - WEBRTC_RTCSTATS_DECL(); - - RTCSentRtpStreamStats(const RTCSentRtpStreamStats& other); + WEBRTC_RTCSTATS_DECL(RTCSentRtpStreamStats); ~RTCSentRtpStreamStats() override; - RTCStatsMember packets_sent; - RTCStatsMember bytes_sent; + std::optional packets_sent; + std::optional bytes_sent; protected: RTCSentRtpStreamStats(std::string id, Timestamp timestamp); @@ -235,193 +218,198 @@ class RTC_EXPORT RTCSentRtpStreamStats : public RTCRtpStreamStats { class RTC_EXPORT RTCInboundRtpStreamStats final : public RTCReceivedRtpStreamStats { public: - WEBRTC_RTCSTATS_DECL(); - + WEBRTC_RTCSTATS_DECL(RTCInboundRtpStreamStats); RTCInboundRtpStreamStats(std::string id, Timestamp timestamp); - RTCInboundRtpStreamStats(const RTCInboundRtpStreamStats& other); ~RTCInboundRtpStreamStats() override; - RTCStatsMember playout_id; - RTCStatsMember track_identifier; - RTCStatsMember mid; - RTCStatsMember remote_id; - RTCStatsMember packets_received; - RTCStatsMember packets_discarded; - RTCStatsMember fec_packets_received; - RTCStatsMember fec_bytes_received; - RTCStatsMember fec_packets_discarded; + std::optional playout_id; + std::optional track_identifier; + std::optional mid; + std::optional remote_id; + std::optional packets_received; + std::optional packets_discarded; + std::optional fec_packets_received; + std::optional fec_bytes_received; 
+ std::optional fec_packets_discarded; // Inbound FEC SSRC. Only present if a mechanism like FlexFEC is negotiated. - RTCStatsMember fec_ssrc; - RTCStatsMember bytes_received; - RTCStatsMember header_bytes_received; + std::optional fec_ssrc; + std::optional bytes_received; + std::optional header_bytes_received; // Inbound RTX stats. Only defined when RTX is used and it is therefore // possible to distinguish retransmissions. - RTCStatsMember retransmitted_packets_received; - RTCStatsMember retransmitted_bytes_received; - RTCStatsMember rtx_ssrc; - - RTCStatsMember last_packet_received_timestamp; - RTCStatsMember jitter_buffer_delay; - RTCStatsMember jitter_buffer_target_delay; - RTCStatsMember jitter_buffer_minimum_delay; - RTCStatsMember jitter_buffer_emitted_count; - RTCStatsMember total_samples_received; - RTCStatsMember concealed_samples; - RTCStatsMember silent_concealed_samples; - RTCStatsMember concealment_events; - RTCStatsMember inserted_samples_for_deceleration; - RTCStatsMember removed_samples_for_acceleration; - RTCStatsMember audio_level; - RTCStatsMember total_audio_energy; - RTCStatsMember total_samples_duration; + std::optional retransmitted_packets_received; + std::optional retransmitted_bytes_received; + std::optional rtx_ssrc; + + std::optional last_packet_received_timestamp; + std::optional jitter_buffer_delay; + std::optional jitter_buffer_target_delay; + std::optional jitter_buffer_minimum_delay; + std::optional jitter_buffer_emitted_count; + std::optional total_samples_received; + std::optional concealed_samples; + std::optional silent_concealed_samples; + std::optional concealment_events; + std::optional inserted_samples_for_deceleration; + std::optional removed_samples_for_acceleration; + std::optional audio_level; + std::optional total_audio_energy; + std::optional total_samples_duration; // Stats below are only implemented or defined for video. - RTCStatsMember frames_received; - RTCStatsMember frame_width; - RTCStatsMember frame_height; - RTCStatsMember frames_per_second; - RTCStatsMember frames_decoded; - RTCStatsMember key_frames_decoded; - RTCStatsMember frames_dropped; - RTCStatsMember total_decode_time; - RTCStatsMember total_processing_delay; - RTCStatsMember total_assembly_time; - RTCStatsMember frames_assembled_from_multiple_packets; - RTCStatsMember total_inter_frame_delay; - RTCStatsMember total_squared_inter_frame_delay; - RTCStatsMember pause_count; - RTCStatsMember total_pauses_duration; - RTCStatsMember freeze_count; - RTCStatsMember total_freezes_duration; + std::optional frames_received; + std::optional frame_width; + std::optional frame_height; + std::optional frames_per_second; + std::optional frames_decoded; + std::optional key_frames_decoded; + std::optional frames_dropped; + std::optional total_decode_time; + std::optional total_processing_delay; + std::optional total_assembly_time; + std::optional frames_assembled_from_multiple_packets; + // TODO(https://crbug.com/webrtc/15600): Implement framesRendered, which is + // incremented at the same time that totalInterFrameDelay and + // totalSquaredInterFrameDelay is incremented. (Dividing inter-frame delay by + // framesDecoded is slightly wrong.) + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-framesrendered + // + // TODO(https://crbug.com/webrtc/15601): Inter-frame, pause and freeze metrics + // all related to when the frame is rendered, but our implementation measures + // at delivery to sink, not at actual render time. 
When we have an actual + // frame rendered callback, move the calculating of these metrics to there in + // order to make them more accurate. + std::optional total_inter_frame_delay; + std::optional total_squared_inter_frame_delay; + std::optional pause_count; + std::optional total_pauses_duration; + std::optional freeze_count; + std::optional total_freezes_duration; // https://w3c.github.io/webrtc-provisional-stats/#dom-rtcinboundrtpstreamstats-contenttype - RTCStatsMember content_type; + std::optional content_type; // Only populated if audio/video sync is enabled. // TODO(https://crbug.com/webrtc/14177): Expose even if A/V sync is off? - RTCStatsMember estimated_playout_timestamp; + std::optional estimated_playout_timestamp; // Only defined for video. // In JavaScript, this is only exposed if HW exposure is allowed. - RTCStatsMember decoder_implementation; + std::optional decoder_implementation; // FIR and PLI counts are only defined for |kind == "video"|. - RTCStatsMember fir_count; - RTCStatsMember pli_count; - RTCStatsMember nack_count; - RTCStatsMember qp_sum; + std::optional fir_count; + std::optional pli_count; + std::optional nack_count; + std::optional qp_sum; + std::optional total_corruption_probability; + std::optional total_squared_corruption_probability; + std::optional corruption_measurements; // This is a remnant of the legacy getStats() API. When the "video-timing" // header extension is used, // https://webrtc.github.io/webrtc-org/experiments/rtp-hdrext/video-timing/, // `googTimingFrameInfo` is exposed with the value of // TimingFrameInfo::ToString(). // TODO(https://crbug.com/webrtc/14586): Unship or standardize this metric. - RTCStatsMember goog_timing_frame_info; + std::optional goog_timing_frame_info; // In JavaScript, this is only exposed if HW exposure is allowed. - RTCStatsMember power_efficient_decoder; + std::optional power_efficient_decoder; // The following metrics are NOT exposed to JavaScript. We should consider // standardizing or removing them. 
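A small sketch of the computation the framesRendered TODO above refers to: today an average inter-frame delay can only be formed per decoded frame, which is slightly off whenever decoded frames are never rendered. The function name and parameter types are illustrative only.

#include <cstdint>
#include <optional>

// Average inter-frame delay in seconds, derived the way the TODO describes:
// totalInterFrameDelay divided by framesDecoded. framesRendered would be the
// more accurate denominator once it is implemented.
std::optional<double> AverageInterFrameDelaySeconds(
    double total_inter_frame_delay,
    std::optional<uint32_t> frames_decoded) {
  if (!frames_decoded.has_value() || *frames_decoded == 0) {
    return std::nullopt;
  }
  return total_inter_frame_delay / *frames_decoded;
}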
- RTCStatsMember jitter_buffer_flushes; - RTCStatsMember delayed_packet_outage_samples; - RTCStatsMember relative_packet_arrival_delay; - RTCStatsMember interruption_count; - RTCStatsMember total_interruption_duration; - RTCStatsMember min_playout_delay; + std::optional jitter_buffer_flushes; + std::optional delayed_packet_outage_samples; + std::optional relative_packet_arrival_delay; + std::optional interruption_count; + std::optional total_interruption_duration; + std::optional min_playout_delay; }; // https://w3c.github.io/webrtc-stats/#outboundrtpstats-dict* class RTC_EXPORT RTCOutboundRtpStreamStats final : public RTCSentRtpStreamStats { public: - WEBRTC_RTCSTATS_DECL(); - + WEBRTC_RTCSTATS_DECL(RTCOutboundRtpStreamStats); RTCOutboundRtpStreamStats(std::string id, Timestamp timestamp); - RTCOutboundRtpStreamStats(const RTCOutboundRtpStreamStats& other); ~RTCOutboundRtpStreamStats() override; - RTCStatsMember media_source_id; - RTCStatsMember remote_id; - RTCStatsMember mid; - RTCStatsMember rid; - RTCStatsMember retransmitted_packets_sent; - RTCStatsMember header_bytes_sent; - RTCStatsMember retransmitted_bytes_sent; - RTCStatsMember target_bitrate; - RTCStatsMember frames_encoded; - RTCStatsMember key_frames_encoded; - RTCStatsMember total_encode_time; - RTCStatsMember total_encoded_bytes_target; - RTCStatsMember frame_width; - RTCStatsMember frame_height; - RTCStatsMember frames_per_second; - RTCStatsMember frames_sent; - RTCStatsMember huge_frames_sent; - RTCStatsMember total_packet_send_delay; - RTCStatsMember quality_limitation_reason; - RTCStatsMember> quality_limitation_durations; + std::optional media_source_id; + std::optional remote_id; + std::optional mid; + std::optional rid; + std::optional encoding_index; + std::optional retransmitted_packets_sent; + std::optional header_bytes_sent; + std::optional retransmitted_bytes_sent; + std::optional target_bitrate; + std::optional frames_encoded; + std::optional key_frames_encoded; + std::optional total_encode_time; + std::optional total_encoded_bytes_target; + std::optional frame_width; + std::optional frame_height; + std::optional frames_per_second; + std::optional frames_sent; + std::optional huge_frames_sent; + std::optional total_packet_send_delay; + std::optional quality_limitation_reason; + std::optional> quality_limitation_durations; // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-qualitylimitationresolutionchanges - RTCStatsMember quality_limitation_resolution_changes; + std::optional quality_limitation_resolution_changes; // https://w3c.github.io/webrtc-provisional-stats/#dom-rtcoutboundrtpstreamstats-contenttype - RTCStatsMember content_type; + std::optional content_type; // In JavaScript, this is only exposed if HW exposure is allowed. // Only implemented for video. // TODO(https://crbug.com/webrtc/14178): Implement for audio as well. - RTCStatsMember encoder_implementation; + std::optional encoder_implementation; // FIR and PLI counts are only defined for |kind == "video"|. - RTCStatsMember fir_count; - RTCStatsMember pli_count; - RTCStatsMember nack_count; - RTCStatsMember qp_sum; - RTCStatsMember active; + std::optional fir_count; + std::optional pli_count; + std::optional nack_count; + std::optional qp_sum; + std::optional active; // In JavaScript, this is only exposed if HW exposure is allowed. - RTCStatsMember power_efficient_encoder; - RTCStatsMember scalability_mode; + std::optional power_efficient_encoder; + std::optional scalability_mode; // RTX ssrc. Only present if RTX is negotiated. 
- RTCStatsMember rtx_ssrc; + std::optional rtx_ssrc; }; // https://w3c.github.io/webrtc-stats/#remoteinboundrtpstats-dict* class RTC_EXPORT RTCRemoteInboundRtpStreamStats final : public RTCReceivedRtpStreamStats { public: - WEBRTC_RTCSTATS_DECL(); - + WEBRTC_RTCSTATS_DECL(RTCRemoteInboundRtpStreamStats); RTCRemoteInboundRtpStreamStats(std::string id, Timestamp timestamp); - RTCRemoteInboundRtpStreamStats(const RTCRemoteInboundRtpStreamStats& other); ~RTCRemoteInboundRtpStreamStats() override; - RTCStatsMember local_id; - RTCStatsMember round_trip_time; - RTCStatsMember fraction_lost; - RTCStatsMember total_round_trip_time; - RTCStatsMember round_trip_time_measurements; + std::optional local_id; + std::optional round_trip_time; + std::optional fraction_lost; + std::optional total_round_trip_time; + std::optional round_trip_time_measurements; }; // https://w3c.github.io/webrtc-stats/#remoteoutboundrtpstats-dict* class RTC_EXPORT RTCRemoteOutboundRtpStreamStats final : public RTCSentRtpStreamStats { public: - WEBRTC_RTCSTATS_DECL(); - + WEBRTC_RTCSTATS_DECL(RTCRemoteOutboundRtpStreamStats); RTCRemoteOutboundRtpStreamStats(std::string id, Timestamp timestamp); - RTCRemoteOutboundRtpStreamStats(const RTCRemoteOutboundRtpStreamStats& other); ~RTCRemoteOutboundRtpStreamStats() override; - RTCStatsMember local_id; - RTCStatsMember remote_timestamp; - RTCStatsMember reports_sent; - RTCStatsMember round_trip_time; - RTCStatsMember round_trip_time_measurements; - RTCStatsMember total_round_trip_time; + std::optional local_id; + std::optional remote_timestamp; + std::optional reports_sent; + std::optional round_trip_time; + std::optional round_trip_time_measurements; + std::optional total_round_trip_time; }; // https://w3c.github.io/webrtc-stats/#dom-rtcmediasourcestats class RTC_EXPORT RTCMediaSourceStats : public RTCStats { public: - WEBRTC_RTCSTATS_DECL(); - - RTCMediaSourceStats(const RTCMediaSourceStats& other); + WEBRTC_RTCSTATS_DECL(RTCMediaSourceStats); ~RTCMediaSourceStats() override; - RTCStatsMember track_identifier; - RTCStatsMember kind; + std::optional track_identifier; + std::optional kind; protected: RTCMediaSourceStats(std::string id, Timestamp timestamp); @@ -430,77 +418,69 @@ class RTC_EXPORT RTCMediaSourceStats : public RTCStats { // https://w3c.github.io/webrtc-stats/#dom-rtcaudiosourcestats class RTC_EXPORT RTCAudioSourceStats final : public RTCMediaSourceStats { public: - WEBRTC_RTCSTATS_DECL(); - + WEBRTC_RTCSTATS_DECL(RTCAudioSourceStats); RTCAudioSourceStats(std::string id, Timestamp timestamp); - RTCAudioSourceStats(const RTCAudioSourceStats& other); ~RTCAudioSourceStats() override; - RTCStatsMember audio_level; - RTCStatsMember total_audio_energy; - RTCStatsMember total_samples_duration; - RTCStatsMember echo_return_loss; - RTCStatsMember echo_return_loss_enhancement; + std::optional audio_level; + std::optional total_audio_energy; + std::optional total_samples_duration; + std::optional echo_return_loss; + std::optional echo_return_loss_enhancement; }; // https://w3c.github.io/webrtc-stats/#dom-rtcvideosourcestats class RTC_EXPORT RTCVideoSourceStats final : public RTCMediaSourceStats { public: - WEBRTC_RTCSTATS_DECL(); - + WEBRTC_RTCSTATS_DECL(RTCVideoSourceStats); RTCVideoSourceStats(std::string id, Timestamp timestamp); - RTCVideoSourceStats(const RTCVideoSourceStats& other); ~RTCVideoSourceStats() override; - RTCStatsMember width; - RTCStatsMember height; - RTCStatsMember frames; - RTCStatsMember frames_per_second; + std::optional width; + std::optional height; + 
std::optional frames; + std::optional frames_per_second; }; // https://w3c.github.io/webrtc-stats/#transportstats-dict* class RTC_EXPORT RTCTransportStats final : public RTCStats { public: - WEBRTC_RTCSTATS_DECL(); - + WEBRTC_RTCSTATS_DECL(RTCTransportStats); RTCTransportStats(std::string id, Timestamp timestamp); - RTCTransportStats(const RTCTransportStats& other); ~RTCTransportStats() override; - RTCStatsMember bytes_sent; - RTCStatsMember packets_sent; - RTCStatsMember bytes_received; - RTCStatsMember packets_received; - RTCStatsMember rtcp_transport_stats_id; - RTCStatsMember dtls_state; - RTCStatsMember selected_candidate_pair_id; - RTCStatsMember local_certificate_id; - RTCStatsMember remote_certificate_id; - RTCStatsMember tls_version; - RTCStatsMember dtls_cipher; - RTCStatsMember dtls_role; - RTCStatsMember srtp_cipher; - RTCStatsMember selected_candidate_pair_changes; - RTCStatsMember ice_role; - RTCStatsMember ice_local_username_fragment; - RTCStatsMember ice_state; + std::optional bytes_sent; + std::optional packets_sent; + std::optional bytes_received; + std::optional packets_received; + std::optional rtcp_transport_stats_id; + std::optional dtls_state; + std::optional selected_candidate_pair_id; + std::optional local_certificate_id; + std::optional remote_certificate_id; + std::optional tls_version; + std::optional dtls_cipher; + std::optional dtls_role; + std::optional srtp_cipher; + std::optional selected_candidate_pair_changes; + std::optional ice_role; + std::optional ice_local_username_fragment; + std::optional ice_state; }; // https://w3c.github.io/webrtc-stats/#playoutstats-dict* class RTC_EXPORT RTCAudioPlayoutStats final : public RTCStats { public: - WEBRTC_RTCSTATS_DECL(); - + WEBRTC_RTCSTATS_DECL(RTCAudioPlayoutStats); RTCAudioPlayoutStats(const std::string& id, Timestamp timestamp); - RTCAudioPlayoutStats(const RTCAudioPlayoutStats& other); ~RTCAudioPlayoutStats() override; - RTCStatsMember kind; - RTCStatsMember synthesized_samples_duration; - RTCStatsMember synthesized_samples_events; - RTCStatsMember total_samples_duration; - RTCStatsMember total_playout_delay; - RTCStatsMember total_samples_count; + std::optional kind; + std::optional synthesized_samples_duration; + std::optional synthesized_samples_events; + std::optional total_samples_duration; + std::optional total_playout_delay; + std::optional total_samples_count; }; } // namespace webrtc diff --git a/api/task_queue/BUILD.gn b/api/task_queue/BUILD.gn index 527fac414b..9f10f0afc6 100644 --- a/api/task_queue/BUILD.gn +++ b/api/task_queue/BUILD.gn @@ -22,12 +22,10 @@ rtc_library("task_queue") { "../../rtc_base:macromagic", "../../rtc_base/system:rtc_export", "../units:time_delta", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:config", "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -56,8 +54,6 @@ rtc_library("task_queue_test") { deps = [ "../../../webrtc_overrides:webrtc_component", "../../test:test_support", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/cleanup", "//third_party/abseil-cpp/absl/strings", ] @@ -67,13 +63,12 @@ rtc_library("task_queue_test") { ":task_queue", "../../api:field_trials_view", "../../api:make_ref_counted", + "../../api:ref_count", "../../api/units:time_delta", "../../rtc_base:refcount", "../../rtc_base:rtc_event", "../../rtc_base:timeutils", "../../test:test_support", - ] - absl_deps = [ 
"//third_party/abseil-cpp/absl/cleanup", "//third_party/abseil-cpp/absl/strings", ] @@ -83,7 +78,10 @@ rtc_library("task_queue_test") { rtc_library("default_task_queue_factory") { visibility = [ "*" ] if (!is_ios && !is_android) { - poisonous = [ "default_task_queue" ] + # Internally webrtc shouldn't rely on any specific TaskQueue implementation + # and should create TaskQueue using TaskQueueFactory interface. + # TaskQueueFactory interface can be propagated with Environment. + poisonous = [ "environment_construction" ] } sources = [ "default_task_queue_factory.h" ] deps = [ @@ -92,24 +90,10 @@ rtc_library("default_task_queue_factory") { "../../rtc_base/memory:always_valid_pointer", ] - if (rtc_enable_libevent) { - if (is_android) { - sources += - [ "default_task_queue_factory_stdlib_or_libevent_experiment.cc" ] - deps += [ - "../../api/transport:field_trial_based_config", - "../../rtc_base:logging", - "../../rtc_base:rtc_task_queue_libevent", - "../../rtc_base:rtc_task_queue_stdlib", - ] - } else { - sources += [ "default_task_queue_factory_libevent.cc" ] - deps += [ "../../rtc_base:rtc_task_queue_libevent" ] - } - } else if (is_mac || is_ios) { + if (is_mac || is_ios) { sources += [ "default_task_queue_factory_gcd.cc" ] deps += [ "../../rtc_base:rtc_task_queue_gcd" ] - } else if (is_win && current_os != "winuwp") { + } else if (is_win && current_os != "winuwp" && !build_with_chromium) { sources += [ "default_task_queue_factory_win.cc" ] deps += [ "../../rtc_base:rtc_task_queue_win" ] } else { @@ -125,14 +109,16 @@ rtc_library("pending_task_safety_flag") { "pending_task_safety_flag.h", ] deps = [ + ":task_queue", "../../api:refcountedbase", "../../api:scoped_refptr", "../../api:sequence_checker", "../../rtc_base:checks", "../../rtc_base/system:no_unique_address", "../../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/base:nullability", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable" ] } if (rtc_include_tests) { @@ -151,9 +137,11 @@ if (rtc_include_tests) { sources = [ "pending_task_safety_flag_unittest.cc" ] deps = [ ":pending_task_safety_flag", + ":task_queue", + "..:scoped_refptr", + "../../rtc_base:checks", "../../rtc_base:logging", "../../rtc_base:rtc_event", - "../../rtc_base:rtc_task_queue", "../../rtc_base:task_queue_for_test", "../../test:test_support", ] diff --git a/api/task_queue/default_task_queue_factory_gcd.cc b/api/task_queue/default_task_queue_factory_gcd.cc index 391f09b393..fb293b3ced 100644 --- a/api/task_queue/default_task_queue_factory_gcd.cc +++ b/api/task_queue/default_task_queue_factory_gcd.cc @@ -16,7 +16,7 @@ namespace webrtc { std::unique_ptr CreateDefaultTaskQueueFactory( - const FieldTrialsView* field_trials) { + const FieldTrialsView* /* field_trials */) { return CreateTaskQueueGcdFactory(); } diff --git a/api/task_queue/default_task_queue_factory_libevent.cc b/api/task_queue/default_task_queue_factory_libevent.cc deleted file mode 100644 index 89079f51ca..0000000000 --- a/api/task_queue/default_task_queue_factory_libevent.cc +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include - -#include "api/field_trials_view.h" -#include "api/task_queue/task_queue_factory.h" -#include "rtc_base/task_queue_libevent.h" - -namespace webrtc { - -std::unique_ptr CreateDefaultTaskQueueFactory( - const FieldTrialsView* field_trials) { - return CreateTaskQueueLibeventFactory(); -} - -} // namespace webrtc diff --git a/api/task_queue/default_task_queue_factory_stdlib_or_libevent_experiment.cc b/api/task_queue/default_task_queue_factory_stdlib_or_libevent_experiment.cc deleted file mode 100644 index dc6e835907..0000000000 --- a/api/task_queue/default_task_queue_factory_stdlib_or_libevent_experiment.cc +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include "api/field_trials_view.h" -#include "api/task_queue/task_queue_factory.h" -#include "api/transport/field_trial_based_config.h" -#include "rtc_base/logging.h" -#include "rtc_base/memory/always_valid_pointer.h" -#include "rtc_base/task_queue_libevent.h" -#include "rtc_base/task_queue_stdlib.h" - -namespace webrtc { - -std::unique_ptr CreateDefaultTaskQueueFactory( - const FieldTrialsView* field_trials_view) { - AlwaysValidPointer field_trials( - field_trials_view); - if (field_trials->IsEnabled("WebRTC-TaskQueue-ReplaceLibeventWithStdlib")) { - RTC_LOG(LS_INFO) << "WebRTC-TaskQueue-ReplaceLibeventWithStdlib: " - << "using TaskQueueStdlibFactory."; - return CreateTaskQueueStdlibFactory(); - } - - RTC_LOG(LS_INFO) << "WebRTC-TaskQueue-ReplaceLibeventWithStdlib: " - << "using TaskQueueLibeventFactory."; - return CreateTaskQueueLibeventFactory(); -} - -} // namespace webrtc diff --git a/api/task_queue/pending_task_safety_flag.cc b/api/task_queue/pending_task_safety_flag.cc index 437ce0755d..32bc037e55 100644 --- a/api/task_queue/pending_task_safety_flag.cc +++ b/api/task_queue/pending_task_safety_flag.cc @@ -10,31 +10,46 @@ #include "api/task_queue/pending_task_safety_flag.h" +#include "absl/base/nullability.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "rtc_base/checks.h" + namespace webrtc { // static -rtc::scoped_refptr PendingTaskSafetyFlag::CreateInternal( +scoped_refptr PendingTaskSafetyFlag::CreateInternal( bool alive) { // Explicit new, to access private constructor. - return rtc::scoped_refptr( - new PendingTaskSafetyFlag(alive)); + return scoped_refptr(new PendingTaskSafetyFlag(alive)); } // static -rtc::scoped_refptr PendingTaskSafetyFlag::Create() { +scoped_refptr PendingTaskSafetyFlag::Create() { return CreateInternal(true); } -rtc::scoped_refptr -PendingTaskSafetyFlag::CreateDetached() { - rtc::scoped_refptr safety_flag = CreateInternal(true); +scoped_refptr PendingTaskSafetyFlag::CreateDetached() { + scoped_refptr safety_flag = CreateInternal(true); safety_flag->main_sequence_.Detach(); return safety_flag; } -rtc::scoped_refptr +// Creates a flag, but with its SequenceChecker explicitly initialized for +// a given task queue and the `alive()` flag specified. 
+scoped_refptr +PendingTaskSafetyFlag::CreateAttachedToTaskQueue(bool alive, + TaskQueueBase* absl_nonnull + attached_queue) { + RTC_DCHECK(attached_queue) << "Null TaskQueue provided"; + return scoped_refptr( + new PendingTaskSafetyFlag(alive, attached_queue)); +} + +scoped_refptr PendingTaskSafetyFlag::CreateDetachedInactive() { - rtc::scoped_refptr safety_flag = CreateInternal(false); + scoped_refptr safety_flag = CreateInternal(false); safety_flag->main_sequence_.Detach(); return safety_flag; } diff --git a/api/task_queue/pending_task_safety_flag.h b/api/task_queue/pending_task_safety_flag.h index 7f6a592856..71e871064b 100644 --- a/api/task_queue/pending_task_safety_flag.h +++ b/api/task_queue/pending_task_safety_flag.h @@ -13,11 +13,12 @@ #include +#include "absl/base/nullability.h" #include "absl/functional/any_invocable.h" #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" -#include "rtc_base/checks.h" +#include "api/task_queue/task_queue_base.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/system/rtc_export.h" @@ -39,7 +40,7 @@ namespace webrtc { // // class ExampleClass { // .... -// rtc::scoped_refptr flag = safety_flag_; +// webrtc::scoped_refptr flag = safety_flag_; // my_task_queue_->PostTask( // [flag = std::move(flag), this] { // // Now running on the main thread. @@ -60,17 +61,23 @@ namespace webrtc { // my_task_queue_->PostTask(SafeTask(safety_flag_, [this] { MyMethod(); })); // class RTC_EXPORT PendingTaskSafetyFlag final - : public rtc::RefCountedNonVirtual { + : public RefCountedNonVirtual { public: - static rtc::scoped_refptr Create(); + static scoped_refptr Create(); // Creates a flag, but with its SequenceChecker initially detached. Hence, it // may be created on a different thread than the flag will be used on. - static rtc::scoped_refptr CreateDetached(); + static scoped_refptr CreateDetached(); + + // Creates a flag, but with its SequenceChecker explicitly initialized for + // a given task queue and the `alive()` flag specified. + static scoped_refptr CreateAttachedToTaskQueue( + bool alive, + TaskQueueBase* absl_nonnull attached_queue); // Same as `CreateDetached()` except the initial state of the returned flag // will be `!alive()`. - static rtc::scoped_refptr CreateDetachedInactive(); + static scoped_refptr CreateDetachedInactive(); ~PendingTaskSafetyFlag() = default; @@ -95,9 +102,11 @@ class RTC_EXPORT PendingTaskSafetyFlag final protected: explicit PendingTaskSafetyFlag(bool alive) : alive_(alive) {} + PendingTaskSafetyFlag(bool alive, TaskQueueBase* absl_nonnull attached_queue) + : alive_(alive), main_sequence_(attached_queue) {} private: - static rtc::scoped_refptr CreateInternal(bool alive); + static scoped_refptr CreateInternal(bool alive); bool alive_ = true; RTC_NO_UNIQUE_ADDRESS SequenceChecker main_sequence_; @@ -120,23 +129,22 @@ class RTC_EXPORT PendingTaskSafetyFlag final class RTC_EXPORT ScopedTaskSafety final { public: ScopedTaskSafety() = default; - explicit ScopedTaskSafety(rtc::scoped_refptr flag) + explicit ScopedTaskSafety(scoped_refptr flag) : flag_(std::move(flag)) {} ~ScopedTaskSafety() { flag_->SetNotAlive(); } // Returns a new reference to the safety flag. - rtc::scoped_refptr flag() const { return flag_; } + scoped_refptr flag() const { return flag_; } // Marks the current flag as not-alive and attaches to a new one. 
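A sketch of how a caller might combine the new CreateAttachedToTaskQueue() factory with SafeTask(); StatsPoller, Poll() and PostPoll() are hypothetical, only PendingTaskSafetyFlag and SafeTask come from this header.

#include "api/scoped_refptr.h"
#include "api/task_queue/pending_task_safety_flag.h"
#include "api/task_queue/task_queue_base.h"

class StatsPoller {
 public:
  explicit StatsPoller(webrtc::TaskQueueBase* worker_queue)
      : worker_queue_(worker_queue),
        // Binding the flag to `worker_queue` up front means alive() and
        // SetNotAlive() are sequence-checked against that queue instead of
        // whichever sequence happens to touch the flag first.
        safety_(webrtc::PendingTaskSafetyFlag::CreateAttachedToTaskQueue(
            /*alive=*/true,
            worker_queue)) {}

  // Expected to run on `worker_queue_`, matching the attached sequence.
  ~StatsPoller() { safety_->SetNotAlive(); }

  void PostPoll() {
    // If the poller is destroyed before the task runs, SafeTask drops the
    // lambda instead of invoking it on a dangling `this`.
    worker_queue_->PostTask(webrtc::SafeTask(safety_, [this] { Poll(); }));
  }

 private:
  void Poll() {}

  webrtc::TaskQueueBase* const worker_queue_;
  webrtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> safety_;
};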
- void reset(rtc::scoped_refptr new_flag = + void reset(scoped_refptr new_flag = PendingTaskSafetyFlag::Create()) { flag_->SetNotAlive(); flag_ = std::move(new_flag); } private: - rtc::scoped_refptr flag_ = - PendingTaskSafetyFlag::Create(); + scoped_refptr flag_ = PendingTaskSafetyFlag::Create(); }; // Like ScopedTaskSafety, but allows construction on a different thread than @@ -147,15 +155,15 @@ class RTC_EXPORT ScopedTaskSafetyDetached final { ~ScopedTaskSafetyDetached() { flag_->SetNotAlive(); } // Returns a new reference to the safety flag. - rtc::scoped_refptr flag() const { return flag_; } + scoped_refptr flag() const { return flag_; } private: - rtc::scoped_refptr flag_ = + scoped_refptr flag_ = PendingTaskSafetyFlag::CreateDetached(); }; inline absl::AnyInvocable SafeTask( - rtc::scoped_refptr flag, + scoped_refptr flag, absl::AnyInvocable task) { return [flag = std::move(flag), task = std::move(task)]() mutable { if (flag->alive()) { diff --git a/api/task_queue/pending_task_safety_flag_unittest.cc b/api/task_queue/pending_task_safety_flag_unittest.cc index cedf0eb8df..4bf5585ed3 100644 --- a/api/task_queue/pending_task_safety_flag_unittest.cc +++ b/api/task_queue/pending_task_safety_flag_unittest.cc @@ -11,17 +11,19 @@ #include "api/task_queue/pending_task_safety_flag.h" #include +#include +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" +#include "rtc_base/checks.h" #include "rtc_base/event.h" -#include "rtc_base/logging.h" #include "rtc_base/task_queue_for_test.h" -#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { TEST(PendingTaskSafetyFlagTest, Basic) { - rtc::scoped_refptr safety_flag; + scoped_refptr safety_flag; { // Scope for the `owner` instance. class Owner { @@ -29,7 +31,7 @@ TEST(PendingTaskSafetyFlagTest, Basic) { Owner() = default; ~Owner() { flag_->SetNotAlive(); } - rtc::scoped_refptr flag_ = + scoped_refptr flag_ = PendingTaskSafetyFlag::Create(); } owner; EXPECT_TRUE(owner.flag_->alive()); @@ -41,7 +43,7 @@ TEST(PendingTaskSafetyFlagTest, Basic) { } TEST(PendingTaskSafetyFlagTest, BasicScoped) { - rtc::scoped_refptr safety_flag; + scoped_refptr safety_flag; { struct Owner { ScopedTaskSafety safety; @@ -67,7 +69,7 @@ TEST(PendingTaskSafetyFlagTest, PendingTaskSuccess) { void DoStuff() { RTC_DCHECK(!tq_main_->IsCurrent()); - rtc::scoped_refptr safe = flag_; + scoped_refptr safe = flag_; tq_main_->PostTask([safe = std::move(safe), this]() { if (!safe->alive()) return; @@ -80,7 +82,7 @@ TEST(PendingTaskSafetyFlagTest, PendingTaskSuccess) { private: TaskQueueBase* const tq_main_; bool stuff_done_ = false; - rtc::scoped_refptr flag_ = + scoped_refptr flag_ = PendingTaskSafetyFlag::Create(); }; @@ -131,9 +133,9 @@ TEST(PendingTaskSafetyFlagTest, PendingTaskDropped) { ASSERT_TRUE(owner); // Queue up a task on tq1 that will execute before the 'DoStuff' task // can, and delete the `owner` before the 'stuff' task can execute. - rtc::Event blocker; + Event blocker; tq1.PostTask([&blocker, &owner]() { - blocker.Wait(rtc::Event::kForever); + blocker.Wait(Event::kForever); owner.reset(); }); @@ -167,9 +169,19 @@ TEST(PendingTaskSafetyFlagTest, PendingTaskNotAliveInitialized) { EXPECT_TRUE(task_2_ran); } +TEST(PendingTaskSafetyFlagTest, PendingTaskInitializedForTaskQueue) { + TaskQueueForTest tq("PendingTaskAliveInitializedForTaskQueue"); + + // Create a new flag that initially `alive`, attached to a specific TQ. 
+ auto flag = PendingTaskSafetyFlag::CreateAttachedToTaskQueue(true, tq.Get()); + tq.SendTask([&flag]() { EXPECT_TRUE(flag->alive()); }); + // Repeat the same steps but initialize as inactive. + flag = PendingTaskSafetyFlag::CreateAttachedToTaskQueue(false, tq.Get()); + tq.SendTask([&flag]() { EXPECT_FALSE(flag->alive()); }); +} + TEST(PendingTaskSafetyFlagTest, SafeTask) { - rtc::scoped_refptr flag = - PendingTaskSafetyFlag::Create(); + scoped_refptr flag = PendingTaskSafetyFlag::Create(); int count = 0; // Create two identical tasks that increment the `count`. diff --git a/api/task_queue/task_queue_base.cc b/api/task_queue/task_queue_base.cc index ecdc7f7691..6533aa0146 100644 --- a/api/task_queue/task_queue_base.cc +++ b/api/task_queue/task_queue_base.cc @@ -11,9 +11,6 @@ #include "absl/base/attributes.h" #include "absl/base/config.h" -#include "absl/functional/any_invocable.h" -#include "api/units/time_delta.h" -#include "rtc_base/checks.h" #if defined(ABSL_HAVE_THREAD_LOCAL) diff --git a/api/task_queue/task_queue_base.h b/api/task_queue/task_queue_base.h index da7a00d438..8b7bb1b903 100644 --- a/api/task_queue/task_queue_base.h +++ b/api/task_queue/task_queue_base.h @@ -10,7 +10,6 @@ #ifndef API_TASK_QUEUE_TASK_QUEUE_BASE_H_ #define API_TASK_QUEUE_TASK_QUEUE_BASE_H_ -#include #include #include "absl/functional/any_invocable.h" @@ -94,8 +93,7 @@ class RTC_LOCKABLE RTC_EXPORT TaskQueueBase { void PostDelayedTask(absl::AnyInvocable task, TimeDelta delay, const Location& location = Location::Current()) { - PostDelayedTaskImpl(std::move(task), delay, - PostDelayedTaskTraits{.high_precision = false}, + PostDelayedTaskImpl(std::move(task), delay, PostDelayedTaskTraits{}, location); } @@ -119,9 +117,9 @@ class RTC_LOCKABLE RTC_EXPORT TaskQueueBase { absl::AnyInvocable task, TimeDelta delay, const Location& location = Location::Current()) { - PostDelayedTaskImpl(std::move(task), delay, - PostDelayedTaskTraits{.high_precision = true}, - location); + PostDelayedTaskTraits traits; + traits.high_precision = true; + PostDelayedTaskImpl(std::move(task), delay, traits, location); } // As specified by `precision`, calls either PostDelayedTask() or diff --git a/api/task_queue/task_queue_test.cc b/api/task_queue/task_queue_test.cc index b02333ec58..1366a39cd8 100644 --- a/api/task_queue/task_queue_test.cc +++ b/api/task_queue/task_queue_test.cc @@ -9,23 +9,29 @@ */ #include "api/task_queue/task_queue_test.h" +#include #include +#include +#include #include "absl/cleanup/cleanup.h" #include "absl/strings/string_view.h" +#include "api/ref_count.h" #include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" #include "api/units/time_delta.h" #include "rtc_base/event.h" #include "rtc_base/ref_counter.h" #include "rtc_base/time_utils.h" +#include "test/gtest.h" namespace webrtc { namespace { // Avoids a dependency to system_wrappers. void SleepFor(TimeDelta duration) { - rtc::ScopedAllowBaseSyncPrimitivesForTesting allow; - rtc::Event event; + ScopedAllowBaseSyncPrimitivesForTesting allow; + Event event; event.Wait(duration); } @@ -44,11 +50,11 @@ TEST_P(TaskQueueTest, Construct) { TEST_P(TaskQueueTest, PostAndCheckCurrent) { std::unique_ptr factory = GetParam()(nullptr); - rtc::Event event; + Event event; auto queue = CreateTaskQueue(factory, "PostAndCheckCurrent"); // We're not running a task, so `queue` shouldn't be current. 
- // Note that because rtc::Thread also supports the TQ interface and + // Note that because webrtc::Thread also supports the TQ interface and // TestMainImpl::Init wraps the main test thread (bugs.webrtc.org/9714), that // means that TaskQueueBase::Current() will still return a valid value. EXPECT_FALSE(queue->IsCurrent()); @@ -62,17 +68,17 @@ TEST_P(TaskQueueTest, PostAndCheckCurrent) { TEST_P(TaskQueueTest, PostCustomTask) { std::unique_ptr factory = GetParam()(nullptr); - rtc::Event ran; + Event ran; auto queue = CreateTaskQueue(factory, "PostCustomImplementation"); class CustomTask { public: - explicit CustomTask(rtc::Event* ran) : ran_(ran) {} + explicit CustomTask(Event* ran) : ran_(ran) {} void operator()() { ran_->Set(); } private: - rtc::Event* const ran_; + Event* const ran_; } my_task(&ran); queue->PostTask(my_task); @@ -81,7 +87,7 @@ TEST_P(TaskQueueTest, PostCustomTask) { TEST_P(TaskQueueTest, PostDelayedZero) { std::unique_ptr factory = GetParam()(nullptr); - rtc::Event event; + Event event; auto queue = CreateTaskQueue(factory, "PostDelayedZero"); queue->PostDelayedTask([&event] { event.Set(); }, TimeDelta::Zero()); @@ -90,7 +96,7 @@ TEST_P(TaskQueueTest, PostDelayedZero) { TEST_P(TaskQueueTest, PostFromQueue) { std::unique_ptr factory = GetParam()(nullptr); - rtc::Event event; + Event event; auto queue = CreateTaskQueue(factory, "PostFromQueue"); queue->PostTask( @@ -100,11 +106,11 @@ TEST_P(TaskQueueTest, PostFromQueue) { TEST_P(TaskQueueTest, PostDelayed) { std::unique_ptr factory = GetParam()(nullptr); - rtc::Event event; + Event event; auto queue = CreateTaskQueue(factory, "PostDelayed", TaskQueueFactory::Priority::HIGH); - int64_t start = rtc::TimeMillis(); + int64_t start = TimeMillis(); queue->PostDelayedTask( [&event, &queue] { EXPECT_TRUE(queue->IsCurrent()); @@ -112,7 +118,7 @@ TEST_P(TaskQueueTest, PostDelayed) { }, TimeDelta::Millis(100)); EXPECT_TRUE(event.Wait(TimeDelta::Seconds(1))); - int64_t end = rtc::TimeMillis(); + int64_t end = TimeMillis(); // These tests are a little relaxed due to how "powerful" our test bots can // be. Most recently we've seen windows bots fire the callback after 94-99ms, // which is why we have a little bit of leeway backwards as well. 
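A brief sketch tying together the two delayed-post entry points from the task_queue_base.h hunk above; the callback bodies and the 500 ms / 10 ms delays are arbitrary example values.

#include "api/task_queue/task_queue_base.h"
#include "api/units/time_delta.h"

void ScheduleTicks(webrtc::TaskQueueBase* queue) {
  // Default traits (high_precision = false): the implementation may coalesce
  // or round the deadline, e.g. to reduce wake-ups.
  queue->PostDelayedTask([] { /* periodic bookkeeping */ },
                         webrtc::TimeDelta::Millis(500));

  // Explicit high-precision traits, for deadlines where such rounding would
  // hurt, at a potential scheduling cost.
  queue->PostDelayedHighPrecisionTask([] { /* time-sensitive work */ },
                                      webrtc::TimeDelta::Millis(10));
}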
@@ -124,9 +130,9 @@ TEST_P(TaskQueueTest, PostMultipleDelayed) { std::unique_ptr factory = GetParam()(nullptr); auto queue = CreateTaskQueue(factory, "PostMultipleDelayed"); - std::vector events(100); + std::vector events(100); for (int i = 0; i < 100; ++i) { - rtc::Event* event = &events[i]; + Event* event = &events[i]; queue->PostDelayedTask( [event, &queue] { EXPECT_TRUE(queue->IsCurrent()); @@ -135,14 +141,14 @@ TEST_P(TaskQueueTest, PostMultipleDelayed) { TimeDelta::Millis(i)); } - for (rtc::Event& e : events) + for (Event& e : events) EXPECT_TRUE(e.Wait(TimeDelta::Seconds(1))); } TEST_P(TaskQueueTest, PostDelayedAfterDestruct) { std::unique_ptr factory = GetParam()(nullptr); - rtc::Event run; - rtc::Event deleted; + Event run; + Event deleted; auto queue = CreateTaskQueue(factory, "PostDelayedAfterDestruct"); absl::Cleanup cleanup = [&deleted] { deleted.Set(); }; queue->PostDelayedTask([&run, cleanup = std::move(cleanup)] { run.Set(); }, @@ -156,8 +162,8 @@ TEST_P(TaskQueueTest, PostDelayedAfterDestruct) { TEST_P(TaskQueueTest, PostDelayedHighPrecisionAfterDestruct) { std::unique_ptr factory = GetParam()(nullptr); - rtc::Event run; - rtc::Event deleted; + Event run; + Event deleted; auto queue = CreateTaskQueue(factory, "PostDelayedHighPrecisionAfterDestruct"); absl::Cleanup cleanup = [&deleted] { deleted.Set(); }; @@ -179,7 +185,7 @@ TEST_P(TaskQueueTest, PostedUnexecutedClosureDestroyedOnTaskQueue) { queue->PostTask([] { SleepFor(TimeDelta::Millis(100)); }); // Give the task queue a chance to start executing the first lambda. SleepFor(TimeDelta::Millis(10)); - rtc::Event finished; + Event finished; // Then ensure the next lambda (which is likely not executing yet) is // destroyed in the task queue context when the queue is deleted. auto cleanup = absl::Cleanup([queue_ptr, &finished] { @@ -195,7 +201,7 @@ TEST_P(TaskQueueTest, PostedClosureDestroyedOnTaskQueue) { std::unique_ptr factory = GetParam()(nullptr); auto queue = CreateTaskQueue(factory, "PostedClosureDestroyedOnTaskQueue"); TaskQueueBase* queue_ptr = queue.get(); - rtc::Event finished; + Event finished; auto cleanup = absl::Cleanup([queue_ptr, &finished] { EXPECT_EQ(queue_ptr, TaskQueueBase::Current()); finished.Set(); @@ -214,7 +220,7 @@ TEST_P(TaskQueueTest, PostedExecutedClosureDestroyedOnTaskQueue) { CreateTaskQueue(factory, "PostedExecutedClosureDestroyedOnTaskQueue"); TaskQueueBase* queue_ptr = queue.get(); // Ensure an executed lambda is destroyed on the task queue. 
- rtc::Event finished; + Event finished; queue->PostTask([cleanup = absl::Cleanup([queue_ptr, &finished] { EXPECT_EQ(queue_ptr, TaskQueueBase::Current()); finished.Set(); @@ -224,7 +230,7 @@ TEST_P(TaskQueueTest, PostedExecutedClosureDestroyedOnTaskQueue) { TEST_P(TaskQueueTest, PostAndReuse) { std::unique_ptr factory = GetParam()(nullptr); - rtc::Event event; + Event event; auto post_queue = CreateTaskQueue(factory, "PostQueue"); auto reply_queue = CreateTaskQueue(factory, "ReplyQueue"); @@ -232,7 +238,7 @@ TEST_P(TaskQueueTest, PostAndReuse) { class ReusedTask { public: - ReusedTask(int* counter, TaskQueueBase* reply_queue, rtc::Event* event) + ReusedTask(int* counter, TaskQueueBase* reply_queue, Event* event) : counter_(*counter), reply_queue_(reply_queue), event_(*event) { EXPECT_EQ(counter_, 0); } @@ -253,7 +259,7 @@ TEST_P(TaskQueueTest, PostAndReuse) { private: int& counter_; TaskQueueBase* const reply_queue_; - rtc::Event& event_; + Event& event_; }; ReusedTask task(&call_count, reply_queue.get(), &event); @@ -268,7 +274,7 @@ TEST_P(TaskQueueTest, PostALot) { explicit BlockingCounter(int initial_count) : count_(initial_count) {} void DecrementCount() { - if (count_.DecRef() == rtc::RefCountReleaseStatus::kDroppedLastRef) { + if (count_.DecRef() == webrtc::RefCountReleaseStatus::kDroppedLastRef) { event_.Set(); } } @@ -276,12 +282,12 @@ TEST_P(TaskQueueTest, PostALot) { private: webrtc_impl::RefCounter count_; - rtc::Event event_; + Event event_; }; std::unique_ptr factory = GetParam()(nullptr); static constexpr int kTaskCount = 0xffff; - rtc::Event posting_done; + Event posting_done; BlockingCounter all_destroyed(kTaskCount); int tasks_executed = 0; @@ -301,7 +307,7 @@ TEST_P(TaskQueueTest, PostALot) { }); // Before destroying the task queue wait until all child tasks are posted. - posting_done.Wait(rtc::Event::kForever); + posting_done.Wait(Event::kForever); // Destroy the task queue. task_queue = nullptr; @@ -330,7 +336,7 @@ TEST_P(TaskQueueTest, PostTwoWithSharedUnprotectedState) { } state; auto queue = CreateTaskQueue(factory, "PostTwoWithSharedUnprotectedState"); - rtc::Event done; + Event done; queue->PostTask([&state, &queue, &done] { // Post tasks from queue to guarantee, that 1st task won't be // executed before the second one will be posted. 
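The updated tests above lean on one small pattern throughout: an Event (formerly referenced as rtc::Event) signalled from a posted task and waited on with a TimeDelta timeout. A condensed sketch of that pattern, assuming the webrtc-namespace spelling used by the rewritten tests:

#include "api/task_queue/task_queue_base.h"
#include "api/units/time_delta.h"
#include "rtc_base/event.h"

// Returns true if `queue` executed the posted task within one second.
bool PostAndWait(webrtc::TaskQueueBase* queue) {
  webrtc::Event done;
  queue->PostTask([&done] { done.Set(); });
  return done.Wait(webrtc::TimeDelta::Seconds(1));
}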
diff --git a/api/task_queue/test/BUILD.gn b/api/task_queue/test/BUILD.gn index 25f7ed0c7f..65a590f67e 100644 --- a/api/task_queue/test/BUILD.gn +++ b/api/task_queue/test/BUILD.gn @@ -12,9 +12,10 @@ rtc_library("mock_task_queue_base") { testonly = true sources = [ "mock_task_queue_base.h" ] deps = [ + "../..:location", "../../../api/task_queue:task_queue", "../../../api/units:time_delta", "../../../test:test_support", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable" ] } diff --git a/api/task_queue/test/mock_task_queue_base.h b/api/task_queue/test/mock_task_queue_base.h index 0540afe16b..c70884a797 100644 --- a/api/task_queue/test/mock_task_queue_base.h +++ b/api/task_queue/test/mock_task_queue_base.h @@ -12,6 +12,7 @@ #define API_TASK_QUEUE_TEST_MOCK_TASK_QUEUE_BASE_H_ #include "absl/functional/any_invocable.h" +#include "api/location.h" #include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" #include "test/gmock.h" diff --git a/api/test/DEPS b/api/test/DEPS index 270b274c5f..b54b321b59 100644 --- a/api/test/DEPS +++ b/api/test/DEPS @@ -5,6 +5,9 @@ specific_include_rules = { ".*": [ "+video" ], + ".*": [ + "+rtc_base/ref_counted_object.h", + ], "dummy_peer_connection\.h": [ "+rtc_base/ref_counted_object.h", ], @@ -12,11 +15,12 @@ specific_include_rules = { "+system_wrappers/include/clock.h", ], "network_emulation_manager\.h": [ - "+rtc_base/thread.h", - "+rtc_base/network.h", "+rtc_base/network_constants.h", + "+rtc_base/ip_address.h", + "+rtc_base/socket_address.h", ], "peer_network_dependencies\.h": [ + "+rtc_base/socket_factory.h", "+rtc_base/network.h", "+rtc_base/thread.h", ], @@ -32,8 +36,32 @@ specific_include_rules = { "time_controller\.h": [ "+rtc_base/synchronization/yield_policy.h", "+system_wrappers/include/clock.h", + "+rtc_base/socket_server.h", ], "create_frame_generator\.h": [ "+system_wrappers/include/clock.h", ], + "mock_async_dns_resolver\.h": [ + "+rtc_base/socket_address.h", + ], + "mock_packet_socket_factory\.h": [ + "+rtc_base/async_packet_socket.h", + "+rtc_base/socket_address.h", + ], + "mock_peerconnectioninterface\.h": [ + "+rtc_base/thread.h", + ], + "mock_peer_connection_factory_interface\.h": [ + "+p2p/base/port_allocator.h", + "+rtc_base/rtc_certificate_generator.h", + ], + "mock_peerconnectioninterface\.h": [ + "+rtc_base/thread.h", + ], + "videocodec_test_fixture\.h": [ + "+modules/video_coding/codecs/h264/include/h264_globals.h", + ], + "rtc_error_matchers\.h": [ + "+test/gmock.h", + ], } diff --git a/api/test/audioproc_float.cc b/api/test/audioproc_float.cc index c8d7ff7193..c1b6042c28 100644 --- a/api/test/audioproc_float.cc +++ b/api/test/audioproc_float.cc @@ -10,34 +10,34 @@ #include "api/test/audioproc_float.h" +#include #include +#include "absl/base/nullability.h" +#include "api/audio/audio_processing.h" +#include "api/audio/builtin_audio_processing_builder.h" #include "modules/audio_processing/test/audioproc_float_impl.h" namespace webrtc { namespace test { -int AudioprocFloat(rtc::scoped_refptr audio_processing, - int argc, - char* argv[]) { - return AudioprocFloatImpl(std::move(audio_processing), argc, argv); +int AudioprocFloat(int argc, char* argv[]) { + return AudioprocFloatImpl(std::make_unique(), + argc, argv); } -int AudioprocFloat(std::unique_ptr ap_builder, - int argc, - char* argv[]) { - return AudioprocFloatImpl(std::move(ap_builder), argc, argv, - /*input_aecdump=*/"", - /*processed_capture_samples=*/nullptr); +int AudioprocFloat( 
+ absl_nonnull std::unique_ptr ap_builder, + int argc, + char* argv[]) { + return AudioprocFloatImpl(std::move(ap_builder), argc, argv); } -int AudioprocFloat(std::unique_ptr ap_builder, - int argc, - char* argv[], - absl::string_view input_aecdump, - std::vector* processed_capture_samples) { - return AudioprocFloatImpl(std::move(ap_builder), argc, argv, input_aecdump, - processed_capture_samples); +int AudioprocFloat( + absl_nonnull std::unique_ptr ap_builder, + int argc, + char* argv[]) { + return AudioprocFloatImpl(std::move(ap_builder), argc, argv); } } // namespace test diff --git a/api/test/audioproc_float.h b/api/test/audioproc_float.h index 1ef1c9828f..e065ef34c6 100644 --- a/api/test/audioproc_float.h +++ b/api/test/audioproc_float.h @@ -12,9 +12,10 @@ #define API_TEST_AUDIOPROC_FLOAT_H_ #include -#include -#include "modules/audio_processing/include/audio_processing.h" +#include "absl/base/nullability.h" +#include "api/audio/audio_processing.h" +#include "api/audio/builtin_audio_processing_builder.h" namespace webrtc { namespace test { @@ -22,49 +23,27 @@ namespace test { // This is an interface for the audio processing simulation utility. This // utility can be used to simulate the audioprocessing module using a recording // (either an AEC dump or wav files), and generate the output as a wav file. -// Any audio_processing object specified in the input is used for the -// simulation. The optional `audio_processing` object provides the -// AudioProcessing instance that is used during the simulation. Note that when -// the audio_processing object is specified all functionality that relies on -// using the AudioProcessingBuilder is deactivated, since the AudioProcessing -// object is already created and the builder is not used in the simulation. It -// is needed to pass the command line flags as `argc` and `argv`, so these can -// be interpreted properly by the utility. To see a list of all supported -// command line flags, run the executable with the '--help' flag. -int AudioprocFloat(rtc::scoped_refptr audio_processing, - int argc, - char* argv[]); +// +// It is needed to pass the command line flags as `argc` and `argv`, so these +// can be interpreted properly by the utility. To see a list of all supported +// command line flags, run the executable with the '--helpfull' flag. +// +// The optional `ap_builder` object will be used to create the AudioProcessing +// instance that is used during the simulation. BuiltinAudioProcessingBuilder +// `ap_builder` supports setting of injectable components, which will be passed +// on to the created AudioProcessing instance. When generic +// `AudioProcessingBuilderInterface` is used, all functionality that relies on +// using the BuiltinAudioProcessingBuilder is deactivated. +int AudioprocFloat(int argc, char* argv[]); +int AudioprocFloat( + absl_nonnull std::unique_ptr ap_builder, + int argc, + char* argv[]); +int AudioprocFloat( + absl_nonnull std::unique_ptr ap_builder, + int argc, + char* argv[]); -// This is an interface for the audio processing simulation utility. This -// utility can be used to simulate the audioprocessing module using a recording -// (either an AEC dump or wav files), and generate the output as a wav file. -// The `ap_builder` object will be used to create the AudioProcessing instance -// that is used during the simulation. The `ap_builder` supports setting of -// injectable components, which will be passed on to the created AudioProcessing -// instance. 
It is needed to pass the command line flags as `argc` and `argv`, -// so these can be interpreted properly by the utility. -// To get a fully-working audioproc_f utility, all that is needed is to write a -// main function, create an AudioProcessingBuilder, optionally set custom -// processing components on it, and pass the builder together with the command -// line arguments into this function. -// To see a list of all supported command line flags, run the executable with -// the '--help' flag. -int AudioprocFloat(std::unique_ptr ap_builder, - int argc, - char* argv[]); - -// Interface for the audio processing simulation utility, which is similar to -// the one above, but which adds the option of receiving the input as a string -// and returning the output as an array. The first three arguments fulfill the -// same purpose as above. Pass the `input_aecdump` to provide the content of an -// AEC dump file as a string. After the simulation is completed, -// `processed_capture_samples` will contain the the samples processed on the -// capture side. -int AudioprocFloat(std::unique_ptr ap_builder, - int argc, - char* argv[], - absl::string_view input_aecdump, - std::vector* processed_capture_samples); } // namespace test } // namespace webrtc diff --git a/api/test/compile_all_headers.cc b/api/test/compile_all_headers.cc index 1fcf63e97b..ca586b39b1 100644 --- a/api/test/compile_all_headers.cc +++ b/api/test/compile_all_headers.cc @@ -27,6 +27,7 @@ // "api/test/videocodec_test_fixture.h" // "api/test/videocodec_test_stats.h" +// IWYU pragma: begin_keep #include "api/test/fake_frame_decryptor.h" #include "api/test/fake_frame_encryptor.h" #include "api/test/mock_async_dns_resolver.h" @@ -43,6 +44,7 @@ #include "api/test/mock_rtpreceiver.h" #include "api/test/mock_rtpsender.h" #include "api/test/mock_session_description_interface.h" +#include "api/test/mock_transformable_frame.h" #include "api/test/mock_transformable_video_frame.h" #include "api/test/mock_video_bitrate_allocator.h" #include "api/test/mock_video_bitrate_allocator_factory.h" @@ -51,3 +53,4 @@ #include "api/test/mock_video_encoder.h" #include "api/test/mock_video_encoder_factory.h" #include "api/test/mock_video_track.h" +// IWYU pragma: end_keep diff --git a/api/test/create_frame_generator.cc b/api/test/create_frame_generator.cc index 5e6fb3228b..e9e41dc369 100644 --- a/api/test/create_frame_generator.cc +++ b/api/test/create_frame_generator.cc @@ -10,10 +10,19 @@ #include "api/test/create_frame_generator.h" +#include #include -#include +#include +#include +#include +#include +#include "absl/base/nullability.h" +#include "absl/strings/string_view.h" +#include "api/environment/environment.h" +#include "api/test/frame_generator_interface.h" #include "rtc_base/checks.h" +#include "system_wrappers/include/clock.h" #include "test/frame_generator.h" #include "test/testsupport/ivf_video_frame_generator.h" @@ -23,8 +32,8 @@ namespace test { std::unique_ptr CreateSquareFrameGenerator( int width, int height, - absl::optional type, - absl::optional num_squares) { + std::optional type, + std::optional num_squares) { return std::make_unique( width, height, type.value_or(FrameGeneratorInterface::OutputType::kI420), num_squares.value_or(10)); @@ -64,9 +73,11 @@ std::unique_ptr CreateFromNV12FileFrameGenerator( frame_repeat_count); } -std::unique_ptr CreateFromIvfFileFrameGenerator( - std::string filename) { - return std::make_unique(std::move(filename)); +absl_nonnull std::unique_ptr +CreateFromIvfFileFrameGenerator(const Environment& env, + 
absl::string_view filename, + std::optional fps_hint) { + return std::make_unique(env, filename, fps_hint); } std::unique_ptr diff --git a/api/test/create_frame_generator.h b/api/test/create_frame_generator.h index 70be0c4e8e..0ed83c2bb0 100644 --- a/api/test/create_frame_generator.h +++ b/api/test/create_frame_generator.h @@ -11,11 +11,16 @@ #ifndef API_TEST_CREATE_FRAME_GENERATOR_H_ #define API_TEST_CREATE_FRAME_GENERATOR_H_ +#include +#include #include +#include #include #include -#include "absl/types/optional.h" +#include "absl/base/nullability.h" +#include "absl/strings/string_view.h" +#include "api/environment/environment.h" #include "api/test/frame_generator_interface.h" #include "system_wrappers/include/clock.h" @@ -29,8 +34,8 @@ namespace test { std::unique_ptr CreateSquareFrameGenerator( int width, int height, - absl::optional type, - absl::optional num_squares); + std::optional type, + std::optional num_squares); // Creates a frame generator that repeatedly plays a set of yuv files. // The frame_repeat_count determines how many times each frame is shown, @@ -50,9 +55,10 @@ std::unique_ptr CreateFromNV12FileFrameGenerator( size_t height, int frame_repeat_count = 1); -// Creates a frame generator that repeatedly plays an ivf file. -std::unique_ptr CreateFromIvfFileFrameGenerator( - std::string filename); +absl_nonnull std::unique_ptr +CreateFromIvfFileFrameGenerator(const Environment& env, + absl::string_view filename, + std::optional fps_hint = std::nullopt); // Creates a frame generator which takes a set of yuv files (wrapping a // frame generator created by CreateFromYuvFile() above), but outputs frames diff --git a/api/test/create_network_emulation_manager.cc b/api/test/create_network_emulation_manager.cc index f5d5a1bc88..488481d338 100644 --- a/api/test/create_network_emulation_manager.cc +++ b/api/test/create_network_emulation_manager.cc @@ -12,16 +12,27 @@ #include "api/test/create_network_emulation_manager.h" #include +#include +#include "api/field_trials_view.h" +#include "api/test/network_emulation_manager.h" #include "test/network/network_emulation_manager.h" namespace webrtc { +std::unique_ptr CreateNetworkEmulationManager( + NetworkEmulationManagerConfig config) { + return std::make_unique(std::move(config)); +} + std::unique_ptr CreateNetworkEmulationManager( TimeMode time_mode, - EmulatedNetworkStatsGatheringMode stats_gathering_mode) { - return std::make_unique( - time_mode, stats_gathering_mode); + EmulatedNetworkStatsGatheringMode stats_gathering_mode, + const FieldTrialsView* field_trials) { + return CreateNetworkEmulationManager( + {.time_mode = time_mode, + .stats_gathering_mode = stats_gathering_mode, + .field_trials = field_trials}); } } // namespace webrtc diff --git a/api/test/create_network_emulation_manager.h b/api/test/create_network_emulation_manager.h index 941b2b1c52..d27d0abe71 100644 --- a/api/test/create_network_emulation_manager.h +++ b/api/test/create_network_emulation_manager.h @@ -13,15 +13,22 @@ #include +#include "api/field_trials_view.h" #include "api/test/network_emulation_manager.h" namespace webrtc { // Returns a non-null NetworkEmulationManager instance. 
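A sketch of calling the new config-struct overload of CreateNetworkEmulationManager(); the field names mirror the designated initializers used in create_network_emulation_manager.cc, and TimeMode::kSimulated is an assumed illustrative value.

#include <memory>
#include <utility>

#include "api/test/create_network_emulation_manager.h"
#include "api/test/network_emulation_manager.h"

std::unique_ptr<webrtc::NetworkEmulationManager> MakeSimulatedNetwork() {
  webrtc::NetworkEmulationManagerConfig config;
  config.time_mode = webrtc::TimeMode::kSimulated;
  // stats_gathering_mode and field_trials keep their defaults.
  return webrtc::CreateNetworkEmulationManager(std::move(config));
}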
std::unique_ptr CreateNetworkEmulationManager( - TimeMode time_mode = TimeMode::kRealTime, + NetworkEmulationManagerConfig config = NetworkEmulationManagerConfig()); + +[[deprecated("Use version with NetworkEmulationManagerConfig)")]] // +std::unique_ptr +CreateNetworkEmulationManager( + TimeMode time_mode, EmulatedNetworkStatsGatheringMode stats_gathering_mode = - EmulatedNetworkStatsGatheringMode::kDefault); + EmulatedNetworkStatsGatheringMode::kDefault, + const FieldTrialsView* field_trials = nullptr); } // namespace webrtc diff --git a/api/test/create_peer_connection_quality_test_frame_generator.cc b/api/test/create_peer_connection_quality_test_frame_generator.cc index a1c53635f9..b99f1bd834 100644 --- a/api/test/create_peer_connection_quality_test_frame_generator.cc +++ b/api/test/create_peer_connection_quality_test_frame_generator.cc @@ -10,12 +10,18 @@ #include "api/test/create_peer_connection_quality_test_frame_generator.h" +#include +#include +#include #include #include #include "api/test/create_frame_generator.h" +#include "api/test/frame_generator_interface.h" #include "api/test/pclf/media_configuration.h" +#include "api/units/time_delta.h" #include "rtc_base/checks.h" +#include "system_wrappers/include/clock.h" #include "test/testsupport/file_utils.h" namespace webrtc { @@ -49,9 +55,9 @@ void ValidateScreenShareConfig(const VideoConfig& video_config, std::unique_ptr CreateSquareFrameGenerator( const VideoConfig& video_config, - absl::optional type) { + std::optional type) { return test::CreateSquareFrameGenerator( - video_config.width, video_config.height, std::move(type), absl::nullopt); + video_config.width, video_config.height, std::move(type), std::nullopt); } std::unique_ptr CreateFromYuvFileFrameGenerator( diff --git a/api/test/create_peer_connection_quality_test_frame_generator.h b/api/test/create_peer_connection_quality_test_frame_generator.h index 62043d140a..a8515079f5 100644 --- a/api/test/create_peer_connection_quality_test_frame_generator.h +++ b/api/test/create_peer_connection_quality_test_frame_generator.h @@ -11,9 +11,9 @@ #define API_TEST_CREATE_PEER_CONNECTION_QUALITY_TEST_FRAME_GENERATOR_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/test/frame_generator_interface.h" #include "api/test/pclf/media_configuration.h" @@ -26,7 +26,7 @@ namespace webrtc_pc_e2e { // weight and height. std::unique_ptr CreateSquareFrameGenerator( const VideoConfig& video_config, - absl::optional type); + std::optional type); // Creates a frame generator that plays frames from the yuv file. 
std::unique_ptr CreateFromYuvFileFrameGenerator( diff --git a/api/test/create_peerconnection_quality_test_fixture.cc b/api/test/create_peerconnection_quality_test_fixture.cc index e156991ed4..fd8e2cfaa7 100644 --- a/api/test/create_peerconnection_quality_test_fixture.cc +++ b/api/test/create_peerconnection_quality_test_fixture.cc @@ -11,10 +11,14 @@ #include "api/test/create_peerconnection_quality_test_fixture.h" #include +#include #include +#include "api/test/audio_quality_analyzer_interface.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" +#include "api/test/peerconnection_quality_test_fixture.h" #include "api/test/time_controller.h" +#include "api/test/video_quality_analyzer_interface.h" #include "test/pc/e2e/peer_connection_quality_test.h" namespace webrtc { diff --git a/api/test/create_peerconnection_quality_test_fixture.h b/api/test/create_peerconnection_quality_test_fixture.h index a0b0d08dd4..c6593fc540 100644 --- a/api/test/create_peerconnection_quality_test_fixture.h +++ b/api/test/create_peerconnection_quality_test_fixture.h @@ -27,7 +27,7 @@ namespace webrtc_pc_e2e { // During the test Alice will be caller and Bob will answer the call. // `test_case_name` is a name of test case, that will be used for all metrics // reporting. -// `time_controller` is used to manage all rtc::Thread's and TaskQueue +// `time_controller` is used to manage all webrtc::Thread's and TaskQueue // instances. Instance of `time_controller` have to outlive created fixture. // Returns a non-null PeerConnectionE2EQualityTestFixture instance. std::unique_ptr diff --git a/api/test/create_simulcast_test_fixture.cc b/api/test/create_simulcast_test_fixture.cc index 024145dff0..164395f946 100644 --- a/api/test/create_simulcast_test_fixture.cc +++ b/api/test/create_simulcast_test_fixture.cc @@ -14,6 +14,9 @@ #include #include "api/test/simulcast_test_fixture.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" #include "modules/video_coding/utility/simulcast_test_fixture_impl.h" namespace webrtc { diff --git a/api/test/create_time_controller.cc b/api/test/create_time_controller.cc index d198f2b0fe..9754f70ea8 100644 --- a/api/test/create_time_controller.cc +++ b/api/test/create_time_controller.cc @@ -11,43 +11,63 @@ #include "api/test/create_time_controller.h" #include +#include +#include "absl/base/nullability.h" +#include "api/enable_media_with_defaults.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/peer_connection_interface.h" +#include "api/test/time_controller.h" +#include "api/units/timestamp.h" #include "call/call.h" -#include "call/rtp_transport_config.h" -#include "call/rtp_transport_controller_send_factory_interface.h" -#include "test/time_controller/external_time_controller.h" +#include "call/call_config.h" +#include "media/base/media_engine.h" +#include "pc/media_factory.h" +#include "rtc_base/checks.h" +#include "system_wrappers/include/clock.h" #include "test/time_controller/simulated_time_controller.h" namespace webrtc { -std::unique_ptr CreateTimeController( - ControlledAlarmClock* alarm) { - return std::make_unique(alarm); -} - std::unique_ptr CreateSimulatedTimeController() { return std::make_unique( Timestamp::Seconds(10000)); } -std::unique_ptr CreateTimeControllerBasedCallFactory( - TimeController* time_controller) { - class TimeControllerBasedCallFactory : public CallFactoryInterface { +void 
EnableMediaWithDefaultsAndTimeController( + TimeController& time_controller, + PeerConnectionFactoryDependencies& deps) { + class TimeControllerBasedFactory : public MediaFactory { public: - explicit TimeControllerBasedCallFactory(TimeController* time_controller) - : time_controller_(time_controller) {} - Call* CreateCall(const Call::Config& config) override { - RtpTransportConfig transportConfig = config.ExtractTransportConfig(); - - return Call::Create(config, time_controller_->GetClock(), - config.rtp_transport_controller_send_factory->Create( - transportConfig, time_controller_->GetClock())); + TimeControllerBasedFactory( + Clock* absl_nonnull clock, + absl_nonnull std::unique_ptr media_factory) + : clock_(clock), media_factory_(std::move(media_factory)) {} + + std::unique_ptr CreateCall(CallConfig config) override { + EnvironmentFactory env_factory(config.env); + env_factory.Set(clock_); + + config.env = env_factory.Create(); + return media_factory_->CreateCall(std::move(config)); + } + + std::unique_ptr CreateMediaEngine( + const Environment& env, + PeerConnectionFactoryDependencies& dependencies) override { + return media_factory_->CreateMediaEngine(env, dependencies); } private: - TimeController* time_controller_; + Clock* absl_nonnull clock_; + absl_nonnull std::unique_ptr media_factory_; }; - return std::make_unique(time_controller); + + EnableMediaWithDefaults(deps); + RTC_CHECK(deps.media_factory); + deps.media_factory = std::make_unique( + time_controller.GetClock(), std::move(deps.media_factory)); } } // namespace webrtc diff --git a/api/test/create_time_controller.h b/api/test/create_time_controller.h index e7bc9cb465..7d1d9b3800 100644 --- a/api/test/create_time_controller.h +++ b/api/test/create_time_controller.h @@ -12,22 +12,20 @@ #include -#include "api/call/call_factory_interface.h" +#include "api/peer_connection_interface.h" #include "api/test/time_controller.h" namespace webrtc { -// Creates a time coltroller that wraps `alarm`. -std::unique_ptr CreateTimeController( - ControlledAlarmClock* alarm); - // Creates a time controller that runs in simulated time. std::unique_ptr CreateSimulatedTimeController(); -// This is creates a call factory that creates Call instances that are backed by -// a time controller. -std::unique_ptr CreateTimeControllerBasedCallFactory( - TimeController* time_controller); +// Adjusts media `deps` to use clock `time_controller` provides, fills media +// related dependencies, and enables media support for a PeerConnectionFactory +// created from `deps`. +void EnableMediaWithDefaultsAndTimeController( + TimeController& time_controller, + PeerConnectionFactoryDependencies& deps); } // namespace webrtc diff --git a/api/test/create_time_controller_unittest.cc b/api/test/create_time_controller_unittest.cc deleted file mode 100644 index 0ea868c5cc..0000000000 --- a/api/test/create_time_controller_unittest.cc +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "api/test/create_time_controller.h" - -#include "api/test/time_controller.h" -#include "api/units/time_delta.h" -#include "api/units/timestamp.h" -#include "test/gmock.h" -#include "test/gtest.h" - -namespace webrtc { -namespace { - -class FakeAlarm : public ControlledAlarmClock { - public: - explicit FakeAlarm(Timestamp start_time); - - Clock* GetClock() override; - bool ScheduleAlarmAt(Timestamp deadline) override; - void SetCallback(std::function callback) override; - void Sleep(TimeDelta duration) override; - - private: - SimulatedClock clock_; - Timestamp deadline_; - std::function callback_; -}; - -FakeAlarm::FakeAlarm(Timestamp start_time) - : clock_(start_time), - deadline_(Timestamp::PlusInfinity()), - callback_([] {}) {} - -Clock* FakeAlarm::GetClock() { - return &clock_; -} - -bool FakeAlarm::ScheduleAlarmAt(Timestamp deadline) { - if (deadline < deadline_) { - deadline_ = deadline; - return true; - } - return false; -} - -void FakeAlarm::SetCallback(std::function callback) { - callback_ = callback; -} - -void FakeAlarm::Sleep(TimeDelta duration) { - Timestamp end_time = clock_.CurrentTime() + duration; - - while (deadline_ <= end_time) { - clock_.AdvanceTime(deadline_ - clock_.CurrentTime()); - deadline_ = Timestamp::PlusInfinity(); - callback_(); - } - - clock_.AdvanceTime(end_time - clock_.CurrentTime()); -} - -TEST(CreateTimeControllerTest, CreatesNonNullController) { - FakeAlarm alarm(Timestamp::Millis(100)); - EXPECT_NE(CreateTimeController(&alarm), nullptr); -} - -} // namespace -} // namespace webrtc diff --git a/api/test/create_video_codec_tester.cc b/api/test/create_video_codec_tester.cc deleted file mode 100644 index a1efefdb48..0000000000 --- a/api/test/create_video_codec_tester.cc +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "api/test/create_video_codec_tester.h" - -#include -#include - -#include "api/test/video_codec_tester.h" -#include "modules/video_coding/codecs/test/video_codec_tester_impl.h" - -namespace webrtc { -namespace test { - -std::unique_ptr CreateVideoCodecTester() { - return std::make_unique(); -} - -} // namespace test -} // namespace webrtc diff --git a/api/test/create_video_quality_test_fixture.cc b/api/test/create_video_quality_test_fixture.cc index 1fa7d243cc..1cfe228423 100644 --- a/api/test/create_video_quality_test_fixture.cc +++ b/api/test/create_video_quality_test_fixture.cc @@ -13,6 +13,8 @@ #include #include +#include "api/fec_controller.h" +#include "api/test/video_quality_test_fixture.h" #include "video/video_quality_test.h" namespace webrtc { diff --git a/api/test/create_videocodec_test_fixture.cc b/api/test/create_videocodec_test_fixture.cc index 1f618e5db8..f8a035febc 100644 --- a/api/test/create_videocodec_test_fixture.cc +++ b/api/test/create_videocodec_test_fixture.cc @@ -14,6 +14,8 @@ #include #include "api/test/videocodec_test_fixture.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" #include "modules/video_coding/codecs/test/videocodec_test_fixture_impl.h" namespace webrtc { diff --git a/api/test/fake_frame_decryptor.cc b/api/test/fake_frame_decryptor.cc index 16cb1bd3b6..ad635467f3 100644 --- a/api/test/fake_frame_decryptor.cc +++ b/api/test/fake_frame_decryptor.cc @@ -10,8 +10,12 @@ #include "api/test/fake_frame_decryptor.h" +#include +#include #include +#include "api/array_view.h" +#include "api/media_types.h" #include "rtc_base/checks.h" namespace webrtc { @@ -21,11 +25,11 @@ FakeFrameDecryptor::FakeFrameDecryptor(uint8_t fake_key, : fake_key_(fake_key), expected_postfix_byte_(expected_postfix_byte) {} FakeFrameDecryptor::Result FakeFrameDecryptor::Decrypt( - cricket::MediaType media_type, - const std::vector& csrcs, - rtc::ArrayView additional_data, - rtc::ArrayView encrypted_frame, - rtc::ArrayView frame) { + webrtc::MediaType /* media_type */, + const std::vector& /* csrcs */, + ArrayView /* additional_data */, + ArrayView encrypted_frame, + ArrayView frame) { if (fail_decryption_) { return Result(Status::kFailedToDecrypt, 0); } @@ -43,7 +47,7 @@ FakeFrameDecryptor::Result FakeFrameDecryptor::Decrypt( } size_t FakeFrameDecryptor::GetMaxPlaintextByteSize( - cricket::MediaType media_type, + webrtc::MediaType /* media_type */, size_t encrypted_frame_size) { return encrypted_frame_size - 1; } diff --git a/api/test/fake_frame_decryptor.h b/api/test/fake_frame_decryptor.h index 783bc805c4..f204a91aeb 100644 --- a/api/test/fake_frame_decryptor.h +++ b/api/test/fake_frame_decryptor.h @@ -34,13 +34,13 @@ class FakeFrameDecryptor : public FrameDecryptorInterface { uint8_t expected_postfix_byte = 255); // Fake decryption that just xors the payload with the 1 byte key and checks // the postfix byte. This will always fail if fail_decryption_ is set to true. - Result Decrypt(cricket::MediaType media_type, + Result Decrypt(webrtc::MediaType media_type, const std::vector& csrcs, - rtc::ArrayView additional_data, - rtc::ArrayView encrypted_frame, - rtc::ArrayView frame) override; + ArrayView additional_data, + ArrayView encrypted_frame, + ArrayView frame) override; // Always returns 1 less than the size of the encrypted frame. 
- size_t GetMaxPlaintextByteSize(cricket::MediaType media_type, + size_t GetMaxPlaintextByteSize(webrtc::MediaType media_type, size_t encrypted_frame_size) override; // Sets the fake key to use for encryption. void SetFakeKey(uint8_t fake_key); diff --git a/api/test/fake_frame_encryptor.cc b/api/test/fake_frame_encryptor.cc index 89d14aab88..b988245b99 100644 --- a/api/test/fake_frame_encryptor.cc +++ b/api/test/fake_frame_encryptor.cc @@ -10,6 +10,11 @@ #include "api/test/fake_frame_encryptor.h" +#include +#include + +#include "api/array_view.h" +#include "api/media_types.h" #include "rtc_base/checks.h" namespace webrtc { @@ -17,11 +22,11 @@ FakeFrameEncryptor::FakeFrameEncryptor(uint8_t fake_key, uint8_t postfix_byte) : fake_key_(fake_key), postfix_byte_(postfix_byte) {} // FrameEncryptorInterface implementation -int FakeFrameEncryptor::Encrypt(cricket::MediaType media_type, - uint32_t ssrc, - rtc::ArrayView additional_data, - rtc::ArrayView frame, - rtc::ArrayView encrypted_frame, +int FakeFrameEncryptor::Encrypt(webrtc::MediaType /* media_type */, + uint32_t /* ssrc */, + ArrayView /* additional_data */, + ArrayView frame, + ArrayView encrypted_frame, size_t* bytes_written) { if (fail_encryption_) { return static_cast(FakeEncryptionStatus::FORCED_FAILURE); @@ -38,7 +43,7 @@ int FakeFrameEncryptor::Encrypt(cricket::MediaType media_type, } size_t FakeFrameEncryptor::GetMaxCiphertextByteSize( - cricket::MediaType media_type, + webrtc::MediaType /* media_type */, size_t frame_size) { return frame_size + 1; } diff --git a/api/test/fake_frame_encryptor.h b/api/test/fake_frame_encryptor.h index 074981b183..4cb615b00d 100644 --- a/api/test/fake_frame_encryptor.h +++ b/api/test/fake_frame_encryptor.h @@ -25,22 +25,21 @@ namespace webrtc { // FrameEncryptorInterface. It is constructed with a simple single digit key and // a fixed postfix byte. This is just to validate that the core code works // as expected. -class FakeFrameEncryptor - : public rtc::RefCountedObject { +class FakeFrameEncryptor : public RefCountedObject { public: // Provide a key (0,255) and some postfix byte (0,255). explicit FakeFrameEncryptor(uint8_t fake_key = 0xAA, uint8_t postfix_byte = 255); // Simply xors each payload with the provided fake key and adds the postfix // bit to the end. This will always fail if fail_encryption_ is set to true. - int Encrypt(cricket::MediaType media_type, + int Encrypt(webrtc::MediaType media_type, uint32_t ssrc, - rtc::ArrayView additional_data, - rtc::ArrayView frame, - rtc::ArrayView encrypted_frame, + ArrayView additional_data, + ArrayView frame, + ArrayView encrypted_frame, size_t* bytes_written) override; // Always returns 1 more than the size of the frame. - size_t GetMaxCiphertextByteSize(cricket::MediaType media_type, + size_t GetMaxCiphertextByteSize(webrtc::MediaType media_type, size_t frame_size) override; // Sets the fake key to use during encryption. 
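// --- Editor's note: illustrative usage sketch, not part of the patch ----------
// The fake_frame_decryptor / fake_frame_encryptor hunks above migrate from
// cricket::MediaType and rtc::ArrayView to webrtc::MediaType and ArrayView. The
// round trip below shows the updated spellings; the ArrayView element types, the
// MediaType::VIDEO enumerator, and Result::IsOk() are assumptions, since the
// template arguments are not visible in the diff as rendered here.
#include <cstddef>
#include <cstdint>
#include <vector>

#include "api/media_types.h"
#include "api/test/fake_frame_decryptor.h"
#include "api/test/fake_frame_encryptor.h"

bool RoundTripsThroughFakes() {
  webrtc::FakeFrameEncryptor encryptor(/*fake_key=*/0xAA, /*postfix_byte=*/255);
  webrtc::FakeFrameDecryptor decryptor(/*fake_key=*/0xAA,
                                       /*expected_postfix_byte=*/255);

  const std::vector<uint8_t> frame = {1, 2, 3, 4};
  std::vector<uint8_t> encrypted(encryptor.GetMaxCiphertextByteSize(
      webrtc::MediaType::VIDEO, frame.size()));
  size_t bytes_written = 0;
  encryptor.Encrypt(webrtc::MediaType::VIDEO, /*ssrc=*/1, /*additional_data=*/{},
                    frame, encrypted, &bytes_written);

  std::vector<uint8_t> decrypted(decryptor.GetMaxPlaintextByteSize(
      webrtc::MediaType::VIDEO, encrypted.size()));
  return decryptor
      .Decrypt(webrtc::MediaType::VIDEO, /*csrcs=*/{}, /*additional_data=*/{},
               encrypted, decrypted)
      .IsOk();
}
// ------------------------------------------------------------------------------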
void SetFakeKey(uint8_t fake_key); diff --git a/api/test/frame_generator_interface.cc b/api/test/frame_generator_interface.cc index fe7b1e883d..5779158345 100644 --- a/api/test/frame_generator_interface.cc +++ b/api/test/frame_generator_interface.cc @@ -10,6 +10,8 @@ #include "api/test/frame_generator_interface.h" +#include "rtc_base/checks.h" + namespace webrtc { namespace test { diff --git a/api/test/frame_generator_interface.h b/api/test/frame_generator_interface.h index a8a88a8a04..d575771510 100644 --- a/api/test/frame_generator_interface.h +++ b/api/test/frame_generator_interface.h @@ -11,9 +11,10 @@ #ifndef API_TEST_FRAME_GENERATOR_INTERFACE_H_ #define API_TEST_FRAME_GENERATOR_INTERFACE_H_ +#include +#include #include -#include "absl/types/optional.h" #include "api/scoped_refptr.h" #include "api/video/video_frame.h" #include "api/video/video_frame_buffer.h" @@ -28,12 +29,12 @@ class FrameGeneratorInterface { size_t height; }; struct VideoFrameData { - VideoFrameData(rtc::scoped_refptr buffer, - absl::optional update_rect) + VideoFrameData(scoped_refptr buffer, + std::optional update_rect) : buffer(std::move(buffer)), update_rect(update_rect) {} - rtc::scoped_refptr buffer; - absl::optional update_rect; + scoped_refptr buffer; + std::optional update_rect; }; enum class OutputType { kI420, kI420A, kI010, kNV12 }; @@ -44,6 +45,9 @@ class FrameGeneratorInterface { // Returns VideoFrameBuffer and area where most of update was done to set them // on the VideoFrame object. virtual VideoFrameData NextFrame() = 0; + // Skips the next frame in case it doesn't need to be encoded. + // Default implementation is to call NextFrame and ignore the returned value. + virtual void SkipNextFrame() { NextFrame(); } // Change the capture resolution. virtual void ChangeResolution(size_t width, size_t height) = 0; @@ -52,8 +56,8 @@ class FrameGeneratorInterface { // Returns the frames per second this generator is supposed to provide // according to its data source. Not all frame generators know the frames per - // second of the data source, in such case this method returns absl::nullopt. - virtual absl::optional fps() const = 0; + // second of the data source, in such case this method returns std::nullopt. 
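// --- Editor's note: illustrative usage sketch, not part of the patch ----------
// SkipNextFrame(), added to FrameGeneratorInterface above, lets a test advance
// the generator without materializing a frame it will not encode. A
// frame-dropping capture loop could use it as below; the should_encode_frame
// callback is a hypothetical predicate, not part of the WebRTC API.
#include "api/test/frame_generator_interface.h"

void PumpThirtyFrames(webrtc::test::FrameGeneratorInterface& generator,
                      bool (*should_encode_frame)()) {
  for (int i = 0; i < 30; ++i) {
    if (should_encode_frame()) {
      webrtc::test::FrameGeneratorInterface::VideoFrameData frame =
          generator.NextFrame();
      // Hand frame.buffer to the encoder under test here.
    } else {
      // The default implementation simply calls NextFrame() and drops the
      // result, so existing generators keep working unchanged.
      generator.SkipNextFrame();
    }
  }
}
// ------------------------------------------------------------------------------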
+ virtual std::optional fps() const = 0; }; } // namespace test diff --git a/api/test/metrics/BUILD.gn b/api/test/metrics/BUILD.gn index 309b699329..3f93d22fa8 100644 --- a/api/test/metrics/BUILD.gn +++ b/api/test/metrics/BUILD.gn @@ -49,8 +49,10 @@ rtc_library("metric") { "metric.cc", "metric.h", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - deps = [ "../../../api/units:timestamp" ] + deps = [ + "../../units:timestamp", + "//third_party/abseil-cpp/absl/strings:string_view", + ] } rtc_library("metrics_logger") { @@ -62,12 +64,13 @@ rtc_library("metrics_logger") { deps = [ ":metric", ":metrics_accumulator", - "../..:array_view", + "../../../rtc_base:macromagic", "../../../rtc_base/synchronization:mutex", "../../../system_wrappers", "../../numerics", + "../../units:timestamp", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("metrics_accumulator") { @@ -82,8 +85,8 @@ rtc_library("metrics_accumulator") { "../../../rtc_base/synchronization:mutex", "../../numerics", "../../units:timestamp", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("metrics_exporter") { @@ -107,7 +110,6 @@ rtc_library("stdout_metrics_exporter") { "../..:array_view", "../../../rtc_base:stringutils", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("chrome_perf_dashboard_metrics_exporter") { @@ -120,13 +122,11 @@ rtc_library("chrome_perf_dashboard_metrics_exporter") { deps = [ ":metric", ":metrics_exporter", - "../../../api:array_view", + "../..:array_view", "../../../test:fileutils", "../../../test:perf_test", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -152,6 +152,7 @@ rtc_library("metrics_set_proto_file_exporter") { "../..:array_view", "../../../rtc_base:logging", "../../../test:fileutils", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (rtc_enable_protobuf) { @@ -171,6 +172,7 @@ rtc_library("print_result_proxy_metrics_exporter") { ":metrics_exporter", "../..:array_view", "../../../test:perf_test", + "../../numerics", ] } @@ -181,6 +183,7 @@ rtc_library("global_metrics_logger_and_exporter") { "global_metrics_logger_and_exporter.h", ] deps = [ + ":metric", ":metrics_exporter", ":metrics_logger", "../../../rtc_base:checks", @@ -198,8 +201,8 @@ if (rtc_include_tests) { "../../../system_wrappers", "../../../test:test_support", "../../numerics", + "../../units:timestamp", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("metrics_accumulator_test") { @@ -211,7 +214,6 @@ if (rtc_include_tests) { "../../../test:test_support", "../../units:timestamp", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("stdout_metrics_exporter_test") { @@ -244,10 +246,10 @@ if (rtc_include_tests) { ":metric", ":metrics_exporter", ":metrics_logger", + "../..:array_view", "../../../system_wrappers", "../../../test:test_support", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } if (rtc_enable_protobuf) { @@ -271,9 +273,9 @@ if (rtc_include_tests) { deps = [ ":chrome_perf_dashboard_metrics_exporter", ":metric", - "../../../api/units:timestamp", "../../../test:fileutils", "../../../test:test_support", + "../../units:timestamp", "//third_party/catapult/tracing/tracing:histogram", ] } diff --git a/api/test/metrics/DEPS 
b/api/test/metrics/DEPS index 74889c61c7..9edcc370ed 100644 --- a/api/test/metrics/DEPS +++ b/api/test/metrics/DEPS @@ -5,10 +5,14 @@ specific_include_rules = { ], "metrics_logger\.h": [ "+rtc_base/synchronization/mutex.h", + "+rtc_base/thread_annotations.h", "+system_wrappers/include/clock.h", ], "metrics_accumulator\.h": [ "+rtc_base/synchronization/mutex.h", "+rtc_base/thread_annotations.h", ], + "chrome_perf_dashboard_metrics_exporter_test.cc": [ + "+tracing/tracing/proto/histogram.pb.h" + ], } diff --git a/api/test/metrics/chrome_perf_dashboard_metrics_exporter.cc b/api/test/metrics/chrome_perf_dashboard_metrics_exporter.cc index 018d110b12..b03185d4b0 100644 --- a/api/test/metrics/chrome_perf_dashboard_metrics_exporter.cc +++ b/api/test/metrics/chrome_perf_dashboard_metrics_exporter.cc @@ -20,6 +20,7 @@ #include "api/array_view.h" #include "api/test/metrics/metric.h" #include "test/testsupport/file_utils.h" +#include "test/testsupport/perf_test.h" #include "test/testsupport/perf_test_histogram_writer.h" #include "test/testsupport/perf_test_result_writer.h" @@ -101,7 +102,7 @@ ChromePerfDashboardMetricsExporter::ChromePerfDashboardMetricsExporter( : export_file_path_(export_file_path) {} bool ChromePerfDashboardMetricsExporter::Export( - rtc::ArrayView metrics) { + ArrayView metrics) { std::unique_ptr writer = absl::WrapUnique(CreateHistogramWriter()); for (const Metric& metric : metrics) { diff --git a/api/test/metrics/chrome_perf_dashboard_metrics_exporter.h b/api/test/metrics/chrome_perf_dashboard_metrics_exporter.h index dda17a08c6..10447510a8 100644 --- a/api/test/metrics/chrome_perf_dashboard_metrics_exporter.h +++ b/api/test/metrics/chrome_perf_dashboard_metrics_exporter.h @@ -29,7 +29,7 @@ class ChromePerfDashboardMetricsExporter : public MetricsExporter { absl::string_view export_file_path); ~ChromePerfDashboardMetricsExporter() override = default; - bool Export(rtc::ArrayView metrics) override; + bool Export(ArrayView metrics) override; private: const std::string export_file_path_; diff --git a/api/test/metrics/chrome_perf_dashboard_metrics_exporter_test.cc b/api/test/metrics/chrome_perf_dashboard_metrics_exporter_test.cc index 5d3136f49a..ae1ab84b30 100644 --- a/api/test/metrics/chrome_perf_dashboard_metrics_exporter_test.cc +++ b/api/test/metrics/chrome_perf_dashboard_metrics_exporter_test.cc @@ -10,7 +10,10 @@ #include "api/test/metrics/chrome_perf_dashboard_metrics_exporter.h" #include +#include +#include #include +#include #include #include "api/test/metrics/metric.h" diff --git a/api/test/metrics/global_metrics_logger_and_exporter.cc b/api/test/metrics/global_metrics_logger_and_exporter.cc index 2d42a976aa..b9757e4f19 100644 --- a/api/test/metrics/global_metrics_logger_and_exporter.cc +++ b/api/test/metrics/global_metrics_logger_and_exporter.cc @@ -10,12 +10,11 @@ #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include -#include #include +#include "api/test/metrics/metric.h" #include "api/test/metrics/metrics_exporter.h" #include "api/test/metrics/metrics_logger.h" -#include "rtc_base/checks.h" #include "system_wrappers/include/clock.h" namespace webrtc { diff --git a/api/test/metrics/global_metrics_logger_and_exporter_test.cc b/api/test/metrics/global_metrics_logger_and_exporter_test.cc index 567b3da9e3..bf462a6f3f 100644 --- a/api/test/metrics/global_metrics_logger_and_exporter_test.cc +++ b/api/test/metrics/global_metrics_logger_and_exporter_test.cc @@ -11,11 +11,12 @@ #include #include +#include #include #include #include -#include 
"absl/types/optional.h" +#include "api/array_view.h" #include "api/test/metrics/metric.h" #include "api/test/metrics/metrics_exporter.h" #include "api/test/metrics/metrics_logger.h" @@ -53,7 +54,7 @@ struct TestMetricsExporterFactory { : factory_(factory), export_result_(export_result) {} ~TestMetricsExporter() override = default; - bool Export(rtc::ArrayView metrics) override { + bool Export(ArrayView metrics) override { factory_->exported_metrics = std::vector(metrics.begin(), metrics.end()); return export_result_; @@ -91,10 +92,10 @@ TEST(ExportPerfMetricTest, CollectedMetricsAreExporter) { EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0)); EXPECT_THAT(metric.time_series.samples[0].sample_metadata, Eq(std::map{})); - ASSERT_THAT(metric.stats.mean, absl::optional(10.0)); - ASSERT_THAT(metric.stats.stddev, absl::nullopt); - ASSERT_THAT(metric.stats.min, absl::optional(10.0)); - ASSERT_THAT(metric.stats.max, absl::optional(10.0)); + ASSERT_THAT(metric.stats.mean, std::optional(10.0)); + ASSERT_THAT(metric.stats.stddev, std::nullopt); + ASSERT_THAT(metric.stats.min, std::optional(10.0)); + ASSERT_THAT(metric.stats.max, std::optional(10.0)); } TEST(ExportPerfMetricTest, OneFailedExporterDoesNotPreventExportToOthers) { diff --git a/api/test/metrics/metric.cc b/api/test/metrics/metric.cc index 3c30f36f49..8be4d5c307 100644 --- a/api/test/metrics/metric.cc +++ b/api/test/metrics/metric.cc @@ -9,7 +9,7 @@ */ #include "api/test/metrics/metric.h" -#include +#include "absl/strings/string_view.h" namespace webrtc { namespace test { diff --git a/api/test/metrics/metric.h b/api/test/metrics/metric.h index 17c1755f95..b58dce6463 100644 --- a/api/test/metrics/metric.h +++ b/api/test/metrics/metric.h @@ -12,10 +12,11 @@ #define API_TEST_METRICS_METRIC_H_ #include +#include #include #include -#include "absl/types/optional.h" +#include "absl/strings/string_view.h" #include "api/units/timestamp.h" namespace webrtc { @@ -66,12 +67,12 @@ struct Metric { struct Stats { // Sample mean of the metric // (https://en.wikipedia.org/wiki/Sample_mean_and_covariance). - absl::optional mean; + std::optional mean; // Standard deviation (https://en.wikipedia.org/wiki/Standard_deviation). // Is undefined if `time_series` contains only a single value. - absl::optional stddev; - absl::optional min; - absl::optional max; + std::optional stddev; + std::optional min; + std::optional max; }; // Metric name, for example PSNR, SSIM, decode_time, etc. 
diff --git a/api/test/metrics/metrics_accumulator_test.cc b/api/test/metrics/metrics_accumulator_test.cc index 677f523339..9f883950fa 100644 --- a/api/test/metrics/metrics_accumulator_test.cc +++ b/api/test/metrics/metrics_accumulator_test.cc @@ -10,6 +10,8 @@ #include "api/test/metrics/metrics_accumulator.h" #include +#include +#include #include #include "api/test/metrics/metric.h" @@ -47,10 +49,10 @@ TEST(MetricsAccumulatorTest, AddSampleToTheNewMetricWillCreateOne) { Eq(Timestamp::Seconds(1))); EXPECT_THAT(metric.time_series.samples[0].sample_metadata, Eq(std::map{{"key", "value"}})); - ASSERT_THAT(metric.stats.mean, absl::optional(10.0)); - ASSERT_THAT(metric.stats.stddev, absl::optional(0.0)); - ASSERT_THAT(metric.stats.min, absl::optional(10.0)); - ASSERT_THAT(metric.stats.max, absl::optional(10.0)); + ASSERT_THAT(metric.stats.mean, std::optional(10.0)); + ASSERT_THAT(metric.stats.stddev, std::optional(0.0)); + ASSERT_THAT(metric.stats.min, std::optional(10.0)); + ASSERT_THAT(metric.stats.max, std::optional(10.0)); } TEST(MetricsAccumulatorTest, AddSamplesToExistingMetricWontCreateNewOne) { @@ -86,10 +88,10 @@ TEST(MetricsAccumulatorTest, AddSamplesToExistingMetricWontCreateNewOne) { Eq(Timestamp::Seconds(2))); EXPECT_THAT(metric.time_series.samples[1].sample_metadata, Eq(std::map{{"key2", "value2"}})); - ASSERT_THAT(metric.stats.mean, absl::optional(15.0)); - ASSERT_THAT(metric.stats.stddev, absl::optional(5.0)); - ASSERT_THAT(metric.stats.min, absl::optional(10.0)); - ASSERT_THAT(metric.stats.max, absl::optional(20.0)); + ASSERT_THAT(metric.stats.mean, std::optional(15.0)); + ASSERT_THAT(metric.stats.stddev, std::optional(5.0)); + ASSERT_THAT(metric.stats.min, std::optional(10.0)); + ASSERT_THAT(metric.stats.max, std::optional(20.0)); } TEST(MetricsAccumulatorTest, AddSampleToDifferentMetricsWillCreateBoth) { @@ -119,10 +121,10 @@ TEST(MetricsAccumulatorTest, AddSampleToDifferentMetricsWillCreateBoth) { Eq(Timestamp::Seconds(1))); EXPECT_THAT(metrics[0].time_series.samples[0].sample_metadata, Eq(std::map{{"key1", "value1"}})); - ASSERT_THAT(metrics[0].stats.mean, absl::optional(10.0)); - ASSERT_THAT(metrics[0].stats.stddev, absl::optional(0.0)); - ASSERT_THAT(metrics[0].stats.min, absl::optional(10.0)); - ASSERT_THAT(metrics[0].stats.max, absl::optional(10.0)); + ASSERT_THAT(metrics[0].stats.mean, std::optional(10.0)); + ASSERT_THAT(metrics[0].stats.stddev, std::optional(0.0)); + ASSERT_THAT(metrics[0].stats.min, std::optional(10.0)); + ASSERT_THAT(metrics[0].stats.max, std::optional(10.0)); EXPECT_THAT(metrics[1].name, Eq("metric_name2")); EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2")); EXPECT_THAT(metrics[1].unit, Eq(Unit::kUnitless)); @@ -135,10 +137,10 @@ TEST(MetricsAccumulatorTest, AddSampleToDifferentMetricsWillCreateBoth) { Eq(Timestamp::Seconds(2))); EXPECT_THAT(metrics[1].time_series.samples[0].sample_metadata, Eq(std::map{{"key2", "value2"}})); - ASSERT_THAT(metrics[1].stats.mean, absl::optional(20.0)); - ASSERT_THAT(metrics[1].stats.stddev, absl::optional(0.0)); - ASSERT_THAT(metrics[1].stats.min, absl::optional(20.0)); - ASSERT_THAT(metrics[1].stats.max, absl::optional(20.0)); + ASSERT_THAT(metrics[1].stats.mean, std::optional(20.0)); + ASSERT_THAT(metrics[1].stats.stddev, std::optional(0.0)); + ASSERT_THAT(metrics[1].stats.min, std::optional(20.0)); + ASSERT_THAT(metrics[1].stats.max, std::optional(20.0)); } TEST(MetricsAccumulatorTest, AddMetadataToTheNewMetricWillCreateOne) { @@ -160,10 +162,10 @@ TEST(MetricsAccumulatorTest, 
AddMetadataToTheNewMetricWillCreateOne) { EXPECT_THAT(metric.metric_metadata, Eq(std::map{{"key", "value"}})); ASSERT_THAT(metric.time_series.samples, IsEmpty()); - ASSERT_THAT(metric.stats.mean, absl::nullopt); - ASSERT_THAT(metric.stats.stddev, absl::nullopt); - ASSERT_THAT(metric.stats.min, absl::nullopt); - ASSERT_THAT(metric.stats.max, absl::nullopt); + ASSERT_THAT(metric.stats.mean, std::nullopt); + ASSERT_THAT(metric.stats.stddev, std::nullopt); + ASSERT_THAT(metric.stats.min, std::nullopt); + ASSERT_THAT(metric.stats.max, std::nullopt); } TEST(MetricsAccumulatorTest, @@ -192,10 +194,10 @@ TEST(MetricsAccumulatorTest, EXPECT_THAT(metric.metric_metadata, Eq(std::map{{"key2", "value2"}})); ASSERT_THAT(metric.time_series.samples, IsEmpty()); - ASSERT_THAT(metric.stats.mean, absl::nullopt); - ASSERT_THAT(metric.stats.stddev, absl::nullopt); - ASSERT_THAT(metric.stats.min, absl::nullopt); - ASSERT_THAT(metric.stats.max, absl::nullopt); + ASSERT_THAT(metric.stats.mean, std::nullopt); + ASSERT_THAT(metric.stats.stddev, std::nullopt); + ASSERT_THAT(metric.stats.min, std::nullopt); + ASSERT_THAT(metric.stats.max, std::nullopt); } TEST(MetricsAccumulatorTest, AddMetadataToDifferentMetricsWillCreateBoth) { @@ -222,10 +224,10 @@ TEST(MetricsAccumulatorTest, AddMetadataToDifferentMetricsWillCreateBoth) { EXPECT_THAT(metrics[0].metric_metadata, Eq(std::map{{"key1", "value1"}})); ASSERT_THAT(metrics[0].time_series.samples, IsEmpty()); - ASSERT_THAT(metrics[0].stats.mean, absl::nullopt); - ASSERT_THAT(metrics[0].stats.stddev, absl::nullopt); - ASSERT_THAT(metrics[0].stats.min, absl::nullopt); - ASSERT_THAT(metrics[0].stats.max, absl::nullopt); + ASSERT_THAT(metrics[0].stats.mean, std::nullopt); + ASSERT_THAT(metrics[0].stats.stddev, std::nullopt); + ASSERT_THAT(metrics[0].stats.min, std::nullopt); + ASSERT_THAT(metrics[0].stats.max, std::nullopt); EXPECT_THAT(metrics[1].name, Eq("metric_name2")); EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2")); EXPECT_THAT(metrics[1].unit, Eq(Unit::kBytes)); @@ -234,10 +236,10 @@ TEST(MetricsAccumulatorTest, AddMetadataToDifferentMetricsWillCreateBoth) { EXPECT_THAT(metrics[1].metric_metadata, Eq(std::map{{"key2", "value2"}})); ASSERT_THAT(metrics[1].time_series.samples, IsEmpty()); - ASSERT_THAT(metrics[1].stats.mean, absl::nullopt); - ASSERT_THAT(metrics[1].stats.stddev, absl::nullopt); - ASSERT_THAT(metrics[1].stats.min, absl::nullopt); - ASSERT_THAT(metrics[1].stats.max, absl::nullopt); + ASSERT_THAT(metrics[1].stats.mean, std::nullopt); + ASSERT_THAT(metrics[1].stats.stddev, std::nullopt); + ASSERT_THAT(metrics[1].stats.min, std::nullopt); + ASSERT_THAT(metrics[1].stats.max, std::nullopt); } TEST(MetricsAccumulatorTest, AddMetadataAfterAddingSampleWontCreateNewMetric) { @@ -269,10 +271,10 @@ TEST(MetricsAccumulatorTest, AddMetadataAfterAddingSampleWontCreateNewMetric) { Eq(Timestamp::Seconds(1))); EXPECT_THAT(metric.time_series.samples[0].sample_metadata, Eq(std::map{{"key_s", "value_s"}})); - ASSERT_THAT(metric.stats.mean, absl::optional(10.0)); - ASSERT_THAT(metric.stats.stddev, absl::optional(0.0)); - ASSERT_THAT(metric.stats.min, absl::optional(10.0)); - ASSERT_THAT(metric.stats.max, absl::optional(10.0)); + ASSERT_THAT(metric.stats.mean, std::optional(10.0)); + ASSERT_THAT(metric.stats.stddev, std::optional(0.0)); + ASSERT_THAT(metric.stats.min, std::optional(10.0)); + ASSERT_THAT(metric.stats.max, std::optional(10.0)); } TEST(MetricsAccumulatorTest, AddSampleAfterAddingMetadataWontCreateNewMetric) { @@ -304,10 +306,10 @@ 
TEST(MetricsAccumulatorTest, AddSampleAfterAddingMetadataWontCreateNewMetric) { Eq(Timestamp::Seconds(1))); EXPECT_THAT(metric.time_series.samples[0].sample_metadata, Eq(std::map{{"key_s", "value_s"}})); - ASSERT_THAT(metric.stats.mean, absl::optional(10.0)); - ASSERT_THAT(metric.stats.stddev, absl::optional(0.0)); - ASSERT_THAT(metric.stats.min, absl::optional(10.0)); - ASSERT_THAT(metric.stats.max, absl::optional(10.0)); + ASSERT_THAT(metric.stats.mean, std::optional(10.0)); + ASSERT_THAT(metric.stats.stddev, std::optional(0.0)); + ASSERT_THAT(metric.stats.min, std::optional(10.0)); + ASSERT_THAT(metric.stats.max, std::optional(10.0)); } } // namespace diff --git a/api/test/metrics/metrics_exporter.h b/api/test/metrics/metrics_exporter.h index 23954b6b1f..b71d4fbd93 100644 --- a/api/test/metrics/metrics_exporter.h +++ b/api/test/metrics/metrics_exporter.h @@ -24,7 +24,7 @@ class MetricsExporter { // Exports specified metrics in a format that depends on the implementation. // Returns true if export succeeded, false otherwise. - virtual bool Export(rtc::ArrayView metrics) = 0; + virtual bool Export(ArrayView metrics) = 0; }; } // namespace test diff --git a/api/test/metrics/metrics_logger.cc b/api/test/metrics/metrics_logger.cc index 1e24400367..ac9274251e 100644 --- a/api/test/metrics/metrics_logger.cc +++ b/api/test/metrics/metrics_logger.cc @@ -10,6 +10,7 @@ #include "api/test/metrics/metrics_logger.h" #include +#include #include #include #include @@ -17,6 +18,7 @@ #include "absl/strings/string_view.h" #include "api/numerics/samples_stats_counter.h" #include "api/test/metrics/metric.h" +#include "api/units/timestamp.h" #include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -53,7 +55,7 @@ void DefaultMetricsLogger::LogSingleValueMetric( Metric::TimeSeries{.samples = std::vector{Metric::TimeSeries::Sample{ .timestamp = Now(), .value = value}}}, .stats = Metric::Stats{ - .mean = value, .stddev = absl::nullopt, .min = value, .max = value}}); + .mean = value, .stddev = std::nullopt, .min = value, .max = value}}); } void DefaultMetricsLogger::LogMetric( diff --git a/api/test/metrics/metrics_logger.h b/api/test/metrics/metrics_logger.h index 66f9e55b95..e41b39c785 100644 --- a/api/test/metrics/metrics_logger.h +++ b/api/test/metrics/metrics_logger.h @@ -13,14 +13,15 @@ #include #include -#include #include #include "absl/strings/string_view.h" #include "api/numerics/samples_stats_counter.h" #include "api/test/metrics/metric.h" #include "api/test/metrics/metrics_accumulator.h" +#include "api/units/timestamp.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" namespace webrtc { diff --git a/api/test/metrics/metrics_logger_test.cc b/api/test/metrics/metrics_logger_test.cc index de4501ca36..a75fdd6ca7 100644 --- a/api/test/metrics/metrics_logger_test.cc +++ b/api/test/metrics/metrics_logger_test.cc @@ -10,13 +10,13 @@ #include "api/test/metrics/metrics_logger.h" #include -#include +#include #include #include -#include "absl/types/optional.h" #include "api/numerics/samples_stats_counter.h" #include "api/test/metrics/metric.h" +#include "api/units/timestamp.h" #include "system_wrappers/include/clock.h" #include "test/gmock.h" #include "test/gtest.h" @@ -54,10 +54,10 @@ TEST(DefaultMetricsLoggerTest, LogSingleValueMetricRecordsMetric) { EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0)); EXPECT_THAT(metric.time_series.samples[0].sample_metadata, Eq(std::map{})); - ASSERT_THAT(metric.stats.mean, 
absl::optional(10.0)); - ASSERT_THAT(metric.stats.stddev, absl::nullopt); - ASSERT_THAT(metric.stats.min, absl::optional(10.0)); - ASSERT_THAT(metric.stats.max, absl::optional(10.0)); + ASSERT_THAT(metric.stats.mean, std::optional(10.0)); + ASSERT_THAT(metric.stats.stddev, std::nullopt); + ASSERT_THAT(metric.stats.min, std::optional(10.0)); + ASSERT_THAT(metric.stats.max, std::optional(10.0)); } TEST(DefaultMetricsLoggerTest, LogMetricWithSamplesStatsCounterRecordsMetric) { @@ -95,10 +95,10 @@ TEST(DefaultMetricsLoggerTest, LogMetricWithSamplesStatsCounterRecordsMetric) { EXPECT_THAT(metric.time_series.samples[1].value, Eq(20.0)); EXPECT_THAT(metric.time_series.samples[1].sample_metadata, Eq(std::map{{"point_key2", "value2"}})); - ASSERT_THAT(metric.stats.mean, absl::optional(15.0)); - ASSERT_THAT(metric.stats.stddev, absl::optional(5.0)); - ASSERT_THAT(metric.stats.min, absl::optional(10.0)); - ASSERT_THAT(metric.stats.max, absl::optional(20.0)); + ASSERT_THAT(metric.stats.mean, std::optional(15.0)); + ASSERT_THAT(metric.stats.stddev, std::optional(5.0)); + ASSERT_THAT(metric.stats.min, std::optional(10.0)); + ASSERT_THAT(metric.stats.max, std::optional(20.0)); } TEST(DefaultMetricsLoggerTest, @@ -113,10 +113,10 @@ TEST(DefaultMetricsLoggerTest, EXPECT_THAT(metrics[0].name, Eq("metric_name")); EXPECT_THAT(metrics[0].test_case, Eq("test_case_name")); EXPECT_THAT(metrics[0].time_series.samples, IsEmpty()); - ASSERT_THAT(metrics[0].stats.mean, Eq(absl::nullopt)); - ASSERT_THAT(metrics[0].stats.stddev, Eq(absl::nullopt)); - ASSERT_THAT(metrics[0].stats.min, Eq(absl::nullopt)); - ASSERT_THAT(metrics[0].stats.max, Eq(absl::nullopt)); + ASSERT_THAT(metrics[0].stats.mean, Eq(std::nullopt)); + ASSERT_THAT(metrics[0].stats.stddev, Eq(std::nullopt)); + ASSERT_THAT(metrics[0].stats.min, Eq(std::nullopt)); + ASSERT_THAT(metrics[0].stats.max, Eq(std::nullopt)); } TEST(DefaultMetricsLoggerTest, LogMetricWithStatsRecordsMetric) { @@ -137,10 +137,10 @@ TEST(DefaultMetricsLoggerTest, LogMetricWithStatsRecordsMetric) { EXPECT_THAT(metric.metric_metadata, Eq(std::map{{"key", "value"}})); ASSERT_THAT(metric.time_series.samples, IsEmpty()); - ASSERT_THAT(metric.stats.mean, absl::optional(15.0)); - ASSERT_THAT(metric.stats.stddev, absl::optional(5.0)); - ASSERT_THAT(metric.stats.min, absl::optional(10.0)); - ASSERT_THAT(metric.stats.max, absl::optional(20.0)); + ASSERT_THAT(metric.stats.mean, std::optional(15.0)); + ASSERT_THAT(metric.stats.stddev, std::optional(5.0)); + ASSERT_THAT(metric.stats.min, std::optional(10.0)); + ASSERT_THAT(metric.stats.max, std::optional(20.0)); } TEST(DefaultMetricsLoggerTest, LogSingleValueMetricRecordsMultipleMetrics) { @@ -267,10 +267,10 @@ TEST(DefaultMetricsLoggerTest, AccumulatedMetricsReturnedInCollectedMetrics) { Eq(Timestamp::Seconds(1))); EXPECT_THAT(metric.time_series.samples[0].sample_metadata, Eq(std::map{{"key", "value"}})); - ASSERT_THAT(metric.stats.mean, absl::optional(10.0)); - ASSERT_THAT(metric.stats.stddev, absl::optional(0.0)); - ASSERT_THAT(metric.stats.min, absl::optional(10.0)); - ASSERT_THAT(metric.stats.max, absl::optional(10.0)); + ASSERT_THAT(metric.stats.mean, std::optional(10.0)); + ASSERT_THAT(metric.stats.stddev, std::optional(0.0)); + ASSERT_THAT(metric.stats.min, std::optional(10.0)); + ASSERT_THAT(metric.stats.max, std::optional(10.0)); } TEST(DefaultMetricsLoggerTest, @@ -300,10 +300,10 @@ TEST(DefaultMetricsLoggerTest, Eq(Timestamp::Seconds(1))); EXPECT_THAT(metrics[0].time_series.samples[0].sample_metadata, Eq(std::map{{"key_s", 
"value_s"}})); - ASSERT_THAT(metrics[0].stats.mean, absl::optional(10.0)); - ASSERT_THAT(metrics[0].stats.stddev, absl::optional(0.0)); - ASSERT_THAT(metrics[0].stats.min, absl::optional(10.0)); - ASSERT_THAT(metrics[0].stats.max, absl::optional(10.0)); + ASSERT_THAT(metrics[0].stats.mean, std::optional(10.0)); + ASSERT_THAT(metrics[0].stats.stddev, std::optional(0.0)); + ASSERT_THAT(metrics[0].stats.min, std::optional(10.0)); + ASSERT_THAT(metrics[0].stats.max, std::optional(10.0)); EXPECT_THAT(metrics[1].name, Eq("metric_name1")); EXPECT_THAT(metrics[1].test_case, Eq("test_case_name1")); EXPECT_THAT(metrics[1].unit, Eq(Unit::kMilliseconds)); @@ -315,10 +315,10 @@ TEST(DefaultMetricsLoggerTest, EXPECT_THAT(metrics[1].time_series.samples[0].value, Eq(10.0)); EXPECT_THAT(metrics[1].time_series.samples[0].sample_metadata, Eq(std::map{})); - ASSERT_THAT(metrics[1].stats.mean, absl::optional(10.0)); - ASSERT_THAT(metrics[1].stats.stddev, absl::nullopt); - ASSERT_THAT(metrics[1].stats.min, absl::optional(10.0)); - ASSERT_THAT(metrics[1].stats.max, absl::optional(10.0)); + ASSERT_THAT(metrics[1].stats.mean, std::optional(10.0)); + ASSERT_THAT(metrics[1].stats.stddev, std::nullopt); + ASSERT_THAT(metrics[1].stats.min, std::optional(10.0)); + ASSERT_THAT(metrics[1].stats.max, std::optional(10.0)); } } // namespace diff --git a/api/test/metrics/metrics_set_proto_file_exporter.cc b/api/test/metrics/metrics_set_proto_file_exporter.cc index f6f3d392a2..49364846e8 100644 --- a/api/test/metrics/metrics_set_proto_file_exporter.cc +++ b/api/test/metrics/metrics_set_proto_file_exporter.cc @@ -15,6 +15,8 @@ #include #include +#include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/test/metrics/metric.h" #include "rtc_base/logging.h" #include "test/testsupport/file_utils.h" @@ -131,7 +133,7 @@ MetricsSetProtoFileExporter::Options::Options( std::map metadata) : export_file_path(export_file_path), metadata(std::move(metadata)) {} -bool MetricsSetProtoFileExporter::Export(rtc::ArrayView metrics) { +bool MetricsSetProtoFileExporter::Export(ArrayView metrics) { #if WEBRTC_ENABLE_PROTOBUF webrtc::test_metrics::MetricsSet metrics_set; for (const auto& [key, value] : options_.metadata) { diff --git a/api/test/metrics/metrics_set_proto_file_exporter.h b/api/test/metrics/metrics_set_proto_file_exporter.h index 586ab83d00..6e38eb9981 100644 --- a/api/test/metrics/metrics_set_proto_file_exporter.h +++ b/api/test/metrics/metrics_set_proto_file_exporter.h @@ -14,6 +14,7 @@ #include #include +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/test/metrics/metric.h" #include "api/test/metrics/metrics_exporter.h" @@ -47,7 +48,7 @@ class MetricsSetProtoFileExporter : public MetricsExporter { MetricsSetProtoFileExporter& operator=(const MetricsSetProtoFileExporter&) = delete; - bool Export(rtc::ArrayView metrics) override; + bool Export(ArrayView metrics) override; private: const Options options_; diff --git a/api/test/metrics/metrics_set_proto_file_exporter_test.cc b/api/test/metrics/metrics_set_proto_file_exporter_test.cc index 9202d31343..7eee6c3d2b 100644 --- a/api/test/metrics/metrics_set_proto_file_exporter_test.cc +++ b/api/test/metrics/metrics_set_proto_file_exporter_test.cc @@ -10,6 +10,8 @@ #include "api/test/metrics/metrics_set_proto_file_exporter.h" #include +#include +#include #include #include #include diff --git a/api/test/metrics/print_result_proxy_metrics_exporter.cc b/api/test/metrics/print_result_proxy_metrics_exporter.cc index 1ce1e63892..cb163facde 
100644 --- a/api/test/metrics/print_result_proxy_metrics_exporter.cc +++ b/api/test/metrics/print_result_proxy_metrics_exporter.cc @@ -9,10 +9,12 @@ */ #include "api/test/metrics/print_result_proxy_metrics_exporter.h" +#include #include #include #include "api/array_view.h" +#include "api/numerics/samples_stats_counter.h" #include "api/test/metrics/metric.h" #include "test/testsupport/perf_test.h" @@ -76,8 +78,7 @@ bool NameEndsWithConnected(const std::string& name) { } // namespace -bool PrintResultProxyMetricsExporter::Export( - rtc::ArrayView metrics) { +bool PrintResultProxyMetricsExporter::Export(ArrayView metrics) { static const std::unordered_set per_call_metrics{ "actual_encode_bitrate", "encode_frame_rate", diff --git a/api/test/metrics/print_result_proxy_metrics_exporter.h b/api/test/metrics/print_result_proxy_metrics_exporter.h index bad0594972..cbc72ab212 100644 --- a/api/test/metrics/print_result_proxy_metrics_exporter.h +++ b/api/test/metrics/print_result_proxy_metrics_exporter.h @@ -23,7 +23,7 @@ class PrintResultProxyMetricsExporter : public MetricsExporter { public: ~PrintResultProxyMetricsExporter() override = default; - bool Export(rtc::ArrayView metrics) override; + bool Export(ArrayView metrics) override; }; } // namespace test diff --git a/api/test/metrics/print_result_proxy_metrics_exporter_test.cc b/api/test/metrics/print_result_proxy_metrics_exporter_test.cc index 768c794b40..22ef272e80 100644 --- a/api/test/metrics/print_result_proxy_metrics_exporter_test.cc +++ b/api/test/metrics/print_result_proxy_metrics_exporter_test.cc @@ -15,7 +15,6 @@ #include "api/test/metrics/metric.h" #include "api/units/timestamp.h" -#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { diff --git a/api/test/metrics/stdout_metrics_exporter.cc b/api/test/metrics/stdout_metrics_exporter.cc index 22243e73e8..62b8ad8a74 100644 --- a/api/test/metrics/stdout_metrics_exporter.cc +++ b/api/test/metrics/stdout_metrics_exporter.cc @@ -12,9 +12,10 @@ #include #include +#include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/test/metrics/metric.h" #include "rtc_base/strings/string_builder.h" @@ -30,7 +31,7 @@ int64_t IntegralPart(double value) { void AppendWithPrecision(double value, int digits_after_comma, - rtc::StringBuilder& out) { + StringBuilder& out) { int64_t multiplier = std::lround(std::pow(10, digits_after_comma)); int64_t integral_part = IntegralPart(value); double decimal_part = std::abs(value) - integral_part; @@ -70,7 +71,7 @@ void AppendWithPrecision(double value, StdoutMetricsExporter::StdoutMetricsExporter() : output_(stdout) {} -bool StdoutMetricsExporter::Export(rtc::ArrayView metrics) { +bool StdoutMetricsExporter::Export(ArrayView metrics) { for (const Metric& metric : metrics) { PrintMetric(metric); } @@ -78,7 +79,7 @@ bool StdoutMetricsExporter::Export(rtc::ArrayView metrics) { } void StdoutMetricsExporter::PrintMetric(const Metric& metric) { - rtc::StringBuilder value_stream; + StringBuilder value_stream; value_stream << metric.test_case << " / " << metric.name << "= {mean="; if (metric.stats.mean.has_value()) { AppendWithPrecision(*metric.stats.mean, 8, value_stream); diff --git a/api/test/metrics/stdout_metrics_exporter.h b/api/test/metrics/stdout_metrics_exporter.h index 2c572cb2ea..60354b58fd 100644 --- a/api/test/metrics/stdout_metrics_exporter.h +++ b/api/test/metrics/stdout_metrics_exporter.h @@ -11,6 +11,8 @@ #ifndef API_TEST_METRICS_STDOUT_METRICS_EXPORTER_H_ #define 
API_TEST_METRICS_STDOUT_METRICS_EXPORTER_H_ +#include + #include "api/array_view.h" #include "api/test/metrics/metric.h" #include "api/test/metrics/metrics_exporter.h" @@ -27,7 +29,7 @@ class StdoutMetricsExporter : public MetricsExporter { StdoutMetricsExporter(const StdoutMetricsExporter&) = delete; StdoutMetricsExporter& operator=(const StdoutMetricsExporter&) = delete; - bool Export(rtc::ArrayView metrics) override; + bool Export(ArrayView metrics) override; private: void PrintMetric(const Metric& metric); diff --git a/api/test/metrics/stdout_metrics_exporter_test.cc b/api/test/metrics/stdout_metrics_exporter_test.cc index 91c06fac5b..27c1d84927 100644 --- a/api/test/metrics/stdout_metrics_exporter_test.cc +++ b/api/test/metrics/stdout_metrics_exporter_test.cc @@ -15,7 +15,6 @@ #include "api/test/metrics/metric.h" #include "api/units/timestamp.h" -#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { diff --git a/api/test/mock_async_dns_resolver.h b/api/test/mock_async_dns_resolver.h index 26127eb1f6..5848972d07 100644 --- a/api/test/mock_async_dns_resolver.h +++ b/api/test/mock_async_dns_resolver.h @@ -11,10 +11,11 @@ #ifndef API_TEST_MOCK_ASYNC_DNS_RESOLVER_H_ #define API_TEST_MOCK_ASYNC_DNS_RESOLVER_H_ -#include #include +#include "absl/functional/any_invocable.h" #include "api/async_dns_resolver.h" +#include "rtc_base/socket_address.h" #include "test/gmock.h" namespace webrtc { @@ -23,7 +24,7 @@ class MockAsyncDnsResolverResult : public AsyncDnsResolverResult { public: MOCK_METHOD(bool, GetResolvedAddress, - (int, rtc::SocketAddress*), + (int, webrtc::SocketAddress*), (const, override)); MOCK_METHOD(int, GetError, (), (const, override)); }; @@ -32,11 +33,11 @@ class MockAsyncDnsResolver : public AsyncDnsResolverInterface { public: MOCK_METHOD(void, Start, - (const rtc::SocketAddress&, absl::AnyInvocable), + (const webrtc::SocketAddress&, absl::AnyInvocable), (override)); MOCK_METHOD(void, Start, - (const rtc::SocketAddress&, + (const webrtc::SocketAddress&, int family, absl::AnyInvocable), (override)); @@ -47,11 +48,11 @@ class MockAsyncDnsResolverFactory : public AsyncDnsResolverFactoryInterface { public: MOCK_METHOD(std::unique_ptr, CreateAndResolve, - (const rtc::SocketAddress&, absl::AnyInvocable), + (const webrtc::SocketAddress&, absl::AnyInvocable), (override)); MOCK_METHOD(std::unique_ptr, CreateAndResolve, - (const rtc::SocketAddress&, int, absl::AnyInvocable), + (const webrtc::SocketAddress&, int, absl::AnyInvocable), (override)); MOCK_METHOD(std::unique_ptr, Create, diff --git a/api/test/mock_audio_mixer.h b/api/test/mock_audio_mixer.h index 88dc108ca3..458c0763f6 100644 --- a/api/test/mock_audio_mixer.h +++ b/api/test/mock_audio_mixer.h @@ -11,6 +11,9 @@ #ifndef API_TEST_MOCK_AUDIO_MIXER_H_ #define API_TEST_MOCK_AUDIO_MIXER_H_ +#include + +#include "api/audio/audio_frame.h" #include "api/audio/audio_mixer.h" #include "test/gmock.h" diff --git a/api/test/mock_audio_sink.h b/api/test/mock_audio_sink.h index 88f38a3c57..dcb01cc678 100644 --- a/api/test/mock_audio_sink.h +++ b/api/test/mock_audio_sink.h @@ -11,7 +11,10 @@ #ifndef API_TEST_MOCK_AUDIO_SINK_H_ #define API_TEST_MOCK_AUDIO_SINK_H_ -#include "absl/types/optional.h" +#include +#include +#include + #include "api/media_stream_interface.h" #include "test/gmock.h" @@ -35,7 +38,7 @@ class MockAudioSink : public webrtc::AudioTrackSinkInterface { int sample_rate, size_t number_of_channels, size_t number_of_frames, - absl::optional absolute_capture_timestamp_ms), + std::optional absolute_capture_timestamp_ms), 
(override)); }; diff --git a/api/test/mock_data_channel.h b/api/test/mock_data_channel.h index 5d38ec1375..9a8ff8cf51 100644 --- a/api/test/mock_data_channel.h +++ b/api/test/mock_data_channel.h @@ -11,18 +11,25 @@ #ifndef API_TEST_MOCK_DATA_CHANNEL_H_ #define API_TEST_MOCK_DATA_CHANNEL_H_ +#include +#include #include +#include "absl/functional/any_invocable.h" #include "api/data_channel_interface.h" +#include "api/priority.h" +#include "api/rtc_error.h" +#include "api/scoped_refptr.h" +#include "rtc_base/ref_counted_object.h" #include "test/gmock.h" namespace webrtc { class MockDataChannelInterface - : public rtc::RefCountedObject { + : public RefCountedObject { public: - static rtc::scoped_refptr Create() { - return rtc::scoped_refptr( + static scoped_refptr Create() { + return scoped_refptr( new MockDataChannelInterface()); } @@ -34,14 +41,12 @@ class MockDataChannelInterface MOCK_METHOD(std::string, label, (), (const, override)); MOCK_METHOD(bool, reliable, (), (const, override)); MOCK_METHOD(bool, ordered, (), (const, override)); - MOCK_METHOD(uint16_t, maxRetransmitTime, (), (const, override)); - MOCK_METHOD(uint16_t, maxRetransmits, (), (const, override)); - MOCK_METHOD(absl::optional, maxRetransmitsOpt, (), (const, override)); - MOCK_METHOD(absl::optional, maxPacketLifeTime, (), (const, override)); + MOCK_METHOD(std::optional, maxRetransmitsOpt, (), (const, override)); + MOCK_METHOD(std::optional, maxPacketLifeTime, (), (const, override)); MOCK_METHOD(std::string, protocol, (), (const, override)); MOCK_METHOD(bool, negotiated, (), (const, override)); MOCK_METHOD(int, id, (), (const, override)); - MOCK_METHOD(Priority, priority, (), (const, override)); + MOCK_METHOD(PriorityValue, priority, (), (const, override)); MOCK_METHOD(DataState, state, (), (const, override)); MOCK_METHOD(RTCError, error, (), (const, override)); MOCK_METHOD(uint32_t, messages_sent, (), (const, override)); diff --git a/api/test/mock_dtmf_sender.h b/api/test/mock_dtmf_sender.h index 9029195025..3164975568 100644 --- a/api/test/mock_dtmf_sender.h +++ b/api/test/mock_dtmf_sender.h @@ -12,8 +12,12 @@ #define API_TEST_MOCK_DTMF_SENDER_H_ #include +#include #include "api/dtmf_sender_interface.h" +#include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" +#include "rtc_base/ref_counted_object.h" #include "test/gmock.h" namespace webrtc { @@ -31,8 +35,8 @@ static_assert(!std::is_abstract_v, ""); class MockDtmfSender : public DtmfSenderInterface { public: - static rtc::scoped_refptr Create() { - return rtc::make_ref_counted(); + static scoped_refptr Create() { + return make_ref_counted(); } MOCK_METHOD(void, @@ -41,15 +45,16 @@ class MockDtmfSender : public DtmfSenderInterface { (override)); MOCK_METHOD(void, UnregisterObserver, (), (override)); MOCK_METHOD(bool, CanInsertDtmf, (), (override)); - MOCK_METHOD(std::string, tones, (), (const override)); - MOCK_METHOD(int, duration, (), (const override)); - MOCK_METHOD(int, inter_tone_gap, (), (const override)); + MOCK_METHOD(std::string, tones, (), (const, override)); + MOCK_METHOD(int, duration, (), (const, override)); + MOCK_METHOD(int, inter_tone_gap, (), (const, override)); protected: MockDtmfSender() = default; }; -static_assert(!std::is_abstract_v>, ""); +static_assert(!std::is_abstract_v>, + ""); } // namespace webrtc diff --git a/api/test/mock_encoder_selector.h b/api/test/mock_encoder_selector.h index 2e018d57ba..bb5c3a0c33 100644 --- a/api/test/mock_encoder_selector.h +++ b/api/test/mock_encoder_selector.h @@ -11,6 +11,11 @@ #ifndef 
API_TEST_MOCK_ENCODER_SELECTOR_H_ #define API_TEST_MOCK_ENCODER_SELECTOR_H_ +#include + +#include "api/units/data_rate.h" +#include "api/video/render_resolution.h" +#include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_encoder_factory.h" #include "test/gmock.h" @@ -24,17 +29,17 @@ class MockEncoderSelector (const SdpVideoFormat& format), (override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, OnAvailableBitrate, (const DataRate& rate), (override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, OnResolutionChange, (const RenderResolution& resolution), (override)); - MOCK_METHOD(absl::optional, OnEncoderBroken, (), (override)); + MOCK_METHOD(std::optional, OnEncoderBroken, (), (override)); }; } // namespace webrtc diff --git a/api/test/mock_frame_decryptor.h b/api/test/mock_frame_decryptor.h index 9604b96cc2..79062ba3ea 100644 --- a/api/test/mock_frame_decryptor.h +++ b/api/test/mock_frame_decryptor.h @@ -11,9 +11,13 @@ #ifndef API_TEST_MOCK_FRAME_DECRYPTOR_H_ #define API_TEST_MOCK_FRAME_DECRYPTOR_H_ +#include +#include #include +#include "api/array_view.h" #include "api/crypto/frame_decryptor_interface.h" +#include "api/media_types.h" #include "test/gmock.h" namespace webrtc { @@ -22,16 +26,16 @@ class MockFrameDecryptor : public FrameDecryptorInterface { public: MOCK_METHOD(Result, Decrypt, - (cricket::MediaType, + (webrtc::MediaType, const std::vector&, - rtc::ArrayView, - rtc::ArrayView, - rtc::ArrayView), + webrtc::ArrayView, + webrtc::ArrayView, + webrtc::ArrayView), (override)); MOCK_METHOD(size_t, GetMaxPlaintextByteSize, - (cricket::MediaType, size_t encrypted_frame_size), + (webrtc::MediaType, size_t encrypted_frame_size), (override)); }; diff --git a/api/test/mock_frame_encryptor.h b/api/test/mock_frame_encryptor.h index e47321f801..31f0a2c518 100644 --- a/api/test/mock_frame_encryptor.h +++ b/api/test/mock_frame_encryptor.h @@ -11,7 +11,12 @@ #ifndef API_TEST_MOCK_FRAME_ENCRYPTOR_H_ #define API_TEST_MOCK_FRAME_ENCRYPTOR_H_ +#include +#include + +#include "api/array_view.h" #include "api/crypto/frame_encryptor_interface.h" +#include "api/media_types.h" #include "test/gmock.h" namespace webrtc { @@ -20,17 +25,17 @@ class MockFrameEncryptor : public FrameEncryptorInterface { public: MOCK_METHOD(int, Encrypt, - (cricket::MediaType, + (webrtc::MediaType, uint32_t, - rtc::ArrayView, - rtc::ArrayView, - rtc::ArrayView, + webrtc::ArrayView, + webrtc::ArrayView, + webrtc::ArrayView, size_t*), (override)); MOCK_METHOD(size_t, GetMaxCiphertextByteSize, - (cricket::MediaType media_type, size_t frame_size), + (webrtc::MediaType media_type, size_t frame_size), (override)); }; diff --git a/test/mock_frame_transformer.h b/api/test/mock_frame_transformer.h similarity index 78% rename from test/mock_frame_transformer.h rename to api/test/mock_frame_transformer.h index 617cda8a43..8dd311c854 100644 --- a/test/mock_frame_transformer.h +++ b/api/test/mock_frame_transformer.h @@ -8,13 +8,14 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef TEST_MOCK_FRAME_TRANSFORMER_H_ -#define TEST_MOCK_FRAME_TRANSFORMER_H_ +#ifndef API_TEST_MOCK_FRAME_TRANSFORMER_H_ +#define API_TEST_MOCK_FRAME_TRANSFORMER_H_ +#include #include -#include #include "api/frame_transformer_interface.h" +#include "api/scoped_refptr.h" #include "test/gmock.h" namespace webrtc { @@ -27,11 +28,11 @@ class MockFrameTransformer : public FrameTransformerInterface { (override)); MOCK_METHOD(void, RegisterTransformedFrameCallback, - (rtc::scoped_refptr), + (webrtc::scoped_refptr), (override)); MOCK_METHOD(void, RegisterTransformedFrameSinkCallback, - (rtc::scoped_refptr, uint32_t), + (webrtc::scoped_refptr, uint32_t), (override)); MOCK_METHOD(void, UnregisterTransformedFrameCallback, (), (override)); MOCK_METHOD(void, @@ -42,4 +43,4 @@ class MockFrameTransformer : public FrameTransformerInterface { } // namespace webrtc -#endif // TEST_MOCK_FRAME_TRANSFORMER_H_ +#endif // API_TEST_MOCK_FRAME_TRANSFORMER_H_ diff --git a/api/test/mock_media_stream_interface.h b/api/test/mock_media_stream_interface.h index dfdbab35e9..0910f1daac 100644 --- a/api/test/mock_media_stream_interface.h +++ b/api/test/mock_media_stream_interface.h @@ -12,16 +12,20 @@ #define API_TEST_MOCK_MEDIA_STREAM_INTERFACE_H_ #include +#include +#include "api/audio_options.h" #include "api/media_stream_interface.h" +#include "api/scoped_refptr.h" +#include "rtc_base/ref_counted_object.h" #include "test/gmock.h" namespace webrtc { -class MockAudioSource : public rtc::RefCountedObject { +class MockAudioSource : public RefCountedObject { public: - static rtc::scoped_refptr Create() { - return rtc::scoped_refptr(new MockAudioSource()); + static scoped_refptr Create() { + return scoped_refptr(new MockAudioSource()); } MOCK_METHOD(void, @@ -45,16 +49,16 @@ class MockAudioSource : public rtc::RefCountedObject { (override)); MOCK_METHOD(void, AddSink, (AudioTrackSinkInterface * sink), (override)); MOCK_METHOD(void, RemoveSink, (AudioTrackSinkInterface * sink), (override)); - MOCK_METHOD(const cricket::AudioOptions, options, (), (const, override)); + MOCK_METHOD(const AudioOptions, options, (), (const, override)); private: MockAudioSource() = default; }; -class MockAudioTrack : public rtc::RefCountedObject { +class MockAudioTrack : public RefCountedObject { public: - static rtc::scoped_refptr Create() { - return rtc::scoped_refptr(new MockAudioTrack()); + static scoped_refptr Create() { + return scoped_refptr(new MockAudioTrack()); } MOCK_METHOD(void, @@ -74,7 +78,7 @@ class MockAudioTrack : public rtc::RefCountedObject { MOCK_METHOD(void, AddSink, (AudioTrackSinkInterface * sink), (override)); MOCK_METHOD(void, RemoveSink, (AudioTrackSinkInterface * sink), (override)); MOCK_METHOD(bool, GetSignalLevel, (int* level), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, GetAudioProcessor, (), (override)); @@ -85,38 +89,33 @@ class MockAudioTrack : public rtc::RefCountedObject { class MockMediaStream : public MediaStreamInterface { public: - MOCK_METHOD(std::string, id, (), (const override)); + MOCK_METHOD(std::string, id, (), (const, override)); MOCK_METHOD(AudioTrackVector, GetAudioTracks, (), (override)); MOCK_METHOD(VideoTrackVector, GetVideoTracks, (), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, FindAudioTrack, (const std::string& track_id), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, FindVideoTrack, (const std::string& track_id), (override)); MOCK_METHOD(bool, AddTrack, - (rtc::scoped_refptr track), + 
(webrtc::scoped_refptr track), (override)); MOCK_METHOD(bool, AddTrack, - (rtc::scoped_refptr track), + (webrtc::scoped_refptr track), (override)); MOCK_METHOD(bool, RemoveTrack, - (rtc::scoped_refptr track), + (webrtc::scoped_refptr track), (override)); MOCK_METHOD(bool, RemoveTrack, - (rtc::scoped_refptr track), + (webrtc::scoped_refptr track), (override)); - // Old AddTrack/RemoveTrack methods - slated for removal - MOCK_METHOD(bool, AddTrack, (AudioTrackInterface * track), (override)); - MOCK_METHOD(bool, AddTrack, (VideoTrackInterface * track), (override)); - MOCK_METHOD(bool, RemoveTrack, (AudioTrackInterface * track), (override)); - MOCK_METHOD(bool, RemoveTrack, (VideoTrackInterface * track), (override)); MOCK_METHOD(void, RegisterObserver, (ObserverInterface * observer), @@ -127,7 +126,8 @@ class MockMediaStream : public MediaStreamInterface { (override)); }; -static_assert(!std::is_abstract_v>, ""); +static_assert(!std::is_abstract_v>, + ""); } // namespace webrtc diff --git a/api/test/mock_packet_socket_factory.h b/api/test/mock_packet_socket_factory.h index 7e59556385..d32836b39f 100644 --- a/api/test/mock_packet_socket_factory.h +++ b/api/test/mock_packet_socket_factory.h @@ -11,13 +11,17 @@ #ifndef API_TEST_MOCK_PACKET_SOCKET_FACTORY_H_ #define API_TEST_MOCK_PACKET_SOCKET_FACTORY_H_ +#include #include -#include +#include +#include "api/async_dns_resolver.h" #include "api/packet_socket_factory.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/socket_address.h" #include "test/gmock.h" -namespace rtc { +namespace webrtc { class MockPacketSocketFactory : public PacketSocketFactory { public: MOCK_METHOD(AsyncPacketSocket*, @@ -32,11 +36,9 @@ class MockPacketSocketFactory : public PacketSocketFactory { CreateClientTcpSocket, (const SocketAddress& local_address, const SocketAddress&, - const ProxyInfo&, - const std::string&, const PacketSocketTcpOptions&), (override)); - MOCK_METHOD(std::unique_ptr, + MOCK_METHOD(std::unique_ptr, CreateAsyncDnsResolver, (), (override)); @@ -44,6 +46,14 @@ class MockPacketSocketFactory : public PacketSocketFactory { static_assert(!std::is_abstract_v, ""); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::MockPacketSocketFactory; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // API_TEST_MOCK_PACKET_SOCKET_FACTORY_H_ diff --git a/api/test/mock_peer_connection_factory_interface.h b/api/test/mock_peer_connection_factory_interface.h index 67a67b8e06..a297e58435 100644 --- a/api/test/mock_peer_connection_factory_interface.h +++ b/api/test/mock_peer_connection_factory_interface.h @@ -11,66 +11,63 @@ #ifndef API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_ #define API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_ -#include +#include +#include #include +#include "absl/strings/string_view.h" +#include "api/audio_options.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "rtc_base/ref_counted_object.h" #include "test/gmock.h" namespace webrtc { class MockPeerConnectionFactoryInterface - : public rtc::RefCountedObject { + : public RefCountedObject { public: - static rtc::scoped_refptr Create() { - return rtc::scoped_refptr( + static scoped_refptr Create() { + return scoped_refptr( new MockPeerConnectionFactoryInterface()); } MOCK_METHOD(void, SetOptions, (const Options&), (override)); - MOCK_METHOD(rtc::scoped_refptr, - CreatePeerConnection, - (const PeerConnectionInterface::RTCConfiguration&, - PeerConnectionDependencies), - (override)); - MOCK_METHOD(RTCErrorOr>, + MOCK_METHOD(RTCErrorOr>, CreatePeerConnectionOrError, (const PeerConnectionInterface::RTCConfiguration&, PeerConnectionDependencies), (override)); - MOCK_METHOD(rtc::scoped_refptr, - CreatePeerConnection, - (const PeerConnectionInterface::RTCConfiguration&, - std::unique_ptr, - std::unique_ptr, - PeerConnectionObserver*), - (override)); MOCK_METHOD(RtpCapabilities, GetRtpSenderCapabilities, - (cricket::MediaType), + (webrtc::MediaType), (const, override)); MOCK_METHOD(RtpCapabilities, GetRtpReceiverCapabilities, - (cricket::MediaType), + (webrtc::MediaType), (const, override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, CreateLocalMediaStream, (const std::string&), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, CreateAudioSource, - (const cricket::AudioOptions&), + (const webrtc::AudioOptions&), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, CreateVideoTrack, (const std::string&, VideoTrackSourceInterface*), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, CreateVideoTrack, - (rtc::scoped_refptr, + (webrtc::scoped_refptr, absl::string_view), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, CreateAudioTrack, (const std::string&, AudioSourceInterface*), (override)); diff --git a/api/test/mock_peerconnectioninterface.h b/api/test/mock_peerconnectioninterface.h index ccc6ce46b1..1a75fa356c 100644 --- a/api/test/mock_peerconnectioninterface.h +++ b/api/test/mock_peerconnectioninterface.h @@ -11,82 +11,104 @@ #ifndef API_TEST_MOCK_PEERCONNECTIONINTERFACE_H_ #define API_TEST_MOCK_PEERCONNECTIONINTERFACE_H_ +#include #include +#include #include #include -#include #include +#include "api/adaptation/resource.h" +#include "api/candidate.h" +#include "api/data_channel_event_observer_interface.h" +#include "api/data_channel_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/jsep.h" +#include "api/make_ref_counted.h" +#include 
"api/media_stream_interface.h" +#include "api/media_types.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtc_event_log_output.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" #include "api/sctp_transport_interface.h" +#include "api/set_remote_description_observer_interface.h" +#include "api/stats/rtc_stats_collector_callback.h" +#include "api/transport/bandwidth_estimation_settings.h" +#include "api/transport/bitrate_settings.h" +#include "api/transport/network_control.h" #include "rtc_base/ref_counted_object.h" +#include "rtc_base/thread.h" #include "test/gmock.h" namespace webrtc { class MockPeerConnectionInterface : public webrtc::PeerConnectionInterface { public: - static rtc::scoped_refptr Create() { - return rtc::make_ref_counted(); + static scoped_refptr Create() { + return make_ref_counted(); } // PeerConnectionInterface - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, local_streams, (), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, remote_streams, (), (override)); MOCK_METHOD(bool, AddStream, (MediaStreamInterface*), (override)); MOCK_METHOD(void, RemoveStream, (MediaStreamInterface*), (override)); - MOCK_METHOD(RTCErrorOr>, + MOCK_METHOD(RTCErrorOr>, AddTrack, - (rtc::scoped_refptr, + (webrtc::scoped_refptr, const std::vector&), (override)); - MOCK_METHOD(RTCErrorOr>, + MOCK_METHOD(RTCErrorOr>, AddTrack, - (rtc::scoped_refptr, + (webrtc::scoped_refptr, const std::vector&, const std::vector&), (override)); MOCK_METHOD(RTCError, RemoveTrackOrError, - (rtc::scoped_refptr), + (webrtc::scoped_refptr), (override)); - MOCK_METHOD(RTCErrorOr>, + MOCK_METHOD(RTCErrorOr>, AddTransceiver, - (rtc::scoped_refptr), + (webrtc::scoped_refptr), (override)); - MOCK_METHOD(RTCErrorOr>, + MOCK_METHOD(RTCErrorOr>, AddTransceiver, - (rtc::scoped_refptr, + (webrtc::scoped_refptr, const RtpTransceiverInit&), (override)); - MOCK_METHOD(RTCErrorOr>, + MOCK_METHOD(RTCErrorOr>, AddTransceiver, - (cricket::MediaType), + (webrtc::MediaType), (override)); - MOCK_METHOD(RTCErrorOr>, + MOCK_METHOD(RTCErrorOr>, AddTransceiver, - (cricket::MediaType, const RtpTransceiverInit&), + (webrtc::MediaType, const RtpTransceiverInit&), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, CreateSender, (const std::string&, const std::string&), (override)); - MOCK_METHOD(std::vector>, + MOCK_METHOD(std::vector>, GetSenders, (), (const, override)); - MOCK_METHOD(std::vector>, + MOCK_METHOD(std::vector>, GetReceivers, (), (const, override)); - MOCK_METHOD(std::vector>, + MOCK_METHOD(std::vector>, GetTransceivers, (), (const, override)); @@ -97,20 +119,20 @@ class MockPeerConnectionInterface : public webrtc::PeerConnectionInterface { MOCK_METHOD(void, GetStats, (RTCStatsCollectorCallback*), (override)); MOCK_METHOD(void, GetStats, - (rtc::scoped_refptr, - rtc::scoped_refptr), + (webrtc::scoped_refptr, + webrtc::scoped_refptr), (override)); MOCK_METHOD(void, GetStats, - (rtc::scoped_refptr, - rtc::scoped_refptr), + (webrtc::scoped_refptr, + webrtc::scoped_refptr), (override)); MOCK_METHOD(void, ClearStatsCache, (), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, GetSctpTransport, (), (const, override)); - MOCK_METHOD(RTCErrorOr>, + MOCK_METHOD(RTCErrorOr>, CreateDataChannelOrError, (const std::string&, const DataChannelInit*), (override)); @@ -158,7 
+180,11 @@ class MockPeerConnectionInterface : public webrtc::PeerConnectionInterface { MOCK_METHOD(void, SetRemoteDescription, (std::unique_ptr, - rtc::scoped_refptr), + webrtc::scoped_refptr), + (override)); + MOCK_METHOD(bool, + ShouldFireNegotiationNeededEvent, + (uint32_t event_id), (override)); MOCK_METHOD(PeerConnectionInterface::RTCConfiguration, GetConfiguration, @@ -174,12 +200,16 @@ class MockPeerConnectionInterface : public webrtc::PeerConnectionInterface { (override)); MOCK_METHOD(bool, RemoveIceCandidates, - (const std::vector&), + (const std::vector&), (override)); MOCK_METHOD(RTCError, SetBitrate, (const BitrateSettings&), (override)); + MOCK_METHOD(void, + ReconfigureBandwidthEstimation, + (const BandwidthEstimationSettings&), + (override)); MOCK_METHOD(void, SetAudioPlayout, (bool), (override)); MOCK_METHOD(void, SetAudioRecording, (bool), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, LookupDtlsTransportByMid, (const std::string&), (override)); @@ -191,7 +221,11 @@ class MockPeerConnectionInterface : public webrtc::PeerConnectionInterface { (override)); MOCK_METHOD(PeerConnectionState, peer_connection_state, (), (override)); MOCK_METHOD(IceGatheringState, ice_gathering_state, (), (override)); - MOCK_METHOD(absl::optional, can_trickle_ice_candidates, (), (override)); + MOCK_METHOD(void, + AddAdaptationResource, + (webrtc::scoped_refptr), + (override)); + MOCK_METHOD(std::optional, can_trickle_ice_candidates, (), (override)); MOCK_METHOD(bool, StartRtcEventLog, (std::unique_ptr, int64_t), @@ -200,12 +234,21 @@ class MockPeerConnectionInterface : public webrtc::PeerConnectionInterface { StartRtcEventLog, (std::unique_ptr), (override)); + MOCK_METHOD(void, + SetDataChannelEventObserver, + (std::unique_ptr), + (override)); MOCK_METHOD(void, StopRtcEventLog, (), (override)); MOCK_METHOD(void, Close, (), (override)); + MOCK_METHOD(Thread*, signaling_thread, (), (const, override)); + MOCK_METHOD(NetworkControllerInterface*, + GetNetworkController, + (), + (override)); }; static_assert( - !std::is_abstract_v>, + !std::is_abstract_v>, ""); } // namespace webrtc diff --git a/api/test/mock_rtp_transceiver.h b/api/test/mock_rtp_transceiver.h index 72a69f1bd0..6992c54bf3 100644 --- a/api/test/mock_rtp_transceiver.h +++ b/api/test/mock_rtp_transceiver.h @@ -11,10 +11,20 @@ #ifndef API_TEST_MOCK_RTP_TRANSCEIVER_H_ #define API_TEST_MOCK_RTP_TRANSCEIVER_H_ +#include #include #include +#include "api/array_view.h" +#include "api/make_ref_counted.h" +#include "api/media_types.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_direction.h" #include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" #include "test/gmock.h" namespace webrtc { @@ -23,17 +33,14 @@ class MockRtpTransceiver : public RtpTransceiverInterface { public: MockRtpTransceiver() = default; - static rtc::scoped_refptr Create() { - return rtc::make_ref_counted(); + static scoped_refptr Create() { + return make_ref_counted(); } - MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); - MOCK_METHOD(absl::optional, mid, (), (const, override)); - MOCK_METHOD(rtc::scoped_refptr, - sender, - (), - (const, override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(webrtc::MediaType, media_type, (), (const, override)); + MOCK_METHOD(std::optional, mid, (), (const, override)); + MOCK_METHOD(scoped_refptr, sender, (), (const, override)); + 
MOCK_METHOD(scoped_refptr, receiver, (), (const, override)); @@ -48,11 +55,11 @@ class MockRtpTransceiver : public RtpTransceiverInterface { SetDirectionWithError, (RtpTransceiverDirection new_direction), (override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, current_direction, (), (const, override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, fired_direction, (), (const, override)); @@ -61,7 +68,7 @@ class MockRtpTransceiver : public RtpTransceiverInterface { MOCK_METHOD(void, Stop, (), (override)); MOCK_METHOD(RTCError, SetCodecPreferences, - (rtc::ArrayView codecs), + (webrtc::ArrayView codecs), (override)); MOCK_METHOD(std::vector, codec_preferences, @@ -78,7 +85,7 @@ class MockRtpTransceiver : public RtpTransceiverInterface { MOCK_METHOD( webrtc::RTCError, SetHeaderExtensionsToNegotiate, - (rtc::ArrayView header_extensions), + (webrtc::ArrayView header_extensions), (override)); }; diff --git a/api/test/mock_rtpreceiver.h b/api/test/mock_rtpreceiver.h index 63318dc32d..b0c34a20a0 100644 --- a/api/test/mock_rtpreceiver.h +++ b/api/test/mock_rtpreceiver.h @@ -11,26 +11,33 @@ #ifndef API_TEST_MOCK_RTPRECEIVER_H_ #define API_TEST_MOCK_RTPRECEIVER_H_ +#include #include #include #include "api/crypto/frame_decryptor_interface.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/transport/rtp/rtp_source.h" +#include "rtc_base/ref_counted_object.h" #include "test/gmock.h" namespace webrtc { -class MockRtpReceiver : public rtc::RefCountedObject { +class MockRtpReceiver : public RefCountedObject { public: - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, track, (), (const, override)); - MOCK_METHOD(std::vector>, + MOCK_METHOD(std::vector>, streams, (), (const, override)); - MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); + MOCK_METHOD(webrtc::MediaType, media_type, (), (const, override)); MOCK_METHOD(std::string, id, (), (const, override)); MOCK_METHOD(RtpParameters, GetParameters, (), (const, override)); MOCK_METHOD(bool, @@ -40,14 +47,14 @@ class MockRtpReceiver : public rtc::RefCountedObject { MOCK_METHOD(void, SetObserver, (RtpReceiverObserverInterface*), (override)); MOCK_METHOD(void, SetJitterBufferMinimumDelay, - (absl::optional), + (std::optional), (override)); MOCK_METHOD(std::vector, GetSources, (), (const, override)); MOCK_METHOD(void, SetFrameDecryptor, - (rtc::scoped_refptr), + (webrtc::scoped_refptr), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, GetFrameDecryptor, (), (const, override)); diff --git a/api/test/mock_rtpsender.h b/api/test/mock_rtpsender.h index 22113678b9..da1078f356 100644 --- a/api/test/mock_rtpsender.h +++ b/api/test/mock_rtpsender.h @@ -11,32 +11,46 @@ #ifndef API_TEST_MOCK_RTPSENDER_H_ #define API_TEST_MOCK_RTPSENDER_H_ +#include #include #include +#include #include +#include "api/crypto/frame_encryptor_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/dtmf_sender_interface.h" +#include "api/frame_transformer_interface.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" #include "api/rtp_sender_interface.h" +#include "api/scoped_refptr.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "rtc_base/ref_counted_object.h" #include "test/gmock.h" namespace webrtc { class MockRtpSender 
: public RtpSenderInterface { public: - static rtc::scoped_refptr Create() { - return rtc::make_ref_counted(); + static scoped_refptr Create() { + return make_ref_counted(); } MOCK_METHOD(bool, SetTrack, (MediaStreamTrackInterface*), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, track, (), (const, override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, dtls_transport, (), - (const override)); + (const, override)); MOCK_METHOD(uint32_t, ssrc, (), (const, override)); - MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); + MOCK_METHOD(webrtc::MediaType, media_type, (), (const, override)); MOCK_METHOD(std::string, id, (), (const, override)); MOCK_METHOD(std::vector, stream_ids, (), (const, override)); MOCK_METHOD(void, SetStreams, (const std::vector&), (override)); @@ -50,29 +64,30 @@ class MockRtpSender : public RtpSenderInterface { SetParametersAsync, (const RtpParameters&, SetParametersCallback), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, GetDtmfSender, (), (const, override)); MOCK_METHOD(void, SetFrameEncryptor, - (rtc::scoped_refptr), + (webrtc::scoped_refptr), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, GetFrameEncryptor, (), (const, override)); MOCK_METHOD(void, - SetEncoderToPacketizerFrameTransformer, - (rtc::scoped_refptr), + SetFrameTransformer, + (webrtc::scoped_refptr), (override)); MOCK_METHOD(void, SetEncoderSelector, (std::unique_ptr), (override)); + MOCK_METHOD(void, SetObserver, (RtpSenderObserverInterface*), (override)); }; -static_assert(!std::is_abstract_v>, ""); +static_assert(!std::is_abstract_v>, ""); } // namespace webrtc #endif // API_TEST_MOCK_RTPSENDER_H_ diff --git a/api/test/mock_session_description_interface.h b/api/test/mock_session_description_interface.h index f0346ceb11..549eded2f9 100644 --- a/api/test/mock_session_description_interface.h +++ b/api/test/mock_session_description_interface.h @@ -11,11 +11,13 @@ #ifndef API_TEST_MOCK_SESSION_DESCRIPTION_INTERFACE_H_ #define API_TEST_MOCK_SESSION_DESCRIPTION_INTERFACE_H_ +#include #include #include -#include +#include #include +#include "api/candidate.h" #include "api/jsep.h" #include "test/gmock.h" @@ -27,11 +29,8 @@ class MockSessionDescriptionInterface : public SessionDescriptionInterface { Clone, (), (const, override)); - MOCK_METHOD(cricket::SessionDescription*, description, (), (override)); - MOCK_METHOD(const cricket::SessionDescription*, - description, - (), - (const, override)); + MOCK_METHOD(SessionDescription*, description, (), (override)); + MOCK_METHOD(const SessionDescription*, description, (), (const, override)); MOCK_METHOD(std::string, session_id, (), (const, override)); MOCK_METHOD(std::string, session_version, (), (const, override)); MOCK_METHOD(SdpType, GetType, (), (const, override)); @@ -39,7 +38,7 @@ class MockSessionDescriptionInterface : public SessionDescriptionInterface { MOCK_METHOD(bool, AddCandidate, (const IceCandidateInterface*), (override)); MOCK_METHOD(size_t, RemoveCandidates, - (const std::vector&), + (const std::vector&), (override)); MOCK_METHOD(size_t, number_of_mediasections, (), (const, override)); MOCK_METHOD(const IceCandidateCollection*, diff --git a/api/test/mock_transformable_audio_frame.h b/api/test/mock_transformable_audio_frame.h index be703006ea..552d109f24 100644 --- a/api/test/mock_transformable_audio_frame.h +++ b/api/test/mock_transformable_audio_frame.h @@ -11,24 +11,34 @@ #ifndef API_TEST_MOCK_TRANSFORMABLE_AUDIO_FRAME_H_ #define 
API_TEST_MOCK_TRANSFORMABLE_AUDIO_FRAME_H_ +#include +#include +#include + +#include "api/array_view.h" #include "api/frame_transformer_interface.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "test/gmock.h" namespace webrtc { class MockTransformableAudioFrame : public TransformableAudioFrameInterface { public: - MOCK_METHOD(rtc::ArrayView, GetData, (), (const, override)); - MOCK_METHOD(void, SetData, (rtc::ArrayView), (override)); + MockTransformableAudioFrame() : TransformableAudioFrameInterface(Passkey()) {} + + MOCK_METHOD(ArrayView, GetData, (), (const, override)); + MOCK_METHOD(void, SetData, (webrtc::ArrayView), (override)); MOCK_METHOD(void, SetRTPTimestamp, (uint32_t), (override)); MOCK_METHOD(uint8_t, GetPayloadType, (), (const, override)); MOCK_METHOD(uint32_t, GetSsrc, (), (const, override)); MOCK_METHOD(uint32_t, GetTimestamp, (), (const, override)); - MOCK_METHOD(rtc::ArrayView, + MOCK_METHOD(std::string, GetMimeType, (), (const, override)); + MOCK_METHOD(ArrayView, GetContributingSources, (), - (const override)); - MOCK_METHOD(const absl::optional, + (const, override)); + MOCK_METHOD(const std::optional, SequenceNumber, (), (const, override)); @@ -36,7 +46,7 @@ class MockTransformableAudioFrame : public TransformableAudioFrameInterface { GetDirection, (), (const, override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, AbsoluteCaptureTimestamp, (), (const, override)); @@ -44,6 +54,14 @@ class MockTransformableAudioFrame : public TransformableAudioFrameInterface { Type, (), (const, override)); + MOCK_METHOD(std::optional, AudioLevel, (), (const, override)); + + MOCK_METHOD(std::optional, ReceiveTime, (), (const, override)); + MOCK_METHOD(std::optional, CaptureTime, (), (const, override)); + MOCK_METHOD(std::optional, + SenderCaptureTimeOffset, + (), + (const, override)); }; } // namespace webrtc diff --git a/api/test/mock_transformable_frame.h b/api/test/mock_transformable_frame.h new file mode 100644 index 0000000000..9a1456d6fa --- /dev/null +++ b/api/test/mock_transformable_frame.h @@ -0,0 +1,55 @@ +/* + * Copyright 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_TRANSFORMABLE_FRAME_H_ +#define API_TEST_MOCK_TRANSFORMABLE_FRAME_H_ + +#include + +#include +#include +#include + +#include "api/array_view.h" +#include "api/frame_transformer_interface.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockTransformableFrame : public TransformableFrameInterface { + public: + MockTransformableFrame() : TransformableFrameInterface(Passkey()) {} + + MOCK_METHOD(ArrayView, GetData, (), (const, override)); + MOCK_METHOD(void, SetData, (webrtc::ArrayView), (override)); + MOCK_METHOD(uint8_t, GetPayloadType, (), (const, override)); + MOCK_METHOD(uint32_t, GetSsrc, (), (const, override)); + MOCK_METHOD(uint32_t, GetTimestamp, (), (const, override)); + MOCK_METHOD(void, SetRTPTimestamp, (uint32_t), (override)); + MOCK_METHOD(std::optional, + GetPresentationTimestamp, + (), + (const, override)); + MOCK_METHOD(std::string, GetMimeType, (), (const, override)); + MOCK_METHOD(std::optional, ReceiveTime, (), (const, override)); + MOCK_METHOD(std::optional, CaptureTime, (), (const, override)); + MOCK_METHOD(std::optional, + SenderCaptureTimeOffset, + (), + (const, override)); +}; + +static_assert(!std::is_abstract_v, ""); + +} // namespace webrtc + +#endif // API_TEST_MOCK_TRANSFORMABLE_FRAME_H_ diff --git a/api/test/mock_transformable_video_frame.h b/api/test/mock_transformable_video_frame.h index 21c4dc2b69..8b13e16e95 100644 --- a/api/test/mock_transformable_video_frame.h +++ b/api/test/mock_transformable_video_frame.h @@ -11,9 +11,16 @@ #ifndef API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_ #define API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_ -#include +#include +#include +#include +#include +#include "api/array_view.h" #include "api/frame_transformer_interface.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/video_frame_metadata.h" #include "test/gmock.h" namespace webrtc { @@ -21,8 +28,12 @@ namespace webrtc { class MockTransformableVideoFrame : public webrtc::TransformableVideoFrameInterface { public: - MOCK_METHOD(rtc::ArrayView, GetData, (), (const, override)); - MOCK_METHOD(void, SetData, (rtc::ArrayView data), (override)); + MockTransformableVideoFrame() : TransformableVideoFrameInterface(Passkey()) {} + MOCK_METHOD(ArrayView, GetData, (), (const, override)); + MOCK_METHOD(void, + SetData, + (webrtc::ArrayView data), + (override)); MOCK_METHOD(uint32_t, GetTimestamp, (), (const, override)); MOCK_METHOD(void, SetRTPTimestamp, (uint32_t), (override)); MOCK_METHOD(uint32_t, GetSsrc, (), (const, override)); @@ -36,9 +47,16 @@ class MockTransformableVideoFrame GetDirection, (), (const, override)); + MOCK_METHOD(std::string, GetMimeType, (), (const, override)); MOCK_METHOD(VideoFrameMetadata, Metadata, (), (const, override)); - MOCK_METHOD(absl::optional, - GetCaptureTimeIdentifier, + MOCK_METHOD(std::optional, + GetPresentationTimestamp, + (), + (const, override)); + MOCK_METHOD(std::optional, ReceiveTime, (), (const, override)); + MOCK_METHOD(std::optional, CaptureTime, (), (const, override)); + MOCK_METHOD(std::optional, + SenderCaptureTimeOffset, (), (const, override)); }; diff --git a/api/test/mock_video_bitrate_allocator.h b/api/test/mock_video_bitrate_allocator.h index 76cf49e955..de06d93e3f 100644 --- a/api/test/mock_video_bitrate_allocator.h +++ b/api/test/mock_video_bitrate_allocator.h @@ -11,6 +11,7 @@ #ifndef API_TEST_MOCK_VIDEO_BITRATE_ALLOCATOR_H_ #define API_TEST_MOCK_VIDEO_BITRATE_ALLOCATOR_H_ +#include 
"api/video/video_bitrate_allocation.h" #include "api/video/video_bitrate_allocator.h" #include "test/gmock.h" diff --git a/api/test/mock_video_bitrate_allocator_factory.h b/api/test/mock_video_bitrate_allocator_factory.h index 16af191970..7e7906a921 100644 --- a/api/test/mock_video_bitrate_allocator_factory.h +++ b/api/test/mock_video_bitrate_allocator_factory.h @@ -13,18 +13,20 @@ #include +#include "api/environment/environment.h" +#include "api/video/video_bitrate_allocator.h" #include "api/video/video_bitrate_allocator_factory.h" +#include "api/video_codecs/video_codec.h" #include "test/gmock.h" namespace webrtc { -class MockVideoBitrateAllocatorFactory - : public webrtc::VideoBitrateAllocatorFactory { +class MockVideoBitrateAllocatorFactory : public VideoBitrateAllocatorFactory { public: ~MockVideoBitrateAllocatorFactory() override { Die(); } MOCK_METHOD(std::unique_ptr, - CreateVideoBitrateAllocator, - (const VideoCodec&), + Create, + (const Environment&, const VideoCodec&), (override)); MOCK_METHOD(void, Die, ()); }; diff --git a/api/test/mock_video_decoder.h b/api/test/mock_video_decoder.h index dc499d45f7..0608222a99 100644 --- a/api/test/mock_video_decoder.h +++ b/api/test/mock_video_decoder.h @@ -11,8 +11,11 @@ #ifndef API_TEST_MOCK_VIDEO_DECODER_H_ #define API_TEST_MOCK_VIDEO_DECODER_H_ -#include +#include +#include +#include "api/video/encoded_image.h" +#include "api/video/video_frame.h" #include "api/video_codecs/video_decoder.h" #include "test/gmock.h" @@ -35,8 +38,8 @@ class MockDecodedImageCallback : public DecodedImageCallback { MOCK_METHOD(void, Decoded, (VideoFrame & decoded_image, // NOLINT - absl::optional decode_time_ms, - absl::optional qp), + std::optional decode_time_ms, + std::optional qp), (override)); }; @@ -61,8 +64,7 @@ class MockVideoDecoder : public VideoDecoder { MOCK_METHOD(bool, Configure, (const Settings& settings), (override)); MOCK_METHOD(int32_t, Decode, - (const EncodedImage& input_image, - int64_t render_time_ms), + (const EncodedImage& input_image, int64_t render_time_ms), (override)); MOCK_METHOD(int32_t, Decode, diff --git a/api/test/mock_video_decoder_factory.h b/api/test/mock_video_decoder_factory.h index 6150d9f8b5..f661ea3654 100644 --- a/api/test/mock_video_decoder_factory.h +++ b/api/test/mock_video_decoder_factory.h @@ -14,6 +14,7 @@ #include #include +#include "api/environment/environment.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_decoder_factory.h" @@ -21,17 +22,17 @@ namespace webrtc { -class MockVideoDecoderFactory : public webrtc::VideoDecoderFactory { +class MockVideoDecoderFactory : public VideoDecoderFactory { public: ~MockVideoDecoderFactory() override { Die(); } - MOCK_METHOD(std::vector, + MOCK_METHOD(std::vector, GetSupportedFormats, (), (const, override)); - MOCK_METHOD(std::unique_ptr, - CreateVideoDecoder, - (const webrtc::SdpVideoFormat&), + MOCK_METHOD(std::unique_ptr, + Create, + (const Environment&, const SdpVideoFormat&), (override)); MOCK_METHOD(void, Die, ()); }; diff --git a/api/test/mock_video_encoder.h b/api/test/mock_video_encoder.h index 11e0f64b3f..9340d6ea38 100644 --- a/api/test/mock_video_encoder.h +++ b/api/test/mock_video_encoder.h @@ -11,8 +11,15 @@ #ifndef API_TEST_MOCK_VIDEO_ENCODER_H_ #define API_TEST_MOCK_VIDEO_ENCODER_H_ +#include +#include #include +#include "api/fec_controller_override.h" +#include "api/video/encoded_image.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" +#include 
"api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" #include "test/gmock.h" diff --git a/api/test/mock_video_encoder_factory.h b/api/test/mock_video_encoder_factory.h index 02ee7aa15e..beeba29d8a 100644 --- a/api/test/mock_video_encoder_factory.h +++ b/api/test/mock_video_encoder_factory.h @@ -14,6 +14,7 @@ #include #include +#include "api/environment/environment.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" @@ -30,8 +31,8 @@ class MockVideoEncoderFactory : public webrtc::VideoEncoderFactory { (), (const, override)); MOCK_METHOD(std::unique_ptr, - CreateVideoEncoder, - (const SdpVideoFormat&), + Create, + (const Environment&, const SdpVideoFormat&), (override)); MOCK_METHOD(void, Die, ()); diff --git a/api/test/mock_video_track.h b/api/test/mock_video_track.h index 1212a32527..3bf083dd6c 100644 --- a/api/test/mock_video_track.h +++ b/api/test/mock_video_track.h @@ -15,16 +15,18 @@ #include "api/media_stream_interface.h" #include "api/scoped_refptr.h" +#include "api/video/video_frame.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" #include "rtc_base/ref_counted_object.h" #include "test/gmock.h" namespace webrtc { -class MockVideoTrack - : public rtc::RefCountedObject { +class MockVideoTrack : public RefCountedObject { public: - static rtc::scoped_refptr Create() { - return rtc::scoped_refptr(new MockVideoTrack()); + static scoped_refptr Create() { + return scoped_refptr(new MockVideoTrack()); } // NotifierInterface @@ -47,14 +49,14 @@ class MockVideoTrack // VideoSourceInterface MOCK_METHOD(void, AddOrUpdateSink, - (rtc::VideoSinkInterface * sink, - const rtc::VideoSinkWants& wants), + (webrtc::VideoSinkInterface * sink, + const webrtc::VideoSinkWants& wants), (override)); // RemoveSink must guarantee that at the time the method returns, // there is no current and no future calls to VideoSinkInterface::OnFrame. MOCK_METHOD(void, RemoveSink, - (rtc::VideoSinkInterface * sink), + (webrtc::VideoSinkInterface * sink), (override)); // VideoTrackInterface diff --git a/api/test/neteq_simulator.h b/api/test/neteq_simulator.h index 88c7ffa681..02795fefee 100644 --- a/api/test/neteq_simulator.h +++ b/api/test/neteq_simulator.h @@ -16,6 +16,8 @@ #include #include +#include "api/neteq/neteq.h" + namespace webrtc { namespace test { @@ -74,6 +76,9 @@ class NetEqSimulator { // Get the current state of NetEq. virtual NetEqState GetNetEqState() = 0; + + // Get the underlying NetEq instance. 
+ virtual NetEq* GetNetEq() = 0; }; } // namespace test diff --git a/api/test/neteq_simulator_factory.cc b/api/test/neteq_simulator_factory.cc index 82b27e546d..6a05816744 100644 --- a/api/test/neteq_simulator_factory.cc +++ b/api/test/neteq_simulator_factory.cc @@ -12,12 +12,10 @@ #include #include -#include -#include "absl/flags/flag.h" -#include "absl/flags/parse.h" +#include "absl/strings/string_view.h" +#include "api/test/neteq_simulator.h" #include "modules/audio_coding/neteq/tools/neteq_test_factory.h" -#include "rtc_base/checks.h" namespace webrtc { namespace test { @@ -36,6 +34,7 @@ NetEqTestFactory::Config convertConfig( config.plot_scripts_basename = simulation_config.python_plot_filename; config.textlog = simulation_config.text_log_filename.has_value(); config.textlog_filename = simulation_config.text_log_filename; + config.ssrc_filter = simulation_config.ssrc_filter; return config; } } // namespace diff --git a/api/test/neteq_simulator_factory.h b/api/test/neteq_simulator_factory.h index 2a716e665e..220daa5cf1 100644 --- a/api/test/neteq_simulator_factory.h +++ b/api/test/neteq_simulator_factory.h @@ -11,11 +11,12 @@ #ifndef API_TEST_NETEQ_SIMULATOR_FACTORY_H_ #define API_TEST_NETEQ_SIMULATOR_FACTORY_H_ +#include #include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/neteq/neteq_factory.h" #include "api/test/neteq_simulator.h" @@ -43,13 +44,15 @@ class NetEqSimulatorFactory { // A WebRTC field trial string to be used during the simulation. std::string field_trial_string; // A filename for the generated output audio file. - absl::optional output_audio_filename; + std::optional output_audio_filename; // A filename for the python plot. - absl::optional python_plot_filename; + std::optional python_plot_filename; // A filename for the text log. - absl::optional text_log_filename; + std::optional text_log_filename; // A custom NetEqFactory can be used. NetEqFactory* neteq_factory = nullptr; + // The SSRC to use for the simulation. + std::optional ssrc_filter; }; std::unique_ptr CreateSimulatorFromFile( absl::string_view event_log_filename, diff --git a/api/test/network_emulation/BUILD.gn b/api/test/network_emulation/BUILD.gn index d009d39a21..e574cb342a 100644 --- a/api/test/network_emulation/BUILD.gn +++ b/api/test/network_emulation/BUILD.gn @@ -6,32 +6,59 @@ # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. 
+import("//third_party/protobuf/proto_library.gni") import("../../../webrtc.gni") +if (rtc_enable_protobuf) { + proto_library("network_config_schedule_proto") { + visibility = [ "*" ] + sources = [ "network_config_schedule.proto" ] + proto_out_dir = "api/test/network_emulation/" + } + + rtc_source_set("schedulable_network_node_builder") { + visibility = [ "*" ] + sources = [ + "schedulable_network_node_builder.cc", + "schedulable_network_node_builder.h", + ] + deps = [ + ":network_config_schedule_proto", + "../..:network_emulation_manager_api", + "../../../rtc_base:timeutils", + "../../../test/network:schedulable_network_behavior", + "../../units:timestamp", + "//third_party/abseil-cpp/absl/functional:any_invocable", + ] + } +} + rtc_library("network_emulation") { visibility = [ "*" ] sources = [ "cross_traffic.h", + "ecn_marking_counter.cc", + "ecn_marking_counter.h", "network_emulation_interfaces.cc", "network_emulation_interfaces.h", ] deps = [ - "../..:array_view", "../../../rtc_base:checks", "../../../rtc_base:copy_on_write_buffer", "../../../rtc_base:ip_address", "../../../rtc_base:net_helper", + "../../../rtc_base:net_helpers", "../../../rtc_base:socket_address", "../../numerics", - "../../task_queue", + "../../transport:ecn_marking", "../../units:data_rate", "../../units:data_size", "../../units:time_delta", "../../units:timestamp", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("create_cross_traffic") { @@ -46,7 +73,6 @@ rtc_library("create_cross_traffic") { deps = [ ":network_emulation", "../..:network_emulation_manager_api", - "../../../rtc_base/task_utils:repeating_task", "../../../test/network:emulated_network", ] } diff --git a/api/test/network_emulation/create_cross_traffic.cc b/api/test/network_emulation/create_cross_traffic.cc index 36a535cec6..29481e32da 100644 --- a/api/test/network_emulation/create_cross_traffic.cc +++ b/api/test/network_emulation/create_cross_traffic.cc @@ -11,8 +11,9 @@ #include -#include "rtc_base/task_utils/repeating_task.h" +#include "api/test/network_emulation/cross_traffic.h" #include "test/network/cross_traffic.h" +#include "test/network/network_emulation.h" namespace webrtc { diff --git a/api/test/network_emulation/cross_traffic.h b/api/test/network_emulation/cross_traffic.h index 737a93c2fd..603e1d4eb7 100644 --- a/api/test/network_emulation/cross_traffic.h +++ b/api/test/network_emulation/cross_traffic.h @@ -10,8 +10,9 @@ #ifndef API_TEST_NETWORK_EMULATION_CROSS_TRAFFIC_H_ #define API_TEST_NETWORK_EMULATION_CROSS_TRAFFIC_H_ -#include "api/task_queue/task_queue_base.h" -#include "api/test/network_emulation/network_emulation_interfaces.h" +#include +#include + #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" diff --git a/api/test/network_emulation/ecn_marking_counter.cc b/api/test/network_emulation/ecn_marking_counter.cc new file mode 100644 index 0000000000..c83ba871a6 --- /dev/null +++ b/api/test/network_emulation/ecn_marking_counter.cc @@ -0,0 +1,42 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+#include "api/test/network_emulation/ecn_marking_counter.h"
+
+#include "api/transport/ecn_marking.h"
+
+namespace webrtc {
+
+void EcnMarkingCounter::Add(EcnMarking ecn) {
+  switch (ecn) {
+    case EcnMarking::kNotEct:
+      ++not_ect_;
+      break;
+    case EcnMarking::kEct0:
+      ++ect_0_;
+      break;
+    case EcnMarking::kEct1:
+      ++ect_1_;
+      break;
+    case EcnMarking::kCe:
+      ++ce_;
+      break;
+  }
+}
+
+EcnMarkingCounter& EcnMarkingCounter::operator+=(
+    const EcnMarkingCounter& counter) {
+  not_ect_ += counter.not_ect();
+  ect_0_ += counter.ect_0();
+  ect_1_ += counter.ect_1();
+  ce_ += counter.ce();
+  return *this;
+}
+
+}  // namespace webrtc
diff --git a/api/test/network_emulation/ecn_marking_counter.h b/api/test/network_emulation/ecn_marking_counter.h
new file mode 100644
index 0000000000..24c8bffb72
--- /dev/null
+++ b/api/test/network_emulation/ecn_marking_counter.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2024 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef API_TEST_NETWORK_EMULATION_ECN_MARKING_COUNTER_H_
+#define API_TEST_NETWORK_EMULATION_ECN_MARKING_COUNTER_H_
+
+#include "api/transport/ecn_marking.h"
+
+namespace webrtc {
+
+// Counts Explicit Congestion Notification marks in IP packets.
+// https://www.rfc-editor.org/rfc/rfc9331.html
+class EcnMarkingCounter {
+ public:
+  // Number of packets without ECT explicitly set sent through the network.
+  int not_ect() const { return not_ect_; }
+  // Number of packets with ECT(0) sent through the network.
+  int ect_0() const { return ect_0_; }
+  // Number of packets with ECT(1) sent through the network.
+  int ect_1() const { return ect_1_; }
+  // Number of packets the network has marked as CE (congestion experienced).
+  int ce() const { return ce_; }
+
+  void Add(EcnMarking ecn);
+  EcnMarkingCounter& operator+=(const EcnMarkingCounter& counter);
+
+ private:
+  int not_ect_ = 0;
+  int ect_0_ = 0;  // Not used by WebRTC or L4S.
+  int ect_1_ = 0;
+  int ce_ = 0;
+};
+
+}  // namespace webrtc
+#endif  // API_TEST_NETWORK_EMULATION_ECN_MARKING_COUNTER_H_
diff --git a/api/test/network_emulation/network_config_schedule.proto b/api/test/network_emulation/network_config_schedule.proto
new file mode 100644
index 0000000000..0e1036cdc2
--- /dev/null
+++ b/api/test/network_emulation/network_config_schedule.proto
@@ -0,0 +1,27 @@
+syntax = "proto2";
+
+option optimize_for = LITE_RUNTIME;
+package webrtc.network_behaviour;
+
+message NetworkConfigScheduleItem {
+  // Time since the first sent packet when this item should be applied.
+  // This should typically be 0 for the first item in the schedule.
+  optional int64 time_since_first_sent_packet_ms = 1;
+
+  // Network parameters, see webrtc::BuiltInNetworkBehaviorConfig.
+  optional int64 queue_length_packets = 2;
+  optional int64 queue_delay_ms = 3;
+  optional int64 link_capacity_kbps = 4;
+  optional int64 loss_percent = 5;
+  optional int64 delay_standard_deviation_ms = 6;
+  optional bool allow_reordering = 7;
+  optional int64 avg_burst_loss_length = 8;
+  optional int64 packet_overhead = 9;
+}
+
+// Schedule describing network parameters in a simulated network.
+message NetworkConfigSchedule { + optional int64 repeat_schedule_after_last_ms = 1; + // Items should be sorted by time_since_first_sent_packet_ms. + repeated NetworkConfigScheduleItem item = 2; +} diff --git a/api/test/network_emulation/network_emulation_interfaces.cc b/api/test/network_emulation/network_emulation_interfaces.cc index 0f3a7f8ffd..65f6c0dea7 100644 --- a/api/test/network_emulation/network_emulation_interfaces.cc +++ b/api/test/network_emulation/network_emulation_interfaces.cc @@ -9,21 +9,32 @@ */ #include "api/test/network_emulation/network_emulation_interfaces.h" +#include + +#include "api/transport/ecn_marking.h" +#include "api/units/data_rate.h" +#include "api/units/timestamp.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/net_helper.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/socket_address.h" namespace webrtc { -EmulatedIpPacket::EmulatedIpPacket(const rtc::SocketAddress& from, - const rtc::SocketAddress& to, - rtc::CopyOnWriteBuffer data, +EmulatedIpPacket::EmulatedIpPacket(const SocketAddress& from, + const SocketAddress& to, + CopyOnWriteBuffer data, Timestamp arrival_time, - uint16_t application_overhead) + uint16_t application_overhead, + EcnMarking ecn) : from(from), to(to), data(data), headers_size(to.ipaddr().overhead() + application_overhead + - cricket::kUdpHeaderSize), - arrival_time(arrival_time) { + kUdpHeaderSize), + arrival_time(arrival_time), + ecn(ecn) { RTC_DCHECK(to.family() == AF_INET || to.family() == AF_INET6); } diff --git a/api/test/network_emulation/network_emulation_interfaces.h b/api/test/network_emulation/network_emulation_interfaces.h index 7cab07b75d..a055592847 100644 --- a/api/test/network_emulation/network_emulation_interfaces.h +++ b/api/test/network_emulation/network_emulation_interfaces.h @@ -10,13 +10,16 @@ #ifndef API_TEST_NETWORK_EMULATION_NETWORK_EMULATION_INTERFACES_H_ #define API_TEST_NETWORK_EMULATION_NETWORK_EMULATION_INTERFACES_H_ +#include +#include #include -#include +#include #include -#include "absl/types/optional.h" -#include "api/array_view.h" +#include "absl/functional/any_invocable.h" #include "api/numerics/samples_stats_counter.h" +#include "api/test/network_emulation/ecn_marking_counter.h" +#include "api/transport/ecn_marking.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/timestamp.h" @@ -28,11 +31,12 @@ namespace webrtc { struct EmulatedIpPacket { public: - EmulatedIpPacket(const rtc::SocketAddress& from, - const rtc::SocketAddress& to, - rtc::CopyOnWriteBuffer data, + EmulatedIpPacket(const SocketAddress& from, + const SocketAddress& to, + CopyOnWriteBuffer data, Timestamp arrival_time, - uint16_t application_overhead = 0); + uint16_t application_overhead = 0, + EcnMarking ecn = EcnMarking::kNotEct); ~EmulatedIpPacket() = default; // This object is not copyable or assignable. EmulatedIpPacket(const EmulatedIpPacket&) = delete; @@ -45,12 +49,13 @@ struct EmulatedIpPacket { const uint8_t* cdata() const { return data.cdata(); } size_t ip_packet_size() const { return size() + headers_size; } - rtc::SocketAddress from; - rtc::SocketAddress to; + SocketAddress from; + SocketAddress to; // Holds the UDP payload. - rtc::CopyOnWriteBuffer data; + CopyOnWriteBuffer data; uint16_t headers_size; Timestamp arrival_time; + EcnMarking ecn; }; // Interface for handling IP packets from an emulated network. 
This is used with @@ -79,6 +84,8 @@ struct EmulatedNetworkOutgoingStats { // Time of the last packet sent or infinite value if no packets were sent. Timestamp last_packet_sent_time = Timestamp::MinusInfinity(); + EcnMarkingCounter ecn_count; + // Returns average send rate. Requires that at least 2 packets were sent. DataRate AverageSendRate() const; }; @@ -114,6 +121,8 @@ struct EmulatedNetworkIncomingStats { // received. Timestamp last_packet_received_time = Timestamp::MinusInfinity(); + EcnMarkingCounter ecn_count; + DataRate AverageReceiveRate() const; }; @@ -206,7 +215,7 @@ struct EmulatedNetworkStats { // List of IP addresses that were used to send data considered in this stats // object. - std::vector local_addresses; + std::vector local_addresses; // Overall outgoing stats for all IP addresses which were requested. EmulatedNetworkOutgoingStats overall_outgoing_stats; @@ -215,10 +224,9 @@ struct EmulatedNetworkStats { // on requested interfaces. EmulatedNetworkIncomingStats overall_incoming_stats; - std::map + std::map outgoing_stats_per_destination; - std::map - incoming_stats_per_source; + std::map incoming_stats_per_source; // Duration between packet was received on network interface and was // dispatched to the network in microseconds. @@ -250,10 +258,11 @@ class EmulatedEndpoint : public EmulatedNetworkReceiverInterface { // socket. // `to` will be used for routing verification and picking right socket by port // on destination endpoint. - virtual void SendPacket(const rtc::SocketAddress& from, - const rtc::SocketAddress& to, - rtc::CopyOnWriteBuffer packet_data, - uint16_t application_overhead = 0) = 0; + virtual void SendPacket(const SocketAddress& from, + const SocketAddress& to, + CopyOnWriteBuffer packet_data, + uint16_t application_overhead = 0, + EcnMarking ecn = EcnMarking::kNotEct) = 0; // Binds receiver to this endpoint to send and receive data. // `desired_port` is a port that should be used. If it is equal to 0, @@ -262,12 +271,12 @@ class EmulatedEndpoint : public EmulatedNetworkReceiverInterface { // // Returns the port, that should be used (it will be equals to desired, if // `desired_port` != 0 and is free or will be the one, selected by endpoint) - // or absl::nullopt if desired_port in used. Also fails if there are no more + // or std::nullopt if desired_port in used. Also fails if there are no more // free ports to bind to. // // The Bind- and Unbind-methods must not be called from within a bound // receiver's OnPacketReceived method. - virtual absl::optional BindReceiver( + virtual std::optional BindReceiver( uint16_t desired_port, EmulatedNetworkReceiverInterface* receiver) = 0; // Unbinds receiver from the specified port. Do nothing if no receiver was @@ -281,7 +290,7 @@ class EmulatedEndpoint : public EmulatedNetworkReceiverInterface { // Unbinds default receiver. Do nothing if no default receiver was bound // before. virtual void UnbindDefaultReceiver() = 0; - virtual rtc::IPAddress GetPeerLocalAddress() const = 0; + virtual IPAddress GetPeerLocalAddress() const = 0; private: // Ensure that there can be no other subclass than EmulatedEndpointImpl. This @@ -301,7 +310,8 @@ class TcpMessageRoute { // Sends a TCP message of the given `size` over the route, `on_received` is // called when the message has been delivered. Note that the connection // parameters are reset iff there's no currently pending message on the route. 
- virtual void SendMessage(size_t size, std::function on_received) = 0; + virtual void SendMessage(size_t size, + absl::AnyInvocable on_received) = 0; protected: ~TcpMessageRoute() = default; diff --git a/api/test/network_emulation/schedulable_network_node_builder.cc b/api/test/network_emulation/schedulable_network_node_builder.cc new file mode 100644 index 0000000000..2b2bf39d86 --- /dev/null +++ b/api/test/network_emulation/schedulable_network_node_builder.cc @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/test/network_emulation/schedulable_network_node_builder.h" + +#include +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "api/test/network_emulation/network_config_schedule.pb.h" +#include "api/test/network_emulation_manager.h" +#include "api/units/timestamp.h" +#include "rtc_base/time_utils.h" +#include "test/network/schedulable_network_behavior.h" + +namespace webrtc { + +SchedulableNetworkNodeBuilder::SchedulableNetworkNodeBuilder( + webrtc::NetworkEmulationManager& net, + network_behaviour::NetworkConfigSchedule schedule) + : net_(net), + schedule_(std::move(schedule)), + start_condition_([](webrtc::Timestamp) { return true; }) {} + +void SchedulableNetworkNodeBuilder::set_start_condition( + absl::AnyInvocable start_condition) { + start_condition_ = std::move(start_condition); +} + +webrtc::EmulatedNetworkNode* SchedulableNetworkNodeBuilder::Build( + std::optional random_seed) { + uint64_t seed = random_seed.has_value() ? *random_seed + : static_cast(TimeNanos()); + return net_.CreateEmulatedNode(std::make_unique( + std::move(schedule_), seed, *net_.time_controller()->GetClock(), + std::move(start_condition_))); +} +} // namespace webrtc diff --git a/api/test/network_emulation/schedulable_network_node_builder.h b/api/test/network_emulation/schedulable_network_node_builder.h new file mode 100644 index 0000000000..9ac6547a32 --- /dev/null +++ b/api/test/network_emulation/schedulable_network_node_builder.h @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_TEST_NETWORK_EMULATION_SCHEDULABLE_NETWORK_NODE_BUILDER_H_ +#define API_TEST_NETWORK_EMULATION_SCHEDULABLE_NETWORK_NODE_BUILDER_H_ + +#include +#include + +#include "absl/functional/any_invocable.h" +#include "api/test/network_emulation/network_config_schedule.pb.h" +#include "api/test/network_emulation_manager.h" +#include "api/units/timestamp.h" + +namespace webrtc { + +class SchedulableNetworkNodeBuilder { + public: + SchedulableNetworkNodeBuilder( + webrtc::NetworkEmulationManager& net, + network_behaviour::NetworkConfigSchedule schedule); + // set_start_condition allows a test to control when the schedule start. 
+ // `start_condition` is invoked every time a packet is enqueued on the network + // until the first time `start_condition` returns true. Until then, the first + // NetworkConfigScheduleItem is used. There is no guarantee on which + // thread/task queue that will be used. + void set_start_condition( + absl::AnyInvocable start_condition); + + // If no random seed is provided, one will be created. + // The random seed is required for loss rate and to delay standard deviation. + webrtc::EmulatedNetworkNode* Build( + std::optional random_seed = std::nullopt); + + private: + webrtc::NetworkEmulationManager& net_; + network_behaviour::NetworkConfigSchedule schedule_; + absl::AnyInvocable start_condition_; +}; + +} // namespace webrtc + +#endif // API_TEST_NETWORK_EMULATION_SCHEDULABLE_NETWORK_NODE_BUILDER_H_ diff --git a/api/test/network_emulation_manager.cc b/api/test/network_emulation_manager.cc index 756fe4e757..03dcd84573 100644 --- a/api/test/network_emulation_manager.cc +++ b/api/test/network_emulation_manager.cc @@ -9,10 +9,16 @@ */ #include "api/test/network_emulation_manager.h" +#include +#include +#include #include -#include "call/simulated_network.h" +#include "absl/strings/string_view.h" +#include "api/test/simulated_network.h" +#include "api/units/data_rate.h" #include "rtc_base/checks.h" +#include "test/network/simulated_network.h" namespace webrtc { @@ -55,23 +61,38 @@ NetworkEmulationManager::SimulatedNetworkNode::Builder::delay_ms( return *this; } +NetworkEmulationManager::SimulatedNetworkNode::Builder& +NetworkEmulationManager::SimulatedNetworkNode::Builder::capacity( + DataRate link_capacity) { + config_.link_capacity = link_capacity; + return *this; +} + NetworkEmulationManager::SimulatedNetworkNode::Builder& NetworkEmulationManager::SimulatedNetworkNode::Builder::capacity_kbps( int link_capacity_kbps) { - config_.link_capacity_kbps = link_capacity_kbps; + if (link_capacity_kbps > 0) { + config_.link_capacity = DataRate::KilobitsPerSec(link_capacity_kbps); + } else { + config_.link_capacity = DataRate::Infinity(); + } return *this; } NetworkEmulationManager::SimulatedNetworkNode::Builder& NetworkEmulationManager::SimulatedNetworkNode::Builder::capacity_Mbps( int link_capacity_Mbps) { - config_.link_capacity_kbps = link_capacity_Mbps * 1000; + if (link_capacity_Mbps > 0) { + config_.link_capacity = DataRate::KilobitsPerSec(link_capacity_Mbps * 1000); + } else { + config_.link_capacity = DataRate::Infinity(); + } return *this; } NetworkEmulationManager::SimulatedNetworkNode::Builder& NetworkEmulationManager::SimulatedNetworkNode::Builder::loss(double loss_rate) { - config_.loss_percent = std::round(loss_rate * 100); + config_.loss_percent = loss_rate * 100; return *this; } diff --git a/api/test/network_emulation_manager.h b/api/test/network_emulation_manager.h index 6b66d8188a..199a60e2ab 100644 --- a/api/test/network_emulation_manager.h +++ b/api/test/network_emulation_manager.h @@ -11,23 +11,27 @@ #ifndef API_TEST_NETWORK_EMULATION_MANAGER_H_ #define API_TEST_NETWORK_EMULATION_MANAGER_H_ +#include #include #include +#include #include #include #include +#include "absl/base/nullability.h" +#include "absl/strings/string_view.h" #include "api/array_view.h" -#include "api/packet_socket_factory.h" +#include "api/field_trials_view.h" #include "api/test/network_emulation/cross_traffic.h" #include "api/test/network_emulation/network_emulation_interfaces.h" #include "api/test/peer_network_dependencies.h" #include "api/test/simulated_network.h" #include "api/test/time_controller.h" 
-#include "api/units/timestamp.h" -#include "rtc_base/network.h" +#include "api/units/data_rate.h" +#include "rtc_base/ip_address.h" #include "rtc_base/network_constants.h" -#include "rtc_base/thread.h" +#include "rtc_base/socket_address.h" namespace webrtc { @@ -63,16 +67,16 @@ struct EmulatedEndpointConfig { enum class IpAddressFamily { kIpv4, kIpv6 }; // If specified will be used to name endpoint for logging purposes. - absl::optional name = absl::nullopt; + std::optional name = std::nullopt; IpAddressFamily generated_ip_family = IpAddressFamily::kIpv4; // If specified will be used as IP address for endpoint node. Must be unique // among all created nodes. - absl::optional ip; + std::optional ip; // Should endpoint be enabled or not, when it will be created. // Enabled endpoints will be available for webrtc to send packets. bool start_as_enabled = true; // Network type which will be used to represent endpoint to WebRTC. - rtc::AdapterType type = rtc::AdapterType::ADAPTER_TYPE_UNKNOWN; + AdapterType type = AdapterType::ADAPTER_TYPE_UNKNOWN; // Allow endpoint to send packets specifying source IP address different to // the current endpoint IP address. If false endpoint will crash if attempt // to send such packet will be done. @@ -86,6 +90,7 @@ struct EmulatedEndpointConfig { struct EmulatedTURNServerConfig { EmulatedEndpointConfig client_config; EmulatedEndpointConfig peer_config; + bool enable_permission_checks = true; }; // EmulatedTURNServer is an abstraction for a TURN server. @@ -108,7 +113,7 @@ class EmulatedTURNServerInterface { // Returns socket address, which client should use to connect to TURN server // and do TURN allocation. - virtual rtc::SocketAddress GetClientEndpointAddress() const = 0; + virtual SocketAddress GetClientEndpointAddress() const = 0; // Get non-null peer endpoint, that is "connected to the internet". // This shall typically be connected to another TURN server. @@ -118,25 +123,11 @@ class EmulatedTURNServerInterface { // Provide interface to obtain all required objects to inject network emulation // layer into PeerConnection. Also contains information about network interfaces // accessible by PeerConnection. -class EmulatedNetworkManagerInterface { +class EmulatedNetworkManagerInterface + : public webrtc_pc_e2e::PeerNetworkDependencies { public: - virtual ~EmulatedNetworkManagerInterface() = default; - - // Returns non-null pointer to thread that have to be used as network thread - // for WebRTC to properly setup network emulation. Returned thread is owned - // by EmulatedNetworkManagerInterface implementation. - virtual rtc::Thread* network_thread() = 0; - // Returns non-null pointer to network manager that have to be injected into - // WebRTC to properly setup network emulation. Returned manager is owned by - // EmulatedNetworkManagerInterface implementation. - virtual rtc::NetworkManager* network_manager() = 0; - // Returns non-null pointer to packet socket factory that have to be injected - // into WebRTC to properly setup network emulation. Returned factory is owned - // by EmulatedNetworkManagerInterface implementation. - virtual rtc::PacketSocketFactory* packet_socket_factory() = 0; - webrtc::webrtc_pc_e2e::PeerNetworkDependencies network_dependencies() { - return {network_thread(), network_manager(), packet_socket_factory()}; - } + ~EmulatedNetworkManagerInterface() override = default; + // Returns list of endpoints that are associated with this instance. Pointers // are guaranteed to be non-null and are owned by NetworkEmulationManager. 
virtual std::vector endpoints() const = 0; @@ -160,6 +151,26 @@ bool AbslParseFlag(absl::string_view text, TimeMode* mode, std::string* error); // `mode`. std::string AbslUnparseFlag(TimeMode mode); +// The construction-time configuration options for NetworkEmulationManager. +struct NetworkEmulationManagerConfig { + // The mode of the underlying time controller. + TimeMode time_mode = TimeMode::kRealTime; + // The mode that determines the set of metrics to collect into + // `EmulatedNetworkStats` and `EmulatedNetworkNodeStats`. + EmulatedNetworkStatsGatheringMode stats_gathering_mode = + EmulatedNetworkStatsGatheringMode::kDefault; + // Field trials that can alter the behavior of NetworkEmulationManager. + const FieldTrialsView* field_trials = nullptr; + // If this flag is set, NetworkEmulationManager ignores the sizes of peers' + // DTLS handshake packets when determining when to let the packets through + // a constrained emulated network. The actual handshake packet size is ignored + // and a hardcoded fake size is used to compute the packet's use of link capacity. + // This is useful for tests that require deterministic packet scheduling + // even when the sizes of DTLS handshake packets are not + // deterministic. This mode only makes sense together with simulated time. + bool fake_dtls_handshake_sizes = false; +}; + // Provides an API for creating and configuring emulated network layer. // All objects returned by this API are owned by NetworkEmulationManager itself // and will be deleted when the manager is deleted. @@ -180,6 +191,7 @@ class NetworkEmulationManager { // values. Builder& config(BuiltInNetworkBehaviorConfig config); Builder& delay_ms(int queue_delay_ms); + Builder& capacity(DataRate link_capacity); Builder& capacity_kbps(int link_capacity_kbps); Builder& capacity_Mbps(int link_capacity_Mbps); Builder& loss(double loss_rate); @@ -321,11 +333,11 @@ class NetworkEmulationManager { virtual void StopCrossTraffic(CrossTrafficGenerator* generator) = 0; // Creates EmulatedNetworkManagerInterface which can then be used to inject - // network emulation layer into PeerConnection. `endpoints` - are available - // network interfaces for PeerConnection. If endpoint is enabled, it will be - // immediately available for PeerConnection, otherwise user will be able to - // enable endpoint later to make it available for PeerConnection. - virtual EmulatedNetworkManagerInterface* + // network emulation layer into PeerConnectionFactory. `endpoints` are + // available network interfaces for PeerConnection. If endpoint is enabled, it + // will be immediately available for PeerConnection, otherwise user will be + // able to enable endpoint later to make it available for PeerConnection. + virtual EmulatedNetworkManagerInterface* absl_nonnull CreateEmulatedNetworkManagerInterface( const std::vector& endpoints) = 0; @@ -333,14 +345,14 @@ class NetworkEmulationManager { // `stats_callback`. Callback will be executed on network emulation // internal task queue. virtual void GetStats( - rtc::ArrayView endpoints, + ArrayView endpoints, std::function stats_callback) = 0; // Passes combined network stats for all specified `nodes` into specified // `stats_callback`. Callback will be executed on network emulation // internal task queue. virtual void GetStats( - rtc::ArrayView nodes, + ArrayView nodes, std::function stats_callback) = 0; // Create an EmulatedTURNServer.
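As a usage sketch for the two additions above (Builder::capacity(DataRate) and NetworkEmulationManagerConfig), assuming the CreateNetworkEmulationManager() factory in api/test/create_network_emulation_manager.h takes this config struct (that factory is outside this diff):

#include <memory>

#include "api/test/create_network_emulation_manager.h"
#include "api/test/network_emulation_manager.h"
#include "api/units/data_rate.h"

void BuildThrottledLink() {
  webrtc::NetworkEmulationManagerConfig config;
  config.time_mode = webrtc::TimeMode::kSimulated;
  // Per the comment above, only meaningful together with simulated time.
  config.fake_dtls_handshake_sizes = true;

  std::unique_ptr<webrtc::NetworkEmulationManager> net =
      webrtc::CreateNetworkEmulationManager(config);

  // Link capacity is now a DataRate and loss_percent is a double.
  auto link = net->NodeBuilder()
                  .delay_ms(50)
                  .capacity(webrtc::DataRate::KilobitsPerSec(500))
                  .loss(0.01)  // 1% loss, stored as loss_percent = 1.0.
                  .Build();
  (void)link;
}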
diff --git a/api/test/pclf/BUILD.gn b/api/test/pclf/BUILD.gn index f3d78370ed..fd0a4a10e4 100644 --- a/api/test/pclf/BUILD.gn +++ b/api/test/pclf/BUILD.gn @@ -19,42 +19,16 @@ rtc_source_set("media_configuration") { deps = [ "../..:array_view", "../..:audio_options_api", - "../..:audio_quality_analyzer_api", - "../..:callfactory_api", - "../..:fec_controller_api", - "../..:frame_generator_api", - "../..:function_view", - "../..:libjingle_peerconnection_api", "../..:media_stream_interface", - "../..:packet_socket_factory", - "../..:peer_network_dependencies", "../..:rtp_parameters", - "../..:simulated_network_api", - "../..:stats_observer_interface", - "../..:track_id_stream_info_map", - "../..:video_quality_analyzer_api", - "../../../modules/audio_processing:api", "../../../rtc_base:checks", - "../../../rtc_base:network", - "../../../rtc_base:rtc_certificate_generator", - "../../../rtc_base:ssl", "../../../rtc_base:stringutils", - "../../../rtc_base:threading", "../../../test:fileutils", - "../../../test:video_test_support", + "../../../test:video_frame_writer", "../../../test/pc/e2e/analyzer/video:video_dumping", - "../../audio:audio_mixer_api", - "../../rtc_event_log", - "../../task_queue", - "../../transport:network_control", "../../units:time_delta", - "../../video_codecs:video_codecs_api", "../video:video_frame_writer", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -66,22 +40,27 @@ rtc_library("media_quality_test_params") { deps = [ ":media_configuration", "../..:async_dns_resolver", - "../../../api:callfactory_api", - "../../../api:fec_controller_api", - "../../../api:field_trials_view", - "../../../api:libjingle_peerconnection_api", - "../../../api:packet_socket_factory", - "../../../api/audio:audio_mixer_api", - "../../../api/rtc_event_log", - "../../../api/task_queue", - "../../../api/transport:network_control", - "../../../api/video_codecs:video_codecs_api", - "../../../modules/audio_processing:api", - "../../../p2p:rtc_p2p", + "../..:fec_controller_api", + "../..:field_trials_view", + "../..:ice_transport_interface", + "../..:libjingle_peerconnection_api", + "../..:scoped_refptr", + "../../../p2p:port_allocator", + "../../../rtc_base:checks", "../../../rtc_base:network", "../../../rtc_base:rtc_certificate_generator", + "../../../rtc_base:socket_factory", "../../../rtc_base:ssl", "../../../rtc_base:threading", + "../../audio:audio_mixer_api", + "../../audio:audio_processing", + "../../audio_codecs:audio_codecs_api", + "../../neteq:neteq_api", + "../../rtc_event_log:rtc_event_log_factory_interface", + "../../transport:bitrate_settings", + "../../transport:network_control", + "../../units:time_delta", + "../../video_codecs:video_codecs_api", ] } @@ -96,21 +75,28 @@ rtc_library("peer_configurer") { ":media_configuration", ":media_quality_test_params", "../..:async_dns_resolver", - "../../../api:callfactory_api", - "../../../api:create_peer_connection_quality_test_frame_generator", - "../../../api:fec_controller_api", - "../../../api:packet_socket_factory", - "../../../api:peer_network_dependencies", - "../../../api/audio:audio_mixer_api", - "../../../api/rtc_event_log", - "../../../api/task_queue", - "../../../api/transport:network_control", - "../../../api/video_codecs:video_codecs_api", - "../../../modules/audio_processing:api", - "../../../rtc_base:network", + 
"../..:create_peer_connection_quality_test_frame_generator", + "../..:fec_controller_api", + "../..:field_trials_view", + "../..:frame_generator_api", + "../..:ice_transport_interface", + "../..:libjingle_peerconnection_api", + "../..:network_emulation_manager_api", + "../..:peer_network_dependencies", + "../..:scoped_refptr", + "../../../p2p:port_allocator", + "../../../rtc_base:checks", "../../../rtc_base:rtc_certificate_generator", "../../../rtc_base:ssl", - "../../../rtc_base:threading", + "../../audio:audio_mixer_api", + "../../audio:audio_processing", + "../../audio_codecs:audio_codecs_api", + "../../neteq:neteq_api", + "../../rtc_event_log:rtc_event_log_factory_interface", + "../../transport:bitrate_settings", + "../../transport:network_control", + "../../video_codecs:video_codecs_api", + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } diff --git a/api/test/pclf/DEPS b/api/test/pclf/DEPS index 60cc0aeeb3..ec2451b05b 100644 --- a/api/test/pclf/DEPS +++ b/api/test/pclf/DEPS @@ -5,9 +5,11 @@ specific_include_rules = { "+rtc_base/network.h", "+rtc_base/rtc_certificate_generator.h", "+rtc_base/ssl_certificate.h", + "+rtc_base/socket_factory.h", "+rtc_base/thread.h", ], "media_quality_test_params\.h": [ "+p2p/base/port_allocator.h", + "+rtc_base/socket_factory.h", ], } diff --git a/api/test/pclf/media_configuration.cc b/api/test/pclf/media_configuration.cc index 4446e11400..679486ded4 100644 --- a/api/test/pclf/media_configuration.cc +++ b/api/test/pclf/media_configuration.cc @@ -10,13 +10,20 @@ #include "api/test/pclf/media_configuration.h" +#include +#include +#include +#include +#include +#include #include #include +#include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/test/video/video_frame_writer.h" +#include "api/units/time_delta.h" #include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" #include "test/pc/e2e/analyzer/video/video_dumping.h" @@ -37,7 +44,7 @@ absl::string_view SpecToString(VideoResolution::Spec spec) { } void AppendResolution(const VideoResolution& resolution, - rtc::StringBuilder& builder) { + StringBuilder& builder) { builder << "_" << resolution.width() << "x" << resolution.height() << "_" << resolution.fps(); } @@ -57,8 +64,8 @@ EmulatedSFUConfig::EmulatedSFUConfig(int target_layer_index) RTC_CHECK_GE(target_layer_index, 0); } -EmulatedSFUConfig::EmulatedSFUConfig(absl::optional target_layer_index, - absl::optional target_temporal_index) +EmulatedSFUConfig::EmulatedSFUConfig(std::optional target_layer_index, + std::optional target_temporal_index) : target_layer_index(target_layer_index), target_temporal_index(target_temporal_index) { RTC_CHECK_GE(target_temporal_index.value_or(0), 0); @@ -88,7 +95,7 @@ bool VideoResolution::IsRegular() const { return spec_ == Spec::kNone; } std::string VideoResolution::ToString() const { - rtc::StringBuilder out; + StringBuilder out; out << "{ width=" << width_ << ", height=" << height_ << ", fps=" << fps_ << ", spec=" << SpecToString(spec_) << " }"; return out.Release(); @@ -120,7 +127,7 @@ VideoDumpOptions::CreateInputDumpVideoFrameWriter( const VideoResolution& resolution) const { std::unique_ptr writer = video_frame_writer_factory_( GetInputDumpFileName(stream_label, resolution), resolution); - absl::optional frame_ids_file = + std::optional frame_ids_file = GetInputFrameIdsDumpFileName(stream_label, resolution); if 
(frame_ids_file.has_value()) { writer = CreateVideoFrameWithIdsWriter(std::move(writer), *frame_ids_file); @@ -135,7 +142,7 @@ VideoDumpOptions::CreateOutputDumpVideoFrameWriter( const VideoResolution& resolution) const { std::unique_ptr writer = video_frame_writer_factory_( GetOutputDumpFileName(stream_label, receiver, resolution), resolution); - absl::optional frame_ids_file = + std::optional frame_ids_file = GetOutputFrameIdsDumpFileName(stream_label, receiver, resolution); if (frame_ids_file.has_value()) { writer = CreateVideoFrameWithIdsWriter(std::move(writer), *frame_ids_file); @@ -155,17 +162,17 @@ VideoDumpOptions::Y4mVideoFrameWriterFactory( std::string VideoDumpOptions::GetInputDumpFileName( absl::string_view stream_label, const VideoResolution& resolution) const { - rtc::StringBuilder file_name; + StringBuilder file_name; file_name << stream_label; AppendResolution(resolution, file_name); return test::JoinFilename(output_directory_, file_name.Release()); } -absl::optional VideoDumpOptions::GetInputFrameIdsDumpFileName( +std::optional VideoDumpOptions::GetInputFrameIdsDumpFileName( absl::string_view stream_label, const VideoResolution& resolution) const { if (!export_frame_ids_) { - return absl::nullopt; + return std::nullopt; } return GetInputDumpFileName(stream_label, resolution) + ".frame_ids.txt"; } @@ -174,25 +181,25 @@ std::string VideoDumpOptions::GetOutputDumpFileName( absl::string_view stream_label, absl::string_view receiver, const VideoResolution& resolution) const { - rtc::StringBuilder file_name; + StringBuilder file_name; file_name << stream_label << "_" << receiver; AppendResolution(resolution, file_name); return test::JoinFilename(output_directory_, file_name.Release()); } -absl::optional VideoDumpOptions::GetOutputFrameIdsDumpFileName( +std::optional VideoDumpOptions::GetOutputFrameIdsDumpFileName( absl::string_view stream_label, absl::string_view receiver, const VideoResolution& resolution) const { if (!export_frame_ids_) { - return absl::nullopt; + return std::nullopt; } return GetOutputDumpFileName(stream_label, receiver, resolution) + ".frame_ids.txt"; } std::string VideoDumpOptions::ToString() const { - rtc::StringBuilder out; + StringBuilder out; out << "{ output_directory_=" << output_directory_ << ", sampling_modulo_=" << sampling_modulo_ << ", export_frame_ids_=" << export_frame_ids_ << " }"; @@ -213,9 +220,6 @@ VideoConfig::VideoConfig(absl::string_view stream_label, int32_t fps) : width(width), height(height), fps(fps), stream_label(stream_label) {} -AudioConfig::AudioConfig(absl::string_view stream_label) - : stream_label(stream_label) {} - VideoCodecConfig::VideoCodecConfig(absl::string_view name) : name(name), required_params() {} @@ -224,8 +228,8 @@ VideoCodecConfig::VideoCodecConfig( std::map required_params) : name(name), required_params(std::move(required_params)) {} -absl::optional VideoSubscription::GetMaxResolution( - rtc::ArrayView video_configs) { +std::optional VideoSubscription::GetMaxResolution( + ArrayView video_configs) { std::vector resolutions; for (const auto& video_config : video_configs) { resolutions.push_back(video_config.GetResolution()); @@ -233,10 +237,10 @@ absl::optional VideoSubscription::GetMaxResolution( return GetMaxResolution(resolutions); } -absl::optional VideoSubscription::GetMaxResolution( - rtc::ArrayView resolutions) { +std::optional VideoSubscription::GetMaxResolution( + ArrayView resolutions) { if (resolutions.empty()) { - return absl::nullopt; + return std::nullopt; } VideoResolution max_resolution; @@ -275,7 
+279,7 @@ VideoSubscription& VideoSubscription::SubscribeToAllPeers( return *this; } -absl::optional VideoSubscription::GetResolutionForPeer( +std::optional VideoSubscription::GetResolutionForPeer( absl::string_view peer_name) const { auto it = peers_resolution_.find(std::string(peer_name)); if (it == peers_resolution_.end()) { @@ -294,7 +298,7 @@ std::vector VideoSubscription::GetSubscribedPeers() const { } std::string VideoSubscription::ToString() const { - rtc::StringBuilder out; + StringBuilder out; out << "{ default_resolution_=["; if (default_resolution_.has_value()) { out << default_resolution_->ToString(); diff --git a/api/test/pclf/media_configuration.h b/api/test/pclf/media_configuration.h index 5bcb308c83..5f8e45cb76 100644 --- a/api/test/pclf/media_configuration.h +++ b/api/test/pclf/media_configuration.h @@ -16,44 +16,17 @@ #include #include #include +#include #include -#include #include -#include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" -#include "api/async_resolver_factory.h" -#include "api/audio/audio_mixer.h" #include "api/audio_options.h" -#include "api/call/call_factory_interface.h" -#include "api/fec_controller.h" -#include "api/function_view.h" #include "api/media_stream_interface.h" -#include "api/peer_connection_interface.h" -#include "api/rtc_event_log/rtc_event_log_factory_interface.h" #include "api/rtp_parameters.h" -#include "api/task_queue/task_queue_factory.h" -#include "api/test/audio_quality_analyzer_interface.h" -#include "api/test/frame_generator_interface.h" -#include "api/test/peer_network_dependencies.h" -#include "api/test/simulated_network.h" -#include "api/test/stats_observer_interface.h" -#include "api/test/track_id_stream_info_map.h" #include "api/test/video/video_frame_writer.h" -#include "api/test/video_quality_analyzer_interface.h" -#include "api/transport/network_control.h" #include "api/units/time_delta.h" -#include "api/video_codecs/video_decoder_factory.h" -#include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/video_encoder_factory.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "rtc_base/checks.h" -#include "rtc_base/network.h" -#include "rtc_base/rtc_certificate_generator.h" -#include "rtc_base/ssl_certificate.h" -#include "rtc_base/thread.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -106,7 +79,7 @@ struct ScreenShareConfig { bool generate_slides = false; // If present scrolling will be applied. Please read extra requirement on // `slides_yuv_file_names` for scrolling. - absl::optional scrolling_params; + std::optional scrolling_params; // Contains list of yuv files with slides. // // If empty, default set of slides will be used. In such case @@ -157,8 +130,8 @@ struct VideoSimulcastConfig { struct EmulatedSFUConfig { EmulatedSFUConfig() = default; explicit EmulatedSFUConfig(int target_layer_index); - EmulatedSFUConfig(absl::optional target_layer_index, - absl::optional target_temporal_index); + EmulatedSFUConfig(std::optional target_layer_index, + std::optional target_temporal_index); // Specifies simulcast or spatial index of the video stream to analyze. // There are 2 cases: @@ -173,12 +146,12 @@ struct EmulatedSFUConfig { // If not specified then all streams will be received and analyzed. // When set, it instructs the framework to create an emulated Selective // Forwarding Unit (SFU) that will propagate only the requested layers. 
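For illustration, a minimal sketch of attaching the EmulatedSFUConfig documented here to a video stream, relying on the std::optional-based constructor and the VideoConfig::emulated_sfu_config field shown further down in this file's diff:

#include <optional>

#include "api/test/pclf/media_configuration.h"

void ConfigureEmulatedSfu(webrtc::webrtc_pc_e2e::VideoConfig& video_config) {
  // Keep only spatial/simulcast layer 2 and temporal layers up to index 1.
  video_config.emulated_sfu_config = webrtc::webrtc_pc_e2e::EmulatedSFUConfig(
      /*target_layer_index=*/2, /*target_temporal_index=*/1);
}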
- absl::optional target_layer_index; + std::optional target_layer_index; // Specifies the index of the maximum temporal unit to keep. // If not specified then all temporal layers will be received and analyzed. // When set, it instructs the framework to create an emulated Selective // Forwarding Unit (SFU) that will propagate only up to the requested layer. - absl::optional target_temporal_index; + std::optional target_temporal_index; }; class VideoResolution { @@ -279,16 +252,16 @@ class VideoDumpOptions { std::string GetInputDumpFileName(absl::string_view stream_label, const VideoResolution& resolution) const; // Returns file name for input frame ids dump if `export_frame_ids()` is - // true, absl::nullopt otherwise. - absl::optional GetInputFrameIdsDumpFileName( + // true, std::nullopt otherwise. + std::optional GetInputFrameIdsDumpFileName( absl::string_view stream_label, const VideoResolution& resolution) const; std::string GetOutputDumpFileName(absl::string_view stream_label, absl::string_view receiver, const VideoResolution& resolution) const; // Returns file name for output frame ids dump if `export_frame_ids()` is - // true, absl::nullopt otherwise. - absl::optional GetOutputFrameIdsDumpFileName( + // true, std::nullopt otherwise. + std::optional GetOutputFrameIdsDumpFileName( absl::string_view stream_label, absl::string_view receiver, const VideoResolution& resolution) const; @@ -322,10 +295,10 @@ struct VideoConfig { // Have to be unique among all specified configs for all peers in the call. // Will be auto generated if omitted. - absl::optional stream_label; + std::optional stream_label; // Will be set for current video track. If equals to kText or kDetailed - // screencast in on. - absl::optional content_hint; + std::optional content_hint; // If presented video will be transfered in simulcast/SVC mode depending on // which encoder is used. // @@ -334,9 +307,9 @@ struct VideoConfig { // simulcast tracks. For VP9 simulcast enables VP9 SVC mode and support RTX, // but only on non-lossy networks. See more in documentation to // VideoSimulcastConfig. - absl::optional simulcast_config; + std::optional simulcast_config; // Configuration for the emulated Selective Forward Unit (SFU). - absl::optional emulated_sfu_config; + std::optional emulated_sfu_config; // Encoding parameters for both singlecast and per simulcast layer. // If singlecast is used, if not empty, a single value can be provided. // If simulcast is used, if not empty, `encoding_params` size have to be @@ -348,16 +321,16 @@ struct VideoConfig { // Count of temporal layers for video stream. This value will be set into // each RtpEncodingParameters of RtpParameters of corresponding // RtpSenderInterface for this video stream. - absl::optional temporal_layers_count; + std::optional temporal_layers_count; // If specified defines how input should be dumped. It is actually one of // the test's output file, which contains copy of what was captured during // the test for this video stream on sender side. It is useful when // generator is used as input. - absl::optional input_dump_options; + std::optional input_dump_options; // If specified defines how output should be dumped on the receiver side for // this stream. The produced files contain what was rendered for this video // stream on receiver side per each receiver. - absl::optional output_dump_options; + std::optional output_dump_options; // If set to true uses fixed frame rate while dumping output video to the // file. 
Requested `VideoSubscription::fps()` will be used as frame rate. bool output_dump_use_fixed_framerate = false; @@ -366,36 +339,33 @@ struct VideoConfig { // If specified, determines a sync group to which this video stream belongs. // According to bugs.webrtc.org/4762 WebRTC supports synchronization only // for pair of single audio and single video stream. - absl::optional sync_group; + std::optional sync_group; // If specified, it will be set into RtpParameters of corresponding // RtpSenderInterface for this video stream. // Note that this setting takes precedence over `content_hint`. - absl::optional degradation_preference; + std::optional degradation_preference; }; // Contains properties for audio in the call. struct AudioConfig { - AudioConfig() = default; - explicit AudioConfig(absl::string_view stream_label); - // Have to be unique among all specified configs for all peers in the call. // Will be auto generated if omitted. - absl::optional stream_label; + std::optional stream_label; // If no file is specified an audio will be generated. - absl::optional input_file_name; + std::optional input_file_name; // If specified the input stream will be also copied to specified file. - absl::optional input_dump_file_name; + std::optional input_dump_file_name; // If specified the output stream will be copied to specified file. - absl::optional output_dump_file_name; + std::optional output_dump_file_name; // Audio options to use. - cricket::AudioOptions audio_options; + AudioOptions audio_options; // Sampling frequency of input audio data (from file or generated). int sampling_frequency_in_hz = 48000; // If specified, determines a sync group to which this audio stream belongs. // According to bugs.webrtc.org/4762 WebRTC supports synchronization only // for pair of single audio and single video stream. - absl::optional sync_group; + std::optional sync_group; }; struct VideoCodecConfig { @@ -423,10 +393,10 @@ class VideoSubscription { public: // Returns the resolution constructed as maximum from all resolution // dimensions: width, height and fps. - static absl::optional GetMaxResolution( - rtc::ArrayView video_configs); - static absl::optional GetMaxResolution( - rtc::ArrayView resolutions); + static std::optional GetMaxResolution( + ArrayView video_configs); + static std::optional GetMaxResolution( + ArrayView resolutions); bool operator==(const VideoSubscription& other) const; bool operator!=(const VideoSubscription& other) const; @@ -449,9 +419,9 @@ class VideoSubscription { // Returns resolution for specific sender. If no specific resolution was // set for this sender, then will return resolution used for all streams. - // If subscription doesn't subscribe to all streams, `absl::nullopt` will be + // If subscription doesn't subscribe to all streams, `std::nullopt` will be // returned. 
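A short usage sketch of the VideoSubscription helpers after the std::optional migration; the peer name "alice" and the resolutions are illustrative only:

#include <optional>
#include <vector>

#include "api/test/pclf/media_configuration.h"

void SubscriptionSketch() {
  using webrtc::webrtc_pc_e2e::VideoConfig;
  using webrtc::webrtc_pc_e2e::VideoResolution;
  using webrtc::webrtc_pc_e2e::VideoSubscription;

  std::vector<VideoConfig> configs = {
      VideoConfig(/*width=*/1280, /*height=*/720, /*fps=*/30),
      VideoConfig(/*width=*/640, /*height=*/360, /*fps=*/15)};

  // The maximum is taken per dimension, so this yields 1280x720@30.
  std::optional<VideoResolution> max =
      VideoSubscription::GetMaxResolution(configs);

  VideoSubscription subscription;
  subscription.SubscribeToAllPeers(*max);

  // Returns 1280x720@30 for any peer, since the subscription covers all peers.
  std::optional<VideoResolution> alice =
      subscription.GetResolutionForPeer("alice");
  (void)alice;
}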
- absl::optional GetResolutionForPeer( + std::optional GetResolutionForPeer( absl::string_view peer_name) const; // Returns a maybe empty list of senders for which peer explicitly @@ -461,7 +431,7 @@ class VideoSubscription { std::string ToString() const; private: - absl::optional default_resolution_ = absl::nullopt; + std::optional default_resolution_ = std::nullopt; std::map peers_resolution_; }; diff --git a/api/test/pclf/media_quality_test_params.h b/api/test/pclf/media_quality_test_params.h index a247f342b0..7305aa3fad 100644 --- a/api/test/pclf/media_quality_test_params.h +++ b/api/test/pclf/media_quality_test_params.h @@ -11,25 +11,36 @@ #define API_TEST_PCLF_MEDIA_QUALITY_TEST_PARAMS_H_ #include +#include #include +#include #include +#include #include #include "api/async_dns_resolver.h" #include "api/audio/audio_mixer.h" -#include "api/call/call_factory_interface.h" +#include "api/audio/audio_processing.h" +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_encoder_factory.h" #include "api/fec_controller.h" #include "api/field_trials_view.h" +#include "api/ice_transport_interface.h" +#include "api/neteq/neteq_factory.h" +#include "api/peer_connection_interface.h" #include "api/rtc_event_log/rtc_event_log_factory_interface.h" -#include "api/task_queue/task_queue_factory.h" +#include "api/scoped_refptr.h" #include "api/test/pclf/media_configuration.h" +#include "api/transport/bitrate_settings.h" #include "api/transport/network_control.h" +#include "api/units/time_delta.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" -#include "modules/audio_processing/include/audio_processing.h" #include "p2p/base/port_allocator.h" +#include "rtc_base/checks.h" #include "rtc_base/network.h" #include "rtc_base/rtc_certificate_generator.h" +#include "rtc_base/socket_factory.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/thread.h" @@ -46,24 +57,22 @@ namespace webrtc_pc_e2e { // can override only some parts of media engine like video encoder/decoder // factories. struct PeerConnectionFactoryComponents { - std::unique_ptr task_queue_factory; - std::unique_ptr call_factory; + std::unique_ptr network_manager; + SocketFactory* socket_factory = nullptr; std::unique_ptr event_log_factory; std::unique_ptr fec_controller_factory; std::unique_ptr network_controller_factory; std::unique_ptr neteq_factory; - // Will be passed to MediaEngineInterface, that will be used in - // PeerConnectionFactory. std::unique_ptr video_encoder_factory; std::unique_ptr video_decoder_factory; - rtc::scoped_refptr audio_encoder_factory; - rtc::scoped_refptr audio_decoder_factory; + scoped_refptr audio_encoder_factory; + scoped_refptr audio_decoder_factory; std::unique_ptr trials; - rtc::scoped_refptr audio_processing; - rtc::scoped_refptr audio_mixer; + std::unique_ptr audio_processing; + scoped_refptr audio_mixer; }; // Contains most parts from PeerConnectionDependencies. Also all fields are @@ -73,42 +82,32 @@ struct PeerConnectionFactoryComponents { // Separate class was introduced to clarify which components can be // overridden. For example observer, which is required to // PeerConnectionDependencies, will be provided by fixture implementation, -// so client can't inject its own. Also only network manager can be overridden -// inside port allocator. +// so client can't inject its own. 
struct PeerConnectionComponents { - PeerConnectionComponents(rtc::NetworkManager* network_manager, - rtc::PacketSocketFactory* packet_socket_factory) - : network_manager(network_manager), - packet_socket_factory(packet_socket_factory) { - RTC_CHECK(network_manager); - } - - rtc::NetworkManager* const network_manager; - rtc::PacketSocketFactory* const packet_socket_factory; std::unique_ptr async_dns_resolver_factory; - std::unique_ptr cert_generator; - std::unique_ptr tls_cert_verifier; + std::unique_ptr cert_generator; + std::unique_ptr tls_cert_verifier; std::unique_ptr ice_transport_factory; }; // Contains all components, that can be overridden in peer connection. Also // has a network thread, that will be used to communicate with another peers. struct InjectableComponents { - InjectableComponents(rtc::Thread* network_thread, - rtc::NetworkManager* network_manager, - rtc::PacketSocketFactory* packet_socket_factory) + InjectableComponents(Thread* network_thread, + std::unique_ptr network_manager, + SocketFactory* socket_factory) : network_thread(network_thread), worker_thread(nullptr), pcf_dependencies(std::make_unique()), - pc_dependencies( - std::make_unique(network_manager, - packet_socket_factory)) { + pc_dependencies(std::make_unique()) { RTC_CHECK(network_thread); + pcf_dependencies->network_manager = std::move(network_manager); + pcf_dependencies->socket_factory = socket_factory; } - rtc::Thread* const network_thread; - rtc::Thread* worker_thread; + Thread* const network_thread; + Thread* worker_thread; std::unique_ptr pcf_dependencies; std::unique_ptr pc_dependencies; @@ -119,18 +118,21 @@ struct InjectableComponents { // to set up peer connection. struct Params { // Peer name. If empty - default one will be set by the fixture. - absl::optional name; + std::optional name; // If `audio_config` is set audio stream will be configured - absl::optional audio_config; - // Flags to set on `cricket::PortAllocator`. These flags will be added - // to the default ones that are presented on the port allocator. - uint32_t port_allocator_extra_flags = cricket::kDefaultPortAllocatorFlags; + std::optional audio_config; + // Flags to override `rtc_configuration.port_allocator_config.flags` + // + // IMPORTANT: if you use WebRTC Network Emulation + // (api/test/network_emulation_manager.h) and set this field, remember to set + // webrtc::PORTALLOCATOR_DISABLE_TCP. + uint32_t port_allocator_flags = PORTALLOCATOR_DISABLE_TCP; // If `rtc_event_log_path` is set, an RTCEventLog will be saved in that // location and it will be available for further analysis. - absl::optional rtc_event_log_path; + std::optional rtc_event_log_path; // If `aec_dump_path` is set, an AEC dump will be saved in that location and // it will be available for further analysis. - absl::optional aec_dump_path; + std::optional aec_dump_path; bool use_ulp_fec = false; bool use_flex_fec = false; @@ -142,6 +144,7 @@ struct Params { // provided into VideoEncoder::SetRates(...). double video_encoder_bitrate_multiplier = 1.0; + PeerConnectionFactoryInterface::Options peer_connection_factory_options; PeerConnectionInterface::RTCConfiguration rtc_configuration; PeerConnectionInterface::RTCOfferAnswerOptions rtc_offer_answer_options; BitrateSettings bitrate_settings; @@ -183,7 +186,7 @@ struct RunParams { // If specified echo emulation will be done, by mixing the render audio into // the capture signal. In such case input signal will be reduced by half to // avoid saturation or compression in the echo path simulation. 
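A minimal sketch of enabling this echo emulation via RunParams, assuming EchoEmulationConfig (declared in api/test/pclf/media_configuration.h and untouched by this diff) with its default echo delay:

#include "api/test/pclf/media_configuration.h"
#include "api/test/pclf/media_quality_test_params.h"
#include "api/units/time_delta.h"

webrtc::webrtc_pc_e2e::RunParams MakeRunParamsWithEcho() {
  webrtc::webrtc_pc_e2e::RunParams run_params(webrtc::TimeDelta::Seconds(15));
  // Mixes render audio back into the capture signal with the default delay.
  run_params.echo_emulation_config =
      webrtc::webrtc_pc_e2e::EchoEmulationConfig();
  return run_params;
}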
- absl::optional echo_emulation_config; + std::optional echo_emulation_config; }; } // namespace webrtc_pc_e2e diff --git a/api/test/pclf/peer_configurer.cc b/api/test/pclf/peer_configurer.cc index b614940c99..7da73512d5 100644 --- a/api/test/pclf/peer_configurer.cc +++ b/api/test/pclf/peer_configurer.cc @@ -10,22 +10,48 @@ #include "api/test/pclf/peer_configurer.h" -#include +#include +#include +#include +#include +#include +#include #include "absl/strings/string_view.h" +#include "api/async_dns_resolver.h" +#include "api/audio/audio_mixer.h" +#include "api/audio/audio_processing.h" +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/fec_controller.h" +#include "api/field_trials_view.h" +#include "api/ice_transport_interface.h" +#include "api/neteq/neteq_factory.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_event_log/rtc_event_log_factory_interface.h" +#include "api/scoped_refptr.h" +#include "api/test/create_peer_connection_quality_test_frame_generator.h" +#include "api/test/frame_generator_interface.h" #include "api/test/pclf/media_configuration.h" #include "api/test/pclf/media_quality_test_params.h" #include "api/test/peer_network_dependencies.h" +#include "api/transport/bitrate_settings.h" +#include "api/transport/network_control.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "p2p/base/port_allocator.h" +#include "rtc_base/checks.h" +#include "rtc_base/rtc_certificate_generator.h" +#include "rtc_base/ssl_certificate.h" namespace webrtc { namespace webrtc_pc_e2e { -PeerConfigurer::PeerConfigurer( - const PeerNetworkDependencies& network_dependencies) +PeerConfigurer::PeerConfigurer(PeerNetworkDependencies& network) : components_(std::make_unique( - network_dependencies.network_thread, - network_dependencies.network_manager, - network_dependencies.packet_socket_factory)), + network.network_thread(), + network.ReleaseNetworkManager(), + network.socket_factory())), params_(std::make_unique()), configurable_params_(std::make_unique()) {} @@ -34,17 +60,6 @@ PeerConfigurer* PeerConfigurer::SetName(absl::string_view name) { return this; } -PeerConfigurer* PeerConfigurer::SetTaskQueueFactory( - std::unique_ptr task_queue_factory) { - components_->pcf_dependencies->task_queue_factory = - std::move(task_queue_factory); - return this; -} -PeerConfigurer* PeerConfigurer::SetCallFactory( - std::unique_ptr call_factory) { - components_->pcf_dependencies->call_factory = std::move(call_factory); - return this; -} PeerConfigurer* PeerConfigurer::SetEventLogFactory( std::unique_ptr event_log_factory) { components_->pcf_dependencies->event_log_factory = @@ -77,12 +92,12 @@ PeerConfigurer* PeerConfigurer::SetVideoDecoderFactory( return this; } PeerConfigurer* PeerConfigurer::SetAudioEncoderFactory( - rtc::scoped_refptr audio_encoder_factory) { + scoped_refptr audio_encoder_factory) { components_->pcf_dependencies->audio_encoder_factory = audio_encoder_factory; return this; } PeerConfigurer* PeerConfigurer::SetAudioDecoderFactory( - rtc::scoped_refptr audio_decoder_factory) { + scoped_refptr audio_decoder_factory) { components_->pcf_dependencies->audio_decoder_factory = audio_decoder_factory; return this; } @@ -94,12 +109,12 @@ PeerConfigurer* PeerConfigurer::SetAsyncDnsResolverFactory( return this; } PeerConfigurer* PeerConfigurer::SetRTCCertificateGenerator( - std::unique_ptr cert_generator) { + std::unique_ptr cert_generator) { 
components_->pc_dependencies->cert_generator = std::move(cert_generator); return this; } PeerConfigurer* PeerConfigurer::SetSSLCertificateVerifier( - std::unique_ptr tls_cert_verifier) { + std::unique_ptr tls_cert_verifier) { components_->pc_dependencies->tls_cert_verifier = std::move(tls_cert_verifier); return this; @@ -107,7 +122,7 @@ PeerConfigurer* PeerConfigurer::SetSSLCertificateVerifier( PeerConfigurer* PeerConfigurer::AddVideoConfig(VideoConfig config) { video_sources_.push_back( - CreateSquareFrameGenerator(config, /*type=*/absl::nullopt)); + CreateSquareFrameGenerator(config, /*type=*/std::nullopt)); configurable_params_->video_configs.push_back(std::move(config)); return this; } @@ -167,12 +182,12 @@ PeerConfigurer* PeerConfigurer::SetNetEqFactory( return this; } PeerConfigurer* PeerConfigurer::SetAudioProcessing( - rtc::scoped_refptr audio_processing) { - components_->pcf_dependencies->audio_processing = audio_processing; + std::unique_ptr audio_processing) { + components_->pcf_dependencies->audio_processing = std::move(audio_processing); return this; } PeerConfigurer* PeerConfigurer::SetAudioMixer( - rtc::scoped_refptr audio_mixer) { + scoped_refptr audio_mixer) { components_->pcf_dependencies->audio_mixer = audio_mixer; return this; } @@ -190,6 +205,11 @@ PeerConfigurer* PeerConfigurer::SetAecDumpPath(absl::string_view path) { params_->aec_dump_path = std::string(path); return this; } +PeerConfigurer* PeerConfigurer::SetPCFOptions( + PeerConnectionFactoryInterface::Options options) { + params_->peer_connection_factory_options = std::move(options); + return this; +} PeerConfigurer* PeerConfigurer::SetRTCConfiguration( PeerConnectionInterface::RTCConfiguration configuration) { params_->rtc_configuration = std::move(configuration); @@ -212,11 +232,24 @@ PeerConfigurer* PeerConfigurer::SetIceTransportFactory( return this; } +PeerConfigurer* PeerConfigurer::SetFieldTrials( + std::unique_ptr field_trials) { + components_->pcf_dependencies->trials = std::move(field_trials); + return this; +} + PeerConfigurer* PeerConfigurer::SetPortAllocatorExtraFlags( uint32_t extra_flags) { - params_->port_allocator_extra_flags = extra_flags; + params_->port_allocator_flags = + kDefaultPortAllocatorFlags | PORTALLOCATOR_DISABLE_TCP | extra_flags; + return this; +} + +PeerConfigurer* PeerConfigurer::SetPortAllocatorFlags(uint32_t flags) { + params_->port_allocator_flags = flags; return this; } + std::unique_ptr PeerConfigurer::ReleaseComponents() { RTC_CHECK(components_); auto components = std::move(components_); diff --git a/api/test/pclf/peer_configurer.h b/api/test/pclf/peer_configurer.h index d10b53fa3d..2eb525ed9d 100644 --- a/api/test/pclf/peer_configurer.h +++ b/api/test/pclf/peer_configurer.h @@ -10,30 +10,37 @@ #ifndef API_TEST_PCLF_PEER_CONFIGURER_H_ #define API_TEST_PCLF_PEER_CONFIGURER_H_ +#include #include #include #include +#include #include +#include "absl/base/macros.h" #include "absl/strings/string_view.h" #include "api/async_dns_resolver.h" #include "api/audio/audio_mixer.h" -#include "api/call/call_factory_interface.h" +#include "api/audio/audio_processing.h" +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_encoder_factory.h" #include "api/fec_controller.h" +#include "api/field_trials_view.h" +#include "api/ice_transport_interface.h" +#include "api/neteq/neteq_factory.h" +#include "api/peer_connection_interface.h" #include "api/rtc_event_log/rtc_event_log_factory_interface.h" -#include "api/task_queue/task_queue_factory.h" -#include 
"api/test/create_peer_connection_quality_test_frame_generator.h" +#include "api/scoped_refptr.h" +#include "api/test/frame_generator_interface.h" #include "api/test/pclf/media_configuration.h" #include "api/test/pclf/media_quality_test_params.h" #include "api/test/peer_network_dependencies.h" +#include "api/transport/bitrate_settings.h" #include "api/transport/network_control.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "rtc_base/network.h" #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/ssl_certificate.h" -#include "rtc_base/thread.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -42,23 +49,19 @@ namespace webrtc_pc_e2e { class PeerConfigurer { public: using VideoSource = - absl::variant, - CapturingDeviceIndex>; + std::variant, + CapturingDeviceIndex>; - explicit PeerConfigurer(const PeerNetworkDependencies& network_dependencies); + explicit PeerConfigurer(PeerNetworkDependencies& network); // Sets peer name that will be used to report metrics related to this peer. // If not set, some default name will be assigned. All names have to be // unique. PeerConfigurer* SetName(absl::string_view name); - // The parameters of the following 9 methods will be passed to the + // The parameters of the following 7 methods will be passed to the // PeerConnectionFactoryInterface implementation that will be created for // this peer. - PeerConfigurer* SetTaskQueueFactory( - std::unique_ptr task_queue_factory); - PeerConfigurer* SetCallFactory( - std::unique_ptr call_factory); PeerConfigurer* SetEventLogFactory( std::unique_ptr event_log_factory); PeerConfigurer* SetFecControllerFactory( @@ -71,15 +74,20 @@ class PeerConfigurer { PeerConfigurer* SetVideoDecoderFactory( std::unique_ptr video_decoder_factory); PeerConfigurer* SetAudioEncoderFactory( - rtc::scoped_refptr audio_encoder_factory); + scoped_refptr audio_encoder_factory); PeerConfigurer* SetAudioDecoderFactory( - rtc::scoped_refptr audio_decoder_factory); + scoped_refptr audio_decoder_factory); // Set a custom NetEqFactory to be used in the call. PeerConfigurer* SetNetEqFactory(std::unique_ptr neteq_factory); PeerConfigurer* SetAudioProcessing( - rtc::scoped_refptr audio_processing); - PeerConfigurer* SetAudioMixer( - rtc::scoped_refptr audio_mixer); + std::unique_ptr audio_processing); + ABSL_DEPRECATE_AND_INLINE() + PeerConfigurer* SetAudioProcessing( + scoped_refptr audio_processing) { + return SetAudioProcessing( + CustomAudioProcessing(std::move(audio_processing))); + } + PeerConfigurer* SetAudioMixer(scoped_refptr audio_mixer); // Forces the Peerconnection to use the network thread as the worker thread. // Ie, worker thread and the network thread is the same thread. @@ -90,17 +98,27 @@ class PeerConfigurer { // peer. PeerConfigurer* SetAsyncDnsResolverFactory( std::unique_ptr - async_resolver_factory); + async_dns_resolver_factory); PeerConfigurer* SetRTCCertificateGenerator( - std::unique_ptr cert_generator); + std::unique_ptr cert_generator); PeerConfigurer* SetSSLCertificateVerifier( - std::unique_ptr tls_cert_verifier); + std::unique_ptr tls_cert_verifier); PeerConfigurer* SetIceTransportFactory( std::unique_ptr factory); - // Flags to set on `cricket::PortAllocator`. These flags will be added - // to the default ones that are presented on the port allocator. - // For possible values check p2p/base/port_allocator.h. + // Flags to set on `webrtc::PortAllocator`. 
These flags will be added + // to the webrtc::kDefaultPortAllocatorFlags with + // webrtc::PORTALLOCATOR_DISABLE_TCP disabled. For possible values check + // p2p/base/port_allocator.h. PeerConfigurer* SetPortAllocatorExtraFlags(uint32_t extra_flags); + // Flags to set on `webrtc::PortAllocator`. These flags will override + // the default ones that are presented on the port allocator. + // + // For possible values check p2p/base/port_allocator.h. + // + // IMPORTANT: if you use WebRTC Network Emulation + // (api/test/network_emulation_manager.h) and set this field, remember to set + // webrtc::PORTALLOCATOR_DISABLE_TCP to 0. + PeerConfigurer* SetPortAllocatorFlags(uint32_t flags); // Add new video stream to the call that will be sent from this peer. // Default implementation of video frames generator will be used. @@ -157,6 +175,8 @@ class PeerConfigurer { // If is set, an AEC dump will be saved in that location and it will be // available for further analysis. PeerConfigurer* SetAecDumpPath(absl::string_view path); + PeerConfigurer* SetPCFOptions( + PeerConnectionFactoryInterface::Options options); PeerConfigurer* SetRTCConfiguration( PeerConnectionInterface::RTCConfiguration configuration); PeerConfigurer* SetRTCOfferAnswerOptions( @@ -164,6 +184,8 @@ class PeerConfigurer { // Set bitrate parameters on PeerConnection. This constraints will be // applied to all summed RTP streams for this peer. PeerConfigurer* SetBitrateSettings(BitrateSettings bitrate_settings); + // Set field trials used for this PeerConnection. + PeerConfigurer* SetFieldTrials(std::unique_ptr field_trials); // Returns InjectableComponents and transfer ownership to the caller. // Can be called once. diff --git a/api/test/peer_network_dependencies.h b/api/test/peer_network_dependencies.h index 6f85ad0a4d..fe2fdbf7a2 100644 --- a/api/test/peer_network_dependencies.h +++ b/api/test/peer_network_dependencies.h @@ -11,19 +11,26 @@ #ifndef API_TEST_PEER_NETWORK_DEPENDENCIES_H_ #define API_TEST_PEER_NETWORK_DEPENDENCIES_H_ -#include "api/packet_socket_factory.h" +#include + +#include "absl/base/nullability.h" #include "rtc_base/network.h" +#include "rtc_base/socket_factory.h" #include "rtc_base/thread.h" namespace webrtc { namespace webrtc_pc_e2e { -// The network dependencies needed when adding a peer to tests using -// PeerConnectionE2EQualityTestFixture. -struct PeerNetworkDependencies { - rtc::Thread* network_thread; - rtc::NetworkManager* network_manager; - rtc::PacketSocketFactory* packet_socket_factory; +// Provides interface to obtain all required objects to inject network layer +// into PeerConnectionFactory. 
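A sketch of how these dependencies are typically obtained and consumed after this change, relying on EmulatedNetworkManagerInterface (see network_emulation_manager.h above) now implementing the interface declared below; route setup between endpoints is omitted for brevity:

#include <memory>

#include "api/test/network_emulation_manager.h"
#include "api/test/pclf/peer_configurer.h"

std::unique_ptr<webrtc::webrtc_pc_e2e::PeerConfigurer> MakeEmulatedPeer(
    webrtc::NetworkEmulationManager& net) {
  webrtc::EmulatedEndpoint* endpoint =
      net.CreateEndpoint(webrtc::EmulatedEndpointConfig());
  // EmulatedNetworkManagerInterface is-a PeerNetworkDependencies after this
  // change, so it can be handed directly to PeerConfigurer, which takes
  // ownership of the network manager via ReleaseNetworkManager().
  webrtc::EmulatedNetworkManagerInterface* network =
      net.CreateEmulatedNetworkManagerInterface({endpoint});
  return std::make_unique<webrtc::webrtc_pc_e2e::PeerConfigurer>(*network);
}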
+class PeerNetworkDependencies { + public: + virtual ~PeerNetworkDependencies() = default; + + virtual Thread* absl_nonnull network_thread() = 0; + virtual SocketFactory* absl_nonnull socket_factory() = 0; + virtual absl_nonnull std::unique_ptr + ReleaseNetworkManager() = 0; }; } // namespace webrtc_pc_e2e diff --git a/api/test/peerconnection_quality_test_fixture.h b/api/test/peerconnection_quality_test_fixture.h index 74470cdf86..04be298bdb 100644 --- a/api/test/peerconnection_quality_test_fixture.h +++ b/api/test/peerconnection_quality_test_fixture.h @@ -11,53 +11,16 @@ #define API_TEST_PEERCONNECTION_QUALITY_TEST_FIXTURE_H_ #include -#include #include -#include #include -#include -#include -#include -#include "absl/base/macros.h" -#include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/array_view.h" -#include "api/async_resolver_factory.h" -#include "api/audio/audio_mixer.h" -#include "api/call/call_factory_interface.h" -#include "api/fec_controller.h" -#include "api/function_view.h" -#include "api/media_stream_interface.h" -#include "api/peer_connection_interface.h" -#include "api/rtc_event_log/rtc_event_log_factory_interface.h" -#include "api/rtp_parameters.h" -#include "api/task_queue/task_queue_factory.h" -#include "api/test/audio_quality_analyzer_interface.h" -#include "api/test/frame_generator_interface.h" -#include "api/test/pclf/media_configuration.h" #include "api/test/pclf/media_quality_test_params.h" #include "api/test/pclf/peer_configurer.h" -#include "api/test/peer_network_dependencies.h" -#include "api/test/simulated_network.h" #include "api/test/stats_observer_interface.h" #include "api/test/track_id_stream_info_map.h" -#include "api/test/video/video_frame_writer.h" -#include "api/test/video_quality_analyzer_interface.h" -#include "api/transport/network_control.h" #include "api/units/time_delta.h" -#include "api/video_codecs/video_decoder_factory.h" -#include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/video_encoder_factory.h" -#include "media/base/media_constants.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "rtc_base/checks.h" -#include "rtc_base/network.h" -#include "rtc_base/rtc_certificate_generator.h" -#include "rtc_base/ssl_certificate.h" -#include "rtc_base/thread.h" namespace webrtc { namespace webrtc_pc_e2e { diff --git a/api/test/peerconnection_quality_test_fixture_unittest.cc b/api/test/peerconnection_quality_test_fixture_unittest.cc index 26ae8cf98f..ed0dc55bb1 100644 --- a/api/test/peerconnection_quality_test_fixture_unittest.cc +++ b/api/test/peerconnection_quality_test_fixture_unittest.cc @@ -8,15 +8,18 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "api/test/peerconnection_quality_test_fixture.h" - +#include +#include +#include +#include #include -#include "absl/types/optional.h" +#include "absl/strings/string_view.h" #include "api/test/pclf/media_configuration.h" #include "api/test/video/video_frame_writer.h" -#include "rtc_base/gunit.h" +#include "api/video/video_frame.h" #include "test/gmock.h" +#include "test/gtest.h" #include "test/testsupport/file_utils.h" namespace webrtc { @@ -57,7 +60,7 @@ TEST(PclfVideoSubscriptionTest, WhenSpecIsNotSetFieldsAreCompared) { } TEST(PclfVideoSubscriptionTest, GetMaxResolutionForEmptyReturnsNullopt) { - absl::optional resolution = + std::optional resolution = VideoSubscription::GetMaxResolution(std::vector{}); ASSERT_FALSE(resolution.has_value()); } @@ -67,7 +70,7 @@ TEST(PclfVideoSubscriptionTest, GetMaxResolutionSelectMaxForEachDimention) { VideoConfig max_height(/*width=*/1, /*height=*/100, /*fps=*/1); VideoConfig max_fps(/*width=*/1, /*height=*/1, /*fps=*/10); - absl::optional resolution = + std::optional resolution = VideoSubscription::GetMaxResolution( std::vector{max_width, max_height, max_fps}); ASSERT_TRUE(resolution.has_value()); @@ -82,7 +85,7 @@ struct TestVideoFrameWriter : public test::VideoFrameWriter { const VideoResolution& resolution) : file_name_prefix(file_name_prefix), resolution(resolution) {} - bool WriteFrame(const VideoFrame& frame) override { return true; } + bool WriteFrame(const VideoFrame& /* frame */) override { return true; } void Close() override {} diff --git a/api/test/rtc_error_matchers.h b/api/test/rtc_error_matchers.h new file mode 100644 index 0000000000..2abc569f8a --- /dev/null +++ b/api/test/rtc_error_matchers.h @@ -0,0 +1,94 @@ +/* + * Copyright 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_RTC_ERROR_MATCHERS_H_ +#define API_TEST_RTC_ERROR_MATCHERS_H_ + +#include + +#include "absl/strings/str_cat.h" +#include "api/rtc_error.h" +#include "test/gmock.h" + +namespace webrtc { + +MATCHER(IsRtcOk, "") { + if (!arg.ok()) { + *result_listener << "Expected OK, got " << absl::StrCat(arg); + return false; + } + return true; +} + +MATCHER_P(IsRtcOkAndHolds, + matcher, + "RtcErrorOr that is holding an OK status and ") { + if (!arg.ok()) { + *result_listener << "Expected OK, got " << absl::StrCat(arg); + return false; + } + return testing::ExplainMatchResult(matcher, arg.value(), result_listener); +} + +MATCHER_P(IsRtcErrorWithType, error_type, ToString(error_type)) { + if (arg.ok()) { + *result_listener << "Expected " << ToString(error_type) << ", got OK."; + return false; + } + if (arg.type() != error_type) { + *result_listener << "Expected " << ToString(error_type) << ", got " + << ToString(arg.type()); + return false; + } + return true; +} + +MATCHER_P2(IsRtcErrorWithTypeAndMessage, + error_type, + message, + ToString(error_type)) { + if (arg.ok()) { + *result_listener << "Expected " << ToString(error_type) << ", got OK."; + return false; + } + if (arg.type() != error_type) { + *result_listener << "Expected " << ToString(error_type) << ", got " + << ToString(arg.type()); + return false; + } + if (std::string(arg.message()) != message) { + *result_listener << "Expected message \"" << message << "\", got \"" + << arg.message() << "\""; + return false; + } + return true; +} + +MATCHER_P2(IsRtcErrorOrWithMessage, + error_matcher, + message_matcher, + "RtcErrorOr that is holding an error that " + + testing::DescribeMatcher(error_matcher, negation) + + (negation ? " or " : " and ") + " with a message that " + + testing::DescribeMatcher(message_matcher, + negation)) { + if (arg.ok()) { + *result_listener << "Expected error, got " << absl::StrCat(arg); + return false; + } + return testing::ExplainMatchResult(error_matcher, arg.error(), + result_listener) && + testing::ExplainMatchResult(message_matcher, arg.error().message(), + result_listener); +} + +} // namespace webrtc + +#endif // API_TEST_RTC_ERROR_MATCHERS_H_ diff --git a/api/test/simulated_network.h b/api/test/simulated_network.h index 04c5517c8d..6f31c60c97 100644 --- a/api/test/simulated_network.h +++ b/api/test/simulated_network.h @@ -14,30 +14,45 @@ #include #include -#include -#include +#include +#include #include -#include "absl/types/optional.h" -#include "rtc_base/random.h" -#include "rtc_base/thread_annotations.h" +#include "absl/functional/any_invocable.h" +#include "api/transport/ecn_marking.h" +#include "api/units/data_rate.h" namespace webrtc { struct PacketInFlightInfo { + PacketInFlightInfo(size_t size, + int64_t send_time_us, + uint64_t packet_id, + webrtc::EcnMarking ecn) + : size(size), + send_time_us(send_time_us), + packet_id(packet_id), + ecn(ecn) {} + PacketInFlightInfo(size_t size, int64_t send_time_us, uint64_t packet_id) - : size(size), send_time_us(send_time_us), packet_id(packet_id) {} + : PacketInFlightInfo(size, + send_time_us, + packet_id, + webrtc::EcnMarking::kNotEct) {} size_t size; int64_t send_time_us; // Unique identifier for the packet in relation to other packets in flight. 
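Stepping back to the matchers introduced in api/test/rtc_error_matchers.h above, a small sketch of their intended gtest usage; ParseAnswer is a made-up helper for illustration:

#include "api/rtc_error.h"
#include "api/test/rtc_error_matchers.h"
#include "test/gmock.h"
#include "test/gtest.h"

namespace {

webrtc::RTCErrorOr<int> ParseAnswer(bool ok) {
  if (ok) {
    return 42;
  }
  return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, "bad sdp");
}

TEST(RtcErrorMatchersSketch, MatchesOkAndError) {
  // IsRtcOkAndHolds checks both the status and the wrapped value.
  EXPECT_THAT(ParseAnswer(true), webrtc::IsRtcOkAndHolds(42));
  // The typed matchers apply to the RTCError itself.
  EXPECT_THAT(ParseAnswer(false).error(),
              webrtc::IsRtcErrorWithTypeAndMessage(
                  webrtc::RTCErrorType::INVALID_PARAMETER, "bad sdp"));
}

}  // namespace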
uint64_t packet_id; + webrtc::EcnMarking ecn; }; struct PacketDeliveryInfo { static constexpr int kNotReceived = -1; PacketDeliveryInfo(PacketInFlightInfo source, int64_t receive_time_us) - : receive_time_us(receive_time_us), packet_id(source.packet_id) {} + : receive_time_us(receive_time_us), + packet_id(source.packet_id), + ecn(source.ecn) {} bool operator==(const PacketDeliveryInfo& other) const { return receive_time_us == other.receive_time_us && @@ -46,6 +61,7 @@ struct PacketDeliveryInfo { int64_t receive_time_us; uint64_t packet_id; + webrtc::EcnMarking ecn; }; // BuiltInNetworkBehaviorConfig is a built-in network behavior configuration @@ -58,10 +74,10 @@ struct BuiltInNetworkBehaviorConfig { int queue_delay_ms = 0; // Standard deviation of the extra delay. int delay_standard_deviation_ms = 0; - // Link capacity in kbps. - int link_capacity_kbps = 0; - // Random packet loss. - int loss_percent = 0; + // Link capacity. + DataRate link_capacity = DataRate::Infinity(); + // Random packet loss, range 0 to 100. + double loss_percent = 0.; // If packets are allowed to be reordered. bool allow_reordering = false; // The average length of a burst of lost packets. @@ -114,7 +130,15 @@ class NetworkBehaviorInterface { // possible that no packet will be delivered by that time (e.g. in case of // random extra delay), in such case this method should be called again to get // the updated estimated delivery time. - virtual absl::optional NextDeliveryTimeUs() const = 0; + virtual std::optional NextDeliveryTimeUs() const = 0; + // Registers a callback that should be triggered by an implementation if the + // next NextDeliveryTimeUs() has changed between a call to NextDeliveryTimeUs + // and DequeueDeliverablePackets. + // The intended usage is to invoke NextDeliveryTimeUs and reschedule the + // DequeueDeliverablePackets call when network parameters (such as link + // capacity) changes. + virtual void RegisterDeliveryTimeChangedCallback( + absl::AnyInvocable /* callback */) {} virtual ~NetworkBehaviorInterface() = default; }; diff --git a/api/test/stats_observer_interface.h b/api/test/stats_observer_interface.h index 58d8f52d77..1a072e99a0 100644 --- a/api/test/stats_observer_interface.h +++ b/api/test/stats_observer_interface.h @@ -12,6 +12,7 @@ #define API_TEST_STATS_OBSERVER_INTERFACE_H_ #include "absl/strings/string_view.h" +#include "api/scoped_refptr.h" #include "api/stats/rtc_stats_report.h" namespace webrtc { @@ -26,7 +27,7 @@ class StatsObserverInterface { // identified by `pc_label`. virtual void OnStatsReports( absl::string_view pc_label, - const rtc::scoped_refptr& report) = 0; + const scoped_refptr& report) = 0; }; } // namespace webrtc_pc_e2e diff --git a/api/test/test_dependency_factory.cc b/api/test/test_dependency_factory.cc index 41ad70cc3f..4ed18d4a71 100644 --- a/api/test/test_dependency_factory.cc +++ b/api/test/test_dependency_factory.cc @@ -13,6 +13,7 @@ #include #include +#include "api/test/video_quality_test_fixture.h" #include "rtc_base/checks.h" #include "rtc_base/platform_thread_types.h" @@ -22,8 +23,8 @@ namespace { // This checks everything in this file gets called on the same thread. It's // static because it needs to look at the static methods too. 
bool IsValidTestDependencyFactoryThread() { - const rtc::PlatformThreadRef main_thread = rtc::CurrentThreadRef(); - return rtc::IsThreadRefEqual(main_thread, rtc::CurrentThreadRef()); + const PlatformThreadRef main_thread = CurrentThreadRef(); + return IsThreadRefEqual(main_thread, CurrentThreadRef()); } } // namespace diff --git a/api/test/time_controller.cc b/api/test/time_controller.cc index 364dbc235d..627777bc24 100644 --- a/api/test/time_controller.cc +++ b/api/test/time_controller.cc @@ -9,6 +9,14 @@ */ #include "api/test/time_controller.h" +#include +#include + +#include "absl/strings/string_view.h" +#include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/units/time_delta.h" + namespace webrtc { std::unique_ptr TimeController::CreateTaskQueueFactory() { class FactoryWrapper final : public TaskQueueFactory { diff --git a/api/test/time_controller.h b/api/test/time_controller.h index 121f65cea9..8d9cb7c28a 100644 --- a/api/test/time_controller.h +++ b/api/test/time_controller.h @@ -16,8 +16,7 @@ #include "api/task_queue/task_queue_factory.h" #include "api/units/time_delta.h" -#include "api/units/timestamp.h" -#include "rtc_base/synchronization/yield_policy.h" +#include "rtc_base/socket_server.h" #include "rtc_base/thread.h" #include "system_wrappers/include/clock.h" @@ -40,16 +39,16 @@ class TimeController { // is destroyed. std::unique_ptr CreateTaskQueueFactory(); - // Creates an rtc::Thread instance. If `socket_server` is nullptr, a default - // noop socket server is created. - // Returned thread is not null and started. - virtual std::unique_ptr CreateThread( + // Creates an webrtc::Thread instance. If `socket_server` is nullptr, a + // default noop socket server is created. Returned thread is not null and + // started. + virtual std::unique_ptr CreateThread( const std::string& name, - std::unique_ptr socket_server = nullptr) = 0; + std::unique_ptr socket_server = nullptr) = 0; - // Creates an rtc::Thread instance that ensure that it's set as the current + // Creates an webrtc::Thread instance that ensure that it's set as the current // thread. - virtual rtc::Thread* GetMainThread() = 0; + virtual Thread* GetMainThread() = 0; // Allow task queues and process threads created by this instance to execute // for the given `duration`. virtual void AdvanceTime(TimeDelta duration) = 0; @@ -62,28 +61,5 @@ class TimeController { TimeDelta max_duration = TimeDelta::Seconds(5)); }; -// Interface for telling time, scheduling an event to fire at a particular time, -// and waiting for time to pass. -class ControlledAlarmClock { - public: - virtual ~ControlledAlarmClock() = default; - - // Gets a clock that tells the alarm clock's notion of time. - virtual Clock* GetClock() = 0; - - // Schedules the alarm to fire at `deadline`. - // An alarm clock only supports one deadline. Calls to `ScheduleAlarmAt` with - // an earlier deadline will reset the alarm to fire earlier.Calls to - // `ScheduleAlarmAt` with a later deadline are ignored. Returns true if the - // deadline changed, false otherwise. - virtual bool ScheduleAlarmAt(Timestamp deadline) = 0; - - // Sets the callback that should be run when the alarm fires. - virtual void SetCallback(std::function callback) = 0; - - // Waits for `duration` to pass, according to the alarm clock. 
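As a usage sketch of the TimeController surface after the rtc:: to webrtc:: renames above, assuming a concrete controller implementation such as the simulated-time one under test/time_controller (outside this diff):

#include <memory>
#include <string>

#include "api/test/time_controller.h"
#include "api/units/time_delta.h"
#include "rtc_base/thread.h"

void PumpForTenMilliseconds(webrtc::TimeController& time_controller) {
  std::unique_ptr<webrtc::Thread> thread =
      time_controller.CreateThread("sketch_worker");
  thread->PostTask([] { /* Runs under the controller's clock. */ });
  time_controller.AdvanceTime(webrtc::TimeDelta::Millis(10));
}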
- virtual void Sleep(TimeDelta duration) = 0; -}; - } // namespace webrtc #endif // API_TEST_TIME_CONTROLLER_H_ diff --git a/api/test/video/BUILD.gn b/api/test/video/BUILD.gn index 0eae85aef3..f1f8db6c61 100644 --- a/api/test/video/BUILD.gn +++ b/api/test/video/BUILD.gn @@ -18,13 +18,13 @@ rtc_library("function_video_factory") { deps = [ "../../../rtc_base:checks", + "../../environment", "../../video_codecs:video_codecs_api", ] } rtc_library("video_frame_writer") { visibility = [ "*" ] - testonly = true sources = [ "video_frame_writer.h" ] deps = [ "../../video:video_frame" ] @@ -47,5 +47,4 @@ rtc_library("test_video_track_source") { "../../video:recordable_encoded_frame", "../../video:video_frame", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } diff --git a/api/test/video/function_video_decoder_factory.h b/api/test/video/function_video_decoder_factory.h index 2145c71bff..ceedd801a0 100644 --- a/api/test/video/function_video_decoder_factory.h +++ b/api/test/video/function_video_decoder_factory.h @@ -16,10 +16,10 @@ #include #include +#include "api/environment/environment.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_decoder_factory.h" -#include "rtc_base/checks.h" namespace webrtc { namespace test { @@ -29,17 +29,20 @@ class FunctionVideoDecoderFactory final : public VideoDecoderFactory { public: explicit FunctionVideoDecoderFactory( std::function()> create) - : create_([create = std::move(create)](const SdpVideoFormat&) { + : create_([create = std::move(create)](const Environment&, + const SdpVideoFormat&) { return create(); }) {} explicit FunctionVideoDecoderFactory( - std::function(const SdpVideoFormat&)> + std::function(const Environment&, + const SdpVideoFormat&)> create) : create_(std::move(create)) {} FunctionVideoDecoderFactory( std::function()> create, std::vector sdp_video_formats) - : create_([create = std::move(create)](const SdpVideoFormat&) { + : create_([create = std::move(create)](const Environment&, + const SdpVideoFormat&) { return create(); }), sdp_video_formats_(std::move(sdp_video_formats)) {} @@ -48,13 +51,14 @@ class FunctionVideoDecoderFactory final : public VideoDecoderFactory { return sdp_video_formats_; } - std::unique_ptr CreateVideoDecoder( - const SdpVideoFormat& format) override { - return create_(format); + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override { + return create_(env, format); } private: - const std::function(const SdpVideoFormat&)> + const std::function(const Environment& env, + const SdpVideoFormat&)> create_; const std::vector sdp_video_formats_; }; diff --git a/api/test/video/function_video_encoder_factory.h b/api/test/video/function_video_encoder_factory.h index 98ece2bc94..8902c3e774 100644 --- a/api/test/video/function_video_encoder_factory.h +++ b/api/test/video/function_video_encoder_factory.h @@ -16,6 +16,7 @@ #include #include +#include "api/environment/environment.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" @@ -30,11 +31,13 @@ class FunctionVideoEncoderFactory final : public VideoEncoderFactory { public: explicit FunctionVideoEncoderFactory( std::function()> create) - : create_([create = std::move(create)](const SdpVideoFormat&) { + : create_([create = std::move(create)](const Environment&, + const SdpVideoFormat&) { return create(); }) {} explicit FunctionVideoEncoderFactory( - 
std::function(const SdpVideoFormat&)> + std::function(const Environment&, + const SdpVideoFormat&)> create) : create_(std::move(create)) {} @@ -44,13 +47,14 @@ class FunctionVideoEncoderFactory final : public VideoEncoderFactory { return {}; } - std::unique_ptr CreateVideoEncoder( - const SdpVideoFormat& format) override { - return create_(format); + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override { + return create_(env, format); } private: - const std::function(const SdpVideoFormat&)> + const std::function(const Environment&, + const SdpVideoFormat&)> create_; }; diff --git a/api/test/video/test_video_track_source.cc b/api/test/video/test_video_track_source.cc index 56d70d1774..b4b88c549d 100644 --- a/api/test/video/test_video_track_source.cc +++ b/api/test/video/test_video_track_source.cc @@ -9,9 +9,10 @@ */ #include "api/test/video/test_video_track_source.h" +#include +#include #include -#include "absl/types/optional.h" #include "api/media_stream_interface.h" #include "api/sequence_checker.h" #include "api/video/video_frame.h" @@ -24,7 +25,7 @@ namespace test { TestVideoTrackSource::TestVideoTrackSource( bool remote, - absl::optional stream_label) + std::optional stream_label) : stream_label_(std::move(stream_label)), state_(kInitializing), remote_(remote) { @@ -45,15 +46,13 @@ void TestVideoTrackSource::SetState(SourceState new_state) { } } -void TestVideoTrackSource::AddOrUpdateSink( - rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { +void TestVideoTrackSource::AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) { RTC_DCHECK(worker_thread_checker_.IsCurrent()); source()->AddOrUpdateSink(sink, wants); } -void TestVideoTrackSource::RemoveSink( - rtc::VideoSinkInterface* sink) { +void TestVideoTrackSource::RemoveSink(VideoSinkInterface* sink) { RTC_DCHECK(worker_thread_checker_.IsCurrent()); source()->RemoveSink(sink); } diff --git a/api/test/video/test_video_track_source.h b/api/test/video/test_video_track_source.h index 173bb64e58..6e0e720f92 100644 --- a/api/test/video/test_video_track_source.h +++ b/api/test/video/test_video_track_source.h @@ -11,9 +11,9 @@ #ifndef API_TEST_VIDEO_TEST_VIDEO_TRACK_SOURCE_H_ #define API_TEST_VIDEO_TEST_VIDEO_TRACK_SOURCE_H_ +#include #include -#include "absl/types/optional.h" #include "api/media_stream_interface.h" #include "api/notifier.h" #include "api/sequence_checker.h" @@ -32,7 +32,7 @@ class TestVideoTrackSource : public Notifier { public: explicit TestVideoTrackSource( bool remote, - absl::optional stream_label = absl::nullopt); + std::optional stream_label = std::nullopt); ~TestVideoTrackSource() override = default; void SetState(SourceState new_state); @@ -41,22 +41,20 @@ class TestVideoTrackSource : public Notifier { bool remote() const override { return remote_; } bool is_screencast() const override { return false; } - absl::optional needs_denoising() const override { - return absl::nullopt; - } + std::optional needs_denoising() const override { return std::nullopt; } - bool GetStats(Stats* stats) override { return false; } + bool GetStats(Stats* /* stats */) override { return false; } - void AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override; - void RemoveSink(rtc::VideoSinkInterface* sink) override; + void AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) override; + void RemoveSink(VideoSinkInterface* sink) override; bool SupportsEncodedOutput() const override { return false; } void 
GenerateKeyFrame() override {} void AddEncodedSink( - rtc::VideoSinkInterface* sink) override {} + VideoSinkInterface* /* sink */) override {} void RemoveEncodedSink( - rtc::VideoSinkInterface* sink) override {} + VideoSinkInterface* /* sink */) override {} // Starts producing video. virtual void Start() = 0; @@ -67,24 +65,24 @@ class TestVideoTrackSource : public Notifier { virtual void SetScreencast(bool is_screencast) = 0; // TODO(titovartem): make next 4 methods pure virtual. - virtual void SetEnableAdaptation(bool enable_adaptation) {} + virtual void SetEnableAdaptation(bool /* enable_adaptation */) {} virtual int GetFrameWidth() const { return 0; } virtual int GetFrameHeight() const { return 0; } - virtual void OnOutputFormatRequest(int width, - int height, - const absl::optional& max_fps) {} + virtual void OnOutputFormatRequest(int /* width */, + int /* height */, + const std::optional& /* max_fps */) {} // Returns stream label for this video source if present. Implementations // may override this method to increase debugability and testability. - virtual absl::optional GetStreamLabel() { return stream_label_; } + virtual std::optional GetStreamLabel() { return stream_label_; } protected: - virtual rtc::VideoSourceInterface* source() = 0; + virtual VideoSourceInterface* source() = 0; private: - const absl::optional stream_label_; + const std::optional stream_label_; RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_thread_checker_; RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_checker_; SourceState state_ RTC_GUARDED_BY(&signaling_thread_checker_); diff --git a/api/test/video_codec_stats.cc b/api/test/video_codec_stats.cc deleted file mode 100644 index fb7226701e..0000000000 --- a/api/test/video_codec_stats.cc +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "api/test/video_codec_stats.h" - -namespace webrtc { -namespace test { - -void VideoCodecStats::Stream::LogMetrics( - MetricsLogger* logger, - std::string test_case_name, - std::map metadata) const { - logger->LogMetric("width", test_case_name, width, Unit::kCount, - webrtc::test::ImprovementDirection::kBiggerIsBetter, - metadata); - - logger->LogMetric("height", test_case_name, height, Unit::kCount, - webrtc::test::ImprovementDirection::kBiggerIsBetter, - metadata); - - logger->LogMetric( - "frame_size_bytes", test_case_name, frame_size_bytes, Unit::kBytes, - webrtc::test::ImprovementDirection::kNeitherIsBetter, metadata); - - logger->LogMetric("keyframe", test_case_name, keyframe, Unit::kCount, - webrtc::test::ImprovementDirection::kSmallerIsBetter, - metadata); - - logger->LogMetric("qp", test_case_name, qp, Unit::kUnitless, - webrtc::test::ImprovementDirection::kSmallerIsBetter, - metadata); - - logger->LogMetric( - "encode_time_ms", test_case_name, encode_time_ms, Unit::kMilliseconds, - webrtc::test::ImprovementDirection::kSmallerIsBetter, metadata); - - logger->LogMetric( - "decode_time_ms", test_case_name, decode_time_ms, Unit::kMilliseconds, - webrtc::test::ImprovementDirection::kSmallerIsBetter, metadata); - - logger->LogMetric("target_bitrate_kbps", test_case_name, target_bitrate_kbps, - Unit::kKilobitsPerSecond, - webrtc::test::ImprovementDirection::kBiggerIsBetter, - metadata); - - logger->LogMetric("target_framerate_fps", test_case_name, - target_framerate_fps, Unit::kHertz, - webrtc::test::ImprovementDirection::kBiggerIsBetter, - metadata); - - logger->LogMetric("encoded_bitrate_kbps", test_case_name, - encoded_bitrate_kbps, Unit::kKilobitsPerSecond, - webrtc::test::ImprovementDirection::kBiggerIsBetter, - metadata); - - logger->LogMetric("encoded_framerate_fps", test_case_name, - encoded_framerate_fps, Unit::kHertz, - webrtc::test::ImprovementDirection::kBiggerIsBetter, - metadata); - - logger->LogMetric("bitrate_mismatch_pct", test_case_name, - bitrate_mismatch_pct, Unit::kPercent, - webrtc::test::ImprovementDirection::kSmallerIsBetter, - metadata); - - logger->LogMetric("framerate_mismatch_pct", test_case_name, - framerate_mismatch_pct, Unit::kPercent, - webrtc::test::ImprovementDirection::kSmallerIsBetter, - metadata); - - logger->LogMetric("transmission_time_ms", test_case_name, - transmission_time_ms, Unit::kMilliseconds, - webrtc::test::ImprovementDirection::kSmallerIsBetter, - metadata); - - logger->LogMetric("psnr_y_db", test_case_name, psnr.y, Unit::kUnitless, - webrtc::test::ImprovementDirection::kBiggerIsBetter, - metadata); - - logger->LogMetric("psnr_u_db", test_case_name, psnr.u, Unit::kUnitless, - webrtc::test::ImprovementDirection::kBiggerIsBetter, - metadata); - - logger->LogMetric("psnr_v_db", test_case_name, psnr.v, Unit::kUnitless, - webrtc::test::ImprovementDirection::kBiggerIsBetter, - metadata); -} - -} // namespace test -} // namespace webrtc diff --git a/api/test/video_codec_stats.h b/api/test/video_codec_stats.h deleted file mode 100644 index 80f8287848..0000000000 --- a/api/test/video_codec_stats.h +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef API_TEST_VIDEO_CODEC_STATS_H_ -#define API_TEST_VIDEO_CODEC_STATS_H_ - -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/numerics/samples_stats_counter.h" -#include "api/test/metrics/metric.h" -#include "api/test/metrics/metrics_logger.h" -#include "api/units/data_rate.h" -#include "api/units/data_size.h" -#include "api/units/frequency.h" - -namespace webrtc { -namespace test { - -// Interface for encoded and/or decoded video frame and stream statistics. -class VideoCodecStats { - public: - // Filter for slicing frames. - struct Filter { - absl::optional first_frame; - absl::optional last_frame; - absl::optional spatial_idx; - absl::optional temporal_idx; - }; - - struct Frame { - int frame_num = 0; - uint32_t timestamp_rtp = 0; - - int spatial_idx = 0; - int temporal_idx = 0; - - int width = 0; - int height = 0; - DataSize frame_size = DataSize::Zero(); - bool keyframe = false; - absl::optional qp; - absl::optional base_spatial_idx; - - Timestamp encode_start = Timestamp::Zero(); - TimeDelta encode_time = TimeDelta::Zero(); - Timestamp decode_start = Timestamp::Zero(); - TimeDelta decode_time = TimeDelta::Zero(); - - struct Psnr { - double y = 0.0; - double u = 0.0; - double v = 0.0; - }; - absl::optional psnr; - - absl::optional target_bitrate; - absl::optional target_framerate; - - bool encoded = false; - bool decoded = false; - }; - - struct Stream { - SamplesStatsCounter width; - SamplesStatsCounter height; - SamplesStatsCounter frame_size_bytes; - SamplesStatsCounter keyframe; - SamplesStatsCounter qp; - - SamplesStatsCounter encode_time_ms; - SamplesStatsCounter decode_time_ms; - - SamplesStatsCounter target_bitrate_kbps; - SamplesStatsCounter target_framerate_fps; - - SamplesStatsCounter encoded_bitrate_kbps; - SamplesStatsCounter encoded_framerate_fps; - - SamplesStatsCounter bitrate_mismatch_pct; - SamplesStatsCounter framerate_mismatch_pct; - - SamplesStatsCounter transmission_time_ms; - - struct Psnr { - SamplesStatsCounter y; - SamplesStatsCounter u; - SamplesStatsCounter v; - } psnr; - - // Logs `Stream` metrics to provided `MetricsLogger`. - void LogMetrics(MetricsLogger* logger, - std::string test_case_name, - std::map metadata = {}) const; - }; - - virtual ~VideoCodecStats() = default; - - // Returns frames from interval, spatial and temporal layer specified by given - // `filter`. - virtual std::vector Slice( - absl::optional filter = absl::nullopt) const = 0; - - // Returns video statistics aggregated for given `frames`. - virtual Stream Aggregate(const std::vector& frames) const = 0; -}; - -} // namespace test -} // namespace webrtc - -#endif // API_TEST_VIDEO_CODEC_STATS_H_ diff --git a/api/test/video_codec_tester.h b/api/test/video_codec_tester.h deleted file mode 100644 index c2fb89e2cb..0000000000 --- a/api/test/video_codec_tester.h +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef API_TEST_VIDEO_CODEC_TESTER_H_ -#define API_TEST_VIDEO_CODEC_TESTER_H_ - -#include -#include - -#include "absl/functional/any_invocable.h" -#include "absl/types/optional.h" -#include "api/test/video_codec_stats.h" -#include "api/video/encoded_image.h" -#include "api/video/resolution.h" -#include "api/video/video_frame.h" - -namespace webrtc { -namespace test { - -// Interface for a video codec tester. The interface provides minimalistic set -// of data structures that enables implementation of decode-only, encode-only -// and encode-decode tests. -class VideoCodecTester { - public: - // Pacing settings for codec input. - struct PacingSettings { - enum PacingMode { - // Pacing is not used. Frames are sent to codec back-to-back. - kNoPacing, - // Pace with the rate equal to the target video frame rate. Pacing time is - // derived from RTP timestamp. - kRealTime, - // Pace with the explicitly provided rate. - kConstantRate, - }; - PacingMode mode = PacingMode::kNoPacing; - // Pacing rate for `kConstantRate` mode. - Frequency constant_rate = Frequency::Zero(); - }; - - struct DecoderSettings { - PacingSettings pacing; - absl::optional decoder_input_base_path; - absl::optional decoder_output_base_path; - }; - - struct EncoderSettings { - PacingSettings pacing; - absl::optional encoder_input_base_path; - absl::optional encoder_output_base_path; - }; - - virtual ~VideoCodecTester() = default; - - // Interface for a raw video frames source. - class RawVideoSource { - public: - virtual ~RawVideoSource() = default; - - // Returns next frame. If no more frames to pull, returns `absl::nullopt`. - // For analysis and pacing purposes, frame must have RTP timestamp set. The - // timestamp must represent the target video frame rate and be unique. - virtual absl::optional PullFrame() = 0; - - // Returns early pulled frame with RTP timestamp equal to `timestamp_rtp`. - virtual VideoFrame GetFrame(uint32_t timestamp_rtp, - Resolution resolution) = 0; - }; - - // Interface for a coded video frames source. - class CodedVideoSource { - public: - virtual ~CodedVideoSource() = default; - - // Returns next frame. If no more frames to pull, returns `absl::nullopt`. - // For analysis and pacing purposes, frame must have RTP timestamp set. The - // timestamp must represent the target video frame rate and be unique. - virtual absl::optional PullFrame() = 0; - }; - - // Interface for a video encoder. - class Encoder { - public: - using EncodeCallback = - absl::AnyInvocable; - - virtual ~Encoder() = default; - - virtual void Initialize() = 0; - - virtual void Encode(const VideoFrame& frame, EncodeCallback callback) = 0; - - virtual void Flush() = 0; - }; - - // Interface for a video decoder. - class Decoder { - public: - using DecodeCallback = - absl::AnyInvocable; - - virtual ~Decoder() = default; - - virtual void Initialize() = 0; - - virtual void Decode(const EncodedImage& frame, DecodeCallback callback) = 0; - - virtual void Flush() = 0; - }; - - // Pulls coded video frames from `video_source` and passes them to `decoder`. - // Returns `VideoCodecTestStats` object that contains collected per-frame - // metrics. - virtual std::unique_ptr RunDecodeTest( - CodedVideoSource* video_source, - Decoder* decoder, - const DecoderSettings& decoder_settings) = 0; - - // Pulls raw video frames from `video_source` and passes them to `encoder`. - // Returns `VideoCodecTestStats` object that contains collected per-frame - // metrics. 
- virtual std::unique_ptr RunEncodeTest( - RawVideoSource* video_source, - Encoder* encoder, - const EncoderSettings& encoder_settings) = 0; - - // Pulls raw video frames from `video_source`, passes them to `encoder` and - // then passes encoded frames to `decoder`. Returns `VideoCodecTestStats` - // object that contains collected per-frame metrics. - virtual std::unique_ptr RunEncodeDecodeTest( - RawVideoSource* video_source, - Encoder* encoder, - Decoder* decoder, - const EncoderSettings& encoder_settings, - const DecoderSettings& decoder_settings) = 0; -}; - -} // namespace test -} // namespace webrtc - -#endif // API_TEST_VIDEO_CODEC_TESTER_H_ diff --git a/api/test/video_quality_analyzer_interface.h b/api/test/video_quality_analyzer_interface.h index d21bc6c5f7..48685b9936 100644 --- a/api/test/video_quality_analyzer_interface.h +++ b/api/test/video_quality_analyzer_interface.h @@ -11,12 +11,14 @@ #ifndef API_TEST_VIDEO_QUALITY_ANALYZER_INTERFACE_H_ #define API_TEST_VIDEO_QUALITY_ANALYZER_INTERFACE_H_ -#include +#include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/scoped_refptr.h" +#include "api/stats/rtc_stats_report.h" #include "api/test/stats_observer_interface.h" #include "api/video/encoded_image.h" #include "api/video/video_frame.h" @@ -39,7 +41,7 @@ namespace webrtc { // ___________ ________ _________ // | | | | | | // | Frame |-(A)→| WebRTC |-(B)→| Video |-(C)┐ -// | Generator | | Stack | | Decoder | | +// | Generator | | Stack | | Encoder | | // ¯¯¯¯¯¯¯¯¯¯¯ ¯¯¯¯¯¯¯¯ ¯¯¯¯¯¯¯¯¯ | // __↓________ // | Transport | @@ -71,7 +73,9 @@ class VideoQualityAnalyzerInterface std::string decoder_name = "unknown"; // Decode time provided by decoder itself. If decoder doesn’t produce such // information can be omitted. - absl::optional decode_time_ms = absl::nullopt; + std::optional decode_time_ms = std::nullopt; + // Decoder quantizer value. + std::optional qp = std::nullopt; }; ~VideoQualityAnalyzerInterface() override = default; @@ -83,9 +87,9 @@ class VideoQualityAnalyzerInterface // calculations. Analyzer can perform simple calculations on the calling // thread in each method, but should remember, that it is the same thread, // that is used in video pipeline. - virtual void Start(std::string test_case_name, - rtc::ArrayView peer_names, - int max_threads_count) {} + virtual void Start(std::string /* test_case_name */, + ArrayView /* peer_names */, + int /* max_threads_count */) {} // Will be called when frame was generated from the input stream. // `peer_name` is name of the peer on which side frame was captured. @@ -95,73 +99,75 @@ class VideoQualityAnalyzerInterface const VideoFrame& frame) = 0; // Will be called before calling the encoder. // `peer_name` is name of the peer on which side frame came to encoder. - virtual void OnFramePreEncode(absl::string_view peer_name, - const VideoFrame& frame) {} + virtual void OnFramePreEncode(absl::string_view /* peer_name */, + const VideoFrame& /* frame */) {} // Will be called for each EncodedImage received from encoder. Single // VideoFrame can produce multiple EncodedImages. Each encoded image will // have id from VideoFrame. // `peer_name` is name of the peer on which side frame was encoded. 
- virtual void OnFrameEncoded(absl::string_view peer_name, - uint16_t frame_id, - const EncodedImage& encoded_image, - const EncoderStats& stats, - bool discarded) {} + virtual void OnFrameEncoded(absl::string_view /* peer_name */, + uint16_t /* frame_id */, + const EncodedImage& /* encoded_image */, + const EncoderStats& /* stats */, + bool /* discarded */) {} // Will be called for each frame dropped by encoder. // `peer_name` is name of the peer on which side frame drop was detected. - virtual void OnFrameDropped(absl::string_view peer_name, - EncodedImageCallback::DropReason reason) {} + virtual void OnFrameDropped(absl::string_view /* peer_name */, + EncodedImageCallback::DropReason /* reason */) {} // Will be called before calling the decoder. // `peer_name` is name of the peer on which side frame was received. - virtual void OnFramePreDecode(absl::string_view peer_name, - uint16_t frame_id, - const EncodedImage& encoded_image) {} + virtual void OnFramePreDecode(absl::string_view /* peer_name */, + uint16_t /* frame_id */, + const EncodedImage& /* encoded_image */) {} // Will be called after decoding the frame. // `peer_name` is name of the peer on which side frame was decoded. - virtual void OnFrameDecoded(absl::string_view peer_name, - const VideoFrame& frame, - const DecoderStats& stats) {} + virtual void OnFrameDecoded(absl::string_view /* peer_name */, + const VideoFrame& /* frame */, + const DecoderStats& /* stats */) {} // Will be called when frame will be obtained from PeerConnection stack. // `peer_name` is name of the peer on which side frame was rendered. - virtual void OnFrameRendered(absl::string_view peer_name, - const VideoFrame& frame) {} + virtual void OnFrameRendered(absl::string_view /* peer_name */, + const VideoFrame& /* frame */) {} // Will be called if encoder return not WEBRTC_VIDEO_CODEC_OK. // All available codes are listed in // modules/video_coding/include/video_error_codes.h // `peer_name` is name of the peer on which side error acquired. - virtual void OnEncoderError(absl::string_view peer_name, - const VideoFrame& frame, - int32_t error_code) {} + virtual void OnEncoderError(absl::string_view /* peer_name */, + const VideoFrame& /* frame */, + int32_t /* error_code */) {} // Will be called if decoder return not WEBRTC_VIDEO_CODEC_OK. // All available codes are listed in // modules/video_coding/include/video_error_codes.h // `peer_name` is name of the peer on which side error acquired. - virtual void OnDecoderError(absl::string_view peer_name, - uint16_t frame_id, - int32_t error_code, - const DecoderStats& stats) {} + virtual void OnDecoderError(absl::string_view /* peer_name */, + uint16_t /* frame_id */, + int32_t /* error_code */, + const DecoderStats& /* stats */) {} // Will be called every time new stats reports are available for the // Peer Connection identified by `pc_label`. void OnStatsReports( - absl::string_view pc_label, - const rtc::scoped_refptr& report) override {} + absl::string_view /* pc_label */, + const scoped_refptr& /* report */) override {} // Will be called before test adds new participant in the middle of a call. - virtual void RegisterParticipantInCall(absl::string_view peer_name) {} + virtual void RegisterParticipantInCall(absl::string_view /* peer_name */) {} // Will be called after test removed existing participant in the middle of the // call. 
- virtual void UnregisterParticipantInCall(absl::string_view peer_name) {} + virtual void UnregisterParticipantInCall(absl::string_view /* peer_name */) {} // Informs analyzer that peer `receiver_peer_name` should not receive any // stream from sender `sender_peer_name`. // This method is a no-op if the sender or the receiver does not exist. - virtual void OnPauseAllStreamsFrom(absl::string_view sender_peer_name, - absl::string_view receiver_peer_name) {} + virtual void OnPauseAllStreamsFrom( + absl::string_view /* sender_peer_name */, + absl::string_view /* receiver_peer_name */) {} // Informs analyzer that peer `receiver_peer_name` is expected to receive all // streams from `sender_peer_name`. // This method is a no-op if the sender or the receiver does not exist. - virtual void OnResumeAllStreamsFrom(absl::string_view sender_peer_name, - absl::string_view receiver_peer_name) {} + virtual void OnResumeAllStreamsFrom( + absl::string_view /* sender_peer_name */, + absl::string_view /* receiver_peer_name */) {} // Tells analyzer that analysis complete and it should calculate final // statistics. @@ -175,7 +181,7 @@ class VideoQualityAnalyzerInterface // Returns the sender peer name of the last stream where this frame was // captured. The sender for this frame id may change when the frame ids wrap // around. Also it will crash, if the specified `frame_id` wasn't captured. - virtual std::string GetSenderPeerName(uint16_t frame_id) const { + virtual std::string GetSenderPeerName(uint16_t /* frame_id */) const { RTC_CHECK(false) << "Not implemented."; } }; diff --git a/api/test/video_quality_test_fixture.h b/api/test/video_quality_test_fixture.h index b45faef286..763dd7ddf8 100644 --- a/api/test/video_quality_test_fixture.h +++ b/api/test/video_quality_test_fixture.h @@ -11,18 +11,23 @@ #ifndef API_TEST_VIDEO_QUALITY_TEST_FIXTURE_H_ #define API_TEST_VIDEO_QUALITY_TEST_FIXTURE_H_ +#include +#include #include #include +#include #include #include #include "api/fec_controller.h" #include "api/media_types.h" #include "api/network_state_predictor.h" +#include "api/rtp_parameters.h" #include "api/test/simulated_network.h" #include "api/transport/bitrate_settings.h" #include "api/transport/network_control.h" -#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/spatial_layer.h" +#include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" #include "video/config/video_encoder_config.h" @@ -61,7 +66,7 @@ class VideoQualityTestFixtureInterface { bool automatic_scaling = false; std::string clip_path; // "Generator" to generate frames instead. size_t capture_device_index = 0; - SdpVideoFormat::Parameters sdp_params; + CodecParameterMap sdp_params; double encoder_overshoot_factor = 0.0; } video[2]; struct Audio { @@ -69,7 +74,7 @@ class VideoQualityTestFixtureInterface { bool sync_video = false; bool dtx = false; bool use_real_adm = false; - absl::optional ana_config; + std::optional ana_config; } audio; struct Screenshare { bool enabled = false; @@ -90,7 +95,7 @@ class VideoQualityTestFixtureInterface { // `sender_network` and `receiver_network` in InjectionComponents are // non-null. May be nullopt even if `sender_network` and `receiver_network` // are null; in that case, a default config will be used. - absl::optional config; + std::optional config; struct SS { // Spatial scalability. std::vector streams; // If empty, one stream is assumed. 
size_t selected_stream = 0; diff --git a/api/test/videocodec_test_fixture.h b/api/test/videocodec_test_fixture.h index 8e66f72b91..0e35ec354f 100644 --- a/api/test/videocodec_test_fixture.h +++ b/api/test/videocodec_test_fixture.h @@ -11,14 +11,18 @@ #ifndef API_TEST_VIDEOCODEC_TEST_FIXTURE_H_ #define API_TEST_VIDEOCODEC_TEST_FIXTURE_H_ +#include +#include #include #include #include "api/test/videocodec_test_stats.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" #include "api/video_codecs/h264_profile_level_id.h" -#include "api/video_codecs/video_decoder_factory.h" -#include "api/video_codecs/video_encoder_factory.h" -#include "modules/video_coding/include/video_codec_interface.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_codec.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" namespace webrtc { namespace test { @@ -66,7 +70,7 @@ class VideoCodecTestFixture { struct Config { Config(); - void SetCodecSettings(std::string codec_name, + void SetCodecSettings(std::string codec_name_to_set, size_t num_simulcast_streams, size_t num_spatial_layers, size_t num_temporal_layers, @@ -91,15 +95,15 @@ class VideoCodecTestFixture { std::string filename; // Dimensions of test clip. Falls back to (codec_settings.width/height) if // not set. - absl::optional clip_width; - absl::optional clip_height; + std::optional clip_width; + std::optional clip_height; // Framerate of input clip. Defaults to 30fps if not set. - absl::optional clip_fps; + std::optional clip_fps; // The resolution at which psnr/ssim comparisons should be made. Frames // will be scaled to this size if different. - absl::optional reference_width; - absl::optional reference_height; + std::optional reference_width; + std::optional reference_height; // File to process. This must be a video file in the YUV format. std::string filepath; @@ -134,8 +138,8 @@ class VideoCodecTestFixture { // default `SdpVideoFormat` based on `codec_name`. // Encoder and decoder name (`SdpVideoFormat::name`) should be the same as // `codec_name`. - absl::optional encoder_format; - absl::optional decoder_format; + std::optional encoder_format; + std::optional decoder_format; // H.264 specific settings. 
struct H264CodecSettings { diff --git a/api/test/videocodec_test_stats.cc b/api/test/videocodec_test_stats.cc index f082b1e935..5f0b8dccd0 100644 --- a/api/test/videocodec_test_stats.cc +++ b/api/test/videocodec_test_stats.cc @@ -10,6 +10,10 @@ #include "api/test/videocodec_test_stats.h" +#include +#include +#include + #include "rtc_base/strings/string_builder.h" namespace webrtc { @@ -23,7 +27,7 @@ VideoCodecTestStats::FrameStatistics::FrameStatistics(size_t frame_number, spatial_idx(spatial_idx) {} std::string VideoCodecTestStats::FrameStatistics::ToString() const { - rtc::StringBuilder ss; + StringBuilder ss; for (const auto& entry : ToMap()) { if (ss.size() > 0) { ss << " "; @@ -62,7 +66,7 @@ std::map VideoCodecTestStats::FrameStatistics::ToMap() std::string VideoCodecTestStats::VideoStatistics::ToString( std::string prefix) const { - rtc::StringBuilder ss; + StringBuilder ss; for (const auto& entry : ToMap()) { if (ss.size() > 0) { ss << "\n"; diff --git a/api/test/videocodec_test_stats.h b/api/test/videocodec_test_stats.h index d620d31f12..3ce91ce451 100644 --- a/api/test/videocodec_test_stats.h +++ b/api/test/videocodec_test_stats.h @@ -18,7 +18,6 @@ #include #include -#include "absl/types/optional.h" #include "api/units/data_rate.h" #include "api/units/frequency.h" #include "api/video/video_frame_type.h" diff --git a/api/transport/BUILD.gn b/api/transport/BUILD.gn index 12a1f57066..7769526e07 100644 --- a/api/transport/BUILD.gn +++ b/api/transport/BUILD.gn @@ -15,7 +15,22 @@ rtc_library("bitrate_settings") { "bitrate_settings.h", ] deps = [ "../../rtc_base/system:rtc_export" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_source_set("bandwidth_usage") { + visibility = [ "*" ] + sources = [ "bandwidth_usage.h" ] +} + +rtc_library("bandwidth_estimation_settings") { + visibility = [ "*" ] + sources = [ "bandwidth_estimation_settings.h" ] + deps = [ "../../rtc_base/system:rtc_export" ] +} + +rtc_source_set("ecn_marking") { + visibility = [ "*" ] + sources = [ "ecn_marking.h" ] } rtc_source_set("enums") { @@ -32,16 +47,16 @@ rtc_library("network_control") { ] deps = [ + ":ecn_marking", "../../api:field_trials_view", + "../../rtc_base/system:rtc_export", + "../environment", "../rtc_event_log", "../units:data_rate", "../units:data_size", "../units:time_delta", "../units:timestamp", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -53,9 +68,10 @@ rtc_library("field_trial_based_config") { ] deps = [ "../../api:field_trials_registry", + "../../rtc_base/system:rtc_export", "../../system_wrappers:field_trial", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_source_set("datagram_transport_interface") { @@ -63,10 +79,10 @@ rtc_source_set("datagram_transport_interface") { sources = [ "data_channel_transport_interface.h" ] deps = [ "..:array_view", + "..:priority", "..:rtc_error", "../../rtc_base:copy_on_write_buffer", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("goog_cc") { @@ -78,15 +94,16 @@ rtc_library("goog_cc") { deps = [ ":network_control", "..:network_state_predictor_api", - "../../api:field_trials_view", + "../../api/units:time_delta", "../../modules/congestion_controller/goog_cc", + "../../rtc_base/system:rtc_export", ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ] } rtc_source_set("sctp_transport_factory_interface") { visibility = [ "*" ] sources = [ 
"sctp_transport_factory_interface.h" ] + deps = [ "../../api/environment" ] } rtc_source_set("stun_types") { @@ -102,22 +119,29 @@ rtc_source_set("stun_types") { "../../rtc_base:byte_order", "../../rtc_base:checks", "../../rtc_base:crc32", + "../../rtc_base:crypto_random", + "../../rtc_base:digest", "../../rtc_base:ip_address", "../../rtc_base:logging", + "../../rtc_base:net_helpers", "../../rtc_base:socket_address", - "../../rtc_base:ssl", "../../system_wrappers:metrics", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } if (rtc_include_tests) { rtc_source_set("test_feedback_generator_interface") { testonly = true sources = [ "test/feedback_generator_interface.h" ] + visibility = [ "*" ] deps = [ ":network_control", "..:simulated_network_api", + "../units:data_rate", + "../units:data_size", + "../units:time_delta", + "../units:timestamp", ] } rtc_library("test_feedback_generator") { @@ -142,8 +166,10 @@ if (rtc_include_tests) { sources = [ "stun_unittest.cc" ] deps = [ ":stun_types", + "..:array_view", "../../rtc_base:byte_buffer", "../../rtc_base:byte_order", + "../../rtc_base:ip_address", "../../rtc_base:macromagic", "../../rtc_base:socket_address", "../../system_wrappers:metrics", diff --git a/api/transport/DEPS b/api/transport/DEPS index 53a68e0e85..c057dfe5de 100644 --- a/api/transport/DEPS +++ b/api/transport/DEPS @@ -2,6 +2,7 @@ specific_include_rules = { "stun\.h": [ "+rtc_base/byte_buffer.h", "+rtc_base/ip_address.h", + "+rtc_base/net_helpers.h", "+rtc_base/socket_address.h", ], } diff --git a/api/transport/bandwidth_estimation_settings.h b/api/transport/bandwidth_estimation_settings.h new file mode 100644 index 0000000000..7ae8cc9ef8 --- /dev/null +++ b/api/transport/bandwidth_estimation_settings.h @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TRANSPORT_BANDWIDTH_ESTIMATION_SETTINGS_H_ +#define API_TRANSPORT_BANDWIDTH_ESTIMATION_SETTINGS_H_ + +#include "rtc_base/system/rtc_export.h" +namespace webrtc { +// Configuration settings affecting bandwidth estimation. +// These settings can be set and changed by an application. +struct RTC_EXPORT BandwidthEstimationSettings { + // A bandwith estimation probe may be sent using a RtpTransceiver with + // direction SendOnly or SendRecv that supports RTX. The probe can be sent + // without first sending media packets in which case Rtp padding packets are + // used. + bool allow_probe_without_media = false; +}; + +} // namespace webrtc +#endif // API_TRANSPORT_BANDWIDTH_ESTIMATION_SETTINGS_H_ diff --git a/api/metronome/metronome.cc b/api/transport/bandwidth_usage.h similarity index 57% rename from api/metronome/metronome.cc rename to api/transport/bandwidth_usage.h index 8d74f928a0..77500be99f 100644 --- a/api/metronome/metronome.cc +++ b/api/transport/bandwidth_usage.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,12 +8,18 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "api/metronome/metronome.h" +#ifndef API_TRANSPORT_BANDWIDTH_USAGE_H_ +#define API_TRANSPORT_BANDWIDTH_USAGE_H_ namespace webrtc { -// TODO(crbug.com/1381982): Remove outdated methods. -void Metronome::AddListener(TickListener* listener) {} -void Metronome::RemoveListener(TickListener* listener) {} +enum class BandwidthUsage { + kBwNormal = 0, + kBwUnderusing = 1, + kBwOverusing = 2, + kLast +}; } // namespace webrtc + +#endif // API_TRANSPORT_BANDWIDTH_USAGE_H_ diff --git a/api/transport/bitrate_settings.h b/api/transport/bitrate_settings.h index 562309a46c..9bcd694d8e 100644 --- a/api/transport/bitrate_settings.h +++ b/api/transport/bitrate_settings.h @@ -11,9 +11,8 @@ #ifndef API_TRANSPORT_BITRATE_SETTINGS_H_ #define API_TRANSPORT_BITRATE_SETTINGS_H_ -#include +#include -#include "absl/types/optional.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -28,9 +27,9 @@ struct RTC_EXPORT BitrateSettings { ~BitrateSettings(); BitrateSettings(const BitrateSettings&); // 0 <= min <= start <= max should hold for set parameters. - absl::optional min_bitrate_bps; - absl::optional start_bitrate_bps; - absl::optional max_bitrate_bps; + std::optional min_bitrate_bps; + std::optional start_bitrate_bps; + std::optional max_bitrate_bps; }; // TODO(srte): BitrateConstraints and BitrateSettings should be merged. diff --git a/api/transport/data_channel_transport_interface.h b/api/transport/data_channel_transport_interface.h index cdae6fee19..d6b939ab86 100644 --- a/api/transport/data_channel_transport_interface.h +++ b/api/transport/data_channel_transport_interface.h @@ -12,7 +12,10 @@ #ifndef API_TRANSPORT_DATA_CHANNEL_TRANSPORT_INTERFACE_H_ #define API_TRANSPORT_DATA_CHANNEL_TRANSPORT_INTERFACE_H_ -#include "absl/types/optional.h" +#include +#include + +#include "api/priority.h" #include "api/rtc_error.h" #include "rtc_base/copy_on_write_buffer.h" @@ -46,14 +49,14 @@ struct SendDataParams { // Setting this value to zero disables retransmission. // Valid values are in the range [0-UINT16_MAX]. // `max_rtx_count` and `max_rtx_ms` may not be set simultaneously. - absl::optional max_rtx_count; + std::optional max_rtx_count; // If set, the maximum number of milliseconds for which the transport // may retransmit this message before it is dropped. // Setting this value to zero disables retransmission. // Valid values are in the range [0-UINT16_MAX]. // `max_rtx_count` and `max_rtx_ms` may not be set simultaneously. - absl::optional max_rtx_ms; + std::optional max_rtx_ms; }; // Sink for callbacks related to a data channel. @@ -64,7 +67,7 @@ class DataChannelSink { // Callback issued when data is received by the transport. virtual void OnDataReceived(int channel_id, DataMessageType type, - const rtc::CopyOnWriteBuffer& buffer) = 0; + const CopyOnWriteBuffer& buffer) = 0; // Callback issued when a remote data channel begins the closing procedure. // Messages sent after the closing procedure begins will not be transmitted. @@ -85,7 +88,11 @@ class DataChannelSink { // Callback issued when the data channel becomes unusable (closed). // TODO(https://crbug.com/webrtc/10360): Make pure virtual when all // consumers updated. 
- virtual void OnTransportClosed(RTCError error) {} + virtual void OnTransportClosed(RTCError /* error */) {} + + // The data channel's buffered_amount has fallen to or below the threshold + // set when calling `SetBufferedAmountLowThreshold` + virtual void OnBufferedAmountLow(int channel_id) = 0; }; // Transport for data channels. @@ -95,14 +102,14 @@ class DataChannelTransportInterface { // Opens a data `channel_id` for sending. May return an error if the // specified `channel_id` is unusable. Must be called before `SendData`. - virtual RTCError OpenChannel(int channel_id) = 0; + virtual RTCError OpenChannel(int channel_id, PriorityValue priority) = 0; // Sends a data buffer to the remote endpoint using the given send parameters. // `buffer` may not be larger than 256 KiB. Returns an error if the send // fails. virtual RTCError SendData(int channel_id, const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) = 0; + const CopyOnWriteBuffer& buffer) = 0; // Closes `channel_id` gracefully. Returns an error if `channel_id` is not // open. Data sent after the closing procedure begins will not be @@ -118,6 +125,10 @@ class DataChannelTransportInterface { // Note: the default implementation always returns false (as it assumes no one // has implemented the interface). This default implementation is temporary. virtual bool IsReadyToSend() const = 0; + + virtual size_t buffered_amount(int channel_id) const = 0; + virtual size_t buffered_amount_low_threshold(int channel_id) const = 0; + virtual void SetBufferedAmountLowThreshold(int channel_id, size_t bytes) = 0; }; } // namespace webrtc diff --git a/api/transport/ecn_marking.h b/api/transport/ecn_marking.h new file mode 100644 index 0000000000..bbcab6eb0f --- /dev/null +++ b/api/transport/ecn_marking.h @@ -0,0 +1,42 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TRANSPORT_ECN_MARKING_H_ +#define API_TRANSPORT_ECN_MARKING_H_ + +namespace webrtc { + +// TODO: bugs.webrtc.org/42225697 - L4S support is slowly being developed. +// Help is appreciated. + +// L4S Explicit Congestion Notification (ECN) . +// https://www.rfc-editor.org/rfc/rfc9331.html ECT stands for ECN-Capable +// Transport and CE stands for Congestion Experienced. + +// RFC-3168, Section 5 +// +-----+-----+ +// | ECN FIELD | +// +-----+-----+ +// ECT CE [Obsolete] RFC 2481 names for the ECN bits. +// 0 0 Not-ECT +// 0 1 ECT(1) +// 1 0 ECT(0) +// 1 1 CE + +enum class EcnMarking { + kNotEct = 0, // Not ECN-Capable Transport + kEct1 = 1, // ECN-Capable Transport + kEct0 = 2, // Not used by L4s (or webrtc.) 
+ kCe = 3, // Congestion experienced +}; + +} // namespace webrtc + +#endif // API_TRANSPORT_ECN_MARKING_H_ diff --git a/api/transport/field_trial_based_config.cc b/api/transport/field_trial_based_config.cc index 0cef30f054..ea3ce21512 100644 --- a/api/transport/field_trial_based_config.cc +++ b/api/transport/field_trial_based_config.cc @@ -9,6 +9,9 @@ */ #include "api/transport/field_trial_based_config.h" +#include + +#include "absl/strings/string_view.h" #include "system_wrappers/include/field_trial.h" namespace webrtc { diff --git a/api/transport/field_trial_based_config.h b/api/transport/field_trial_based_config.h index d47140e579..441f89e51d 100644 --- a/api/transport/field_trial_based_config.h +++ b/api/transport/field_trial_based_config.h @@ -14,10 +14,11 @@ #include "absl/strings/string_view.h" #include "api/field_trials_registry.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { // Implementation using the field trial API fo the key value lookup. -class FieldTrialBasedConfig : public FieldTrialsRegistry { +class RTC_EXPORT FieldTrialBasedConfig : public FieldTrialsRegistry { private: std::string GetValue(absl::string_view key) const override; }; diff --git a/api/transport/goog_cc_factory.cc b/api/transport/goog_cc_factory.cc index fd1189901f..11dd3686df 100644 --- a/api/transport/goog_cc_factory.cc +++ b/api/transport/goog_cc_factory.cc @@ -10,21 +10,15 @@ #include "api/transport/goog_cc_factory.h" +#include #include #include +#include "api/transport/network_control.h" +#include "api/units/time_delta.h" #include "modules/congestion_controller/goog_cc/goog_cc_network_control.h" namespace webrtc { -GoogCcNetworkControllerFactory::GoogCcNetworkControllerFactory( - RtcEventLog* event_log) - : event_log_(event_log) {} - -GoogCcNetworkControllerFactory::GoogCcNetworkControllerFactory( - NetworkStatePredictorFactoryInterface* network_state_predictor_factory) { - factory_config_.network_state_predictor_factory = - network_state_predictor_factory; -} GoogCcNetworkControllerFactory::GoogCcNetworkControllerFactory( GoogCcFactoryConfig config) @@ -32,15 +26,12 @@ GoogCcNetworkControllerFactory::GoogCcNetworkControllerFactory( std::unique_ptr GoogCcNetworkControllerFactory::Create(NetworkControllerConfig config) { - if (event_log_) - config.event_log = event_log_; GoogCcConfig goog_cc_config; goog_cc_config.feedback_only = factory_config_.feedback_only; if (factory_config_.network_state_estimator_factory) { - RTC_DCHECK(config.key_value_config); goog_cc_config.network_state_estimator = factory_config_.network_state_estimator_factory->Create( - config.key_value_config); + &config.env.field_trials()); } if (factory_config_.network_state_predictor_factory) { goog_cc_config.network_state_predictor = @@ -56,10 +47,4 @@ TimeDelta GoogCcNetworkControllerFactory::GetProcessInterval() const { return TimeDelta::Millis(kUpdateIntervalMs); } -GoogCcFeedbackNetworkControllerFactory::GoogCcFeedbackNetworkControllerFactory( - RtcEventLog* event_log) - : GoogCcNetworkControllerFactory(event_log) { - factory_config_.feedback_only = true; -} - } // namespace webrtc diff --git a/api/transport/goog_cc_factory.h b/api/transport/goog_cc_factory.h index e12755d745..56947306bc 100644 --- a/api/transport/goog_cc_factory.h +++ b/api/transport/goog_cc_factory.h @@ -10,52 +10,37 @@ #ifndef API_TRANSPORT_GOOG_CC_FACTORY_H_ #define API_TRANSPORT_GOOG_CC_FACTORY_H_ + #include -#include "absl/base/attributes.h" #include "api/network_state_predictor.h" #include "api/transport/network_control.h" +#include 
"api/units/time_delta.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { -class RtcEventLog; struct GoogCcFactoryConfig { - std::unique_ptr - network_state_estimator_factory = nullptr; + std::unique_ptr network_state_estimator_factory; NetworkStatePredictorFactoryInterface* network_state_predictor_factory = nullptr; bool feedback_only = false; }; -class GoogCcNetworkControllerFactory +class RTC_EXPORT GoogCcNetworkControllerFactory : public NetworkControllerFactoryInterface { public: GoogCcNetworkControllerFactory() = default; - ABSL_DEPRECATED("") - explicit GoogCcNetworkControllerFactory(RtcEventLog* event_log); - explicit GoogCcNetworkControllerFactory( - NetworkStatePredictorFactoryInterface* network_state_predictor_factory); - explicit GoogCcNetworkControllerFactory(GoogCcFactoryConfig config); + std::unique_ptr Create( NetworkControllerConfig config) override; TimeDelta GetProcessInterval() const override; - protected: - RtcEventLog* const event_log_ = nullptr; + private: GoogCcFactoryConfig factory_config_; }; -// Deprecated, use GoogCcFactoryConfig to enable feedback only mode instead. -// Factory to create packet feedback only GoogCC, this can be used for -// connections providing packet receive time feedback but no other reports. -class ABSL_DEPRECATED("use GoogCcFactoryConfig instead") - GoogCcFeedbackNetworkControllerFactory - : public GoogCcNetworkControllerFactory { - public: - explicit GoogCcFeedbackNetworkControllerFactory(RtcEventLog* event_log); -}; - } // namespace webrtc #endif // API_TRANSPORT_GOOG_CC_FACTORY_H_ diff --git a/api/transport/network_control.h b/api/transport/network_control.h index 862322443d..90cdebe9bb 100644 --- a/api/transport/network_control.h +++ b/api/transport/network_control.h @@ -10,14 +10,16 @@ #ifndef API_TRANSPORT_NETWORK_CONTROL_H_ #define API_TRANSPORT_NETWORK_CONTROL_H_ -#include #include +#include #include "absl/base/attributes.h" +#include "api/environment/environment.h" #include "api/field_trials_view.h" -#include "api/rtc_event_log/rtc_event_log.h" #include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" namespace webrtc { @@ -35,6 +37,10 @@ class TargetTransferRateObserver { // Configuration sent to factory create function. The parameters here are // optional to use for a network controller implementation. struct NetworkControllerConfig { + explicit NetworkControllerConfig(const Environment& env) : env(env) {} + + Environment env; + // The initial constraints to start with, these can be changed at any later // time by calls to OnTargetRateConstraints. Note that the starting rate // has to be set initially to provide a starting state for the network @@ -43,12 +49,6 @@ struct NetworkControllerConfig { // Initial stream specific configuration, these are changed at any later time // by calls to OnStreamsConfig. StreamsConfig stream_based_config; - - // Optional override of configuration of WebRTC internals. Using nullptr here - // indicates that the field trial API will be used. - const FieldTrialsView* key_value_config = nullptr; - // Optional override of event log. - RtcEventLog* event_log = nullptr; }; // NetworkControllerInterface is implemented by network controllers. A network @@ -118,7 +118,7 @@ class NetworkControllerFactoryInterface { class NetworkStateEstimator { public: // Gets the current best estimate according to the estimator. 
- virtual absl::optional GetCurrentEstimate() = 0; + virtual std::optional GetCurrentEstimate() = 0; // Called with per packet feedback regarding receive time. // Used when the NetworkStateEstimator runs in the sending endpoint. virtual void OnTransportPacketsFeedback(const TransportPacketsFeedback&) = 0; diff --git a/api/transport/network_types.cc b/api/transport/network_types.cc index d6495ce490..ccb4a6e3a9 100644 --- a/api/transport/network_types.cc +++ b/api/transport/network_types.cc @@ -11,6 +11,7 @@ #include "api/transport/network_types.h" #include +#include namespace webrtc { StreamsConfig::StreamsConfig() = default; @@ -97,7 +98,7 @@ PacedPacketInfo::PacedPacketInfo(int probe_cluster_id, probe_cluster_min_bytes(probe_cluster_min_bytes) {} bool PacedPacketInfo::operator==(const PacedPacketInfo& rhs) const { - return send_bitrate_bps == rhs.send_bitrate_bps && + return send_bitrate == rhs.send_bitrate && probe_cluster_id == rhs.probe_cluster_id && probe_cluster_min_probes == rhs.probe_cluster_min_probes && probe_cluster_min_bytes == rhs.probe_cluster_min_bytes; diff --git a/api/transport/network_types.h b/api/transport/network_types.h index a62c350474..d229d77bfc 100644 --- a/api/transport/network_types.h +++ b/api/transport/network_types.h @@ -12,13 +12,16 @@ #define API_TRANSPORT_NETWORK_TYPES_H_ #include +#include +#include #include -#include "absl/types/optional.h" +#include "api/transport/ecn_marking.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -27,7 +30,7 @@ namespace webrtc { // Represents constraints and rates related to the currently enabled streams. // This is used as input to the congestion controller via the StreamsConfig // struct. -struct BitrateAllocationLimits { +struct RTC_EXPORT BitrateAllocationLimits { // The total minimum send bitrate required by all sending streams. DataRate min_allocatable_rate = DataRate::Zero(); // The total maximum allocatable bitrate for all currently available streams. @@ -40,40 +43,44 @@ struct BitrateAllocationLimits { // Use StreamsConfig for information about streams that is required for specific // adjustments to the algorithms in network controllers. Especially useful // for experiments. -struct StreamsConfig { +struct RTC_EXPORT StreamsConfig { StreamsConfig(); StreamsConfig(const StreamsConfig&); ~StreamsConfig(); Timestamp at_time = Timestamp::PlusInfinity(); - absl::optional requests_alr_probing; - absl::optional pacing_factor; + std::optional requests_alr_probing; + // If `enable_repeated_initial_probing` is set to true, Probes are sent + // periodically every 1s during the first 5s after the network becomes + // available. The probes ignores max_total_allocated_bitrate. + std::optional enable_repeated_initial_probing; + std::optional pacing_factor; // TODO(srte): Use BitrateAllocationLimits here. 
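As an illustrative aside to the `api/transport/network_control.h` and `goog_cc_factory.h` hunks above: `NetworkControllerConfig` is now constructed from an `Environment`, which supplies the field trials and event log that the removed `key_value_config`/`event_log` members used to carry. A rough sketch of an updated call site, assuming `CreateEnvironment()` from `api/environment/environment_factory.h` (not shown in this patch) is used to build the environment:

```cpp
#include <memory>

#include "api/environment/environment_factory.h"
#include "api/transport/goog_cc_factory.h"
#include "api/transport/network_control.h"
#include "api/units/data_rate.h"

// Sketch: create a GoogCC network controller with the Environment-based config.
std::unique_ptr<webrtc::NetworkControllerInterface> CreateGoogCcController() {
  webrtc::Environment env = webrtc::CreateEnvironment();
  webrtc::NetworkControllerConfig config(env);
  config.constraints.starting_rate = webrtc::DataRate::KilobitsPerSec(300);

  webrtc::GoogCcNetworkControllerFactory factory;
  return factory.Create(config);
}
```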
- absl::optional min_total_allocated_bitrate; - absl::optional max_padding_rate; - absl::optional max_total_allocated_bitrate; + std::optional min_total_allocated_bitrate; + std::optional max_padding_rate; + std::optional max_total_allocated_bitrate; }; -struct TargetRateConstraints { +struct RTC_EXPORT TargetRateConstraints { TargetRateConstraints(); TargetRateConstraints(const TargetRateConstraints&); ~TargetRateConstraints(); Timestamp at_time = Timestamp::PlusInfinity(); - absl::optional min_data_rate; - absl::optional max_data_rate; + std::optional min_data_rate; + std::optional max_data_rate; // The initial bandwidth estimate to base target rate on. This should be used // as the basis for initial OnTargetTransferRate and OnPacerConfig callbacks. - absl::optional starting_rate; + std::optional starting_rate; }; // Send side information -struct NetworkAvailability { +struct RTC_EXPORT NetworkAvailability { Timestamp at_time = Timestamp::PlusInfinity(); bool network_available = false; }; -struct NetworkRouteChange { +struct RTC_EXPORT NetworkRouteChange { NetworkRouteChange(); NetworkRouteChange(const NetworkRouteChange&); ~NetworkRouteChange(); @@ -83,7 +90,7 @@ struct NetworkRouteChange { TargetRateConstraints constraints; }; -struct PacedPacketInfo { +struct RTC_EXPORT PacedPacketInfo { PacedPacketInfo(); PacedPacketInfo(int probe_cluster_id, int probe_cluster_min_probes, @@ -93,14 +100,14 @@ struct PacedPacketInfo { // TODO(srte): Move probing info to a separate, optional struct. static constexpr int kNotAProbe = -1; - int send_bitrate_bps = -1; + DataRate send_bitrate = DataRate::BitsPerSec(0); int probe_cluster_id = kNotAProbe; int probe_cluster_min_probes = -1; int probe_cluster_min_bytes = -1; int probe_cluster_bytes_sent = 0; }; -struct SentPacket { +struct RTC_EXPORT SentPacket { Timestamp send_time = Timestamp::PlusInfinity(); // Size of packet with overhead up to IP layer. 
DataSize size = DataSize::Zero(); @@ -119,7 +126,7 @@ struct SentPacket { DataSize data_in_flight = DataSize::Zero(); }; -struct ReceivedPacket { +struct RTC_EXPORT ReceivedPacket { Timestamp send_time = Timestamp::MinusInfinity(); Timestamp receive_time = Timestamp::PlusInfinity(); DataSize size = DataSize::Zero(); @@ -127,18 +134,18 @@ struct ReceivedPacket { // Transport level feedback -struct RemoteBitrateReport { +struct RTC_EXPORT RemoteBitrateReport { Timestamp receive_time = Timestamp::PlusInfinity(); DataRate bandwidth = DataRate::Infinity(); }; -struct RoundTripTimeUpdate { +struct RTC_EXPORT RoundTripTimeUpdate { Timestamp receive_time = Timestamp::PlusInfinity(); TimeDelta round_trip_time = TimeDelta::PlusInfinity(); bool smoothed = false; }; -struct TransportLossReport { +struct RTC_EXPORT TransportLossReport { Timestamp receive_time = Timestamp::PlusInfinity(); Timestamp start_time = Timestamp::PlusInfinity(); Timestamp end_time = Timestamp::PlusInfinity(); @@ -148,7 +155,7 @@ struct TransportLossReport { // Packet level feedback -struct PacketResult { +struct RTC_EXPORT PacketResult { class ReceiveTimeOrder { public: bool operator()(const PacketResult& lhs, const PacketResult& rhs); @@ -162,17 +169,17 @@ struct PacketResult { SentPacket sent_packet; Timestamp receive_time = Timestamp::PlusInfinity(); + EcnMarking ecn = EcnMarking::kNotEct; }; -struct TransportPacketsFeedback { +struct RTC_EXPORT TransportPacketsFeedback { TransportPacketsFeedback(); TransportPacketsFeedback(const TransportPacketsFeedback& other); ~TransportPacketsFeedback(); Timestamp feedback_time = Timestamp::PlusInfinity(); - Timestamp first_unacked_send_time = Timestamp::PlusInfinity(); DataSize data_in_flight = DataSize::Zero(); - DataSize prior_in_flight = DataSize::Zero(); + bool transport_supports_ecn = false; std::vector packet_feedbacks; // Arrival times for messages without send time information. @@ -186,7 +193,7 @@ struct TransportPacketsFeedback { // Network estimation -struct NetworkEstimate { +struct RTC_EXPORT NetworkEstimate { Timestamp at_time = Timestamp::PlusInfinity(); // Deprecated, use TargetTransferRate::target_rate instead. DataRate bandwidth = DataRate::Infinity(); @@ -198,7 +205,7 @@ struct NetworkEstimate { // Network control -struct PacerConfig { +struct RTC_EXPORT PacerConfig { Timestamp at_time = Timestamp::PlusInfinity(); // Pacer should send at most data_window data over time_window duration. DataSize data_window = DataSize::Infinity(); @@ -209,15 +216,18 @@ struct PacerConfig { DataRate pad_rate() const { return pad_window / time_window; } }; -struct ProbeClusterConfig { +struct RTC_EXPORT ProbeClusterConfig { Timestamp at_time = Timestamp::PlusInfinity(); DataRate target_data_rate = DataRate::Zero(); + // Duration of a probe. TimeDelta target_duration = TimeDelta::Zero(); + // Delta time between sent bursts of packets during probe. + TimeDelta min_probe_delta = TimeDelta::Millis(2); int32_t target_probe_count = 0; int32_t id = 0; }; -struct TargetTransferRate { +struct RTC_EXPORT TargetTransferRate { Timestamp at_time = Timestamp::PlusInfinity(); // The estimate on which the target rate is based on. NetworkEstimate network_estimate; @@ -229,7 +239,7 @@ struct TargetTransferRate { // Contains updates of network controller comand state. Using optionals to // indicate whether a member has been updated. The array of probe clusters // should be used to send out probes if not empty. 
-struct NetworkControlUpdate { +struct RTC_EXPORT NetworkControlUpdate { NetworkControlUpdate(); NetworkControlUpdate(const NetworkControlUpdate&); ~NetworkControlUpdate(); @@ -239,20 +249,20 @@ struct NetworkControlUpdate { !probe_cluster_configs.empty() || target_rate.has_value(); } - absl::optional congestion_window; - absl::optional pacer_config; + std::optional congestion_window; + std::optional pacer_config; std::vector probe_cluster_configs; - absl::optional target_rate; + std::optional target_rate; }; // Process control -struct ProcessInterval { +struct RTC_EXPORT ProcessInterval { Timestamp at_time = Timestamp::PlusInfinity(); - absl::optional pacer_queue; + std::optional pacer_queue; }; // Under development, subject to change without notice. -struct NetworkStateEstimate { +struct RTC_EXPORT NetworkStateEstimate { double confidence = NAN; // The time the estimate was received/calculated. Timestamp update_time = Timestamp::MinusInfinity(); diff --git a/api/transport/rtp/BUILD.gn b/api/transport/rtp/BUILD.gn index 6f2a15e0e9..d4d5e4460c 100644 --- a/api/transport/rtp/BUILD.gn +++ b/api/transport/rtp/BUILD.gn @@ -17,7 +17,6 @@ rtc_source_set("rtp_source") { "../../../api/units:timestamp", "../../../rtc_base:checks", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("dependency_descriptor") { @@ -29,10 +28,27 @@ rtc_source_set("dependency_descriptor") { deps = [ "../../../rtc_base:checks", "../../video:render_resolution", + "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ +} + +rtc_source_set("corruption_detection_message") { + visibility = [ "*" ] + sources = [ "corruption_detection_message.h" ] + deps = [ + "../../../api:array_view", "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } + +if (rtc_include_tests && !build_with_chromium) { + rtc_library("corruption_detection_message_unittest") { + testonly = true + sources = [ "corruption_detection_message_unittest.cc" ] + deps = [ + ":corruption_detection_message", + "../../../test:test_support", + ] + } +} diff --git a/api/transport/rtp/corruption_detection_message.h b/api/transport/rtp/corruption_detection_message.h new file mode 100644 index 0000000000..4364c4bf29 --- /dev/null +++ b/api/transport/rtp/corruption_detection_message.h @@ -0,0 +1,153 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TRANSPORT_RTP_CORRUPTION_DETECTION_MESSAGE_H_ +#define API_TRANSPORT_RTP_CORRUPTION_DETECTION_MESSAGE_H_ + +#include +#include + +#include "absl/container/inlined_vector.h" +#include "api/array_view.h" + +namespace webrtc { + +class CorruptionDetectionMessage { + public: + class Builder; + + CorruptionDetectionMessage() = default; + + CorruptionDetectionMessage(const CorruptionDetectionMessage&) = default; + CorruptionDetectionMessage& operator=(const CorruptionDetectionMessage&) = + default; + + ~CorruptionDetectionMessage() = default; + + int sequence_index() const { return sequence_index_; } + bool interpret_sequence_index_as_most_significant_bits() const { + return interpret_sequence_index_as_most_significant_bits_; + } + double std_dev() const { return std_dev_; } + int luma_error_threshold() const { return luma_error_threshold_; } + int chroma_error_threshold() const { return chroma_error_threshold_; } + ArrayView sample_values() const { + return MakeArrayView(sample_values_.data(), sample_values_.size()); + } + + private: + friend class CorruptionDetectionExtension; + + static const size_t kMaxSampleSize = 13; + + // Sequence index in the Halton sequence. + // Valid values: [0, 2^7-1] + int sequence_index_ = 0; + + // Whether to interpret the `sequence_index_` as the most significant bits of + // the true sequence index. + bool interpret_sequence_index_as_most_significant_bits_ = false; + + // Standard deviation of the Gaussian filter kernel. + // Valid values: [0, 40.0] + double std_dev_ = 0.0; + + // Corruption threshold for the luma layer. + // Valid values: [0, 2^4 - 1] + int luma_error_threshold_ = 0; + + // Corruption threshold for the chroma layer. + // Valid values: [0, 2^4 - 1] + int chroma_error_threshold_ = 0; + + // An ordered list of samples that are the result of applying the Gaussian + // filter on the image. The coordinates of the samples and their layer are + // determined by the Halton sequence. + // An empty list should be interpreted as a way to keep the `sequence_index` + // in sync. 
+ absl::InlinedVector sample_values_; +}; + +class CorruptionDetectionMessage::Builder { + public: + Builder() = default; + + Builder(const Builder&) = default; + Builder& operator=(const Builder&) = default; + + ~Builder() = default; + + std::optional Build() { + if (message_.sequence_index_ < 0 || + message_.sequence_index_ > 0b0111'1111) { + return std::nullopt; + } + if (message_.std_dev_ < 0.0 || message_.std_dev_ > 40.0) { + return std::nullopt; + } + if (message_.luma_error_threshold_ < 0 || + message_.luma_error_threshold_ > 15) { + return std::nullopt; + } + if (message_.chroma_error_threshold_ < 0 || + message_.chroma_error_threshold_ > 15) { + return std::nullopt; + } + if (message_.sample_values_.size() > kMaxSampleSize) { + return std::nullopt; + } + for (double sample_value : message_.sample_values_) { + if (sample_value < 0.0 || sample_value > 255.0) { + return std::nullopt; + } + } + return message_; + } + + Builder& WithSequenceIndex(int sequence_index) { + message_.sequence_index_ = sequence_index; + return *this; + } + + Builder& WithInterpretSequenceIndexAsMostSignificantBits( + bool interpret_sequence_index_as_most_significant_bits) { + message_.interpret_sequence_index_as_most_significant_bits_ = + interpret_sequence_index_as_most_significant_bits; + return *this; + } + + Builder& WithStdDev(double std_dev) { + message_.std_dev_ = std_dev; + return *this; + } + + Builder& WithLumaErrorThreshold(int luma_error_threshold) { + message_.luma_error_threshold_ = luma_error_threshold; + return *this; + } + + Builder& WithChromaErrorThreshold(int chroma_error_threshold) { + message_.chroma_error_threshold_ = chroma_error_threshold; + return *this; + } + + Builder& WithSampleValues(const ArrayView& sample_values) { + message_.sample_values_.assign(sample_values.cbegin(), + sample_values.cend()); + return *this; + } + + private: + CorruptionDetectionMessage message_; +}; + +} // namespace webrtc + +#endif // API_TRANSPORT_RTP_CORRUPTION_DETECTION_MESSAGE_H_ diff --git a/api/transport/rtp/corruption_detection_message_unittest.cc b/api/transport/rtp/corruption_detection_message_unittest.cc new file mode 100644 index 0000000000..1da692534a --- /dev/null +++ b/api/transport/rtp/corruption_detection_message_unittest.cc @@ -0,0 +1,124 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/transport/rtp/corruption_detection_message.h" + +#include +#include + +#include "test/gtest.h" + +namespace webrtc { +namespace { + +TEST(CorruptionDetectionMessageTest, FailsToCreateWhenSequenceIndexIsTooLarge) { + EXPECT_EQ(CorruptionDetectionMessage::Builder() + .WithSequenceIndex(0b1000'0000) + .Build(), + std::nullopt); +} + +TEST(CorruptionDetectionMessageTest, FailsToCreateWhenSequenceIndexIsTooSmall) { + EXPECT_EQ(CorruptionDetectionMessage::Builder().WithSequenceIndex(-1).Build(), + std::nullopt); +} + +TEST(CorruptionDetectionMessageTest, FailsToCreateWhenStddevIsTooLarge) { + EXPECT_EQ(CorruptionDetectionMessage::Builder().WithStdDev(45.0).Build(), + std::nullopt); +} + +TEST(CorruptionDetectionMessageTest, FailsToCreateWhenStddevIsTooSmall) { + EXPECT_EQ(CorruptionDetectionMessage::Builder().WithStdDev(-1.0).Build(), + std::nullopt); +} + +TEST(CorruptionDetectionMessageTest, + FailsToCreateWhenLumaErrorThresholdIsTooLarge) { + EXPECT_EQ( + CorruptionDetectionMessage::Builder().WithLumaErrorThreshold(16).Build(), + std::nullopt); +} + +TEST(CorruptionDetectionMessageTest, + FailsToCreateWhenLumaErrorThresholdIsTooSmall) { + EXPECT_EQ( + CorruptionDetectionMessage::Builder().WithLumaErrorThreshold(-1).Build(), + std::nullopt); +} + +TEST(CorruptionDetectionMessageTest, + FailsToCreateWhenChromaErrorThresholdIsTooLarge) { + EXPECT_EQ(CorruptionDetectionMessage::Builder() + .WithChromaErrorThreshold(16) + .Build(), + std::nullopt); +} + +TEST(CorruptionDetectionMessageTest, + FailsToCreateWhenChromaErrorThresholdIsTooSmall) { + EXPECT_EQ(CorruptionDetectionMessage::Builder() + .WithChromaErrorThreshold(-1) + .Build(), + std::nullopt); +} + +TEST(CorruptionDetectionMessageTest, + FailsToCreateWhenTooManySamplesAreSpecified) { + const std::vector kSampleValues = {1.0, 2.0, 3.0, 4.0, 5.0, + 6.0, 7.0, 8.0, 9.0, 10.0, + 11.0, 12.0, 13.0, 14.0}; + + EXPECT_EQ(CorruptionDetectionMessage::Builder() + .WithSampleValues(kSampleValues) + .Build(), + std::nullopt); +} + +TEST(CorruptionDetectionMessageTest, FailsToCreateWhenSampleValueIsTooLarge) { + const std::vector kSampleValues = {255.1}; + + EXPECT_EQ(CorruptionDetectionMessage::Builder() + .WithSampleValues(kSampleValues) + .Build(), + std::nullopt); +} + +TEST(CorruptionDetectionMessageTest, FailsToCreateWhenSampleValueIsTooSmall) { + const std::vector kSampleValues = {-0.1}; + + EXPECT_EQ(CorruptionDetectionMessage::Builder() + .WithSampleValues(kSampleValues) + .Build(), + std::nullopt); +} + +TEST(CorruptionDetectionMessageTest, + CreatesDefaultWhenNoParametersAreSpecified) { + EXPECT_NE(CorruptionDetectionMessage::Builder().Build(), std::nullopt); +} + +TEST(CorruptionDetectionMessageTest, CreatesWhenValidParametersAreSpecified) { + const std::vector kSampleValues = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, + 7.0, 8.0, 9.0, 10.0, 11.0, 12.0}; + + EXPECT_NE(CorruptionDetectionMessage::Builder() + .WithSequenceIndex(0b0111'1111) + .WithInterpretSequenceIndexAsMostSignificantBits(true) + .WithStdDev(40.0) + .WithLumaErrorThreshold(15) + .WithChromaErrorThreshold(15) + .WithSampleValues(kSampleValues) + .Build(), + std::nullopt); +} + +} // namespace +} // namespace webrtc diff --git a/api/transport/rtp/dependency_descriptor.h b/api/transport/rtp/dependency_descriptor.h index 0db600918e..2ff5ed89e9 100644 --- a/api/transport/rtp/dependency_descriptor.h +++ b/api/transport/rtp/dependency_descriptor.h @@ -15,11 +15,11 @@ #include #include +#include #include #include "absl/container/inlined_vector.h" #include 
"absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/video/render_resolution.h" namespace webrtc { @@ -78,6 +78,27 @@ struct FrameDependencyStructure { std::vector templates; }; +class DependencyDescriptorMandatory { + public: + void set_frame_number(int frame_number) { frame_number_ = frame_number; } + int frame_number() const { return frame_number_; } + + void set_template_id(int template_id) { template_id_ = template_id; } + int template_id() const { return template_id_; } + + void set_first_packet_in_frame(bool first) { first_packet_in_frame_ = first; } + bool first_packet_in_frame() const { return first_packet_in_frame_; } + + void set_last_packet_in_frame(bool last) { last_packet_in_frame_ = last; } + bool last_packet_in_frame() const { return last_packet_in_frame_; } + + private: + int frame_number_; + int template_id_; + bool first_packet_in_frame_; + bool last_packet_in_frame_; +}; + struct DependencyDescriptor { static constexpr int kMaxSpatialIds = 4; static constexpr int kMaxTemporalIds = 8; @@ -88,8 +109,8 @@ struct DependencyDescriptor { bool last_packet_in_frame = true; int frame_number = 0; FrameDependencyTemplate frame_dependencies; - absl::optional resolution; - absl::optional active_decode_targets_bitmask; + std::optional resolution; + std::optional active_decode_targets_bitmask; std::unique_ptr attached_structure; }; diff --git a/api/transport/rtp/rtp_source.h b/api/transport/rtp/rtp_source.h index 41a0552db0..4732044be2 100644 --- a/api/transport/rtp/rtp_source.h +++ b/api/transport/rtp/rtp_source.h @@ -13,11 +13,11 @@ #include -#include "absl/types/optional.h" +#include + #include "api/rtp_headers.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "rtc_base/checks.h" namespace webrtc { @@ -29,18 +29,18 @@ enum class RtpSourceType { class RtpSource { public: struct Extensions { - absl::optional audio_level; + std::optional audio_level; // Fields from the Absolute Capture Time header extension: // http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time - absl::optional absolute_capture_time; + std::optional absolute_capture_time; // Clock offset between the local clock and the capturer's clock. // Do not confuse with `AbsoluteCaptureTime::estimated_capture_clock_offset` // which instead represents the clock offset between a remote sender and the // capturer. 
The following holds: // Capture's NTP Clock = Local NTP Clock + Local-Capture Clock Offset - absl::optional local_capture_clock_offset; + std::optional local_capture_clock_offset; }; RtpSource() = delete; @@ -56,54 +56,31 @@ class RtpSource { extensions_(extensions), rtp_timestamp_(rtp_timestamp) {} - // TODO(bugs.webrtc.org/13757): deprecate when chromium stop using this - // and remove after 2023-09-18 - RtpSource(int64_t timestamp_ms, - uint32_t source_id, - RtpSourceType source_type, - uint32_t rtp_timestamp, - const RtpSource::Extensions& extensions) - : timestamp_(Timestamp::Millis(timestamp_ms)), - source_id_(source_id), - source_type_(source_type), - extensions_(extensions), - rtp_timestamp_(rtp_timestamp) {} - RtpSource(const RtpSource&) = default; RtpSource& operator=(const RtpSource&) = default; ~RtpSource() = default; Timestamp timestamp() const { return timestamp_; } - // TODO(bugs.webrtc.org/13757): deprecate when chromium stop using this - // and remove after 2023-09-18 - int64_t timestamp_ms() const { return timestamp_.ms(); } - [[deprecated]] void update_timestamp_ms(int64_t timestamp_ms) { - RTC_DCHECK_LE(timestamp_.ms(), timestamp_ms); - timestamp_ = Timestamp::Millis(timestamp_ms); - } - // The identifier of the source can be the CSRC or the SSRC. uint32_t source_id() const { return source_id_; } // The source can be either a contributing source or a synchronization source. RtpSourceType source_type() const { return source_type_; } - absl::optional audio_level() const { - return extensions_.audio_level; - } + std::optional audio_level() const { return extensions_.audio_level; } - void set_audio_level(const absl::optional& level) { + void set_audio_level(const std::optional& level) { extensions_.audio_level = level; } uint32_t rtp_timestamp() const { return rtp_timestamp_; } - absl::optional absolute_capture_time() const { + std::optional absolute_capture_time() const { return extensions_.absolute_capture_time; } - absl::optional local_capture_clock_offset() const { + std::optional local_capture_clock_offset() const { return extensions_.local_capture_clock_offset; } diff --git a/api/transport/sctp_transport_factory_interface.h b/api/transport/sctp_transport_factory_interface.h index 4fc8af5bad..d624994184 100644 --- a/api/transport/sctp_transport_factory_interface.h +++ b/api/transport/sctp_transport_factory_interface.h @@ -13,28 +13,27 @@ #include -// These classes are not part of the API, and are treated as opaque pointers. -namespace cricket { -class SctpTransportInternal; -} // namespace cricket +#include "api/environment/environment.h" -namespace rtc { -class PacketTransportInternal; -} // namespace rtc +// These classes are not part of the API, and are treated as opaque pointers. namespace webrtc { +class DtlsTransportInternal; +class SctpTransportInternal; + // Factory class which can be used to allow fake SctpTransports to be injected // for testing. An application is not intended to implement this interface nor -// 'cricket::SctpTransportInternal' because SctpTransportInternal is not +// 'webrtc::SctpTransportInternal' because SctpTransportInternal is not // guaranteed to remain stable in future WebRTC versions. class SctpTransportFactoryInterface { public: virtual ~SctpTransportFactoryInterface() = default; // Create an SCTP transport using `channel` for the underlying transport. 
- virtual std::unique_ptr CreateSctpTransport( - rtc::PacketTransportInternal* channel) = 0; + virtual std::unique_ptr CreateSctpTransport( + const Environment& env, + DtlsTransportInternal* channel) = 0; }; } // namespace webrtc diff --git a/api/transport/stun.cc b/api/transport/stun.cc index 35a65fd8e8..dd7c142829 100644 --- a/api/transport/stun.cc +++ b/api/transport/stun.cc @@ -12,24 +12,34 @@ #include -#include +#include // IWYU pragma: keep #include +#include +#include #include #include +#include #include +#include +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "rtc_base/byte_buffer.h" #include "rtc_base/byte_order.h" #include "rtc_base/checks.h" #include "rtc_base/crc32.h" -#include "rtc_base/helpers.h" +#include "rtc_base/crypto_random.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/message_digest.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/socket_address.h" #include "system_wrappers/include/metrics.h" -using rtc::ByteBufferReader; -using rtc::ByteBufferWriter; +using ::webrtc::ByteBufferReader; +using ::webrtc::ByteBufferWriter; -namespace cricket { +namespace webrtc { namespace { @@ -38,10 +48,12 @@ const int kMessageIntegrityAttributeLength = 20; const int kTheoreticalMaximumAttributeLength = 65535; uint32_t ReduceTransactionId(absl::string_view transaction_id) { - RTC_DCHECK(transaction_id.length() == cricket::kStunTransactionIdLength || - transaction_id.length() == cricket::kStunLegacyTransactionIdLength) + RTC_DCHECK(transaction_id.length() == kStunTransactionIdLength || + transaction_id.length() == kStunLegacyTransactionIdLength) << transaction_id.length(); - ByteBufferReader reader(transaction_id.data(), transaction_id.size()); + ByteBufferReader reader( + MakeArrayView(reinterpret_cast(transaction_id.data()), + transaction_id.size())); uint32_t result = 0; uint32_t next; while (reader.ReadUInt32(&next)) { @@ -97,7 +109,6 @@ const char STUN_ERROR_REASON_UNSUPPORTED_PROTOCOL[] = "Unsupported Protocol"; const char STUN_ERROR_REASON_ROLE_CONFLICT[] = "Role Conflict"; const char STUN_ERROR_REASON_SERVER_ERROR[] = "Server Error"; -const char TURN_MAGIC_COOKIE_VALUE[] = {'\x72', '\xC6', '\x4B', '\xC6'}; const char EMPTY_TRANSACTION_ID[] = "0000000000000000"; const uint32_t STUN_FINGERPRINT_XOR_VALUE = 0x5354554E; const int SERVER_NOT_REACHABLE_ERROR = 701; @@ -355,24 +366,6 @@ bool StunMessage::ValidateMessageIntegrity32ForTesting( password); } -// Deprecated -bool StunMessage::ValidateMessageIntegrity(const char* data, - size_t size, - const std::string& password) { - return ValidateMessageIntegrityOfType(STUN_ATTR_MESSAGE_INTEGRITY, - kStunMessageIntegritySize, data, size, - password); -} - -// Deprecated -bool StunMessage::ValidateMessageIntegrity32(const char* data, - size_t size, - const std::string& password) { - return ValidateMessageIntegrityOfType(STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32, - kStunMessageIntegrity32Size, data, size, - password); -} - // Verifies a STUN message has a valid MESSAGE-INTEGRITY attribute, using the // procedure outlined in RFC 5389, section 15.4. bool StunMessage::ValidateMessageIntegrityOfType(int mi_attr_type, @@ -388,7 +381,7 @@ bool StunMessage::ValidateMessageIntegrityOfType(int mi_attr_type, } // Getting the message length from the STUN header. 
- uint16_t msg_length = rtc::GetBE16(&data[2]); + uint16_t msg_length = webrtc::GetBE16(&data[2]); if (size != (msg_length + kStunHeaderSize)) { return false; } @@ -399,8 +392,8 @@ bool StunMessage::ValidateMessageIntegrityOfType(int mi_attr_type, while (current_pos + 4 <= size) { uint16_t attr_type, attr_length; // Getting attribute type and length. - attr_type = rtc::GetBE16(&data[current_pos]); - attr_length = rtc::GetBE16(&data[current_pos + sizeof(attr_type)]); + attr_type = webrtc::GetBE16(&data[current_pos]); + attr_length = webrtc::GetBE16(&data[current_pos + sizeof(attr_type)]); // If M-I, sanity check it, and break out. if (attr_type == mi_attr_type) { @@ -441,13 +434,14 @@ bool StunMessage::ValidateMessageIntegrityOfType(int mi_attr_type, // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // |0 0| STUN Message Type | Message Length | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ - rtc::SetBE16(temp_data.get() + 2, static_cast(new_adjusted_len)); + webrtc::SetBE16(temp_data.get() + 2, + static_cast(new_adjusted_len)); } char hmac[kStunMessageIntegritySize]; - size_t ret = - rtc::ComputeHmac(rtc::DIGEST_SHA_1, password.c_str(), password.size(), - temp_data.get(), mi_pos, hmac, sizeof(hmac)); + size_t ret = webrtc::ComputeHmac(webrtc::DIGEST_SHA_1, password.c_str(), + password.size(), temp_data.get(), mi_pos, + hmac, sizeof(hmac)); RTC_DCHECK(ret == sizeof(hmac)); if (ret != sizeof(hmac)) { return false; @@ -488,8 +482,8 @@ bool StunMessage::AddMessageIntegrityOfType(int attr_type, buf.Length() - kStunAttributeHeaderSize - msg_integrity_attr->length()); char hmac[kStunMessageIntegritySize]; size_t ret = - rtc::ComputeHmac(rtc::DIGEST_SHA_1, key.data(), key.size(), buf.Data(), - msg_len_for_hmac, hmac, sizeof(hmac)); + webrtc::ComputeHmac(webrtc::DIGEST_SHA_1, key.data(), key.size(), + buf.Data(), msg_len_for_hmac, hmac, sizeof(hmac)); RTC_DCHECK(ret == sizeof(hmac)); if (ret != sizeof(hmac)) { RTC_LOG(LS_ERROR) << "HMAC computation failed. Message-Integrity " @@ -517,29 +511,29 @@ bool StunMessage::ValidateFingerprint(const char* data, size_t size) { // Skip the rest if the magic cookie isn't present. const char* magic_cookie = data + kStunTransactionIdOffset - kStunMagicCookieLength; - if (rtc::GetBE32(magic_cookie) != kStunMagicCookie) + if (webrtc::GetBE32(magic_cookie) != kStunMagicCookie) return false; // Check the fingerprint type and length. const char* fingerprint_attr_data = data + size - fingerprint_attr_size; - if (rtc::GetBE16(fingerprint_attr_data) != STUN_ATTR_FINGERPRINT || - rtc::GetBE16(fingerprint_attr_data + sizeof(uint16_t)) != + if (webrtc::GetBE16(fingerprint_attr_data) != STUN_ATTR_FINGERPRINT || + webrtc::GetBE16(fingerprint_attr_data + sizeof(uint16_t)) != StunUInt32Attribute::SIZE) return false; // Check the fingerprint value. uint32_t fingerprint = - rtc::GetBE32(fingerprint_attr_data + kStunAttributeHeaderSize); + webrtc::GetBE32(fingerprint_attr_data + kStunAttributeHeaderSize); return ((fingerprint ^ STUN_FINGERPRINT_XOR_VALUE) == - rtc::ComputeCrc32(data, size - fingerprint_attr_size)); + webrtc::ComputeCrc32(data, size - fingerprint_attr_size)); } // static std::string StunMessage::GenerateTransactionId() { - return rtc::CreateRandomString(kStunTransactionIdLength); + return webrtc::CreateRandomString(kStunTransactionIdLength); } -bool StunMessage::IsStunMethod(rtc::ArrayView methods, +bool StunMessage::IsStunMethod(ArrayView methods, const char* data, size_t size) { // Check the message length. 
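For reference, the integrity, fingerprint, and method checks in the hunks above all start from the same fixed 20-byte STUN header: a 16-bit type, a 16-bit big-endian length that excludes the header, the 0x2112A442 magic cookie, and a 96-bit transaction id. The following is a minimal standalone sketch of that header parse, assuming plain big-endian reads instead of the rtc_base byte-order helpers used in the patch; it is an illustration, not the WebRTC API.

// Standalone illustration of the header fields that the message-integrity,
// fingerprint, and IsStunMethod code above reads; helper names are ours.
#include <cstddef>
#include <cstdint>
#include <optional>

namespace {

constexpr size_t kHeaderSize = 20;
constexpr uint32_t kMagicCookie = 0x2112A442;

uint16_t ReadBE16(const uint8_t* p) {
  return static_cast<uint16_t>(p[0]) << 8 | p[1];
}

uint32_t ReadBE32(const uint8_t* p) {
  return static_cast<uint32_t>(p[0]) << 24 | static_cast<uint32_t>(p[1]) << 16 |
         static_cast<uint32_t>(p[2]) << 8 | p[3];
}

struct StunHeader {
  uint16_t type;
  uint16_t length;  // Attribute bytes following the 20-byte header.
};

std::optional<StunHeader> ParseHeader(const uint8_t* data, size_t size) {
  if (size < kHeaderSize || ReadBE32(data + 4) != kMagicCookie) {
    return std::nullopt;  // Too short, or not an RFC 5389 STUN packet.
  }
  StunHeader header = {ReadBE16(data), ReadBE16(data + 2)};
  if (size != kHeaderSize + header.length) {
    return std::nullopt;  // The length field must cover the whole packet.
  }
  return header;
}

}  // namespace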
@@ -549,10 +543,10 @@ bool StunMessage::IsStunMethod(rtc::ArrayView methods, // Skip the rest if the magic cookie isn't present. const char* magic_cookie = data + kStunTransactionIdOffset - kStunMagicCookieLength; - if (rtc::GetBE32(magic_cookie) != kStunMagicCookie) + if (webrtc::GetBE32(magic_cookie) != kStunMagicCookie) return false; - int method = rtc::GetBE16(data); + int method = webrtc::GetBE16(data); for (int m : methods) { if (m == method) { return true; @@ -576,7 +570,7 @@ bool StunMessage::AddFingerprint() { int msg_len_for_crc32 = static_cast( buf.Length() - kStunAttributeHeaderSize - fingerprint_attr->length()); - uint32_t c = rtc::ComputeCrc32(buf.Data(), msg_len_for_crc32); + uint32_t c = webrtc::ComputeCrc32(buf.Data(), msg_len_for_crc32); // Insert the correct CRC-32, XORed with a constant, into the attribute. fingerprint_attr->SetValue(c ^ STUN_FINGERPRINT_XOR_VALUE); @@ -585,7 +579,7 @@ bool StunMessage::AddFingerprint() { bool StunMessage::Read(ByteBufferReader* buf) { // Keep a copy of the buffer data around for later verification. - buffer_.assign(buf->Data(), buf->Length()); + buffer_.assign(reinterpret_cast(buf->Data()), buf->Length()); if (!buf->ReadUInt16(&type_)) { return false; @@ -601,8 +595,8 @@ bool StunMessage::Read(ByteBufferReader* buf) { return false; } - std::string magic_cookie; - if (!buf->ReadString(&magic_cookie, kStunMagicCookieLength)) { + absl::string_view magic_cookie; + if (!buf->ReadStringView(&magic_cookie, kStunMagicCookieLength)) { return false; } @@ -615,7 +609,7 @@ bool StunMessage::Read(ByteBufferReader* buf) { static_assert(sizeof(magic_cookie_int) == kStunMagicCookieLength, "Integer size mismatch: magic_cookie_int and kStunMagicCookie"); std::memcpy(&magic_cookie_int, magic_cookie.data(), sizeof(magic_cookie_int)); - if (rtc::NetworkToHost32(magic_cookie_int) != kStunMagicCookie) { + if (webrtc::NetworkToHost32(magic_cookie_int) != kStunMagicCookie) { // If magic cookie is invalid it means that the peer implements // RFC3489 instead of RFC5389. 
transaction_id.insert(0, magic_cookie); @@ -763,8 +757,8 @@ bool StunMessage::EqualAttributes( const StunMessage* other, std::function attribute_type_mask) const { RTC_DCHECK(other != nullptr); - rtc::ByteBufferWriter tmp_buffer_ptr1; - rtc::ByteBufferWriter tmp_buffer_ptr2; + ByteBufferWriter tmp_buffer_ptr1; + ByteBufferWriter tmp_buffer_ptr2; for (const auto& attr : attrs_) { if (attribute_type_mask(attr->type())) { const StunAttribute* other_attr = other->GetAttribute(attr->type()); @@ -812,8 +806,8 @@ void StunAttribute::ConsumePadding(ByteBufferReader* buf) const { void StunAttribute::WritePadding(ByteBufferWriter* buf) const { int remainder = length_ % 4; if (remainder > 0) { - char zeroes[4] = {0}; - buf->WriteBytes(zeroes, 4 - remainder); + uint8_t zeroes[4] = {0}; + buf->Write(ArrayView(zeroes, 4 - remainder)); } } @@ -883,7 +877,7 @@ StunAttribute::CreateUnknownAttributes() { } StunAddressAttribute::StunAddressAttribute(uint16_t type, - const rtc::SocketAddress& addr) + const SocketAddress& addr) : StunAttribute(type, 0) { SetAddress(addr); } @@ -912,21 +906,23 @@ bool StunAddressAttribute::Read(ByteBufferReader* buf) { if (length() != SIZE_IP4) { return false; } - if (!buf->ReadBytes(reinterpret_cast(&v4addr), sizeof(v4addr))) { + if (!buf->ReadBytes(MakeArrayView(reinterpret_cast(&v4addr), + sizeof(v4addr)))) { return false; } - rtc::IPAddress ipaddr(v4addr); - SetAddress(rtc::SocketAddress(ipaddr, port)); + IPAddress ipaddr(v4addr); + SetAddress(SocketAddress(ipaddr, port)); } else if (stun_family == STUN_ADDRESS_IPV6) { in6_addr v6addr; if (length() != SIZE_IP6) { return false; } - if (!buf->ReadBytes(reinterpret_cast(&v6addr), sizeof(v6addr))) { + if (!buf->ReadBytes(MakeArrayView(reinterpret_cast(&v6addr), + sizeof(v6addr)))) { return false; } - rtc::IPAddress ipaddr(v6addr); - SetAddress(rtc::SocketAddress(ipaddr, port)); + IPAddress ipaddr(v6addr); + SetAddress(SocketAddress(ipaddr, port)); } else { return false; } @@ -945,12 +941,14 @@ bool StunAddressAttribute::Write(ByteBufferWriter* buf) const { switch (address_.family()) { case AF_INET: { in_addr v4addr = address_.ipaddr().ipv4_address(); - buf->WriteBytes(reinterpret_cast(&v4addr), sizeof(v4addr)); + buf->Write(ArrayView(reinterpret_cast(&v4addr), + sizeof(v4addr))); break; } case AF_INET6: { in6_addr v6addr = address_.ipaddr().ipv6_address(); - buf->WriteBytes(reinterpret_cast(&v6addr), sizeof(v6addr)); + buf->Write(ArrayView(reinterpret_cast(&v6addr), + sizeof(v6addr))); break; } } @@ -958,7 +956,7 @@ bool StunAddressAttribute::Write(ByteBufferWriter* buf) const { } StunXorAddressAttribute::StunXorAddressAttribute(uint16_t type, - const rtc::SocketAddress& addr) + const SocketAddress& addr) : StunAddressAttribute(type, addr), owner_(NULL) {} StunXorAddressAttribute::StunXorAddressAttribute(uint16_t type, @@ -974,15 +972,15 @@ void StunXorAddressAttribute::SetOwner(StunMessage* owner) { owner_ = owner; } -rtc::IPAddress StunXorAddressAttribute::GetXoredIP() const { +IPAddress StunXorAddressAttribute::GetXoredIP() const { if (owner_) { - rtc::IPAddress ip = ipaddr(); + IPAddress ip = ipaddr(); switch (ip.family()) { case AF_INET: { in_addr v4addr = ip.ipv4_address(); v4addr.s_addr = - (v4addr.s_addr ^ rtc::HostToNetwork32(kStunMagicCookie)); - return rtc::IPAddress(v4addr); + (v4addr.s_addr ^ webrtc::HostToNetwork32(kStunMagicCookie)); + return IPAddress(v4addr); } case AF_INET6: { in6_addr v6addr = ip.ipv6_address(); @@ -995,11 +993,11 @@ rtc::IPAddress StunXorAddressAttribute::GetXoredIP() const { // Transaction 
ID is in network byte order, but magic cookie // is stored in host byte order. ip_as_ints[0] = - (ip_as_ints[0] ^ rtc::HostToNetwork32(kStunMagicCookie)); + (ip_as_ints[0] ^ webrtc::HostToNetwork32(kStunMagicCookie)); ip_as_ints[1] = (ip_as_ints[1] ^ transactionid_as_ints[0]); ip_as_ints[2] = (ip_as_ints[2] ^ transactionid_as_ints[1]); ip_as_ints[3] = (ip_as_ints[3] ^ transactionid_as_ints[2]); - return rtc::IPAddress(v6addr); + return IPAddress(v6addr); } break; } @@ -1007,15 +1005,15 @@ rtc::IPAddress StunXorAddressAttribute::GetXoredIP() const { } // Invalid ip family or transaction ID, or missing owner. // Return an AF_UNSPEC address. - return rtc::IPAddress(); + return IPAddress(); } bool StunXorAddressAttribute::Read(ByteBufferReader* buf) { if (!StunAddressAttribute::Read(buf)) return false; uint16_t xoredport = port() ^ (kStunMagicCookie >> 16); - rtc::IPAddress xored_ip = GetXoredIP(); - SetAddress(rtc::SocketAddress(xored_ip, xoredport)); + IPAddress xored_ip = GetXoredIP(); + SetAddress(SocketAddress(xored_ip, xoredport)); return true; } @@ -1025,7 +1023,7 @@ bool StunXorAddressAttribute::Write(ByteBufferWriter* buf) const { RTC_LOG(LS_ERROR) << "Error writing xor-address attribute: unknown family."; return false; } - rtc::IPAddress xored_ip = GetXoredIP(); + IPAddress xored_ip = GetXoredIP(); if (xored_ip.family() == AF_UNSPEC) { return false; } @@ -1035,12 +1033,14 @@ bool StunXorAddressAttribute::Write(ByteBufferWriter* buf) const { switch (xored_ip.family()) { case AF_INET: { in_addr v4addr = xored_ip.ipv4_address(); - buf->WriteBytes(reinterpret_cast(&v4addr), sizeof(v4addr)); + buf->Write(ArrayView( + reinterpret_cast(&v4addr), sizeof(v4addr))); break; } case AF_INET6: { in6_addr v6addr = xored_ip.ipv6_address(); - buf->WriteBytes(reinterpret_cast(&v6addr), sizeof(v6addr)); + buf->Write(ArrayView( + reinterpret_cast(&v6addr), sizeof(v6addr))); break; } } @@ -1128,13 +1128,13 @@ StunAttributeValueType StunByteStringAttribute::value_type() const { } void StunByteStringAttribute::CopyBytes(absl::string_view bytes) { - char* new_bytes = new char[bytes.size()]; + uint8_t* new_bytes = new uint8_t[bytes.size()]; memcpy(new_bytes, bytes.data(), bytes.size()); SetBytes(new_bytes, bytes.size()); } void StunByteStringAttribute::CopyBytes(const void* bytes, size_t length) { - char* new_bytes = new char[length]; + uint8_t* new_bytes = new uint8_t[length]; memcpy(new_bytes, bytes, length); SetBytes(new_bytes, length); } @@ -1142,7 +1142,7 @@ void StunByteStringAttribute::CopyBytes(const void* bytes, size_t length) { uint8_t StunByteStringAttribute::GetByte(size_t index) const { RTC_DCHECK(bytes_ != NULL); RTC_DCHECK(index < length()); - return static_cast(bytes_[index]); + return bytes_[index]; } void StunByteStringAttribute::SetByte(size_t index, uint8_t value) { @@ -1152,8 +1152,8 @@ void StunByteStringAttribute::SetByte(size_t index, uint8_t value) { } bool StunByteStringAttribute::Read(ByteBufferReader* buf) { - bytes_ = new char[length()]; - if (!buf->ReadBytes(bytes_, length())) { + bytes_ = new uint8_t[length()]; + if (!buf->ReadBytes(ArrayView(bytes_, length()))) { return false; } @@ -1166,12 +1166,12 @@ bool StunByteStringAttribute::Write(ByteBufferWriter* buf) const { if (!LengthValid(type(), length())) { return false; } - buf->WriteBytes(bytes_, length()); + buf->Write(ArrayView(bytes_, length())); WritePadding(buf); return true; } -void StunByteStringAttribute::SetBytes(char* bytes, size_t length) { +void StunByteStringAttribute::SetBytes(uint8_t* bytes, size_t length) { 
delete[] bytes_; bytes_ = bytes; SetLength(static_cast(length)); @@ -1387,9 +1387,9 @@ bool ComputeStunCredentialHash(const std::string& username, input += ':'; input += password; - char digest[rtc::MessageDigest::kMaxSize]; - size_t size = rtc::ComputeDigest(rtc::DIGEST_MD5, input.c_str(), input.size(), - digest, sizeof(digest)); + char digest[MessageDigest::kMaxSize]; + size_t size = webrtc::ComputeDigest(webrtc::DIGEST_MD5, input.c_str(), + input.size(), digest, sizeof(digest)); if (size == 0) { return false; } @@ -1400,7 +1400,7 @@ bool ComputeStunCredentialHash(const std::string& username, std::unique_ptr CopyStunAttribute( const StunAttribute& attribute, - rtc::ByteBufferWriter* tmp_buffer_ptr) { + ByteBufferWriter* tmp_buffer_ptr) { ByteBufferWriter tmpBuffer; if (tmp_buffer_ptr == nullptr) { tmp_buffer_ptr = &tmpBuffer; @@ -1417,7 +1417,7 @@ std::unique_ptr CopyStunAttribute( if (!attribute.Write(tmp_buffer_ptr)) { return nullptr; } - rtc::ByteBufferReader reader(*tmp_buffer_ptr); + ByteBufferReader reader(*tmp_buffer_ptr); if (!copy->Read(&reader)) { return nullptr; } @@ -1425,36 +1425,11 @@ std::unique_ptr CopyStunAttribute( return copy; } -StunAttributeValueType RelayMessage::GetAttributeValueType(int type) const { - switch (type) { - case STUN_ATTR_LIFETIME: - return STUN_VALUE_UINT32; - case STUN_ATTR_MAGIC_COOKIE: - return STUN_VALUE_BYTE_STRING; - case STUN_ATTR_BANDWIDTH: - return STUN_VALUE_UINT32; - case STUN_ATTR_DESTINATION_ADDRESS: - return STUN_VALUE_ADDRESS; - case STUN_ATTR_SOURCE_ADDRESS2: - return STUN_VALUE_ADDRESS; - case STUN_ATTR_DATA: - return STUN_VALUE_BYTE_STRING; - case STUN_ATTR_OPTIONS: - return STUN_VALUE_UINT32; - default: - return StunMessage::GetAttributeValueType(type); - } -} - -StunMessage* RelayMessage::CreateNew() const { - return new RelayMessage(); -} - StunAttributeValueType TurnMessage::GetAttributeValueType(int type) const { switch (type) { case STUN_ATTR_CHANNEL_NUMBER: return STUN_VALUE_UINT32; - case STUN_ATTR_TURN_LIFETIME: + case STUN_ATTR_LIFETIME: return STUN_VALUE_UINT32; case STUN_ATTR_XOR_PEER_ADDRESS: return STUN_VALUE_XOR_ADDRESS; @@ -1505,15 +1480,15 @@ std::unique_ptr StunMessage::Clone() const { if (!copy) { return nullptr; } - rtc::ByteBufferWriter buf; + ByteBufferWriter buf; if (!Write(&buf)) { return nullptr; } - rtc::ByteBufferReader reader(buf); + ByteBufferReader reader(buf); if (!copy->Read(&reader)) { return nullptr; } return copy; } -} // namespace cricket +} // namespace webrtc diff --git a/api/transport/stun.h b/api/transport/stun.h index 4a04db33cf..80cce7355b 100644 --- a/api/transport/stun.h +++ b/api/transport/stun.h @@ -25,10 +25,12 @@ #include "absl/strings/string_view.h" #include "api/array_view.h" #include "rtc_base/byte_buffer.h" +#include "rtc_base/checks.h" #include "rtc_base/ip_address.h" +#include "rtc_base/net_helpers.h" #include "rtc_base/socket_address.h" -namespace cricket { +namespace webrtc { // These are the types of STUN messages defined in RFC 5389. enum StunMessageType : uint16_t { @@ -91,13 +93,17 @@ enum StunAddressFamily { // These are the types of STUN error codes defined in RFC 5389. enum StunErrorCode { + // Not an actual error from RFC 5389 and not emitted via icecandidateerror. 
+ STUN_ERROR_NOT_AN_ERROR = 0, STUN_ERROR_TRY_ALTERNATE = 300, STUN_ERROR_BAD_REQUEST = 400, STUN_ERROR_UNAUTHORIZED = 401, STUN_ERROR_UNKNOWN_ATTRIBUTE = 420, STUN_ERROR_STALE_NONCE = 438, STUN_ERROR_SERVER_ERROR = 500, - STUN_ERROR_GLOBAL_FAILURE = 600 + STUN_ERROR_GLOBAL_FAILURE = 600, + // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnectioniceerrorevent-errorcode + STUN_ERROR_SERVER_NOT_REACHABLE = 701, }; // Strings for the error codes above. @@ -249,7 +255,7 @@ class StunMessage { // Verify that a buffer has stun magic cookie and one of the specified // methods. Note that it does not check for the existance of FINGERPRINT. - static bool IsStunMethod(rtc::ArrayView methods, + static bool IsStunMethod(ArrayView methods, const char* data, size_t size); @@ -264,11 +270,11 @@ class StunMessage { // Parses the STUN packet in the given buffer and records it here. The // return value indicates whether this was successful. - bool Read(rtc::ByteBufferReader* buf); + bool Read(ByteBufferReader* buf); // Writes this object into a STUN packet. The return value indicates whether // this was successful. - bool Write(rtc::ByteBufferWriter* buf) const; + bool Write(ByteBufferWriter* buf) const; // Creates an empty message. Overridable by derived classes. virtual StunMessage* CreateNew() const; @@ -288,19 +294,6 @@ class StunMessage { bool EqualAttributes(const StunMessage* other, std::function attribute_type_mask) const; - // Validates that a STUN message in byte buffer form - // has a correct MESSAGE-INTEGRITY value. - // These functions are not recommended and will be deprecated; use - // ValidateMessageIntegrity(password) on the parsed form instead. - [[deprecated("Use member function")]] static bool ValidateMessageIntegrity( - const char* data, - size_t size, - const std::string& password); - [[deprecated("Use member function")]] static bool ValidateMessageIntegrity32( - const char* data, - size_t size, - const std::string& password); - // Expose raw-buffer ValidateMessageIntegrity function for testing. static bool ValidateMessageIntegrityForTesting(const char* data, size_t size, @@ -352,15 +345,15 @@ class StunAttribute { virtual StunAttributeValueType value_type() const = 0; // Only XorAddressAttribute needs this so far. - virtual void SetOwner(StunMessage* owner) {} + virtual void SetOwner(StunMessage* /* owner */) {} // Reads the body (not the type or length) for this type of attribute from // the given buffer. Return value is true if successful. - virtual bool Read(rtc::ByteBufferReader* buf) = 0; + virtual bool Read(ByteBufferReader* buf) = 0; // Writes the body (not the type or length) to the given buffer. Return // value is true if successful. - virtual bool Write(rtc::ByteBufferWriter* buf) const = 0; + virtual bool Write(ByteBufferWriter* buf) const = 0; // Creates an attribute object with the given type and smallest length. 
static StunAttribute* Create(StunAttributeValueType value_type, @@ -384,8 +377,8 @@ class StunAttribute { protected: StunAttribute(uint16_t type, uint16_t length); void SetLength(uint16_t length) { length_ = length; } - void WritePadding(rtc::ByteBufferWriter* buf) const; - void ConsumePadding(rtc::ByteBufferReader* buf) const; + void WritePadding(ByteBufferWriter* buf) const; + void ConsumePadding(ByteBufferReader* buf) const; private: uint16_t type_; @@ -398,7 +391,7 @@ class StunAddressAttribute : public StunAttribute { static const uint16_t SIZE_UNDEF = 0; static const uint16_t SIZE_IP4 = 8; static const uint16_t SIZE_IP6 = 20; - StunAddressAttribute(uint16_t type, const rtc::SocketAddress& addr); + StunAddressAttribute(uint16_t type, const SocketAddress& addr); StunAddressAttribute(uint16_t type, uint16_t length); StunAttributeValueType value_type() const override; @@ -413,22 +406,22 @@ class StunAddressAttribute : public StunAttribute { return STUN_ADDRESS_UNDEF; } - const rtc::SocketAddress& GetAddress() const { return address_; } - const rtc::IPAddress& ipaddr() const { return address_.ipaddr(); } + const SocketAddress& GetAddress() const { return address_; } + const IPAddress& ipaddr() const { return address_.ipaddr(); } uint16_t port() const { return address_.port(); } - void SetAddress(const rtc::SocketAddress& addr) { + void SetAddress(const SocketAddress& addr) { address_ = addr; EnsureAddressLength(); } - void SetIP(const rtc::IPAddress& ip) { + void SetIP(const IPAddress& ip) { address_.SetIP(ip); EnsureAddressLength(); } void SetPort(uint16_t port) { address_.SetPort(port); } - bool Read(rtc::ByteBufferReader* buf) override; - bool Write(rtc::ByteBufferWriter* buf) const override; + bool Read(ByteBufferReader* buf) override; + bool Write(ByteBufferWriter* buf) const override; private: void EnsureAddressLength() { @@ -447,7 +440,7 @@ class StunAddressAttribute : public StunAttribute { } } } - rtc::SocketAddress address_; + SocketAddress address_; }; // Implements STUN attributes that record an Internet address. When encoded @@ -455,16 +448,16 @@ class StunAddressAttribute : public StunAttribute { // transaction ID of the message. 
class StunXorAddressAttribute : public StunAddressAttribute { public: - StunXorAddressAttribute(uint16_t type, const rtc::SocketAddress& addr); + StunXorAddressAttribute(uint16_t type, const SocketAddress& addr); StunXorAddressAttribute(uint16_t type, uint16_t length, StunMessage* owner); StunAttributeValueType value_type() const override; void SetOwner(StunMessage* owner) override; - bool Read(rtc::ByteBufferReader* buf) override; - bool Write(rtc::ByteBufferWriter* buf) const override; + bool Read(ByteBufferReader* buf) override; + bool Write(ByteBufferWriter* buf) const override; private: - rtc::IPAddress GetXoredIP() const; + IPAddress GetXoredIP() const; StunMessage* owner_; }; @@ -483,8 +476,8 @@ class StunUInt32Attribute : public StunAttribute { bool GetBit(size_t index) const; void SetBit(size_t index, bool value); - bool Read(rtc::ByteBufferReader* buf) override; - bool Write(rtc::ByteBufferWriter* buf) const override; + bool Read(ByteBufferReader* buf) override; + bool Write(ByteBufferWriter* buf) const override; private: uint32_t bits_; @@ -501,8 +494,8 @@ class StunUInt64Attribute : public StunAttribute { uint64_t value() const { return bits_; } void SetValue(uint64_t bits) { bits_ = bits; } - bool Read(rtc::ByteBufferReader* buf) override; - bool Write(rtc::ByteBufferWriter* buf) const override; + bool Read(ByteBufferReader* buf) override; + bool Write(ByteBufferWriter* buf) const override; private: uint64_t bits_; @@ -519,13 +512,22 @@ class StunByteStringAttribute : public StunAttribute { StunAttributeValueType value_type() const override; - const char* bytes() const { return bytes_; } + [[deprecated("Use array_view")]] const char* bytes() const { + return reinterpret_cast(bytes_); + } + // Returns the attribute value as a string. + // Use this for attributes that are text or text-compatible. absl::string_view string_view() const { - return absl::string_view(bytes_, length()); + return absl::string_view(reinterpret_cast(bytes_), length()); + } + // Returns the attribute value as an uint8_t view. + // Use this function for values that are not text. + ArrayView array_view() const { + return MakeArrayView(bytes_, length()); } [[deprecated]] std::string GetString() const { - return std::string(bytes_, length()); + return std::string(reinterpret_cast(bytes_), length()); } void CopyBytes(const void* bytes, size_t length); @@ -534,13 +536,13 @@ class StunByteStringAttribute : public StunAttribute { uint8_t GetByte(size_t index) const; void SetByte(size_t index, uint8_t value); - bool Read(rtc::ByteBufferReader* buf) override; - bool Write(rtc::ByteBufferWriter* buf) const override; + bool Read(ByteBufferReader* buf) override; + bool Write(ByteBufferWriter* buf) const override; private: - void SetBytes(char* bytes, size_t length); + void SetBytes(uint8_t* bytes, size_t length); - char* bytes_; + uint8_t* bytes_; }; // Implements STUN attributes that record an error code. 
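As a standalone illustration of the XOR encoding that StunXorAddressAttribute declares here and GetXoredIP implements in stun.cc: for IPv4 the port is XORed with the top 16 bits of the magic cookie and the address with the full cookie (IPv6 additionally XORs the remaining 96 bits with the transaction id). The sketch below works in host byte order for simplicity and uses illustrative names, not the WebRTC API.

#include <cstdint>
#include <utility>

namespace {

constexpr uint32_t kMagicCookie = 0x2112A442;

// Returns {xored_ipv4_host_order, xored_port}. Applying the function twice
// restores the original values, since XOR is its own inverse.
std::pair<uint32_t, uint16_t> XorIPv4MappedAddress(uint32_t ipv4_host_order,
                                                   uint16_t port) {
  uint16_t xored_port = port ^ static_cast<uint16_t>(kMagicCookie >> 16);
  uint32_t xored_ip = ipv4_host_order ^ kMagicCookie;
  return {xored_ip, xored_port};
}

}  // namespace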
@@ -565,8 +567,8 @@ class StunErrorCodeAttribute : public StunAttribute { void SetNumber(uint8_t number) { number_ = number; } void SetReason(const std::string& reason); - bool Read(rtc::ByteBufferReader* buf) override; - bool Write(rtc::ByteBufferWriter* buf) const override; + bool Read(ByteBufferReader* buf) override; + bool Write(ByteBufferWriter* buf) const override; private: uint8_t class_; @@ -588,8 +590,8 @@ class StunUInt16ListAttribute : public StunAttribute { void AddType(uint16_t value); void AddTypeAtIndex(uint16_t index, uint16_t value); - bool Read(rtc::ByteBufferReader* buf) override; - bool Write(rtc::ByteBufferWriter* buf) const override; + bool Read(ByteBufferReader* buf) override; + bool Write(ByteBufferWriter* buf) const override; private: std::vector* attr_types_; @@ -634,46 +636,7 @@ bool ComputeStunCredentialHash(const std::string& username, // a buffer will created in the method. std::unique_ptr CopyStunAttribute( const StunAttribute& attribute, - rtc::ByteBufferWriter* tmp_buffer_ptr = 0); - -// TODO(?): Move the TURN/ICE stuff below out to separate files. -extern const char TURN_MAGIC_COOKIE_VALUE[4]; - -// "GTURN" STUN methods. -// TODO(?): Rename these methods to GTURN_ to make it clear they aren't -// part of standard STUN/TURN. -enum RelayMessageType { - // For now, using the same defs from TurnMessageType below. - // STUN_ALLOCATE_REQUEST = 0x0003, - // STUN_ALLOCATE_RESPONSE = 0x0103, - // STUN_ALLOCATE_ERROR_RESPONSE = 0x0113, - STUN_SEND_REQUEST = 0x0004, - STUN_SEND_RESPONSE = 0x0104, - STUN_SEND_ERROR_RESPONSE = 0x0114, - STUN_DATA_INDICATION = 0x0115, -}; - -// "GTURN"-specific STUN attributes. -// TODO(?): Rename these attributes to GTURN_ to avoid conflicts. -enum RelayAttributeType { - STUN_ATTR_LIFETIME = 0x000d, // UInt32 - STUN_ATTR_MAGIC_COOKIE = 0x000f, // ByteString, 4 bytes - STUN_ATTR_BANDWIDTH = 0x0010, // UInt32 - STUN_ATTR_DESTINATION_ADDRESS = 0x0011, // Address - STUN_ATTR_SOURCE_ADDRESS2 = 0x0012, // Address - STUN_ATTR_DATA = 0x0013, // ByteString - STUN_ATTR_OPTIONS = 0x8001, // UInt32 -}; - -// A "GTURN" STUN message. -class RelayMessage : public StunMessage { - public: - using StunMessage::StunMessage; - - protected: - StunAttributeValueType GetAttributeValueType(int type) const override; - StunMessage* CreateNew() const override; -}; + ByteBufferWriter* tmp_buffer_ptr = 0); // Defined in TURN RFC 5766. enum TurnMessageType : uint16_t { @@ -694,19 +657,15 @@ enum TurnMessageType : uint16_t { }; enum TurnAttributeType { - STUN_ATTR_CHANNEL_NUMBER = 0x000C, // UInt32 - STUN_ATTR_TURN_LIFETIME = 0x000d, // UInt32 - STUN_ATTR_XOR_PEER_ADDRESS = 0x0012, // XorAddress - // TODO(mallinath) - Uncomment after RelayAttributes are renamed. - // STUN_ATTR_DATA = 0x0013, // ByteString + STUN_ATTR_CHANNEL_NUMBER = 0x000C, // UInt32 + STUN_ATTR_LIFETIME = 0x000d, // UInt32 + STUN_ATTR_XOR_PEER_ADDRESS = 0x0012, // XorAddress + STUN_ATTR_DATA = 0x0013, // ByteString STUN_ATTR_XOR_RELAYED_ADDRESS = 0x0016, // XorAddress STUN_ATTR_EVEN_PORT = 0x0018, // ByteString, 1 byte. STUN_ATTR_REQUESTED_TRANSPORT = 0x0019, // UInt32 STUN_ATTR_DONT_FRAGMENT = 0x001A, // No content, Length = 0 STUN_ATTR_RESERVATION_TOKEN = 0x0022, // ByteString, 8 bytes. - // TODO(mallinath) - Rename STUN_ATTR_TURN_LIFETIME to STUN_ATTR_LIFETIME and - // STUN_ATTR_TURN_DATA to STUN_ATTR_DATA. Also rename RelayMessage attributes - // by appending G to attribute name. }; // RFC 5766-defined errors. 
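The class_/number_ pair kept by StunErrorCodeAttribute above mirrors the on-the-wire split of an ERROR-CODE value in RFC 5389, section 15.6: the hundreds digit travels as a 3-bit class and the remainder as an 8-bit number. A minimal sketch of that split follows; the helpers are illustrative and not the WebRTC API.

#include <cstdint>

namespace {

struct ErrorCodeParts {
  uint8_t error_class;  // Hundreds digit, e.g. 4 for 438 (Stale Nonce).
  uint8_t number;       // Remainder, e.g. 38 for 438.
};

ErrorCodeParts SplitErrorCode(int code) {
  return {static_cast<uint8_t>(code / 100), static_cast<uint8_t>(code % 100)};
}

int JoinErrorCode(const ErrorCodeParts& parts) {
  return parts.error_class * 100 + parts.number;
}

}  // namespace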
@@ -717,7 +676,8 @@ enum TurnErrorType { STUN_ERROR_UNSUPPORTED_PROTOCOL = 442 }; -extern const int SERVER_NOT_REACHABLE_ERROR; +[[deprecated("Use STUN_ERROR_SERVER_NOT_REACHABLE")]] extern const int + SERVER_NOT_REACHABLE_ERROR; extern const char STUN_ERROR_REASON_FORBIDDEN[]; extern const char STUN_ERROR_REASON_ALLOCATION_MISMATCH[]; @@ -762,6 +722,9 @@ enum IceAttributeType { STUN_ATTR_GOOG_DELTA_SYNC_REQ = 0xC05E, // Not yet implemented. // MESSAGE-INTEGRITY truncated to 32-bit. STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32 = 0xC060, + // Experimental: piggybacking the DTLS handshake in STUN. + STUN_ATTR_META_DTLS_IN_STUN = 0xC070, + STUN_ATTR_META_DTLS_IN_STUN_ACK = 0xC071, }; // When adding new attributes to STUN_ATTR_GOOG_MISC_INFO @@ -794,6 +757,151 @@ class IceMessage : public StunMessage { StunMessage* CreateNew() const override; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::ComputeStunCredentialHash; +using ::webrtc::CopyStunAttribute; +using ::webrtc::GetStunErrorResponseType; +using ::webrtc::GetStunSuccessResponseType; +using ::webrtc::GOOG_PING_ERROR_RESPONSE; +using ::webrtc::GOOG_PING_REQUEST; +using ::webrtc::GOOG_PING_RESPONSE; +using ::webrtc::IceAttributeType; +using ::webrtc::IceErrorCode; +using ::webrtc::IceGoogMiscInfoBindingRequestAttributeIndex; +using ::webrtc::IceGoogMiscInfoBindingResponseAttributeIndex; +using ::webrtc::IceMessage; +using ::webrtc::IsStunErrorResponseType; +using ::webrtc::IsStunIndicationType; +using ::webrtc::IsStunRequestType; +using ::webrtc::IsStunSuccessResponseType; +using ::webrtc::kStunAttributeHeaderSize; +using ::webrtc::kStunHeaderSize; +using ::webrtc::kStunLegacyTransactionIdLength; +using ::webrtc::kStunMagicCookie; +using ::webrtc::kStunMagicCookieLength; +using ::webrtc::kStunMessageIntegrity32Size; +using ::webrtc::kStunMessageIntegritySize; +using ::webrtc::kStunTransactionIdLength; +using ::webrtc::kStunTransactionIdOffset; +using ::webrtc::kStunTypeMask; +using ::webrtc::SERVER_NOT_REACHABLE_ERROR; +using ::webrtc::STUN_ADDRESS_IPV4; +using ::webrtc::STUN_ADDRESS_IPV6; +using ::webrtc::STUN_ADDRESS_UNDEF; +using ::webrtc::STUN_ALLOCATE_ERROR_RESPONSE; +using ::webrtc::STUN_ALLOCATE_REQUEST; +using ::webrtc::STUN_ALLOCATE_RESPONSE; +using ::webrtc::STUN_ATTR_ALTERNATE_SERVER; +using ::webrtc::STUN_ATTR_CHANNEL_NUMBER; +using ::webrtc::STUN_ATTR_DATA; +using ::webrtc::STUN_ATTR_DONT_FRAGMENT; +using ::webrtc::STUN_ATTR_ERROR_CODE; +using ::webrtc::STUN_ATTR_EVEN_PORT; +using ::webrtc::STUN_ATTR_FINGERPRINT; +using ::webrtc::STUN_ATTR_GOOG_CONNECTION_ID; +using ::webrtc::STUN_ATTR_GOOG_DELTA; +using ::webrtc::STUN_ATTR_GOOG_DELTA_ACK; +using ::webrtc::STUN_ATTR_GOOG_DELTA_SYNC_REQ; +using ::webrtc::STUN_ATTR_GOOG_LAST_ICE_CHECK_RECEIVED; +using ::webrtc::STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32; +using ::webrtc::STUN_ATTR_GOOG_MISC_INFO; +using ::webrtc::STUN_ATTR_GOOG_NETWORK_INFO; +using ::webrtc::STUN_ATTR_GOOG_OBSOLETE_1; +using ::webrtc::STUN_ATTR_ICE_CONTROLLED; +using ::webrtc::STUN_ATTR_ICE_CONTROLLING; +using ::webrtc::STUN_ATTR_LIFETIME; +using ::webrtc::STUN_ATTR_MAPPED_ADDRESS; +using ::webrtc::STUN_ATTR_MESSAGE_INTEGRITY; +using ::webrtc::STUN_ATTR_META_DTLS_IN_STUN; +using ::webrtc::STUN_ATTR_META_DTLS_IN_STUN_ACK; +using ::webrtc::STUN_ATTR_NOMINATION; +using ::webrtc::STUN_ATTR_NONCE; +using 
::webrtc::STUN_ATTR_PRIORITY; +using ::webrtc::STUN_ATTR_REALM; +using ::webrtc::STUN_ATTR_REQUESTED_TRANSPORT; +using ::webrtc::STUN_ATTR_RESERVATION_TOKEN; +using ::webrtc::STUN_ATTR_RETRANSMIT_COUNT; +using ::webrtc::STUN_ATTR_SOFTWARE; +using ::webrtc::STUN_ATTR_UNKNOWN_ATTRIBUTES; +using ::webrtc::STUN_ATTR_USE_CANDIDATE; +using ::webrtc::STUN_ATTR_USERNAME; +using ::webrtc::STUN_ATTR_XOR_MAPPED_ADDRESS; +using ::webrtc::STUN_ATTR_XOR_PEER_ADDRESS; +using ::webrtc::STUN_ATTR_XOR_RELAYED_ADDRESS; +using ::webrtc::STUN_BINDING_ERROR_RESPONSE; +using ::webrtc::STUN_BINDING_INDICATION; +using ::webrtc::STUN_BINDING_REQUEST; +using ::webrtc::STUN_BINDING_RESPONSE; +using ::webrtc::STUN_ERROR_ALLOCATION_MISMATCH; +using ::webrtc::STUN_ERROR_BAD_REQUEST; +using ::webrtc::STUN_ERROR_FORBIDDEN; +using ::webrtc::STUN_ERROR_GLOBAL_FAILURE; +using ::webrtc::STUN_ERROR_NOT_AN_ERROR; +using ::webrtc::STUN_ERROR_REASON_ALLOCATION_MISMATCH; +using ::webrtc::STUN_ERROR_REASON_BAD_REQUEST; +using ::webrtc::STUN_ERROR_REASON_FORBIDDEN; +using ::webrtc::STUN_ERROR_REASON_ROLE_CONFLICT; +using ::webrtc::STUN_ERROR_REASON_SERVER_ERROR; +using ::webrtc::STUN_ERROR_REASON_STALE_NONCE; +using ::webrtc::STUN_ERROR_REASON_TRY_ALTERNATE_SERVER; +using ::webrtc::STUN_ERROR_REASON_UNAUTHORIZED; +using ::webrtc::STUN_ERROR_REASON_UNKNOWN_ATTRIBUTE; +using ::webrtc::STUN_ERROR_REASON_UNSUPPORTED_PROTOCOL; +using ::webrtc::STUN_ERROR_REASON_WRONG_CREDENTIALS; +using ::webrtc::STUN_ERROR_ROLE_CONFLICT; +using ::webrtc::STUN_ERROR_SERVER_ERROR; +using ::webrtc::STUN_ERROR_SERVER_NOT_REACHABLE; +using ::webrtc::STUN_ERROR_STALE_NONCE; +using ::webrtc::STUN_ERROR_TRY_ALTERNATE; +using ::webrtc::STUN_ERROR_UNAUTHORIZED; +using ::webrtc::STUN_ERROR_UNKNOWN_ATTRIBUTE; +using ::webrtc::STUN_ERROR_UNSUPPORTED_PROTOCOL; +using ::webrtc::STUN_ERROR_WRONG_CREDENTIALS; +using ::webrtc::STUN_INVALID_MESSAGE_TYPE; +using ::webrtc::STUN_VALUE_ADDRESS; +using ::webrtc::STUN_VALUE_BYTE_STRING; +using ::webrtc::STUN_VALUE_ERROR_CODE; +using ::webrtc::STUN_VALUE_UINT16_LIST; +using ::webrtc::STUN_VALUE_UINT32; +using ::webrtc::STUN_VALUE_UINT64; +using ::webrtc::STUN_VALUE_UNKNOWN; +using ::webrtc::STUN_VALUE_XOR_ADDRESS; +using ::webrtc::StunAddressAttribute; +using ::webrtc::StunAddressFamily; +using ::webrtc::StunAttribute; +using ::webrtc::StunAttributeType; +using ::webrtc::StunAttributeValueType; +using ::webrtc::StunByteStringAttribute; +using ::webrtc::StunErrorCode; +using ::webrtc::StunErrorCodeAttribute; +using ::webrtc::StunMessage; +using ::webrtc::StunMessageType; +using ::webrtc::StunMethodToString; +using ::webrtc::StunUInt16ListAttribute; +using ::webrtc::StunUInt32Attribute; +using ::webrtc::StunUInt64Attribute; +using ::webrtc::StunXorAddressAttribute; +using ::webrtc::TURN_CHANNEL_BIND_ERROR_RESPONSE; +using ::webrtc::TURN_CHANNEL_BIND_REQUEST; +using ::webrtc::TURN_CHANNEL_BIND_RESPONSE; +using ::webrtc::TURN_CREATE_PERMISSION_ERROR_RESPONSE; +using ::webrtc::TURN_CREATE_PERMISSION_REQUEST; +using ::webrtc::TURN_CREATE_PERMISSION_RESPONSE; +using ::webrtc::TURN_DATA_INDICATION; +using ::webrtc::TURN_REFRESH_ERROR_RESPONSE; +using ::webrtc::TURN_REFRESH_REQUEST; +using ::webrtc::TURN_REFRESH_RESPONSE; +using ::webrtc::TURN_SEND_INDICATION; +using ::webrtc::TurnAttributeType; +using ::webrtc::TurnErrorType; +using ::webrtc::TurnMessage; +using ::webrtc::TurnMessageType; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // API_TRANSPORT_STUN_H_ diff --git a/api/transport/stun_unittest.cc 
b/api/transport/stun_unittest.cc index 96ad45843b..793973e037 100644 --- a/api/transport/stun_unittest.cc +++ b/api/transport/stun_unittest.cc @@ -12,18 +12,21 @@ #include +#include #include #include #include +#include "api/array_view.h" #include "rtc_base/arraysize.h" #include "rtc_base/byte_buffer.h" #include "rtc_base/byte_order.h" +#include "rtc_base/ip_address.h" #include "rtc_base/socket_address.h" #include "system_wrappers/include/metrics.h" #include "test/gtest.h" -namespace cricket { +namespace webrtc { class StunTest : public ::testing::Test { protected: @@ -35,7 +38,7 @@ class StunTest : public ::testing::Test { } void CheckStunTransactionID(const StunMessage& msg, - const unsigned char* expectedID, + const uint8_t* expectedID, size_t length) { ASSERT_EQ(length, msg.transaction_id().size()); ASSERT_EQ(length == kStunTransactionIdLength + 4, msg.IsLegacy()); @@ -46,7 +49,7 @@ class StunTest : public ::testing::Test { void CheckStunAddressAttribute(const StunAddressAttribute* addr, StunAddressFamily expected_family, int expected_port, - const rtc::IPAddress& expected_address) { + const IPAddress& expected_address) { ASSERT_EQ(expected_family, addr->family()); ASSERT_EQ(expected_port, addr->port()); @@ -65,10 +68,9 @@ class StunTest : public ::testing::Test { } size_t ReadStunMessageTestCase(StunMessage* msg, - const unsigned char* testcase, + const uint8_t* testcase, size_t size) { - const char* input = reinterpret_cast(testcase); - rtc::ByteBufferReader buf(input, size); + ByteBufferReader buf(MakeArrayView(testcase, size)); if (msg->Read(&buf)) { // Returns the size the stun message should report itself as being return (size - 20); @@ -85,7 +87,7 @@ class StunTest : public ::testing::Test { // clang-format off // clang formatting doesn't respect inline comments. 
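Note on the api/transport/stun.h and stun_unittest.cc hunks above: the STUN code now lives in namespace webrtc, the old cricket:: spellings survive only as using-declarations gated on WEBRTC_ALLOW_DEPRECATED_NAMESPACES, and SERVER_NOT_REACHABLE_ERROR additionally carries a [[deprecated]] attribute steering callers toward STUN_ERROR_SERVER_NOT_REACHABLE. A minimal sketch of what a downstream translation unit sees during the transition (the call sites below are illustrative, not taken from this patch):

    // Legacy code, built with -DWEBRTC_ALLOW_DEPRECATED_NAMESPACES.
    #include "api/transport/stun.h"

    void LegacyCallSite() {
      cricket::StunMessage msg;                       // alias of webrtc::StunMessage
      int type = cricket::STUN_BINDING_REQUEST;       // alias of webrtc::STUN_BINDING_REQUEST
      int err = cricket::SERVER_NOT_REACHABLE_ERROR;  // emits -Wdeprecated-declarations
      (void)msg;
      (void)type;
      (void)err;
    }

    void MigratedCallSite() {
      webrtc::StunMessage msg;  // preferred spelling; no build flag required
      (void)msg;
    }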
-static const unsigned char kStunMessageWithIPv6MappedAddress[] = { +static const uint8_t kStunMessageWithIPv6MappedAddress[] = { 0x00, 0x01, 0x00, 0x18, // message header 0x21, 0x12, 0xa4, 0x42, // transaction id 0x29, 0x1f, 0xcd, 0x7c, @@ -99,7 +101,7 @@ static const unsigned char kStunMessageWithIPv6MappedAddress[] = { 0xfe, 0xe5, 0x00, 0xc3 }; -static const unsigned char kStunMessageWithIPv4MappedAddress[] = { +static const uint8_t kStunMessageWithIPv4MappedAddress[] = { 0x01, 0x01, 0x00, 0x0c, // binding response, length 12 0x21, 0x12, 0xa4, 0x42, // magic cookie 0x29, 0x1f, 0xcd, 0x7c, // transaction ID @@ -111,7 +113,7 @@ static const unsigned char kStunMessageWithIPv4MappedAddress[] = { }; // Test XOR-mapped IP addresses: -static const unsigned char kStunMessageWithIPv6XorMappedAddress[] = { +static const uint8_t kStunMessageWithIPv6XorMappedAddress[] = { 0x01, 0x01, 0x00, 0x18, // message header (binding response) 0x21, 0x12, 0xa4, 0x42, // magic cookie (rfc5389) 0xe3, 0xa9, 0x46, 0xe1, // transaction ID @@ -125,7 +127,7 @@ static const unsigned char kStunMessageWithIPv6XorMappedAddress[] = { 0xaa, 0xed, 0x01, 0xc3 }; -static const unsigned char kStunMessageWithIPv4XorMappedAddress[] = { +static const uint8_t kStunMessageWithIPv4XorMappedAddress[] = { 0x01, 0x01, 0x00, 0x0c, // message header (binding response) 0x21, 0x12, 0xa4, 0x42, // magic cookie 0x29, 0x1f, 0xcd, 0x7c, // transaction ID @@ -137,7 +139,7 @@ static const unsigned char kStunMessageWithIPv4XorMappedAddress[] = { }; // ByteString Attribute (username) -static const unsigned char kStunMessageWithByteStringAttribute[] = { +static const uint8_t kStunMessageWithByteStringAttribute[] = { 0x00, 0x01, 0x00, 0x0c, 0x21, 0x12, 0xa4, 0x42, 0xe3, 0xa9, 0x46, 0xe1, @@ -150,7 +152,7 @@ static const unsigned char kStunMessageWithByteStringAttribute[] = { // Message with an unknown but comprehensible optional attribute. // Parsing should succeed despite this unknown attribute. -static const unsigned char kStunMessageWithUnknownAttribute[] = { +static const uint8_t kStunMessageWithUnknownAttribute[] = { 0x00, 0x01, 0x00, 0x14, 0x21, 0x12, 0xa4, 0x42, 0xe3, 0xa9, 0x46, 0xe1, @@ -164,7 +166,7 @@ static const unsigned char kStunMessageWithUnknownAttribute[] = { }; // ByteString Attribute (username) with padding byte -static const unsigned char kStunMessageWithPaddedByteStringAttribute[] = { +static const uint8_t kStunMessageWithPaddedByteStringAttribute[] = { 0x00, 0x01, 0x00, 0x08, 0x21, 0x12, 0xa4, 0x42, 0xe3, 0xa9, 0x46, 0xe1, @@ -175,7 +177,7 @@ static const unsigned char kStunMessageWithPaddedByteStringAttribute[] = { }; // Message with an Unknown Attributes (uint16_t list) attribute. 
-static const unsigned char kStunMessageWithUInt16ListAttribute[] = { +static const uint8_t kStunMessageWithUInt16ListAttribute[] = { 0x00, 0x01, 0x00, 0x0c, 0x21, 0x12, 0xa4, 0x42, 0xe3, 0xa9, 0x46, 0xe1, @@ -187,7 +189,7 @@ static const unsigned char kStunMessageWithUInt16ListAttribute[] = { }; // Error response message (unauthorized) -static const unsigned char kStunMessageWithErrorAttribute[] = { +static const uint8_t kStunMessageWithErrorAttribute[] = { 0x01, 0x11, 0x00, 0x14, 0x21, 0x12, 0xa4, 0x42, 0x29, 0x1f, 0xcd, 0x7c, @@ -205,7 +207,7 @@ static const unsigned char kStunMessageWithErrorAttribute[] = { // The actual length in bytes of the invalid messages (including STUN header) static const int kRealLengthOfInvalidLengthTestCases = 32; -static const unsigned char kStunMessageWithZeroLength[] = { +static const uint8_t kStunMessageWithZeroLength[] = { 0x00, 0x01, 0x00, 0x00, // length of 0 (last 2 bytes) 0x21, 0x12, 0xA4, 0x42, // magic cookie '0', '1', '2', '3', // transaction id @@ -216,7 +218,7 @@ static const unsigned char kStunMessageWithZeroLength[] = { 0x21, 0x12, 0xA4, 0x53, }; -static const unsigned char kStunMessageWithExcessLength[] = { +static const uint8_t kStunMessageWithExcessLength[] = { 0x00, 0x01, 0x00, 0x55, // length of 85 0x21, 0x12, 0xA4, 0x42, // magic cookie '0', '1', '2', '3', // transaction id @@ -227,7 +229,7 @@ static const unsigned char kStunMessageWithExcessLength[] = { 0x21, 0x12, 0xA4, 0x53, }; -static const unsigned char kStunMessageWithSmallLength[] = { +static const uint8_t kStunMessageWithSmallLength[] = { 0x00, 0x01, 0x00, 0x03, // length of 3 0x21, 0x12, 0xA4, 0x42, // magic cookie '0', '1', '2', '3', // transaction id @@ -238,7 +240,7 @@ static const unsigned char kStunMessageWithSmallLength[] = { 0x21, 0x12, 0xA4, 0x53, }; -static const unsigned char kStunMessageWithBadHmacAtEnd[] = { +static const uint8_t kStunMessageWithBadHmacAtEnd[] = { 0x00, 0x01, 0x00, 0x14, // message length exactly 20 0x21, 0x12, 0xA4, 0x42, // magic cookie '0', '1', '2', '3', // transaction ID @@ -253,7 +255,7 @@ static const unsigned char kStunMessageWithBadHmacAtEnd[] = { // RTCP packet, for testing we correctly ignore non stun packet types. 
// V=2, P=false, RC=0, Type=200, Len=6, Sender-SSRC=85, etc -static const unsigned char kRtcpPacket[] = { +static const uint8_t kRtcpPacket[] = { 0x80, 0xc8, 0x00, 0x06, 0x00, 0x00, 0x00, 0x55, 0xce, 0xa5, 0x18, 0x3a, 0x39, 0xcc, 0x7d, 0x09, 0x23, 0xed, 0x19, 0x07, 0x00, 0x00, 0x01, 0x56, @@ -266,19 +268,19 @@ static const unsigned char kRtcpPacket[] = { // Software name (response): "test vector" (without quotes) // Username: "evtj:h6vY" (without quotes) // Password: "VOkJxbRl1RmTxUk/WvJxBt" (without quotes) -static const unsigned char kRfc5769SampleMsgTransactionId[] = { +static const uint8_t kRfc5769SampleMsgTransactionId[] = { 0xb7, 0xe7, 0xa7, 0x01, 0xbc, 0x34, 0xd6, 0x86, 0xfa, 0x87, 0xdf, 0xae }; static const char kRfc5769SampleMsgClientSoftware[] = "STUN test client"; static const char kRfc5769SampleMsgServerSoftware[] = "test vector"; static const char kRfc5769SampleMsgUsername[] = "evtj:h6vY"; static const char kRfc5769SampleMsgPassword[] = "VOkJxbRl1RmTxUk/WvJxBt"; -static const rtc::SocketAddress kRfc5769SampleMsgMappedAddress( +static const SocketAddress kRfc5769SampleMsgMappedAddress( "192.0.2.1", 32853); -static const rtc::SocketAddress kRfc5769SampleMsgIPv6MappedAddress( +static const SocketAddress kRfc5769SampleMsgIPv6MappedAddress( "2001:db8:1234:5678:11:2233:4455:6677", 32853); -static const unsigned char kRfc5769SampleMsgWithAuthTransactionId[] = { +static const uint8_t kRfc5769SampleMsgWithAuthTransactionId[] = { 0x78, 0xad, 0x34, 0x33, 0xc6, 0xad, 0x72, 0xc0, 0x29, 0xda, 0x41, 0x2e }; static const char kRfc5769SampleMsgWithAuthUsername[] = @@ -289,7 +291,7 @@ static const char kRfc5769SampleMsgWithAuthNonce[] = static const char kRfc5769SampleMsgWithAuthRealm[] = "example.org"; // 2.1. Sample Request -static const unsigned char kRfc5769SampleRequest[] = { +static const uint8_t kRfc5769SampleRequest[] = { 0x00, 0x01, 0x00, 0x58, // Request type and message length 0x21, 0x12, 0xa4, 0x42, // Magic cookie 0xb7, 0xe7, 0xa7, 0x01, // } @@ -320,7 +322,7 @@ static const unsigned char kRfc5769SampleRequest[] = { }; // 2.1. Sample Request -static const unsigned char kSampleRequestMI32[] = { +static const uint8_t kSampleRequestMI32[] = { 0x00, 0x01, 0x00, 0x48, // Request type and message length 0x21, 0x12, 0xa4, 0x42, // Magic cookie 0xb7, 0xe7, 0xa7, 0x01, // } @@ -347,7 +349,7 @@ static const unsigned char kSampleRequestMI32[] = { }; // 2.2. Sample IPv4 Response -static const unsigned char kRfc5769SampleResponse[] = { +static const uint8_t kRfc5769SampleResponse[] = { 0x01, 0x01, 0x00, 0x3c, // Response type and message length 0x21, 0x12, 0xa4, 0x42, // Magic cookie 0xb7, 0xe7, 0xa7, 0x01, // } @@ -371,7 +373,7 @@ static const unsigned char kRfc5769SampleResponse[] = { }; // 2.3. Sample IPv6 Response -static const unsigned char kRfc5769SampleResponseIPv6[] = { +static const uint8_t kRfc5769SampleResponseIPv6[] = { 0x01, 0x01, 0x00, 0x48, // Response type and message length 0x21, 0x12, 0xa4, 0x42, // Magic cookie 0xb7, 0xe7, 0xa7, 0x01, // } @@ -398,7 +400,7 @@ static const unsigned char kRfc5769SampleResponseIPv6[] = { }; // 2.4. Sample Request with Long-Term Authentication -static const unsigned char kRfc5769SampleRequestLongTermAuth[] = { +static const uint8_t kRfc5769SampleRequestLongTermAuth[] = { 0x00, 0x01, 0x00, 0x60, // Request type and message length 0x21, 0x12, 0xa4, 0x42, // Magic cookie 0x78, 0xad, 0x34, 0x33, // } @@ -433,7 +435,7 @@ static const unsigned char kRfc5769SampleRequestLongTermAuth[] = { // Length parameter is changed to 0x38 from 0x58. 
// AddMessageIntegrity will add MI information and update the length param // accordingly. -static const unsigned char kRfc5769SampleRequestWithoutMI[] = { +static const uint8_t kRfc5769SampleRequestWithoutMI[] = { 0x00, 0x01, 0x00, 0x38, // Request type and message length 0x21, 0x12, 0xa4, 0x42, // Magic cookie 0xb7, 0xe7, 0xa7, 0x01, // } @@ -457,7 +459,7 @@ static const unsigned char kRfc5769SampleRequestWithoutMI[] = { // This HMAC differs from the RFC 5769 SampleRequest message. This differs // because spec uses 0x20 for the padding where as our implementation uses 0. -static const unsigned char kCalculatedHmac1[] = { +static const uint8_t kCalculatedHmac1[] = { 0x79, 0x07, 0xc2, 0xd2, // } 0xed, 0xbf, 0xea, 0x48, // } 0x0e, 0x4c, 0x76, 0xd8, // } HMAC-SHA1 fingerprint @@ -469,14 +471,14 @@ static const unsigned char kCalculatedHmac1[] = { // above since the sum is computed including header // and the header is different since the message is shorter // than when MESSAGE-INTEGRITY is used. -static const unsigned char kCalculatedHmac1_32[] = { +static const uint8_t kCalculatedHmac1_32[] = { 0xda, 0x39, 0xde, 0x5d, // } }; // Length parameter is changed to 0x1c from 0x3c. // AddMessageIntegrity will add MI information and update the length param // accordingly. -static const unsigned char kRfc5769SampleResponseWithoutMI[] = { +static const uint8_t kRfc5769SampleResponseWithoutMI[] = { 0x01, 0x01, 0x00, 0x1c, // Response type and message length 0x21, 0x12, 0xa4, 0x42, // Magic cookie 0xb7, 0xe7, 0xa7, 0x01, // } @@ -493,7 +495,7 @@ static const unsigned char kRfc5769SampleResponseWithoutMI[] = { // This HMAC differs from the RFC 5769 SampleResponse message. This differs // because spec uses 0x20 for the padding where as our implementation uses 0. -static const unsigned char kCalculatedHmac2[] = { +static const uint8_t kCalculatedHmac2[] = { 0x5d, 0x6b, 0x58, 0xbe, // } 0xad, 0x94, 0xe0, 0x7e, // } 0xef, 0x0d, 0xfc, 0x12, // } HMAC-SHA1 fingerprint @@ -505,7 +507,7 @@ static const unsigned char kCalculatedHmac2[] = { // above since the sum is computed including header // and the header is different since the message is shorter // than when MESSAGE-INTEGRITY is used. -static const unsigned char kCalculatedHmac2_32[] = { +static const uint8_t kCalculatedHmac2_32[] = { 0xe7, 0x5c, 0xd3, 0x16, // } }; @@ -513,14 +515,14 @@ static const unsigned char kCalculatedHmac2_32[] = { // A transaction ID without the 'magic cookie' portion // pjnat's test programs use this transaction ID a lot. -const unsigned char kTestTransactionId1[] = {0x029, 0x01f, 0x0cd, 0x07c, - 0x0ba, 0x058, 0x0ab, 0x0d7, - 0x0f2, 0x041, 0x001, 0x000}; +const uint8_t kTestTransactionId1[] = {0x029, 0x01f, 0x0cd, 0x07c, + 0x0ba, 0x058, 0x0ab, 0x0d7, + 0x0f2, 0x041, 0x001, 0x000}; // They use this one sometimes too. 
-const unsigned char kTestTransactionId2[] = {0x0e3, 0x0a9, 0x046, 0x0e1, - 0x07c, 0x000, 0x0c2, 0x062, - 0x054, 0x008, 0x001, 0x000}; +const uint8_t kTestTransactionId2[] = {0x0e3, 0x0a9, 0x046, 0x0e1, + 0x07c, 0x000, 0x0c2, 0x062, + 0x054, 0x008, 0x001, 0x000}; const in6_addr kIPv6TestAddress1 = { {{0x24, 0x01, 0xfa, 0x00, 0x00, 0x04, 0x10, 0x00, 0xbe, 0x30, 0x5b, 0xff, @@ -580,7 +582,7 @@ TEST_F(StunTest, ReadMessageWithIPv4AddressAttribute) { CheckStunTransactionID(msg, kTestTransactionId1, kStunTransactionIdLength); const StunAddressAttribute* addr = msg.GetAddress(STUN_ATTR_MAPPED_ADDRESS); - rtc::IPAddress test_address(kIPv4TestAddress1); + IPAddress test_address(kIPv4TestAddress1); CheckStunAddressAttribute(addr, STUN_ADDRESS_IPV4, kTestMessagePort4, test_address); } @@ -594,7 +596,7 @@ TEST_F(StunTest, ReadMessageWithIPv4XorAddressAttribute) { const StunAddressAttribute* addr = msg.GetAddress(STUN_ATTR_XOR_MAPPED_ADDRESS); - rtc::IPAddress test_address(kIPv4TestAddress1); + IPAddress test_address(kIPv4TestAddress1); CheckStunAddressAttribute(addr, STUN_ADDRESS_IPV4, kTestMessagePort3, test_address); } @@ -605,7 +607,7 @@ TEST_F(StunTest, ReadMessageWithIPv6AddressAttribute) { CheckStunHeader(msg, STUN_BINDING_REQUEST, size); CheckStunTransactionID(msg, kTestTransactionId1, kStunTransactionIdLength); - rtc::IPAddress test_address(kIPv6TestAddress1); + IPAddress test_address(kIPv6TestAddress1); const StunAddressAttribute* addr = msg.GetAddress(STUN_ATTR_MAPPED_ADDRESS); CheckStunAddressAttribute(addr, STUN_ADDRESS_IPV6, kTestMessagePort2, @@ -618,7 +620,7 @@ TEST_F(StunTest, ReadMessageWithInvalidAddressAttribute) { CheckStunHeader(msg, STUN_BINDING_REQUEST, size); CheckStunTransactionID(msg, kTestTransactionId1, kStunTransactionIdLength); - rtc::IPAddress test_address(kIPv6TestAddress1); + IPAddress test_address(kIPv6TestAddress1); const StunAddressAttribute* addr = msg.GetAddress(STUN_ATTR_MAPPED_ADDRESS); CheckStunAddressAttribute(addr, STUN_ADDRESS_IPV6, kTestMessagePort2, @@ -629,7 +631,7 @@ TEST_F(StunTest, ReadMessageWithIPv6XorAddressAttribute) { StunMessage msg; size_t size = ReadStunMessage(&msg, kStunMessageWithIPv6XorMappedAddress); - rtc::IPAddress test_address(kIPv6TestAddress1); + IPAddress test_address(kIPv6TestAddress1); CheckStunHeader(msg, STUN_BINDING_RESPONSE, size); CheckStunTransactionID(msg, kTestTransactionId2, kStunTransactionIdLength); @@ -743,7 +745,7 @@ TEST_F(StunTest, ReadRfc5769RequestMessageLongTermAuth) { // kStunMessageWithIPv4MappedAddress, but with a different value where the // magic cookie was. TEST_F(StunTest, ReadLegacyMessage) { - unsigned char rfc3489_packet[sizeof(kStunMessageWithIPv4MappedAddress)]; + uint8_t rfc3489_packet[sizeof(kStunMessageWithIPv4MappedAddress)]; memcpy(rfc3489_packet, kStunMessageWithIPv4MappedAddress, sizeof(kStunMessageWithIPv4MappedAddress)); // Overwrite the magic cookie here. 
@@ -755,7 +757,7 @@ TEST_F(StunTest, ReadLegacyMessage) { CheckStunTransactionID(msg, &rfc3489_packet[4], kStunTransactionIdLength + 4); const StunAddressAttribute* addr = msg.GetAddress(STUN_ATTR_MAPPED_ADDRESS); - rtc::IPAddress test_address(kIPv4TestAddress1); + IPAddress test_address(kIPv4TestAddress1); CheckStunAddressAttribute(addr, STUN_ADDRESS_IPV4, kTestMessagePort4, test_address); } @@ -764,7 +766,7 @@ TEST_F(StunTest, SetIPv6XorAddressAttributeOwner) { StunMessage msg; size_t size = ReadStunMessage(&msg, kStunMessageWithIPv6XorMappedAddress); - rtc::IPAddress test_address(kIPv6TestAddress1); + IPAddress test_address(kIPv6TestAddress1); CheckStunHeader(msg, STUN_BINDING_RESPONSE, size); CheckStunTransactionID(msg, kTestTransactionId2, kStunTransactionIdLength); @@ -783,15 +785,15 @@ TEST_F(StunTest, SetIPv6XorAddressAttributeOwner) { // The internal IP address shouldn't change. ASSERT_EQ(addr2.ipaddr(), addr->ipaddr()); - rtc::ByteBufferWriter correct_buf; - rtc::ByteBufferWriter wrong_buf; + ByteBufferWriter correct_buf; + ByteBufferWriter wrong_buf; EXPECT_TRUE(addr->Write(&correct_buf)); EXPECT_TRUE(addr2.Write(&wrong_buf)); // But when written out, the buffers should look different. ASSERT_NE(0, memcmp(correct_buf.Data(), wrong_buf.Data(), wrong_buf.Length())); // And when reading a known good value, the address should be wrong. - rtc::ByteBufferReader read_buf(correct_buf); + ByteBufferReader read_buf(correct_buf); addr2.Read(&read_buf); ASSERT_NE(addr->ipaddr(), addr2.ipaddr()); addr2.SetIP(addr->ipaddr()); @@ -811,7 +813,7 @@ TEST_F(StunTest, SetIPv4XorAddressAttributeOwner) { StunMessage msg; size_t size = ReadStunMessage(&msg, kStunMessageWithIPv4XorMappedAddress); - rtc::IPAddress test_address(kIPv4TestAddress1); + IPAddress test_address(kIPv4TestAddress1); CheckStunHeader(msg, STUN_BINDING_RESPONSE, size); CheckStunTransactionID(msg, kTestTransactionId1, kStunTransactionIdLength); @@ -830,8 +832,8 @@ TEST_F(StunTest, SetIPv4XorAddressAttributeOwner) { // The internal IP address shouldn't change. ASSERT_EQ(addr2.ipaddr(), addr->ipaddr()); - rtc::ByteBufferWriter correct_buf; - rtc::ByteBufferWriter wrong_buf; + ByteBufferWriter correct_buf; + ByteBufferWriter wrong_buf; EXPECT_TRUE(addr->Write(&correct_buf)); EXPECT_TRUE(addr2.Write(&wrong_buf)); // The same address data should be written. @@ -839,7 +841,7 @@ TEST_F(StunTest, SetIPv4XorAddressAttributeOwner) { memcmp(correct_buf.Data(), wrong_buf.Data(), wrong_buf.Length())); // And an attribute should be able to un-XOR an address belonging to a message // with a different transaction ID. 
- rtc::ByteBufferReader read_buf(correct_buf); + ByteBufferReader read_buf(correct_buf); EXPECT_TRUE(addr2.Read(&read_buf)); ASSERT_EQ(addr->ipaddr(), addr2.ipaddr()); @@ -851,10 +853,10 @@ TEST_F(StunTest, SetIPv4XorAddressAttributeOwner) { } TEST_F(StunTest, CreateIPv6AddressAttribute) { - rtc::IPAddress test_ip(kIPv6TestAddress2); + IPAddress test_ip(kIPv6TestAddress2); auto addr = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); - rtc::SocketAddress test_addr(test_ip, kTestMessagePort2); + SocketAddress test_addr(test_ip, kTestMessagePort2); addr->SetAddress(test_addr); CheckStunAddressAttribute(addr.get(), STUN_ADDRESS_IPV6, kTestMessagePort2, @@ -864,10 +866,10 @@ TEST_F(StunTest, CreateIPv6AddressAttribute) { TEST_F(StunTest, CreateIPv4AddressAttribute) { struct in_addr test_in_addr; test_in_addr.s_addr = 0xBEB0B0BE; - rtc::IPAddress test_ip(test_in_addr); + IPAddress test_ip(test_in_addr); auto addr = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); - rtc::SocketAddress test_addr(test_ip, kTestMessagePort2); + SocketAddress test_addr(test_ip, kTestMessagePort2); addr->SetAddress(test_addr); CheckStunAddressAttribute(addr.get(), STUN_ADDRESS_IPV4, kTestMessagePort2, @@ -876,25 +878,25 @@ TEST_F(StunTest, CreateIPv4AddressAttribute) { // Test that we don't care what order we set the parts of an address TEST_F(StunTest, CreateAddressInArbitraryOrder) { - auto addr = StunAttribute::CreateAddress(STUN_ATTR_DESTINATION_ADDRESS); + auto addr = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); // Port first addr->SetPort(kTestMessagePort1); - addr->SetIP(rtc::IPAddress(kIPv4TestAddress1)); + addr->SetIP(IPAddress(kIPv4TestAddress1)); ASSERT_EQ(kTestMessagePort1, addr->port()); - ASSERT_EQ(rtc::IPAddress(kIPv4TestAddress1), addr->ipaddr()); + ASSERT_EQ(IPAddress(kIPv4TestAddress1), addr->ipaddr()); - auto addr2 = StunAttribute::CreateAddress(STUN_ATTR_DESTINATION_ADDRESS); + auto addr2 = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); // IP first - addr2->SetIP(rtc::IPAddress(kIPv4TestAddress1)); + addr2->SetIP(IPAddress(kIPv4TestAddress1)); addr2->SetPort(kTestMessagePort2); ASSERT_EQ(kTestMessagePort2, addr2->port()); - ASSERT_EQ(rtc::IPAddress(kIPv4TestAddress1), addr2->ipaddr()); + ASSERT_EQ(IPAddress(kIPv4TestAddress1), addr2->ipaddr()); } TEST_F(StunTest, WriteMessageWithIPv6AddressAttribute) { size_t size = sizeof(kStunMessageWithIPv6MappedAddress); - rtc::IPAddress test_ip(kIPv6TestAddress1); + IPAddress test_ip(kIPv6TestAddress1); StunMessage msg( STUN_BINDING_REQUEST, @@ -903,17 +905,17 @@ TEST_F(StunTest, WriteMessageWithIPv6AddressAttribute) { CheckStunTransactionID(msg, kTestTransactionId1, kStunTransactionIdLength); auto addr = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); - rtc::SocketAddress test_addr(test_ip, kTestMessagePort2); + SocketAddress test_addr(test_ip, kTestMessagePort2); addr->SetAddress(test_addr); msg.AddAttribute(std::move(addr)); CheckStunHeader(msg, STUN_BINDING_REQUEST, (size - 20)); - rtc::ByteBufferWriter out; + ByteBufferWriter out; EXPECT_TRUE(msg.Write(&out)); ASSERT_EQ(out.Length(), sizeof(kStunMessageWithIPv6MappedAddress)); int len1 = static_cast(out.Length()); - rtc::ByteBufferReader read_buf(out); + ByteBufferReader read_buf(out); std::string bytes; read_buf.ReadString(&bytes, len1); ASSERT_EQ(0, memcmp(bytes.c_str(), kStunMessageWithIPv6MappedAddress, len1)); @@ -922,7 +924,7 @@ TEST_F(StunTest, WriteMessageWithIPv6AddressAttribute) { TEST_F(StunTest, WriteMessageWithIPv4AddressAttribute) { size_t size = 
sizeof(kStunMessageWithIPv4MappedAddress); - rtc::IPAddress test_ip(kIPv4TestAddress1); + IPAddress test_ip(kIPv4TestAddress1); StunMessage msg( STUN_BINDING_RESPONSE, @@ -931,17 +933,17 @@ TEST_F(StunTest, WriteMessageWithIPv4AddressAttribute) { CheckStunTransactionID(msg, kTestTransactionId1, kStunTransactionIdLength); auto addr = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); - rtc::SocketAddress test_addr(test_ip, kTestMessagePort4); + SocketAddress test_addr(test_ip, kTestMessagePort4); addr->SetAddress(test_addr); msg.AddAttribute(std::move(addr)); CheckStunHeader(msg, STUN_BINDING_RESPONSE, (size - 20)); - rtc::ByteBufferWriter out; + ByteBufferWriter out; EXPECT_TRUE(msg.Write(&out)); ASSERT_EQ(out.Length(), sizeof(kStunMessageWithIPv4MappedAddress)); int len1 = static_cast(out.Length()); - rtc::ByteBufferReader read_buf(out); + ByteBufferReader read_buf(out); std::string bytes; read_buf.ReadString(&bytes, len1); ASSERT_EQ(0, memcmp(bytes.c_str(), kStunMessageWithIPv4MappedAddress, len1)); @@ -950,7 +952,7 @@ TEST_F(StunTest, WriteMessageWithIPv4AddressAttribute) { TEST_F(StunTest, WriteMessageWithIPv6XorAddressAttribute) { size_t size = sizeof(kStunMessageWithIPv6XorMappedAddress); - rtc::IPAddress test_ip(kIPv6TestAddress1); + IPAddress test_ip(kIPv6TestAddress1); StunMessage msg( STUN_BINDING_RESPONSE, @@ -959,17 +961,17 @@ TEST_F(StunTest, WriteMessageWithIPv6XorAddressAttribute) { CheckStunTransactionID(msg, kTestTransactionId2, kStunTransactionIdLength); auto addr = StunAttribute::CreateXorAddress(STUN_ATTR_XOR_MAPPED_ADDRESS); - rtc::SocketAddress test_addr(test_ip, kTestMessagePort1); + SocketAddress test_addr(test_ip, kTestMessagePort1); addr->SetAddress(test_addr); msg.AddAttribute(std::move(addr)); CheckStunHeader(msg, STUN_BINDING_RESPONSE, (size - 20)); - rtc::ByteBufferWriter out; + ByteBufferWriter out; EXPECT_TRUE(msg.Write(&out)); ASSERT_EQ(out.Length(), sizeof(kStunMessageWithIPv6XorMappedAddress)); int len1 = static_cast(out.Length()); - rtc::ByteBufferReader read_buf(out); + ByteBufferReader read_buf(out); std::string bytes; read_buf.ReadString(&bytes, len1); ASSERT_EQ(0, @@ -979,7 +981,7 @@ TEST_F(StunTest, WriteMessageWithIPv6XorAddressAttribute) { TEST_F(StunTest, WriteMessageWithIPv4XoreAddressAttribute) { size_t size = sizeof(kStunMessageWithIPv4XorMappedAddress); - rtc::IPAddress test_ip(kIPv4TestAddress1); + IPAddress test_ip(kIPv4TestAddress1); StunMessage msg( STUN_BINDING_RESPONSE, @@ -988,17 +990,17 @@ TEST_F(StunTest, WriteMessageWithIPv4XoreAddressAttribute) { CheckStunTransactionID(msg, kTestTransactionId1, kStunTransactionIdLength); auto addr = StunAttribute::CreateXorAddress(STUN_ATTR_XOR_MAPPED_ADDRESS); - rtc::SocketAddress test_addr(test_ip, kTestMessagePort3); + SocketAddress test_addr(test_ip, kTestMessagePort3); addr->SetAddress(test_addr); msg.AddAttribute(std::move(addr)); CheckStunHeader(msg, STUN_BINDING_RESPONSE, (size - 20)); - rtc::ByteBufferWriter out; + ByteBufferWriter out; EXPECT_TRUE(msg.Write(&out)); ASSERT_EQ(out.Length(), sizeof(kStunMessageWithIPv4XorMappedAddress)); int len1 = static_cast(out.Length()); - rtc::ByteBufferReader read_buf(out); + ByteBufferReader read_buf(out); std::string bytes; read_buf.ReadString(&bytes, len1); ASSERT_EQ(0, @@ -1091,7 +1093,7 @@ TEST_F(StunTest, WriteMessageWithAnErrorCodeAttribute) { msg.AddAttribute(std::move(errorcode)); CheckStunHeader(msg, STUN_BINDING_ERROR_RESPONSE, (size - 20)); - rtc::ByteBufferWriter out; + ByteBufferWriter out; EXPECT_TRUE(msg.Write(&out)); 
ASSERT_EQ(size, out.Length()); // No padding. @@ -1113,7 +1115,7 @@ TEST_F(StunTest, WriteMessageWithAUInt16ListAttribute) { msg.AddAttribute(std::move(list)); CheckStunHeader(msg, STUN_BINDING_REQUEST, (size - 20)); - rtc::ByteBufferWriter out; + ByteBufferWriter out; EXPECT_TRUE(msg.Write(&out)); ASSERT_EQ(size, out.Length()); // Check everything up to the padding. @@ -1122,10 +1124,9 @@ TEST_F(StunTest, WriteMessageWithAUInt16ListAttribute) { } // Test that we fail to read messages with invalid lengths. -void CheckFailureToRead(const unsigned char* testcase, size_t length) { +void CheckFailureToRead(const uint8_t* testcase, size_t length) { StunMessage msg; - const char* input = reinterpret_cast(testcase); - rtc::ByteBufferReader buf(input, length); + ByteBufferReader buf(MakeArrayView(testcase, length)); ASSERT_FALSE(msg.Read(&buf)); } @@ -1193,15 +1194,15 @@ TEST_F(StunTest, ValidateMessageIntegrity) { // Again, but with the lengths matching what is claimed in the headers. EXPECT_FALSE(StunMessage::ValidateMessageIntegrityForTesting( reinterpret_cast(kStunMessageWithZeroLength), - kStunHeaderSize + rtc::GetBE16(&kStunMessageWithZeroLength[2]), + kStunHeaderSize + webrtc::GetBE16(&kStunMessageWithZeroLength[2]), kRfc5769SampleMsgPassword)); EXPECT_FALSE(StunMessage::ValidateMessageIntegrityForTesting( reinterpret_cast(kStunMessageWithExcessLength), - kStunHeaderSize + rtc::GetBE16(&kStunMessageWithExcessLength[2]), + kStunHeaderSize + webrtc::GetBE16(&kStunMessageWithExcessLength[2]), kRfc5769SampleMsgPassword)); EXPECT_FALSE(StunMessage::ValidateMessageIntegrityForTesting( reinterpret_cast(kStunMessageWithSmallLength), - kStunHeaderSize + rtc::GetBE16(&kStunMessageWithSmallLength[2]), + kStunHeaderSize + webrtc::GetBE16(&kStunMessageWithSmallLength[2]), kRfc5769SampleMsgPassword)); // Check that a too-short HMAC doesn't cause buffer overflow. @@ -1228,36 +1229,32 @@ TEST_F(StunTest, ValidateMessageIntegrity) { // the RFC5769 test messages used include attributes not found in basic STUN. 
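Note on the buffer handling in this test file: the old (const char*, size_t) ByteBufferReader constructor is consistently replaced by one taking an ArrayView of uint8_t, which is why the reinterpret_casts disappear. A short sketch of the two construction forms used above, mirroring the unqualified names inside namespace webrtc (the helper function itself is illustrative):

    #include <cstddef>
    #include <cstdint>

    #include "api/array_view.h"
    #include "rtc_base/byte_buffer.h"

    namespace webrtc {

    inline void ReaderConstructionSketch(const uint8_t* data, size_t size) {
      // Pointer + length: wrap explicitly with MakeArrayView.
      ByteBufferReader from_span(MakeArrayView(data, size));

      // A fixed-size uint8_t array (e.g. kRfc5769SampleRequestWithoutMI above)
      // converts to ArrayView<const uint8_t> implicitly, so no size is passed.
      static const uint8_t kPacket[] = {0x00, 0x01, 0x00, 0x00};
      ByteBufferReader from_array(kPacket);

      (void)from_span;
      (void)from_array;
    }

    }  // namespace webrtc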
TEST_F(StunTest, AddMessageIntegrity) { IceMessage msg; - rtc::ByteBufferReader buf( - reinterpret_cast(kRfc5769SampleRequestWithoutMI), - sizeof(kRfc5769SampleRequestWithoutMI)); + ByteBufferReader buf(kRfc5769SampleRequestWithoutMI); EXPECT_TRUE(msg.Read(&buf)); EXPECT_TRUE(msg.AddMessageIntegrity(kRfc5769SampleMsgPassword)); const StunByteStringAttribute* mi_attr = msg.GetByteString(STUN_ATTR_MESSAGE_INTEGRITY); EXPECT_EQ(20U, mi_attr->length()); - EXPECT_EQ( - 0, memcmp(mi_attr->bytes(), kCalculatedHmac1, sizeof(kCalculatedHmac1))); + EXPECT_EQ(0, memcmp(mi_attr->array_view().data(), kCalculatedHmac1, + sizeof(kCalculatedHmac1))); - rtc::ByteBufferWriter buf1; + ByteBufferWriter buf1; EXPECT_TRUE(msg.Write(&buf1)); EXPECT_TRUE(StunMessage::ValidateMessageIntegrityForTesting( reinterpret_cast(buf1.Data()), buf1.Length(), kRfc5769SampleMsgPassword)); IceMessage msg2; - rtc::ByteBufferReader buf2( - reinterpret_cast(kRfc5769SampleResponseWithoutMI), - sizeof(kRfc5769SampleResponseWithoutMI)); + ByteBufferReader buf2(kRfc5769SampleResponseWithoutMI); EXPECT_TRUE(msg2.Read(&buf2)); EXPECT_TRUE(msg2.AddMessageIntegrity(kRfc5769SampleMsgPassword)); const StunByteStringAttribute* mi_attr2 = msg2.GetByteString(STUN_ATTR_MESSAGE_INTEGRITY); EXPECT_EQ(20U, mi_attr2->length()); - EXPECT_EQ( - 0, memcmp(mi_attr2->bytes(), kCalculatedHmac2, sizeof(kCalculatedHmac2))); + EXPECT_EQ(0, memcmp(mi_attr2->array_view().data(), kCalculatedHmac2, + sizeof(kCalculatedHmac2))); - rtc::ByteBufferWriter buf3; + ByteBufferWriter buf3; EXPECT_TRUE(msg2.Write(&buf3)); EXPECT_TRUE(StunMessage::ValidateMessageIntegrityForTesting( reinterpret_cast(buf3.Data()), buf3.Length(), @@ -1288,15 +1285,15 @@ TEST_F(StunTest, ValidateMessageIntegrity32) { // Again, but with the lengths matching what is claimed in the headers. EXPECT_FALSE(StunMessage::ValidateMessageIntegrity32ForTesting( reinterpret_cast(kStunMessageWithZeroLength), - kStunHeaderSize + rtc::GetBE16(&kStunMessageWithZeroLength[2]), + kStunHeaderSize + webrtc::GetBE16(&kStunMessageWithZeroLength[2]), kRfc5769SampleMsgPassword)); EXPECT_FALSE(StunMessage::ValidateMessageIntegrity32ForTesting( reinterpret_cast(kStunMessageWithExcessLength), - kStunHeaderSize + rtc::GetBE16(&kStunMessageWithExcessLength[2]), + kStunHeaderSize + webrtc::GetBE16(&kStunMessageWithExcessLength[2]), kRfc5769SampleMsgPassword)); EXPECT_FALSE(StunMessage::ValidateMessageIntegrity32ForTesting( reinterpret_cast(kStunMessageWithSmallLength), - kStunHeaderSize + rtc::GetBE16(&kStunMessageWithSmallLength[2]), + kStunHeaderSize + webrtc::GetBE16(&kStunMessageWithSmallLength[2]), kRfc5769SampleMsgPassword)); // Check that a too-short HMAC doesn't cause buffer overflow. @@ -1321,36 +1318,32 @@ TEST_F(StunTest, ValidateMessageIntegrity32) { // Validate that we generate correct MESSAGE-INTEGRITY-32 attributes. 
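The kCalculatedHmac1_32 and kCalculatedHmac2_32 constants above are simply the first four bytes of the corresponding HMAC-SHA1 tags: STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32 carries a MESSAGE-INTEGRITY value truncated to 32 bits. An illustrative sketch of that truncation only, written against OpenSSL (not part of this patch; the production code in stun.cc also rewrites the STUN length field before hashing, as the length-adjustment comments above note):

    #include <openssl/hmac.h>

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Computes HMAC-SHA1 over `msg` with `password` and keeps the first 32 bits,
    // which is all that MESSAGE-INTEGRITY-32 transmits.
    void TruncatedHmacSketch(const uint8_t* msg, size_t msg_len,
                             const uint8_t* password, size_t password_len,
                             uint8_t out32[4]) {
      uint8_t full[20];  // HMAC-SHA1 produces a 160-bit tag.
      unsigned int full_len = 0;
      HMAC(EVP_sha1(), password, static_cast<int>(password_len), msg, msg_len,
           full, &full_len);
      memcpy(out32, full, 4);
    }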
TEST_F(StunTest, AddMessageIntegrity32) { IceMessage msg; - rtc::ByteBufferReader buf( - reinterpret_cast(kRfc5769SampleRequestWithoutMI), - sizeof(kRfc5769SampleRequestWithoutMI)); + ByteBufferReader buf(kRfc5769SampleRequestWithoutMI); EXPECT_TRUE(msg.Read(&buf)); EXPECT_TRUE(msg.AddMessageIntegrity32(kRfc5769SampleMsgPassword)); const StunByteStringAttribute* mi_attr = msg.GetByteString(STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32); EXPECT_EQ(4U, mi_attr->length()); - EXPECT_EQ(0, memcmp(mi_attr->bytes(), kCalculatedHmac1_32, + EXPECT_EQ(0, memcmp(mi_attr->array_view().data(), kCalculatedHmac1_32, sizeof(kCalculatedHmac1_32))); - rtc::ByteBufferWriter buf1; + ByteBufferWriter buf1; EXPECT_TRUE(msg.Write(&buf1)); EXPECT_TRUE(StunMessage::ValidateMessageIntegrity32ForTesting( reinterpret_cast(buf1.Data()), buf1.Length(), kRfc5769SampleMsgPassword)); IceMessage msg2; - rtc::ByteBufferReader buf2( - reinterpret_cast(kRfc5769SampleResponseWithoutMI), - sizeof(kRfc5769SampleResponseWithoutMI)); + ByteBufferReader buf2(kRfc5769SampleResponseWithoutMI); EXPECT_TRUE(msg2.Read(&buf2)); EXPECT_TRUE(msg2.AddMessageIntegrity32(kRfc5769SampleMsgPassword)); const StunByteStringAttribute* mi_attr2 = msg2.GetByteString(STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32); EXPECT_EQ(4U, mi_attr2->length()); - EXPECT_EQ(0, memcmp(mi_attr2->bytes(), kCalculatedHmac2_32, + EXPECT_EQ(0, memcmp(mi_attr2->array_view().data(), kCalculatedHmac2_32, sizeof(kCalculatedHmac2_32))); - rtc::ByteBufferWriter buf3; + ByteBufferWriter buf3; EXPECT_TRUE(msg2.Write(&buf3)); EXPECT_TRUE(StunMessage::ValidateMessageIntegrity32ForTesting( reinterpret_cast(buf3.Data()), buf3.Length(), @@ -1368,7 +1361,7 @@ TEST_F(StunTest, AddMessageIntegrity32AndMessageIntegrity) { msg.AddMessageIntegrity32("password1"); msg.AddMessageIntegrity("password2"); - rtc::ByteBufferWriter buf1; + ByteBufferWriter buf1; EXPECT_TRUE(msg.Write(&buf1)); EXPECT_TRUE(StunMessage::ValidateMessageIntegrity32ForTesting( reinterpret_cast(buf1.Data()), buf1.Length(), "password1")); @@ -1420,168 +1413,16 @@ TEST_F(StunTest, ValidateFingerprint) { TEST_F(StunTest, AddFingerprint) { IceMessage msg; - rtc::ByteBufferReader buf( - reinterpret_cast(kRfc5769SampleRequestWithoutMI), - sizeof(kRfc5769SampleRequestWithoutMI)); + ByteBufferReader buf(kRfc5769SampleRequestWithoutMI); EXPECT_TRUE(msg.Read(&buf)); EXPECT_TRUE(msg.AddFingerprint()); - rtc::ByteBufferWriter buf1; + ByteBufferWriter buf1; EXPECT_TRUE(msg.Write(&buf1)); EXPECT_TRUE(StunMessage::ValidateFingerprint( reinterpret_cast(buf1.Data()), buf1.Length())); } -// Sample "GTURN" relay message. -// clang-format off -// clang formatting doesn't respect inline comments. 
-static const unsigned char kRelayMessage[] = { - 0x00, 0x01, 0x00, 88, // message header - 0x21, 0x12, 0xA4, 0x42, // magic cookie - '0', '1', '2', '3', // transaction id - '4', '5', '6', '7', - '8', '9', 'a', 'b', - 0x00, 0x01, 0x00, 8, // mapped address - 0x00, 0x01, 0x00, 13, - 0x00, 0x00, 0x00, 17, - 0x00, 0x06, 0x00, 12, // username - 'a', 'b', 'c', 'd', - 'e', 'f', 'g', 'h', - 'i', 'j', 'k', 'l', - 0x00, 0x0d, 0x00, 4, // lifetime - 0x00, 0x00, 0x00, 11, - 0x00, 0x0f, 0x00, 4, // magic cookie - 0x72, 0xc6, 0x4b, 0xc6, - 0x00, 0x10, 0x00, 4, // bandwidth - 0x00, 0x00, 0x00, 6, - 0x00, 0x11, 0x00, 8, // destination address - 0x00, 0x01, 0x00, 13, - 0x00, 0x00, 0x00, 17, - 0x00, 0x12, 0x00, 8, // source address 2 - 0x00, 0x01, 0x00, 13, - 0x00, 0x00, 0x00, 17, - 0x00, 0x13, 0x00, 7, // data - 'a', 'b', 'c', 'd', - 'e', 'f', 'g', 0 // DATA must be padded per rfc5766. -}; -// clang-format on - -// Test that we can read the GTURN-specific fields. -TEST_F(StunTest, ReadRelayMessage) { - RelayMessage msg; - - const char* input = reinterpret_cast(kRelayMessage); - size_t size = sizeof(kRelayMessage); - rtc::ByteBufferReader buf(input, size); - EXPECT_TRUE(msg.Read(&buf)); - - EXPECT_EQ(STUN_BINDING_REQUEST, msg.type()); - EXPECT_EQ(size - 20, msg.length()); - EXPECT_EQ("0123456789ab", msg.transaction_id()); - - RelayMessage msg2(STUN_BINDING_REQUEST, "0123456789ab"); - - in_addr legacy_in_addr; - legacy_in_addr.s_addr = htonl(17U); - rtc::IPAddress legacy_ip(legacy_in_addr); - - const StunAddressAttribute* addr = msg.GetAddress(STUN_ATTR_MAPPED_ADDRESS); - ASSERT_TRUE(addr != NULL); - EXPECT_EQ(1, addr->family()); - EXPECT_EQ(13, addr->port()); - EXPECT_EQ(legacy_ip, addr->ipaddr()); - - auto addr2 = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); - addr2->SetPort(13); - addr2->SetIP(legacy_ip); - msg2.AddAttribute(std::move(addr2)); - - const StunByteStringAttribute* bytes = msg.GetByteString(STUN_ATTR_USERNAME); - ASSERT_TRUE(bytes != NULL); - EXPECT_EQ(12U, bytes->length()); - EXPECT_EQ("abcdefghijkl", bytes->string_view()); - - auto bytes2 = StunAttribute::CreateByteString(STUN_ATTR_USERNAME); - bytes2->CopyBytes("abcdefghijkl"); - msg2.AddAttribute(std::move(bytes2)); - - const StunUInt32Attribute* uval = msg.GetUInt32(STUN_ATTR_LIFETIME); - ASSERT_TRUE(uval != NULL); - EXPECT_EQ(11U, uval->value()); - - auto uval2 = StunAttribute::CreateUInt32(STUN_ATTR_LIFETIME); - uval2->SetValue(11); - msg2.AddAttribute(std::move(uval2)); - - bytes = msg.GetByteString(STUN_ATTR_MAGIC_COOKIE); - ASSERT_TRUE(bytes != NULL); - EXPECT_EQ(4U, bytes->length()); - EXPECT_EQ(0, memcmp(bytes->bytes(), TURN_MAGIC_COOKIE_VALUE, - sizeof(TURN_MAGIC_COOKIE_VALUE))); - - bytes2 = StunAttribute::CreateByteString(STUN_ATTR_MAGIC_COOKIE); - bytes2->CopyBytes(reinterpret_cast(TURN_MAGIC_COOKIE_VALUE), - sizeof(TURN_MAGIC_COOKIE_VALUE)); - msg2.AddAttribute(std::move(bytes2)); - - uval = msg.GetUInt32(STUN_ATTR_BANDWIDTH); - ASSERT_TRUE(uval != NULL); - EXPECT_EQ(6U, uval->value()); - - uval2 = StunAttribute::CreateUInt32(STUN_ATTR_BANDWIDTH); - uval2->SetValue(6); - msg2.AddAttribute(std::move(uval2)); - - addr = msg.GetAddress(STUN_ATTR_DESTINATION_ADDRESS); - ASSERT_TRUE(addr != NULL); - EXPECT_EQ(1, addr->family()); - EXPECT_EQ(13, addr->port()); - EXPECT_EQ(legacy_ip, addr->ipaddr()); - - addr2 = StunAttribute::CreateAddress(STUN_ATTR_DESTINATION_ADDRESS); - addr2->SetPort(13); - addr2->SetIP(legacy_ip); - msg2.AddAttribute(std::move(addr2)); - - addr = msg.GetAddress(STUN_ATTR_SOURCE_ADDRESS2); - 
ASSERT_TRUE(addr != NULL); - EXPECT_EQ(1, addr->family()); - EXPECT_EQ(13, addr->port()); - EXPECT_EQ(legacy_ip, addr->ipaddr()); - - addr2 = StunAttribute::CreateAddress(STUN_ATTR_SOURCE_ADDRESS2); - addr2->SetPort(13); - addr2->SetIP(legacy_ip); - msg2.AddAttribute(std::move(addr2)); - - bytes = msg.GetByteString(STUN_ATTR_DATA); - ASSERT_TRUE(bytes != NULL); - EXPECT_EQ(7U, bytes->length()); - EXPECT_EQ("abcdefg", bytes->string_view()); - - bytes2 = StunAttribute::CreateByteString(STUN_ATTR_DATA); - bytes2->CopyBytes("abcdefg"); - msg2.AddAttribute(std::move(bytes2)); - - rtc::ByteBufferWriter out; - EXPECT_TRUE(msg.Write(&out)); - EXPECT_EQ(size, out.Length()); - size_t len1 = out.Length(); - rtc::ByteBufferReader read_buf(out); - std::string outstring; - read_buf.ReadString(&outstring, len1); - EXPECT_EQ(0, memcmp(outstring.c_str(), input, len1)); - - rtc::ByteBufferWriter out2; - EXPECT_TRUE(msg2.Write(&out2)); - EXPECT_EQ(size, out2.Length()); - size_t len2 = out2.Length(); - rtc::ByteBufferReader read_buf2(out2); - std::string outstring2; - read_buf2.ReadString(&outstring2, len2); - EXPECT_EQ(0, memcmp(outstring2.c_str(), input, len2)); -} - // Test that we can remove attribute from a message. TEST_F(StunTest, RemoveAttribute) { StunMessage msg; @@ -1600,8 +1441,9 @@ TEST_F(StunTest, RemoveAttribute) { auto attr = msg.RemoveAttribute(STUN_ATTR_USERNAME); ASSERT_NE(attr, nullptr); EXPECT_EQ(attr->type(), STUN_ATTR_USERNAME); - EXPECT_STREQ("kes", - static_cast(attr.get())->bytes()); + EXPECT_STREQ("kes", static_cast(attr.get()) + ->string_view() + .data()); EXPECT_LT(msg.length(), len); } @@ -1623,8 +1465,9 @@ TEST_F(StunTest, RemoveAttribute) { auto attr = msg.RemoveAttribute(STUN_ATTR_USERNAME); ASSERT_NE(attr, nullptr); EXPECT_EQ(attr->type(), STUN_ATTR_USERNAME); - EXPECT_STREQ("kenta", - static_cast(attr.get())->bytes()); + EXPECT_STREQ("kenta", static_cast(attr.get()) + ->string_view() + .data()); } // Remove should remove the last added occurrence. @@ -1632,8 +1475,9 @@ TEST_F(StunTest, RemoveAttribute) { auto attr = msg.RemoveAttribute(STUN_ATTR_USERNAME); ASSERT_NE(attr, nullptr); EXPECT_EQ(attr->type(), STUN_ATTR_USERNAME); - EXPECT_STREQ("kes", - static_cast(attr.get())->bytes()); + EXPECT_STREQ("kes", static_cast(attr.get()) + ->string_view() + .data()); } // Removing something that does exist should return nullptr. @@ -1656,8 +1500,8 @@ TEST_F(StunTest, ClearAttributes) { // Test CopyStunAttribute TEST_F(StunTest, CopyAttribute) { - rtc::ByteBufferWriter buf; - rtc::ByteBufferWriter* buffer_ptrs[] = {&buf, nullptr}; + ByteBufferWriter buf; + ByteBufferWriter* buffer_ptrs[] = {&buf, nullptr}; // Test both with and without supplied ByteBufferWriter. for (auto buffer_ptr : buffer_ptrs) { { // Test StunByteStringAttribute. @@ -1666,14 +1510,15 @@ TEST_F(StunTest, CopyAttribute) { auto copy = CopyStunAttribute(*attr.get(), buffer_ptr); ASSERT_EQ(copy->value_type(), STUN_VALUE_BYTE_STRING); - EXPECT_STREQ("kes", - static_cast(copy.get())->bytes()); + EXPECT_STREQ("kes", static_cast(copy.get()) + ->string_view() + .data()); } { // Test StunAddressAttribute. 
- rtc::IPAddress test_ip(kIPv6TestAddress2); + IPAddress test_ip(kIPv6TestAddress2); auto addr = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); - rtc::SocketAddress test_addr(test_ip, kTestMessagePort2); + SocketAddress test_addr(test_ip, kTestMessagePort2); addr->SetAddress(test_addr); CheckStunAddressAttribute(addr.get(), STUN_ADDRESS_IPV6, kTestMessagePort2, test_ip); @@ -1685,9 +1530,9 @@ TEST_F(StunTest, CopyAttribute) { } { // Test StunAddressAttribute. - rtc::IPAddress test_ip(kIPv6TestAddress2); + IPAddress test_ip(kIPv6TestAddress2); auto addr = StunAttribute::CreateAddress(STUN_ATTR_XOR_MAPPED_ADDRESS); - rtc::SocketAddress test_addr(test_ip, kTestMessagePort2); + SocketAddress test_addr(test_ip, kTestMessagePort2); addr->SetAddress(test_addr); CheckStunAddressAttribute(addr.get(), STUN_ADDRESS_IPV6, kTestMessagePort2, test_ip); @@ -1721,16 +1566,16 @@ TEST_F(StunTest, Clone) { } { auto addr = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); - addr->SetIP(rtc::IPAddress(kIPv6TestAddress1)); + addr->SetIP(IPAddress(kIPv6TestAddress1)); addr->SetPort(kTestMessagePort1); msg.AddAttribute(std::move(addr)); } auto copy = msg.Clone(); ASSERT_NE(nullptr, copy.get()); - rtc::ByteBufferWriter out1; + ByteBufferWriter out1; EXPECT_TRUE(msg.Write(&out1)); - rtc::ByteBufferWriter out2; + ByteBufferWriter out2; EXPECT_TRUE(copy->Write(&out2)); ASSERT_EQ(out1.Length(), out2.Length()); @@ -1758,20 +1603,21 @@ TEST_F(StunTest, EqualAttributes) { } { auto addr = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); - addr->SetIP(rtc::IPAddress(kIPv6TestAddress1)); + addr->SetIP(IPAddress(kIPv6TestAddress1)); addr->SetPort(kTestMessagePort1); msg.AddAttribute(std::move(addr)); } auto copy = msg.Clone(); ASSERT_NE(nullptr, copy.get()); - EXPECT_TRUE(copy->EqualAttributes(&msg, [](int type) { return true; })); + EXPECT_TRUE(copy->EqualAttributes(&msg, [](int /* type */) { return true; })); { auto attr = StunAttribute::CreateByteString(STUN_ATTR_NONCE); attr->CopyBytes("keso"); msg.AddAttribute(std::move(attr)); - EXPECT_FALSE(copy->EqualAttributes(&msg, [](int type) { return true; })); + EXPECT_FALSE( + copy->EqualAttributes(&msg, [](int /* type */) { return true; })); EXPECT_TRUE(copy->EqualAttributes( &msg, [](int type) { return type != STUN_ATTR_NONCE; })); } @@ -1780,21 +1626,24 @@ TEST_F(StunTest, EqualAttributes) { auto attr = StunAttribute::CreateByteString(STUN_ATTR_NONCE); attr->CopyBytes("keso"); copy->AddAttribute(std::move(attr)); - EXPECT_TRUE(copy->EqualAttributes(&msg, [](int type) { return true; })); + EXPECT_TRUE( + copy->EqualAttributes(&msg, [](int /* type */) { return true; })); } { copy->RemoveAttribute(STUN_ATTR_NONCE); auto attr = StunAttribute::CreateByteString(STUN_ATTR_NONCE); attr->CopyBytes("kent"); copy->AddAttribute(std::move(attr)); - EXPECT_FALSE(copy->EqualAttributes(&msg, [](int type) { return true; })); + EXPECT_FALSE( + copy->EqualAttributes(&msg, [](int /* type */) { return true; })); EXPECT_TRUE(copy->EqualAttributes( &msg, [](int type) { return type != STUN_ATTR_NONCE; })); } { msg.RemoveAttribute(STUN_ATTR_NONCE); - EXPECT_FALSE(copy->EqualAttributes(&msg, [](int type) { return true; })); + EXPECT_FALSE( + copy->EqualAttributes(&msg, [](int /* type */) { return true; })); EXPECT_TRUE(copy->EqualAttributes( &msg, [](int type) { return type != STUN_ATTR_NONCE; })); } @@ -1821,12 +1670,12 @@ TEST_F(StunTest, GoogMiscInfo) { msg.AddAttribute(std::move(list)); CheckStunHeader(msg, STUN_BINDING_REQUEST, (size - 20)); - rtc::ByteBufferWriter out; + 
ByteBufferWriter out; EXPECT_TRUE(msg.Write(&out)); ASSERT_EQ(size, out.Length()); size_t read_size = ReadStunMessageTestCase( - &msg, reinterpret_cast(out.Data()), out.Length()); + &msg, reinterpret_cast(out.Data()), out.Length()); ASSERT_EQ(read_size + 20, size); CheckStunHeader(msg, STUN_BINDING_REQUEST, read_size); const StunUInt16ListAttribute* types = @@ -1852,7 +1701,7 @@ TEST_F(StunTest, SizeRestrictionOnAttributes) { std::string long_string(509, 'x'); long_username->CopyBytes(long_string.c_str(), long_string.size()); msg.AddAttribute(std::move(long_username)); - rtc::ByteBufferWriter out; + ByteBufferWriter out; ASSERT_FALSE(msg.Write(&out)); } @@ -1860,9 +1709,7 @@ TEST_F(StunTest, ValidateMessageIntegrityWithParser) { webrtc::metrics::Reset(); // Ensure counters start from zero. // Try the messages from RFC 5769. StunMessage message; - rtc::ByteBufferReader reader( - reinterpret_cast(kRfc5769SampleRequest), - sizeof(kRfc5769SampleRequest)); + ByteBufferReader reader(kRfc5769SampleRequest); EXPECT_TRUE(message.Read(&reader)); EXPECT_EQ(message.ValidateMessageIntegrity(kRfc5769SampleMsgPassword), StunMessage::IntegrityStatus::kIntegrityOk); @@ -1879,4 +1726,4 @@ TEST_F(StunTest, ValidateMessageIntegrityWithParser) { EXPECT_EQ(webrtc::metrics::NumSamples("WebRTC.Stun.Integrity.Request"), 2); } -} // namespace cricket +} // namespace webrtc diff --git a/api/transport/test/create_feedback_generator.cc b/api/transport/test/create_feedback_generator.cc index 7c83823ce4..6144bccacf 100644 --- a/api/transport/test/create_feedback_generator.cc +++ b/api/transport/test/create_feedback_generator.cc @@ -11,6 +11,7 @@ #include +#include "api/transport/test/feedback_generator_interface.h" #include "test/network/feedback_generator.h" namespace webrtc { diff --git a/api/transport/test/feedback_generator_interface.h b/api/transport/test/feedback_generator_interface.h index 6e5118cbf4..1ffc8b0796 100644 --- a/api/transport/test/feedback_generator_interface.h +++ b/api/transport/test/feedback_generator_interface.h @@ -10,10 +10,15 @@ #ifndef API_TRANSPORT_TEST_FEEDBACK_GENERATOR_INTERFACE_H_ #define API_TRANSPORT_TEST_FEEDBACK_GENERATOR_INTERFACE_H_ +#include #include #include "api/test/simulated_network.h" #include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" namespace webrtc { class FeedbackGenerator { diff --git a/api/transport/test/mock_network_control.h b/api/transport/test/mock_network_control.h index e3a15b8e11..94f4b796dd 100644 --- a/api/transport/test/mock_network_control.h +++ b/api/transport/test/mock_network_control.h @@ -11,7 +11,10 @@ #ifndef API_TRANSPORT_TEST_MOCK_NETWORK_CONTROL_H_ #define API_TRANSPORT_TEST_MOCK_NETWORK_CONTROL_H_ +#include + #include "api/transport/network_control.h" +#include "api/transport/network_types.h" #include "test/gmock.h" namespace webrtc { @@ -67,7 +70,7 @@ class MockNetworkControllerInterface : public NetworkControllerInterface { class MockNetworkStateEstimator : public NetworkStateEstimator { public: - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, GetCurrentEstimate, (), (override)); diff --git a/api/turn_customizer.h b/api/turn_customizer.h index 8d569b36d2..af41649fc3 100644 --- a/api/turn_customizer.h +++ b/api/turn_customizer.h @@ -14,10 +14,7 @@ #include #include "api/transport/stun.h" - -namespace cricket { -class PortInterface; -} // namespace cricket +#include "p2p/base/port_interface.h" namespace webrtc { @@ 
-25,15 +22,14 @@ class TurnCustomizer { public: // This is called before a TURN message is sent. // This could be used to add implementation specific attributes to a request. - virtual void MaybeModifyOutgoingStunMessage( - cricket::PortInterface* port, - cricket::StunMessage* message) = 0; + virtual void MaybeModifyOutgoingStunMessage(PortInterface* port, + StunMessage* message) = 0; // TURN can send data using channel data messages or Send indication. // This method should return false if `data` should be sent using // a Send indication instead of a ChannelData message, even if a // channel is bound. - virtual bool AllowChannelData(cricket::PortInterface* port, + virtual bool AllowChannelData(PortInterface* port, const void* data, size_t size, bool payload) = 0; diff --git a/api/uma_metrics.h b/api/uma_metrics.h index 925ba07576..1525f08526 100644 --- a/api/uma_metrics.h +++ b/api/uma_metrics.h @@ -175,6 +175,55 @@ enum RtcpMuxPolicyUsage { kRtcpMuxPolicyUsageMax }; +// Metrics for SDP munging. +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. Keep in sync with SdpMungingType from +// tools/metrics/histograms/metadata/web_rtc/enums.xml +enum SdpMungingType { + kNoModification = 0, + kUnknownModification = 1, + kWithoutCreateAnswer = 2, + kWithoutCreateOffer = 3, + kNumberOfContents = 4, + // Transport-related munging. + kIceOptions = 20, + kIcePwd = 21, + kIceUfrag = 22, + kIceMode = 23, + kDtlsSetup = 24, + kMid = 25, + kPayloadTypes = 26, + kSsrcs = 27, + kIceOptionsRenomination = 28, + // RTP header extension munging. + kRtpHeaderExtensionRemoved = 40, + kRtpHeaderExtensionAdded = 41, + kRtpHeaderExtensionModified = 42, + // Audio-related munging. + kAudioCodecsRemoved = 60, + kAudioCodecsAdded = 61, + kAudioCodecsReordered = 62, + kAudioCodecsAddedMultiOpus = 63, + kAudioCodecsAddedL16 = 64, + kAudioCodecsRtcpFbAudioNack = 65, + kAudioCodecsFmtpOpusFec = 66, + kAudioCodecsFmtpOpusCbr = 67, + kAudioCodecsFmtpOpusStereo = 68, + kAudioCodecsFmtpOpusDtx = 69, + kAudioCodecsFmtp = 70, + kAudioCodecsRtcpFb = 71, + kAudioCodecsRtcpFbRrtr = 72, + // Video-related munging. + kVideoCodecsRemoved = 80, + kVideoCodecsAdded = 81, + kVideoCodecsReordered = 82, + kVideoCodecsLegacySimulcast = 83, + kVideoCodecsFmtpH264SpsPpsIdrInKeyframe = 84, + kVideoCodecsFmtp = 85, + kVideoCodecsRtcpFb = 86, + kMaxValue, +}; + // When adding new metrics please consider using the style described in // https://chromium.googlesource.com/chromium/src.git/+/HEAD/tools/metrics/histograms/README.md#usage // instead of the legacy enums used above. 
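The SdpMungingType enum added to api/uma_metrics.h above is a persisted UMA enum, which is why its values are spaced in fixed blocks and must never be renumbered or reused. A hypothetical recording site (the histogram name is illustrative, not taken from this patch) would pass kMaxValue as the boundary:

    #include "api/uma_metrics.h"
    #include "system_wrappers/include/metrics.h"

    namespace webrtc {

    void ReportSdpMunging(SdpMungingType type) {
      // Histogram name is a placeholder; keep it in sync with enums.xml.
      RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SdpMungingType", type,
                                kMaxValue);
    }

    }  // namespace webrtc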
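The api/units hunks that follow apply one pattern across DataRate, DataSize, Frequency, TimeDelta and Timestamp: the WEBRTC_UNIT_TEST-guarded operator<< and the ToLogString helpers are dropped in favor of an AbslStringify friend, ToString is exported via RTC_EXPORT, and all of the unit types except Timestamp gain a constexpr default constructor equal to Zero() (Timestamp keeps its deleted default). A sketch of what this enables at call sites (the call sites are illustrative, not part of this patch):

    #include <string>

    #include "absl/strings/str_cat.h"
    #include "api/units/data_rate.h"
    #include "api/units/time_delta.h"

    namespace webrtc {

    inline std::string UnitsSketch() {
      // AbslStringify lets absl::StrCat and other Abseil-aware formatters
      // print units without the old WEBRTC_UNIT_TEST-only ostream operator.
      std::string line = absl::StrCat("target=", DataRate::KilobitsPerSec(300),
                                      " rtt=", TimeDelta::Millis(25));

      // The new constexpr default constructors are equivalent to Zero(),
      // matching the static_asserts added to the unit tests below.
      static_assert(DataRate() == DataRate::Zero());
      static_assert(TimeDelta() == TimeDelta::Zero());
      return line;
    }

    }  // namespace webrtc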
diff --git a/api/units/BUILD.gn b/api/units/BUILD.gn index 45cdcd3097..85d6d1d4e9 100644 --- a/api/units/BUILD.gn +++ b/api/units/BUILD.gn @@ -22,6 +22,7 @@ rtc_library("data_rate") { "..:array_view", "../../rtc_base:checks", "../../rtc_base:stringutils", + "../../rtc_base/system:rtc_export", "../../rtc_base/units:unit_base", ] } @@ -37,6 +38,7 @@ rtc_library("data_size") { "..:array_view", "../../rtc_base:checks", "../../rtc_base:stringutils", + "../../rtc_base/system:rtc_export", "../../rtc_base/units:unit_base", ] } @@ -52,6 +54,7 @@ rtc_library("time_delta") { "..:array_view", "../../rtc_base:checks", "../../rtc_base:stringutils", + "../../rtc_base/system:rtc_export", "../../rtc_base/units:unit_base", ] } @@ -68,6 +71,7 @@ rtc_library("frequency") { "..:array_view", "../../rtc_base:checks", "../../rtc_base:stringutils", + "../../rtc_base/system:rtc_export", "../../rtc_base/units:unit_base", ] } @@ -84,6 +88,7 @@ rtc_library("timestamp") { "..:array_view", "../../rtc_base:checks", "../../rtc_base:stringutils", + "../../rtc_base/system:rtc_export", "../../rtc_base/units:unit_base", ] } @@ -104,6 +109,7 @@ if (rtc_include_tests) { ":frequency", ":time_delta", ":timestamp", + "../../rtc_base:checks", "../../rtc_base:logging", "../../test:test_support", ] diff --git a/api/units/data_rate.cc b/api/units/data_rate.cc index f9586c5f40..7f99a172ed 100644 --- a/api/units/data_rate.cc +++ b/api/units/data_rate.cc @@ -10,6 +10,8 @@ #include "api/units/data_rate.h" +#include + #include "api/array_view.h" #include "rtc_base/strings/string_builder.h" @@ -17,7 +19,7 @@ namespace webrtc { std::string ToString(DataRate value) { char buf[64]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); if (value.IsPlusInfinity()) { sb << "+inf bps"; } else if (value.IsMinusInfinity()) { diff --git a/api/units/data_rate.h b/api/units/data_rate.h index d813c61156..a9400c0442 100644 --- a/api/units/data_rate.h +++ b/api/units/data_rate.h @@ -11,10 +11,7 @@ #ifndef API_UNITS_DATA_RATE_H_ #define API_UNITS_DATA_RATE_H_ -#ifdef WEBRTC_UNIT_TEST -#include // no-presubmit-check TODO(webrtc:8982) -#endif // WEBRTC_UNIT_TEST - +#include #include #include #include @@ -23,6 +20,7 @@ #include "api/units/frequency.h" #include "api/units/time_delta.h" #include "rtc_base/checks.h" +#include "rtc_base/system/rtc_export.h" #include "rtc_base/units/unit_base.h" // IWYU pragma: export namespace webrtc { @@ -48,7 +46,10 @@ class DataRate final : public rtc_units_impl::RelativeUnit { } static constexpr DataRate Infinity() { return PlusInfinity(); } - DataRate() = delete; + constexpr DataRate() = default; + + template + friend void AbslStringify(Sink& sink, DataRate value); template constexpr T bps() const { @@ -137,18 +138,12 @@ inline constexpr DataRate operator*(const Frequency frequency, return size * frequency; } -std::string ToString(DataRate value); -inline std::string ToLogString(DataRate value) { - return ToString(value); -} +RTC_EXPORT std::string ToString(DataRate value); -#ifdef WEBRTC_UNIT_TEST -inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) - std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) - DataRate value) { - return stream << ToString(value); +template +void AbslStringify(Sink& sink, DataRate value) { + sink.Append(ToString(value)); } -#endif // WEBRTC_UNIT_TEST } // namespace webrtc diff --git a/api/units/data_rate_unittest.cc b/api/units/data_rate_unittest.cc index f77b3702d4..1021921b62 100644 --- a/api/units/data_rate_unittest.cc +++ 
b/api/units/data_rate_unittest.cc @@ -10,6 +10,13 @@ #include "api/units/data_rate.h" +#include +#include + +#include "api/units/data_size.h" +#include "api/units/frequency.h" +#include "api/units/time_delta.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "test/gtest.h" @@ -27,6 +34,7 @@ TEST(DataRateTest, ConstExpr) { constexpr int64_t kValue = 12345; constexpr DataRate kDataRateZero = DataRate::Zero(); constexpr DataRate kDataRateInf = DataRate::Infinity(); + static_assert(DataRate() == kDataRateZero); static_assert(kDataRateZero.IsZero(), ""); static_assert(kDataRateInf.IsInfinite(), ""); static_assert(kDataRateInf.bps_or(-1) == -1, ""); diff --git a/api/units/data_size.cc b/api/units/data_size.cc index 45487dfd83..abe4906c92 100644 --- a/api/units/data_size.cc +++ b/api/units/data_size.cc @@ -10,6 +10,8 @@ #include "api/units/data_size.h" +#include + #include "api/array_view.h" #include "rtc_base/strings/string_builder.h" @@ -17,7 +19,7 @@ namespace webrtc { std::string ToString(DataSize value) { char buf[64]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); if (value.IsPlusInfinity()) { sb << "+inf bytes"; } else if (value.IsMinusInfinity()) { diff --git a/api/units/data_size.h b/api/units/data_size.h index 9df6434fb9..78d41b12f8 100644 --- a/api/units/data_size.h +++ b/api/units/data_size.h @@ -11,13 +11,11 @@ #ifndef API_UNITS_DATA_SIZE_H_ #define API_UNITS_DATA_SIZE_H_ -#ifdef WEBRTC_UNIT_TEST -#include // no-presubmit-check TODO(webrtc:8982) -#endif // WEBRTC_UNIT_TEST - +#include #include #include +#include "rtc_base/system/rtc_export.h" #include "rtc_base/units/unit_base.h" // IWYU pragma: export namespace webrtc { @@ -31,7 +29,10 @@ class DataSize final : public rtc_units_impl::RelativeUnit { } static constexpr DataSize Infinity() { return PlusInfinity(); } - DataSize() = delete; + constexpr DataSize() = default; + + template + friend void AbslStringify(Sink& sink, DataSize value); template constexpr T bytes() const { @@ -48,18 +49,12 @@ class DataSize final : public rtc_units_impl::RelativeUnit { static constexpr bool one_sided = true; }; -std::string ToString(DataSize value); -inline std::string ToLogString(DataSize value) { - return ToString(value); -} +RTC_EXPORT std::string ToString(DataSize value); -#ifdef WEBRTC_UNIT_TEST -inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) - std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) - DataSize value) { - return stream << ToString(value); +template +void AbslStringify(Sink& sink, DataSize value) { + sink.Append(ToString(value)); } -#endif // WEBRTC_UNIT_TEST } // namespace webrtc diff --git a/api/units/data_size_unittest.cc b/api/units/data_size_unittest.cc index eb8d98c1f0..b198060ec6 100644 --- a/api/units/data_size_unittest.cc +++ b/api/units/data_size_unittest.cc @@ -10,6 +10,7 @@ #include "api/units/data_size.h" +#include #include #include "test/gtest.h" @@ -21,6 +22,7 @@ TEST(DataSizeTest, ConstExpr) { constexpr int64_t kValue = 12345; constexpr DataSize kDataSizeZero = DataSize::Zero(); constexpr DataSize kDataSizeInf = DataSize::Infinity(); + static_assert(DataSize() == kDataSizeZero); static_assert(kDataSizeZero.IsZero(), ""); static_assert(kDataSizeInf.IsInfinite(), ""); static_assert(kDataSizeInf.bytes_or(-1) == -1, ""); diff --git a/api/units/frequency.cc b/api/units/frequency.cc index 2d938a2814..ca4b52e34c 100644 --- a/api/units/frequency.cc +++ b/api/units/frequency.cc @@ -9,12 +9,15 @@ */ #include "api/units/frequency.h" +#include +#include 
+ #include "rtc_base/strings/string_builder.h" namespace webrtc { std::string ToString(Frequency value) { char buf[64]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); if (value.IsPlusInfinity()) { sb << "+inf Hz"; } else if (value.IsMinusInfinity()) { diff --git a/api/units/frequency.h b/api/units/frequency.h index 06081e4c0d..51c53f9ba8 100644 --- a/api/units/frequency.h +++ b/api/units/frequency.h @@ -10,16 +10,15 @@ #ifndef API_UNITS_FREQUENCY_H_ #define API_UNITS_FREQUENCY_H_ -#ifdef WEBRTC_UNIT_TEST -#include // no-presubmit-check TODO(webrtc:8982) -#endif // WEBRTC_UNIT_TEST - +#include #include #include #include #include #include "api/units/time_delta.h" +#include "rtc_base/checks.h" +#include "rtc_base/system/rtc_export.h" #include "rtc_base/units/unit_base.h" // IWYU pragma: export namespace webrtc { @@ -42,7 +41,10 @@ class Frequency final : public rtc_units_impl::RelativeUnit { return FromFraction(1'000'000, value); } - Frequency() = delete; + constexpr Frequency() = default; + + template + friend void AbslStringify(Sink& sink, Frequency value); template constexpr T hertz() const { @@ -84,18 +86,12 @@ inline constexpr double operator*(TimeDelta time_delta, Frequency frequency) { return frequency * time_delta; } -std::string ToString(Frequency value); -inline std::string ToLogString(Frequency value) { - return ToString(value); -} +RTC_EXPORT std::string ToString(Frequency value); -#ifdef WEBRTC_UNIT_TEST -inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) - std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) - Frequency value) { - return stream << ToString(value); +template +void AbslStringify(Sink& sink, Frequency value) { + sink.Append(ToString(value)); } -#endif // WEBRTC_UNIT_TEST } // namespace webrtc #endif // API_UNITS_FREQUENCY_H_ diff --git a/api/units/frequency_unittest.cc b/api/units/frequency_unittest.cc index 1260c2107d..e426e75f01 100644 --- a/api/units/frequency_unittest.cc +++ b/api/units/frequency_unittest.cc @@ -9,8 +9,9 @@ */ #include "api/units/frequency.h" -#include +#include +#include "api/units/time_delta.h" #include "test/gtest.h" namespace webrtc { @@ -19,6 +20,7 @@ TEST(FrequencyTest, ConstExpr) { constexpr Frequency kFrequencyZero = Frequency::Zero(); constexpr Frequency kFrequencyPlusInf = Frequency::PlusInfinity(); constexpr Frequency kFrequencyMinusInf = Frequency::MinusInfinity(); + static_assert(Frequency() == kFrequencyZero); static_assert(kFrequencyZero.IsZero(), ""); static_assert(kFrequencyPlusInf.IsPlusInfinity(), ""); static_assert(kFrequencyMinusInf.IsMinusInfinity(), ""); diff --git a/api/units/time_delta.cc b/api/units/time_delta.cc index 31bf3e0106..efd5a2a518 100644 --- a/api/units/time_delta.cc +++ b/api/units/time_delta.cc @@ -10,6 +10,8 @@ #include "api/units/time_delta.h" +#include + #include "api/array_view.h" #include "rtc_base/strings/string_builder.h" @@ -17,7 +19,7 @@ namespace webrtc { std::string ToString(TimeDelta value) { char buf[64]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); if (value.IsPlusInfinity()) { sb << "+inf ms"; } else if (value.IsMinusInfinity()) { diff --git a/api/units/time_delta.h b/api/units/time_delta.h index 5981e32dce..29e86c76d4 100644 --- a/api/units/time_delta.h +++ b/api/units/time_delta.h @@ -11,14 +11,12 @@ #ifndef API_UNITS_TIME_DELTA_H_ #define API_UNITS_TIME_DELTA_H_ -#ifdef WEBRTC_UNIT_TEST -#include // no-presubmit-check TODO(webrtc:8982) -#endif // WEBRTC_UNIT_TEST - +#include #include #include #include +#include 
"rtc_base/system/rtc_export.h" #include "rtc_base/units/unit_base.h" // IWYU pragma: export namespace webrtc { @@ -53,7 +51,10 @@ class TimeDelta final : public rtc_units_impl::RelativeUnit { return FromValue(value); } - TimeDelta() = delete; + constexpr TimeDelta() = default; + + template + friend void AbslStringify(Sink& sink, TimeDelta value); template constexpr T seconds() const { @@ -92,18 +93,12 @@ class TimeDelta final : public rtc_units_impl::RelativeUnit { static constexpr bool one_sided = false; }; -std::string ToString(TimeDelta value); -inline std::string ToLogString(TimeDelta value) { - return ToString(value); -} +RTC_EXPORT std::string ToString(TimeDelta value); -#ifdef WEBRTC_UNIT_TEST -inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) - std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) - TimeDelta value) { - return stream << ToString(value); +template +void AbslStringify(Sink& sink, TimeDelta value) { + sink.Append(ToString(value)); } -#endif // WEBRTC_UNIT_TEST } // namespace webrtc diff --git a/api/units/time_delta_unittest.cc b/api/units/time_delta_unittest.cc index 51a7aa233a..8e1aee0c62 100644 --- a/api/units/time_delta_unittest.cc +++ b/api/units/time_delta_unittest.cc @@ -10,6 +10,8 @@ #include "api/units/time_delta.h" +#include +#include #include #include "test/gtest.h" @@ -21,6 +23,7 @@ TEST(TimeDeltaTest, ConstExpr) { constexpr TimeDelta kTimeDeltaZero = TimeDelta::Zero(); constexpr TimeDelta kTimeDeltaPlusInf = TimeDelta::PlusInfinity(); constexpr TimeDelta kTimeDeltaMinusInf = TimeDelta::MinusInfinity(); + static_assert(TimeDelta() == kTimeDeltaZero); static_assert(kTimeDeltaZero.IsZero(), ""); static_assert(kTimeDeltaPlusInf.IsPlusInfinity(), ""); static_assert(kTimeDeltaMinusInf.IsMinusInfinity(), ""); diff --git a/api/units/timestamp.cc b/api/units/timestamp.cc index fc4f419596..38b0d1157d 100644 --- a/api/units/timestamp.cc +++ b/api/units/timestamp.cc @@ -10,13 +10,15 @@ #include "api/units/timestamp.h" +#include + #include "api/array_view.h" #include "rtc_base/strings/string_builder.h" namespace webrtc { std::string ToString(Timestamp value) { char buf[64]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); if (value.IsPlusInfinity()) { sb << "+inf ms"; } else if (value.IsMinusInfinity()) { diff --git a/api/units/timestamp.h b/api/units/timestamp.h index 8aabe05cad..7831bba2d0 100644 --- a/api/units/timestamp.h +++ b/api/units/timestamp.h @@ -11,15 +11,13 @@ #ifndef API_UNITS_TIMESTAMP_H_ #define API_UNITS_TIMESTAMP_H_ -#ifdef WEBRTC_UNIT_TEST -#include // no-presubmit-check TODO(webrtc:8982) -#endif // WEBRTC_UNIT_TEST - +#include #include #include #include "api/units/time_delta.h" #include "rtc_base/checks.h" +#include "rtc_base/system/rtc_export.h" #include "rtc_base/units/unit_base.h" // IWYU pragma: export namespace webrtc { @@ -47,6 +45,9 @@ class Timestamp final : public rtc_units_impl::UnitBase { Timestamp() = delete; + template + friend void AbslStringify(Sink& sink, Timestamp value); + template constexpr T seconds() const { return ToFraction<1000000, T>(); @@ -121,18 +122,12 @@ class Timestamp final : public rtc_units_impl::UnitBase { static constexpr bool one_sided = true; }; -std::string ToString(Timestamp value); -inline std::string ToLogString(Timestamp value) { - return ToString(value); -} +RTC_EXPORT std::string ToString(Timestamp value); -#ifdef WEBRTC_UNIT_TEST -inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) - std::ostream& stream, // no-presubmit-check 
TODO(webrtc:8982) - Timestamp value) { - return stream << ToString(value); +template +void AbslStringify(Sink& sink, Timestamp value) { + sink.Append(ToString(value)); } -#endif // WEBRTC_UNIT_TEST } // namespace webrtc diff --git a/api/units/timestamp_unittest.cc b/api/units/timestamp_unittest.cc index f49b8ddde6..1b88cb7f42 100644 --- a/api/units/timestamp_unittest.cc +++ b/api/units/timestamp_unittest.cc @@ -10,8 +10,10 @@ #include "api/units/timestamp.h" +#include #include +#include "api/units/time_delta.h" #include "test/gtest.h" namespace webrtc { diff --git a/api/video/BUILD.gn b/api/video/BUILD.gn index 807fdcc3a9..98591e6050 100644 --- a/api/video/BUILD.gn +++ b/api/video/BUILD.gn @@ -36,10 +36,6 @@ rtc_library("video_rtp_headers") { "../units:data_rate", "../units:time_delta", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("video_frame") { @@ -67,17 +63,20 @@ rtc_library("video_frame") { ":video_rtp_headers", "..:array_view", "..:make_ref_counted", + "..:ref_count", "..:rtp_packet_info", "..:scoped_refptr", "..:video_track_source_constraints", "../../rtc_base:checks", "../../rtc_base:refcount", + "../../rtc_base:safe_conversions", "../../rtc_base:timeutils", "../../rtc_base/memory:aligned_malloc", "../../rtc_base/system:rtc_export", + "../units:time_delta", + "../units:timestamp", "//third_party/libyuv", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } if (is_android) { @@ -103,7 +102,9 @@ rtc_library("video_frame_i010") { "..:scoped_refptr", "../../rtc_base:checks", "../../rtc_base:refcount", + "../../rtc_base:safe_conversions", "../../rtc_base/memory:aligned_malloc", + "../../rtc_base/system:rtc_export", "//third_party/libyuv", ] } @@ -126,8 +127,10 @@ rtc_source_set("recordable_encoded_frame") { rtc_source_set("video_frame_type") { visibility = [ "*" ] sources = [ "video_frame_type.h" ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] - deps = [ "../../rtc_base:checks" ] + deps = [ + "../../rtc_base:checks", + "//third_party/abseil-cpp/absl/strings:string_view", + ] } rtc_source_set("render_resolution") { @@ -138,6 +141,12 @@ rtc_source_set("render_resolution") { rtc_source_set("resolution") { visibility = [ "*" ] public = [ "resolution.h" ] + deps = [ "../../rtc_base/system:rtc_export" ] +} + +rtc_source_set("corruption_detection_filter_settings") { + visibility = [ "*" ] + public = [ "corruption_detection_filter_settings.h" ] } rtc_library("encoded_image") { @@ -147,19 +156,22 @@ rtc_library("encoded_image") { "encoded_image.h", ] deps = [ + ":corruption_detection_filter_settings", ":video_codec_constants", ":video_frame", ":video_frame_type", ":video_rtp_headers", + "..:make_ref_counted", + "..:ref_count", "..:refcountedbase", "..:rtp_packet_info", "..:scoped_refptr", + "../../rtc_base:buffer", "../../rtc_base:checks", "../../rtc_base:refcount", "../../rtc_base/system:rtc_export", "../units:timestamp", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("encoded_frame") { @@ -172,11 +184,12 @@ rtc_library("encoded_frame") { deps = [ ":encoded_image", ":video_frame", + "../../common_video:frame_instrumentation_data", "../../modules/rtp_rtcp:rtp_video_header", + "../../modules/video_coding:codec_globals_headers", "../../modules/video_coding:video_codec_interface", "../units:timestamp", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("rtp_video_frame_assembler") { @@ -188,17 +201,22 @@ 
rtc_library("rtp_video_frame_assembler") { deps = [ ":encoded_frame", + ":encoded_image", + ":video_frame_type", + ":video_rtp_headers", + "..:array_view", + "..:rtp_packet_info", + "..:scoped_refptr", "../../modules/rtp_rtcp:rtp_rtcp", "../../modules/rtp_rtcp:rtp_rtcp_format", + "../../modules/rtp_rtcp:rtp_video_header", "../../modules/video_coding:packet_buffer", "../../modules/video_coding:video_coding", + "../../rtc_base:checks", "../../rtc_base:logging", "../../rtc_base:rtc_numerics", - ] - - absl_deps = [ + "../transport/rtp:dependency_descriptor", "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -207,15 +225,43 @@ rtc_library("rtp_video_frame_assembler_unittests") { sources = [ "rtp_video_frame_assembler_unittests.cc" ] deps = [ + ":encoded_frame", ":rtp_video_frame_assembler", + ":video_frame", + ":video_frame_type", "..:array_view", "../../modules/rtp_rtcp:rtp_packetizer_av1_test_helper", "../../modules/rtp_rtcp:rtp_rtcp", "../../modules/rtp_rtcp:rtp_rtcp_format", + "../../modules/rtp_rtcp:rtp_video_header", + "../../modules/video_coding:codec_globals_headers", + "../../rtc_base:checks", "../../test:test_support", + "../transport/rtp:dependency_descriptor", ] } +if (rtc_use_h265) { + rtc_library("rtp_video_frame_h265_assembler_unittests") { + testonly = true + sources = [ "rtp_video_frame_h265_assembler_unittests.cc" ] + + deps = [ + ":encoded_frame", + ":rtp_video_frame_assembler", + ":video_frame", + ":video_frame_type", + "..:array_view", + "../../modules/rtp_rtcp:rtp_rtcp", + "../../modules/rtp_rtcp:rtp_rtcp_format", + "../../modules/rtp_rtcp:rtp_video_header", + "../../modules/video_coding:codec_globals_headers", + "../../rtc_base:checks", + "../../test:test_support", + ] + } +} + rtc_source_set("video_codec_constants") { visibility = [ "*" ] sources = [ "video_codec_constants.h" ] @@ -235,14 +281,15 @@ rtc_library("video_bitrate_allocation") { "../../rtc_base:stringutils", "../../rtc_base/system:rtc_export", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("video_layers_allocation") { visibility = [ "*" ] sources = [ "video_layers_allocation.h" ] - deps = [ "../units:data_rate" ] - absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector" ] + deps = [ + "../units:data_rate", + "//third_party/abseil-cpp/absl/container:inlined_vector", + ] } rtc_library("video_bitrate_allocator") { @@ -262,6 +309,7 @@ rtc_source_set("video_bitrate_allocator_factory") { sources = [ "video_bitrate_allocator_factory.h" ] deps = [ ":video_bitrate_allocator", + "../environment", "../video_codecs:video_codecs_api", ] } @@ -300,7 +348,6 @@ rtc_source_set("video_stream_encoder") { "../units:data_rate", "../video_codecs:video_codecs_api", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("video_frame_metadata") { @@ -317,11 +364,7 @@ rtc_source_set("video_frame_metadata") { "../../modules/video_coding:codec_globals_headers", "../../rtc_base/system:rtc_export", "../transport/rtp:dependency_descriptor", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/types:optional", - "//third_party/abseil-cpp/absl/types:variant", ] } @@ -336,13 +379,15 @@ rtc_library("builtin_video_bitrate_allocator_factory") { ":video_bitrate_allocation", ":video_bitrate_allocator", ":video_bitrate_allocator_factory", + ":video_frame", "../../api:scoped_refptr", "../../media:rtc_media_base", 
"../../modules/video_coding:video_coding_utility", "../../modules/video_coding/svc:svc_rate_allocator", + "../environment", "../video_codecs:video_codecs_api", + "//third_party/abseil-cpp/absl/base:core_headers", ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ] } rtc_library("frame_buffer") { @@ -352,17 +397,15 @@ rtc_library("frame_buffer") { "frame_buffer.h", ] deps = [ + "..:array_view", "../../api:field_trials_view", "../../api/units:timestamp", "../../api/video:encoded_frame", "../../modules/video_coding:video_coding_utility", "../../rtc_base:logging", "../../rtc_base:rtc_numerics", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/types:optional", ] } diff --git a/api/video/DEPS b/api/video/DEPS index 1dd2943d1f..fd3ad70862 100644 --- a/api/video/DEPS +++ b/api/video/DEPS @@ -3,8 +3,10 @@ specific_include_rules = { "+modules/rtp_rtcp/source/rtp_video_header.h", "+modules/video_coding/include/video_codec_interface.h", "+modules/video_coding/include/video_coding_defines.h", + "+common_video/frame_instrumentation_data.h", ], "encoded_image\.h" : [ + "+rtc_base/buffer.h", "+rtc_base/ref_count.h", ], diff --git a/api/video/OWNERS b/api/video/OWNERS index 49b62f3780..a30dab0903 100644 --- a/api/video/OWNERS +++ b/api/video/OWNERS @@ -1,5 +1,6 @@ brandtr@webrtc.org magjed@webrtc.org philipel@webrtc.org +sprang@webrtc.org per-file video_timing.h=ilnik@webrtc.org diff --git a/api/video/builtin_video_bitrate_allocator_factory.cc b/api/video/builtin_video_bitrate_allocator_factory.cc index 252ae210b6..7b95c2aab9 100644 --- a/api/video/builtin_video_bitrate_allocator_factory.cc +++ b/api/video/builtin_video_bitrate_allocator_factory.cc @@ -12,9 +12,10 @@ #include -#include "absl/base/attributes.h" -#include "absl/base/macros.h" +#include "api/environment/environment.h" #include "api/video/video_bitrate_allocator.h" +#include "api/video/video_bitrate_allocator_factory.h" +#include "api/video/video_codec_type.h" #include "api/video_codecs/video_codec.h" #include "modules/video_coding/svc/svc_rate_allocator.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" @@ -29,16 +30,17 @@ class BuiltinVideoBitrateAllocatorFactory BuiltinVideoBitrateAllocatorFactory() = default; ~BuiltinVideoBitrateAllocatorFactory() override = default; - std::unique_ptr CreateVideoBitrateAllocator( + std::unique_ptr Create( + const Environment& env, const VideoCodec& codec) override { // TODO(https://crbug.com/webrtc/14884): Update SvcRateAllocator to // support simulcast and use it for VP9/AV1 simulcast as well. if ((codec.codecType == kVideoCodecAV1 || codec.codecType == kVideoCodecVP9) && codec.numberOfSimulcastStreams <= 1) { - return std::make_unique(codec); + return std::make_unique(codec, env.field_trials()); } - return std::make_unique(codec); + return std::make_unique(env, codec); } }; diff --git a/api/video/color_space.cc b/api/video/color_space.cc index a0cd32edb2..776cd1205c 100644 --- a/api/video/color_space.cc +++ b/api/video/color_space.cc @@ -10,6 +10,14 @@ #include "api/video/color_space.h" +#include +#include +#include +#include + +#include "api/video/hdr_metadata.h" +#include "rtc_base/strings/string_builder.h" + namespace webrtc { namespace { // Try to convert `enum_value` into the enum class T. 
`enum_bitmask` is created @@ -93,8 +101,8 @@ ColorSpace::ColorSpace(PrimaryID primaries, range_(range), chroma_siting_horizontal_(chroma_siting_horz), chroma_siting_vertical_(chroma_siting_vert), - hdr_metadata_(hdr_metadata ? absl::make_optional(*hdr_metadata) - : absl::nullopt) {} + hdr_metadata_(hdr_metadata ? std::make_optional(*hdr_metadata) + : std::nullopt) {} ColorSpace::PrimaryID ColorSpace::primaries() const { return primaries_; @@ -124,6 +132,80 @@ const HdrMetadata* ColorSpace::hdr_metadata() const { return hdr_metadata_ ? &*hdr_metadata_ : nullptr; } +#define PRINT_ENUM_CASE(TYPE, NAME) \ + case TYPE::NAME: \ + ss << #NAME; \ + break; + +std::string ColorSpace::AsString() const { + char buf[1024]; + SimpleStringBuilder ss(buf); + ss << "{primaries:"; + switch (primaries_) { + PRINT_ENUM_CASE(PrimaryID, kBT709) + PRINT_ENUM_CASE(PrimaryID, kUnspecified) + PRINT_ENUM_CASE(PrimaryID, kBT470M) + PRINT_ENUM_CASE(PrimaryID, kBT470BG) + PRINT_ENUM_CASE(PrimaryID, kSMPTE170M) + PRINT_ENUM_CASE(PrimaryID, kSMPTE240M) + PRINT_ENUM_CASE(PrimaryID, kFILM) + PRINT_ENUM_CASE(PrimaryID, kBT2020) + PRINT_ENUM_CASE(PrimaryID, kSMPTEST428) + PRINT_ENUM_CASE(PrimaryID, kSMPTEST431) + PRINT_ENUM_CASE(PrimaryID, kSMPTEST432) + PRINT_ENUM_CASE(PrimaryID, kJEDECP22) + } + ss << ", transfer:"; + switch (transfer_) { + PRINT_ENUM_CASE(TransferID, kBT709) + PRINT_ENUM_CASE(TransferID, kUnspecified) + PRINT_ENUM_CASE(TransferID, kGAMMA22) + PRINT_ENUM_CASE(TransferID, kGAMMA28) + PRINT_ENUM_CASE(TransferID, kSMPTE170M) + PRINT_ENUM_CASE(TransferID, kSMPTE240M) + PRINT_ENUM_CASE(TransferID, kLINEAR) + PRINT_ENUM_CASE(TransferID, kLOG) + PRINT_ENUM_CASE(TransferID, kLOG_SQRT) + PRINT_ENUM_CASE(TransferID, kIEC61966_2_4) + PRINT_ENUM_CASE(TransferID, kBT1361_ECG) + PRINT_ENUM_CASE(TransferID, kIEC61966_2_1) + PRINT_ENUM_CASE(TransferID, kBT2020_10) + PRINT_ENUM_CASE(TransferID, kBT2020_12) + PRINT_ENUM_CASE(TransferID, kSMPTEST2084) + PRINT_ENUM_CASE(TransferID, kSMPTEST428) + PRINT_ENUM_CASE(TransferID, kARIB_STD_B67) + } + ss << ", matrix:"; + switch (matrix_) { + PRINT_ENUM_CASE(MatrixID, kRGB) + PRINT_ENUM_CASE(MatrixID, kBT709) + PRINT_ENUM_CASE(MatrixID, kUnspecified) + PRINT_ENUM_CASE(MatrixID, kFCC) + PRINT_ENUM_CASE(MatrixID, kBT470BG) + PRINT_ENUM_CASE(MatrixID, kSMPTE170M) + PRINT_ENUM_CASE(MatrixID, kSMPTE240M) + PRINT_ENUM_CASE(MatrixID, kYCOCG) + PRINT_ENUM_CASE(MatrixID, kBT2020_NCL) + PRINT_ENUM_CASE(MatrixID, kBT2020_CL) + PRINT_ENUM_CASE(MatrixID, kSMPTE2085) + PRINT_ENUM_CASE(MatrixID, kCDNCLS) + PRINT_ENUM_CASE(MatrixID, kCDCLS) + PRINT_ENUM_CASE(MatrixID, kBT2100_ICTCP) + } + + ss << ", range:"; + switch (range_) { + PRINT_ENUM_CASE(RangeID, kInvalid) + PRINT_ENUM_CASE(RangeID, kLimited) + PRINT_ENUM_CASE(RangeID, kFull) + PRINT_ENUM_CASE(RangeID, kDerived) + } + ss << "}"; + return ss.str(); +} + +#undef PRINT_ENUM_CASE + bool ColorSpace::set_primaries_from_uint8(uint8_t enum_value) { constexpr PrimaryID kPrimaryIds[] = { PrimaryID::kBT709, PrimaryID::kUnspecified, PrimaryID::kBT470M, @@ -181,7 +263,7 @@ bool ColorSpace::set_chroma_siting_vertical_from_uint8(uint8_t enum_value) { void ColorSpace::set_hdr_metadata(const HdrMetadata* hdr_metadata) { hdr_metadata_ = - hdr_metadata ? absl::make_optional(*hdr_metadata) : absl::nullopt; + hdr_metadata ? 
std::make_optional(*hdr_metadata) : std::nullopt; } } // namespace webrtc diff --git a/api/video/color_space.h b/api/video/color_space.h index e491c52e72..00f5cb9229 100644 --- a/api/video/color_space.h +++ b/api/video/color_space.h @@ -13,7 +13,9 @@ #include -#include "absl/types/optional.h" +#include +#include + #include "api/video/hdr_metadata.h" #include "rtc_base/system/rtc_export.h" @@ -155,6 +157,7 @@ class RTC_EXPORT ColorSpace { ChromaSiting chroma_siting_horizontal() const; ChromaSiting chroma_siting_vertical() const; const HdrMetadata* hdr_metadata() const; + std::string AsString() const; bool set_primaries_from_uint8(uint8_t enum_value); bool set_transfer_from_uint8(uint8_t enum_value); @@ -171,7 +174,7 @@ class RTC_EXPORT ColorSpace { RangeID range_ = RangeID::kInvalid; ChromaSiting chroma_siting_horizontal_ = ChromaSiting::kUnspecified; ChromaSiting chroma_siting_vertical_ = ChromaSiting::kUnspecified; - absl::optional hdr_metadata_; + std::optional hdr_metadata_; }; } // namespace webrtc diff --git a/api/video/corruption_detection_filter_settings.h b/api/video/corruption_detection_filter_settings.h new file mode 100644 index 0000000000..080a88e834 --- /dev/null +++ b/api/video/corruption_detection_filter_settings.h @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CORRUPTION_DETECTION_FILTER_SETTINGS_H_ +#define API_VIDEO_CORRUPTION_DETECTION_FILTER_SETTINGS_H_ + +#include + +namespace webrtc { + +// Filter settings for automatic corruption detection. See +// http://www.webrtc.org/experiments/rtp-hdrext/corruption-detection for more +// information. +struct CorruptionDetectionFilterSettings { + // Size of the blur kernel used. + double std_dev = 0.0; + // Allowed error thresholds (maps to `Y err` and `UV err` respectively). + int luma_error_threshold = 0; + int chroma_error_threshold = 0; +}; + +} // namespace webrtc + +#endif // API_VIDEO_CORRUPTION_DETECTION_FILTER_SETTINGS_H_ diff --git a/api/video/encoded_frame.cc b/api/video/encoded_frame.cc index cf3d4a20e5..7ceffb6f80 100644 --- a/api/video/encoded_frame.cc +++ b/api/video/encoded_frame.cc @@ -10,21 +10,29 @@ #include "api/video/encoded_frame.h" -#include "absl/types/optional.h" +#include +#include +#include + +#include "api/units/timestamp.h" +#include "api/video/video_codec_type.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" namespace webrtc { -absl::optional EncodedFrame::ReceivedTimestamp() const { +std::optional EncodedFrame::ReceivedTimestamp() const { return ReceivedTime() >= 0 - ? absl::make_optional(Timestamp::Millis(ReceivedTime())) - : absl::nullopt; + ? std::make_optional(Timestamp::Millis(ReceivedTime())) + : std::nullopt; } -absl::optional EncodedFrame::RenderTimestamp() const { +std::optional EncodedFrame::RenderTimestamp() const { return RenderTimeMs() >= 0 - ? absl::make_optional(Timestamp::Millis(RenderTimeMs())) - : absl::nullopt; + ? 
std::make_optional(Timestamp::Millis(RenderTimeMs())) + : std::nullopt; } bool EncodedFrame::delayed_by_retransmission() const { @@ -36,7 +44,7 @@ void EncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header) { switch (header->codec) { case kVideoCodecVP8: { const auto& vp8_header = - absl::get(header->video_type_header); + std::get(header->video_type_header); if (_codecSpecificInfo.codecType != kVideoCodecVP8) { // This is the first packet for this frame. _codecSpecificInfo.codecSpecific.VP8.temporalIdx = 0; @@ -58,7 +66,7 @@ void EncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header) { } case kVideoCodecVP9: { const auto& vp9_header = - absl::get(header->video_type_header); + std::get(header->video_type_header); if (_codecSpecificInfo.codecType != kVideoCodecVP9) { // This is the first packet for this frame. _codecSpecificInfo.codecSpecific.VP9.temporal_idx = 0; diff --git a/api/video/encoded_frame.h b/api/video/encoded_frame.h index 1e626f0800..18c426bb1f 100644 --- a/api/video/encoded_frame.h +++ b/api/video/encoded_frame.h @@ -14,13 +14,15 @@ #include #include -#include "absl/types/optional.h" +#include +#include + #include "api/units/timestamp.h" #include "api/video/encoded_image.h" #include "api/video/video_codec_type.h" +#include "common_video/frame_instrumentation_data.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/include/video_codec_interface.h" -#include "modules/video_coding/include/video_coding_defines.h" namespace webrtc { @@ -39,7 +41,7 @@ class EncodedFrame : public EncodedImage { virtual int64_t ReceivedTime() const { return -1; } // Returns a Timestamp from `ReceivedTime`, or nullopt if there is no receive // time. - absl::optional ReceivedTimestamp() const; + std::optional ReceivedTimestamp() const; // When this frame should be rendered. // TODO(bugs.webrtc.org/13756): Use Timestamp instead of int. @@ -48,7 +50,7 @@ class EncodedFrame : public EncodedImage { int64_t RenderTimeMs() const { return _renderTimeMs; } // Returns a Timestamp from `RenderTime`, or nullopt if there is no // render time. - absl::optional RenderTimestamp() const; + std::optional RenderTimestamp() const; // This information is currently needed by the timing calculation class. // TODO(philipel): Remove this function when a new timing class has @@ -74,6 +76,12 @@ class EncodedFrame : public EncodedImage { void SetCodecSpecific(const CodecSpecificInfo* codec_specific) { _codecSpecificInfo = *codec_specific; } + void SetFrameInstrumentationData( + const std::optional< + std::variant> + frame_instrumentation) { + _codecSpecificInfo.frame_instrumentation_data = frame_instrumentation; + } // TODO(philipel): Add simple modify/access functions to prevent adding too // many `references`. 
diff --git a/api/video/encoded_image.cc b/api/video/encoded_image.cc index 09224c3c49..c5f1787f1a 100644 --- a/api/video/encoded_image.cc +++ b/api/video/encoded_image.cc @@ -11,52 +11,54 @@ #include "api/video/encoded_image.h" #include -#include + +#include +#include +#include + +#include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" +#include "api/units/timestamp.h" +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" namespace webrtc { -EncodedImageBuffer::EncodedImageBuffer(size_t size) : size_(size) { - buffer_ = static_cast(malloc(size)); -} +EncodedImageBuffer::EncodedImageBuffer(size_t size) : buffer_(size) {} EncodedImageBuffer::EncodedImageBuffer(const uint8_t* data, size_t size) - : EncodedImageBuffer(size) { - memcpy(buffer_, data, size); -} + : buffer_(data, size) {} -EncodedImageBuffer::~EncodedImageBuffer() { - free(buffer_); -} +EncodedImageBuffer::EncodedImageBuffer(Buffer buffer) + : buffer_(std::move(buffer)) {} // static -rtc::scoped_refptr EncodedImageBuffer::Create(size_t size) { - return rtc::make_ref_counted(size); +scoped_refptr EncodedImageBuffer::Create(size_t size) { + return make_ref_counted(size); } // static -rtc::scoped_refptr EncodedImageBuffer::Create( +scoped_refptr EncodedImageBuffer::Create( const uint8_t* data, size_t size) { - return rtc::make_ref_counted(data, size); + return make_ref_counted(data, size); +} +// static +scoped_refptr EncodedImageBuffer::Create(Buffer buffer) { + return make_ref_counted(std::move(buffer)); } const uint8_t* EncodedImageBuffer::data() const { - return buffer_; + return buffer_.data(); } uint8_t* EncodedImageBuffer::data() { - return buffer_; + return buffer_.data(); } size_t EncodedImageBuffer::size() const { - return size_; + return buffer_.size(); } void EncodedImageBuffer::Realloc(size_t size) { - // Calling realloc with size == 0 is equivalent to free, and returns nullptr. - // Which is confusing on systems where malloc(0) doesn't return a nullptr. - // More specifically, it breaks expectations of - // VCMSessionInfo::UpdateDataPointers. 
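The EncodedImageBuffer rewrite in this file replaces hand-managed malloc/realloc/free with an owning rtc::Buffer, which is what makes the deleted realloc caveat unnecessary. A rough sketch of the same ownership pattern, using std::vector<uint8_t> as a stand-in for rtc::Buffer (illustrative only, not the actual WebRTC class):

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

// The container owns both pointer and size, so "Realloc" is just a resize and
// shrinking to zero is well defined, unlike realloc(ptr, 0) whose result
// varies between C libraries.
class OwnedImageBuffer {
 public:
  explicit OwnedImageBuffer(size_t size) : buffer_(size) {}
  OwnedImageBuffer(const uint8_t* data, size_t size)
      : buffer_(data, data + size) {}

  const uint8_t* data() const { return buffer_.data(); }
  uint8_t* data() { return buffer_.data(); }
  size_t size() const { return buffer_.size(); }

  // Keeps existing contents up to min(old, new) size, like the Buffer-backed
  // Realloc in the patch.
  void Realloc(size_t size) { buffer_.resize(size); }

 private:
  std::vector<uint8_t> buffer_;
};
```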
- RTC_DCHECK(size > 0); - buffer_ = static_cast(realloc(buffer_, size)); - size_ = size; + buffer_.SetSize(size); } EncodedImage::EncodedImage() = default; @@ -80,14 +82,14 @@ webrtc::Timestamp EncodedImage::CaptureTime() const { : Timestamp::MinusInfinity(); } -absl::optional EncodedImage::SpatialLayerFrameSize( +std::optional EncodedImage::SpatialLayerFrameSize( int spatial_index) const { RTC_DCHECK_GE(spatial_index, 0); RTC_DCHECK_LE(spatial_index, spatial_index_.value_or(0)); auto it = spatial_layer_frame_size_bytes_.find(spatial_index); if (it == spatial_layer_frame_size_bytes_.end()) { - return absl::nullopt; + return std::nullopt; } return it->second; diff --git a/api/video/encoded_image.h b/api/video/encoded_image.h index 5fc4a0e60a..ea48495054 100644 --- a/api/video/encoded_image.h +++ b/api/video/encoded_image.h @@ -13,21 +13,24 @@ #include +#include #include +#include #include -#include "absl/types/optional.h" +#include "api/ref_count.h" #include "api/rtp_packet_infos.h" #include "api/scoped_refptr.h" #include "api/units/timestamp.h" #include "api/video/color_space.h" +#include "api/video/corruption_detection_filter_settings.h" #include "api/video/video_codec_constants.h" #include "api/video/video_content_type.h" #include "api/video/video_frame_type.h" #include "api/video/video_rotation.h" #include "api/video/video_timing.h" +#include "rtc_base/buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -35,22 +38,28 @@ namespace webrtc { // Abstract interface for buffer storage. Intended to support buffers owned by // external encoders with special release requirements, e.g, java encoders with // releaseOutputBuffer. -class EncodedImageBufferInterface : public rtc::RefCountInterface { +class EncodedImageBufferInterface : public RefCountInterface { public: + using value_type = uint8_t; + virtual const uint8_t* data() const = 0; // TODO(bugs.webrtc.org/9378): Make interface essentially read-only, delete // this non-const data method. virtual uint8_t* data() = 0; virtual size_t size() const = 0; + + const uint8_t* begin() const { return data(); } + const uint8_t* end() const { return data() + size(); } }; // Basic implementation of EncodedImageBufferInterface. class RTC_EXPORT EncodedImageBuffer : public EncodedImageBufferInterface { public: - static rtc::scoped_refptr Create() { return Create(0); } - static rtc::scoped_refptr Create(size_t size); - static rtc::scoped_refptr Create(const uint8_t* data, - size_t size); + static scoped_refptr Create() { return Create(0); } + static scoped_refptr Create(size_t size); + static scoped_refptr Create(const uint8_t* data, + size_t size); + static scoped_refptr Create(Buffer buffer); const uint8_t* data() const override; uint8_t* data() override; @@ -60,10 +69,9 @@ class RTC_EXPORT EncodedImageBuffer : public EncodedImageBufferInterface { protected: explicit EncodedImageBuffer(size_t size); EncodedImageBuffer(const uint8_t* data, size_t size); - ~EncodedImageBuffer(); + explicit EncodedImageBuffer(Buffer buffer); - size_t size_; - uint8_t* buffer_; + Buffer buffer_; }; // TODO(bug.webrtc.org/9378): This is a legacy api class, which is slowly being @@ -79,48 +87,47 @@ class RTC_EXPORT EncodedImage { EncodedImage& operator=(EncodedImage&&); EncodedImage& operator=(const EncodedImage&); - // TODO(bugs.webrtc.org/9378): Change style to timestamp(), set_timestamp(), - // for consistency with the VideoFrame class. Set frame timestamp (90kHz). 
- void SetTimestamp(uint32_t timestamp) { timestamp_rtp_ = timestamp; } - - // Get frame timestamp (90kHz). - uint32_t Timestamp() const { return timestamp_rtp_; } + // Frame capture time in RTP timestamp representation (90kHz). + void SetRtpTimestamp(uint32_t timestamp) { timestamp_rtp_ = timestamp; } + uint32_t RtpTimestamp() const { return timestamp_rtp_; } void SetEncodeTime(int64_t encode_start_ms, int64_t encode_finish_ms); - webrtc::Timestamp CaptureTime() const; + // Frame capture time in local time. + Timestamp CaptureTime() const; + // Frame capture time in ntp epoch time, i.e. time since 1st Jan 1900 int64_t NtpTimeMs() const { return ntp_time_ms_; } // Every simulcast layer (= encoding) has its own encoder and RTP stream. // There can be no dependencies between different simulcast layers. - absl::optional SimulcastIndex() const { return simulcast_index_; } - void SetSimulcastIndex(absl::optional simulcast_index) { + std::optional SimulcastIndex() const { return simulcast_index_; } + void SetSimulcastIndex(std::optional simulcast_index) { RTC_DCHECK_GE(simulcast_index.value_or(0), 0); RTC_DCHECK_LT(simulcast_index.value_or(0), kMaxSimulcastStreams); simulcast_index_ = simulcast_index; } - const absl::optional& CaptureTimeIdentifier() const { - return capture_time_identifier_; + const std::optional& PresentationTimestamp() const { + return presentation_timestamp_; } - void SetCaptureTimeIdentifier( - const absl::optional& capture_time_identifier) { - capture_time_identifier_ = capture_time_identifier; + void SetPresentationTimestamp( + const std::optional& presentation_timestamp) { + presentation_timestamp_ = presentation_timestamp; } // Encoded images can have dependencies between spatial and/or temporal // layers, depending on the scalability mode used by the encoder. See diagrams // at https://w3c.github.io/webrtc-svc/#dependencydiagrams*. - absl::optional SpatialIndex() const { return spatial_index_; } - void SetSpatialIndex(absl::optional spatial_index) { + std::optional SpatialIndex() const { return spatial_index_; } + void SetSpatialIndex(std::optional spatial_index) { RTC_DCHECK_GE(spatial_index.value_or(0), 0); RTC_DCHECK_LT(spatial_index.value_or(0), kMaxSpatialLayers); spatial_index_ = spatial_index; } - absl::optional TemporalIndex() const { return temporal_index_; } - void SetTemporalIndex(absl::optional temporal_index) { + std::optional TemporalIndex() const { return temporal_index_; } + void SetTemporalIndex(std::optional temporal_index) { RTC_DCHECK_GE(temporal_index_.value_or(0), 0); RTC_DCHECK_LT(temporal_index_.value_or(0), kMaxTemporalStreams); temporal_index_ = temporal_index; @@ -128,30 +135,30 @@ class RTC_EXPORT EncodedImage { // These methods can be used to set/get size of subframe with spatial index // `spatial_index` on encoded frames that consist of multiple spatial layers. - absl::optional SpatialLayerFrameSize(int spatial_index) const; + std::optional SpatialLayerFrameSize(int spatial_index) const; void SetSpatialLayerFrameSize(int spatial_index, size_t size_bytes); const webrtc::ColorSpace* ColorSpace() const { return color_space_ ? 
&*color_space_ : nullptr; } - void SetColorSpace(const absl::optional& color_space) { + void SetColorSpace(const std::optional& color_space) { color_space_ = color_space; } - absl::optional PlayoutDelay() const { + std::optional PlayoutDelay() const { return playout_delay_; } - void SetPlayoutDelay(absl::optional playout_delay) { + void SetPlayoutDelay(std::optional playout_delay) { playout_delay_ = playout_delay; } // These methods along with the private member video_frame_tracking_id_ are // meant for media quality testing purpose only. - absl::optional VideoFrameTrackingId() const { + std::optional VideoFrameTrackingId() const { return video_frame_tracking_id_; } - void SetVideoFrameTrackingId(absl::optional tracking_id) { + void SetVideoFrameTrackingId(std::optional tracking_id) { video_frame_tracking_id_ = tracking_id; } @@ -172,8 +179,7 @@ class RTC_EXPORT EncodedImage { size_ = new_size; } - void SetEncodedData( - rtc::scoped_refptr encoded_data) { + void SetEncodedData(scoped_refptr encoded_data) { encoded_data_ = encoded_data; size_ = encoded_data->size(); } @@ -183,7 +189,7 @@ class RTC_EXPORT EncodedImage { size_ = 0; } - rtc::scoped_refptr GetEncodedData() const { + scoped_refptr GetEncodedData() const { return encoded_data_; } @@ -191,16 +197,29 @@ class RTC_EXPORT EncodedImage { return encoded_data_ ? encoded_data_->data() : nullptr; } + const uint8_t* begin() const { return data(); } + const uint8_t* end() const { return data() + size(); } + // Returns whether the encoded image can be considered to be of target // quality. - bool IsAtTargetQuality() const { return at_target_quality_; } + [[deprecated]] bool IsAtTargetQuality() const { return at_target_quality_; } // Sets that the encoded image can be considered to be of target quality to // true or false. - void SetAtTargetQuality(bool at_target_quality) { + [[deprecated]] void SetAtTargetQuality(bool at_target_quality) { at_target_quality_ = at_target_quality; } + // Returns whether the frame that was encoded is a steady-state refresh frame + // intended to improve the visual quality. + bool IsSteadyStateRefreshFrame() const { + return is_steady_state_refresh_frame_; + } + + void SetIsSteadyStateRefreshFrame(bool refresh_frame) { + is_steady_state_refresh_frame_ = refresh_frame; + } + webrtc::VideoFrameType FrameType() const { return _frameType; } void SetFrameType(webrtc::VideoFrameType frame_type) { @@ -209,6 +228,15 @@ class RTC_EXPORT EncodedImage { VideoContentType contentType() const { return content_type_; } VideoRotation rotation() const { return rotation_; } + std::optional + corruption_detection_filter_settings() const { + return corruption_detection_filter_settings_; + } + void set_corruption_detection_filter_settings( + const CorruptionDetectionFilterSettings& settings) { + corruption_detection_filter_settings_ = settings; + } + uint32_t _encodedWidth = 0; uint32_t _encodedHeight = 0; // NTP time of the capture time in local timebase in milliseconds. @@ -239,20 +267,20 @@ class RTC_EXPORT EncodedImage { // When set, indicates that all future frames will be constrained with those // limits until the application indicates a change again. - absl::optional playout_delay_; + std::optional playout_delay_; - rtc::scoped_refptr encoded_data_; + scoped_refptr encoded_data_; size_t size_ = 0; // Size of encoded frame data. 
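The new corruption_detection_filter_settings() accessors above pair with the CorruptionDetectionFilterSettings struct added earlier in this diff. A hedged sketch of how an encoder implementation might populate them; the threshold values are arbitrary examples, not recommended defaults:

```cpp
#include "api/video/corruption_detection_filter_settings.h"
#include "api/video/encoded_image.h"

// Illustrative only: values below are made up for the example.
void AttachFilterSettings(webrtc::EncodedImage& image) {
  webrtc::CorruptionDetectionFilterSettings settings;
  settings.std_dev = 1.5;               // Blur kernel standard deviation.
  settings.luma_error_threshold = 4;    // Maps to `Y err` in the RTP extension.
  settings.chroma_error_threshold = 6;  // Maps to `UV err`.
  image.set_corruption_detection_filter_settings(settings);
}

bool HasFilterSettings(const webrtc::EncodedImage& image) {
  return image.corruption_detection_filter_settings().has_value();
}
```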
uint32_t timestamp_rtp_ = 0; - absl::optional simulcast_index_; - absl::optional capture_time_identifier_; - absl::optional spatial_index_; - absl::optional temporal_index_; + std::optional simulcast_index_; + std::optional presentation_timestamp_; + std::optional spatial_index_; + std::optional temporal_index_; std::map spatial_layer_frame_size_bytes_; - absl::optional color_space_; + std::optional color_space_; // This field is meant for media quality testing purpose only. When enabled it // carries the webrtc::VideoFrame id field from the sender to the receiver. - absl::optional video_frame_tracking_id_; + std::optional video_frame_tracking_id_; // Information about packets used to assemble this video frame. This is needed // by `SourceTracker` when the frame is delivered to the RTCRtpReceiver's // MediaStreamTrack, in order to implement getContributingSources(). See: @@ -261,6 +289,15 @@ class RTC_EXPORT EncodedImage { bool retransmission_allowed_ = true; // True if the encoded image can be considered to be of target quality. bool at_target_quality_ = false; + // True if the frame that was encoded is a steady-state refresh frame intended + // to improve the visual quality. + bool is_steady_state_refresh_frame_ = false; + + // Filter settings for corruption detection suggested by the encoder + // implementation, if any. Otherwise generic per-codec-type settings will be + // used. + std::optional + corruption_detection_filter_settings_; }; } // namespace webrtc diff --git a/api/video/frame_buffer.cc b/api/video/frame_buffer.cc index 4cdf2212a6..17a030b2c4 100644 --- a/api/video/frame_buffer.cc +++ b/api/video/frame_buffer.cc @@ -11,9 +11,18 @@ #include "api/video/frame_buffer.h" #include +#include +#include +#include +#include +#include +#include #include "absl/algorithm/container.h" #include "absl/container/inlined_vector.h" +#include "api/array_view.h" +#include "api/field_trials_view.h" +#include "api/video/encoded_frame.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/sequence_number_util.h" @@ -37,7 +46,7 @@ bool ValidReferences(const EncodedFrame& frame) { // Since FrameBuffer::FrameInfo is private it can't be used in the function // signature, hence the FrameIteratorT type. 
template -rtc::ArrayView GetReferences(const FrameIteratorT& it) { +ArrayView GetReferences(const FrameIteratorT& it) { return {it->second.encoded_frame->references, std::min(it->second.encoded_frame->num_references, EncodedFrame::kMaxFrameReferences)}; @@ -50,7 +59,7 @@ int64_t GetFrameId(const FrameIteratorT& it) { template uint32_t GetTimestamp(const FrameIteratorT& it) { - return it->second.encoded_frame->Timestamp(); + return it->second.encoded_frame->RtpTimestamp(); } template @@ -76,7 +85,7 @@ bool FrameBuffer::InsertFrame(std::unique_ptr frame) { if (frame->Id() <= decoded_frame_history_.GetLastDecodedFrameId()) { if (legacy_frame_id_jump_behavior_ && frame->is_keyframe() && - AheadOf(frame->Timestamp(), + AheadOf(frame->RtpTimestamp(), *decoded_frame_history_.GetLastDecodedFrameTimestamp())) { RTC_DLOG(LS_WARNING) << "Keyframe " << frame->Id() @@ -148,15 +157,15 @@ void FrameBuffer::DropNextDecodableTemporalUnit() { FindNextAndLastDecodableTemporalUnit(); } -absl::optional FrameBuffer::LastContinuousFrameId() const { +std::optional FrameBuffer::LastContinuousFrameId() const { return last_continuous_frame_id_; } -absl::optional FrameBuffer::LastContinuousTemporalUnitFrameId() const { +std::optional FrameBuffer::LastContinuousTemporalUnitFrameId() const { return last_continuous_temporal_unit_frame_id_; } -absl::optional +std::optional FrameBuffer::DecodableTemporalUnitsInfo() const { return decodable_temporal_units_info_; } diff --git a/api/video/frame_buffer.h b/api/video/frame_buffer.h index 94edf64d5a..e607a182c2 100644 --- a/api/video/frame_buffer.h +++ b/api/video/frame_buffer.h @@ -11,12 +11,13 @@ #ifndef API_VIDEO_FRAME_BUFFER_H_ #define API_VIDEO_FRAME_BUFFER_H_ +#include +#include #include #include -#include +#include #include "absl/container/inlined_vector.h" -#include "absl/types/optional.h" #include "api/field_trials_view.h" #include "api/video/encoded_frame.h" #include "modules/video_coding/utility/decoded_frames_history.h" @@ -60,9 +61,9 @@ class FrameBuffer { // Drop all frames in the next decodable unit. 
void DropNextDecodableTemporalUnit(); - absl::optional LastContinuousFrameId() const; - absl::optional LastContinuousTemporalUnitFrameId() const; - absl::optional DecodableTemporalUnitsInfo() const; + std::optional LastContinuousFrameId() const; + std::optional LastContinuousTemporalUnitFrameId() const; + std::optional DecodableTemporalUnitsInfo() const; int GetTotalNumberOfContinuousTemporalUnits() const; int GetTotalNumberOfDroppedFrames() const; @@ -91,10 +92,10 @@ class FrameBuffer { const bool legacy_frame_id_jump_behavior_; const size_t max_size_; FrameMap frames_; - absl::optional next_decodable_temporal_unit_; - absl::optional decodable_temporal_units_info_; - absl::optional last_continuous_frame_id_; - absl::optional last_continuous_temporal_unit_frame_id_; + std::optional next_decodable_temporal_unit_; + std::optional decodable_temporal_units_info_; + std::optional last_continuous_frame_id_; + std::optional last_continuous_temporal_unit_frame_id_; video_coding::DecodedFramesHistory decoded_frame_history_; int num_continuous_temporal_units_ = 0; diff --git a/api/video/frame_buffer_unittest.cc b/api/video/frame_buffer_unittest.cc index 92e2f67540..d97c7ec914 100644 --- a/api/video/frame_buffer_unittest.cc +++ b/api/video/frame_buffer_unittest.cc @@ -9,9 +9,9 @@ */ #include "api/video/frame_buffer.h" +#include #include -#include "api/video/encoded_frame.h" #include "test/fake_encoded_frame.h" #include "test/gmock.h" #include "test/gtest.h" @@ -36,7 +36,7 @@ TEST(FrameBuffer3Test, RejectInvalidRefs) { // Ref must be less than the id of this frame. EXPECT_FALSE(buffer.InsertFrame( test::FakeFrameBuilder().Time(0).Id(0).Refs({0}).AsLast().Build())); - EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(absl::nullopt)); + EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(std::nullopt)); // Duplicate ids are also invalid. 
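The frame_buffer.cc hunk above compares 32-bit RTP timestamps with AheadOf(), which has to account for wrap-around. A standalone sketch of the underlying modular comparison; this is a hand-written illustration of the idea, not WebRTC's helper (the real one lives in rtc_base/numerics/sequence_number_util.h):

```cpp
#include <cstdint>

// `a` is "ahead of" `b` if the forward distance b -> a (mod 2^32) is less than
// half the range. This keeps ordering correct across the 32-bit wrap.
constexpr bool RtpTimestampAheadOf(uint32_t a, uint32_t b) {
  return a != b && static_cast<uint32_t>(a - b) < 0x80000000u;
}

// A timestamp just past the wrap point is still ahead, even though it is
// numerically smaller.
static_assert(RtpTimestampAheadOf(0x00000010u, 0xFFFFFFF0u), "");
static_assert(!RtpTimestampAheadOf(0xFFFFFFF0u, 0x00000010u), "");
```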
EXPECT_TRUE(buffer.InsertFrame( @@ -50,13 +50,13 @@ TEST(FrameBuffer3Test, LastContinuousUpdatesOnInsertedFrames) { test::ScopedKeyValueConfig field_trials; FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100, field_trials); - EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(absl::nullopt)); - EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(absl::nullopt)); + EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(std::nullopt)); + EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(std::nullopt)); EXPECT_TRUE( buffer.InsertFrame(test::FakeFrameBuilder().Time(10).Id(1).Build())); EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(1)); - EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(absl::nullopt)); + EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(std::nullopt)); EXPECT_TRUE(buffer.InsertFrame( test::FakeFrameBuilder().Time(10).Id(2).Refs({1}).AsLast().Build())); @@ -87,7 +87,7 @@ TEST(FrameBuffer3Test, LastContinuousTemporalUnit) { EXPECT_TRUE( buffer.InsertFrame(test::FakeFrameBuilder().Time(10).Id(1).Build())); - EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(absl::nullopt)); + EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(std::nullopt)); EXPECT_TRUE(buffer.InsertFrame( test::FakeFrameBuilder().Time(10).Id(2).Refs({1}).AsLast().Build())); EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(2)); @@ -104,7 +104,7 @@ TEST(FrameBuffer3Test, LastContinuousTemporalUnitReordering) { test::FakeFrameBuilder().Time(20).Id(3).Refs({1}).Build())); EXPECT_TRUE(buffer.InsertFrame( test::FakeFrameBuilder().Time(20).Id(4).Refs({2, 3}).AsLast().Build())); - EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(absl::nullopt)); + EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(std::nullopt)); EXPECT_TRUE(buffer.InsertFrame( test::FakeFrameBuilder().Time(10).Id(2).Refs({1}).AsLast().Build())); @@ -116,7 +116,7 @@ TEST(FrameBuffer3Test, NextDecodable) { FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100, field_trials); - EXPECT_THAT(buffer.DecodableTemporalUnitsInfo(), Eq(absl::nullopt)); + EXPECT_THAT(buffer.DecodableTemporalUnitsInfo(), Eq(std::nullopt)); EXPECT_TRUE(buffer.InsertFrame( test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build())); EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->next_rtp_timestamp, Eq(10U)); diff --git a/api/video/i010_buffer.cc b/api/video/i010_buffer.cc index d78e854eb9..e9cbe09298 100644 --- a/api/video/i010_buffer.cc +++ b/api/video/i010_buffer.cc @@ -9,12 +9,20 @@ */ #include "api/video/i010_buffer.h" +#include #include #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" #include "rtc_base/checks.h" +#include "rtc_base/memory/aligned_malloc.h" +#include "rtc_base/numerics/safe_conversions.h" #include "third_party/libyuv/include/libyuv/convert.h" +#include "third_party/libyuv/include/libyuv/convert_from.h" +#include "third_party/libyuv/include/libyuv/rotate.h" #include "third_party/libyuv/include/libyuv/scale.h" // Aligning pointer to 64 bytes for improved performance, e.g. use SIMD. 
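The frame_buffer_unittest.cc updates above swap Eq(absl::nullopt) for Eq(std::nullopt). A small sketch of matching std::optional-returning accessors with the gtest/gmock wrappers those tests already use; LastId() is a made-up stand-in for accessors such as LastContinuousFrameId():

```cpp
#include <cstdint>
#include <optional>

#include "test/gmock.h"
#include "test/gtest.h"

namespace {

// Made-up stand-in for an optional-returning accessor.
std::optional<int64_t> LastId(bool has_value) {
  if (!has_value) return std::nullopt;
  return 7;
}

TEST(OptionalMatchingSketch, EmptyAndEngaged) {
  using ::testing::Eq;
  using ::testing::Optional;
  // Disengaged case, as in the updated expectations above.
  EXPECT_THAT(LastId(false), Eq(std::nullopt));
  // gMock's Optional() matcher checks the contained value when engaged.
  EXPECT_THAT(LastId(true), Optional(Eq(7)));
}

}  // namespace
```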
@@ -25,9 +33,14 @@ namespace webrtc { namespace { -int I010DataSize(int height, int stride_y, int stride_u, int stride_v) { - return kBytesPerPixel * - (stride_y * height + (stride_u + stride_v) * ((height + 1) / 2)); +int I010DataSize(int width, + int height, + int stride_y, + int stride_u, + int stride_v) { + CheckValidDimensions(width, height, stride_y, stride_u, stride_v); + int64_t h = height, y = stride_y, u = stride_u, v = stride_v; + return checked_cast(kBytesPerPixel * (y * h + (u + v) * ((h + 1) / 2))); } } // namespace @@ -42,12 +55,9 @@ I010Buffer::I010Buffer(int width, stride_y_(stride_y), stride_u_(stride_u), stride_v_(stride_v), - data_(static_cast( - AlignedMalloc(I010DataSize(height, stride_y, stride_u, stride_v), - kBufferAlignment))) { - RTC_DCHECK_GT(width, 0); - RTC_DCHECK_GT(height, 0); - RTC_DCHECK_GE(stride_y, width); + data_(static_cast(AlignedMalloc( + I010DataSize(width, height, stride_y, stride_u, stride_v), + kBufferAlignment))) { RTC_DCHECK_GE(stride_u, (width + 1) / 2); RTC_DCHECK_GE(stride_v, (width + 1) / 2); } @@ -55,17 +65,16 @@ I010Buffer::I010Buffer(int width, I010Buffer::~I010Buffer() {} // static -rtc::scoped_refptr I010Buffer::Create(int width, int height) { - return rtc::make_ref_counted(width, height, width, - (width + 1) / 2, (width + 1) / 2); +scoped_refptr I010Buffer::Create(int width, int height) { + return make_ref_counted(width, height, width, (width + 1) / 2, + (width + 1) / 2); } // static -rtc::scoped_refptr I010Buffer::Copy( - const I010BufferInterface& source) { +scoped_refptr I010Buffer::Copy(const I010BufferInterface& source) { const int width = source.width(); const int height = source.height(); - rtc::scoped_refptr buffer = Create(width, height); + scoped_refptr buffer = Create(width, height); int res = libyuv::I010Copy( source.DataY(), source.StrideY(), source.DataU(), source.StrideU(), source.DataV(), source.StrideV(), buffer->MutableDataY(), @@ -77,11 +86,10 @@ rtc::scoped_refptr I010Buffer::Copy( } // static -rtc::scoped_refptr I010Buffer::Copy( - const I420BufferInterface& source) { +scoped_refptr I010Buffer::Copy(const I420BufferInterface& source) { const int width = source.width(); const int height = source.height(); - rtc::scoped_refptr buffer = Create(width, height); + scoped_refptr buffer = Create(width, height); int res = libyuv::I420ToI010( source.DataY(), source.StrideY(), source.DataU(), source.StrideU(), source.DataV(), source.StrideV(), buffer->MutableDataY(), @@ -93,9 +101,8 @@ rtc::scoped_refptr I010Buffer::Copy( } // static -rtc::scoped_refptr I010Buffer::Rotate( - const I010BufferInterface& src, - VideoRotation rotation) { +scoped_refptr I010Buffer::Rotate(const I010BufferInterface& src, + VideoRotation rotation) { if (rotation == webrtc::kVideoRotation_0) return Copy(src); @@ -109,7 +116,7 @@ rtc::scoped_refptr I010Buffer::Rotate( std::swap(rotated_width, rotated_height); } - rtc::scoped_refptr buffer = + scoped_refptr buffer = Create(rotated_width, rotated_height); int res = libyuv::I010Rotate( @@ -123,9 +130,8 @@ rtc::scoped_refptr I010Buffer::Rotate( return buffer; } -rtc::scoped_refptr I010Buffer::ToI420() { - rtc::scoped_refptr i420_buffer = - I420Buffer::Create(width(), height()); +scoped_refptr I010Buffer::ToI420() { + scoped_refptr i420_buffer = I420Buffer::Create(width(), height()); int res = libyuv::I010ToI420( DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), i420_buffer->MutableDataY(), i420_buffer->StrideY(), diff --git a/api/video/i010_buffer.h b/api/video/i010_buffer.h index 
11e0879fec..17191c59e4 100644 --- a/api/video/i010_buffer.h +++ b/api/video/i010_buffer.h @@ -26,20 +26,20 @@ namespace webrtc { class I010Buffer : public I010BufferInterface { public: // Create a new buffer. - static rtc::scoped_refptr Create(int width, int height); + static scoped_refptr Create(int width, int height); // Create a new buffer and copy the pixel data. - static rtc::scoped_refptr Copy(const I010BufferInterface& buffer); + static scoped_refptr Copy(const I010BufferInterface& buffer); // Convert and put I420 buffer into a new buffer. - static rtc::scoped_refptr Copy(const I420BufferInterface& buffer); + static scoped_refptr Copy(const I420BufferInterface& buffer); // Return a rotated copy of `src`. - static rtc::scoped_refptr Rotate(const I010BufferInterface& src, - VideoRotation rotation); + static scoped_refptr Rotate(const I010BufferInterface& src, + VideoRotation rotation); // VideoFrameBuffer implementation. - rtc::scoped_refptr ToI420() override; + scoped_refptr ToI420() override; // PlanarYuv16BBuffer implementation. int width() const override; diff --git a/api/video/i210_buffer.cc b/api/video/i210_buffer.cc index c83c8a0c0b..c01214eff8 100644 --- a/api/video/i210_buffer.cc +++ b/api/video/i210_buffer.cc @@ -9,13 +9,21 @@ */ #include "api/video/i210_buffer.h" +#include #include #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" #include "api/video/i422_buffer.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" #include "rtc_base/checks.h" +#include "rtc_base/memory/aligned_malloc.h" +#include "rtc_base/numerics/safe_conversions.h" #include "third_party/libyuv/include/libyuv/convert.h" +#include "third_party/libyuv/include/libyuv/planar_functions.h" +#include "third_party/libyuv/include/libyuv/rotate.h" #include "third_party/libyuv/include/libyuv/scale.h" // Aligning pointer to 64 bytes for improved performance, e.g. use SIMD. 
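i010_buffer.{h,cc} above (like the I210/I410 siblings that follow) expose static Create/Copy/Rotate factories returning scoped_refptr plus a ToI420() conversion. A hedged usage sketch, written against the unqualified webrtc:: spelling this diff migrates to (older trees spell it rtc::scoped_refptr); error handling omitted:

```cpp
#include "api/scoped_refptr.h"
#include "api/video/i010_buffer.h"
#include "api/video/video_frame_buffer.h"
#include "api/video/video_rotation.h"

// Make a rotated copy of a 10-bit buffer, then convert it to 8-bit I420 for
// consumers that only handle 4:2:0 8-bit input.
webrtc::scoped_refptr<webrtc::I420BufferInterface> RotatedAsI420(
    const webrtc::I010BufferInterface& src) {
  webrtc::scoped_refptr<webrtc::I010Buffer> rotated =
      webrtc::I010Buffer::Rotate(src, webrtc::kVideoRotation_90);
  return rotated->ToI420();
}
```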
@@ -26,9 +34,14 @@ namespace webrtc { namespace { -int I210DataSize(int height, int stride_y, int stride_u, int stride_v) { - return kBytesPerPixel * - (stride_y * height + stride_u * height + stride_v * height); +int I210DataSize(int width, + int height, + int stride_y, + int stride_u, + int stride_v) { + CheckValidDimensions(width, height, stride_y, stride_u, stride_v); + int64_t h = height, y = stride_y, u = stride_u, v = stride_v; + return checked_cast(kBytesPerPixel * (y * h + u * h + v * h)); } } // namespace @@ -43,12 +56,9 @@ I210Buffer::I210Buffer(int width, stride_y_(stride_y), stride_u_(stride_u), stride_v_(stride_v), - data_(static_cast( - AlignedMalloc(I210DataSize(height, stride_y, stride_u, stride_v), - kBufferAlignment))) { - RTC_DCHECK_GT(width, 0); - RTC_DCHECK_GT(height, 0); - RTC_DCHECK_GE(stride_y, width); + data_(static_cast(AlignedMalloc( + I210DataSize(width, height, stride_y, stride_u, stride_v), + kBufferAlignment))) { RTC_DCHECK_GE(stride_u, (width + 1) / 2); RTC_DCHECK_GE(stride_v, (width + 1) / 2); } @@ -56,17 +66,16 @@ I210Buffer::I210Buffer(int width, I210Buffer::~I210Buffer() {} // static -rtc::scoped_refptr I210Buffer::Create(int width, int height) { - return rtc::make_ref_counted(width, height, width, - (width + 1) / 2, (width + 1) / 2); +scoped_refptr I210Buffer::Create(int width, int height) { + return make_ref_counted(width, height, width, (width + 1) / 2, + (width + 1) / 2); } // static -rtc::scoped_refptr I210Buffer::Copy( - const I210BufferInterface& source) { +scoped_refptr I210Buffer::Copy(const I210BufferInterface& source) { const int width = source.width(); const int height = source.height(); - rtc::scoped_refptr buffer = Create(width, height); + scoped_refptr buffer = Create(width, height); RTC_CHECK_EQ( 0, libyuv::I210Copy( source.DataY(), source.StrideY(), source.DataU(), source.StrideU(), @@ -77,12 +86,11 @@ rtc::scoped_refptr I210Buffer::Copy( } // static -rtc::scoped_refptr I210Buffer::Copy( - const I420BufferInterface& source) { +scoped_refptr I210Buffer::Copy(const I420BufferInterface& source) { const int width = source.width(); const int height = source.height(); auto i422buffer = I422Buffer::Copy(source); - rtc::scoped_refptr buffer = Create(width, height); + scoped_refptr buffer = Create(width, height); RTC_CHECK_EQ(0, libyuv::I422ToI210(i422buffer->DataY(), i422buffer->StrideY(), i422buffer->DataU(), i422buffer->StrideU(), i422buffer->DataV(), i422buffer->StrideV(), @@ -94,9 +102,8 @@ rtc::scoped_refptr I210Buffer::Copy( } // static -rtc::scoped_refptr I210Buffer::Rotate( - const I210BufferInterface& src, - VideoRotation rotation) { +scoped_refptr I210Buffer::Rotate(const I210BufferInterface& src, + VideoRotation rotation) { RTC_CHECK(src.DataY()); RTC_CHECK(src.DataU()); RTC_CHECK(src.DataV()); @@ -108,7 +115,7 @@ rtc::scoped_refptr I210Buffer::Rotate( std::swap(rotated_width, rotated_height); } - rtc::scoped_refptr buffer = + scoped_refptr buffer = I210Buffer::Create(rotated_width, rotated_height); RTC_CHECK_EQ(0, @@ -122,9 +129,8 @@ rtc::scoped_refptr I210Buffer::Rotate( return buffer; } -rtc::scoped_refptr I210Buffer::ToI420() { - rtc::scoped_refptr i420_buffer = - I420Buffer::Create(width(), height()); +scoped_refptr I210Buffer::ToI420() { + scoped_refptr i420_buffer = I420Buffer::Create(width(), height()); libyuv::I210ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), i420_buffer->MutableDataY(), i420_buffer->StrideY(), i420_buffer->MutableDataU(), i420_buffer->StrideU(), diff --git a/api/video/i210_buffer.h 
b/api/video/i210_buffer.h index e3b6452b95..11f0f4a548 100644 --- a/api/video/i210_buffer.h +++ b/api/video/i210_buffer.h @@ -26,20 +26,20 @@ namespace webrtc { class I210Buffer : public I210BufferInterface { public: // Create a new buffer. - static rtc::scoped_refptr Create(int width, int height); + static scoped_refptr Create(int width, int height); // Create a new buffer and copy the pixel data. - static rtc::scoped_refptr Copy(const I210BufferInterface& buffer); + static scoped_refptr Copy(const I210BufferInterface& buffer); // Convert and put I420 buffer into a new buffer. - static rtc::scoped_refptr Copy(const I420BufferInterface& buffer); + static scoped_refptr Copy(const I420BufferInterface& buffer); // Return a rotated copy of `src`. - static rtc::scoped_refptr Rotate(const I210BufferInterface& src, - VideoRotation rotation); + static scoped_refptr Rotate(const I210BufferInterface& src, + VideoRotation rotation); // VideoFrameBuffer implementation. - rtc::scoped_refptr ToI420() override; + scoped_refptr ToI420() override; // PlanarYuv16BBuffer implementation. int width() const override; diff --git a/api/video/i410_buffer.cc b/api/video/i410_buffer.cc index 1b0d4fdb5c..f6fc13f25e 100644 --- a/api/video/i410_buffer.cc +++ b/api/video/i410_buffer.cc @@ -12,13 +12,20 @@ #include #include +#include #include #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" #include "rtc_base/checks.h" +#include "rtc_base/memory/aligned_malloc.h" +#include "rtc_base/numerics/safe_conversions.h" #include "third_party/libyuv/include/libyuv/convert.h" #include "third_party/libyuv/include/libyuv/planar_functions.h" +#include "third_party/libyuv/include/libyuv/rotate.h" #include "third_party/libyuv/include/libyuv/scale.h" // Aligning pointer to 64 bytes for improved performance, e.g. use SIMD. 
@@ -29,9 +36,14 @@ namespace webrtc { namespace { -int I410DataSize(int height, int stride_y, int stride_u, int stride_v) { - return kBytesPerPixel * - (stride_y * height + stride_u * height + stride_v * height); +int I410DataSize(int width, + int height, + int stride_y, + int stride_u, + int stride_v) { + CheckValidDimensions(width, height, stride_y, stride_u, stride_v); + int64_t h = height, y = stride_y, u = stride_u, v = stride_v; + return checked_cast(kBytesPerPixel * (y * h + u * h + v * h)); } } // namespace @@ -49,12 +61,9 @@ I410Buffer::I410Buffer(int width, stride_y_(stride_y), stride_u_(stride_u), stride_v_(stride_v), - data_(static_cast( - AlignedMalloc(I410DataSize(height, stride_y, stride_u, stride_v), - kBufferAlignment))) { - RTC_DCHECK_GT(width, 0); - RTC_DCHECK_GT(height, 0); - RTC_DCHECK_GE(stride_y, width); + data_(static_cast(AlignedMalloc( + I410DataSize(width, height, stride_y, stride_u, stride_v), + kBufferAlignment))) { RTC_DCHECK_GE(stride_u, width); RTC_DCHECK_GE(stride_v, width); } @@ -62,39 +71,38 @@ I410Buffer::I410Buffer(int width, I410Buffer::~I410Buffer() {} // static -rtc::scoped_refptr I410Buffer::Create(int width, int height) { - return rtc::make_ref_counted(width, height); +scoped_refptr I410Buffer::Create(int width, int height) { + return make_ref_counted(width, height); } // static -rtc::scoped_refptr I410Buffer::Create(int width, - int height, - int stride_y, - int stride_u, - int stride_v) { - return rtc::make_ref_counted(width, height, stride_y, stride_u, - stride_v); +scoped_refptr I410Buffer::Create(int width, + int height, + int stride_y, + int stride_u, + int stride_v) { + return make_ref_counted(width, height, stride_y, stride_u, + stride_v); } // static -rtc::scoped_refptr I410Buffer::Copy( - const I410BufferInterface& source) { +scoped_refptr I410Buffer::Copy(const I410BufferInterface& source) { return Copy(source.width(), source.height(), source.DataY(), source.StrideY(), source.DataU(), source.StrideU(), source.DataV(), source.StrideV()); } // static -rtc::scoped_refptr I410Buffer::Copy(int width, - int height, - const uint16_t* data_y, - int stride_y, - const uint16_t* data_u, - int stride_u, - const uint16_t* data_v, - int stride_v) { +scoped_refptr I410Buffer::Copy(int width, + int height, + const uint16_t* data_y, + int stride_y, + const uint16_t* data_u, + int stride_u, + const uint16_t* data_v, + int stride_v) { // Note: May use different strides than the input data. 
- rtc::scoped_refptr buffer = Create(width, height); + scoped_refptr buffer = Create(width, height); int res = libyuv::I410Copy(data_y, stride_y, data_u, stride_u, data_v, stride_v, buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataU(), @@ -106,9 +114,8 @@ rtc::scoped_refptr I410Buffer::Copy(int width, } // static -rtc::scoped_refptr I410Buffer::Rotate( - const I410BufferInterface& src, - VideoRotation rotation) { +scoped_refptr I410Buffer::Rotate(const I410BufferInterface& src, + VideoRotation rotation) { RTC_CHECK(src.DataY()); RTC_CHECK(src.DataU()); RTC_CHECK(src.DataV()); @@ -120,7 +127,7 @@ rtc::scoped_refptr I410Buffer::Rotate( std::swap(rotated_width, rotated_height); } - rtc::scoped_refptr buffer = + scoped_refptr buffer = I410Buffer::Create(rotated_width, rotated_height); int res = libyuv::I410Rotate( @@ -134,9 +141,8 @@ rtc::scoped_refptr I410Buffer::Rotate( return buffer; } -rtc::scoped_refptr I410Buffer::ToI420() { - rtc::scoped_refptr i420_buffer = - I420Buffer::Create(width(), height()); +scoped_refptr I410Buffer::ToI420() { + scoped_refptr i420_buffer = I420Buffer::Create(width(), height()); int res = libyuv::I410ToI420( DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), i420_buffer->MutableDataY(), i420_buffer->StrideY(), @@ -149,7 +155,7 @@ rtc::scoped_refptr I410Buffer::ToI420() { void I410Buffer::InitializeData() { memset(data_.get(), 0, - I410DataSize(height_, stride_y_, stride_u_, stride_v_)); + I410DataSize(width_, height_, stride_y_, stride_u_, stride_v_)); } int I410Buffer::width() const { diff --git a/api/video/i410_buffer.h b/api/video/i410_buffer.h index 1c0cd86c12..8176d80d3a 100644 --- a/api/video/i410_buffer.h +++ b/api/video/i410_buffer.h @@ -19,36 +19,37 @@ #include "api/video/video_frame_buffer.h" #include "api/video/video_rotation.h" #include "rtc_base/memory/aligned_malloc.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { // Plain I410 (yuv 444 planar 10 bits) buffer in standard memory. class RTC_EXPORT I410Buffer : public I410BufferInterface { public: - static rtc::scoped_refptr Create(int width, int height); - static rtc::scoped_refptr Create(int width, - int height, - int stride_y, - int stride_u, - int stride_v); + static scoped_refptr Create(int width, int height); + static scoped_refptr Create(int width, + int height, + int stride_y, + int stride_u, + int stride_v); // Create a new buffer and copy the pixel data. - static rtc::scoped_refptr Copy(const I410BufferInterface& buffer); + static scoped_refptr Copy(const I410BufferInterface& buffer); - static rtc::scoped_refptr Copy(int width, - int height, - const uint16_t* data_y, - int stride_y, - const uint16_t* data_u, - int stride_u, - const uint16_t* data_v, - int stride_v); + static scoped_refptr Copy(int width, + int height, + const uint16_t* data_y, + int stride_y, + const uint16_t* data_u, + int stride_u, + const uint16_t* data_v, + int stride_v); // Returns a rotated copy of |src|. - static rtc::scoped_refptr Rotate(const I410BufferInterface& src, - VideoRotation rotation); + static scoped_refptr Rotate(const I410BufferInterface& src, + VideoRotation rotation); - rtc::scoped_refptr ToI420() final; + scoped_refptr ToI420() final; const I420BufferInterface* GetI420() const final { return nullptr; } // Sets all three planes to all zeros. 
Used to work around for diff --git a/api/video/i420_buffer.cc b/api/video/i420_buffer.cc index bf7fc06ee9..39260b125a 100644 --- a/api/video/i420_buffer.cc +++ b/api/video/i420_buffer.cc @@ -12,12 +12,19 @@ #include #include +#include #include #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" #include "rtc_base/checks.h" +#include "rtc_base/memory/aligned_malloc.h" +#include "rtc_base/numerics/safe_conversions.h" #include "third_party/libyuv/include/libyuv/convert.h" #include "third_party/libyuv/include/libyuv/planar_functions.h" +#include "third_party/libyuv/include/libyuv/rotate.h" #include "third_party/libyuv/include/libyuv/scale.h" // Aligning pointer to 64 bytes for improved performance, e.g. use SIMD. @@ -27,8 +34,16 @@ namespace webrtc { namespace { -int I420DataSize(int height, int stride_y, int stride_u, int stride_v) { - return stride_y * height + (stride_u + stride_v) * ((height + 1) / 2); +int I420DataSize(int width, + int height, + int stride_y, + int stride_u, + int stride_v) { + CheckValidDimensions(width, height, stride_y, stride_u, stride_v); + // Do the size calculation using 64bit integers and use checked_cast to catch + // overflow. + int64_t h = height, y = stride_y, u = stride_u, v = stride_v; + return checked_cast(y * h + (u + v) * ((h + 1) / 2)); } } // namespace @@ -46,12 +61,9 @@ I420Buffer::I420Buffer(int width, stride_y_(stride_y), stride_u_(stride_u), stride_v_(stride_v), - data_(static_cast( - AlignedMalloc(I420DataSize(height, stride_y, stride_u, stride_v), - kBufferAlignment))) { - RTC_DCHECK_GT(width, 0); - RTC_DCHECK_GT(height, 0); - RTC_DCHECK_GE(stride_y, width); + data_(static_cast(AlignedMalloc( + I420DataSize(width, height, stride_y, stride_u, stride_v), + kBufferAlignment))) { RTC_DCHECK_GE(stride_u, (width + 1) / 2); RTC_DCHECK_GE(stride_v, (width + 1) / 2); } @@ -59,39 +71,38 @@ I420Buffer::I420Buffer(int width, I420Buffer::~I420Buffer() {} // static -rtc::scoped_refptr I420Buffer::Create(int width, int height) { - return rtc::make_ref_counted(width, height); +scoped_refptr I420Buffer::Create(int width, int height) { + return make_ref_counted(width, height); } // static -rtc::scoped_refptr I420Buffer::Create(int width, - int height, - int stride_y, - int stride_u, - int stride_v) { - return rtc::make_ref_counted(width, height, stride_y, stride_u, - stride_v); +scoped_refptr I420Buffer::Create(int width, + int height, + int stride_y, + int stride_u, + int stride_v) { + return make_ref_counted(width, height, stride_y, stride_u, + stride_v); } // static -rtc::scoped_refptr I420Buffer::Copy( - const I420BufferInterface& source) { +scoped_refptr I420Buffer::Copy(const I420BufferInterface& source) { return Copy(source.width(), source.height(), source.DataY(), source.StrideY(), source.DataU(), source.StrideU(), source.DataV(), source.StrideV()); } // static -rtc::scoped_refptr I420Buffer::Copy(int width, - int height, - const uint8_t* data_y, - int stride_y, - const uint8_t* data_u, - int stride_u, - const uint8_t* data_v, - int stride_v) { +scoped_refptr I420Buffer::Copy(int width, + int height, + const uint8_t* data_y, + int stride_y, + const uint8_t* data_u, + int stride_u, + const uint8_t* data_v, + int stride_v) { // Note: May use different strides than the input data. 
- rtc::scoped_refptr buffer = Create(width, height); + scoped_refptr buffer = Create(width, height); RTC_CHECK_EQ(0, libyuv::I420Copy(data_y, stride_y, data_u, stride_u, data_v, stride_v, buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataU(), @@ -101,9 +112,8 @@ rtc::scoped_refptr I420Buffer::Copy(int width, } // static -rtc::scoped_refptr I420Buffer::Rotate( - const I420BufferInterface& src, - VideoRotation rotation) { +scoped_refptr I420Buffer::Rotate(const I420BufferInterface& src, + VideoRotation rotation) { RTC_CHECK(src.DataY()); RTC_CHECK(src.DataU()); RTC_CHECK(src.DataV()); @@ -115,7 +125,7 @@ rtc::scoped_refptr I420Buffer::Rotate( std::swap(rotated_width, rotated_height); } - rtc::scoped_refptr buffer = + scoped_refptr buffer = I420Buffer::Create(rotated_width, rotated_height); RTC_CHECK_EQ(0, @@ -131,7 +141,7 @@ rtc::scoped_refptr I420Buffer::Rotate( void I420Buffer::InitializeData() { memset(data_.get(), 0, - I420DataSize(height_, stride_y_, stride_u_, stride_v_)); + I420DataSize(width_, height_, stride_y_, stride_u_, stride_v_)); } int I420Buffer::width() const { diff --git a/api/video/i420_buffer.h b/api/video/i420_buffer.h index b337489657..a7a0fb014d 100644 --- a/api/video/i420_buffer.h +++ b/api/video/i420_buffer.h @@ -26,35 +26,35 @@ namespace webrtc { // Plain I420 buffer in standard memory. class RTC_EXPORT I420Buffer : public I420BufferInterface { public: - static rtc::scoped_refptr Create(int width, int height); - static rtc::scoped_refptr Create(int width, - int height, - int stride_y, - int stride_u, - int stride_v); + static scoped_refptr Create(int width, int height); + static scoped_refptr Create(int width, + int height, + int stride_y, + int stride_u, + int stride_v); // Create a new buffer and copy the pixel data. - static rtc::scoped_refptr Copy(const I420BufferInterface& buffer); + static scoped_refptr Copy(const I420BufferInterface& buffer); // Deprecated. - static rtc::scoped_refptr Copy(const VideoFrameBuffer& buffer) { + static scoped_refptr Copy(const VideoFrameBuffer& buffer) { return Copy(*buffer.GetI420()); } - static rtc::scoped_refptr Copy(int width, - int height, - const uint8_t* data_y, - int stride_y, - const uint8_t* data_u, - int stride_u, - const uint8_t* data_v, - int stride_v); + static scoped_refptr Copy(int width, + int height, + const uint8_t* data_y, + int stride_y, + const uint8_t* data_u, + int stride_u, + const uint8_t* data_v, + int stride_v); // Returns a rotated copy of `src`. - static rtc::scoped_refptr Rotate(const I420BufferInterface& src, - VideoRotation rotation); + static scoped_refptr Rotate(const I420BufferInterface& src, + VideoRotation rotation); // Deprecated. 
- static rtc::scoped_refptr Rotate(const VideoFrameBuffer& src, - VideoRotation rotation) { + static scoped_refptr Rotate(const VideoFrameBuffer& src, + VideoRotation rotation) { return Rotate(*src.GetI420(), rotation); } diff --git a/api/video/i422_buffer.cc b/api/video/i422_buffer.cc index fddc1b57fd..4322ace00c 100644 --- a/api/video/i422_buffer.cc +++ b/api/video/i422_buffer.cc @@ -12,13 +12,21 @@ #include #include +#include #include #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" #include "rtc_base/checks.h" +#include "rtc_base/memory/aligned_malloc.h" +#include "rtc_base/numerics/safe_conversions.h" #include "third_party/libyuv/include/libyuv/convert.h" +#include "third_party/libyuv/include/libyuv/convert_from.h" #include "third_party/libyuv/include/libyuv/planar_functions.h" +#include "third_party/libyuv/include/libyuv/rotate.h" #include "third_party/libyuv/include/libyuv/scale.h" // Aligning pointer to 64 bytes for improved performance, e.g. use SIMD. @@ -28,8 +36,14 @@ namespace webrtc { namespace { -int I422DataSize(int height, int stride_y, int stride_u, int stride_v) { - return stride_y * height + stride_u * height + stride_v * height; +int I422DataSize(int width, + int height, + int stride_y, + int stride_u, + int stride_v) { + CheckValidDimensions(width, height, stride_y, stride_u, stride_v); + int64_t h = height, y = stride_y, u = stride_u, v = stride_v; + return checked_cast(y * h + u * h + v * h); } } // namespace @@ -46,12 +60,9 @@ I422Buffer::I422Buffer(int width, stride_y_(stride_y), stride_u_(stride_u), stride_v_(stride_v), - data_(static_cast( - AlignedMalloc(I422DataSize(height, stride_y, stride_u, stride_v), - kBufferAlignment))) { - RTC_DCHECK_GT(width, 0); - RTC_DCHECK_GT(height, 0); - RTC_DCHECK_GE(stride_y, width); + data_(static_cast(AlignedMalloc( + I422DataSize(width, height, stride_y, stride_u, stride_v), + kBufferAlignment))) { RTC_DCHECK_GE(stride_u, (width + 1) / 2); RTC_DCHECK_GE(stride_v, (width + 1) / 2); } @@ -59,34 +70,32 @@ I422Buffer::I422Buffer(int width, I422Buffer::~I422Buffer() {} // static -rtc::scoped_refptr I422Buffer::Create(int width, int height) { - return rtc::make_ref_counted(width, height); +scoped_refptr I422Buffer::Create(int width, int height) { + return make_ref_counted(width, height); } // static -rtc::scoped_refptr I422Buffer::Create(int width, - int height, - int stride_y, - int stride_u, - int stride_v) { - return rtc::make_ref_counted(width, height, stride_y, stride_u, - stride_v); +scoped_refptr I422Buffer::Create(int width, + int height, + int stride_y, + int stride_u, + int stride_v) { + return make_ref_counted(width, height, stride_y, stride_u, + stride_v); } // static -rtc::scoped_refptr I422Buffer::Copy( - const I422BufferInterface& source) { +scoped_refptr I422Buffer::Copy(const I422BufferInterface& source) { return Copy(source.width(), source.height(), source.DataY(), source.StrideY(), source.DataU(), source.StrideU(), source.DataV(), source.StrideV()); } // static -rtc::scoped_refptr I422Buffer::Copy( - const I420BufferInterface& source) { +scoped_refptr I422Buffer::Copy(const I420BufferInterface& source) { const int width = source.width(); const int height = source.height(); - rtc::scoped_refptr buffer = Create(width, height); + scoped_refptr buffer = Create(width, height); int res = libyuv::I420ToI422( source.DataY(), source.StrideY(), source.DataU(), source.StrideU(), 
source.DataV(), source.StrideV(), buffer->MutableDataY(), @@ -98,16 +107,16 @@ rtc::scoped_refptr I422Buffer::Copy( } // static -rtc::scoped_refptr I422Buffer::Copy(int width, - int height, - const uint8_t* data_y, - int stride_y, - const uint8_t* data_u, - int stride_u, - const uint8_t* data_v, - int stride_v) { +scoped_refptr I422Buffer::Copy(int width, + int height, + const uint8_t* data_y, + int stride_y, + const uint8_t* data_u, + int stride_u, + const uint8_t* data_v, + int stride_v) { // Note: May use different strides than the input data. - rtc::scoped_refptr buffer = Create(width, height); + scoped_refptr buffer = Create(width, height); int res = libyuv::I422Copy(data_y, stride_y, data_u, stride_u, data_v, stride_v, buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataU(), @@ -119,9 +128,8 @@ rtc::scoped_refptr I422Buffer::Copy(int width, } // static -rtc::scoped_refptr I422Buffer::Rotate( - const I422BufferInterface& src, - VideoRotation rotation) { +scoped_refptr I422Buffer::Rotate(const I422BufferInterface& src, + VideoRotation rotation) { RTC_CHECK(src.DataY()); RTC_CHECK(src.DataU()); RTC_CHECK(src.DataV()); @@ -133,7 +141,7 @@ rtc::scoped_refptr I422Buffer::Rotate( std::swap(rotated_width, rotated_height); } - rtc::scoped_refptr buffer = + scoped_refptr buffer = I422Buffer::Create(rotated_width, rotated_height); int res = libyuv::I422Rotate( @@ -147,9 +155,8 @@ rtc::scoped_refptr I422Buffer::Rotate( return buffer; } -rtc::scoped_refptr I422Buffer::ToI420() { - rtc::scoped_refptr i420_buffer = - I420Buffer::Create(width(), height()); +scoped_refptr I422Buffer::ToI420() { + scoped_refptr i420_buffer = I420Buffer::Create(width(), height()); int res = libyuv::I422ToI420( DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), i420_buffer->MutableDataY(), i420_buffer->StrideY(), @@ -162,7 +169,7 @@ rtc::scoped_refptr I422Buffer::ToI420() { void I422Buffer::InitializeData() { memset(data_.get(), 0, - I422DataSize(height_, stride_y_, stride_u_, stride_v_)); + I422DataSize(width_, height_, stride_y_, stride_u_, stride_v_)); } int I422Buffer::width() const { diff --git a/api/video/i422_buffer.h b/api/video/i422_buffer.h index 600b4ecea7..4788cd9ff7 100644 --- a/api/video/i422_buffer.h +++ b/api/video/i422_buffer.h @@ -26,32 +26,32 @@ namespace webrtc { // Plain I422 buffer in standard memory. class RTC_EXPORT I422Buffer : public I422BufferInterface { public: - static rtc::scoped_refptr Create(int width, int height); - static rtc::scoped_refptr Create(int width, - int height, - int stride_y, - int stride_u, - int stride_v); + static scoped_refptr Create(int width, int height); + static scoped_refptr Create(int width, + int height, + int stride_y, + int stride_u, + int stride_v); // Create a new buffer and copy the pixel data. - static rtc::scoped_refptr Copy(const I422BufferInterface& buffer); + static scoped_refptr Copy(const I422BufferInterface& buffer); /// Convert and put I420 buffer into a new buffer. - static rtc::scoped_refptr Copy(const I420BufferInterface& buffer); + static scoped_refptr Copy(const I420BufferInterface& buffer); - static rtc::scoped_refptr Copy(int width, - int height, - const uint8_t* data_y, - int stride_y, - const uint8_t* data_u, - int stride_u, - const uint8_t* data_v, - int stride_v); + static scoped_refptr Copy(int width, + int height, + const uint8_t* data_y, + int stride_y, + const uint8_t* data_u, + int stride_u, + const uint8_t* data_v, + int stride_v); // Returns a rotated copy of `src`. 
- static rtc::scoped_refptr Rotate(const I422BufferInterface& src, - VideoRotation rotation); + static scoped_refptr Rotate(const I422BufferInterface& src, + VideoRotation rotation); - rtc::scoped_refptr ToI420() final; + scoped_refptr ToI420() final; const I420BufferInterface* GetI420() const final { return nullptr; } // Sets the buffer to all black. diff --git a/api/video/i444_buffer.cc b/api/video/i444_buffer.cc index 98e892308f..73486622bc 100644 --- a/api/video/i444_buffer.cc +++ b/api/video/i444_buffer.cc @@ -12,13 +12,20 @@ #include #include +#include #include #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" #include "rtc_base/checks.h" +#include "rtc_base/memory/aligned_malloc.h" +#include "rtc_base/numerics/safe_conversions.h" #include "third_party/libyuv/include/libyuv/convert.h" #include "third_party/libyuv/include/libyuv/planar_functions.h" +#include "third_party/libyuv/include/libyuv/rotate.h" #include "third_party/libyuv/include/libyuv/scale.h" // Aligning pointer to 64 bytes for improved performance, e.g. use SIMD. @@ -28,8 +35,14 @@ namespace webrtc { namespace { -int I444DataSize(int height, int stride_y, int stride_u, int stride_v) { - return stride_y * height + stride_u * height + stride_v * height; +int I444DataSize(int width, + int height, + int stride_y, + int stride_u, + int stride_v) { + CheckValidDimensions(width, height, stride_y, stride_u, stride_v); + int64_t h = height, y = stride_y, u = stride_u, v = stride_v; + return checked_cast(y * h + u * h + v * h); } } // namespace @@ -47,52 +60,48 @@ I444Buffer::I444Buffer(int width, stride_y_(stride_y), stride_u_(stride_u), stride_v_(stride_v), - data_(static_cast( - AlignedMalloc(I444DataSize(height, stride_y, stride_u, stride_v), - kBufferAlignment))) { - RTC_DCHECK_GT(width, 0); - RTC_DCHECK_GT(height, 0); - RTC_DCHECK_GE(stride_y, width); - RTC_DCHECK_GE(stride_u, (width)); - RTC_DCHECK_GE(stride_v, (width)); + data_(static_cast(AlignedMalloc( + I444DataSize(width, height, stride_y, stride_u, stride_v), + kBufferAlignment))) { + RTC_DCHECK_GE(stride_u, width); + RTC_DCHECK_GE(stride_v, width); } I444Buffer::~I444Buffer() {} // static -rtc::scoped_refptr I444Buffer::Create(int width, int height) { - return rtc::make_ref_counted(width, height); +scoped_refptr I444Buffer::Create(int width, int height) { + return make_ref_counted(width, height); } // static -rtc::scoped_refptr I444Buffer::Create(int width, - int height, - int stride_y, - int stride_u, - int stride_v) { - return rtc::make_ref_counted(width, height, stride_y, stride_u, - stride_v); +scoped_refptr I444Buffer::Create(int width, + int height, + int stride_y, + int stride_u, + int stride_v) { + return make_ref_counted(width, height, stride_y, stride_u, + stride_v); } // static -rtc::scoped_refptr I444Buffer::Copy( - const I444BufferInterface& source) { +scoped_refptr I444Buffer::Copy(const I444BufferInterface& source) { return Copy(source.width(), source.height(), source.DataY(), source.StrideY(), source.DataU(), source.StrideU(), source.DataV(), source.StrideV()); } // static -rtc::scoped_refptr I444Buffer::Copy(int width, - int height, - const uint8_t* data_y, - int stride_y, - const uint8_t* data_u, - int stride_u, - const uint8_t* data_v, - int stride_v) { +scoped_refptr I444Buffer::Copy(int width, + int height, + const uint8_t* data_y, + int stride_y, + const uint8_t* data_u, + int stride_u, + const uint8_t* data_v, + 
int stride_v) { // Note: May use different strides than the input data. - rtc::scoped_refptr buffer = Create(width, height); + scoped_refptr buffer = Create(width, height); RTC_CHECK_EQ(0, libyuv::I444Copy(data_y, stride_y, data_u, stride_u, data_v, stride_v, buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataU(), @@ -102,9 +111,8 @@ rtc::scoped_refptr I444Buffer::Copy(int width, } // static -rtc::scoped_refptr I444Buffer::Rotate( - const I444BufferInterface& src, - VideoRotation rotation) { +scoped_refptr I444Buffer::Rotate(const I444BufferInterface& src, + VideoRotation rotation) { RTC_CHECK(src.DataY()); RTC_CHECK(src.DataU()); RTC_CHECK(src.DataV()); @@ -116,7 +124,7 @@ rtc::scoped_refptr I444Buffer::Rotate( std::swap(rotated_width, rotated_height); } - rtc::scoped_refptr buffer = + scoped_refptr buffer = I444Buffer::Create(rotated_width, rotated_height); RTC_CHECK_EQ(0, @@ -130,9 +138,8 @@ rtc::scoped_refptr I444Buffer::Rotate( return buffer; } -rtc::scoped_refptr I444Buffer::ToI420() { - rtc::scoped_refptr i420_buffer = - I420Buffer::Create(width(), height()); +scoped_refptr I444Buffer::ToI420() { + scoped_refptr i420_buffer = I420Buffer::Create(width(), height()); libyuv::I444ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), i420_buffer->MutableDataY(), i420_buffer->StrideY(), i420_buffer->MutableDataU(), i420_buffer->StrideU(), @@ -143,7 +150,7 @@ rtc::scoped_refptr I444Buffer::ToI420() { void I444Buffer::InitializeData() { memset(data_.get(), 0, - I444DataSize(height_, stride_y_, stride_u_, stride_v_)); + I444DataSize(width_, height_, stride_y_, stride_u_, stride_v_)); } int I444Buffer::width() const { diff --git a/api/video/i444_buffer.h b/api/video/i444_buffer.h index f1e3f63114..3e5cf01fff 100644 --- a/api/video/i444_buffer.h +++ b/api/video/i444_buffer.h @@ -28,30 +28,30 @@ namespace webrtc { // https://en.wikipedia.org/wiki/Chroma_subsampling#4:4:4 class RTC_EXPORT I444Buffer : public I444BufferInterface { public: - static rtc::scoped_refptr Create(int width, int height); - static rtc::scoped_refptr Create(int width, - int height, - int stride_y, - int stride_u, - int stride_v); + static scoped_refptr Create(int width, int height); + static scoped_refptr Create(int width, + int height, + int stride_y, + int stride_u, + int stride_v); // Create a new buffer and copy the pixel data. - static rtc::scoped_refptr Copy(const I444BufferInterface& buffer); + static scoped_refptr Copy(const I444BufferInterface& buffer); - static rtc::scoped_refptr Copy(int width, - int height, - const uint8_t* data_y, - int stride_y, - const uint8_t* data_u, - int stride_u, - const uint8_t* data_v, - int stride_v); + static scoped_refptr Copy(int width, + int height, + const uint8_t* data_y, + int stride_y, + const uint8_t* data_u, + int stride_u, + const uint8_t* data_v, + int stride_v); // Returns a rotated copy of |src|. - static rtc::scoped_refptr Rotate(const I444BufferInterface& src, - VideoRotation rotation); + static scoped_refptr Rotate(const I444BufferInterface& src, + VideoRotation rotation); - rtc::scoped_refptr ToI420() final; + scoped_refptr ToI420() final; const I420BufferInterface* GetI420() const final { return nullptr; } // Sets all three planes to all zeros. 
Used to work around for diff --git a/api/video/nv12_buffer.cc b/api/video/nv12_buffer.cc index ca9dcd8677..946b54fd39 100644 --- a/api/video/nv12_buffer.cc +++ b/api/video/nv12_buffer.cc @@ -10,10 +10,19 @@ #include "api/video/nv12_buffer.h" +#include +#include +#include + #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "api/video/video_frame_buffer.h" #include "rtc_base/checks.h" +#include "rtc_base/memory/aligned_malloc.h" +#include "rtc_base/numerics/safe_conversions.h" #include "third_party/libyuv/include/libyuv/convert.h" +#include "third_party/libyuv/include/libyuv/convert_from.h" #include "third_party/libyuv/include/libyuv/scale.h" namespace webrtc { @@ -22,8 +31,10 @@ namespace { static const int kBufferAlignment = 64; -int NV12DataSize(int height, int stride_y, int stride_uv) { - return stride_y * height + stride_uv * ((height + 1) / 2); +int NV12DataSize(int width, int height, int stride_y, int stride_uv) { + CheckValidDimensions(width, height, stride_y, stride_uv, stride_uv); + int64_t h = height, y = stride_y, uv = stride_uv; + return checked_cast(y * h + uv * ((h + 1) / 2)); } } // namespace @@ -37,33 +48,30 @@ NV12Buffer::NV12Buffer(int width, int height, int stride_y, int stride_uv) stride_y_(stride_y), stride_uv_(stride_uv), data_(static_cast( - AlignedMalloc(NV12DataSize(height_, stride_y_, stride_uv), + AlignedMalloc(NV12DataSize(width, height, stride_y, stride_uv), kBufferAlignment))) { - RTC_DCHECK_GT(width, 0); - RTC_DCHECK_GT(height, 0); - RTC_DCHECK_GE(stride_y, width); - RTC_DCHECK_GE(stride_uv, (width + width % 2)); + RTC_DCHECK_GE(stride_uv, width + width % 2); } NV12Buffer::~NV12Buffer() = default; // static -rtc::scoped_refptr NV12Buffer::Create(int width, int height) { - return rtc::make_ref_counted(width, height); +scoped_refptr NV12Buffer::Create(int width, int height) { + return make_ref_counted(width, height); } // static -rtc::scoped_refptr NV12Buffer::Create(int width, - int height, - int stride_y, - int stride_uv) { - return rtc::make_ref_counted(width, height, stride_y, stride_uv); +scoped_refptr NV12Buffer::Create(int width, + int height, + int stride_y, + int stride_uv) { + return make_ref_counted(width, height, stride_y, stride_uv); } // static -rtc::scoped_refptr NV12Buffer::Copy( +scoped_refptr NV12Buffer::Copy( const I420BufferInterface& i420_buffer) { - rtc::scoped_refptr buffer = + scoped_refptr buffer = NV12Buffer::Create(i420_buffer.width(), i420_buffer.height()); libyuv::I420ToNV12( i420_buffer.DataY(), i420_buffer.StrideY(), i420_buffer.DataU(), @@ -73,9 +81,8 @@ rtc::scoped_refptr NV12Buffer::Copy( return buffer; } -rtc::scoped_refptr NV12Buffer::ToI420() { - rtc::scoped_refptr i420_buffer = - I420Buffer::Create(width(), height()); +scoped_refptr NV12Buffer::ToI420() { + scoped_refptr i420_buffer = I420Buffer::Create(width(), height()); libyuv::NV12ToI420(DataY(), StrideY(), DataUV(), StrideUV(), i420_buffer->MutableDataY(), i420_buffer->StrideY(), i420_buffer->MutableDataU(), i420_buffer->StrideU(), @@ -119,7 +126,7 @@ size_t NV12Buffer::UVOffset() const { } void NV12Buffer::InitializeData() { - memset(data_.get(), 0, NV12DataSize(height_, stride_y_, stride_uv_)); + memset(data_.get(), 0, NV12DataSize(width_, height_, stride_y_, stride_uv_)); } void NV12Buffer::CropAndScaleFrom(const NV12BufferInterface& src, diff --git a/api/video/nv12_buffer.h b/api/video/nv12_buffer.h index 46a85f82e1..290c14300c 100644 --- a/api/video/nv12_buffer.h +++ b/api/video/nv12_buffer.h @@ -11,8 
+11,9 @@ #ifndef API_VIDEO_NV12_BUFFER_H_ #define API_VIDEO_NV12_BUFFER_H_ +#include +#include #include -#include #include "api/scoped_refptr.h" #include "api/video/video_frame_buffer.h" @@ -26,15 +27,14 @@ namespace webrtc { // http://msdn.microsoft.com/library/windows/desktop/dd206750.aspx#nv12. class RTC_EXPORT NV12Buffer : public NV12BufferInterface { public: - static rtc::scoped_refptr Create(int width, int height); - static rtc::scoped_refptr Create(int width, - int height, - int stride_y, - int stride_uv); - static rtc::scoped_refptr Copy( - const I420BufferInterface& i420_buffer); - - rtc::scoped_refptr ToI420() override; + static scoped_refptr Create(int width, int height); + static scoped_refptr Create(int width, + int height, + int stride_y, + int stride_uv); + static scoped_refptr Copy(const I420BufferInterface& i420_buffer); + + scoped_refptr ToI420() override; int width() const override; int height() const override; diff --git a/api/video/recordable_encoded_frame.h b/api/video/recordable_encoded_frame.h index 47ea23f119..aa55e9480a 100644 --- a/api/video/recordable_encoded_frame.h +++ b/api/video/recordable_encoded_frame.h @@ -11,12 +11,14 @@ #ifndef API_VIDEO_RECORDABLE_ENCODED_FRAME_H_ #define API_VIDEO_RECORDABLE_ENCODED_FRAME_H_ -#include "api/array_view.h" +#include + #include "api/scoped_refptr.h" #include "api/units/timestamp.h" #include "api/video/color_space.h" #include "api/video/encoded_image.h" #include "api/video/video_codec_type.h" +#include "api/video/video_rotation.h" namespace webrtc { @@ -35,12 +37,16 @@ class RecordableEncodedFrame { virtual ~RecordableEncodedFrame() = default; // Provides access to encoded data - virtual rtc::scoped_refptr encoded_buffer() + virtual scoped_refptr encoded_buffer() const = 0; // Optionally returns the colorspace of the encoded frame. This can differ // from the eventually decoded frame's colorspace. - virtual absl::optional color_space() const = 0; + virtual std::optional color_space() const = 0; + + // Optionally returns the rotation of the encoded frame. This is limited to + // {0,90,180,270} degrees. + virtual std::optional video_rotation() const = 0; // Returns the codec of the encoded frame virtual VideoCodecType codec() const = 0; diff --git a/api/video/resolution.h b/api/video/resolution.h index 11ffef0b03..1a3c97c458 100644 --- a/api/video/resolution.h +++ b/api/video/resolution.h @@ -13,10 +13,12 @@ #include +#include "rtc_base/system/rtc_export.h" + namespace webrtc { // A struct representing a video resolution in pixels. 
-struct Resolution { +struct RTC_EXPORT Resolution { int width = 0; int height = 0; diff --git a/api/video/rtp_video_frame_assembler.cc b/api/video/rtp_video_frame_assembler.cc index 3d041ca218..7d79b7a5aa 100644 --- a/api/video/rtp_video_frame_assembler.cc +++ b/api/video/rtp_video_frame_assembler.cc @@ -10,19 +10,27 @@ #include "api/video/rtp_video_frame_assembler.h" -#include #include -#include #include +#include #include #include #include "absl/container/inlined_vector.h" -#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/rtp_packet_infos.h" +#include "api/scoped_refptr.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/encoded_image.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_timing.h" #include "modules/rtp_rtcp/source/frame_object.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" +#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_av1.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_generic.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_h264.h" @@ -31,9 +39,14 @@ #include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h" #include "modules/video_coding/packet_buffer.h" #include "modules/video_coding/rtp_frame_reference_finder.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" +#ifdef RTC_ENABLE_H265 +#include "modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h" +#endif + namespace webrtc { namespace { std::unique_ptr CreateDepacketizer( @@ -51,6 +64,12 @@ std::unique_ptr CreateDepacketizer( return std::make_unique(); case RtpVideoFrameAssembler::kGeneric: return std::make_unique(); + case RtpVideoFrameAssembler::kH265: +#ifdef RTC_ENABLE_H265 + return std::make_unique(); +#else + return nullptr; +#endif } RTC_DCHECK_NOTREACHED(); return nullptr; @@ -79,8 +98,9 @@ class RtpVideoFrameAssembler::Impl { void ClearOldData(uint16_t incoming_seq_num); std::unique_ptr video_structure_; + SeqNumUnwrapper rtp_sequence_number_unwrapper_; SeqNumUnwrapper frame_id_unwrapper_; - absl::optional video_structure_frame_id_; + std::optional video_structure_frame_id_; std::unique_ptr depacketizer_; video_coding::PacketBuffer packet_buffer_; RtpFrameReferenceFinder reference_finder_; @@ -98,10 +118,10 @@ RtpVideoFrameAssembler::FrameVector RtpVideoFrameAssembler::Impl::InsertPacket( return UpdateWithPadding(rtp_packet.SequenceNumber()); } - absl::optional parsed_payload = + std::optional parsed_payload = depacketizer_->Parse(rtp_packet.PayloadBuffer()); - if (parsed_payload == absl::nullopt) { + if (parsed_payload == std::nullopt) { return {}; } @@ -120,7 +140,9 @@ RtpVideoFrameAssembler::FrameVector RtpVideoFrameAssembler::Impl::InsertPacket( parsed_payload->video_header.is_last_packet_in_frame |= rtp_packet.Marker(); auto packet = std::make_unique( - rtp_packet, parsed_payload->video_header); + rtp_packet, + rtp_sequence_number_unwrapper_.Unwrap(rtp_packet.SequenceNumber()), + parsed_payload->video_header); packet->video_payload = std::move(parsed_payload->video_payload); ClearOldData(rtp_packet.SequenceNumber()); @@ -139,7 +161,7 @@ RtpVideoFrameAssembler::Impl::RtpFrameVector 
RtpVideoFrameAssembler::Impl::AssembleFrames( video_coding::PacketBuffer::InsertResult insert_result) { video_coding::PacketBuffer::Packet* first_packet = nullptr; - std::vector> payloads; + std::vector> payloads; RtpFrameVector result; for (auto& packet : insert_result.packets) { @@ -150,7 +172,7 @@ RtpVideoFrameAssembler::Impl::AssembleFrames( payloads.emplace_back(packet->video_payload); if (packet->is_last_packet_in_frame()) { - rtc::scoped_refptr bitstream = + scoped_refptr bitstream = depacketizer_->AssembleFrame(payloads); if (!bitstream) { @@ -159,22 +181,23 @@ RtpVideoFrameAssembler::Impl::AssembleFrames( const video_coding::PacketBuffer::Packet& last_packet = *packet; result.push_back(std::make_unique( - first_packet->seq_num, // - last_packet.seq_num, // - last_packet.marker_bit, // - /*times_nacked=*/0, // - /*first_packet_received_time=*/0, // - /*last_packet_received_time=*/0, // - first_packet->timestamp, // - /*ntp_time_ms=*/0, // - /*timing=*/VideoSendTiming(), // - first_packet->payload_type, // - first_packet->codec(), // - last_packet.video_header.rotation, // - last_packet.video_header.content_type, // - first_packet->video_header, // - last_packet.video_header.color_space, // - /*packet_infos=*/RtpPacketInfos(), // + first_packet->seq_num(), // + last_packet.seq_num(), // + last_packet.marker_bit, // + /*times_nacked=*/0, // + /*first_packet_received_time=*/0, // + /*last_packet_received_time=*/0, // + first_packet->timestamp, // + /*ntp_time_ms=*/0, // + /*timing=*/VideoSendTiming(), // + first_packet->payload_type, // + first_packet->codec(), // + last_packet.video_header.rotation, // + last_packet.video_header.content_type, // + first_packet->video_header, // + last_packet.video_header.color_space, // + last_packet.video_header.frame_instrumentation_data, // + /*packet_infos=*/RtpPacketInfos(), // std::move(bitstream))); } } diff --git a/api/video/rtp_video_frame_assembler.h b/api/video/rtp_video_frame_assembler.h index 83162cb818..099c962f23 100644 --- a/api/video/rtp_video_frame_assembler.h +++ b/api/video/rtp_video_frame_assembler.h @@ -52,7 +52,7 @@ class RtpVideoFrameAssembler { // FrameVector is just a vector-like type of std::unique_ptr. // The vector type may change without notice. using FrameVector = absl::InlinedVector; - enum PayloadFormat { kRaw, kH264, kVp8, kVp9, kAv1, kGeneric }; + enum PayloadFormat { kRaw, kH264, kVp8, kVp9, kAv1, kGeneric, kH265 }; explicit RtpVideoFrameAssembler(PayloadFormat payload_format); RtpVideoFrameAssembler(const RtpVideoFrameAssembler& other) = delete; diff --git a/api/video/rtp_video_frame_assembler_unittests.cc b/api/video/rtp_video_frame_assembler_unittests.cc index 82defb8399..1ac5fe5eff 100644 --- a/api/video/rtp_video_frame_assembler_unittests.cc +++ b/api/video/rtp_video_frame_assembler_unittests.cc @@ -8,15 +8,29 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include +#include +#include +#include #include #include "api/array_view.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/encoded_frame.h" #include "api/video/rtp_video_frame_assembler.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_format.h" +#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/rtp_packetizer_av1_test_helper.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "rtc_base/checks.h" #include "test/gmock.h" #include "test/gtest.h" @@ -42,7 +56,7 @@ class PacketBuilder { return *this; } - PacketBuilder& WithPayload(rtc::ArrayView payload) { + PacketBuilder& WithPayload(ArrayView payload) { payload_.assign(payload.begin(), payload.end()); return *this; } @@ -72,10 +86,10 @@ class PacketBuilder { } private: - absl::optional GetVideoCodecType() { + std::optional GetVideoCodecType() { switch (format_) { case PayloadFormat::kRaw: { - return absl::nullopt; + return std::nullopt; } case PayloadFormat::kH264: { return kVideoCodecH264; @@ -89,12 +103,15 @@ class PacketBuilder { case PayloadFormat::kAv1: { return kVideoCodecAV1; } + case PayloadFormat::kH265: { + return kVideoCodecH265; + } case PayloadFormat::kGeneric: { return kVideoCodecGeneric; } } RTC_DCHECK_NOTREACHED(); - return absl::nullopt; + return std::nullopt; } const RtpVideoFrameAssembler::PayloadFormat format_; @@ -118,12 +135,12 @@ void AppendFrames(RtpVideoFrameAssembler::FrameVector from, std::make_move_iterator(from.end())); } -rtc::ArrayView References(const std::unique_ptr& frame) { - return rtc::MakeArrayView(frame->references, frame->num_references); +ArrayView References(const std::unique_ptr& frame) { + return MakeArrayView(frame->references, frame->num_references); } -rtc::ArrayView Payload(const std::unique_ptr& frame) { - return rtc::ArrayView(*frame->GetEncodedData()); +ArrayView Payload(const std::unique_ptr& frame) { + return ArrayView(*frame->GetEncodedData()); } TEST(RtpVideoFrameAssembler, Vp8Packetization) { diff --git a/api/video/rtp_video_frame_h265_assembler_unittests.cc b/api/video/rtp_video_frame_h265_assembler_unittests.cc new file mode 100644 index 0000000000..5fb8be49ea --- /dev/null +++ b/api/video/rtp_video_frame_h265_assembler_unittests.cc @@ -0,0 +1,158 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "api/array_view.h"
+#include "api/video/encoded_frame.h"
+#include "api/video/rtp_video_frame_assembler.h"
+#include "api/video/video_codec_type.h"
+#include "api/video/video_frame_type.h"
+#include "modules/rtp_rtcp/source/rtp_format.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/rtp_rtcp/source/rtp_video_header.h"
+#include "rtc_base/checks.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::ElementsAreArray;
+using ::testing::Eq;
+using ::testing::IsEmpty;
+using ::testing::SizeIs;
+using ::testing::UnorderedElementsAre;
+using PayloadFormat = RtpVideoFrameAssembler::PayloadFormat;
+
+class PacketBuilder {
+ public:
+  explicit PacketBuilder(PayloadFormat format)
+      : format_(format), packet_to_send_(&extension_manager_) {}
+
+  PacketBuilder& WithSeqNum(uint16_t seq_num) {
+    seq_num_ = seq_num;
+    return *this;
+  }
+
+  PacketBuilder& WithPayload(ArrayView payload) {
+    payload_.assign(payload.begin(), payload.end());
+    return *this;
+  }
+
+  PacketBuilder& WithVideoHeader(const RTPVideoHeader& video_header) {
+    video_header_ = video_header;
+    return *this;
+  }
+
+  template
+  PacketBuilder& WithExtension(int id, const Args&... args) {
+    extension_manager_.Register(id);
+    packet_to_send_.IdentifyExtensions(extension_manager_);
+    packet_to_send_.SetExtension(std::forward(args)...);
+    return *this;
+  }
+
+  RtpPacketReceived Build() {
+    auto packetizer =
+        RtpPacketizer::Create(GetVideoCodecType(), payload_, {}, video_header_);
+    packetizer->NextPacket(&packet_to_send_);
+    packet_to_send_.SetSequenceNumber(seq_num_);
+
+    RtpPacketReceived received(&extension_manager_);
+    received.Parse(packet_to_send_.Buffer());
+    return received;
+  }
+
+ private:
+  std::optional GetVideoCodecType() {
+    switch (format_) {
+      case PayloadFormat::kH265: {
+        return kVideoCodecH265;
+      }
+      default:
+        RTC_DCHECK_NOTREACHED();
+        return std::nullopt;
+    }
+  }
+
+  const RtpVideoFrameAssembler::PayloadFormat format_;
+  uint16_t seq_num_ = 0;
+  std::vector payload_;
+  RTPVideoHeader video_header_;
+  RtpPacketReceived::ExtensionManager extension_manager_;
+  RtpPacketToSend packet_to_send_;
+};
+
+void AppendFrames(RtpVideoFrameAssembler::FrameVector&& from,
+                  RtpVideoFrameAssembler::FrameVector& to) {
+  to.insert(to.end(), std::make_move_iterator(from.begin()),
+            std::make_move_iterator(from.end()));
+}
+
+ArrayView References(const std::unique_ptr& frame) {
+  return MakeArrayView(frame->references, frame->num_references);
+}
+
+ArrayView Payload(const std::unique_ptr& frame) {
+  return ArrayView(*frame->GetEncodedData());
+}
+
+TEST(RtpVideoFrameH265Assembler, H265Packetization) {
+  RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kH265);
+  RtpVideoFrameAssembler::FrameVector frames;
+
+  // Key and delta frames generated on linux with ffmpeg command:
+  // `ffmpeg -i /dev/video0 -r 30 -c:v libx265 -s 1280x720 camera.h265`,
+  // truncated for test.
+  // IDR_N_LP(key) frame with start code included.
+  uint8_t kIdrPayload[] = {0x00, 0x00, 0x00, 0x01, 0x28, 0x01, 0xaf,
+                           0x08, 0x4a, 0x31, 0x11, 0x15, 0xe5, 0xc0};
+  // TRAIL_R(delta) frame with start code included.
+ uint8_t kDeltaPayload[] = {0x00, 0x00, 0x00, 0x01, 0x02, 0x01, 0xd0, + 0x09, 0x7e, 0x10, 0xc6, 0x1c, 0x8c, 0x17}; + + RTPVideoHeader video_header; + video_header.frame_type = VideoFrameType::kVideoFrameKey; + RtpVideoFrameAssembler::FrameVector idr_frames = + assembler.InsertPacket(PacketBuilder(PayloadFormat::kH265) + .WithPayload(kIdrPayload) + .WithVideoHeader(video_header) + .WithSeqNum(10) + .Build()); + AppendFrames(std::move(idr_frames), frames); + + RtpVideoFrameAssembler::FrameVector delta_frames = + assembler.InsertPacket(PacketBuilder(PayloadFormat::kH265) + .WithPayload(kDeltaPayload) + .WithSeqNum(11) + .Build()); + AppendFrames(std::move(delta_frames), frames); + ASSERT_THAT(frames, SizeIs(2)); + + auto first_frame = frames[0].ExtractFrame(); + EXPECT_THAT(first_frame->Id(), Eq(10)); + EXPECT_THAT(Payload(first_frame), ElementsAreArray(kIdrPayload)); + EXPECT_THAT(References(first_frame), IsEmpty()); + + auto second_frame = frames[1].ExtractFrame(); + EXPECT_THAT(second_frame->Id(), Eq(11)); + EXPECT_THAT(Payload(second_frame), ElementsAreArray(kDeltaPayload)); + EXPECT_THAT(References(second_frame), UnorderedElementsAre(10)); +} + +} // namespace +} // namespace webrtc diff --git a/api/video/test/BUILD.gn b/api/video/test/BUILD.gn index 60ec4b852f..184c651ea9 100644 --- a/api/video/test/BUILD.gn +++ b/api/video/test/BUILD.gn @@ -26,10 +26,10 @@ rtc_library("rtc_api_video_unittests") { "..:video_frame", "..:video_frame_i010", "..:video_rtp_headers", + "../..:scoped_refptr", "../../../test:frame_utils", "../../../test:test_support", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("mock_recordable_encoded_frame") { @@ -38,8 +38,13 @@ rtc_source_set("mock_recordable_encoded_frame") { sources = [ "mock_recordable_encoded_frame.h" ] deps = [ + "..:encoded_image", "..:recordable_encoded_frame", + "..:video_frame", + "..:video_rtp_headers", + "../..:scoped_refptr", "../../../test:test_support", + "../../units:timestamp", ] } diff --git a/api/video/test/color_space_unittest.cc b/api/video/test/color_space_unittest.cc index 1d8b3a87f6..ae66b018f5 100644 --- a/api/video/test/color_space_unittest.cc +++ b/api/video/test/color_space_unittest.cc @@ -71,4 +71,13 @@ TEST(ColorSpace, TestSettingChromaSitingVerticalFromUint8) { EXPECT_FALSE(color_space.set_chroma_siting_vertical_from_uint8(3)); } +TEST(ColorSpace, TestAsStringFunction) { + ColorSpace color_space( + ColorSpace::PrimaryID::kBT709, ColorSpace::TransferID::kBT709, + ColorSpace::MatrixID::kBT709, ColorSpace::RangeID::kLimited); + EXPECT_EQ( + color_space.AsString(), + "{primaries:kBT709, transfer:kBT709, matrix:kBT709, range:kLimited}"); +} + } // namespace webrtc diff --git a/api/video/test/i210_buffer_unittest.cc b/api/video/test/i210_buffer_unittest.cc index aaa231b6d2..64a77f6cf0 100644 --- a/api/video/test/i210_buffer_unittest.cc +++ b/api/video/test/i210_buffer_unittest.cc @@ -11,28 +11,32 @@ #include "api/video/i210_buffer.h" +#include +#include + +#include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "api/video/video_frame_buffer.h" #include "test/frame_utils.h" -#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { namespace { -int GetY(rtc::scoped_refptr buf, int col, int row) { +int GetY(scoped_refptr buf, int col, int row) { return buf->DataY()[row * buf->StrideY() + col]; } -int GetU(rtc::scoped_refptr buf, int col, int row) { +int GetU(scoped_refptr buf, int col, int row) { return buf->DataU()[row * buf->StrideU() + col]; } -int 
GetV(rtc::scoped_refptr buf, int col, int row) { +int GetV(scoped_refptr buf, int col, int row) { return buf->DataV()[row * buf->StrideV() + col]; } -void FillI210Buffer(rtc::scoped_refptr buf) { +void FillI210Buffer(scoped_refptr buf) { const uint16_t Y = 4; const uint16_t U = 8; const uint16_t V = 16; @@ -58,7 +62,7 @@ TEST(I210BufferTest, InitialData) { constexpr int halfwidth = (width + 1) >> 1; constexpr int height = 3; - rtc::scoped_refptr i210_buffer(I210Buffer::Create(width, height)); + scoped_refptr i210_buffer(I210Buffer::Create(width, height)); EXPECT_EQ(width, i210_buffer->width()); EXPECT_EQ(height, i210_buffer->height()); EXPECT_EQ(stride, i210_buffer->StrideY()); @@ -73,7 +77,7 @@ TEST(I210BufferTest, ReadPixels) { constexpr int halfwidth = (width + 1) >> 1; constexpr int height = 3; - rtc::scoped_refptr i210_buffer(I210Buffer::Create(width, height)); + scoped_refptr i210_buffer(I210Buffer::Create(width, height)); // Y = 4, U = 8, V = 16. FillI210Buffer(i210_buffer); for (int row = 0; row < height; row++) { @@ -95,12 +99,12 @@ TEST(I210BufferTest, ToI420) { constexpr int height = 3; constexpr int size = width * height; constexpr int quartersize = (width + 1) / 2 * (height + 1) / 2; - rtc::scoped_refptr reference(I420Buffer::Create(width, height)); + scoped_refptr reference(I420Buffer::Create(width, height)); memset(reference->MutableDataY(), 1, size); memset(reference->MutableDataU(), 2, quartersize); memset(reference->MutableDataV(), 4, quartersize); - rtc::scoped_refptr i210_buffer(I210Buffer::Create(width, height)); + scoped_refptr i210_buffer(I210Buffer::Create(width, height)); // Y = 4, U = 8, V = 16. FillI210Buffer(i210_buffer); @@ -117,7 +121,7 @@ TEST(I210BufferTest, ToI420) { } } - rtc::scoped_refptr i420_buffer(i210_buffer->ToI420()); + scoped_refptr i420_buffer(i210_buffer->ToI420()); EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer)); EXPECT_EQ(height, i420_buffer->height()); EXPECT_EQ(width, i420_buffer->width()); diff --git a/api/video/test/i410_buffer_unittest.cc b/api/video/test/i410_buffer_unittest.cc index c5d2d5bf2d..255c765c66 100644 --- a/api/video/test/i410_buffer_unittest.cc +++ b/api/video/test/i410_buffer_unittest.cc @@ -11,9 +11,13 @@ #include "api/video/i410_buffer.h" +#include +#include + +#include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "api/video/video_frame_buffer.h" #include "test/frame_utils.h" -#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { @@ -23,19 +27,19 @@ constexpr uint16_t kYValue = 4; constexpr uint16_t kUValue = 8; constexpr uint16_t kVValue = 16; -int GetY(rtc::scoped_refptr buf, int col, int row) { +int GetY(scoped_refptr buf, int col, int row) { return buf->DataY()[row * buf->StrideY() + col]; } -int GetU(rtc::scoped_refptr buf, int col, int row) { +int GetU(scoped_refptr buf, int col, int row) { return buf->DataU()[row * buf->StrideU() + col]; } -int GetV(rtc::scoped_refptr buf, int col, int row) { +int GetV(scoped_refptr buf, int col, int row) { return buf->DataV()[row * buf->StrideV() + col]; } -void FillI410Buffer(rtc::scoped_refptr buf) { +void FillI410Buffer(scoped_refptr buf) { for (int row = 0; row < buf->height(); ++row) { for (int col = 0; col < buf->width(); ++col) { buf->MutableDataY()[row * buf->StrideY() + col] = kYValue; @@ -52,7 +56,7 @@ TEST(I410BufferTest, InitialData) { constexpr int width = 3; constexpr int height = 3; - rtc::scoped_refptr i410_buffer(I410Buffer::Create(width, height)); + scoped_refptr i410_buffer(I410Buffer::Create(width, height)); 
EXPECT_EQ(width, i410_buffer->width()); EXPECT_EQ(height, i410_buffer->height()); EXPECT_EQ(stride, i410_buffer->StrideY()); @@ -66,7 +70,7 @@ TEST(I410BufferTest, ReadPixels) { constexpr int width = 3; constexpr int height = 3; - rtc::scoped_refptr i410_buffer(I410Buffer::Create(width, height)); + scoped_refptr i410_buffer(I410Buffer::Create(width, height)); FillI410Buffer(i410_buffer); for (int row = 0; row < height; row++) { for (int col = 0; col < width; col++) { @@ -85,13 +89,13 @@ TEST(I410BufferTest, ToI420) { constexpr int size_y = width * height; constexpr int size_u = (width + 1) / 2 * (height + 1) / 2; constexpr int size_v = (width + 1) / 2 * (height + 1) / 2; - rtc::scoped_refptr reference(I420Buffer::Create(width, height)); + scoped_refptr reference(I420Buffer::Create(width, height)); // I410 is 10-bit while I420 is 8 bit, so last 2 bits would be discarded. memset(reference->MutableDataY(), kYValue >> 2, size_y); memset(reference->MutableDataU(), kUValue >> 2, size_u); memset(reference->MutableDataV(), kVValue >> 2, size_v); - rtc::scoped_refptr i410_buffer(I410Buffer::Create(width, height)); + scoped_refptr i410_buffer(I410Buffer::Create(width, height)); FillI410Buffer(i410_buffer); // Confirm YUV values are as expected. @@ -103,7 +107,7 @@ TEST(I410BufferTest, ToI420) { } } - rtc::scoped_refptr i420_buffer(i410_buffer->ToI420()); + scoped_refptr i420_buffer(i410_buffer->ToI420()); // Confirm YUV values are as expected. for (int row = 0; row < height; row++) { diff --git a/api/video/test/i422_buffer_unittest.cc b/api/video/test/i422_buffer_unittest.cc index 499b268546..71fbf464dc 100644 --- a/api/video/test/i422_buffer_unittest.cc +++ b/api/video/test/i422_buffer_unittest.cc @@ -11,27 +11,31 @@ #include "api/video/i422_buffer.h" +#include +#include + +#include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "api/video/video_frame_buffer.h" #include "test/frame_utils.h" -#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { namespace { -int GetY(rtc::scoped_refptr buf, int col, int row) { +int GetY(scoped_refptr buf, int col, int row) { return buf->DataY()[row * buf->StrideY() + col]; } -int GetU(rtc::scoped_refptr buf, int col, int row) { +int GetU(scoped_refptr buf, int col, int row) { return buf->DataU()[row * buf->StrideU() + col]; } -int GetV(rtc::scoped_refptr buf, int col, int row) { +int GetV(scoped_refptr buf, int col, int row) { return buf->DataV()[row * buf->StrideV() + col]; } -void FillI422Buffer(rtc::scoped_refptr buf) { +void FillI422Buffer(scoped_refptr buf) { const uint8_t Y = 1; const uint8_t U = 2; const uint8_t V = 3; @@ -57,7 +61,7 @@ TEST(I422BufferTest, InitialData) { constexpr int halfwidth = (width + 1) >> 1; constexpr int height = 3; - rtc::scoped_refptr i422_buffer(I422Buffer::Create(width, height)); + scoped_refptr i422_buffer(I422Buffer::Create(width, height)); EXPECT_EQ(width, i422_buffer->width()); EXPECT_EQ(height, i422_buffer->height()); EXPECT_EQ(stride, i422_buffer->StrideY()); @@ -72,7 +76,7 @@ TEST(I422BufferTest, ReadPixels) { constexpr int halfwidth = (width + 1) >> 1; constexpr int height = 3; - rtc::scoped_refptr i422_buffer(I422Buffer::Create(width, height)); + scoped_refptr i422_buffer(I422Buffer::Create(width, height)); // Y = 1, U = 2, V = 3. 
FillI422Buffer(i422_buffer); for (int row = 0; row < height; row++) { @@ -95,12 +99,12 @@ TEST(I422BufferTest, ToI420) { constexpr int size = width * height; constexpr int halfsize = (width + 1) / 2 * height; constexpr int quartersize = (width + 1) / 2 * (height + 1) / 2; - rtc::scoped_refptr reference(I420Buffer::Create(width, height)); + scoped_refptr reference(I420Buffer::Create(width, height)); memset(reference->MutableDataY(), 8, size); memset(reference->MutableDataU(), 4, quartersize); memset(reference->MutableDataV(), 2, quartersize); - rtc::scoped_refptr i422_buffer(I422Buffer::Create(width, height)); + scoped_refptr i422_buffer(I422Buffer::Create(width, height)); // Convert the reference buffer to I422. memset(i422_buffer->MutableDataY(), 8, size); memset(i422_buffer->MutableDataU(), 4, halfsize); @@ -119,7 +123,7 @@ TEST(I422BufferTest, ToI420) { } } - rtc::scoped_refptr i420_buffer(i422_buffer->ToI420()); + scoped_refptr i420_buffer(i422_buffer->ToI420()); EXPECT_EQ(height, i420_buffer->height()); EXPECT_EQ(width, i420_buffer->width()); EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer)); diff --git a/api/video/test/i444_buffer_unittest.cc b/api/video/test/i444_buffer_unittest.cc index 9a1a9315aa..e9d6832abb 100644 --- a/api/video/test/i444_buffer_unittest.cc +++ b/api/video/test/i444_buffer_unittest.cc @@ -11,27 +11,31 @@ #include "api/video/i444_buffer.h" +#include +#include + +#include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "api/video/video_frame_buffer.h" #include "test/frame_utils.h" -#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { namespace { -int GetY(rtc::scoped_refptr buf, int col, int row) { +int GetY(scoped_refptr buf, int col, int row) { return buf->DataY()[row * buf->StrideY() + col]; } -int GetU(rtc::scoped_refptr buf, int col, int row) { +int GetU(scoped_refptr buf, int col, int row) { return buf->DataU()[row * buf->StrideU() + col]; } -int GetV(rtc::scoped_refptr buf, int col, int row) { +int GetV(scoped_refptr buf, int col, int row) { return buf->DataV()[row * buf->StrideV() + col]; } -void FillI444Buffer(rtc::scoped_refptr buf) { +void FillI444Buffer(scoped_refptr buf) { const uint8_t Y = 1; const uint8_t U = 2; const uint8_t V = 3; @@ -51,7 +55,7 @@ TEST(I444BufferTest, InitialData) { constexpr int width = 3; constexpr int height = 3; - rtc::scoped_refptr i444_buffer(I444Buffer::Create(width, height)); + scoped_refptr i444_buffer(I444Buffer::Create(width, height)); EXPECT_EQ(width, i444_buffer->width()); EXPECT_EQ(height, i444_buffer->height()); EXPECT_EQ(stride, i444_buffer->StrideY()); @@ -65,7 +69,7 @@ TEST(I444BufferTest, ReadPixels) { constexpr int width = 3; constexpr int height = 3; - rtc::scoped_refptr i444_buffer(I444Buffer::Create(width, height)); + scoped_refptr i444_buffer(I444Buffer::Create(width, height)); // Y = 1, U = 2, V = 3. 
FillI444Buffer(i444_buffer); for (int row = 0; row < height; row++) { @@ -83,12 +87,12 @@ TEST(I444BufferTest, ToI420) { constexpr int size_y = width * height; constexpr int size_u = (width + 1) / 2 * (height + 1) / 2; constexpr int size_v = (width + 1) / 2 * (height + 1) / 2; - rtc::scoped_refptr reference(I420Buffer::Create(width, height)); + scoped_refptr reference(I420Buffer::Create(width, height)); memset(reference->MutableDataY(), 8, size_y); memset(reference->MutableDataU(), 4, size_u); memset(reference->MutableDataV(), 2, size_v); - rtc::scoped_refptr i444_buffer(I444Buffer::Create(width, height)); + scoped_refptr i444_buffer(I444Buffer::Create(width, height)); // Convert the reference buffer to I444. memset(i444_buffer->MutableDataY(), 8, size_y); memset(i444_buffer->MutableDataU(), 4, size_y); @@ -103,7 +107,7 @@ TEST(I444BufferTest, ToI420) { } } - rtc::scoped_refptr i420_buffer(i444_buffer->ToI420()); + scoped_refptr i420_buffer(i444_buffer->ToI420()); EXPECT_EQ(height, i420_buffer->height()); EXPECT_EQ(width, i420_buffer->width()); EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer)); diff --git a/api/video/test/mock_recordable_encoded_frame.h b/api/video/test/mock_recordable_encoded_frame.h index 2178932d2a..d99c0ddece 100644 --- a/api/video/test/mock_recordable_encoded_frame.h +++ b/api/video/test/mock_recordable_encoded_frame.h @@ -11,20 +11,32 @@ #ifndef API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_ #define API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_ +#include + +#include "api/scoped_refptr.h" +#include "api/units/timestamp.h" +#include "api/video/color_space.h" +#include "api/video/encoded_image.h" #include "api/video/recordable_encoded_frame.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_rotation.h" #include "test/gmock.h" namespace webrtc { class MockRecordableEncodedFrame : public RecordableEncodedFrame { public: - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, encoded_buffer, (), (const, override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, color_space, (), (const, override)); + MOCK_METHOD(std::optional, + video_rotation, + (), + (const, override)); MOCK_METHOD(VideoCodecType, codec, (), (const, override)); MOCK_METHOD(bool, is_key_frame, (), (const, override)); MOCK_METHOD(EncodedResolution, resolution, (), (const, override)); diff --git a/api/video/test/nv12_buffer_unittest.cc b/api/video/test/nv12_buffer_unittest.cc index d84adb5bf5..e309f532f6 100644 --- a/api/video/test/nv12_buffer_unittest.cc +++ b/api/video/test/nv12_buffer_unittest.cc @@ -10,27 +10,31 @@ #include "api/video/nv12_buffer.h" +#include +#include + +#include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "api/video/video_frame_buffer.h" #include "test/frame_utils.h" -#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { namespace { -int GetY(rtc::scoped_refptr buf, int col, int row) { +int GetY(scoped_refptr buf, int col, int row) { return buf->DataY()[row * buf->StrideY() + col]; } -int GetU(rtc::scoped_refptr buf, int col, int row) { +int GetU(scoped_refptr buf, int col, int row) { return buf->DataUV()[(row / 2) * buf->StrideUV() + (col / 2) * 2]; } -int GetV(rtc::scoped_refptr buf, int col, int row) { +int GetV(scoped_refptr buf, int col, int row) { return buf->DataUV()[(row / 2) * buf->StrideUV() + (col / 2) * 2 + 1]; } -void FillNV12Buffer(rtc::scoped_refptr buf) { +void FillNV12Buffer(scoped_refptr buf) { const uint8_t Y = 1; const uint8_t U = 2; const uint8_t V = 3; @@ -57,7 +61,7 @@ 
TEST(NV12BufferTest, InitialData) { constexpr int width = 3; constexpr int height = 3; - rtc::scoped_refptr nv12_buffer(NV12Buffer::Create(width, height)); + scoped_refptr nv12_buffer(NV12Buffer::Create(width, height)); EXPECT_EQ(width, nv12_buffer->width()); EXPECT_EQ(height, nv12_buffer->height()); EXPECT_EQ(stride_y, nv12_buffer->StrideY()); @@ -70,7 +74,7 @@ TEST(NV12BufferTest, ReadPixels) { constexpr int width = 3; constexpr int height = 3; - rtc::scoped_refptr nv12_buffer(NV12Buffer::Create(width, height)); + scoped_refptr nv12_buffer(NV12Buffer::Create(width, height)); // Y = 1, U = 2, V = 3. FillNV12Buffer(nv12_buffer); for (int row = 0; row < height; row++) { @@ -88,12 +92,12 @@ TEST(NV12BufferTest, ToI420) { constexpr int size_y = width * height; constexpr int size_u = (width + 1) / 2 * (height + 1) / 2; constexpr int size_v = (width + 1) / 2 * (height + 1) / 2; - rtc::scoped_refptr reference(I420Buffer::Create(width, height)); + scoped_refptr reference(I420Buffer::Create(width, height)); memset(reference->MutableDataY(), 8, size_y); memset(reference->MutableDataU(), 4, size_u); memset(reference->MutableDataV(), 2, size_v); - rtc::scoped_refptr nv12_buffer(NV12Buffer::Create(width, height)); + scoped_refptr nv12_buffer(NV12Buffer::Create(width, height)); // Convert the reference buffer to NV12. memset(nv12_buffer->MutableDataY(), 8, size_y); // Interleaving u/v values. @@ -110,7 +114,7 @@ TEST(NV12BufferTest, ToI420) { } } - rtc::scoped_refptr i420_buffer(nv12_buffer->ToI420()); + scoped_refptr i420_buffer(nv12_buffer->ToI420()); EXPECT_EQ(height, i420_buffer->height()); EXPECT_EQ(width, i420_buffer->width()); EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer)); diff --git a/api/video/test/video_bitrate_allocation_unittest.cc b/api/video/test/video_bitrate_allocation_unittest.cc index 8e66d4b0a1..f84fa69a2f 100644 --- a/api/video/test/video_bitrate_allocation_unittest.cc +++ b/api/video/test/video_bitrate_allocation_unittest.cc @@ -10,9 +10,9 @@ #include "api/video/video_bitrate_allocation.h" +#include #include -#include "absl/types/optional.h" #include "test/gtest.h" namespace webrtc { @@ -31,7 +31,7 @@ TEST(VideoBitrateAllocation, SimulcastTargetBitrate) { layer1_bitrate.SetBitrate(0, 0, 40000); layer1_bitrate.SetBitrate(0, 1, 80000); - std::vector> layer_allocations = + std::vector> layer_allocations = bitrate.GetSimulcastAllocations(); EXPECT_EQ(layer0_bitrate, layer_allocations[0]); @@ -54,7 +54,7 @@ TEST(VideoBitrateAllocation, SimulcastTargetBitrateWithInactiveStream) { layer2_bitrate.SetBitrate(0, 0, 40000); layer2_bitrate.SetBitrate(0, 1, 80000); - std::vector> layer_allocations = + std::vector> layer_allocations = bitrate.GetSimulcastAllocations(); EXPECT_EQ(layer0_bitrate, layer_allocations[0]); diff --git a/api/video/test/video_frame_matchers.h b/api/video/test/video_frame_matchers.h index 250459377b..bd06592e94 100644 --- a/api/video/test/video_frame_matchers.h +++ b/api/video/test/video_frame_matchers.h @@ -11,8 +11,6 @@ #ifndef API_VIDEO_TEST_VIDEO_FRAME_MATCHERS_H_ #define API_VIDEO_TEST_VIDEO_FRAME_MATCHERS_H_ -#include "api/rtp_packet_infos.h" -#include "api/video/video_frame.h" #include "test/gmock.h" namespace webrtc::test::video_frame_matchers { diff --git a/api/video/video_adaptation_counters.cc b/api/video/video_adaptation_counters.cc index df1769d5d4..4594b87142 100644 --- a/api/video/video_adaptation_counters.cc +++ b/api/video/video_adaptation_counters.cc @@ -10,6 +10,8 @@ #include "api/video/video_adaptation_counters.h" +#include + #include 
"rtc_base/strings/string_builder.h" namespace webrtc { @@ -33,7 +35,7 @@ VideoAdaptationCounters VideoAdaptationCounters::operator+( } std::string VideoAdaptationCounters::ToString() const { - rtc::StringBuilder ss; + StringBuilder ss; ss << "{ res=" << resolution_adaptations << " fps=" << fps_adaptations << " }"; return ss.Release(); diff --git a/api/video/video_bitrate_allocation.cc b/api/video/video_bitrate_allocation.cc index e189db1c19..6e47777621 100644 --- a/api/video/video_bitrate_allocation.cc +++ b/api/video/video_bitrate_allocation.cc @@ -10,8 +10,13 @@ #include "api/video/video_bitrate_allocation.h" +#include #include +#include +#include +#include +#include "api/video/video_codec_constants.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/strings/string_builder.h" @@ -27,7 +32,7 @@ bool VideoBitrateAllocation::SetBitrate(size_t spatial_index, RTC_CHECK_LT(spatial_index, kMaxSpatialLayers); RTC_CHECK_LT(temporal_index, kMaxTemporalStreams); int64_t new_bitrate_sum_bps = sum_; - absl::optional& layer_bitrate = + std::optional& layer_bitrate = bitrates_[spatial_index][temporal_index]; if (layer_bitrate) { RTC_DCHECK_LE(*layer_bitrate, sum_); @@ -38,7 +43,7 @@ bool VideoBitrateAllocation::SetBitrate(size_t spatial_index, return false; layer_bitrate = bitrate_bps; - sum_ = rtc::dchecked_cast(new_bitrate_sum_bps); + sum_ = dchecked_cast(new_bitrate_sum_bps); return true; } @@ -107,11 +112,11 @@ std::vector VideoBitrateAllocation::GetTemporalLayerAllocation( return temporal_rates; } -std::vector> +std::vector> VideoBitrateAllocation::GetSimulcastAllocations() const { - std::vector> bitrates; + std::vector> bitrates; for (size_t si = 0; si < kMaxSpatialLayers; ++si) { - absl::optional layer_bitrate; + std::optional layer_bitrate; if (IsSpatialLayerUsed(si)) { layer_bitrate = VideoBitrateAllocation(); for (int tl = 0; tl < kMaxTemporalStreams; ++tl) { @@ -142,7 +147,7 @@ std::string VideoBitrateAllocation::ToString() const { // Max string length in practice is 260, but let's have some overhead and // round up to nearest power of two. char string_buf[512]; - rtc::SimpleStringBuilder ssb(string_buf); + SimpleStringBuilder ssb(string_buf); ssb << "VideoBitrateAllocation ["; uint32_t spatial_cumulator = 0; diff --git a/api/video/video_bitrate_allocation.h b/api/video/video_bitrate_allocation.h index 4feffa2e66..63ac26a173 100644 --- a/api/video/video_bitrate_allocation.h +++ b/api/video/video_bitrate_allocation.h @@ -15,10 +15,10 @@ #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/video/video_codec_constants.h" #include "rtc_base/system/rtc_export.h" @@ -64,7 +64,7 @@ class RTC_EXPORT VideoBitrateAllocation { // Returns one VideoBitrateAllocation for each spatial layer. This is used to // configure simulcast streams. Note that the length of the returned vector is // always kMaxSpatialLayers, the optional is unset for unused layers. - std::vector> GetSimulcastAllocations() + std::vector> GetSimulcastAllocations() const; uint32_t get_sum_bps() const { return sum_; } // Sum of all bitrates. 
@@ -87,7 +87,7 @@ class RTC_EXPORT VideoBitrateAllocation { private: uint32_t sum_; - absl::optional bitrates_[kMaxSpatialLayers][kMaxTemporalStreams]; + std::optional bitrates_[kMaxSpatialLayers][kMaxTemporalStreams]; bool is_bw_limited_; }; diff --git a/api/video/video_bitrate_allocator.cc b/api/video/video_bitrate_allocator.cc index f4e843b348..128a34ac36 100644 --- a/api/video/video_bitrate_allocator.cc +++ b/api/video/video_bitrate_allocator.cc @@ -10,6 +10,11 @@ #include "api/video/video_bitrate_allocator.h" +#include + +#include "api/units/data_rate.h" +#include "api/video/video_bitrate_allocation.h" + namespace webrtc { VideoBitrateAllocationParameters::VideoBitrateAllocationParameters( @@ -49,6 +54,6 @@ VideoBitrateAllocation VideoBitrateAllocator::Allocate( return GetAllocation(parameters.total_bitrate.bps(), parameters.framerate); } -void VideoBitrateAllocator::SetLegacyConferenceMode(bool enabled) {} +void VideoBitrateAllocator::SetLegacyConferenceMode(bool /* enabled */) {} } // namespace webrtc diff --git a/api/video/video_bitrate_allocator.h b/api/video/video_bitrate_allocator.h index fdc86dbc57..61d74ae490 100644 --- a/api/video/video_bitrate_allocator.h +++ b/api/video/video_bitrate_allocator.h @@ -11,6 +11,8 @@ #ifndef API_VIDEO_VIDEO_BITRATE_ALLOCATOR_H_ #define API_VIDEO_VIDEO_BITRATE_ALLOCATOR_H_ +#include + #include "api/units/data_rate.h" #include "api/video/video_bitrate_allocation.h" diff --git a/api/video/video_bitrate_allocator_factory.h b/api/video/video_bitrate_allocator_factory.h index cb34ebb5e1..31b6794ad5 100644 --- a/api/video/video_bitrate_allocator_factory.h +++ b/api/video/video_bitrate_allocator_factory.h @@ -13,6 +13,7 @@ #include +#include "api/environment/environment.h" #include "api/video/video_bitrate_allocator.h" #include "api/video_codecs/video_codec.h" @@ -23,8 +24,10 @@ namespace webrtc { class VideoBitrateAllocatorFactory { public: virtual ~VideoBitrateAllocatorFactory() = default; + // Creates a VideoBitrateAllocator for a specific video codec. 
- virtual std::unique_ptr CreateVideoBitrateAllocator( + virtual std::unique_ptr Create( + const Environment& env, const VideoCodec& codec) = 0; }; diff --git a/api/video/video_codec_type.h b/api/video/video_codec_type.h index 74a4bc4258..2509793e89 100644 --- a/api/video/video_codec_type.h +++ b/api/video/video_codec_type.h @@ -21,7 +21,7 @@ enum VideoCodecType { kVideoCodecVP9, kVideoCodecAV1, kVideoCodecH264, - kVideoCodecMultiplex, + kVideoCodecH265, }; } // namespace webrtc diff --git a/api/video/video_content_type.cc b/api/video/video_content_type.cc index 79da9ff273..75beb5c935 100644 --- a/api/video/video_content_type.cc +++ b/api/video/video_content_type.cc @@ -10,6 +10,8 @@ #include "api/video/video_content_type.h" +#include + #include "rtc_base/checks.h" namespace webrtc { diff --git a/api/video/video_frame.cc b/api/video/video_frame.cc index 35dedce1b2..8483ed011a 100644 --- a/api/video/video_frame.cc +++ b/api/video/video_frame.cc @@ -11,8 +11,16 @@ #include "api/video/video_frame.h" #include +#include +#include #include +#include "api/rtp_packet_infos.h" +#include "api/scoped_refptr.h" +#include "api/units/timestamp.h" +#include "api/video/color_space.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" #include "rtc_base/checks.h" #include "rtc_base/time_utils.h" @@ -164,20 +172,20 @@ VideoFrame::Builder::~Builder() = default; VideoFrame VideoFrame::Builder::build() { RTC_CHECK(video_frame_buffer_ != nullptr); return VideoFrame(id_, video_frame_buffer_, timestamp_us_, - capture_time_identifier_, timestamp_rtp_, ntp_time_ms_, - rotation_, color_space_, render_parameters_, update_rect_, - packet_infos_); + presentation_timestamp_, reference_time_, timestamp_rtp_, + ntp_time_ms_, rotation_, color_space_, render_parameters_, + update_rect_, packet_infos_); } VideoFrame::Builder& VideoFrame::Builder::set_video_frame_buffer( - const rtc::scoped_refptr& buffer) { + const scoped_refptr& buffer) { video_frame_buffer_ = buffer; return *this; } VideoFrame::Builder& VideoFrame::Builder::set_timestamp_ms( int64_t timestamp_ms) { - timestamp_us_ = timestamp_ms * rtc::kNumMicrosecsPerMillisec; + timestamp_us_ = timestamp_ms * kNumMicrosecsPerMillisec; return *this; } @@ -188,8 +196,26 @@ VideoFrame::Builder& VideoFrame::Builder::set_timestamp_us( } VideoFrame::Builder& VideoFrame::Builder::set_capture_time_identifier( - const absl::optional& capture_time_identifier) { - capture_time_identifier_ = capture_time_identifier; + const std::optional& presentation_timestamp) { + presentation_timestamp_ = presentation_timestamp; + return *this; +} + +VideoFrame::Builder& VideoFrame::Builder::set_presentation_timestamp( + const std::optional& presentation_timestamp) { + presentation_timestamp_ = presentation_timestamp; + return *this; +} + +VideoFrame::Builder& VideoFrame::Builder::set_reference_time( + const std::optional& reference_time) { + reference_time_ = reference_time; + return *this; +} + +VideoFrame::Builder& VideoFrame::Builder::set_rtp_timestamp( + uint32_t rtp_timestamp) { + timestamp_rtp_ = rtp_timestamp; return *this; } @@ -210,15 +236,14 @@ VideoFrame::Builder& VideoFrame::Builder::set_rotation(VideoRotation rotation) { } VideoFrame::Builder& VideoFrame::Builder::set_color_space( - const absl::optional& color_space) { + const std::optional& color_space) { color_space_ = color_space; return *this; } VideoFrame::Builder& VideoFrame::Builder::set_color_space( const ColorSpace* color_space) { - color_space_ = - color_space ? 
absl::make_optional(*color_space) : absl::nullopt; + color_space_ = color_space ? std::make_optional(*color_space) : std::nullopt; return *this; } @@ -228,7 +253,7 @@ VideoFrame::Builder& VideoFrame::Builder::set_id(uint16_t id) { } VideoFrame::Builder& VideoFrame::Builder::set_update_rect( - const absl::optional& update_rect) { + const std::optional& update_rect) { update_rect_ = update_rect; return *this; } @@ -239,7 +264,7 @@ VideoFrame::Builder& VideoFrame::Builder::set_packet_infos( return *this; } -VideoFrame::VideoFrame(const rtc::scoped_refptr& buffer, +VideoFrame::VideoFrame(const scoped_refptr& buffer, webrtc::VideoRotation rotation, int64_t timestamp_us) : video_frame_buffer_(buffer), @@ -248,35 +273,37 @@ VideoFrame::VideoFrame(const rtc::scoped_refptr& buffer, timestamp_us_(timestamp_us), rotation_(rotation) {} -VideoFrame::VideoFrame(const rtc::scoped_refptr& buffer, +VideoFrame::VideoFrame(const scoped_refptr& buffer, uint32_t timestamp_rtp, int64_t render_time_ms, VideoRotation rotation) : video_frame_buffer_(buffer), timestamp_rtp_(timestamp_rtp), ntp_time_ms_(0), - timestamp_us_(render_time_ms * rtc::kNumMicrosecsPerMillisec), + timestamp_us_(render_time_ms * kNumMicrosecsPerMillisec), rotation_(rotation) { RTC_DCHECK(buffer); } VideoFrame::VideoFrame(uint16_t id, - const rtc::scoped_refptr& buffer, + const scoped_refptr& buffer, int64_t timestamp_us, - const absl::optional& capture_time_identifier, + const std::optional& presentation_timestamp, + const std::optional& reference_time, uint32_t timestamp_rtp, int64_t ntp_time_ms, VideoRotation rotation, - const absl::optional& color_space, + const std::optional& color_space, const RenderParameters& render_parameters, - const absl::optional& update_rect, + const std::optional& update_rect, RtpPacketInfos packet_infos) : id_(id), video_frame_buffer_(buffer), timestamp_rtp_(timestamp_rtp), ntp_time_ms_(ntp_time_ms), timestamp_us_(timestamp_us), - capture_time_identifier_(capture_time_identifier), + presentation_timestamp_(presentation_timestamp), + reference_time_(reference_time), rotation_(rotation), color_space_(color_space), render_parameters_(render_parameters), @@ -309,18 +336,18 @@ uint32_t VideoFrame::size() const { return width() * height(); } -rtc::scoped_refptr VideoFrame::video_frame_buffer() const { +scoped_refptr VideoFrame::video_frame_buffer() const { return video_frame_buffer_; } void VideoFrame::set_video_frame_buffer( - const rtc::scoped_refptr& buffer) { + const scoped_refptr& buffer) { RTC_CHECK(buffer); video_frame_buffer_ = buffer; } int64_t VideoFrame::render_time_ms() const { - return timestamp_us() / rtc::kNumMicrosecsPerMillisec; + return timestamp_us() / kNumMicrosecsPerMillisec; } } // namespace webrtc diff --git a/api/video/video_frame.h b/api/video/video_frame.h index a257a3209e..f7616d3a02 100644 --- a/api/video/video_frame.h +++ b/api/video/video_frame.h @@ -13,13 +13,14 @@ #include +#include #include -#include "absl/types/optional.h" #include "api/rtp_packet_infos.h" #include "api/scoped_refptr.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/color_space.h" -#include "api/video/hdr_metadata.h" #include "api/video/video_frame_buffer.h" #include "api/video/video_rotation.h" #include "rtc_base/checks.h" @@ -33,10 +34,10 @@ class RTC_EXPORT VideoFrame { static constexpr uint16_t kNotSetId = 0; struct RTC_EXPORT UpdateRect { - int offset_x; - int offset_y; - int width; - int height; + int offset_x = 0; + int offset_y = 0; + int width = 0; + int height = 0; 
// Makes this UpdateRect a bounding box of this and other rect. void Union(const UpdateRect& other); @@ -83,7 +84,7 @@ class RTC_EXPORT VideoFrame { struct RTC_EXPORT RenderParameters { bool use_low_latency_rendering = false; - absl::optional max_composition_delay_in_frames; + std::optional max_composition_delay_in_frames; bool operator==(const RenderParameters& other) const { return other.use_low_latency_rendering == use_low_latency_rendering && @@ -104,39 +105,46 @@ class RTC_EXPORT VideoFrame { VideoFrame build(); Builder& set_video_frame_buffer( - const rtc::scoped_refptr& buffer); + const scoped_refptr& buffer); Builder& set_timestamp_ms(int64_t timestamp_ms); Builder& set_timestamp_us(int64_t timestamp_us); - Builder& set_capture_time_identifier( - const absl::optional& capture_time_identifier); + [[deprecated("Use set_presentation_timestamp instead")]] Builder& + set_capture_time_identifier( + const std::optional& presentation_timestamp); + Builder& set_presentation_timestamp( + const std::optional& presentation_timestamp); + Builder& set_reference_time(const std::optional& reference_time); + Builder& set_rtp_timestamp(uint32_t rtp_timestamp); + // TODO(https://bugs.webrtc.org/13756): Deprecate and use set_rtp_timestamp. Builder& set_timestamp_rtp(uint32_t timestamp_rtp); Builder& set_ntp_time_ms(int64_t ntp_time_ms); Builder& set_rotation(VideoRotation rotation); - Builder& set_color_space(const absl::optional& color_space); + Builder& set_color_space(const std::optional& color_space); Builder& set_color_space(const ColorSpace* color_space); Builder& set_id(uint16_t id); - Builder& set_update_rect(const absl::optional& update_rect); + Builder& set_update_rect(const std::optional& update_rect); Builder& set_packet_infos(RtpPacketInfos packet_infos); private: uint16_t id_ = kNotSetId; - rtc::scoped_refptr video_frame_buffer_; + scoped_refptr video_frame_buffer_; int64_t timestamp_us_ = 0; - absl::optional capture_time_identifier_; + std::optional presentation_timestamp_; + std::optional reference_time_; uint32_t timestamp_rtp_ = 0; int64_t ntp_time_ms_ = 0; VideoRotation rotation_ = kVideoRotation_0; - absl::optional color_space_; + std::optional color_space_; RenderParameters render_parameters_; - absl::optional update_rect_; + std::optional update_rect_; RtpPacketInfos packet_infos_; }; // To be deprecated. Migrate all use to Builder. - VideoFrame(const rtc::scoped_refptr& buffer, + VideoFrame(const scoped_refptr& buffer, webrtc::VideoRotation rotation, int64_t timestamp_us); - VideoFrame(const rtc::scoped_refptr& buffer, + VideoFrame(const scoped_refptr& buffer, uint32_t timestamp_rtp, int64_t render_time_ms, VideoRotation rotation); @@ -165,23 +173,38 @@ class RTC_EXPORT VideoFrame { uint16_t id() const { return id_; } void set_id(uint16_t id) { id_ = id; } - // System monotonic clock, same timebase as rtc::TimeMicros(). + // System monotonic clock, same timebase as webrtc::TimeMicros(). int64_t timestamp_us() const { return timestamp_us_; } void set_timestamp_us(int64_t timestamp_us) { timestamp_us_ = timestamp_us; } - const absl::optional& capture_time_identifier() const { - return capture_time_identifier_; + // TODO(https://bugs.webrtc.org/373365537): Remove this once its usage is + // removed from blink. 
+ const std::optional& capture_time_identifier() const { + return presentation_timestamp_; } - void set_capture_time_identifier( - const absl::optional& capture_time_identifier) { - capture_time_identifier_ = capture_time_identifier; + + const std::optional& presentation_timestamp() const { + return presentation_timestamp_; + } + void set_presentation_timestamp( + const std::optional& presentation_timestamp) { + presentation_timestamp_ = presentation_timestamp; + } + + const std::optional& reference_time() const { + return reference_time_; + } + void set_reference_time(const std::optional& reference_time) { + reference_time_ = reference_time; } // Set frame timestamp (90kHz). - void set_timestamp(uint32_t timestamp) { timestamp_rtp_ = timestamp; } + void set_rtp_timestamp(uint32_t rtp_timestamp) { + timestamp_rtp_ = rtp_timestamp; + } // Get frame timestamp (90kHz). - uint32_t timestamp() const { return timestamp_rtp_; } + uint32_t rtp_timestamp() const { return timestamp_rtp_; } // Set capture ntp time in milliseconds. void set_ntp_time_ms(int64_t ntp_time_ms) { ntp_time_ms_ = ntp_time_ms; } @@ -203,8 +226,8 @@ class RTC_EXPORT VideoFrame { void set_rotation(VideoRotation rotation) { rotation_ = rotation; } // Get color space when available. - const absl::optional& color_space() const { return color_space_; } - void set_color_space(const absl::optional& color_space) { + const std::optional& color_space() const { return color_space_; } + void set_color_space(const std::optional& color_space) { color_space_ = color_space; } @@ -213,23 +236,14 @@ class RTC_EXPORT VideoFrame { render_parameters_ = render_parameters; } - // Deprecated in favor of render_parameters, will be removed once Chromium is - // updated. max_composition_delay_in_frames() is used in an experiment of a - // low-latency renderer algorithm see crbug.com/1138888. - [[deprecated("Use render_parameters() instead.")]] absl::optional - max_composition_delay_in_frames() const { - return render_parameters_.max_composition_delay_in_frames; - } - // Get render time in milliseconds. int64_t render_time_ms() const; // Return the underlying buffer. Never nullptr for a properly // initialized VideoFrame. - rtc::scoped_refptr video_frame_buffer() const; + scoped_refptr video_frame_buffer() const; - void set_video_frame_buffer( - const rtc::scoped_refptr& buffer); + void set_video_frame_buffer(const scoped_refptr& buffer); // Return true if the frame is stored in a texture. bool is_texture() const { @@ -253,7 +267,7 @@ class RTC_EXPORT VideoFrame { update_rect_ = update_rect; } - void clear_update_rect() { update_rect_ = absl::nullopt; } + void clear_update_rect() { update_rect_ = std::nullopt; } // Get information about packets used to assemble this video frame. Might be // empty if the information isn't available. 
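A sketch of how the renamed Builder setters and frame accessors above might be used together (illustration only, not code from this change; buffer size and timestamp values are arbitrary):

#include "api/units/timestamp.h"
#include "api/video/i420_buffer.h"
#include "api/video/video_frame.h"
#include "rtc_base/time_utils.h"

void BuildExampleFrame() {  // hypothetical helper
  webrtc::VideoFrame frame =
      webrtc::VideoFrame::Builder()
          .set_video_frame_buffer(webrtc::I420Buffer::Create(640, 360))
          .set_timestamp_us(webrtc::TimeMicros())
          .set_rtp_timestamp(90000)  // preferred over set_timestamp_rtp()
          .set_presentation_timestamp(webrtc::Timestamp::Millis(1234))
          .set_rotation(webrtc::kVideoRotation_0)
          .build();
  uint32_t rtp_ts = frame.rtp_timestamp();  // renamed from timestamp()
  (void)rtp_ts;
}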
@@ -262,7 +276,7 @@ class RTC_EXPORT VideoFrame { packet_infos_ = std::move(value); } - const absl::optional processing_time() const { + const std::optional processing_time() const { return processing_time_; } void set_processing_time(const ProcessingTime& processing_time) { @@ -271,33 +285,39 @@ class RTC_EXPORT VideoFrame { private: VideoFrame(uint16_t id, - const rtc::scoped_refptr& buffer, + const scoped_refptr& buffer, int64_t timestamp_us, - const absl::optional& capture_time_identifier, + const std::optional& presentation_timestamp, + const std::optional& reference_time, uint32_t timestamp_rtp, int64_t ntp_time_ms, VideoRotation rotation, - const absl::optional& color_space, + const std::optional& color_space, const RenderParameters& render_parameters, - const absl::optional& update_rect, + const std::optional& update_rect, RtpPacketInfos packet_infos); uint16_t id_; // An opaque reference counted handle that stores the pixel data. - rtc::scoped_refptr video_frame_buffer_; + scoped_refptr video_frame_buffer_; uint32_t timestamp_rtp_; int64_t ntp_time_ms_; int64_t timestamp_us_; - absl::optional capture_time_identifier_; + std::optional presentation_timestamp_; + // Contains a monotonically increasing clock time and represents the time + // when the frame was captured. Not all platforms provide the "true" sample + // capture time in |reference_time| but might instead use a somewhat delayed + // (by the time it took to capture the frame) version of it. + std::optional reference_time_; VideoRotation rotation_; - absl::optional color_space_; + std::optional color_space_; // Contains parameters that affect have the frame should be rendered. RenderParameters render_parameters_; // Updated since the last frame area. If present it means that the bounding // box of all the changes is within the rectangular area and is close to it. // If absent, it means that there's no information about the change at all and // update_rect() will return a rectangle corresponding to the entire frame. - absl::optional update_rect_; + std::optional update_rect_; // Information about packets used to assemble this video frame. This is needed // by `SourceTracker` when the frame is delivered to the RTCRtpReceiver's // MediaStreamTrack, in order to implement getContributingSources(). See: @@ -307,7 +327,7 @@ class RTC_EXPORT VideoFrame { // timestamps when the frame is sent to the decoder and the decoded image // returned from the decoder. // Currently, not set for locally captured video frames. 
- absl::optional processing_time_; + std::optional processing_time_; }; } // namespace webrtc diff --git a/api/video/video_frame_buffer.cc b/api/video/video_frame_buffer.cc index 374b438adc..b14281415c 100644 --- a/api/video/video_frame_buffer.cc +++ b/api/video/video_frame_buffer.cc @@ -10,6 +10,10 @@ #include "api/video/video_frame_buffer.h" +#include + +#include "api/array_view.h" +#include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" #include "api/video/i422_buffer.h" #include "api/video/i444_buffer.h" @@ -18,14 +22,14 @@ namespace webrtc { -rtc::scoped_refptr VideoFrameBuffer::CropAndScale( +scoped_refptr VideoFrameBuffer::CropAndScale( int offset_x, int offset_y, int crop_width, int crop_height, int scaled_width, int scaled_height) { - rtc::scoped_refptr result = + scoped_refptr result = I420Buffer::Create(scaled_width, scaled_height); result->CropAndScaleFrom(*this->ToI420(), offset_x, offset_y, crop_width, crop_height); @@ -73,12 +77,16 @@ const NV12BufferInterface* VideoFrameBuffer::GetNV12() const { return static_cast(this); } -rtc::scoped_refptr VideoFrameBuffer::GetMappedFrameBuffer( - rtc::ArrayView types) { +scoped_refptr VideoFrameBuffer::GetMappedFrameBuffer( + ArrayView /* types */) { RTC_CHECK(type() == Type::kNative); return nullptr; } +std::string VideoFrameBuffer::storage_representation() const { + return "?"; +} + VideoFrameBuffer::Type I420BufferInterface::type() const { return Type::kI420; } @@ -116,8 +124,8 @@ int I420BufferInterface::ChromaHeight() const { return (height() + 1) / 2; } -rtc::scoped_refptr I420BufferInterface::ToI420() { - return rtc::scoped_refptr(this); +scoped_refptr I420BufferInterface::ToI420() { + return scoped_refptr(this); } const I420BufferInterface* I420BufferInterface::GetI420() const { @@ -140,14 +148,14 @@ int I444BufferInterface::ChromaHeight() const { return height(); } -rtc::scoped_refptr I444BufferInterface::CropAndScale( +scoped_refptr I444BufferInterface::CropAndScale( int offset_x, int offset_y, int crop_width, int crop_height, int scaled_width, int scaled_height) { - rtc::scoped_refptr result = + scoped_refptr result = I444Buffer::Create(scaled_width, scaled_height); result->CropAndScaleFrom(*this, offset_x, offset_y, crop_width, crop_height); return result; @@ -165,14 +173,14 @@ int I422BufferInterface::ChromaHeight() const { return height(); } -rtc::scoped_refptr I422BufferInterface::CropAndScale( +scoped_refptr I422BufferInterface::CropAndScale( int offset_x, int offset_y, int crop_width, int crop_height, int scaled_width, int scaled_height) { - rtc::scoped_refptr result = + scoped_refptr result = I422Buffer::Create(scaled_width, scaled_height); result->CropAndScaleFrom(*this, offset_x, offset_y, crop_width, crop_height); return result; @@ -226,17 +234,29 @@ int NV12BufferInterface::ChromaHeight() const { return (height() + 1) / 2; } -rtc::scoped_refptr NV12BufferInterface::CropAndScale( +scoped_refptr NV12BufferInterface::CropAndScale( int offset_x, int offset_y, int crop_width, int crop_height, int scaled_width, int scaled_height) { - rtc::scoped_refptr result = + scoped_refptr result = NV12Buffer::Create(scaled_width, scaled_height); result->CropAndScaleFrom(*this, offset_x, offset_y, crop_width, crop_height); return result; } +void CheckValidDimensions(int width, + int height, + int stride_y, + int stride_u, + int stride_v) { + RTC_CHECK_GT(width, 0); + RTC_CHECK_GT(height, 0); + RTC_CHECK_GE(stride_y, width); + RTC_CHECK_GT(stride_u, 0); + RTC_CHECK_GT(stride_v, 0); +} + } // namespace webrtc diff --git 
a/api/video/video_frame_buffer.h b/api/video/video_frame_buffer.h index aaf786699f..e5df90075f 100644 --- a/api/video/video_frame_buffer.h +++ b/api/video/video_frame_buffer.h @@ -11,11 +11,12 @@ #ifndef API_VIDEO_VIDEO_FRAME_BUFFER_H_ #define API_VIDEO_VIDEO_FRAME_BUFFER_H_ -#include +#include +#include #include "api/array_view.h" +#include "api/ref_count.h" #include "api/scoped_refptr.h" -#include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -44,7 +45,7 @@ class NV12BufferInterface; // performance by providing an optimized path without intermediate conversions. // Frame metadata such as rotation and timestamp are stored in // webrtc::VideoFrame, and not here. -class RTC_EXPORT VideoFrameBuffer : public rtc::RefCountInterface { +class RTC_EXPORT VideoFrameBuffer : public webrtc::RefCountInterface { public: // New frame buffer types will be added conservatively when there is an // opportunity to optimize the path between some pair of video source and @@ -77,7 +78,7 @@ class RTC_EXPORT VideoFrameBuffer : public rtc::RefCountInterface { // software encoders. // Conversion may fail, for example if reading the pixel data from a texture // fails. If the conversion fails, nullptr is returned. - virtual rtc::scoped_refptr ToI420() = 0; + virtual scoped_refptr ToI420() = 0; // GetI420() methods should return I420 buffer if conversion is trivial, i.e // no change for binary data is needed. Otherwise these methods should return @@ -94,16 +95,15 @@ class RTC_EXPORT VideoFrameBuffer : public rtc::RefCountInterface { // especially for kNative. // First, the image is cropped to `crop_width` and `crop_height` and then // scaled to `scaled_width` and `scaled_height`. - virtual rtc::scoped_refptr CropAndScale(int offset_x, - int offset_y, - int crop_width, - int crop_height, - int scaled_width, - int scaled_height); + virtual scoped_refptr CropAndScale(int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height); // Alias for common use case. - rtc::scoped_refptr Scale(int scaled_width, - int scaled_height) { + scoped_refptr Scale(int scaled_width, int scaled_height) { return CropAndScale(0, 0, width(), height(), scaled_width, scaled_height); } @@ -122,8 +122,11 @@ class RTC_EXPORT VideoFrameBuffer : public rtc::RefCountInterface { // conversion for encoding with a software encoder. Returns nullptr if the // frame type is not supported, mapping is not possible, or if the kNative // frame has not implemented this method. Only callable if type() is kNative. - virtual rtc::scoped_refptr GetMappedFrameBuffer( - rtc::ArrayView types); + virtual scoped_refptr GetMappedFrameBuffer( + ArrayView types); + + // For logging: returns a textual representation of the storage. 
+ virtual std::string storage_representation() const; protected: ~VideoFrameBuffer() override {} @@ -169,7 +172,7 @@ class RTC_EXPORT I420BufferInterface : public PlanarYuv8Buffer { int ChromaWidth() const final; int ChromaHeight() const final; - rtc::scoped_refptr ToI420() final; + scoped_refptr ToI420() final; const I420BufferInterface* GetI420() const final; protected: @@ -194,12 +197,12 @@ class I422BufferInterface : public PlanarYuv8Buffer { int ChromaWidth() const final; int ChromaHeight() const final; - rtc::scoped_refptr CropAndScale(int offset_x, - int offset_y, - int crop_width, - int crop_height, - int scaled_width, - int scaled_height) override; + scoped_refptr CropAndScale(int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) override; protected: ~I422BufferInterface() override {} @@ -213,19 +216,19 @@ class I444BufferInterface : public PlanarYuv8Buffer { int ChromaWidth() const final; int ChromaHeight() const final; - rtc::scoped_refptr CropAndScale(int offset_x, - int offset_y, - int crop_width, - int crop_height, - int scaled_width, - int scaled_height) override; + scoped_refptr CropAndScale(int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) override; protected: ~I444BufferInterface() override {} }; -// This interface represents 8-bit to 16-bit color depth formats: Type::kI010 or -// Type::kI210 . +// This interface represents 8-bit to 16-bit color depth formats: Type::kI010, +// Type::kI210, or Type::kI410. class PlanarYuv16BBuffer : public PlanarYuvBuffer { public: // Returns pointer to the pixel data for a given plane. The memory is owned by @@ -309,17 +312,28 @@ class RTC_EXPORT NV12BufferInterface : public BiplanarYuv8Buffer { int ChromaWidth() const final; int ChromaHeight() const final; - rtc::scoped_refptr CropAndScale(int offset_x, - int offset_y, - int crop_width, - int crop_height, - int scaled_width, - int scaled_height) override; + scoped_refptr CropAndScale(int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) override; protected: ~NV12BufferInterface() override {} }; +// RTC_CHECKs that common values used to calculate buffer sizes are within the +// range of [1..std::numeric_limits::max()]. +// `width` and `height` must be > 0, `stride_y` must be >= `width` whereas +// `stride_u` and `stride_v` must be `> 0` as this is where the various yuv +// formats differ. 
+void CheckValidDimensions(int width, + int height, + int stride_y, + int stride_u, + int stride_v); + } // namespace webrtc #endif // API_VIDEO_VIDEO_FRAME_BUFFER_H_ diff --git a/api/video/video_frame_metadata.cc b/api/video/video_frame_metadata.cc index c5f880848c..f265fc335a 100644 --- a/api/video/video_frame_metadata.cc +++ b/api/video/video_frame_metadata.cc @@ -10,7 +10,17 @@ #include "api/video/video_frame_metadata.h" +#include +#include #include +#include + +#include "api/array_view.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" namespace webrtc { @@ -56,11 +66,11 @@ void VideoFrameMetadata::SetContentType(VideoContentType content_type) { content_type_ = content_type; } -absl::optional VideoFrameMetadata::GetFrameId() const { +std::optional VideoFrameMetadata::GetFrameId() const { return frame_id_; } -void VideoFrameMetadata::SetFrameId(absl::optional frame_id) { +void VideoFrameMetadata::SetFrameId(std::optional frame_id) { frame_id_ = frame_id; } @@ -80,23 +90,23 @@ void VideoFrameMetadata::SetTemporalIndex(int temporal_index) { temporal_index_ = temporal_index; } -rtc::ArrayView VideoFrameMetadata::GetFrameDependencies() const { +ArrayView VideoFrameMetadata::GetFrameDependencies() const { return frame_dependencies_; } void VideoFrameMetadata::SetFrameDependencies( - rtc::ArrayView frame_dependencies) { + ArrayView frame_dependencies) { frame_dependencies_.assign(frame_dependencies.begin(), frame_dependencies.end()); } -rtc::ArrayView +ArrayView VideoFrameMetadata::GetDecodeTargetIndications() const { return decode_target_indications_; } void VideoFrameMetadata::SetDecodeTargetIndications( - rtc::ArrayView decode_target_indications) { + ArrayView decode_target_indications) { decode_target_indications_.assign(decode_target_indications.begin(), decode_target_indications.end()); } diff --git a/api/video/video_frame_metadata.h b/api/video/video_frame_metadata.h index bf46387338..342d69b4f9 100644 --- a/api/video/video_frame_metadata.h +++ b/api/video/video_frame_metadata.h @@ -12,11 +12,11 @@ #define API_VIDEO_VIDEO_FRAME_METADATA_H_ #include +#include +#include #include #include "absl/container/inlined_vector.h" -#include "absl/types/optional.h" -#include "absl/types/variant.h" #include "api/array_view.h" #include "api/transport/rtp/dependency_descriptor.h" #include "api/video/video_codec_type.h" @@ -30,10 +30,10 @@ namespace webrtc { -using RTPVideoHeaderCodecSpecifics = absl::variant; +using RTPVideoHeaderCodecSpecifics = std::variant; // A subset of metadata from the RTP video header, exposed in insertable streams // API. 
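A small sketch of the metadata accessors after the absl::optional to std::optional migration (not part of this diff; the helper name and values are made up):

#include <optional>
#include "api/video/video_frame_metadata.h"

void TagMetadata() {  // hypothetical helper
  webrtc::VideoFrameMetadata metadata;
  metadata.SetFrameId(7);  // setter now takes std::optional<int64_t>
  metadata.SetSpatialIndex(0);
  metadata.SetTemporalIndex(1);
  if (metadata.GetFrameId().has_value()) {
    // Generic descriptor information is attached to this frame.
  }
}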
@@ -58,8 +58,8 @@ class RTC_EXPORT VideoFrameMetadata { VideoContentType GetContentType() const; void SetContentType(VideoContentType content_type); - absl::optional GetFrameId() const; - void SetFrameId(absl::optional frame_id); + std::optional GetFrameId() const; + void SetFrameId(std::optional frame_id); int GetSpatialIndex() const; void SetSpatialIndex(int spatial_index); @@ -67,13 +67,12 @@ class RTC_EXPORT VideoFrameMetadata { int GetTemporalIndex() const; void SetTemporalIndex(int temporal_index); - rtc::ArrayView GetFrameDependencies() const; - void SetFrameDependencies(rtc::ArrayView frame_dependencies); + ArrayView GetFrameDependencies() const; + void SetFrameDependencies(ArrayView frame_dependencies); - rtc::ArrayView GetDecodeTargetIndications() - const; + ArrayView GetDecodeTargetIndications() const; void SetDecodeTargetIndications( - rtc::ArrayView decode_target_indications); + ArrayView decode_target_indications); bool GetIsLastFrameInPicture() const; void SetIsLastFrameInPicture(bool is_last_frame_in_picture); @@ -107,7 +106,7 @@ class RTC_EXPORT VideoFrameMetadata { VideoContentType content_type_ = VideoContentType::UNSPECIFIED; // Corresponding to GenericDescriptorInfo. - absl::optional frame_id_; + std::optional frame_id_; int spatial_index_ = 0; int temporal_index_ = 0; absl::InlinedVector frame_dependencies_; diff --git a/api/video/video_frame_metadata_unittest.cc b/api/video/video_frame_metadata_unittest.cc index 0f730f7410..d12ff1f106 100644 --- a/api/video/video_frame_metadata_unittest.cc +++ b/api/video/video_frame_metadata_unittest.cc @@ -10,11 +10,9 @@ #include "api/video/video_frame_metadata.h" -#include "api/video/video_frame.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "test/gtest.h" -#include "video/video_receive_stream2.h" namespace webrtc { namespace { @@ -28,8 +26,7 @@ RTPVideoHeaderH264 ExampleHeaderH264() { RTPVideoHeaderH264 header; header.nalu_type = 4; header.packetization_type = H264PacketizationTypes::kH264StapA; - header.nalus[0] = nalu_info; - header.nalus_length = 1; + header.nalus = {nalu_info}; header.packetization_mode = H264PacketizationMode::SingleNalUnit; return header; } diff --git a/api/video/video_sink_interface.h b/api/video/video_sink_interface.h index 9c1f5f3214..b5ddb03a91 100644 --- a/api/video/video_sink_interface.h +++ b/api/video/video_sink_interface.h @@ -11,11 +11,9 @@ #ifndef API_VIDEO_VIDEO_SINK_INTERFACE_H_ #define API_VIDEO_VIDEO_SINK_INTERFACE_H_ -#include "absl/types/optional.h" #include "api/video_track_source_constraints.h" -#include "rtc_base/checks.h" -namespace rtc { +namespace webrtc { template class VideoSinkInterface { @@ -31,9 +29,17 @@ class VideoSinkInterface { // Called on the network thread when video constraints change. // TODO(crbug/1255737): make pure virtual once downstream project adapts. virtual void OnConstraintsChanged( - const webrtc::VideoTrackSourceConstraints& constraints) {} + const VideoTrackSourceConstraints& /* constraints */) {} }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::VideoSinkInterface; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // API_VIDEO_VIDEO_SINK_INTERFACE_H_ diff --git a/api/video/video_source_interface.cc b/api/video/video_source_interface.cc index 70a86c3d64..d3f987e4ef 100644 --- a/api/video/video_source_interface.cc +++ b/api/video/video_source_interface.cc @@ -10,10 +10,10 @@ #include "api/video/video_source_interface.h" -namespace rtc { +namespace webrtc { VideoSinkWants::VideoSinkWants() = default; VideoSinkWants::VideoSinkWants(const VideoSinkWants&) = default; VideoSinkWants::~VideoSinkWants() = default; -} // namespace rtc +} // namespace webrtc diff --git a/api/video/video_source_interface.h b/api/video/video_source_interface.h index c636c2ff95..b6f418218c 100644 --- a/api/video/video_source_interface.h +++ b/api/video/video_source_interface.h @@ -12,13 +12,13 @@ #define API_VIDEO_VIDEO_SOURCE_INTERFACE_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/video/video_sink_interface.h" #include "rtc_base/system/rtc_export.h" -namespace rtc { +namespace webrtc { // VideoSinkWants is used for notifying the source of properties a video frame // should have when it is delivered to a certain sink. @@ -50,7 +50,7 @@ struct RTC_EXPORT VideoSinkWants { // have improved after an earlier downgrade. The source should select the // closest resolution to this pixel count, but if max_pixel_count is set, it // still sets the absolute upper bound. - absl::optional target_pixel_count; + std::optional target_pixel_count; // Tells the source the maximum framerate the sink wants. int max_framerate_fps = std::numeric_limits::max(); @@ -82,8 +82,9 @@ struct RTC_EXPORT VideoSinkWants { // should only be used as a hint when constructing the webrtc::VideoFrame. std::vector resolutions; - // This is the resolution requested by the user using RtpEncodingParameters. - absl::optional requested_resolution; + // This is the resolution requested by the user using RtpEncodingParameters, + // which is the maximum `scale_resolution_down_by` value of any encoding. + std::optional requested_resolution; // `is_active` : Is this VideoSinkWants from an encoder that is encoding any // layer. IF YES, it will affect how the VideoAdapter will choose to @@ -96,13 +97,13 @@ struct RTC_EXPORT VideoSinkWants { // that aggregates several VideoSinkWants (and sends them to // AdaptedVideoTrackSource). struct Aggregates { - // `active_without_requested_resolution` is set by VideoBroadcaster + // `any_active_without_requested_resolution` is set by VideoBroadcaster // when aggregating sink wants if there exists any sink (encoder) that is // active but has not set the `requested_resolution`, i.e is relying on // OnOutputFormatRequest to handle encode resolution. bool any_active_without_requested_resolution = false; }; - absl::optional aggregates; + std::optional aggregates; }; inline bool operator==(const VideoSinkWants::FrameSize& a, @@ -131,5 +132,14 @@ class VideoSourceInterface { virtual void RequestRefreshFrame() {} }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::VideoSinkWants; +using ::webrtc::VideoSourceInterface; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // API_VIDEO_VIDEO_SOURCE_INTERFACE_H_ diff --git a/api/video/video_stream_encoder_settings.h b/api/video/video_stream_encoder_settings.h index 3aee5b7050..d02bcbe739 100644 --- a/api/video/video_stream_encoder_settings.h +++ b/api/video/video_stream_encoder_settings.h @@ -11,9 +11,8 @@ #ifndef API_VIDEO_VIDEO_STREAM_ENCODER_SETTINGS_H_ #define API_VIDEO_VIDEO_STREAM_ENCODER_SETTINGS_H_ -#include - #include "api/video/video_bitrate_allocator_factory.h" +#include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" @@ -53,6 +52,10 @@ struct VideoStreamEncoderSettings { // Negotiated capabilities which the VideoEncoder may expect the other // side to use. VideoEncoder::Capabilities capabilities; + + // Enables the frame instrumentation generator that is required for automatic + // corruption detection. + bool enable_frame_instrumentation_generator = false; }; } // namespace webrtc diff --git a/api/video/video_timing.cc b/api/video/video_timing.cc index d16911fb58..a5b7f78dc9 100644 --- a/api/video/video_timing.cc +++ b/api/video/video_timing.cc @@ -11,6 +11,8 @@ #include "api/video/video_timing.h" #include +#include +#include #include "api/array_view.h" #include "api/units/time_delta.h" @@ -25,7 +27,7 @@ uint16_t VideoSendTiming::GetDeltaCappedMs(int64_t base_ms, int64_t time_ms) { RTC_DLOG(LS_ERROR) << "Delta " << (time_ms - base_ms) << "ms expected to be positive"; } - return rtc::saturated_cast(time_ms - base_ms); + return saturated_cast(time_ms - base_ms); } uint16_t VideoSendTiming::GetDeltaCappedMs(TimeDelta delta) { @@ -33,7 +35,7 @@ uint16_t VideoSendTiming::GetDeltaCappedMs(TimeDelta delta) { RTC_DLOG(LS_ERROR) << "Delta " << delta.ms() << "ms expected to be positive"; } - return rtc::saturated_cast(delta.ms()); + return saturated_cast(delta.ms()); } TimingFrameInfo::TimingFrameInfo() @@ -87,7 +89,7 @@ std::string TimingFrameInfo::ToString() const { } char buf[1024]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); sb << rtp_timestamp << ',' << capture_time_ms << ',' << encode_start_ms << ',' << encode_finish_ms << ',' << packetization_finish_ms << ',' diff --git a/api/video_codecs/BUILD.gn b/api/video_codecs/BUILD.gn index d9dd5aea2b..8351ae424b 100644 --- a/api/video_codecs/BUILD.gn +++ b/api/video_codecs/BUILD.gn @@ -21,8 +21,8 @@ rtc_source_set("scalability_mode") { deps = [ "../../rtc_base:checks", "../../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_source_set("scalability_mode_helper") { @@ -31,10 +31,11 @@ rtc_source_set("scalability_mode_helper") { "scalability_mode_helper.cc", "scalability_mode_helper.h", ] - deps = [ "../../modules/video_coding/svc:scalability_mode_util" ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + deps = [ + ":scalability_mode", + "../../modules/video_coding/svc:scalability_mode_util", + "../../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -49,12 +50,12 @@ rtc_library("video_codecs_api") { "sdp_video_format.h", "simulcast_stream.cc", "simulcast_stream.h", - "spatial_layer.cc", "spatial_layer.h", "video_codec.cc", "video_codec.h", 
"video_decoder.cc", "video_decoder.h", + "video_decoder_factory.cc", "video_decoder_factory.h", "video_encoder.cc", "video_encoder.h", @@ -80,6 +81,8 @@ rtc_library("video_codecs_api") { "..:fec_controller_api", "..:scoped_refptr", "../../api:array_view", + "../../api:rtp_parameters", + "../../media:media_constants", "../../modules/video_coding:codec_globals_headers", "../../rtc_base:checks", "../../rtc_base:logging", @@ -87,6 +90,7 @@ rtc_library("video_codecs_api") { "../../rtc_base:refcount", "../../rtc_base:stringutils", "../../rtc_base/system:rtc_export", + "../environment", "../units:data_rate", "../video:encoded_image", "../video:render_resolution", @@ -94,13 +98,12 @@ rtc_library("video_codecs_api") { "../video:video_bitrate_allocation", "../video:video_codec_constants", "../video:video_frame", + "../video:video_frame_type", "../video:video_rtp_headers", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -108,7 +111,6 @@ rtc_source_set("bitstream_parser_api") { visibility = [ "*" ] sources = [ "bitstream_parser.h" ] deps = [ "..:array_view" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("builtin_video_decoder_factory") { @@ -132,10 +134,7 @@ rtc_library("builtin_video_decoder_factory") { rtc_library("builtin_video_encoder_factory") { visibility = [ "*" ] - allow_poison = [ - "audio_codecs", # TODO(bugs.webrtc.org/8396): Remove. - "software_video_codecs", - ] + allow_poison = [ "software_video_codecs" ] sources = [ "builtin_video_encoder_factory.cc", "builtin_video_encoder_factory.h", @@ -143,18 +142,10 @@ rtc_library("builtin_video_encoder_factory") { deps = [ ":video_codecs_api", - "../../api:scoped_refptr", - "../../media:codec", - "../../media:media_constants", "../../media:rtc_internal_video_codecs", - "../../media:rtc_media_base", "../../media:rtc_simulcast_encoder_adapter", - "../../rtc_base:checks", "../../rtc_base/system:rtc_export", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "../environment", ] } @@ -164,12 +155,13 @@ rtc_source_set("video_encoder_factory_template") { public = [ "video_encoder_factory_template.h" ] deps = [ + ":scalability_mode", ":video_codecs_api", - "../../api:array_view", + "..:array_view", "../../modules/video_coding/svc:scalability_mode_util", + "../environment", + "//third_party/abseil-cpp/absl/algorithm:container", ] - - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } rtc_source_set("video_encoder_factory_template_libvpx_vp8_adapter") { @@ -178,12 +170,13 @@ rtc_source_set("video_encoder_factory_template_libvpx_vp8_adapter") { public = [ "video_encoder_factory_template_libvpx_vp8_adapter.h" ] deps = [ + ":scalability_mode", ":video_codecs_api", "../../modules/video_coding:webrtc_vp8", "../../modules/video_coding:webrtc_vp8_scalability", + "../environment", + "//third_party/abseil-cpp/absl/container:inlined_vector", ] - - absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector" ] } rtc_source_set("video_encoder_factory_template_libvpx_vp9_adapter") { @@ -191,7 +184,12 @@ rtc_source_set("video_encoder_factory_template_libvpx_vp9_adapter") { allow_poison = [ "software_video_codecs" ] public = [ "video_encoder_factory_template_libvpx_vp9_adapter.h" ] - deps = [ 
"../../modules/video_coding:webrtc_vp9" ] + deps = [ + ":scalability_mode", + ":video_codecs_api", + "../../modules/video_coding:webrtc_vp9", + "../environment", + ] } rtc_source_set("video_encoder_factory_template_open_h264_adapter") { @@ -199,7 +197,12 @@ rtc_source_set("video_encoder_factory_template_open_h264_adapter") { allow_poison = [ "software_video_codecs" ] public = [ "video_encoder_factory_template_open_h264_adapter.h" ] - deps = [ "../../modules/video_coding:webrtc_h264" ] + deps = [ + ":scalability_mode", + ":video_codecs_api", + "../../modules/video_coding:webrtc_h264", + "../environment", + ] } rtc_source_set("video_encoder_factory_template_libaom_av1_adapter") { @@ -213,8 +216,9 @@ rtc_source_set("video_encoder_factory_template_libaom_av1_adapter") { "../../modules/video_coding/codecs/av1:av1_svc_config", "../../modules/video_coding/codecs/av1:libaom_av1_encoder", "../../modules/video_coding/svc:scalability_mode_util", + "../environment", + "//third_party/abseil-cpp/absl/container:inlined_vector", ] - absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector" ] } rtc_source_set("video_decoder_factory_template") { @@ -224,10 +228,10 @@ rtc_source_set("video_decoder_factory_template") { deps = [ ":video_codecs_api", - "../../api:array_view", + "..:array_view", + "../environment", + "//third_party/abseil-cpp/absl/algorithm:container", ] - - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } rtc_source_set("video_decoder_factory_template_libvpx_vp8_adapter") { @@ -238,6 +242,7 @@ rtc_source_set("video_decoder_factory_template_libvpx_vp8_adapter") { deps = [ ":video_codecs_api", "../../modules/video_coding:webrtc_vp8", + "../environment", ] } @@ -246,7 +251,10 @@ rtc_source_set("video_decoder_factory_template_libvpx_vp9_adapter") { allow_poison = [ "software_video_codecs" ] public = [ "video_decoder_factory_template_libvpx_vp9_adapter.h" ] - deps = [ "../../modules/video_coding:webrtc_vp9" ] + deps = [ + ":video_codecs_api", + "../../modules/video_coding:webrtc_vp9", + ] } rtc_source_set("video_decoder_factory_template_open_h264_adapter") { @@ -254,7 +262,10 @@ rtc_source_set("video_decoder_factory_template_open_h264_adapter") { allow_poison = [ "software_video_codecs" ] public = [ "video_decoder_factory_template_open_h264_adapter.h" ] - deps = [ "../../modules/video_coding:webrtc_h264" ] + deps = [ + ":video_codecs_api", + "../../modules/video_coding:webrtc_h264", + ] } rtc_source_set("video_decoder_factory_template_dav1d_adapter") { @@ -265,6 +276,151 @@ rtc_source_set("video_decoder_factory_template_dav1d_adapter") { deps = [ ":video_codecs_api", "../../modules/video_coding/codecs/av1:dav1d_decoder", + "../environment", + ] +} + +rtc_source_set("video_encoding_general") { + public = [ "video_encoding_general.h" ] +} + +rtc_source_set("video_encoder_interface") { + public = [ "video_encoder_interface.h" ] + + deps = [ + ":video_encoding_general", + "..:array_view", + "..:scoped_refptr", + "../../api/units:data_rate", + "../../api/units:data_size", + "../../api/units:time_delta", + "../../api/units:timestamp", + "../../api/video:encoded_image", + "../../api/video:resolution", + "../../api/video:video_frame", + "../../api/video_codecs:video_codecs_api", + "../../rtc_base:rtc_numerics", + "//third_party/abseil-cpp/absl/functional:any_invocable", + ] +} + +rtc_source_set("video_encoder_factory_interface") { + public = [ "video_encoder_factory_interface.h" ] + + deps = [ + ":video_encoder_interface", + ":video_encoding_general", + 
"../../api/units:time_delta", + "../../api/video:resolution", + "../../rtc_base:rtc_numerics", + "../video:video_frame", + ] +} + +rtc_library("simple_encoder_wrapper") { + sources = [ + "simple_encoder_wrapper.cc", + "simple_encoder_wrapper.h", + ] + + deps = [ + ":video_encoder_factory_interface", + ":video_encoder_interface", + "..:array_view", + "..:scoped_refptr", + "../../api/units:data_rate", + "../../api/video_codecs:scalability_mode", + "../../api/video_codecs:scalability_mode_helper", + "../../common_video/generic_frame_descriptor:generic_frame_descriptor", + "../../modules/video_coding/svc:scalability_structures", + "../../modules/video_coding/svc:scalable_video_controller", + "../../rtc_base:checks", + "../../rtc_base:logging", + "../../rtc_base:rtc_numerics", + "../transport/rtp:dependency_descriptor", + "../units:data_size", + "../units:frequency", + "../units:timestamp", + "../video:video_frame", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("simple_encoder_wrapper_unittests") { + testonly = true + + sources = [ "simple_encoder_wrapper_unittests.cc" ] + + deps = [ + ":simple_encoder_wrapper", + ":video_encoder_factory_interface", + ":video_encoder_interface", + ":video_encoding_general", + "../../api/video:video_frame", + "../../api/video_codecs:libaom_av1_encoder_factory", + "../../test:fileutils", + "../../test:test_support", + "../../test:video_test_support", + ] +} + +rtc_library("libaom_av1_encoder_factory") { + sources = [ + "libaom_av1_encoder_factory.cc", + "libaom_av1_encoder_factory.h", + ] + + deps = [ + ":video_codecs_api", + ":video_encoder_factory_interface", + ":video_encoder_interface", + ":video_encoding_general", + "..:array_view", + "..:scoped_refptr", + "../../api/units:time_delta", + "../../rtc_base:checks", + "../../rtc_base:logging", + "../../rtc_base:rtc_numerics", + "../../rtc_base:stringutils", + "../units:data_rate", + "../units:data_size", + "../video:resolution", + "../video:video_frame", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/cleanup", + "//third_party/libaom", + ] +} + +rtc_library("libaom_av1_encoder_factory_test") { + testonly = true + sources = [ "libaom_av1_encoder_factory_test.cc" ] + data = [ "../../resources/reference_video_640x360_30fps.y4m" ] + + deps = [ + ":libaom_av1_encoder_factory", + ":video_encoder_factory_interface", + ":video_encoder_interface", + ":video_encoding_general", + "..:array_view", + "..:scoped_refptr", + "../../api/video:video_frame", + "../../api/video_codecs:video_codecs_api", + "../../common_video:common_video", + "../../modules/video_coding/codecs/av1:dav1d_decoder", + "../../rtc_base:checks", + "../../rtc_base:logging", + "../../test:fileutils", + "../../test:test_support", + "../../test:video_frame_writer", + "../../test:video_test_support", + "../units:data_rate", + "../units:data_size", + "../units:time_delta", + "../units:timestamp", + "../video:encoded_image", ] } @@ -298,9 +454,11 @@ rtc_library("rtc_software_fallback_wrappers") { deps = [ ":video_codecs_api", "..:fec_controller_api", - "../../api/transport:field_trial_based_config", + "..:scoped_refptr", + "../../api:field_trials_view", + "../../api/environment", "../../api/video:video_frame", - "../../media:rtc_media_base", + "../../media:video_common", "../../modules/video_coding:video_codec_interface", "../../modules/video_coding:video_coding_utility", 
"../../rtc_base:checks", @@ -308,16 +466,12 @@ rtc_library("rtc_software_fallback_wrappers") { "../../rtc_base:logging", "../../rtc_base/experiments:field_trial_parser", "../../rtc_base/system:rtc_export", - "../../system_wrappers:field_trial", "../../system_wrappers:metrics", "../video:encoded_image", "../video:video_bitrate_allocation", "../video:video_frame", + "../video:video_frame_type", "../video:video_rtp_headers", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } diff --git a/api/video_codecs/av1_profile.cc b/api/video_codecs/av1_profile.cc index eefe166d80..1cbe9abf87 100644 --- a/api/video_codecs/av1_profile.cc +++ b/api/video_codecs/av1_profile.cc @@ -11,15 +11,16 @@ #include "api/video_codecs/av1_profile.h" #include -#include +#include +#include +#include "absl/strings/string_view.h" +#include "api/rtp_parameters.h" +#include "media/base/media_constants.h" #include "rtc_base/string_to_number.h" namespace webrtc { -// Parameter name in the format parameter map for AV1 video. -const char kAV1FmtpProfile[] = "profile"; - absl::string_view AV1ProfileToString(AV1Profile profile) { switch (profile) { case AV1Profile::kProfile0: @@ -32,10 +33,10 @@ absl::string_view AV1ProfileToString(AV1Profile profile) { return "0"; } -absl::optional StringToAV1Profile(absl::string_view str) { - const absl::optional i = rtc::StringToNumber(str); +std::optional StringToAV1Profile(absl::string_view str) { + const std::optional i = StringToNumber(str); if (!i.has_value()) - return absl::nullopt; + return std::nullopt; switch (i.value()) { case 0: @@ -45,23 +46,23 @@ absl::optional StringToAV1Profile(absl::string_view str) { case 2: return AV1Profile::kProfile2; default: - return absl::nullopt; + return std::nullopt; } } -absl::optional ParseSdpForAV1Profile( - const SdpVideoFormat::Parameters& params) { - const auto profile_it = params.find(kAV1FmtpProfile); +std::optional ParseSdpForAV1Profile( + const CodecParameterMap& params) { + const auto profile_it = params.find(kAv1FmtpProfile); if (profile_it == params.end()) return AV1Profile::kProfile0; const std::string& profile_str = profile_it->second; return StringToAV1Profile(profile_str); } -bool AV1IsSameProfile(const SdpVideoFormat::Parameters& params1, - const SdpVideoFormat::Parameters& params2) { - const absl::optional profile = ParseSdpForAV1Profile(params1); - const absl::optional other_profile = +bool AV1IsSameProfile(const CodecParameterMap& params1, + const CodecParameterMap& params2) { + const std::optional profile = ParseSdpForAV1Profile(params1); + const std::optional other_profile = ParseSdpForAV1Profile(params2); return profile && other_profile && profile == other_profile; } diff --git a/api/video_codecs/av1_profile.h b/api/video_codecs/av1_profile.h index 2254d5ecd3..a96285ddc2 100644 --- a/api/video_codecs/av1_profile.h +++ b/api/video_codecs/av1_profile.h @@ -11,18 +11,14 @@ #ifndef API_VIDEO_CODECS_AV1_PROFILE_H_ #define API_VIDEO_CODECS_AV1_PROFILE_H_ -#include +#include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/video_codecs/sdp_video_format.h" +#include "api/rtp_parameters.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { -// Profile information for AV1 video. -extern RTC_EXPORT const char kAV1FmtpProfile[]; - // Profiles can be found at: // https://aomedia.org/av1/specification/annex-a/#profiles // The enum values match the number specified in the SDP. 
@@ -38,19 +34,19 @@ RTC_EXPORT absl::string_view AV1ProfileToString(AV1Profile profile); // Helper function which converts a std::string to AV1Profile. Returns null if // |profile| is not a valid profile string. -absl::optional StringToAV1Profile(absl::string_view profile); +std::optional StringToAV1Profile(absl::string_view profile); // Parses an SDP key-value map of format parameters to retrive an AV1 profile. // Returns an AV1Profile if one has been specified, `kProfile0` if no profile is // specified and an empty value if the profile key is present but contains an // invalid value. -RTC_EXPORT absl::optional ParseSdpForAV1Profile( - const SdpVideoFormat::Parameters& params); +RTC_EXPORT std::optional ParseSdpForAV1Profile( + const CodecParameterMap& params); // Returns true if the parameters have the same AV1 profile or neither contains // an AV1 profile, otherwise false. -bool AV1IsSameProfile(const SdpVideoFormat::Parameters& params1, - const SdpVideoFormat::Parameters& params2); +bool AV1IsSameProfile(const CodecParameterMap& params1, + const CodecParameterMap& params2); } // namespace webrtc diff --git a/api/video_codecs/bitstream_parser.h b/api/video_codecs/bitstream_parser.h index 86ce192e49..a7dd998f5e 100644 --- a/api/video_codecs/bitstream_parser.h +++ b/api/video_codecs/bitstream_parser.h @@ -14,7 +14,8 @@ #include #include -#include "absl/types/optional.h" +#include + #include "api/array_view.h" namespace webrtc { @@ -25,11 +26,11 @@ class BitstreamParser { virtual ~BitstreamParser() = default; // Parse an additional chunk of the bitstream. - virtual void ParseBitstream(rtc::ArrayView bitstream) = 0; + virtual void ParseBitstream(ArrayView bitstream) = 0; // Get the last extracted QP value from the parsed bitstream. If no QP - // value could be parsed, returns absl::nullopt. - virtual absl::optional GetLastSliceQp() const = 0; + // value could be parsed, returns std::nullopt. 
+ virtual std::optional GetLastSliceQp() const = 0; }; } // namespace webrtc diff --git a/api/video_codecs/builtin_video_decoder_factory.cc b/api/video_codecs/builtin_video_decoder_factory.cc index f831905189..0067c4a8d6 100644 --- a/api/video_codecs/builtin_video_decoder_factory.cc +++ b/api/video_codecs/builtin_video_decoder_factory.cc @@ -12,6 +12,7 @@ #include +#include "api/video_codecs/video_decoder_factory.h" #include "media/engine/internal_decoder_factory.h" namespace webrtc { diff --git a/api/video_codecs/builtin_video_encoder_factory.cc b/api/video_codecs/builtin_video_encoder_factory.cc index 573eb47bf9..bd4bad692c 100644 --- a/api/video_codecs/builtin_video_encoder_factory.cc +++ b/api/video_codecs/builtin_video_encoder_factory.cc @@ -11,18 +11,16 @@ #include "api/video_codecs/builtin_video_encoder_factory.h" #include +#include #include #include -#include "absl/strings/match.h" -#include "absl/types/optional.h" +#include "api/environment/environment.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_encoder.h" -#include "media/base/codec.h" -#include "media/base/media_constants.h" +#include "api/video_codecs/video_encoder_factory.h" #include "media/engine/internal_encoder_factory.h" #include "media/engine/simulcast_encoder_adapter.h" -#include "rtc_base/checks.h" namespace webrtc { @@ -34,19 +32,20 @@ class BuiltinVideoEncoderFactory : public VideoEncoderFactory { BuiltinVideoEncoderFactory() : internal_encoder_factory_(new InternalEncoderFactory()) {} - std::unique_ptr CreateVideoEncoder( - const SdpVideoFormat& format) override { + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override { // Try creating an InternalEncoderFactory-backed SimulcastEncoderAdapter. // The adapter has a passthrough mode for the case that simulcast is not // used, so all responsibility can be delegated to it. - std::unique_ptr encoder; if (format.IsCodecInList( internal_encoder_factory_->GetSupportedFormats())) { - encoder = std::make_unique( - internal_encoder_factory_.get(), format); + return std::make_unique( + env, + /*primary_factory=*/internal_encoder_factory_.get(), + /*fallback_factory=*/nullptr, format); } - return encoder; + return nullptr; } std::vector GetSupportedFormats() const override { @@ -55,7 +54,7 @@ class BuiltinVideoEncoderFactory : public VideoEncoderFactory { CodecSupport QueryCodecSupport( const SdpVideoFormat& format, - absl::optional scalability_mode) const override { + std::optional scalability_mode) const override { return internal_encoder_factory_->QueryCodecSupport(format, scalability_mode); } diff --git a/api/video_codecs/h264_profile_level_id.cc b/api/video_codecs/h264_profile_level_id.cc index 5844ca0e32..0bdaed7fd3 100644 --- a/api/video_codecs/h264_profile_level_id.cc +++ b/api/video_codecs/h264_profile_level_id.cc @@ -10,12 +10,15 @@ #include "api/video_codecs/h264_profile_level_id.h" +#include #include #include +#include +#include #include +#include "api/rtp_parameters.h" #include "rtc_base/arraysize.h" -#include "rtc_base/checks.h" namespace webrtc { @@ -100,13 +103,13 @@ static constexpr LevelConstraint kLevelConstraints[] = { } // anonymous namespace -absl::optional ParseH264ProfileLevelId(const char* str) { +std::optional ParseH264ProfileLevelId(const char* str) { // The string should consist of 3 bytes in hexadecimal format. 
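// For illustration (not part of this change): a profile-level-id string such
// as "42e01f" splits into profile_idc = 0x42 (66, Baseline), profile_iop =
// 0xE0 (constraint_set0/1/2 set, i.e. Constrained Baseline) and level_idc =
// 0x1F (31), which the code below maps to H264Level::kLevel3_1.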
if (strlen(str) != 6u) - return absl::nullopt; + return std::nullopt; const uint32_t profile_level_id_numeric = strtol(str, nullptr, 16); if (profile_level_id_numeric == 0) - return absl::nullopt; + return std::nullopt; // Separate into three bytes. const uint8_t level_idc = @@ -144,7 +147,7 @@ absl::optional ParseH264ProfileLevelId(const char* str) { break; default: // Unrecognized level_idc. - return absl::nullopt; + return std::nullopt; } // Parse profile_idc/profile_iop into a Profile enum. @@ -156,11 +159,11 @@ absl::optional ParseH264ProfileLevelId(const char* str) { } // Unrecognized profile_idc/profile_iop combination. - return absl::nullopt; + return std::nullopt; } -absl::optional H264SupportedLevel(int max_frame_pixel_count, - float max_fps) { +std::optional H264SupportedLevel(int max_frame_pixel_count, + float max_fps) { static const int kPixelsPerMacroblock = 16 * 16; for (int i = arraysize(kLevelConstraints) - 1; i >= 0; --i) { @@ -174,11 +177,11 @@ absl::optional H264SupportedLevel(int max_frame_pixel_count, } // No level supported. - return absl::nullopt; + return std::nullopt; } -absl::optional ParseSdpForH264ProfileLevelId( - const SdpVideoFormat::Parameters& params) { +std::optional ParseSdpForH264ProfileLevelId( + const CodecParameterMap& params) { // TODO(magjed): The default should really be kProfileBaseline and kLevel1 // according to the spec: https://tools.ietf.org/html/rfc6184#section-8.1. In // order to not break backwards compatibility with older versions of WebRTC @@ -195,7 +198,7 @@ absl::optional ParseSdpForH264ProfileLevelId( : ParseH264ProfileLevelId(profile_level_id_it->second.c_str()); } -absl::optional H264ProfileLevelIdToString( +std::optional H264ProfileLevelIdToString( const H264ProfileLevelId& profile_level_id) { // Handle special case level == 1b. if (profile_level_id.level == H264Level::kLevel1_b) { @@ -208,7 +211,7 @@ absl::optional H264ProfileLevelIdToString( return {"4d100b"}; // Level 1b is not allowed for other profiles. default: - return absl::nullopt; + return std::nullopt; } } @@ -234,7 +237,7 @@ absl::optional H264ProfileLevelIdToString( break; // Unrecognized profile. default: - return absl::nullopt; + return std::nullopt; } char str[7]; @@ -243,15 +246,27 @@ absl::optional H264ProfileLevelIdToString( return {str}; } -bool H264IsSameProfile(const SdpVideoFormat::Parameters& params1, - const SdpVideoFormat::Parameters& params2) { - const absl::optional profile_level_id = +bool H264IsSameProfile(const CodecParameterMap& params1, + const CodecParameterMap& params2) { + const std::optional profile_level_id = ParseSdpForH264ProfileLevelId(params1); - const absl::optional other_profile_level_id = + const std::optional other_profile_level_id = ParseSdpForH264ProfileLevelId(params2); // Compare H264 profiles, but not levels. return profile_level_id && other_profile_level_id && profile_level_id->profile == other_profile_level_id->profile; } +bool H264IsSameProfileAndLevel(const CodecParameterMap& params1, + const CodecParameterMap& params2) { + const std::optional profile_level_id = + ParseSdpForH264ProfileLevelId(params1); + const std::optional other_profile_level_id = + ParseSdpForH264ProfileLevelId(params2); + // Compare both H264 profiles and levels.
+ return profile_level_id && other_profile_level_id && + profile_level_id->profile == other_profile_level_id->profile && + profile_level_id->level == other_profile_level_id->level; +} + } // namespace webrtc diff --git a/api/video_codecs/h264_profile_level_id.h b/api/video_codecs/h264_profile_level_id.h index 4b46ad329d..1b21f18834 100644 --- a/api/video_codecs/h264_profile_level_id.h +++ b/api/video_codecs/h264_profile_level_id.h @@ -11,10 +11,10 @@ #ifndef API_VIDEO_CODECS_H264_PROFILE_LEVEL_ID_H_ #define API_VIDEO_CODECS_H264_PROFILE_LEVEL_ID_H_ +#include #include -#include "absl/types/optional.h" -#include "api/video_codecs/sdp_video_format.h" +#include "api/rtp_parameters.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -60,32 +60,36 @@ struct H264ProfileLevelId { // Parse profile level id that is represented as a string of 3 hex bytes. // Nothing will be returned if the string is not a recognized H264 // profile level id. -absl::optional ParseH264ProfileLevelId(const char* str); +std::optional ParseH264ProfileLevelId(const char* str); // Parse profile level id that is represented as a string of 3 hex bytes // contained in an SDP key-value map. A default profile level id will be // returned if the profile-level-id key is missing. Nothing will be returned if // the key is present but the string is invalid. -RTC_EXPORT absl::optional ParseSdpForH264ProfileLevelId( - const SdpVideoFormat::Parameters& params); +RTC_EXPORT std::optional ParseSdpForH264ProfileLevelId( + const CodecParameterMap& params); // Given that a decoder supports up to a given frame size (in pixels) at up to a // given number of frames per second, return the highest H.264 level where it // can guarantee that it will be able to support all valid encoded streams that // are within that level. -RTC_EXPORT absl::optional H264SupportedLevel( +RTC_EXPORT std::optional H264SupportedLevel( int max_frame_pixel_count, float max_fps); // Returns canonical string representation as three hex bytes of the profile // level id, or returns nothing for invalid profile level ids. -RTC_EXPORT absl::optional H264ProfileLevelIdToString( +RTC_EXPORT std::optional H264ProfileLevelIdToString( const H264ProfileLevelId& profile_level_id); // Returns true if the parameters have the same H264 profile (Baseline, High, // etc). -RTC_EXPORT bool H264IsSameProfile(const SdpVideoFormat::Parameters& params1, - const SdpVideoFormat::Parameters& params2); +RTC_EXPORT bool H264IsSameProfile(const CodecParameterMap& params1, + const CodecParameterMap& params2); +// Returns true if the parameters have the same H264 profile (Baseline, High, +// etc) and same level. +RTC_EXPORT bool H264IsSameProfileAndLevel(const CodecParameterMap& params1, + const CodecParameterMap& params2); } // namespace webrtc diff --git a/api/video_codecs/h265_profile_tier_level.cc b/api/video_codecs/h265_profile_tier_level.cc index f5b376e287..f63321a293 100644 --- a/api/video_codecs/h265_profile_tier_level.cc +++ b/api/video_codecs/h265_profile_tier_level.cc @@ -1,248 +1,327 @@ -/* - * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "api/video_codecs/h265_profile_tier_level.h" - -#include - -#include "rtc_base/string_to_number.h" - -namespace webrtc { - -namespace { - -const char kH265FmtpProfile[] = "profile-id"; -const char kH265FmtpTier[] = "tier-flag"; -const char kH265FmtpLevel[] = "level-id"; - -} // anonymous namespace - -// Annex A of https://www.itu.int/rec/T-REC-H.265 (08/21), section A.3. -absl::optional StringToH265Profile(const std::string& profile) { - absl::optional i = rtc::StringToNumber(profile); - if (!i.has_value()) { - return absl::nullopt; - } - - switch (i.value()) { - case 1: - return H265Profile::kProfileMain; - case 2: - return H265Profile::kProfileMain10; - case 3: - return H265Profile::kProfileMainStill; - case 4: - return H265Profile::kProfileRangeExtensions; - case 5: - return H265Profile::kProfileHighThroughput; - case 6: - return H265Profile::kProfileMultiviewMain; - case 7: - return H265Profile::kProfileScalableMain; - case 8: - return H265Profile::kProfile3dMain; - case 9: - return H265Profile::kProfileScreenContentCoding; - case 10: - return H265Profile::kProfileScalableRangeExtensions; - case 11: - return H265Profile::kProfileHighThroughputScreenContentCoding; - default: - return absl::nullopt; - } -} - -// Annex A of https://www.itu.int/rec/T-REC-H.265 (08/21), section A.4, -// tiers and levels. -absl::optional StringToH265Tier(const std::string& tier) { - absl::optional i = rtc::StringToNumber(tier); - if (!i.has_value()) { - return absl::nullopt; - } - - switch (i.value()) { - case 0: - return H265Tier::kTier0; - case 1: - return H265Tier::kTier1; - default: - return absl::nullopt; - } -} - -absl::optional StringToH265Level(const std::string& level) { - const absl::optional i = rtc::StringToNumber(level); - if (!i.has_value()) - return absl::nullopt; - - switch (i.value()) { - case 30: - return H265Level::kLevel1; - case 60: - return H265Level::kLevel2; - case 63: - return H265Level::kLevel2_1; - case 90: - return H265Level::kLevel3; - case 93: - return H265Level::kLevel3_1; - case 120: - return H265Level::kLevel4; - case 123: - return H265Level::kLevel4_1; - case 150: - return H265Level::kLevel5; - case 153: - return H265Level::kLevel5_1; - case 156: - return H265Level::kLevel5_2; - case 180: - return H265Level::kLevel6; - case 183: - return H265Level::kLevel6_1; - case 186: - return H265Level::kLevel6_2; - default: - return absl::nullopt; - } -} - -std::string H265ProfileToString(H265Profile profile) { - switch (profile) { - case H265Profile::kProfileMain: - return "1"; - case H265Profile::kProfileMain10: - return "2"; - case H265Profile::kProfileMainStill: - return "3"; - case H265Profile::kProfileRangeExtensions: - return "4"; - case H265Profile::kProfileHighThroughput: - return "5"; - case H265Profile::kProfileMultiviewMain: - return "6"; - case H265Profile::kProfileScalableMain: - return "7"; - case H265Profile::kProfile3dMain: - return "8"; - case H265Profile::kProfileScreenContentCoding: - return "9"; - case H265Profile::kProfileScalableRangeExtensions: - return "10"; - case H265Profile::kProfileHighThroughputScreenContentCoding: - return "11"; - } -} - -std::string H265TierToString(H265Tier tier) { - switch (tier) { - case H265Tier::kTier0: - return "0"; - case H265Tier::kTier1: - return "1"; - } -} - -std::string H265LevelToString(H265Level level) { - switch (level) { - case H265Level::kLevel1: - return "30"; - case H265Level::kLevel2: - return "60"; - case H265Level::kLevel2_1: - return "63"; - case H265Level::kLevel3: - return "90"; - case 
H265Level::kLevel3_1: - return "93"; - case H265Level::kLevel4: - return "120"; - case H265Level::kLevel4_1: - return "123"; - case H265Level::kLevel5: - return "150"; - case H265Level::kLevel5_1: - return "153"; - case H265Level::kLevel5_2: - return "156"; - case H265Level::kLevel6: - return "180"; - case H265Level::kLevel6_1: - return "183"; - case H265Level::kLevel6_2: - return "186"; - } -} - -absl::optional ParseSdpForH265ProfileTierLevel( - const SdpVideoFormat::Parameters& params) { - static const H265ProfileTierLevel kDefaultProfileTierLevel( - H265Profile::kProfileMain, H265Tier::kTier0, H265Level::kLevel3_1); - bool profile_tier_level_specified = false; - - absl::optional profile; - const auto profile_it = params.find(kH265FmtpProfile); - if (profile_it != params.end()) { - profile_tier_level_specified = true; - const std::string& profile_str = profile_it->second; - profile = StringToH265Profile(profile_str); - if (!profile) { - return absl::nullopt; - } - } else { - profile = H265Profile::kProfileMain; - } - absl::optional tier; - const auto tier_it = params.find(kH265FmtpTier); - if (tier_it != params.end()) { - profile_tier_level_specified = true; - const std::string& tier_str = tier_it->second; - tier = StringToH265Tier(tier_str); - if (!tier) { - return absl::nullopt; - } - } else { - tier = H265Tier::kTier0; - } - absl::optional level; - const auto level_it = params.find(kH265FmtpLevel); - if (level_it != params.end()) { - profile_tier_level_specified = true; - const std::string& level_str = level_it->second; - level = StringToH265Level(level_str); - if (!level) { - return absl::nullopt; - } - } else { - level = H265Level::kLevel3_1; - } - - // Spec Table A.9, level 1 to level 3.1 does not allow high tiers. - if (level <= H265Level::kLevel3_1 && tier == H265Tier::kTier1) { - return absl::nullopt; - } - - return !profile_tier_level_specified - ? kDefaultProfileTierLevel - : H265ProfileTierLevel(profile.value(), tier.value(), - level.value()); -} - -bool H265IsSameProfileTierLevel(const SdpVideoFormat::Parameters& params1, - const SdpVideoFormat::Parameters& params2) { - const absl::optional ptl1 = - ParseSdpForH265ProfileTierLevel(params1); - const absl::optional ptl2 = - ParseSdpForH265ProfileTierLevel(params2); - return ptl1 && ptl2 && ptl1->profile == ptl2->profile && - ptl1->tier == ptl2->tier && ptl1->level == ptl2->level; -} - -} // namespace webrtc +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video_codecs/h265_profile_tier_level.h" + +#include +#include + +#include "api/rtp_parameters.h" +#include "api/video/resolution.h" +#include "rtc_base/arraysize.h" +#include "rtc_base/string_to_number.h" + +namespace webrtc { + +namespace { + +const char kH265FmtpProfile[] = "profile-id"; +const char kH265FmtpTier[] = "tier-flag"; +const char kH265FmtpLevel[] = "level-id"; + +// Used to align frame width and height for luma picture size calculation. +// Use the maximum value allowed by spec to get upper bound of luma picture +// size for given resolution. 
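// For illustration (not part of this change): GetSupportedH265Level() below
// rounds each dimension up to a multiple of this constant via
// (dim + kMinCbSizeYMax - 1) & ~(kMinCbSizeYMax - 1), so 720 becomes 768 and
// 1080 becomes 1088, while 1280 and 1920 are already multiples of 64.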
+static constexpr int kMinCbSizeYMax = 64; + +struct LevelConstraint { + const int max_luma_picture_size; + const double max_luma_sample_rate; + const int max_pic_width_or_height_in_pixels; + const H265Level level; +}; + +// This is from ITU-T H.265 (09/2023) Table A.8, A.9 & A.11 – Level limits. +// The max_pic_width_or_height_in_luma_samples is pre-calculated following +// ITU-T H.265 section A.4.1, that is, find the largest integer value that +// is multiple of minimal MinCbSizeY(8 according to equation 7-10 and 7-12), is +// less than sqrt(max_luma_picture_size * 8). For example, at level 1, +// max_luma_picture_size is 36864, so pic_width_in_luma_samples <= sqrt(36864 * +// 8) = 543.06. The largest integer that is multiple of 8 and less than 543.06 +// is 536. +static constexpr LevelConstraint kLevelConstraints[] = { + {36864, 552960, 536, H265Level::kLevel1}, + {122880, 3686400, 984, H265Level::kLevel2}, + {245760, 7372800, 1400, H265Level::kLevel2_1}, + {552960, 16588800, 2096, H265Level::kLevel3}, + {983040, 33177600, 2800, H265Level::kLevel3_1}, + {2228224, 66846720, 4216, H265Level::kLevel4}, + {2228224, 133693400, 4216, H265Level::kLevel4_1}, + {8912896, 267386880, 8440, H265Level::kLevel5}, + {8912896, 534773760, 8440, H265Level::kLevel5_1}, + {8912896, 1069547520, 8440, H265Level::kLevel5_2}, + {35651584, 1069547520, 16888, H265Level::kLevel6}, + {35651584, 2139095040, 16888, H265Level::kLevel6_1}, + {35651584, 4278190080, 16888, H265Level::kLevel6_2}, +}; + +} // anonymous namespace + +// Annex A of https://www.itu.int/rec/T-REC-H.265 (08/21), section A.3. +std::optional StringToH265Profile(const std::string& profile) { + std::optional i = StringToNumber(profile); + if (!i.has_value()) { + return std::nullopt; + } + + switch (i.value()) { + case 1: + return H265Profile::kProfileMain; + case 2: + return H265Profile::kProfileMain10; + case 3: + return H265Profile::kProfileMainStill; + case 4: + return H265Profile::kProfileRangeExtensions; + case 5: + return H265Profile::kProfileHighThroughput; + case 6: + return H265Profile::kProfileMultiviewMain; + case 7: + return H265Profile::kProfileScalableMain; + case 8: + return H265Profile::kProfile3dMain; + case 9: + return H265Profile::kProfileScreenContentCoding; + case 10: + return H265Profile::kProfileScalableRangeExtensions; + case 11: + return H265Profile::kProfileHighThroughputScreenContentCoding; + default: + return std::nullopt; + } +} + +// Annex A of https://www.itu.int/rec/T-REC-H.265 (08/21), section A.4, +// tiers and levels. 
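// For illustration (a sketch, not part of this change): a small helper that
// reproduces the pre-computed max_pic_width_or_height_in_pixels column above
// from the MinCbSizeY = 8 reasoning in the table comment. The helper name is
// hypothetical and is not used anywhere in WebRTC.

#include <cmath>

int MaxPicDimensionForLumaSize(int max_luma_picture_size) {
  // Largest multiple of 8 that does not exceed sqrt(8 * max_luma_picture_size).
  int bound = static_cast<int>(std::sqrt(8.0 * max_luma_picture_size));
  return bound - (bound % 8);
}
// MaxPicDimensionForLumaSize(36864) == 536 (level 1) and
// MaxPicDimensionForLumaSize(983040) == 2800 (level 3.1), matching the table.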
+std::optional StringToH265Tier(const std::string& tier) { + std::optional i = StringToNumber(tier); + if (!i.has_value()) { + return std::nullopt; + } + + switch (i.value()) { + case 0: + return H265Tier::kTier0; + case 1: + return H265Tier::kTier1; + default: + return std::nullopt; + } +} + +std::optional StringToH265Level(const std::string& level) { + const std::optional i = StringToNumber(level); + if (!i.has_value()) + return std::nullopt; + + switch (i.value()) { + case 30: + return H265Level::kLevel1; + case 60: + return H265Level::kLevel2; + case 63: + return H265Level::kLevel2_1; + case 90: + return H265Level::kLevel3; + case 93: + return H265Level::kLevel3_1; + case 120: + return H265Level::kLevel4; + case 123: + return H265Level::kLevel4_1; + case 150: + return H265Level::kLevel5; + case 153: + return H265Level::kLevel5_1; + case 156: + return H265Level::kLevel5_2; + case 180: + return H265Level::kLevel6; + case 183: + return H265Level::kLevel6_1; + case 186: + return H265Level::kLevel6_2; + default: + return std::nullopt; + } +} + +std::string H265ProfileToString(H265Profile profile) { + switch (profile) { + case H265Profile::kProfileMain: + return "1"; + case H265Profile::kProfileMain10: + return "2"; + case H265Profile::kProfileMainStill: + return "3"; + case H265Profile::kProfileRangeExtensions: + return "4"; + case H265Profile::kProfileHighThroughput: + return "5"; + case H265Profile::kProfileMultiviewMain: + return "6"; + case H265Profile::kProfileScalableMain: + return "7"; + case H265Profile::kProfile3dMain: + return "8"; + case H265Profile::kProfileScreenContentCoding: + return "9"; + case H265Profile::kProfileScalableRangeExtensions: + return "10"; + case H265Profile::kProfileHighThroughputScreenContentCoding: + return "11"; + } +} + +std::string H265TierToString(H265Tier tier) { + switch (tier) { + case H265Tier::kTier0: + return "0"; + case H265Tier::kTier1: + return "1"; + } +} + +std::string H265LevelToString(H265Level level) { + switch (level) { + case H265Level::kLevel1: + return "30"; + case H265Level::kLevel2: + return "60"; + case H265Level::kLevel2_1: + return "63"; + case H265Level::kLevel3: + return "90"; + case H265Level::kLevel3_1: + return "93"; + case H265Level::kLevel4: + return "120"; + case H265Level::kLevel4_1: + return "123"; + case H265Level::kLevel5: + return "150"; + case H265Level::kLevel5_1: + return "153"; + case H265Level::kLevel5_2: + return "156"; + case H265Level::kLevel6: + return "180"; + case H265Level::kLevel6_1: + return "183"; + case H265Level::kLevel6_2: + return "186"; + } +} + +std::optional ParseSdpForH265ProfileTierLevel( + const CodecParameterMap& params) { + static const H265ProfileTierLevel kDefaultProfileTierLevel( + H265Profile::kProfileMain, H265Tier::kTier0, H265Level::kLevel3_1); + bool profile_tier_level_specified = false; + + std::optional profile; + const auto profile_it = params.find(kH265FmtpProfile); + if (profile_it != params.end()) { + profile_tier_level_specified = true; + const std::string& profile_str = profile_it->second; + profile = StringToH265Profile(profile_str); + if (!profile) { + return std::nullopt; + } + } else { + profile = H265Profile::kProfileMain; + } + std::optional tier; + const auto tier_it = params.find(kH265FmtpTier); + if (tier_it != params.end()) { + profile_tier_level_specified = true; + const std::string& tier_str = tier_it->second; + tier = StringToH265Tier(tier_str); + if (!tier) { + return std::nullopt; + } + } else { + tier = H265Tier::kTier0; + } + std::optional level; + const auto 
level_it = params.find(kH265FmtpLevel); + if (level_it != params.end()) { + profile_tier_level_specified = true; + const std::string& level_str = level_it->second; + level = StringToH265Level(level_str); + if (!level) { + return std::nullopt; + } + } else { + level = H265Level::kLevel3_1; + } + + // Spec Table A.9, level 1 to level 3.1 does not allow high tiers. + if (level <= H265Level::kLevel3_1 && tier == H265Tier::kTier1) { + return std::nullopt; + } + + return !profile_tier_level_specified + ? kDefaultProfileTierLevel + : H265ProfileTierLevel(profile.value(), tier.value(), + level.value()); +} + +bool H265IsSameProfileTierLevel(const CodecParameterMap& params1, + const CodecParameterMap& params2) { + const std::optional ptl1 = + ParseSdpForH265ProfileTierLevel(params1); + const std::optional ptl2 = + ParseSdpForH265ProfileTierLevel(params2); + return ptl1 && ptl2 && ptl1->profile == ptl2->profile && + ptl1->tier == ptl2->tier && ptl1->level == ptl2->level; +} + +bool H265IsSameProfile(const CodecParameterMap& params1, + const CodecParameterMap& params2) { + const std::optional ptl1 = + ParseSdpForH265ProfileTierLevel(params1); + const std::optional ptl2 = + ParseSdpForH265ProfileTierLevel(params2); + return ptl1 && ptl2 && ptl1->profile == ptl2->profile; +} + +bool H265IsSameTier(const CodecParameterMap& params1, + const CodecParameterMap& params2) { + const std::optional ptl1 = + ParseSdpForH265ProfileTierLevel(params1); + const std::optional ptl2 = + ParseSdpForH265ProfileTierLevel(params2); + return ptl1 && ptl2 && ptl1->tier == ptl2->tier; +} + +std::optional GetSupportedH265Level(const Resolution& resolution, + float max_fps) { + int aligned_width = + (resolution.width + kMinCbSizeYMax - 1) & ~(kMinCbSizeYMax - 1); + int aligned_height = + (resolution.height + kMinCbSizeYMax - 1) & ~(kMinCbSizeYMax - 1); + + for (int i = arraysize(kLevelConstraints) - 1; i >= 0; --i) { + const LevelConstraint& level_constraint = kLevelConstraints[i]; + if (level_constraint.max_luma_picture_size <= + aligned_width * aligned_height && + level_constraint.max_luma_sample_rate <= + aligned_width * aligned_height * max_fps && + level_constraint.max_pic_width_or_height_in_pixels >= aligned_width && + level_constraint.max_pic_width_or_height_in_pixels >= aligned_height) { + return level_constraint.level; + } + } + return std::nullopt; +} + +} // namespace webrtc diff --git a/api/video_codecs/h265_profile_tier_level.h b/api/video_codecs/h265_profile_tier_level.h index 3056d2b623..676b1f573b 100644 --- a/api/video_codecs/h265_profile_tier_level.h +++ b/api/video_codecs/h265_profile_tier_level.h @@ -1,109 +1,127 @@ -/* - * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_VIDEO_CODECS_H265_PROFILE_TIER_LEVEL_H_ -#define API_VIDEO_CODECS_H265_PROFILE_TIER_LEVEL_H_ - -#include - -#include "absl/types/optional.h" -#include "api/video_codecs/sdp_video_format.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// Profiles can be found at: -// https://www.itu.int/rec/T-REC-H.265 -// The enum values match the number specified in the SDP. 
-enum class H265Profile { - kProfileMain = 1, - kProfileMain10 = 2, - kProfileMainStill = 3, - kProfileRangeExtensions = 4, - kProfileHighThroughput = 5, - kProfileMultiviewMain = 6, - kProfileScalableMain = 7, - kProfile3dMain = 8, - kProfileScreenContentCoding = 9, - kProfileScalableRangeExtensions = 10, - kProfileHighThroughputScreenContentCoding = 11, -}; - -// Tiers can be found at https://www.itu.int/rec/T-REC-H.265 -enum class H265Tier { - kTier0, - kTier1, -}; - -// All values are equal to 30 times the level number. -enum class H265Level { - kLevel1 = 30, - kLevel2 = 60, - kLevel2_1 = 63, - kLevel3 = 90, - kLevel3_1 = 93, - kLevel4 = 120, - kLevel4_1 = 123, - kLevel5 = 150, - kLevel5_1 = 153, - kLevel5_2 = 156, - kLevel6 = 180, - kLevel6_1 = 183, - kLevel6_2 = 186, -}; - -struct H265ProfileTierLevel { - constexpr H265ProfileTierLevel(H265Profile profile, - H265Tier tier, - H265Level level) - : profile(profile), tier(tier), level(level) {} - H265Profile profile; - H265Tier tier; - H265Level level; -}; - -// Helper function to convert H265Profile to std::string. -RTC_EXPORT std::string H265ProfileToString(H265Profile profile); - -// Helper function to convert H265Tier to std::string. -RTC_EXPORT std::string H265TierToString(H265Tier tier); - -// Helper function to convert H265Level to std::string. -RTC_EXPORT std::string H265LevelToString(H265Level level); - -// Helper function to get H265Profile from profile string. -RTC_EXPORT absl::optional StringToH265Profile( - const std::string& profile); - -// Helper function to get H265Tier from tier string. -RTC_EXPORT absl::optional StringToH265Tier(const std::string& tier); - -// Helper function to get H265Level from level string. -RTC_EXPORT absl::optional StringToH265Level( - const std::string& level); - -// Parses an SDP key-value map of format parameters to retrive an H265 -// profile/tier/level. Returns an H265ProfileTierlevel by setting its -// members. profile defaults to `kProfileMain` if no profile-id is specified. -// tier defaults to "kTier0" if no tier-flag is specified. -// level defaults to "kLevel3_1" if no level-id is specified. -// Returns empty value if any of the profile/tier/level key is present but -// contains an invalid value. -RTC_EXPORT absl::optional ParseSdpForH265ProfileTierLevel( - const SdpVideoFormat::Parameters& params); - -// Returns true if the parameters have the same H265 profile or neither contains -// an H265 profile, otherwise false. -bool H265IsSameProfileTierLevel(const SdpVideoFormat::Parameters& params1, - const SdpVideoFormat::Parameters& params2); - -} // namespace webrtc - -#endif // API_VIDEO_CODECS_H265_PROFILE_TIER_LEVEL_H_ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_H265_PROFILE_TIER_LEVEL_H_ +#define API_VIDEO_CODECS_H265_PROFILE_TIER_LEVEL_H_ + +#include +#include + +#include "api/rtp_parameters.h" +#include "api/video/resolution.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Profiles can be found at: +// https://www.itu.int/rec/T-REC-H.265 +// The enum values match the number specified in the SDP. 
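// For illustration (not part of this change): an SDP fmtp line carrying
// profile-id=2 corresponds to kProfileMain10 below, matching the numbering in
// the H.265 specification.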
+enum class H265Profile { + kProfileMain = 1, + kProfileMain10 = 2, + kProfileMainStill = 3, + kProfileRangeExtensions = 4, + kProfileHighThroughput = 5, + kProfileMultiviewMain = 6, + kProfileScalableMain = 7, + kProfile3dMain = 8, + kProfileScreenContentCoding = 9, + kProfileScalableRangeExtensions = 10, + kProfileHighThroughputScreenContentCoding = 11, +}; + +// Tiers can be found at https://www.itu.int/rec/T-REC-H.265 +enum class H265Tier { + kTier0, + kTier1, +}; + +// All values are equal to 30 times the level number. +enum class H265Level { + kLevel1 = 30, + kLevel2 = 60, + kLevel2_1 = 63, + kLevel3 = 90, + kLevel3_1 = 93, + kLevel4 = 120, + kLevel4_1 = 123, + kLevel5 = 150, + kLevel5_1 = 153, + kLevel5_2 = 156, + kLevel6 = 180, + kLevel6_1 = 183, + kLevel6_2 = 186, +}; + +struct H265ProfileTierLevel { + constexpr H265ProfileTierLevel(H265Profile profile, + H265Tier tier, + H265Level level) + : profile(profile), tier(tier), level(level) {} + H265Profile profile; + H265Tier tier; + H265Level level; +}; + +// Helper function to convert H265Profile to std::string. +RTC_EXPORT std::string H265ProfileToString(H265Profile profile); + +// Helper function to convert H265Tier to std::string. +RTC_EXPORT std::string H265TierToString(H265Tier tier); + +// Helper function to convert H265Level to std::string. +RTC_EXPORT std::string H265LevelToString(H265Level level); + +// Helper function to get H265Profile from profile string. +RTC_EXPORT std::optional StringToH265Profile( + const std::string& profile); + +// Helper function to get H265Tier from tier string. +RTC_EXPORT std::optional StringToH265Tier(const std::string& tier); + +// Helper function to get H265Level from level string. +RTC_EXPORT std::optional StringToH265Level(const std::string& level); + +// Given that a decoder supports up to a given frame size (in pixels) at up to a +// given number of frames per second, return the highest H.265 level where it +// can guarantee that it will be able to support all valid encoded streams that +// are within that level. +RTC_EXPORT std::optional GetSupportedH265Level( + const Resolution& resolution, + float max_fps); + +// Parses an SDP key-value map of format parameters to retrieve an H265 +// profile/tier/level. Returns an H265ProfileTierLevel by setting its +// members. profile defaults to `kProfileMain` if no profile-id is specified. +// tier defaults to "kTier0" if no tier-flag is specified. +// level defaults to "kLevel3_1" if no level-id is specified. +// Returns empty value if any of the profile/tier/level keys is present but +// contains an invalid value. +RTC_EXPORT std::optional ParseSdpForH265ProfileTierLevel( + const CodecParameterMap& params); + +// Returns true if the parameters have the same H265 profile/tier/level or +// neither contains an H265 profile/tier/level, otherwise false. +RTC_EXPORT bool H265IsSameProfileTierLevel(const CodecParameterMap& params1, + const CodecParameterMap& params2); + +// Returns true if the parameters have the same H265 profile, or neither +// contains an H265 profile, otherwise false. +RTC_EXPORT bool H265IsSameProfile(const CodecParameterMap& params1, + const CodecParameterMap& params2); + +// Returns true if the parameters have the same H265 tier, or neither +// contains an H265 tier, otherwise false.
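// For illustration (a sketch, not part of this change): how the helpers above
// fit together now that they take webrtc::CodecParameterMap. The function name
// ExampleH265Negotiation is hypothetical; the expected values follow from the
// tables in h265_profile_tier_level.cc.

#include <optional>

#include "api/rtp_parameters.h"
#include "api/video/resolution.h"
#include "api/video_codecs/h265_profile_tier_level.h"

void ExampleH265Negotiation() {
  webrtc::CodecParameterMap params = {
      {"profile-id", "1"}, {"tier-flag", "0"}, {"level-id", "120"}};
  std::optional<webrtc::H265ProfileTierLevel> ptl =
      webrtc::ParseSdpForH265ProfileTierLevel(params);
  // ptl->profile == kProfileMain, ptl->tier == kTier0, ptl->level == kLevel4.

  webrtc::Resolution max_supported;
  max_supported.width = 1920;
  max_supported.height = 1080;
  std::optional<webrtc::H265Level> level =
      webrtc::GetSupportedH265Level(max_supported, /*max_fps=*/30.0f);
  // With the constraint table in the .cc file this works out to kLevel3_1 for
  // a decoder capped at 1080p30.
}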
+RTC_EXPORT bool H265IsSameTier(const CodecParameterMap& params1, + const CodecParameterMap& params2); + +} // namespace webrtc + +#endif // API_VIDEO_CODECS_H265_PROFILE_TIER_LEVEL_H_ diff --git a/api/video_codecs/libaom_av1_encoder_factory.cc b/api/video_codecs/libaom_av1_encoder_factory.cc new file mode 100644 index 0000000000..7652aae250 --- /dev/null +++ b/api/video_codecs/libaom_av1_encoder_factory.cc @@ -0,0 +1,872 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video_codecs/libaom_av1_encoder_factory.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/cleanup/cleanup.h" +#include "api/array_view.h" +#include "api/scoped_refptr.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/video/resolution.h" +#include "api/video/video_frame_buffer.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder_factory_interface.h" +#include "api/video_codecs/video_encoder_interface.h" +#include "api/video_codecs/video_encoding_general.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/rational.h" +#include "rtc_base/strings/string_builder.h" +#include "third_party/libaom/source/libaom/aom/aom_codec.h" +#include "third_party/libaom/source/libaom/aom/aom_encoder.h" +#include "third_party/libaom/source/libaom/aom/aom_image.h" +#include "third_party/libaom/source/libaom/aom/aomcx.h" + +#define SET_OR_RETURN(param_id, param_value) \ + do { \ + if (!SetEncoderControlParameters(&ctx_, param_id, param_value)) { \ + return; \ + } \ + } while (0) + +#define SET_OR_RETURN_FALSE(param_id, param_value) \ + do { \ + if (!SetEncoderControlParameters(&ctx_, param_id, param_value)) { \ + return false; \ + } \ + } while (0) + +namespace webrtc { + +using FrameEncodeSettings = VideoEncoderInterface::FrameEncodeSettings; +using Cbr = FrameEncodeSettings::Cbr; +using Cqp = FrameEncodeSettings::Cqp; +using aom_img_ptr = std::unique_ptr; + +namespace { +// MaxQp defined here: +// http://google3/third_party/libaom/git_root/av1/av1_cx_iface.c;l=3510;rcl=527067478 +constexpr int kMaxQp = 63; +constexpr int kNumBuffers = 8; +constexpr int kMaxReferences = 3; +constexpr int kMinEffortLevel = -2; +constexpr int kMaxEffortLevel = 2; +constexpr int kMaxSpatialLayersWtf = 4; +constexpr int kMaxTemporalLayers = 4; +constexpr int kRtpTicksPerSecond = 90000; +constexpr std::array kSupportedInputFormats = { + VideoFrameBuffer::Type::kI420, VideoFrameBuffer::Type::kNV12}; + +constexpr std::array kSupportedScalingFactors = { + {{8, 1}, {4, 1}, {2, 1}, {1, 1}, {1, 2}, {1, 4}, {1, 8}}}; + +std::optional GetScalingFactor(const Resolution& from, + const Resolution& to) { + auto it = absl::c_find_if(kSupportedScalingFactors, [&](const Rational& r) { + return (from.width * r.numerator / r.denominator) == to.width && + (from.height * r.numerator / r.denominator) == to.height; + }); + + if (it != kSupportedScalingFactors.end()) { + return *it; + } + + return {}; +} + +class LibaomAv1Encoder : public 
VideoEncoderInterface { + public: + LibaomAv1Encoder() = default; + ~LibaomAv1Encoder() override; + + bool InitEncode( + const VideoEncoderFactoryInterface::StaticEncoderSettings& settings, + const std::map& encoder_specific_settings); + + void Encode(scoped_refptr frame_buffer, + const TemporalUnitSettings& tu_settings, + std::vector frame_settings) override; + + private: + aom_img_ptr image_to_encode_ = aom_img_ptr(nullptr, aom_img_free); + aom_codec_ctx_t ctx_; + aom_codec_enc_cfg_t cfg_; + + std::optional current_content_type_; + std::array, kMaxSpatialLayersWtf> current_effort_level_; + int max_number_of_threads_; + std::array, 8> last_resolution_in_buffer_; +}; + +template +bool SetEncoderControlParameters(aom_codec_ctx_t* ctx, int id, T value) { + aom_codec_err_t error_code = aom_codec_control(ctx, id, value); + if (error_code != AOM_CODEC_OK) { + RTC_LOG(LS_WARNING) << "aom_codec_control returned " << error_code + << " with id: " << id << "."; + } + return error_code == AOM_CODEC_OK; +} + +LibaomAv1Encoder::~LibaomAv1Encoder() { + aom_codec_destroy(&ctx_); +} + +bool LibaomAv1Encoder::InitEncode( + const VideoEncoderFactoryInterface::StaticEncoderSettings& settings, + const std::map& encoder_specific_settings) { + if (!encoder_specific_settings.empty()) { + RTC_LOG(LS_ERROR) + << "libaom av1 encoder accepts no encoder specific settings"; + return false; + } + + if (aom_codec_err_t ret = aom_codec_enc_config_default( + aom_codec_av1_cx(), &cfg_, AOM_USAGE_REALTIME); + ret != AOM_CODEC_OK) { + RTC_LOG(LS_ERROR) << "aom_codec_enc_config_default returned " << ret; + return false; + } + + max_number_of_threads_ = settings.max_number_of_threads; + + // The encode resolution is set dynamically for each call to `Encode`, but for + // `aom_codec_enc_init` to not fail we set it here as well. + cfg_.g_w = settings.max_encode_dimensions.width; + cfg_.g_h = settings.max_encode_dimensions.height; + cfg_.g_timebase.num = 1; + // TD: does 90khz timebase make sense, use microseconds instead maybe? + cfg_.g_timebase.den = kRtpTicksPerSecond; + cfg_.g_input_bit_depth = settings.encoding_format.bit_depth; + cfg_.kf_mode = AOM_KF_DISABLED; + // TD: rc_undershoot_pct and rc_overshoot_pct should probably be removed. + cfg_.rc_undershoot_pct = 50; + cfg_.rc_overshoot_pct = 50; + auto* cbr = + std::get_if( + &settings.rc_mode); + cfg_.rc_buf_initial_sz = cbr ? cbr->target_buffer_size.ms() : 600; + cfg_.rc_buf_optimal_sz = cbr ? cbr->target_buffer_size.ms() : 600; + cfg_.rc_buf_sz = cbr ? cbr->max_buffer_size.ms() : 1000; + cfg_.g_usage = AOM_USAGE_REALTIME; + cfg_.g_pass = AOM_RC_ONE_PASS; + cfg_.g_lag_in_frames = 0; + cfg_.g_error_resilient = 0; + cfg_.rc_end_usage = cbr ? 
AOM_CBR : AOM_Q; + + if (aom_codec_err_t ret = + aom_codec_enc_init(&ctx_, aom_codec_av1_cx(), &cfg_, /*flags=*/0); + ret != AOM_CODEC_OK) { + RTC_LOG(LS_ERROR) << "aom_codec_enc_init returned " << ret; + return false; + } + + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_CDEF, 1); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_TPL_MODEL, 0); + SET_OR_RETURN_FALSE(AV1E_SET_DELTAQ_MODE, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_ORDER_HINT, 0); + SET_OR_RETURN_FALSE(AV1E_SET_AQ_MODE, 3); + SET_OR_RETURN_FALSE(AOME_SET_MAX_INTRA_BITRATE_PCT, 300); + SET_OR_RETURN_FALSE(AV1E_SET_COEFF_COST_UPD_FREQ, 3); + SET_OR_RETURN_FALSE(AV1E_SET_MODE_COST_UPD_FREQ, 3); + SET_OR_RETURN_FALSE(AV1E_SET_MV_COST_UPD_FREQ, 3); + SET_OR_RETURN_FALSE(AV1E_SET_ROW_MT, 1); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_OBMC, 0); + SET_OR_RETURN_FALSE(AV1E_SET_NOISE_SENSITIVITY, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_WARPED_MOTION, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_GLOBAL_MOTION, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_REF_FRAME_MVS, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_CFL_INTRA, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_SMOOTH_INTRA, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_ANGLE_DELTA, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_FILTER_INTRA, 0); + SET_OR_RETURN_FALSE(AV1E_SET_INTRA_DEFAULT_TX_ONLY, 1); + SET_OR_RETURN_FALSE(AV1E_SET_DISABLE_TRELLIS_QUANT, 1); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_DIST_WTD_COMP, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_DIFF_WTD_COMP, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_DUAL_FILTER, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_INTERINTRA_COMP, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_INTERINTRA_WEDGE, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_INTRA_EDGE_FILTER, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_INTRABC, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_MASKED_COMP, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_PAETH_INTRA, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_QM, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_RECT_PARTITIONS, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_RESTORATION, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_SMOOTH_INTERINTRA, 0); + SET_OR_RETURN_FALSE(AV1E_SET_ENABLE_TX64, 0); + SET_OR_RETURN_FALSE(AV1E_SET_MAX_REFERENCE_FRAMES, 3); + + return true; +} + +struct ThreadTilesAndSuperblockSizeInfo { + int num_threads; + int exp_tile_rows; + int exp_tile_colums; + aom_superblock_size_t superblock_size; +}; + +ThreadTilesAndSuperblockSizeInfo GetThreadingTilesAndSuperblockSize( + int width, + int height, + int max_number_of_threads) { + ThreadTilesAndSuperblockSizeInfo res; + const int num_pixels = width * height; + if (num_pixels >= 1920 * 1080 && max_number_of_threads > 8) { + res.num_threads = 8; + res.exp_tile_rows = 2; + res.exp_tile_colums = 1; + } else if (num_pixels >= 640 * 360 && max_number_of_threads > 4) { + res.num_threads = 4; + res.exp_tile_rows = 1; + res.exp_tile_colums = 1; + } else if (num_pixels >= 320 * 180 && max_number_of_threads > 2) { + res.num_threads = 2; + res.exp_tile_rows = 1; + res.exp_tile_colums = 0; + } else { + res.num_threads = 1; + res.exp_tile_rows = 0; + res.exp_tile_colums = 0; + } + + if (res.num_threads > 4 && num_pixels >= 960 * 540) { + res.superblock_size = AOM_SUPERBLOCK_SIZE_64X64; + } else { + res.superblock_size = AOM_SUPERBLOCK_SIZE_DYNAMIC; + } + + RTC_LOG(LS_WARNING) << __FUNCTION__ << " res.num_threads=" << res.num_threads + << " res.exp_tile_rows=" << res.exp_tile_rows + << " res.exp_tile_colums=" << res.exp_tile_colums + << " res.superblock_size=" << res.superblock_size; + + return res; +} + +bool ValidateEncodeParams( + const 
webrtc::VideoFrameBuffer& /* frame_buffer */, + const VideoEncoderInterface::TemporalUnitSettings& /* tu_settings */, + const std::vector& + frame_settings, + const std::array, 8>& last_resolution_in_buffer, + aom_rc_mode rc_mode) { + if (frame_settings.empty()) { + RTC_LOG(LS_ERROR) << "No frame settings provided."; + return false; + } + + auto in_range = [](int low, int high, int val) { + return low <= val && val < high; + }; + + for (size_t i = 0; i < frame_settings.size(); ++i) { + const VideoEncoderInterface::FrameEncodeSettings& settings = + frame_settings[i]; + + if (!settings.frame_output) { + RTC_LOG(LS_ERROR) << "No frame output provided."; + return false; + } + + if (!in_range(kMinEffortLevel, kMaxEffortLevel + 1, + settings.effort_level)) { + RTC_LOG(LS_ERROR) << "Unsupported effort level " << settings.effort_level; + return false; + } + + if (!in_range(0, kMaxSpatialLayersWtf, settings.spatial_id)) { + RTC_LOG(LS_ERROR) << "invalid spatial id " << settings.spatial_id; + return false; + } + + if (!in_range(0, kMaxTemporalLayers, settings.temporal_id)) { + RTC_LOG(LS_ERROR) << "invalid temporal id " << settings.temporal_id; + return false; + } + + if ((settings.frame_type == FrameType::kKeyframe || + settings.frame_type == FrameType::kStartFrame) && + !settings.reference_buffers.empty()) { + RTC_LOG(LS_ERROR) << "Reference buffers can not be used for keyframes."; + return false; + } + + if ((settings.frame_type == FrameType::kKeyframe || + settings.frame_type == FrameType::kStartFrame) && + !settings.update_buffer) { + RTC_LOG(LS_ERROR) + << "Buffer to update must be specified for keyframe/startframe"; + return false; + } + + if (settings.update_buffer && + !in_range(0, kNumBuffers, *settings.update_buffer)) { + RTC_LOG(LS_ERROR) << "Invalid update buffer id."; + return false; + } + + if (settings.reference_buffers.size() > kMaxReferences) { + RTC_LOG(LS_ERROR) << "Too many referenced buffers."; + return false; + } + + for (size_t j = 0; j < settings.reference_buffers.size(); ++j) { + if (!in_range(0, kNumBuffers, settings.reference_buffers[j])) { + RTC_LOG(LS_ERROR) << "Invalid reference buffer id."; + return false; + } + + // Figure out which frame resolution a certain buffer will hold when the + // frame described by `settings` is encoded. + std::optional referenced_resolution; + bool keyframe_on_previous_layer = false; + + // Will some other frame in this temporal unit update the buffer? + for (size_t k = 0; k < i; ++k) { + if (frame_settings[k].frame_type == FrameType::kKeyframe) { + keyframe_on_previous_layer = true; + referenced_resolution.reset(); + } + if (frame_settings[k].update_buffer == settings.reference_buffers[j]) { + referenced_resolution = frame_settings[k].resolution; + } + } + + // Not updated by another frame in the temporal unit, what is the + // resolution of the last frame stored into that buffer? 
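// For illustration (not part of this change): if frame_settings[0] (S0)
// updates buffer 2 and frame_settings[1] (S1) references buffer 2, the check
// for S1 uses S0's resolution from this temporal unit; only when no earlier
// frame in the unit writes the buffer does the code fall back to
// last_resolution_in_buffer.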
+ if (!referenced_resolution && !keyframe_on_previous_layer) { + referenced_resolution = + last_resolution_in_buffer[settings.reference_buffers[j]]; + } + + if (!referenced_resolution) { + RTC_LOG(LS_ERROR) << "Referenced buffer holds no frame."; + return false; + } + + if (!GetScalingFactor(*referenced_resolution, settings.resolution)) { + RTC_LOG(LS_ERROR) + << "Required resolution scaling factor not supported."; + return false; + } + + for (size_t l = i + 1; l < settings.reference_buffers.size(); ++l) { + if (settings.reference_buffers[i] == settings.reference_buffers[l]) { + RTC_LOG(LS_ERROR) << "Duplicate reference buffer specified."; + return false; + } + } + } + + if ((rc_mode == AOM_CBR && + std::holds_alternative(settings.rate_options)) || + (rc_mode == AOM_Q && + std::holds_alternative(settings.rate_options))) { + RTC_LOG(LS_ERROR) << "Invalid rate options, encoder configured with " + << (rc_mode == AOM_CBR ? "AOM_CBR" : "AOM_Q"); + return false; + } + + for (size_t j = i + 1; j < frame_settings.size(); ++j) { + if (settings.spatial_id >= frame_settings[j].spatial_id) { + RTC_LOG(LS_ERROR) << "Frame spatial id specified out of order."; + return false; + } + } + } + + return true; +} + +void PrepareInputImage(const VideoFrameBuffer& input_buffer, + aom_img_ptr& out_aom_image) { + aom_img_fmt_t input_format; + switch (input_buffer.type()) { + case VideoFrameBuffer::Type::kI420: + input_format = AOM_IMG_FMT_I420; + break; + case VideoFrameBuffer::Type::kNV12: + input_format = AOM_IMG_FMT_NV12; + break; + default: + RTC_CHECK_NOTREACHED(); + return; + } + + if (!out_aom_image || out_aom_image->fmt != input_format || + static_cast(out_aom_image->w) != input_buffer.width() || + static_cast(out_aom_image->h) != input_buffer.height()) { + out_aom_image.reset( + aom_img_wrap(/*img=*/nullptr, input_format, input_buffer.width(), + input_buffer.height(), /*align=*/1, /*img_data=*/nullptr)); + + RTC_LOG(LS_WARNING) << __FUNCTION__ << " input_format=" << input_format + << " input_buffer.width()=" << input_buffer.width() + << " input_buffer.height()=" << input_buffer.height() + << " w=" << out_aom_image->w + << " h=" << out_aom_image->h + << " d_w=" << out_aom_image->d_w + << " d_h=" << out_aom_image->d_h + << " r_w=" << out_aom_image->r_w + << " r_h=" << out_aom_image->r_h; + } + + if (input_format == AOM_IMG_FMT_I420) { + const I420BufferInterface* i420_buffer = input_buffer.GetI420(); + RTC_DCHECK(i420_buffer); + out_aom_image->planes[AOM_PLANE_Y] = + const_cast(i420_buffer->DataY()); + out_aom_image->planes[AOM_PLANE_U] = + const_cast(i420_buffer->DataU()); + out_aom_image->planes[AOM_PLANE_V] = + const_cast(i420_buffer->DataV()); + out_aom_image->stride[AOM_PLANE_Y] = i420_buffer->StrideY(); + out_aom_image->stride[AOM_PLANE_U] = i420_buffer->StrideU(); + out_aom_image->stride[AOM_PLANE_V] = i420_buffer->StrideV(); + } else { + const NV12BufferInterface* nv12_buffer = input_buffer.GetNV12(); + RTC_DCHECK(nv12_buffer); + out_aom_image->planes[AOM_PLANE_Y] = + const_cast(nv12_buffer->DataY()); + out_aom_image->planes[AOM_PLANE_U] = + const_cast(nv12_buffer->DataUV()); + out_aom_image->planes[AOM_PLANE_V] = nullptr; + out_aom_image->stride[AOM_PLANE_Y] = nv12_buffer->StrideY(); + out_aom_image->stride[AOM_PLANE_U] = nv12_buffer->StrideUV(); + out_aom_image->stride[AOM_PLANE_V] = 0; + } +} + +aom_svc_ref_frame_config_t GetSvcRefFrameConfig( + const VideoEncoderInterface::FrameEncodeSettings& settings) { + // Buffer alias to use for each position. 
In particular when there are two + // buffers being used, prefer to alias them as LAST and GOLDEN, since the AV1 + // bitstream format has dedicated fields for them. See last_frame_idx and + // golden_frame_idx in the av1 spec + // https://aomediacodec.github.io/av1-spec/av1-spec.pdf. + + // Libaom is also compiled for RTC, which limits the number of references to + // at most three, and they must be aliased as LAST, GOLDEN and ALTREF. Also + // note that libaom favors LAST the most, and GOLDEN second most, so buffers + // should be specified in order of how useful they are for prediction. Libaom + // could be updated to make LAST, GOLDEN and ALTREF equivalent, but that is + // not a priority for now. All aliases can be used to update buffers. + // TD: Automatically select LAST, GOLDEN and ALTREF depending on previous + // buffer usage. + static constexpr int kPreferedAlias[] = {0, // LAST + 3, // GOLDEN + 6, // ALTREF + 1, 2, 4, 5}; + + aom_svc_ref_frame_config_t ref_frame_config = {}; + + int alias_index = 0; + if (!settings.reference_buffers.empty()) { + for (size_t i = 0; i < settings.reference_buffers.size(); ++i) { + ref_frame_config.ref_idx[kPreferedAlias[alias_index]] = + settings.reference_buffers[i]; + ref_frame_config.reference[kPreferedAlias[alias_index]] = 1; + alias_index++; + } + + // Delta frames must not alias unused buffers, and since start frames only + // update some buffers it is not safe to leave unused aliases to simply + // point to buffer 0. + for (size_t i = settings.reference_buffers.size(); + i < std::size(ref_frame_config.ref_idx); ++i) { + ref_frame_config.ref_idx[kPreferedAlias[i]] = + settings.reference_buffers.back(); + } + } + + if (settings.update_buffer) { + if (!absl::c_linear_search(settings.reference_buffers, + *settings.update_buffer)) { + ref_frame_config.ref_idx[kPreferedAlias[alias_index]] = + *settings.update_buffer; + alias_index++; + } + ref_frame_config.refresh[*settings.update_buffer] = 1; + } + + char buf[256]; + SimpleStringBuilder sb(buf); + sb << " spatial_id=" << settings.spatial_id; + sb << " ref_idx=[ "; + for (auto r : ref_frame_config.ref_idx) { + sb << r << " "; + } + sb << "] reference=[ "; + for (auto r : ref_frame_config.reference) { + sb << r << " "; + } + sb << "] refresh=[ "; + for (auto r : ref_frame_config.refresh) { + sb << r << " "; + } + sb << "]"; + + RTC_LOG(LS_WARNING) << __FUNCTION__ << sb.str(); + + return ref_frame_config; +} + +aom_svc_params_t GetSvcParams( + const webrtc::VideoFrameBuffer& frame_buffer, + const std::vector& + frame_settings) { + aom_svc_params_t svc_params = {}; + svc_params.number_spatial_layers = frame_settings.back().spatial_id + 1; + svc_params.number_temporal_layers = kMaxTemporalLayers; + + // TD: What about svc_params.framerate_factor? + // If `framerate_factors` are left at 0 then configured bitrate values will + // not be picked up by libaom. + for (int tid = 0; tid < svc_params.number_temporal_layers; ++tid) { + svc_params.framerate_factor[tid] = 1; + } + + // If the scaling factor is left at zero for unused layers a division by zero + // will happen inside libaom, default all layers to one. 
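// For illustration (not part of this change): with a 1280x720 input buffer and
// spatial layers encoded at 1280x720 and 640x360, GetScalingFactor() yields
// 1/1 for the top layer and 1/2 for the lower one; both ratios are listed in
// kSupportedScalingFactors.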
+ for (int sid = 0; sid < svc_params.number_spatial_layers; ++sid) { + svc_params.scaling_factor_num[sid] = 1; + svc_params.scaling_factor_den[sid] = 1; + } + + for (const VideoEncoderInterface::FrameEncodeSettings& settings : + frame_settings) { + std::optional scaling_factor = GetScalingFactor( + {frame_buffer.width(), frame_buffer.height()}, settings.resolution); + RTC_CHECK(scaling_factor); + svc_params.scaling_factor_num[settings.spatial_id] = + scaling_factor->numerator; + svc_params.scaling_factor_den[settings.spatial_id] = + scaling_factor->denominator; + + const int flat_layer_id = + settings.spatial_id * svc_params.number_temporal_layers + + settings.temporal_id; + + RTC_LOG(LS_WARNING) << __FUNCTION__ << " flat_layer_id=" << flat_layer_id + << " num=" + << svc_params.scaling_factor_num[settings.spatial_id] + << " den=" + << svc_params.scaling_factor_den[settings.spatial_id]; + + std::visit( + [&](auto&& arg) { + using T = std::decay_t; + if constexpr (std::is_same_v) { + // Libaom calculates the total bitrate across all spatial layers by + // summing the bitrate of the last temporal layer in each spatial + // layer. This means the bitrate for the top temporal layer always + // has to be set even if that temporal layer is not being encoded. + const int last_temporal_layer_in_spatial_layer_id = + settings.spatial_id * svc_params.number_temporal_layers + + (kMaxTemporalLayers - 1); + svc_params + .layer_target_bitrate[last_temporal_layer_in_spatial_layer_id] = + arg.target_bitrate.kbps(); + + svc_params.layer_target_bitrate[flat_layer_id] = + arg.target_bitrate.kbps(); + // When libaom is configured with `AOM_CBR` it will still limit QP + // to stay between `min_quantizers` and `max_quantizers'. Set + // `max_quantizers` to max QP to avoid the encoder overshooting. + svc_params.max_quantizers[flat_layer_id] = kMaxQp; + svc_params.min_quantizers[flat_layer_id] = 0; + } else if constexpr (std::is_same_v) { + // When libaom is configured with `AOM_Q` it will still look at the + // `layer_target_bitrate` to determine whether the layer is disabled + // or not. Set `layer_target_bitrate` to 1 so that libaom knows the + // layer is active. + svc_params.layer_target_bitrate[flat_layer_id] = 1; + svc_params.max_quantizers[flat_layer_id] = arg.target_qp; + svc_params.min_quantizers[flat_layer_id] = arg.target_qp; + RTC_LOG(LS_WARNING) << __FUNCTION__ << " svc_params.qp[" + << flat_layer_id << "]=" << arg.target_qp; + // TD: Does libaom look at both max and min? Shouldn't it just be + // one of them + } + }, + settings.rate_options); + } + + char buf[512]; + SimpleStringBuilder sb(buf); + sb << "GetSvcParams" << " layer bitrates kbps"; + for (int s = 0; s < svc_params.number_spatial_layers; ++s) { + sb << " S" << s << "=[ "; + for (int t = 0; t < svc_params.number_temporal_layers; ++t) { + int id = s * svc_params.number_temporal_layers + t; + sb << "T" << t << "=" << svc_params.layer_target_bitrate[id] << " "; + } + sb << "]"; + } + + RTC_LOG(LS_WARNING) << sb.str(); + + return svc_params; +} + +void LibaomAv1Encoder::Encode( + scoped_refptr frame_buffer, + const TemporalUnitSettings& tu_settings, + std::vector frame_settings) { + absl::Cleanup on_return = [&] { + // On return call `EncodeComplete` with EncodingError result unless they + // were already called with an EncodedData result. 
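// For illustration (not part of this change): this cleanup pairs with Encode()
// setting settings.frame_output to nullptr right after a successful
// EncodeComplete(result) further down, so every frame_output receives exactly
// one callback: EncodedData on success, or EncodingError on any early return.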
+    for (FrameEncodeSettings& settings : frame_settings) {
+      if (settings.frame_output) {
+        settings.frame_output->EncodeComplete(EncodingError());
+      }
+    }
+  };
+
+  if (!ValidateEncodeParams(*frame_buffer, tu_settings, frame_settings,
+                            last_resolution_in_buffer_, cfg_.rc_end_usage)) {
+    return;
+  }
+
+  if (current_content_type_ != tu_settings.content_hint) {
+    if (tu_settings.content_hint == VideoCodecMode::kScreensharing) {
+      // TODO: Set speed 11?
+      SET_OR_RETURN(AV1E_SET_TUNE_CONTENT, AOM_CONTENT_SCREEN);
+      SET_OR_RETURN(AV1E_SET_ENABLE_PALETTE, 1);
+    } else {
+      SET_OR_RETURN(AV1E_SET_TUNE_CONTENT, AOM_CONTENT_DEFAULT);
+      SET_OR_RETURN(AV1E_SET_ENABLE_PALETTE, 0);
+    }
+    current_content_type_ = tu_settings.content_hint;
+  }
+
+  if (cfg_.rc_end_usage == AOM_CBR) {
+    DataRate accum_rate = DataRate::Zero();
+    for (const FrameEncodeSettings& settings : frame_settings) {
+      accum_rate += std::get<Cbr>(settings.rate_options).target_bitrate;
+    }
+    cfg_.rc_target_bitrate = accum_rate.kbps();
+    RTC_LOG(LS_WARNING) << __FUNCTION__
+                        << " cfg_.rc_target_bitrate=" << cfg_.rc_target_bitrate;
+  }
+
+  if (static_cast<int>(cfg_.g_w) != frame_buffer->width() ||
+      static_cast<int>(cfg_.g_h) != frame_buffer->height()) {
+    RTC_LOG(LS_WARNING) << __FUNCTION__ << " resolution changed from "
+                        << cfg_.g_w << "x" << cfg_.g_h << " to "
+                        << frame_buffer->width() << "x"
+                        << frame_buffer->height();
+    ThreadTilesAndSuperblockSizeInfo ttsbi = GetThreadingTilesAndSuperblockSize(
+        frame_buffer->width(), frame_buffer->height(), max_number_of_threads_);
+    SET_OR_RETURN(AV1E_SET_SUPERBLOCK_SIZE, ttsbi.superblock_size);
+    SET_OR_RETURN(AV1E_SET_TILE_ROWS, ttsbi.exp_tile_rows);
+    SET_OR_RETURN(AV1E_SET_TILE_COLUMNS, ttsbi.exp_tile_colums);
+    cfg_.g_threads = ttsbi.num_threads;
+    cfg_.g_w = frame_buffer->width();
+    cfg_.g_h = frame_buffer->height();
+  }
+
+  PrepareInputImage(*frame_buffer, image_to_encode_);
+
+  // The bitrates calculated internally in libaom when `AV1E_SET_SVC_PARAMS` is
+  // called depend on the currently configured `cfg_.rc_target_bitrate`. If the
+  // total target bitrate is not updated first a division by zero could happen.
+  if (aom_codec_err_t ret = aom_codec_enc_config_set(&ctx_, &cfg_);
+      ret != AOM_CODEC_OK) {
+    RTC_LOG(LS_ERROR) << "aom_codec_enc_config_set returned " << ret;
+    return;
+  }
+  aom_svc_params_t svc_params = GetSvcParams(*frame_buffer, frame_settings);
+  SET_OR_RETURN(AV1E_SET_SVC_PARAMS, &svc_params);
+
+  // The libaom AV1 encoder requires that `aom_codec_encode` is called for
+  // every spatial layer, even if no frame should be encoded for that layer.
+  std::array settings_for_spatial_id;
+  settings_for_spatial_id.fill(nullptr);
+  FrameEncodeSettings settings_for_unused_layer;
+  for (FrameEncodeSettings& settings : frame_settings) {
+    settings_for_spatial_id[settings.spatial_id] = &settings;
+  }
+
+  for (int sid = frame_settings[0].spatial_id;
+       sid < svc_params.number_spatial_layers; ++sid) {
+    const bool layer_enabled = settings_for_spatial_id[sid] != nullptr;
+    FrameEncodeSettings& settings = layer_enabled
+                                        ? *settings_for_spatial_id[sid]
+                                        : settings_for_unused_layer;
+
+    aom_svc_layer_id_t layer_id = {
+        .spatial_layer_id = sid,
+        .temporal_layer_id = settings.temporal_id,
+    };
+    SET_OR_RETURN(AV1E_SET_SVC_LAYER_ID, &layer_id);
+    aom_svc_ref_frame_config_t ref_config = GetSvcRefFrameConfig(settings);
+    SET_OR_RETURN(AV1E_SET_SVC_REF_FRAME_CONFIG, &ref_config);
+
+    // TODO: Duration can't be zero, but what does it matter when the layer is
+    // not being encoded?
+    TimeDelta duration = TimeDelta::Millis(1);
+    if (layer_enabled) {
+      if (const Cbr* cbr = std::get_if<Cbr>(&settings.rate_options)) {
+        duration = cbr->duration;
+      } else {
+        // TODO: What should duration be when Cqp is used?
+        duration = TimeDelta::Millis(1);
+      }
+
+      if (settings.effort_level != current_effort_level_[settings.spatial_id]) {
+        // For RTC we use speed level 6 to 10, with 8 being the default. Note
+        // that low effort means higher speed.
+        SET_OR_RETURN(AOME_SET_CPUUSED, 8 - settings.effort_level);
+        current_effort_level_[settings.spatial_id] = settings.effort_level;
+      }
+    }
+
+    RTC_LOG(LS_WARNING)
+        << __FUNCTION__ << " timestamp="
+        << (tu_settings.presentation_timestamp.ms() * kRtpTicksPerSecond / 1000)
+        << " duration=" << (duration.ms() * kRtpTicksPerSecond / 1000)
+        << " type="
+        << (settings.frame_type == FrameType::kKeyframe ? "key" : "delta");
+    aom_codec_err_t ret = aom_codec_encode(
+        &ctx_, &*image_to_encode_, tu_settings.presentation_timestamp.ms() * 90,
+        duration.ms() * 90,
+        settings.frame_type == FrameType::kKeyframe ? AOM_EFLAG_FORCE_KF : 0);
+    if (ret != AOM_CODEC_OK) {
+      RTC_LOG(LS_WARNING) << "aom_codec_encode returned " << ret;
+      return;
+    }
+
+    if (!layer_enabled) {
+      continue;
+    }
+
+    if (settings.frame_type == FrameType::kKeyframe) {
+      last_resolution_in_buffer_ = {};
+    }
+
+    if (settings.update_buffer) {
+      last_resolution_in_buffer_[*settings.update_buffer] = settings.resolution;
+    }
+
+    EncodedData result;
+    aom_codec_iter_t iter = nullptr;
+    bool bitstream_produced = false;
+    while (const aom_codec_cx_pkt_t* pkt =
+               aom_codec_get_cx_data(&ctx_, &iter)) {
+      if (pkt->kind == AOM_CODEC_CX_FRAME_PKT && pkt->data.frame.sz > 0) {
+        SET_OR_RETURN(AOME_GET_LAST_QUANTIZER_64, &result.encoded_qp);
+        result.frame_type = pkt->data.frame.flags & AOM_EFLAG_FORCE_KF
+                                ? FrameType::kKeyframe
+                                : FrameType::kDeltaFrame;
+        ArrayView<uint8_t> output_buffer =
+            settings.frame_output->GetBitstreamOutputBuffer(
+                DataSize::Bytes(pkt->data.frame.sz));
+        if (output_buffer.size() != pkt->data.frame.sz) {
+          return;
+        }
+        memcpy(output_buffer.data(), pkt->data.frame.buf, pkt->data.frame.sz);
+        bitstream_produced = true;
+        break;
+      }
+    }
+
+    if (!bitstream_produced) {
+      return;
+    } else {
+      RTC_CHECK(settings.frame_output);
+      settings.frame_output->EncodeComplete(result);
+      // To avoid invoking any callback more than once.
+      settings.frame_output = nullptr;
+    }
+  }
+}
+}  // namespace
+
+std::string LibaomAv1EncoderFactory::CodecName() const {
+  return "AV1";
+}
+
+std::string LibaomAv1EncoderFactory::ImplementationName() const {
+  return "Libaom";
+}
+
+std::map LibaomAv1EncoderFactory::CodecSpecifics()
+    const {
+  return {};
+}
+
+// clang-format off
+// The formatter and cpplint have conflicting ideas.
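// ---------------------------------------------------------------------------
// Editorial note (illustrative sketch, not part of this change): typical use
// of the factory, mirroring the tests added in
// libaom_av1_encoder_factory_test.cc further below. `kCbrEncoderSettings`
// stands in for a caller-provided StaticEncoderSettings.
//
//   LibaomAv1EncoderFactory factory;
//   VideoEncoderFactoryInterface::Capabilities caps =
//       factory.GetEncoderCapabilities();
//   auto encoder = factory.CreateEncoder(kCbrEncoderSettings,
//                                        /*encoder_specific_settings=*/{});
//   // The caller then drives encoding one temporal unit at a time, passing
//   // one FrameEncodeSettings entry per spatial layer to Encode().
// ---------------------------------------------------------------------------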
+VideoEncoderFactoryInterface::Capabilities +LibaomAv1EncoderFactory::GetEncoderCapabilities() const { + return { + .prediction_constraints = { + .num_buffers = kNumBuffers, + .max_references = kMaxReferences, + .max_temporal_layers = kMaxTemporalLayers, + .buffer_space_type = VideoEncoderFactoryInterface::Capabilities:: + PredictionConstraints::BufferSpaceType::kSingleKeyframe, + .max_spatial_layers = kMaxSpatialLayersWtf, + .scaling_factors = {kSupportedScalingFactors.begin(), + kSupportedScalingFactors.end()}, + .supported_frame_types = {FrameType::kKeyframe, + FrameType::kStartFrame, + FrameType::kDeltaFrame}}, + .input_constraints = { + .min = {.width = 64, .height = 36}, + .max = {.width = 3840, .height = 2160}, + .pixel_alignment = 1, + .input_formats = {kSupportedInputFormats.begin(), + kSupportedInputFormats.end()}, + }, + .encoding_formats = {{.sub_sampling = EncodingFormat::k420, + .bit_depth = 8}}, + .rate_control = { + .qp_range = {0, kMaxQp}, + .rc_modes = {VideoEncoderFactoryInterface::RateControlMode::kCbr, + VideoEncoderFactoryInterface::RateControlMode::kCqp}}, + .performance = {.encode_on_calling_thread = true, + .min_max_effort_level = {kMinEffortLevel, + kMaxEffortLevel}}, + }; +} +// clang-format on + +std::unique_ptr LibaomAv1EncoderFactory::CreateEncoder( + const StaticEncoderSettings& settings, + const std::map& encoder_specific_settings) { + auto encoder = std::make_unique(); + if (!encoder->InitEncode(settings, encoder_specific_settings)) { + return nullptr; + } + return encoder; +} + +} // namespace webrtc diff --git a/api/video_codecs/libaom_av1_encoder_factory.h b/api/video_codecs/libaom_av1_encoder_factory.h new file mode 100644 index 0000000000..487ed7b89b --- /dev/null +++ b/api/video_codecs/libaom_av1_encoder_factory.h @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_LIBAOM_AV1_ENCODER_FACTORY_H_ +#define API_VIDEO_CODECS_LIBAOM_AV1_ENCODER_FACTORY_H_ + +#include +#include +#include + +#include "api/video_codecs/video_encoder_factory_interface.h" +#include "api/video_codecs/video_encoder_interface.h" + +namespace webrtc { +class LibaomAv1EncoderFactory final : VideoEncoderFactoryInterface { + public: + std::string CodecName() const override; + std::string ImplementationName() const override; + std::map CodecSpecifics() const override; + + Capabilities GetEncoderCapabilities() const override; + std::unique_ptr CreateEncoder( + const StaticEncoderSettings& settings, + const std::map& encoder_specific_settings) + override; +}; +} // namespace webrtc +#endif // API_VIDEO_CODECS_LIBAOM_AV1_ENCODER_FACTORY_H_ diff --git a/api/video_codecs/libaom_av1_encoder_factory_test.cc b/api/video_codecs/libaom_av1_encoder_factory_test.cc new file mode 100644 index 0000000000..62d74424e5 --- /dev/null +++ b/api/video_codecs/libaom_av1_encoder_factory_test.cc @@ -0,0 +1,907 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video_codecs/libaom_av1_encoder_factory.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/scoped_refptr.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/encoded_image.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video_codecs/video_decoder.h" +#include "api/video_codecs/video_encoder_factory_interface.h" +#include "api/video_codecs/video_encoder_interface.h" +#include "api/video_codecs/video_encoding_general.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "modules/video_coding/codecs/av1/dav1d_decoder.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" +#include "test/testsupport/frame_reader.h" + +namespace webrtc { +namespace { +using ::testing::Eq; +using ::testing::Gt; +using ::testing::IsEmpty; +using ::testing::Not; +using Cbr = VideoEncoderInterface::FrameEncodeSettings::Cbr; +using Cqp = VideoEncoderInterface::FrameEncodeSettings::Cqp; +using EncodedData = VideoEncoderInterface::EncodedData; +using EncodeResult = VideoEncoderInterface::EncodeResult; +using FrameType = VideoEncoderInterface::FrameType; + +std::unique_ptr CreateFrameReader() { + return CreateY4mFrameReader( + test::ResourcePath("reference_video_640x360_30fps", "y4m"), + test::YuvFrameReaderImpl::RepeatMode::kPingPong); +} + +std::string OutPath() { + std::string res = test::OutputPath(); + res += "frame_dump/"; + RTC_CHECK(test::DirExists(res) || test::CreateDir(res)); + return res; +} + +class Av1Decoder : public DecodedImageCallback { + public: + Av1Decoder() : Av1Decoder("") {} + + explicit Av1Decoder(const std::string& name) + : decoder_(CreateDav1dDecoder()), file_name_(name) { + decoder_->Configure({}); + decoder_->RegisterDecodeCompleteCallback(this); + + if (!file_name_.empty()) { + std::string out = OutPath(); + out += file_name_; + out += "_raw.av1"; + RTC_CHECK(raw_out_file_ = fopen(out.c_str(), "wb")); + RTC_LOG(LS_INFO) << "Recording bitstream to " << out; + } + } + + ~Av1Decoder() { + if (raw_out_file_) { + fclose(raw_out_file_); + } + } + + // DecodedImageCallback + int32_t Decoded(VideoFrame& frame) override { + decode_result_ = std::make_unique(std::move(frame)); + return 0; + } + + VideoFrame Decode(ArrayView bitstream_data) { + EncodedImage img; + img.SetEncodedData(EncodedImageBuffer::Create(bitstream_data.data(), + bitstream_data.size())); + if (raw_out_file_) { + fwrite(bitstream_data.data(), 1, bitstream_data.size(), raw_out_file_); + } + decoder_->Decode(img, /*dont_care=*/0); + VideoFrame res(std::move(*decode_result_)); + return res; + } + + private: + std::unique_ptr decoder_; + std::unique_ptr decode_result_; + std::string file_name_; + FILE* raw_out_file_ = nullptr; +}; + +struct EncOut { + std::vector bitstream; + EncodeResult res; +}; + +class FrameEncoderSettingsBuilder { + public: + FrameEncoderSettingsBuilder() { + class IgnoredOutput : public VideoEncoderInterface::FrameOutput { + public: + ArrayView GetBitstreamOutputBuffer(DataSize size) override { + 
unread_.resize(size.bytes());
+        return unread_;
+      }
+      void EncodeComplete(const EncodeResult& /* encode_result */) override {}
+
+     private:
+      std::vector<uint8_t> unread_;
+    };
+
+    frame_encode_settings_.frame_output = std::make_unique<IgnoredOutput>();
+  }
+
+  FrameEncoderSettingsBuilder& Key() {
+    frame_encode_settings_.frame_type = FrameType::kKeyframe;
+    return *this;
+  }
+
+  FrameEncoderSettingsBuilder& Start() {
+    frame_encode_settings_.frame_type = FrameType::kStartFrame;
+    return *this;
+  }
+
+  FrameEncoderSettingsBuilder& Delta() {
+    frame_encode_settings_.frame_type = FrameType::kDeltaFrame;
+    return *this;
+  }
+
+  FrameEncoderSettingsBuilder& Rate(
+      const std::variant<Cbr, Cqp>& rate_options) {
+    frame_encode_settings_.rate_options = rate_options;
+    return *this;
+  }
+
+  FrameEncoderSettingsBuilder& T(int id) {
+    frame_encode_settings_.temporal_id = id;
+    return *this;
+  }
+
+  FrameEncoderSettingsBuilder& S(int id) {
+    frame_encode_settings_.spatial_id = id;
+    return *this;
+  }
+
+  FrameEncoderSettingsBuilder& Res(int width, int height) {
+    frame_encode_settings_.resolution = {width, height};
+    return *this;
+  }
+
+  FrameEncoderSettingsBuilder& Ref(const std::vector<int>& ref) {
+    frame_encode_settings_.reference_buffers = ref;
+    return *this;
+  }
+
+  FrameEncoderSettingsBuilder& Upd(int upd) {
+    frame_encode_settings_.update_buffer = upd;
+    return *this;
+  }
+
+  FrameEncoderSettingsBuilder& Effort(int effort_level) {
+    frame_encode_settings_.effort_level = effort_level;
+    return *this;
+  }
+
+  FrameEncoderSettingsBuilder& Out(EncOut& out) {
+    frame_encode_settings_.frame_output = std::make_unique<FrameOut>(out);
+    return *this;
+  }
+
+  operator VideoEncoderInterface::FrameEncodeSettings&&() {
+    return std::move(frame_encode_settings_);
+  }
+
+ private:
+  struct FrameOut : public VideoEncoderInterface::FrameOutput {
+    explicit FrameOut(EncOut& e) : eo(e) {}
+    ArrayView<uint8_t> GetBitstreamOutputBuffer(DataSize size) override {
+      eo.bitstream.resize(size.bytes());
+      return ArrayView<uint8_t>(eo.bitstream);
+    }
+    void EncodeComplete(const EncodeResult& encode_result) override {
+      eo.res = encode_result;
+    }
+    EncOut& eo;
+  };
+
+  VideoEncoderInterface::FrameEncodeSettings frame_encode_settings_;
+};
+
+using Fb = FrameEncoderSettingsBuilder;
+
+// Since FrameEncodeSettings is move only, initializer-list initialization
+// won't work, so instead a C-style array can be used to do aggregate
+// initialization.
+template <size_t N>
+std::vector<VideoEncoderInterface::FrameEncodeSettings> ToVec(
+    VideoEncoderInterface::FrameEncodeSettings (&&settings)[N]) {
+  return std::vector<VideoEncoderInterface::FrameEncodeSettings>(
+      std::make_move_iterator(std::begin(settings)),
+      std::make_move_iterator(std::end(settings)));
+}
+
+// For reasonable debug printout when an EXPECT fails.
+struct Resolution { + explicit Resolution(const VideoFrame& frame) + : width(frame.width()), height(frame.height()) {} + + friend void PrintTo(const Resolution& res, std::ostream* os) { + *os << "(width: " << res.width << " height: " << res.height << ")"; + } + + int width; + int height; +}; + +MATCHER_P2(ResolutionIs, width, height, "") { + return arg.width == width && arg.height == height; +} + +MATCHER_P(QpIs, qp, "") { + if (auto ed = std::get_if(&arg.res)) { + return ed->encoded_qp == qp; + } + return false; +} + +MATCHER(HasBitstreamAndMetaData, "") { + return !arg.bitstream.empty() && std::holds_alternative(arg.res); +} + +double Psnr(const scoped_refptr& ref_buffer, + const VideoFrame& decoded_frame) { + return I420PSNR(*ref_buffer, *decoded_frame.video_frame_buffer()->ToI420()); +} + +static constexpr VideoEncoderFactoryInterface::StaticEncoderSettings + kCbrEncoderSettings{ + .max_encode_dimensions = {.width = 1920, .height = 1080}, + .encoding_format = {.sub_sampling = EncodingFormat::SubSampling::k420, + .bit_depth = 8}, + .rc_mode = + VideoEncoderFactoryInterface::StaticEncoderSettings::Cbr{ + .max_buffer_size = TimeDelta::Millis(1000), + .target_buffer_size = TimeDelta::Millis(600)}, + .max_number_of_threads = 1, + }; + +static constexpr VideoEncoderFactoryInterface::StaticEncoderSettings + kCqpEncoderSettings{ + .max_encode_dimensions = {.width = 1920, .height = 1080}, + .encoding_format = {.sub_sampling = EncodingFormat::SubSampling::k420, + .bit_depth = 8}, + .rc_mode = VideoEncoderFactoryInterface::StaticEncoderSettings::Cqp(), + .max_number_of_threads = 1, + }; + +static constexpr Cbr kCbr{.duration = TimeDelta::Millis(100), + .target_bitrate = DataRate::KilobitsPerSec(1000)}; + +TEST(LibaomAv1EncoderFactory, CodecName) { + EXPECT_THAT(LibaomAv1EncoderFactory().CodecName(), Eq("AV1")); +} + +TEST(LibaomAv1EncoderFactory, CodecSpecifics) { + EXPECT_THAT(LibaomAv1EncoderFactory().CodecSpecifics(), IsEmpty()); +} + +TEST(LibaomAv1EncoderFactory, QpRange) { + const std::pair kMinMaxQp = {0, 63}; + EXPECT_THAT( + LibaomAv1EncoderFactory().GetEncoderCapabilities().rate_control.qp_range, + Eq(kMinMaxQp)); +} + +TEST(LibaomAv1Encoder, KeyframeUpdatesSpecifiedBuffer) { + auto frame_reader = CreateFrameReader(); + auto enc = LibaomAv1EncoderFactory().CreateEncoder(kCbrEncoderSettings, {}); + Av1Decoder dec; + + auto raw_key = frame_reader->PullFrame(); + auto raw_delta = frame_reader->PullFrame(); + + EncOut key; + enc->Encode(raw_key, {.presentation_timestamp = Timestamp::Millis(0)}, + ToVec({Fb().Rate(kCbr).Res(640, 360).Upd(5).Key().Out(key)})); + ASSERT_THAT(key.bitstream, Not(IsEmpty())); + VideoFrame decoded_key = dec.Decode(key.bitstream); + EXPECT_THAT(Resolution(decoded_key), ResolutionIs(640, 360)); + EXPECT_THAT(Psnr(raw_key, decoded_key), Gt(40)); + + EncOut delta; + enc->Encode(raw_delta, {.presentation_timestamp = Timestamp::Millis(100)}, + ToVec({Fb().Rate(kCbr).Res(640, 360).Ref({0}).Out(delta)})); + EXPECT_THAT(delta, Not(HasBitstreamAndMetaData())); +} + +TEST(LibaomAv1Encoder, MidTemporalUnitKeyframeResetsBuffers) { + auto frame_reader = CreateFrameReader(); + auto enc = LibaomAv1EncoderFactory().CreateEncoder(kCbrEncoderSettings, {}); + + EncOut tu0_s2; + enc->Encode(frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(0)}, + ToVec({Fb().Rate(kCbr).Res(160, 90).S(0).Upd(0).Key(), + Fb().Rate(kCbr).Res(320, 180).S(1).Ref({0}), + Fb().Rate(kCbr).Res(640, 360).S(2).Ref({0}).Out(tu0_s2)})); + EXPECT_THAT(tu0_s2, HasBitstreamAndMetaData()); + + 
EncOut tu1_s0; + enc->Encode( + frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(100)}, + ToVec({Fb().Rate(kCbr).Res(160, 90).S(0).Upd(0).Ref({0}).Out(tu1_s0), + Fb().Rate(kCbr).Res(320, 180).S(1).Upd(1).Key(), + Fb().Rate(kCbr).Res(640, 360).S(2).Ref({0})})); + EXPECT_THAT(tu1_s0, Not(HasBitstreamAndMetaData())); +} + +TEST(LibaomAv1Encoder, ResolutionSwitching) { + auto frame_reader = CreateFrameReader(); + auto enc = LibaomAv1EncoderFactory().CreateEncoder(kCbrEncoderSettings, {}); + + scoped_refptr in0 = frame_reader->PullFrame(); + EncOut tu0; + enc->Encode(in0, {.presentation_timestamp = Timestamp::Millis(0)}, + ToVec({Fb().Rate(kCbr).Res(320, 180).Upd(0).Key().Out(tu0)})); + + scoped_refptr in1 = frame_reader->PullFrame(); + EncOut tu1; + enc->Encode(in1, {.presentation_timestamp = Timestamp::Millis(100)}, + ToVec({Fb().Rate(kCbr).Res(640, 360).Ref({0}).Out(tu1)})); + + scoped_refptr in2 = frame_reader->PullFrame(); + EncOut tu2; + enc->Encode(in2, {.presentation_timestamp = Timestamp::Millis(200)}, + ToVec({Fb().Rate(kCbr).Res(160, 90).Ref({0}).Out(tu2)})); + + Av1Decoder dec; + VideoFrame f0 = dec.Decode(tu0.bitstream); + EXPECT_THAT(Resolution(f0), ResolutionIs(320, 180)); + // TD: + // EXPECT_THAT(Psnr(in0, f0), Gt(40)); + + VideoFrame f1 = dec.Decode(tu1.bitstream); + EXPECT_THAT(Resolution(f1), ResolutionIs(640, 360)); + EXPECT_THAT(Psnr(in1, f1), Gt(40)); + + VideoFrame f2 = dec.Decode(tu2.bitstream); + EXPECT_THAT(Resolution(f2), ResolutionIs(160, 90)); + // TD: + // EXPECT_THAT(Psnr(in2, f2), Gt(40)); +} + +TEST(LibaomAv1Encoder, InputResolutionSwitching) { + auto frame_reader = CreateFrameReader(); + auto enc = LibaomAv1EncoderFactory().CreateEncoder(kCbrEncoderSettings, {}); + + scoped_refptr in0 = frame_reader->PullFrame(); + EncOut tu0; + enc->Encode(in0, {.presentation_timestamp = Timestamp::Millis(0)}, + ToVec({Fb().Rate(kCbr).Res(160, 90).Upd(0).Key().Out(tu0)})); + + scoped_refptr in1 = frame_reader->PullFrame( + /*frame_num=*/nullptr, + /*resolution=*/{320, 180}, + /*framerate_scale=*/{1, 1}); + EncOut tu1; + enc->Encode(in1, {.presentation_timestamp = Timestamp::Millis(100)}, + ToVec({Fb().Rate(kCbr).Res(160, 90).Ref({0}).Out(tu1)})); + + scoped_refptr in2 = frame_reader->PullFrame( + /*frame_num=*/nullptr, + /*resolution=*/{160, 90}, + /*framerate_scale=*/{1, 1}); + EncOut tu2; + enc->Encode(in2, {.presentation_timestamp = Timestamp::Millis(200)}, + ToVec({Fb().Rate(kCbr).Res(160, 90).Ref({0}).Out(tu2)})); + + Av1Decoder dec; + VideoFrame f0 = dec.Decode(tu0.bitstream); + EXPECT_THAT(Resolution(f0), ResolutionIs(160, 90)); + // TD: + // EXPECT_THAT(Psnr(in0, f0), Gt(40)); + + VideoFrame f1 = dec.Decode(tu1.bitstream); + EXPECT_THAT(Resolution(f1), ResolutionIs(160, 90)); + // TD: + // EXPECT_THAT(Psnr(in1, f1), Gt(40)); + + VideoFrame f2 = dec.Decode(tu2.bitstream); + EXPECT_THAT(Resolution(f2), ResolutionIs(160, 90)); + EXPECT_THAT(Psnr(in2, f2), Gt(40)); +} + +TEST(LibaomAv1Encoder, TempoSpatial) { + auto frame_reader = CreateFrameReader(); + auto enc = LibaomAv1EncoderFactory().CreateEncoder(kCbrEncoderSettings, {}); + + const Cbr k10Fps{.duration = TimeDelta::Millis(100), + .target_bitrate = DataRate::KilobitsPerSec(500)}; + const Cbr k20Fps{.duration = TimeDelta::Millis(50), + .target_bitrate = DataRate::KilobitsPerSec(500)}; + + EncOut tu0_s0; + EncOut tu0_s1; + EncOut tu0_s2; + enc->Encode( + frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(0)}, + ToVec( + {Fb().Rate(k10Fps).Res(160, 
90).S(0).Upd(0).Key().Out(tu0_s0), + Fb().Rate(k10Fps).Res(320, 180).S(1).Ref({0}).Upd(1).Out(tu0_s1), + Fb().Rate(k20Fps).Res(640, 360).S(2).Ref({1}).Upd(2).Out(tu0_s2)})); + + EncOut tu1_s2; + enc->Encode(frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(50)}, + ToVec({Fb().Rate(k20Fps).Res(640, 360).S(2).Ref({2}).Upd(2).Out( + tu1_s2)})); + + scoped_refptr frame = frame_reader->PullFrame(); + EncOut tu2_s0; + EncOut tu2_s1; + EncOut tu2_s2; + enc->Encode( + frame, {.presentation_timestamp = Timestamp::Millis(100)}, + ToVec( + {Fb().Rate(k10Fps).Res(160, 90).S(0).Ref({0}).Upd(0).Out(tu2_s0), + Fb().Rate(k10Fps).Res(320, 180).S(1).Ref({0, 1}).Upd(1).Out(tu2_s1), + Fb().Rate(k20Fps).Res(640, 360).S(2).Ref({1, 2}).Upd(2).Out( + tu2_s2)})); + + Av1Decoder dec; + EXPECT_THAT(Resolution(dec.Decode(tu0_s0.bitstream)), ResolutionIs(160, 90)); + EXPECT_THAT(Resolution(dec.Decode(tu0_s1.bitstream)), ResolutionIs(320, 180)); + EXPECT_THAT(Resolution(dec.Decode(tu0_s2.bitstream)), ResolutionIs(640, 360)); + EXPECT_THAT(Resolution(dec.Decode(tu1_s2.bitstream)), ResolutionIs(640, 360)); + EXPECT_THAT(Resolution(dec.Decode(tu2_s0.bitstream)), ResolutionIs(160, 90)); + EXPECT_THAT(Resolution(dec.Decode(tu2_s1.bitstream)), ResolutionIs(320, 180)); + + VideoFrame f = dec.Decode(tu2_s2.bitstream); + EXPECT_THAT(Resolution(f), ResolutionIs(640, 360)); + EXPECT_THAT(Psnr(frame, f), Gt(40)); +} + +TEST(DISABLED_LibaomAv1Encoder, InvertedTempoSpatial) { + auto frame_reader = CreateFrameReader(); + auto enc = LibaomAv1EncoderFactory().CreateEncoder(kCbrEncoderSettings, {}); + + EncOut tu0_s0; + EncOut tu0_s1; + enc->Encode( + frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(0)}, + ToVec({Fb().Rate(kCbr).Res(320, 180).S(0).Upd(0).Key().Out(tu0_s0), + Fb().Rate(kCbr).Res(640, 360).S(1).Ref({0}).Upd(1).Out(tu0_s1)})); + + EncOut tu1_s0; + enc->Encode( + frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(100)}, + ToVec({Fb().Rate(kCbr).Res(320, 180).S(0).Ref({0}).Upd(0).Out(tu1_s0)})); + + EncOut tu2_s0; + EncOut tu2_s1; + scoped_refptr frame = frame_reader->PullFrame(); + enc->Encode( + frame, {.presentation_timestamp = Timestamp::Millis(200)}, + ToVec( + {Fb().Rate(kCbr).Res(320, 180).S(0).Ref({0}).Upd(0).Out(tu2_s0), + Fb().Rate(kCbr).Res(640, 360).S(1).Ref({1, 0}).Upd(1).Out(tu2_s1)})); + + Av1Decoder dec; + EXPECT_THAT(Resolution(dec.Decode(tu0_s0.bitstream)), ResolutionIs(320, 180)); + EXPECT_THAT(Resolution(dec.Decode(tu0_s1.bitstream)), ResolutionIs(640, 360)); + EXPECT_THAT(Resolution(dec.Decode(tu1_s0.bitstream)), ResolutionIs(320, 180)); + EXPECT_THAT(Resolution(dec.Decode(tu2_s0.bitstream)), ResolutionIs(320, 180)); + EXPECT_THAT(Resolution(dec.Decode(tu2_s1.bitstream)), ResolutionIs(640, 360)); +} + +TEST(LibaomAv1Encoder, SkipMidLayer) { + auto frame_reader = CreateFrameReader(); + auto enc = LibaomAv1EncoderFactory().CreateEncoder(kCbrEncoderSettings, {}); + + EncOut tu0_s0; + EncOut tu0_s1; + EncOut tu0_s2; + enc->Encode( + frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(0)}, + ToVec({Fb().Rate(kCbr).Res(160, 90).S(0).Upd(0).Key().Out(tu0_s0), + Fb().Rate(kCbr).Res(320, 180).S(1).Ref({0}).Upd(1).Out(tu0_s1), + Fb().Rate(kCbr).Res(640, 360).S(2).Ref({1}).Upd(2).Out(tu0_s2)})); + + EncOut tu1_s0; + EncOut tu1_s2; + enc->Encode( + frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(100)}, + ToVec({Fb().Rate(kCbr).Res(160, 90).S(0).Ref({0}).Upd(0).Out(tu1_s0), + 
Fb().Rate(kCbr).Res(640, 360).S(2).Ref({2}).Upd(2).Out(tu1_s2)})); + + EncOut tu2_s0; + EncOut tu2_s1; + EncOut tu2_s2; + scoped_refptr frame = frame_reader->PullFrame(); + enc->Encode( + frame, {.presentation_timestamp = Timestamp::Millis(200)}, + ToVec( + {Fb().Rate(kCbr).Res(160, 90).S(0).Ref({0}).Upd(0).Out(tu2_s0), + Fb().Rate(kCbr).Res(320, 180).S(1).Ref({0, 1}).Upd(1).Out(tu2_s1), + Fb().Rate(kCbr).Res(640, 360).S(2).Ref({1, 2}).Upd(2).Out(tu2_s2)})); + + Av1Decoder dec; + EXPECT_THAT(Resolution(dec.Decode(tu0_s0.bitstream)), ResolutionIs(160, 90)); + EXPECT_THAT(Resolution(dec.Decode(tu0_s1.bitstream)), ResolutionIs(320, 180)); + EXPECT_THAT(Resolution(dec.Decode(tu0_s2.bitstream)), ResolutionIs(640, 360)); + EXPECT_THAT(Resolution(dec.Decode(tu1_s0.bitstream)), ResolutionIs(160, 90)); + EXPECT_THAT(Resolution(dec.Decode(tu1_s2.bitstream)), ResolutionIs(640, 360)); + EXPECT_THAT(Resolution(dec.Decode(tu2_s0.bitstream)), ResolutionIs(160, 90)); + EXPECT_THAT(Resolution(dec.Decode(tu2_s1.bitstream)), ResolutionIs(320, 180)); + + VideoFrame f = dec.Decode(tu2_s2.bitstream); + EXPECT_THAT(Resolution(f), ResolutionIs(640, 360)); + EXPECT_THAT(Psnr(frame, f), Gt(40)); +} + +TEST(LibaomAv1Encoder, L3T1) { + auto frame_reader = CreateFrameReader(); + auto enc = LibaomAv1EncoderFactory().CreateEncoder(kCbrEncoderSettings, {}); + Av1Decoder dec; + + EncOut tu0_s0; + EncOut tu0_s1; + EncOut tu0_s2; + enc->Encode( + frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(0)}, + ToVec({Fb().Rate(kCbr).Res(160, 90).S(0).Upd(0).Key().Out(tu0_s0), + Fb().Rate(kCbr).Res(320, 180).S(1).Ref({0}).Upd(1).Out(tu0_s1), + Fb().Rate(kCbr).Res(640, 360).S(2).Ref({1}).Upd(2).Out(tu0_s2)})); + + EXPECT_THAT(Resolution(dec.Decode(tu0_s0.bitstream)), ResolutionIs(160, 90)); + EXPECT_THAT(Resolution(dec.Decode(tu0_s1.bitstream)), ResolutionIs(320, 180)); + EXPECT_THAT(Resolution(dec.Decode(tu0_s2.bitstream)), ResolutionIs(640, 360)); + + auto tu1_frame = frame_reader->PullFrame(); + EncOut tu1_s0; + EncOut tu1_s1; + EncOut tu1_s2; + enc->Encode( + tu1_frame, {.presentation_timestamp = Timestamp::Millis(100)}, + ToVec( + {Fb().Rate(kCbr).Res(160, 90).S(0).Ref({0}).Upd(0).Out(tu1_s0), + Fb().Rate(kCbr).Res(320, 180).S(1).Ref({1, 0}).Upd(1).Out(tu1_s1), + Fb().Rate(kCbr).Res(640, 360).S(2).Ref({2, 1}).Upd(2).Out(tu1_s2)})); + + EXPECT_THAT(Resolution(dec.Decode(tu1_s0.bitstream)), ResolutionIs(160, 90)); + EXPECT_THAT(Resolution(dec.Decode(tu1_s1.bitstream)), ResolutionIs(320, 180)); + + VideoFrame f_tu1_s2 = dec.Decode(tu1_s2.bitstream); + EXPECT_THAT(Resolution(f_tu1_s2), ResolutionIs(640, 360)); + EXPECT_THAT(Psnr(tu1_frame, f_tu1_s2), Gt(40)); + + auto tu2_frame = frame_reader->PullFrame(); + EncOut tu2_s0; + EncOut tu2_s1; + EncOut tu2_s2; + enc->Encode( + tu2_frame, {.presentation_timestamp = Timestamp::Millis(200)}, + ToVec( + {Fb().Rate(kCbr).Res(160, 90).S(0).Ref({0}).Upd(0).Out(tu2_s0), + Fb().Rate(kCbr).Res(320, 180).S(1).Ref({1, 0}).Upd(1).Out(tu2_s1), + Fb().Rate(kCbr).Res(640, 360).S(2).Ref({2, 1}).Upd(2).Out(tu2_s2)})); + + EXPECT_THAT(Resolution(dec.Decode(tu2_s0.bitstream)), ResolutionIs(160, 90)); + EXPECT_THAT(Resolution(dec.Decode(tu2_s1.bitstream)), ResolutionIs(320, 180)); + + VideoFrame f_tu2 = dec.Decode(tu2_s2.bitstream); + EXPECT_THAT(Resolution(f_tu2), ResolutionIs(640, 360)); + EXPECT_THAT(Psnr(tu2_frame, f_tu2), Gt(40)); +} + +TEST(LibaomAv1Encoder, L3T1_KEY) { + auto frame_reader = CreateFrameReader(); + auto enc = 
LibaomAv1EncoderFactory().CreateEncoder(kCbrEncoderSettings, {}); + + Av1Decoder dec_s0; + Av1Decoder dec_s1; + Av1Decoder dec_s2; + + EncOut tu0_s0; + EncOut tu0_s1; + EncOut tu0_s2; + enc->Encode( + frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(0)}, + ToVec({Fb().Rate(kCbr).Res(160, 90).S(0).Upd(0).Key().Out(tu0_s0), + Fb().Rate(kCbr).Res(320, 180).S(1).Ref({0}).Upd(1).Out(tu0_s1), + Fb().Rate(kCbr).Res(640, 360).S(2).Ref({1}).Upd(2).Out(tu0_s2)})); + + EXPECT_THAT(Resolution(dec_s0.Decode(tu0_s0.bitstream)), + ResolutionIs(160, 90)); + + dec_s1.Decode(tu0_s0.bitstream); + EXPECT_THAT(Resolution(dec_s1.Decode(tu0_s1.bitstream)), + ResolutionIs(320, 180)); + + dec_s2.Decode(tu0_s0.bitstream); + dec_s2.Decode(tu0_s1.bitstream); + EXPECT_THAT(Resolution(dec_s2.Decode(tu0_s2.bitstream)), + ResolutionIs(640, 360)); + + EncOut tu1_s0; + EncOut tu1_s1; + EncOut tu1_s2; + enc->Encode( + frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(100)}, + ToVec({Fb().Rate(kCbr).Res(160, 90).S(0).Ref({0}).Upd(0).Out(tu1_s0), + Fb().Rate(kCbr).Res(320, 180).S(1).Ref({1}).Upd(1).Out(tu1_s1), + Fb().Rate(kCbr).Res(640, 360).S(2).Ref({2}).Upd(2).Out(tu1_s2)})); + + EXPECT_THAT(Resolution(dec_s0.Decode(tu1_s0.bitstream)), + ResolutionIs(160, 90)); + EXPECT_THAT(Resolution(dec_s1.Decode(tu1_s1.bitstream)), + ResolutionIs(320, 180)); + EXPECT_THAT(Resolution(dec_s2.Decode(tu1_s2.bitstream)), + ResolutionIs(640, 360)); + + EncOut tu2_s0; + EncOut tu2_s1; + EncOut tu2_s2; + enc->Encode( + frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(200)}, + ToVec({Fb().Rate(kCbr).Res(160, 90).S(0).Ref({0}).Upd(0).Out(tu2_s0), + Fb().Rate(kCbr).Res(320, 180).S(1).Ref({1}).Upd(1).Out(tu2_s1), + Fb().Rate(kCbr).Res(640, 360).S(2).Ref({2}).Upd(2).Out(tu2_s2)})); + + EXPECT_THAT(Resolution(dec_s0.Decode(tu2_s0.bitstream)), + ResolutionIs(160, 90)); + EXPECT_THAT(Resolution(dec_s1.Decode(tu2_s1.bitstream)), + ResolutionIs(320, 180)); + EXPECT_THAT(Resolution(dec_s2.Decode(tu2_s2.bitstream)), + ResolutionIs(640, 360)); +} + +TEST(LibaomAv1Encoder, S3T1) { + auto frame_reader = CreateFrameReader(); + auto enc = LibaomAv1EncoderFactory().CreateEncoder(kCbrEncoderSettings, {}); + + Av1Decoder dec_s0; + Av1Decoder dec_s1; + Av1Decoder dec_s2; + + EncOut tu0_s0; + EncOut tu0_s1; + EncOut tu0_s2; + enc->Encode( + frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(0)}, + ToVec({Fb().Rate(kCbr).Res(160, 90).S(0).Start().Upd(0).Out(tu0_s0), + Fb().Rate(kCbr).Res(320, 180).S(1).Start().Upd(1).Out(tu0_s1), + Fb().Rate(kCbr).Res(640, 360).S(2).Start().Upd(2).Out(tu0_s2)})); + EXPECT_THAT(Resolution(dec_s0.Decode(tu0_s0.bitstream)), + ResolutionIs(160, 90)); + EXPECT_THAT(Resolution(dec_s1.Decode(tu0_s1.bitstream)), + ResolutionIs(320, 180)); + EXPECT_THAT(Resolution(dec_s2.Decode(tu0_s2.bitstream)), + ResolutionIs(640, 360)); + + EncOut tu1_s0; + EncOut tu1_s1; + EncOut tu1_s2; + enc->Encode( + frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(100)}, + ToVec({Fb().Rate(kCbr).Res(160, 90).S(0).Ref({0}).Upd(0).Out(tu1_s0), + Fb().Rate(kCbr).Res(320, 180).S(1).Ref({1}).Upd(1).Out(tu1_s1), + Fb().Rate(kCbr).Res(640, 360).S(2).Ref({2}).Upd(2).Out(tu1_s2)})); + + EXPECT_THAT(Resolution(dec_s0.Decode(tu1_s0.bitstream)), + ResolutionIs(160, 90)); + EXPECT_THAT(Resolution(dec_s1.Decode(tu1_s1.bitstream)), + ResolutionIs(320, 180)); + EXPECT_THAT(Resolution(dec_s2.Decode(tu1_s2.bitstream)), + ResolutionIs(640, 360)); + + EncOut 
tu2_s0; + EncOut tu2_s1; + EncOut tu2_s2; + enc->Encode( + frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(200)}, + ToVec({Fb().Rate(kCbr).Res(160, 90).S(0).Ref({0}).Upd(0).Out(tu2_s0), + Fb().Rate(kCbr).Res(320, 180).S(1).Ref({1}).Upd(1).Out(tu2_s1), + Fb().Rate(kCbr).Res(640, 360).S(2).Ref({2}).Upd(2).Out(tu2_s2)})); + + EXPECT_THAT(Resolution(dec_s0.Decode(tu2_s0.bitstream)), + ResolutionIs(160, 90)); + EXPECT_THAT(Resolution(dec_s1.Decode(tu2_s1.bitstream)), + ResolutionIs(320, 180)); + EXPECT_THAT(Resolution(dec_s2.Decode(tu2_s2.bitstream)), + ResolutionIs(640, 360)); +} + +TEST(LibaomAv1Encoder, HigherEffortLevelYieldsHigherQualityFrames) { + auto frame_in = CreateFrameReader()->PullFrame(); + std::pair effort_range = LibaomAv1EncoderFactory() + .GetEncoderCapabilities() + .performance.min_max_effort_level; + // Cbr rc{.duration = TimeDelta::Millis(100), + // .target_bitrate = DataRate::KilobitsPerSec(100)}; + std::optional psnr_last; + Av1Decoder dec; + + for (int i = effort_range.first; i <= effort_range.second; ++i) { + auto enc = LibaomAv1EncoderFactory().CreateEncoder(kCbrEncoderSettings, {}); + EncOut tu0; + enc->Encode( + frame_in, {.presentation_timestamp = Timestamp::Millis(0)}, + ToVec({Fb().Rate(kCbr).Res(640, 360).Upd(0).Key().Effort(i).Out(tu0)})); + double psnr = Psnr(frame_in, dec.Decode(tu0.bitstream)); + EXPECT_THAT(psnr, Gt(psnr_last)); + psnr_last = psnr; + } +} + +TEST(LibaomAv1Encoder, KeyframeAndStartrameAreApproximatelyEqual) { + int max_spatial_layers = LibaomAv1EncoderFactory() + .GetEncoderCapabilities() + .prediction_constraints.max_spatial_layers; + const Cbr kRate{.duration = TimeDelta::Millis(100), + .target_bitrate = DataRate::KilobitsPerSec(500)}; + + for (int sid = 0; sid < max_spatial_layers; ++sid) { + std::string key_name = "cbr_key_sl_"; + key_name += std::to_string(sid); + Av1Decoder dec_key(key_name); + + std::string start_name = "cbr_start_sl_"; + start_name += std::to_string(sid); + Av1Decoder dec_start(start_name); + + auto frame_reader = CreateFrameReader(); + auto enc_key = + LibaomAv1EncoderFactory().CreateEncoder(kCbrEncoderSettings, {}); + auto enc_start = + LibaomAv1EncoderFactory().CreateEncoder(kCbrEncoderSettings, {}); + DataSize total_size_key = DataSize::Zero(); + DataSize total_size_start = DataSize::Zero(); + TimeDelta total_duration = TimeDelta::Zero(); + auto frame_in = frame_reader->PullFrame(); + + EncOut key; + EncOut start; + enc_key->Encode( + frame_in, {.presentation_timestamp = Timestamp::Millis(0)}, + ToVec({Fb().Rate(kRate).Res(640, 360).S(sid).Upd(0).Key().Out(key)})); + enc_start->Encode( + frame_in, {.presentation_timestamp = Timestamp::Millis(0)}, + ToVec( + {Fb().Rate(kRate).Res(640, 360).S(sid).Start().Upd(0).Out(start)})); + + total_size_key += DataSize::Bytes(key.bitstream.size()); + total_size_start += DataSize::Bytes(start.bitstream.size()); + + total_duration += kRate.duration; + dec_key.Decode(key.bitstream); + dec_start.Decode(start.bitstream); + + EXPECT_NEAR(total_size_key.bytes(), total_size_start.bytes(), + 0.1 * total_size_key.bytes()); + + for (int f = 1; f < 10; ++f) { + frame_in = frame_reader->PullFrame(); + enc_key->Encode( + frame_in, {.presentation_timestamp = Timestamp::Millis(f * 100)}, + ToVec({Fb().Rate(kRate).Res(640, 360).S(sid).Ref({0}).Upd(0).Out( + key)})); + enc_start->Encode( + frame_in, {.presentation_timestamp = Timestamp::Millis(f * 100)}, + ToVec({Fb().Rate(kRate).Res(640, 360).S(sid).Ref({0}).Upd(0).Out( + start)})); + total_size_key += 
DataSize::Bytes(key.bitstream.size()); + total_size_start += DataSize::Bytes(start.bitstream.size()); + + total_duration += kRate.duration; + dec_key.Decode(key.bitstream); + dec_start.Decode(start.bitstream); + } + + double key_encode_kbps = (total_size_key / total_duration).kbps(); + double start_encode_kbps = (total_size_start / total_duration).kbps(); + + EXPECT_NEAR(key_encode_kbps, start_encode_kbps, start_encode_kbps * 0.05); + } +} + +TEST(LibaomAv1Encoder, BitrateConsistentAcrossSpatialLayers) { + int max_spatial_layers = LibaomAv1EncoderFactory() + .GetEncoderCapabilities() + .prediction_constraints.max_spatial_layers; + const Cbr kRate{.duration = TimeDelta::Millis(100), + .target_bitrate = DataRate::KilobitsPerSec(500)}; + + for (int sid = 0; sid < max_spatial_layers; ++sid) { + std::string out_name = "cbr_sl_"; + out_name += std::to_string(sid); + + auto frame_reader = CreateFrameReader(); + auto enc = LibaomAv1EncoderFactory().CreateEncoder(kCbrEncoderSettings, {}); + DataSize total_size = DataSize::Zero(); + TimeDelta total_duration = TimeDelta::Zero(); + + EncOut out; + enc->Encode( + frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(0)}, + ToVec({Fb().Rate(kRate).Res(640, 360).S(sid).Upd(0).Key().Out(out)})); + total_size += DataSize::Bytes(out.bitstream.size()); + total_duration += kRate.duration; + + for (int f = 1; f < 30; ++f) { + enc->Encode( + frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(f * 100)}, + ToVec({Fb().Rate(kRate).Res(640, 360).S(sid).Ref({0}).Upd(0).Out( + out)})); + total_size += DataSize::Bytes(out.bitstream.size()); + total_duration += kRate.duration; + } + + double encode_kbps = (total_size / total_duration).kbps(); + double target_kbps = kRate.target_bitrate.kbps(); + + EXPECT_NEAR(encode_kbps, target_kbps, target_kbps * 0.1); + } +} + +TEST(LibaomAv1Encoder, ConstantQp) { + int max_spatial_layers = LibaomAv1EncoderFactory() + .GetEncoderCapabilities() + .prediction_constraints.max_spatial_layers; + constexpr int kQp = 30; + for (int sid = 0; sid < max_spatial_layers; ++sid) { + auto enc = LibaomAv1EncoderFactory().CreateEncoder(kCqpEncoderSettings, {}); + std::string out_name = "cqp_sl_"; + out_name += std::to_string(sid); + auto frame_reader = CreateFrameReader(); + + EncOut out; + enc->Encode(frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(0)}, + ToVec({Fb().Rate(Cqp{.target_qp = kQp}) + .Res(640, 360) + .S(sid) + .Upd(0) + .Key() + .Out(out)})); + EXPECT_THAT(out, QpIs(kQp)); + + for (int f = 1; f < 10; ++f) { + enc->Encode(frame_reader->PullFrame(), + {.presentation_timestamp = Timestamp::Millis(f * 100)}, + ToVec({Fb().Rate(Cqp{.target_qp = kQp - f}) + .Res(640, 360) + .S(sid) + .Ref({0}) + .Upd(0) + .Out(out)})); + EXPECT_THAT(out, QpIs(kQp - f)); + } + } +} + +} // namespace +} // namespace webrtc diff --git a/api/video_codecs/scalability_mode.cc b/api/video_codecs/scalability_mode.cc index c449b4217e..d4e202df1e 100644 --- a/api/video_codecs/scalability_mode.cc +++ b/api/video_codecs/scalability_mode.cc @@ -10,6 +10,9 @@ #include "api/video_codecs/scalability_mode.h" +#include + +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" namespace webrtc { @@ -88,4 +91,11 @@ absl::string_view ScalabilityModeToString(ScalabilityMode scalability_mode) { RTC_CHECK_NOTREACHED(); } +absl::string_view ScalabilityModeToString( + std::optional scalability_mode) { + return scalability_mode.has_value() + ? 
ScalabilityModeToString(*scalability_mode) + : "nullopt"; +} + } // namespace webrtc diff --git a/api/video_codecs/scalability_mode.h b/api/video_codecs/scalability_mode.h index b26f32eb22..279253ba46 100644 --- a/api/video_codecs/scalability_mode.h +++ b/api/video_codecs/scalability_mode.h @@ -14,6 +14,8 @@ #include #include +#include + #include "absl/strings/string_view.h" #include "rtc_base/system/rtc_export.h" @@ -105,6 +107,9 @@ inline constexpr size_t kScalabilityModeCount = RTC_EXPORT absl::string_view ScalabilityModeToString(ScalabilityMode scalability_mode); +RTC_EXPORT +absl::string_view ScalabilityModeToString( + std::optional scalability_mode); } // namespace webrtc diff --git a/api/video_codecs/scalability_mode_helper.cc b/api/video_codecs/scalability_mode_helper.cc index b4571632d9..83b7bd4729 100644 --- a/api/video_codecs/scalability_mode_helper.cc +++ b/api/video_codecs/scalability_mode_helper.cc @@ -10,28 +10,37 @@ #include "api/video_codecs/scalability_mode_helper.h" +#include + +#include "absl/strings/string_view.h" +#include "api/video_codecs/scalability_mode.h" #include "modules/video_coding/svc/scalability_mode_util.h" namespace webrtc { -absl::optional ScalabilityModeStringToNumSpatialLayers( +std::optional ScalabilityModeStringToNumSpatialLayers( absl::string_view scalability_mode_string) { - absl::optional scalability_mode = + std::optional scalability_mode = ScalabilityModeFromString(scalability_mode_string); if (!scalability_mode.has_value()) { - return absl::nullopt; + return std::nullopt; } return ScalabilityModeToNumSpatialLayers(*scalability_mode); } -absl::optional ScalabilityModeStringToNumTemporalLayers( +std::optional ScalabilityModeStringToNumTemporalLayers( absl::string_view scalability_mode_string) { - absl::optional scalability_mode = + std::optional scalability_mode = ScalabilityModeFromString(scalability_mode_string); if (!scalability_mode.has_value()) { - return absl::nullopt; + return std::nullopt; } return ScalabilityModeToNumTemporalLayers(*scalability_mode); } +std::optional ScalabilityModeStringToEnum( + absl::string_view scalability_mode_string) { + return ScalabilityModeFromString(scalability_mode_string); +} + } // namespace webrtc diff --git a/api/video_codecs/scalability_mode_helper.h b/api/video_codecs/scalability_mode_helper.h index a8b060d079..bc10bc5611 100644 --- a/api/video_codecs/scalability_mode_helper.h +++ b/api/video_codecs/scalability_mode_helper.h @@ -11,19 +11,27 @@ #ifndef API_VIDEO_CODECS_SCALABILITY_MODE_HELPER_H_ #define API_VIDEO_CODECS_SCALABILITY_MODE_HELPER_H_ +#include + #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/video_codecs/scalability_mode.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { // Returns the number of spatial layers from the `scalability_mode_string` // or nullopt if the given mode is unknown. -absl::optional ScalabilityModeStringToNumSpatialLayers( +RTC_EXPORT std::optional ScalabilityModeStringToNumSpatialLayers( absl::string_view scalability_mode_string); // Returns the number of temporal layers from the `scalability_mode_string` // or nullopt if the given mode is unknown. -absl::optional ScalabilityModeStringToNumTemporalLayers( +RTC_EXPORT std::optional ScalabilityModeStringToNumTemporalLayers( + absl::string_view scalability_mode_string); + +// Convert the `scalability_mode_string` to the scalability mode enum value +// or nullopt if the given mode is unknown. 
+RTC_EXPORT std::optional ScalabilityModeStringToEnum( absl::string_view scalability_mode_string); } // namespace webrtc diff --git a/api/video_codecs/sdp_video_format.cc b/api/video_codecs/sdp_video_format.cc index cb7e98a682..5c2c5dac3c 100644 --- a/api/video_codecs/sdp_video_format.cc +++ b/api/video_codecs/sdp_video_format.cc @@ -10,13 +10,23 @@ #include "api/video_codecs/sdp_video_format.h" +#include +#include + +#include "absl/container/inlined_vector.h" #include "absl/strings/match.h" -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/rtp_parameters.h" +#include "api/video/video_codec_type.h" #include "api/video_codecs/av1_profile.h" #include "api/video_codecs/h264_profile_level_id.h" +#include "api/video_codecs/scalability_mode.h" +#ifdef RTC_ENABLE_H265 +#include "api/video_codecs/h265_profile_tier_level.h" +#endif #include "api/video_codecs/video_codec.h" #include "api/video_codecs/vp9_profile.h" +#include "media/base/media_constants.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" @@ -25,63 +35,122 @@ namespace webrtc { namespace { -std::string H264GetPacketizationModeOrDefault( - const SdpVideoFormat::Parameters& params) { - constexpr char kH264FmtpPacketizationMode[] = "packetization-mode"; - const auto it = params.find(kH264FmtpPacketizationMode); +// TODO(bugs.webrtc.org/15847): remove code duplication of IsSameCodecSpecific +// in media/base/codec.cc +std::string GetFmtpParameterOrDefault(const CodecParameterMap& params, + const std::string& name, + const std::string& default_value) { + const auto it = params.find(name); if (it != params.end()) { return it->second; } + return default_value; +} + +std::string H264GetPacketizationModeOrDefault(const CodecParameterMap& params) { // If packetization-mode is not present, default to "0". // https://tools.ietf.org/html/rfc6184#section-6.2 - return "0"; + return GetFmtpParameterOrDefault(params, kH264FmtpPacketizationMode, "0"); } -bool H264IsSamePacketizationMode(const SdpVideoFormat::Parameters& left, - const SdpVideoFormat::Parameters& right) { +bool H264IsSamePacketizationMode(const CodecParameterMap& left, + const CodecParameterMap& right) { return H264GetPacketizationModeOrDefault(left) == H264GetPacketizationModeOrDefault(right); } +std::string AV1GetTierOrDefault(const CodecParameterMap& params) { + // If the parameter is not present, the tier MUST be inferred to be 0. + // https://aomediacodec.github.io/av1-rtp-spec/#72-sdp-parameters + return GetFmtpParameterOrDefault(params, kAv1FmtpTier, "0"); +} + +bool AV1IsSameTier(const CodecParameterMap& left, + const CodecParameterMap& right) { + return AV1GetTierOrDefault(left) == AV1GetTierOrDefault(right); +} + +std::string AV1GetLevelIdxOrDefault(const CodecParameterMap& params) { + // If the parameter is not present, it MUST be inferred to be 5 (level 3.1). + // https://aomediacodec.github.io/av1-rtp-spec/#72-sdp-parameters + return GetFmtpParameterOrDefault(params, kAv1FmtpLevelIdx, "5"); +} + +bool AV1IsSameLevelIdx(const CodecParameterMap& left, + const CodecParameterMap& right) { + return AV1GetLevelIdxOrDefault(left) == AV1GetLevelIdxOrDefault(right); +} + +#ifdef RTC_ENABLE_H265 +std::string GetH265TxModeOrDefault(const CodecParameterMap& params) { + // If TxMode is not present, a value of "SRST" must be inferred. 
+ // https://tools.ietf.org/html/rfc7798@section-7.1 + return GetFmtpParameterOrDefault(params, kH265FmtpTxMode, "SRST"); +} + +bool IsSameH265TxMode(const CodecParameterMap& left, + const CodecParameterMap& right) { + return absl::EqualsIgnoreCase(GetH265TxModeOrDefault(left), + GetH265TxModeOrDefault(right)); +} +#endif + // Some (video) codecs are actually families of codecs and rely on parameters // to distinguish different incompatible family members. -bool IsSameCodecSpecific(const SdpVideoFormat& format1, - const SdpVideoFormat& format2) { +bool IsSameCodecSpecific(const std::string& name1, + const CodecParameterMap& params1, + const std::string& name2, + const CodecParameterMap& params2) { // The assumption when calling this function is that the two formats have the // same name. - RTC_DCHECK(absl::EqualsIgnoreCase(format1.name, format2.name)); + RTC_DCHECK(absl::EqualsIgnoreCase(name1, name2)); - VideoCodecType codec_type = PayloadStringToCodecType(format1.name); + VideoCodecType codec_type = PayloadStringToCodecType(name1); switch (codec_type) { case kVideoCodecH264: - return H264IsSameProfile(format1.parameters, format2.parameters) && - H264IsSamePacketizationMode(format1.parameters, - format2.parameters); + return H264IsSameProfile(params1, params2) && + H264IsSamePacketizationMode(params1, params2); case kVideoCodecVP9: - return VP9IsSameProfile(format1.parameters, format2.parameters); + return VP9IsSameProfile(params1, params2); case kVideoCodecAV1: - return AV1IsSameProfile(format1.parameters, format2.parameters); + return AV1IsSameProfile(params1, params2) && + AV1IsSameTier(params1, params2) && + AV1IsSameLevelIdx(params1, params2); +#ifdef RTC_ENABLE_H265 + case kVideoCodecH265: + return H265IsSameProfile(params1, params2) && + H265IsSameTier(params1, params2) && + IsSameH265TxMode(params1, params2); +#endif default: return true; } } + } // namespace SdpVideoFormat::SdpVideoFormat(const std::string& name) : name(name) {} SdpVideoFormat::SdpVideoFormat(const std::string& name, - const Parameters& parameters) + const CodecParameterMap& parameters) : name(name), parameters(parameters) {} SdpVideoFormat::SdpVideoFormat( const std::string& name, - const Parameters& parameters, + const CodecParameterMap& parameters, const absl::InlinedVector& scalability_modes) : name(name), parameters(parameters), scalability_modes(scalability_modes) {} +SdpVideoFormat::SdpVideoFormat( + const SdpVideoFormat& format, + const absl::InlinedVector& modes) + : SdpVideoFormat(format) { + scalability_modes = modes; +} + SdpVideoFormat::SdpVideoFormat(const SdpVideoFormat&) = default; SdpVideoFormat::SdpVideoFormat(SdpVideoFormat&&) = default; SdpVideoFormat& SdpVideoFormat::operator=(const SdpVideoFormat&) = default; @@ -90,7 +159,7 @@ SdpVideoFormat& SdpVideoFormat::operator=(SdpVideoFormat&&) = default; SdpVideoFormat::~SdpVideoFormat() = default; std::string SdpVideoFormat::ToString() const { - rtc::StringBuilder builder; + StringBuilder builder; builder << "Codec name: " << name << ", parameters: {"; for (const auto& kv : parameters) { builder << " " << kv.first << "=" << kv.second; @@ -111,18 +180,18 @@ std::string SdpVideoFormat::ToString() const { builder << "]"; } - return builder.str(); + return builder.Release(); } bool SdpVideoFormat::IsSameCodec(const SdpVideoFormat& other) const { // Two codecs are considered the same if the name matches (case insensitive) // and certain codec-specific parameters match. 
return absl::EqualsIgnoreCase(name, other.name) && - IsSameCodecSpecific(*this, other); + IsSameCodecSpecific(name, parameters, other.name, other.parameters); } bool SdpVideoFormat::IsCodecInList( - rtc::ArrayView formats) const { + ArrayView formats) const { for (const auto& format : formats) { if (IsSameCodec(format)) { return true; @@ -136,10 +205,68 @@ bool operator==(const SdpVideoFormat& a, const SdpVideoFormat& b) { a.scalability_modes == b.scalability_modes; } -absl::optional FuzzyMatchSdpVideoFormat( - rtc::ArrayView supported_formats, +const SdpVideoFormat SdpVideoFormat::VP8() { + return SdpVideoFormat(kVp8CodecName, {}); +} + +const SdpVideoFormat SdpVideoFormat::H264() { + // H264 will typically require more tweaking like setting + // * packetization-mode (which defaults to 0 but 1 is more common) + // * level-asymmetry-allowed (which defaults to 0 but 1 is more common) + // * profile-level-id of which there are many. + return SdpVideoFormat(kH264CodecName, {}); +} + +const SdpVideoFormat SdpVideoFormat::H265() { + return SdpVideoFormat(kH265CodecName, {}); +} + +const SdpVideoFormat SdpVideoFormat::VP9Profile0() { + return SdpVideoFormat( + kVp9CodecName, + {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}}); +} + +const SdpVideoFormat SdpVideoFormat::VP9Profile1() { + return SdpVideoFormat( + kVp9CodecName, + {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile1)}}); +} + +const SdpVideoFormat SdpVideoFormat::VP9Profile2() { + return SdpVideoFormat( + kVp9CodecName, + {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}}); +} + +const SdpVideoFormat SdpVideoFormat::VP9Profile3() { + return SdpVideoFormat( + kVp9CodecName, + {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile3)}}); +} + +const SdpVideoFormat SdpVideoFormat::AV1Profile0() { + // https://aomediacodec.github.io/av1-rtp-spec/#72-sdp-parameters + return SdpVideoFormat( + kAv1CodecName, + {{kAv1FmtpProfile, AV1ProfileToString(AV1Profile::kProfile0).data()}, + {kAv1FmtpLevelIdx, "5"}, + {kAv1FmtpTier, "0"}}); +} + +const SdpVideoFormat SdpVideoFormat::AV1Profile1() { + // https://aomediacodec.github.io/av1-rtp-spec/#72-sdp-parameters + return SdpVideoFormat( + kAv1CodecName, + {{kAv1FmtpProfile, AV1ProfileToString(AV1Profile::kProfile1).data()}, + {kAv1FmtpLevelIdx, "5"}, + {kAv1FmtpTier, "0"}}); +} + +std::optional FuzzyMatchSdpVideoFormat( + ArrayView supported_formats, const SdpVideoFormat& format) { - absl::optional res; + std::optional res; int best_parameter_match = 0; for (const auto& supported_format : supported_formats) { if (absl::EqualsIgnoreCase(supported_format.name, format.name)) { diff --git a/api/video_codecs/sdp_video_format.h b/api/video_codecs/sdp_video_format.h index faaa66c241..80054cffee 100644 --- a/api/video_codecs/sdp_video_format.h +++ b/api/video_codecs/sdp_video_format.h @@ -12,11 +12,12 @@ #define API_VIDEO_CODECS_SDP_VIDEO_FORMAT_H_ #include +#include #include #include "absl/container/inlined_vector.h" -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/rtp_parameters.h" #include "api/video_codecs/scalability_mode.h" #include "rtc_base/system/rtc_export.h" @@ -25,15 +26,24 @@ namespace webrtc { // SDP specification for a single video codec. // NOTE: This class is still under development and may change without notice. 
struct RTC_EXPORT SdpVideoFormat { - using Parameters = std::map; + using Parameters [[deprecated("Use webrtc::CodecParameterMap")]] = + std::map; explicit SdpVideoFormat(const std::string& name); - SdpVideoFormat(const std::string& name, const Parameters& parameters); + SdpVideoFormat(const std::string& name, const CodecParameterMap& parameters); SdpVideoFormat( const std::string& name, - const Parameters& parameters, + const CodecParameterMap& parameters, const absl::InlinedVector& scalability_modes); + // Creates a new SdpVideoFormat object identical to the supplied + // SdpVideoFormat except the scalability_modes that are set to be the same as + // the supplied scalability modes. + SdpVideoFormat( + const SdpVideoFormat& format, + const absl::InlinedVector& + scalability_modes); + SdpVideoFormat(const SdpVideoFormat&); SdpVideoFormat(SdpVideoFormat&&); SdpVideoFormat& operator=(const SdpVideoFormat&); @@ -45,8 +55,7 @@ struct RTC_EXPORT SdpVideoFormat { // specific parameters. Please note that two SdpVideoFormats can represent the // same codec even though not all parameters are the same. bool IsSameCodec(const SdpVideoFormat& other) const; - bool IsCodecInList( - rtc::ArrayView formats) const; + bool IsCodecInList(ArrayView formats) const; std::string ToString() const; @@ -58,16 +67,32 @@ struct RTC_EXPORT SdpVideoFormat { } std::string name; - Parameters parameters; + CodecParameterMap parameters; absl::InlinedVector scalability_modes; + + // Well-known video codecs and their format parameters. + static const SdpVideoFormat VP8(); + static const SdpVideoFormat H264(); + static const SdpVideoFormat H265(); + static const SdpVideoFormat VP9Profile0(); + static const SdpVideoFormat VP9Profile1(); + static const SdpVideoFormat VP9Profile2(); + static const SdpVideoFormat VP9Profile3(); + static const SdpVideoFormat AV1Profile0(); + static const SdpVideoFormat AV1Profile1(); + + template + friend void AbslStringify(Sink& sink, const SdpVideoFormat& format) { + sink.Append(format.ToString()); + } }; // For not so good reasons sometimes additional parameters are added to an // SdpVideoFormat, which makes instances that should compare equal to not match // anymore. Until we stop misusing SdpVideoFormats provide this convenience // function to perform fuzzy matching. -absl::optional FuzzyMatchSdpVideoFormat( - rtc::ArrayView supported_formats, +std::optional FuzzyMatchSdpVideoFormat( + ArrayView supported_formats, const SdpVideoFormat& format); } // namespace webrtc diff --git a/api/video_codecs/simple_encoder_wrapper.cc b/api/video_codecs/simple_encoder_wrapper.cc new file mode 100644 index 0000000000..cf0fd80f8e --- /dev/null +++ b/api/video_codecs/simple_encoder_wrapper.cc @@ -0,0 +1,239 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video_codecs/simple_encoder_wrapper.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/scoped_refptr.h" +#include "api/units/data_size.h" +#include "api/units/frequency.h" +#include "api/video/video_frame_buffer.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/scalability_mode_helper.h" +#include "api/video_codecs/video_encoder_factory_interface.h" +#include "api/video_codecs/video_encoder_interface.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/create_scalability_structure.h" +#include "modules/video_coding/svc/scalable_video_controller.h" +#include "rtc_base/checks.h" +#include "rtc_base/numerics/rational.h" + +namespace webrtc { +using PredictionConstraints = + VideoEncoderFactoryInterface::Capabilities::PredictionConstraints; +using FrameEncodeSettings = VideoEncoderInterface::FrameEncodeSettings; + +namespace { +enum class Inter { kS, kL, kKey }; +enum class Scaling { k1_2, k2_3 }; +std::string SvcToString(int spatial_layers, + int temporal_layers, + Inter inter, + Scaling scaling) { + RTC_CHECK(spatial_layers > 1 || inter == Inter::kL); + std::string res; + res += inter == Inter::kS ? "S" : "L"; + res += std::to_string(spatial_layers); + res += "T"; + res += std::to_string(temporal_layers); + if (scaling == Scaling::k2_3) { + res += "h"; + } + if (inter == Inter::kKey) { + res += "_KEY"; + } + + return res; +} +} // namespace + +// static +std::vector SimpleEncoderWrapper::SupportedWebrtcSvcModes( + const PredictionConstraints& prediction_constraints) { + std::vector res; + + const int max_spatial_layers = + std::min(3, prediction_constraints.max_spatial_layers); + const int max_temporal_layers = + std::min(3, prediction_constraints.max_temporal_layers); + const bool scale_by_half = absl::c_linear_search( + prediction_constraints.scaling_factors, Rational{1, 2}); + const bool scale_by_two_thirds = absl::c_linear_search( + prediction_constraints.scaling_factors, Rational{2, 3}); + const bool inter_layer = + prediction_constraints.max_references > 1 && + prediction_constraints.buffer_space_type != + PredictionConstraints::BufferSpaceType::kMultiInstance; + + for (int s = 1; s <= max_spatial_layers; ++s) { + for (int t = 1; t <= max_temporal_layers; ++t) { + if (prediction_constraints.num_buffers > ((std::max(1, t - 1) * s) - 1)) { + if (s == 1 || inter_layer) { + res.push_back(SvcToString(s, t, Inter::kL, Scaling::k1_2)); + if (s == 1) { + continue; + } + } + if (scale_by_half) { + res.push_back(SvcToString(s, t, Inter::kS, Scaling::k1_2)); + if (inter_layer) { + res.push_back(SvcToString(s, t, Inter::kKey, Scaling::k1_2)); + } + } + if (scale_by_two_thirds) { + res.push_back(SvcToString(s, t, Inter::kS, Scaling::k2_3)); + if (inter_layer) { + res.push_back(SvcToString(s, t, Inter::kKey, Scaling::k2_3)); + res.push_back(SvcToString(s, t, Inter::kL, Scaling::k2_3)); + } + } + } + } + } + + return res; +} + +// static +std::unique_ptr SimpleEncoderWrapper::Create( + std::unique_ptr encoder, + absl::string_view scalability_mode) { + if (!encoder) { + return nullptr; + } + + std::optional sm = + ScalabilityModeStringToEnum(scalability_mode); + if (!sm) { + return nullptr; + } + + std::unique_ptr svc_controller = + CreateScalabilityStructure(*sm); + if (!svc_controller) { + return nullptr; + } + + return 
std::make_unique(std::move(encoder), + std::move(svc_controller)); +} + +SimpleEncoderWrapper::SimpleEncoderWrapper( + std::unique_ptr encoder, + std::unique_ptr svc_controller) + : encoder_(std::move(encoder)), + svc_controller_(std::move(svc_controller)), + layer_configs_(svc_controller_->StreamConfig()) {} + +void SimpleEncoderWrapper::SetEncodeQp(int qp) { + target_qp_ = qp; +} + +void SimpleEncoderWrapper::SetEncodeFps(int fps) { + fps_ = fps; +} + +void SimpleEncoderWrapper::Encode( + scoped_refptr frame_buffer, + bool force_keyframe, + EncodeResultCallback callback) { + std::vector configs = + svc_controller_->NextFrameConfig(force_keyframe); + std::vector encode_settings; + std::vector frame_infos; + + for (size_t s = 0; s < configs.size(); ++s) { + const ScalableVideoController::LayerFrameConfig& config = configs[s]; + frame_infos.push_back(svc_controller_->OnEncodeDone(config)); + FrameEncodeSettings& settings = encode_settings.emplace_back(); + settings.rate_options = VideoEncoderInterface::FrameEncodeSettings::Cqp{ + .target_qp = target_qp_}; + settings.spatial_id = config.SpatialId(); + settings.temporal_id = config.TemporalId(); + const int num = layer_configs_.scaling_factor_num[s]; + const int den = layer_configs_.scaling_factor_den[s]; + settings.resolution = {(frame_buffer->width() * num / den), + (frame_buffer->height() * num / den)}; + + bool buffer_updated = false; + for (const CodecBufferUsage& buffer : config.Buffers()) { + if (buffer.referenced) { + settings.reference_buffers.push_back(buffer.id); + } + if (buffer.updated) { + RTC_CHECK(!buffer_updated); + settings.update_buffer = buffer.id; + buffer_updated = true; + } + } + + if (settings.reference_buffers.empty()) { + settings.frame_type = FrameType::kKeyframe; + } + + struct FrameOut : public VideoEncoderInterface::FrameOutput { + ArrayView GetBitstreamOutputBuffer(DataSize size) override { + bitstream.resize(size.bytes()); + return bitstream; + } + + void EncodeComplete( + const VideoEncoderInterface::EncodeResult& result) override { + auto* data = std::get_if(&result); + + SimpleEncoderWrapper::EncodeResult res; + if (!data) { + res.oh_no = true; + callback(res); + return; + } + + res.frame_type = data->frame_type; + res.bitstream_data = std::move(bitstream); + res.generic_frame_info = frame_info; + if (res.frame_type == FrameType::kKeyframe) { + res.dependency_structure = svc_controller->DependencyStructure(); + } + callback(res); + } + std::vector bitstream; + EncodeResultCallback callback; + GenericFrameInfo frame_info; + ScalableVideoController* svc_controller; + }; + + auto out = std::make_unique(); + + out->callback = callback; + out->frame_info = std::move(frame_infos[settings.spatial_id]); + out->svc_controller = svc_controller_.get(); + + settings.frame_output = std::move(out); + } + + encoder_->Encode(std::move(frame_buffer), + {.presentation_timestamp = presentation_timestamp_}, + std::move(encode_settings)); + presentation_timestamp_ += 1 / Frequency::Hertz(fps_); +} + +} // namespace webrtc diff --git a/api/video_codecs/simple_encoder_wrapper.h b/api/video_codecs/simple_encoder_wrapper.h new file mode 100644 index 0000000000..466d45b656 --- /dev/null +++ b/api/video_codecs/simple_encoder_wrapper.h @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_SIMPLE_ENCODER_WRAPPER_H_ +#define API_VIDEO_CODECS_SIMPLE_ENCODER_WRAPPER_H_ + +#include +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/scoped_refptr.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/units/timestamp.h" +#include "api/video/video_frame_buffer.h" +#include "api/video_codecs/video_encoder_factory_interface.h" +#include "api/video_codecs/video_encoder_interface.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalable_video_controller.h" + +namespace webrtc { +class SimpleEncoderWrapper { + public: + struct EncodeResult { + bool oh_no = false; + std::vector bitstream_data; + FrameType frame_type; + GenericFrameInfo generic_frame_info; + std::optional dependency_structure; + }; + + using EncodeResultCallback = std::function; + + static std::vector SupportedWebrtcSvcModes( + const VideoEncoderFactoryInterface::Capabilities::PredictionConstraints& + prediction_constraints); + + static std::unique_ptr Create( + std::unique_ptr encoder, + absl::string_view scalability_mode); + + // Should be private, use the Create function instead. + SimpleEncoderWrapper(std::unique_ptr encoder, + std::unique_ptr svc_controller); + + // We should really only support CBR, but then we have to think about layer + // allocations... eh... For this PoC just use CQP. + void SetEncodeQp(int qp); + + void SetEncodeFps(int fps); + + void Encode(scoped_refptr frame_buffer, + bool force_keyframe, + EncodeResultCallback callback); + + private: + std::unique_ptr encoder_; + std::unique_ptr svc_controller_; + ScalableVideoController::StreamLayersConfig layer_configs_; + int target_qp_ = 0; + int fps_ = 0; + Timestamp presentation_timestamp_ = Timestamp::Zero(); +}; + +} // namespace webrtc +#endif // API_VIDEO_CODECS_SIMPLE_ENCODER_WRAPPER_H_ diff --git a/api/video_codecs/simple_encoder_wrapper_unittests.cc b/api/video_codecs/simple_encoder_wrapper_unittests.cc new file mode 100644 index 0000000000..c5d9bae967 --- /dev/null +++ b/api/video_codecs/simple_encoder_wrapper_unittests.cc @@ -0,0 +1,306 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include +#include +#include + +#include "api/video/i420_buffer.h" // IWYU pragma: keep +#include "api/video_codecs/libaom_av1_encoder_factory.h" +#include "api/video_codecs/simple_encoder_wrapper.h" +#include "api/video_codecs/video_encoder_factory_interface.h" +#include "api/video_codecs/video_encoding_general.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" +#include "test/testsupport/frame_reader.h" + +namespace webrtc { + +using ::testing::Eq; +using ::testing::Gt; +using ::testing::IsEmpty; +using ::testing::Ne; +using ::testing::Not; +using ::testing::NotNull; +using ::testing::UnorderedElementsAre; +using PredictionConstraints = + VideoEncoderFactoryInterface::Capabilities::PredictionConstraints; + +namespace { + +std::unique_ptr CreateFrameReader() { + return CreateY4mFrameReader( + test::ResourcePath("reference_video_640x360_30fps", "y4m"), + test::YuvFrameReaderImpl::RepeatMode::kPingPong); +} + +TEST(SimpleEncoderWrapper, SupportedSvcModesOnlyL1T1) { + PredictionConstraints constraints = { + .num_buffers = 2, + .max_references = 2, + .max_temporal_layers = 1, + .buffer_space_type = + PredictionConstraints::BufferSpaceType::kSingleKeyframe, + .max_spatial_layers = 1, + .scaling_factors = {{1, 1}}, + }; + + EXPECT_THAT(SimpleEncoderWrapper::SupportedWebrtcSvcModes(constraints), + UnorderedElementsAre("L1T1")); +} + +TEST(SimpleEncoderWrapper, SupportedSvcModesUpToL1T3) { + PredictionConstraints constraints = { + .num_buffers = 8, + .max_references = 1, + .max_temporal_layers = 3, + .buffer_space_type = + PredictionConstraints::BufferSpaceType::kSingleKeyframe, + .max_spatial_layers = 1, + .scaling_factors = {{1, 1}, {1, 2}}, + }; + + EXPECT_THAT(SimpleEncoderWrapper::SupportedWebrtcSvcModes(constraints), + UnorderedElementsAre("L1T1", "L1T2", "L1T3")); +} + +TEST(SimpleEncoderWrapper, SupportedSvcModesUpToL3T3Key) { + PredictionConstraints constraints = { + .num_buffers = 8, + .max_references = 2, + .max_temporal_layers = 3, + .buffer_space_type = + PredictionConstraints::BufferSpaceType::kSingleKeyframe, + .max_spatial_layers = 3, + .scaling_factors = {{1, 1}, {1, 2}}, + }; + + EXPECT_THAT( + SimpleEncoderWrapper::SupportedWebrtcSvcModes(constraints), + UnorderedElementsAre("L1T1", "L1T2", "L1T3", "L2T1", "L2T1_KEY", "L2T2", + "L2T2_KEY", "L2T3", "L2T3_KEY", "L3T1", "L3T1_KEY", + "L3T2", "L3T2_KEY", "L3T3", "L3T3_KEY", "S2T1", + "S2T2", "S2T3", "S3T1", "S3T2", "S3T3")); +} + +TEST(SimpleEncoderWrapper, SupportedSvcModesUpToS3T3) { + PredictionConstraints constraints = { + .num_buffers = 8, + .max_references = 2, + .max_temporal_layers = 3, + .buffer_space_type = + PredictionConstraints::BufferSpaceType::kMultiInstance, + .max_spatial_layers = 3, + .scaling_factors = {{1, 1}, {1, 2}}, + }; + + EXPECT_THAT(SimpleEncoderWrapper::SupportedWebrtcSvcModes(constraints), + UnorderedElementsAre("L1T1", "L1T2", "L1T3", "S2T1", "S2T2", + "S2T3", "S3T1", "S3T2", "S3T3")); +} + +TEST(SimpleEncoderWrapper, SupportedSvcModesUpToL3T3KeyWithHScaling) { + PredictionConstraints constraints = { + .num_buffers = 8, + .max_references = 2, + .max_temporal_layers = 3, + .buffer_space_type = + PredictionConstraints::BufferSpaceType::kSingleKeyframe, + .max_spatial_layers = 3, + .scaling_factors = {{1, 1}, {1, 2}, {2, 3}}, + }; + + EXPECT_THAT( + SimpleEncoderWrapper::SupportedWebrtcSvcModes(constraints), + UnorderedElementsAre( + "L1T1", "L1T2", "L1T3", "L2T1", "L2T1h", "L2T1_KEY", "L2T1h_KEY", + "L2T2", "L2T2h", "L2T2_KEY", "L2T2h_KEY", "L2T3", 
"L2T3h", "L2T3_KEY", + "L2T3h_KEY", "L3T1", "L3T1h", "L3T1_KEY", "L3T1h_KEY", "L3T2", + "L3T2h", "L3T2_KEY", "L3T2h_KEY", "L3T3", "L3T3h", "L3T3_KEY", + "L3T3h_KEY", "S2T1", "S2T1h", "S2T2", "S2T2h", "S2T3", "S2T3h", + "S3T1", "S3T1h", "S3T2", "S3T2h", "S3T3", "S3T3h")); +} + +// TD: The encoder wrapper shouldn't really use an actual encoder +// implementation for testing, but hey, this is just a PoC. +TEST(SimpleEncoderWrapper, EncodeL1T1) { + auto encoder = LibaomAv1EncoderFactory().CreateEncoder( + {.max_encode_dimensions = {1080, 720}, + .encoding_format = {.sub_sampling = EncodingFormat::k420, + .bit_depth = 8}, + .rc_mode = VideoEncoderFactoryInterface::StaticEncoderSettings::Cqp(), + .max_number_of_threads = 1}, + {}); + + std::unique_ptr simple_encoder = + SimpleEncoderWrapper::Create(std::move(encoder), "L1T1"); + + ASSERT_THAT(simple_encoder, NotNull()); + + simple_encoder->SetEncodeQp(30); + simple_encoder->SetEncodeFps(15); + auto frame_reader = CreateFrameReader(); + + int num_callbacks = 0; + simple_encoder->Encode( + frame_reader->PullFrame(), /*force_keyframe=*/true, + [&](const SimpleEncoderWrapper::EncodeResult& result) { + ++num_callbacks; + ASSERT_THAT(result.oh_no, Eq(false)); + EXPECT_THAT(result.dependency_structure, Ne(std::nullopt)); + EXPECT_THAT(result.bitstream_data, Not(IsEmpty())); + EXPECT_THAT(result.frame_type, Eq(FrameType::kKeyframe)); + EXPECT_THAT(result.generic_frame_info.spatial_id, Eq(0)); + EXPECT_THAT(result.generic_frame_info.temporal_id, Eq(0)); + }); + + simple_encoder->Encode( + frame_reader->PullFrame(), /*force_keyframe=*/false, + [&](const SimpleEncoderWrapper::EncodeResult& result) { + ++num_callbacks; + ASSERT_THAT(result.oh_no, Eq(false)); + EXPECT_THAT(result.dependency_structure, Eq(std::nullopt)); + EXPECT_THAT(result.bitstream_data, Not(IsEmpty())); + EXPECT_THAT(result.frame_type, Eq(FrameType::kDeltaFrame)); + EXPECT_THAT(result.generic_frame_info.spatial_id, Eq(0)); + EXPECT_THAT(result.generic_frame_info.temporal_id, Eq(0)); + }); +} + +TEST(SimpleEncoderWrapper, EncodeL2T2_KEY) { + auto encoder = LibaomAv1EncoderFactory().CreateEncoder( + {.max_encode_dimensions = {1080, 720}, + .encoding_format = {.sub_sampling = EncodingFormat::k420, + .bit_depth = 8}, + .rc_mode = VideoEncoderFactoryInterface::StaticEncoderSettings::Cqp(), + .max_number_of_threads = 1}, + {}); + + std::unique_ptr simple_encoder = + SimpleEncoderWrapper::Create(std::move(encoder), "L2T2_KEY"); + + ASSERT_THAT(simple_encoder, NotNull()); + + simple_encoder->SetEncodeQp(30); + simple_encoder->SetEncodeFps(15); + auto frame_reader = CreateFrameReader(); + + int num_callbacks = 0; + simple_encoder->Encode( + frame_reader->PullFrame(), /*force_keyframe=*/true, + [&](const SimpleEncoderWrapper::EncodeResult& result) { + ASSERT_THAT(result.oh_no, Eq(false)); + if (result.generic_frame_info.spatial_id == 0) { + ++num_callbacks; + EXPECT_THAT(result.dependency_structure, Ne(std::nullopt)); + EXPECT_THAT(result.bitstream_data, Not(IsEmpty())); + EXPECT_THAT(result.frame_type, Eq(FrameType::kKeyframe)); + EXPECT_THAT(result.generic_frame_info.temporal_id, Eq(0)); + } else if (result.generic_frame_info.spatial_id == 1) { + ++num_callbacks; + EXPECT_THAT(result.dependency_structure, Eq(std::nullopt)); + EXPECT_THAT(result.bitstream_data, Not(IsEmpty())); + EXPECT_THAT(result.frame_type, Eq(FrameType::kDeltaFrame)); + EXPECT_THAT(result.generic_frame_info.temporal_id, Eq(0)); + } + }); + + simple_encoder->Encode( + frame_reader->PullFrame(), /*force_keyframe=*/false, + 
[&](const SimpleEncoderWrapper::EncodeResult& result) { + ASSERT_THAT(result.oh_no, Eq(false)); + if (result.generic_frame_info.spatial_id == 0) { + ++num_callbacks; + EXPECT_THAT(result.dependency_structure, Eq(std::nullopt)); + EXPECT_THAT(result.bitstream_data, Not(IsEmpty())); + EXPECT_THAT(result.frame_type, Eq(FrameType::kDeltaFrame)); + EXPECT_THAT(result.generic_frame_info.temporal_id, Eq(1)); + } else if (result.generic_frame_info.spatial_id == 1) { + ++num_callbacks; + EXPECT_THAT(result.dependency_structure, Eq(std::nullopt)); + EXPECT_THAT(result.bitstream_data, Not(IsEmpty())); + EXPECT_THAT(result.frame_type, Eq(FrameType::kDeltaFrame)); + EXPECT_THAT(result.generic_frame_info.temporal_id, Eq(1)); + } + }); + + EXPECT_THAT(num_callbacks, Eq(4)); +} + +TEST(SimpleEncoderWrapper, EncodeL1T3ForceKeyframe) { + auto encoder = LibaomAv1EncoderFactory().CreateEncoder( + {.max_encode_dimensions = {1080, 720}, + .encoding_format = {.sub_sampling = EncodingFormat::k420, + .bit_depth = 8}, + .rc_mode = VideoEncoderFactoryInterface::StaticEncoderSettings::Cqp(), + .max_number_of_threads = 1}, + {}); + + std::unique_ptr simple_encoder = + SimpleEncoderWrapper::Create(std::move(encoder), "L1T3"); + + ASSERT_THAT(simple_encoder, NotNull()); + + simple_encoder->SetEncodeQp(30); + simple_encoder->SetEncodeFps(15); + auto frame_reader = CreateFrameReader(); + + int num_callbacks = 0; + simple_encoder->Encode( + frame_reader->PullFrame(), /*force_keyframe=*/true, + [&](const SimpleEncoderWrapper::EncodeResult& result) { + ++num_callbacks; + ASSERT_THAT(result.oh_no, Eq(false)); + EXPECT_THAT(result.frame_type, Eq(FrameType::kKeyframe)); + EXPECT_THAT(result.generic_frame_info.temporal_id, Eq(0)); + }); + + simple_encoder->Encode( + frame_reader->PullFrame(), /*force_keyframe=*/false, + [&](const SimpleEncoderWrapper::EncodeResult& result) { + ++num_callbacks; + ASSERT_THAT(result.oh_no, Eq(false)); + EXPECT_THAT(result.frame_type, Eq(FrameType::kDeltaFrame)); + EXPECT_THAT(result.generic_frame_info.temporal_id, Eq(2)); + }); + + simple_encoder->Encode( + frame_reader->PullFrame(), /*force_keyframe=*/false, + [&](const SimpleEncoderWrapper::EncodeResult& result) { + ++num_callbacks; + ASSERT_THAT(result.oh_no, Eq(false)); + EXPECT_THAT(result.frame_type, Eq(FrameType::kDeltaFrame)); + EXPECT_THAT(result.generic_frame_info.temporal_id, Eq(1)); + }); + + simple_encoder->Encode( + frame_reader->PullFrame(), /*force_keyframe=*/true, + [&](const SimpleEncoderWrapper::EncodeResult& result) { + ++num_callbacks; + ASSERT_THAT(result.oh_no, Eq(false)); + EXPECT_THAT(result.frame_type, Eq(FrameType::kKeyframe)); + EXPECT_THAT(result.generic_frame_info.temporal_id, Eq(0)); + }); + + simple_encoder->Encode( + frame_reader->PullFrame(), /*force_keyframe=*/false, + [&](const SimpleEncoderWrapper::EncodeResult& result) { + ++num_callbacks; + ASSERT_THAT(result.oh_no, Eq(false)); + EXPECT_THAT(result.frame_type, Eq(FrameType::kDeltaFrame)); + EXPECT_THAT(result.generic_frame_info.temporal_id, Eq(2)); + }); + + EXPECT_THAT(num_callbacks, Eq(5)); +} + +} // namespace +} // namespace webrtc diff --git a/api/video_codecs/simulcast_stream.cc b/api/video_codecs/simulcast_stream.cc index 312429ef9f..cda58269ac 100644 --- a/api/video_codecs/simulcast_stream.cc +++ b/api/video_codecs/simulcast_stream.cc @@ -10,6 +10,9 @@ #include "api/video_codecs/simulcast_stream.h" +#include + +#include "api/video_codecs/scalability_mode.h" #include "rtc_base/checks.h" namespace webrtc { @@ -23,15 +26,26 @@ void 
SimulcastStream::SetNumberOfTemporalLayers(unsigned char n) { numberOfTemporalLayers = n; } -ScalabilityMode SimulcastStream::GetScalabilityMode() const { - RTC_CHECK_GE(numberOfTemporalLayers, 1); - RTC_CHECK_LE(numberOfTemporalLayers, 3); +std::optional SimulcastStream::GetScalabilityMode() const { static const ScalabilityMode scalability_modes[3] = { ScalabilityMode::kL1T1, ScalabilityMode::kL1T2, ScalabilityMode::kL1T3, }; + if (numberOfTemporalLayers < 1 || numberOfTemporalLayers > 3) { + return std::nullopt; + } return scalability_modes[numberOfTemporalLayers - 1]; } +bool SimulcastStream::operator==(const SimulcastStream& other) const { + return (width == other.width && height == other.height && + maxFramerate == other.maxFramerate && + numberOfTemporalLayers == other.numberOfTemporalLayers && + maxBitrate == other.maxBitrate && + targetBitrate == other.targetBitrate && + minBitrate == other.minBitrate && qpMax == other.qpMax && + active == other.active); +} + } // namespace webrtc diff --git a/api/video_codecs/simulcast_stream.h b/api/video_codecs/simulcast_stream.h index 7c0dd5d786..4dbee5bd4b 100644 --- a/api/video_codecs/simulcast_stream.h +++ b/api/video_codecs/simulcast_stream.h @@ -11,19 +11,27 @@ #ifndef API_VIDEO_CODECS_SIMULCAST_STREAM_H_ #define API_VIDEO_CODECS_SIMULCAST_STREAM_H_ +#include + #include "api/video_codecs/scalability_mode.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { // TODO(bugs.webrtc.org/6883): Unify with struct VideoStream, part of // VideoEncoderConfig. -struct SimulcastStream { +struct RTC_EXPORT SimulcastStream { // Temporary utility methods for transition from numberOfTemporalLayers // setting to ScalabilityMode. unsigned char GetNumberOfTemporalLayers() const; - ScalabilityMode GetScalabilityMode() const; + std::optional GetScalabilityMode() const; void SetNumberOfTemporalLayers(unsigned char n); + bool operator==(const SimulcastStream& other) const; + bool operator!=(const SimulcastStream& other) const { + return !(*this == other); + } + int width = 0; int height = 0; float maxFramerate = 0; // fps. diff --git a/api/video_codecs/spatial_layer.cc b/api/video_codecs/spatial_layer.cc deleted file mode 100644 index 25ccdfeb48..0000000000 --- a/api/video_codecs/spatial_layer.cc +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "api/video_codecs/spatial_layer.h" - -namespace webrtc { - -bool SpatialLayer::operator==(const SpatialLayer& other) const { - return (width == other.width && height == other.height && - maxFramerate == other.maxFramerate && - numberOfTemporalLayers == other.numberOfTemporalLayers && - maxBitrate == other.maxBitrate && - targetBitrate == other.targetBitrate && - minBitrate == other.minBitrate && qpMax == other.qpMax && - active == other.active); -} - -} // namespace webrtc diff --git a/api/video_codecs/spatial_layer.h b/api/video_codecs/spatial_layer.h index 5a1b425427..4003f0b1b7 100644 --- a/api/video_codecs/spatial_layer.h +++ b/api/video_codecs/spatial_layer.h @@ -11,22 +11,11 @@ #ifndef API_VIDEO_CODECS_SPATIAL_LAYER_H_ #define API_VIDEO_CODECS_SPATIAL_LAYER_H_ -namespace webrtc { +#include "api/video_codecs/simulcast_stream.h" -struct SpatialLayer { - bool operator==(const SpatialLayer& other) const; - bool operator!=(const SpatialLayer& other) const { return !(*this == other); } +namespace webrtc { - unsigned short width; // NOLINT(runtime/int) - unsigned short height; // NOLINT(runtime/int) - float maxFramerate; // fps. - unsigned char numberOfTemporalLayers; - unsigned int maxBitrate; // kilobits/sec. - unsigned int targetBitrate; // kilobits/sec. - unsigned int minBitrate; // kilobits/sec. - unsigned int qpMax; // minimum quality - bool active; // encoded and sent. -}; +typedef SimulcastStream SpatialLayer; } // namespace webrtc #endif // API_VIDEO_CODECS_SPATIAL_LAYER_H_ diff --git a/api/video_codecs/test/BUILD.gn b/api/video_codecs/test/BUILD.gn index 7bfe86e9f4..6b8fe75a7a 100644 --- a/api/video_codecs/test/BUILD.gn +++ b/api/video_codecs/test/BUILD.gn @@ -31,25 +31,31 @@ if (rtc_include_tests) { "..:video_codecs_api", "../..:fec_controller_api", "../..:mock_video_encoder", + "../..:rtp_parameters", "../../../api:scoped_refptr", "../../../media:media_constants", - "../../../media:rtc_media_base", "../../../modules/video_coding:video_codec_interface", "../../../modules/video_coding:video_coding_utility", "../../../modules/video_coding:webrtc_vp8", "../../../rtc_base:checks", "../../../rtc_base:rtc_base_tests_utils", + "../../../test:explicit_key_value_config", "../../../test:fake_video_codecs", "../../../test:field_trial", "../../../test:test_support", "../../../test:video_test_common", + "../../environment", + "../../environment:environment_factory", + "../../units:timestamp", "../../video:encoded_image", + "../../video:resolution", "../../video:video_bitrate_allocation", + "../../video:video_bitrate_allocator", "../../video:video_frame", + "../../video:video_frame_type", "../../video:video_rtp_headers", "//testing/gtest", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("video_encoder_factory_template_tests") { @@ -57,6 +63,8 @@ if (rtc_include_tests) { sources = [ "video_encoder_factory_template_tests.cc" ] deps = [ + "..:scalability_mode", + "..:video_codecs_api", "..:video_encoder_factory_template", "..:video_encoder_factory_template_libaom_av1_adapter", "..:video_encoder_factory_template_libvpx_vp8_adapter", @@ -64,6 +72,8 @@ if (rtc_include_tests) { "..:video_encoder_factory_template_open_h264_adapter", "../../:mock_video_encoder", "../../../test:test_support", + "../../environment", + "../../environment:environment_factory", "//testing/gtest", ] } @@ -73,6 +83,7 @@ if (rtc_include_tests) { sources = [ "video_decoder_factory_template_tests.cc" ] deps = [ + "..:video_codecs_api", "..:video_decoder_factory_template", 
"..:video_decoder_factory_template_dav1d_adapter", "..:video_decoder_factory_template_libvpx_vp8_adapter", @@ -80,6 +91,8 @@ if (rtc_include_tests) { "..:video_decoder_factory_template_open_h264_adapter", "../../:mock_video_decoder", "../../../test:test_support", + "../../environment", + "../../environment:environment_factory", "//testing/gtest", ] } diff --git a/api/video_codecs/test/builtin_video_encoder_factory_unittest.cc b/api/video_codecs/test/builtin_video_encoder_factory_unittest.cc index 84fd594b4c..d2ccccfefa 100644 --- a/api/video_codecs/test/builtin_video_encoder_factory_unittest.cc +++ b/api/video_codecs/test/builtin_video_encoder_factory_unittest.cc @@ -12,9 +12,9 @@ #include #include -#include #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_encoder_factory.h" #include "test/gtest.h" namespace webrtc { diff --git a/api/video_codecs/test/h264_profile_level_id_unittest.cc b/api/video_codecs/test/h264_profile_level_id_unittest.cc index 47098d2682..044765b44e 100644 --- a/api/video_codecs/test/h264_profile_level_id_unittest.cc +++ b/api/video_codecs/test/h264_profile_level_id_unittest.cc @@ -11,9 +11,9 @@ #include "api/video_codecs/h264_profile_level_id.h" #include -#include +#include -#include "absl/types/optional.h" +#include "api/rtp_parameters.h" #include "test/gtest.h" namespace webrtc { @@ -144,8 +144,8 @@ TEST(H264ProfileLevelId, TestToStringInvalid) { } TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdEmpty) { - const absl::optional profile_level_id = - ParseSdpForH264ProfileLevelId(SdpVideoFormat::Parameters()); + const std::optional profile_level_id = + ParseSdpForH264ProfileLevelId(CodecParameterMap()); EXPECT_TRUE(profile_level_id); EXPECT_EQ(H264Profile::kProfileConstrainedBaseline, profile_level_id->profile); @@ -153,9 +153,9 @@ TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdEmpty) { } TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdConstrainedHigh) { - SdpVideoFormat::Parameters params; + CodecParameterMap params; params["profile-level-id"] = "640c2a"; - const absl::optional profile_level_id = + const std::optional profile_level_id = ParseSdpForH264ProfileLevelId(params); EXPECT_TRUE(profile_level_id); EXPECT_EQ(H264Profile::kProfileConstrainedHigh, profile_level_id->profile); @@ -163,7 +163,7 @@ TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdConstrainedHigh) { } TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdInvalid) { - SdpVideoFormat::Parameters params; + CodecParameterMap params; params["profile-level-id"] = "foobar"; EXPECT_FALSE(ParseSdpForH264ProfileLevelId(params)); } diff --git a/api/video_codecs/test/h265_profile_tier_level_unittest.cc b/api/video_codecs/test/h265_profile_tier_level_unittest.cc index a9fdf966a5..59c75f2ff3 100644 --- a/api/video_codecs/test/h265_profile_tier_level_unittest.cc +++ b/api/video_codecs/test/h265_profile_tier_level_unittest.cc @@ -1,248 +1,408 @@ -/* - * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "api/video_codecs/h265_profile_tier_level.h" - -#include - -#include "absl/types/optional.h" -#include "test/gtest.h" - -namespace webrtc { - -TEST(H265ProfileTierLevel, TestLevelToString) { - EXPECT_EQ(H265LevelToString(H265Level::kLevel1), "30"); - EXPECT_EQ(H265LevelToString(H265Level::kLevel2), "60"); - EXPECT_EQ(H265LevelToString(H265Level::kLevel2_1), "63"); - EXPECT_EQ(H265LevelToString(H265Level::kLevel3), "90"); - EXPECT_EQ(H265LevelToString(H265Level::kLevel3_1), "93"); - EXPECT_EQ(H265LevelToString(H265Level::kLevel4), "120"); - EXPECT_EQ(H265LevelToString(H265Level::kLevel4_1), "123"); - EXPECT_EQ(H265LevelToString(H265Level::kLevel5), "150"); - EXPECT_EQ(H265LevelToString(H265Level::kLevel5_1), "153"); - EXPECT_EQ(H265LevelToString(H265Level::kLevel5_2), "156"); - EXPECT_EQ(H265LevelToString(H265Level::kLevel6), "180"); - EXPECT_EQ(H265LevelToString(H265Level::kLevel6_1), "183"); - EXPECT_EQ(H265LevelToString(H265Level::kLevel6_2), "186"); -} - -TEST(H265ProfileTierLevel, TestProfileToString) { - EXPECT_EQ(H265ProfileToString(H265Profile::kProfileMain), "1"); - EXPECT_EQ(H265ProfileToString(H265Profile::kProfileMain10), "2"); - EXPECT_EQ(H265ProfileToString(H265Profile::kProfileMainStill), "3"); - EXPECT_EQ(H265ProfileToString(H265Profile::kProfileRangeExtensions), "4"); - EXPECT_EQ(H265ProfileToString(H265Profile::kProfileHighThroughput), "5"); - EXPECT_EQ(H265ProfileToString(H265Profile::kProfileMultiviewMain), "6"); - EXPECT_EQ(H265ProfileToString(H265Profile::kProfileScalableMain), "7"); - EXPECT_EQ(H265ProfileToString(H265Profile::kProfile3dMain), "8"); - EXPECT_EQ(H265ProfileToString(H265Profile::kProfileScreenContentCoding), "9"); - EXPECT_EQ(H265ProfileToString(H265Profile::kProfileScalableRangeExtensions), - "10"); - EXPECT_EQ(H265ProfileToString( - H265Profile::kProfileHighThroughputScreenContentCoding), - "11"); -} - -TEST(H265ProfileTierLevel, TestTierToString) { - EXPECT_EQ(H265TierToString(H265Tier::kTier0), "0"); - EXPECT_EQ(H265TierToString(H265Tier::kTier1), "1"); -} - -TEST(H265ProfileTierLevel, TestStringToProfile) { - // Invalid profiles. - EXPECT_FALSE(StringToH265Profile("0")); - EXPECT_FALSE(StringToH265Profile("12")); - - // Malformed profiles - EXPECT_FALSE(StringToH265Profile("")); - EXPECT_FALSE(StringToH265Profile(" 1")); - EXPECT_FALSE(StringToH265Profile("12x")); - EXPECT_FALSE(StringToH265Profile("x12")); - EXPECT_FALSE(StringToH265Profile("gggg")); - - // Valid profiles. - EXPECT_EQ(StringToH265Profile("1"), H265Profile::kProfileMain); - EXPECT_EQ(StringToH265Profile("2"), H265Profile::kProfileMain10); - EXPECT_EQ(StringToH265Profile("4"), H265Profile::kProfileRangeExtensions); -} - -TEST(H265ProfileTierLevel, TestStringToLevel) { - // Invalid levels. - EXPECT_FALSE(StringToH265Level("0")); - EXPECT_FALSE(StringToH265Level("200")); - - // Malformed levels. - EXPECT_FALSE(StringToH265Level("")); - EXPECT_FALSE(StringToH265Level(" 30")); - EXPECT_FALSE(StringToH265Level("30x")); - EXPECT_FALSE(StringToH265Level("x30")); - EXPECT_FALSE(StringToH265Level("ggggg")); - - // Valid levels. - EXPECT_EQ(StringToH265Level("30"), H265Level::kLevel1); - EXPECT_EQ(StringToH265Level("93"), H265Level::kLevel3_1); - EXPECT_EQ(StringToH265Level("183"), H265Level::kLevel6_1); -} - -TEST(H265ProfileTierLevel, TestStringToTier) { - // Invalid tiers. - EXPECT_FALSE(StringToH265Tier("4")); - EXPECT_FALSE(StringToH265Tier("-1")); - - // Malformed tiers. 
- EXPECT_FALSE(StringToH265Tier("")); - EXPECT_FALSE(StringToH265Tier(" 1")); - EXPECT_FALSE(StringToH265Tier("t1")); - - // Valid tiers. - EXPECT_EQ(StringToH265Tier("0"), H265Tier::kTier0); - EXPECT_EQ(StringToH265Tier("1"), H265Tier::kTier1); -} - -TEST(H265ProfileTierLevel, TestParseSdpProfileTierLevelAllEmpty) { - const absl::optional profile_tier_level = - ParseSdpForH265ProfileTierLevel(SdpVideoFormat::Parameters()); - EXPECT_TRUE(profile_tier_level); - EXPECT_EQ(H265Profile::kProfileMain, profile_tier_level->profile); - EXPECT_EQ(H265Level::kLevel3_1, profile_tier_level->level); - EXPECT_EQ(H265Tier::kTier0, profile_tier_level->tier); -} - -TEST(H265ProfileTierLevel, TestParseSdpProfileTierLevelPartialEmpty) { - SdpVideoFormat::Parameters params; - params["profile-id"] = "1"; - params["tier-flag"] = "0"; - absl::optional profile_tier_level = - ParseSdpForH265ProfileTierLevel(params); - EXPECT_TRUE(profile_tier_level); - EXPECT_EQ(H265Profile::kProfileMain, profile_tier_level->profile); - EXPECT_EQ(H265Level::kLevel3_1, profile_tier_level->level); - EXPECT_EQ(H265Tier::kTier0, profile_tier_level->tier); - - params.clear(); - params["profile-id"] = "2"; - profile_tier_level = ParseSdpForH265ProfileTierLevel(params); - EXPECT_TRUE(profile_tier_level); - EXPECT_EQ(H265Profile::kProfileMain10, profile_tier_level->profile); - EXPECT_EQ(H265Level::kLevel3_1, profile_tier_level->level); - EXPECT_EQ(H265Tier::kTier0, profile_tier_level->tier); - - params.clear(); - params["level-id"] = "180"; - profile_tier_level = ParseSdpForH265ProfileTierLevel(params); - EXPECT_TRUE(profile_tier_level); - EXPECT_EQ(H265Profile::kProfileMain, profile_tier_level->profile); - EXPECT_EQ(H265Level::kLevel6, profile_tier_level->level); - EXPECT_EQ(H265Tier::kTier0, profile_tier_level->tier); -} - -TEST(H265ProfileTierLevel, TestParseSdpProfileTierLevelInvalid) { - SdpVideoFormat::Parameters params; - - // Invalid profile-tier-level combination. - params["profile-id"] = "1"; - params["tier-flag"] = "1"; - params["level-id"] = "93"; - absl::optional profile_tier_level = - ParseSdpForH265ProfileTierLevel(params); - EXPECT_FALSE(profile_tier_level); - params.clear(); - params["profile-id"] = "1"; - params["tier-flag"] = "4"; - params["level-id"] = "180"; - profile_tier_level = ParseSdpForH265ProfileTierLevel(params); - EXPECT_FALSE(profile_tier_level); - - // Valid profile-tier-level combination. 
- params.clear(); - params["profile-id"] = "1"; - params["tier-flag"] = "0"; - params["level-id"] = "153"; - profile_tier_level = ParseSdpForH265ProfileTierLevel(params); - EXPECT_TRUE(profile_tier_level); -} - -TEST(H265ProfileTierLevel, TestToStringRoundTrip) { - SdpVideoFormat::Parameters params; - params["profile-id"] = "1"; - params["tier-flag"] = "0"; - params["level-id"] = "93"; - absl::optional profile_tier_level = - ParseSdpForH265ProfileTierLevel(params); - EXPECT_TRUE(profile_tier_level); - EXPECT_EQ("1", H265ProfileToString(profile_tier_level->profile)); - EXPECT_EQ("0", H265TierToString(profile_tier_level->tier)); - EXPECT_EQ("93", H265LevelToString(profile_tier_level->level)); - - params.clear(); - params["profile-id"] = "2"; - params["tier-flag"] = "1"; - params["level-id"] = "180"; - profile_tier_level = ParseSdpForH265ProfileTierLevel(params); - EXPECT_TRUE(profile_tier_level); - EXPECT_EQ("2", H265ProfileToString(profile_tier_level->profile)); - EXPECT_EQ("1", H265TierToString(profile_tier_level->tier)); - EXPECT_EQ("180", H265LevelToString(profile_tier_level->level)); -} - -TEST(H265ProfileTierLevel, TestProfileTierLevelCompare) { - SdpVideoFormat::Parameters params1; - SdpVideoFormat::Parameters params2; - - // None of profile-id/tier-flag/level-id is specified, - EXPECT_TRUE(H265IsSameProfileTierLevel(params1, params2)); - - // Same non-empty PTL - params1["profile-id"] = "1"; - params1["tier-flag"] = "0"; - params1["level-id"] = "120"; - params2["profile-id"] = "1"; - params2["tier-flag"] = "0"; - params2["level-id"] = "120"; - EXPECT_TRUE(H265IsSameProfileTierLevel(params1, params2)); - - // Different profiles. - params1.clear(); - params2.clear(); - params1["profile-id"] = "1"; - params2["profile-id"] = "2"; - EXPECT_FALSE(H265IsSameProfileTierLevel(params1, params2)); - - // Different levels. - params1.clear(); - params2.clear(); - params1["profile-id"] = "1"; - params2["profile-id"] = "1"; - params1["level-id"] = "93"; - params2["level-id"] = "183"; - EXPECT_FALSE(H265IsSameProfileTierLevel(params1, params2)); - - // Different tiers. - params1.clear(); - params2.clear(); - params1["profile-id"] = "1"; - params2["profile-id"] = "1"; - params1["level-id"] = "93"; - params2["level-id"] = "93"; - params1["tier-flag"] = "0"; - params2["tier-flag"] = "1"; - EXPECT_FALSE(H265IsSameProfileTierLevel(params1, params2)); - - // One of the SdpVideoFormat::Parameters is invalid. - params1.clear(); - params2.clear(); - params1["profile-id"] = "1"; - params2["profile-id"] = "1"; - params1["tier-flag"] = "0"; - params2["tier-flag"] = "4"; - EXPECT_FALSE(H265IsSameProfileTierLevel(params1, params2)); -} - -} // namespace webrtc +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video_codecs/h265_profile_tier_level.h" + +#include +#include + +#include "api/rtp_parameters.h" +#include "api/video/resolution.h" +#include "test/gtest.h" + +namespace webrtc { + +TEST(H265ProfileTierLevel, TestLevelToString) { + EXPECT_EQ(H265LevelToString(H265Level::kLevel1), "30"); + EXPECT_EQ(H265LevelToString(H265Level::kLevel2), "60"); + EXPECT_EQ(H265LevelToString(H265Level::kLevel2_1), "63"); + EXPECT_EQ(H265LevelToString(H265Level::kLevel3), "90"); + EXPECT_EQ(H265LevelToString(H265Level::kLevel3_1), "93"); + EXPECT_EQ(H265LevelToString(H265Level::kLevel4), "120"); + EXPECT_EQ(H265LevelToString(H265Level::kLevel4_1), "123"); + EXPECT_EQ(H265LevelToString(H265Level::kLevel5), "150"); + EXPECT_EQ(H265LevelToString(H265Level::kLevel5_1), "153"); + EXPECT_EQ(H265LevelToString(H265Level::kLevel5_2), "156"); + EXPECT_EQ(H265LevelToString(H265Level::kLevel6), "180"); + EXPECT_EQ(H265LevelToString(H265Level::kLevel6_1), "183"); + EXPECT_EQ(H265LevelToString(H265Level::kLevel6_2), "186"); +} + +TEST(H265ProfileTierLevel, TestProfileToString) { + EXPECT_EQ(H265ProfileToString(H265Profile::kProfileMain), "1"); + EXPECT_EQ(H265ProfileToString(H265Profile::kProfileMain10), "2"); + EXPECT_EQ(H265ProfileToString(H265Profile::kProfileMainStill), "3"); + EXPECT_EQ(H265ProfileToString(H265Profile::kProfileRangeExtensions), "4"); + EXPECT_EQ(H265ProfileToString(H265Profile::kProfileHighThroughput), "5"); + EXPECT_EQ(H265ProfileToString(H265Profile::kProfileMultiviewMain), "6"); + EXPECT_EQ(H265ProfileToString(H265Profile::kProfileScalableMain), "7"); + EXPECT_EQ(H265ProfileToString(H265Profile::kProfile3dMain), "8"); + EXPECT_EQ(H265ProfileToString(H265Profile::kProfileScreenContentCoding), "9"); + EXPECT_EQ(H265ProfileToString(H265Profile::kProfileScalableRangeExtensions), + "10"); + EXPECT_EQ(H265ProfileToString( + H265Profile::kProfileHighThroughputScreenContentCoding), + "11"); +} + +TEST(H265ProfileTierLevel, TestTierToString) { + EXPECT_EQ(H265TierToString(H265Tier::kTier0), "0"); + EXPECT_EQ(H265TierToString(H265Tier::kTier1), "1"); +} + +TEST(H265ProfileTierLevel, TestStringToProfile) { + // Invalid profiles. + EXPECT_FALSE(StringToH265Profile("0")); + EXPECT_FALSE(StringToH265Profile("12")); + + // Malformed profiles + EXPECT_FALSE(StringToH265Profile("")); + EXPECT_FALSE(StringToH265Profile(" 1")); + EXPECT_FALSE(StringToH265Profile("12x")); + EXPECT_FALSE(StringToH265Profile("x12")); + EXPECT_FALSE(StringToH265Profile("gggg")); + + // Valid profiles. + EXPECT_EQ(StringToH265Profile("1"), H265Profile::kProfileMain); + EXPECT_EQ(StringToH265Profile("2"), H265Profile::kProfileMain10); + EXPECT_EQ(StringToH265Profile("4"), H265Profile::kProfileRangeExtensions); +} + +TEST(H265ProfileTierLevel, TestStringToLevel) { + // Invalid levels. + EXPECT_FALSE(StringToH265Level("0")); + EXPECT_FALSE(StringToH265Level("200")); + + // Malformed levels. + EXPECT_FALSE(StringToH265Level("")); + EXPECT_FALSE(StringToH265Level(" 30")); + EXPECT_FALSE(StringToH265Level("30x")); + EXPECT_FALSE(StringToH265Level("x30")); + EXPECT_FALSE(StringToH265Level("ggggg")); + + // Valid levels. + EXPECT_EQ(StringToH265Level("30"), H265Level::kLevel1); + EXPECT_EQ(StringToH265Level("93"), H265Level::kLevel3_1); + EXPECT_EQ(StringToH265Level("183"), H265Level::kLevel6_1); +} + +TEST(H265ProfileTierLevel, TestStringToTier) { + // Invalid tiers. + EXPECT_FALSE(StringToH265Tier("4")); + EXPECT_FALSE(StringToH265Tier("-1")); + + // Malformed tiers. 
+ EXPECT_FALSE(StringToH265Tier("")); + EXPECT_FALSE(StringToH265Tier(" 1")); + EXPECT_FALSE(StringToH265Tier("t1")); + + // Valid tiers. + EXPECT_EQ(StringToH265Tier("0"), H265Tier::kTier0); + EXPECT_EQ(StringToH265Tier("1"), H265Tier::kTier1); +} + +TEST(H265ProfileTierLevel, TestParseSdpProfileTierLevelAllEmpty) { + const std::optional profile_tier_level = + ParseSdpForH265ProfileTierLevel(CodecParameterMap()); + EXPECT_TRUE(profile_tier_level); + EXPECT_EQ(H265Profile::kProfileMain, profile_tier_level->profile); + EXPECT_EQ(H265Level::kLevel3_1, profile_tier_level->level); + EXPECT_EQ(H265Tier::kTier0, profile_tier_level->tier); +} + +TEST(H265ProfileTierLevel, TestParseSdpProfileTierLevelPartialEmpty) { + CodecParameterMap params; + params["profile-id"] = "1"; + params["tier-flag"] = "0"; + std::optional profile_tier_level = + ParseSdpForH265ProfileTierLevel(params); + EXPECT_TRUE(profile_tier_level); + EXPECT_EQ(H265Profile::kProfileMain, profile_tier_level->profile); + EXPECT_EQ(H265Level::kLevel3_1, profile_tier_level->level); + EXPECT_EQ(H265Tier::kTier0, profile_tier_level->tier); + + params.clear(); + params["profile-id"] = "2"; + profile_tier_level = ParseSdpForH265ProfileTierLevel(params); + EXPECT_TRUE(profile_tier_level); + EXPECT_EQ(H265Profile::kProfileMain10, profile_tier_level->profile); + EXPECT_EQ(H265Level::kLevel3_1, profile_tier_level->level); + EXPECT_EQ(H265Tier::kTier0, profile_tier_level->tier); + + params.clear(); + params["level-id"] = "180"; + profile_tier_level = ParseSdpForH265ProfileTierLevel(params); + EXPECT_TRUE(profile_tier_level); + EXPECT_EQ(H265Profile::kProfileMain, profile_tier_level->profile); + EXPECT_EQ(H265Level::kLevel6, profile_tier_level->level); + EXPECT_EQ(H265Tier::kTier0, profile_tier_level->tier); +} + +TEST(H265ProfileTierLevel, TestParseSdpProfileTierLevelInvalid) { + CodecParameterMap params; + + // Invalid profile-tier-level combination. + params["profile-id"] = "1"; + params["tier-flag"] = "1"; + params["level-id"] = "93"; + std::optional profile_tier_level = + ParseSdpForH265ProfileTierLevel(params); + EXPECT_FALSE(profile_tier_level); + params.clear(); + params["profile-id"] = "1"; + params["tier-flag"] = "4"; + params["level-id"] = "180"; + profile_tier_level = ParseSdpForH265ProfileTierLevel(params); + EXPECT_FALSE(profile_tier_level); + + // Valid profile-tier-level combination. 
+ params.clear(); + params["profile-id"] = "1"; + params["tier-flag"] = "0"; + params["level-id"] = "153"; + profile_tier_level = ParseSdpForH265ProfileTierLevel(params); + EXPECT_TRUE(profile_tier_level); +} + +TEST(H265ProfileTierLevel, TestToStringRoundTrip) { + CodecParameterMap params; + params["profile-id"] = "1"; + params["tier-flag"] = "0"; + params["level-id"] = "93"; + std::optional profile_tier_level = + ParseSdpForH265ProfileTierLevel(params); + EXPECT_TRUE(profile_tier_level); + EXPECT_EQ("1", H265ProfileToString(profile_tier_level->profile)); + EXPECT_EQ("0", H265TierToString(profile_tier_level->tier)); + EXPECT_EQ("93", H265LevelToString(profile_tier_level->level)); + + params.clear(); + params["profile-id"] = "2"; + params["tier-flag"] = "1"; + params["level-id"] = "180"; + profile_tier_level = ParseSdpForH265ProfileTierLevel(params); + EXPECT_TRUE(profile_tier_level); + EXPECT_EQ("2", H265ProfileToString(profile_tier_level->profile)); + EXPECT_EQ("1", H265TierToString(profile_tier_level->tier)); + EXPECT_EQ("180", H265LevelToString(profile_tier_level->level)); +} + +TEST(H265ProfileTierLevel, TestProfileTierLevelCompare) { + CodecParameterMap params1; + CodecParameterMap params2; + + // None of profile-id/tier-flag/level-id is specified, + EXPECT_TRUE(H265IsSameProfileTierLevel(params1, params2)); + + // Same non-empty PTL + params1["profile-id"] = "1"; + params1["tier-flag"] = "0"; + params1["level-id"] = "120"; + params2["profile-id"] = "1"; + params2["tier-flag"] = "0"; + params2["level-id"] = "120"; + EXPECT_TRUE(H265IsSameProfileTierLevel(params1, params2)); + + // Different profiles. + params1.clear(); + params2.clear(); + params1["profile-id"] = "1"; + params2["profile-id"] = "2"; + EXPECT_FALSE(H265IsSameProfileTierLevel(params1, params2)); + + // Different levels. + params1.clear(); + params2.clear(); + params1["profile-id"] = "1"; + params2["profile-id"] = "1"; + params1["level-id"] = "93"; + params2["level-id"] = "183"; + EXPECT_FALSE(H265IsSameProfileTierLevel(params1, params2)); + + // Different tiers. + params1.clear(); + params2.clear(); + params1["profile-id"] = "1"; + params2["profile-id"] = "1"; + params1["level-id"] = "180"; + // Level 3.1 is not allowed for tier 1. + params2["level-id"] = "180"; + params1["tier-flag"] = "0"; + params2["tier-flag"] = "1"; + EXPECT_FALSE(H265IsSameProfileTierLevel(params1, params2)); + + // One of the CodecParameterMap is invalid. + params1.clear(); + params2.clear(); + params1["profile-id"] = "1"; + params2["profile-id"] = "1"; + params1["tier-flag"] = "0"; + params2["tier-flag"] = "4"; + EXPECT_FALSE(H265IsSameProfileTierLevel(params1, params2)); +} + +TEST(H265ProfileTierLevel, TestProfileCompare) { + CodecParameterMap params1; + CodecParameterMap params2; + + // None of profile-id/tier-flag/level-id is specified, + EXPECT_TRUE(H265IsSameProfile(params1, params2)); + + // Same non-empty PTL + params1["profile-id"] = "1"; + params1["tier-flag"] = "0"; + params1["level-id"] = "120"; + params2["profile-id"] = "1"; + params2["tier-flag"] = "0"; + params2["level-id"] = "120"; + EXPECT_TRUE(H265IsSameProfile(params1, params2)); + + // Different profiles. + params1.clear(); + params2.clear(); + params1["profile-id"] = "1"; + params2["profile-id"] = "2"; + EXPECT_FALSE(H265IsSameProfile(params1, params2)); + + // Different levels. We do not compare HEVC levels. 
+ params1.clear(); + params2.clear(); + params1["profile-id"] = "1"; + params2["profile-id"] = "1"; + params1["level-id"] = "93"; + params2["level-id"] = "183"; + EXPECT_TRUE(H265IsSameProfile(params1, params2)); + + // Different tiers. + params1.clear(); + params2.clear(); + params1["profile-id"] = "1"; + params2["profile-id"] = "1"; + params1["level-id"] = "180"; + // level 3.1 is not allowed for tier 1. + params2["level-id"] = "180"; + params1["tier-flag"] = "0"; + params2["tier-flag"] = "1"; + EXPECT_TRUE(H265IsSameProfile(params1, params2)); + + // One of the CodecParameterMap is invalid. + params1.clear(); + params2.clear(); + params1["profile-id"] = "1"; + params2["profile-id"] = "1"; + params1["tier-flag"] = "0"; + params2["tier-flag"] = "4"; + EXPECT_FALSE(H265IsSameProfile(params1, params2)); +} + +TEST(H265ProfileTierLevel, TestTierCompare) { + CodecParameterMap params1; + CodecParameterMap params2; + + // None of profile-id/tier-flag/level-id is specified, + EXPECT_TRUE(H265IsSameTier(params1, params2)); + + // Same non-empty PTL + params1["profile-id"] = "1"; + params1["tier-flag"] = "0"; + params1["level-id"] = "120"; + params2["profile-id"] = "1"; + params2["tier-flag"] = "0"; + params2["level-id"] = "120"; + EXPECT_TRUE(H265IsSameTier(params1, params2)); + + // Different profiles. + params1.clear(); + params2.clear(); + params1["profile-id"] = "1"; + params2["profile-id"] = "2"; + EXPECT_TRUE(H265IsSameTier(params1, params2)); + + // Different levels. We do not compare HEVC levels. + params1.clear(); + params2.clear(); + params1["profile-id"] = "1"; + params2["profile-id"] = "1"; + params1["level-id"] = "93"; + params2["level-id"] = "183"; + EXPECT_TRUE(H265IsSameTier(params1, params2)); + + // Different tiers. + params1.clear(); + params2.clear(); + params1["profile-id"] = "1"; + params2["profile-id"] = "1"; + params1["level-id"] = "180"; + // level 3.1 is not allowed for tier 1. + params2["level-id"] = "180"; + params1["tier-flag"] = "0"; + params2["tier-flag"] = "1"; + EXPECT_FALSE(H265IsSameTier(params1, params2)); + + // One of the CodecParameterMap is invalid. 
+ params1.clear(); + params2.clear(); + params1["profile-id"] = "1"; + params2["profile-id"] = "1"; + params1["tier-flag"] = "0"; + params2["tier-flag"] = "4"; + EXPECT_FALSE(H265IsSameTier(params1, params2)); +} + +TEST(H265ProfileTierLevel, TestGetSupportedH265Level) { + // Test with 720p at 30fps + Resolution r{.width = 1280, .height = 720}; + EXPECT_EQ(GetSupportedH265Level(r, 30).value_or(H265Level::kLevel1), + H265Level::kLevel3); + + // Test with QCIF at 15fps + r.width = 176; + r.height = 144; + EXPECT_EQ(GetSupportedH265Level(r, 15).value_or(H265Level::kLevel2), + H265Level::kLevel1); + + // Test with 1080p at 30fps + r.width = 1920; + r.height = 1080; + EXPECT_EQ(GetSupportedH265Level(r, 30).value_or(H265Level::kLevel1), + H265Level::kLevel3_1); + + // Test with 1080p at 60fps + EXPECT_EQ(GetSupportedH265Level(r, 60).value_or(H265Level::kLevel1), + H265Level::kLevel3_1); + + // Test with 4K at 30fps + r.width = 3840; + r.height = 2160; + EXPECT_EQ(GetSupportedH265Level(r, 30).value_or(H265Level::kLevel1), + H265Level::kLevel4_1); + + // Test with 4K at 60fps + EXPECT_EQ(GetSupportedH265Level(r, 60).value_or(H265Level::kLevel1), + H265Level::kLevel4_1); + + // Test with 8K at 30fps + r.width = 8192; + r.height = 4320; + EXPECT_EQ(GetSupportedH265Level(r, 30).value_or(H265Level::kLevel1), + H265Level::kLevel6); + + // Test with 64x64 at 30fps + r.width = 64; + r.height = 64; + EXPECT_EQ(GetSupportedH265Level(r, 30), std::nullopt); + + // Test with extremly large width or height at 15fps + r.width = 16928; + r.height = 64; + EXPECT_EQ(GetSupportedH265Level(r, 15), std::nullopt); +} + +} // namespace webrtc diff --git a/api/video_codecs/test/sdp_video_format_unittest.cc b/api/video_codecs/test/sdp_video_format_unittest.cc index bb158aeb95..9db8df578c 100644 --- a/api/video_codecs/test/sdp_video_format_unittest.cc +++ b/api/video_codecs/test/sdp_video_format_unittest.cc @@ -10,27 +10,32 @@ #include "api/video_codecs/sdp_video_format.h" -#include - +#include "api/rtp_parameters.h" #include "media/base/media_constants.h" #include "test/gtest.h" namespace webrtc { typedef SdpVideoFormat Sdp; -typedef SdpVideoFormat::Parameters Params; +typedef CodecParameterMap Params; TEST(SdpVideoFormatTest, SameCodecNameNoParameters) { EXPECT_TRUE(Sdp("H264").IsSameCodec(Sdp("h264"))); EXPECT_TRUE(Sdp("VP8").IsSameCodec(Sdp("vp8"))); EXPECT_TRUE(Sdp("VP9").IsSameCodec(Sdp("vp9"))); EXPECT_TRUE(Sdp("AV1").IsSameCodec(Sdp("Av1"))); +#ifdef RTC_ENABLE_H265 + EXPECT_TRUE(Sdp("H265").IsSameCodec(Sdp("h265"))); +#endif } TEST(SdpVideoFormatTest, DifferentCodecNameNoParameters) { EXPECT_FALSE(Sdp("H264").IsSameCodec(Sdp("VP8"))); EXPECT_FALSE(Sdp("VP8").IsSameCodec(Sdp("VP9"))); EXPECT_FALSE(Sdp("AV1").IsSameCodec(Sdp("VP8"))); +#ifdef RTC_ENABLE_H265 + EXPECT_FALSE(Sdp("H265").IsSameCodec(Sdp("VP8"))); +#endif } TEST(SdpVideoFormatTest, SameCodecNameSameParameters) { @@ -50,6 +55,17 @@ TEST(SdpVideoFormatTest, SameCodecNameSameParameters) { .IsSameCodec(Sdp("AV1", Params{{"profile", "0"}}))); EXPECT_TRUE(Sdp("AV1", Params{{"profile", "2"}}) .IsSameCodec(Sdp("AV1", Params{{"profile", "2"}}))); +#ifdef RTC_ENABLE_H265 + EXPECT_TRUE(Sdp("H265").IsSameCodec(Sdp( + "H265", + Params{{"profile-id", "1"}, {"tier-flag", "0"}, {"level-id", "93"}}))); + EXPECT_TRUE( + Sdp("H265", + Params{{"profile-id", "2"}, {"tier-flag", "0"}, {"level-id", "93"}}) + .IsSameCodec(Sdp("H265", Params{{"profile-id", "2"}, + {"tier-flag", "0"}, + {"level-id", "93"}}))); +#endif } TEST(SdpVideoFormatTest, SameCodecNameDifferentParameters) 
{ @@ -69,6 +85,35 @@ TEST(SdpVideoFormatTest, SameCodecNameDifferentParameters) { .IsSameCodec(Sdp("AV1", Params{{"profile", "1"}}))); EXPECT_FALSE(Sdp("AV1", Params{{"profile", "1"}}) .IsSameCodec(Sdp("AV1", Params{{"profile", "2"}}))); +#ifdef RTC_ENABLE_H265 + EXPECT_FALSE(Sdp("H265").IsSameCodec(Sdp( + "H265", + Params{{"profile-id", "0"}, {"tier-flag", "0"}, {"level-id", "93"}}))); + EXPECT_FALSE(Sdp("H265").IsSameCodec(Sdp( + "H265", + Params{{"profile-id", "1"}, {"tier-flag", "1"}, {"level-id", "93"}}))); + EXPECT_TRUE(Sdp("H265").IsSameCodec(Sdp( + "H265", + Params{{"profile-id", "1"}, {"tier-flag", "0"}, {"level-id", "90"}}))); + EXPECT_FALSE( + Sdp("H265", + Params{{"profile-id", "2"}, {"tier-flag", "0"}, {"level-id", "93"}}) + .IsSameCodec(Sdp("H265", Params{{"profile-id", "1"}, + {"tier-flag", "0"}, + {"level-id", "93"}}))); + EXPECT_FALSE( + Sdp("H265", + Params{{"profile-id", "1"}, {"tier-flag", "1"}, {"level-id", "120"}}) + .IsSameCodec(Sdp("H265", Params{{"profile-id", "1"}, + {"tier-flag", "0"}, + {"level-id", "120"}}))); + EXPECT_TRUE( + Sdp("H265", + Params{{"profile-id", "1"}, {"tier-flag", "0"}, {"level-id", "93"}}) + .IsSameCodec(Sdp("H265", Params{{"profile-id", "1"}, + {"tier-flag", "0"}, + {"level-id", "90"}}))); +#endif } TEST(SdpVideoFormatTest, DifferentCodecNameSameParameters) { @@ -86,18 +131,23 @@ TEST(SdpVideoFormatTest, DifferentCodecNameSameParameters) { .IsSameCodec(Sdp("H264", Params{{"profile", "0"}}))); EXPECT_FALSE(Sdp("AV1", Params{{"profile", "2"}}) .IsSameCodec(Sdp("VP9", Params{{"profile", "2"}}))); +#ifdef RTC_ENABLE_H265 + EXPECT_FALSE(Sdp("H265", Params{{"profile-id", "0"}}) + .IsSameCodec(Sdp("H264", Params{{"profile-id", "0"}}))); + EXPECT_FALSE(Sdp("H265", Params{{"profile-id", "2"}}) + .IsSameCodec(Sdp("VP9", Params{{"profile-id", "2"}}))); +#endif } TEST(SdpVideoFormatTest, H264PacketizationMode) { // The default packetization mode is 0. - EXPECT_TRUE(Sdp("H264", Params{{cricket::kH264FmtpPacketizationMode, "0"}}) + EXPECT_TRUE(Sdp("H264", Params{{kH264FmtpPacketizationMode, "0"}}) .IsSameCodec(Sdp("H264"))); - EXPECT_FALSE(Sdp("H264", Params{{cricket::kH264FmtpPacketizationMode, "1"}}) + EXPECT_FALSE(Sdp("H264", Params{{kH264FmtpPacketizationMode, "1"}}) .IsSameCodec(Sdp("H264"))); EXPECT_TRUE( - Sdp("H264", Params{{cricket::kH264FmtpPacketizationMode, "1"}}) - .IsSameCodec( - Sdp("H264", Params{{cricket::kH264FmtpPacketizationMode, "1"}}))); + Sdp("H264", Params{{kH264FmtpPacketizationMode, "1"}}) + .IsSameCodec(Sdp("H264", Params{{kH264FmtpPacketizationMode, "1"}}))); } } // namespace webrtc diff --git a/api/video_codecs/test/video_decoder_factory_template_tests.cc b/api/video_codecs/test/video_decoder_factory_template_tests.cc index 1cc2b58274..8cae8be0bf 100644 --- a/api/video_codecs/test/video_decoder_factory_template_tests.cc +++ b/api/video_codecs/test/video_decoder_factory_template_tests.cc @@ -8,7 +8,14 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include +#include + +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/test/mock_video_decoder.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_decoder_factory_template.h" #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h" @@ -17,17 +24,17 @@ #include "test/gmock.h" #include "test/gtest.h" -using ::testing::Contains; +namespace webrtc { +namespace { + using ::testing::Each; -using ::testing::Eq; using ::testing::Field; using ::testing::IsEmpty; -using ::testing::Ne; +using ::testing::IsNull; using ::testing::Not; +using ::testing::NotNull; using ::testing::UnorderedElementsAre; -namespace webrtc { -namespace { const SdpVideoFormat kFooSdp("Foo"); const SdpVideoFormat kBarLowSdp("Bar", {{"profile", "low"}}); const SdpVideoFormat kBarHighSdp("Bar", {{"profile", "high"}}); @@ -36,7 +43,7 @@ struct FooDecoderTemplateAdapter { static std::vector SupportedFormats() { return {kFooSdp}; } static std::unique_ptr CreateDecoder( - const SdpVideoFormat& format) { + const SdpVideoFormat& /* format */) { auto decoder = std::make_unique>(); EXPECT_CALL(*decoder, Destruct); return decoder; @@ -49,7 +56,8 @@ struct BarDecoderTemplateAdapter { } static std::unique_ptr CreateDecoder( - const SdpVideoFormat& format) { + const Environment& /* env */, + const SdpVideoFormat& /* format */) { auto decoder = std::make_unique>(); EXPECT_CALL(*decoder, Destruct); return decoder; @@ -57,10 +65,11 @@ struct BarDecoderTemplateAdapter { }; TEST(VideoDecoderFactoryTemplate, OneTemplateAdapterCreateDecoder) { + const Environment env = CreateEnvironment(); VideoDecoderFactoryTemplate factory; EXPECT_THAT(factory.GetSupportedFormats(), UnorderedElementsAre(kFooSdp)); - EXPECT_THAT(factory.CreateVideoDecoder(kFooSdp), Ne(nullptr)); - EXPECT_THAT(factory.CreateVideoDecoder(SdpVideoFormat("FooX")), Eq(nullptr)); + EXPECT_THAT(factory.Create(env, kFooSdp), NotNull()); + EXPECT_THAT(factory.Create(env, SdpVideoFormat("FooX")), IsNull()); } TEST(VideoDecoderFactoryTemplate, TwoTemplateAdaptersNoDuplicates) { @@ -71,52 +80,57 @@ TEST(VideoDecoderFactoryTemplate, TwoTemplateAdaptersNoDuplicates) { } TEST(VideoDecoderFactoryTemplate, TwoTemplateAdaptersCreateDecoders) { + const Environment env = CreateEnvironment(); VideoDecoderFactoryTemplate factory; EXPECT_THAT(factory.GetSupportedFormats(), UnorderedElementsAre(kFooSdp, kBarLowSdp, kBarHighSdp)); - EXPECT_THAT(factory.CreateVideoDecoder(kFooSdp), Ne(nullptr)); - EXPECT_THAT(factory.CreateVideoDecoder(kBarLowSdp), Ne(nullptr)); - EXPECT_THAT(factory.CreateVideoDecoder(kBarHighSdp), Ne(nullptr)); - EXPECT_THAT(factory.CreateVideoDecoder(SdpVideoFormat("FooX")), Eq(nullptr)); - EXPECT_THAT(factory.CreateVideoDecoder(SdpVideoFormat("Bar")), Eq(nullptr)); + EXPECT_THAT(factory.Create(env, kFooSdp), NotNull()); + EXPECT_THAT(factory.Create(env, kBarLowSdp), NotNull()); + EXPECT_THAT(factory.Create(env, kBarHighSdp), NotNull()); + EXPECT_THAT(factory.Create(env, SdpVideoFormat("FooX")), IsNull()); + EXPECT_THAT(factory.Create(env, SdpVideoFormat("Bar")), IsNull()); } TEST(VideoDecoderFactoryTemplate, LibvpxVp8) { + const Environment env = CreateEnvironment(); VideoDecoderFactoryTemplate factory; auto formats = factory.GetSupportedFormats(); - EXPECT_THAT(formats.size(), 1); - EXPECT_THAT(formats[0], Field(&SdpVideoFormat::name, "VP8")); - 
EXPECT_THAT(factory.CreateVideoDecoder(formats[0]), Ne(nullptr)); + ASSERT_THAT(formats, + UnorderedElementsAre(Field(&SdpVideoFormat::name, "VP8"))); + EXPECT_THAT(factory.Create(env, formats[0]), NotNull()); } TEST(VideoDecoderFactoryTemplate, LibvpxVp9) { + const Environment env = CreateEnvironment(); VideoDecoderFactoryTemplate factory; auto formats = factory.GetSupportedFormats(); EXPECT_THAT(formats, Not(IsEmpty())); EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::name, "VP9"))); - EXPECT_THAT(factory.CreateVideoDecoder(formats[0]), Ne(nullptr)); + EXPECT_THAT(factory.Create(env, formats[0]), NotNull()); } // TODO(bugs.webrtc.org/13573): When OpenH264 is no longer a conditional build // target remove this #ifdef. #if defined(WEBRTC_USE_H264) TEST(VideoDecoderFactoryTemplate, OpenH264) { + const Environment env = CreateEnvironment(); VideoDecoderFactoryTemplate factory; auto formats = factory.GetSupportedFormats(); EXPECT_THAT(formats, Not(IsEmpty())); EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::name, "H264"))); - EXPECT_THAT(factory.CreateVideoDecoder(formats[0]), Ne(nullptr)); + EXPECT_THAT(factory.Create(env, formats[0]), NotNull()); } #endif // defined(WEBRTC_USE_H264) TEST(VideoDecoderFactoryTemplate, Dav1d) { + const Environment env = CreateEnvironment(); VideoDecoderFactoryTemplate factory; auto formats = factory.GetSupportedFormats(); EXPECT_THAT(formats, Not(IsEmpty())); EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::name, "AV1"))); - EXPECT_THAT(factory.CreateVideoDecoder(formats[0]), Ne(nullptr)); + EXPECT_THAT(factory.Create(env, formats[0]), NotNull()); } } // namespace diff --git a/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc b/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc index 97be6250db..d452884c11 100644 --- a/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc +++ b/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc @@ -12,15 +12,20 @@ #include -#include "absl/types/optional.h" +#include +#include +#include + +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/video/encoded_image.h" #include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" #include "api/video_codecs/video_decoder.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" -#include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/checks.h" -#include "test/field_trial.h" +#include "test/explicit_key_value_config.h" #include "test/gtest.h" namespace webrtc { @@ -31,21 +36,23 @@ class VideoDecoderSoftwareFallbackWrapperTest : public ::testing::Test { : VideoDecoderSoftwareFallbackWrapperTest("") {} explicit VideoDecoderSoftwareFallbackWrapperTest( const std::string& field_trials) - : override_field_trials_(field_trials), + : field_trials_(field_trials), + env_(CreateEnvironment(&field_trials_)), fake_decoder_(new CountingFakeDecoder()), fallback_wrapper_(CreateVideoDecoderSoftwareFallbackWrapper( - std::unique_ptr(VP8Decoder::Create()), + env_, + CreateVp8Decoder(env_), std::unique_ptr(fake_decoder_))) {} class CountingFakeDecoder : public VideoDecoder { public: - bool Configure(const Settings& settings) override { + bool Configure(const Settings& /* settings */) override { ++configure_count_; return configure_return_value_; } - int32_t Decode(const EncodedImage& input_image, - int64_t render_time_ms) override { + int32_t Decode(const 
EncodedImage& /* input_image */, + int64_t /* render_time_ms */) override { ++decode_count_; return decode_return_code_; } @@ -71,7 +78,8 @@ class VideoDecoderSoftwareFallbackWrapperTest : public ::testing::Test { int release_count_ = 0; int reset_count_ = 0; }; - test::ScopedFieldTrials override_field_trials_; + test::ExplicitKeyValueConfig field_trials_; + const Environment env_; // `fake_decoder_` is owned and released by `fallback_wrapper_`. CountingFakeDecoder* fake_decoder_; std::unique_ptr fallback_wrapper_; @@ -175,15 +183,15 @@ TEST_F(VideoDecoderSoftwareFallbackWrapperTest, ForwardsReleaseCall) { TEST_F(VideoDecoderSoftwareFallbackWrapperTest, ForwardsRegisterDecodeCompleteCallback) { class FakeDecodedImageCallback : public DecodedImageCallback { - int32_t Decoded(VideoFrame& decodedImage) override { return 0; } - int32_t Decoded(webrtc::VideoFrame& decodedImage, - int64_t decode_time_ms) override { + int32_t Decoded(VideoFrame& /* decodedImage */) override { return 0; } + int32_t Decoded(webrtc::VideoFrame& /* decodedImage */, + int64_t /* decode_time_ms */) override { RTC_DCHECK_NOTREACHED(); return -1; } - void Decoded(webrtc::VideoFrame& decodedImage, - absl::optional decode_time_ms, - absl::optional qp) override { + void Decoded(webrtc::VideoFrame& /* decodedImage */, + std::optional /* decode_time_ms */, + std::optional /* qp */) override { RTC_DCHECK_NOTREACHED(); } } callback; @@ -275,7 +283,7 @@ class ForcedSoftwareDecoderFallbackTest fake_decoder_ = new CountingFakeDecoder(); sw_fallback_decoder_ = new CountingFakeDecoder(); fallback_wrapper_ = CreateVideoDecoderSoftwareFallbackWrapper( - std::unique_ptr(sw_fallback_decoder_), + env_, std::unique_ptr(sw_fallback_decoder_), std::unique_ptr(fake_decoder_)); } diff --git a/api/video_codecs/test/video_encoder_factory_template_tests.cc b/api/video_codecs/test/video_encoder_factory_template_tests.cc index 91b02aa905..9860104417 100644 --- a/api/video_codecs/test/video_encoder_factory_template_tests.cc +++ b/api/video_codecs/test/video_encoder_factory_template_tests.cc @@ -8,7 +8,17 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include +#include +#include + +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/test/mock_video_encoder.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_factory.h" #include "api/video_codecs/video_encoder_factory_template.h" #include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h" #include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h" @@ -17,6 +27,9 @@ #include "test/gmock.h" #include "test/gtest.h" +namespace webrtc { +namespace { + using ::testing::Contains; using ::testing::Each; using ::testing::Eq; @@ -25,11 +38,10 @@ using ::testing::IsEmpty; using ::testing::IsNull; using ::testing::Not; using ::testing::NotNull; +using ::testing::StrictMock; using ::testing::UnorderedElementsAre; - -namespace webrtc { -namespace { using CodecSupport = VideoEncoderFactory::CodecSupport; + const SdpVideoFormat kFooSdp("Foo"); const SdpVideoFormat kBarLowSdp("Bar", {{"profile", "low"}}); const SdpVideoFormat kBarHighSdp("Bar", {{"profile", "high"}}); @@ -38,8 +50,9 @@ struct FooEncoderTemplateAdapter { static std::vector SupportedFormats() { return {kFooSdp}; } static std::unique_ptr CreateEncoder( - const SdpVideoFormat& format) { - return std::make_unique>(); + const Environment& /* env */, + const SdpVideoFormat& /* format */) { + return std::make_unique>(); } static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) { @@ -54,8 +67,9 @@ struct BarEncoderTemplateAdapter { } static std::unique_ptr CreateEncoder( - const SdpVideoFormat& format) { - return std::make_unique>(); + const Environment& /* env */, + const SdpVideoFormat& /* format */) { + return std::make_unique>(); } static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) { @@ -67,21 +81,22 @@ struct BarEncoderTemplateAdapter { }; TEST(VideoEncoderFactoryTemplate, OneTemplateAdapterCreateEncoder) { + const Environment env = CreateEnvironment(); VideoEncoderFactoryTemplate factory; EXPECT_THAT(factory.GetSupportedFormats(), UnorderedElementsAre(kFooSdp)); - EXPECT_THAT(factory.CreateVideoEncoder(kFooSdp), NotNull()); - EXPECT_THAT(factory.CreateVideoEncoder(SdpVideoFormat("FooX")), IsNull()); + EXPECT_THAT(factory.Create(env, kFooSdp), NotNull()); + EXPECT_THAT(factory.Create(env, SdpVideoFormat("FooX")), IsNull()); } TEST(VideoEncoderFactoryTemplate, OneTemplateAdapterCodecSupport) { VideoEncoderFactoryTemplate factory; - EXPECT_THAT(factory.QueryCodecSupport(kFooSdp, absl::nullopt), + EXPECT_THAT(factory.QueryCodecSupport(kFooSdp, std::nullopt), Field(&CodecSupport::is_supported, true)); EXPECT_THAT(factory.QueryCodecSupport(kFooSdp, "L1T2"), Field(&CodecSupport::is_supported, true)); EXPECT_THAT(factory.QueryCodecSupport(kFooSdp, "S3T3"), Field(&CodecSupport::is_supported, false)); - EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat("FooX"), absl::nullopt), + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat("FooX"), std::nullopt), Field(&CodecSupport::is_supported, false)); } @@ -93,31 +108,32 @@ TEST(VideoEncoderFactoryTemplate, TwoTemplateAdaptersNoDuplicates) { } TEST(VideoEncoderFactoryTemplate, TwoTemplateAdaptersCreateEncoders) { + const Environment env = CreateEnvironment(); VideoEncoderFactoryTemplate factory; EXPECT_THAT(factory.GetSupportedFormats(), UnorderedElementsAre(kFooSdp, kBarLowSdp, kBarHighSdp)); - 
EXPECT_THAT(factory.CreateVideoEncoder(kFooSdp), NotNull()); - EXPECT_THAT(factory.CreateVideoEncoder(kBarLowSdp), NotNull()); - EXPECT_THAT(factory.CreateVideoEncoder(kBarHighSdp), NotNull()); - EXPECT_THAT(factory.CreateVideoEncoder(SdpVideoFormat("FooX")), IsNull()); - EXPECT_THAT(factory.CreateVideoEncoder(SdpVideoFormat("Bar")), NotNull()); + EXPECT_THAT(factory.Create(env, kFooSdp), NotNull()); + EXPECT_THAT(factory.Create(env, kBarLowSdp), NotNull()); + EXPECT_THAT(factory.Create(env, kBarHighSdp), NotNull()); + EXPECT_THAT(factory.Create(env, SdpVideoFormat("FooX")), IsNull()); + EXPECT_THAT(factory.Create(env, SdpVideoFormat("Bar")), NotNull()); } TEST(VideoEncoderFactoryTemplate, TwoTemplateAdaptersCodecSupport) { VideoEncoderFactoryTemplate factory; - EXPECT_THAT(factory.QueryCodecSupport(kFooSdp, absl::nullopt), + EXPECT_THAT(factory.QueryCodecSupport(kFooSdp, std::nullopt), Field(&CodecSupport::is_supported, true)); EXPECT_THAT(factory.QueryCodecSupport(kFooSdp, "L1T2"), Field(&CodecSupport::is_supported, true)); EXPECT_THAT(factory.QueryCodecSupport(kFooSdp, "S3T3"), Field(&CodecSupport::is_supported, false)); - EXPECT_THAT(factory.QueryCodecSupport(kBarLowSdp, absl::nullopt), + EXPECT_THAT(factory.QueryCodecSupport(kBarLowSdp, std::nullopt), Field(&CodecSupport::is_supported, true)); - EXPECT_THAT(factory.QueryCodecSupport(kBarHighSdp, absl::nullopt), + EXPECT_THAT(factory.QueryCodecSupport(kBarHighSdp, std::nullopt), Field(&CodecSupport::is_supported, true)); EXPECT_THAT(factory.QueryCodecSupport(kBarLowSdp, "S2T1"), Field(&CodecSupport::is_supported, true)); @@ -126,47 +142,51 @@ TEST(VideoEncoderFactoryTemplate, TwoTemplateAdaptersCodecSupport) { } TEST(VideoEncoderFactoryTemplate, LibvpxVp8) { + const Environment env = CreateEnvironment(); VideoEncoderFactoryTemplate factory; auto formats = factory.GetSupportedFormats(); EXPECT_THAT(formats.size(), 1); EXPECT_THAT(formats[0], Field(&SdpVideoFormat::name, "VP8")); EXPECT_THAT(formats[0], Field(&SdpVideoFormat::scalability_modes, Contains(ScalabilityMode::kL1T3))); - EXPECT_THAT(factory.CreateVideoEncoder(formats[0]), NotNull()); + EXPECT_THAT(factory.Create(env, formats[0]), NotNull()); } TEST(VideoEncoderFactoryTemplate, LibvpxVp9) { + const Environment env = CreateEnvironment(); VideoEncoderFactoryTemplate factory; auto formats = factory.GetSupportedFormats(); EXPECT_THAT(formats, Not(IsEmpty())); EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::name, "VP9"))); EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::scalability_modes, Contains(ScalabilityMode::kL3T3_KEY)))); - EXPECT_THAT(factory.CreateVideoEncoder(formats[0]), NotNull()); + EXPECT_THAT(factory.Create(env, formats[0]), NotNull()); } // TODO(bugs.webrtc.org/13573): When OpenH264 is no longer a conditional build // target remove this #ifdef. 
#if defined(WEBRTC_USE_H264) TEST(VideoEncoderFactoryTemplate, OpenH264) { + const Environment env = CreateEnvironment(); VideoEncoderFactoryTemplate factory; auto formats = factory.GetSupportedFormats(); EXPECT_THAT(formats, Not(IsEmpty())); EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::name, "H264"))); EXPECT_THAT(formats, Each(Field(&SdpVideoFormat::scalability_modes, Contains(ScalabilityMode::kL1T3)))); - EXPECT_THAT(factory.CreateVideoEncoder(formats[0]), NotNull()); + EXPECT_THAT(factory.Create(env, formats[0]), NotNull()); } #endif // defined(WEBRTC_USE_H264) TEST(VideoEncoderFactoryTemplate, LibaomAv1) { + const Environment env = CreateEnvironment(); VideoEncoderFactoryTemplate factory; auto formats = factory.GetSupportedFormats(); EXPECT_THAT(formats.size(), 1); EXPECT_THAT(formats[0], Field(&SdpVideoFormat::name, "AV1")); EXPECT_THAT(formats[0], Field(&SdpVideoFormat::scalability_modes, Contains(ScalabilityMode::kL3T3_KEY))); - EXPECT_THAT(factory.CreateVideoEncoder(formats[0]), NotNull()); + EXPECT_THAT(factory.Create(env, formats[0]), NotNull()); } } // namespace diff --git a/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc b/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc index b3fadcbecf..94e31ecf8b 100644 --- a/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc +++ b/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc @@ -14,18 +14,24 @@ #include #include +#include #include +#include #include -#include "absl/types/optional.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/fec_controller_override.h" #include "api/scoped_refptr.h" #include "api/test/mock_video_encoder.h" +#include "api/units/timestamp.h" #include "api/video/encoded_image.h" #include "api/video/i420_buffer.h" -#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator.h" +#include "api/video/video_codec_type.h" #include "api/video/video_frame.h" #include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" #include "api/video/video_rotation.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" @@ -34,18 +40,20 @@ #include "modules/video_coding/include/video_error_codes.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "rtc_base/fake_clock.h" +#include "test/explicit_key_value_config.h" #include "test/fake_encoder.h" #include "test/fake_texture_frame.h" -#include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" namespace webrtc { +namespace { + +using test::ExplicitKeyValueConfig; using ::testing::_; using ::testing::Return; using ::testing::ValuesIn; -namespace { const int kWidth = 320; const int kHeight = 240; const int kNumCores = 2; @@ -75,8 +83,9 @@ VideoEncoder::EncoderInfo GetEncoderInfoWithHardwareAccelerated( class FakeEncodedImageCallback : public EncodedImageCallback { public: - Result OnEncodedImage(const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info) override { + Result OnEncodedImage( + const EncodedImage& /* encoded_image */, + const CodecSpecificInfo* /* codec_specific_info */) override { ++callback_count_; return Result(Result::OK, callback_count_); } @@ -87,12 +96,13 @@ class FakeEncodedImageCallback : public EncodedImageCallback { class VideoEncoderSoftwareFallbackWrapperTestBase : public ::testing::Test { protected: VideoEncoderSoftwareFallbackWrapperTestBase( - const 
std::string& field_trials, + const Environment& env, std::unique_ptr sw_encoder) - : override_field_trials_(field_trials), + : env_(env), fake_encoder_(new CountingFakeEncoder()), wrapper_initialized_(false), fallback_wrapper_(CreateVideoEncoderSoftwareFallbackWrapper( + env_, std::move(sw_encoder), std::unique_ptr(fake_encoder_), false)) {} @@ -100,18 +110,19 @@ class VideoEncoderSoftwareFallbackWrapperTestBase : public ::testing::Test { class CountingFakeEncoder : public VideoEncoder { public: void SetFecControllerOverride( - FecControllerOverride* fec_controller_override) override { + FecControllerOverride* /* fec_controller_override */) override { // Ignored. } - int32_t InitEncode(const VideoCodec* codec_settings, - const VideoEncoder::Settings& settings) override { + int32_t InitEncode(const VideoCodec* /* codec_settings */, + const VideoEncoder::Settings& /* settings */) override { ++init_encode_count_; return init_encode_return_code_; } - int32_t Encode(const VideoFrame& frame, - const std::vector* frame_types) override { + int32_t Encode( + const VideoFrame& frame, + const std::vector* /* frame_types */) override { ++encode_count_; last_video_frame_ = frame; if (encode_complete_callback_ && @@ -132,7 +143,7 @@ class VideoEncoderSoftwareFallbackWrapperTestBase : public ::testing::Test { return WEBRTC_VIDEO_CODEC_OK; } - void SetRates(const RateControlParameters& parameters) override {} + void SetRates(const RateControlParameters& /* parameters */) override {} EncoderInfo GetEncoderInfo() const override { ++supports_native_handle_count_; @@ -155,7 +166,7 @@ class VideoEncoderSoftwareFallbackWrapperTestBase : public ::testing::Test { bool supports_native_handle_ = false; bool is_qp_trusted_ = false; std::string implementation_name_ = "fake-encoder"; - absl::optional last_video_frame_; + std::optional last_video_frame_; }; void InitEncode(); @@ -168,7 +179,7 @@ class VideoEncoderSoftwareFallbackWrapperTestBase : public ::testing::Test { fallback_wrapper_->GetEncoderInfo().implementation_name); } - test::ScopedFieldTrials override_field_trials_; + const Environment env_; FakeEncodedImageCallback callback_; // `fake_encoder_` is owned and released by `fallback_wrapper_`. 
CountingFakeEncoder* fake_encoder_; @@ -188,7 +199,7 @@ class VideoEncoderSoftwareFallbackWrapperTest explicit VideoEncoderSoftwareFallbackWrapperTest( CountingFakeEncoder* fake_sw_encoder) : VideoEncoderSoftwareFallbackWrapperTestBase( - "", + CreateEnvironment(), std::unique_ptr(fake_sw_encoder)), fake_sw_encoder_(fake_sw_encoder) { fake_sw_encoder_->implementation_name_ = "fake_sw_encoder"; @@ -203,7 +214,7 @@ void VideoEncoderSoftwareFallbackWrapperTestBase::EncodeFrame() { void VideoEncoderSoftwareFallbackWrapperTestBase::EncodeFrame( int expected_ret) { - rtc::scoped_refptr buffer = + scoped_refptr buffer = I420Buffer::Create(codec_.width, codec_.height); I420Buffer::SetBlack(buffer.get()); std::vector types(1, VideoFrameType::kVideoFrameKey); @@ -229,7 +240,7 @@ void VideoEncoderSoftwareFallbackWrapperTestBase::InitEncode() { codec_.width = kWidth; codec_.height = kHeight; codec_.VP8()->numberOfTemporalLayers = 1; - rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + rate_allocator_ = std::make_unique(env_, codec_); if (wrapper_initialized_) { fallback_wrapper_->Release(); @@ -260,7 +271,7 @@ void VideoEncoderSoftwareFallbackWrapperTestBase::UtilizeFallbackEncoder() { codec_.width = kWidth; codec_.height = kHeight; codec_.VP8()->numberOfTemporalLayers = 1; - rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + rate_allocator_ = std::make_unique(env_, codec_); if (wrapper_initialized_) { fallback_wrapper_->Release(); @@ -288,7 +299,7 @@ void VideoEncoderSoftwareFallbackWrapperTestBase::FallbackFromEncodeRequest() { codec_.width = kWidth; codec_.height = kHeight; codec_.VP8()->numberOfTemporalLayers = 1; - rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + rate_allocator_ = std::make_unique(env_, codec_); if (wrapper_initialized_) { fallback_wrapper_->Release(); } @@ -376,7 +387,7 @@ TEST_F(VideoEncoderSoftwareFallbackWrapperTest, // Encoding a frame using the fallback should arrive at the new callback. 
std::vector types(1, VideoFrameType::kVideoFrameKey); - frame_->set_timestamp(frame_->timestamp() + 1000); + frame_->set_rtp_timestamp(frame_->rtp_timestamp() + 1000); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Encode(*frame_, &types)); EXPECT_EQ(callback2.callback_count_, 1); @@ -384,7 +395,7 @@ TEST_F(VideoEncoderSoftwareFallbackWrapperTest, InitEncode(); EXPECT_EQ(&callback2, fake_encoder_->encode_complete_callback_); - frame_->set_timestamp(frame_->timestamp() + 2000); + frame_->set_rtp_timestamp(frame_->rtp_timestamp() + 2000); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_->Encode(*frame_, &types)); EXPECT_EQ(callback2.callback_count_, 2); } @@ -485,9 +496,9 @@ const char kFieldTrial[] = "WebRTC-VP8-Forced-Fallback-Encoder-v2"; class ForcedFallbackTest : public VideoEncoderSoftwareFallbackWrapperTestBase { public: - explicit ForcedFallbackTest(const std::string& field_trials) - : VideoEncoderSoftwareFallbackWrapperTestBase(field_trials, - VP8Encoder::Create()) {} + explicit ForcedFallbackTest(const Environment& env) + : VideoEncoderSoftwareFallbackWrapperTestBase(env, + CreateVp8Encoder(env)) {} ~ForcedFallbackTest() override {} @@ -511,7 +522,7 @@ class ForcedFallbackTest : public VideoEncoderSoftwareFallbackWrapperTestBase { codec_.VP8()->numberOfTemporalLayers = 1; codec_.VP8()->automaticResizeOn = true; codec_.SetFrameDropEnabled(true); - rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + rate_allocator_ = std::make_unique(env_, codec_); } void InitEncode(int width, int height) { @@ -544,21 +555,25 @@ class ForcedFallbackTest : public VideoEncoderSoftwareFallbackWrapperTestBase { CheckLastEncoderName(expected_name); } - rtc::ScopedFakeClock clock_; + ScopedFakeClock clock_; }; class ForcedFallbackTestEnabled : public ForcedFallbackTest { public: ForcedFallbackTestEnabled() - : ForcedFallbackTest(std::string(kFieldTrial) + "/Enabled-" + - std::to_string(kMinPixelsPerFrame) + "," + - std::to_string(kWidth * kHeight) + ",30000/") {} + : ForcedFallbackTest( + CreateEnvironment(std::make_unique( + std::string(kFieldTrial) + "/Enabled-" + + std::to_string(kMinPixelsPerFrame) + "," + + std::to_string(kWidth * kHeight) + ",30000/"))) {} }; class ForcedFallbackTestDisabled : public ForcedFallbackTest { public: ForcedFallbackTestDisabled() - : ForcedFallbackTest(std::string(kFieldTrial) + "/Disabled/") {} + : ForcedFallbackTest( + CreateEnvironment(std::make_unique( + std::string(kFieldTrial) + "/Disabled/"))) {} }; TEST_F(ForcedFallbackTestDisabled, NoFallbackWithoutFieldTrial) { @@ -704,8 +719,9 @@ TEST(SoftwareFallbackEncoderTest, BothRateControllersNotTrusted) { std::unique_ptr wrapper = CreateVideoEncoderSoftwareFallbackWrapper( - std::unique_ptr(sw_encoder), - std::unique_ptr(hw_encoder)); + CreateEnvironment(), std::unique_ptr(sw_encoder), + std::unique_ptr(hw_encoder), + /*prefer_temporal_support=*/false); EXPECT_FALSE(wrapper->GetEncoderInfo().has_trusted_rate_controller); } @@ -719,8 +735,9 @@ TEST(SoftwareFallbackEncoderTest, SwRateControllerTrusted) { std::unique_ptr wrapper = CreateVideoEncoderSoftwareFallbackWrapper( - std::unique_ptr(sw_encoder), - std::unique_ptr(hw_encoder)); + CreateEnvironment(), std::unique_ptr(sw_encoder), + std::unique_ptr(hw_encoder), + /*prefer_temporal_support=*/false); EXPECT_FALSE(wrapper->GetEncoderInfo().has_trusted_rate_controller); } @@ -734,8 +751,9 @@ TEST(SoftwareFallbackEncoderTest, HwRateControllerTrusted) { std::unique_ptr wrapper = CreateVideoEncoderSoftwareFallbackWrapper( - std::unique_ptr(sw_encoder), - 
std::unique_ptr(hw_encoder)); + CreateEnvironment(), std::unique_ptr(sw_encoder), + std::unique_ptr(hw_encoder), + /*prefer_temporal_support=*/false); EXPECT_TRUE(wrapper->GetEncoderInfo().has_trusted_rate_controller); VideoCodec codec_ = {}; @@ -764,8 +782,9 @@ TEST(SoftwareFallbackEncoderTest, BothRateControllersTrusted) { std::unique_ptr wrapper = CreateVideoEncoderSoftwareFallbackWrapper( - std::unique_ptr(sw_encoder), - std::unique_ptr(hw_encoder)); + CreateEnvironment(), std::unique_ptr(sw_encoder), + std::unique_ptr(hw_encoder), + /*prefer_temporal_support=*/false); EXPECT_TRUE(wrapper->GetEncoderInfo().has_trusted_rate_controller); } @@ -779,8 +798,9 @@ TEST(SoftwareFallbackEncoderTest, ReportsHardwareAccelerated) { std::unique_ptr wrapper = CreateVideoEncoderSoftwareFallbackWrapper( - std::unique_ptr(sw_encoder), - std::unique_ptr(hw_encoder)); + CreateEnvironment(), std::unique_ptr(sw_encoder), + std::unique_ptr(hw_encoder), + /*prefer_temporal_support=*/false); EXPECT_TRUE(wrapper->GetEncoderInfo().is_hardware_accelerated); VideoCodec codec_ = {}; @@ -808,8 +828,9 @@ TEST(SoftwareFallbackEncoderTest, ConfigureHardwareOnSecondAttempt) { std::unique_ptr wrapper = CreateVideoEncoderSoftwareFallbackWrapper( - std::unique_ptr(sw_encoder), - std::unique_ptr(hw_encoder)); + CreateEnvironment(), std::unique_ptr(sw_encoder), + std::unique_ptr(hw_encoder), + /*prefer_temporal_support=*/false); EXPECT_TRUE(wrapper->GetEncoderInfo().is_hardware_accelerated); // Initialize the encoder. When HW attempt fails we fallback to SW. @@ -850,7 +871,8 @@ class PreferTemporalLayersFallbackTest : public ::testing::Test { .WillRepeatedly(Return(WEBRTC_VIDEO_CODEC_OK)); wrapper_ = CreateVideoEncoderSoftwareFallbackWrapper( - std::unique_ptr(sw_), std::unique_ptr(hw_), + CreateEnvironment(), std::unique_ptr(sw_), + std::unique_ptr(hw_), /*prefer_temporal_support=*/true); codec_settings.codecType = kVideoCodecVP8; @@ -966,7 +988,7 @@ TEST_F(PreferTemporalLayersFallbackTest, PrimesEncoderOnSwitch) { FakeEncodedImageCallback callback1; class DummyFecControllerOverride : public FecControllerOverride { public: - void SetFecAllowed(bool fec_allowed) override {} + void SetFecAllowed(bool /* fec_allowed */) override {} }; DummyFecControllerOverride fec_controller_override1; VideoEncoder::RateControlParameters rate_params1; @@ -1123,15 +1145,15 @@ INSTANTIATE_TEST_SUITE_P( TEST_P(ResolutionBasedFallbackTest, VerifyForcedEncoderFallback) { const ResolutionBasedFallbackTestParams& params = GetParam(); - test::ScopedFieldTrials field_trials(params.field_trials); - auto primary = new test::FakeEncoder(Clock::GetRealTimeClock()); - auto fallback = new test::FakeEncoder(Clock::GetRealTimeClock()); - auto encoder = CreateVideoEncoderSoftwareFallbackWrapper( - std::unique_ptr(fallback), - std::unique_ptr(primary), - /*prefer_temporal_support=*/false); + const Environment env = CreateEnvironment( + std::make_unique(params.field_trials)); + auto primary = std::make_unique(env); primary->SetImplementationName("primary"); + auto fallback = std::make_unique(env); fallback->SetImplementationName("fallback"); + auto encoder = CreateVideoEncoderSoftwareFallbackWrapper( + env, std::move(fallback), std::move(primary), + /*prefer_temporal_support=*/false); VideoCodec codec; codec.codecType = params.codec_type; codec.width = params.width; diff --git a/api/video_codecs/video_codec.cc b/api/video_codecs/video_codec.cc index c6122d3f6a..7ebe0294f4 100644 --- a/api/video_codecs/video_codec.cc +++ b/api/video_codecs/video_codec.cc @@ 
-15,7 +15,11 @@ #include #include "absl/strings/match.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/simulcast_stream.h" #include "rtc_base/checks.h" +#include "rtc_base/strings/string_builder.h" namespace webrtc { namespace { @@ -27,7 +31,7 @@ constexpr char kPayloadNameAv1[] = "AV1"; constexpr char kPayloadNameAv1x[] = "AV1X"; constexpr char kPayloadNameH264[] = "H264"; constexpr char kPayloadNameGeneric[] = "Generic"; -constexpr char kPayloadNameMultiplex[] = "Multiplex"; +constexpr char kPayloadNameH265[] = "H265"; } // namespace bool VideoCodecVP8::operator==(const VideoCodecVP8& other) const { @@ -72,6 +76,32 @@ VideoCodec::VideoCodec() codec_specific_(), complexity_(VideoCodecComplexity::kComplexityNormal) {} +std::string VideoCodec::ToString() const { + char string_buf[2048]; + SimpleStringBuilder ss(string_buf); + + ss << "VideoCodec {" << "type: " << CodecTypeToPayloadString(codecType) + << ", mode: " + << (mode == VideoCodecMode::kRealtimeVideo ? "RealtimeVideo" + : "Screensharing"); + if (IsSinglecast()) { + ss << ", Singlecast: {" << width << "x" << height << " " + << ScalabilityModeToString(GetScalabilityMode()) + << (active ? ", active" : ", inactive") << "}"; + } else { + ss << ", Simulcast: {"; + for (size_t i = 0; i < numberOfSimulcastStreams; ++i) { + const SimulcastStream stream = simulcastStream[i]; + ss << "[" << stream.width << "x" << stream.height << " " + << ScalabilityModeToString(stream.GetScalabilityMode()) + << (stream.active ? ", active" : ", inactive") << "]"; + } + ss << "}"; + } + ss << "}"; + return ss.str(); +} + VideoCodecVP8* VideoCodec::VP8() { RTC_DCHECK_EQ(codecType, kVideoCodecVP8); return &codec_specific_.VP8; @@ -102,6 +132,16 @@ const VideoCodecH264& VideoCodec::H264() const { return codec_specific_.H264; } +VideoCodecAV1* VideoCodec::AV1() { + RTC_DCHECK_EQ(codecType, kVideoCodecAV1); + return &codec_specific_.AV1; +} + +const VideoCodecAV1& VideoCodec::AV1() const { + RTC_DCHECK_EQ(codecType, kVideoCodecAV1); + return codec_specific_.AV1; +} + const char* CodecTypeToPayloadString(VideoCodecType type) { switch (type) { case kVideoCodecVP8: @@ -112,10 +152,10 @@ const char* CodecTypeToPayloadString(VideoCodecType type) { return kPayloadNameAv1; case kVideoCodecH264: return kPayloadNameH264; - case kVideoCodecMultiplex: - return kPayloadNameMultiplex; case kVideoCodecGeneric: return kPayloadNameGeneric; + case kVideoCodecH265: + return kPayloadNameH265; } RTC_CHECK_NOTREACHED(); } @@ -130,8 +170,8 @@ VideoCodecType PayloadStringToCodecType(const std::string& name) { return kVideoCodecAV1; if (absl::EqualsIgnoreCase(name, kPayloadNameH264)) return kVideoCodecH264; - if (absl::EqualsIgnoreCase(name, kPayloadNameMultiplex)) - return kVideoCodecMultiplex; + if (absl::EqualsIgnoreCase(name, kPayloadNameH265)) + return kVideoCodecH265; return kVideoCodecGeneric; } diff --git a/api/video_codecs/video_codec.h b/api/video_codecs/video_codec.h index 496cfb5e22..7824d7f92c 100644 --- a/api/video_codecs/video_codec.h +++ b/api/video_codecs/video_codec.h @@ -14,10 +14,10 @@ #include #include +#include #include -#include "absl/strings/string_view.h" -#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_codec_constants.h" #include "api/video/video_codec_type.h" #include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/simulcast_stream.h" @@ -97,6 +97,16 @@ struct VideoCodecH264 { uint8_t numberOfTemporalLayers; }; +struct VideoCodecAV1 { + bool 
operator==(const VideoCodecAV1& other) const { + return automatic_resize_on == other.automatic_resize_on; + } + bool operator!=(const VideoCodecAV1& other) const { + return !(*this == other); + } + bool automatic_resize_on; +}; + // Translates from name of codec to codec type and vice versa. RTC_EXPORT const char* CodecTypeToPayloadString(VideoCodecType type); RTC_EXPORT VideoCodecType PayloadStringToCodecType(const std::string& name); @@ -105,6 +115,7 @@ union VideoCodecUnion { VideoCodecVP8 VP8; VideoCodecVP9 VP9; VideoCodecH264 H264; + VideoCodecAV1 AV1; }; enum class VideoCodecMode { kRealtimeVideo, kScreensharing }; @@ -116,13 +127,13 @@ class RTC_EXPORT VideoCodec { // Scalability mode as described in // https://www.w3.org/TR/webrtc-svc/#scalabilitymodes* - absl::optional GetScalabilityMode() const { + std::optional GetScalabilityMode() const { return scalability_mode_; } void SetScalabilityMode(ScalabilityMode scalability_mode) { scalability_mode_ = scalability_mode; } - void UnsetScalabilityMode() { scalability_mode_ = absl::nullopt; } + void UnsetScalabilityMode() { scalability_mode_ = std::nullopt; } VideoCodecComplexity GetVideoEncoderComplexity() const; void SetVideoEncoderComplexity(VideoCodecComplexity complexity_setting); @@ -130,6 +141,9 @@ class RTC_EXPORT VideoCodec { bool GetFrameDropEnabled() const; void SetFrameDropEnabled(bool enabled); + bool IsSinglecast() const { return numberOfSimulcastStreams <= 1; } + bool IsSimulcast() const { return !IsSinglecast(); } + // Public variables. TODO(hta): Make them private with accessors. VideoCodecType codecType; @@ -182,6 +196,7 @@ class RTC_EXPORT VideoCodec { bool operator==(const VideoCodec& other) const = delete; bool operator!=(const VideoCodec& other) const = delete; + std::string ToString() const; // Accessors for codec specific information. // There is a const version of each that returns a reference, @@ -193,12 +208,14 @@ class RTC_EXPORT VideoCodec { const VideoCodecVP9& VP9() const; VideoCodecH264* H264(); const VideoCodecH264& H264() const; + VideoCodecAV1* AV1(); + const VideoCodecAV1& AV1() const; private: // TODO(hta): Consider replacing the union with a pointer type. // This will allow removing the VideoCodec* types from this file. VideoCodecUnion codec_specific_; - absl::optional scalability_mode_; + std::optional scalability_mode_; // 'complexity_' indicates the CPU capability of the client. It's used to // determine encoder CPU complexity (e.g., cpu_used for VP8, VP9. and AV1). VideoCodecComplexity complexity_; diff --git a/api/video_codecs/video_decoder.cc b/api/video_codecs/video_decoder.cc index c8f40cee7f..84f365289c 100644 --- a/api/video_codecs/video_decoder.cc +++ b/api/video_codecs/video_decoder.cc @@ -10,23 +10,25 @@ #include "api/video_codecs/video_decoder.h" -#include "absl/types/optional.h" -#include "api/video/render_resolution.h" -#include "api/video/video_codec_type.h" +#include +#include +#include + +#include "api/video/video_frame.h" #include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" namespace webrtc { int32_t DecodedImageCallback::Decoded(VideoFrame& decodedImage, - int64_t decode_time_ms) { + int64_t /* decode_time_ms */) { // The default implementation ignores custom decode time value. 
return Decoded(decodedImage); } void DecodedImageCallback::Decoded(VideoFrame& decodedImage, - absl::optional decode_time_ms, - absl::optional qp) { + std::optional decode_time_ms, + std::optional /* qp */) { Decoded(decodedImage, decode_time_ms.value_or(-1)); } @@ -42,12 +44,11 @@ const char* VideoDecoder::ImplementationName() const { std::string VideoDecoder::DecoderInfo::ToString() const { char string_buf[2048]; - rtc::SimpleStringBuilder oss(string_buf); + SimpleStringBuilder oss(string_buf); oss << "DecoderInfo { " - << "prefers_late_decoding = " - << "implementation_name = '" << implementation_name << "', " - << "is_hardware_accelerated = " + << "prefers_late_decoding = " << "implementation_name = '" + << implementation_name << "', " << "is_hardware_accelerated = " << (is_hardware_accelerated ? "true" : "false") << " }"; return oss.str(); } diff --git a/api/video_codecs/video_decoder.h b/api/video_codecs/video_decoder.h index 40614ce3d7..079753d6f2 100644 --- a/api/video_codecs/video_decoder.h +++ b/api/video_codecs/video_decoder.h @@ -11,11 +11,10 @@ #ifndef API_VIDEO_CODECS_VIDEO_DECODER_H_ #define API_VIDEO_CODECS_VIDEO_DECODER_H_ -#include +#include +#include #include -#include -#include "absl/types/optional.h" #include "api/video/encoded_image.h" #include "api/video/render_resolution.h" #include "api/video/video_codec_type.h" @@ -38,8 +37,8 @@ class RTC_EXPORT DecodedImageCallback { // TODO(sakal): Remove other implementations when upstream projects have been // updated. virtual void Decoded(VideoFrame& decodedImage, - absl::optional decode_time_ms, - absl::optional qp); + std::optional decode_time_ms, + std::optional qp); }; class RTC_EXPORT VideoDecoder { @@ -67,8 +66,8 @@ class RTC_EXPORT VideoDecoder { // decoder. If value isn't present some codec-default value will be used. If // value is present and decoder doesn't have buffer pool the value will be // ignored. - absl::optional buffer_pool_size() const; - void set_buffer_pool_size(absl::optional value); + std::optional buffer_pool_size() const; + void set_buffer_pool_size(std::optional value); // When valid, user of the VideoDecoder interface shouldn't `Decode` // encoded images with render resolution larger than width and height @@ -86,7 +85,7 @@ class RTC_EXPORT VideoDecoder { void set_codec_type(VideoCodecType value) { codec_type_ = value; } private: - absl::optional buffer_pool_size_; + std::optional buffer_pool_size_; RenderResolution max_resolution_; int number_of_cores_ = 1; VideoCodecType codec_type_ = kVideoCodecGeneric; @@ -108,7 +107,7 @@ class RTC_EXPORT VideoDecoder { // TODO(bugs.webrtc.org/15444): Migrate all subclasses to Decode() without // missing_frame and delete this. 
virtual int32_t Decode(const EncodedImage& input_image, - bool missing_frames, + bool /* missing_frames */, int64_t render_time_ms) { return Decode(input_image, render_time_ms); } @@ -124,12 +123,12 @@ class RTC_EXPORT VideoDecoder { virtual const char* ImplementationName() const; }; -inline absl::optional VideoDecoder::Settings::buffer_pool_size() const { +inline std::optional VideoDecoder::Settings::buffer_pool_size() const { return buffer_pool_size_; } inline void VideoDecoder::Settings::set_buffer_pool_size( - absl::optional value) { + std::optional value) { buffer_pool_size_ = value; } diff --git a/api/video_codecs/video_decoder_factory.cc b/api/video_codecs/video_decoder_factory.cc new file mode 100644 index 0000000000..20e9fffa67 --- /dev/null +++ b/api/video_codecs/video_decoder_factory.cc @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video_codecs/video_decoder_factory.h" + +#include "api/video_codecs/sdp_video_format.h" + +namespace webrtc { + +VideoDecoderFactory::CodecSupport VideoDecoderFactory::QueryCodecSupport( + const SdpVideoFormat& format, + bool reference_scaling) const { + // Default implementation, query for supported formats and check if the + // specified format is supported. Returns false if `reference_scaling` is + // true. + return {.is_supported = !reference_scaling && + format.IsCodecInList(GetSupportedFormats())}; +} + +} // namespace webrtc diff --git a/api/video_codecs/video_decoder_factory.h b/api/video_codecs/video_decoder_factory.h index 7e1d2ee883..89433e03f7 100644 --- a/api/video_codecs/video_decoder_factory.h +++ b/api/video_codecs/video_decoder_factory.h @@ -12,17 +12,15 @@ #define API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_H_ #include -#include #include -#include "absl/types/optional.h" +#include "api/environment/environment.h" #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { -class VideoDecoder; - // A factory that creates VideoDecoders. // NOTE: This class is still under development and may change without notice. class RTC_EXPORT VideoDecoderFactory { @@ -32,6 +30,8 @@ class RTC_EXPORT VideoDecoderFactory { bool is_power_efficient = false; }; + virtual ~VideoDecoderFactory() = default; + // Returns a list of supported video formats in order of preference, to use // for signaling etc. virtual std::vector GetSupportedFormats() const = 0; @@ -47,21 +47,12 @@ class RTC_EXPORT VideoDecoderFactory { // different scalabilty modes. NOTE: QueryCodecSupport is currently an // experimental feature that is subject to change without notice. virtual CodecSupport QueryCodecSupport(const SdpVideoFormat& format, - bool reference_scaling) const { - // Default implementation, query for supported formats and check if the - // specified format is supported. Returns false if `reference_scaling` is - // true. - CodecSupport codec_support; - codec_support.is_supported = - !reference_scaling && format.IsCodecInList(GetSupportedFormats()); - return codec_support; - } + bool reference_scaling) const; - // Creates a VideoDecoder for the specified format. 
- virtual std::unique_ptr CreateVideoDecoder( + // Creates a VideoDecoder for the specified `format`. + virtual std::unique_ptr Create( + const Environment& env, const SdpVideoFormat& format) = 0; - - virtual ~VideoDecoderFactory() {} }; } // namespace webrtc diff --git a/api/video_codecs/video_decoder_factory_template.h b/api/video_codecs/video_decoder_factory_template.h index 703ae11664..fba0cec496 100644 --- a/api/video_codecs/video_decoder_factory_template.h +++ b/api/video_codecs/video_decoder_factory_template.h @@ -12,10 +12,13 @@ #define API_VIDEO_CODECS_VIDEO_DECODER_FACTORY_TEMPLATE_H_ #include +#include #include #include "absl/algorithm/container.h" #include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_decoder_factory.h" @@ -31,7 +34,8 @@ namespace webrtc { // // // Creates a decoder instance for the given format. // static std::unique_ptr -// CreateDecoder(const SdpVideoFormat& format); +// CreateDecoder(const Environment& env, +// const SdpVideoFormat& format); // // Note that the order of the template arguments matter as the factory will // return the first decoder implementation supporting the given SdpVideoFormat. @@ -42,15 +46,14 @@ class VideoDecoderFactoryTemplate : public VideoDecoderFactory { return GetSupportedFormatsInternal(); } - std::unique_ptr CreateVideoDecoder( - const SdpVideoFormat& format) override { - return CreateVideoDecoderInternal(format); + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override { + return CreateVideoDecoderInternal(env, format); } private: - bool IsFormatInList( - const SdpVideoFormat& format, - rtc::ArrayView supported_formats) const { + bool IsFormatInList(const SdpVideoFormat& format, + ArrayView supported_formats) const { return absl::c_any_of( supported_formats, [&](const SdpVideoFormat& supported_format) { return supported_format.name == format.name && @@ -77,13 +80,21 @@ class VideoDecoderFactoryTemplate : public VideoDecoderFactory { template std::unique_ptr CreateVideoDecoderInternal( + const Environment& env, const SdpVideoFormat& format) { if (IsFormatInList(format, V::SupportedFormats())) { - return V::CreateDecoder(format); + if constexpr (std::is_invocable_r_v, + decltype(V::CreateDecoder), + const Environment&, + const SdpVideoFormat&>) { + return V::CreateDecoder(env, format); + } else { + return V::CreateDecoder(format); + } } if constexpr (sizeof...(Vs) > 0) { - return CreateVideoDecoderInternal(format); + return CreateVideoDecoderInternal(env, format); } return nullptr; diff --git a/api/video_codecs/video_decoder_factory_template_dav1d_adapter.h b/api/video_codecs/video_decoder_factory_template_dav1d_adapter.h index bffbdc43d3..1d38e99c82 100644 --- a/api/video_codecs/video_decoder_factory_template_dav1d_adapter.h +++ b/api/video_codecs/video_decoder_factory_template_dav1d_adapter.h @@ -14,22 +14,21 @@ #include #include -#include "api/video_codecs/av1_profile.h" +#include "api/environment/environment.h" #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder.h" #include "modules/video_coding/codecs/av1/dav1d_decoder.h" namespace webrtc { struct Dav1dDecoderTemplateAdapter { static std::vector SupportedFormats() { - return {SdpVideoFormat("AV1"), - SdpVideoFormat( - "AV1", {{kAV1FmtpProfile, - AV1ProfileToString(AV1Profile::kProfile1).data()}})}; + return {SdpVideoFormat::AV1Profile0(), 
SdpVideoFormat::AV1Profile1()}; } static std::unique_ptr CreateDecoder( - const SdpVideoFormat& format) { - return CreateDav1dDecoder(); + const Environment& env, + const SdpVideoFormat& /* format */) { + return CreateDav1dDecoder(env); } }; diff --git a/api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h b/api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h index 0c45a4b622..b5aac982aa 100644 --- a/api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h +++ b/api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h @@ -14,18 +14,21 @@ #include #include +#include "api/environment/environment.h" #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" namespace webrtc { struct LibvpxVp8DecoderTemplateAdapter { static std::vector SupportedFormats() { - return {SdpVideoFormat("VP8")}; + return {SdpVideoFormat::VP8()}; } static std::unique_ptr CreateDecoder( - const SdpVideoFormat& format) { - return VP8Decoder::Create(); + const Environment& env, + const SdpVideoFormat& /* format */) { + return CreateVp8Decoder(env); } }; } // namespace webrtc diff --git a/api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h b/api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h index e0ec0010be..7e80988888 100644 --- a/api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h +++ b/api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h @@ -14,6 +14,8 @@ #include #include +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" namespace webrtc { @@ -23,7 +25,7 @@ struct LibvpxVp9DecoderTemplateAdapter { } static std::unique_ptr CreateDecoder( - const SdpVideoFormat& format) { + const SdpVideoFormat& /* format */) { return VP9Decoder::Create(); } }; diff --git a/api/video_codecs/video_decoder_factory_template_open_h264_adapter.h b/api/video_codecs/video_decoder_factory_template_open_h264_adapter.h index 2746bde132..c17157eed6 100644 --- a/api/video_codecs/video_decoder_factory_template_open_h264_adapter.h +++ b/api/video_codecs/video_decoder_factory_template_open_h264_adapter.h @@ -14,6 +14,8 @@ #include #include +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder.h" #include "modules/video_coding/codecs/h264/include/h264.h" namespace webrtc { @@ -30,7 +32,7 @@ struct OpenH264DecoderTemplateAdapter { } static std::unique_ptr CreateDecoder( - const SdpVideoFormat& format) { + const SdpVideoFormat& /* format */) { #if defined(WEBRTC_USE_H264) return H264Decoder::Create(); diff --git a/api/video_codecs/video_decoder_software_fallback_wrapper.cc b/api/video_codecs/video_decoder_software_fallback_wrapper.cc index c52ddbe511..5d5eeaa182 100644 --- a/api/video_codecs/video_decoder_software_fallback_wrapper.cc +++ b/api/video_codecs/video_decoder_software_fallback_wrapper.cc @@ -12,17 +12,21 @@ #include +#include #include #include #include +#include "api/environment/environment.h" +#include "api/field_trials_view.h" #include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "api/video_codecs/video_decoder.h" #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/trace_event.h" -#include "system_wrappers/include/field_trial.h" #include 
"system_wrappers/include/metrics.h" namespace webrtc { @@ -34,6 +38,7 @@ constexpr size_t kMaxConsequtiveHwErrors = 4; class VideoDecoderSoftwareFallbackWrapper final : public VideoDecoder { public: VideoDecoderSoftwareFallbackWrapper( + const Environment& env, std::unique_ptr sw_fallback_decoder, std::unique_ptr hw_decoder); ~VideoDecoderSoftwareFallbackWrapper() override; @@ -67,6 +72,7 @@ class VideoDecoderSoftwareFallbackWrapper final : public VideoDecoder { } decoder_type_; std::unique_ptr hw_decoder_; + const bool force_sw_decoder_fallback_; Settings decoder_settings_; const std::unique_ptr fallback_decoder_; const std::string fallback_implementation_name_; @@ -76,10 +82,13 @@ class VideoDecoderSoftwareFallbackWrapper final : public VideoDecoder { }; VideoDecoderSoftwareFallbackWrapper::VideoDecoderSoftwareFallbackWrapper( + const Environment& env, std::unique_ptr sw_fallback_decoder, std::unique_ptr hw_decoder) : decoder_type_(DecoderType::kNone), hw_decoder_(std::move(hw_decoder)), + force_sw_decoder_fallback_( + env.field_trials().IsEnabled("WebRTC-Video-ForcedSwDecoderFallback")), fallback_decoder_(std::move(sw_fallback_decoder)), fallback_implementation_name_( fallback_decoder_->GetDecoderInfo().implementation_name + @@ -94,7 +103,7 @@ VideoDecoderSoftwareFallbackWrapper::~VideoDecoderSoftwareFallbackWrapper() = bool VideoDecoderSoftwareFallbackWrapper::Configure(const Settings& settings) { decoder_settings_ = settings; - if (webrtc::field_trial::IsEnabled("WebRTC-Video-ForcedSwDecoderFallback")) { + if (force_sw_decoder_fallback_) { RTC_LOG(LS_INFO) << "Forced software decoder fallback enabled."; RTC_DCHECK(decoder_type_ == DecoderType::kNone); return InitFallbackDecoder(); @@ -166,8 +175,8 @@ void VideoDecoderSoftwareFallbackWrapper::UpdateFallbackDecoderHistograms() { RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "H264", hw_decoded_frames_since_last_fallback_); break; - case kVideoCodecMultiplex: - RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "Multiplex", + case kVideoCodecH265: + RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "H265", hw_decoded_frames_since_last_fallback_); break; } @@ -272,10 +281,11 @@ VideoDecoder& VideoDecoderSoftwareFallbackWrapper::active_decoder() const { } // namespace std::unique_ptr CreateVideoDecoderSoftwareFallbackWrapper( + const Environment& env, std::unique_ptr sw_fallback_decoder, std::unique_ptr hw_decoder) { return std::make_unique( - std::move(sw_fallback_decoder), std::move(hw_decoder)); + env, std::move(sw_fallback_decoder), std::move(hw_decoder)); } } // namespace webrtc diff --git a/api/video_codecs/video_decoder_software_fallback_wrapper.h b/api/video_codecs/video_decoder_software_fallback_wrapper.h index 3f44e02b26..fa42517bdd 100644 --- a/api/video_codecs/video_decoder_software_fallback_wrapper.h +++ b/api/video_codecs/video_decoder_software_fallback_wrapper.h @@ -13,6 +13,7 @@ #include +#include "api/environment/environment.h" #include "api/video_codecs/video_decoder.h" #include "rtc_base/system/rtc_export.h" @@ -23,6 +24,7 @@ namespace webrtc { // hardware restrictions, such as max resolution. 
RTC_EXPORT std::unique_ptr CreateVideoDecoderSoftwareFallbackWrapper( + const Environment& env, std::unique_ptr sw_fallback_decoder, std::unique_ptr hw_decoder); diff --git a/api/video_codecs/video_encoder.cc b/api/video_codecs/video_encoder.cc index b0fe078b37..fcbb250584 100644 --- a/api/video_codecs/video_encoder.cc +++ b/api/video_codecs/video_encoder.cc @@ -13,7 +13,19 @@ #include #include - +#include +#include +#include +#include +#include + +#include "absl/container/inlined_vector.h" +#include "api/fec_controller_override.h" +#include "api/units/data_rate.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_frame_buffer.h" +#include "api/video_codecs/video_codec.h" #include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" @@ -108,9 +120,7 @@ VideoEncoder::EncoderInfo::EncoderInfo(const EncoderInfo&) = default; VideoEncoder::EncoderInfo::~EncoderInfo() = default; std::string VideoEncoder::EncoderInfo::ToString() const { - char string_buf[2048]; - rtc::SimpleStringBuilder oss(string_buf); - + StringBuilder oss; oss << "EncoderInfo { " "ScalingSettings { "; if (scaling_settings.thresholds) { @@ -150,11 +160,11 @@ std::string VideoEncoder::EncoderInfo::ToString() const { if (!fractions.empty()) { first = false; oss << "[ "; - for (size_t i = 0; i < fractions.size(); ++i) { - if (i > 0) { + for (size_t j = 0; j < fractions.size(); ++j) { + if (j > 0) { oss << ", "; } - oss << (static_cast(fractions[i]) / kMaxFramerateFraction); + oss << (static_cast(fractions[j]) / kMaxFramerateFraction); } oss << "] "; } @@ -228,7 +238,7 @@ bool VideoEncoder::EncoderInfo::operator==(const EncoderInfo& rhs) const { return true; } -absl::optional +std::optional VideoEncoder::EncoderInfo::GetEncoderBitrateLimitsForResolution( int frame_size_pixels) const { std::vector bitrate_limits = @@ -257,11 +267,11 @@ VideoEncoder::EncoderInfo::GetEncoderBitrateLimitsForResolution( } if (bitrate_limits[i].frame_size_pixels >= frame_size_pixels) { - return absl::optional(bitrate_limits[i]); + return std::optional(bitrate_limits[i]); } } - return absl::nullopt; + return std::nullopt; } VideoEncoder::RateControlParameters::RateControlParameters() @@ -298,7 +308,7 @@ bool VideoEncoder::RateControlParameters::operator!=( VideoEncoder::RateControlParameters::~RateControlParameters() = default; void VideoEncoder::SetFecControllerOverride( - FecControllerOverride* fec_controller_override) {} + FecControllerOverride* /* fec_controller_override */) {} int32_t VideoEncoder::InitEncode(const VideoCodec* codec_settings, int32_t number_of_cores, @@ -323,11 +333,11 @@ int VideoEncoder::InitEncode(const VideoCodec* codec_settings, settings.max_payload_size); } -void VideoEncoder::OnPacketLossRateUpdate(float packet_loss_rate) {} +void VideoEncoder::OnPacketLossRateUpdate(float /* packet_loss_rate */) {} -void VideoEncoder::OnRttUpdate(int64_t rtt_ms) {} +void VideoEncoder::OnRttUpdate(int64_t /* rtt_ms */) {} void VideoEncoder::OnLossNotification( - const LossNotification& loss_notification) {} + const LossNotification& /* loss_notification */) {} } // namespace webrtc diff --git a/api/video_codecs/video_encoder.h b/api/video_codecs/video_encoder.h index 49ea6e1c0e..44a75162c9 100644 --- a/api/video_codecs/video_encoder.h +++ b/api/video_codecs/video_encoder.h @@ -11,21 +11,23 @@ #ifndef API_VIDEO_CODECS_VIDEO_ENCODER_H_ #define API_VIDEO_CODECS_VIDEO_ENCODER_H_ +#include +#include #include -#include +#include #include #include #include 
"absl/container/inlined_vector.h" -#include "absl/types/optional.h" #include "api/fec_controller_override.h" #include "api/units/data_rate.h" #include "api/video/encoded_image.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_codec_constants.h" #include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" #include "api/video_codecs/video_codec.h" -#include "rtc_base/checks.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -80,7 +82,7 @@ class RTC_EXPORT EncodedImageCallback { const EncodedImage& encoded_image, const CodecSpecificInfo* codec_specific_info) = 0; - virtual void OnDroppedFrame(DropReason reason) {} + virtual void OnDroppedFrame(DropReason /* reason */) {} }; class RTC_EXPORT VideoEncoder { @@ -100,7 +102,7 @@ class RTC_EXPORT VideoEncoder { struct KOff {}; public: - // TODO(bugs.webrtc.org/9078): Since absl::optional should be trivially copy + // TODO(bugs.webrtc.org/9078): Since std::optional should be trivially copy // constructible, this magic value can likely be replaced by a constexpr // ScalingSettings value. static constexpr KOff kOff = {}; @@ -111,7 +113,7 @@ class RTC_EXPORT VideoEncoder { ScalingSettings(KOff); // NOLINT(runtime/explicit) ~ScalingSettings(); - absl::optional thresholds; + std::optional thresholds; // We will never ask for a resolution lower than this. // TODO(kthelgason): Lower this limit when better testing @@ -240,8 +242,8 @@ class RTC_EXPORT VideoEncoder { // Obtains the limits from `resolution_bitrate_limits` that best matches the // `frame_size_pixels`. - absl::optional - GetEncoderBitrateLimitsForResolution(int frame_size_pixels) const; + std::optional GetEncoderBitrateLimitsForResolution( + int frame_size_pixels) const; // If true, this encoder has internal support for generating simulcast // streams. Otherwise, an adapter class will be needed. @@ -258,7 +260,12 @@ class RTC_EXPORT VideoEncoder { // Indicates whether or not QP value encoder writes into frame/slice/tile // header can be interpreted as average frame/slice/tile QP. - absl::optional is_qp_trusted; + std::optional is_qp_trusted; + + // The minimum QP that the encoder is expected to use with the current + // configuration. This may be used to determine if the encoder has reached + // its target video quality for static screenshare content. + std::optional min_qp; }; struct RTC_EXPORT RateControlParameters { @@ -300,7 +307,7 @@ class RTC_EXPORT VideoEncoder { // all decodable. // `false` if some dependencies were undecodable, `true` if all dependencies // were decodable, and `nullopt` if the dependencies are unknown. - absl::optional dependencies_of_last_received_decodable; + std::optional dependencies_of_last_received_decodable; // Describes whether the received frame was decodable. // `false` if some dependency was undecodable or if some packet belonging // to the last received frame was missed. @@ -308,7 +315,7 @@ class RTC_EXPORT VideoEncoder { // to the last received frame were received. // `nullopt` if no packet belonging to the last frame was missed, but the // last packet in the frame was not yet received. - absl::optional last_received_decodable; + std::optional last_received_decodable; }; // Negotiated capabilities which the VideoEncoder may expect the other @@ -332,7 +339,7 @@ class RTC_EXPORT VideoEncoder { size_t max_payload_size; // Experimental API - currently only supported by LibvpxVp8Encoder and // the OpenH264 encoder. If set, limits the number of encoder threads. 
- absl::optional encoder_thread_limit; + std::optional encoder_thread_limit; }; static VideoCodecVP8 GetDefaultVp8Settings(); diff --git a/api/video_codecs/video_encoder_factory.h b/api/video_codecs/video_encoder_factory.h index d28a2a4035..8986e920d9 100644 --- a/api/video_codecs/video_encoder_factory.h +++ b/api/video_codecs/video_encoder_factory.h @@ -12,18 +12,18 @@ #define API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_H_ #include +#include #include #include -#include "absl/types/optional.h" +#include "api/environment/environment.h" #include "api/units/data_rate.h" #include "api/video/render_resolution.h" #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_encoder.h" namespace webrtc { -class VideoEncoder; - // A factory that creates VideoEncoders. // NOTE: This class is still under development and may change without notice. class VideoEncoderFactory { @@ -48,19 +48,19 @@ class VideoEncoderFactory { // Called every time the available bitrate is updated. Should return a // non-empty if an encoder switch should be performed. - virtual absl::optional OnAvailableBitrate( + virtual std::optional OnAvailableBitrate( const DataRate& rate) = 0; // Called every time the encoder input resolution change. Should return a // non-empty if an encoder switch should be performed. - virtual absl::optional OnResolutionChange( - const RenderResolution& resolution) { - return absl::nullopt; + virtual std::optional OnResolutionChange( + const RenderResolution& /* resolution */) { + return std::nullopt; } // Called if the currently used encoder reports itself as broken. Should // return a non-empty if an encoder switch should be performed. - virtual absl::optional OnEncoderBroken() = 0; + virtual std::optional OnEncoderBroken() = 0; }; // Returns a list of supported video formats in order of preference, to use @@ -84,7 +84,7 @@ class VideoEncoderFactory { // subject to change without notice. virtual CodecSupport QueryCodecSupport( const SdpVideoFormat& format, - absl::optional scalability_mode) const { + std::optional scalability_mode) const { // Default implementation, query for supported formats and check if the // specified format is supported. Returns false if scalability_mode is // specified. @@ -96,7 +96,8 @@ class VideoEncoderFactory { } // Creates a VideoEncoder for the specified format. - virtual std::unique_ptr CreateVideoEncoder( + virtual std::unique_ptr Create( + const Environment& env, const SdpVideoFormat& format) = 0; // This method creates a EncoderSelector to use for a VideoSendStream. diff --git a/api/video_codecs/video_encoder_factory_interface.h b/api/video_codecs/video_encoder_factory_interface.h new file mode 100644 index 0000000000..88d3913c52 --- /dev/null +++ b/api/video_codecs/video_encoder_factory_interface.h @@ -0,0 +1,104 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_INTERFACE_H_ +#define API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_INTERFACE_H_ + +#include +#include +#include +#include +#include +#include + +#include "api/units/time_delta.h" +#include "api/video/resolution.h" +#include "api/video/video_frame_buffer.h" +#include "api/video_codecs/video_encoder_interface.h" +#include "api/video_codecs/video_encoding_general.h" +#include "rtc_base/numerics/rational.h" + +namespace webrtc { +using FrameType = VideoEncoderInterface::FrameType; + +// NOTE: This class is still under development and may change without notice. +class VideoEncoderFactoryInterface { + public: + enum class RateControlMode { kCqp, kCbr }; + + struct Capabilities { + struct PredictionConstraints { + enum class BufferSpaceType { + kMultiInstance, // multiple independent sets of buffers + kMultiKeyframe, // single set of buffers, but can store multiple + // keyframes simultaneously. + kSingleKeyframe // single set of buffers, can only store one keyframe + // at a time. + }; + + int num_buffers; + int max_references; + int max_temporal_layers; + + BufferSpaceType buffer_space_type; + int max_spatial_layers; + std::vector scaling_factors; + + std::vector supported_frame_types; + } prediction_constraints; + + struct InputConstraints { + Resolution min; + Resolution max; + int pixel_alignment; + std::vector input_formats; + } input_constraints; + + std::vector encoding_formats; + + struct BitrateControl { + std::pair qp_range; + std::vector rc_modes; + } rate_control; + + struct Performance { + bool encode_on_calling_thread; + std::pair min_max_effort_level; + } performance; + }; + + struct StaticEncoderSettings { + struct Cqp {}; + struct Cbr { + // TD: Should there be an intial buffer size? + TimeDelta max_buffer_size; + TimeDelta target_buffer_size; + }; + + Resolution max_encode_dimensions; + EncodingFormat encoding_format; + std::variant rc_mode; + int max_number_of_threads; + }; + + virtual ~VideoEncoderFactoryInterface() = default; + + virtual std::string CodecName() const = 0; + virtual std::string ImplementationName() const = 0; + virtual std::map CodecSpecifics() const = 0; + + virtual Capabilities GetEncoderCapabilities() const = 0; + virtual std::unique_ptr CreateEncoder( + const StaticEncoderSettings& settings, + const std::map& encoder_specific_settings) = 0; +}; + +} // namespace webrtc +#endif // API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_INTERFACE_H_ diff --git a/api/video_codecs/video_encoder_factory_template.h b/api/video_codecs/video_encoder_factory_template.h index 10212ac816..f58a36ed87 100644 --- a/api/video_codecs/video_encoder_factory_template.h +++ b/api/video_codecs/video_encoder_factory_template.h @@ -12,11 +12,14 @@ #define API_VIDEO_CODECS_VIDEO_ENCODER_FACTORY_TEMPLATE_H_ #include +#include #include #include #include "absl/algorithm/container.h" #include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" @@ -34,7 +37,8 @@ namespace webrtc { // // // Creates an encoder instance for the given format. // static std::unique_ptr -// CreateEncoder(const SdpVideoFormat& format); +// CreateEncoder(const Environment& env, +// const SdpVideoFormat& format); // // // Returns true if the encoder supports the given scalability mode. 
// static bool @@ -50,30 +54,29 @@ class VideoEncoderFactoryTemplate : public VideoEncoderFactory { return GetSupportedFormatsInternal(); } - std::unique_ptr CreateVideoEncoder( - const SdpVideoFormat& format) override { + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override { // We fuzzy match the specified format for both valid and not so valid // reasons. The valid reason is that there are many standardized codec // specific fmtp parameters that have not been implemented, and in those // cases we should not fail to instantiate an encoder just because we don't // recognize the parameter. The not so valid reason is that we have started // adding parameters completely unrelated to the SDP to the SdpVideoFormat. - // TODO(bugs.webrtc.org/13868): Remove FuzzyMatchSdpVideoFormat - absl::optional matched = + // TODO: bugs.webrtc.org/13868 - Remove FuzzyMatchSdpVideoFormat + std::optional matched = FuzzyMatchSdpVideoFormat(GetSupportedFormats(), format); - return CreateVideoEncoderInternal(matched.value_or(format)); + return CreateInternal(env, matched.value_or(format)); } CodecSupport QueryCodecSupport( const SdpVideoFormat& format, - absl::optional scalability_mode) const override { + std::optional scalability_mode) const override { return QueryCodecSupportInternal(format, scalability_mode); } private: - bool IsFormatInList( - const SdpVideoFormat& format, - rtc::ArrayView supported_formats) const { + bool IsFormatInList(const SdpVideoFormat& format, + ArrayView supported_formats) const { return absl::c_any_of( supported_formats, [&](const SdpVideoFormat& supported_format) { return supported_format.name == format.name && @@ -83,11 +86,11 @@ class VideoEncoderFactoryTemplate : public VideoEncoderFactory { template bool IsScalabilityModeSupported( - const absl::optional& scalability_mode_string) const { + const std::optional& scalability_mode_string) const { if (!scalability_mode_string.has_value()) { return true; } - absl::optional scalability_mode = + std::optional scalability_mode = ScalabilityModeFromString(*scalability_mode_string); return scalability_mode.has_value() && V::IsScalabilityModeSupported(*scalability_mode); @@ -111,14 +114,14 @@ class VideoEncoderFactoryTemplate : public VideoEncoderFactory { } template - std::unique_ptr CreateVideoEncoderInternal( - const SdpVideoFormat& format) { + std::unique_ptr CreateInternal(const Environment& env, + const SdpVideoFormat& format) { if (IsFormatInList(format, V::SupportedFormats())) { - return V::CreateEncoder(format); + return V::CreateEncoder(env, format); } if constexpr (sizeof...(Vs) > 0) { - return CreateVideoEncoderInternal(format); + return CreateInternal(env, format); } return nullptr; @@ -127,7 +130,7 @@ class VideoEncoderFactoryTemplate : public VideoEncoderFactory { template CodecSupport QueryCodecSupportInternal( const SdpVideoFormat& format, - const absl::optional& scalability_mode) const { + const std::optional& scalability_mode) const { if (IsFormatInList(format, V::SupportedFormats())) { return {.is_supported = IsScalabilityModeSupported(scalability_mode)}; } diff --git a/api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h b/api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h index 417df1e192..10cc1cc152 100644 --- a/api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h +++ b/api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h @@ -15,7 +15,10 @@ #include #include "absl/container/inlined_vector.h" +#include 
"api/environment/environment.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_encoder.h" #include "modules/video_coding/codecs/av1/av1_svc_config.h" #include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" @@ -24,13 +27,13 @@ struct LibaomAv1EncoderTemplateAdapter { static std::vector SupportedFormats() { absl::InlinedVector scalability_modes = LibaomAv1EncoderSupportedScalabilityModes(); - return { - SdpVideoFormat("AV1", SdpVideoFormat::Parameters(), scalability_modes)}; + return {SdpVideoFormat(SdpVideoFormat::AV1Profile0(), scalability_modes)}; } static std::unique_ptr CreateEncoder( - const SdpVideoFormat& format) { - return CreateLibaomAv1Encoder(); + const Environment& env, + const SdpVideoFormat& /* format */) { + return CreateLibaomAv1Encoder(env); } static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) { diff --git a/api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h b/api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h index 0f0a9bacd5..6e8e290a1d 100644 --- a/api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h +++ b/api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h @@ -15,7 +15,10 @@ #include #include "absl/container/inlined_vector.h" +#include "api/environment/environment.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_encoder.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/codecs/vp8/vp8_scalability.h" @@ -28,13 +31,13 @@ struct LibvpxVp8EncoderTemplateAdapter { scalability_modes.push_back(scalability_mode); } - return { - SdpVideoFormat("VP8", SdpVideoFormat::Parameters(), scalability_modes)}; + return {SdpVideoFormat(SdpVideoFormat::VP8(), scalability_modes)}; } static std::unique_ptr CreateEncoder( - const SdpVideoFormat& format) { - return VP8Encoder::Create(); + const Environment& env, + const SdpVideoFormat& /* format */) { + return CreateVp8Encoder(env); } static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) { diff --git a/api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h b/api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h index b16989c8c7..5e3dba0799 100644 --- a/api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h +++ b/api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h @@ -14,6 +14,11 @@ #include #include +#include "api/environment/environment.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/vp9_profile.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" namespace webrtc { @@ -23,8 +28,11 @@ struct LibvpxVp9EncoderTemplateAdapter { } static std::unique_ptr CreateEncoder( + const Environment& env, const SdpVideoFormat& format) { - return VP9Encoder::Create(cricket::CreateVideoCodec(format)); + return CreateVp9Encoder(env, + {.profile = ParseSdpForVP9Profile(format.parameters) + .value_or(VP9Profile::kProfile0)}); } static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) { diff --git a/api/video_codecs/video_encoder_factory_template_open_h264_adapter.h b/api/video_codecs/video_encoder_factory_template_open_h264_adapter.h index 6995b27800..837ef6ef98 100644 --- a/api/video_codecs/video_encoder_factory_template_open_h264_adapter.h 
+++ b/api/video_codecs/video_encoder_factory_template_open_h264_adapter.h @@ -14,6 +14,10 @@ #include #include +#include "api/environment/environment.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_encoder.h" #include "modules/video_coding/codecs/h264/include/h264.h" namespace webrtc { @@ -29,15 +33,17 @@ struct OpenH264EncoderTemplateAdapter { } static std::unique_ptr CreateEncoder( - const SdpVideoFormat& format) { + [[maybe_unused]] const Environment& env, + [[maybe_unused]] const SdpVideoFormat& format) { #if defined(WEBRTC_USE_H264) - return H264Encoder::Create(cricket::CreateVideoCodec(format)); + return CreateH264Encoder(env, H264EncoderSettings::Parse(format)); #else return nullptr; #endif } - static bool IsScalabilityModeSupported(ScalabilityMode scalability_mode) { + static bool IsScalabilityModeSupported( + [[maybe_unused]] ScalabilityMode scalability_mode) { #if defined(WEBRTC_USE_H264) return H264Encoder::SupportsScalabilityMode(scalability_mode); #else diff --git a/api/video_codecs/video_encoder_interface.h b/api/video_codecs/video_encoder_interface.h new file mode 100644 index 0000000000..b0b18eeb80 --- /dev/null +++ b/api/video_codecs/video_encoder_interface.h @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_CODECS_VIDEO_ENCODER_INTERFACE_H_ +#define API_VIDEO_CODECS_VIDEO_ENCODER_INTERFACE_H_ + +#include +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/scoped_refptr.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/resolution.h" +#include "api/video/video_frame_buffer.h" +#include "api/video_codecs/video_codec.h" + +namespace webrtc { +// NOTE: This class is still under development and may change without notice. 
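Taken together, the adapter changes mean a factory composed from the template is now queried via Create(env, format) rather than CreateVideoEncoder(format). A hedged sketch of typical composition, using the adapter names from the hunks above; the single-string SdpVideoFormat constructor is assumed.

```cpp
// Sketch only: composes the template factory from the adapters above and
// creates a VP9 encoder through the Environment-taking entry point.
#include <memory>

#include "api/environment/environment.h"
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_encoder_factory_template.h"
#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"

std::unique_ptr<webrtc::VideoEncoder> CreateVp9(
    const webrtc::Environment& env) {
  webrtc::VideoEncoderFactoryTemplate<
      webrtc::LibvpxVp8EncoderTemplateAdapter,
      webrtc::LibvpxVp9EncoderTemplateAdapter,
      webrtc::OpenH264EncoderTemplateAdapter,
      webrtc::LibaomAv1EncoderTemplateAdapter>
      factory;
  // Create() fuzzy-matches the format against SupportedFormats() and then
  // forwards `env` to the matching adapter's CreateEncoder(env, format).
  return factory.Create(env, webrtc::SdpVideoFormat("VP9"));
}
```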
+class VideoEncoderInterface { + public: + virtual ~VideoEncoderInterface() = default; + enum class FrameType { kKeyframe, kStartFrame, kDeltaFrame }; + + struct EncodingError {}; + struct EncodedData { + FrameType frame_type; + int encoded_qp; + }; + using EncodeResult = std::variant; + + struct FrameOutput { + virtual ~FrameOutput() = default; + virtual ArrayView GetBitstreamOutputBuffer(DataSize size) = 0; + virtual void EncodeComplete(const EncodeResult& encode_result) = 0; + }; + + struct TemporalUnitSettings { + VideoCodecMode content_hint = VideoCodecMode::kRealtimeVideo; + Timestamp presentation_timestamp; + }; + + struct FrameEncodeSettings { + struct Cbr { + TimeDelta duration; + DataRate target_bitrate; + }; + + struct Cqp { + int target_qp; + }; + + std::variant rate_options; + + FrameType frame_type = FrameType::kDeltaFrame; + int temporal_id = 0; + int spatial_id = 0; + Resolution resolution; + std::vector reference_buffers; + std::optional update_buffer; + int effort_level = 0; + + std::unique_ptr frame_output; + }; + + virtual void Encode(scoped_refptr frame_buffer, + const TemporalUnitSettings& settings, + std::vector frame_settings) = 0; +}; + +} // namespace webrtc +#endif // API_VIDEO_CODECS_VIDEO_ENCODER_INTERFACE_H_ diff --git a/api/video_codecs/video_encoder_software_fallback_wrapper.cc b/api/video_codecs/video_encoder_software_fallback_wrapper.cc index d35c9f9950..1ef2c609df 100644 --- a/api/video_codecs/video_encoder_software_fallback_wrapper.cc +++ b/api/video_codecs/video_encoder_software_fallback_wrapper.cc @@ -14,25 +14,29 @@ #include #include +#include +#include #include +#include #include #include "absl/strings/match.h" -#include "absl/types/optional.h" +#include "api/environment/environment.h" #include "api/fec_controller_override.h" -#include "api/transport/field_trial_based_config.h" -#include "api/video/i420_buffer.h" -#include "api/video/video_bitrate_allocation.h" +#include "api/field_trials_view.h" +#include "api/scoped_refptr.h" +#include "api/video/video_codec_type.h" #include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" -#include "media/base/video_common.h" #include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/include/video_error_codes_utils.h" #include "modules/video_coding/utility/simulcast_utility.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { @@ -79,14 +83,14 @@ struct ForcedFallbackParams { const char kVp8ForceFallbackEncoderFieldTrial[] = "WebRTC-VP8-Forced-Fallback-Encoder-v2"; -absl::optional ParseFallbackParamsFromFieldTrials( +std::optional ParseFallbackParamsFromFieldTrials( + const FieldTrialsView& field_trials, const VideoEncoder& main_encoder) { // Ignore WebRTC-VP8-Forced-Fallback-Encoder-v2 if // WebRTC-Video-EncoderFallbackSettings is present. 
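The experimental VideoEncoderInterface above is output-buffer driven: the caller supplies a FrameOutput per frame and fills FrameEncodeSettings explicitly. A hedged sketch of encoding one CBR keyframe; the encoder and frame buffer are placeholders, and the interface is explicitly marked as subject to change.

```cpp
// Sketch only: `encoder` implements VideoEncoderInterface and `buffer` is
// an application-provided VideoFrameBuffer.
#include <cstdint>
#include <memory>
#include <utility>
#include <vector>

#include "api/array_view.h"
#include "api/scoped_refptr.h"
#include "api/units/data_rate.h"
#include "api/units/data_size.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "api/video/video_frame_buffer.h"
#include "api/video_codecs/video_encoder_interface.h"

// Minimal FrameOutput that stores the produced bitstream in a std::vector.
struct VectorFrameOutput : webrtc::VideoEncoderInterface::FrameOutput {
  webrtc::ArrayView<uint8_t> GetBitstreamOutputBuffer(
      webrtc::DataSize size) override {
    data.resize(size.bytes());
    return webrtc::ArrayView<uint8_t>(data.data(), data.size());
  }
  void EncodeComplete(
      const webrtc::VideoEncoderInterface::EncodeResult& /* result */)
      override {}
  std::vector<uint8_t> data;
};

void EncodeOneKeyframe(
    webrtc::VideoEncoderInterface& encoder,
    webrtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer) {
  using FrameEncodeSettings =
      webrtc::VideoEncoderInterface::FrameEncodeSettings;

  webrtc::VideoEncoderInterface::TemporalUnitSettings unit;
  unit.presentation_timestamp = webrtc::Timestamp::Millis(0);

  FrameEncodeSettings frame;
  frame.rate_options = FrameEncodeSettings::Cbr{
      .duration = webrtc::TimeDelta::Millis(33),
      .target_bitrate = webrtc::DataRate::KilobitsPerSec(500)};
  frame.frame_type = webrtc::VideoEncoderInterface::FrameType::kKeyframe;
  frame.resolution = {.width = 640, .height = 360};
  frame.update_buffer = 0;  // Store the keyframe in reference buffer slot 0.
  frame.frame_output = std::make_unique<VectorFrameOutput>();

  std::vector<FrameEncodeSettings> frames;
  frames.push_back(std::move(frame));
  encoder.Encode(buffer, unit, std::move(frames));
}
```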
FieldTrialOptional resolution_threshold_px("resolution_threshold_px"); - ParseFieldTrial( - {&resolution_threshold_px}, - FieldTrialBasedConfig().Lookup("WebRTC-Video-EncoderFallbackSettings")); + ParseFieldTrial({&resolution_threshold_px}, + field_trials.Lookup("WebRTC-Video-EncoderFallbackSettings")); if (resolution_threshold_px) { ForcedFallbackParams params; params.enable_resolution_based_switch = true; @@ -95,9 +99,9 @@ absl::optional ParseFallbackParamsFromFieldTrials( } const std::string field_trial = - webrtc::field_trial::FindFullName(kVp8ForceFallbackEncoderFieldTrial); + field_trials.Lookup(kVp8ForceFallbackEncoderFieldTrial); if (!absl::StartsWith(field_trial, "Enabled")) { - return absl::nullopt; + return std::nullopt; } int max_pixels_lower_bound = @@ -111,23 +115,24 @@ absl::optional ParseFallbackParamsFromFieldTrials( ¶ms.max_pixels, &min_bps) != 3) { RTC_LOG(LS_WARNING) << "Invalid number of forced fallback parameters provided."; - return absl::nullopt; + return std::nullopt; } else if (params.min_pixels <= 0 || params.max_pixels < max_pixels_lower_bound || params.max_pixels < params.min_pixels || min_bps <= 0) { RTC_LOG(LS_WARNING) << "Invalid forced fallback parameter value provided."; - return absl::nullopt; + return std::nullopt; } params.vp8_specific_resolution_switch = true; return params; } -absl::optional GetForcedFallbackParams( +std::optional GetForcedFallbackParams( + const FieldTrialsView& field_trials, bool prefer_temporal_support, const VideoEncoder& main_encoder) { - absl::optional params = - ParseFallbackParamsFromFieldTrials(main_encoder); + std::optional params = + ParseFallbackParamsFromFieldTrials(field_trials, main_encoder); if (prefer_temporal_support) { if (!params.has_value()) { params.emplace(); @@ -140,6 +145,7 @@ absl::optional GetForcedFallbackParams( class VideoEncoderSoftwareFallbackWrapper final : public VideoEncoder { public: VideoEncoderSoftwareFallbackWrapper( + const FieldTrialsView& field_trials, std::unique_ptr sw_encoder, std::unique_ptr hw_encoder, bool prefer_temporal_support); @@ -197,15 +203,15 @@ class VideoEncoderSoftwareFallbackWrapper final : public VideoEncoder { // Settings used in the last InitEncode call and used if a dynamic fallback to // software is required. VideoCodec codec_settings_; - absl::optional encoder_settings_; + std::optional encoder_settings_; // The last rate control settings, if set. - absl::optional rate_control_parameters_; + std::optional rate_control_parameters_; // The last channel parameters set. 
- absl::optional packet_loss_; - absl::optional rtt_; - absl::optional loss_notification_; + std::optional packet_loss_; + std::optional rtt_; + std::optional loss_notification_; enum class EncoderState { kUninitialized, @@ -220,12 +226,13 @@ class VideoEncoderSoftwareFallbackWrapper final : public VideoEncoder { EncodedImageCallback* callback_; - const absl::optional fallback_params_; + const std::optional fallback_params_; int32_t EncodeWithMainEncoder(const VideoFrame& frame, const std::vector* frame_types); }; VideoEncoderSoftwareFallbackWrapper::VideoEncoderSoftwareFallbackWrapper( + const FieldTrialsView& field_trials, std::unique_ptr sw_encoder, std::unique_ptr hw_encoder, bool prefer_temporal_support) @@ -233,8 +240,9 @@ VideoEncoderSoftwareFallbackWrapper::VideoEncoderSoftwareFallbackWrapper( encoder_(std::move(hw_encoder)), fallback_encoder_(std::move(sw_encoder)), callback_(nullptr), - fallback_params_( - GetForcedFallbackParams(prefer_temporal_support, *encoder_)) { + fallback_params_(GetForcedFallbackParams(field_trials, + prefer_temporal_support, + *encoder_)) { RTC_DCHECK(fallback_encoder_); } @@ -264,14 +272,17 @@ void VideoEncoderSoftwareFallbackWrapper::PrimeEncoder( } bool VideoEncoderSoftwareFallbackWrapper::InitFallbackEncoder(bool is_forced) { - RTC_LOG(LS_WARNING) << "Encoder falling back to software encoding."; + RTC_LOG(LS_WARNING) << "[VESFW] " << __func__ + << "(is_forced=" << (is_forced ? "true" : "false") << ")"; RTC_DCHECK(encoder_settings_.has_value()); const int ret = fallback_encoder_->InitEncode(&codec_settings_, encoder_settings_.value()); if (ret != WEBRTC_VIDEO_CODEC_OK) { - RTC_LOG(LS_ERROR) << "Failed to initialize software-encoder fallback."; + RTC_LOG(LS_ERROR) + << "[VESFW] software-encoder fallback initialization failed with" + << " error code: " << WebRtcVideoCodecErrorToString(ret); fallback_encoder_->Release(); return false; } @@ -305,12 +316,18 @@ void VideoEncoderSoftwareFallbackWrapper::SetFecControllerOverride( int32_t VideoEncoderSoftwareFallbackWrapper::InitEncode( const VideoCodec* codec_settings, const VideoEncoder::Settings& settings) { + RTC_LOG(LS_INFO) << "[VESFW] " << __func__ + << "(codec=" << codec_settings->ToString() + << ", settings={number_of_cores: " + << settings.number_of_cores + << ", max_payload_size: " << settings.max_payload_size + << "})"; // Store settings, in case we need to dynamically switch to the fallback // encoder after a failed Encode call. codec_settings_ = *codec_settings; encoder_settings_ = settings; // Clear stored rate/channel parameters. - rate_control_parameters_ = absl::nullopt; + rate_control_parameters_ = std::nullopt; RTC_DCHECK_EQ(encoder_state_, EncoderState::kUninitialized) << "InitEncode() should never be called on an active instance!"; @@ -327,6 +344,11 @@ int32_t VideoEncoderSoftwareFallbackWrapper::InitEncode( PrimeEncoder(current_encoder()); return ret; } + if (ret == WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED) { + return ret; + } + RTC_LOG(LS_WARNING) << "[VESFW] Hardware encoder initialization failed with" + << " error code: " << WebRtcVideoCodecErrorToString(ret); // Try to instantiate software codec. if (InitFallbackEncoder(/*is_forced=*/false)) { @@ -335,6 +357,8 @@ int32_t VideoEncoderSoftwareFallbackWrapper::InitEncode( } // Software encoder failed too, use original return code. 
+ RTC_LOG(LS_WARNING) + << "[VESFW] Software fallback encoder initialization also failed."; encoder_state_ = EncoderState::kUninitialized; return ret; } @@ -385,13 +409,13 @@ int32_t VideoEncoderSoftwareFallbackWrapper::EncodeWithMainEncoder( } else { RTC_LOG(LS_INFO) << "Fallback encoder does not support native handle - " "converting frame to I420"; - rtc::scoped_refptr src_buffer = + scoped_refptr src_buffer = frame.video_frame_buffer()->ToI420(); if (!src_buffer) { RTC_LOG(LS_ERROR) << "Failed to convert from to I420"; return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE; } - rtc::scoped_refptr dst_buffer = + scoped_refptr dst_buffer = src_buffer->Scale(codec_settings_.width, codec_settings_.height); if (!dst_buffer) { RTC_LOG(LS_ERROR) << "Failed to scale video frame."; @@ -439,9 +463,9 @@ VideoEncoder::EncoderInfo VideoEncoderSoftwareFallbackWrapper::GetEncoderInfo() EncoderInfo info = IsFallbackActive() ? fallback_encoder_info : default_encoder_info; - info.requested_resolution_alignment = cricket::LeastCommonMultiple( - fallback_encoder_info.requested_resolution_alignment, - default_encoder_info.requested_resolution_alignment); + info.requested_resolution_alignment = + std::lcm(fallback_encoder_info.requested_resolution_alignment, + default_encoder_info.requested_resolution_alignment); info.apply_alignment_to_all_simulcast_layers = fallback_encoder_info.apply_alignment_to_all_simulcast_layers || default_encoder_info.apply_alignment_to_all_simulcast_layers; @@ -523,11 +547,12 @@ bool VideoEncoderSoftwareFallbackWrapper::TryInitForcedFallbackEncoder() { } // namespace std::unique_ptr CreateVideoEncoderSoftwareFallbackWrapper( + const Environment& env, std::unique_ptr sw_fallback_encoder, std::unique_ptr hw_encoder, bool prefer_temporal_support) { return std::make_unique( - std::move(sw_fallback_encoder), std::move(hw_encoder), + env.field_trials(), std::move(sw_fallback_encoder), std::move(hw_encoder), prefer_temporal_support); } diff --git a/api/video_codecs/video_encoder_software_fallback_wrapper.h b/api/video_codecs/video_encoder_software_fallback_wrapper.h index 6e6902eb3f..3915111d44 100644 --- a/api/video_codecs/video_encoder_software_fallback_wrapper.h +++ b/api/video_codecs/video_encoder_software_fallback_wrapper.h @@ -12,8 +12,8 @@ #define API_VIDEO_CODECS_VIDEO_ENCODER_SOFTWARE_FALLBACK_WRAPPER_H_ #include -#include +#include "api/environment/environment.h" #include "api/video_codecs/video_encoder.h" #include "rtc_base/system/rtc_export.h" @@ -27,21 +27,11 @@ namespace webrtc { // fallback should be forced even if the encoder otherwise works. RTC_EXPORT std::unique_ptr CreateVideoEncoderSoftwareFallbackWrapper( + const Environment& env, std::unique_ptr sw_fallback_encoder, std::unique_ptr hw_encoder, bool prefer_temporal_support); -// Default fallback for call-sites not yet updated with -// `prefer_temporal_support`. -// TODO(sprang): Remove when usage is gone. 
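On the encoder side the call-site shape is the same: a minimal sketch of wrapping a hardware encoder, assuming std::unique_ptr<VideoEncoder> arguments and an Environment owned by the caller.

```cpp
// Sketch only: encoder instances are placeholders supplied by the
// application.
#include <memory>
#include <utility>

#include "api/environment/environment.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/video_encoder_software_fallback_wrapper.h"

std::unique_ptr<webrtc::VideoEncoder> WrapWithSoftwareFallback(
    const webrtc::Environment& env,
    std::unique_ptr<webrtc::VideoEncoder> sw_encoder,
    std::unique_ptr<webrtc::VideoEncoder> hw_encoder) {
  // The forced-fallback field trials ("WebRTC-VP8-Forced-Fallback-Encoder-v2",
  // "WebRTC-Video-EncoderFallbackSettings") are now read from
  // env.field_trials() when the wrapper is constructed.
  return webrtc::CreateVideoEncoderSoftwareFallbackWrapper(
      env, std::move(sw_encoder), std::move(hw_encoder),
      /*prefer_temporal_support=*/false);
}
```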
-RTC_EXPORT inline std::unique_ptr -CreateVideoEncoderSoftwareFallbackWrapper( - std::unique_ptr sw_fallback_encoder, - std::unique_ptr hw_encoder) { - return CreateVideoEncoderSoftwareFallbackWrapper( - std::move(sw_fallback_encoder), std::move(hw_encoder), false); -} - } // namespace webrtc #endif // API_VIDEO_CODECS_VIDEO_ENCODER_SOFTWARE_FALLBACK_WRAPPER_H_ diff --git a/rtc_base/task_queue_libevent.h b/api/video_codecs/video_encoding_general.h similarity index 53% rename from rtc_base/task_queue_libevent.h rename to api/video_codecs/video_encoding_general.h index aaa72d4a1b..171e211dba 100644 --- a/rtc_base/task_queue_libevent.h +++ b/api/video_codecs/video_encoding_general.h @@ -1,5 +1,5 @@ /* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,17 +8,16 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef RTC_BASE_TASK_QUEUE_LIBEVENT_H_ -#define RTC_BASE_TASK_QUEUE_LIBEVENT_H_ - -#include - -#include "api/task_queue/task_queue_factory.h" +#ifndef API_VIDEO_CODECS_VIDEO_ENCODING_GENERAL_H_ +#define API_VIDEO_CODECS_VIDEO_ENCODING_GENERAL_H_ namespace webrtc { -std::unique_ptr CreateTaskQueueLibeventFactory(); +struct EncodingFormat { + enum SubSampling { k420, k422, k444 }; + SubSampling sub_sampling; + int bit_depth; +}; } // namespace webrtc - -#endif // RTC_BASE_TASK_QUEUE_LIBEVENT_H_ +#endif // API_VIDEO_CODECS_VIDEO_ENCODING_GENERAL_H_ diff --git a/api/video_codecs/vp8_frame_buffer_controller.h b/api/video_codecs/vp8_frame_buffer_controller.h index fc494f7293..b5dd2c0ae5 100644 --- a/api/video_codecs/vp8_frame_buffer_controller.h +++ b/api/video_codecs/vp8_frame_buffer_controller.h @@ -12,10 +12,12 @@ #define API_VIDEO_CODECS_VP8_FRAME_BUFFER_CONTROLLER_H_ #include +#include +#include #include +#include #include -#include "absl/types/optional.h" #include "api/fec_controller_override.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" @@ -80,16 +82,16 @@ struct Vp8EncoderConfig { std::array ts_layer_id; }; - absl::optional temporal_layer_config; + std::optional temporal_layer_config; // Target bitrate, in bps. - absl::optional rc_target_bitrate; + std::optional rc_target_bitrate; // Clamp QP to max. Use 0 to disable clamping. - absl::optional rc_max_quantizer; + std::optional rc_max_quantizer; // Error resilience mode. - absl::optional g_error_resilient; + std::optional g_error_resilient; // If set to true, all previous configuration overrides should be reset. 
bool reset_previous_configuration_overrides = false; diff --git a/api/video_codecs/vp8_temporal_layers.cc b/api/video_codecs/vp8_temporal_layers.cc index dd75c616d8..d2787ff48e 100644 --- a/api/video_codecs/vp8_temporal_layers.cc +++ b/api/video_codecs/vp8_temporal_layers.cc @@ -10,9 +10,17 @@ #include "api/video_codecs/vp8_temporal_layers.h" +#include +#include +#include #include +#include #include "absl/algorithm/container.h" +#include "api/fec_controller_override.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/vp8_frame_buffer_controller.h" +#include "api/video_codecs/vp8_frame_config.h" #include "rtc_base/checks.h" namespace webrtc { diff --git a/api/video_codecs/vp8_temporal_layers.h b/api/video_codecs/vp8_temporal_layers.h index 2ffe6eacdf..be6a4d2e70 100644 --- a/api/video_codecs/vp8_temporal_layers.h +++ b/api/video_codecs/vp8_temporal_layers.h @@ -11,11 +11,13 @@ #ifndef API_VIDEO_CODECS_VP8_TEMPORAL_LAYERS_H_ #define API_VIDEO_CODECS_VP8_TEMPORAL_LAYERS_H_ +#include +#include #include #include #include "api/fec_controller_override.h" -#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" #include "api/video_codecs/vp8_frame_buffer_controller.h" #include "api/video_codecs/vp8_frame_config.h" diff --git a/api/video_codecs/vp8_temporal_layers_factory.cc b/api/video_codecs/vp8_temporal_layers_factory.cc index 193494d71d..9ba589e9c7 100644 --- a/api/video_codecs/vp8_temporal_layers_factory.cc +++ b/api/video_codecs/vp8_temporal_layers_factory.cc @@ -16,6 +16,10 @@ #include #include "api/fec_controller_override.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/vp8_frame_buffer_controller.h" +#include "api/video_codecs/vp8_temporal_layers.h" #include "modules/video_coding/codecs/vp8/default_temporal_layers.h" #include "modules/video_coding/codecs/vp8/screenshare_layers.h" #include "modules/video_coding/utility/simulcast_utility.h" @@ -25,7 +29,7 @@ namespace webrtc { std::unique_ptr Vp8TemporalLayersFactory::Create( const VideoCodec& codec, - const VideoEncoder::Settings& settings, + const VideoEncoder::Settings& /* settings */, FecControllerOverride* fec_controller_override) { std::vector> controllers; const int num_streams = SimulcastUtility::NumberOfSimulcastStreams(codec); diff --git a/api/video_codecs/vp8_temporal_layers_factory.h b/api/video_codecs/vp8_temporal_layers_factory.h index 7a146f1d4f..95717f572d 100644 --- a/api/video_codecs/vp8_temporal_layers_factory.h +++ b/api/video_codecs/vp8_temporal_layers_factory.h @@ -13,7 +13,10 @@ #include -#include "api/video_codecs/vp8_temporal_layers.h" +#include "api/fec_controller_override.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/vp8_frame_buffer_controller.h" namespace webrtc { diff --git a/api/video_codecs/vp9_profile.cc b/api/video_codecs/vp9_profile.cc index 7e627cc080..4aec2387a6 100644 --- a/api/video_codecs/vp9_profile.cc +++ b/api/video_codecs/vp9_profile.cc @@ -11,8 +11,10 @@ #include "api/video_codecs/vp9_profile.h" #include -#include +#include +#include +#include "api/rtp_parameters.h" #include "rtc_base/string_to_number.h" namespace webrtc { @@ -34,10 +36,10 @@ std::string VP9ProfileToString(VP9Profile profile) { return "0"; } -absl::optional StringToVP9Profile(const std::string& str) { - const absl::optional i = rtc::StringToNumber(str); +std::optional StringToVP9Profile(const std::string& str) { + const std::optional i 
= StringToNumber(str); if (!i.has_value()) - return absl::nullopt; + return std::nullopt; switch (i.value()) { case 0: @@ -49,12 +51,12 @@ absl::optional StringToVP9Profile(const std::string& str) { case 3: return VP9Profile::kProfile3; default: - return absl::nullopt; + return std::nullopt; } } -absl::optional ParseSdpForVP9Profile( - const SdpVideoFormat::Parameters& params) { +std::optional ParseSdpForVP9Profile( + const CodecParameterMap& params) { const auto profile_it = params.find(kVP9FmtpProfileId); if (profile_it == params.end()) return VP9Profile::kProfile0; @@ -62,10 +64,10 @@ absl::optional ParseSdpForVP9Profile( return StringToVP9Profile(profile_str); } -bool VP9IsSameProfile(const SdpVideoFormat::Parameters& params1, - const SdpVideoFormat::Parameters& params2) { - const absl::optional profile = ParseSdpForVP9Profile(params1); - const absl::optional other_profile = +bool VP9IsSameProfile(const CodecParameterMap& params1, + const CodecParameterMap& params2) { + const std::optional profile = ParseSdpForVP9Profile(params1); + const std::optional other_profile = ParseSdpForVP9Profile(params2); return profile && other_profile && profile == other_profile; } diff --git a/api/video_codecs/vp9_profile.h b/api/video_codecs/vp9_profile.h index b570bc3bb6..94ac01c560 100644 --- a/api/video_codecs/vp9_profile.h +++ b/api/video_codecs/vp9_profile.h @@ -11,10 +11,10 @@ #ifndef API_VIDEO_CODECS_VP9_PROFILE_H_ #define API_VIDEO_CODECS_VP9_PROFILE_H_ +#include #include -#include "absl/types/optional.h" -#include "api/video_codecs/sdp_video_format.h" +#include "api/rtp_parameters.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -35,19 +35,19 @@ RTC_EXPORT std::string VP9ProfileToString(VP9Profile profile); // Helper functions to convert std::string to VP9Profile. Returns null if given // an invalid profile string. -absl::optional StringToVP9Profile(const std::string& str); +std::optional StringToVP9Profile(const std::string& str); // Parse profile that is represented as a string of single digit contained in an // SDP key-value map. A default profile(kProfile0) will be returned if the // profile key is missing. Nothing will be returned if the key is present but // the string is invalid. -RTC_EXPORT absl::optional ParseSdpForVP9Profile( - const SdpVideoFormat::Parameters& params); +RTC_EXPORT std::optional ParseSdpForVP9Profile( + const CodecParameterMap& params); // Returns true if the parameters have the same VP9 profile, or neither contains // VP9 profile. -bool VP9IsSameProfile(const SdpVideoFormat::Parameters& params1, - const SdpVideoFormat::Parameters& params2); +bool VP9IsSameProfile(const CodecParameterMap& params1, + const CodecParameterMap& params2); } // namespace webrtc diff --git a/api/video_track_source_constraints.h b/api/video_track_source_constraints.h index 55e5396d62..97b20f0949 100644 --- a/api/video_track_source_constraints.h +++ b/api/video_track_source_constraints.h @@ -16,15 +16,15 @@ #ifndef API_VIDEO_TRACK_SOURCE_CONSTRAINTS_H_ #define API_VIDEO_TRACK_SOURCE_CONSTRAINTS_H_ -#include "absl/types/optional.h" +#include namespace webrtc { // This struct definition describes constraints on the video source that may be // set with VideoTrackSourceInterface::ProcessConstraints. 
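The VP9 profile helpers now take CodecParameterMap and return std::optional. A small sketch of the three parsing outcomes, assuming kVP9FmtpProfileId maps to the "profile-id" fmtp key.

```cpp
// Sketch only: CodecParameterMap is the std::map<std::string, std::string>
// alias from api/rtp_parameters.h.
#include <optional>

#include "api/rtp_parameters.h"
#include "api/video_codecs/vp9_profile.h"

void Vp9ProfileParsingExamples() {
  webrtc::CodecParameterMap explicit_profile = {{"profile-id", "2"}};
  std::optional<webrtc::VP9Profile> p2 =
      webrtc::ParseSdpForVP9Profile(explicit_profile);  // kProfile2

  webrtc::CodecParameterMap no_profile;  // Key missing.
  std::optional<webrtc::VP9Profile> p0 =
      webrtc::ParseSdpForVP9Profile(no_profile);  // Defaults to kProfile0.

  webrtc::CodecParameterMap bad_profile = {{"profile-id", "7"}};
  std::optional<webrtc::VP9Profile> none =
      webrtc::ParseSdpForVP9Profile(bad_profile);  // std::nullopt.
}
```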
struct VideoTrackSourceConstraints { - absl::optional min_fps; - absl::optional max_fps; + std::optional min_fps; + std::optional max_fps; }; } // namespace webrtc diff --git a/api/video_track_source_proxy_factory.h b/api/video_track_source_proxy_factory.h index eb6e96429a..4799954f6e 100644 --- a/api/video_track_source_proxy_factory.h +++ b/api/video_track_source_proxy_factory.h @@ -12,6 +12,8 @@ #define API_VIDEO_TRACK_SOURCE_PROXY_FACTORY_H_ #include "api/media_stream_interface.h" +#include "api/scoped_refptr.h" +#include "rtc_base/system/rtc_export.h" #include "rtc_base/thread.h" namespace webrtc { @@ -19,9 +21,9 @@ namespace webrtc { // Creates a proxy source for `source` which makes sure the real // VideoTrackSourceInterface implementation is destroyed on the signaling thread // and marshals calls to `worker_thread` and `signaling_thread`. -rtc::scoped_refptr RTC_EXPORT -CreateVideoTrackSourceProxy(rtc::Thread* signaling_thread, - rtc::Thread* worker_thread, +scoped_refptr RTC_EXPORT +CreateVideoTrackSourceProxy(Thread* signaling_thread, + Thread* worker_thread, VideoTrackSourceInterface* source); } // namespace webrtc diff --git a/api/voip/BUILD.gn b/api/voip/BUILD.gn index 714490a526..36d2e7127e 100644 --- a/api/voip/BUILD.gn +++ b/api/voip/BUILD.gn @@ -23,15 +23,13 @@ rtc_source_set("voip_api") { "..:array_view", "../audio_codecs:audio_codecs_api", "../neteq:neteq_api", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/types:optional", ] } rtc_library("voip_engine_factory") { visibility = [ "*" ] + allow_poison = [ "environment_construction" ] sources = [ "voip_engine_factory.cc", "voip_engine_factory.h", @@ -40,10 +38,13 @@ rtc_library("voip_engine_factory") { ":voip_api", "..:scoped_refptr", "../../audio/voip:voip_core", - "../../modules/audio_device:audio_device_api", - "../../modules/audio_processing:api", + "../../rtc_base:checks", "../../rtc_base:logging", + "../audio:audio_device", + "../audio:audio_processing", "../audio_codecs:audio_codecs_api", + "../environment", + "../environment:environment_factory", "../task_queue", ] } @@ -57,8 +58,8 @@ if (rtc_include_tests) { ":voip_api", "..:array_view", "../../test:test_support", + "../audio_codecs:audio_codecs_api", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("voip_engine_factory_unittests") { @@ -66,11 +67,13 @@ if (rtc_include_tests) { sources = [ "test/voip_engine_factory_unittest.cc" ] deps = [ ":voip_engine_factory", + "..:field_trials", + "..:make_ref_counted", "../../modules/audio_device:mock_audio_device", "../../modules/audio_processing:mocks", "../../test:audio_codec_mocks", "../../test:test_support", - "../task_queue:default_task_queue_factory", + "../environment:environment_factory", ] } diff --git a/api/voip/DEPS b/api/voip/DEPS index 3845dffab0..a36df41a9c 100644 --- a/api/voip/DEPS +++ b/api/voip/DEPS @@ -1,8 +1,4 @@ specific_include_rules = { - ".*\.h": [ - "+third_party/absl/types/optional.h", - ], - "voip_engine_factory.h": [ "+modules/audio_device/include/audio_device.h", "+modules/audio_processing/include/audio_processing.h", diff --git a/api/voip/test/compile_all_headers.cc b/api/voip/test/compile_all_headers.cc index 73a0f0d1c4..4ad4528fc3 100644 --- a/api/voip/test/compile_all_headers.cc +++ b/api/voip/test/compile_all_headers.cc @@ -10,5 +10,3 @@ // This file verifies that all include files in this directory can be // compiled without errors or other required includes. 
- -#include "api/voip/test/mock_voip_engine.h" diff --git a/api/voip/test/mock_voip_engine.h b/api/voip/test/mock_voip_engine.h index 74b880d652..6f9e2813fc 100644 --- a/api/voip/test/mock_voip_engine.h +++ b/api/voip/test/mock_voip_engine.h @@ -11,10 +11,12 @@ #ifndef API_VOIP_TEST_MOCK_VOIP_ENGINE_H_ #define API_VOIP_TEST_MOCK_VOIP_ENGINE_H_ +#include #include +#include -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/audio_codecs/audio_format.h" #include "api/voip/voip_base.h" #include "api/voip/voip_codec.h" #include "api/voip/voip_dtmf.h" @@ -30,7 +32,7 @@ class MockVoipBase : public VoipBase { public: MOCK_METHOD(ChannelId, CreateChannel, - (Transport*, absl::optional), + (Transport*, std::optional), (override)); MOCK_METHOD(VoipResult, ReleaseChannel, (ChannelId), (override)); MOCK_METHOD(VoipResult, StartSend, (ChannelId), (override)); @@ -67,11 +69,13 @@ class MockVoipNetwork : public VoipNetwork { public: MOCK_METHOD(VoipResult, ReceivedRTPPacket, - (ChannelId channel_id, rtc::ArrayView rtp_packet), + (ChannelId channel_id, + webrtc::ArrayView rtp_packet), (override)); MOCK_METHOD(VoipResult, ReceivedRTCPPacket, - (ChannelId channel_id, rtc::ArrayView rtcp_packet), + (ChannelId channel_id, + webrtc::ArrayView rtcp_packet), (override)); }; diff --git a/api/voip/test/voip_engine_factory_unittest.cc b/api/voip/test/voip_engine_factory_unittest.cc index 7d717c1662..557dfd9d0f 100644 --- a/api/voip/test/voip_engine_factory_unittest.cc +++ b/api/voip/test/voip_engine_factory_unittest.cc @@ -10,11 +10,15 @@ #include "api/voip/voip_engine_factory.h" +#include #include -#include "api/task_queue/default_task_queue_factory.h" +#include "api/environment/environment_factory.h" +#include "api/field_trials.h" +#include "api/make_ref_counted.h" #include "modules/audio_device/include/mock_audio_device.h" #include "modules/audio_processing/include/mock_audio_processing.h" +#include "test/gmock.h" #include "test/gtest.h" #include "test/mock_audio_decoder_factory.h" #include "test/mock_audio_encoder_factory.h" @@ -22,26 +26,27 @@ namespace webrtc { namespace { +using ::testing::NiceMock; + // Create voip engine with mock modules as normal use case. TEST(VoipEngineFactoryTest, CreateEngineWithMockModules) { VoipEngineConfig config; - config.encoder_factory = rtc::make_ref_counted(); - config.decoder_factory = rtc::make_ref_counted(); - config.task_queue_factory = CreateDefaultTaskQueueFactory(); - config.audio_processing = - rtc::make_ref_counted>(); + config.encoder_factory = make_ref_counted(); + config.decoder_factory = make_ref_counted(); + config.env = CreateEnvironment(FieldTrials::CreateNoGlobal("")); + config.audio_processing_builder = + std::make_unique>(); config.audio_device_module = test::MockAudioDeviceModule::CreateNice(); auto voip_engine = CreateVoipEngine(std::move(config)); EXPECT_NE(voip_engine, nullptr); } -// Create voip engine without setting audio processing as optional component. +// Create voip engine without setting optional components. 
TEST(VoipEngineFactoryTest, UseNoAudioProcessing) { VoipEngineConfig config; - config.encoder_factory = rtc::make_ref_counted(); - config.decoder_factory = rtc::make_ref_counted(); - config.task_queue_factory = CreateDefaultTaskQueueFactory(); + config.encoder_factory = make_ref_counted(); + config.decoder_factory = make_ref_counted(); config.audio_device_module = test::MockAudioDeviceModule::CreateNice(); auto voip_engine = CreateVoipEngine(std::move(config)); diff --git a/api/voip/voip_base.h b/api/voip/voip_base.h index 8df7bd0571..f7f4432a6c 100644 --- a/api/voip/voip_base.h +++ b/api/voip/voip_base.h @@ -11,8 +11,10 @@ #ifndef API_VOIP_VOIP_BASE_H_ #define API_VOIP_VOIP_BASE_H_ +#include +#include + #include "absl/base/attributes.h" -#include "absl/types/optional.h" namespace webrtc { @@ -65,7 +67,7 @@ class VoipBase { // Returns a ChannelId created for caller to handle subsequent Channel // operations. virtual ChannelId CreateChannel(Transport* transport, - absl::optional local_ssrc) = 0; + std::optional local_ssrc) = 0; // Releases `channel_id` that no longer has any use. // Returns following VoipResult; diff --git a/api/voip/voip_dtmf.h b/api/voip/voip_dtmf.h index ef7ea28c94..21d6742682 100644 --- a/api/voip/voip_dtmf.h +++ b/api/voip/voip_dtmf.h @@ -11,6 +11,8 @@ #ifndef API_VOIP_VOIP_DTMF_H_ #define API_VOIP_VOIP_DTMF_H_ +#include + #include "api/voip/voip_base.h" namespace webrtc { diff --git a/api/voip/voip_engine.h b/api/voip/voip_engine.h index d223f6ad6c..cdb74f9ff9 100644 --- a/api/voip/voip_engine.h +++ b/api/voip/voip_engine.h @@ -35,7 +35,8 @@ class VoipVolumeControl; // config.audio_device = // AudioDeviceModule::Create(AudioDeviceModule::kPlatformDefaultAudio, // config.task_queue_factory.get()); -// config.audio_processing = AudioProcessingBuilder().Create(); +// config.audio_processing_builder = +// std::make_unique(); // // auto voip_engine = CreateVoipEngine(std::move(config)); // diff --git a/api/voip/voip_engine_factory.cc b/api/voip/voip_engine_factory.cc index 8da53cef74..04abe98c6f 100644 --- a/api/voip/voip_engine_factory.cc +++ b/api/voip/voip_engine_factory.cc @@ -10,9 +10,16 @@ #include "api/voip/voip_engine_factory.h" +#include #include +#include "api/audio/audio_processing.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/scoped_refptr.h" +#include "api/voip/voip_engine.h" #include "audio/voip/voip_core.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { @@ -20,18 +27,26 @@ namespace webrtc { std::unique_ptr CreateVoipEngine(VoipEngineConfig config) { RTC_CHECK(config.encoder_factory); RTC_CHECK(config.decoder_factory); - RTC_CHECK(config.task_queue_factory); RTC_CHECK(config.audio_device_module); - if (!config.audio_processing) { + RTC_CHECK(config.task_queue_factory == nullptr || !config.env.has_value()); + Environment env = + config.env.has_value() + ? 
*config.env + : CreateEnvironment(std::move(config.task_queue_factory)); + + scoped_refptr audio_processing; + if (config.audio_processing_builder != nullptr) { + audio_processing = std::move(config.audio_processing_builder)->Build(env); + } + + if (audio_processing == nullptr) { RTC_DLOG(LS_INFO) << "No audio processing functionality provided."; } - return std::make_unique(std::move(config.encoder_factory), - std::move(config.decoder_factory), - std::move(config.task_queue_factory), - std::move(config.audio_device_module), - std::move(config.audio_processing)); + return std::make_unique( + env, std::move(config.encoder_factory), std::move(config.decoder_factory), + std::move(config.audio_device_module), std::move(audio_processing)); } } // namespace webrtc diff --git a/api/voip/voip_engine_factory.h b/api/voip/voip_engine_factory.h index 62fe8011a6..f00075e2d9 100644 --- a/api/voip/voip_engine_factory.h +++ b/api/voip/voip_engine_factory.h @@ -12,14 +12,16 @@ #define API_VOIP_VOIP_ENGINE_FACTORY_H_ #include +#include +#include "api/audio/audio_device.h" +#include "api/audio/audio_processing.h" #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_encoder_factory.h" +#include "api/environment/environment.h" #include "api/scoped_refptr.h" #include "api/task_queue/task_queue_factory.h" #include "api/voip/voip_engine.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" namespace webrtc { @@ -32,32 +34,40 @@ struct VoipEngineConfig { // AudioEncoderFactory provides a set of audio codecs for VoipEngine to encode // the audio input sample. Application can choose to limit the set to reduce // application footprint. - rtc::scoped_refptr encoder_factory; + scoped_refptr encoder_factory; // Mandatory (e.g. api/audio_codec/builtin_audio_decoder_factory). // AudioDecoderFactory provides a set of audio codecs for VoipEngine to decode // the received RTP packets from remote media endpoint. Application can choose // to limit the set to reduce application footprint. - rtc::scoped_refptr decoder_factory; + scoped_refptr decoder_factory; - // Mandatory (e.g. api/task_queue/default_task_queue_factory). - // TaskQeueuFactory provided for VoipEngine to work asynchronously on its + // Optional (e.g. api/task_queue/default_task_queue_factory). + // TaskQueueFactory provided for VoipEngine to work asynchronously on its // encoding flow. + // It is an error to provide both `env` and `task_queue_factory`. std::unique_ptr task_queue_factory; // Mandatory (e.g. modules/audio_device/include). // AudioDeviceModule that periocally provides audio input samples from // recording device (e.g. microphone) and requests audio output samples to // play through its output device (e.g. speaker). - rtc::scoped_refptr audio_device_module; + scoped_refptr audio_device_module; - // Optional (e.g. modules/audio_processing/include). + // Optional. When not set, VoipEngine will use a default Environment created + // with `CreateEnvironment`, see api/environment/environment_factory.h + // Provides + // - TaskQueueFactory to work asynchronously on VoipEngine encoding flow + // - FieldTrialsView for experimentations + std::optional env; + + // Optional (e.g. api/audio/builtin_audio_processing_builder). // AudioProcessing provides audio procesing functionalities (e.g. acoustic // echo cancellation, noise suppression, gain control, etc) on audio input // samples for VoipEngine. 
When optionally not set, VoipEngine will not have // such functionalities to perform on audio input samples received from // AudioDeviceModule. - rtc::scoped_refptr audio_processing; + std::unique_ptr audio_processing_builder; }; // Creates a VoipEngine instance with provided VoipEngineConfig. diff --git a/api/voip/voip_network.h b/api/voip/voip_network.h index 0ea16b68de..b239c7eaf8 100644 --- a/api/voip/voip_network.h +++ b/api/voip/voip_network.h @@ -11,6 +11,8 @@ #ifndef API_VOIP_VOIP_NETWORK_H_ #define API_VOIP_VOIP_NETWORK_H_ +#include + #include "api/array_view.h" #include "api/voip/voip_base.h" @@ -25,9 +27,8 @@ class VoipNetwork { // Returns following VoipResult; // kOk - received RTP packet is processed. // kInvalidArgument - `channel_id` is invalid. - virtual VoipResult ReceivedRTPPacket( - ChannelId channel_id, - rtc::ArrayView rtp_packet) = 0; + virtual VoipResult ReceivedRTPPacket(ChannelId channel_id, + ArrayView rtp_packet) = 0; // The data received from the network including RTCP header is passed here. // Returns following VoipResult; @@ -35,7 +36,7 @@ class VoipNetwork { // kInvalidArgument - `channel_id` is invalid. virtual VoipResult ReceivedRTCPPacket( ChannelId channel_id, - rtc::ArrayView rtcp_packet) = 0; + ArrayView rtcp_packet) = 0; protected: virtual ~VoipNetwork() = default; diff --git a/api/voip/voip_statistics.h b/api/voip/voip_statistics.h index 2d1ab8d5e8..8b994d1c31 100644 --- a/api/voip/voip_statistics.h +++ b/api/voip/voip_statistics.h @@ -11,6 +11,9 @@ #ifndef API_VOIP_VOIP_STATISTICS_H_ #define API_VOIP_VOIP_STATISTICS_H_ +#include +#include + #include "api/neteq/neteq.h" #include "api/voip/voip_base.h" @@ -39,7 +42,7 @@ struct RemoteRtcpStatistics { double fraction_lost = 0.0; // https://w3c.github.io/webrtc-stats/#dom-rtcremoteinboundrtpstreamstats-roundtriptime - absl::optional round_trip_time; + std::optional round_trip_time; // Last time (not RTP timestamp) when RTCP report received in milliseconds. int64_t last_report_received_timestamp_ms; @@ -66,9 +69,9 @@ struct ChannelStatistics { // SSRC from remote media endpoint as indicated either by RTP header in RFC // 3550 [5.1] or RTCP SSRC of sender in RFC 3550 [6.4.1]. - absl::optional remote_ssrc; + std::optional remote_ssrc; - absl::optional remote_rtcp; + std::optional remote_rtcp; }; // VoipStatistics interface provides the interfaces for querying metrics around diff --git a/api/webrtc_key_value_config.h b/api/webrtc_key_value_config.h index e3cac59698..0648a7f593 100644 --- a/api/webrtc_key_value_config.h +++ b/api/webrtc_key_value_config.h @@ -12,6 +12,6 @@ // TODO(bugs.webrtc.org/10335): Remove once all migrated to // api/field_trials_view.h -#include "api/field_trials_view.h" +#include "api/field_trials_view.h" // IWYU pragma: keep #endif // API_WEBRTC_KEY_VALUE_CONFIG_H_ diff --git a/api/wrapping_async_dns_resolver.cc b/api/wrapping_async_dns_resolver.cc deleted file mode 100644 index 866cb0076d..0000000000 --- a/api/wrapping_async_dns_resolver.cc +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2021 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "api/wrapping_async_dns_resolver.h" - -namespace webrtc { - -bool WrappingAsyncDnsResolverResult::GetResolvedAddress( - int family, - rtc::SocketAddress* addr) const { - if (!owner_->wrapped()) { - return false; - } - return owner_->wrapped()->GetResolvedAddress(family, addr); -} - -int WrappingAsyncDnsResolverResult::GetError() const { - if (!owner_->wrapped()) { - return -1; // FIXME: Find a code that makes sense. - } - return owner_->wrapped()->GetError(); -} - -} // namespace webrtc diff --git a/api/wrapping_async_dns_resolver.h b/api/wrapping_async_dns_resolver.h deleted file mode 100644 index d07f1464c5..0000000000 --- a/api/wrapping_async_dns_resolver.h +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright 2021 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_WRAPPING_ASYNC_DNS_RESOLVER_H_ -#define API_WRAPPING_ASYNC_DNS_RESOLVER_H_ - -#include -#include -#include - -#include "absl/memory/memory.h" -#include "api/async_dns_resolver.h" -#include "api/sequence_checker.h" -#include "rtc_base/async_resolver.h" -#include "rtc_base/async_resolver_interface.h" -#include "rtc_base/checks.h" -#include "rtc_base/socket_address.h" -#include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/thread_annotations.h" - -// This file defines a DNS resolver that wraps an old-style -// AsyncResolver. -// It is part of the conversion to the newer interface, and will go away -// once conversion is finished. -// TODO(bugs.webrtc.org/12598): Delete this API. - -namespace webrtc { - -class WrappingAsyncDnsResolver; - -class RTC_EXPORT WrappingAsyncDnsResolverResult - : public AsyncDnsResolverResult { - public: - explicit WrappingAsyncDnsResolverResult(WrappingAsyncDnsResolver* owner) - : owner_(owner) {} - ~WrappingAsyncDnsResolverResult() {} - - // Note: Inline declaration not possible, since it refers to - // WrappingAsyncDnsResolver. - bool GetResolvedAddress(int family, rtc::SocketAddress* addr) const override; - int GetError() const override; - - private: - WrappingAsyncDnsResolver* const owner_; -}; - -class RTC_EXPORT WrappingAsyncDnsResolver : public AsyncDnsResolverInterface, - public sigslot::has_slots<> { - public: - explicit WrappingAsyncDnsResolver(rtc::AsyncResolverInterface* wrapped) - : wrapped_(absl::WrapUnique(wrapped)), result_(this) {} - - ~WrappingAsyncDnsResolver() override { - // Workaround to get around the fact that sigslot-using objects can't be - // destroyed from within their callback: Alert class users early. - // TODO(bugs.webrtc.org/12651): Delete this class once the sigslot users are - // gone. 
- RTC_CHECK(!within_resolve_result_); - wrapped_.release()->Destroy(false); - } - - void Start(const rtc::SocketAddress& addr, - absl::AnyInvocable callback) override { - RTC_DCHECK_RUN_ON(&sequence_checker_); - PrepareToResolve(std::move(callback)); - wrapped_->Start(addr); - } - - void Start(const rtc::SocketAddress& addr, - int family, - absl::AnyInvocable callback) override { - RTC_DCHECK_RUN_ON(&sequence_checker_); - PrepareToResolve(std::move(callback)); - wrapped_->Start(addr, family); - } - - const AsyncDnsResolverResult& result() const override { - RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK_EQ(State::kResolved, state_); - return result_; - } - - private: - enum class State { kNotStarted, kStarted, kResolved }; - - friend class WrappingAsyncDnsResolverResult; - // For use by WrappingAsyncDnsResolverResult - rtc::AsyncResolverInterface* wrapped() const { - RTC_DCHECK_RUN_ON(&sequence_checker_); - return wrapped_.get(); - } - - void PrepareToResolve(absl::AnyInvocable callback) { - RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK_EQ(State::kNotStarted, state_); - state_ = State::kStarted; - callback_ = std::move(callback); - wrapped_->SignalDone.connect(this, - &WrappingAsyncDnsResolver::OnResolveResult); - } - - void OnResolveResult(rtc::AsyncResolverInterface* ref) { - RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK(state_ == State::kStarted); - RTC_DCHECK_EQ(ref, wrapped_.get()); - state_ = State::kResolved; - within_resolve_result_ = true; - callback_(); - within_resolve_result_ = false; - } - - // The class variables need to be accessed on a single thread. - SequenceChecker sequence_checker_; - absl::AnyInvocable callback_ RTC_GUARDED_BY(sequence_checker_); - std::unique_ptr wrapped_ - RTC_GUARDED_BY(sequence_checker_); - State state_ RTC_GUARDED_BY(sequence_checker_) = State::kNotStarted; - WrappingAsyncDnsResolverResult result_ RTC_GUARDED_BY(sequence_checker_); - bool within_resolve_result_ RTC_GUARDED_BY(sequence_checker_) = false; -}; - -} // namespace webrtc - -#endif // API_WRAPPING_ASYNC_DNS_RESOLVER_H_ diff --git a/audio/BUILD.gn b/audio/BUILD.gn index ec09e5a350..702a8d839d 100644 --- a/audio/BUILD.gn +++ b/audio/BUILD.gn @@ -39,29 +39,39 @@ rtc_library("audio") { deps = [ "../api:array_view", + "../api:bitrate_allocation", "../api:call_api", "../api:field_trials_view", "../api:frame_transformer_interface", "../api:function_view", + "../api:make_ref_counted", "../api:rtp_headers", + "../api:rtp_packet_info", "../api:rtp_parameters", "../api:scoped_refptr", "../api:sequence_checker", "../api:transport_api", "../api/audio:aec3_factory", + "../api/audio:audio_device", "../api/audio:audio_frame_api", "../api/audio:audio_frame_processor", "../api/audio:audio_mixer_api", + "../api/audio:audio_processing", "../api/audio_codecs:audio_codecs_api", "../api/crypto:frame_decryptor_interface", "../api/crypto:frame_encryptor_interface", "../api/crypto:options", + "../api/environment", + "../api/neteq:default_neteq_factory", "../api/neteq:neteq_api", "../api/rtc_event_log", "../api/task_queue", "../api/task_queue:pending_task_safety_flag", "../api/transport/rtp:rtp_source", + "../api/units:data_rate", + "../api/units:data_size", "../api/units:time_delta", + "../api/units:timestamp", "../call:audio_sender_interface", "../call:bitrate_allocator", "../call:call_interfaces", @@ -71,7 +81,7 @@ rtc_library("audio") { "../logging:rtc_event_audio", "../logging:rtc_stream_config", "../media:media_channel", - "../media:rtc_media_base", + "../media:media_channel_impl", 
"../modules/async_audio_processing", "../modules/audio_coding", "../modules/audio_coding:audio_coding_module_typedefs", @@ -80,13 +90,11 @@ rtc_library("audio") { "../modules/audio_coding:red", "../modules/audio_device", "../modules/audio_processing", - "../modules/audio_processing:api", "../modules/audio_processing:audio_frame_proxies", "../modules/audio_processing:rms_level", "../modules/pacing", "../modules/rtp_rtcp", "../modules/rtp_rtcp:rtp_rtcp_format", - "../rtc_base:audio_format_to_string", "../rtc_base:buffer", "../rtc_base:checks", "../rtc_base:event_tracer", @@ -97,7 +105,6 @@ rtc_library("audio") { "../rtc_base:refcount", "../rtc_base:rtc_event", "../rtc_base:rtc_numerics", - "../rtc_base:rtc_task_queue", "../rtc_base:safe_conversions", "../rtc_base:safe_minmax", "../rtc_base:stringutils", @@ -109,15 +116,11 @@ rtc_library("audio") { "../rtc_base/system:no_unique_address", "../rtc_base/task_utils:repeating_task", "../system_wrappers", - "../system_wrappers:field_trial", "../system_wrappers:metrics", "utility:audio_frame_operations", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } if (rtc_include_tests) { @@ -131,10 +134,9 @@ if (rtc_include_tests) { deps = [ ":audio", "../api:simulated_network_api", + "../api/audio:audio_device", "../api/task_queue", "../call:fake_network", - "../call:simulated_network", - "../modules/audio_device:audio_device_api", "../modules/audio_device:test_audio_device_module", "../system_wrappers", "../test:test_common", @@ -164,22 +166,49 @@ if (rtc_include_tests) { ":audio", ":audio_end_to_end_test", ":channel_receive_unittest", + "../api:array_view", + "../api:bitrate_allocation", + "../api:call_api", + "../api:frame_transformer_factory", + "../api:frame_transformer_interface", + "../api:function_view", "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:mock_audio_mixer", "../api:mock_frame_decryptor", "../api:mock_frame_encryptor", + "../api:mock_frame_transformer", + "../api:mock_transformable_audio_frame", + "../api:rtc_error_matchers", + "../api:rtp_headers", + "../api:rtp_parameters", "../api:scoped_refptr", + "../api:simulated_network_api", + "../api:transport_api", "../api/audio:audio_frame_api", + "../api/audio:audio_mixer_api", + "../api/audio:audio_processing_statistics", "../api/audio_codecs:audio_codecs_api", "../api/audio_codecs:builtin_audio_encoder_factory", "../api/audio_codecs/opus:audio_decoder_opus", "../api/audio_codecs/opus:audio_encoder_opus", "../api/crypto:frame_decryptor_interface", - "../api/rtc_event_log", + "../api/crypto:frame_encryptor_interface", + "../api/crypto:options", + "../api/environment", + "../api/environment:environment_factory", + "../api/task_queue", "../api/task_queue:default_task_queue_factory", "../api/task_queue/test:mock_task_queue_base", + "../api/transport:bitrate_settings", + "../api/transport:network_control", + "../api/transport/rtp:rtp_source", + "../api/units:data_rate", + "../api/units:data_size", "../api/units:time_delta", "../api/units:timestamp", + "../call:bitrate_allocator", + "../call:call_interfaces", "../call:mock_bitrate_allocator", "../call:mock_call_interfaces", "../call:mock_rtp_interfaces", @@ -187,16 +216,15 @@ if (rtc_include_tests) { "../call:rtp_receiver", "../call:rtp_sender", "../common_audio", - "../logging:mocks", - 
"../modules/audio_device:audio_device_api", + "../modules/audio_coding:audio_coding_module_typedefs", "../modules/audio_device:audio_device_impl", # For TestAudioDeviceModule "../modules/audio_device:mock_audio_device", "../modules/audio_mixer:audio_mixer_impl", "../modules/audio_mixer:audio_mixer_test_utils", - "../modules/audio_processing:audio_processing_statistics", "../modules/audio_processing:mocks", "../modules/pacing", "../modules/rtp_rtcp:mock_rtp_rtcp", + "../modules/rtp_rtcp:rtp_rtcp", "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:checks", "../rtc_base:gunit_helpers", @@ -210,8 +238,6 @@ if (rtc_include_tests) { "../system_wrappers", "../test:audio_codec_mocks", "../test:field_trial", - "../test:mock_frame_transformer", - "../test:mock_transformable_frame", "../test:mock_transport", "../test:rtp_test_utils", "../test:run_loop", @@ -219,9 +245,13 @@ if (rtc_include_tests) { "../test:test_common", "../test:test_support", "../test:video_test_constants", + "../test:wait_until", "../test/time_controller:time_controller", "utility:utility_tests", + "//testing/gmock", "//testing/gtest", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -230,13 +260,16 @@ if (rtc_include_tests) { sources = [ "channel_receive_unittest.cc" ] deps = [ ":audio", + "../api:mock_frame_transformer", + "../api/audio:audio_device", "../api/audio_codecs:builtin_audio_decoder_factory", "../api/crypto:frame_decryptor_interface", + "../api/environment:environment_factory", "../api/task_queue:default_task_queue_factory", "../logging:mocks", - "../modules/audio_device:audio_device_api", "../modules/audio_device:mock_audio_device", "../modules/rtp_rtcp", + "../modules/rtp_rtcp:ntp_time_util", "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:logging", "../rtc_base:threading", @@ -244,7 +277,7 @@ if (rtc_include_tests) { "../test:mock_transport", "../test:test_support", "../test/time_controller", + "//third_party/abseil-cpp/absl/strings", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } } diff --git a/audio/audio_receive_stream.cc b/audio/audio_receive_stream.cc index 978bbb25b2..344c4b4428 100644 --- a/audio/audio_receive_stream.cc +++ b/audio/audio_receive_stream.cc @@ -10,22 +10,36 @@ #include "audio/audio_receive_stream.h" +#include +#include +#include +#include +#include #include #include +#include -#include "absl/memory/memory.h" +#include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/audio/audio_frame.h" +#include "api/audio/audio_mixer.h" #include "api/audio_codecs/audio_format.h" #include "api/call/audio_sink.h" -#include "api/rtp_parameters.h" +#include "api/environment/environment.h" +#include "api/frame_transformer_interface.h" +#include "api/neteq/neteq_factory.h" +#include "api/rtp_headers.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/transport/rtp/rtp_source.h" #include "audio/audio_send_stream.h" #include "audio/audio_state.h" #include "audio/channel_receive.h" #include "audio/conversion.h" +#include "call/audio_state.h" #include "call/rtp_config.h" #include "call/rtp_stream_receiver_controller_interface.h" -#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "call/syncable.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" @@ -35,17 +49,21 @@ namespace webrtc { std::string AudioReceiveStreamInterface::Config::Rtp::ToString() const { char ss_buf[1024]; - rtc::SimpleStringBuilder ss(ss_buf); + 
SimpleStringBuilder ss(ss_buf); ss << "{remote_ssrc: " << remote_ssrc; ss << ", local_ssrc: " << local_ssrc; ss << ", nack: " << nack.ToString(); + ss << ", rtcp: " + << (rtcp_mode == RtcpMode::kCompound + ? "compound" + : (rtcp_mode == RtcpMode::kReducedSize ? "reducedSize" : "off")); ss << '}'; return ss.str(); } std::string AudioReceiveStreamInterface::Config::ToString() const { char ss_buf[1024]; - rtc::SimpleStringBuilder ss(ss_buf); + SimpleStringBuilder ss(ss_buf); ss << "{rtp: " << rtp.ToString(); ss << ", rtcp_send_transport: " << (rtcp_send_transport ? "(Transport)" : "null"); @@ -58,53 +76,46 @@ std::string AudioReceiveStreamInterface::Config::ToString() const { namespace { std::unique_ptr CreateChannelReceive( - Clock* clock, + const Environment& env, webrtc::AudioState* audio_state, NetEqFactory* neteq_factory, - const webrtc::AudioReceiveStreamInterface::Config& config, - RtcEventLog* event_log) { + const webrtc::AudioReceiveStreamInterface::Config& config) { RTC_DCHECK(audio_state); internal::AudioState* internal_audio_state = static_cast(audio_state); return voe::CreateChannelReceive( - clock, neteq_factory, internal_audio_state->audio_device_module(), - config.rtcp_send_transport, event_log, config.rtp.local_ssrc, - config.rtp.remote_ssrc, config.jitter_buffer_max_packets, - config.jitter_buffer_fast_accelerate, config.jitter_buffer_min_delay_ms, - config.enable_non_sender_rtt, config.decoder_factory, - config.codec_pair_id, std::move(config.frame_decryptor), - config.crypto_options, std::move(config.frame_transformer)); + env, neteq_factory, internal_audio_state->audio_device_module(), + config.rtcp_send_transport, config.rtp.local_ssrc, config.rtp.remote_ssrc, + config.jitter_buffer_max_packets, config.jitter_buffer_fast_accelerate, + config.jitter_buffer_min_delay_ms, config.enable_non_sender_rtt, + config.decoder_factory, config.codec_pair_id, + std::move(config.frame_decryptor), config.crypto_options, + std::move(config.frame_transformer)); } } // namespace AudioReceiveStreamImpl::AudioReceiveStreamImpl( - Clock* clock, + const Environment& env, PacketRouter* packet_router, NetEqFactory* neteq_factory, const webrtc::AudioReceiveStreamInterface::Config& config, - const rtc::scoped_refptr& audio_state, - webrtc::RtcEventLog* event_log) - : AudioReceiveStreamImpl(clock, - packet_router, - config, - audio_state, - event_log, - CreateChannelReceive(clock, - audio_state.get(), - neteq_factory, - config, - event_log)) {} + const scoped_refptr& audio_state) + : AudioReceiveStreamImpl( + env, + packet_router, + config, + audio_state, + CreateChannelReceive(env, audio_state.get(), neteq_factory, config)) { +} AudioReceiveStreamImpl::AudioReceiveStreamImpl( - Clock* clock, + const Environment& /* env */, PacketRouter* packet_router, const webrtc::AudioReceiveStreamInterface::Config& config, - const rtc::scoped_refptr& audio_state, - webrtc::RtcEventLog* event_log, + const scoped_refptr& audio_state, std::unique_ptr channel_receive) : config_(config), audio_state_(audio_state), - source_tracker_(clock), channel_receive_(std::move(channel_receive)) { RTC_LOG(LS_INFO) << "AudioReceiveStreamImpl: " << config.rtp.remote_ssrc; RTC_DCHECK(config.decoder_factory); @@ -116,16 +127,12 @@ AudioReceiveStreamImpl::AudioReceiveStreamImpl( // Configure bandwidth estimation. 
channel_receive_->RegisterReceiverCongestionControlObjects(packet_router); - // When output is muted, ChannelReceive will directly notify the source - // tracker of "delivered" frames, so RtpReceiver information will continue to - // be updated. - channel_receive_->SetSourceTracker(&source_tracker_); - // Complete configuration. // TODO(solenberg): Config NACK history window (which is a packet count), // using the actual packet size for the configured codec. channel_receive_->SetNACKStatus(config.rtp.nack.rtp_history_ms != 0, config.rtp.nack.rtp_history_ms / 20); + channel_receive_->SetRtcpMode(config.rtp.rtcp_mode); channel_receive_->SetReceiveCodecs(config.decoder_map); // `frame_transformer` and `frame_decryptor` have been given to // `channel_receive_` already. @@ -135,7 +142,6 @@ AudioReceiveStreamImpl::~AudioReceiveStreamImpl() { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_LOG(LS_INFO) << "~AudioReceiveStreamImpl: " << remote_ssrc(); Stop(); - channel_receive_->SetAssociatedSendChannel(nullptr); channel_receive_->ResetReceiverCongestionControlObjects(); } @@ -183,6 +189,7 @@ void AudioReceiveStreamImpl::Start() { if (playing_) { return; } + RTC_LOG(LS_INFO) << "AudioReceiveStreamImpl::Start: " << remote_ssrc(); channel_receive_->StartPlayout(); playing_ = true; audio_state()->AddReceivingStream(this); @@ -193,6 +200,7 @@ void AudioReceiveStreamImpl::Stop() { if (!playing_) { return; } + RTC_LOG(LS_INFO) << "AudioReceiveStreamImpl::Stop: " << remote_ssrc(); channel_receive_->StopPlayout(); playing_ = false; audio_state()->RemoveReceivingStream(this); @@ -204,7 +212,7 @@ bool AudioReceiveStreamImpl::IsRunning() const { } void AudioReceiveStreamImpl::SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) { + scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); channel_receive_->SetDepacketizerToDecoderFrameTransformer( std::move(frame_transformer)); @@ -230,6 +238,16 @@ void AudioReceiveStreamImpl::SetNackHistory(int history_ms) { channel_receive_->SetNACKStatus(history_ms != 0, history_ms / 20); } +void AudioReceiveStreamImpl::SetRtcpMode(webrtc::RtcpMode mode) { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + + if (config_.rtp.rtcp_mode == mode) + return; + + config_.rtp.rtcp_mode = mode; + channel_receive_->SetRtcpMode(mode); +} + void AudioReceiveStreamImpl::SetNonSenderRttMeasurement(bool enabled) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.enable_non_sender_rtt = enabled; @@ -237,7 +255,7 @@ void AudioReceiveStreamImpl::SetNonSenderRttMeasurement(bool enabled) { } void AudioReceiveStreamImpl::SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) { + scoped_refptr frame_decryptor) { // TODO(bugs.webrtc.org/11993): This is called via WebRtcAudioReceiveStream, // expect to be called on the network thread. RTC_DCHECK_RUN_ON(&worker_thread_checker_); @@ -250,36 +268,41 @@ webrtc::AudioReceiveStreamInterface::Stats AudioReceiveStreamImpl::GetStats( webrtc::AudioReceiveStreamInterface::Stats stats; stats.remote_ssrc = remote_ssrc(); - webrtc::CallReceiveStatistics call_stats = - channel_receive_->GetRTCPStatistics(); - // TODO(solenberg): Don't return here if we can't get the codec - return the - // stats we *can* get. 
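// A small caller-side sketch of the new SetRtcpMode() entry point added
// above; it uses only the AudioReceiveStreamInterface surface shown in this
// change, and the helper name is illustrative.
#include "call/audio_receive_stream.h"

void EnableReducedSizeRtcp(webrtc::AudioReceiveStreamInterface& stream) {
  // The setter early-returns when the mode is unchanged, so calling it on
  // every renegotiation is a cheap no-op.
  stream.SetRtcpMode(webrtc::RtcpMode::kReducedSize);
}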
auto receive_codec = channel_receive_->GetReceiveCodec(); - if (!receive_codec) { - return stats; + if (receive_codec) { + stats.codec_name = receive_codec->second.name; + stats.codec_payload_type = receive_codec->first; } + webrtc::CallReceiveStatistics call_stats = + channel_receive_->GetRTCPStatistics(); stats.payload_bytes_received = call_stats.payload_bytes_received; stats.header_and_padding_bytes_received = call_stats.header_and_padding_bytes_received; - stats.packets_received = call_stats.packetsReceived; - stats.packets_lost = call_stats.cumulativeLost; + stats.packets_received = call_stats.packets_received; + stats.packets_lost = call_stats.packets_lost; + stats.jitter_ms = call_stats.jitter_ms; stats.nacks_sent = call_stats.nacks_sent; - stats.capture_start_ntp_time_ms = call_stats.capture_start_ntp_time_ms_; + stats.capture_start_ntp_time_ms = call_stats.capture_start_ntp_time_ms; stats.last_packet_received = call_stats.last_packet_received; - stats.codec_name = receive_codec->second.name; - stats.codec_payload_type = receive_codec->first; - int clockrate_khz = receive_codec->second.clockrate_hz / 1000; - if (clockrate_khz > 0) { - stats.jitter_ms = call_stats.jitterSamples / clockrate_khz; - } + stats.last_sender_report_timestamp = call_stats.last_sender_report_timestamp; + stats.last_sender_report_utc_timestamp = + call_stats.last_sender_report_utc_timestamp; + stats.last_sender_report_remote_utc_timestamp = + call_stats.last_sender_report_remote_utc_timestamp; + stats.sender_reports_packets_sent = call_stats.sender_reports_packets_sent; + stats.sender_reports_bytes_sent = call_stats.sender_reports_bytes_sent; + stats.sender_reports_reports_count = call_stats.sender_reports_reports_count; + stats.round_trip_time = call_stats.round_trip_time; + stats.round_trip_time_measurements = call_stats.round_trip_time_measurements; + stats.total_round_trip_time = call_stats.total_round_trip_time; + stats.delay_estimate_ms = channel_receive_->GetDelayEstimate(); stats.audio_level = channel_receive_->GetSpeechOutputLevelFullRange(); stats.total_output_energy = channel_receive_->GetTotalOutputEnergy(); stats.total_output_duration = channel_receive_->GetTotalOutputDuration(); stats.estimated_playout_ntp_timestamp_ms = - channel_receive_->GetCurrentEstimatedPlayoutNtpTimestampMs( - rtc::TimeMillis()); + channel_receive_->GetCurrentEstimatedPlayoutNtpTimestampMs(TimeMillis()); // Get jitter buffer and total delay (alg + jitter + playout) stats. 
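// Unit recap for the stats plumbing below, restated with hypothetical local
// helpers (not WebRTC API): NetEq reports delays in milliseconds or
// microseconds and rates in Q14 fixed-point, while Stats exposes seconds and
// floats in [0, 1].
#include <cstdint>

constexpr double MsToSeconds(int64_t ms) { return ms / 1000.0; }
constexpr double UsToSeconds(int64_t us) { return us / 1'000'000.0; }
constexpr float Q14ToUnitFloat(uint16_t q14) {
  // Q14: 16384 represents 1.0.
  return static_cast<float>(q14) / (1 << 14);
}
// e.g. UsToSeconds(20'000) == 0.02 and Q14ToUnitFloat(8192) == 0.5f.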
auto ns = channel_receive_->GetNetworkStatistics(get_and_clear_legacy_stats); @@ -294,16 +317,19 @@ webrtc::AudioReceiveStreamInterface::Stats AudioReceiveStreamImpl::GetStats( stats.concealment_events = ns.concealmentEvents; stats.jitter_buffer_delay_seconds = static_cast(ns.jitterBufferDelayMs) / - static_cast(rtc::kNumMillisecsPerSec); + static_cast(kNumMillisecsPerSec); stats.jitter_buffer_emitted_count = ns.jitterBufferEmittedCount; stats.jitter_buffer_target_delay_seconds = static_cast(ns.jitterBufferTargetDelayMs) / - static_cast(rtc::kNumMillisecsPerSec); + static_cast(kNumMillisecsPerSec); stats.jitter_buffer_minimum_delay_seconds = static_cast(ns.jitterBufferMinimumDelayMs) / - static_cast(rtc::kNumMillisecsPerSec); + static_cast(kNumMillisecsPerSec); stats.inserted_samples_for_deceleration = ns.insertedSamplesForDeceleration; stats.removed_samples_for_acceleration = ns.removedSamplesForAcceleration; + stats.total_processing_delay_seconds = + static_cast(ns.totalProcessingDelayUs) / + static_cast(kNumMicrosecsPerSec); stats.expand_rate = Q14ToFloat(ns.currentExpandRate); stats.speech_expand_rate = Q14ToFloat(ns.currentSpeechExpandRate); stats.secondary_decoded_rate = Q14ToFloat(ns.currentSecondaryDecodedRate); @@ -314,7 +340,7 @@ webrtc::AudioReceiveStreamInterface::Stats AudioReceiveStreamImpl::GetStats( stats.delayed_packet_outage_samples = ns.delayedPacketOutageSamples; stats.relative_packet_arrival_delay_seconds = static_cast(ns.relativePacketArrivalDelayMs) / - static_cast(rtc::kNumMillisecsPerSec); + static_cast(kNumMillisecsPerSec); stats.interruption_count = ns.interruptionCount; stats.total_interruption_duration_ms = ns.totalInterruptionDurationMs; @@ -328,17 +354,6 @@ webrtc::AudioReceiveStreamInterface::Stats AudioReceiveStreamImpl::GetStats( stats.decoding_plc_cng = ds.decoded_plc_cng; stats.decoding_muted_output = ds.decoded_muted_output; - stats.last_sender_report_timestamp_ms = - call_stats.last_sender_report_timestamp_ms; - stats.last_sender_report_remote_timestamp_ms = - call_stats.last_sender_report_remote_timestamp_ms; - stats.sender_reports_packets_sent = call_stats.sender_reports_packets_sent; - stats.sender_reports_bytes_sent = call_stats.sender_reports_bytes_sent; - stats.sender_reports_reports_count = call_stats.sender_reports_reports_count; - stats.round_trip_time = call_stats.round_trip_time; - stats.round_trip_time_measurements = call_stats.round_trip_time_measurements; - stats.total_round_trip_time = call_stats.total_round_trip_time; - return stats; } @@ -364,19 +379,13 @@ int AudioReceiveStreamImpl::GetBaseMinimumPlayoutDelayMs() const { std::vector AudioReceiveStreamImpl::GetSources() const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - return source_tracker_.GetSources(); + return channel_receive_->GetSources(); } AudioMixer::Source::AudioFrameInfo AudioReceiveStreamImpl::GetAudioFrameWithInfo(int sample_rate_hz, AudioFrame* audio_frame) { - AudioMixer::Source::AudioFrameInfo audio_frame_info = - channel_receive_->GetAudioFrameWithInfo(sample_rate_hz, audio_frame); - if (audio_frame_info != AudioMixer::Source::AudioFrameInfo::kError && - !audio_frame->packet_infos_.empty()) { - source_tracker_.OnFrameDelivered(audio_frame->packet_infos_); - } - return audio_frame_info; + return channel_receive_->GetAudioFrameWithInfo(sample_rate_hz, audio_frame); } int AudioReceiveStreamImpl::Ssrc() const { @@ -392,7 +401,7 @@ uint32_t AudioReceiveStreamImpl::id() const { return remote_ssrc(); } -absl::optional AudioReceiveStreamImpl::GetInfo() const { 
+std::optional AudioReceiveStreamImpl::GetInfo() const { // TODO(bugs.webrtc.org/11993): This is called via RtpStreamsSynchronizer, // expect to be called on the network thread. RTC_DCHECK_RUN_ON(&worker_thread_checker_); @@ -420,19 +429,8 @@ bool AudioReceiveStreamImpl::SetMinimumPlayoutDelay(int delay_ms) { return channel_receive_->SetMinimumPlayoutDelay(delay_ms); } -void AudioReceiveStreamImpl::AssociateSendStream( - internal::AudioSendStream* send_stream) { - RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - channel_receive_->SetAssociatedSendChannel( - send_stream ? send_stream->GetChannel() : nullptr); - associated_send_stream_ = send_stream; -} - void AudioReceiveStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) { - // TODO(solenberg): Tests call this function on a network thread, libjingle - // calls on the worker thread. We should move towards always using a network - // thread. Then this check can be enabled. - // RTC_DCHECK(!thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); channel_receive_->ReceivedRTCPPacket(packet, length); } @@ -450,7 +448,6 @@ void AudioReceiveStreamImpl::SetLocalSsrc(uint32_t local_ssrc) { uint32_t AudioReceiveStreamImpl::local_ssrc() const { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - RTC_DCHECK_EQ(config_.rtp.local_ssrc, channel_receive_->GetLocalSsrc()); return config_.rtp.local_ssrc; } @@ -459,12 +456,6 @@ const std::string& AudioReceiveStreamImpl::sync_group() const { return config_.sync_group; } -const AudioSendStream* -AudioReceiveStreamImpl::GetAssociatedSendStreamForTesting() const { - RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - return associated_send_stream_; -} - internal::AudioState* AudioReceiveStreamImpl::audio_state() const { auto* audio_state = static_cast(audio_state_.get()); RTC_DCHECK(audio_state); diff --git a/audio/audio_receive_stream.h b/audio/audio_receive_stream.h index db49631638..bc248120ef 100644 --- a/audio/audio_receive_stream.h +++ b/audio/audio_receive_stream.h @@ -11,26 +11,35 @@ #ifndef AUDIO_AUDIO_RECEIVE_STREAM_H_ #define AUDIO_AUDIO_RECEIVE_STREAM_H_ +#include +#include #include #include +#include #include #include #include "absl/strings/string_view.h" +#include "api/audio/audio_frame.h" #include "api/audio/audio_mixer.h" +#include "api/audio_codecs/audio_format.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/environment/environment.h" +#include "api/frame_transformer_interface.h" #include "api/neteq/neteq_factory.h" #include "api/rtp_headers.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/transport/rtp/rtp_source.h" #include "audio/audio_state.h" #include "call/audio_receive_stream.h" +#include "call/audio_state.h" #include "call/syncable.h" -#include "modules/rtp_rtcp/source/source_tracker.h" #include "rtc_base/system/no_unique_address.h" -#include "system_wrappers/include/clock.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { class PacketRouter; -class RtcEventLog; class RtpStreamReceiverControllerInterface; class RtpStreamReceiverInterface; @@ -38,28 +47,22 @@ namespace voe { class ChannelReceiveInterface; } // namespace voe -namespace internal { -class AudioSendStream; -} // namespace internal - class AudioReceiveStreamImpl final : public webrtc::AudioReceiveStreamInterface, public AudioMixer::Source, public Syncable { public: AudioReceiveStreamImpl( - Clock* clock, + const Environment& env, PacketRouter* packet_router, NetEqFactory* neteq_factory, const webrtc::AudioReceiveStreamInterface::Config& 
config, - const rtc::scoped_refptr& audio_state, - webrtc::RtcEventLog* event_log); + const scoped_refptr& audio_state); // For unit tests, which need to supply a mock channel receive. AudioReceiveStreamImpl( - Clock* clock, + const Environment& env, PacketRouter* packet_router, const webrtc::AudioReceiveStreamInterface::Config& config, - const rtc::scoped_refptr& audio_state, - webrtc::RtcEventLog* event_log, + const scoped_refptr& audio_state, std::unique_ptr channel_receive); AudioReceiveStreamImpl() = delete; @@ -87,13 +90,14 @@ class AudioReceiveStreamImpl final : public webrtc::AudioReceiveStreamInterface, void Stop() override; bool IsRunning() const override; void SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) + scoped_refptr frame_transformer) override; void SetDecoderMap(std::map decoder_map) override; void SetNackHistory(int history_ms) override; + void SetRtcpMode(RtcpMode mode) override; void SetNonSenderRttMeasurement(bool enabled) override; - void SetFrameDecryptor(rtc::scoped_refptr - frame_decryptor) override; + void SetFrameDecryptor( + scoped_refptr frame_decryptor) override; webrtc::AudioReceiveStreamInterface::Stats GetStats( bool get_and_clear_legacy_stats) const override; @@ -102,6 +106,7 @@ class AudioReceiveStreamImpl final : public webrtc::AudioReceiveStreamInterface, bool SetBaseMinimumPlayoutDelayMs(int delay_ms) override; int GetBaseMinimumPlayoutDelayMs() const override; std::vector GetSources() const override; + AudioMixer::Source* source() override { return this; } // AudioMixer::Source AudioFrameInfo GetAudioFrameWithInfo(int sample_rate_hz, @@ -111,14 +116,13 @@ class AudioReceiveStreamImpl final : public webrtc::AudioReceiveStreamInterface, // Syncable uint32_t id() const override; - absl::optional GetInfo() const override; + std::optional GetInfo() const override; bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, int64_t* time_ms) const override; void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, int64_t time_ms) override; bool SetMinimumPlayoutDelay(int delay_ms) override; - void AssociateSendStream(internal::AudioSendStream* send_stream); void DeliverRtcp(const uint8_t* packet, size_t length); void SetSyncGroup(absl::string_view sync_group); @@ -137,8 +141,6 @@ class AudioReceiveStreamImpl final : public webrtc::AudioReceiveStreamInterface, // Must be called on the packet delivery thread. const std::string& sync_group() const; - const AudioSendStream* GetAssociatedSendStreamForTesting() const; - // TODO(tommi): Remove this method. 
void ReconfigureForTesting( const webrtc::AudioReceiveStreamInterface::Config& config); @@ -157,11 +159,8 @@ class AudioReceiveStreamImpl final : public webrtc::AudioReceiveStreamInterface, RTC_NO_UNIQUE_ADDRESS SequenceChecker packet_sequence_checker_{ SequenceChecker::kDetached}; webrtc::AudioReceiveStreamInterface::Config config_; - rtc::scoped_refptr audio_state_; - SourceTracker source_tracker_; + scoped_refptr audio_state_; const std::unique_ptr channel_receive_; - AudioSendStream* associated_send_stream_ - RTC_GUARDED_BY(packet_sequence_checker_) = nullptr; bool playing_ RTC_GUARDED_BY(worker_thread_checker_) = false; diff --git a/audio/audio_receive_stream_unittest.cc b/audio/audio_receive_stream_unittest.cc index 451d5f9b91..35d846b241 100644 --- a/audio/audio_receive_stream_unittest.cc +++ b/audio/audio_receive_stream_unittest.cc @@ -10,22 +10,34 @@ #include "audio/audio_receive_stream.h" +#include +#include #include -#include +#include #include #include +#include "api/audio_codecs/audio_format.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/environment/environment_factory.h" +#include "api/make_ref_counted.h" +#include "api/rtp_headers.h" +#include "api/scoped_refptr.h" #include "api/test/mock_audio_mixer.h" #include "api/test/mock_frame_decryptor.h" +#include "audio/channel_receive.h" #include "audio/conversion.h" #include "audio/mock_voe_channel_proxy.h" +#include "call/audio_receive_stream.h" +#include "call/audio_state.h" #include "call/rtp_stream_receiver_controller.h" -#include "logging/rtc_event_log/mock/mock_rtc_event_log.h" +#include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/audio_device/include/mock_audio_device.h" #include "modules/audio_processing/include/mock_audio_processing.h" #include "modules/pacing/packet_router.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "rtc_base/time_utils.h" +#include "test/gmock.h" #include "test/gtest.h" #include "test/mock_audio_decoder_factory.h" #include "test/mock_transport.h" @@ -82,6 +94,7 @@ const NetworkStatistics kNetworkStats = { /*removedSamplesForAcceleration=*/321, /*fecPacketsReceived=*/123, /*fecPacketsDiscarded=*/101, + /*totalProcessingDelayMs=*/154, /*packetsDiscarded=*/989, /*currentExpandRate=*/789, /*currentSpeechExpandRate=*/12, @@ -100,10 +113,10 @@ const AudioDecodingCallStats kAudioDecodeStats = MakeAudioDecodeStatsForTest(); struct ConfigHelper { explicit ConfigHelper(bool use_null_audio_processing) - : ConfigHelper(rtc::make_ref_counted(), + : ConfigHelper(make_ref_counted(), use_null_audio_processing) {} - ConfigHelper(rtc::scoped_refptr audio_mixer, + ConfigHelper(scoped_refptr audio_mixer, bool use_null_audio_processing) : audio_mixer_(audio_mixer) { using ::testing::Invoke; @@ -113,46 +126,42 @@ struct ConfigHelper { config.audio_processing = use_null_audio_processing ? 
nullptr - : rtc::make_ref_counted>(); + : make_ref_counted>(); config.audio_device_module = - rtc::make_ref_counted>(); + make_ref_counted>(); audio_state_ = AudioState::Create(config); channel_receive_ = new ::testing::StrictMock(); EXPECT_CALL(*channel_receive_, SetNACKStatus(true, 15)).Times(1); + EXPECT_CALL(*channel_receive_, SetRtcpMode(_)).Times(1); EXPECT_CALL(*channel_receive_, RegisterReceiverCongestionControlObjects(&packet_router_)) .Times(1); EXPECT_CALL(*channel_receive_, ResetReceiverCongestionControlObjects()) .Times(1); - EXPECT_CALL(*channel_receive_, SetAssociatedSendChannel(nullptr)).Times(1); EXPECT_CALL(*channel_receive_, SetReceiveCodecs(_)) .WillRepeatedly(Invoke([](const std::map& codecs) { EXPECT_THAT(codecs, ::testing::IsEmpty()); })); - EXPECT_CALL(*channel_receive_, SetSourceTracker(_)); - EXPECT_CALL(*channel_receive_, GetLocalSsrc()) - .WillRepeatedly(Return(kLocalSsrc)); stream_config_.rtp.local_ssrc = kLocalSsrc; stream_config_.rtp.remote_ssrc = kRemoteSsrc; stream_config_.rtp.nack.rtp_history_ms = 300; stream_config_.rtcp_send_transport = &rtcp_send_transport_; stream_config_.decoder_factory = - rtc::make_ref_counted(); + make_ref_counted(); } std::unique_ptr CreateAudioReceiveStream() { auto ret = std::make_unique( - Clock::GetRealTimeClock(), &packet_router_, stream_config_, - audio_state_, &event_log_, + CreateEnvironment(), &packet_router_, stream_config_, audio_state_, std::unique_ptr(channel_receive_)); ret->RegisterWithTransport(&rtp_stream_receiver_controller_); return ret; } AudioReceiveStreamInterface::Config& config() { return stream_config_; } - rtc::scoped_refptr audio_mixer() { return audio_mixer_; } + scoped_refptr audio_mixer() { return audio_mixer_; } MockChannelReceive* channel_receive() { return channel_receive_; } void SetupMockForGetStats() { @@ -182,16 +191,15 @@ struct ConfigHelper { private: PacketRouter packet_router_; - MockRtcEventLog event_log_; - rtc::scoped_refptr audio_state_; - rtc::scoped_refptr audio_mixer_; + scoped_refptr audio_state_; + scoped_refptr audio_mixer_; AudioReceiveStreamInterface::Config stream_config_; ::testing::StrictMock* channel_receive_ = nullptr; RtpStreamReceiverController rtp_stream_receiver_controller_; MockTransport rtcp_send_transport_; }; -const std::vector CreateRtcpSenderReport() { +std::vector CreateRtcpSenderReport() { std::vector packet; const size_t kRtcpSrLength = 28; // In bytes. 
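// Why kRtcpSrLength is 28 bytes: an RTCP Sender Report with no report blocks
// (RFC 3550, section 6.4.1) is a 4-byte common header, the 4-byte sender
// SSRC, and 20 bytes of sender info (8-byte NTP timestamp, 4-byte RTP
// timestamp, 4-byte packet count, 4-byte octet count): 4 + 4 + 20 = 28.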
packet.resize(kRtcpSrLength); @@ -208,9 +216,10 @@ TEST(AudioReceiveStreamTest, ConfigToString) { AudioReceiveStreamInterface::Config config; config.rtp.remote_ssrc = kRemoteSsrc; config.rtp.local_ssrc = kLocalSsrc; + config.rtp.rtcp_mode = RtcpMode::kOff; EXPECT_EQ( "{rtp: {remote_ssrc: 1234, local_ssrc: 5678, nack: " - "{rtp_history_ms: 0}}, " + "{rtp_history_ms: 0}, rtcp: off}, " "rtcp_send_transport: null}", config.ToString()); } @@ -250,13 +259,11 @@ TEST(AudioReceiveStreamTest, GetStats) { EXPECT_EQ(kCallStats.payload_bytes_received, stats.payload_bytes_received); EXPECT_EQ(kCallStats.header_and_padding_bytes_received, stats.header_and_padding_bytes_received); - EXPECT_EQ(static_cast(kCallStats.packetsReceived), + EXPECT_EQ(static_cast(kCallStats.packets_received), stats.packets_received); - EXPECT_EQ(kCallStats.cumulativeLost, stats.packets_lost); + EXPECT_EQ(kCallStats.packets_lost, stats.packets_lost); EXPECT_EQ(kReceiveCodec.second.name, stats.codec_name); - EXPECT_EQ( - kCallStats.jitterSamples / (kReceiveCodec.second.clockrate_hz / 1000), - stats.jitter_ms); + EXPECT_EQ(kCallStats.jitter_ms, stats.jitter_ms); EXPECT_EQ(kNetworkStats.currentBufferSize, stats.jitter_buffer_ms); EXPECT_EQ(kNetworkStats.preferredBufferSize, stats.jitter_buffer_preferred_ms); @@ -269,15 +276,15 @@ TEST(AudioReceiveStreamTest, GetStats) { EXPECT_EQ(kNetworkStats.concealedSamples, stats.concealed_samples); EXPECT_EQ(kNetworkStats.concealmentEvents, stats.concealment_events); EXPECT_EQ(static_cast(kNetworkStats.jitterBufferDelayMs) / - static_cast(rtc::kNumMillisecsPerSec), + static_cast(kNumMillisecsPerSec), stats.jitter_buffer_delay_seconds); EXPECT_EQ(kNetworkStats.jitterBufferEmittedCount, stats.jitter_buffer_emitted_count); EXPECT_EQ(static_cast(kNetworkStats.jitterBufferTargetDelayMs) / - static_cast(rtc::kNumMillisecsPerSec), + static_cast(kNumMillisecsPerSec), stats.jitter_buffer_target_delay_seconds); EXPECT_EQ(static_cast(kNetworkStats.jitterBufferMinimumDelayMs) / - static_cast(rtc::kNumMillisecsPerSec), + static_cast(kNumMillisecsPerSec), stats.jitter_buffer_minimum_delay_seconds); EXPECT_EQ(kNetworkStats.insertedSamplesForDeceleration, stats.inserted_samples_for_deceleration); @@ -285,6 +292,9 @@ TEST(AudioReceiveStreamTest, GetStats) { stats.removed_samples_for_acceleration); EXPECT_EQ(kNetworkStats.fecPacketsReceived, stats.fec_packets_received); EXPECT_EQ(kNetworkStats.fecPacketsDiscarded, stats.fec_packets_discarded); + EXPECT_EQ(static_cast(kNetworkStats.totalProcessingDelayUs) / + static_cast(kNumMicrosecsPerSec), + stats.total_processing_delay_seconds); EXPECT_EQ(kNetworkStats.packetsDiscarded, stats.packets_discarded); EXPECT_EQ(Q14ToFloat(kNetworkStats.currentExpandRate), stats.expand_rate); EXPECT_EQ(Q14ToFloat(kNetworkStats.currentSpeechExpandRate), @@ -301,7 +311,7 @@ TEST(AudioReceiveStreamTest, GetStats) { EXPECT_EQ(kNetworkStats.delayedPacketOutageSamples, stats.delayed_packet_outage_samples); EXPECT_EQ(static_cast(kNetworkStats.relativePacketArrivalDelayMs) / - static_cast(rtc::kNumMillisecsPerSec), + static_cast(kNumMillisecsPerSec), stats.relative_packet_arrival_delay_seconds); EXPECT_EQ(kNetworkStats.interruptionCount, stats.interruption_count); EXPECT_EQ(kNetworkStats.totalInterruptionDurationMs, @@ -317,7 +327,7 @@ TEST(AudioReceiveStreamTest, GetStats) { EXPECT_EQ(kAudioDecodeStats.decoded_plc_cng, stats.decoding_plc_cng); EXPECT_EQ(kAudioDecodeStats.decoded_muted_output, stats.decoding_muted_output); - EXPECT_EQ(kCallStats.capture_start_ntp_time_ms_, + 
EXPECT_EQ(kCallStats.capture_start_ntp_time_ms, stats.capture_start_ntp_time_ms); EXPECT_EQ(kPlayoutNtpTimestampMs, stats.estimated_playout_ntp_timestamp_ms); recv_stream->UnregisterFromTransport(); @@ -406,8 +416,8 @@ TEST(AudioReceiveStreamTest, ReconfigureWithFrameDecryptor) { auto recv_stream = helper.CreateAudioReceiveStream(); auto new_config_0 = helper.config(); - rtc::scoped_refptr mock_frame_decryptor_0( - rtc::make_ref_counted()); + scoped_refptr mock_frame_decryptor_0( + make_ref_counted()); new_config_0.frame_decryptor = mock_frame_decryptor_0; // TODO(tommi): While this changes the internal config value, it doesn't @@ -418,8 +428,8 @@ TEST(AudioReceiveStreamTest, ReconfigureWithFrameDecryptor) { recv_stream->ReconfigureForTesting(new_config_0); auto new_config_1 = helper.config(); - rtc::scoped_refptr mock_frame_decryptor_1( - rtc::make_ref_counted()); + scoped_refptr mock_frame_decryptor_1( + make_ref_counted()); new_config_1.frame_decryptor = mock_frame_decryptor_1; new_config_1.crypto_options.sframe.require_frame_encryption = true; recv_stream->ReconfigureForTesting(new_config_1); diff --git a/audio/audio_send_stream.cc b/audio/audio_send_stream.cc index 0caf59a20e..94991f76e4 100644 --- a/audio/audio_send_stream.cc +++ b/audio/audio_send_stream.cc @@ -15,6 +15,7 @@ #include #include +#include "api/audio/audio_processing.h" #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_encoder_factory.h" #include "api/audio_codecs/audio_format.h" @@ -34,23 +35,21 @@ #include "media/base/media_channel.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" #include "modules/audio_coding/codecs/red/audio_encoder_copy_red.h" -#include "modules/audio_processing/include/audio_processing.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/strings/audio_format_to_string.h" #include "rtc_base/trace_event.h" namespace webrtc { namespace { -void UpdateEventLogStreamConfig(RtcEventLog* event_log, +void UpdateEventLogStreamConfig(RtcEventLog& event_log, const AudioSendStream::Config& config, const AudioSendStream::Config* old_config) { using SendCodecSpec = AudioSendStream::Config::SendCodecSpec; // Only update if any of the things we log have changed. 
- auto payload_types_equal = [](const absl::optional& a, - const absl::optional& b) { + auto payload_types_equal = [](const std::optional& a, + const std::optional& b) { if (a.has_value() && b.has_value()) { return a->format.name == b->format.name && a->payload_type == b->payload_type; @@ -72,7 +71,7 @@ void UpdateEventLogStreamConfig(RtcEventLog* event_log, rtclog_config->codecs.emplace_back(config.send_codec_spec->format.name, config.send_codec_spec->payload_type, 0); } - event_log->Log(std::make_unique( + event_log.Log(std::make_unique( std::move(rtclog_config))); } @@ -100,63 +99,51 @@ AudioAllocationConfig::AudioAllocationConfig( namespace internal { AudioSendStream::AudioSendStream( - Clock* clock, + const Environment& env, const webrtc::AudioSendStream::Config& config, - const rtc::scoped_refptr& audio_state, - TaskQueueFactory* task_queue_factory, + const scoped_refptr& audio_state, RtpTransportControllerSendInterface* rtp_transport, BitrateAllocatorInterface* bitrate_allocator, - RtcEventLog* event_log, RtcpRttStats* rtcp_rtt_stats, - const absl::optional& suspended_rtp_state, - const FieldTrialsView& field_trials) - : AudioSendStream(clock, + const std::optional& suspended_rtp_state) + : AudioSendStream(env, config, audio_state, - task_queue_factory, rtp_transport, bitrate_allocator, - event_log, suspended_rtp_state, - voe::CreateChannelSend(clock, - task_queue_factory, + voe::CreateChannelSend(env, config.send_transport, rtcp_rtt_stats, - event_log, config.frame_encryptor.get(), config.crypto_options, config.rtp.extmap_allow_mixed, config.rtcp_report_interval_ms, config.rtp.ssrc, config.frame_transformer, - rtp_transport, - field_trials), - field_trials) {} + rtp_transport)) {} AudioSendStream::AudioSendStream( - Clock* clock, + const Environment& env, const webrtc::AudioSendStream::Config& config, - const rtc::scoped_refptr& audio_state, - TaskQueueFactory* task_queue_factory, + const scoped_refptr& audio_state, RtpTransportControllerSendInterface* rtp_transport, BitrateAllocatorInterface* bitrate_allocator, - RtcEventLog* event_log, - const absl::optional& suspended_rtp_state, - std::unique_ptr channel_send, - const FieldTrialsView& field_trials) - : clock_(clock), - field_trials_(field_trials), + const std::optional& suspended_rtp_state, + std::unique_ptr channel_send) + : env_(env), allocate_audio_without_feedback_( - field_trials_.IsEnabled("WebRTC-Audio-ABWENoTWCC")), + env_.field_trials().IsEnabled("WebRTC-Audio-ABWENoTWCC")), enable_audio_alr_probing_( - !field_trials_.IsDisabled("WebRTC-Audio-AlrProbing")), - allocation_settings_(field_trials_), + !env_.field_trials().IsDisabled("WebRTC-Audio-AlrProbing")), + allocation_settings_(env_.field_trials()), config_(Config(/*send_transport=*/nullptr)), audio_state_(audio_state), channel_send_(std::move(channel_send)), - event_log_(event_log), use_legacy_overhead_calculation_( - field_trials_.IsEnabled("WebRTC-Audio-LegacyOverhead")), + env_.field_trials().IsEnabled("WebRTC-Audio-LegacyOverhead")), + enable_priority_bitrate_( + !env_.field_trials().IsDisabled("WebRTC-Audio-PriorityBitrate")), bitrate_allocator_(bitrate_allocator), rtp_transport_(rtp_transport), rtp_rtcp_module_(channel_send_->GetRtpRtcp()), @@ -171,7 +158,6 @@ AudioSendStream::AudioSendStream( RTC_DCHECK_RUN_ON(&worker_thread_checker_); ConfigureStream(config, true, nullptr); - UpdateCachedTargetAudioBitrateConstraints(); } AudioSendStream::~AudioSendStream() { @@ -226,7 +212,7 @@ void AudioSendStream::ConfigureStream( SetParametersCallback callback) { 
RTC_LOG(LS_INFO) << "AudioSendStream::ConfigureStream: " << new_config.ToString(); - UpdateEventLogStreamConfig(event_log_, new_config, + UpdateEventLogStreamConfig(env_.event_log(), new_config, first_time ? nullptr : &config_); const auto& old_config = config_; @@ -324,10 +310,7 @@ void AudioSendStream::ConfigureStream( } // Set currently known overhead (used in ANA, opus only). - { - MutexLock lock(&overhead_per_packet_lock_); - UpdateOverheadForEncoder(); - } + UpdateOverheadPerPacket(); channel_send_->CallEncoder([this](AudioEncoder* encoder) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); @@ -335,7 +318,7 @@ void AudioSendStream::ConfigureStream( return; } frame_length_range_ = encoder->GetFrameLengthRange(); - UpdateCachedTargetAudioBitrateConstraints(); + bitrate_range_ = encoder->GetBitrateRange(); }); if (sending_) { @@ -343,9 +326,6 @@ void AudioSendStream::ConfigureStream( } config_ = new_config; - if (!first_time) { - UpdateCachedTargetAudioBitrateConstraints(); - } webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); } @@ -355,6 +335,7 @@ void AudioSendStream::Start() { if (sending_) { return; } + RTC_LOG(LS_INFO) << "AudioSendStream::Start: " << config_.rtp.ssrc; if (!config_.has_dscp && config_.min_bitrate_bps != -1 && config_.max_bitrate_bps != -1 && (allocate_audio_without_feedback_ || TransportSeqNumId(config_) != 0)) { @@ -376,7 +357,7 @@ void AudioSendStream::Stop() { if (!sending_) { return; } - + RTC_LOG(LS_INFO) << "AudioSendStream::Stop: " << config_.rtp.ssrc; RemoveBitrateObserver(); channel_send_->StopSend(); sending_ = false; @@ -487,73 +468,60 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats( void AudioSendStream::DeliverRtcp(const uint8_t* packet, size_t length) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); channel_send_->ReceivedRTCPPacket(packet, length); - - { - // Poll if overhead has changed, which it can do if ack triggers us to stop - // sending mid/rid. - MutexLock lock(&overhead_per_packet_lock_); - UpdateOverheadForEncoder(); - } - UpdateCachedTargetAudioBitrateConstraints(); + // Poll if overhead has changed, which it can do if ack triggers us to stop + // sending mid/rid. + UpdateOverheadPerPacket(); } uint32_t AudioSendStream::OnBitrateUpdated(BitrateAllocationUpdate update) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - // Pick a target bitrate between the constraints. Overrules the allocator if // it 1) allocated a bitrate of zero to disable the stream or 2) allocated a // higher than max to allow for e.g. extra FEC. - RTC_DCHECK(cached_constraints_.has_value()); - update.target_bitrate.Clamp(cached_constraints_->min, - cached_constraints_->max); - update.stable_target_bitrate.Clamp(cached_constraints_->min, - cached_constraints_->max); - + std::optional constraints = + GetMinMaxBitrateConstraints(); + if (constraints) { + update.target_bitrate.Clamp(constraints->min, constraints->max); + update.stable_target_bitrate.Clamp(constraints->min, constraints->max); + } channel_send_->OnBitrateAllocation(update); - // The amount of audio protection is not exposed by the encoder, hence // always returning 0. 
return 0; } +std::optional AudioSendStream::GetUsedRate() const { + return channel_send_->GetUsedRate(); +} + void AudioSendStream::SetTransportOverhead( int transport_overhead_per_packet_bytes) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - { - MutexLock lock(&overhead_per_packet_lock_); - transport_overhead_per_packet_bytes_ = transport_overhead_per_packet_bytes; - UpdateOverheadForEncoder(); - } - UpdateCachedTargetAudioBitrateConstraints(); + transport_overhead_per_packet_bytes_ = transport_overhead_per_packet_bytes; + UpdateOverheadPerPacket(); } -void AudioSendStream::UpdateOverheadForEncoder() { +void AudioSendStream::UpdateOverheadPerPacket() { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - size_t overhead_per_packet_bytes = GetPerPacketOverheadBytes(); + size_t overhead_per_packet_bytes = + transport_overhead_per_packet_bytes_ + + rtp_rtcp_module_->ExpectedPerPacketOverhead(); if (overhead_per_packet_ == overhead_per_packet_bytes) { return; } overhead_per_packet_ = overhead_per_packet_bytes; - channel_send_->CallEncoder([&](AudioEncoder* encoder) { encoder->OnReceivedOverhead(overhead_per_packet_bytes); }); - if (total_packet_overhead_bytes_ != overhead_per_packet_bytes) { - total_packet_overhead_bytes_ = overhead_per_packet_bytes; - if (registered_with_allocator_) { - ConfigureBitrateObserver(); - } + if (registered_with_allocator_) { + ConfigureBitrateObserver(); } + channel_send_->RegisterPacketOverhead(overhead_per_packet_bytes); } size_t AudioSendStream::TestOnlyGetPerPacketOverheadBytes() const { - MutexLock lock(&overhead_per_packet_lock_); - return GetPerPacketOverheadBytes(); -} - -size_t AudioSendStream::GetPerPacketOverheadBytes() const { - return transport_overhead_per_packet_bytes_ + - rtp_rtcp_module_->ExpectedPerPacketOverhead(); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + return overhead_per_packet_; } RtpState AudioSendStream::GetRtpState() const { @@ -594,13 +562,14 @@ bool AudioSendStream::SetupSendCodec(const Config& new_config) { const auto& spec = *new_config.send_codec_spec; RTC_DCHECK(new_config.encoder_factory); - std::unique_ptr encoder = - new_config.encoder_factory->MakeAudioEncoder( - spec.payload_type, spec.format, new_config.codec_pair_id); + std::unique_ptr encoder = new_config.encoder_factory->Create( + env_, spec.format, + {.payload_type = spec.payload_type, + .codec_pair_id = new_config.codec_pair_id}); if (!encoder) { RTC_DLOG(LS_ERROR) << "Unable to create encoder for " - << rtc::ToString(spec.format); + << absl::StrCat(spec.format); return false; } @@ -613,7 +582,7 @@ bool AudioSendStream::SetupSendCodec(const Config& new_config) { // Enable ANA if configured (currently only used by Opus). if (new_config.audio_network_adaptor_config) { if (encoder->EnableAudioNetworkAdaptor( - *new_config.audio_network_adaptor_config, event_log_)) { + *new_config.audio_network_adaptor_config, &env_.event_log())) { RTC_LOG(LS_INFO) << "Audio network adaptor enabled on SSRC " << new_config.rtp.ssrc; } else { @@ -636,26 +605,24 @@ bool AudioSendStream::SetupSendCodec(const Config& new_config) { } // Wrap the encoder in a RED encoder, if RED is enabled. + SdpAudioFormat format = spec.format; if (spec.red_payload_type) { AudioEncoderCopyRed::Config red_config; red_config.payload_type = *spec.red_payload_type; red_config.speech_encoder = std::move(encoder); encoder = std::make_unique(std::move(red_config), - field_trials_); + env_.field_trials()); + format.name = kRedCodecName; } // Set currently known overhead (used in ANA, opus only). 
- // If overhead changes later, it will be updated in UpdateOverheadForEncoder. - { - MutexLock lock(&overhead_per_packet_lock_); - size_t overhead = GetPerPacketOverheadBytes(); - if (overhead > 0) { - encoder->OnReceivedOverhead(overhead); - } + // If overhead changes later, it will be updated in UpdateOverheadPerPacket. + if (overhead_per_packet_ > 0) { + encoder->OnReceivedOverhead(overhead_per_packet_); } StoreEncoderProperties(encoder->SampleRateHz(), encoder->NumChannels()); - channel_send_->SetEncoder(new_config.send_codec_spec->payload_type, + channel_send_->SetEncoder(new_config.send_codec_spec->payload_type, format, std::move(encoder)); return true; @@ -689,7 +656,7 @@ bool AudioSendStream::ReconfigureSendCodec(const Config& new_config) { return SetupSendCodec(new_config); } - const absl::optional& new_target_bitrate_bps = + const std::optional& new_target_bitrate_bps = new_config.send_codec_spec->target_bitrate_bps; // If a bitrate has been specified for the codec, use it over the // codec's default. @@ -713,18 +680,14 @@ void AudioSendStream::ReconfigureANA(const Config& new_config) { return; } if (new_config.audio_network_adaptor_config) { - // This lock needs to be acquired before CallEncoder, since it aquires - // another lock and we need to maintain the same order at all call sites to - // avoid deadlock. - MutexLock lock(&overhead_per_packet_lock_); - size_t overhead = GetPerPacketOverheadBytes(); channel_send_->CallEncoder([&](AudioEncoder* encoder) { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); if (encoder->EnableAudioNetworkAdaptor( - *new_config.audio_network_adaptor_config, event_log_)) { + *new_config.audio_network_adaptor_config, &env_.event_log())) { RTC_LOG(LS_INFO) << "Audio network adaptor enabled on SSRC " << new_config.rtp.ssrc; - if (overhead > 0) { - encoder->OnReceivedOverhead(overhead); + if (overhead_per_packet_ > 0) { + encoder->OnReceivedOverhead(overhead_per_packet_); } } else { RTC_LOG(LS_INFO) << "Failed to enable Audio network adaptor on SSRC " @@ -829,8 +792,7 @@ void AudioSendStream::ConfigureBitrateObserver() { priority_bitrate += max_overhead; } else { RTC_DCHECK(frame_length_range_); - const DataSize overhead_per_packet = - DataSize::Bytes(total_packet_overhead_bytes_); + const DataSize overhead_per_packet = DataSize::Bytes(overhead_per_packet_); DataRate min_overhead = overhead_per_packet / frame_length_range_->second; priority_bitrate += min_overhead; } @@ -839,13 +801,18 @@ void AudioSendStream::ConfigureBitrateObserver() { priority_bitrate = *allocation_settings_.priority_bitrate_raw; } + if (!enable_priority_bitrate_) { + priority_bitrate = DataRate::BitsPerSec(0); + } + bitrate_allocator_->AddObserver( this, MediaStreamAllocationConfig{ constraints->min.bps(), constraints->max.bps(), 0, priority_bitrate.bps(), true, allocation_settings_.bitrate_priority.value_or( - config_.bitrate_priority)}); + config_.bitrate_priority), + TrackRateElasticity::kCanContributeUnusedRate}); registered_with_allocator_ = true; } @@ -855,14 +822,14 @@ void AudioSendStream::RemoveBitrateObserver() { bitrate_allocator_->RemoveObserver(this); } -absl::optional +std::optional AudioSendStream::GetMinMaxBitrateConstraints() const { if (config_.min_bitrate_bps < 0 || config_.max_bitrate_bps < 0) { RTC_LOG(LS_WARNING) << "Config is invalid: min_bitrate_bps=" << config_.min_bitrate_bps << "; max_bitrate_bps=" << config_.max_bitrate_bps << "; both expected greater or equal to 0"; - return absl::nullopt; + return std::nullopt; } TargetAudioBitrateConstraints 
constraints{ DataRate::BitsPerSec(config_.min_bitrate_bps), @@ -874,12 +841,18 @@ AudioSendStream::GetMinMaxBitrateConstraints() const { if (allocation_settings_.max_bitrate) constraints.max = *allocation_settings_.max_bitrate; + // Use encoder defined bitrate range if available. + if (bitrate_range_) { + constraints.min = bitrate_range_->first; + constraints.max = bitrate_range_->second; + } + RTC_DCHECK_GE(constraints.min, DataRate::Zero()); RTC_DCHECK_GE(constraints.max, DataRate::Zero()); if (constraints.max < constraints.min) { RTC_LOG(LS_WARNING) << "TargetAudioBitrateConstraints::max is less than " << "TargetAudioBitrateConstraints::min"; - return absl::nullopt; + return std::nullopt; } if (use_legacy_overhead_calculation_) { // OverheadPerPacket = Ipv4(20B) + UDP(8B) + SRTP(10B) + RTP(12) @@ -892,12 +865,11 @@ AudioSendStream::GetMinMaxBitrateConstraints() const { } else { if (!frame_length_range_.has_value()) { RTC_LOG(LS_WARNING) << "frame_length_range_ is not set"; - return absl::nullopt; + return std::nullopt; } - const DataSize kOverheadPerPacket = - DataSize::Bytes(total_packet_overhead_bytes_); - constraints.min += kOverheadPerPacket / frame_length_range_->second; - constraints.max += kOverheadPerPacket / frame_length_range_->first; + const DataSize overhead_per_packet = DataSize::Bytes(overhead_per_packet_); + constraints.min += overhead_per_packet / frame_length_range_->second; + constraints.max += overhead_per_packet / frame_length_range_->first; } return constraints; } @@ -907,14 +879,5 @@ void AudioSendStream::RegisterCngPayloadType(int payload_type, channel_send_->RegisterCngPayloadType(payload_type, clockrate_hz); } -void AudioSendStream::UpdateCachedTargetAudioBitrateConstraints() { - absl::optional - new_constraints = GetMinMaxBitrateConstraints(); - if (!new_constraints.has_value()) { - return; - } - cached_constraints_ = new_constraints; -} - } // namespace internal } // namespace webrtc diff --git a/audio/audio_send_stream.h b/audio/audio_send_stream.h index 62ccd524cb..c38456d5df 100644 --- a/audio/audio_send_stream.h +++ b/audio/audio_send_stream.h @@ -16,6 +16,7 @@ #include #include "absl/functional/any_invocable.h" +#include "api/environment/environment.h" #include "api/field_trials_view.h" #include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" @@ -28,10 +29,8 @@ #include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/race_checker.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_queue.h" namespace webrtc { -class RtcEventLog; class RtcpRttStats; class RtpTransportControllerSendInterface; @@ -39,13 +38,13 @@ struct AudioAllocationConfig { static constexpr char kKey[] = "WebRTC-Audio-Allocation"; // Field Trial configured bitrates to use as overrides over default/user // configured bitrate range when audio bitrate allocation is enabled. - absl::optional min_bitrate; - absl::optional max_bitrate; + std::optional min_bitrate; + std::optional max_bitrate; DataRate priority_bitrate = DataRate::Zero(); // By default the priority_bitrate is compensated for packet overhead. // Use this flag to configure a raw value instead. 
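For intuition on the constraint arithmetic in GetMinMaxBitrateConstraints above: overhead bitrate is overhead-per-packet divided by packet duration, so the encoder's frame-length range maps directly onto a min/max overhead rate. A minimal sketch using the same unit types (the byte and millisecond values are illustrative, not taken from the patch):

#include "api/units/data_rate.h"
#include "api/units/data_size.h"
#include "api/units/time_delta.h"

namespace example {

// Illustrative numbers: 50 bytes of per-packet overhead and the 20-120 ms
// frame-length range reported by the mocked encoder elsewhere in this patch.
constexpr webrtc::DataSize kOverheadPerPacket = webrtc::DataSize::Bytes(50);
constexpr webrtc::TimeDelta kShortestFrame = webrtc::TimeDelta::Millis(20);
constexpr webrtc::TimeDelta kLongestFrame = webrtc::TimeDelta::Millis(120);

// Long frames mean fewer packets per second, so they bound the minimum
// overhead rate; short frames bound the maximum.
const webrtc::DataRate kMinOverheadRate =
    kOverheadPerPacket / kLongestFrame;  // 50 B / 120 ms ~= 3.3 kbps
const webrtc::DataRate kMaxOverheadRate =
    kOverheadPerPacket / kShortestFrame;  // 50 B / 20 ms = 20 kbps

}  // namespace example

This mirrors constraints.min += overhead_per_packet / frame_length_range_->second and constraints.max += overhead_per_packet / frame_length_range_->first in the hunk above.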
- absl::optional priority_bitrate_raw; - absl::optional bitrate_priority; + std::optional priority_bitrate_raw; + std::optional bitrate_priority; std::unique_ptr Parser(); explicit AudioAllocationConfig(const FieldTrialsView& field_trials); @@ -56,27 +55,21 @@ class AudioState; class AudioSendStream final : public webrtc::AudioSendStream, public webrtc::BitrateAllocatorObserver { public: - AudioSendStream(Clock* clock, + AudioSendStream(const Environment& env, const webrtc::AudioSendStream::Config& config, - const rtc::scoped_refptr& audio_state, - TaskQueueFactory* task_queue_factory, + const scoped_refptr& audio_state, RtpTransportControllerSendInterface* rtp_transport, BitrateAllocatorInterface* bitrate_allocator, - RtcEventLog* event_log, RtcpRttStats* rtcp_rtt_stats, - const absl::optional& suspended_rtp_state, - const FieldTrialsView& field_trials); + const std::optional& suspended_rtp_state); // For unit tests, which need to supply a mock ChannelSend. - AudioSendStream(Clock* clock, + AudioSendStream(const Environment& env, const webrtc::AudioSendStream::Config& config, - const rtc::scoped_refptr& audio_state, - TaskQueueFactory* task_queue_factory, + const scoped_refptr& audio_state, RtpTransportControllerSendInterface* rtp_transport, BitrateAllocatorInterface* bitrate_allocator, - RtcEventLog* event_log, - const absl::optional& suspended_rtp_state, - std::unique_ptr channel_send, - const FieldTrialsView& field_trials); + const std::optional& suspended_rtp_state, + std::unique_ptr channel_send); AudioSendStream() = delete; AudioSendStream(const AudioSendStream&) = delete; @@ -104,6 +97,7 @@ class AudioSendStream final : public webrtc::AudioSendStream, // Implements BitrateAllocatorObserver. uint32_t OnBitrateUpdated(BitrateAllocationUpdate update) override; + std::optional GetUsedRate() const override; void SetTransportOverhead(int transport_overhead_per_packet_bytes); @@ -111,8 +105,7 @@ class AudioSendStream final : public webrtc::AudioSendStream, const voe::ChannelSendInterface* GetChannel() const; // Returns combined per-packet overhead. - size_t TestOnlyGetPerPacketOverheadBytes() const - RTC_LOCKS_EXCLUDED(overhead_per_packet_lock_); + size_t TestOnlyGetPerPacketOverheadBytes() const; private: class TimedTransport; @@ -148,29 +141,20 @@ class AudioSendStream final : public webrtc::AudioSendStream, // Returns bitrate constraints, maybe including overhead when enabled by // field trial. - absl::optional GetMinMaxBitrateConstraints() + std::optional GetMinMaxBitrateConstraints() const RTC_RUN_ON(worker_thread_checker_); // Sets per-packet overhead on encoded (for ANA) based on current known values // of transport and packetization overheads. - void UpdateOverheadForEncoder() - RTC_EXCLUSIVE_LOCKS_REQUIRED(overhead_per_packet_lock_); - - // Returns combined per-packet overhead. 
- size_t GetPerPacketOverheadBytes() const - RTC_EXCLUSIVE_LOCKS_REQUIRED(overhead_per_packet_lock_); + void UpdateOverheadPerPacket(); void RegisterCngPayloadType(int payload_type, int clockrate_hz) RTC_RUN_ON(worker_thread_checker_); - void UpdateCachedTargetAudioBitrateConstraints() - RTC_RUN_ON(worker_thread_checker_); - - Clock* clock_; - const FieldTrialsView& field_trials_; + const Environment env_; SequenceChecker worker_thread_checker_; - rtc::RaceChecker audio_capture_race_checker_; + RaceChecker audio_capture_race_checker_; const bool allocate_audio_without_feedback_; const bool force_no_audio_feedback_ = allocate_audio_without_feedback_; @@ -179,10 +163,10 @@ class AudioSendStream final : public webrtc::AudioSendStream, webrtc::AudioSendStream::Config config_ RTC_GUARDED_BY(worker_thread_checker_); - rtc::scoped_refptr audio_state_; + scoped_refptr audio_state_; const std::unique_ptr channel_send_; - RtcEventLog* const event_log_; const bool use_legacy_overhead_calculation_; + const bool enable_priority_bitrate_; int encoder_sample_rate_hz_ RTC_GUARDED_BY(worker_thread_checker_) = 0; size_t encoder_num_channels_ RTC_GUARDED_BY(worker_thread_checker_) = 0; @@ -194,13 +178,10 @@ class AudioSendStream final : public webrtc::AudioSendStream, BitrateAllocatorInterface* const bitrate_allocator_ RTC_GUARDED_BY(worker_thread_checker_); - absl::optional - cached_constraints_ RTC_GUARDED_BY(worker_thread_checker_) = - absl::nullopt; RtpTransportControllerSendInterface* const rtp_transport_; RtpRtcpInterface* const rtp_rtcp_module_; - absl::optional const suspended_rtp_state_; + std::optional const suspended_rtp_state_; // RFC 5285: Each distinct extension MUST have a unique ID. The value 0 is // reserved for padding and MUST NOT be used as a local identifier. @@ -218,18 +199,17 @@ class AudioSendStream final : public webrtc::AudioSendStream, const std::vector& extensions); static int TransportSeqNumId(const Config& config); - mutable Mutex overhead_per_packet_lock_; - size_t overhead_per_packet_ RTC_GUARDED_BY(overhead_per_packet_lock_) = 0; - // Current transport overhead (ICE, TURN, etc.) size_t transport_overhead_per_packet_bytes_ - RTC_GUARDED_BY(overhead_per_packet_lock_) = 0; + RTC_GUARDED_BY(worker_thread_checker_) = 0; + // Total overhead, including transport and RTP headers. 
+ size_t overhead_per_packet_ RTC_GUARDED_BY(worker_thread_checker_) = 0; bool registered_with_allocator_ RTC_GUARDED_BY(worker_thread_checker_) = false; - size_t total_packet_overhead_bytes_ RTC_GUARDED_BY(worker_thread_checker_) = - 0; - absl::optional> frame_length_range_ + std::optional> frame_length_range_ + RTC_GUARDED_BY(worker_thread_checker_); + std::optional> bitrate_range_ RTC_GUARDED_BY(worker_thread_checker_); }; } // namespace internal diff --git a/audio/audio_send_stream_tests.cc b/audio/audio_send_stream_tests.cc index ff95ed70e1..966cf9b84d 100644 --- a/audio/audio_send_stream_tests.cc +++ b/audio/audio_send_stream_tests.cc @@ -49,7 +49,7 @@ TEST_F(AudioSendStreamCallTest, SupportsCName) { CNameObserver() = default; private: - Action OnSendRtcp(rtc::ArrayView packet) override { + Action OnSendRtcp(ArrayView packet) override { RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(packet)); if (parser.sdes()->num_packets() > 0) { @@ -64,7 +64,7 @@ TEST_F(AudioSendStreamCallTest, SupportsCName) { void ModifyAudioConfigs(AudioSendStream::Config* send_config, std::vector* - receive_configs) override { + /* receive_configs */) override { send_config->rtp.c_name = kCName; } @@ -82,10 +82,10 @@ TEST_F(AudioSendStreamCallTest, NoExtensionsByDefault) { NoExtensionsObserver() = default; private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet; - EXPECT_TRUE(rtp_packet.Parse(packet)); // rtp packet is valid. - EXPECT_EQ(packet[0] & 0b0001'0000, 0); // extension bit not set. + EXPECT_TRUE(rtp_packet.Parse(packet)); // rtp packet is valid. + EXPECT_EQ(packet[0] & 0b0001'0000, 0); // extension bit not set. observation_complete_.Set(); return SEND_PACKET; @@ -93,7 +93,7 @@ TEST_F(AudioSendStreamCallTest, NoExtensionsByDefault) { void ModifyAudioConfigs(AudioSendStream::Config* send_config, std::vector* - receive_configs) override { + /* receive_configs */) override { send_config->rtp.extensions.clear(); } @@ -109,17 +109,16 @@ TEST_F(AudioSendStreamCallTest, SupportsAudioLevel) { class AudioLevelObserver : public AudioSendTest { public: AudioLevelObserver() : AudioSendTest() { - extensions_.Register(kAudioLevelExtensionId); + extensions_.Register(kAudioLevelExtensionId); } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet(&extensions_); EXPECT_TRUE(rtp_packet.Parse(packet)); - uint8_t audio_level = 0; - bool voice = false; - EXPECT_TRUE(rtp_packet.GetExtension(&voice, &audio_level)); - if (audio_level != 0) { + AudioLevel audio_level; + EXPECT_TRUE(rtp_packet.GetExtension(&audio_level)); + if (audio_level.level() != 0) { // Wait for at least one packet with a non-zero level. 
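The audio-level check above reads the extension into the AudioLevel value type instead of separate voice/level out-parameters. A minimal parsing sketch, assuming the AudioLevelExtension trait and the include paths below; the extension id is illustrative:

#include <cstdint>

#include "api/array_view.h"
#include "api/rtp_headers.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"

// Returns true and fills `level` if `packet` carries an audio-level header
// extension negotiated under `extension_id`.
bool ReadAudioLevel(webrtc::ArrayView<const uint8_t> packet,
                    int extension_id,
                    webrtc::AudioLevel* level) {
  webrtc::RtpHeaderExtensionMap extensions;
  extensions.Register<webrtc::AudioLevelExtension>(extension_id);
  webrtc::RtpPacketReceived rtp_packet(&extensions);
  if (!rtp_packet.Parse(packet)) {
    return false;
  }
  // level->level() is the magnitude in -dBov (0-127); level->voice_activity()
  // is the V bit.
  return rtp_packet.GetExtension<webrtc::AudioLevelExtension>(level);
}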
observation_complete_.Set(); } else { @@ -132,7 +131,7 @@ TEST_F(AudioSendStreamCallTest, SupportsAudioLevel) { void ModifyAudioConfigs(AudioSendStream::Config* send_config, std::vector* - receive_configs) override { + /* receive_configs */) override { send_config->rtp.extensions.clear(); send_config->rtp.extensions.push_back( RtpExtension(RtpExtension::kAudioLevelUri, kAudioLevelExtensionId)); @@ -158,7 +157,7 @@ class TransportWideSequenceNumberObserver : public AudioSendTest { } private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet(&extensions_); EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -174,7 +173,7 @@ class TransportWideSequenceNumberObserver : public AudioSendTest { void ModifyAudioConfigs(AudioSendStream::Config* send_config, std::vector* - receive_configs) override { + /* receive_configs */) override { send_config->rtp.extensions.clear(); send_config->rtp.extensions.push_back( RtpExtension(RtpExtension::kTransportSequenceNumberUri, @@ -204,7 +203,7 @@ TEST_F(AudioSendStreamCallTest, SendDtmf) { DtmfObserver() = default; private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -226,7 +225,7 @@ TEST_F(AudioSendStreamCallTest, SendDtmf) { void OnAudioStreamsCreated(AudioSendStream* send_stream, const std::vector& - receive_streams) override { + /* receive_streams */) override { // Need to start stream here, else DTMF events are dropped. send_stream->Start(); for (int event = kDtmfEventFirst; event <= kDtmfEventLast; ++event) { diff --git a/audio/audio_send_stream_unittest.cc b/audio/audio_send_stream_unittest.cc index d842afdfe5..336323e1e5 100644 --- a/audio/audio_send_stream_unittest.cc +++ b/audio/audio_send_stream_unittest.cc @@ -10,27 +10,47 @@ #include "audio/audio_send_stream.h" +#include +#include #include +#include #include -#include #include #include -#include "api/task_queue/default_task_queue_factory.h" +#include "api/audio/audio_frame.h" +#include "api/audio/audio_processing_statistics.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/call/bitrate_allocation.h" +#include "api/crypto/frame_encryptor_interface.h" +#include "api/environment/environment_factory.h" +#include "api/function_view.h" +#include "api/make_ref_counted.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/test/mock_frame_encryptor.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" #include "audio/audio_state.h" +#include "audio/channel_send.h" #include "audio/conversion.h" #include "audio/mock_voe_channel_proxy.h" +#include "call/audio_state.h" +#include "call/bitrate_allocator.h" +#include "call/test/mock_bitrate_allocator.h" #include "call/test/mock_rtp_transport_controller_send.h" -#include "logging/rtc_event_log/mock/mock_rtc_event_log.h" #include "modules/audio_device/include/mock_audio_device.h" #include "modules/audio_mixer/audio_mixer_impl.h" #include "modules/audio_mixer/sine_wave_generator.h" -#include "modules/audio_processing/include/audio_processing_statistics.h" #include "modules/audio_processing/include/mock_audio_processing.h" +#include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/mocks/mock_network_link_rtcp_observer.h" #include "modules/rtp_rtcp/mocks/mock_rtp_rtcp.h" -#include 
"system_wrappers/include/clock.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "test/gmock.h" #include "test/gtest.h" #include "test/mock_audio_encoder.h" #include "test/mock_audio_encoder_factory.h" @@ -51,6 +71,7 @@ using ::testing::Ne; using ::testing::NiceMock; using ::testing::Return; using ::testing::StrEq; +using ::testing::WithArg; static const float kTolerance = 0.0001f; @@ -100,49 +121,42 @@ class MockLimitObserver : public BitrateAllocator::LimitObserver { }; std::unique_ptr SetupAudioEncoderMock( - int payload_type, const SdpAudioFormat& format) { for (const auto& spec : kCodecSpecs) { if (format == spec.format) { - std::unique_ptr encoder( - new ::testing::NiceMock()); - ON_CALL(*encoder.get(), SampleRateHz()) + auto encoder = std::make_unique>(); + ON_CALL(*encoder, SampleRateHz) .WillByDefault(Return(spec.info.sample_rate_hz)); - ON_CALL(*encoder.get(), NumChannels()) + ON_CALL(*encoder, NumChannels) .WillByDefault(Return(spec.info.num_channels)); - ON_CALL(*encoder.get(), RtpTimestampRateHz()) + ON_CALL(*encoder, RtpTimestampRateHz) .WillByDefault(Return(spec.format.clockrate_hz)); - ON_CALL(*encoder.get(), GetFrameLengthRange()) - .WillByDefault(Return(absl::optional>{ - {TimeDelta::Millis(20), TimeDelta::Millis(120)}})); + ON_CALL(*encoder, GetFrameLengthRange) + .WillByDefault(Return( + std::make_pair(TimeDelta::Millis(20), TimeDelta::Millis(120)))); return encoder; } } return nullptr; } -rtc::scoped_refptr SetupEncoderFactoryMock() { - rtc::scoped_refptr factory = - rtc::make_ref_counted(); - ON_CALL(*factory.get(), GetSupportedEncoders()) +scoped_refptr SetupEncoderFactoryMock() { + scoped_refptr factory = + make_ref_counted(); + ON_CALL(*factory, GetSupportedEncoders) .WillByDefault(Return(std::vector( std::begin(kCodecSpecs), std::end(kCodecSpecs)))); - ON_CALL(*factory.get(), QueryAudioEncoder(_)) - .WillByDefault(Invoke( - [](const SdpAudioFormat& format) -> absl::optional { + ON_CALL(*factory, QueryAudioEncoder) + .WillByDefault( + [](const SdpAudioFormat& format) -> std::optional { for (const auto& spec : kCodecSpecs) { if (format == spec.format) { return spec.info; } } - return absl::nullopt; - })); - ON_CALL(*factory.get(), MakeAudioEncoderMock(_, _, _, _)) - .WillByDefault(Invoke([](int payload_type, const SdpAudioFormat& format, - absl::optional codec_pair_id, - std::unique_ptr* return_value) { - *return_value = SetupAudioEncoderMock(payload_type, format); - })); + return std::nullopt; + }); + ON_CALL(*factory, Create).WillByDefault(WithArg<1>(&SetupAudioEncoderMock)); return factory; } @@ -154,15 +168,14 @@ struct ConfigHelper { audio_processing_( use_null_audio_processing ? 
nullptr - : rtc::make_ref_counted>()), - bitrate_allocator_(&limit_observer_), + : make_ref_counted>()), audio_encoder_(nullptr) { using ::testing::Invoke; AudioState::Config config; config.audio_mixer = AudioMixerImpl::Create(); config.audio_processing = audio_processing_; - config.audio_device_module = rtc::make_ref_counted(); + config.audio_device_module = make_ref_counted(); audio_state_ = AudioState::Create(config); SetupDefaultChannelSend(audio_bwe_enabled); @@ -186,13 +199,12 @@ struct ConfigHelper { } std::unique_ptr CreateAudioSendStream() { - return std::unique_ptr( - new internal::AudioSendStream( - time_controller_.GetClock(), stream_config_, audio_state_, - time_controller_.GetTaskQueueFactory(), &rtp_transport_, - &bitrate_allocator_, &event_log_, absl::nullopt, - std::unique_ptr(channel_send_), - field_trials)); + return std::make_unique( + CreateEnvironment(&field_trials, time_controller_.GetClock(), + time_controller_.GetTaskQueueFactory()), + stream_config_, audio_state_, &rtp_transport_, &bitrate_allocator_, + std::nullopt, + std::unique_ptr(channel_send_)); } AudioSendStream::Config& config() { return stream_config_; } @@ -203,11 +215,11 @@ struct ConfigHelper { MockRtpRtcpInterface* rtp_rtcp() { return &rtp_rtcp_; } MockChannelSend* channel_send() { return channel_send_; } RtpTransportControllerSendInterface* transport() { return &rtp_transport_; } + MockBitrateAllocator* bitrate_allocator() { return &bitrate_allocator_; } static void AddBweToConfig(AudioSendStream::Config* config) { config->rtp.extensions.push_back(RtpExtension( RtpExtension::kTransportSequenceNumberUri, kTransportSequenceNumberId)); - config->send_codec_spec->transport_cc_enabled = true; } void SetupDefaultChannelSend(bool audio_bwe_enabled) { @@ -242,22 +254,22 @@ struct ConfigHelper { void SetupMockForSetupSendCodec(bool expect_set_encoder_call) { if (expect_set_encoder_call) { EXPECT_CALL(*channel_send_, SetEncoder) - .WillOnce( - [this](int payload_type, std::unique_ptr encoder) { - this->audio_encoder_ = std::move(encoder); - return true; - }); + .WillOnce([this](int /* payload_type */, + const SdpAudioFormat& /* format */, + std::unique_ptr encoder) { + this->audio_encoder_ = std::move(encoder); + return true; + }); } } void SetupMockForCallEncoder() { // Let ModifyEncoder to invoke mock audio encoder. 
EXPECT_CALL(*channel_send_, CallEncoder(_)) - .WillRepeatedly( - [this](rtc::FunctionView modifier) { - if (this->audio_encoder_) - modifier(this->audio_encoder_.get()); - }); + .WillRepeatedly([this](FunctionView modifier) { + if (this->audio_encoder_) + modifier(this->audio_encoder_.get()); + }); } void SetupMockForSendTelephoneEvent() { @@ -318,17 +330,16 @@ struct ConfigHelper { private: RealTimeController time_controller_; - rtc::scoped_refptr audio_state_; + scoped_refptr audio_state_; AudioSendStream::Config stream_config_; ::testing::StrictMock* channel_send_ = nullptr; - rtc::scoped_refptr audio_processing_; + scoped_refptr audio_processing_; AudioProcessingStats audio_processing_stats_; ::testing::StrictMock rtcp_observer_; - ::testing::NiceMock event_log_; ::testing::NiceMock rtp_transport_; ::testing::NiceMock rtp_rtcp_; ::testing::NiceMock limit_observer_; - BitrateAllocator bitrate_allocator_; + ::testing::NiceMock bitrate_allocator_; std::unique_ptr audio_encoder_; }; @@ -361,7 +372,6 @@ TEST(AudioSendStreamTest, ConfigToString) { config.send_codec_spec = AudioSendStream::Config::SendCodecSpec(kIsacPayloadType, kIsacFormat); config.send_codec_spec->nack_enabled = true; - config.send_codec_spec->transport_cc_enabled = false; config.send_codec_spec->cng_payload_type = 42; config.send_codec_spec->red_payload_type = 43; config.encoder_factory = MockAudioEncoderFactory::CreateUnusedFactory(); @@ -376,7 +386,7 @@ TEST(AudioSendStreamTest, ConfigToString) { "send_transport: null, " "min_bitrate_bps: 12000, max_bitrate_bps: 34000, has " "audio_network_adaptor_config: false, has_dscp: true, " - "send_codec_spec: {nack_enabled: true, transport_cc_enabled: false, " + "send_codec_spec: {nack_enabled: true, " "enable_non_sender_rtt: false, cng_payload_type: 42, " "red_payload_type: 43, payload_type: 103, " "format: {name: isac, clockrate_hz: 16000, num_channels: 1, " @@ -520,19 +530,17 @@ TEST(AudioSendStreamTest, SendCodecAppliesAudioNetworkAdaptor) { helper.config().audio_network_adaptor_config = kAnaConfigString; - EXPECT_CALL(helper.mock_encoder_factory(), MakeAudioEncoderMock(_, _, _, _)) - .WillOnce(Invoke([&kAnaConfigString, &kAnaReconfigString]( - int payload_type, const SdpAudioFormat& format, - absl::optional codec_pair_id, - std::unique_ptr* return_value) { - auto mock_encoder = SetupAudioEncoderMock(payload_type, format); + EXPECT_CALL(helper.mock_encoder_factory(), Create) + .WillOnce(WithArg<1>([&kAnaConfigString, &kAnaReconfigString]( + const SdpAudioFormat& format) { + auto mock_encoder = SetupAudioEncoderMock(format); EXPECT_CALL(*mock_encoder, EnableAudioNetworkAdaptor(StrEq(kAnaConfigString), _)) .WillOnce(Return(true)); EXPECT_CALL(*mock_encoder, EnableAudioNetworkAdaptor(StrEq(kAnaReconfigString), _)) .WillOnce(Return(true)); - *return_value = std::move(mock_encoder); + return mock_encoder; })); auto send_stream = helper.CreateAudioSendStream(); @@ -551,29 +559,25 @@ TEST(AudioSendStreamTest, AudioNetworkAdaptorReceivesOverhead) { AudioSendStream::Config::SendCodecSpec(0, kOpusFormat); const std::string kAnaConfigString = "abcde"; - EXPECT_CALL(helper.mock_encoder_factory(), MakeAudioEncoderMock(_, _, _, _)) - .WillOnce(Invoke( - [&kAnaConfigString](int payload_type, const SdpAudioFormat& format, - absl::optional codec_pair_id, - std::unique_ptr* return_value) { - auto mock_encoder = SetupAudioEncoderMock(payload_type, format); - InSequence s; - EXPECT_CALL( - *mock_encoder, - OnReceivedOverhead(Eq(kOverheadPerPacket.bytes()))) - .Times(2); - 
EXPECT_CALL(*mock_encoder, - EnableAudioNetworkAdaptor(StrEq(kAnaConfigString), _)) - .WillOnce(Return(true)); - // Note: Overhead is received AFTER ANA has been enabled. - EXPECT_CALL( - *mock_encoder, - OnReceivedOverhead(Eq(kOverheadPerPacket.bytes()))) - .WillOnce(Return()); - *return_value = std::move(mock_encoder); - })); + EXPECT_CALL(helper.mock_encoder_factory(), Create) + .WillOnce(WithArg<1>([&kAnaConfigString](const SdpAudioFormat& format) { + auto mock_encoder = SetupAudioEncoderMock(format); + InSequence s; + EXPECT_CALL( + *mock_encoder, + OnReceivedOverhead(Eq(kOverheadPerPacket.bytes()))); + EXPECT_CALL(*mock_encoder, + EnableAudioNetworkAdaptor(StrEq(kAnaConfigString), _)) + .WillOnce(Return(true)); + // Note: Overhead is received AFTER ANA has been enabled. + EXPECT_CALL( + *mock_encoder, + OnReceivedOverhead(Eq(kOverheadPerPacket.bytes()))); + return mock_encoder; + })); EXPECT_CALL(*helper.rtp_rtcp(), ExpectedPerPacketOverhead) .WillRepeatedly(Return(kOverheadPerPacket.bytes())); + EXPECT_CALL(*helper.channel_send(), RegisterPacketOverhead); auto send_stream = helper.CreateAudioSendStream(); @@ -594,7 +598,8 @@ TEST(AudioSendStreamTest, SendCodecCanApplyVad) { helper.config().send_codec_spec->cng_payload_type = 105; std::unique_ptr stolen_encoder; EXPECT_CALL(*helper.channel_send(), SetEncoder) - .WillOnce([&stolen_encoder](int payload_type, + .WillOnce([&stolen_encoder](int /* payload_type */, + const SdpAudioFormat& /* format */, std::unique_ptr encoder) { stolen_encoder = std::move(encoder); return true; @@ -685,6 +690,7 @@ TEST(AudioSendStreamTest, SSBweWithOverhead) { "WebRTC-Audio-LegacyOverhead/Disabled/"); EXPECT_CALL(*helper.rtp_rtcp(), ExpectedPerPacketOverhead) .WillRepeatedly(Return(kOverheadPerPacket.bytes())); + EXPECT_CALL(*helper.channel_send(), RegisterPacketOverhead); auto send_stream = helper.CreateAudioSendStream(); const DataRate bitrate = DataRate::BitsPerSec(helper.config().max_bitrate_bps) + @@ -707,6 +713,7 @@ TEST(AudioSendStreamTest, SSBweWithOverheadMinRespected) { "WebRTC-Audio-Allocation/min:6kbps,max:64kbps/"); EXPECT_CALL(*helper.rtp_rtcp(), ExpectedPerPacketOverhead) .WillRepeatedly(Return(kOverheadPerPacket.bytes())); + EXPECT_CALL(*helper.channel_send(), RegisterPacketOverhead); auto send_stream = helper.CreateAudioSendStream(); const DataRate bitrate = DataRate::KilobitsPerSec(6) + kMinOverheadRate; EXPECT_CALL(*helper.channel_send(), @@ -727,6 +734,7 @@ TEST(AudioSendStreamTest, SSBweWithOverheadMaxRespected) { "WebRTC-Audio-Allocation/min:6kbps,max:64kbps/"); EXPECT_CALL(*helper.rtp_rtcp(), ExpectedPerPacketOverhead) .WillRepeatedly(Return(kOverheadPerPacket.bytes())); + EXPECT_CALL(*helper.channel_send(), RegisterPacketOverhead); auto send_stream = helper.CreateAudioSendStream(); const DataRate bitrate = DataRate::KilobitsPerSec(64) + kMaxOverheadRate; EXPECT_CALL(*helper.channel_send(), @@ -809,6 +817,7 @@ TEST(AudioSendStreamTest, OnTransportOverheadChanged) { // CallEncoder will be called on overhead change. 
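The factory mocks above stub Create with WithArg<1> so a single-argument helper can stand in for a three-argument factory call. A self-contained sketch of that gmock pattern; the WidgetFactory interface here is hypothetical, not a WebRTC type:

#include <memory>
#include <string>

#include "test/gmock.h"
#include "test/gtest.h"

namespace {

using ::testing::WithArg;

// Hypothetical interface with a three-argument factory method.
class WidgetFactory {
 public:
  virtual ~WidgetFactory() = default;
  virtual std::unique_ptr<std::string> Create(int env,
                                              const std::string& name,
                                              int options) = 0;
};

class MockWidgetFactory : public WidgetFactory {
 public:
  MOCK_METHOD(std::unique_ptr<std::string>,
              Create,
              (int env, const std::string& name, int options),
              (override));
};

// One-argument helper; WithArg<1> forwards only the `name` argument to it.
std::unique_ptr<std::string> MakeWidget(const std::string& name) {
  return std::make_unique<std::string>(name);
}

TEST(WithArgSketch, ForwardsSingleArgument) {
  MockWidgetFactory factory;
  ON_CALL(factory, Create).WillByDefault(WithArg<1>(&MakeWidget));
  EXPECT_CALL(factory, Create);
  auto widget = factory.Create(/*env=*/0, "opus", /*options=*/0);
  EXPECT_EQ(*widget, "opus");
}

}  // namespace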
EXPECT_CALL(*helper.channel_send(), CallEncoder); + EXPECT_CALL(*helper.channel_send(), RegisterPacketOverhead); const size_t transport_overhead_per_packet_bytes = 333; send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes); @@ -824,6 +833,8 @@ TEST(AudioSendStreamTest, DoesntCallEncoderWhenOverheadUnchanged) { auto send_stream = helper.CreateAudioSendStream(); auto new_config = helper.config(); + EXPECT_CALL(*helper.channel_send(), RegisterPacketOverhead).Times(2); + // CallEncoder will be called on overhead change. EXPECT_CALL(*helper.channel_send(), CallEncoder); const size_t transport_overhead_per_packet_bytes = 333; @@ -845,8 +856,8 @@ TEST(AudioSendStreamTest, AudioOverheadChanged) { const size_t audio_overhead_per_packet_bytes = 555; EXPECT_CALL(*helper.rtp_rtcp(), ExpectedPerPacketOverhead) .WillRepeatedly(Return(audio_overhead_per_packet_bytes)); + EXPECT_CALL(*helper.channel_send(), RegisterPacketOverhead).Times(2); auto send_stream = helper.CreateAudioSendStream(); - auto new_config = helper.config(); BitrateAllocationUpdate update; update.target_bitrate = @@ -860,6 +871,8 @@ TEST(AudioSendStreamTest, AudioOverheadChanged) { EXPECT_CALL(*helper.rtp_rtcp(), ExpectedPerPacketOverhead) .WillRepeatedly(Return(audio_overhead_per_packet_bytes + 20)); + // RTP overhead can only change in response to RTCP or configuration change. + send_stream->Reconfigure(helper.config(), nullptr); EXPECT_CALL(*helper.channel_send(), OnBitrateAllocation); send_stream->OnBitrateUpdated(update); @@ -874,6 +887,7 @@ TEST(AudioSendStreamTest, OnAudioAndTransportOverheadChanged) { const size_t audio_overhead_per_packet_bytes = 555; EXPECT_CALL(*helper.rtp_rtcp(), ExpectedPerPacketOverhead) .WillRepeatedly(Return(audio_overhead_per_packet_bytes)); + EXPECT_CALL(*helper.channel_send(), RegisterPacketOverhead).Times(2); auto send_stream = helper.CreateAudioSendStream(); auto new_config = helper.config(); @@ -901,8 +915,8 @@ TEST(AudioSendStreamTest, ReconfigureWithFrameEncryptor) { auto send_stream = helper.CreateAudioSendStream(); auto new_config = helper.config(); - rtc::scoped_refptr mock_frame_encryptor_0( - rtc::make_ref_counted()); + scoped_refptr mock_frame_encryptor_0( + make_ref_counted()); new_config.frame_encryptor = mock_frame_encryptor_0; EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(Ne(nullptr))) .Times(1); @@ -914,8 +928,8 @@ TEST(AudioSendStreamTest, ReconfigureWithFrameEncryptor) { // Updating frame encryptor to a new object should force a call to the // proxy. 
- rtc::scoped_refptr mock_frame_encryptor_1( - rtc::make_ref_counted()); + scoped_refptr mock_frame_encryptor_1( + make_ref_counted()); new_config.frame_encryptor = mock_frame_encryptor_1; new_config.crypto_options.sframe.require_frame_encryption = true; EXPECT_CALL(*helper.channel_send(), SetFrameEncryptor(Ne(nullptr))) @@ -923,5 +937,77 @@ TEST(AudioSendStreamTest, ReconfigureWithFrameEncryptor) { send_stream->Reconfigure(new_config, nullptr); } } + +TEST(AudioSendStreamTest, DefaultsHonorsPriorityBitrate) { + ConfigHelper helper(true, true, true); + ScopedKeyValueConfig field_trials(helper.field_trials, + "WebRTC-Audio-Allocation/prio_rate:20/"); + auto send_stream = helper.CreateAudioSendStream(); + EXPECT_CALL(*helper.bitrate_allocator(), AddObserver(send_stream.get(), _)) + .WillOnce(Invoke( + [&](BitrateAllocatorObserver*, MediaStreamAllocationConfig config) { + EXPECT_EQ(config.priority_bitrate_bps, 20000); + })); + EXPECT_CALL(*helper.channel_send(), StartSend()); + send_stream->Start(); + EXPECT_CALL(*helper.channel_send(), StopSend()); + send_stream->Stop(); +} + +TEST(AudioSendStreamTest, DefaultsToContributeUnusedBitrate) { + ConfigHelper helper(true, true, true); + auto send_stream = helper.CreateAudioSendStream(); + EXPECT_CALL( + *helper.bitrate_allocator(), + AddObserver(send_stream.get(), + Field(&MediaStreamAllocationConfig::rate_elasticity, + TrackRateElasticity::kCanContributeUnusedRate))); + EXPECT_CALL(*helper.channel_send(), StartSend()); + send_stream->Start(); + EXPECT_CALL(*helper.channel_send(), StopSend()); + send_stream->Stop(); +} + +TEST(AudioSendStreamTest, OverridesPriorityBitrate) { + ConfigHelper helper(true, true, true); + ScopedKeyValueConfig field_trials(helper.field_trials, + "WebRTC-Audio-Allocation/prio_rate:20/" + "WebRTC-Audio-PriorityBitrate/Disabled/"); + auto send_stream = helper.CreateAudioSendStream(); + EXPECT_CALL(*helper.bitrate_allocator(), AddObserver(send_stream.get(), _)) + .WillOnce(Invoke( + [&](BitrateAllocatorObserver*, MediaStreamAllocationConfig config) { + EXPECT_EQ(config.priority_bitrate_bps, 0); + })); + EXPECT_CALL(*helper.channel_send(), StartSend()); + send_stream->Start(); + EXPECT_CALL(*helper.channel_send(), StopSend()); + send_stream->Stop(); +} + +TEST(AudioSendStreamTest, UseEncoderBitrateRange) { + ConfigHelper helper(true, true, true); + std::pair bitrate_range{DataRate::BitsPerSec(5000), + DataRate::BitsPerSec(10000)}; + EXPECT_CALL(helper.mock_encoder_factory(), Create) + .WillOnce(WithArg<1>([&](const SdpAudioFormat& format) { + auto mock_encoder = SetupAudioEncoderMock(format); + EXPECT_CALL(*mock_encoder, GetBitrateRange) + .WillRepeatedly(Return(bitrate_range)); + return mock_encoder; + })); + auto send_stream = helper.CreateAudioSendStream(); + EXPECT_CALL(*helper.bitrate_allocator(), AddObserver(send_stream.get(), _)) + .WillOnce(Invoke( + [&](BitrateAllocatorObserver*, MediaStreamAllocationConfig config) { + EXPECT_EQ(config.min_bitrate_bps, bitrate_range.first.bps()); + EXPECT_EQ(config.max_bitrate_bps, bitrate_range.second.bps()); + })); + EXPECT_CALL(*helper.channel_send(), StartSend()); + send_stream->Start(); + EXPECT_CALL(*helper.channel_send(), StopSend()); + send_stream->Stop(); +} + } // namespace test } // namespace webrtc diff --git a/audio/audio_state.cc b/audio/audio_state.cc index 6f20e7b128..57ae1a91b2 100644 --- a/audio/audio_state.cc +++ b/audio/audio_state.cc @@ -15,12 +15,12 @@ #include #include +#include "api/audio/audio_device.h" #include "api/sequence_checker.h" #include 
"api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" #include "audio/audio_receive_stream.h" #include "audio/audio_send_stream.h" -#include "modules/audio_device/include/audio_device.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -51,28 +51,47 @@ AudioTransport* AudioState::audio_transport() { return &audio_transport_; } +void AudioState::SetPlayout(bool enabled) { + RTC_LOG(LS_INFO) << "SetPlayout(" << enabled << ")"; + RTC_DCHECK_RUN_ON(&thread_checker_); + auto* adm = config_.audio_device_module.get(); + if (enabled) { + if (!receiving_streams_.empty()) { + if (!adm->Playing()) { + if (adm->InitPlayout() == 0) { + adm->StartPlayout(); + } + } + } + } else { + // Disable playout. + config_.audio_device_module->StopPlayout(); + } + playout_enabled_ = enabled; + UpdateNullAudioPollerState(); +} + void AudioState::AddReceivingStream( webrtc::AudioReceiveStreamInterface* stream) { RTC_DCHECK_RUN_ON(&thread_checker_); RTC_DCHECK_EQ(0, receiving_streams_.count(stream)); receiving_streams_.insert(stream); - if (!config_.audio_mixer->AddSource( - static_cast(stream))) { + if (!config_.audio_mixer->AddSource(stream->source())) { RTC_DLOG(LS_ERROR) << "Failed to add source to mixer."; } // Make sure playback is initialized; start playing if enabled. - UpdateNullAudioPollerState(); - auto* adm = config_.audio_device_module.get(); - if (!adm->Playing()) { - if (adm->InitPlayout() == 0) { - if (playout_enabled_) { + if (playout_enabled_) { + auto* adm = config_.audio_device_module.get(); + if (!adm->Playing()) { + if (adm->InitPlayout() == 0) { adm->StartPlayout(); + } else { + RTC_DLOG_F(LS_ERROR) << "Failed to initialize playout."; } - } else { - RTC_DLOG_F(LS_ERROR) << "Failed to initialize playout."; } } + UpdateNullAudioPollerState(); } void AudioState::RemoveReceivingStream( @@ -80,12 +99,30 @@ void AudioState::RemoveReceivingStream( RTC_DCHECK_RUN_ON(&thread_checker_); auto count = receiving_streams_.erase(stream); RTC_DCHECK_EQ(1, count); - config_.audio_mixer->RemoveSource( - static_cast(stream)); - UpdateNullAudioPollerState(); + config_.audio_mixer->RemoveSource(stream->source()); if (receiving_streams_.empty()) { config_.audio_device_module->StopPlayout(); } + UpdateNullAudioPollerState(); +} + +void AudioState::SetRecording(bool enabled) { + RTC_LOG(LS_INFO) << "SetRecording(" << enabled << ")"; + RTC_DCHECK_RUN_ON(&thread_checker_); + auto* adm = config_.audio_device_module.get(); + if (enabled) { + if (!sending_streams_.empty()) { + if (!adm->Recording()) { + if (adm->InitRecording() == 0) { + adm->StartRecording(); + } + } + } + } else { + // Disable recording. + adm->StopRecording(); + } + recording_enabled_ = enabled; } void AudioState::AddSendingStream(webrtc::AudioSendStream* stream, @@ -99,13 +136,13 @@ void AudioState::AddSendingStream(webrtc::AudioSendStream* stream, // Make sure recording is initialized; start recording if enabled. 
auto* adm = config_.audio_device_module.get(); - if (!adm->Recording()) { - if (adm->InitRecording() == 0) { - if (recording_enabled_) { + if (recording_enabled_) { + if (!adm->Recording()) { + if (adm->InitRecording() == 0) { adm->StartRecording(); + } else { + RTC_DLOG_F(LS_ERROR) << "Failed to initialize recording."; } - } else { - RTC_DLOG_F(LS_ERROR) << "Failed to initialize recording."; } } } @@ -120,38 +157,6 @@ void AudioState::RemoveSendingStream(webrtc::AudioSendStream* stream) { } } -void AudioState::SetPlayout(bool enabled) { - RTC_LOG(LS_INFO) << "SetPlayout(" << enabled << ")"; - RTC_DCHECK_RUN_ON(&thread_checker_); - if (playout_enabled_ != enabled) { - playout_enabled_ = enabled; - if (enabled) { - UpdateNullAudioPollerState(); - if (!receiving_streams_.empty()) { - config_.audio_device_module->StartPlayout(); - } - } else { - config_.audio_device_module->StopPlayout(); - UpdateNullAudioPollerState(); - } - } -} - -void AudioState::SetRecording(bool enabled) { - RTC_LOG(LS_INFO) << "SetRecording(" << enabled << ")"; - RTC_DCHECK_RUN_ON(&thread_checker_); - if (recording_enabled_ != enabled) { - recording_enabled_ = enabled; - if (enabled) { - if (!sending_streams_.empty()) { - config_.audio_device_module->StartRecording(); - } - } else { - config_.audio_device_module->StopRecording(); - } - } -} - void AudioState::SetStereoChannelSwapping(bool enable) { RTC_DCHECK(thread_checker_.IsCurrent()); audio_transport_.SetStereoChannelSwapping(enable); @@ -205,8 +210,7 @@ void AudioState::UpdateNullAudioPollerState() { } } // namespace internal -rtc::scoped_refptr AudioState::Create( - const AudioState::Config& config) { - return rtc::make_ref_counted(config); +scoped_refptr AudioState::Create(const AudioState::Config& config) { + return make_ref_counted(config); } } // namespace webrtc diff --git a/audio/audio_state_unittest.cc b/audio/audio_state_unittest.cc index 070e220979..0c278d770c 100644 --- a/audio/audio_state_unittest.cc +++ b/audio/audio_state_unittest.cc @@ -15,10 +15,12 @@ #include #include "api/task_queue/test/mock_task_queue_base.h" +#include "call/test/mock_audio_receive_stream.h" #include "call/test/mock_audio_send_stream.h" #include "modules/audio_device/include/mock_audio_device.h" #include "modules/audio_mixer/audio_mixer_impl.h" #include "modules/audio_processing/include/mock_audio_processing.h" +#include "rtc_base/thread.h" #include "test/gtest.h" namespace webrtc { @@ -26,6 +28,7 @@ namespace test { namespace { using ::testing::_; +using ::testing::InSequence; using ::testing::Matcher; using ::testing::NiceMock; using ::testing::StrictMock; @@ -52,8 +55,8 @@ struct FakeAsyncAudioProcessingHelper { FakeTaskQueueFactory() = default; ~FakeTaskQueueFactory() override = default; std::unique_ptr CreateTaskQueue( - absl::string_view name, - Priority priority) const override { + absl::string_view /* name */, + Priority /* priority */) const override { return std::unique_ptr( new FakeTaskQueue()); } @@ -87,8 +90,8 @@ struct FakeAsyncAudioProcessingHelper { NiceMock audio_frame_processor_; FakeTaskQueueFactory task_queue_factory_; - rtc::scoped_refptr CreateFactory() { - return rtc::make_ref_counted( + scoped_refptr CreateFactory() { + return make_ref_counted( audio_frame_processor_, task_queue_factory_); } }; @@ -105,16 +108,16 @@ struct ConfigHelper { audio_state_config.audio_processing = params.use_null_audio_processing ? 
nullptr - : rtc::make_ref_counted>(); + : make_ref_counted>(); audio_state_config.audio_device_module = - rtc::make_ref_counted>(); + make_ref_counted>(); if (params.use_async_audio_processing) { audio_state_config.async_audio_processing_factory = async_audio_processing_helper_.CreateFactory(); } } AudioState::Config& config() { return audio_state_config; } - rtc::scoped_refptr mixer() { return audio_mixer; } + scoped_refptr mixer() { return audio_mixer; } NiceMock& mock_audio_frame_processor() { return async_audio_processing_helper_.audio_frame_processor_; @@ -122,7 +125,7 @@ struct ConfigHelper { private: AudioState::Config audio_state_config; - rtc::scoped_refptr audio_mixer; + scoped_refptr audio_mixer; FakeAsyncAudioProcessingHelper async_audio_processing_helper_; }; @@ -179,8 +182,8 @@ TEST_P(AudioStateTest, Create) { TEST_P(AudioStateTest, ConstructDestruct) { ConfigHelper helper(GetParam()); - rtc::scoped_refptr audio_state( - rtc::make_ref_counted(helper.config())); + scoped_refptr audio_state( + make_ref_counted(helper.config())); } TEST_P(AudioStateTest, RecordedAudioArrivesAtSingleStream) { @@ -192,8 +195,8 @@ TEST_P(AudioStateTest, RecordedAudioArrivesAtSingleStream) { EXPECT_CALL(helper.mock_audio_frame_processor(), SinkCleared); } - rtc::scoped_refptr audio_state( - rtc::make_ref_counted(helper.config())); + scoped_refptr audio_state( + make_ref_counted(helper.config())); MockAudioSendStream stream; audio_state->AddSendingStream(&stream, 8000, 2); @@ -220,7 +223,6 @@ TEST_P(AudioStateTest, RecordedAudioArrivesAtSingleStream) { EXPECT_CALL(*ap, ProcessStream(_, _, _, Matcher(_))); } - constexpr int kSampleRate = 16000; constexpr size_t kNumChannels = 2; auto audio_data = Create10msTestData(kSampleRate, kNumChannels); uint32_t new_mic_level = 667; @@ -241,8 +243,8 @@ TEST_P(AudioStateTest, RecordedAudioArrivesAtMultipleStreams) { EXPECT_CALL(helper.mock_audio_frame_processor(), SinkCleared); } - rtc::scoped_refptr audio_state( - rtc::make_ref_counted(helper.config())); + scoped_refptr audio_state( + make_ref_counted(helper.config())); MockAudioSendStream stream_1; MockAudioSendStream stream_2; @@ -279,7 +281,6 @@ TEST_P(AudioStateTest, RecordedAudioArrivesAtMultipleStreams) { EXPECT_CALL(*ap, ProcessStream(_, _, _, Matcher(_))); } - constexpr int kSampleRate = 16000; constexpr size_t kNumChannels = 1; auto audio_data = Create10msTestData(kSampleRate, kNumChannels); uint32_t new_mic_level = 667; @@ -293,7 +294,6 @@ TEST_P(AudioStateTest, RecordedAudioArrivesAtMultipleStreams) { } TEST_P(AudioStateTest, EnableChannelSwap) { - constexpr int kSampleRate = 16000; constexpr size_t kNumChannels = 2; ConfigHelper helper(GetParam()); @@ -304,8 +304,8 @@ TEST_P(AudioStateTest, EnableChannelSwap) { EXPECT_CALL(helper.mock_audio_frame_processor(), SinkCleared); } - rtc::scoped_refptr audio_state( - rtc::make_ref_counted(helper.config())); + scoped_refptr audio_state( + make_ref_counted(helper.config())); audio_state->SetStereoChannelSwapping(true); @@ -357,6 +357,172 @@ TEST_P(AudioStateTest, audio_buffer, n_samples_out, &elapsed_time_ms, &ntp_time_ms); } +TEST_P(AudioStateTest, StartRecordingDoesNothingWithoutStream) { + ConfigHelper helper(GetParam()); + scoped_refptr audio_state( + make_ref_counted(helper.config())); + + auto* adm = reinterpret_cast( + helper.config().audio_device_module.get()); + + EXPECT_CALL(*adm, InitRecording()).Times(0); + EXPECT_CALL(*adm, StartRecording()).Times(0); + EXPECT_CALL(*adm, StopRecording()).Times(1); + audio_state->SetRecording(false); + 
audio_state->SetRecording(true); +} + +TEST_P(AudioStateTest, AddStreamDoesNothingIfRecordingDisabled) { + ConfigHelper helper(GetParam()); + scoped_refptr audio_state( + make_ref_counted(helper.config())); + + auto* adm = reinterpret_cast( + helper.config().audio_device_module.get()); + + EXPECT_CALL(*adm, StopRecording()).Times(2); + audio_state->SetRecording(false); + + MockAudioSendStream stream; + EXPECT_CALL(*adm, StartRecording).Times(0); + audio_state->AddSendingStream(&stream, kSampleRate, kNumberOfChannels); + audio_state->RemoveSendingStream(&stream); +} + +TEST_P(AudioStateTest, AlwaysCallInitRecordingBeforeStartRecording) { + ConfigHelper helper(GetParam()); + scoped_refptr audio_state( + make_ref_counted(helper.config())); + + auto* adm = reinterpret_cast( + helper.config().audio_device_module.get()); + + MockAudioSendStream stream; + { + InSequence s; + EXPECT_CALL(*adm, InitRecording()); + EXPECT_CALL(*adm, StartRecording()); + audio_state->AddSendingStream(&stream, kSampleRate, kNumberOfChannels); + } + + EXPECT_CALL(*adm, StopRecording()); + audio_state->SetRecording(false); + + { + InSequence s; + EXPECT_CALL(*adm, InitRecording()); + EXPECT_CALL(*adm, StartRecording()); + audio_state->SetRecording(true); + } + + EXPECT_CALL(*adm, StopRecording()); + audio_state->RemoveSendingStream(&stream); +} + +// The recording can also be initialized by WebRtcVoiceSendChannel +// options_.init_recording_on_send. Make sure StopRecording is still +// being called in this scenario. +TEST_P(AudioStateTest, CallStopRecordingIfRecordingIsInitialized) { + ConfigHelper helper(GetParam()); + scoped_refptr audio_state( + make_ref_counted(helper.config())); + + auto* adm = reinterpret_cast( + helper.config().audio_device_module.get()); + + audio_state->SetRecording(false); + + EXPECT_CALL(*adm, StopRecording()); + audio_state->SetRecording(false); +} + +TEST_P(AudioStateTest, StartPlayoutDoesNothingWithoutStream) { + ConfigHelper helper(GetParam()); + scoped_refptr audio_state( + make_ref_counted(helper.config())); + + auto* adm = reinterpret_cast( + helper.config().audio_device_module.get()); + + EXPECT_CALL(*adm, InitPlayout()).Times(0); + EXPECT_CALL(*adm, StartPlayout()).Times(0); + EXPECT_CALL(*adm, StopPlayout()).Times(1); + audio_state->SetPlayout(false); + + audio_state->SetPlayout(true); +} + +TEST_P(AudioStateTest, AlwaysCallInitPlayoutBeforeStartPlayout) { + ConfigHelper helper(GetParam()); + scoped_refptr audio_state( + make_ref_counted(helper.config())); + + auto* adm = reinterpret_cast( + helper.config().audio_device_module.get()); + + MockAudioReceiveStream stream; + { + InSequence s; + EXPECT_CALL(*adm, InitPlayout()); + EXPECT_CALL(*adm, StartPlayout()); + audio_state->AddReceivingStream(&stream); + } + + // SetPlayout(false) starts the NullAudioPoller...which needs a thread. + ThreadManager::Instance()->WrapCurrentThread(); + + EXPECT_CALL(*adm, StopPlayout()); + audio_state->SetPlayout(false); + + { + InSequence s; + EXPECT_CALL(*adm, InitPlayout()); + EXPECT_CALL(*adm, StartPlayout()); + audio_state->SetPlayout(true); + } + + // Playout without streams starts the NullAudioPoller... + // which needs a thread. 
+ ThreadManager::Instance()->WrapCurrentThread(); + + EXPECT_CALL(*adm, StopPlayout()); + audio_state->RemoveReceivingStream(&stream); +} + +TEST_P(AudioStateTest, CallStopPlayoutIfPlayoutIsInitialized) { + ConfigHelper helper(GetParam()); + scoped_refptr audio_state( + make_ref_counted(helper.config())); + + auto* adm = reinterpret_cast( + helper.config().audio_device_module.get()); + + audio_state->SetPlayout(false); + + EXPECT_CALL(*adm, StopPlayout()); + audio_state->SetPlayout(false); +} + +TEST_P(AudioStateTest, AddStreamDoesNothingIfPlayoutDisabled) { + ConfigHelper helper(GetParam()); + scoped_refptr audio_state( + make_ref_counted(helper.config())); + + auto* adm = reinterpret_cast( + helper.config().audio_device_module.get()); + + EXPECT_CALL(*adm, StopPlayout()).Times(2); + audio_state->SetPlayout(false); + + // AddReceivingStream with playout disabled start the NullAudioPoller... + // which needs a thread. + ThreadManager::Instance()->WrapCurrentThread(); + + MockAudioReceiveStream stream; + audio_state->AddReceivingStream(&stream); + audio_state->RemoveReceivingStream(&stream); +} + INSTANTIATE_TEST_SUITE_P(AudioStateTest, AudioStateTest, Values(ConfigHelper::Params({false, false}), diff --git a/audio/audio_transport_impl.cc b/audio/audio_transport_impl.cc index 42a81d5b4a..ef8d3baf8d 100644 --- a/audio/audio_transport_impl.cc +++ b/audio/audio_transport_impl.cc @@ -38,8 +38,8 @@ void InitializeCaptureFrame(int input_sample_rate, RTC_DCHECK(audio_frame); int min_processing_rate_hz = std::min(input_sample_rate, send_sample_rate_hz); for (int native_rate_hz : AudioProcessing::kNativeSampleRatesHz) { - audio_frame->sample_rate_hz_ = native_rate_hz; - if (audio_frame->sample_rate_hz_ >= min_processing_rate_hz) { + audio_frame->SetSampleRateAndChannelSize(native_rate_hz); + if (native_rate_hz >= min_processing_rate_hz) { break; } } @@ -70,20 +70,19 @@ void ProcessCaptureFrame(uint32_t delay_ms, int Resample(const AudioFrame& frame, const int destination_sample_rate, PushResampler* resampler, - int16_t* destination) { + InterleavedView destination) { TRACE_EVENT2("webrtc", "Resample", "frame sample rate", frame.sample_rate_hz_, "destination_sample_rate", destination_sample_rate); - const int number_of_channels = static_cast(frame.num_channels_); - const int target_number_of_samples_per_channel = - destination_sample_rate / 100; - resampler->InitializeIfNeeded(frame.sample_rate_hz_, destination_sample_rate, - number_of_channels); - - // TODO(yujo): make resampler take an AudioFrame, and add special case - // handling of muted frames. - return resampler->Resample( - frame.data(), frame.samples_per_channel_ * number_of_channels, - destination, number_of_channels * target_number_of_samples_per_channel); + const size_t target_number_of_samples_per_channel = + SampleRateToDefaultChannelSize(destination_sample_rate); + RTC_DCHECK_EQ(NumChannels(destination), frame.num_channels_); + RTC_DCHECK_EQ(SamplesPerChannel(destination), + target_number_of_samples_per_channel); + RTC_CHECK_EQ(destination.data().size(), + frame.num_channels_ * target_number_of_samples_per_channel); + + // TODO(yujo): Add special case handling of muted frames. 
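The resampler path above now passes InterleavedView buffers instead of raw pointer/length pairs. A small illustrative helper (not part of the patch) showing the size bookkeeping, assuming the view helpers from api/audio/audio_view.h used above:

#include <cstdint>
#include <vector>

#include "api/audio/audio_view.h"
#include "common_audio/resampler/include/push_resampler.h"

// Resamples one 10 ms interleaved int16 buffer to `dst_rate_hz`. Buffer sizes
// follow the 10 ms convention used above (samples per channel == rate / 100).
// Sketch only; error handling omitted.
std::vector<int16_t> Resample10ms(
    webrtc::InterleavedView<const int16_t> source,
    int dst_rate_hz,
    webrtc::PushResampler<int16_t>& resampler) {
  const size_t dst_samples_per_channel =
      webrtc::SampleRateToDefaultChannelSize(dst_rate_hz);
  const size_t num_channels = webrtc::NumChannels(source);
  std::vector<int16_t> buffer(dst_samples_per_channel * num_channels);
  webrtc::InterleavedView<int16_t> destination(
      buffer.data(), dst_samples_per_channel, num_channels);
  resampler.Resample(source, destination);
  return buffer;
}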
+ return resampler->Resample(frame.data_view(), destination); } } // namespace @@ -119,7 +118,7 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable( return RecordedDataIsAvailable( audio_data, number_of_frames, bytes_per_sample, number_of_channels, sample_rate, audio_delay_milliseconds, clock_drift, volume, key_pressed, - new_mic_volume, /*estimated_capture_time_ns=*/absl::nullopt); + new_mic_volume, /*estimated_capture_time_ns=*/std::nullopt); } // Not used in Chromium. Process captured audio and distribute to all sending @@ -135,7 +134,7 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable( uint32_t /*volume*/, bool key_pressed, uint32_t& /*new_mic_volume*/, - absl::optional + std::optional estimated_capture_time_ns) { // NOLINT: to avoid changing APIs RTC_DCHECK(audio_data); RTC_DCHECK_GE(number_of_channels, 1); @@ -147,6 +146,9 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable( RTC_DCHECK_LE(bytes_per_sample * number_of_frames * number_of_channels, AudioFrame::kMaxDataSizeBytes); + InterleavedView source(static_cast(audio_data), + number_of_frames, number_of_channels); + int send_sample_rate_hz = 0; size_t send_num_channels = 0; bool swap_stereo_channels = false; @@ -160,9 +162,8 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable( std::unique_ptr audio_frame(new AudioFrame()); InitializeCaptureFrame(sample_rate, send_sample_rate_hz, number_of_channels, send_num_channels, audio_frame.get()); - voe::RemixAndResample(static_cast(audio_data), - number_of_frames, number_of_channels, sample_rate, - &capture_resampler_, audio_frame.get()); + voe::RemixAndResample(source, sample_rate, &capture_resampler_, + audio_frame.get()); ProcessCaptureFrame(audio_delay_milliseconds, key_pressed, swap_stereo_channels, audio_processing_, audio_frame.get()); @@ -209,7 +210,7 @@ int32_t AudioTransportImpl::NeedMorePlayData(const size_t nSamples, size_t& nSamplesOut, int64_t* elapsed_time_ms, int64_t* ntp_time_ms) { - TRACE_EVENT0("webrtc", "AudioTransportImpl::SendProcessedData"); + TRACE_EVENT0("webrtc", "AudioTransportImpl::NeedMorePlayData"); RTC_DCHECK_EQ(sizeof(int16_t) * nChannels, nBytesPerSample); RTC_DCHECK_GE(nChannels, 1); RTC_DCHECK_LE(nChannels, 2); @@ -232,8 +233,10 @@ int32_t AudioTransportImpl::NeedMorePlayData(const size_t nSamples, RTC_DCHECK_EQ(error, AudioProcessing::kNoError); } - nSamplesOut = Resample(mixed_frame_, samplesPerSec, &render_resampler_, - static_cast(audioSamples)); + nSamplesOut = + Resample(mixed_frame_, samplesPerSec, &render_resampler_, + InterleavedView(static_cast(audioSamples), + nSamples, nChannels)); RTC_DCHECK_EQ(nSamplesOut, nChannels * nSamples); return 0; } @@ -263,8 +266,10 @@ void AudioTransportImpl::PullRenderData(int bits_per_sample, *elapsed_time_ms = mixed_frame_.elapsed_time_ms_; *ntp_time_ms = mixed_frame_.ntp_time_ms_; - auto output_samples = Resample(mixed_frame_, sample_rate, &render_resampler_, - static_cast(audio_data)); + int output_samples = + Resample(mixed_frame_, sample_rate, &render_resampler_, + InterleavedView(static_cast(audio_data), + number_of_frames, number_of_channels)); RTC_DCHECK_EQ(output_samples, number_of_channels * number_of_frames); } diff --git a/audio/audio_transport_impl.h b/audio/audio_transport_impl.h index 24b09d2140..a240eb0fe5 100644 --- a/audio/audio_transport_impl.h +++ b/audio/audio_transport_impl.h @@ -14,12 +14,12 @@ #include #include +#include "api/audio/audio_device.h" #include "api/audio/audio_mixer.h" +#include "api/audio/audio_processing.h" #include "api/scoped_refptr.h" #include 
"common_audio/resampler/include/push_resampler.h" #include "modules/async_audio_processing/async_audio_processing.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -63,7 +63,7 @@ class AudioTransportImpl : public AudioTransport { uint32_t currentMicLevel, bool keyPressed, uint32_t& newMicLevel, - absl::optional estimated_capture_time_ns) override; + std::optional estimated_capture_time_ns) override; int32_t NeedMorePlayData(size_t nSamples, size_t nBytesPerSample, @@ -107,7 +107,7 @@ class AudioTransportImpl : public AudioTransport { // Render side. - rtc::scoped_refptr mixer_; + scoped_refptr mixer_; AudioFrame mixed_frame_; // Converts mixed audio to the audio device output rate. PushResampler render_resampler_; diff --git a/audio/channel_receive.cc b/audio/channel_receive.cc index 270ba61ed5..cfbf843032 100644 --- a/audio/channel_receive.cc +++ b/audio/channel_receive.cc @@ -11,44 +11,74 @@ #include "audio/channel_receive.h" #include +#include +#include #include #include +#include #include #include #include +#include "api/array_view.h" +#include "api/audio/audio_device.h" +#include "api/audio/audio_mixer.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_format.h" +#include "api/call/audio_sink.h" +#include "api/call/transport.h" +#include "api/crypto/crypto_options.h" #include "api/crypto/frame_decryptor_interface.h" +#include "api/environment/environment.h" #include "api/frame_transformer_interface.h" +#include "api/make_ref_counted.h" +#include "api/media_types.h" +#include "api/neteq/default_neteq_factory.h" +#include "api/neteq/neteq.h" +#include "api/neteq/neteq_factory.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/rtp_headers.h" +#include "api/rtp_packet_info.h" +#include "api/rtp_packet_infos.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" +#include "api/transport/rtp/rtp_source.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "audio/audio_level.h" #include "audio/channel_receive_frame_transformer_delegate.h" -#include "audio/channel_send.h" #include "audio/utility/audio_frame_operations.h" +#include "call/syncable.h" #include "logging/rtc_event_log/events/rtc_event_audio_playout.h" #include "logging/rtc_event_log/events/rtc_event_neteq_set_minimum_delay.h" -#include "modules/audio_coding/acm2/acm_receiver.h" -#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" -#include "modules/audio_device/include/audio_device.h" +#include "modules/audio_coding/acm2/acm_resampler.h" +#include "modules/audio_coding/acm2/call_statistics.h" +#include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/pacing/packet_router.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h" +#include "modules/rtp_rtcp/include/rtcp_statistics.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h" #include "modules/rtp_rtcp/source/capture_clock_offset_updater.h" -#include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include 
"modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "modules/rtp_rtcp/source/source_tracker.h" +#include "rtc_base/buffer.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/numerics/safe_minmax.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" #include "rtc_base/race_checker.h" +#include "rtc_base/strings/string_builder.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/metrics.h" @@ -65,46 +95,45 @@ constexpr double kAudioSampleDurationSeconds = 0.01; constexpr int kVoiceEngineMinMinPlayoutDelayMs = 0; constexpr int kVoiceEngineMaxMinPlayoutDelayMs = 10000; -acm2::AcmReceiver::Config AcmConfig( +std::unique_ptr CreateNetEq( NetEqFactory* neteq_factory, - rtc::scoped_refptr decoder_factory, - absl::optional codec_pair_id, + std::optional codec_pair_id, size_t jitter_buffer_max_packets, bool jitter_buffer_fast_playout, - int jitter_buffer_min_delay_ms) { - acm2::AcmReceiver::Config acm_config; - acm_config.neteq_factory = neteq_factory; - acm_config.decoder_factory = decoder_factory; - acm_config.neteq_config.codec_pair_id = codec_pair_id; - acm_config.neteq_config.max_packets_in_buffer = jitter_buffer_max_packets; - acm_config.neteq_config.enable_fast_accelerate = jitter_buffer_fast_playout; - acm_config.neteq_config.enable_muted_state = true; - acm_config.neteq_config.min_delay_ms = jitter_buffer_min_delay_ms; - - return acm_config; + int jitter_buffer_min_delay_ms, + const Environment& env, + scoped_refptr decoder_factory) { + NetEq::Config config; + config.codec_pair_id = codec_pair_id; + config.max_packets_in_buffer = jitter_buffer_max_packets; + config.enable_fast_accelerate = jitter_buffer_fast_playout; + config.enable_muted_state = true; + config.min_delay_ms = jitter_buffer_min_delay_ms; + if (neteq_factory) { + return neteq_factory->Create(env, config, std::move(decoder_factory)); + } + return DefaultNetEqFactory().Create(env, config, std::move(decoder_factory)); } class ChannelReceive : public ChannelReceiveInterface, public RtcpPacketTypeCounterObserver { public: // Used for receive streams. 
- ChannelReceive( - Clock* clock, - NetEqFactory* neteq_factory, - AudioDeviceModule* audio_device_module, - Transport* rtcp_send_transport, - RtcEventLog* rtc_event_log, - uint32_t local_ssrc, - uint32_t remote_ssrc, - size_t jitter_buffer_max_packets, - bool jitter_buffer_fast_playout, - int jitter_buffer_min_delay_ms, - bool enable_non_sender_rtt, - rtc::scoped_refptr decoder_factory, - absl::optional codec_pair_id, - rtc::scoped_refptr frame_decryptor, - const webrtc::CryptoOptions& crypto_options, - rtc::scoped_refptr frame_transformer); + ChannelReceive(const Environment& env, + NetEqFactory* neteq_factory, + AudioDeviceModule* audio_device_module, + Transport* rtcp_send_transport, + uint32_t local_ssrc, + uint32_t remote_ssrc, + size_t jitter_buffer_max_packets, + bool jitter_buffer_fast_playout, + int jitter_buffer_min_delay_ms, + bool enable_non_sender_rtt, + scoped_refptr decoder_factory, + std::optional codec_pair_id, + scoped_refptr frame_decryptor, + const webrtc::CryptoOptions& crypto_options, + scoped_refptr frame_transformer); ~ChannelReceive() override; void SetSink(AudioSinkInterface* sink) override; @@ -117,7 +146,7 @@ class ChannelReceive : public ChannelReceiveInterface, void StopPlayout() override; // Codecs - absl::optional> GetReceiveCodec() + std::optional> GetReceiveCodec() const override; void ReceivedRTCPPacket(const uint8_t* data, size_t length) override; @@ -140,12 +169,12 @@ class ChannelReceive : public ChannelReceiveInterface, // Audio+Video Sync. uint32_t GetDelayEstimate() const override; - bool SetMinimumPlayoutDelay(int delayMs) override; + bool SetMinimumPlayoutDelay(int delay_ms) override; bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, int64_t* time_ms) const override; void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, int64_t time_ms) override; - absl::optional GetCurrentEstimatedPlayoutNtpTimestampMs( + std::optional GetCurrentEstimatedPlayoutNtpTimestampMs( int64_t now_ms) const override; // Audio quality. @@ -153,14 +182,15 @@ class ChannelReceive : public ChannelReceiveInterface, int GetBaseMinimumPlayoutDelayMs() const override; // Produces the transport-related timestamps; current_delay_ms is left unset. - absl::optional GetSyncInfo() const override; + std::optional GetSyncInfo() const override; void RegisterReceiverCongestionControlObjects( PacketRouter* packet_router) override; void ResetReceiverCongestionControlObjects() override; CallReceiveStatistics GetRTCPStatistics() const override; - void SetNACKStatus(bool enable, int maxNumberOfPackets) override; + void SetNACKStatus(bool enable, int max_packets) override; + void SetRtcpMode(webrtc::RtcpMode mode) override; void SetNonSenderRttMeasurement(bool enabled) override; AudioMixer::Source::AudioFrameInfo GetAudioFrameWithInfo( @@ -169,23 +199,18 @@ class ChannelReceive : public ChannelReceiveInterface, int PreferredSampleRate() const override; - void SetSourceTracker(SourceTracker* source_tracker) override; - - // Associate to a send channel. - // Used for obtaining RTT for a receive-only channel. - void SetAssociatedSendChannel(const ChannelSendInterface* channel) override; + std::vector GetSources() const override; // Sets a frame transformer between the depacketizer and the decoder, to // transform the received frames before decoding them. 
void SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) + scoped_refptr frame_transformer) override; - void SetFrameDecryptor(rtc::scoped_refptr - frame_decryptor) override; + void SetFrameDecryptor( + scoped_refptr frame_decryptor) override; void OnLocalSsrcChange(uint32_t local_ssrc) override; - uint32_t GetLocalSsrc() const override; void RtcpPacketTypesCounterUpdated( uint32_t ssrc, @@ -194,20 +219,21 @@ class ChannelReceive : public ChannelReceiveInterface, private: void ReceivePacket(const uint8_t* packet, size_t packet_length, - const RTPHeader& header) - RTC_RUN_ON(worker_thread_checker_); + const RTPHeader& header, + Timestamp receive_time) RTC_RUN_ON(worker_thread_checker_); int ResendPackets(const uint16_t* sequence_numbers, int length); void UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms) RTC_RUN_ON(worker_thread_checker_); int GetRtpTimestampRateHz() const; - void OnReceivedPayloadData(rtc::ArrayView payload, - const RTPHeader& rtpHeader) + void OnReceivedPayloadData(ArrayView payload, + const RTPHeader& rtpHeader, + Timestamp receive_time) RTC_RUN_ON(worker_thread_checker_); void InitFrameTransformerDelegate( - rtc::scoped_refptr frame_transformer) + scoped_refptr frame_transformer) RTC_RUN_ON(worker_thread_checker_); // Thread checkers document and lock usage of some methods to specific threads @@ -215,8 +241,8 @@ class ChannelReceive : public ChannelReceiveInterface, // parts with single-threaded semantics, and thereby reduce the need for // locks. RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_thread_checker_; - RTC_NO_UNIQUE_ADDRESS SequenceChecker network_thread_checker_; + const Environment env_; TaskQueueBase* const worker_thread_; ScopedTaskSafety worker_safety_; @@ -224,47 +250,45 @@ class ChannelReceive : public ChannelReceiveInterface, // only access. We don't necessarily own and control these threads, so thread // checkers cannot be used. E.g. Chromium may transfer "ownership" from one // audio thread to another, but access is still sequential. - rtc::RaceChecker audio_thread_race_checker_; + RaceChecker audio_thread_race_checker_; Mutex callback_mutex_; Mutex volume_settings_mutex_; + mutable Mutex call_stats_mutex_; bool playing_ RTC_GUARDED_BY(worker_thread_checker_) = false; - RtcEventLog* const event_log_; - // Indexed by payload type. std::map payload_type_frequencies_; std::unique_ptr rtp_receive_statistics_; std::unique_ptr rtp_rtcp_; const uint32_t remote_ssrc_; - SourceTracker* source_tracker_ = nullptr; + SourceTracker source_tracker_ RTC_GUARDED_BY(&worker_thread_checker_); - // Info for GetSyncInfo is updated on network or worker thread, and queried on - // the worker thread. - absl::optional last_received_rtp_timestamp_ + std::optional last_received_rtp_timestamp_ RTC_GUARDED_BY(&worker_thread_checker_); - absl::optional last_received_rtp_system_time_ms_ + std::optional last_received_rtp_system_time_ms_ RTC_GUARDED_BY(&worker_thread_checker_); - // The AcmReceiver is thread safe, using its own lock. - acm2::AcmReceiver acm_receiver_; + const std::unique_ptr neteq_; // NetEq is thread-safe; no lock needed. + acm2::ResamplerHelper resampler_helper_ + RTC_GUARDED_BY(audio_thread_race_checker_); + acm2::CallStatistics call_stats_ RTC_GUARDED_BY(call_stats_mutex_); AudioSinkInterface* audio_sink_ = nullptr; AudioLevel _outputAudioLevel; - Clock* const clock_; RemoteNtpTimeEstimator ntp_estimator_ RTC_GUARDED_BY(ts_stats_lock_); // Timestamp of the audio pulled from NetEq. 
- absl::optional jitter_buffer_playout_timestamp_; + std::optional jitter_buffer_playout_timestamp_; uint32_t playout_timestamp_rtp_ RTC_GUARDED_BY(worker_thread_checker_); - absl::optional playout_timestamp_rtp_time_ms_ + std::optional playout_timestamp_rtp_time_ms_ RTC_GUARDED_BY(worker_thread_checker_); uint32_t playout_delay_ms_ RTC_GUARDED_BY(worker_thread_checker_); - absl::optional playout_timestamp_ntp_ + std::optional playout_timestamp_ntp_ RTC_GUARDED_BY(worker_thread_checker_); - absl::optional playout_timestamp_ntp_time_ms_ + std::optional playout_timestamp_ntp_time_ms_ RTC_GUARDED_BY(worker_thread_checker_); mutable Mutex ts_stats_lock_; @@ -279,15 +303,12 @@ class ChannelReceive : public ChannelReceiveInterface, AudioDeviceModule* _audioDeviceModulePtr; float _outputGain RTC_GUARDED_BY(volume_settings_mutex_); - const ChannelSendInterface* associated_send_channel_ - RTC_GUARDED_BY(network_thread_checker_); - PacketRouter* packet_router_ = nullptr; SequenceChecker construction_thread_; // E2EE Audio Frame Decryption - rtc::scoped_refptr frame_decryptor_ + scoped_refptr frame_decryptor_ RTC_GUARDED_BY(worker_thread_checker_); webrtc::CryptoOptions crypto_options_; @@ -297,7 +318,7 @@ class ChannelReceive : public ChannelReceiveInterface, webrtc::CaptureClockOffsetUpdater capture_clock_offset_updater_ RTC_GUARDED_BY(ts_stats_lock_); - rtc::scoped_refptr + scoped_refptr frame_transformer_delegate_; // Counter that's used to control the frequency of reporting histograms @@ -312,43 +333,46 @@ class ChannelReceive : public ChannelReceiveInterface, mutable Mutex rtcp_counter_mutex_; RtcpPacketTypeCounter rtcp_packet_type_counter_ RTC_GUARDED_BY(rtcp_counter_mutex_); + + std::map payload_type_map_; }; -void ChannelReceive::OnReceivedPayloadData( - rtc::ArrayView payload, - const RTPHeader& rtpHeader) { +void ChannelReceive::OnReceivedPayloadData(ArrayView payload, + const RTPHeader& rtpHeader, + Timestamp receive_time) { if (!playing_) { // Avoid inserting into NetEQ when we are not playing. Count the // packet as discarded. - // If we have a source_tracker_, tell it that the frame has been - // "delivered". Normally, this happens in AudioReceiveStreamInterface when - // audio frames are pulled out, but when playout is muted, nothing is - // pulling frames. The downside of this approach is that frames delivered - // this way won't be delayed for playout, and therefore will be - // unsynchronized with (a) audio delay when playing and (b) any audio/video - // synchronization. But the alternative is that muting playout also stops - // the SourceTracker from updating RtpSource information. - if (source_tracker_) { - RtpPacketInfos::vector_type packet_vector = { - RtpPacketInfo(rtpHeader, clock_->CurrentTime())}; - source_tracker_->OnFrameDelivered(RtpPacketInfos(packet_vector)); - } - + // Tell source_tracker_ that the frame has been "delivered". Normally, this + // happens in AudioReceiveStreamInterface when audio frames are pulled out, + // but when playout is muted, nothing is pulling frames. The downside of + // this approach is that frames delivered this way won't be delayed for + // playout, and therefore will be unsynchronized with (a) audio delay when + // playing and (b) any audio/video synchronization. But the alternative is + // that muting playout also stops the SourceTracker from updating RtpSource + // information. 
+ RtpPacketInfos::vector_type packet_vector = { + RtpPacketInfo(rtpHeader, receive_time)}; + source_tracker_.OnFrameDelivered(RtpPacketInfos(packet_vector), + env_.clock().CurrentTime()); return; } - // Push the incoming payload (parsed and ready for decoding) into the ACM - if (acm_receiver_.InsertPacket(rtpHeader, payload) != 0) { + // Push the incoming payload (parsed and ready for decoding) into NetEq. + if (payload.empty()) { + neteq_->InsertEmptyPacket(rtpHeader); + } else if (neteq_->InsertPacket(rtpHeader, payload, + RtpPacketInfo(rtpHeader, receive_time)) < 0) { RTC_DLOG(LS_ERROR) << "ChannelReceive::OnReceivedPayloadData() unable to " - "push data to the ACM"; + "insert packet into NetEq; PT = " + << static_cast(rtpHeader.payloadType); return; } TimeDelta round_trip_time = rtp_rtcp_->LastRtt().value_or(TimeDelta::Zero()); - std::vector nack_list = - acm_receiver_.GetNackList(round_trip_time.ms()); + std::vector nack_list = neteq_->GetNackList(round_trip_time.ms()); if (!nack_list.empty()) { // Can't use nack_list.data() since it's not supported by all // compilers. @@ -357,7 +381,7 @@ void ChannelReceive::OnReceivedPayloadData( } void ChannelReceive::InitFrameTransformerDelegate( - rtc::scoped_refptr frame_transformer) { + scoped_refptr frame_transformer) { RTC_DCHECK(frame_transformer); RTC_DCHECK(!frame_transformer_delegate_); RTC_DCHECK(worker_thread_->IsCurrent()); @@ -365,13 +389,14 @@ void ChannelReceive::InitFrameTransformerDelegate( // Pass a callback to ChannelReceive::OnReceivedPayloadData, to be called by // the delegate to receive transformed audio. ChannelReceiveFrameTransformerDelegate::ReceiveFrameCallback - receive_audio_callback = [this](rtc::ArrayView packet, - const RTPHeader& header) { + receive_audio_callback = [this](ArrayView packet, + const RTPHeader& header, + Timestamp receive_time) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - OnReceivedPayloadData(packet, header); + OnReceivedPayloadData(packet, header, receive_time); }; frame_transformer_delegate_ = - rtc::make_ref_counted( + make_ref_counted( std::move(receive_audio_callback), std::move(frame_transformer), worker_thread_); frame_transformer_delegate_->Init(); @@ -385,12 +410,10 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( RTC_DCHECK_RUNS_SERIALIZED(&audio_thread_race_checker_); audio_frame->sample_rate_hz_ = sample_rate_hz; - event_log_->Log(std::make_unique(remote_ssrc_)); + env_.event_log().Log(std::make_unique(remote_ssrc_)); - // Get 10ms raw PCM data from the ACM (mixer limits output frequency) - bool muted; - if (acm_receiver_.GetAudio(audio_frame->sample_rate_hz_, audio_frame, - &muted) == -1) { + if ((neteq_->GetAudio(audio_frame) != NetEq::kOK) || + !resampler_helper_.MaybeResample(sample_rate_hz, audio_frame)) { RTC_DLOG(LS_ERROR) << "ChannelReceive::GetAudioFrame() PlayoutData10Ms() failed!"; // In all likelihood, the audio in this frame is garbage. We return an @@ -403,11 +426,9 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( return AudioMixer::Source::AudioFrameInfo::kError; } - if (muted) { - // TODO(henrik.lundin): We should be able to do better than this. But we - // will have to go through all the cases below where the audio samples may - // be used, and handle the muted case in some way. 
- AudioFrameOperations::Mute(audio_frame); + { + MutexLock lock(&call_stats_mutex_); + call_stats_.DecodedByNetEq(audio_frame->speech_type_, audio_frame->muted()); } { @@ -475,24 +496,27 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( // Fill in local capture clock offset in `audio_frame->packet_infos_`. RtpPacketInfos::vector_type packet_infos; for (auto& packet_info : audio_frame->packet_infos_) { - absl::optional local_capture_clock_offset_q32x32; + RtpPacketInfo new_packet_info(packet_info); if (packet_info.absolute_capture_time().has_value()) { MutexLock lock(&ts_stats_lock_); - local_capture_clock_offset_q32x32 = - capture_clock_offset_updater_.AdjustEstimatedCaptureClockOffset( - packet_info.absolute_capture_time() - ->estimated_capture_clock_offset); + new_packet_info.set_local_capture_clock_offset( + capture_clock_offset_updater_.ConvertsToTimeDela( + capture_clock_offset_updater_.AdjustEstimatedCaptureClockOffset( + packet_info.absolute_capture_time() + ->estimated_capture_clock_offset))); } - RtpPacketInfo new_packet_info(packet_info); - absl::optional local_capture_clock_offset; - if (local_capture_clock_offset_q32x32.has_value()) { - local_capture_clock_offset = TimeDelta::Millis( - UQ32x32ToInt64Ms(*local_capture_clock_offset_q32x32)); - } - new_packet_info.set_local_capture_clock_offset(local_capture_clock_offset); packet_infos.push_back(std::move(new_packet_info)); } - audio_frame->packet_infos_ = RtpPacketInfos(packet_infos); + audio_frame->packet_infos_ = RtpPacketInfos(std::move(packet_infos)); + if (!audio_frame->packet_infos_.empty()) { + RtpPacketInfos infos_copy = audio_frame->packet_infos_; + Timestamp delivery_time = env_.clock().CurrentTime(); + worker_thread_->PostTask( + SafeTask(worker_safety_.flag(), [this, infos_copy, delivery_time]() { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + source_tracker_.OnFrameDelivered(infos_copy, delivery_time); + })); + } ++audio_frame_interval_count_; if (audio_frame_interval_count_ >= kHistogramReportingInterval) { @@ -500,8 +524,8 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( worker_thread_->PostTask(SafeTask(worker_safety_.flag(), [this]() { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.TargetJitterBufferDelayMs", - acm_receiver_.TargetDelayMs()); - const int jitter_buffer_delay = acm_receiver_.FilteredCurrentDelayMs(); + neteq_->TargetDelayMs()); + const int jitter_buffer_delay = neteq_->FilteredCurrentDelayMs(); RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.ReceiverDelayEstimateMs", jitter_buffer_delay + playout_delay_ms_); RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.ReceiverJitterBufferDelayMs", @@ -512,81 +536,68 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( } TRACE_EVENT_END2("webrtc", "ChannelReceive::GetAudioFrameWithInfo", "gain", - output_gain, "muted", muted); - return muted ? AudioMixer::Source::AudioFrameInfo::kMuted - : AudioMixer::Source::AudioFrameInfo::kNormal; + output_gain, "muted", audio_frame->muted()); + return audio_frame->muted() ? AudioMixer::Source::AudioFrameInfo::kMuted + : AudioMixer::Source::AudioFrameInfo::kNormal; } int ChannelReceive::PreferredSampleRate() const { RTC_DCHECK_RUNS_SERIALIZED(&audio_thread_race_checker_); + const std::optional decoder = + neteq_->GetCurrentDecoderFormat(); + const int last_packet_sample_rate_hz = decoder ? decoder->sample_rate_hz : 0; // Return the bigger of playout and receive frequency in the ACM. 
- return std::max(acm_receiver_.last_packet_sample_rate_hz().value_or(0), - acm_receiver_.last_output_sample_rate_hz()); -} - -void ChannelReceive::SetSourceTracker(SourceTracker* source_tracker) { - source_tracker_ = source_tracker; + return std::max(last_packet_sample_rate_hz, + neteq_->last_output_sample_rate_hz()); } ChannelReceive::ChannelReceive( - Clock* clock, + const Environment& env, NetEqFactory* neteq_factory, AudioDeviceModule* audio_device_module, Transport* rtcp_send_transport, - RtcEventLog* rtc_event_log, uint32_t local_ssrc, uint32_t remote_ssrc, size_t jitter_buffer_max_packets, bool jitter_buffer_fast_playout, int jitter_buffer_min_delay_ms, bool enable_non_sender_rtt, - rtc::scoped_refptr decoder_factory, - absl::optional codec_pair_id, - rtc::scoped_refptr frame_decryptor, + scoped_refptr decoder_factory, + std::optional codec_pair_id, + scoped_refptr frame_decryptor, const webrtc::CryptoOptions& crypto_options, - rtc::scoped_refptr frame_transformer) - : worker_thread_(TaskQueueBase::Current()), - event_log_(rtc_event_log), - rtp_receive_statistics_(ReceiveStatistics::Create(clock)), + scoped_refptr frame_transformer) + : env_(env), + worker_thread_(TaskQueueBase::Current()), + rtp_receive_statistics_(ReceiveStatistics::Create(&env_.clock())), remote_ssrc_(remote_ssrc), - acm_receiver_(AcmConfig(neteq_factory, - decoder_factory, - codec_pair_id, - jitter_buffer_max_packets, - jitter_buffer_fast_playout, - jitter_buffer_min_delay_ms)), + source_tracker_(&env_.clock()), + neteq_(CreateNetEq(neteq_factory, + codec_pair_id, + jitter_buffer_max_packets, + jitter_buffer_fast_playout, + jitter_buffer_min_delay_ms, + env_, + decoder_factory)), _outputAudioLevel(), - clock_(clock), - ntp_estimator_(clock), + ntp_estimator_(&env_.clock()), playout_timestamp_rtp_(0), playout_delay_ms_(0), capture_start_rtp_time_stamp_(-1), capture_start_ntp_time_ms_(-1), _audioDeviceModulePtr(audio_device_module), _outputGain(1.0f), - associated_send_channel_(nullptr), frame_decryptor_(frame_decryptor), crypto_options_(crypto_options), - absolute_capture_time_interpolator_(clock) { + absolute_capture_time_interpolator_(&env_.clock()) { RTC_DCHECK(audio_device_module); - network_thread_checker_.Detach(); - - acm_receiver_.ResetInitialDelay(); - acm_receiver_.SetMinimumDelay(0); - acm_receiver_.SetMaximumDelay(0); - acm_receiver_.FlushBuffers(); - - _outputAudioLevel.ResetLevelFullRange(); - rtp_receive_statistics_->EnableRetransmitDetection(remote_ssrc_, true); RtpRtcpInterface::Configuration configuration; - configuration.clock = clock; configuration.audio = true; configuration.receiver_only = true; configuration.outgoing_transport = rtcp_send_transport; configuration.receive_statistics = rtp_receive_statistics_.get(); - configuration.event_log = event_log_; configuration.local_media_ssrc = local_ssrc; configuration.rtcp_packet_type_counter_observer = this; configuration.non_sender_rtt_measurement = enable_non_sender_rtt; @@ -594,7 +605,7 @@ ChannelReceive::ChannelReceive( if (frame_transformer) InitFrameTransformerDelegate(std::move(frame_transformer)); - rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(configuration); + rtp_rtcp_ = std::make_unique(env_, configuration); rtp_rtcp_->SetRemoteSSRC(remote_ssrc_); // Ensure that RTCP is enabled for the created channel. 
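The hunk above folds the old Clock* and RtcEventLog* constructor parameters into the injected Environment, and the CreateNetEq() helper earlier in this file falls back to DefaultNetEqFactory when no NetEqFactory is supplied. A minimal caller-side sketch of wiring up the updated factory signature, assuming the CreateEnvironment() helper from api/environment/environment_factory.h and the builtin decoder factory; the wrapper name, SSRCs and jitter-buffer values below are illustrative placeholders, not part of the patch:

#include <memory>
#include <optional>

#include "api/audio/audio_device.h"
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/environment/environment_factory.h"
#include "audio/channel_receive.h"

// Hypothetical helper; every literal below is a placeholder.
std::unique_ptr<webrtc::voe::ChannelReceiveInterface> MakeReceiveChannel(
    webrtc::AudioDeviceModule* adm,
    webrtc::Transport* rtcp_send_transport) {
  // The Environment now carries the clock and the RTC event log that used to
  // be passed as separate arguments.
  webrtc::Environment env = webrtc::CreateEnvironment();
  return webrtc::voe::CreateChannelReceive(
      env,
      /*neteq_factory=*/nullptr,  // nullptr -> DefaultNetEqFactory, per
                                  // CreateNetEq() above.
      adm, rtcp_send_transport,
      /*local_ssrc=*/0x1111, /*remote_ssrc=*/0x2222,
      /*jitter_buffer_max_packets=*/200,
      /*jitter_buffer_fast_playout=*/false,
      /*jitter_buffer_min_delay_ms=*/0,
      /*enable_non_sender_rtt=*/false,
      webrtc::CreateBuiltinAudioDecoderFactory(),
      /*codec_pair_id=*/std::nullopt,
      /*frame_decryptor=*/nullptr, webrtc::CryptoOptions(),
      /*frame_transformer=*/nullptr);
}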
@@ -626,13 +637,18 @@ void ChannelReceive::StopPlayout() { RTC_DCHECK_RUN_ON(&worker_thread_checker_); playing_ = false; _outputAudioLevel.ResetLevelFullRange(); - acm_receiver_.FlushBuffers(); + neteq_->FlushBuffers(); } -absl::optional> ChannelReceive::GetReceiveCodec() +std::optional> ChannelReceive::GetReceiveCodec() const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - return acm_receiver_.LastDecoder(); + std::optional decoder = + neteq_->GetCurrentDecoderFormat(); + if (!decoder) { + return std::nullopt; + } + return std::make_pair(decoder->payload_type, decoder->sdp_format); } void ChannelReceive::SetReceiveCodecs( @@ -642,15 +658,13 @@ void ChannelReceive::SetReceiveCodecs( RTC_DCHECK_GE(kv.second.clockrate_hz, 1000); payload_type_frequencies_[kv.first] = kv.second.clockrate_hz; } - acm_receiver_.SetCodecs(codecs); + payload_type_map_ = codecs; + neteq_->SetCodecs(codecs); } void ChannelReceive::OnRtpPacket(const RtpPacketReceived& packet) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - // TODO(bugs.webrtc.org/11993): Expect to be called exclusively on the - // network thread. Once that's done, the same applies to - // UpdatePlayoutTimestamp and - int64_t now_ms = rtc::TimeMillis(); + int64_t now_ms = TimeMillis(); last_received_rtp_timestamp_ = packet.Timestamp(); last_received_rtp_system_time_ms_ = now_ms; @@ -677,15 +691,17 @@ void ChannelReceive::OnRtpPacket(const RtpPacketReceived& packet) { AbsoluteCaptureTimeInterpolator::GetSource(header.ssrc, header.arrOfCSRCs), header.timestamp, - rtc::saturated_cast(packet_copy.payload_type_frequency()), + saturated_cast(packet_copy.payload_type_frequency()), header.extension.absolute_capture_time); - ReceivePacket(packet_copy.data(), packet_copy.size(), header); + ReceivePacket(packet_copy.data(), packet_copy.size(), header, + packet.arrival_time()); } void ChannelReceive::ReceivePacket(const uint8_t* packet, size_t packet_length, - const RTPHeader& header) { + const RTPHeader& header, + Timestamp receive_time) { const uint8_t* payload = packet + header.headerLength; RTC_DCHECK_GE(packet_length, header.headerLength); size_t payload_length = packet_length - header.headerLength; @@ -694,19 +710,19 @@ void ChannelReceive::ReceivePacket(const uint8_t* packet, // E2EE Custom Audio Frame Decryption (This is optional). // Keep this buffer around for the lifetime of the OnReceivedPayloadData call. - rtc::Buffer decrypted_audio_payload; + Buffer decrypted_audio_payload; if (frame_decryptor_ != nullptr) { const size_t max_plaintext_size = frame_decryptor_->GetMaxPlaintextByteSize( - cricket::MEDIA_TYPE_AUDIO, payload_length); + webrtc::MediaType::AUDIO, payload_length); decrypted_audio_payload.SetSize(max_plaintext_size); const std::vector csrcs(header.arrOfCSRCs, header.arrOfCSRCs + header.numCSRCs); const FrameDecryptorInterface::Result decrypt_result = frame_decryptor_->Decrypt( - cricket::MEDIA_TYPE_AUDIO, csrcs, - /*additional_data=*/nullptr, - rtc::ArrayView(payload, payload_data_length), + webrtc::MediaType::AUDIO, csrcs, + /*additional_data=*/ + nullptr, ArrayView(payload, payload_data_length), decrypted_audio_payload); if (decrypt_result.IsOk()) { @@ -724,34 +740,39 @@ void ChannelReceive::ReceivePacket(const uint8_t* packet, payload_data_length = 0; } - rtc::ArrayView payload_data(payload, payload_data_length); + ArrayView payload_data(payload, payload_data_length); if (frame_transformer_delegate_) { // Asynchronously transform the received payload. 
After the payload is // transformed, the delegate will call OnReceivedPayloadData to handle it. - frame_transformer_delegate_->Transform(payload_data, header, remote_ssrc_); + char buf[1024]; + SimpleStringBuilder mime_type(buf); + auto it = payload_type_map_.find(header.payloadType); + mime_type << webrtc::MediaTypeToString(webrtc::MediaType::AUDIO) << "/" + << (it != payload_type_map_.end() ? it->second.name + : "x-unknown"); + frame_transformer_delegate_->Transform(payload_data, header, remote_ssrc_, + mime_type.str(), receive_time); } else { - OnReceivedPayloadData(payload_data, header); + OnReceivedPayloadData(payload_data, header, receive_time); } } void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - // TODO(bugs.webrtc.org/11993): Expect to be called exclusively on the - // network thread. // Store playout timestamp for the received RTCP packet - UpdatePlayoutTimestamp(true, rtc::TimeMillis()); + UpdatePlayoutTimestamp(true, TimeMillis()); // Deliver RTCP packet to RTP/RTCP module for parsing - rtp_rtcp_->IncomingRtcpPacket(rtc::MakeArrayView(data, length)); + rtp_rtcp_->IncomingRtcpPacket(MakeArrayView(data, length)); - absl::optional rtt = rtp_rtcp_->LastRtt(); + std::optional rtt = rtp_rtcp_->LastRtt(); if (!rtt.has_value()) { // Waiting for valid RTT. return; } - absl::optional last_sr = + std::optional last_sr = rtp_rtcp_->GetSenderReportStats(); if (!last_sr.has_value()) { // Waiting for RTCP. @@ -760,9 +781,9 @@ void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) { { MutexLock lock(&ts_stats_lock_); - ntp_estimator_.UpdateRtcpTimestamp(*rtt, last_sr->last_remote_timestamp, + ntp_estimator_.UpdateRtcpTimestamp(*rtt, last_sr->last_remote_ntp_timestamp, last_sr->last_remote_rtp_timestamp); - absl::optional remote_to_local_clock_offset = + std::optional remote_to_local_clock_offset = ntp_estimator_.EstimateRemoteToLocalClockOffset(); if (remote_to_local_clock_offset.has_value()) { capture_clock_offset_updater_.SetRemoteToLocalClockOffset( @@ -822,23 +843,17 @@ CallReceiveStatistics ChannelReceive::GetRTCPStatistics() const { rtp_stats = statistician->GetStats(); } - stats.cumulativeLost = rtp_stats.packets_lost; - stats.jitterSamples = rtp_stats.jitter; + stats.packets_lost = rtp_stats.packets_lost; + stats.jitter_ms = rtp_stats.interarrival_jitter.ms(); // Data counters. if (statistician) { stats.payload_bytes_received = rtp_stats.packet_counter.payload_bytes; - stats.header_and_padding_bytes_received = rtp_stats.packet_counter.header_bytes + rtp_stats.packet_counter.padding_bytes; - stats.packetsReceived = rtp_stats.packet_counter.packets; + stats.packets_received = rtp_stats.packet_counter.packets; stats.last_packet_received = rtp_stats.last_packet_received; - } else { - stats.payload_bytes_received = 0; - stats.header_and_padding_bytes_received = 0; - stats.packetsReceived = 0; - stats.last_packet_received = absl::nullopt; } { @@ -849,23 +864,23 @@ CallReceiveStatistics ChannelReceive::GetRTCPStatistics() const { // Timestamps. 
{ MutexLock lock(&ts_stats_lock_); - stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_; + stats.capture_start_ntp_time_ms = capture_start_ntp_time_ms_; } - absl::optional rtcp_sr_stats = + std::optional rtcp_sr_stats = rtp_rtcp_->GetSenderReportStats(); if (rtcp_sr_stats.has_value()) { - stats.last_sender_report_timestamp_ms = - rtcp_sr_stats->last_arrival_timestamp.ToMs() - - rtc::kNtpJan1970Millisecs; - stats.last_sender_report_remote_timestamp_ms = - rtcp_sr_stats->last_remote_timestamp.ToMs() - rtc::kNtpJan1970Millisecs; + stats.last_sender_report_timestamp = rtcp_sr_stats->last_arrival_timestamp; + stats.last_sender_report_utc_timestamp = + Clock::NtpToUtc(rtcp_sr_stats->last_arrival_ntp_timestamp); + stats.last_sender_report_remote_utc_timestamp = + Clock::NtpToUtc(rtcp_sr_stats->last_remote_ntp_timestamp); stats.sender_reports_packets_sent = rtcp_sr_stats->packets_sent; stats.sender_reports_bytes_sent = rtcp_sr_stats->bytes_sent; stats.sender_reports_reports_count = rtcp_sr_stats->reports_count; } - absl::optional non_sender_rtt_stats = + std::optional non_sender_rtt_stats = rtp_rtcp_->GetNonSenderRttStats(); if (non_sender_rtt_stats.has_value()) { stats.round_trip_time = non_sender_rtt_stats->round_trip_time; @@ -881,15 +896,21 @@ void ChannelReceive::SetNACKStatus(bool enable, int max_packets) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); // None of these functions can fail. if (enable) { - rtp_receive_statistics_->SetMaxReorderingThreshold(max_packets); - acm_receiver_.EnableNack(max_packets); + rtp_receive_statistics_->SetMaxReorderingThreshold(remote_ssrc_, + max_packets); + neteq_->EnableNack(max_packets); } else { rtp_receive_statistics_->SetMaxReorderingThreshold( - kDefaultMaxReorderingThreshold); - acm_receiver_.DisableNack(); + remote_ssrc_, kDefaultMaxReorderingThreshold); + neteq_->DisableNack(); } } +void ChannelReceive::SetRtcpMode(webrtc::RtcpMode mode) { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + rtp_rtcp_->SetRTCPStatus(mode); +} + void ChannelReceive::SetNonSenderRttMeasurement(bool enabled) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); rtp_rtcp_->SetNonSenderRttMeasurement(enabled); @@ -911,77 +932,129 @@ void ChannelReceive::RtcpPacketTypesCounterUpdated( rtcp_packet_type_counter_ = packet_counter; } -void ChannelReceive::SetAssociatedSendChannel( - const ChannelSendInterface* channel) { - RTC_DCHECK_RUN_ON(&network_thread_checker_); - associated_send_channel_ = channel; -} - void ChannelReceive::SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) { + scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - // Depending on when the channel is created, the transformer might be set - // twice. Don't replace the delegate if it was already initialized. - if (!frame_transformer || frame_transformer_delegate_) { + if (!frame_transformer) { RTC_DCHECK_NOTREACHED() << "Not setting the transformer?"; return; } + if (frame_transformer_delegate_) { + // Depending on when the channel is created, the transformer might be set + // twice. Don't replace the delegate if it was already initialized. + // TODO(crbug.com/webrtc/15674): Prevent multiple calls during + // reconfiguration. + RTC_CHECK_EQ(frame_transformer_delegate_->FrameTransformer(), + frame_transformer); + return; + } InitFrameTransformerDelegate(std::move(frame_transformer)); } void ChannelReceive::SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) { - // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread. 
+ scoped_refptr frame_decryptor) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); frame_decryptor_ = std::move(frame_decryptor); } void ChannelReceive::OnLocalSsrcChange(uint32_t local_ssrc) { - // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread. RTC_DCHECK_RUN_ON(&worker_thread_checker_); rtp_rtcp_->SetLocalSsrc(local_ssrc); } -uint32_t ChannelReceive::GetLocalSsrc() const { - // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread. - RTC_DCHECK_RUN_ON(&worker_thread_checker_); - return rtp_rtcp_->local_media_ssrc(); -} - NetworkStatistics ChannelReceive::GetNetworkStatistics( bool get_and_clear_legacy_stats) const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - NetworkStatistics stats; - acm_receiver_.GetNetworkStatistics(&stats, get_and_clear_legacy_stats); - return stats; + NetworkStatistics acm_stat; + NetEqNetworkStatistics neteq_stat; + if (get_and_clear_legacy_stats) { + // NetEq function always returns zero, so we don't check the return value. + neteq_->NetworkStatistics(&neteq_stat); + + acm_stat.currentExpandRate = neteq_stat.expand_rate; + acm_stat.currentSpeechExpandRate = neteq_stat.speech_expand_rate; + acm_stat.currentPreemptiveRate = neteq_stat.preemptive_rate; + acm_stat.currentAccelerateRate = neteq_stat.accelerate_rate; + acm_stat.currentSecondaryDecodedRate = neteq_stat.secondary_decoded_rate; + acm_stat.currentSecondaryDiscardedRate = + neteq_stat.secondary_discarded_rate; + acm_stat.meanWaitingTimeMs = neteq_stat.mean_waiting_time_ms; + acm_stat.maxWaitingTimeMs = neteq_stat.max_waiting_time_ms; + } else { + neteq_stat = neteq_->CurrentNetworkStatistics(); + acm_stat.currentExpandRate = 0; + acm_stat.currentSpeechExpandRate = 0; + acm_stat.currentPreemptiveRate = 0; + acm_stat.currentAccelerateRate = 0; + acm_stat.currentSecondaryDecodedRate = 0; + acm_stat.currentSecondaryDiscardedRate = 0; + acm_stat.meanWaitingTimeMs = -1; + acm_stat.maxWaitingTimeMs = 1; + } + acm_stat.currentBufferSize = neteq_stat.current_buffer_size_ms; + acm_stat.preferredBufferSize = neteq_stat.preferred_buffer_size_ms; + acm_stat.jitterPeaksFound = neteq_stat.jitter_peaks_found ? 
      true : false;
+
+  NetEqLifetimeStatistics neteq_lifetime_stat = neteq_->GetLifetimeStatistics();
+  acm_stat.totalSamplesReceived = neteq_lifetime_stat.total_samples_received;
+  acm_stat.concealedSamples = neteq_lifetime_stat.concealed_samples;
+  acm_stat.silentConcealedSamples =
+      neteq_lifetime_stat.silent_concealed_samples;
+  acm_stat.concealmentEvents = neteq_lifetime_stat.concealment_events;
+  acm_stat.jitterBufferDelayMs = neteq_lifetime_stat.jitter_buffer_delay_ms;
+  acm_stat.jitterBufferTargetDelayMs =
+      neteq_lifetime_stat.jitter_buffer_target_delay_ms;
+  acm_stat.jitterBufferMinimumDelayMs =
+      neteq_lifetime_stat.jitter_buffer_minimum_delay_ms;
+  acm_stat.jitterBufferEmittedCount =
+      neteq_lifetime_stat.jitter_buffer_emitted_count;
+  acm_stat.delayedPacketOutageSamples =
+      neteq_lifetime_stat.delayed_packet_outage_samples;
+  acm_stat.relativePacketArrivalDelayMs =
+      neteq_lifetime_stat.relative_packet_arrival_delay_ms;
+  acm_stat.interruptionCount = neteq_lifetime_stat.interruption_count;
+  acm_stat.totalInterruptionDurationMs =
+      neteq_lifetime_stat.total_interruption_duration_ms;
+  acm_stat.insertedSamplesForDeceleration =
+      neteq_lifetime_stat.inserted_samples_for_deceleration;
+  acm_stat.removedSamplesForAcceleration =
+      neteq_lifetime_stat.removed_samples_for_acceleration;
+  acm_stat.fecPacketsReceived = neteq_lifetime_stat.fec_packets_received;
+  acm_stat.fecPacketsDiscarded = neteq_lifetime_stat.fec_packets_discarded;
+  acm_stat.totalProcessingDelayUs =
+      neteq_lifetime_stat.total_processing_delay_us;
+  acm_stat.packetsDiscarded = neteq_lifetime_stat.packets_discarded;
+
+  NetEqOperationsAndState neteq_operations_and_state =
+      neteq_->GetOperationsAndState();
+  acm_stat.packetBufferFlushes =
+      neteq_operations_and_state.packet_buffer_flushes;
+  return acm_stat;
 }

 AudioDecodingCallStats ChannelReceive::GetDecodingCallStatistics() const {
   RTC_DCHECK_RUN_ON(&worker_thread_checker_);
-  AudioDecodingCallStats stats;
-  acm_receiver_.GetDecodingCallStatistics(&stats);
-  return stats;
+  MutexLock lock(&call_stats_mutex_);
+  return call_stats_.GetDecodingStatistics();
 }

 uint32_t ChannelReceive::GetDelayEstimate() const {
   RTC_DCHECK_RUN_ON(&worker_thread_checker_);
   // Return the current jitter buffer delay + playout delay.
-  return acm_receiver_.FilteredCurrentDelayMs() + playout_delay_ms_;
+  return neteq_->FilteredCurrentDelayMs() + playout_delay_ms_;
 }

 bool ChannelReceive::SetMinimumPlayoutDelay(int delay_ms) {
-  // TODO(bugs.webrtc.org/11993): This should run on the network thread.
-  // We get here via RtpStreamsSynchronizer. Once that's done, many (all?) of
-  // these locks aren't needed.
   RTC_DCHECK_RUN_ON(&worker_thread_checker_);
   // Limit to range accepted by both VoE and ACM, so we're at least getting as
   // close as possible, instead of failing.
- delay_ms = rtc::SafeClamp(delay_ms, kVoiceEngineMinMinPlayoutDelayMs, - kVoiceEngineMaxMinPlayoutDelayMs); - if (acm_receiver_.SetMinimumDelay(delay_ms) != 0) { + delay_ms = SafeClamp(delay_ms, kVoiceEngineMinMinPlayoutDelayMs, + kVoiceEngineMaxMinPlayoutDelayMs); + if (!neteq_->SetMinimumDelay(delay_ms)) { RTC_DLOG(LS_ERROR) - << "SetMinimumPlayoutDelay() failed to set min playout delay"; + << "SetMinimumPlayoutDelay() failed to set min playout delay " + << delay_ms; return false; } return true; @@ -1004,48 +1077,45 @@ void ChannelReceive::SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, playout_timestamp_ntp_time_ms_ = time_ms; } -absl::optional -ChannelReceive::GetCurrentEstimatedPlayoutNtpTimestampMs(int64_t now_ms) const { +std::optional ChannelReceive::GetCurrentEstimatedPlayoutNtpTimestampMs( + int64_t now_ms) const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); if (!playout_timestamp_ntp_ || !playout_timestamp_ntp_time_ms_) - return absl::nullopt; + return std::nullopt; int64_t elapsed_ms = now_ms - *playout_timestamp_ntp_time_ms_; return *playout_timestamp_ntp_ + elapsed_ms; } bool ChannelReceive::SetBaseMinimumPlayoutDelayMs(int delay_ms) { - event_log_->Log( + env_.event_log().Log( std::make_unique(remote_ssrc_, delay_ms)); - return acm_receiver_.SetBaseMinimumDelayMs(delay_ms); + return neteq_->SetBaseMinimumDelayMs(delay_ms); } int ChannelReceive::GetBaseMinimumPlayoutDelayMs() const { - return acm_receiver_.GetBaseMinimumDelayMs(); + return neteq_->GetBaseMinimumDelayMs(); } -absl::optional ChannelReceive::GetSyncInfo() const { - // TODO(bugs.webrtc.org/11993): This should run on the network thread. - // We get here via RtpStreamsSynchronizer. Once that's done, many of - // these locks aren't needed. +std::optional ChannelReceive::GetSyncInfo() const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); Syncable::Info info; - absl::optional last_sr = + std::optional last_sr = rtp_rtcp_->GetSenderReportStats(); if (!last_sr.has_value()) { - return absl::nullopt; + return std::nullopt; } - info.capture_time_ntp_secs = last_sr->last_remote_timestamp.seconds(); - info.capture_time_ntp_frac = last_sr->last_remote_timestamp.fractions(); + info.capture_time_ntp_secs = last_sr->last_remote_ntp_timestamp.seconds(); + info.capture_time_ntp_frac = last_sr->last_remote_ntp_timestamp.fractions(); info.capture_time_source_clock = last_sr->last_remote_rtp_timestamp; if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_ms_) { - return absl::nullopt; + return std::nullopt; } info.latest_received_capture_timestamp = *last_received_rtp_timestamp_; info.latest_receive_time_ms = *last_received_rtp_system_time_ms_; - int jitter_buffer_delay = acm_receiver_.FilteredCurrentDelayMs(); + int jitter_buffer_delay = neteq_->FilteredCurrentDelayMs(); info.current_delay_ms = jitter_buffer_delay + playout_delay_ms_; return info; @@ -1053,10 +1123,8 @@ absl::optional ChannelReceive::GetSyncInfo() const { void ChannelReceive::UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - // TODO(bugs.webrtc.org/11993): Expect to be called exclusively on the - // network thread. Once that's done, we won't need video_sync_lock_. - jitter_buffer_playout_timestamp_ = acm_receiver_.GetPlayoutTimestamp(); + jitter_buffer_playout_timestamp_ = neteq_->GetPlayoutTimestamp(); if (!jitter_buffer_playout_timestamp_) { // This can happen if this channel has not received any RTP packets. 
In @@ -1086,44 +1154,49 @@ void ChannelReceive::UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms) { } int ChannelReceive::GetRtpTimestampRateHz() const { - const auto decoder = acm_receiver_.LastDecoder(); + const auto decoder_format = neteq_->GetCurrentDecoderFormat(); + // Default to the playout frequency if we've not gotten any packets yet. - // TODO(ossu): Zero clockrate can only happen if we've added an external + // TODO(ossu): Zero clock rate can only happen if we've added an external // decoder for a format we don't support internally. Remove once that way of // adding decoders is gone! - // TODO(kwiberg): `decoder->second.clockrate_hz` is an RTP clockrate as it - // should, but `acm_receiver_.last_output_sample_rate_hz()` is a codec sample - // rate, which is not always the same thing. - return (decoder && decoder->second.clockrate_hz != 0) - ? decoder->second.clockrate_hz - : acm_receiver_.last_output_sample_rate_hz(); + // TODO(kwiberg): `decoder_format->sdp_format.clockrate_hz` is an RTP + // clock rate as it should, but `neteq_->last_output_sample_rate_hz()` is a + // codec sample rate, which is not always the same thing. + return (decoder_format && decoder_format->sdp_format.clockrate_hz != 0) + ? decoder_format->sdp_format.clockrate_hz + : neteq_->last_output_sample_rate_hz(); +} + +std::vector ChannelReceive::GetSources() const { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + return source_tracker_.GetSources(); } } // namespace std::unique_ptr CreateChannelReceive( - Clock* clock, + const Environment& env, NetEqFactory* neteq_factory, AudioDeviceModule* audio_device_module, Transport* rtcp_send_transport, - RtcEventLog* rtc_event_log, uint32_t local_ssrc, uint32_t remote_ssrc, size_t jitter_buffer_max_packets, bool jitter_buffer_fast_playout, int jitter_buffer_min_delay_ms, bool enable_non_sender_rtt, - rtc::scoped_refptr decoder_factory, - absl::optional codec_pair_id, - rtc::scoped_refptr frame_decryptor, + scoped_refptr decoder_factory, + std::optional codec_pair_id, + scoped_refptr frame_decryptor, const webrtc::CryptoOptions& crypto_options, - rtc::scoped_refptr frame_transformer) { + scoped_refptr frame_transformer) { return std::make_unique( - clock, neteq_factory, audio_device_module, rtcp_send_transport, - rtc_event_log, local_ssrc, remote_ssrc, jitter_buffer_max_packets, - jitter_buffer_fast_playout, jitter_buffer_min_delay_ms, - enable_non_sender_rtt, decoder_factory, codec_pair_id, - std::move(frame_decryptor), crypto_options, std::move(frame_transformer)); + env, neteq_factory, audio_device_module, rtcp_send_transport, local_ssrc, + remote_ssrc, jitter_buffer_max_packets, jitter_buffer_fast_playout, + jitter_buffer_min_delay_ms, enable_non_sender_rtt, decoder_factory, + codec_pair_id, std::move(frame_decryptor), crypto_options, + std::move(frame_transformer)); } } // namespace voe diff --git a/audio/channel_receive.h b/audio/channel_receive.h index ab69103269..ee72187b7d 100644 --- a/audio/channel_receive.h +++ b/audio/channel_receive.h @@ -11,33 +11,33 @@ #ifndef AUDIO_CHANNEL_RECEIVE_H_ #define AUDIO_CHANNEL_RECEIVE_H_ +#include +#include #include #include +#include #include #include -#include "absl/types/optional.h" +#include "api/audio/audio_frame.h" #include "api/audio/audio_mixer.h" +#include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_format.h" #include "api/call/audio_sink.h" #include "api/call/transport.h" #include "api/crypto/crypto_options.h" +#include 
"api/environment/environment.h" #include "api/frame_transformer_interface.h" #include "api/neteq/neteq_factory.h" +#include "api/rtp_headers.h" +#include "api/scoped_refptr.h" #include "api/transport/rtp/rtp_source.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "call/rtp_packet_sink_interface.h" #include "call/syncable.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" -#include "modules/rtp_rtcp/source/source_tracker.h" -#include "system_wrappers/include/clock.h" - -// TODO(solenberg, nisse): This file contains a few NOLINT marks, to silence -// warnings about use of unsigned short. -// These need cleanup, in a separate cl. - -namespace rtc { -class TimestampWrapAroundHandler; -} namespace webrtc { @@ -46,36 +46,38 @@ class FrameDecryptorInterface; class PacketRouter; class RateLimiter; class ReceiveStatistics; -class RtcEventLog; class RtpPacketReceived; class RtpRtcp; struct CallReceiveStatistics { - int cumulativeLost; - unsigned int jitterSamples; + int packets_lost = 0; + uint32_t jitter_ms = 0; int64_t payload_bytes_received = 0; int64_t header_and_padding_bytes_received = 0; - int packetsReceived; + int packets_received = 0; uint32_t nacks_sent = 0; // The capture NTP time (in local timebase) of the first played out audio // frame. - int64_t capture_start_ntp_time_ms_; + int64_t capture_start_ntp_time_ms = 0; // The timestamp at which the last packet was received, i.e. the time of the // local clock when it was received - not the RTP timestamp of that packet. // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-lastpacketreceivedtimestamp - absl::optional last_packet_received; + std::optional last_packet_received; // Remote outbound stats derived by the received RTCP sender reports. // Note that the timestamps below correspond to the time elapsed since the // Unix epoch. // https://w3c.github.io/webrtc-stats/#remoteoutboundrtpstats-dict* - absl::optional last_sender_report_timestamp_ms; - absl::optional last_sender_report_remote_timestamp_ms; + std::optional last_sender_report_timestamp; + // TODO: bugs.webrtc.org/370535296 - Remove the utc timestamp when linked + // issue is fixed. + std::optional last_sender_report_utc_timestamp; + std::optional last_sender_report_remote_utc_timestamp; uint64_t sender_reports_packets_sent = 0; uint64_t sender_reports_bytes_sent = 0; uint64_t sender_reports_reports_count = 0; - absl::optional round_trip_time; + std::optional round_trip_time; TimeDelta total_round_trip_time = TimeDelta::Zero(); - int round_trip_time_measurements; + int round_trip_time_measurements = 0; }; namespace voe { @@ -98,7 +100,7 @@ class ChannelReceiveInterface : public RtpPacketSinkInterface { virtual void StopPlayout() = 0; // Payload type and format of last received RTP packet, if any. - virtual absl::optional> GetReceiveCodec() + virtual std::optional> GetReceiveCodec() const = 0; virtual void ReceivedRTCPPacket(const uint8_t* data, size_t length) = 0; @@ -122,7 +124,7 @@ class ChannelReceiveInterface : public RtpPacketSinkInterface { int64_t* time_ms) const = 0; virtual void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, int64_t time_ms) = 0; - virtual absl::optional GetCurrentEstimatedPlayoutNtpTimestampMs( + virtual std::optional GetCurrentEstimatedPlayoutNtpTimestampMs( int64_t now_ms) const = 0; // Audio quality. 
@@ -132,7 +134,7 @@ class ChannelReceiveInterface : public RtpPacketSinkInterface { virtual int GetBaseMinimumPlayoutDelayMs() const = 0; // Produces the transport-related timestamps; current_delay_ms is left unset. - virtual absl::optional GetSyncInfo() const = 0; + virtual std::optional GetSyncInfo() const = 0; virtual void RegisterReceiverCongestionControlObjects( PacketRouter* packet_router) = 0; @@ -140,6 +142,7 @@ class ChannelReceiveInterface : public RtpPacketSinkInterface { virtual CallReceiveStatistics GetRTCPStatistics() const = 0; virtual void SetNACKStatus(bool enable, int max_packets) = 0; + virtual void SetRtcpMode(webrtc::RtcpMode mode) = 0; virtual void SetNonSenderRttMeasurement(bool enabled) = 0; virtual AudioMixer::Source::AudioFrameInfo GetAudioFrameWithInfo( @@ -148,45 +151,35 @@ class ChannelReceiveInterface : public RtpPacketSinkInterface { virtual int PreferredSampleRate() const = 0; - // Sets the source tracker to notify about "delivered" packets when output is - // muted. - virtual void SetSourceTracker(SourceTracker* source_tracker) = 0; - - // Associate to a send channel. - // Used for obtaining RTT for a receive-only channel. - virtual void SetAssociatedSendChannel( - const ChannelSendInterface* channel) = 0; + virtual std::vector GetSources() const = 0; // Sets a frame transformer between the depacketizer and the decoder, to // transform the received frames before decoding them. virtual void SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr - frame_transformer) = 0; + scoped_refptr frame_transformer) = 0; virtual void SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) = 0; + scoped_refptr frame_decryptor) = 0; virtual void OnLocalSsrcChange(uint32_t local_ssrc) = 0; - virtual uint32_t GetLocalSsrc() const = 0; }; std::unique_ptr CreateChannelReceive( - Clock* clock, + const Environment& env, NetEqFactory* neteq_factory, AudioDeviceModule* audio_device_module, Transport* rtcp_send_transport, - RtcEventLog* rtc_event_log, uint32_t local_ssrc, uint32_t remote_ssrc, size_t jitter_buffer_max_packets, bool jitter_buffer_fast_playout, int jitter_buffer_min_delay_ms, bool enable_non_sender_rtt, - rtc::scoped_refptr decoder_factory, - absl::optional codec_pair_id, - rtc::scoped_refptr frame_decryptor, + scoped_refptr decoder_factory, + std::optional codec_pair_id, + scoped_refptr frame_decryptor, const webrtc::CryptoOptions& crypto_options, - rtc::scoped_refptr frame_transformer); + scoped_refptr frame_transformer); } // namespace voe } // namespace webrtc diff --git a/audio/channel_receive_frame_transformer_delegate.cc b/audio/channel_receive_frame_transformer_delegate.cc index 2d2893b8f7..42f40b764a 100644 --- a/audio/channel_receive_frame_transformer_delegate.cc +++ b/audio/channel_receive_frame_transformer_delegate.cc @@ -10,26 +10,44 @@ #include "audio/channel_receive_frame_transformer_delegate.h" +#include +#include +#include +#include #include +#include "api/array_view.h" +#include "api/frame_transformer_interface.h" +#include "api/rtp_headers.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "rtc_base/buffer.h" +#include "rtc_base/string_encode.h" +#include "system_wrappers/include/ntp_time.h" namespace webrtc { -namespace { class TransformableIncomingAudioFrame : public TransformableAudioFrameInterface { public: - TransformableIncomingAudioFrame(rtc::ArrayView payload, + 
TransformableIncomingAudioFrame(ArrayView payload, const RTPHeader& header, - uint32_t ssrc) - : payload_(payload.data(), payload.size()), + uint32_t ssrc, + const std::string& codec_mime_type, + Timestamp receive_time) + : TransformableAudioFrameInterface(Passkey()), + payload_(payload.data(), payload.size()), header_(header), - ssrc_(ssrc) {} + ssrc_(ssrc), + codec_mime_type_(codec_mime_type), + receive_time_(receive_time) {} ~TransformableIncomingAudioFrame() override = default; - rtc::ArrayView GetData() const override { return payload_; } + ArrayView GetData() const override { return payload_; } - void SetData(rtc::ArrayView data) override { + void SetData(ArrayView data) override { payload_.SetData(data.data(), data.size()); } @@ -40,37 +58,76 @@ class TransformableIncomingAudioFrame uint8_t GetPayloadType() const override { return header_.payloadType; } uint32_t GetSsrc() const override { return ssrc_; } uint32_t GetTimestamp() const override { return header_.timestamp; } - rtc::ArrayView GetContributingSources() const override { - return rtc::ArrayView(header_.arrOfCSRCs, header_.numCSRCs); + ArrayView GetContributingSources() const override { + return ArrayView(header_.arrOfCSRCs, header_.numCSRCs); } Direction GetDirection() const override { return Direction::kReceiver; } - const absl::optional SequenceNumber() const override { + std::string GetMimeType() const override { return codec_mime_type_; } + const std::optional SequenceNumber() const override { return header_.sequenceNumber; } - absl::optional AbsoluteCaptureTimestamp() const override { + std::optional AbsoluteCaptureTimestamp() const override { // This could be extracted from received header extensions + extrapolation, // if required in future, eg for being able to re-send received frames. - return absl::nullopt; + return std::nullopt; } const RTPHeader& Header() const { return header_; } FrameType Type() const override { - return header_.extension.voiceActivity ? FrameType::kAudioFrameSpeech - : FrameType::kAudioFrameCN; + if (!header_.extension.audio_level()) { + // Audio level extension not set. + return FrameType::kAudioFrameCN; + } + return header_.extension.audio_level()->voice_activity() + ? FrameType::kAudioFrameSpeech + : FrameType::kAudioFrameCN; + } + + std::optional AudioLevel() const override { + if (header_.extension.audio_level()) { + return header_.extension.audio_level()->level(); + } + return std::nullopt; + } + + std::optional ReceiveTime() const override { + return receive_time_ == Timestamp::MinusInfinity() + ? 
std::nullopt + : std::optional(receive_time_); + } + + std::optional CaptureTime() const override { + if (header_.extension.absolute_capture_time) { + return Timestamp::Micros(UQ32x32ToInt64Us( + header_.extension.absolute_capture_time->absolute_capture_timestamp)); + } + return std::nullopt; + } + + std::optional SenderCaptureTimeOffset() const override { + if (header_.extension.absolute_capture_time && + header_.extension.absolute_capture_time + ->estimated_capture_clock_offset) { + return TimeDelta::Micros( + Q32x32ToInt64Us(*header_.extension.absolute_capture_time + ->estimated_capture_clock_offset)); + } + return std::nullopt; } private: - rtc::Buffer payload_; + Buffer payload_; RTPHeader header_; uint32_t ssrc_; + std::string codec_mime_type_; + Timestamp receive_time_; }; -} // namespace ChannelReceiveFrameTransformerDelegate::ChannelReceiveFrameTransformerDelegate( ReceiveFrameCallback receive_frame_callback, - rtc::scoped_refptr frame_transformer, + scoped_refptr frame_transformer, TaskQueueBase* channel_receive_thread) : receive_frame_callback_(receive_frame_callback), frame_transformer_(std::move(frame_transformer)), @@ -79,7 +136,7 @@ ChannelReceiveFrameTransformerDelegate::ChannelReceiveFrameTransformerDelegate( void ChannelReceiveFrameTransformerDelegate::Init() { RTC_DCHECK_RUN_ON(&sequence_checker_); frame_transformer_->RegisterTransformedFrameCallback( - rtc::scoped_refptr(this)); + scoped_refptr(this)); } void ChannelReceiveFrameTransformerDelegate::Reset() { @@ -90,34 +147,51 @@ void ChannelReceiveFrameTransformerDelegate::Reset() { } void ChannelReceiveFrameTransformerDelegate::Transform( - rtc::ArrayView packet, + ArrayView packet, const RTPHeader& header, - uint32_t ssrc) { + uint32_t ssrc, + const std::string& codec_mime_type, + Timestamp receive_time) { RTC_DCHECK_RUN_ON(&sequence_checker_); - frame_transformer_->Transform( - std::make_unique(packet, header, ssrc)); + if (short_circuit_) { + receive_frame_callback_(packet, header, receive_time); + } else { + frame_transformer_->Transform( + std::make_unique( + packet, header, ssrc, codec_mime_type, receive_time)); + } } void ChannelReceiveFrameTransformerDelegate::OnTransformedFrame( std::unique_ptr frame) { - rtc::scoped_refptr delegate(this); + scoped_refptr delegate(this); channel_receive_thread_->PostTask( [delegate = std::move(delegate), frame = std::move(frame)]() mutable { delegate->ReceiveFrame(std::move(frame)); }); } +void ChannelReceiveFrameTransformerDelegate::StartShortCircuiting() { + scoped_refptr delegate(this); + channel_receive_thread_->PostTask([delegate = std::move(delegate)]() mutable { + RTC_DCHECK_RUN_ON(&delegate->sequence_checker_); + delegate->short_circuit_ = true; + }); +} + void ChannelReceiveFrameTransformerDelegate::ReceiveFrame( std::unique_ptr frame) const { RTC_DCHECK_RUN_ON(&sequence_checker_); if (!receive_frame_callback_) return; + auto* transformed_frame = + static_cast(frame.get()); + Timestamp receive_time = + transformed_frame->ReceiveTime().value_or(Timestamp::MinusInfinity()); RTPHeader header; if (frame->GetDirection() == TransformableFrameInterface::Direction::kSender) { - auto* transformed_frame = - static_cast(frame.get()); header.payloadType = transformed_frame->GetPayloadType(); header.timestamp = transformed_frame->GetTimestamp(); header.ssrc = transformed_frame->GetSsrc(); @@ -127,15 +201,34 @@ void ChannelReceiveFrameTransformerDelegate::ReceiveFrame( transformed_frame->AbsoluteCaptureTimestamp().value(); } } else { - auto* transformed_frame = + auto* 
transformed_incoming_frame = static_cast(frame.get()); - header = transformed_frame->Header(); + header = transformed_incoming_frame->Header(); } // TODO(crbug.com/1464860): Take an explicit struct with the required // information rather than the RTPHeader to make it easier to // construct the required information when injecting transformed frames not // originally from this receiver. - receive_frame_callback_(frame->GetData(), header); + receive_frame_callback_(frame->GetData(), header, receive_time); +} + +scoped_refptr +ChannelReceiveFrameTransformerDelegate::FrameTransformer() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return frame_transformer_; +} + +std::unique_ptr CloneReceiverAudioFrame( + TransformableAudioFrameInterface* original) { + RTC_CHECK(original->GetDirection() == + TransformableFrameInterface::Direction::kReceiver); + + auto* original_incoming_frame = + static_cast(original); + return std::make_unique( + original->GetData(), original_incoming_frame->Header(), + original->GetSsrc(), original->GetMimeType(), + original->ReceiveTime().value_or(Timestamp::MinusInfinity())); } } // namespace webrtc diff --git a/audio/channel_receive_frame_transformer_delegate.h b/audio/channel_receive_frame_transformer_delegate.h index 04ad7c4695..b856337442 100644 --- a/audio/channel_receive_frame_transformer_delegate.h +++ b/audio/channel_receive_frame_transformer_delegate.h @@ -12,11 +12,14 @@ #define AUDIO_CHANNEL_RECEIVE_FRAME_TRANSFORMER_DELEGATE_H_ #include +#include #include "api/frame_transformer_interface.h" +#include "api/rtp_headers.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/units/timestamp.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_queue.h" #include "rtc_base/thread.h" namespace webrtc { @@ -27,11 +30,12 @@ namespace webrtc { class ChannelReceiveFrameTransformerDelegate : public TransformedFrameCallback { public: using ReceiveFrameCallback = - std::function packet, - const RTPHeader& header)>; + std::function packet, + const RTPHeader& header, + Timestamp receive_time)>; ChannelReceiveFrameTransformerDelegate( ReceiveFrameCallback receive_frame_callback, - rtc::scoped_refptr frame_transformer, + scoped_refptr frame_transformer, TaskQueueBase* channel_receive_thread); // Registers `this` as callback for `frame_transformer_`, to get the @@ -46,18 +50,24 @@ class ChannelReceiveFrameTransformerDelegate : public TransformedFrameCallback { // Delegates the call to FrameTransformerInterface::Transform, to transform // the frame asynchronously. - void Transform(rtc::ArrayView packet, + void Transform(ArrayView packet, const RTPHeader& header, - uint32_t ssrc); + uint32_t ssrc, + const std::string& codec_mime_type, + Timestamp receive_time); // Implements TransformedFrameCallback. Can be called on any thread. void OnTransformedFrame( std::unique_ptr frame) override; + void StartShortCircuiting() override; + // Delegates the call to ChannelReceive::OnReceivedPayloadData on the // `channel_receive_thread_`, by calling `receive_frame_callback_`. 
void ReceiveFrame(std::unique_ptr frame) const; + scoped_refptr FrameTransformer(); + protected: ~ChannelReceiveFrameTransformerDelegate() override = default; @@ -65,10 +75,14 @@ class ChannelReceiveFrameTransformerDelegate : public TransformedFrameCallback { RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; ReceiveFrameCallback receive_frame_callback_ RTC_GUARDED_BY(sequence_checker_); - rtc::scoped_refptr frame_transformer_ + scoped_refptr frame_transformer_ RTC_GUARDED_BY(sequence_checker_); TaskQueueBase* const channel_receive_thread_; + bool short_circuit_ RTC_GUARDED_BY(sequence_checker_) = false; }; +std::unique_ptr CloneReceiverAudioFrame( + TransformableAudioFrameInterface* original); + } // namespace webrtc #endif // AUDIO_CHANNEL_RECEIVE_FRAME_TRANSFORMER_DELEGATE_H_ diff --git a/audio/channel_receive_frame_transformer_delegate_unittest.cc b/audio/channel_receive_frame_transformer_delegate_unittest.cc index 38ceb6d96d..95bfd50b13 100644 --- a/audio/channel_receive_frame_transformer_delegate_unittest.cc +++ b/audio/channel_receive_frame_transformer_delegate_unittest.cc @@ -10,14 +10,23 @@ #include "audio/channel_receive_frame_transformer_delegate.h" +#include #include #include -#include "audio/channel_send_frame_transformer_delegate.h" +#include "api/array_view.h" +#include "api/frame_transformer_factory.h" +#include "api/frame_transformer_interface.h" +#include "api/make_ref_counted.h" +#include "api/rtp_headers.h" +#include "api/scoped_refptr.h" +#include "api/test/mock_frame_transformer.h" +#include "api/test/mock_transformable_audio_frame.h" +#include "api/units/timestamp.h" +#include "rtc_base/thread.h" +#include "system_wrappers/include/ntp_time.h" #include "test/gmock.h" #include "test/gtest.h" -#include "test/mock_frame_transformer.h" -#include "test/mock_transformable_frame.h" namespace webrtc { namespace { @@ -27,25 +36,31 @@ using ::testing::ElementsAre; using ::testing::NiceMock; using ::testing::SaveArg; +constexpr Timestamp kFakeReceiveTimestamp = Timestamp::Millis(1234567); + class MockChannelReceive { public: MOCK_METHOD(void, ReceiveFrame, - (rtc::ArrayView packet, const RTPHeader& header)); + (webrtc::ArrayView packet, + const RTPHeader& header, + Timestamp receive_time)); ChannelReceiveFrameTransformerDelegate::ReceiveFrameCallback callback() { - return [this](rtc::ArrayView packet, - const RTPHeader& header) { ReceiveFrame(packet, header); }; + return [this](ArrayView packet, const RTPHeader& header, + Timestamp receive_time) { + ReceiveFrame(packet, header, receive_time); + }; } }; // Test that the delegate registers itself with the frame transformer on Init(). TEST(ChannelReceiveFrameTransformerDelegateTest, RegisterTransformedFrameCallbackOnInit) { - rtc::scoped_refptr mock_frame_transformer = - rtc::make_ref_counted(); - rtc::scoped_refptr delegate = - rtc::make_ref_counted( + scoped_refptr mock_frame_transformer = + make_ref_counted(); + scoped_refptr delegate = + make_ref_counted( ChannelReceiveFrameTransformerDelegate::ReceiveFrameCallback(), mock_frame_transformer, nullptr); EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback); @@ -56,10 +71,10 @@ TEST(ChannelReceiveFrameTransformerDelegateTest, // Reset(). 
TEST(ChannelReceiveFrameTransformerDelegateTest, UnregisterTransformedFrameCallbackOnReset) { - rtc::scoped_refptr mock_frame_transformer = - rtc::make_ref_counted(); - rtc::scoped_refptr delegate = - rtc::make_ref_counted( + scoped_refptr mock_frame_transformer = + make_ref_counted(); + scoped_refptr delegate = + make_ref_counted( ChannelReceiveFrameTransformerDelegate::ReceiveFrameCallback(), mock_frame_transformer, nullptr); EXPECT_CALL(*mock_frame_transformer, UnregisterTransformedFrameCallback); @@ -70,22 +85,21 @@ TEST(ChannelReceiveFrameTransformerDelegateTest, // transformer, it passes it to the channel using the ReceiveFrameCallback. TEST(ChannelReceiveFrameTransformerDelegateTest, TransformRunsChannelReceiveCallback) { - rtc::AutoThread main_thread; - rtc::scoped_refptr mock_frame_transformer = - rtc::make_ref_counted>(); + AutoThread main_thread; + scoped_refptr mock_frame_transformer = + make_ref_counted>(); MockChannelReceive mock_channel; - rtc::scoped_refptr delegate = - rtc::make_ref_counted( - mock_channel.callback(), mock_frame_transformer, - rtc::Thread::Current()); - rtc::scoped_refptr callback; + scoped_refptr delegate = + make_ref_counted( + mock_channel.callback(), mock_frame_transformer, Thread::Current()); + scoped_refptr callback; EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback) .WillOnce(SaveArg<0>(&callback)); delegate->Init(); ASSERT_TRUE(callback); const uint8_t data[] = {1, 2, 3, 4}; - rtc::ArrayView packet(data, sizeof(data)); + ArrayView packet(data, sizeof(data)); RTPHeader header; EXPECT_CALL(mock_channel, ReceiveFrame); ON_CALL(*mock_frame_transformer, Transform) @@ -93,41 +107,43 @@ TEST(ChannelReceiveFrameTransformerDelegateTest, [&callback](std::unique_ptr frame) { callback->OnTransformedFrame(std::move(frame)); }); - delegate->Transform(packet, header, 1111 /*ssrc*/); - rtc::ThreadManager::ProcessAllMessageQueuesForTesting(); + delegate->Transform(packet, header, /*ssrc=*/1111, /*mimeType=*/"audio/opus", + kFakeReceiveTimestamp); + ThreadManager::ProcessAllMessageQueuesForTesting(); } // Test that when the delegate receives a Outgoing frame from the frame // transformer, it passes it to the channel using the ReceiveFrameCallback. 
TEST(ChannelReceiveFrameTransformerDelegateTest, TransformRunsChannelReceiveCallbackForSenderFrame) { - rtc::AutoThread main_thread; - rtc::scoped_refptr mock_frame_transformer = - rtc::make_ref_counted>(); + AutoThread main_thread; + scoped_refptr mock_frame_transformer = + make_ref_counted>(); MockChannelReceive mock_channel; - rtc::scoped_refptr delegate = - rtc::make_ref_counted( - mock_channel.callback(), mock_frame_transformer, - rtc::Thread::Current()); - rtc::scoped_refptr callback; + scoped_refptr delegate = + make_ref_counted( + mock_channel.callback(), mock_frame_transformer, Thread::Current()); + scoped_refptr callback; EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback) .WillOnce(SaveArg<0>(&callback)); delegate->Init(); ASSERT_TRUE(callback); const uint8_t data[] = {1, 2, 3, 4}; - rtc::ArrayView packet(data, sizeof(data)); + ArrayView packet(data, sizeof(data)); RTPHeader header; - EXPECT_CALL(mock_channel, ReceiveFrame(ElementsAre(1, 2, 3, 4), _)); + EXPECT_CALL(mock_channel, + ReceiveFrame(ElementsAre(1, 2, 3, 4), _, kFakeReceiveTimestamp)); ON_CALL(*mock_frame_transformer, Transform) - .WillByDefault([&callback]( - std::unique_ptr frame) { - auto* transformed_frame = - static_cast(frame.get()); - callback->OnTransformedFrame(CloneSenderAudioFrame(transformed_frame)); - }); - delegate->Transform(packet, header, 1111 /*ssrc*/); - rtc::ThreadManager::ProcessAllMessageQueuesForTesting(); + .WillByDefault( + [&callback](std::unique_ptr frame) { + auto* transformed_frame = + static_cast(frame.get()); + callback->OnTransformedFrame(CloneAudioFrame(transformed_frame)); + }); + delegate->Transform(packet, header, /*ssrc=*/1111, /*mimeType=*/"audio/opus", + kFakeReceiveTimestamp); + ThreadManager::ProcessAllMessageQueuesForTesting(); } // Test that if the delegate receives a transformed frame after it has been @@ -135,19 +151,161 @@ TEST(ChannelReceiveFrameTransformerDelegateTest, // after resetting the delegate. TEST(ChannelReceiveFrameTransformerDelegateTest, OnTransformedDoesNotRunChannelReceiveCallbackAfterReset) { - rtc::AutoThread main_thread; - rtc::scoped_refptr mock_frame_transformer = - rtc::make_ref_counted>(); + AutoThread main_thread; + scoped_refptr mock_frame_transformer = + make_ref_counted>(); MockChannelReceive mock_channel; - rtc::scoped_refptr delegate = - rtc::make_ref_counted( - mock_channel.callback(), mock_frame_transformer, - rtc::Thread::Current()); + scoped_refptr delegate = + make_ref_counted( + mock_channel.callback(), mock_frame_transformer, Thread::Current()); delegate->Reset(); EXPECT_CALL(mock_channel, ReceiveFrame).Times(0); delegate->OnTransformedFrame(std::make_unique()); - rtc::ThreadManager::ProcessAllMessageQueuesForTesting(); + ThreadManager::ProcessAllMessageQueuesForTesting(); +} + +TEST(ChannelReceiveFrameTransformerDelegateTest, + ShortCircuitingSkipsTransform) { + AutoThread main_thread; + scoped_refptr mock_frame_transformer = + make_ref_counted>(); + MockChannelReceive mock_channel; + scoped_refptr delegate = + make_ref_counted( + mock_channel.callback(), mock_frame_transformer, Thread::Current()); + const uint8_t data[] = {1, 2, 3, 4}; + ArrayView packet(data, sizeof(data)); + RTPHeader header; + + delegate->StartShortCircuiting(); + ThreadManager::ProcessAllMessageQueuesForTesting(); + + // Will not call the actual transformer. + EXPECT_CALL(*mock_frame_transformer, Transform).Times(0); + // Will pass the frame straight to the channel. 
+ EXPECT_CALL(mock_channel, ReceiveFrame); + delegate->Transform(packet, header, /*ssrc=*/1111, /*mimeType=*/"audio/opus", + kFakeReceiveTimestamp); +} + +TEST(ChannelReceiveFrameTransformerDelegateTest, + AudioLevelAndCaptureTimeAbsentWithoutExtension) { + AutoThread main_thread; + scoped_refptr mock_frame_transformer = + make_ref_counted>(); + scoped_refptr delegate = + make_ref_counted( + /*receive_frame_callback=*/nullptr, mock_frame_transformer, + Thread::Current()); + scoped_refptr callback; + EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback) + .WillOnce(SaveArg<0>(&callback)); + delegate->Init(); + ASSERT_TRUE(callback); + + const uint8_t data[] = {1, 2, 3, 4}; + ArrayView packet(data, sizeof(data)); + RTPHeader header; + std::unique_ptr frame; + ON_CALL(*mock_frame_transformer, Transform) + .WillByDefault( + [&](std::unique_ptr transform_frame) { + frame = std::move(transform_frame); + }); + delegate->Transform(packet, header, /*ssrc=*/1111, /*mimeType=*/"audio/opus", + kFakeReceiveTimestamp); + + EXPECT_TRUE(frame); + auto* audio_frame = + static_cast(frame.get()); + EXPECT_FALSE(audio_frame->AudioLevel()); + EXPECT_FALSE(audio_frame->CaptureTime()); + EXPECT_FALSE(audio_frame->SenderCaptureTimeOffset()); + EXPECT_EQ(audio_frame->Type(), + TransformableAudioFrameInterface::FrameType::kAudioFrameCN); +} + +TEST(ChannelReceiveFrameTransformerDelegateTest, + AudioLevelPresentWithExtension) { + AutoThread main_thread; + scoped_refptr mock_frame_transformer = + make_ref_counted>(); + scoped_refptr delegate = + make_ref_counted( + /*receive_frame_callback=*/nullptr, mock_frame_transformer, + Thread::Current()); + scoped_refptr callback; + EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback) + .WillOnce(SaveArg<0>(&callback)); + delegate->Init(); + ASSERT_TRUE(callback); + + const uint8_t data[] = {1, 2, 3, 4}; + ArrayView packet(data, sizeof(data)); + RTPHeader header; + uint8_t audio_level_dbov = 67; + AudioLevel audio_level(/*voice_activity=*/true, audio_level_dbov); + header.extension.set_audio_level(audio_level); + std::unique_ptr frame; + ON_CALL(*mock_frame_transformer, Transform) + .WillByDefault( + [&](std::unique_ptr transform_frame) { + frame = std::move(transform_frame); + }); + delegate->Transform(packet, header, /*ssrc=*/1111, /*mimeType=*/"audio/opus", + kFakeReceiveTimestamp); + + EXPECT_TRUE(frame); + auto* audio_frame = + static_cast(frame.get()); + EXPECT_EQ(*audio_frame->AudioLevel(), audio_level_dbov); + EXPECT_EQ(audio_frame->Type(), + TransformableAudioFrameInterface::FrameType::kAudioFrameSpeech); +} + +TEST(ChannelReceiveFrameTransformerDelegateTest, + CaptureTimePresentWithExtension) { + AutoThread main_thread; + scoped_refptr mock_frame_transformer = + make_ref_counted>(); + scoped_refptr delegate = + make_ref_counted( + /*receive_frame_callback=*/nullptr, mock_frame_transformer, + Thread::Current()); + scoped_refptr callback; + EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback) + .WillOnce(SaveArg<0>(&callback)); + delegate->Init(); + ASSERT_TRUE(callback); + + const uint8_t data[] = {1, 2, 3, 4}; + ArrayView packet(data, sizeof(data)); + Timestamp capture_time = Timestamp::Millis(1234); + TimeDelta sender_capture_time_offsets[] = {TimeDelta::Millis(56), + TimeDelta::Millis(-79)}; + for (auto offset : sender_capture_time_offsets) { + AbsoluteCaptureTime absolute_capture_time = { + .absolute_capture_timestamp = Int64MsToUQ32x32(capture_time.ms()), + .estimated_capture_clock_offset = 
Int64MsToQ32x32(offset.ms())}; + RTPHeader header; + header.extension.absolute_capture_time = absolute_capture_time; + + std::unique_ptr frame; + ON_CALL(*mock_frame_transformer, Transform) + .WillByDefault( + [&](std::unique_ptr transform_frame) { + frame = std::move(transform_frame); + }); + delegate->Transform(packet, header, /*ssrc=*/1111, + /*mimeType=*/"audio/opus", kFakeReceiveTimestamp); + + EXPECT_TRUE(frame); + auto* audio_frame = + static_cast(frame.get()); + EXPECT_EQ(*audio_frame->CaptureTime(), capture_time); + EXPECT_EQ(*audio_frame->SenderCaptureTimeOffset(), offset); + } } } // namespace diff --git a/audio/channel_receive_unittest.cc b/audio/channel_receive_unittest.cc index 4b7b7c0231..7cedba677a 100644 --- a/audio/channel_receive_unittest.cc +++ b/audio/channel_receive_unittest.cc @@ -11,19 +11,19 @@ #include "audio/channel_receive.h" #include "absl/strings/escaping.h" +#include "api/audio/audio_device.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/crypto/frame_decryptor_interface.h" -#include "api/task_queue/default_task_queue_factory.h" -#include "logging/rtc_event_log/mock/mock_rtc_event_log.h" -#include "modules/audio_device/include/audio_device.h" +#include "api/environment/environment_factory.h" +#include "api/test/mock_frame_transformer.h" #include "modules/audio_device/include/mock_audio_device.h" #include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/ntp_time_util.h" #include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "modules/rtp_rtcp/source/rtcp_packet/sdes.h" #include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "modules/rtp_rtcp/source/time_util.h" #include "rtc_base/logging.h" #include "rtc_base/thread.h" #include "test/gmock.h" @@ -60,14 +60,14 @@ class ChannelReceiveTest : public Test { std::unique_ptr CreateTestChannelReceive() { CryptoOptions crypto_options; auto channel = CreateChannelReceive( - time_controller_.GetClock(), + CreateEnvironment(time_controller_.GetClock()), /* neteq_factory= */ nullptr, audio_device_module_.get(), &transport_, - &event_log_, kLocalSsrc, kRemoteSsrc, + kLocalSsrc, kRemoteSsrc, /* jitter_buffer_max_packets= */ 0, /* jitter_buffer_fast_playout= */ false, /* jitter_buffer_min_delay_ms= */ 0, /* enable_non_sender_rtt= */ false, audio_decoder_factory_, - /* codec_pair_id= */ absl::nullopt, + /* codec_pair_id= */ std::nullopt, /* frame_decryptor_interface= */ nullptr, crypto_options, /* frame_transformer= */ nullptr); channel->SetReceiveCodecs( @@ -79,7 +79,7 @@ class ChannelReceiveTest : public Test { uint32_t RtpNow() { // Note - the "random" offset of this timestamp is zero. 
- return rtc::TimeMillis() * 1000 / kSampleRateHz; + return TimeMillis() * 1000 / kSampleRateHz; } RtpPacketReceived CreateRtpPacket() { @@ -128,14 +128,14 @@ class ChannelReceiveTest : public Test { return packet; } - void HandleGeneratedRtcp(ChannelReceiveInterface& channel, - rtc::ArrayView packet) { + void HandleGeneratedRtcp(ChannelReceiveInterface& /* channel */, + ArrayView packet) { if (packet[1] == rtcp::ReceiverReport::kPacketType) { // Ignore RR, it requires no response } else { RTC_LOG(LS_ERROR) << "Unexpected RTCP packet generated"; RTC_LOG(LS_ERROR) << "Packet content " - << rtc::hex_encode_with_delimiter( + << hex_encode_with_delimiter( absl::string_view( reinterpret_cast(packet.data()[0]), packet.size()), @@ -151,15 +151,14 @@ class ChannelReceiveTest : public Test { channel.OnRtpPacket(CreateRtpPacket()); channel.GetAudioFrameWithInfo(kSampleRateHz, &audio_frame); CallReceiveStatistics stats = channel.GetRTCPStatistics(); - return stats.capture_start_ntp_time_ms_; + return stats.capture_start_ntp_time_ms; } protected: GlobalSimulatedTimeController time_controller_; - rtc::scoped_refptr audio_device_module_; - rtc::scoped_refptr audio_decoder_factory_; + scoped_refptr audio_device_module_; + scoped_refptr audio_decoder_factory_; MockTransport transport_; - NiceMock event_log_; }; TEST_F(ChannelReceiveTest, CreateAndDestroy) { @@ -172,7 +171,7 @@ TEST_F(ChannelReceiveTest, ReceiveReportGeneratedOnTime) { bool receiver_report_sent = false; EXPECT_CALL(transport_, SendRtcp) - .WillRepeatedly([&](rtc::ArrayView packet) { + .WillRepeatedly([&](ArrayView packet) { if (packet.size() >= 2 && packet[1] == rtcp::ReceiverReport::kPacketType) { receiver_report_sent = true; @@ -190,7 +189,7 @@ TEST_F(ChannelReceiveTest, CaptureStartTimeBecomesValid) { auto channel = CreateTestChannelReceive(); EXPECT_CALL(transport_, SendRtcp) - .WillRepeatedly([&](rtc::ArrayView packet) { + .WillRepeatedly([&](ArrayView packet) { HandleGeneratedRtcp(*channel, packet); return true; }); @@ -226,6 +225,41 @@ TEST_F(ChannelReceiveTest, CaptureStartTimeBecomesValid) { EXPECT_NE(ProbeCaptureStartNtpTime(*channel), -1); } +TEST_F(ChannelReceiveTest, SettingFrameTransformer) { + auto channel = CreateTestChannelReceive(); + + scoped_refptr mock_frame_transformer = + make_ref_counted(); + + EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback); + channel->SetDepacketizerToDecoderFrameTransformer(mock_frame_transformer); + + // Must start playout, otherwise packet is discarded. + channel->StartPlayout(); + + RtpPacketReceived packet = CreateRtpPacket(); + + // Receive one RTP packet, this should be transformed. + EXPECT_CALL(*mock_frame_transformer, Transform); + channel->OnRtpPacket(packet); +} + +TEST_F(ChannelReceiveTest, SettingFrameTransformerMultipleTimes) { + auto channel = CreateTestChannelReceive(); + + scoped_refptr mock_frame_transformer = + make_ref_counted(); + + EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback); + channel->SetDepacketizerToDecoderFrameTransformer(mock_frame_transformer); + + // Set the same transformer again, shouldn't cause any additional callback + // registration calls. 
+ EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback) + .Times(0); + channel->SetDepacketizerToDecoderFrameTransformer(mock_frame_transformer); +} + } // namespace } // namespace voe } // namespace webrtc diff --git a/audio/channel_send.cc b/audio/channel_send.cc index e3058fca0d..52817f31ff 100644 --- a/audio/channel_send.cc +++ b/audio/channel_send.cc @@ -11,37 +11,63 @@ #include "audio/channel_send.h" #include -#include +#include +#include +#include #include +#include #include #include #include +#include "absl/functional/any_invocable.h" +#include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/call/bitrate_allocation.h" #include "api/call/transport.h" +#include "api/crypto/crypto_options.h" #include "api/crypto/frame_encryptor_interface.h" -#include "api/rtc_event_log/rtc_event_log.h" +#include "api/environment/environment.h" +#include "api/frame_transformer_interface.h" +#include "api/function_view.h" +#include "api/make_ref_counted.h" +#include "api/media_types.h" +#include "api/rtp_headers.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "audio/channel_send_frame_transformer_delegate.h" #include "audio/utility/audio_frame_operations.h" #include "call/rtp_transport_controller_send_interface.h" -#include "logging/rtc_event_log/events/rtc_event_audio_playout.h" -#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" #include "modules/audio_coding/include/audio_coding_module.h" +#include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/audio_processing/rms_level.h" #include "modules/pacing/packet_router.h" +#include "modules/rtp_rtcp/include/report_block_data.h" +#include "modules/rtp_rtcp/include/rtcp_statistics.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "modules/rtp_rtcp/source/rtp_sender_audio.h" +#include "rtc_base/buffer.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/race_checker.h" #include "rtc_base/rate_limiter.h" +#include "rtc_base/strings/string_builder.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_queue.h" -#include "rtc_base/time_utils.h" +#include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/trace_event.h" -#include "system_wrappers/include/clock.h" #include "system_wrappers/include/metrics.h" namespace webrtc { @@ -49,40 +75,79 @@ namespace voe { namespace { -constexpr int64_t kMaxRetransmissionWindowMs = 1000; -constexpr int64_t kMinRetransmissionWindowMs = 30; +constexpr TimeDelta kMaxRetransmissionWindow = TimeDelta::Seconds(1); +constexpr TimeDelta kMinRetransmissionWindow = TimeDelta::Millis(30); class RtpPacketSenderProxy; class TransportSequenceNumberProxy; +class AudioBitrateAccountant { + public: + void RegisterPacketOverhead(int packet_byte_overhead) { + packet_overhead_ = DataSize::Bytes(packet_byte_overhead); + } + + void Reset() 
{ + rate_last_frame_ = DataRate::BitsPerSec(0); + next_frame_duration_ = TimeDelta::Millis(0); + report_rate_ = std::nullopt; + } + + // A new frame is formed when bytesize is nonzero. + void UpdateBpsEstimate(DataSize payload_size, TimeDelta frame_duration) { + next_frame_duration_ += frame_duration; + // Do not have a full frame yet. + if (payload_size.bytes() == 0) + return; + + // We report the larger of the rates computed using the last frame, and + // second last frame. Under DTX, frame sizes sometimes alternate, it is + // preferable to report the upper envelop. + DataRate rate_cur_frame = + (payload_size + packet_overhead_) / next_frame_duration_; + + report_rate_ = + (rate_cur_frame > rate_last_frame_) ? rate_cur_frame : rate_last_frame_; + + rate_last_frame_ = rate_cur_frame; + next_frame_duration_ = TimeDelta::Millis(0); + } + + std::optional GetUsedRate() const { return report_rate_; } + + private: + TimeDelta next_frame_duration_ = TimeDelta::Millis(0); + DataSize packet_overhead_ = DataSize::Bytes(72); + DataRate rate_last_frame_ = DataRate::BitsPerSec(0); + std::optional report_rate_; +}; + class ChannelSend : public ChannelSendInterface, public AudioPacketizationCallback, // receive encoded // packets from the ACM public RtcpPacketTypeCounterObserver, public ReportBlockDataObserver { public: - ChannelSend(Clock* clock, - TaskQueueFactory* task_queue_factory, + ChannelSend(const Environment& env, Transport* rtp_transport, RtcpRttStats* rtcp_rtt_stats, - RtcEventLog* rtc_event_log, FrameEncryptorInterface* frame_encryptor, const webrtc::CryptoOptions& crypto_options, bool extmap_allow_mixed, int rtcp_report_interval_ms, uint32_t ssrc, - rtc::scoped_refptr frame_transformer, - RtpTransportControllerSendInterface* transport_controller, - const FieldTrialsView& field_trials); + scoped_refptr frame_transformer, + RtpTransportControllerSendInterface* transport_controller); ~ChannelSend() override; // Send using this encoder, with this payload type. void SetEncoder(int payload_type, + const SdpAudioFormat& encoder_format, std::unique_ptr encoder) override; - void ModifyEncoder(rtc::FunctionView*)> - modifier) override; - void CallEncoder(rtc::FunctionView modifier) override; + void ModifyEncoder( + FunctionView*)> modifier) override; + void CallEncoder(FunctionView modifier) override; // API methods void StartSend() override; @@ -132,16 +197,14 @@ class ChannelSend : public ChannelSendInterface, // packet. void ProcessAndEncodeAudio(std::unique_ptr audio_frame) override; - int64_t GetRTT() const override; - // E2EE Custom Audio Frame Encryption void SetFrameEncryptor( - rtc::scoped_refptr frame_encryptor) override; + scoped_refptr frame_encryptor) override; // Sets a frame transformer between encoder and packetizer, to transform // encoded frames before sending them out the network. void SetEncoderToPacketizerFrameTransformer( - rtc::scoped_refptr frame_transformer) + scoped_refptr frame_transformer) override; // RtcpPacketTypeCounterObserver. @@ -152,6 +215,17 @@ class ChannelSend : public ChannelSendInterface, // ReportBlockDataObserver. void OnReportBlockDataUpdated(ReportBlockData report_block) override; + // Reports actual bitrate used (vs allocated). 
+ std::optional GetUsedRate() const override { + MutexLock lock(&bitrate_accountant_mutex_); + return bitrate_accountant_.GetUsedRate(); + } + + void RegisterPacketOverhead(int packet_byte_overhead) override { + MutexLock lock(&bitrate_accountant_mutex_); + bitrate_accountant_.RegisterPacketOverhead(packet_byte_overhead); + } + private: // From AudioPacketizationCallback in the ACM int32_t SendData(AudioFrameType frameType, @@ -166,33 +240,38 @@ class ChannelSend : public ChannelSendInterface, int32_t SendRtpAudio(AudioFrameType frameType, uint8_t payloadType, uint32_t rtp_timestamp_without_offset, - rtc::ArrayView payload, - int64_t absolute_capture_timestamp_ms) - RTC_RUN_ON(encoder_queue_); + ArrayView payload, + int64_t absolute_capture_timestamp_ms, + ArrayView csrcs, + std::optional audio_level_dbov) + RTC_RUN_ON(encoder_queue_checker_); void OnReceivedRtt(int64_t rtt_ms); void InitFrameTransformerDelegate( - rtc::scoped_refptr frame_transformer); + scoped_refptr frame_transformer); + + // Calls the encoder on the encoder queue (instead of blocking). + void CallEncoderAsync(absl::AnyInvocable modifier); + + const Environment env_; // Thread checkers document and lock usage of some methods on voe::Channel to // specific threads we know about. The goal is to eventually split up // voe::Channel into parts with single-threaded semantics, and thereby reduce // the need for locks. - SequenceChecker worker_thread_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_thread_checker_; // Methods accessed from audio and video threads are checked for sequential- // only access. We don't necessarily own and control these threads, so thread // checkers cannot be used. E.g. Chromium may transfer "ownership" from one // audio thread to another, but access is still sequential. - rtc::RaceChecker audio_thread_race_checker_; + RaceChecker audio_thread_race_checker_; mutable Mutex volume_settings_mutex_; const uint32_t ssrc_; bool sending_ RTC_GUARDED_BY(&worker_thread_checker_) = false; - RtcEventLog* const event_log_; - std::unique_ptr rtp_rtcp_; std::unique_ptr rtp_sender_audio_; @@ -200,43 +279,48 @@ class ChannelSend : public ChannelSendInterface, // This is just an offset, RTP module will add its own random offset. 
uint32_t timestamp_ RTC_GUARDED_BY(audio_thread_race_checker_) = 0; - absl::optional last_capture_timestamp_ms_ + std::optional last_capture_timestamp_ms_ RTC_GUARDED_BY(audio_thread_race_checker_); - RmsLevel rms_level_ RTC_GUARDED_BY(encoder_queue_); + RmsLevel rms_level_ RTC_GUARDED_BY(encoder_queue_checker_); bool input_mute_ RTC_GUARDED_BY(volume_settings_mutex_) = false; - bool previous_frame_muted_ RTC_GUARDED_BY(encoder_queue_) = false; + bool previous_frame_muted_ RTC_GUARDED_BY(encoder_queue_checker_) = false; PacketRouter* packet_router_ RTC_GUARDED_BY(&worker_thread_checker_) = nullptr; const std::unique_ptr rtp_packet_pacer_proxy_; const std::unique_ptr retransmission_rate_limiter_; - SequenceChecker construction_thread_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker construction_thread_; std::atomic include_audio_level_indication_ = false; std::atomic encoder_queue_is_active_ = false; std::atomic first_frame_ = true; // E2EE Audio Frame Encryption - rtc::scoped_refptr frame_encryptor_ - RTC_GUARDED_BY(encoder_queue_); + scoped_refptr frame_encryptor_ + RTC_GUARDED_BY(encoder_queue_checker_); // E2EE Frame Encryption Options const webrtc::CryptoOptions crypto_options_; // Delegates calls to a frame transformer to transform audio, and // receives callbacks with the transformed frames; delegates calls to // ChannelSend::SendRtpAudio to send the transformed audio. - rtc::scoped_refptr - frame_transformer_delegate_ RTC_GUARDED_BY(encoder_queue_); + scoped_refptr frame_transformer_delegate_ + RTC_GUARDED_BY(encoder_queue_checker_); mutable Mutex rtcp_counter_mutex_; RtcpPacketTypeCounter rtcp_packet_type_counter_ RTC_GUARDED_BY(rtcp_counter_mutex_); - // Defined last to ensure that there are no running tasks when the other - // members are destroyed. - rtc::TaskQueue encoder_queue_; + std::unique_ptr encoder_queue_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker encoder_queue_checker_; + + SdpAudioFormat encoder_format_; + + mutable Mutex bitrate_accountant_mutex_; + AudioBitrateAccountant bitrate_accountant_ + RTC_GUARDED_BY(bitrate_accountant_mutex_); }; const int kTelephoneEventAttenuationdB = 10; @@ -254,16 +338,32 @@ class RtpPacketSenderProxy : public RtpPacketSender { void EnqueuePackets( std::vector> packets) override { MutexLock lock(&mutex_); + + // Since we allow having an instance with no rtp_packet_pacer_ set we + // should handle calls to member functions in this state gracefully rather + // than null dereferencing. + if (!rtp_packet_pacer_) { + RTC_DLOG(LS_WARNING) + << "Dropping packets queued while rtp_packet_pacer_ is null."; + return; + } rtp_packet_pacer_->EnqueuePackets(std::move(packets)); } void RemovePacketsForSsrc(uint32_t ssrc) override { MutexLock lock(&mutex_); + + // Since we allow having an instance with no rtp_packet_pacer_ set we + // should handle calls to member functions in this state gracefully rather + // than null dereferencing. 
+ if (!rtp_packet_pacer_) { + return; + } rtp_packet_pacer_->RemovePacketsForSsrc(ssrc); } private: - SequenceChecker thread_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker thread_checker_; Mutex mutex_; RtpPacketSender* rtp_packet_pacer_ RTC_GUARDED_BY(&mutex_); }; @@ -274,29 +374,44 @@ int32_t ChannelSend::SendData(AudioFrameType frameType, const uint8_t* payloadData, size_t payloadSize, int64_t absolute_capture_timestamp_ms) { - RTC_DCHECK_RUN_ON(&encoder_queue_); - rtc::ArrayView payload(payloadData, payloadSize); + RTC_DCHECK_RUN_ON(&encoder_queue_checker_); + ArrayView payload(payloadData, payloadSize); + + std::optional audio_level_dbov; + if (include_audio_level_indication_.load()) { + // Take the averaged audio levels from rms_level_ and reset it before + // invoking any async transformer. + audio_level_dbov = rms_level_.Average(); + } + if (frame_transformer_delegate_) { // Asynchronously transform the payload before sending it. After the payload // is transformed, the delegate will call SendRtpAudio to send it. + char buf[1024]; + SimpleStringBuilder mime_type(buf); + mime_type << webrtc::MediaTypeToString(webrtc::MediaType::AUDIO) << "/" + << encoder_format_.name; frame_transformer_delegate_->Transform( frameType, payloadType, rtp_timestamp + rtp_rtcp_->StartTimestamp(), payloadData, payloadSize, absolute_capture_timestamp_ms, - rtp_rtcp_->SSRC()); + rtp_rtcp_->SSRC(), mime_type.str(), audio_level_dbov); return 0; } return SendRtpAudio(frameType, payloadType, rtp_timestamp, payload, - absolute_capture_timestamp_ms); + absolute_capture_timestamp_ms, /*csrcs=*/{}, + audio_level_dbov); } int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType, uint8_t payloadType, uint32_t rtp_timestamp_without_offset, - rtc::ArrayView payload, - int64_t absolute_capture_timestamp_ms) { + ArrayView payload, + int64_t absolute_capture_timestamp_ms, + ArrayView csrcs, + std::optional audio_level_dbov) { // E2EE Custom Audio Frame Encryption (This is optional). // Keep this buffer around for the lifetime of the send call. - rtc::Buffer encrypted_audio_payload; + Buffer encrypted_audio_payload; // We don't invoke encryptor if payload is empty, which means we are to send // DTMF, or the encoder entered DTX. // TODO(minyue): see whether DTMF packets should be encrypted or not. In @@ -306,15 +421,15 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType, // TODO(benwright@webrtc.org) - Allocate enough to always encrypt inline. // Allocate a buffer to hold the maximum possible encrypted payload. size_t max_ciphertext_size = frame_encryptor_->GetMaxCiphertextByteSize( - cricket::MEDIA_TYPE_AUDIO, payload.size()); + webrtc::MediaType::AUDIO, payload.size()); encrypted_audio_payload.SetSize(max_ciphertext_size); // Encrypt the audio payload into the buffer. size_t bytes_written = 0; - int encrypt_status = frame_encryptor_->Encrypt( - cricket::MEDIA_TYPE_AUDIO, rtp_rtcp_->SSRC(), - /*additional_data=*/nullptr, payload, encrypted_audio_payload, - &bytes_written); + int encrypt_status = + frame_encryptor_->Encrypt(webrtc::MediaType::AUDIO, rtp_rtcp_->SSRC(), + /*additional_data=*/nullptr, payload, + encrypted_audio_payload, &bytes_written); if (encrypt_status != 0) { RTC_DLOG(LS_ERROR) << "Channel::SendData() failed encrypt audio payload: " @@ -335,10 +450,7 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType, // Push data from ACM to RTP/RTCP-module to deliver audio frame for // packetization. 
if (!rtp_rtcp_->OnSendingRtpFrame(rtp_timestamp_without_offset, - // Leaving the time when this frame was - // received from the capture device as - // undefined for voice for now. - -1, payloadType, + absolute_capture_timestamp_ms, payloadType, /*force_sender_report=*/false)) { return -1; } @@ -355,12 +467,13 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType, .payload = payload, .payload_id = payloadType, .rtp_timestamp = - rtp_timestamp_without_offset + rtp_rtcp_->StartTimestamp()}; + rtp_timestamp_without_offset + rtp_rtcp_->StartTimestamp(), + .csrcs = csrcs}; if (absolute_capture_timestamp_ms > 0) { frame.capture_time = Timestamp::Millis(absolute_capture_timestamp_ms); } - if (include_audio_level_indication_.load()) { - frame.audio_level_dbov = rms_level_.Average(); + if (include_audio_level_indication_.load() && audio_level_dbov) { + frame.audio_level_dbov = *audio_level_dbov; } if (!rtp_sender_audio_->SendAudio(frame)) { RTC_DLOG(LS_ERROR) @@ -372,58 +485,53 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType, } ChannelSend::ChannelSend( - Clock* clock, - TaskQueueFactory* task_queue_factory, + const Environment& env, Transport* rtp_transport, RtcpRttStats* rtcp_rtt_stats, - RtcEventLog* rtc_event_log, FrameEncryptorInterface* frame_encryptor, const webrtc::CryptoOptions& crypto_options, bool extmap_allow_mixed, int rtcp_report_interval_ms, uint32_t ssrc, - rtc::scoped_refptr frame_transformer, - RtpTransportControllerSendInterface* transport_controller, - const FieldTrialsView& field_trials) - : ssrc_(ssrc), - event_log_(rtc_event_log), + scoped_refptr frame_transformer, + RtpTransportControllerSendInterface* transport_controller) + : env_(env), + ssrc_(ssrc), rtp_packet_pacer_proxy_(new RtpPacketSenderProxy()), retransmission_rate_limiter_( - new RateLimiter(clock, kMaxRetransmissionWindowMs)), + new RateLimiter(&env_.clock(), kMaxRetransmissionWindow.ms())), frame_encryptor_(frame_encryptor), crypto_options_(crypto_options), - encoder_queue_(task_queue_factory->CreateTaskQueue( + encoder_queue_(env_.task_queue_factory().CreateTaskQueue( "AudioEncoder", - TaskQueueFactory::Priority::NORMAL)) { + TaskQueueFactory::Priority::NORMAL)), + encoder_queue_checker_(encoder_queue_.get()), + encoder_format_("x-unknown", 0, 0) { audio_coding_ = AudioCodingModule::Create(); RtpRtcpInterface::Configuration configuration; configuration.report_block_data_observer = this; configuration.network_link_rtcp_observer = transport_controller->GetRtcpObserver(); - configuration.transport_feedback_callback = - transport_controller->transport_feedback_observer(); - configuration.clock = (clock ? 
clock : Clock::GetRealTimeClock()); configuration.audio = true; configuration.outgoing_transport = rtp_transport; configuration.paced_sender = rtp_packet_pacer_proxy_.get(); - - configuration.event_log = event_log_; configuration.rtt_stats = rtcp_rtt_stats; - configuration.retransmission_rate_limiter = - retransmission_rate_limiter_.get(); + if (env_.field_trials().IsDisabled("WebRTC-DisableRtxRateLimiter")) { + configuration.retransmission_rate_limiter = + retransmission_rate_limiter_.get(); + } configuration.extmap_allow_mixed = extmap_allow_mixed; configuration.rtcp_report_interval_ms = rtcp_report_interval_ms; configuration.rtcp_packet_type_counter_observer = this; - configuration.local_media_ssrc = ssrc; - rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(configuration); + rtp_rtcp_ = std::make_unique(env_, configuration); rtp_rtcp_->SetSendingMediaStatus(false); - rtp_sender_audio_ = std::make_unique(configuration.clock, - rtp_rtcp_->RtpSender()); + rtp_sender_audio_ = + std::make_unique(&env_.clock(), rtp_rtcp_->RtpSender()); // Ensure that RTCP is enabled by default for the created channel. rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound); @@ -444,6 +552,10 @@ ChannelSend::~ChannelSend() { StopSend(); int error = audio_coding_->RegisterTransportCallback(NULL); RTC_DCHECK_EQ(0, error); + + // Delete the encoder task queue first to ensure that there are no running + // tasks when the other members are destroyed. + encoder_queue_ = nullptr; } void ChannelSend::StartSend() { @@ -472,13 +584,13 @@ void ChannelSend::StopSend() { // Wait until all pending encode tasks are executed and clear any remaining // buffers in the encoder. - rtc::Event flush; - encoder_queue_.PostTask([this, &flush]() { - RTC_DCHECK_RUN_ON(&encoder_queue_); - CallEncoder([](AudioEncoder* encoder) { encoder->Reset(); }); + Event flush; + encoder_queue_->PostTask([this, &flush]() { + RTC_DCHECK_RUN_ON(&encoder_queue_checker_); + audio_coding_->Reset(); flush.Set(); }); - flush.Wait(rtc::Event::kForever); + flush.Wait(Event::kForever); // Reset sending SSRC and sequence number and triggers direct transmission // of RTCP BYE @@ -493,6 +605,7 @@ void ChannelSend::StopSend() { } void ChannelSend::SetEncoder(int payload_type, + const SdpAudioFormat& encoder_format, std::unique_ptr encoder) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK_GE(payload_type, 0); @@ -506,18 +619,19 @@ void ChannelSend::SetEncoder(int payload_type, encoder->RtpTimestampRateHz(), encoder->NumChannels(), 0); + encoder_format_ = encoder_format; audio_coding_->SetEncoder(std::move(encoder)); } void ChannelSend::ModifyEncoder( - rtc::FunctionView*)> modifier) { + FunctionView*)> modifier) { // This method can be called on the worker thread, module process thread // or network thread. Audio coding is thread safe, so we do not need to // enforce the calling thread. 
audio_coding_->ModifyEncoder(modifier); } -void ChannelSend::CallEncoder(rtc::FunctionView modifier) { +void ChannelSend::CallEncoder(FunctionView modifier) { ModifyEncoder([modifier](std::unique_ptr* encoder_ptr) { if (*encoder_ptr) { modifier(encoder_ptr->get()); @@ -527,14 +641,15 @@ void ChannelSend::CallEncoder(rtc::FunctionView modifier) { }); } +void ChannelSend::CallEncoderAsync( + absl::AnyInvocable modifier) { + encoder_queue_->PostTask([this, modifier = std::move(modifier)]() mutable { + CallEncoder(modifier); + }); +} + void ChannelSend::OnBitrateAllocation(BitrateAllocationUpdate update) { - // This method can be called on the worker thread, module process thread - // or on a TaskQueue via VideoSendStreamImpl::OnEncoderConfigurationChanged. - // TODO(solenberg): Figure out a good way to check this or enforce calling - // rules. - // RTC_DCHECK(worker_thread_checker_.IsCurrent() || - // module_process_thread_checker_.IsCurrent()); - CallEncoder([&](AudioEncoder* encoder) { + CallEncoderAsync([update](AudioEncoder* encoder) { encoder->OnReceivedUplinkAllocation(update); }); retransmission_rate_limiter_->SetMaxRate(update.target_bitrate.bps()); @@ -546,7 +661,7 @@ int ChannelSend::GetTargetBitrate() const { void ChannelSend::OnReportBlockDataUpdated(ReportBlockData report_block) { float packet_loss_rate = report_block.fraction_lost(); - CallEncoder([&](AudioEncoder* encoder) { + CallEncoderAsync([packet_loss_rate](AudioEncoder* encoder) { encoder->OnReceivedUplinkPacketLossFraction(packet_loss_rate); }); } @@ -555,23 +670,18 @@ void ChannelSend::ReceivedRTCPPacket(const uint8_t* data, size_t length) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); // Deliver RTCP packet to RTP/RTCP module for parsing - rtp_rtcp_->IncomingRtcpPacket(rtc::MakeArrayView(data, length)); + rtp_rtcp_->IncomingRtcpPacket(MakeArrayView(data, length)); - int64_t rtt = GetRTT(); - if (rtt == 0) { + std::optional rtt = rtp_rtcp_->LastRtt(); + if (!rtt.has_value()) { // Waiting for valid RTT. 
return; } - int64_t nack_window_ms = rtt; - if (nack_window_ms < kMinRetransmissionWindowMs) { - nack_window_ms = kMinRetransmissionWindowMs; - } else if (nack_window_ms > kMaxRetransmissionWindowMs) { - nack_window_ms = kMaxRetransmissionWindowMs; - } - retransmission_rate_limiter_->SetWindowSize(nack_window_ms); + retransmission_rate_limiter_->SetWindowSize( + rtt->Clamped(kMinRetransmissionWindow, kMaxRetransmissionWindow).ms()); - OnReceivedRtt(rtt); + OnReceivedRtt(rtt->ms()); } void ChannelSend::SetInputMute(bool enable) { @@ -623,9 +733,9 @@ void ChannelSend::SetSendAudioLevelIndicationStatus(bool enable, int id) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); include_audio_level_indication_.store(enable); if (enable) { - rtp_rtcp_->RegisterRtpHeaderExtension(AudioLevel::Uri(), id); + rtp_rtcp_->RegisterRtpHeaderExtension(AudioLevelExtension::Uri(), id); } else { - rtp_rtcp_->DeregisterSendRtpHeaderExtension(AudioLevel::Uri()); + rtp_rtcp_->DeregisterSendRtpHeaderExtension(AudioLevelExtension::Uri()); } } @@ -670,7 +780,7 @@ std::vector ChannelSend::GetRemoteRTCPReportBlocks() const { CallSendStatistics ChannelSend::GetRTCPStatistics() const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); CallSendStatistics stats = {0}; - stats.rttMs = GetRTT(); + stats.rttMs = rtp_rtcp_->LastRtt().value_or(TimeDelta::Zero()).ms(); StreamDataCounters rtp_stats; StreamDataCounters rtx_stats; @@ -745,9 +855,9 @@ void ChannelSend::ProcessAndEncodeAudio( // Profile time between when the audio frame is added to the task queue and // when the task is actually executed. audio_frame->UpdateProfileTimeStamp(); - encoder_queue_.PostTask( + encoder_queue_->PostTask( [this, audio_frame = std::move(audio_frame)]() mutable { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(&encoder_queue_checker_); if (!encoder_queue_is_active_.load()) { return; } @@ -769,7 +879,7 @@ void ChannelSend::ProcessAndEncodeAudio( rms_level_.AnalyzeMuted(length); } else { rms_level_.Analyze( - rtc::ArrayView(audio_frame->data(), length)); + ArrayView(audio_frame->data(), length)); } } previous_frame_muted_ = is_muted; @@ -778,10 +888,15 @@ void ChannelSend::ProcessAndEncodeAudio( // encoding is done and payload is ready for packetization and // transmission. Otherwise, it will return without invoking the // callback. - if (audio_coding_->Add10MsData(*audio_frame) < 0) { + int32_t encoded_bytes = audio_coding_->Add10MsData(*audio_frame); + MutexLock lock(&bitrate_accountant_mutex_); + if (encoded_bytes < 0) { RTC_DLOG(LS_ERROR) << "ACM::Add10MsData() failed."; + bitrate_accountant_.Reset(); return; } + bitrate_accountant_.UpdateBpsEstimate(DataSize::Bytes(encoded_bytes), + TimeDelta::Millis(10)); }); } @@ -794,49 +909,36 @@ RtpRtcpInterface* ChannelSend::GetRtpRtcp() const { return rtp_rtcp_.get(); } -int64_t ChannelSend::GetRTT() const { - std::vector report_blocks = - rtp_rtcp_->GetLatestReportBlockData(); - if (report_blocks.empty()) { - return 0; - } - - // We don't know in advance the remote ssrc used by the other end's receiver - // reports, so use the first report block for the RTT. 
- return report_blocks.front().last_rtt().ms(); -} - void ChannelSend::SetFrameEncryptor( - rtc::scoped_refptr frame_encryptor) { + scoped_refptr frame_encryptor) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - encoder_queue_.PostTask([this, frame_encryptor]() mutable { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, frame_encryptor]() mutable { + RTC_DCHECK_RUN_ON(&encoder_queue_checker_); frame_encryptor_ = std::move(frame_encryptor); }); } void ChannelSend::SetEncoderToPacketizerFrameTransformer( - rtc::scoped_refptr frame_transformer) { + scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); if (!frame_transformer) return; - encoder_queue_.PostTask( + encoder_queue_->PostTask( [this, frame_transformer = std::move(frame_transformer)]() mutable { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(&encoder_queue_checker_); InitFrameTransformerDelegate(std::move(frame_transformer)); }); } void ChannelSend::OnReceivedRtt(int64_t rtt_ms) { - // Invoke audio encoders OnReceivedRtt(). - CallEncoder( + CallEncoderAsync( [rtt_ms](AudioEncoder* encoder) { encoder->OnReceivedRtt(rtt_ms); }); } void ChannelSend::InitFrameTransformerDelegate( - rtc::scoped_refptr frame_transformer) { - RTC_DCHECK_RUN_ON(&encoder_queue_); + scoped_refptr frame_transformer) { + RTC_DCHECK_RUN_ON(&encoder_queue_checker_); RTC_DCHECK(frame_transformer); RTC_DCHECK(!frame_transformer_delegate_); @@ -845,42 +947,40 @@ void ChannelSend::InitFrameTransformerDelegate( ChannelSendFrameTransformerDelegate::SendFrameCallback send_audio_callback = [this](AudioFrameType frameType, uint8_t payloadType, uint32_t rtp_timestamp_with_offset, - rtc::ArrayView payload, - int64_t absolute_capture_timestamp_ms) { - RTC_DCHECK_RUN_ON(&encoder_queue_); + ArrayView payload, + int64_t absolute_capture_timestamp_ms, + ArrayView csrcs, + std::optional audio_level_dbov) { + RTC_DCHECK_RUN_ON(&encoder_queue_checker_); return SendRtpAudio( frameType, payloadType, rtp_timestamp_with_offset - rtp_rtcp_->StartTimestamp(), payload, - absolute_capture_timestamp_ms); + absolute_capture_timestamp_ms, csrcs, audio_level_dbov); }; frame_transformer_delegate_ = - rtc::make_ref_counted( + make_ref_counted( std::move(send_audio_callback), std::move(frame_transformer), - &encoder_queue_); + encoder_queue_.get()); frame_transformer_delegate_->Init(); } } // namespace std::unique_ptr CreateChannelSend( - Clock* clock, - TaskQueueFactory* task_queue_factory, + const Environment& env, Transport* rtp_transport, RtcpRttStats* rtcp_rtt_stats, - RtcEventLog* rtc_event_log, FrameEncryptorInterface* frame_encryptor, const webrtc::CryptoOptions& crypto_options, bool extmap_allow_mixed, int rtcp_report_interval_ms, uint32_t ssrc, - rtc::scoped_refptr frame_transformer, - RtpTransportControllerSendInterface* transport_controller, - const FieldTrialsView& field_trials) { + scoped_refptr frame_transformer, + RtpTransportControllerSendInterface* transport_controller) { return std::make_unique( - clock, task_queue_factory, rtp_transport, rtcp_rtt_stats, rtc_event_log, - frame_encryptor, crypto_options, extmap_allow_mixed, - rtcp_report_interval_ms, ssrc, std::move(frame_transformer), - transport_controller, field_trials); + env, rtp_transport, rtcp_rtt_stats, frame_encryptor, crypto_options, + extmap_allow_mixed, rtcp_report_interval_ms, ssrc, + std::move(frame_transformer), transport_controller); } } // namespace voe diff --git a/audio/channel_send.h b/audio/channel_send.h index 00d954c952..8991fc0f44 100644 --- 
a/audio/channel_send.h +++ b/audio/channel_send.h @@ -11,25 +11,31 @@ #ifndef AUDIO_CHANNEL_SEND_H_ #define AUDIO_CHANNEL_SEND_H_ +#include +#include #include -#include +#include #include +#include "absl/strings/string_view.h" #include "api/audio/audio_frame.h" #include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/call/bitrate_allocation.h" #include "api/crypto/crypto_options.h" -#include "api/field_trials_view.h" +#include "api/environment/environment.h" #include "api/frame_transformer_interface.h" #include "api/function_view.h" -#include "api/task_queue/task_queue_factory.h" +#include "api/scoped_refptr.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" #include "modules/rtp_rtcp/include/report_block_data.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" -#include "modules/rtp_rtcp/source/rtp_sender_audio.h" namespace webrtc { class FrameEncryptorInterface; -class RtcEventLog; class RtpTransportControllerSendInterface; struct CallSendStatistics { @@ -62,10 +68,11 @@ class ChannelSendInterface { virtual CallSendStatistics GetRTCPStatistics() const = 0; virtual void SetEncoder(int payload_type, + const SdpAudioFormat& encoder_format, std::unique_ptr encoder) = 0; virtual void ModifyEncoder( - rtc::FunctionView*)> modifier) = 0; - virtual void CallEncoder(rtc::FunctionView modifier) = 0; + FunctionView*)> modifier) = 0; + virtual void CallEncoder(FunctionView modifier) = 0; // Use 0 to indicate that the extension should not be registered. virtual void SetRTCP_CNAME(absl::string_view c_name) = 0; @@ -88,45 +95,36 @@ class ChannelSendInterface { std::unique_ptr audio_frame) = 0; virtual RtpRtcpInterface* GetRtpRtcp() const = 0; - // In RTP we currently rely on RTCP packets (`ReceivedRTCPPacket`) to inform - // about RTT. - // In media transport we rely on the TargetTransferRateObserver instead. - // In other words, if you are using RTP, you should expect - // `ReceivedRTCPPacket` to be called, if you are using media transport, - // `OnTargetTransferRate` will be called. - // - // In future, RTP media will move to the media transport implementation and - // these conditions will be removed. - // Returns the RTT in milliseconds. - virtual int64_t GetRTT() const = 0; virtual void StartSend() = 0; virtual void StopSend() = 0; // E2EE Custom Audio Frame Encryption (Optional) virtual void SetFrameEncryptor( - rtc::scoped_refptr frame_encryptor) = 0; + scoped_refptr frame_encryptor) = 0; // Sets a frame transformer between encoder and packetizer, to transform // encoded frames before sending them out the network. virtual void SetEncoderToPacketizerFrameTransformer( - rtc::scoped_refptr - frame_transformer) = 0; + scoped_refptr frame_transformer) = 0; + + // Returns payload bitrate actually used. + virtual std::optional GetUsedRate() const = 0; + + // Registers per packet byte overhead. 
+ virtual void RegisterPacketOverhead(int packet_byte_overhead) = 0; }; std::unique_ptr CreateChannelSend( - Clock* clock, - TaskQueueFactory* task_queue_factory, + const Environment& env, Transport* rtp_transport, RtcpRttStats* rtcp_rtt_stats, - RtcEventLog* rtc_event_log, FrameEncryptorInterface* frame_encryptor, const webrtc::CryptoOptions& crypto_options, bool extmap_allow_mixed, int rtcp_report_interval_ms, uint32_t ssrc, - rtc::scoped_refptr frame_transformer, - RtpTransportControllerSendInterface* transport_controller, - const FieldTrialsView& field_trials); + scoped_refptr frame_transformer, + RtpTransportControllerSendInterface* transport_controller); } // namespace voe } // namespace webrtc diff --git a/audio/channel_send_frame_transformer_delegate.cc b/audio/channel_send_frame_transformer_delegate.cc index 9b88d4d949..8a94ce4c7c 100644 --- a/audio/channel_send_frame_transformer_delegate.cc +++ b/audio/channel_send_frame_transformer_delegate.cc @@ -11,6 +11,10 @@ #include "audio/channel_send_frame_transformer_delegate.h" #include +#include + +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" namespace webrtc { namespace { @@ -44,26 +48,37 @@ AudioFrameType InterfaceFrameTypeToInternalFrameType( RTC_DCHECK_NOTREACHED(); return AudioFrameType::kEmptyFrame; } +} // namespace class TransformableOutgoingAudioFrame : public TransformableAudioFrameInterface { public: - TransformableOutgoingAudioFrame(AudioFrameType frame_type, - uint8_t payload_type, - uint32_t rtp_timestamp_with_offset, - const uint8_t* payload_data, - size_t payload_size, - int64_t absolute_capture_timestamp_ms, - uint32_t ssrc) - : frame_type_(frame_type), + TransformableOutgoingAudioFrame( + AudioFrameType frame_type, + uint8_t payload_type, + uint32_t rtp_timestamp_with_offset, + const uint8_t* payload_data, + size_t payload_size, + std::optional absolute_capture_timestamp_ms, + uint32_t ssrc, + std::vector csrcs, + const std::string& codec_mime_type, + std::optional sequence_number, + std::optional audio_level_dbov) + : TransformableAudioFrameInterface(Passkey()), + frame_type_(frame_type), payload_type_(payload_type), rtp_timestamp_with_offset_(rtp_timestamp_with_offset), payload_(payload_data, payload_size), absolute_capture_timestamp_ms_(absolute_capture_timestamp_ms), - ssrc_(ssrc) {} + ssrc_(ssrc), + csrcs_(std::move(csrcs)), + codec_mime_type_(codec_mime_type), + sequence_number_(sequence_number), + audio_level_dbov_(audio_level_dbov) {} ~TransformableOutgoingAudioFrame() override = default; - rtc::ArrayView GetData() const override { return payload_; } - void SetData(rtc::ArrayView data) override { + ArrayView GetData() const override { return payload_; } + void SetData(ArrayView data) override { payload_.SetData(data.data(), data.size()); } uint32_t GetTimestamp() const override { return rtp_timestamp_with_offset_; } @@ -75,44 +90,58 @@ class TransformableOutgoingAudioFrame uint8_t GetPayloadType() const override { return payload_type_; } Direction GetDirection() const override { return Direction::kSender; } + std::string GetMimeType() const override { return codec_mime_type_; } - rtc::ArrayView GetContributingSources() const override { - return {}; + ArrayView GetContributingSources() const override { + return csrcs_; } - const absl::optional SequenceNumber() const override { - return absl::nullopt; + const std::optional SequenceNumber() const override { + return sequence_number_; } void SetRTPTimestamp(uint32_t rtp_timestamp_with_offset) override { rtp_timestamp_with_offset_ = 
rtp_timestamp_with_offset; } - absl::optional AbsoluteCaptureTimestamp() const override { + std::optional AbsoluteCaptureTimestamp() const override { return absolute_capture_timestamp_ms_; } + std::optional AudioLevel() const override { + return audio_level_dbov_; + } + + std::optional ReceiveTime() const override { return std::nullopt; } + std::optional CaptureTime() const override { return std::nullopt; } + std::optional SenderCaptureTimeOffset() const override { + return std::nullopt; + } + private: AudioFrameType frame_type_; uint8_t payload_type_; uint32_t rtp_timestamp_with_offset_; - rtc::Buffer payload_; - int64_t absolute_capture_timestamp_ms_; + Buffer payload_; + std::optional absolute_capture_timestamp_ms_; uint32_t ssrc_; + std::vector csrcs_; + std::string codec_mime_type_; + std::optional sequence_number_; + std::optional audio_level_dbov_; }; -} // namespace ChannelSendFrameTransformerDelegate::ChannelSendFrameTransformerDelegate( SendFrameCallback send_frame_callback, - rtc::scoped_refptr frame_transformer, - rtc::TaskQueue* encoder_queue) + scoped_refptr frame_transformer, + TaskQueueBase* encoder_queue) : send_frame_callback_(send_frame_callback), frame_transformer_(std::move(frame_transformer)), encoder_queue_(encoder_queue) {} void ChannelSendFrameTransformerDelegate::Init() { frame_transformer_->RegisterTransformedFrameCallback( - rtc::scoped_refptr(this)); + scoped_refptr(this)); } void ChannelSendFrameTransformerDelegate::Reset() { @@ -130,11 +159,25 @@ void ChannelSendFrameTransformerDelegate::Transform( const uint8_t* payload_data, size_t payload_size, int64_t absolute_capture_timestamp_ms, - uint32_t ssrc) { + uint32_t ssrc, + const std::string& codec_mimetype, + std::optional audio_level_dbov) { + { + MutexLock lock(&send_lock_); + if (short_circuit_) { + send_frame_callback_(frame_type, payload_type, rtp_timestamp, + ArrayView(payload_data, payload_size), + absolute_capture_timestamp_ms, /*csrcs=*/{}, + audio_level_dbov); + return; + } + } frame_transformer_->Transform( std::make_unique( frame_type, payload_type, rtp_timestamp, payload_data, payload_size, - absolute_capture_timestamp_ms, ssrc)); + absolute_capture_timestamp_ms, ssrc, + /*csrcs=*/std::vector(), codec_mimetype, + /*sequence_number=*/std::nullopt, audio_level_dbov)); } void ChannelSendFrameTransformerDelegate::OnTransformedFrame( @@ -142,13 +185,18 @@ void ChannelSendFrameTransformerDelegate::OnTransformedFrame( MutexLock lock(&send_lock_); if (!send_frame_callback_) return; - rtc::scoped_refptr delegate(this); + scoped_refptr delegate(this); encoder_queue_->PostTask( [delegate = std::move(delegate), frame = std::move(frame)]() mutable { delegate->SendFrame(std::move(frame)); }); } +void ChannelSendFrameTransformerDelegate::StartShortCircuiting() { + MutexLock lock(&send_lock_); + short_circuit_ = true; +} + void ChannelSendFrameTransformerDelegate::SendFrame( std::unique_ptr frame) const { MutexLock lock(&send_lock_); @@ -163,17 +211,23 @@ void ChannelSendFrameTransformerDelegate::SendFrame( transformed_frame->GetData(), transformed_frame->AbsoluteCaptureTimestamp() ? *transformed_frame->AbsoluteCaptureTimestamp() - : 0); + : 0, + transformed_frame->GetContributingSources(), + transformed_frame->AudioLevel()); } std::unique_ptr CloneSenderAudioFrame( TransformableAudioFrameInterface* original) { - // TODO(crbug.com/webrtc/14949): Ensure the correct timestamps are passed. 
+ std::vector csrcs; + csrcs.assign(original->GetContributingSources().begin(), + original->GetContributingSources().end()); return std::make_unique( InterfaceFrameTypeToInternalFrameType(original->Type()), original->GetPayloadType(), original->GetTimestamp(), original->GetData().data(), original->GetData().size(), - original->GetTimestamp(), original->GetSsrc()); + original->AbsoluteCaptureTimestamp(), original->GetSsrc(), + std::move(csrcs), original->GetMimeType(), original->SequenceNumber(), + original->AudioLevel()); } } // namespace webrtc diff --git a/audio/channel_send_frame_transformer_delegate.h b/audio/channel_send_frame_transformer_delegate.h index eb0027e4c8..eae9cac407 100644 --- a/audio/channel_send_frame_transformer_delegate.h +++ b/audio/channel_send_frame_transformer_delegate.h @@ -12,13 +12,14 @@ #define AUDIO_CHANNEL_SEND_FRAME_TRANSFORMER_DELEGATE_H_ #include +#include #include "api/frame_transformer_interface.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "rtc_base/buffer.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_queue.h" namespace webrtc { @@ -33,12 +34,14 @@ class ChannelSendFrameTransformerDelegate : public TransformedFrameCallback { std::function payload, - int64_t absolute_capture_timestamp_ms)>; + webrtc::ArrayView payload, + int64_t absolute_capture_timestamp_ms, + webrtc::ArrayView csrcs, + std::optional audio_level_dbov)>; ChannelSendFrameTransformerDelegate( SendFrameCallback send_frame_callback, - rtc::scoped_refptr frame_transformer, - rtc::TaskQueue* encoder_queue); + scoped_refptr frame_transformer, + TaskQueueBase* encoder_queue); // Registers `this` as callback for `frame_transformer_`, to get the // transformed frames. @@ -57,12 +60,16 @@ class ChannelSendFrameTransformerDelegate : public TransformedFrameCallback { const uint8_t* payload_data, size_t payload_size, int64_t absolute_capture_timestamp_ms, - uint32_t ssrc); + uint32_t ssrc, + const std::string& codec_mime_type, + std::optional audio_level_dbov); // Implements TransformedFrameCallback. Can be called on any thread. void OnTransformedFrame( std::unique_ptr frame) override; + void StartShortCircuiting() override; + // Delegates the call to ChannelSend::SendRtpAudio on the `encoder_queue_`, // by calling `send_audio_callback_`. 
void SendFrame(std::unique_ptr frame) const; @@ -73,8 +80,9 @@ class ChannelSendFrameTransformerDelegate : public TransformedFrameCallback { private: mutable Mutex send_lock_; SendFrameCallback send_frame_callback_ RTC_GUARDED_BY(send_lock_); - rtc::scoped_refptr frame_transformer_; - rtc::TaskQueue* encoder_queue_ RTC_GUARDED_BY(send_lock_); + scoped_refptr frame_transformer_; + TaskQueueBase* const encoder_queue_; + bool short_circuit_ RTC_GUARDED_BY(send_lock_) = false; }; std::unique_ptr CloneSenderAudioFrame( diff --git a/audio/channel_send_frame_transformer_delegate_unittest.cc b/audio/channel_send_frame_transformer_delegate_unittest.cc index f75d4a8ab7..2a526f0570 100644 --- a/audio/channel_send_frame_transformer_delegate_unittest.cc +++ b/audio/channel_send_frame_transformer_delegate_unittest.cc @@ -10,24 +10,37 @@ #include "audio/channel_send_frame_transformer_delegate.h" +#include #include +#include #include +#include +#include "absl/memory/memory.h" +#include "api/array_view.h" +#include "api/frame_transformer_interface.h" +#include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" +#include "api/test/mock_frame_transformer.h" +#include "api/test/mock_transformable_audio_frame.h" +#include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "rtc_base/task_queue_for_test.h" #include "test/gmock.h" #include "test/gtest.h" -#include "test/mock_frame_transformer.h" -#include "test/mock_transformable_frame.h" namespace webrtc { namespace { using ::testing::_; using ::testing::ElementsAre; +using ::testing::ElementsAreArray; using ::testing::NiceMock; +using ::testing::Optional; using ::testing::Return; using ::testing::SaveArg; +const uint8_t mock_data[] = {1, 2, 3, 4}; + class MockChannelSend { public: MockChannelSend() = default; @@ -38,38 +51,69 @@ class MockChannelSend { (AudioFrameType frameType, uint8_t payloadType, uint32_t rtp_timestamp, - rtc::ArrayView payload, - int64_t absolute_capture_timestamp_ms)); + webrtc::ArrayView payload, + int64_t absolute_capture_timestamp_ms, + webrtc::ArrayView csrcs, + std::optional audio_level_dbov)); ChannelSendFrameTransformerDelegate::SendFrameCallback callback() { return [this](AudioFrameType frameType, uint8_t payloadType, - uint32_t rtp_timestamp, rtc::ArrayView payload, - int64_t absolute_capture_timestamp_ms) { + uint32_t rtp_timestamp, ArrayView payload, + int64_t absolute_capture_timestamp_ms, + ArrayView csrcs, + std::optional audio_level_dbov) { return SendFrame(frameType, payloadType, rtp_timestamp, payload, - absolute_capture_timestamp_ms); + absolute_capture_timestamp_ms, csrcs, audio_level_dbov); }; } }; -std::unique_ptr CreateMockReceiverFrame() { - const uint8_t mock_data[] = {1, 2, 3, 4}; +std::unique_ptr CreateMockReceiverFrame( + const std::vector& csrcs, + std::optional audio_level_dbov) { std::unique_ptr mock_frame = - std::make_unique(); - rtc::ArrayView payload(mock_data); + std::make_unique>(); + ArrayView payload(mock_data); ON_CALL(*mock_frame, GetData).WillByDefault(Return(payload)); ON_CALL(*mock_frame, GetPayloadType).WillByDefault(Return(0)); ON_CALL(*mock_frame, GetDirection) .WillByDefault(Return(TransformableFrameInterface::Direction::kReceiver)); + ON_CALL(*mock_frame, GetContributingSources).WillByDefault(Return(csrcs)); + ON_CALL(*mock_frame, SequenceNumber).WillByDefault(Return(987654321)); + ON_CALL(*mock_frame, AudioLevel).WillByDefault(Return(audio_level_dbov)); return mock_frame; } +std::unique_ptr CreateFrame() { + TaskQueueForTest channel_queue("channel_queue"); + 
scoped_refptr mock_frame_transformer = + make_ref_counted>(); + MockChannelSend mock_channel; + scoped_refptr delegate = + make_ref_counted( + mock_channel.callback(), mock_frame_transformer, channel_queue.Get()); + + std::unique_ptr frame; + ON_CALL(*mock_frame_transformer, Transform) + .WillByDefault( + [&frame]( + std::unique_ptr transform_frame) { + frame = std::move(transform_frame); + }); + delegate->Transform( + AudioFrameType::kEmptyFrame, 0, 0, mock_data, sizeof(mock_data), 0, + /*ssrc=*/0, /*mimeType=*/"audio/opus", /*audio_level_dbov=*/123); + return absl::WrapUnique( + static_cast(frame.release())); +} + // Test that the delegate registers itself with the frame transformer on Init(). TEST(ChannelSendFrameTransformerDelegateTest, RegisterTransformedFrameCallbackOnInit) { - rtc::scoped_refptr mock_frame_transformer = - rtc::make_ref_counted(); - rtc::scoped_refptr delegate = - rtc::make_ref_counted( + scoped_refptr mock_frame_transformer = + make_ref_counted(); + scoped_refptr delegate = + make_ref_counted( ChannelSendFrameTransformerDelegate::SendFrameCallback(), mock_frame_transformer, nullptr); EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback); @@ -80,10 +124,10 @@ TEST(ChannelSendFrameTransformerDelegateTest, // Reset(). TEST(ChannelSendFrameTransformerDelegateTest, UnregisterTransformedFrameCallbackOnReset) { - rtc::scoped_refptr mock_frame_transformer = - rtc::make_ref_counted(); - rtc::scoped_refptr delegate = - rtc::make_ref_counted( + scoped_refptr mock_frame_transformer = + make_ref_counted(); + scoped_refptr delegate = + make_ref_counted( ChannelSendFrameTransformerDelegate::SendFrameCallback(), mock_frame_transformer, nullptr); EXPECT_CALL(*mock_frame_transformer, UnregisterTransformedFrameCallback); @@ -95,13 +139,13 @@ TEST(ChannelSendFrameTransformerDelegateTest, TEST(ChannelSendFrameTransformerDelegateTest, TransformRunsChannelSendCallback) { TaskQueueForTest channel_queue("channel_queue"); - rtc::scoped_refptr mock_frame_transformer = - rtc::make_ref_counted>(); + scoped_refptr mock_frame_transformer = + make_ref_counted>(); MockChannelSend mock_channel; - rtc::scoped_refptr delegate = - rtc::make_ref_counted( - mock_channel.callback(), mock_frame_transformer, &channel_queue); - rtc::scoped_refptr callback; + scoped_refptr delegate = + make_ref_counted( + mock_channel.callback(), mock_frame_transformer, channel_queue.Get()); + scoped_refptr callback; EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback) .WillOnce(SaveArg<0>(&callback)); delegate->Init(); @@ -115,7 +159,8 @@ TEST(ChannelSendFrameTransformerDelegateTest, callback->OnTransformedFrame(std::move(frame)); }); delegate->Transform(AudioFrameType::kEmptyFrame, 0, 0, data, sizeof(data), 0, - 0); + /*ssrc=*/0, /*mimeType=*/"audio/opus", + /*audio_level_dbov=*/31); channel_queue.WaitForPreviouslyPostedTasks(); } @@ -124,28 +169,34 @@ TEST(ChannelSendFrameTransformerDelegateTest, TEST(ChannelSendFrameTransformerDelegateTest, TransformRunsChannelSendCallbackForIncomingFrame) { TaskQueueForTest channel_queue("channel_queue"); - rtc::scoped_refptr mock_frame_transformer = - rtc::make_ref_counted>(); + scoped_refptr mock_frame_transformer = + make_ref_counted>(); MockChannelSend mock_channel; - rtc::scoped_refptr delegate = - rtc::make_ref_counted( - mock_channel.callback(), mock_frame_transformer, &channel_queue); - rtc::scoped_refptr callback; + scoped_refptr delegate = + make_ref_counted( + mock_channel.callback(), mock_frame_transformer, channel_queue.Get()); + 
scoped_refptr callback; EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback) .WillOnce(SaveArg<0>(&callback)); delegate->Init(); ASSERT_TRUE(callback); - const uint8_t data[] = {1, 2, 3, 4}; + const std::vector csrcs = {123, 234, 345, 456}; + const uint8_t audio_level_dbov = 17; EXPECT_CALL(mock_channel, SendFrame).Times(0); - EXPECT_CALL(mock_channel, SendFrame(_, 0, 0, ElementsAre(1, 2, 3, 4), _)); + EXPECT_CALL(mock_channel, + SendFrame(_, 0, 0, ElementsAreArray(mock_data), _, + ElementsAreArray(csrcs), Optional(audio_level_dbov))); ON_CALL(*mock_frame_transformer, Transform) .WillByDefault( - [&callback](std::unique_ptr frame) { - callback->OnTransformedFrame(CreateMockReceiverFrame()); + [&](std::unique_ptr /* frame */) { + callback->OnTransformedFrame(CreateMockReceiverFrame( + csrcs, std::optional(audio_level_dbov))); }); - delegate->Transform(AudioFrameType::kEmptyFrame, 0, 0, data, sizeof(data), 0, - 0); + delegate->Transform(AudioFrameType::kEmptyFrame, 0, 0, mock_data, + sizeof(mock_data), 0, + /*ssrc=*/0, /*mimeType=*/"audio/opus", + /*audio_level_dbov=*/std::nullopt); channel_queue.WaitForPreviouslyPostedTasks(); } @@ -155,12 +206,12 @@ TEST(ChannelSendFrameTransformerDelegateTest, TEST(ChannelSendFrameTransformerDelegateTest, OnTransformedDoesNotRunChannelSendCallbackAfterReset) { TaskQueueForTest channel_queue("channel_queue"); - rtc::scoped_refptr mock_frame_transformer = - rtc::make_ref_counted>(); + scoped_refptr mock_frame_transformer = + make_ref_counted>(); MockChannelSend mock_channel; - rtc::scoped_refptr delegate = - rtc::make_ref_counted( - mock_channel.callback(), mock_frame_transformer, &channel_queue); + scoped_refptr delegate = + make_ref_counted( + mock_channel.callback(), mock_frame_transformer, channel_queue.Get()); delegate->Reset(); EXPECT_CALL(mock_channel, SendFrame).Times(0); @@ -168,5 +219,64 @@ TEST(ChannelSendFrameTransformerDelegateTest, channel_queue.WaitForPreviouslyPostedTasks(); } +TEST(ChannelSendFrameTransformerDelegateTest, ShortCircuitingSkipsTransform) { + TaskQueueForTest channel_queue("channel_queue"); + scoped_refptr mock_frame_transformer = + make_ref_counted>(); + MockChannelSend mock_channel; + scoped_refptr delegate = + make_ref_counted( + mock_channel.callback(), mock_frame_transformer, channel_queue.Get()); + + delegate->StartShortCircuiting(); + + // Will not call the actual transformer. + EXPECT_CALL(*mock_frame_transformer, Transform).Times(0); + // Will pass the frame straight to the channel. 
+ EXPECT_CALL(mock_channel, SendFrame); + const uint8_t data[] = {1, 2, 3, 4}; + delegate->Transform(AudioFrameType::kEmptyFrame, 0, 0, data, sizeof(data), 0, + /*ssrc=*/0, /*mimeType=*/"audio/opus", + /*audio_level_dbov=*/std::nullopt); +} + +TEST(ChannelSendFrameTransformerDelegateTest, + CloningSenderFramePreservesInformation) { + std::unique_ptr frame = CreateFrame(); + std::unique_ptr cloned_frame = + CloneSenderAudioFrame(frame.get()); + + EXPECT_EQ(cloned_frame->GetTimestamp(), frame->GetTimestamp()); + EXPECT_EQ(cloned_frame->GetSsrc(), frame->GetSsrc()); + EXPECT_EQ(cloned_frame->Type(), frame->Type()); + EXPECT_EQ(cloned_frame->GetPayloadType(), frame->GetPayloadType()); + EXPECT_EQ(cloned_frame->GetMimeType(), frame->GetMimeType()); + EXPECT_THAT(cloned_frame->GetContributingSources(), + ElementsAreArray(frame->GetContributingSources())); + EXPECT_EQ(cloned_frame->AudioLevel(), frame->AudioLevel()); +} + +TEST(ChannelSendFrameTransformerDelegateTest, CloningReceiverFrameWithCsrcs) { + std::unique_ptr frame = + CreateMockReceiverFrame(/*csrcs=*/{123, 234, 345}, + std::optional(72)); + std::unique_ptr cloned_frame = + CloneSenderAudioFrame(frame.get()); + + EXPECT_EQ(cloned_frame->GetTimestamp(), frame->GetTimestamp()); + EXPECT_EQ(cloned_frame->GetSsrc(), frame->GetSsrc()); + EXPECT_EQ(cloned_frame->Type(), frame->Type()); + EXPECT_EQ(cloned_frame->GetPayloadType(), frame->GetPayloadType()); + EXPECT_EQ(cloned_frame->GetMimeType(), frame->GetMimeType()); + EXPECT_EQ(cloned_frame->AbsoluteCaptureTimestamp(), + frame->AbsoluteCaptureTimestamp()); + + ASSERT_NE(frame->GetContributingSources().size(), 0u); + EXPECT_THAT(cloned_frame->GetContributingSources(), + ElementsAreArray(frame->GetContributingSources())); + EXPECT_EQ(cloned_frame->SequenceNumber(), frame->SequenceNumber()); + EXPECT_EQ(cloned_frame->AudioLevel(), frame->AudioLevel()); +} + } // namespace } // namespace webrtc diff --git a/audio/channel_send_unittest.cc b/audio/channel_send_unittest.cc index b9406e1523..03ce5bceeb 100644 --- a/audio/channel_send_unittest.cc +++ b/audio/channel_send_unittest.cc @@ -10,27 +10,53 @@ #include "audio/channel_send.h" +#include +#include +#include +#include #include +#include +#include "api/array_view.h" #include "api/audio/audio_frame.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" -#include "api/rtc_event_log/rtc_event_log.h" +#include "api/call/bitrate_allocation.h" +#include "api/call/transport.h" +#include "api/crypto/crypto_options.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/frame_transformer_interface.h" +#include "api/make_ref_counted.h" +#include "api/rtp_headers.h" #include "api/scoped_refptr.h" +#include "api/test/mock_frame_transformer.h" +#include "api/test/mock_transformable_audio_frame.h" +#include "api/test/rtc_error_matchers.h" +#include "api/transport/bitrate_settings.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "call/rtp_transport_config.h" #include "call/rtp_transport_controller_send.h" -#include "rtc_base/gunit.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "test/gmock.h" #include "test/gtest.h" -#include "test/mock_frame_transformer.h" #include "test/mock_transport.h" #include 
"test/scoped_key_value_config.h" #include "test/time_controller/simulated_time_controller.h" +#include "test/wait_until.h" namespace webrtc { namespace voe { namespace { +using ::testing::Eq; using ::testing::Invoke; +using ::testing::IsTrue; using ::testing::NiceMock; using ::testing::Return; using ::testing::SaveArg; @@ -53,52 +79,57 @@ class ChannelSendTest : public ::testing::Test { protected: ChannelSendTest() : time_controller_(Timestamp::Seconds(1)), + env_(CreateEnvironment(&field_trials_, + time_controller_.GetClock(), + time_controller_.CreateTaskQueueFactory())), transport_controller_( - time_controller_.GetClock(), - RtpTransportConfig{ - .bitrate_config = GetBitrateConfig(), - .event_log = &event_log_, - .task_queue_factory = time_controller_.GetTaskQueueFactory(), - .trials = &field_trials_, - }) { - channel_ = voe::CreateChannelSend( - time_controller_.GetClock(), time_controller_.GetTaskQueueFactory(), - &transport_, nullptr, &event_log_, nullptr, crypto_options_, false, - kRtcpIntervalMs, kSsrc, nullptr, &transport_controller_, field_trials_); + RtpTransportConfig{.env = env_, + .bitrate_config = GetBitrateConfig()}) { + channel_ = voe::CreateChannelSend(env_, &transport_, nullptr, nullptr, + crypto_options_, false, kRtcpIntervalMs, + kSsrc, nullptr, &transport_controller_); encoder_factory_ = CreateBuiltinAudioEncoderFactory(); - std::unique_ptr encoder = encoder_factory_->MakeAudioEncoder( - kPayloadType, SdpAudioFormat("opus", kRtpRateHz, 2), {}); - channel_->SetEncoder(kPayloadType, std::move(encoder)); + SdpAudioFormat opus = SdpAudioFormat("opus", kRtpRateHz, 2); + std::unique_ptr encoder = + encoder_factory_->Create(env_, opus, {.payload_type = kPayloadType}); + channel_->SetEncoder(kPayloadType, opus, std::move(encoder)); transport_controller_.EnsureStarted(); channel_->RegisterSenderCongestionControlObjects(&transport_controller_); ON_CALL(transport_, SendRtcp).WillByDefault(Return(true)); ON_CALL(transport_, SendRtp).WillByDefault(Return(true)); } - std::unique_ptr CreateAudioFrame() { + std::unique_ptr CreateAudioFrame(uint8_t data_init_value = 0) { auto frame = std::make_unique(); frame->sample_rate_hz_ = kSampleRateHz; frame->samples_per_channel_ = kSampleRateHz / 100; frame->num_channels_ = 1; frame->set_absolute_capture_timestamp_ms( time_controller_.GetClock()->TimeInMilliseconds()); + int16_t* dest = frame->mutable_data(); + for (size_t i = 0; i < frame->samples_per_channel_ * frame->num_channels_; + i++, dest++) { + *dest = data_init_value; + } return frame; } - void ProcessNextFrame() { - channel_->ProcessAndEncodeAudio(CreateAudioFrame()); + void ProcessNextFrame(std::unique_ptr audio_frame) { + channel_->ProcessAndEncodeAudio(std::move(audio_frame)); // Advance time to process the task queue. time_controller_.AdvanceTime(TimeDelta::Millis(10)); } + void ProcessNextFrame() { ProcessNextFrame(CreateAudioFrame()); } + GlobalSimulatedTimeController time_controller_; webrtc::test::ScopedKeyValueConfig field_trials_; - RtcEventLogNull event_log_; + Environment env_; NiceMock transport_; CryptoOptions crypto_options_; RtpTransportControllerSend transport_controller_; std::unique_ptr channel_; - rtc::scoped_refptr encoder_factory_; + scoped_refptr encoder_factory_; }; TEST_F(ChannelSendTest, StopSendShouldResetEncoder) { @@ -112,6 +143,7 @@ TEST_F(ChannelSendTest, StopSendShouldResetEncoder) { ProcessNextFrame(); // StopSend should clear the previous audio frame stored in the encoder. 
channel_->StopSend(); + channel_->StartSend(); // The following frame should not trigger a new packet since the encoder // needs 20 ms audio. @@ -123,8 +155,8 @@ TEST_F(ChannelSendTest, IncreaseRtpTimestampByPauseDuration) { channel_->StartSend(); uint32_t timestamp; int sent_packets = 0; - auto send_rtp = [&](rtc::ArrayView data, - const PacketOptions& options) { + auto send_rtp = [&](ArrayView data, + const PacketOptions& /* options */) { ++sent_packets; RtpPacketReceived packet; packet.Parse(data); @@ -149,17 +181,17 @@ TEST_F(ChannelSendTest, IncreaseRtpTimestampByPauseDuration) { } TEST_F(ChannelSendTest, FrameTransformerGetsCorrectTimestamp) { - rtc::scoped_refptr mock_frame_transformer = - rtc::make_ref_counted(); + scoped_refptr mock_frame_transformer = + make_ref_counted(); channel_->SetEncoderToPacketizerFrameTransformer(mock_frame_transformer); - rtc::scoped_refptr callback; + scoped_refptr callback; EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback) .WillOnce(SaveArg<0>(&callback)); EXPECT_CALL(*mock_frame_transformer, UnregisterTransformedFrameCallback); - absl::optional sent_timestamp; - auto send_rtp = [&](rtc::ArrayView data, - const PacketOptions& options) { + std::optional sent_timestamp; + auto send_rtp = [&](ArrayView data, + const PacketOptions& /* options */) { RtpPacketReceived packet; packet.Parse(data); if (!sent_timestamp) { @@ -183,11 +215,200 @@ TEST_F(ChannelSendTest, FrameTransformerGetsCorrectTimestamp) { // Ensure the RTP timestamp on the frame passed to the transformer // includes the RTP offset and matches the actual RTP timestamp on the sent // packet. - EXPECT_EQ_WAIT(transformable_frame_timestamp, - 0 + channel_->GetRtpRtcp()->StartTimestamp(), 1000); - EXPECT_TRUE_WAIT(sent_timestamp, 1000); + EXPECT_THAT( + WaitUntil([&] { return 0 + channel_->GetRtpRtcp()->StartTimestamp(); }, + Eq(transformable_frame_timestamp)), + IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return sent_timestamp; }, IsTrue()), IsRtcOk()); EXPECT_EQ(*sent_timestamp, transformable_frame_timestamp); } + +// Ensure that AudioLevel calculations are performed correctly per-packet even +// if there's an async Encoded Frame Transform happening. +TEST_F(ChannelSendTest, AudioLevelsAttachedToCorrectTransformedFrame) { + channel_->SetSendAudioLevelIndicationStatus(true, /*id=*/1); + RtpPacketReceived::ExtensionManager extension_manager; + extension_manager.RegisterByType(1, kRtpExtensionAudioLevel); + + scoped_refptr mock_frame_transformer = + make_ref_counted(); + channel_->SetEncoderToPacketizerFrameTransformer(mock_frame_transformer); + scoped_refptr callback; + EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback) + .WillOnce(SaveArg<0>(&callback)); + EXPECT_CALL(*mock_frame_transformer, UnregisterTransformedFrameCallback); + + std::vector sent_audio_levels; + auto send_rtp = [&](ArrayView data, + const PacketOptions& /* options */) { + RtpPacketReceived packet(&extension_manager); + packet.Parse(data); + RTPHeader header; + packet.GetHeader(&header); + sent_audio_levels.push_back(header.extension.audio_level()->level()); + return true; + }; + EXPECT_CALL(transport_, SendRtp).WillRepeatedly(Invoke(send_rtp)); + + channel_->StartSend(); + std::vector> frames; + EXPECT_CALL(*mock_frame_transformer, Transform) + .Times(2) + .WillRepeatedly([&](std::unique_ptr frame) { + frames.push_back(std::move(frame)); + }); + + // Insert two frames of 7s which should trigger a new packet. 
+ ProcessNextFrame(CreateAudioFrame(/*data_init_value=*/7)); + ProcessNextFrame(CreateAudioFrame(/*data_init_value=*/7)); + + // Insert two more frames of 3s, meaning a second packet is + // prepared and sent to the transform before the first packet has + // been sent. + ProcessNextFrame(CreateAudioFrame(/*data_init_value=*/3)); + ProcessNextFrame(CreateAudioFrame(/*data_init_value=*/3)); + + // Wait for both packets to be encoded and sent to the transform. + EXPECT_THAT(WaitUntil([&] { return frames.size(); }, Eq(2ul)), IsRtcOk()); + // Complete the transforms on both frames at the same time. + callback->OnTransformedFrame(std::move(frames[0])); + callback->OnTransformedFrame(std::move(frames[1])); + + // Allow things posted back to the encoder queue to run. + time_controller_.AdvanceTime(TimeDelta::Millis(10)); + + // Ensure the audio levels on both sent packets are present and + // match their contents. + EXPECT_THAT(WaitUntil([&] { return sent_audio_levels.size(); }, Eq(2ul)), + IsRtcOk()); + // The rms dBov of the packet with raw audio of all 7s is 73. + EXPECT_EQ(sent_audio_levels[0], 73); + // The rms dBov of the second packet with raw audio of all 3s is 81. + EXPECT_EQ(sent_audio_levels[1], 81); +} + +// Ensure that AudioLevels are attached to frames injected into the +// Encoded Frame transform. +TEST_F(ChannelSendTest, AudioLevelsAttachedToInsertedTransformedFrame) { + channel_->SetSendAudioLevelIndicationStatus(true, /*id=*/1); + RtpPacketReceived::ExtensionManager extension_manager; + extension_manager.RegisterByType(1, kRtpExtensionAudioLevel); + + scoped_refptr mock_frame_transformer = + make_ref_counted(); + channel_->SetEncoderToPacketizerFrameTransformer(mock_frame_transformer); + scoped_refptr callback; + EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameCallback) + .WillOnce(SaveArg<0>(&callback)); + EXPECT_CALL(*mock_frame_transformer, UnregisterTransformedFrameCallback); + + std::optional sent_audio_level; + auto send_rtp = [&](ArrayView data, + const PacketOptions& /* options */) { + RtpPacketReceived packet(&extension_manager); + packet.Parse(data); + RTPHeader header; + packet.GetHeader(&header); + sent_audio_level = header.extension.audio_level()->level(); + return true; + }; + EXPECT_CALL(transport_, SendRtp).WillRepeatedly(Invoke(send_rtp)); + + channel_->StartSend(); + + time_controller_.AdvanceTime(TimeDelta::Millis(10)); + // Inject a frame encoded elsewhere. + auto mock_frame = std::make_unique>(); + uint8_t audio_level = 67; + ON_CALL(*mock_frame, AudioLevel()).WillByDefault(Return(audio_level)); + uint8_t payload[10]; + ON_CALL(*mock_frame, GetData()) + .WillByDefault(Return(ArrayView(&payload[0], 10))); + EXPECT_THAT(WaitUntil([&] { return callback; }, IsTrue()), IsRtcOk()); + callback->OnTransformedFrame(std::move(mock_frame)); + + // Allow things posted back to the encoder queue to run. + time_controller_.AdvanceTime(TimeDelta::Millis(10)); + + // Ensure the audio level is set on the sent packet. + EXPECT_THAT(WaitUntil([&] { return sent_audio_level; }, IsTrue()), IsRtcOk()); + EXPECT_EQ(*sent_audio_level, audio_level); +}
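The levels asserted in the two tests above (73 for constant samples of 7, 81 for constant samples of 3) are the -dBov values carried by the audio-level RTP header extension (RFC 6464), derived from the packet's RMS amplitude relative to int16 full scale; for a constant-amplitude signal the RMS equals the amplitude. A minimal sketch of that calculation, assuming full scale 32768 and round-to-nearest (the helper name is illustrative and not part of this patch):

#include <cmath>
#include <cstdint>
#include <cstdlib>

// -dBov audio level of a constant-amplitude int16 signal (amplitude > 0):
// 7 -> 73 and 3 -> 81, matching the expectations in the tests above.
inline uint8_t ConstantAmplitudeLevelDbov(int16_t amplitude) {
  return static_cast<uint8_t>(
      std::lround(-20.0 * std::log10(std::abs(amplitude) / 32768.0)));
}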
+ +// Ensure that GetUsedRate returns null if no frames are coded. +TEST_F(ChannelSendTest, NoUsedRateInitially) { + channel_->StartSend(); + auto used_rate = channel_->GetUsedRate(); + EXPECT_EQ(used_rate, std::nullopt); +} + +// Ensure that GetUsedRate returns a value with one coded frame. +TEST_F(ChannelSendTest, ValidUsedRateWithOneCodedFrame) { + channel_->StartSend(); + EXPECT_CALL(transport_, SendRtp).Times(1); + ProcessNextFrame(); + ProcessNextFrame(); + auto used_rate = channel_->GetUsedRate(); + EXPECT_GT(used_rate.value().bps(), 0); +} + +// Ensure that GetUsedRate reports the larger rate of the last two coded frames. +TEST_F(ChannelSendTest, UsedRateIsLargerOfLastTwoFrames) { + channel_->StartSend(); + channel_->CallEncoder( + [&](AudioEncoder* encoder) { encoder->OnReceivedOverhead(72); }); + DataRate lowrate = DataRate::BitsPerSec(40000); + DataRate highrate = DataRate::BitsPerSec(80000); + BitrateAllocationUpdate update; + update.bwe_period = TimeDelta::Millis(100); + + update.target_bitrate = lowrate; + channel_->OnBitrateAllocation(update); + EXPECT_CALL(transport_, SendRtp).Times(1); + ProcessNextFrame(); + ProcessNextFrame(); + // Last two frames have rates [32kbps, -], yielding 32kbps. + auto used_rate_1 = channel_->GetUsedRate(); + + update.target_bitrate = highrate; + channel_->OnBitrateAllocation(update); + EXPECT_CALL(transport_, SendRtp).Times(1); + ProcessNextFrame(); + ProcessNextFrame(); + // Last two frames have rates [54kbps, 32kbps], yielding 54kbps. + auto used_rate_2 = channel_->GetUsedRate(); + + update.target_bitrate = lowrate; + channel_->OnBitrateAllocation(update); + EXPECT_CALL(transport_, SendRtp).Times(1); + ProcessNextFrame(); + ProcessNextFrame(); + // Last two frames have rates [32kbps, 54kbps], yielding 54kbps. + auto used_rate_3 = channel_->GetUsedRate(); + + EXPECT_GT(used_rate_2, used_rate_1); + EXPECT_EQ(used_rate_3, used_rate_2); +} + +// Test that we gracefully handle packets while the congestion control objects +// are not configured. This can happen during calls to +// AudioSendStream::ConfigureStream. +TEST_F(ChannelSendTest, EnqueuePacketsGracefullyHandlesNonInitializedPacer) { + EXPECT_CALL(transport_, SendRtp).Times(1); + channel_->StartSend(); + channel_->ResetSenderCongestionControlObjects(); + // This should trigger a packet, but congestion control is not configured, + // so it should be dropped. + ProcessNextFrame(); + ProcessNextFrame(); + + channel_->RegisterSenderCongestionControlObjects(&transport_controller_); + // Now that we have reconfigured the congestion control objects, the new + // frame should be processed. + ProcessNextFrame(); + ProcessNextFrame(); +} + } // namespace } // namespace voe } // namespace webrtc diff --git a/audio/mock_voe_channel_proxy.h b/audio/mock_voe_channel_proxy.h index 29005173df..0038f3aa24 100644 --- a/audio/mock_voe_channel_proxy.h +++ b/audio/mock_voe_channel_proxy.h @@ -11,17 +11,36 @@ #ifndef AUDIO_MOCK_VOE_CHANNEL_PROXY_H_ #define AUDIO_MOCK_VOE_CHANNEL_PROXY_H_ +#include +#include #include #include -#include +#include #include #include +#include "absl/strings/string_view.h" +#include "api/audio/audio_frame.h" +#include "api/audio/audio_mixer.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/call/audio_sink.h" +#include "api/call/bitrate_allocation.h" #include "api/crypto/frame_decryptor_interface.h" -#include "api/test/mock_frame_encryptor.h" +#include "api/crypto/frame_encryptor_interface.h" +#include "api/frame_transformer_interface.h" +#include "api/function_view.h" +#include "api/rtp_headers.h" +#include "api/scoped_refptr.h" +#include "api/transport/rtp/rtp_source.h" +#include "api/units/data_rate.h" #include "audio/channel_receive.h" #include "audio/channel_send.h" +#include "call/syncable.h" +#include 
"modules/audio_coding/include/audio_coding_module_typedefs.h" +#include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "test/gmock.h" namespace webrtc { @@ -30,6 +49,7 @@ namespace test { class MockChannelReceive : public voe::ChannelReceiveInterface { public: MOCK_METHOD(void, SetNACKStatus, (bool enable, int max_packets), (override)); + MOCK_METHOD(void, SetRtcpMode, (RtcpMode mode), (override)); MOCK_METHOD(void, SetNonSenderRttMeasurement, (bool enabled), (override)); MOCK_METHOD(void, RegisterReceiverCongestionControlObjects, @@ -61,11 +81,7 @@ class MockChannelReceive : public voe::ChannelReceiveInterface { (int sample_rate_hz, AudioFrame*), (override)); MOCK_METHOD(int, PreferredSampleRate, (), (const, override)); - MOCK_METHOD(void, SetSourceTracker, (SourceTracker*), (override)); - MOCK_METHOD(void, - SetAssociatedSendChannel, - (const voe::ChannelSendInterface*), - (override)); + MOCK_METHOD(std::vector, GetSources, (), (const, override)); MOCK_METHOD(bool, GetPlayoutRtpTimestamp, (uint32_t*, int64_t*), @@ -74,18 +90,18 @@ class MockChannelReceive : public voe::ChannelReceiveInterface { SetEstimatedPlayoutNtpTimestampMs, (int64_t ntp_timestamp_ms, int64_t time_ms), (override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, GetCurrentEstimatedPlayoutNtpTimestampMs, (int64_t now_ms), (const, override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, GetSyncInfo, (), (const, override)); MOCK_METHOD(bool, SetMinimumPlayoutDelay, (int delay_ms), (override)); MOCK_METHOD(bool, SetBaseMinimumPlayoutDelayMs, (int delay_ms), (override)); MOCK_METHOD(int, GetBaseMinimumPlayoutDelayMs, (), (const, override)); - MOCK_METHOD((absl::optional>), + MOCK_METHOD((std::optional>), GetReceiveCodec, (), (const, override)); @@ -95,34 +111,35 @@ class MockChannelReceive : public voe::ChannelReceiveInterface { (override)); MOCK_METHOD(void, StartPlayout, (), (override)); MOCK_METHOD(void, StopPlayout, (), (override)); - MOCK_METHOD( - void, - SetDepacketizerToDecoderFrameTransformer, - (rtc::scoped_refptr frame_transformer), - (override)); + MOCK_METHOD(void, + SetDepacketizerToDecoderFrameTransformer, + (webrtc::scoped_refptr + frame_transformer), + (override)); MOCK_METHOD( void, SetFrameDecryptor, - (rtc::scoped_refptr frame_decryptor), + (webrtc::scoped_refptr frame_decryptor), (override)); MOCK_METHOD(void, OnLocalSsrcChange, (uint32_t local_ssrc), (override)); - MOCK_METHOD(uint32_t, GetLocalSsrc, (), (const, override)); }; class MockChannelSend : public voe::ChannelSendInterface { public: MOCK_METHOD(void, SetEncoder, - (int payload_type, std::unique_ptr encoder), + (int payload_type, + const SdpAudioFormat& encoder_format, + std::unique_ptr encoder), (override)); MOCK_METHOD( void, ModifyEncoder, - (rtc::FunctionView*)> modifier), + (webrtc::FunctionView*)> modifier), (override)); MOCK_METHOD(void, CallEncoder, - (rtc::FunctionView modifier), + (webrtc::FunctionView modifier), (override)); MOCK_METHOD(void, SetRTCP_CNAME, (absl::string_view c_name), (override)); MOCK_METHOD(void, @@ -167,18 +184,22 @@ class MockChannelSend : public voe::ChannelSendInterface { (override)); MOCK_METHOD(RtpRtcpInterface*, GetRtpRtcp, (), (const, override)); MOCK_METHOD(int, GetTargetBitrate, (), (const, override)); - MOCK_METHOD(int64_t, GetRTT, (), (const, override)); MOCK_METHOD(void, StartSend, (), (override)); MOCK_METHOD(void, StopSend, (), (override)); 
MOCK_METHOD(void, SetFrameEncryptor, - (rtc::scoped_refptr frame_encryptor), + (webrtc::scoped_refptr frame_encryptor), + (override)); + MOCK_METHOD(void, + SetEncoderToPacketizerFrameTransformer, + (webrtc::scoped_refptr + frame_transformer), + (override)); + MOCK_METHOD(std::optional, GetUsedRate, (), (const, override)); + MOCK_METHOD(void, + RegisterPacketOverhead, + (int packet_byte_overhead), (override)); - MOCK_METHOD( - void, - SetEncoderToPacketizerFrameTransformer, - (rtc::scoped_refptr frame_transformer), - (override)); }; } // namespace test } // namespace webrtc diff --git a/audio/remix_resample.cc b/audio/remix_resample.cc index 178af622a1..93e86517ae 100644 --- a/audio/remix_resample.cc +++ b/audio/remix_resample.cc @@ -10,10 +10,13 @@ #include "audio/remix_resample.h" +#include + #include "api/audio/audio_frame.h" #include "audio/utility/audio_frame_operations.h" #include "common_audio/resampler/include/push_resampler.h" #include "rtc_base/checks.h" +#include "rtc_base/logging.h" namespace webrtc { namespace voe { @@ -21,68 +24,64 @@ namespace voe { void RemixAndResample(const AudioFrame& src_frame, PushResampler* resampler, AudioFrame* dst_frame) { - RemixAndResample(src_frame.data(), src_frame.samples_per_channel_, - src_frame.num_channels_, src_frame.sample_rate_hz_, - resampler, dst_frame); + RemixAndResample(src_frame.data_view(), src_frame.sample_rate_hz_, resampler, + dst_frame); dst_frame->timestamp_ = src_frame.timestamp_; dst_frame->elapsed_time_ms_ = src_frame.elapsed_time_ms_; dst_frame->ntp_time_ms_ = src_frame.ntp_time_ms_; dst_frame->packet_infos_ = src_frame.packet_infos_; } -void RemixAndResample(const int16_t* src_data, - size_t samples_per_channel, - size_t num_channels, +void RemixAndResample(InterleavedView src_data, int sample_rate_hz, PushResampler* resampler, AudioFrame* dst_frame) { - const int16_t* audio_ptr = src_data; - size_t audio_ptr_num_channels = num_channels; - int16_t downmixed_audio[AudioFrame::kMaxDataSizeSamples]; + // The `samples_per_channel_` members must have been set correctly based on + // the associated sample rate and the assumed 10ms buffer size. + // TODO(tommi): Remove the `sample_rate_hz` param. + RTC_DCHECK_EQ(SampleRateToDefaultChannelSize(sample_rate_hz), + src_data.samples_per_channel()); + RTC_DCHECK_EQ(SampleRateToDefaultChannelSize(dst_frame->sample_rate_hz_), + dst_frame->samples_per_channel()); + + // Temporary buffer in case downmixing is required. + std::array downmixed_audio; // Downmix before resampling. 
- if (num_channels > dst_frame->num_channels_) { - RTC_DCHECK(num_channels == 2 || num_channels == 4) - << "num_channels: " << num_channels; + if (src_data.num_channels() > dst_frame->num_channels_) { + RTC_DCHECK(src_data.num_channels() == 2 || src_data.num_channels() == 4) + << "num_channels: " << src_data.num_channels(); RTC_DCHECK(dst_frame->num_channels_ == 1 || dst_frame->num_channels_ == 2) << "dst_frame->num_channels_: " << dst_frame->num_channels_; - AudioFrameOperations::DownmixChannels( - src_data, num_channels, samples_per_channel, dst_frame->num_channels_, - downmixed_audio); - audio_ptr = downmixed_audio; - audio_ptr_num_channels = dst_frame->num_channels_; - } - - if (resampler->InitializeIfNeeded(sample_rate_hz, dst_frame->sample_rate_hz_, - audio_ptr_num_channels) == -1) { - RTC_FATAL() << "InitializeIfNeeded failed: sample_rate_hz = " - << sample_rate_hz << ", dst_frame->sample_rate_hz_ = " - << dst_frame->sample_rate_hz_ - << ", audio_ptr_num_channels = " << audio_ptr_num_channels; + InterleavedView downmixed(downmixed_audio.data(), + src_data.samples_per_channel(), + dst_frame->num_channels_); + AudioFrameOperations::DownmixChannels(src_data, downmixed); + src_data = downmixed; } // TODO(yujo): for muted input frames, don't resample. Either 1) allow // resampler to return output length without doing the resample, so we know // how much to zero here; or 2) make resampler accept a hint that the input is // zeroed. - const size_t src_length = samples_per_channel * audio_ptr_num_channels; - int out_length = - resampler->Resample(audio_ptr, src_length, dst_frame->mutable_data(), - AudioFrame::kMaxDataSizeSamples); - if (out_length == -1) { - RTC_FATAL() << "Resample failed: audio_ptr = " << audio_ptr - << ", src_length = " << src_length - << ", dst_frame->mutable_data() = " - << dst_frame->mutable_data(); - } - dst_frame->samples_per_channel_ = out_length / audio_ptr_num_channels; + + // Stash away the originally requested number of channels. Then provide + // `dst_frame` as a target buffer with the same number of channels as the + // source. + auto original_dst_number_of_channels = dst_frame->num_channels_; + int out_length = resampler->Resample( + src_data, dst_frame->mutable_data(dst_frame->samples_per_channel_, + src_data.num_channels())); + RTC_CHECK_NE(out_length, -1) << "src_data.size=" << src_data.size(); + RTC_DCHECK_EQ(dst_frame->samples_per_channel(), + out_length / src_data.num_channels()); // Upmix after resampling. - if (num_channels == 1 && dst_frame->num_channels_ == 2) { + if (src_data.num_channels() == 1 && original_dst_number_of_channels == 2) { // The audio in dst_frame really is mono at this point; MonoToStereo will // set this back to stereo. - dst_frame->num_channels_ = 1; + RTC_DCHECK_EQ(dst_frame->num_channels_, 1); AudioFrameOperations::UpmixChannels(2, dst_frame); } } diff --git a/audio/remix_resample.h b/audio/remix_resample.h index bd8da76c6a..d2f34686dd 100644 --- a/audio/remix_resample.h +++ b/audio/remix_resample.h @@ -12,11 +12,14 @@ #define AUDIO_REMIX_RESAMPLE_H_ #include "api/audio/audio_frame.h" +#include "api/audio/audio_view.h" #include "common_audio/resampler/include/push_resampler.h" namespace webrtc { namespace voe { +// Note: The RemixAndResample methods assume 10ms buffer sizes. + // Upmix or downmix and resample the audio to `dst_frame`. Expects `dst_frame` // to have its sample rate and channels members set to the desired values. // Updates the `samples_per_channel_` member accordingly. 
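For reference, a rough usage sketch of the new InterleavedView overload under the 10 ms assumption noted above. The helper name and buffer sizes are illustrative only; the template arguments (stripped in the diff text) are assumed to be int16_t, and the AudioFrame constructor taking a sample rate and channel count is the one used by the updated unit tests further down, assumed to default samples_per_channel_ to the 10 ms size.

#include "api/audio/audio_frame.h"
#include "api/audio/audio_view.h"
#include "audio/remix_resample.h"
#include "common_audio/resampler/include/push_resampler.h"

// Downmixes and resamples one 10 ms block of interleaved stereo 48 kHz
// capture audio into a mono 16 kHz AudioFrame.
void DownmixAndResampleTenMs(const int16_t* stereo_48k,
                             webrtc::PushResampler<int16_t>* resampler) {
  // 10 ms of interleaved stereo at 48 kHz: 480 samples per channel.
  webrtc::InterleavedView<const int16_t> src(stereo_48k,
                                             /*samples_per_channel=*/480,
                                             /*num_channels=*/2);
  // Destination pre-configured for mono 16 kHz (160 samples per 10 ms).
  webrtc::AudioFrame dst(/*sample_rate_hz=*/16000, /*num_channels=*/1);
  webrtc::voe::RemixAndResample(src, /*sample_rate_hz=*/48000, resampler,
                                &dst);
}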
@@ -28,12 +31,9 @@ void RemixAndResample(const AudioFrame& src_frame, PushResampler* resampler, AudioFrame* dst_frame); -// This version has a pointer to the samples `src_data` as input and receives -// `samples_per_channel`, `num_channels` and `sample_rate_hz` of the data as -// parameters. -void RemixAndResample(const int16_t* src_data, - size_t samples_per_channel, - size_t num_channels, +// TODO(tommi): The `sample_rate_hz` argument can probably be removed since it's +// always related to `src_data.samples_per_frame()'. +void RemixAndResample(InterleavedView src_data, int sample_rate_hz, PushResampler* resampler, AudioFrame* dst_frame); diff --git a/audio/test/audio_end_to_end_test.cc b/audio/test/audio_end_to_end_test.cc index b1e2712f60..746ae3fea7 100644 --- a/audio/test/audio_end_to_end_test.cc +++ b/audio/test/audio_end_to_end_test.cc @@ -15,7 +15,6 @@ #include "api/task_queue/task_queue_base.h" #include "call/fake_network_pipe.h" -#include "call/simulated_network.h" #include "modules/audio_device/include/test_audio_device.h" #include "system_wrappers/include/sleep.h" #include "test/gtest.h" @@ -56,13 +55,13 @@ AudioEndToEndTest::CreateRenderer() { void AudioEndToEndTest::OnFakeAudioDevicesCreated( AudioDeviceModule* send_audio_device, - AudioDeviceModule* recv_audio_device) { + AudioDeviceModule* /* recv_audio_device */) { send_audio_device_ = send_audio_device; } void AudioEndToEndTest::ModifyAudioConfigs( AudioSendStream::Config* send_config, - std::vector* receive_configs) { + std::vector* /* receive_configs */) { // Large bitrate by default. const webrtc::SdpAudioFormat kDefaultFormat("opus", 48000, 2, {{"stereo", "1"}}); diff --git a/audio/test/audio_end_to_end_test.h b/audio/test/audio_end_to_end_test.h index d326b790ff..a5a6397c18 100644 --- a/audio/test/audio_end_to_end_test.h +++ b/audio/test/audio_end_to_end_test.h @@ -14,9 +14,9 @@ #include #include +#include "api/audio/audio_device.h" #include "api/task_queue/task_queue_base.h" #include "api/test/simulated_network.h" -#include "modules/audio_device/include/audio_device.h" #include "modules/audio_device/include/test_audio_device.h" #include "test/call_test.h" diff --git a/audio/test/non_sender_rtt_test.cc b/audio/test/non_sender_rtt_test.cc index 278193e335..17dd97ef09 100644 --- a/audio/test/non_sender_rtt_test.cc +++ b/audio/test/non_sender_rtt_test.cc @@ -8,15 +8,26 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include + +#include "api/task_queue/task_queue_base.h" +#include "api/test/rtc_error_matchers.h" +#include "api/test/simulated_network.h" +#include "api/units/time_delta.h" #include "audio/test/audio_end_to_end_test.h" -#include "rtc_base/gunit.h" +#include "call/audio_receive_stream.h" +#include "call/audio_send_stream.h" #include "rtc_base/task_queue_for_test.h" -#include "system_wrappers/include/sleep.h" +#include "test/call_test.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" namespace webrtc { namespace test { +using ::testing::IsTrue; using NonSenderRttTest = CallTest; TEST_F(NonSenderRttTest, NonSenderRttStats) { @@ -47,7 +58,10 @@ TEST_F(NonSenderRttTest, NonSenderRttStats) { // Wait until we have an RTT measurement, but no longer than // `kLongTimeoutMs`. This usually takes around 5 seconds, but in rare // cases it can take more than 10 seconds. 
- EXPECT_TRUE_WAIT(HasRoundTripTimeMeasurement(), kLongTimeoutMs); + EXPECT_THAT( + WaitUntil([&] { return HasRoundTripTimeMeasurement(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kLongTimeoutMs)}), + IsRtcOk()); } void OnStreamsStopped() override { diff --git a/audio/utility/BUILD.gn b/audio/utility/BUILD.gn index 983b6286e4..2ced9aa70d 100644 --- a/audio/utility/BUILD.gn +++ b/audio/utility/BUILD.gn @@ -23,14 +23,14 @@ rtc_library("audio_frame_operations") { ] deps = [ + "../../api:array_view", "../../api/audio:audio_frame_api", "../../common_audio", "../../rtc_base:checks", "../../rtc_base:logging", "../../rtc_base:safe_conversions", - "../../system_wrappers:field_trial", + "//third_party/abseil-cpp/absl/base:core_headers", ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ] } if (rtc_include_tests) { @@ -48,7 +48,6 @@ if (rtc_include_tests) { "../../rtc_base:logging", "../../rtc_base:macromagic", "../../rtc_base:stringutils", - "../../test:field_trial", "../../test:test_support", "//testing/gtest", ] diff --git a/audio/utility/audio_frame_operations.cc b/audio/utility/audio_frame_operations.cc index 1b936c239b..d602182041 100644 --- a/audio/utility/audio_frame_operations.cc +++ b/audio/utility/audio_frame_operations.cc @@ -29,72 +29,17 @@ const float kMuteFadeInc = 1.0f / kMuteFadeFrames; } // namespace -void AudioFrameOperations::Add(const AudioFrame& frame_to_add, - AudioFrame* result_frame) { - // Sanity check. - RTC_DCHECK(result_frame); - RTC_DCHECK_GT(result_frame->num_channels_, 0); - RTC_DCHECK_EQ(result_frame->num_channels_, frame_to_add.num_channels_); - - bool no_previous_data = result_frame->muted(); - if (result_frame->samples_per_channel_ != frame_to_add.samples_per_channel_) { - // Special case we have no data to start with. - RTC_DCHECK_EQ(result_frame->samples_per_channel_, 0); - result_frame->samples_per_channel_ = frame_to_add.samples_per_channel_; - no_previous_data = true; - } - - if (result_frame->vad_activity_ == AudioFrame::kVadActive || - frame_to_add.vad_activity_ == AudioFrame::kVadActive) { - result_frame->vad_activity_ = AudioFrame::kVadActive; - } else if (result_frame->vad_activity_ == AudioFrame::kVadUnknown || - frame_to_add.vad_activity_ == AudioFrame::kVadUnknown) { - result_frame->vad_activity_ = AudioFrame::kVadUnknown; - } - - if (result_frame->speech_type_ != frame_to_add.speech_type_) - result_frame->speech_type_ = AudioFrame::kUndefined; - - if (!frame_to_add.muted()) { - const int16_t* in_data = frame_to_add.data(); - int16_t* out_data = result_frame->mutable_data(); - size_t length = - frame_to_add.samples_per_channel_ * frame_to_add.num_channels_; - if (no_previous_data) { - std::copy(in_data, in_data + length, out_data); - } else { - for (size_t i = 0; i < length; i++) { - const int32_t wrap_guard = static_cast(out_data[i]) + - static_cast(in_data[i]); - out_data[i] = rtc::saturated_cast(wrap_guard); - } - } - } -} - -int AudioFrameOperations::MonoToStereo(AudioFrame* frame) { - if (frame->num_channels_ != 1) { - return -1; - } - UpmixChannels(2, frame); - return 0; -} - -int AudioFrameOperations::StereoToMono(AudioFrame* frame) { - if (frame->num_channels_ != 2) { - return -1; - } - DownmixChannels(1, frame); - return frame->num_channels_ == 1 ? 
0 : -1; -} - -void AudioFrameOperations::QuadToStereo(const int16_t* src_audio, - size_t samples_per_channel, - int16_t* dst_audio) { - for (size_t i = 0; i < samples_per_channel; i++) { - dst_audio[i * 2] = +void AudioFrameOperations::QuadToStereo( + InterleavedView src_audio, + InterleavedView dst_audio) { + RTC_DCHECK_EQ(NumChannels(src_audio), 4); + RTC_DCHECK_EQ(NumChannels(dst_audio), 2); + RTC_DCHECK_EQ(SamplesPerChannel(src_audio), SamplesPerChannel(dst_audio)); + for (size_t i = 0; i < SamplesPerChannel(src_audio); ++i) { + auto dst_frame = i * 2; + dst_audio[dst_frame] = (static_cast(src_audio[4 * i]) + src_audio[4 * i + 1]) >> 1; - dst_audio[i * 2 + 1] = + dst_audio[dst_frame + 1] = (static_cast(src_audio[4 * i + 2]) + src_audio[4 * i + 3]) >> 1; } @@ -109,30 +54,34 @@ int AudioFrameOperations::QuadToStereo(AudioFrame* frame) { AudioFrame::kMaxDataSizeSamples); if (!frame->muted()) { - QuadToStereo(frame->data(), frame->samples_per_channel_, - frame->mutable_data()); + // Note that `src` and `dst` will map in to the same buffer, but the call + // to `mutable_data()` changes the layout of `frame`, so `src` and `dst` + // will have different dimensions (important to call `data_view()` first). + auto src = frame->data_view(); + auto dst = frame->mutable_data(frame->samples_per_channel_, 2); + QuadToStereo(src, dst); + } else { + frame->num_channels_ = 2; } - frame->num_channels_ = 2; return 0; } -void AudioFrameOperations::DownmixChannels(const int16_t* src_audio, - size_t src_channels, - size_t samples_per_channel, - size_t dst_channels, - int16_t* dst_audio) { - if (src_channels > 1 && dst_channels == 1) { - DownmixInterleavedToMono(src_audio, samples_per_channel, src_channels, - dst_audio); - return; - } else if (src_channels == 4 && dst_channels == 2) { - QuadToStereo(src_audio, samples_per_channel, dst_audio); - return; +void AudioFrameOperations::DownmixChannels( + InterleavedView src_audio, + InterleavedView dst_audio) { + RTC_DCHECK_EQ(SamplesPerChannel(src_audio), SamplesPerChannel(dst_audio)); + if (NumChannels(src_audio) > 1 && IsMono(dst_audio)) { + // TODO(tommi): change DownmixInterleavedToMono to support InterleavedView + // and MonoView. + DownmixInterleavedToMono(&src_audio.data()[0], SamplesPerChannel(src_audio), + NumChannels(src_audio), &dst_audio.data()[0]); + } else if (NumChannels(src_audio) == 4 && NumChannels(dst_audio) == 2) { + QuadToStereo(src_audio, dst_audio); + } else { + RTC_DCHECK_NOTREACHED() << "src_channels: " << NumChannels(src_audio) + << ", dst_channels: " << NumChannels(dst_audio); } - - RTC_DCHECK_NOTREACHED() << "src_channels: " << src_channels - << ", dst_channels: " << dst_channels; } void AudioFrameOperations::DownmixChannels(size_t dst_channels, @@ -169,14 +118,16 @@ void AudioFrameOperations::UpmixChannels(size_t target_number_of_channels, if (!frame->muted()) { // Up-mixing done in place. Going backwards through the frame ensure nothing // is irrevocably overwritten. 
- int16_t* frame_data = frame->mutable_data(); - for (int i = frame->samples_per_channel_ - 1; i >= 0; i--) { + auto frame_data = frame->mutable_data(frame->samples_per_channel_, + target_number_of_channels); + for (int i = frame->samples_per_channel_ - 1; i >= 0; --i) { for (size_t j = 0; j < target_number_of_channels; ++j) { frame_data[target_number_of_channels * i + j] = frame_data[i]; } } + } else { + frame->num_channels_ = target_number_of_channels; } - frame->num_channels_ = target_number_of_channels; } void AudioFrameOperations::SwapStereoChannels(AudioFrame* frame) { @@ -250,35 +201,6 @@ void AudioFrameOperations::Mute(AudioFrame* frame) { Mute(frame, true, true); } -void AudioFrameOperations::ApplyHalfGain(AudioFrame* frame) { - RTC_DCHECK(frame); - RTC_DCHECK_GT(frame->num_channels_, 0); - if (frame->num_channels_ < 1 || frame->muted()) { - return; - } - - int16_t* frame_data = frame->mutable_data(); - for (size_t i = 0; i < frame->samples_per_channel_ * frame->num_channels_; - i++) { - frame_data[i] = frame_data[i] >> 1; - } -} - -int AudioFrameOperations::Scale(float left, float right, AudioFrame* frame) { - if (frame->num_channels_ != 2) { - return -1; - } else if (frame->muted()) { - return 0; - } - - int16_t* frame_data = frame->mutable_data(); - for (size_t i = 0; i < frame->samples_per_channel_; i++) { - frame_data[2 * i] = static_cast(left * frame_data[2 * i]); - frame_data[2 * i + 1] = static_cast(right * frame_data[2 * i + 1]); - } - return 0; -} - int AudioFrameOperations::ScaleWithSat(float scale, AudioFrame* frame) { if (frame->muted()) { return 0; @@ -287,7 +209,7 @@ int AudioFrameOperations::ScaleWithSat(float scale, AudioFrame* frame) { int16_t* frame_data = frame->mutable_data(); for (size_t i = 0; i < frame->samples_per_channel_ * frame->num_channels_; i++) { - frame_data[i] = rtc::saturated_cast(scale * frame_data[i]); + frame_data[i] = saturated_cast(scale * frame_data[i]); } return 0; } diff --git a/audio/utility/audio_frame_operations.h b/audio/utility/audio_frame_operations.h index 2a5f29f4f5..41ea05edf3 100644 --- a/audio/utility/audio_frame_operations.h +++ b/audio/utility/audio_frame_operations.h @@ -15,6 +15,7 @@ #include #include "absl/base/attributes.h" +#include "api/array_view.h" #include "api/audio/audio_frame.h" namespace webrtc { @@ -24,33 +25,11 @@ namespace webrtc { // than a class. class AudioFrameOperations { public: - // Add samples in `frame_to_add` with samples in `result_frame` - // putting the results in `results_frame`. The fields - // `vad_activity_` and `speech_type_` of the result frame are - // updated. If `result_frame` is empty (`samples_per_channel_`==0), - // the samples in `frame_to_add` are added to it. The number of - // channels and number of samples per channel must match except when - // `result_frame` is empty. - static void Add(const AudioFrame& frame_to_add, AudioFrame* result_frame); - - // `frame.num_channels_` will be updated. This version checks for sufficient - // buffer size and that `num_channels_` is mono. Use UpmixChannels - // instead. TODO(bugs.webrtc.org/8649): remove. - ABSL_DEPRECATED("bugs.webrtc.org/8649") - static int MonoToStereo(AudioFrame* frame); - - // `frame.num_channels_` will be updated. This version checks that - // `num_channels_` is stereo. Use DownmixChannels - // instead. TODO(bugs.webrtc.org/8649): remove. - ABSL_DEPRECATED("bugs.webrtc.org/8649") - static int StereoToMono(AudioFrame* frame); - // Downmixes 4 channels `src_audio` to stereo `dst_audio`. 
This is an in-place // operation, meaning `src_audio` and `dst_audio` may point to the same // buffer. - static void QuadToStereo(const int16_t* src_audio, - size_t samples_per_channel, - int16_t* dst_audio); + static void QuadToStereo(InterleavedView src_audio, + InterleavedView dst_audio); // `frame.num_channels_` will be updated. This version checks that // `num_channels_` is 4 channels. @@ -60,11 +39,8 @@ class AudioFrameOperations { // This is an in-place operation, meaning `src_audio` and `dst_audio` // may point to the same buffer. Supported channel combinations are // Stereo to Mono, Quad to Mono, and Quad to Stereo. - static void DownmixChannels(const int16_t* src_audio, - size_t src_channels, - size_t samples_per_channel, - size_t dst_channels, - int16_t* dst_audio); + static void DownmixChannels(InterleavedView src_audio, + InterleavedView dst_audio); // `frame.num_channels_` will be updated. This version checks that // `num_channels_` and `dst_channels` are valid and performs relevant downmix. @@ -94,11 +70,6 @@ class AudioFrameOperations { // Zero out contents of frame. static void Mute(AudioFrame* frame); - // Halve samples in `frame`. - static void ApplyHalfGain(AudioFrame* frame); - - static int Scale(float left, float right, AudioFrame* frame); - static int ScaleWithSat(float scale, AudioFrame* frame); }; diff --git a/audio/utility/audio_frame_operations_unittest.cc b/audio/utility/audio_frame_operations_unittest.cc index 1a2c16e45f..feb86e8de7 100644 --- a/audio/utility/audio_frame_operations_unittest.cc +++ b/audio/utility/audio_frame_operations_unittest.cc @@ -18,13 +18,10 @@ namespace { class AudioFrameOperationsTest : public ::testing::Test { protected: - AudioFrameOperationsTest() { - // Set typical values. - frame_.samples_per_channel_ = 320; - frame_.num_channels_ = 2; - } + AudioFrameOperationsTest() = default; - AudioFrame frame_; + // Set typical values. 
+ AudioFrame frame_{/*sample_rate=*/32000, /*num_channels*/ 2}; }; class AudioFrameOperationsDeathTest : public AudioFrameOperationsTest {}; @@ -34,7 +31,8 @@ void SetFrameData(int16_t ch1, int16_t ch3, int16_t ch4, AudioFrame* frame) { - int16_t* frame_data = frame->mutable_data(); + InterleavedView frame_data = + frame->mutable_data(frame->samples_per_channel_, 4); for (size_t i = 0; i < frame->samples_per_channel_ * 4; i += 4) { frame_data[i] = ch1; frame_data[i + 1] = ch2; @@ -44,7 +42,8 @@ void SetFrameData(int16_t ch1, } void SetFrameData(int16_t left, int16_t right, AudioFrame* frame) { - int16_t* frame_data = frame->mutable_data(); + InterleavedView frame_data = + frame->mutable_data(frame->samples_per_channel_, 2); for (size_t i = 0; i < frame->samples_per_channel_ * 2; i += 2) { frame_data[i] = left; frame_data[i + 1] = right; @@ -52,7 +51,8 @@ void SetFrameData(int16_t left, int16_t right, AudioFrame* frame) { } void SetFrameData(int16_t data, AudioFrame* frame) { - int16_t* frame_data = frame->mutable_data(); + InterleavedView frame_data = + frame->mutable_data(frame->samples_per_channel_, 1); for (size_t i = 0; i < frame->samples_per_channel_ * frame->num_channels_; i++) { frame_data[i] = data; @@ -60,15 +60,18 @@ void SetFrameData(int16_t data, AudioFrame* frame) { } void VerifyFramesAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) { - EXPECT_EQ(frame1.num_channels_, frame2.num_channels_); - EXPECT_EQ(frame1.samples_per_channel_, frame2.samples_per_channel_); + ASSERT_EQ(frame1.num_channels_, frame2.num_channels_); + ASSERT_EQ(frame1.samples_per_channel_, frame2.samples_per_channel_); + EXPECT_EQ(frame1.muted(), frame2.muted()); const int16_t* frame1_data = frame1.data(); const int16_t* frame2_data = frame2.data(); + // TODO(tommi): Use sample_count() or data_view(). for (size_t i = 0; i < frame1.samples_per_channel_ * frame1.num_channels_; i++) { EXPECT_EQ(frame1_data[i], frame2_data[i]); + if (frame1_data[i] != frame2_data[i]) + break; // To avoid spamming the log. 
} - EXPECT_EQ(frame1.muted(), frame2.muted()); } void InitFrame(AudioFrame* frame, @@ -76,17 +79,16 @@ void InitFrame(AudioFrame* frame, size_t samples_per_channel, int16_t left_data, int16_t right_data) { - RTC_DCHECK(frame); RTC_DCHECK_GE(2, channels); RTC_DCHECK_GE(AudioFrame::kMaxDataSizeSamples, samples_per_channel * channels); frame->samples_per_channel_ = samples_per_channel; - frame->num_channels_ = channels; if (channels == 2) { SetFrameData(left_data, right_data, frame); } else if (channels == 1) { SetFrameData(left_data, frame); } + ASSERT_EQ(frame->num_channels_, channels); } int16_t GetChannelData(const AudioFrame& frame, size_t channel, size_t index) { @@ -116,7 +118,6 @@ TEST_F(AudioFrameOperationsDeathTest, MonoToStereoFailsWithBadParameters) { #endif TEST_F(AudioFrameOperationsTest, MonoToStereoSucceeds) { - frame_.num_channels_ = 1; SetFrameData(1, &frame_); AudioFrameOperations::UpmixChannels(2, &frame_); @@ -124,7 +125,6 @@ TEST_F(AudioFrameOperationsTest, MonoToStereoSucceeds) { AudioFrame stereo_frame; stereo_frame.samples_per_channel_ = 320; - stereo_frame.num_channels_ = 2; SetFrameData(1, 1, &stereo_frame); VerifyFramesAreEqual(stereo_frame, frame_); } @@ -151,7 +151,6 @@ TEST_F(AudioFrameOperationsTest, StereoToMonoSucceeds) { AudioFrame mono_frame; mono_frame.samples_per_channel_ = 320; - mono_frame.num_channels_ = 1; SetFrameData(3, &mono_frame); VerifyFramesAreEqual(mono_frame, frame_); } @@ -167,16 +166,12 @@ TEST_F(AudioFrameOperationsTest, StereoToMonoBufferSucceeds) { AudioFrame target_frame; SetFrameData(4, 2, &frame_); - target_frame.num_channels_ = 1; - target_frame.samples_per_channel_ = frame_.samples_per_channel_; - - AudioFrameOperations::DownmixChannels(frame_.data(), 2, - frame_.samples_per_channel_, 1, - target_frame.mutable_data()); + AudioFrameOperations::DownmixChannels( + frame_.data_view(), + target_frame.mutable_data(frame_.samples_per_channel_, 1)); AudioFrame mono_frame; mono_frame.samples_per_channel_ = 320; - mono_frame.num_channels_ = 1; SetFrameData(3, &mono_frame); VerifyFramesAreEqual(mono_frame, target_frame); } @@ -187,13 +182,11 @@ TEST_F(AudioFrameOperationsTest, StereoToMonoDoesNotWrapAround) { EXPECT_EQ(1u, frame_.num_channels_); AudioFrame mono_frame; mono_frame.samples_per_channel_ = 320; - mono_frame.num_channels_ = 1; SetFrameData(-32768, &mono_frame); VerifyFramesAreEqual(mono_frame, frame_); } TEST_F(AudioFrameOperationsTest, QuadToMonoSucceeds) { - frame_.num_channels_ = 4; SetFrameData(4, 2, 6, 8, &frame_); AudioFrameOperations::DownmixChannels(1, &frame_); @@ -201,7 +194,6 @@ TEST_F(AudioFrameOperationsTest, QuadToMonoSucceeds) { AudioFrame mono_frame; mono_frame.samples_per_channel_ = 320; - mono_frame.num_channels_ = 1; SetFrameData(5, &mono_frame); VerifyFramesAreEqual(mono_frame, frame_); } @@ -216,31 +208,24 @@ TEST_F(AudioFrameOperationsTest, QuadToMonoMuted) { TEST_F(AudioFrameOperationsTest, QuadToMonoBufferSucceeds) { AudioFrame target_frame; - frame_.num_channels_ = 4; SetFrameData(4, 2, 6, 8, &frame_); - target_frame.num_channels_ = 1; - target_frame.samples_per_channel_ = frame_.samples_per_channel_; - - AudioFrameOperations::DownmixChannels(frame_.data(), 4, - frame_.samples_per_channel_, 1, - target_frame.mutable_data()); + AudioFrameOperations::DownmixChannels( + frame_.data_view(), + target_frame.mutable_data(frame_.samples_per_channel_, 1)); AudioFrame mono_frame; mono_frame.samples_per_channel_ = 320; - mono_frame.num_channels_ = 1; SetFrameData(5, &mono_frame); VerifyFramesAreEqual(mono_frame, 
target_frame); } TEST_F(AudioFrameOperationsTest, QuadToMonoDoesNotWrapAround) { - frame_.num_channels_ = 4; SetFrameData(-32768, -32768, -32768, -32768, &frame_); AudioFrameOperations::DownmixChannels(1, &frame_); EXPECT_EQ(1u, frame_.num_channels_); AudioFrame mono_frame; mono_frame.samples_per_channel_ = 320; - mono_frame.num_channels_ = 1; SetFrameData(-32768, &mono_frame); VerifyFramesAreEqual(mono_frame, frame_); } @@ -253,13 +238,11 @@ TEST_F(AudioFrameOperationsTest, QuadToStereoFailsWithBadParameters) { } TEST_F(AudioFrameOperationsTest, QuadToStereoSucceeds) { - frame_.num_channels_ = 4; SetFrameData(4, 2, 6, 8, &frame_); EXPECT_EQ(0, AudioFrameOperations::QuadToStereo(&frame_)); AudioFrame stereo_frame; stereo_frame.samples_per_channel_ = 320; - stereo_frame.num_channels_ = 2; SetFrameData(3, 7, &stereo_frame); VerifyFramesAreEqual(stereo_frame, frame_); } @@ -273,29 +256,23 @@ TEST_F(AudioFrameOperationsTest, QuadToStereoMuted) { TEST_F(AudioFrameOperationsTest, QuadToStereoBufferSucceeds) { AudioFrame target_frame; - frame_.num_channels_ = 4; SetFrameData(4, 2, 6, 8, &frame_); - target_frame.num_channels_ = 2; - target_frame.samples_per_channel_ = frame_.samples_per_channel_; - - AudioFrameOperations::QuadToStereo(frame_.data(), frame_.samples_per_channel_, - target_frame.mutable_data()); + AudioFrameOperations::QuadToStereo( + frame_.data_view(), + target_frame.mutable_data(frame_.samples_per_channel_, 2)); AudioFrame stereo_frame; stereo_frame.samples_per_channel_ = 320; - stereo_frame.num_channels_ = 2; SetFrameData(3, 7, &stereo_frame); VerifyFramesAreEqual(stereo_frame, target_frame); } TEST_F(AudioFrameOperationsTest, QuadToStereoDoesNotWrapAround) { - frame_.num_channels_ = 4; SetFrameData(-32768, -32768, -32768, -32768, &frame_); EXPECT_EQ(0, AudioFrameOperations::QuadToStereo(&frame_)); AudioFrame stereo_frame; stereo_frame.samples_per_channel_ = 320; - stereo_frame.num_channels_ = 2; SetFrameData(-32768, -32768, &stereo_frame); VerifyFramesAreEqual(stereo_frame, frame_); } @@ -305,7 +282,6 @@ TEST_F(AudioFrameOperationsTest, SwapStereoChannelsSucceedsOnStereo) { AudioFrame swapped_frame; swapped_frame.samples_per_channel_ = 320; - swapped_frame.num_channels_ = 2; SetFrameData(1, 0, &swapped_frame); AudioFrameOperations::SwapStereoChannels(&frame_); @@ -319,9 +295,9 @@ TEST_F(AudioFrameOperationsTest, SwapStereoChannelsMuted) { } TEST_F(AudioFrameOperationsTest, SwapStereoChannelsFailsOnMono) { - frame_.num_channels_ = 1; // Set data to "stereo", despite it being a mono frame. SetFrameData(0, 1, &frame_); + frame_.num_channels_ = 1; // Reset to mono after SetFrameData(). 
AudioFrame orig_frame; orig_frame.CopyFrom(frame_); @@ -336,7 +312,6 @@ TEST_F(AudioFrameOperationsTest, MuteDisabled) { AudioFrame muted_frame; muted_frame.samples_per_channel_ = 320; - muted_frame.num_channels_ = 2; SetFrameData(1000, -1000, &muted_frame); VerifyFramesAreEqual(muted_frame, frame_); } @@ -468,79 +443,17 @@ TEST_F(AudioFrameOperationsTest, MuteEndAlreadyMuted) { EXPECT_TRUE(frame_.muted()); } -TEST_F(AudioFrameOperationsTest, ApplyHalfGainSucceeds) { - SetFrameData(2, &frame_); - - AudioFrame half_gain_frame; - half_gain_frame.num_channels_ = frame_.num_channels_; - half_gain_frame.samples_per_channel_ = frame_.samples_per_channel_; - SetFrameData(1, &half_gain_frame); - - AudioFrameOperations::ApplyHalfGain(&frame_); - VerifyFramesAreEqual(half_gain_frame, frame_); -} - -TEST_F(AudioFrameOperationsTest, ApplyHalfGainMuted) { - ASSERT_TRUE(frame_.muted()); - AudioFrameOperations::ApplyHalfGain(&frame_); - EXPECT_TRUE(frame_.muted()); -} - -// TODO(andrew): should not allow negative scales. -TEST_F(AudioFrameOperationsTest, DISABLED_ScaleFailsWithBadParameters) { - frame_.num_channels_ = 1; - EXPECT_EQ(-1, AudioFrameOperations::Scale(1.0, 1.0, &frame_)); - - frame_.num_channels_ = 3; - EXPECT_EQ(-1, AudioFrameOperations::Scale(1.0, 1.0, &frame_)); - - frame_.num_channels_ = 2; - EXPECT_EQ(-1, AudioFrameOperations::Scale(-1.0, 1.0, &frame_)); - EXPECT_EQ(-1, AudioFrameOperations::Scale(1.0, -1.0, &frame_)); -} - -// TODO(andrew): fix the wraparound bug. We should always saturate. -TEST_F(AudioFrameOperationsTest, DISABLED_ScaleDoesNotWrapAround) { - SetFrameData(4000, -4000, &frame_); - EXPECT_EQ(0, AudioFrameOperations::Scale(10.0, 10.0, &frame_)); - - AudioFrame clipped_frame; - clipped_frame.samples_per_channel_ = 320; - clipped_frame.num_channels_ = 2; - SetFrameData(32767, -32768, &clipped_frame); - VerifyFramesAreEqual(clipped_frame, frame_); -} - -TEST_F(AudioFrameOperationsTest, ScaleSucceeds) { - SetFrameData(1, -1, &frame_); - EXPECT_EQ(0, AudioFrameOperations::Scale(2.0, 3.0, &frame_)); - - AudioFrame scaled_frame; - scaled_frame.samples_per_channel_ = 320; - scaled_frame.num_channels_ = 2; - SetFrameData(2, -3, &scaled_frame); - VerifyFramesAreEqual(scaled_frame, frame_); -} - -TEST_F(AudioFrameOperationsTest, ScaleMuted) { - ASSERT_TRUE(frame_.muted()); - EXPECT_EQ(0, AudioFrameOperations::Scale(2.0, 3.0, &frame_)); - EXPECT_TRUE(frame_.muted()); -} - // TODO(andrew): should fail with a negative scale. 
TEST_F(AudioFrameOperationsTest, DISABLED_ScaleWithSatFailsWithBadParameters) { EXPECT_EQ(-1, AudioFrameOperations::ScaleWithSat(-1.0, &frame_)); } TEST_F(AudioFrameOperationsTest, ScaleWithSatDoesNotWrapAround) { - frame_.num_channels_ = 1; SetFrameData(4000, &frame_); EXPECT_EQ(0, AudioFrameOperations::ScaleWithSat(10.0, &frame_)); AudioFrame clipped_frame; clipped_frame.samples_per_channel_ = 320; - clipped_frame.num_channels_ = 1; SetFrameData(32767, &clipped_frame); VerifyFramesAreEqual(clipped_frame, frame_); @@ -551,13 +464,11 @@ TEST_F(AudioFrameOperationsTest, ScaleWithSatDoesNotWrapAround) { } TEST_F(AudioFrameOperationsTest, ScaleWithSatSucceeds) { - frame_.num_channels_ = 1; SetFrameData(1, &frame_); EXPECT_EQ(0, AudioFrameOperations::ScaleWithSat(2.0, &frame_)); AudioFrame scaled_frame; scaled_frame.samples_per_channel_ = 320; - scaled_frame.num_channels_ = 1; SetFrameData(2, &scaled_frame); VerifyFramesAreEqual(scaled_frame, frame_); } @@ -568,55 +479,5 @@ TEST_F(AudioFrameOperationsTest, ScaleWithSatMuted) { EXPECT_TRUE(frame_.muted()); } -TEST_F(AudioFrameOperationsTest, AddingXToEmptyGivesX) { - // When samples_per_channel_ is 0, the frame counts as empty and zero. - AudioFrame frame_to_add_to; - frame_to_add_to.mutable_data(); // Unmute the frame. - ASSERT_FALSE(frame_to_add_to.muted()); - frame_to_add_to.samples_per_channel_ = 0; - frame_to_add_to.num_channels_ = frame_.num_channels_; - - SetFrameData(1000, &frame_); - AudioFrameOperations::Add(frame_, &frame_to_add_to); - VerifyFramesAreEqual(frame_, frame_to_add_to); -} - -TEST_F(AudioFrameOperationsTest, AddingXToMutedGivesX) { - AudioFrame frame_to_add_to; - ASSERT_TRUE(frame_to_add_to.muted()); - frame_to_add_to.samples_per_channel_ = frame_.samples_per_channel_; - frame_to_add_to.num_channels_ = frame_.num_channels_; - - SetFrameData(1000, &frame_); - AudioFrameOperations::Add(frame_, &frame_to_add_to); - VerifyFramesAreEqual(frame_, frame_to_add_to); -} - -TEST_F(AudioFrameOperationsTest, AddingMutedToXGivesX) { - AudioFrame frame_to_add_to; - frame_to_add_to.samples_per_channel_ = frame_.samples_per_channel_; - frame_to_add_to.num_channels_ = frame_.num_channels_; - SetFrameData(1000, &frame_to_add_to); - - AudioFrame frame_copy; - frame_copy.CopyFrom(frame_to_add_to); - - ASSERT_TRUE(frame_.muted()); - AudioFrameOperations::Add(frame_, &frame_to_add_to); - VerifyFramesAreEqual(frame_copy, frame_to_add_to); -} - -TEST_F(AudioFrameOperationsTest, AddingTwoFramesProducesTheirSum) { - AudioFrame frame_to_add_to; - frame_to_add_to.samples_per_channel_ = frame_.samples_per_channel_; - frame_to_add_to.num_channels_ = frame_.num_channels_; - SetFrameData(1000, &frame_to_add_to); - SetFrameData(2000, &frame_); - - AudioFrameOperations::Add(frame_, &frame_to_add_to); - SetFrameData(frame_.data()[0] + 1000, &frame_); - VerifyFramesAreEqual(frame_, frame_to_add_to); -} - } // namespace } // namespace webrtc diff --git a/audio/utility/channel_mixer.cc b/audio/utility/channel_mixer.cc index 0f1e663873..7049e85503 100644 --- a/audio/utility/channel_mixer.cc +++ b/audio/utility/channel_mixer.cc @@ -18,17 +18,26 @@ namespace webrtc { ChannelMixer::ChannelMixer(ChannelLayout input_layout, - ChannelLayout output_layout) + size_t input_channels, + ChannelLayout output_layout, + size_t output_channels) : input_layout_(input_layout), output_layout_(output_layout), - input_channels_(ChannelLayoutToChannelCount(input_layout)), - output_channels_(ChannelLayoutToChannelCount(output_layout)) { + input_channels_(input_channels), + 
output_channels_(output_channels) { // Create the transformation matrix. ChannelMixingMatrix matrix_builder(input_layout_, input_channels_, output_layout_, output_channels_); remapping_ = matrix_builder.CreateTransformationMatrix(&matrix_); } +ChannelMixer::ChannelMixer(ChannelLayout input_layout, + ChannelLayout output_layout) + : ChannelMixer(input_layout, + ChannelLayoutToChannelCount(input_layout), + output_layout, + ChannelLayoutToChannelCount(output_layout)) {} + ChannelMixer::~ChannelMixer() = default; void ChannelMixer::Transform(AudioFrame* frame) { @@ -49,8 +58,7 @@ void ChannelMixer::Transform(AudioFrame* frame) { // Only change the number of output channels if the audio frame is muted. if (frame->muted()) { - frame->num_channels_ = output_channels_; - frame->channel_layout_ = output_layout_; + frame->SetLayoutAndNumChannels(output_layout_, output_channels_); return; } @@ -82,13 +90,12 @@ void ChannelMixer::Transform(AudioFrame* frame) { } const size_t index = output_channels_ * i + output_ch; RTC_CHECK_LE(index, audio_vector_size_); - out_audio[index] = rtc::saturated_cast(acc_value); + out_audio[index] = saturated_cast(acc_value); } } // Update channel information. - frame->num_channels_ = output_channels_; - frame->channel_layout_ = output_layout_; + frame->SetLayoutAndNumChannels(output_layout_, output_channels_); // Copy the output result to the audio frame in `frame`. memcpy( diff --git a/audio/utility/channel_mixer.h b/audio/utility/channel_mixer.h index 2dea8eb45b..d8a0d3a254 100644 --- a/audio/utility/channel_mixer.h +++ b/audio/utility/channel_mixer.h @@ -35,6 +35,10 @@ class ChannelMixer { // (1 / sqrt(2)) gain to each. static constexpr float kHalfPower = 0.707106781186547524401f; + ChannelMixer(ChannelLayout input_layout, + size_t input_channels, + ChannelLayout output_layout, + size_t output_channels); ChannelMixer(ChannelLayout input_layout, ChannelLayout output_layout); ~ChannelMixer(); diff --git a/audio/utility/channel_mixer_unittest.cc b/audio/utility/channel_mixer_unittest.cc index 94cb1ac7e3..7aa3810936 100644 --- a/audio/utility/channel_mixer_unittest.cc +++ b/audio/utility/channel_mixer_unittest.cc @@ -163,7 +163,7 @@ TEST_F(ChannelMixerTest, ConstructAllPossibleLayouts) { continue; } - rtc::StringBuilder ss; + StringBuilder ss; ss << "Input Layout: " << input_layout << ", Output Layout: " << output_layout; SCOPED_TRACE(ss.str()); diff --git a/audio/utility/channel_mixing_matrix.cc b/audio/utility/channel_mixing_matrix.cc index 1244653f63..83fee65958 100644 --- a/audio/utility/channel_mixing_matrix.cc +++ b/audio/utility/channel_mixing_matrix.cc @@ -17,16 +17,24 @@ #include "audio/utility/channel_mixer.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { -// Selects the default usage of VoIP channel mapping adjustments. -bool UseChannelMappingAdjustmentsByDefault() { - return !field_trial::IsEnabled( - "WebRTC-VoIPChannelRemixingAdjustmentKillSwitch"); +ChannelLayout CheckInputLayout(ChannelLayout input_layout, + ChannelLayout output_layout) { + // Special case for 5.0, 5.1 with back channels when upmixed to 7.0, 7.1, + // which should map the back LR to side LR. 
+ if (input_layout == CHANNEL_LAYOUT_5_0_BACK && + output_layout == CHANNEL_LAYOUT_7_0) { + return CHANNEL_LAYOUT_5_0; + } else if (input_layout == CHANNEL_LAYOUT_5_1_BACK && + output_layout == CHANNEL_LAYOUT_7_1) { + return CHANNEL_LAYOUT_5_1; + } + + return input_layout; } } // namespace @@ -66,9 +74,7 @@ ChannelMixingMatrix::ChannelMixingMatrix(ChannelLayout input_layout, int input_channels, ChannelLayout output_layout, int output_channels) - : use_voip_channel_mapping_adjustments_( - UseChannelMappingAdjustmentsByDefault()), - input_layout_(input_layout), + : input_layout_(CheckInputLayout(input_layout, output_layout)), input_channels_(input_channels), output_layout_(output_layout), output_channels_(output_channels) { @@ -80,16 +86,6 @@ ChannelMixingMatrix::ChannelMixingMatrix(ChannelLayout input_layout, ValidateLayout(input_layout); if (output_layout != CHANNEL_LAYOUT_DISCRETE) ValidateLayout(output_layout); - - // Special case for 5.0, 5.1 with back channels when upmixed to 7.0, 7.1, - // which should map the back LR to side LR. - if (input_layout_ == CHANNEL_LAYOUT_5_0_BACK && - output_layout_ == CHANNEL_LAYOUT_7_0) { - input_layout_ = CHANNEL_LAYOUT_5_0; - } else if (input_layout_ == CHANNEL_LAYOUT_5_1_BACK && - output_layout_ == CHANNEL_LAYOUT_7_1) { - input_layout_ = CHANNEL_LAYOUT_5_1; - } } ChannelMixingMatrix::~ChannelMixingMatrix() = default; @@ -118,8 +114,7 @@ bool ChannelMixingMatrix::CreateTransformationMatrix( } // If specified, use adjusted channel mapping for the VoIP scenario. - if (use_voip_channel_mapping_adjustments_ && - input_layout_ == CHANNEL_LAYOUT_MONO && + if (input_layout_ == CHANNEL_LAYOUT_MONO && ChannelLayoutToChannelCount(output_layout_) >= 2) { // Only place the mono input in the front left and right channels. (*matrix_)[0][0] = 1.f; diff --git a/audio/utility/channel_mixing_matrix.h b/audio/utility/channel_mixing_matrix.h index ee00860846..0f34366cda 100644 --- a/audio/utility/channel_mixing_matrix.h +++ b/audio/utility/channel_mixing_matrix.h @@ -36,16 +36,14 @@ class ChannelMixingMatrix { bool CreateTransformationMatrix(std::vector>* matrix); private: - const bool use_voip_channel_mapping_adjustments_; - // Result transformation of input channels to output channels std::vector>* matrix_; // Input and output channel layout provided during construction. - ChannelLayout input_layout_; - int input_channels_; - ChannelLayout output_layout_; - int output_channels_; + const ChannelLayout input_layout_; + const int input_channels_; + const ChannelLayout output_layout_; + const int output_channels_; // Helper variable for tracking which inputs are currently unaccounted, // should be empty after construction completes. 
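For reference, a minimal sketch of how the reworked ChannelMixingMatrix above can be driven, based only on signatures and enum values visible in this patch; the helper name is hypothetical, include paths are approximate, and the snippet is not part of the change. With the kill-switch field trial removed, the VoIP-style mono mapping is the unconditional behaviour that the renamed tests below check.

// Illustrative sketch only; not part of this patch. Assumes the
// ChannelMixingMatrix API shown in the hunks above.
#include <vector>

#include "audio/utility/channel_mixing_matrix.h"
#include "rtc_base/checks.h"

namespace webrtc {

std::vector<std::vector<float>> BuildMonoToStereoMatrix() {
  ChannelMixingMatrix matrix_builder(
      CHANNEL_LAYOUT_MONO, ChannelLayoutToChannelCount(CHANNEL_LAYOUT_MONO),
      CHANNEL_LAYOUT_STEREO,
      ChannelLayoutToChannelCount(CHANNEL_LAYOUT_STEREO));
  std::vector<std::vector<float>> matrix;
  // remapping == true indicates each output channel is fed by at most one
  // input channel with unit gain, so the mix reduces to copying samples.
  const bool remapping = matrix_builder.CreateTransformationMatrix(&matrix);
  RTC_DCHECK(remapping);
  // Expected result (see the MonoToStereo test below): matrix[0][0] == 1.0f
  // and matrix[1][0] == 1.0f, i.e. mono is copied to front left/right only.
  return matrix;
}

}  // namespace webrtc

The new four-argument ChannelMixer constructor added earlier in the diff wraps exactly this matrix-building step and then applies the matrix in Transform().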
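Similarly, the audio_frame_operations_unittest.cc hunks earlier in this diff switch from raw int16_t* access to InterleavedView-based accessors. A small sketch of that usage pattern follows; the helper is hypothetical, and the note about mutable_data() configuring the frame is inferred from the updated tests rather than stated in the patch.

// Illustrative sketch only; not part of this patch.
#include "api/audio/audio_frame.h"
#include "audio/utility/audio_frame_operations.h"

namespace webrtc {

void DownmixStereoToMono(AudioFrame& stereo, AudioFrame* mono) {
  // mutable_data(samples_per_channel, num_channels) returns an
  // InterleavedView<int16_t> and, as the updated tests above imply, also sets
  // the destination frame's sample and channel counts, which is why the
  // explicit num_channels_ assignments were removed from the tests.
  AudioFrameOperations::DownmixChannels(
      stereo.data_view(),
      mono->mutable_data(stereo.samples_per_channel_, /*num_channels=*/1));
}

}  // namespace webrtc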
diff --git a/audio/utility/channel_mixing_matrix_unittest.cc b/audio/utility/channel_mixing_matrix_unittest.cc index a4efb4fd38..d7a7f3f038 100644 --- a/audio/utility/channel_mixing_matrix_unittest.cc +++ b/audio/utility/channel_mixing_matrix_unittest.cc @@ -16,7 +16,6 @@ #include "rtc_base/arraysize.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" -#include "test/field_trial.h" #include "test/gtest.h" namespace webrtc { @@ -44,7 +43,7 @@ TEST(ChannelMixingMatrixTest, ConstructAllPossibleLayouts) { continue; } - rtc::StringBuilder ss; + StringBuilder ss; ss << "Input Layout: " << input_layout << ", Output Layout: " << output_layout; SCOPED_TRACE(ss.str()); @@ -125,32 +124,7 @@ TEST(ChannelMixingMatrixTest, MonoToStereo) { EXPECT_EQ(1.0f, matrix[1][0]); } -TEST(ChannelMixingMatrixTest, MonoToTwoOneWithoutVoIPAdjustments) { - test::ScopedFieldTrials field_trials( - "WebRTC-VoIPChannelRemixingAdjustmentKillSwitch/Enabled/"); - ChannelLayout input_layout = CHANNEL_LAYOUT_MONO; - ChannelLayout output_layout = CHANNEL_LAYOUT_2_1; - ChannelMixingMatrix matrix_builder( - input_layout, ChannelLayoutToChannelCount(input_layout), output_layout, - ChannelLayoutToChannelCount(output_layout)); - std::vector> matrix; - bool remapping = matrix_builder.CreateTransformationMatrix(&matrix); - - // Input: mono - // CENTER - // Output: 2.1 FRONT_LEFT 1 - // FRONT_RIGHT 1 - // BACK_CENTER 0 - // - EXPECT_FALSE(remapping); - EXPECT_EQ(3u, matrix.size()); - EXPECT_EQ(1u, matrix[0].size()); - EXPECT_EQ(1.0f, matrix[0][0]); - EXPECT_EQ(1.0f, matrix[1][0]); - EXPECT_EQ(0.0f, matrix[2][0]); -} - -TEST(ChannelMixingMatrixTest, MonoToTwoOneWithVoIPAdjustments) { +TEST(ChannelMixingMatrixTest, MonoToTwoOne) { ChannelLayout input_layout = CHANNEL_LAYOUT_MONO; ChannelLayout output_layout = CHANNEL_LAYOUT_2_1; ChannelMixingMatrix matrix_builder( @@ -173,39 +147,7 @@ TEST(ChannelMixingMatrixTest, MonoToTwoOneWithVoIPAdjustments) { EXPECT_EQ(0.0f, matrix[2][0]); } -TEST(ChannelMixingMatrixTest, MonoToFiveOneWithoutVoIPAdjustments) { - test::ScopedFieldTrials field_trials( - "WebRTC-VoIPChannelRemixingAdjustmentKillSwitch/Enabled/"); - ChannelLayout input_layout = CHANNEL_LAYOUT_MONO; - ChannelLayout output_layout = CHANNEL_LAYOUT_5_1; - const int input_channels = ChannelLayoutToChannelCount(input_layout); - const int output_channels = ChannelLayoutToChannelCount(output_layout); - ChannelMixingMatrix matrix_builder(input_layout, input_channels, - output_layout, output_channels); - std::vector> matrix; - bool remapping = matrix_builder.CreateTransformationMatrix(&matrix); - // Input: mono - // CENTER - // Output: 5.1 LEFT 0 - // RIGHT 0 - // CENTER 1 - // LFE 0 - // SIDE_LEFT 0 - // SIDE_RIGHT 0 - // - EXPECT_TRUE(remapping); - EXPECT_EQ(static_cast(output_channels), matrix.size()); - for (int n = 0; n < output_channels; n++) { - EXPECT_EQ(static_cast(input_channels), matrix[n].size()); - if (n == CENTER) { - EXPECT_EQ(1.0f, matrix[CENTER][0]); - } else { - EXPECT_EQ(0.0f, matrix[n][0]); - } - } -} - -TEST(ChannelMixingMatrixTest, MonoToFiveOneWithVoIPAdjustments) { +TEST(ChannelMixingMatrixTest, MonoToFiveOne) { ChannelLayout input_layout = CHANNEL_LAYOUT_MONO; ChannelLayout output_layout = CHANNEL_LAYOUT_5_1; const int input_channels = ChannelLayoutToChannelCount(input_layout); @@ -235,41 +177,7 @@ TEST(ChannelMixingMatrixTest, MonoToFiveOneWithVoIPAdjustments) { } } -TEST(ChannelMixingMatrixTest, MonoToSevenOneWithoutVoIPAdjustments) { - test::ScopedFieldTrials field_trials( - 
"WebRTC-VoIPChannelRemixingAdjustmentKillSwitch/Enabled/"); - ChannelLayout input_layout = CHANNEL_LAYOUT_MONO; - ChannelLayout output_layout = CHANNEL_LAYOUT_7_1; - const int input_channels = ChannelLayoutToChannelCount(input_layout); - const int output_channels = ChannelLayoutToChannelCount(output_layout); - ChannelMixingMatrix matrix_builder(input_layout, input_channels, - output_layout, output_channels); - std::vector> matrix; - bool remapping = matrix_builder.CreateTransformationMatrix(&matrix); - // Input: mono - // CENTER - // Output: 7.1 LEFT 0 - // RIGHT 0 - // CENTER 1 - // LFE 0 - // SIDE_LEFT 0 - // SIDE_RIGHT 0 - // BACK_LEFT 0 - // BACK_RIGHT 0 - // - EXPECT_TRUE(remapping); - EXPECT_EQ(static_cast(output_channels), matrix.size()); - for (int n = 0; n < output_channels; n++) { - EXPECT_EQ(static_cast(input_channels), matrix[n].size()); - if (n == CENTER) { - EXPECT_EQ(1.0f, matrix[CENTER][0]); - } else { - EXPECT_EQ(0.0f, matrix[n][0]); - } - } -} - -TEST(ChannelMixingMatrixTest, MonoToSevenOneWithVoIPAdjustments) { +TEST(ChannelMixingMatrixTest, MonoToSevenOne) { ChannelLayout input_layout = CHANNEL_LAYOUT_MONO; ChannelLayout output_layout = CHANNEL_LAYOUT_7_1; const int input_channels = ChannelLayoutToChannelCount(input_layout); diff --git a/audio/voip/BUILD.gn b/audio/voip/BUILD.gn index e807e2276b..3754bbd266 100644 --- a/audio/voip/BUILD.gn +++ b/audio/voip/BUILD.gn @@ -17,17 +17,17 @@ rtc_library("voip_core") { ":audio_channel", "..:audio", "../../api:scoped_refptr", + "../../api/audio:audio_device", + "../../api/audio:audio_processing", "../../api/audio_codecs:audio_codecs_api", + "../../api/environment", "../../api/task_queue", "../../api/voip:voip_api", - "../../modules/audio_device:audio_device_api", "../../modules/audio_mixer:audio_mixer_impl", - "../../modules/audio_processing:api", "../../rtc_base:criticalsection", "../../rtc_base:logging", "../../rtc_base/synchronization:mutex", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("audio_channel") { @@ -42,7 +42,6 @@ rtc_library("audio_channel") { "../../api/audio_codecs:audio_codecs_api", "../../api/task_queue", "../../api/voip:voip_api", - "../../modules/audio_device:audio_device_api", "../../modules/rtp_rtcp", "../../modules/rtp_rtcp:rtp_rtcp_format", "../../rtc_base:criticalsection", @@ -64,6 +63,9 @@ rtc_library("audio_ingress") { "../../api:transport_api", "../../api/audio:audio_mixer_api", "../../api/audio_codecs:audio_codecs_api", + "../../api/environment", + "../../api/neteq:default_neteq_factory", + "../../api/neteq:neteq_api", "../../api/voip:voip_api", "../../modules/audio_coding", "../../modules/rtp_rtcp", @@ -76,7 +78,6 @@ rtc_library("audio_ingress") { "../../rtc_base/synchronization:mutex", "../utility:audio_frame_operations", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("audio_egress") { @@ -88,13 +89,13 @@ rtc_library("audio_egress") { "..:audio", "../../api:sequence_checker", "../../api/audio_codecs:audio_codecs_api", + "../../api/environment", "../../api/task_queue", "../../call:audio_sender_interface", "../../modules/audio_coding", "../../modules/rtp_rtcp", "../../modules/rtp_rtcp:rtp_rtcp_format", "../../rtc_base:logging", - "../../rtc_base:rtc_task_queue", "../../rtc_base:timeutils", "../../rtc_base/synchronization:mutex", "../../rtc_base/system:no_unique_address", diff --git a/audio/voip/audio_channel.cc b/audio/voip/audio_channel.cc index a70e33ec38..81a01b670c 100644 --- a/audio/voip/audio_channel.cc +++ 
b/audio/voip/audio_channel.cc @@ -27,37 +27,32 @@ constexpr int kRtcpReportIntervalMs = 5000; } // namespace -AudioChannel::AudioChannel( - Transport* transport, - uint32_t local_ssrc, - TaskQueueFactory* task_queue_factory, - AudioMixer* audio_mixer, - rtc::scoped_refptr decoder_factory) +AudioChannel::AudioChannel(const Environment& env, + Transport* transport, + uint32_t local_ssrc, + AudioMixer* audio_mixer, + scoped_refptr decoder_factory) : audio_mixer_(audio_mixer) { - RTC_DCHECK(task_queue_factory); RTC_DCHECK(audio_mixer); - Clock* clock = Clock::GetRealTimeClock(); - receive_statistics_ = ReceiveStatistics::Create(clock); + receive_statistics_ = ReceiveStatistics::Create(&env.clock()); RtpRtcpInterface::Configuration rtp_config; - rtp_config.clock = clock; rtp_config.audio = true; rtp_config.receive_statistics = receive_statistics_.get(); rtp_config.rtcp_report_interval_ms = kRtcpReportIntervalMs; rtp_config.outgoing_transport = transport; rtp_config.local_media_ssrc = local_ssrc; - rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(rtp_config); + rtp_rtcp_ = std::make_unique(env, rtp_config); rtp_rtcp_->SetSendingMediaStatus(false); rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound); - ingress_ = std::make_unique(rtp_rtcp_.get(), clock, + ingress_ = std::make_unique(env, rtp_rtcp_.get(), receive_statistics_.get(), std::move(decoder_factory)); - egress_ = - std::make_unique(rtp_rtcp_.get(), clock, task_queue_factory); + egress_ = std::make_unique(env, rtp_rtcp_.get()); // Set the instance of audio ingress to be part of audio mixer for ADM to // fetch audio samples to play. diff --git a/audio/voip/audio_channel.h b/audio/voip/audio_channel.h index 7338d9faab..b43c9cd814 100644 --- a/audio/voip/audio_channel.h +++ b/audio/voip/audio_channel.h @@ -29,13 +29,13 @@ namespace webrtc { // AudioChannel represents a single media session and provides APIs over // AudioIngress and AudioEgress. Note that a single RTP stack is shared with // these two classes as it has both sending and receiving capabilities. -class AudioChannel : public rtc::RefCountInterface { +class AudioChannel : public RefCountInterface { public: - AudioChannel(Transport* transport, + AudioChannel(const Environment& env, + Transport* transport, uint32_t local_ssrc, - TaskQueueFactory* task_queue_factory, AudioMixer* audio_mixer, - rtc::scoped_refptr decoder_factory); + scoped_refptr decoder_factory); ~AudioChannel() override; // Set and get ChannelId that this audio channel belongs for debugging and @@ -59,7 +59,7 @@ class AudioChannel : public rtc::RefCountInterface { std::unique_ptr encoder) { egress_->SetEncoder(payload_type, encoder_format, std::move(encoder)); } - absl::optional GetEncoderFormat() const { + std::optional GetEncoderFormat() const { return egress_->GetEncoderFormat(); } void RegisterTelephoneEventType(int rtp_payload_type, int sample_rate_hz) { @@ -72,10 +72,10 @@ class AudioChannel : public rtc::RefCountInterface { // APIs relayed to AudioIngress. 
bool IsPlaying() const { return ingress_->IsPlaying(); } - void ReceivedRTPPacket(rtc::ArrayView rtp_packet) { + void ReceivedRTPPacket(ArrayView rtp_packet) { ingress_->ReceivedRTPPacket(rtp_packet); } - void ReceivedRTCPPacket(rtc::ArrayView rtcp_packet) { + void ReceivedRTCPPacket(ArrayView rtcp_packet) { ingress_->ReceivedRTCPPacket(rtcp_packet); } void SetReceiveCodecs(const std::map& codecs) { diff --git a/audio/voip/audio_egress.cc b/audio/voip/audio_egress.cc index 95a1a3351e..e0fef278b8 100644 --- a/audio/voip/audio_egress.cc +++ b/audio/voip/audio_egress.cc @@ -13,24 +13,28 @@ #include #include +#include "api/sequence_checker.h" #include "rtc_base/logging.h" namespace webrtc { -AudioEgress::AudioEgress(RtpRtcpInterface* rtp_rtcp, - Clock* clock, - TaskQueueFactory* task_queue_factory) +AudioEgress::AudioEgress(const Environment& env, RtpRtcpInterface* rtp_rtcp) : rtp_rtcp_(rtp_rtcp), - rtp_sender_audio_(clock, rtp_rtcp_->RtpSender()), + rtp_sender_audio_(&env.clock(), rtp_rtcp_->RtpSender()), audio_coding_(AudioCodingModule::Create()), - encoder_queue_(task_queue_factory->CreateTaskQueue( + encoder_queue_(env.task_queue_factory().CreateTaskQueue( "AudioEncoder", - TaskQueueFactory::Priority::NORMAL)) { + TaskQueueFactory::Priority::NORMAL)), + encoder_queue_checker_(encoder_queue_.get()) { audio_coding_->RegisterTransportCallback(this); } AudioEgress::~AudioEgress() { audio_coding_->RegisterTransportCallback(nullptr); + + // Delete first to ensure that there are no running tasks when the other + // members are destroyed. + encoder_queue_ = nullptr; } bool AudioEgress::IsSending() const { @@ -73,9 +77,9 @@ void AudioEgress::SendAudioData(std::unique_ptr audio_frame) { RTC_DCHECK_GT(audio_frame->samples_per_channel_, 0); RTC_DCHECK_LE(audio_frame->num_channels_, 8); - encoder_queue_.PostTask( + encoder_queue_->PostTask( [this, audio_frame = std::move(audio_frame)]() mutable { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(&encoder_queue_checker_); if (!rtp_rtcp_->SendingMedia()) { return; } @@ -103,7 +107,7 @@ void AudioEgress::SendAudioData(std::unique_ptr audio_frame) { } encoder_context_.frame_rtp_timestamp_ += - rtc::dchecked_cast(audio_frame->samples_per_channel_); + dchecked_cast(audio_frame->samples_per_channel_); }); } @@ -112,9 +116,9 @@ int32_t AudioEgress::SendData(AudioFrameType frame_type, uint32_t timestamp, const uint8_t* payload_data, size_t payload_size) { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(&encoder_queue_checker_); - rtc::ArrayView payload(payload_data, payload_size); + ArrayView payload(payload_data, payload_size); // Currently we don't get a capture time from downstream modules (ADM, // AudioTransportImpl). 
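For reference, a rough sketch of how an AudioEgress gets wired up after this change, mirroring the audio_channel.cc hunk above and the unit-test updates later in the diff: the Environment supplies the clock and task-queue factory that were previously separate constructor arguments, and encoders come from AudioEncoderFactory::Create(env, format, options). The helper below is hypothetical, include paths are approximate, and it is not part of the patch.

// Illustrative sketch only; not part of this patch.
#include <cstdint>
#include <memory>

#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "api/call/transport.h"
#include "api/environment/environment_factory.h"
#include "audio/voip/audio_egress.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"

namespace webrtc {

// Builds a send-only PCMU egress over `transport`. The same Environment is
// shared by the RTP module and the egress, replacing the Clock* and
// TaskQueueFactory* parameters removed in this patch.
void RunPcmuEgress(Transport* transport, uint32_t local_ssrc) {
  Environment env = CreateEnvironment();  // Default clock + task queue factory.

  RtpRtcpInterface::Configuration rtp_config;
  rtp_config.audio = true;
  rtp_config.outgoing_transport = transport;
  rtp_config.local_media_ssrc = local_ssrc;
  auto rtp_rtcp = std::make_unique<ModuleRtpRtcpImpl2>(env, rtp_config);
  rtp_rtcp->SetSendingStatus(true);

  AudioEgress egress(env, rtp_rtcp.get());
  constexpr int kPcmuPayload = 0;
  const SdpAudioFormat kPcmuFormat = {"pcmu", 8000, 1};
  egress.SetEncoder(kPcmuPayload, kPcmuFormat,
                    CreateBuiltinAudioEncoderFactory()->Create(
                        env, kPcmuFormat, {.payload_type = kPcmuPayload}));
  egress.StartSend();
  // ... feed 10 ms AudioFrames via egress.SendAudioData() ...
  egress.StopSend();
}

}  // namespace webrtc

Declaring the RTP module before the egress keeps it alive until the egress is destroyed, matching the member ordering AudioChannel uses above.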
@@ -175,8 +179,8 @@ bool AudioEgress::SendTelephoneEvent(int dtmf_event, int duration_ms) { } void AudioEgress::SetMute(bool mute) { - encoder_queue_.PostTask([this, mute] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, mute] { + RTC_DCHECK_RUN_ON(&encoder_queue_checker_); encoder_context_.mute_ = mute; }); } diff --git a/audio/voip/audio_egress.h b/audio/voip/audio_egress.h index 989e5bda59..7955fed4b3 100644 --- a/audio/voip/audio_egress.h +++ b/audio/voip/audio_egress.h @@ -15,7 +15,9 @@ #include #include "api/audio_codecs/audio_format.h" +#include "api/environment/environment.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" #include "audio/audio_level.h" #include "audio/utility/audio_frame_operations.h" @@ -25,7 +27,7 @@ #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/rtp_sender_audio.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_queue.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/time_utils.h" namespace webrtc { @@ -45,9 +47,7 @@ namespace webrtc { // smaller footprint. class AudioEgress : public AudioSender, public AudioPacketizationCallback { public: - AudioEgress(RtpRtcpInterface* rtp_rtcp, - Clock* clock, - TaskQueueFactory* task_queue_factory); + AudioEgress(const Environment& env, RtpRtcpInterface* rtp_rtcp); ~AudioEgress() override; // Set the encoder format and payload type for AudioCodingModule. @@ -74,7 +74,7 @@ class AudioEgress : public AudioSender, public AudioPacketizationCallback { // Retrieve current encoder format info. This returns encoder format set // by SetEncoder() and if encoder is not set, this will return nullopt. - absl::optional GetEncoderFormat() const { + std::optional GetEncoderFormat() const { MutexLock lock(&lock_); return encoder_format_; } @@ -119,7 +119,7 @@ class AudioEgress : public AudioSender, public AudioPacketizationCallback { mutable Mutex lock_; // Current encoder format selected by caller. - absl::optional encoder_format_ RTC_GUARDED_BY(lock_); + std::optional encoder_format_ RTC_GUARDED_BY(lock_); // Synchronization is handled internally by RtpRtcp. RtpRtcpInterface* const rtp_rtcp_; @@ -146,11 +146,10 @@ class AudioEgress : public AudioSender, public AudioPacketizationCallback { bool previously_muted_ = false; }; - EncoderContext encoder_context_ RTC_GUARDED_BY(encoder_queue_); + EncoderContext encoder_context_ RTC_GUARDED_BY(encoder_queue_checker_); - // Defined last to ensure that there are no running tasks when the other - // members are destroyed. 
- rtc::TaskQueue encoder_queue_; + std::unique_ptr encoder_queue_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker encoder_queue_checker_; }; } // namespace webrtc diff --git a/audio/voip/audio_ingress.cc b/audio/voip/audio_ingress.cc index 80f21152c0..3fcfa5547b 100644 --- a/audio/voip/audio_ingress.cc +++ b/audio/voip/audio_ingress.cc @@ -11,10 +11,12 @@ #include "audio/voip/audio_ingress.h" #include +#include #include #include #include "api/audio_codecs/audio_format.h" +#include "api/neteq/default_neteq_factory.h" #include "audio/utility/audio_frame_operations.h" #include "modules/audio_coding/include/audio_coding_module.h" #include "modules/rtp_rtcp/source/byte_io.h" @@ -29,28 +31,28 @@ namespace webrtc { namespace { -acm2::AcmReceiver::Config CreateAcmConfig( - rtc::scoped_refptr decoder_factory) { - acm2::AcmReceiver::Config acm_config; - acm_config.neteq_config.enable_muted_state = true; - acm_config.decoder_factory = decoder_factory; - return acm_config; +NetEq::Config CreateNetEqConfig() { + NetEq::Config config; + config.enable_muted_state = true; + return config; } } // namespace -AudioIngress::AudioIngress( - RtpRtcpInterface* rtp_rtcp, - Clock* clock, - ReceiveStatistics* receive_statistics, - rtc::scoped_refptr decoder_factory) - : playing_(false), +AudioIngress::AudioIngress(const Environment& env, + RtpRtcpInterface* rtp_rtcp, + ReceiveStatistics* receive_statistics, + scoped_refptr decoder_factory) + : env_(env), + playing_(false), remote_ssrc_(0), first_rtp_timestamp_(-1), rtp_receive_statistics_(receive_statistics), rtp_rtcp_(rtp_rtcp), - acm_receiver_(CreateAcmConfig(decoder_factory)), - ntp_estimator_(clock) {} + neteq_(DefaultNetEqFactory().Create(env, + CreateNetEqConfig(), + decoder_factory)), + ntp_estimator_(&env_.clock()) {} AudioIngress::~AudioIngress() = default; @@ -61,13 +63,17 @@ AudioMixer::Source::AudioFrameInfo AudioIngress::GetAudioFrameWithInfo( // Get 10ms raw PCM data from the ACM. bool muted = false; - if (acm_receiver_.GetAudio(sampling_rate, audio_frame, &muted) == -1) { - RTC_DLOG(LS_ERROR) << "GetAudio() failed!"; - // In all likelihood, the audio in this frame is garbage. We return an - // error so that the audio mixer module doesn't add it to the mix. As - // a result, it won't be played out and the actions skipped here are - // irrelevant. - return AudioMixer::Source::AudioFrameInfo::kError; + { + MutexLock lock(&lock_); + if ((neteq_->GetAudio(audio_frame, &muted) != NetEq::kOK) || + !resampler_helper_.MaybeResample(sampling_rate, audio_frame)) { + RTC_DLOG(LS_ERROR) << "GetAudio() failed!"; + // In all likelihood, the audio in this frame is garbage. We return an + // error so that the audio mixer module doesn't add it to the mix. As + // a result, it won't be played out and the actions skipped here are + // irrelevant. + return AudioMixer::Source::AudioFrameInfo::kError; + } } if (muted) { @@ -101,10 +107,10 @@ AudioMixer::Source::AudioFrameInfo AudioIngress::GetAudioFrameWithInfo( } // For clock rate, default to the playout sampling rate if we haven't // received any packets yet. - absl::optional> decoder = - acm_receiver_.LastDecoder(); - int clock_rate = decoder ? decoder->second.clockrate_hz - : acm_receiver_.last_output_sample_rate_hz(); + std::optional decoder = + neteq_->GetCurrentDecoderFormat(); + int clock_rate = decoder ? 
decoder->sdp_format.clockrate_hz + : neteq_->last_output_sample_rate_hz(); RTC_DCHECK_GT(clock_rate, 0); audio_frame->elapsed_time_ms_ = (unwrap_timestamp - first_rtp_timestamp_) / (clock_rate / 1000); @@ -134,10 +140,10 @@ void AudioIngress::SetReceiveCodecs( receive_codec_info_[kv.first] = kv.second.clockrate_hz; } } - acm_receiver_.SetCodecs(codecs); + neteq_->SetCodecs(codecs); } -void AudioIngress::ReceivedRTPPacket(rtc::ArrayView rtp_packet) { +void AudioIngress::ReceivedRTPPacket(ArrayView rtp_packet) { RtpPacketReceived rtp_packet_received; rtp_packet_received.Parse(rtp_packet.data(), rtp_packet.size()); @@ -181,17 +187,19 @@ void AudioIngress::ReceivedRTPPacket(rtc::ArrayView rtp_packet) { const uint8_t* payload = rtp_packet_received.data() + header.headerLength; size_t payload_length = packet_length - header.headerLength; size_t payload_data_length = payload_length - header.paddingLength; - auto data_view = rtc::ArrayView(payload, payload_data_length); + auto data_view = ArrayView(payload, payload_data_length); // Push the incoming payload (parsed and ready for decoding) into the ACM. - if (acm_receiver_.InsertPacket(header, data_view) != 0) { - RTC_DLOG(LS_ERROR) << "AudioIngress::ReceivedRTPPacket() unable to " - "push data to the ACM"; + if (data_view.empty()) { + neteq_->InsertEmptyPacket(header); + } else if (neteq_->InsertPacket(header, data_view, + env_.clock().CurrentTime()) < 0) { + RTC_DLOG(LS_ERROR) << "ChannelReceive::OnReceivedPayloadData() unable to " + "insert packet into NetEq"; } } -void AudioIngress::ReceivedRTCPPacket( - rtc::ArrayView rtcp_packet) { +void AudioIngress::ReceivedRTCPPacket(ArrayView rtcp_packet) { rtcp::CommonHeader rtcp_header; if (rtcp_header.Parse(rtcp_packet.data(), rtcp_packet.size()) && (rtcp_header.type() == rtcp::SenderReport::kPacketType || @@ -212,13 +220,13 @@ void AudioIngress::ReceivedRTCPPacket( // Deliver RTCP packet to RTP/RTCP module for parsing and processing. rtp_rtcp_->IncomingRtcpPacket(rtcp_packet); - absl::optional rtt = rtp_rtcp_->LastRtt(); + std::optional rtt = rtp_rtcp_->LastRtt(); if (!rtt.has_value()) { // Waiting for valid RTT. return; } - absl::optional last_sr = + std::optional last_sr = rtp_rtcp_->GetSenderReportStats(); if (!last_sr.has_value()) { // Waiting for RTCP. @@ -227,21 +235,68 @@ void AudioIngress::ReceivedRTCPPacket( { MutexLock lock(&lock_); - ntp_estimator_.UpdateRtcpTimestamp(*rtt, last_sr->last_remote_timestamp, + ntp_estimator_.UpdateRtcpTimestamp(*rtt, last_sr->last_remote_ntp_timestamp, last_sr->last_remote_rtp_timestamp); } } +NetworkStatistics AudioIngress::GetNetworkStatistics() const { + NetworkStatistics stats; + stats.currentExpandRate = 0; + stats.currentSpeechExpandRate = 0; + stats.currentPreemptiveRate = 0; + stats.currentAccelerateRate = 0; + stats.currentSecondaryDecodedRate = 0; + stats.currentSecondaryDiscardedRate = 0; + stats.meanWaitingTimeMs = -1; + stats.maxWaitingTimeMs = 1; + + NetEqNetworkStatistics neteq_stat = neteq_->CurrentNetworkStatistics(); + stats.currentBufferSize = neteq_stat.current_buffer_size_ms; + stats.preferredBufferSize = neteq_stat.preferred_buffer_size_ms; + stats.jitterPeaksFound = neteq_stat.jitter_peaks_found ? 
true : false; + + NetEqLifetimeStatistics neteq_lifetime_stat = neteq_->GetLifetimeStatistics(); + stats.totalSamplesReceived = neteq_lifetime_stat.total_samples_received; + stats.concealedSamples = neteq_lifetime_stat.concealed_samples; + stats.silentConcealedSamples = neteq_lifetime_stat.silent_concealed_samples; + stats.concealmentEvents = neteq_lifetime_stat.concealment_events; + stats.jitterBufferDelayMs = neteq_lifetime_stat.jitter_buffer_delay_ms; + stats.jitterBufferTargetDelayMs = + neteq_lifetime_stat.jitter_buffer_target_delay_ms; + stats.jitterBufferMinimumDelayMs = + neteq_lifetime_stat.jitter_buffer_minimum_delay_ms; + stats.jitterBufferEmittedCount = + neteq_lifetime_stat.jitter_buffer_emitted_count; + stats.delayedPacketOutageSamples = + neteq_lifetime_stat.delayed_packet_outage_samples; + stats.relativePacketArrivalDelayMs = + neteq_lifetime_stat.relative_packet_arrival_delay_ms; + stats.interruptionCount = neteq_lifetime_stat.interruption_count; + stats.totalInterruptionDurationMs = + neteq_lifetime_stat.total_interruption_duration_ms; + stats.insertedSamplesForDeceleration = + neteq_lifetime_stat.inserted_samples_for_deceleration; + stats.removedSamplesForAcceleration = + neteq_lifetime_stat.removed_samples_for_acceleration; + stats.fecPacketsReceived = neteq_lifetime_stat.fec_packets_received; + stats.fecPacketsDiscarded = neteq_lifetime_stat.fec_packets_discarded; + stats.totalProcessingDelayUs = neteq_lifetime_stat.total_processing_delay_us; + stats.packetsDiscarded = neteq_lifetime_stat.packets_discarded; + + NetEqOperationsAndState neteq_operations_and_state = + neteq_->GetOperationsAndState(); + stats.packetBufferFlushes = neteq_operations_and_state.packet_buffer_flushes; + + return stats; +} + ChannelStatistics AudioIngress::GetChannelStatistics() { ChannelStatistics channel_stats; // Get clockrate for current decoder ahead of jitter calculation. - uint32_t clockrate_hz = 0; - absl::optional> decoder = - acm_receiver_.LastDecoder(); - if (decoder) { - clockrate_hz = decoder->second.clockrate_hz; - } + auto decoder = neteq_->GetCurrentDecoderFormat(); + const uint32_t clockrate_hz = decoder ? decoder->sdp_format.clockrate_hz : 0; StreamStatistician* statistician = rtp_receive_statistics_->GetStatistician(remote_ssrc_); diff --git a/audio/voip/audio_ingress.h b/audio/voip/audio_ingress.h index 11bde7ce28..97ee2acb62 100644 --- a/audio/voip/audio_ingress.h +++ b/audio/voip/audio_ingress.h @@ -15,16 +15,19 @@ #include #include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio/audio_mixer.h" +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/environment/environment.h" +#include "api/neteq/neteq.h" #include "api/rtp_headers.h" #include "api/scoped_refptr.h" #include "api/voip/voip_statistics.h" #include "audio/audio_level.h" -#include "modules/audio_coding/acm2/acm_receiver.h" +#include "modules/audio_coding/acm2/acm_resampler.h" #include "modules/audio_coding/include/audio_coding_module.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h" @@ -46,10 +49,10 @@ namespace webrtc { // smaller footprint. 
class AudioIngress : public AudioMixer::Source { public: - AudioIngress(RtpRtcpInterface* rtp_rtcp, - Clock* clock, + AudioIngress(const Environment& env, + RtpRtcpInterface* rtp_rtcp, ReceiveStatistics* receive_statistics, - rtc::scoped_refptr decoder_factory); + scoped_refptr decoder_factory); ~AudioIngress() override; // Start or stop receiving operation of AudioIngress. @@ -67,8 +70,8 @@ class AudioIngress : public AudioMixer::Source { void SetReceiveCodecs(const std::map& codecs); // APIs to handle received RTP/RTCP packets from caller. - void ReceivedRTPPacket(rtc::ArrayView rtp_packet); - void ReceivedRTCPPacket(rtc::ArrayView rtcp_packet); + void ReceivedRTPPacket(ArrayView rtp_packet); + void ReceivedRTCPPacket(ArrayView rtcp_packet); // See comments on LevelFullRange, TotalEnergy, TotalDuration from // audio/audio_level.h. @@ -80,12 +83,7 @@ class AudioIngress : public AudioMixer::Source { return output_audio_level_.TotalDuration(); } - NetworkStatistics GetNetworkStatistics() const { - NetworkStatistics stats; - acm_receiver_.GetNetworkStatistics(&stats, - /*get_and_clear_legacy_stats=*/false); - return stats; - } + NetworkStatistics GetNetworkStatistics() const; ChannelStatistics GetChannelStatistics(); @@ -93,18 +91,21 @@ class AudioIngress : public AudioMixer::Source { AudioMixer::Source::AudioFrameInfo GetAudioFrameWithInfo( int sampling_rate, AudioFrame* audio_frame) override; - int Ssrc() const override { - return rtc::dchecked_cast(remote_ssrc_.load()); - } + int Ssrc() const override { return dchecked_cast(remote_ssrc_.load()); } int PreferredSampleRate() const override { + std::optional decoder = + neteq_->GetCurrentDecoderFormat(); + // If we haven't received any RTP packet from remote and thus // last_packet_sampling_rate is not available then use NetEq's sampling // rate as that would be what would be used for audio output sample. - return std::max(acm_receiver_.last_packet_sample_rate_hz().value_or(0), - acm_receiver_.last_output_sample_rate_hz()); + return std::max(decoder ? decoder->sample_rate_hz : 0, + neteq_->last_output_sample_rate_hz()); } private: + const Environment env_; + // Indicates AudioIngress status as caller invokes Start/StopPlaying. // If not playing, incoming RTP data processing is skipped, thus // producing no data to output device. @@ -123,8 +124,8 @@ class AudioIngress : public AudioMixer::Source { // Synchronizaton is handled internally by RtpRtcpInterface. RtpRtcpInterface* const rtp_rtcp_; - // Synchronizaton is handled internally by acm2::AcmReceiver. - acm2::AcmReceiver acm_receiver_; + // Synchronizaton is handled internally by NetEq. + const std::unique_ptr neteq_; // Synchronizaton is handled internally by voe::AudioLevel. voe::AudioLevel output_audio_level_; @@ -138,6 +139,9 @@ class AudioIngress : public AudioMixer::Source { std::map receive_codec_info_ RTC_GUARDED_BY(lock_); RtpTimestampUnwrapper timestamp_wrap_handler_ RTC_GUARDED_BY(lock_); + + // Resampler for the output audio. 
+ acm2::ResamplerHelper resampler_helper_ RTC_GUARDED_BY(lock_); }; } // namespace webrtc diff --git a/audio/voip/test/BUILD.gn b/audio/voip/test/BUILD.gn index 00e9bee622..7f120065b5 100644 --- a/audio/voip/test/BUILD.gn +++ b/audio/voip/test/BUILD.gn @@ -28,6 +28,7 @@ if (rtc_include_tests) { "..:voip_core", "../../../api/audio_codecs:builtin_audio_decoder_factory", "../../../api/audio_codecs:builtin_audio_encoder_factory", + "../../../api/environment:environment_factory", "../../../api/task_queue:default_task_queue_factory", "../../../modules/audio_device:mock_audio_device", "../../../modules/audio_processing:mocks", @@ -48,6 +49,8 @@ if (rtc_include_tests) { "../../../api:transport_api", "../../../api/audio_codecs:builtin_audio_decoder_factory", "../../../api/audio_codecs:builtin_audio_encoder_factory", + "../../../api/environment", + "../../../api/environment:environment_factory", "../../../api/task_queue:task_queue", "../../../modules/audio_mixer:audio_mixer_impl", "../../../modules/audio_mixer:audio_mixer_test_utils", @@ -56,8 +59,8 @@ if (rtc_include_tests) { "../../../rtc_base:logging", "../../../test:mock_transport", "../../../test:test_support", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable" ] } rtc_library("audio_ingress_unittests") { @@ -69,6 +72,8 @@ if (rtc_include_tests) { "../../../api:transport_api", "../../../api/audio_codecs:builtin_audio_decoder_factory", "../../../api/audio_codecs:builtin_audio_encoder_factory", + "../../../api/environment", + "../../../api/environment:environment_factory", "../../../api/task_queue:default_task_queue_factory", "../../../api/units:time_delta", "../../../api/units:timestamp", @@ -90,6 +95,8 @@ if (rtc_include_tests) { "..:audio_egress", "../../../api:transport_api", "../../../api/audio_codecs:builtin_audio_encoder_factory", + "../../../api/environment", + "../../../api/environment:environment_factory", "../../../api/task_queue:default_task_queue_factory", "../../../api/units:time_delta", "../../../api/units:timestamp", diff --git a/audio/voip/test/audio_channel_unittest.cc b/audio/voip/test/audio_channel_unittest.cc index 7097e7f6c9..9a47e03710 100644 --- a/audio/voip/test/audio_channel_unittest.cc +++ b/audio/voip/test/audio_channel_unittest.cc @@ -14,6 +14,8 @@ #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/call/transport.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" #include "audio/voip/test/mock_task_queue.h" @@ -44,8 +46,11 @@ class AudioChannelTest : public ::testing::Test { const SdpAudioFormat kPcmuFormat = {"pcmu", 8000, 1}; AudioChannelTest() - : fake_clock_(kStartTime), wave_generator_(1000.0, kAudioLevel) { - task_queue_factory_ = std::make_unique(&task_queue_); + : fake_clock_(kStartTime), + wave_generator_(1000.0, kAudioLevel), + env_(CreateEnvironment( + &fake_clock_, + std::make_unique(&task_queue_))) { audio_mixer_ = AudioMixerImpl::Create(); encoder_factory_ = CreateBuiltinAudioEncoderFactory(); decoder_factory_ = CreateBuiltinAudioDecoderFactory(); @@ -53,26 +58,25 @@ class AudioChannelTest : public ::testing::Test { // By default, run the queued task immediately. 
ON_CALL(task_queue_, PostTaskImpl) .WillByDefault(WithArg<0>( - [](absl::AnyInvocable task) { std::move(task)(); })); + [](absl::AnyInvocable task) { std::move(task)(); })); } void SetUp() override { audio_channel_ = CreateAudioChannel(kLocalSsrc); } void TearDown() override { audio_channel_ = nullptr; } - rtc::scoped_refptr CreateAudioChannel(uint32_t ssrc) { + scoped_refptr CreateAudioChannel(uint32_t ssrc) { // Use same audio mixer here for simplicity sake as we are not checking // audio activity of RTP in our testcases. If we need to do test on audio // signal activity then we need to assign audio mixer for each channel. // Also this uses the same transport object for different audio channel to // simplify network routing logic. - rtc::scoped_refptr audio_channel = - rtc::make_ref_counted( - &transport_, ssrc, task_queue_factory_.get(), audio_mixer_.get(), - decoder_factory_); - audio_channel->SetEncoder(kPcmuPayload, kPcmuFormat, - encoder_factory_->MakeAudioEncoder( - kPcmuPayload, kPcmuFormat, absl::nullopt)); + scoped_refptr audio_channel = make_ref_counted( + env_, &transport_, ssrc, audio_mixer_.get(), decoder_factory_); + audio_channel->SetEncoder( + kPcmuPayload, kPcmuFormat, + encoder_factory_->Create(env_, kPcmuFormat, + {.payload_type = kPcmuPayload})); audio_channel->SetReceiveCodecs({{kPcmuPayload, kPcmuFormat}}); audio_channel->StartSend(); audio_channel->StartPlay(); @@ -93,18 +97,18 @@ class AudioChannelTest : public ::testing::Test { SineWaveGenerator wave_generator_; NiceMock transport_; NiceMock task_queue_; - std::unique_ptr task_queue_factory_; - rtc::scoped_refptr audio_mixer_; - rtc::scoped_refptr decoder_factory_; - rtc::scoped_refptr encoder_factory_; - rtc::scoped_refptr audio_channel_; + const Environment env_; + scoped_refptr audio_mixer_; + scoped_refptr decoder_factory_; + scoped_refptr encoder_factory_; + scoped_refptr audio_channel_; }; // Validate RTP packet generation by feeding audio frames with sine wave. // Resulted RTP packet is looped back into AudioChannel and gets decoded into // audio frame to see if it has some signal to indicate its validity. TEST_F(AudioChannelTest, PlayRtpByLocalLoop) { - auto loop_rtp = [&](rtc::ArrayView packet, Unused) { + auto loop_rtp = [&](ArrayView packet, Unused) { audio_channel_->ReceivedRTPPacket(packet); return true; }; @@ -129,7 +133,7 @@ TEST_F(AudioChannelTest, PlayRtpByLocalLoop) { // Validate assigned local SSRC is resulted in RTP packet. TEST_F(AudioChannelTest, VerifyLocalSsrcAsAssigned) { RtpPacketReceived rtp; - auto loop_rtp = [&](rtc::ArrayView packet, Unused) { + auto loop_rtp = [&](ArrayView packet, Unused) { rtp.Parse(packet); return true; }; @@ -144,7 +148,7 @@ TEST_F(AudioChannelTest, VerifyLocalSsrcAsAssigned) { // Check metrics after processing an RTP packet. TEST_F(AudioChannelTest, TestIngressStatistics) { - auto loop_rtp = [&](rtc::ArrayView packet, Unused) { + auto loop_rtp = [&](ArrayView packet, Unused) { audio_channel_->ReceivedRTPPacket(packet); return true; }; @@ -158,7 +162,7 @@ TEST_F(AudioChannelTest, TestIngressStatistics) { audio_mixer_->Mix(/*number_of_channels=*/1, &audio_frame); audio_mixer_->Mix(/*number_of_channels=*/1, &audio_frame); - absl::optional ingress_stats = + std::optional ingress_stats = audio_channel_->GetIngressStatistics(); EXPECT_TRUE(ingress_stats); EXPECT_EQ(ingress_stats->neteq_stats.total_samples_received, 160ULL); @@ -221,18 +225,18 @@ TEST_F(AudioChannelTest, TestIngressStatistics) { // Check ChannelStatistics metric after processing RTP and RTCP packets. 
TEST_F(AudioChannelTest, TestChannelStatistics) { - auto loop_rtp = [&](rtc::ArrayView packet, Unused) { + auto loop_rtp = [&](ArrayView packet, Unused) { audio_channel_->ReceivedRTPPacket(packet); return true; }; - auto loop_rtcp = [&](rtc::ArrayView packet) { + auto loop_rtcp = [&](ArrayView packet) { audio_channel_->ReceivedRTCPPacket(packet); return true; }; EXPECT_CALL(transport_, SendRtp).WillRepeatedly(Invoke(loop_rtp)); EXPECT_CALL(transport_, SendRtcp).WillRepeatedly(Invoke(loop_rtcp)); - // Simulate microphone giving audio frame (10 ms). This will trigger tranport + // Simulate microphone giving audio frame (10 ms). This will trigger transport // to send RTP as handled in loop_rtp above. auto audio_sender = audio_channel_->GetAudioSender(); audio_sender->SendAudioData(GetAudioFrame(0)); @@ -245,11 +249,11 @@ TEST_F(AudioChannelTest, TestChannelStatistics) { audio_mixer_->Mix(/*number_of_channels=*/1, &audio_frame); // Force sending RTCP SR report in order to have remote_rtcp field available - // in channel statistics. This will trigger tranport to send RTCP as handled + // in channel statistics. This will trigger transport to send RTCP as handled // in loop_rtcp above. audio_channel_->SendRTCPReportForTesting(kRtcpSr); - absl::optional channel_stats = + std::optional channel_stats = audio_channel_->GetChannelStatistics(); EXPECT_TRUE(channel_stats); @@ -287,10 +291,10 @@ TEST_F(AudioChannelTest, RttIsAvailableAfterChangeOfRemoteSsrc) { auto ac_2 = CreateAudioChannel(kAc2Ssrc); auto ac_3 = CreateAudioChannel(kAc3Ssrc); - auto send_recv_rtp = [&](rtc::scoped_refptr rtp_sender, - rtc::scoped_refptr rtp_receiver) { + auto send_recv_rtp = [&](scoped_refptr rtp_sender, + scoped_refptr rtp_receiver) { // Setup routing logic via transport_. - auto route_rtp = [&](rtc::ArrayView packet, Unused) { + auto route_rtp = [&](ArrayView packet, Unused) { rtp_receiver->ReceivedRTPPacket(packet); return true; }; @@ -309,10 +313,10 @@ TEST_F(AudioChannelTest, RttIsAvailableAfterChangeOfRemoteSsrc) { ON_CALL(transport_, SendRtp).WillByDefault(Return(true)); }; - auto send_recv_rtcp = [&](rtc::scoped_refptr rtcp_sender, - rtc::scoped_refptr rtcp_receiver) { + auto send_recv_rtcp = [&](scoped_refptr rtcp_sender, + scoped_refptr rtcp_receiver) { // Setup routing logic via transport_. 
- auto route_rtcp = [&](rtc::ArrayView packet) { + auto route_rtcp = [&](ArrayView packet) { rtcp_receiver->ReceivedRTCPPacket(packet); return true; }; @@ -331,7 +335,7 @@ TEST_F(AudioChannelTest, RttIsAvailableAfterChangeOfRemoteSsrc) { send_recv_rtcp(audio_channel_, ac_2); send_recv_rtcp(ac_2, audio_channel_); - absl::optional channel_stats = + std::optional channel_stats = audio_channel_->GetChannelStatistics(); ASSERT_TRUE(channel_stats); EXPECT_EQ(channel_stats->remote_ssrc, kAc2Ssrc); diff --git a/audio/voip/test/audio_egress_unittest.cc b/audio/voip/test/audio_egress_unittest.cc index 8501b2d3d9..50b0b99636 100644 --- a/audio/voip/test/audio_egress_unittest.cc +++ b/audio/voip/test/audio_egress_unittest.cc @@ -12,7 +12,8 @@ #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/call/transport.h" -#include "api/task_queue/default_task_queue_factory.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "modules/audio_mixer/sine_wave_generator.h" @@ -33,16 +34,15 @@ using ::testing::Invoke; using ::testing::NiceMock; using ::testing::Unused; -std::unique_ptr CreateRtpStack(Clock* clock, +std::unique_ptr CreateRtpStack(const Environment& env, Transport* transport, uint32_t remote_ssrc) { RtpRtcpInterface::Configuration rtp_config; - rtp_config.clock = clock; rtp_config.audio = true; rtp_config.rtcp_report_interval_ms = 5000; rtp_config.outgoing_transport = transport; rtp_config.local_media_ssrc = remote_ssrc; - auto rtp_rtcp = ModuleRtpRtcpImpl2::Create(rtp_config); + auto rtp_rtcp = std::make_unique(env, rtp_config); rtp_rtcp->SetSendingMediaStatus(false); rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound); return rtp_rtcp; @@ -67,15 +67,12 @@ class AudioEgressTest : public ::testing::Test { // Prepare test on audio egress by using PCMu codec with specific // sequence number and its status to be running. 
void SetUp() override { - rtp_rtcp_ = - CreateRtpStack(time_controller_.GetClock(), &transport_, kRemoteSsrc); - egress_ = std::make_unique( - rtp_rtcp_.get(), time_controller_.GetClock(), - time_controller_.GetTaskQueueFactory()); + rtp_rtcp_ = CreateRtpStack(env_, &transport_, kRemoteSsrc); + egress_ = std::make_unique(env_, rtp_rtcp_.get()); constexpr int kPcmuPayload = 0; egress_->SetEncoder(kPcmuPayload, kPcmuFormat, - encoder_factory_->MakeAudioEncoder( - kPcmuPayload, kPcmuFormat, absl::nullopt)); + encoder_factory_->Create( + env_, kPcmuFormat, {.payload_type = kPcmuPayload})); egress_->StartSend(); rtp_rtcp_->SetSequenceNumber(kSeqNum); rtp_rtcp_->SetSendingStatus(true); @@ -104,10 +101,13 @@ class AudioEgressTest : public ::testing::Test { } GlobalSimulatedTimeController time_controller_{Timestamp::Micros(kStartTime)}; + const Environment env_ = + CreateEnvironment(time_controller_.GetClock(), + time_controller_.GetTaskQueueFactory()); NiceMock transport_; SineWaveGenerator wave_generator_; std::unique_ptr rtp_rtcp_; - rtc::scoped_refptr encoder_factory_; + scoped_refptr encoder_factory_; std::unique_ptr egress_; }; @@ -119,10 +119,10 @@ TEST_F(AudioEgressTest, SendingStatusAfterStartAndStop) { TEST_F(AudioEgressTest, ProcessAudioWithMute) { constexpr int kExpected = 10; - rtc::Event event; + Event event; int rtp_count = 0; RtpPacketReceived rtp; - auto rtp_sent = [&](rtc::ArrayView packet, Unused) { + auto rtp_sent = [&](ArrayView packet, Unused) { rtp.Parse(packet); if (++rtp_count == kExpected) { event.Set(); @@ -157,10 +157,10 @@ TEST_F(AudioEgressTest, ProcessAudioWithMute) { TEST_F(AudioEgressTest, ProcessAudioWithSineWave) { constexpr int kExpected = 10; - rtc::Event event; + Event event; int rtp_count = 0; RtpPacketReceived rtp; - auto rtp_sent = [&](rtc::ArrayView packet, Unused) { + auto rtp_sent = [&](ArrayView packet, Unused) { rtp.Parse(packet); if (++rtp_count == kExpected) { event.Set(); @@ -193,9 +193,9 @@ TEST_F(AudioEgressTest, ProcessAudioWithSineWave) { TEST_F(AudioEgressTest, SkipAudioEncodingAfterStopSend) { constexpr int kExpected = 10; - rtc::Event event; + Event event; int rtp_count = 0; - auto rtp_sent = [&](rtc::ArrayView packet, Unused) { + auto rtp_sent = [&](ArrayView /* packet */, Unused) { if (++rtp_count == kExpected) { event.Set(); } @@ -218,7 +218,7 @@ TEST_F(AudioEgressTest, SkipAudioEncodingAfterStopSend) { // It should be safe to exit the test case while encoder_queue_ has // outstanding data to process. We are making sure that this doesn't - // result in crahses or sanitizer errors due to remaining data. + // result in crashes or sanitizer errors due to remaining data. 
for (size_t i = 0; i < kExpected * 2; i++) { egress_->SendAudioData(GetAudioFrame(i)); time_controller_.AdvanceTime(TimeDelta::Millis(10)); @@ -226,7 +226,7 @@ TEST_F(AudioEgressTest, SkipAudioEncodingAfterStopSend) { } TEST_F(AudioEgressTest, ChangeEncoderFromPcmuToOpus) { - absl::optional pcmu = egress_->GetEncoderFormat(); + std::optional pcmu = egress_->GetEncoderFormat(); EXPECT_TRUE(pcmu); EXPECT_EQ(pcmu->clockrate_hz, kPcmuFormat.clockrate_hz); EXPECT_EQ(pcmu->num_channels, kPcmuFormat.num_channels); @@ -235,10 +235,10 @@ TEST_F(AudioEgressTest, ChangeEncoderFromPcmuToOpus) { const SdpAudioFormat kOpusFormat = {"opus", 48000, 2}; egress_->SetEncoder(kOpusPayload, kOpusFormat, - encoder_factory_->MakeAudioEncoder( - kOpusPayload, kOpusFormat, absl::nullopt)); + encoder_factory_->Create(env_, kOpusFormat, + {.payload_type = kOpusPayload})); - absl::optional opus = egress_->GetEncoderFormat(); + std::optional opus = egress_->GetEncoderFormat(); EXPECT_TRUE(opus); EXPECT_EQ(opus->clockrate_hz, kOpusFormat.clockrate_hz); EXPECT_EQ(opus->num_channels, kOpusFormat.num_channels); @@ -257,7 +257,7 @@ TEST_F(AudioEgressTest, SendDTMF) { // 5, 6, 7 @ 100 ms (last one sends 3 dtmf) egress_->SendTelephoneEvent(kEvent, kDurationMs); - rtc::Event event; + Event event; int dtmf_count = 0; auto is_dtmf = [&](RtpPacketReceived& rtp) { return (rtp.PayloadType() == kPayloadType && @@ -269,7 +269,7 @@ TEST_F(AudioEgressTest, SendDTMF) { // It's possible that we may have actual audio RTP packets along with // DTMF packtets. We are only interested in the exact number of DTMF // packets rtp stack is emitting. - auto rtp_sent = [&](rtc::ArrayView packet, Unused) { + auto rtp_sent = [&](ArrayView packet, Unused) { RtpPacketReceived rtp; rtp.Parse(packet); if (is_dtmf(rtp) && ++dtmf_count == kExpected) { @@ -294,9 +294,9 @@ TEST_F(AudioEgressTest, TestAudioInputLevelAndEnergyDuration) { // Per audio_level's kUpdateFrequency, we need more than 10 audio samples to // get audio level from input source. 
constexpr int kExpected = 6; - rtc::Event event; + Event event; int rtp_count = 0; - auto rtp_sent = [&](rtc::ArrayView packet, Unused) { + auto rtp_sent = [&](ArrayView /* packet */, Unused) { if (++rtp_count == kExpected) { event.Set(); } diff --git a/audio/voip/test/audio_ingress_unittest.cc b/audio/voip/test/audio_ingress_unittest.cc index c7736b247a..282501c38f 100644 --- a/audio/voip/test/audio_ingress_unittest.cc +++ b/audio/voip/test/audio_ingress_unittest.cc @@ -13,6 +13,8 @@ #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/call/transport.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/task_queue/default_task_queue_factory.h" #include "api/units/time_delta.h" #include "audio/voip/audio_egress.h" @@ -44,13 +46,12 @@ class AudioIngressTest : public ::testing::Test { ReceiveStatistics::Create(time_controller_.GetClock()); RtpRtcpInterface::Configuration rtp_config; - rtp_config.clock = time_controller_.GetClock(); rtp_config.audio = true; rtp_config.receive_statistics = receive_statistics_.get(); rtp_config.rtcp_report_interval_ms = 5000; rtp_config.outgoing_transport = &transport_; rtp_config.local_media_ssrc = 0xdeadc0de; - rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(rtp_config); + rtp_rtcp_ = std::make_unique(env_, rtp_config); rtp_rtcp_->SetSendingMediaStatus(false); rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound); @@ -62,16 +63,13 @@ class AudioIngressTest : public ::testing::Test { void SetUp() override { constexpr int kPcmuPayload = 0; ingress_ = std::make_unique( - rtp_rtcp_.get(), time_controller_.GetClock(), receive_statistics_.get(), - decoder_factory_); + env_, rtp_rtcp_.get(), receive_statistics_.get(), decoder_factory_); ingress_->SetReceiveCodecs({{kPcmuPayload, kPcmuFormat}}); - egress_ = std::make_unique( - rtp_rtcp_.get(), time_controller_.GetClock(), - time_controller_.GetTaskQueueFactory()); + egress_ = std::make_unique(env_, rtp_rtcp_.get()); egress_->SetEncoder(kPcmuPayload, kPcmuFormat, - encoder_factory_->MakeAudioEncoder( - kPcmuPayload, kPcmuFormat, absl::nullopt)); + encoder_factory_->Create( + env_, kPcmuFormat, {.payload_type = kPcmuPayload})); egress_->StartSend(); ingress_->StartPlay(); rtp_rtcp_->SetSendingStatus(true); @@ -96,12 +94,15 @@ class AudioIngressTest : public ::testing::Test { } GlobalSimulatedTimeController time_controller_{Timestamp::Micros(123456789)}; + const Environment env_ = + CreateEnvironment(time_controller_.GetClock(), + time_controller_.GetTaskQueueFactory()); SineWaveGenerator wave_generator_; NiceMock transport_; std::unique_ptr receive_statistics_; std::unique_ptr rtp_rtcp_; - rtc::scoped_refptr encoder_factory_; - rtc::scoped_refptr decoder_factory_; + scoped_refptr encoder_factory_; + scoped_refptr decoder_factory_; std::unique_ptr ingress_; std::unique_ptr egress_; }; @@ -113,8 +114,8 @@ TEST_F(AudioIngressTest, PlayingAfterStartAndStop) { } TEST_F(AudioIngressTest, GetAudioFrameAfterRtpReceived) { - rtc::Event event; - auto handle_rtp = [&](rtc::ArrayView packet, Unused) { + Event event; + auto handle_rtp = [&](ArrayView packet, Unused) { ingress_->ReceivedRTPPacket(packet); event.Set(); return true; @@ -143,8 +144,8 @@ TEST_F(AudioIngressTest, TestSpeechOutputLevelAndEnergyDuration) { // get audio level from output source. 
constexpr int kNumRtp = 6; int rtp_count = 0; - rtc::Event event; - auto handle_rtp = [&](rtc::ArrayView packet, Unused) { + Event event; + auto handle_rtp = [&](ArrayView packet, Unused) { ingress_->ReceivedRTPPacket(packet); if (++rtp_count == kNumRtp) { event.Set(); @@ -174,8 +175,8 @@ TEST_F(AudioIngressTest, TestSpeechOutputLevelAndEnergyDuration) { } TEST_F(AudioIngressTest, PreferredSampleRate) { - rtc::Event event; - auto handle_rtp = [&](rtc::ArrayView packet, Unused) { + Event event; + auto handle_rtp = [&](ArrayView packet, Unused) { ingress_->ReceivedRTPPacket(packet); event.Set(); return true; @@ -203,8 +204,8 @@ TEST_F(AudioIngressTest, GetMutedAudioFrameAfterRtpReceivedAndStopPlay) { // valid speech level. constexpr int kNumRtp = 6; int rtp_count = 0; - rtc::Event event; - auto handle_rtp = [&](rtc::ArrayView packet, Unused) { + Event event; + auto handle_rtp = [&](ArrayView packet, Unused) { ingress_->ReceivedRTPPacket(packet); if (++rtp_count == kNumRtp) { event.Set(); diff --git a/audio/voip/test/mock_task_queue.h b/audio/voip/test/mock_task_queue.h index 547b0d3f75..6a769bfefb 100644 --- a/audio/voip/test/mock_task_queue.h +++ b/audio/voip/test/mock_task_queue.h @@ -39,8 +39,8 @@ class MockTaskQueueFactory : public TaskQueueFactory { : task_queue_(task_queue) {} std::unique_ptr CreateTaskQueue( - absl::string_view name, - Priority priority) const override { + absl::string_view /* name */, + Priority /* priority */) const override { // Default MockTaskQueue::Delete is no-op, therefore it's safe to pass the // raw pointer. return std::unique_ptr(task_queue_); diff --git a/audio/voip/test/voip_core_unittest.cc b/audio/voip/test/voip_core_unittest.cc index b432506b12..5b13cc3eb1 100644 --- a/audio/voip/test/voip_core_unittest.cc +++ b/audio/voip/test/voip_core_unittest.cc @@ -12,7 +12,7 @@ #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" -#include "api/task_queue/default_task_queue_factory.h" +#include "api/environment/environment_factory.h" #include "modules/audio_device/include/mock_audio_device.h" #include "modules/audio_processing/include/mock_audio_processing.h" #include "test/gtest.h" @@ -39,19 +39,18 @@ class VoipCoreTest : public ::testing::Test { void SetUp() override { auto encoder_factory = CreateBuiltinAudioEncoderFactory(); auto decoder_factory = CreateBuiltinAudioDecoderFactory(); - rtc::scoped_refptr audio_processing = - rtc::make_ref_counted>(); + scoped_refptr audio_processing = + make_ref_counted>(); voip_core_ = std::make_unique( - std::move(encoder_factory), std::move(decoder_factory), - CreateDefaultTaskQueueFactory(), audio_device_, - std::move(audio_processing)); + CreateEnvironment(), std::move(encoder_factory), + std::move(decoder_factory), audio_device_, std::move(audio_processing)); } test::RunLoop run_loop_; std::unique_ptr voip_core_; NiceMock transport_; - rtc::scoped_refptr audio_device_; + scoped_refptr audio_device_; }; // Validate expected API calls that involves with VoipCore. 
Some verification is diff --git a/audio/voip/voip_core.cc b/audio/voip/voip_core.cc index 8df1c594aa..11586d0f64 100644 --- a/audio/voip/voip_core.cc +++ b/audio/voip/voip_core.cc @@ -37,16 +37,16 @@ static constexpr int kMaxChannelId = 100000; } // namespace -VoipCore::VoipCore(rtc::scoped_refptr encoder_factory, - rtc::scoped_refptr decoder_factory, - std::unique_ptr task_queue_factory, - rtc::scoped_refptr audio_device_module, - rtc::scoped_refptr audio_processing) { - encoder_factory_ = std::move(encoder_factory); - decoder_factory_ = std::move(decoder_factory); - task_queue_factory_ = std::move(task_queue_factory); - audio_device_module_ = std::move(audio_device_module); - audio_processing_ = std::move(audio_processing); +VoipCore::VoipCore(const Environment& env, + scoped_refptr encoder_factory, + scoped_refptr decoder_factory, + scoped_refptr audio_device_module, + scoped_refptr audio_processing) + : env_(env), + encoder_factory_(std::move(encoder_factory)), + decoder_factory_(std::move(decoder_factory)), + audio_processing_(std::move(audio_processing)), + audio_device_module_(std::move(audio_device_module)) { audio_mixer_ = AudioMixerImpl::Create(); // AudioTransportImpl depends on audio mixer and audio processing instances. @@ -122,19 +122,18 @@ bool VoipCore::InitializeIfNeeded() { } ChannelId VoipCore::CreateChannel(Transport* transport, - absl::optional local_ssrc) { + std::optional local_ssrc) { ChannelId channel_id; // Set local ssrc to random if not set by caller. if (!local_ssrc) { - Random random(rtc::TimeMicros()); + Random random(TimeMicros()); local_ssrc = random.Rand(); } - rtc::scoped_refptr channel = - rtc::make_ref_counted(transport, local_ssrc.value(), - task_queue_factory_.get(), - audio_mixer_.get(), decoder_factory_); + scoped_refptr channel = + make_ref_counted(env_, transport, local_ssrc.value(), + audio_mixer_.get(), decoder_factory_); { MutexLock lock(&lock_); @@ -155,7 +154,7 @@ ChannelId VoipCore::CreateChannel(Transport* transport, VoipResult VoipCore::ReleaseChannel(ChannelId channel_id) { // Destroy channel outside of the lock. - rtc::scoped_refptr channel; + scoped_refptr channel; bool no_channels_after_release = false; @@ -194,8 +193,8 @@ VoipResult VoipCore::ReleaseChannel(ChannelId channel_id) { return status_code; } -rtc::scoped_refptr VoipCore::GetChannel(ChannelId channel_id) { - rtc::scoped_refptr channel; +scoped_refptr VoipCore::GetChannel(ChannelId channel_id) { + scoped_refptr channel; { MutexLock lock(&lock_); auto iter = channels_.find(channel_id); @@ -222,7 +221,7 @@ bool VoipCore::UpdateAudioTransportWithSenders() { // Reserve to prevent run time vector re-allocation. 
audio_senders.reserve(channels_.size()); for (auto kv : channels_) { - rtc::scoped_refptr& channel = kv.second; + scoped_refptr& channel = kv.second; if (channel->IsSendingMedia()) { auto encoder_format = channel->GetEncoderFormat(); if (!encoder_format) { @@ -270,7 +269,7 @@ bool VoipCore::UpdateAudioTransportWithSenders() { } VoipResult VoipCore::StartSend(ChannelId channel_id) { - rtc::scoped_refptr channel = GetChannel(channel_id); + scoped_refptr channel = GetChannel(channel_id); if (!channel) { return VoipResult::kInvalidArgument; @@ -285,7 +284,7 @@ VoipResult VoipCore::StartSend(ChannelId channel_id) { } VoipResult VoipCore::StopSend(ChannelId channel_id) { - rtc::scoped_refptr channel = GetChannel(channel_id); + scoped_refptr channel = GetChannel(channel_id); if (!channel) { return VoipResult::kInvalidArgument; @@ -298,7 +297,7 @@ VoipResult VoipCore::StopSend(ChannelId channel_id) { } VoipResult VoipCore::StartPlayout(ChannelId channel_id) { - rtc::scoped_refptr channel = GetChannel(channel_id); + scoped_refptr channel = GetChannel(channel_id); if (!channel) { return VoipResult::kInvalidArgument; @@ -332,7 +331,7 @@ VoipResult VoipCore::StartPlayout(ChannelId channel_id) { } VoipResult VoipCore::StopPlayout(ChannelId channel_id) { - rtc::scoped_refptr channel = GetChannel(channel_id); + scoped_refptr channel = GetChannel(channel_id); if (!channel) { return VoipResult::kInvalidArgument; @@ -343,10 +342,9 @@ VoipResult VoipCore::StopPlayout(ChannelId channel_id) { return VoipResult::kOk; } -VoipResult VoipCore::ReceivedRTPPacket( - ChannelId channel_id, - rtc::ArrayView rtp_packet) { - rtc::scoped_refptr channel = GetChannel(channel_id); +VoipResult VoipCore::ReceivedRTPPacket(ChannelId channel_id, + ArrayView rtp_packet) { + scoped_refptr channel = GetChannel(channel_id); if (!channel) { return VoipResult::kInvalidArgument; @@ -357,10 +355,9 @@ VoipResult VoipCore::ReceivedRTPPacket( return VoipResult::kOk; } -VoipResult VoipCore::ReceivedRTCPPacket( - ChannelId channel_id, - rtc::ArrayView rtcp_packet) { - rtc::scoped_refptr channel = GetChannel(channel_id); +VoipResult VoipCore::ReceivedRTCPPacket(ChannelId channel_id, + ArrayView rtcp_packet) { + scoped_refptr channel = GetChannel(channel_id); if (!channel) { return VoipResult::kInvalidArgument; @@ -374,14 +371,14 @@ VoipResult VoipCore::ReceivedRTCPPacket( VoipResult VoipCore::SetSendCodec(ChannelId channel_id, int payload_type, const SdpAudioFormat& encoder_format) { - rtc::scoped_refptr channel = GetChannel(channel_id); + scoped_refptr channel = GetChannel(channel_id); if (!channel) { return VoipResult::kInvalidArgument; } - auto encoder = encoder_factory_->MakeAudioEncoder( - payload_type, encoder_format, absl::nullopt); + auto encoder = encoder_factory_->Create(env_, encoder_format, + {.payload_type = payload_type}); channel->SetEncoder(payload_type, encoder_format, std::move(encoder)); return VoipResult::kOk; @@ -390,7 +387,7 @@ VoipResult VoipCore::SetSendCodec(ChannelId channel_id, VoipResult VoipCore::SetReceiveCodecs( ChannelId channel_id, const std::map& decoder_specs) { - rtc::scoped_refptr channel = GetChannel(channel_id); + scoped_refptr channel = GetChannel(channel_id); if (!channel) { return VoipResult::kInvalidArgument; @@ -404,7 +401,7 @@ VoipResult VoipCore::SetReceiveCodecs( VoipResult VoipCore::RegisterTelephoneEventType(ChannelId channel_id, int rtp_payload_type, int sample_rate_hz) { - rtc::scoped_refptr channel = GetChannel(channel_id); + scoped_refptr channel = GetChannel(channel_id); if (!channel) { 
return VoipResult::kInvalidArgument; @@ -418,7 +415,7 @@ VoipResult VoipCore::RegisterTelephoneEventType(ChannelId channel_id, VoipResult VoipCore::SendDtmfEvent(ChannelId channel_id, DtmfEvent dtmf_event, int duration_ms) { - rtc::scoped_refptr channel = GetChannel(channel_id); + scoped_refptr channel = GetChannel(channel_id); if (!channel) { return VoipResult::kInvalidArgument; @@ -431,7 +428,7 @@ VoipResult VoipCore::SendDtmfEvent(ChannelId channel_id, VoipResult VoipCore::GetIngressStatistics(ChannelId channel_id, IngressStatistics& ingress_stats) { - rtc::scoped_refptr channel = GetChannel(channel_id); + scoped_refptr channel = GetChannel(channel_id); if (!channel) { return VoipResult::kInvalidArgument; @@ -444,7 +441,7 @@ VoipResult VoipCore::GetIngressStatistics(ChannelId channel_id, VoipResult VoipCore::GetChannelStatistics(ChannelId channel_id, ChannelStatistics& channel_stats) { - rtc::scoped_refptr channel = GetChannel(channel_id); + scoped_refptr channel = GetChannel(channel_id); if (!channel) { return VoipResult::kInvalidArgument; @@ -456,7 +453,7 @@ VoipResult VoipCore::GetChannelStatistics(ChannelId channel_id, } VoipResult VoipCore::SetInputMuted(ChannelId channel_id, bool enable) { - rtc::scoped_refptr channel = GetChannel(channel_id); + scoped_refptr channel = GetChannel(channel_id); if (!channel) { return VoipResult::kInvalidArgument; @@ -469,7 +466,7 @@ VoipResult VoipCore::SetInputMuted(ChannelId channel_id, bool enable) { VoipResult VoipCore::GetInputVolumeInfo(ChannelId channel_id, VolumeInfo& input_volume) { - rtc::scoped_refptr channel = GetChannel(channel_id); + scoped_refptr channel = GetChannel(channel_id); if (!channel) { return VoipResult::kInvalidArgument; @@ -484,7 +481,7 @@ VoipResult VoipCore::GetInputVolumeInfo(ChannelId channel_id, VoipResult VoipCore::GetOutputVolumeInfo(ChannelId channel_id, VolumeInfo& output_volume) { - rtc::scoped_refptr channel = GetChannel(channel_id); + scoped_refptr channel = GetChannel(channel_id); if (!channel) { return VoipResult::kInvalidArgument; diff --git a/audio/voip/voip_core.h b/audio/voip/voip_core.h index 6c3aec6fa2..01933623c2 100644 --- a/audio/voip/voip_core.h +++ b/audio/voip/voip_core.h @@ -17,10 +17,12 @@ #include #include +#include "api/audio/audio_device.h" +#include "api/audio/audio_processing.h" #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_encoder_factory.h" +#include "api/environment/environment.h" #include "api/scoped_refptr.h" -#include "api/task_queue/task_queue_factory.h" #include "api/voip/voip_base.h" #include "api/voip/voip_codec.h" #include "api/voip/voip_dtmf.h" @@ -30,9 +32,7 @@ #include "api/voip/voip_volume_control.h" #include "audio/audio_transport_impl.h" #include "audio/voip/audio_channel.h" -#include "modules/audio_device/include/audio_device.h" #include "modules/audio_mixer/audio_mixer_impl.h" -#include "modules/audio_processing/include/audio_processing.h" #include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -52,12 +52,11 @@ class VoipCore : public VoipEngine, public VoipStatistics, public VoipVolumeControl { public: - // Construct VoipCore with provided arguments. 
- VoipCore(rtc::scoped_refptr encoder_factory, - rtc::scoped_refptr decoder_factory, - std::unique_ptr task_queue_factory, - rtc::scoped_refptr audio_device_module, - rtc::scoped_refptr audio_processing); + VoipCore(const Environment& env, + scoped_refptr encoder_factory, + scoped_refptr decoder_factory, + scoped_refptr audio_device_module, + scoped_refptr audio_processing); ~VoipCore() override = default; // Implements VoipEngine interfaces. @@ -70,7 +69,7 @@ class VoipCore : public VoipEngine, // Implements VoipBase interfaces. ChannelId CreateChannel(Transport* transport, - absl::optional local_ssrc) override; + std::optional local_ssrc) override; VoipResult ReleaseChannel(ChannelId channel_id) override; VoipResult StartSend(ChannelId channel_id) override; VoipResult StopSend(ChannelId channel_id) override; @@ -78,12 +77,10 @@ class VoipCore : public VoipEngine, VoipResult StopPlayout(ChannelId channel_id) override; // Implements VoipNetwork interfaces. - VoipResult ReceivedRTPPacket( - ChannelId channel_id, - rtc::ArrayView rtp_packet) override; - VoipResult ReceivedRTCPPacket( - ChannelId channel_id, - rtc::ArrayView rtcp_packet) override; + VoipResult ReceivedRTPPacket(ChannelId channel_id, + ArrayView rtp_packet) override; + VoipResult ReceivedRTCPPacket(ChannelId channel_id, + ArrayView rtcp_packet) override; // Implements VoipCodec interfaces. VoipResult SetSendCodec(ChannelId channel_id, @@ -127,7 +124,7 @@ class VoipCore : public VoipEngine, // Fetches the corresponding AudioChannel assigned with given `channel`. // Returns nullptr if not found. - rtc::scoped_refptr GetChannel(ChannelId channel_id); + scoped_refptr GetChannel(ChannelId channel_id); // Updates AudioTransportImpl with a new set of actively sending AudioSender // (AudioEgress). This needs to be invoked whenever StartSend/StopSend is @@ -136,24 +133,24 @@ class VoipCore : public VoipEngine, bool UpdateAudioTransportWithSenders(); // Synchronization for these are handled internally. - rtc::scoped_refptr encoder_factory_; - rtc::scoped_refptr decoder_factory_; - std::unique_ptr task_queue_factory_; + const Environment env_; + scoped_refptr encoder_factory_; + scoped_refptr decoder_factory_; // Synchronization is handled internally by AudioProcessing. // Must be placed before `audio_device_module_` for proper destruction. - rtc::scoped_refptr audio_processing_; + scoped_refptr audio_processing_; // Synchronization is handled internally by AudioMixer. // Must be placed before `audio_device_module_` for proper destruction. - rtc::scoped_refptr audio_mixer_; + scoped_refptr audio_mixer_; // Synchronization is handled internally by AudioTransportImpl. // Must be placed before `audio_device_module_` for proper destruction. std::unique_ptr audio_transport_; // Synchronization is handled internally by AudioDeviceModule. - rtc::scoped_refptr audio_device_module_; + scoped_refptr audio_device_module_; Mutex lock_; @@ -162,7 +159,7 @@ class VoipCore : public VoipEngine, // Container to track currently active AudioChannel objects mapped by // ChannelId. - std::unordered_map> channels_ + std::unordered_map> channels_ RTC_GUARDED_BY(lock_); // Boolean flag to ensure initialization only occurs once. 
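The audio/voip hunks above all follow the same migration: the separate Clock* / TaskQueueFactory* constructor arguments are folded into a single Environment, and encoders are obtained through AudioEncoderFactory::Create(env, format, {.payload_type = ...}) instead of MakeAudioEncoder(payload_type, format, absl::nullopt). The sketch below is a minimal illustration of the resulting wiring, not the actual test code: the template arguments stripped from the hunks (AudioEgress, ModuleRtpRtcpImpl2, VoipCore, AudioEncoderFactory, AudioDeviceModule, AudioProcessing) and the helper function names are assumptions.

    // Sketch only: mirrors the test SetUp() code in the hunks above; stripped
    // template arguments and function names are assumptions.
    #include "api/environment/environment.h"
    #include "api/environment/environment_factory.h"

    void WireUpEgressForTest(GlobalSimulatedTimeController& time_controller,
                             Transport& transport,
                             AudioEncoderFactory& encoder_factory) {
      // One Environment now carries the clock and the task queue factory.
      const Environment env = CreateEnvironment(
          time_controller.GetClock(), time_controller.GetTaskQueueFactory());

      // RTP stack: the clock no longer lives in RtpRtcpInterface::Configuration;
      // it is taken from the Environment passed to the constructor.
      RtpRtcpInterface::Configuration rtp_config;
      rtp_config.audio = true;
      rtp_config.outgoing_transport = &transport;
      rtp_config.local_media_ssrc = 0xdeadc0de;
      auto rtp_rtcp = std::make_unique<ModuleRtpRtcpImpl2>(env, rtp_config);

      // Egress: Environment replaces the (clock, task queue factory) pair, and
      // the encoder comes from AudioEncoderFactory::Create() with a designated
      // initializer for the payload type.
      auto egress = std::make_unique<AudioEgress>(env, rtp_rtcp.get());
      constexpr int kPcmuPayload = 0;
      const SdpAudioFormat kPcmuFormat("pcmu", 8000, 1);
      egress->SetEncoder(kPcmuPayload, kPcmuFormat,
                         encoder_factory.Create(env, kPcmuFormat,
                                                {.payload_type = kPcmuPayload}));
      egress->StartSend();
    }

    // VoipCore now takes the Environment first and no longer owns a
    // TaskQueueFactory; CreateChannel accepts an optional local SSRC
    // (std::nullopt lets VoipCore pick a random one via Random(TimeMicros())).
    std::unique_ptr<VoipCore> MakeVoipCore(
        scoped_refptr<AudioDeviceModule> audio_device,
        scoped_refptr<AudioProcessing> audio_processing) {
      return std::make_unique<VoipCore>(
          CreateEnvironment(), CreateBuiltinAudioEncoderFactory(),
          CreateBuiltinAudioDecoderFactory(), std::move(audio_device),
          std::move(audio_processing));
    }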
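The "Must be placed before `audio_device_module_` for proper destruction" comments in voip_core.h rely on the C++ guarantee that non-static data members are destroyed in reverse declaration order: the audio device module, declared last, is torn down before the mixer, transport and processing objects it calls into. A tiny self-contained illustration of that language rule (not WebRTC code):

    #include <cstdio>

    struct Member {
      explicit Member(const char* name) : name_(name) {}
      ~Member() { std::printf("destroying %s\n", name_); }
      const char* name_;
    };

    struct CoreLike {
      Member audio_processing_{"audio_processing_"};        // declared first
      Member audio_device_module_{"audio_device_module_"};  // declared last
    };

    int main() {
      CoreLike core;
      // When `core` goes out of scope this prints
      //   destroying audio_device_module_
      //   destroying audio_processing_
      // i.e. the last-declared member is destroyed first, which is exactly the
      // ordering the comments in voip_core.h depend on.
      return 0;
    }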
diff --git a/build_overrides/OWNERS b/build_overrides/OWNERS index 48e6927746..d4a4d8f1e7 100644 --- a/build_overrides/OWNERS +++ b/build_overrides/OWNERS @@ -1 +1,3 @@ +jansson@webrtc.org +jleconte@webrtc.org mbonadei@webrtc.org diff --git a/build_overrides/build.gni b/build_overrides/build.gni index cfa795870a..10cedb7525 100644 --- a/build_overrides/build.gni +++ b/build_overrides/build.gni @@ -12,12 +12,10 @@ enable_java_templates = true # Enables assertions on safety checks in libc++. enable_safe_libcxx = true -# Don't set this variable to true when building stadalone WebRTC, it is +# Don't set this variable to true when building standalone WebRTC, it is # only needed to support both WebRTC standalone and Chromium builds. build_with_chromium = false -use_cxx17 = !is_android - # Use our own suppressions files. asan_suppressions_file = "//build/sanitizers/asan_suppressions.cc" lsan_suppressions_file = "//tools_webrtc/sanitizers/lsan_suppressions_webrtc.cc" @@ -34,6 +32,9 @@ ubsan_vptr_ignorelist_path = # so we just ignore that assert. See https://crbug.com/648948 for more info. ignore_elf32_limitations = true +perfetto_build_with_embedder = true +enable_perfetto_trace_processor = true + # Use bundled hermetic Xcode installation maintainted by Chromium, # except for local iOS builds where it's unsupported. # Allow for mac cross compile on linux machines. @@ -52,6 +53,8 @@ declare_args() { # purposes. In any case, it doesn't depend on //third_party/perfetto which # is used for base tracing, so this feature is disabled. enable_base_tracing = false + + # TODO(b/336718643): Remove use_perfetto_client_library_flag. use_perfetto_client_library = false use_perfetto_trace_processor = false diff --git a/build_overrides/partition_alloc.gni b/build_overrides/partition_alloc.gni index 044036879a..3f674f8e58 100644 --- a/build_overrides/partition_alloc.gni +++ b/build_overrides/partition_alloc.gni @@ -6,7 +6,34 @@ # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. -# Use default values for PartitionAlloc as standalone library from -# base/allocator/partition_allocator/build_overrides/partition_alloc.gni -import( - "//base/allocator/partition_allocator/build_overrides/partition_alloc.gni") +# Dependencies from //build defining variables used by partition_alloc. +import("//build/config/android/config.gni") +import("//build/config/cast.gni") +import("//build/config/chromeos/ui_mode.gni") +import("//build/config/compiler/compiler.gni") +import("//build/config/cronet/config.gni") +import("//build/config/dcheck_always_on.gni") +import("//build/config/logging.gni") +partition_alloc_enable_arc_config = "//build/config/compiler:enable_arc" + +# partition_alloc is performance critical and it should generally be optimized +# for speed, even in debug mode. WebRTC doesn't really need its debug version. +partition_alloc_remove_configs = + [ "//build/config/compiler:default_optimization" ] +partition_alloc_add_configs = [ "//build/config/compiler:optimize_speed" ] + +# Disable partition_alloc for WebRTC standalone builds. +use_partition_alloc_as_malloc_default = false +use_allocator_shim_default = false +enable_backup_ref_ptr_support_default = false +enable_backup_ref_ptr_slow_checks_default = false +enable_dangling_raw_ptr_checks_default = false + +# raw_ptr behave like naked pointers, except they are always initialized. This +# avoids relying on undefined behaviors. 
+raw_ptr_zero_on_construct_default = true +raw_ptr_zero_on_move_default = true +raw_ptr_zero_on_destruct_default = false + +# WebRTC used C++20 by default: +assert_cpp20_default = true diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/__init__.py b/build_overrides/protobuf.gni similarity index 71% rename from modules/audio_processing/test/py_quality_assessment/quality_assessment/__init__.py rename to build_overrides/protobuf.gni index b870dfaef3..a4230f9cdb 100644 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/__init__.py +++ b/build_overrides/protobuf.gni @@ -1,7 +1,9 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# Copyright (c) 2025 The WebRTC project authors. All Rights Reserved. # # Use of this source code is governed by a BSD-style license # that can be found in the LICENSE file in the root of the source # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. + +# No overrides currently specified because the defaults work fine. diff --git a/call/BUILD.gn b/call/BUILD.gn index 39cbc0a9c1..073b4099e6 100644 --- a/call/BUILD.gn +++ b/call/BUILD.gn @@ -38,50 +38,48 @@ rtc_library("call_interfaces") { deps = [ ":audio_sender_interface", + ":payload_type", ":receive_stream_interface", ":rtp_interfaces", - ":video_stream_api", + ":video_receive_stream_api", + ":video_send_stream_api", "../api:fec_controller_api", "../api:field_trials_view", "../api:frame_transformer_interface", "../api:network_state_predictor_api", - "../api:rtc_error", + "../api:ref_count", "../api:rtp_headers", "../api:rtp_parameters", "../api:rtp_sender_interface", "../api:scoped_refptr", "../api:transport_api", "../api/adaptation:resource_adaptation_api", - "../api/audio:audio_frame_processor", + "../api/audio:audio_device", "../api/audio:audio_mixer_api", + "../api/audio:audio_processing", + "../api/audio:audio_processing_statistics", "../api/audio_codecs:audio_codecs_api", + "../api/crypto:frame_decryptor_interface", "../api/crypto:frame_encryptor_interface", "../api/crypto:options", + "../api/environment", "../api/metronome", "../api/neteq:neteq_api", "../api/task_queue", "../api/transport:bitrate_settings", "../api/transport:network_control", + "../api/units:time_delta", + "../api/units:timestamp", "../modules/async_audio_processing", - "../modules/audio_device", - "../modules/audio_processing", - "../modules/audio_processing:api", - "../modules/audio_processing:audio_processing_statistics", "../modules/rtp_rtcp", "../modules/rtp_rtcp:rtp_rtcp_format", - "../rtc_base:audio_format_to_string", "../rtc_base:checks", "../rtc_base:copy_on_write_buffer", - "../rtc_base:network_route", - "../rtc_base:refcount", "../rtc_base:stringutils", "../rtc_base/network:sent_packet", - ] - absl_deps = [ + "../video/config:encoder_config", "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/functional:bind_front", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -111,26 +109,26 @@ rtc_library("rtp_interfaces") { deps = [ "../api:array_view", "../api:fec_controller_api", - "../api:field_trials_view", "../api:frame_transformer_interface", "../api:network_state_predictor_api", "../api:rtp_headers", + "../api:rtp_packet_sender", "../api:rtp_parameters", + 
"../api:scoped_refptr", "../api/crypto:options", - "../api/rtc_event_log", + "../api/environment", + "../api/transport:bandwidth_estimation_settings", "../api/transport:bitrate_settings", "../api/transport:network_control", + "../api/units:time_delta", "../api/units:timestamp", "../common_video:frame_counts", "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:checks", - "../rtc_base:rtc_task_queue", + "../rtc_base:network_route", "../rtc_base:stringutils", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -147,20 +145,17 @@ rtc_library("rtp_receiver") { deps = [ ":rtp_interfaces", "../api:array_view", - "../api:rtp_headers", "../api:sequence_checker", "../modules/rtp_rtcp", "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:checks", "../rtc_base:logging", + "../rtc_base:macromagic", "../rtc_base:stringutils", "../rtc_base/containers:flat_map", "../rtc_base/containers:flat_set", "../rtc_base/system:no_unique_address", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -182,25 +177,41 @@ rtc_library("rtp_sender") { "../api:bitrate_allocation", "../api:fec_controller_api", "../api:field_trials_view", - "../api:network_state_predictor_api", + "../api:frame_transformer_interface", + "../api:rtp_headers", + "../api:rtp_packet_sender", "../api:rtp_parameters", + "../api:scoped_refptr", "../api:sequence_checker", "../api:transport_api", + "../api/crypto:options", + "../api/environment", "../api/rtc_event_log", + "../api/task_queue", "../api/task_queue:pending_task_safety_flag", - "../api/task_queue:task_queue", - "../api/transport:field_trial_based_config", + "../api/transport:bandwidth_estimation_settings", + "../api/transport:bitrate_settings", "../api/transport:goog_cc", "../api/transport:network_control", + "../api/transport/rtp:dependency_descriptor", "../api/units:data_rate", + "../api/units:data_size", + "../api/units:frequency", "../api/units:time_delta", "../api/units:timestamp", + "../api/video:encoded_image", + "../api/video:render_resolution", + "../api/video:video_bitrate_allocation", + "../api/video:video_codec_constants", "../api/video:video_frame", + "../api/video:video_frame_type", "../api/video:video_layers_allocation", "../api/video:video_rtp_headers", "../api/video_codecs:video_codecs_api", + "../common_video:frame_counts", + "../common_video/generic_frame_descriptor", "../logging:rtc_event_bwe", - "../modules/congestion_controller", + "../modules:module_fec_api", "../modules/congestion_controller/rtp:control_handler", "../modules/congestion_controller/rtp:transport_feedback", "../modules/pacing", @@ -216,20 +227,20 @@ rtc_library("rtp_sender") { "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:network_route", - "../rtc_base:race_checker", "../rtc_base:random", "../rtc_base:rate_limiter", - "../rtc_base:rtc_task_queue", + "../rtc_base:safe_conversions", "../rtc_base:timeutils", + "../rtc_base/experiments:field_trial_parser", + "../rtc_base/network:sent_packet", "../rtc_base/synchronization:mutex", + "../rtc_base/system:no_unique_address", "../rtc_base/task_utils:repeating_task", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/base:nullability", "//third_party/abseil-cpp/absl/container:inlined_vector", 
- "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", - "//third_party/abseil-cpp/absl/types:variant", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -239,15 +250,10 @@ rtc_library("bitrate_configurator") { "rtp_bitrate_configurator.h", ] deps = [ - ":rtp_interfaces", - - # For api/bitrate_constraints.h - "../api:libjingle_peerconnection_api", "../api/transport:bitrate_settings", "../api/units:data_rate", "../rtc_base:checks", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("bitrate_allocator") { @@ -257,28 +263,26 @@ rtc_library("bitrate_allocator") { ] deps = [ "../api:bitrate_allocation", + "../api:field_trials_view", "../api:sequence_checker", "../api/transport:network_control", "../api/units:data_rate", "../api/units:time_delta", "../rtc_base:checks", "../rtc_base:logging", + "../rtc_base:macromagic", + "../rtc_base:safe_conversions", "../rtc_base:safe_minmax", + "../rtc_base/experiments:field_trial_parser", "../rtc_base/system:no_unique_address", - "../system_wrappers", - "../system_wrappers:field_trial", "../system_wrappers:metrics", + "//third_party/abseil-cpp/absl/algorithm:container", ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } rtc_library("call") { sources = [ "call.cc", - "call_factory.cc", - "call_factory.h", - "degraded_call.cc", - "degraded_call.h", "flexfec_receive_stream_impl.cc", "flexfec_receive_stream_impl.h", "receive_time_calculator.cc", @@ -288,45 +292,50 @@ rtc_library("call") { deps = [ ":bitrate_allocator", ":call_interfaces", - ":fake_network", + ":payload_type", + ":payload_type_picker", + ":receive_stream_interface", ":rtp_interfaces", ":rtp_receiver", ":rtp_sender", - ":simulated_network", ":version", - ":video_stream_api", + ":video_receive_stream_api", + ":video_send_stream_api", "../api:array_view", - "../api:callfactory_api", "../api:fec_controller_api", "../api:field_trials_view", + "../api:rtc_error", "../api:rtp_headers", "../api:rtp_parameters", + "../api:scoped_refptr", "../api:sequence_checker", - "../api:simulated_network_api", - "../api:transport_api", + "../api/adaptation:resource_adaptation_api", + "../api/environment", "../api/rtc_event_log", + "../api/task_queue", "../api/task_queue:pending_task_safety_flag", + "../api/transport:bitrate_settings", "../api/transport:network_control", + "../api/units:data_rate", + "../api/units:data_size", "../api/units:time_delta", - "../api/video_codecs:video_codecs_api", + "../api/units:timestamp", "../audio", "../logging:rtc_event_audio", "../logging:rtc_event_rtp_rtcp", "../logging:rtc_event_video", "../logging:rtc_stream_config", + "../media:codec", "../modules/congestion_controller", - "../modules/pacing", "../modules/rtp_rtcp", "../modules/rtp_rtcp:rtp_rtcp_format", "../modules/video_coding", + "../modules/video_coding:nack_requester", "../rtc_base:checks", "../rtc_base:copy_on_write_buffer", "../rtc_base:event_tracer", "../rtc_base:logging", "../rtc_base:macromagic", - "../rtc_base:rate_limiter", - "../rtc_base:rtc_event", - "../rtc_base:rtc_task_queue", "../rtc_base:safe_minmax", "../rtc_base:stringutils", "../rtc_base:threading", @@ -336,18 +345,40 @@ rtc_library("call") { "../rtc_base/system:no_unique_address", "../rtc_base/task_utils:repeating_task", "../system_wrappers", - "../system_wrappers:field_trial", "../system_wrappers:metrics", "../video", "../video:decode_synchronizer", "../video/config:encoder_config", 
"adaptation:resource_adaptation", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:bind_front", - "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("payload_type_picker") { + sources = [ + "payload_type_picker.cc", + "payload_type_picker.h", + ] + deps = [ + ":payload_type", + "../api:rtc_error", + "../api/audio_codecs:audio_codecs_api", + "../media:codec", + "../media:media_constants", + "../rtc_base:checks", + "../rtc_base:logging", + "../rtc_base:stringutils", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_source_set("payload_type") { + sources = [ "payload_type.h" ] + deps = [ + "../api:rtc_error", + "../media:codec", + "../rtc_base:strong_alias", ] } @@ -355,34 +386,28 @@ rtc_source_set("receive_stream_interface") { sources = [ "receive_stream.h" ] deps = [ "../api:frame_transformer_interface", - "../api:rtp_parameters", + "../api:rtp_headers", "../api:scoped_refptr", "../api/crypto:frame_decryptor_interface", "../api/transport/rtp:rtp_source", - "../modules/rtp_rtcp:rtp_rtcp_format", ] } -rtc_library("video_stream_api") { +rtc_library("video_send_stream_api") { sources = [ - "video_receive_stream.cc", - "video_receive_stream.h", "video_send_stream.cc", "video_send_stream.h", ] deps = [ - ":receive_stream_interface", ":rtp_interfaces", "../api:frame_transformer_interface", - "../api:rtp_headers", "../api:rtp_parameters", "../api:rtp_sender_interface", "../api:scoped_refptr", "../api:transport_api", "../api/adaptation:resource_adaptation_api", - "../api/crypto:frame_encryptor_interface", "../api/crypto:options", - "../api/video:recordable_encoded_frame", + "../api/units:data_rate", "../api/video:video_frame", "../api/video:video_rtp_headers", "../api/video:video_stream_encoder", @@ -395,39 +420,42 @@ rtc_library("video_stream_api") { "../rtc_base:stringutils", "../video/config:encoder_config", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/types:optional", - ] } - -rtc_library("simulated_network") { +rtc_library("video_receive_stream_api") { sources = [ - "simulated_network.cc", - "simulated_network.h", + "video_receive_stream.cc", + "video_receive_stream.h", ] deps = [ - "../api:sequence_checker", - "../api:simulated_network_api", - "../api/units:data_rate", - "../api/units:data_size", + ":receive_stream_interface", + ":rtp_interfaces", + "../api:frame_transformer_interface", + "../api:rtp_headers", + "../api:scoped_refptr", + "../api:transport_api", + "../api/crypto:frame_decryptor_interface", + "../api/crypto:options", "../api/units:time_delta", "../api/units:timestamp", - "../rtc_base:checks", - "../rtc_base:macromagic", - "../rtc_base:race_checker", - "../rtc_base:random", - "../rtc_base/synchronization:mutex", + "../api/video:recordable_encoded_frame", + "../api/video:video_frame", + "../api/video:video_rtp_headers", + "../api/video_codecs:video_codecs_api", + "../common_video:frame_counts", + "../modules/rtp_rtcp:rtp_rtcp_format", + "../rtc_base:stringutils", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("simulated_network") { + # TODO(bugs.webrtc.org/14525): Remove target and directly depend on test/network:simulated_network. 
+ sources = [ "simulated_network.h" ] + deps = [ "../test/network:simulated_network" ] } rtc_source_set("simulated_packet_receiver") { sources = [ "simulated_packet_receiver.h" ] - deps = [ - ":call_interfaces", - "../api:simulated_network_api", - ] + deps = [ ":call_interfaces" ] } rtc_library("fake_network") { @@ -436,16 +464,15 @@ rtc_library("fake_network") { "fake_network_pipe.h", ] deps = [ - ":call_interfaces", - ":simulated_network", ":simulated_packet_receiver", + "../api:array_view", "../api:rtp_parameters", - "../api:sequence_checker", "../api:simulated_network_api", "../api:transport_api", "../api/units:timestamp", "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:checks", + "../rtc_base:copy_on_write_buffer", "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base/synchronization:mutex", @@ -463,6 +490,7 @@ if (rtc_include_tests) { "bitrate_estimator_tests.cc", "call_unittest.cc", "flexfec_receive_stream_unittest.cc", + "payload_type_picker_unittest.cc", "receive_time_calculator_unittest.cc", "rtp_bitrate_configurator_unittest.cc", "rtp_demuxer_unittest.cc", @@ -476,38 +504,61 @@ if (rtc_include_tests) { ":call", ":call_interfaces", ":mock_rtp_interfaces", + ":payload_type", + ":payload_type_picker", ":rtp_interfaces", ":rtp_receiver", ":rtp_sender", - ":simulated_network", + ":video_receive_stream_api", + ":video_send_stream_api", "../api:array_view", + "../api:bitrate_allocation", "../api:create_frame_generator", + "../api:frame_transformer_interface", + "../api:make_ref_counted", "../api:mock_audio_mixer", + "../api:mock_frame_transformer", "../api:rtp_headers", "../api:rtp_parameters", + "../api:scoped_refptr", + "../api:simulated_network_api", "../api:transport_api", - "../api/audio_codecs:builtin_audio_decoder_factory", - "../api/rtc_event_log", - "../api/task_queue:default_task_queue_factory", + "../api/adaptation:resource_adaptation_api", + "../api/crypto:options", + "../api/environment", + "../api/environment:environment_factory", + "../api/test/network_emulation", "../api/test/video:function_video_factory", + "../api/transport:bitrate_settings", "../api/transport:field_trial_based_config", + "../api/transport:network_control", + "../api/transport/rtp:dependency_descriptor", + "../api/units:data_rate", + "../api/units:data_size", + "../api/units:time_delta", "../api/units:timestamp", "../api/video:builtin_video_bitrate_allocator_factory", + "../api/video:encoded_image", + "../api/video:video_codec_constants", "../api/video:video_frame", + "../api/video:video_frame_type", "../api/video:video_rtp_headers", + "../api/video_codecs:video_codecs_api", "../audio", + "../common_video:frame_counts", + "../common_video/generic_frame_descriptor", + "../media:codec", + "../media:media_constants", "../modules/audio_device:mock_audio_device", - "../modules/audio_mixer", - "../modules/audio_mixer:audio_mixer_impl", "../modules/audio_processing:mocks", - "../modules/congestion_controller", - "../modules/pacing", "../modules/rtp_rtcp", "../modules/rtp_rtcp:mock_rtp_rtcp", "../modules/rtp_rtcp:rtp_rtcp_format", + "../modules/rtp_rtcp:rtp_video_header", "../modules/video_coding", "../modules/video_coding:codec_globals_headers", "../modules/video_coding:video_codec_interface", + "../rtc_base:buffer", "../rtc_base:checks", "../rtc_base:logging", "../rtc_base:macromagic", @@ -519,15 +570,11 @@ if (rtc_include_tests) { "../rtc_base:threading", "../rtc_base:timeutils", "../rtc_base/synchronization:mutex", - "../system_wrappers", "../test:audio_codec_mocks", - 
"../test:direct_transport", "../test:encoder_settings", "../test:explicit_key_value_config", "../test:fake_video_codecs", - "../test:field_trial", "../test:frame_generator_capturer", - "../test:mock_frame_transformer", "../test:mock_transport", "../test:run_loop", "../test:scoped_key_value_config", @@ -535,19 +582,13 @@ if (rtc_include_tests) { "../test:test_support", "../test:video_test_constants", "../test/scenario", - "../test/time_controller:time_controller", + "../test/time_controller", "../video", + "../video/config:encoder_config", "adaptation:resource_adaptation_test_utilities", - "//testing/gmock", - "//testing/gtest", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - "//third_party/abseil-cpp/absl/types:variant", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -561,61 +602,64 @@ if (rtc_include_tests) { ] deps = [ ":call_interfaces", - ":simulated_network", - ":video_stream_api", + ":fake_network", + ":video_receive_stream_api", + ":video_send_stream_api", + "../api:array_view", + "../api:field_trials_view", + "../api:make_ref_counted", "../api:rtc_event_log_output_file", + "../api:rtp_parameters", + "../api:scoped_refptr", + "../api:sequence_checker", "../api:simulated_network_api", + "../api/audio:audio_device", + "../api/audio:builtin_audio_processing_builder", "../api/audio_codecs:builtin_audio_encoder_factory", + "../api/environment", "../api/numerics", "../api/rtc_event_log", "../api/rtc_event_log:rtc_event_log_factory", "../api/task_queue", - "../api/task_queue:default_task_queue_factory", "../api/task_queue:pending_task_safety_flag", "../api/test/metrics:global_metrics_logger_and_exporter", "../api/test/metrics:metric", + "../api/test/video:function_video_factory", + "../api/transport:bitrate_settings", + "../api/units:data_rate", + "../api/units:time_delta", + "../api/units:timestamp", "../api/video:builtin_video_bitrate_allocator_factory", "../api/video:video_bitrate_allocation", + "../api/video:video_bitrate_allocator_factory", + "../api/video:video_frame", "../api/video_codecs:video_codecs_api", "../media:rtc_internal_video_codecs", "../media:rtc_simulcast_encoder_adapter", - "../modules/audio_coding", - "../modules/audio_device", "../modules/audio_device:test_audio_device_module", "../modules/audio_mixer:audio_mixer_impl", - "../modules/rtp_rtcp", - "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:checks", "../rtc_base:logging", - "../rtc_base:macromagic", - "../rtc_base:platform_thread", "../rtc_base:rtc_event", "../rtc_base:stringutils", "../rtc_base:task_queue_for_test", "../rtc_base:threading", - "../rtc_base:timeutils", - "../rtc_base/synchronization:mutex", "../rtc_base/task_utils:repeating_task", - "../system_wrappers", "../system_wrappers:metrics", - "../test:direct_transport", "../test:encoder_settings", "../test:fake_video_codecs", "../test:field_trial", "../test:fileutils", "../test:frame_generator_capturer", - "../test:null_transport", "../test:test_common", + "../test:test_flags", "../test:test_support", "../test:video_test_common", "../test:video_test_constants", - "../video", + "../test/network:simulated_network", "../video/config:encoder_config", - "//testing/gtest", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", - "//third_party/abseil-cpp/absl/strings", + 
"//third_party/abseil-cpp/absl/strings:string_view", ] } } @@ -630,18 +674,21 @@ if (rtc_include_tests) { ] deps = [ ":rtp_interfaces", + "../api:fec_controller_api", "../api:frame_transformer_interface", - "../api:libjingle_peerconnection_api", - "../api/crypto:frame_encryptor_interface", - "../api/crypto:options", + "../api:scoped_refptr", + "../api/transport:bandwidth_estimation_settings", "../api/transport:bitrate_settings", + "../api/transport:network_control", + "../api/units:timestamp", "../modules/pacing", + "../modules/rtp_rtcp", + "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:network_route", - "../rtc_base:rate_limiter", "../rtc_base/network:sent_packet", "../test:test_support", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_source_set("mock_bitrate_allocator") { testonly = true @@ -655,33 +702,52 @@ if (rtc_include_tests) { rtc_source_set("mock_call_interfaces") { testonly = true - sources = [ "test/mock_audio_send_stream.h" ] + sources = [ + "test/mock_audio_receive_stream.h", + "test/mock_audio_send_stream.h", + ] deps = [ ":call_interfaces", + "../api:frame_transformer_interface", + "../api:rtp_headers", + "../api:rtp_sender_interface", + "../api:scoped_refptr", + "../api/audio:audio_frame_api", + "../api/audio:audio_mixer_api", + "../api/audio_codecs:audio_codecs_api", + "../api/crypto:frame_decryptor_interface", + "../api/transport/rtp:rtp_source", "../test:test_support", ] } + rtc_source_set("fake_payload_type_suggester") { + testonly = true + sources = [ "fake_payload_type_suggester.h" ] + deps = [ + ":payload_type", + ":payload_type_picker", + "../api:rtc_error", + "../media:codec", + ] + } + rtc_library("fake_network_pipe_unittests") { testonly = true - sources = [ - "fake_network_pipe_unittest.cc", - "simulated_network_unittest.cc", - ] + sources = [ "fake_network_pipe_unittest.cc" ] deps = [ ":fake_network", - ":simulated_network", "../api:simulated_network_api", "../api/units:data_rate", "../api/units:time_delta", "../api/units:timestamp", "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:checks", + "../rtc_base:copy_on_write_buffer", "../system_wrappers", "../test:test_support", - "//testing/gtest", + "../test/network:simulated_network", ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } } diff --git a/call/DEPS b/call/DEPS index b1b66ac3ce..98a8a4b68d 100644 --- a/call/DEPS +++ b/call/DEPS @@ -28,5 +28,50 @@ specific_include_rules = { ], "call_perf_tests\.cc": [ "+media/engine", + ], + "simulated_network\.h": [ + "+test/network/simulated_network.h", + ], + "rtp_payload_params\.cc": [ + "+common_video/generic_frame_descriptor", + ], + "rtp_payload_params\.h": [ + "+common_video/generic_frame_descriptor", + ], + "rtp_payload_params_unittest\.cc": [ + "+common_video/generic_frame_descriptor", + ], + "rtp_video_sender\.cc": [ + "+common_video/frame_counts.h", + "+common_video/generic_frame_descriptor", + ], + "rtp_video_sender.h": [ + "+common_video/frame_counts.h", + ], + "rtp_video_sender_unittest.cc": [ + "+common_video/frame_counts.h", + "+common_video/generic_frame_descriptor", + ], + "payload_type\.h": [ + "+media/base/codec.h", + ], + "payload_type_picker\.h": [ + "+media/base/codec.h", + "+media/base/media_constants.h", + ], + "payload_type_picker\.cc": [ + "+media/base/codec.h", + "+media/base/codec_comparators.h", + "+media/base/media_constants.h", + ], + "payload_type_picker_unittest\.cc": [ + "+media/base/codec.h", + "+media/base/media_constants.h", + ], 
+ "call\.cc": [ + "+media/base/codec.h", + ], + "fake_payload_type_suggester": [ + "+media/base/codec.h", ] } diff --git a/call/OWNERS b/call/OWNERS index e275834bb4..d37ac06b3c 100644 --- a/call/OWNERS +++ b/call/OWNERS @@ -4,5 +4,6 @@ brandtr@webrtc.org tommi@webrtc.org mflodman@webrtc.org stefan@webrtc.org +perkj@webrtc.org per-file version.cc=webrtc-version-updater@webrtc-ci.iam.gserviceaccount.com diff --git a/call/adaptation/BUILD.gn b/call/adaptation/BUILD.gn index b69196f021..d3a8ad6f32 100644 --- a/call/adaptation/BUILD.gn +++ b/call/adaptation/BUILD.gn @@ -34,22 +34,19 @@ rtc_library("resource_adaptation") { deps = [ "../../api:field_trials_view", "../../api:make_ref_counted", + "../../api:ref_count", "../../api:rtp_parameters", "../../api:scoped_refptr", "../../api:sequence_checker", "../../api/adaptation:resource_adaptation_api", - "../../api/task_queue:task_queue", + "../../api/task_queue", "../../api/video:video_adaptation", "../../api/video:video_frame", - "../../api/video:video_stream_encoder", "../../api/video_codecs:video_codecs_api", - "../../modules/video_coding:video_coding_utility", "../../modules/video_coding/svc:scalability_mode_util", "../../rtc_base:checks", "../../rtc_base:logging", "../../rtc_base:macromagic", - "../../rtc_base:refcount", - "../../rtc_base:rtc_task_queue", "../../rtc_base:safe_conversions", "../../rtc_base:stringutils", "../../rtc_base/experiments:balanced_degradation_settings", @@ -57,12 +54,8 @@ rtc_library("resource_adaptation") { "../../rtc_base/system:no_unique_address", "../../video:video_stream_encoder_interface", "../../video/config:encoder_config", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - "//third_party/abseil-cpp/absl/types:variant", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -81,26 +74,26 @@ if (rtc_include_tests) { deps = [ ":resource_adaptation", ":resource_adaptation_test_utilities", + "../../api:rtc_error_matchers", + "../../api:rtp_parameters", "../../api:scoped_refptr", + "../../api:sequence_checker", "../../api/adaptation:resource_adaptation_api", - "../../api/task_queue:default_task_queue_factory", - "../../api/task_queue:task_queue", + "../../api/units:time_delta", "../../api/video:video_adaptation", + "../../api/video:video_frame", "../../api/video_codecs:video_codecs_api", "../../rtc_base:checks", - "../../rtc_base:gunit_helpers", + "../../rtc_base:macromagic", "../../rtc_base:rtc_event", - "../../rtc_base:rtc_task_queue", "../../rtc_base:stringutils", "../../rtc_base:task_queue_for_test", - "../../rtc_base/synchronization:mutex", - "../../test:field_trial", - "../../test:rtc_expect_death", + "../../rtc_base:threading", "../../test:scoped_key_value_config", "../../test:test_support", + "../../test:wait_until", "../../video/config:encoder_config", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("resource_adaptation_test_utilities") { @@ -121,17 +114,14 @@ if (rtc_include_tests) { ":resource_adaptation", "../../api:make_ref_counted", "../../api:scoped_refptr", - "../../api:sequence_checker", "../../api/adaptation:resource_adaptation_api", - "../../api/task_queue:task_queue", - "../../api/video:video_stream_encoder", + "../../api/video:video_adaptation", + "../../api/video:video_bitrate_allocation", + "../../api/video_codecs:video_codecs_api", "../../test:test_support", "../../video:video_stream_encoder_interface", 
"../../video/config:encoder_config", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } } diff --git a/call/adaptation/adaptation_constraint.h b/call/adaptation/adaptation_constraint.h index 9ad6414cd1..e3dad3a944 100644 --- a/call/adaptation/adaptation_constraint.h +++ b/call/adaptation/adaptation_constraint.h @@ -13,7 +13,6 @@ #include -#include "api/adaptation/resource.h" #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_input_state.h" diff --git a/call/adaptation/broadcast_resource_listener.cc b/call/adaptation/broadcast_resource_listener.cc index 505036db3d..5305964225 100644 --- a/call/adaptation/broadcast_resource_listener.cc +++ b/call/adaptation/broadcast_resource_listener.cc @@ -13,11 +13,15 @@ #include #include #include +#include #include "absl/strings/string_view.h" +#include "api/adaptation/resource.h" #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" #include "rtc_base/checks.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -33,7 +37,7 @@ class BroadcastResourceListener::AdapterResource : public Resource { MutexLock lock(&lock_); if (!listener_) return; - listener_->OnResourceUsageStateMeasured(rtc::scoped_refptr(this), + listener_->OnResourceUsageStateMeasured(scoped_refptr(this), usage_state); } @@ -52,7 +56,7 @@ class BroadcastResourceListener::AdapterResource : public Resource { }; BroadcastResourceListener::BroadcastResourceListener( - rtc::scoped_refptr source_resource) + scoped_refptr source_resource) : source_resource_(source_resource), is_listening_(false) { RTC_DCHECK(source_resource_); } @@ -61,7 +65,7 @@ BroadcastResourceListener::~BroadcastResourceListener() { RTC_DCHECK(!is_listening_); } -rtc::scoped_refptr BroadcastResourceListener::SourceResource() const { +scoped_refptr BroadcastResourceListener::SourceResource() const { return source_resource_; } @@ -80,28 +84,26 @@ void BroadcastResourceListener::StopListening() { is_listening_ = false; } -rtc::scoped_refptr -BroadcastResourceListener::CreateAdapterResource() { +scoped_refptr BroadcastResourceListener::CreateAdapterResource() { MutexLock lock(&lock_); RTC_DCHECK(is_listening_); - rtc::scoped_refptr adapter = - rtc::make_ref_counted(source_resource_->Name() + - "Adapter"); + scoped_refptr adapter = + make_ref_counted(source_resource_->Name() + "Adapter"); adapters_.push_back(adapter); return adapter; } void BroadcastResourceListener::RemoveAdapterResource( - rtc::scoped_refptr resource) { + scoped_refptr resource) { MutexLock lock(&lock_); auto it = std::find(adapters_.begin(), adapters_.end(), resource); RTC_DCHECK(it != adapters_.end()); adapters_.erase(it); } -std::vector> +std::vector> BroadcastResourceListener::GetAdapterResources() { - std::vector> resources; + std::vector> resources; MutexLock lock(&lock_); for (const auto& adapter : adapters_) { resources.push_back(adapter); @@ -110,7 +112,7 @@ BroadcastResourceListener::GetAdapterResources() { } void BroadcastResourceListener::OnResourceUsageStateMeasured( - rtc::scoped_refptr resource, + scoped_refptr resource, ResourceUsageState usage_state) { RTC_DCHECK_EQ(resource, source_resource_); MutexLock lock(&lock_); diff --git a/call/adaptation/broadcast_resource_listener.h b/call/adaptation/broadcast_resource_listener.h index 2c5a5c703b..9d4ae70dd6 100644 --- a/call/adaptation/broadcast_resource_listener.h +++ 
b/call/adaptation/broadcast_resource_listener.h @@ -16,6 +16,7 @@ #include "api/adaptation/resource.h" #include "api/scoped_refptr.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -35,39 +36,37 @@ namespace webrtc { // and DCHECK that a Resource's listener is never overwritten. class BroadcastResourceListener : public ResourceListener { public: - explicit BroadcastResourceListener( - rtc::scoped_refptr source_resource); + explicit BroadcastResourceListener(scoped_refptr source_resource); ~BroadcastResourceListener() override; - rtc::scoped_refptr SourceResource() const; + scoped_refptr SourceResource() const; void StartListening(); void StopListening(); // Creates a Resource that redirects any resource usage measurements that // BroadcastResourceListener receives to its listener. - rtc::scoped_refptr CreateAdapterResource(); + scoped_refptr CreateAdapterResource(); // Unregister the adapter from the BroadcastResourceListener; it will no // longer receive resource usage measurement and will no longer be referenced. // Use this to prevent memory leaks of old adapters. - void RemoveAdapterResource(rtc::scoped_refptr resource); - std::vector> GetAdapterResources(); + void RemoveAdapterResource(scoped_refptr resource); + std::vector> GetAdapterResources(); // ResourceListener implementation. - void OnResourceUsageStateMeasured(rtc::scoped_refptr resource, + void OnResourceUsageStateMeasured(scoped_refptr resource, ResourceUsageState usage_state) override; private: class AdapterResource; friend class AdapterResource; - const rtc::scoped_refptr source_resource_; + const scoped_refptr source_resource_; Mutex lock_; bool is_listening_ RTC_GUARDED_BY(lock_); // The AdapterResource unregisters itself prior to destruction, guaranteeing // that these pointers are safe to use. - std::vector> adapters_ - RTC_GUARDED_BY(lock_); + std::vector> adapters_ RTC_GUARDED_BY(lock_); }; } // namespace webrtc diff --git a/call/adaptation/broadcast_resource_listener_unittest.cc b/call/adaptation/broadcast_resource_listener_unittest.cc index 9cd80500c2..124a83f8ed 100644 --- a/call/adaptation/broadcast_resource_listener_unittest.cc +++ b/call/adaptation/broadcast_resource_listener_unittest.cc @@ -10,6 +10,10 @@ #include "call/adaptation/broadcast_resource_listener.h" +#include + +#include "api/adaptation/resource.h" +#include "api/scoped_refptr.h" #include "call/adaptation/test/fake_resource.h" #include "call/adaptation/test/mock_resource_listener.h" #include "test/gmock.h" @@ -21,17 +25,17 @@ using ::testing::_; using ::testing::StrictMock; TEST(BroadcastResourceListenerTest, CreateAndRemoveAdapterResource) { - rtc::scoped_refptr source_resource = + scoped_refptr source_resource = FakeResource::Create("SourceResource"); BroadcastResourceListener broadcast_resource_listener(source_resource); broadcast_resource_listener.StartListening(); EXPECT_TRUE(broadcast_resource_listener.GetAdapterResources().empty()); - rtc::scoped_refptr adapter = + scoped_refptr adapter = broadcast_resource_listener.CreateAdapterResource(); StrictMock listener; adapter->SetResourceListener(&listener); - EXPECT_EQ(std::vector>{adapter}, + EXPECT_EQ(std::vector>{adapter}, broadcast_resource_listener.GetAdapterResources()); // The removed adapter is not referenced by the broadcaster. 
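The broadcast_resource_listener changes above are a pure rtc:: prefix cleanup, but the tests that follow exercise the interesting part of the class: one source Resource fans out to any number of adapter Resources, each with its own listener. A hedged usage sketch, reusing the FakeResource and MockResourceListener test helpers from these files; the stripped template argument is assumed to be Resource, and the tear-down order mirrors the test rather than a documented requirement.

    // Sketch of the fan-out pattern covered by the tests below.
    scoped_refptr<Resource> source = FakeResource::Create("SourceResource");
    BroadcastResourceListener broadcaster(source);
    broadcaster.StartListening();

    scoped_refptr<Resource> adapter = broadcaster.CreateAdapterResource();
    StrictMock<MockResourceListener> listener;
    adapter->SetResourceListener(&listener);
    // Usage measurements reported on the source resource are now echoed to
    // `listener` through `adapter`.

    // Tear-down, mirroring the test: detach the listener, unregister the
    // adapter, then stop listening.
    adapter->SetResourceListener(nullptr);
    broadcaster.RemoveAdapterResource(adapter);
    broadcaster.StopListening();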
@@ -46,12 +50,12 @@ TEST(BroadcastResourceListenerTest, CreateAndRemoveAdapterResource) { } TEST(BroadcastResourceListenerTest, AdapterNameIsBasedOnSourceResourceName) { - rtc::scoped_refptr source_resource = + scoped_refptr source_resource = FakeResource::Create("FooBarResource"); BroadcastResourceListener broadcast_resource_listener(source_resource); broadcast_resource_listener.StartListening(); - rtc::scoped_refptr adapter = + scoped_refptr adapter = broadcast_resource_listener.CreateAdapterResource(); EXPECT_EQ("FooBarResourceAdapter", adapter->Name()); @@ -60,31 +64,31 @@ TEST(BroadcastResourceListenerTest, AdapterNameIsBasedOnSourceResourceName) { } TEST(BroadcastResourceListenerTest, AdaptersForwardsUsageMeasurements) { - rtc::scoped_refptr source_resource = + scoped_refptr source_resource = FakeResource::Create("SourceResource"); BroadcastResourceListener broadcast_resource_listener(source_resource); broadcast_resource_listener.StartListening(); StrictMock destination_listener1; StrictMock destination_listener2; - rtc::scoped_refptr adapter1 = + scoped_refptr adapter1 = broadcast_resource_listener.CreateAdapterResource(); adapter1->SetResourceListener(&destination_listener1); - rtc::scoped_refptr adapter2 = + scoped_refptr adapter2 = broadcast_resource_listener.CreateAdapterResource(); adapter2->SetResourceListener(&destination_listener2); // Expect kOveruse to be echoed. EXPECT_CALL(destination_listener1, OnResourceUsageStateMeasured(_, _)) .Times(1) - .WillOnce([adapter1](rtc::scoped_refptr resource, + .WillOnce([adapter1](scoped_refptr resource, ResourceUsageState usage_state) { EXPECT_EQ(adapter1, resource); EXPECT_EQ(ResourceUsageState::kOveruse, usage_state); }); EXPECT_CALL(destination_listener2, OnResourceUsageStateMeasured(_, _)) .Times(1) - .WillOnce([adapter2](rtc::scoped_refptr resource, + .WillOnce([adapter2](scoped_refptr resource, ResourceUsageState usage_state) { EXPECT_EQ(adapter2, resource); EXPECT_EQ(ResourceUsageState::kOveruse, usage_state); @@ -94,14 +98,14 @@ TEST(BroadcastResourceListenerTest, AdaptersForwardsUsageMeasurements) { // Expect kUnderuse to be echoed. EXPECT_CALL(destination_listener1, OnResourceUsageStateMeasured(_, _)) .Times(1) - .WillOnce([adapter1](rtc::scoped_refptr resource, + .WillOnce([adapter1](scoped_refptr resource, ResourceUsageState usage_state) { EXPECT_EQ(adapter1, resource); EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state); }); EXPECT_CALL(destination_listener2, OnResourceUsageStateMeasured(_, _)) .Times(1) - .WillOnce([adapter2](rtc::scoped_refptr resource, + .WillOnce([adapter2](scoped_refptr resource, ResourceUsageState usage_state) { EXPECT_EQ(adapter2, resource); EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state); diff --git a/call/adaptation/encoder_settings.cc b/call/adaptation/encoder_settings.cc index c894e833ed..171f8a1640 100644 --- a/call/adaptation/encoder_settings.cc +++ b/call/adaptation/encoder_settings.cc @@ -10,8 +10,14 @@ #include "call/adaptation/encoder_settings.h" +#include #include +#include "api/video/video_codec_type.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" +#include "video/config/video_encoder_config.h" + namespace webrtc { EncoderSettings::EncoderSettings(VideoEncoder::EncoderInfo encoder_info, @@ -46,7 +52,7 @@ const VideoCodec& EncoderSettings::video_codec() const { } VideoCodecType GetVideoCodecTypeOrGeneric( - const absl::optional& settings) { + const std::optional& settings) { return settings.has_value() ? 
settings->encoder_config().codec_type : kVideoCodecGeneric; } diff --git a/call/adaptation/encoder_settings.h b/call/adaptation/encoder_settings.h index 30ce0a05bc..04f074644a 100644 --- a/call/adaptation/encoder_settings.h +++ b/call/adaptation/encoder_settings.h @@ -11,7 +11,9 @@ #ifndef CALL_ADAPTATION_ENCODER_SETTINGS_H_ #define CALL_ADAPTATION_ENCODER_SETTINGS_H_ -#include "absl/types/optional.h" +#include + +#include "api/video/video_codec_type.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" #include "video/config/video_encoder_config.h" @@ -41,7 +43,7 @@ class EncoderSettings { }; VideoCodecType GetVideoCodecTypeOrGeneric( - const absl::optional& settings); + const std::optional& settings); } // namespace webrtc diff --git a/call/adaptation/resource_adaptation_processor.cc b/call/adaptation/resource_adaptation_processor.cc index f4d1bf3538..c6b1374e48 100644 --- a/call/adaptation/resource_adaptation_processor.cc +++ b/call/adaptation/resource_adaptation_processor.cc @@ -11,16 +11,27 @@ #include "call/adaptation/resource_adaptation_processor.h" #include +#include #include +#include #include +#include #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" +#include "api/adaptation/resource.h" +#include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "api/video/video_adaptation_counters.h" +#include "call/adaptation/resource_adaptation_processor_interface.h" +#include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_adapter.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -37,12 +48,12 @@ void ResourceAdaptationProcessor::ResourceListenerDelegate:: } void ResourceAdaptationProcessor::ResourceListenerDelegate:: - OnResourceUsageStateMeasured(rtc::scoped_refptr resource, + OnResourceUsageStateMeasured(scoped_refptr resource, ResourceUsageState usage_state) { if (!task_queue_->IsCurrent()) { task_queue_->PostTask( - [this_ref = rtc::scoped_refptr(this), - resource, usage_state] { + [this_ref = scoped_refptr(this), resource, + usage_state] { this_ref->OnResourceUsageStateMeasured(resource, usage_state); }); return; @@ -66,7 +77,7 @@ ResourceAdaptationProcessor::ResourceAdaptationProcessor( VideoStreamAdapter* stream_adapter) : task_queue_(TaskQueueBase::Current()), resource_listener_delegate_( - rtc::make_ref_counted(this)), + make_ref_counted(this)), resources_(), stream_adapter_(stream_adapter), last_reported_source_restrictions_(), @@ -105,7 +116,7 @@ void ResourceAdaptationProcessor::RemoveResourceLimitationsListener( } void ResourceAdaptationProcessor::AddResource( - rtc::scoped_refptr resource) { + scoped_refptr resource) { RTC_DCHECK(resource); { MutexLock crit(&resources_lock_); @@ -117,14 +128,14 @@ void ResourceAdaptationProcessor::AddResource( RTC_LOG(LS_INFO) << "Registered resource \"" << resource->Name() << "\"."; } -std::vector> -ResourceAdaptationProcessor::GetResources() const { +std::vector> ResourceAdaptationProcessor::GetResources() + const { MutexLock crit(&resources_lock_); return resources_; } void ResourceAdaptationProcessor::RemoveResource( - rtc::scoped_refptr resource) { + scoped_refptr resource) { RTC_DCHECK(resource); RTC_LOG(LS_INFO) << "Removing resource \"" << resource->Name() << "\"."; resource->SetResourceListener(nullptr); @@ -139,7 +150,7 
@@ void ResourceAdaptationProcessor::RemoveResource( } void ResourceAdaptationProcessor::RemoveLimitationsImposedByResource( - rtc::scoped_refptr resource) { + scoped_refptr resource) { if (!task_queue_->IsCurrent()) { task_queue_->PostTask( [this, resource]() { RemoveLimitationsImposedByResource(resource); }); @@ -182,7 +193,7 @@ void ResourceAdaptationProcessor::RemoveLimitationsImposedByResource( } void ResourceAdaptationProcessor::OnResourceUsageStateMeasured( - rtc::scoped_refptr resource, + scoped_refptr resource, ResourceUsageState usage_state) { RTC_DCHECK_RUN_ON(task_queue_); RTC_DCHECK(resource); @@ -225,19 +236,19 @@ void ResourceAdaptationProcessor::OnResourceUsageStateMeasured( ResourceAdaptationProcessor::MitigationResultAndLogMessage ResourceAdaptationProcessor::OnResourceUnderuse( - rtc::scoped_refptr reason_resource) { + scoped_refptr reason_resource) { RTC_DCHECK_RUN_ON(task_queue_); // How can this stream be adapted up? Adaptation adaptation = stream_adapter_->GetAdaptationUp(); if (adaptation.status() != Adaptation::Status::kValid) { - rtc::StringBuilder message; + StringBuilder message; message << "Not adapting up because VideoStreamAdapter returned " << Adaptation::StatusToString(adaptation.status()); return MitigationResultAndLogMessage(MitigationResult::kRejectedByAdapter, message.Release()); } // Check that resource is most limited. - std::vector> most_limited_resources; + std::vector> most_limited_resources; VideoStreamAdapter::RestrictionsWithCounters most_limited_restrictions; std::tie(most_limited_resources, most_limited_restrictions) = FindMostLimitedResources(); @@ -251,7 +262,7 @@ ResourceAdaptationProcessor::OnResourceUnderuse( // adaptation. if (absl::c_find(most_limited_resources, reason_resource) == most_limited_resources.end()) { - rtc::StringBuilder message; + StringBuilder message; message << "Resource \"" << reason_resource->Name() << "\" was not the most limited resource."; return MitigationResultAndLogMessage( @@ -263,7 +274,7 @@ ResourceAdaptationProcessor::OnResourceUnderuse( // before the adaptation is applied. UpdateResourceLimitations(reason_resource, adaptation.restrictions(), adaptation.counters()); - rtc::StringBuilder message; + StringBuilder message; message << "Resource \"" << reason_resource->Name() << "\" was not the only most limited resource."; return MitigationResultAndLogMessage( @@ -272,7 +283,7 @@ ResourceAdaptationProcessor::OnResourceUnderuse( } // Apply adaptation. stream_adapter_->ApplyAdaptation(adaptation, reason_resource); - rtc::StringBuilder message; + StringBuilder message; message << "Adapted up successfully. Unfiltered adaptations: " << stream_adapter_->adaptation_counters().ToString(); return MitigationResultAndLogMessage(MitigationResult::kAdaptationApplied, @@ -281,7 +292,7 @@ ResourceAdaptationProcessor::OnResourceUnderuse( ResourceAdaptationProcessor::MitigationResultAndLogMessage ResourceAdaptationProcessor::OnResourceOveruse( - rtc::scoped_refptr reason_resource) { + scoped_refptr reason_resource) { RTC_DCHECK_RUN_ON(task_queue_); // How can this stream be adapted up? 
Adaptation adaptation = stream_adapter_->GetAdaptationDown(); @@ -293,7 +304,7 @@ ResourceAdaptationProcessor::OnResourceOveruse( restrictions.counters); } if (adaptation.status() != Adaptation::Status::kValid) { - rtc::StringBuilder message; + StringBuilder message; message << "Not adapting down because VideoStreamAdapter returned " << Adaptation::StatusToString(adaptation.status()); return MitigationResultAndLogMessage(MitigationResult::kRejectedByAdapter, @@ -303,17 +314,17 @@ ResourceAdaptationProcessor::OnResourceOveruse( UpdateResourceLimitations(reason_resource, adaptation.restrictions(), adaptation.counters()); stream_adapter_->ApplyAdaptation(adaptation, reason_resource); - rtc::StringBuilder message; + StringBuilder message; message << "Adapted down successfully. Unfiltered adaptations: " << stream_adapter_->adaptation_counters().ToString(); return MitigationResultAndLogMessage(MitigationResult::kAdaptationApplied, message.Release()); } -std::pair>, +std::pair>, VideoStreamAdapter::RestrictionsWithCounters> ResourceAdaptationProcessor::FindMostLimitedResources() const { - std::vector> most_limited_resources; + std::vector> most_limited_resources; VideoStreamAdapter::RestrictionsWithCounters most_limited_restrictions{ VideoSourceRestrictions(), VideoAdaptationCounters()}; @@ -336,7 +347,7 @@ ResourceAdaptationProcessor::FindMostLimitedResources() const { } void ResourceAdaptationProcessor::UpdateResourceLimitations( - rtc::scoped_refptr reason_resource, + scoped_refptr reason_resource, const VideoSourceRestrictions& restrictions, const VideoAdaptationCounters& counters) { auto& adaptation_limits = adaptation_limits_by_resources_[reason_resource]; @@ -346,7 +357,7 @@ void ResourceAdaptationProcessor::UpdateResourceLimitations( } adaptation_limits = {restrictions, counters}; - std::map, VideoAdaptationCounters> limitations; + std::map, VideoAdaptationCounters> limitations; for (const auto& p : adaptation_limits_by_resources_) { limitations.insert(std::make_pair(p.first, p.second.counters)); } @@ -357,9 +368,9 @@ void ResourceAdaptationProcessor::UpdateResourceLimitations( } void ResourceAdaptationProcessor::OnVideoSourceRestrictionsUpdated( - VideoSourceRestrictions restrictions, + VideoSourceRestrictions /* restrictions */, const VideoAdaptationCounters& adaptation_counters, - rtc::scoped_refptr reason, + scoped_refptr reason, const VideoSourceRestrictions& unfiltered_restrictions) { RTC_DCHECK_RUN_ON(task_queue_); if (reason) { diff --git a/call/adaptation/resource_adaptation_processor.h b/call/adaptation/resource_adaptation_processor.h index db3b4c2506..97d3e47bc0 100644 --- a/call/adaptation/resource_adaptation_processor.h +++ b/call/adaptation/resource_adaptation_processor.h @@ -12,25 +12,22 @@ #define CALL_ADAPTATION_RESOURCE_ADAPTATION_PROCESSOR_H_ #include -#include #include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/adaptation/resource.h" -#include "api/rtp_parameters.h" +#include "api/ref_count.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "api/video/video_adaptation_counters.h" -#include "api/video/video_frame.h" #include "call/adaptation/resource_adaptation_processor_interface.h" #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_adapter.h" -#include "call/adaptation/video_stream_input_state.h" -#include "call/adaptation/video_stream_input_state_provider.h" -#include 
"video/video_stream_encoder_observer.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -64,27 +61,27 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface, ResourceLimitationsListener* limitations_listener) override; void RemoveResourceLimitationsListener( ResourceLimitationsListener* limitations_listener) override; - void AddResource(rtc::scoped_refptr resource) override; - std::vector> GetResources() const override; - void RemoveResource(rtc::scoped_refptr resource) override; + void AddResource(scoped_refptr resource) override; + std::vector> GetResources() const override; + void RemoveResource(scoped_refptr resource) override; // ResourceListener implementation. // Triggers OnResourceUnderuse() or OnResourceOveruse(). - void OnResourceUsageStateMeasured(rtc::scoped_refptr resource, + void OnResourceUsageStateMeasured(scoped_refptr resource, ResourceUsageState usage_state) override; // VideoSourceRestrictionsListener implementation. void OnVideoSourceRestrictionsUpdated( VideoSourceRestrictions restrictions, const VideoAdaptationCounters& adaptation_counters, - rtc::scoped_refptr reason, + scoped_refptr reason, const VideoSourceRestrictions& unfiltered_restrictions) override; private: // If resource usage measurements happens off the adaptation task queue, this // class takes care of posting the measurement for the processor to handle it // on the adaptation task queue. - class ResourceListenerDelegate : public rtc::RefCountInterface, + class ResourceListenerDelegate : public RefCountInterface, public ResourceListener { public: explicit ResourceListenerDelegate(ResourceAdaptationProcessor* processor); @@ -92,7 +89,7 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface, void OnProcessorDestroyed(); // ResourceListener implementation. - void OnResourceUsageStateMeasured(rtc::scoped_refptr resource, + void OnResourceUsageStateMeasured(scoped_refptr resource, ResourceUsageState usage_state) override; private: @@ -119,11 +116,11 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface, // informing listeners of the new VideoSourceRestriction and adaptation // counters. MitigationResultAndLogMessage OnResourceUnderuse( - rtc::scoped_refptr reason_resource); + scoped_refptr reason_resource); MitigationResultAndLogMessage OnResourceOveruse( - rtc::scoped_refptr reason_resource); + scoped_refptr reason_resource); - void UpdateResourceLimitations(rtc::scoped_refptr reason_resource, + void UpdateResourceLimitations(scoped_refptr reason_resource, const VideoSourceRestrictions& restrictions, const VideoAdaptationCounters& counters) RTC_RUN_ON(task_queue_); @@ -133,23 +130,22 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface, // resource performing the adaptation is the only most limited resource. This // function returns the list of all most limited resources as well as the // corresponding adaptation of that resource. - std::pair>, + std::pair>, VideoStreamAdapter::RestrictionsWithCounters> FindMostLimitedResources() const RTC_RUN_ON(task_queue_); - void RemoveLimitationsImposedByResource( - rtc::scoped_refptr resource); + void RemoveLimitationsImposedByResource(scoped_refptr resource); TaskQueueBase* task_queue_; - rtc::scoped_refptr resource_listener_delegate_; + scoped_refptr resource_listener_delegate_; // Input and output. 
mutable Mutex resources_lock_; - std::vector> resources_ + std::vector> resources_ RTC_GUARDED_BY(resources_lock_); std::vector resource_limitations_listeners_ RTC_GUARDED_BY(task_queue_); // Purely used for statistics, does not ensure mapped resources stay alive. - std::map, + std::map, VideoStreamAdapter::RestrictionsWithCounters> adaptation_limits_by_resources_ RTC_GUARDED_BY(task_queue_); // Responsible for generating and applying possible adaptations. diff --git a/call/adaptation/resource_adaptation_processor_interface.h b/call/adaptation/resource_adaptation_processor_interface.h index 4729488150..a1535f1c27 100644 --- a/call/adaptation/resource_adaptation_processor_interface.h +++ b/call/adaptation/resource_adaptation_processor_interface.h @@ -14,16 +14,9 @@ #include #include -#include "absl/types/optional.h" #include "api/adaptation/resource.h" -#include "api/rtp_parameters.h" #include "api/scoped_refptr.h" -#include "api/task_queue/task_queue_base.h" #include "api/video/video_adaptation_counters.h" -#include "api/video/video_frame.h" -#include "call/adaptation/adaptation_constraint.h" -#include "call/adaptation/encoder_settings.h" -#include "call/adaptation/video_source_restrictions.h" namespace webrtc { @@ -34,8 +27,8 @@ class ResourceLimitationsListener { // The limitations on a resource were changed. This does not mean the current // video restrictions have changed. virtual void OnResourceLimitationChanged( - rtc::scoped_refptr resource, - const std::map, VideoAdaptationCounters>& + scoped_refptr resource, + const std::map, VideoAdaptationCounters>& resource_limitations) = 0; }; @@ -57,9 +50,9 @@ class ResourceAdaptationProcessorInterface { // with AddResource() and RemoveResource() instead. When the processor is // multi-stream aware, stream-specific resouces will get added and removed // over time. 
- virtual void AddResource(rtc::scoped_refptr resource) = 0; - virtual std::vector> GetResources() const = 0; - virtual void RemoveResource(rtc::scoped_refptr resource) = 0; + virtual void AddResource(scoped_refptr resource) = 0; + virtual std::vector> GetResources() const = 0; + virtual void RemoveResource(scoped_refptr resource) = 0; }; } // namespace webrtc diff --git a/call/adaptation/resource_adaptation_processor_unittest.cc b/call/adaptation/resource_adaptation_processor_unittest.cc index ccccd3fe04..79433c7bb3 100644 --- a/call/adaptation/resource_adaptation_processor_unittest.cc +++ b/call/adaptation/resource_adaptation_processor_unittest.cc @@ -10,25 +10,36 @@ #include "call/adaptation/resource_adaptation_processor.h" +#include +#include + #include "api/adaptation/resource.h" +#include "api/rtp_parameters.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" #include "api/video/video_adaptation_counters.h" -#include "call/adaptation/resource_adaptation_processor_interface.h" #include "call/adaptation/test/fake_frame_rate_provider.h" #include "call/adaptation/test/fake_resource.h" #include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_adapter.h" #include "call/adaptation/video_stream_input_state_provider.h" #include "rtc_base/event.h" -#include "rtc_base/gunit.h" -#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" +#include "test/gmock.h" #include "test/gtest.h" #include "test/scoped_key_value_config.h" +#include "test/wait_until.h" namespace webrtc { namespace { +using ::testing::Eq; + const int kDefaultFrameRate = 30; const int kDefaultFrameSize = 1280 * 720; constexpr TimeDelta kDefaultTimeout = TimeDelta::Seconds(5); @@ -55,7 +66,7 @@ class VideoSourceRestrictionsListenerForTesting RTC_DCHECK_RUN_ON(&sequence_checker_); return adaptation_counters_; } - rtc::scoped_refptr reason() const { + scoped_refptr reason() const { RTC_DCHECK_RUN_ON(&sequence_checker_); return reason_; } @@ -64,8 +75,8 @@ class VideoSourceRestrictionsListenerForTesting void OnVideoSourceRestrictionsUpdated( VideoSourceRestrictions restrictions, const VideoAdaptationCounters& adaptation_counters, - rtc::scoped_refptr reason, - const VideoSourceRestrictions& unfiltered_restrictions) override { + scoped_refptr reason, + const VideoSourceRestrictions& /* unfiltered_restrictions */) override { RTC_DCHECK_RUN_ON(&sequence_checker_); ++restrictions_updated_count_; restrictions_ = restrictions; @@ -79,7 +90,7 @@ class VideoSourceRestrictionsListenerForTesting VideoSourceRestrictions restrictions_ RTC_GUARDED_BY(&sequence_checker_); VideoAdaptationCounters adaptation_counters_ RTC_GUARDED_BY(&sequence_checker_); - rtc::scoped_refptr reason_ RTC_GUARDED_BY(&sequence_checker_); + scoped_refptr reason_ RTC_GUARDED_BY(&sequence_checker_); }; class ResourceAdaptationProcessorTest : public ::testing::Test { @@ -131,16 +142,16 @@ class ResourceAdaptationProcessorTest : public ::testing::Test { } static void WaitUntilTaskQueueIdle() { - ASSERT_TRUE(rtc::Thread::Current()->ProcessMessages(0)); + ASSERT_TRUE(Thread::Current()->ProcessMessages(0)); } protected: - rtc::AutoThread main_thread_; + AutoThread main_thread_; webrtc::test::ScopedKeyValueConfig field_trials_; FakeFrameRateProvider frame_rate_provider_; VideoStreamInputStateProvider input_state_provider_; - rtc::scoped_refptr 
resource_; - rtc::scoped_refptr other_resource_; + scoped_refptr resource_; + scoped_refptr other_resource_; std::unique_ptr video_stream_adapter_; std::unique_ptr processor_; VideoSourceRestrictionsListenerForTesting restrictions_listener_; @@ -433,8 +444,11 @@ TEST_F(ResourceAdaptationProcessorTest, resource_task_queue.PostTask( [&]() { resource_->SetUsageState(ResourceUsageState::kOveruse); }); - EXPECT_EQ_WAIT(1u, restrictions_listener_.restrictions_updated_count(), - kDefaultTimeout.ms()); + EXPECT_THAT( + WaitUntil( + [&] { return restrictions_listener_.restrictions_updated_count(); }, + Eq(1u)), + IsRtcOk()); } TEST_F(ResourceAdaptationProcessorTest, @@ -445,7 +459,7 @@ TEST_F(ResourceAdaptationProcessorTest, // Wait for `resource_` to signal oversue first so we know that the delegate // has passed it on to the processor's task queue. - rtc::Event resource_event; + Event resource_event; TaskQueueForTest resource_task_queue("ResourceTaskQueue"); resource_task_queue.PostTask([&]() { resource_->SetUsageState(ResourceUsageState::kOveruse); @@ -467,7 +481,7 @@ TEST_F(ResourceAdaptationProcessorTest, DegradationPreference::MAINTAIN_FRAMERATE); SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); - rtc::Event overuse_event; + Event overuse_event; TaskQueueForTest resource_task_queue("ResourceTaskQueue"); // Queues task for `resource_` overuse while `processor_` is still listening. resource_task_queue.PostTask([&]() { diff --git a/call/adaptation/resource_unittest.cc b/call/adaptation/resource_unittest.cc index a2291dfdce..d3b7622daa 100644 --- a/call/adaptation/resource_unittest.cc +++ b/call/adaptation/resource_unittest.cc @@ -10,7 +10,6 @@ #include "api/adaptation/resource.h" -#include #include "api/scoped_refptr.h" #include "call/adaptation/test/fake_resource.h" @@ -28,7 +27,7 @@ class ResourceTest : public ::testing::Test { ResourceTest() : fake_resource_(FakeResource::Create("FakeResource")) {} protected: - rtc::scoped_refptr fake_resource_; + scoped_refptr fake_resource_; }; TEST_F(ResourceTest, RegisteringListenerReceivesCallbacks) { @@ -36,7 +35,7 @@ TEST_F(ResourceTest, RegisteringListenerReceivesCallbacks) { fake_resource_->SetResourceListener(&resource_listener); EXPECT_CALL(resource_listener, OnResourceUsageStateMeasured(_, _)) .Times(1) - .WillOnce([](rtc::scoped_refptr resource, + .WillOnce([](scoped_refptr /* resource */, ResourceUsageState usage_state) { EXPECT_EQ(ResourceUsageState::kOveruse, usage_state); }); diff --git a/call/adaptation/test/fake_adaptation_constraint.cc b/call/adaptation/test/fake_adaptation_constraint.cc index dbb31f0d3b..acd29e9583 100644 --- a/call/adaptation/test/fake_adaptation_constraint.cc +++ b/call/adaptation/test/fake_adaptation_constraint.cc @@ -10,9 +10,11 @@ #include "call/adaptation/test/fake_adaptation_constraint.h" -#include +#include #include "absl/strings/string_view.h" +#include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_input_state.h" namespace webrtc { @@ -31,9 +33,9 @@ std::string FakeAdaptationConstraint::Name() const { } bool FakeAdaptationConstraint::IsAdaptationUpAllowed( - const VideoStreamInputState& input_state, - const VideoSourceRestrictions& restrictions_before, - const VideoSourceRestrictions& restrictions_after) const { + const VideoStreamInputState& /* input_state */, + const VideoSourceRestrictions& /* restrictions_before */, + const VideoSourceRestrictions& /* restrictions_after */) const { return is_adaptation_up_allowed_; } diff --git 
a/call/adaptation/test/fake_adaptation_constraint.h b/call/adaptation/test/fake_adaptation_constraint.h index 5c684335f2..31c9110ea9 100644 --- a/call/adaptation/test/fake_adaptation_constraint.h +++ b/call/adaptation/test/fake_adaptation_constraint.h @@ -15,6 +15,8 @@ #include "absl/strings/string_view.h" #include "call/adaptation/adaptation_constraint.h" +#include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_input_state.h" namespace webrtc { diff --git a/call/adaptation/test/fake_frame_rate_provider.h b/call/adaptation/test/fake_frame_rate_provider.h index b8815f592a..10ec9f016e 100644 --- a/call/adaptation/test/fake_frame_rate_provider.h +++ b/call/adaptation/test/fake_frame_rate_provider.h @@ -11,10 +11,15 @@ #ifndef CALL_ADAPTATION_TEST_FAKE_FRAME_RATE_PROVIDER_H_ #define CALL_ADAPTATION_TEST_FAKE_FRAME_RATE_PROVIDER_H_ -#include #include +#include "api/video/video_adaptation_counters.h" +#include "api/video/video_adaptation_reason.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" #include "test/gmock.h" +#include "video/config/video_encoder_config.h" #include "video/video_stream_encoder_observer.h" namespace webrtc { diff --git a/call/adaptation/test/fake_resource.cc b/call/adaptation/test/fake_resource.cc index 48b4768550..e8c87089ef 100644 --- a/call/adaptation/test/fake_resource.cc +++ b/call/adaptation/test/fake_resource.cc @@ -10,17 +10,18 @@ #include "call/adaptation/test/fake_resource.h" -#include -#include +#include #include "absl/strings/string_view.h" +#include "api/adaptation/resource.h" #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" namespace webrtc { // static -rtc::scoped_refptr FakeResource::Create(absl::string_view name) { - return rtc::make_ref_counted(name); +scoped_refptr FakeResource::Create(absl::string_view name) { + return make_ref_counted(name); } FakeResource::FakeResource(absl::string_view name) @@ -30,7 +31,7 @@ FakeResource::~FakeResource() {} void FakeResource::SetUsageState(ResourceUsageState usage_state) { if (listener_) { - listener_->OnResourceUsageStateMeasured(rtc::scoped_refptr(this), + listener_->OnResourceUsageStateMeasured(scoped_refptr(this), usage_state); } } diff --git a/call/adaptation/test/fake_resource.h b/call/adaptation/test/fake_resource.h index 1119a9614f..f5b26b2e27 100644 --- a/call/adaptation/test/fake_resource.h +++ b/call/adaptation/test/fake_resource.h @@ -12,10 +12,8 @@ #define CALL_ADAPTATION_TEST_FAKE_RESOURCE_H_ #include -#include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/adaptation/resource.h" #include "api/scoped_refptr.h" @@ -24,7 +22,7 @@ namespace webrtc { // Fake resource used for testing. 
 class FakeResource : public Resource {
  public:
-  static rtc::scoped_refptr Create(absl::string_view name);
+  static scoped_refptr Create(absl::string_view name);
 
   explicit FakeResource(absl::string_view name);
   ~FakeResource() override;
 
diff --git a/call/adaptation/test/fake_video_stream_input_state_provider.cc b/call/adaptation/test/fake_video_stream_input_state_provider.cc
index ce92dfb204..5e695835b7 100644
--- a/call/adaptation/test/fake_video_stream_input_state_provider.cc
+++ b/call/adaptation/test/fake_video_stream_input_state_provider.cc
@@ -10,6 +10,9 @@
 
 #include "call/adaptation/test/fake_video_stream_input_state_provider.h"
 
+#include "call/adaptation/video_stream_input_state.h"
+#include "call/adaptation/video_stream_input_state_provider.h"
+
 namespace webrtc {
 
 FakeVideoStreamInputStateProvider::FakeVideoStreamInputStateProvider()
diff --git a/call/adaptation/test/fake_video_stream_input_state_provider.h b/call/adaptation/test/fake_video_stream_input_state_provider.h
index 93f7dba7e6..dd207b8283 100644
--- a/call/adaptation/test/fake_video_stream_input_state_provider.h
+++ b/call/adaptation/test/fake_video_stream_input_state_provider.h
@@ -11,6 +11,7 @@
 #ifndef CALL_ADAPTATION_TEST_FAKE_VIDEO_STREAM_INPUT_STATE_PROVIDER_H_
 #define CALL_ADAPTATION_TEST_FAKE_VIDEO_STREAM_INPUT_STATE_PROVIDER_H_
 
+#include "call/adaptation/video_stream_input_state.h"
 #include "call/adaptation/video_stream_input_state_provider.h"
 
 namespace webrtc {
diff --git a/call/adaptation/test/mock_resource_listener.h b/call/adaptation/test/mock_resource_listener.h
index 1c4df31a13..74d8d05c61 100644
--- a/call/adaptation/test/mock_resource_listener.h
+++ b/call/adaptation/test/mock_resource_listener.h
@@ -12,6 +12,7 @@
 #define CALL_ADAPTATION_TEST_MOCK_RESOURCE_LISTENER_H_
 
 #include "api/adaptation/resource.h"
+#include "api/scoped_refptr.h"
 #include "test/gmock.h"
 
 namespace webrtc {
@@ -20,7 +21,7 @@ class MockResourceListener : public ResourceListener {
  public:
   MOCK_METHOD(void,
               OnResourceUsageStateMeasured,
-              (rtc::scoped_refptr resource,
+              (scoped_refptr resource,
                ResourceUsageState usage_state),
               (override));
 };
diff --git a/call/adaptation/video_source_restrictions.cc b/call/adaptation/video_source_restrictions.cc
index 719bc53278..92dfa24336 100644
--- a/call/adaptation/video_source_restrictions.cc
+++ b/call/adaptation/video_source_restrictions.cc
@@ -11,7 +11,11 @@
 #include "call/adaptation/video_source_restrictions.h"
 
 #include
+#include
 #include
+#include
+#include
+#include
 
 #include "rtc_base/checks.h"
 #include "rtc_base/strings/string_builder.h"
@@ -19,14 +23,14 @@
 namespace webrtc {
 
 VideoSourceRestrictions::VideoSourceRestrictions()
-    : max_pixels_per_frame_(absl::nullopt),
-      target_pixels_per_frame_(absl::nullopt),
-      max_frame_rate_(absl::nullopt) {}
+    : max_pixels_per_frame_(std::nullopt),
+      target_pixels_per_frame_(std::nullopt),
+      max_frame_rate_(std::nullopt) {}
 
 VideoSourceRestrictions::VideoSourceRestrictions(
-    absl::optional max_pixels_per_frame,
-    absl::optional target_pixels_per_frame,
-    absl::optional max_frame_rate)
+    std::optional max_pixels_per_frame,
+    std::optional target_pixels_per_frame,
+    std::optional max_frame_rate)
     : max_pixels_per_frame_(std::move(max_pixels_per_frame)),
       target_pixels_per_frame_(std::move(target_pixels_per_frame)),
       max_frame_rate_(std::move(max_frame_rate)) {
@@ -39,7 +43,7 @@ VideoSourceRestrictions::VideoSourceRestrictions(
 }
 
 std::string VideoSourceRestrictions::ToString() const {
-  rtc::StringBuilder ss;
+  StringBuilder ss;
   ss << "{";
   if (max_frame_rate_)
     ss 
<< " max_fps=" << max_frame_rate_.value(); @@ -51,32 +55,32 @@ std::string VideoSourceRestrictions::ToString() const { return ss.Release(); } -const absl::optional& VideoSourceRestrictions::max_pixels_per_frame() +const std::optional& VideoSourceRestrictions::max_pixels_per_frame() const { return max_pixels_per_frame_; } -const absl::optional& VideoSourceRestrictions::target_pixels_per_frame() +const std::optional& VideoSourceRestrictions::target_pixels_per_frame() const { return target_pixels_per_frame_; } -const absl::optional& VideoSourceRestrictions::max_frame_rate() const { +const std::optional& VideoSourceRestrictions::max_frame_rate() const { return max_frame_rate_; } void VideoSourceRestrictions::set_max_pixels_per_frame( - absl::optional max_pixels_per_frame) { + std::optional max_pixels_per_frame) { max_pixels_per_frame_ = std::move(max_pixels_per_frame); } void VideoSourceRestrictions::set_target_pixels_per_frame( - absl::optional target_pixels_per_frame) { + std::optional target_pixels_per_frame) { target_pixels_per_frame_ = std::move(target_pixels_per_frame); } void VideoSourceRestrictions::set_max_frame_rate( - absl::optional max_frame_rate) { + std::optional max_frame_rate) { max_frame_rate_ = std::move(max_frame_rate); } diff --git a/call/adaptation/video_source_restrictions.h b/call/adaptation/video_source_restrictions.h index be8520a385..d32397aad6 100644 --- a/call/adaptation/video_source_restrictions.h +++ b/call/adaptation/video_source_restrictions.h @@ -11,10 +11,9 @@ #ifndef CALL_ADAPTATION_VIDEO_SOURCE_RESTRICTIONS_H_ #define CALL_ADAPTATION_VIDEO_SOURCE_RESTRICTIONS_H_ +#include +#include #include -#include - -#include "absl/types/optional.h" namespace webrtc { @@ -26,9 +25,9 @@ class VideoSourceRestrictions { VideoSourceRestrictions(); // All values must be positive or nullopt. // TODO(hbos): Support expressing "disable this stream"? - VideoSourceRestrictions(absl::optional max_pixels_per_frame, - absl::optional target_pixels_per_frame, - absl::optional max_frame_rate); + VideoSourceRestrictions(std::optional max_pixels_per_frame, + std::optional target_pixels_per_frame, + std::optional max_frame_rate); bool operator==(const VideoSourceRestrictions& rhs) const { return max_pixels_per_frame_ == rhs.max_pixels_per_frame_ && @@ -43,7 +42,7 @@ class VideoSourceRestrictions { // The source must produce a resolution less than or equal to // max_pixels_per_frame(). - const absl::optional& max_pixels_per_frame() const; + const std::optional& max_pixels_per_frame() const; // The source should produce a resolution as close to the // target_pixels_per_frame() as possible, provided this does not exceed // max_pixels_per_frame(). @@ -52,23 +51,23 @@ class VideoSourceRestrictions { // the camera in the smallest resolution that is greater than or equal to the // target and scale it down to the target if it is greater. Is this an // accurate description of what this does today, or do we do something else? 
- const absl::optional& target_pixels_per_frame() const; - const absl::optional& max_frame_rate() const; + const std::optional& target_pixels_per_frame() const; + const std::optional& max_frame_rate() const; - void set_max_pixels_per_frame(absl::optional max_pixels_per_frame); + void set_max_pixels_per_frame(std::optional max_pixels_per_frame); void set_target_pixels_per_frame( - absl::optional target_pixels_per_frame); - void set_max_frame_rate(absl::optional max_frame_rate); + std::optional target_pixels_per_frame); + void set_max_frame_rate(std::optional max_frame_rate); // Update `this` with min(`this`, `other`). void UpdateMin(const VideoSourceRestrictions& other); private: - // These map to rtc::VideoSinkWants's `max_pixel_count` and + // These map to VideoSinkWants's `max_pixel_count` and // `target_pixel_count`. - absl::optional max_pixels_per_frame_; - absl::optional target_pixels_per_frame_; - absl::optional max_frame_rate_; + std::optional max_pixels_per_frame_; + std::optional target_pixels_per_frame_; + std::optional max_frame_rate_; }; bool DidRestrictionsIncrease(VideoSourceRestrictions before, diff --git a/call/adaptation/video_source_restrictions_unittest.cc b/call/adaptation/video_source_restrictions_unittest.cc index 8c1ae4c896..aac76547f1 100644 --- a/call/adaptation/video_source_restrictions_unittest.cc +++ b/call/adaptation/video_source_restrictions_unittest.cc @@ -10,8 +10,10 @@ #include "call/adaptation/video_source_restrictions.h" -#include "test/gtest.h" +#include +#include +#include "test/gtest.h" namespace webrtc { namespace { @@ -19,19 +21,19 @@ namespace { const size_t kHdPixels = 1280 * 720; const VideoSourceRestrictions kUnlimited; -const VideoSourceRestrictions k15fps(absl::nullopt, absl::nullopt, 15.0); -const VideoSourceRestrictions kHd(kHdPixels, kHdPixels, absl::nullopt); +const VideoSourceRestrictions k15fps(std::nullopt, std::nullopt, 15.0); +const VideoSourceRestrictions kHd(kHdPixels, kHdPixels, std::nullopt); const VideoSourceRestrictions kHd15fps(kHdPixels, kHdPixels, 15.0); const VideoSourceRestrictions kVga7fps(kHdPixels / 2, kHdPixels / 2, 7.0); VideoSourceRestrictions RestrictionsFromMaxPixelsPerFrame( size_t max_pixels_per_frame) { - return VideoSourceRestrictions(max_pixels_per_frame, absl::nullopt, - absl::nullopt); + return VideoSourceRestrictions(max_pixels_per_frame, std::nullopt, + std::nullopt); } VideoSourceRestrictions RestrictionsFromMaxFrameRate(double max_frame_rate) { - return VideoSourceRestrictions(absl::nullopt, absl::nullopt, max_frame_rate); + return VideoSourceRestrictions(std::nullopt, std::nullopt, max_frame_rate); } } // namespace diff --git a/call/adaptation/video_stream_adapter.cc b/call/adaptation/video_stream_adapter.cc index 5a970fb2ef..5863931dac 100644 --- a/call/adaptation/video_stream_adapter.cc +++ b/call/adaptation/video_stream_adapter.cc @@ -11,21 +11,30 @@ #include "call/adaptation/video_stream_adapter.h" #include +#include +#include #include +#include #include +#include -#include "absl/types/optional.h" -#include "absl/types/variant.h" +#include "api/adaptation/resource.h" +#include "api/field_trials_view.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/video/video_adaptation_counters.h" -#include "api/video/video_adaptation_reason.h" -#include "api/video_codecs/video_encoder.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/video_codec.h" +#include "call/adaptation/adaptation_constraint.h" #include 
"call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_input_state.h" +#include "call/adaptation/video_stream_input_state_provider.h" #include "modules/video_coding/svc/scalability_mode_util.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" +#include "video/video_stream_encoder_observer.h" namespace webrtc { @@ -38,7 +47,7 @@ int GetLowerFrameRateThan(int fps) { RTC_DCHECK(fps != std::numeric_limits::max()); return (fps * 2) / 3; } -// TODO(hbos): Use absl::optional<> instead? +// TODO(hbos): Use std::optional<> instead? int GetHigherFrameRateThan(int fps) { return fps != std::numeric_limits::max() ? (fps * 3) / 2 @@ -66,7 +75,7 @@ bool CanDecreaseResolutionTo(int target_pixels, const VideoStreamInputState& input_state, const VideoSourceRestrictions& restrictions) { int max_pixels_per_frame = - rtc::dchecked_cast(restrictions.max_pixels_per_frame().value_or( + dchecked_cast(restrictions.max_pixels_per_frame().value_or( std::numeric_limits::max())); return target_pixels < max_pixels_per_frame && target_pixels_min >= input_state.min_pixels_per_frame(); @@ -76,7 +85,7 @@ bool CanIncreaseResolutionTo(int target_pixels, const VideoSourceRestrictions& restrictions) { int max_pixels_wanted = GetIncreasedMaxPixelsWanted(target_pixels); int max_pixels_per_frame = - rtc::dchecked_cast(restrictions.max_pixels_per_frame().value_or( + dchecked_cast(restrictions.max_pixels_per_frame().value_or( std::numeric_limits::max())); return max_pixels_wanted > max_pixels_per_frame; } @@ -84,15 +93,14 @@ bool CanIncreaseResolutionTo(int target_pixels, bool CanDecreaseFrameRateTo(int max_frame_rate, const VideoSourceRestrictions& restrictions) { const int fps_wanted = std::max(kMinFrameRateFps, max_frame_rate); - return fps_wanted < - rtc::dchecked_cast(restrictions.max_frame_rate().value_or( - std::numeric_limits::max())); + return fps_wanted < dchecked_cast(restrictions.max_frame_rate().value_or( + std::numeric_limits::max())); } bool CanIncreaseFrameRateTo(int max_frame_rate, const VideoSourceRestrictions& restrictions) { return max_frame_rate > - rtc::dchecked_cast(restrictions.max_frame_rate().value_or( + dchecked_cast(restrictions.max_frame_rate().value_or( std::numeric_limits::max())); } @@ -118,16 +126,16 @@ VideoSourceRestrictions FilterRestrictionsByDegradationPreference( case DegradationPreference::BALANCED: break; case DegradationPreference::MAINTAIN_FRAMERATE: - source_restrictions.set_max_frame_rate(absl::nullopt); + source_restrictions.set_max_frame_rate(std::nullopt); break; case DegradationPreference::MAINTAIN_RESOLUTION: - source_restrictions.set_max_pixels_per_frame(absl::nullopt); - source_restrictions.set_target_pixels_per_frame(absl::nullopt); + source_restrictions.set_max_pixels_per_frame(std::nullopt); + source_restrictions.set_target_pixels_per_frame(std::nullopt); break; case DegradationPreference::DISABLED: - source_restrictions.set_max_pixels_per_frame(absl::nullopt); - source_restrictions.set_target_pixels_per_frame(absl::nullopt); - source_restrictions.set_max_frame_rate(absl::nullopt); + source_restrictions.set_max_pixels_per_frame(std::nullopt); + source_restrictions.set_target_pixels_per_frame(std::nullopt); + source_restrictions.set_max_frame_rate(std::nullopt); } return source_restrictions; } @@ -145,7 +153,7 @@ int GetLowerResolutionThan(int pixel_count) { return (pixel_count * 3) / 5; } -// TODO(hbos): Use absl::optional<> instead? +// TODO(hbos): Use std::optional<> instead? 
int GetHigherResolutionThan(int pixel_count) { return pixel_count != std::numeric_limits::max() ? (pixel_count * 5) / 3 @@ -211,7 +219,7 @@ VideoStreamAdapter::VideoStreamAdapter( balanced_settings_(field_trials), adaptation_validation_id_(0), degradation_preference_(DegradationPreference::DISABLED), - awaiting_frame_size_change_(absl::nullopt) { + awaiting_frame_size_change_(std::nullopt) { sequence_checker_.Detach(); RTC_DCHECK(input_state_provider_); RTC_DCHECK(encoder_stats_observer_); @@ -240,7 +248,7 @@ void VideoStreamAdapter::ClearRestrictions() { ++adaptation_validation_id_; current_restrictions_ = {VideoSourceRestrictions(), VideoAdaptationCounters()}; - awaiting_frame_size_change_ = absl::nullopt; + awaiting_frame_size_change_ = std::nullopt; BroadcastVideoRestrictionsUpdate(input_state_provider_->InputState(), nullptr); } @@ -319,7 +327,7 @@ Adaptation VideoStreamAdapter::RestrictionsOrStateToAdaptation( VideoStreamAdapter::RestrictionsOrState step_or_state, const VideoStreamInputState& input_state) const { RTC_DCHECK(!step_or_state.valueless_by_exception()); - return absl::visit( + return std::visit( RestrictionsOrStateVisitor{adaptation_validation_id_, input_state}, step_or_state); } @@ -328,9 +336,9 @@ Adaptation VideoStreamAdapter::GetAdaptationUp( const VideoStreamInputState& input_state) const { RestrictionsOrState step = GetAdaptationUpStep(input_state); // If an adaptation proposed, check with the constraints that it is ok. - if (absl::holds_alternative(step)) { + if (std::holds_alternative(step)) { RestrictionsWithCounters restrictions = - absl::get(step); + std::get(step); for (const auto* constraint : adaptation_constraints_) { if (!constraint->IsAdaptationUpAllowed(input_state, current_restrictions_.restrictions, @@ -372,7 +380,7 @@ VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::GetAdaptationUpStep( // Attempt to increase target frame rate. RestrictionsOrState increase_frame_rate = IncreaseFramerate(input_state, current_restrictions_); - if (absl::holds_alternative( + if (std::holds_alternative( increase_frame_rate)) { return increase_frame_rate; } @@ -404,11 +412,9 @@ Adaptation VideoStreamAdapter::GetAdaptationDown() { } // Check for min_fps if (degradation_preference_ == DegradationPreference::BALANCED && - absl::holds_alternative( - restrictions_or_state)) { + std::holds_alternative(restrictions_or_state)) { restrictions_or_state = AdaptIfFpsDiffInsufficient( - input_state, - absl::get(restrictions_or_state)); + input_state, std::get(restrictions_or_state)); } return RestrictionsOrStateToAdaptation(restrictions_or_state, input_state); } @@ -420,7 +426,7 @@ VideoStreamAdapter::AdaptIfFpsDiffInsufficient( RTC_DCHECK_EQ(degradation_preference_, DegradationPreference::BALANCED); int frame_size_pixels = input_state.single_active_stream_pixels().value_or( input_state.frame_size_pixels().value()); - absl::optional min_fps_diff = + std::optional min_fps_diff = balanced_settings_.MinFpsDiff(frame_size_pixels); if (current_restrictions_.counters.fps_adaptations < restrictions.counters.fps_adaptations && @@ -456,7 +462,7 @@ VideoStreamAdapter::GetAdaptationDownStep( // Try scale down framerate, if lower. 
RestrictionsOrState decrease_frame_rate = DecreaseFramerate(input_state, current_restrictions); - if (absl::holds_alternative( + if (std::holds_alternative( decrease_frame_rate)) { return decrease_frame_rate; } @@ -492,9 +498,9 @@ VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::DecreaseResolution( RTC_LOG(LS_INFO) << "Scaling down resolution, max pixels: " << target_pixels; new_restrictions.restrictions.set_max_pixels_per_frame( target_pixels != std::numeric_limits::max() - ? absl::optional(target_pixels) - : absl::nullopt); - new_restrictions.restrictions.set_target_pixels_per_frame(absl::nullopt); + ? std::optional(target_pixels) + : std::nullopt); + new_restrictions.restrictions.set_target_pixels_per_frame(std::nullopt); ++new_restrictions.counters.resolution_adaptations; return new_restrictions; } @@ -523,8 +529,8 @@ VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::DecreaseFramerate( RTC_LOG(LS_INFO) << "Scaling down framerate: " << max_frame_rate; new_restrictions.restrictions.set_max_frame_rate( max_frame_rate != std::numeric_limits::max() - ? absl::optional(max_frame_rate) - : absl::nullopt); + ? std::optional(max_frame_rate) + : std::nullopt); ++new_restrictions.counters.fps_adaptations; return new_restrictions; } @@ -548,12 +554,12 @@ VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::IncreaseResolution( << max_pixels_wanted; new_restrictions.restrictions.set_max_pixels_per_frame( max_pixels_wanted != std::numeric_limits::max() - ? absl::optional(max_pixels_wanted) - : absl::nullopt); + ? std::optional(max_pixels_wanted) + : std::nullopt); new_restrictions.restrictions.set_target_pixels_per_frame( max_pixels_wanted != std::numeric_limits::max() - ? absl::optional(target_pixels) - : absl::nullopt); + ? std::optional(target_pixels) + : std::nullopt); --new_restrictions.counters.resolution_adaptations; RTC_DCHECK_GE(new_restrictions.counters.resolution_adaptations, 0); return new_restrictions; @@ -601,8 +607,8 @@ VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::IncreaseFramerate( RestrictionsWithCounters new_restrictions = current_restrictions; new_restrictions.restrictions.set_max_frame_rate( max_frame_rate != std::numeric_limits::max() - ? absl::optional(max_frame_rate) - : absl::nullopt); + ? std::optional(max_frame_rate) + : std::nullopt); --new_restrictions.counters.fps_adaptations; RTC_DCHECK_GE(new_restrictions.counters.fps_adaptations, 0); return new_restrictions; @@ -633,10 +639,10 @@ VideoStreamAdapter::GetAdaptDownResolutionStepForBalanced( const VideoStreamInputState& input_state) const { // Adapt twice if the first adaptation did not decrease resolution. auto first_step = GetAdaptationDownStep(input_state, current_restrictions_); - if (!absl::holds_alternative(first_step)) { + if (!std::holds_alternative(first_step)) { return first_step; } - auto first_restrictions = absl::get(first_step); + auto first_restrictions = std::get(first_step); if (first_restrictions.counters.resolution_adaptations > current_restrictions_.counters.resolution_adaptations) { return first_step; @@ -644,16 +650,15 @@ VideoStreamAdapter::GetAdaptDownResolutionStepForBalanced( // We didn't decrease resolution so force it; amend a resolution resuction // to the existing framerate reduction in `first_restrictions`. 
auto second_step = DecreaseResolution(input_state, first_restrictions); - if (absl::holds_alternative(second_step)) { + if (std::holds_alternative(second_step)) { return second_step; } // If the second step was not successful then settle for the first one. return first_step; } -void VideoStreamAdapter::ApplyAdaptation( - const Adaptation& adaptation, - rtc::scoped_refptr resource) { +void VideoStreamAdapter::ApplyAdaptation(const Adaptation& adaptation, + scoped_refptr resource) { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DCHECK_EQ(adaptation.validation_id_, adaptation_validation_id_); if (adaptation.status() != Adaptation::Status::kValid) @@ -669,7 +674,7 @@ void VideoStreamAdapter::ApplyAdaptation( awaiting_frame_size_change_.emplace( false, adaptation.input_state().frame_size_pixels().value()); } else { - awaiting_frame_size_change_ = absl::nullopt; + awaiting_frame_size_change_ = std::nullopt; } current_restrictions_ = {adaptation.restrictions(), adaptation.counters()}; BroadcastVideoRestrictionsUpdate(adaptation.input_state(), resource); @@ -686,8 +691,8 @@ Adaptation VideoStreamAdapter::GetAdaptationTo( } void VideoStreamAdapter::BroadcastVideoRestrictionsUpdate( - const VideoStreamInputState& input_state, - const rtc::scoped_refptr& resource) { + const VideoStreamInputState& /* input_state */, + const scoped_refptr& resource) { RTC_DCHECK_RUN_ON(&sequence_checker_); VideoSourceRestrictions filtered = FilterRestrictionsByDegradationPreference( source_restrictions(), degradation_preference_); @@ -717,10 +722,10 @@ VideoStreamAdapter::AwaitingFrameSizeChange::AwaitingFrameSizeChange( : pixels_increased(pixels_increased), frame_size_pixels(frame_size_pixels) {} -absl::optional VideoStreamAdapter::GetSingleActiveLayerPixels( +std::optional VideoStreamAdapter::GetSingleActiveLayerPixels( const VideoCodec& codec) { int num_active = 0; - absl::optional pixels; + std::optional pixels; if (codec.codecType == VideoCodecType::kVideoCodecAV1 && codec.GetScalabilityMode().has_value()) { for (int i = 0; @@ -747,7 +752,7 @@ absl::optional VideoStreamAdapter::GetSingleActiveLayerPixels( } } } - return (num_active > 1) ? absl::nullopt : pixels; + return (num_active > 1) ? 
std::nullopt : pixels; } } // namespace webrtc diff --git a/call/adaptation/video_stream_adapter.h b/call/adaptation/video_stream_adapter.h index 5c174178e4..285655a168 100644 --- a/call/adaptation/video_stream_adapter.h +++ b/call/adaptation/video_stream_adapter.h @@ -11,22 +11,22 @@ #ifndef CALL_ADAPTATION_VIDEO_STREAM_ADAPTER_H_ #define CALL_ADAPTATION_VIDEO_STREAM_ADAPTER_H_ -#include -#include +#include +#include +#include #include -#include "absl/types/optional.h" -#include "absl/types/variant.h" #include "api/adaptation/resource.h" #include "api/field_trials_view.h" #include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/video/video_adaptation_counters.h" +#include "api/video_codecs/video_codec.h" #include "call/adaptation/adaptation_constraint.h" -#include "call/adaptation/degradation_preference_provider.h" #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_input_state.h" #include "call/adaptation/video_stream_input_state_provider.h" -#include "modules/video_coding/utility/quality_scaler.h" #include "rtc_base/experiments/balanced_degradation_settings.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" @@ -46,7 +46,7 @@ class VideoSourceRestrictionsListener { virtual void OnVideoSourceRestrictionsUpdated( VideoSourceRestrictions restrictions, const VideoAdaptationCounters& adaptation_counters, - rtc::scoped_refptr reason, + scoped_refptr reason, const VideoSourceRestrictions& unfiltered_restrictions) = 0; }; @@ -158,26 +158,26 @@ class VideoStreamAdapter { // Updates source_restrictions() the Adaptation. void ApplyAdaptation(const Adaptation& adaptation, - rtc::scoped_refptr resource); + scoped_refptr resource); struct RestrictionsWithCounters { VideoSourceRestrictions restrictions; VideoAdaptationCounters counters; }; - static absl::optional GetSingleActiveLayerPixels( + static std::optional GetSingleActiveLayerPixels( const VideoCodec& codec); private: void BroadcastVideoRestrictionsUpdate( const VideoStreamInputState& input_state, - const rtc::scoped_refptr& resource); + const scoped_refptr& resource); bool HasSufficientInputForAdaptation(const VideoStreamInputState& input_state) const RTC_RUN_ON(&sequence_checker_); using RestrictionsOrState = - absl::variant; + std::variant; RestrictionsOrState GetAdaptationUpStep( const VideoStreamInputState& input_state) const RTC_RUN_ON(&sequence_checker_); @@ -221,8 +221,7 @@ class VideoStreamAdapter { const VideoStreamInputState& input_state) const RTC_RUN_ON(&sequence_checker_); - RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_ - RTC_GUARDED_BY(&sequence_checker_); + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; // Gets the input state which is the basis of all adaptations. // Thread safe. VideoStreamInputStateProvider* input_state_provider_; @@ -249,7 +248,7 @@ class VideoStreamAdapter { const bool pixels_increased; const int frame_size_pixels; }; - absl::optional awaiting_frame_size_change_ + std::optional awaiting_frame_size_change_ RTC_GUARDED_BY(&sequence_checker_); // The previous restrictions value. Starts as unrestricted. 
VideoSourceRestrictions last_video_source_restrictions_ diff --git a/call/adaptation/video_stream_adapter_unittest.cc b/call/adaptation/video_stream_adapter_unittest.cc index d4bc650856..83720b4318 100644 --- a/call/adaptation/video_stream_adapter_unittest.cc +++ b/call/adaptation/video_stream_adapter_unittest.cc @@ -10,34 +10,31 @@ #include "call/adaptation/video_stream_adapter.h" +#include +#include #include -#include -#include "absl/types/optional.h" +#include "api/adaptation/resource.h" +#include "api/rtp_parameters.h" #include "api/scoped_refptr.h" -#include "api/video/video_adaptation_reason.h" -#include "api/video_codecs/video_codec.h" +#include "api/video/video_adaptation_counters.h" #include "api/video_codecs/video_encoder.h" #include "call/adaptation/adaptation_constraint.h" -#include "call/adaptation/encoder_settings.h" #include "call/adaptation/test/fake_frame_rate_provider.h" #include "call/adaptation/test/fake_resource.h" #include "call/adaptation/test/fake_video_stream_input_state_provider.h" #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_input_state.h" +#include "rtc_base/checks.h" #include "rtc_base/string_encode.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/scoped_key_value_config.h" -#include "test/testsupport/rtc_expect_death.h" -#include "video/config/video_encoder_config.h" namespace webrtc { using ::testing::_; -using ::testing::DoAll; using ::testing::Return; -using ::testing::SaveArg; namespace { @@ -52,12 +49,12 @@ const int kBalancedLowFrameRateFps = 10; std::string BalancedFieldTrialConfig() { return "WebRTC-Video-BalancedDegradationSettings/pixels:" + - rtc::ToString(kBalancedLowResolutionPixels) + "|" + - rtc::ToString(kBalancedMediumResolutionPixels) + "|" + - rtc::ToString(kBalancedHighResolutionPixels) + - ",fps:" + rtc::ToString(kBalancedLowFrameRateFps) + "|" + - rtc::ToString(kBalancedMediumFrameRateFps) + "|" + - rtc::ToString(kBalancedHighFrameRateFps) + "/"; + absl::StrCat(kBalancedLowResolutionPixels) + "|" + + absl::StrCat(kBalancedMediumResolutionPixels) + "|" + + absl::StrCat(kBalancedHighResolutionPixels) + + ",fps:" + absl::StrCat(kBalancedLowFrameRateFps) + "|" + + absl::StrCat(kBalancedMediumFrameRateFps) + "|" + + absl::StrCat(kBalancedHighFrameRateFps) + "/"; } // Responsible for adjusting the inputs to VideoStreamAdapter (SetInput), such @@ -114,9 +111,9 @@ class FakeVideoStream { class FakeVideoStreamAdapterListner : public VideoSourceRestrictionsListener { public: void OnVideoSourceRestrictionsUpdated( - VideoSourceRestrictions restrictions, - const VideoAdaptationCounters& adaptation_counters, - rtc::scoped_refptr reason, + VideoSourceRestrictions /* restrictions */, + const VideoAdaptationCounters& /* adaptation_counters */, + scoped_refptr /* reason */, const VideoSourceRestrictions& unfiltered_restrictions) override { calls_++; last_restrictions_ = unfiltered_restrictions; @@ -160,7 +157,7 @@ class VideoStreamAdapterTest : public ::testing::Test { protected: webrtc::test::ScopedKeyValueConfig field_trials_; FakeVideoStreamInputStateProvider input_state_provider_; - rtc::scoped_refptr resource_; + scoped_refptr resource_; testing::StrictMock encoder_stats_observer_; VideoStreamAdapter adapter_; }; @@ -180,9 +177,9 @@ TEST_F(VideoStreamAdapterTest, MaintainFramerate_DecreasesPixelsToThreeFifths) { adapter_.ApplyAdaptation(adaptation, nullptr); EXPECT_EQ(static_cast((kInputPixels * 3) / 5), adapter_.source_restrictions().max_pixels_per_frame()); - EXPECT_EQ(absl::nullopt, 
+ EXPECT_EQ(std::nullopt, adapter_.source_restrictions().target_pixels_per_frame()); - EXPECT_EQ(absl::nullopt, adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(std::nullopt, adapter_.source_restrictions().max_frame_rate()); EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); } @@ -219,7 +216,7 @@ TEST_F(VideoStreamAdapterTest, MaintainFramerate_IncreasePixelsToFiveThirds) { adapter_.source_restrictions().max_pixels_per_frame()); EXPECT_EQ(static_cast(target), adapter_.source_restrictions().target_pixels_per_frame()); - EXPECT_EQ(absl::nullopt, adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(std::nullopt, adapter_.source_restrictions().max_frame_rate()); EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); } @@ -247,9 +244,9 @@ TEST_F(VideoStreamAdapterTest, MaintainResolution_DecreasesFpsToTwoThirds) { Adaptation adaptation = adapter_.GetAdaptationDown(); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); adapter_.ApplyAdaptation(adaptation, nullptr); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, adapter_.source_restrictions().max_pixels_per_frame()); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, adapter_.source_restrictions().target_pixels_per_frame()); EXPECT_EQ(static_cast((kInputFps * 2) / 3), adapter_.source_restrictions().max_frame_rate()); @@ -286,9 +283,9 @@ TEST_F(VideoStreamAdapterTest, MaintainResolution_IncreaseFpsToThreeHalves) { Adaptation adaptation = adapter_.GetAdaptationUp(); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); fake_stream.ApplyAdaptation(adaptation); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, adapter_.source_restrictions().max_pixels_per_frame()); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, adapter_.source_restrictions().target_pixels_per_frame()); EXPECT_EQ(static_cast((input_fps * 3) / 2), adapter_.source_restrictions().max_frame_rate()); @@ -321,9 +318,9 @@ TEST_F(VideoStreamAdapterTest, Balanced_DecreaseFrameRate) { Adaptation adaptation = adapter_.GetAdaptationDown(); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); adapter_.ApplyAdaptation(adaptation, nullptr); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, adapter_.source_restrictions().max_pixels_per_frame()); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, adapter_.source_restrictions().target_pixels_per_frame()); EXPECT_EQ(static_cast(kBalancedMediumFrameRateFps), adapter_.source_restrictions().max_frame_rate()); @@ -349,9 +346,9 @@ TEST_F(VideoStreamAdapterTest, Balanced_DecreaseResolution) { EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); fake_stream.ApplyAdaptation(adaptation); } - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, adapter_.source_restrictions().max_pixels_per_frame()); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, adapter_.source_restrictions().target_pixels_per_frame()); EXPECT_EQ(static_cast(kBalancedHighFrameRateFps), adapter_.source_restrictions().max_frame_rate()); @@ -368,7 +365,7 @@ TEST_F(VideoStreamAdapterTest, Balanced_DecreaseResolution) { static_cast((kBalancedHighResolutionPixels * 3) / 5); EXPECT_EQ(kReducedPixelsFirstStep, adapter_.source_restrictions().max_pixels_per_frame()); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, adapter_.source_restrictions().target_pixels_per_frame()); EXPECT_EQ(static_cast(kBalancedHighFrameRateFps), adapter_.source_restrictions().max_frame_rate()); @@ -387,7 +384,7 @@ TEST_F(VideoStreamAdapterTest, Balanced_DecreaseResolution) { } EXPECT_EQ(kReducedPixelsSecondStep, 
adapter_.source_restrictions().max_pixels_per_frame()); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, adapter_.source_restrictions().target_pixels_per_frame()); EXPECT_EQ(static_cast(kBalancedHighFrameRateFps), adapter_.source_restrictions().max_frame_rate()); @@ -482,7 +479,7 @@ TEST_F(VideoStreamAdapterTest, Balanced_IncreaseFrameRateAndResolution) { Adaptation adaptation = adapter_.GetAdaptationUp(); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); fake_stream.ApplyAdaptation(adaptation); - EXPECT_EQ(absl::nullopt, adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(std::nullopt, adapter_.source_restrictions().max_frame_rate()); EXPECT_EQ(2, adapter_.adaptation_counters().resolution_adaptations); EXPECT_EQ(0, adapter_.adaptation_counters().fps_adaptations); } diff --git a/call/adaptation/video_stream_input_state.cc b/call/adaptation/video_stream_input_state.cc index 9c0d475902..a2a9f11b4c 100644 --- a/call/adaptation/video_stream_input_state.cc +++ b/call/adaptation/video_stream_input_state.cc @@ -10,24 +10,27 @@ #include "call/adaptation/video_stream_input_state.h" +#include + +#include "api/video/video_codec_type.h" #include "api/video_codecs/video_encoder.h" namespace webrtc { VideoStreamInputState::VideoStreamInputState() : has_input_(false), - frame_size_pixels_(absl::nullopt), + frame_size_pixels_(std::nullopt), frames_per_second_(0), video_codec_type_(VideoCodecType::kVideoCodecGeneric), min_pixels_per_frame_(kDefaultMinPixelsPerFrame), - single_active_stream_pixels_(absl::nullopt) {} + single_active_stream_pixels_(std::nullopt) {} void VideoStreamInputState::set_has_input(bool has_input) { has_input_ = has_input; } void VideoStreamInputState::set_frame_size_pixels( - absl::optional frame_size_pixels) { + std::optional frame_size_pixels) { frame_size_pixels_ = frame_size_pixels; } @@ -45,7 +48,7 @@ void VideoStreamInputState::set_min_pixels_per_frame(int min_pixels_per_frame) { } void VideoStreamInputState::set_single_active_stream_pixels( - absl::optional single_active_stream_pixels) { + std::optional single_active_stream_pixels) { single_active_stream_pixels_ = single_active_stream_pixels; } @@ -53,7 +56,7 @@ bool VideoStreamInputState::has_input() const { return has_input_; } -absl::optional VideoStreamInputState::frame_size_pixels() const { +std::optional VideoStreamInputState::frame_size_pixels() const { return frame_size_pixels_; } @@ -69,7 +72,7 @@ int VideoStreamInputState::min_pixels_per_frame() const { return min_pixels_per_frame_; } -absl::optional VideoStreamInputState::single_active_stream_pixels() const { +std::optional VideoStreamInputState::single_active_stream_pixels() const { return single_active_stream_pixels_; } diff --git a/call/adaptation/video_stream_input_state.h b/call/adaptation/video_stream_input_state.h index 191e22386a..744788dd0e 100644 --- a/call/adaptation/video_stream_input_state.h +++ b/call/adaptation/video_stream_input_state.h @@ -11,7 +11,8 @@ #ifndef CALL_ADAPTATION_VIDEO_STREAM_INPUT_STATE_H_ #define CALL_ADAPTATION_VIDEO_STREAM_INPUT_STATE_H_ -#include "absl/types/optional.h" +#include + #include "api/video/video_codec_type.h" namespace webrtc { @@ -23,29 +24,29 @@ class VideoStreamInputState { VideoStreamInputState(); void set_has_input(bool has_input); - void set_frame_size_pixels(absl::optional frame_size_pixels); + void set_frame_size_pixels(std::optional frame_size_pixels); void set_frames_per_second(int frames_per_second); void set_video_codec_type(VideoCodecType video_codec_type); void 
set_min_pixels_per_frame(int min_pixels_per_frame); void set_single_active_stream_pixels( - absl::optional single_active_stream_pixels); + std::optional single_active_stream_pixels); bool has_input() const; - absl::optional frame_size_pixels() const; + std::optional frame_size_pixels() const; int frames_per_second() const; VideoCodecType video_codec_type() const; int min_pixels_per_frame() const; - absl::optional single_active_stream_pixels() const; + std::optional single_active_stream_pixels() const; bool HasInputFrameSizeAndFramesPerSecond() const; private: bool has_input_; - absl::optional frame_size_pixels_; + std::optional frame_size_pixels_; int frames_per_second_; VideoCodecType video_codec_type_; int min_pixels_per_frame_; - absl::optional single_active_stream_pixels_; + std::optional single_active_stream_pixels_; }; } // namespace webrtc diff --git a/call/adaptation/video_stream_input_state_provider.cc b/call/adaptation/video_stream_input_state_provider.cc index 3261af39ea..1a2203826b 100644 --- a/call/adaptation/video_stream_input_state_provider.cc +++ b/call/adaptation/video_stream_input_state_provider.cc @@ -10,7 +10,12 @@ #include "call/adaptation/video_stream_input_state_provider.h" +#include "call/adaptation/encoder_settings.h" #include "call/adaptation/video_stream_adapter.h" +#include "call/adaptation/video_stream_input_state.h" +#include "rtc_base/checks.h" +#include "rtc_base/synchronization/mutex.h" +#include "video/video_stream_encoder_observer.h" namespace webrtc { diff --git a/call/adaptation/video_stream_input_state_provider.h b/call/adaptation/video_stream_input_state_provider.h index 81996e6eb9..1702c7296d 100644 --- a/call/adaptation/video_stream_input_state_provider.h +++ b/call/adaptation/video_stream_input_state_provider.h @@ -14,6 +14,7 @@ #include "call/adaptation/encoder_settings.h" #include "call/adaptation/video_stream_input_state.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" #include "video/video_stream_encoder_observer.h" namespace webrtc { diff --git a/call/adaptation/video_stream_input_state_provider_unittest.cc b/call/adaptation/video_stream_input_state_provider_unittest.cc index 5da2ef21cd..b31c54a25e 100644 --- a/call/adaptation/video_stream_input_state_provider_unittest.cc +++ b/call/adaptation/video_stream_input_state_provider_unittest.cc @@ -10,12 +10,17 @@ #include "call/adaptation/video_stream_input_state_provider.h" +#include #include +#include "api/video/video_codec_type.h" +#include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" #include "call/adaptation/encoder_settings.h" #include "call/adaptation/test/fake_frame_rate_provider.h" +#include "call/adaptation/video_stream_input_state.h" #include "test/gtest.h" +#include "video/config/video_encoder_config.h" namespace webrtc { @@ -24,11 +29,11 @@ TEST(VideoStreamInputStateProviderTest, DefaultValues) { VideoStreamInputStateProvider input_state_provider(&frame_rate_provider); VideoStreamInputState input_state = input_state_provider.InputState(); EXPECT_EQ(false, input_state.has_input()); - EXPECT_EQ(absl::nullopt, input_state.frame_size_pixels()); + EXPECT_EQ(std::nullopt, input_state.frame_size_pixels()); EXPECT_EQ(0, input_state.frames_per_second()); EXPECT_EQ(VideoCodecType::kVideoCodecGeneric, input_state.video_codec_type()); EXPECT_EQ(kDefaultMinPixelsPerFrame, input_state.min_pixels_per_frame()); - EXPECT_EQ(absl::nullopt, input_state.single_active_stream_pixels()); + EXPECT_EQ(std::nullopt, 
input_state.single_active_stream_pixels()); } TEST(VideoStreamInputStateProviderTest, ValuesSet) { diff --git a/call/audio_receive_stream.h b/call/audio_receive_stream.h index 4879311fdb..4ae9ba04de 100644 --- a/call/audio_receive_stream.h +++ b/call/audio_receive_stream.h @@ -11,16 +11,24 @@ #ifndef CALL_AUDIO_RECEIVE_STREAM_H_ #define CALL_AUDIO_RECEIVE_STREAM_H_ +#include +#include #include -#include +#include #include -#include -#include "absl/types/optional.h" +#include "api/audio/audio_mixer.h" +#include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_format.h" #include "api/call/transport.h" #include "api/crypto/crypto_options.h" -#include "api/rtp_parameters.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/frame_transformer_interface.h" +#include "api/rtp_headers.h" +#include "api/scoped_refptr.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "call/receive_stream.h" #include "call/rtp_config.h" @@ -42,7 +50,7 @@ class AudioReceiveStreamInterface : public MediaReceiveStreamInterface { uint64_t packets_discarded = 0; uint32_t nacks_sent = 0; std::string codec_name; - absl::optional codec_payload_type; + std::optional codec_payload_type; uint32_t jitter_ms = 0; uint32_t jitter_buffer_ms = 0; uint32_t jitter_buffer_preferred_ms = 0; @@ -62,6 +70,7 @@ class AudioReceiveStreamInterface : public MediaReceiveStreamInterface { double jitter_buffer_minimum_delay_seconds = 0.0; uint64_t inserted_samples_for_deceleration = 0; uint64_t removed_samples_for_acceleration = 0; + double total_processing_delay_seconds = 0.0; // Stats below DO NOT correspond directly to anything in the WebRTC stats float expand_rate = 0.0f; float speech_expand_rate = 0.0f; @@ -83,21 +92,24 @@ class AudioReceiveStreamInterface : public MediaReceiveStreamInterface { // The timestamp at which the last packet was received, i.e. the time of the // local clock when it was received - not the RTP timestamp of that packet. // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-lastpacketreceivedtimestamp - absl::optional last_packet_received; + std::optional last_packet_received; uint64_t jitter_buffer_flushes = 0; double relative_packet_arrival_delay_seconds = 0.0; int32_t interruption_count = 0; int32_t total_interruption_duration_ms = 0; // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-estimatedplayouttimestamp - absl::optional estimated_playout_ntp_timestamp_ms; + std::optional estimated_playout_ntp_timestamp_ms; // Remote outbound stats derived by the received RTCP sender reports. // https://w3c.github.io/webrtc-stats/#remoteoutboundrtpstats-dict* - absl::optional last_sender_report_timestamp_ms; - absl::optional last_sender_report_remote_timestamp_ms; + std::optional last_sender_report_timestamp; + // TODO: bugs.webrtc.org/370535296 - Remove the utc timestamp when linked + // issue is fixed. + std::optional last_sender_report_utc_timestamp; + std::optional last_sender_report_remote_utc_timestamp; uint64_t sender_reports_packets_sent = 0; uint64_t sender_reports_bytes_sent = 0; uint64_t sender_reports_reports_count = 0; - absl::optional round_trip_time; + std::optional round_trip_time; TimeDelta total_round_trip_time = TimeDelta::Zero(); int round_trip_time_measurements = 0; }; @@ -117,6 +129,7 @@ class AudioReceiveStreamInterface : public MediaReceiveStreamInterface { // See NackConfig for description. 
NackConfig nack; + RtcpMode rtcp_mode = RtcpMode::kCompound; } rtp; // Receive-side RTT. @@ -137,9 +150,9 @@ class AudioReceiveStreamInterface : public MediaReceiveStreamInterface { // Decoder specifications for every payload type that we can receive. std::map decoder_map; - rtc::scoped_refptr decoder_factory; + scoped_refptr decoder_factory; - absl::optional codec_pair_id; + std::optional codec_pair_id; // Per PeerConnection crypto options. webrtc::CryptoOptions crypto_options; @@ -150,14 +163,14 @@ class AudioReceiveStreamInterface : public MediaReceiveStreamInterface { // TODO(tommi): Remove this member variable from the struct. It's not // a part of the AudioReceiveStreamInterface state but rather a pass through // variable. - rtc::scoped_refptr frame_decryptor; + scoped_refptr frame_decryptor; // An optional frame transformer used by insertable streams to transform // encoded frames. // TODO(tommi): Remove this member variable from the struct. It's not // a part of the AudioReceiveStreamInterface state but rather a pass through // variable. - rtc::scoped_refptr frame_transformer; + scoped_refptr frame_transformer; }; // Methods that support reconfiguring the stream post initialization. @@ -198,6 +211,10 @@ class AudioReceiveStreamInterface : public MediaReceiveStreamInterface { // post initialization. virtual uint32_t remote_ssrc() const = 0; + // Get the object suitable to inject into the AudioMixer + // (normally "this"). + virtual AudioMixer::Source* source() = 0; + protected: virtual ~AudioReceiveStreamInterface() {} }; diff --git a/call/audio_send_stream.cc b/call/audio_send_stream.cc index a36050a9f7..612d27a60c 100644 --- a/call/audio_send_stream.cc +++ b/call/audio_send_stream.cc @@ -12,7 +12,11 @@ #include -#include "rtc_base/strings/audio_format_to_string.h" +#include + +#include "api/audio_codecs/audio_format.h" +#include "api/call/transport.h" +#include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" namespace webrtc { @@ -26,7 +30,7 @@ AudioSendStream::Config::Config(Transport* send_transport) AudioSendStream::Config::~Config() = default; std::string AudioSendStream::Config::ToString() const { - rtc::StringBuilder ss; + StringBuilder ss; ss << "{rtp: " << rtp.ToString(); ss << ", rtcp_report_interval_ms: " << rtcp_report_interval_ms; ss << ", send_transport: " << (send_transport ? "(Transport)" : "null"); @@ -47,7 +51,7 @@ AudioSendStream::Config::Rtp::~Rtp() = default; std::string AudioSendStream::Config::Rtp::ToString() const { char buf[1024]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); ss << "{ssrc: " << ssrc; if (!rid.empty()) { ss << ", rid: " << rid; @@ -77,17 +81,16 @@ AudioSendStream::Config::SendCodecSpec::~SendCodecSpec() = default; std::string AudioSendStream::Config::SendCodecSpec::ToString() const { char buf[1024]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); ss << "{nack_enabled: " << (nack_enabled ? "true" : "false"); - ss << ", transport_cc_enabled: " << (transport_cc_enabled ? "true" : "false"); ss << ", enable_non_sender_rtt: " << (enable_non_sender_rtt ? "true" : "false"); ss << ", cng_payload_type: " - << (cng_payload_type ? rtc::ToString(*cng_payload_type) : ""); + << (cng_payload_type ? absl::StrCat(*cng_payload_type) : ""); ss << ", red_payload_type: " - << (red_payload_type ? rtc::ToString(*red_payload_type) : ""); + << (red_payload_type ? 
absl::StrCat(*red_payload_type) : ""); ss << ", payload_type: " << payload_type; - ss << ", format: " << rtc::ToString(format); + ss << ", format: " << absl::StrCat(format); ss << '}'; return ss.str(); } @@ -95,7 +98,6 @@ std::string AudioSendStream::Config::SendCodecSpec::ToString() const { bool AudioSendStream::Config::SendCodecSpec::operator==( const AudioSendStream::Config::SendCodecSpec& rhs) const { if (nack_enabled == rhs.nack_enabled && - transport_cc_enabled == rhs.transport_cc_enabled && enable_non_sender_rtt == rhs.enable_non_sender_rtt && cng_payload_type == rhs.cng_payload_type && red_payload_type == rhs.red_payload_type && diff --git a/call/audio_send_stream.h b/call/audio_send_stream.h index 9c2fad652f..d1b3e64ba3 100644 --- a/call/audio_send_stream.h +++ b/call/audio_send_stream.h @@ -11,11 +11,12 @@ #ifndef CALL_AUDIO_SEND_STREAM_H_ #define CALL_AUDIO_SEND_STREAM_H_ -#include +#include +#include #include #include -#include "absl/types/optional.h" +#include "api/audio/audio_processing_statistics.h" #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_encoder_factory.h" @@ -24,12 +25,12 @@ #include "api/crypto/crypto_options.h" #include "api/crypto/frame_encryptor_interface.h" #include "api/frame_transformer_interface.h" +#include "api/rtp_headers.h" #include "api/rtp_parameters.h" #include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" +#include "api/units/time_delta.h" #include "call/audio_sender.h" -#include "call/rtp_config.h" -#include "modules/audio_processing/include/audio_processing_statistics.h" #include "modules/rtp_rtcp/include/report_block_data.h" namespace webrtc { @@ -54,7 +55,7 @@ class AudioSendStream : public AudioSender { int32_t packets_lost = -1; float fraction_lost = -1.0f; std::string codec_name; - absl::optional codec_payload_type; + std::optional codec_payload_type; int32_t jitter_ms = -1; int64_t rtt_ms = -1; int16_t audio_level = 0; @@ -106,6 +107,9 @@ class AudioSendStream : public AudioSender { // RTCP CNAME, see RFC 3550. std::string c_name; + + // Compound or reduced size RTCP. + RtcpMode rtcp_mode = RtcpMode::kCompound; } rtp; // Time interval between RTCP report for audio @@ -126,7 +130,7 @@ class AudioSendStream : public AudioSender { // Defines whether to turn on audio network adaptor, and defines its config // string. - absl::optional audio_network_adaptor_config; + std::optional audio_network_adaptor_config; struct SendCodecSpec { SendCodecSpec(int payload_type, const SdpAudioFormat& format); @@ -141,17 +145,16 @@ class AudioSendStream : public AudioSender { int payload_type; SdpAudioFormat format; bool nack_enabled = false; - bool transport_cc_enabled = false; bool enable_non_sender_rtt = false; - absl::optional cng_payload_type; - absl::optional red_payload_type; + std::optional cng_payload_type; + std::optional red_payload_type; // If unset, use the encoder's default target bitrate. - absl::optional target_bitrate_bps; + std::optional target_bitrate_bps; }; - absl::optional send_codec_spec; - rtc::scoped_refptr encoder_factory; - absl::optional codec_pair_id; + std::optional send_codec_spec; + scoped_refptr encoder_factory; + std::optional codec_pair_id; // Track ID as specified during track creation. std::string track_id; @@ -162,11 +165,11 @@ class AudioSendStream : public AudioSender { // An optional custom frame encryptor that allows the entire frame to be // encryptor in whatever way the caller choses. This is not required by // default. 
- rtc::scoped_refptr frame_encryptor; + scoped_refptr frame_encryptor; // An optional frame transformer used by insertable streams to transform // encoded frames. - rtc::scoped_refptr frame_transformer; + scoped_refptr frame_transformer; }; virtual ~AudioSendStream() = default; diff --git a/call/audio_state.h b/call/audio_state.h index 79fb5cf981..d58b7ff97e 100644 --- a/call/audio_state.h +++ b/call/audio_state.h @@ -10,12 +10,12 @@ #ifndef CALL_AUDIO_STATE_H_ #define CALL_AUDIO_STATE_H_ +#include "api/audio/audio_device.h" #include "api/audio/audio_mixer.h" +#include "api/audio/audio_processing.h" +#include "api/ref_count.h" #include "api/scoped_refptr.h" #include "modules/async_audio_processing/async_audio_processing.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "rtc_base/ref_count.h" namespace webrtc { @@ -23,7 +23,7 @@ class AudioTransport; // AudioState holds the state which must be shared between multiple instances of // webrtc::Call for audio processing purposes. -class AudioState : public rtc::RefCountInterface { +class AudioState : public RefCountInterface { public: struct Config { Config(); @@ -31,16 +31,15 @@ class AudioState : public rtc::RefCountInterface { // The audio mixer connected to active receive streams. One per // AudioState. - rtc::scoped_refptr audio_mixer; + scoped_refptr audio_mixer; // The audio processing module. - rtc::scoped_refptr audio_processing; + scoped_refptr audio_processing; // TODO(solenberg): Temporary: audio device module. - rtc::scoped_refptr audio_device_module; + scoped_refptr audio_device_module; - rtc::scoped_refptr - async_audio_processing_factory; + scoped_refptr async_audio_processing_factory; }; virtual AudioProcessing* audio_processing() = 0; @@ -59,8 +58,7 @@ class AudioState : public rtc::RefCountInterface { virtual void SetStereoChannelSwapping(bool enable) = 0; - static rtc::scoped_refptr Create( - const AudioState::Config& config); + static scoped_refptr Create(const AudioState::Config& config); ~AudioState() override {} }; diff --git a/call/bitrate_allocator.cc b/call/bitrate_allocator.cc index 2684a1650e..0a63398878 100644 --- a/call/bitrate_allocator.cc +++ b/call/bitrate_allocator.cc @@ -13,16 +13,25 @@ #include #include -#include -#include +#include +#include +#include +#include +#include +#include #include "absl/algorithm/container.h" +#include "api/call/bitrate_allocation.h" +#include "api/field_trials_view.h" +#include "api/sequence_checker.h" +#include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/numerics/safe_minmax.h" -#include "system_wrappers/include/clock.h" #include "system_wrappers/include/metrics.h" namespace webrtc { @@ -274,8 +283,7 @@ std::map NormalRateAllocation( allocation[observer_config.observer]; if (priority_margin > 0 && bitrate > 0) { int64_t extra_bitrate = std::min(priority_margin, bitrate); - allocation[observer_config.observer] += - rtc::dchecked_cast(extra_bitrate); + allocation[observer_config.observer] += dchecked_cast(extra_bitrate); observers_capacities[observer_config.observer] -= extra_bitrate; bitrate -= extra_bitrate; } @@ -295,7 +303,7 @@ std::map NormalRateAllocation( std::map MaxRateAllocation( const std::vector& allocatable_tracks, uint32_t bitrate, - uint32_t 
sum_max_bitrates) { + uint32_t /* sum_max_bitrates */) { std::map allocation; for (const auto& observer_config : allocatable_tracks) { @@ -317,9 +325,103 @@ std::map ZeroRateAllocation( return allocation; } +// Returns new allocation if modified, std::nullopt otherwise. +std::optional> MaybeApplySurplus( + const std::map& allocation, + const std::vector& allocatable_tracks, + DataRate bitrate, + DataRate upper_elastic_limit) { + if (upper_elastic_limit.IsZero()) + return std::nullopt; + + // In this first pass looping over all `allocatable_tracks`, we aggregates + // - `surplus`: sum of unused rates for all kCanContribute* tracks, + // - `sum_demand`: sum of `bitrate_priority` for all tracks that can consume + // more bitrate to allow proportional sharing of surplus later, + // - `sum_allocated`: sum of allocated bitrates for all tracks, which might + // be larger than `bitrate` e.g. when min_bitrate_bps are enforced. + DataRate surplus = DataRate::Zero(); + double sum_demand = 0.0; + DataRate sum_allocated = DataRate::Zero(); + + for (const auto& observer_config : allocatable_tracks) { + const auto it = allocation.find(observer_config.observer); + if (it == allocation.end()) { + // No allocation for this track. + continue; + } + const DataRate allocated = DataRate::BitsPerSec(it->second); + sum_allocated += allocated; + if (const std::optional elasticity = + observer_config.config.rate_elasticity) { + bool inactive_can_contribute_and_consume = false; + if (elasticity == TrackRateElasticity::kCanContributeUnusedRate || + elasticity == TrackRateElasticity::kCanContributeAndConsume) { + if (const std::optional used = + observer_config.observer->GetUsedRate()) { + if (*used < allocated) { + surplus += allocated - *used; + if (elasticity == TrackRateElasticity::kCanContributeAndConsume && + *used < allocated / 2) { + inactive_can_contribute_and_consume = true; + } + } + } + } + if (!inactive_can_contribute_and_consume && + (elasticity == TrackRateElasticity::kCanConsumeExtraRate || + elasticity == TrackRateElasticity::kCanContributeAndConsume)) { + sum_demand += observer_config.config.bitrate_priority; + } + } + } + + // `sum_allocated` can exceed `bitrate` if sum minBitrates exceeds + // estimated rate. The real `surplus` should cover the difference. + DataRate overshoot = + (sum_allocated >= bitrate) ? (sum_allocated - bitrate) : DataRate::Zero(); + if (sum_demand < 0.0001 || overshoot > surplus) { + // No demand for extra bitrate or no available surplus. + return std::nullopt; + } + surplus -= overshoot; + + auto new_allocation = allocation; + // We loop over all allocatable_tracks again, and proportionally assign + // `surplus` to each track according to `bitrate_priority`. + for (const auto& observer_config : allocatable_tracks) { + auto it = new_allocation.find(observer_config.observer); + if (it == new_allocation.end()) { + // No allocation for this track. 
+ continue; + } + std::optional elasticity = + observer_config.config.rate_elasticity; + if (elasticity == TrackRateElasticity::kCanConsumeExtraRate || + elasticity == TrackRateElasticity::kCanContributeAndConsume) { + DataRate allocated = DataRate::BitsPerSec(it->second); + if (allocated < upper_elastic_limit) { + allocated += + surplus * (observer_config.config.bitrate_priority / sum_demand); + if (allocated > upper_elastic_limit) + allocated = upper_elastic_limit; + } + DataRate max_bitrate = + DataRate::BitsPerSec(observer_config.config.max_bitrate_bps); + if (allocated > max_bitrate) { + allocated = max_bitrate; + } + // Save new allocated rate back to `new_allocation`. + it->second = allocated.bps(); + } + } + return new_allocation; +} + std::map AllocateBitrates( const std::vector& allocatable_tracks, - uint32_t bitrate) { + uint32_t bitrate, + DataRate upper_elastic_limit) { if (allocatable_tracks.empty()) return std::map(); @@ -342,8 +444,13 @@ std::map AllocateBitrates( // All observers will get their min bitrate plus a share of the rest. This // share is allocated to each observer based on its bitrate_priority. - if (bitrate <= sum_max_bitrates) - return NormalRateAllocation(allocatable_tracks, bitrate, sum_min_bitrates); + if (bitrate <= sum_max_bitrates) { + auto allocation = + NormalRateAllocation(allocatable_tracks, bitrate, sum_min_bitrates); + return MaybeApplySurplus(allocation, allocatable_tracks, + DataRate::BitsPerSec(bitrate), upper_elastic_limit) + .value_or(allocation); + } // All observers will get up to transmission_max_bitrate_multiplier_ x max. return MaxRateAllocation(allocatable_tracks, bitrate, sum_max_bitrates); @@ -351,7 +458,8 @@ std::map AllocateBitrates( } // namespace -BitrateAllocator::BitrateAllocator(LimitObserver* limit_observer) +BitrateAllocator::BitrateAllocator(LimitObserver* limit_observer, + DataRate upper_elastic_rate_limit) : limit_observer_(limit_observer), last_target_bps_(0), last_stable_target_bps_(0), @@ -360,7 +468,8 @@ BitrateAllocator::BitrateAllocator(LimitObserver* limit_observer) last_rtt_(0), last_bwe_period_ms_(1000), num_pause_events_(0), - last_bwe_log_time_(0) { + last_bwe_log_time_(0), + upper_elastic_rate_limit_(upper_elastic_rate_limit) { sequenced_checker_.Detach(); } @@ -383,7 +492,7 @@ void BitrateAllocator::OnNetworkEstimateChanged(TargetTransferRate msg) { int loss_ratio_255 = msg.network_estimate.loss_rate_ratio * 255; last_fraction_loss_ = - rtc::dchecked_cast(rtc::SafeClamp(loss_ratio_255, 0, 255)); + dchecked_cast(SafeClamp(loss_ratio_255, 0, 255)); last_rtt_ = msg.network_estimate.round_trip_time.ms(); last_bwe_period_ms_ = msg.network_estimate.bwe_period.ms(); @@ -394,14 +503,15 @@ void BitrateAllocator::OnNetworkEstimateChanged(TargetTransferRate msg) { last_bwe_log_time_ = now; } - auto allocation = AllocateBitrates(allocatable_tracks_, last_target_bps_); - auto stable_bitrate_allocation = - AllocateBitrates(allocatable_tracks_, last_stable_target_bps_); + auto allocation = AllocateBitrates(allocatable_tracks_, last_target_bps_, + upper_elastic_rate_limit_); + auto stable_bitrate_allocation = AllocateBitrates( + allocatable_tracks_, last_stable_target_bps_, DataRate::Zero()); - for (auto& config : allocatable_tracks_) { - uint32_t allocated_bitrate = allocation[config.observer]; + for (auto& track : allocatable_tracks_) { + uint32_t allocated_bitrate = allocation[track.observer]; uint32_t allocated_stable_target_rate = - stable_bitrate_allocation[config.observer]; + stable_bitrate_allocation[track.observer]; 
BitrateAllocationUpdate update; update.target_bitrate = DataRate::BitsPerSec(allocated_bitrate); update.stable_target_bitrate = @@ -410,35 +520,36 @@ void BitrateAllocator::OnNetworkEstimateChanged(TargetTransferRate msg) { update.round_trip_time = TimeDelta::Millis(last_rtt_); update.bwe_period = TimeDelta::Millis(last_bwe_period_ms_); update.cwnd_reduce_ratio = msg.cwnd_reduce_ratio; - uint32_t protection_bitrate = config.observer->OnBitrateUpdated(update); + uint32_t protection_bitrate = track.observer->OnBitrateUpdated(update); - if (allocated_bitrate == 0 && config.allocated_bitrate_bps > 0) { + if (allocated_bitrate == 0 && track.allocated_bitrate_bps > 0) { if (last_target_bps_ > 0) ++num_pause_events_; // The protection bitrate is an estimate based on the ratio between media // and protection used before this observer was muted. uint32_t predicted_protection_bps = - (1.0 - config.media_ratio) * config.config.min_bitrate_bps; - RTC_LOG(LS_INFO) << "Pausing observer " << config.observer + (1.0 - track.media_ratio) * track.config.min_bitrate_bps; + RTC_LOG(LS_INFO) << "Pausing observer " << track.observer << " with configured min bitrate " - << config.config.min_bitrate_bps + << track.config.min_bitrate_bps << " and current estimate of " << last_target_bps_ << " and protection bitrate " << predicted_protection_bps; - } else if (allocated_bitrate > 0 && config.allocated_bitrate_bps == 0) { + } else if (allocated_bitrate > 0 && track.allocated_bitrate_bps == 0) { if (last_target_bps_ > 0) ++num_pause_events_; - RTC_LOG(LS_INFO) << "Resuming observer " << config.observer + RTC_LOG(LS_INFO) << "Resuming observer " << track.observer << ", configured min bitrate " - << config.config.min_bitrate_bps + << track.config.min_bitrate_bps << ", current allocation " << allocated_bitrate << " and protection bitrate " << protection_bitrate; } // Only update the media ratio if the observer got an allocation. if (allocated_bitrate > 0) - config.media_ratio = MediaRatio(allocated_bitrate, protection_bitrate); - config.allocated_bitrate_bps = allocated_bitrate; + track.media_ratio = MediaRatio(allocated_bitrate, protection_bitrate); + track.allocated_bitrate_bps = allocated_bitrate; + track.last_used_bitrate = track.observer->GetUsedRate(); } UpdateAllocationLimits(); } @@ -461,13 +572,14 @@ void BitrateAllocator::AddObserver(BitrateAllocatorObserver* observer, if (last_target_bps_ > 0) { // Calculate a new allocation and update all observers. 
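// The pause log above sizes the expected protection overhead from the media
// ratio observed before the stream was paused:
//   predicted_protection_bps = (1 - media_ratio) * min_bitrate_bps
// With illustrative numbers (not from this patch), a track that spent 90% of
// its allocation on media (media_ratio = 0.9) and has a 32000 bps configured
// minimum is logged with roughly (1 - 0.9) * 32000 = 3200 bps of predicted
// protection.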
- auto allocation = AllocateBitrates(allocatable_tracks_, last_target_bps_); - auto stable_bitrate_allocation = - AllocateBitrates(allocatable_tracks_, last_stable_target_bps_); - for (auto& config : allocatable_tracks_) { - uint32_t allocated_bitrate = allocation[config.observer]; + auto allocation = AllocateBitrates(allocatable_tracks_, last_target_bps_, + upper_elastic_rate_limit_); + auto stable_bitrate_allocation = AllocateBitrates( + allocatable_tracks_, last_stable_target_bps_, DataRate::Zero()); + for (auto& track : allocatable_tracks_) { + uint32_t allocated_bitrate = allocation[track.observer]; uint32_t allocated_stable_bitrate = - stable_bitrate_allocation[config.observer]; + stable_bitrate_allocation[track.observer]; BitrateAllocationUpdate update; update.target_bitrate = DataRate::BitsPerSec(allocated_bitrate); update.stable_target_bitrate = @@ -475,10 +587,11 @@ void BitrateAllocator::AddObserver(BitrateAllocatorObserver* observer, update.packet_loss_ratio = last_fraction_loss_ / 256.0; update.round_trip_time = TimeDelta::Millis(last_rtt_); update.bwe_period = TimeDelta::Millis(last_bwe_period_ms_); - uint32_t protection_bitrate = config.observer->OnBitrateUpdated(update); - config.allocated_bitrate_bps = allocated_bitrate; + uint32_t protection_bitrate = track.observer->OnBitrateUpdated(update); + track.allocated_bitrate_bps = allocated_bitrate; + track.last_used_bitrate = track.observer->GetUsedRate(); if (allocated_bitrate > 0) - config.media_ratio = MediaRatio(allocated_bitrate, protection_bitrate); + track.media_ratio = MediaRatio(allocated_bitrate, protection_bitrate); } } else { // Currently, an encoder is not allowed to produce frames. @@ -496,20 +609,92 @@ void BitrateAllocator::AddObserver(BitrateAllocatorObserver* observer, UpdateAllocationLimits(); } +bool BitrateAllocator::RecomputeAllocationIfNeeded() { + RTC_DCHECK_RUN_ON(&sequenced_checker_); + + if (upper_elastic_rate_limit_.IsZero()) { + return false; + } + + bool need_recompute = false; + bool has_contributor = false; + bool has_consumer = false; + + // Recomputes if there is a kCanContribute* track whose current bitrate usage + // has a jump (i.e., increase only) larger than 20% of allocated_bitrate. + constexpr double kUsageJumpRatioThreshold = 0.2; + for (auto& track : allocatable_tracks_) { + if (track.config.rate_elasticity.has_value()) { + const TrackRateElasticity elasticity = *track.config.rate_elasticity; + if (elasticity == TrackRateElasticity::kCanContributeUnusedRate || + elasticity == TrackRateElasticity::kCanContributeAndConsume) { + DataRate current_usage = + track.observer->GetUsedRate().value_or(DataRate::Zero()); + DataRate last_usage = + track.last_used_bitrate.value_or(DataRate::Zero()); + if (!last_usage.IsZero()) { + has_contributor = true; + DataRate recompute_threshold = + DataRate::BitsPerSec(track.LastAllocatedBitrate()) * + kUsageJumpRatioThreshold; + if (current_usage > last_usage + recompute_threshold) { + need_recompute = true; + } + } + } + if (elasticity == TrackRateElasticity::kCanConsumeExtraRate || + elasticity == TrackRateElasticity::kCanContributeAndConsume) { + has_consumer = true; + } + } + } + if (has_contributor == false || has_consumer == false) + return false; + + if (need_recompute && last_target_bps_ > 0) { + // Calculate a new allocation and update all observers. 
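// The recompute trigger above fires only on an upward jump: current usage must
// exceed the last sampled usage by more than 20% of the track's allocation,
// and a track whose last sampled usage was zero is ignored. With illustrative
// numbers, a contributor last seen using 30000 bps of a 100000 bps allocation
// forces a reallocation once its usage climbs past 30000 + 0.2 * 100000 =
// 50000 bps. A small standalone restatement of that check (the helper name is
// made up for illustration):
#include <cstdint>

constexpr double kUsageJumpRatio = 0.2;  // same value as kUsageJumpRatioThreshold above

bool UsageJumpedEnough(int64_t current_bps,
                       int64_t last_sampled_bps,
                       int64_t allocated_bps) {
  if (last_sampled_bps <= 0)
    return false;  // Unknown or zero previous usage never triggers a recompute.
  const int64_t jump_threshold_bps =
      static_cast<int64_t>(allocated_bps * kUsageJumpRatio);
  return current_bps > last_sampled_bps + jump_threshold_bps;
}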
+ auto allocation = AllocateBitrates(allocatable_tracks_, last_target_bps_, + upper_elastic_rate_limit_); + auto stable_bitrate_allocation = AllocateBitrates( + allocatable_tracks_, last_stable_target_bps_, DataRate::Zero()); + for (auto& track : allocatable_tracks_) { + DataRate allocated_bitrate = + DataRate::BitsPerSec(allocation[track.observer]); + DataRate allocated_stable_bitrate = + DataRate::BitsPerSec(stable_bitrate_allocation[track.observer]); + BitrateAllocationUpdate update; + update.target_bitrate = allocated_bitrate; + update.stable_target_bitrate = allocated_stable_bitrate; + update.packet_loss_ratio = last_fraction_loss_ / 256.0; + update.round_trip_time = TimeDelta::Millis(last_rtt_); + update.bwe_period = TimeDelta::Millis(last_bwe_period_ms_); + DataRate protection_bitrate = + DataRate::BitsPerSec(track.observer->OnBitrateUpdated(update)); + track.allocated_bitrate_bps = allocated_bitrate.bps(); + track.last_used_bitrate = track.observer->GetUsedRate(); + if (allocated_bitrate.bps() > 0) + track.media_ratio = + MediaRatio(allocated_bitrate.bps(), protection_bitrate.bps()); + } + UpdateAllocationLimits(); + } + return true; +} + void BitrateAllocator::UpdateAllocationLimits() { BitrateAllocationLimits limits; - for (const auto& config : allocatable_tracks_) { - uint32_t stream_padding = config.config.pad_up_bitrate_bps; - if (config.config.enforce_min_bitrate) { + for (const auto& track : allocatable_tracks_) { + uint32_t stream_padding = track.config.pad_up_bitrate_bps; + if (track.config.enforce_min_bitrate) { limits.min_allocatable_rate += - DataRate::BitsPerSec(config.config.min_bitrate_bps); - } else if (config.allocated_bitrate_bps == 0) { + DataRate::BitsPerSec(track.config.min_bitrate_bps); + } else if (track.allocated_bitrate_bps == 0) { stream_padding = - std::max(config.MinBitrateWithHysteresis(), stream_padding); + std::max(track.MinBitrateWithHysteresis(), stream_padding); } limits.max_padding_rate += DataRate::BitsPerSec(stream_padding); limits.max_allocatable_rate += - DataRate::BitsPerSec(config.config.max_bitrate_bps); + DataRate::BitsPerSec(track.config.max_bitrate_bps); } if (limits.min_allocatable_rate == current_limits_.min_allocatable_rate && @@ -590,4 +775,15 @@ uint32_t bitrate_allocator_impl::AllocatableTrack::MinBitrateWithHysteresis() return min_bitrate; } +// TODO(b/350555527): Remove after experiment +const char kElasticBitrateAllocator[] = "WebRTC-ElasticBitrateAllocation"; +DataRate GetElasticRateAllocationFieldTrialParameter( + const FieldTrialsView& field_trials) { + FieldTrialParameter elastic_rate_limit("upper_limit", + DataRate::Zero()); + std::string trial_string = field_trials.Lookup(kElasticBitrateAllocator); + ParseFieldTrial({&elastic_rate_limit}, trial_string); + return elastic_rate_limit.Get(); +} + } // namespace webrtc diff --git a/call/bitrate_allocator.h b/call/bitrate_allocator.h index 204fc6f94d..270a34e1f7 100644 --- a/call/bitrate_allocator.h +++ b/call/bitrate_allocator.h @@ -13,16 +13,16 @@ #include -#include -#include -#include -#include +#include #include #include "api/call/bitrate_allocation.h" +#include "api/field_trials_view.h" #include "api/sequence_checker.h" #include "api/transport/network_types.h" +#include "api/units/data_rate.h" #include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -37,6 +37,8 @@ class BitrateAllocatorObserver { // Returns the amount of protection used by the BitrateAllocatorObserver // implementation, as bitrate in bps. 
virtual uint32_t OnBitrateUpdated(BitrateAllocationUpdate update) = 0; + // Returns the bitrate consumed (vs allocated) by BitrateAllocatorObserver + virtual std::optional GetUsedRate() const = 0; protected: virtual ~BitrateAllocatorObserver() {} @@ -45,6 +47,12 @@ class BitrateAllocatorObserver { // Struct describing parameters for how a media stream should get bitrate // allocated to it. +enum class TrackRateElasticity { + kCanContributeUnusedRate, + kCanConsumeExtraRate, + kCanContributeAndConsume +}; + struct MediaStreamAllocationConfig { // Minimum bitrate supported by track. 0 equals no min bitrate. uint32_t min_bitrate_bps; @@ -61,6 +69,7 @@ struct MediaStreamAllocationConfig { // observers. If an observer has twice the bitrate_priority of other // observers, it should be allocated twice the bitrate above its min. double bitrate_priority; + std::optional rate_elasticity; }; // Interface used for mocking @@ -86,6 +95,7 @@ struct AllocatableTrack { BitrateAllocatorObserver* observer; MediaStreamAllocationConfig config; int64_t allocated_bitrate_bps; + std::optional last_used_bitrate; double media_ratio; // Part of the total bitrate used for media [0.0, 1.0]. uint32_t LastAllocatedBitrate() const; @@ -110,7 +120,11 @@ class BitrateAllocator : public BitrateAllocatorInterface { virtual ~LimitObserver() = default; }; - explicit BitrateAllocator(LimitObserver* limit_observer); + // `upper_elastic_rate_limit` specifies the rate ceiling an observer can + // reach when unused bits are added. A value of zero disables borrowing of + // unused rates. + BitrateAllocator(LimitObserver* limit_observer, + DataRate upper_elastic_rate_limit); ~BitrateAllocator() override; void UpdateStartRate(uint32_t start_rate_bps); @@ -128,6 +142,11 @@ class BitrateAllocator : public BitrateAllocatorInterface { void AddObserver(BitrateAllocatorObserver* observer, MediaStreamAllocationConfig config) override; + // Checks and recomputes bitrate allocation if necessary (when an + // elastic/audio bitrate increases significantly). Returns whether there is an + // active contributing and active consuming stream. + bool RecomputeAllocationIfNeeded(); + // Removes a previously added observer, but will not trigger a new bitrate // allocation. 
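// Taken together, an observer opts into the elastic allocation above in two
// places: it reports its actual consumption through GetUsedRate(), and it
// declares its role via MediaStreamAllocationConfig::rate_elasticity when it
// registers. A hedged sketch of a contributing observer, modeled on the
// TestContributingBitrateObserver used by the unit tests below; the class and
// member names here are illustrative, only the BitrateAllocator types come
// from this patch:
#include <cstdint>
#include <optional>

#include "api/call/bitrate_allocation.h"
#include "api/units/data_rate.h"
#include "call/bitrate_allocator.h"

class ContributingObserver : public webrtc::BitrateAllocatorObserver {
 public:
  uint32_t OnBitrateUpdated(webrtc::BitrateAllocationUpdate update) override {
    target_ = update.target_bitrate;
    return 0;  // No protection overhead in this sketch.
  }
  // Reporting the rate actually consumed is what lets the allocator lend the
  // unused remainder to kCanConsumeExtraRate tracks.
  std::optional<webrtc::DataRate> GetUsedRate() const override {
    return used_;
  }
  void SetUsedRate(webrtc::DataRate used) { used_ = used; }

 private:
  webrtc::DataRate target_ = webrtc::DataRate::Zero();
  std::optional<webrtc::DataRate> used_;
};
// Registration (other MediaStreamAllocationConfig fields elided):
//   webrtc::MediaStreamAllocationConfig config = /* min/max/priority... */;
//   config.rate_elasticity =
//       webrtc::TrackRateElasticity::kCanContributeUnusedRate;
//   allocator.AddObserver(&observer, config);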
void RemoveObserver(BitrateAllocatorObserver* observer) override; @@ -164,7 +183,12 @@ class BitrateAllocator : public BitrateAllocatorInterface { int num_pause_events_ RTC_GUARDED_BY(&sequenced_checker_); int64_t last_bwe_log_time_ RTC_GUARDED_BY(&sequenced_checker_); BitrateAllocationLimits current_limits_ RTC_GUARDED_BY(&sequenced_checker_); + const DataRate upper_elastic_rate_limit_ RTC_GUARDED_BY(&sequenced_checker_); }; +// TODO(b/350555527): Remove after experiment +DataRate GetElasticRateAllocationFieldTrialParameter( + const FieldTrialsView& field_trials); + } // namespace webrtc #endif // CALL_BITRATE_ALLOCATOR_H_ diff --git a/call/bitrate_allocator_unittest.cc b/call/bitrate_allocator_unittest.cc index 69bdd83397..63f270f3e2 100644 --- a/call/bitrate_allocator_unittest.cc +++ b/call/bitrate_allocator_unittest.cc @@ -11,11 +11,19 @@ #include "call/bitrate_allocator.h" #include +#include #include -#include +#include +#include #include "absl/strings/string_view.h" -#include "system_wrappers/include/clock.h" +#include "api/call/bitrate_allocation.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" @@ -70,11 +78,12 @@ class TestBitrateObserver : public BitrateAllocatorObserver { uint32_t OnBitrateUpdated(BitrateAllocationUpdate update) override { last_bitrate_bps_ = update.target_bitrate.bps(); last_fraction_loss_ = - rtc::dchecked_cast(update.packet_loss_ratio * 256); + dchecked_cast(update.packet_loss_ratio * 256); last_rtt_ms_ = update.round_trip_time.ms(); last_probing_interval_ms_ = update.bwe_period.ms(); return update.target_bitrate.bps() * protection_ratio_; } + std::optional GetUsedRate() const override { return std::nullopt; } uint32_t last_bitrate_bps_; uint8_t last_fraction_loss_; int64_t last_rtt_ms_; @@ -82,6 +91,13 @@ class TestBitrateObserver : public BitrateAllocatorObserver { double protection_ratio_; }; +class TestContributingBitrateObserver : public TestBitrateObserver { + public: + TestContributingBitrateObserver() : rate_usage_(DataRate::Zero()) {} + std::optional GetUsedRate() const override { return rate_usage_; } + DataRate rate_usage_; +}; + constexpr int64_t kDefaultProbingIntervalMs = 3000; const double kDefaultBitratePriority = 1.0; @@ -105,21 +121,24 @@ TargetTransferRate CreateTargetRateMessage(uint32_t target_bitrate_bps, class BitrateAllocatorTest : public ::testing::Test { protected: - BitrateAllocatorTest() : allocator_(new BitrateAllocator(&limit_observer_)) { + BitrateAllocatorTest() + : allocator_(new BitrateAllocator(&limit_observer_, DataRate::Zero())) { allocator_->OnNetworkEstimateChanged( CreateTargetRateMessage(300000u, 0, 0, kDefaultProbingIntervalMs)); } ~BitrateAllocatorTest() {} - void AddObserver(BitrateAllocatorObserver* observer, - uint32_t min_bitrate_bps, - uint32_t max_bitrate_bps, - uint32_t pad_up_bitrate_bps, - bool enforce_min_bitrate, - double bitrate_priority) { + void AddObserver( + BitrateAllocatorObserver* observer, + uint32_t min_bitrate_bps, + uint32_t max_bitrate_bps, + uint32_t pad_up_bitrate_bps, + bool enforce_min_bitrate, + double bitrate_priority, + std::optional rate_elasticity = std::nullopt) { allocator_->AddObserver( - observer, - {min_bitrate_bps, max_bitrate_bps, pad_up_bitrate_bps, - /* priority_bitrate */ 0, enforce_min_bitrate, bitrate_priority}); + observer, 
{min_bitrate_bps, max_bitrate_bps, pad_up_bitrate_bps, + /* priority_bitrate */ 0, enforce_min_bitrate, + bitrate_priority, rate_elasticity}); } MediaStreamAllocationConfig DefaultConfig() const { MediaStreamAllocationConfig default_config; @@ -131,6 +150,12 @@ class BitrateAllocatorTest : public ::testing::Test { default_config.bitrate_priority = kDefaultBitratePriority; return default_config; } + void ReconfigureAllocator(DataRate elastic_rate_upper_limit) { + allocator_.reset( + new BitrateAllocator(&limit_observer_, elastic_rate_upper_limit)); + allocator_->OnNetworkEstimateChanged( + CreateTargetRateMessage(300000u, 0, 0, kDefaultProbingIntervalMs)); + } NiceMock limit_observer_; std::unique_ptr allocator_; @@ -297,7 +322,7 @@ TEST_F(BitrateAllocatorTest, RemoveObserverTriggersLimitObserver) { class BitrateAllocatorTestNoEnforceMin : public ::testing::Test { protected: BitrateAllocatorTestNoEnforceMin() - : allocator_(new BitrateAllocator(&limit_observer_)) { + : allocator_(new BitrateAllocator(&limit_observer_, DataRate::Zero())) { allocator_->OnNetworkEstimateChanged( CreateTargetRateMessage(300000u, 0, 0, kDefaultProbingIntervalMs)); } @@ -307,7 +332,7 @@ class BitrateAllocatorTestNoEnforceMin : public ::testing::Test { uint32_t max_bitrate_bps, uint32_t pad_up_bitrate_bps, bool enforce_min_bitrate, - absl::string_view track_id, + absl::string_view /* track_id */, double bitrate_priority) { allocator_->AddObserver( observer, {min_bitrate_bps, max_bitrate_bps, pad_up_bitrate_bps, 0, @@ -1034,4 +1059,132 @@ TEST_F(BitrateAllocatorTest, PriorityRateThreeObserversTwoAllocatedToMax) { allocator_->RemoveObserver(&observer_high); } +TEST_F(BitrateAllocatorTest, ElasticRateAllocationCanBorrowUnsedRate) { + test::ExplicitKeyValueConfig field_trials( + "WebRTC-ElasticBitrateAllocation/upper_limit:200bps/"); + ReconfigureAllocator( + GetElasticRateAllocationFieldTrialParameter(field_trials)); + TestBitrateObserver observer_consume; + TestContributingBitrateObserver observer_contribute; + AddObserver(&observer_consume, 10, 100, 0, false, 1.0, + TrackRateElasticity::kCanConsumeExtraRate); + AddObserver(&observer_contribute, 10, 100, 0, false, 1.0, + TrackRateElasticity::kCanContributeUnusedRate); + + observer_contribute.rate_usage_ = DataRate::BitsPerSec(20); + allocator_->OnNetworkEstimateChanged( + CreateTargetRateMessage(100, 0, 0, kDefaultProbingIntervalMs)); + + // observer_contribute is allocated 50 but only used 20, so 30 is borrowed to + // observer_consume who gets 50+30=80. 
+ EXPECT_EQ(80u, observer_consume.last_bitrate_bps_); + EXPECT_EQ(50u, observer_contribute.last_bitrate_bps_); + + allocator_->RemoveObserver(&observer_consume); + allocator_->RemoveObserver(&observer_contribute); +} + +TEST_F(BitrateAllocatorTest, ElasticRateAllocationDefaultsInactive) { + test::ExplicitKeyValueConfig field_trials(""); + ReconfigureAllocator( + GetElasticRateAllocationFieldTrialParameter(field_trials)); + TestBitrateObserver observer_consume; + TestContributingBitrateObserver observer_contribute; + AddObserver(&observer_consume, 10, 100, 0, false, 1.0, + TrackRateElasticity::kCanConsumeExtraRate); + AddObserver(&observer_contribute, 10, 100, 0, false, 1.0, + TrackRateElasticity::kCanContributeUnusedRate); + + observer_contribute.rate_usage_ = DataRate::BitsPerSec(20); + allocator_->OnNetworkEstimateChanged( + CreateTargetRateMessage(100, 0, 0, kDefaultProbingIntervalMs)); + + EXPECT_EQ(50u, observer_consume.last_bitrate_bps_); + EXPECT_EQ(50u, observer_contribute.last_bitrate_bps_); + + allocator_->RemoveObserver(&observer_consume); + allocator_->RemoveObserver(&observer_contribute); +} + +TEST_F(BitrateAllocatorTest, ElasticRateAllocationDontExceedMaxBitrate) { + test::ExplicitKeyValueConfig field_trials( + "WebRTC-ElasticBitrateAllocation/upper_limit:200bps/"); + ReconfigureAllocator( + GetElasticRateAllocationFieldTrialParameter(field_trials)); + TestBitrateObserver observer_consume; + TestContributingBitrateObserver observer_contribute; + AddObserver(&observer_consume, 10, 100, 0, false, 1.0, + TrackRateElasticity::kCanConsumeExtraRate); + AddObserver(&observer_contribute, 10, 100, 0, false, 1.0, + TrackRateElasticity::kCanContributeUnusedRate); + + observer_contribute.rate_usage_ = DataRate::BitsPerSec(20); + allocator_->OnNetworkEstimateChanged( + CreateTargetRateMessage(140, 0, 0, kDefaultProbingIntervalMs)); + + // observer_contribute is allocated 70 but only used 20, so 50 is borrowed to + // observer_consume who could get 70+50=120, but is capped by max-bitrate to + // 100. + EXPECT_EQ(100u, observer_consume.last_bitrate_bps_); + EXPECT_EQ(70u, observer_contribute.last_bitrate_bps_); + + allocator_->RemoveObserver(&observer_consume); + allocator_->RemoveObserver(&observer_contribute); +} + +TEST_F(BitrateAllocatorTest, ElasticRateAllocationStayWithinUpperLimit) { + uint32_t upper_limit = 70; + test::ExplicitKeyValueConfig field_trials( + "WebRTC-ElasticBitrateAllocation/upper_limit:" + + std::to_string(upper_limit) + "bps/"); + ReconfigureAllocator( + GetElasticRateAllocationFieldTrialParameter(field_trials)); + TestBitrateObserver observer_consume; + TestContributingBitrateObserver observer_contribute; + AddObserver(&observer_consume, 10, 100, 0, false, 1.0, + TrackRateElasticity::kCanConsumeExtraRate); + AddObserver(&observer_contribute, 10, 100, 0, false, 1.0, + TrackRateElasticity::kCanContributeUnusedRate); + + observer_contribute.rate_usage_ = DataRate::BitsPerSec(20); + allocator_->OnNetworkEstimateChanged( + CreateTargetRateMessage(100, 0, 0, kDefaultProbingIntervalMs)); + + // observer_contribute is allocated 50 but only used 20, so 30 is borrowed to + // observer_consume who could get 30+50=80, but is capped by upper_limit. 
+ EXPECT_EQ(upper_limit, observer_consume.last_bitrate_bps_); + EXPECT_EQ(50u, observer_contribute.last_bitrate_bps_); + + allocator_->RemoveObserver(&observer_consume); + allocator_->RemoveObserver(&observer_contribute); +} + +TEST_F(BitrateAllocatorTest, ElasticRateAllocationDontReduceAllocation) { + uint32_t upper_limit = 70; + test::ExplicitKeyValueConfig field_trials( + "WebRTC-ElasticBitrateAllocation/upper_limit:" + + std::to_string(upper_limit) + "bps/"); + ReconfigureAllocator( + GetElasticRateAllocationFieldTrialParameter(field_trials)); + TestBitrateObserver observer_consume; + TestContributingBitrateObserver observer_contribute; + AddObserver(&observer_consume, 10, 100, 0, false, 1.0, + TrackRateElasticity::kCanConsumeExtraRate); + AddObserver(&observer_contribute, 10, 100, 0, false, 1.0, + TrackRateElasticity::kCanContributeUnusedRate); + + observer_contribute.rate_usage_ = DataRate::BitsPerSec(20); + allocator_->OnNetworkEstimateChanged( + CreateTargetRateMessage(200, 0, 0, kDefaultProbingIntervalMs)); + + // observer_contribute is allocated 100 but only used 20, so 80 can be + // borrowed to observer_consume. But observer_consume already has 100 + // (above upper_limit), so no bitrate is borrowed. + EXPECT_EQ(100u, observer_consume.last_bitrate_bps_); + EXPECT_EQ(100u, observer_contribute.last_bitrate_bps_); + + allocator_->RemoveObserver(&observer_consume); + allocator_->RemoveObserver(&observer_contribute); +} + } // namespace webrtc diff --git a/call/bitrate_estimator_tests.cc b/call/bitrate_estimator_tests.cc index f17a037ed2..28d401d309 100644 --- a/call/bitrate_estimator_tests.cc +++ b/call/bitrate_estimator_tests.cc @@ -11,12 +11,21 @@ #include #include #include +#include #include +#include +#include "absl/strings/match.h" #include "absl/strings/string_view.h" +#include "api/rtp_parameters.h" #include "api/test/create_frame_generator.h" +#include "api/test/simulated_network.h" +#include "api/test/video/function_video_decoder_factory.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/sdp_video_format.h" #include "call/call.h" -#include "call/simulated_network.h" +#include "call/video_receive_stream.h" +#include "call/video_send_stream.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" @@ -26,10 +35,10 @@ #include "test/call_test.h" #include "test/encoder_settings.h" #include "test/fake_decoder.h" -#include "test/fake_encoder.h" #include "test/frame_generator_capturer.h" #include "test/gtest.h" #include "test/video_test_constants.h" +#include "video/config/video_encoder_config.h" namespace webrtc { namespace { @@ -37,9 +46,9 @@ namespace { // writing tests that don't depend on the logging system. 
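// The ceiling exercised by the elastic allocation tests above comes from the
// WebRTC-ElasticBitrateAllocation field trial: upper_limit is parsed into a
// DataRate by GetElasticRateAllocationFieldTrialParameter() and handed to the
// BitrateAllocator constructor, and the zero default (when the trial is not
// set) keeps borrowing disabled, as the DefaultsInactive test shows. A hedged
// wiring sketch; the wrapper function is hypothetical, only the WebRTC names
// inside it come from this patch:
#include "api/units/data_rate.h"
#include "call/bitrate_allocator.h"
#include "test/explicit_key_value_config.h"

void ConfigureElasticAllocator(
    webrtc::BitrateAllocator::LimitObserver* limit_observer) {
  webrtc::test::ExplicitKeyValueConfig field_trials(
      "WebRTC-ElasticBitrateAllocation/upper_limit:200bps/");
  const webrtc::DataRate upper_limit =
      webrtc::GetElasticRateAllocationFieldTrialParameter(field_trials);
  webrtc::BitrateAllocator allocator(limit_observer, upper_limit);
  // ... AddObserver() / OnNetworkEstimateChanged() as in the tests above.
}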
class LogObserver { public: - LogObserver() { rtc::LogMessage::AddLogToStream(&callback_, rtc::LS_INFO); } + LogObserver() { LogMessage::AddLogToStream(&callback_, LS_INFO); } - ~LogObserver() { rtc::LogMessage::RemoveLogToStream(&callback_); } + ~LogObserver() { LogMessage::RemoveLogToStream(&callback_); } void PushExpectedLogLine(absl::string_view expected_log_line) { callback_.PushExpectedLogLine(expected_log_line); @@ -48,7 +57,7 @@ class LogObserver { bool Wait() { return callback_.Wait(); } private: - class Callback : public rtc::LogSink { + class Callback : public LogSink { public: void OnLogMessage(const std::string& message) override { OnLogMessage(absl::string_view(message)); @@ -59,8 +68,8 @@ class LogObserver { // Ignore log lines that are due to missing AST extensions, these are // logged when we switch back from AST to TOF until the wrapping bitrate // estimator gives up on using AST. - if (message.find("BitrateEstimator") != absl::string_view::npos && - message.find("packet is missing") == absl::string_view::npos) { + if (absl::StrContains(message, "BitrateEstimator") && + !absl::StrContains(message, "packet is missing")) { received_log_lines_.push_back(std::string(message)); } @@ -95,7 +104,7 @@ class LogObserver { Mutex mutex_; Strings received_log_lines_ RTC_GUARDED_BY(mutex_); Strings expected_log_lines_ RTC_GUARDED_BY(mutex_); - rtc::Event done_; + Event done_; }; Callback callback_; @@ -178,13 +187,13 @@ class BitrateEstimatorTest : public test::CallTest { RTC_DCHECK_EQ(1, test_->GetVideoEncoderConfig()->number_of_streams); frame_generator_capturer_ = std::make_unique( - test->clock_, + &test->env().clock(), test::CreateSquareFrameGenerator( test::VideoTestConstants::kDefaultWidth, - test::VideoTestConstants::kDefaultHeight, absl::nullopt, - absl::nullopt), + test::VideoTestConstants::kDefaultHeight, std::nullopt, + std::nullopt), test::VideoTestConstants::kDefaultFramerate, - *test->task_queue_factory_); + test->env().task_queue_factory()); frame_generator_capturer_->Init(); frame_generator_capturer_->Start(); send_stream_->SetSource(frame_generator_capturer_.get(), diff --git a/call/call.cc b/call/call.cc index fa5d14d204..ace69e717a 100644 --- a/call/call.cc +++ b/call/call.cc @@ -17,49 +17,75 @@ #include #include #include +#include #include +#include #include #include #include "absl/functional/bind_front.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/adaptation/resource.h" +#include "api/environment/environment.h" +#include "api/fec_controller.h" +#include "api/field_trials_view.h" #include "api/media_types.h" +#include "api/rtc_error.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/rtp_headers.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "api/transport/bitrate_settings.h" #include "api/transport/network_control.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "audio/audio_receive_stream.h" #include "audio/audio_send_stream.h" #include "audio/audio_state.h" #include "call/adaptation/broadcast_resource_listener.h" #include "call/bitrate_allocator.h" +#include "call/call_config.h" +#include "call/flexfec_receive_stream.h" #include "call/flexfec_receive_stream_impl.h" #include "call/packet_receiver.h" +#include "call/payload_type.h" +#include 
"call/payload_type_picker.h" +#include "call/receive_stream.h" #include "call/receive_time_calculator.h" +#include "call/rtp_config.h" #include "call/rtp_stream_receiver_controller.h" -#include "call/rtp_transport_controller_send.h" #include "call/rtp_transport_controller_send_factory.h" #include "call/version.h" +#include "call/video_receive_stream.h" +#include "call/video_send_stream.h" #include "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h" #include "logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h" #include "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h" #include "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h" #include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" #include "logging/rtc_event_log/rtc_stream_config.h" +#include "media/base/codec.h" #include "modules/congestion_controller/include/receive_side_congestion_controller.h" #include "modules/rtp_rtcp/include/flexfec_receiver.h" -#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" -#include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_util.h" #include "modules/video_coding/fec_controller_default.h" +#include "modules/video_coding/nack_requester.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" +#include "rtc_base/network/sent_packet.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_utils/repeating_task.h" +#include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -67,15 +93,37 @@ #include "system_wrappers/include/cpu_info.h" #include "system_wrappers/include/metrics.h" #include "video/call_stats2.h" +#include "video/config/video_encoder_config.h" +#include "video/decode_synchronizer.h" #include "video/send_delay_stats.h" #include "video/stats_counter.h" #include "video/video_receive_stream2.h" -#include "video/video_send_stream.h" +#include "video/video_send_stream_impl.h" namespace webrtc { namespace { +// In normal operation, the PTS comes from the PeerConnection. +// However, it is too much of a bother to insert it in all tests, +// so defaulting here. +class PayloadTypeSuggesterForTests : public PayloadTypeSuggester { + public: + PayloadTypeSuggesterForTests() = default; + RTCErrorOr SuggestPayloadType(const std::string& /* mid */, + Codec codec) override { + return payload_type_picker_.SuggestMapping(codec, nullptr); + } + RTCError AddLocalMapping(const std::string& /* mid */, + PayloadType /* payload_type */, + const Codec& /* codec */) override { + return RTCError::OK(); + } + + private: + PayloadTypePicker payload_type_picker_; +}; + const int* FindKeyByValue(const std::map& m, int v) { for (const auto& kv : m) { if (kv.second == v) @@ -129,7 +177,7 @@ std::unique_ptr CreateRtcLogStreamConfig( TaskQueueBase* GetCurrentTaskQueueOrThread() { TaskQueueBase* current = TaskQueueBase::Current(); if (!current) - current = rtc::ThreadManager::Instance()->CurrentThread(); + current = ThreadManager::Instance()->CurrentThread(); return current; } @@ -141,8 +189,8 @@ namespace internal { // and removing adapter resources to individual VideoSendStreams. 
class ResourceVideoSendStreamForwarder { public: - ResourceVideoSendStreamForwarder( - rtc::scoped_refptr resource) + explicit ResourceVideoSendStreamForwarder( + scoped_refptr resource) : broadcast_resource_listener_(resource) { broadcast_resource_listener_.StartListening(); } @@ -151,7 +199,7 @@ class ResourceVideoSendStreamForwarder { broadcast_resource_listener_.StopListening(); } - rtc::scoped_refptr Resource() const { + scoped_refptr Resource() const { return broadcast_resource_listener_.SourceResource(); } @@ -174,7 +222,7 @@ class ResourceVideoSendStreamForwarder { private: BroadcastResourceListener broadcast_resource_listener_; - std::map> + std::map> adapter_resources_; }; @@ -183,10 +231,8 @@ class Call final : public webrtc::Call, public TargetTransferRateObserver, public BitrateAllocator::LimitObserver { public: - Call(Clock* clock, - const Call::Config& config, - std::unique_ptr transport_send, - TaskQueueFactory* task_queue_factory); + Call(CallConfig config, + std::unique_ptr transport_send); ~Call() override; Call(const Call&) = delete; @@ -223,18 +269,25 @@ class Call final : public webrtc::Call, void DestroyFlexfecReceiveStream( FlexfecReceiveStream* receive_stream) override; - void AddAdaptationResource(rtc::scoped_refptr resource) override; + void AddAdaptationResource(scoped_refptr resource) override; RtpTransportControllerSendInterface* GetTransportControllerSend() override; + PayloadTypeSuggester* GetPayloadTypeSuggester() override; + void SetPayloadTypeSuggester(PayloadTypeSuggester* suggester) override; + Stats GetStats() const override; + void EnableSendCongestionControlFeedbackAccordingToRfc8888() override; + int FeedbackAccordingToRfc8888Count() override; + int FeedbackAccordingToTransportCcCount() override; + const FieldTrialsView& trials() const override; TaskQueueBase* network_thread() const override; TaskQueueBase* worker_thread() const override; - void DeliverRtcpPacket(rtc::CopyOnWriteBuffer packet) override; + void DeliverRtcpPacket(CopyOnWriteBuffer packet) override; void DeliverRtpPacket( MediaType media_type, @@ -256,7 +309,7 @@ class Call final : public webrtc::Call, void OnUpdateSyncGroup(webrtc::AudioReceiveStreamInterface& stream, absl::string_view sync_group) override; - void OnSentPacket(const rtc::SentPacket& sent_packet) override; + void OnSentPacket(const SentPacketInfo& sent_packet) override; // Implements TargetTransferRateObserver, void OnTargetTransferRate(TargetTransferRate msg) override; @@ -289,13 +342,13 @@ class Call final : public webrtc::Call, RTC_GUARDED_BY(sequence_checker_); RateCounter received_rtcp_bytes_per_second_counter_ RTC_GUARDED_BY(sequence_checker_); - absl::optional first_received_rtp_audio_timestamp_ + std::optional first_received_rtp_audio_timestamp_ RTC_GUARDED_BY(sequence_checker_); - absl::optional last_received_rtp_audio_timestamp_ + std::optional last_received_rtp_audio_timestamp_ RTC_GUARDED_BY(sequence_checker_); - absl::optional first_received_rtp_video_timestamp_ + std::optional first_received_rtp_video_timestamp_ RTC_GUARDED_BY(sequence_checker_); - absl::optional last_received_rtp_video_timestamp_ + std::optional last_received_rtp_video_timestamp_ RTC_GUARDED_BY(sequence_checker_); }; @@ -307,7 +360,7 @@ class Call final : public webrtc::Call, explicit SendStats(Clock* clock); ~SendStats(); - void SetFirstPacketTime(absl::optional first_sent_packet_time); + void SetFirstPacketTime(std::optional first_sent_packet_time); void PauseSendAndPacerBitrateCounters(); void AddTargetBitrateSample(uint32_t 
target_bitrate_bps); void SetMinAllocatableRate(BitrateAllocationLimits limits); @@ -321,11 +374,11 @@ class Call final : public webrtc::Call, AvgCounter pacer_bitrate_kbps_counter_ RTC_GUARDED_BY(sequence_checker_); uint32_t min_allocated_send_bitrate_bps_ RTC_GUARDED_BY(sequence_checker_){ 0}; - absl::optional first_sent_packet_time_ + std::optional first_sent_packet_time_ RTC_GUARDED_BY(destructor_sequence_checker_); }; - void DeliverRtcp(MediaType media_type, rtc::CopyOnWriteBuffer packet) + void DeliverRtcp(MediaType media_type, CopyOnWriteBuffer packet) RTC_RUN_ON(network_thread_); AudioReceiveStreamImpl* FindAudioStreamForSyncGroup( @@ -345,8 +398,7 @@ class Call final : public webrtc::Call, // callbacks have been registered. void EnsureStarted() RTC_RUN_ON(worker_thread_); - Clock* const clock_; - TaskQueueFactory* const task_queue_factory_; + const Environment env_; TaskQueueBase* const worker_thread_; TaskQueueBase* const network_thread_; const std::unique_ptr decode_sync_; @@ -355,9 +407,7 @@ class Call final : public webrtc::Call, const int num_cpu_cores_; const std::unique_ptr call_stats_; const std::unique_ptr bitrate_allocator_; - const Call::Config config_ RTC_GUARDED_BY(worker_thread_); - // Maps to config_.trials, can be used from any thread via `trials()`. - const FieldTrialsView& trials_; + const CallConfig config_ RTC_GUARDED_BY(worker_thread_); NetworkState audio_network_state_ RTC_GUARDED_BY(worker_thread_); NetworkState video_network_state_ RTC_GUARDED_BY(worker_thread_); @@ -388,19 +438,15 @@ class Call final : public webrtc::Call, RTC_NO_UNIQUE_ADDRESS SequenceChecker receive_11993_checker_; - // TODO(bugs.webrtc.org/11993): Move receive_rtp_config_ over to the - // network thread. - std::map receive_rtp_config_ - RTC_GUARDED_BY(&receive_11993_checker_); - // Audio and Video send streams are owned by the client that creates them. // TODO(bugs.webrtc.org/11993): `audio_send_ssrcs_` and `video_send_ssrcs_` // should be accessed on the network thread. std::map audio_send_ssrcs_ RTC_GUARDED_BY(worker_thread_); - std::map video_send_ssrcs_ + std::map video_send_ssrcs_ + RTC_GUARDED_BY(worker_thread_); + std::set video_send_streams_ RTC_GUARDED_BY(worker_thread_); - std::set video_send_streams_ RTC_GUARDED_BY(worker_thread_); // True if `video_send_streams_` is empty, false if not. The atomic variable // is used to decide UMA send statistics behavior and enables avoiding a // PostTask(). @@ -418,8 +464,6 @@ class Call final : public webrtc::Call, RtpPayloadStateMap suspended_video_payload_states_ RTC_GUARDED_BY(worker_thread_); - webrtc::RtcEventLog* const event_log_; - // TODO(bugs.webrtc.org/11993) ready to move stats access to the network // thread. ReceiveStats receive_stats_ RTC_GUARDED_BY(worker_thread_); @@ -431,6 +475,7 @@ class Call final : public webrtc::Call, ReceiveSideCongestionController receive_side_cc_; RepeatingTaskHandle receive_side_cc_periodic_task_; + RepeatingTaskHandle elastic_bandwidth_allocation_task_; const std::unique_ptr receive_time_calculator_; @@ -451,23 +496,27 @@ class Call final : public webrtc::Call, // https://bugs.chromium.org/p/chromium/issues/detail?id=992640 RtpTransportControllerSendInterface* const transport_send_ptr_ RTC_GUARDED_BY(send_transport_sequence_checker_); - // Declared last since it will issue callbacks from a task queue. Declaring it - // last ensures that it is destroyed first and any running tasks are finished. 
- const std::unique_ptr transport_send_; bool is_started_ RTC_GUARDED_BY(worker_thread_) = false; + // Mechanism for proposing payload types in RTP mappings. + PayloadTypeSuggester* pt_suggester_ = nullptr; + std::unique_ptr owned_pt_suggester_; + // Sequence checker for outgoing network traffic. Could be the network thread. - // Could also be a pacer owned thread or TQ such as the TaskQueuePacedSender. + // Could also be a pacer owned thread or TQ such as the TaskQueueSender. RTC_NO_UNIQUE_ADDRESS SequenceChecker sent_packet_sequence_checker_; - absl::optional last_sent_packet_ + std::optional last_sent_packet_ RTC_GUARDED_BY(sent_packet_sequence_checker_); + // Declared last since it will issue callbacks from a task queue. Declaring it + // last ensures that it is destroyed first and any running tasks are finished. + const std::unique_ptr transport_send_; }; } // namespace internal std::string Call::Stats::ToString(int64_t time_ms) const { char buf[1024]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); ss << "Call stats: " << time_ms << ", {"; ss << "send_bw_bps: " << send_bandwidth_bps << ", "; ss << "recv_bw_bps: " << recv_bandwidth_bps << ", "; @@ -478,29 +527,27 @@ std::string Call::Stats::ToString(int64_t time_ms) const { return ss.str(); } -Call* Call::Create(const Call::Config& config) { - Clock* clock = Clock::GetRealTimeClock(); - return Create(config, clock, - RtpTransportControllerSendFactory().Create( - config.ExtractTransportConfig(), clock)); -} +std::unique_ptr Call::Create(CallConfig config) { + std::unique_ptr transport_send; + if (config.rtp_transport_controller_send_factory != nullptr) { + transport_send = config.rtp_transport_controller_send_factory->Create( + config.ExtractTransportConfig()); + } else { + transport_send = RtpTransportControllerSendFactory().Create( + config.ExtractTransportConfig()); + } -Call* Call::Create(const Call::Config& config, - Clock* clock, - std::unique_ptr - transportControllerSend) { - RTC_DCHECK(config.task_queue_factory); - return new internal::Call(clock, config, std::move(transportControllerSend), - config.task_queue_factory); + return std::make_unique(std::move(config), + std::move(transport_send)); } // This method here to avoid subclasses has to implement this method. // Call perf test will use Internal::Call::CreateVideoSendStream() to inject // FecController. 
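A minimal caller-side sketch of the factory above: CallConfig is now move-only and carries an Environment, and Call::Create() returns a std::unique_ptr. CreateEnvironment() (from api/environment/environment_factory.h, not touched by this diff) and the start bitrate value are assumptions for illustration only.

// Sketch: constructing a Call with the new move-only CallConfig.
#include <memory>
#include <utility>

#include "api/environment/environment_factory.h"
#include "api/task_queue/task_queue_base.h"
#include "call/call.h"
#include "call/call_config.h"

namespace webrtc {

std::unique_ptr<Call> CreateCallSketch(TaskQueueBase* network_task_queue) {
  // CreateEnvironment() is an assumption; any Environment works here.
  CallConfig config(CreateEnvironment(), network_task_queue);
  // Initial bitrate constraint, used until real estimates are available.
  config.bitrate_config.start_bitrate_bps = 300'000;
  return Call::Create(std::move(config));
}

}  // namespace webrtc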
VideoSendStream* Call::CreateVideoSendStream( - VideoSendStream::Config config, - VideoEncoderConfig encoder_config, - std::unique_ptr fec_controller) { + VideoSendStream::Config /* config */, + VideoEncoderConfig /* encoder_config */, + std::unique_ptr /* fec_controller */) { return nullptr; } @@ -631,7 +678,7 @@ Call::SendStats::~SendStats() { } void Call::SendStats::SetFirstPacketTime( - absl::optional first_sent_packet_time) { + std::optional first_sent_packet_time) { RTC_DCHECK_RUN_ON(&destructor_sequence_checker_); first_sent_packet_time_ = first_sent_packet_time; } @@ -657,47 +704,42 @@ void Call::SendStats::SetMinAllocatableRate(BitrateAllocationLimits limits) { min_allocated_send_bitrate_bps_ = limits.min_allocatable_rate.bps(); } -Call::Call(Clock* clock, - const Call::Config& config, - std::unique_ptr transport_send, - TaskQueueFactory* task_queue_factory) - : clock_(clock), - task_queue_factory_(task_queue_factory), +Call::Call(CallConfig config, + std::unique_ptr transport_send) + : env_(config.env), worker_thread_(GetCurrentTaskQueueOrThread()), // If `network_task_queue_` was set to nullptr, network related calls // must be made on `worker_thread_` (i.e. they're one and the same). network_thread_(config.network_task_queue_ ? config.network_task_queue_ : worker_thread_), - decode_sync_(config.metronome - ? std::make_unique(clock_, - config.metronome, - worker_thread_) - : nullptr), + decode_sync_( + config.decode_metronome + ? std::make_unique(&env_.clock(), + config.decode_metronome, + worker_thread_) + : nullptr), num_cpu_cores_(CpuInfo::DetectNumberOfCores()), - call_stats_(new CallStats(clock_, worker_thread_)), - bitrate_allocator_(new BitrateAllocator(this)), - config_(config), - trials_(*config.trials), + call_stats_(new CallStats(&env_.clock(), worker_thread_)), + bitrate_allocator_(new BitrateAllocator( + this, + GetElasticRateAllocationFieldTrialParameter(env_.field_trials()))), + config_(std::move(config)), audio_network_state_(kNetworkDown), video_network_state_(kNetworkDown), aggregate_network_up_(false), - event_log_(config.event_log), - receive_stats_(clock_), - send_stats_(clock_), - receive_side_cc_(clock, + receive_stats_(&env_.clock()), + send_stats_(&env_.clock()), + receive_side_cc_(env_, absl::bind_front(&PacketRouter::SendCombinedRtcpPacket, transport_send->packet_router()), absl::bind_front(&PacketRouter::SendRemb, - transport_send->packet_router()), - /*network_state_estimator=*/nullptr), + transport_send->packet_router())), receive_time_calculator_( - ReceiveTimeCalculator::CreateFromFieldTrial(*config.trials)), - video_send_delay_stats_(new SendDelayStats(clock_)), - start_of_call_(clock_->CurrentTime()), + ReceiveTimeCalculator::CreateFromFieldTrial(env_.field_trials())), + video_send_delay_stats_(new SendDelayStats(&env_.clock())), + start_of_call_(env_.clock().CurrentTime()), transport_send_ptr_(transport_send.get()), transport_send_(std::move(transport_send)) { - RTC_DCHECK(config.event_log != nullptr); - RTC_DCHECK(config.trials != nullptr); RTC_DCHECK(network_thread_); RTC_DCHECK(worker_thread_->IsCurrent()); @@ -715,7 +757,23 @@ Call::Call(Clock* clock, receive_side_cc_periodic_task_ = RepeatingTaskHandle::Start( worker_thread_, [receive_side_cc] { return receive_side_cc->MaybeProcess(); }, - TaskQueueBase::DelayPrecision::kLow, clock_); + TaskQueueBase::DelayPrecision::kLow, &env_.clock()); + + // TODO(b/350555527): Remove after experiment + if (GetElasticRateAllocationFieldTrialParameter(env_.field_trials()) != + DataRate::Zero()) { + 
elastic_bandwidth_allocation_task_ = RepeatingTaskHandle::Start( + worker_thread_, + [this] { + TimeDelta next_schedule_interval = TimeDelta::Millis(25); + if (bitrate_allocator_) { + if (!bitrate_allocator_->RecomputeAllocationIfNeeded()) + next_schedule_interval = TimeDelta::Millis(300); + } + return next_schedule_interval; + }, + TaskQueueBase::DelayPrecision::kLow, &env_.clock()); + } } Call::~Call() { @@ -728,12 +786,13 @@ Call::~Call() { RTC_CHECK(video_receive_streams_.empty()); receive_side_cc_periodic_task_.Stop(); + elastic_bandwidth_allocation_task_.Stop(); call_stats_->DeregisterStatsObserver(&receive_side_cc_); send_stats_.SetFirstPacketTime(transport_send_->GetFirstPacketTime()); RTC_HISTOGRAM_COUNTS_100000( "WebRTC.Call.LifetimeInSeconds", - (clock_->CurrentTime() - start_of_call_).seconds()); + (env_.clock().CurrentTime() - start_of_call_).seconds()); } void Call::EnsureStarted() { @@ -769,7 +828,7 @@ webrtc::AudioSendStream* Call::CreateAudioSendStream( // Stream config is logged in AudioSendStream::ConfigureStream, as it may // change during the stream's lifetime. - absl::optional suspended_rtp_state; + std::optional suspended_rtp_state; { const auto& iter = suspended_audio_send_ssrcs_.find(config.rtp.ssrc); if (iter != suspended_audio_send_ssrcs_.end()) { @@ -777,22 +836,14 @@ webrtc::AudioSendStream* Call::CreateAudioSendStream( } } - AudioSendStream* send_stream = new AudioSendStream( - clock_, config, config_.audio_state, task_queue_factory_, - transport_send_.get(), bitrate_allocator_.get(), event_log_, - call_stats_->AsRtcpRttStats(), suspended_rtp_state, trials()); + AudioSendStream* send_stream = + new AudioSendStream(env_, config, config_.audio_state, + transport_send_.get(), bitrate_allocator_.get(), + call_stats_->AsRtcpRttStats(), suspended_rtp_state); RTC_DCHECK(audio_send_ssrcs_.find(config.rtp.ssrc) == audio_send_ssrcs_.end()); audio_send_ssrcs_[config.rtp.ssrc] = send_stream; - // TODO(bugs.webrtc.org/11993): call AssociateSendStream and - // UpdateAggregateNetworkState asynchronously on the network thread. - for (AudioReceiveStreamImpl* stream : audio_receive_streams_) { - if (stream->local_ssrc() == config.rtp.ssrc) { - stream->AssociateSendStream(send_stream); - } - } - UpdateAggregateNetworkState(); return send_stream; @@ -813,14 +864,6 @@ void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) { size_t num_deleted = audio_send_ssrcs_.erase(ssrc); RTC_DCHECK_EQ(1, num_deleted); - // TODO(bugs.webrtc.org/11993): call AssociateSendStream and - // UpdateAggregateNetworkState asynchronously on the network thread. 
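The elastic-allocation task above returns its next delay from the closure itself; the generic shape of that RepeatingTaskHandle pattern is sketched below, with a hypothetical is_busy predicate standing in for BitrateAllocator::RecomputeAllocationIfNeeded().

// Sketch of the RepeatingTaskHandle pattern used above: the lambda returns
// the delay until its next run, so the task can back off when idle.
// `is_busy` is a placeholder predicate, not a WebRTC API.
#include "api/task_queue/task_queue_base.h"
#include "api/units/time_delta.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "system_wrappers/include/clock.h"

namespace webrtc {

RepeatingTaskHandle StartAdaptivePoll(TaskQueueBase* worker,
                                      Clock* clock,
                                      bool (*is_busy)()) {
  return RepeatingTaskHandle::Start(
      worker,
      [is_busy] {
        // Poll frequently while there is work, back off otherwise.
        return is_busy() ? TimeDelta::Millis(25) : TimeDelta::Millis(300);
      },
      TaskQueueBase::DelayPrecision::kLow, clock);
}

}  // namespace webrtc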
- for (AudioReceiveStreamImpl* stream : audio_receive_streams_) { - if (stream->local_ssrc() == ssrc) { - stream->AssociateSendStream(nullptr); - } - } - UpdateAggregateNetworkState(); delete send_stream; @@ -831,12 +874,12 @@ webrtc::AudioReceiveStreamInterface* Call::CreateAudioReceiveStream( TRACE_EVENT0("webrtc", "Call::CreateAudioReceiveStream"); RTC_DCHECK_RUN_ON(worker_thread_); EnsureStarted(); - event_log_->Log(std::make_unique( + env_.event_log().Log(std::make_unique( CreateRtcLogStreamConfig(config))); AudioReceiveStreamImpl* receive_stream = new AudioReceiveStreamImpl( - clock_, transport_send_->packet_router(), config_.neteq_factory, config, - config_.audio_state, event_log_); + env_, transport_send_->packet_router(), config_.neteq_factory, config, + config_.audio_state); audio_receive_streams_.insert(receive_stream); // TODO(bugs.webrtc.org/11993): Make the registration on the network thread @@ -844,18 +887,8 @@ webrtc::AudioReceiveStreamInterface* Call::CreateAudioReceiveStream( // to live on the network thread. receive_stream->RegisterWithTransport(&audio_receiver_controller_); - // TODO(bugs.webrtc.org/11993): Update the below on the network thread. - // We could possibly set up the audio_receiver_controller_ association up - // as part of the async setup. - RegisterReceiveStream(config.rtp.remote_ssrc, receive_stream); - ConfigureSync(config.sync_group); - auto it = audio_send_ssrcs_.find(config.rtp.local_ssrc); - if (it != audio_send_ssrcs_.end()) { - receive_stream->AssociateSendStream(it->second); - } - UpdateAggregateNetworkState(); return receive_stream; } @@ -883,8 +916,6 @@ void Call::DestroyAudioReceiveStream( // for this sync_group. ConfigureSync(audio_receive_stream->sync_group()); - UnregisterReceiveStream(ssrc); - UpdateAggregateNetworkState(); // TODO(bugs.webrtc.org/11993): Consider if deleting `audio_receive_stream` // on the network thread would be better or if we'd need to tear down the @@ -905,7 +936,7 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream( video_send_delay_stats_->AddSsrcs(config); for (size_t ssrc_index = 0; ssrc_index < config.rtp.ssrcs.size(); ++ssrc_index) { - event_log_->Log(std::make_unique( + env_.event_log().Log(std::make_unique( CreateRtcLogStreamConfig(config, ssrc_index))); } @@ -914,13 +945,12 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream( // Copy ssrcs from `config` since `config` is moved. std::vector ssrcs = config.rtp.ssrcs; - VideoSendStream* send_stream = new VideoSendStream( - clock_, num_cpu_cores_, task_queue_factory_, network_thread_, - call_stats_->AsRtcpRttStats(), transport_send_.get(), - bitrate_allocator_.get(), video_send_delay_stats_.get(), event_log_, - std::move(config), std::move(encoder_config), suspended_video_send_ssrcs_, - suspended_video_payload_states_, std::move(fec_controller), - *config_.trials); + VideoSendStreamImpl* send_stream = new VideoSendStreamImpl( + env_, num_cpu_cores_, call_stats_->AsRtcpRttStats(), + transport_send_.get(), config_.encode_metronome, bitrate_allocator_.get(), + video_send_delay_stats_.get(), std::move(config), + std::move(encoder_config), suspended_video_send_ssrcs_, + suspended_video_payload_states_, std::move(fec_controller)); for (uint32_t ssrc : ssrcs) { RTC_DCHECK(video_send_ssrcs_.find(ssrc) == video_send_ssrcs_.end()); @@ -948,8 +978,8 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream( } std::unique_ptr fec_controller = config_.fec_controller_factory - ? config_.fec_controller_factory->CreateFecController() - : std::make_unique(clock_); + ? 
config_.fec_controller_factory->CreateFecController(env_) + : std::make_unique(env_); return CreateVideoSendStream(std::move(config), std::move(encoder_config), std::move(fec_controller)); } @@ -959,12 +989,12 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) { RTC_DCHECK(send_stream != nullptr); RTC_DCHECK_RUN_ON(worker_thread_); - VideoSendStream* send_stream_impl = - static_cast(send_stream); + VideoSendStreamImpl* send_stream_impl = + static_cast(send_stream); auto it = video_send_ssrcs_.begin(); while (it != video_send_ssrcs_.end()) { - if (it->second == static_cast(send_stream)) { + if (it->second == static_cast(send_stream)) { send_stream_impl = it->second; video_send_ssrcs_.erase(it++); } else { @@ -980,8 +1010,8 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) { if (video_send_streams_.empty()) video_send_streams_empty_.store(true, std::memory_order_relaxed); - VideoSendStream::RtpStateMap rtp_states; - VideoSendStream::RtpPayloadStateMap rtp_payload_states; + VideoSendStreamImpl::RtpStateMap rtp_states; + VideoSendStreamImpl::RtpPayloadStateMap rtp_payload_states; send_stream_impl->StopPermanentlyAndGetRtpStates(&rtp_states, &rtp_payload_states); for (const auto& kv : rtp_states) { @@ -1004,7 +1034,7 @@ webrtc::VideoReceiveStreamInterface* Call::CreateVideoReceiveStream( EnsureStarted(); - event_log_->Log(std::make_unique( + env_.event_log().Log(std::make_unique( CreateRtcLogStreamConfig(configuration))); // TODO(bugs.webrtc.org/11993): Move the registration between `receive_stream` @@ -1014,22 +1044,13 @@ webrtc::VideoReceiveStreamInterface* Call::CreateVideoReceiveStream( // TODO(crbug.com/1381982): Re-enable decode synchronizer once the Chromium // API has adapted to the new Metronome interface. VideoReceiveStream2* receive_stream = new VideoReceiveStream2( - task_queue_factory_, this, num_cpu_cores_, - transport_send_->packet_router(), std::move(configuration), - call_stats_.get(), clock_, std::make_unique(clock_, trials()), - &nack_periodic_processor_, decode_sync_.get(), event_log_); + env_, this, num_cpu_cores_, transport_send_->packet_router(), + std::move(configuration), call_stats_.get(), + std::make_unique(&env_.clock(), trials()), + &nack_periodic_processor_, decode_sync_.get()); // TODO(bugs.webrtc.org/11993): Set this up asynchronously on the network // thread. receive_stream->RegisterWithTransport(&video_receiver_controller_); - - if (receive_stream->rtx_ssrc()) { - // We record identical config for the rtx stream as for the main - // stream. Since the transport_send_cc negotiation is per payload - // type, we may get an incorrect value for the rtx stream, but - // that is unlikely to matter in practice. - RegisterReceiveStream(receive_stream->rtx_ssrc(), receive_stream); - } - RegisterReceiveStream(receive_stream->remote_ssrc(), receive_stream); video_receive_streams_.insert(receive_stream); ConfigureSync(receive_stream->sync_group()); @@ -1048,14 +1069,6 @@ void Call::DestroyVideoReceiveStream( static_cast(receive_stream); // TODO(bugs.webrtc.org/11993): Unregister on the network thread. receive_stream_impl->UnregisterFromTransport(); - - // Remove all ssrcs pointing to a receive stream. As RTX retransmits on a - // separate SSRC there can be either one or two. 
- UnregisterReceiveStream(receive_stream_impl->remote_ssrc()); - - if (receive_stream_impl->rtx_ssrc()) { - UnregisterReceiveStream(receive_stream_impl->rtx_ssrc()); - } video_receive_streams_.erase(receive_stream_impl); ConfigureSync(receive_stream_impl->sync_group()); @@ -1077,14 +1090,12 @@ FlexfecReceiveStream* Call::CreateFlexfecReceiveStream( // OnRtpPacket until the constructor is finished and the object is // in a valid state, since OnRtpPacket runs on the same thread. FlexfecReceiveStreamImpl* receive_stream = new FlexfecReceiveStreamImpl( - clock_, std::move(config), &video_receiver_controller_, + env_, std::move(config), &video_receiver_controller_, call_stats_->AsRtcpRttStats()); // TODO(bugs.webrtc.org/11993): Set this up asynchronously on the network // thread. receive_stream->RegisterWithTransport(&video_receiver_controller_); - RegisterReceiveStream(receive_stream->remote_ssrc(), receive_stream); - // TODO(brandtr): Store config in RtcEventLog here. return receive_stream; @@ -1100,8 +1111,6 @@ void Call::DestroyFlexfecReceiveStream(FlexfecReceiveStream* receive_stream) { receive_stream_impl->UnregisterFromTransport(); auto ssrc = receive_stream_impl->remote_ssrc(); - UnregisterReceiveStream(ssrc); - // Remove all SSRCs pointing to the FlexfecReceiveStreamImpl to be // destroyed. receive_side_cc_.RemoveStream(ssrc); @@ -1109,7 +1118,7 @@ void Call::DestroyFlexfecReceiveStream(FlexfecReceiveStream* receive_stream) { delete receive_stream_impl; } -void Call::AddAdaptationResource(rtc::scoped_refptr resource) { +void Call::AddAdaptationResource(scoped_refptr resource) { RTC_DCHECK_RUN_ON(worker_thread_); adaptation_resource_forwarders_.push_back( std::make_unique(resource)); @@ -1123,6 +1132,24 @@ RtpTransportControllerSendInterface* Call::GetTransportControllerSend() { return transport_send_.get(); } +PayloadTypeSuggester* Call::GetPayloadTypeSuggester() { + // TODO: https://issues.webrtc.org/360058654 - make mandatory at + // initialization. Currently, only some channels use it. + RTC_DCHECK_RUN_ON(worker_thread_); + if (!pt_suggester_) { + // Make something that will work most of the time for testing. 
+ owned_pt_suggester_ = std::make_unique(); + SetPayloadTypeSuggester(owned_pt_suggester_.get()); + } + return pt_suggester_; +} + +void Call::SetPayloadTypeSuggester(PayloadTypeSuggester* suggester) { + RTC_CHECK(!pt_suggester_) + << "SetPayloadTypeSuggester can be called only once"; + pt_suggester_ = suggester; +} + Call::Stats Call::GetStats() const { RTC_DCHECK_RUN_ON(worker_thread_); @@ -1144,8 +1171,21 @@ Call::Stats Call::GetStats() const { return stats; } +void Call::EnableSendCongestionControlFeedbackAccordingToRfc8888() { + receive_side_cc_.EnableSendCongestionControlFeedbackAccordingToRfc8888(); + transport_send_->EnableCongestionControlFeedbackAccordingToRfc8888(); +} + +int Call::FeedbackAccordingToRfc8888Count() { + return transport_send_->ReceivedCongestionControlFeedbackCount(); +} + +int Call::FeedbackAccordingToTransportCcCount() { + return transport_send_->ReceivedTransportCcFeedbackCount(); +} + const FieldTrialsView& Call::trials() const { - return trials_; + return env_.field_trials(); } TaskQueueBase* Call::network_thread() const { @@ -1233,13 +1273,7 @@ void Call::UpdateAggregateNetworkState() { void Call::OnLocalSsrcUpdated(webrtc::AudioReceiveStreamInterface& stream, uint32_t local_ssrc) { RTC_DCHECK_RUN_ON(worker_thread_); - webrtc::AudioReceiveStreamImpl& receive_stream = - static_cast(stream); - - receive_stream.SetLocalSsrc(local_ssrc); - auto it = audio_send_ssrcs_.find(local_ssrc); - receive_stream.AssociateSendStream(it != audio_send_ssrcs_.end() ? it->second - : nullptr); + static_cast(stream).SetLocalSsrc(local_ssrc); } void Call::OnLocalSsrcUpdated(VideoReceiveStreamInterface& stream, @@ -1263,7 +1297,7 @@ void Call::OnUpdateSyncGroup(webrtc::AudioReceiveStreamInterface& stream, ConfigureSync(sync_group); } -void Call::OnSentPacket(const rtc::SentPacket& sent_packet) { +void Call::OnSentPacket(const SentPacketInfo& sent_packet) { RTC_DCHECK_RUN_ON(&sent_packet_sequence_checker_); // When bundling is in effect, multiple senders may be sharing the same // transport. It means every |sent_packet| will be multiply notified from @@ -1285,7 +1319,7 @@ void Call::OnSentPacket(const rtc::SentPacket& sent_packet) { // on a ProcessThread. This is alright as is since we forward the call to // implementations that either just do a PostTask or use locking. video_send_delay_stats_->OnSentPacket(sent_packet.packet_id, - clock_->CurrentTime()); + env_.clock().CurrentTime()); transport_send_->OnSentPacket(sent_packet); } @@ -1365,7 +1399,7 @@ void Call::ConfigureSync(absl::string_view sync_group) { } } -void Call::DeliverRtcpPacket(rtc::CopyOnWriteBuffer packet) { +void Call::DeliverRtcpPacket(CopyOnWriteBuffer packet) { RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(IsRtcpPacket(packet)); TRACE_EVENT0("webrtc", "Call::DeliverRtcp"); @@ -1382,7 +1416,7 @@ void Call::DeliverRtcpPacket(rtc::CopyOnWriteBuffer packet) { rtcp_delivered = true; } - for (VideoSendStream* stream : video_send_streams_) { + for (VideoSendStreamImpl* stream : video_send_streams_) { stream->DeliverRtcp(packet.cdata(), packet.size()); rtcp_delivered = true; } @@ -1393,7 +1427,7 @@ void Call::DeliverRtcpPacket(rtc::CopyOnWriteBuffer packet) { } if (rtcp_delivered) { - event_log_->Log(std::make_unique(packet)); + env_.event_log().Log(std::make_unique(packet)); } } @@ -1409,17 +1443,22 @@ void Call::DeliverRtpPacket( // Repair packet_time_us for clock resets by comparing a new read of // the same clock (TimeUTCMicros) to a monotonic clock reading. 
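The RFC 8888 hooks added above are plain per-call toggles and counters; a short sketch of how an embedder might use them once the feedback format has been negotiated (the ccfb_negotiated flag is a stand-in for the embedder's own signaling result).

// Sketch: enabling RFC 8888 congestion control feedback on a Call.
#include "call/call.h"
#include "rtc_base/logging.h"

namespace webrtc {

void MaybeEnableCcfbSketch(Call& call, bool ccfb_negotiated) {
  // `ccfb_negotiated` is assumed to reflect whatever SDP negotiation result
  // the embedder has; it is not part of this change.
  if (!ccfb_negotiated) {
    return;
  }
  call.EnableSendCongestionControlFeedbackAccordingToRfc8888();
  RTC_LOG(LS_INFO) << "RFC 8888 feedback packets so far: "
                   << call.FeedbackAccordingToRfc8888Count();
}

}  // namespace webrtc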
packet_time_us = receive_time_calculator_->ReconcileReceiveTimes( - packet_time_us, rtc::TimeUTCMicros(), clock_->TimeInMicroseconds()); + packet_time_us, TimeUTCMicros(), env_.clock().TimeInMicroseconds()); packet.set_arrival_time(Timestamp::Micros(packet_time_us)); } NotifyBweOfReceivedPacket(packet, media_type); - event_log_->Log(std::make_unique(packet)); + env_.event_log().Log(std::make_unique(packet)); if (media_type != MediaType::AUDIO && media_type != MediaType::VIDEO) { return; } + const TimeDelta nw_to_deliver_delay = + env_.clock().CurrentTime() - packet.arrival_time(); + RTC_HISTOGRAM_COUNTS_100000("WebRTC.TimeFromNetworkToDeliverRtpPacketUs", + nw_to_deliver_delay.us()); + RtpStreamReceiverController& receiver_controller = media_type == MediaType::AUDIO ? audio_receiver_controller_ : video_receiver_controller_; @@ -1465,26 +1504,6 @@ void Call::NotifyBweOfReceivedPacket(const RtpPacketReceived& packet, receive_side_cc_.OnReceivedPacket(packet, media_type); } -bool Call::RegisterReceiveStream(uint32_t ssrc, - ReceiveStreamInterface* stream) { - RTC_DCHECK_RUN_ON(&receive_11993_checker_); - RTC_DCHECK(stream); - auto inserted = receive_rtp_config_.emplace(ssrc, stream); - if (!inserted.second) { - RTC_DLOG(LS_WARNING) << "ssrc already registered: " << ssrc; - } - return inserted.second; -} - -bool Call::UnregisterReceiveStream(uint32_t ssrc) { - RTC_DCHECK_RUN_ON(&receive_11993_checker_); - size_t erased = receive_rtp_config_.erase(ssrc); - if (!erased) { - RTC_DLOG(LS_WARNING) << "ssrc wasn't registered: " << ssrc; - } - return erased != 0u; -} - } // namespace internal } // namespace webrtc diff --git a/call/call.h b/call/call.h index 366978392e..87e1884583 100644 --- a/call/call.h +++ b/call/call.h @@ -10,27 +10,31 @@ #ifndef CALL_CALL_H_ #define CALL_CALL_H_ -#include +#include #include #include -#include #include "absl/strings/string_view.h" #include "api/adaptation/resource.h" +#include "api/fec_controller.h" +#include "api/field_trials_view.h" #include "api/media_types.h" +#include "api/rtp_headers.h" +#include "api/scoped_refptr.h" #include "api/task_queue/task_queue_base.h" +#include "api/transport/bitrate_settings.h" #include "call/audio_receive_stream.h" #include "call/audio_send_stream.h" #include "call/call_config.h" #include "call/flexfec_receive_stream.h" #include "call/packet_receiver.h" +#include "call/payload_type.h" #include "call/rtp_transport_controller_send_interface.h" #include "call/video_receive_stream.h" #include "call/video_send_stream.h" -#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/checks.h" #include "rtc_base/network/sent_packet.h" -#include "rtc_base/network_route.h" -#include "rtc_base/ref_count.h" +#include "video/config/video_encoder_config.h" namespace webrtc { @@ -46,8 +50,6 @@ namespace webrtc { class Call { public: - using Config = CallConfig; - struct Stats { std::string ToString(int64_t time_ms) const; @@ -58,11 +60,7 @@ class Call { int64_t rtt_ms = -1; }; - static Call* Create(const Call::Config& config); - static Call* Create(const Call::Config& config, - Clock* clock, - std::unique_ptr - transportControllerSend); + static std::unique_ptr Create(CallConfig config); virtual AudioSendStream* CreateAudioSendStream( const AudioSendStream::Config& config) = 0; @@ -99,7 +97,7 @@ class Call { // When a resource is overused, the Call will try to reduce the load on the // sysem, for example by reducing the resolution or frame rate of encoded // streams. 
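The comment above describes how the Call reacts when an added Resource reports overuse; below is a hedged sketch of a custom Resource wired into a Call. CpuResource and RegisterCpuResourceSketch are illustrative names, and the ResourceListener callback signature is assumed from api/adaptation/resource.h rather than shown in this diff.

// Sketch of a minimal adaptation Resource registered with a Call. The Call
// installs a ResourceListener via SetResourceListener(); the embedder's own
// monitoring code (not shown) would call ReportOveruse().
#include <string>

#include "api/adaptation/resource.h"
#include "api/make_ref_counted.h"
#include "api/scoped_refptr.h"
#include "call/call.h"

namespace webrtc {

class CpuResource : public Resource {
 public:
  std::string Name() const override { return "CpuResource"; }
  void SetResourceListener(ResourceListener* listener) override {
    listener_ = listener;
  }
  // Hypothetical hook invoked by the embedder's monitoring code.
  void ReportOveruse() {
    if (listener_) {
      listener_->OnResourceUsageStateMeasured(scoped_refptr<Resource>(this),
                                              ResourceUsageState::kOveruse);
    }
  }

 private:
  ResourceListener* listener_ = nullptr;
};

void RegisterCpuResourceSketch(Call& call) {
  call.AddAdaptationResource(make_ref_counted<CpuResource>());
}

}  // namespace webrtc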
- virtual void AddAdaptationResource(rtc::scoped_refptr resource) = 0; + virtual void AddAdaptationResource(scoped_refptr resource) = 0; // All received RTP and RTCP packets for the call should be inserted to this // PacketReceiver. The PacketReceiver pointer is valid as long as the @@ -113,6 +111,18 @@ class Call { // remove this method interface. virtual RtpTransportControllerSendInterface* GetTransportControllerSend() = 0; + // A class that keeps track of payload types on the transport(s), and + // suggests new ones when needed. + virtual PayloadTypeSuggester* GetPayloadTypeSuggester() { + // TODO: https://issues.webrtc.org/360058654 - make pure virtual + RTC_CHECK_NOTREACHED(); + return nullptr; + } + virtual void SetPayloadTypeSuggester(PayloadTypeSuggester* /* suggester */) { + // TODO: https://issues.webrtc.org/360058654 - make pure virtual + RTC_CHECK_NOTREACHED(); + } + // Returns the call statistics, such as estimated send and receive bandwidth, // pacing delay, etc. virtual Stats GetStats() const = 0; @@ -138,11 +148,15 @@ class Call { virtual void OnUpdateSyncGroup(AudioReceiveStreamInterface& stream, absl::string_view sync_group) = 0; - virtual void OnSentPacket(const rtc::SentPacket& sent_packet) = 0; + virtual void OnSentPacket(const SentPacketInfo& sent_packet) = 0; virtual void SetClientBitratePreferences( const BitrateSettings& preferences) = 0; + virtual void EnableSendCongestionControlFeedbackAccordingToRfc8888() = 0; + virtual int FeedbackAccordingToRfc8888Count() = 0; + virtual int FeedbackAccordingToTransportCcCount() = 0; + virtual const FieldTrialsView& trials() const = 0; virtual TaskQueueBase* network_thread() const = 0; diff --git a/call/call_config.cc b/call/call_config.cc index 93f6b1aec4..34615ecee2 100644 --- a/call/call_config.cc +++ b/call/call_config.cc @@ -10,30 +10,28 @@ #include "call/call_config.h" -#include "rtc_base/checks.h" +#include "api/environment/environment.h" +#include "api/task_queue/task_queue_base.h" +#include "call/rtp_transport_config.h" namespace webrtc { -CallConfig::CallConfig(RtcEventLog* event_log, - TaskQueueBase* network_task_queue /* = nullptr*/) - : event_log(event_log), network_task_queue_(network_task_queue) { - RTC_DCHECK(event_log); -} - -CallConfig::CallConfig(const CallConfig& config) = default; +CallConfig::CallConfig(const Environment& env, + TaskQueueBase* network_task_queue) + : env(env), network_task_queue_(network_task_queue) {} RtpTransportConfig CallConfig::ExtractTransportConfig() const { - RtpTransportConfig transportConfig; - transportConfig.bitrate_config = bitrate_config; - transportConfig.event_log = event_log; - transportConfig.network_controller_factory = network_controller_factory; - transportConfig.network_state_predictor_factory = + RtpTransportConfig transport_config = {.env = env}; + transport_config.bitrate_config = bitrate_config; + transport_config.network_controller_factory = + per_call_network_controller_factory + ? 
per_call_network_controller_factory.get() + : network_controller_factory; + transport_config.network_state_predictor_factory = network_state_predictor_factory; - transportConfig.task_queue_factory = task_queue_factory; - transportConfig.trials = trials; - transportConfig.pacer_burst_interval = pacer_burst_interval; + transport_config.pacer_burst_interval = pacer_burst_interval; - return transportConfig; + return transport_config; } CallConfig::~CallConfig() = default; diff --git a/call/call_config.h b/call/call_config.h index 918c077435..819297f067 100644 --- a/call/call_config.h +++ b/call/call_config.h @@ -10,15 +10,19 @@ #ifndef CALL_CALL_CONFIG_H_ #define CALL_CALL_CONFIG_H_ +#include +#include + +#include "api/environment/environment.h" #include "api/fec_controller.h" -#include "api/field_trials_view.h" #include "api/metronome/metronome.h" #include "api/neteq/neteq_factory.h" #include "api/network_state_predictor.h" -#include "api/rtc_error.h" -#include "api/task_queue/task_queue_factory.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" #include "api/transport/bitrate_settings.h" #include "api/transport/network_control.h" +#include "api/units/time_delta.h" #include "call/audio_state.h" #include "call/rtp_transport_config.h" #include "call/rtp_transport_controller_send_factory_interface.h" @@ -26,61 +30,62 @@ namespace webrtc { class AudioProcessing; -class RtcEventLog; struct CallConfig { // If `network_task_queue` is set to nullptr, Call will assume that network // related callbacks will be made on the same TQ as the Call instance was // constructed on. - explicit CallConfig(RtcEventLog* event_log, + explicit CallConfig(const Environment& env, TaskQueueBase* network_task_queue = nullptr); - CallConfig(const CallConfig&); - RtpTransportConfig ExtractTransportConfig() const; + + // Move-only. + CallConfig(CallConfig&&) = default; + CallConfig& operator=(CallConfig&& other) = default; + ~CallConfig(); + RtpTransportConfig ExtractTransportConfig() const; + + Environment env; + // Bitrate config used until valid bitrate estimates are calculated. Also // used to cap total bitrate used. This comes from the remote connection. BitrateConstraints bitrate_config; // AudioState which is possibly shared between multiple calls. - rtc::scoped_refptr audio_state; + scoped_refptr audio_state; // Audio Processing Module to be used in this call. AudioProcessing* audio_processing = nullptr; - // RtcEventLog to use for this call. Required. - // Use webrtc::RtcEventLog::CreateNull() for a null implementation. - RtcEventLog* const event_log = nullptr; - // FecController to use for this call. FecControllerFactoryInterface* fec_controller_factory = nullptr; - // Task Queue Factory to be used in this call. Required. - TaskQueueFactory* task_queue_factory = nullptr; - // NetworkStatePredictor to use for this call. NetworkStatePredictorFactoryInterface* network_state_predictor_factory = nullptr; - // Network controller factory to use for this call. + // Call-specific Network controller factory to use. If this is set, it + // takes precedence over network_controller_factory. + std::unique_ptr + per_call_network_controller_factory; + // Network controller factory to use for this call if + // per_call_network_controller_factory is null. NetworkControllerFactoryInterface* network_controller_factory = nullptr; // NetEq factory to use for this call. NetEqFactory* neteq_factory = nullptr; - // Key-value mapping of internal configurations to apply, - // e.g. field trials. 
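Because per_call_network_controller_factory now takes precedence over network_controller_factory in ExtractTransportConfig(), a per-call congestion controller can be injected as sketched below; GoogCcNetworkControllerFactory (api/transport/goog_cc_factory.h) and CreateEnvironment() are assumptions pulled from headers this diff does not touch.

// Sketch: giving one Call its own congestion controller factory. When the
// unique_ptr below is left null, the shared `network_controller_factory`
// raw pointer is used instead, as ExtractTransportConfig() above shows.
#include <memory>
#include <utility>

#include "api/environment/environment_factory.h"
#include "api/transport/goog_cc_factory.h"
#include "call/call.h"
#include "call/call_config.h"

namespace webrtc {

std::unique_ptr<Call> CreateCallWithOwnControllerSketch() {
  CallConfig config(CreateEnvironment());
  config.per_call_network_controller_factory =
      std::make_unique<GoogCcNetworkControllerFactory>();
  return Call::Create(std::move(config));
}

}  // namespace webrtc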
- const FieldTrialsView* trials = nullptr; - - TaskQueueBase* const network_task_queue_ = nullptr; + TaskQueueBase* network_task_queue_ = nullptr; // RtpTransportControllerSend to use for this call. RtpTransportControllerSendFactoryInterface* rtp_transport_controller_send_factory = nullptr; - Metronome* metronome = nullptr; + Metronome* decode_metronome = nullptr; + Metronome* encode_metronome = nullptr; // The burst interval of the pacer, see TaskQueuePacedSender constructor. - absl::optional pacer_burst_interval; + std::optional pacer_burst_interval; // Enables send packet batching from the egress RTP sender. bool enable_send_packet_batching = false; diff --git a/call/call_factory.cc b/call/call_factory.cc deleted file mode 100644 index 380e80ce12..0000000000 --- a/call/call_factory.cc +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "call/call_factory.h" - -#include - -#include -#include -#include -#include - -#include "absl/memory/memory.h" -#include "absl/types/optional.h" -#include "api/test/simulated_network.h" -#include "api/units/time_delta.h" -#include "call/call.h" -#include "call/degraded_call.h" -#include "call/rtp_transport_config.h" -#include "rtc_base/checks.h" -#include "rtc_base/experiments/field_trial_list.h" -#include "rtc_base/experiments/field_trial_parser.h" - -namespace webrtc { -namespace { -using TimeScopedNetworkConfig = DegradedCall::TimeScopedNetworkConfig; - -std::vector GetNetworkConfigs( - const FieldTrialsView& trials, - bool send) { - FieldTrialStructList trials_list( - {FieldTrialStructMember("queue_length_packets", - [](TimeScopedNetworkConfig* p) { - // FieldTrialParser does not natively support - // size_t type, so use this ugly cast as - // workaround. - return reinterpret_cast( - &p->queue_length_packets); - }), - FieldTrialStructMember( - "queue_delay_ms", - [](TimeScopedNetworkConfig* p) { return &p->queue_delay_ms; }), - FieldTrialStructMember("delay_standard_deviation_ms", - [](TimeScopedNetworkConfig* p) { - return &p->delay_standard_deviation_ms; - }), - FieldTrialStructMember( - "link_capacity_kbps", - [](TimeScopedNetworkConfig* p) { return &p->link_capacity_kbps; }), - FieldTrialStructMember( - "loss_percent", - [](TimeScopedNetworkConfig* p) { return &p->loss_percent; }), - FieldTrialStructMember( - "allow_reordering", - [](TimeScopedNetworkConfig* p) { return &p->allow_reordering; }), - FieldTrialStructMember("avg_burst_loss_length", - [](TimeScopedNetworkConfig* p) { - return &p->avg_burst_loss_length; - }), - FieldTrialStructMember( - "packet_overhead", - [](TimeScopedNetworkConfig* p) { return &p->packet_overhead; }), - FieldTrialStructMember( - "duration", - [](TimeScopedNetworkConfig* p) { return &p->duration; })}, - {}); - ParseFieldTrial({&trials_list}, - trials.Lookup(send ? 
"WebRTC-FakeNetworkSendConfig" - : "WebRTC-FakeNetworkReceiveConfig")); - return trials_list.Get(); -} - -} // namespace - -CallFactory::CallFactory() { - call_thread_.Detach(); -} - -Call* CallFactory::CreateCall(const Call::Config& config) { - RTC_DCHECK_RUN_ON(&call_thread_); - RTC_DCHECK(config.trials); - - std::vector send_degradation_configs = - GetNetworkConfigs(*config.trials, /*send=*/true); - std::vector - receive_degradation_configs = - GetNetworkConfigs(*config.trials, /*send=*/false); - - RtpTransportConfig transportConfig = config.ExtractTransportConfig(); - - Call* call = - Call::Create(config, Clock::GetRealTimeClock(), - config.rtp_transport_controller_send_factory->Create( - transportConfig, Clock::GetRealTimeClock())); - - if (!send_degradation_configs.empty() || - !receive_degradation_configs.empty()) { - return new DegradedCall(absl::WrapUnique(call), send_degradation_configs, - receive_degradation_configs); - } - - return call; -} - -std::unique_ptr CreateCallFactory() { - return std::unique_ptr(new CallFactory()); -} - -} // namespace webrtc diff --git a/call/call_factory.h b/call/call_factory.h deleted file mode 100644 index 9feed7bbb6..0000000000 --- a/call/call_factory.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef CALL_CALL_FACTORY_H_ -#define CALL_CALL_FACTORY_H_ - -#include "api/call/call_factory_interface.h" -#include "api/sequence_checker.h" -#include "call/call.h" -#include "call/call_config.h" -#include "rtc_base/system/no_unique_address.h" - -namespace webrtc { - -class CallFactory : public CallFactoryInterface { - public: - CallFactory(); - - private: - ~CallFactory() override {} - - Call* CreateCall(const CallConfig& config) override; - - RTC_NO_UNIQUE_ADDRESS SequenceChecker call_thread_; -}; - -} // namespace webrtc - -#endif // CALL_CALL_FACTORY_H_ diff --git a/call/call_perf_tests.cc b/call/call_perf_tests.cc index f1ea970db8..b72558d978 100644 --- a/call/call_perf_tests.cc +++ b/call/call_perf_tests.cc @@ -9,54 +9,81 @@ */ #include +#include +#include +#include +#include #include +#include #include #include +#include +#include +#include "absl/flags/flag.h" #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/audio/audio_device.h" +#include "api/audio/builtin_audio_processing_builder.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" +#include "api/make_ref_counted.h" #include "api/numerics/samples_stats_counter.h" -#include "api/rtc_event_log/rtc_event_log.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/metrics/metric.h" #include "api/test/simulated_network.h" +#include "api/test/video/function_video_encoder_factory.h" +#include "api/transport/bitrate_settings.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" #include 
"api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator_factory.h" +#include "api/video/video_frame.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "call/audio_receive_stream.h" +#include "call/audio_send_stream.h" +#include "call/audio_state.h" #include "call/call.h" +#include "call/call_config.h" #include "call/fake_network_pipe.h" -#include "call/simulated_network.h" +#include "call/video_receive_stream.h" +#include "call/video_send_stream.h" #include "media/engine/internal_encoder_factory.h" #include "media/engine/simulcast_encoder_adapter.h" -#include "modules/audio_coding/include/audio_coding_module.h" -#include "modules/audio_device/include/audio_device.h" #include "modules/audio_device/include/test_audio_device.h" #include "modules/audio_mixer/audio_mixer_impl.h" -#include "modules/rtp_rtcp/source/rtp_packet.h" #include "rtc_base/checks.h" +#include "rtc_base/event.h" #include "rtc_base/logging.h" -#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/thread.h" -#include "rtc_base/thread_annotations.h" #include "system_wrappers/include/metrics.h" #include "test/call_test.h" -#include "test/direct_transport.h" #include "test/drifting_clock.h" #include "test/encoder_settings.h" #include "test/fake_encoder.h" #include "test/field_trial.h" #include "test/frame_generator_capturer.h" #include "test/gtest.h" -#include "test/null_transport.h" +#include "test/network/simulated_network.h" #include "test/rtp_rtcp_observer.h" +#include "test/test_flags.h" #include "test/testsupport/file_utils.h" #include "test/video_encoder_proxy_factory.h" #include "test/video_test_constants.h" #include "video/config/video_encoder_config.h" -#include "video/transport_adapter.h" using webrtc::test::DriftingClock; @@ -108,7 +135,7 @@ class CallPerfTest : public test::CallTest { }; class VideoRtcpAndSyncObserver : public test::RtpRtcpObserver, - public rtc::VideoSinkInterface { + public VideoSinkInterface { static const int kInSyncThresholdMs = 50; static const int kStartupTimeMs = 2000; static const int kMinRunTimeMs = 30000; @@ -123,7 +150,7 @@ class VideoRtcpAndSyncObserver : public test::RtpRtcpObserver, creation_time_ms_(clock_->TimeInMilliseconds()), task_queue_(task_queue) {} - void OnFrame(const VideoFrame& video_frame) override { + void OnFrame(const VideoFrame& /* video_frame */) override { task_queue_->PostTask([this]() { CheckStats(); }); } @@ -208,9 +235,9 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, SendTask(task_queue(), [&]() { metrics::Reset(); - rtc::scoped_refptr fake_audio_device = + scoped_refptr fake_audio_device = TestAudioDeviceModule::Create( - task_queue_factory_.get(), + &env().task_queue_factory(), TestAudioDeviceModule::CreatePulsedNoiseCapturer(256, 48000), TestAudioDeviceModule::CreateDiscardRenderer(48000), audio_rtp_speed); @@ -219,16 +246,16 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, AudioState::Config send_audio_state_config; send_audio_state_config.audio_mixer = AudioMixerImpl::Create(); send_audio_state_config.audio_processing = - AudioProcessingBuilder().Create(); + BuiltinAudioProcessingBuilder().Build(env()); send_audio_state_config.audio_device_module = fake_audio_device; - Call::Config sender_config(send_event_log_.get()); + 
CallConfig sender_config = SendCallConfig(); auto audio_state = AudioState::Create(send_audio_state_config); fake_audio_device->RegisterAudioCallback(audio_state->audio_transport()); sender_config.audio_state = audio_state; - Call::Config receiver_config(recv_event_log_.get()); + CallConfig receiver_config = RecvCallConfig(); receiver_config.audio_state = audio_state; - CreateCalls(sender_config, receiver_config); + CreateCalls(std::move(sender_config), std::move(receiver_config)); std::copy_if(std::begin(payload_type_map_), std::end(payload_type_map_), std::inserter(audio_pt_map, audio_pt_map.end()), @@ -317,7 +344,8 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, } EXPECT_EQ(1u, video_receive_streams_.size()); observer->set_receive_stream(video_receive_streams_[0]); - drifting_clock = std::make_unique(clock_, video_ntp_speed); + drifting_clock = + std::make_unique(&env().clock(), video_ntp_speed); CreateFrameGeneratorCapturerWithDrift( drifting_clock.get(), video_rtp_speed, test::VideoTestConstants::kDefaultFramerate, @@ -361,7 +389,7 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec, observer->PrintResults(); // In quick test synchronization may not be achieved in time. - if (!field_trial::IsEnabled("WebRTC-QuickPerfTest")) { + if (!absl::GetFlag(FLAGS_webrtc_quick_perf_test)) { // TODO(bugs.webrtc.org/10417): Reenable this for iOS #if !defined(WEBRTC_IOS) EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.AVSyncOffsetInMs")); @@ -401,162 +429,6 @@ TEST_F(CallPerfTest, DriftingClock::PercentsSlower(30.0f), "_video_faster"); } -void CallPerfTest::TestCaptureNtpTime( - const BuiltInNetworkBehaviorConfig& net_config, - int threshold_ms, - int start_time_ms, - int run_time_ms) { - class CaptureNtpTimeObserver : public test::EndToEndTest, - public rtc::VideoSinkInterface { - public: - CaptureNtpTimeObserver(const BuiltInNetworkBehaviorConfig& net_config, - int threshold_ms, - int start_time_ms, - int run_time_ms) - : EndToEndTest(test::VideoTestConstants::kLongTimeout), - net_config_(net_config), - clock_(Clock::GetRealTimeClock()), - threshold_ms_(threshold_ms), - start_time_ms_(start_time_ms), - run_time_ms_(run_time_ms), - creation_time_ms_(clock_->TimeInMilliseconds()), - capturer_(nullptr), - rtp_start_timestamp_set_(false), - rtp_start_timestamp_(0) {} - - private: - BuiltInNetworkBehaviorConfig GetSendTransportConfig() const override { - return net_config_; - } - - BuiltInNetworkBehaviorConfig GetReceiveTransportConfig() const override { - return net_config_; - } - - void OnFrame(const VideoFrame& video_frame) override { - MutexLock lock(&mutex_); - if (video_frame.ntp_time_ms() <= 0) { - // Haven't got enough RTCP SR in order to calculate the capture ntp - // time. - return; - } - - int64_t now_ms = clock_->TimeInMilliseconds(); - int64_t time_since_creation = now_ms - creation_time_ms_; - if (time_since_creation < start_time_ms_) { - // Wait for `start_time_ms_` before start measuring. - return; - } - - if (time_since_creation > run_time_ms_) { - observation_complete_.Set(); - } - - FrameCaptureTimeList::iterator iter = - capture_time_list_.find(video_frame.timestamp()); - EXPECT_TRUE(iter != capture_time_list_.end()); - - // The real capture time has been wrapped to uint32_t before converted - // to rtp timestamp in the sender side. So here we convert the estimated - // capture time to a uint32_t 90k timestamp also for comparing. 
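The deleted observer's comment above describes mapping an NTP capture time onto the RTP 90 kHz video clock before comparing; as a worked note, the conversion is simply a multiplication by 90 (90 000 ticks per second divided by 1000 ms per second) with the result allowed to wrap to uint32_t, mirroring the sender side.

// Worked example of the 90 kHz conversion described in the comment above.
#include <cstdint>

namespace webrtc {

uint32_t NtpMsTo90kHzTimestamp(int64_t ntp_time_ms) {
  // 90 ticks per millisecond; wrapping to uint32_t matches RTP timestamps.
  return static_cast<uint32_t>(90 * ntp_time_ms);
}

}  // namespace webrtc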
- uint32_t estimated_capture_timestamp = - 90 * static_cast(video_frame.ntp_time_ms()); - uint32_t real_capture_timestamp = iter->second; - int time_offset_ms = real_capture_timestamp - estimated_capture_timestamp; - time_offset_ms = time_offset_ms / 90; - time_offset_ms_list_.AddSample(time_offset_ms); - - EXPECT_TRUE(std::abs(time_offset_ms) < threshold_ms_); - } - - Action OnSendRtp(rtc::ArrayView packet) override { - MutexLock lock(&mutex_); - RtpPacket rtp_packet; - EXPECT_TRUE(rtp_packet.Parse(packet)); - - if (!rtp_start_timestamp_set_) { - // Calculate the rtp timestamp offset in order to calculate the real - // capture time. - uint32_t first_capture_timestamp = - 90 * static_cast(capturer_->first_frame_capture_time()); - rtp_start_timestamp_ = rtp_packet.Timestamp() - first_capture_timestamp; - rtp_start_timestamp_set_ = true; - } - - uint32_t capture_timestamp = - rtp_packet.Timestamp() - rtp_start_timestamp_; - capture_time_list_.insert( - capture_time_list_.end(), - std::make_pair(rtp_packet.Timestamp(), capture_timestamp)); - return SEND_PACKET; - } - - void OnFrameGeneratorCapturerCreated( - test::FrameGeneratorCapturer* frame_generator_capturer) override { - capturer_ = frame_generator_capturer; - } - - void ModifyVideoConfigs( - VideoSendStream::Config* send_config, - std::vector* receive_configs, - VideoEncoderConfig* encoder_config) override { - (*receive_configs)[0].renderer = this; - // Enable the receiver side rtt calculation. - (*receive_configs)[0].rtp.rtcp_xr.receiver_reference_time_report = true; - } - - void PerformTest() override { - EXPECT_TRUE(Wait()) << "Timed out while waiting for estimated capture " - "NTP time to be within bounds."; - GetGlobalMetricsLogger()->LogMetric( - "capture_ntp_time", "real - estimated", time_offset_ms_list_, - Unit::kMilliseconds, ImprovementDirection::kNeitherIsBetter); - } - - Mutex mutex_; - const BuiltInNetworkBehaviorConfig net_config_; - Clock* const clock_; - const int threshold_ms_; - const int start_time_ms_; - const int run_time_ms_; - const int64_t creation_time_ms_; - test::FrameGeneratorCapturer* capturer_; - bool rtp_start_timestamp_set_; - uint32_t rtp_start_timestamp_; - typedef std::map FrameCaptureTimeList; - FrameCaptureTimeList capture_time_list_ RTC_GUARDED_BY(&mutex_); - SamplesStatsCounter time_offset_ms_list_; - } test(net_config, threshold_ms, start_time_ms, run_time_ms); - - RunBaseTest(&test); -} - -// Flaky tests, disabled on Mac and Windows due to webrtc:8291. -#if !(defined(WEBRTC_MAC) || defined(WEBRTC_WIN)) -TEST_F(CallPerfTest, Real_Estimated_CaptureNtpTimeWithNetworkDelay) { - BuiltInNetworkBehaviorConfig net_config; - net_config.queue_delay_ms = 100; - // TODO(wu): lower the threshold as the calculation/estimation becomes more - // accurate. - const int kThresholdMs = 100; - const int kStartTimeMs = 10000; - const int kRunTimeMs = 20000; - TestCaptureNtpTime(net_config, kThresholdMs, kStartTimeMs, kRunTimeMs); -} - -TEST_F(CallPerfTest, Real_Estimated_CaptureNtpTimeWithNetworkJitter) { - BuiltInNetworkBehaviorConfig net_config; - net_config.queue_delay_ms = 100; - net_config.delay_standard_deviation_ms = 10; - // TODO(wu): lower the threshold as the calculation/estimation becomes more - // accurate. 
- const int kThresholdMs = 100; - const int kStartTimeMs = 10000; - const int kRunTimeMs = 20000; - TestCaptureNtpTime(net_config, kThresholdMs, kStartTimeMs, kRunTimeMs); -} -#endif - TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) { // Minimal normal usage at the start, then 30s overuse to allow filter to // settle, and then 80s underuse to allow plenty of time for rampup again. @@ -580,34 +452,36 @@ TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) { // OnSinkWantsChanged is called when FrameGeneratorCapturer::AddOrUpdateSink // is called. // TODO(sprang): Add integration test for maintain-framerate mode? - void OnSinkWantsChanged(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override { - // The sink wants can change either because an adaptation happened (i.e. - // the pixels or frame rate changed) or for other reasons, such as encoded - // resolutions being communicated (happens whenever we capture a new frame - // size). In this test, we only care about adaptations. + void OnSinkWantsChanged(VideoSinkInterface* /* sink */, + const VideoSinkWants& wants) override { + RTC_LOG(LS_INFO) << "OnSinkWantsChanged fps:" << wants.max_framerate_fps + << " max_pixel_count " << wants.max_pixel_count + << " target_pixel_count" + << wants.target_pixel_count.value_or(-1); + // The sink wants can change either because an adaptation happened + // (i.e. the pixels or frame rate changed) or for other reasons, such + // as encoded resolutions being communicated (happens whenever we + // capture a new frame size). In this test, we only care about + // adaptations. bool did_adapt = last_wants_.max_pixel_count != wants.max_pixel_count || last_wants_.target_pixel_count != wants.target_pixel_count || last_wants_.max_framerate_fps != wants.max_framerate_fps; last_wants_ = wants; if (!did_adapt) { + if (test_phase_ == TestPhase::kInit) { + test_phase_ = TestPhase::kStart; + } return; } // At kStart expect CPU overuse. Then expect CPU underuse when the encoder // delay has been decreased. switch (test_phase_) { case TestPhase::kInit: - // Max framerate should be set initially. 
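// Hedged sketch of the adaptation check introduced above: a VideoSinkWants
// update only counts as an adaptation when the pixel count, target pixel
// count or max frame rate actually changed; other updates (for example
// announced encode resolutions) are ignored by the test.
bool DidAdapt(const VideoSinkWants& last_wants, const VideoSinkWants& wants) {
  return last_wants.max_pixel_count != wants.max_pixel_count ||
         last_wants.target_pixel_count != wants.target_pixel_count ||
         last_wants.max_framerate_fps != wants.max_framerate_fps;
}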
- if (wants.max_framerate_fps != std::numeric_limits::max() && - wants.max_pixel_count == std::numeric_limits::max()) { - test_phase_ = TestPhase::kStart; - } else { - ADD_FAILURE() << "Got unexpected adaptation request, max res = " - << wants.max_pixel_count << ", target res = " - << wants.target_pixel_count.value_or(-1) - << ", max fps = " << wants.max_framerate_fps; - } + ADD_FAILURE() << "Got unexpected adaptation request, max res = " + << wants.max_pixel_count << ", target res = " + << wants.target_pixel_count.value_or(-1) + << ", max fps = " << wants.max_framerate_fps; break; case TestPhase::kStart: if (wants.max_pixel_count < std::numeric_limits::max()) { @@ -644,9 +518,9 @@ TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) { } void ModifyVideoConfigs( - VideoSendStream::Config* send_config, - std::vector* receive_configs, - VideoEncoderConfig* encoder_config) override {} + VideoSendStream::Config* /* send_config */, + std::vector* /* receive_configs */, + VideoEncoderConfig* /* encoder_config */) override {} void PerformTest() override { EXPECT_TRUE(Wait()) << "Timed out before receiving an overuse callback."; @@ -660,7 +534,7 @@ TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) { } test_phase_; private: - rtc::VideoSinkWants last_wants_; + VideoSinkWants last_wants_; } test; RunBaseTest(&test); @@ -695,7 +569,7 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) { private: // TODO(holmer): Run this with a timer instead of once per packet. - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView /* packet */) override { task_queue_->PostTask(SafeTask(task_safety_flag_, [this]() { VideoSendStream::Stats stats = send_stream_->GetStats(); @@ -720,15 +594,15 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) { void OnVideoStreamsCreated(VideoSendStream* send_stream, const std::vector& - receive_streams) override { + /* receive_streams */) override { send_stream_ = send_stream; } void OnStreamsStopped() override { task_safety_flag_->SetNotAlive(); } void ModifyVideoConfigs( - VideoSendStream::Config* send_config, - std::vector* receive_configs, + VideoSendStream::Config* /* send_config */, + std::vector* /* receive_configs */, VideoEncoderConfig* encoder_config) override { if (pad_to_min_bitrate_) { encoder_config->min_transmit_bitrate_bps = kMinTransmitBitrateBps; @@ -755,7 +629,7 @@ void CallPerfTest::TestMinTransmitBitrate(bool pad_to_min_bitrate) { int num_bitrate_observations_in_range_; SamplesStatsCounter bitrate_kbps_list_; TaskQueueBase* task_queue_; - rtc::scoped_refptr task_safety_flag_; + scoped_refptr task_safety_flag_; } test(pad_to_min_bitrate, task_queue()); fake_encoder_max_bitrate_ = kMaxEncodeBitrateKbps; @@ -790,6 +664,7 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { private: std::vector CreateEncoderStreams( + const FieldTrialsView& /*field_trials*/, int frame_width, int frame_height, const webrtc::VideoEncoderConfig& encoder_config) override { @@ -803,9 +678,9 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { class BitrateObserver : public test::EndToEndTest, public test::FakeEncoder { public: - explicit BitrateObserver(TaskQueueBase* task_queue) + explicit BitrateObserver(const Environment& env, TaskQueueBase* task_queue) : EndToEndTest(test::VideoTestConstants::kDefaultTimeout), - FakeEncoder(Clock::GetRealTimeClock()), + FakeEncoder(env), encoder_inits_(0), last_set_bitrate_kbps_(0), send_stream_(nullptr), @@ -857,21 +732,21 @@ TEST_F(CallPerfTest, 
MAYBE_KeepsHighBitrateWhenReconfiguringSender) { void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* /* receive_configs */, VideoEncoderConfig* encoder_config) override { send_config->encoder_settings.encoder_factory = &encoder_factory_; send_config->encoder_settings.bitrate_allocator_factory = bitrate_allocator_factory_.get(); encoder_config->max_bitrate_bps = 2 * kReconfigureThresholdKbps * 1000; encoder_config->video_stream_factory = - rtc::make_ref_counted(); + make_ref_counted(); encoder_config_ = encoder_config->Copy(); } void OnVideoStreamsCreated(VideoSendStream* send_stream, const std::vector& - receive_streams) override { + /* receive_streams */) override { send_stream_ = send_stream; } @@ -896,7 +771,7 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { } private: - rtc::Event time_to_reconfigure_; + Event time_to_reconfigure_; int encoder_inits_; uint32_t last_set_bitrate_kbps_; VideoSendStream* send_stream_; @@ -905,7 +780,7 @@ TEST_F(CallPerfTest, MAYBE_KeepsHighBitrateWhenReconfiguringSender) { std::unique_ptr bitrate_allocator_factory_; VideoEncoderConfig encoder_config_; TaskQueueBase* task_queue_; - } test(task_queue()); + } test(env(), task_queue()); RunBaseTest(&test); } @@ -952,7 +827,7 @@ void CallPerfTest::TestMinAudioVideoBitrate(int test_bitrate_from, protected: BuiltInNetworkBehaviorConfig GetFakeNetworkPipeConfig() const { BuiltInNetworkBehaviorConfig pipe_config; - pipe_config.link_capacity_kbps = test_bitrate_from_; + pipe_config.link_capacity = DataRate::KilobitsPerSec(test_bitrate_from_); return pipe_config; } @@ -964,9 +839,9 @@ void CallPerfTest::TestMinAudioVideoBitrate(int test_bitrate_from, } void OnTransportCreated( - test::PacketTransport* to_receiver, + test::PacketTransport* /* to_receiver */, SimulatedNetworkInterface* sender_network, - test::PacketTransport* to_sender, + test::PacketTransport* /* to_sender */, SimulatedNetworkInterface* receiver_network) override { send_simulated_network_ = sender_network; receive_simulated_network_ = receiver_network; @@ -975,8 +850,8 @@ void CallPerfTest::TestMinAudioVideoBitrate(int test_bitrate_from, void PerformTest() override { // Quick test mode, just to exercise all the code paths without actually // caring about performance measurements. - const bool quick_perf_test = - field_trial::IsEnabled("WebRTC-QuickPerfTest"); + const bool quick_perf_test = absl::GetFlag(FLAGS_webrtc_quick_perf_test); + int last_passed_test_bitrate = -1; for (int test_bitrate = test_bitrate_from_; test_bitrate_from_ < test_bitrate_to_ @@ -984,12 +859,12 @@ void CallPerfTest::TestMinAudioVideoBitrate(int test_bitrate_from, : test_bitrate >= test_bitrate_to_; test_bitrate += test_bitrate_step_) { BuiltInNetworkBehaviorConfig pipe_config; - pipe_config.link_capacity_kbps = test_bitrate; + pipe_config.link_capacity = DataRate::KilobitsPerSec(test_bitrate); send_simulated_network_->SetConfig(pipe_config); receive_simulated_network_->SetConfig(pipe_config); - rtc::Thread::SleepMs(quick_perf_test ? kShortDelayMs - : kBitrateStabilizationMs); + Thread::SleepMs(quick_perf_test ? kShortDelayMs + : kBitrateStabilizationMs); int64_t avg_rtt = 0; for (int i = 0; i < kBitrateMeasurements; i++) { @@ -998,8 +873,8 @@ void CallPerfTest::TestMinAudioVideoBitrate(int test_bitrate_from, call_stats = sender_call_->GetStats(); }); avg_rtt += call_stats.rtt_ms; - rtc::Thread::SleepMs(quick_perf_test ? kShortDelayMs - : kBitrateMeasurementMs); + Thread::SleepMs(quick_perf_test ? 
kShortDelayMs + : kBitrateMeasurementMs); } avg_rtt = avg_rtt / kBitrateMeasurements; if (avg_rtt > kMinGoodRttMs) { @@ -1019,7 +894,7 @@ void CallPerfTest::TestMinAudioVideoBitrate(int test_bitrate_from, Unit::kUnitless, ImprovementDirection::kNeitherIsBetter); } - void OnCallsCreated(Call* sender_call, Call* receiver_call) override { + void OnCallsCreated(Call* sender_call, Call* /* receiver_call */) override { sender_call_ = sender_call; BitrateConstraints bitrate_config; bitrate_config.min_bitrate_bps = min_bwe_; @@ -1085,8 +960,8 @@ void CallPerfTest::TestEncodeFramerate(VideoEncoderFactory* encoder_factory, frame_generator_capturer->ChangeResolution(640, 360); } - void OnSinkWantsChanged(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override {} + void OnSinkWantsChanged(VideoSinkInterface* /* sink */, + const VideoSinkWants& /* wants */) override {} void ModifySenderBitrateConfig( BitrateConstraints* bitrate_config) override { @@ -1095,7 +970,7 @@ void CallPerfTest::TestEncodeFramerate(VideoEncoderFactory* encoder_factory, void OnVideoStreamsCreated(VideoSendStream* send_stream, const std::vector& - receive_streams) override { + /* receive_streams */) override { send_stream_ = send_stream; } @@ -1105,7 +980,7 @@ void CallPerfTest::TestEncodeFramerate(VideoEncoderFactory* encoder_factory, void ModifyVideoConfigs( VideoSendStream::Config* send_config, - std::vector* receive_configs, + std::vector* /* receive_configs */, VideoEncoderConfig* encoder_config) override { send_config->encoder_settings.encoder_factory = encoder_factory_; send_config->rtp.payload_name = payload_name_; @@ -1125,8 +1000,7 @@ void CallPerfTest::TestEncodeFramerate(VideoEncoderFactory* encoder_factory, } void VerifyStats() const { - const bool quick_perf_test = - field_trial::IsEnabled("WebRTC-QuickPerfTest"); + const bool quick_perf_test = absl::GetFlag(FLAGS_webrtc_quick_perf_test); double input_fps = 0.0; for (const auto& configured_framerate : configured_framerates_) { input_fps = std::max(configured_framerate.second, input_fps); @@ -1147,7 +1021,7 @@ void CallPerfTest::TestEncodeFramerate(VideoEncoderFactory* encoder_factory, } } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView /* packet */) override { const Timestamp now = clock_->CurrentTime(); if (now - last_getstats_time_ > kMinGetStatsInterval) { last_getstats_time_ = now; @@ -1184,9 +1058,10 @@ void CallPerfTest::TestEncodeFramerate(VideoEncoderFactory* encoder_factory, TEST_F(CallPerfTest, TestEncodeFramerateVp8Simulcast) { InternalEncoderFactory internal_encoder_factory; test::FunctionVideoEncoderFactory encoder_factory( - [&internal_encoder_factory]() { + [&internal_encoder_factory](const Environment& env, + const SdpVideoFormat& /* format */) { return std::make_unique( - &internal_encoder_factory, SdpVideoFormat("VP8")); + env, &internal_encoder_factory, nullptr, SdpVideoFormat::VP8()); }); TestEncodeFramerate(&encoder_factory, "VP8", @@ -1196,9 +1071,10 @@ TEST_F(CallPerfTest, TestEncodeFramerateVp8Simulcast) { TEST_F(CallPerfTest, TestEncodeFramerateVp8SimulcastLowerInputFps) { InternalEncoderFactory internal_encoder_factory; test::FunctionVideoEncoderFactory encoder_factory( - [&internal_encoder_factory]() { + [&internal_encoder_factory](const Environment& env, + const SdpVideoFormat& /* format */) { return std::make_unique( - &internal_encoder_factory, SdpVideoFormat("VP8")); + env, &internal_encoder_factory, nullptr, SdpVideoFormat::VP8()); }); TestEncodeFramerate(&encoder_factory, "VP8", 
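// For reference, a hedged sketch of the new encoder-factory callback shape
// used above: FunctionVideoEncoderFactory now hands the lambda an Environment
// and the negotiated SdpVideoFormat, and SimulcastEncoderAdapter is built
// from that Environment (template arguments restored here for readability;
// headers are the ones already included by this test file).
InternalEncoderFactory internal_encoder_factory;
test::FunctionVideoEncoderFactory encoder_factory(
    [&internal_encoder_factory](const Environment& env,
                                const SdpVideoFormat& /* format */) {
      return std::make_unique<SimulcastEncoderAdapter>(
          env, &internal_encoder_factory, /*fallback_factory=*/nullptr,
          SdpVideoFormat::VP8());
    });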
diff --git a/call/call_unittest.cc b/call/call_unittest.cc index 01476eed19..d891853dcc 100644 --- a/call/call_unittest.cc +++ b/call/call_unittest.cc @@ -10,81 +10,84 @@ #include "call/call.h" +#include #include -#include #include +#include #include +#include -#include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/adaptation/resource.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/make_ref_counted.h" #include "api/media_types.h" -#include "api/rtc_event_log/rtc_event_log.h" -#include "api/task_queue/default_task_queue_factory.h" +#include "api/scoped_refptr.h" #include "api/test/mock_audio_mixer.h" #include "api/test/video/function_video_encoder_factory.h" -#include "api/transport/field_trial_based_config.h" #include "api/units/timestamp.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" -#include "audio/audio_receive_stream.h" +#include "api/video_codecs/sdp_video_format.h" #include "audio/audio_send_stream.h" #include "call/adaptation/test/fake_resource.h" #include "call/adaptation/test/mock_resource_listener.h" +#include "call/audio_receive_stream.h" +#include "call/audio_send_stream.h" #include "call/audio_state.h" +#include "call/call_config.h" +#include "call/flexfec_receive_stream.h" +#include "call/video_send_stream.h" #include "modules/audio_device/include/mock_audio_device.h" #include "modules/audio_processing/include/mock_audio_processing.h" -#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "test/fake_encoder.h" +#include "test/gmock.h" #include "test/gtest.h" #include "test/mock_audio_decoder_factory.h" #include "test/mock_transport.h" #include "test/run_loop.h" +#include "video/config/video_encoder_config.h" +namespace webrtc { namespace { using ::testing::_; -using ::testing::Contains; using ::testing::MockFunction; using ::testing::NiceMock; using ::testing::StrictMock; +using ::webrtc::test::FakeEncoder; +using ::webrtc::test::FunctionVideoEncoderFactory; +using ::webrtc::test::MockAudioDeviceModule; +using ::webrtc::test::MockAudioMixer; +using ::webrtc::test::MockAudioProcessing; +using ::webrtc::test::RunLoop; struct CallHelper { explicit CallHelper(bool use_null_audio_processing) { - task_queue_factory_ = webrtc::CreateDefaultTaskQueueFactory(); - webrtc::AudioState::Config audio_state_config; - audio_state_config.audio_mixer = - rtc::make_ref_counted(); + AudioState::Config audio_state_config; + audio_state_config.audio_mixer = make_ref_counted(); audio_state_config.audio_processing = use_null_audio_processing ? 
nullptr - : rtc::make_ref_counted< - NiceMock>(); + : make_ref_counted>(); audio_state_config.audio_device_module = - rtc::make_ref_counted(); - webrtc::Call::Config config(&event_log_); - config.audio_state = webrtc::AudioState::Create(audio_state_config); - config.task_queue_factory = task_queue_factory_.get(); - config.trials = &field_trials_; - call_.reset(webrtc::Call::Create(config)); + make_ref_counted(); + CallConfig config(CreateEnvironment()); + config.audio_state = AudioState::Create(audio_state_config); + call_ = Call::Create(std::move(config)); } - webrtc::Call* operator->() { return call_.get(); } + Call* operator->() { return call_.get(); } private: - webrtc::test::RunLoop loop_; - webrtc::RtcEventLogNull event_log_; - webrtc::FieldTrialBasedConfig field_trials_; - std::unique_ptr task_queue_factory_; - std::unique_ptr call_; + RunLoop loop_; + std::unique_ptr call_; }; -} // namespace - -namespace webrtc { -namespace { - -rtc::scoped_refptr FindResourceWhoseNameContains( - const std::vector>& resources, +scoped_refptr FindResourceWhoseNameContains( + const std::vector>& resources, absl::string_view name_contains) { for (const auto& resource : resources) { if (resource->Name().find(std::string(name_contains)) != std::string::npos) @@ -121,7 +124,7 @@ TEST(CallTest, CreateDestroy_AudioReceiveStream) { config.rtp.remote_ssrc = 42; config.rtcp_send_transport = &rtcp_send_transport; config.decoder_factory = - rtc::make_ref_counted(); + make_ref_counted(); AudioReceiveStreamInterface* stream = call->CreateAudioReceiveStream(config); EXPECT_NE(stream, nullptr); @@ -161,7 +164,7 @@ TEST(CallTest, CreateDestroy_AudioReceiveStreams) { MockTransport rtcp_send_transport; config.rtcp_send_transport = &rtcp_send_transport; config.decoder_factory = - rtc::make_ref_counted(); + make_ref_counted(); std::list streams; for (int i = 0; i < 2; ++i) { for (uint32_t ssrc = 0; ssrc < 1234567; ssrc += 34567) { @@ -183,70 +186,6 @@ TEST(CallTest, CreateDestroy_AudioReceiveStreams) { } } -TEST(CallTest, CreateDestroy_AssociateAudioSendReceiveStreams_RecvFirst) { - for (bool use_null_audio_processing : {false, true}) { - CallHelper call(use_null_audio_processing); - AudioReceiveStreamInterface::Config recv_config; - MockTransport rtcp_send_transport; - recv_config.rtp.remote_ssrc = 42; - recv_config.rtp.local_ssrc = 777; - recv_config.rtcp_send_transport = &rtcp_send_transport; - recv_config.decoder_factory = - rtc::make_ref_counted(); - AudioReceiveStreamInterface* recv_stream = - call->CreateAudioReceiveStream(recv_config); - EXPECT_NE(recv_stream, nullptr); - - MockTransport send_transport; - AudioSendStream::Config send_config(&send_transport); - send_config.rtp.ssrc = 777; - AudioSendStream* send_stream = call->CreateAudioSendStream(send_config); - EXPECT_NE(send_stream, nullptr); - - AudioReceiveStreamImpl* internal_recv_stream = - static_cast(recv_stream); - EXPECT_EQ(send_stream, - internal_recv_stream->GetAssociatedSendStreamForTesting()); - - call->DestroyAudioSendStream(send_stream); - EXPECT_EQ(nullptr, - internal_recv_stream->GetAssociatedSendStreamForTesting()); - - call->DestroyAudioReceiveStream(recv_stream); - } -} - -TEST(CallTest, CreateDestroy_AssociateAudioSendReceiveStreams_SendFirst) { - for (bool use_null_audio_processing : {false, true}) { - CallHelper call(use_null_audio_processing); - MockTransport send_transport; - AudioSendStream::Config send_config(&send_transport); - send_config.rtp.ssrc = 777; - AudioSendStream* send_stream = call->CreateAudioSendStream(send_config); - 
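// Hedged sketch of the Environment-based construction CallHelper now uses:
// CreateEnvironment() (api/environment/environment_factory.h, included above)
// bundles the clock, task queue factory, field trials and event log that the
// old Call::Config carried as separate pointers.
CallConfig config(CreateEnvironment());
config.audio_state = AudioState::Create(audio_state_config);
std::unique_ptr<Call> call = Call::Create(std::move(config));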
EXPECT_NE(send_stream, nullptr); - - AudioReceiveStreamInterface::Config recv_config; - MockTransport rtcp_send_transport; - recv_config.rtp.remote_ssrc = 42; - recv_config.rtp.local_ssrc = 777; - recv_config.rtcp_send_transport = &rtcp_send_transport; - recv_config.decoder_factory = - rtc::make_ref_counted(); - AudioReceiveStreamInterface* recv_stream = - call->CreateAudioReceiveStream(recv_config); - EXPECT_NE(recv_stream, nullptr); - - AudioReceiveStreamImpl* internal_recv_stream = - static_cast(recv_stream); - EXPECT_EQ(send_stream, - internal_recv_stream->GetAssociatedSendStreamForTesting()); - - call->DestroyAudioReceiveStream(recv_stream); - - call->DestroyAudioSendStream(send_stream); - } -} - TEST(CallTest, CreateDestroy_FlexfecReceiveStream) { for (bool use_null_audio_processing : {false, true}) { CallHelper call(use_null_audio_processing); @@ -395,9 +334,10 @@ TEST(CallTest, RecreatingAudioStreamWithSameSsrcReusesRtpState) { TEST(CallTest, AddAdaptationResourceAfterCreatingVideoSendStream) { CallHelper call(true); // Create a VideoSendStream. - test::FunctionVideoEncoderFactory fake_encoder_factory([]() { - return std::make_unique(Clock::GetRealTimeClock()); - }); + FunctionVideoEncoderFactory fake_encoder_factory( + [](const Environment& env, const SdpVideoFormat& /* format */) { + return std::make_unique(env); + }); auto bitrate_allocator_factory = CreateBuiltinVideoBitrateAllocatorFactory(); MockTransport send_transport; VideoSendStream::Config config(&send_transport); @@ -432,7 +372,7 @@ TEST(CallTest, AddAdaptationResourceAfterCreatingVideoSendStream) { StrictMock resource_listener1; EXPECT_CALL(resource_listener1, OnResourceUsageStateMeasured(_, _)) .Times(1) - .WillOnce([injected_resource1](rtc::scoped_refptr resource, + .WillOnce([injected_resource1](scoped_refptr resource, ResourceUsageState usage_state) { EXPECT_EQ(injected_resource1, resource); EXPECT_EQ(ResourceUsageState::kOveruse, usage_state); @@ -442,7 +382,7 @@ TEST(CallTest, AddAdaptationResourceAfterCreatingVideoSendStream) { StrictMock resource_listener2; EXPECT_CALL(resource_listener2, OnResourceUsageStateMeasured(_, _)) .Times(1) - .WillOnce([injected_resource2](rtc::scoped_refptr resource, + .WillOnce([injected_resource2](scoped_refptr resource, ResourceUsageState usage_state) { EXPECT_EQ(injected_resource2, resource); EXPECT_EQ(ResourceUsageState::kOveruse, usage_state); @@ -460,9 +400,10 @@ TEST(CallTest, AddAdaptationResourceBeforeCreatingVideoSendStream) { auto fake_resource = FakeResource::Create("FakeResource"); call->AddAdaptationResource(fake_resource); // Create a VideoSendStream. 
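// Hedged sketch of the adaptation-resource flow these tests drive; the
// SetUsageState trigger below is an assumption about the part of the test
// elided from this excerpt, not code shown here.
scoped_refptr<FakeResource> fake_resource = FakeResource::Create("FakeResource");
call->AddAdaptationResource(fake_resource);
// Once the send stream exists, signalling overuse on the fake resource is
// expected to surface on the injected resources' listeners via
// OnResourceUsageStateMeasured(resource, ResourceUsageState::kOveruse).
fake_resource->SetUsageState(ResourceUsageState::kOveruse);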
- test::FunctionVideoEncoderFactory fake_encoder_factory([]() { - return std::make_unique(Clock::GetRealTimeClock()); - }); + FunctionVideoEncoderFactory fake_encoder_factory( + [](const Environment& env, const SdpVideoFormat& /* format */) { + return std::make_unique(env); + }); auto bitrate_allocator_factory = CreateBuiltinVideoBitrateAllocatorFactory(); MockTransport send_transport; VideoSendStream::Config config(&send_transport); @@ -494,7 +435,7 @@ TEST(CallTest, AddAdaptationResourceBeforeCreatingVideoSendStream) { StrictMock resource_listener1; EXPECT_CALL(resource_listener1, OnResourceUsageStateMeasured(_, _)) .Times(1) - .WillOnce([injected_resource1](rtc::scoped_refptr resource, + .WillOnce([injected_resource1](scoped_refptr resource, ResourceUsageState usage_state) { EXPECT_EQ(injected_resource1, resource); EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state); @@ -504,7 +445,7 @@ TEST(CallTest, AddAdaptationResourceBeforeCreatingVideoSendStream) { StrictMock resource_listener2; EXPECT_CALL(resource_listener2, OnResourceUsageStateMeasured(_, _)) .Times(1) - .WillOnce([injected_resource2](rtc::scoped_refptr resource, + .WillOnce([injected_resource2](scoped_refptr resource, ResourceUsageState usage_state) { EXPECT_EQ(injected_resource2, resource); EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state); diff --git a/call/degraded_call.cc b/call/degraded_call.cc deleted file mode 100644 index a511eda7bd..0000000000 --- a/call/degraded_call.cc +++ /dev/null @@ -1,380 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "call/degraded_call.h" - -#include -#include - -#include "absl/strings/string_view.h" -#include "api/sequence_checker.h" -#include "modules/rtp_rtcp/source/rtp_util.h" -#include "rtc_base/thread.h" - -namespace webrtc { - -DegradedCall::FakeNetworkPipeOnTaskQueue::FakeNetworkPipeOnTaskQueue( - TaskQueueBase* task_queue, - rtc::scoped_refptr call_alive, - Clock* clock, - std::unique_ptr network_behavior) - : clock_(clock), - task_queue_(task_queue), - call_alive_(std::move(call_alive)), - pipe_(clock, std::move(network_behavior)) {} - -void DegradedCall::FakeNetworkPipeOnTaskQueue::SendRtp( - rtc::ArrayView packet, - const PacketOptions& options, - Transport* transport) { - pipe_.SendRtp(packet, options, transport); - Process(); -} - -void DegradedCall::FakeNetworkPipeOnTaskQueue::SendRtcp( - rtc::ArrayView packet, - Transport* transport) { - pipe_.SendRtcp(packet, transport); - Process(); -} - -void DegradedCall::FakeNetworkPipeOnTaskQueue::AddActiveTransport( - Transport* transport) { - pipe_.AddActiveTransport(transport); -} - -void DegradedCall::FakeNetworkPipeOnTaskQueue::RemoveActiveTransport( - Transport* transport) { - pipe_.RemoveActiveTransport(transport); -} - -bool DegradedCall::FakeNetworkPipeOnTaskQueue::Process() { - pipe_.Process(); - auto time_to_next = pipe_.TimeUntilNextProcess(); - if (!time_to_next) { - // Packet was probably sent immediately. 
- return false; - } - - task_queue_->PostTask(SafeTask(call_alive_, [this, time_to_next] { - RTC_DCHECK_RUN_ON(task_queue_); - int64_t next_process_time = *time_to_next + clock_->TimeInMilliseconds(); - if (!next_process_ms_ || next_process_time < *next_process_ms_) { - next_process_ms_ = next_process_time; - task_queue_->PostDelayedHighPrecisionTask( - SafeTask(call_alive_, - [this] { - RTC_DCHECK_RUN_ON(task_queue_); - if (!Process()) { - next_process_ms_.reset(); - } - }), - TimeDelta::Millis(*time_to_next)); - } - })); - - return true; -} - -DegradedCall::FakeNetworkPipeTransportAdapter::FakeNetworkPipeTransportAdapter( - FakeNetworkPipeOnTaskQueue* fake_network, - Call* call, - Clock* clock, - Transport* real_transport) - : network_pipe_(fake_network), - call_(call), - clock_(clock), - real_transport_(real_transport) { - network_pipe_->AddActiveTransport(real_transport); -} - -DegradedCall::FakeNetworkPipeTransportAdapter:: - ~FakeNetworkPipeTransportAdapter() { - network_pipe_->RemoveActiveTransport(real_transport_); -} - -bool DegradedCall::FakeNetworkPipeTransportAdapter::SendRtp( - rtc::ArrayView packet, - const PacketOptions& options) { - // A call here comes from the RTP stack (probably pacer). We intercept it and - // put it in the fake network pipe instead, but report to Call that is has - // been sent, so that the bandwidth estimator sees the delay we add. - network_pipe_->SendRtp(packet, options, real_transport_); - if (options.packet_id != -1) { - rtc::SentPacket sent_packet; - sent_packet.packet_id = options.packet_id; - sent_packet.send_time_ms = clock_->TimeInMilliseconds(); - sent_packet.info.included_in_feedback = options.included_in_feedback; - sent_packet.info.included_in_allocation = options.included_in_allocation; - sent_packet.info.packet_size_bytes = packet.size(); - sent_packet.info.packet_type = rtc::PacketType::kData; - call_->OnSentPacket(sent_packet); - } - return true; -} - -bool DegradedCall::FakeNetworkPipeTransportAdapter::SendRtcp( - rtc::ArrayView packet) { - network_pipe_->SendRtcp(packet, real_transport_); - return true; -} - -DegradedCall::DegradedCall( - std::unique_ptr call, - const std::vector& send_configs, - const std::vector& receive_configs) - : clock_(Clock::GetRealTimeClock()), - call_(std::move(call)), - call_alive_(PendingTaskSafetyFlag::CreateDetached()), - send_config_index_(0), - send_configs_(send_configs), - send_simulated_network_(nullptr), - receive_config_index_(0), - receive_configs_(receive_configs) { - if (!receive_configs_.empty()) { - auto network = std::make_unique(receive_configs_[0]); - receive_simulated_network_ = network.get(); - receive_pipe_ = - std::make_unique(clock_, std::move(network)); - receive_pipe_->SetReceiver(call_->Receiver()); - if (receive_configs_.size() > 1) { - call_->network_thread()->PostDelayedTask( - SafeTask(call_alive_, [this] { UpdateReceiveNetworkConfig(); }), - receive_configs_[0].duration); - } - } - if (!send_configs_.empty()) { - auto network = std::make_unique(send_configs_[0]); - send_simulated_network_ = network.get(); - send_pipe_ = std::make_unique( - call_->network_thread(), call_alive_, clock_, std::move(network)); - if (send_configs_.size() > 1) { - call_->network_thread()->PostDelayedTask( - SafeTask(call_alive_, [this] { UpdateSendNetworkConfig(); }), - send_configs_[0].duration); - } - } -} - -DegradedCall::~DegradedCall() { - RTC_DCHECK_RUN_ON(call_->worker_thread()); - // Thread synchronization is required to call `SetNotAlive`. 
- // Otherwise, when the `DegradedCall` object is destroyed but - // `SetNotAlive` has not yet been called, - // another Closure guarded by `call_alive_` may be called. - // TODO(https://crbug.com/webrtc/12649): Remove this block-invoke. - static_cast(call_->network_thread()) - ->BlockingCall( - [flag = std::move(call_alive_)]() mutable { flag->SetNotAlive(); }); -} - -AudioSendStream* DegradedCall::CreateAudioSendStream( - const AudioSendStream::Config& config) { - if (!send_configs_.empty()) { - auto transport_adapter = std::make_unique( - send_pipe_.get(), call_.get(), clock_, config.send_transport); - AudioSendStream::Config degrade_config = config; - degrade_config.send_transport = transport_adapter.get(); - AudioSendStream* send_stream = call_->CreateAudioSendStream(degrade_config); - if (send_stream) { - audio_send_transport_adapters_[send_stream] = - std::move(transport_adapter); - } - return send_stream; - } - return call_->CreateAudioSendStream(config); -} - -void DegradedCall::DestroyAudioSendStream(AudioSendStream* send_stream) { - call_->DestroyAudioSendStream(send_stream); - audio_send_transport_adapters_.erase(send_stream); -} - -AudioReceiveStreamInterface* DegradedCall::CreateAudioReceiveStream( - const AudioReceiveStreamInterface::Config& config) { - return call_->CreateAudioReceiveStream(config); -} - -void DegradedCall::DestroyAudioReceiveStream( - AudioReceiveStreamInterface* receive_stream) { - call_->DestroyAudioReceiveStream(receive_stream); -} - -VideoSendStream* DegradedCall::CreateVideoSendStream( - VideoSendStream::Config config, - VideoEncoderConfig encoder_config) { - std::unique_ptr transport_adapter; - if (!send_configs_.empty()) { - transport_adapter = std::make_unique( - send_pipe_.get(), call_.get(), clock_, config.send_transport); - config.send_transport = transport_adapter.get(); - } - VideoSendStream* send_stream = call_->CreateVideoSendStream( - std::move(config), std::move(encoder_config)); - if (send_stream && transport_adapter) { - video_send_transport_adapters_[send_stream] = std::move(transport_adapter); - } - return send_stream; -} - -VideoSendStream* DegradedCall::CreateVideoSendStream( - VideoSendStream::Config config, - VideoEncoderConfig encoder_config, - std::unique_ptr fec_controller) { - std::unique_ptr transport_adapter; - if (!send_configs_.empty()) { - transport_adapter = std::make_unique( - send_pipe_.get(), call_.get(), clock_, config.send_transport); - config.send_transport = transport_adapter.get(); - } - VideoSendStream* send_stream = call_->CreateVideoSendStream( - std::move(config), std::move(encoder_config), std::move(fec_controller)); - if (send_stream && transport_adapter) { - video_send_transport_adapters_[send_stream] = std::move(transport_adapter); - } - return send_stream; -} - -void DegradedCall::DestroyVideoSendStream(VideoSendStream* send_stream) { - call_->DestroyVideoSendStream(send_stream); - video_send_transport_adapters_.erase(send_stream); -} - -VideoReceiveStreamInterface* DegradedCall::CreateVideoReceiveStream( - VideoReceiveStreamInterface::Config configuration) { - return call_->CreateVideoReceiveStream(std::move(configuration)); -} - -void DegradedCall::DestroyVideoReceiveStream( - VideoReceiveStreamInterface* receive_stream) { - call_->DestroyVideoReceiveStream(receive_stream); -} - -FlexfecReceiveStream* DegradedCall::CreateFlexfecReceiveStream( - const FlexfecReceiveStream::Config config) { - return call_->CreateFlexfecReceiveStream(std::move(config)); -} - -void DegradedCall::DestroyFlexfecReceiveStream( 
- FlexfecReceiveStream* receive_stream) { - call_->DestroyFlexfecReceiveStream(receive_stream); -} - -void DegradedCall::AddAdaptationResource( - rtc::scoped_refptr resource) { - call_->AddAdaptationResource(std::move(resource)); -} - -PacketReceiver* DegradedCall::Receiver() { - if (!receive_configs_.empty()) { - return this; - } - return call_->Receiver(); -} - -RtpTransportControllerSendInterface* -DegradedCall::GetTransportControllerSend() { - return call_->GetTransportControllerSend(); -} - -Call::Stats DegradedCall::GetStats() const { - return call_->GetStats(); -} - -const FieldTrialsView& DegradedCall::trials() const { - return call_->trials(); -} - -TaskQueueBase* DegradedCall::network_thread() const { - return call_->network_thread(); -} - -TaskQueueBase* DegradedCall::worker_thread() const { - return call_->worker_thread(); -} - -void DegradedCall::SignalChannelNetworkState(MediaType media, - NetworkState state) { - call_->SignalChannelNetworkState(media, state); -} - -void DegradedCall::OnAudioTransportOverheadChanged( - int transport_overhead_per_packet) { - call_->OnAudioTransportOverheadChanged(transport_overhead_per_packet); -} - -void DegradedCall::OnLocalSsrcUpdated(AudioReceiveStreamInterface& stream, - uint32_t local_ssrc) { - call_->OnLocalSsrcUpdated(stream, local_ssrc); -} - -void DegradedCall::OnLocalSsrcUpdated(VideoReceiveStreamInterface& stream, - uint32_t local_ssrc) { - call_->OnLocalSsrcUpdated(stream, local_ssrc); -} - -void DegradedCall::OnLocalSsrcUpdated(FlexfecReceiveStream& stream, - uint32_t local_ssrc) { - call_->OnLocalSsrcUpdated(stream, local_ssrc); -} - -void DegradedCall::OnUpdateSyncGroup(AudioReceiveStreamInterface& stream, - absl::string_view sync_group) { - call_->OnUpdateSyncGroup(stream, sync_group); -} - -void DegradedCall::OnSentPacket(const rtc::SentPacket& sent_packet) { - if (!send_configs_.empty()) { - // If we have a degraded send-transport, we have already notified call - // about the supposed network send time. Discard the actual network send - // time in order to properly fool the BWE. 
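// Context for the early return below: the removed transport adapter already
// reported a synthetic send time to Call when it queued the packet on the
// fake pipe, roughly as follows (fields taken from the deleted SendRtp above),
// so the bandwidth estimator sees the simulated delay rather than wire time.
rtc::SentPacket sent_packet;
sent_packet.packet_id = options.packet_id;
sent_packet.send_time_ms = clock_->TimeInMilliseconds();
sent_packet.info.packet_size_bytes = packet.size();
sent_packet.info.packet_type = rtc::PacketType::kData;
call_->OnSentPacket(sent_packet);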
- return; - } - call_->OnSentPacket(sent_packet); -} - -void DegradedCall::DeliverRtpPacket( - MediaType media_type, - RtpPacketReceived packet, - OnUndemuxablePacketHandler undemuxable_packet_handler) { - RTC_DCHECK_RUN_ON(&received_packet_sequence_checker_); - receive_pipe_->DeliverRtpPacket(media_type, std::move(packet), - std::move(undemuxable_packet_handler)); - receive_pipe_->Process(); -} - -void DegradedCall::DeliverRtcpPacket(rtc::CopyOnWriteBuffer packet) { - RTC_DCHECK_RUN_ON(&received_packet_sequence_checker_); - receive_pipe_->DeliverRtcpPacket(std::move(packet)); - receive_pipe_->Process(); -} - -void DegradedCall::SetClientBitratePreferences( - const webrtc::BitrateSettings& preferences) { - call_->SetClientBitratePreferences(preferences); -} - -void DegradedCall::UpdateSendNetworkConfig() { - send_config_index_ = (send_config_index_ + 1) % send_configs_.size(); - send_simulated_network_->SetConfig(send_configs_[send_config_index_]); - call_->network_thread()->PostDelayedTask( - SafeTask(call_alive_, [this] { UpdateSendNetworkConfig(); }), - send_configs_[send_config_index_].duration); -} - -void DegradedCall::UpdateReceiveNetworkConfig() { - receive_config_index_ = (receive_config_index_ + 1) % receive_configs_.size(); - receive_simulated_network_->SetConfig( - receive_configs_[receive_config_index_]); - call_->network_thread()->PostDelayedTask( - SafeTask(call_alive_, [this] { UpdateReceiveNetworkConfig(); }), - receive_configs_[receive_config_index_].duration); -} -} // namespace webrtc diff --git a/call/degraded_call.h b/call/degraded_call.h deleted file mode 100644 index 14892f0607..0000000000 --- a/call/degraded_call.h +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef CALL_DEGRADED_CALL_H_ -#define CALL_DEGRADED_CALL_H_ - -#include -#include - -#include -#include -#include -#include - -#include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/call/transport.h" -#include "api/fec_controller.h" -#include "api/media_types.h" -#include "api/rtp_headers.h" -#include "api/task_queue/pending_task_safety_flag.h" -#include "api/test/simulated_network.h" -#include "call/audio_receive_stream.h" -#include "call/audio_send_stream.h" -#include "call/call.h" -#include "call/fake_network_pipe.h" -#include "call/flexfec_receive_stream.h" -#include "call/packet_receiver.h" -#include "call/rtp_transport_controller_send_interface.h" -#include "call/simulated_network.h" -#include "call/video_receive_stream.h" -#include "call/video_send_stream.h" -#include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/network/sent_packet.h" -#include "rtc_base/task_queue.h" -#include "system_wrappers/include/clock.h" -#include "video/config/video_encoder_config.h" - -namespace webrtc { -class DegradedCall : public Call, private PacketReceiver { - public: - struct TimeScopedNetworkConfig : public BuiltInNetworkBehaviorConfig { - TimeDelta duration = TimeDelta::PlusInfinity(); - }; - - explicit DegradedCall( - std::unique_ptr call, - const std::vector& send_configs, - const std::vector& receive_configs); - ~DegradedCall() override; - - // Implements Call. 
- AudioSendStream* CreateAudioSendStream( - const AudioSendStream::Config& config) override; - void DestroyAudioSendStream(AudioSendStream* send_stream) override; - - AudioReceiveStreamInterface* CreateAudioReceiveStream( - const AudioReceiveStreamInterface::Config& config) override; - void DestroyAudioReceiveStream( - AudioReceiveStreamInterface* receive_stream) override; - - VideoSendStream* CreateVideoSendStream( - VideoSendStream::Config config, - VideoEncoderConfig encoder_config) override; - VideoSendStream* CreateVideoSendStream( - VideoSendStream::Config config, - VideoEncoderConfig encoder_config, - std::unique_ptr fec_controller) override; - void DestroyVideoSendStream(VideoSendStream* send_stream) override; - - VideoReceiveStreamInterface* CreateVideoReceiveStream( - VideoReceiveStreamInterface::Config configuration) override; - void DestroyVideoReceiveStream( - VideoReceiveStreamInterface* receive_stream) override; - - FlexfecReceiveStream* CreateFlexfecReceiveStream( - const FlexfecReceiveStream::Config config) override; - void DestroyFlexfecReceiveStream( - FlexfecReceiveStream* receive_stream) override; - - void AddAdaptationResource(rtc::scoped_refptr resource) override; - - PacketReceiver* Receiver() override; - - RtpTransportControllerSendInterface* GetTransportControllerSend() override; - - Stats GetStats() const override; - - const FieldTrialsView& trials() const override; - - TaskQueueBase* network_thread() const override; - TaskQueueBase* worker_thread() const override; - - void SignalChannelNetworkState(MediaType media, NetworkState state) override; - void OnAudioTransportOverheadChanged( - int transport_overhead_per_packet) override; - void OnLocalSsrcUpdated(AudioReceiveStreamInterface& stream, - uint32_t local_ssrc) override; - void OnLocalSsrcUpdated(VideoReceiveStreamInterface& stream, - uint32_t local_ssrc) override; - void OnLocalSsrcUpdated(FlexfecReceiveStream& stream, - uint32_t local_ssrc) override; - void OnUpdateSyncGroup(AudioReceiveStreamInterface& stream, - absl::string_view sync_group) override; - void OnSentPacket(const rtc::SentPacket& sent_packet) override; - - protected: - // Implements PacketReceiver. - void DeliverRtpPacket( - MediaType media_type, - RtpPacketReceived packet, - OnUndemuxablePacketHandler undemuxable_packet_handler) override; - void DeliverRtcpPacket(rtc::CopyOnWriteBuffer packet) override; - - private: - class FakeNetworkPipeOnTaskQueue { - public: - FakeNetworkPipeOnTaskQueue( - TaskQueueBase* task_queue, - rtc::scoped_refptr call_alive, - Clock* clock, - std::unique_ptr network_behavior); - - void SendRtp(rtc::ArrayView packet, - const PacketOptions& options, - Transport* transport); - void SendRtcp(rtc::ArrayView packet, Transport* transport); - - void AddActiveTransport(Transport* transport); - void RemoveActiveTransport(Transport* transport); - - private: - // Try to process packets on the fake network queue. - // Returns true if call resulted in a delayed process, false if queue empty. - bool Process(); - - Clock* const clock_; - TaskQueueBase* const task_queue_; - rtc::scoped_refptr call_alive_; - FakeNetworkPipe pipe_; - absl::optional next_process_ms_ RTC_GUARDED_BY(&task_queue_); - }; - - // For audio/video send stream, a TransportAdapter instance is used to - // intercept packets to be sent, and put them into a common FakeNetworkPipe - // in such as way that they will eventually (unless dropped) be forwarded to - // the correct Transport for that stream. 
- class FakeNetworkPipeTransportAdapter : public Transport { - public: - FakeNetworkPipeTransportAdapter(FakeNetworkPipeOnTaskQueue* fake_network, - Call* call, - Clock* clock, - Transport* real_transport); - ~FakeNetworkPipeTransportAdapter(); - - bool SendRtp(rtc::ArrayView packet, - const PacketOptions& options) override; - bool SendRtcp(rtc::ArrayView packet) override; - - private: - FakeNetworkPipeOnTaskQueue* const network_pipe_; - Call* const call_; - Clock* const clock_; - Transport* const real_transport_; - }; - - void SetClientBitratePreferences( - const webrtc::BitrateSettings& preferences) override; - void UpdateSendNetworkConfig(); - void UpdateReceiveNetworkConfig(); - - Clock* const clock_; - const std::unique_ptr call_; - // For cancelling tasks on the network thread when DegradedCall is destroyed - rtc::scoped_refptr call_alive_; - size_t send_config_index_; - const std::vector send_configs_; - SimulatedNetwork* send_simulated_network_; - std::unique_ptr send_pipe_; - std::map> - audio_send_transport_adapters_; - std::map> - video_send_transport_adapters_; - - size_t receive_config_index_; - const std::vector receive_configs_; - SimulatedNetwork* receive_simulated_network_; - SequenceChecker received_packet_sequence_checker_; - std::unique_ptr receive_pipe_ - RTC_GUARDED_BY(received_packet_sequence_checker_); -}; - -} // namespace webrtc - -#endif // CALL_DEGRADED_CALL_H_ diff --git a/call/fake_network_pipe.cc b/call/fake_network_pipe.cc index 3c7207bd84..a1150b30c1 100644 --- a/call/fake_network_pipe.cc +++ b/call/fake_network_pipe.cc @@ -13,15 +13,23 @@ #include #include +#include +#include +#include #include #include #include +#include "api/array_view.h" +#include "api/call/transport.h" #include "api/media_types.h" +#include "api/test/simulated_network.h" #include "api/units/timestamp.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -30,13 +38,13 @@ namespace { constexpr int64_t kLogIntervalMs = 5000; } // namespace -NetworkPacket::NetworkPacket(rtc::CopyOnWriteBuffer packet, +NetworkPacket::NetworkPacket(CopyOnWriteBuffer packet, int64_t send_time, int64_t arrival_time, - absl::optional packet_options, + std::optional packet_options, bool is_rtcp, MediaType media_type, - absl::optional packet_time_us, + std::optional packet_time_us, Transport* transport) : packet_(std::move(packet)), send_time_(send_time), @@ -102,7 +110,7 @@ FakeNetworkPipe::FakeNetworkPipe( Clock* clock, std::unique_ptr network_behavior, PacketReceiver* receiver, - uint64_t seed) + uint64_t /* seed */) : clock_(clock), network_behavior_(std::move(network_behavior)), receiver_(receiver), @@ -135,34 +143,34 @@ void FakeNetworkPipe::RemoveActiveTransport(Transport* transport) { } } -bool FakeNetworkPipe::SendRtp(rtc::ArrayView packet, +bool FakeNetworkPipe::SendRtp(ArrayView packet, const PacketOptions& options, Transport* transport) { RTC_DCHECK(transport); - EnqueuePacket(rtc::CopyOnWriteBuffer(packet), options, false, transport); + EnqueuePacket(CopyOnWriteBuffer(packet), options, false, transport); return true; } -bool FakeNetworkPipe::SendRtcp(rtc::ArrayView packet, +bool FakeNetworkPipe::SendRtcp(ArrayView packet, Transport* transport) { RTC_DCHECK(transport); - EnqueuePacket(rtc::CopyOnWriteBuffer(packet), absl::nullopt, true, transport); + 
EnqueuePacket(CopyOnWriteBuffer(packet), std::nullopt, true, transport); return true; } void FakeNetworkPipe::DeliverRtpPacket( MediaType media_type, RtpPacketReceived packet, - OnUndemuxablePacketHandler undemuxable_packet_handler) { + OnUndemuxablePacketHandler /* undemuxable_packet_handler */) { MutexLock lock(&process_lock_); int64_t time_now_us = clock_->TimeInMicroseconds(); EnqueuePacket( NetworkPacket(std::move(packet), media_type, time_now_us, time_now_us)); } -void FakeNetworkPipe::DeliverRtcpPacket(rtc::CopyOnWriteBuffer packet) { - EnqueuePacket(std::move(packet), absl::nullopt, true, MediaType::ANY, - absl::nullopt); +void FakeNetworkPipe::DeliverRtcpPacket(CopyOnWriteBuffer packet) { + EnqueuePacket(std::move(packet), std::nullopt, true, MediaType::ANY, + std::nullopt); } void FakeNetworkPipe::SetClockOffset(int64_t offset_ms) { @@ -173,11 +181,11 @@ void FakeNetworkPipe::SetClockOffset(int64_t offset_ms) { FakeNetworkPipe::StoredPacket::StoredPacket(NetworkPacket&& packet) : packet(std::move(packet)) {} -bool FakeNetworkPipe::EnqueuePacket(rtc::CopyOnWriteBuffer packet, - absl::optional options, +bool FakeNetworkPipe::EnqueuePacket(CopyOnWriteBuffer packet, + std::optional options, bool is_rtcp, MediaType media_type, - absl::optional packet_time_us) { + std::optional packet_time_us) { MutexLock lock(&process_lock_); int64_t time_now_us = clock_->TimeInMicroseconds(); return EnqueuePacket(NetworkPacket(std::move(packet), time_now_us, @@ -185,15 +193,15 @@ bool FakeNetworkPipe::EnqueuePacket(rtc::CopyOnWriteBuffer packet, packet_time_us, nullptr)); } -bool FakeNetworkPipe::EnqueuePacket(rtc::CopyOnWriteBuffer packet, - absl::optional options, +bool FakeNetworkPipe::EnqueuePacket(CopyOnWriteBuffer packet, + std::optional options, bool is_rtcp, Transport* transport) { MutexLock lock(&process_lock_); int64_t time_now_us = clock_->TimeInMicroseconds(); return EnqueuePacket(NetworkPacket(std::move(packet), time_now_us, time_now_us, options, is_rtcp, - MediaType::ANY, absl::nullopt, transport)); + MediaType::ANY, std::nullopt, transport)); } bool FakeNetworkPipe::EnqueuePacket(NetworkPacket&& net_packet) { @@ -315,12 +323,10 @@ void FakeNetworkPipe::DeliverNetworkPacket(NetworkPacket* packet) { return; } if (packet->is_rtcp()) { - transport->SendRtcp( - rtc::MakeArrayView(packet->data(), packet->data_length())); + transport->SendRtcp(MakeArrayView(packet->data(), packet->data_length())); } else { - transport->SendRtp( - rtc::MakeArrayView(packet->data(), packet->data_length()), - packet->packet_options()); + transport->SendRtp(MakeArrayView(packet->data(), packet->data_length()), + packet->packet_options()); } } else if (receiver_) { int64_t packet_time_us = packet->packet_time_us().value_or(-1); @@ -348,14 +354,14 @@ void FakeNetworkPipe::DeliverNetworkPacket(NetworkPacket* packet) { } } -absl::optional FakeNetworkPipe::TimeUntilNextProcess() { +std::optional FakeNetworkPipe::TimeUntilNextProcess() { MutexLock lock(&process_lock_); - absl::optional delivery_us = network_behavior_->NextDeliveryTimeUs(); + std::optional delivery_us = network_behavior_->NextDeliveryTimeUs(); if (delivery_us) { int64_t delay_us = *delivery_us - clock_->TimeInMicroseconds(); return std::max((delay_us + 500) / 1000, 0); } - return absl::nullopt; + return std::nullopt; } bool FakeNetworkPipe::HasReceiver() const { diff --git a/call/fake_network_pipe.h b/call/fake_network_pipe.h index 7bc7e0f060..a30c50eecb 100644 --- a/call/fake_network_pipe.h +++ b/call/fake_network_pipe.h @@ -11,18 +11,19 @@ #ifndef 
CALL_FAKE_NETWORK_PIPE_H_ #define CALL_FAKE_NETWORK_PIPE_H_ +#include +#include #include #include #include -#include -#include -#include -#include +#include +#include "api/array_view.h" #include "api/call/transport.h" #include "api/test/simulated_network.h" #include "call/simulated_packet_receiver.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -34,13 +35,13 @@ enum class MediaType; class NetworkPacket { public: - NetworkPacket(rtc::CopyOnWriteBuffer packet, + NetworkPacket(CopyOnWriteBuffer packet, int64_t send_time, int64_t arrival_time, - absl::optional packet_options, + std::optional packet_options, bool is_rtcp, MediaType media_type, - absl::optional packet_time_us, + std::optional packet_time_us, Transport* transport); NetworkPacket(RtpPacketReceived packet, @@ -58,7 +59,7 @@ class NetworkPacket { const uint8_t* data() const { return packet_.data(); } size_t data_length() const { return packet_.size(); } - rtc::CopyOnWriteBuffer* raw_packet() { return &packet_; } + CopyOnWriteBuffer* raw_packet() { return &packet_; } int64_t send_time() const { return send_time_; } int64_t arrival_time() const { return arrival_time_; } void IncrementArrivalTime(int64_t extra_delay) { @@ -69,32 +70,32 @@ class NetworkPacket { } bool is_rtcp() const { return is_rtcp_; } MediaType media_type() const { return media_type_; } - absl::optional packet_time_us() const { return packet_time_us_; } + std::optional packet_time_us() const { return packet_time_us_; } RtpPacketReceived* packet_received() { return packet_received_ ? &packet_received_.value() : nullptr; } - absl::optional packet_received() const { + std::optional packet_received() const { return packet_received_; } Transport* transport() const { return transport_; } private: - rtc::CopyOnWriteBuffer packet_; + CopyOnWriteBuffer packet_; // The time the packet was sent out on the network. int64_t send_time_; // The time the packet should arrive at the receiver. int64_t arrival_time_; // If using a Transport for outgoing degradation, populate with // PacketOptions (transport-wide sequence number) for RTP. - absl::optional packet_options_; + std::optional packet_options_; bool is_rtcp_; // If using a PacketReceiver for incoming degradation, populate with // appropriate MediaType and packet time. This type/timing will be kept and // forwarded. The packet time might be altered to reflect time spent in fake // network pipe. MediaType media_type_; - absl::optional packet_time_us_; - absl::optional packet_received_; + std::optional packet_time_us_; + std::optional packet_received_; Transport* transport_; }; @@ -132,10 +133,10 @@ class FakeNetworkPipe : public SimulatedPacketReceiverInterface { // Methods for use with Transport interface. When/if packets are delivered, // they will be passed to the instance specified by the `transport` parameter. // Note that that instance must be in the map of active transports. - bool SendRtp(rtc::ArrayView packet, + bool SendRtp(ArrayView packet, const PacketOptions& options, Transport* transport); - bool SendRtcp(rtc::ArrayView packet, Transport* transport); + bool SendRtcp(ArrayView packet, Transport* transport); // Implements the PacketReceiver interface. 
When/if packets are delivered, // they will be passed directly to the receiver instance given in @@ -145,12 +146,12 @@ class FakeNetworkPipe : public SimulatedPacketReceiverInterface { MediaType media_type, RtpPacketReceived packet, OnUndemuxablePacketHandler undemuxable_packet_handler) override; - void DeliverRtcpPacket(rtc::CopyOnWriteBuffer packet) override; + void DeliverRtcpPacket(CopyOnWriteBuffer packet) override; // Processes the network queues and trigger PacketReceiver::IncomingPacket for // packets ready to be delivered. void Process() override; - absl::optional TimeUntilNextProcess() override; + std::optional TimeUntilNextProcess() override; // Get statistics. float PercentageLoss(); @@ -178,16 +179,16 @@ class FakeNetworkPipe : public SimulatedPacketReceiverInterface { // Returns true if enqueued, or false if packet was dropped. Use this method // when enqueueing packets that should be received by PacketReceiver instance. - bool EnqueuePacket(rtc::CopyOnWriteBuffer packet, - absl::optional options, + bool EnqueuePacket(CopyOnWriteBuffer packet, + std::optional options, bool is_rtcp, MediaType media_type, - absl::optional packet_time_us); + std::optional packet_time_us); // Returns true if enqueued, or false if packet was dropped. Use this method // when enqueueing packets that should be received by Transport instance. - bool EnqueuePacket(rtc::CopyOnWriteBuffer packet, - absl::optional options, + bool EnqueuePacket(CopyOnWriteBuffer packet, + std::optional options, bool is_rtcp, Transport* transport); diff --git a/call/fake_network_pipe_unittest.cc b/call/fake_network_pipe_unittest.cc index 31f97fc85c..05a348cb36 100644 --- a/call/fake_network_pipe_unittest.cc +++ b/call/fake_network_pipe_unittest.cc @@ -10,19 +10,26 @@ #include "call/fake_network_pipe.h" +#include +#include +#include #include #include +#include +#include "api/test/simulated_network.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "call/simulated_network.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "system_wrappers/include/clock.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/network/simulated_network.h" using ::testing::_; using ::testing::Property; @@ -31,10 +38,7 @@ using ::testing::WithArg; namespace webrtc { class MockReceiver : public PacketReceiver { public: - MOCK_METHOD(void, - DeliverRtcpPacket, - (rtc::CopyOnWriteBuffer packet), - (override)); + MOCK_METHOD(void, DeliverRtcpPacket, (CopyOnWriteBuffer packet), (override)); MOCK_METHOD(void, DeliverRtpPacket, (MediaType media_type, @@ -47,9 +51,9 @@ class MockReceiver : public PacketReceiver { class ReorderTestReceiver : public MockReceiver { public: void DeliverRtpPacket( - MediaType media_type, + MediaType /* media_type */, RtpPacketReceived packet, - OnUndemuxablePacketHandler undemuxable_packet_handler) override { + OnUndemuxablePacketHandler /* undemuxable_packet_handler */) override { RTC_DCHECK_GE(packet.size(), sizeof(int)); delivered_sequence_numbers_.push_back(packet.SequenceNumber()); } @@ -75,8 +79,8 @@ class FakeNetworkPipeTest : public ::testing::Test { } } - int PacketTimeMs(int capacity_kbps, int packet_size) const { - return 8 * packet_size / capacity_kbps; + int PacketTimeMs(DataRate capacity, int packet_size) const { + return 8 * 
packet_size / capacity.kbps(); } SimulatedClock fake_clock_; @@ -86,7 +90,7 @@ class FakeNetworkPipeTest : public ::testing::Test { TEST_F(FakeNetworkPipeTest, CapacityTest) { BuiltInNetworkBehaviorConfig config; config.queue_length_packets = 20; - config.link_capacity_kbps = 80; + config.link_capacity = DataRate::KilobitsPerSec(80); MockReceiver receiver; auto simulated_network = std::make_unique(config); std::unique_ptr pipe(new FakeNetworkPipe( @@ -99,8 +103,7 @@ TEST_F(FakeNetworkPipeTest, CapacityTest) { SendPackets(pipe.get(), kNumPackets, kPacketSize); // Time to get one packet through the link. - const int kPacketTimeMs = - PacketTimeMs(config.link_capacity_kbps, kPacketSize); + const int kPacketTimeMs = PacketTimeMs(config.link_capacity, kPacketSize); // Time haven't increased yet, so we souldn't get any packets. EXPECT_CALL(receiver, DeliverRtpPacket).Times(0); @@ -127,7 +130,7 @@ TEST_F(FakeNetworkPipeTest, ExtraDelayTest) { BuiltInNetworkBehaviorConfig config; config.queue_length_packets = 20; config.queue_delay_ms = 100; - config.link_capacity_kbps = 80; + config.link_capacity = DataRate::KilobitsPerSec(80); MockReceiver receiver; auto simulated_network = std::make_unique(config); std::unique_ptr pipe(new FakeNetworkPipe( @@ -138,8 +141,7 @@ TEST_F(FakeNetworkPipeTest, ExtraDelayTest) { SendPackets(pipe.get(), kNumPackets, kPacketSize); // Time to get one packet through the link. - const int kPacketTimeMs = - PacketTimeMs(config.link_capacity_kbps, kPacketSize); + const int kPacketTimeMs = PacketTimeMs(config.link_capacity, kPacketSize); // Increase more than kPacketTimeMs, but not more than the extra delay. fake_clock_.AdvanceTimeMilliseconds(kPacketTimeMs); @@ -162,15 +164,14 @@ TEST_F(FakeNetworkPipeTest, ExtraDelayTest) { TEST_F(FakeNetworkPipeTest, QueueLengthTest) { BuiltInNetworkBehaviorConfig config; config.queue_length_packets = 2; - config.link_capacity_kbps = 80; + config.link_capacity = DataRate::KilobitsPerSec(80); MockReceiver receiver; auto simulated_network = std::make_unique(config); std::unique_ptr pipe(new FakeNetworkPipe( &fake_clock_, std::move(simulated_network), &receiver)); const int kPacketSize = 1000; - const int kPacketTimeMs = - PacketTimeMs(config.link_capacity_kbps, kPacketSize); + const int kPacketTimeMs = PacketTimeMs(config.link_capacity, kPacketSize); // Send three packets and verify only 2 are delivered. SendPackets(pipe.get(), 3, kPacketSize); @@ -187,15 +188,14 @@ TEST_F(FakeNetworkPipeTest, StatisticsTest) { BuiltInNetworkBehaviorConfig config; config.queue_length_packets = 2; config.queue_delay_ms = 20; - config.link_capacity_kbps = 80; + config.link_capacity = DataRate::KilobitsPerSec(80); MockReceiver receiver; auto simulated_network = std::make_unique(config); std::unique_ptr pipe(new FakeNetworkPipe( &fake_clock_, std::move(simulated_network), &receiver)); const int kPacketSize = 1000; - const int kPacketTimeMs = - PacketTimeMs(config.link_capacity_kbps, kPacketSize); + const int kPacketTimeMs = PacketTimeMs(config.link_capacity, kPacketSize); // Send three packets and verify only 2 are delivered. 
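// Worked example for the DataRate-based PacketTimeMs helper above: a
// 1000-byte packet on an 80 kbps link needs 8 * 1000 / 80 = 100 ms to drain,
// which is why these tests advance the fake clock in kPacketTimeMs steps.
BuiltInNetworkBehaviorConfig example_config;
example_config.link_capacity = DataRate::KilobitsPerSec(80);
constexpr int kExamplePacketSizeBytes = 1000;
const int packet_time_ms =
    8 * kExamplePacketSizeBytes / example_config.link_capacity.kbps();  // 100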
SendPackets(pipe.get(), 3, kPacketSize); @@ -218,7 +218,7 @@ TEST_F(FakeNetworkPipeTest, StatisticsTest) { TEST_F(FakeNetworkPipeTest, ChangingCapacityWithEmptyPipeTest) { BuiltInNetworkBehaviorConfig config; config.queue_length_packets = 20; - config.link_capacity_kbps = 80; + config.link_capacity = DataRate::KilobitsPerSec(80); MockReceiver receiver; std::unique_ptr network(new SimulatedNetwork(config)); SimulatedNetwork* simulated_network = network.get(); @@ -232,7 +232,7 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithEmptyPipeTest) { SendPackets(pipe.get(), kNumPackets, kPacketSize); // Time to get one packet through the link. - int packet_time_ms = PacketTimeMs(config.link_capacity_kbps, kPacketSize); + int packet_time_ms = PacketTimeMs(config.link_capacity, kPacketSize); // Time hasn't increased yet, so we souldn't get any packets. EXPECT_CALL(receiver, DeliverRtpPacket).Times(0); @@ -246,7 +246,7 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithEmptyPipeTest) { } // Change the capacity. - config.link_capacity_kbps /= 2; // Reduce to 50%. + config.link_capacity = config.link_capacity / 2; // Reduce to 50%. simulated_network->SetConfig(config); // Add another 10 packets of 1000 bytes, = 80 kb, and verify it takes two @@ -254,7 +254,7 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithEmptyPipeTest) { SendPackets(pipe.get(), kNumPackets, kPacketSize); // Time to get one packet through the link. - packet_time_ms = PacketTimeMs(config.link_capacity_kbps, kPacketSize); + packet_time_ms = PacketTimeMs(config.link_capacity, kPacketSize); // Time hasn't increased yet, so we souldn't get any packets. EXPECT_CALL(receiver, DeliverRtpPacket).Times(0); @@ -280,7 +280,7 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithEmptyPipeTest) { TEST_F(FakeNetworkPipeTest, ChangingCapacityWithPacketsInPipeTest) { BuiltInNetworkBehaviorConfig config; config.queue_length_packets = 20; - config.link_capacity_kbps = 80; + config.link_capacity = DataRate::KilobitsPerSec(80); MockReceiver receiver; std::unique_ptr network(new SimulatedNetwork(config)); SimulatedNetwork* simulated_network = network.get(); @@ -297,7 +297,7 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithPacketsInPipeTest) { pipe->Process(); // Advance time in steps to release half of the packets one at a time. - int step_ms = PacketTimeMs(config.link_capacity_kbps, kPacketSize); + int step_ms = PacketTimeMs(config.link_capacity, kPacketSize); for (int i = 0; i < kNumPackets / 2; ++i) { fake_clock_.AdvanceTimeMilliseconds(step_ms); EXPECT_CALL(receiver, DeliverRtpPacket).Times(1); @@ -305,11 +305,11 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithPacketsInPipeTest) { } // Change the capacity. - config.link_capacity_kbps *= 2; // Double the capacity. + config.link_capacity = 2 * config.link_capacity; simulated_network->SetConfig(config); // Advance time in steps to release remaining packets one at a time. 
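// Illustrative sketch (not part of the change above): the
// link_capacity_kbps -> DataRate migration keeps the arithmetic of
// PacketTimeMs(): a 1000-byte packet on an 80 kbps link needs
// 8 * 1000 / 80 = 100 ms, so draining ten such packets takes about 1 s.
// Assuming the api/units division operator (DataSize / DataRate -> TimeDelta)
// behaves as documented, the helper could equivalently be written as:
#include "api/units/data_rate.h"
#include "api/units/data_size.h"
#include "api/units/time_delta.h"

namespace webrtc {
// Hypothetical equivalent of FakeNetworkPipeTest::PacketTimeMs() above.
inline TimeDelta PacketTime(DataRate capacity, int packet_size_bytes) {
  return DataSize::Bytes(packet_size_bytes) / capacity;
}
// PacketTime(DataRate::KilobitsPerSec(80), 1000) == TimeDelta::Millis(100)
}  // namespace webrtc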
- step_ms = PacketTimeMs(config.link_capacity_kbps, kPacketSize); + step_ms = PacketTimeMs(config.link_capacity, kPacketSize); for (int i = 0; i < kNumPackets / 2; ++i) { fake_clock_.AdvanceTimeMilliseconds(step_ms); EXPECT_CALL(receiver, DeliverRtpPacket).Times(1); @@ -328,7 +328,7 @@ TEST_F(FakeNetworkPipeTest, ChangingCapacityWithPacketsInPipeTest) { TEST_F(FakeNetworkPipeTest, DisallowReorderingThenAllowReordering) { BuiltInNetworkBehaviorConfig config; config.queue_length_packets = 1000; - config.link_capacity_kbps = 800; + config.link_capacity = DataRate::KilobitsPerSec(80); config.queue_delay_ms = 100; config.delay_standard_deviation_ms = 10; ReorderTestReceiver receiver; @@ -415,15 +415,14 @@ TEST_F(FakeNetworkPipeTest, BurstLoss) { TEST_F(FakeNetworkPipeTest, SetReceiver) { BuiltInNetworkBehaviorConfig config; - config.link_capacity_kbps = 800; + config.link_capacity = DataRate::KilobitsPerSec(800); MockReceiver receiver; auto simulated_network = std::make_unique(config); std::unique_ptr pipe(new FakeNetworkPipe( &fake_clock_, std::move(simulated_network), &receiver)); const int kPacketSize = 1000; - const int kPacketTimeMs = - PacketTimeMs(config.link_capacity_kbps, kPacketSize); + const int kPacketTimeMs = PacketTimeMs(config.link_capacity, kPacketSize); SendPackets(pipe.get(), 1, kPacketSize); fake_clock_.AdvanceTimeMilliseconds(kPacketTimeMs); EXPECT_CALL(receiver, DeliverRtpPacket).Times(1); @@ -495,14 +494,14 @@ TEST_F(FakeNetworkPipeTest, DeliverRtcpPacket) { std::unique_ptr pipe(new FakeNetworkPipe( &fake_clock_, std::move(simulated_network), &receiver)); - rtc::CopyOnWriteBuffer buffer(100); + CopyOnWriteBuffer buffer(100); memset(buffer.MutableData(), 0, 100); pipe->DeliverRtcpPacket(std::move(buffer)); // Advance the network delay to get the first packet. fake_clock_.AdvanceTimeMilliseconds(config.queue_delay_ms); EXPECT_CALL(receiver, - DeliverRtcpPacket(Property(&rtc::CopyOnWriteBuffer::size, 100))); + DeliverRtcpPacket(Property(&CopyOnWriteBuffer::size, 100))); pipe->Process(); } diff --git a/call/fake_payload_type_suggester.h b/call/fake_payload_type_suggester.h new file mode 100644 index 0000000000..f98d387d6b --- /dev/null +++ b/call/fake_payload_type_suggester.h @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef CALL_FAKE_PAYLOAD_TYPE_SUGGESTER_H_ +#define CALL_FAKE_PAYLOAD_TYPE_SUGGESTER_H_ + +#include + +#include "api/rtc_error.h" +#include "call/payload_type.h" +#include "call/payload_type_picker.h" +#include "media/base/codec.h" + +namespace webrtc { +// Fake payload type suggester, for use in tests. +// It uses a real PayloadTypePicker in order to do consistent PT +// assignment. +class FakePayloadTypeSuggester : public webrtc::PayloadTypeSuggester { + public: + webrtc::RTCErrorOr SuggestPayloadType( + const std::string& mid, + Codec codec) override { + // Ignores mid argument. 
+ return pt_picker_.SuggestMapping(codec, nullptr); + } + webrtc::RTCError AddLocalMapping(const std::string& mid, + webrtc::PayloadType payload_type, + const Codec& codec) override { + return webrtc::RTCError::OK(); + } + + private: + webrtc::PayloadTypePicker pt_picker_; +}; + +} // namespace webrtc + +#endif // CALL_FAKE_PAYLOAD_TYPE_SUGGESTER_H_ diff --git a/call/flexfec_receive_stream.cc b/call/flexfec_receive_stream.cc index ab6dde37b4..27261b8bfe 100644 --- a/call/flexfec_receive_stream.cc +++ b/call/flexfec_receive_stream.cc @@ -10,6 +10,7 @@ #include "call/flexfec_receive_stream.h" +#include "api/call/transport.h" #include "rtc_base/checks.h" namespace webrtc { diff --git a/call/flexfec_receive_stream.h b/call/flexfec_receive_stream.h index c5ac0f9fb6..eb70e206ec 100644 --- a/call/flexfec_receive_stream.h +++ b/call/flexfec_receive_stream.h @@ -18,7 +18,6 @@ #include "api/call/transport.h" #include "api/rtp_headers.h" -#include "api/rtp_parameters.h" #include "call/receive_stream.h" #include "call/rtp_packet_sink_interface.h" #include "modules/rtp_rtcp/include/receive_statistics.h" diff --git a/call/flexfec_receive_stream_impl.cc b/call/flexfec_receive_stream_impl.cc index e20f1b6ac5..2158e5775b 100644 --- a/call/flexfec_receive_stream_impl.cc +++ b/call/flexfec_receive_stream_impl.cc @@ -13,25 +13,25 @@ #include #include +#include #include -#include #include "api/array_view.h" -#include "api/call/transport.h" -#include "api/rtp_parameters.h" +#include "api/environment/environment.h" +#include "api/sequence_checker.h" +#include "call/flexfec_receive_stream.h" #include "call/rtp_stream_receiver_controller_interface.h" #include "modules/rtp_rtcp/include/flexfec_receiver.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" -#include "system_wrappers/include/clock.h" namespace webrtc { std::string FlexfecReceiveStream::Config::ToString() const { char buf[1024]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); ss << "{payload_type: " << payload_type; ss << ", remote_ssrc: " << rtp.remote_ssrc; ss << ", local_ssrc: " << rtp.local_ssrc; @@ -100,45 +100,33 @@ std::unique_ptr MaybeCreateFlexfecReceiver( recovered_packet_receiver)); } -std::unique_ptr CreateRtpRtcpModule( - Clock* clock, - ReceiveStatistics* receive_statistics, - const FlexfecReceiveStreamImpl::Config& config, - RtcpRttStats* rtt_stats) { - RtpRtcpInterface::Configuration configuration; - configuration.audio = false; - configuration.receiver_only = true; - configuration.clock = clock; - configuration.receive_statistics = receive_statistics; - configuration.outgoing_transport = config.rtcp_send_transport; - configuration.rtt_stats = rtt_stats; - configuration.local_media_ssrc = config.rtp.local_ssrc; - return ModuleRtpRtcpImpl2::Create(configuration); -} - } // namespace FlexfecReceiveStreamImpl::FlexfecReceiveStreamImpl( - Clock* clock, + const Environment& env, Config config, RecoveredPacketReceiver* recovered_packet_receiver, RtcpRttStats* rtt_stats) : remote_ssrc_(config.rtp.remote_ssrc), payload_type_(config.payload_type), - receiver_( - MaybeCreateFlexfecReceiver(clock, config, recovered_packet_receiver)), - rtp_receive_statistics_(ReceiveStatistics::Create(clock)), - rtp_rtcp_(CreateRtpRtcpModule(clock, - rtp_receive_statistics_.get(), - config, - rtt_stats)) { + receiver_(MaybeCreateFlexfecReceiver(&env.clock(), + config, + recovered_packet_receiver)), + 
rtp_receive_statistics_(ReceiveStatistics::Create(&env.clock())), + rtp_rtcp_(env, + {.audio = false, + .receiver_only = true, + .receive_statistics = rtp_receive_statistics_.get(), + .outgoing_transport = config.rtcp_send_transport, + .rtt_stats = rtt_stats, + .local_media_ssrc = config.rtp.local_ssrc}) { RTC_LOG(LS_INFO) << "FlexfecReceiveStreamImpl: " << config.ToString(); RTC_DCHECK_GE(payload_type_, -1); packet_sequence_checker_.Detach(); // RTCP reporting. - rtp_rtcp_->SetRTCPStatus(config.rtcp_mode); + rtp_rtcp_.SetRTCPStatus(config.rtcp_mode); } FlexfecReceiveStreamImpl::~FlexfecReceiveStreamImpl() { @@ -192,10 +180,10 @@ int FlexfecReceiveStreamImpl::payload_type() const { void FlexfecReceiveStreamImpl::SetLocalSsrc(uint32_t local_ssrc) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - if (local_ssrc == rtp_rtcp_->local_media_ssrc()) + if (local_ssrc == rtp_rtcp_.local_media_ssrc()) return; - rtp_rtcp_->SetLocalSsrc(local_ssrc); + rtp_rtcp_.SetLocalSsrc(local_ssrc); } } // namespace webrtc diff --git a/call/flexfec_receive_stream_impl.h b/call/flexfec_receive_stream_impl.h index 5ce2cb6f0e..2e1b9c452c 100644 --- a/call/flexfec_receive_stream_impl.h +++ b/call/flexfec_receive_stream_impl.h @@ -11,14 +11,17 @@ #ifndef CALL_FLEXFEC_RECEIVE_STREAM_IMPL_H_ #define CALL_FLEXFEC_RECEIVE_STREAM_IMPL_H_ +#include #include -#include +#include "api/environment/environment.h" +#include "api/rtp_headers.h" +#include "api/sequence_checker.h" #include "call/flexfec_receive_stream.h" #include "call/rtp_packet_sink_interface.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "rtc_base/system/no_unique_address.h" -#include "system_wrappers/include/clock.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -27,13 +30,12 @@ class ReceiveStatistics; class RecoveredPacketReceiver; class RtcpRttStats; class RtpPacketReceived; -class RtpRtcp; class RtpStreamReceiverControllerInterface; class RtpStreamReceiverInterface; class FlexfecReceiveStreamImpl : public FlexfecReceiveStream { public: - FlexfecReceiveStreamImpl(Clock* clock, + FlexfecReceiveStreamImpl(const Environment& env, Config config, RecoveredPacketReceiver* recovered_packet_receiver, RtcpRttStats* rtt_stats); @@ -67,7 +69,7 @@ class FlexfecReceiveStreamImpl : public FlexfecReceiveStream { void SetRtcpMode(RtcpMode mode) override { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - rtp_rtcp_->SetRTCPStatus(mode); + rtp_rtcp_.SetRTCPStatus(mode); } const ReceiveStatistics* GetStats() const override { @@ -88,7 +90,7 @@ class FlexfecReceiveStreamImpl : public FlexfecReceiveStream { // RTCP reporting. 
const std::unique_ptr rtp_receive_statistics_; - const std::unique_ptr rtp_rtcp_; + ModuleRtpRtcpImpl2 rtp_rtcp_; std::unique_ptr rtp_stream_receiver_ RTC_GUARDED_BY(packet_sequence_checker_); diff --git a/call/flexfec_receive_stream_unittest.cc b/call/flexfec_receive_stream_unittest.cc index c575a3f41d..e49d02d31b 100644 --- a/call/flexfec_receive_stream_unittest.cc +++ b/call/flexfec_receive_stream_unittest.cc @@ -16,15 +16,13 @@ #include "api/array_view.h" #include "api/call/transport.h" +#include "api/environment/environment_factory.h" #include "api/rtp_headers.h" -#include "api/rtp_parameters.h" #include "call/flexfec_receive_stream_impl.h" #include "call/rtp_stream_receiver_controller.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/mocks/mock_recovered_packet_receiver.h" #include "modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h" #include "modules/rtp_rtcp/source/byte_io.h" -#include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/thread.h" #include "test/gmock.h" @@ -35,7 +33,6 @@ namespace webrtc { namespace { -using ::testing::_; using ::testing::Eq; using ::testing::Property; @@ -54,7 +51,7 @@ FlexfecReceiveStream::Config CreateDefaultConfig( return config; } -RtpPacketReceived ParsePacket(rtc::ArrayView packet) { +RtpPacketReceived ParsePacket(ArrayView packet) { RtpPacketReceived parsed_packet(nullptr); EXPECT_TRUE(parsed_packet.Parse(packet)); return parsed_packet; @@ -88,14 +85,13 @@ class FlexfecReceiveStreamTest : public ::testing::Test { FlexfecReceiveStreamTest() : config_(CreateDefaultConfig(&rtcp_send_transport_)) { receive_stream_ = std::make_unique( - Clock::GetRealTimeClock(), config_, &recovered_packet_receiver_, - &rtt_stats_); + CreateEnvironment(), config_, &recovered_packet_receiver_, &rtt_stats_); receive_stream_->RegisterWithTransport(&rtp_stream_receiver_controller_); } ~FlexfecReceiveStreamTest() { receive_stream_->UnregisterFromTransport(); } - rtc::AutoThread main_thread_; + AutoThread main_thread_; MockTransport rtcp_send_transport_; FlexfecReceiveStream::Config config_; MockRecoveredPacketReceiver recovered_packet_receiver_; diff --git a/call/packet_receiver.h b/call/packet_receiver.h index cdcf7bfc73..c149f8e3cc 100644 --- a/call/packet_receiver.h +++ b/call/packet_receiver.h @@ -13,7 +13,6 @@ #include "absl/functional/any_invocable.h" #include "api/media_types.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" namespace webrtc { @@ -21,9 +20,9 @@ namespace webrtc { class PacketReceiver { public: // Demux RTCP packets. Must be called on the worker thread. - virtual void DeliverRtcpPacket(rtc::CopyOnWriteBuffer packet) = 0; + virtual void DeliverRtcpPacket(CopyOnWriteBuffer packet) = 0; - // Invoked once when a packet packet is received that can not be demuxed. + // Invoked once when a packet is received that can not be demuxed. // If the method returns true, a new attempt is made to demux the packet. using OnUndemuxablePacketHandler = absl::AnyInvocable; diff --git a/call/payload_type.h b/call/payload_type.h new file mode 100644 index 0000000000..d91da50fcc --- /dev/null +++ b/call/payload_type.h @@ -0,0 +1,63 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef CALL_PAYLOAD_TYPE_H_ +#define CALL_PAYLOAD_TYPE_H_ + +#include +#include + +#include "api/rtc_error.h" +#include "media/base/codec.h" +#include "rtc_base/strong_alias.h" + +namespace webrtc { + +class PayloadType : public StrongAlias { + public: + // Non-explicit conversions from and to ints are to be deprecated and + // removed once calling code is upgraded. + PayloadType(uint8_t pt) { value_ = pt; } // NOLINT: explicit + constexpr operator uint8_t() const& { return value_; } // NOLINT: Explicit + static bool IsValid(PayloadType id, bool rtcp_mux) { + // A payload type is a 7-bit value in the RTP header, so max = 127. + // If RTCP multiplexing is used, the numbers from 64 to 95 are reserved + // for RTCP packets. + if (rtcp_mux && (id > 63 && id < 96)) { + return false; + } + return id >= 0 && id <= 127; + } + template + friend void AbslStringify(Sink& sink, const PayloadType pt) { + absl::Format(&sink, "%d", pt.value_); + } +}; + +class PayloadTypeSuggester { + public: + virtual ~PayloadTypeSuggester() = default; + + // Suggest a payload type for a given codec on a given media section. + // Media section is indicated by MID. + // The function will either return a PT already in use on the connection + // or a newly suggested one. + virtual RTCErrorOr SuggestPayloadType(const std::string& mid, + Codec codec) = 0; + // Register a payload type as mapped to a specific codec for this MID + // at this time. + virtual RTCError AddLocalMapping(const std::string& mid, + PayloadType payload_type, + const Codec& codec) = 0; +}; + +} // namespace webrtc + +#endif // CALL_PAYLOAD_TYPE_H_ diff --git a/call/payload_type_picker.cc b/call/payload_type_picker.cc new file mode 100644 index 0000000000..cee6d46094 --- /dev/null +++ b/call/payload_type_picker.cc @@ -0,0 +1,335 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "call/payload_type_picker.h" + +#include +#include +#include +#include +#include + +#include "absl/strings/match.h" +#include "api/audio_codecs/audio_format.h" +#include "api/rtc_error.h" +#include "call/payload_type.h" +#include "media/base/codec.h" +#include "media/base/codec_comparators.h" +#include "media/base/media_constants.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/string_encode.h" + +namespace webrtc { + +namespace { + +// Due to interoperability issues with old Chrome/WebRTC versions that +// ignore the [35, 63] range prefer the lower range for new codecs. +static const int kFirstDynamicPayloadTypeLowerRange = 35; +static const int kLastDynamicPayloadTypeLowerRange = 63; + +static const int kFirstDynamicPayloadTypeUpperRange = 96; +static const int kLastDynamicPayloadTypeUpperRange = 127; + +// Note: The only fields we need from a Codec are the type (audio/video), +// the subtype (vp8/h264/....), the clock rate, the channel count, and the +// fmtp parameters. The use of Codec, which contains more fields, +// is only a temporary measure. 
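// Illustrative sketch (not part of the change above): worked examples of
// PayloadType::IsValid() as defined in call/payload_type.h. With rtcp-mux,
// payload types [64, 95] are kept clear so RTP payload types cannot be
// confused with RTCP packet types; without rtcp-mux the full 0..127 range
// is allowed.
#include "call/payload_type.h"

namespace webrtc {
inline void PayloadTypeValidityExamples() {
  bool a = PayloadType::IsValid(PayloadType(96), /*rtcp_mux=*/true);   // true
  bool b = PayloadType::IsValid(PayloadType(72), /*rtcp_mux=*/true);   // false
  bool c = PayloadType::IsValid(PayloadType(72), /*rtcp_mux=*/false);  // true
  bool d = PayloadType::IsValid(PayloadType(0), /*rtcp_mux=*/true);    // true
  (void)a; (void)b; (void)c; (void)d;
}
}  // namespace webrtc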
+ +struct MapTableEntry { + webrtc::SdpAudioFormat format; + int payload_type; +}; + +// Helper function to determine whether a codec should use the [35, 63] range. +// Should be used when adding new codecs (or variants). +bool CodecPrefersLowerRange(const Codec& codec) { + // All audio codecs prefer upper range. + if (codec.type == Codec::Type::kAudio) { + return absl::EqualsIgnoreCase(codec.name, kRedCodecName); + } + if (absl::EqualsIgnoreCase(codec.name, kFlexfecCodecName) || + absl::EqualsIgnoreCase(codec.name, kAv1CodecName) || + absl::EqualsIgnoreCase(codec.name, kH265CodecName)) { + return true; + } else if (absl::EqualsIgnoreCase(codec.name, kH264CodecName)) { + std::string profile_level_id; + std::string packetization_mode; + + if (codec.GetParam(kH264FmtpProfileLevelId, &profile_level_id)) { + if (absl::StartsWithIgnoreCase(profile_level_id, "4d00")) { + if (codec.GetParam(kH264FmtpPacketizationMode, &packetization_mode)) { + return packetization_mode == "0"; + } + } + // H264 with YUV444. + return absl::StartsWithIgnoreCase(profile_level_id, "f400"); + } + } else if (absl::EqualsIgnoreCase(codec.name, kVp9CodecName)) { + std::string profile_id; + + if (codec.GetParam(kVP9ProfileId, &profile_id)) { + if (profile_id == "1" || profile_id == "3") { + return true; + } + } + } else if (absl::EqualsIgnoreCase(codec.name, kRtxCodecName)) { + // For RTX prefer lower range if the associated codec is in that range. + std::string associated_pt_str; + int associated_pt; + return codec.GetParam(kCodecParamAssociatedPayloadType, + &associated_pt_str) && + FromString(associated_pt_str, &associated_pt) && + associated_pt >= kFirstDynamicPayloadTypeLowerRange && + associated_pt <= kLastDynamicPayloadTypeLowerRange; + } + return false; +} + +RTCErrorOr FindFreePayloadType(const Codec& codec, + std::set seen_pt) { + // Prefer to use lower range for codecs that can handle it. + bool prefer_lower_range = CodecPrefersLowerRange(codec); + if (prefer_lower_range) { + for (auto i = kFirstDynamicPayloadTypeLowerRange; + i <= kLastDynamicPayloadTypeLowerRange; i++) { + if (seen_pt.count(PayloadType(i)) == 0) { + return PayloadType(i); + } + } + } + for (auto i = kFirstDynamicPayloadTypeUpperRange; + i <= kLastDynamicPayloadTypeUpperRange; i++) { + if (seen_pt.count(PayloadType(i)) == 0) { + return PayloadType(i); + } + } + // If the upper range is full, we do lower range also for codecs + // that prefer the upper range. + if (!prefer_lower_range) { + for (auto i = kFirstDynamicPayloadTypeLowerRange; + i <= kLastDynamicPayloadTypeLowerRange; i++) { + if (seen_pt.count(PayloadType(i)) == 0) { + return PayloadType(i); + } + } + } + if (prefer_lower_range) { + return RTCError(RTCErrorType::RESOURCE_EXHAUSTED, + "All available dynamic PTs have been assigned"); + } else { + return RTCError( + RTCErrorType::RESOURCE_EXHAUSTED, + "All available dynamic PTs have been assigned, codec preferred upper"); + } +} + +} // namespace + +PayloadTypePicker::PayloadTypePicker() { + // Default audio codecs. Duplicates media/engine/payload_type_mapper.cc + const MapTableEntry default_audio_mappings[] = { + // Static payload type assignments according to RFC 3551. 
+ {{kPcmuCodecName, 8000, 1}, 0}, + {{"GSM", 8000, 1}, 3}, + {{"G723", 8000, 1}, 4}, + {{"DVI4", 8000, 1}, 5}, + {{"DVI4", 16000, 1}, 6}, + {{"LPC", 8000, 1}, 7}, + {{kPcmaCodecName, 8000, 1}, 8}, + {{kG722CodecName, 8000, 1}, 9}, + {{kL16CodecName, 44100, 2}, 10}, + {{kL16CodecName, 44100, 1}, 11}, + {{"QCELP", 8000, 1}, 12}, + {{kCnCodecName, 8000, 1}, 13}, + // RFC 4566 is a bit ambiguous on the contents of the "encoding + // parameters" field, which, for audio, encodes the number of + // channels. It is "optional and may be omitted if the number of + // channels is one". Does that necessarily imply that an omitted + // encoding parameter means one channel? Since RFC 3551 doesn't + // specify a value for this parameter for MPA, I've included both 0 + // and 1 here, to increase the chances it will be correctly used if + // someone implements an MPEG audio encoder/decoder. + {{"MPA", 90000, 0}, 14}, + {{"MPA", 90000, 1}, 14}, + {{"G728", 8000, 1}, 15}, + {{"DVI4", 11025, 1}, 16}, + {{"DVI4", 22050, 1}, 17}, + {{"G729", 8000, 1}, 18}, + + // Payload type assignments currently used by WebRTC. + // Includes data to reduce collisions (and thus reassignments) + // TODO(bugs.webrtc.org/400630582): Delete this, it's only for test + // stability. + {{"reserved-do-not-use", 0, 0}, 102}, + {{kCnCodecName, 16000, 1}, 105}, + {{kCnCodecName, 32000, 1}, 106}, + {{kOpusCodecName, + 48000, + 2, + {{kCodecParamMinPTime, "10"}, + {kCodecParamUseInbandFec, kParamValueTrue}}}, + 111}, + // RED for opus is assigned in the lower range, starting at the top. + // Note that the FMTP refers to the opus payload type. + {{kRedCodecName, + 48000, + 2, + {{kCodecParamNotInNameValueFormat, "111/111"}}}, + 63}, + // TODO(solenberg): Remove the hard coded 16k,32k,48k DTMF once we + // assign payload types dynamically for send side as well. + {{kDtmfCodecName, 48000, 1}, 110}, + {{kDtmfCodecName, 32000, 1}, 112}, + {{kDtmfCodecName, 16000, 1}, 113}, + {{kDtmfCodecName, 8000, 1}, 126}}; + for (const MapTableEntry& entry : default_audio_mappings) { + AddMapping(PayloadType(entry.payload_type), CreateAudioCodec(entry.format)); + } +} + +RTCErrorOr PayloadTypePicker::SuggestMapping( + Codec codec, + const PayloadTypeRecorder* excluder) { + // Test compatibility: If the codec contains a PT, and it is free, use it. + // This saves having to rewrite tests that set the codec ID themselves. + // Codecs with unassigned IDs should have -1 as their id. + if (codec.id >= 0 && codec.id <= kLastDynamicPayloadTypeUpperRange && + seen_payload_types_.count(PayloadType(codec.id)) == 0) { + AddMapping(PayloadType(codec.id), codec); + return PayloadType(codec.id); + } + // The first matching entry is returned, unless excluder + // maps it to something different. + for (const MapEntry& entry : entries_) { + if (MatchesWithReferenceAttributes(entry.codec(), codec)) { + if (excluder) { + auto result = excluder->LookupCodec(entry.payload_type()); + if (result.ok() && + !MatchesWithReferenceAttributes(result.value(), codec)) { + continue; + } + } + return entry.payload_type(); + } + } + // Assign the first free payload type. + RTCErrorOr found_pt = + FindFreePayloadType(codec, seen_payload_types_); + if (found_pt.ok()) { + AddMapping(found_pt.value(), codec); + } + return found_pt; +} + +RTCError PayloadTypePicker::AddMapping(PayloadType payload_type, Codec codec) { + // Completely duplicate mappings are ignored. 
+ // Multiple mappings for the same codec and the same PT are legal; + for (const MapEntry& entry : entries_) { + if (payload_type == entry.payload_type() && + MatchesWithReferenceAttributes(codec, entry.codec())) { + return RTCError::OK(); + } + } + entries_.emplace_back(MapEntry(payload_type, codec)); + seen_payload_types_.emplace(payload_type); + return RTCError::OK(); +} + +RTCError PayloadTypeRecorder::AddMapping(PayloadType payload_type, + Codec codec) { + auto existing_codec_it = payload_type_to_codec_.find(payload_type); + if (existing_codec_it != payload_type_to_codec_.end() && + !MatchesWithCodecRules(codec, existing_codec_it->second)) { + // Redefinition attempted. + if (disallow_redefinition_level_ > 0) { + if (accepted_definitions_.count(payload_type) > 0) { + // We have already defined this PT in this scope. + RTC_LOG(LS_WARNING) + << "Rejected attempt to redefine mapping for PT " << payload_type + << " from " << existing_codec_it->second << " to " << codec; + return RTCError(RTCErrorType::INVALID_MODIFICATION, + "Attempt to redefine a codec mapping"); + } + } + if (absl::EqualsIgnoreCase(codec.name, existing_codec_it->second.name)) { + // The difference is in clock rate, channels or FMTP parameters. + RTC_LOG(LS_INFO) << "Warning: Attempt to change a codec's parameters"; + // Some FMTP value changes are harmless, others are harmful. + // This is done in production today, so we can't return an error. + } else { + RTC_LOG(LS_WARNING) << "Warning: You attempted to redefine a codec from " + << existing_codec_it->second << " to " + << " new codec " << codec; + // This is a spec violation. + // TODO: https://issues.webrtc.org/41480892 - return an error. + } + // Accept redefinition. + accepted_definitions_.emplace(payload_type); + payload_type_to_codec_.insert_or_assign(payload_type, codec); + return RTCError::OK(); + } + accepted_definitions_.emplace(payload_type); + payload_type_to_codec_.emplace(payload_type, codec); + suggester_.AddMapping(payload_type, codec); + return RTCError::OK(); +} + +std::vector> PayloadTypeRecorder::GetMappings() + const { + return std::vector>{}; +} + +RTCErrorOr PayloadTypeRecorder::LookupPayloadType( + Codec codec) const { + // Note that having multiple PTs mapping to the same codec is NOT an error. + // In this case, we return the first found (not deterministic). 
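// Illustrative sketch (not part of the change above): as the comment above
// notes, several payload types may legally map to the same codec, and
// LookupPayloadType() then returns whichever mapping it finds first. The
// helper below is hypothetical; codec construction mirrors the unit tests.
#include "call/payload_type.h"
#include "call/payload_type_picker.h"
#include "media/base/codec.h"

namespace webrtc {
inline void DuplicateMappingExample(PayloadTypePicker& picker) {
  PayloadTypeRecorder recorder(picker);
  Codec vp8 = CreateVideoCodec(Codec::kIdNotSet, "vp8");
  recorder.AddMapping(PayloadType(96), vp8);
  recorder.AddMapping(PayloadType(97), vp8);  // Not an error.
  // Returns either 96 or 97; callers must not rely on which one.
  auto payload_type_or_error = recorder.LookupPayloadType(vp8);
  (void)payload_type_or_error;
}
}  // namespace webrtc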
+ auto result = + std::find_if(payload_type_to_codec_.begin(), payload_type_to_codec_.end(), + [codec](const auto& iter) { + return MatchesWithReferenceAttributes(iter.second, codec); + }); + if (result == payload_type_to_codec_.end()) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "No payload type found for codec"); + } + return result->first; +} + +RTCErrorOr PayloadTypeRecorder::LookupCodec( + PayloadType payload_type) const { + auto result = payload_type_to_codec_.find(payload_type); + if (result == payload_type_to_codec_.end()) { + return RTCError(RTCErrorType::INVALID_PARAMETER, "No such payload type"); + } + return result->second; +} + +void PayloadTypeRecorder::DisallowRedefinition() { + if (disallow_redefinition_level_ == 0) { + accepted_definitions_.clear(); + } + ++disallow_redefinition_level_; +} + +void PayloadTypeRecorder::ReallowRedefinition() { + RTC_CHECK(disallow_redefinition_level_ > 0); + --disallow_redefinition_level_; +} + +void PayloadTypeRecorder::Commit() { + checkpoint_payload_type_to_codec_ = payload_type_to_codec_; +} +void PayloadTypeRecorder::Rollback() { + payload_type_to_codec_ = checkpoint_payload_type_to_codec_; +} + +} // namespace webrtc diff --git a/call/payload_type_picker.h b/call/payload_type_picker.h new file mode 100644 index 0000000000..85cc0bb318 --- /dev/null +++ b/call/payload_type_picker.h @@ -0,0 +1,101 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef CALL_PAYLOAD_TYPE_PICKER_H_ +#define CALL_PAYLOAD_TYPE_PICKER_H_ + +#include +#include +#include +#include + +#include "api/rtc_error.h" +#include "call/payload_type.h" +#include "media/base/codec.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +class PayloadTypeRecorder; + +class PayloadTypePicker { + public: + PayloadTypePicker(); + PayloadTypePicker(const PayloadTypePicker&) = delete; + PayloadTypePicker& operator=(const PayloadTypePicker&) = delete; + PayloadTypePicker(PayloadTypePicker&&) = delete; + PayloadTypePicker& operator=(PayloadTypePicker&&) = delete; + // Suggest a payload type for the codec. + // If the excluder maps it to something different, don't suggest it. + RTCErrorOr SuggestMapping(Codec codec, + const PayloadTypeRecorder* excluder); + RTCError AddMapping(PayloadType payload_type, Codec codec); + + private: + class MapEntry { + public: + MapEntry(PayloadType payload_type, Codec codec) + : payload_type_(payload_type), codec_(codec) {} + PayloadType payload_type() const { return payload_type_; } + Codec codec() const { return codec_; } + + private: + PayloadType payload_type_; + Codec codec_; + }; + std::vector entries_; + std::set seen_payload_types_; + template + friend void AbslStringify(Sink& sink, const PayloadTypePicker& picker) { + sink.Append("Reserved:"); + for (PayloadType pt : picker.seen_payload_types_) { + absl::Format(&sink, " %v", pt); + } + } +}; + +class PayloadTypeRecorder { + public: + explicit PayloadTypeRecorder(PayloadTypePicker& suggester) + : suggester_(suggester) {} + ~PayloadTypeRecorder() { + // Ensure consistent use of paired Disallow/ReallowRedefintion calls. 
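// Illustrative sketch (not part of the change above): the calling pattern
// implied by the paired Disallow/ReallowRedefinition contract and the
// Commit()/Rollback() comments in this header: redefinition is blocked while
// a single SDP description is applied, then the transaction is committed or
// rolled back. The wrapper function below is hypothetical.
#include "api/rtc_error.h"
#include "call/payload_type.h"
#include "call/payload_type_picker.h"
#include "media/base/codec.h"

namespace webrtc {
inline RTCError ApplyOneMapping(PayloadTypeRecorder& recorder,
                                PayloadType payload_type,
                                const Codec& codec) {
  recorder.DisallowRedefinition();  // Push: no redefinition within this SDP.
  RTCError error = recorder.AddMapping(payload_type, codec);
  recorder.ReallowRedefinition();   // Pop: must pair with the push above.
  if (error.ok()) {
    recorder.Commit();    // Keep the new state as the checkpoint.
  } else {
    recorder.Rollback();  // Restore the previously committed checkpoint.
  }
  return error;
}
}  // namespace webrtc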
+ RTC_DCHECK(disallow_redefinition_level_ == 0); + } + + RTCError AddMapping(PayloadType payload_type, Codec codec); + std::vector> GetMappings() const; + RTCErrorOr LookupPayloadType(Codec codec) const; + RTCErrorOr LookupCodec(PayloadType payload_type) const; + // Redefinition guard. + // In some scenarios, redefinition must be allowed between one offer/answer + // set and the next offer/answer set, but within the processing of one + // SDP, it should never be allowed. + // Implemented as a stack push/pop for convenience; if Disallow has + // been called more times than Reallow, redefinition is prohibited. + void DisallowRedefinition(); + void ReallowRedefinition(); + // Transaction support. + // Commit() commits previous changes. + void Commit(); + // Rollback() rolls back to the previous checkpoint. + void Rollback(); + + private: + PayloadTypePicker& suggester_; + std::map payload_type_to_codec_; + std::map checkpoint_payload_type_to_codec_; + int disallow_redefinition_level_ = 0; + std::set accepted_definitions_; +}; + +} // namespace webrtc + +#endif // CALL_PAYLOAD_TYPE_PICKER_H_ diff --git a/call/payload_type_picker_unittest.cc b/call/payload_type_picker_unittest.cc new file mode 100644 index 0000000000..e6593d1494 --- /dev/null +++ b/call/payload_type_picker_unittest.cc @@ -0,0 +1,248 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "call/payload_type_picker.h" + +#include "api/video_codecs/sdp_video_format.h" +#include "call/payload_type.h" +#include "media/base/codec.h" +#include "media/base/media_constants.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { + +using testing::Eq; +using testing::Ge; +using testing::Le; +using testing::Ne; + +TEST(PayloadTypePicker, PayloadTypeAssignmentWorks) { + // Note: This behavior is due to be deprecated and removed. + PayloadType pt_a(1); + PayloadType pt_b = 1; // Implicit conversion + EXPECT_EQ(pt_a, pt_b); + int pt_as_int = pt_a; // Implicit conversion + EXPECT_EQ(1, pt_as_int); +} + +TEST(PayloadTypePicker, InstantiateTypes) { + PayloadTypePicker picker; + PayloadTypeRecorder recorder(picker); +} + +TEST(PayloadTypePicker, StoreAndRecall) { + PayloadTypePicker picker; + PayloadTypeRecorder recorder(picker); + const PayloadType a_payload_type(123); + const PayloadType not_a_payload_type(44); + Codec a_codec = CreateVideoCodec(0, "vp8"); + auto error = recorder.AddMapping(a_payload_type, a_codec); + ASSERT_TRUE(error.ok()); + auto result = recorder.LookupCodec(a_payload_type); + ASSERT_TRUE(result.ok()); + EXPECT_EQ(result.value(), a_codec); + auto result_pt = recorder.LookupPayloadType(a_codec); + ASSERT_TRUE(result_pt.ok()); + EXPECT_EQ(result_pt.value(), a_payload_type); + EXPECT_FALSE(recorder.LookupCodec(not_a_payload_type).ok()); +} + +TEST(PayloadTypePicker, ModifyingPtIsIgnored) { + // Arguably a spec violation, but happens in production. + // To be decided: Whether we should disallow codec change, fmtp change + // or both. 
+ PayloadTypePicker picker; + PayloadTypeRecorder recorder(picker); + const PayloadType a_payload_type(123); + Codec a_codec = CreateVideoCodec(Codec::kIdNotSet, "vp8"); + Codec b_codec = CreateVideoCodec(Codec::kIdNotSet, "vp9"); + recorder.AddMapping(a_payload_type, a_codec); + auto error = recorder.AddMapping(a_payload_type, b_codec); + EXPECT_TRUE(error.ok()); + auto result = recorder.LookupCodec(a_payload_type); + // Redefinition should be accepted. + EXPECT_EQ(result.value(), b_codec); +} + +TEST(PayloadTypePicker, ModifyingPtIsAnErrorIfDisallowed) { + PayloadTypePicker picker; + PayloadTypeRecorder recorder(picker); + const PayloadType a_payload_type(123); + Codec a_codec = CreateVideoCodec(Codec::kIdNotSet, "vp8"); + Codec b_codec = CreateVideoCodec(Codec::kIdNotSet, "vp9"); + recorder.DisallowRedefinition(); + recorder.AddMapping(a_payload_type, a_codec); + auto error = recorder.AddMapping(a_payload_type, b_codec); + EXPECT_FALSE(error.ok()); + auto result = recorder.LookupCodec(a_payload_type); + // Attempted redefinition should be ignored. + EXPECT_EQ(result.value(), a_codec); + recorder.ReallowRedefinition(); +} + +TEST(PayloadTypePicker, RollbackAndCommit) { + PayloadTypePicker picker; + PayloadTypeRecorder recorder(picker); + const PayloadType a_payload_type(123); + const PayloadType b_payload_type(124); + const PayloadType not_a_payload_type(44); + + Codec a_codec = CreateVideoCodec(0, "vp8"); + + Codec b_codec = CreateVideoCodec(0, "vp9"); + auto error = recorder.AddMapping(a_payload_type, a_codec); + ASSERT_TRUE(error.ok()); + recorder.Commit(); + ASSERT_TRUE(recorder.AddMapping(b_payload_type, b_codec).ok()); + { + auto result = recorder.LookupCodec(a_payload_type); + ASSERT_TRUE(result.ok()); + EXPECT_EQ(result.value(), a_codec); + } + { + auto result = recorder.LookupCodec(b_payload_type); + ASSERT_TRUE(result.ok()); + EXPECT_EQ(result.value(), b_codec); + } + recorder.Rollback(); + { + auto result = recorder.LookupCodec(a_payload_type); + ASSERT_TRUE(result.ok()); + EXPECT_EQ(result.value(), a_codec); + } + { + auto result = recorder.LookupCodec(b_payload_type); + ASSERT_FALSE(result.ok()); + } + ASSERT_TRUE(recorder.AddMapping(b_payload_type, b_codec).ok()); + // Rollback after a new checkpoint has no effect. + recorder.Commit(); + recorder.Rollback(); + { + auto result = recorder.LookupCodec(b_payload_type); + ASSERT_TRUE(result.ok()); + EXPECT_EQ(result.value(), b_codec); + } +} + +TEST(PayloadTypePicker, StaticValueIsGood) { + PayloadTypePicker picker; + Codec a_codec = CreateAudioCodec(-1, kPcmuCodecName, 8000, 1); + auto result = picker.SuggestMapping(a_codec, nullptr); + // In the absence of existing mappings, PCMU always has 0 as PT. + ASSERT_TRUE(result.ok()); + EXPECT_EQ(result.value(), PayloadType(0)); +} + +TEST(PayloadTypePicker, DynamicValueIsGood) { + PayloadTypePicker picker; + Codec a_codec = CreateAudioCodec(-1, "lyra", 8000, 1); + auto result = picker.SuggestMapping(a_codec, nullptr); + // This should result in a value from the dynamic range; since this is the + // first assignment, it should be in the upper range. 
+ ASSERT_TRUE(result.ok()); + EXPECT_GE(result.value(), PayloadType(96)); + EXPECT_LE(result.value(), PayloadType(127)); +} + +TEST(PayloadTypePicker, RecordedValueReturned) { + PayloadTypePicker picker; + PayloadTypeRecorder recorder(picker); + Codec a_codec = CreateAudioCodec(-1, "lyra", 8000, 1); + recorder.AddMapping(47, a_codec); + auto result = picker.SuggestMapping(a_codec, &recorder); + ASSERT_TRUE(result.ok()); + EXPECT_EQ(47, result.value()); +} + +TEST(PayloadTypePicker, RecordedValueExcluded) { + PayloadTypePicker picker; + PayloadTypeRecorder recorder1(picker); + PayloadTypeRecorder recorder2(picker); + Codec a_codec = CreateAudioCodec(-1, "lyra", 8000, 1); + Codec b_codec = CreateAudioCodec(-1, "mlcodec", 8000, 1); + recorder1.AddMapping(47, a_codec); + recorder2.AddMapping(47, b_codec); + auto result = picker.SuggestMapping(b_codec, &recorder1); + ASSERT_TRUE(result.ok()); + EXPECT_NE(47, result.value()); +} + +TEST(PayloadTypePicker, AudioGetsHigherRange) { + PayloadTypePicker picker; + Codec an_audio_codec = CreateAudioCodec(-1, "lyra", 8000, 1); + auto result = picker.SuggestMapping(an_audio_codec, nullptr).value(); + EXPECT_THAT(result, Ge(96)); +} + +TEST(PayloadTypePicker, AudioRedGetsLowerRange) { + PayloadTypePicker picker; + Codec an_audio_codec = CreateAudioCodec(-1, "red", 48000, 2); + auto result = picker.SuggestMapping(an_audio_codec, nullptr).value(); + EXPECT_THAT(result, Le(63)); +} + +TEST(PayloadTypePicker, VideoGetsTreatedSpecially) { + PayloadTypePicker picker; + Codec h264_constrained = CreateVideoCodec( + SdpVideoFormat(kH264CodecName, {{kH264FmtpProfileLevelId, "42e01f"}, + {kH264FmtpLevelAsymmetryAllowed, "1"}, + {kH264FmtpPacketizationMode, "1"}})); + Codec h264_yuv444 = CreateVideoCodec( + SdpVideoFormat(kH264CodecName, {{kH264FmtpProfileLevelId, "f4001f"}, + {kH264FmtpLevelAsymmetryAllowed, "1"}, + {kH264FmtpPacketizationMode, "1"}})); + Codec vp9_profile_2 = + CreateVideoCodec({kVp9CodecName, {{kVP9ProfileId, "2"}}}); + Codec vp9_profile_3 = + CreateVideoCodec({kVp9CodecName, {{kVP9ProfileId, "3"}}}); + Codec h265 = CreateVideoCodec( + SdpVideoFormat(kH265CodecName, {{kH265FmtpProfileId, "1"}, + {kH265FmtpTierFlag, "0"}, + {kH265FmtpLevelId, "93"}, + {kH265FmtpTxMode, "SRST"}})); + // Valid for high range only. + EXPECT_THAT(picker.SuggestMapping(h264_constrained, nullptr).value(), Ge(96)); + EXPECT_THAT(picker.SuggestMapping(vp9_profile_2, nullptr).value(), Ge(96)); + // Valid for lower range. + EXPECT_THAT(picker.SuggestMapping(h264_yuv444, nullptr).value(), Le(63)); + EXPECT_THAT(picker.SuggestMapping(vp9_profile_3, nullptr).value(), Le(63)); + EXPECT_THAT(picker.SuggestMapping(h265, nullptr).value(), Le(63)); + + // RTX with a primary codec in the lower range is valid for lower range. + Codec lower_range_rtx = CreateVideoRtxCodec(Codec::kIdNotSet, 63); + EXPECT_THAT(picker.SuggestMapping(lower_range_rtx, nullptr).value(), Le(63)); +} + +TEST(PayloadTypePicker, ChoosingH264Profiles) { + // No opinion on whether these are right or wrong, just that their + // behavior is consistent. 
+ PayloadTypePicker picker; + Codec h264_constrained = CreateVideoCodec( + SdpVideoFormat(kH264CodecName, {{kH264FmtpProfileLevelId, "42e01f"}, + {kH264FmtpLevelAsymmetryAllowed, "1"}, + {kH264FmtpPacketizationMode, "1"}})); + Codec h264_high_1f = CreateVideoCodec( + SdpVideoFormat(kH264CodecName, {{kH264FmtpProfileLevelId, "640c1f"}, + {kH264FmtpLevelAsymmetryAllowed, "1"}, + {kH264FmtpPacketizationMode, "1"}})); + Codec h264_high_2a = CreateVideoCodec( + SdpVideoFormat(kH264CodecName, {{kH264FmtpProfileLevelId, "640c2a"}, + {kH264FmtpLevelAsymmetryAllowed, "1"}, + {kH264FmtpPacketizationMode, "1"}})); + PayloadType pt_constrained = + picker.SuggestMapping(h264_constrained, nullptr).value(); + PayloadType pt_high_1f = picker.SuggestMapping(h264_high_1f, nullptr).value(); + PayloadType pt_high_2a = picker.SuggestMapping(h264_high_2a, nullptr).value(); + EXPECT_THAT(pt_constrained, Ne(pt_high_1f)); + EXPECT_THAT(pt_high_1f, Eq(pt_high_2a)); +} +} // namespace webrtc diff --git a/call/rampup_tests.cc b/call/rampup_tests.cc index 8ddce83a2e..5b121f01b5 100644 --- a/call/rampup_tests.cc +++ b/call/rampup_tests.cc @@ -10,27 +10,48 @@ #include "call/rampup_tests.h" +#include +#include #include +#include +#include +#include #include "absl/flags/flag.h" #include "absl/strings/string_view.h" +#include "api/field_trials_view.h" +#include "api/make_ref_counted.h" +#include "api/rtc_event_log/rtc_event_log.h" #include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/rtc_event_log_output_file.h" -#include "api/task_queue/default_task_queue_factory.h" +#include "api/rtp_parameters.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" -#include "api/task_queue/task_queue_factory.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/metrics/metric.h" +#include "api/test/simulated_network.h" +#include "api/transport/bitrate_settings.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/sdp_video_format.h" +#include "call/audio_receive_stream.h" +#include "call/audio_send_stream.h" +#include "call/call.h" #include "call/fake_network_pipe.h" +#include "call/flexfec_receive_stream.h" +#include "call/video_receive_stream.h" +#include "call/video_send_stream.h" #include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/platform_thread.h" #include "rtc_base/string_encode.h" #include "rtc_base/task_queue_for_test.h" -#include "rtc_base/time_utils.h" +#include "rtc_base/task_utils/repeating_task.h" +#include "test/call_test.h" #include "test/encoder_settings.h" #include "test/gtest.h" +#include "test/rtp_rtcp_observer.h" #include "test/video_test_constants.h" +#include "video/config/video_encoder_config.h" ABSL_FLAG(std::string, ramp_dump_name, @@ -108,7 +129,7 @@ void RampUpTester::ModifySenderBitrateConfig( void RampUpTester::OnVideoStreamsCreated( VideoSendStream* send_stream, - const std::vector& receive_streams) { + const std::vector& /* receive_streams */) { send_stream_ = send_stream; } @@ -135,6 +156,7 @@ class RampUpTester::VideoStreamFactory private: std::vector CreateEncoderStreams( + const FieldTrialsView& /*field_trials*/, int frame_width, int frame_height, const VideoEncoderConfig& encoder_config) override { @@ -155,7 +177,7 @@ void RampUpTester::ModifyVideoConfigs( encoder_config->number_of_streams = num_video_streams_; encoder_config->max_bitrate_bps = 2000000; encoder_config->video_stream_factory = - 
rtc::make_ref_counted(); + make_ref_counted(); if (num_video_streams_ == 1) { // For single stream rampup until 1mbps expected_bitrate_bps_ = kSingleStreamTargetBps; @@ -262,7 +284,8 @@ void RampUpTester::ModifyFlexfecConfigs( (*receive_configs)[0].rtp.local_ssrc = video_ssrcs_[0]; } -void RampUpTester::OnCallsCreated(Call* sender_call, Call* receiver_call) { +void RampUpTester::OnCallsCreated(Call* sender_call, + Call* /* receiver_call */) { RTC_DCHECK(sender_call); sender_call_ = sender_call; pending_task_ = RepeatingTaskHandle::Start(task_queue_, [this] { @@ -274,8 +297,8 @@ void RampUpTester::OnCallsCreated(Call* sender_call, Call* receiver_call) { void RampUpTester::OnTransportCreated( test::PacketTransport* to_receiver, SimulatedNetworkInterface* sender_network, - test::PacketTransport* to_sender, - SimulatedNetworkInterface* receiver_network) { + test::PacketTransport* /* to_sender */, + SimulatedNetworkInterface* /* receiver_network */) { RTC_DCHECK_RUN_ON(task_queue_); send_transport_ = to_receiver; @@ -329,7 +352,7 @@ void RampUpTester::TriggerTestDone() { RTC_DCHECK_GE(test_start_ms_, 0); // Stop polling stats. - // Corner case for field_trials=WebRTC-QuickPerfTest/Enabled/ + // Corner case for webrtc_quick_perf_test SendTask(task_queue_, [this] { pending_task_.Stop(); }); // TODO(holmer): Add audio send stats here too when those APIs are available. @@ -411,7 +434,8 @@ RampUpDownUpTester::RampUpDownUpTester(size_t num_video_streams, interval_start_ms_(clock_->TimeInMilliseconds()), sent_bytes_(0), loss_rates_(loss_rates) { - forward_transport_config_.link_capacity_kbps = link_rates_[test_state_]; + forward_transport_config_.link_capacity = + DataRate::KilobitsPerSec(link_rates_[test_state_]); forward_transport_config_.queue_delay_ms = 100; forward_transport_config_.loss_percent = loss_rates_[test_state_]; } @@ -449,13 +473,13 @@ void RampUpDownUpTester::ModifyReceiverBitrateConfig( std::string RampUpDownUpTester::GetModifierString() const { std::string str("_"); if (num_video_streams_ > 0) { - str += rtc::ToString(num_video_streams_); + str += absl::StrCat(num_video_streams_); str += "stream"; str += (num_video_streams_ > 1 ? "s" : ""); str += "_"; } if (num_audio_streams_ > 0) { - str += rtc::ToString(num_audio_streams_); + str += absl::StrCat(num_audio_streams_); str += "stream"; str += (num_audio_streams_ > 1 ? "s" : ""); str += "_"; @@ -550,7 +574,8 @@ void RampUpDownUpTester::EvolveTestState(int bitrate_bps, bool suspended) { case kTransitionToNextState: if (!ExpectingFec() || GetFecBytes() > 0) { test_state_ = next_state_; - forward_transport_config_.link_capacity_kbps = link_rates_[test_state_]; + forward_transport_config_.link_capacity = + DataRate::KilobitsPerSec(link_rates_[test_state_]); // No loss while ramping up and down as it may affect the BWE // negatively, making the test flaky. 
forward_transport_config_.loss_percent = 0; @@ -565,30 +590,29 @@ void RampUpDownUpTester::EvolveTestState(int bitrate_bps, bool suspended) { class RampUpTest : public test::CallTest { public: - RampUpTest() - : task_queue_factory_(CreateDefaultTaskQueueFactory()), - rtc_event_log_factory_(task_queue_factory_.get()) { + RampUpTest() { std::string dump_name(absl::GetFlag(FLAGS_ramp_dump_name)); if (!dump_name.empty()) { - send_event_log_ = rtc_event_log_factory_.CreateRtcEventLog( - RtcEventLog::EncodingType::Legacy); - recv_event_log_ = rtc_event_log_factory_.CreateRtcEventLog( - RtcEventLog::EncodingType::Legacy); + std::unique_ptr send_event_log = + rtc_event_log_factory_.Create(env()); + std::unique_ptr recv_event_log = + rtc_event_log_factory_.Create(env()); bool event_log_started = - send_event_log_->StartLogging( + send_event_log->StartLogging( std::make_unique( dump_name + ".send.rtc.dat", RtcEventLog::kUnlimitedOutput), RtcEventLog::kImmediateOutput) && - recv_event_log_->StartLogging( + recv_event_log->StartLogging( std::make_unique( dump_name + ".recv.rtc.dat", RtcEventLog::kUnlimitedOutput), RtcEventLog::kImmediateOutput); RTC_DCHECK(event_log_started); + SetSendEventLog(std::move(send_event_log)); + SetRecvEventLog(std::move(recv_event_log)); } } private: - const std::unique_ptr task_queue_factory_; RtcEventLogFactory rtc_event_log_factory_; }; diff --git a/call/rampup_tests.h b/call/rampup_tests.h index ba9989d25c..259aa79954 100644 --- a/call/rampup_tests.h +++ b/call/rampup_tests.h @@ -11,22 +11,27 @@ #ifndef CALL_RAMPUP_TESTS_H_ #define CALL_RAMPUP_TESTS_H_ +#include +#include #include -#include #include -#include #include #include "absl/strings/string_view.h" -#include "api/rtc_event_log/rtc_event_log.h" #include "api/task_queue/task_queue_base.h" #include "api/test/metrics/metric.h" #include "api/test/simulated_network.h" +#include "api/transport/bitrate_settings.h" +#include "call/audio_receive_stream.h" +#include "call/audio_send_stream.h" #include "call/call.h" -#include "call/simulated_network.h" -#include "rtc_base/event.h" +#include "call/flexfec_receive_stream.h" +#include "call/video_receive_stream.h" +#include "call/video_send_stream.h" #include "rtc_base/task_utils/repeating_task.h" #include "test/call_test.h" +#include "test/rtp_rtcp_observer.h" +#include "video/config/video_encoder_config.h" namespace webrtc { diff --git a/call/receive_stream.h b/call/receive_stream.h index 8a99059ec5..32678cbd8d 100644 --- a/call/receive_stream.h +++ b/call/receive_stream.h @@ -11,11 +11,12 @@ #ifndef CALL_RECEIVE_STREAM_H_ #define CALL_RECEIVE_STREAM_H_ +#include #include #include "api/crypto/frame_decryptor_interface.h" #include "api/frame_transformer_interface.h" -#include "api/media_types.h" +#include "api/rtp_headers.h" #include "api/scoped_refptr.h" #include "api/transport/rtp/rtp_source.h" @@ -58,13 +59,14 @@ class MediaReceiveStreamInterface : public ReceiveStreamInterface { virtual void Stop() = 0; virtual void SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr - frame_transformer) = 0; + scoped_refptr frame_transformer) = 0; virtual void SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) = 0; + scoped_refptr frame_decryptor) = 0; virtual std::vector GetSources() const = 0; + + virtual void SetRtcpMode(RtcpMode mode) = 0; }; } // namespace webrtc diff --git a/call/receive_time_calculator.cc b/call/receive_time_calculator.cc index 417168b15d..a7ac189f61 100644 --- a/call/receive_time_calculator.cc +++ b/call/receive_time_calculator.cc @@ -10,10 
+10,12 @@ #include "call/receive_time_calculator.h" +#include #include #include -#include +#include "api/field_trials_view.h" +#include "api/units/time_delta.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/safe_minmax.h" @@ -55,7 +57,7 @@ int64_t ReceiveTimeCalculator::ReconcileReceiveTimes(int64_t packet_time_us, int64_t safe_time_us) { int64_t stall_time_us = system_time_us - packet_time_us; if (total_system_time_passed_us_ < config_.stall_threshold->us()) { - stall_time_us = rtc::SafeMin(stall_time_us, config_.max_stall->us()); + stall_time_us = SafeMin(stall_time_us, config_.max_stall->us()); } int64_t corrected_time_us = safe_time_us - stall_time_us; @@ -105,8 +107,8 @@ int64_t ReceiveTimeCalculator::ReconcileReceiveTimes(int64_t packet_time_us, if (forward_clock_reset || obvious_backward_clock_reset || small_reset_during_stall_) { corrected_time_us = last_corrected_time_us_ + - rtc::SafeClamp(packet_time_delta_us, 0, - config_.max_packet_time_repair->us()); + SafeClamp(packet_time_delta_us, 0, + config_.max_packet_time_repair->us()); } } diff --git a/call/receive_time_calculator_unittest.cc b/call/receive_time_calculator_unittest.cc index f2e3d54f0c..0dd8e45545 100644 --- a/call/receive_time_calculator_unittest.cc +++ b/call/receive_time_calculator_unittest.cc @@ -15,9 +15,9 @@ #include #include #include +#include #include -#include "absl/types/optional.h" #include "rtc_base/random.h" #include "rtc_base/time_utils.h" #include "test/gtest.h" @@ -50,7 +50,7 @@ class EmulatedClock { private: int64_t clock_us_; - absl::optional last_query_us_; + std::optional last_query_us_; float drift_; float accumulated_drift_us_ = 0; }; @@ -93,12 +93,12 @@ class EmulatedMonotoneousClock : public EmulatedClock { bool Stalled() const { return stall_recovery_time_us_ > 0; } - int64_t GetRemainingStall(int64_t time_us) const { + int64_t GetRemainingStall(int64_t /* time_us */) const { return stall_recovery_time_us_ > 0 ? 
stall_recovery_time_us_ - GetClockUs() : 0; } - const int64_t kMaxStallDurationUs = rtc::kNumMicrosecsPerSec; + const int64_t kMaxStallDurationUs = kNumMicrosecsPerSec; private: const float kChanceOfStallPerUs = 5e-6f; @@ -159,9 +159,9 @@ class EmulatedNonMonotoneousClock : public EmulatedClock { private: const float kChanceOfResetPerUs = 1e-6f; - const int64_t kMaxAbsResetUs = rtc::kNumMicrosecsPerSec; - const int64_t kMinTimeBetweenResetsUs = 3 * rtc::kNumMicrosecsPerSec; - const int64_t kResolutionUs = rtc::kNumMicrosecsPerMillisec; + const int64_t kMaxAbsResetUs = kNumMicrosecsPerSec; + const int64_t kMinTimeBetweenResetsUs = 3 * kNumMicrosecsPerSec; + const int64_t kResolutionUs = kNumMicrosecsPerMillisec; int64_t last_reset_query_time_us_ = 0; int64_t last_reset_size_us_ = 0; std::vector pregenerated_clock_; @@ -172,13 +172,13 @@ TEST(ClockRepair, NoClockDrift) { webrtc::test::ScopedKeyValueConfig field_trials; const int kSeeds = 10; const int kFirstSeed = 1; - const int64_t kRuntimeUs = 10 * rtc::kNumMicrosecsPerSec; + const int64_t kRuntimeUs = 10 * kNumMicrosecsPerSec; const float kDrift = 0.0f; - const int64_t kMaxPacketInterarrivalUs = 50 * rtc::kNumMicrosecsPerMillisec; + const int64_t kMaxPacketInterarrivalUs = 50 * kNumMicrosecsPerMillisec; for (int seed = kFirstSeed; seed < kSeeds + kFirstSeed; ++seed) { EmulatedMonotoneousClock monotone_clock(seed); EmulatedNonMonotoneousClock non_monotone_clock( - seed + 1, kRuntimeUs + rtc::kNumMicrosecsPerSec, kDrift); + seed + 1, kRuntimeUs + kNumMicrosecsPerSec, kDrift); ReceiveTimeCalculator reception_time_tracker(field_trials); int64_t corrected_clock_0 = 0; int64_t reset_during_stall_tol_us = 0; @@ -233,11 +233,10 @@ TEST(ClockRepair, NoClockDrift) { // Resets during stalls may lead to small errors temporarily. int64_t lower_tol_us = accumulated_lower_bound_tolerance_us - reset_during_stall_tol_us - monotone_noise_us - - 2 * rtc::kNumMicrosecsPerMillisec; + 2 * kNumMicrosecsPerMillisec; EXPECT_GE(err, lower_tol_us); int64_t upper_tol_us = accumulated_upper_bound_tolerance_us + - monotone_noise_us + - 2 * rtc::kNumMicrosecsPerMillisec; + monotone_noise_us + 2 * kNumMicrosecsPerMillisec; EXPECT_LE(err, upper_tol_us); last_time_us = time_us; diff --git a/call/rtp_bitrate_configurator.cc b/call/rtp_bitrate_configurator.cc index 264dcdcb81..3e7912b6bc 100644 --- a/call/rtp_bitrate_configurator.cc +++ b/call/rtp_bitrate_configurator.cc @@ -11,7 +11,10 @@ #include "call/rtp_bitrate_configurator.h" #include +#include +#include "api/transport/bitrate_settings.h" +#include "api/units/data_rate.h" #include "rtc_base/checks.h" namespace { @@ -49,7 +52,7 @@ BitrateConstraints RtpBitrateConfigurator::GetConfig() const { return bitrate_config_; } -absl::optional +std::optional RtpBitrateConfigurator::UpdateWithSdpParameters( const BitrateConstraints& bitrate_config) { RTC_DCHECK_GE(bitrate_config.min_bitrate_bps, 0); @@ -58,7 +61,7 @@ RtpBitrateConfigurator::UpdateWithSdpParameters( RTC_DCHECK_GT(bitrate_config.max_bitrate_bps, 0); } - absl::optional new_start; + std::optional new_start; // Only update the "start" bitrate if it's set, and different from the old // value. 
In practice, this value comes from the x-google-start-bitrate codec // parameter in SDP, and setting the same remote description twice shouldn't @@ -72,7 +75,7 @@ RtpBitrateConfigurator::UpdateWithSdpParameters( return UpdateConstraints(new_start); } -absl::optional +std::optional RtpBitrateConfigurator::UpdateWithClientPreferences( const BitrateSettings& bitrate_mask) { bitrate_config_mask_ = bitrate_mask; @@ -80,17 +83,17 @@ RtpBitrateConfigurator::UpdateWithClientPreferences( } // Relay cap can change only max bitrate. -absl::optional RtpBitrateConfigurator::UpdateWithRelayCap( +std::optional RtpBitrateConfigurator::UpdateWithRelayCap( DataRate cap) { if (cap.IsFinite()) { RTC_DCHECK(!cap.IsZero()); } max_bitrate_over_relay_ = cap; - return UpdateConstraints(absl::nullopt); + return UpdateConstraints(std::nullopt); } -absl::optional RtpBitrateConfigurator::UpdateConstraints( - const absl::optional& new_start) { +std::optional RtpBitrateConfigurator::UpdateConstraints( + const std::optional& new_start) { BitrateConstraints updated; updated.min_bitrate_bps = std::max(bitrate_config_mask_.min_bitrate_bps.value_or(0), @@ -114,7 +117,7 @@ absl::optional RtpBitrateConfigurator::UpdateConstraints( if (updated.min_bitrate_bps == bitrate_config_.min_bitrate_bps && updated.max_bitrate_bps == bitrate_config_.max_bitrate_bps && !new_start) { - return absl::nullopt; + return std::nullopt; } if (new_start) { diff --git a/call/rtp_bitrate_configurator.h b/call/rtp_bitrate_configurator.h index 5cb779a3b3..754629db41 100644 --- a/call/rtp_bitrate_configurator.h +++ b/call/rtp_bitrate_configurator.h @@ -11,7 +11,8 @@ #ifndef CALL_RTP_BITRATE_CONFIGURATOR_H_ #define CALL_RTP_BITRATE_CONFIGURATOR_H_ -#include "absl/types/optional.h" +#include + #include "api/transport/bitrate_settings.h" #include "api/units/data_rate.h" @@ -36,7 +37,7 @@ class RtpBitrateConfigurator { // implemented. Passing -1 leaves the start bitrate unchanged. Behavior is not // guaranteed for other negative values or 0. // The optional return value is set with new configuration if it was updated. - absl::optional UpdateWithSdpParameters( + std::optional UpdateWithSdpParameters( const BitrateConstraints& bitrate_config_); // The greater min and smaller max set by this and SetSdpBitrateParameters @@ -45,17 +46,17 @@ class RtpBitrateConfigurator { // Assumes 0 <= min <= start <= max holds for set parameters. // Update the bitrate configuration // The optional return value is set with new configuration if it was updated. - absl::optional UpdateWithClientPreferences( + std::optional UpdateWithClientPreferences( const BitrateSettings& bitrate_mask); // Apply a cap for relayed calls. - absl::optional UpdateWithRelayCap(DataRate cap); + std::optional UpdateWithRelayCap(DataRate cap); private: // Applies update to the BitrateConstraints cached in `config_`, resetting // with `new_start` if set. - absl::optional UpdateConstraints( - const absl::optional& new_start); + std::optional UpdateConstraints( + const std::optional& new_start); // Bitrate config used until valid bitrate estimates are calculated. Also // used to cap total bitrate used. This comes from the remote connection. 
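// Illustrative sketch (not part of the change above): the std::optional
// return values are only populated when the merged constraints actually
// change, so callers can forward updates conditionally. The call site below
// is hypothetical.
#include <optional>

#include "api/transport/bitrate_settings.h"
#include "api/units/data_rate.h"
#include "call/rtp_bitrate_configurator.h"

namespace webrtc {
inline void MaybeApplyRelayCap(RtpBitrateConfigurator& configurator,
                               DataRate cap) {
  std::optional<BitrateConstraints> update =
      configurator.UpdateWithRelayCap(cap);
  if (!update.has_value()) {
    return;  // Nothing changed, e.g. the same cap was applied twice in a row.
  }
  // Forward *update to whatever consumes BitrateConstraints (omitted here).
}
}  // namespace webrtc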
diff --git a/call/rtp_bitrate_configurator_unittest.cc b/call/rtp_bitrate_configurator_unittest.cc index 6449a1a0f5..a034e2c048 100644 --- a/call/rtp_bitrate_configurator_unittest.cc +++ b/call/rtp_bitrate_configurator_unittest.cc @@ -10,11 +10,13 @@ #include "call/rtp_bitrate_configurator.h" #include +#include +#include "api/transport/bitrate_settings.h" #include "test/gtest.h" namespace webrtc { -using absl::nullopt; +using std::nullopt; class RtpBitrateConfiguratorTest : public ::testing::Test { public: @@ -22,10 +24,10 @@ class RtpBitrateConfiguratorTest : public ::testing::Test { : configurator_(new RtpBitrateConfigurator(BitrateConstraints())) {} std::unique_ptr configurator_; void UpdateConfigMatches(BitrateConstraints bitrate_config, - absl::optional min_bitrate_bps, - absl::optional start_bitrate_bps, - absl::optional max_bitrate_bps) { - absl::optional result = + std::optional min_bitrate_bps, + std::optional start_bitrate_bps, + std::optional max_bitrate_bps) { + std::optional result = configurator_->UpdateWithSdpParameters(bitrate_config); EXPECT_TRUE(result.has_value()); if (start_bitrate_bps.has_value()) @@ -37,10 +39,10 @@ class RtpBitrateConfiguratorTest : public ::testing::Test { } void UpdateMaskMatches(BitrateSettings bitrate_mask, - absl::optional min_bitrate_bps, - absl::optional start_bitrate_bps, - absl::optional max_bitrate_bps) { - absl::optional result = + std::optional min_bitrate_bps, + std::optional start_bitrate_bps, + std::optional max_bitrate_bps) { + std::optional result = configurator_->UpdateWithClientPreferences(bitrate_mask); EXPECT_TRUE(result.has_value()); if (start_bitrate_bps.has_value()) diff --git a/call/rtp_config.cc b/call/rtp_config.cc index 5457a94696..115912cd46 100644 --- a/call/rtp_config.cc +++ b/call/rtp_config.cc @@ -10,10 +10,17 @@ #include "call/rtp_config.h" +#include +#include #include +#include +#include +#include +#include #include "absl/algorithm/container.h" #include "api/array_view.h" +#include "api/rtp_headers.h" #include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" @@ -41,7 +48,7 @@ std::string LntfConfig::ToString() const { std::string NackConfig::ToString() const { char buf[1024]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); ss << "{rtp_history_ms: " << rtp_history_ms; ss << '}'; return ss.str(); @@ -49,7 +56,7 @@ std::string NackConfig::ToString() const { std::string UlpfecConfig::ToString() const { char buf[1024]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); ss << "{ulpfec_payload_type: " << ulpfec_payload_type; ss << ", red_payload_type: " << red_payload_type; ss << ", red_rtx_payload_type: " << red_rtx_payload_type; @@ -63,6 +70,30 @@ bool UlpfecConfig::operator==(const UlpfecConfig& other) const { red_rtx_payload_type == other.red_rtx_payload_type; } +std::string RtpStreamConfig::ToString() const { + char buf[1024]; + SimpleStringBuilder ss(buf); + ss << "{ssrc: " << ssrc; + ss << ", rid: " << rid; + ss << ", payload_name: " << payload_name; + ss << ", payload_type: " << payload_type; + ss << ", raw_payload: " << (raw_payload ? 
"true" : "false"); + if (rtx.has_value()) { + ss << ", rtx: " << rtx->ToString(); + } + ss << '}'; + return ss.str(); +} + +std::string RtpStreamConfig::Rtx::ToString() const { + char buf[1024]; + SimpleStringBuilder ss(buf); + ss << "{ssrc: " << ssrc; + ss << ", payload_type: " << payload_type; + ss << '}'; + return ss.str(); +} + RtpConfig::RtpConfig() = default; RtpConfig::RtpConfig(const RtpConfig&) = default; RtpConfig::~RtpConfig() = default; @@ -73,7 +104,7 @@ RtpConfig::Flexfec::~Flexfec() = default; std::string RtpConfig::ToString() const { char buf[2 * 1024]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); ss << "{ssrcs: ["; for (size_t i = 0; i < ssrcs.size(); ++i) { ss << ssrcs[i]; @@ -107,6 +138,14 @@ std::string RtpConfig::ToString() const { ss << ", payload_type: " << payload_type; ss << ", raw_payload: " << (raw_payload ? "true" : "false"); + ss << ", stream_configs: ["; + for (size_t i = 0; i < stream_configs.size(); ++i) { + ss << stream_configs[i].ToString(); + if (i != stream_configs.size() - 1) + ss << ", "; + } + ss << ']'; + ss << ", flexfec: {payload_type: " << flexfec.payload_type; ss << ", ssrc: " << flexfec.ssrc; ss << ", protected_media_ssrcs: ["; @@ -129,7 +168,7 @@ RtpConfig::Rtx::~Rtx() = default; std::string RtpConfig::Rtx::ToString() const { char buf[1024]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); ss << "{ssrcs: ["; for (size_t i = 0; i < ssrcs.size(); ++i) { ss << ssrcs[i]; @@ -155,12 +194,12 @@ bool RtpConfig::IsFlexfecSsrc(uint32_t ssrc) const { return flexfec.payload_type != -1 && ssrc == flexfec.ssrc; } -absl::optional RtpConfig::GetRtxSsrcAssociatedWithMediaSsrc( +std::optional RtpConfig::GetRtxSsrcAssociatedWithMediaSsrc( uint32_t media_ssrc) const { RTC_DCHECK(IsMediaSsrc(media_ssrc)); // If we don't use RTX there is no association. if (rtx.ssrcs.empty()) - return absl::nullopt; + return std::nullopt; // If we use RTX there MUST be an association ssrcs[i] <-> rtx.ssrcs[i]. RTC_DCHECK_EQ(ssrcs.size(), rtx.ssrcs.size()); return FindAssociatedSsrc(media_ssrc, ssrcs, rtx.ssrcs); @@ -189,7 +228,7 @@ uint32_t RtpConfig::GetMediaSsrcAssociatedWithFlexfecSsrc( return media_ssrc; } -absl::optional RtpConfig::GetRidForSsrc(uint32_t ssrc) const { +std::optional RtpConfig::GetRidForSsrc(uint32_t ssrc) const { auto it = std::find(ssrcs.begin(), ssrcs.end(), ssrc); if (it != ssrcs.end()) { size_t ssrc_index = std::distance(ssrcs.begin(), it); @@ -197,7 +236,34 @@ absl::optional RtpConfig::GetRidForSsrc(uint32_t ssrc) const { return rids[ssrc_index]; } } - return absl::nullopt; + return std::nullopt; +} + +RtpStreamConfig RtpConfig::GetStreamConfig(size_t index) const { + // GetStreamConfig function usually returns stream_configs[index], but if + // stream_configs is not initialized (i.e., index >= stream_configs.size()), + // it creates and returns an RtpStreamConfig using fields such as ssrcs, rids, + // payload_name, and payload_type from RtpConfig. 
+ RTC_DCHECK_LT(index, ssrcs.size()); + if (index < stream_configs.size()) { + return stream_configs[index]; + } + RtpStreamConfig stream_config; + stream_config.ssrc = ssrcs[index]; + if (index < rids.size()) { + stream_config.rid = rids[index]; + } + stream_config.payload_name = payload_name; + stream_config.payload_type = payload_type; + stream_config.raw_payload = raw_payload; + if (!rtx.ssrcs.empty()) { + RTC_DCHECK_EQ(ssrcs.size(), rtx.ssrcs.size()); + auto& stream_config_rtx = stream_config.rtx.emplace(); + stream_config_rtx.ssrc = rtx.ssrcs[index]; + stream_config_rtx.payload_type = rtx.payload_type; + } + + return stream_config; } } // namespace webrtc diff --git a/call/rtp_config.h b/call/rtp_config.h index a01a902ba9..d77289febc 100644 --- a/call/rtp_config.h +++ b/call/rtp_config.h @@ -14,10 +14,10 @@ #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/rtp_headers.h" #include "api/rtp_parameters.h" @@ -27,6 +27,7 @@ struct RtpPayloadState { int16_t picture_id = -1; uint8_t tl0_pic_idx = 0; int64_t shared_frame_id = 0; + int64_t frame_id = 0; }; // Settings for LNTF (LossNotification). Still highly experimental. @@ -67,6 +68,25 @@ struct UlpfecConfig { int red_rtx_payload_type; }; +struct RtpStreamConfig { + std::string ToString() const; + + uint32_t ssrc = 0; + std::string rid; + std::string payload_name; + int payload_type = -1; + bool raw_payload = false; + struct Rtx { + std::string ToString() const; + // SSRC to use for the RTX stream. + uint32_t ssrc = 0; + + // Payload type to use for the RTX stream. + int payload_type = -1; + }; + std::optional rtx; +}; + static const size_t kDefaultMaxPacketSize = 1500 - 40; // TCP over IPv4. struct RtpConfig { RtpConfig(); @@ -114,6 +134,9 @@ struct RtpConfig { // frame descriptor RTP header extension). bool raw_payload = false; + // Configurations for each RTP stream + std::vector stream_configs; + // See LntfConfig for description. LntfConfig lntf; @@ -165,11 +188,14 @@ struct RtpConfig { bool IsMediaSsrc(uint32_t ssrc) const; bool IsRtxSsrc(uint32_t ssrc) const; bool IsFlexfecSsrc(uint32_t ssrc) const; - absl::optional GetRtxSsrcAssociatedWithMediaSsrc( + std::optional GetRtxSsrcAssociatedWithMediaSsrc( uint32_t media_ssrc) const; uint32_t GetMediaSsrcAssociatedWithRtxSsrc(uint32_t rtx_ssrc) const; uint32_t GetMediaSsrcAssociatedWithFlexfecSsrc(uint32_t flexfec_ssrc) const; - absl::optional GetRidForSsrc(uint32_t ssrc) const; + std::optional GetRidForSsrc(uint32_t ssrc) const; + + // Returns send config for RTP stream by provided simulcast `index`. 
+ RtpStreamConfig GetStreamConfig(size_t index) const; }; } // namespace webrtc #endif // CALL_RTP_CONFIG_H_ diff --git a/call/rtp_demuxer.cc b/call/rtp_demuxer.cc index 0b74f2ac0a..b27661aced 100644 --- a/call/rtp_demuxer.cc +++ b/call/rtp_demuxer.cc @@ -10,11 +10,19 @@ #include "call/rtp_demuxer.h" +#include +#include +#include +#include +#include + #include "absl/strings/string_view.h" #include "call/rtp_packet_sink_interface.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" +#include "rtc_base/containers/flat_set.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" @@ -40,25 +48,14 @@ size_t RemoveFromMapByValue(Map* map, const Value& value) { return EraseIf(*map, [&](const auto& elem) { return elem.second == value; }); } -// Temp fix: MID in SDP is allowed to be slightly longer than what's allowed -// in the RTP demuxer. Truncate if needed; this won't match, but it only -// makes sense in places that wouldn't use this for matching anyway. -// TODO(bugs.webrtc.org/12517): remove when length 16 is policed by parser. -std::string CheckMidLength(absl::string_view mid) { - std::string new_mid(mid); - if (new_mid.length() > BaseRtpStringExtension::kMaxValueSizeBytes) { - RTC_LOG(LS_WARNING) << "`mid` attribute too long. Truncating."; - new_mid.resize(BaseRtpStringExtension::kMaxValueSizeBytes); - } - return new_mid; -} - } // namespace RtpDemuxerCriteria::RtpDemuxerCriteria( absl::string_view mid, absl::string_view rsid /*= absl::string_view()*/) - : mid_(CheckMidLength(mid)), rsid_(rsid) {} + : mid_(mid), rsid_(rsid) { + RTC_DCHECK(mid.length() <= BaseRtpStringExtension::kMaxValueSizeBytes); +} RtpDemuxerCriteria::RtpDemuxerCriteria() = default; RtpDemuxerCriteria::~RtpDemuxerCriteria() = default; @@ -73,7 +70,7 @@ bool RtpDemuxerCriteria::operator!=(const RtpDemuxerCriteria& other) const { } std::string RtpDemuxerCriteria::ToString() const { - rtc::StringBuilder sb; + StringBuilder sb; sb << "{mid: " << (mid_.empty() ? "" : mid_) << ", rsid: " << (rsid_.empty() ? "" : rsid_) << ", ssrcs: ["; @@ -93,7 +90,7 @@ std::string RtpDemuxerCriteria::ToString() const { // static std::string RtpDemuxer::DescribePacket(const RtpPacketReceived& packet) { - rtc::StringBuilder sb; + StringBuilder sb; sb << "PT=" << packet.PayloadType() << " SSRC=" << packet.Ssrc(); std::string mid; if (packet.GetExtension(&mid)) { @@ -255,6 +252,19 @@ bool RtpDemuxer::RemoveSink(const RtpPacketSinkInterface* sink) { return num_removed > 0; } +flat_set RtpDemuxer::GetSsrcsForSink( + const RtpPacketSinkInterface* sink) const { + flat_set ssrcs; + if (sink) { + for (const auto& it : sink_by_ssrc_) { + if (it.second == sink) { + ssrcs.insert(it.first); + } + } + } + return ssrcs; +} + bool RtpDemuxer::OnRtpPacket(const RtpPacketReceived& packet) { RtpPacketSinkInterface* sink = ResolveSink(packet); if (sink != nullptr) { diff --git a/call/rtp_demuxer.h b/call/rtp_demuxer.h index 53eeb0b6b6..ad0103eafb 100644 --- a/call/rtp_demuxer.h +++ b/call/rtp_demuxer.h @@ -11,10 +11,10 @@ #ifndef CALL_RTP_DEMUXER_H_ #define CALL_RTP_DEMUXER_H_ +#include #include #include #include -#include #include "absl/strings/string_view.h" #include "rtc_base/containers/flat_map.h" @@ -151,6 +151,9 @@ class RtpDemuxer { // Null pointer is not allowed. bool RemoveSink(const RtpPacketSinkInterface* sink); + // Returns the set of SSRCs associated with a sink. 
+ flat_set GetSsrcsForSink(const RtpPacketSinkInterface* sink) const; + // Demuxes the given packet and forwards it to the chosen sink. Returns true // if the packet was forwarded and false if the packet was dropped. bool OnRtpPacket(const RtpPacketReceived& packet); diff --git a/call/rtp_demuxer_unittest.cc b/call/rtp_demuxer_unittest.cc index e85052810a..e2702267b4 100644 --- a/call/rtp_demuxer_unittest.cc +++ b/call/rtp_demuxer_unittest.cc @@ -10,13 +10,14 @@ #include "call/rtp_demuxer.h" +#include +#include #include #include #include #include "absl/strings/string_view.h" #include "call/test/mock_rtp_packet_sink_interface.h" -#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/arraysize.h" @@ -309,8 +310,7 @@ TEST_F(RtpDemuxerTest, OnRtpPacketCalledOnCorrectSinkByRsid) { } for (size_t i = 0; i < arraysize(rsids); i++) { - auto packet = - CreatePacketWithSsrcRsid(rtc::checked_cast(i), rsids[i]); + auto packet = CreatePacketWithSsrcRsid(checked_cast(i), rsids[i]); EXPECT_CALL(sinks[i], OnRtpPacket(SamePacketAs(*packet))).Times(1); EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); } @@ -324,8 +324,7 @@ TEST_F(RtpDemuxerTest, OnRtpPacketCalledOnCorrectSinkByMid) { } for (size_t i = 0; i < arraysize(mids); i++) { - auto packet = - CreatePacketWithSsrcMid(rtc::checked_cast(i), mids[i]); + auto packet = CreatePacketWithSsrcMid(checked_cast(i), mids[i]); EXPECT_CALL(sinks[i], OnRtpPacket(SamePacketAs(*packet))).Times(1); EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); } @@ -379,7 +378,7 @@ TEST_F(RtpDemuxerTest, PacketsDeliveredInRightOrder) { std::unique_ptr packets[5]; for (size_t i = 0; i < arraysize(packets); i++) { packets[i] = CreatePacketWithSsrc(ssrc); - packets[i]->SetSequenceNumber(rtc::checked_cast(i)); + packets[i]->SetSequenceNumber(checked_cast(i)); } InSequence sequence; @@ -1238,26 +1237,14 @@ TEST_F(RtpDemuxerTest, PacketWithMidAndUnknownRsidIsNotRoutedByPayloadType) { EXPECT_FALSE(demuxer_.OnRtpPacket(*packet)); } -TEST_F(RtpDemuxerTest, MidMustNotExceedMaximumLength) { +#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) + +TEST_F(RtpDemuxerDeathTest, MidMustNotExceedMaximumLength) { MockRtpPacketSink sink1; std::string mid1(BaseRtpStringExtension::kMaxValueSizeBytes + 1, 'a'); - // Adding the sink should pass even though the supplied mid is too long. - // The mid will be truncated though. - EXPECT_TRUE(AddSinkOnlyMid(mid1, &sink1)); - - // Adding a second sink with a mid that matches the truncated mid that was - // just added, should fail. - MockRtpPacketSink sink2; - std::string mid2(mid1.substr(0, BaseRtpStringExtension::kMaxValueSizeBytes)); - EXPECT_FALSE(AddSinkOnlyMid(mid2, &sink2)); - EXPECT_FALSE(RemoveSink(&sink2)); - - // Remove the original sink. 
- EXPECT_TRUE(RemoveSink(&sink1)); + EXPECT_DEATH(AddSinkOnlyMid(mid1, &sink1), ""); } -#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) - TEST_F(RtpDemuxerDeathTest, CriteriaMustBeNonEmpty) { MockRtpPacketSink sink; RtpDemuxerCriteria criteria; diff --git a/call/rtp_payload_params.cc b/call/rtp_payload_params.cc index f4b09ce913..2941f6375c 100644 --- a/call/rtp_payload_params.cc +++ b/call/rtp_payload_params.cc @@ -13,16 +13,29 @@ #include #include +#include +#include #include "absl/container/inlined_vector.h" #include "absl/strings/match.h" -#include "absl/types/variant.h" +#include "api/field_trials_view.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/encoded_image.h" +#include "api/video/render_resolution.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "api/video/video_timing.h" +#include "call/rtp_config.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" #include "modules/video_coding/codecs/interface/common_constants.h" #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/frame_dependencies_calculator.h" +#include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -35,10 +48,11 @@ namespace { constexpr int kMaxSimulatedSpatialLayers = 3; void PopulateRtpWithCodecSpecifics(const CodecSpecificInfo& info, - absl::optional spatial_index, + std::optional spatial_index, RTPVideoHeader* rtp) { rtp->codec = info.codecType; rtp->is_last_frame_in_picture = info.end_of_picture; + rtp->frame_instrumentation_data = info.frame_instrumentation_data; switch (info.codecType) { case kVideoCodecVP8: { auto& vp8_header = rtp->video_type_header.emplace(); @@ -96,11 +110,10 @@ void PopulateRtpWithCodecSpecifics(const CodecSpecificInfo& info, info.codecSpecific.H264.packetization_mode; return; } - case kVideoCodecMultiplex: + // These codec types do not have codec-specifics. case kVideoCodecGeneric: - rtp->codec = kVideoCodecGeneric; - return; - default: + case kVideoCodecH265: + case kVideoCodecAV1: return; } } @@ -178,16 +191,17 @@ RtpPayloadParams::RtpPayloadParams(const uint32_t ssrc, simulate_generic_structure_(absl::StartsWith( trials.Lookup("WebRTC-GenericCodecDependencyDescriptor"), "Enabled")) { - for (auto& spatial_layer : last_shared_frame_id_) + for (auto& spatial_layer : last_frame_id_) spatial_layer.fill(-1); chain_last_frame_id_.fill(-1); buffer_id_to_frame_id_.fill(-1); - Random random(rtc::TimeMicros()); + Random random(TimeMicros()); state_.picture_id = state ? state->picture_id : (random.Rand() & 0x7FFF); state_.tl0_pic_idx = state ? state->tl0_pic_idx : (random.Rand()); + state_.frame_id = state ? 
state->frame_id : random.Rand(); } RtpPayloadParams::RtpPayloadParams(const RtpPayloadParams& other) = default; @@ -197,7 +211,14 @@ RtpPayloadParams::~RtpPayloadParams() {} RTPVideoHeader RtpPayloadParams::GetRtpVideoHeader( const EncodedImage& image, const CodecSpecificInfo* codec_specific_info, - int64_t shared_frame_id) { + std::optional shared_frame_id) { + int64_t frame_id; + if (shared_frame_id) { + frame_id = *shared_frame_id; + } else { + frame_id = state_.frame_id++; + } + RTPVideoHeader rtp_video_header; if (codec_specific_info) { PopulateRtpWithCodecSpecifics(*codec_specific_info, image.SpatialIndex(), @@ -211,8 +232,8 @@ RTPVideoHeader RtpPayloadParams::GetRtpVideoHeader( rtp_video_header.width = image._encodedWidth; rtp_video_header.height = image._encodedHeight; rtp_video_header.color_space = image.ColorSpace() - ? absl::make_optional(*image.ColorSpace()) - : absl::nullopt; + ? std::make_optional(*image.ColorSpace()) + : std::nullopt; rtp_video_header.video_frame_tracking_id = image.VideoFrameTrackingId(); SetVideoTiming(image, &rtp_video_header.video_timing); @@ -224,8 +245,7 @@ RTPVideoHeader RtpPayloadParams::GetRtpVideoHeader( SetCodecSpecific(&rtp_video_header, first_frame_in_picture); - SetGeneric(codec_specific_info, shared_frame_id, is_keyframe, - &rtp_video_header); + SetGeneric(codec_specific_info, frame_id, is_keyframe, &rtp_video_header); return rtp_video_header; } @@ -246,7 +266,7 @@ void RtpPayloadParams::SetCodecSpecific(RTPVideoHeader* rtp_video_header, } if (rtp_video_header->codec == kVideoCodecVP8) { auto& vp8_header = - absl::get(rtp_video_header->video_type_header); + std::get(rtp_video_header->video_type_header); vp8_header.pictureId = state_.picture_id; if (vp8_header.temporalIdx != kNoTemporalIdx) { @@ -258,7 +278,7 @@ void RtpPayloadParams::SetCodecSpecific(RTPVideoHeader* rtp_video_header, } if (rtp_video_header->codec == kVideoCodecVP9) { auto& vp9_header = - absl::get(rtp_video_header->video_type_header); + std::get(rtp_video_header->video_type_header); vp9_header.picture_id = state_.picture_id; // Note that in the case that we have no temporal layers but we do have @@ -331,7 +351,8 @@ void RtpPayloadParams::SetGeneric(const CodecSpecificInfo* codec_specific_info, } return; case VideoCodecType::kVideoCodecAV1: - // TODO(philipel): Implement AV1 to generic descriptor. + // Codec-specifics is not supported for AV1. We convert from the + // generic_frame_info. return; case VideoCodecType::kVideoCodecH264: if (codec_specific_info) { @@ -339,16 +360,18 @@ void RtpPayloadParams::SetGeneric(const CodecSpecificInfo* codec_specific_info, is_keyframe, rtp_video_header); } return; - case VideoCodecType::kVideoCodecMultiplex: + case VideoCodecType::kVideoCodecH265: + // Codec-specifics is not supported for H.265. We convert from the + // generic_frame_info. return; } RTC_DCHECK_NOTREACHED() << "Unsupported codec."; } -absl::optional RtpPayloadParams::GenericStructure( +std::optional RtpPayloadParams::GenericStructure( const CodecSpecificInfo* codec_specific_info) { if (codec_specific_info == nullptr) { - return absl::nullopt; + return std::nullopt; } // This helper shouldn't be used when template structure is specified // explicetly. 
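[Editor's note on the rtp_payload_params.cc change above] GetRtpVideoHeader now takes an optional shared frame id: when the caller supplies one it is used directly, otherwise the id is drawn from the persisted RtpPayloadState counter (state_.frame_id++), as exercised later by the GenerateFrameIdWhenExternalFrameIdsAreNotProvided test. Below is a simplified sketch of just that fallback, using stand-in types rather than the real RTPVideoHeader plumbing.

#include <cstdint>
#include <optional>

// Stand-in for the frame-id portion of webrtc::RtpPayloadState.
struct PayloadStateSketch {
  int64_t frame_id = 0;
};

// Mirrors only the `frame_id = *shared_frame_id` vs `state_.frame_id++`
// selection shown in the diff; all other header population is omitted.
int64_t ResolveFrameId(std::optional<int64_t> shared_frame_id,
                       PayloadStateSketch& state) {
  if (shared_frame_id.has_value()) {
    // An externally provided id (e.g. a counter shared across streams) wins.
    return *shared_frame_id;
  }
  // Otherwise consume and advance the locally persisted counter.
  return state.frame_id++;
}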
@@ -357,17 +380,16 @@ absl::optional RtpPayloadParams::GenericStructure( case VideoCodecType::kVideoCodecGeneric: if (simulate_generic_structure_) { return MinimalisticStructure(/*num_spatial_layers=*/1, - /*num_temporal_layer=*/1); + /*num_temporal_layers=*/1); } - return absl::nullopt; + return std::nullopt; case VideoCodecType::kVideoCodecVP8: return MinimalisticStructure(/*num_spatial_layers=*/1, - /*num_temporal_layer=*/kMaxTemporalStreams); + /*num_temporal_layers=*/kMaxTemporalStreams); case VideoCodecType::kVideoCodecVP9: { - absl::optional structure = - MinimalisticStructure( - /*num_spatial_layers=*/kMaxSimulatedSpatialLayers, - /*num_temporal_layer=*/kMaxTemporalStreams); + std::optional structure = MinimalisticStructure( + /*num_spatial_layers=*/kMaxSimulatedSpatialLayers, + /*num_temporal_layers=*/kMaxTemporalStreams); const CodecSpecificInfoVP9& vp9 = codec_specific_info->codecSpecific.VP9; if (vp9.ss_data_available && vp9.spatial_layer_resolution_present) { RenderResolution first_valid; @@ -400,39 +422,42 @@ absl::optional RtpPayloadParams::GenericStructure( } return structure; } - case VideoCodecType::kVideoCodecAV1: case VideoCodecType::kVideoCodecH264: - case VideoCodecType::kVideoCodecMultiplex: - return absl::nullopt; + return MinimalisticStructure( + /*num_spatial_layers=*/1, + /*num_temporal_layers=*/kMaxTemporalStreams); + case VideoCodecType::kVideoCodecAV1: + case VideoCodecType::kVideoCodecH265: + return std::nullopt; } RTC_DCHECK_NOTREACHED() << "Unsupported codec."; } -void RtpPayloadParams::GenericToGeneric(int64_t shared_frame_id, +void RtpPayloadParams::GenericToGeneric(int64_t frame_id, bool is_keyframe, RTPVideoHeader* rtp_video_header) { RTPVideoHeader::GenericDescriptorInfo& generic = rtp_video_header->generic.emplace(); - generic.frame_id = shared_frame_id; + generic.frame_id = frame_id; generic.decode_target_indications.push_back(DecodeTargetIndication::kSwitch); if (is_keyframe) { generic.chain_diffs.push_back(0); - last_shared_frame_id_[0].fill(-1); + last_frame_id_[0].fill(-1); } else { - int64_t frame_id = last_shared_frame_id_[0][0]; - RTC_DCHECK_NE(frame_id, -1); - RTC_DCHECK_LT(frame_id, shared_frame_id); - generic.chain_diffs.push_back(shared_frame_id - frame_id); - generic.dependencies.push_back(frame_id); + int64_t last_frame_id = last_frame_id_[0][0]; + RTC_DCHECK_NE(last_frame_id, -1); + RTC_DCHECK_LT(last_frame_id, frame_id); + generic.chain_diffs.push_back(frame_id - last_frame_id); + generic.dependencies.push_back(last_frame_id); } - last_shared_frame_id_[0][0] = shared_frame_id; + last_frame_id_[0][0] = frame_id; } void RtpPayloadParams::H264ToGeneric(const CodecSpecificInfoH264& h264_info, - int64_t shared_frame_id, + int64_t frame_id, bool is_keyframe, RTPVideoHeader* rtp_video_header) { const int temporal_index = @@ -447,49 +472,60 @@ void RtpPayloadParams::H264ToGeneric(const CodecSpecificInfoH264& h264_info, RTPVideoHeader::GenericDescriptorInfo& generic = rtp_video_header->generic.emplace(); - generic.frame_id = shared_frame_id; + generic.frame_id = frame_id; generic.temporal_index = temporal_index; + // Generate decode target indications. 
+ RTC_DCHECK_LT(temporal_index, kMaxTemporalStreams); + generic.decode_target_indications.resize(kMaxTemporalStreams); + auto it = std::fill_n(generic.decode_target_indications.begin(), + temporal_index, DecodeTargetIndication::kNotPresent); + std::fill(it, generic.decode_target_indications.end(), + DecodeTargetIndication::kSwitch); + generic.chain_diffs = { + (is_keyframe || last_frame_id_[0][0] < 0) + ? 0 + : static_cast(frame_id - last_frame_id_[0][0])}; + if (is_keyframe) { RTC_DCHECK_EQ(temporal_index, 0); - last_shared_frame_id_[/*spatial index*/ 0].fill(-1); - last_shared_frame_id_[/*spatial index*/ 0][temporal_index] = - shared_frame_id; + last_frame_id_[/*spatial index*/ 0].fill(-1); + last_frame_id_[/*spatial index*/ 0][temporal_index] = frame_id; return; } if (h264_info.base_layer_sync) { - int64_t tl0_frame_id = last_shared_frame_id_[/*spatial index*/ 0][0]; + int64_t tl0_frame_id = last_frame_id_[/*spatial index*/ 0][0]; for (int i = 1; i < RtpGenericFrameDescriptor::kMaxTemporalLayers; ++i) { - if (last_shared_frame_id_[/*spatial index*/ 0][i] < tl0_frame_id) { - last_shared_frame_id_[/*spatial index*/ 0][i] = -1; + if (last_frame_id_[/*spatial index*/ 0][i] < tl0_frame_id) { + last_frame_id_[/*spatial index*/ 0][i] = -1; } } RTC_DCHECK_GE(tl0_frame_id, 0); - RTC_DCHECK_LT(tl0_frame_id, shared_frame_id); + RTC_DCHECK_LT(tl0_frame_id, frame_id); generic.dependencies.push_back(tl0_frame_id); } else { for (int i = 0; i <= temporal_index; ++i) { - int64_t frame_id = last_shared_frame_id_[/*spatial index*/ 0][i]; + int64_t last_frame_id = last_frame_id_[/*spatial index*/ 0][i]; - if (frame_id != -1) { - RTC_DCHECK_LT(frame_id, shared_frame_id); - generic.dependencies.push_back(frame_id); + if (last_frame_id != -1) { + RTC_DCHECK_LT(last_frame_id, frame_id); + generic.dependencies.push_back(last_frame_id); } } } - last_shared_frame_id_[/*spatial_index*/ 0][temporal_index] = shared_frame_id; + last_frame_id_[/*spatial_index*/ 0][temporal_index] = frame_id; } void RtpPayloadParams::Vp8ToGeneric(const CodecSpecificInfoVP8& vp8_info, - int64_t shared_frame_id, + int64_t frame_id, bool is_keyframe, RTPVideoHeader* rtp_video_header) { const auto& vp8_header = - absl::get(rtp_video_header->video_type_header); + std::get(rtp_video_header->video_type_header); const int spatial_index = 0; const int temporal_index = vp8_header.temporalIdx != kNoTemporalIdx ? vp8_header.temporalIdx : 0; @@ -504,7 +540,7 @@ void RtpPayloadParams::Vp8ToGeneric(const CodecSpecificInfoVP8& vp8_info, RTPVideoHeader::GenericDescriptorInfo& generic = rtp_video_header->generic.emplace(); - generic.frame_id = shared_frame_id; + generic.frame_id = frame_id; generic.spatial_index = spatial_index; generic.temporal_index = temporal_index; @@ -518,29 +554,29 @@ void RtpPayloadParams::Vp8ToGeneric(const CodecSpecificInfoVP8& vp8_info, // Frame dependencies. if (vp8_info.useExplicitDependencies) { - SetDependenciesVp8New(vp8_info, shared_frame_id, is_keyframe, - vp8_header.layerSync, &generic); + SetDependenciesVp8New(vp8_info, frame_id, is_keyframe, vp8_header.layerSync, + &generic); } else { - SetDependenciesVp8Deprecated(vp8_info, shared_frame_id, is_keyframe, - spatial_index, temporal_index, - vp8_header.layerSync, &generic); + SetDependenciesVp8Deprecated(vp8_info, frame_id, is_keyframe, spatial_index, + temporal_index, vp8_header.layerSync, + &generic); } // Calculate chains. generic.chain_diffs = { (is_keyframe || chain_last_frame_id_[0] < 0) ? 
0 - : static_cast(shared_frame_id - chain_last_frame_id_[0])}; + : static_cast(frame_id - chain_last_frame_id_[0])}; if (temporal_index == 0) { - chain_last_frame_id_[0] = shared_frame_id; + chain_last_frame_id_[0] = frame_id; } } -void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, - int64_t shared_frame_id, +void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& /* vp9_info */, + int64_t frame_id, RTPVideoHeader& rtp_video_header) { const auto& vp9_header = - absl::get(rtp_video_header.video_type_header); + std::get(rtp_video_header.video_type_header); const int num_spatial_layers = kMaxSimulatedSpatialLayers; const int first_active_spatial_id = vp9_header.first_active_layer; const int last_active_spatial_id = vp9_header.num_spatial_layers - 1; @@ -573,7 +609,7 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, RTPVideoHeader::GenericDescriptorInfo& result = rtp_video_header.generic.emplace(); - result.frame_id = shared_frame_id; + result.frame_id = frame_id; result.spatial_index = spatial_index; result.temporal_index = temporal_index; @@ -629,7 +665,7 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, } } last_vp9_frame_id_[vp9_header.picture_id % kPictureDiffLimit] - [spatial_index] = shared_frame_id; + [spatial_index] = frame_id; } else { // Implementing general conversion logic for non-flexible mode requires some // work and we will almost certainly never need it, so for now support only @@ -645,7 +681,7 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, // last frame id. result.dependencies.push_back(last_vp9_frame_id_[0][0]); } - last_vp9_frame_id_[0][0] = shared_frame_id; + last_vp9_frame_id_[0][0] = frame_id; } result.active_decode_targets = @@ -667,11 +703,11 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, result.chain_diffs[sid] = 0; continue; } - int64_t chain_diff = shared_frame_id - chain_last_frame_id_[sid]; + int64_t chain_diff = frame_id - chain_last_frame_id_[sid]; if (chain_diff >= 256) { RTC_LOG(LS_ERROR) << "Too many frames since last VP9 T0 frame for spatial layer #" - << sid << " at frame#" << shared_frame_id; + << sid << " at frame#" << frame_id; chain_last_frame_id_[sid] = -1; chain_diff = 0; } @@ -679,10 +715,10 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, } if (temporal_index == 0) { - chain_last_frame_id_[spatial_index] = shared_frame_id; + chain_last_frame_id_[spatial_index] = frame_id; if (!vp9_header.non_ref_for_inter_layer_pred) { for (int sid = spatial_index + 1; sid <= last_active_spatial_id; ++sid) { - chain_last_frame_id_[sid] = shared_frame_id; + chain_last_frame_id_[sid] = frame_id; } } } @@ -690,7 +726,7 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, void RtpPayloadParams::SetDependenciesVp8Deprecated( const CodecSpecificInfoVP8& vp8_info, - int64_t shared_frame_id, + int64_t frame_id, bool is_keyframe, int spatial_index, int temporal_index, @@ -702,42 +738,42 @@ void RtpPayloadParams::SetDependenciesVp8Deprecated( if (is_keyframe) { RTC_DCHECK_EQ(temporal_index, 0); - last_shared_frame_id_[spatial_index].fill(-1); - last_shared_frame_id_[spatial_index][temporal_index] = shared_frame_id; + last_frame_id_[spatial_index].fill(-1); + last_frame_id_[spatial_index][temporal_index] = frame_id; return; } if (layer_sync) { - int64_t tl0_frame_id = last_shared_frame_id_[spatial_index][0]; + int64_t tl0_frame_id = last_frame_id_[spatial_index][0]; for (int i = 1; 
i < RtpGenericFrameDescriptor::kMaxTemporalLayers; ++i) { - if (last_shared_frame_id_[spatial_index][i] < tl0_frame_id) { - last_shared_frame_id_[spatial_index][i] = -1; + if (last_frame_id_[spatial_index][i] < tl0_frame_id) { + last_frame_id_[spatial_index][i] = -1; } } RTC_DCHECK_GE(tl0_frame_id, 0); - RTC_DCHECK_LT(tl0_frame_id, shared_frame_id); + RTC_DCHECK_LT(tl0_frame_id, frame_id); generic->dependencies.push_back(tl0_frame_id); } else { for (int i = 0; i <= temporal_index; ++i) { - int64_t frame_id = last_shared_frame_id_[spatial_index][i]; + int64_t last_frame_id = last_frame_id_[spatial_index][i]; - if (frame_id != -1) { - RTC_DCHECK_LT(frame_id, shared_frame_id); - generic->dependencies.push_back(frame_id); + if (last_frame_id != -1) { + RTC_DCHECK_LT(last_frame_id, frame_id); + generic->dependencies.push_back(last_frame_id); } } } - last_shared_frame_id_[spatial_index][temporal_index] = shared_frame_id; + last_frame_id_[spatial_index][temporal_index] = frame_id; } void RtpPayloadParams::SetDependenciesVp8New( const CodecSpecificInfoVP8& vp8_info, - int64_t shared_frame_id, + int64_t frame_id, bool is_keyframe, - bool layer_sync, + bool /* layer_sync */, RTPVideoHeader::GenericDescriptorInfo* generic) { RTC_DCHECK(vp8_info.useExplicitDependencies); RTC_DCHECK(!new_version_used_.has_value() || new_version_used_.value()); @@ -745,7 +781,7 @@ void RtpPayloadParams::SetDependenciesVp8New( if (is_keyframe) { RTC_DCHECK_EQ(vp8_info.referencedBuffersCount, 0u); - buffer_id_to_frame_id_.fill(shared_frame_id); + buffer_id_to_frame_id_.fill(frame_id); return; } @@ -763,7 +799,7 @@ void RtpPayloadParams::SetDependenciesVp8New( const int64_t dependency_frame_id = buffer_id_to_frame_id_[referenced_buffer]; RTC_DCHECK_GE(dependency_frame_id, 0); - RTC_DCHECK_LT(dependency_frame_id, shared_frame_id); + RTC_DCHECK_LT(dependency_frame_id, frame_id); const bool is_new_dependency = std::find(generic->dependencies.begin(), generic->dependencies.end(), @@ -776,7 +812,7 @@ void RtpPayloadParams::SetDependenciesVp8New( RTC_DCHECK_LE(vp8_info.updatedBuffersCount, kBuffersCountVp8); for (size_t i = 0; i < vp8_info.updatedBuffersCount; ++i) { const size_t updated_id = vp8_info.updatedBuffers[i]; - buffer_id_to_frame_id_[updated_id] = shared_frame_id; + buffer_id_to_frame_id_[updated_id] = frame_id; } RTC_DCHECK_LE(buffer_id_to_frame_id_.size(), kBuffersCountVp8); diff --git a/call/rtp_payload_params.h b/call/rtp_payload_params.h index 5feee11ab0..ea585c0cd5 100644 --- a/call/rtp_payload_params.h +++ b/call/rtp_payload_params.h @@ -12,12 +12,17 @@ #define CALL_RTP_PAYLOAD_PARAMS_H_ #include +#include +#include +#include #include -#include "absl/types/optional.h" #include "api/field_trials_view.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/encoded_image.h" #include "api/video_codecs/video_encoder.h" #include "call/rtp_config.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/chain_diff_calculator.h" @@ -38,11 +43,11 @@ class RtpPayloadParams final { RTPVideoHeader GetRtpVideoHeader(const EncodedImage& image, const CodecSpecificInfo* codec_specific_info, - int64_t shared_frame_id); + std::optional shared_frame_id); // Returns structure that aligns with simulated generic info generated by // `GetRtpVideoHeader` for the `codec_specific_info` - absl::optional GenericStructure( + std::optional 
GenericStructure( const CodecSpecificInfo* codec_specific_info); uint32_t ssrc() const; @@ -61,20 +66,20 @@ class RtpPayloadParams final { RTPVideoHeader* rtp_video_header); void Vp8ToGeneric(const CodecSpecificInfoVP8& vp8_info, - int64_t shared_frame_id, + int64_t frame_id, bool is_keyframe, RTPVideoHeader* rtp_video_header); void Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, - int64_t shared_frame_id, + int64_t frame_id, RTPVideoHeader& rtp_video_header); void H264ToGeneric(const CodecSpecificInfoH264& h264_info, - int64_t shared_frame_id, + int64_t frame_id, bool is_keyframe, RTPVideoHeader* rtp_video_header); - void GenericToGeneric(int64_t shared_frame_id, + void GenericToGeneric(int64_t frame_id, bool is_keyframe, RTPVideoHeader* rtp_video_header); @@ -83,14 +88,14 @@ class RtpPayloadParams final { // wrappers have been updated. void SetDependenciesVp8Deprecated( const CodecSpecificInfoVP8& vp8_info, - int64_t shared_frame_id, + int64_t frame_id, bool is_keyframe, int spatial_index, int temporal_index, bool layer_sync, RTPVideoHeader::GenericDescriptorInfo* generic); void SetDependenciesVp8New(const CodecSpecificInfoVP8& vp8_info, - int64_t shared_frame_id, + int64_t frame_id, bool is_keyframe, bool layer_sync, RTPVideoHeader::GenericDescriptorInfo* generic); @@ -101,7 +106,7 @@ class RtpPayloadParams final { // Holds the last shared frame id for a given (spatial, temporal) layer. std::array, RtpGenericFrameDescriptor::kMaxSpatialLayers> - last_shared_frame_id_; + last_frame_id_; // circular buffer of frame ids for the last 128 vp9 pictures. // ids for the `picture_id` are stored at the index `picture_id % 128`. std::vector> @@ -122,7 +127,7 @@ class RtpPayloadParams final { // that, for a given object, we either always use // SetDependenciesVp8Deprecated(), or always use SetDependenciesVp8New(). // TODO(bugs.webrtc.org/10242): Remove. 
- absl::optional new_version_used_; + std::optional new_version_used_; const uint32_t ssrc_; RtpPayloadState state_; diff --git a/call/rtp_payload_params_unittest.cc b/call/rtp_payload_params_unittest.cc index 45f00061ee..1f3214454b 100644 --- a/call/rtp_payload_params_unittest.cc +++ b/call/rtp_payload_params_unittest.cc @@ -10,23 +10,31 @@ #include "call/rtp_payload_params.h" -#include - +#include #include +#include #include +#include +#include #include "absl/container/inlined_vector.h" -#include "absl/types/optional.h" -#include "absl/types/variant.h" #include "api/transport/field_trial_based_config.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/color_space.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" #include "api/video/video_content_type.h" +#include "api/video/video_frame_type.h" #include "api/video/video_rotation.h" -#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "call/rtp_config.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/codecs/interface/common_constants.h" #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/include/video_codec_interface.h" -#include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/scoped_key_value_config.h" @@ -37,6 +45,7 @@ namespace { using ::testing::AllOf; using ::testing::Each; using ::testing::ElementsAre; +using ::testing::ElementsAreArray; using ::testing::Eq; using ::testing::Field; using ::testing::IsEmpty; @@ -87,7 +96,7 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp8) { EXPECT_EQ(1, header.simulcastIdx); EXPECT_EQ(kVideoCodecVP8, header.codec); const auto& vp8_header = - absl::get(header.video_type_header); + std::get(header.video_type_header); EXPECT_EQ(kPictureId + 2, vp8_header.pictureId); EXPECT_EQ(kTemporalIdx, vp8_header.temporalIdx); EXPECT_EQ(kTl0PicIdx + 1, vp8_header.tl0PicIdx); @@ -121,7 +130,7 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp9) { EXPECT_EQ(kVideoCodecVP9, header.codec); EXPECT_FALSE(header.color_space); const auto& vp9_header = - absl::get(header.video_type_header); + std::get(header.video_type_header); EXPECT_EQ(kPictureId + 1, vp9_header.picture_id); EXPECT_EQ(kTl0PicIdx, vp9_header.tl0_pic_idx); EXPECT_EQ(vp9_header.temporal_idx, codec_info.codecSpecific.VP9.temporal_idx); @@ -144,7 +153,7 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp9) { EXPECT_EQ(kVideoRotation_90, header.rotation); EXPECT_EQ(VideoContentType::SCREENSHARE, header.content_type); EXPECT_EQ(kVideoCodecVP9, header.codec); - EXPECT_EQ(absl::make_optional(color_space), header.color_space); + EXPECT_EQ(std::make_optional(color_space), header.color_space); EXPECT_EQ(kPictureId + 1, vp9_header.picture_id); EXPECT_EQ(kTl0PicIdx, vp9_header.tl0_pic_idx); EXPECT_EQ(vp9_header.temporal_idx, codec_info.codecSpecific.VP9.temporal_idx); @@ -168,7 +177,7 @@ TEST(RtpPayloadParamsTest, PictureIdIsSetForVp8) { params.GetRtpVideoHeader(encoded_image, &codec_info, kDontCare); EXPECT_EQ(kVideoCodecVP8, header.codec); EXPECT_EQ(kInitialPictureId1 + 1, - absl::get(header.video_type_header).pictureId); + std::get(header.video_type_header).pictureId); // State 
should hold latest used picture id and tl0_pic_idx. state = params.state(); @@ -190,8 +199,7 @@ TEST(RtpPayloadParamsTest, PictureIdWraps) { RTPVideoHeader header = params.GetRtpVideoHeader(encoded_image, &codec_info, kDontCare); EXPECT_EQ(kVideoCodecVP8, header.codec); - EXPECT_EQ(0, - absl::get(header.video_type_header).pictureId); + EXPECT_EQ(0, std::get(header.video_type_header).pictureId); // State should hold latest used picture id and tl0_pic_idx. EXPECT_EQ(0, params.state().picture_id); // Wrapped. @@ -297,7 +305,7 @@ TEST(RtpPayloadParamsTest, Tl0PicIdxUpdatedForVp8) { EXPECT_EQ(kVideoCodecVP8, header.codec); const auto& vp8_header = - absl::get(header.video_type_header); + std::get(header.video_type_header); EXPECT_EQ(kInitialPictureId1 + 1, vp8_header.pictureId); EXPECT_EQ(kInitialTl0PicIdx1, vp8_header.tl0PicIdx); @@ -333,7 +341,7 @@ TEST(RtpPayloadParamsTest, Tl0PicIdxUpdatedForVp9) { EXPECT_EQ(kVideoCodecVP9, header.codec); const auto& vp9_header = - absl::get(header.video_type_header); + std::get(header.video_type_header); EXPECT_EQ(kInitialPictureId1 + 1, vp9_header.picture_id); EXPECT_EQ(kInitialTl0PicIdx1, vp9_header.tl0_pic_idx); @@ -360,6 +368,30 @@ TEST(RtpPayloadParamsTest, Tl0PicIdxUpdatedForVp9) { EXPECT_EQ(kInitialTl0PicIdx1 + 1, params.state().tl0_pic_idx); } +TEST(RtpPayloadParamsTest, GenerateFrameIdWhenExternalFrameIdsAreNotProvided) { + RtpPayloadState state; + state.frame_id = 123; + + EncodedImage encoded_image; + encoded_image._frameType = VideoFrameType::kVideoFrameKey; + CodecSpecificInfo codec_info; + codec_info.codecType = kVideoCodecGeneric; + + RtpPayloadParams params(kSsrc1, &state, FieldTrialBasedConfig()); + RTPVideoHeader header = + params.GetRtpVideoHeader(encoded_image, &codec_info, std::nullopt); + + EXPECT_THAT(header.codec, Eq(kVideoCodecGeneric)); + + ASSERT_TRUE(header.generic); + EXPECT_THAT(header.generic->frame_id, Eq(123)); + + encoded_image._frameType = VideoFrameType::kVideoFrameDelta; + header = params.GetRtpVideoHeader(encoded_image, &codec_info, std::nullopt); + ASSERT_TRUE(header.generic); + EXPECT_THAT(header.generic->frame_id, Eq(124)); +} + TEST(RtpPayloadParamsTest, PictureIdForOldGenericFormat) { test::ScopedKeyValueConfig field_trials("WebRTC-GenericPictureId/Enabled/"); RtpPayloadState state{}; @@ -375,14 +407,13 @@ TEST(RtpPayloadParamsTest, PictureIdForOldGenericFormat) { EXPECT_EQ(kVideoCodecGeneric, header.codec); const auto* generic = - absl::get_if(&header.video_type_header); + std::get_if(&header.video_type_header); ASSERT_TRUE(generic); EXPECT_EQ(0, generic->picture_id); encoded_image._frameType = VideoFrameType::kVideoFrameDelta; header = params.GetRtpVideoHeader(encoded_image, &codec_info, 20); - generic = - absl::get_if(&header.video_type_header); + generic = std::get_if(&header.video_type_header); ASSERT_TRUE(generic); EXPECT_EQ(1, generic->picture_id); } @@ -1313,7 +1344,13 @@ class RtpPayloadParamsH264ToGenericTest : public ::testing::Test { LayerSync layer_sync, const std::set& expected_deps, uint16_t width = 0, - uint16_t height = 0) { + uint16_t height = 0, + const std::vector& + expected_decode_target_indication = { + DecodeTargetIndication::kSwitch, + DecodeTargetIndication::kSwitch, + DecodeTargetIndication::kSwitch, + DecodeTargetIndication::kSwitch}) { EncodedImage encoded_image; encoded_image._frameType = frame_type; encoded_image._encodedWidth = width; @@ -1338,6 +1375,9 @@ class RtpPayloadParamsH264ToGenericTest : public ::testing::Test { EXPECT_EQ(header.width, width); EXPECT_EQ(header.height, 
height); + + EXPECT_THAT(header.generic->decode_target_indications, + ElementsAreArray(expected_decode_target_indication)); } protected: @@ -1369,29 +1409,42 @@ TEST_F(RtpPayloadParamsH264ToGenericTest, TooHighTemporalIndex) { } TEST_F(RtpPayloadParamsH264ToGenericTest, LayerSync) { + constexpr auto kSwitch = DecodeTargetIndication::kSwitch; + constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent; + // 02120212 pattern ConvertAndCheck(0, 0, VideoFrameType::kVideoFrameKey, kNoSync, {}, 480, 360); - ConvertAndCheck(2, 1, VideoFrameType::kVideoFrameDelta, kNoSync, {0}); - ConvertAndCheck(1, 2, VideoFrameType::kVideoFrameDelta, kNoSync, {0}); - ConvertAndCheck(2, 3, VideoFrameType::kVideoFrameDelta, kNoSync, {0, 1, 2}); - - ConvertAndCheck(0, 4, VideoFrameType::kVideoFrameDelta, kNoSync, {0}); - ConvertAndCheck(2, 5, VideoFrameType::kVideoFrameDelta, kNoSync, {2, 3, 4}); - ConvertAndCheck(1, 6, VideoFrameType::kVideoFrameDelta, kSync, - {4}); // layer sync - ConvertAndCheck(2, 7, VideoFrameType::kVideoFrameDelta, kNoSync, {4, 5, 6}); + ConvertAndCheck(2, 1, VideoFrameType::kVideoFrameDelta, kNoSync, {0}, 0, 0, + {kNotPresent, kNotPresent, kSwitch, kSwitch}); + ConvertAndCheck(1, 2, VideoFrameType::kVideoFrameDelta, kNoSync, {0}, 0, 0, + {kNotPresent, kSwitch, kSwitch, kSwitch}); + ConvertAndCheck(2, 3, VideoFrameType::kVideoFrameDelta, kNoSync, {0, 1, 2}, 0, + 0, {kNotPresent, kNotPresent, kSwitch, kSwitch}); + ConvertAndCheck(0, 4, VideoFrameType::kVideoFrameDelta, kNoSync, {0}, 0, 0); + ConvertAndCheck(2, 5, VideoFrameType::kVideoFrameDelta, kNoSync, {2, 3, 4}, 0, + 0, {kNotPresent, kNotPresent, kSwitch, kSwitch}); + ConvertAndCheck(1, 6, VideoFrameType::kVideoFrameDelta, kSync, {4}, 0, 0, + {kNotPresent, kSwitch, kSwitch, kSwitch}); // layer sync + ConvertAndCheck(2, 7, VideoFrameType::kVideoFrameDelta, kNoSync, {4, 5, 6}, 0, + 0, {kNotPresent, kNotPresent, kSwitch, kSwitch}); } TEST_F(RtpPayloadParamsH264ToGenericTest, FrameIdGaps) { + constexpr auto kSwitch = DecodeTargetIndication::kSwitch; + constexpr auto kNotPresent = DecodeTargetIndication::kNotPresent; + // 0101 pattern ConvertAndCheck(0, 0, VideoFrameType::kVideoFrameKey, kNoSync, {}, 480, 360); - ConvertAndCheck(1, 1, VideoFrameType::kVideoFrameDelta, kNoSync, {0}); + ConvertAndCheck(1, 1, VideoFrameType::kVideoFrameDelta, kNoSync, {0}, 0, 0, + {kNotPresent, kSwitch, kSwitch, kSwitch}); ConvertAndCheck(0, 5, VideoFrameType::kVideoFrameDelta, kNoSync, {0}); - ConvertAndCheck(1, 10, VideoFrameType::kVideoFrameDelta, kNoSync, {1, 5}); + ConvertAndCheck(1, 10, VideoFrameType::kVideoFrameDelta, kNoSync, {1, 5}, 0, + 0, {kNotPresent, kSwitch, kSwitch, kSwitch}); ConvertAndCheck(0, 15, VideoFrameType::kVideoFrameDelta, kNoSync, {5}); - ConvertAndCheck(1, 20, VideoFrameType::kVideoFrameDelta, kNoSync, {10, 15}); + ConvertAndCheck(1, 20, VideoFrameType::kVideoFrameDelta, kNoSync, {10, 15}, 0, + 0, {kNotPresent, kSwitch, kSwitch, kSwitch}); } } // namespace diff --git a/call/rtp_stream_receiver_controller.cc b/call/rtp_stream_receiver_controller.cc index 993a4fc76e..9367b87a57 100644 --- a/call/rtp_stream_receiver_controller.cc +++ b/call/rtp_stream_receiver_controller.cc @@ -10,8 +10,12 @@ #include "call/rtp_stream_receiver_controller.h" +#include #include +#include "api/sequence_checker.h" +#include "call/rtp_packet_sink_interface.h" +#include "call/rtp_stream_receiver_controller_interface.h" #include "rtc_base/logging.h" namespace webrtc { diff --git a/call/rtp_stream_receiver_controller.h 
b/call/rtp_stream_receiver_controller.h index 1040632639..1954c4fc2f 100644 --- a/call/rtp_stream_receiver_controller.h +++ b/call/rtp_stream_receiver_controller.h @@ -10,12 +10,14 @@ #ifndef CALL_RTP_STREAM_RECEIVER_CONTROLLER_H_ #define CALL_RTP_STREAM_RECEIVER_CONTROLLER_H_ +#include #include #include "api/sequence_checker.h" #include "call/rtp_demuxer.h" #include "call/rtp_stream_receiver_controller_interface.h" #include "modules/rtp_rtcp/include/recovered_packet_receiver.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { diff --git a/call/rtp_stream_receiver_controller_interface.h b/call/rtp_stream_receiver_controller_interface.h index 793d0bc145..51ae6554b9 100644 --- a/call/rtp_stream_receiver_controller_interface.h +++ b/call/rtp_stream_receiver_controller_interface.h @@ -10,6 +10,7 @@ #ifndef CALL_RTP_STREAM_RECEIVER_CONTROLLER_INTERFACE_H_ #define CALL_RTP_STREAM_RECEIVER_CONTROLLER_INTERFACE_H_ +#include #include #include "call/rtp_packet_sink_interface.h" diff --git a/call/rtp_transport_config.h b/call/rtp_transport_config.h index 6c94f7d911..5664b628eb 100644 --- a/call/rtp_transport_config.h +++ b/call/rtp_transport_config.h @@ -11,29 +11,23 @@ #ifndef CALL_RTP_TRANSPORT_CONFIG_H_ #define CALL_RTP_TRANSPORT_CONFIG_H_ -#include +#include -#include "api/field_trials_view.h" +#include "api/environment/environment.h" #include "api/network_state_predictor.h" -#include "api/rtc_event_log/rtc_event_log.h" #include "api/transport/bitrate_settings.h" #include "api/transport/network_control.h" -#include "rtc_base/task_queue.h" +#include "api/units/time_delta.h" namespace webrtc { struct RtpTransportConfig { + Environment env; + // Bitrate config used until valid bitrate estimates are calculated. Also // used to cap total bitrate used. This comes from the remote connection. BitrateConstraints bitrate_config; - // RtcEventLog to use for this call. Required. - // Use webrtc::RtcEventLog::CreateNull() for a null implementation. - RtcEventLog* event_log = nullptr; - - // Task Queue Factory to be used in this call. Required. - TaskQueueFactory* task_queue_factory = nullptr; - // NetworkStatePredictor to use for this call. NetworkStatePredictorFactoryInterface* network_state_predictor_factory = nullptr; @@ -41,12 +35,8 @@ struct RtpTransportConfig { // Network controller factory to use for this call. NetworkControllerFactoryInterface* network_controller_factory = nullptr; - // Key-value mapping of internal configurations to apply, - // e.g. field trials. - const FieldTrialsView* trials = nullptr; - // The burst interval of the pacer, see TaskQueuePacedSender constructor. 
- absl::optional pacer_burst_interval; + std::optional pacer_burst_interval; }; } // namespace webrtc diff --git a/call/rtp_transport_controller_send.cc b/call/rtp_transport_controller_send.cc index dc1d37168e..992e3a436c 100644 --- a/call/rtp_transport_controller_send.cc +++ b/call/rtp_transport_controller_send.cc @@ -9,27 +9,56 @@ */ #include "call/rtp_transport_controller_send.h" +#include +#include +#include #include +#include +#include #include #include -#include "absl/strings/match.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/call/transport.h" +#include "api/fec_controller.h" +#include "api/frame_transformer_interface.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/rtp_packet_sender.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" +#include "api/transport/bandwidth_estimation_settings.h" +#include "api/transport/bitrate_settings.h" #include "api/transport/goog_cc_factory.h" +#include "api/transport/network_control.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" +#include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "call/rtp_config.h" +#include "call/rtp_transport_config.h" +#include "call/rtp_transport_controller_send_interface.h" #include "call/rtp_video_sender.h" -#include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" +#include "call/rtp_video_sender_interface.h" #include "logging/rtc_event_log/events/rtc_event_route_change.h" +#include "modules/congestion_controller/rtp/control_handler.h" +#include "modules/pacing/packet_router.h" +#include "modules/rtp_rtcp/include/report_block_data.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_route.h" #include "rtc_base/rate_limiter.h" +#include "rtc_base/task_utils/repeating_task.h" namespace webrtc { namespace { @@ -62,60 +91,63 @@ TargetRateConstraints ConvertConstraints(const BitrateConstraints& contraints, contraints.start_bitrate_bps, clock); } -bool IsEnabled(const FieldTrialsView& trials, absl::string_view key) { - return absl::StartsWith(trials.Lookup(key), "Enabled"); -} - -bool IsRelayed(const rtc::NetworkRoute& route) { +bool IsRelayed(const NetworkRoute& route) { return route.local.uses_turn() || route.remote.uses_turn(); } } // namespace RtpTransportControllerSend::RtpTransportControllerSend( - Clock* clock, const RtpTransportConfig& config) - : clock_(clock), - event_log_(config.event_log), - task_queue_factory_(config.task_queue_factory), + : env_(config.env), task_queue_(TaskQueueBase::Current()), bitrate_configurator_(config.bitrate_config), pacer_started_(false), - pacer_(clock, + pacer_(&env_.clock(), &packet_router_, - *config.trials, + env_.field_trials(), TimeDelta::Millis(5), - 3, - config.pacer_burst_interval), + 3), observer_(nullptr), controller_factory_override_(config.network_controller_factory), controller_factory_fallback_( std::make_unique( - 
config.network_state_predictor_factory)), + GoogCcFactoryConfig{.network_state_predictor_factory = + config.network_state_predictor_factory})), process_interval_(controller_factory_fallback_->GetProcessInterval()), - last_report_block_time_(Timestamp::Millis(clock_->TimeInMilliseconds())), + last_report_block_time_( + Timestamp::Millis(env_.clock().TimeInMilliseconds())), + initial_config_(env_), reset_feedback_on_route_change_( - !IsEnabled(*config.trials, "WebRTC-Bwe-NoFeedbackReset")), - add_pacing_to_cwin_( - IsEnabled(*config.trials, - "WebRTC-AddPacingToCongestionWindowPushback")), + !env_.field_trials().IsEnabled("WebRTC-Bwe-NoFeedbackReset")), + add_pacing_to_cwin_(env_.field_trials().IsEnabled( + "WebRTC-AddPacingToCongestionWindowPushback")), + reset_bwe_on_adapter_id_change_( + env_.field_trials().IsEnabled("WebRTC-Bwe-ResetOnAdapterIdChange")), relay_bandwidth_cap_("relay_cap", DataRate::PlusInfinity()), transport_overhead_bytes_per_packet_(0), network_available_(false), congestion_window_size_(DataSize::PlusInfinity()), is_congested_(false), - retransmission_rate_limiter_(clock, kRetransmitWindowSizeMs), - field_trials_(*config.trials) { - ParseFieldTrial({&relay_bandwidth_cap_}, - config.trials->Lookup("WebRTC-Bwe-NetworkRouteConstraints")); + retransmission_rate_limiter_(&env_.clock(), kRetransmitWindowSizeMs) { + ParseFieldTrial( + {&relay_bandwidth_cap_}, + env_.field_trials().Lookup("WebRTC-Bwe-NetworkRouteConstraints")); initial_config_.constraints = - ConvertConstraints(config.bitrate_config, clock_); - initial_config_.event_log = config.event_log; - initial_config_.key_value_config = config.trials; + ConvertConstraints(config.bitrate_config, &env_.clock()); RTC_DCHECK(config.bitrate_config.start_bitrate_bps > 0); pacer_.SetPacingRates( DataRate::BitsPerSec(config.bitrate_config.start_bitrate_bps), DataRate::Zero()); + if (config.pacer_burst_interval) { + // Default burst interval overriden by config. + pacer_.SetSendBurstInterval(*config.pacer_burst_interval); + } + packet_router_.RegisterNotifyBweCallback( + [this](const RtpPacketToSend& packet, + const PacedPacketInfo& pacing_info) { + return NotifyBweOfPacedSentPacket(packet, pacing_info); + }); } RtpTransportControllerSend::~RtpTransportControllerSend() { @@ -132,20 +164,18 @@ RtpVideoSenderInterface* RtpTransportControllerSend::CreateRtpVideoSender( int rtcp_report_interval_ms, Transport* send_transport, const RtpSenderObservers& observers, - RtcEventLog* event_log, std::unique_ptr fec_controller, const RtpSenderFrameEncryptionConfig& frame_encryption_config, - rtc::scoped_refptr frame_transformer) { + scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&sequence_checker_); video_rtp_senders_.push_back(std::make_unique( - clock_, suspended_ssrcs, states, rtp_config, rtcp_report_interval_ms, - send_transport, observers, + env_, task_queue_, suspended_ssrcs, states, rtp_config, + rtcp_report_interval_ms, send_transport, observers, // TODO(holmer): Remove this circular dependency by injecting // the parts of RtpTransportControllerSendInterface that are really used. 
- this, event_log, &retransmission_rate_limiter_, std::move(fec_controller), + this, &retransmission_rate_limiter_, std::move(fec_controller), frame_encryption_config.frame_encryptor, - frame_encryption_config.crypto_options, std::move(frame_transformer), - field_trials_, task_queue_factory_)); + frame_encryption_config.crypto_options, std::move(frame_transformer))); return video_rtp_senders_.back().get(); } @@ -163,8 +193,39 @@ void RtpTransportControllerSend::DestroyRtpVideoSender( video_rtp_senders_.erase(it); } +void RtpTransportControllerSend::RegisterSendingRtpStream( + RtpRtcpInterface& rtp_module) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + // Allow pacer to send packets using this module. + packet_router_.AddSendRtpModule(&rtp_module, + /*remb_candidate=*/true); + pacer_.SetAllowProbeWithoutMediaPacket( + bwe_settings_.allow_probe_without_media && + packet_router_.SupportsRtxPayloadPadding()); +} + +void RtpTransportControllerSend::DeRegisterSendingRtpStream( + RtpRtcpInterface& rtp_module) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + // Disabling media, remove from packet router map to reduce size and + // prevent any stray packets in the pacer from asynchronously arriving + // to a disabled module. + packet_router_.RemoveSendRtpModule(&rtp_module); + // Clear the pacer queue of any packets pertaining to this module. + pacer_.RemovePacketsForSsrc(rtp_module.SSRC()); + if (rtp_module.RtxSsrc().has_value()) { + pacer_.RemovePacketsForSsrc(*rtp_module.RtxSsrc()); + } + if (rtp_module.FlexfecSsrc().has_value()) { + pacer_.RemovePacketsForSsrc(*rtp_module.FlexfecSsrc()); + } + pacer_.SetAllowProbeWithoutMediaPacket( + bwe_settings_.allow_probe_without_media && + packet_router_.SupportsRtxPayloadPadding()); +} + void RtpTransportControllerSend::UpdateControlState() { - absl::optional update = control_handler_->GetUpdate(); + std::optional update = control_handler_->GetUpdate(); if (!update) return; retransmission_rate_limiter_.SetMaxRate(update->target_rate.bps()); @@ -180,13 +241,13 @@ void RtpTransportControllerSend::UpdateCongestedState() { } } -absl::optional RtpTransportControllerSend::GetCongestedStateUpdate() +std::optional RtpTransportControllerSend::GetCongestedStateUpdate() const { bool congested = transport_feedback_adapter_.GetOutstandingData() >= congestion_window_size_; if (congested != is_congested_) return congested; - return absl::nullopt; + return std::nullopt; } PacketRouter* RtpTransportControllerSend::packet_router() { @@ -198,11 +259,6 @@ RtpTransportControllerSend::network_state_estimate_observer() { return this; } -TransportFeedbackObserver* -RtpTransportControllerSend::transport_feedback_observer() { - return this; -} - RtpPacketSender* RtpTransportControllerSend::packet_sender() { return &pacer_; } @@ -228,6 +284,32 @@ RtpTransportControllerSend::GetStreamFeedbackProvider() { return &feedback_demuxer_; } +void RtpTransportControllerSend::ReconfigureBandwidthEstimation( + const BandwidthEstimationSettings& settings) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + bwe_settings_ = settings; + + streams_config_.enable_repeated_initial_probing = + bwe_settings_.allow_probe_without_media; + bool allow_probe_without_media = bwe_settings_.allow_probe_without_media && + packet_router_.SupportsRtxPayloadPadding(); + pacer_.SetAllowProbeWithoutMediaPacket(allow_probe_without_media); + + if (controller_) { + // Recreate the controller and handler. + control_handler_ = nullptr; + controller_ = nullptr; + // The BWE controller is created when/if the network is available. 
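The new RegisterSendingRtpStream() / DeRegisterSendingRtpStream() pair above takes over the packet-router and pacer bookkeeping that RtpVideoSender previously did by hand in SetActiveModulesLocked() (see the rtp_video_sender.cc changes further down). A minimal sketch of the call pattern, mirroring RtpVideoSender::SetModuleIsActive() from this patch; `transport` and `rtp_module` are assumed to exist already and the wrapper name is illustrative:

#include "call/rtp_transport_controller_send_interface.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h"

void SetStreamSending(webrtc::RtpTransportControllerSendInterface& transport,
                      webrtc::RtpRtcpInterface& rtp_module,
                      bool sending) {
  rtp_module.SetSendingStatus(sending);       // false emits an RTCP BYE
  rtp_module.SetSendingMediaStatus(sending);
  if (sending) {
    // Pacer and packet router may now send packets for this stream.
    transport.RegisterSendingRtpStream(rtp_module);
  } else {
    // Deregistration also flushes the pacer queue for the stream's media,
    // RTX and FlexFEC SSRCs and re-evaluates probe-without-media support.
    transport.DeRegisterSendingRtpStream(rtp_module);
  }
}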
+ MaybeCreateControllers(); + if (controller_) { + BitrateConstraints constraints = bitrate_configurator_.GetConfig(); + UpdateBitrateConstraints(constraints); + UpdateStreamsConfig(); + UpdateNetworkAvailability(); + } + } +} + void RtpTransportControllerSend::RegisterTargetTransferRateObserver( TargetTransferRateObserver* observer) { RTC_DCHECK_RUN_ON(&sequence_checker_); @@ -238,25 +320,30 @@ void RtpTransportControllerSend::RegisterTargetTransferRateObserver( } bool RtpTransportControllerSend::IsRelevantRouteChange( - const rtc::NetworkRoute& old_route, - const rtc::NetworkRoute& new_route) const { - // TODO(bugs.webrtc.org/11438): Experiment with using more information/ - // other conditions. + const NetworkRoute& old_route, + const NetworkRoute& new_route) const { bool connected_changed = old_route.connected != new_route.connected; - bool route_ids_changed = - old_route.local.network_id() != new_route.local.network_id() || - old_route.remote.network_id() != new_route.remote.network_id(); - if (relay_bandwidth_cap_->IsFinite()) { - bool relaying_changed = IsRelayed(old_route) != IsRelayed(new_route); - return connected_changed || route_ids_changed || relaying_changed; + bool route_ids_changed = false; + bool relaying_changed = false; + + if (reset_bwe_on_adapter_id_change_) { + route_ids_changed = + old_route.local.adapter_id() != new_route.local.adapter_id() || + old_route.remote.adapter_id() != new_route.remote.adapter_id(); } else { - return connected_changed || route_ids_changed; + route_ids_changed = + old_route.local.network_id() != new_route.local.network_id() || + old_route.remote.network_id() != new_route.remote.network_id(); } + if (relay_bandwidth_cap_->IsFinite()) { + relaying_changed = IsRelayed(old_route) != IsRelayed(new_route); + } + return connected_changed || route_ids_changed || relaying_changed; } void RtpTransportControllerSend::OnNetworkRouteChanged( absl::string_view transport_name, - const rtc::NetworkRoute& network_route) { + const NetworkRoute& network_route) { RTC_DCHECK_RUN_ON(&sequence_checker_); // Check if the network route is connected. if (!network_route.connected) { @@ -265,7 +352,7 @@ void RtpTransportControllerSend::OnNetworkRouteChanged( return; } - absl::optional relay_constraint_update = + std::optional relay_constraint_update = ApplyOrLiftRelayCap(IsRelayed(network_route)); // Check whether the network route has changed on each transport. 
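ReconfigureBandwidthEstimation(), introduced above, is the runtime entry point for BandwidthEstimationSettings: it recreates the congestion controller so the new settings take effect and recomputes whether the pacer may probe without media. A caller-side sketch, assuming `transport` is the RtpTransportControllerSendInterface and that only allow_probe_without_media is changed; the wrapper name is illustrative and other settings keep their defaults:

#include "api/transport/bandwidth_estimation_settings.h"
#include "call/rtp_transport_controller_send_interface.h"

void AllowProbingWithoutMedia(
    webrtc::RtpTransportControllerSendInterface& transport) {
  webrtc::BandwidthEstimationSettings settings;
  // Permits padding-based probes before any media has been sent, provided
  // at least one registered module supports RTX payload padding.
  settings.allow_probe_without_media = true;
  transport.ReconfigureBandwidthEstimation(settings);
}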
@@ -293,7 +380,7 @@ void RtpTransportControllerSend::OnNetworkRouteChanged( return; } - const rtc::NetworkRoute old_route = kv->second; + const NetworkRoute old_route = kv->second; kv->second = network_route; // Check if enough conditions of the new/old route has changed @@ -307,13 +394,15 @@ void RtpTransportControllerSend::OnNetworkRouteChanged( << " bps."; RTC_DCHECK_GT(bitrate_config.start_bitrate_bps, 0); - if (event_log_) { - event_log_->Log(std::make_unique( - network_route.connected, network_route.packet_overhead)); + env_.event_log().Log(std::make_unique( + network_route.connected, network_route.packet_overhead)); + if (transport_maybe_support_ecn_) { + sending_packets_as_ect1_ = true; + packet_router_.ConfigureForRfc8888Feedback(sending_packets_as_ect1_); } NetworkRouteChange msg; - msg.at_time = Timestamp::Millis(clock_->TimeInMilliseconds()); - msg.constraints = ConvertConstraints(bitrate_config, clock_); + msg.at_time = Timestamp::Millis(env_.clock().TimeInMilliseconds()); + msg.constraints = ConvertConstraints(bitrate_config, &env_.clock()); transport_overhead_bytes_per_packet_ = network_route.packet_overhead; if (reset_feedback_on_route_change_) { transport_feedback_adapter_.SetNetworkRoute(network_route); @@ -331,9 +420,6 @@ void RtpTransportControllerSend::OnNetworkAvailability(bool network_available) { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_LOG(LS_VERBOSE) << "SignalNetworkState " << (network_available ? "Up" : "Down"); - NetworkAvailability msg; - msg.at_time = Timestamp::Millis(clock_->TimeInMilliseconds()); - msg.network_available = network_available; network_available_ = network_available; if (network_available) { pacer_.Resume(); @@ -343,13 +429,10 @@ void RtpTransportControllerSend::OnNetworkAvailability(bool network_available) { is_congested_ = false; pacer_.SetCongested(false); - if (controller_) { - control_handler_->SetNetworkAvailability(network_available); - PostUpdates(controller_->OnNetworkAvailability(msg)); - UpdateControlState(); - } else { + if (!controller_) { MaybeCreateControllers(); } + UpdateNetworkAvailability(); for (auto& rtp_sender : video_rtp_senders_) { rtp_sender->OnNetworkAvailability(network_available); } @@ -360,7 +443,7 @@ NetworkLinkRtcpObserver* RtpTransportControllerSend::GetRtcpObserver() { int64_t RtpTransportControllerSend::GetPacerQueuingDelayMs() const { return pacer_.OldestPacketWaitTime().ms(); } -absl::optional RtpTransportControllerSend::GetFirstPacketTime() +std::optional RtpTransportControllerSend::GetFirstPacketTime() const { return pacer_.FirstSentPacketTime(); } @@ -371,7 +454,7 @@ void RtpTransportControllerSend::EnablePeriodicAlrProbing(bool enable) { UpdateStreamsConfig(); } void RtpTransportControllerSend::OnSentPacket( - const rtc::SentPacket& sent_packet) { + const SentPacketInfo& sent_packet) { // Normally called on the network thread! 
// TODO(crbug.com/1373439): Clarify other thread contexts calling in, // and simplify task posting logic when the combined network/worker project @@ -389,9 +472,9 @@ void RtpTransportControllerSend::OnSentPacket( } void RtpTransportControllerSend::ProcessSentPacket( - const rtc::SentPacket& sent_packet) { + const SentPacketInfo& sent_packet) { RTC_DCHECK_RUN_ON(&sequence_checker_); - absl::optional packet_msg = + std::optional packet_msg = transport_feedback_adapter_.ProcessSentPacket(sent_packet); if (!packet_msg) return; @@ -429,7 +512,7 @@ void RtpTransportControllerSend::OnReceivedPacket( void RtpTransportControllerSend::UpdateBitrateConstraints( const BitrateConstraints& updated) { RTC_DCHECK_RUN_ON(&sequence_checker_); - TargetRateConstraints msg = ConvertConstraints(updated, clock_); + TargetRateConstraints msg = ConvertConstraints(updated, &env_.clock()); if (controller_) { PostUpdates(controller_->OnTargetRateConstraints(msg)); } else { @@ -440,7 +523,7 @@ void RtpTransportControllerSend::UpdateBitrateConstraints( void RtpTransportControllerSend::SetSdpBitrateParameters( const BitrateConstraints& constraints) { RTC_DCHECK_RUN_ON(&sequence_checker_); - absl::optional updated = + std::optional updated = bitrate_configurator_.UpdateWithSdpParameters(constraints); if (updated.has_value()) { UpdateBitrateConstraints(*updated); @@ -454,7 +537,7 @@ void RtpTransportControllerSend::SetSdpBitrateParameters( void RtpTransportControllerSend::SetClientBitratePreferences( const BitrateSettings& preferences) { RTC_DCHECK_RUN_ON(&sequence_checker_); - absl::optional updated = + std::optional updated = bitrate_configurator_.UpdateWithClientPreferences(preferences); if (updated.has_value()) { UpdateBitrateConstraints(*updated); @@ -465,7 +548,7 @@ void RtpTransportControllerSend::SetClientBitratePreferences( } } -absl::optional +std::optional RtpTransportControllerSend::ApplyOrLiftRelayCap(bool is_relayed) { DataRate cap = is_relayed ? relay_bandwidth_cap_ : DataRate::PlusInfinity(); return bitrate_configurator_.UpdateWithRelayCap(cap); @@ -529,40 +612,94 @@ void RtpTransportControllerSend::OnRttUpdate(Timestamp receive_time, PostUpdates(controller_->OnRoundTripTimeUpdate(report)); } -void RtpTransportControllerSend::OnAddPacket( - const RtpPacketSendInfo& packet_info) { +void RtpTransportControllerSend::NotifyBweOfPacedSentPacket( + const RtpPacketToSend& packet, + const PacedPacketInfo& pacing_info) { RTC_DCHECK_RUN_ON(&sequence_checker_); - Timestamp creation_time = Timestamp::Millis(clock_->TimeInMilliseconds()); - feedback_demuxer_.AddPacket(packet_info); + + if (!packet.transport_sequence_number()) { + return; + } + if (!packet.packet_type()) { + RTC_DCHECK_NOTREACHED() << "Unknown packet type"; + return; + } + if (packet.HasExtension()) { + // TODO: bugs.webrtc.org/42225697 - Refactor TransportFeedbackDemuxer to use + // TransportPacketsFeedback instead of directly using + // rtcp::TransportFeedback. For now, only use it if TransportSeqeunce number + // header extension is used. 
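SetSdpBitrateParameters() and SetClientBitratePreferences() above both merge their input through RtpBitrateConfigurator and only forward a constraints update when the effective configuration actually changes. A small application-side sketch, assuming `transport` is the RtpTransportControllerSendInterface; the wrapper name and the bitrate values are illustrative:

#include "api/transport/bitrate_settings.h"
#include "call/rtp_transport_controller_send_interface.h"

void ApplyAppBitratePreferences(
    webrtc::RtpTransportControllerSendInterface& transport) {
  webrtc::BitrateSettings preferences;
  preferences.start_bitrate_bps = 300'000;   // illustrative start rate
  preferences.max_bitrate_bps = 1'500'000;   // illustrative cap
  // Ignored if nothing changes relative to the currently configured
  // constraints; otherwise forwarded to the congestion controller.
  transport.SetClientBitratePreferences(preferences);
}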
+ RtpPacketSendInfo packet_info = + RtpPacketSendInfo::From(packet, pacing_info); + feedback_demuxer_.AddPacket(packet_info); + } + Timestamp creation_time = + Timestamp::Millis(env_.clock().TimeInMilliseconds()); transport_feedback_adapter_.AddPacket( - packet_info, transport_overhead_bytes_per_packet_, creation_time); + packet, pacing_info, transport_overhead_bytes_per_packet_, creation_time); +} + +void RtpTransportControllerSend:: + EnableCongestionControlFeedbackAccordingToRfc8888() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + transport_maybe_support_ecn_ = true; + sending_packets_as_ect1_ = true; + packet_router_.ConfigureForRfc8888Feedback(sending_packets_as_ect1_); } void RtpTransportControllerSend::OnTransportFeedback( Timestamp receive_time, const rtcp::TransportFeedback& feedback) { RTC_DCHECK_RUN_ON(&sequence_checker_); + ++transport_cc_feedback_count_; feedback_demuxer_.OnTransportFeedback(feedback); - absl::optional feedback_msg = + std::optional feedback_msg = transport_feedback_adapter_.ProcessTransportFeedback(feedback, receive_time); if (feedback_msg) { - if (controller_) - PostUpdates(controller_->OnTransportPacketsFeedback(*feedback_msg)); + HandleTransportPacketsFeedback(*feedback_msg); + } +} - // Only update outstanding data if any packet is first time acked. - UpdateCongestedState(); +void RtpTransportControllerSend::OnCongestionControlFeedback( + Timestamp receive_time, + const rtcp::CongestionControlFeedback& feedback) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + ++feedback_count_; + // TODO: bugs.webrtc.org/42225697 - update feedback demuxer for RFC 8888. + // Suggest feedback_demuxer_.OnTransportFeedback use TransportPacketFeedback + // instead. See usage in OnTransportFeedback. + std::optional feedback_msg = + transport_feedback_adapter_.ProcessCongestionControlFeedback( + feedback, receive_time); + if (feedback_msg) { + HandleTransportPacketsFeedback(*feedback_msg); } } +void RtpTransportControllerSend::HandleTransportPacketsFeedback( + const TransportPacketsFeedback& feedback) { + if (sending_packets_as_ect1_) { + // If transport does not support ECN, packets should not be sent as ECT(1). + // TODO: bugs.webrtc.org/42225697 - adapt to ECN feedback and continue to + // send packets as ECT(1) if transport is ECN capable. + sending_packets_as_ect1_ = false; + RTC_LOG(LS_INFO) << " Transport is " + << (feedback.transport_supports_ecn ? "" : " not ") + << " ECN capable. Stop sending ECT(1)."; + packet_router_.ConfigureForRfc8888Feedback(sending_packets_as_ect1_); + } + if (controller_) + PostUpdates(controller_->OnTransportPacketsFeedback(feedback)); + + // Only update outstanding data if any packet is first time acked. 
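The functions above add an RFC 8888 congestion-control feedback path alongside transport-cc: EnableCongestionControlFeedbackAccordingToRfc8888() is called once it is known the remote end supports it, packets are then marked ECT(1) until feedback shows the transport is not ECN capable, and both feedback flavours end up in HandleTransportPacketsFeedback(). A caller-side sketch, assuming `transport` is the RtpTransportControllerSendInterface and the capability was learned from negotiation; the wrapper names are illustrative:

#include "call/rtp_transport_controller_send_interface.h"
#include "rtc_base/logging.h"

void OnRemoteSupportsRfc8888(
    webrtc::RtpTransportControllerSendInterface& transport) {
  // Switches the packet router to RFC 8888 feedback and starts sending
  // packets as ECT(1); the controller backs off on its own if feedback
  // indicates the transport is not ECN capable.
  transport.EnableCongestionControlFeedbackAccordingToRfc8888();
}

void LogFeedbackCounts(
    const webrtc::RtpTransportControllerSendInterface& transport) {
  // The two counters expose which feedback mechanism is actually in use,
  // e.g. for tests or stats collection.
  RTC_LOG(LS_INFO) << "RFC 8888 feedback reports: "
                   << transport.ReceivedCongestionControlFeedbackCount()
                   << ", transport-cc feedback reports: "
                   << transport.ReceivedTransportCcFeedbackCount();
}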
+ UpdateCongestedState(); +} + void RtpTransportControllerSend::OnRemoteNetworkEstimate( NetworkStateEstimate estimate) { RTC_DCHECK_RUN_ON(&sequence_checker_); - if (event_log_) { - event_log_->Log(std::make_unique( - estimate.link_capacity_lower, estimate.link_capacity_upper)); - } - estimate.update_time = Timestamp::Millis(clock_->TimeInMilliseconds()); + estimate.update_time = Timestamp::Millis(env_.clock().TimeInMilliseconds()); if (controller_) PostUpdates(controller_->OnNetworkStateEstimate(estimate)); } @@ -576,7 +713,7 @@ void RtpTransportControllerSend::MaybeCreateControllers() { control_handler_ = std::make_unique(); initial_config_.constraints.at_time = - Timestamp::Millis(clock_->TimeInMilliseconds()); + Timestamp::Millis(env_.clock().TimeInMilliseconds()); initial_config_.stream_based_config = streams_config_; // TODO(srte): Use fallback controller if no feedback is available. @@ -593,6 +730,18 @@ void RtpTransportControllerSend::MaybeCreateControllers() { StartProcessPeriodicTasks(); } +void RtpTransportControllerSend::UpdateNetworkAvailability() { + if (!controller_) { + return; + } + NetworkAvailability msg; + msg.at_time = Timestamp::Millis(env_.clock().TimeInMilliseconds()); + msg.network_available = network_available_; + control_handler_->SetNetworkAvailability(network_available_); + PostUpdates(controller_->OnNetworkAvailability(msg)); + UpdateControlState(); +} + void RtpTransportControllerSend::UpdateInitialConstraints( TargetRateConstraints new_contraints) { if (!new_contraints.starting_rate) @@ -627,14 +776,15 @@ void RtpTransportControllerSend::StartProcessPeriodicTasks() { void RtpTransportControllerSend::UpdateControllerWithTimeInterval() { RTC_DCHECK(controller_); ProcessInterval msg; - msg.at_time = Timestamp::Millis(clock_->TimeInMilliseconds()); + msg.at_time = Timestamp::Millis(env_.clock().TimeInMilliseconds()); if (add_pacing_to_cwin_) msg.pacer_queue = pacer_.QueueSizeData(); PostUpdates(controller_->OnProcessInterval(msg)); } void RtpTransportControllerSend::UpdateStreamsConfig() { - streams_config_.at_time = Timestamp::Millis(clock_->TimeInMilliseconds()); + streams_config_.at_time = + Timestamp::Millis(env_.clock().TimeInMilliseconds()); if (controller_) PostUpdates(controller_->OnStreamsConfig(streams_config_)); } @@ -659,7 +809,7 @@ void RtpTransportControllerSend::PostUpdates(NetworkControlUpdate update) { void RtpTransportControllerSend::OnReport( Timestamp receive_time, - rtc::ArrayView report_blocks) { + ArrayView report_blocks) { RTC_DCHECK_RUN_ON(&sequence_checker_); if (report_blocks.empty()) return; diff --git a/call/rtp_transport_controller_send.h b/call/rtp_transport_controller_send.h index 1aace1ce65..980856ea3d 100644 --- a/call/rtp_transport_controller_send.h +++ b/call/rtp_transport_controller_send.h @@ -11,20 +11,33 @@ #ifndef CALL_RTP_TRANSPORT_CONTROLLER_SEND_H_ #define CALL_RTP_TRANSPORT_CONTROLLER_SEND_H_ -#include +#include +#include #include #include +#include #include #include #include "absl/strings/string_view.h" -#include "api/network_state_predictor.h" +#include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/fec_controller.h" +#include "api/frame_transformer_interface.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" -#include "api/task_queue/task_queue_factory.h" +#include "api/transport/bandwidth_estimation_settings.h" +#include "api/transport/bitrate_settings.h" #include 
"api/transport/network_control.h" +#include "api/transport/network_types.h" #include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "call/rtp_bitrate_configurator.h" +#include "call/rtp_config.h" #include "call/rtp_transport_config.h" #include "call/rtp_transport_controller_send_interface.h" #include "call/rtp_video_sender.h" @@ -32,26 +45,25 @@ #include "modules/congestion_controller/rtp/transport_feedback_adapter.h" #include "modules/congestion_controller/rtp/transport_feedback_demuxer.h" #include "modules/pacing/packet_router.h" -#include "modules/pacing/rtp_packet_pacer.h" #include "modules/pacing/task_queue_paced_sender.h" +#include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/network_route.h" -#include "rtc_base/race_checker.h" -#include "rtc_base/task_queue.h" +#include "rtc_base/rate_limiter.h" #include "rtc_base/task_utils/repeating_task.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { -class Clock; class FrameEncryptorInterface; -class RtcEventLog; class RtpTransportControllerSend final : public RtpTransportControllerSendInterface, public NetworkLinkRtcpObserver, - public TransportFeedbackObserver, public NetworkStateEstimateObserver { public: - RtpTransportControllerSend(Clock* clock, const RtpTransportConfig& config); + explicit RtpTransportControllerSend(const RtpTransportConfig& config); ~RtpTransportControllerSend() override; RtpTransportControllerSend(const RtpTransportControllerSend&) = delete; @@ -67,21 +79,23 @@ class RtpTransportControllerSend final int rtcp_report_interval_ms, Transport* send_transport, const RtpSenderObservers& observers, - RtcEventLog* event_log, std::unique_ptr fec_controller, const RtpSenderFrameEncryptionConfig& frame_encryption_config, - rtc::scoped_refptr frame_transformer) override; + scoped_refptr frame_transformer) override; void DestroyRtpVideoSender( RtpVideoSenderInterface* rtp_video_sender) override; // Implements RtpTransportControllerSendInterface + void RegisterSendingRtpStream(RtpRtcpInterface& rtp_module) override; + void DeRegisterSendingRtpStream(RtpRtcpInterface& rtp_module) override; PacketRouter* packet_router() override; NetworkStateEstimateObserver* network_state_estimate_observer() override; - TransportFeedbackObserver* transport_feedback_observer() override; RtpPacketSender* packet_sender() override; void SetAllocatedSendBitrateLimits(BitrateAllocationLimits limits) override; + void ReconfigureBandwidthEstimation( + const BandwidthEstimationSettings& settings) override; void SetPacingFactor(float pacing_factor) override; void SetQueueTimeLimit(int limit_ms) override; @@ -89,13 +103,13 @@ class RtpTransportControllerSend final void RegisterTargetTransferRateObserver( TargetTransferRateObserver* observer) override; void OnNetworkRouteChanged(absl::string_view transport_name, - const rtc::NetworkRoute& network_route) override; + const NetworkRoute& network_route) override; void OnNetworkAvailability(bool network_available) override; NetworkLinkRtcpObserver* GetRtcpObserver() override; int64_t GetPacerQueuingDelayMs() const override; - absl::optional GetFirstPacketTime() const override; + std::optional GetFirstPacketTime() const override; void EnablePeriodicAlrProbing(bool enable) override; - void OnSentPacket(const 
rtc::SentPacket& sent_packet) override; + void OnSentPacket(const SentPacketInfo& sent_packet) override; void OnReceivedPacket(const ReceivedPacket& packet_msg) override; void SetSdpBitrateParameters(const BitrateConstraints& constraints) override; @@ -112,51 +126,75 @@ class RtpTransportControllerSend final void OnReceiverEstimatedMaxBitrate(Timestamp receive_time, DataRate bitrate) override; void OnReport(Timestamp receive_time, - rtc::ArrayView report_blocks) override; + ArrayView report_blocks) override; void OnRttUpdate(Timestamp receive_time, TimeDelta rtt) override; void OnTransportFeedback(Timestamp receive_time, const rtcp::TransportFeedback& feedback) override; - - // Implements TransportFeedbackObserver interface - void OnAddPacket(const RtpPacketSendInfo& packet_info) override; + void OnCongestionControlFeedback( + Timestamp receive_time, + const rtcp::CongestionControlFeedback& feedback) override; // Implements NetworkStateEstimateObserver interface void OnRemoteNetworkEstimate(NetworkStateEstimate estimate) override; + NetworkControllerInterface* GetNetworkController() override { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return controller_.get(); + } + + // Called once it's known that the remote end supports RFC 8888. + void EnableCongestionControlFeedbackAccordingToRfc8888() override; + + int ReceivedCongestionControlFeedbackCount() const override { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return feedback_count_; + } + int ReceivedTransportCcFeedbackCount() const override { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return transport_cc_feedback_count_; + } + private: void MaybeCreateControllers() RTC_RUN_ON(sequence_checker_); + void HandleTransportPacketsFeedback(const TransportPacketsFeedback& feedback) + RTC_RUN_ON(sequence_checker_); + void UpdateNetworkAvailability() RTC_RUN_ON(sequence_checker_); void UpdateInitialConstraints(TargetRateConstraints new_contraints) RTC_RUN_ON(sequence_checker_); void StartProcessPeriodicTasks() RTC_RUN_ON(sequence_checker_); void UpdateControllerWithTimeInterval() RTC_RUN_ON(sequence_checker_); - absl::optional ApplyOrLiftRelayCap(bool is_relayed); - bool IsRelevantRouteChange(const rtc::NetworkRoute& old_route, - const rtc::NetworkRoute& new_route) const; + std::optional ApplyOrLiftRelayCap(bool is_relayed); + bool IsRelevantRouteChange(const NetworkRoute& old_route, + const NetworkRoute& new_route) const; void UpdateBitrateConstraints(const BitrateConstraints& updated); void UpdateStreamsConfig() RTC_RUN_ON(sequence_checker_); void PostUpdates(NetworkControlUpdate update) RTC_RUN_ON(sequence_checker_); void UpdateControlState() RTC_RUN_ON(sequence_checker_); void UpdateCongestedState() RTC_RUN_ON(sequence_checker_); - absl::optional GetCongestedStateUpdate() const + std::optional GetCongestedStateUpdate() const RTC_RUN_ON(sequence_checker_); - void ProcessSentPacket(const rtc::SentPacket& sent_packet) + + // Called by packet router just before packet is sent to the RTP modules. 
+ void NotifyBweOfPacedSentPacket(const RtpPacketToSend& packet, + const PacedPacketInfo& pacing_info); + void ProcessSentPacket(const SentPacketInfo& sent_packet) RTC_RUN_ON(sequence_checker_); void ProcessSentPacketUpdates(NetworkControlUpdate updates) RTC_RUN_ON(sequence_checker_); - Clock* const clock_; - RtcEventLog* const event_log_; - TaskQueueFactory* const task_queue_factory_; + const Environment env_; SequenceChecker sequence_checker_; TaskQueueBase* task_queue_; PacketRouter packet_router_; + std::vector> video_rtp_senders_ RTC_GUARDED_BY(&sequence_checker_); RtpBitrateConfigurator bitrate_configurator_; - std::map network_routes_ + std::map network_routes_ RTC_GUARDED_BY(sequence_checker_); + BandwidthEstimationSettings bwe_settings_ RTC_GUARDED_BY(sequence_checker_); bool pacer_started_ RTC_GUARDED_BY(sequence_checker_); TaskQueuePacedSender pacer_; @@ -192,6 +230,8 @@ class RtpTransportControllerSend final const bool reset_feedback_on_route_change_; const bool add_pacing_to_cwin_; + const bool reset_bwe_on_adapter_id_change_; + FieldTrialParameter relay_bandwidth_cap_; size_t transport_overhead_bytes_per_packet_ RTC_GUARDED_BY(sequence_checker_); @@ -202,13 +242,17 @@ class RtpTransportControllerSend final DataSize congestion_window_size_ RTC_GUARDED_BY(sequence_checker_); bool is_congested_ RTC_GUARDED_BY(sequence_checker_); + bool transport_maybe_support_ecn_ = + false; // True if RFC8888 has been negotiated. + bool sending_packets_as_ect1_ = false; + // Count of feedback messages received. + int feedback_count_ RTC_GUARDED_BY(sequence_checker_) = 0; + int transport_cc_feedback_count_ RTC_GUARDED_BY(sequence_checker_) = 0; // Protected by internal locks. RateLimiter retransmission_rate_limiter_; ScopedTaskSafety safety_; - - const FieldTrialsView& field_trials_; }; } // namespace webrtc diff --git a/call/rtp_transport_controller_send_factory.h b/call/rtp_transport_controller_send_factory.h index 6349302e45..2a1c2c8ae7 100644 --- a/call/rtp_transport_controller_send_factory.h +++ b/call/rtp_transport_controller_send_factory.h @@ -12,20 +12,19 @@ #define CALL_RTP_TRANSPORT_CONTROLLER_SEND_FACTORY_H_ #include -#include +#include "call/rtp_transport_config.h" #include "call/rtp_transport_controller_send.h" #include "call/rtp_transport_controller_send_factory_interface.h" +#include "call/rtp_transport_controller_send_interface.h" namespace webrtc { class RtpTransportControllerSendFactory : public RtpTransportControllerSendFactoryInterface { public: std::unique_ptr Create( - const RtpTransportConfig& config, - Clock* clock) override { - RTC_CHECK(config.trials); - return std::make_unique(clock, config); + const RtpTransportConfig& config) override { + return std::make_unique(config); } virtual ~RtpTransportControllerSendFactory() {} diff --git a/call/rtp_transport_controller_send_factory_interface.h b/call/rtp_transport_controller_send_factory_interface.h index 0f4c36c221..8683a34c9e 100644 --- a/call/rtp_transport_controller_send_factory_interface.h +++ b/call/rtp_transport_controller_send_factory_interface.h @@ -20,11 +20,10 @@ namespace webrtc { // controller. 
class RtpTransportControllerSendFactoryInterface { public: - virtual std::unique_ptr Create( - const RtpTransportConfig& config, - Clock* clock) = 0; + virtual ~RtpTransportControllerSendFactoryInterface() = default; - virtual ~RtpTransportControllerSendFactoryInterface() {} + virtual std::unique_ptr Create( + const RtpTransportConfig& config) = 0; }; } // namespace webrtc #endif // CALL_RTP_TRANSPORT_CONTROLLER_SEND_FACTORY_INTERFACE_H_ diff --git a/call/rtp_transport_controller_send_interface.h b/call/rtp_transport_controller_send_interface.h index 349fe68039..1c7b9aa7f7 100644 --- a/call/rtp_transport_controller_send_interface.h +++ b/call/rtp_transport_controller_send_interface.h @@ -15,38 +15,36 @@ #include #include -#include -#include +#include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/crypto/crypto_options.h" #include "api/fec_controller.h" #include "api/frame_transformer_interface.h" -#include "api/rtc_event_log/rtc_event_log.h" +#include "api/rtp_packet_sender.h" +#include "api/scoped_refptr.h" +#include "api/transport/bandwidth_estimation_settings.h" #include "api/transport/bitrate_settings.h" +#include "api/transport/network_control.h" +#include "api/transport/network_types.h" #include "api/units/timestamp.h" #include "call/rtp_config.h" #include "common_video/frame_counts.h" #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" -#include "modules/rtp_rtcp/include/rtp_packet_sender.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "rtc_base/network_route.h" -namespace rtc { -struct SentPacket; -struct NetworkRoute; -class TaskQueue; -} // namespace rtc namespace webrtc { +struct SentPacketInfo; class FrameEncryptorInterface; class TargetTransferRateObserver; class Transport; class PacketRouter; class RtpVideoSenderInterface; class RtpPacketSender; +class RtpRtcpInterface; struct RtpSenderObservers { RtcpRttStats* rtcp_rtt_stats; @@ -57,7 +55,6 @@ struct RtpSenderObservers { BitrateStatisticsObserver* bitrate_observer; FrameCountObserver* frame_count_observer; RtcpPacketTypeCounterObserver* rtcp_type_observer; - SendSideDelayObserver* send_delay_observer; SendPacketObserver* send_packet_observer; }; @@ -102,15 +99,19 @@ class RtpTransportControllerSendInterface { int rtcp_report_interval_ms, Transport* send_transport, const RtpSenderObservers& observers, - RtcEventLog* event_log, std::unique_ptr fec_controller, const RtpSenderFrameEncryptionConfig& frame_encryption_config, - rtc::scoped_refptr frame_transformer) = 0; + scoped_refptr frame_transformer) = 0; virtual void DestroyRtpVideoSender( RtpVideoSenderInterface* rtp_video_sender) = 0; + // Register a specific RTP stream as sending. This means that the pacer and + // packet router can send packets using this RTP stream. + virtual void RegisterSendingRtpStream(RtpRtcpInterface& rtp_module) = 0; + // Pacer and PacketRouter stop using this RTP stream. 
+ virtual void DeRegisterSendingRtpStream(RtpRtcpInterface& rtp_module) = 0; + virtual NetworkStateEstimateObserver* network_state_estimate_observer() = 0; - virtual TransportFeedbackObserver* transport_feedback_observer() = 0; virtual RtpPacketSender* packet_sender() = 0; @@ -119,26 +120,28 @@ class RtpTransportControllerSendInterface { virtual void SetAllocatedSendBitrateLimits( BitrateAllocationLimits limits) = 0; + virtual void ReconfigureBandwidthEstimation( + const BandwidthEstimationSettings& settings) = 0; + virtual void SetPacingFactor(float pacing_factor) = 0; virtual void SetQueueTimeLimit(int limit_ms) = 0; virtual StreamFeedbackProvider* GetStreamFeedbackProvider() = 0; virtual void RegisterTargetTransferRateObserver( TargetTransferRateObserver* observer) = 0; - virtual void OnNetworkRouteChanged( - absl::string_view transport_name, - const rtc::NetworkRoute& network_route) = 0; + virtual void OnNetworkRouteChanged(absl::string_view transport_name, + const NetworkRoute& network_route) = 0; virtual void OnNetworkAvailability(bool network_available) = 0; virtual NetworkLinkRtcpObserver* GetRtcpObserver() = 0; virtual int64_t GetPacerQueuingDelayMs() const = 0; - virtual absl::optional GetFirstPacketTime() const = 0; + virtual std::optional GetFirstPacketTime() const = 0; virtual void EnablePeriodicAlrProbing(bool enable) = 0; // Called when a packet has been sent. // The call should arrive on the network thread, but may not in all cases // (some tests don't adhere to this). Implementations today should not block // the calling thread or make assumptions about the thread context. - virtual void OnSentPacket(const rtc::SentPacket& sent_packet) = 0; + virtual void OnSentPacket(const SentPacketInfo& sent_packet) = 0; virtual void OnReceivedPacket(const ReceivedPacket& received_packet) = 0; @@ -154,6 +157,14 @@ class RtpTransportControllerSendInterface { virtual void IncludeOverheadInPacedSender() = 0; virtual void EnsureStarted() = 0; + virtual NetworkControllerInterface* GetNetworkController() = 0; + + // Called once it's known that the remote end supports RFC 8888. 
+ virtual void EnableCongestionControlFeedbackAccordingToRfc8888() = 0; + // Count of RFC8888 feedback reports received + virtual int ReceivedCongestionControlFeedbackCount() const = 0; + // Count of transport-cc feedback reports received + virtual int ReceivedTransportCcFeedbackCount() const = 0; }; } // namespace webrtc diff --git a/call/rtp_video_sender.cc b/call/rtp_video_sender.cc index de50f7c73e..4943443db1 100644 --- a/call/rtp_video_sender.cc +++ b/call/rtp_video_sender.cc @@ -11,27 +11,65 @@ #include "call/rtp_video_sender.h" #include +#include +#include +#include +#include #include +#include #include #include +#include #include "absl/algorithm/container.h" +#include "absl/base/nullability.h" #include "absl/strings/match.h" #include "absl/strings/string_view.h" #include "api/array_view.h" -#include "api/task_queue/task_queue_factory.h" -#include "api/transport/field_trial_based_config.h" +#include "api/call/bitrate_allocation.h" +#include "api/crypto/crypto_options.h" +#include "api/environment/environment.h" +#include "api/fec_controller.h" +#include "api/field_trials_view.h" +#include "api/frame_transformer_interface.h" +#include "api/rtp_headers.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/frequency.h" #include "api/units/time_delta.h" +#include "api/video/encoded_image.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_layers_allocation.h" #include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" +#include "call/rtp_config.h" +#include "call/rtp_payload_params.h" #include "call/rtp_transport_controller_send_interface.h" +#include "common_video/frame_counts.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/include/module_fec_types.h" #include "modules/pacing/packet_router.h" +#include "modules/rtp_rtcp/include/flexfec_sender.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/rtp_rtcp/source/rtp_sender.h" +#include "modules/rtp_rtcp/source/rtp_sender_video.h" +#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" +#include "modules/rtp_rtcp/source/ulpfec_generator.h" +#include "modules/rtp_rtcp/source/video_fec_generator.h" #include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/task_queue.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/trace_event.h" namespace webrtc { @@ -125,11 +163,10 @@ bool ShouldDisableRedAndUlpfec(bool flexfec_enabled, // TODO(brandtr): Update this function when we support multistream protection. std::unique_ptr MaybeCreateFecGenerator( - Clock* clock, + const Environment& env, const RtpConfig& rtp, const std::map& suspended_ssrcs, - int simulcast_index, - const FieldTrialsView& trials) { + int simulcast_index) { // If flexfec is configured that takes priority. 
if (rtp.flexfec.payload_type >= 0) { RTC_DCHECK_GE(rtp.flexfec.payload_type, 0); @@ -170,16 +207,16 @@ std::unique_ptr MaybeCreateFecGenerator( RTC_DCHECK_EQ(1U, rtp.flexfec.protected_media_ssrcs.size()); return std::make_unique( - rtp.flexfec.payload_type, rtp.flexfec.ssrc, + env, rtp.flexfec.payload_type, rtp.flexfec.ssrc, rtp.flexfec.protected_media_ssrcs[0], rtp.mid, rtp.extensions, - RTPSender::FecExtensionSizes(), rtp_state, clock); + RTPSender::FecExtensionSizes(), rtp_state); } else if (rtp.ulpfec.red_payload_type >= 0 && rtp.ulpfec.ulpfec_payload_type >= 0 && !ShouldDisableRedAndUlpfec(/*flexfec_enabled=*/false, rtp, - trials)) { + env.field_trials())) { // Flexfec not configured, but ulpfec is and is not disabled. - return std::make_unique( - rtp.ulpfec.red_payload_type, rtp.ulpfec.ulpfec_payload_type, clock); + return std::make_unique(env, rtp.ulpfec.red_payload_type, + rtp.ulpfec.ulpfec_payload_type); } // Not a single FEC is given. @@ -187,25 +224,20 @@ std::unique_ptr MaybeCreateFecGenerator( } std::vector CreateRtpStreamSenders( - Clock* clock, + const Environment& env, const RtpConfig& rtp_config, const RtpSenderObservers& observers, int rtcp_report_interval_ms, Transport* send_transport, RtpTransportControllerSendInterface* transport, const std::map& suspended_ssrcs, - RtcEventLog* event_log, RateLimiter* retransmission_rate_limiter, FrameEncryptorInterface* frame_encryptor, const CryptoOptions& crypto_options, - rtc::scoped_refptr frame_transformer, - const FieldTrialsView& trials, - TaskQueueFactory* task_queue_factory) { + scoped_refptr frame_transformer) { RTC_DCHECK_GT(rtp_config.ssrcs.size(), 0); - RTC_DCHECK(task_queue_factory); RtpRtcpInterface::Configuration configuration; - configuration.clock = clock; configuration.audio = false; configuration.receiver_only = false; configuration.outgoing_transport = send_transport; @@ -215,8 +247,6 @@ std::vector CreateRtpStreamSenders( configuration.network_link_rtcp_observer = transport->GetRtcpObserver(); configuration.network_state_estimate_observer = transport->network_state_estimate_observer(); - configuration.transport_feedback_callback = - transport->transport_feedback_observer(); configuration.rtt_stats = observers.rtcp_rtt_stats; configuration.rtcp_packet_type_counter_observer = observers.rtcp_type_observer; @@ -224,17 +254,16 @@ std::vector CreateRtpStreamSenders( observers.report_block_data_observer; configuration.paced_sender = transport->packet_sender(); configuration.send_bitrate_observer = observers.bitrate_observer; - configuration.send_side_delay_observer = observers.send_delay_observer; configuration.send_packet_observer = observers.send_packet_observer; - configuration.event_log = event_log; - configuration.retransmission_rate_limiter = retransmission_rate_limiter; + if (env.field_trials().IsDisabled("WebRTC-DisableRtxRateLimiter")) { + configuration.retransmission_rate_limiter = retransmission_rate_limiter; + } configuration.rtp_stats_callback = observers.rtp_stats; configuration.frame_encryptor = frame_encryptor; configuration.require_frame_encryption = crypto_options.sframe.require_frame_encryption; configuration.extmap_allow_mixed = rtp_config.extmap_allow_mixed; configuration.rtcp_report_interval_ms = rtcp_report_interval_ms; - configuration.field_trials = &trials; configuration.enable_send_packet_batching = rtp_config.enable_send_packet_batching; @@ -253,7 +282,7 @@ std::vector CreateRtpStreamSenders( configuration.local_media_ssrc = rtp_config.ssrcs[i]; std::unique_ptr fec_generator = - 
MaybeCreateFecGenerator(clock, rtp_config, suspended_ssrcs, i, trials); + MaybeCreateFecGenerator(env, rtp_config, suspended_ssrcs, i); configuration.fec_generator = fec_generator.get(); configuration.rtx_send_ssrc = @@ -265,20 +294,19 @@ std::vector CreateRtpStreamSenders( configuration.need_rtp_packet_infos = rtp_config.lntf.enabled; - std::unique_ptr rtp_rtcp( - ModuleRtpRtcpImpl2::Create(configuration)); + auto rtp_rtcp = std::make_unique(env, configuration); rtp_rtcp->SetSendingStatus(false); rtp_rtcp->SetSendingMediaStatus(false); rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound); // Set NACK. rtp_rtcp->SetStorePacketsStatus(true, kMinSendSidePacketHistorySize); - video_config.clock = configuration.clock; + video_config.clock = &env.clock(); video_config.rtp_sender = rtp_rtcp->RtpSender(); video_config.frame_encryptor = frame_encryptor; video_config.require_frame_encryption = crypto_options.sframe.require_frame_encryption; - video_config.field_trials = &trials; + video_config.field_trials = &env.field_trials(); video_config.enable_retransmit_all_layers = !video_config.field_trials->IsDisabled( "WebRTC-Video-EnableRetransmitAllLayers"); @@ -286,8 +314,8 @@ std::vector CreateRtpStreamSenders( const bool using_flexfec = fec_generator && fec_generator->GetFecType() == VideoFecGenerator::FecType::kFlexFec; - const bool should_disable_red_and_ulpfec = - ShouldDisableRedAndUlpfec(using_flexfec, rtp_config, trials); + const bool should_disable_red_and_ulpfec = ShouldDisableRedAndUlpfec( + using_flexfec, rtp_config, env.field_trials()); if (!should_disable_red_and_ulpfec && rtp_config.ulpfec.red_payload_type != -1) { video_config.red_payload_type = rtp_config.ulpfec.red_payload_type; @@ -297,7 +325,7 @@ std::vector CreateRtpStreamSenders( video_config.fec_overhead_bytes = fec_generator->MaxPacketOverhead(); } video_config.frame_transformer = frame_transformer; - video_config.task_queue_factory = task_queue_factory; + video_config.task_queue_factory = &env.task_queue_factory(); auto sender_video = std::make_unique(video_config); rtp_streams.emplace_back(std::move(rtp_rtcp), std::move(sender_video), std::move(fec_generator)); @@ -305,11 +333,13 @@ std::vector CreateRtpStreamSenders( return rtp_streams; } -absl::optional GetVideoCodecType(const RtpConfig& config) { - if (config.raw_payload) { - return absl::nullopt; +std::optional GetVideoCodecType(const RtpConfig& config, + size_t simulcast_index) { + auto stream_config = config.GetStreamConfig(simulcast_index); + if (stream_config.raw_payload) { + return std::nullopt; } - return PayloadStringToCodecType(config.payload_name); + return PayloadStringToCodecType(stream_config.payload_name); } bool TransportSeqNumExtensionConfigured(const RtpConfig& config) { return absl::c_any_of(config.extensions, [](const RtpExtension& ext) { @@ -361,7 +391,8 @@ bool IsFirstFrameOfACodedVideoSequence( } // namespace RtpVideoSender::RtpVideoSender( - Clock* clock, + const Environment& env, + TaskQueueBase* absl_nonnull transport_queue, const std::map& suspended_ssrcs, const std::map& states, const RtpConfig& rtp_config, @@ -369,43 +400,42 @@ RtpVideoSender::RtpVideoSender( Transport* send_transport, const RtpSenderObservers& observers, RtpTransportControllerSendInterface* transport, - RtcEventLog* event_log, RateLimiter* retransmission_limiter, std::unique_ptr fec_controller, FrameEncryptorInterface* frame_encryptor, const CryptoOptions& crypto_options, - rtc::scoped_refptr frame_transformer, - const FieldTrialsView& field_trials, - TaskQueueFactory* 
task_queue_factory) - : field_trials_(field_trials), + scoped_refptr frame_transformer) + : env_(env), use_frame_rate_for_overhead_(absl::StartsWith( - field_trials_.Lookup("WebRTC-Video-UseFrameRateForOverhead"), + env.field_trials().Lookup("WebRTC-Video-UseFrameRateForOverhead"), "Enabled")), has_packet_feedback_(TransportSeqNumExtensionConfigured(rtp_config)), + transport_queue_(*transport_queue), active_(false), fec_controller_(std::move(fec_controller)), fec_allowed_(true), - rtp_streams_(CreateRtpStreamSenders(clock, + rtp_streams_(CreateRtpStreamSenders(env, rtp_config, observers, rtcp_report_interval_ms, send_transport, transport, suspended_ssrcs, - event_log, retransmission_limiter, frame_encryptor, crypto_options, - std::move(frame_transformer), - field_trials_, - task_queue_factory)), + std::move(frame_transformer))), rtp_config_(rtp_config), - codec_type_(GetVideoCodecType(rtp_config)), transport_(transport), + independent_frame_ids_( + env.field_trials().IsDisabled("WebRTC-GenericDescriptorAuth")), transport_overhead_bytes_per_packet_(0), encoder_target_rate_bps_(0), frame_counts_(rtp_config.ssrcs.size()), - frame_count_observer_(observers.frame_count_observer) { + frame_count_observer_(observers.frame_count_observer), + safety_(PendingTaskSafetyFlag::CreateAttachedToTaskQueue( + /*alive=*/true, + transport_queue)) { transport_checker_.Detach(); RTC_DCHECK_EQ(rtp_config_.ssrcs.size(), rtp_streams_.size()); if (has_packet_feedback_) @@ -419,7 +449,7 @@ RtpVideoSender::RtpVideoSender( state = &it->second; shared_frame_id_ = std::max(shared_frame_id_, state->shared_frame_id); } - params_.push_back(RtpPayloadParams(ssrc, state, field_trials_)); + params_.push_back(RtpPayloadParams(ssrc, state, env.field_trials())); } // RTP/RTCP initialization. @@ -442,12 +472,14 @@ RtpVideoSender::RtpVideoSender( } bool fec_enabled = false; - for (const RtpStreamSender& stream : rtp_streams_) { + for (size_t i = 0; i < rtp_streams_.size(); i++) { + const RtpStreamSender& stream = rtp_streams_[i]; // Simulcast has one module for each layer. Set the CNAME on all modules. stream.rtp_rtcp->SetCNAME(rtp_config_.c_name.c_str()); stream.rtp_rtcp->SetMaxRtpPacketSize(rtp_config_.max_packet_size); - stream.rtp_rtcp->RegisterSendPayloadFrequency(rtp_config_.payload_type, - kVideoPayloadTypeFrequency); + stream.rtp_rtcp->RegisterSendPayloadFrequency( + rtp_config_.GetStreamConfig(i).payload_type, + kVideoPayloadTypeFrequency); if (stream.fec_generator != nullptr) { fec_enabled = true; } @@ -469,86 +501,48 @@ RtpVideoSender::RtpVideoSender( } RtpVideoSender::~RtpVideoSender() { - // TODO(bugs.webrtc.org/13517): Remove once RtpVideoSender gets deleted on the - // transport task queue. 
- transport_checker_.Detach(); - - SetActiveModulesLocked( - std::vector(rtp_streams_.size(), /*active=*/false)); - - RTC_DCHECK(!registered_for_feedback_); + RTC_DCHECK_RUN_ON(&transport_checker_); + SetActiveModulesLocked(/*sending=*/false); } -void RtpVideoSender::Stop() { +void RtpVideoSender::SetSending(bool enabled) { RTC_DCHECK_RUN_ON(&transport_checker_); MutexLock lock(&mutex_); - if (!active_) + if (enabled == active_) { return; - - const std::vector active_modules(rtp_streams_.size(), false); - SetActiveModulesLocked(active_modules); + } + SetActiveModulesLocked(/*sending=*/enabled); } -void RtpVideoSender::SetActiveModules(const std::vector& active_modules) { +void RtpVideoSender::SetActiveModulesLocked(bool sending) { RTC_DCHECK_RUN_ON(&transport_checker_); - MutexLock lock(&mutex_); - return SetActiveModulesLocked(active_modules); + if (active_ == sending) { + return; + } + active_ = sending; + for (const RtpStreamSender& stream : rtp_streams_) { + SetModuleIsActive(sending, *stream.rtp_rtcp); + } + auto* feedback_provider = transport_->GetStreamFeedbackProvider(); + if (!sending) { + feedback_provider->DeRegisterStreamFeedbackObserver(this); + } else { + feedback_provider->RegisterStreamFeedbackObserver(rtp_config_.ssrcs, this); + } } -void RtpVideoSender::SetActiveModulesLocked( - const std::vector& active_modules) { - RTC_DCHECK_RUN_ON(&transport_checker_); - RTC_CHECK_EQ(rtp_streams_.size(), active_modules.size()); - active_ = false; - for (size_t i = 0; i < active_modules.size(); ++i) { - if (active_modules[i]) { - active_ = true; - } - - RtpRtcpInterface& rtp_module = *rtp_streams_[i].rtp_rtcp; - const bool was_active = rtp_module.Sending(); - const bool should_be_active = active_modules[i]; - - // Sends a kRtcpByeCode when going from true to false. - rtp_module.SetSendingStatus(active_modules[i]); - - if (was_active && !should_be_active) { - // Disabling media, remove from packet router map to reduce size and - // prevent any stray packets in the pacer from asynchronously arriving - // to a disabled module. - transport_->packet_router()->RemoveSendRtpModule(&rtp_module); - - // Clear the pacer queue of any packets pertaining to this module. - transport_->packet_sender()->RemovePacketsForSsrc(rtp_module.SSRC()); - if (rtp_module.RtxSsrc().has_value()) { - transport_->packet_sender()->RemovePacketsForSsrc( - *rtp_module.RtxSsrc()); - } - if (rtp_module.FlexfecSsrc().has_value()) { - transport_->packet_sender()->RemovePacketsForSsrc( - *rtp_module.FlexfecSsrc()); - } - } - - // If set to false this module won't send media. - rtp_module.SetSendingMediaStatus(active_modules[i]); - - if (!was_active && should_be_active) { - // Turning on media, register with packet router. 
- transport_->packet_router()->AddSendRtpModule(&rtp_module, - /*remb_candidate=*/true); - } +void RtpVideoSender::SetModuleIsActive(bool sending, + RtpRtcpInterface& rtp_module) { + if (rtp_module.SendingMedia() == sending) { + return; } - if (!active_) { - auto* feedback_provider = transport_->GetStreamFeedbackProvider(); - if (registered_for_feedback_) { - feedback_provider->DeRegisterStreamFeedbackObserver(this); - registered_for_feedback_ = false; - } - } else if (!registered_for_feedback_) { - auto* feedback_provider = transport_->GetStreamFeedbackProvider(); - feedback_provider->RegisterStreamFeedbackObserver(rtp_config_.ssrcs, this); - registered_for_feedback_ = true; + + rtp_module.SetSendingStatus(sending); + rtp_module.SetSendingMediaStatus(sending); + if (sending) { + transport_->RegisterSendingRtpStream(rtp_module); + } else { + transport_->DeRegisterSendingRtpStream(rtp_module); } } @@ -577,7 +571,7 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage( RTC_DCHECK_LT(simulcast_index, rtp_streams_.size()); uint32_t rtp_timestamp = - encoded_image.Timestamp() + + encoded_image.RtpTimestamp() + rtp_streams_[simulcast_index].rtp_rtcp->StartTimestamp(); // RTCPSender has it's own copy of the timestamp offset, added in @@ -585,8 +579,8 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage( // TODO(nisse): Delete RTCPSender:timestamp_offset_, and see if we can confine // knowledge of the offset to a single place. if (!rtp_streams_[simulcast_index].rtp_rtcp->OnSendingRtpFrame( - encoded_image.Timestamp(), encoded_image.capture_time_ms_, - rtp_config_.payload_type, + encoded_image.RtpTimestamp(), encoded_image.capture_time_ms_, + rtp_config_.GetStreamConfig(simulcast_index).payload_type, encoded_image._frameType == VideoFrameType::kVideoFrameKey)) { // The payload router could be active but this module isn't sending. return Result(Result::ERROR_SEND_FAILED); @@ -610,7 +604,7 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage( RTPSenderVideo& sender_video = *rtp_streams_[simulcast_index].sender_video; if (codec_specific_info && codec_specific_info->template_structure) { sender_video.SetVideoStructure(&*codec_specific_info->template_structure); - } else if (absl::optional structure = + } else if (std::optional structure = params_[simulcast_index].GenericStructure( codec_specific_info)) { sender_video.SetVideoStructure(&*structure); @@ -619,11 +613,18 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage( } } + std::optional frame_id; + if (!independent_frame_ids_) { + frame_id = shared_frame_id_; + } + bool send_result = rtp_streams_[simulcast_index].sender_video->SendEncodedImage( - rtp_config_.payload_type, codec_type_, rtp_timestamp, encoded_image, + rtp_config_.GetStreamConfig(simulcast_index).payload_type, + GetVideoCodecType(rtp_config_, simulcast_index), rtp_timestamp, + encoded_image, params_[simulcast_index].GetRtpVideoHeader( - encoded_image, codec_specific_info, shared_frame_id_), + encoded_image, codec_specific_info, frame_id), expected_retransmission_time); if (frame_count_observer_) { FrameCounts& counts = frame_counts_[simulcast_index]; @@ -652,7 +653,7 @@ void RtpVideoSender::OnBitrateAllocationUpdated( // If spatial scalability is enabled, it is covered by a single stream. 
rtp_streams_[0].rtp_rtcp->SetVideoBitrateAllocation(bitrate); } else { - std::vector> layer_bitrates = + std::vector> layer_bitrates = bitrate.GetSimulcastAllocations(); // Simulcast is in use, split the VideoBitrateAllocation into one struct // per rtp stream, moving over the temporal layer allocation. @@ -671,6 +672,7 @@ void RtpVideoSender::OnBitrateAllocationUpdated( } } } + void RtpVideoSender::OnVideoLayersAllocationUpdated( const VideoLayersAllocation& allocation) { MutexLock lock(&mutex_); @@ -680,15 +682,28 @@ void RtpVideoSender::OnVideoLayersAllocationUpdated( stream_allocation.rtp_stream_index = i; rtp_streams_[i].sender_video->SetVideoLayersAllocation( std::move(stream_allocation)); - // Only send video frames on the rtp module if the encoder is configured - // to send. This is to prevent stray frames to be sent after an encoder - // has been reconfigured. - rtp_streams_[i].rtp_rtcp->SetSendingMediaStatus( - absl::c_any_of(allocation.active_spatial_layers, - [&i](const VideoLayersAllocation::SpatialLayer layer) { - return layer.rtp_stream_index == static_cast(i); - })); } + + // Only send video frames on the rtp module if the encoder is configured + // to send. This is to prevent stray frames to be sent after an encoder + // has been reconfigured. + // Reconfiguration of the RtpRtcp modules must happen on the transport queue + // to avoid races with batch sending of packets. + std::vector sending(rtp_streams_.size(), false); + for (const VideoLayersAllocation::SpatialLayer& layer : + allocation.active_spatial_layers) { + if (layer.rtp_stream_index < static_cast(sending.size())) { + sending[layer.rtp_stream_index] = true; + } + } + transport_queue_.PostTask( + SafeTask(safety_.flag(), [this, sending = std::move(sending)] { + RTC_DCHECK_RUN_ON(&transport_checker_); + RTC_CHECK_EQ(sending.size(), rtp_streams_.size()); + for (size_t i = 0; i < sending.size(); ++i) { + SetModuleIsActive(sending[i], *rtp_streams_[i].rtp_rtcp); + } + })); } } @@ -711,7 +726,7 @@ DataRate RtpVideoSender::GetPostEncodeOverhead() const { void RtpVideoSender::DeliverRtcp(const uint8_t* packet, size_t length) { // Runs on a network thread. for (const RtpStreamSender& stream : rtp_streams_) - stream.rtp_rtcp->IncomingRtcpPacket(rtc::MakeArrayView(packet, length)); + stream.rtp_rtcp->IncomingRtcpPacket(MakeArrayView(packet, length)); } void RtpVideoSender::ConfigureSsrcs( @@ -745,9 +760,12 @@ void RtpVideoSender::ConfigureSsrcs( // Configure RTX payload types. RTC_DCHECK_GE(rtp_config_.rtx.payload_type, 0); - for (const RtpStreamSender& stream : rtp_streams_) { - stream.rtp_rtcp->SetRtxSendPayloadType(rtp_config_.rtx.payload_type, - rtp_config_.payload_type); + for (size_t i = 0; i < rtp_streams_.size(); ++i) { + const RtpStreamSender& stream = rtp_streams_[i]; + RtpStreamConfig stream_config = rtp_config_.GetStreamConfig(i); + RTC_DCHECK(stream_config.rtx); + stream.rtp_rtcp->SetRtxSendPayloadType(stream_config.rtx->payload_type, + stream_config.payload_type); stream.rtp_rtcp->SetRtxSendStatus(kRtxRetransmitted | kRtxRedundantPayloads); } @@ -779,11 +797,11 @@ std::map RtpVideoSender::GetRtpStates() const { // Only happens during shutdown, when RTP module is already inactive, // so OK to call fec generator here. 
if (rtp_streams_[i].fec_generator) { - absl::optional fec_state = + std::optional fec_state = rtp_streams_[i].fec_generator->GetRtpState(); if (fec_state) { - uint32_t ssrc = rtp_config_.flexfec.ssrc; - rtp_states[ssrc] = *fec_state; + uint32_t fec_ssrc = rtp_config_.flexfec.ssrc; + rtp_states[fec_ssrc] = *fec_state; } } } @@ -846,8 +864,8 @@ void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update, CalculateOverheadRate(update.target_bitrate, max_total_packet_size, packet_overhead, Frequency::Hertz(framerate)); // TODO(srte): We probably should not accept 0 payload bitrate here. - payload_bitrate_bps = rtc::saturated_cast(payload_bitrate_bps - - overhead_rate.bps()); + payload_bitrate_bps = + saturated_cast(payload_bitrate_bps - overhead_rate.bps()); } // Get the encoder target rate. It is the estimated network rate - @@ -855,7 +873,7 @@ void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update, // TODO(srte): We should multiply with 255 here. encoder_target_rate_bps_ = fec_controller_->UpdateFecRates( payload_bitrate_bps, framerate, - rtc::saturated_cast(update.packet_loss_ratio * 256), + saturated_cast(update.packet_loss_ratio * 256), loss_mask_vector_, update.round_trip_time.ms()); if (!fec_allowed_) { encoder_target_rate_bps_ = payload_bitrate_bps; @@ -906,7 +924,7 @@ uint32_t RtpVideoSender::GetProtectionBitrateBps() const { std::vector RtpVideoSender::GetSentRtpPacketInfos( uint32_t ssrc, - rtc::ArrayView sequence_numbers) const { + ArrayView sequence_numbers) const { for (const auto& rtp_stream : rtp_streams_) { if (ssrc == rtp_stream.rtp_rtcp->SSRC()) { return rtp_stream.rtp_rtcp->GetSentRtpPacketInfos(sequence_numbers); @@ -939,7 +957,7 @@ int RtpVideoSender::ProtectionRequest(const FecProtectionParams* delta_params, void RtpVideoSender::SetRetransmissionMode(int retransmission_mode) { MutexLock lock(&mutex_); for (const RtpStreamSender& stream : rtp_streams_) { - stream.sender_video->SetRetransmissionSetting(retransmission_mode); + stream.sender_video->SetRetransmissionSetting(retransmission_mode); } } @@ -1001,7 +1019,7 @@ void RtpVideoSender::OnPacketFeedbackVector( // clean up anyway. 
continue; } - rtc::ArrayView rtp_sequence_numbers(kv.second); + ArrayView rtp_sequence_numbers(kv.second); it->second->OnPacketsAcknowledged(rtp_sequence_numbers); } } diff --git a/call/rtp_video_sender.h b/call/rtp_video_sender.h index 10b0d19d05..6fce841da7 100644 --- a/call/rtp_video_sender.h +++ b/call/rtp_video_sender.h @@ -11,34 +11,44 @@ #ifndef CALL_RTP_VIDEO_SENDER_H_ #define CALL_RTP_VIDEO_SENDER_H_ +#include +#include #include #include -#include #include -#include "absl/types/optional.h" +#include "absl/base/nullability.h" #include "api/array_view.h" +#include "api/call/bitrate_allocation.h" #include "api/call/transport.h" +#include "api/crypto/crypto_options.h" +#include "api/environment/environment.h" #include "api/fec_controller.h" -#include "api/fec_controller_override.h" -#include "api/field_trials_view.h" -#include "api/rtc_event_log/rtc_event_log.h" +#include "api/frame_transformer_interface.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" -#include "api/task_queue/task_queue_factory.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/frequency.h" +#include "api/video/encoded_image.h" +#include "api/video/video_layers_allocation.h" #include "api/video_codecs/video_encoder.h" #include "call/rtp_config.h" #include "call/rtp_payload_params.h" #include "call/rtp_transport_controller_send_interface.h" #include "call/rtp_video_sender_interface.h" -#include "modules/rtp_rtcp/include/flexfec_sender.h" +#include "common_video/frame_counts.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/rtp_rtcp/source/rtp_sender.h" #include "modules/rtp_rtcp/source/rtp_sender_video.h" #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" -#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/rtp_rtcp/source/video_fec_generator.h" #include "rtc_base/rate_limiter.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -74,7 +84,8 @@ class RtpVideoSender : public RtpVideoSenderInterface, public: // Rtp modules are assumed to be sorted in simulcast index order. RtpVideoSender( - Clock* clock, + const Environment& env, + TaskQueueBase* absl_nonnull transport_queue, const std::map& suspended_ssrcs, const std::map& states, const RtpConfig& rtp_config, @@ -82,24 +93,17 @@ class RtpVideoSender : public RtpVideoSenderInterface, Transport* send_transport, const RtpSenderObservers& observers, RtpTransportControllerSendInterface* transport, - RtcEventLog* event_log, RateLimiter* retransmission_limiter, // move inside RtpTransport std::unique_ptr fec_controller, FrameEncryptorInterface* frame_encryptor, const CryptoOptions& crypto_options, // move inside RtpTransport - rtc::scoped_refptr frame_transformer, - const FieldTrialsView& field_trials, - TaskQueueFactory* task_queue_factory); + scoped_refptr frame_transformer); ~RtpVideoSender() override; RtpVideoSender(const RtpVideoSender&) = delete; RtpVideoSender& operator=(const RtpVideoSender&) = delete; - // Sets the sending status of the rtp modules and appropriately sets the - // payload router to active if any rtp modules are active. 
- void SetActiveModules(const std::vector& active_modules) - RTC_LOCKS_EXCLUDED(mutex_) override; - void Stop() RTC_LOCKS_EXCLUDED(mutex_) override; + void SetSending(bool enabled) RTC_LOCKS_EXCLUDED(mutex_) override; bool IsActive() RTC_LOCKS_EXCLUDED(mutex_) override; void OnNetworkAvailability(bool network_available) @@ -150,7 +154,7 @@ class RtpVideoSender : public RtpVideoSenderInterface, std::vector GetSentRtpPacketInfos( uint32_t ssrc, - rtc::ArrayView sequence_numbers) const + ArrayView sequence_numbers) const RTC_LOCKS_EXCLUDED(mutex_) override; // From StreamFeedbackObserver. @@ -160,7 +164,7 @@ class RtpVideoSender : public RtpVideoSenderInterface, private: bool IsActiveLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - void SetActiveModulesLocked(const std::vector& active_modules) + void SetActiveModulesLocked(bool sending) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void UpdateModuleSendingState() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void ConfigureProtection(); @@ -171,20 +175,22 @@ class RtpVideoSender : public RtpVideoSenderInterface, DataSize packet_size, DataSize overhead_per_packet, Frequency framerate) const; + void SetModuleIsActive(bool sending, RtpRtcpInterface& rtp_module) + RTC_RUN_ON(transport_checker_); - const FieldTrialsView& field_trials_; + const Environment env_; const bool use_frame_rate_for_overhead_; const bool has_packet_feedback_; // Semantically equivalent to checking for `transport_->GetWorkerQueue()` // but some tests need to be updated to call from the correct context. RTC_NO_UNIQUE_ADDRESS SequenceChecker transport_checker_; + TaskQueueBase& transport_queue_; // TODO(bugs.webrtc.org/13517): Remove mutex_ once RtpVideoSender runs on the // transport task queue. mutable Mutex mutex_; bool active_ RTC_GUARDED_BY(mutex_); - bool registered_for_feedback_ RTC_GUARDED_BY(transport_checker_) = false; const std::unique_ptr fec_controller_; bool fec_allowed_ RTC_GUARDED_BY(mutex_); @@ -193,7 +199,6 @@ class RtpVideoSender : public RtpVideoSenderInterface, const std::vector rtp_streams_; const RtpConfig rtp_config_; - const absl::optional codec_type_; RtpTransportControllerSendInterface* const transport_; // When using the generic descriptor we want all simulcast streams to share @@ -201,6 +206,7 @@ class RtpVideoSender : public RtpVideoSenderInterface, // rewrite the frame id), therefore `shared_frame_id` has to live in a place // where we are aware of all the different streams. int64_t shared_frame_id_ = 0; + const bool independent_frame_ids_; std::vector params_ RTC_GUARDED_BY(mutex_); size_t transport_overhead_bytes_per_packet_ RTC_GUARDED_BY(mutex_); @@ -216,6 +222,8 @@ class RtpVideoSender : public RtpVideoSenderInterface, // This map is set at construction time and never changed, but it's // non-trivial to make it properly const. 
std::map ssrc_to_rtp_module_; + + ScopedTaskSafety safety_; }; } // namespace webrtc diff --git a/call/rtp_video_sender_interface.h b/call/rtp_video_sender_interface.h index 3f2877155a..069a2a8b36 100644 --- a/call/rtp_video_sender_interface.h +++ b/call/rtp_video_sender_interface.h @@ -11,18 +11,19 @@ #ifndef CALL_RTP_VIDEO_SENDER_INTERFACE_H_ #define CALL_RTP_VIDEO_SENDER_INTERFACE_H_ +#include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/call/bitrate_allocation.h" #include "api/fec_controller_override.h" #include "api/video/video_layers_allocation.h" +#include "api/video_codecs/video_encoder.h" #include "call/rtp_config.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" -#include "modules/video_coding/include/video_codec_interface.h" namespace webrtc { class VideoBitrateAllocation; @@ -31,12 +32,8 @@ struct FecProtectionParams; class RtpVideoSenderInterface : public EncodedImageCallback, public FecControllerOverride { public: - // Sets the sending status of the rtp modules and appropriately sets the - // RtpVideoSender to active if any rtp modules are active. - // A module will only send packet if beeing active. - virtual void SetActiveModules(const std::vector& active_modules) = 0; - // Set the sending status of all rtp modules to inactive. - virtual void Stop() = 0; + // Sets whether or not RTP packets are allowed to be sent on this sender. + virtual void SetSending(bool enabled) = 0; virtual bool IsActive() = 0; virtual void OnNetworkAvailability(bool network_available) = 0; @@ -60,7 +57,7 @@ class RtpVideoSenderInterface : public EncodedImageCallback, size_t num_temporal_layers) = 0; virtual std::vector GetSentRtpPacketInfos( uint32_t ssrc, - rtc::ArrayView sequence_numbers) const = 0; + ArrayView sequence_numbers) const = 0; // Implements FecControllerOverride.
void SetFecAllowed(bool fec_allowed) override = 0; diff --git a/call/rtp_video_sender_unittest.cc b/call/rtp_video_sender_unittest.cc index 77d87dfc97..cee6e8d9f0 100644 --- a/call/rtp_video_sender_unittest.cc +++ b/call/rtp_video_sender_unittest.cc @@ -10,40 +10,82 @@ #include "call/rtp_video_sender.h" -#include +#include +#include +#include #include -#include -#include - -#include "absl/functional/any_invocable.h" +#include +#include + +#include "api/array_view.h" +#include "api/call/bitrate_allocation.h" +#include "api/call/transport.h" +#include "api/crypto/crypto_options.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/frame_transformer_interface.h" +#include "api/make_ref_counted.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "api/test/mock_frame_transformer.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/transport/bitrate_settings.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" +#include "api/video_codecs/video_encoder.h" +#include "call/rtp_config.h" +#include "call/rtp_transport_config.h" #include "call/rtp_transport_controller_send.h" +#include "call/rtp_transport_controller_send_interface.h" +#include "call/video_send_stream.h" +#include "common_video/frame_counts.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/rtp_rtcp/include/report_block_data.h" +#include "modules/rtp_rtcp/include/rtcp_statistics.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtcp_packet/nack.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_packet.h" +#include "modules/rtp_rtcp/source/rtp_sender_video.h" +#include "modules/video_coding/codecs/interface/common_constants.h" #include "modules/video_coding/fec_controller_default.h" #include "modules/video_coding/include/video_codec_interface.h" +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" #include "rtc_base/rate_limiter.h" +#include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" -#include "test/mock_frame_transformer.h" #include "test/mock_transport.h" #include "test/scenario/scenario.h" +#include "test/scenario/scenario_config.h" #include "test/scoped_key_value_config.h" #include "test/time_controller/simulated_time_controller.h" -#include "video/send_delay_stats.h" +#include "video/config/video_encoder_config.h" #include "video/send_statistics_proxy.h" namespace webrtc { namespace { using ::testing::_; +using ::testing::Ge; +using ::testing::IsEmpty; +using ::testing::IsNull; using ::testing::NiceMock; +using ::testing::NotNull; using ::testing::SaveArg; using ::testing::SizeIs; const int8_t kPayloadType = 96; +const int8_t kPayloadType2 = 98; const uint32_t kSsrc1 = 12345; const uint32_t kSsrc2 = 23456; const uint32_t kRtxSsrc1 = 34567; @@ -62,17 +104,14 @@ class MockRtcpIntraFrameObserver : public RtcpIntraFrameObserver { }; RtpSenderObservers CreateObservers( - RtcpRttStats* rtcp_rtt_stats, RtcpIntraFrameObserver* intra_frame_callback, 
ReportBlockDataObserver* report_block_data_observer, StreamDataCountersCallback* rtp_stats, BitrateStatisticsObserver* bitrate_observer, FrameCountObserver* frame_count_observer, - RtcpPacketTypeCounterObserver* rtcp_type_observer, - SendSideDelayObserver* send_delay_observer, - SendPacketObserver* send_packet_observer) { + RtcpPacketTypeCounterObserver* rtcp_type_observer) { RtpSenderObservers observers; - observers.rtcp_rtt_stats = rtcp_rtt_stats; + observers.rtcp_rtt_stats = nullptr; observers.intra_frame_callback = intra_frame_callback; observers.rtcp_loss_notification_observer = nullptr; observers.report_block_data_observer = report_block_data_observer; @@ -80,8 +119,7 @@ RtpSenderObservers CreateObservers( observers.bitrate_observer = bitrate_observer; observers.frame_count_observer = frame_count_observer; observers.rtcp_type_observer = rtcp_type_observer; - observers.send_delay_observer = send_delay_observer; - observers.send_packet_observer = send_packet_observer; + observers.send_packet_observer = nullptr; return observers; } @@ -97,7 +135,8 @@ VideoSendStream::Config CreateVideoSendStreamConfig( Transport* transport, const std::vector& ssrcs, const std::vector& rtx_ssrcs, - int payload_type) { + int payload_type, + ArrayView payload_types) { VideoSendStream::Config config(transport); config.rtp.ssrcs = ssrcs; config.rtp.rtx.ssrcs = rtx_ssrcs; @@ -109,6 +148,20 @@ VideoSendStream::Config CreateVideoSendStreamConfig( config.rtp.extensions.emplace_back(RtpDependencyDescriptorExtension::Uri(), kDependencyDescriptorExtensionId); config.rtp.extmap_allow_mixed = true; + + if (!payload_types.empty()) { + RTC_CHECK_EQ(payload_types.size(), ssrcs.size()); + for (size_t i = 0; i < ssrcs.size(); ++i) { + auto& stream_config = config.rtp.stream_configs.emplace_back(); + stream_config.ssrc = ssrcs[i]; + stream_config.payload_type = payload_types[i]; + if (i < rtx_ssrcs.size()) { + auto& rtx = stream_config.rtx.emplace(); + rtx.ssrc = rtx_ssrcs[i]; + rtx.payload_type = payload_types[i] + 1; + } + } + } return config; } @@ -120,43 +173,56 @@ class RtpVideoSenderTestFixture { int payload_type, const std::map& suspended_payload_states, FrameCountObserver* frame_count_observer, - rtc::scoped_refptr frame_transformer, + scoped_refptr frame_transformer, + const std::vector& payload_types, const FieldTrialsView* field_trials = nullptr) : time_controller_(Timestamp::Millis(1000000)), + env_(CreateEnvironment(&field_trials_, + field_trials, + time_controller_.GetClock(), + time_controller_.CreateTaskQueueFactory())), config_(CreateVideoSendStreamConfig(&transport_, ssrcs, rtx_ssrcs, - payload_type)), - send_delay_stats_(time_controller_.GetClock()), + payload_type, + payload_types)), bitrate_config_(GetBitrateConfig()), transport_controller_( - time_controller_.GetClock(), - RtpTransportConfig{ - .bitrate_config = bitrate_config_, - .event_log = &event_log_, - .task_queue_factory = time_controller_.GetTaskQueueFactory(), - .trials = field_trials ? field_trials : &field_trials_, - }), + RtpTransportConfig{.env = env_, .bitrate_config = bitrate_config_}), stats_proxy_(time_controller_.GetClock(), config_, VideoEncoderConfig::ContentType::kRealtimeVideo, - field_trials ? 
*field_trials : field_trials_), + env_.field_trials()), retransmission_rate_limiter_(time_controller_.GetClock(), kRetransmitWindowSizeMs) { transport_controller_.EnsureStarted(); std::map suspended_ssrcs; router_ = std::make_unique( - time_controller_.GetClock(), suspended_ssrcs, suspended_payload_states, - config_.rtp, config_.rtcp_report_interval_ms, &transport_, - CreateObservers(nullptr, &encoder_feedback_, &stats_proxy_, - &stats_proxy_, &stats_proxy_, frame_count_observer, - &stats_proxy_, &stats_proxy_, &send_delay_stats_), - &transport_controller_, &event_log_, &retransmission_rate_limiter_, - std::make_unique(time_controller_.GetClock()), - nullptr, CryptoOptions{}, frame_transformer, - field_trials ? *field_trials : field_trials_, - time_controller_.GetTaskQueueFactory()); + env_, time_controller_.GetMainThread(), suspended_ssrcs, + suspended_payload_states, config_.rtp, config_.rtcp_report_interval_ms, + &transport_, + CreateObservers(&encoder_feedback_, &stats_proxy_, &stats_proxy_, + &stats_proxy_, frame_count_observer, &stats_proxy_), + &transport_controller_, &retransmission_rate_limiter_, + std::make_unique(env_), nullptr, CryptoOptions{}, + frame_transformer); } + RtpVideoSenderTestFixture( + const std::vector& ssrcs, + const std::vector& rtx_ssrcs, + int payload_type, + const std::map& suspended_payload_states, + FrameCountObserver* frame_count_observer, + scoped_refptr frame_transformer, + const FieldTrialsView* field_trials = nullptr) + : RtpVideoSenderTestFixture(ssrcs, + rtx_ssrcs, + payload_type, + suspended_payload_states, + frame_count_observer, + frame_transformer, + /*payload_types=*/{}, + field_trials) {} RtpVideoSenderTestFixture( const std::vector& ssrcs, @@ -171,6 +237,7 @@ class RtpVideoSenderTestFixture { suspended_payload_states, frame_count_observer, /*frame_transformer=*/nullptr, + /*payload_types=*/{}, field_trials) {} RtpVideoSenderTestFixture( @@ -185,28 +252,24 @@ class RtpVideoSenderTestFixture { suspended_payload_states, /*frame_count_observer=*/nullptr, /*frame_transformer=*/nullptr, + /*payload_types=*/{}, field_trials) {} - ~RtpVideoSenderTestFixture() { Stop(); } + ~RtpVideoSenderTestFixture() { SetSending(false); } RtpVideoSender* router() { return router_.get(); } MockTransport& transport() { return transport_; } void AdvanceTime(TimeDelta delta) { time_controller_.AdvanceTime(delta); } - void Stop() { router_->Stop(); } - - void SetActiveModules(const std::vector& active_modules) { - router_->SetActiveModules(active_modules); - } + void SetSending(bool sending) { router_->SetSending(sending); } private: test::ScopedKeyValueConfig field_trials_; NiceMock transport_; NiceMock encoder_feedback_; GlobalSimulatedTimeController time_controller_; - RtcEventLogNull event_log_; + Environment env_; VideoSendStream::Config config_; - SendDelayStats send_delay_stats_; BitrateConstraints bitrate_config_; RtpTransportControllerSend transport_controller_; SendStatisticsProxy stats_proxy_; @@ -226,7 +289,7 @@ BitrateAllocationUpdate CreateBitrateAllocationUpdate(int target_bitrate_bps) { TEST(RtpVideoSenderTest, SendOnOneModule) { constexpr uint8_t kPayload = 'a'; EncodedImage encoded_image; - encoded_image.SetTimestamp(1); + encoded_image.SetRtpTimestamp(1); encoded_image.capture_time_ms_ = 2; encoded_image._frameType = VideoFrameType::kVideoFrameKey; encoded_image.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1)); @@ -235,23 +298,23 @@ TEST(RtpVideoSenderTest, SendOnOneModule) { EXPECT_NE(EncodedImageCallback::Result::OK, 
test.router()->OnEncodedImage(encoded_image, nullptr).error); - test.SetActiveModules({true}); + test.SetSending(true); EXPECT_EQ(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image, nullptr).error); - test.SetActiveModules({false}); + test.SetSending(false); EXPECT_NE(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image, nullptr).error); - test.SetActiveModules({true}); + test.SetSending(true); EXPECT_EQ(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image, nullptr).error); } -TEST(RtpVideoSenderTest, SendSimulcastSetActive) { +TEST(RtpVideoSenderTest, OnEncodedImageReturnOkWhenSendingTrue) { constexpr uint8_t kPayload = 'a'; EncodedImage encoded_image_1; - encoded_image_1.SetTimestamp(1); + encoded_image_1.SetRtpTimestamp(1); encoded_image_1.capture_time_ms_ = 2; encoded_image_1._frameType = VideoFrameType::kVideoFrameKey; encoded_image_1.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1)); @@ -262,7 +325,7 @@ TEST(RtpVideoSenderTest, SendSimulcastSetActive) { CodecSpecificInfo codec_info; codec_info.codecType = kVideoCodecVP8; - test.SetActiveModules({true, true}); + test.SetSending(true); EXPECT_EQ(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image_1, &codec_info).error); @@ -270,23 +333,12 @@ TEST(RtpVideoSenderTest, SendSimulcastSetActive) { encoded_image_2.SetSimulcastIndex(1); EXPECT_EQ(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image_2, &codec_info).error); - - // Inactive. - test.Stop(); - EXPECT_NE(EncodedImageCallback::Result::OK, - test.router()->OnEncodedImage(encoded_image_1, &codec_info).error); - EXPECT_NE(EncodedImageCallback::Result::OK, - test.router()->OnEncodedImage(encoded_image_2, &codec_info).error); } -// Tests how setting individual rtp modules to active affects the overall -// behavior of the payload router. First sets one module to active and checks -// that outgoing data can be sent on this module, and checks that no data can -// be sent if both modules are inactive. -TEST(RtpVideoSenderTest, SendSimulcastSetActiveModules) { +TEST(RtpVideoSenderTest, OnEncodedImageReturnErrorCodeWhenSendingFalse) { constexpr uint8_t kPayload = 'a'; EncodedImage encoded_image_1; - encoded_image_1.SetTimestamp(1); + encoded_image_1.SetRtpTimestamp(1); encoded_image_1.capture_time_ms_ = 2; encoded_image_1._frameType = VideoFrameType::kVideoFrameKey; encoded_image_1.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1)); @@ -299,30 +351,22 @@ TEST(RtpVideoSenderTest, SendSimulcastSetActiveModules) { CodecSpecificInfo codec_info; codec_info.codecType = kVideoCodecVP8; - // Only setting one stream to active will still set the payload router to - // active and allow sending data on the active stream. - std::vector active_modules({true, false}); - test.SetActiveModules(active_modules); - EXPECT_EQ(EncodedImageCallback::Result::OK, - test.router()->OnEncodedImage(encoded_image_1, &codec_info).error); - - // Setting both streams to inactive will turn the payload router to + // Setting rtp streams to inactive will turn the payload router to // inactive. - active_modules = {false, false}; - test.SetActiveModules(active_modules); + test.SetSending(false); // An incoming encoded image will not ask the module to send outgoing data // because the payload router is inactive. 
EXPECT_NE(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image_1, &codec_info).error); EXPECT_NE(EncodedImageCallback::Result::OK, - test.router()->OnEncodedImage(encoded_image_1, &codec_info).error); + test.router()->OnEncodedImage(encoded_image_2, &codec_info).error); } TEST(RtpVideoSenderTest, DiscardsHigherSimulcastFramesAfterLayerDisabledInVideoLayersAllocation) { constexpr uint8_t kPayload = 'a'; EncodedImage encoded_image_1; - encoded_image_1.SetTimestamp(1); + encoded_image_1.SetRtpTimestamp(1); encoded_image_1.capture_time_ms_ = 2; encoded_image_1._frameType = VideoFrameType::kVideoFrameKey; encoded_image_1.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1)); @@ -332,7 +376,7 @@ TEST(RtpVideoSenderTest, codec_info.codecType = kVideoCodecVP8; RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, kPayloadType, {}); - test.SetActiveModules({true, true}); + test.SetSending(true); // A layer is sent on both rtp streams. test.router()->OnVideoLayersAllocationUpdated( {.active_spatial_layers = {{.rtp_stream_index = 0}, @@ -346,6 +390,7 @@ TEST(RtpVideoSenderTest, // Only rtp stream index 0 is configured to send a stream. test.router()->OnVideoLayersAllocationUpdated( {.active_spatial_layers = {{.rtp_stream_index = 0}}}); + test.AdvanceTime(TimeDelta::Millis(33)); EXPECT_EQ(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image_1, &codec_info).error); EXPECT_NE(EncodedImageCallback::Result::OK, @@ -355,7 +400,7 @@ TEST(RtpVideoSenderTest, TEST(RtpVideoSenderTest, CreateWithNoPreviousStates) { RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, kPayloadType, {}); - test.SetActiveModules({true, true}); + test.SetSending(true); std::map initial_states = test.router()->GetRtpPayloadStates(); @@ -380,7 +425,7 @@ TEST(RtpVideoSenderTest, CreateWithPreviousStates) { RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, kPayloadType, states); - test.SetActiveModules({true, true}); + test.SetSending(true); std::map initial_states = test.router()->GetRtpPayloadStates(); @@ -407,7 +452,7 @@ TEST(RtpVideoSenderTest, FrameCountCallbacks) { constexpr uint8_t kPayload = 'a'; EncodedImage encoded_image; - encoded_image.SetTimestamp(1); + encoded_image.SetRtpTimestamp(1); encoded_image.capture_time_ms_ = 2; encoded_image._frameType = VideoFrameType::kVideoFrameKey; encoded_image.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1)); @@ -420,7 +465,7 @@ TEST(RtpVideoSenderTest, FrameCountCallbacks) { test.router()->OnEncodedImage(encoded_image, nullptr).error); ::testing::Mock::VerifyAndClearExpectations(&callback); - test.SetActiveModules({true}); + test.SetSending(true); FrameCounts frame_counts; EXPECT_CALL(callback, FrameCountUpdated(_, kSsrc1)) @@ -449,11 +494,11 @@ TEST(RtpVideoSenderTest, FrameCountCallbacks) { TEST(RtpVideoSenderTest, DoesNotRetrasmitAckedPackets) { RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, kPayloadType, {}); - test.SetActiveModules({true, true}); + test.SetSending(true); constexpr uint8_t kPayload = 'a'; EncodedImage encoded_image; - encoded_image.SetTimestamp(1); + encoded_image.SetRtpTimestamp(1); encoded_image.capture_time_ms_ = 2; encoded_image._frameType = VideoFrameType::kVideoFrameKey; encoded_image.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1)); @@ -463,18 +508,18 @@ TEST(RtpVideoSenderTest, DoesNotRetrasmitAckedPackets) { std::vector transport_sequence_numbers; EXPECT_CALL(test.transport(), SendRtp) .Times(2) - 
.WillRepeatedly([&rtp_sequence_numbers, &transport_sequence_numbers]( - rtc::ArrayView packet, - const PacketOptions& options) { - RtpPacket rtp_packet; - EXPECT_TRUE(rtp_packet.Parse(packet)); - rtp_sequence_numbers.push_back(rtp_packet.SequenceNumber()); - transport_sequence_numbers.push_back(options.packet_id); - return true; - }); + .WillRepeatedly( + [&rtp_sequence_numbers, &transport_sequence_numbers]( + ArrayView packet, const PacketOptions& options) { + RtpPacket rtp_packet; + EXPECT_TRUE(rtp_packet.Parse(packet)); + rtp_sequence_numbers.push_back(rtp_packet.SequenceNumber()); + transport_sequence_numbers.push_back(options.packet_id); + return true; + }); EXPECT_EQ(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image, nullptr).error); - encoded_image.SetTimestamp(2); + encoded_image.SetRtpTimestamp(2); encoded_image.capture_time_ms_ = 3; EXPECT_EQ(EncodedImageCallback::Result::OK, test.router()->OnEncodedImage(encoded_image, nullptr).error); @@ -485,19 +530,19 @@ TEST(RtpVideoSenderTest, DoesNotRetrasmitAckedPackets) { rtcp::Nack nack; nack.SetMediaSsrc(kSsrc1); nack.SetPacketIds(rtp_sequence_numbers); - rtc::Buffer nack_buffer = nack.Build(); + Buffer nack_buffer = nack.Build(); std::vector retransmitted_rtp_sequence_numbers; EXPECT_CALL(test.transport(), SendRtp) .Times(2) .WillRepeatedly([&retransmitted_rtp_sequence_numbers]( - rtc::ArrayView packet, + ArrayView packet, const PacketOptions& options) { RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); EXPECT_EQ(rtp_packet.Ssrc(), kRtxSsrc1); // Capture the retransmitted sequence number from the RTX header. - rtc::ArrayView payload = rtp_packet.payload(); + ArrayView payload = rtp_packet.payload(); retransmitted_rtp_sequence_numbers.push_back( ByteReader::ReadBigEndian(payload.data())); return true; @@ -532,13 +577,13 @@ TEST(RtpVideoSenderTest, DoesNotRetrasmitAckedPackets) { // still be retransmitted. test.AdvanceTime(TimeDelta::Millis(33)); EXPECT_CALL(test.transport(), SendRtp) - .WillOnce([&lost_packet_feedback](rtc::ArrayView packet, + .WillOnce([&lost_packet_feedback](ArrayView packet, const PacketOptions& options) { RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); EXPECT_EQ(rtp_packet.Ssrc(), kRtxSsrc1); // Capture the retransmitted sequence number from the RTX header. 
- rtc::ArrayView payload = rtp_packet.payload(); + ArrayView payload = rtp_packet.payload(); EXPECT_EQ(lost_packet_feedback.rtp_sequence_number, ByteReader::ReadBigEndian(payload.data())); return true; @@ -614,11 +659,11 @@ TEST(RtpVideoSenderTest, RetransmitsOnTransportWideLossInfo) { TEST(RtpVideoSenderTest, EarlyRetransmits) { RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, kPayloadType, {}); - test.SetActiveModules({true, true}); + test.SetSending(true); const uint8_t kPayload[1] = {'a'}; EncodedImage encoded_image; - encoded_image.SetTimestamp(1); + encoded_image.SetRtpTimestamp(1); encoded_image.capture_time_ms_ = 2; encoded_image._frameType = VideoFrameType::kVideoFrameKey; encoded_image.SetEncodedData( @@ -635,8 +680,7 @@ TEST(RtpVideoSenderTest, EarlyRetransmits) { EXPECT_CALL(test.transport(), SendRtp) .WillOnce( [&frame1_rtp_sequence_number, &frame1_transport_sequence_number]( - rtc::ArrayView packet, - const PacketOptions& options) { + ArrayView packet, const PacketOptions& options) { RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); frame1_rtp_sequence_number = rtp_packet.SequenceNumber(); @@ -655,8 +699,7 @@ TEST(RtpVideoSenderTest, EarlyRetransmits) { EXPECT_CALL(test.transport(), SendRtp) .WillOnce( [&frame2_rtp_sequence_number, &frame2_transport_sequence_number]( - rtc::ArrayView packet, - const PacketOptions& options) { + ArrayView packet, const PacketOptions& options) { RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); frame2_rtp_sequence_number = rtp_packet.SequenceNumber(); @@ -673,16 +716,15 @@ TEST(RtpVideoSenderTest, EarlyRetransmits) { // Inject a transport feedback where the packet for the first frame is lost, // expect a retransmission for it. EXPECT_CALL(test.transport(), SendRtp) - .WillOnce([&frame1_rtp_sequence_number]( - rtc::ArrayView packet, - const PacketOptions& options) { + .WillOnce([&frame1_rtp_sequence_number](ArrayView packet, + const PacketOptions& options) { RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); EXPECT_EQ(rtp_packet.Ssrc(), kRtxSsrc1); // Retransmitted sequence number from the RTX header should match // the lost packet. - rtc::ArrayView payload = rtp_packet.payload(); + ArrayView payload = rtp_packet.payload(); EXPECT_EQ(ByteReader::ReadBigEndian(payload.data()), frame1_rtp_sequence_number); return true; @@ -709,23 +751,23 @@ TEST(RtpVideoSenderTest, EarlyRetransmits) { TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) { RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); - test.SetActiveModules({true}); + test.SetSending(true); RtpHeaderExtensionMap extensions; extensions.Register( kDependencyDescriptorExtensionId); std::vector sent_packets; ON_CALL(test.transport(), SendRtp) - .WillByDefault([&](rtc::ArrayView packet, - const PacketOptions& options) { - sent_packets.emplace_back(&extensions); - EXPECT_TRUE(sent_packets.back().Parse(packet)); - return true; - }); + .WillByDefault( + [&](ArrayView packet, const PacketOptions& options) { + sent_packets.emplace_back(&extensions); + EXPECT_TRUE(sent_packets.back().Parse(packet)); + return true; + }); const uint8_t kPayload[1] = {'a'}; EncodedImage encoded_image; - encoded_image.SetTimestamp(1); + encoded_image.SetRtpTimestamp(1); encoded_image.capture_time_ms_ = 2; encoded_image.SetEncodedData( EncodedImageBuffer::Create(kPayload, sizeof(kPayload))); @@ -755,7 +797,7 @@ TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) { // Send in delta frame. 
encoded_image._frameType = VideoFrameType::kVideoFrameDelta; - codec_specific.template_structure = absl::nullopt; + codec_specific.template_structure = std::nullopt; codec_specific.generic_frame_info = GenericFrameInfo::Builder().T(1).Dtis("D").Build(); codec_specific.generic_frame_info->encoder_buffers = {{0, true, false}}; @@ -767,6 +809,197 @@ TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) { sent_packets.back().HasExtension()); } +TEST(RtpVideoSenderTest, SimulcastIndependentFrameIds) { + test::ExplicitKeyValueConfig field_trials( + "WebRTC-GenericDescriptorAuth/Disabled/"); + const std::map kPayloadStates = { + {kSsrc1, {.frame_id = 100}}, {kSsrc2, {.frame_id = 200}}}; + RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {}, kPayloadType, + kPayloadStates, &field_trials); + test.SetSending(true); + + RtpHeaderExtensionMap extensions; + extensions.Register( + kDependencyDescriptorExtensionId); + std::vector sent_packets; + ON_CALL(test.transport(), SendRtp) + .WillByDefault( + [&](ArrayView packet, const PacketOptions& options) { + sent_packets.emplace_back(&extensions); + EXPECT_TRUE(sent_packets.back().Parse(packet)); + return true; + }); + + const uint8_t kPayload[1] = {'a'}; + EncodedImage encoded_image; + encoded_image.SetEncodedData( + EncodedImageBuffer::Create(kPayload, sizeof(kPayload))); + + CodecSpecificInfo codec_specific; + codec_specific.codecType = VideoCodecType::kVideoCodecGeneric; + codec_specific.template_structure.emplace(); + codec_specific.template_structure->num_decode_targets = 1; + codec_specific.template_structure->templates = { + FrameDependencyTemplate().T(0).Dtis("S"), + FrameDependencyTemplate().T(0).Dtis("S").FrameDiffs({1}), + }; + codec_specific.generic_frame_info = + GenericFrameInfo::Builder().T(0).Dtis("S").Build(); + encoded_image._frameType = VideoFrameType::kVideoFrameKey; + codec_specific.generic_frame_info->encoder_buffers = {{0, false, true}}; + + encoded_image.SetSimulcastIndex(0); + EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, + EncodedImageCallback::Result::OK); + encoded_image.SetSimulcastIndex(1); + EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, + EncodedImageCallback::Result::OK); + + test.AdvanceTime(TimeDelta::Millis(33)); + ASSERT_THAT(sent_packets, SizeIs(2)); + DependencyDescriptorMandatory dd_s0; + DependencyDescriptorMandatory dd_s1; + ASSERT_TRUE( + sent_packets[0].GetExtension(&dd_s0)); + ASSERT_TRUE( + sent_packets[1].GetExtension(&dd_s1)); + EXPECT_EQ(dd_s0.frame_number(), 100); + EXPECT_EQ(dd_s1.frame_number(), 200); +} + +TEST(RtpVideoSenderTest, + SimulcastNoIndependentFrameIdsIfGenericDescriptorAuthIsEnabled) { + test::ExplicitKeyValueConfig field_trials( + "WebRTC-GenericDescriptorAuth/Enabled/"); + const std::map kPayloadStates = { + {kSsrc1, {.shared_frame_id = 1000, .frame_id = 100}}, + {kSsrc2, {.shared_frame_id = 1000, .frame_id = 200}}}; + RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {}, kPayloadType, + kPayloadStates, &field_trials); + test.SetSending(true); + + RtpHeaderExtensionMap extensions; + extensions.Register( + kDependencyDescriptorExtensionId); + std::vector sent_packets; + ON_CALL(test.transport(), SendRtp) + .WillByDefault( + [&](ArrayView packet, const PacketOptions& options) { + sent_packets.emplace_back(&extensions); + EXPECT_TRUE(sent_packets.back().Parse(packet)); + return true; + }); + + const uint8_t kPayload[1] = {'a'}; + EncodedImage encoded_image; + encoded_image.SetEncodedData( + EncodedImageBuffer::Create(kPayload, 
sizeof(kPayload))); + + CodecSpecificInfo codec_specific; + codec_specific.codecType = VideoCodecType::kVideoCodecGeneric; + codec_specific.template_structure.emplace(); + codec_specific.template_structure->num_decode_targets = 1; + codec_specific.template_structure->templates = { + FrameDependencyTemplate().T(0).Dtis("S"), + FrameDependencyTemplate().T(0).Dtis("S").FrameDiffs({1}), + }; + codec_specific.generic_frame_info = + GenericFrameInfo::Builder().T(0).Dtis("S").Build(); + encoded_image._frameType = VideoFrameType::kVideoFrameKey; + codec_specific.generic_frame_info->encoder_buffers = {{0, false, true}}; + + encoded_image.SetSimulcastIndex(0); + EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, + EncodedImageCallback::Result::OK); + encoded_image.SetSimulcastIndex(1); + EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, + EncodedImageCallback::Result::OK); + + test.AdvanceTime(TimeDelta::Millis(33)); + ASSERT_THAT(sent_packets, SizeIs(2)); + DependencyDescriptorMandatory dd_s0; + DependencyDescriptorMandatory dd_s1; + ASSERT_TRUE( + sent_packets[0].GetExtension(&dd_s0)); + ASSERT_TRUE( + sent_packets[1].GetExtension(&dd_s1)); + EXPECT_EQ(dd_s0.frame_number(), 1001); + EXPECT_EQ(dd_s1.frame_number(), 1002); +} + +TEST(RtpVideoSenderTest, MixedCodecSimulcastPayloadType) { + // When multiple payload types are set, verify that the payload type switches + // corresponding to the simulcast index. + RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, + kPayloadType, {}, nullptr, nullptr, + {kPayloadType, kPayloadType2}); + test.SetSending(true); + + std::vector rtp_sequence_numbers; + std::vector sent_packets; + EXPECT_CALL(test.transport(), SendRtp) + .Times(3) + .WillRepeatedly([&](ArrayView packet, + const PacketOptions& options) -> bool { + RtpPacket& rtp_packet = sent_packets.emplace_back(); + EXPECT_TRUE(rtp_packet.Parse(packet)); + rtp_sequence_numbers.push_back(rtp_packet.SequenceNumber()); + return true; + }); + + const uint8_t kPayload[1] = {'a'}; + EncodedImage encoded_image; + encoded_image.SetEncodedData( + EncodedImageBuffer::Create(kPayload, sizeof(kPayload))); + + CodecSpecificInfo codec_specific; + codec_specific.codecType = VideoCodecType::kVideoCodecVP8; + + encoded_image.SetSimulcastIndex(0); + ASSERT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, + EncodedImageCallback::Result::OK); + ASSERT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, + EncodedImageCallback::Result::OK); + encoded_image.SetSimulcastIndex(1); + ASSERT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, + EncodedImageCallback::Result::OK); + + test.AdvanceTime(TimeDelta::Millis(33)); + ASSERT_THAT(sent_packets, SizeIs(3)); + EXPECT_EQ(sent_packets[0].PayloadType(), kPayloadType); + EXPECT_EQ(sent_packets[1].PayloadType(), kPayloadType); + EXPECT_EQ(sent_packets[2].PayloadType(), kPayloadType2); + + // Verify that NACK is sent to the RTX payload type corresponding to the + // payload type. 
+ rtcp::Nack nack1, nack2; + nack1.SetMediaSsrc(kSsrc1); + nack2.SetMediaSsrc(kSsrc2); + nack1.SetPacketIds({rtp_sequence_numbers[0], rtp_sequence_numbers[1]}); + nack2.SetPacketIds({rtp_sequence_numbers[2]}); + Buffer nack_buffer1 = nack1.Build(); + Buffer nack_buffer2 = nack2.Build(); + + std::vector sent_rtx_packets; + EXPECT_CALL(test.transport(), SendRtp) + .Times(3) + .WillRepeatedly( + [&](ArrayView packet, const PacketOptions& options) { + RtpPacket& rtp_packet = sent_rtx_packets.emplace_back(); + EXPECT_TRUE(rtp_packet.Parse(packet)); + return true; + }); + test.router()->DeliverRtcp(nack_buffer1.data(), nack_buffer1.size()); + test.router()->DeliverRtcp(nack_buffer2.data(), nack_buffer2.size()); + + test.AdvanceTime(TimeDelta::Millis(33)); + + ASSERT_THAT(sent_rtx_packets, SizeIs(3)); + EXPECT_EQ(sent_rtx_packets[0].PayloadType(), kPayloadType + 1); + EXPECT_EQ(sent_rtx_packets[1].PayloadType(), kPayloadType + 1); + EXPECT_EQ(sent_rtx_packets[2].PayloadType(), kPayloadType2 + 1); +} + TEST(RtpVideoSenderTest, SupportsDependencyDescriptorForVp8NotProvidedByEncoder) { constexpr uint8_t kPayload[1] = {'a'}; @@ -777,11 +1010,11 @@ TEST(RtpVideoSenderTest, std::vector sent_packets; ON_CALL(test.transport(), SendRtp) .WillByDefault( - [&](rtc::ArrayView packet, const PacketOptions&) { + [&](ArrayView packet, const PacketOptions&) { EXPECT_TRUE(sent_packets.emplace_back(&extensions).Parse(packet)); return true; }); - test.SetActiveModules({true}); + test.SetSending(true); EncodedImage key_frame_image; key_frame_image._frameType = VideoFrameType::kVideoFrameKey; @@ -815,23 +1048,23 @@ TEST(RtpVideoSenderTest, TEST(RtpVideoSenderTest, SupportsDependencyDescriptorForVp9) { RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); - test.SetActiveModules({true}); + test.SetSending(true); RtpHeaderExtensionMap extensions; extensions.Register( kDependencyDescriptorExtensionId); std::vector sent_packets; ON_CALL(test.transport(), SendRtp) - .WillByDefault([&](rtc::ArrayView packet, - const PacketOptions& options) { - sent_packets.emplace_back(&extensions); - EXPECT_TRUE(sent_packets.back().Parse(packet)); - return true; - }); + .WillByDefault( + [&](ArrayView packet, const PacketOptions& options) { + sent_packets.emplace_back(&extensions); + EXPECT_TRUE(sent_packets.back().Parse(packet)); + return true; + }); const uint8_t kPayload[1] = {'a'}; EncodedImage encoded_image; - encoded_image.SetTimestamp(1); + encoded_image.SetRtpTimestamp(1); encoded_image.capture_time_ms_ = 2; encoded_image._frameType = VideoFrameType::kVideoFrameKey; encoded_image.SetEncodedData( @@ -854,7 +1087,7 @@ TEST(RtpVideoSenderTest, SupportsDependencyDescriptorForVp9) { EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, EncodedImageCallback::Result::OK); // Send in 2nd spatial layer. 
- codec_specific.template_structure = absl::nullopt; + codec_specific.template_structure = std::nullopt; codec_specific.generic_frame_info = GenericFrameInfo::Builder().S(1).Dtis("-S").Build(); codec_specific.generic_frame_info->encoder_buffers = {{0, true, false}, @@ -871,23 +1104,23 @@ TEST(RtpVideoSenderTest, SupportsDependencyDescriptorForVp9) { TEST(RtpVideoSenderTest, SupportsDependencyDescriptorForVp9NotProvidedByEncoder) { RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); - test.SetActiveModules({true}); + test.SetSending(true); RtpHeaderExtensionMap extensions; extensions.Register( kDependencyDescriptorExtensionId); std::vector sent_packets; ON_CALL(test.transport(), SendRtp) - .WillByDefault([&](rtc::ArrayView packet, - const PacketOptions& options) { - sent_packets.emplace_back(&extensions); - EXPECT_TRUE(sent_packets.back().Parse(packet)); - return true; - }); + .WillByDefault( + [&](ArrayView packet, const PacketOptions& options) { + sent_packets.emplace_back(&extensions); + EXPECT_TRUE(sent_packets.back().Parse(packet)); + return true; + }); const uint8_t kPayload[1] = {'a'}; EncodedImage encoded_image; - encoded_image.SetTimestamp(1); + encoded_image.SetRtpTimestamp(1); encoded_image.capture_time_ms_ = 2; encoded_image._frameType = VideoFrameType::kVideoFrameKey; encoded_image._encodedWidth = 320; @@ -909,7 +1142,7 @@ TEST(RtpVideoSenderTest, // Send in 2nd picture. encoded_image._frameType = VideoFrameType::kVideoFrameDelta; - encoded_image.SetTimestamp(3000); + encoded_image.SetRtpTimestamp(3000); codec_specific.codecSpecific.VP9.inter_pic_predicted = true; codec_specific.codecSpecific.VP9.num_ref_pics = 1; codec_specific.codecSpecific.VP9.p_diff[0] = 1; @@ -922,27 +1155,84 @@ TEST(RtpVideoSenderTest, EXPECT_TRUE(sent_packets[1].HasExtension()); } +TEST(RtpVideoSenderTest, + SupportsDependencyDescriptorForH264NotProvidedByEncoder) { + RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); + test.SetSending(true); + + RtpHeaderExtensionMap extensions; + extensions.Register( + kDependencyDescriptorExtensionId); + std::vector sent_packets; + EXPECT_CALL(test.transport(), SendRtp(_, _)) + .Times(2) + .WillRepeatedly([&](ArrayView packet, + const PacketOptions& options) -> bool { + sent_packets.emplace_back(&extensions); + EXPECT_TRUE(sent_packets.back().Parse(packet)); + return true; + }); + + const uint8_t kPayload[1] = {'a'}; + EncodedImage encoded_image; + encoded_image.SetRtpTimestamp(1); + encoded_image.capture_time_ms_ = 2; + encoded_image._frameType = VideoFrameType::kVideoFrameKey; + encoded_image._encodedWidth = 320; + encoded_image._encodedHeight = 180; + encoded_image.SetEncodedData( + EncodedImageBuffer::Create(kPayload, sizeof(kPayload))); + + CodecSpecificInfo codec_specific; + codec_specific.codecType = VideoCodecType::kVideoCodecH264; + codec_specific.codecSpecific.H264.temporal_idx = kNoTemporalIdx; + + // Send two tiny images, each mapping to single RTP packet. + EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, + EncodedImageCallback::Result::OK); + + // Send in 2nd picture. + encoded_image._frameType = VideoFrameType::kVideoFrameDelta; + encoded_image.SetRtpTimestamp(3000); + EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, + EncodedImageCallback::Result::OK); + + test.AdvanceTime(TimeDelta::Millis(33)); + + ASSERT_THAT(sent_packets, SizeIs(2)); + DependencyDescriptor dd_key; + // Key frame should have attached structure. 
+ EXPECT_TRUE(sent_packets[0].GetExtension( + nullptr, &dd_key)); + EXPECT_THAT(dd_key.attached_structure, NotNull()); + // Delta frame does not have attached structure. + DependencyDescriptor dd_delta; + EXPECT_TRUE(sent_packets[1].GetExtension( + dd_key.attached_structure.get(), &dd_delta)); + EXPECT_THAT(dd_delta.attached_structure, IsNull()); +} + TEST(RtpVideoSenderTest, GenerateDependecyDescriptorForGenericCodecs) { test::ScopedKeyValueConfig field_trials( "WebRTC-GenericCodecDependencyDescriptor/Enabled/"); RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}, &field_trials); - test.SetActiveModules({true}); + test.SetSending(true); RtpHeaderExtensionMap extensions; extensions.Register( kDependencyDescriptorExtensionId); std::vector sent_packets; ON_CALL(test.transport(), SendRtp) - .WillByDefault([&](rtc::ArrayView packet, - const PacketOptions& options) { - sent_packets.emplace_back(&extensions); - EXPECT_TRUE(sent_packets.back().Parse(packet)); - return true; - }); + .WillByDefault( + [&](ArrayView packet, const PacketOptions& options) { + sent_packets.emplace_back(&extensions); + EXPECT_TRUE(sent_packets.back().Parse(packet)); + return true; + }); const uint8_t kPayload[1] = {'a'}; EncodedImage encoded_image; - encoded_image.SetTimestamp(1); + encoded_image.SetRtpTimestamp(1); encoded_image.capture_time_ms_ = 2; encoded_image._frameType = VideoFrameType::kVideoFrameKey; encoded_image._encodedWidth = 320; @@ -960,7 +1250,7 @@ TEST(RtpVideoSenderTest, GenerateDependecyDescriptorForGenericCodecs) { // Send in 2nd picture. encoded_image._frameType = VideoFrameType::kVideoFrameDelta; - encoded_image.SetTimestamp(3000); + encoded_image.SetRtpTimestamp(3000); EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, EncodedImageCallback::Result::OK); @@ -972,23 +1262,23 @@ TEST(RtpVideoSenderTest, GenerateDependecyDescriptorForGenericCodecs) { TEST(RtpVideoSenderTest, SupportsStoppingUsingDependencyDescriptor) { RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); - test.SetActiveModules({true}); + test.SetSending(true); RtpHeaderExtensionMap extensions; extensions.Register( kDependencyDescriptorExtensionId); std::vector sent_packets; ON_CALL(test.transport(), SendRtp) - .WillByDefault([&](rtc::ArrayView packet, - const PacketOptions& options) { - sent_packets.emplace_back(&extensions); - EXPECT_TRUE(sent_packets.back().Parse(packet)); - return true; - }); + .WillByDefault( + [&](ArrayView packet, const PacketOptions& options) { + sent_packets.emplace_back(&extensions); + EXPECT_TRUE(sent_packets.back().Parse(packet)); + return true; + }); const uint8_t kPayload[1] = {'a'}; EncodedImage encoded_image; - encoded_image.SetTimestamp(1); + encoded_image.SetRtpTimestamp(1); encoded_image.capture_time_ms_ = 2; encoded_image.SetEncodedData( EncodedImageBuffer::Create(kPayload, sizeof(kPayload))); @@ -1018,7 +1308,7 @@ TEST(RtpVideoSenderTest, SupportsStoppingUsingDependencyDescriptor) { // Send in a new key frame without the support for the dependency descriptor. 
encoded_image._frameType = VideoFrameType::kVideoFrameKey; - codec_specific.template_structure = absl::nullopt; + codec_specific.template_structure = std::nullopt; EXPECT_EQ(test.router()->OnEncodedImage(encoded_image, &codec_specific).error, EncodedImageCallback::Result::OK); test.AdvanceTime(TimeDelta::Millis(33)); @@ -1034,8 +1324,8 @@ TEST(RtpVideoSenderTest, CanSetZeroBitrate) { } TEST(RtpVideoSenderTest, SimulcastSenderRegistersFrameTransformers) { - rtc::scoped_refptr transformer = - rtc::make_ref_counted(); + scoped_refptr transformer = + make_ref_counted(); EXPECT_CALL(*transformer, RegisterTransformedFrameSinkCallback(_, kSsrc1)); EXPECT_CALL(*transformer, RegisterTransformedFrameSinkCallback(_, kSsrc2)); @@ -1057,7 +1347,7 @@ TEST(RtpVideoSenderTest, OverheadIsSubtractedFromTargetBitrate) { kRtpHeaderSizeBytes + kTransportPacketOverheadBytes; RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}, &field_trials); test.router()->OnTransportOverheadChanged(kTransportPacketOverheadBytes); - test.SetActiveModules({true}); + test.SetSending(true); { test.router()->OnBitrateUpdated(CreateBitrateAllocationUpdate(300000), @@ -1084,30 +1374,30 @@ TEST(RtpVideoSenderTest, OverheadIsSubtractedFromTargetBitrate) { TEST(RtpVideoSenderTest, ClearsPendingPacketsOnInactivation) { RtpVideoSenderTestFixture test({kSsrc1}, {kRtxSsrc1}, kPayloadType, {}); - test.SetActiveModules({true}); + test.SetSending(true); RtpHeaderExtensionMap extensions; extensions.Register( kDependencyDescriptorExtensionId); std::vector sent_packets; ON_CALL(test.transport(), SendRtp) - .WillByDefault([&](rtc::ArrayView packet, - const PacketOptions& options) { - sent_packets.emplace_back(&extensions); - EXPECT_TRUE(sent_packets.back().Parse(packet)); - return true; - }); + .WillByDefault( + [&](ArrayView packet, const PacketOptions& options) { + sent_packets.emplace_back(&extensions); + EXPECT_TRUE(sent_packets.back().Parse(packet)); + return true; + }); // Set a very low bitrate. test.router()->OnBitrateUpdated( - CreateBitrateAllocationUpdate(/*rate_bps=*/30'000), + CreateBitrateAllocationUpdate(/*target_bitrate_bps=*/10'000), /*framerate=*/30); // Create and send a large keyframe. const size_t kImageSizeBytes = 10000; constexpr uint8_t kPayload[kImageSizeBytes] = {'a'}; EncodedImage encoded_image; - encoded_image.SetTimestamp(1); + encoded_image.SetRtpTimestamp(1); encoded_image.capture_time_ms_ = 2; encoded_image._frameType = VideoFrameType::kVideoFrameKey; encoded_image.SetEncodedData( @@ -1127,7 +1417,7 @@ TEST(RtpVideoSenderTest, ClearsPendingPacketsOnInactivation) { EXPECT_FALSE(packet.Marker()); } EXPECT_GT(transmittedPayload, DataSize::Zero()); - EXPECT_LT(transmittedPayload, DataSize::Bytes(kImageSizeBytes / 4)); + EXPECT_LT(transmittedPayload, DataSize::Bytes(kImageSizeBytes / 3)); // Record the RTP timestamp of the first frame. const uint32_t first_frame_timestamp = sent_packets[0].Timestamp(); @@ -1135,18 +1425,18 @@ TEST(RtpVideoSenderTest, ClearsPendingPacketsOnInactivation) { // Disable the sending module and advance time slightly. No packets should be // sent. - test.SetActiveModules({false}); + test.SetSending(false); test.AdvanceTime(TimeDelta::Millis(20)); EXPECT_TRUE(sent_packets.empty()); // Reactive the send module - any packets should have been removed, so nothing // should be transmitted. - test.SetActiveModules({true}); + test.SetSending(true); test.AdvanceTime(TimeDelta::Millis(33)); EXPECT_TRUE(sent_packets.empty()); // Send a new frame. 
- encoded_image.SetTimestamp(3); + encoded_image.SetRtpTimestamp(3); encoded_image.capture_time_ms_ = 4; EXPECT_EQ(test.router() ->OnEncodedImage(encoded_image, /*codec_specific=*/nullptr) @@ -1159,17 +1449,98 @@ TEST(RtpVideoSenderTest, ClearsPendingPacketsOnInactivation) { EXPECT_NE(sent_packets[0].Timestamp(), first_frame_timestamp); } +TEST(RtpVideoSenderTest, + ClearsPendingPacketsOnInactivationWithLayerAllocation) { + RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {}, kPayloadType, {}); + test.SetSending(true); + + RtpHeaderExtensionMap extensions; + extensions.Register( + kDependencyDescriptorExtensionId); + std::vector sent_packets; + ON_CALL(test.transport(), SendRtp) + .WillByDefault( + [&](ArrayView packet, const PacketOptions& options) { + sent_packets.emplace_back(&extensions); + EXPECT_TRUE(sent_packets.back().Parse(packet)); + return true; + }); + + // Set a very low bitrate. + test.router()->OnBitrateUpdated( + CreateBitrateAllocationUpdate(/*target_bitrate_bps=*/10'000), + /*framerate=*/30); + + // Create and send a large keyframe. + constexpr uint8_t kImage[10'000] = {}; + EncodedImage encoded_image; + encoded_image.SetSimulcastIndex(0); + encoded_image.SetRtpTimestamp(1); + encoded_image.capture_time_ms_ = 2; + encoded_image._frameType = VideoFrameType::kVideoFrameKey; + encoded_image.SetEncodedData( + EncodedImageBuffer::Create(kImage, std::size(kImage))); + EXPECT_EQ(test.router() + ->OnEncodedImage(encoded_image, /*codec_specific=*/nullptr) + .error, + EncodedImageCallback::Result::OK); + + // Advance time a small amount, check that sent data is only part of the + // image. + test.AdvanceTime(TimeDelta::Millis(5)); + DataSize transmitted_payload = DataSize::Zero(); + for (const RtpPacket& packet : sent_packets) { + transmitted_payload += DataSize::Bytes(packet.payload_size()); + // Make sure we don't see the end of the frame. + EXPECT_FALSE(packet.Marker()); + } + EXPECT_GT(transmitted_payload, DataSize::Zero()); + EXPECT_LT(transmitted_payload, DataSize::Bytes(std::size(kImage)) / 3); + + // Record the RTP timestamp of the first frame. + const uint32_t first_frame_timestamp = sent_packets[0].Timestamp(); + sent_packets.clear(); + + // Disable the 1st sending module and advance time slightly. No packets should + // be sent. + test.router()->OnVideoLayersAllocationUpdated( + {.active_spatial_layers = {{.rtp_stream_index = 1}}}); + test.AdvanceTime(TimeDelta::Millis(20)); + EXPECT_THAT(sent_packets, IsEmpty()); + + // Reactive the send module - any packets should have been removed, so nothing + // should be transmitted. + test.router()->OnVideoLayersAllocationUpdated( + {.active_spatial_layers = {{.rtp_stream_index = 0}, + {.rtp_stream_index = 1}}}); + test.AdvanceTime(TimeDelta::Millis(33)); + EXPECT_THAT(sent_packets, IsEmpty()); + + // Send a new frame. + encoded_image.SetRtpTimestamp(3); + encoded_image.capture_time_ms_ = 4; + EXPECT_EQ(test.router() + ->OnEncodedImage(encoded_image, /*codec_specific=*/nullptr) + .error, + EncodedImageCallback::Result::OK); + test.AdvanceTime(TimeDelta::Millis(33)); + + // Advance time, check we get new packets - but only for the second frame. + ASSERT_THAT(sent_packets, SizeIs(Ge(1))); + EXPECT_NE(sent_packets[0].Timestamp(), first_frame_timestamp); +} + // Integration test verifying that when retransmission mode is set to // kRetransmitBaseLayer,only base layer is retransmitted. 
TEST(RtpVideoSenderTest, RetransmitsBaseLayerOnly) { RtpVideoSenderTestFixture test({kSsrc1, kSsrc2}, {kRtxSsrc1, kRtxSsrc2}, kPayloadType, {}); - test.SetActiveModules({true, true}); + test.SetSending(true); test.router()->SetRetransmissionMode(kRetransmitBaseLayer); constexpr uint8_t kPayload = 'a'; EncodedImage encoded_image; - encoded_image.SetTimestamp(1); + encoded_image.SetRtpTimestamp(1); encoded_image.capture_time_ms_ = 2; encoded_image._frameType = VideoFrameType::kVideoFrameKey; encoded_image.SetEncodedData(EncodedImageBuffer::Create(&kPayload, 1)); @@ -1180,30 +1551,30 @@ TEST(RtpVideoSenderTest, RetransmitsBaseLayerOnly) { std::vector base_sequence_numbers; EXPECT_CALL(test.transport(), SendRtp) .Times(2) - .WillRepeatedly([&rtp_sequence_numbers, &transport_sequence_numbers]( - rtc::ArrayView packet, - const PacketOptions& options) { - RtpPacket rtp_packet; - EXPECT_TRUE(rtp_packet.Parse(packet)); - rtp_sequence_numbers.push_back(rtp_packet.SequenceNumber()); - transport_sequence_numbers.push_back(options.packet_id); - return true; - }); + .WillRepeatedly( + [&rtp_sequence_numbers, &transport_sequence_numbers]( + ArrayView packet, const PacketOptions& options) { + RtpPacket rtp_packet; + EXPECT_TRUE(rtp_packet.Parse(packet)); + rtp_sequence_numbers.push_back(rtp_packet.SequenceNumber()); + transport_sequence_numbers.push_back(options.packet_id); + return true; + }); CodecSpecificInfo key_codec_info; key_codec_info.codecType = kVideoCodecVP8; key_codec_info.codecSpecific.VP8.temporalIdx = 0; - EXPECT_EQ(EncodedImageCallback::Result::OK, - test.router()->OnEncodedImage( - encoded_image, &key_codec_info).error); - encoded_image.SetTimestamp(2); + EXPECT_EQ( + EncodedImageCallback::Result::OK, + test.router()->OnEncodedImage(encoded_image, &key_codec_info).error); + encoded_image.SetRtpTimestamp(2); encoded_image.capture_time_ms_ = 3; encoded_image._frameType = VideoFrameType::kVideoFrameDelta; CodecSpecificInfo delta_codec_info; delta_codec_info.codecType = kVideoCodecVP8; delta_codec_info.codecSpecific.VP8.temporalIdx = 1; - EXPECT_EQ(EncodedImageCallback::Result::OK, - test.router()->OnEncodedImage( - encoded_image, &delta_codec_info).error); + EXPECT_EQ( + EncodedImageCallback::Result::OK, + test.router()->OnEncodedImage(encoded_image, &delta_codec_info).error); test.AdvanceTime(TimeDelta::Millis(33)); @@ -1211,19 +1582,19 @@ TEST(RtpVideoSenderTest, RetransmitsBaseLayerOnly) { rtcp::Nack nack; nack.SetMediaSsrc(kSsrc1); nack.SetPacketIds(rtp_sequence_numbers); - rtc::Buffer nack_buffer = nack.Build(); + Buffer nack_buffer = nack.Build(); std::vector retransmitted_rtp_sequence_numbers; EXPECT_CALL(test.transport(), SendRtp) .Times(1) .WillRepeatedly([&retransmitted_rtp_sequence_numbers]( - rtc::ArrayView packet, + ArrayView packet, const PacketOptions& options) { RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); EXPECT_EQ(rtp_packet.Ssrc(), kRtxSsrc1); // Capture the retransmitted sequence number from the RTX header. - rtc::ArrayView payload = rtp_packet.payload(); + ArrayView payload = rtp_packet.payload(); retransmitted_rtp_sequence_numbers.push_back( ByteReader::ReadBigEndian(payload.data())); return true; @@ -1232,8 +1603,8 @@ TEST(RtpVideoSenderTest, RetransmitsBaseLayerOnly) { test.AdvanceTime(TimeDelta::Millis(33)); // Verify that only base layer packet was retransmitted. 
- std::vector base_rtp_sequence_numbers(rtp_sequence_numbers.begin(), - rtp_sequence_numbers.begin() + 1); + std::vector base_rtp_sequence_numbers( + rtp_sequence_numbers.begin(), rtp_sequence_numbers.begin() + 1); EXPECT_EQ(retransmitted_rtp_sequence_numbers, base_rtp_sequence_numbers); } diff --git a/call/rtx_receive_stream.cc b/call/rtx_receive_stream.cc index 6c5fa3f859..1607ebbade 100644 --- a/call/rtx_receive_stream.cc +++ b/call/rtx_receive_stream.cc @@ -12,9 +12,13 @@ #include +#include +#include #include #include "api/array_view.h" +#include "api/sequence_checker.h" +#include "call/rtp_packet_sink_interface.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" @@ -52,7 +56,7 @@ void RtxReceiveStream::OnRtpPacket(const RtpPacketReceived& rtx_packet) { if (rtp_receive_statistics_) { rtp_receive_statistics_->OnRtpPacket(rtx_packet); } - rtc::ArrayView payload = rtx_packet.payload(); + ArrayView payload = rtx_packet.payload(); if (payload.size() < kRtxHeaderSize) { return; @@ -75,7 +79,7 @@ void RtxReceiveStream::OnRtpPacket(const RtpPacketReceived& rtx_packet) { media_packet.set_arrival_time(rtx_packet.arrival_time()); // Skip the RTX header. - rtc::ArrayView rtx_payload = payload.subview(kRtxHeaderSize); + ArrayView rtx_payload = payload.subview(kRtxHeaderSize); uint8_t* media_payload = media_packet.AllocatePayload(rtx_payload.size()); RTC_DCHECK(media_payload != nullptr); diff --git a/call/rtx_receive_stream.h b/call/rtx_receive_stream.h index 79b03d306b..e98055ae3b 100644 --- a/call/rtx_receive_stream.h +++ b/call/rtx_receive_stream.h @@ -17,6 +17,7 @@ #include "api/sequence_checker.h" #include "call/rtp_packet_sink_interface.h" #include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { diff --git a/call/rtx_receive_stream_unittest.cc b/call/rtx_receive_stream_unittest.cc index b06990820f..342e80f162 100644 --- a/call/rtx_receive_stream_unittest.cc +++ b/call/rtx_receive_stream_unittest.cc @@ -10,8 +10,17 @@ #include "call/rtx_receive_stream.h" +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/units/timestamp.h" +#include "api/video/video_rotation.h" #include "call/test/mock_rtp_packet_sink_interface.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "test/gmock.h" @@ -21,7 +30,6 @@ namespace webrtc { namespace { -using ::testing::_; using ::testing::Property; using ::testing::StrictMock; @@ -105,7 +113,7 @@ std::map PayloadTypeMapping() { } template -rtc::ArrayView Truncate(rtc::ArrayView a, size_t drop) { +ArrayView Truncate(ArrayView a, size_t drop) { return a.subview(0, a.size() - drop); } @@ -115,7 +123,7 @@ TEST(RtxReceiveStreamTest, RestoresPacketPayload) { StrictMock media_sink; RtxReceiveStream rtx_sink(&media_sink, PayloadTypeMapping(), kMediaSSRC); RtpPacketReceived rtx_packet; - EXPECT_TRUE(rtx_packet.Parse(rtc::ArrayView(kRtxPacket))); + EXPECT_TRUE(rtx_packet.Parse(ArrayView(kRtxPacket))); EXPECT_CALL(media_sink, OnRtpPacket) .WillOnce([](const RtpPacketReceived& packet) { @@ -132,7 +140,7 @@ TEST(RtxReceiveStreamTest, SetsRecoveredFlag) { StrictMock media_sink; RtxReceiveStream rtx_sink(&media_sink, PayloadTypeMapping(), kMediaSSRC); RtpPacketReceived rtx_packet; - 
EXPECT_TRUE(rtx_packet.Parse(rtc::ArrayView(kRtxPacket))); + EXPECT_TRUE(rtx_packet.Parse(ArrayView(kRtxPacket))); EXPECT_FALSE(rtx_packet.recovered()); EXPECT_CALL(media_sink, OnRtpPacket) .WillOnce([](const RtpPacketReceived& packet) { @@ -149,7 +157,7 @@ TEST(RtxReceiveStreamTest, IgnoresUnknownPayloadType) { RtxReceiveStream rtx_sink(&media_sink, payload_type_mapping, kMediaSSRC); RtpPacketReceived rtx_packet; - EXPECT_TRUE(rtx_packet.Parse(rtc::ArrayView(kRtxPacket))); + EXPECT_TRUE(rtx_packet.Parse(ArrayView(kRtxPacket))); rtx_sink.OnRtpPacket(rtx_packet); } @@ -158,7 +166,7 @@ TEST(RtxReceiveStreamTest, IgnoresTruncatedPacket) { RtxReceiveStream rtx_sink(&media_sink, PayloadTypeMapping(), kMediaSSRC); RtpPacketReceived rtx_packet; EXPECT_TRUE( - rtx_packet.Parse(Truncate(rtc::ArrayView(kRtxPacket), 2))); + rtx_packet.Parse(Truncate(ArrayView(kRtxPacket), 2))); rtx_sink.OnRtpPacket(rtx_packet); } @@ -168,8 +176,7 @@ TEST(RtxReceiveStreamTest, CopiesRtpHeaderExtensions) { RtpHeaderExtensionMap extension_map; extension_map.RegisterByType(3, kRtpExtensionVideoRotation); RtpPacketReceived rtx_packet(&extension_map); - EXPECT_TRUE( - rtx_packet.Parse(rtc::ArrayView(kRtxPacketWithCVO))); + EXPECT_TRUE(rtx_packet.Parse(ArrayView(kRtxPacketWithCVO))); VideoRotation rotation = kVideoRotation_0; EXPECT_TRUE(rtx_packet.GetExtension(&rotation)); @@ -193,7 +200,7 @@ TEST(RtxReceiveStreamTest, PropagatesArrivalTime) { StrictMock media_sink; RtxReceiveStream rtx_sink(&media_sink, PayloadTypeMapping(), kMediaSSRC); RtpPacketReceived rtx_packet(nullptr); - EXPECT_TRUE(rtx_packet.Parse(rtc::ArrayView(kRtxPacket))); + EXPECT_TRUE(rtx_packet.Parse(ArrayView(kRtxPacket))); rtx_packet.set_arrival_time(Timestamp::Millis(123)); EXPECT_CALL(media_sink, OnRtpPacket(Property(&RtpPacketReceived::arrival_time, Timestamp::Millis(123)))); @@ -208,15 +215,14 @@ TEST(RtxReceiveStreamTest, SupportsLargePacket) { constexpr int kRtxPayloadOffset = 14; uint8_t large_rtx_packet[kRtxPacketSize]; memcpy(large_rtx_packet, kRtxPacket, sizeof(kRtxPacket)); - rtc::ArrayView payload(large_rtx_packet + kRtxPayloadOffset, - kRtxPacketSize - kRtxPayloadOffset); + ArrayView payload(large_rtx_packet + kRtxPayloadOffset, + kRtxPacketSize - kRtxPayloadOffset); // Fill payload. for (size_t i = 0; i < payload.size(); i++) { payload[i] = i; } - EXPECT_TRUE( - rtx_packet.Parse(rtc::ArrayView(large_rtx_packet))); + EXPECT_TRUE(rtx_packet.Parse(ArrayView(large_rtx_packet))); EXPECT_CALL(media_sink, OnRtpPacket) .WillOnce([&](const RtpPacketReceived& packet) { @@ -239,10 +245,10 @@ TEST(RtxReceiveStreamTest, SupportsLargePacketWithPadding) { uint8_t large_rtx_packet[kRtxPacketSize]; memcpy(large_rtx_packet, kRtxPacketWithPadding, sizeof(kRtxPacketWithPadding)); - rtc::ArrayView payload( + ArrayView payload( large_rtx_packet + kRtxPayloadOffset, kRtxPacketSize - kRtxPayloadOffset - kRtxPaddingSize); - rtc::ArrayView padding( + ArrayView padding( large_rtx_packet + kRtxPacketSize - kRtxPaddingSize, kRtxPaddingSize); // Fill payload. 
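For readers following the rtx_receive_stream changes above: OnRtpPacket() drops payloads shorter than kRtxHeaderSize, treats the first two payload bytes as the big-endian original sequence number (OSN) of the retransmitted packet, and forwards the remaining bytes as the restored media payload. Below is a minimal standalone sketch of that unpacking step; the RestoreRtxPayload helper name is invented for illustration and is not part of this patch.

#include <cstddef>
#include <cstdint>
#include <optional>
#include <utility>
#include <vector>

// Hypothetical restatement of the RFC 4588 RTX unpacking performed by
// RtxReceiveStream::OnRtpPacket(); kRtxHeaderSize mirrors the constant used
// in call/rtx_receive_stream.cc.
constexpr size_t kRtxHeaderSize = 2;

std::optional<std::pair<uint16_t, std::vector<uint8_t>>> RestoreRtxPayload(
    const std::vector<uint8_t>& rtx_payload) {
  if (rtx_payload.size() < kRtxHeaderSize) {
    // Too short to carry an original sequence number; the real code drops it.
    return std::nullopt;
  }
  // First two bytes: big-endian original sequence number of the lost packet.
  const uint16_t osn =
      static_cast<uint16_t>((rtx_payload[0] << 8) | rtx_payload[1]);
  // Remaining bytes: the original media payload, copied as-is.
  std::vector<uint8_t> media_payload(rtx_payload.begin() + kRtxHeaderSize,
                                     rtx_payload.end());
  return std::make_pair(osn, std::move(media_payload));
}

The IgnoresTruncatedPacket test above exercises the drop branch of this size check, while RestoresPacketPayload and SupportsLargePacket cover the successful path.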
@@ -254,8 +260,7 @@ TEST(RtxReceiveStreamTest, SupportsLargePacketWithPadding) { padding[i] = kRtxPaddingSize; } - EXPECT_TRUE( - rtx_packet.Parse(rtc::ArrayView(large_rtx_packet))); + EXPECT_TRUE(rtx_packet.Parse(ArrayView(large_rtx_packet))); EXPECT_CALL(media_sink, OnRtpPacket) .WillOnce([&](const RtpPacketReceived& packet) { diff --git a/call/simulated_network.h b/call/simulated_network.h index 8597367add..5003e379fc 100644 --- a/call/simulated_network.h +++ b/call/simulated_network.h @@ -10,125 +10,8 @@ #ifndef CALL_SIMULATED_NETWORK_H_ #define CALL_SIMULATED_NETWORK_H_ -#include - -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/sequence_checker.h" -#include "api/test/simulated_network.h" -#include "api/units/data_size.h" -#include "api/units/timestamp.h" -#include "rtc_base/race_checker.h" -#include "rtc_base/random.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_annotations.h" - -namespace webrtc { - -// Class simulating a network link. -// -// This is a basic implementation of NetworkBehaviorInterface that supports: -// - Packet loss -// - Capacity delay -// - Extra delay with or without packets reorder -// - Packet overhead -// - Queue max capacity -class SimulatedNetwork : public SimulatedNetworkInterface { - public: - using Config = BuiltInNetworkBehaviorConfig; - explicit SimulatedNetwork(Config config, uint64_t random_seed = 1); - ~SimulatedNetwork() override; - - // Sets a new configuration. This will affect packets that will be sent with - // EnqueuePacket but also packets in the network that have not left the - // network emulation. Packets that are ready to be retrieved by - // DequeueDeliverablePackets are not affected by the new configuration. - // TODO(bugs.webrtc.org/14525): Fix SetConfig and make it apply only to the - // part of the packet that is currently being sent (instead of applying to - // all of it). - void SetConfig(const Config& config) override; - void UpdateConfig(std::function - config_modifier) override; - void PauseTransmissionUntil(int64_t until_us) override; - - // NetworkBehaviorInterface - bool EnqueuePacket(PacketInFlightInfo packet) override; - std::vector DequeueDeliverablePackets( - int64_t receive_time_us) override; - - absl::optional NextDeliveryTimeUs() const override; - - private: - struct PacketInfo { - PacketInFlightInfo packet; - // Time when the packet has left (or will leave) the network. - int64_t arrival_time_us; - }; - // Contains current configuration state. - struct ConfigState { - // Static link configuration. - Config config; - // The probability to drop the packet if we are currently dropping a - // burst of packet - double prob_loss_bursting; - // The probability to drop a burst of packets. - double prob_start_bursting; - // Used for temporary delay spikes. - int64_t pause_transmission_until_us = 0; - }; - - // Moves packets from capacity- to delay link. - void UpdateCapacityQueue(ConfigState state, int64_t time_now_us) - RTC_RUN_ON(&process_checker_); - ConfigState GetConfigState() const; - - mutable Mutex config_lock_; - - // Guards the data structures involved in delay and loss processing, such as - // the packet queues. - rtc::RaceChecker process_checker_; - // Models the capacity of the network by rejecting packets if the queue is - // full and keeping them in the queue until they are ready to exit (according - // to the link capacity, which cannot be violated, e.g. a 1 kbps link will - // only be able to deliver 1000 bits per second). 
- // - // Invariant: - // The head of the `capacity_link_` has arrival_time_us correctly set to the - // time when the packet is supposed to be delivered (without accounting - // potential packet loss or potential extra delay and without accounting for a - // new configuration of the network, which requires a re-computation of the - // arrival_time_us). - std::queue capacity_link_ RTC_GUARDED_BY(process_checker_); - // Models the extra delay of the network (see `queue_delay_ms` - // and `delay_standard_deviation_ms` in BuiltInNetworkBehaviorConfig), packets - // in the `delay_link_` have technically already left the network and don't - // use its capacity but they are not delivered yet. - std::deque delay_link_ RTC_GUARDED_BY(process_checker_); - // Represents the next moment in time when the network is supposed to deliver - // packets to the client (either by pulling them from `delay_link_` or - // `capacity_link_` or both). - absl::optional next_process_time_us_ - RTC_GUARDED_BY(process_checker_); - - ConfigState config_state_ RTC_GUARDED_BY(config_lock_); - - Random random_ RTC_GUARDED_BY(process_checker_); - // Are we currently dropping a burst of packets? - bool bursting_; - - // The send time of the last enqueued packet, this is only used to check that - // the send time of enqueued packets is monotonically increasing. - int64_t last_enqueue_time_us_; - - // The last time a packet left the capacity_link_ (used to enforce - // the capacity of the link and avoid packets starts to get sent before - // the link it free). - int64_t last_capacity_link_exit_time_; -}; - -} // namespace webrtc +// TODO(bugs.webrtc.org/14525): Remove once downstream projects does not use +// it. +#include "test/network/simulated_network.h" #endif // CALL_SIMULATED_NETWORK_H_ diff --git a/call/simulated_packet_receiver.h b/call/simulated_packet_receiver.h index 2db46e8c38..629ca10f9c 100644 --- a/call/simulated_packet_receiver.h +++ b/call/simulated_packet_receiver.h @@ -11,7 +11,9 @@ #ifndef CALL_SIMULATED_PACKET_RECEIVER_H_ #define CALL_SIMULATED_PACKET_RECEIVER_H_ -#include "api/test/simulated_network.h" +#include +#include + #include "call/packet_receiver.h" namespace webrtc { @@ -34,7 +36,7 @@ class SimulatedPacketReceiverInterface : public PacketReceiver { // Returns the time until next process or nullopt to indicate that the next // process time is unknown. If the next process time is unknown, this should // be checked again any time a packet is enqueued. - virtual absl::optional TimeUntilNextProcess() = 0; + virtual std::optional TimeUntilNextProcess() = 0; }; } // namespace webrtc diff --git a/call/syncable.h b/call/syncable.h index 6817be9c55..09c0f22172 100644 --- a/call/syncable.h +++ b/call/syncable.h @@ -16,7 +16,7 @@ #include -#include "absl/types/optional.h" +#include namespace webrtc { @@ -34,7 +34,7 @@ class Syncable { virtual ~Syncable(); virtual uint32_t id() const = 0; - virtual absl::optional GetInfo() const = 0; + virtual std::optional GetInfo() const = 0; virtual bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, int64_t* time_ms) const = 0; virtual bool SetMinimumPlayoutDelay(int delay_ms) = 0; diff --git a/call/test/mock_audio_receive_stream.h b/call/test/mock_audio_receive_stream.h new file mode 100644 index 0000000000..0d3a6906ac --- /dev/null +++ b/call/test/mock_audio_receive_stream.h @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2025 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef CALL_TEST_MOCK_AUDIO_RECEIVE_STREAM_H_ +#define CALL_TEST_MOCK_AUDIO_RECEIVE_STREAM_H_ + +#include +#include +#include + +#include "api/audio/audio_frame.h" +#include "api/audio/audio_mixer.h" +#include "api/audio_codecs/audio_format.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/frame_transformer_interface.h" +#include "api/rtp_headers.h" +#include "api/scoped_refptr.h" +#include "api/transport/rtp/rtp_source.h" +#include "call/audio_receive_stream.h" +#include "test/gmock.h" + +namespace webrtc { +namespace test { + +class MockAudioReceiveStream : public AudioReceiveStreamInterface, + public AudioMixer::Source { + public: + MOCK_METHOD(uint32_t, remote_ssrc, (), (const override)); + MOCK_METHOD(void, Start, (), (override)); + MOCK_METHOD(void, Stop, (), (override)); + MOCK_METHOD(bool, IsRunning, (), (const override)); + MOCK_METHOD(void, + SetDepacketizerToDecoderFrameTransformer, + (scoped_refptr), + (override)); + MOCK_METHOD(void, + SetDecoderMap, + ((std::map)), + (override)); + MOCK_METHOD(void, SetNackHistory, (int), (override)); + MOCK_METHOD(void, SetRtcpMode, (RtcpMode), (override)); + MOCK_METHOD(void, SetNonSenderRttMeasurement, (bool), (override)); + MOCK_METHOD(void, + SetFrameDecryptor, + (scoped_refptr), + (override)); + + MOCK_METHOD(webrtc::AudioReceiveStreamInterface::Stats, + GetStats, + (bool), + (const override)); + MOCK_METHOD(void, SetSink, (webrtc::AudioSinkInterface*), (override)); + MOCK_METHOD(void, SetGain, (float), (override)); + MOCK_METHOD(bool, SetBaseMinimumPlayoutDelayMs, (int), (override)); + MOCK_METHOD(int, GetBaseMinimumPlayoutDelayMs, (), (const override)); + MOCK_METHOD(std::vector, GetSources, (), (const override)); + + // TODO (b/397376626): Create a MockAudioMixerSource, and instead + // have a member variable here. 
+ AudioMixer::Source* source() override { return this; } + + MOCK_METHOD(AudioFrameInfo, + GetAudioFrameWithInfo, + (int, AudioFrame*), + (override)); + MOCK_METHOD(int, Ssrc, (), (const override)); + MOCK_METHOD(int, PreferredSampleRate, (), (const override)); +}; + +} // namespace test +} // namespace webrtc + +#endif // CALL_TEST_MOCK_AUDIO_RECEIVE_STREAM_H_ diff --git a/call/test/mock_audio_send_stream.h b/call/test/mock_audio_send_stream.h index 1993de8de0..d0c3fd8051 100644 --- a/call/test/mock_audio_send_stream.h +++ b/call/test/mock_audio_send_stream.h @@ -13,6 +13,8 @@ #include +#include "api/audio/audio_frame.h" +#include "api/rtp_sender_interface.h" #include "call/audio_send_stream.h" #include "test/gmock.h" diff --git a/call/test/mock_bitrate_allocator.h b/call/test/mock_bitrate_allocator.h index b08916fe4f..c97a6f6f8b 100644 --- a/call/test/mock_bitrate_allocator.h +++ b/call/test/mock_bitrate_allocator.h @@ -10,7 +10,6 @@ #ifndef CALL_TEST_MOCK_BITRATE_ALLOCATOR_H_ #define CALL_TEST_MOCK_BITRATE_ALLOCATOR_H_ -#include #include "call/bitrate_allocator.h" #include "test/gmock.h" diff --git a/call/test/mock_rtp_transport_controller_send.h b/call/test/mock_rtp_transport_controller_send.h index b24e5a59ec..844f6601c7 100644 --- a/call/test/mock_rtp_transport_controller_send.h +++ b/call/test/mock_rtp_transport_controller_send.h @@ -11,21 +11,28 @@ #ifndef CALL_TEST_MOCK_RTP_TRANSPORT_CONTROLLER_SEND_H_ #define CALL_TEST_MOCK_RTP_TRANSPORT_CONTROLLER_SEND_H_ +#include +#include #include #include -#include -#include +#include #include "absl/strings/string_view.h" -#include "api/crypto/crypto_options.h" -#include "api/crypto/frame_encryptor_interface.h" +#include "api/fec_controller.h" #include "api/frame_transformer_interface.h" +#include "api/scoped_refptr.h" +#include "api/transport/bandwidth_estimation_settings.h" #include "api/transport/bitrate_settings.h" +#include "api/transport/network_control.h" +#include "api/transport/network_types.h" +#include "api/units/timestamp.h" +#include "call/rtp_config.h" #include "call/rtp_transport_controller_send_interface.h" #include "modules/pacing/packet_router.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" -#include "rtc_base/rate_limiter.h" #include "test/gmock.h" namespace webrtc { @@ -41,29 +48,33 @@ class MockRtpTransportControllerSend int rtcp_report_interval_ms, Transport*, const RtpSenderObservers&, - RtcEventLog*, std::unique_ptr, const RtpSenderFrameEncryptionConfig&, - rtc::scoped_refptr), + scoped_refptr), (override)); MOCK_METHOD(void, DestroyRtpVideoSender, (RtpVideoSenderInterface*), (override)); + MOCK_METHOD(void, RegisterSendingRtpStream, (RtpRtcpInterface&), (override)); + MOCK_METHOD(void, + DeRegisterSendingRtpStream, + (RtpRtcpInterface&), + (override)); MOCK_METHOD(PacketRouter*, packet_router, (), (override)); MOCK_METHOD(NetworkStateEstimateObserver*, network_state_estimate_observer, (), (override)); - MOCK_METHOD(TransportFeedbackObserver*, - transport_feedback_observer, - (), - (override)); MOCK_METHOD(RtpPacketSender*, packet_sender, (), (override)); MOCK_METHOD(void, SetAllocatedSendBitrateLimits, (BitrateAllocationLimits), (override)); + MOCK_METHOD(void, + ReconfigureBandwidthEstimation, + (const BandwidthEstimationSettings&), + (override)); MOCK_METHOD(void, SetPacingFactor, (float), (override)); MOCK_METHOD(void, SetQueueTimeLimit, (int), (override)); 
MOCK_METHOD(StreamFeedbackProvider*, @@ -76,17 +87,17 @@ class MockRtpTransportControllerSend (override)); MOCK_METHOD(void, OnNetworkRouteChanged, - (absl::string_view, const rtc::NetworkRoute&), + (absl::string_view, const webrtc::NetworkRoute&), (override)); MOCK_METHOD(void, OnNetworkAvailability, (bool), (override)); MOCK_METHOD(NetworkLinkRtcpObserver*, GetRtcpObserver, (), (override)); MOCK_METHOD(int64_t, GetPacerQueuingDelayMs, (), (const, override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, GetFirstPacketTime, (), (const, override)); MOCK_METHOD(void, EnablePeriodicAlrProbing, (bool), (override)); - MOCK_METHOD(void, OnSentPacket, (const rtc::SentPacket&), (override)); + MOCK_METHOD(void, OnSentPacket, (const SentPacketInfo&), (override)); MOCK_METHOD(void, SetSdpBitrateParameters, (const BitrateConstraints&), @@ -100,6 +111,19 @@ class MockRtpTransportControllerSend MOCK_METHOD(void, IncludeOverheadInPacedSender, (), (override)); MOCK_METHOD(void, OnReceivedPacket, (const ReceivedPacket&), (override)); MOCK_METHOD(void, EnsureStarted, (), (override)); + MOCK_METHOD(NetworkControllerInterface*, + GetNetworkController, + (), + (override)); + MOCK_METHOD(void, + EnableCongestionControlFeedbackAccordingToRfc8888, + (), + (override)); + MOCK_METHOD(int, + ReceivedCongestionControlFeedbackCount, + (), + (const, override)); + MOCK_METHOD(int, ReceivedTransportCcFeedbackCount, (), (const, override)); }; } // namespace webrtc #endif // CALL_TEST_MOCK_RTP_TRANSPORT_CONTROLLER_SEND_H_ diff --git a/call/version.cc b/call/version.cc index 0d10edf2cc..7684820d5d 100644 --- a/call/version.cc +++ b/call/version.cc @@ -13,7 +13,7 @@ namespace webrtc { // The timestamp is always in UTC. -const char* const kSourceTimestamp = "WebRTC source stamp 2023-09-05T04:12:20"; +const char* const kSourceTimestamp = "WebRTC source stamp 2025-04-28T04:05:58"; void LoadWebRTCVersionInRegister() { // Using volatile to instruct the compiler to not optimize `p` away even diff --git a/call/video_receive_stream.cc b/call/video_receive_stream.cc index 3e2a51322f..3bfb35297f 100644 --- a/call/video_receive_stream.cc +++ b/call/video_receive_stream.cc @@ -10,6 +10,14 @@ #include "call/video_receive_stream.h" +#include +#include +#include +#include + +#include "api/call/transport.h" +#include "api/rtp_headers.h" +#include "api/video_codecs/sdp_video_format.h" #include "rtc_base/strings/string_builder.h" namespace webrtc { @@ -29,7 +37,7 @@ bool VideoReceiveStreamInterface::Decoder::operator==( std::string VideoReceiveStreamInterface::Decoder::ToString() const { char buf[1024]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); ss << "{payload_type: " << payload_type; ss << ", payload_name: " << video_format.name; ss << ", codec_params: {"; @@ -52,7 +60,7 @@ VideoReceiveStreamInterface::Stats::~Stats() = default; std::string VideoReceiveStreamInterface::Stats::ToString( int64_t time_ms) const { char buf[2048]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); ss << "VideoReceiveStreamInterface stats: " << time_ms << ", {ssrc: " << ssrc << ", "; ss << "total_bps: " << total_bitrate_bps << ", "; @@ -84,7 +92,7 @@ std::string VideoReceiveStreamInterface::Stats::ToString( << jitter_buffer_target_delay.seconds() << ", "; ss << "jitterBufferEmittedCount: " << jitter_buffer_emitted_count << ", "; ss << "jitterBufferMinimumDelay: " - << jitter_buffer_minimum_delay.seconds(); + << jitter_buffer_minimum_delay.seconds() << ", "; ss << "totalDecodeTime: " << 
total_decode_time.seconds() << ", "; ss << "totalProcessingDelay: " << total_processing_delay.seconds() << ", "; @@ -112,7 +120,7 @@ VideoReceiveStreamInterface::Config::Config::~Config() = default; std::string VideoReceiveStreamInterface::Config::ToString() const { char buf[4 * 1024]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); ss << "{decoders: ["; for (size_t i = 0; i < decoders.size(); ++i) { ss << decoders[i].ToString(); @@ -136,7 +144,7 @@ VideoReceiveStreamInterface::Config::Rtp::~Rtp() = default; std::string VideoReceiveStreamInterface::Config::Rtp::ToString() const { char buf[2 * 1024]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); ss << "{remote_ssrc: " << remote_ssrc; ss << ", local_ssrc: " << local_ssrc; ss << ", rtcp_mode: " diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h index 12f6bf60c8..e7c6c85db7 100644 --- a/call/video_receive_stream.h +++ b/call/video_receive_stream.h @@ -12,8 +12,10 @@ #define CALL_VIDEO_RECEIVE_STREAM_H_ #include +#include #include #include +#include #include #include #include @@ -21,8 +23,12 @@ #include "api/call/transport.h" #include "api/crypto/crypto_options.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/frame_transformer_interface.h" #include "api/rtp_headers.h" -#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/recordable_encoded_frame.h" #include "api/video/video_content_type.h" #include "api/video/video_frame.h" @@ -34,7 +40,6 @@ #include "common_video/frame_counts.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "rtc_base/checks.h" namespace webrtc { @@ -55,7 +60,7 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { std::function callback; // Memento of when a keyframe request was last sent. The // VideoReceiveStreamInterface client should not interpret the attribute. - absl::optional last_keyframe_request_ms; + std::optional last_keyframe_request_ms; }; // TODO(mflodman) Move all these settings to VideoDecoder and move the @@ -88,8 +93,8 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { uint32_t frames_rendered = 0; // Decoder stats. - absl::optional decoder_implementation_name; - absl::optional power_efficient_decoder; + std::optional decoder_implementation_name; + std::optional power_efficient_decoder; FrameCounts frame_counts; int decode_ms = 0; int max_decode_ms = 0; @@ -115,9 +120,11 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { TimeDelta total_decode_time = TimeDelta::Zero(); // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalprocessingdelay TimeDelta total_processing_delay = TimeDelta::Zero(); - // TODO(bugs.webrtc.org/13986): standardize + + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalassemblytime TimeDelta total_assembly_time = TimeDelta::Zero(); uint32_t frames_assembled_from_multiple_packets = 0; + // Total inter frame delay in seconds. 
// https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalinterframedelay double total_inter_frame_delay = 0; @@ -125,7 +132,19 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalsqauredinterframedelay double total_squared_inter_frame_delay = 0; int64_t first_frame_received_to_decoded_ms = -1; - absl::optional qp_sum; + std::optional qp_sum; + + // Corruption score, indicating the probability of corruption. Its value is + // between 0 and 1, where 0 means no corruption and 1 means that the + // compressed frame is corrupted. + // However, note that the corruption score may not accurately reflect + // corruption. E.g. even if the corruption score is 0, the compressed frame + // may still be corrupted and vice versa. + std::optional corruption_score_sum; + std::optional corruption_score_squared_sum; + // Number of frames the `corruption_score` was calculated on. This is + // usually not the same as `frames_decoded`. + uint32_t corruption_score_count = 0; int current_payload_type = -1; @@ -142,18 +161,29 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { VideoContentType content_type = VideoContentType::UNSPECIFIED; // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-estimatedplayouttimestamp - absl::optional estimated_playout_ntp_timestamp_ms; + std::optional estimated_playout_ntp_timestamp_ms; int sync_offset_ms = std::numeric_limits::max(); uint32_t ssrc = 0; std::string c_name; RtpReceiveStats rtp_stats; RtcpPacketTypeCounter rtcp_packet_type_counts; - absl::optional rtx_rtp_stats; + std::optional rtx_rtp_stats; // Timing frame info: all important timestamps for a full lifetime of a // single 'timing frame'. - absl::optional timing_frame_info; + std::optional timing_frame_info; + + // Remote outbound stats derived by the received RTCP sender reports. + // https://w3c.github.io/webrtc-stats/#remoteoutboundrtpstats-dict* + std::optional last_sender_report_timestamp; + // TODO: bugs.webrtc.org/370535296 - Remove the utc timestamp when linked + // issue is fixed. + std::optional last_sender_report_utc_timestamp; + std::optional last_sender_report_remote_utc_timestamp; + uint32_t sender_reports_packets_sent = 0; + uint64_t sender_reports_bytes_sent = 0; + uint64_t sender_reports_reports_count = 0; }; struct Config { @@ -238,7 +268,7 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { Transport* rtcp_send_transport = nullptr; // Must always be set. - rtc::VideoSinkInterface* renderer = nullptr; + VideoSinkInterface* renderer = nullptr; // Expected delay needed by the renderer, i.e. the frame will be delivered // this many milliseconds, if possible, earlier than the ideal render time. @@ -256,12 +286,12 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { // An optional custom frame decryptor that allows the entire frame to be // decrypted in whatever way the caller choses. This is not required by // default. - rtc::scoped_refptr frame_decryptor; + scoped_refptr frame_decryptor; // Per PeerConnection cryptography options. CryptoOptions crypto_options; - rtc::scoped_refptr frame_transformer; + scoped_refptr frame_transformer; }; // TODO(pbos): Add info on currently-received codec to Stats. @@ -291,8 +321,6 @@ class VideoReceiveStreamInterface : public MediaReceiveStreamInterface { // Cause eventual generation of a key frame from the sender. 
virtual void GenerateKeyFrame() = 0; - virtual void SetRtcpMode(RtcpMode mode) = 0; - // Sets or clears a flexfec RTP sink. This affects `rtp.packet_sink_` and // `rtp.protected_by_flexfec` parts of the configuration. Must be called on // the packet delivery thread. diff --git a/call/video_send_stream.cc b/call/video_send_stream.cc index e8532a7a26..79a911c828 100644 --- a/call/video_send_stream.cc +++ b/call/video_send_stream.cc @@ -10,9 +10,13 @@ #include "call/video_send_stream.h" +#include +#include #include -#include "api/crypto/frame_encryptor_interface.h" +#include "api/call/transport.h" +#include "api/video_codecs/video_encoder.h" +#include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/strings/string_format.h" @@ -39,7 +43,7 @@ VideoSendStream::StreamStats::~StreamStats() = default; std::string VideoSendStream::StreamStats::ToString() const { char buf[1024]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); ss << "type: " << StreamTypeToString(type); if (referenced_media_ssrc.has_value()) ss << " (for: " << referenced_media_ssrc.value() << ")"; @@ -68,9 +72,9 @@ VideoSendStream::Stats::~Stats() = default; std::string VideoSendStream::Stats::ToString(int64_t time_ms) const { char buf[2048]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); ss << "VideoSendStream stats: " << time_ms << ", {"; - ss << "input_fps: " << rtc::StringFormat("%.1f", input_frame_rate) << ", "; + ss << "input_fps: " << StringFormat("%.1f", input_frame_rate) << ", "; ss << "encode_fps: " << encode_frame_rate << ", "; ss << "encode_ms: " << avg_encode_time_ms << ", "; ss << "encode_usage_perc: " << encode_usage_percent << ", "; @@ -110,7 +114,7 @@ VideoSendStream::Config::Config::~Config() = default; std::string VideoSendStream::Config::ToString() const { char buf[2 * 1024]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); ss << "{encoder_settings: { experiment_cpu_load_estimator: " << (encoder_settings.experiment_cpu_load_estimator ? "on" : "off") << "}}"; ss << ", rtp: " << rtp.ToString(); diff --git a/call/video_send_stream.h b/call/video_send_stream.h index 5fde44a719..906d79598b 100644 --- a/call/video_send_stream.h +++ b/call/video_send_stream.h @@ -14,10 +14,10 @@ #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/adaptation/resource.h" #include "api/call/transport.h" #include "api/crypto/crypto_options.h" @@ -25,18 +25,20 @@ #include "api/rtp_parameters.h" #include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" +#include "api/units/data_rate.h" #include "api/video/video_content_type.h" #include "api/video/video_frame.h" -#include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "api/video/video_stream_encoder_settings.h" #include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/video_encoder_factory.h" #include "call/rtp_config.h" #include "common_video/frame_counts.h" #include "common_video/include/quality_limitation_reason.h" #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "rtc_base/checks.h" #include "video/config/video_encoder_config.h" namespace webrtc { @@ -72,7 +74,7 @@ class VideoSendStream { // If `type` is kRtx or kFlexfec this value is present. The referenced SSRC // is the kMedia stream that this stream is performing retransmissions or // FEC for. 
If `type` is kMedia, this value is null. - absl::optional referenced_media_ssrc; + std::optional referenced_media_ssrc; FrameCounts frame_counts; int width = 0; int height = 0; @@ -87,21 +89,24 @@ class VideoSendStream { RtcpPacketTypeCounter rtcp_packet_type_counts; // A snapshot of the most recent Report Block with additional data of // interest to statistics. Used to implement RTCRemoteInboundRtpStreamStats. - absl::optional report_block_data; + std::optional report_block_data; double encode_frame_rate = 0.0; int frames_encoded = 0; - absl::optional qp_sum; + std::optional qp_sum; uint64_t total_encode_time_ms = 0; uint64_t total_encoded_bytes_target = 0; uint32_t huge_frames_sent = 0; - absl::optional scalability_mode; + std::optional scalability_mode; + // The target bitrate is what we tell the encoder to produce. What the + // encoder actually produces is the sum of encoded bytes. + std::optional target_bitrate; }; struct Stats { Stats(); ~Stats(); std::string ToString(int64_t time_ms) const; - absl::optional encoder_implementation_name; + std::optional encoder_implementation_name; double input_frame_rate = 0; int encode_frame_rate = 0; int avg_encode_time_ms = 0; @@ -113,12 +118,16 @@ class VideoSendStream { uint64_t total_encoded_bytes_target = 0; uint32_t frames = 0; uint32_t frames_dropped_by_capturer = 0; + uint32_t frames_dropped_by_bad_timestamp = 0; uint32_t frames_dropped_by_encoder_queue = 0; uint32_t frames_dropped_by_rate_limiter = 0; uint32_t frames_dropped_by_congestion_window = 0; uint32_t frames_dropped_by_encoder = 0; - // Bitrate the encoder is currently configured to use due to bandwidth - // limitations. + // Metric only used by legacy getStats()'s BWE. + // - Similar to `StreamStats::target_bitrate` except this is for the whole + // stream as opposed to being per substream (per SSRC). + // - Unlike what you would expect, it is not equal to the sum of all + // substream targets and may sometimes over-report e.g. webrtc:392424845. int target_media_bitrate_bps = 0; // Bitrate the encoder is actually producing. int media_bitrate_bps = 0; @@ -144,7 +153,7 @@ class VideoSendStream { webrtc::VideoContentType::UNSPECIFIED; uint32_t frames_sent = 0; uint32_t huge_frames_sent = 0; - absl::optional power_efficient_encoder; + std::optional power_efficient_encoder; }; struct Config { @@ -193,7 +202,7 @@ class VideoSendStream { // An optional custom frame encryptor that allows the entire frame to be // encrypted in whatever way the caller chooses. This is not required by // default. - rtc::scoped_refptr frame_encryptor; + scoped_refptr frame_encryptor; // An optional encoder selector provided by the user. // Overrides VideoEncoderFactory::GetEncoderSelector(). @@ -203,7 +212,7 @@ class VideoSendStream { // Per PeerConnection cryptography options. CryptoOptions crypto_options; - rtc::scoped_refptr frame_transformer; + scoped_refptr frame_transformer; private: // Access to the copy constructor is private to force use of the Copy() @@ -211,20 +220,8 @@ class VideoSendStream { Config(const Config&); }; - // Updates the sending state for all simulcast layers that the video send - // stream owns. This can mean updating the activity one or for multiple - // layers. The ordering of active layers is the order in which the - // rtp modules are stored in the VideoSendStream. - // Note: This starts stream activity if it is inactive and one of the layers - // is active. This stops stream activity if it is active and all layers are - // inactive. 
- // `active_layers` should have the same size as the number of configured - // simulcast layers or one if only one rtp stream is used. - virtual void StartPerRtpStream(std::vector active_layers) = 0; - // Starts stream activity. // When a stream is active, it can receive, process and deliver packets. - // Prefer to use StartPerRtpStream. virtual void Start() = 0; // Stops stream activity. @@ -233,11 +230,8 @@ class VideoSendStream { // Accessor for determining if the stream is active. This is an inexpensive // call that must be made on the same thread as `Start()` and `Stop()` methods - // are called on and will return `true` iff activity has been started either - // via `Start()` or `StartPerRtpStream()`. If activity is either - // stopped or is in the process of being stopped as a result of a call to - // either `Stop()` or `StartPerRtpStream()` where all layers were - // deactivated, the return value will be `false`. + // are called on and will return `true` iff activity has been started + // via `Start()`. virtual bool started() = 0; // If the resource is overusing, the VideoSendStream will try to reduce @@ -245,12 +239,11 @@ class VideoSendStream { // TODO(https://crbug.com/webrtc/11565): When the ResourceAdaptationProcessor // is moved to Call this method could be deleted altogether in favor of // Call-level APIs only. - virtual void AddAdaptationResource(rtc::scoped_refptr resource) = 0; - virtual std::vector> - GetAdaptationResources() = 0; + virtual void AddAdaptationResource(scoped_refptr resource) = 0; + virtual std::vector> GetAdaptationResources() = 0; virtual void SetSource( - rtc::VideoSourceInterface* source, + VideoSourceInterface* source, const DegradationPreference& degradation_preference) = 0; // Set which streams to send. Must have at least as many SSRCs as configured @@ -263,6 +256,9 @@ class VideoSendStream { virtual Stats GetStats() = 0; + // TODO: webrtc:40644448 - Make this pure virtual. 
+ virtual void SetStats(const Stats& stats) { RTC_CHECK_NOTREACHED(); } + virtual void GenerateKeyFrame(const std::vector& rids) = 0; protected: diff --git a/common_audio/BUILD.gn b/common_audio/BUILD.gn index 2ae6d32710..9a7478d8db 100644 --- a/common_audio/BUILD.gn +++ b/common_audio/BUILD.gn @@ -46,6 +46,7 @@ rtc_library("common_audio") { ":common_audio_c", ":sinc_resampler", "../api:array_view", + "../api/audio:audio_frame_api", "../rtc_base:checks", "../rtc_base:gtest_prod", "../rtc_base:logging", @@ -58,7 +59,6 @@ rtc_library("common_audio") { "../system_wrappers", "third_party/ooura:fft_size_256", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] defines = [] @@ -117,7 +117,6 @@ rtc_library("common_audio_c") { "signal_processing/filter_ma_fast_q12.c", "signal_processing/get_hanning_window.c", "signal_processing/get_scaling_square.c", - "signal_processing/ilbc_specific_functions.c", "signal_processing/include/real_fft.h", "signal_processing/include/signal_processing_library.h", "signal_processing/include/spl_inl.h", @@ -139,6 +138,7 @@ rtc_library("common_audio_c") { "signal_processing/spl_sqrt.c", "signal_processing/splitting_filter.c", "signal_processing/sqrt_of_one_minus_x_squared.c", + "signal_processing/vector_operations.c", "signal_processing/vector_scaling_operations.c", "vad/include/webrtc_vad.h", "vad/vad_core.c", @@ -191,6 +191,7 @@ rtc_library("common_audio_c") { } rtc_library("common_audio_cc") { + visibility += webrtc_default_visibility sources = [ "signal_processing/dot_product_with_scale.cc", "signal_processing/dot_product_with_scale.h", @@ -369,10 +370,12 @@ if (rtc_include_tests && !build_with_chromium) { deps = [ ":common_audio", ":common_audio_c", + ":common_audio_cc", ":fir_filter", ":fir_filter_factory", ":sinc_resampler", "../rtc_base:checks", + "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:rtc_base_tests_utils", "../rtc_base:stringutils", @@ -387,8 +390,6 @@ if (rtc_include_tests && !build_with_chromium) { ] if (is_android) { - deps += [ "//testing/android/native_test:native_test_support" ] - shard_timeout = 900 } } diff --git a/common_audio/audio_util_unittest.cc b/common_audio/audio_util_unittest.cc index a215a123b1..497ff7329f 100644 --- a/common_audio/audio_util_unittest.cc +++ b/common_audio/audio_util_unittest.cc @@ -124,20 +124,23 @@ TEST(AudioUtilTest, FloatS16ToDbfs) { } TEST(AudioUtilTest, InterleavingStereo) { - const int16_t kInterleaved[] = {2, 3, 4, 9, 8, 27, 16, 81}; - const size_t kSamplesPerChannel = 4; - const int kNumChannels = 2; - const size_t kLength = kSamplesPerChannel * kNumChannels; - int16_t left[kSamplesPerChannel], right[kSamplesPerChannel]; - int16_t* deinterleaved[] = {left, right}; - Deinterleave(kInterleaved, kSamplesPerChannel, kNumChannels, deinterleaved); + constexpr int16_t kInterleaved[] = {2, 3, 4, 9, 8, 27, 16, 81}; + constexpr size_t kSamplesPerChannel = 4; + constexpr int kNumChannels = 2; + constexpr size_t kLength = kSamplesPerChannel * kNumChannels; + int16_t deinterleaved[kLength] = {}; + DeinterleavedView deinterleaved_view( + &deinterleaved[0], kSamplesPerChannel, kNumChannels); + Deinterleave({&kInterleaved[0], kSamplesPerChannel, kNumChannels}, + deinterleaved_view); const int16_t kRefLeft[] = {2, 4, 8, 16}; const int16_t kRefRight[] = {3, 9, 27, 81}; - ExpectArraysEq(kRefLeft, left, kSamplesPerChannel); - ExpectArraysEq(kRefRight, right, kSamplesPerChannel); + ExpectArraysEq(kRefLeft, deinterleaved_view[0].data(), kSamplesPerChannel); + ExpectArraysEq(kRefRight, 
deinterleaved_view[1].data(), kSamplesPerChannel); int16_t interleaved[kLength]; - Interleave(deinterleaved, kSamplesPerChannel, kNumChannels, interleaved); + Interleave({&deinterleaved[0], kSamplesPerChannel, kNumChannels}, + {&interleaved[0], kSamplesPerChannel, kNumChannels}); ExpectArraysEq(kInterleaved, interleaved, kLength); } @@ -146,12 +149,16 @@ TEST(AudioUtilTest, InterleavingMonoIsIdentical) { const size_t kSamplesPerChannel = 5; const int kNumChannels = 1; int16_t mono[kSamplesPerChannel]; - int16_t* deinterleaved[] = {mono}; - Deinterleave(kInterleaved, kSamplesPerChannel, kNumChannels, deinterleaved); - ExpectArraysEq(kInterleaved, mono, kSamplesPerChannel); + DeinterleavedView deinterleaved_view(&mono[0], kSamplesPerChannel, + kNumChannels); + Deinterleave({kInterleaved, kSamplesPerChannel, kNumChannels}, + deinterleaved_view); + ExpectArraysEq(kInterleaved, deinterleaved_view.AsMono().data(), + kSamplesPerChannel); int16_t interleaved[kSamplesPerChannel]; - Interleave(deinterleaved, kSamplesPerChannel, kNumChannels, interleaved); + Interleave(deinterleaved_view, + {&interleaved[0], kSamplesPerChannel, kNumChannels}); ExpectArraysEq(mono, interleaved, kSamplesPerChannel); } @@ -194,57 +201,5 @@ TEST(AudioUtilTest, DownmixInterleavedToMono) { } } -TEST(AudioUtilTest, DownmixToMonoTest) { - { - const size_t kNumFrames = 4; - const int kNumChannels = 1; - const float input_data[kNumChannels][kNumFrames] = {{1.f, 2.f, -1.f, -3.f}}; - const float* input[kNumChannels]; - for (int i = 0; i < kNumChannels; ++i) { - input[i] = input_data[i]; - } - - float downmixed[kNumFrames]; - - DownmixToMono(input, kNumFrames, kNumChannels, downmixed); - - EXPECT_THAT(downmixed, ElementsAreArray(input_data[0])); - } - { - const size_t kNumFrames = 3; - const int kNumChannels = 2; - const float input_data[kNumChannels][kNumFrames] = {{1.f, 2.f, -1.f}, - {3.f, 0.f, 1.f}}; - const float* input[kNumChannels]; - for (int i = 0; i < kNumChannels; ++i) { - input[i] = input_data[i]; - } - - float downmixed[kNumFrames]; - const float expected[kNumFrames] = {2.f, 1.f, 0.f}; - - DownmixToMono(input, kNumFrames, kNumChannels, downmixed); - - EXPECT_THAT(downmixed, ElementsAreArray(expected)); - } - { - const size_t kNumFrames = 3; - const int kNumChannels = 3; - const int16_t input_data[kNumChannels][kNumFrames] = { - {30000, -5, -30000}, {30000, -10, -30999}, {24001, -20, -30000}}; - const int16_t* input[kNumChannels]; - for (int i = 0; i < kNumChannels; ++i) { - input[i] = input_data[i]; - } - - int16_t downmixed[kNumFrames]; - const int16_t expected[kNumFrames] = {28000, -11, -30333}; - - DownmixToMono(input, kNumFrames, kNumChannels, downmixed); - - EXPECT_THAT(downmixed, ElementsAreArray(expected)); - } -} - } // namespace } // namespace webrtc diff --git a/common_audio/channel_buffer.h b/common_audio/channel_buffer.h index 9f08d6089b..efe761cf60 100644 --- a/common_audio/channel_buffer.h +++ b/common_audio/channel_buffer.h @@ -23,6 +23,44 @@ namespace webrtc { +// TODO: b/335805780 - Remove this method. Instead, use Deinterleave() from +// audio_util.h which requires size checked buffer views. 
+template +void Deinterleave(const T* interleaved, + size_t samples_per_channel, + size_t num_channels, + T* const* deinterleaved) { + for (size_t i = 0; i < num_channels; ++i) { + T* channel = deinterleaved[i]; + size_t interleaved_idx = i; + for (size_t j = 0; j < samples_per_channel; ++j) { + channel[j] = interleaved[interleaved_idx]; + interleaved_idx += num_channels; + } + } +} + +// `Interleave()` variant for cases where the deinterleaved channels aren't +// represented by a `DeinterleavedView`. +// TODO: b/335805780 - Remove this method. Instead, use Deinterleave() from +// audio_util.h which requires size checked buffer views. +template +void Interleave(const T* const* deinterleaved, + size_t samples_per_channel, + size_t num_channels, + InterleavedView& interleaved) { + RTC_DCHECK_EQ(NumChannels(interleaved), num_channels); + RTC_DCHECK_EQ(SamplesPerChannel(interleaved), samples_per_channel); + for (size_t i = 0; i < num_channels; ++i) { + const T* channel = deinterleaved[i]; + size_t interleaved_idx = i; + for (size_t j = 0; j < samples_per_channel; ++j) { + interleaved[interleaved_idx] = channel[j]; + interleaved_idx += num_channels; + } + } +} + // Helper to encapsulate a contiguous data buffer, full or split into frequency // bands, with access to a pointer arrays of the deinterleaved channels and // bands. The buffer is zero initialized at creation. @@ -52,22 +90,20 @@ class ChannelBuffer { num_channels_(num_channels), num_bands_(num_bands), bands_view_(num_allocated_channels_, - std::vector>(num_bands_)), - channels_view_( - num_bands_, - std::vector>(num_allocated_channels_)) { + std::vector>(num_bands_)), + channels_view_(num_bands_, + std::vector>(num_allocated_channels_)) { // Temporarily cast away const_ness to allow populating the array views. auto* bands_view = - const_cast>>*>(&bands_view_); + const_cast>>*>(&bands_view_); auto* channels_view = - const_cast>>*>( - &channels_view_); + const_cast>>*>(&channels_view_); for (size_t ch = 0; ch < num_allocated_channels_; ++ch) { for (size_t band = 0; band < num_bands_; ++band) { - (*channels_view)[band][ch] = rtc::ArrayView( - &data_[ch * num_frames_ + band * num_frames_per_band_], - num_frames_per_band_); + (*channels_view)[band][ch] = + ArrayView(&data_[ch * num_frames_ + band * num_frames_per_band_], + num_frames_per_band_); (*bands_view)[ch][band] = channels_view_[band][ch]; channels_[band * num_allocated_channels_ + ch] = channels_view_[band][ch].data(); @@ -98,10 +134,10 @@ class ChannelBuffer { const ChannelBuffer* t = this; return const_cast(t->channels(band)); } - rtc::ArrayView> channels_view(size_t band = 0) { + ArrayView> channels_view(size_t band = 0) { return channels_view_[band]; } - rtc::ArrayView> channels_view(size_t band = 0) const { + ArrayView> channels_view(size_t band = 0) const { return channels_view_[band]; } @@ -122,10 +158,10 @@ class ChannelBuffer { return const_cast(t->bands(channel)); } - rtc::ArrayView> bands_view(size_t channel) { + ArrayView> bands_view(size_t channel) { return bands_view_[channel]; } - rtc::ArrayView> bands_view(size_t channel) const { + ArrayView> bands_view(size_t channel) const { return bands_view_[channel]; } @@ -169,8 +205,8 @@ class ChannelBuffer { // Number of channels the user sees. size_t num_channels_; const size_t num_bands_; - const std::vector>> bands_view_; - const std::vector>> channels_view_; + const std::vector>> bands_view_; + const std::vector>> channels_view_; }; // One int16_t and one float ChannelBuffer that are kept in sync. 
The sync is diff --git a/common_audio/fir_filter_unittest.cc b/common_audio/fir_filter_unittest.cc index 5c5880b5eb..9b973882e2 100644 --- a/common_audio/fir_filter_unittest.cc +++ b/common_audio/fir_filter_unittest.cc @@ -12,6 +12,7 @@ #include +#include #include #include "common_audio/fir_filter_factory.h" @@ -20,84 +21,81 @@ namespace webrtc { namespace { -static const float kCoefficients[] = {0.2f, 0.3f, 0.5f, 0.7f, 0.11f}; -static const size_t kCoefficientsLength = - sizeof(kCoefficients) / sizeof(kCoefficients[0]); +static constexpr size_t kCoefficientsSize = 5; +static const std::array kCoefficients = { + 0.2f, 0.3f, 0.5f, 0.7f, 0.11f}; -static const float kInput[] = {1.f, 2.f, 3.f, 4.f, 5.f, - 6.f, 7.f, 8.f, 9.f, 10.f}; -static const size_t kInputLength = sizeof(kInput) / sizeof(kInput[0]); - -void VerifyOutput(const float* expected_output, - const float* output, - size_t length) { - EXPECT_EQ( - 0, memcmp(expected_output, output, length * sizeof(expected_output[0]))); -} +static constexpr size_t kInputSize = 10; +static const std::array kInput = {1.f, 2.f, 3.f, 4.f, 5.f, + 6.f, 7.f, 8.f, 9.f, 10.f}; } // namespace TEST(FIRFilterTest, FilterAsIdentity) { - const float kCoefficients[] = {1.f, 0.f, 0.f, 0.f, 0.f}; - float output[kInputLength]; - std::unique_ptr filter( - CreateFirFilter(kCoefficients, kCoefficientsLength, kInputLength)); - filter->Filter(kInput, kInputLength, output); - - VerifyOutput(kInput, output, kInputLength); + const std::array kCoefficientsTested = { + 1.f, 0.f, 0.f, 0.f, 0.f}; + std::array output; + std::unique_ptr filter(CreateFirFilter( + kCoefficientsTested.data(), kCoefficientsTested.size(), kInput.size())); + filter->Filter(kInput.data(), kInput.size(), output.data()); + for (size_t i = 0; i < kInput.size(); i++) { + EXPECT_EQ(kInput[i], output[i]); + } } TEST(FIRFilterTest, FilterUsedAsScalarMultiplication) { - const float kCoefficients[] = {5.f, 0.f, 0.f, 0.f, 0.f}; - float output[kInputLength]; - std::unique_ptr filter( - CreateFirFilter(kCoefficients, kCoefficientsLength, kInputLength)); - filter->Filter(kInput, kInputLength, output); + const std::array kCoefficientsTested = { + 5.f, 0.f, 0.f, 0.f, 0.f}; + std::array output; + std::unique_ptr filter(CreateFirFilter( + kCoefficientsTested.data(), kCoefficientsTested.size(), kInput.size())); + filter->Filter(kInput.data(), kInput.size(), output.data()); EXPECT_FLOAT_EQ(5.f, output[0]); EXPECT_FLOAT_EQ(20.f, output[3]); EXPECT_FLOAT_EQ(25.f, output[4]); - EXPECT_FLOAT_EQ(50.f, output[kInputLength - 1]); + EXPECT_FLOAT_EQ(50.f, output[kInput.size() - 1]); } TEST(FIRFilterTest, FilterUsedAsInputShifting) { - const float kCoefficients[] = {0.f, 0.f, 0.f, 0.f, 1.f}; - float output[kInputLength]; - std::unique_ptr filter( - CreateFirFilter(kCoefficients, kCoefficientsLength, kInputLength)); - filter->Filter(kInput, kInputLength, output); + const std::array kCoefficientsTested = { + 0.f, 0.f, 0.f, 0.f, 1.f}; + std::array output; + std::unique_ptr filter(CreateFirFilter( + kCoefficientsTested.data(), kCoefficientsTested.size(), kInput.size())); + filter->Filter(kInput.data(), kInput.size(), output.data()); EXPECT_FLOAT_EQ(0.f, output[0]); EXPECT_FLOAT_EQ(0.f, output[3]); EXPECT_FLOAT_EQ(1.f, output[4]); EXPECT_FLOAT_EQ(2.f, output[5]); - EXPECT_FLOAT_EQ(6.f, output[kInputLength - 1]); + EXPECT_FLOAT_EQ(6.f, output[kInput.size() - 1]); } TEST(FIRFilterTest, FilterUsedAsArbitraryWeighting) { - float output[kInputLength]; - std::unique_ptr filter( - CreateFirFilter(kCoefficients, kCoefficientsLength, 
kInputLength)); - filter->Filter(kInput, kInputLength, output); + std::array output; + std::unique_ptr filter(CreateFirFilter( + kCoefficients.data(), kCoefficients.size(), kInput.size())); + filter->Filter(kInput.data(), kInput.size(), output.data()); EXPECT_FLOAT_EQ(0.2f, output[0]); EXPECT_FLOAT_EQ(3.4f, output[3]); EXPECT_FLOAT_EQ(5.21f, output[4]); EXPECT_FLOAT_EQ(7.02f, output[5]); - EXPECT_FLOAT_EQ(14.26f, output[kInputLength - 1]); + EXPECT_FLOAT_EQ(14.26f, output[kInput.size() - 1]); } TEST(FIRFilterTest, FilterInLengthLesserOrEqualToCoefficientsLength) { - float output[kInputLength]; + std::array output; std::unique_ptr filter( - CreateFirFilter(kCoefficients, kCoefficientsLength, 2)); - filter->Filter(kInput, 2, output); + CreateFirFilter(kCoefficients.data(), kCoefficients.size(), 2)); + filter->Filter(kInput.data(), 2, output.data()); EXPECT_FLOAT_EQ(0.2f, output[0]); EXPECT_FLOAT_EQ(0.7f, output[1]); - filter.reset( - CreateFirFilter(kCoefficients, kCoefficientsLength, kCoefficientsLength)); - filter->Filter(kInput, kCoefficientsLength, output); + filter.reset(CreateFirFilter(kCoefficients.data(), kCoefficients.size(), + kCoefficients.size())); + filter->Filter(kInput.data(), kCoefficients.size(), output.data()); EXPECT_FLOAT_EQ(0.2f, output[0]); EXPECT_FLOAT_EQ(3.4f, output[3]); @@ -105,103 +103,101 @@ TEST(FIRFilterTest, FilterInLengthLesserOrEqualToCoefficientsLength) { } TEST(FIRFilterTest, MultipleFilterCalls) { - float output[kInputLength]; + std::array output; std::unique_ptr filter( - CreateFirFilter(kCoefficients, kCoefficientsLength, 3)); - filter->Filter(kInput, 2, output); + CreateFirFilter(kCoefficients.data(), kCoefficients.size(), 3)); + filter->Filter(kInput.data(), 2, output.data()); EXPECT_FLOAT_EQ(0.2f, output[0]); EXPECT_FLOAT_EQ(0.7f, output[1]); - filter->Filter(kInput, 2, output); + filter->Filter(kInput.data(), 2, output.data()); EXPECT_FLOAT_EQ(1.3f, output[0]); EXPECT_FLOAT_EQ(2.4f, output[1]); - filter->Filter(kInput, 2, output); + filter->Filter(kInput.data(), 2, output.data()); EXPECT_FLOAT_EQ(2.81f, output[0]); EXPECT_FLOAT_EQ(2.62f, output[1]); - filter->Filter(kInput, 2, output); + filter->Filter(kInput.data(), 2, output.data()); EXPECT_FLOAT_EQ(2.81f, output[0]); EXPECT_FLOAT_EQ(2.62f, output[1]); - filter->Filter(&kInput[3], 3, output); + filter->Filter(&kInput[3], 3, output.data()); EXPECT_FLOAT_EQ(3.41f, output[0]); EXPECT_FLOAT_EQ(4.12f, output[1]); EXPECT_FLOAT_EQ(6.21f, output[2]); - filter->Filter(&kInput[3], 3, output); + filter->Filter(&kInput[3], 3, output.data()); EXPECT_FLOAT_EQ(8.12f, output[0]); EXPECT_FLOAT_EQ(9.14f, output[1]); EXPECT_FLOAT_EQ(9.45f, output[2]); } TEST(FIRFilterTest, VerifySampleBasedVsBlockBasedFiltering) { - float output_block_based[kInputLength]; - std::unique_ptr filter( - CreateFirFilter(kCoefficients, kCoefficientsLength, kInputLength)); - filter->Filter(kInput, kInputLength, output_block_based); + float output_block_based[kInput.size()]; + std::unique_ptr filter(CreateFirFilter( + kCoefficients.data(), kCoefficients.size(), kInput.size())); + filter->Filter(kInput.data(), kInput.size(), output_block_based); - float output_sample_based[kInputLength]; - filter.reset(CreateFirFilter(kCoefficients, kCoefficientsLength, 1)); - for (size_t i = 0; i < kInputLength; ++i) { + float output_sample_based[kInput.size()]; + filter.reset(CreateFirFilter(kCoefficients.data(), kCoefficients.size(), 1)); + for (size_t i = 0; i < kInput.size(); ++i) { filter->Filter(&kInput[i], 1, &output_sample_based[i]); } - EXPECT_EQ(0, 
memcmp(output_sample_based, output_block_based, kInputLength)); + EXPECT_EQ(0, memcmp(output_sample_based, output_block_based, kInput.size())); } TEST(FIRFilterTest, SimplestHighPassFilter) { - const float kCoefficients[] = {1.f, -1.f}; - const size_t kCoefficientsLength = - sizeof(kCoefficients) / sizeof(kCoefficients[0]); - - float kConstantInput[] = {1.f, 1.f, 1.f, 1.f, 1.f, 1.f, 1.f, 1.f}; - const size_t kConstantInputLength = - sizeof(kConstantInput) / sizeof(kConstantInput[0]); - - float output[kConstantInputLength]; - std::unique_ptr filter(CreateFirFilter( - kCoefficients, kCoefficientsLength, kConstantInputLength)); - filter->Filter(kConstantInput, kConstantInputLength, output); + const std::array kCoefficientsTested = {1.f, -1.f}; + + std::array kConstantInput = {1.f, 1.f, 1.f, 1.f, + 1.f, 1.f, 1.f, 1.f}; + std::array output; + std::unique_ptr filter(CreateFirFilter(kCoefficientsTested.data(), + kCoefficientsTested.size(), + kConstantInput.size())); + filter->Filter(kConstantInput.data(), kConstantInput.size(), output.data()); EXPECT_FLOAT_EQ(1.f, output[0]); - for (size_t i = kCoefficientsLength - 1; i < kConstantInputLength; ++i) { + for (size_t i = kCoefficientsTested.size() - 1; i < kConstantInput.size(); + ++i) { EXPECT_FLOAT_EQ(0.f, output[i]); } } TEST(FIRFilterTest, SimplestLowPassFilter) { - const float kCoefficients[] = {1.f, 1.f}; - const size_t kCoefficientsLength = - sizeof(kCoefficients) / sizeof(kCoefficients[0]); + const std::array kCoefficientsTested = {1.f, 1.f}; - float kHighFrequencyInput[] = {-1.f, 1.f, -1.f, 1.f, -1.f, 1.f, -1.f, 1.f}; - const size_t kHighFrequencyInputLength = - sizeof(kHighFrequencyInput) / sizeof(kHighFrequencyInput[0]); - - float output[kHighFrequencyInputLength]; - std::unique_ptr filter(CreateFirFilter( - kCoefficients, kCoefficientsLength, kHighFrequencyInputLength)); - filter->Filter(kHighFrequencyInput, kHighFrequencyInputLength, output); + const std::array kHighFrequencyInput = {-1.f, 1.f, -1.f, 1.f, + -1.f, 1.f, -1.f, 1.f}; + std::array output; + std::unique_ptr filter( + CreateFirFilter(kCoefficientsTested.data(), kCoefficientsTested.size(), + kHighFrequencyInput.size())); + filter->Filter(kHighFrequencyInput.data(), kHighFrequencyInput.size(), + output.data()); EXPECT_FLOAT_EQ(-1.f, output[0]); - for (size_t i = kCoefficientsLength - 1; i < kHighFrequencyInputLength; ++i) { + for (size_t i = kCoefficientsTested.size() - 1; + i < kHighFrequencyInput.size(); ++i) { EXPECT_FLOAT_EQ(0.f, output[i]); } } TEST(FIRFilterTest, SameOutputWhenSwapedCoefficientsAndInput) { - float output[kCoefficientsLength]; - float output_swaped[kCoefficientsLength]; - std::unique_ptr filter( - CreateFirFilter(kCoefficients, kCoefficientsLength, kCoefficientsLength)); - // Use kCoefficientsLength for in_length to get same-length outputs. - filter->Filter(kInput, kCoefficientsLength, output); + std::array output; + std::array output_swapped; + std::unique_ptr filter(CreateFirFilter( + kCoefficients.data(), kCoefficients.size(), kCoefficients.size())); + // Use kCoefficients.size() for in_length to get same-length outputs. 
+ filter->Filter(kInput.data(), kCoefficients.size(), output.data()); - filter.reset( - CreateFirFilter(kInput, kCoefficientsLength, kCoefficientsLength)); - filter->Filter(kCoefficients, kCoefficientsLength, output_swaped); + filter.reset(CreateFirFilter(kInput.data(), kCoefficients.size(), + kCoefficients.size())); + filter->Filter(kCoefficients.data(), kCoefficients.size(), + output_swapped.data()); - for (size_t i = 0; i < kCoefficientsLength; ++i) { - EXPECT_FLOAT_EQ(output[i], output_swaped[i]); + for (size_t i = 0; i < kCoefficients.size(); ++i) { + EXPECT_FLOAT_EQ(output[i], output_swapped[i]); } } diff --git a/common_audio/include/audio_util.h b/common_audio/include/audio_util.h index 4ce46800f1..672ebd1aa0 100644 --- a/common_audio/include/audio_util.h +++ b/common_audio/include/audio_util.h @@ -18,12 +18,24 @@ #include #include +#include "api/audio/audio_view.h" #include "rtc_base/checks.h" namespace webrtc { typedef std::numeric_limits limits_int16; +// TODO(tommi, peah): Move these constants to their own header, e.g. +// `audio_constants.h`. Also consider if they should be in api/. + +// Absolute highest acceptable sample rate supported for audio processing, +// capture and codecs. Note that for some components some cases a lower limit +// applies which typically is 48000 but in some cases is lower. +constexpr int kMaxSampleRateHz = 384000; + +// Number of samples per channel for 10ms of audio at the highest sample rate. +constexpr size_t kMaxSamplesPerChannel10ms = kMaxSampleRateHz / 100u; + // The conversion functions use the following naming convention: // S16: int16_t [-32768, 32767] // Float: float [-1.0, 1.0] @@ -94,6 +106,7 @@ inline float FloatS16ToDbfs(float v) { // Copy audio from `src` channels to `dest` channels unless `src` and `dest` // point to the same address. `src` and `dest` must have the same number of // channels, and there must be sufficient space allocated in `dest`. +// TODO: b/335805780 - Accept ArrayView. template void CopyAudioIfNeeded(const T* const* src, int num_frames, @@ -111,12 +124,15 @@ void CopyAudioIfNeeded(const T* const* src, // `deinterleaved` buffers (`num_channel` buffers with `samples_per_channel` // per buffer). template -void Deinterleave(const T* interleaved, - size_t samples_per_channel, - size_t num_channels, - T* const* deinterleaved) { +void Deinterleave(const InterleavedView& interleaved, + const DeinterleavedView& deinterleaved) { + RTC_DCHECK_EQ(NumChannels(interleaved), NumChannels(deinterleaved)); + RTC_DCHECK_EQ(SamplesPerChannel(interleaved), + SamplesPerChannel(deinterleaved)); + const auto num_channels = NumChannels(interleaved); + const auto samples_per_channel = SamplesPerChannel(interleaved); for (size_t i = 0; i < num_channels; ++i) { - T* channel = deinterleaved[i]; + MonoView channel = deinterleaved[i]; size_t interleaved_idx = i; for (size_t j = 0; j < samples_per_channel; ++j) { channel[j] = interleaved[interleaved_idx]; @@ -129,52 +145,24 @@ void Deinterleave(const T* interleaved, // `interleaved`. There must be sufficient space allocated in `interleaved` // (`samples_per_channel` * `num_channels`). 
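// An illustrative sketch (not part of this patch) of the view-based
// Deinterleave()/Interleave() pair for 10 ms of 16 kHz stereo audio. It
// assumes the view constructors take (data, samples_per_channel,
// num_channels), mirroring their use in the resampler code below:
//
//   int16_t interleaved[160 * 2];  // L/R samples alternating.
//   int16_t planar[160 * 2];       // Channel 0 followed by channel 1.
//   InterleavedView<int16_t> iv(interleaved, 160, 2);
//   DeinterleavedView<int16_t> dv(planar, 160, 2);
//   Deinterleave(iv, dv);  // Split the interleaved buffer into mono channels.
//   Interleave(dv, iv);    // Merge the mono channels back again.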
template -void Interleave(const T* const* deinterleaved, - size_t samples_per_channel, - size_t num_channels, - T* interleaved) { - for (size_t i = 0; i < num_channels; ++i) { - const T* channel = deinterleaved[i]; +void Interleave(const DeinterleavedView& deinterleaved, + const InterleavedView& interleaved) { + RTC_DCHECK_EQ(NumChannels(interleaved), NumChannels(deinterleaved)); + RTC_DCHECK_EQ(SamplesPerChannel(interleaved), + SamplesPerChannel(deinterleaved)); + for (size_t i = 0; i < deinterleaved.num_channels(); ++i) { + const auto channel = deinterleaved[i]; size_t interleaved_idx = i; - for (size_t j = 0; j < samples_per_channel; ++j) { + for (size_t j = 0; j < deinterleaved.samples_per_channel(); ++j) { interleaved[interleaved_idx] = channel[j]; - interleaved_idx += num_channels; - } - } -} - -// Copies audio from a single channel buffer pointed to by `mono` to each -// channel of `interleaved`. There must be sufficient space allocated in -// `interleaved` (`samples_per_channel` * `num_channels`). -template -void UpmixMonoToInterleaved(const T* mono, - int num_frames, - int num_channels, - T* interleaved) { - int interleaved_idx = 0; - for (int i = 0; i < num_frames; ++i) { - for (int j = 0; j < num_channels; ++j) { - interleaved[interleaved_idx++] = mono[i]; - } - } -} - -template -void DownmixToMono(const T* const* input_channels, - size_t num_frames, - int num_channels, - T* out) { - for (size_t i = 0; i < num_frames; ++i) { - Intermediate value = input_channels[0][i]; - for (int j = 1; j < num_channels; ++j) { - value += input_channels[j][i]; + interleaved_idx += deinterleaved.num_channels(); } - out[i] = value / num_channels; } } // Downmixes an interleaved multichannel signal to a single channel by averaging // all channels. +// TODO: b/335805780 - Accept InterleavedView and DeinterleavedView. template void DownmixInterleavedToMonoImpl(const T* interleaved, size_t num_frames, @@ -197,12 +185,14 @@ void DownmixInterleavedToMonoImpl(const T* interleaved, } } +// TODO: b/335805780 - Accept InterleavedView and DeinterleavedView. template void DownmixInterleavedToMono(const T* interleaved, size_t num_frames, int num_channels, T* deinterleaved); +// TODO: b/335805780 - Accept InterleavedView and DeinterleavedView. template <> void DownmixInterleavedToMono(const int16_t* interleaved, size_t num_frames, diff --git a/common_audio/mocks/mock_smoothing_filter.h b/common_audio/mocks/mock_smoothing_filter.h index 9df49dd11a..02966592da 100644 --- a/common_audio/mocks/mock_smoothing_filter.h +++ b/common_audio/mocks/mock_smoothing_filter.h @@ -19,7 +19,7 @@ namespace webrtc { class MockSmoothingFilter : public SmoothingFilter { public: MOCK_METHOD(void, AddSample, (float), (override)); - MOCK_METHOD(absl::optional, GetAverage, (), (override)); + MOCK_METHOD(std::optional, GetAverage, (), (override)); MOCK_METHOD(bool, SetTimeConstantMs, (int), (override)); }; diff --git a/common_audio/resampler/include/push_resampler.h b/common_audio/resampler/include/push_resampler.h index 3da67120f0..394e96b133 100644 --- a/common_audio/resampler/include/push_resampler.h +++ b/common_audio/resampler/include/push_resampler.h @@ -14,45 +14,44 @@ #include #include +#include "api/audio/audio_view.h" + namespace webrtc { class PushSincResampler; // Wraps PushSincResampler to provide stereo support. -// TODO(ajm): add support for an arbitrary number of channels. +// Note: This implementation assumes 10ms buffer sizes throughout. 
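// An illustrative usage sketch (not part of this patch), assuming 10 ms of
// 16 kHz stereo audio resampled to 48 kHz and an InterleavedView constructor
// of the form (data, samples_per_channel, num_channels):
//
//   PushResampler<int16_t> resampler(/*src_samples_per_channel=*/160,
//                                    /*dst_samples_per_channel=*/480,
//                                    /*num_channels=*/2);
//   int16_t src[160 * 2];  // 10 ms of interleaved 16 kHz stereo.
//   int16_t dst[480 * 2];  // Room for 10 ms of interleaved 48 kHz stereo.
//   int written = resampler.Resample(InterleavedView<int16_t>(src, 160, 2),
//                                    InterleavedView<int16_t>(dst, 480, 2));
//   // `written` is the total sample count in dst, i.e. 480 * 2 = 960.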
template -class PushResampler { +class PushResampler final { public: PushResampler(); - virtual ~PushResampler(); - - // Must be called whenever the parameters change. Free to be called at any - // time as it is a no-op if parameters have not changed since the last call. - int InitializeIfNeeded(int src_sample_rate_hz, - int dst_sample_rate_hz, - size_t num_channels); + PushResampler(size_t src_samples_per_channel, + size_t dst_samples_per_channel, + size_t num_channels); + ~PushResampler(); // Returns the total number of samples provided in destination (e.g. 32 kHz, // 2 channel audio gives 640 samples). - int Resample(const T* src, size_t src_length, T* dst, size_t dst_capacity); + int Resample(InterleavedView src, InterleavedView dst); + // For when a deinterleaved/mono channel already exists and we can skip the + // deinterleaved operation. + int Resample(MonoView src, MonoView dst); private: - int src_sample_rate_hz_; - int dst_sample_rate_hz_; - size_t num_channels_; - // Vector that is needed to provide the proper inputs and outputs to the - // interleave/de-interleave methods used in Resample. This needs to be - // heap-allocated on the state to support an arbitrary number of channels - // without doing run-time heap-allocations in the Resample method. - std::vector channel_data_array_; - - struct ChannelResampler { - std::unique_ptr resampler; - std::vector source; - std::vector destination; - }; - - std::vector channel_resamplers_; + // Ensures that source and destination buffers for deinterleaving are + // correctly configured prior to resampling that requires deinterleaving. + void EnsureInitialized(size_t src_samples_per_channel, + size_t dst_samples_per_channel, + size_t num_channels); + + // Buffers used for when a deinterleaving step is necessary. + std::unique_ptr source_; + std::unique_ptr destination_; + DeinterleavedView source_view_; + DeinterleavedView destination_view_; + + std::vector> resamplers_; }; } // namespace webrtc diff --git a/common_audio/resampler/push_resampler.cc b/common_audio/resampler/push_resampler.cc index 810d778993..2e75679c82 100644 --- a/common_audio/resampler/push_resampler.cc +++ b/common_audio/resampler/push_resampler.cc @@ -15,105 +15,109 @@ #include +#include "api/audio/audio_frame.h" #include "common_audio/include/audio_util.h" #include "common_audio/resampler/push_sinc_resampler.h" #include "rtc_base/checks.h" namespace webrtc { +namespace { +// Maximum concurrent number of channels for `PushResampler<>`. +// Note that this may be different from what the maximum is for audio codecs. +constexpr int kMaxNumberOfChannels = 8; +} // namespace + +template +PushResampler::PushResampler() = default; + template -PushResampler::PushResampler() - : src_sample_rate_hz_(0), dst_sample_rate_hz_(0), num_channels_(0) {} +PushResampler::PushResampler(size_t src_samples_per_channel, + size_t dst_samples_per_channel, + size_t num_channels) { + EnsureInitialized(src_samples_per_channel, dst_samples_per_channel, + num_channels); +} template -PushResampler::~PushResampler() {} +PushResampler::~PushResampler() = default; template -int PushResampler::InitializeIfNeeded(int src_sample_rate_hz, - int dst_sample_rate_hz, +void PushResampler::EnsureInitialized(size_t src_samples_per_channel, + size_t dst_samples_per_channel, size_t num_channels) { - // These checks used to be factored out of this template function due to - // Windows debug build issues with clang. 
http://crbug.com/615050 - RTC_DCHECK_GT(src_sample_rate_hz, 0); - RTC_DCHECK_GT(dst_sample_rate_hz, 0); + RTC_DCHECK_GT(src_samples_per_channel, 0); + RTC_DCHECK_GT(dst_samples_per_channel, 0); RTC_DCHECK_GT(num_channels, 0); + RTC_DCHECK_LE(src_samples_per_channel, kMaxSamplesPerChannel10ms); + RTC_DCHECK_LE(dst_samples_per_channel, kMaxSamplesPerChannel10ms); + RTC_DCHECK_LE(num_channels, kMaxNumberOfChannels); - if (src_sample_rate_hz == src_sample_rate_hz_ && - dst_sample_rate_hz == dst_sample_rate_hz_ && - num_channels == num_channels_) { + if (src_samples_per_channel == SamplesPerChannel(source_view_) && + dst_samples_per_channel == SamplesPerChannel(destination_view_) && + num_channels == NumChannels(source_view_)) { // No-op if settings haven't changed. - return 0; + return; } - if (src_sample_rate_hz <= 0 || dst_sample_rate_hz <= 0 || num_channels <= 0) { - return -1; - } - - src_sample_rate_hz_ = src_sample_rate_hz; - dst_sample_rate_hz_ = dst_sample_rate_hz; - num_channels_ = num_channels; - - const size_t src_size_10ms_mono = - static_cast(src_sample_rate_hz / 100); - const size_t dst_size_10ms_mono = - static_cast(dst_sample_rate_hz / 100); - channel_resamplers_.clear(); + // Allocate two buffers for all source and destination channels. + // Then organize source and destination views together with an array of + // resamplers for each channel in the deinterlaved buffers. + source_.reset(new T[src_samples_per_channel * num_channels]); + destination_.reset(new T[dst_samples_per_channel * num_channels]); + source_view_ = DeinterleavedView(source_.get(), src_samples_per_channel, + num_channels); + destination_view_ = DeinterleavedView( + destination_.get(), dst_samples_per_channel, num_channels); + resamplers_.resize(num_channels); for (size_t i = 0; i < num_channels; ++i) { - channel_resamplers_.push_back(ChannelResampler()); - auto channel_resampler = channel_resamplers_.rbegin(); - channel_resampler->resampler = std::make_unique( - src_size_10ms_mono, dst_size_10ms_mono); - channel_resampler->source.resize(src_size_10ms_mono); - channel_resampler->destination.resize(dst_size_10ms_mono); + resamplers_[i] = std::make_unique( + src_samples_per_channel, dst_samples_per_channel); } - - channel_data_array_.resize(num_channels_); - - return 0; } template -int PushResampler::Resample(const T* src, - size_t src_length, - T* dst, - size_t dst_capacity) { - // These checks used to be factored out of this template function due to - // Windows debug build issues with clang. http://crbug.com/615050 - const size_t src_size_10ms = (src_sample_rate_hz_ / 100) * num_channels_; - const size_t dst_size_10ms = (dst_sample_rate_hz_ / 100) * num_channels_; - RTC_DCHECK_EQ(src_length, src_size_10ms); - RTC_DCHECK_GE(dst_capacity, dst_size_10ms); - - if (src_sample_rate_hz_ == dst_sample_rate_hz_) { +int PushResampler::Resample(InterleavedView src, + InterleavedView dst) { + EnsureInitialized(SamplesPerChannel(src), SamplesPerChannel(dst), + NumChannels(src)); + + RTC_DCHECK_EQ(NumChannels(src), NumChannels(source_view_)); + RTC_DCHECK_EQ(NumChannels(dst), NumChannels(destination_view_)); + RTC_DCHECK_EQ(SamplesPerChannel(src), SamplesPerChannel(source_view_)); + RTC_DCHECK_EQ(SamplesPerChannel(dst), SamplesPerChannel(destination_view_)); + + if (SamplesPerChannel(src) == SamplesPerChannel(dst)) { // The old resampler provides this memcpy facility in the case of matching // sample rates, so reproduce it here for the sinc resampler. 
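    // Illustrative example (not part of this patch): for 48 kHz stereo in and
    // out, both views hold 480 samples per channel, so the 960 interleaved
    // samples are copied as-is below and 960 is returned without touching the
    // per-channel resamplers.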
- memcpy(dst, src, src_length * sizeof(T)); - return static_cast(src_length); + CopySamples(dst, src); + return static_cast(src.data().size()); } - const size_t src_length_mono = src_length / num_channels_; - const size_t dst_capacity_mono = dst_capacity / num_channels_; + Deinterleave(src, source_view_); - for (size_t ch = 0; ch < num_channels_; ++ch) { - channel_data_array_[ch] = channel_resamplers_[ch].source.data(); + for (size_t i = 0; i < resamplers_.size(); ++i) { + size_t dst_length_mono = + resamplers_[i]->Resample(source_view_[i], destination_view_[i]); + RTC_DCHECK_EQ(dst_length_mono, SamplesPerChannel(dst)); } - Deinterleave(src, src_length_mono, num_channels_, channel_data_array_.data()); - - size_t dst_length_mono = 0; - - for (auto& resampler : channel_resamplers_) { - dst_length_mono = resampler.resampler->Resample( - resampler.source.data(), src_length_mono, resampler.destination.data(), - dst_capacity_mono); - } + Interleave(destination_view_, dst); + return static_cast(dst.size()); +} - for (size_t ch = 0; ch < num_channels_; ++ch) { - channel_data_array_[ch] = channel_resamplers_[ch].destination.data(); +template +int PushResampler::Resample(MonoView src, MonoView dst) { + RTC_DCHECK_EQ(resamplers_.size(), 1); + RTC_DCHECK_EQ(SamplesPerChannel(src), SamplesPerChannel(source_view_)); + RTC_DCHECK_EQ(SamplesPerChannel(dst), SamplesPerChannel(destination_view_)); + + if (SamplesPerChannel(src) == SamplesPerChannel(dst)) { + CopySamples(dst, src); + return static_cast(src.size()); } - Interleave(channel_data_array_.data(), dst_length_mono, num_channels_, dst); - return static_cast(dst_length_mono * num_channels_); + return resamplers_[0]->Resample(src, dst); } // Explictly generate required instantiations. diff --git a/common_audio/resampler/push_resampler_unittest.cc b/common_audio/resampler/push_resampler_unittest.cc index 91f2233aad..4fba2f412e 100644 --- a/common_audio/resampler/push_resampler_unittest.cc +++ b/common_audio/resampler/push_resampler_unittest.cc @@ -19,29 +19,24 @@ namespace webrtc { TEST(PushResamplerTest, VerifiesInputParameters) { - PushResampler resampler; - EXPECT_EQ(0, resampler.InitializeIfNeeded(16000, 16000, 1)); - EXPECT_EQ(0, resampler.InitializeIfNeeded(16000, 16000, 2)); - EXPECT_EQ(0, resampler.InitializeIfNeeded(16000, 16000, 8)); + PushResampler resampler1(160, 160, 1); + PushResampler resampler2(160, 160, 2); + PushResampler resampler3(160, 160, 8); } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) TEST(PushResamplerDeathTest, VerifiesBadInputParameters1) { - PushResampler resampler; - RTC_EXPECT_DEATH(resampler.InitializeIfNeeded(-1, 16000, 1), - "src_sample_rate_hz"); + RTC_EXPECT_DEATH(PushResampler(-1, 160, 1), + "src_samples_per_channel"); } TEST(PushResamplerDeathTest, VerifiesBadInputParameters2) { - PushResampler resampler; - RTC_EXPECT_DEATH(resampler.InitializeIfNeeded(16000, -1, 1), - "dst_sample_rate_hz"); + RTC_EXPECT_DEATH(PushResampler(160, -1, 1), + "dst_samples_per_channel"); } TEST(PushResamplerDeathTest, VerifiesBadInputParameters3) { - PushResampler resampler; - RTC_EXPECT_DEATH(resampler.InitializeIfNeeded(16000, 16000, 0), - "num_channels"); + RTC_EXPECT_DEATH(PushResampler(160, 16000, 0), "num_channels"); } #endif diff --git a/common_audio/resampler/push_sinc_resampler.cc b/common_audio/resampler/push_sinc_resampler.cc index d4b7eed026..75bf33a7df 100644 --- a/common_audio/resampler/push_sinc_resampler.cc +++ b/common_audio/resampler/push_sinc_resampler.cc @@ -33,7 +33,7 @@ 
PushSincResampler::~PushSincResampler() {} size_t PushSincResampler::Resample(const int16_t* source, size_t source_length, int16_t* destination, - size_t destination_capacity) { + size_t /* destination_capacity */) { if (!float_buffer_.get()) float_buffer_.reset(new float[destination_frames_]); diff --git a/common_audio/resampler/push_sinc_resampler.h b/common_audio/resampler/push_sinc_resampler.h index 7946ef8f82..b2c71555a9 100644 --- a/common_audio/resampler/push_sinc_resampler.h +++ b/common_audio/resampler/push_sinc_resampler.h @@ -16,6 +16,7 @@ #include +#include "api/audio/audio_view.h" #include "common_audio/resampler/sinc_resampler.h" namespace webrtc { @@ -40,6 +41,12 @@ class PushSincResampler : public SincResamplerCallback { // at least as large as `destination_frames`. Returns the number of samples // provided in destination (for convenience, since this will always be equal // to `destination_frames`). + template + size_t Resample(const MonoView& source, const MonoView& destination) { + return Resample(&source[0], SamplesPerChannel(source), &destination[0], + SamplesPerChannel(destination)); + } + size_t Resample(const int16_t* source, size_t source_frames, int16_t* destination, diff --git a/common_audio/resampler/push_sinc_resampler_unittest.cc b/common_audio/resampler/push_sinc_resampler_unittest.cc index 8f82199d1d..d3cfe9d82c 100644 --- a/common_audio/resampler/push_sinc_resampler_unittest.cc +++ b/common_audio/resampler/push_sinc_resampler_unittest.cc @@ -86,17 +86,16 @@ void PushSincResamplerTest::ResampleBenchmarkTest(bool int_format) { const double io_ratio = input_rate_ / static_cast(output_rate_); SincResampler sinc_resampler(io_ratio, SincResampler::kDefaultRequestSize, &resampler_source); - int64_t start = rtc::TimeNanos(); + int64_t start = TimeNanos(); for (int i = 0; i < kResampleIterations; ++i) { sinc_resampler.Resample(output_samples, resampled_destination.get()); } - double total_time_sinc_us = - (rtc::TimeNanos() - start) / rtc::kNumNanosecsPerMicrosec; + double total_time_sinc_us = (TimeNanos() - start) / kNumNanosecsPerMicrosec; printf("SincResampler took %.2f us per frame.\n", total_time_sinc_us / kResampleIterations); PushSincResampler resampler(input_samples, output_samples); - start = rtc::TimeNanos(); + start = TimeNanos(); if (int_format) { for (int i = 0; i < kResampleIterations; ++i) { EXPECT_EQ(output_samples, @@ -110,8 +109,7 @@ void PushSincResamplerTest::ResampleBenchmarkTest(bool int_format) { output_samples)); } } - double total_time_us = - (rtc::TimeNanos() - start) / rtc::kNumNanosecsPerMicrosec; + double total_time_us = (TimeNanos() - start) / kNumNanosecsPerMicrosec; printf( "PushSincResampler took %.2f us per frame; which is a %.1f%% overhead " "on SincResampler.\n\n", diff --git a/common_audio/resampler/resampler_unittest.cc b/common_audio/resampler/resampler_unittest.cc index 1b90d3e30b..b8043ec5fe 100644 --- a/common_audio/resampler/resampler_unittest.cc +++ b/common_audio/resampler/resampler_unittest.cc @@ -20,9 +20,6 @@ namespace webrtc { namespace { -const int kNumChannels[] = {1, 2}; -const size_t kNumChannelsSize = sizeof(kNumChannels) / sizeof(*kNumChannels); - // Rates we must support. 
const int kMaxRate = 96000; const int kRates[] = {8000, 16000, 32000, 44000, 48000, kMaxRate}; @@ -66,7 +63,7 @@ void ResamplerTest::TearDown() {} void ResamplerTest::ResetIfNeededAndPush(int in_rate, int out_rate, int num_channels) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Input rate: " << in_rate << ", output rate: " << out_rate << ", channel count: " << num_channels; SCOPED_TRACE(ss.str()); @@ -84,6 +81,9 @@ void ResamplerTest::ResetIfNeededAndPush(int in_rate, } TEST_F(ResamplerTest, Reset) { + const int kNumChannels[] = {1, 2}; + const size_t kNumChannelsSize = std::size(kNumChannels); + // The only failure mode for the constructor is if Reset() fails. For the // time being then (until an Init function is added), we rely on Reset() // to test the constructor. @@ -92,7 +92,7 @@ TEST_F(ResamplerTest, Reset) { for (size_t i = 0; i < kRatesSize; ++i) { for (size_t j = 0; j < kRatesSize; ++j) { for (size_t k = 0; k < kNumChannelsSize; ++k) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j] << ", channels: " << kNumChannels[k]; SCOPED_TRACE(ss.str()); @@ -111,7 +111,7 @@ TEST_F(ResamplerTest, Mono) { const int kChannels = 1; for (size_t i = 0; i < kRatesSize; ++i) { for (size_t j = 0; j < kRatesSize; ++j) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j]; SCOPED_TRACE(ss.str()); @@ -133,7 +133,7 @@ TEST_F(ResamplerTest, Stereo) { const int kChannels = 2; for (size_t i = 0; i < kRatesSize; ++i) { for (size_t j = 0; j < kRatesSize; ++j) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j]; SCOPED_TRACE(ss.str()); diff --git a/common_audio/resampler/sinc_resampler_unittest.cc b/common_audio/resampler/sinc_resampler_unittest.cc index b267c89c8b..483d3d0374 100644 --- a/common_audio/resampler/sinc_resampler_unittest.cc +++ b/common_audio/resampler/sinc_resampler_unittest.cc @@ -107,11 +107,10 @@ TEST(SincResamplerTest, DISABLED_SetRatioBench) { SincResampler resampler(kSampleRateRatio, SincResampler::kDefaultRequestSize, &mock_source); - int64_t start = rtc::TimeNanos(); + int64_t start = TimeNanos(); for (int i = 1; i < 10000; ++i) resampler.SetRatio(1.0 / i); - double total_time_c_us = - (rtc::TimeNanos() - start) / rtc::kNumNanosecsPerMicrosec; + double total_time_c_us = (TimeNanos() - start) / kNumNanosecsPerMicrosec; printf("SetRatio() took %.2fms.\n", total_time_c_us / 1000); } @@ -170,14 +169,13 @@ TEST(SincResamplerTest, ConvolveBenchmark) { printf("Benchmarking %d iterations:\n", kConvolveIterations); // Benchmark Convolve_C(). - int64_t start = rtc::TimeNanos(); + int64_t start = TimeNanos(); for (int i = 0; i < kConvolveIterations; ++i) { resampler.Convolve_C( resampler.kernel_storage_.get(), resampler.kernel_storage_.get(), resampler.kernel_storage_.get(), kKernelInterpolationFactor); } - double total_time_c_us = - (rtc::TimeNanos() - start) / rtc::kNumNanosecsPerMicrosec; + double total_time_c_us = (TimeNanos() - start) / kNumNanosecsPerMicrosec; printf("Convolve_C took %.2fms.\n", total_time_c_us / 1000); #if defined(WEBRTC_ARCH_X86_FAMILY) @@ -187,14 +185,14 @@ TEST(SincResamplerTest, ConvolveBenchmark) { #endif // Benchmark with unaligned input pointer. 
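  // (Illustrative note, not part of this patch: TimeNanos() deltas divided by
  // kNumNanosecsPerMicrosec, i.e. 1000, give the elapsed time in microseconds
  // used in the measurements below.)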
- start = rtc::TimeNanos(); + start = TimeNanos(); for (int j = 0; j < kConvolveIterations; ++j) { resampler.convolve_proc_( resampler.kernel_storage_.get() + 1, resampler.kernel_storage_.get(), resampler.kernel_storage_.get(), kKernelInterpolationFactor); } double total_time_optimized_unaligned_us = - (rtc::TimeNanos() - start) / rtc::kNumNanosecsPerMicrosec; + (TimeNanos() - start) / kNumNanosecsPerMicrosec; printf( "convolve_proc_(unaligned) took %.2fms; which is %.2fx " "faster than Convolve_C.\n", @@ -202,14 +200,14 @@ TEST(SincResamplerTest, ConvolveBenchmark) { total_time_c_us / total_time_optimized_unaligned_us); // Benchmark with aligned input pointer. - start = rtc::TimeNanos(); + start = TimeNanos(); for (int j = 0; j < kConvolveIterations; ++j) { resampler.convolve_proc_( resampler.kernel_storage_.get(), resampler.kernel_storage_.get(), resampler.kernel_storage_.get(), kKernelInterpolationFactor); } double total_time_optimized_aligned_us = - (rtc::TimeNanos() - start) / rtc::kNumNanosecsPerMicrosec; + (TimeNanos() - start) / kNumNanosecsPerMicrosec; printf( "convolve_proc_ (aligned) took %.2fms; which is %.2fx " "faster than Convolve_C and %.2fx faster than " diff --git a/common_audio/ring_buffer.c b/common_audio/ring_buffer.c index 590f5f9bf1..a3fabd058f 100644 --- a/common_audio/ring_buffer.c +++ b/common_audio/ring_buffer.c @@ -28,10 +28,9 @@ static size_t GetBufferReadRegions(RingBuffer* buf, size_t* data_ptr_bytes_1, void** data_ptr_2, size_t* data_ptr_bytes_2) { - const size_t readable_elements = WebRtc_available_read(buf); - const size_t read_elements = (readable_elements < element_count ? - readable_elements : element_count); + const size_t read_elements = + (readable_elements < element_count ? readable_elements : element_count); const size_t margin = buf->element_count - buf->read_pos; // Check to see if read is not contiguous. @@ -99,7 +98,6 @@ size_t WebRtc_ReadBuffer(RingBuffer* self, void** data_ptr, void* data, size_t element_count) { - if (self == NULL) { return 0; } @@ -112,17 +110,14 @@ size_t WebRtc_ReadBuffer(RingBuffer* self, void* buf_ptr_2 = NULL; size_t buf_ptr_bytes_1 = 0; size_t buf_ptr_bytes_2 = 0; - const size_t read_count = GetBufferReadRegions(self, - element_count, - &buf_ptr_1, - &buf_ptr_bytes_1, - &buf_ptr_2, - &buf_ptr_bytes_2); + const size_t read_count = + GetBufferReadRegions(self, element_count, &buf_ptr_1, &buf_ptr_bytes_1, + &buf_ptr_2, &buf_ptr_bytes_2); if (buf_ptr_bytes_2 > 0) { // We have a wrap around when reading the buffer. Copy the buffer data to // `data` and point to it. memcpy(data, buf_ptr_1, buf_ptr_bytes_1); - memcpy(((char*) data) + buf_ptr_bytes_1, buf_ptr_2, buf_ptr_bytes_2); + memcpy(((char*)data) + buf_ptr_bytes_1, buf_ptr_2, buf_ptr_bytes_2); buf_ptr_1 = data; } else if (!data_ptr) { // No wrap, but a memcpy was requested. @@ -134,7 +129,7 @@ size_t WebRtc_ReadBuffer(RingBuffer* self, } // Update read position - WebRtc_MoveReadPtr(self, (int) read_count); + WebRtc_MoveReadPtr(self, (int)read_count); return read_count; } @@ -152,21 +147,21 @@ size_t WebRtc_WriteBuffer(RingBuffer* self, { const size_t free_elements = WebRtc_available_write(self); - const size_t write_elements = (free_elements < element_count ? free_elements - : element_count); + const size_t write_elements = + (free_elements < element_count ? free_elements : element_count); size_t n = write_elements; const size_t margin = self->element_count - self->write_pos; if (write_elements > margin) { // Buffer wrap around when writing. 
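      // Worked example (illustrative, not part of this patch): with
      // element_count == 10, write_pos == 8 and write_elements == 5, margin is
      // 2, so two elements are written at the tail here, write_pos wraps to 0,
      // and the remaining three elements are written at the head by the memcpy
      // that follows.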
- memcpy(self->data + self->write_pos * self->element_size, - data, margin * self->element_size); + memcpy(self->data + self->write_pos * self->element_size, data, + margin * self->element_size); self->write_pos = 0; n -= margin; self->rw_wrap = DIFF_WRAP; } memcpy(self->data + self->write_pos * self->element_size, - ((const char*) data) + ((write_elements - n) * self->element_size), + ((const char*)data) + ((write_elements - n) * self->element_size), n * self->element_size); self->write_pos += n; @@ -182,9 +177,9 @@ int WebRtc_MoveReadPtr(RingBuffer* self, int element_count) { { // We need to be able to take care of negative changes, hence use "int" // instead of "size_t". - const int free_elements = (int) WebRtc_available_write(self); - const int readable_elements = (int) WebRtc_available_read(self); - int read_pos = (int) self->read_pos; + const int free_elements = (int)WebRtc_available_write(self); + const int readable_elements = (int)WebRtc_available_read(self); + int read_pos = (int)self->read_pos; if (element_count > readable_elements) { element_count = readable_elements; @@ -194,18 +189,18 @@ int WebRtc_MoveReadPtr(RingBuffer* self, int element_count) { } read_pos += element_count; - if (read_pos > (int) self->element_count) { + if (read_pos > (int)self->element_count) { // Buffer wrap around. Restart read position and wrap indicator. - read_pos -= (int) self->element_count; + read_pos -= (int)self->element_count; self->rw_wrap = SAME_WRAP; } if (read_pos < 0) { // Buffer wrap around. Restart read position and wrap indicator. - read_pos += (int) self->element_count; + read_pos += (int)self->element_count; self->rw_wrap = DIFF_WRAP; } - self->read_pos = (size_t) read_pos; + self->read_pos = (size_t)read_pos; return element_count; } diff --git a/common_audio/ring_buffer.h b/common_audio/ring_buffer.h index de0b4fed80..99f9547fb2 100644 --- a/common_audio/ring_buffer.h +++ b/common_audio/ring_buffer.h @@ -14,7 +14,7 @@ #ifndef COMMON_AUDIO_RING_BUFFER_H_ #define COMMON_AUDIO_RING_BUFFER_H_ -// TODO(alessiob): Used by AEC, AECm and AudioRingBuffer. Remove when possible. +// TODO(https://issues.webrtc.org/379542219): Remove when AECm gets removed. #ifdef __cplusplus extern "C" { diff --git a/common_audio/signal_processing/auto_corr_to_refl_coef.c b/common_audio/signal_processing/auto_corr_to_refl_coef.c index a3ec24f5da..d5b0bd27a9 100644 --- a/common_audio/signal_processing/auto_corr_to_refl_coef.c +++ b/common_audio/signal_processing/auto_corr_to_refl_coef.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the function WebRtcSpl_AutoCorrToReflCoef(). * The description header can be found in signal_processing_library.h @@ -17,87 +16,79 @@ #include "common_audio/signal_processing/include/signal_processing_library.h" -void WebRtcSpl_AutoCorrToReflCoef(const int32_t *R, int use_order, int16_t *K) -{ - int i, n; - int16_t tmp; - const int32_t *rptr; - int32_t L_num, L_den; - int16_t *acfptr, *pptr, *wptr, *p1ptr, *w1ptr, ACF[WEBRTC_SPL_MAX_LPC_ORDER], - P[WEBRTC_SPL_MAX_LPC_ORDER], W[WEBRTC_SPL_MAX_LPC_ORDER]; +void WebRtcSpl_AutoCorrToReflCoef(const int32_t* R, int use_order, int16_t* K) { + int i, n; + int16_t tmp; + const int32_t* rptr; + int32_t L_num, L_den; + int16_t *acfptr, *pptr, *wptr, *p1ptr, *w1ptr, ACF[WEBRTC_SPL_MAX_LPC_ORDER], + P[WEBRTC_SPL_MAX_LPC_ORDER], W[WEBRTC_SPL_MAX_LPC_ORDER]; - // Initialize loop and pointers. 
- acfptr = ACF; - rptr = R; - pptr = P; - p1ptr = &P[1]; - w1ptr = &W[1]; - wptr = w1ptr; + // Initialize loop and pointers. + acfptr = ACF; + rptr = R; + pptr = P; + p1ptr = &P[1]; + w1ptr = &W[1]; + wptr = w1ptr; - // First loop; n=0. Determine shifting. - tmp = WebRtcSpl_NormW32(*R); + // First loop; n=0. Determine shifting. + tmp = WebRtcSpl_NormW32(*R); + *acfptr = (int16_t)((*rptr++ << tmp) >> 16); + *pptr++ = *acfptr++; + + // Initialize ACF, P and W. + for (i = 1; i <= use_order; i++) { *acfptr = (int16_t)((*rptr++ << tmp) >> 16); + *wptr++ = *acfptr; *pptr++ = *acfptr++; + } - // Initialize ACF, P and W. - for (i = 1; i <= use_order; i++) - { - *acfptr = (int16_t)((*rptr++ << tmp) >> 16); - *wptr++ = *acfptr; - *pptr++ = *acfptr++; - } - - // Compute reflection coefficients. - for (n = 1; n <= use_order; n++, K++) - { - tmp = WEBRTC_SPL_ABS_W16(*p1ptr); - if (*P < tmp) - { - for (i = n; i <= use_order; i++) - *K++ = 0; + // Compute reflection coefficients. + for (n = 1; n <= use_order; n++, K++) { + tmp = WEBRTC_SPL_ABS_W16(*p1ptr); + if (*P < tmp) { + for (i = n; i <= use_order; i++) + *K++ = 0; - return; - } + return; + } - // Division: WebRtcSpl_div(tmp, *P) - *K = 0; - if (tmp != 0) - { - L_num = tmp; - L_den = *P; - i = 15; - while (i--) - { - (*K) <<= 1; - L_num <<= 1; - if (L_num >= L_den) - { - L_num -= L_den; - (*K)++; - } - } - if (*p1ptr > 0) - *K = -*K; + // Division: WebRtcSpl_div(tmp, *P) + *K = 0; + if (tmp != 0) { + L_num = tmp; + L_den = *P; + i = 15; + while (i--) { + (*K) <<= 1; + L_num <<= 1; + if (L_num >= L_den) { + L_num -= L_den; + (*K)++; } + } + if (*p1ptr > 0) + *K = -*K; + } - // Last iteration; don't do Schur recursion. - if (n == use_order) - return; + // Last iteration; don't do Schur recursion. + if (n == use_order) + return; - // Schur recursion. - pptr = P; - wptr = w1ptr; - tmp = (int16_t)(((int32_t)*p1ptr * (int32_t)*K + 16384) >> 15); - *pptr = WebRtcSpl_AddSatW16(*pptr, tmp); - pptr++; - for (i = 1; i <= use_order - n; i++) - { - tmp = (int16_t)(((int32_t)*wptr * (int32_t)*K + 16384) >> 15); - *pptr = WebRtcSpl_AddSatW16(*(pptr + 1), tmp); - pptr++; - tmp = (int16_t)(((int32_t)*pptr * (int32_t)*K + 16384) >> 15); - *wptr = WebRtcSpl_AddSatW16(*wptr, tmp); - wptr++; - } + // Schur recursion. + pptr = P; + wptr = w1ptr; + tmp = (int16_t)(((int32_t)*p1ptr * (int32_t)*K + 16384) >> 15); + *pptr = WebRtcSpl_AddSatW16(*pptr, tmp); + pptr++; + for (i = 1; i <= use_order - n; i++) { + tmp = (int16_t)(((int32_t)*wptr * (int32_t)*K + 16384) >> 15); + *pptr = WebRtcSpl_AddSatW16(*(pptr + 1), tmp); + pptr++; + tmp = (int16_t)(((int32_t)*pptr * (int32_t)*K + 16384) >> 15); + *wptr = WebRtcSpl_AddSatW16(*wptr, tmp); + wptr++; } + } } diff --git a/common_audio/signal_processing/auto_correlation.c b/common_audio/signal_processing/auto_correlation.c index 1455820e8f..d9e74e3f0c 100644 --- a/common_audio/signal_processing/auto_correlation.c +++ b/common_audio/signal_processing/auto_correlation.c @@ -9,7 +9,6 @@ */ #include "common_audio/signal_processing/include/signal_processing_library.h" - #include "rtc_base/checks.h" size_t WebRtcSpl_AutoCorrelation(const int16_t* in_vector, diff --git a/common_audio/signal_processing/complex_bit_reverse.c b/common_audio/signal_processing/complex_bit_reverse.c index 1c82cff50f..c26e232ee4 100644 --- a/common_audio/signal_processing/complex_bit_reverse.c +++ b/common_audio/signal_processing/complex_bit_reverse.c @@ -18,33 +18,32 @@ /* Indexes for the case of stages == 7. 
*/ static const int16_t index_7[112] = { - 1, 64, 2, 32, 3, 96, 4, 16, 5, 80, 6, 48, 7, 112, 9, 72, 10, 40, 11, 104, - 12, 24, 13, 88, 14, 56, 15, 120, 17, 68, 18, 36, 19, 100, 21, 84, 22, 52, - 23, 116, 25, 76, 26, 44, 27, 108, 29, 92, 30, 60, 31, 124, 33, 66, 35, 98, - 37, 82, 38, 50, 39, 114, 41, 74, 43, 106, 45, 90, 46, 58, 47, 122, 49, 70, - 51, 102, 53, 86, 55, 118, 57, 78, 59, 110, 61, 94, 63, 126, 67, 97, 69, - 81, 71, 113, 75, 105, 77, 89, 79, 121, 83, 101, 87, 117, 91, 109, 95, 125, - 103, 115, 111, 123 -}; + 1, 64, 2, 32, 3, 96, 4, 16, 5, 80, 6, 48, 7, 112, 9, 72, + 10, 40, 11, 104, 12, 24, 13, 88, 14, 56, 15, 120, 17, 68, 18, 36, + 19, 100, 21, 84, 22, 52, 23, 116, 25, 76, 26, 44, 27, 108, 29, 92, + 30, 60, 31, 124, 33, 66, 35, 98, 37, 82, 38, 50, 39, 114, 41, 74, + 43, 106, 45, 90, 46, 58, 47, 122, 49, 70, 51, 102, 53, 86, 55, 118, + 57, 78, 59, 110, 61, 94, 63, 126, 67, 97, 69, 81, 71, 113, 75, 105, + 77, 89, 79, 121, 83, 101, 87, 117, 91, 109, 95, 125, 103, 115, 111, 123}; /* Indexes for the case of stages == 8. */ static const int16_t index_8[240] = { - 1, 128, 2, 64, 3, 192, 4, 32, 5, 160, 6, 96, 7, 224, 8, 16, 9, 144, 10, 80, - 11, 208, 12, 48, 13, 176, 14, 112, 15, 240, 17, 136, 18, 72, 19, 200, 20, - 40, 21, 168, 22, 104, 23, 232, 25, 152, 26, 88, 27, 216, 28, 56, 29, 184, - 30, 120, 31, 248, 33, 132, 34, 68, 35, 196, 37, 164, 38, 100, 39, 228, 41, - 148, 42, 84, 43, 212, 44, 52, 45, 180, 46, 116, 47, 244, 49, 140, 50, 76, - 51, 204, 53, 172, 54, 108, 55, 236, 57, 156, 58, 92, 59, 220, 61, 188, 62, - 124, 63, 252, 65, 130, 67, 194, 69, 162, 70, 98, 71, 226, 73, 146, 74, 82, - 75, 210, 77, 178, 78, 114, 79, 242, 81, 138, 83, 202, 85, 170, 86, 106, 87, - 234, 89, 154, 91, 218, 93, 186, 94, 122, 95, 250, 97, 134, 99, 198, 101, - 166, 103, 230, 105, 150, 107, 214, 109, 182, 110, 118, 111, 246, 113, 142, - 115, 206, 117, 174, 119, 238, 121, 158, 123, 222, 125, 190, 127, 254, 131, - 193, 133, 161, 135, 225, 137, 145, 139, 209, 141, 177, 143, 241, 147, 201, - 149, 169, 151, 233, 155, 217, 157, 185, 159, 249, 163, 197, 167, 229, 171, - 213, 173, 181, 175, 245, 179, 205, 183, 237, 187, 221, 191, 253, 199, 227, - 203, 211, 207, 243, 215, 235, 223, 251, 239, 247 -}; + 1, 128, 2, 64, 3, 192, 4, 32, 5, 160, 6, 96, 7, 224, 8, + 16, 9, 144, 10, 80, 11, 208, 12, 48, 13, 176, 14, 112, 15, 240, + 17, 136, 18, 72, 19, 200, 20, 40, 21, 168, 22, 104, 23, 232, 25, + 152, 26, 88, 27, 216, 28, 56, 29, 184, 30, 120, 31, 248, 33, 132, + 34, 68, 35, 196, 37, 164, 38, 100, 39, 228, 41, 148, 42, 84, 43, + 212, 44, 52, 45, 180, 46, 116, 47, 244, 49, 140, 50, 76, 51, 204, + 53, 172, 54, 108, 55, 236, 57, 156, 58, 92, 59, 220, 61, 188, 62, + 124, 63, 252, 65, 130, 67, 194, 69, 162, 70, 98, 71, 226, 73, 146, + 74, 82, 75, 210, 77, 178, 78, 114, 79, 242, 81, 138, 83, 202, 85, + 170, 86, 106, 87, 234, 89, 154, 91, 218, 93, 186, 94, 122, 95, 250, + 97, 134, 99, 198, 101, 166, 103, 230, 105, 150, 107, 214, 109, 182, 110, + 118, 111, 246, 113, 142, 115, 206, 117, 174, 119, 238, 121, 158, 123, 222, + 125, 190, 127, 254, 131, 193, 133, 161, 135, 225, 137, 145, 139, 209, 141, + 177, 143, 241, 147, 201, 149, 169, 151, 233, 155, 217, 157, 185, 159, 249, + 163, 197, 167, 229, 171, 213, 173, 181, 175, 245, 179, 205, 183, 237, 187, + 221, 191, 253, 199, 227, 203, 211, 207, 243, 215, 235, 223, 251, 239, 247}; void WebRtcSpl_ComplexBitReverse(int16_t* __restrict complex_data, int stages) { /* For any specific value of stages, we know exactly the indexes that are @@ -71,12 +70,11 @@ void 
WebRtcSpl_ComplexBitReverse(int16_t* __restrict complex_data, int stages) { int32_t* complex_data_ptr = (int32_t*)complex_data; int32_t temp = 0; - temp = complex_data_ptr[index[m]]; /* Real and imaginary */ + temp = complex_data_ptr[index[m]]; /* Real and imaginary */ complex_data_ptr[index[m]] = complex_data_ptr[index[m + 1]]; complex_data_ptr[index[m + 1]] = temp; } - } - else { + } else { int m = 0, mr = 0, l = 0; int n = 1 << stages; int nn = n - 1; @@ -100,7 +98,7 @@ void WebRtcSpl_ComplexBitReverse(int16_t* __restrict complex_data, int stages) { /* Swap the elements with bit-reversed indexes. * This is similar to the loop in the stages == 7 or 8 cases. */ - temp = complex_data_ptr[m]; /* Real and imaginary */ + temp = complex_data_ptr[m]; /* Real and imaginary */ complex_data_ptr[m] = complex_data_ptr[mr]; complex_data_ptr[mr] = temp; } diff --git a/common_audio/signal_processing/complex_bit_reverse_mips.c b/common_audio/signal_processing/complex_bit_reverse_mips.c index 9007b19cf6..4df3f005af 100644 --- a/common_audio/signal_processing/complex_bit_reverse_mips.c +++ b/common_audio/signal_processing/complex_bit_reverse_mips.c @@ -8,58 +8,37 @@ * be found in the AUTHORS file in the root of the source tree. */ - #include "common_audio/signal_processing/include/signal_processing_library.h" static int16_t coefTable_7[] = { - 4, 256, 8, 128, 12, 384, 16, 64, - 20, 320, 24, 192, 28, 448, 36, 288, - 40, 160, 44, 416, 48, 96, 52, 352, - 56, 224, 60, 480, 68, 272, 72, 144, - 76, 400, 84, 336, 88, 208, 92, 464, - 100, 304, 104, 176, 108, 432, 116, 368, - 120, 240, 124, 496, 132, 264, 140, 392, - 148, 328, 152, 200, 156, 456, 164, 296, - 172, 424, 180, 360, 184, 232, 188, 488, - 196, 280, 204, 408, 212, 344, 220, 472, - 228, 312, 236, 440, 244, 376, 252, 504, - 268, 388, 276, 324, 284, 452, 300, 420, - 308, 356, 316, 484, 332, 404, 348, 468, - 364, 436, 380, 500, 412, 460, 444, 492 -}; + 4, 256, 8, 128, 12, 384, 16, 64, 20, 320, 24, 192, 28, 448, + 36, 288, 40, 160, 44, 416, 48, 96, 52, 352, 56, 224, 60, 480, + 68, 272, 72, 144, 76, 400, 84, 336, 88, 208, 92, 464, 100, 304, + 104, 176, 108, 432, 116, 368, 120, 240, 124, 496, 132, 264, 140, 392, + 148, 328, 152, 200, 156, 456, 164, 296, 172, 424, 180, 360, 184, 232, + 188, 488, 196, 280, 204, 408, 212, 344, 220, 472, 228, 312, 236, 440, + 244, 376, 252, 504, 268, 388, 276, 324, 284, 452, 300, 420, 308, 356, + 316, 484, 332, 404, 348, 468, 364, 436, 380, 500, 412, 460, 444, 492}; static int16_t coefTable_8[] = { - 4, 512, 8, 256, 12, 768, 16, 128, - 20, 640, 24, 384, 28, 896, 32, 64, - 36, 576, 40, 320, 44, 832, 48, 192, - 52, 704, 56, 448, 60, 960, 68, 544, - 72, 288, 76, 800, 80, 160, 84, 672, - 88, 416, 92, 928, 100, 608, 104, 352, - 108, 864, 112, 224, 116, 736, 120, 480, - 124, 992, 132, 528, 136, 272, 140, 784, - 148, 656, 152, 400, 156, 912, 164, 592, - 168, 336, 172, 848, 176, 208, 180, 720, - 184, 464, 188, 976, 196, 560, 200, 304, - 204, 816, 212, 688, 216, 432, 220, 944, - 228, 624, 232, 368, 236, 880, 244, 752, - 248, 496, 252, 1008, 260, 520, 268, 776, - 276, 648, 280, 392, 284, 904, 292, 584, - 296, 328, 300, 840, 308, 712, 312, 456, - 316, 968, 324, 552, 332, 808, 340, 680, - 344, 424, 348, 936, 356, 616, 364, 872, - 372, 744, 376, 488, 380, 1000, 388, 536, - 396, 792, 404, 664, 412, 920, 420, 600, - 428, 856, 436, 728, 440, 472, 444, 984, - 452, 568, 460, 824, 468, 696, 476, 952, - 484, 632, 492, 888, 500, 760, 508, 1016, - 524, 772, 532, 644, 540, 900, 548, 580, - 556, 836, 564, 708, 572, 964, 588, 804, - 596, 676, 604, 932, 
620, 868, 628, 740, - 636, 996, 652, 788, 668, 916, 684, 852, - 692, 724, 700, 980, 716, 820, 732, 948, - 748, 884, 764, 1012, 796, 908, 812, 844, - 828, 972, 860, 940, 892, 1004, 956, 988 -}; + 4, 512, 8, 256, 12, 768, 16, 128, 20, 640, 24, 384, 28, 896, + 32, 64, 36, 576, 40, 320, 44, 832, 48, 192, 52, 704, 56, 448, + 60, 960, 68, 544, 72, 288, 76, 800, 80, 160, 84, 672, 88, 416, + 92, 928, 100, 608, 104, 352, 108, 864, 112, 224, 116, 736, 120, 480, + 124, 992, 132, 528, 136, 272, 140, 784, 148, 656, 152, 400, 156, 912, + 164, 592, 168, 336, 172, 848, 176, 208, 180, 720, 184, 464, 188, 976, + 196, 560, 200, 304, 204, 816, 212, 688, 216, 432, 220, 944, 228, 624, + 232, 368, 236, 880, 244, 752, 248, 496, 252, 1008, 260, 520, 268, 776, + 276, 648, 280, 392, 284, 904, 292, 584, 296, 328, 300, 840, 308, 712, + 312, 456, 316, 968, 324, 552, 332, 808, 340, 680, 344, 424, 348, 936, + 356, 616, 364, 872, 372, 744, 376, 488, 380, 1000, 388, 536, 396, 792, + 404, 664, 412, 920, 420, 600, 428, 856, 436, 728, 440, 472, 444, 984, + 452, 568, 460, 824, 468, 696, 476, 952, 484, 632, 492, 888, 500, 760, + 508, 1016, 524, 772, 532, 644, 540, 900, 548, 580, 556, 836, 564, 708, + 572, 964, 588, 804, 596, 676, 604, 932, 620, 868, 628, 740, 636, 996, + 652, 788, 668, 916, 684, 852, 692, 724, 700, 980, 716, 820, 732, 948, + 748, 884, 764, 1012, 796, 908, 812, 844, 828, 972, 860, 940, 892, 1004, + 956, 988}; void WebRtcSpl_ComplexBitReverse(int16_t frfi[], int stages) { int l; @@ -71,106 +50,104 @@ void WebRtcSpl_ComplexBitReverse(int16_t frfi[], int stages) { if (stages == 8) { int16_t* pcoeftable_8 = coefTable_8; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[l], $zero, 120 \n\t" - "1: \n\t" - "addiu %[l], %[l], -4 \n\t" - "lh %[tr], 0(%[pcoeftable_8]) \n\t" - "lh %[ti], 2(%[pcoeftable_8]) \n\t" - "lh %[tmp3], 4(%[pcoeftable_8]) \n\t" - "lh %[tmp4], 6(%[pcoeftable_8]) \n\t" - "addu %[ptr_i], %[frfi], %[tr] \n\t" - "addu %[ptr_j], %[frfi], %[ti] \n\t" - "addu %[tr], %[frfi], %[tmp3] \n\t" - "addu %[ti], %[frfi], %[tmp4] \n\t" - "ulw %[tmp1], 0(%[ptr_i]) \n\t" - "ulw %[tmp2], 0(%[ptr_j]) \n\t" - "ulw %[tmp3], 0(%[tr]) \n\t" - "ulw %[tmp4], 0(%[ti]) \n\t" - "usw %[tmp1], 0(%[ptr_j]) \n\t" - "usw %[tmp2], 0(%[ptr_i]) \n\t" - "usw %[tmp4], 0(%[tr]) \n\t" - "usw %[tmp3], 0(%[ti]) \n\t" - "lh %[tmp1], 8(%[pcoeftable_8]) \n\t" - "lh %[tmp2], 10(%[pcoeftable_8]) \n\t" - "lh %[tr], 12(%[pcoeftable_8]) \n\t" - "lh %[ti], 14(%[pcoeftable_8]) \n\t" - "addu %[ptr_i], %[frfi], %[tmp1] \n\t" - "addu %[ptr_j], %[frfi], %[tmp2] \n\t" - "addu %[tr], %[frfi], %[tr] \n\t" - "addu %[ti], %[frfi], %[ti] \n\t" - "ulw %[tmp1], 0(%[ptr_i]) \n\t" - "ulw %[tmp2], 0(%[ptr_j]) \n\t" - "ulw %[tmp3], 0(%[tr]) \n\t" - "ulw %[tmp4], 0(%[ti]) \n\t" - "usw %[tmp1], 0(%[ptr_j]) \n\t" - "usw %[tmp2], 0(%[ptr_i]) \n\t" - "usw %[tmp4], 0(%[tr]) \n\t" - "usw %[tmp3], 0(%[ti]) \n\t" - "bgtz %[l], 1b \n\t" - " addiu %[pcoeftable_8], %[pcoeftable_8], 16 \n\t" - ".set pop \n\t" + __asm __volatile( + ".set push \n\t" + ".set noreorder \n\t" + "addiu %[l], $zero, 120 \n\t" + "1: \n\t" + "addiu %[l], %[l], -4 \n\t" + "lh %[tr], 0(%[pcoeftable_8]) \n\t" + "lh %[ti], 2(%[pcoeftable_8]) \n\t" + "lh %[tmp3], 4(%[pcoeftable_8]) \n\t" + "lh %[tmp4], 6(%[pcoeftable_8]) \n\t" + "addu %[ptr_i], %[frfi], %[tr] \n\t" + "addu %[ptr_j], %[frfi], %[ti] \n\t" + "addu %[tr], %[frfi], %[tmp3] \n\t" + "addu %[ti], %[frfi], %[tmp4] \n\t" + "ulw %[tmp1], 0(%[ptr_i]) \n\t" + "ulw %[tmp2], 0(%[ptr_j]) \n\t" + "ulw %[tmp3], 0(%[tr]) \n\t" + "ulw 
%[tmp4], 0(%[ti]) \n\t" + "usw %[tmp1], 0(%[ptr_j]) \n\t" + "usw %[tmp2], 0(%[ptr_i]) \n\t" + "usw %[tmp4], 0(%[tr]) \n\t" + "usw %[tmp3], 0(%[ti]) \n\t" + "lh %[tmp1], 8(%[pcoeftable_8]) \n\t" + "lh %[tmp2], 10(%[pcoeftable_8]) \n\t" + "lh %[tr], 12(%[pcoeftable_8]) \n\t" + "lh %[ti], 14(%[pcoeftable_8]) \n\t" + "addu %[ptr_i], %[frfi], %[tmp1] \n\t" + "addu %[ptr_j], %[frfi], %[tmp2] \n\t" + "addu %[tr], %[frfi], %[tr] \n\t" + "addu %[ti], %[frfi], %[ti] \n\t" + "ulw %[tmp1], 0(%[ptr_i]) \n\t" + "ulw %[tmp2], 0(%[ptr_j]) \n\t" + "ulw %[tmp3], 0(%[tr]) \n\t" + "ulw %[tmp4], 0(%[ti]) \n\t" + "usw %[tmp1], 0(%[ptr_j]) \n\t" + "usw %[tmp2], 0(%[ptr_i]) \n\t" + "usw %[tmp4], 0(%[tr]) \n\t" + "usw %[tmp3], 0(%[ti]) \n\t" + "bgtz %[l], 1b \n\t" + " addiu %[pcoeftable_8], %[pcoeftable_8], 16 \n\t" + ".set pop \n\t" - : [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [ptr_i] "=&r" (ptr_i), - [ptr_j] "=&r" (ptr_j), [tr] "=&r" (tr), [l] "=&r" (l), - [tmp3] "=&r" (tmp3), [pcoeftable_8] "+r" (pcoeftable_8), - [ti] "=&r" (ti), [tmp4] "=&r" (tmp4) - : [frfi] "r" (frfi) - : "memory" - ); + : [tmp1] "=&r"(tmp1), [tmp2] "=&r"(tmp2), [ptr_i] "=&r"(ptr_i), + [ptr_j] "=&r"(ptr_j), [tr] "=&r"(tr), [l] "=&r"(l), + [tmp3] "=&r"(tmp3), [pcoeftable_8] "+r"(pcoeftable_8), [ti] "=&r"(ti), + [tmp4] "=&r"(tmp4) + : [frfi] "r"(frfi) + : "memory"); } else if (stages == 7) { int16_t* pcoeftable_7 = coefTable_7; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[l], $zero, 56 \n\t" - "1: \n\t" - "addiu %[l], %[l], -4 \n\t" - "lh %[tr], 0(%[pcoeftable_7]) \n\t" - "lh %[ti], 2(%[pcoeftable_7]) \n\t" - "lh %[tmp3], 4(%[pcoeftable_7]) \n\t" - "lh %[tmp4], 6(%[pcoeftable_7]) \n\t" - "addu %[ptr_i], %[frfi], %[tr] \n\t" - "addu %[ptr_j], %[frfi], %[ti] \n\t" - "addu %[tr], %[frfi], %[tmp3] \n\t" - "addu %[ti], %[frfi], %[tmp4] \n\t" - "ulw %[tmp1], 0(%[ptr_i]) \n\t" - "ulw %[tmp2], 0(%[ptr_j]) \n\t" - "ulw %[tmp3], 0(%[tr]) \n\t" - "ulw %[tmp4], 0(%[ti]) \n\t" - "usw %[tmp1], 0(%[ptr_j]) \n\t" - "usw %[tmp2], 0(%[ptr_i]) \n\t" - "usw %[tmp4], 0(%[tr]) \n\t" - "usw %[tmp3], 0(%[ti]) \n\t" - "lh %[tmp1], 8(%[pcoeftable_7]) \n\t" - "lh %[tmp2], 10(%[pcoeftable_7]) \n\t" - "lh %[tr], 12(%[pcoeftable_7]) \n\t" - "lh %[ti], 14(%[pcoeftable_7]) \n\t" - "addu %[ptr_i], %[frfi], %[tmp1] \n\t" - "addu %[ptr_j], %[frfi], %[tmp2] \n\t" - "addu %[tr], %[frfi], %[tr] \n\t" - "addu %[ti], %[frfi], %[ti] \n\t" - "ulw %[tmp1], 0(%[ptr_i]) \n\t" - "ulw %[tmp2], 0(%[ptr_j]) \n\t" - "ulw %[tmp3], 0(%[tr]) \n\t" - "ulw %[tmp4], 0(%[ti]) \n\t" - "usw %[tmp1], 0(%[ptr_j]) \n\t" - "usw %[tmp2], 0(%[ptr_i]) \n\t" - "usw %[tmp4], 0(%[tr]) \n\t" - "usw %[tmp3], 0(%[ti]) \n\t" - "bgtz %[l], 1b \n\t" - " addiu %[pcoeftable_7], %[pcoeftable_7], 16 \n\t" - ".set pop \n\t" + __asm __volatile( + ".set push \n\t" + ".set noreorder \n\t" + "addiu %[l], $zero, 56 \n\t" + "1: \n\t" + "addiu %[l], %[l], -4 \n\t" + "lh %[tr], 0(%[pcoeftable_7]) \n\t" + "lh %[ti], 2(%[pcoeftable_7]) \n\t" + "lh %[tmp3], 4(%[pcoeftable_7]) \n\t" + "lh %[tmp4], 6(%[pcoeftable_7]) \n\t" + "addu %[ptr_i], %[frfi], %[tr] \n\t" + "addu %[ptr_j], %[frfi], %[ti] \n\t" + "addu %[tr], %[frfi], %[tmp3] \n\t" + "addu %[ti], %[frfi], %[tmp4] \n\t" + "ulw %[tmp1], 0(%[ptr_i]) \n\t" + "ulw %[tmp2], 0(%[ptr_j]) \n\t" + "ulw %[tmp3], 0(%[tr]) \n\t" + "ulw %[tmp4], 0(%[ti]) \n\t" + "usw %[tmp1], 0(%[ptr_j]) \n\t" + "usw %[tmp2], 0(%[ptr_i]) \n\t" + "usw %[tmp4], 0(%[tr]) \n\t" + "usw %[tmp3], 0(%[ti]) \n\t" + "lh %[tmp1], 8(%[pcoeftable_7]) \n\t" + "lh %[tmp2], 10(%[pcoeftable_7]) 
\n\t" + "lh %[tr], 12(%[pcoeftable_7]) \n\t" + "lh %[ti], 14(%[pcoeftable_7]) \n\t" + "addu %[ptr_i], %[frfi], %[tmp1] \n\t" + "addu %[ptr_j], %[frfi], %[tmp2] \n\t" + "addu %[tr], %[frfi], %[tr] \n\t" + "addu %[ti], %[frfi], %[ti] \n\t" + "ulw %[tmp1], 0(%[ptr_i]) \n\t" + "ulw %[tmp2], 0(%[ptr_j]) \n\t" + "ulw %[tmp3], 0(%[tr]) \n\t" + "ulw %[tmp4], 0(%[ti]) \n\t" + "usw %[tmp1], 0(%[ptr_j]) \n\t" + "usw %[tmp2], 0(%[ptr_i]) \n\t" + "usw %[tmp4], 0(%[tr]) \n\t" + "usw %[tmp3], 0(%[ti]) \n\t" + "bgtz %[l], 1b \n\t" + " addiu %[pcoeftable_7], %[pcoeftable_7], 16 \n\t" + ".set pop \n\t" - : [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [ptr_i] "=&r" (ptr_i), - [ptr_j] "=&r" (ptr_j), [ti] "=&r" (ti), [tr] "=&r" (tr), - [l] "=&r" (l), [pcoeftable_7] "+r" (pcoeftable_7), - [tmp3] "=&r" (tmp3), [tmp4] "=&r" (tmp4) - : [frfi] "r" (frfi) - : "memory" - ); + : [tmp1] "=&r"(tmp1), [tmp2] "=&r"(tmp2), [ptr_i] "=&r"(ptr_i), + [ptr_j] "=&r"(ptr_j), [ti] "=&r"(ti), [tr] "=&r"(tr), [l] "=&r"(l), + [pcoeftable_7] "+r"(pcoeftable_7), [tmp3] "=&r"(tmp3), + [tmp4] "=&r"(tmp4) + : [frfi] "r"(frfi) + : "memory"); } } diff --git a/common_audio/signal_processing/complex_fft.c b/common_audio/signal_processing/complex_fft.c index ddc9a97b59..d9d970d336 100644 --- a/common_audio/signal_processing/complex_fft.c +++ b/common_audio/signal_processing/complex_fft.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the function WebRtcSpl_ComplexFFT(). * The description header can be found in signal_processing_library.h @@ -26,274 +25,243 @@ #define CIFFTSFT 14 #define CIFFTRND 1 - -int WebRtcSpl_ComplexFFT(int16_t frfi[], int stages, int mode) -{ - int i, j, l, k, istep, n, m; - int16_t wr, wi; - int32_t tr32, ti32, qr32, qi32; - - /* The 1024-value is a constant given from the size of kSinTable1024[], - * and should not be changed depending on the input parameter 'stages' - */ - n = 1 << stages; - if (n > 1024) - return -1; - - l = 1; - k = 10 - 1; /* Constant for given kSinTable1024[]. Do not change - depending on the input parameter 'stages' */ - - if (mode == 0) - { - // mode==0: Low-complexity and Low-accuracy mode - while (l < n) - { - istep = l << 1; - - for (m = 0; m < l; ++m) - { - j = m << k; - - /* The 256-value is a constant given as 1/4 of the size of - * kSinTable1024[], and should not be changed depending on the input - * parameter 'stages'. It will result in 0 <= j < N_SINE_WAVE/2 - */ - wr = kSinTable1024[j + 256]; - wi = -kSinTable1024[j]; - - for (i = m; i < n; i += istep) - { - j = i + l; - - tr32 = (wr * frfi[2 * j] - wi * frfi[2 * j + 1]) >> 15; - - ti32 = (wr * frfi[2 * j + 1] + wi * frfi[2 * j]) >> 15; - - qr32 = (int32_t)frfi[2 * i]; - qi32 = (int32_t)frfi[2 * i + 1]; - frfi[2 * j] = (int16_t)((qr32 - tr32) >> 1); - frfi[2 * j + 1] = (int16_t)((qi32 - ti32) >> 1); - frfi[2 * i] = (int16_t)((qr32 + tr32) >> 1); - frfi[2 * i + 1] = (int16_t)((qi32 + ti32) >> 1); - } - } - - --k; - l = istep; - +int WebRtcSpl_ComplexFFT(int16_t frfi[], int stages, int mode) { + int i, j, l, k, istep, n, m; + int16_t wr, wi; + int32_t tr32, ti32, qr32, qi32; + + /* The 1024-value is a constant given from the size of kSinTable1024[], + * and should not be changed depending on the input parameter 'stages' + */ + n = 1 << stages; + if (n > 1024) + return -1; + + l = 1; + k = 10 - 1; /* Constant for given kSinTable1024[]. 
Do not change + depending on the input parameter 'stages' */ + + if (mode == 0) { + // mode==0: Low-complexity and Low-accuracy mode + while (l < n) { + istep = l << 1; + + for (m = 0; m < l; ++m) { + j = m << k; + + /* The 256-value is a constant given as 1/4 of the size of + * kSinTable1024[], and should not be changed depending on the input + * parameter 'stages'. It will result in 0 <= j < N_SINE_WAVE/2 + */ + wr = kSinTable1024[j + 256]; + wi = -kSinTable1024[j]; + + for (i = m; i < n; i += istep) { + j = i + l; + + tr32 = (wr * frfi[2 * j] - wi * frfi[2 * j + 1]) >> 15; + + ti32 = (wr * frfi[2 * j + 1] + wi * frfi[2 * j]) >> 15; + + qr32 = (int32_t)frfi[2 * i]; + qi32 = (int32_t)frfi[2 * i + 1]; + frfi[2 * j] = (int16_t)((qr32 - tr32) >> 1); + frfi[2 * j + 1] = (int16_t)((qi32 - ti32) >> 1); + frfi[2 * i] = (int16_t)((qr32 + tr32) >> 1); + frfi[2 * i + 1] = (int16_t)((qi32 + ti32) >> 1); } + } + + --k; + l = istep; + } - } else - { - // mode==1: High-complexity and High-accuracy mode - while (l < n) - { - istep = l << 1; + } else { + // mode==1: High-complexity and High-accuracy mode + while (l < n) { + istep = l << 1; - for (m = 0; m < l; ++m) - { - j = m << k; + for (m = 0; m < l; ++m) { + j = m << k; - /* The 256-value is a constant given as 1/4 of the size of - * kSinTable1024[], and should not be changed depending on the input - * parameter 'stages'. It will result in 0 <= j < N_SINE_WAVE/2 - */ - wr = kSinTable1024[j + 256]; - wi = -kSinTable1024[j]; + /* The 256-value is a constant given as 1/4 of the size of + * kSinTable1024[], and should not be changed depending on the input + * parameter 'stages'. It will result in 0 <= j < N_SINE_WAVE/2 + */ + wr = kSinTable1024[j + 256]; + wi = -kSinTable1024[j]; #ifdef WEBRTC_ARCH_ARM_V7 - int32_t wri = 0; - __asm __volatile("pkhbt %0, %1, %2, lsl #16" : "=r"(wri) : - "r"((int32_t)wr), "r"((int32_t)wi)); + int32_t wri = 0; + __asm __volatile("pkhbt %0, %1, %2, lsl #16" + : "=r"(wri) + : "r"((int32_t)wr), "r"((int32_t)wi)); #endif - for (i = m; i < n; i += istep) - { - j = i + l; + for (i = m; i < n; i += istep) { + j = i + l; #ifdef WEBRTC_ARCH_ARM_V7 - register int32_t frfi_r; - __asm __volatile( - "pkhbt %[frfi_r], %[frfi_even], %[frfi_odd]," - " lsl #16\n\t" - "smlsd %[tr32], %[wri], %[frfi_r], %[cfftrnd]\n\t" - "smladx %[ti32], %[wri], %[frfi_r], %[cfftrnd]\n\t" - :[frfi_r]"=&r"(frfi_r), - [tr32]"=&r"(tr32), - [ti32]"=r"(ti32) - :[frfi_even]"r"((int32_t)frfi[2*j]), - [frfi_odd]"r"((int32_t)frfi[2*j +1]), - [wri]"r"(wri), - [cfftrnd]"r"(CFFTRND)); + register int32_t frfi_r; + __asm __volatile( + "pkhbt %[frfi_r], %[frfi_even], %[frfi_odd]," + " lsl #16\n\t" + "smlsd %[tr32], %[wri], %[frfi_r], %[cfftrnd]\n\t" + "smladx %[ti32], %[wri], %[frfi_r], %[cfftrnd]\n\t" + : [frfi_r] "=&r"(frfi_r), [tr32] "=&r"(tr32), [ti32] "=r"(ti32) + : [frfi_even] "r"((int32_t)frfi[2 * j]), + [frfi_odd] "r"((int32_t)frfi[2 * j + 1]), [wri] "r"(wri), + [cfftrnd] "r"(CFFTRND)); #else - tr32 = wr * frfi[2 * j] - wi * frfi[2 * j + 1] + CFFTRND; + tr32 = wr * frfi[2 * j] - wi * frfi[2 * j + 1] + CFFTRND; - ti32 = wr * frfi[2 * j + 1] + wi * frfi[2 * j] + CFFTRND; + ti32 = wr * frfi[2 * j + 1] + wi * frfi[2 * j] + CFFTRND; #endif - tr32 >>= 15 - CFFTSFT; - ti32 >>= 15 - CFFTSFT; - - qr32 = ((int32_t)frfi[2 * i]) * (1 << CFFTSFT); - qi32 = ((int32_t)frfi[2 * i + 1]) * (1 << CFFTSFT); - - frfi[2 * j] = (int16_t)( - (qr32 - tr32 + CFFTRND2) >> (1 + CFFTSFT)); - frfi[2 * j + 1] = (int16_t)( - (qi32 - ti32 + CFFTRND2) >> (1 + CFFTSFT)); - frfi[2 * i] = (int16_t)( - 
(qr32 + tr32 + CFFTRND2) >> (1 + CFFTSFT)); - frfi[2 * i + 1] = (int16_t)( - (qi32 + ti32 + CFFTRND2) >> (1 + CFFTSFT)); - } - } - - --k; - l = istep; + tr32 >>= 15 - CFFTSFT; + ti32 >>= 15 - CFFTSFT; + + qr32 = ((int32_t)frfi[2 * i]) * (1 << CFFTSFT); + qi32 = ((int32_t)frfi[2 * i + 1]) * (1 << CFFTSFT); + + frfi[2 * j] = (int16_t)((qr32 - tr32 + CFFTRND2) >> (1 + CFFTSFT)); + frfi[2 * j + 1] = + (int16_t)((qi32 - ti32 + CFFTRND2) >> (1 + CFFTSFT)); + frfi[2 * i] = (int16_t)((qr32 + tr32 + CFFTRND2) >> (1 + CFFTSFT)); + frfi[2 * i + 1] = + (int16_t)((qi32 + ti32 + CFFTRND2) >> (1 + CFFTSFT)); } + } + + --k; + l = istep; } - return 0; + } + return 0; } -int WebRtcSpl_ComplexIFFT(int16_t frfi[], int stages, int mode) -{ - size_t i, j, l, istep, n, m; - int k, scale, shift; - int16_t wr, wi; - int32_t tr32, ti32, qr32, qi32; - int32_t tmp32, round2; - - /* The 1024-value is a constant given from the size of kSinTable1024[], - * and should not be changed depending on the input parameter 'stages' - */ - n = ((size_t)1) << stages; - if (n > 1024) - return -1; - - scale = 0; - - l = 1; - k = 10 - 1; /* Constant for given kSinTable1024[]. Do not change - depending on the input parameter 'stages' */ - - while (l < n) - { - // variable scaling, depending upon data - shift = 0; - round2 = 8192; - - tmp32 = WebRtcSpl_MaxAbsValueW16(frfi, 2 * n); - if (tmp32 > 13573) - { - shift++; - scale++; - round2 <<= 1; - } - if (tmp32 > 27146) - { - shift++; - scale++; - round2 <<= 1; +int WebRtcSpl_ComplexIFFT(int16_t frfi[], int stages, int mode) { + size_t i, j, l, istep, n, m; + int k, scale, shift; + int16_t wr, wi; + int32_t tr32, ti32, qr32, qi32; + int32_t tmp32, round2; + + /* The 1024-value is a constant given from the size of kSinTable1024[], + * and should not be changed depending on the input parameter 'stages' + */ + n = ((size_t)1) << stages; + if (n > 1024) + return -1; + + scale = 0; + + l = 1; + k = 10 - 1; /* Constant for given kSinTable1024[]. Do not change + depending on the input parameter 'stages' */ + + while (l < n) { + // variable scaling, depending upon data + shift = 0; + round2 = 8192; + + tmp32 = WebRtcSpl_MaxAbsValueW16(frfi, 2 * n); + if (tmp32 > 13573) { + shift++; + scale++; + round2 <<= 1; + } + if (tmp32 > 27146) { + shift++; + scale++; + round2 <<= 1; + } + + istep = l << 1; + + if (mode == 0) { + // mode==0: Low-complexity and Low-accuracy mode + for (m = 0; m < l; ++m) { + j = m << k; + + /* The 256-value is a constant given as 1/4 of the size of + * kSinTable1024[], and should not be changed depending on the input + * parameter 'stages'. It will result in 0 <= j < N_SINE_WAVE/2 + */ + wr = kSinTable1024[j + 256]; + wi = kSinTable1024[j]; + + for (i = m; i < n; i += istep) { + j = i + l; + + tr32 = (wr * frfi[2 * j] - wi * frfi[2 * j + 1]) >> 15; + + ti32 = (wr * frfi[2 * j + 1] + wi * frfi[2 * j]) >> 15; + + qr32 = (int32_t)frfi[2 * i]; + qi32 = (int32_t)frfi[2 * i + 1]; + frfi[2 * j] = (int16_t)((qr32 - tr32) >> shift); + frfi[2 * j + 1] = (int16_t)((qi32 - ti32) >> shift); + frfi[2 * i] = (int16_t)((qr32 + tr32) >> shift); + frfi[2 * i + 1] = (int16_t)((qi32 + ti32) >> shift); } + } + } else { + // mode==1: High-complexity and High-accuracy mode - istep = l << 1; - - if (mode == 0) - { - // mode==0: Low-complexity and Low-accuracy mode - for (m = 0; m < l; ++m) - { - j = m << k; - - /* The 256-value is a constant given as 1/4 of the size of - * kSinTable1024[], and should not be changed depending on the input - * parameter 'stages'. 
It will result in 0 <= j < N_SINE_WAVE/2 - */ - wr = kSinTable1024[j + 256]; - wi = kSinTable1024[j]; - - for (i = m; i < n; i += istep) - { - j = i + l; - - tr32 = (wr * frfi[2 * j] - wi * frfi[2 * j + 1]) >> 15; - - ti32 = (wr * frfi[2 * j + 1] + wi * frfi[2 * j]) >> 15; - - qr32 = (int32_t)frfi[2 * i]; - qi32 = (int32_t)frfi[2 * i + 1]; - frfi[2 * j] = (int16_t)((qr32 - tr32) >> shift); - frfi[2 * j + 1] = (int16_t)((qi32 - ti32) >> shift); - frfi[2 * i] = (int16_t)((qr32 + tr32) >> shift); - frfi[2 * i + 1] = (int16_t)((qi32 + ti32) >> shift); - } - } - } else - { - // mode==1: High-complexity and High-accuracy mode - - for (m = 0; m < l; ++m) - { - j = m << k; - - /* The 256-value is a constant given as 1/4 of the size of - * kSinTable1024[], and should not be changed depending on the input - * parameter 'stages'. It will result in 0 <= j < N_SINE_WAVE/2 - */ - wr = kSinTable1024[j + 256]; - wi = kSinTable1024[j]; + for (m = 0; m < l; ++m) { + j = m << k; + + /* The 256-value is a constant given as 1/4 of the size of + * kSinTable1024[], and should not be changed depending on the input + * parameter 'stages'. It will result in 0 <= j < N_SINE_WAVE/2 + */ + wr = kSinTable1024[j + 256]; + wi = kSinTable1024[j]; #ifdef WEBRTC_ARCH_ARM_V7 - int32_t wri = 0; - __asm __volatile("pkhbt %0, %1, %2, lsl #16" : "=r"(wri) : - "r"((int32_t)wr), "r"((int32_t)wi)); + int32_t wri = 0; + __asm __volatile("pkhbt %0, %1, %2, lsl #16" + : "=r"(wri) + : "r"((int32_t)wr), "r"((int32_t)wi)); #endif - for (i = m; i < n; i += istep) - { - j = i + l; + for (i = m; i < n; i += istep) { + j = i + l; #ifdef WEBRTC_ARCH_ARM_V7 - register int32_t frfi_r; - __asm __volatile( - "pkhbt %[frfi_r], %[frfi_even], %[frfi_odd], lsl #16\n\t" - "smlsd %[tr32], %[wri], %[frfi_r], %[cifftrnd]\n\t" - "smladx %[ti32], %[wri], %[frfi_r], %[cifftrnd]\n\t" - :[frfi_r]"=&r"(frfi_r), - [tr32]"=&r"(tr32), - [ti32]"=r"(ti32) - :[frfi_even]"r"((int32_t)frfi[2*j]), - [frfi_odd]"r"((int32_t)frfi[2*j +1]), - [wri]"r"(wri), - [cifftrnd]"r"(CIFFTRND) - ); + register int32_t frfi_r; + __asm __volatile( + "pkhbt %[frfi_r], %[frfi_even], %[frfi_odd], lsl #16\n\t" + "smlsd %[tr32], %[wri], %[frfi_r], %[cifftrnd]\n\t" + "smladx %[ti32], %[wri], %[frfi_r], %[cifftrnd]\n\t" + : [frfi_r] "=&r"(frfi_r), [tr32] "=&r"(tr32), [ti32] "=r"(ti32) + : [frfi_even] "r"((int32_t)frfi[2 * j]), + [frfi_odd] "r"((int32_t)frfi[2 * j + 1]), [wri] "r"(wri), + [cifftrnd] "r"(CIFFTRND)); #else - tr32 = wr * frfi[2 * j] - wi * frfi[2 * j + 1] + CIFFTRND; + tr32 = wr * frfi[2 * j] - wi * frfi[2 * j + 1] + CIFFTRND; - ti32 = wr * frfi[2 * j + 1] + wi * frfi[2 * j] + CIFFTRND; + ti32 = wr * frfi[2 * j + 1] + wi * frfi[2 * j] + CIFFTRND; #endif - tr32 >>= 15 - CIFFTSFT; - ti32 >>= 15 - CIFFTSFT; - - qr32 = ((int32_t)frfi[2 * i]) * (1 << CIFFTSFT); - qi32 = ((int32_t)frfi[2 * i + 1]) * (1 << CIFFTSFT); - - frfi[2 * j] = (int16_t)( - (qr32 - tr32 + round2) >> (shift + CIFFTSFT)); - frfi[2 * j + 1] = (int16_t)( - (qi32 - ti32 + round2) >> (shift + CIFFTSFT)); - frfi[2 * i] = (int16_t)( - (qr32 + tr32 + round2) >> (shift + CIFFTSFT)); - frfi[2 * i + 1] = (int16_t)( - (qi32 + ti32 + round2) >> (shift + CIFFTSFT)); - } - } - + tr32 >>= 15 - CIFFTSFT; + ti32 >>= 15 - CIFFTSFT; + + qr32 = ((int32_t)frfi[2 * i]) * (1 << CIFFTSFT); + qi32 = ((int32_t)frfi[2 * i + 1]) * (1 << CIFFTSFT); + + frfi[2 * j] = (int16_t)((qr32 - tr32 + round2) >> (shift + CIFFTSFT)); + frfi[2 * j + 1] = + (int16_t)((qi32 - ti32 + round2) >> (shift + CIFFTSFT)); + frfi[2 * i] = (int16_t)((qr32 + tr32 + 
round2) >> (shift + CIFFTSFT)); + frfi[2 * i + 1] = + (int16_t)((qi32 + ti32 + round2) >> (shift + CIFFTSFT)); } - --k; - l = istep; + } } - return scale; + --k; + l = istep; + } + return scale; } diff --git a/common_audio/signal_processing/complex_fft_mips.c b/common_audio/signal_processing/complex_fft_mips.c index 27071f8b39..70e5933328 100644 --- a/common_audio/signal_processing/complex_fft_mips.c +++ b/common_audio/signal_processing/complex_fft_mips.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - #include "common_audio/signal_processing/complex_fft_tables.h" #include "common_audio/signal_processing/include/signal_processing_library.h" @@ -42,106 +41,107 @@ int WebRtcSpl_ComplexFFT(int16_t frfi[], int stages, int mode) { return -1; } - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" + __asm __volatile( + ".set push \n\t" + ".set noreorder \n\t" - "addiu %[k], $zero, 10 \n\t" - "addiu %[l], $zero, 1 \n\t" - "3: \n\t" - "sll %[istep], %[l], 1 \n\t" - "move %[m], $zero \n\t" - "sll %[tmp], %[l], 2 \n\t" - "move %[i], $zero \n\t" - "2: \n\t" + "addiu %[k], $zero, 10 \n\t" + "addiu %[l], $zero, 1 \n\t" + "3: \n\t" + "sll %[istep], %[l], 1 \n\t" + "move %[m], $zero \n\t" + "sll %[tmp], %[l], 2 \n\t" + "move %[i], $zero \n\t" + "2: \n\t" #if defined(MIPS_DSP_R1_LE) - "sllv %[tmp3], %[m], %[k] \n\t" - "addiu %[tmp2], %[tmp3], 512 \n\t" - "addiu %[m], %[m], 1 \n\t" - "lhx %[wi], %[tmp3](%[kSinTable1024]) \n\t" - "lhx %[wr], %[tmp2](%[kSinTable1024]) \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "sllv %[tmp3], %[m], %[k] \n\t" - "addu %[ptr_j], %[tmp3], %[kSinTable1024] \n\t" - "addiu %[ptr_i], %[ptr_j], 512 \n\t" - "addiu %[m], %[m], 1 \n\t" - "lh %[wi], 0(%[ptr_j]) \n\t" - "lh %[wr], 0(%[ptr_i]) \n\t" + "sllv %[tmp3], %[m], %[k] \n\t" + "addiu %[tmp2], %[tmp3], 512 \n\t" + "addiu %[m], %[m], 1 \n\t" + "lhx %[wi], %[tmp3](%[kSinTable1024]) \n\t" + "lhx %[wr], %[tmp2](%[kSinTable1024]) \n\t" +#else // #if defined(MIPS_DSP_R1_LE) + "sllv %[tmp3], %[m], %[k] \n\t" + "addu %[ptr_j], %[tmp3], %[kSinTable1024] \n\t" + "addiu %[ptr_i], %[ptr_j], 512 \n\t" + "addiu %[m], %[m], 1 \n\t" + "lh %[wi], 0(%[ptr_j]) \n\t" + "lh %[wr], 0(%[ptr_i]) \n\t" #endif // #if defined(MIPS_DSP_R1_LE) - "1: \n\t" - "sll %[tmp1], %[i], 2 \n\t" - "addu %[ptr_i], %[frfi], %[tmp1] \n\t" - "addu %[ptr_j], %[ptr_i], %[tmp] \n\t" - "lh %[tmp6], 0(%[ptr_i]) \n\t" - "lh %[tmp5], 2(%[ptr_i]) \n\t" - "lh %[tmp3], 0(%[ptr_j]) \n\t" - "lh %[tmp4], 2(%[ptr_j]) \n\t" - "addu %[i], %[i], %[istep] \n\t" + "1: \n\t" + "sll %[tmp1], %[i], 2 \n\t" + "addu %[ptr_i], %[frfi], %[tmp1] \n\t" + "addu %[ptr_j], %[ptr_i], %[tmp] \n\t" + "lh %[tmp6], 0(%[ptr_i]) \n\t" + "lh %[tmp5], 2(%[ptr_i]) \n\t" + "lh %[tmp3], 0(%[ptr_j]) \n\t" + "lh %[tmp4], 2(%[ptr_j]) \n\t" + "addu %[i], %[i], %[istep] \n\t" #if defined(MIPS_DSP_R2_LE) - "mult %[wr], %[tmp3] \n\t" - "madd %[wi], %[tmp4] \n\t" - "mult $ac1, %[wr], %[tmp4] \n\t" - "msub $ac1, %[wi], %[tmp3] \n\t" - "mflo %[tmp1] \n\t" - "mflo %[tmp2], $ac1 \n\t" - "sll %[tmp6], %[tmp6], 14 \n\t" - "sll %[tmp5], %[tmp5], 14 \n\t" - "shra_r.w %[tmp1], %[tmp1], 1 \n\t" - "shra_r.w %[tmp2], %[tmp2], 1 \n\t" - "subu %[tmp4], %[tmp6], %[tmp1] \n\t" - "addu %[tmp1], %[tmp6], %[tmp1] \n\t" - "addu %[tmp6], %[tmp5], %[tmp2] \n\t" - "subu %[tmp5], %[tmp5], %[tmp2] \n\t" - "shra_r.w %[tmp1], %[tmp1], 15 \n\t" - "shra_r.w %[tmp6], %[tmp6], 15 \n\t" - "shra_r.w %[tmp4], %[tmp4], 15 \n\t" - "shra_r.w %[tmp5], %[tmp5], 15 \n\t" -#else // #if defined(MIPS_DSP_R2_LE) - "mul 
%[tmp2], %[wr], %[tmp4] \n\t" - "mul %[tmp1], %[wr], %[tmp3] \n\t" - "mul %[tmp4], %[wi], %[tmp4] \n\t" - "mul %[tmp3], %[wi], %[tmp3] \n\t" - "sll %[tmp6], %[tmp6], 14 \n\t" - "sll %[tmp5], %[tmp5], 14 \n\t" - "addiu %[tmp6], %[tmp6], 16384 \n\t" - "addiu %[tmp5], %[tmp5], 16384 \n\t" - "addu %[tmp1], %[tmp1], %[tmp4] \n\t" - "subu %[tmp2], %[tmp2], %[tmp3] \n\t" - "addiu %[tmp1], %[tmp1], 1 \n\t" - "addiu %[tmp2], %[tmp2], 1 \n\t" - "sra %[tmp1], %[tmp1], 1 \n\t" - "sra %[tmp2], %[tmp2], 1 \n\t" - "subu %[tmp4], %[tmp6], %[tmp1] \n\t" - "addu %[tmp1], %[tmp6], %[tmp1] \n\t" - "addu %[tmp6], %[tmp5], %[tmp2] \n\t" - "subu %[tmp5], %[tmp5], %[tmp2] \n\t" - "sra %[tmp4], %[tmp4], 15 \n\t" - "sra %[tmp1], %[tmp1], 15 \n\t" - "sra %[tmp6], %[tmp6], 15 \n\t" - "sra %[tmp5], %[tmp5], 15 \n\t" + "mult %[wr], %[tmp3] \n\t" + "madd %[wi], %[tmp4] \n\t" + "mult $ac1, %[wr], %[tmp4] \n\t" + "msub $ac1, %[wi], %[tmp3] \n\t" + "mflo %[tmp1] \n\t" + "mflo %[tmp2], $ac1 \n\t" + "sll %[tmp6], %[tmp6], 14 \n\t" + "sll %[tmp5], %[tmp5], 14 \n\t" + "shra_r.w %[tmp1], %[tmp1], 1 \n\t" + "shra_r.w %[tmp2], %[tmp2], 1 \n\t" + "subu %[tmp4], %[tmp6], %[tmp1] \n\t" + "addu %[tmp1], %[tmp6], %[tmp1] \n\t" + "addu %[tmp6], %[tmp5], %[tmp2] \n\t" + "subu %[tmp5], %[tmp5], %[tmp2] \n\t" + "shra_r.w %[tmp1], %[tmp1], 15 \n\t" + "shra_r.w %[tmp6], %[tmp6], 15 \n\t" + "shra_r.w %[tmp4], %[tmp4], 15 \n\t" + "shra_r.w %[tmp5], %[tmp5], 15 \n\t" +#else // #if defined(MIPS_DSP_R2_LE) + "mul %[tmp2], %[wr], %[tmp4] \n\t" + "mul %[tmp1], %[wr], %[tmp3] \n\t" + "mul %[tmp4], %[wi], %[tmp4] \n\t" + "mul %[tmp3], %[wi], %[tmp3] \n\t" + "sll %[tmp6], %[tmp6], 14 \n\t" + "sll %[tmp5], %[tmp5], 14 \n\t" + "addiu %[tmp6], %[tmp6], 16384 \n\t" + "addiu %[tmp5], %[tmp5], 16384 \n\t" + "addu %[tmp1], %[tmp1], %[tmp4] \n\t" + "subu %[tmp2], %[tmp2], %[tmp3] \n\t" + "addiu %[tmp1], %[tmp1], 1 \n\t" + "addiu %[tmp2], %[tmp2], 1 \n\t" + "sra %[tmp1], %[tmp1], 1 \n\t" + "sra %[tmp2], %[tmp2], 1 \n\t" + "subu %[tmp4], %[tmp6], %[tmp1] \n\t" + "addu %[tmp1], %[tmp6], %[tmp1] \n\t" + "addu %[tmp6], %[tmp5], %[tmp2] \n\t" + "subu %[tmp5], %[tmp5], %[tmp2] \n\t" + "sra %[tmp4], %[tmp4], 15 \n\t" + "sra %[tmp1], %[tmp1], 15 \n\t" + "sra %[tmp6], %[tmp6], 15 \n\t" + "sra %[tmp5], %[tmp5], 15 \n\t" #endif // #if defined(MIPS_DSP_R2_LE) - "sh %[tmp1], 0(%[ptr_i]) \n\t" - "sh %[tmp6], 2(%[ptr_i]) \n\t" - "sh %[tmp4], 0(%[ptr_j]) \n\t" - "blt %[i], %[n], 1b \n\t" - " sh %[tmp5], 2(%[ptr_j]) \n\t" - "blt %[m], %[l], 2b \n\t" - " addu %[i], $zero, %[m] \n\t" - "move %[l], %[istep] \n\t" - "blt %[l], %[n], 3b \n\t" - " addiu %[k], %[k], -1 \n\t" + "sh %[tmp1], 0(%[ptr_i]) \n\t" + "sh %[tmp6], 2(%[ptr_i]) \n\t" + "sh %[tmp4], 0(%[ptr_j]) \n\t" + "blt %[i], %[n], 1b \n\t" + " sh %[tmp5], 2(%[ptr_j]) \n\t" + "blt %[m], %[l], 2b \n\t" + " addu %[i], $zero, %[m] \n\t" + "move %[l], %[istep] \n\t" + "blt %[l], %[n], 3b \n\t" + " addiu %[k], %[k], -1 \n\t" - ".set pop \n\t" + ".set pop \n\t" - : [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [tmp3] "=&r" (tmp3), - [tmp4] "=&r" (tmp4), [tmp5] "=&r" (tmp5), [tmp6] "=&r" (tmp6), - [ptr_i] "=&r" (ptr_i), [i] "=&r" (i), [wi] "=&r" (wi), [wr] "=&r" (wr), - [m] "=&r" (m), [istep] "=&r" (istep), [l] "=&r" (l), [k] "=&r" (k), - [ptr_j] "=&r" (ptr_j), [tmp] "=&r" (tmp) - : [n] "r" (n), [frfi] "r" (frfi), [kSinTable1024] "r" (kSinTable1024) - : "hi", "lo", "memory" + : [tmp1] "=&r"(tmp1), [tmp2] "=&r"(tmp2), [tmp3] "=&r"(tmp3), + [tmp4] "=&r"(tmp4), [tmp5] "=&r"(tmp5), [tmp6] "=&r"(tmp6), + [ptr_i] "=&r"(ptr_i), [i] "=&r"(i), 
[wi] "=&r"(wi), [wr] "=&r"(wr), + [m] "=&r"(m), [istep] "=&r"(istep), [l] "=&r"(l), [k] "=&r"(k), + [ptr_j] "=&r"(ptr_j), [tmp] "=&r"(tmp) + : [n] "r"(n), [frfi] "r"(frfi), [kSinTable1024] "r"(kSinTable1024) + : "hi", "lo", "memory" #if defined(MIPS_DSP_R2_LE) - , "$ac1hi", "$ac1lo" + , + "$ac1hi", "$ac1lo" #endif // #if defined(MIPS_DSP_R2_LE) ); @@ -163,166 +163,166 @@ int WebRtcSpl_ComplexIFFT(int16_t frfi[], int stages, int mode) { return -1; } - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" + __asm __volatile( + ".set push \n\t" + ".set noreorder \n\t" - "addiu %[k], $zero, 10 \n\t" - "addiu %[l], $zero, 1 \n\t" - "move %[scale], $zero \n\t" - "3: \n\t" - "addiu %[shift], $zero, 14 \n\t" - "addiu %[round2], $zero, 8192 \n\t" - "move %[ptr_i], %[frfi] \n\t" - "move %[tempMax], $zero \n\t" - "addu %[i], %[n], %[n] \n\t" - "5: \n\t" - "lh %[tmp1], 0(%[ptr_i]) \n\t" - "lh %[tmp2], 2(%[ptr_i]) \n\t" - "lh %[tmp3], 4(%[ptr_i]) \n\t" - "lh %[tmp4], 6(%[ptr_i]) \n\t" + "addiu %[k], $zero, 10 \n\t" + "addiu %[l], $zero, 1 \n\t" + "move %[scale], $zero \n\t" + "3: \n\t" + "addiu %[shift], $zero, 14 \n\t" + "addiu %[round2], $zero, 8192 \n\t" + "move %[ptr_i], %[frfi] \n\t" + "move %[tempMax], $zero \n\t" + "addu %[i], %[n], %[n] \n\t" + "5: \n\t" + "lh %[tmp1], 0(%[ptr_i]) \n\t" + "lh %[tmp2], 2(%[ptr_i]) \n\t" + "lh %[tmp3], 4(%[ptr_i]) \n\t" + "lh %[tmp4], 6(%[ptr_i]) \n\t" #if defined(MIPS_DSP_R1_LE) - "absq_s.w %[tmp1], %[tmp1] \n\t" - "absq_s.w %[tmp2], %[tmp2] \n\t" - "absq_s.w %[tmp3], %[tmp3] \n\t" - "absq_s.w %[tmp4], %[tmp4] \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "slt %[tmp5], %[tmp1], $zero \n\t" - "subu %[tmp6], $zero, %[tmp1] \n\t" - "movn %[tmp1], %[tmp6], %[tmp5] \n\t" - "slt %[tmp5], %[tmp2], $zero \n\t" - "subu %[tmp6], $zero, %[tmp2] \n\t" - "movn %[tmp2], %[tmp6], %[tmp5] \n\t" - "slt %[tmp5], %[tmp3], $zero \n\t" - "subu %[tmp6], $zero, %[tmp3] \n\t" - "movn %[tmp3], %[tmp6], %[tmp5] \n\t" - "slt %[tmp5], %[tmp4], $zero \n\t" - "subu %[tmp6], $zero, %[tmp4] \n\t" - "movn %[tmp4], %[tmp6], %[tmp5] \n\t" + "absq_s.w %[tmp1], %[tmp1] \n\t" + "absq_s.w %[tmp2], %[tmp2] \n\t" + "absq_s.w %[tmp3], %[tmp3] \n\t" + "absq_s.w %[tmp4], %[tmp4] \n\t" +#else // #if defined(MIPS_DSP_R1_LE) + "slt %[tmp5], %[tmp1], $zero \n\t" + "subu %[tmp6], $zero, %[tmp1] \n\t" + "movn %[tmp1], %[tmp6], %[tmp5] \n\t" + "slt %[tmp5], %[tmp2], $zero \n\t" + "subu %[tmp6], $zero, %[tmp2] \n\t" + "movn %[tmp2], %[tmp6], %[tmp5] \n\t" + "slt %[tmp5], %[tmp3], $zero \n\t" + "subu %[tmp6], $zero, %[tmp3] \n\t" + "movn %[tmp3], %[tmp6], %[tmp5] \n\t" + "slt %[tmp5], %[tmp4], $zero \n\t" + "subu %[tmp6], $zero, %[tmp4] \n\t" + "movn %[tmp4], %[tmp6], %[tmp5] \n\t" #endif // #if defined(MIPS_DSP_R1_LE) - "slt %[tmp5], %[tempMax], %[tmp1] \n\t" - "movn %[tempMax], %[tmp1], %[tmp5] \n\t" - "addiu %[i], %[i], -4 \n\t" - "slt %[tmp5], %[tempMax], %[tmp2] \n\t" - "movn %[tempMax], %[tmp2], %[tmp5] \n\t" - "slt %[tmp5], %[tempMax], %[tmp3] \n\t" - "movn %[tempMax], %[tmp3], %[tmp5] \n\t" - "slt %[tmp5], %[tempMax], %[tmp4] \n\t" - "movn %[tempMax], %[tmp4], %[tmp5] \n\t" - "bgtz %[i], 5b \n\t" - " addiu %[ptr_i], %[ptr_i], 8 \n\t" - "addiu %[tmp1], $zero, 13573 \n\t" - "addiu %[tmp2], $zero, 27146 \n\t" + "slt %[tmp5], %[tempMax], %[tmp1] \n\t" + "movn %[tempMax], %[tmp1], %[tmp5] \n\t" + "addiu %[i], %[i], -4 \n\t" + "slt %[tmp5], %[tempMax], %[tmp2] \n\t" + "movn %[tempMax], %[tmp2], %[tmp5] \n\t" + "slt %[tmp5], %[tempMax], %[tmp3] \n\t" + "movn %[tempMax], %[tmp3], %[tmp5] \n\t" + "slt 
%[tmp5], %[tempMax], %[tmp4] \n\t" + "movn %[tempMax], %[tmp4], %[tmp5] \n\t" + "bgtz %[i], 5b \n\t" + " addiu %[ptr_i], %[ptr_i], 8 \n\t" + "addiu %[tmp1], $zero, 13573 \n\t" + "addiu %[tmp2], $zero, 27146 \n\t" #if !defined(MIPS32_R2_LE) - "sll %[tempMax], %[tempMax], 16 \n\t" - "sra %[tempMax], %[tempMax], 16 \n\t" -#else // #if !defined(MIPS32_R2_LE) - "seh %[tempMax] \n\t" + "sll %[tempMax], %[tempMax], 16 \n\t" + "sra %[tempMax], %[tempMax], 16 \n\t" +#else // #if !defined(MIPS32_R2_LE) + "seh %[tempMax] \n\t" #endif // #if !defined(MIPS32_R2_LE) - "slt %[tmp1], %[tmp1], %[tempMax] \n\t" - "slt %[tmp2], %[tmp2], %[tempMax] \n\t" - "addu %[tmp1], %[tmp1], %[tmp2] \n\t" - "addu %[shift], %[shift], %[tmp1] \n\t" - "addu %[scale], %[scale], %[tmp1] \n\t" - "sllv %[round2], %[round2], %[tmp1] \n\t" - "sll %[istep], %[l], 1 \n\t" - "move %[m], $zero \n\t" - "sll %[tmp], %[l], 2 \n\t" - "2: \n\t" + "slt %[tmp1], %[tmp1], %[tempMax] \n\t" + "slt %[tmp2], %[tmp2], %[tempMax] \n\t" + "addu %[tmp1], %[tmp1], %[tmp2] \n\t" + "addu %[shift], %[shift], %[tmp1] \n\t" + "addu %[scale], %[scale], %[tmp1] \n\t" + "sllv %[round2], %[round2], %[tmp1] \n\t" + "sll %[istep], %[l], 1 \n\t" + "move %[m], $zero \n\t" + "sll %[tmp], %[l], 2 \n\t" + "2: \n\t" #if defined(MIPS_DSP_R1_LE) - "sllv %[tmp3], %[m], %[k] \n\t" - "addiu %[tmp2], %[tmp3], 512 \n\t" - "addiu %[m], %[m], 1 \n\t" - "lhx %[wi], %[tmp3](%[kSinTable1024]) \n\t" - "lhx %[wr], %[tmp2](%[kSinTable1024]) \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "sllv %[tmp3], %[m], %[k] \n\t" - "addu %[ptr_j], %[tmp3], %[kSinTable1024] \n\t" - "addiu %[ptr_i], %[ptr_j], 512 \n\t" - "addiu %[m], %[m], 1 \n\t" - "lh %[wi], 0(%[ptr_j]) \n\t" - "lh %[wr], 0(%[ptr_i]) \n\t" + "sllv %[tmp3], %[m], %[k] \n\t" + "addiu %[tmp2], %[tmp3], 512 \n\t" + "addiu %[m], %[m], 1 \n\t" + "lhx %[wi], %[tmp3](%[kSinTable1024]) \n\t" + "lhx %[wr], %[tmp2](%[kSinTable1024]) \n\t" +#else // #if defined(MIPS_DSP_R1_LE) + "sllv %[tmp3], %[m], %[k] \n\t" + "addu %[ptr_j], %[tmp3], %[kSinTable1024] \n\t" + "addiu %[ptr_i], %[ptr_j], 512 \n\t" + "addiu %[m], %[m], 1 \n\t" + "lh %[wi], 0(%[ptr_j]) \n\t" + "lh %[wr], 0(%[ptr_i]) \n\t" #endif // #if defined(MIPS_DSP_R1_LE) - "1: \n\t" - "sll %[tmp1], %[i], 2 \n\t" - "addu %[ptr_i], %[frfi], %[tmp1] \n\t" - "addu %[ptr_j], %[ptr_i], %[tmp] \n\t" - "lh %[tmp3], 0(%[ptr_j]) \n\t" - "lh %[tmp4], 2(%[ptr_j]) \n\t" - "lh %[tmp6], 0(%[ptr_i]) \n\t" - "lh %[tmp5], 2(%[ptr_i]) \n\t" - "addu %[i], %[i], %[istep] \n\t" + "1: \n\t" + "sll %[tmp1], %[i], 2 \n\t" + "addu %[ptr_i], %[frfi], %[tmp1] \n\t" + "addu %[ptr_j], %[ptr_i], %[tmp] \n\t" + "lh %[tmp3], 0(%[ptr_j]) \n\t" + "lh %[tmp4], 2(%[ptr_j]) \n\t" + "lh %[tmp6], 0(%[ptr_i]) \n\t" + "lh %[tmp5], 2(%[ptr_i]) \n\t" + "addu %[i], %[i], %[istep] \n\t" #if defined(MIPS_DSP_R2_LE) - "mult %[wr], %[tmp3] \n\t" - "msub %[wi], %[tmp4] \n\t" - "mult $ac1, %[wr], %[tmp4] \n\t" - "madd $ac1, %[wi], %[tmp3] \n\t" - "mflo %[tmp1] \n\t" - "mflo %[tmp2], $ac1 \n\t" - "sll %[tmp6], %[tmp6], 14 \n\t" - "sll %[tmp5], %[tmp5], 14 \n\t" - "shra_r.w %[tmp1], %[tmp1], 1 \n\t" - "shra_r.w %[tmp2], %[tmp2], 1 \n\t" - "addu %[tmp6], %[tmp6], %[round2] \n\t" - "addu %[tmp5], %[tmp5], %[round2] \n\t" - "subu %[tmp4], %[tmp6], %[tmp1] \n\t" - "addu %[tmp1], %[tmp6], %[tmp1] \n\t" - "addu %[tmp6], %[tmp5], %[tmp2] \n\t" - "subu %[tmp5], %[tmp5], %[tmp2] \n\t" - "srav %[tmp4], %[tmp4], %[shift] \n\t" - "srav %[tmp1], %[tmp1], %[shift] \n\t" - "srav %[tmp6], %[tmp6], %[shift] \n\t" - "srav %[tmp5], %[tmp5], %[shift] \n\t" 
-#else // #if defined(MIPS_DSP_R2_LE) - "mul %[tmp1], %[wr], %[tmp3] \n\t" - "mul %[tmp2], %[wr], %[tmp4] \n\t" - "mul %[tmp4], %[wi], %[tmp4] \n\t" - "mul %[tmp3], %[wi], %[tmp3] \n\t" - "sll %[tmp6], %[tmp6], 14 \n\t" - "sll %[tmp5], %[tmp5], 14 \n\t" - "sub %[tmp1], %[tmp1], %[tmp4] \n\t" - "addu %[tmp2], %[tmp2], %[tmp3] \n\t" - "addiu %[tmp1], %[tmp1], 1 \n\t" - "addiu %[tmp2], %[tmp2], 1 \n\t" - "sra %[tmp2], %[tmp2], 1 \n\t" - "sra %[tmp1], %[tmp1], 1 \n\t" - "addu %[tmp6], %[tmp6], %[round2] \n\t" - "addu %[tmp5], %[tmp5], %[round2] \n\t" - "subu %[tmp4], %[tmp6], %[tmp1] \n\t" - "addu %[tmp1], %[tmp6], %[tmp1] \n\t" - "addu %[tmp6], %[tmp5], %[tmp2] \n\t" - "subu %[tmp5], %[tmp5], %[tmp2] \n\t" - "sra %[tmp4], %[tmp4], %[shift] \n\t" - "sra %[tmp1], %[tmp1], %[shift] \n\t" - "sra %[tmp6], %[tmp6], %[shift] \n\t" - "sra %[tmp5], %[tmp5], %[shift] \n\t" + "mult %[wr], %[tmp3] \n\t" + "msub %[wi], %[tmp4] \n\t" + "mult $ac1, %[wr], %[tmp4] \n\t" + "madd $ac1, %[wi], %[tmp3] \n\t" + "mflo %[tmp1] \n\t" + "mflo %[tmp2], $ac1 \n\t" + "sll %[tmp6], %[tmp6], 14 \n\t" + "sll %[tmp5], %[tmp5], 14 \n\t" + "shra_r.w %[tmp1], %[tmp1], 1 \n\t" + "shra_r.w %[tmp2], %[tmp2], 1 \n\t" + "addu %[tmp6], %[tmp6], %[round2] \n\t" + "addu %[tmp5], %[tmp5], %[round2] \n\t" + "subu %[tmp4], %[tmp6], %[tmp1] \n\t" + "addu %[tmp1], %[tmp6], %[tmp1] \n\t" + "addu %[tmp6], %[tmp5], %[tmp2] \n\t" + "subu %[tmp5], %[tmp5], %[tmp2] \n\t" + "srav %[tmp4], %[tmp4], %[shift] \n\t" + "srav %[tmp1], %[tmp1], %[shift] \n\t" + "srav %[tmp6], %[tmp6], %[shift] \n\t" + "srav %[tmp5], %[tmp5], %[shift] \n\t" +#else // #if defined(MIPS_DSP_R2_LE) + "mul %[tmp1], %[wr], %[tmp3] \n\t" + "mul %[tmp2], %[wr], %[tmp4] \n\t" + "mul %[tmp4], %[wi], %[tmp4] \n\t" + "mul %[tmp3], %[wi], %[tmp3] \n\t" + "sll %[tmp6], %[tmp6], 14 \n\t" + "sll %[tmp5], %[tmp5], 14 \n\t" + "sub %[tmp1], %[tmp1], %[tmp4] \n\t" + "addu %[tmp2], %[tmp2], %[tmp3] \n\t" + "addiu %[tmp1], %[tmp1], 1 \n\t" + "addiu %[tmp2], %[tmp2], 1 \n\t" + "sra %[tmp2], %[tmp2], 1 \n\t" + "sra %[tmp1], %[tmp1], 1 \n\t" + "addu %[tmp6], %[tmp6], %[round2] \n\t" + "addu %[tmp5], %[tmp5], %[round2] \n\t" + "subu %[tmp4], %[tmp6], %[tmp1] \n\t" + "addu %[tmp1], %[tmp6], %[tmp1] \n\t" + "addu %[tmp6], %[tmp5], %[tmp2] \n\t" + "subu %[tmp5], %[tmp5], %[tmp2] \n\t" + "sra %[tmp4], %[tmp4], %[shift] \n\t" + "sra %[tmp1], %[tmp1], %[shift] \n\t" + "sra %[tmp6], %[tmp6], %[shift] \n\t" + "sra %[tmp5], %[tmp5], %[shift] \n\t" #endif // #if defined(MIPS_DSP_R2_LE) - "sh %[tmp1], 0(%[ptr_i]) \n\t" - "sh %[tmp6], 2(%[ptr_i]) \n\t" - "sh %[tmp4], 0(%[ptr_j]) \n\t" - "blt %[i], %[n], 1b \n\t" - " sh %[tmp5], 2(%[ptr_j]) \n\t" - "blt %[m], %[l], 2b \n\t" - " addu %[i], $zero, %[m] \n\t" - "move %[l], %[istep] \n\t" - "blt %[l], %[n], 3b \n\t" - " addiu %[k], %[k], -1 \n\t" + "sh %[tmp1], 0(%[ptr_i]) \n\t" + "sh %[tmp6], 2(%[ptr_i]) \n\t" + "sh %[tmp4], 0(%[ptr_j]) \n\t" + "blt %[i], %[n], 1b \n\t" + " sh %[tmp5], 2(%[ptr_j]) \n\t" + "blt %[m], %[l], 2b \n\t" + " addu %[i], $zero, %[m] \n\t" + "move %[l], %[istep] \n\t" + "blt %[l], %[n], 3b \n\t" + " addiu %[k], %[k], -1 \n\t" - ".set pop \n\t" + ".set pop \n\t" - : [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [tmp3] "=&r" (tmp3), - [tmp4] "=&r" (tmp4), [tmp5] "=&r" (tmp5), [tmp6] "=&r" (tmp6), - [ptr_i] "=&r" (ptr_i), [i] "=&r" (i), [m] "=&r" (m), [tmp] "=&r" (tmp), - [istep] "=&r" (istep), [wi] "=&r" (wi), [wr] "=&r" (wr), [l] "=&r" (l), - [k] "=&r" (k), [round2] "=&r" (round2), [ptr_j] "=&r" (ptr_j), - [shift] "=&r" (shift), [scale] "=&r" 
(scale), [tempMax] "=&r" (tempMax) - : [n] "r" (n), [frfi] "r" (frfi), [kSinTable1024] "r" (kSinTable1024) - : "hi", "lo", "memory" + : [tmp1] "=&r"(tmp1), [tmp2] "=&r"(tmp2), [tmp3] "=&r"(tmp3), + [tmp4] "=&r"(tmp4), [tmp5] "=&r"(tmp5), [tmp6] "=&r"(tmp6), + [ptr_i] "=&r"(ptr_i), [i] "=&r"(i), [m] "=&r"(m), [tmp] "=&r"(tmp), + [istep] "=&r"(istep), [wi] "=&r"(wi), [wr] "=&r"(wr), [l] "=&r"(l), + [k] "=&r"(k), [round2] "=&r"(round2), [ptr_j] "=&r"(ptr_j), + [shift] "=&r"(shift), [scale] "=&r"(scale), [tempMax] "=&r"(tempMax) + : [n] "r"(n), [frfi] "r"(frfi), [kSinTable1024] "r"(kSinTable1024) + : "hi", "lo", "memory" #if defined(MIPS_DSP_R2_LE) - , "$ac1hi", "$ac1lo" + , + "$ac1hi", "$ac1lo" #endif // #if defined(MIPS_DSP_R2_LE) ); return scale; - } diff --git a/common_audio/signal_processing/copy_set_operations.c b/common_audio/signal_processing/copy_set_operations.c index ae709d40f0..059b0a198b 100644 --- a/common_audio/signal_processing/copy_set_operations.c +++ b/common_audio/signal_processing/copy_set_operations.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the implementation of functions * WebRtcSpl_MemSetW16() @@ -23,60 +22,51 @@ */ #include -#include "common_audio/signal_processing/include/signal_processing_library.h" +#include "common_audio/signal_processing/include/signal_processing_library.h" -void WebRtcSpl_MemSetW16(int16_t *ptr, int16_t set_value, size_t length) -{ - size_t j; - int16_t *arrptr = ptr; +void WebRtcSpl_MemSetW16(int16_t* ptr, int16_t set_value, size_t length) { + size_t j; + int16_t* arrptr = ptr; - for (j = length; j > 0; j--) - { - *arrptr++ = set_value; - } + for (j = length; j > 0; j--) { + *arrptr++ = set_value; + } } -void WebRtcSpl_MemSetW32(int32_t *ptr, int32_t set_value, size_t length) -{ - size_t j; - int32_t *arrptr = ptr; +void WebRtcSpl_MemSetW32(int32_t* ptr, int32_t set_value, size_t length) { + size_t j; + int32_t* arrptr = ptr; - for (j = length; j > 0; j--) - { - *arrptr++ = set_value; - } + for (j = length; j > 0; j--) { + *arrptr++ = set_value; + } } void WebRtcSpl_MemCpyReversedOrder(int16_t* dest, int16_t* source, - size_t length) -{ - size_t j; - int16_t* destPtr = dest; - int16_t* sourcePtr = source; + size_t length) { + size_t j; + int16_t* destPtr = dest; + int16_t* sourcePtr = source; - for (j = 0; j < length; j++) - { - *destPtr-- = *sourcePtr++; - } + for (j = 0; j < length; j++) { + *destPtr-- = *sourcePtr++; + } } -void WebRtcSpl_CopyFromEndW16(const int16_t *vector_in, +void WebRtcSpl_CopyFromEndW16(const int16_t* vector_in, size_t length, size_t samples, - int16_t *vector_out) -{ - // Copy the last of the input vector to vector_out - WEBRTC_SPL_MEMCPY_W16(vector_out, &vector_in[length - samples], samples); + int16_t* vector_out) { + // Copy the last of the input vector to vector_out + WEBRTC_SPL_MEMCPY_W16(vector_out, &vector_in[length - samples], samples); } -void WebRtcSpl_ZerosArrayW16(int16_t *vector, size_t length) -{ - WebRtcSpl_MemSetW16(vector, 0, length); +void WebRtcSpl_ZerosArrayW16(int16_t* vector, size_t length) { + WebRtcSpl_MemSetW16(vector, 0, length); } -void WebRtcSpl_ZerosArrayW32(int32_t *vector, size_t length) -{ - WebRtcSpl_MemSetW32(vector, 0, length); +void WebRtcSpl_ZerosArrayW32(int32_t* vector, size_t length) { + WebRtcSpl_MemSetW32(vector, 0, length); } diff --git a/common_audio/signal_processing/cross_correlation_mips.c b/common_audio/signal_processing/cross_correlation_mips.c index c395101900..c755e9900c 100644 --- 
a/common_audio/signal_processing/cross_correlation_mips.c +++ b/common_audio/signal_processing/cross_correlation_mips.c @@ -17,88 +17,86 @@ void WebRtcSpl_CrossCorrelation_mips(int32_t* cross_correlation, size_t dim_cross_correlation, int right_shifts, int step_seq2) { - int32_t t0 = 0, t1 = 0, t2 = 0, t3 = 0, sum = 0; - int16_t *pseq2 = NULL; - int16_t *pseq1 = NULL; - int16_t *pseq1_0 = (int16_t*)&seq1[0]; - int16_t *pseq2_0 = (int16_t*)&seq2[0]; + int16_t* pseq2 = NULL; + int16_t* pseq1 = NULL; + int16_t* pseq1_0 = (int16_t*)&seq1[0]; + int16_t* pseq2_0 = (int16_t*)&seq2[0]; int k = 0; - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "sll %[step_seq2], %[step_seq2], 1 \n\t" - "andi %[t0], %[dim_seq], 1 \n\t" - "bgtz %[t0], 3f \n\t" - " nop \n\t" - "1: \n\t" - "move %[pseq1], %[pseq1_0] \n\t" - "move %[pseq2], %[pseq2_0] \n\t" - "sra %[k], %[dim_seq], 1 \n\t" - "addiu %[dim_cc], %[dim_cc], -1 \n\t" - "xor %[sum], %[sum], %[sum] \n\t" - "2: \n\t" - "lh %[t0], 0(%[pseq1]) \n\t" - "lh %[t1], 0(%[pseq2]) \n\t" - "lh %[t2], 2(%[pseq1]) \n\t" - "lh %[t3], 2(%[pseq2]) \n\t" - "mul %[t0], %[t0], %[t1] \n\t" - "addiu %[k], %[k], -1 \n\t" - "mul %[t2], %[t2], %[t3] \n\t" - "addiu %[pseq1], %[pseq1], 4 \n\t" - "addiu %[pseq2], %[pseq2], 4 \n\t" - "srav %[t0], %[t0], %[right_shifts] \n\t" - "addu %[sum], %[sum], %[t0] \n\t" - "srav %[t2], %[t2], %[right_shifts] \n\t" - "bgtz %[k], 2b \n\t" - " addu %[sum], %[sum], %[t2] \n\t" - "addu %[pseq2_0], %[pseq2_0], %[step_seq2] \n\t" - "sw %[sum], 0(%[cc]) \n\t" - "bgtz %[dim_cc], 1b \n\t" - " addiu %[cc], %[cc], 4 \n\t" - "b 6f \n\t" - " nop \n\t" - "3: \n\t" - "move %[pseq1], %[pseq1_0] \n\t" - "move %[pseq2], %[pseq2_0] \n\t" - "sra %[k], %[dim_seq], 1 \n\t" - "addiu %[dim_cc], %[dim_cc], -1 \n\t" - "beqz %[k], 5f \n\t" - " xor %[sum], %[sum], %[sum] \n\t" - "4: \n\t" - "lh %[t0], 0(%[pseq1]) \n\t" - "lh %[t1], 0(%[pseq2]) \n\t" - "lh %[t2], 2(%[pseq1]) \n\t" - "lh %[t3], 2(%[pseq2]) \n\t" - "mul %[t0], %[t0], %[t1] \n\t" - "addiu %[k], %[k], -1 \n\t" - "mul %[t2], %[t2], %[t3] \n\t" - "addiu %[pseq1], %[pseq1], 4 \n\t" - "addiu %[pseq2], %[pseq2], 4 \n\t" - "srav %[t0], %[t0], %[right_shifts] \n\t" - "addu %[sum], %[sum], %[t0] \n\t" - "srav %[t2], %[t2], %[right_shifts] \n\t" - "bgtz %[k], 4b \n\t" - " addu %[sum], %[sum], %[t2] \n\t" - "5: \n\t" - "lh %[t0], 0(%[pseq1]) \n\t" - "lh %[t1], 0(%[pseq2]) \n\t" - "mul %[t0], %[t0], %[t1] \n\t" - "srav %[t0], %[t0], %[right_shifts] \n\t" - "addu %[sum], %[sum], %[t0] \n\t" - "addu %[pseq2_0], %[pseq2_0], %[step_seq2] \n\t" - "sw %[sum], 0(%[cc]) \n\t" - "bgtz %[dim_cc], 3b \n\t" - " addiu %[cc], %[cc], 4 \n\t" - "6: \n\t" - ".set pop \n\t" - : [step_seq2] "+r" (step_seq2), [t0] "=&r" (t0), [t1] "=&r" (t1), - [t2] "=&r" (t2), [t3] "=&r" (t3), [pseq1] "=&r" (pseq1), - [pseq2] "=&r" (pseq2), [pseq1_0] "+r" (pseq1_0), [pseq2_0] "+r" (pseq2_0), - [k] "=&r" (k), [dim_cc] "+r" (dim_cross_correlation), [sum] "=&r" (sum), - [cc] "+r" (cross_correlation) - : [dim_seq] "r" (dim_seq), [right_shifts] "r" (right_shifts) - : "hi", "lo", "memory" - ); + __asm __volatile( + ".set push \n\t" + ".set noreorder \n\t" + "sll %[step_seq2], %[step_seq2], 1 \n\t" + "andi %[t0], %[dim_seq], 1 \n\t" + "bgtz %[t0], 3f \n\t" + " nop \n\t" + "1: \n\t" + "move %[pseq1], %[pseq1_0] \n\t" + "move %[pseq2], %[pseq2_0] \n\t" + "sra %[k], %[dim_seq], 1 \n\t" + "addiu %[dim_cc], %[dim_cc], -1 \n\t" + "xor %[sum], %[sum], %[sum] \n\t" + "2: \n\t" + "lh %[t0], 0(%[pseq1]) \n\t" + "lh %[t1], 0(%[pseq2]) \n\t" + "lh %[t2], 
2(%[pseq1]) \n\t" + "lh %[t3], 2(%[pseq2]) \n\t" + "mul %[t0], %[t0], %[t1] \n\t" + "addiu %[k], %[k], -1 \n\t" + "mul %[t2], %[t2], %[t3] \n\t" + "addiu %[pseq1], %[pseq1], 4 \n\t" + "addiu %[pseq2], %[pseq2], 4 \n\t" + "srav %[t0], %[t0], %[right_shifts] \n\t" + "addu %[sum], %[sum], %[t0] \n\t" + "srav %[t2], %[t2], %[right_shifts] \n\t" + "bgtz %[k], 2b \n\t" + " addu %[sum], %[sum], %[t2] \n\t" + "addu %[pseq2_0], %[pseq2_0], %[step_seq2] \n\t" + "sw %[sum], 0(%[cc]) \n\t" + "bgtz %[dim_cc], 1b \n\t" + " addiu %[cc], %[cc], 4 \n\t" + "b 6f \n\t" + " nop \n\t" + "3: \n\t" + "move %[pseq1], %[pseq1_0] \n\t" + "move %[pseq2], %[pseq2_0] \n\t" + "sra %[k], %[dim_seq], 1 \n\t" + "addiu %[dim_cc], %[dim_cc], -1 \n\t" + "beqz %[k], 5f \n\t" + " xor %[sum], %[sum], %[sum] \n\t" + "4: \n\t" + "lh %[t0], 0(%[pseq1]) \n\t" + "lh %[t1], 0(%[pseq2]) \n\t" + "lh %[t2], 2(%[pseq1]) \n\t" + "lh %[t3], 2(%[pseq2]) \n\t" + "mul %[t0], %[t0], %[t1] \n\t" + "addiu %[k], %[k], -1 \n\t" + "mul %[t2], %[t2], %[t3] \n\t" + "addiu %[pseq1], %[pseq1], 4 \n\t" + "addiu %[pseq2], %[pseq2], 4 \n\t" + "srav %[t0], %[t0], %[right_shifts] \n\t" + "addu %[sum], %[sum], %[t0] \n\t" + "srav %[t2], %[t2], %[right_shifts] \n\t" + "bgtz %[k], 4b \n\t" + " addu %[sum], %[sum], %[t2] \n\t" + "5: \n\t" + "lh %[t0], 0(%[pseq1]) \n\t" + "lh %[t1], 0(%[pseq2]) \n\t" + "mul %[t0], %[t0], %[t1] \n\t" + "srav %[t0], %[t0], %[right_shifts] \n\t" + "addu %[sum], %[sum], %[t0] \n\t" + "addu %[pseq2_0], %[pseq2_0], %[step_seq2] \n\t" + "sw %[sum], 0(%[cc]) \n\t" + "bgtz %[dim_cc], 3b \n\t" + " addiu %[cc], %[cc], 4 \n\t" + "6: \n\t" + ".set pop \n\t" + : [step_seq2] "+r"(step_seq2), [t0] "=&r"(t0), [t1] "=&r"(t1), + [t2] "=&r"(t2), [t3] "=&r"(t3), [pseq1] "=&r"(pseq1), + [pseq2] "=&r"(pseq2), [pseq1_0] "+r"(pseq1_0), [pseq2_0] "+r"(pseq2_0), + [k] "=&r"(k), [dim_cc] "+r"(dim_cross_correlation), [sum] "=&r"(sum), + [cc] "+r"(cross_correlation) + : [dim_seq] "r"(dim_seq), [right_shifts] "r"(right_shifts) + : "hi", "lo", "memory"); } diff --git a/common_audio/signal_processing/cross_correlation_neon.c b/common_audio/signal_processing/cross_correlation_neon.c index f2afbdf9f5..409e734362 100644 --- a/common_audio/signal_processing/cross_correlation_neon.c +++ b/common_audio/signal_processing/cross_correlation_neon.c @@ -8,11 +8,11 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include + #include "common_audio/signal_processing/include/signal_processing_library.h" #include "rtc_base/system/arch.h" -#include - static inline void DotProductWithScaleNeon(int32_t* cross_correlation, const int16_t* vector1, const int16_t* vector2, @@ -28,14 +28,14 @@ static inline void DotProductWithScaleNeon(int32_t* cross_correlation, int16x8_t seq1_16x8 = vld1q_s16(vector1); int16x8_t seq2_16x8 = vld1q_s16(vector2); #if defined(WEBRTC_ARCH_ARM64) - int32x4_t tmp0 = vmull_s16(vget_low_s16(seq1_16x8), - vget_low_s16(seq2_16x8)); + int32x4_t tmp0 = + vmull_s16(vget_low_s16(seq1_16x8), vget_low_s16(seq2_16x8)); int32x4_t tmp1 = vmull_high_s16(seq1_16x8, seq2_16x8); #else - int32x4_t tmp0 = vmull_s16(vget_low_s16(seq1_16x8), - vget_low_s16(seq2_16x8)); - int32x4_t tmp1 = vmull_s16(vget_high_s16(seq1_16x8), - vget_high_s16(seq2_16x8)); + int32x4_t tmp0 = + vmull_s16(vget_low_s16(seq1_16x8), vget_low_s16(seq2_16x8)); + int32x4_t tmp1 = + vmull_s16(vget_high_s16(seq1_16x8), vget_high_s16(seq2_16x8)); #endif sum0 = vpadalq_s32(sum0, tmp0); sum1 = vpadalq_s32(sum1, tmp1); @@ -72,16 +72,13 @@ void WebRtcSpl_CrossCorrelationNeon(int32_t* cross_correlation, size_t dim_cross_correlation, int right_shifts, int step_seq2) { - size_t i = 0; + int i = 0; - for (i = 0; i < dim_cross_correlation; i++) { + for (i = 0; i < (int)dim_cross_correlation; i++) { const int16_t* seq1_ptr = seq1; const int16_t* seq2_ptr = seq2 + (step_seq2 * i); - DotProductWithScaleNeon(cross_correlation, - seq1_ptr, - seq2_ptr, - dim_seq, + DotProductWithScaleNeon(cross_correlation, seq1_ptr, seq2_ptr, dim_seq, right_shifts); cross_correlation++; } diff --git a/common_audio/signal_processing/division_operations.c b/common_audio/signal_processing/division_operations.c index 4764ddfccd..d0fbc24714 100644 --- a/common_audio/signal_processing/division_operations.c +++ b/common_audio/signal_processing/division_operations.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ - /* * This file contains implementations of the divisions * WebRtcSpl_DivU32U16() @@ -24,117 +23,101 @@ #include "common_audio/signal_processing/include/signal_processing_library.h" #include "rtc_base/sanitizer.h" -uint32_t WebRtcSpl_DivU32U16(uint32_t num, uint16_t den) -{ - // Guard against division with 0 - if (den != 0) - { - return (uint32_t)(num / den); - } else - { - return (uint32_t)0xFFFFFFFF; - } +uint32_t WebRtcSpl_DivU32U16(uint32_t num, uint16_t den) { + // Guard against division with 0 + if (den != 0) { + return (uint32_t)(num / den); + } else { + return (uint32_t)0xFFFFFFFF; + } } -int32_t WebRtcSpl_DivW32W16(int32_t num, int16_t den) -{ - // Guard against division with 0 - if (den != 0) - { - return (int32_t)(num / den); - } else - { - return (int32_t)0x7FFFFFFF; - } +int32_t WebRtcSpl_DivW32W16(int32_t num, int16_t den) { + // Guard against division with 0 + if (den != 0) { + return (int32_t)(num / den); + } else { + return (int32_t)0x7FFFFFFF; + } } -int16_t WebRtcSpl_DivW32W16ResW16(int32_t num, int16_t den) -{ - // Guard against division with 0 - if (den != 0) - { - return (int16_t)(num / den); - } else - { - return (int16_t)0x7FFF; - } +int16_t WebRtcSpl_DivW32W16ResW16(int32_t num, int16_t den) { + // Guard against division with 0 + if (den != 0) { + return (int16_t)(num / den); + } else { + return (int16_t)0x7FFF; + } } -int32_t WebRtcSpl_DivResultInQ31(int32_t num, int32_t den) -{ - int32_t L_num = num; - int32_t L_den = den; - int32_t div = 0; - int k = 31; - int change_sign = 0; - - if (num == 0) - return 0; - - if (num < 0) - { - change_sign++; - L_num = -num; - } - if (den < 0) - { - change_sign++; - L_den = -den; - } - while (k--) - { - div <<= 1; - L_num <<= 1; - if (L_num >= L_den) - { - L_num -= L_den; - div++; - } - } - if (change_sign == 1) - { - div = -div; +int32_t WebRtcSpl_DivResultInQ31(int32_t num, int32_t den) { + int32_t L_num = num; + int32_t L_den = den; + int32_t div = 0; + int k = 31; + int change_sign = 0; + + if (num == 0) + return 0; + + if (num < 0) { + change_sign++; + L_num = -num; + } + if (den < 0) { + change_sign++; + L_den = -den; + } + while (k--) { + div <<= 1; + L_num <<= 1; + if (L_num >= L_den) { + L_num -= L_den; + div++; } - return div; + } + if (change_sign == 1) { + div = -div; + } + return div; } -int32_t WebRtcSpl_DivW32HiLow(int32_t num, int16_t den_hi, int16_t den_low) -{ - int16_t approx, tmp_hi, tmp_low, num_hi, num_low; - int32_t tmpW32; +int32_t WebRtcSpl_DivW32HiLow(int32_t num, int16_t den_hi, int16_t den_low) { + int16_t approx, tmp_hi, tmp_low, num_hi, num_low; + int32_t tmpW32; - approx = (int16_t)WebRtcSpl_DivW32W16((int32_t)0x1FFFFFFF, den_hi); - // result in Q14 (Note: 3FFFFFFF = 0.5 in Q30) + approx = (int16_t)WebRtcSpl_DivW32W16((int32_t)0x1FFFFFFF, den_hi); + // result in Q14 (Note: 3FFFFFFF = 0.5 in Q30) - // tmpW32 = 1/den = approx * (2.0 - den * approx) (in Q30) - tmpW32 = (den_hi * approx << 1) + ((den_low * approx >> 15) << 1); - // tmpW32 = den * approx + // tmpW32 = 1/den = approx * (2.0 - den * approx) (in Q30) + tmpW32 = (den_hi * approx << 1) + ((den_low * approx >> 15) << 1); + // tmpW32 = den * approx - // result in Q30 (tmpW32 = 2.0-(den*approx)) - tmpW32 = (int32_t)((int64_t)0x7fffffffL - tmpW32); + // result in Q30 (tmpW32 = 2.0-(den*approx)) + tmpW32 = (int32_t)((int64_t)0x7fffffffL - tmpW32); - // Store tmpW32 in hi and low format - tmp_hi = (int16_t)(tmpW32 >> 16); - tmp_low = (int16_t)((tmpW32 - ((int32_t)tmp_hi << 16)) >> 1); + // Store tmpW32 in hi and low format + tmp_hi = 
(int16_t)(tmpW32 >> 16); + tmp_low = (int16_t)((tmpW32 - ((int32_t)tmp_hi << 16)) >> 1); - // tmpW32 = 1/den in Q29 - tmpW32 = (tmp_hi * approx + (tmp_low * approx >> 15)) << 1; + // tmpW32 = 1/den in Q29 + tmpW32 = (tmp_hi * approx + (tmp_low * approx >> 15)) << 1; - // 1/den in hi and low format - tmp_hi = (int16_t)(tmpW32 >> 16); - tmp_low = (int16_t)((tmpW32 - ((int32_t)tmp_hi << 16)) >> 1); + // 1/den in hi and low format + tmp_hi = (int16_t)(tmpW32 >> 16); + tmp_low = (int16_t)((tmpW32 - ((int32_t)tmp_hi << 16)) >> 1); - // Store num in hi and low format - num_hi = (int16_t)(num >> 16); - num_low = (int16_t)((num - ((int32_t)num_hi << 16)) >> 1); + // Store num in hi and low format + num_hi = (int16_t)(num >> 16); + num_low = (int16_t)((num - ((int32_t)num_hi << 16)) >> 1); - // num * (1/den) by 32 bit multiplication (result in Q28) + // num * (1/den) by 32 bit multiplication (result in Q28) - tmpW32 = num_hi * tmp_hi + (num_hi * tmp_low >> 15) + - (num_low * tmp_hi >> 15); + tmpW32 = + num_hi * tmp_hi + (num_hi * tmp_low >> 15) + (num_low * tmp_hi >> 15); - // Put result in Q31 (convert from Q28) - tmpW32 = WEBRTC_SPL_LSHIFT_W32(tmpW32, 3); + // Put result in Q31 (convert from Q28) + tmpW32 = WEBRTC_SPL_LSHIFT_W32(tmpW32, 3); - return tmpW32; + return tmpW32; } diff --git a/common_audio/signal_processing/dot_product_with_scale.cc b/common_audio/signal_processing/dot_product_with_scale.cc index 00799dae02..56e0bcd5c7 100644 --- a/common_audio/signal_processing/dot_product_with_scale.cc +++ b/common_audio/signal_processing/dot_product_with_scale.cc @@ -30,5 +30,5 @@ int32_t WebRtcSpl_DotProductWithScale(const int16_t* vector1, sum += (vector1[i] * vector2[i]) >> scaling; } - return rtc::saturated_cast(sum); + return webrtc::saturated_cast(sum); } diff --git a/common_audio/signal_processing/downsample_fast.c b/common_audio/signal_processing/downsample_fast.c index 80fdc58a49..c9d9021ef3 100644 --- a/common_audio/signal_processing/downsample_fast.c +++ b/common_audio/signal_processing/downsample_fast.c @@ -9,7 +9,6 @@ */ #include "common_audio/signal_processing/include/signal_processing_library.h" - #include "rtc_base/checks.h" #include "rtc_base/sanitizer.h" @@ -30,8 +29,8 @@ int WebRtcSpl_DownsampleFastC(const int16_t* data_in, size_t endpos = delay + factor * (data_out_length - 1) + 1; // Return error if any of the running conditions doesn't meet. - if (data_out_length == 0 || coefficients_length == 0 - || data_in_length < endpos) { + if (data_out_length == 0 || coefficients_length == 0 || + data_in_length < endpos) { return -1; } @@ -45,10 +44,10 @@ int WebRtcSpl_DownsampleFastC(const int16_t* data_in, // Negative overflow is permitted here, because this is // auto-regressive filters, and the state for each batch run is // stored in the "negative" positions of the output vector. - rtc_MsanCheckInitialized(&data_in[(ptrdiff_t) i - (ptrdiff_t) j], - sizeof(data_in[0]), 1); + rtc_MsanCheckInitialized(&data_in[(ptrdiff_t)i - (ptrdiff_t)j], + sizeof(data_in[0]), 1); // out_s32 is in Q12 domain. - out_s32 += coefficients[j] * data_in[(ptrdiff_t) i - (ptrdiff_t) j]; + out_s32 += coefficients[j] * data_in[(ptrdiff_t)i - (ptrdiff_t)j]; } out_s32 >>= 12; // Q0. 
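
The comments in WebRtcSpl_DivW32HiLow above describe one Newton-Raphson refinement of a seed reciprocal, 1/den ~= approx * (2.0 - den * approx), carried out in Q14/Q29/Q30 fixed point before the final multiply by num. A minimal double-precision illustration of that identity (standalone sketch, not library code; the denominator 0.731 and the coarse seed are arbitrary values chosen only for the demonstration):

/* One Newton-Raphson reciprocal refinement, as stated in the
 * WebRtcSpl_DivW32HiLow comments, shown in double precision.
 * The library performs the same step entirely in fixed point. */
#include <stdio.h>

int main(void) {
  double den = 0.731;          /* Denominator, assumed in [0.5, 1). */
  double approx = 1.0 / 0.75;  /* Coarse seed for 1/den. */
  double refined = approx * (2.0 - den * approx);  /* One NR iteration. */
  printf("exact=%.9f seed=%.9f refined=%.9f\n", 1.0 / den, approx, refined);
  return 0;
}
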
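WebRtcSpl_DownsampleFastC above accumulates a Q12 FIR sum for every factor-th input sample, seeds the accumulator with 2048 (0.5 in Q12) so the final shift rounds, shifts back to Q0 and saturates to int16_t; the MIPS and NEON files that follow hand-optimize the same loop. A self-contained sketch of that pattern follows. The names DownsampleQ12Sketch and SaturateToInt16 are hypothetical, and it adds the simplifying requirement delay >= coefficients_length - 1 so no "negative" state positions of data_in are read, which the library version deliberately permits.

/* Sketch of the Q12 accumulate-round-saturate downsampling loop.
 * Not the WebRTC implementation; a simplified standalone version. */
#include <stddef.h>
#include <stdint.h>

static int16_t SaturateToInt16(int32_t v) {
  if (v > 32767) return 32767;
  if (v < -32768) return -32768;
  return (int16_t)v;
}

int DownsampleQ12Sketch(const int16_t* data_in, size_t data_in_length,
                        int16_t* data_out, size_t data_out_length,
                        const int16_t* coefficients /* Q12 */,
                        size_t coefficients_length, size_t factor,
                        size_t delay) {
  if (data_out_length == 0 || coefficients_length == 0 || factor == 0 ||
      delay + 1 < coefficients_length) {
    return -1;  /* Last check keeps every input index non-negative. */
  }
  /* One past the last input index the loop will touch. */
  size_t endpos = delay + factor * (data_out_length - 1) + 1;
  if (data_in_length < endpos) {
    return -1;
  }
  size_t out_idx = 0;
  for (size_t i = delay; i < endpos; i += factor, ++out_idx) {
    int32_t acc = 2048;  /* 0.5 in Q12, so the final shift rounds. */
    for (size_t j = 0; j < coefficients_length; ++j) {
      acc += (int32_t)coefficients[j] * data_in[i - j];  /* Q0*Q12 -> Q12. */
    }
    data_out[out_idx] = SaturateToInt16(acc >> 12);  /* Back to Q0. */
  }
  return 0;
}
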
diff --git a/common_audio/signal_processing/downsample_fast_mips.c b/common_audio/signal_processing/downsample_fast_mips.c index 0f3f3a069f..1cd373fc8b 100644 --- a/common_audio/signal_processing/downsample_fast_mips.c +++ b/common_audio/signal_processing/downsample_fast_mips.c @@ -25,7 +25,7 @@ int WebRtcSpl_DownsampleFast_mips(const int16_t* data_in, int32_t out_s32 = 0; size_t endpos = delay + factor * (data_out_length - 1) + 1; - int32_t tmp1, tmp2, tmp3, tmp4, factor_2; + int32_t tmp1, tmp2, tmp3, tmp4, factor_2; int16_t* p_coefficients; int16_t* p_data_in; int16_t* p_data_in_0 = (int16_t*)&data_in[delay]; @@ -36,134 +36,132 @@ int WebRtcSpl_DownsampleFast_mips(const int16_t* data_in, #endif // #if !defined(MIPS_DSP_R1_LE) // Return error if any of the running conditions doesn't meet. - if (data_out_length == 0 || coefficients_length == 0 - || data_in_length < endpos) { + if (data_out_length == 0 || coefficients_length == 0 || + data_in_length < endpos) { return -1; } #if defined(MIPS_DSP_R2_LE) - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "subu %[i], %[endpos], %[delay] \n\t" - "sll %[factor_2], %[factor], 1 \n\t" - "1: \n\t" - "move %[p_data_in], %[p_data_in_0] \n\t" - "mult $zero, $zero \n\t" - "move %[p_coefs], %[p_coefs_0] \n\t" - "sra %[j], %[coef_length], 2 \n\t" - "beq %[j], $zero, 3f \n\t" - " andi %[k], %[coef_length], 3 \n\t" - "2: \n\t" - "lwl %[tmp1], 1(%[p_data_in]) \n\t" - "lwl %[tmp2], 3(%[p_coefs]) \n\t" - "lwl %[tmp3], -3(%[p_data_in]) \n\t" - "lwl %[tmp4], 7(%[p_coefs]) \n\t" - "lwr %[tmp1], -2(%[p_data_in]) \n\t" - "lwr %[tmp2], 0(%[p_coefs]) \n\t" - "lwr %[tmp3], -6(%[p_data_in]) \n\t" - "lwr %[tmp4], 4(%[p_coefs]) \n\t" - "packrl.ph %[tmp1], %[tmp1], %[tmp1] \n\t" - "packrl.ph %[tmp3], %[tmp3], %[tmp3] \n\t" - "dpa.w.ph $ac0, %[tmp1], %[tmp2] \n\t" - "dpa.w.ph $ac0, %[tmp3], %[tmp4] \n\t" - "addiu %[j], %[j], -1 \n\t" - "addiu %[p_data_in], %[p_data_in], -8 \n\t" - "bgtz %[j], 2b \n\t" - " addiu %[p_coefs], %[p_coefs], 8 \n\t" - "3: \n\t" - "beq %[k], $zero, 5f \n\t" - " nop \n\t" - "4: \n\t" - "lhu %[tmp1], 0(%[p_data_in]) \n\t" - "lhu %[tmp2], 0(%[p_coefs]) \n\t" - "addiu %[p_data_in], %[p_data_in], -2 \n\t" - "addiu %[k], %[k], -1 \n\t" - "dpa.w.ph $ac0, %[tmp1], %[tmp2] \n\t" - "bgtz %[k], 4b \n\t" - " addiu %[p_coefs], %[p_coefs], 2 \n\t" - "5: \n\t" - "extr_r.w %[out_s32], $ac0, 12 \n\t" - "addu %[p_data_in_0], %[p_data_in_0], %[factor_2] \n\t" - "subu %[i], %[i], %[factor] \n\t" - "shll_s.w %[out_s32], %[out_s32], 16 \n\t" - "sra %[out_s32], %[out_s32], 16 \n\t" - "sh %[out_s32], 0(%[data_out]) \n\t" - "bgtz %[i], 1b \n\t" - " addiu %[data_out], %[data_out], 2 \n\t" - ".set pop \n\t" - : [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [tmp3] "=&r" (tmp3), - [tmp4] "=&r" (tmp4), [p_data_in] "=&r" (p_data_in), - [p_data_in_0] "+r" (p_data_in_0), [p_coefs] "=&r" (p_coefficients), - [j] "=&r" (j), [out_s32] "=&r" (out_s32), [factor_2] "=&r" (factor_2), - [i] "=&r" (i), [k] "=&r" (k) - : [coef_length] "r" (coefficients_length), [data_out] "r" (data_out), - [p_coefs_0] "r" (p_coefficients_0), [endpos] "r" (endpos), - [delay] "r" (delay), [factor] "r" (factor) - : "memory", "hi", "lo" - ); + __asm __volatile( + ".set push \n\t" + ".set noreorder \n\t" + "subu %[i], %[endpos], %[delay] \n\t" + "sll %[factor_2], %[factor], 1 \n\t" + "1: \n\t" + "move %[p_data_in], %[p_data_in_0] \n\t" + "mult $zero, $zero \n\t" + "move %[p_coefs], %[p_coefs_0] \n\t" + "sra %[j], %[coef_length], 2 \n\t" + "beq %[j], $zero, 3f \n\t" + " andi %[k], %[coef_length], 3 
\n\t" + "2: \n\t" + "lwl %[tmp1], 1(%[p_data_in]) \n\t" + "lwl %[tmp2], 3(%[p_coefs]) \n\t" + "lwl %[tmp3], -3(%[p_data_in]) \n\t" + "lwl %[tmp4], 7(%[p_coefs]) \n\t" + "lwr %[tmp1], -2(%[p_data_in]) \n\t" + "lwr %[tmp2], 0(%[p_coefs]) \n\t" + "lwr %[tmp3], -6(%[p_data_in]) \n\t" + "lwr %[tmp4], 4(%[p_coefs]) \n\t" + "packrl.ph %[tmp1], %[tmp1], %[tmp1] \n\t" + "packrl.ph %[tmp3], %[tmp3], %[tmp3] \n\t" + "dpa.w.ph $ac0, %[tmp1], %[tmp2] \n\t" + "dpa.w.ph $ac0, %[tmp3], %[tmp4] \n\t" + "addiu %[j], %[j], -1 \n\t" + "addiu %[p_data_in], %[p_data_in], -8 \n\t" + "bgtz %[j], 2b \n\t" + " addiu %[p_coefs], %[p_coefs], 8 \n\t" + "3: \n\t" + "beq %[k], $zero, 5f \n\t" + " nop \n\t" + "4: \n\t" + "lhu %[tmp1], 0(%[p_data_in]) \n\t" + "lhu %[tmp2], 0(%[p_coefs]) \n\t" + "addiu %[p_data_in], %[p_data_in], -2 \n\t" + "addiu %[k], %[k], -1 \n\t" + "dpa.w.ph $ac0, %[tmp1], %[tmp2] \n\t" + "bgtz %[k], 4b \n\t" + " addiu %[p_coefs], %[p_coefs], 2 \n\t" + "5: \n\t" + "extr_r.w %[out_s32], $ac0, 12 \n\t" + "addu %[p_data_in_0], %[p_data_in_0], %[factor_2] \n\t" + "subu %[i], %[i], %[factor] \n\t" + "shll_s.w %[out_s32], %[out_s32], 16 \n\t" + "sra %[out_s32], %[out_s32], 16 \n\t" + "sh %[out_s32], 0(%[data_out]) \n\t" + "bgtz %[i], 1b \n\t" + " addiu %[data_out], %[data_out], 2 \n\t" + ".set pop \n\t" + : [tmp1] "=&r"(tmp1), [tmp2] "=&r"(tmp2), [tmp3] "=&r"(tmp3), + [tmp4] "=&r"(tmp4), [p_data_in] "=&r"(p_data_in), + [p_data_in_0] "+r"(p_data_in_0), [p_coefs] "=&r"(p_coefficients), + [j] "=&r"(j), [out_s32] "=&r"(out_s32), [factor_2] "=&r"(factor_2), + [i] "=&r"(i), [k] "=&r"(k) + : [coef_length] "r"(coefficients_length), [data_out] "r"(data_out), + [p_coefs_0] "r"(p_coefficients_0), [endpos] "r"(endpos), + [delay] "r"(delay), [factor] "r"(factor) + : "memory", "hi", "lo"); #else // #if defined(MIPS_DSP_R2_LE) - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "sll %[factor_2], %[factor], 1 \n\t" - "subu %[i], %[endpos], %[delay] \n\t" - "1: \n\t" - "move %[p_data_in], %[p_data_in_0] \n\t" - "addiu %[out_s32], $zero, 2048 \n\t" - "move %[p_coefs], %[p_coefs_0] \n\t" - "sra %[j], %[coef_length], 1 \n\t" - "beq %[j], $zero, 3f \n\t" - " andi %[k], %[coef_length], 1 \n\t" - "2: \n\t" - "lh %[tmp1], 0(%[p_data_in]) \n\t" - "lh %[tmp2], 0(%[p_coefs]) \n\t" - "lh %[tmp3], -2(%[p_data_in]) \n\t" - "lh %[tmp4], 2(%[p_coefs]) \n\t" - "mul %[tmp1], %[tmp1], %[tmp2] \n\t" - "addiu %[p_coefs], %[p_coefs], 4 \n\t" - "mul %[tmp3], %[tmp3], %[tmp4] \n\t" - "addiu %[j], %[j], -1 \n\t" - "addiu %[p_data_in], %[p_data_in], -4 \n\t" - "addu %[tmp1], %[tmp1], %[tmp3] \n\t" - "bgtz %[j], 2b \n\t" - " addu %[out_s32], %[out_s32], %[tmp1] \n\t" - "3: \n\t" - "beq %[k], $zero, 4f \n\t" - " nop \n\t" - "lh %[tmp1], 0(%[p_data_in]) \n\t" - "lh %[tmp2], 0(%[p_coefs]) \n\t" - "mul %[tmp1], %[tmp1], %[tmp2] \n\t" - "addu %[out_s32], %[out_s32], %[tmp1] \n\t" - "4: \n\t" - "sra %[out_s32], %[out_s32], 12 \n\t" - "addu %[p_data_in_0], %[p_data_in_0], %[factor_2] \n\t" + __asm __volatile( + ".set push \n\t" + ".set noreorder \n\t" + "sll %[factor_2], %[factor], 1 \n\t" + "subu %[i], %[endpos], %[delay] \n\t" + "1: \n\t" + "move %[p_data_in], %[p_data_in_0] \n\t" + "addiu %[out_s32], $zero, 2048 \n\t" + "move %[p_coefs], %[p_coefs_0] \n\t" + "sra %[j], %[coef_length], 1 \n\t" + "beq %[j], $zero, 3f \n\t" + " andi %[k], %[coef_length], 1 \n\t" + "2: \n\t" + "lh %[tmp1], 0(%[p_data_in]) \n\t" + "lh %[tmp2], 0(%[p_coefs]) \n\t" + "lh %[tmp3], -2(%[p_data_in]) \n\t" + "lh %[tmp4], 2(%[p_coefs]) \n\t" + "mul %[tmp1], %[tmp1], 
%[tmp2] \n\t" + "addiu %[p_coefs], %[p_coefs], 4 \n\t" + "mul %[tmp3], %[tmp3], %[tmp4] \n\t" + "addiu %[j], %[j], -1 \n\t" + "addiu %[p_data_in], %[p_data_in], -4 \n\t" + "addu %[tmp1], %[tmp1], %[tmp3] \n\t" + "bgtz %[j], 2b \n\t" + " addu %[out_s32], %[out_s32], %[tmp1] \n\t" + "3: \n\t" + "beq %[k], $zero, 4f \n\t" + " nop \n\t" + "lh %[tmp1], 0(%[p_data_in]) \n\t" + "lh %[tmp2], 0(%[p_coefs]) \n\t" + "mul %[tmp1], %[tmp1], %[tmp2] \n\t" + "addu %[out_s32], %[out_s32], %[tmp1] \n\t" + "4: \n\t" + "sra %[out_s32], %[out_s32], 12 \n\t" + "addu %[p_data_in_0], %[p_data_in_0], %[factor_2] \n\t" #if defined(MIPS_DSP_R1_LE) - "shll_s.w %[out_s32], %[out_s32], 16 \n\t" - "sra %[out_s32], %[out_s32], 16 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "slt %[tmp1], %[max_16], %[out_s32] \n\t" - "movn %[out_s32], %[max_16], %[tmp1] \n\t" - "slt %[tmp1], %[out_s32], %[min_16] \n\t" - "movn %[out_s32], %[min_16], %[tmp1] \n\t" + "shll_s.w %[out_s32], %[out_s32], 16 \n\t" + "sra %[out_s32], %[out_s32], 16 \n\t" +#else // #if defined(MIPS_DSP_R1_LE) + "slt %[tmp1], %[max_16], %[out_s32] \n\t" + "movn %[out_s32], %[max_16], %[tmp1] \n\t" + "slt %[tmp1], %[out_s32], %[min_16] \n\t" + "movn %[out_s32], %[min_16], %[tmp1] \n\t" #endif // #if defined(MIPS_DSP_R1_LE) - "subu %[i], %[i], %[factor] \n\t" - "sh %[out_s32], 0(%[data_out]) \n\t" - "bgtz %[i], 1b \n\t" - " addiu %[data_out], %[data_out], 2 \n\t" - ".set pop \n\t" - : [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [tmp3] "=&r" (tmp3), - [tmp4] "=&r" (tmp4), [p_data_in] "=&r" (p_data_in), [k] "=&r" (k), - [p_data_in_0] "+r" (p_data_in_0), [p_coefs] "=&r" (p_coefficients), - [j] "=&r" (j), [out_s32] "=&r" (out_s32), [factor_2] "=&r" (factor_2), - [i] "=&r" (i) - : [coef_length] "r" (coefficients_length), [data_out] "r" (data_out), - [p_coefs_0] "r" (p_coefficients_0), [endpos] "r" (endpos), + "subu %[i], %[i], %[factor] \n\t" + "sh %[out_s32], 0(%[data_out]) \n\t" + "bgtz %[i], 1b \n\t" + " addiu %[data_out], %[data_out], 2 \n\t" + ".set pop \n\t" + : [tmp1] "=&r"(tmp1), [tmp2] "=&r"(tmp2), [tmp3] "=&r"(tmp3), + [tmp4] "=&r"(tmp4), [p_data_in] "=&r"(p_data_in), [k] "=&r"(k), + [p_data_in_0] "+r"(p_data_in_0), [p_coefs] "=&r"(p_coefficients), + [j] "=&r"(j), [out_s32] "=&r"(out_s32), [factor_2] "=&r"(factor_2), + [i] "=&r"(i) + : [coef_length] "r"(coefficients_length), [data_out] "r"(data_out), + [p_coefs_0] "r"(p_coefficients_0), [endpos] "r"(endpos), #if !defined(MIPS_DSP_R1_LE) - [max_16] "r" (max_16), [min_16] "r" (min_16), + [max_16] "r"(max_16), [min_16] "r"(min_16), #endif // #if !defined(MIPS_DSP_R1_LE) - [delay] "r" (delay), [factor] "r" (factor) - : "memory", "hi", "lo" - ); + [delay] "r"(delay), [factor] "r"(factor) + : "memory", "hi", "lo"); #endif // #if defined(MIPS_DSP_R2_LE) return 0; } diff --git a/common_audio/signal_processing/downsample_fast_neon.c b/common_audio/signal_processing/downsample_fast_neon.c index 36fc0c8aee..e97ca4783d 100644 --- a/common_audio/signal_processing/downsample_fast_neon.c +++ b/common_audio/signal_processing/downsample_fast_neon.c @@ -8,10 +8,11 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "common_audio/signal_processing/include/signal_processing_library.h" - #include +#include "common_audio/signal_processing/include/signal_processing_library.h" +#include "rtc_base/checks.h" + // NEON intrinsics version of WebRtcSpl_DownsampleFast() // for ARM 32-bit/64-bit platforms. 
int WebRtcSpl_DownsampleFastNeon(const int16_t* data_in, @@ -22,19 +23,24 @@ int WebRtcSpl_DownsampleFastNeon(const int16_t* data_in, size_t coefficients_length, int factor, size_t delay) { - size_t i = 0; - size_t j = 0; + // Using signed indexes to be able to compute negative i-j that + // is used to index data_in. + int i = 0; + int j = 0; int32_t out_s32 = 0; - size_t endpos = delay + factor * (data_out_length - 1) + 1; + int endpos = delay + factor * (data_out_length - 1) + 1; size_t res = data_out_length & 0x7; - size_t endpos1 = endpos - factor * res; + int endpos1 = endpos - factor * res; // Return error if any of the running conditions doesn't meet. - if (data_out_length == 0 || coefficients_length == 0 - || data_in_length < endpos) { + if (data_out_length == 0 || coefficients_length == 0 || + (int)data_in_length < endpos) { return -1; } + RTC_DCHECK_GE(endpos, 0); + RTC_DCHECK_GE(endpos1, 0); + // First part, unroll the loop 8 times, with 3 subcases // (factor == 2, 4, others). switch (factor) { @@ -46,7 +52,7 @@ int WebRtcSpl_DownsampleFastNeon(const int16_t* data_in, #if defined(WEBRTC_ARCH_ARM64) // Unroll the loop 2 times. - for (j = 0; j < coefficients_length - 1; j += 2) { + for (j = 0; j < (int)coefficients_length - 1; j += 2) { int32x2_t coeff32 = vld1_dup_s32((int32_t*)&coefficients[j]); int16x4_t coeff16x4 = vreinterpret_s16_s32(coeff32); int16x8x2_t in16x8x2 = vld2q_s16(&data_in[i - j - 1]); @@ -68,7 +74,7 @@ int WebRtcSpl_DownsampleFastNeon(const int16_t* data_in, out32x4_1 = vmlal_lane_s16(out32x4_1, in16x4_3, coeff16x4, 0); } - for (; j < coefficients_length; j++) { + for (; j < (int)coefficients_length; j++) { int16x4_t coeff16x4 = vld1_dup_s16(&coefficients[j]); int16x8x2_t in16x8x2 = vld2q_s16(&data_in[i - j]); @@ -87,7 +93,7 @@ int WebRtcSpl_DownsampleFastNeon(const int16_t* data_in, #else // On ARMv7, the loop unrolling 2 times results in performance // regression. - for (j = 0; j < coefficients_length; j++) { + for (j = 0; j < (int)coefficients_length; j++) { int16x4_t coeff16x4 = vld1_dup_s16(&coefficients[j]); int16x8x2_t in16x8x2 = vld2q_s16(&data_in[i - j]); @@ -114,7 +120,7 @@ int WebRtcSpl_DownsampleFastNeon(const int16_t* data_in, int32x4_t out32x4_1 = vdupq_n_s32(2048); // Unroll the loop 4 times. - for (j = 0; j < coefficients_length - 3; j += 4) { + for (j = 0; j < (int)coefficients_length - 3; j += 4) { int16x4_t coeff16x4 = vld1_s16(&coefficients[j]); int16x8x4_t in16x8x4 = vld4q_s16(&data_in[i - j - 3]); @@ -143,7 +149,7 @@ int WebRtcSpl_DownsampleFastNeon(const int16_t* data_in, out32x4_1 = vmlal_lane_s16(out32x4_1, in16x4_7, coeff16x4, 0); } - for (; j < coefficients_length; j++) { + for (; j < (int)coefficients_length; j++) { int16x4_t coeff16x4 = vld1_dup_s16(&coefficients[j]); int16x8x4_t in16x8x4 = vld4q_s16(&data_in[i - j]); @@ -174,7 +180,7 @@ int WebRtcSpl_DownsampleFastNeon(const int16_t* data_in, int32x4_t out32x4_0 = vdupq_n_s32(2048); int32x4_t out32x4_1 = vdupq_n_s32(2048); - for (j = 0; j < coefficients_length; j++) { + for (j = 0; j < (int)coefficients_length; j++) { int16x4_t coeff16x4 = vld1_dup_s16(&coefficients[j]); int16x4_t in16x4_0 = vld1_dup_s16(&data_in[i - j]); in16x4_0 = vld1_lane_s16(&data_in[i + factor - j], in16x4_0, 1); @@ -204,7 +210,7 @@ int WebRtcSpl_DownsampleFastNeon(const int16_t* data_in, for (; i < endpos; i += factor) { out_s32 = 2048; // Round value, 0.5 in Q12. 
- for (j = 0; j < coefficients_length; j++) { + for (j = 0; j < (int)coefficients_length; j++) { out_s32 = WebRtc_MulAccumW16(coefficients[j], data_in[i - j], out_s32); } diff --git a/common_audio/signal_processing/energy.c b/common_audio/signal_processing/energy.c index 5cce6b8777..f69ec1e4cd 100644 --- a/common_audio/signal_processing/energy.c +++ b/common_audio/signal_processing/energy.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the function WebRtcSpl_Energy(). * The description header can be found in signal_processing_library.h @@ -19,21 +18,19 @@ int32_t WebRtcSpl_Energy(int16_t* vector, size_t vector_length, - int* scale_factor) -{ - int32_t en = 0; - size_t i; - int scaling = - WebRtcSpl_GetScalingSquare(vector, vector_length, vector_length); - size_t looptimes = vector_length; - int16_t *vectorptr = vector; + int* scale_factor) { + int32_t en = 0; + size_t i; + int scaling = + WebRtcSpl_GetScalingSquare(vector, vector_length, vector_length); + size_t looptimes = vector_length; + int16_t* vectorptr = vector; - for (i = 0; i < looptimes; i++) - { - en += (*vectorptr * *vectorptr) >> scaling; - vectorptr++; - } - *scale_factor = scaling; + for (i = 0; i < looptimes; i++) { + en += (*vectorptr * *vectorptr) >> scaling; + vectorptr++; + } + *scale_factor = scaling; - return en; + return en; } diff --git a/common_audio/signal_processing/filter_ar.c b/common_audio/signal_processing/filter_ar.c index b1f666d723..bab1973b18 100644 --- a/common_audio/signal_processing/filter_ar.c +++ b/common_audio/signal_processing/filter_ar.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the function WebRtcSpl_FilterAR(). * The description header can be found in signal_processing_library.h @@ -16,7 +15,6 @@ */ #include "common_audio/signal_processing/include/signal_processing_library.h" - #include "rtc_base/checks.h" size_t WebRtcSpl_FilterAR(const int16_t* a, @@ -26,70 +24,60 @@ size_t WebRtcSpl_FilterAR(const int16_t* a, int16_t* state, size_t state_length, int16_t* state_low, - size_t state_low_length, int16_t* filtered, - int16_t* filtered_low, - size_t filtered_low_length) -{ - int64_t o; - int32_t oLOW; - size_t i, j, stop; - const int16_t* x_ptr = &x[0]; - int16_t* filteredFINAL_ptr = filtered; - int16_t* filteredFINAL_LOW_ptr = filtered_low; - - for (i = 0; i < x_length; i++) - { - // Calculate filtered[i] and filtered_low[i] - const int16_t* a_ptr = &a[1]; - // The index can become negative, but the arrays will never be indexed - // with it when negative. Nevertheless, the index cannot be a size_t - // because of this. - int filtered_ix = (int)i - 1; - int16_t* state_ptr = &state[state_length - 1]; - int16_t* state_low_ptr = &state_low[state_length - 1]; + int16_t* filtered_low) { + int64_t o; + int32_t oLOW; + size_t i, j, stop; + const int16_t* x_ptr = &x[0]; + int16_t* filteredFINAL_ptr = filtered; + int16_t* filteredFINAL_LOW_ptr = filtered_low; - o = (int32_t)(*x_ptr++) * (1 << 12); - oLOW = (int32_t)0; + for (i = 0; i < x_length; i++) { + // Calculate filtered[i] and filtered_low[i] + const int16_t* a_ptr = &a[1]; + // The index can become negative, but the arrays will never be indexed + // with it when negative. Nevertheless, the index cannot be a size_t + // because of this. + int filtered_ix = (int)i - 1; + int16_t* state_ptr = &state[state_length - 1]; + int16_t* state_low_ptr = &state_low[state_length - 1]; - stop = (i < a_length) ? 
i + 1 : a_length; - for (j = 1; j < stop; j++) - { - RTC_DCHECK_GE(filtered_ix, 0); - o -= *a_ptr * filtered[filtered_ix]; - oLOW -= *a_ptr++ * filtered_low[filtered_ix]; - --filtered_ix; - } - for (j = i + 1; j < a_length; j++) - { - o -= *a_ptr * *state_ptr--; - oLOW -= *a_ptr++ * *state_low_ptr--; - } + o = (int32_t)(*x_ptr++) * (1 << 12); + oLOW = (int32_t)0; - o += (oLOW >> 12); - *filteredFINAL_ptr = (int16_t)((o + (int32_t)2048) >> 12); - *filteredFINAL_LOW_ptr++ = - (int16_t)(o - ((int32_t)(*filteredFINAL_ptr++) * (1 << 12))); + stop = (i < a_length) ? i + 1 : a_length; + for (j = 1; j < stop; j++) { + RTC_DCHECK_GE(filtered_ix, 0); + o -= *a_ptr * filtered[filtered_ix]; + oLOW -= *a_ptr++ * filtered_low[filtered_ix]; + --filtered_ix; } + for (j = i + 1; j < a_length; j++) { + o -= *a_ptr * *state_ptr--; + oLOW -= *a_ptr++ * *state_low_ptr--; + } + + o += (oLOW >> 12); + *filteredFINAL_ptr = (int16_t)((o + (int32_t)2048) >> 12); + *filteredFINAL_LOW_ptr++ = + (int16_t)(o - ((int32_t)(*filteredFINAL_ptr++) * (1 << 12))); + } - // Save the filter state - if (x_length >= state_length) - { - WebRtcSpl_CopyFromEndW16(filtered, x_length, a_length - 1, state); - WebRtcSpl_CopyFromEndW16(filtered_low, x_length, a_length - 1, state_low); - } else - { - for (i = 0; i < state_length - x_length; i++) - { - state[i] = state[i + x_length]; - state_low[i] = state_low[i + x_length]; - } - for (i = 0; i < x_length; i++) - { - state[state_length - x_length + i] = filtered[i]; - state_low[state_length - x_length + i] = filtered_low[i]; - } + // Save the filter state + if (x_length >= state_length) { + WebRtcSpl_CopyFromEndW16(filtered, x_length, a_length - 1, state); + WebRtcSpl_CopyFromEndW16(filtered_low, x_length, a_length - 1, state_low); + } else { + for (i = 0; i < state_length - x_length; i++) { + state[i] = state[i + x_length]; + state_low[i] = state_low[i + x_length]; + } + for (i = 0; i < x_length; i++) { + state[state_length - x_length + i] = filtered[i]; + state_low[state_length - x_length + i] = filtered_low[i]; } + } - return x_length; + return x_length; } diff --git a/common_audio/signal_processing/filter_ar_fast_q12.c b/common_audio/signal_processing/filter_ar_fast_q12.c index 8b8bdb1af5..eceef4cf1c 100644 --- a/common_audio/signal_processing/filter_ar_fast_q12.c +++ b/common_audio/signal_processing/filter_ar_fast_q12.c @@ -8,10 +8,10 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "stddef.h" +#include -#include "rtc_base/checks.h" #include "common_audio/signal_processing/include/signal_processing_library.h" +#include "rtc_base/checks.h" // TODO(bjornv): Change the return type to report errors. @@ -34,7 +34,7 @@ void WebRtcSpl_FilterARFastQ12(const int16_t* data_in, // Negative overflow is permitted here, because this is // auto-regressive filters, and the state for each batch run is // stored in the "negative" positions of the output vector. - sum += coefficients[j] * data_out[(ptrdiff_t) i - (ptrdiff_t) j]; + sum += coefficients[j] * data_out[(ptrdiff_t)i - (ptrdiff_t)j]; } output = coefficients[0] * data_in[i]; diff --git a/common_audio/signal_processing/filter_ar_fast_q12_mips.c b/common_audio/signal_processing/filter_ar_fast_q12_mips.c index b9ad30f006..53b295b286 100644 --- a/common_audio/signal_processing/filter_ar_fast_q12_mips.c +++ b/common_audio/signal_processing/filter_ar_fast_q12_mips.c @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. 
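// For reference: a plain-C sketch of the Q12 auto-regressive recursion that
// WebRtcSpl_FilterARFastQ12 above and the MIPS assembly below implement. As in
// the shipped code, the filter history lives in the "negative" positions just
// in front of data_out; the exact rounding and saturation below are
// illustrative assumptions, not the reference implementation.
#include <stddef.h>
#include <stdint.h>

static void FilterARFastQ12Sketch(const int16_t* data_in,
                                  int16_t* data_out,
                                  const int16_t* coefficients,
                                  size_t coefficients_length,
                                  size_t data_length) {
  for (size_t i = 0; i < data_length; ++i) {
    int64_t sum = 0;
    // Feedback taps read past outputs (or the preloaded state before
    // data_out[0] when i < j).
    for (size_t j = 1; j < coefficients_length; ++j) {
      sum += (int32_t)coefficients[j] * data_out[(ptrdiff_t)i - (ptrdiff_t)j];
    }
    int64_t output = (int32_t)coefficients[0] * data_in[i] - sum;
    output = (output + 2048) >> 12;  // Round from Q12 back to Q0.
    if (output > 32767) output = 32767;
    if (output < -32768) output = -32768;
    data_out[i] = (int16_t)output;
  }
}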
*/ -#include "rtc_base/checks.h" #include "common_audio/signal_processing/include/signal_processing_library.h" +#include "rtc_base/checks.h" void WebRtcSpl_FilterARFastQ12(const int16_t* data_in, int16_t* data_out, @@ -28,113 +28,110 @@ void WebRtcSpl_FilterARFastQ12(const int16_t* data_in, RTC_DCHECK_GT(data_length, 0); RTC_DCHECK_GT(coefficients_length, 1); - __asm __volatile ( - ".set push \n\t" - ".set noreorder \n\t" - "addiu %[i], %[data_length], 0 \n\t" - "lh %[coef0], 0(%[coefficients]) \n\t" - "addiu %[j], %[coefficients_length], -1 \n\t" - "andi %[k], %[j], 1 \n\t" - "sll %[offset], %[j], 1 \n\t" - "subu %[outptr], %[data_out], %[offset] \n\t" - "addiu %[inptr], %[data_in], 0 \n\t" - "bgtz %[k], 3f \n\t" - " addu %[coefptr], %[coefficients], %[offset] \n\t" - "1: \n\t" - "lh %[r0], 0(%[inptr]) \n\t" - "addiu %[i], %[i], -1 \n\t" - "addiu %[tmpout], %[outptr], 0 \n\t" - "mult %[r0], %[coef0] \n\t" - "2: \n\t" - "lh %[r0], 0(%[tmpout]) \n\t" - "lh %[r1], 0(%[coefptr]) \n\t" - "lh %[r2], 2(%[tmpout]) \n\t" - "lh %[r3], -2(%[coefptr]) \n\t" - "addiu %[tmpout], %[tmpout], 4 \n\t" - "msub %[r0], %[r1] \n\t" - "msub %[r2], %[r3] \n\t" - "addiu %[j], %[j], -2 \n\t" - "bgtz %[j], 2b \n\t" - " addiu %[coefptr], %[coefptr], -4 \n\t" + __asm __volatile( + ".set push \n\t" + ".set noreorder \n\t" + "addiu %[i], %[data_length], 0 \n\t" + "lh %[coef0], 0(%[coefficients]) \n\t" + "addiu %[j], %[coefficients_length], -1 \n\t" + "andi %[k], %[j], 1 \n\t" + "sll %[offset], %[j], 1 \n\t" + "subu %[outptr], %[data_out], %[offset] \n\t" + "addiu %[inptr], %[data_in], 0 \n\t" + "bgtz %[k], 3f \n\t" + " addu %[coefptr], %[coefficients], %[offset] \n\t" + "1: \n\t" + "lh %[r0], 0(%[inptr]) \n\t" + "addiu %[i], %[i], -1 \n\t" + "addiu %[tmpout], %[outptr], 0 \n\t" + "mult %[r0], %[coef0] \n\t" + "2: \n\t" + "lh %[r0], 0(%[tmpout]) \n\t" + "lh %[r1], 0(%[coefptr]) \n\t" + "lh %[r2], 2(%[tmpout]) \n\t" + "lh %[r3], -2(%[coefptr]) \n\t" + "addiu %[tmpout], %[tmpout], 4 \n\t" + "msub %[r0], %[r1] \n\t" + "msub %[r2], %[r3] \n\t" + "addiu %[j], %[j], -2 \n\t" + "bgtz %[j], 2b \n\t" + " addiu %[coefptr], %[coefptr], -4 \n\t" #if defined(MIPS_DSP_R1_LE) - "extr_r.w %[r0], $ac0, 12 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "mflo %[r0] \n\t" + "extr_r.w %[r0], $ac0, 12 \n\t" +#else // #if defined(MIPS_DSP_R1_LE) + "mflo %[r0] \n\t" #endif // #if defined(MIPS_DSP_R1_LE) - "addu %[coefptr], %[coefficients], %[offset] \n\t" - "addiu %[inptr], %[inptr], 2 \n\t" - "addiu %[j], %[coefficients_length], -1 \n\t" + "addu %[coefptr], %[coefficients], %[offset] \n\t" + "addiu %[inptr], %[inptr], 2 \n\t" + "addiu %[j], %[coefficients_length], -1 \n\t" #if defined(MIPS_DSP_R1_LE) - "shll_s.w %[r0], %[r0], 16 \n\t" - "sra %[r0], %[r0], 16 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r0], %[r0], 2048 \n\t" - "sra %[r0], %[r0], 12 \n\t" - "slt %[r1], %[max16], %[r0] \n\t" - "movn %[r0], %[max16], %[r1] \n\t" - "slt %[r1], %[r0], %[min16] \n\t" - "movn %[r0], %[min16], %[r1] \n\t" + "shll_s.w %[r0], %[r0], 16 \n\t" + "sra %[r0], %[r0], 16 \n\t" +#else // #if defined(MIPS_DSP_R1_LE) + "addiu %[r0], %[r0], 2048 \n\t" + "sra %[r0], %[r0], 12 \n\t" + "slt %[r1], %[max16], %[r0] \n\t" + "movn %[r0], %[max16], %[r1] \n\t" + "slt %[r1], %[r0], %[min16] \n\t" + "movn %[r0], %[min16], %[r1] \n\t" #endif // #if defined(MIPS_DSP_R1_LE) - "sh %[r0], 0(%[tmpout]) \n\t" - "bgtz %[i], 1b \n\t" - " addiu %[outptr], %[outptr], 2 \n\t" - "b 5f \n\t" - " nop \n\t" - "3: \n\t" - "lh %[r0], 0(%[inptr]) \n\t" - "addiu %[i], %[i], -1 \n\t" 
- "addiu %[tmpout], %[outptr], 0 \n\t" - "mult %[r0], %[coef0] \n\t" - "4: \n\t" - "lh %[r0], 0(%[tmpout]) \n\t" - "lh %[r1], 0(%[coefptr]) \n\t" - "lh %[r2], 2(%[tmpout]) \n\t" - "lh %[r3], -2(%[coefptr]) \n\t" - "addiu %[tmpout], %[tmpout], 4 \n\t" - "msub %[r0], %[r1] \n\t" - "msub %[r2], %[r3] \n\t" - "addiu %[j], %[j], -2 \n\t" - "bgtz %[j], 4b \n\t" - " addiu %[coefptr], %[coefptr], -4 \n\t" - "lh %[r0], 0(%[tmpout]) \n\t" - "lh %[r1], 0(%[coefptr]) \n\t" - "msub %[r0], %[r1] \n\t" + "sh %[r0], 0(%[tmpout]) \n\t" + "bgtz %[i], 1b \n\t" + " addiu %[outptr], %[outptr], 2 \n\t" + "b 5f \n\t" + " nop \n\t" + "3: \n\t" + "lh %[r0], 0(%[inptr]) \n\t" + "addiu %[i], %[i], -1 \n\t" + "addiu %[tmpout], %[outptr], 0 \n\t" + "mult %[r0], %[coef0] \n\t" + "4: \n\t" + "lh %[r0], 0(%[tmpout]) \n\t" + "lh %[r1], 0(%[coefptr]) \n\t" + "lh %[r2], 2(%[tmpout]) \n\t" + "lh %[r3], -2(%[coefptr]) \n\t" + "addiu %[tmpout], %[tmpout], 4 \n\t" + "msub %[r0], %[r1] \n\t" + "msub %[r2], %[r3] \n\t" + "addiu %[j], %[j], -2 \n\t" + "bgtz %[j], 4b \n\t" + " addiu %[coefptr], %[coefptr], -4 \n\t" + "lh %[r0], 0(%[tmpout]) \n\t" + "lh %[r1], 0(%[coefptr]) \n\t" + "msub %[r0], %[r1] \n\t" #if defined(MIPS_DSP_R1_LE) - "extr_r.w %[r0], $ac0, 12 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "mflo %[r0] \n\t" + "extr_r.w %[r0], $ac0, 12 \n\t" +#else // #if defined(MIPS_DSP_R1_LE) + "mflo %[r0] \n\t" #endif // #if defined(MIPS_DSP_R1_LE) - "addu %[coefptr], %[coefficients], %[offset] \n\t" - "addiu %[inptr], %[inptr], 2 \n\t" - "addiu %[j], %[coefficients_length], -1 \n\t" + "addu %[coefptr], %[coefficients], %[offset] \n\t" + "addiu %[inptr], %[inptr], 2 \n\t" + "addiu %[j], %[coefficients_length], -1 \n\t" #if defined(MIPS_DSP_R1_LE) - "shll_s.w %[r0], %[r0], 16 \n\t" - "sra %[r0], %[r0], 16 \n\t" -#else // #if defined(MIPS_DSP_R1_LE) - "addiu %[r0], %[r0], 2048 \n\t" - "sra %[r0], %[r0], 12 \n\t" - "slt %[r1], %[max16], %[r0] \n\t" - "movn %[r0], %[max16], %[r1] \n\t" - "slt %[r1], %[r0], %[min16] \n\t" - "movn %[r0], %[min16], %[r1] \n\t" + "shll_s.w %[r0], %[r0], 16 \n\t" + "sra %[r0], %[r0], 16 \n\t" +#else // #if defined(MIPS_DSP_R1_LE) + "addiu %[r0], %[r0], 2048 \n\t" + "sra %[r0], %[r0], 12 \n\t" + "slt %[r1], %[max16], %[r0] \n\t" + "movn %[r0], %[max16], %[r1] \n\t" + "slt %[r1], %[r0], %[min16] \n\t" + "movn %[r0], %[min16], %[r1] \n\t" #endif // #if defined(MIPS_DSP_R1_LE) - "sh %[r0], 2(%[tmpout]) \n\t" - "bgtz %[i], 3b \n\t" - " addiu %[outptr], %[outptr], 2 \n\t" - "5: \n\t" - ".set pop \n\t" - : [i] "=&r" (i), [j] "=&r" (j), [k] "=&r" (k), [r0] "=&r" (r0), - [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3), - [coef0] "=&r" (coef0), [offset] "=&r" (offset), - [outptr] "=&r" (outptr), [inptr] "=&r" (inptr), - [coefptr] "=&r" (coefptr), [tmpout] "=&r" (tmpout) - : [coefficients] "r" (coefficients), [data_length] "r" (data_length), - [coefficients_length] "r" (coefficients_length), + "sh %[r0], 2(%[tmpout]) \n\t" + "bgtz %[i], 3b \n\t" + " addiu %[outptr], %[outptr], 2 \n\t" + "5: \n\t" + ".set pop \n\t" + : [i] "=&r"(i), [j] "=&r"(j), [k] "=&r"(k), [r0] "=&r"(r0), + [r1] "=&r"(r1), [r2] "=&r"(r2), [r3] "=&r"(r3), [coef0] "=&r"(coef0), + [offset] "=&r"(offset), [outptr] "=&r"(outptr), [inptr] "=&r"(inptr), + [coefptr] "=&r"(coefptr), [tmpout] "=&r"(tmpout) + : [coefficients] "r"(coefficients), [data_length] "r"(data_length), + [coefficients_length] "r"(coefficients_length), #if !defined(MIPS_DSP_R1_LE) - [max16] "r" (max16), [min16] "r" (min16), + [max16] "r"(max16), [min16] "r"(min16), #endif - 
[data_out] "r" (data_out), [data_in] "r" (data_in) - : "hi", "lo", "memory" - ); + [data_out] "r"(data_out), [data_in] "r"(data_in) + : "hi", "lo", "memory"); } - diff --git a/common_audio/signal_processing/filter_ma_fast_q12.c b/common_audio/signal_processing/filter_ma_fast_q12.c index 329d47e14f..57f5929b5a 100644 --- a/common_audio/signal_processing/filter_ma_fast_q12.c +++ b/common_audio/signal_processing/filter_ma_fast_q12.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the function WebRtcSpl_FilterMAFastQ12(). * The description header can be found in signal_processing_library.h @@ -16,40 +15,36 @@ */ #include "common_audio/signal_processing/include/signal_processing_library.h" - #include "rtc_base/sanitizer.h" void WebRtcSpl_FilterMAFastQ12(const int16_t* in_ptr, int16_t* out_ptr, const int16_t* B, size_t B_length, - size_t length) -{ - size_t i, j; + size_t length) { + size_t i, j; - rtc_MsanCheckInitialized(B, sizeof(B[0]), B_length); - rtc_MsanCheckInitialized(in_ptr - B_length + 1, sizeof(in_ptr[0]), - B_length + length - 1); + rtc_MsanCheckInitialized(B, sizeof(B[0]), B_length); + rtc_MsanCheckInitialized(in_ptr - B_length + 1, sizeof(in_ptr[0]), + B_length + length - 1); - for (i = 0; i < length; i++) - { - int32_t o = 0; + for (i = 0; i < length; i++) { + int32_t o = 0; - for (j = 0; j < B_length; j++) - { - // Negative overflow is permitted here, because this is - // auto-regressive filters, and the state for each batch run is - // stored in the "negative" positions of the output vector. - o += B[j] * in_ptr[(ptrdiff_t) i - (ptrdiff_t) j]; - } + for (j = 0; j < B_length; j++) { + // Negative overflow is permitted here, because this is + // auto-regressive filters, and the state for each batch run is + // stored in the "negative" positions of the output vector. + o += B[j] * in_ptr[(ptrdiff_t)i - (ptrdiff_t)j]; + } - // If output is higher than 32768, saturate it. Same with negative side - // 2^27 = 134217728, which corresponds to 32768 in Q12 + // If output is higher than 32768, saturate it. Same with negative side + // 2^27 = 134217728, which corresponds to 32768 in Q12 - // Saturate the output - o = WEBRTC_SPL_SAT((int32_t)134215679, o, (int32_t)-134217728); + // Saturate the output + o = WEBRTC_SPL_SAT((int32_t)134215679, o, (int32_t)-134217728); - *out_ptr++ = (int16_t)((o + (int32_t)2048) >> 12); - } - return; + *out_ptr++ = (int16_t)((o + (int32_t)2048) >> 12); + } + return; } diff --git a/common_audio/signal_processing/get_hanning_window.c b/common_audio/signal_processing/get_hanning_window.c index 8f29da8d9b..0a6aa5bc6d 100644 --- a/common_audio/signal_processing/get_hanning_window.c +++ b/common_audio/signal_processing/get_hanning_window.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the function WebRtcSpl_GetHanningWindow(). 
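// Usage sketch for the MA (FIR) filter above: unlike the AR variants, the
// "negative" indices read past *input* samples, so the caller must keep
// B_length - 1 samples of history immediately in front of in_ptr (this is
// exactly what the rtc_MsanCheckInitialized call asserts). The buffer layout,
// the kTaps/kFrame sizes and the helper name below are illustrative
// assumptions; the header path is the one used elsewhere in this patch.
#include <stdint.h>
#include <string.h>

#include "common_audio/signal_processing/include/signal_processing_library.h"

enum { kTaps = 5, kFrame = 80 };

static void RunMaFilterSketch(const int16_t b_q12[kTaps],
                              const int16_t* frame,        // kFrame new samples
                              int16_t* out,                // kFrame outputs
                              int16_t state[kTaps - 1]) {  // previous tail
  int16_t buf[kTaps - 1 + kFrame];
  memcpy(buf, state, (kTaps - 1) * sizeof(int16_t));           // History.
  memcpy(buf + kTaps - 1, frame, kFrame * sizeof(int16_t));    // New input.
  WebRtcSpl_FilterMAFastQ12(buf + kTaps - 1, out, b_q12, kTaps, kFrame);
  memcpy(state, buf + kFrame, (kTaps - 1) * sizeof(int16_t));  // Save tail.
}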
* The description header can be found in signal_processing_library.h @@ -19,59 +18,47 @@ // Hanning table with 256 entries static const int16_t kHanningTable[] = { - 1, 2, 6, 10, 15, 22, 30, 39, - 50, 62, 75, 89, 104, 121, 138, 157, - 178, 199, 222, 246, 271, 297, 324, 353, - 383, 413, 446, 479, 513, 549, 586, 624, - 663, 703, 744, 787, 830, 875, 920, 967, - 1015, 1064, 1114, 1165, 1218, 1271, 1325, 1381, - 1437, 1494, 1553, 1612, 1673, 1734, 1796, 1859, - 1924, 1989, 2055, 2122, 2190, 2259, 2329, 2399, - 2471, 2543, 2617, 2691, 2765, 2841, 2918, 2995, - 3073, 3152, 3232, 3312, 3393, 3475, 3558, 3641, - 3725, 3809, 3895, 3980, 4067, 4154, 4242, 4330, - 4419, 4509, 4599, 4689, 4781, 4872, 4964, 5057, - 5150, 5244, 5338, 5432, 5527, 5622, 5718, 5814, - 5910, 6007, 6104, 6202, 6299, 6397, 6495, 6594, - 6693, 6791, 6891, 6990, 7090, 7189, 7289, 7389, - 7489, 7589, 7690, 7790, 7890, 7991, 8091, 8192, - 8293, 8393, 8494, 8594, 8694, 8795, 8895, 8995, - 9095, 9195, 9294, 9394, 9493, 9593, 9691, 9790, - 9889, 9987, 10085, 10182, 10280, 10377, 10474, 10570, -10666, 10762, 10857, 10952, 11046, 11140, 11234, 11327, -11420, 11512, 11603, 11695, 11785, 11875, 11965, 12054, -12142, 12230, 12317, 12404, 12489, 12575, 12659, 12743, -12826, 12909, 12991, 13072, 13152, 13232, 13311, 13389, -13466, 13543, 13619, 13693, 13767, 13841, 13913, 13985, -14055, 14125, 14194, 14262, 14329, 14395, 14460, 14525, -14588, 14650, 14711, 14772, 14831, 14890, 14947, 15003, -15059, 15113, 15166, 15219, 15270, 15320, 15369, 15417, -15464, 15509, 15554, 15597, 15640, 15681, 15721, 15760, -15798, 15835, 15871, 15905, 15938, 15971, 16001, 16031, -16060, 16087, 16113, 16138, 16162, 16185, 16206, 16227, -16246, 16263, 16280, 16295, 16309, 16322, 16334, 16345, -16354, 16362, 16369, 16374, 16378, 16382, 16383, 16384 -}; - -void WebRtcSpl_GetHanningWindow(int16_t *v, size_t size) -{ - size_t jj; - int16_t *vptr1; + 1, 2, 6, 10, 15, 22, 30, 39, 50, 62, 75, + 89, 104, 121, 138, 157, 178, 199, 222, 246, 271, 297, + 324, 353, 383, 413, 446, 479, 513, 549, 586, 624, 663, + 703, 744, 787, 830, 875, 920, 967, 1015, 1064, 1114, 1165, + 1218, 1271, 1325, 1381, 1437, 1494, 1553, 1612, 1673, 1734, 1796, + 1859, 1924, 1989, 2055, 2122, 2190, 2259, 2329, 2399, 2471, 2543, + 2617, 2691, 2765, 2841, 2918, 2995, 3073, 3152, 3232, 3312, 3393, + 3475, 3558, 3641, 3725, 3809, 3895, 3980, 4067, 4154, 4242, 4330, + 4419, 4509, 4599, 4689, 4781, 4872, 4964, 5057, 5150, 5244, 5338, + 5432, 5527, 5622, 5718, 5814, 5910, 6007, 6104, 6202, 6299, 6397, + 6495, 6594, 6693, 6791, 6891, 6990, 7090, 7189, 7289, 7389, 7489, + 7589, 7690, 7790, 7890, 7991, 8091, 8192, 8293, 8393, 8494, 8594, + 8694, 8795, 8895, 8995, 9095, 9195, 9294, 9394, 9493, 9593, 9691, + 9790, 9889, 9987, 10085, 10182, 10280, 10377, 10474, 10570, 10666, 10762, + 10857, 10952, 11046, 11140, 11234, 11327, 11420, 11512, 11603, 11695, 11785, + 11875, 11965, 12054, 12142, 12230, 12317, 12404, 12489, 12575, 12659, 12743, + 12826, 12909, 12991, 13072, 13152, 13232, 13311, 13389, 13466, 13543, 13619, + 13693, 13767, 13841, 13913, 13985, 14055, 14125, 14194, 14262, 14329, 14395, + 14460, 14525, 14588, 14650, 14711, 14772, 14831, 14890, 14947, 15003, 15059, + 15113, 15166, 15219, 15270, 15320, 15369, 15417, 15464, 15509, 15554, 15597, + 15640, 15681, 15721, 15760, 15798, 15835, 15871, 15905, 15938, 15971, 16001, + 16031, 16060, 16087, 16113, 16138, 16162, 16185, 16206, 16227, 16246, 16263, + 16280, 16295, 16309, 16322, 16334, 16345, 16354, 16362, 16369, 16374, 16378, + 16382, 16383, 16384}; - int32_t 
index; - int32_t factor = ((int32_t)0x40000000); +void WebRtcSpl_GetHanningWindow(int16_t* v, size_t size) { + size_t jj; + int16_t* vptr1; - factor = WebRtcSpl_DivW32W16(factor, (int16_t)size); - if (size < 513) - index = (int32_t)-0x200000; - else - index = (int32_t)-0x100000; - vptr1 = v; + int32_t index; + int32_t factor = ((int32_t)0x40000000); - for (jj = 0; jj < size; jj++) - { - index += factor; - (*vptr1++) = kHanningTable[index >> 22]; - } + factor = WebRtcSpl_DivW32W16(factor, (int16_t)size); + if (size < 513) + index = (int32_t)-0x200000; + else + index = (int32_t)-0x100000; + vptr1 = v; + for (jj = 0; jj < size; jj++) { + index += factor; + (*vptr1++) = kHanningTable[index >> 22]; + } } diff --git a/common_audio/signal_processing/get_scaling_square.c b/common_audio/signal_processing/get_scaling_square.c index 4eb126941e..4b4986b766 100644 --- a/common_audio/signal_processing/get_scaling_square.c +++ b/common_audio/signal_processing/get_scaling_square.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the function WebRtcSpl_GetScalingSquare(). * The description header can be found in signal_processing_library.h @@ -19,28 +18,24 @@ int16_t WebRtcSpl_GetScalingSquare(int16_t* in_vector, size_t in_vector_length, - size_t times) -{ - int16_t nbits = WebRtcSpl_GetSizeInBits((uint32_t)times); - size_t i; - int16_t smax = -1; - int16_t sabs; - int16_t *sptr = in_vector; - int16_t t; - size_t looptimes = in_vector_length; + size_t times) { + int16_t nbits = WebRtcSpl_GetSizeInBits((uint32_t)times); + size_t i; + int16_t smax = -1; + int16_t sabs; + int16_t* sptr = in_vector; + int16_t t; + size_t looptimes = in_vector_length; - for (i = looptimes; i > 0; i--) - { - sabs = (*sptr > 0 ? *sptr++ : -*sptr++); - smax = (sabs > smax ? sabs : smax); - } - t = WebRtcSpl_NormW32(WEBRTC_SPL_MUL(smax, smax)); + for (i = looptimes; i > 0; i--) { + sabs = (*sptr > 0 ? *sptr++ : -*sptr++); + smax = (sabs > smax ? sabs : smax); + } + t = WebRtcSpl_NormW32(WEBRTC_SPL_MUL(smax, smax)); - if (smax == 0) - { - return 0; // Since norm(0) returns 0 - } else - { - return (t > nbits) ? 0 : nbits - t; - } + if (smax == 0) { + return 0; // Since norm(0) returns 0 + } else { + return (t > nbits) ? 0 : nbits - t; + } } diff --git a/common_audio/signal_processing/ilbc_specific_functions.c b/common_audio/signal_processing/ilbc_specific_functions.c deleted file mode 100644 index cbdd3dcbcd..0000000000 --- a/common_audio/signal_processing/ilbc_specific_functions.c +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
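// For reference: how WebRtcSpl_GetScalingSquare above ties into
// WebRtcSpl_Energy earlier in this patch. The returned scale factor is the
// number of right shifts applied to every squared sample so that summing
// `times` of them cannot overflow 32 bits; the caller can shift back to
// approximate sum(x[i]^2). A minimal sketch, assuming the
// signal_processing_library.h declarations shown in this patch.
#include <stddef.h>
#include <stdint.h>

#include "common_audio/signal_processing/include/signal_processing_library.h"

static int64_t ApproxEnergySketch(int16_t* vector, size_t length) {
  int scaling = WebRtcSpl_GetScalingSquare(vector, length, length);
  int32_t en = 0;
  for (size_t i = 0; i < length; ++i) {
    en += (vector[i] * vector[i]) >> scaling;  // Same loop as WebRtcSpl_Energy.
  }
  return (int64_t)en << scaling;  // Undo the scaling (loses the shifted bits).
}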
- */ - - -/* - * This file contains implementations of the iLBC specific functions - * WebRtcSpl_ReverseOrderMultArrayElements() - * WebRtcSpl_ElementwiseVectorMult() - * WebRtcSpl_AddVectorsAndShift() - * WebRtcSpl_AddAffineVectorToVector() - * WebRtcSpl_AffineTransformVector() - * - */ - -#include "common_audio/signal_processing/include/signal_processing_library.h" - -void WebRtcSpl_ReverseOrderMultArrayElements(int16_t *out, const int16_t *in, - const int16_t *win, - size_t vector_length, - int16_t right_shifts) -{ - size_t i; - int16_t *outptr = out; - const int16_t *inptr = in; - const int16_t *winptr = win; - for (i = 0; i < vector_length; i++) - { - *outptr++ = (int16_t)((*inptr++ * *winptr--) >> right_shifts); - } -} - -void WebRtcSpl_ElementwiseVectorMult(int16_t *out, const int16_t *in, - const int16_t *win, size_t vector_length, - int16_t right_shifts) -{ - size_t i; - int16_t *outptr = out; - const int16_t *inptr = in; - const int16_t *winptr = win; - for (i = 0; i < vector_length; i++) - { - *outptr++ = (int16_t)((*inptr++ * *winptr++) >> right_shifts); - } -} - -void WebRtcSpl_AddVectorsAndShift(int16_t *out, const int16_t *in1, - const int16_t *in2, size_t vector_length, - int16_t right_shifts) -{ - size_t i; - int16_t *outptr = out; - const int16_t *in1ptr = in1; - const int16_t *in2ptr = in2; - for (i = vector_length; i > 0; i--) - { - (*outptr++) = (int16_t)(((*in1ptr++) + (*in2ptr++)) >> right_shifts); - } -} - -void WebRtcSpl_AddAffineVectorToVector(int16_t *out, const int16_t *in, - int16_t gain, int32_t add_constant, - int16_t right_shifts, - size_t vector_length) -{ - size_t i; - - for (i = 0; i < vector_length; i++) - { - out[i] += (int16_t)((in[i] * gain + add_constant) >> right_shifts); - } -} - -void WebRtcSpl_AffineTransformVector(int16_t *out, const int16_t *in, - int16_t gain, int32_t add_constant, - int16_t right_shifts, size_t vector_length) -{ - size_t i; - - for (i = 0; i < vector_length; i++) - { - out[i] = (int16_t)((in[i] * gain + add_constant) >> right_shifts); - } -} diff --git a/common_audio/signal_processing/include/signal_processing_library.h b/common_audio/signal_processing/include/signal_processing_library.h index 48c9b309b4..72c5fc400c 100644 --- a/common_audio/signal_processing/include/signal_processing_library.h +++ b/common_audio/signal_processing/include/signal_processing_library.h @@ -40,7 +40,7 @@ #define WEBRTC_SPL_MUL_16_U16(a, b) ((int32_t)(int16_t)(a) * (uint16_t)(b)) // clang-format off -// clang-format would choose some identation +// clang-format would choose some indentation // leading to presubmit error (cpplint.py) #ifndef WEBRTC_ARCH_ARM_V7 // For ARMv7 platforms, these are inline functions in spl_inl_armv7.h @@ -389,36 +389,121 @@ int WebRtcSpl_ScaleAndAddVectorsWithRound_mips(const int16_t* in_vector1, #endif // End: Vector scaling operations. -// iLBC specific functions. Implementations in ilbc_specific_functions.c. -// Description at bottom of file. +// +// WebRtcSpl_ReverseOrderMultArrayElements(...) +// +// Performs the vector operation: +// out_vector[n] = (in_vector[n]*window[-n])>>right_shifts +// +// Input: +// - in_vector : Input vector +// - window : Window vector (should be reversed). 
The pointer +// should be set to the last value in the vector +// - right_shifts : Number of right bit shift to be applied after the +// multiplication +// - vector_length : Number of elements in `in_vector` +// +// Output: +// - out_vector : Output vector (can be same as `in_vector`) +// void WebRtcSpl_ReverseOrderMultArrayElements(int16_t* out_vector, const int16_t* in_vector, const int16_t* window, size_t vector_length, int16_t right_shifts); + +// +// WebRtcSpl_ElementwiseVectorMult(...) +// +// Performs the vector operation: +// out_vector[n] = (in_vector[n]*window[n])>>right_shifts +// +// Input: +// - in_vector : Input vector +// - window : Window vector. +// - right_shifts : Number of right bit shift to be applied after the +// multiplication +// - vector_length : Number of elements in `in_vector` +// +// Output: +// - out_vector : Output vector (can be same as `in_vector`) +// void WebRtcSpl_ElementwiseVectorMult(int16_t* out_vector, const int16_t* in_vector, const int16_t* window, size_t vector_length, int16_t right_shifts); + +// +// WebRtcSpl_AddVectorsAndShift(...) +// +// Performs the vector operation: +// out_vector[k] = (in_vector1[k] + in_vector2[k])>>right_shifts +// +// Input: +// - in_vector1 : Input vector 1 +// - in_vector2 : Input vector 2 +// - right_shifts : Number of right bit shift to be applied after the +// multiplication +// - vector_length : Number of elements in `in_vector1` and `in_vector2` +// +// Output: +// - out_vector : Output vector (can be same as `in_vector1`) +// void WebRtcSpl_AddVectorsAndShift(int16_t* out_vector, const int16_t* in_vector1, const int16_t* in_vector2, size_t vector_length, int16_t right_shifts); + +// +// WebRtcSpl_AddAffineVectorToVector(...) +// +// Adds an affine transformed vector to another vector `out_vector`, i.e, +// performs +// out_vector[k] += (in_vector[k]*gain+add_constant)>>right_shifts +// +// Input: +// - in_vector : Input vector +// - gain : Gain value, used to multiply the in vector with +// - add_constant : Constant value to add (usually 1<<(right_shifts-1), +// but others can be used as well +// - right_shifts : Number of right bit shifts (0-16) +// - vector_length : Number of samples in `in_vector` and `out_vector` +// +// Output: +// - out_vector : Vector with the output +// void WebRtcSpl_AddAffineVectorToVector(int16_t* out_vector, const int16_t* in_vector, int16_t gain, int32_t add_constant, int16_t right_shifts, size_t vector_length); + +// +// WebRtcSpl_AffineTransformVector(...) +// +// Affine transforms a vector, i.e, performs +// out_vector[k] = (in_vector[k]*gain+add_constant)>>right_shifts +// +// Input: +// - in_vector : Input vector +// - gain : Gain value, used to multiply the in vector with +// - add_constant : Constant value to add (usually 1<<(right_shifts-1), +// but others can be used as well +// - right_shifts : Number of right bit shifts (0-16) +// - vector_length : Number of samples in `in_vector` and `out_vector` +// +// Output: +// - out_vector : Vector with the output +// void WebRtcSpl_AffineTransformVector(int16_t* out_vector, const int16_t* in_vector, int16_t gain, int32_t add_constant, int16_t right_shifts, size_t vector_length); -// End: iLBC specific functions. // Signal processing operations. 
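// A small usage sketch of the vector helpers documented above, which are no
// longer tagged as iLBC-specific. The gain value and function name below are
// illustrative; the rounding constant follows the documented convention of
// 1 << (right_shifts - 1).
#include <stddef.h>
#include <stdint.h>

#include "common_audio/signal_processing/include/signal_processing_library.h"

static void ScaleByQ12GainSketch(const int16_t* in,
                                 int16_t* out,
                                 size_t length) {
  const int16_t kGainQ12 = 2458;  // Roughly 0.6 in Q12 (illustrative).
  const int16_t kRightShifts = 12;
  const int32_t kRoundQ12 = 1 << (kRightShifts - 1);
  // out[k] = (in[k] * kGainQ12 + kRoundQ12) >> kRightShifts
  WebRtcSpl_AffineTransformVector(out, in, kGainQ12, kRoundQ12, kRightShifts,
                                  length);
}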
@@ -621,10 +706,8 @@ size_t WebRtcSpl_FilterAR(const int16_t* ar_coef, int16_t* filter_state, size_t filter_state_length, int16_t* filter_state_low, - size_t filter_state_low_length, int16_t* out_vector, - int16_t* out_vector_low, - size_t out_vector_low_length); + int16_t* out_vector_low); // WebRtcSpl_FilterMAFastQ12(...) // @@ -1058,7 +1141,7 @@ void WebRtcSpl_SynthesisQMF(const int16_t* low_band, // meaning that the first sample of `in_vector` is copied to the last sample of // the `out_vector`. The procedure continues until the last sample of // `in_vector` has been copied to the first sample of `out_vector`. This -// creates a reversed vector. Used in e.g. prediction in iLBC. +// creates a reversed vector. // // Input: // - in_vector : Pointer to the first sample in a int16_t vector @@ -1188,95 +1271,6 @@ void WebRtcSpl_SynthesisQMF(const int16_t* low_band, // - out_vector : Output vector // -// -// WebRtcSpl_ReverseOrderMultArrayElements(...) -// -// Performs the vector operation: -// out_vector[n] = (in_vector[n]*window[-n])>>right_shifts -// -// Input: -// - in_vector : Input vector -// - window : Window vector (should be reversed). The pointer -// should be set to the last value in the vector -// - right_shifts : Number of right bit shift to be applied after the -// multiplication -// - vector_length : Number of elements in `in_vector` -// -// Output: -// - out_vector : Output vector (can be same as `in_vector`) -// - -// -// WebRtcSpl_ElementwiseVectorMult(...) -// -// Performs the vector operation: -// out_vector[n] = (in_vector[n]*window[n])>>right_shifts -// -// Input: -// - in_vector : Input vector -// - window : Window vector. -// - right_shifts : Number of right bit shift to be applied after the -// multiplication -// - vector_length : Number of elements in `in_vector` -// -// Output: -// - out_vector : Output vector (can be same as `in_vector`) -// - -// -// WebRtcSpl_AddVectorsAndShift(...) -// -// Performs the vector operation: -// out_vector[k] = (in_vector1[k] + in_vector2[k])>>right_shifts -// -// Input: -// - in_vector1 : Input vector 1 -// - in_vector2 : Input vector 2 -// - right_shifts : Number of right bit shift to be applied after the -// multiplication -// - vector_length : Number of elements in `in_vector1` and `in_vector2` -// -// Output: -// - out_vector : Output vector (can be same as `in_vector1`) -// - -// -// WebRtcSpl_AddAffineVectorToVector(...) -// -// Adds an affine transformed vector to another vector `out_vector`, i.e, -// performs -// out_vector[k] += (in_vector[k]*gain+add_constant)>>right_shifts -// -// Input: -// - in_vector : Input vector -// - gain : Gain value, used to multiply the in vector with -// - add_constant : Constant value to add (usually 1<<(right_shifts-1), -// but others can be used as well -// - right_shifts : Number of right bit shifts (0-16) -// - vector_length : Number of samples in `in_vector` and `out_vector` -// -// Output: -// - out_vector : Vector with the output -// - -// -// WebRtcSpl_AffineTransformVector(...) 
-// -// Affine transforms a vector, i.e, performs -// out_vector[k] = (in_vector[k]*gain+add_constant)>>right_shifts -// -// Input: -// - in_vector : Input vector -// - gain : Gain value, used to multiply the in vector with -// - add_constant : Constant value to add (usually 1<<(right_shifts-1), -// but others can be used as well -// - right_shifts : Number of right bit shifts (0-16) -// - vector_length : Number of samples in `in_vector` and `out_vector` -// -// Output: -// - out_vector : Vector with the output -// - // // WebRtcSpl_IncreaseSeed(...) // @@ -1464,9 +1458,6 @@ void WebRtcSpl_SynthesisQMF(const int16_t* low_band, // - filter_state : Current state (higher part) of the filter. // - filter_state_length : Length (in samples) of `filter_state`. // - filter_state_low : Current state (lower part) of the filter. -// - filter_state_low_length : Length (in samples) of `filter_state_low`. -// - out_vector_low_length : Maximum length (in samples) of -// `out_vector_low`. // // Output: // - filter_state : Updated state (upper part) vector. diff --git a/common_audio/signal_processing/levinson_durbin.c b/common_audio/signal_processing/levinson_durbin.c index 2c5cbaeeaa..7ed1685819 100644 --- a/common_audio/signal_processing/levinson_durbin.c +++ b/common_audio/signal_processing/levinson_durbin.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the function WebRtcSpl_LevinsonDurbin(). * The description header can be found in signal_processing_library.h @@ -21,229 +20,224 @@ #define SPL_LEVINSON_MAXORDER 20 int16_t RTC_NO_SANITIZE("signed-integer-overflow") // bugs.webrtc.org/5486 -WebRtcSpl_LevinsonDurbin(const int32_t* R, int16_t* A, int16_t* K, - size_t order) -{ - size_t i, j; - // Auto-correlation coefficients in high precision - int16_t R_hi[SPL_LEVINSON_MAXORDER + 1], R_low[SPL_LEVINSON_MAXORDER + 1]; - // LPC coefficients in high precision - int16_t A_hi[SPL_LEVINSON_MAXORDER + 1], A_low[SPL_LEVINSON_MAXORDER + 1]; - // LPC coefficients for next iteration - int16_t A_upd_hi[SPL_LEVINSON_MAXORDER + 1], A_upd_low[SPL_LEVINSON_MAXORDER + 1]; - // Reflection coefficient in high precision - int16_t K_hi, K_low; - // Prediction gain Alpha in high precision and with scale factor - int16_t Alpha_hi, Alpha_low, Alpha_exp; - int16_t tmp_hi, tmp_low; - int32_t temp1W32, temp2W32, temp3W32; - int16_t norm; - - // Normalize the autocorrelation R[0]...R[order+1] - - norm = WebRtcSpl_NormW32(R[0]); - - for (i = 0; i <= order; ++i) - { - temp1W32 = R[i] * (1 << norm); - // UBSan: 12 * 268435456 cannot be represented in type 'int' - - // Put R in hi and low format - R_hi[i] = (int16_t)(temp1W32 >> 16); - R_low[i] = (int16_t)((temp1W32 - ((int32_t)R_hi[i] * 65536)) >> 1); + WebRtcSpl_LevinsonDurbin(const int32_t* R, + int16_t* A, + int16_t* K, + size_t order) { + size_t i, j; + // Auto-correlation coefficients in high precision + int16_t R_hi[SPL_LEVINSON_MAXORDER + 1], R_low[SPL_LEVINSON_MAXORDER + 1]; + // LPC coefficients in high precision + int16_t A_hi[SPL_LEVINSON_MAXORDER + 1], A_low[SPL_LEVINSON_MAXORDER + 1]; + // LPC coefficients for next iteration + int16_t A_upd_hi[SPL_LEVINSON_MAXORDER + 1], + A_upd_low[SPL_LEVINSON_MAXORDER + 1]; + // Reflection coefficient in high precision + int16_t K_hi, K_low; + // Prediction gain Alpha in high precision and with scale factor + int16_t Alpha_hi, Alpha_low, Alpha_exp; + int16_t tmp_hi, tmp_low; + int32_t temp1W32, temp2W32, temp3W32; + int16_t norm; + + // Normalize the autocorrelation 
R[0]...R[order+1] + + norm = WebRtcSpl_NormW32(R[0]); + + for (i = 0; i <= order; ++i) { + temp1W32 = R[i] * (1 << norm); + // UBSan: 12 * 268435456 cannot be represented in type 'int' + + // Put R in hi and low format + R_hi[i] = (int16_t)(temp1W32 >> 16); + R_low[i] = (int16_t)((temp1W32 - ((int32_t)R_hi[i] * 65536)) >> 1); + } + + // K = A[1] = -R[1] / R[0] + + temp2W32 = R[1] * (1 << norm); // R[1] in Q31 + temp3W32 = WEBRTC_SPL_ABS_W32(temp2W32); // abs R[1] + temp1W32 = WebRtcSpl_DivW32HiLow(temp3W32, R_hi[0], + R_low[0]); // abs(R[1])/R[0] in Q31 + // Put back the sign on R[1] + if (temp2W32 > 0) { + temp1W32 = -temp1W32; + } + + // Put K in hi and low format + K_hi = (int16_t)(temp1W32 >> 16); + K_low = (int16_t)((temp1W32 - ((int32_t)K_hi * 65536)) >> 1); + + // Store first reflection coefficient + K[0] = K_hi; + + temp1W32 >>= 4; // A[1] in Q27. + + // Put A[1] in hi and low format + A_hi[1] = (int16_t)(temp1W32 >> 16); + A_low[1] = (int16_t)((temp1W32 - ((int32_t)A_hi[1] * 65536)) >> 1); + + // Alpha = R[0] * (1-K^2) + + temp1W32 = ((K_hi * K_low >> 14) + K_hi * K_hi) * 2; // = k^2 in Q31 + + temp1W32 = WEBRTC_SPL_ABS_W32(temp1W32); // Guard against <0 + temp1W32 = + (int32_t)0x7fffffffL - temp1W32; // temp1W32 = (1 - K[0]*K[0]) in Q31 + + // Store temp1W32 = 1 - K[0]*K[0] on hi and low format + tmp_hi = (int16_t)(temp1W32 >> 16); + tmp_low = (int16_t)((temp1W32 - ((int32_t)tmp_hi << 16)) >> 1); + + // Calculate Alpha in Q31 + temp1W32 = + (R_hi[0] * tmp_hi + (R_hi[0] * tmp_low >> 15) + (R_low[0] * tmp_hi >> 15)) + << 1; + + // Normalize Alpha and put it in hi and low format + + Alpha_exp = WebRtcSpl_NormW32(temp1W32); + temp1W32 = WEBRTC_SPL_LSHIFT_W32(temp1W32, Alpha_exp); + Alpha_hi = (int16_t)(temp1W32 >> 16); + Alpha_low = (int16_t)((temp1W32 - ((int32_t)Alpha_hi << 16)) >> 1); + + // Perform the iterative calculations in the Levinson-Durbin algorithm + + for (i = 2; i <= order; i++) { + /* ---- + temp1W32 = R[i] + > R[j]*A[i-j] + / + ---- + j=1..i-1 + */ + + temp1W32 = 0; + + for (j = 1; j < i; j++) { + // temp1W32 is in Q31 + temp1W32 += + (R_hi[j] * A_hi[i - j] * 2) + + (((R_hi[j] * A_low[i - j] >> 15) + (R_low[j] * A_hi[i - j] >> 15)) * + 2); } - // K = A[1] = -R[1] / R[0] + temp1W32 = temp1W32 * 16; + temp1W32 += ((int32_t)R_hi[i] * 65536) + + WEBRTC_SPL_LSHIFT_W32((int32_t)R_low[i], 1); + + // K = -temp1W32 / Alpha + temp2W32 = WEBRTC_SPL_ABS_W32(temp1W32); // abs(temp1W32) + temp3W32 = WebRtcSpl_DivW32HiLow(temp2W32, Alpha_hi, + Alpha_low); // abs(temp1W32)/Alpha + + // Put the sign of temp1W32 back again + if (temp1W32 > 0) { + temp3W32 = -temp3W32; + } + + // Use the Alpha shifts from earlier to de-normalize + norm = WebRtcSpl_NormW32(temp3W32); + if ((Alpha_exp <= norm) || (temp3W32 == 0)) { + temp3W32 = temp3W32 * (1 << Alpha_exp); + } else { + if (temp3W32 > 0) { + temp3W32 = (int32_t)0x7fffffffL; + } else { + temp3W32 = (int32_t)0x80000000L; + } + } + + // Put K on hi and low format + K_hi = (int16_t)(temp3W32 >> 16); + K_low = (int16_t)((temp3W32 - ((int32_t)K_hi * 65536)) >> 1); + + // Store Reflection coefficient in Q15 + K[i - 1] = K_hi; - temp2W32 = R[1] * (1 << norm); // R[1] in Q31 - temp3W32 = WEBRTC_SPL_ABS_W32(temp2W32); // abs R[1] - temp1W32 = WebRtcSpl_DivW32HiLow(temp3W32, R_hi[0], R_low[0]); // abs(R[1])/R[0] in Q31 - // Put back the sign on R[1] - if (temp2W32 > 0) - { - temp1W32 = -temp1W32; + // Test for unstable filter. 
+ // If unstable return 0 and let the user decide what to do in that case + + if ((int32_t)WEBRTC_SPL_ABS_W16(K_hi) > (int32_t)32750) { + return 0; // Unstable filter } - // Put K in hi and low format - K_hi = (int16_t)(temp1W32 >> 16); - K_low = (int16_t)((temp1W32 - ((int32_t)K_hi * 65536)) >> 1); + /* + Compute updated LPC coefficient: Anew[i] + Anew[j]= A[j] + K*A[i-j] for j=1..i-1 + Anew[i]= K + */ + + for (j = 1; j < i; j++) { + // temp1W32 = A[j] in Q27 + temp1W32 = (int32_t)A_hi[j] * 65536 + + WEBRTC_SPL_LSHIFT_W32((int32_t)A_low[j], 1); + + // temp1W32 += K*A[i-j] in Q27 + temp1W32 += (K_hi * A_hi[i - j] + (K_hi * A_low[i - j] >> 15) + + (K_low * A_hi[i - j] >> 15)) * + 2; - // Store first reflection coefficient - K[0] = K_hi; + // Put Anew in hi and low format + A_upd_hi[j] = (int16_t)(temp1W32 >> 16); + A_upd_low[j] = + (int16_t)((temp1W32 - ((int32_t)A_upd_hi[j] * 65536)) >> 1); + } - temp1W32 >>= 4; // A[1] in Q27. + // temp3W32 = K in Q27 (Convert from Q31 to Q27) + temp3W32 >>= 4; - // Put A[1] in hi and low format - A_hi[1] = (int16_t)(temp1W32 >> 16); - A_low[1] = (int16_t)((temp1W32 - ((int32_t)A_hi[1] * 65536)) >> 1); + // Store Anew in hi and low format + A_upd_hi[i] = (int16_t)(temp3W32 >> 16); + A_upd_low[i] = (int16_t)((temp3W32 - ((int32_t)A_upd_hi[i] * 65536)) >> 1); - // Alpha = R[0] * (1-K^2) + // Alpha = Alpha * (1-K^2) - temp1W32 = ((K_hi * K_low >> 14) + K_hi * K_hi) * 2; // = k^2 in Q31 + temp1W32 = ((K_hi * K_low >> 14) + K_hi * K_hi) * 2; // K*K in Q31 - temp1W32 = WEBRTC_SPL_ABS_W32(temp1W32); // Guard against <0 - temp1W32 = (int32_t)0x7fffffffL - temp1W32; // temp1W32 = (1 - K[0]*K[0]) in Q31 + temp1W32 = WEBRTC_SPL_ABS_W32(temp1W32); // Guard against <0 + temp1W32 = (int32_t)0x7fffffffL - temp1W32; // 1 - K*K in Q31 - // Store temp1W32 = 1 - K[0]*K[0] on hi and low format + // Convert 1- K^2 in hi and low format tmp_hi = (int16_t)(temp1W32 >> 16); tmp_low = (int16_t)((temp1W32 - ((int32_t)tmp_hi << 16)) >> 1); - // Calculate Alpha in Q31 - temp1W32 = (R_hi[0] * tmp_hi + (R_hi[0] * tmp_low >> 15) + - (R_low[0] * tmp_hi >> 15)) << 1; + // Calculate Alpha = Alpha * (1-K^2) in Q31 + temp1W32 = (Alpha_hi * tmp_hi + (Alpha_hi * tmp_low >> 15) + + (Alpha_low * tmp_hi >> 15)) + << 1; - // Normalize Alpha and put it in hi and low format + // Normalize Alpha and store it on hi and low format + + norm = WebRtcSpl_NormW32(temp1W32); + temp1W32 = WEBRTC_SPL_LSHIFT_W32(temp1W32, norm); - Alpha_exp = WebRtcSpl_NormW32(temp1W32); - temp1W32 = WEBRTC_SPL_LSHIFT_W32(temp1W32, Alpha_exp); Alpha_hi = (int16_t)(temp1W32 >> 16); Alpha_low = (int16_t)((temp1W32 - ((int32_t)Alpha_hi << 16)) >> 1); - // Perform the iterative calculations in the Levinson-Durbin algorithm - - for (i = 2; i <= order; i++) - { - /* ---- - temp1W32 = R[i] + > R[j]*A[i-j] - / - ---- - j=1..i-1 - */ - - temp1W32 = 0; - - for (j = 1; j < i; j++) - { - // temp1W32 is in Q31 - temp1W32 += (R_hi[j] * A_hi[i - j] * 2) + - (((R_hi[j] * A_low[i - j] >> 15) + - (R_low[j] * A_hi[i - j] >> 15)) * 2); - } - - temp1W32 = temp1W32 * 16; - temp1W32 += ((int32_t)R_hi[i] * 65536) - + WEBRTC_SPL_LSHIFT_W32((int32_t)R_low[i], 1); - - // K = -temp1W32 / Alpha - temp2W32 = WEBRTC_SPL_ABS_W32(temp1W32); // abs(temp1W32) - temp3W32 = WebRtcSpl_DivW32HiLow(temp2W32, Alpha_hi, Alpha_low); // abs(temp1W32)/Alpha - - // Put the sign of temp1W32 back again - if (temp1W32 > 0) - { - temp3W32 = -temp3W32; - } - - // Use the Alpha shifts from earlier to de-normalize - norm = WebRtcSpl_NormW32(temp3W32); - if ((Alpha_exp <= norm) 
|| (temp3W32 == 0)) - { - temp3W32 = temp3W32 * (1 << Alpha_exp); - } else - { - if (temp3W32 > 0) - { - temp3W32 = (int32_t)0x7fffffffL; - } else - { - temp3W32 = (int32_t)0x80000000L; - } - } - - // Put K on hi and low format - K_hi = (int16_t)(temp3W32 >> 16); - K_low = (int16_t)((temp3W32 - ((int32_t)K_hi * 65536)) >> 1); - - // Store Reflection coefficient in Q15 - K[i - 1] = K_hi; - - // Test for unstable filter. - // If unstable return 0 and let the user decide what to do in that case - - if ((int32_t)WEBRTC_SPL_ABS_W16(K_hi) > (int32_t)32750) - { - return 0; // Unstable filter - } - - /* - Compute updated LPC coefficient: Anew[i] - Anew[j]= A[j] + K*A[i-j] for j=1..i-1 - Anew[i]= K - */ - - for (j = 1; j < i; j++) - { - // temp1W32 = A[j] in Q27 - temp1W32 = (int32_t)A_hi[j] * 65536 - + WEBRTC_SPL_LSHIFT_W32((int32_t)A_low[j],1); - - // temp1W32 += K*A[i-j] in Q27 - temp1W32 += (K_hi * A_hi[i - j] + (K_hi * A_low[i - j] >> 15) + - (K_low * A_hi[i - j] >> 15)) * 2; - - // Put Anew in hi and low format - A_upd_hi[j] = (int16_t)(temp1W32 >> 16); - A_upd_low[j] = (int16_t)( - (temp1W32 - ((int32_t)A_upd_hi[j] * 65536)) >> 1); - } - - // temp3W32 = K in Q27 (Convert from Q31 to Q27) - temp3W32 >>= 4; - - // Store Anew in hi and low format - A_upd_hi[i] = (int16_t)(temp3W32 >> 16); - A_upd_low[i] = (int16_t)( - (temp3W32 - ((int32_t)A_upd_hi[i] * 65536)) >> 1); - - // Alpha = Alpha * (1-K^2) - - temp1W32 = ((K_hi * K_low >> 14) + K_hi * K_hi) * 2; // K*K in Q31 - - temp1W32 = WEBRTC_SPL_ABS_W32(temp1W32); // Guard against <0 - temp1W32 = (int32_t)0x7fffffffL - temp1W32; // 1 - K*K in Q31 - - // Convert 1- K^2 in hi and low format - tmp_hi = (int16_t)(temp1W32 >> 16); - tmp_low = (int16_t)((temp1W32 - ((int32_t)tmp_hi << 16)) >> 1); - - // Calculate Alpha = Alpha * (1-K^2) in Q31 - temp1W32 = (Alpha_hi * tmp_hi + (Alpha_hi * tmp_low >> 15) + - (Alpha_low * tmp_hi >> 15)) << 1; - - // Normalize Alpha and store it on hi and low format - - norm = WebRtcSpl_NormW32(temp1W32); - temp1W32 = WEBRTC_SPL_LSHIFT_W32(temp1W32, norm); - - Alpha_hi = (int16_t)(temp1W32 >> 16); - Alpha_low = (int16_t)((temp1W32 - ((int32_t)Alpha_hi << 16)) >> 1); - - // Update the total normalization of Alpha - Alpha_exp = Alpha_exp + norm; - - // Update A[] - - for (j = 1; j <= i; j++) - { - A_hi[j] = A_upd_hi[j]; - A_low[j] = A_upd_low[j]; - } - } - - /* - Set A[0] to 1.0 and store the A[i] i=1...order in Q12 - (Convert from Q27 and use rounding) - */ + // Update the total normalization of Alpha + Alpha_exp = Alpha_exp + norm; - A[0] = 4096; + // Update A[] - for (i = 1; i <= order; i++) - { - // temp1W32 in Q27 - temp1W32 = (int32_t)A_hi[i] * 65536 - + WEBRTC_SPL_LSHIFT_W32((int32_t)A_low[i], 1); - // Round and store upper word - A[i] = (int16_t)(((temp1W32 * 2) + 32768) >> 16); + for (j = 1; j <= i; j++) { + A_hi[j] = A_upd_hi[j]; + A_low[j] = A_upd_low[j]; } - return 1; // Stable filters + } + + /* + Set A[0] to 1.0 and store the A[i] i=1...order in Q12 + (Convert from Q27 and use rounding) + */ + + A[0] = 4096; + + for (i = 1; i <= order; i++) { + // temp1W32 in Q27 + temp1W32 = + (int32_t)A_hi[i] * 65536 + WEBRTC_SPL_LSHIFT_W32((int32_t)A_low[i], 1); + // Round and store upper word + A[i] = (int16_t)(((temp1W32 * 2) + 32768) >> 16); + } + return 1; // Stable filters } diff --git a/common_audio/signal_processing/lpc_to_refl_coef.c b/common_audio/signal_processing/lpc_to_refl_coef.c index 7a5e25191b..2a7c35ea78 100644 --- a/common_audio/signal_processing/lpc_to_refl_coef.c +++ 
b/common_audio/signal_processing/lpc_to_refl_coef.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the function WebRtcSpl_LpcToReflCoef(). * The description header can be found in signal_processing_library.h @@ -19,38 +18,35 @@ #define SPL_LPC_TO_REFL_COEF_MAX_AR_MODEL_ORDER 50 -void WebRtcSpl_LpcToReflCoef(int16_t* a16, int use_order, int16_t* k16) -{ - int m, k; - int32_t tmp32[SPL_LPC_TO_REFL_COEF_MAX_AR_MODEL_ORDER]; - int32_t tmp_inv_denom32; - int16_t tmp_inv_denom16; - - k16[use_order - 1] = a16[use_order] << 3; // Q12<<3 => Q15 - for (m = use_order - 1; m > 0; m--) - { - // (1 - k^2) in Q30 - tmp_inv_denom32 = 1073741823 - k16[m] * k16[m]; - // (1 - k^2) in Q15 - tmp_inv_denom16 = (int16_t)(tmp_inv_denom32 >> 15); - - for (k = 1; k <= m; k++) - { - // tmp[k] = (a[k] - RC[m] * a[m-k+1]) / (1.0 - RC[m]*RC[m]); - - // [Q12<<16 - (Q15*Q12)<<1] = [Q28 - Q28] = Q28 - tmp32[k] = (a16[k] << 16) - (k16[m] * a16[m - k + 1] << 1); - - tmp32[k] = WebRtcSpl_DivW32W16(tmp32[k], tmp_inv_denom16); //Q28/Q15 = Q13 - } - - for (k = 1; k < m; k++) - { - a16[k] = (int16_t)(tmp32[k] >> 1); // Q13>>1 => Q12 - } - - tmp32[m] = WEBRTC_SPL_SAT(8191, tmp32[m], -8191); - k16[m - 1] = (int16_t)WEBRTC_SPL_LSHIFT_W32(tmp32[m], 2); //Q13<<2 => Q15 +void WebRtcSpl_LpcToReflCoef(int16_t* a16, int use_order, int16_t* k16) { + int m, k; + int32_t tmp32[SPL_LPC_TO_REFL_COEF_MAX_AR_MODEL_ORDER]; + int32_t tmp_inv_denom32; + int16_t tmp_inv_denom16; + + k16[use_order - 1] = a16[use_order] << 3; // Q12<<3 => Q15 + for (m = use_order - 1; m > 0; m--) { + // (1 - k^2) in Q30 + tmp_inv_denom32 = 1073741823 - k16[m] * k16[m]; + // (1 - k^2) in Q15 + tmp_inv_denom16 = (int16_t)(tmp_inv_denom32 >> 15); + + for (k = 1; k <= m; k++) { + // tmp[k] = (a[k] - RC[m] * a[m-k+1]) / (1.0 - RC[m]*RC[m]); + + // [Q12<<16 - (Q15*Q12)<<1] = [Q28 - Q28] = Q28 + tmp32[k] = (a16[k] << 16) - (k16[m] * a16[m - k + 1] << 1); + + tmp32[k] = + WebRtcSpl_DivW32W16(tmp32[k], tmp_inv_denom16); // Q28/Q15 = Q13 + } + + for (k = 1; k < m; k++) { + a16[k] = (int16_t)(tmp32[k] >> 1); // Q13>>1 => Q12 } - return; + + tmp32[m] = WEBRTC_SPL_SAT(8191, tmp32[m], -8191); + k16[m - 1] = (int16_t)WEBRTC_SPL_LSHIFT_W32(tmp32[m], 2); // Q13<<2 => Q15 + } + return; } diff --git a/common_audio/signal_processing/min_max_operations.c b/common_audio/signal_processing/min_max_operations.c index 6acf88287b..2a7c82647e 100644 --- a/common_audio/signal_processing/min_max_operations.c +++ b/common_audio/signal_processing/min_max_operations.c @@ -24,11 +24,11 @@ * */ -#include #include +#include -#include "rtc_base/checks.h" #include "common_audio/signal_processing/include/signal_processing_library.h" +#include "rtc_base/checks.h" // TODO(bjorn/kma): Consolidate function pairs (e.g. combine // WebRtcSpl_MaxAbsValueW16C and WebRtcSpl_MaxAbsIndexW16 into a single one.) @@ -235,8 +235,10 @@ size_t WebRtcSpl_MinIndexW32(const int32_t* vector, size_t length) { } // Finds both the minimum and maximum elements in an array of 16-bit integers. 
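// For reference: a floating-point sketch of the step-down recursion that
// WebRtcSpl_LpcToReflCoef above implements in Q12/Q15. It inverts the
// Levinson-Durbin update from levinson_durbin.c (Anew[j] = A[j] + K*A[i-j]),
// exactly as the in-code comment "(a[k] - RC[m]*a[m-k+1]) / (1 - RC[m]*RC[m])"
// describes. Double precision and the unsaturated arithmetic are illustrative
// simplifications; the order is assumed to stay within the library's
// SPL_LPC_TO_REFL_COEF_MAX_AR_MODEL_ORDER (50).
static void LpcToReflCoefFloatSketch(double* a,  // a[0..order], a[0] == 1.0
                                     int order,
                                     double* k) {  // k[0..order-1]
  double tmp[51];
  for (int m = order; m >= 1; --m) {
    k[m - 1] = a[m];
    const double denom = 1.0 - k[m - 1] * k[m - 1];
    for (int j = 1; j < m; ++j) {
      tmp[j] = (a[j] - k[m - 1] * a[m - j]) / denom;
    }
    for (int j = 1; j < m; ++j) {
      a[j] = tmp[j];
    }
  }
}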
-void WebRtcSpl_MinMaxW16(const int16_t* vector, size_t length, - int16_t* min_val, int16_t* max_val) { +void WebRtcSpl_MinMaxW16(const int16_t* vector, + size_t length, + int16_t* min_val, + int16_t* max_val) { #if defined(WEBRTC_HAS_NEON) return WebRtcSpl_MinMaxW16Neon(vector, length, min_val, max_val); #else diff --git a/common_audio/signal_processing/min_max_operations_mips.c b/common_audio/signal_processing/min_max_operations_mips.c index 8a7fc65c42..5ae8ef6681 100644 --- a/common_audio/signal_processing/min_max_operations_mips.c +++ b/common_audio/signal_processing/min_max_operations_mips.c @@ -16,8 +16,8 @@ * */ -#include "rtc_base/checks.h" #include "common_audio/signal_processing/include/signal_processing_library.h" +#include "rtc_base/checks.h" // Maximum absolute value of word16 vector. int16_t WebRtcSpl_MaxAbsValueW16_mips(const int16_t* vector, size_t length) { @@ -32,190 +32,184 @@ int16_t WebRtcSpl_MaxAbsValueW16_mips(const int16_t* vector, size_t length) { loop_size = length >> 4; for (i = 0; i < loop_size; i++) { - __asm__ volatile ( - "lw %[tmp32_0], 0(%[tmpvec32]) \n\t" - "lw %[tmp32_1], 4(%[tmpvec32]) \n\t" - "lw %[tmp32_2], 8(%[tmpvec32]) \n\t" - "lw %[tmp32_3], 12(%[tmpvec32]) \n\t" - - "absq_s.ph %[tmp32_0], %[tmp32_0] \n\t" - "absq_s.ph %[tmp32_1], %[tmp32_1] \n\t" - "cmp.lt.ph %[totMax], %[tmp32_0] \n\t" - "pick.ph %[totMax], %[tmp32_0], %[totMax] \n\t" - - "lw %[tmp32_0], 16(%[tmpvec32]) \n\t" - "absq_s.ph %[tmp32_2], %[tmp32_2] \n\t" - "cmp.lt.ph %[totMax], %[tmp32_1] \n\t" - "pick.ph %[totMax], %[tmp32_1], %[totMax] \n\t" - - "lw %[tmp32_1], 20(%[tmpvec32]) \n\t" - "absq_s.ph %[tmp32_3], %[tmp32_3] \n\t" - "cmp.lt.ph %[totMax], %[tmp32_2] \n\t" - "pick.ph %[totMax], %[tmp32_2], %[totMax] \n\t" - - "lw %[tmp32_2], 24(%[tmpvec32]) \n\t" - "cmp.lt.ph %[totMax], %[tmp32_3] \n\t" - "pick.ph %[totMax], %[tmp32_3], %[totMax] \n\t" - - "lw %[tmp32_3], 28(%[tmpvec32]) \n\t" - "absq_s.ph %[tmp32_0], %[tmp32_0] \n\t" - "absq_s.ph %[tmp32_1], %[tmp32_1] \n\t" + __asm__ volatile( + "lw %[tmp32_0], 0(%[tmpvec32]) \n\t" + "lw %[tmp32_1], 4(%[tmpvec32]) \n\t" + "lw %[tmp32_2], 8(%[tmpvec32]) \n\t" + "lw %[tmp32_3], 12(%[tmpvec32]) \n\t" + + "absq_s.ph %[tmp32_0], %[tmp32_0] \n\t" + "absq_s.ph %[tmp32_1], %[tmp32_1] \n\t" + "cmp.lt.ph %[totMax], %[tmp32_0] \n\t" + "pick.ph %[totMax], %[tmp32_0], %[totMax] \n\t" + + "lw %[tmp32_0], 16(%[tmpvec32]) \n\t" + "absq_s.ph %[tmp32_2], %[tmp32_2] \n\t" + "cmp.lt.ph %[totMax], %[tmp32_1] \n\t" + "pick.ph %[totMax], %[tmp32_1], %[totMax] \n\t" + + "lw %[tmp32_1], 20(%[tmpvec32]) \n\t" + "absq_s.ph %[tmp32_3], %[tmp32_3] \n\t" + "cmp.lt.ph %[totMax], %[tmp32_2] \n\t" + "pick.ph %[totMax], %[tmp32_2], %[totMax] \n\t" + + "lw %[tmp32_2], 24(%[tmpvec32]) \n\t" + "cmp.lt.ph %[totMax], %[tmp32_3] \n\t" + "pick.ph %[totMax], %[tmp32_3], %[totMax] \n\t" + + "lw %[tmp32_3], 28(%[tmpvec32]) \n\t" + "absq_s.ph %[tmp32_0], %[tmp32_0] \n\t" + "absq_s.ph %[tmp32_1], %[tmp32_1] \n\t" + "cmp.lt.ph %[totMax], %[tmp32_0] \n\t" + "pick.ph %[totMax], %[tmp32_0], %[totMax] \n\t" + + "absq_s.ph %[tmp32_2], %[tmp32_2] \n\t" + "cmp.lt.ph %[totMax], %[tmp32_1] \n\t" + "pick.ph %[totMax], %[tmp32_1], %[totMax] \n\t" + "absq_s.ph %[tmp32_3], %[tmp32_3] \n\t" + "cmp.lt.ph %[totMax], %[tmp32_2] \n\t" + "pick.ph %[totMax], %[tmp32_2], %[totMax] \n\t" + + "cmp.lt.ph %[totMax], %[tmp32_3] \n\t" + "pick.ph %[totMax], %[tmp32_3], %[totMax] \n\t" + + "addiu %[tmpvec32], %[tmpvec32], 32 \n\t" + : [tmp32_0] "=&r"(tmp32_0), [tmp32_1] "=&r"(tmp32_1), + [tmp32_2] "=&r"(tmp32_2), 
[tmp32_3] "=&r"(tmp32_3), + [totMax] "+r"(totMax), [tmpvec32] "+r"(tmpvec32) + : + : "memory"); + } + __asm__ volatile( + "rotr %[tmp32_0], %[totMax], 16 \n\t" "cmp.lt.ph %[totMax], %[tmp32_0] \n\t" "pick.ph %[totMax], %[tmp32_0], %[totMax] \n\t" - - "absq_s.ph %[tmp32_2], %[tmp32_2] \n\t" - "cmp.lt.ph %[totMax], %[tmp32_1] \n\t" - "pick.ph %[totMax], %[tmp32_1], %[totMax] \n\t" - "absq_s.ph %[tmp32_3], %[tmp32_3] \n\t" - "cmp.lt.ph %[totMax], %[tmp32_2] \n\t" - "pick.ph %[totMax], %[tmp32_2], %[totMax] \n\t" - - "cmp.lt.ph %[totMax], %[tmp32_3] \n\t" - "pick.ph %[totMax], %[tmp32_3], %[totMax] \n\t" - - "addiu %[tmpvec32], %[tmpvec32], 32 \n\t" - : [tmp32_0] "=&r" (tmp32_0), [tmp32_1] "=&r" (tmp32_1), - [tmp32_2] "=&r" (tmp32_2), [tmp32_3] "=&r" (tmp32_3), - [totMax] "+r" (totMax), [tmpvec32] "+r" (tmpvec32) - : - : "memory" - ); - } - __asm__ volatile ( - "rotr %[tmp32_0], %[totMax], 16 \n\t" - "cmp.lt.ph %[totMax], %[tmp32_0] \n\t" - "pick.ph %[totMax], %[tmp32_0], %[totMax] \n\t" - "packrl.ph %[totMax], $0, %[totMax] \n\t" - : [tmp32_0] "=&r" (tmp32_0), [totMax] "+r" (totMax) - : - ); + "packrl.ph %[totMax], $0, %[totMax] \n\t" + : [tmp32_0] "=&r"(tmp32_0), [totMax] "+r"(totMax) + :); loop_size = length & 0xf; for (i = 0; i < loop_size; i++) { - __asm__ volatile ( - "lh %[tmp32_0], 0(%[tmpvec32]) \n\t" - "addiu %[tmpvec32], %[tmpvec32], 2 \n\t" - "absq_s.w %[tmp32_0], %[tmp32_0] \n\t" - "slt %[tmp32_1], %[totMax], %[tmp32_0] \n\t" - "movn %[totMax], %[tmp32_0], %[tmp32_1] \n\t" - : [tmp32_0] "=&r" (tmp32_0), [tmp32_1] "=&r" (tmp32_1), - [tmpvec32] "+r" (tmpvec32), [totMax] "+r" (totMax) - : - : "memory" - ); + __asm__ volatile( + "lh %[tmp32_0], 0(%[tmpvec32]) \n\t" + "addiu %[tmpvec32], %[tmpvec32], 2 \n\t" + "absq_s.w %[tmp32_0], %[tmp32_0] \n\t" + "slt %[tmp32_1], %[totMax], %[tmp32_0] \n\t" + "movn %[totMax], %[tmp32_0], %[tmp32_1] \n\t" + : [tmp32_0] "=&r"(tmp32_0), [tmp32_1] "=&r"(tmp32_1), + [tmpvec32] "+r"(tmpvec32), [totMax] "+r"(totMax) + : + : "memory"); } -#else // #if defined(MIPS_DSP_R1) +#else // #if defined(MIPS_DSP_R1) int32_t v16MaxMax = WEBRTC_SPL_WORD16_MAX; int32_t r, r1, r2, r3; const int16_t* tmpvector = vector; loop_size = length >> 4; for (i = 0; i < loop_size; i++) { - __asm__ volatile ( - "lh %[tmp32_0], 0(%[tmpvector]) \n\t" - "lh %[tmp32_1], 2(%[tmpvector]) \n\t" - "lh %[tmp32_2], 4(%[tmpvector]) \n\t" - "lh %[tmp32_3], 6(%[tmpvector]) \n\t" - - "abs %[tmp32_0], %[tmp32_0] \n\t" - "abs %[tmp32_1], %[tmp32_1] \n\t" - "abs %[tmp32_2], %[tmp32_2] \n\t" - "abs %[tmp32_3], %[tmp32_3] \n\t" - - "slt %[r], %[totMax], %[tmp32_0] \n\t" - "movn %[totMax], %[tmp32_0], %[r] \n\t" - "slt %[r1], %[totMax], %[tmp32_1] \n\t" - "movn %[totMax], %[tmp32_1], %[r1] \n\t" - "slt %[r2], %[totMax], %[tmp32_2] \n\t" - "movn %[totMax], %[tmp32_2], %[r2] \n\t" - "slt %[r3], %[totMax], %[tmp32_3] \n\t" - "movn %[totMax], %[tmp32_3], %[r3] \n\t" - - "lh %[tmp32_0], 8(%[tmpvector]) \n\t" - "lh %[tmp32_1], 10(%[tmpvector]) \n\t" - "lh %[tmp32_2], 12(%[tmpvector]) \n\t" - "lh %[tmp32_3], 14(%[tmpvector]) \n\t" - - "abs %[tmp32_0], %[tmp32_0] \n\t" - "abs %[tmp32_1], %[tmp32_1] \n\t" - "abs %[tmp32_2], %[tmp32_2] \n\t" - "abs %[tmp32_3], %[tmp32_3] \n\t" - - "slt %[r], %[totMax], %[tmp32_0] \n\t" - "movn %[totMax], %[tmp32_0], %[r] \n\t" - "slt %[r1], %[totMax], %[tmp32_1] \n\t" - "movn %[totMax], %[tmp32_1], %[r1] \n\t" - "slt %[r2], %[totMax], %[tmp32_2] \n\t" - "movn %[totMax], %[tmp32_2], %[r2] \n\t" - "slt %[r3], %[totMax], %[tmp32_3] \n\t" - "movn %[totMax], %[tmp32_3], %[r3] \n\t" 
- - "lh %[tmp32_0], 16(%[tmpvector]) \n\t" - "lh %[tmp32_1], 18(%[tmpvector]) \n\t" - "lh %[tmp32_2], 20(%[tmpvector]) \n\t" - "lh %[tmp32_3], 22(%[tmpvector]) \n\t" - - "abs %[tmp32_0], %[tmp32_0] \n\t" - "abs %[tmp32_1], %[tmp32_1] \n\t" - "abs %[tmp32_2], %[tmp32_2] \n\t" - "abs %[tmp32_3], %[tmp32_3] \n\t" - - "slt %[r], %[totMax], %[tmp32_0] \n\t" - "movn %[totMax], %[tmp32_0], %[r] \n\t" - "slt %[r1], %[totMax], %[tmp32_1] \n\t" - "movn %[totMax], %[tmp32_1], %[r1] \n\t" - "slt %[r2], %[totMax], %[tmp32_2] \n\t" - "movn %[totMax], %[tmp32_2], %[r2] \n\t" - "slt %[r3], %[totMax], %[tmp32_3] \n\t" - "movn %[totMax], %[tmp32_3], %[r3] \n\t" - - "lh %[tmp32_0], 24(%[tmpvector]) \n\t" - "lh %[tmp32_1], 26(%[tmpvector]) \n\t" - "lh %[tmp32_2], 28(%[tmpvector]) \n\t" - "lh %[tmp32_3], 30(%[tmpvector]) \n\t" - - "abs %[tmp32_0], %[tmp32_0] \n\t" - "abs %[tmp32_1], %[tmp32_1] \n\t" - "abs %[tmp32_2], %[tmp32_2] \n\t" - "abs %[tmp32_3], %[tmp32_3] \n\t" - - "slt %[r], %[totMax], %[tmp32_0] \n\t" - "movn %[totMax], %[tmp32_0], %[r] \n\t" - "slt %[r1], %[totMax], %[tmp32_1] \n\t" - "movn %[totMax], %[tmp32_1], %[r1] \n\t" - "slt %[r2], %[totMax], %[tmp32_2] \n\t" - "movn %[totMax], %[tmp32_2], %[r2] \n\t" - "slt %[r3], %[totMax], %[tmp32_3] \n\t" - "movn %[totMax], %[tmp32_3], %[r3] \n\t" - - "addiu %[tmpvector], %[tmpvector], 32 \n\t" - : [tmp32_0] "=&r" (tmp32_0), [tmp32_1] "=&r" (tmp32_1), - [tmp32_2] "=&r" (tmp32_2), [tmp32_3] "=&r" (tmp32_3), - [totMax] "+r" (totMax), [r] "=&r" (r), [tmpvector] "+r" (tmpvector), - [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3) - : - : "memory" - ); + __asm__ volatile( + "lh %[tmp32_0], 0(%[tmpvector]) \n\t" + "lh %[tmp32_1], 2(%[tmpvector]) \n\t" + "lh %[tmp32_2], 4(%[tmpvector]) \n\t" + "lh %[tmp32_3], 6(%[tmpvector]) \n\t" + + "abs %[tmp32_0], %[tmp32_0] \n\t" + "abs %[tmp32_1], %[tmp32_1] \n\t" + "abs %[tmp32_2], %[tmp32_2] \n\t" + "abs %[tmp32_3], %[tmp32_3] \n\t" + + "slt %[r], %[totMax], %[tmp32_0] \n\t" + "movn %[totMax], %[tmp32_0], %[r] \n\t" + "slt %[r1], %[totMax], %[tmp32_1] \n\t" + "movn %[totMax], %[tmp32_1], %[r1] \n\t" + "slt %[r2], %[totMax], %[tmp32_2] \n\t" + "movn %[totMax], %[tmp32_2], %[r2] \n\t" + "slt %[r3], %[totMax], %[tmp32_3] \n\t" + "movn %[totMax], %[tmp32_3], %[r3] \n\t" + + "lh %[tmp32_0], 8(%[tmpvector]) \n\t" + "lh %[tmp32_1], 10(%[tmpvector]) \n\t" + "lh %[tmp32_2], 12(%[tmpvector]) \n\t" + "lh %[tmp32_3], 14(%[tmpvector]) \n\t" + + "abs %[tmp32_0], %[tmp32_0] \n\t" + "abs %[tmp32_1], %[tmp32_1] \n\t" + "abs %[tmp32_2], %[tmp32_2] \n\t" + "abs %[tmp32_3], %[tmp32_3] \n\t" + + "slt %[r], %[totMax], %[tmp32_0] \n\t" + "movn %[totMax], %[tmp32_0], %[r] \n\t" + "slt %[r1], %[totMax], %[tmp32_1] \n\t" + "movn %[totMax], %[tmp32_1], %[r1] \n\t" + "slt %[r2], %[totMax], %[tmp32_2] \n\t" + "movn %[totMax], %[tmp32_2], %[r2] \n\t" + "slt %[r3], %[totMax], %[tmp32_3] \n\t" + "movn %[totMax], %[tmp32_3], %[r3] \n\t" + + "lh %[tmp32_0], 16(%[tmpvector]) \n\t" + "lh %[tmp32_1], 18(%[tmpvector]) \n\t" + "lh %[tmp32_2], 20(%[tmpvector]) \n\t" + "lh %[tmp32_3], 22(%[tmpvector]) \n\t" + + "abs %[tmp32_0], %[tmp32_0] \n\t" + "abs %[tmp32_1], %[tmp32_1] \n\t" + "abs %[tmp32_2], %[tmp32_2] \n\t" + "abs %[tmp32_3], %[tmp32_3] \n\t" + + "slt %[r], %[totMax], %[tmp32_0] \n\t" + "movn %[totMax], %[tmp32_0], %[r] \n\t" + "slt %[r1], %[totMax], %[tmp32_1] \n\t" + "movn %[totMax], %[tmp32_1], %[r1] \n\t" + "slt %[r2], %[totMax], %[tmp32_2] \n\t" + "movn %[totMax], %[tmp32_2], %[r2] \n\t" + "slt %[r3], %[totMax], %[tmp32_3] \n\t" + "movn 
%[totMax], %[tmp32_3], %[r3] \n\t" + + "lh %[tmp32_0], 24(%[tmpvector]) \n\t" + "lh %[tmp32_1], 26(%[tmpvector]) \n\t" + "lh %[tmp32_2], 28(%[tmpvector]) \n\t" + "lh %[tmp32_3], 30(%[tmpvector]) \n\t" + + "abs %[tmp32_0], %[tmp32_0] \n\t" + "abs %[tmp32_1], %[tmp32_1] \n\t" + "abs %[tmp32_2], %[tmp32_2] \n\t" + "abs %[tmp32_3], %[tmp32_3] \n\t" + + "slt %[r], %[totMax], %[tmp32_0] \n\t" + "movn %[totMax], %[tmp32_0], %[r] \n\t" + "slt %[r1], %[totMax], %[tmp32_1] \n\t" + "movn %[totMax], %[tmp32_1], %[r1] \n\t" + "slt %[r2], %[totMax], %[tmp32_2] \n\t" + "movn %[totMax], %[tmp32_2], %[r2] \n\t" + "slt %[r3], %[totMax], %[tmp32_3] \n\t" + "movn %[totMax], %[tmp32_3], %[r3] \n\t" + + "addiu %[tmpvector], %[tmpvector], 32 \n\t" + : [tmp32_0] "=&r"(tmp32_0), [tmp32_1] "=&r"(tmp32_1), + [tmp32_2] "=&r"(tmp32_2), [tmp32_3] "=&r"(tmp32_3), + [totMax] "+r"(totMax), [r] "=&r"(r), [tmpvector] "+r"(tmpvector), + [r1] "=&r"(r1), [r2] "=&r"(r2), [r3] "=&r"(r3) + : + : "memory"); } loop_size = length & 0xf; for (i = 0; i < loop_size; i++) { - __asm__ volatile ( - "lh %[tmp32_0], 0(%[tmpvector]) \n\t" - "addiu %[tmpvector], %[tmpvector], 2 \n\t" - "abs %[tmp32_0], %[tmp32_0] \n\t" - "slt %[tmp32_1], %[totMax], %[tmp32_0] \n\t" - "movn %[totMax], %[tmp32_0], %[tmp32_1] \n\t" - : [tmp32_0] "=&r" (tmp32_0), [tmp32_1] "=&r" (tmp32_1), - [tmpvector] "+r" (tmpvector), [totMax] "+r" (totMax) - : - : "memory" - ); + __asm__ volatile( + "lh %[tmp32_0], 0(%[tmpvector]) \n\t" + "addiu %[tmpvector], %[tmpvector], 2 \n\t" + "abs %[tmp32_0], %[tmp32_0] \n\t" + "slt %[tmp32_1], %[totMax], %[tmp32_0] \n\t" + "movn %[totMax], %[tmp32_0], %[tmp32_1] \n\t" + : [tmp32_0] "=&r"(tmp32_0), [tmp32_1] "=&r"(tmp32_1), + [tmpvector] "+r"(tmpvector), [totMax] "+r"(totMax) + : + : "memory"); } - __asm__ volatile ( - "slt %[r], %[v16MaxMax], %[totMax] \n\t" - "movn %[totMax], %[v16MaxMax], %[r] \n\t" - : [totMax] "+r" (totMax), [r] "=&r" (r) - : [v16MaxMax] "r" (v16MaxMax) - ); + __asm__ volatile( + "slt %[r], %[v16MaxMax], %[totMax] \n\t" + "movn %[totMax], %[v16MaxMax], %[r] \n\t" + : [totMax] "+r"(totMax), [r] "=&r"(r) + : [v16MaxMax] "r"(v16MaxMax)); #endif // #if defined(MIPS_DSP_R1) return (int16_t)totMax; } @@ -231,27 +225,26 @@ int32_t WebRtcSpl_MaxAbsValueW32_mips(const int32_t* vector, size_t length) { RTC_DCHECK_GT(length, 0); - __asm__ volatile ( - ".set push \n\t" - ".set noreorder \n\t" + __asm__ volatile( + ".set push \n\t" + ".set noreorder \n\t" - "1: \n\t" - "lw %[absolute], 0(%[vector]) \n\t" - "absq_s.w %[absolute], %[absolute] \n\t" - "addiu %[length], %[length], -1 \n\t" - "slt %[tmp1], %[maximum], %[absolute] \n\t" - "movn %[maximum], %[absolute], %[tmp1] \n\t" - "bgtz %[length], 1b \n\t" - " addiu %[vector], %[vector], 4 \n\t" - "slt %[tmp1], %[max_value], %[maximum] \n\t" - "movn %[maximum], %[max_value], %[tmp1] \n\t" + "1: \n\t" + "lw %[absolute], 0(%[vector]) \n\t" + "absq_s.w %[absolute], %[absolute] \n\t" + "addiu %[length], %[length], -1 \n\t" + "slt %[tmp1], %[maximum], %[absolute] \n\t" + "movn %[maximum], %[absolute], %[tmp1] \n\t" + "bgtz %[length], 1b \n\t" + " addiu %[vector], %[vector], 4 \n\t" + "slt %[tmp1], %[max_value], %[maximum] \n\t" + "movn %[maximum], %[max_value], %[tmp1] \n\t" - ".set pop \n\t" + ".set pop \n\t" - : [tmp1] "=&r" (tmp1), [maximum] "+r" (maximum), [absolute] "+r" (absolute) - : [vector] "r" (vector), [length] "r" (length), [max_value] "r" (max_value) - : "memory" - ); + : [tmp1] "=&r"(tmp1), [maximum] "+r"(maximum), [absolute] "+r"(absolute) + : [vector] "r"(vector), 
[length] "r"(length), [max_value] "r"(max_value) + : "memory"); return (int32_t)maximum; } @@ -265,23 +258,22 @@ int16_t WebRtcSpl_MaxValueW16_mips(const int16_t* vector, size_t length) { RTC_DCHECK_GT(length, 0); - __asm__ volatile ( - ".set push \n\t" - ".set noreorder \n\t" + __asm__ volatile( + ".set push \n\t" + ".set noreorder \n\t" - "1: \n\t" - "lh %[value], 0(%[vector]) \n\t" - "addiu %[length], %[length], -1 \n\t" - "slt %[tmp1], %[maximum], %[value] \n\t" - "movn %[maximum], %[value], %[tmp1] \n\t" - "bgtz %[length], 1b \n\t" - " addiu %[vector], %[vector], 2 \n\t" - ".set pop \n\t" + "1: \n\t" + "lh %[value], 0(%[vector]) \n\t" + "addiu %[length], %[length], -1 \n\t" + "slt %[tmp1], %[maximum], %[value] \n\t" + "movn %[maximum], %[value], %[tmp1] \n\t" + "bgtz %[length], 1b \n\t" + " addiu %[vector], %[vector], 2 \n\t" + ".set pop \n\t" - : [tmp1] "=&r" (tmp1), [maximum] "+r" (maximum), [value] "=&r" (value) - : [vector] "r" (vector), [length] "r" (length) - : "memory" - ); + : [tmp1] "=&r"(tmp1), [maximum] "+r"(maximum), [value] "=&r"(value) + : [vector] "r"(vector), [length] "r"(length) + : "memory"); return maximum; } @@ -293,24 +285,23 @@ int32_t WebRtcSpl_MaxValueW32_mips(const int32_t* vector, size_t length) { RTC_DCHECK_GT(length, 0); - __asm__ volatile ( - ".set push \n\t" - ".set noreorder \n\t" + __asm__ volatile( + ".set push \n\t" + ".set noreorder \n\t" - "1: \n\t" - "lw %[value], 0(%[vector]) \n\t" - "addiu %[length], %[length], -1 \n\t" - "slt %[tmp1], %[maximum], %[value] \n\t" - "movn %[maximum], %[value], %[tmp1] \n\t" - "bgtz %[length], 1b \n\t" - " addiu %[vector], %[vector], 4 \n\t" + "1: \n\t" + "lw %[value], 0(%[vector]) \n\t" + "addiu %[length], %[length], -1 \n\t" + "slt %[tmp1], %[maximum], %[value] \n\t" + "movn %[maximum], %[value], %[tmp1] \n\t" + "bgtz %[length], 1b \n\t" + " addiu %[vector], %[vector], 4 \n\t" - ".set pop \n\t" + ".set pop \n\t" - : [tmp1] "=&r" (tmp1), [maximum] "+r" (maximum), [value] "=&r" (value) - : [vector] "r" (vector), [length] "r" (length) - : "memory" - ); + : [tmp1] "=&r"(tmp1), [maximum] "+r"(maximum), [value] "=&r"(value) + : [vector] "r"(vector), [length] "r"(length) + : "memory"); return maximum; } @@ -323,24 +314,23 @@ int16_t WebRtcSpl_MinValueW16_mips(const int16_t* vector, size_t length) { RTC_DCHECK_GT(length, 0); - __asm__ volatile ( - ".set push \n\t" - ".set noreorder \n\t" + __asm__ volatile( + ".set push \n\t" + ".set noreorder \n\t" - "1: \n\t" - "lh %[value], 0(%[vector]) \n\t" - "addiu %[length], %[length], -1 \n\t" - "slt %[tmp1], %[value], %[minimum] \n\t" - "movn %[minimum], %[value], %[tmp1] \n\t" - "bgtz %[length], 1b \n\t" - " addiu %[vector], %[vector], 2 \n\t" + "1: \n\t" + "lh %[value], 0(%[vector]) \n\t" + "addiu %[length], %[length], -1 \n\t" + "slt %[tmp1], %[value], %[minimum] \n\t" + "movn %[minimum], %[value], %[tmp1] \n\t" + "bgtz %[length], 1b \n\t" + " addiu %[vector], %[vector], 2 \n\t" - ".set pop \n\t" + ".set pop \n\t" - : [tmp1] "=&r" (tmp1), [minimum] "+r" (minimum), [value] "=&r" (value) - : [vector] "r" (vector), [length] "r" (length) - : "memory" - ); + : [tmp1] "=&r"(tmp1), [minimum] "+r"(minimum), [value] "=&r"(value) + : [vector] "r"(vector), [length] "r"(length) + : "memory"); return minimum; } @@ -352,24 +342,23 @@ int32_t WebRtcSpl_MinValueW32_mips(const int32_t* vector, size_t length) { RTC_DCHECK_GT(length, 0); - __asm__ volatile ( - ".set push \n\t" - ".set noreorder \n\t" + __asm__ volatile( + ".set push \n\t" + ".set noreorder \n\t" - "1: \n\t" - "lw %[value], 
0(%[vector]) \n\t" - "addiu %[length], %[length], -1 \n\t" - "slt %[tmp1], %[value], %[minimum] \n\t" - "movn %[minimum], %[value], %[tmp1] \n\t" - "bgtz %[length], 1b \n\t" - " addiu %[vector], %[vector], 4 \n\t" + "1: \n\t" + "lw %[value], 0(%[vector]) \n\t" + "addiu %[length], %[length], -1 \n\t" + "slt %[tmp1], %[value], %[minimum] \n\t" + "movn %[minimum], %[value], %[tmp1] \n\t" + "bgtz %[length], 1b \n\t" + " addiu %[vector], %[vector], 4 \n\t" - ".set pop \n\t" + ".set pop \n\t" - : [tmp1] "=&r" (tmp1), [minimum] "+r" (minimum), [value] "=&r" (value) - : [vector] "r" (vector), [length] "r" (length) - : "memory" - ); + : [tmp1] "=&r"(tmp1), [minimum] "+r"(minimum), [value] "=&r"(value) + : [vector] "r"(vector), [length] "r"(length) + : "memory"); return minimum; } diff --git a/common_audio/signal_processing/min_max_operations_neon.c b/common_audio/signal_processing/min_max_operations_neon.c index e5b4b7c71b..7cc241e255 100644 --- a/common_audio/signal_processing/min_max_operations_neon.c +++ b/common_audio/signal_processing/min_max_operations_neon.c @@ -11,8 +11,8 @@ #include #include -#include "rtc_base/checks.h" #include "common_audio/signal_processing/include/signal_processing_library.h" +#include "rtc_base/checks.h" // Maximum absolute value of word16 vector. C version for generic platforms. int16_t WebRtcSpl_MaxAbsValueW16Neon(const int16_t* vector, size_t length) { @@ -282,8 +282,10 @@ int32_t WebRtcSpl_MinValueW32Neon(const int32_t* vector, size_t length) { } // Finds both the minimum and maximum elements in an array of 16-bit integers. -void WebRtcSpl_MinMaxW16Neon(const int16_t* vector, size_t length, - int16_t* min_val, int16_t* max_val) { +void WebRtcSpl_MinMaxW16Neon(const int16_t* vector, + size_t length, + int16_t* min_val, + int16_t* max_val) { int16_t minimum = WEBRTC_SPL_WORD16_MAX; int16_t maximum = WEBRTC_SPL_WORD16_MIN; size_t i = 0; diff --git a/common_audio/signal_processing/randomization_functions.c b/common_audio/signal_processing/randomization_functions.c index a445c572c7..adedad07a2 100644 --- a/common_audio/signal_processing/randomization_functions.c +++ b/common_audio/signal_processing/randomization_functions.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. 
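// [Editor's sketch, not part of the patch] The re-wrapped
// WebRtcSpl_MinMaxW16Neon prototype above returns both the minimum and the
// maximum of an int16_t array in a single pass. A scalar sketch of that
// contract (the helper name is illustrative only):

#include <stddef.h>
#include <stdint.h>

static void MinMaxW16_sketch(const int16_t* vector,
                             size_t length,
                             int16_t* min_val,
                             int16_t* max_val) {
  int16_t minimum = INT16_MAX;  // Counterpart of WEBRTC_SPL_WORD16_MAX.
  int16_t maximum = INT16_MIN;  // Counterpart of WEBRTC_SPL_WORD16_MIN.
  for (size_t i = 0; i < length; ++i) {
    if (vector[i] < minimum)
      minimum = vector[i];
    if (vector[i] > maximum)
      maximum = vector[i];
  }
  *min_val = minimum;
  *max_val = maximum;
}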
*/ - /* * This file contains implementations of the randomization functions * WebRtcSpl_RandU() @@ -24,71 +23,63 @@ static const uint32_t kMaxSeedUsed = 0x80000000; static const int16_t kRandNTable[] = { - 9178, -7260, 40, 10189, 4894, -3531, -13779, 14764, - -4008, -8884, -8990, 1008, 7368, 5184, 3251, -5817, - -9786, 5963, 1770, 8066, -7135, 10772, -2298, 1361, - 6484, 2241, -8633, 792, 199, -3344, 6553, -10079, - -15040, 95, 11608, -12469, 14161, -4176, 2476, 6403, - 13685, -16005, 6646, 2239, 10916, -3004, -602, -3141, - 2142, 14144, -5829, 5305, 8209, 4713, 2697, -5112, - 16092, -1210, -2891, -6631, -5360, -11878, -6781, -2739, - -6392, 536, 10923, 10872, 5059, -4748, -7770, 5477, - 38, -1025, -2892, 1638, 6304, 14375, -11028, 1553, - -1565, 10762, -393, 4040, 5257, 12310, 6554, -4799, - 4899, -6354, 1603, -1048, -2220, 8247, -186, -8944, - -12004, 2332, 4801, -4933, 6371, 131, 8614, -5927, - -8287, -22760, 4033, -15162, 3385, 3246, 3153, -5250, - 3766, 784, 6494, -62, 3531, -1582, 15572, 662, - -3952, -330, -3196, 669, 7236, -2678, -6569, 23319, - -8645, -741, 14830, -15976, 4903, 315, -11342, 10311, - 1858, -7777, 2145, 5436, 5677, -113, -10033, 826, - -1353, 17210, 7768, 986, -1471, 8291, -4982, 8207, - -14911, -6255, -2449, -11881, -7059, -11703, -4338, 8025, - 7538, -2823, -12490, 9470, -1613, -2529, -10092, -7807, - 9480, 6970, -12844, 5123, 3532, 4816, 4803, -8455, - -5045, 14032, -4378, -1643, 5756, -11041, -2732, -16618, - -6430, -18375, -3320, 6098, 5131, -4269, -8840, 2482, - -7048, 1547, -21890, -6505, -7414, -424, -11722, 7955, - 1653, -17299, 1823, 473, -9232, 3337, 1111, 873, - 4018, -8982, 9889, 3531, -11763, -3799, 7373, -4539, - 3231, 7054, -8537, 7616, 6244, 16635, 447, -2915, - 13967, 705, -2669, -1520, -1771, -16188, 5956, 5117, - 6371, -9936, -1448, 2480, 5128, 7550, -8130, 5236, - 8213, -6443, 7707, -1950, -13811, 7218, 7031, -3883, - 67, 5731, -2874, 13480, -3743, 9298, -3280, 3552, - -4425, -18, -3785, -9988, -5357, 5477, -11794, 2117, - 1416, -9935, 3376, 802, -5079, -8243, 12652, 66, - 3653, -2368, 6781, -21895, -7227, 2487, 7839, -385, - 6646, -7016, -4658, 5531, -1705, 834, 129, 3694, - -1343, 2238, -22640, -6417, -11139, 11301, -2945, -3494, - -5626, 185, -3615, -2041, -7972, -3106, -60, -23497, - -1566, 17064, 3519, 2518, 304, -6805, -10269, 2105, - 1936, -426, -736, -8122, -1467, 4238, -6939, -13309, - 360, 7402, -7970, 12576, 3287, 12194, -6289, -16006, - 9171, 4042, -9193, 9123, -2512, 6388, -4734, -8739, - 1028, -5406, -1696, 5889, -666, -4736, 4971, 3565, - 9362, -6292, 3876, -3652, -19666, 7523, -4061, 391, - -11773, 7502, -3763, 4929, -9478, 13278, 2805, 4496, - 7814, 16419, 12455, -14773, 2127, -2746, 3763, 4847, - 3698, 6978, 4751, -6957, -3581, -45, 6252, 1513, - -4797, -7925, 11270, 16188, -2359, -5269, 9376, -10777, - 7262, 20031, -6515, -2208, -5353, 8085, -1341, -1303, - 7333, 5576, 3625, 5763, -7931, 9833, -3371, -10305, - 6534, -13539, -9971, 997, 8464, -4064, -1495, 1857, - 13624, 5458, 9490, -11086, -4524, 12022, -550, -198, - 408, -8455, -7068, 10289, 9712, -3366, 9028, -7621, - -5243, 2362, 6909, 4672, -4933, -1799, 4709, -4563, - -62, -566, 1624, -7010, 14730, -17791, -3697, -2344, - -1741, 7099, -9509, -6855, -1989, 3495, -2289, 2031, - 12784, 891, 14189, -3963, -5683, 421, -12575, 1724, - -12682, -5970, -8169, 3143, -1824, -5488, -5130, 8536, - 12799, 794, 5738, 3459, -11689, -258, -3738, -3775, - -8742, 2333, 8312, -9383, 10331, 13119, 8398, 10644, - -19433, -6446, -16277, -11793, 16284, 9345, 15222, 15834, - 2009, -7349, 130, 
-14547, 338, -5998, 3337, 21492, - 2406, 7703, -951, 11196, -564, 3406, 2217, 4806, - 2374, -5797, 11839, 8940, -11874, 18213, 2855, 10492 -}; + 9178, -7260, 40, 10189, 4894, -3531, -13779, 14764, -4008, + -8884, -8990, 1008, 7368, 5184, 3251, -5817, -9786, 5963, + 1770, 8066, -7135, 10772, -2298, 1361, 6484, 2241, -8633, + 792, 199, -3344, 6553, -10079, -15040, 95, 11608, -12469, + 14161, -4176, 2476, 6403, 13685, -16005, 6646, 2239, 10916, + -3004, -602, -3141, 2142, 14144, -5829, 5305, 8209, 4713, + 2697, -5112, 16092, -1210, -2891, -6631, -5360, -11878, -6781, + -2739, -6392, 536, 10923, 10872, 5059, -4748, -7770, 5477, + 38, -1025, -2892, 1638, 6304, 14375, -11028, 1553, -1565, + 10762, -393, 4040, 5257, 12310, 6554, -4799, 4899, -6354, + 1603, -1048, -2220, 8247, -186, -8944, -12004, 2332, 4801, + -4933, 6371, 131, 8614, -5927, -8287, -22760, 4033, -15162, + 3385, 3246, 3153, -5250, 3766, 784, 6494, -62, 3531, + -1582, 15572, 662, -3952, -330, -3196, 669, 7236, -2678, + -6569, 23319, -8645, -741, 14830, -15976, 4903, 315, -11342, + 10311, 1858, -7777, 2145, 5436, 5677, -113, -10033, 826, + -1353, 17210, 7768, 986, -1471, 8291, -4982, 8207, -14911, + -6255, -2449, -11881, -7059, -11703, -4338, 8025, 7538, -2823, + -12490, 9470, -1613, -2529, -10092, -7807, 9480, 6970, -12844, + 5123, 3532, 4816, 4803, -8455, -5045, 14032, -4378, -1643, + 5756, -11041, -2732, -16618, -6430, -18375, -3320, 6098, 5131, + -4269, -8840, 2482, -7048, 1547, -21890, -6505, -7414, -424, + -11722, 7955, 1653, -17299, 1823, 473, -9232, 3337, 1111, + 873, 4018, -8982, 9889, 3531, -11763, -3799, 7373, -4539, + 3231, 7054, -8537, 7616, 6244, 16635, 447, -2915, 13967, + 705, -2669, -1520, -1771, -16188, 5956, 5117, 6371, -9936, + -1448, 2480, 5128, 7550, -8130, 5236, 8213, -6443, 7707, + -1950, -13811, 7218, 7031, -3883, 67, 5731, -2874, 13480, + -3743, 9298, -3280, 3552, -4425, -18, -3785, -9988, -5357, + 5477, -11794, 2117, 1416, -9935, 3376, 802, -5079, -8243, + 12652, 66, 3653, -2368, 6781, -21895, -7227, 2487, 7839, + -385, 6646, -7016, -4658, 5531, -1705, 834, 129, 3694, + -1343, 2238, -22640, -6417, -11139, 11301, -2945, -3494, -5626, + 185, -3615, -2041, -7972, -3106, -60, -23497, -1566, 17064, + 3519, 2518, 304, -6805, -10269, 2105, 1936, -426, -736, + -8122, -1467, 4238, -6939, -13309, 360, 7402, -7970, 12576, + 3287, 12194, -6289, -16006, 9171, 4042, -9193, 9123, -2512, + 6388, -4734, -8739, 1028, -5406, -1696, 5889, -666, -4736, + 4971, 3565, 9362, -6292, 3876, -3652, -19666, 7523, -4061, + 391, -11773, 7502, -3763, 4929, -9478, 13278, 2805, 4496, + 7814, 16419, 12455, -14773, 2127, -2746, 3763, 4847, 3698, + 6978, 4751, -6957, -3581, -45, 6252, 1513, -4797, -7925, + 11270, 16188, -2359, -5269, 9376, -10777, 7262, 20031, -6515, + -2208, -5353, 8085, -1341, -1303, 7333, 5576, 3625, 5763, + -7931, 9833, -3371, -10305, 6534, -13539, -9971, 997, 8464, + -4064, -1495, 1857, 13624, 5458, 9490, -11086, -4524, 12022, + -550, -198, 408, -8455, -7068, 10289, 9712, -3366, 9028, + -7621, -5243, 2362, 6909, 4672, -4933, -1799, 4709, -4563, + -62, -566, 1624, -7010, 14730, -17791, -3697, -2344, -1741, + 7099, -9509, -6855, -1989, 3495, -2289, 2031, 12784, 891, + 14189, -3963, -5683, 421, -12575, 1724, -12682, -5970, -8169, + 3143, -1824, -5488, -5130, 8536, 12799, 794, 5738, 3459, + -11689, -258, -3738, -3775, -8742, 2333, 8312, -9383, 10331, + 13119, 8398, 10644, -19433, -6446, -16277, -11793, 16284, 9345, + 15222, 15834, 2009, -7349, 130, -14547, 338, -5998, 3337, + 21492, 2406, 7703, -951, 11196, -564, 3406, 2217, 
4806, + 2374, -5797, 11839, 8940, -11874, 18213, 2855, 10492}; static uint32_t IncreaseSeed(uint32_t* seed) { seed[0] = (seed[0] * ((int32_t)69069) + 1) & (kMaxSeedUsed - 1); diff --git a/common_audio/signal_processing/refl_coef_to_lpc.c b/common_audio/signal_processing/refl_coef_to_lpc.c index b0858b2b0e..93e878726b 100644 --- a/common_audio/signal_processing/refl_coef_to_lpc.c +++ b/common_audio/signal_processing/refl_coef_to_lpc.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the function WebRtcSpl_ReflCoefToLpc(). * The description header can be found in signal_processing_library.h @@ -17,43 +16,39 @@ #include "common_audio/signal_processing/include/signal_processing_library.h" -void WebRtcSpl_ReflCoefToLpc(const int16_t *k, int use_order, int16_t *a) -{ - int16_t any[WEBRTC_SPL_MAX_LPC_ORDER + 1]; - int16_t *aptr, *aptr2, *anyptr; - const int16_t *kptr; - int m, i; - - kptr = k; - *a = 4096; // i.e., (Word16_MAX >> 3)+1. - *any = *a; - a[1] = *k >> 3; - - for (m = 1; m < use_order; m++) - { - kptr++; - aptr = a; - aptr++; - aptr2 = &a[m]; - anyptr = any; - anyptr++; - - any[m + 1] = *kptr >> 3; - for (i = 0; i < m; i++) - { - *anyptr = *aptr + (int16_t)((*aptr2 * *kptr) >> 15); - anyptr++; - aptr++; - aptr2--; - } +void WebRtcSpl_ReflCoefToLpc(const int16_t* k, int use_order, int16_t* a) { + int16_t any[WEBRTC_SPL_MAX_LPC_ORDER + 1]; + int16_t *aptr, *aptr2, *anyptr; + const int16_t* kptr; + int m, i; + + kptr = k; + *a = 4096; // i.e., (Word16_MAX >> 3)+1. + *any = *a; + a[1] = *k >> 3; + + for (m = 1; m < use_order; m++) { + kptr++; + aptr = a; + aptr++; + aptr2 = &a[m]; + anyptr = any; + anyptr++; + + any[m + 1] = *kptr >> 3; + for (i = 0; i < m; i++) { + *anyptr = *aptr + (int16_t)((*aptr2 * *kptr) >> 15); + anyptr++; + aptr++; + aptr2--; + } - aptr = a; - anyptr = any; - for (i = 0; i < (m + 2); i++) - { - *aptr = *anyptr; - aptr++; - anyptr++; - } + aptr = a; + anyptr = any; + for (i = 0; i < (m + 2); i++) { + *aptr = *anyptr; + aptr++; + anyptr++; } + } } diff --git a/common_audio/signal_processing/resample.c b/common_audio/signal_processing/resample.c index d4b2736476..4a534aefe1 100644 --- a/common_audio/signal_processing/resample.c +++ b/common_audio/signal_processing/resample.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the resampling functions for 22 kHz. 
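// [Editor's sketch, not part of the patch] WebRtcSpl_ReflCoefToLpc above is a
// fixed-point step-up recursion: reflection coefficients arrive in Q15, the
// predictor coefficients leave in Q12 (hence a[0] = 4096 and the >> 3 and
// >> 15 scalings). Ignoring the fixed-point details, the same recursion in
// floating point, with the same sign convention as the code, reads:

static void ReflCoefToLpc_sketch(const float* k, int order, float* a) {
  float next[order + 1];  // Scratch row, like `any` in the fixed-point code.
  a[0] = 1.0f;            // Counterpart of a[0] = 4096, i.e. 1.0 in Q12.
  a[1] = k[0];
  for (int m = 1; m < order; ++m) {
    next[m + 1] = k[m];
    for (int i = 1; i <= m; ++i)
      next[i] = a[i] + k[m] * a[m + 1 - i];  // Step-up update.
    for (int i = 1; i <= m + 1; ++i)
      a[i] = next[i];  // a[0] stays 1.0 throughout.
  }
}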
* The description header can be found in signal_processing_library.h @@ -19,89 +18,88 @@ #include "common_audio/signal_processing/resample_by_2_internal.h" // Declaration of internally used functions -static void WebRtcSpl_32khzTo22khzIntToShort(const int32_t *In, int16_t *Out, +static void WebRtcSpl_32khzTo22khzIntToShort(const int32_t* In, + int16_t* Out, int32_t K); -void WebRtcSpl_32khzTo22khzIntToInt(const int32_t *In, int32_t *Out, - int32_t K); +void WebRtcSpl_32khzTo22khzIntToInt(const int32_t* In, int32_t* Out, int32_t K); // interpolation coefficients static const int16_t kCoefficients32To22[5][9] = { - {127, -712, 2359, -6333, 23456, 16775, -3695, 945, -154}, - {-39, 230, -830, 2785, 32366, -2324, 760, -218, 38}, - {117, -663, 2222, -6133, 26634, 13070, -3174, 831, -137}, - {-77, 457, -1677, 5958, 31175, -4136, 1405, -408, 71}, - { 98, -560, 1900, -5406, 29240, 9423, -2480, 663, -110} -}; + {127, -712, 2359, -6333, 23456, 16775, -3695, 945, -154}, + {-39, 230, -830, 2785, 32366, -2324, 760, -218, 38}, + {117, -663, 2222, -6133, 26634, 13070, -3174, 831, -137}, + {-77, 457, -1677, 5958, 31175, -4136, 1405, -408, 71}, + {98, -560, 1900, -5406, 29240, 9423, -2480, 663, -110}}; ////////////////////// // 22 kHz -> 16 kHz // ////////////////////// // number of subblocks; options: 1, 2, 4, 5, 10 -#define SUB_BLOCKS_22_16 5 +#define SUB_BLOCKS_22_16 5 // 22 -> 16 resampler -void WebRtcSpl_Resample22khzTo16khz(const int16_t* in, int16_t* out, - WebRtcSpl_State22khzTo16khz* state, int32_t* tmpmem) -{ - int k; - - // process two blocks of 10/SUB_BLOCKS_22_16 ms (to reduce temp buffer size) - for (k = 0; k < SUB_BLOCKS_22_16; k++) - { - ///// 22 --> 44 ///// - // int16_t in[220/SUB_BLOCKS_22_16] - // int32_t out[440/SUB_BLOCKS_22_16] - ///// - WebRtcSpl_UpBy2ShortToInt(in, 220 / SUB_BLOCKS_22_16, tmpmem + 16, state->S_22_44); - - ///// 44 --> 32 ///// - // int32_t in[440/SUB_BLOCKS_22_16] - // int32_t out[320/SUB_BLOCKS_22_16] - ///// - // copy state to and from input array - tmpmem[8] = state->S_44_32[0]; - tmpmem[9] = state->S_44_32[1]; - tmpmem[10] = state->S_44_32[2]; - tmpmem[11] = state->S_44_32[3]; - tmpmem[12] = state->S_44_32[4]; - tmpmem[13] = state->S_44_32[5]; - tmpmem[14] = state->S_44_32[6]; - tmpmem[15] = state->S_44_32[7]; - state->S_44_32[0] = tmpmem[440 / SUB_BLOCKS_22_16 + 8]; - state->S_44_32[1] = tmpmem[440 / SUB_BLOCKS_22_16 + 9]; - state->S_44_32[2] = tmpmem[440 / SUB_BLOCKS_22_16 + 10]; - state->S_44_32[3] = tmpmem[440 / SUB_BLOCKS_22_16 + 11]; - state->S_44_32[4] = tmpmem[440 / SUB_BLOCKS_22_16 + 12]; - state->S_44_32[5] = tmpmem[440 / SUB_BLOCKS_22_16 + 13]; - state->S_44_32[6] = tmpmem[440 / SUB_BLOCKS_22_16 + 14]; - state->S_44_32[7] = tmpmem[440 / SUB_BLOCKS_22_16 + 15]; - - WebRtcSpl_Resample44khzTo32khz(tmpmem + 8, tmpmem, 40 / SUB_BLOCKS_22_16); - - ///// 32 --> 16 ///// - // int32_t in[320/SUB_BLOCKS_22_16] - // int32_t out[160/SUB_BLOCKS_22_16] - ///// - WebRtcSpl_DownBy2IntToShort(tmpmem, 320 / SUB_BLOCKS_22_16, out, state->S_32_16); - - // move input/output pointers 10/SUB_BLOCKS_22_16 ms seconds ahead - in += 220 / SUB_BLOCKS_22_16; - out += 160 / SUB_BLOCKS_22_16; - } +void WebRtcSpl_Resample22khzTo16khz(const int16_t* in, + int16_t* out, + WebRtcSpl_State22khzTo16khz* state, + int32_t* tmpmem) { + int k; + + // process two blocks of 10/SUB_BLOCKS_22_16 ms (to reduce temp buffer size) + for (k = 0; k < SUB_BLOCKS_22_16; k++) { + ///// 22 --> 44 ///// + // int16_t in[220/SUB_BLOCKS_22_16] + // int32_t out[440/SUB_BLOCKS_22_16] + ///// + 
WebRtcSpl_UpBy2ShortToInt(in, 220 / SUB_BLOCKS_22_16, tmpmem + 16, + state->S_22_44); + + ///// 44 --> 32 ///// + // int32_t in[440/SUB_BLOCKS_22_16] + // int32_t out[320/SUB_BLOCKS_22_16] + ///// + // copy state to and from input array + tmpmem[8] = state->S_44_32[0]; + tmpmem[9] = state->S_44_32[1]; + tmpmem[10] = state->S_44_32[2]; + tmpmem[11] = state->S_44_32[3]; + tmpmem[12] = state->S_44_32[4]; + tmpmem[13] = state->S_44_32[5]; + tmpmem[14] = state->S_44_32[6]; + tmpmem[15] = state->S_44_32[7]; + state->S_44_32[0] = tmpmem[440 / SUB_BLOCKS_22_16 + 8]; + state->S_44_32[1] = tmpmem[440 / SUB_BLOCKS_22_16 + 9]; + state->S_44_32[2] = tmpmem[440 / SUB_BLOCKS_22_16 + 10]; + state->S_44_32[3] = tmpmem[440 / SUB_BLOCKS_22_16 + 11]; + state->S_44_32[4] = tmpmem[440 / SUB_BLOCKS_22_16 + 12]; + state->S_44_32[5] = tmpmem[440 / SUB_BLOCKS_22_16 + 13]; + state->S_44_32[6] = tmpmem[440 / SUB_BLOCKS_22_16 + 14]; + state->S_44_32[7] = tmpmem[440 / SUB_BLOCKS_22_16 + 15]; + + WebRtcSpl_Resample44khzTo32khz(tmpmem + 8, tmpmem, 40 / SUB_BLOCKS_22_16); + + ///// 32 --> 16 ///// + // int32_t in[320/SUB_BLOCKS_22_16] + // int32_t out[160/SUB_BLOCKS_22_16] + ///// + WebRtcSpl_DownBy2IntToShort(tmpmem, 320 / SUB_BLOCKS_22_16, out, + state->S_32_16); + + // move input/output pointers 10/SUB_BLOCKS_22_16 ms seconds ahead + in += 220 / SUB_BLOCKS_22_16; + out += 160 / SUB_BLOCKS_22_16; + } } // initialize state of 22 -> 16 resampler -void WebRtcSpl_ResetResample22khzTo16khz(WebRtcSpl_State22khzTo16khz* state) -{ - int k; - for (k = 0; k < 8; k++) - { - state->S_22_44[k] = 0; - state->S_44_32[k] = 0; - state->S_32_16[k] = 0; - } +void WebRtcSpl_ResetResample22khzTo16khz(WebRtcSpl_State22khzTo16khz* state) { + int k; + for (k = 0; k < 8; k++) { + state->S_22_44[k] = 0; + state->S_44_32[k] = 0; + state->S_32_16[k] = 0; + } } ////////////////////// @@ -109,62 +107,61 @@ void WebRtcSpl_ResetResample22khzTo16khz(WebRtcSpl_State22khzTo16khz* state) ////////////////////// // number of subblocks; options: 1, 2, 4, 5, 10 -#define SUB_BLOCKS_16_22 4 +#define SUB_BLOCKS_16_22 4 // 16 -> 22 resampler -void WebRtcSpl_Resample16khzTo22khz(const int16_t* in, int16_t* out, - WebRtcSpl_State16khzTo22khz* state, int32_t* tmpmem) -{ - int k; - - // process two blocks of 10/SUB_BLOCKS_16_22 ms (to reduce temp buffer size) - for (k = 0; k < SUB_BLOCKS_16_22; k++) - { - ///// 16 --> 32 ///// - // int16_t in[160/SUB_BLOCKS_16_22] - // int32_t out[320/SUB_BLOCKS_16_22] - ///// - WebRtcSpl_UpBy2ShortToInt(in, 160 / SUB_BLOCKS_16_22, tmpmem + 8, state->S_16_32); - - ///// 32 --> 22 ///// - // int32_t in[320/SUB_BLOCKS_16_22] - // int32_t out[220/SUB_BLOCKS_16_22] - ///// - // copy state to and from input array - tmpmem[0] = state->S_32_22[0]; - tmpmem[1] = state->S_32_22[1]; - tmpmem[2] = state->S_32_22[2]; - tmpmem[3] = state->S_32_22[3]; - tmpmem[4] = state->S_32_22[4]; - tmpmem[5] = state->S_32_22[5]; - tmpmem[6] = state->S_32_22[6]; - tmpmem[7] = state->S_32_22[7]; - state->S_32_22[0] = tmpmem[320 / SUB_BLOCKS_16_22]; - state->S_32_22[1] = tmpmem[320 / SUB_BLOCKS_16_22 + 1]; - state->S_32_22[2] = tmpmem[320 / SUB_BLOCKS_16_22 + 2]; - state->S_32_22[3] = tmpmem[320 / SUB_BLOCKS_16_22 + 3]; - state->S_32_22[4] = tmpmem[320 / SUB_BLOCKS_16_22 + 4]; - state->S_32_22[5] = tmpmem[320 / SUB_BLOCKS_16_22 + 5]; - state->S_32_22[6] = tmpmem[320 / SUB_BLOCKS_16_22 + 6]; - state->S_32_22[7] = tmpmem[320 / SUB_BLOCKS_16_22 + 7]; - - WebRtcSpl_32khzTo22khzIntToShort(tmpmem, out, 20 / SUB_BLOCKS_16_22); - - // move input/output pointers 
10/SUB_BLOCKS_16_22 ms seconds ahead - in += 160 / SUB_BLOCKS_16_22; - out += 220 / SUB_BLOCKS_16_22; - } +void WebRtcSpl_Resample16khzTo22khz(const int16_t* in, + int16_t* out, + WebRtcSpl_State16khzTo22khz* state, + int32_t* tmpmem) { + int k; + + // process two blocks of 10/SUB_BLOCKS_16_22 ms (to reduce temp buffer size) + for (k = 0; k < SUB_BLOCKS_16_22; k++) { + ///// 16 --> 32 ///// + // int16_t in[160/SUB_BLOCKS_16_22] + // int32_t out[320/SUB_BLOCKS_16_22] + ///// + WebRtcSpl_UpBy2ShortToInt(in, 160 / SUB_BLOCKS_16_22, tmpmem + 8, + state->S_16_32); + + ///// 32 --> 22 ///// + // int32_t in[320/SUB_BLOCKS_16_22] + // int32_t out[220/SUB_BLOCKS_16_22] + ///// + // copy state to and from input array + tmpmem[0] = state->S_32_22[0]; + tmpmem[1] = state->S_32_22[1]; + tmpmem[2] = state->S_32_22[2]; + tmpmem[3] = state->S_32_22[3]; + tmpmem[4] = state->S_32_22[4]; + tmpmem[5] = state->S_32_22[5]; + tmpmem[6] = state->S_32_22[6]; + tmpmem[7] = state->S_32_22[7]; + state->S_32_22[0] = tmpmem[320 / SUB_BLOCKS_16_22]; + state->S_32_22[1] = tmpmem[320 / SUB_BLOCKS_16_22 + 1]; + state->S_32_22[2] = tmpmem[320 / SUB_BLOCKS_16_22 + 2]; + state->S_32_22[3] = tmpmem[320 / SUB_BLOCKS_16_22 + 3]; + state->S_32_22[4] = tmpmem[320 / SUB_BLOCKS_16_22 + 4]; + state->S_32_22[5] = tmpmem[320 / SUB_BLOCKS_16_22 + 5]; + state->S_32_22[6] = tmpmem[320 / SUB_BLOCKS_16_22 + 6]; + state->S_32_22[7] = tmpmem[320 / SUB_BLOCKS_16_22 + 7]; + + WebRtcSpl_32khzTo22khzIntToShort(tmpmem, out, 20 / SUB_BLOCKS_16_22); + + // move input/output pointers 10/SUB_BLOCKS_16_22 ms seconds ahead + in += 160 / SUB_BLOCKS_16_22; + out += 220 / SUB_BLOCKS_16_22; + } } // initialize state of 16 -> 22 resampler -void WebRtcSpl_ResetResample16khzTo22khz(WebRtcSpl_State16khzTo22khz* state) -{ - int k; - for (k = 0; k < 8; k++) - { - state->S_16_32[k] = 0; - state->S_32_22[k] = 0; - } +void WebRtcSpl_ResetResample16khzTo22khz(WebRtcSpl_State16khzTo22khz* state) { + int k; + for (k = 0; k < 8; k++) { + state->S_16_32[k] = 0; + state->S_32_22[k] = 0; + } } ////////////////////// @@ -172,70 +169,70 @@ void WebRtcSpl_ResetResample16khzTo22khz(WebRtcSpl_State16khzTo22khz* state) ////////////////////// // number of subblocks; options: 1, 2, 5, 10 -#define SUB_BLOCKS_22_8 2 +#define SUB_BLOCKS_22_8 2 // 22 -> 8 resampler -void WebRtcSpl_Resample22khzTo8khz(const int16_t* in, int16_t* out, - WebRtcSpl_State22khzTo8khz* state, int32_t* tmpmem) -{ - int k; - - // process two blocks of 10/SUB_BLOCKS_22_8 ms (to reduce temp buffer size) - for (k = 0; k < SUB_BLOCKS_22_8; k++) - { - ///// 22 --> 22 lowpass ///// - // int16_t in[220/SUB_BLOCKS_22_8] - // int32_t out[220/SUB_BLOCKS_22_8] - ///// - WebRtcSpl_LPBy2ShortToInt(in, 220 / SUB_BLOCKS_22_8, tmpmem + 16, state->S_22_22); - - ///// 22 --> 16 ///// - // int32_t in[220/SUB_BLOCKS_22_8] - // int32_t out[160/SUB_BLOCKS_22_8] - ///// - // copy state to and from input array - tmpmem[8] = state->S_22_16[0]; - tmpmem[9] = state->S_22_16[1]; - tmpmem[10] = state->S_22_16[2]; - tmpmem[11] = state->S_22_16[3]; - tmpmem[12] = state->S_22_16[4]; - tmpmem[13] = state->S_22_16[5]; - tmpmem[14] = state->S_22_16[6]; - tmpmem[15] = state->S_22_16[7]; - state->S_22_16[0] = tmpmem[220 / SUB_BLOCKS_22_8 + 8]; - state->S_22_16[1] = tmpmem[220 / SUB_BLOCKS_22_8 + 9]; - state->S_22_16[2] = tmpmem[220 / SUB_BLOCKS_22_8 + 10]; - state->S_22_16[3] = tmpmem[220 / SUB_BLOCKS_22_8 + 11]; - state->S_22_16[4] = tmpmem[220 / SUB_BLOCKS_22_8 + 12]; - state->S_22_16[5] = tmpmem[220 / SUB_BLOCKS_22_8 + 13]; - state->S_22_16[6] 
= tmpmem[220 / SUB_BLOCKS_22_8 + 14]; - state->S_22_16[7] = tmpmem[220 / SUB_BLOCKS_22_8 + 15]; - - WebRtcSpl_Resample44khzTo32khz(tmpmem + 8, tmpmem, 20 / SUB_BLOCKS_22_8); - - ///// 16 --> 8 ///// - // int32_t in[160/SUB_BLOCKS_22_8] - // int32_t out[80/SUB_BLOCKS_22_8] - ///// - WebRtcSpl_DownBy2IntToShort(tmpmem, 160 / SUB_BLOCKS_22_8, out, state->S_16_8); - - // move input/output pointers 10/SUB_BLOCKS_22_8 ms seconds ahead - in += 220 / SUB_BLOCKS_22_8; - out += 80 / SUB_BLOCKS_22_8; - } +void WebRtcSpl_Resample22khzTo8khz(const int16_t* in, + int16_t* out, + WebRtcSpl_State22khzTo8khz* state, + int32_t* tmpmem) { + int k; + + // process two blocks of 10/SUB_BLOCKS_22_8 ms (to reduce temp buffer size) + for (k = 0; k < SUB_BLOCKS_22_8; k++) { + ///// 22 --> 22 lowpass ///// + // int16_t in[220/SUB_BLOCKS_22_8] + // int32_t out[220/SUB_BLOCKS_22_8] + ///// + WebRtcSpl_LPBy2ShortToInt(in, 220 / SUB_BLOCKS_22_8, tmpmem + 16, + state->S_22_22); + + ///// 22 --> 16 ///// + // int32_t in[220/SUB_BLOCKS_22_8] + // int32_t out[160/SUB_BLOCKS_22_8] + ///// + // copy state to and from input array + tmpmem[8] = state->S_22_16[0]; + tmpmem[9] = state->S_22_16[1]; + tmpmem[10] = state->S_22_16[2]; + tmpmem[11] = state->S_22_16[3]; + tmpmem[12] = state->S_22_16[4]; + tmpmem[13] = state->S_22_16[5]; + tmpmem[14] = state->S_22_16[6]; + tmpmem[15] = state->S_22_16[7]; + state->S_22_16[0] = tmpmem[220 / SUB_BLOCKS_22_8 + 8]; + state->S_22_16[1] = tmpmem[220 / SUB_BLOCKS_22_8 + 9]; + state->S_22_16[2] = tmpmem[220 / SUB_BLOCKS_22_8 + 10]; + state->S_22_16[3] = tmpmem[220 / SUB_BLOCKS_22_8 + 11]; + state->S_22_16[4] = tmpmem[220 / SUB_BLOCKS_22_8 + 12]; + state->S_22_16[5] = tmpmem[220 / SUB_BLOCKS_22_8 + 13]; + state->S_22_16[6] = tmpmem[220 / SUB_BLOCKS_22_8 + 14]; + state->S_22_16[7] = tmpmem[220 / SUB_BLOCKS_22_8 + 15]; + + WebRtcSpl_Resample44khzTo32khz(tmpmem + 8, tmpmem, 20 / SUB_BLOCKS_22_8); + + ///// 16 --> 8 ///// + // int32_t in[160/SUB_BLOCKS_22_8] + // int32_t out[80/SUB_BLOCKS_22_8] + ///// + WebRtcSpl_DownBy2IntToShort(tmpmem, 160 / SUB_BLOCKS_22_8, out, + state->S_16_8); + + // move input/output pointers 10/SUB_BLOCKS_22_8 ms seconds ahead + in += 220 / SUB_BLOCKS_22_8; + out += 80 / SUB_BLOCKS_22_8; + } } // initialize state of 22 -> 8 resampler -void WebRtcSpl_ResetResample22khzTo8khz(WebRtcSpl_State22khzTo8khz* state) -{ - int k; - for (k = 0; k < 8; k++) - { - state->S_22_22[k] = 0; - state->S_22_22[k + 8] = 0; - state->S_22_16[k] = 0; - state->S_16_8[k] = 0; - } +void WebRtcSpl_ResetResample22khzTo8khz(WebRtcSpl_State22khzTo8khz* state) { + int k; + for (k = 0; k < 8; k++) { + state->S_22_22[k] = 0; + state->S_22_22[k + 8] = 0; + state->S_22_16[k] = 0; + state->S_16_8[k] = 0; + } } ////////////////////// @@ -243,217 +240,223 @@ void WebRtcSpl_ResetResample22khzTo8khz(WebRtcSpl_State22khzTo8khz* state) ////////////////////// // number of subblocks; options: 1, 2, 5, 10 -#define SUB_BLOCKS_8_22 2 +#define SUB_BLOCKS_8_22 2 // 8 -> 22 resampler -void WebRtcSpl_Resample8khzTo22khz(const int16_t* in, int16_t* out, - WebRtcSpl_State8khzTo22khz* state, int32_t* tmpmem) -{ - int k; - - // process two blocks of 10/SUB_BLOCKS_8_22 ms (to reduce temp buffer size) - for (k = 0; k < SUB_BLOCKS_8_22; k++) - { - ///// 8 --> 16 ///// - // int16_t in[80/SUB_BLOCKS_8_22] - // int32_t out[160/SUB_BLOCKS_8_22] - ///// - WebRtcSpl_UpBy2ShortToInt(in, 80 / SUB_BLOCKS_8_22, tmpmem + 18, state->S_8_16); - - ///// 16 --> 11 ///// - // int32_t in[160/SUB_BLOCKS_8_22] - // int32_t out[110/SUB_BLOCKS_8_22] - 
///// - // copy state to and from input array - tmpmem[10] = state->S_16_11[0]; - tmpmem[11] = state->S_16_11[1]; - tmpmem[12] = state->S_16_11[2]; - tmpmem[13] = state->S_16_11[3]; - tmpmem[14] = state->S_16_11[4]; - tmpmem[15] = state->S_16_11[5]; - tmpmem[16] = state->S_16_11[6]; - tmpmem[17] = state->S_16_11[7]; - state->S_16_11[0] = tmpmem[160 / SUB_BLOCKS_8_22 + 10]; - state->S_16_11[1] = tmpmem[160 / SUB_BLOCKS_8_22 + 11]; - state->S_16_11[2] = tmpmem[160 / SUB_BLOCKS_8_22 + 12]; - state->S_16_11[3] = tmpmem[160 / SUB_BLOCKS_8_22 + 13]; - state->S_16_11[4] = tmpmem[160 / SUB_BLOCKS_8_22 + 14]; - state->S_16_11[5] = tmpmem[160 / SUB_BLOCKS_8_22 + 15]; - state->S_16_11[6] = tmpmem[160 / SUB_BLOCKS_8_22 + 16]; - state->S_16_11[7] = tmpmem[160 / SUB_BLOCKS_8_22 + 17]; - - WebRtcSpl_32khzTo22khzIntToInt(tmpmem + 10, tmpmem, 10 / SUB_BLOCKS_8_22); - - ///// 11 --> 22 ///// - // int32_t in[110/SUB_BLOCKS_8_22] - // int16_t out[220/SUB_BLOCKS_8_22] - ///// - WebRtcSpl_UpBy2IntToShort(tmpmem, 110 / SUB_BLOCKS_8_22, out, state->S_11_22); - - // move input/output pointers 10/SUB_BLOCKS_8_22 ms seconds ahead - in += 80 / SUB_BLOCKS_8_22; - out += 220 / SUB_BLOCKS_8_22; - } +void WebRtcSpl_Resample8khzTo22khz(const int16_t* in, + int16_t* out, + WebRtcSpl_State8khzTo22khz* state, + int32_t* tmpmem) { + int k; + + // process two blocks of 10/SUB_BLOCKS_8_22 ms (to reduce temp buffer size) + for (k = 0; k < SUB_BLOCKS_8_22; k++) { + ///// 8 --> 16 ///// + // int16_t in[80/SUB_BLOCKS_8_22] + // int32_t out[160/SUB_BLOCKS_8_22] + ///// + WebRtcSpl_UpBy2ShortToInt(in, 80 / SUB_BLOCKS_8_22, tmpmem + 18, + state->S_8_16); + + ///// 16 --> 11 ///// + // int32_t in[160/SUB_BLOCKS_8_22] + // int32_t out[110/SUB_BLOCKS_8_22] + ///// + // copy state to and from input array + tmpmem[10] = state->S_16_11[0]; + tmpmem[11] = state->S_16_11[1]; + tmpmem[12] = state->S_16_11[2]; + tmpmem[13] = state->S_16_11[3]; + tmpmem[14] = state->S_16_11[4]; + tmpmem[15] = state->S_16_11[5]; + tmpmem[16] = state->S_16_11[6]; + tmpmem[17] = state->S_16_11[7]; + state->S_16_11[0] = tmpmem[160 / SUB_BLOCKS_8_22 + 10]; + state->S_16_11[1] = tmpmem[160 / SUB_BLOCKS_8_22 + 11]; + state->S_16_11[2] = tmpmem[160 / SUB_BLOCKS_8_22 + 12]; + state->S_16_11[3] = tmpmem[160 / SUB_BLOCKS_8_22 + 13]; + state->S_16_11[4] = tmpmem[160 / SUB_BLOCKS_8_22 + 14]; + state->S_16_11[5] = tmpmem[160 / SUB_BLOCKS_8_22 + 15]; + state->S_16_11[6] = tmpmem[160 / SUB_BLOCKS_8_22 + 16]; + state->S_16_11[7] = tmpmem[160 / SUB_BLOCKS_8_22 + 17]; + + WebRtcSpl_32khzTo22khzIntToInt(tmpmem + 10, tmpmem, 10 / SUB_BLOCKS_8_22); + + ///// 11 --> 22 ///// + // int32_t in[110/SUB_BLOCKS_8_22] + // int16_t out[220/SUB_BLOCKS_8_22] + ///// + WebRtcSpl_UpBy2IntToShort(tmpmem, 110 / SUB_BLOCKS_8_22, out, + state->S_11_22); + + // move input/output pointers 10/SUB_BLOCKS_8_22 ms seconds ahead + in += 80 / SUB_BLOCKS_8_22; + out += 220 / SUB_BLOCKS_8_22; + } } // initialize state of 8 -> 22 resampler -void WebRtcSpl_ResetResample8khzTo22khz(WebRtcSpl_State8khzTo22khz* state) -{ - int k; - for (k = 0; k < 8; k++) - { - state->S_8_16[k] = 0; - state->S_16_11[k] = 0; - state->S_11_22[k] = 0; - } +void WebRtcSpl_ResetResample8khzTo22khz(WebRtcSpl_State8khzTo22khz* state) { + int k; + for (k = 0; k < 8; k++) { + state->S_8_16[k] = 0; + state->S_16_11[k] = 0; + state->S_11_22[k] = 0; + } } // compute two inner-products and store them to output array -static void WebRtcSpl_DotProdIntToInt(const int32_t* in1, const int32_t* in2, - const int16_t* coef_ptr, int32_t* out1, - 
int32_t* out2) -{ - int32_t tmp1 = 16384; - int32_t tmp2 = 16384; - int16_t coef; - - coef = coef_ptr[0]; - tmp1 += coef * in1[0]; - tmp2 += coef * in2[-0]; - - coef = coef_ptr[1]; - tmp1 += coef * in1[1]; - tmp2 += coef * in2[-1]; - - coef = coef_ptr[2]; - tmp1 += coef * in1[2]; - tmp2 += coef * in2[-2]; - - coef = coef_ptr[3]; - tmp1 += coef * in1[3]; - tmp2 += coef * in2[-3]; - - coef = coef_ptr[4]; - tmp1 += coef * in1[4]; - tmp2 += coef * in2[-4]; - - coef = coef_ptr[5]; - tmp1 += coef * in1[5]; - tmp2 += coef * in2[-5]; - - coef = coef_ptr[6]; - tmp1 += coef * in1[6]; - tmp2 += coef * in2[-6]; - - coef = coef_ptr[7]; - tmp1 += coef * in1[7]; - tmp2 += coef * in2[-7]; - - coef = coef_ptr[8]; - *out1 = tmp1 + coef * in1[8]; - *out2 = tmp2 + coef * in2[-8]; +static void WebRtcSpl_DotProdIntToInt(const int32_t* in1, + const int32_t* in2, + const int16_t* coef_ptr, + int32_t* out1, + int32_t* out2) { + int32_t tmp1 = 16384; + int32_t tmp2 = 16384; + int16_t coef; + + coef = coef_ptr[0]; + tmp1 += coef * in1[0]; + tmp2 += coef * in2[-0]; + + coef = coef_ptr[1]; + tmp1 += coef * in1[1]; + tmp2 += coef * in2[-1]; + + coef = coef_ptr[2]; + tmp1 += coef * in1[2]; + tmp2 += coef * in2[-2]; + + coef = coef_ptr[3]; + tmp1 += coef * in1[3]; + tmp2 += coef * in2[-3]; + + coef = coef_ptr[4]; + tmp1 += coef * in1[4]; + tmp2 += coef * in2[-4]; + + coef = coef_ptr[5]; + tmp1 += coef * in1[5]; + tmp2 += coef * in2[-5]; + + coef = coef_ptr[6]; + tmp1 += coef * in1[6]; + tmp2 += coef * in2[-6]; + + coef = coef_ptr[7]; + tmp1 += coef * in1[7]; + tmp2 += coef * in2[-7]; + + coef = coef_ptr[8]; + *out1 = tmp1 + coef * in1[8]; + *out2 = tmp2 + coef * in2[-8]; } // compute two inner-products and store them to output array -static void WebRtcSpl_DotProdIntToShort(const int32_t* in1, const int32_t* in2, - const int16_t* coef_ptr, int16_t* out1, - int16_t* out2) -{ - int32_t tmp1 = 16384; - int32_t tmp2 = 16384; - int16_t coef; - - coef = coef_ptr[0]; - tmp1 += coef * in1[0]; - tmp2 += coef * in2[-0]; - - coef = coef_ptr[1]; - tmp1 += coef * in1[1]; - tmp2 += coef * in2[-1]; - - coef = coef_ptr[2]; - tmp1 += coef * in1[2]; - tmp2 += coef * in2[-2]; - - coef = coef_ptr[3]; - tmp1 += coef * in1[3]; - tmp2 += coef * in2[-3]; - - coef = coef_ptr[4]; - tmp1 += coef * in1[4]; - tmp2 += coef * in2[-4]; - - coef = coef_ptr[5]; - tmp1 += coef * in1[5]; - tmp2 += coef * in2[-5]; - - coef = coef_ptr[6]; - tmp1 += coef * in1[6]; - tmp2 += coef * in2[-6]; - - coef = coef_ptr[7]; - tmp1 += coef * in1[7]; - tmp2 += coef * in2[-7]; - - coef = coef_ptr[8]; - tmp1 += coef * in1[8]; - tmp2 += coef * in2[-8]; - - // scale down, round and saturate - tmp1 >>= 15; - if (tmp1 > (int32_t)0x00007FFF) - tmp1 = 0x00007FFF; - if (tmp1 < (int32_t)0xFFFF8000) - tmp1 = 0xFFFF8000; - tmp2 >>= 15; - if (tmp2 > (int32_t)0x00007FFF) - tmp2 = 0x00007FFF; - if (tmp2 < (int32_t)0xFFFF8000) - tmp2 = 0xFFFF8000; - *out1 = (int16_t)tmp1; - *out2 = (int16_t)tmp2; +static void WebRtcSpl_DotProdIntToShort(const int32_t* in1, + const int32_t* in2, + const int16_t* coef_ptr, + int16_t* out1, + int16_t* out2) { + int32_t tmp1 = 16384; + int32_t tmp2 = 16384; + int16_t coef; + + coef = coef_ptr[0]; + tmp1 += coef * in1[0]; + tmp2 += coef * in2[-0]; + + coef = coef_ptr[1]; + tmp1 += coef * in1[1]; + tmp2 += coef * in2[-1]; + + coef = coef_ptr[2]; + tmp1 += coef * in1[2]; + tmp2 += coef * in2[-2]; + + coef = coef_ptr[3]; + tmp1 += coef * in1[3]; + tmp2 += coef * in2[-3]; + + coef = coef_ptr[4]; + tmp1 += coef * in1[4]; + tmp2 += coef * in2[-4]; + + coef = 
coef_ptr[5]; + tmp1 += coef * in1[5]; + tmp2 += coef * in2[-5]; + + coef = coef_ptr[6]; + tmp1 += coef * in1[6]; + tmp2 += coef * in2[-6]; + + coef = coef_ptr[7]; + tmp1 += coef * in1[7]; + tmp2 += coef * in2[-7]; + + coef = coef_ptr[8]; + tmp1 += coef * in1[8]; + tmp2 += coef * in2[-8]; + + // scale down, round and saturate + tmp1 >>= 15; + if (tmp1 > (int32_t)0x00007FFF) + tmp1 = 0x00007FFF; + if (tmp1 < (int32_t)0xFFFF8000) + tmp1 = 0xFFFF8000; + tmp2 >>= 15; + if (tmp2 > (int32_t)0x00007FFF) + tmp2 = 0x00007FFF; + if (tmp2 < (int32_t)0xFFFF8000) + tmp2 = 0xFFFF8000; + *out1 = (int16_t)tmp1; + *out2 = (int16_t)tmp2; } // Resampling ratio: 11/16 // input: int32_t (normalized, not saturated) :: size 16 * K -// output: int32_t (shifted 15 positions to the left, + offset 16384) :: size 11 * K +// output: int32_t (shifted 15 positions to the left, + offset 16384) :: size 11 +// * K // K: Number of blocks void WebRtcSpl_32khzTo22khzIntToInt(const int32_t* In, int32_t* Out, - int32_t K) -{ - ///////////////////////////////////////////////////////////// - // Filter operation: - // - // Perform resampling (16 input samples -> 11 output samples); - // process in sub blocks of size 16 samples. - int32_t m; - - for (m = 0; m < K; m++) - { - // first output sample - Out[0] = ((int32_t)In[3] << 15) + (1 << 14); - - // sum and accumulate filter coefficients and input samples - WebRtcSpl_DotProdIntToInt(&In[0], &In[22], kCoefficients32To22[0], &Out[1], &Out[10]); - - // sum and accumulate filter coefficients and input samples - WebRtcSpl_DotProdIntToInt(&In[2], &In[20], kCoefficients32To22[1], &Out[2], &Out[9]); - - // sum and accumulate filter coefficients and input samples - WebRtcSpl_DotProdIntToInt(&In[3], &In[19], kCoefficients32To22[2], &Out[3], &Out[8]); - - // sum and accumulate filter coefficients and input samples - WebRtcSpl_DotProdIntToInt(&In[5], &In[17], kCoefficients32To22[3], &Out[4], &Out[7]); - - // sum and accumulate filter coefficients and input samples - WebRtcSpl_DotProdIntToInt(&In[6], &In[16], kCoefficients32To22[4], &Out[5], &Out[6]); - - // update pointers - In += 16; - Out += 11; - } + int32_t K) { + ///////////////////////////////////////////////////////////// + // Filter operation: + // + // Perform resampling (16 input samples -> 11 output samples); + // process in sub blocks of size 16 samples. 
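// [Editor's note, not part of the patch] The function being reformatted here
// (WebRtcSpl_32khzTo22khzIntToInt, continued below) maps each sub-block of 16
// input samples to 11 output samples: Out[0] is derived directly from In[3]
// (re-scaled, with a rounding offset), and the remaining ten outputs come in
// symmetric pairs from the 9-tap dot products above. The bookkeeping is
// simply 16*K samples in, 11*K samples out; e.g. a 10 ms frame at 32 kHz
// (320 samples, K = 20) yields 220 samples at 22 kHz, matching the block
// counts used by the callers in resample.c.

static void BlockSizes32To22_sketch(int K, int* in_len, int* out_len) {
  *in_len = 16 * K;   // Input samples consumed per call.
  *out_len = 11 * K;  // Output samples produced per call.
}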
+ int32_t m; + + for (m = 0; m < K; m++) { + // first output sample + Out[0] = ((int32_t)In[3] << 15) + (1 << 14); + + // sum and accumulate filter coefficients and input samples + WebRtcSpl_DotProdIntToInt(&In[0], &In[22], kCoefficients32To22[0], &Out[1], + &Out[10]); + + // sum and accumulate filter coefficients and input samples + WebRtcSpl_DotProdIntToInt(&In[2], &In[20], kCoefficients32To22[1], &Out[2], + &Out[9]); + + // sum and accumulate filter coefficients and input samples + WebRtcSpl_DotProdIntToInt(&In[3], &In[19], kCoefficients32To22[2], &Out[3], + &Out[8]); + + // sum and accumulate filter coefficients and input samples + WebRtcSpl_DotProdIntToInt(&In[5], &In[17], kCoefficients32To22[3], &Out[4], + &Out[7]); + + // sum and accumulate filter coefficients and input samples + WebRtcSpl_DotProdIntToInt(&In[6], &In[16], kCoefficients32To22[4], &Out[5], + &Out[6]); + + // update pointers + In += 16; + Out += 11; + } } // Resampling ratio: 11/16 @@ -461,45 +464,48 @@ void WebRtcSpl_32khzTo22khzIntToInt(const int32_t* In, // output: int16_t (saturated) :: size 11 * K // K: Number of blocks -void WebRtcSpl_32khzTo22khzIntToShort(const int32_t *In, - int16_t *Out, - int32_t K) -{ - ///////////////////////////////////////////////////////////// - // Filter operation: - // - // Perform resampling (16 input samples -> 11 output samples); - // process in sub blocks of size 16 samples. - int32_t tmp; - int32_t m; - - for (m = 0; m < K; m++) - { - // first output sample - tmp = In[3]; - if (tmp > (int32_t)0x00007FFF) - tmp = 0x00007FFF; - if (tmp < (int32_t)0xFFFF8000) - tmp = 0xFFFF8000; - Out[0] = (int16_t)tmp; - - // sum and accumulate filter coefficients and input samples - WebRtcSpl_DotProdIntToShort(&In[0], &In[22], kCoefficients32To22[0], &Out[1], &Out[10]); - - // sum and accumulate filter coefficients and input samples - WebRtcSpl_DotProdIntToShort(&In[2], &In[20], kCoefficients32To22[1], &Out[2], &Out[9]); - - // sum and accumulate filter coefficients and input samples - WebRtcSpl_DotProdIntToShort(&In[3], &In[19], kCoefficients32To22[2], &Out[3], &Out[8]); - - // sum and accumulate filter coefficients and input samples - WebRtcSpl_DotProdIntToShort(&In[5], &In[17], kCoefficients32To22[3], &Out[4], &Out[7]); - - // sum and accumulate filter coefficients and input samples - WebRtcSpl_DotProdIntToShort(&In[6], &In[16], kCoefficients32To22[4], &Out[5], &Out[6]); - - // update pointers - In += 16; - Out += 11; - } +void WebRtcSpl_32khzTo22khzIntToShort(const int32_t* In, + int16_t* Out, + int32_t K) { + ///////////////////////////////////////////////////////////// + // Filter operation: + // + // Perform resampling (16 input samples -> 11 output samples); + // process in sub blocks of size 16 samples. 
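// [Editor's sketch, not part of the patch] The clipping pattern used in
// WebRtcSpl_DotProdIntToShort above and in the first-sample handling below
// (shift out the Q15 fraction where needed, then clamp to
// [0xFFFF8000, 0x00007FFF]) is ordinary int16_t saturation; as a named helper
// it would read:

#include <stdint.h>

static int16_t SaturateW16_sketch(int32_t value) {
  if (value > INT16_MAX)
    return INT16_MAX;  // 0x00007FFF
  if (value < INT16_MIN)
    return INT16_MIN;  // (int32_t)0xFFFF8000 == -32768
  return (int16_t)value;
}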
+ int32_t tmp; + int32_t m; + + for (m = 0; m < K; m++) { + // first output sample + tmp = In[3]; + if (tmp > (int32_t)0x00007FFF) + tmp = 0x00007FFF; + if (tmp < (int32_t)0xFFFF8000) + tmp = 0xFFFF8000; + Out[0] = (int16_t)tmp; + + // sum and accumulate filter coefficients and input samples + WebRtcSpl_DotProdIntToShort(&In[0], &In[22], kCoefficients32To22[0], + &Out[1], &Out[10]); + + // sum and accumulate filter coefficients and input samples + WebRtcSpl_DotProdIntToShort(&In[2], &In[20], kCoefficients32To22[1], + &Out[2], &Out[9]); + + // sum and accumulate filter coefficients and input samples + WebRtcSpl_DotProdIntToShort(&In[3], &In[19], kCoefficients32To22[2], + &Out[3], &Out[8]); + + // sum and accumulate filter coefficients and input samples + WebRtcSpl_DotProdIntToShort(&In[5], &In[17], kCoefficients32To22[3], + &Out[4], &Out[7]); + + // sum and accumulate filter coefficients and input samples + WebRtcSpl_DotProdIntToShort(&In[6], &In[16], kCoefficients32To22[4], + &Out[5], &Out[6]); + + // update pointers + In += 16; + Out += 11; + } } diff --git a/common_audio/signal_processing/resample_48khz.c b/common_audio/signal_processing/resample_48khz.c index 8518e7b1ce..d083379f59 100644 --- a/common_audio/signal_processing/resample_48khz.c +++ b/common_audio/signal_processing/resample_48khz.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains resampling functions between 48 kHz and nb/wb. * The description header can be found in signal_processing_library.h @@ -16,6 +15,7 @@ */ #include + #include "common_audio/signal_processing/include/signal_processing_library.h" #include "common_audio/signal_processing/resample_by_2_internal.h" @@ -24,37 +24,37 @@ //////////////////////////// // 48 -> 16 resampler -void WebRtcSpl_Resample48khzTo16khz(const int16_t* in, int16_t* out, - WebRtcSpl_State48khzTo16khz* state, int32_t* tmpmem) -{ - ///// 48 --> 48(LP) ///// - // int16_t in[480] - // int32_t out[480] - ///// - WebRtcSpl_LPBy2ShortToInt(in, 480, tmpmem + 16, state->S_48_48); - - ///// 48 --> 32 ///// - // int32_t in[480] - // int32_t out[320] - ///// - // copy state to and from input array - memcpy(tmpmem + 8, state->S_48_32, 8 * sizeof(int32_t)); - memcpy(state->S_48_32, tmpmem + 488, 8 * sizeof(int32_t)); - WebRtcSpl_Resample48khzTo32khz(tmpmem + 8, tmpmem, 160); - - ///// 32 --> 16 ///// - // int32_t in[320] - // int16_t out[160] - ///// - WebRtcSpl_DownBy2IntToShort(tmpmem, 320, out, state->S_32_16); +void WebRtcSpl_Resample48khzTo16khz(const int16_t* in, + int16_t* out, + WebRtcSpl_State48khzTo16khz* state, + int32_t* tmpmem) { + ///// 48 --> 48(LP) ///// + // int16_t in[480] + // int32_t out[480] + ///// + WebRtcSpl_LPBy2ShortToInt(in, 480, tmpmem + 16, state->S_48_48); + + ///// 48 --> 32 ///// + // int32_t in[480] + // int32_t out[320] + ///// + // copy state to and from input array + memcpy(tmpmem + 8, state->S_48_32, 8 * sizeof(int32_t)); + memcpy(state->S_48_32, tmpmem + 488, 8 * sizeof(int32_t)); + WebRtcSpl_Resample48khzTo32khz(tmpmem + 8, tmpmem, 160); + + ///// 32 --> 16 ///// + // int32_t in[320] + // int16_t out[160] + ///// + WebRtcSpl_DownBy2IntToShort(tmpmem, 320, out, state->S_32_16); } // initialize state of 48 -> 16 resampler -void WebRtcSpl_ResetResample48khzTo16khz(WebRtcSpl_State48khzTo16khz* state) -{ - memset(state->S_48_48, 0, 16 * sizeof(int32_t)); - memset(state->S_48_32, 0, 8 * sizeof(int32_t)); - memset(state->S_32_16, 0, 8 * sizeof(int32_t)); +void 
WebRtcSpl_ResetResample48khzTo16khz(WebRtcSpl_State48khzTo16khz* state) { + memset(state->S_48_48, 0, 16 * sizeof(int32_t)); + memset(state->S_48_32, 0, 8 * sizeof(int32_t)); + memset(state->S_32_16, 0, 8 * sizeof(int32_t)); } //////////////////////////// @@ -62,37 +62,37 @@ void WebRtcSpl_ResetResample48khzTo16khz(WebRtcSpl_State48khzTo16khz* state) //////////////////////////// // 16 -> 48 resampler -void WebRtcSpl_Resample16khzTo48khz(const int16_t* in, int16_t* out, - WebRtcSpl_State16khzTo48khz* state, int32_t* tmpmem) -{ - ///// 16 --> 32 ///// - // int16_t in[160] - // int32_t out[320] - ///// - WebRtcSpl_UpBy2ShortToInt(in, 160, tmpmem + 16, state->S_16_32); - - ///// 32 --> 24 ///// - // int32_t in[320] - // int32_t out[240] - // copy state to and from input array - ///// - memcpy(tmpmem + 8, state->S_32_24, 8 * sizeof(int32_t)); - memcpy(state->S_32_24, tmpmem + 328, 8 * sizeof(int32_t)); - WebRtcSpl_Resample32khzTo24khz(tmpmem + 8, tmpmem, 80); - - ///// 24 --> 48 ///// - // int32_t in[240] - // int16_t out[480] - ///// - WebRtcSpl_UpBy2IntToShort(tmpmem, 240, out, state->S_24_48); +void WebRtcSpl_Resample16khzTo48khz(const int16_t* in, + int16_t* out, + WebRtcSpl_State16khzTo48khz* state, + int32_t* tmpmem) { + ///// 16 --> 32 ///// + // int16_t in[160] + // int32_t out[320] + ///// + WebRtcSpl_UpBy2ShortToInt(in, 160, tmpmem + 16, state->S_16_32); + + ///// 32 --> 24 ///// + // int32_t in[320] + // int32_t out[240] + // copy state to and from input array + ///// + memcpy(tmpmem + 8, state->S_32_24, 8 * sizeof(int32_t)); + memcpy(state->S_32_24, tmpmem + 328, 8 * sizeof(int32_t)); + WebRtcSpl_Resample32khzTo24khz(tmpmem + 8, tmpmem, 80); + + ///// 24 --> 48 ///// + // int32_t in[240] + // int16_t out[480] + ///// + WebRtcSpl_UpBy2IntToShort(tmpmem, 240, out, state->S_24_48); } // initialize state of 16 -> 48 resampler -void WebRtcSpl_ResetResample16khzTo48khz(WebRtcSpl_State16khzTo48khz* state) -{ - memset(state->S_16_32, 0, 8 * sizeof(int32_t)); - memset(state->S_32_24, 0, 8 * sizeof(int32_t)); - memset(state->S_24_48, 0, 8 * sizeof(int32_t)); +void WebRtcSpl_ResetResample16khzTo48khz(WebRtcSpl_State16khzTo48khz* state) { + memset(state->S_16_32, 0, 8 * sizeof(int32_t)); + memset(state->S_32_24, 0, 8 * sizeof(int32_t)); + memset(state->S_24_48, 0, 8 * sizeof(int32_t)); } //////////////////////////// @@ -100,44 +100,44 @@ void WebRtcSpl_ResetResample16khzTo48khz(WebRtcSpl_State16khzTo48khz* state) //////////////////////////// // 48 -> 8 resampler -void WebRtcSpl_Resample48khzTo8khz(const int16_t* in, int16_t* out, - WebRtcSpl_State48khzTo8khz* state, int32_t* tmpmem) -{ - ///// 48 --> 24 ///// - // int16_t in[480] - // int32_t out[240] - ///// - WebRtcSpl_DownBy2ShortToInt(in, 480, tmpmem + 256, state->S_48_24); - - ///// 24 --> 24(LP) ///// - // int32_t in[240] - // int32_t out[240] - ///// - WebRtcSpl_LPBy2IntToInt(tmpmem + 256, 240, tmpmem + 16, state->S_24_24); - - ///// 24 --> 16 ///// - // int32_t in[240] - // int32_t out[160] - ///// - // copy state to and from input array - memcpy(tmpmem + 8, state->S_24_16, 8 * sizeof(int32_t)); - memcpy(state->S_24_16, tmpmem + 248, 8 * sizeof(int32_t)); - WebRtcSpl_Resample48khzTo32khz(tmpmem + 8, tmpmem, 80); - - ///// 16 --> 8 ///// - // int32_t in[160] - // int16_t out[80] - ///// - WebRtcSpl_DownBy2IntToShort(tmpmem, 160, out, state->S_16_8); +void WebRtcSpl_Resample48khzTo8khz(const int16_t* in, + int16_t* out, + WebRtcSpl_State48khzTo8khz* state, + int32_t* tmpmem) { + ///// 48 --> 24 ///// + // int16_t in[480] + // int32_t 
out[240] + ///// + WebRtcSpl_DownBy2ShortToInt(in, 480, tmpmem + 256, state->S_48_24); + + ///// 24 --> 24(LP) ///// + // int32_t in[240] + // int32_t out[240] + ///// + WebRtcSpl_LPBy2IntToInt(tmpmem + 256, 240, tmpmem + 16, state->S_24_24); + + ///// 24 --> 16 ///// + // int32_t in[240] + // int32_t out[160] + ///// + // copy state to and from input array + memcpy(tmpmem + 8, state->S_24_16, 8 * sizeof(int32_t)); + memcpy(state->S_24_16, tmpmem + 248, 8 * sizeof(int32_t)); + WebRtcSpl_Resample48khzTo32khz(tmpmem + 8, tmpmem, 80); + + ///// 16 --> 8 ///// + // int32_t in[160] + // int16_t out[80] + ///// + WebRtcSpl_DownBy2IntToShort(tmpmem, 160, out, state->S_16_8); } // initialize state of 48 -> 8 resampler -void WebRtcSpl_ResetResample48khzTo8khz(WebRtcSpl_State48khzTo8khz* state) -{ - memset(state->S_48_24, 0, 8 * sizeof(int32_t)); - memset(state->S_24_24, 0, 16 * sizeof(int32_t)); - memset(state->S_24_16, 0, 8 * sizeof(int32_t)); - memset(state->S_16_8, 0, 8 * sizeof(int32_t)); +void WebRtcSpl_ResetResample48khzTo8khz(WebRtcSpl_State48khzTo8khz* state) { + memset(state->S_48_24, 0, 8 * sizeof(int32_t)); + memset(state->S_24_24, 0, 16 * sizeof(int32_t)); + memset(state->S_24_16, 0, 8 * sizeof(int32_t)); + memset(state->S_16_8, 0, 8 * sizeof(int32_t)); } //////////////////////////// @@ -145,42 +145,42 @@ void WebRtcSpl_ResetResample48khzTo8khz(WebRtcSpl_State48khzTo8khz* state) //////////////////////////// // 8 -> 48 resampler -void WebRtcSpl_Resample8khzTo48khz(const int16_t* in, int16_t* out, - WebRtcSpl_State8khzTo48khz* state, int32_t* tmpmem) -{ - ///// 8 --> 16 ///// - // int16_t in[80] - // int32_t out[160] - ///// - WebRtcSpl_UpBy2ShortToInt(in, 80, tmpmem + 264, state->S_8_16); - - ///// 16 --> 12 ///// - // int32_t in[160] - // int32_t out[120] - ///// - // copy state to and from input array - memcpy(tmpmem + 256, state->S_16_12, 8 * sizeof(int32_t)); - memcpy(state->S_16_12, tmpmem + 416, 8 * sizeof(int32_t)); - WebRtcSpl_Resample32khzTo24khz(tmpmem + 256, tmpmem + 240, 40); - - ///// 12 --> 24 ///// - // int32_t in[120] - // int16_t out[240] - ///// - WebRtcSpl_UpBy2IntToInt(tmpmem + 240, 120, tmpmem, state->S_12_24); - - ///// 24 --> 48 ///// - // int32_t in[240] - // int16_t out[480] - ///// - WebRtcSpl_UpBy2IntToShort(tmpmem, 240, out, state->S_24_48); +void WebRtcSpl_Resample8khzTo48khz(const int16_t* in, + int16_t* out, + WebRtcSpl_State8khzTo48khz* state, + int32_t* tmpmem) { + ///// 8 --> 16 ///// + // int16_t in[80] + // int32_t out[160] + ///// + WebRtcSpl_UpBy2ShortToInt(in, 80, tmpmem + 264, state->S_8_16); + + ///// 16 --> 12 ///// + // int32_t in[160] + // int32_t out[120] + ///// + // copy state to and from input array + memcpy(tmpmem + 256, state->S_16_12, 8 * sizeof(int32_t)); + memcpy(state->S_16_12, tmpmem + 416, 8 * sizeof(int32_t)); + WebRtcSpl_Resample32khzTo24khz(tmpmem + 256, tmpmem + 240, 40); + + ///// 12 --> 24 ///// + // int32_t in[120] + // int16_t out[240] + ///// + WebRtcSpl_UpBy2IntToInt(tmpmem + 240, 120, tmpmem, state->S_12_24); + + ///// 24 --> 48 ///// + // int32_t in[240] + // int16_t out[480] + ///// + WebRtcSpl_UpBy2IntToShort(tmpmem, 240, out, state->S_24_48); } // initialize state of 8 -> 48 resampler -void WebRtcSpl_ResetResample8khzTo48khz(WebRtcSpl_State8khzTo48khz* state) -{ - memset(state->S_8_16, 0, 8 * sizeof(int32_t)); - memset(state->S_16_12, 0, 8 * sizeof(int32_t)); - memset(state->S_12_24, 0, 8 * sizeof(int32_t)); - memset(state->S_24_48, 0, 8 * sizeof(int32_t)); +void 
WebRtcSpl_ResetResample8khzTo48khz(WebRtcSpl_State8khzTo48khz* state) { + memset(state->S_8_16, 0, 8 * sizeof(int32_t)); + memset(state->S_16_12, 0, 8 * sizeof(int32_t)); + memset(state->S_12_24, 0, 8 * sizeof(int32_t)); + memset(state->S_24_48, 0, 8 * sizeof(int32_t)); } diff --git a/common_audio/signal_processing/resample_by_2.c b/common_audio/signal_processing/resample_by_2.c index 73e1950654..3172154dec 100644 --- a/common_audio/signal_processing/resample_by_2.c +++ b/common_audio/signal_processing/resample_by_2.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the resampling by two functions. * The description header can be found in signal_processing_library.h @@ -21,8 +20,7 @@ // allpass filter coefficients. static const uint32_t kResampleAllpass1[3] = {3284, 24441, 49528 << 15}; -static const uint32_t kResampleAllpass2[3] = - {12199, 37471 << 15, 60255 << 15}; +static const uint32_t kResampleAllpass2[3] = {12199, 37471 << 15, 60255 << 15}; // Multiply two 32-bit values and accumulate to another input value. // Return: state + ((diff * tbl_value) >> 16) @@ -31,8 +29,9 @@ static __inline int32_t MUL_ACCUM_1(int32_t tbl_value, int32_t diff, int32_t state) { int32_t result; - __asm __volatile ("smlawb %0, %1, %2, %3": "=r"(result): "r"(diff), - "r"(tbl_value), "r"(state)); + __asm __volatile("smlawb %0, %1, %2, %3" + : "=r"(result) + : "r"(diff), "r"(tbl_value), "r"(state)); return result; } @@ -40,15 +39,16 @@ static __inline int32_t MUL_ACCUM_1(int32_t tbl_value, // Return: Return: state + (((diff << 1) * tbl_value) >> 32) // // The reason to introduce this function is that, in case we can't use smlawb -// instruction (in MUL_ACCUM_1) due to input value range, we can still use +// instruction (in MUL_ACCUM_1) due to input value range, we can still use // smmla to save some cycles. static __inline int32_t MUL_ACCUM_2(int32_t tbl_value, int32_t diff, int32_t state) { int32_t result; - __asm __volatile ("smmla %0, %1, %2, %3": "=r"(result): "r"(diff << 1), - "r"(tbl_value), "r"(state)); + __asm __volatile("smmla %0, %1, %2, %3" + : "=r"(result) + : "r"(diff << 1), "r"(tbl_value), "r"(state)); return result; } @@ -64,11 +64,12 @@ static const uint16_t kResampleAllpass2[3] = {12199, 37471, 60255}; #endif // WEBRTC_ARCH_ARM_V7 - // decimator #if !defined(MIPS32_LE) -void WebRtcSpl_DownsampleBy2(const int16_t* in, size_t len, - int16_t* out, int32_t* filtState) { +void WebRtcSpl_DownsampleBy2(const int16_t* in, + size_t len, + int16_t* out, + int32_t* filtState) { int32_t tmp1, tmp2, diff, in32, out32; size_t i; @@ -124,9 +125,10 @@ void WebRtcSpl_DownsampleBy2(const int16_t* in, size_t len, } #endif // #if defined(MIPS32_LE) - -void WebRtcSpl_UpsampleBy2(const int16_t* in, size_t len, - int16_t* out, int32_t* filtState) { +void WebRtcSpl_UpsampleBy2(const int16_t* in, + size_t len, + int16_t* out, + int32_t* filtState) { int32_t tmp1, tmp2, diff, in32, out32; size_t i; diff --git a/common_audio/signal_processing/resample_by_2_internal.c b/common_audio/signal_processing/resample_by_2_internal.c index 99592b20b5..a68eced7af 100644 --- a/common_audio/signal_processing/resample_by_2_internal.c +++ b/common_audio/signal_processing/resample_by_2_internal.c @@ -8,195 +8,188 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This header file contains some internal resampling functions. 
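// [Editor's sketch, not part of the patch] The by-2 resamplers being
// reformatted here (resample_by_2.c and resample_by_2_internal.c) are built
// from cascades of first-order allpass sections run on the even and odd
// polyphase branches; the two branch outputs are then combined (averaged, for
// the decimators) to form the half-band filter. Ignoring the fixed-point
// scaling, one section updates as y[n] = x[n-1] + c * (x[n] - y[n-1]); a
// floating-point sketch:

typedef struct {
  float prev_in;   // x[n-1]
  float prev_out;  // y[n-1]
} AllpassSectionSketch;

static float AllpassStep_sketch(AllpassSectionSketch* s, float c, float x) {
  float y = s->prev_in + c * (x - s->prev_out);
  s->prev_in = x;
  s->prev_out = y;
  return y;
}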
* */ #include "common_audio/signal_processing/resample_by_2_internal.h" + #include "rtc_base/sanitizer.h" // allpass filter coefficients. -static const int16_t kResampleAllpass[2][3] = { - {821, 6110, 12382}, - {3050, 9368, 15063} -}; +static const int16_t kResampleAllpass[2][3] = {{821, 6110, 12382}, + {3050, 9368, 15063}}; // // decimator -// input: int32_t (shifted 15 positions to the left, + offset 16384) OVERWRITTEN! -// output: int16_t (saturated) (of length len/2) -// state: filter state array; length = 8 +// input: int32_t (shifted 15 positions to the left, + offset 16384) +// OVERWRITTEN! output: int16_t (saturated) (of length len/2) state: filter +// state array; length = 8 void RTC_NO_SANITIZE("signed-integer-overflow") // bugs.webrtc.org/5486 -WebRtcSpl_DownBy2IntToShort(int32_t *in, int32_t len, int16_t *out, - int32_t *state) -{ - int32_t tmp0, tmp1, diff; - int32_t i; - - len >>= 1; - - // lower allpass filter (operates on even input samples) - for (i = 0; i < len; i++) - { - tmp0 = in[i << 1]; - diff = tmp0 - state[1]; - // UBSan: -1771017321 - 999586185 cannot be represented in type 'int' - - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[0] + diff * kResampleAllpass[1][0]; - state[0] = tmp0; - diff = tmp1 - state[2]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[1] + diff * kResampleAllpass[1][1]; - state[1] = tmp1; - diff = tmp0 - state[3]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[3] = state[2] + diff * kResampleAllpass[1][2]; - state[2] = tmp0; - - // divide by two and store temporarily - in[i << 1] = (state[3] >> 1); - } - - in++; - - // upper allpass filter (operates on odd input samples) - for (i = 0; i < len; i++) - { - tmp0 = in[i << 1]; - diff = tmp0 - state[5]; - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[4] + diff * kResampleAllpass[0][0]; - state[4] = tmp0; - diff = tmp1 - state[6]; - // scale down and round - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[5] + diff * kResampleAllpass[0][1]; - state[5] = tmp1; - diff = tmp0 - state[7]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[7] = state[6] + diff * kResampleAllpass[0][2]; - state[6] = tmp0; - - // divide by two and store temporarily - in[i << 1] = (state[7] >> 1); - } - - in--; - - // combine allpass outputs - for (i = 0; i < len; i += 2) - { - // divide by two, add both allpass outputs and round - tmp0 = (in[i << 1] + in[(i << 1) + 1]) >> 15; - tmp1 = (in[(i << 1) + 2] + in[(i << 1) + 3]) >> 15; - if (tmp0 > (int32_t)0x00007FFF) - tmp0 = 0x00007FFF; - if (tmp0 < (int32_t)0xFFFF8000) - tmp0 = 0xFFFF8000; - out[i] = (int16_t)tmp0; - if (tmp1 > (int32_t)0x00007FFF) - tmp1 = 0x00007FFF; - if (tmp1 < (int32_t)0xFFFF8000) - tmp1 = 0xFFFF8000; - out[i + 1] = (int16_t)tmp1; - } + WebRtcSpl_DownBy2IntToShort(int32_t* in, + int32_t len, + int16_t* out, + int32_t* state) { + int32_t tmp0, tmp1, diff; + int32_t i; + + len >>= 1; + + // lower allpass filter (operates on even input samples) + for (i = 0; i < len; i++) { + tmp0 = in[i << 1]; + diff = tmp0 - state[1]; + // UBSan: -1771017321 - 999586185 cannot be represented in type 'int' + + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[0] + diff * kResampleAllpass[1][0]; + state[0] = tmp0; + diff = tmp1 - state[2]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[1] + diff * 
kResampleAllpass[1][1]; + state[1] = tmp1; + diff = tmp0 - state[3]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[3] = state[2] + diff * kResampleAllpass[1][2]; + state[2] = tmp0; + + // divide by two and store temporarily + in[i << 1] = (state[3] >> 1); + } + + in++; + + // upper allpass filter (operates on odd input samples) + for (i = 0; i < len; i++) { + tmp0 = in[i << 1]; + diff = tmp0 - state[5]; + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[4] + diff * kResampleAllpass[0][0]; + state[4] = tmp0; + diff = tmp1 - state[6]; + // scale down and round + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[5] + diff * kResampleAllpass[0][1]; + state[5] = tmp1; + diff = tmp0 - state[7]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[7] = state[6] + diff * kResampleAllpass[0][2]; + state[6] = tmp0; + + // divide by two and store temporarily + in[i << 1] = (state[7] >> 1); + } + + in--; + + // combine allpass outputs + for (i = 0; i < len; i += 2) { + // divide by two, add both allpass outputs and round + tmp0 = (in[i << 1] + in[(i << 1) + 1]) >> 15; + tmp1 = (in[(i << 1) + 2] + in[(i << 1) + 3]) >> 15; + if (tmp0 > (int32_t)0x00007FFF) + tmp0 = 0x00007FFF; + if (tmp0 < (int32_t)0xFFFF8000) + tmp0 = 0xFFFF8000; + out[i] = (int16_t)tmp0; + if (tmp1 > (int32_t)0x00007FFF) + tmp1 = 0x00007FFF; + if (tmp1 < (int32_t)0xFFFF8000) + tmp1 = 0xFFFF8000; + out[i + 1] = (int16_t)tmp1; + } } // // decimator // input: int16_t -// output: int32_t (shifted 15 positions to the left, + offset 16384) (of length len/2) -// state: filter state array; length = 8 +// output: int32_t (shifted 15 positions to the left, + offset 16384) (of length +// len/2) state: filter state array; length = 8 void RTC_NO_SANITIZE("signed-integer-overflow") // bugs.webrtc.org/5486 -WebRtcSpl_DownBy2ShortToInt(const int16_t *in, - int32_t len, - int32_t *out, - int32_t *state) -{ - int32_t tmp0, tmp1, diff; - int32_t i; - - len >>= 1; - - // lower allpass filter (operates on even input samples) - for (i = 0; i < len; i++) - { - tmp0 = ((int32_t)in[i << 1] << 15) + (1 << 14); - diff = tmp0 - state[1]; - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[0] + diff * kResampleAllpass[1][0]; - state[0] = tmp0; - diff = tmp1 - state[2]; - // UBSan: -1379909682 - 834099714 cannot be represented in type 'int' - - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[1] + diff * kResampleAllpass[1][1]; - state[1] = tmp1; - diff = tmp0 - state[3]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[3] = state[2] + diff * kResampleAllpass[1][2]; - state[2] = tmp0; - - // divide by two and store temporarily - out[i] = (state[3] >> 1); - } - - in++; - - // upper allpass filter (operates on odd input samples) - for (i = 0; i < len; i++) - { - tmp0 = ((int32_t)in[i << 1] << 15) + (1 << 14); - diff = tmp0 - state[5]; - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[4] + diff * kResampleAllpass[0][0]; - state[4] = tmp0; - diff = tmp1 - state[6]; - // scale down and round - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[5] + diff * kResampleAllpass[0][1]; - state[5] = tmp1; - diff = tmp0 - state[7]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[7] = state[6] + diff * kResampleAllpass[0][2]; - state[6] = tmp0; - - // divide by two and store temporarily - 
out[i] += (state[7] >> 1); - } - - in--; + WebRtcSpl_DownBy2ShortToInt(const int16_t* in, + int32_t len, + int32_t* out, + int32_t* state) { + int32_t tmp0, tmp1, diff; + int32_t i; + + len >>= 1; + + // lower allpass filter (operates on even input samples) + for (i = 0; i < len; i++) { + tmp0 = ((int32_t)in[i << 1] << 15) + (1 << 14); + diff = tmp0 - state[1]; + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[0] + diff * kResampleAllpass[1][0]; + state[0] = tmp0; + diff = tmp1 - state[2]; + // UBSan: -1379909682 - 834099714 cannot be represented in type 'int' + + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[1] + diff * kResampleAllpass[1][1]; + state[1] = tmp1; + diff = tmp0 - state[3]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[3] = state[2] + diff * kResampleAllpass[1][2]; + state[2] = tmp0; + + // divide by two and store temporarily + out[i] = (state[3] >> 1); + } + + in++; + + // upper allpass filter (operates on odd input samples) + for (i = 0; i < len; i++) { + tmp0 = ((int32_t)in[i << 1] << 15) + (1 << 14); + diff = tmp0 - state[5]; + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[4] + diff * kResampleAllpass[0][0]; + state[4] = tmp0; + diff = tmp1 - state[6]; + // scale down and round + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[5] + diff * kResampleAllpass[0][1]; + state[5] = tmp1; + diff = tmp0 - state[7]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[7] = state[6] + diff * kResampleAllpass[0][2]; + state[6] = tmp0; + + // divide by two and store temporarily + out[i] += (state[7] >> 1); + } + + in--; } // @@ -204,139 +197,137 @@ WebRtcSpl_DownBy2ShortToInt(const int16_t *in, // input: int16_t // output: int32_t (normalized, not saturated) (of length len*2) // state: filter state array; length = 8 -void WebRtcSpl_UpBy2ShortToInt(const int16_t *in, int32_t len, int32_t *out, - int32_t *state) -{ - int32_t tmp0, tmp1, diff; - int32_t i; - - // upper allpass filter (generates odd output samples) - for (i = 0; i < len; i++) - { - tmp0 = ((int32_t)in[i] << 15) + (1 << 14); - diff = tmp0 - state[5]; - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[4] + diff * kResampleAllpass[0][0]; - state[4] = tmp0; - diff = tmp1 - state[6]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[5] + diff * kResampleAllpass[0][1]; - state[5] = tmp1; - diff = tmp0 - state[7]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[7] = state[6] + diff * kResampleAllpass[0][2]; - state[6] = tmp0; - - // scale down, round and store - out[i << 1] = state[7] >> 15; - } - - out++; - - // lower allpass filter (generates even output samples) - for (i = 0; i < len; i++) - { - tmp0 = ((int32_t)in[i] << 15) + (1 << 14); - diff = tmp0 - state[1]; - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[0] + diff * kResampleAllpass[1][0]; - state[0] = tmp0; - diff = tmp1 - state[2]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[1] + diff * kResampleAllpass[1][1]; - state[1] = tmp1; - diff = tmp0 - state[3]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[3] = state[2] + diff * kResampleAllpass[1][2]; - state[2] = tmp0; - - // scale down, round and store - out[i << 1] = state[3] >> 15; - } +void 
WebRtcSpl_UpBy2ShortToInt(const int16_t* in, + int32_t len, + int32_t* out, + int32_t* state) { + int32_t tmp0, tmp1, diff; + int32_t i; + + // upper allpass filter (generates odd output samples) + for (i = 0; i < len; i++) { + tmp0 = ((int32_t)in[i] << 15) + (1 << 14); + diff = tmp0 - state[5]; + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[4] + diff * kResampleAllpass[0][0]; + state[4] = tmp0; + diff = tmp1 - state[6]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[5] + diff * kResampleAllpass[0][1]; + state[5] = tmp1; + diff = tmp0 - state[7]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[7] = state[6] + diff * kResampleAllpass[0][2]; + state[6] = tmp0; + + // scale down, round and store + out[i << 1] = state[7] >> 15; + } + + out++; + + // lower allpass filter (generates even output samples) + for (i = 0; i < len; i++) { + tmp0 = ((int32_t)in[i] << 15) + (1 << 14); + diff = tmp0 - state[1]; + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[0] + diff * kResampleAllpass[1][0]; + state[0] = tmp0; + diff = tmp1 - state[2]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[1] + diff * kResampleAllpass[1][1]; + state[1] = tmp1; + diff = tmp0 - state[3]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[3] = state[2] + diff * kResampleAllpass[1][2]; + state[2] = tmp0; + + // scale down, round and store + out[i << 1] = state[3] >> 15; + } } // // interpolator // input: int32_t (shifted 15 positions to the left, + offset 16384) -// output: int32_t (shifted 15 positions to the left, + offset 16384) (of length len*2) -// state: filter state array; length = 8 -void WebRtcSpl_UpBy2IntToInt(const int32_t *in, int32_t len, int32_t *out, - int32_t *state) -{ - int32_t tmp0, tmp1, diff; - int32_t i; - - // upper allpass filter (generates odd output samples) - for (i = 0; i < len; i++) - { - tmp0 = in[i]; - diff = tmp0 - state[5]; - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[4] + diff * kResampleAllpass[0][0]; - state[4] = tmp0; - diff = tmp1 - state[6]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[5] + diff * kResampleAllpass[0][1]; - state[5] = tmp1; - diff = tmp0 - state[7]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[7] = state[6] + diff * kResampleAllpass[0][2]; - state[6] = tmp0; - - // scale down, round and store - out[i << 1] = state[7]; - } - - out++; - - // lower allpass filter (generates even output samples) - for (i = 0; i < len; i++) - { - tmp0 = in[i]; - diff = tmp0 - state[1]; - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[0] + diff * kResampleAllpass[1][0]; - state[0] = tmp0; - diff = tmp1 - state[2]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[1] + diff * kResampleAllpass[1][1]; - state[1] = tmp1; - diff = tmp0 - state[3]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[3] = state[2] + diff * kResampleAllpass[1][2]; - state[2] = tmp0; - - // scale down, round and store - out[i << 1] = state[3]; - } +// output: int32_t (shifted 15 positions to the left, + offset 16384) (of length +// len*2) state: filter state array; length = 8 +void WebRtcSpl_UpBy2IntToInt(const int32_t* in, + int32_t len, + int32_t* out, + int32_t* state) { + 
int32_t tmp0, tmp1, diff; + int32_t i; + + // upper allpass filter (generates odd output samples) + for (i = 0; i < len; i++) { + tmp0 = in[i]; + diff = tmp0 - state[5]; + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[4] + diff * kResampleAllpass[0][0]; + state[4] = tmp0; + diff = tmp1 - state[6]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[5] + diff * kResampleAllpass[0][1]; + state[5] = tmp1; + diff = tmp0 - state[7]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[7] = state[6] + diff * kResampleAllpass[0][2]; + state[6] = tmp0; + + // scale down, round and store + out[i << 1] = state[7]; + } + + out++; + + // lower allpass filter (generates even output samples) + for (i = 0; i < len; i++) { + tmp0 = in[i]; + diff = tmp0 - state[1]; + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[0] + diff * kResampleAllpass[1][0]; + state[0] = tmp0; + diff = tmp1 - state[2]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[1] + diff * kResampleAllpass[1][1]; + state[1] = tmp1; + diff = tmp0 - state[3]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[3] = state[2] + diff * kResampleAllpass[1][2]; + state[2] = tmp0; + + // scale down, round and store + out[i << 1] = state[3]; + } } // @@ -344,212 +335,208 @@ void WebRtcSpl_UpBy2IntToInt(const int32_t *in, int32_t len, int32_t *out, // input: int32_t (shifted 15 positions to the left, + offset 16384) // output: int16_t (saturated) (of length len*2) // state: filter state array; length = 8 -void WebRtcSpl_UpBy2IntToShort(const int32_t *in, int32_t len, int16_t *out, - int32_t *state) -{ - int32_t tmp0, tmp1, diff; - int32_t i; - - // upper allpass filter (generates odd output samples) - for (i = 0; i < len; i++) - { - tmp0 = in[i]; - diff = tmp0 - state[5]; - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[4] + diff * kResampleAllpass[0][0]; - state[4] = tmp0; - diff = tmp1 - state[6]; - // scale down and round - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[5] + diff * kResampleAllpass[0][1]; - state[5] = tmp1; - diff = tmp0 - state[7]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[7] = state[6] + diff * kResampleAllpass[0][2]; - state[6] = tmp0; - - // scale down, saturate and store - tmp1 = state[7] >> 15; - if (tmp1 > (int32_t)0x00007FFF) - tmp1 = 0x00007FFF; - if (tmp1 < (int32_t)0xFFFF8000) - tmp1 = 0xFFFF8000; - out[i << 1] = (int16_t)tmp1; - } - - out++; - - // lower allpass filter (generates even output samples) - for (i = 0; i < len; i++) - { - tmp0 = in[i]; - diff = tmp0 - state[1]; - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[0] + diff * kResampleAllpass[1][0]; - state[0] = tmp0; - diff = tmp1 - state[2]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[1] + diff * kResampleAllpass[1][1]; - state[1] = tmp1; - diff = tmp0 - state[3]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[3] = state[2] + diff * kResampleAllpass[1][2]; - state[2] = tmp0; - - // scale down, saturate and store - tmp1 = state[3] >> 15; - if (tmp1 > (int32_t)0x00007FFF) - tmp1 = 0x00007FFF; - if (tmp1 < (int32_t)0xFFFF8000) - tmp1 = 0xFFFF8000; - out[i << 1] = (int16_t)tmp1; - } +void WebRtcSpl_UpBy2IntToShort(const int32_t* in, + int32_t len, + int16_t* 
out, + int32_t* state) { + int32_t tmp0, tmp1, diff; + int32_t i; + + // upper allpass filter (generates odd output samples) + for (i = 0; i < len; i++) { + tmp0 = in[i]; + diff = tmp0 - state[5]; + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[4] + diff * kResampleAllpass[0][0]; + state[4] = tmp0; + diff = tmp1 - state[6]; + // scale down and round + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[5] + diff * kResampleAllpass[0][1]; + state[5] = tmp1; + diff = tmp0 - state[7]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[7] = state[6] + diff * kResampleAllpass[0][2]; + state[6] = tmp0; + + // scale down, saturate and store + tmp1 = state[7] >> 15; + if (tmp1 > (int32_t)0x00007FFF) + tmp1 = 0x00007FFF; + if (tmp1 < (int32_t)0xFFFF8000) + tmp1 = 0xFFFF8000; + out[i << 1] = (int16_t)tmp1; + } + + out++; + + // lower allpass filter (generates even output samples) + for (i = 0; i < len; i++) { + tmp0 = in[i]; + diff = tmp0 - state[1]; + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[0] + diff * kResampleAllpass[1][0]; + state[0] = tmp0; + diff = tmp1 - state[2]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[1] + diff * kResampleAllpass[1][1]; + state[1] = tmp1; + diff = tmp0 - state[3]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[3] = state[2] + diff * kResampleAllpass[1][2]; + state[2] = tmp0; + + // scale down, saturate and store + tmp1 = state[3] >> 15; + if (tmp1 > (int32_t)0x00007FFF) + tmp1 = 0x00007FFF; + if (tmp1 < (int32_t)0xFFFF8000) + tmp1 = 0xFFFF8000; + out[i << 1] = (int16_t)tmp1; + } } // lowpass filter // input: int16_t // output: int32_t (normalized, not saturated) // state: filter state array; length = 8 -void WebRtcSpl_LPBy2ShortToInt(const int16_t* in, int32_t len, int32_t* out, - int32_t* state) -{ - int32_t tmp0, tmp1, diff; - int32_t i; - - len >>= 1; - - // lower allpass filter: odd input -> even output samples - in++; - // initial state of polyphase delay element - tmp0 = state[12]; - for (i = 0; i < len; i++) - { - diff = tmp0 - state[1]; - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[0] + diff * kResampleAllpass[1][0]; - state[0] = tmp0; - diff = tmp1 - state[2]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[1] + diff * kResampleAllpass[1][1]; - state[1] = tmp1; - diff = tmp0 - state[3]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[3] = state[2] + diff * kResampleAllpass[1][2]; - state[2] = tmp0; - - // scale down, round and store - out[i << 1] = state[3] >> 1; - tmp0 = ((int32_t)in[i << 1] << 15) + (1 << 14); - } - in--; - - // upper allpass filter: even input -> even output samples - for (i = 0; i < len; i++) - { - tmp0 = ((int32_t)in[i << 1] << 15) + (1 << 14); - diff = tmp0 - state[5]; - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[4] + diff * kResampleAllpass[0][0]; - state[4] = tmp0; - diff = tmp1 - state[6]; - // scale down and round - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[5] + diff * kResampleAllpass[0][1]; - state[5] = tmp1; - diff = tmp0 - state[7]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[7] = state[6] + diff * kResampleAllpass[0][2]; - state[6] = tmp0; - - // average the two allpass outputs, scale down and store - out[i << 1] = (out[i << 1] 
+ (state[7] >> 1)) >> 15; - } - - // switch to odd output samples - out++; - - // lower allpass filter: even input -> odd output samples - for (i = 0; i < len; i++) - { - tmp0 = ((int32_t)in[i << 1] << 15) + (1 << 14); - diff = tmp0 - state[9]; - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[8] + diff * kResampleAllpass[1][0]; - state[8] = tmp0; - diff = tmp1 - state[10]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[9] + diff * kResampleAllpass[1][1]; - state[9] = tmp1; - diff = tmp0 - state[11]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[11] = state[10] + diff * kResampleAllpass[1][2]; - state[10] = tmp0; - - // scale down, round and store - out[i << 1] = state[11] >> 1; - } - - // upper allpass filter: odd input -> odd output samples - in++; - for (i = 0; i < len; i++) - { - tmp0 = ((int32_t)in[i << 1] << 15) + (1 << 14); - diff = tmp0 - state[13]; - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[12] + diff * kResampleAllpass[0][0]; - state[12] = tmp0; - diff = tmp1 - state[14]; - // scale down and round - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[13] + diff * kResampleAllpass[0][1]; - state[13] = tmp1; - diff = tmp0 - state[15]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[15] = state[14] + diff * kResampleAllpass[0][2]; - state[14] = tmp0; - - // average the two allpass outputs, scale down and store - out[i << 1] = (out[i << 1] + (state[15] >> 1)) >> 15; - } +void WebRtcSpl_LPBy2ShortToInt(const int16_t* in, + int32_t len, + int32_t* out, + int32_t* state) { + int32_t tmp0, tmp1, diff; + int32_t i; + + len >>= 1; + + // lower allpass filter: odd input -> even output samples + in++; + // initial state of polyphase delay element + tmp0 = state[12]; + for (i = 0; i < len; i++) { + diff = tmp0 - state[1]; + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[0] + diff * kResampleAllpass[1][0]; + state[0] = tmp0; + diff = tmp1 - state[2]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[1] + diff * kResampleAllpass[1][1]; + state[1] = tmp1; + diff = tmp0 - state[3]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[3] = state[2] + diff * kResampleAllpass[1][2]; + state[2] = tmp0; + + // scale down, round and store + out[i << 1] = state[3] >> 1; + tmp0 = ((int32_t)in[i << 1] << 15) + (1 << 14); + } + in--; + + // upper allpass filter: even input -> even output samples + for (i = 0; i < len; i++) { + tmp0 = ((int32_t)in[i << 1] << 15) + (1 << 14); + diff = tmp0 - state[5]; + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[4] + diff * kResampleAllpass[0][0]; + state[4] = tmp0; + diff = tmp1 - state[6]; + // scale down and round + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[5] + diff * kResampleAllpass[0][1]; + state[5] = tmp1; + diff = tmp0 - state[7]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[7] = state[6] + diff * kResampleAllpass[0][2]; + state[6] = tmp0; + + // average the two allpass outputs, scale down and store + out[i << 1] = (out[i << 1] + (state[7] >> 1)) >> 15; + } + + // switch to odd output samples + out++; + + // lower allpass filter: even input -> odd output samples + for (i = 0; i < len; i++) { + tmp0 = ((int32_t)in[i << 1] << 15) + (1 << 14); + diff = tmp0 - state[9]; + // scale 
down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[8] + diff * kResampleAllpass[1][0]; + state[8] = tmp0; + diff = tmp1 - state[10]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[9] + diff * kResampleAllpass[1][1]; + state[9] = tmp1; + diff = tmp0 - state[11]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[11] = state[10] + diff * kResampleAllpass[1][2]; + state[10] = tmp0; + + // scale down, round and store + out[i << 1] = state[11] >> 1; + } + + // upper allpass filter: odd input -> odd output samples + in++; + for (i = 0; i < len; i++) { + tmp0 = ((int32_t)in[i << 1] << 15) + (1 << 14); + diff = tmp0 - state[13]; + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[12] + diff * kResampleAllpass[0][0]; + state[12] = tmp0; + diff = tmp1 - state[14]; + // scale down and round + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[13] + diff * kResampleAllpass[0][1]; + state[13] = tmp1; + diff = tmp0 - state[15]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[15] = state[14] + diff * kResampleAllpass[0][2]; + state[14] = tmp0; + + // average the two allpass outputs, scale down and store + out[i << 1] = (out[i << 1] + (state[15] >> 1)) >> 15; + } } // lowpass filter @@ -557,133 +544,130 @@ void WebRtcSpl_LPBy2ShortToInt(const int16_t* in, int32_t len, int32_t* out, // output: int32_t (normalized, not saturated) // state: filter state array; length = 8 void RTC_NO_SANITIZE("signed-integer-overflow") // bugs.webrtc.org/5486 -WebRtcSpl_LPBy2IntToInt(const int32_t* in, int32_t len, int32_t* out, - int32_t* state) -{ - int32_t tmp0, tmp1, diff; - int32_t i; - - len >>= 1; - - // lower allpass filter: odd input -> even output samples - in++; - // initial state of polyphase delay element - tmp0 = state[12]; - for (i = 0; i < len; i++) - { - diff = tmp0 - state[1]; - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[0] + diff * kResampleAllpass[1][0]; - state[0] = tmp0; - diff = tmp1 - state[2]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[1] + diff * kResampleAllpass[1][1]; - state[1] = tmp1; - diff = tmp0 - state[3]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[3] = state[2] + diff * kResampleAllpass[1][2]; - state[2] = tmp0; - - // scale down, round and store - out[i << 1] = state[3] >> 1; - tmp0 = in[i << 1]; - } - in--; - - // upper allpass filter: even input -> even output samples - for (i = 0; i < len; i++) - { - tmp0 = in[i << 1]; - diff = tmp0 - state[5]; - // UBSan: -794814117 - 1566149201 cannot be represented in type 'int' - - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[4] + diff * kResampleAllpass[0][0]; - state[4] = tmp0; - diff = tmp1 - state[6]; - // scale down and round - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[5] + diff * kResampleAllpass[0][1]; - state[5] = tmp1; - diff = tmp0 - state[7]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[7] = state[6] + diff * kResampleAllpass[0][2]; - state[6] = tmp0; - - // average the two allpass outputs, scale down and store - out[i << 1] = (out[i << 1] + (state[7] >> 1)) >> 15; - } - - // switch to odd output samples - out++; - - // lower allpass filter: even input -> odd output samples - for (i = 0; i < len; i++) - { - tmp0 = in[i << 1]; - diff = tmp0 - state[9]; - // 
scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[8] + diff * kResampleAllpass[1][0]; - state[8] = tmp0; - diff = tmp1 - state[10]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[9] + diff * kResampleAllpass[1][1]; - state[9] = tmp1; - diff = tmp0 - state[11]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[11] = state[10] + diff * kResampleAllpass[1][2]; - state[10] = tmp0; - - // scale down, round and store - out[i << 1] = state[11] >> 1; - } - - // upper allpass filter: odd input -> odd output samples - in++; - for (i = 0; i < len; i++) - { - tmp0 = in[i << 1]; - diff = tmp0 - state[13]; - // scale down and round - diff = (diff + (1 << 13)) >> 14; - tmp1 = state[12] + diff * kResampleAllpass[0][0]; - state[12] = tmp0; - diff = tmp1 - state[14]; - // scale down and round - diff = diff >> 14; - if (diff < 0) - diff += 1; - tmp0 = state[13] + diff * kResampleAllpass[0][1]; - state[13] = tmp1; - diff = tmp0 - state[15]; - // scale down and truncate - diff = diff >> 14; - if (diff < 0) - diff += 1; - state[15] = state[14] + diff * kResampleAllpass[0][2]; - state[14] = tmp0; - - // average the two allpass outputs, scale down and store - out[i << 1] = (out[i << 1] + (state[15] >> 1)) >> 15; - } + WebRtcSpl_LPBy2IntToInt(const int32_t* in, + int32_t len, + int32_t* out, + int32_t* state) { + int32_t tmp0, tmp1, diff; + int32_t i; + + len >>= 1; + + // lower allpass filter: odd input -> even output samples + in++; + // initial state of polyphase delay element + tmp0 = state[12]; + for (i = 0; i < len; i++) { + diff = tmp0 - state[1]; + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[0] + diff * kResampleAllpass[1][0]; + state[0] = tmp0; + diff = tmp1 - state[2]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[1] + diff * kResampleAllpass[1][1]; + state[1] = tmp1; + diff = tmp0 - state[3]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[3] = state[2] + diff * kResampleAllpass[1][2]; + state[2] = tmp0; + + // scale down, round and store + out[i << 1] = state[3] >> 1; + tmp0 = in[i << 1]; + } + in--; + + // upper allpass filter: even input -> even output samples + for (i = 0; i < len; i++) { + tmp0 = in[i << 1]; + diff = tmp0 - state[5]; + // UBSan: -794814117 - 1566149201 cannot be represented in type 'int' + + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[4] + diff * kResampleAllpass[0][0]; + state[4] = tmp0; + diff = tmp1 - state[6]; + // scale down and round + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[5] + diff * kResampleAllpass[0][1]; + state[5] = tmp1; + diff = tmp0 - state[7]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[7] = state[6] + diff * kResampleAllpass[0][2]; + state[6] = tmp0; + + // average the two allpass outputs, scale down and store + out[i << 1] = (out[i << 1] + (state[7] >> 1)) >> 15; + } + + // switch to odd output samples + out++; + + // lower allpass filter: even input -> odd output samples + for (i = 0; i < len; i++) { + tmp0 = in[i << 1]; + diff = tmp0 - state[9]; + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[8] + diff * kResampleAllpass[1][0]; + state[8] = tmp0; + diff = tmp1 - state[10]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[9] + diff * kResampleAllpass[1][1]; + state[9] = tmp1; + 
diff = tmp0 - state[11]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[11] = state[10] + diff * kResampleAllpass[1][2]; + state[10] = tmp0; + + // scale down, round and store + out[i << 1] = state[11] >> 1; + } + + // upper allpass filter: odd input -> odd output samples + in++; + for (i = 0; i < len; i++) { + tmp0 = in[i << 1]; + diff = tmp0 - state[13]; + // scale down and round + diff = (diff + (1 << 13)) >> 14; + tmp1 = state[12] + diff * kResampleAllpass[0][0]; + state[12] = tmp0; + diff = tmp1 - state[14]; + // scale down and round + diff = diff >> 14; + if (diff < 0) + diff += 1; + tmp0 = state[13] + diff * kResampleAllpass[0][1]; + state[13] = tmp1; + diff = tmp0 - state[15]; + // scale down and truncate + diff = diff >> 14; + if (diff < 0) + diff += 1; + state[15] = state[14] + diff * kResampleAllpass[0][2]; + state[14] = tmp0; + + // average the two allpass outputs, scale down and store + out[i << 1] = (out[i << 1] + (state[15] >> 1)) >> 15; + } } diff --git a/common_audio/signal_processing/resample_by_2_mips.c b/common_audio/signal_processing/resample_by_2_mips.c index f41bab7519..23e58c6ebd 100644 --- a/common_audio/signal_processing/resample_by_2_mips.c +++ b/common_audio/signal_processing/resample_by_2_mips.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the resampling by two functions. * The description header can be found in signal_processing_library.h @@ -49,12 +48,12 @@ void WebRtcSpl_DownsampleBy2(const int16_t* in, #if defined(MIPS_DSP_R2_LE) int32_t k1Res0, k1Res1, k1Res2, k2Res0, k2Res1, k2Res2; - k1Res0= 3284; - k1Res1= 24441; - k1Res2= 49528; - k2Res0= 12199; - k2Res1= 37471; - k2Res2= 60255; + k1Res0 = 3284; + k1Res1 = 24441; + k1Res2 = 49528; + k2Res0 = 12199; + k2Res1 = 37471; + k2Res2 = 60255; len1 = (len >> 1); const int32_t* inw = (int32_t*)in; @@ -62,97 +61,92 @@ void WebRtcSpl_DownsampleBy2(const int16_t* in, int32_t in322, in321; int32_t diff1, diff2; for (i = len1; i > 0; i--) { - __asm__ volatile ( - "lh %[in321], 0(%[inw]) \n\t" - "lh %[in322], 2(%[inw]) \n\t" + __asm__ volatile( + "lh %[in321], 0(%[inw]) \n\t" + "lh %[in322], 2(%[inw]) \n\t" - "sll %[in321], %[in321], 10 \n\t" - "sll %[in322], %[in322], 10 \n\t" + "sll %[in321], %[in321], 10 \n\t" + "sll %[in322], %[in322], 10 \n\t" - "addiu %[inw], %[inw], 4 \n\t" + "addiu %[inw], %[inw], 4 \n\t" - "subu %[diff1], %[in321], %[state1] \n\t" - "subu %[diff2], %[in322], %[state5] \n\t" + "subu %[diff1], %[in321], %[state1] \n\t" + "subu %[diff2], %[in322], %[state5] \n\t" - : [in322] "=&r" (in322), [in321] "=&r" (in321), - [diff1] "=&r" (diff1), [diff2] "=r" (diff2), [inw] "+r" (inw) - : [state1] "r" (state1), [state5] "r" (state5) - : "memory" - ); + : [in322] "=&r"(in322), [in321] "=&r"(in321), [diff1] "=&r"(diff1), + [diff2] "=r"(diff2), [inw] "+r"(inw) + : [state1] "r"(state1), [state5] "r"(state5) + : "memory"); - __asm__ volatile ( - "mult $ac0, %[diff1], %[k2Res0] \n\t" - "mult $ac1, %[diff2], %[k1Res0] \n\t" + __asm__ volatile( + "mult $ac0, %[diff1], %[k2Res0] \n\t" + "mult $ac1, %[diff2], %[k1Res0] \n\t" - "extr.w %[tmp11], $ac0, 16 \n\t" - "extr.w %[tmp12], $ac1, 16 \n\t" + "extr.w %[tmp11], $ac0, 16 \n\t" + "extr.w %[tmp12], $ac1, 16 \n\t" - "addu %[tmp11], %[state0], %[tmp11] \n\t" - "addu %[tmp12], %[state4], %[tmp12] \n\t" + "addu %[tmp11], %[state0], %[tmp11] \n\t" + "addu %[tmp12], %[state4], %[tmp12] \n\t" - "addiu %[state0], %[in321], 0 \n\t" - "addiu %[state4], %[in322], 0 \n\t" + 
"addiu %[state0], %[in321], 0 \n\t" + "addiu %[state4], %[in322], 0 \n\t" - "subu %[diff1], %[tmp11], %[state2] \n\t" - "subu %[diff2], %[tmp12], %[state6] \n\t" + "subu %[diff1], %[tmp11], %[state2] \n\t" + "subu %[diff2], %[tmp12], %[state6] \n\t" - "mult $ac0, %[diff1], %[k2Res1] \n\t" - "mult $ac1, %[diff2], %[k1Res1] \n\t" + "mult $ac0, %[diff1], %[k2Res1] \n\t" + "mult $ac1, %[diff2], %[k1Res1] \n\t" - "extr.w %[tmp21], $ac0, 16 \n\t" - "extr.w %[tmp22], $ac1, 16 \n\t" + "extr.w %[tmp21], $ac0, 16 \n\t" + "extr.w %[tmp22], $ac1, 16 \n\t" - "addu %[tmp21], %[state1], %[tmp21] \n\t" - "addu %[tmp22], %[state5], %[tmp22] \n\t" + "addu %[tmp21], %[state1], %[tmp21] \n\t" + "addu %[tmp22], %[state5], %[tmp22] \n\t" - "addiu %[state1], %[tmp11], 0 \n\t" - "addiu %[state5], %[tmp12], 0 \n\t" - : [tmp22] "=r" (tmp22), [tmp21] "=&r" (tmp21), - [tmp11] "=&r" (tmp11), [state0] "+r" (state0), - [state1] "+r" (state1), - [state2] "+r" (state2), - [state4] "+r" (state4), [tmp12] "=&r" (tmp12), - [state6] "+r" (state6), [state5] "+r" (state5) - : [k1Res1] "r" (k1Res1), [k2Res1] "r" (k2Res1), [k2Res0] "r" (k2Res0), - [diff2] "r" (diff2), [diff1] "r" (diff1), [in322] "r" (in322), - [in321] "r" (in321), [k1Res0] "r" (k1Res0) - : "hi", "lo", "$ac1hi", "$ac1lo" - ); + "addiu %[state1], %[tmp11], 0 \n\t" + "addiu %[state5], %[tmp12], 0 \n\t" + : [tmp22] "=r"(tmp22), [tmp21] "=&r"(tmp21), [tmp11] "=&r"(tmp11), + [state0] "+r"(state0), [state1] "+r"(state1), [state2] "+r"(state2), + [state4] "+r"(state4), [tmp12] "=&r"(tmp12), [state6] "+r"(state6), + [state5] "+r"(state5) + : [k1Res1] "r"(k1Res1), [k2Res1] "r"(k2Res1), [k2Res0] "r"(k2Res0), + [diff2] "r"(diff2), [diff1] "r"(diff1), [in322] "r"(in322), + [in321] "r"(in321), [k1Res0] "r"(k1Res0) + : "hi", "lo", "$ac1hi", "$ac1lo"); // upper allpass filter - __asm__ volatile ( - "subu %[diff1], %[tmp21], %[state3] \n\t" - "subu %[diff2], %[tmp22], %[state7] \n\t" - - "mult $ac0, %[diff1], %[k2Res2] \n\t" - "mult $ac1, %[diff2], %[k1Res2] \n\t" - "extr.w %[state3], $ac0, 16 \n\t" - "extr.w %[state7], $ac1, 16 \n\t" - "addu %[state3], %[state2], %[state3] \n\t" - "addu %[state7], %[state6], %[state7] \n\t" - - "addiu %[state2], %[tmp21], 0 \n\t" - "addiu %[state6], %[tmp22], 0 \n\t" - - // add two allpass outputs, divide by two and round - "addu %[out32], %[state3], %[state7] \n\t" - "addiu %[out32], %[out32], 1024 \n\t" - "sra %[out32], %[out32], 11 \n\t" - : [state3] "+r" (state3), [state6] "+r" (state6), - [state2] "+r" (state2), [diff2] "=&r" (diff2), - [out32] "=r" (out32), [diff1] "=&r" (diff1), [state7] "+r" (state7) - : [tmp22] "r" (tmp22), [tmp21] "r" (tmp21), - [k1Res2] "r" (k1Res2), [k2Res2] "r" (k2Res2) - : "hi", "lo", "$ac1hi", "$ac1lo" - ); + __asm__ volatile( + "subu %[diff1], %[tmp21], %[state3] \n\t" + "subu %[diff2], %[tmp22], %[state7] \n\t" + + "mult $ac0, %[diff1], %[k2Res2] \n\t" + "mult $ac1, %[diff2], %[k1Res2] \n\t" + "extr.w %[state3], $ac0, 16 \n\t" + "extr.w %[state7], $ac1, 16 \n\t" + "addu %[state3], %[state2], %[state3] \n\t" + "addu %[state7], %[state6], %[state7] \n\t" + + "addiu %[state2], %[tmp21], 0 \n\t" + "addiu %[state6], %[tmp22], 0 \n\t" + + // add two allpass outputs, divide by two and round + "addu %[out32], %[state3], %[state7] \n\t" + "addiu %[out32], %[out32], 1024 \n\t" + "sra %[out32], %[out32], 11 \n\t" + : [state3] "+r"(state3), [state6] "+r"(state6), [state2] "+r"(state2), + [diff2] "=&r"(diff2), [out32] "=r"(out32), [diff1] "=&r"(diff1), + [state7] "+r"(state7) + : [tmp22] "r"(tmp22), [tmp21] "r"(tmp21), 
[k1Res2] "r"(k1Res2), + [k2Res2] "r"(k2Res2) + : "hi", "lo", "$ac1hi", "$ac1lo"); // limit amplitude to prevent wrap-around, and write to output array *out++ = WebRtcSpl_SatW32ToW16(out32); } -#else // #if defined(MIPS_DSP_R2_LE) +#else // #if defined(MIPS_DSP_R2_LE) int32_t tmp1, tmp2, diff; int32_t in32; - len1 = (len >> 1)/4; + len1 = (len >> 1) / 4; for (i = len1; i > 0; i--) { // lower allpass filter in32 = (int32_t)(*in++) << 10; @@ -272,21 +266,20 @@ void WebRtcSpl_DownsampleBy2(const int16_t* in, *out++ = WebRtcSpl_SatW32ToW16(out32); } #endif // #if defined(MIPS_DSP_R2_LE) - __asm__ volatile ( - "sw %[state0], 0(%[filtState]) \n\t" - "sw %[state1], 4(%[filtState]) \n\t" - "sw %[state2], 8(%[filtState]) \n\t" - "sw %[state3], 12(%[filtState]) \n\t" - "sw %[state4], 16(%[filtState]) \n\t" - "sw %[state5], 20(%[filtState]) \n\t" - "sw %[state6], 24(%[filtState]) \n\t" - "sw %[state7], 28(%[filtState]) \n\t" - : - : [state0] "r" (state0), [state1] "r" (state1), [state2] "r" (state2), - [state3] "r" (state3), [state4] "r" (state4), [state5] "r" (state5), - [state6] "r" (state6), [state7] "r" (state7), [filtState] "r" (filtState) - : "memory" - ); + __asm__ volatile( + "sw %[state0], 0(%[filtState]) \n\t" + "sw %[state1], 4(%[filtState]) \n\t" + "sw %[state2], 8(%[filtState]) \n\t" + "sw %[state3], 12(%[filtState]) \n\t" + "sw %[state4], 16(%[filtState]) \n\t" + "sw %[state5], 20(%[filtState]) \n\t" + "sw %[state6], 24(%[filtState]) \n\t" + "sw %[state7], 28(%[filtState]) \n\t" + : + : [state0] "r"(state0), [state1] "r"(state1), [state2] "r"(state2), + [state3] "r"(state3), [state4] "r"(state4), [state5] "r"(state5), + [state6] "r"(state6), [state7] "r"(state7), [filtState] "r"(filtState) + : "memory"); } #endif // #if defined(MIPS32_LE) diff --git a/common_audio/signal_processing/resample_fractional.c b/common_audio/signal_processing/resample_fractional.c index 9ffe0aca60..ea7af8156d 100644 --- a/common_audio/signal_processing/resample_fractional.c +++ b/common_audio/signal_processing/resample_fractional.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the resampling functions between 48, 44, 32 and 24 kHz. 
* The description headers can be found in signal_processing_library.h @@ -19,122 +18,117 @@ // interpolation coefficients static const int16_t kCoefficients48To32[2][8] = { - {778, -2050, 1087, 23285, 12903, -3783, 441, 222}, - {222, 441, -3783, 12903, 23285, 1087, -2050, 778} -}; + {778, -2050, 1087, 23285, 12903, -3783, 441, 222}, + {222, 441, -3783, 12903, 23285, 1087, -2050, 778}}; static const int16_t kCoefficients32To24[3][8] = { - {767, -2362, 2434, 24406, 10620, -3838, 721, 90}, - {386, -381, -2646, 19062, 19062, -2646, -381, 386}, - {90, 721, -3838, 10620, 24406, 2434, -2362, 767} -}; + {767, -2362, 2434, 24406, 10620, -3838, 721, 90}, + {386, -381, -2646, 19062, 19062, -2646, -381, 386}, + {90, 721, -3838, 10620, 24406, 2434, -2362, 767}}; static const int16_t kCoefficients44To32[4][9] = { - {117, -669, 2245, -6183, 26267, 13529, -3245, 845, -138}, - {-101, 612, -2283, 8532, 29790, -5138, 1789, -524, 91}, - {50, -292, 1016, -3064, 32010, 3933, -1147, 315, -53}, - {-156, 974, -3863, 18603, 21691, -6246, 2353, -712, 126} -}; + {117, -669, 2245, -6183, 26267, 13529, -3245, 845, -138}, + {-101, 612, -2283, 8532, 29790, -5138, 1789, -524, 91}, + {50, -292, 1016, -3064, 32010, 3933, -1147, 315, -53}, + {-156, 974, -3863, 18603, 21691, -6246, 2353, -712, 126}}; // Resampling ratio: 2/3 // input: int32_t (normalized, not saturated) :: size 3 * K -// output: int32_t (shifted 15 positions to the left, + offset 16384) :: size 2 * K +// output: int32_t (shifted 15 positions to the left, + offset 16384) :: size 2 +// * K // K: number of blocks -void WebRtcSpl_Resample48khzTo32khz(const int32_t *In, int32_t *Out, size_t K) -{ - ///////////////////////////////////////////////////////////// - // Filter operation: - // - // Perform resampling (3 input samples -> 2 output samples); - // process in sub blocks of size 3 samples. - int32_t tmp; - size_t m; - - for (m = 0; m < K; m++) - { - tmp = 1 << 14; - tmp += kCoefficients48To32[0][0] * In[0]; - tmp += kCoefficients48To32[0][1] * In[1]; - tmp += kCoefficients48To32[0][2] * In[2]; - tmp += kCoefficients48To32[0][3] * In[3]; - tmp += kCoefficients48To32[0][4] * In[4]; - tmp += kCoefficients48To32[0][5] * In[5]; - tmp += kCoefficients48To32[0][6] * In[6]; - tmp += kCoefficients48To32[0][7] * In[7]; - Out[0] = tmp; - - tmp = 1 << 14; - tmp += kCoefficients48To32[1][0] * In[1]; - tmp += kCoefficients48To32[1][1] * In[2]; - tmp += kCoefficients48To32[1][2] * In[3]; - tmp += kCoefficients48To32[1][3] * In[4]; - tmp += kCoefficients48To32[1][4] * In[5]; - tmp += kCoefficients48To32[1][5] * In[6]; - tmp += kCoefficients48To32[1][6] * In[7]; - tmp += kCoefficients48To32[1][7] * In[8]; - Out[1] = tmp; - - // update pointers - In += 3; - Out += 2; - } +void WebRtcSpl_Resample48khzTo32khz(const int32_t* In, int32_t* Out, size_t K) { + ///////////////////////////////////////////////////////////// + // Filter operation: + // + // Perform resampling (3 input samples -> 2 output samples); + // process in sub blocks of size 3 samples. 
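  // For each of the K blocks, the two outputs below are 8-tap dot products
  // over In[0..7] and In[1..8], after which In advances by 3; the last block
  // therefore reads up to In[3 * K + 5]. Callers keep a few history samples
  // in front of the 3 * K new ones, as the 48 kHz -> 8 kHz wrapper above does
  // by copying 8 state words to tmpmem + 8 before calling this function.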
+ int32_t tmp; + size_t m; + + for (m = 0; m < K; m++) { + tmp = 1 << 14; + tmp += kCoefficients48To32[0][0] * In[0]; + tmp += kCoefficients48To32[0][1] * In[1]; + tmp += kCoefficients48To32[0][2] * In[2]; + tmp += kCoefficients48To32[0][3] * In[3]; + tmp += kCoefficients48To32[0][4] * In[4]; + tmp += kCoefficients48To32[0][5] * In[5]; + tmp += kCoefficients48To32[0][6] * In[6]; + tmp += kCoefficients48To32[0][7] * In[7]; + Out[0] = tmp; + + tmp = 1 << 14; + tmp += kCoefficients48To32[1][0] * In[1]; + tmp += kCoefficients48To32[1][1] * In[2]; + tmp += kCoefficients48To32[1][2] * In[3]; + tmp += kCoefficients48To32[1][3] * In[4]; + tmp += kCoefficients48To32[1][4] * In[5]; + tmp += kCoefficients48To32[1][5] * In[6]; + tmp += kCoefficients48To32[1][6] * In[7]; + tmp += kCoefficients48To32[1][7] * In[8]; + Out[1] = tmp; + + // update pointers + In += 3; + Out += 2; + } } // Resampling ratio: 3/4 // input: int32_t (normalized, not saturated) :: size 4 * K -// output: int32_t (shifted 15 positions to the left, + offset 16384) :: size 3 * K +// output: int32_t (shifted 15 positions to the left, + offset 16384) :: size 3 +// * K // K: number of blocks -void WebRtcSpl_Resample32khzTo24khz(const int32_t *In, int32_t *Out, size_t K) -{ - ///////////////////////////////////////////////////////////// - // Filter operation: - // - // Perform resampling (4 input samples -> 3 output samples); - // process in sub blocks of size 4 samples. - size_t m; - int32_t tmp; - - for (m = 0; m < K; m++) - { - tmp = 1 << 14; - tmp += kCoefficients32To24[0][0] * In[0]; - tmp += kCoefficients32To24[0][1] * In[1]; - tmp += kCoefficients32To24[0][2] * In[2]; - tmp += kCoefficients32To24[0][3] * In[3]; - tmp += kCoefficients32To24[0][4] * In[4]; - tmp += kCoefficients32To24[0][5] * In[5]; - tmp += kCoefficients32To24[0][6] * In[6]; - tmp += kCoefficients32To24[0][7] * In[7]; - Out[0] = tmp; - - tmp = 1 << 14; - tmp += kCoefficients32To24[1][0] * In[1]; - tmp += kCoefficients32To24[1][1] * In[2]; - tmp += kCoefficients32To24[1][2] * In[3]; - tmp += kCoefficients32To24[1][3] * In[4]; - tmp += kCoefficients32To24[1][4] * In[5]; - tmp += kCoefficients32To24[1][5] * In[6]; - tmp += kCoefficients32To24[1][6] * In[7]; - tmp += kCoefficients32To24[1][7] * In[8]; - Out[1] = tmp; - - tmp = 1 << 14; - tmp += kCoefficients32To24[2][0] * In[2]; - tmp += kCoefficients32To24[2][1] * In[3]; - tmp += kCoefficients32To24[2][2] * In[4]; - tmp += kCoefficients32To24[2][3] * In[5]; - tmp += kCoefficients32To24[2][4] * In[6]; - tmp += kCoefficients32To24[2][5] * In[7]; - tmp += kCoefficients32To24[2][6] * In[8]; - tmp += kCoefficients32To24[2][7] * In[9]; - Out[2] = tmp; - - // update pointers - In += 4; - Out += 3; - } +void WebRtcSpl_Resample32khzTo24khz(const int32_t* In, int32_t* Out, size_t K) { + ///////////////////////////////////////////////////////////// + // Filter operation: + // + // Perform resampling (4 input samples -> 3 output samples); + // process in sub blocks of size 4 samples. 
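  // As the in/out annotations above note, these fractional resamplers pass
  // samples between stages as int32_t values scaled by 1 << 15 with a 1 << 14
  // rounding offset already added. A minimal sketch of the conversion back to
  // int16_t, mirroring WebRtcSpl_DownBy2IntToShort / WebRtcSpl_UpBy2IntToShort
  // above (the helper name is illustrative, not part of this library):
  static int16_t SaturatedQ15ToS16(int32_t sample_q15_plus_offset) {
    int32_t v = sample_q15_plus_offset >> 15;  // offset added upstream rounds
    if (v > 0x00007FFF)                        // clamp to int16_t range
      v = 0x00007FFF;
    if (v < -0x00008000)
      v = -0x00008000;
    return (int16_t)v;
  }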
+ size_t m; + int32_t tmp; + + for (m = 0; m < K; m++) { + tmp = 1 << 14; + tmp += kCoefficients32To24[0][0] * In[0]; + tmp += kCoefficients32To24[0][1] * In[1]; + tmp += kCoefficients32To24[0][2] * In[2]; + tmp += kCoefficients32To24[0][3] * In[3]; + tmp += kCoefficients32To24[0][4] * In[4]; + tmp += kCoefficients32To24[0][5] * In[5]; + tmp += kCoefficients32To24[0][6] * In[6]; + tmp += kCoefficients32To24[0][7] * In[7]; + Out[0] = tmp; + + tmp = 1 << 14; + tmp += kCoefficients32To24[1][0] * In[1]; + tmp += kCoefficients32To24[1][1] * In[2]; + tmp += kCoefficients32To24[1][2] * In[3]; + tmp += kCoefficients32To24[1][3] * In[4]; + tmp += kCoefficients32To24[1][4] * In[5]; + tmp += kCoefficients32To24[1][5] * In[6]; + tmp += kCoefficients32To24[1][6] * In[7]; + tmp += kCoefficients32To24[1][7] * In[8]; + Out[1] = tmp; + + tmp = 1 << 14; + tmp += kCoefficients32To24[2][0] * In[2]; + tmp += kCoefficients32To24[2][1] * In[3]; + tmp += kCoefficients32To24[2][2] * In[4]; + tmp += kCoefficients32To24[2][3] * In[5]; + tmp += kCoefficients32To24[2][4] * In[6]; + tmp += kCoefficients32To24[2][5] * In[7]; + tmp += kCoefficients32To24[2][6] * In[8]; + tmp += kCoefficients32To24[2][7] * In[9]; + Out[2] = tmp; + + // update pointers + In += 4; + Out += 3; + } } // @@ -144,96 +138,99 @@ void WebRtcSpl_Resample32khzTo24khz(const int32_t *In, int32_t *Out, size_t K) // // compute two inner-products and store them to output array -static void WebRtcSpl_ResampDotProduct(const int32_t *in1, const int32_t *in2, - const int16_t *coef_ptr, int32_t *out1, - int32_t *out2) -{ - int32_t tmp1 = 16384; - int32_t tmp2 = 16384; - int16_t coef; - - coef = coef_ptr[0]; - tmp1 += coef * in1[0]; - tmp2 += coef * in2[-0]; - - coef = coef_ptr[1]; - tmp1 += coef * in1[1]; - tmp2 += coef * in2[-1]; - - coef = coef_ptr[2]; - tmp1 += coef * in1[2]; - tmp2 += coef * in2[-2]; - - coef = coef_ptr[3]; - tmp1 += coef * in1[3]; - tmp2 += coef * in2[-3]; - - coef = coef_ptr[4]; - tmp1 += coef * in1[4]; - tmp2 += coef * in2[-4]; - - coef = coef_ptr[5]; - tmp1 += coef * in1[5]; - tmp2 += coef * in2[-5]; - - coef = coef_ptr[6]; - tmp1 += coef * in1[6]; - tmp2 += coef * in2[-6]; - - coef = coef_ptr[7]; - tmp1 += coef * in1[7]; - tmp2 += coef * in2[-7]; - - coef = coef_ptr[8]; - *out1 = tmp1 + coef * in1[8]; - *out2 = tmp2 + coef * in2[-8]; +static void WebRtcSpl_ResampDotProduct(const int32_t* in1, + const int32_t* in2, + const int16_t* coef_ptr, + int32_t* out1, + int32_t* out2) { + int32_t tmp1 = 16384; + int32_t tmp2 = 16384; + int16_t coef; + + coef = coef_ptr[0]; + tmp1 += coef * in1[0]; + tmp2 += coef * in2[-0]; + + coef = coef_ptr[1]; + tmp1 += coef * in1[1]; + tmp2 += coef * in2[-1]; + + coef = coef_ptr[2]; + tmp1 += coef * in1[2]; + tmp2 += coef * in2[-2]; + + coef = coef_ptr[3]; + tmp1 += coef * in1[3]; + tmp2 += coef * in2[-3]; + + coef = coef_ptr[4]; + tmp1 += coef * in1[4]; + tmp2 += coef * in2[-4]; + + coef = coef_ptr[5]; + tmp1 += coef * in1[5]; + tmp2 += coef * in2[-5]; + + coef = coef_ptr[6]; + tmp1 += coef * in1[6]; + tmp2 += coef * in2[-6]; + + coef = coef_ptr[7]; + tmp1 += coef * in1[7]; + tmp2 += coef * in2[-7]; + + coef = coef_ptr[8]; + *out1 = tmp1 + coef * in1[8]; + *out2 = tmp2 + coef * in2[-8]; } // Resampling ratio: 8/11 // input: int32_t (normalized, not saturated) :: size 11 * K -// output: int32_t (shifted 15 positions to the left, + offset 16384) :: size 8 * K +// output: int32_t (shifted 15 positions to the left, + offset 16384) :: size 8 +// * K // K: number of blocks -void 
WebRtcSpl_Resample44khzTo32khz(const int32_t *In, int32_t *Out, size_t K) -{ - ///////////////////////////////////////////////////////////// - // Filter operation: - // - // Perform resampling (11 input samples -> 8 output samples); - // process in sub blocks of size 11 samples. - int32_t tmp; - size_t m; - - for (m = 0; m < K; m++) - { - tmp = 1 << 14; - - // first output sample - Out[0] = ((int32_t)In[3] << 15) + tmp; - - // sum and accumulate filter coefficients and input samples - tmp += kCoefficients44To32[3][0] * In[5]; - tmp += kCoefficients44To32[3][1] * In[6]; - tmp += kCoefficients44To32[3][2] * In[7]; - tmp += kCoefficients44To32[3][3] * In[8]; - tmp += kCoefficients44To32[3][4] * In[9]; - tmp += kCoefficients44To32[3][5] * In[10]; - tmp += kCoefficients44To32[3][6] * In[11]; - tmp += kCoefficients44To32[3][7] * In[12]; - tmp += kCoefficients44To32[3][8] * In[13]; - Out[4] = tmp; - - // sum and accumulate filter coefficients and input samples - WebRtcSpl_ResampDotProduct(&In[0], &In[17], kCoefficients44To32[0], &Out[1], &Out[7]); - - // sum and accumulate filter coefficients and input samples - WebRtcSpl_ResampDotProduct(&In[2], &In[15], kCoefficients44To32[1], &Out[2], &Out[6]); - - // sum and accumulate filter coefficients and input samples - WebRtcSpl_ResampDotProduct(&In[3], &In[14], kCoefficients44To32[2], &Out[3], &Out[5]); - - // update pointers - In += 11; - Out += 8; - } +void WebRtcSpl_Resample44khzTo32khz(const int32_t* In, int32_t* Out, size_t K) { + ///////////////////////////////////////////////////////////// + // Filter operation: + // + // Perform resampling (11 input samples -> 8 output samples); + // process in sub blocks of size 11 samples. + int32_t tmp; + size_t m; + + for (m = 0; m < K; m++) { + tmp = 1 << 14; + + // first output sample + Out[0] = ((int32_t)In[3] << 15) + tmp; + + // sum and accumulate filter coefficients and input samples + tmp += kCoefficients44To32[3][0] * In[5]; + tmp += kCoefficients44To32[3][1] * In[6]; + tmp += kCoefficients44To32[3][2] * In[7]; + tmp += kCoefficients44To32[3][3] * In[8]; + tmp += kCoefficients44To32[3][4] * In[9]; + tmp += kCoefficients44To32[3][5] * In[10]; + tmp += kCoefficients44To32[3][6] * In[11]; + tmp += kCoefficients44To32[3][7] * In[12]; + tmp += kCoefficients44To32[3][8] * In[13]; + Out[4] = tmp; + + // sum and accumulate filter coefficients and input samples + WebRtcSpl_ResampDotProduct(&In[0], &In[17], kCoefficients44To32[0], &Out[1], + &Out[7]); + + // sum and accumulate filter coefficients and input samples + WebRtcSpl_ResampDotProduct(&In[2], &In[15], kCoefficients44To32[1], &Out[2], + &Out[6]); + + // sum and accumulate filter coefficients and input samples + WebRtcSpl_ResampDotProduct(&In[3], &In[14], kCoefficients44To32[2], &Out[3], + &Out[5]); + + // update pointers + In += 11; + Out += 8; + } } diff --git a/common_audio/signal_processing/signal_processing_unittest.cc b/common_audio/signal_processing/signal_processing_unittest.cc index 80d605bc0b..7495702d28 100644 --- a/common_audio/signal_processing/signal_processing_unittest.cc +++ b/common_audio/signal_processing/signal_processing_unittest.cc @@ -9,21 +9,26 @@ */ #include +#include +#include +#include +#include "common_audio/signal_processing/dot_product_with_scale.h" #include "common_audio/signal_processing/include/signal_processing_library.h" +#include "common_audio/signal_processing/include/spl_inl.h" #include "rtc_base/strings/string_builder.h" #include "test/gtest.h" -static const size_t kVector16Size = 9; -static const int16_t 
vector16[kVector16Size] = {1, - -15511, - 4323, - 1963, - WEBRTC_SPL_WORD16_MAX, - 0, - WEBRTC_SPL_WORD16_MIN + 5, - -3333, - 345}; +static const int16_t vector16[] = {1, + -15511, + 4323, + 1963, + WEBRTC_SPL_WORD16_MAX, + 0, + WEBRTC_SPL_WORD16_MIN + 5, + -3333, + 345}; +static const size_t kVector16Size = std::size(vector16); TEST(SplTest, MacroTest) { // Macros with inputs. @@ -128,7 +133,7 @@ TEST(SplTest, AddSubSatW32) { INT32_MIN, std::min(INT32_MAX, static_cast(a) + b)); const int64_t diff = std::max( INT32_MIN, std::min(INT32_MAX, static_cast(a) - b)); - rtc::StringBuilder ss; + webrtc::StringBuilder ss; ss << a << " +/- " << b << ": sum " << sum << ", diff " << diff; SCOPED_TRACE(ss.str()); EXPECT_EQ(sum, WebRtcSpl_AddSatW32(a, b)); @@ -163,7 +168,12 @@ TEST(SplTest, CountLeadingZeros64) { } } +// TODO(bugs.webrtc.org/345674544): Fix/enable. +#if defined(__has_feature) && __has_feature(undefined_behavior_sanitizer) +TEST(SplTest, DISABLED_MathOperationsTest) { +#else TEST(SplTest, MathOperationsTest) { +#endif int A = 1134567892; int32_t num = 117; int32_t den = -5; @@ -248,104 +258,105 @@ TEST(SplTest, MinMaxOperationsTest) { // Vectors to test the cases where minimum values have to be caught // outside of the unrolled loops in ARM-Neon. - int16_t vector16[kVectorSize] = {-1, - 7485, - 0, - 3333, - -18283, - 0, - 12334, - -29871, - 988, - -3333, - 345, - -456, - 222, - 999, - 888, - 8774, - WEBRTC_SPL_WORD16_MIN}; - int32_t vector32[kVectorSize] = {-1, - 0, - 283211, - 3333, - 8712345, - 0, - -3333, - 89345, - -374585456, - 222, - 999, - 122345334, - -12389756, - -987329871, - 888, - -2, - WEBRTC_SPL_WORD32_MIN}; + int16_t vector16_arm[kVectorSize] = {-1, + 7485, + 0, + 3333, + -18283, + 0, + 12334, + -29871, + 988, + -3333, + 345, + -456, + 222, + 999, + 888, + 8774, + WEBRTC_SPL_WORD16_MIN}; + int32_t vector32_arm[kVectorSize] = {-1, + 0, + 283211, + 3333, + 8712345, + 0, + -3333, + 89345, + -374585456, + 222, + 999, + 122345334, + -12389756, + -987329871, + 888, + -2, + WEBRTC_SPL_WORD32_MIN}; EXPECT_EQ(WEBRTC_SPL_WORD16_MIN, - WebRtcSpl_MinValueW16(vector16, kVectorSize)); + WebRtcSpl_MinValueW16(vector16_arm, kVectorSize)); EXPECT_EQ(WEBRTC_SPL_WORD32_MIN, - WebRtcSpl_MinValueW32(vector32, kVectorSize)); - EXPECT_EQ(kVectorSize - 1, WebRtcSpl_MinIndexW16(vector16, kVectorSize)); - EXPECT_EQ(kVectorSize - 1, WebRtcSpl_MinIndexW32(vector32, kVectorSize)); + WebRtcSpl_MinValueW32(vector32_arm, kVectorSize)); + EXPECT_EQ(kVectorSize - 1, WebRtcSpl_MinIndexW16(vector16_arm, kVectorSize)); + EXPECT_EQ(kVectorSize - 1, WebRtcSpl_MinIndexW32(vector32_arm, kVectorSize)); EXPECT_EQ(WEBRTC_SPL_WORD16_MIN, - WebRtcSpl_MaxAbsElementW16(vector16, kVectorSize)); + WebRtcSpl_MaxAbsElementW16(vector16_arm, kVectorSize)); int16_t min_value, max_value; - WebRtcSpl_MinMaxW16(vector16, kVectorSize, &min_value, &max_value); + WebRtcSpl_MinMaxW16(vector16_arm, kVectorSize, &min_value, &max_value); EXPECT_EQ(WEBRTC_SPL_WORD16_MIN, min_value); EXPECT_EQ(12334, max_value); // Test the cases where maximum values have to be caught // outside of the unrolled loops in ARM-Neon. 
- vector16[kVectorSize - 1] = WEBRTC_SPL_WORD16_MAX; - vector32[kVectorSize - 1] = WEBRTC_SPL_WORD32_MAX; + vector16_arm[kVectorSize - 1] = WEBRTC_SPL_WORD16_MAX; + vector32_arm[kVectorSize - 1] = WEBRTC_SPL_WORD32_MAX; EXPECT_EQ(WEBRTC_SPL_WORD16_MAX, - WebRtcSpl_MaxAbsValueW16(vector16, kVectorSize)); + WebRtcSpl_MaxAbsValueW16(vector16_arm, kVectorSize)); EXPECT_EQ(WEBRTC_SPL_WORD16_MAX, - WebRtcSpl_MaxValueW16(vector16, kVectorSize)); + WebRtcSpl_MaxValueW16(vector16_arm, kVectorSize)); EXPECT_EQ(WEBRTC_SPL_WORD32_MAX, - WebRtcSpl_MaxAbsValueW32(vector32, kVectorSize)); + WebRtcSpl_MaxAbsValueW32(vector32_arm, kVectorSize)); EXPECT_EQ(WEBRTC_SPL_WORD32_MAX, - WebRtcSpl_MaxValueW32(vector32, kVectorSize)); - EXPECT_EQ(kVectorSize - 1, WebRtcSpl_MaxAbsIndexW16(vector16, kVectorSize)); - EXPECT_EQ(kVectorSize - 1, WebRtcSpl_MaxIndexW16(vector16, kVectorSize)); - EXPECT_EQ(kVectorSize - 1, WebRtcSpl_MaxIndexW32(vector32, kVectorSize)); + WebRtcSpl_MaxValueW32(vector32_arm, kVectorSize)); + EXPECT_EQ(kVectorSize - 1, + WebRtcSpl_MaxAbsIndexW16(vector16_arm, kVectorSize)); + EXPECT_EQ(kVectorSize - 1, WebRtcSpl_MaxIndexW16(vector16_arm, kVectorSize)); + EXPECT_EQ(kVectorSize - 1, WebRtcSpl_MaxIndexW32(vector32_arm, kVectorSize)); EXPECT_EQ(WEBRTC_SPL_WORD16_MAX, - WebRtcSpl_MaxAbsElementW16(vector16, kVectorSize)); - WebRtcSpl_MinMaxW16(vector16, kVectorSize, &min_value, &max_value); + WebRtcSpl_MaxAbsElementW16(vector16_arm, kVectorSize)); + WebRtcSpl_MinMaxW16(vector16_arm, kVectorSize, &min_value, &max_value); EXPECT_EQ(-29871, min_value); EXPECT_EQ(WEBRTC_SPL_WORD16_MAX, max_value); // Test the cases where multiple maximum and minimum values are present. - vector16[1] = WEBRTC_SPL_WORD16_MAX; - vector16[6] = WEBRTC_SPL_WORD16_MIN; - vector16[11] = WEBRTC_SPL_WORD16_MIN; - vector32[1] = WEBRTC_SPL_WORD32_MAX; - vector32[6] = WEBRTC_SPL_WORD32_MIN; - vector32[11] = WEBRTC_SPL_WORD32_MIN; + vector16_arm[1] = WEBRTC_SPL_WORD16_MAX; + vector16_arm[6] = WEBRTC_SPL_WORD16_MIN; + vector16_arm[11] = WEBRTC_SPL_WORD16_MIN; + vector32_arm[1] = WEBRTC_SPL_WORD32_MAX; + vector32_arm[6] = WEBRTC_SPL_WORD32_MIN; + vector32_arm[11] = WEBRTC_SPL_WORD32_MIN; EXPECT_EQ(WEBRTC_SPL_WORD16_MAX, - WebRtcSpl_MaxAbsValueW16(vector16, kVectorSize)); + WebRtcSpl_MaxAbsValueW16(vector16_arm, kVectorSize)); EXPECT_EQ(WEBRTC_SPL_WORD16_MAX, - WebRtcSpl_MaxValueW16(vector16, kVectorSize)); + WebRtcSpl_MaxValueW16(vector16_arm, kVectorSize)); EXPECT_EQ(WEBRTC_SPL_WORD16_MIN, - WebRtcSpl_MinValueW16(vector16, kVectorSize)); + WebRtcSpl_MinValueW16(vector16_arm, kVectorSize)); EXPECT_EQ(WEBRTC_SPL_WORD32_MAX, - WebRtcSpl_MaxAbsValueW32(vector32, kVectorSize)); + WebRtcSpl_MaxAbsValueW32(vector32_arm, kVectorSize)); EXPECT_EQ(WEBRTC_SPL_WORD32_MAX, - WebRtcSpl_MaxValueW32(vector32, kVectorSize)); + WebRtcSpl_MaxValueW32(vector32_arm, kVectorSize)); EXPECT_EQ(WEBRTC_SPL_WORD32_MIN, - WebRtcSpl_MinValueW32(vector32, kVectorSize)); - EXPECT_EQ(6u, WebRtcSpl_MaxAbsIndexW16(vector16, kVectorSize)); - EXPECT_EQ(1u, WebRtcSpl_MaxIndexW16(vector16, kVectorSize)); - EXPECT_EQ(1u, WebRtcSpl_MaxIndexW32(vector32, kVectorSize)); - EXPECT_EQ(6u, WebRtcSpl_MinIndexW16(vector16, kVectorSize)); - EXPECT_EQ(6u, WebRtcSpl_MinIndexW32(vector32, kVectorSize)); + WebRtcSpl_MinValueW32(vector32_arm, kVectorSize)); + EXPECT_EQ(6u, WebRtcSpl_MaxAbsIndexW16(vector16_arm, kVectorSize)); + EXPECT_EQ(1u, WebRtcSpl_MaxIndexW16(vector16_arm, kVectorSize)); + EXPECT_EQ(1u, WebRtcSpl_MaxIndexW32(vector32_arm, kVectorSize)); + EXPECT_EQ(6u, 
WebRtcSpl_MinIndexW16(vector16_arm, kVectorSize)); + EXPECT_EQ(6u, WebRtcSpl_MinIndexW32(vector32_arm, kVectorSize)); EXPECT_EQ(WEBRTC_SPL_WORD16_MIN, - WebRtcSpl_MaxAbsElementW16(vector16, kVectorSize)); - WebRtcSpl_MinMaxW16(vector16, kVectorSize, &min_value, &max_value); + WebRtcSpl_MaxAbsElementW16(vector16_arm, kVectorSize)); + WebRtcSpl_MinMaxW16(vector16_arm, kVectorSize, &min_value, &max_value); EXPECT_EQ(WEBRTC_SPL_WORD16_MIN, min_value); EXPECT_EQ(WEBRTC_SPL_WORD16_MAX, max_value); @@ -494,9 +505,9 @@ TEST(SplTest, FilterTest) { kFilterOrder + 1, 1); EXPECT_EQ(0, data_out[kFilterOrder]); - EXPECT_EQ(kVectorSize, WebRtcSpl_FilterAR(A5, 5, data_in, kVectorSize, bState, - kVectorSize, bStateLow, kVectorSize, - data_out, bTmp16Low, kVectorSize)); + EXPECT_EQ(kVectorSize, + WebRtcSpl_FilterAR(A5, 5, data_in, kVectorSize, bState, kVectorSize, + bStateLow, data_out, bTmp16Low)); } TEST(SplTest, RandTest) { diff --git a/common_audio/signal_processing/spl_inl.c b/common_audio/signal_processing/spl_inl.c index d09e308ed3..01897f2fac 100644 --- a/common_audio/signal_processing/spl_inl.c +++ b/common_audio/signal_processing/spl_inl.c @@ -8,10 +8,10 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include - #include "common_audio/signal_processing/include/spl_inl.h" +#include + // Table used by WebRtcSpl_CountLeadingZeros32_NotBuiltin. For each uint32_t n // that's a sequence of 0 bits followed by a sequence of 1 bits, the entry at // index (n * 0x8c0b2891) >> 26 in this table gives the number of zero bits in diff --git a/common_audio/signal_processing/spl_sqrt.c b/common_audio/signal_processing/spl_sqrt.c index cf9448ac97..2ef119d825 100644 --- a/common_audio/signal_processing/spl_sqrt.c +++ b/common_audio/signal_processing/spl_sqrt.c @@ -8,187 +8,181 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the function WebRtcSpl_Sqrt(). 
* The description header can be found in signal_processing_library.h * */ -#include "rtc_base/checks.h" #include "common_audio/signal_processing/include/signal_processing_library.h" +#include "rtc_base/checks.h" int32_t WebRtcSpl_SqrtLocal(int32_t in); -int32_t WebRtcSpl_SqrtLocal(int32_t in) -{ +int32_t WebRtcSpl_SqrtLocal(int32_t in) { + int16_t x_half, t16; + int32_t A, B, x2; - int16_t x_half, t16; - int32_t A, B, x2; + /* The following block performs: + y=in/2 + x=y-2^30 + x_half=x/2^31 + t = 1 + (x_half) - 0.5*((x_half)^2) + 0.5*((x_half)^3) - 0.625*((x_half)^4) + + 0.875*((x_half)^5) + */ - /* The following block performs: - y=in/2 - x=y-2^30 - x_half=x/2^31 - t = 1 + (x_half) - 0.5*((x_half)^2) + 0.5*((x_half)^3) - 0.625*((x_half)^4) - + 0.875*((x_half)^5) - */ + B = in / 2; - B = in / 2; + B = B - ((int32_t)0x40000000); // B = in/2 - 1/2 + x_half = (int16_t)(B >> 16); // x_half = x/2 = (in-1)/2 + B = B + ((int32_t)0x40000000); // B = 1 + x/2 + B = B + + ((int32_t)0x40000000); // Add 0.5 twice (since 1.0 does not exist in Q31) - B = B - ((int32_t)0x40000000); // B = in/2 - 1/2 - x_half = (int16_t)(B >> 16); // x_half = x/2 = (in-1)/2 - B = B + ((int32_t)0x40000000); // B = 1 + x/2 - B = B + ((int32_t)0x40000000); // Add 0.5 twice (since 1.0 does not exist in Q31) + x2 = ((int32_t)x_half) * ((int32_t)x_half) * 2; // A = (x/2)^2 + A = -x2; // A = -(x/2)^2 + B = B + (A >> 1); // B = 1 + x/2 - 0.5*(x/2)^2 - x2 = ((int32_t)x_half) * ((int32_t)x_half) * 2; // A = (x/2)^2 - A = -x2; // A = -(x/2)^2 - B = B + (A >> 1); // B = 1 + x/2 - 0.5*(x/2)^2 + A >>= 16; + A = A * A * 2; // A = (x/2)^4 + t16 = (int16_t)(A >> 16); + B += -20480 * t16 * 2; // B = B - 0.625*A + // After this, B = 1 + x/2 - 0.5*(x/2)^2 - 0.625*(x/2)^4 - A >>= 16; - A = A * A * 2; // A = (x/2)^4 - t16 = (int16_t)(A >> 16); - B += -20480 * t16 * 2; // B = B - 0.625*A - // After this, B = 1 + x/2 - 0.5*(x/2)^2 - 0.625*(x/2)^4 + A = x_half * t16 * 2; // A = (x/2)^5 + t16 = (int16_t)(A >> 16); + B += 28672 * t16 * 2; // B = B + 0.875*A + // After this, B = 1 + x/2 - 0.5*(x/2)^2 - 0.625*(x/2)^4 + 0.875*(x/2)^5 - A = x_half * t16 * 2; // A = (x/2)^5 - t16 = (int16_t)(A >> 16); - B += 28672 * t16 * 2; // B = B + 0.875*A - // After this, B = 1 + x/2 - 0.5*(x/2)^2 - 0.625*(x/2)^4 + 0.875*(x/2)^5 + t16 = (int16_t)(x2 >> 16); + A = x_half * t16 * 2; // A = x/2^3 - t16 = (int16_t)(x2 >> 16); - A = x_half * t16 * 2; // A = x/2^3 + B = B + (A >> 1); // B = B + 0.5*A + // After this, B = 1 + x/2 - 0.5*(x/2)^2 + 0.5*(x/2)^3 - 0.625*(x/2)^4 + + // 0.875*(x/2)^5 - B = B + (A >> 1); // B = B + 0.5*A - // After this, B = 1 + x/2 - 0.5*(x/2)^2 + 0.5*(x/2)^3 - 0.625*(x/2)^4 + 0.875*(x/2)^5 + B = B + ((int32_t)32768); // Round off bit - B = B + ((int32_t)32768); // Round off bit - - return B; + return B; } -int32_t WebRtcSpl_Sqrt(int32_t value) -{ - /* - Algorithm: - - Six term Taylor Series is used here to compute the square root of a number - y^0.5 = (1+x)^0.5 where x = y-1 - = 1+(x/2)-0.5*((x/2)^2+0.5*((x/2)^3-0.625*((x/2)^4+0.875*((x/2)^5) - 0.5 <= x < 1 - - Example of how the algorithm works, with ut=sqrt(in), and - with in=73632 and ut=271 (even shift value case): - - in=73632 - y= in/131072 - x=y-1 - t = 1 + (x/2) - 0.5*((x/2)^2) + 0.5*((x/2)^3) - 0.625*((x/2)^4) + 0.875*((x/2)^5) - ut=t*(1/sqrt(2))*512 - - or: - - in=73632 - in2=73632*2^14 - y= in2/2^31 - x=y-1 - t = 1 + (x/2) - 0.5*((x/2)^2) + 0.5*((x/2)^3) - 0.625*((x/2)^4) + 0.875*((x/2)^5) - ut=t*(1/sqrt(2)) - ut2=ut*2^9 - - which gives: - - in = 73632 - in2 = 1206386688 - y = 
0.56176757812500 - x = -0.43823242187500 - t = 0.74973506527313 - ut = 0.53014274874797 - ut2 = 2.714330873589594e+002 - - or: - - in=73632 - in2=73632*2^14 - y=in2/2 - x=y-2^30 - x_half=x/2^31 - t = 1 + (x_half) - 0.5*((x_half)^2) + 0.5*((x_half)^3) - 0.625*((x_half)^4) - + 0.875*((x_half)^5) - ut=t*(1/sqrt(2)) - ut2=ut*2^9 - - which gives: - - in = 73632 - in2 = 1206386688 - y = 603193344 - x = -470548480 - x_half = -0.21911621093750 - t = 0.74973506527313 - ut = 0.53014274874797 - ut2 = 2.714330873589594e+002 - - */ - - int16_t x_norm, nshift, t16, sh; - int32_t A; - - int16_t k_sqrt_2 = 23170; // 1/sqrt2 (==5a82) - - A = value; - - // The convention in this function is to calculate sqrt(abs(A)). Negate the - // input if it is negative. - if (A < 0) { - if (A == WEBRTC_SPL_WORD32_MIN) { - // This number cannot be held in an int32_t after negating. - // Map it to the maximum positive value. - A = WEBRTC_SPL_WORD32_MAX; - } else { - A = -A; - } - } else if (A == 0) { - return 0; // sqrt(0) = 0 +int32_t WebRtcSpl_Sqrt(int32_t value) { + /* + Algorithm: + + Six term Taylor Series is used here to compute the square root of a number + y^0.5 = (1+x)^0.5 where x = y-1 + = 1+(x/2)-0.5*((x/2)^2+0.5*((x/2)^3-0.625*((x/2)^4+0.875*((x/2)^5) + 0.5 <= x < 1 + + Example of how the algorithm works, with ut=sqrt(in), and + with in=73632 and ut=271 (even shift value case): + + in=73632 + y= in/131072 + x=y-1 + t = 1 + (x/2) - 0.5*((x/2)^2) + 0.5*((x/2)^3) - 0.625*((x/2)^4) + + 0.875*((x/2)^5) ut=t*(1/sqrt(2))*512 + + or: + + in=73632 + in2=73632*2^14 + y= in2/2^31 + x=y-1 + t = 1 + (x/2) - 0.5*((x/2)^2) + 0.5*((x/2)^3) - 0.625*((x/2)^4) + + 0.875*((x/2)^5) ut=t*(1/sqrt(2)) ut2=ut*2^9 + + which gives: + + in = 73632 + in2 = 1206386688 + y = 0.56176757812500 + x = -0.43823242187500 + t = 0.74973506527313 + ut = 0.53014274874797 + ut2 = 2.714330873589594e+002 + + or: + + in=73632 + in2=73632*2^14 + y=in2/2 + x=y-2^30 + x_half=x/2^31 + t = 1 + (x_half) - 0.5*((x_half)^2) + 0.5*((x_half)^3) - 0.625*((x_half)^4) + + 0.875*((x_half)^5) + ut=t*(1/sqrt(2)) + ut2=ut*2^9 + + which gives: + + in = 73632 + in2 = 1206386688 + y = 603193344 + x = -470548480 + x_half = -0.21911621093750 + t = 0.74973506527313 + ut = 0.53014274874797 + ut2 = 2.714330873589594e+002 + + */ + + int16_t x_norm, nshift, t16, sh; + int32_t A; + + int16_t k_sqrt_2 = 23170; // 1/sqrt2 (==5a82) + + A = value; + + // The convention in this function is to calculate sqrt(abs(A)). Negate the + // input if it is negative. + if (A < 0) { + if (A == WEBRTC_SPL_WORD32_MIN) { + // This number cannot be held in an int32_t after negating. + // Map it to the maximum positive value. 
+ A = WEBRTC_SPL_WORD32_MAX; + } else { + A = -A; } + } else if (A == 0) { + return 0; // sqrt(0) = 0 + } - sh = WebRtcSpl_NormW32(A); // # shifts to normalize A - A = WEBRTC_SPL_LSHIFT_W32(A, sh); // Normalize A - if (A < (WEBRTC_SPL_WORD32_MAX - 32767)) - { - A = A + ((int32_t)32768); // Round off bit - } else - { - A = WEBRTC_SPL_WORD32_MAX; - } + sh = WebRtcSpl_NormW32(A); // # shifts to normalize A + A = WEBRTC_SPL_LSHIFT_W32(A, sh); // Normalize A + if (A < (WEBRTC_SPL_WORD32_MAX - 32767)) { + A = A + ((int32_t)32768); // Round off bit + } else { + A = WEBRTC_SPL_WORD32_MAX; + } - x_norm = (int16_t)(A >> 16); // x_norm = AH + x_norm = (int16_t)(A >> 16); // x_norm = AH - nshift = (sh / 2); - RTC_DCHECK_GE(nshift, 0); + nshift = (sh / 2); + RTC_DCHECK_GE(nshift, 0); - A = (int32_t)WEBRTC_SPL_LSHIFT_W32((int32_t)x_norm, 16); - A = WEBRTC_SPL_ABS_W32(A); // A = abs(x_norm<<16) - A = WebRtcSpl_SqrtLocal(A); // A = sqrt(A) + A = (int32_t)WEBRTC_SPL_LSHIFT_W32((int32_t)x_norm, 16); + A = WEBRTC_SPL_ABS_W32(A); // A = abs(x_norm<<16) + A = WebRtcSpl_SqrtLocal(A); // A = sqrt(A) - if (2 * nshift == sh) { - // Even shift value case + if (2 * nshift == sh) { + // Even shift value case - t16 = (int16_t)(A >> 16); // t16 = AH + t16 = (int16_t)(A >> 16); // t16 = AH - A = k_sqrt_2 * t16 * 2; // A = 1/sqrt(2)*t16 - A = A + ((int32_t)32768); // Round off - A = A & ((int32_t)0x7fff0000); // Round off + A = k_sqrt_2 * t16 * 2; // A = 1/sqrt(2)*t16 + A = A + ((int32_t)32768); // Round off + A = A & ((int32_t)0x7fff0000); // Round off - A >>= 15; // A = A>>16 + A >>= 15; // A = A>>16 - } else - { - A >>= 16; // A = A>>16 - } + } else { + A >>= 16; // A = A>>16 + } - A = A & ((int32_t)0x0000ffff); - A >>= nshift; // De-normalize the result. + A = A & ((int32_t)0x0000ffff); + A >>= nshift; // De-normalize the result. - return A; + return A; } diff --git a/common_audio/signal_processing/splitting_filter.c b/common_audio/signal_processing/splitting_filter.c index 27a0a2a8c9..60ce547197 100644 --- a/common_audio/signal_processing/splitting_filter.c +++ b/common_audio/signal_processing/splitting_filter.c @@ -13,13 +13,12 @@ * */ -#include "rtc_base/checks.h" #include "common_audio/signal_processing/include/signal_processing_library.h" +#include "rtc_base/checks.h" // Maximum number of samples in a low/high-band frame. -enum -{ - kMaxBandFrameLength = 320 // 10 ms at 64 kHz. +enum { + kMaxBandFrameLength = 320 // 10 ms at 64 kHz. }; // QMF filter coefficients in Q16. @@ -48,164 +47,171 @@ static void WebRtcSpl_AllPassQMF(int32_t* in_data, size_t data_length, int32_t* out_data, const uint16_t* filter_coefficients, - int32_t* filter_state) -{ - // The procedure is to filter the input with three first order all pass - // filters (cascade operations). - // - // a_3 + q^-1 a_2 + q^-1 a_1 + q^-1 - // y[n] = ----------- ----------- ----------- x[n] - // 1 + a_3q^-1 1 + a_2q^-1 1 + a_1q^-1 - // - // The input vector `filter_coefficients` includes these three filter - // coefficients. The filter state contains the in_data state, in_data[-1], - // followed by the out_data state, out_data[-1]. This is repeated for each - // cascade. The first cascade filter will filter the `in_data` and store - // the output in `out_data`. The second will the take the `out_data` as - // input and make an intermediate storage in `in_data`, to save memory. The - // third, and final, cascade filter operation takes the `in_data` (which is - // the output from the previous cascade filter) and store the output in - // `out_data`. 
Note that the input vector values are changed during the - // process. - size_t k; - int32_t diff; - // First all-pass cascade; filter from in_data to out_data. - - // Let y_i[n] indicate the output of cascade filter i (with filter - // coefficient a_i) at vector position n. Then the final output will be - // y[n] = y_3[n] - - // First loop, use the states stored in memory. - // "diff" should be safe from wrap around since max values are 2^25 - // diff = (x[0] - y_1[-1]) - diff = WebRtcSpl_SubSatW32(in_data[0], filter_state[1]); - // y_1[0] = x[-1] + a_1 * (x[0] - y_1[-1]) - out_data[0] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[0], diff, filter_state[0]); - - // For the remaining loops, use previous values. - for (k = 1; k < data_length; k++) - { - // diff = (x[n] - y_1[n-1]) - diff = WebRtcSpl_SubSatW32(in_data[k], out_data[k - 1]); - // y_1[n] = x[n-1] + a_1 * (x[n] - y_1[n-1]) - out_data[k] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[0], diff, in_data[k - 1]); - } - - // Update states. - filter_state[0] = in_data[data_length - 1]; // x[N-1], becomes x[-1] next time - filter_state[1] = out_data[data_length - 1]; // y_1[N-1], becomes y_1[-1] next time - - // Second all-pass cascade; filter from out_data to in_data. - // diff = (y_1[0] - y_2[-1]) - diff = WebRtcSpl_SubSatW32(out_data[0], filter_state[3]); + int32_t* filter_state) { + // The procedure is to filter the input with three first order all pass + // filters (cascade operations). + // + // a_3 + q^-1 a_2 + q^-1 a_1 + q^-1 + // y[n] = ----------- ----------- ----------- x[n] + // 1 + a_3q^-1 1 + a_2q^-1 1 + a_1q^-1 + // + // The input vector `filter_coefficients` includes these three filter + // coefficients. The filter state contains the in_data state, in_data[-1], + // followed by the out_data state, out_data[-1]. This is repeated for each + // cascade. The first cascade filter will filter the `in_data` and store + // the output in `out_data`. The second will the take the `out_data` as + // input and make an intermediate storage in `in_data`, to save memory. The + // third, and final, cascade filter operation takes the `in_data` (which is + // the output from the previous cascade filter) and store the output in + // `out_data`. Note that the input vector values are changed during the + // process. + size_t k; + int32_t diff; + // First all-pass cascade; filter from in_data to out_data. + + // Let y_i[n] indicate the output of cascade filter i (with filter + // coefficient a_i) at vector position n. Then the final output will be + // y[n] = y_3[n] + + // First loop, use the states stored in memory. + // "diff" should be safe from wrap around since max values are 2^25 + // diff = (x[0] - y_1[-1]) + diff = WebRtcSpl_SubSatW32(in_data[0], filter_state[1]); + // y_1[0] = x[-1] + a_1 * (x[0] - y_1[-1]) + out_data[0] = + WEBRTC_SPL_SCALEDIFF32(filter_coefficients[0], diff, filter_state[0]); + + // For the remaining loops, use previous values. + for (k = 1; k < data_length; k++) { + // diff = (x[n] - y_1[n-1]) + diff = WebRtcSpl_SubSatW32(in_data[k], out_data[k - 1]); + // y_1[n] = x[n-1] + a_1 * (x[n] - y_1[n-1]) + out_data[k] = + WEBRTC_SPL_SCALEDIFF32(filter_coefficients[0], diff, in_data[k - 1]); + } + + // Update states. + filter_state[0] = + in_data[data_length - 1]; // x[N-1], becomes x[-1] next time + filter_state[1] = + out_data[data_length - 1]; // y_1[N-1], becomes y_1[-1] next time + + // Second all-pass cascade; filter from out_data to in_data. 
+ // diff = (y_1[0] - y_2[-1]) + diff = WebRtcSpl_SubSatW32(out_data[0], filter_state[3]); + // y_2[0] = y_1[-1] + a_2 * (y_1[0] - y_2[-1]) + in_data[0] = + WEBRTC_SPL_SCALEDIFF32(filter_coefficients[1], diff, filter_state[2]); + for (k = 1; k < data_length; k++) { + // diff = (y_1[n] - y_2[n-1]) + diff = WebRtcSpl_SubSatW32(out_data[k], in_data[k - 1]); // y_2[0] = y_1[-1] + a_2 * (y_1[0] - y_2[-1]) - in_data[0] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[1], diff, filter_state[2]); - for (k = 1; k < data_length; k++) - { - // diff = (y_1[n] - y_2[n-1]) - diff = WebRtcSpl_SubSatW32(out_data[k], in_data[k - 1]); - // y_2[0] = y_1[-1] + a_2 * (y_1[0] - y_2[-1]) - in_data[k] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[1], diff, out_data[k-1]); - } - - filter_state[2] = out_data[data_length - 1]; // y_1[N-1], becomes y_1[-1] next time - filter_state[3] = in_data[data_length - 1]; // y_2[N-1], becomes y_2[-1] next time - - // Third all-pass cascade; filter from in_data to out_data. - // diff = (y_2[0] - y[-1]) - diff = WebRtcSpl_SubSatW32(in_data[0], filter_state[5]); - // y[0] = y_2[-1] + a_3 * (y_2[0] - y[-1]) - out_data[0] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[2], diff, filter_state[4]); - for (k = 1; k < data_length; k++) - { - // diff = (y_2[n] - y[n-1]) - diff = WebRtcSpl_SubSatW32(in_data[k], out_data[k - 1]); - // y[n] = y_2[n-1] + a_3 * (y_2[n] - y[n-1]) - out_data[k] = WEBRTC_SPL_SCALEDIFF32(filter_coefficients[2], diff, in_data[k-1]); - } - filter_state[4] = in_data[data_length - 1]; // y_2[N-1], becomes y_2[-1] next time - filter_state[5] = out_data[data_length - 1]; // y[N-1], becomes y[-1] next time + in_data[k] = + WEBRTC_SPL_SCALEDIFF32(filter_coefficients[1], diff, out_data[k - 1]); + } + + filter_state[2] = + out_data[data_length - 1]; // y_1[N-1], becomes y_1[-1] next time + filter_state[3] = + in_data[data_length - 1]; // y_2[N-1], becomes y_2[-1] next time + + // Third all-pass cascade; filter from in_data to out_data. + // diff = (y_2[0] - y[-1]) + diff = WebRtcSpl_SubSatW32(in_data[0], filter_state[5]); + // y[0] = y_2[-1] + a_3 * (y_2[0] - y[-1]) + out_data[0] = + WEBRTC_SPL_SCALEDIFF32(filter_coefficients[2], diff, filter_state[4]); + for (k = 1; k < data_length; k++) { + // diff = (y_2[n] - y[n-1]) + diff = WebRtcSpl_SubSatW32(in_data[k], out_data[k - 1]); + // y[n] = y_2[n-1] + a_3 * (y_2[n] - y[n-1]) + out_data[k] = + WEBRTC_SPL_SCALEDIFF32(filter_coefficients[2], diff, in_data[k - 1]); + } + filter_state[4] = + in_data[data_length - 1]; // y_2[N-1], becomes y_2[-1] next time + filter_state[5] = + out_data[data_length - 1]; // y[N-1], becomes y[-1] next time } -void WebRtcSpl_AnalysisQMF(const int16_t* in_data, size_t in_data_length, - int16_t* low_band, int16_t* high_band, - int32_t* filter_state1, int32_t* filter_state2) -{ - size_t i; - int16_t k; - int32_t tmp; - int32_t half_in1[kMaxBandFrameLength]; - int32_t half_in2[kMaxBandFrameLength]; - int32_t filter1[kMaxBandFrameLength]; - int32_t filter2[kMaxBandFrameLength]; - const size_t band_length = in_data_length / 2; - RTC_DCHECK_EQ(0, in_data_length % 2); - RTC_DCHECK_LE(band_length, kMaxBandFrameLength); - - // Split even and odd samples. Also shift them to Q10. - for (i = 0, k = 0; i < band_length; i++, k += 2) - { - half_in2[i] = ((int32_t)in_data[k]) * (1 << 10); - half_in1[i] = ((int32_t)in_data[k + 1]) * (1 << 10); - } - - // All pass filter even and odd samples, independently. 
- WebRtcSpl_AllPassQMF(half_in1, band_length, filter1, - WebRtcSpl_kAllPassFilter1, filter_state1); - WebRtcSpl_AllPassQMF(half_in2, band_length, filter2, - WebRtcSpl_kAllPassFilter2, filter_state2); - - // Take the sum and difference of filtered version of odd and even - // branches to get upper & lower band. - for (i = 0; i < band_length; i++) - { - tmp = (filter1[i] + filter2[i] + 1024) >> 11; - low_band[i] = WebRtcSpl_SatW32ToW16(tmp); - - tmp = (filter1[i] - filter2[i] + 1024) >> 11; - high_band[i] = WebRtcSpl_SatW32ToW16(tmp); - } +void WebRtcSpl_AnalysisQMF(const int16_t* in_data, + size_t in_data_length, + int16_t* low_band, + int16_t* high_band, + int32_t* filter_state1, + int32_t* filter_state2) { + size_t i; + int16_t k; + int32_t tmp; + int32_t half_in1[kMaxBandFrameLength]; + int32_t half_in2[kMaxBandFrameLength]; + int32_t filter1[kMaxBandFrameLength]; + int32_t filter2[kMaxBandFrameLength]; + const size_t band_length = in_data_length / 2; + RTC_DCHECK_EQ(0, in_data_length % 2); + RTC_DCHECK_LE(band_length, kMaxBandFrameLength); + + // Split even and odd samples. Also shift them to Q10. + for (i = 0, k = 0; i < band_length; i++, k += 2) { + half_in2[i] = ((int32_t)in_data[k]) * (1 << 10); + half_in1[i] = ((int32_t)in_data[k + 1]) * (1 << 10); + } + + // All pass filter even and odd samples, independently. + WebRtcSpl_AllPassQMF(half_in1, band_length, filter1, + WebRtcSpl_kAllPassFilter1, filter_state1); + WebRtcSpl_AllPassQMF(half_in2, band_length, filter2, + WebRtcSpl_kAllPassFilter2, filter_state2); + + // Take the sum and difference of filtered version of odd and even + // branches to get upper & lower band. + for (i = 0; i < band_length; i++) { + tmp = (filter1[i] + filter2[i] + 1024) >> 11; + low_band[i] = WebRtcSpl_SatW32ToW16(tmp); + + tmp = (filter1[i] - filter2[i] + 1024) >> 11; + high_band[i] = WebRtcSpl_SatW32ToW16(tmp); + } } -void WebRtcSpl_SynthesisQMF(const int16_t* low_band, const int16_t* high_band, - size_t band_length, int16_t* out_data, - int32_t* filter_state1, int32_t* filter_state2) -{ - int32_t tmp; - int32_t half_in1[kMaxBandFrameLength]; - int32_t half_in2[kMaxBandFrameLength]; - int32_t filter1[kMaxBandFrameLength]; - int32_t filter2[kMaxBandFrameLength]; - size_t i; - int16_t k; - RTC_DCHECK_LE(band_length, kMaxBandFrameLength); - - // Obtain the sum and difference channels out of upper and lower-band channels. - // Also shift to Q10 domain. - for (i = 0; i < band_length; i++) - { - tmp = (int32_t)low_band[i] + (int32_t)high_band[i]; - half_in1[i] = tmp * (1 << 10); - tmp = (int32_t)low_band[i] - (int32_t)high_band[i]; - half_in2[i] = tmp * (1 << 10); - } - - // all-pass filter the sum and difference channels - WebRtcSpl_AllPassQMF(half_in1, band_length, filter1, - WebRtcSpl_kAllPassFilter2, filter_state1); - WebRtcSpl_AllPassQMF(half_in2, band_length, filter2, - WebRtcSpl_kAllPassFilter1, filter_state2); - - // The filtered signals are even and odd samples of the output. Combine - // them. The signals are Q10 should shift them back to Q0 and take care of - // saturation. 
- for (i = 0, k = 0; i < band_length; i++) - { - tmp = (filter2[i] + 512) >> 10; - out_data[k++] = WebRtcSpl_SatW32ToW16(tmp); - - tmp = (filter1[i] + 512) >> 10; - out_data[k++] = WebRtcSpl_SatW32ToW16(tmp); - } - +void WebRtcSpl_SynthesisQMF(const int16_t* low_band, + const int16_t* high_band, + size_t band_length, + int16_t* out_data, + int32_t* filter_state1, + int32_t* filter_state2) { + int32_t tmp; + int32_t half_in1[kMaxBandFrameLength]; + int32_t half_in2[kMaxBandFrameLength]; + int32_t filter1[kMaxBandFrameLength]; + int32_t filter2[kMaxBandFrameLength]; + size_t i; + int16_t k; + RTC_DCHECK_LE(band_length, kMaxBandFrameLength); + + // Obtain the sum and difference channels out of upper and lower-band + // channels. Also shift to Q10 domain. + for (i = 0; i < band_length; i++) { + tmp = (int32_t)low_band[i] + (int32_t)high_band[i]; + half_in1[i] = tmp * (1 << 10); + tmp = (int32_t)low_band[i] - (int32_t)high_band[i]; + half_in2[i] = tmp * (1 << 10); + } + + // all-pass filter the sum and difference channels + WebRtcSpl_AllPassQMF(half_in1, band_length, filter1, + WebRtcSpl_kAllPassFilter2, filter_state1); + WebRtcSpl_AllPassQMF(half_in2, band_length, filter2, + WebRtcSpl_kAllPassFilter1, filter_state2); + + // The filtered signals are even and odd samples of the output. Combine + // them. The signals are Q10 should shift them back to Q0 and take care of + // saturation. + for (i = 0, k = 0; i < band_length; i++) { + tmp = (filter2[i] + 512) >> 10; + out_data[k++] = WebRtcSpl_SatW32ToW16(tmp); + + tmp = (filter1[i] + 512) >> 10; + out_data[k++] = WebRtcSpl_SatW32ToW16(tmp); + } } diff --git a/common_audio/signal_processing/sqrt_of_one_minus_x_squared.c b/common_audio/signal_processing/sqrt_of_one_minus_x_squared.c index a77fd4063f..07e845a5e5 100644 --- a/common_audio/signal_processing/sqrt_of_one_minus_x_squared.c +++ b/common_audio/signal_processing/sqrt_of_one_minus_x_squared.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. */ - /* * This file contains the function WebRtcSpl_SqrtOfOneMinusXSquared(). * The description header can be found in signal_processing_library.h @@ -17,19 +16,19 @@ #include "common_audio/signal_processing/include/signal_processing_library.h" -void WebRtcSpl_SqrtOfOneMinusXSquared(int16_t *xQ15, size_t vector_length, - int16_t *yQ15) -{ - int32_t sq; - size_t m; - int16_t tmp; +void WebRtcSpl_SqrtOfOneMinusXSquared(int16_t* xQ15, + size_t vector_length, + int16_t* yQ15) { + int32_t sq; + size_t m; + int16_t tmp; - for (m = 0; m < vector_length; m++) - { - tmp = xQ15[m]; - sq = tmp * tmp; // x^2 in Q30 - sq = 1073741823 - sq; // 1-x^2, where 1 ~= 0.99999999906 is 1073741823 in Q30 - sq = WebRtcSpl_Sqrt(sq); // sqrt(1-x^2) in Q15 - yQ15[m] = (int16_t)sq; - } + for (m = 0; m < vector_length; m++) { + tmp = xQ15[m]; + sq = tmp * tmp; // x^2 in Q30 + sq = 1073741823 - + sq; // 1-x^2, where 1 ~= 0.99999999906 is 1073741823 in Q30 + sq = WebRtcSpl_Sqrt(sq); // sqrt(1-x^2) in Q15 + yQ15[m] = (int16_t)sq; + } } diff --git a/common_audio/signal_processing/vector_operations.c b/common_audio/signal_processing/vector_operations.c new file mode 100644 index 0000000000..880605c3cd --- /dev/null +++ b/common_audio/signal_processing/vector_operations.c @@ -0,0 +1,79 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "common_audio/signal_processing/include/signal_processing_library.h" + +void WebRtcSpl_ReverseOrderMultArrayElements(int16_t* out, + const int16_t* in, + const int16_t* win, + size_t vector_length, + int16_t right_shifts) { + size_t i; + int16_t* outptr = out; + const int16_t* inptr = in; + const int16_t* winptr = win; + for (i = 0; i < vector_length; i++) { + *outptr++ = (int16_t)((*inptr++ * *winptr--) >> right_shifts); + } +} + +void WebRtcSpl_ElementwiseVectorMult(int16_t* out, + const int16_t* in, + const int16_t* win, + size_t vector_length, + int16_t right_shifts) { + size_t i; + int16_t* outptr = out; + const int16_t* inptr = in; + const int16_t* winptr = win; + for (i = 0; i < vector_length; i++) { + *outptr++ = (int16_t)((*inptr++ * *winptr++) >> right_shifts); + } +} + +void WebRtcSpl_AddVectorsAndShift(int16_t* out, + const int16_t* in1, + const int16_t* in2, + size_t vector_length, + int16_t right_shifts) { + size_t i; + int16_t* outptr = out; + const int16_t* in1ptr = in1; + const int16_t* in2ptr = in2; + for (i = vector_length; i > 0; i--) { + (*outptr++) = (int16_t)(((*in1ptr++) + (*in2ptr++)) >> right_shifts); + } +} + +void WebRtcSpl_AddAffineVectorToVector(int16_t* out, + const int16_t* in, + int16_t gain, + int32_t add_constant, + int16_t right_shifts, + size_t vector_length) { + size_t i; + + for (i = 0; i < vector_length; i++) { + out[i] += (int16_t)((in[i] * gain + add_constant) >> right_shifts); + } +} + +void WebRtcSpl_AffineTransformVector(int16_t* out, + const int16_t* in, + int16_t gain, + int32_t add_constant, + int16_t right_shifts, + size_t vector_length) { + size_t i; + + for (i = 0; i < vector_length; i++) { + out[i] = (int16_t)((in[i] * gain + add_constant) >> right_shifts); + } +} diff --git a/common_audio/signal_processing/vector_scaling_operations.c b/common_audio/signal_processing/vector_scaling_operations.c index 7307dc78ff..a280ebd327 100644 --- a/common_audio/signal_processing/vector_scaling_operations.c +++ b/common_audio/signal_processing/vector_scaling_operations.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ - /* * This file contains implementations of the functions * WebRtcSpl_VectorBitShiftW16() @@ -22,50 +21,44 @@ #include "common_audio/signal_processing/include/signal_processing_library.h" -void WebRtcSpl_VectorBitShiftW16(int16_t *res, size_t length, - const int16_t *in, int16_t right_shifts) -{ - size_t i; - - if (right_shifts > 0) - { - for (i = length; i > 0; i--) - { - (*res++) = ((*in++) >> right_shifts); - } - } else - { - for (i = length; i > 0; i--) - { - (*res++) = ((*in++) * (1 << (-right_shifts))); - } +void WebRtcSpl_VectorBitShiftW16(int16_t* res, + size_t length, + const int16_t* in, + int16_t right_shifts) { + size_t i; + + if (right_shifts > 0) { + for (i = length; i > 0; i--) { + (*res++) = ((*in++) >> right_shifts); + } + } else { + for (i = length; i > 0; i--) { + (*res++) = ((*in++) * (1 << (-right_shifts))); } + } } -void WebRtcSpl_VectorBitShiftW32(int32_t *out_vector, +void WebRtcSpl_VectorBitShiftW32(int32_t* out_vector, size_t vector_length, - const int32_t *in_vector, - int16_t right_shifts) -{ - size_t i; - - if (right_shifts > 0) - { - for (i = vector_length; i > 0; i--) - { - (*out_vector++) = ((*in_vector++) >> right_shifts); - } - } else - { - for (i = vector_length; i > 0; i--) - { - (*out_vector++) = ((*in_vector++) << (-right_shifts)); - } + const int32_t* in_vector, + int16_t right_shifts) { + size_t i; + + if (right_shifts > 0) { + for (i = vector_length; i > 0; i--) { + (*out_vector++) = ((*in_vector++) >> right_shifts); } + } else { + for (i = vector_length; i > 0; i--) { + (*out_vector++) = ((*in_vector++) << (-right_shifts)); + } + } } -void WebRtcSpl_VectorBitShiftW32ToW16(int16_t* out, size_t length, - const int32_t* in, int right_shifts) { +void WebRtcSpl_VectorBitShiftW32ToW16(int16_t* out, + size_t length, + const int32_t* in, + int right_shifts) { size_t i; int32_t tmp_w32; @@ -83,60 +76,64 @@ void WebRtcSpl_VectorBitShiftW32ToW16(int16_t* out, size_t length, } } -void WebRtcSpl_ScaleVector(const int16_t *in_vector, int16_t *out_vector, - int16_t gain, size_t in_vector_length, - int16_t right_shifts) -{ - // Performs vector operation: out_vector = (gain*in_vector)>>right_shifts - size_t i; - const int16_t *inptr; - int16_t *outptr; - - inptr = in_vector; - outptr = out_vector; - - for (i = 0; i < in_vector_length; i++) - { - *outptr++ = (int16_t)((*inptr++ * gain) >> right_shifts); - } +void WebRtcSpl_ScaleVector(const int16_t* in_vector, + int16_t* out_vector, + int16_t gain, + size_t in_vector_length, + int16_t right_shifts) { + // Performs vector operation: out_vector = (gain*in_vector)>>right_shifts + size_t i; + const int16_t* inptr; + int16_t* outptr; + + inptr = in_vector; + outptr = out_vector; + + for (i = 0; i < in_vector_length; i++) { + *outptr++ = (int16_t)((*inptr++ * gain) >> right_shifts); + } } -void WebRtcSpl_ScaleVectorWithSat(const int16_t *in_vector, int16_t *out_vector, - int16_t gain, size_t in_vector_length, - int16_t right_shifts) -{ - // Performs vector operation: out_vector = (gain*in_vector)>>right_shifts - size_t i; - const int16_t *inptr; - int16_t *outptr; +void WebRtcSpl_ScaleVectorWithSat(const int16_t* in_vector, + int16_t* out_vector, + int16_t gain, + size_t in_vector_length, + int16_t right_shifts) { + // Performs vector operation: out_vector = (gain*in_vector)>>right_shifts + size_t i; + const int16_t* inptr; + int16_t* outptr; - inptr = in_vector; - outptr = out_vector; + inptr = in_vector; + outptr = out_vector; - for (i = 0; i < in_vector_length; i++) { - *outptr++ = WebRtcSpl_SatW32ToW16((*inptr++ * 
gain) >> right_shifts); - } + for (i = 0; i < in_vector_length; i++) { + *outptr++ = WebRtcSpl_SatW32ToW16((*inptr++ * gain) >> right_shifts); + } } -void WebRtcSpl_ScaleAndAddVectors(const int16_t *in1, int16_t gain1, int shift1, - const int16_t *in2, int16_t gain2, int shift2, - int16_t *out, size_t vector_length) -{ - // Performs vector operation: out = (gain1*in1)>>shift1 + (gain2*in2)>>shift2 - size_t i; - const int16_t *in1ptr; - const int16_t *in2ptr; - int16_t *outptr; - - in1ptr = in1; - in2ptr = in2; - outptr = out; - - for (i = 0; i < vector_length; i++) - { - *outptr++ = (int16_t)((gain1 * *in1ptr++) >> shift1) + - (int16_t)((gain2 * *in2ptr++) >> shift2); - } +void WebRtcSpl_ScaleAndAddVectors(const int16_t* in1, + int16_t gain1, + int shift1, + const int16_t* in2, + int16_t gain2, + int shift2, + int16_t* out, + size_t vector_length) { + // Performs vector operation: out = (gain1*in1)>>shift1 + (gain2*in2)>>shift2 + size_t i; + const int16_t* in1ptr; + const int16_t* in2ptr; + int16_t* outptr; + + in1ptr = in1; + in2ptr = in2; + outptr = out; + + for (i = 0; i < vector_length; i++) { + *outptr++ = (int16_t)((gain1 * *in1ptr++) >> shift1) + + (int16_t)((gain2 * *in2ptr++) >> shift2); + } } // C version of WebRtcSpl_ScaleAndAddVectorsWithRound() for generic platforms. @@ -156,9 +153,10 @@ int WebRtcSpl_ScaleAndAddVectorsWithRoundC(const int16_t* in_vector1, } for (i = 0; i < length; i++) { - out_vector[i] = (int16_t)(( - in_vector1[i] * in_vector1_scale + in_vector2[i] * in_vector2_scale + - round_value) >> right_shifts); + out_vector[i] = + (int16_t)((in_vector1[i] * in_vector1_scale + + in_vector2[i] * in_vector2_scale + round_value) >> + right_shifts); } return 0; diff --git a/common_audio/signal_processing/vector_scaling_operations_mips.c b/common_audio/signal_processing/vector_scaling_operations_mips.c index ba2d26d422..0f2e823bcf 100644 --- a/common_audio/signal_processing/vector_scaling_operations_mips.c +++ b/common_audio/signal_processing/vector_scaling_operations_mips.c @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ - /* * This file contains implementations of the functions * WebRtcSpl_ScaleAndAddVectorsWithRound_mips() @@ -24,9 +23,9 @@ int WebRtcSpl_ScaleAndAddVectorsWithRound_mips(const int16_t* in_vector1, int16_t* out_vector, size_t length) { int16_t r0 = 0, r1 = 0; -  int16_t *in1 = (int16_t*)in_vector1; -  int16_t *in2 = (int16_t*)in_vector2; -  int16_t *out = out_vector; +  int16_t* in1 = (int16_t*)in_vector1; +  int16_t* in2 = (int16_t*)in_vector2; +  int16_t* out = out_vector; size_t i = 0; int value32 = 0; @@ -35,23 +34,31 @@ int WebRtcSpl_ScaleAndAddVectorsWithRound_mips(const int16_t* in_vector1, return -1; } for (i = 0; i < length; i++) { -    __asm __volatile ( -      "lh         %[r0],          0(%[in1])                               \n\t" -      "lh         %[r1],          0(%[in2])                               \n\t" -      "mult       %[r0],          %[in_vector1_scale]                     \n\t" -      "madd       %[r1],          %[in_vector2_scale]                     \n\t" -      "extrv_r.w  %[value32],     $ac0,               %[right_shifts]     \n\t" -      "addiu      %[in1],         %[in1],             2                   \n\t" -      "addiu      %[in2],         %[in2],             2                   \n\t" -      "sh         %[value32],     0(%[out])                               \n\t" -      "addiu      %[out],         %[out],             2                   \n\t" -      : [value32] "=&r" (value32), [out] "+r" (out), [in1] "+r" (in1), -        [in2] "+r" (in2), [r0] "=&r" (r0), [r1] "=&r" (r1) -      : [in_vector1_scale] "r" (in_vector1_scale), -        [in_vector2_scale] "r" (in_vector2_scale), -        [right_shifts] "r" (right_shifts) -      : "hi", "lo", "memory" -    ); +    __asm __volatile( +        "lh         %[r0],          0(%[in1])                               " +        "\n\t" +        "lh         %[r1],          0(%[in2])                               " +        "\n\t" +        "mult       %[r0],          %[in_vector1_scale]                     " +        "\n\t" +        "madd       %[r1],          %[in_vector2_scale]                     " +        "\n\t" +        "extrv_r.w  %[value32],     $ac0,               %[right_shifts]     " +        "\n\t" +        "addiu      %[in1],         %[in1],             2                   " +        "\n\t" +        "addiu      %[in2],         %[in2],             2                   " +        "\n\t" +        "sh         %[value32],     0(%[out])                               " +        "\n\t" +        "addiu      %[out],         %[out],             2                   " +        "\n\t" +        : [value32] "=&r"(value32), [out] "+r"(out), [in1] "+r"(in1), +          [in2] "+r"(in2), [r0] "=&r"(r0), [r1] "=&r"(r1) +        : [in_vector1_scale] "r"(in_vector1_scale), +          [in_vector2_scale] "r"(in_vector2_scale), +          [right_shifts] "r"(right_shifts) +        : "hi", "lo", "memory"); } return 0; } diff --git a/common_audio/smoothing_filter.cc b/common_audio/smoothing_filter.cc index eaaf3a0033..624182f3f4 100644 --- a/common_audio/smoothing_filter.cc +++ b/common_audio/smoothing_filter.cc @@ -40,7 +40,7 @@ SmoothingFilterImpl::SmoothingFilterImpl(int init_time_ms) SmoothingFilterImpl::~SmoothingFilterImpl() = default; void SmoothingFilterImpl::AddSample(float sample) { -  const int64_t now_ms = rtc::TimeMillis(); +  const int64_t now_ms = TimeMillis(); if (!init_end_time_ms_) { // This is equivalent to assuming the filter has been receiving the same @@ -55,12 +55,12 @@ void SmoothingFilterImpl::AddSample(float sample) { last_sample_ = sample; } -absl::optional<float> SmoothingFilterImpl::GetAverage() { +std::optional<float> SmoothingFilterImpl::GetAverage() { if (!init_end_time_ms_) { // `init_end_time_ms_` undefined since we have not received any sample.
-    return absl::nullopt; +    return std::nullopt; } -  ExtrapolateLastSample(rtc::TimeMillis()); +  ExtrapolateLastSample(TimeMillis()); return state_; } diff --git a/common_audio/smoothing_filter.h b/common_audio/smoothing_filter.h index 3419de7db3..488cc97a09 100644 --- a/common_audio/smoothing_filter.h +++ b/common_audio/smoothing_filter.h @@ -13,7 +13,7 @@ #include -#include "absl/types/optional.h" +#include <optional> namespace webrtc { @@ -21,7 +21,7 @@ class SmoothingFilter { public: virtual ~SmoothingFilter() = default; virtual void AddSample(float sample) = 0; -  virtual absl::optional<float> GetAverage() = 0; +  virtual std::optional<float> GetAverage() = 0; virtual bool SetTimeConstantMs(int time_constant_ms) = 0; }; @@ -49,7 +49,7 @@ class SmoothingFilterImpl final : public SmoothingFilter { public: ~SmoothingFilterImpl() override; void AddSample(float sample) override; -  absl::optional<float> GetAverage() override; +  std::optional<float> GetAverage() override; bool SetTimeConstantMs(int time_constant_ms) override; // Methods used for unittests. @@ -63,7 +63,7 @@ class SmoothingFilterImpl final : public SmoothingFilter { const float init_factor_; const float init_const_; -  absl::optional<int64_t> init_end_time_ms_; +  std::optional<int64_t> init_end_time_ms_; float last_sample_; float alpha_; float state_; diff --git a/common_audio/smoothing_filter_unittest.cc b/common_audio/smoothing_filter_unittest.cc index 47f6c717ec..01bfa56fee 100644 --- a/common_audio/smoothing_filter_unittest.cc +++ b/common_audio/smoothing_filter_unittest.cc @@ -28,7 +28,7 @@ struct SmoothingFilterStates { : smoothing_filter(init_time_ms) { fake_clock.AdvanceTime(TimeDelta::Millis(kClockInitialTime)); } -  rtc::ScopedFakeClock fake_clock; +  ScopedFakeClock fake_clock; SmoothingFilterImpl smoothing_filter; }; diff --git a/common_audio/third_party/.clang-format b/common_audio/third_party/.clang-format new file mode 100644 index 0000000000..e3845288a2 --- /dev/null +++ b/common_audio/third_party/.clang-format @@ -0,0 +1 @@ +DisableFormat: true diff --git a/common_audio/third_party/ooura/README.chromium b/common_audio/third_party/ooura/README.chromium index 459df13042..f414ac28d7 100644 --- a/common_audio/third_party/ooura/README.chromium +++ b/common_audio/third_party/ooura/README.chromium @@ -1,9 +1,9 @@ Name: General Purpose FFT (Fast Fourier/Cosine/Sine Transform) Package Short Name: fft4g URL: http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html -Version: 0 +Version: N/A Date: 2018-06-19 -License: Custome license +License: LicenseRef-takuya-ooura License File: LICENSE Security Critical: yes Shipped: yes diff --git a/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc b/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc index 693312012b..b7a529d1b1 100644 --- a/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc +++ b/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc @@ -313,7 +313,7 @@ static void rftbsub_128_C(float* a) { } // namespace -OouraFft::OouraFft(bool sse2_available) { +OouraFft::OouraFft([[maybe_unused]] bool sse2_available) { #if defined(WEBRTC_ARCH_X86_FAMILY) use_sse2_ = sse2_available; #else diff --git a/common_audio/third_party/spl_sqrt_floor/README.chromium b/common_audio/third_party/spl_sqrt_floor/README.chromium index b2c4309bd6..da3f8d3ff8 100644 --- a/common_audio/third_party/spl_sqrt_floor/README.chromium +++ b/common_audio/third_party/spl_sqrt_floor/README.chromium @@ -1,9 +1,9 @@ -Name: sql sqrt floor -Short Name: sql_sqrt_floor +Name: spl sqrt floor +Short Name: spl_sqrt_floor URL: http://www.pertinentdetail.org/sqrt -Version: 0
+Version: N/A Date: 2018-03-22 -License: Custom license +License: SPL-SQRT-FLOOR License File: LICENSE Security Critical: yes Shipped: yes diff --git a/common_audio/vad/vad_core.c b/common_audio/vad/vad_core.c index 0872449a7c..9b40f42108 100644 --- a/common_audio/vad/vad_core.c +++ b/common_audio/vad/vad_core.c @@ -10,48 +10,48 @@ #include "common_audio/vad/vad_core.h" -#include "rtc_base/sanitizer.h" #include "common_audio/signal_processing/include/signal_processing_library.h" #include "common_audio/vad/vad_filterbank.h" #include "common_audio/vad/vad_gmm.h" #include "common_audio/vad/vad_sp.h" +#include "rtc_base/sanitizer.h" // Spectrum Weighting -static const int16_t kSpectrumWeight[kNumChannels] = { 6, 8, 10, 12, 14, 16 }; -static const int16_t kNoiseUpdateConst = 655; // Q15 -static const int16_t kSpeechUpdateConst = 6554; // Q15 -static const int16_t kBackEta = 154; // Q8 +static const int16_t kSpectrumWeight[kNumChannels] = {6, 8, 10, 12, 14, 16}; +static const int16_t kNoiseUpdateConst = 655; // Q15 +static const int16_t kSpeechUpdateConst = 6554; // Q15 +static const int16_t kBackEta = 154; // Q8 // Minimum difference between the two models, Q5 -static const int16_t kMinimumDifference[kNumChannels] = { - 544, 544, 576, 576, 576, 576 }; +static const int16_t kMinimumDifference[kNumChannels] = {544, 544, 576, + 576, 576, 576}; // Upper limit of mean value for speech model, Q7 -static const int16_t kMaximumSpeech[kNumChannels] = { - 11392, 11392, 11520, 11520, 11520, 11520 }; +static const int16_t kMaximumSpeech[kNumChannels] = {11392, 11392, 11520, + 11520, 11520, 11520}; // Minimum value for mean value -static const int16_t kMinimumMean[kNumGaussians] = { 640, 768 }; +static const int16_t kMinimumMean[kNumGaussians] = {640, 768}; // Upper limit of mean value for noise model, Q7 -static const int16_t kMaximumNoise[kNumChannels] = { - 9216, 9088, 8960, 8832, 8704, 8576 }; +static const int16_t kMaximumNoise[kNumChannels] = {9216, 9088, 8960, + 8832, 8704, 8576}; // Start values for the Gaussian models, Q7 // Weights for the two Gaussians for the six channels (noise) -static const int16_t kNoiseDataWeights[kTableSize] = { - 34, 62, 72, 66, 53, 25, 94, 66, 56, 62, 75, 103 }; +static const int16_t kNoiseDataWeights[kTableSize] = {34, 62, 72, 66, 53, 25, + 94, 66, 56, 62, 75, 103}; // Weights for the two Gaussians for the six channels (speech) -static const int16_t kSpeechDataWeights[kTableSize] = { - 48, 82, 45, 87, 50, 47, 80, 46, 83, 41, 78, 81 }; +static const int16_t kSpeechDataWeights[kTableSize] = {48, 82, 45, 87, 50, 47, + 80, 46, 83, 41, 78, 81}; // Means for the two Gaussians for the six channels (noise) static const int16_t kNoiseDataMeans[kTableSize] = { - 6738, 4892, 7065, 6715, 6771, 3369, 7646, 3863, 7820, 7266, 5020, 4362 }; + 6738, 4892, 7065, 6715, 6771, 3369, 7646, 3863, 7820, 7266, 5020, 4362}; // Means for the two Gaussians for the six channels (speech) -static const int16_t kSpeechDataMeans[kTableSize] = { - 8306, 10085, 10078, 11823, 11843, 6309, 9473, 9571, 10879, 7581, 8180, 7483 -}; +static const int16_t kSpeechDataMeans[kTableSize] = {8306, 10085, 10078, 11823, + 11843, 6309, 9473, 9571, + 10879, 7581, 8180, 7483}; // Stds for the two Gaussians for the six channels (noise) static const int16_t kNoiseDataStds[kTableSize] = { - 378, 1064, 493, 582, 688, 593, 474, 697, 475, 688, 421, 455 }; + 378, 1064, 493, 582, 688, 593, 474, 697, 475, 688, 421, 455}; // Stds for the two Gaussians for the six channels (speech) static const int16_t kSpeechDataStds[kTableSize] = 
{ - 555, 505, 567, 524, 585, 1231, 509, 828, 492, 1540, 1079, 850 }; + 555, 505, 567, 524, 585, 1231, 509, 828, 492, 1540, 1079, 850}; // Constants used in GmmProbability(). // @@ -70,25 +70,25 @@ static const int kInitCheck = 42; // Thresholds for different frame lengths (10 ms, 20 ms and 30 ms). // // Mode 0, Quality. -static const int16_t kOverHangMax1Q[3] = { 8, 4, 3 }; -static const int16_t kOverHangMax2Q[3] = { 14, 7, 5 }; -static const int16_t kLocalThresholdQ[3] = { 24, 21, 24 }; -static const int16_t kGlobalThresholdQ[3] = { 57, 48, 57 }; +static const int16_t kOverHangMax1Q[3] = {8, 4, 3}; +static const int16_t kOverHangMax2Q[3] = {14, 7, 5}; +static const int16_t kLocalThresholdQ[3] = {24, 21, 24}; +static const int16_t kGlobalThresholdQ[3] = {57, 48, 57}; // Mode 1, Low bitrate. -static const int16_t kOverHangMax1LBR[3] = { 8, 4, 3 }; -static const int16_t kOverHangMax2LBR[3] = { 14, 7, 5 }; -static const int16_t kLocalThresholdLBR[3] = { 37, 32, 37 }; -static const int16_t kGlobalThresholdLBR[3] = { 100, 80, 100 }; +static const int16_t kOverHangMax1LBR[3] = {8, 4, 3}; +static const int16_t kOverHangMax2LBR[3] = {14, 7, 5}; +static const int16_t kLocalThresholdLBR[3] = {37, 32, 37}; +static const int16_t kGlobalThresholdLBR[3] = {100, 80, 100}; // Mode 2, Aggressive. -static const int16_t kOverHangMax1AGG[3] = { 6, 3, 2 }; -static const int16_t kOverHangMax2AGG[3] = { 9, 5, 3 }; -static const int16_t kLocalThresholdAGG[3] = { 82, 78, 82 }; -static const int16_t kGlobalThresholdAGG[3] = { 285, 260, 285 }; +static const int16_t kOverHangMax1AGG[3] = {6, 3, 2}; +static const int16_t kOverHangMax2AGG[3] = {9, 5, 3}; +static const int16_t kLocalThresholdAGG[3] = {82, 78, 82}; +static const int16_t kGlobalThresholdAGG[3] = {285, 260, 285}; // Mode 3, Very aggressive. -static const int16_t kOverHangMax1VAG[3] = { 6, 3, 2 }; -static const int16_t kOverHangMax2VAG[3] = { 9, 5, 3 }; -static const int16_t kLocalThresholdVAG[3] = { 94, 94, 94 }; -static const int16_t kGlobalThresholdVAG[3] = { 1100, 1050, 1100 }; +static const int16_t kOverHangMax1VAG[3] = {6, 3, 2}; +static const int16_t kOverHangMax2VAG[3] = {9, 5, 3}; +static const int16_t kLocalThresholdVAG[3] = {94, 94, 94}; +static const int16_t kGlobalThresholdVAG[3] = {1100, 1050, 1100}; // Calculates the weighted average w.r.t. number of Gaussians. The `data` are // updated with an `offset` before averaging. @@ -98,7 +98,8 @@ static const int16_t kGlobalThresholdVAG[3] = { 1100, 1050, 1100 }; // - weights [i] : Weights used for averaging. // // returns : The weighted average. -static int32_t WeightedAverage(int16_t* data, int16_t offset, +static int32_t WeightedAverage(int16_t* data, + int16_t offset, const int16_t* weights) { int k; int32_t weighted_average = 0; @@ -130,8 +131,10 @@ static inline int32_t RTC_NO_SANITIZE("signed-integer-overflow") // - frame_length [i] : Number of input samples // // - returns : the VAD decision (0 - noise, 1 - speech). -static int16_t GmmProbability(VadInstT* self, int16_t* features, - int16_t total_power, size_t frame_length) { +static int16_t GmmProbability(VadInstT* self, + int16_t* features, + int16_t total_power, + size_t frame_length) { int channel, k; int16_t feature_minimum; int16_t h0, h1; @@ -145,8 +148,8 @@ static int16_t GmmProbability(VadInstT* self, int16_t* features, int16_t delt, ndelt; int16_t maxspe, maxmu; int16_t deltaN[kTableSize], deltaS[kTableSize]; - int16_t ngprvec[kTableSize] = { 0 }; // Conditional probability = 0. 
- int16_t sgprvec[kTableSize] = { 0 }; // Conditional probability = 0. + int16_t ngprvec[kTableSize] = {0}; // Conditional probability = 0. + int16_t sgprvec[kTableSize] = {0}; // Conditional probability = 0. int32_t h0_test, h1_test; int32_t tmp1_s32, tmp2_s32; int32_t sum_log_likelihood_ratios = 0; @@ -194,19 +197,17 @@ static int16_t GmmProbability(VadInstT* self, int16_t* features, gaussian = channel + k * kNumChannels; // Probability under H0, that is, probability of frame being noise. // Value given in Q27 = Q7 * Q20. - tmp1_s32 = WebRtcVad_GaussianProbability(features[channel], - self->noise_means[gaussian], - self->noise_stds[gaussian], - &deltaN[gaussian]); + tmp1_s32 = WebRtcVad_GaussianProbability( + features[channel], self->noise_means[gaussian], + self->noise_stds[gaussian], &deltaN[gaussian]); noise_probability[k] = kNoiseDataWeights[gaussian] * tmp1_s32; h0_test += noise_probability[k]; // Q27 // Probability under H1, that is, probability of frame being speech. // Value given in Q27 = Q7 * Q20. - tmp1_s32 = WebRtcVad_GaussianProbability(features[channel], - self->speech_means[gaussian], - self->speech_stds[gaussian], - &deltaS[gaussian]); + tmp1_s32 = WebRtcVad_GaussianProbability( + features[channel], self->speech_means[gaussian], + self->speech_stds[gaussian], &deltaS[gaussian]); speech_probability[k] = kSpeechDataWeights[gaussian] * tmp1_s32; h1_test += speech_probability[k]; // Q27 } @@ -237,7 +238,7 @@ static int16_t GmmProbability(VadInstT* self, int16_t* features, // Update `sum_log_likelihood_ratios` with spectrum weighting. This is // used for the global VAD decision. sum_log_likelihood_ratios += - (int32_t) (log_likelihood_ratio * kSpectrumWeight[channel]); + (int32_t)(log_likelihood_ratio * kSpectrumWeight[channel]); // Local VAD decision. if ((log_likelihood_ratio * 4) > individualTest) { @@ -247,12 +248,12 @@ static int16_t GmmProbability(VadInstT* self, int16_t* features, // TODO(bjornv): The conditional probabilities below are applied on the // hard coded number of Gaussians set to two. Find a way to generalize. // Calculate local noise probabilities used later when updating the GMM. - h0 = (int16_t) (h0_test >> 12); // Q15 + h0 = (int16_t)(h0_test >> 12); // Q15 if (h0 > 0) { // High probability of noise. Assign conditional probabilities for each // Gaussian in the GMM. - tmp1_s32 = (noise_probability[0] & 0xFFFFF000) << 2; // Q29 - ngprvec[channel] = (int16_t) WebRtcSpl_DivW32W16(tmp1_s32, h0); // Q14 + tmp1_s32 = (noise_probability[0] & 0xFFFFF000) << 2; // Q29 + ngprvec[channel] = (int16_t)WebRtcSpl_DivW32W16(tmp1_s32, h0); // Q14 ngprvec[channel + kNumChannels] = 16384 - ngprvec[channel]; } else { // Low noise probability. Assign conditional probability 1 to the first @@ -261,12 +262,12 @@ static int16_t GmmProbability(VadInstT* self, int16_t* features, } // Calculate local speech probabilities used later when updating the GMM. - h1 = (int16_t) (h1_test >> 12); // Q15 + h1 = (int16_t)(h1_test >> 12); // Q15 if (h1 > 0) { // High probability of speech. Assign conditional probabilities for each // Gaussian in the GMM. Otherwise use the initialized values, i.e., 0. 
- tmp1_s32 = (speech_probability[0] & 0xFFFFF000) << 2; // Q29 - sgprvec[channel] = (int16_t) WebRtcSpl_DivW32W16(tmp1_s32, h1); // Q14 + tmp1_s32 = (speech_probability[0] & 0xFFFFF000) << 2; // Q29 + sgprvec[channel] = (int16_t)WebRtcSpl_DivW32W16(tmp1_s32, h1); // Q14 sgprvec[channel + kNumChannels] = 16384 - sgprvec[channel]; } } @@ -277,14 +278,13 @@ static int16_t GmmProbability(VadInstT* self, int16_t* features, // Update the model parameters. maxspe = 12800; for (channel = 0; channel < kNumChannels; channel++) { - // Get minimum value in past which is used for long term correction in Q4. feature_minimum = WebRtcVad_FindMinimum(self, features[channel], channel); // Compute the "global" mean, that is the sum of the two means weighted. noise_global_mean = WeightedAverage(&self->noise_means[channel], 0, &kNoiseDataWeights[channel]); - tmp1_s16 = (int16_t) (noise_global_mean >> 6); // Q8 + tmp1_s16 = (int16_t)(noise_global_mean >> 6); // Q8 for (k = 0; k < kNumGaussians; k++) { gaussian = channel + k * kNumChannels; @@ -314,11 +314,11 @@ static int16_t GmmProbability(VadInstT* self, int16_t* features, nmk3 = nmk2 + (int16_t)((ndelt * kBackEta) >> 9); // Control that the noise mean does not drift to much. - tmp_s16 = (int16_t) ((k + 5) << 7); + tmp_s16 = (int16_t)((k + 5) << 7); if (nmk3 < tmp_s16) { nmk3 = tmp_s16; } - tmp_s16 = (int16_t) ((72 + k - channel) << 7); + tmp_s16 = (int16_t)((72 + k - channel) << 7); if (nmk3 > tmp_s16) { nmk3 = tmp_s16; } @@ -362,9 +362,9 @@ static int16_t GmmProbability(VadInstT* self, int16_t* features, // 0.1 * Q20 / Q7 = Q13. if (tmp2_s32 > 0) { - tmp_s16 = (int16_t) WebRtcSpl_DivW32W16(tmp2_s32, ssk * 10); + tmp_s16 = (int16_t)WebRtcSpl_DivW32W16(tmp2_s32, ssk * 10); } else { - tmp_s16 = (int16_t) WebRtcSpl_DivW32W16(-tmp2_s32, ssk * 10); + tmp_s16 = (int16_t)WebRtcSpl_DivW32W16(-tmp2_s32, ssk * 10); tmp_s16 = -tmp_s16; } // Divide by 4 giving an update factor of 0.025 (= 0.1 / 4). @@ -394,12 +394,12 @@ static int16_t GmmProbability(VadInstT* self, int16_t* features, // Q20 / Q7 = Q13. if (tmp1_s32 > 0) { - tmp_s16 = (int16_t) WebRtcSpl_DivW32W16(tmp1_s32, nsk); + tmp_s16 = (int16_t)WebRtcSpl_DivW32W16(tmp1_s32, nsk); } else { - tmp_s16 = (int16_t) WebRtcSpl_DivW32W16(-tmp1_s32, nsk); + tmp_s16 = (int16_t)WebRtcSpl_DivW32W16(-tmp1_s32, nsk); tmp_s16 = -tmp_s16; } - tmp_s16 += 32; // Rounding + tmp_s16 += 32; // Rounding nsk += tmp_s16 >> 6; // Q13 >> 6 = Q7. if (nsk < kMinStd) { nsk = kMinStd; @@ -419,8 +419,8 @@ static int16_t GmmProbability(VadInstT* self, int16_t* features, // `diff` = "global" speech mean - "global" noise mean. // (Q14 >> 9) - (Q14 >> 9) = Q5. - diff = (int16_t) (speech_global_mean >> 9) - - (int16_t) (noise_global_mean >> 9); + diff = (int16_t)(speech_global_mean >> 9) - + (int16_t)(noise_global_mean >> 9); if (diff < kMinimumDifference[channel]) { tmp_s16 = kMinimumDifference[channel] - diff; @@ -432,21 +432,21 @@ static int16_t GmmProbability(VadInstT* self, int16_t* features, // Move Gaussian means for speech model by `tmp1_s16` and update // `speech_global_mean`. Note that `self->speech_means[channel]` is // changed after the call. - speech_global_mean = WeightedAverage(&self->speech_means[channel], - tmp1_s16, - &kSpeechDataWeights[channel]); + speech_global_mean = + WeightedAverage(&self->speech_means[channel], tmp1_s16, + &kSpeechDataWeights[channel]); // Move Gaussian means for noise model by -`tmp2_s16` and update // `noise_global_mean`. Note that `self->noise_means[channel]` is // changed after the call. 
- noise_global_mean = WeightedAverage(&self->noise_means[channel], - -tmp2_s16, - &kNoiseDataWeights[channel]); + noise_global_mean = + WeightedAverage(&self->noise_means[channel], -tmp2_s16, + &kNoiseDataWeights[channel]); } // Control that the speech & noise means do not drift to much. maxspe = kMaximumSpeech[channel]; - tmp2_s16 = (int16_t) (speech_global_mean >> 7); + tmp2_s16 = (int16_t)(speech_global_mean >> 7); if (tmp2_s16 > maxspe) { // Upper limit of speech model. tmp2_s16 -= maxspe; @@ -456,7 +456,7 @@ static int16_t GmmProbability(VadInstT* self, int16_t* features, } } - tmp2_s16 = (int16_t) (noise_global_mean >> 7); + tmp2_s16 = (int16_t)(noise_global_mean >> 7); if (tmp2_s16 > kMaximumNoise[channel]) { tmp2_s16 -= kMaximumNoise[channel]; @@ -555,10 +555,8 @@ int WebRtcVad_set_mode_core(VadInstT* self, int mode) { sizeof(self->over_hang_max_1)); memcpy(self->over_hang_max_2, kOverHangMax2Q, sizeof(self->over_hang_max_2)); - memcpy(self->individual, kLocalThresholdQ, - sizeof(self->individual)); - memcpy(self->total, kGlobalThresholdQ, - sizeof(self->total)); + memcpy(self->individual, kLocalThresholdQ, sizeof(self->individual)); + memcpy(self->total, kGlobalThresholdQ, sizeof(self->total)); break; case 1: // Low bitrate mode. @@ -566,10 +564,8 @@ int WebRtcVad_set_mode_core(VadInstT* self, int mode) { sizeof(self->over_hang_max_1)); memcpy(self->over_hang_max_2, kOverHangMax2LBR, sizeof(self->over_hang_max_2)); - memcpy(self->individual, kLocalThresholdLBR, - sizeof(self->individual)); - memcpy(self->total, kGlobalThresholdLBR, - sizeof(self->total)); + memcpy(self->individual, kLocalThresholdLBR, sizeof(self->individual)); + memcpy(self->total, kGlobalThresholdLBR, sizeof(self->total)); break; case 2: // Aggressive mode. @@ -577,10 +573,8 @@ int WebRtcVad_set_mode_core(VadInstT* self, int mode) { sizeof(self->over_hang_max_1)); memcpy(self->over_hang_max_2, kOverHangMax2AGG, sizeof(self->over_hang_max_2)); - memcpy(self->individual, kLocalThresholdAGG, - sizeof(self->individual)); - memcpy(self->total, kGlobalThresholdAGG, - sizeof(self->total)); + memcpy(self->individual, kLocalThresholdAGG, sizeof(self->individual)); + memcpy(self->total, kGlobalThresholdAGG, sizeof(self->total)); break; case 3: // Very aggressive mode. @@ -588,10 +582,8 @@ int WebRtcVad_set_mode_core(VadInstT* self, int mode) { sizeof(self->over_hang_max_1)); memcpy(self->over_hang_max_2, kOverHangMax2VAG, sizeof(self->over_hang_max_2)); - memcpy(self->individual, kLocalThresholdVAG, - sizeof(self->individual)); - memcpy(self->total, kGlobalThresholdVAG, - sizeof(self->total)); + memcpy(self->individual, kLocalThresholdVAG, sizeof(self->individual)); + memcpy(self->total, kGlobalThresholdVAG, sizeof(self->total)); break; default: return_value = -1; @@ -604,14 +596,15 @@ int WebRtcVad_set_mode_core(VadInstT* self, int mode) { // Calculate VAD decision by first extracting feature values and then calculate // probability for both speech and background noise. -int WebRtcVad_CalcVad48khz(VadInstT* inst, const int16_t* speech_frame, +int WebRtcVad_CalcVad48khz(VadInstT* inst, + const int16_t* speech_frame, size_t frame_length) { int vad; size_t i; int16_t speech_nb[240]; // 30 ms in 8 kHz. // `tmp_mem` is a temporary memory used by resample function, length is // frame length in 10 ms (480 samples) + 256 extra. 
- int32_t tmp_mem[480 + 256] = { 0 }; + int32_t tmp_mem[480 + 256] = {0}; const size_t kFrameLen10ms48khz = 480; const size_t kFrameLen10ms8khz = 80; size_t num_10ms_frames = frame_length / kFrameLen10ms48khz; @@ -619,8 +612,7 @@ int WebRtcVad_CalcVad48khz(VadInstT* inst, const int16_t* speech_frame, for (i = 0; i < num_10ms_frames; i++) { WebRtcSpl_Resample48khzTo8khz(speech_frame, &speech_nb[i * kFrameLen10ms8khz], - &inst->state_48_to_8, - tmp_mem); + &inst->state_48_to_8, tmp_mem); } // Do VAD on an 8 kHz signal @@ -629,57 +621,57 @@ int WebRtcVad_CalcVad48khz(VadInstT* inst, const int16_t* speech_frame, return vad; } -int WebRtcVad_CalcVad32khz(VadInstT* inst, const int16_t* speech_frame, - size_t frame_length) -{ - size_t len; - int vad; - int16_t speechWB[480]; // Downsampled speech frame: 960 samples (30ms in SWB) - int16_t speechNB[240]; // Downsampled speech frame: 480 samples (30ms in WB) - +int WebRtcVad_CalcVad32khz(VadInstT* inst, + const int16_t* speech_frame, + size_t frame_length) { + size_t len; + int vad; + int16_t speechWB[480]; // Downsampled speech frame: 960 samples (30ms in SWB) + int16_t speechNB[240]; // Downsampled speech frame: 480 samples (30ms in WB) - // Downsample signal 32->16->8 before doing VAD - WebRtcVad_Downsampling(speech_frame, speechWB, &(inst->downsampling_filter_states[2]), - frame_length); - len = frame_length / 2; + // Downsample signal 32->16->8 before doing VAD + WebRtcVad_Downsampling(speech_frame, speechWB, + &(inst->downsampling_filter_states[2]), frame_length); + len = frame_length / 2; - WebRtcVad_Downsampling(speechWB, speechNB, inst->downsampling_filter_states, len); - len /= 2; + WebRtcVad_Downsampling(speechWB, speechNB, inst->downsampling_filter_states, + len); + len /= 2; - // Do VAD on an 8 kHz signal - vad = WebRtcVad_CalcVad8khz(inst, speechNB, len); + // Do VAD on an 8 kHz signal + vad = WebRtcVad_CalcVad8khz(inst, speechNB, len); - return vad; + return vad; } -int WebRtcVad_CalcVad16khz(VadInstT* inst, const int16_t* speech_frame, - size_t frame_length) -{ - size_t len; - int vad; - int16_t speechNB[240]; // Downsampled speech frame: 480 samples (30ms in WB) +int WebRtcVad_CalcVad16khz(VadInstT* inst, + const int16_t* speech_frame, + size_t frame_length) { + size_t len; + int vad; + int16_t speechNB[240]; // Downsampled speech frame: 480 samples (30ms in WB) - // Wideband: Downsample signal before doing VAD - WebRtcVad_Downsampling(speech_frame, speechNB, inst->downsampling_filter_states, - frame_length); + // Wideband: Downsample signal before doing VAD + WebRtcVad_Downsampling(speech_frame, speechNB, + inst->downsampling_filter_states, frame_length); - len = frame_length / 2; - vad = WebRtcVad_CalcVad8khz(inst, speechNB, len); + len = frame_length / 2; + vad = WebRtcVad_CalcVad8khz(inst, speechNB, len); - return vad; + return vad; } -int WebRtcVad_CalcVad8khz(VadInstT* inst, const int16_t* speech_frame, - size_t frame_length) -{ - int16_t feature_vector[kNumChannels], total_power; +int WebRtcVad_CalcVad8khz(VadInstT* inst, + const int16_t* speech_frame, + size_t frame_length) { + int16_t feature_vector[kNumChannels], total_power; - // Get power in the bands - total_power = WebRtcVad_CalculateFeatures(inst, speech_frame, frame_length, - feature_vector); + // Get power in the bands + total_power = WebRtcVad_CalculateFeatures(inst, speech_frame, frame_length, + feature_vector); - // Make a VAD - inst->vad = GmmProbability(inst, feature_vector, total_power, frame_length); + // Make a VAD + inst->vad = GmmProbability(inst, 
feature_vector, total_power, frame_length); - return inst->vad; + return inst->vad; } diff --git a/common_audio/vad/vad_core_unittest.cc b/common_audio/vad/vad_core_unittest.cc index 3131a86ae3..186ffa5e70 100644 --- a/common_audio/vad/vad_core_unittest.cc +++ b/common_audio/vad/vad_core_unittest.cc @@ -54,7 +54,12 @@ TEST_F(VadTest, set_mode_core) { free(self); } +// TODO(bugs.webrtc.org/345674542): Fix/enable. +#if defined(__has_feature) && __has_feature(undefined_behavior_sanitizer) +TEST_F(VadTest, DISABLED_CalcVad) { +#else TEST_F(VadTest, CalcVad) { +#endif VadInstT* self = reinterpret_cast(malloc(sizeof(VadInstT))); int16_t speech[kMaxFrameLength]; diff --git a/common_audio/vad/vad_filterbank.c b/common_audio/vad/vad_filterbank.c index aff63f79cd..32830fa06f 100644 --- a/common_audio/vad/vad_filterbank.c +++ b/common_audio/vad/vad_filterbank.c @@ -10,23 +10,23 @@ #include "common_audio/vad/vad_filterbank.h" -#include "rtc_base/checks.h" #include "common_audio/signal_processing/include/signal_processing_library.h" +#include "rtc_base/checks.h" // Constants used in LogOfEnergy(). -static const int16_t kLogConst = 24660; // 160*log10(2) in Q9. +static const int16_t kLogConst = 24660; // 160*log10(2) in Q9. static const int16_t kLogEnergyIntPart = 14336; // 14 in Q10 // Coefficients used by HighPassFilter, Q14. -static const int16_t kHpZeroCoefs[3] = { 6631, -13262, 6631 }; -static const int16_t kHpPoleCoefs[3] = { 16384, -7756, 5620 }; +static const int16_t kHpZeroCoefs[3] = {6631, -13262, 6631}; +static const int16_t kHpPoleCoefs[3] = {16384, -7756, 5620}; // Allpass filter coefficients, upper and lower, in Q15. // Upper: 0.64, Lower: 0.17 -static const int16_t kAllPassCoefsQ15[2] = { 20972, 5571 }; +static const int16_t kAllPassCoefsQ15[2] = {20972, 5571}; // Adjustment for division with two in SplitFilter. -static const int16_t kOffsetVector[6] = { 368, 368, 272, 176, 176, 176 }; +static const int16_t kOffsetVector[6] = {368, 368, 272, 176, 176, 176}; // High pass filtering, with a cut-off frequency at 80 Hz, if the `data_in` is // sampled at 500 Hz. @@ -36,14 +36,15 @@ static const int16_t kOffsetVector[6] = { 368, 368, 272, 176, 176, 176 }; // - filter_state [i/o] : State of the filter. // - data_out [o] : Output audio data in the frequency interval // 80 - 250 Hz. -static void HighPassFilter(const int16_t* data_in, size_t data_length, - int16_t* filter_state, int16_t* data_out) { +static void HighPassFilter(const int16_t* data_in, + size_t data_length, + int16_t* filter_state, + int16_t* data_out) { size_t i; const int16_t* in_ptr = data_in; int16_t* out_ptr = data_out; int32_t tmp32 = 0; - // The sum of the absolute values of the impulse response: // The zero/pole-filter has a max amplification of a single sample of: 1.4546 // Impulse response: 0.4047 -0.6179 -0.0266 0.1993 0.1035 -0.0194 @@ -64,7 +65,7 @@ static void HighPassFilter(const int16_t* data_in, size_t data_length, tmp32 -= kHpPoleCoefs[1] * filter_state[2]; tmp32 -= kHpPoleCoefs[2] * filter_state[3]; filter_state[3] = filter_state[2]; - filter_state[2] = (int16_t) (tmp32 >> 14); + filter_state[2] = (int16_t)(tmp32 >> 14); *out_ptr++ = filter_state[2]; } } @@ -78,8 +79,10 @@ static void HighPassFilter(const int16_t* data_in, size_t data_length, // - filter_coefficient [i] : Given in Q15. // - filter_state [i/o] : State of the filter given in Q(-1). // - data_out [o] : Output audio signal given in Q(-1). 
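// Editor's note (added for clarity, not part of the original change): ignoring
// the Q-format scaling, the loop below implements the first-order all-pass
//   y[n] = c * x[n] + s,   s = x[n] - c * y[n],
// i.e. y[n] = c * x[n] + x[n-1] - c * y[n-1], applied to every other input
// sample; two such branches are combined by SplitFilter() to form a half-band
// split.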
-static void AllPassFilter(const int16_t* data_in, size_t data_length, - int16_t filter_coefficient, int16_t* filter_state, +static void AllPassFilter(const int16_t* data_in, + size_t data_length, + int16_t filter_coefficient, + int16_t* filter_state, int16_t* data_out) { // The filter can only cause overflow (in the w16 output variable) // if more than 4 consecutive input numbers are of maximum value and @@ -90,18 +93,18 @@ static void AllPassFilter(const int16_t* data_in, size_t data_length, size_t i; int16_t tmp16 = 0; int32_t tmp32 = 0; - int32_t state32 = ((int32_t) (*filter_state) * (1 << 16)); // Q15 + int32_t state32 = ((int32_t)(*filter_state) * (1 << 16)); // Q15 for (i = 0; i < data_length; i++) { tmp32 = state32 + filter_coefficient * *data_in; - tmp16 = (int16_t) (tmp32 >> 16); // Q(-1) + tmp16 = (int16_t)(tmp32 >> 16); // Q(-1) *data_out++ = tmp16; state32 = (*data_in * (1 << 14)) - filter_coefficient * tmp16; // Q14 - state32 *= 2; // Q15. + state32 *= 2; // Q15. data_in += 2; } - *filter_state = (int16_t) (state32 >> 16); // Q(-1) + *filter_state = (int16_t)(state32 >> 16); // Q(-1) } // Splits `data_in` into `hp_data_out` and `lp_data_out` corresponding to @@ -115,9 +118,12 @@ static void AllPassFilter(const int16_t* data_in, size_t data_length, // The length is `data_length` / 2. // - lp_data_out [o] : Output audio data of the lower half of the spectrum. // The length is `data_length` / 2. -static void SplitFilter(const int16_t* data_in, size_t data_length, - int16_t* upper_state, int16_t* lower_state, - int16_t* hp_data_out, int16_t* lp_data_out) { +static void SplitFilter(const int16_t* data_in, + size_t data_length, + int16_t* upper_state, + int16_t* lower_state, + int16_t* hp_data_out, + int16_t* lp_data_out) { size_t i; size_t half_length = data_length >> 1; // Downsampling by 2. int16_t tmp_out; @@ -149,8 +155,10 @@ static void SplitFilter(const int16_t* data_in, size_t data_length, // NOTE: `total_energy` is only updated if // `total_energy` <= `kMinEnergy`. // - log_energy [o] : 10 * log10("energy of `data_in`") given in Q4. -static void LogOfEnergy(const int16_t* data_in, size_t data_length, - int16_t offset, int16_t* total_energy, +static void LogOfEnergy(const int16_t* data_in, + size_t data_length, + int16_t offset, + int16_t* total_energy, int16_t* log_energy) { // `tot_rshifts` accumulates the number of right shifts performed on `energy`. int tot_rshifts = 0; @@ -161,8 +169,8 @@ static void LogOfEnergy(const int16_t* data_in, size_t data_length, RTC_DCHECK(data_in); RTC_DCHECK_GT(data_length, 0); - energy = (uint32_t) WebRtcSpl_Energy((int16_t*) data_in, data_length, - &tot_rshifts); + energy = + (uint32_t)WebRtcSpl_Energy((int16_t*)data_in, data_length, &tot_rshifts); if (energy != 0) { // By construction, normalizing to 15 bits is equivalent with 17 leading @@ -205,12 +213,12 @@ static void LogOfEnergy(const int16_t* data_in, size_t data_length, // Note that frac_Q15 = (`energy` & 0x00003FFF) // Calculate and add the fractional part to `log2_energy`. - log2_energy += (int16_t) ((energy & 0x00003FFF) >> 4); + log2_energy += (int16_t)((energy & 0x00003FFF) >> 4); // `kLogConst` is in Q9, `log2_energy` in Q10 and `tot_rshifts` in Q0. // Note that we in our derivation above have accounted for an output in Q4. 
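// Editor's note (worked arithmetic, not part of the original change):
// kLogConst = 24660 ~= 160 * log10(2) in Q9, which is the same as
// 10 * log10(2) in Q13 (3.0103 * 8192 ~= 24660). Hence Q13 * Q10 >> 19 = Q4
// and Q13 * Q0 >> 9 = Q4, so both terms of the statement below are already in
// the Q4 domain promised for `log_energy`.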
*log_energy = (int16_t)(((kLogConst * log2_energy) >> 19) + - ((tot_rshifts * kLogConst) >> 9)); + ((tot_rshifts * kLogConst) >> 9)); if (*log_energy < 0) { *log_energy = 0; @@ -235,13 +243,15 @@ static void LogOfEnergy(const int16_t* data_in, size_t data_length, // right shifted `energy` will fit in an int16_t. In addition, adding the // value to `total_energy` is wrap around safe as long as // `kMinEnergy` < 8192. - *total_energy += (int16_t) (energy >> -tot_rshifts); // Q0. + *total_energy += (int16_t)(energy >> -tot_rshifts); // Q0. } } } -int16_t WebRtcVad_CalculateFeatures(VadInstT* self, const int16_t* data_in, - size_t data_length, int16_t* features) { +int16_t WebRtcVad_CalculateFeatures(VadInstT* self, + const int16_t* data_in, + size_t data_length, + int16_t* features) { int16_t total_energy = 0; // We expect `data_length` to be 80, 160 or 240 samples, which corresponds to // 10, 20 or 30 ms in 8 kHz. Therefore, the intermediate downsampled data will @@ -256,8 +266,8 @@ int16_t WebRtcVad_CalculateFeatures(VadInstT* self, const int16_t* data_in, // Initialize variables for the first SplitFilter(). int frequency_band = 0; const int16_t* in_ptr = data_in; // [0 - 4000] Hz. - int16_t* hp_out_ptr = hp_120; // [2000 - 4000] Hz. - int16_t* lp_out_ptr = lp_120; // [0 - 2000] Hz. + int16_t* hp_out_ptr = hp_120; // [2000 - 4000] Hz. + int16_t* lp_out_ptr = lp_120; // [0 - 2000] Hz. RTC_DCHECK_LE(data_length, 240); RTC_DCHECK_LT(4, kNumChannels - 1); // Checking maximum `frequency_band`. @@ -268,7 +278,7 @@ int16_t WebRtcVad_CalculateFeatures(VadInstT* self, const int16_t* data_in, // For the upper band (2000 Hz - 4000 Hz) split at 3000 Hz and downsample. frequency_band = 1; - in_ptr = hp_120; // [2000 - 4000] Hz. + in_ptr = hp_120; // [2000 - 4000] Hz. hp_out_ptr = hp_60; // [3000 - 4000] Hz. lp_out_ptr = lp_60; // [2000 - 3000] Hz. SplitFilter(in_ptr, length, &self->upper_state[frequency_band], @@ -284,9 +294,9 @@ int16_t WebRtcVad_CalculateFeatures(VadInstT* self, const int16_t* data_in, // For the lower band (0 Hz - 2000 Hz) split at 1000 Hz and downsample. frequency_band = 2; - in_ptr = lp_120; // [0 - 2000] Hz. - hp_out_ptr = hp_60; // [1000 - 2000] Hz. - lp_out_ptr = lp_60; // [0 - 1000] Hz. + in_ptr = lp_120; // [0 - 2000] Hz. + hp_out_ptr = hp_60; // [1000 - 2000] Hz. + lp_out_ptr = lp_60; // [0 - 1000] Hz. length = half_data_length; // `data_length` / 2 <=> bandwidth = 2000 Hz. SplitFilter(in_ptr, length, &self->upper_state[frequency_band], &self->lower_state[frequency_band], hp_out_ptr, lp_out_ptr); @@ -297,7 +307,7 @@ int16_t WebRtcVad_CalculateFeatures(VadInstT* self, const int16_t* data_in, // For the lower band (0 Hz - 1000 Hz) split at 500 Hz and downsample. frequency_band = 3; - in_ptr = lp_60; // [0 - 1000] Hz. + in_ptr = lp_60; // [0 - 1000] Hz. hp_out_ptr = hp_120; // [500 - 1000] Hz. lp_out_ptr = lp_120; // [0 - 500] Hz. SplitFilter(in_ptr, length, &self->upper_state[frequency_band], @@ -309,7 +319,7 @@ int16_t WebRtcVad_CalculateFeatures(VadInstT* self, const int16_t* data_in, // For the lower band (0 Hz - 500 Hz) split at 250 Hz and downsample. frequency_band = 4; - in_ptr = lp_120; // [0 - 500] Hz. + in_ptr = lp_120; // [0 - 500] Hz. hp_out_ptr = hp_60; // [250 - 500] Hz. lp_out_ptr = lp_60; // [0 - 250] Hz. 
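// Editor's note (summary, not part of the original change): the call below is
// the last split in the cascade; taken together the splits yield the six
// sub-bands whose log energies form `features`: roughly 80-250, 250-500,
// 500-1000, 1000-2000, 2000-3000 and 3000-4000 Hz, the 80 Hz lower edge coming
// from the HighPassFilter() defined earlier in this file.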
SplitFilter(in_ptr, length, &self->upper_state[frequency_band], diff --git a/common_audio/vad/vad_gmm.c b/common_audio/vad/vad_gmm.c index 4a7fe67d09..46d2de165a 100644 --- a/common_audio/vad/vad_gmm.c +++ b/common_audio/vad/vad_gmm.c @@ -36,8 +36,8 @@ int32_t WebRtcVad_GaussianProbability(int16_t input, // Calculate `inv_std` = 1 / s, in Q10. // 131072 = 1 in Q17, and (`std` >> 1) is for rounding instead of truncation. // Q-domain: Q17 / Q7 = Q10. - tmp32 = (int32_t) 131072 + (int32_t) (std >> 1); - inv_std = (int16_t) WebRtcSpl_DivW32W16(tmp32, std); + tmp32 = (int32_t)131072 + (int32_t)(std >> 1); + inv_std = (int16_t)WebRtcSpl_DivW32W16(tmp32, std); // Calculate `inv_std2` = 1 / s^2, in Q14. tmp16 = (inv_std >> 2); // Q10 -> Q8. diff --git a/common_audio/vad/vad_gmm_unittest.cc b/common_audio/vad/vad_gmm_unittest.cc index be61f7f971..f203cf28c5 100644 --- a/common_audio/vad/vad_gmm_unittest.cc +++ b/common_audio/vad/vad_gmm_unittest.cc @@ -18,7 +18,12 @@ extern "C" { namespace webrtc { namespace test { +// TODO(bugs.webrtc.org/345674543): Fix/enable. +#if defined(__has_feature) && __has_feature(undefined_behavior_sanitizer) +TEST_F(VadTest, DISABLED_vad_gmm) { +#else TEST_F(VadTest, vad_gmm) { +#endif int16_t delta = 0; // Input value at mean. EXPECT_EQ(1048576, WebRtcVad_GaussianProbability(0, 0, 128, &delta)); diff --git a/common_audio/vad/vad_sp.c b/common_audio/vad/vad_sp.c index 3d24cf64b3..b745465ad3 100644 --- a/common_audio/vad/vad_sp.c +++ b/common_audio/vad/vad_sp.c @@ -10,15 +10,15 @@ #include "common_audio/vad/vad_sp.h" -#include "rtc_base/checks.h" #include "common_audio/signal_processing/include/signal_processing_library.h" #include "common_audio/vad/vad_core.h" +#include "rtc_base/checks.h" // Allpass filter coefficients, upper and lower, in Q13. // Upper: 0.64, Lower: 0.17. -static const int16_t kAllPassCoefsQ13[2] = { 5243, 1392 }; // Q13. -static const int16_t kSmoothingDown = 6553; // 0.2 in Q15. -static const int16_t kSmoothingUp = 32439; // 0.99 in Q15. +static const int16_t kAllPassCoefsQ13[2] = {5243, 1392}; // Q13. +static const int16_t kSmoothingDown = 6553; // 0.2 in Q15. +static const int16_t kSmoothingUp = 32439; // 0.99 in Q15. // TODO(bjornv): Move this function to vad_filterbank.c. // Downsampling filter based on splitting filter and allpass functions. @@ -36,14 +36,14 @@ void WebRtcVad_Downsampling(const int16_t* signal_in, // Filter coefficients in Q13, filter state in Q0. for (n = 0; n < half_length; n++) { // All-pass filtering upper branch. - tmp16_1 = (int16_t) ((tmp32_1 >> 1) + - ((kAllPassCoefsQ13[0] * *signal_in) >> 14)); + tmp16_1 = + (int16_t)((tmp32_1 >> 1) + ((kAllPassCoefsQ13[0] * *signal_in) >> 14)); *signal_out = tmp16_1; tmp32_1 = (int32_t)(*signal_in++) - ((kAllPassCoefsQ13[0] * tmp16_1) >> 12); // All-pass filtering lower branch. 
- tmp16_2 = (int16_t) ((tmp32_2 >> 1) + - ((kAllPassCoefsQ13[1] * *signal_in) >> 14)); + tmp16_2 = + (int16_t)((tmp32_2 >> 1) + ((kAllPassCoefsQ13[1] * *signal_in) >> 14)); *signal_out++ += tmp16_2; tmp32_2 = (int32_t)(*signal_in++) - ((kAllPassCoefsQ13[1] * tmp16_2) >> 12); } @@ -170,7 +170,7 @@ int16_t WebRtcVad_FindMinimum(VadInstT* self, tmp32 = (alpha + 1) * self->mean_value[channel]; tmp32 += (WEBRTC_SPL_WORD16_MAX - alpha) * current_median; tmp32 += 16384; - self->mean_value[channel] = (int16_t) (tmp32 >> 15); + self->mean_value[channel] = (int16_t)(tmp32 >> 15); return self->mean_value[channel]; } diff --git a/common_audio/vad/vad_unittest.cc b/common_audio/vad/vad_unittest.cc index c54014efce..5f6afb110d 100644 --- a/common_audio/vad/vad_unittest.cc +++ b/common_audio/vad/vad_unittest.cc @@ -12,9 +12,11 @@ #include +#include +#include + #include "common_audio/signal_processing/include/signal_processing_library.h" #include "common_audio/vad/include/webrtc_vad.h" -#include "rtc_base/arraysize.h" #include "rtc_base/checks.h" #include "test/gtest.h" @@ -54,7 +56,12 @@ bool VadTest::ValidRatesAndFrameLengths(int rate, size_t frame_length) { namespace webrtc { namespace test { +// TODO(bugs.webrtc.org/345674542): Fix/enable. +#if defined(__has_feature) && __has_feature(undefined_behavior_sanitizer) +TEST_F(VadTest, DISABLED_ApiTest) { +#else TEST_F(VadTest, ApiTest) { +#endif // This API test runs through the APIs for all possible valid and invalid // combinations. @@ -123,20 +130,21 @@ TEST_F(VadTest, ValidRatesFrameLengths) { // This test verifies valid and invalid rate/frame_length combinations. We // loop through some sampling rates and frame lengths from negative values to // values larger than possible. - const int kRates[] = {-8000, -4000, 0, 4000, 8000, 8001, - 15999, 16000, 32000, 48000, 48001, 96000}; - - const size_t kFrameLengths[] = {0, 80, 81, 159, 160, 240, - 320, 480, 640, 960, 1440, 2000}; - - for (size_t i = 0; i < arraysize(kRates); i++) { - for (size_t j = 0; j < arraysize(kFrameLengths); j++) { - if (ValidRatesAndFrameLengths(kRates[i], kFrameLengths[j])) { - EXPECT_EQ( - 0, WebRtcVad_ValidRateAndFrameLength(kRates[i], kFrameLengths[j])); + const int kInvalidRates[] = {-8000, -4000, 0, 4000, 8000, 8001, + 15999, 16000, 32000, 48000, 48001, 96000}; + + const size_t kInvalidFrameLengths[] = {0, 80, 81, 159, 160, 240, + 320, 480, 640, 960, 1440, 2000}; + + for (size_t i = 0; i < std::size(kInvalidRates); i++) { + for (size_t j = 0; j < std::size(kInvalidFrameLengths); j++) { + if (ValidRatesAndFrameLengths(kInvalidRates[i], + kInvalidFrameLengths[j])) { + EXPECT_EQ(0, WebRtcVad_ValidRateAndFrameLength( + kInvalidRates[i], kInvalidFrameLengths[j])); } else { - EXPECT_EQ( - -1, WebRtcVad_ValidRateAndFrameLength(kRates[i], kFrameLengths[j])); + EXPECT_EQ(-1, WebRtcVad_ValidRateAndFrameLength( + kInvalidRates[i], kInvalidFrameLengths[j])); } } } diff --git a/common_audio/vad/webrtc_vad.c b/common_audio/vad/webrtc_vad.c index 6dd14d8b55..d3c8b08cbe 100644 --- a/common_audio/vad/webrtc_vad.c +++ b/common_audio/vad/webrtc_vad.c @@ -17,7 +17,7 @@ #include "common_audio/vad/vad_core.h" static const int kInitCheck = 42; -static const int kValidRates[] = { 8000, 16000, 32000, 48000 }; +static const int kValidRates[] = {8000, 16000, 32000, 48000}; static const size_t kRatesSize = sizeof(kValidRates) / sizeof(*kValidRates); static const int kMaxFrameLengthMs = 30; @@ -36,12 +36,12 @@ void WebRtcVad_Free(VadInst* handle) { // TODO(bjornv): Move WebRtcVad_InitCore() code here. 
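// Editor's note: a minimal usage sketch of the API wrapped in this file,
// assuming the usual WebRtcVad_Create() factory from webrtc_vad.h (not part of
// this diff) and a caller-provided 16 kHz frame buffer:
//
//   VadInst* vad = WebRtcVad_Create();
//   if (WebRtcVad_Init(vad) == 0 && WebRtcVad_set_mode(vad, 2) == 0) {
//     int16_t frame[160] = {0};  // Hypothetical 10 ms of 16 kHz audio.
//     int result = WebRtcVad_Process(vad, 16000, frame, 160);
//     // result: 1 = active speech, 0 = non-speech, -1 = error.
//   }
//   WebRtcVad_Free(vad);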
int WebRtcVad_Init(VadInst* handle) { // Initialize the core VAD component. - return WebRtcVad_InitCore((VadInstT*) handle); + return WebRtcVad_InitCore((VadInstT*)handle); } // TODO(bjornv): Move WebRtcVad_set_mode_core() code here. int WebRtcVad_set_mode(VadInst* handle, int mode) { - VadInstT* self = (VadInstT*) handle; + VadInstT* self = (VadInstT*)handle; if (handle == NULL) { return -1; @@ -53,10 +53,12 @@ int WebRtcVad_set_mode(VadInst* handle, int mode) { return WebRtcVad_set_mode_core(self, mode); } -int WebRtcVad_Process(VadInst* handle, int fs, const int16_t* audio_frame, +int WebRtcVad_Process(VadInst* handle, + int fs, + const int16_t* audio_frame, size_t frame_length) { int vad = -1; - VadInstT* self = (VadInstT*) handle; + VadInstT* self = (VadInstT*)handle; if (handle == NULL) { return -1; @@ -73,7 +75,7 @@ int WebRtcVad_Process(VadInst* handle, int fs, const int16_t* audio_frame, } if (fs == 48000) { - vad = WebRtcVad_CalcVad48khz(self, audio_frame, frame_length); + vad = WebRtcVad_CalcVad48khz(self, audio_frame, frame_length); } else if (fs == 32000) { vad = WebRtcVad_CalcVad32khz(self, audio_frame, frame_length); } else if (fs == 16000) { @@ -99,7 +101,7 @@ int WebRtcVad_ValidRateAndFrameLength(int rate, size_t frame_length) { for (i = 0; i < kRatesSize; i++) { if (kValidRates[i] == rate) { for (valid_length_ms = 10; valid_length_ms <= kMaxFrameLengthMs; - valid_length_ms += 10) { + valid_length_ms += 10) { valid_length = (size_t)(kValidRates[i] / 1000 * valid_length_ms); if (frame_length == valid_length) { return_value = 0; diff --git a/common_audio/wav_file_unittest.cc b/common_audio/wav_file_unittest.cc index 97cecc345f..fbfa4aa73d 100644 --- a/common_audio/wav_file_unittest.cc +++ b/common_audio/wav_file_unittest.cc @@ -17,6 +17,7 @@ #include #include "common_audio/wav_header.h" +#include "rtc_base/logging.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" @@ -31,11 +32,24 @@ namespace webrtc { +namespace { +const char* SampleFormatToStr(WavFile::SampleFormat format) { + switch (format) { + case WavFile::SampleFormat::kInt16: + return "int16"; + case WavFile::SampleFormat::kFloat: + return "float"; + } + RTC_CHECK_NOTREACHED(); +} +} // namespace + static const float kSamples[] = {0.0, 10.0, 4e4, -1e9}; // Write a tiny WAV file with the C++ interface and verify the result. TEST(WavWriterTest, MAYBE_CPP) { - const std::string outfile = test::OutputPath() + "wavtest1.wav"; + const std::string outfile = + test::OutputPathWithRandomDirectory() + "wavtest1.wav"; static const size_t kNumSamples = 3; { WavWriter w(outfile, 14099, 1); @@ -112,7 +126,8 @@ TEST(WavWriterTest, LargeFile) { {WavFile::SampleFormat::kInt16, WavFile::SampleFormat::kFloat}) { for (WavFile::SampleFormat read_format : {WavFile::SampleFormat::kInt16, WavFile::SampleFormat::kFloat}) { - std::string outfile = test::OutputPath() + "wavtest3.wav"; + std::string outdir = test::OutputPathWithRandomDirectory(); + std::string outfile = outdir + "wavtest3.wav"; float samples[kNumSamples]; for (size_t i = 0; i < kNumSamples; i += kNumChannels) { // A nice periodic beeping sound. 
@@ -123,6 +138,9 @@ TEST(WavWriterTest, LargeFile) { std::sin(t * kToneHz * 2 * M_PI); samples[i] = std::pow(std::sin(t * 2 * 2 * M_PI), 10) * x; samples[i + 1] = std::pow(std::cos(t * 2 * 2 * M_PI), 10) * x; + // See https://issues.webrtc.org/issues/379973428 + RTC_CHECK(isfinite(samples[i])); + RTC_CHECK(isfinite(samples[i + 1])); } { WavWriter w(outfile, kSampleRate, kNumChannels, wav_format); @@ -130,7 +148,7 @@ TEST(WavWriterTest, LargeFile) { EXPECT_EQ(kNumChannels, w.num_channels()); EXPECT_EQ(0u, w.num_samples()); if (write_format == WavFile::SampleFormat::kFloat) { - float truncated_samples[kNumSamples]; + int16_t truncated_samples[kNumSamples]; for (size_t k = 0; k < kNumSamples; ++k) { truncated_samples[k] = static_cast(samples[k]); } @@ -159,6 +177,14 @@ TEST(WavWriterTest, LargeFile) { EXPECT_EQ(kNumSamples, r.ReadSamples(kNumSamples, read_samples)); for (size_t i = 0; i < kNumSamples; ++i) { EXPECT_NEAR(samples[i], read_samples[i], 1); + if (!isfinite(samples[i])) { + // See https://issues.webrtc.org/issues/379973428 + RTC_LOG(LS_ERROR) + << "samples[" << i << "] is not finite. " + << "wav_format=" << SampleFormatToStr(wav_format) + << ", write_format=" << SampleFormatToStr(write_format) + << ", read_format=" << SampleFormatToStr(read_format); + } } EXPECT_EQ(0u, r.ReadSamples(kNumSamples, read_samples)); } else { @@ -166,10 +192,20 @@ TEST(WavWriterTest, LargeFile) { EXPECT_EQ(kNumSamples, r.ReadSamples(kNumSamples, read_samples)); for (size_t i = 0; i < kNumSamples; ++i) { EXPECT_NEAR(samples[i], static_cast(read_samples[i]), 1); + if (!isfinite(samples[i])) { + // See https://issues.webrtc.org/issues/379973428 + RTC_LOG(LS_ERROR) + << "samples[" << i << "] is not finite. " + << "wav_format=" << SampleFormatToStr(wav_format) + << ", write_format=" << SampleFormatToStr(write_format) + << ", read_format=" << SampleFormatToStr(read_format); + } } EXPECT_EQ(0u, r.ReadSamples(kNumSamples, read_samples)); } } + RTC_CHECK(test::RemoveFile(outfile)); + RTC_CHECK(test::RemoveDir(outdir)); } } } @@ -177,7 +213,8 @@ TEST(WavWriterTest, LargeFile) { // Write a tiny WAV file with the C++ interface then read-reset-read. 
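// Editor's note: a hedged sketch of the write/read round trip these tests
// exercise (constructor and sample I/O methods as used in this file; exact
// overloads assumed, file name hypothetical):
//
//   const std::string path =
//       test::OutputPathWithRandomDirectory() + "sketch.wav";
//   {
//     WavWriter w(path, /*sample_rate=*/14099, /*num_channels=*/1);
//     const float samples[3] = {0.0f, 10.0f, -10.0f};
//     w.WriteSamples(samples, 3);
//   }  // Closing the writer finalizes the header.
//   WavReader r(path);
//   float read_back[3];
//   r.ReadSamples(3, read_back);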
TEST(WavReaderTest, MAYBE_CPPReset) { - const std::string outfile = test::OutputPath() + "wavtest4.wav"; + const std::string outfile = + test::OutputPathWithRandomDirectory() + "wavtest4.wav"; static const size_t kNumSamples = 3; { WavWriter w(outfile, 14099, 1); diff --git a/common_audio/wav_header.cc b/common_audio/wav_header.cc index bca209a665..467dcc78bb 100644 --- a/common_audio/wav_header.cc +++ b/common_audio/wav_header.cc @@ -205,7 +205,7 @@ void WritePcmWavHeader(size_t num_channels, RTC_CHECK(buf); RTC_CHECK(header_size); *header_size = kPcmWavHeaderSize; - auto header = rtc::MsanUninitialized({}); + auto header = MsanUninitialized({}); const size_t bytes_in_payload = bytes_per_sample * num_samples; header.riff.header.ID = PackFourCC('R', 'I', 'F', 'F'); @@ -236,7 +236,7 @@ void WriteIeeeFloatWavHeader(size_t num_channels, RTC_CHECK(buf); RTC_CHECK(header_size); *header_size = kIeeeFloatWavHeaderSize; - auto header = rtc::MsanUninitialized({}); + auto header = MsanUninitialized({}); const size_t bytes_in_payload = bytes_per_sample * num_samples; header.riff.header.ID = PackFourCC('R', 'I', 'F', 'F'); @@ -374,7 +374,7 @@ bool ReadWavHeader(WavHeaderReader* readable, size_t* num_samples, int64_t* data_start_pos) { // Read using the PCM header, even though it might be float Wav file - auto header = rtc::MsanUninitialized({}); + auto header = MsanUninitialized({}); // Read RIFF chunk. if (readable->Read(&header.riff, sizeof(header.riff)) != sizeof(header.riff)) diff --git a/common_video/BUILD.gn b/common_video/BUILD.gn index 4736aa5fb6..1a69762a65 100644 --- a/common_video/BUILD.gn +++ b/common_video/BUILD.gn @@ -8,6 +8,30 @@ import("../webrtc.gni") +rtc_library("corruption_detection_converters") { + sources = [ + "corruption_detection_converters.cc", + "corruption_detection_converters.h", + ] + deps = [ + ":frame_instrumentation_data", + "../api/transport/rtp:corruption_detection_message", + "../rtc_base:checks", + ] +} + +rtc_library("frame_instrumentation_data") { + sources = [ "frame_instrumentation_data.h" ] +} + +rtc_library("corruption_score_calculator") { + sources = [ "include/corruption_score_calculator.h" ] + deps = [ + ":frame_instrumentation_data", + "../api/video:video_frame", + ] +} + rtc_library("common_video") { visibility = [ "*" ] @@ -80,12 +104,12 @@ rtc_library("common_video") { "../rtc_base:race_checker", "../rtc_base:rate_statistics", "../rtc_base:refcount", - "../rtc_base:rtc_task_queue", "../rtc_base:safe_minmax", "../rtc_base:timeutils", "../rtc_base/synchronization:mutex", "../rtc_base/system:rtc_export", "../system_wrappers:metrics", + "//third_party/abseil-cpp/absl/numeric:bits", "//third_party/libyuv", ] if (rtc_use_h265) { @@ -94,10 +118,6 @@ rtc_library("common_video") { "../rtc_base/containers:flat_map", ] } - absl_deps = [ - "//third_party/abseil-cpp/absl/numeric:bits", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_source_set("frame_counts") { @@ -117,6 +137,17 @@ if (rtc_include_tests && !build_with_chromium) { } } + rtc_library("corruption_detection_converters_unittest") { + testonly = true + sources = [ "corruption_detection_converters_unittest.cc" ] + deps = [ + ":corruption_detection_converters", + ":frame_instrumentation_data", + "../api/transport/rtp:corruption_detection_message", + "../test:test_support", + ] + } + rtc_test("common_video_unittests") { testonly = true @@ -144,6 +175,7 @@ if (rtc_include_tests && !build_with_chromium) { deps = [ ":common_video", + ":corruption_detection_converters_unittest", 
"../api:scoped_refptr", "../api/units:time_delta", "../api/video:video_frame", @@ -167,11 +199,8 @@ if (rtc_include_tests && !build_with_chromium) { "//third_party/libyuv", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - data = common_video_resources if (is_android) { - deps += [ "//testing/android/native_test:native_test_support" ] shard_timeout = 900 } diff --git a/common_video/OWNERS b/common_video/OWNERS index 455e247d90..1c080874f0 100644 --- a/common_video/OWNERS +++ b/common_video/OWNERS @@ -1,4 +1,5 @@ magjed@webrtc.org marpan@webrtc.org sprang@webrtc.org +ssilkin@webrtc.org stefan@webrtc.org diff --git a/common_video/bitrate_adjuster.cc b/common_video/bitrate_adjuster.cc index c53c3a02f6..8f3ddeb532 100644 --- a/common_video/bitrate_adjuster.cc +++ b/common_video/bitrate_adjuster.cc @@ -67,14 +67,14 @@ uint32_t BitrateAdjuster::GetAdjustedBitrateBps() const { return adjusted_bitrate_bps_; } -absl::optional BitrateAdjuster::GetEstimatedBitrateBps() { +std::optional BitrateAdjuster::GetEstimatedBitrateBps() { MutexLock lock(&mutex_); - return bitrate_tracker_.Rate(rtc::TimeMillis()); + return bitrate_tracker_.Rate(TimeMillis()); } void BitrateAdjuster::Update(size_t frame_size) { MutexLock lock(&mutex_); - uint32_t current_time_ms = rtc::TimeMillis(); + uint32_t current_time_ms = TimeMillis(); bitrate_tracker_.Update(frame_size, current_time_ms); UpdateBitrate(current_time_ms); } diff --git a/common_video/bitrate_adjuster_unittest.cc b/common_video/bitrate_adjuster_unittest.cc index 995aac1c27..1ff911c4be 100644 --- a/common_video/bitrate_adjuster_unittest.cc +++ b/common_video/bitrate_adjuster_unittest.cc @@ -64,7 +64,7 @@ class BitrateAdjusterTest : public ::testing::Test { protected: static const float kMinAdjustedBitratePct; static const float kMaxAdjustedBitratePct; - rtc::ScopedFakeClock clock_; + ScopedFakeClock clock_; BitrateAdjuster adjuster_; }; diff --git a/common_video/corruption_detection_converters.cc b/common_video/corruption_detection_converters.cc new file mode 100644 index 0000000000..dfd1c38201 --- /dev/null +++ b/common_video/corruption_detection_converters.cc @@ -0,0 +1,139 @@ +/* + * Copyright 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "common_video/corruption_detection_converters.h" + +#include +#include + +#include "api/transport/rtp/corruption_detection_message.h" +#include "common_video/frame_instrumentation_data.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +namespace { + +int GetFullSequenceIndex(int previous_sequence_index, + int sequence_index_update, + bool update_the_most_significant_bits) { + RTC_CHECK_GE(previous_sequence_index, 0) + << "previous_sequence_index must not be negative"; + RTC_CHECK_LE(previous_sequence_index, 0x7FFF) + << "previous_sequence_index must be at most 15 bits"; + RTC_CHECK_GE(sequence_index_update, 0) + << "sequence_index_update must not be negative"; + RTC_CHECK_LE(sequence_index_update, 0b0111'1111) + << "sequence_index_update must be at most 7 bits"; + if (update_the_most_significant_bits) { + // Reset LSB. 
+ return sequence_index_update << 7; + } + int upper_bits = previous_sequence_index & 0b0011'1111'1000'0000; + if (sequence_index_update < (previous_sequence_index & 0b0111'1111)) { + // Assume one and only one wraparound has happened. + upper_bits += 0b1000'0000; + } + // Replace the lowest bits with the bits from the update. + return upper_bits + sequence_index_update; +} + +int GetSequenceIndexForMessage(int sequence_index, + bool communicate_upper_bits) { + return communicate_upper_bits ? (sequence_index >> 7) + : (sequence_index & 0b0111'1111); +} + +} // namespace + +std::optional +ConvertCorruptionDetectionMessageToFrameInstrumentationData( + const CorruptionDetectionMessage& message, + int previous_sequence_index) { + if (previous_sequence_index < 0) { + return std::nullopt; + } + if (message.sample_values().empty()) { + return std::nullopt; + } + int full_sequence_index = GetFullSequenceIndex( + previous_sequence_index, message.sequence_index(), + message.interpret_sequence_index_as_most_significant_bits()); + std::vector sample_values(message.sample_values().cbegin(), + message.sample_values().cend()); + return FrameInstrumentationData{ + .sequence_index = full_sequence_index, + .communicate_upper_bits = + message.interpret_sequence_index_as_most_significant_bits(), + .std_dev = message.std_dev(), + .luma_error_threshold = message.luma_error_threshold(), + .chroma_error_threshold = message.chroma_error_threshold(), + .sample_values = sample_values}; +} + +std::optional +ConvertCorruptionDetectionMessageToFrameInstrumentationSyncData( + const CorruptionDetectionMessage& message, + int previous_sequence_index) { + if (previous_sequence_index < 0) { + return std::nullopt; + } + if (!message.sample_values().empty()) { + return std::nullopt; + } + if (!message.interpret_sequence_index_as_most_significant_bits()) { + return std::nullopt; + } + return FrameInstrumentationSyncData{ + .sequence_index = GetFullSequenceIndex( + previous_sequence_index, message.sequence_index(), + /*update_the_most_significant_bits=*/true), + .communicate_upper_bits = true}; +} + +std::optional +ConvertFrameInstrumentationDataToCorruptionDetectionMessage( + const FrameInstrumentationData& data) { + if (data.sequence_index < 0 || data.sequence_index > 0b0011'1111'1111'1111) { + return std::nullopt; + } + // Frame instrumentation data must have sample values. 
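// Editor's note: a hedged round-trip sketch of this converter pair, using only
// the types and functions declared in corruption_detection_converters.h
// (values are illustrative):
//
//   FrameInstrumentationData in = {.sequence_index = 5,
//                                  .communicate_upper_bits = false,
//                                  .std_dev = 1.0,
//                                  .luma_error_threshold = 5,
//                                  .chroma_error_threshold = 5,
//                                  .sample_values = {1.0, 2.0}};
//   auto msg = ConvertFrameInstrumentationDataToCorruptionDetectionMessage(in);
//   auto out = ConvertCorruptionDetectionMessageToFrameInstrumentationData(
//       *msg, /*previous_sequence_index=*/0);
//   // out->sequence_index == 5, as long as no 7-bit wraparound is implied.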
+ if (data.sample_values.empty()) { + return std::nullopt; + } + return CorruptionDetectionMessage::Builder() + .WithSequenceIndex(GetSequenceIndexForMessage( + data.sequence_index, data.communicate_upper_bits)) + .WithInterpretSequenceIndexAsMostSignificantBits( + data.communicate_upper_bits) + .WithStdDev(data.std_dev) + .WithLumaErrorThreshold(data.luma_error_threshold) + .WithChromaErrorThreshold(data.chroma_error_threshold) + .WithSampleValues(data.sample_values) + .Build(); +} + +std::optional +ConvertFrameInstrumentationSyncDataToCorruptionDetectionMessage( + const FrameInstrumentationSyncData& data) { + RTC_DCHECK(data.communicate_upper_bits) + << "FrameInstrumentationSyncData data must always send the upper bits."; + + if (data.sequence_index < 0 || data.sequence_index > 0b0011'1111'1111'1111) { + return std::nullopt; + } + return CorruptionDetectionMessage::Builder() + .WithSequenceIndex(GetSequenceIndexForMessage( + data.sequence_index, data.communicate_upper_bits)) + .WithInterpretSequenceIndexAsMostSignificantBits(true) + .Build(); +} + +} // namespace webrtc diff --git a/common_video/corruption_detection_converters.h b/common_video/corruption_detection_converters.h new file mode 100644 index 0000000000..370f924800 --- /dev/null +++ b/common_video/corruption_detection_converters.h @@ -0,0 +1,37 @@ +/* + * Copyright 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef COMMON_VIDEO_CORRUPTION_DETECTION_CONVERTERS_H_ +#define COMMON_VIDEO_CORRUPTION_DETECTION_CONVERTERS_H_ + +#include + +#include "api/transport/rtp/corruption_detection_message.h" +#include "common_video/frame_instrumentation_data.h" + +namespace webrtc { + +std::optional +ConvertCorruptionDetectionMessageToFrameInstrumentationData( + const CorruptionDetectionMessage& message, + int previous_sequence_index); +std::optional +ConvertCorruptionDetectionMessageToFrameInstrumentationSyncData( + const CorruptionDetectionMessage& message, + int previous_sequence_index); +std::optional +ConvertFrameInstrumentationDataToCorruptionDetectionMessage( + const FrameInstrumentationData& frame_instrumentation_data); +std::optional +ConvertFrameInstrumentationSyncDataToCorruptionDetectionMessage( + const FrameInstrumentationSyncData& frame_instrumentation_sync_data); +} // namespace webrtc + +#endif // COMMON_VIDEO_CORRUPTION_DETECTION_CONVERTERS_H_ diff --git a/common_video/corruption_detection_converters_unittest.cc b/common_video/corruption_detection_converters_unittest.cc new file mode 100644 index 0000000000..8b3372e653 --- /dev/null +++ b/common_video/corruption_detection_converters_unittest.cc @@ -0,0 +1,390 @@ +/* + * Copyright 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "common_video/corruption_detection_converters.h" + +#include +#include + +#include "api/transport/rtp/corruption_detection_message.h" +#include "common_video/frame_instrumentation_data.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::_; +using ::testing::ElementsAre; + +TEST(FrameInstrumentationDataToCorruptionDetectionMessageTest, + ConvertsValidData) { + FrameInstrumentationData data = {.sequence_index = 1, + .communicate_upper_bits = false, + .std_dev = 1.0, + .luma_error_threshold = 5, + .chroma_error_threshold = 5, + .sample_values = {1.0, 2.0, 3.0, 4.0, 5.0}}; + + std::optional message = + ConvertFrameInstrumentationDataToCorruptionDetectionMessage(data); + ASSERT_TRUE(message.has_value()); + EXPECT_EQ(message->sequence_index(), 1); + EXPECT_FALSE(message->interpret_sequence_index_as_most_significant_bits()); + EXPECT_EQ(message->std_dev(), 1.0); + EXPECT_EQ(message->luma_error_threshold(), 5); + EXPECT_EQ(message->chroma_error_threshold(), 5); + EXPECT_THAT(message->sample_values(), ElementsAre(1.0, 2.0, 3.0, 4.0, 5.0)); +} + +TEST(FrameInstrumentationDataToCorruptionDetectionMessageTest, + ReturnsNulloptWhenSequenceIndexIsNegative) { + FrameInstrumentationData data = {.sequence_index = -1, + .communicate_upper_bits = false, + .std_dev = 1.0, + .luma_error_threshold = 5, + .chroma_error_threshold = 5, + .sample_values = {1.0, 2.0, 3.0, 4.0, 5.0}}; + + std::optional message = + ConvertFrameInstrumentationDataToCorruptionDetectionMessage(data); + ASSERT_FALSE(message.has_value()); +} + +TEST(FrameInstrumentationDataToCorruptionDetectionMessageTest, + ReturnsNulloptWhenSequenceIndexIsTooLarge) { + // Sequence index must be at max 14 bits. + FrameInstrumentationData data = {.sequence_index = 0x4000, + .communicate_upper_bits = false, + .std_dev = 1.0, + .luma_error_threshold = 5, + .chroma_error_threshold = 5, + .sample_values = {1.0, 2.0, 3.0, 4.0, 5.0}}; + + std::optional message = + ConvertFrameInstrumentationDataToCorruptionDetectionMessage(data); + ASSERT_FALSE(message.has_value()); +} + +TEST(FrameInstrumentationDataToCorruptionDetectionMessageTest, + ReturnsNulloptWhenThereAreNoSampleValues) { + // FrameInstrumentationData must by definition have at least one sample value. 
+ FrameInstrumentationData data = {.sequence_index = 1, + .communicate_upper_bits = false, + .std_dev = 1.0, + .luma_error_threshold = 5, + .chroma_error_threshold = 5, + .sample_values = {}}; + + std::optional message = + ConvertFrameInstrumentationDataToCorruptionDetectionMessage(data); + ASSERT_FALSE(message.has_value()); +} + +TEST(FrameInstrumentationDataToCorruptionDetectionMessageTest, + ReturnsNulloptWhenNotSpecifyingSampleValues) { + FrameInstrumentationData data = {.sequence_index = 1, + .communicate_upper_bits = false, + .std_dev = 1.0, + .luma_error_threshold = 5, + .chroma_error_threshold = 5}; + + std::optional message = + ConvertFrameInstrumentationDataToCorruptionDetectionMessage(data); + ASSERT_FALSE(message.has_value()); +} + +TEST(FrameInstrumentationDataToCorruptionDetectionMessageTest, + ConvertsSequenceIndexWhenSetToUseUpperBits) { + FrameInstrumentationData data = {.sequence_index = 0b0000'0110'0000'0101, + .communicate_upper_bits = true, + .std_dev = 1.0, + .luma_error_threshold = 5, + .chroma_error_threshold = 5, + .sample_values = {1.0, 2.0, 3.0, 4.0, 5.0}}; + + std::optional message = + ConvertFrameInstrumentationDataToCorruptionDetectionMessage(data); + ASSERT_TRUE(message.has_value()); + EXPECT_EQ(message->sequence_index(), 0b0000'1100); + EXPECT_TRUE(message->interpret_sequence_index_as_most_significant_bits()); + EXPECT_EQ(message->std_dev(), 1.0); + EXPECT_EQ(message->luma_error_threshold(), 5); + EXPECT_EQ(message->chroma_error_threshold(), 5); + EXPECT_THAT(message->sample_values(), ElementsAre(1.0, 2.0, 3.0, 4.0, 5.0)); +} + +TEST(FrameInstrumentationDataToCorruptionDetectionMessageTest, + ConvertsSequenceIndexWhenSetToUseLowerBits) { + FrameInstrumentationData data = {.sequence_index = 0b0000'0110'0000'0101, + .communicate_upper_bits = false, + .std_dev = 1.0, + .luma_error_threshold = 5, + .chroma_error_threshold = 5, + .sample_values = {1.0, 2.0, 3.0, 4.0, 5.0}}; + + std::optional message = + ConvertFrameInstrumentationDataToCorruptionDetectionMessage(data); + ASSERT_TRUE(message.has_value()); + EXPECT_EQ(message->sequence_index(), 0b0000'0101); + EXPECT_FALSE(message->interpret_sequence_index_as_most_significant_bits()); + EXPECT_EQ(message->std_dev(), 1.0); + EXPECT_EQ(message->luma_error_threshold(), 5); + EXPECT_EQ(message->chroma_error_threshold(), 5); + EXPECT_THAT(message->sample_values(), ElementsAre(1.0, 2.0, 3.0, 4.0, 5.0)); +} + +TEST(FrameInstrumentationSyncDataToCorruptionDetectionMessageTest, + ConvertsValidSyncData) { + FrameInstrumentationSyncData data = {.sequence_index = 1, + .communicate_upper_bits = true}; + + std::optional message = + ConvertFrameInstrumentationSyncDataToCorruptionDetectionMessage(data); + ASSERT_TRUE(message.has_value()); + EXPECT_EQ(message->sequence_index(), 0); + EXPECT_TRUE(message->interpret_sequence_index_as_most_significant_bits()); +} + +#if GTEST_HAS_DEATH_TEST && RTC_DCHECK_IS_ON +TEST(FrameInstrumentationSyncDataToCorruptionDetectionMessageTest, + FailsWhenSetToNotCommunicateUpperBits) { + FrameInstrumentationSyncData data = {.sequence_index = 1, + .communicate_upper_bits = false}; + + EXPECT_DEATH( + ConvertFrameInstrumentationSyncDataToCorruptionDetectionMessage(data), _); +} +#endif // GTEST_HAS_DEATH_TEST + +TEST(FrameInstrumentationSyncDataToCorruptionDetectionMessageTest, + ReturnsNulloptWhenSyncSequenceIndexIsNegative) { + FrameInstrumentationSyncData data = {.sequence_index = -1, + .communicate_upper_bits = true}; + + std::optional message = + 
ConvertFrameInstrumentationSyncDataToCorruptionDetectionMessage(data); + ASSERT_FALSE(message.has_value()); +} + +TEST(FrameInstrumentationSyncDataToCorruptionDetectionMessageTest, + ReturnsNulloptWhenSyncSequenceIndexIsTooLarge) { + FrameInstrumentationSyncData data = {.sequence_index = 0x4000, + .communicate_upper_bits = true}; + + std::optional message = + ConvertFrameInstrumentationSyncDataToCorruptionDetectionMessage(data); + ASSERT_FALSE(message.has_value()); +} + +TEST(CorruptionDetectionMessageToFrameInstrumentationData, + FailWhenPreviousSequenceIndexIsNegative) { + std::vector sample_values = {1.0, 2.0, 3.0, 4.0, 5.0}; + std::optional message = + CorruptionDetectionMessage::Builder() + .WithSampleValues(sample_values) + .Build(); + ASSERT_TRUE(message.has_value()); + + EXPECT_FALSE( + ConvertCorruptionDetectionMessageToFrameInstrumentationData(*message, -1) + .has_value()); +} + +TEST(CorruptionDetectionMessageToFrameInstrumentationData, + FailWhenNoSampleValuesAreProvided) { + std::optional message = + CorruptionDetectionMessage::Builder().Build(); + ASSERT_TRUE(message.has_value()); + + EXPECT_FALSE( + ConvertCorruptionDetectionMessageToFrameInstrumentationData(*message, 0) + .has_value()); +} + +TEST(CorruptionDetectionMessageToFrameInstrumentationData, + IgnorePreviousSequenceIndexWhenSetToUpdateTheMostSignificantBits) { + std::vector sample_values = {1.0, 2.0, 3.0, 4.0, 5.0}; + std::optional message = + CorruptionDetectionMessage::Builder() + .WithSequenceIndex(11) + .WithInterpretSequenceIndexAsMostSignificantBits(true) + .WithSampleValues(sample_values) + .Build(); + ASSERT_TRUE(message.has_value()); + + std::optional data = + ConvertCorruptionDetectionMessageToFrameInstrumentationData(*message, 12); + + ASSERT_TRUE(data.has_value()); + EXPECT_EQ(data->sequence_index, 0b0101'1000'0000); +} + +TEST(CorruptionDetectionMessageToFrameInstrumentationData, + UseMessageSequenceIndexWhenHigherThanPrevious) { + std::vector sample_values = {1.0, 2.0, 3.0, 4.0, 5.0}; + std::optional message = + CorruptionDetectionMessage::Builder() + .WithSequenceIndex(11) + .WithInterpretSequenceIndexAsMostSignificantBits(false) + .WithSampleValues(sample_values) + .Build(); + ASSERT_TRUE(message.has_value()); + + std::optional data = + ConvertCorruptionDetectionMessageToFrameInstrumentationData(*message, 0); + + ASSERT_TRUE(data.has_value()); + EXPECT_EQ(data->sequence_index, 11); +} + +TEST(CorruptionDetectionMessageToFrameInstrumentationData, + IncreaseThePreviousIdxUntilLsbsAreEqualToTheUpdateWhenTheUpdateIsLsbs) { + std::vector sample_values = {1.0, 2.0, 3.0, 4.0, 5.0}; + std::optional message = + CorruptionDetectionMessage::Builder() + .WithSequenceIndex(11) + .WithInterpretSequenceIndexAsMostSignificantBits(false) + .WithSampleValues(sample_values) + .Build(); + ASSERT_TRUE(message.has_value()); + + std::optional data = + ConvertCorruptionDetectionMessageToFrameInstrumentationData(*message, + 1 + 128); + + ASSERT_TRUE(data.has_value()); + EXPECT_EQ(data->sequence_index, 11 + 128); +} + +TEST(CorruptionDetectionMessageToFrameInstrumentationData, + IgnoreIndexUpdateWhenTheLowerBitsSuppliedAreTheSameAsInThePreviousIndex) { + std::vector sample_values = {1.0, 2.0, 3.0, 4.0, 5.0}; + std::optional message = + CorruptionDetectionMessage::Builder() + .WithSequenceIndex(11) + .WithInterpretSequenceIndexAsMostSignificantBits(false) + .WithSampleValues(sample_values) + .Build(); + ASSERT_TRUE(message.has_value()); + + std::optional data = + 
ConvertCorruptionDetectionMessageToFrameInstrumentationData(*message, + 11 + 128); + + ASSERT_TRUE(data.has_value()); + EXPECT_EQ(data->sequence_index, 11 + 128); +} + +TEST( + CorruptionDetectionMessageToFrameInstrumentationData, + IncreaseTheMsbsByOneAndSetTheMessagesLsbWhenMessageLsbIsLowerThanPrevious) { + std::vector sample_values = {1.0, 2.0, 3.0, 4.0, 5.0}; + std::optional message = + CorruptionDetectionMessage::Builder() + .WithSequenceIndex(11) + .WithInterpretSequenceIndexAsMostSignificantBits(false) + .WithSampleValues(sample_values) + .Build(); + ASSERT_TRUE(message.has_value()); + + std::optional data = + ConvertCorruptionDetectionMessageToFrameInstrumentationData(*message, 12); + + ASSERT_TRUE(data.has_value()); + EXPECT_EQ(data->sequence_index, 11 + 128); +} + +TEST(CorruptionDetectionMessageToFrameInstrumentationData, ConvertAllFields) { + std::vector sample_values = {1.0, 2.0, 3.0, 4.0, 5.0}; + std::optional message = + CorruptionDetectionMessage::Builder() + .WithSequenceIndex(11) + .WithInterpretSequenceIndexAsMostSignificantBits(false) + .WithStdDev(1.2) + .WithLumaErrorThreshold(10) + .WithChromaErrorThreshold(10) + .WithSampleValues(sample_values) + .Build(); + ASSERT_TRUE(message.has_value()); + + std::optional data = + ConvertCorruptionDetectionMessageToFrameInstrumentationData(*message, 0); + + ASSERT_TRUE(data.has_value()); + EXPECT_EQ(data->sequence_index, 11); + EXPECT_FALSE(data->communicate_upper_bits); + EXPECT_NEAR(data->std_dev, 1.2, 0.024); // ~2% + EXPECT_EQ(data->luma_error_threshold, 10); + EXPECT_EQ(data->chroma_error_threshold, 10); + EXPECT_THAT(data->sample_values, ElementsAre(1.0, 2.0, 3.0, 4.0, 5.0)); +} + +TEST(CorruptionDetectionMessageToFrameInstrumentationSyncData, + FailWhenPreviousSequenceIndexIsNegative) { + std::optional message = + CorruptionDetectionMessage::Builder() + .WithInterpretSequenceIndexAsMostSignificantBits(true) + .Build(); + ASSERT_TRUE(message.has_value()); + + EXPECT_FALSE(ConvertCorruptionDetectionMessageToFrameInstrumentationSyncData( + *message, -1) + .has_value()); +} + +TEST(CorruptionDetectionMessageToFrameInstrumentationSyncData, + FailWhenSampleValuesArePresent) { + std::vector sample_values = {1.0, 2.0, 3.0, 4.0, 5.0}; + std::optional message = + CorruptionDetectionMessage::Builder() + .WithInterpretSequenceIndexAsMostSignificantBits(true) + .WithSampleValues(sample_values) + .Build(); + ASSERT_TRUE(message.has_value()); + + EXPECT_FALSE(ConvertCorruptionDetectionMessageToFrameInstrumentationSyncData( + *message, 0) + .has_value()); +} + +TEST(CorruptionDetectionMessageToFrameInstrumentationSyncData, + FailWhenSetToUpdateTheLowerBits) { + std::optional message = + CorruptionDetectionMessage::Builder() + .WithInterpretSequenceIndexAsMostSignificantBits(false) + .Build(); + ASSERT_TRUE(message.has_value()); + + EXPECT_FALSE(ConvertCorruptionDetectionMessageToFrameInstrumentationSyncData( + *message, 0) + .has_value()); +} + +TEST(CorruptionDetectionMessageToFrameInstrumentationSyncData, + IgnorePreviousSequenceIndex) { + std::optional message = + CorruptionDetectionMessage::Builder() + .WithSequenceIndex(11) + .WithInterpretSequenceIndexAsMostSignificantBits(true) + .Build(); + ASSERT_TRUE(message.has_value()); + + std::optional data = + ConvertCorruptionDetectionMessageToFrameInstrumentationSyncData(*message, + 12); + + ASSERT_TRUE(data.has_value()); + EXPECT_EQ(data->sequence_index, 0b0101'1000'0000); + EXPECT_TRUE(data->communicate_upper_bits); +} + +} // namespace +} // namespace webrtc diff --git 
a/common_video/frame_instrumentation_data.h b/common_video/frame_instrumentation_data.h new file mode 100644 index 0000000000..290688d473 --- /dev/null +++ b/common_video/frame_instrumentation_data.h @@ -0,0 +1,35 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef COMMON_VIDEO_FRAME_INSTRUMENTATION_DATA_H_ +#define COMMON_VIDEO_FRAME_INSTRUMENTATION_DATA_H_ + +#include + +namespace webrtc { + +// TODO: bugs.webrtc.org/358039777 - Error handling: negative values etc. +struct FrameInstrumentationSyncData { + int sequence_index; + bool communicate_upper_bits; +}; + +struct FrameInstrumentationData { + int sequence_index; + bool communicate_upper_bits; + double std_dev; + int luma_error_threshold; + int chroma_error_threshold; + std::vector sample_values; +}; + +} // namespace webrtc + +#endif // COMMON_VIDEO_FRAME_INSTRUMENTATION_DATA_H_ diff --git a/common_video/frame_rate_estimator.cc b/common_video/frame_rate_estimator.cc index 4c5a341ac0..c83f268c6c 100644 --- a/common_video/frame_rate_estimator.cc +++ b/common_video/frame_rate_estimator.cc @@ -22,21 +22,20 @@ void FrameRateEstimator::OnFrame(Timestamp time) { frame_times_.push_back(time); } -absl::optional FrameRateEstimator::GetAverageFps() const { +std::optional FrameRateEstimator::GetAverageFps() const { if (frame_times_.size() < 2) { - return absl::nullopt; + return std::nullopt; } TimeDelta time_span = frame_times_.back() - frame_times_.front(); if (time_span < TimeDelta::Micros(1)) { - return absl::nullopt; + return std::nullopt; } TimeDelta avg_frame_interval = time_span / (frame_times_.size() - 1); - return static_cast(rtc::kNumMicrosecsPerSec) / - avg_frame_interval.us(); + return static_cast(kNumMicrosecsPerSec) / avg_frame_interval.us(); } -absl::optional FrameRateEstimator::GetAverageFps(Timestamp now) { +std::optional FrameRateEstimator::GetAverageFps(Timestamp now) { CullOld(now); return GetAverageFps(); } diff --git a/common_video/frame_rate_estimator.h b/common_video/frame_rate_estimator.h index 95219a534d..55e6e209b1 100644 --- a/common_video/frame_rate_estimator.h +++ b/common_video/frame_rate_estimator.h @@ -12,8 +12,8 @@ #define COMMON_VIDEO_FRAME_RATE_ESTIMATOR_H_ #include +#include -#include "absl/types/optional.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -41,10 +41,10 @@ class FrameRateEstimator { void OnFrame(Timestamp time); // Get the current average FPS, based on the frames currently in the window. - absl::optional GetAverageFps() const; + std::optional GetAverageFps() const; // Move the window so it ends at `now`, and return the new fps estimate. - absl::optional GetAverageFps(Timestamp now); + std::optional GetAverageFps(Timestamp now); // Completely clear the averaging window. void Reset(); diff --git a/common_video/framerate_controller.cc b/common_video/framerate_controller.cc index 23e9c70cbd..771a369bca 100644 --- a/common_video/framerate_controller.cc +++ b/common_video/framerate_controller.cc @@ -41,7 +41,7 @@ bool FramerateController::ShouldDropFrame(int64_t in_timestamp_ns) { // If `max_framerate_` is not set (i.e. maxdouble), `frame_interval_ns` is // rounded to 0. 
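// Editor's note (numeric example, not part of the original change): with
// `kNumNanosecsPerSec` = 10^9, a 30 fps cap gives `frame_interval_ns` =
// 33'333'333, whereas the "unset" value std::numeric_limits<double>::max()
// makes the division truncate to 0, so the early return below disables
// throttling.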
- int64_t frame_interval_ns = rtc::kNumNanosecsPerSec / max_framerate_; + int64_t frame_interval_ns = kNumNanosecsPerSec / max_framerate_; if (frame_interval_ns <= 0) { // Frame rate throttling not enabled. return false; @@ -71,7 +71,7 @@ bool FramerateController::ShouldDropFrame(int64_t in_timestamp_ns) { void FramerateController::Reset() { max_framerate_ = std::numeric_limits::max(); - next_frame_timestamp_ns_ = absl::nullopt; + next_frame_timestamp_ns_ = std::nullopt; } void FramerateController::KeepFrame(int64_t in_timestamp_ns) { @@ -79,7 +79,7 @@ void FramerateController::KeepFrame(int64_t in_timestamp_ns) { if (max_framerate_ < kMinFramerate) return; - int64_t frame_interval_ns = rtc::kNumNanosecsPerSec / max_framerate_; + int64_t frame_interval_ns = kNumNanosecsPerSec / max_framerate_; if (next_frame_timestamp_ns_) *next_frame_timestamp_ns_ += frame_interval_ns; } diff --git a/common_video/framerate_controller.h b/common_video/framerate_controller.h index 371ffd419f..44e2e672cb 100644 --- a/common_video/framerate_controller.h +++ b/common_video/framerate_controller.h @@ -13,7 +13,7 @@ #include -#include "absl/types/optional.h" +#include namespace webrtc { @@ -38,7 +38,7 @@ class FramerateController { private: double max_framerate_; - absl::optional next_frame_timestamp_ns_; + std::optional next_frame_timestamp_ns_; }; } // namespace webrtc diff --git a/common_video/framerate_controller_unittest.cc b/common_video/framerate_controller_unittest.cc index 690076ca61..a0a323c654 100644 --- a/common_video/framerate_controller_unittest.cc +++ b/common_video/framerate_controller_unittest.cc @@ -24,12 +24,12 @@ constexpr int kNumFrames = 60; class FramerateControllerTest : public ::testing::Test { protected: int64_t GetNextTimestampNs() { - int64_t interval_us = rtc::kNumMicrosecsPerSec / kInputFps; + int64_t interval_us = kNumMicrosecsPerSec / kInputFps; next_timestamp_us_ += interval_us; - return next_timestamp_us_ * rtc::kNumNanosecsPerMicrosec; + return next_timestamp_us_ * kNumNanosecsPerMicrosec; } - int64_t next_timestamp_us_ = rtc::TimeMicros(); + int64_t next_timestamp_us_ = TimeMicros(); FramerateController controller_; }; @@ -97,7 +97,7 @@ TEST_F(FramerateControllerTest, NoFrameDroppedForLargeTimestampOffset) { const int64_t kLargeOffsetNs = -987654321LL * 1000; EXPECT_FALSE(controller_.ShouldDropFrame(kLargeOffsetNs)); - int64_t input_interval_ns = rtc::kNumNanosecsPerSec / kInputFps; + int64_t input_interval_ns = kNumNanosecsPerSec / kInputFps; EXPECT_FALSE(controller_.ShouldDropFrame(kLargeOffsetNs + input_interval_ns)); } @@ -105,7 +105,7 @@ TEST_F(FramerateControllerTest, NoFrameDroppedIfInputWithJitterRequested) { controller_.SetMaxFramerate(kInputFps); // Input fps with jitter. 
- int64_t input_interval_ns = rtc::kNumNanosecsPerSec / kInputFps; + int64_t input_interval_ns = kNumNanosecsPerSec / kInputFps; EXPECT_FALSE(controller_.ShouldDropFrame(input_interval_ns * 0 / 10)); EXPECT_FALSE(controller_.ShouldDropFrame(input_interval_ns * 10 / 10 - 1)); EXPECT_FALSE(controller_.ShouldDropFrame(input_interval_ns * 25 / 10)); diff --git a/common_video/generic_frame_descriptor/BUILD.gn b/common_video/generic_frame_descriptor/BUILD.gn index ab97e887f2..79314f8ef9 100644 --- a/common_video/generic_frame_descriptor/BUILD.gn +++ b/common_video/generic_frame_descriptor/BUILD.gn @@ -19,10 +19,8 @@ rtc_library("generic_frame_descriptor") { "../../api/transport/rtp:dependency_descriptor", "../../api/video:video_codec_constants", "../../rtc_base:checks", - ] - absl_deps = [ + "../../rtc_base/system:rtc_export", "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } diff --git a/common_video/generic_frame_descriptor/generic_frame_info.h b/common_video/generic_frame_descriptor/generic_frame_info.h index 19f413b5d4..30db423e37 100644 --- a/common_video/generic_frame_descriptor/generic_frame_info.h +++ b/common_video/generic_frame_descriptor/generic_frame_info.h @@ -19,6 +19,7 @@ #include "absl/strings/string_view.h" #include "api/transport/rtp/dependency_descriptor.h" #include "api/video/video_codec_constants.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -32,7 +33,7 @@ struct CodecBufferUsage { bool updated = false; }; -struct GenericFrameInfo : public FrameDependencyTemplate { +struct RTC_EXPORT GenericFrameInfo : public FrameDependencyTemplate { class Builder; GenericFrameInfo(); diff --git a/common_video/h264/h264_bitstream_parser.cc b/common_video/h264/h264_bitstream_parser.cc index 2311d0d2ee..1d2d0496a5 100644 --- a/common_video/h264/h264_bitstream_parser.cc +++ b/common_video/h264/h264_bitstream_parser.cc @@ -31,15 +31,13 @@ H264BitstreamParser::H264BitstreamParser() = default; H264BitstreamParser::~H264BitstreamParser() = default; H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu( - const uint8_t* source, - size_t source_length, + ArrayView source, uint8_t nalu_type) { if (!sps_ || !pps_) return kInvalidStream; - last_slice_qp_delta_ = absl::nullopt; - const std::vector slice_rbsp = - H264::ParseRbsp(source, source_length); + last_slice_qp_delta_ = std::nullopt; + const std::vector slice_rbsp = H264::ParseRbsp(source); if (slice_rbsp.size() < H264::kNaluTypeSize) return kInvalidStream; @@ -51,6 +49,11 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu( bool is_idr = (source[0] & 0x0F) == H264::NaluType::kIdr; uint8_t nal_ref_idc = (source[0] & 0x60) >> 5; + uint32_t num_ref_idx_l0_active_minus1 = + pps_->num_ref_idx_l0_default_active_minus1; + uint32_t num_ref_idx_l1_active_minus1 = + pps_->num_ref_idx_l1_default_active_minus1; + // first_mb_in_slice: ue(v) slice_reader.ReadExponentialGolomb(); // slice_type: ue(v) @@ -114,10 +117,18 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu( // num_ref_idx_active_override_flag: u(1) if (slice_reader.Read()) { // num_ref_idx_l0_active_minus1: ue(v) - slice_reader.ReadExponentialGolomb(); + num_ref_idx_l0_active_minus1 = slice_reader.ReadExponentialGolomb(); + if (!slice_reader.Ok() || + num_ref_idx_l0_active_minus1 > H264::kMaxReferenceIndex) { + return kInvalidStream; + } if 
(slice_type == H264::SliceType::kB) { // num_ref_idx_l1_active_minus1: ue(v) - slice_reader.ReadExponentialGolomb(); + num_ref_idx_l1_active_minus1 = slice_reader.ReadExponentialGolomb(); + if (!slice_reader.Ok() || + num_ref_idx_l1_active_minus1 > H264::kMaxReferenceIndex) { + return kInvalidStream; + } } } break; @@ -180,17 +191,67 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu( if (!slice_reader.Ok()) { return kInvalidStream; } - // TODO(pbos): Do we need support for pred_weight_table()? if ((pps_->weighted_pred_flag && (slice_type == H264::SliceType::kP || slice_type == H264::SliceType::kSp)) || (pps_->weighted_bipred_idc == 1 && slice_type == H264::SliceType::kB)) { - RTC_LOG(LS_ERROR) << "Streams with pred_weight_table unsupported."; - return kUnsupportedStream; + // pred_weight_table() + // luma_log2_weight_denom: ue(v) + slice_reader.ReadExponentialGolomb(); + + // If separate_colour_plane_flag is equal to 0, ChromaArrayType is set equal + // to chroma_format_idc. Otherwise(separate_colour_plane_flag is equal to + // 1), ChromaArrayType is set equal to 0. + uint8_t chroma_array_type = + sps_->separate_colour_plane_flag == 0 ? sps_->chroma_format_idc : 0; + + if (chroma_array_type != 0) { + // chroma_log2_weight_denom: ue(v) + slice_reader.ReadExponentialGolomb(); + } + + for (uint32_t i = 0; i <= num_ref_idx_l0_active_minus1; i++) { + // luma_weight_l0_flag 2 u(1) + if (slice_reader.Read()) { + // luma_weight_l0[i] 2 se(v) + slice_reader.ReadExponentialGolomb(); + // luma_offset_l0[i] 2 se(v) + slice_reader.ReadExponentialGolomb(); + } + if (chroma_array_type != 0) { + // chroma_weight_l0_flag: u(1) + if (slice_reader.Read()) { + for (uint8_t j = 0; j < 2; j++) { + // chroma_weight_l0[i][j] 2 se(v) + slice_reader.ReadExponentialGolomb(); + // chroma_offset_l0[i][j] 2 se(v) + slice_reader.ReadExponentialGolomb(); + } + } + } + } + if (slice_type % 5 == 1) { + for (uint32_t i = 0; i <= num_ref_idx_l1_active_minus1; i++) { + // luma_weight_l1_flag: u(1) + if (slice_reader.Read()) { + // luma_weight_l1[i] 2 se(v) + slice_reader.ReadExponentialGolomb(); + // luma_offset_l1[i] 2 se(v) + slice_reader.ReadExponentialGolomb(); + } + if (chroma_array_type != 0) { + // chroma_weight_l1_flag: u(1) + if (slice_reader.Read()) { + for (uint8_t j = 0; j < 2; j++) { + // chroma_weight_l1[i][j] 2 se(v) + slice_reader.ReadExponentialGolomb(); + // chroma_offset_l1[i][j] 2 se(v) + slice_reader.ReadExponentialGolomb(); + } + } + } + } + } } - // if ((weighted_pred_flag && (slice_type == P || slice_type == SP)) || - // (weighted_bipred_idc == 1 && slice_type == B)) { - // pred_weight_table() - // } if (nal_ref_idc != 0) { // dec_ref_pic_marking(): if (is_idr) { @@ -247,51 +308,52 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu( return kOk; } -void H264BitstreamParser::ParseSlice(const uint8_t* slice, size_t length) { +void H264BitstreamParser::ParseSlice(ArrayView slice) { + if (slice.empty()) { + return; + } H264::NaluType nalu_type = H264::ParseNaluType(slice[0]); switch (nalu_type) { case H264::NaluType::kSps: { - sps_ = SpsParser::ParseSps(slice + H264::kNaluTypeSize, - length - H264::kNaluTypeSize); + sps_ = SpsParser::ParseSps(slice.subview(H264::kNaluTypeSize)); if (!sps_) RTC_DLOG(LS_WARNING) << "Unable to parse SPS from H264 bitstream."; break; } case H264::NaluType::kPps: { - pps_ = PpsParser::ParsePps(slice + H264::kNaluTypeSize, - length - H264::kNaluTypeSize); + pps_ = PpsParser::ParsePps(slice.subview(H264::kNaluTypeSize)); if 
(!pps_) RTC_DLOG(LS_WARNING) << "Unable to parse PPS from H264 bitstream."; break; } case H264::NaluType::kAud: + case H264::NaluType::kFiller: case H264::NaluType::kSei: case H264::NaluType::kPrefix: break; // Ignore these nalus, as we don't care about their contents. default: - Result res = ParseNonParameterSetNalu(slice, length, nalu_type); + Result res = ParseNonParameterSetNalu(slice, nalu_type); if (res != kOk) - RTC_DLOG(LS_INFO) << "Failed to parse bitstream. Error: " << res; + RTC_DLOG(LS_INFO) << "Failed to parse bitstream. NAL type " + << static_cast(nalu_type) << ", error: " << res; break; } } -void H264BitstreamParser::ParseBitstream( - rtc::ArrayView bitstream) { - std::vector nalu_indices = - H264::FindNaluIndices(bitstream.data(), bitstream.size()); +void H264BitstreamParser::ParseBitstream(ArrayView bitstream) { + std::vector nalu_indices = H264::FindNaluIndices(bitstream); for (const H264::NaluIndex& index : nalu_indices) - ParseSlice(bitstream.data() + index.payload_start_offset, - index.payload_size); + ParseSlice( + bitstream.subview(index.payload_start_offset, index.payload_size)); } -absl::optional H264BitstreamParser::GetLastSliceQp() const { +std::optional H264BitstreamParser::GetLastSliceQp() const { if (!last_slice_qp_delta_ || !pps_) - return absl::nullopt; + return std::nullopt; const int qp = 26 + pps_->pic_init_qp_minus26 + *last_slice_qp_delta_; if (qp < kMinQpValue || qp > kMaxQpValue) { RTC_LOG(LS_ERROR) << "Parsed invalid QP from bitstream."; - return absl::nullopt; + return std::nullopt; } return qp; } diff --git a/common_video/h264/h264_bitstream_parser.h b/common_video/h264/h264_bitstream_parser.h index 05427825ac..e308fe1c10 100644 --- a/common_video/h264/h264_bitstream_parser.h +++ b/common_video/h264/h264_bitstream_parser.h @@ -13,7 +13,8 @@ #include #include -#include "absl/types/optional.h" +#include + #include "api/video_codecs/bitstream_parser.h" #include "common_video/h264/pps_parser.h" #include "common_video/h264/sps_parser.h" @@ -31,8 +32,8 @@ class H264BitstreamParser : public BitstreamParser { H264BitstreamParser(); ~H264BitstreamParser() override; - void ParseBitstream(rtc::ArrayView bitstream) override; - absl::optional GetLastSliceQp() const override; + void ParseBitstream(ArrayView bitstream) override; + std::optional GetLastSliceQp() const override; protected: enum Result { @@ -40,17 +41,16 @@ class H264BitstreamParser : public BitstreamParser { kInvalidStream, kUnsupportedStream, }; - void ParseSlice(const uint8_t* slice, size_t length); - Result ParseNonParameterSetNalu(const uint8_t* source, - size_t source_length, + void ParseSlice(ArrayView slice); + Result ParseNonParameterSetNalu(ArrayView source, uint8_t nalu_type); // SPS/PPS state, updated when parsing new SPS/PPS, used to parse slices. - absl::optional sps_; - absl::optional pps_; + std::optional sps_; + std::optional pps_; // Last parsed slice QP. 
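As a usage sketch of the migrated parser interface (not from the patch; it assumes a complete Annex B access unit and the webrtc::ArrayView spelling that this change itself relies on):

#include <cstdint>
#include <optional>

#include "api/array_view.h"
#include "common_video/h264/h264_bitstream_parser.h"

// Returns the QP of the last slice in `access_unit`, if it can be determined.
std::optional<int> LastSliceQp(webrtc::ArrayView<const uint8_t> access_unit) {
  webrtc::H264BitstreamParser parser;
  // SPS/PPS state accumulates inside the parser, so parameter sets seen in
  // earlier calls on the same instance are reused for later slices.
  parser.ParseBitstream(access_unit);
  return parser.GetLastSliceQp();
}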
- absl::optional last_slice_qp_delta_; + std::optional last_slice_qp_delta_; }; } // namespace webrtc diff --git a/common_video/h264/h264_bitstream_parser_unittest.cc b/common_video/h264/h264_bitstream_parser_unittest.cc index 3f4f202af2..00186a6a34 100644 --- a/common_video/h264/h264_bitstream_parser_unittest.cc +++ b/common_video/h264/h264_bitstream_parser_unittest.cc @@ -44,6 +44,70 @@ uint8_t kH264BitstreamNextImageSliceChunkCabac[] = { 0x70, 0xbf, 0xc1, 0x4a, 0x16, 0x8f, 0x51, 0xf4, 0xca, 0xfb, 0xa3, 0x65, }; +uint8_t kH264BitstreamWeightedPred[] = { + 0x00, 0x00, 0x00, 0x01, 0x67, 0x64, 0x00, 0x28, 0xac, 0xb4, 0x03, 0xc0, + 0x11, 0x3f, 0x2e, 0x02, 0xd4, 0x04, 0x04, 0x05, 0x00, 0x00, 0x03, 0x00, + 0x01, 0x00, 0x00, 0x03, 0x00, 0x30, 0x8f, 0x18, 0x32, 0xa0, 0x00, 0x00, + 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x68, 0xef, 0x3c, 0xb0, 0x00, 0x00, + 0x00, 0xc0, 0x00, 0x00, 0x00, 0x01, 0x41, 0x9a, 0x26, 0x21, 0xf7, 0xff, + 0xfe, 0x9e, 0x10, 0x00, 0x00, 0x08, 0x78, 0x00, 0x00, 0x00, 0x12}; + +// First 4 P frames of CVWP1_TOSHIBA_E test file. +uint8_t H264BitstreamCVWP1SPS[] = {0x00, 0x00, 0x00, 0x01, 0x27, 0x4d, 0x40, + 0x14, 0xd9, 0x81, 0x60, 0x94, 0x40}; + +uint8_t H264BitstreamCVWP1PFrame1[] = { + 0x00, 0x00, 0x00, 0x01, 0x28, 0xcf, 0x1b, 0x88, 0x00, 0x00, 0x00, + 0x01, 0x21, 0x9a, 0x21, 0x8f, 0x02, 0xd8, 0x1b, 0xe0, 0x2c, 0xc3, + 0x80, 0x20, 0x00, 0xe4, 0xcd, 0x72, 0xfe, 0x1c, 0xfc, 0x2a, 0x00, + 0x02, 0x00, 0x26, 0x09, 0x04, 0xc1, 0x38, 0xe2, 0x9b, 0xcc, 0x60, + 0x54, 0xee, 0x62, 0x6b, 0x00, 0x28, 0x86, 0xce, 0x81, 0x0f, 0xd2, + 0x17, 0x26, 0x0d, 0x2f, 0x1c, 0x1d, 0xe3, 0x80, 0x01}; + +uint8_t H264BitstreamCVWP1PFrame2[] = { + 0x00, 0x00, 0x00, 0x01, 0x28, 0xca, 0xc6, 0xe2, 0x00, 0x00, 0x00, + 0x01, 0x21, 0x9a, 0x41, 0xcb, 0x01, 0x8e, 0x02, 0x76, 0x28, 0x68, + 0x20, 0x01, 0x9a, 0x33, 0x60, 0x58, 0xc3, 0x0d, 0x7c, 0x32, 0x00, + 0x02, 0x00, 0x7c, 0x5d, 0xf7, 0x22, 0x6c, 0x3d, 0xa3, 0xcc, 0x60, + 0x5a, 0x3d, 0x98, 0x3b, 0xf0, 0x14, 0x48, 0x1b, 0xa0, 0xdf, 0x69, + 0xfc, 0xf2, 0x66, 0x21, 0x4d, 0x72, 0x99, 0xc2, 0x1c}; + +uint8_t H264BitstreamCVWP1PFrame3[] = { + 0x00, 0x00, 0x00, 0x01, 0x28, 0xcb, 0xc6, 0xe2, 0x00, 0x00, 0x00, + 0x01, 0x21, 0x9a, 0x61, 0xcf, 0x04, 0xc0, 0x24, 0x20, 0x33, 0xc0, + 0x5d, 0x80, 0x80, 0x05, 0x08, 0x0a, 0xb0, 0x30, 0x81, 0xf8, 0x0d, + 0x70, 0x13, 0xa0, 0x31, 0x8e, 0x86, 0x94, 0x6c, 0x43, 0xbb, 0x58, + 0x44, 0xc2, 0x41, 0x7c, 0x92, 0x04, 0x7e, 0x9f, 0xbf, 0x01, 0xe9, + 0xab, 0x53, 0xfe, 0x8f, 0x1c, 0x00, 0x04, 0x1f, 0x23}; + +uint8_t H264BitstreamCVWP1PFrame4[] = { + 0x00, 0x00, 0x00, 0x01, 0x28, 0xc9, 0x31, 0xb8, 0x80, 0x00, 0x00, + 0x00, 0x01, 0x21, 0x9a, 0x81, 0xe1, 0x04, 0xe0, 0x4f, 0x0f, 0x12, + 0xc6, 0x58, 0x74, 0x34, 0x06, 0x73, 0x9f, 0x43, 0xa7, 0xd0, 0x3c, + 0x9c, 0x9c, 0x92, 0x4f, 0x84, 0x4f, 0xd6, 0x36, 0x63, 0xff, 0xa0, + 0x5b, 0x1c, 0x6f, 0x01, 0x0b, 0xc2, 0x5e, 0x7b, 0xb0, 0xd7, 0x8f, + 0x19, 0x70, 0x81, 0xfa, 0x93, 0x4d, 0x48, 0x4f, 0xd2}; + +// First 2 B frames of CVWP2_TOSHIBA_E test file. 
+uint8_t H264BitstreamCVWP2SPS[] = {0x00, 0x00, 0x00, 0x01, 0x27, 0x4d, 0x40, + 0x14, 0xec, 0xc0, 0xb0, 0x4a, 0x20}; + +uint8_t H264BitstreamCVWP2BFrame1[] = { + 0x00, 0x00, 0x00, 0x01, 0x28, 0xce, 0x1b, 0x88, 0x00, 0x00, 0x00, + 0x01, 0x01, 0x9a, 0x3e, 0x19, 0x69, 0xa1, 0xc4, 0x1e, 0x5d, 0xea, + 0x84, 0x1c, 0x10, 0x65, 0x87, 0xc0, 0x25, 0x1b, 0x6d, 0x1e, 0xcf, + 0xf9, 0x8d, 0xf1, 0x2f, 0xec, 0xf8, 0xc2, 0x07, 0xfe, 0x02, 0x27, + 0xec, 0xcb, 0x74, 0x75, 0x59, 0xd5, 0x6e, 0xc0, 0x01, 0x4b, 0xb2, + 0xe7, 0x68, 0xfe, 0xef, 0xaf, 0xb6, 0x76, 0xc6, 0xc5}; + +uint8_t H264BitstreamCVWP2BFrame2[] = { + 0x00, 0x00, 0x00, 0x01, 0x28, 0xce, 0x1b, 0x88, 0x00, 0x00, 0x00, + 0x01, 0x01, 0x9a, 0x3e, 0x19, 0x69, 0xa1, 0xc4, 0x1e, 0x5d, 0xea, + 0x84, 0x1c, 0x10, 0x65, 0x87, 0xc0, 0x25, 0x1b, 0x6d, 0x1e, 0xcf, + 0xf9, 0x8d, 0xf1, 0x2f, 0xec, 0xf8, 0xc2, 0x07, 0xfe, 0x02, 0x27, + 0xec, 0xcb, 0x74, 0x75, 0x59, 0xd5, 0x6e, 0xc0, 0x01, 0x4b, 0xb2, + 0xe7, 0x68, 0xfe, 0xef, 0xaf, 0xb6, 0x76, 0xc6, 0xc5}; + TEST(H264BitstreamParserTest, ReportsNoQpWithoutParsedSlices) { H264BitstreamParser h264_parser; EXPECT_FALSE(h264_parser.GetLastSliceQp().has_value()); @@ -58,7 +122,7 @@ TEST(H264BitstreamParserTest, ReportsNoQpWithOnlyParsedPpsAndSpsSlices) { TEST(H264BitstreamParserTest, ReportsLastSliceQpForImageSlices) { H264BitstreamParser h264_parser; h264_parser.ParseBitstream(kH264BitstreamChunk); - absl::optional qp = h264_parser.GetLastSliceQp(); + std::optional qp = h264_parser.GetLastSliceQp(); ASSERT_TRUE(qp.has_value()); EXPECT_EQ(35, *qp); @@ -76,9 +140,60 @@ TEST(H264BitstreamParserTest, ReportsLastSliceQpForCABACImageSlices) { // Parse an additional image slice. h264_parser.ParseBitstream(kH264BitstreamNextImageSliceChunkCabac); - absl::optional qp = h264_parser.GetLastSliceQp(); + std::optional qp = h264_parser.GetLastSliceQp(); ASSERT_TRUE(qp.has_value()); EXPECT_EQ(24, *qp); } +TEST(H264BitstreamParserTest, ReportsLastSliceQpForWeightedPredSlices) { + H264BitstreamParser h264_parser; + h264_parser.ParseBitstream(kH264BitstreamWeightedPred); + + std::optional qp = h264_parser.GetLastSliceQp(); + ASSERT_TRUE(qp.has_value()); + EXPECT_EQ(11, *qp); +} + +TEST(H264BitstreamParserTest, ReportsLastSliceQpForWeightedPredSlicesL0Active) { + H264BitstreamParser h264_parser; + std::optional qp; + h264_parser.ParseBitstream(H264BitstreamCVWP1SPS); + + h264_parser.ParseBitstream(H264BitstreamCVWP1PFrame1); + qp = h264_parser.GetLastSliceQp(); + ASSERT_TRUE(qp.has_value()); + EXPECT_EQ(25, *qp); + + h264_parser.ParseBitstream(H264BitstreamCVWP1PFrame2); + qp = h264_parser.GetLastSliceQp(); + ASSERT_TRUE(qp.has_value()); + EXPECT_EQ(25, *qp); + + h264_parser.ParseBitstream(H264BitstreamCVWP1PFrame3); + qp = h264_parser.GetLastSliceQp(); + ASSERT_TRUE(qp.has_value()); + EXPECT_EQ(25, *qp); + + h264_parser.ParseBitstream(H264BitstreamCVWP1PFrame4); + qp = h264_parser.GetLastSliceQp(); + ASSERT_TRUE(qp.has_value()); + EXPECT_EQ(25, *qp); +} + +TEST(H264BitstreamParserTest, ReportsLastSliceQpForWeightedPredSlicesL1Active) { + H264BitstreamParser h264_parser; + std::optional qp; + h264_parser.ParseBitstream(H264BitstreamCVWP2SPS); + + h264_parser.ParseBitstream(H264BitstreamCVWP2BFrame1); + qp = h264_parser.GetLastSliceQp(); + ASSERT_TRUE(qp.has_value()); + EXPECT_EQ(25, *qp); + + h264_parser.ParseBitstream(H264BitstreamCVWP2BFrame1); + qp = h264_parser.GetLastSliceQp(); + ASSERT_TRUE(qp.has_value()); + EXPECT_EQ(25, *qp); +} + } // namespace webrtc diff --git a/common_video/h264/h264_common.cc 
b/common_video/h264/h264_common.cc index 06d94e0305..5444fcd6eb 100644 --- a/common_video/h264/h264_common.cc +++ b/common_video/h264/h264_common.cc @@ -17,19 +17,18 @@ namespace H264 { const uint8_t kNaluTypeMask = 0x1F; -std::vector FindNaluIndices(const uint8_t* buffer, - size_t buffer_size) { +std::vector FindNaluIndices(ArrayView buffer) { // This is sorta like Boyer-Moore, but with only the first optimization step: // given a 3-byte sequence we're looking at, if the 3rd byte isn't 1 or 0, // skip ahead to the next 3-byte sequence. 0s and 1s are relatively rare, so // this will skip the majority of reads/checks. std::vector sequences; - if (buffer_size < kNaluShortStartSequenceSize) + if (buffer.size() < kNaluShortStartSequenceSize) return sequences; static_assert(kNaluShortStartSequenceSize >= 2, "kNaluShortStartSequenceSize must be larger or equals to 2"); - const size_t end = buffer_size - kNaluShortStartSequenceSize; + const size_t end = buffer.size() - kNaluShortStartSequenceSize; for (size_t i = 0; i < end;) { if (buffer[i + 2] > 1) { i += 3; @@ -57,7 +56,7 @@ std::vector FindNaluIndices(const uint8_t* buffer, // Update length of last entry, if any. auto it = sequences.rbegin(); if (it != sequences.rend()) - it->payload_size = buffer_size - it->payload_start_offset; + it->payload_size = buffer.size() - it->payload_start_offset; return sequences; } @@ -66,16 +65,16 @@ NaluType ParseNaluType(uint8_t data) { return static_cast(data & kNaluTypeMask); } -std::vector ParseRbsp(const uint8_t* data, size_t length) { +std::vector ParseRbsp(ArrayView data) { std::vector out; - out.reserve(length); + out.reserve(data.size()); - for (size_t i = 0; i < length;) { + for (size_t i = 0; i < data.size();) { // Be careful about over/underflow here. byte_length_ - 3 can underflow, and // i + 3 can overflow, but byte_length_ - i can't, because i < byte_length_ // above, and that expression will produce the number of bytes left in // the stream including the byte at i. - if (length - i >= 3 && !data[i] && !data[i + 1] && data[i + 2] == 3) { + if (data.size() - i >= 3 && !data[i] && !data[i + 1] && data[i + 2] == 3) { // Two rbsp bytes. out.push_back(data[i++]); out.push_back(data[i++]); @@ -89,14 +88,13 @@ std::vector ParseRbsp(const uint8_t* data, size_t length) { return out; } -void WriteRbsp(const uint8_t* bytes, size_t length, rtc::Buffer* destination) { +void WriteRbsp(ArrayView bytes, Buffer* destination) { static const uint8_t kZerosInStartSequence = 2; static const uint8_t kEmulationByte = 0x03u; size_t num_consecutive_zeros = 0; - destination->EnsureCapacity(destination->size() + length); + destination->EnsureCapacity(destination->size() + bytes.size()); - for (size_t i = 0; i < length; ++i) { - uint8_t byte = bytes[i]; + for (uint8_t byte : bytes) { if (byte <= kEmulationByte && num_consecutive_zeros >= kZerosInStartSequence) { // Need to escape. diff --git a/common_video/h264/h264_common.h b/common_video/h264/h264_common.h index 0b1843ee38..e197fb4407 100644 --- a/common_video/h264/h264_common.h +++ b/common_video/h264/h264_common.h @@ -17,6 +17,7 @@ #include #include "rtc_base/buffer.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -32,6 +33,9 @@ const size_t kNaluShortStartSequenceSize = 3; // The size of the NALU type byte (1). const size_t kNaluTypeSize = 1; +// Maximum reference index for reference pictures. 
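A small sketch of the span-based RBSP helpers (not from the patch; it assumes the webrtc::Buffer and webrtc::ArrayView spellings used throughout this change):

#include <cstdint>
#include <vector>

#include "api/array_view.h"
#include "common_video/h264/h264_common.h"
#include "rtc_base/buffer.h"

// Unescapes a NAL unit payload and then re-applies emulation prevention.
webrtc::Buffer ReEscapePayload(webrtc::ArrayView<const uint8_t> payload) {
  // Strip emulation bytes: 00 00 03 xx becomes 00 00 xx.
  std::vector<uint8_t> rbsp = webrtc::H264::ParseRbsp(payload);
  // Re-insert emulation bytes so the payload cannot mimic a start code.
  webrtc::Buffer escaped;
  webrtc::H264::WriteRbsp(rbsp, &escaped);
  return escaped;
}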
+constexpr int kMaxReferenceIndex = 31; + enum NaluType : uint8_t { kSlice = 1, kIdr = 5, @@ -59,11 +63,11 @@ struct NaluIndex { }; // Returns a vector of the NALU indices in the given buffer. -std::vector FindNaluIndices(const uint8_t* buffer, - size_t buffer_size); +RTC_EXPORT std::vector FindNaluIndices( + ArrayView buffer); // Get the NAL type from the header byte immediately following start sequence. -NaluType ParseNaluType(uint8_t data); +RTC_EXPORT NaluType ParseNaluType(uint8_t data); // Methods for parsing and writing RBSP. See section 7.4.1 of the H264 spec. // @@ -79,12 +83,24 @@ NaluType ParseNaluType(uint8_t data); // the 03 emulation byte. // Parse the given data and remove any emulation byte escaping. -std::vector ParseRbsp(const uint8_t* data, size_t length); +std::vector ParseRbsp(ArrayView data); + +// TODO: bugs.webrtc.org/42225170 - Deprecate. +inline std::vector ParseRbsp(const uint8_t* data, size_t length) { + return ParseRbsp(MakeArrayView(data, length)); +} // Write the given data to the destination buffer, inserting and emulation // bytes in order to escape any data the could be interpreted as a start // sequence. -void WriteRbsp(const uint8_t* bytes, size_t length, rtc::Buffer* destination); +void WriteRbsp(ArrayView bytes, Buffer* destination); + +// TODO: bugs.webrtc.org/42225170 - Deprecate. +inline void WriteRbsp(const uint8_t* bytes, + size_t length, + Buffer* destination) { + WriteRbsp(MakeArrayView(bytes, length), destination); +} } // namespace H264 } // namespace webrtc diff --git a/common_video/h264/pps_parser.cc b/common_video/h264/pps_parser.cc index 2fc9749e8c..9d9d04a2c2 100644 --- a/common_video/h264/pps_parser.cc +++ b/common_video/h264/pps_parser.cc @@ -29,16 +29,15 @@ constexpr int kMinPicInitQpDeltaValue = -26; // You can find it on this page: // http://www.itu.int/rec/T-REC-H.264 -absl::optional PpsParser::ParsePps(const uint8_t* data, - size_t length) { +std::optional PpsParser::ParsePps( + ArrayView data) { // First, parse out rbsp, which is basically the source buffer minus emulation // bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in // section 7.3.1 of the H.264 standard. - return ParseInternal(H264::ParseRbsp(data, length)); + return ParseInternal(H264::ParseRbsp(data)); } -bool PpsParser::ParsePpsIds(const uint8_t* data, - size_t length, +bool PpsParser::ParsePpsIds(ArrayView data, uint32_t* pps_id, uint32_t* sps_id) { RTC_DCHECK(pps_id); @@ -46,32 +45,36 @@ bool PpsParser::ParsePpsIds(const uint8_t* data, // First, parse out rbsp, which is basically the source buffer minus emulation // bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in // section 7.3.1 of the H.264 standard. 
- std::vector unpacked_buffer = H264::ParseRbsp(data, length); + std::vector unpacked_buffer = H264::ParseRbsp(data); BitstreamReader reader(unpacked_buffer); *pps_id = reader.ReadExponentialGolomb(); *sps_id = reader.ReadExponentialGolomb(); return reader.Ok(); } -absl::optional PpsParser::ParsePpsIdFromSlice(const uint8_t* data, - size_t length) { - std::vector unpacked_buffer = H264::ParseRbsp(data, length); +std::optional PpsParser::ParseSliceHeader( + ArrayView data) { + std::vector unpacked_buffer = H264::ParseRbsp(data); BitstreamReader slice_reader(unpacked_buffer); + PpsParser::SliceHeader slice_header; // first_mb_in_slice: ue(v) - slice_reader.ReadExponentialGolomb(); + slice_header.first_mb_in_slice = slice_reader.ReadExponentialGolomb(); // slice_type: ue(v) slice_reader.ReadExponentialGolomb(); // pic_parameter_set_id: ue(v) - uint32_t slice_pps_id = slice_reader.ReadExponentialGolomb(); + slice_header.pic_parameter_set_id = slice_reader.ReadExponentialGolomb(); + + // The rest of the slice header requires information from the SPS to parse. + if (!slice_reader.Ok()) { - return absl::nullopt; + return std::nullopt; } - return slice_pps_id; + return slice_header; } -absl::optional PpsParser::ParseInternal( - rtc::ArrayView buffer) { +std::optional PpsParser::ParseInternal( + ArrayView buffer) { BitstreamReader reader(buffer); PpsState pps; pps.id = reader.ReadExponentialGolomb(); @@ -120,15 +123,19 @@ absl::optional PpsParser::ParseInternal( int64_t bits_to_consume = int64_t{slice_group_id_bits} * pic_size_in_map_units; if (!reader.Ok() || bits_to_consume > std::numeric_limits::max()) { - return absl::nullopt; + return std::nullopt; } reader.ConsumeBits(bits_to_consume); } } // num_ref_idx_l0_default_active_minus1: ue(v) - reader.ReadExponentialGolomb(); + pps.num_ref_idx_l0_default_active_minus1 = reader.ReadExponentialGolomb(); // num_ref_idx_l1_default_active_minus1: ue(v) - reader.ReadExponentialGolomb(); + pps.num_ref_idx_l1_default_active_minus1 = reader.ReadExponentialGolomb(); + if (pps.num_ref_idx_l0_default_active_minus1 > H264::kMaxReferenceIndex || + pps.num_ref_idx_l1_default_active_minus1 > H264::kMaxReferenceIndex) { + return std::nullopt; + } // weighted_pred_flag: u(1) pps.weighted_pred_flag = reader.Read(); // weighted_bipred_idc: u(2) @@ -139,7 +146,7 @@ absl::optional PpsParser::ParseInternal( // Sanity-check parsed value if (!reader.Ok() || pps.pic_init_qp_minus26 > kMaxPicInitQpDeltaValue || pps.pic_init_qp_minus26 < kMinPicInitQpDeltaValue) { - return absl::nullopt; + return std::nullopt; } // pic_init_qs_minus26: se(v) reader.ReadExponentialGolomb(); @@ -151,7 +158,7 @@ absl::optional PpsParser::ParseInternal( // redundant_pic_cnt_present_flag: u(1) pps.redundant_pic_cnt_present_flag = reader.ReadBit(); if (!reader.Ok()) { - return absl::nullopt; + return std::nullopt; } return pps; diff --git a/common_video/h264/pps_parser.h b/common_video/h264/pps_parser.h index 52717dcc26..cdf3fb79a0 100644 --- a/common_video/h264/pps_parser.h +++ b/common_video/h264/pps_parser.h @@ -14,7 +14,8 @@ #include #include -#include "absl/types/optional.h" +#include + #include "api/array_view.h" namespace webrtc { @@ -30,6 +31,8 @@ class PpsParser { bool bottom_field_pic_order_in_frame_present_flag = false; bool weighted_pred_flag = false; bool entropy_coding_mode_flag = false; + uint32_t num_ref_idx_l0_default_active_minus1 = 0; + uint32_t num_ref_idx_l1_default_active_minus1 = 0; uint32_t weighted_bipred_idc = false; uint32_t redundant_pic_cnt_present_flag = 0; int 
pic_init_qp_minus26 = 0; @@ -37,22 +40,32 @@ class PpsParser { uint32_t sps_id = 0; }; + struct SliceHeader { + SliceHeader() = default; + + uint32_t first_mb_in_slice = 0; + uint32_t pic_parameter_set_id = 0; + }; + // Unpack RBSP and parse PPS state from the supplied buffer. - static absl::optional ParsePps(const uint8_t* data, size_t length); + static std::optional ParsePps(ArrayView data); + // TODO: bugs.webrtc.org/42225170 - Deprecate. + static inline std::optional ParsePps(const uint8_t* data, + size_t length) { + return ParsePps(MakeArrayView(data, length)); + } - static bool ParsePpsIds(const uint8_t* data, - size_t length, + static bool ParsePpsIds(ArrayView data, uint32_t* pps_id, uint32_t* sps_id); - static absl::optional ParsePpsIdFromSlice(const uint8_t* data, - size_t length); + static std::optional ParseSliceHeader( + ArrayView data); protected: // Parse the PPS state, for a buffer where RBSP decoding has already been // performed. - static absl::optional ParseInternal( - rtc::ArrayView buffer); + static std::optional ParseInternal(ArrayView buffer); }; } // namespace webrtc diff --git a/common_video/h264/pps_parser_unittest.cc b/common_video/h264/pps_parser_unittest.cc index 4fe742d2e6..287909f533 100644 --- a/common_video/h264/pps_parser_unittest.cc +++ b/common_video/h264/pps_parser_unittest.cc @@ -36,9 +36,9 @@ void WritePps(const PpsParser::PpsState& pps, int slice_group_map_type, int num_slice_groups, int pic_size_in_map_units, - rtc::Buffer* out_buffer) { + Buffer* out_buffer) { uint8_t data[kPpsBufferMaxSize] = {0}; - rtc::BitBufferWriter bit_buffer(data, kPpsBufferMaxSize); + BitBufferWriter bit_buffer(data, kPpsBufferMaxSize); // pic_parameter_set_id: ue(v) bit_buffer.WriteExponentialGolomb(pps.id); @@ -106,9 +106,9 @@ void WritePps(const PpsParser::PpsState& pps, } // num_ref_idx_l0_default_active_minus1: ue(v) - bit_buffer.WriteExponentialGolomb(kIgnored); + bit_buffer.WriteExponentialGolomb(pps.num_ref_idx_l0_default_active_minus1); // num_ref_idx_l1_default_active_minus1: ue(v) - bit_buffer.WriteExponentialGolomb(kIgnored); + bit_buffer.WriteExponentialGolomb(pps.num_ref_idx_l1_default_active_minus1); // weighted_pred_flag: u(1) bit_buffer.WriteBits(pps.weighted_pred_flag ? 
1 : 0, 1); // weighted_bipred_idc: u(2) @@ -134,7 +134,7 @@ void WritePps(const PpsParser::PpsState& pps, bit_buffer.GetCurrentOffset(&byte_offset, &bit_offset); } - H264::WriteRbsp(data, byte_offset, out_buffer); + H264::WriteRbsp(MakeArrayView(data, byte_offset), out_buffer); } class PpsParserTest : public ::testing::Test { @@ -175,10 +175,14 @@ class PpsParserTest : public ::testing::Test { buffer_.Clear(); WritePps(pps, slice_group_map_type, num_slice_groups, pic_size_in_map_units, &buffer_); - parsed_pps_ = PpsParser::ParsePps(buffer_.data(), buffer_.size()); + parsed_pps_ = PpsParser::ParsePps(buffer_); ASSERT_TRUE(parsed_pps_); EXPECT_EQ(pps.bottom_field_pic_order_in_frame_present_flag, parsed_pps_->bottom_field_pic_order_in_frame_present_flag); + EXPECT_EQ(pps.num_ref_idx_l0_default_active_minus1, + parsed_pps_->num_ref_idx_l0_default_active_minus1); + EXPECT_EQ(pps.num_ref_idx_l1_default_active_minus1, + parsed_pps_->num_ref_idx_l1_default_active_minus1); EXPECT_EQ(pps.weighted_pred_flag, parsed_pps_->weighted_pred_flag); EXPECT_EQ(pps.weighted_bipred_idc, parsed_pps_->weighted_bipred_idc); EXPECT_EQ(pps.entropy_coding_mode_flag, @@ -191,8 +195,8 @@ class PpsParserTest : public ::testing::Test { } PpsParser::PpsState generated_pps_; - rtc::Buffer buffer_; - absl::optional parsed_pps_; + Buffer buffer_; + std::optional parsed_pps_; }; TEST_F(PpsParserTest, ZeroPps) { @@ -214,17 +218,21 @@ TEST_F(PpsParserTest, MaxPps) { RunTest(); } -TEST_F(PpsParserTest, PpsIdFromSlice) { - std::vector nalu_indices = - H264::FindNaluIndices(kH264BitstreamChunk, sizeof(kH264BitstreamChunk)); +TEST_F(PpsParserTest, ParseSliceHeader) { + ArrayView chunk(kH264BitstreamChunk); + std::vector nalu_indices = H264::FindNaluIndices(chunk); EXPECT_EQ(nalu_indices.size(), 3ull); for (const auto& index : nalu_indices) { H264::NaluType nalu_type = - H264::ParseNaluType(kH264BitstreamChunk[index.payload_start_offset]); + H264::ParseNaluType(chunk[index.payload_start_offset]); if (nalu_type == H264::NaluType::kIdr) { - absl::optional pps_id = PpsParser::ParsePpsIdFromSlice( - kH264BitstreamChunk + index.payload_start_offset, index.payload_size); - EXPECT_EQ(pps_id, 0u); + // Skip NAL type header and parse slice header. + std::optional slice_header = + PpsParser::ParseSliceHeader(chunk.subview( + index.payload_start_offset + 1, index.payload_size - 1)); + ASSERT_TRUE(slice_header.has_value()); + EXPECT_EQ(slice_header->first_mb_in_slice, 0u); + EXPECT_EQ(slice_header->pic_parameter_set_id, 0u); break; } } diff --git a/common_video/h264/sps_parser.cc b/common_video/h264/sps_parser.cc index e14334249c..197fac9573 100644 --- a/common_video/h264/sps_parser.cc +++ b/common_video/h264/sps_parser.cc @@ -32,14 +32,14 @@ SpsParser::SpsState::~SpsState() = default; // http://www.itu.int/rec/T-REC-H.264 // Unpack RBSP and parse SPS state from the supplied buffer. -absl::optional SpsParser::ParseSps(const uint8_t* data, - size_t length) { - std::vector unpacked_buffer = H264::ParseRbsp(data, length); +std::optional SpsParser::ParseSps( + ArrayView data) { + std::vector unpacked_buffer = H264::ParseRbsp(data); BitstreamReader reader(unpacked_buffer); return ParseSpsUpToVui(reader); } -absl::optional SpsParser::ParseSpsUpToVui( +std::optional SpsParser::ParseSpsUpToVui( BitstreamReader& reader) { // Now, we need to use a bitstream reader to parse through the actual AVC SPS // format. 
See Section 7.3.2.1.1 ("Sequence parameter set data syntax") of the @@ -56,7 +56,7 @@ absl::optional SpsParser::ParseSpsUpToVui( // chroma_format_idc will be ChromaArrayType if separate_colour_plane_flag is // 0. It defaults to 1, when not specified. - uint32_t chroma_format_idc = 1; + sps.chroma_format_idc = 1; // profile_idc: u(8). We need it to determine if we need to read/skip chroma // formats. @@ -73,8 +73,8 @@ absl::optional SpsParser::ParseSpsUpToVui( profile_idc == 86 || profile_idc == 118 || profile_idc == 128 || profile_idc == 138 || profile_idc == 139 || profile_idc == 134) { // chroma_format_idc: ue(v) - chroma_format_idc = reader.ReadExponentialGolomb(); - if (chroma_format_idc == 3) { + sps.chroma_format_idc = reader.ReadExponentialGolomb(); + if (sps.chroma_format_idc == 3) { // separate_colour_plane_flag: u(1) sps.separate_colour_plane_flag = reader.ReadBit(); } @@ -89,7 +89,7 @@ absl::optional SpsParser::ParseSpsUpToVui( // Process the scaling lists just enough to be able to properly // skip over them, so we can still read the resolution on streams // where this is included. - int scaling_list_count = (chroma_format_idc == 3 ? 12 : 8); + int scaling_list_count = (sps.chroma_format_idc == 3 ? 12 : 8); for (int i = 0; i < scaling_list_count; ++i) { // seq_scaling_list_present_flag[i] : u(1) if (reader.Read()) { @@ -102,7 +102,7 @@ absl::optional SpsParser::ParseSpsUpToVui( int delta_scale = reader.ReadSignedExponentialGolomb(); if (!reader.Ok() || delta_scale < kScalingDeltaMin || delta_scale > kScaldingDeltaMax) { - return absl::nullopt; + return std::nullopt; } next_scale = (last_scale + delta_scale + 256) % 256; } @@ -122,7 +122,7 @@ absl::optional SpsParser::ParseSpsUpToVui( // log2_max_frame_num_minus4: ue(v) uint32_t log2_max_frame_num_minus4 = reader.ReadExponentialGolomb(); if (!reader.Ok() || log2_max_frame_num_minus4 > kMaxLog2Minus4) { - return absl::nullopt; + return std::nullopt; } sps.log2_max_frame_num = log2_max_frame_num_minus4 + 4; @@ -132,7 +132,7 @@ absl::optional SpsParser::ParseSpsUpToVui( // log2_max_pic_order_cnt_lsb_minus4: ue(v) uint32_t log2_max_pic_order_cnt_lsb_minus4 = reader.ReadExponentialGolomb(); if (!reader.Ok() || log2_max_pic_order_cnt_lsb_minus4 > kMaxLog2Minus4) { - return absl::nullopt; + return std::nullopt; } sps.log2_max_pic_order_cnt_lsb = log2_max_pic_order_cnt_lsb_minus4 + 4; } else if (sps.pic_order_cnt_type == 1) { @@ -149,7 +149,7 @@ absl::optional SpsParser::ParseSpsUpToVui( // offset_for_ref_frame[i]: se(v) reader.ReadExponentialGolomb(); if (!reader.Ok()) { - return absl::nullopt; + return std::nullopt; } } } @@ -197,22 +197,22 @@ absl::optional SpsParser::ParseSpsUpToVui( // Far enough! We don't use the rest of the SPS. if (!reader.Ok()) { - return absl::nullopt; + return std::nullopt; } // Figure out the crop units in pixels. That's based on the chroma format's // sampling, which is indicated by chroma_format_idc. - if (sps.separate_colour_plane_flag || chroma_format_idc == 0) { + if (sps.separate_colour_plane_flag || sps.chroma_format_idc == 0) { frame_crop_bottom_offset *= (2 - sps.frame_mbs_only_flag); frame_crop_top_offset *= (2 - sps.frame_mbs_only_flag); - } else if (!sps.separate_colour_plane_flag && chroma_format_idc > 0) { + } else if (!sps.separate_colour_plane_flag && sps.chroma_format_idc > 0) { // Width multipliers for formats 1 (4:2:0) and 2 (4:2:2). 
- if (chroma_format_idc == 1 || chroma_format_idc == 2) { + if (sps.chroma_format_idc == 1 || sps.chroma_format_idc == 2) { frame_crop_left_offset *= 2; frame_crop_right_offset *= 2; } // Height multipliers for format 1 (4:2:0). - if (chroma_format_idc == 1) { + if (sps.chroma_format_idc == 1) { frame_crop_top_offset *= 2; frame_crop_bottom_offset *= 2; } diff --git a/common_video/h264/sps_parser.h b/common_video/h264/sps_parser.h index da328b48b0..a8a6675e0a 100644 --- a/common_video/h264/sps_parser.h +++ b/common_video/h264/sps_parser.h @@ -11,17 +11,19 @@ #ifndef COMMON_VIDEO_H264_SPS_PARSER_H_ #define COMMON_VIDEO_H264_SPS_PARSER_H_ -#include "absl/types/optional.h" +#include + #include "rtc_base/bitstream_reader.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { // A class for parsing out sequence parameter set (SPS) data from an H264 NALU. -class SpsParser { +class RTC_EXPORT SpsParser { public: // The parsed state of the SPS. Only some select values are stored. // Add more as they are actually needed. - struct SpsState { + struct RTC_EXPORT SpsState { SpsState(); SpsState(const SpsState&); ~SpsState(); @@ -29,6 +31,7 @@ class SpsParser { uint32_t width = 0; uint32_t height = 0; uint32_t delta_pic_order_always_zero_flag = 0; + uint32_t chroma_format_idc = 1; uint32_t separate_colour_plane_flag = 0; uint32_t frame_mbs_only_flag = 0; uint32_t log2_max_frame_num = 4; // Smallest valid value. @@ -40,12 +43,12 @@ class SpsParser { }; // Unpack RBSP and parse SPS state from the supplied buffer. - static absl::optional ParseSps(const uint8_t* data, size_t length); + static std::optional ParseSps(ArrayView data); protected: // Parse the SPS state, up till the VUI part, for a buffer where RBSP // decoding has already been performed. - static absl::optional ParseSpsUpToVui(BitstreamReader& reader); + static std::optional ParseSpsUpToVui(BitstreamReader& reader); }; } // namespace webrtc diff --git a/common_video/h264/sps_parser_unittest.cc b/common_video/h264/sps_parser_unittest.cc index c9326e4b28..0f20eb6a31 100644 --- a/common_video/h264/sps_parser_unittest.cc +++ b/common_video/h264/sps_parser_unittest.cc @@ -45,9 +45,9 @@ void GenerateFakeSps(uint16_t width, int id, uint32_t log2_max_frame_num_minus4, uint32_t log2_max_pic_order_cnt_lsb_minus4, - rtc::Buffer* out_buffer) { + Buffer* out_buffer) { uint8_t rbsp[kSpsBufferMaxSize] = {0}; - rtc::BitBufferWriter writer(rbsp, kSpsBufferMaxSize); + BitBufferWriter writer(rbsp, kSpsBufferMaxSize); // Profile byte. writer.WriteUInt8(0); // Constraint sets and reserved zero bits. 
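A call-site sketch for the span-based SpsParser entry point (not from the patch; it assumes the caller has already located an SPS NAL unit and stripped the one-byte NAL header, as H264BitstreamParser does above):

#include <cstdint>
#include <optional>

#include "api/array_view.h"
#include "common_video/h264/sps_parser.h"
#include "rtc_base/logging.h"

// Logs the coded resolution carried in an SPS payload (NAL header removed).
void LogSpsResolution(webrtc::ArrayView<const uint8_t> sps_payload) {
  std::optional<webrtc::SpsParser::SpsState> sps =
      webrtc::SpsParser::ParseSps(sps_payload);
  if (!sps) {
    RTC_LOG(LS_WARNING) << "Failed to parse SPS.";
    return;
  }
  RTC_LOG(LS_INFO) << "SPS " << sps->id << ": " << sps->width << "x"
                   << sps->height;
}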
@@ -107,7 +107,7 @@ void GenerateFakeSps(uint16_t width, } out_buffer->Clear(); - H264::WriteRbsp(rbsp, byte_count, out_buffer); + H264::WriteRbsp(MakeArrayView(rbsp, byte_count), out_buffer); } TEST(H264SpsParserTest, TestSampleSPSHdLandscape) { @@ -116,8 +116,7 @@ TEST(H264SpsParserTest, TestSampleSPSHdLandscape) { const uint8_t buffer[] = {0x7A, 0x00, 0x1F, 0xBC, 0xD9, 0x40, 0x50, 0x05, 0xBA, 0x10, 0x00, 0x00, 0x03, 0x00, 0xC0, 0x00, 0x00, 0x2A, 0xE0, 0xF1, 0x83, 0x19, 0x60}; - absl::optional sps = - SpsParser::ParseSps(buffer, arraysize(buffer)); + std::optional sps = SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(1280u, sps->width); EXPECT_EQ(720u, sps->height); @@ -129,8 +128,7 @@ TEST(H264SpsParserTest, TestSampleSPSVgaLandscape) { const uint8_t buffer[] = {0x7A, 0x00, 0x1E, 0xBC, 0xD9, 0x40, 0xA0, 0x2F, 0xF8, 0x98, 0x40, 0x00, 0x00, 0x03, 0x01, 0x80, 0x00, 0x00, 0x56, 0x83, 0xC5, 0x8B, 0x65, 0x80}; - absl::optional sps = - SpsParser::ParseSps(buffer, arraysize(buffer)); + std::optional sps = SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(640u, sps->width); EXPECT_EQ(360u, sps->height); @@ -142,18 +140,16 @@ TEST(H264SpsParserTest, TestSampleSPSWeirdResolution) { const uint8_t buffer[] = {0x7A, 0x00, 0x0D, 0xBC, 0xD9, 0x43, 0x43, 0x3E, 0x5E, 0x10, 0x00, 0x00, 0x03, 0x00, 0x60, 0x00, 0x00, 0x15, 0xA0, 0xF1, 0x42, 0x99, 0x60}; - absl::optional sps = - SpsParser::ParseSps(buffer, arraysize(buffer)); + std::optional sps = SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(200u, sps->width); EXPECT_EQ(400u, sps->height); } TEST(H264SpsParserTest, TestSyntheticSPSQvgaLandscape) { - rtc::Buffer buffer; + Buffer buffer; GenerateFakeSps(320u, 180u, 1, 0, 0, &buffer); - absl::optional sps = - SpsParser::ParseSps(buffer.data(), buffer.size()); + std::optional sps = SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(320u, sps->width); EXPECT_EQ(180u, sps->height); @@ -161,10 +157,9 @@ TEST(H264SpsParserTest, TestSyntheticSPSQvgaLandscape) { } TEST(H264SpsParserTest, TestSyntheticSPSWeirdResolution) { - rtc::Buffer buffer; + Buffer buffer; GenerateFakeSps(156u, 122u, 2, 0, 0, &buffer); - absl::optional sps = - SpsParser::ParseSps(buffer.data(), buffer.size()); + std::optional sps = SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(156u, sps->width); EXPECT_EQ(122u, sps->height); @@ -178,18 +173,16 @@ TEST(H264SpsParserTest, TestSampleSPSWithScalingLists) { 0x10, 0xc2, 0x00, 0x84, 0x3b, 0x50, 0x3c, 0x01, 0x13, 0xf2, 0xcd, 0xc0, 0x40, 0x40, 0x50, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x01, 0xe8, 0x40}; - absl::optional sps = - SpsParser::ParseSps(buffer, arraysize(buffer)); + std::optional sps = SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(1920u, sps->width); EXPECT_EQ(1080u, sps->height); } TEST(H264SpsParserTest, TestLog2MaxFrameNumMinus4) { - rtc::Buffer buffer; + Buffer buffer; GenerateFakeSps(320u, 180u, 1, 0, 0, &buffer); - absl::optional sps = - SpsParser::ParseSps(buffer.data(), buffer.size()); + std::optional sps = SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(320u, sps->width); EXPECT_EQ(180u, sps->height); @@ -197,7 +190,7 @@ TEST(H264SpsParserTest, TestLog2MaxFrameNumMinus4) { EXPECT_EQ(4u, sps->log2_max_frame_num); GenerateFakeSps(320u, 180u, 1, 12, 0, &buffer); - sps = SpsParser::ParseSps(buffer.data(), buffer.size()); + sps = SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(320u, sps->width); EXPECT_EQ(180u, sps->height); 
@@ -205,14 +198,13 @@ TEST(H264SpsParserTest, TestLog2MaxFrameNumMinus4) { EXPECT_EQ(16u, sps->log2_max_frame_num); GenerateFakeSps(320u, 180u, 1, 13, 0, &buffer); - EXPECT_FALSE(SpsParser::ParseSps(buffer.data(), buffer.size())); + EXPECT_FALSE(SpsParser::ParseSps(buffer)); } TEST(H264SpsParserTest, TestLog2MaxPicOrderCntMinus4) { - rtc::Buffer buffer; + Buffer buffer; GenerateFakeSps(320u, 180u, 1, 0, 0, &buffer); - absl::optional sps = - SpsParser::ParseSps(buffer.data(), buffer.size()); + std::optional sps = SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(320u, sps->width); EXPECT_EQ(180u, sps->height); @@ -220,15 +212,14 @@ TEST(H264SpsParserTest, TestLog2MaxPicOrderCntMinus4) { EXPECT_EQ(4u, sps->log2_max_pic_order_cnt_lsb); GenerateFakeSps(320u, 180u, 1, 0, 12, &buffer); - EXPECT_TRUE(static_cast( - sps = SpsParser::ParseSps(buffer.data(), buffer.size()))); + EXPECT_TRUE(static_cast(sps = SpsParser::ParseSps(buffer))); EXPECT_EQ(320u, sps->width); EXPECT_EQ(180u, sps->height); EXPECT_EQ(1u, sps->id); EXPECT_EQ(16u, sps->log2_max_pic_order_cnt_lsb); GenerateFakeSps(320u, 180u, 1, 0, 13, &buffer); - EXPECT_FALSE(SpsParser::ParseSps(buffer.data(), buffer.size())); + EXPECT_FALSE(SpsParser::ParseSps(buffer)); } } // namespace webrtc diff --git a/common_video/h264/sps_vui_rewriter.cc b/common_video/h264/sps_vui_rewriter.cc index 117e92a1e5..b102cf4c78 100644 --- a/common_video/h264/sps_vui_rewriter.cc +++ b/common_video/h264/sps_vui_rewriter.cc @@ -54,7 +54,7 @@ enum SpsValidEvent { } \ } while (0) -uint8_t CopyUInt8(BitstreamReader& source, rtc::BitBufferWriter& destination) { +uint8_t CopyUInt8(BitstreamReader& source, BitBufferWriter& destination) { uint8_t tmp = source.Read(); if (!destination.WriteUInt8(tmp)) { source.Invalidate(); @@ -62,8 +62,7 @@ uint8_t CopyUInt8(BitstreamReader& source, rtc::BitBufferWriter& destination) { return tmp; } -uint32_t CopyExpGolomb(BitstreamReader& source, - rtc::BitBufferWriter& destination) { +uint32_t CopyExpGolomb(BitstreamReader& source, BitBufferWriter& destination) { uint32_t tmp = source.ReadExponentialGolomb(); if (!destination.WriteExponentialGolomb(tmp)) { source.Invalidate(); @@ -73,7 +72,7 @@ uint32_t CopyExpGolomb(BitstreamReader& source, uint32_t CopyBits(int bits, BitstreamReader& source, - rtc::BitBufferWriter& destination) { + BitBufferWriter& destination) { RTC_DCHECK_GT(bits, 0); RTC_DCHECK_LE(bits, 32); uint64_t tmp = source.ReadBits(bits); @@ -85,24 +84,22 @@ uint32_t CopyBits(int bits, bool CopyAndRewriteVui(const SpsParser::SpsState& sps, BitstreamReader& source, - rtc::BitBufferWriter& destination, + BitBufferWriter& destination, const webrtc::ColorSpace* color_space, SpsVuiRewriter::ParseResult& out_vui_rewritten); -void CopyHrdParameters(BitstreamReader& source, - rtc::BitBufferWriter& destination); -bool AddBitstreamRestriction(rtc::BitBufferWriter* destination, +void CopyHrdParameters(BitstreamReader& source, BitBufferWriter& destination); +bool AddBitstreamRestriction(BitBufferWriter* destination, uint32_t max_num_ref_frames); bool IsDefaultColorSpace(const ColorSpace& color_space); -bool AddVideoSignalTypeInfo(rtc::BitBufferWriter& destination, +bool AddVideoSignalTypeInfo(BitBufferWriter& destination, const ColorSpace& color_space); bool CopyOrRewriteVideoSignalTypeInfo( BitstreamReader& source, - rtc::BitBufferWriter& destination, + BitBufferWriter& destination, const ColorSpace* color_space, SpsVuiRewriter::ParseResult& out_vui_rewritten); -bool CopyRemainingBits(BitstreamReader& source, - 
rtc::BitBufferWriter& destination); +bool CopyRemainingBits(BitstreamReader& source, BitBufferWriter& destination); } // namespace void SpsVuiRewriter::UpdateStats(ParseResult result, Direction direction) { @@ -135,16 +132,15 @@ void SpsVuiRewriter::UpdateStats(ParseResult result, Direction direction) { } SpsVuiRewriter::ParseResult SpsVuiRewriter::ParseAndRewriteSps( - const uint8_t* buffer, - size_t length, - absl::optional* sps, + ArrayView buffer, + std::optional* sps, const webrtc::ColorSpace* color_space, - rtc::Buffer* destination) { + Buffer* destination) { // Create temporary RBSP decoded buffer of the payload (exlcuding the // leading nalu type header byte (the SpsParser uses only the payload). - std::vector rbsp_buffer = H264::ParseRbsp(buffer, length); + std::vector rbsp_buffer = H264::ParseRbsp(buffer); BitstreamReader source_buffer(rbsp_buffer); - absl::optional sps_state = + std::optional sps_state = SpsParser::ParseSpsUpToVui(source_buffer); if (!sps_state) return ParseResult::kFailure; @@ -153,8 +149,8 @@ SpsVuiRewriter::ParseResult SpsVuiRewriter::ParseAndRewriteSps( // We're going to completely muck up alignment, so we need a BitBufferWriter // to write with. - rtc::Buffer out_buffer(length + kMaxVuiSpsIncrease); - rtc::BitBufferWriter sps_writer(out_buffer.data(), out_buffer.size()); + Buffer out_buffer(buffer.size() + kMaxVuiSpsIncrease); + BitBufferWriter sps_writer(out_buffer.data(), out_buffer.size()); // Check how far the SpsParser has read, and copy that data in bulk. RTC_DCHECK(source_buffer.Ok()); @@ -200,49 +196,49 @@ SpsVuiRewriter::ParseResult SpsVuiRewriter::ParseAndRewriteSps( bit_offset = 0; } - RTC_DCHECK(byte_offset <= length + kMaxVuiSpsIncrease); + RTC_DCHECK(byte_offset <= buffer.size() + kMaxVuiSpsIncrease); RTC_CHECK(destination != nullptr); out_buffer.SetSize(byte_offset); // Write updates SPS to destination with added RBSP - H264::WriteRbsp(out_buffer.data(), out_buffer.size(), destination); + H264::WriteRbsp(out_buffer, destination); return ParseResult::kVuiRewritten; } SpsVuiRewriter::ParseResult SpsVuiRewriter::ParseAndRewriteSps( - const uint8_t* buffer, - size_t length, - absl::optional* sps, + ArrayView buffer, + std::optional* sps, const webrtc::ColorSpace* color_space, - rtc::Buffer* destination, + Buffer* destination, Direction direction) { ParseResult result = - ParseAndRewriteSps(buffer, length, sps, color_space, destination); + ParseAndRewriteSps(buffer, sps, color_space, destination); UpdateStats(result, direction); return result; } -rtc::Buffer SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite( - rtc::ArrayView buffer, +Buffer SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite( + ArrayView buffer, const webrtc::ColorSpace* color_space) { - std::vector nalus = - H264::FindNaluIndices(buffer.data(), buffer.size()); + std::vector nalus = H264::FindNaluIndices(buffer); // Allocate some extra space for potentially adding a missing VUI. - rtc::Buffer output_buffer(/*size=*/0, /*capacity=*/buffer.size() + - nalus.size() * kMaxVuiSpsIncrease); + Buffer output_buffer(/*size=*/0, /*capacity=*/buffer.size() + + nalus.size() * kMaxVuiSpsIncrease); - for (const H264::NaluIndex& nalu : nalus) { + for (const H264::NaluIndex& nalu_index : nalus) { // Copy NAL unit start code. 
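For context, a sketch of how the rewritten outgoing-bitstream helper is driven (not from the patch; the wrapper name is hypothetical and the Buffer/ArrayView spellings follow this change):

#include <cstdint>

#include "api/array_view.h"
#include "api/video/color_space.h"
#include "common_video/h264/sps_vui_rewriter.h"
#include "rtc_base/buffer.h"

// Strips access unit delimiters and, when needed, rewrites the SPS VUI of an
// outgoing Annex B bitstream so that decoders do not add frame buffering.
webrtc::Buffer PrepareForSending(webrtc::ArrayView<const uint8_t> encoded,
                                 const webrtc::ColorSpace* color_space) {
  return webrtc::SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite(encoded,
                                                                  color_space);
}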
- const uint8_t* start_code_ptr = buffer.data() + nalu.start_offset; - const size_t start_code_length = - nalu.payload_start_offset - nalu.start_offset; - const uint8_t* nalu_ptr = buffer.data() + nalu.payload_start_offset; - const size_t nalu_length = nalu.payload_size; - - if (H264::ParseNaluType(nalu_ptr[0]) == H264::NaluType::kSps) { + ArrayView start_code = buffer.subview( + nalu_index.start_offset, + nalu_index.payload_start_offset - nalu_index.start_offset); + ArrayView nalu = buffer.subview( + nalu_index.payload_start_offset, nalu_index.payload_size); + if (nalu.empty()) { + continue; + } + if (H264::ParseNaluType(nalu[0]) == H264::NaluType::kSps) { // Check if stream uses picture order count type 0, and if so rewrite it // to enable faster decoding. Streams in that format incur additional // delay because it allows decode order to differ from render order. @@ -254,29 +250,29 @@ rtc::Buffer SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite( // protect legacy receive clients) in RtpDepacketizerH264::ParseSingleNalu // (receive side, in orderer to protect us from unknown or legacy send // clients). - absl::optional sps; - rtc::Buffer output_nalu; + std::optional sps; + Buffer output_nalu; // Add the type header to the output buffer first, so that the rewriter // can append modified payload on top of that. - output_nalu.AppendData(nalu_ptr[0]); + output_nalu.AppendData(nalu[0]); - ParseResult result = ParseAndRewriteSps( - nalu_ptr + H264::kNaluTypeSize, nalu_length - H264::kNaluTypeSize, - &sps, color_space, &output_nalu, Direction::kOutgoing); + ParseResult result = + ParseAndRewriteSps(nalu.subview(H264::kNaluTypeSize), &sps, + color_space, &output_nalu, Direction::kOutgoing); if (result == ParseResult::kVuiRewritten) { - output_buffer.AppendData(start_code_ptr, start_code_length); + output_buffer.AppendData(start_code); output_buffer.AppendData(output_nalu.data(), output_nalu.size()); continue; } - } else if (H264::ParseNaluType(nalu_ptr[0]) == H264::NaluType::kAud) { + } else if (H264::ParseNaluType(nalu[0]) == H264::NaluType::kAud) { // Skip the access unit delimiter copy. continue; } // vui wasn't rewritten and it is not aud, copy the nal unit as is. - output_buffer.AppendData(start_code_ptr, start_code_length); - output_buffer.AppendData(nalu_ptr, nalu_length); + output_buffer.AppendData(start_code); + output_buffer.AppendData(nalu); } return output_buffer; } @@ -284,7 +280,7 @@ rtc::Buffer SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite( namespace { bool CopyAndRewriteVui(const SpsParser::SpsState& sps, BitstreamReader& source, - rtc::BitBufferWriter& destination, + BitBufferWriter& destination, const webrtc::ColorSpace* color_space, SpsVuiRewriter::ParseResult& out_vui_rewritten) { out_vui_rewritten = SpsVuiRewriter::ParseResult::kVuiOk; @@ -417,8 +413,7 @@ bool CopyAndRewriteVui(const SpsParser::SpsState& sps, } // Copies a VUI HRD parameters segment. -void CopyHrdParameters(BitstreamReader& source, - rtc::BitBufferWriter& destination) { +void CopyHrdParameters(BitstreamReader& source, BitBufferWriter& destination) { // cbp_cnt_minus1: ue(v) uint32_t cbp_cnt_minus1 = CopyExpGolomb(source, destination); // bit_rate_scale and cbp_size_scale: u(4) each @@ -442,7 +437,7 @@ void CopyHrdParameters(BitstreamReader& source, // http://www.itu.int/rec/T-REC-H.264 // Adds a bitstream restriction VUI segment. 
-bool AddBitstreamRestriction(rtc::BitBufferWriter* destination, +bool AddBitstreamRestriction(BitBufferWriter* destination, uint32_t max_num_ref_frames) { // motion_vectors_over_pic_boundaries_flag: u(1) // Default is 1 when not present. @@ -474,7 +469,7 @@ bool IsDefaultColorSpace(const ColorSpace& color_space) { color_space.matrix() == ColorSpace::MatrixID::kUnspecified; } -bool AddVideoSignalTypeInfo(rtc::BitBufferWriter& destination, +bool AddVideoSignalTypeInfo(BitBufferWriter& destination, const ColorSpace& color_space) { // video_format: u(3). RETURN_FALSE_ON_FAIL(destination.WriteBits(5, 3)); // 5 = Unspecified @@ -497,7 +492,7 @@ bool AddVideoSignalTypeInfo(rtc::BitBufferWriter& destination, bool CopyOrRewriteVideoSignalTypeInfo( BitstreamReader& source, - rtc::BitBufferWriter& destination, + BitBufferWriter& destination, const ColorSpace* color_space, SpsVuiRewriter::ParseResult& out_vui_rewritten) { // Read. @@ -590,8 +585,7 @@ bool CopyOrRewriteVideoSignalTypeInfo( return true; } -bool CopyRemainingBits(BitstreamReader& source, - rtc::BitBufferWriter& destination) { +bool CopyRemainingBits(BitstreamReader& source, BitBufferWriter& destination) { // Try to get at least the destination aligned. if (source.RemainingBitCount() > 0 && source.RemainingBitCount() % 8 != 0) { size_t misaligned_bits = source.RemainingBitCount() % 8; diff --git a/common_video/h264/sps_vui_rewriter.h b/common_video/h264/sps_vui_rewriter.h index ef80d5b60e..8cdc04e5ef 100644 --- a/common_video/h264/sps_vui_rewriter.h +++ b/common_video/h264/sps_vui_rewriter.h @@ -15,7 +15,8 @@ #include #include -#include "absl/types/optional.h" +#include + #include "api/video/color_space.h" #include "common_video/h264/sps_parser.h" #include "rtc_base/buffer.h" @@ -42,27 +43,23 @@ class SpsVuiRewriter : private SpsParser { // SPS state. This function assumes that any previous headers // (NALU start, type, Stap-A, etc) have already been parsed and that RBSP // decoding has been performed. - static ParseResult ParseAndRewriteSps( - const uint8_t* buffer, - size_t length, - absl::optional* sps, - const ColorSpace* color_space, - rtc::Buffer* destination, - Direction Direction); + static ParseResult ParseAndRewriteSps(ArrayView buffer, + std::optional* sps, + const ColorSpace* color_space, + Buffer* destination, + Direction Direction); // Parses NAL units from `buffer`, strips AUD blocks and rewrites VUI in SPS // blocks if necessary. 
- static rtc::Buffer ParseOutgoingBitstreamAndRewrite( - rtc::ArrayView buffer, + static Buffer ParseOutgoingBitstreamAndRewrite( + ArrayView buffer, const ColorSpace* color_space); private: - static ParseResult ParseAndRewriteSps( - const uint8_t* buffer, - size_t length, - absl::optional* sps, - const ColorSpace* color_space, - rtc::Buffer* destination); + static ParseResult ParseAndRewriteSps(ArrayView buffer, + std::optional* sps, + const ColorSpace* color_space, + Buffer* destination); static void UpdateStats(ParseResult result, Direction direction); }; diff --git a/common_video/h264/sps_vui_rewriter_unittest.cc b/common_video/h264/sps_vui_rewriter_unittest.cc index 2907949e6c..581b9ef276 100644 --- a/common_video/h264/sps_vui_rewriter_unittest.cc +++ b/common_video/h264/sps_vui_rewriter_unittest.cc @@ -193,9 +193,9 @@ static const webrtc::ColorSpace kColorSpaceBt709LimitedRange( // The fake SPS that this generates also always has at least one emulation byte // at offset 2, since the first two bytes are always 0, and has a 0x3 as the // level_idc, to make sure the parser doesn't eat all 0x3 bytes. -void GenerateFakeSps(const VuiHeader& vui, rtc::Buffer* out_buffer) { +void GenerateFakeSps(const VuiHeader& vui, Buffer* out_buffer) { uint8_t rbsp[kSpsBufferMaxSize] = {0}; - rtc::BitBufferWriter writer(rbsp, kSpsBufferMaxSize); + BitBufferWriter writer(rbsp, kSpsBufferMaxSize); // Profile byte. writer.WriteUInt8(0); // Constraint sets and reserved zero bits. @@ -297,21 +297,21 @@ void GenerateFakeSps(const VuiHeader& vui, rtc::Buffer* out_buffer) { byte_count++; } - H264::WriteRbsp(rbsp, byte_count, out_buffer); + H264::WriteRbsp(MakeArrayView(rbsp, byte_count), out_buffer); } void TestSps(const VuiHeader& vui, const ColorSpace* color_space, SpsVuiRewriter::ParseResult expected_parse_result) { - rtc::LogMessage::LogToDebug(rtc::LS_VERBOSE); - rtc::Buffer original_sps; + LogMessage::LogToDebug(LS_VERBOSE); + Buffer original_sps; GenerateFakeSps(vui, &original_sps); - absl::optional sps; - rtc::Buffer rewritten_sps; + std::optional sps; + Buffer rewritten_sps; SpsVuiRewriter::ParseResult result = SpsVuiRewriter::ParseAndRewriteSps( - original_sps.data(), original_sps.size(), &sps, color_space, - &rewritten_sps, SpsVuiRewriter::Direction::kIncoming); + original_sps, &sps, color_space, &rewritten_sps, + SpsVuiRewriter::Direction::kIncoming); EXPECT_EQ(expected_parse_result, result); ASSERT_TRUE(sps); EXPECT_EQ(sps->width, kWidth); @@ -322,9 +322,9 @@ void TestSps(const VuiHeader& vui, if (result == SpsVuiRewriter::ParseResult::kVuiRewritten) { // Ensure that added/rewritten SPS is parsable. 
- rtc::Buffer tmp; + Buffer tmp; result = SpsVuiRewriter::ParseAndRewriteSps( - rewritten_sps.data(), rewritten_sps.size(), &sps, nullptr, &tmp, + rewritten_sps, &sps, nullptr, &tmp, SpsVuiRewriter::Direction::kIncoming); EXPECT_EQ(SpsVuiRewriter::ParseResult::kVuiOk, result); ASSERT_TRUE(sps); @@ -391,12 +391,12 @@ INSTANTIATE_TEST_SUITE_P( SpsVuiRewriter::ParseResult::kVuiRewritten))); TEST(SpsVuiRewriterOutgoingVuiTest, ParseOutgoingBitstreamOptimalVui) { - rtc::LogMessage::LogToDebug(rtc::LS_VERBOSE); + LogMessage::LogToDebug(LS_VERBOSE); - rtc::Buffer optimal_sps; + Buffer optimal_sps; GenerateFakeSps(kVuiNoFrameBuffering, &optimal_sps); - rtc::Buffer buffer; + Buffer buffer; buffer.AppendData(kStartSequence); buffer.AppendData(optimal_sps); buffer.AppendData(kStartSequence); @@ -407,12 +407,12 @@ TEST(SpsVuiRewriterOutgoingVuiTest, ParseOutgoingBitstreamOptimalVui) { } TEST(SpsVuiRewriterOutgoingVuiTest, ParseOutgoingBitstreamNoVui) { - rtc::LogMessage::LogToDebug(rtc::LS_VERBOSE); + LogMessage::LogToDebug(LS_VERBOSE); - rtc::Buffer sps; + Buffer sps; GenerateFakeSps(kVuiNotPresent, &sps); - rtc::Buffer buffer; + Buffer buffer; buffer.AppendData(kStartSequence); buffer.AppendData(kIdr1); buffer.AppendData(kStartSequence); @@ -421,10 +421,10 @@ TEST(SpsVuiRewriterOutgoingVuiTest, ParseOutgoingBitstreamNoVui) { buffer.AppendData(kStartSequence); buffer.AppendData(kIdr2); - rtc::Buffer optimal_sps; + Buffer optimal_sps; GenerateFakeSps(kVuiNoFrameBuffering, &optimal_sps); - rtc::Buffer expected_buffer; + Buffer expected_buffer; expected_buffer.AppendData(kStartSequence); expected_buffer.AppendData(kIdr1); expected_buffer.AppendData(kStartSequence); @@ -438,12 +438,12 @@ TEST(SpsVuiRewriterOutgoingVuiTest, ParseOutgoingBitstreamNoVui) { } TEST(SpsVuiRewriterOutgoingAudTest, ParseOutgoingBitstreamWithAud) { - rtc::LogMessage::LogToDebug(rtc::LS_VERBOSE); + LogMessage::LogToDebug(LS_VERBOSE); - rtc::Buffer optimal_sps; + Buffer optimal_sps; GenerateFakeSps(kVuiNoFrameBuffering, &optimal_sps); - rtc::Buffer buffer; + Buffer buffer; buffer.AppendData(kStartSequence); buffer.AppendData(kAud); buffer.AppendData(kStartSequence); @@ -451,7 +451,7 @@ TEST(SpsVuiRewriterOutgoingAudTest, ParseOutgoingBitstreamWithAud) { buffer.AppendData(kStartSequence); buffer.AppendData(kIdr1); - rtc::Buffer expected_buffer; + Buffer expected_buffer; expected_buffer.AppendData(kStartSequence); expected_buffer.AppendData(optimal_sps); expected_buffer.AppendData(kStartSequence); diff --git a/common_video/h265/h265_bitstream_parser.cc b/common_video/h265/h265_bitstream_parser.cc index ee77166705..b3f9793a2a 100644 --- a/common_video/h265/h265_bitstream_parser.cc +++ b/common_video/h265/h265_bitstream_parser.cc @@ -11,11 +11,17 @@ #include +#include #include +#include +#include #include +#include "api/array_view.h" #include "common_video/h265/h265_common.h" -#include "rtc_base/bit_buffer.h" +#include "common_video/h265/h265_pps_parser.h" +#include "common_video/h265/h265_sps_parser.h" +#include "common_video/h265/h265_vps_parser.h" #include "rtc_base/bitstream_reader.h" #include "rtc_base/logging.h" @@ -37,7 +43,7 @@ " to be" \ << " in range [" << (min) << ":" << (max) << "]" \ << " found " << (val) << " instead"; \ - return absl::nullopt; \ + return std::nullopt; \ } \ } while (0) @@ -79,13 +85,11 @@ H265BitstreamParser::~H265BitstreamParser() = default; // section 7.3.6.1. 
You can find it on this page: // http://www.itu.int/rec/T-REC-H.265 H265BitstreamParser::Result H265BitstreamParser::ParseNonParameterSetNalu( - const uint8_t* source, - size_t source_length, + ArrayView source, uint8_t nalu_type) { - last_slice_qp_delta_ = absl::nullopt; - last_slice_pps_id_ = absl::nullopt; - const std::vector slice_rbsp = - H265::ParseRbsp(source, source_length); + last_slice_qp_delta_ = std::nullopt; + last_slice_pps_id_ = std::nullopt; + const std::vector slice_rbsp = H265::ParseRbsp(source); if (slice_rbsp.size() < H265::kNaluHeaderSize) return kInvalidStream; @@ -129,6 +133,8 @@ H265BitstreamParser::Result H265BitstreamParser::ParseNonParameterSetNalu( uint32_t slice_segment_address_bits = H265::Log2Ceiling(pic_height_in_ctbs_y * pic_width_in_ctbs_y); + TRUE_OR_RETURN(slice_segment_address_bits != + std::numeric_limits::max()); slice_reader.ConsumeBits(slice_segment_address_bits); } @@ -138,8 +144,7 @@ H265BitstreamParser::Result H265BitstreamParser::ParseNonParameterSetNalu( slice_reader.ConsumeBits(1); } // slice_type: ue(v) - uint32_t slice_type = 0; - slice_type = slice_reader.ReadExponentialGolomb(); + uint32_t slice_type = slice_reader.ReadExponentialGolomb(); IN_RANGE_OR_RETURN(slice_type, 0, 2); if (pps->output_flag_present_flag) { // pic_output_flag: u(1) @@ -165,7 +170,7 @@ H265BitstreamParser::Result H265BitstreamParser::ParseNonParameterSetNalu( // short_term_ref_pic_set_sps_flag: u(1) short_term_ref_pic_set_sps_flag = slice_reader.Read(); if (!short_term_ref_pic_set_sps_flag) { - absl::optional ref_pic_set = + std::optional ref_pic_set = H265SpsParser::ParseShortTermRefPicSet( sps->num_short_term_ref_pic_sets, sps->num_short_term_ref_pic_sets, sps->short_term_ref_pic_set, @@ -274,18 +279,16 @@ H265BitstreamParser::Result H265BitstreamParser::ParseNonParameterSetNalu( } uint32_t num_pic_total_curr = 0; - uint32_t curr_sps_idx; + uint32_t curr_rps_idx = 0; if (short_term_ref_pic_set_sps_flag) { - curr_sps_idx = short_term_ref_pic_set_idx; + curr_rps_idx = short_term_ref_pic_set_idx; } else { - curr_sps_idx = sps->num_short_term_ref_pic_sets; - } - if (sps->short_term_ref_pic_set.size() <= curr_sps_idx) { - TRUE_OR_RETURN(!(curr_sps_idx != 0 || short_term_ref_pic_set_sps_flag)); + curr_rps_idx = sps->num_short_term_ref_pic_sets; } + const H265SpsParser::ShortTermRefPicSet* ref_pic_set; - if (curr_sps_idx < sps->short_term_ref_pic_set.size()) { - ref_pic_set = &(sps->short_term_ref_pic_set[curr_sps_idx]); + if (curr_rps_idx < sps->short_term_ref_pic_set.size()) { + ref_pic_set = &(sps->short_term_ref_pic_set[curr_rps_idx]); } else { ref_pic_set = &short_term_ref_pic_set; } @@ -366,13 +369,111 @@ H265BitstreamParser::Result H265BitstreamParser::ParseNonParameterSetNalu( } } } - if (!slice_reader.Ok() || - ((pps->weighted_pred_flag && slice_type == H265::SliceType::kP) || - (pps->weighted_bipred_flag && slice_type == H265::SliceType::kB))) { - // pred_weight_table() - RTC_LOG(LS_ERROR) << "Streams with pred_weight_table unsupported."; - return kUnsupportedStream; + + // pred_weight_table() + if ((pps->weighted_pred_flag && slice_type == H265::SliceType::kP) || + (pps->weighted_bipred_flag && slice_type == H265::SliceType::kB)) { + uint32_t luma_log2_weight_denom = slice_reader.ReadExponentialGolomb(); + IN_RANGE_OR_RETURN(luma_log2_weight_denom, 0, 7); + uint32_t chroma_array_type = + sps->separate_colour_plane_flag == 0 ? 
sps->chroma_format_idc : 0; + int32_t chroma_log2_weight_denom = luma_log2_weight_denom; + // wp_offset_half_range_c and wp_offset_half_range_y depends on + // sps.high_precision_offsets_enable_flag. Since range extension is not + // supported, so for now below two are fixed to 128 instead of 1 << + // (sps.bit_depth_luma|chroma_minus8 + 7). + int32_t wp_offset_half_range_c = (1 << 7); + int32_t wp_offset_half_range_y = (1 << 7); + if (chroma_array_type != 0) { + // delta_chroma_log2_weight_denom: se(v) + int32_t delta_chroma_log2_weight_denom = + slice_reader.ReadSignedExponentialGolomb(); + IN_RANGE_OR_RETURN(delta_chroma_log2_weight_denom, -7, 7); + chroma_log2_weight_denom += delta_chroma_log2_weight_denom; + } + IN_RANGE_OR_RETURN(chroma_log2_weight_denom, 0, 7); + + bool luma_weight_flag_l0[kMaxRefIdxActive] = {}; + bool chroma_weight_flag_l0[kMaxRefIdxActive] = {}; + int32_t delta_chroma_weight_l0[kMaxRefIdxActive][2] = {}; + int32_t luma_offset_l0[kMaxRefIdxActive] = {}; + int32_t delta_chroma_offset_l0[kMaxRefIdxActive][2] = {}; + for (uint32_t i = 0; i <= num_ref_idx_l0_active_minus1; i++) { + // luma_weight_l0_flag: u(1). By syntax this should conditionally + // check if the POC or layer ID of the reference picture is different, + // but we don't support encoding referencing different layers in the + // same AU. Skip the check for now. + luma_weight_flag_l0[i] = slice_reader.Read(); + } + if (chroma_array_type != 0) { + for (uint32_t i = 0; i <= num_ref_idx_l0_active_minus1; i++) { + // chroma_weight_l0_flag: u(1) + chroma_weight_flag_l0[i] = slice_reader.Read(); + } + } + for (uint32_t i = 0; i <= num_ref_idx_l0_active_minus1; i++) { + if (luma_weight_flag_l0[i]) { + int32_t delta_luma_weight_l0[kMaxRefIdxActive] = {}; + delta_luma_weight_l0[i] = + slice_reader.ReadSignedExponentialGolomb(); + IN_RANGE_OR_RETURN(delta_luma_weight_l0[i], -128, 127); + luma_offset_l0[i] = slice_reader.ReadSignedExponentialGolomb(); + IN_RANGE_OR_RETURN(luma_offset_l0[i], -wp_offset_half_range_y, + wp_offset_half_range_y - 1); + } + if (chroma_weight_flag_l0[i]) { + for (uint32_t j = 0; j < 2; j++) { + delta_chroma_weight_l0[i][j] = + slice_reader.ReadSignedExponentialGolomb(); + IN_RANGE_OR_RETURN(delta_chroma_weight_l0[i][j], -128, 127); + delta_chroma_offset_l0[i][j] = + slice_reader.ReadSignedExponentialGolomb(); + IN_RANGE_OR_RETURN(delta_chroma_offset_l0[i][j], + -4 * wp_offset_half_range_c, + 4 * wp_offset_half_range_c - 1); + } + } + } + if (slice_type == H265::SliceType::kB) { + bool luma_weight_flag_l1[kMaxRefIdxActive] = {}; + bool chroma_weight_flag_l1[kMaxRefIdxActive] = {}; + int32_t delta_chroma_weight_l1[kMaxRefIdxActive][2] = {}; + int32_t luma_offset_l1[kMaxRefIdxActive] = {}; + int32_t delta_chroma_offset_l1[kMaxRefIdxActive][2] = {}; + for (uint32_t i = 0; i < num_ref_idx_l1_active_minus1; i++) { + luma_weight_flag_l1[i] = slice_reader.Read(); + } + if (chroma_array_type != 0) { + for (uint32_t i = 0; i <= num_ref_idx_l1_active_minus1; i++) { + chroma_weight_flag_l1[i] = slice_reader.Read(); + } + } + for (uint32_t i = 0; i <= num_ref_idx_l1_active_minus1; i++) { + if (luma_weight_flag_l1[i]) { + int32_t delta_luma_weight_l1[kMaxRefIdxActive] = {}; + delta_luma_weight_l1[i] = + slice_reader.ReadSignedExponentialGolomb(); + IN_RANGE_OR_RETURN(delta_luma_weight_l1[i], -128, 127); + luma_offset_l1[i] = slice_reader.ReadSignedExponentialGolomb(); + IN_RANGE_OR_RETURN(luma_offset_l1[i], -wp_offset_half_range_y, + wp_offset_half_range_y - 1); + } + if (chroma_weight_flag_l1[i]) { + 
for (uint32_t j = 0; j < 2; j++) { + delta_chroma_weight_l1[i][j] = + slice_reader.ReadSignedExponentialGolomb(); + IN_RANGE_OR_RETURN(delta_chroma_weight_l1[i][j], -128, 127); + delta_chroma_offset_l1[i][j] = + slice_reader.ReadSignedExponentialGolomb(); + IN_RANGE_OR_RETURN(delta_chroma_offset_l1[i][j], + -4 * wp_offset_half_range_c, + 4 * wp_offset_half_range_c - 1); + } + } + } + } } + // five_minus_max_num_merge_cand: ue(v) uint32_t five_minus_max_num_merge_cand = slice_reader.ReadExponentialGolomb(); @@ -420,14 +521,18 @@ const H265SpsParser::SpsState* H265BitstreamParser::GetSPS(uint32_t id) const { return &it->second; } -void H265BitstreamParser::ParseSlice(const uint8_t* slice, size_t length) { +void H265BitstreamParser::ParseSlice(ArrayView slice) { + if (slice.empty()) { + RTC_LOG(LS_WARNING) << "Empty slice in H265 bitstream."; + return; + } H265::NaluType nalu_type = H265::ParseNaluType(slice[0]); switch (nalu_type) { case H265::NaluType::kVps: { - absl::optional vps_state; - if (length >= H265::kNaluHeaderSize) { - vps_state = H265VpsParser::ParseVps(slice + H265::kNaluHeaderSize, - length - H265::kNaluHeaderSize); + std::optional vps_state; + if (slice.size() >= H265::kNaluHeaderSize) { + vps_state = + H265VpsParser::ParseVps(slice.subview(H265::kNaluHeaderSize)); } if (!vps_state) { @@ -438,10 +543,10 @@ void H265BitstreamParser::ParseSlice(const uint8_t* slice, size_t length) { break; } case H265::NaluType::kSps: { - absl::optional sps_state; - if (length >= H265::kNaluHeaderSize) { - sps_state = H265SpsParser::ParseSps(slice + H265::kNaluHeaderSize, - length - H265::kNaluHeaderSize); + std::optional sps_state; + if (slice.size() >= H265::kNaluHeaderSize) { + sps_state = + H265SpsParser::ParseSps(slice.subview(H265::kNaluHeaderSize)); } if (!sps_state) { RTC_LOG(LS_WARNING) << "Unable to parse SPS from H265 bitstream."; @@ -451,10 +556,10 @@ void H265BitstreamParser::ParseSlice(const uint8_t* slice, size_t length) { break; } case H265::NaluType::kPps: { - absl::optional pps_state; - if (length >= H265::kNaluHeaderSize) { - std::vector unpacked_buffer = H265::ParseRbsp( - slice + H265::kNaluHeaderSize, length - H265::kNaluHeaderSize); + std::optional pps_state; + if (slice.size() >= H265::kNaluHeaderSize) { + std::vector unpacked_buffer = + H265::ParseRbsp(slice.subview(H265::kNaluHeaderSize)); BitstreamReader slice_reader(unpacked_buffer); // pic_parameter_set_id: ue(v) uint32_t pps_id = slice_reader.ReadExponentialGolomb(); @@ -463,8 +568,8 @@ void H265BitstreamParser::ParseSlice(const uint8_t* slice, size_t length) { uint32_t sps_id = slice_reader.ReadExponentialGolomb(); IN_RANGE_OR_RETURN_VOID(sps_id, 0, 15); const H265SpsParser::SpsState* sps = GetSPS(sps_id); - pps_state = H265PpsParser::ParsePps( - slice + H265::kNaluHeaderSize, length - H265::kNaluHeaderSize, sps); + pps_state = + H265PpsParser::ParsePps(slice.subview(H265::kNaluHeaderSize), sps); } if (!pps_state) { RTC_LOG(LS_WARNING) << "Unable to parse PPS from H265 bitstream."; @@ -476,11 +581,11 @@ void H265BitstreamParser::ParseSlice(const uint8_t* slice, size_t length) { case H265::NaluType::kAud: case H265::NaluType::kPrefixSei: case H265::NaluType::kSuffixSei: - case H265::NaluType::kAP: - case H265::NaluType::kFU: + case H265::NaluType::kAp: + case H265::NaluType::kFu: break; default: - Result res = ParseNonParameterSetNalu(slice, length, nalu_type); + Result res = ParseNonParameterSetNalu(slice, nalu_type); if (res != kOk) { RTC_LOG(LS_INFO) << "Failed to parse bitstream. 
Error: " << res; } @@ -488,17 +593,17 @@ void H265BitstreamParser::ParseSlice(const uint8_t* slice, size_t length) { } } -absl::optional -H265BitstreamParser::ParsePpsIdFromSliceSegmentLayerRbsp(const uint8_t* data, - size_t length, - uint8_t nalu_type) { - std::vector unpacked_buffer = H265::ParseRbsp(data, length); +std::optional +H265BitstreamParser::ParsePpsIdFromSliceSegmentLayerRbsp( + ArrayView data, + uint8_t nalu_type) { + std::vector unpacked_buffer = H265::ParseRbsp(data); BitstreamReader slice_reader(unpacked_buffer); // first_slice_segment_in_pic_flag: u(1) slice_reader.ConsumeBits(1); if (!slice_reader.Ok()) { - return absl::nullopt; + return std::nullopt; } if (nalu_type >= H265::NaluType::kBlaWLp && @@ -511,34 +616,55 @@ H265BitstreamParser::ParsePpsIdFromSliceSegmentLayerRbsp(const uint8_t* data, uint32_t slice_pic_parameter_set_id = slice_reader.ReadExponentialGolomb(); IN_RANGE_OR_RETURN_NULL(slice_pic_parameter_set_id, 0, 63); if (!slice_reader.Ok()) { - return absl::nullopt; + return std::nullopt; } return slice_pic_parameter_set_id; } -void H265BitstreamParser::ParseBitstream( - rtc::ArrayView bitstream) { - std::vector nalu_indices = - H265::FindNaluIndices(bitstream.data(), bitstream.size()); +std::optional H265BitstreamParser::IsFirstSliceSegmentInPic( + ArrayView data) { + std::vector unpacked_buffer = H265::ParseRbsp(data); + BitstreamReader slice_reader(unpacked_buffer); + + // first_slice_segment_in_pic_flag: u(1) + bool first_slice_segment_in_pic_flag = slice_reader.Read(); + if (!slice_reader.Ok()) { + return std::nullopt; + } + + return first_slice_segment_in_pic_flag; +} + +void H265BitstreamParser::ParseBitstream(ArrayView bitstream) { + std::vector nalu_indices = H265::FindNaluIndices(bitstream); for (const H265::NaluIndex& index : nalu_indices) - ParseSlice(&bitstream[index.payload_start_offset], index.payload_size); + ParseSlice( + bitstream.subview(index.payload_start_offset, index.payload_size)); } -absl::optional H265BitstreamParser::GetLastSliceQp() const { +std::optional H265BitstreamParser::GetLastSliceQp() const { if (!last_slice_qp_delta_ || !last_slice_pps_id_) { - return absl::nullopt; + return std::nullopt; } - uint32_t pps_id = 0; - const H265PpsParser::PpsState* pps = GetPPS(pps_id); + const H265PpsParser::PpsState* pps = GetPPS(last_slice_pps_id_.value()); if (!pps) - return absl::nullopt; + return std::nullopt; const int parsed_qp = 26 + pps->init_qp_minus26 + *last_slice_qp_delta_; if (parsed_qp < kMinQpValue || parsed_qp > kMaxQpValue) { RTC_LOG(LS_ERROR) << "Parsed invalid QP from bitstream."; - return absl::nullopt; + return std::nullopt; } return parsed_qp; } +std::optional H265BitstreamParser::GetLastSlicePpsId() const { + if (!last_slice_pps_id_) { + RTC_LOG(LS_ERROR) << "Failed to parse PPS id from bitstream."; + return std::nullopt; + } + + return last_slice_pps_id_; +} + } // namespace webrtc diff --git a/common_video/h265/h265_bitstream_parser.h b/common_video/h265/h265_bitstream_parser.h index 3c0883c7a1..7c337f8be4 100644 --- a/common_video/h265/h265_bitstream_parser.h +++ b/common_video/h265/h265_bitstream_parser.h @@ -14,42 +14,48 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/video_codecs/bitstream_parser.h" #include "common_video/h265/h265_pps_parser.h" #include "common_video/h265/h265_sps_parser.h" #include "common_video/h265/h265_vps_parser.h" #include "rtc_base/containers/flat_map.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { // Stateful H265 bitstream parser 
(due to VPS/SPS/PPS). Used to parse out QP // values from the bitstream. -class H265BitstreamParser : public BitstreamParser { +class RTC_EXPORT H265BitstreamParser : public BitstreamParser { public: H265BitstreamParser(); ~H265BitstreamParser() override; // New interface. - void ParseBitstream(rtc::ArrayView bitstream) override; - absl::optional GetLastSliceQp() const override; + void ParseBitstream(ArrayView bitstream) override; + std::optional GetLastSliceQp() const override; - static absl::optional ParsePpsIdFromSliceSegmentLayerRbsp( - const uint8_t* data, - size_t length, + std::optional GetLastSlicePpsId() const; + + static std::optional ParsePpsIdFromSliceSegmentLayerRbsp( + ArrayView data, uint8_t nalu_type); + // Returns true if the slice segment is the first in the picture; otherwise + // return false. If parse failed, return nullopt. + static std::optional IsFirstSliceSegmentInPic( + ArrayView data); + protected: enum Result { kOk, kInvalidStream, kUnsupportedStream, }; - void ParseSlice(const uint8_t* slice, size_t length); - Result ParseNonParameterSetNalu(const uint8_t* source, - size_t source_length, + void ParseSlice(ArrayView slice); + Result ParseNonParameterSetNalu(ArrayView source, uint8_t nalu_type); const H265PpsParser::PpsState* GetPPS(uint32_t id) const; @@ -62,8 +68,8 @@ class H265BitstreamParser : public BitstreamParser { flat_map pps_; // Last parsed slice QP. - absl::optional last_slice_qp_delta_; - absl::optional last_slice_pps_id_; + std::optional last_slice_qp_delta_; + std::optional last_slice_pps_id_; }; } // namespace webrtc diff --git a/common_video/h265/h265_bitstream_parser_unittest.cc b/common_video/h265/h265_bitstream_parser_unittest.cc index 7ca979433a..e359c5c2ae 100644 --- a/common_video/h265/h265_bitstream_parser_unittest.cc +++ b/common_video/h265/h265_bitstream_parser_unittest.cc @@ -11,8 +11,12 @@ #include "common_video/h265/h265_bitstream_parser.h" #include "common_video/h265/h265_common.h" +#include "test/gmock.h" #include "test/gtest.h" +using ::testing::Eq; +using ::testing::Optional; + namespace webrtc { // VPS/SPS/PPS part of below chunk. @@ -53,6 +57,12 @@ const uint8_t kH265SliceChunk[] = { 0x26, 0x0f, 0x7b, 0x30, 0x1c, 0xd7, 0xd4, 0x3a, 0xec, 0xad, 0xef, 0x73, }; +// Contains enough of data for the second slice of a frame. +const uint8_t kH265SecondSliceChunkInAFrame[] = { + 0x02, 0x01, 0x23, 0xfc, 0x20, 0x22, 0xad, 0x13, 0x68, 0xce, 0xc3, 0x5a, + 0x00, 0xdc, 0xeb, 0x86, 0x4b, 0x0b, 0xa7, 0x6a, 0xe1, 0x9c, 0x5c, 0xea, +}; + // Contains short term ref pic set slice to verify Log2Ceiling path. const uint8_t kH265SliceStrChunk[] = { 0x00, 0x00, 0x00, 0x01, 0x40, 0x01, 0x0c, 0x01, 0xff, 0xff, 0x01, 0x00, @@ -91,6 +101,24 @@ const uint8_t kH265BitstreamInvalidQPChunk52[] = { 0x00, 0x01, 0x26, 0x01, 0xaf, 0x03, 0x44, }; +// Bitstream that contains pred_weight_table. Contains enough data to parse +// over pred_weight_table for slice QP. This is bear.hevc from Chromium source, +// used for H265 hardware decoder's parser test, with some slices truncated. 
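A hedged usage sketch of the parser interface exercised by these tests; the Annex-B input is hypothetical, error handling is elided, and the optional value types follow the members shown in the header diff.

#include <cstdint>
#include <optional>

#include "api/array_view.h"
#include "common_video/h265/h265_bitstream_parser.h"
#include "rtc_base/logging.h"

void InspectAccessUnit(webrtc::ArrayView<const uint8_t> annex_b_frame) {
  webrtc::H265BitstreamParser parser;
  // Feeds every NAL unit in the buffer through the stateful parser.
  parser.ParseBitstream(annex_b_frame);
  if (std::optional<int> qp = parser.GetLastSliceQp()) {
    RTC_LOG(LS_INFO) << "Last slice QP: " << *qp;
  }
  if (auto pps_id = parser.GetLastSlicePpsId()) {
    RTC_LOG(LS_INFO) << "Last slice PPS id: " << *pps_id;
  }
}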
+const uint8_t kH265BitstreamWithPredWeightTable[] = { + 0x00, 0x00, 0x00, 0x01, 0x40, 0x01, 0x0c, 0x01, 0xff, 0xff, 0x01, 0x60, + 0x00, 0x00, 0x03, 0x00, 0x80, 0x00, 0x00, 0x03, 0x00, 0x00, 0x03, 0x00, + 0x3c, 0x95, 0xc0, 0x90, 0x00, 0x00, 0x00, 0x01, 0x42, 0x01, 0x01, 0x01, + 0x60, 0x00, 0x00, 0x03, 0x00, 0x80, 0x00, 0x00, 0x03, 0x00, 0x00, 0x03, + 0x00, 0x3c, 0xa0, 0x0a, 0x08, 0x0b, 0x9f, 0x79, 0x65, 0x79, 0x24, 0xca, + 0xe0, 0x10, 0x00, 0x00, 0x06, 0x40, 0x00, 0x00, 0xbb, 0x50, 0x80, 0x00, + 0x00, 0x00, 0x01, 0x44, 0x01, 0xc1, 0x73, 0xd1, 0x89, 0x00, 0x00, 0x00, + 0x01, 0x02, 0x01, 0xd0, 0x21, 0x49, 0xe8, 0xee, 0x50, 0x9c, 0x27, 0x20, + 0x42, 0xc4, 0xcd, 0x33, 0xf0, 0xb1, 0x23, 0x7b, 0xfe, 0x4d, 0xcf, 0x40, + 0xeb, 0x17, 0x37, 0x91, 0x1c, 0xb6, 0xba, 0x21, 0x42, 0xf7, 0xef, 0x01, + 0x08, 0x90, 0x49, 0xdc, 0xfc, 0x10, 0x1f, 0x5e, 0x02, 0xd9, 0xaa, 0xe8, + 0x32, 0xeb, 0x74, 0xbc, 0xdb, 0x2c, 0xa3, 0xec, +}; + TEST(H265BitstreamParserTest, ReportsNoQpWithoutParsedSlices) { H265BitstreamParser h265_parser; EXPECT_FALSE(h265_parser.GetLastSliceQp().has_value()); @@ -102,10 +130,18 @@ TEST(H265BitstreamParserTest, ReportsNoQpWithOnlyParsedPpsAndSpsSlices) { EXPECT_FALSE(h265_parser.GetLastSliceQp().has_value()); } +TEST(H265BitstreamParserTest, ReportQpWithPredWeightTable) { + H265BitstreamParser h265_parser; + h265_parser.ParseBitstream(kH265BitstreamWithPredWeightTable); + std::optional qp = h265_parser.GetLastSliceQp(); + ASSERT_TRUE(qp.has_value()); + EXPECT_EQ(34, *qp); +} + TEST(H265BitstreamParserTest, ReportsLastSliceQpForImageSlices) { H265BitstreamParser h265_parser; h265_parser.ParseBitstream(kH265BitstreamChunk); - absl::optional qp = h265_parser.GetLastSliceQp(); + std::optional qp = h265_parser.GetLastSliceQp(); ASSERT_TRUE(qp.has_value()); EXPECT_EQ(34, *qp); @@ -119,16 +155,16 @@ TEST(H265BitstreamParserTest, ReportsLastSliceQpForImageSlices) { TEST(H265BitstreamParserTest, ReportsLastSliceQpFromShortTermReferenceSlices) { H265BitstreamParser h265_parser; h265_parser.ParseBitstream(kH265SliceStrChunk); - absl::optional qp = h265_parser.GetLastSliceQp(); + std::optional qp = h265_parser.GetLastSliceQp(); ASSERT_TRUE(qp.has_value()); EXPECT_EQ(33, *qp); } TEST(H265BitstreamParserTest, PpsIdFromSlice) { H265BitstreamParser h265_parser; - absl::optional pps_id = - h265_parser.ParsePpsIdFromSliceSegmentLayerRbsp( - kH265SliceChunk, sizeof(kH265SliceChunk), H265::NaluType::kTrailR); + std::optional pps_id = + h265_parser.ParsePpsIdFromSliceSegmentLayerRbsp(kH265SliceChunk, + H265::NaluType::kTrailR); ASSERT_TRUE(pps_id); EXPECT_EQ(1u, *pps_id); } @@ -136,7 +172,7 @@ TEST(H265BitstreamParserTest, PpsIdFromSlice) { TEST(H265BitstreamParserTest, ReportsLastSliceQpInvalidQPSlices) { H265BitstreamParser h265_parser; h265_parser.ParseBitstream(kH265BitstreamInvalidQPChunk); - absl::optional qp = h265_parser.GetLastSliceQp(); + std::optional qp = h265_parser.GetLastSliceQp(); ASSERT_FALSE(qp.has_value()); h265_parser.ParseBitstream(kH265BitstreamInvalidQPChunk52); @@ -144,4 +180,22 @@ TEST(H265BitstreamParserTest, ReportsLastSliceQpInvalidQPSlices) { ASSERT_FALSE(qp.has_value()); } +TEST(H265BitstreamParserTest, ReportsFirstSliceSegmentInPic) { + EXPECT_THAT(H265BitstreamParser::IsFirstSliceSegmentInPic(kH265SliceChunk), + Optional(Eq(true))); +} + +TEST(H265BitstreamParserTest, ReportsFirstSliceSegmentInPicFalse) { + EXPECT_THAT(H265BitstreamParser::IsFirstSliceSegmentInPic( + kH265SecondSliceChunkInAFrame), + Optional(Eq(false))); +} + +TEST(H265BitstreamParserTest, 
ReportsFirstSliceSegmentInPicParseInvalidSlice) { + ArrayView slice_data(kH265SliceChunk); + EXPECT_THAT( + H265BitstreamParser::IsFirstSliceSegmentInPic(slice_data.subview(50)), + Eq(std::nullopt)); +} + } // namespace webrtc diff --git a/common_video/h265/h265_common.cc b/common_video/h265/h265_common.cc index 70864495bc..cb5557dc0f 100644 --- a/common_video/h265/h265_common.cc +++ b/common_video/h265/h265_common.cc @@ -17,10 +17,8 @@ namespace H265 { constexpr uint8_t kNaluTypeMask = 0x7E; -std::vector FindNaluIndices(const uint8_t* buffer, - size_t buffer_size) { - std::vector indices = - H264::FindNaluIndices(buffer, buffer_size); +std::vector FindNaluIndices(ArrayView buffer) { + std::vector indices = H264::FindNaluIndices(buffer); std::vector results; for (auto& index : indices) { results.push_back( @@ -33,12 +31,12 @@ NaluType ParseNaluType(uint8_t data) { return static_cast((data & kNaluTypeMask) >> 1); } -std::vector ParseRbsp(const uint8_t* data, size_t length) { - return H264::ParseRbsp(data, length); +std::vector ParseRbsp(ArrayView data) { + return H264::ParseRbsp(data); } -void WriteRbsp(const uint8_t* bytes, size_t length, rtc::Buffer* destination) { - H264::WriteRbsp(bytes, length, destination); +void WriteRbsp(ArrayView bytes, Buffer* destination) { + H264::WriteRbsp(bytes, destination); } uint32_t Log2Ceiling(uint32_t value) { diff --git a/common_video/h265/h265_common.h b/common_video/h265/h265_common.h index 1a76077b34..7bba7f84a7 100644 --- a/common_video/h265/h265_common.h +++ b/common_video/h265/h265_common.h @@ -16,6 +16,7 @@ #include "common_video/h265/h265_inline.h" #include "rtc_base/buffer.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -49,36 +50,47 @@ enum NaluType : uint8_t { kIdrNLp = 20, kCra = 21, kRsvIrapVcl23 = 23, + kRsvVcl31 = 31, kVps = 32, kSps = 33, kPps = 34, kAud = 35, kPrefixSei = 39, kSuffixSei = 40, - kAP = 48, - kFU = 49 + // Aggregation packets, refer to section 4.4.2 in RFC 7798. + kAp = 48, + // Fragmentation units, refer to section 4.4.3 in RFC 7798. + kFu = 49, + // PACI packets, refer to section 4.4.4 in RFC 7798. + kPaci = 50 }; -// Slice type definition. See table 7-7 of the H265 spec +// Slice type definition. See table 7-7 of the H.265 spec enum SliceType : uint8_t { kB = 0, kP = 1, kI = 2 }; struct NaluIndex { // Start index of NALU, including start sequence. - size_t start_offset; + size_t start_offset = 0; // Start index of NALU payload, typically type header. - size_t payload_start_offset; + size_t payload_start_offset = 0; // Length of NALU payload, in bytes, counting from payload_start_offset. - size_t payload_size; + size_t payload_size = 0; }; // Returns a vector of the NALU indices in the given buffer. -std::vector FindNaluIndices(const uint8_t* buffer, - size_t buffer_size); +RTC_EXPORT std::vector FindNaluIndices( + ArrayView buffer); + +// TODO: bugs.webrtc.org/42225170 - Deprecate. +inline std::vector FindNaluIndices(const uint8_t* buffer, + size_t buffer_size) { + return FindNaluIndices(MakeArrayView(buffer, buffer_size)); +} // Get the NAL type from the header byte immediately following start sequence. -NaluType ParseNaluType(uint8_t data); +RTC_EXPORT NaluType ParseNaluType(uint8_t data); -// Methods for parsing and writing RBSP. See section 7.4.2 of the H265 spec. +// Methods for parsing and writing RBSP. See section 7.4.2 of the H.265 spec. 
// // The following sequences are illegal, and need to be escaped when encoding: // 00 00 00 -> 00 00 03 00 @@ -92,12 +104,24 @@ NaluType ParseNaluType(uint8_t data); // the 03 emulation byte. // Parse the given data and remove any emulation byte escaping. -std::vector ParseRbsp(const uint8_t* data, size_t length); +std::vector ParseRbsp(ArrayView data); + +// TODO: bugs.webrtc.org/42225170 - Deprecate. +inline std::vector ParseRbsp(const uint8_t* data, size_t length) { + return ParseRbsp(MakeArrayView(data, length)); +} // Write the given data to the destination buffer, inserting and emulation // bytes in order to escape any data the could be interpreted as a start // sequence. -void WriteRbsp(const uint8_t* bytes, size_t length, rtc::Buffer* destination); +void WriteRbsp(ArrayView bytes, Buffer* destination); + +// TODO: bugs.webrtc.org/42225170 - Deprecate. +inline void WriteRbsp(const uint8_t* bytes, + size_t length, + Buffer* destination) { + WriteRbsp(MakeArrayView(bytes, length), destination); +} uint32_t Log2Ceiling(uint32_t value); diff --git a/common_video/h265/h265_pps_parser.cc b/common_video/h265/h265_pps_parser.cc index 1cc9abd794..52c1026f1e 100644 --- a/common_video/h265/h265_pps_parser.cc +++ b/common_video/h265/h265_pps_parser.cc @@ -11,9 +11,9 @@ #include "common_video/h265/h265_pps_parser.h" #include +#include #include -#include "absl/types/optional.h" #include "common_video/h265/h265_common.h" #include "rtc_base/bit_buffer.h" #include "rtc_base/bitstream_reader.h" @@ -26,7 +26,7 @@ " to be" \ << " in range [" << (min) << ":" << (max) << "]" \ << " found " << (val) << " instead"; \ - return absl::nullopt; \ + return std::nullopt; \ } \ } while (0) @@ -46,7 +46,7 @@ if (!reader.Ok() || !(a)) { \ RTC_LOG(LS_WARNING) << "Error in stream: invalid value, expected " \ << #a; \ - return absl::nullopt; \ + return std::nullopt; \ } \ } while (0) @@ -62,18 +62,16 @@ namespace webrtc { // You can find it on this page: // http://www.itu.int/rec/T-REC-H.265 -absl::optional H265PpsParser::ParsePps( - const uint8_t* data, - size_t length, +std::optional H265PpsParser::ParsePps( + ArrayView data, const H265SpsParser::SpsState* sps) { // First, parse out rbsp, which is basically the source buffer minus emulation // bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in // section 7.3.1.1 of the H.265 standard. - return ParseInternal(H265::ParseRbsp(data, length), sps); + return ParseInternal(H265::ParseRbsp(data), sps); } -bool H265PpsParser::ParsePpsIds(const uint8_t* data, - size_t length, +bool H265PpsParser::ParsePpsIds(ArrayView data, uint32_t* pps_id, uint32_t* sps_id) { RTC_DCHECK(pps_id); @@ -81,7 +79,7 @@ bool H265PpsParser::ParsePpsIds(const uint8_t* data, // First, parse out rbsp, which is basically the source buffer minus emulation // bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in // section 7.3.1.1 of the H.265 standard. 
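To illustrate the ArrayView-based helpers in common_video/h265/h265_common.h, a sketch of walking the NAL units of a buffer; the input is assumed to be an Annex-B stream and the per-NALU dispatch is omitted.

#include <cstdint>
#include <vector>

#include "api/array_view.h"
#include "common_video/h265/h265_common.h"
#include "rtc_base/logging.h"

void ForEachNalu(webrtc::ArrayView<const uint8_t> bitstream) {
  // Locate every NAL unit, with start-code and payload offsets.
  std::vector<webrtc::H265::NaluIndex> indices =
      webrtc::H265::FindNaluIndices(bitstream);
  for (const webrtc::H265::NaluIndex& index : indices) {
    webrtc::H265::NaluType type =
        webrtc::H265::ParseNaluType(bitstream[index.payload_start_offset]);
    // Strip emulation-prevention bytes before any bit-level parsing.
    std::vector<uint8_t> rbsp = webrtc::H265::ParseRbsp(
        bitstream.subview(index.payload_start_offset, index.payload_size));
    RTC_LOG(LS_VERBOSE) << "NALU type " << static_cast<int>(type)
                        << ", RBSP size " << rbsp.size();
  }
}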
- std::vector unpacked_buffer = H265::ParseRbsp(data, length); + std::vector unpacked_buffer = H265::ParseRbsp(data); BitstreamReader reader(unpacked_buffer); *pps_id = reader.ReadExponentialGolomb(); IN_RANGE_OR_RETURN_FALSE(*pps_id, 0, 63); @@ -90,18 +88,18 @@ bool H265PpsParser::ParsePpsIds(const uint8_t* data, return reader.Ok(); } -absl::optional H265PpsParser::ParseInternal( - rtc::ArrayView buffer, +std::optional H265PpsParser::ParseInternal( + ArrayView buffer, const H265SpsParser::SpsState* sps) { BitstreamReader reader(buffer); PpsState pps; if (!sps) { - return absl::nullopt; + return std::nullopt; } if (!ParsePpsIdsInternal(reader, pps.pps_id, pps.sps_id)) { - return absl::nullopt; + return std::nullopt; } // dependent_slice_segments_enabled_flag: u(1) @@ -223,14 +221,14 @@ absl::optional H265PpsParser::ParseInternal( if (pps_scaling_list_data_present_flag) { // scaling_list_data() if (!H265SpsParser::ParseScalingListData(reader)) { - return absl::nullopt; + return std::nullopt; } } // lists_modification_present_flag: u(1) pps.lists_modification_present_flag = reader.Read(); if (!reader.Ok()) { - return absl::nullopt; + return std::nullopt; } return pps; diff --git a/common_video/h265/h265_pps_parser.h b/common_video/h265/h265_pps_parser.h index 625869d8d5..ab4fd9a481 100644 --- a/common_video/h265/h265_pps_parser.h +++ b/common_video/h265/h265_pps_parser.h @@ -11,15 +11,17 @@ #ifndef COMMON_VIDEO_H265_H265_PPS_PARSER_H_ #define COMMON_VIDEO_H265_H265_PPS_PARSER_H_ -#include "absl/types/optional.h" +#include + #include "api/array_view.h" #include "common_video/h265/h265_sps_parser.h" #include "rtc_base/bitstream_reader.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { // A class for parsing out picture parameter set (PPS) data from a H265 NALU. -class H265PpsParser { +class RTC_EXPORT H265PpsParser { public: // The parsed state of the PPS. Only some select values are stored. // Add more as they are actually needed. @@ -42,20 +44,32 @@ class H265PpsParser { }; // Unpack RBSP and parse PPS state from the supplied buffer. - static absl::optional ParsePps(const uint8_t* data, - size_t length, - const H265SpsParser::SpsState* sps); + static std::optional ParsePps(ArrayView data, + const H265SpsParser::SpsState* sps); + // TODO: bugs.webrtc.org/42225170 - Deprecate. + static inline std::optional ParsePps( + const uint8_t* data, + size_t length, + const H265SpsParser::SpsState* sps) { + return ParsePps(MakeArrayView(data, length), sps); + } - static bool ParsePpsIds(const uint8_t* data, - size_t length, + static bool ParsePpsIds(ArrayView data, uint32_t* pps_id, uint32_t* sps_id); + // TODO: bugs.webrtc.org/42225170 - Deprecate. + static inline bool ParsePpsIds(const uint8_t* data, + size_t length, + uint32_t* pps_id, + uint32_t* sps_id) { + return ParsePpsIds(MakeArrayView(data, length), pps_id, sps_id); + } protected: // Parse the PPS state, for a bit buffer where RBSP decoding has already been // performed. 
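A hedged sketch of the parse order the new ParsePps() overload implies, since a PPS cannot be interpreted without its SPS; `sps_payload` and `pps_payload` are hypothetical NAL unit payloads with the two-byte NALU header already removed.

#include <cstdint>
#include <optional>

#include "api/array_view.h"
#include "common_video/h265/h265_pps_parser.h"
#include "common_video/h265/h265_sps_parser.h"

std::optional<webrtc::H265PpsParser::PpsState> ParsePpsWithSps(
    webrtc::ArrayView<const uint8_t> sps_payload,
    webrtc::ArrayView<const uint8_t> pps_payload) {
  // The PPS parser needs an already parsed SPS to resolve dependent fields.
  std::optional<webrtc::H265SpsParser::SpsState> sps =
      webrtc::H265SpsParser::ParseSps(sps_payload);
  if (!sps) {
    return std::nullopt;
  }
  return webrtc::H265PpsParser::ParsePps(pps_payload, &*sps);
}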
- static absl::optional ParseInternal( - rtc::ArrayView buffer, + static std::optional ParseInternal( + ArrayView buffer, const H265SpsParser::SpsState* sps); static bool ParsePpsIdsInternal(BitstreamReader& reader, uint32_t& pps_id, diff --git a/common_video/h265/h265_pps_parser_unittest.cc b/common_video/h265/h265_pps_parser_unittest.cc index d91fc1a55c..ff0058b2cf 100644 --- a/common_video/h265/h265_pps_parser_unittest.cc +++ b/common_video/h265/h265_pps_parser_unittest.cc @@ -35,9 +35,9 @@ void WritePps(const H265PpsParser::PpsState& pps, bool pps_deblocking_filter_disabled_flag, bool pps_scaling_list_data_present_flag, bool scaling_list_pred_mode_flag, - rtc::Buffer* out_buffer) { + Buffer* out_buffer) { uint8_t data[kPpsBufferMaxSize] = {0}; - rtc::BitBufferWriter bit_buffer(data, kPpsBufferMaxSize); + BitBufferWriter bit_buffer(data, kPpsBufferMaxSize); // pic_parameter_set_id: ue(v) bit_buffer.WriteExponentialGolomb(pps.pps_id); @@ -161,7 +161,7 @@ void WritePps(const H265PpsParser::PpsState& pps, bit_buffer.GetCurrentOffset(&byte_offset, &bit_offset); } - H265::WriteRbsp(data, byte_offset, out_buffer); + H265::WriteRbsp(MakeArrayView(data, byte_offset), out_buffer); } class H265PpsParserTest : public ::testing::Test { @@ -196,9 +196,8 @@ class H265PpsParserTest : public ::testing::Test { 0x16, 0x59, 0x59, 0xa4, 0x93, 0x2b, 0x80, 0x40, 0x00, 0x00, 0x03, 0x00, 0x40, 0x00, 0x00, 0x07, 0x82}; H265SpsParser::SpsState parsed_sps = - H265SpsParser::ParseSps(sps_buffer, arraysize(sps_buffer)).value(); - parsed_pps_ = - H265PpsParser::ParsePps(buffer_.data(), buffer_.size(), &parsed_sps); + H265SpsParser::ParseSps(sps_buffer).value(); + parsed_pps_ = H265PpsParser::ParsePps(buffer_, &parsed_sps); ASSERT_TRUE(parsed_pps_); EXPECT_EQ(pps.dependent_slice_segments_enabled_flag, parsed_pps_->dependent_slice_segments_enabled_flag); @@ -222,9 +221,9 @@ class H265PpsParserTest : public ::testing::Test { } H265PpsParser::PpsState generated_pps_; - rtc::Buffer buffer_; - absl::optional parsed_pps_; - absl::optional parsed_sps_; + Buffer buffer_; + std::optional parsed_pps_; + std::optional parsed_sps_; }; TEST_F(H265PpsParserTest, ZeroPps) { diff --git a/common_video/h265/h265_sps_parser.cc b/common_video/h265/h265_sps_parser.cc index 96aee7c569..1461b7400c 100644 --- a/common_video/h265/h265_sps_parser.cc +++ b/common_video/h265/h265_sps_parser.cc @@ -11,11 +11,15 @@ #include "common_video/h265/h265_sps_parser.h" #include -#include +#include +#include +#include +#include #include +#include "api/array_view.h" #include "common_video/h265/h265_common.h" -#include "rtc_base/bit_buffer.h" +#include "rtc_base/bitstream_reader.h" #include "rtc_base/logging.h" #define IN_RANGE_OR_RETURN_NULL(val, min, max) \ @@ -25,7 +29,7 @@ " to be" \ << " in range [" << (min) << ":" << (max) << "]" \ << " found " << (val) << " instead"; \ - return absl::nullopt; \ + return std::nullopt; \ } \ } while (0) @@ -45,16 +49,16 @@ if (!reader.Ok() || !(a)) { \ RTC_LOG(LS_WARNING) << "Error in stream: invalid value, expected " \ << #a; \ - return absl::nullopt; \ + return std::nullopt; \ } \ } while (0) namespace { -using OptionalSps = absl::optional; +using OptionalSps = std::optional; using OptionalShortTermRefPicSet = - absl::optional; + std::optional; using OptionalProfileTierLevel = - absl::optional; + std::optional; constexpr int kMaxNumSizeIds = 4; constexpr int kMaxNumMatrixIds = 6; @@ -103,15 +107,13 @@ size_t H265SpsParser::GetDpbMaxPicBuf(int general_profile_idc) { // http://www.itu.int/rec/T-REC-H.265 // Unpack RBSP 
and parse SPS state from the supplied buffer. -absl::optional H265SpsParser::ParseSps( - const uint8_t* data, - size_t length) { - RTC_DCHECK(data); - return ParseSpsInternal(H265::ParseRbsp(data, length)); +std::optional H265SpsParser::ParseSps( + ArrayView data) { + return ParseSpsInternal(H265::ParseRbsp(data)); } bool H265SpsParser::ParseScalingListData(BitstreamReader& reader) { - int32_t scaling_list_dc_coef_minus8[kMaxNumSizeIds][kMaxNumMatrixIds]; + int32_t scaling_list_dc_coef_minus8[kMaxNumSizeIds][kMaxNumMatrixIds] = {}; for (int size_id = 0; size_id < kMaxNumSizeIds; size_id++) { for (int matrix_id = 0; matrix_id < kMaxNumMatrixIds; matrix_id += (size_id == 3) ? 3 : 1) { @@ -148,7 +150,7 @@ bool H265SpsParser::ParseScalingListData(BitstreamReader& reader) { return reader.Ok(); } -absl::optional +std::optional H265SpsParser::ParseShortTermRefPicSet( uint32_t st_rps_idx, uint32_t num_short_term_ref_pic_sets, @@ -182,8 +184,8 @@ H265SpsParser::ParseShortTermRefPicSet( short_term_ref_pic_set[ref_rps_idx].num_delta_pocs; IN_RANGE_OR_RETURN_NULL(num_delta_pocs, 0, kMaxShortTermRefPicSets); const ShortTermRefPicSet& ref_set = short_term_ref_pic_set[ref_rps_idx]; - bool used_by_curr_pic_flag[kMaxShortTermRefPicSets]; - bool use_delta_flag[kMaxShortTermRefPicSets]; + bool used_by_curr_pic_flag[kMaxShortTermRefPicSets] = {}; + bool use_delta_flag[kMaxShortTermRefPicSets] = {}; // 7.4.8 - use_delta_flag defaults to 1 if not present. std::fill_n(use_delta_flag, kMaxShortTermRefPicSets, true); @@ -266,7 +268,7 @@ H265SpsParser::ParseShortTermRefPicSet( for (uint32_t i = 0; i < st_ref_pic_set.num_negative_pics; i++) { // delta_poc_s0_minus1: ue(v) - int delta_poc_s0_minus1; + int delta_poc_s0_minus1 = 0; delta_poc_s0_minus1 = reader.ReadExponentialGolomb(); IN_RANGE_OR_RETURN_NULL(delta_poc_s0_minus1, 0, 0x7FFF); if (i == 0) { @@ -281,7 +283,7 @@ H265SpsParser::ParseShortTermRefPicSet( for (uint32_t i = 0; i < st_ref_pic_set.num_positive_pics; i++) { // delta_poc_s1_minus1: ue(v) - int delta_poc_s1_minus1; + int delta_poc_s1_minus1 = 0; delta_poc_s1_minus1 = reader.ReadExponentialGolomb(); IN_RANGE_OR_RETURN_NULL(delta_poc_s1_minus1, 0, 0x7FFF); if (i == 0) { @@ -299,13 +301,13 @@ H265SpsParser::ParseShortTermRefPicSet( st_ref_pic_set.num_negative_pics + st_ref_pic_set.num_positive_pics; if (!reader.Ok()) { - return absl::nullopt; + return std::nullopt; } return OptionalShortTermRefPicSet(st_ref_pic_set); } -absl::optional +std::optional H265SpsParser::ParseProfileTierLevel(bool profile_present, int max_num_sub_layers_minus1, BitstreamReader& reader) { @@ -319,10 +321,8 @@ H265SpsParser::ParseProfileTierLevel(bool profile_present, reader.ConsumeBits(1); pf_tier_level.general_profile_idc = reader.ReadBits(5); IN_RANGE_OR_RETURN_NULL(pf_tier_level.general_profile_idc, 0, 11); - uint16_t general_profile_compatibility_flag_high16; - uint16_t general_profile_compatibility_flag_low16; - general_profile_compatibility_flag_high16 = reader.ReadBits(16); - general_profile_compatibility_flag_low16 = reader.ReadBits(16); + uint16_t general_profile_compatibility_flag_high16 = reader.ReadBits(16); + uint16_t general_profile_compatibility_flag_low16 = reader.ReadBits(16); pf_tier_level.general_profile_compatibility_flags = (general_profile_compatibility_flag_high16 << 16) + general_profile_compatibility_flag_low16; @@ -331,7 +331,7 @@ H265SpsParser::ParseProfileTierLevel(bool profile_present, if (!reader.Ok() || (!pf_tier_level.general_progressive_source_flag && 
pf_tier_level.general_interlaced_source_flag)) { RTC_LOG(LS_WARNING) << "Interlaced streams not supported"; - return absl::nullopt; + return std::nullopt; } pf_tier_level.general_non_packed_constraint_flag = reader.ReadBits(1); pf_tier_level.general_frame_only_constraint_flag = reader.ReadBits(1); @@ -344,8 +344,8 @@ H265SpsParser::ParseProfileTierLevel(bool profile_present, reader.ConsumeBits(1); } pf_tier_level.general_level_idc = reader.ReadBits(8); - bool sub_layer_profile_present_flag[8]; - bool sub_layer_level_present_flag[8]; + bool sub_layer_profile_present_flag[8] = {}; + bool sub_layer_level_present_flag[8] = {}; for (int i = 0; i < max_num_sub_layers_minus1; ++i) { sub_layer_profile_present_flag[i] = reader.ReadBits(1); sub_layer_level_present_flag[i] = reader.ReadBits(1); @@ -382,14 +382,14 @@ H265SpsParser::ParseProfileTierLevel(bool profile_present, } if (!reader.Ok()) { - return absl::nullopt; + return std::nullopt; } return OptionalProfileTierLevel(pf_tier_level); } -absl::optional H265SpsParser::ParseSpsInternal( - rtc::ArrayView buffer) { +std::optional H265SpsParser::ParseSpsInternal( + ArrayView buffer) { BitstreamReader reader(buffer); // Now, we need to use a bit buffer to parse through the actual H265 SPS @@ -420,7 +420,7 @@ absl::optional H265SpsParser::ParseSpsInternal( OptionalProfileTierLevel profile_tier_level = ParseProfileTierLevel(true, sps.sps_max_sub_layers_minus1, reader); if (!profile_tier_level) { - return absl::nullopt; + return std::nullopt; } // sps_seq_parameter_set_id: ue(v) sps.sps_id = reader.ReadExponentialGolomb(); @@ -443,7 +443,7 @@ absl::optional H265SpsParser::ParseSpsInternal( // Equation A-2: Calculate max_dpb_size. uint32_t max_luma_ps = GetMaxLumaPs(profile_tier_level->general_level_idc); - uint32_t max_dpb_size; + uint32_t max_dpb_size = 0; uint32_t pic_size_in_samples_y = pic_height_in_luma_samples; pic_size_in_samples_y *= pic_width_in_luma_samples; size_t max_dpb_pic_buf = @@ -464,12 +464,12 @@ absl::optional H265SpsParser::ParseSpsInternal( uint32_t conf_win_right_offset = 0; uint32_t conf_win_top_offset = 0; uint32_t conf_win_bottom_offset = 0; - int sub_width_c = + const int sub_width_c = ((1 == sps.chroma_format_idc) || (2 == sps.chroma_format_idc)) && (0 == sps.separate_colour_plane_flag) ? 2 : 1; - int sub_height_c = + const int sub_height_c = (1 == sps.chroma_format_idc) && (0 == sps.separate_colour_plane_flag) ? 2 : 1; if (conformance_window_flag) { @@ -503,7 +503,7 @@ absl::optional H265SpsParser::ParseSpsInternal( uint32_t sps_sub_layer_ordering_info_present_flag = 0; // sps_sub_layer_ordering_info_present_flag: u(1) sps_sub_layer_ordering_info_present_flag = reader.Read(); - uint32_t sps_max_num_reorder_pics[kMaxSubLayers]; + uint32_t sps_max_num_reorder_pics[kMaxSubLayers] = {}; for (uint32_t i = (sps_sub_layer_ordering_info_present_flag != 0) ? 
0 : sps_max_sub_layers_minus1; @@ -552,8 +552,8 @@ absl::optional H265SpsParser::ParseSpsInternal( // log2_min_luma_transform_block_size_minus2: ue(v) int log2_min_luma_transform_block_size_minus2 = reader.ReadExponentialGolomb(); - TRUE_OR_RETURN(log2_min_luma_transform_block_size_minus2 < - min_cb_log2_size_y - 2); + IN_RANGE_OR_RETURN_NULL(log2_min_luma_transform_block_size_minus2, 0, + min_cb_log2_size_y - 3); int min_tb_log2_size_y = log2_min_luma_transform_block_size_minus2 + 2; // log2_diff_max_min_luma_transform_block_size: ue(v) int log2_diff_max_min_luma_transform_block_size = @@ -576,7 +576,7 @@ absl::optional H265SpsParser::ParseSpsInternal( if (sps_scaling_list_data_present_flag) { // scaling_list_data() if (!ParseScalingListData(reader)) { - return absl::nullopt; + return std::nullopt; } } } @@ -626,7 +626,7 @@ absl::optional H265SpsParser::ParseSpsInternal( if (ref_pic_set) { sps.short_term_ref_pic_set[st_rps_idx] = *ref_pic_set; } else { - return absl::nullopt; + return std::nullopt; } } @@ -663,15 +663,6 @@ absl::optional H265SpsParser::ParseSpsInternal( sps.height = pic_height_in_luma_samples; if (conformance_window_flag) { - int sub_width_c = - ((1 == sps.chroma_format_idc) || (2 == sps.chroma_format_idc)) && - (0 == sps.separate_colour_plane_flag) - ? 2 - : 1; - int sub_height_c = - (1 == sps.chroma_format_idc) && (0 == sps.separate_colour_plane_flag) - ? 2 - : 1; // the offset includes the pixel within conformance window. so don't need to // +1 as per spec sps.width -= sub_width_c * (conf_win_right_offset + conf_win_left_offset); @@ -679,7 +670,7 @@ absl::optional H265SpsParser::ParseSpsInternal( } if (!reader.Ok()) { - return absl::nullopt; + return std::nullopt; } return OptionalSps(sps); diff --git a/common_video/h265/h265_sps_parser.h b/common_video/h265/h265_sps_parser.h index 2dece2b722..7830477462 100644 --- a/common_video/h265/h265_sps_parser.h +++ b/common_video/h265/h265_sps_parser.h @@ -11,11 +11,12 @@ #ifndef COMMON_VIDEO_H265_H265_SPS_PARSER_H_ #define COMMON_VIDEO_H265_H265_SPS_PARSER_H_ +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "rtc_base/bitstream_reader.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -42,39 +43,39 @@ enum H265ProfileIdc { }; // A class for parsing out sequence parameter set (SPS) data from an H265 NALU. -class H265SpsParser { +class RTC_EXPORT H265SpsParser { public: struct ProfileTierLevel { ProfileTierLevel(); // Syntax elements. - int general_profile_idc; - int general_level_idc; // 30x the actual level. - uint32_t general_profile_compatibility_flags; - bool general_progressive_source_flag; - bool general_interlaced_source_flag; - bool general_non_packed_constraint_flag; - bool general_frame_only_constraint_flag; - bool general_one_picture_only_constraint_flag; + int general_profile_idc = 0; + int general_level_idc = 0; // 30x the actual level. + uint32_t general_profile_compatibility_flags = 0; + bool general_progressive_source_flag = false; + bool general_interlaced_source_flag = false; + bool general_non_packed_constraint_flag = false; + bool general_frame_only_constraint_flag = false; + bool general_one_picture_only_constraint_flag = false; }; struct ShortTermRefPicSet { ShortTermRefPicSet(); // Syntax elements. 
- uint32_t num_negative_pics; - uint32_t num_positive_pics; - uint32_t delta_poc_s0[kMaxShortTermRefPicSets]; - uint32_t used_by_curr_pic_s0[kMaxShortTermRefPicSets]; - uint32_t delta_poc_s1[kMaxShortTermRefPicSets]; - uint32_t used_by_curr_pic_s1[kMaxShortTermRefPicSets]; + uint32_t num_negative_pics = 0; + uint32_t num_positive_pics = 0; + uint32_t delta_poc_s0[kMaxShortTermRefPicSets] = {}; + uint32_t used_by_curr_pic_s0[kMaxShortTermRefPicSets] = {}; + uint32_t delta_poc_s1[kMaxShortTermRefPicSets] = {}; + uint32_t used_by_curr_pic_s1[kMaxShortTermRefPicSets] = {}; // Calculated fields. - uint32_t num_delta_pocs; + uint32_t num_delta_pocs = 0; }; // The parsed state of the SPS. Only some select values are stored. // Add more as they are actually needed. - struct SpsState { + struct RTC_EXPORT SpsState { SpsState() = default; uint32_t sps_max_sub_layers_minus1 = 0; @@ -103,18 +104,23 @@ class H265SpsParser { }; // Unpack RBSP and parse SPS state from the supplied buffer. - static absl::optional ParseSps(const uint8_t* data, size_t length); + static std::optional ParseSps(ArrayView data); + // TODO: bugs.webrtc.org/42225170 - Deprecate. + static inline std::optional ParseSps(const uint8_t* data, + size_t length) { + return ParseSps(MakeArrayView(data, length)); + } static bool ParseScalingListData(BitstreamReader& reader); - static absl::optional ParseShortTermRefPicSet( + static std::optional ParseShortTermRefPicSet( uint32_t st_rps_idx, uint32_t num_short_term_ref_pic_sets, const std::vector& ref_pic_sets, uint32_t sps_max_dec_pic_buffering_minus1, BitstreamReader& reader); - static absl::optional ParseProfileTierLevel( + static std::optional ParseProfileTierLevel( bool profile_present, int max_num_sub_layers_minus1, BitstreamReader& reader); @@ -122,10 +128,8 @@ class H265SpsParser { protected: // Parse the SPS state, for a bit buffer where RBSP decoding has already been // performed. - static absl::optional ParseSpsInternal( - rtc::ArrayView buffer); - static bool ParseProfileTierLevel(BitstreamReader& reader, - uint32_t sps_max_sub_layers_minus1); + static std::optional ParseSpsInternal( + ArrayView buffer); // From Table A.8 - General tier and level limits. 
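The test updates in this area lean on ArrayView's implicit construction from a fixed-size array, so the old (pointer, arraysize(...)) pair disappears; a minimal sketch under that assumption, with bytes that are illustrative only and therefore expected to parse to nullopt.

#include <cstdint>
#include <optional>

#include "common_video/h265/h265_sps_parser.h"

std::optional<webrtc::H265SpsParser::SpsState> ParseSampleSps() {
  // A fixed-size array converts implicitly to ArrayView<const uint8_t>,
  // carrying its size, so no explicit length argument is needed.
  static const uint8_t kSps[] = {0x42, 0x01, 0x01};  // Illustrative bytes only.
  return webrtc::H265SpsParser::ParseSps(kSps);
}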
static int GetMaxLumaPs(int general_level_idc); diff --git a/common_video/h265/h265_sps_parser_unittest.cc b/common_video/h265/h265_sps_parser_unittest.cc index 26af4b1170..c51b7ab1c2 100644 --- a/common_video/h265/h265_sps_parser_unittest.cc +++ b/common_video/h265/h265_sps_parser_unittest.cc @@ -34,9 +34,9 @@ void WriteSps(uint16_t width, uint32_t max_num_sublayer_minus1, bool sub_layer_ordering_info_present_flag, bool long_term_ref_pics_present_flag, - rtc::Buffer* out_buffer) { + Buffer* out_buffer) { uint8_t rbsp[kSpsBufferMaxSize] = {0}; - rtc::BitBufferWriter writer(rbsp, kSpsBufferMaxSize); + BitBufferWriter writer(rbsp, kSpsBufferMaxSize); // sps_video_parameter_set_id writer.WriteBits(0, 4); // sps_max_sub_layers_minus1 @@ -365,7 +365,7 @@ void WriteSps(uint16_t width, } out_buffer->Clear(); - H265::WriteRbsp(rbsp, byte_count, out_buffer); + H265::WriteRbsp(MakeArrayView(rbsp, byte_count), out_buffer); } class H265SpsParserTest : public ::testing::Test { @@ -389,8 +389,7 @@ TEST_F(H265SpsParserTest, TestSampleSPSHdLandscape) { 0x02, 0x80, 0x80, 0x2d, 0x16, 0x59, 0x59, 0xa4, 0x93, 0x2b, 0x80, 0x40, 0x00, 0x00, 0x03, 0x00, 0x40, 0x00, 0x00, 0x07, 0x82}; - absl::optional sps = - H265SpsParser::ParseSps(buffer, arraysize(buffer)); + std::optional sps = H265SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(1280u, sps->width); EXPECT_EQ(720u, sps->height); @@ -418,8 +417,7 @@ TEST_F(H265SpsParserTest, TestSampleSPSVerticalCropLandscape) { 0x05, 0x02, 0x01, 0x09, 0xf2, 0xe5, 0x95, 0x9a, 0x49, 0x32, 0xb8, 0x04, 0x00, 0x00, 0x03, 0x00, 0x04, 0x00, 0x00, 0x03, 0x00, 0x78, 0x20}; - absl::optional sps = - H265SpsParser::ParseSps(buffer, arraysize(buffer)); + std::optional sps = H265SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(640u, sps->width); EXPECT_EQ(260u, sps->height); @@ -446,18 +444,16 @@ TEST_F(H265SpsParserTest, TestSampleSPSHorizontalAndVerticalCrop) { 0x08, 0x48, 0x04, 0x27, 0x72, 0xe5, 0x95, 0x9a, 0x49, 0x32, 0xb8, 0x04, 0x00, 0x00, 0x03, 0x00, 0x04, 0x00, 0x00, 0x03, 0x00, 0x78, 0x20}; - absl::optional sps = - H265SpsParser::ParseSps(buffer, arraysize(buffer)); + std::optional sps = H265SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(260u, sps->width); EXPECT_EQ(260u, sps->height); } TEST_F(H265SpsParserTest, TestSyntheticSPSQvgaLandscape) { - rtc::Buffer buffer; + Buffer buffer; WriteSps(320u, 180u, 1, 0, 1, 0, &buffer); - absl::optional sps = - H265SpsParser::ParseSps(buffer.data(), buffer.size()); + std::optional sps = H265SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(320u, sps->width); EXPECT_EQ(180u, sps->height); @@ -465,10 +461,9 @@ TEST_F(H265SpsParserTest, TestSyntheticSPSQvgaLandscape) { } TEST_F(H265SpsParserTest, TestSyntheticSPSWeirdResolution) { - rtc::Buffer buffer; + Buffer buffer; WriteSps(156u, 122u, 2, 0, 1, 0, &buffer); - absl::optional sps = - H265SpsParser::ParseSps(buffer.data(), buffer.size()); + std::optional sps = H265SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(156u, sps->width); EXPECT_EQ(122u, sps->height); @@ -476,10 +471,9 @@ TEST_F(H265SpsParserTest, TestSyntheticSPSWeirdResolution) { } TEST_F(H265SpsParserTest, TestLog2MaxSubLayersMinus1) { - rtc::Buffer buffer; + Buffer buffer; WriteSps(320u, 180u, 1, 0, 1, 0, &buffer); - absl::optional sps = - H265SpsParser::ParseSps(buffer.data(), buffer.size()); + std::optional sps = H265SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(320u, sps->width); EXPECT_EQ(180u, 
sps->height); @@ -487,8 +481,7 @@ TEST_F(H265SpsParserTest, TestLog2MaxSubLayersMinus1) { EXPECT_EQ(0u, sps->sps_max_sub_layers_minus1); WriteSps(320u, 180u, 1, 6, 1, 0, &buffer); - absl::optional sps1 = - H265SpsParser::ParseSps(buffer.data(), buffer.size()); + std::optional sps1 = H265SpsParser::ParseSps(buffer); ASSERT_TRUE(sps1.has_value()); EXPECT_EQ(320u, sps1->width); EXPECT_EQ(180u, sps1->height); @@ -496,16 +489,15 @@ TEST_F(H265SpsParserTest, TestLog2MaxSubLayersMinus1) { EXPECT_EQ(6u, sps1->sps_max_sub_layers_minus1); WriteSps(320u, 180u, 1, 7, 1, 0, &buffer); - absl::optional result = - H265SpsParser::ParseSps(buffer.data(), buffer.size()); + std::optional result = + H265SpsParser::ParseSps(buffer); EXPECT_FALSE(result.has_value()); } TEST_F(H265SpsParserTest, TestSubLayerOrderingInfoPresentFlag) { - rtc::Buffer buffer; + Buffer buffer; WriteSps(320u, 180u, 1, 6, 1, 0, &buffer); - absl::optional sps = - H265SpsParser::ParseSps(buffer.data(), buffer.size()); + std::optional sps = H265SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(320u, sps->width); EXPECT_EQ(180u, sps->height); @@ -513,8 +505,7 @@ TEST_F(H265SpsParserTest, TestSubLayerOrderingInfoPresentFlag) { EXPECT_EQ(6u, sps->sps_max_sub_layers_minus1); WriteSps(320u, 180u, 1, 6, 1, 0, &buffer); - absl::optional sps1 = - H265SpsParser::ParseSps(buffer.data(), buffer.size()); + std::optional sps1 = H265SpsParser::ParseSps(buffer); ASSERT_TRUE(sps1.has_value()); EXPECT_EQ(320u, sps1->width); EXPECT_EQ(180u, sps1->height); @@ -523,10 +514,9 @@ TEST_F(H265SpsParserTest, TestSubLayerOrderingInfoPresentFlag) { } TEST_F(H265SpsParserTest, TestLongTermRefPicsPresentFlag) { - rtc::Buffer buffer; + Buffer buffer; WriteSps(320u, 180u, 1, 0, 1, 0, &buffer); - absl::optional sps = - H265SpsParser::ParseSps(buffer.data(), buffer.size()); + std::optional sps = H265SpsParser::ParseSps(buffer); ASSERT_TRUE(sps.has_value()); EXPECT_EQ(320u, sps->width); EXPECT_EQ(180u, sps->height); @@ -534,8 +524,7 @@ TEST_F(H265SpsParserTest, TestLongTermRefPicsPresentFlag) { EXPECT_EQ(0u, sps->long_term_ref_pics_present_flag); WriteSps(320u, 180u, 1, 6, 1, 1, &buffer); - absl::optional sps1 = - H265SpsParser::ParseSps(buffer.data(), buffer.size()); + std::optional sps1 = H265SpsParser::ParseSps(buffer); ASSERT_TRUE(sps1.has_value()); EXPECT_EQ(320u, sps1->width); EXPECT_EQ(180u, sps1->height); diff --git a/common_video/h265/h265_vps_parser.cc b/common_video/h265/h265_vps_parser.cc index 16b967dad4..5b3fc6b12f 100644 --- a/common_video/h265/h265_vps_parser.cc +++ b/common_video/h265/h265_vps_parser.cc @@ -24,15 +24,13 @@ H265VpsParser::VpsState::VpsState() = default; // http://www.itu.int/rec/T-REC-H.265 // Unpack RBSP and parse VPS state from the supplied buffer. 
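A short sketch of the ArrayView-based VPS parser entry point that the next hunks introduce; `vps_payload` is a hypothetical VPS NAL unit payload with the NALU header stripped.

#include <cstdint>
#include <optional>

#include "api/array_view.h"
#include "common_video/h265/h265_vps_parser.h"
#include "rtc_base/logging.h"

void LogVpsId(webrtc::ArrayView<const uint8_t> vps_payload) {
  std::optional<webrtc::H265VpsParser::VpsState> vps =
      webrtc::H265VpsParser::ParseVps(vps_payload);
  if (vps) {
    RTC_LOG(LS_INFO) << "Parsed VPS with id " << vps->id;
  }
}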
-absl::optional H265VpsParser::ParseVps( - const uint8_t* data, - size_t length) { - RTC_DCHECK(data); - return ParseInternal(H265::ParseRbsp(data, length)); +std::optional H265VpsParser::ParseVps( + ArrayView data) { + return ParseInternal(H265::ParseRbsp(data)); } -absl::optional H265VpsParser::ParseInternal( - rtc::ArrayView buffer) { +std::optional H265VpsParser::ParseInternal( + ArrayView buffer) { BitstreamReader reader(buffer); // Now, we need to use a bit buffer to parse through the actual H265 VPS @@ -44,7 +42,7 @@ absl::optional H265VpsParser::ParseInternal( vps.id = reader.ReadBits(4); if (!reader.Ok()) { - return absl::nullopt; + return std::nullopt; } return vps; diff --git a/common_video/h265/h265_vps_parser.h b/common_video/h265/h265_vps_parser.h index e391d47401..619ae2e211 100644 --- a/common_video/h265/h265_vps_parser.h +++ b/common_video/h265/h265_vps_parser.h @@ -11,30 +11,36 @@ #ifndef COMMON_VIDEO_H265_H265_VPS_PARSER_H_ #define COMMON_VIDEO_H265_H265_VPS_PARSER_H_ -#include "absl/types/optional.h" +#include + #include "api/array_view.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { // A class for parsing out video parameter set (VPS) data from an H265 NALU. -class H265VpsParser { +class RTC_EXPORT H265VpsParser { public: // The parsed state of the VPS. Only some select values are stored. // Add more as they are actually needed. - struct VpsState { + struct RTC_EXPORT VpsState { VpsState(); uint32_t id = 0; }; // Unpack RBSP and parse VPS state from the supplied buffer. - static absl::optional ParseVps(const uint8_t* data, size_t length); + static std::optional ParseVps(ArrayView data); + // TODO: bugs.webrtc.org/42225170 - Deprecate. + static inline std::optional ParseVps(const uint8_t* data, + size_t length) { + return ParseVps(MakeArrayView(data, length)); + } protected: // Parse the VPS state, for a bit buffer where RBSP decoding has already been // performed. - static absl::optional ParseInternal( - rtc::ArrayView buffer); + static std::optional ParseInternal(ArrayView buffer); }; } // namespace webrtc diff --git a/common_video/h265/h265_vps_parser_unittest.cc b/common_video/h265/h265_vps_parser_unittest.cc index 24e8a77154..fd2e8a8f52 100644 --- a/common_video/h265/h265_vps_parser_unittest.cc +++ b/common_video/h265/h265_vps_parser_unittest.cc @@ -32,7 +32,7 @@ class H265VpsParserTest : public ::testing::Test { H265VpsParserTest() {} ~H265VpsParserTest() override {} - absl::optional vps_; + std::optional vps_; }; TEST_F(H265VpsParserTest, TestSampleVPSId) { @@ -41,8 +41,7 @@ TEST_F(H265VpsParserTest, TestSampleVPSId) { 0x1c, 0x01, 0xff, 0xff, 0x04, 0x08, 0x00, 0x00, 0x03, 0x00, 0x9d, 0x08, 0x00, 0x00, 0x03, 0x00, 0x00, 0x78, 0x95, 0x98, 0x09, }; - EXPECT_TRUE(static_cast( - vps_ = H265VpsParser::ParseVps(buffer, arraysize(buffer)))); + EXPECT_TRUE(static_cast(vps_ = H265VpsParser::ParseVps(buffer))); EXPECT_EQ(1u, vps_->id); } diff --git a/common_video/include/bitrate_adjuster.h b/common_video/include/bitrate_adjuster.h index 4b208307a1..966c35dd7e 100644 --- a/common_video/include/bitrate_adjuster.h +++ b/common_video/include/bitrate_adjuster.h @@ -14,7 +14,8 @@ #include #include -#include "absl/types/optional.h" +#include + #include "rtc_base/rate_statistics.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/rtc_export.h" @@ -48,7 +49,7 @@ class RTC_EXPORT BitrateAdjuster { uint32_t GetAdjustedBitrateBps() const; // Returns what we think the current bitrate is. 
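Since GetEstimatedBitrateBps() now returns a std::optional, callers need an explicit has-value check; a hedged sketch in which the adjuster is assumed to be already configured and the value type is taken from the existing accessor via auto.

#include <optional>

#include "common_video/include/bitrate_adjuster.h"
#include "rtc_base/logging.h"

void LogBitrateEstimate(webrtc::BitrateAdjuster& adjuster) {
  if (auto estimate_bps = adjuster.GetEstimatedBitrateBps()) {
    RTC_LOG(LS_INFO) << "Estimated bitrate: " << *estimate_bps << " bps";
  } else {
    RTC_LOG(LS_INFO) << "No frames observed yet; no estimate available.";
  }
}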
- absl::optional GetEstimatedBitrateBps(); + std::optional GetEstimatedBitrateBps(); // This should be called after each frame is encoded. The timestamp at which // it is called is used to estimate the output bitrate of the encoder. diff --git a/common_video/include/corruption_score_calculator.h b/common_video/include/corruption_score_calculator.h new file mode 100644 index 0000000000..77ea5335f0 --- /dev/null +++ b/common_video/include/corruption_score_calculator.h @@ -0,0 +1,34 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef COMMON_VIDEO_INCLUDE_CORRUPTION_SCORE_CALCULATOR_H_ +#define COMMON_VIDEO_INCLUDE_CORRUPTION_SCORE_CALCULATOR_H_ + +#include + +#include "api/video/video_frame.h" +#include "common_video/frame_instrumentation_data.h" + +namespace webrtc { + +// Allow classes to have their own implementations of how to calculate a score +// for automatic corruption detection. +class CorruptionScoreCalculator { + public: + virtual ~CorruptionScoreCalculator() = default; + + virtual std::optional CalculateCorruptionScore( + const VideoFrame& frame, + const FrameInstrumentationData& frame_instrumentation_data) = 0; +}; + +} // namespace webrtc + +#endif // COMMON_VIDEO_INCLUDE_CORRUPTION_SCORE_CALCULATOR_H_ diff --git a/common_video/include/video_frame_buffer.h b/common_video/include/video_frame_buffer.h index 1f6331b94d..e0c6dbd858 100644 --- a/common_video/include/video_frame_buffer.h +++ b/common_video/include/video_frame_buffer.h @@ -20,7 +20,7 @@ namespace webrtc { -rtc::scoped_refptr WrapI420Buffer( +scoped_refptr WrapI420Buffer( int width, int height, const uint8_t* y_plane, @@ -31,7 +31,7 @@ rtc::scoped_refptr WrapI420Buffer( int v_stride, std::function no_longer_used); -rtc::scoped_refptr WrapI422Buffer( +scoped_refptr WrapI422Buffer( int width, int height, const uint8_t* y_plane, @@ -42,7 +42,7 @@ rtc::scoped_refptr WrapI422Buffer( int v_stride, std::function no_longer_used); -rtc::scoped_refptr WrapI444Buffer( +scoped_refptr WrapI444Buffer( int width, int height, const uint8_t* y_plane, @@ -53,7 +53,7 @@ rtc::scoped_refptr WrapI444Buffer( int v_stride, std::function no_longer_used); -rtc::scoped_refptr WrapI420ABuffer( +scoped_refptr WrapI420ABuffer( int width, int height, const uint8_t* y_plane, @@ -66,7 +66,7 @@ rtc::scoped_refptr WrapI420ABuffer( int a_stride, std::function no_longer_used); -rtc::scoped_refptr WrapYuvBuffer( +scoped_refptr WrapYuvBuffer( VideoFrameBuffer::Type type, int width, int height, @@ -78,7 +78,7 @@ rtc::scoped_refptr WrapYuvBuffer( int v_stride, std::function no_longer_used); -rtc::scoped_refptr WrapI010Buffer( +scoped_refptr WrapI010Buffer( int width, int height, const uint16_t* y_plane, @@ -89,7 +89,7 @@ rtc::scoped_refptr WrapI010Buffer( int v_stride, std::function no_longer_used); -rtc::scoped_refptr WrapI210Buffer( +scoped_refptr WrapI210Buffer( int width, int height, const uint16_t* y_plane, @@ -100,7 +100,7 @@ rtc::scoped_refptr WrapI210Buffer( int v_stride, std::function no_longer_used); -rtc::scoped_refptr WrapI410Buffer( +scoped_refptr WrapI410Buffer( int width, int height, const uint16_t* y_plane, diff --git a/common_video/include/video_frame_buffer_pool.h 
b/common_video/include/video_frame_buffer_pool.h
index 3d94bc5669..2eabe910ea 100644
--- a/common_video/include/video_frame_buffer_pool.h
+++ b/common_video/include/video_frame_buffer_pool.h
@@ -46,13 +46,13 @@ class VideoFrameBufferPool {
   // Returns a buffer from the pool. If no suitable buffer exist in the pool
   // and there are less than `max_number_of_buffers` pending, a buffer is
   // created. Returns null otherwise.
-  rtc::scoped_refptr<I420Buffer> CreateI420Buffer(int width, int height);
-  rtc::scoped_refptr<I422Buffer> CreateI422Buffer(int width, int height);
-  rtc::scoped_refptr<I444Buffer> CreateI444Buffer(int width, int height);
-  rtc::scoped_refptr<I010Buffer> CreateI010Buffer(int width, int height);
-  rtc::scoped_refptr<I210Buffer> CreateI210Buffer(int width, int height);
-  rtc::scoped_refptr<I410Buffer> CreateI410Buffer(int width, int height);
-  rtc::scoped_refptr<NV12Buffer> CreateNV12Buffer(int width, int height);
+  scoped_refptr<I420Buffer> CreateI420Buffer(int width, int height);
+  scoped_refptr<I422Buffer> CreateI422Buffer(int width, int height);
+  scoped_refptr<I444Buffer> CreateI444Buffer(int width, int height);
+  scoped_refptr<I010Buffer> CreateI010Buffer(int width, int height);
+  scoped_refptr<I210Buffer> CreateI210Buffer(int width, int height);
+  scoped_refptr<I410Buffer> CreateI410Buffer(int width, int height);
+  scoped_refptr<NV12Buffer> CreateNV12Buffer(int width, int height);
 
   // Changes the max amount of buffers in the pool to the new value.
   // Returns true if change was successful and false if the amount of already
@@ -64,11 +64,11 @@ class VideoFrameBufferPool {
   void Release();
 
  private:
-  rtc::scoped_refptr<VideoFrameBuffer>
+  scoped_refptr<VideoFrameBuffer>
   GetExistingBuffer(int width, int height, VideoFrameBuffer::Type type);
 
-  rtc::RaceChecker race_checker_;
-  std::list<rtc::scoped_refptr<VideoFrameBuffer>> buffers_;
+  RaceChecker race_checker_;
+  std::list<scoped_refptr<VideoFrameBuffer>> buffers_;
   // If true, newly allocated buffers are zero-initialized. Note that recycled
   // buffers are not zero'd before reuse. This is required of buffers used by
   // FFmpeg according to http://crbug.com/390941, which only requires it for the
diff --git a/common_video/libyuv/include/webrtc_libyuv.h b/common_video/libyuv/include/webrtc_libyuv.h
index 68831c70ab..7d504bcb04 100644
--- a/common_video/libyuv/include/webrtc_libyuv.h
+++ b/common_video/libyuv/include/webrtc_libyuv.h
@@ -64,7 +64,7 @@ size_t CalcBufferSize(VideoType type, int width, int height);
 // insufficient, an error will be returned.
 //    - buffer : Pointer to buffer
 // Return value: length of buffer if OK, < 0 otherwise.
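A short usage sketch for the pool API declared above, not part of the patch. Only methods visible in this diff (`CreateI420Buffer()`, `Resize()`, `Release()`) are used; the default constructor, the `webrtc::scoped_refptr` spelling, and the function name below are assumptions for illustration.

#include "api/scoped_refptr.h"
#include "api/video/i420_buffer.h"
#include "common_video/include/video_frame_buffer_pool.h"

// Hypothetical encode loop: buffers are recycled once the consumer drops its
// reference, so steady-state allocation stops after the first few frames.
void EncodeLoopSketch() {
  webrtc::VideoFrameBufferPool pool;         // Default constructor assumed.
  pool.Resize(/*max_number_of_buffers=*/4);  // Cap the number of live buffers.
  for (int i = 0; i < 100; ++i) {
    webrtc::scoped_refptr<webrtc::I420Buffer> buffer =
        pool.CreateI420Buffer(/*width=*/640, /*height=*/360);
    if (!buffer) {
      continue;  // Pool exhausted: every buffer is still in use downstream.
    }
    // ... write into buffer->MutableDataY()/U()/V() and hand it to an encoder.
  }
  pool.Release();  // Drop the pool's own references to all buffers.
}
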
-int ExtractBuffer(const rtc::scoped_refptr& input_frame, +int ExtractBuffer(const scoped_refptr& input_frame, size_t size, uint8_t* buffer); int ExtractBuffer(const VideoFrame& input_frame, size_t size, uint8_t* buffer); @@ -81,7 +81,7 @@ int ConvertFromI420(const VideoFrame& src_frame, int dst_sample_size, uint8_t* dst_frame); -rtc::scoped_refptr ScaleVideoFrameBuffer( +scoped_refptr ScaleVideoFrameBuffer( const I420BufferInterface& source, int dst_width, int dst_height); diff --git a/common_video/libyuv/libyuv_unittest.cc b/common_video/libyuv/libyuv_unittest.cc index f9c82f6284..fbaf67cad4 100644 --- a/common_video/libyuv/libyuv_unittest.cc +++ b/common_video/libyuv/libyuv_unittest.cc @@ -15,9 +15,16 @@ #include +#include "api/video/i010_buffer.h" +#include "api/video/i210_buffer.h" +#include "api/video/i410_buffer.h" #include "api/video/i420_buffer.h" +#include "api/video/i422_buffer.h" +#include "api/video/i444_buffer.h" +#include "api/video/nv12_buffer.h" #include "api/video/video_frame.h" #include "common_video/libyuv/include/webrtc_libyuv.h" +#include "rtc_base/logging.h" #include "test/frame_utils.h" #include "test/gmock.h" #include "test/gtest.h" @@ -96,7 +103,7 @@ void TestLibYuv::SetUp() { ASSERT_TRUE(source_file_ != NULL) << "Cannot read file: " << input_file_name << "\n"; - rtc::scoped_refptr buffer( + scoped_refptr buffer( test::ReadI420Buffer(width_, height_, source_file_)); orig_frame_ = @@ -124,7 +131,7 @@ TEST_F(TestLibYuv, ConvertTest) { double psnr = 0.0; - rtc::scoped_refptr res_i420_buffer = + scoped_refptr res_i420_buffer = I420Buffer::Create(width_, height_); printf("\nConvert #%d I420 <-> I420 \n", j); @@ -294,7 +301,7 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) { int stride_uv = 0; Calc16ByteAlignedStride(width_, &stride_y, &stride_uv); - rtc::scoped_refptr res_i420_buffer = + scoped_refptr res_i420_buffer = I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv); std::unique_ptr out_i420_buffer(new uint8_t[frame_length_]); EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kI420, 0, @@ -367,13 +374,13 @@ TEST_F(TestLibYuv, NV12Scale4x4to2x2) { TEST(I420WeightedPSNRTest, SmokeTest) { uint8_t ref_y[] = {0, 0, 0, 0}; uint8_t ref_uv[] = {0}; - rtc::scoped_refptr ref_buffer = + scoped_refptr ref_buffer = I420Buffer::Copy(/*width=*/2, /*height=*/2, ref_y, /*stride_y=*/2, ref_uv, /*stride_u=*/1, ref_uv, /*stride_v=*/1); uint8_t test_y[] = {1, 1, 1, 1}; uint8_t test_uv[] = {2}; - rtc::scoped_refptr test_buffer = I420Buffer::Copy( + scoped_refptr test_buffer = I420Buffer::Copy( /*width=*/2, /*height=*/2, test_y, /*stride_y=*/2, test_uv, /*stride_u=*/1, test_uv, /*stride_v=*/1); @@ -383,4 +390,101 @@ TEST(I420WeightedPSNRTest, SmokeTest) { /*abs_error=*/0.001); } +#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) +// Check that we catch int overflow if invalid dimensions get passed to +// `I420Buffer::Create()`. +TEST_F(TestLibYuv, I420DimensionsTooLarge) { + // Dimensions large enough to cause overflow. + constexpr int kWidth = 0xFFFF; + constexpr int kHeight = 0xAAB0; + // Sanity check for this test. This calculation, which is part of what + // `I420Buffer::Create()` will do, should cause an `int` overflow. 
+ static_assert( + (int64_t{kWidth} * int64_t{kHeight}) > std::numeric_limits::max(), + ""); + + EXPECT_DEATH(I010Buffer::Create(kWidth, kHeight), + "IsValueInRangeForNumericType"); + EXPECT_DEATH(I210Buffer::Create(kWidth, kHeight), + "IsValueInRangeForNumericType"); + + int stride_uv = (kWidth + 1) / 2; + EXPECT_DEATH(I410Buffer::Create(kWidth, kHeight, /*stride_y=*/kWidth, + stride_uv, stride_uv), + "IsValueInRangeForNumericType"); + EXPECT_DEATH(I420Buffer::Create(kWidth, kHeight, /*stride_y=*/kWidth, + stride_uv, stride_uv), + "IsValueInRangeForNumericType"); + EXPECT_DEATH(I422Buffer::Create(kWidth, kHeight, /*stride_y=*/kWidth, + stride_uv, stride_uv), + "IsValueInRangeForNumericType"); + EXPECT_DEATH(I444Buffer::Create(kWidth, kHeight, /*stride_y=*/kWidth, + stride_uv, stride_uv), + "IsValueInRangeForNumericType"); + EXPECT_DEATH( + NV12Buffer::Create(kWidth, kHeight, /*stride_y=*/kWidth, stride_uv), + "IsValueInRangeForNumericType"); +} + +template +void TestInvalidDimensions5Params() { + EXPECT_DEATH(T::Create(-11, 1, /*stride_y=*/1, + /*stride_u=*/1, + /*stride_v=*/1), + "> 0"); + EXPECT_DEATH(T::Create(1, -11, /*stride_y=*/1, + /*stride_u=*/1, + /*stride_v=*/1), + "> 0"); + EXPECT_DEATH(T::Create(1, 1, /*stride_y=*/-12, + /*stride_u=*/1, + /*stride_v=*/1), + ">= width"); + EXPECT_DEATH(T::Create(1, 1, /*stride_y=*/1, + /*stride_u=*/-12, + /*stride_v=*/1), + "> 0"); + EXPECT_DEATH(T::Create(1, 1, /*stride_y=*/1, + /*stride_u=*/1, + /*stride_v=*/-12), + "> 0"); +} + +template +void TestInvalidDimensions4Params() { + EXPECT_DEATH(T::Create(-11, 1, /*stride_y=*/1, + /*stride_uv=*/1), + "> 0"); + EXPECT_DEATH(T::Create(1, -11, /*stride_y=*/1, + /*stride_uv=*/1), + "> 0"); + EXPECT_DEATH(T::Create(1, 1, /*stride_y=*/-12, + /*stride_uv=*/1), + ">= width"); + EXPECT_DEATH(T::Create(1, 1, /*stride_y=*/1, + /*stride_uv=*/-12), + "> 0"); +} + +template +void TestInvalidDimensions2Param() { + EXPECT_DEATH(T::Create(-11, 1), "> 0"); + EXPECT_DEATH(T::Create(1, -11), "> 0"); +} + +TEST_F(TestLibYuv, I420InvalidDimensions) { + // Only width and height provided to `Create()`. + TestInvalidDimensions2Param(); + TestInvalidDimensions2Param(); + // `Create() is provided with width, height, y, u, v. + TestInvalidDimensions5Params(); + TestInvalidDimensions5Params(); + TestInvalidDimensions5Params(); + TestInvalidDimensions5Params(); + // `Create() is provided with width, height, y, u_and_v. 
+ TestInvalidDimensions4Params(); +} + +#endif // GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) + } // namespace webrtc diff --git a/common_video/libyuv/webrtc_libyuv.cc b/common_video/libyuv/webrtc_libyuv.cc index 31ba1feca3..8524bcbb11 100644 --- a/common_video/libyuv/webrtc_libyuv.cc +++ b/common_video/libyuv/webrtc_libyuv.cc @@ -50,7 +50,7 @@ size_t CalcBufferSize(VideoType type, int width, int height) { return 0; } -int ExtractBuffer(const rtc::scoped_refptr& input_frame, +int ExtractBuffer(const scoped_refptr& input_frame, size_t size, uint8_t* buffer) { RTC_DCHECK(buffer); @@ -119,7 +119,7 @@ int ConvertFromI420(const VideoFrame& src_frame, VideoType dst_video_type, int dst_sample_size, uint8_t* dst_frame) { - rtc::scoped_refptr i420_buffer = + scoped_refptr i420_buffer = src_frame.video_frame_buffer()->ToI420(); return libyuv::ConvertFromI420( i420_buffer->DataY(), i420_buffer->StrideY(), i420_buffer->DataU(), @@ -128,20 +128,20 @@ int ConvertFromI420(const VideoFrame& src_frame, ConvertVideoType(dst_video_type)); } -rtc::scoped_refptr ScaleI420ABuffer( +scoped_refptr ScaleI420ABuffer( const I420ABufferInterface& buffer, int target_width, int target_height) { - rtc::scoped_refptr yuv_buffer = + scoped_refptr yuv_buffer = I420Buffer::Create(target_width, target_height); yuv_buffer->ScaleFrom(buffer); - rtc::scoped_refptr axx_buffer = + scoped_refptr axx_buffer = I420Buffer::Create(target_width, target_height); libyuv::ScalePlane(buffer.DataA(), buffer.StrideA(), buffer.width(), buffer.height(), axx_buffer->MutableDataY(), axx_buffer->StrideY(), target_width, target_height, libyuv::kFilterBox); - rtc::scoped_refptr merged_buffer = WrapI420ABuffer( + scoped_refptr merged_buffer = WrapI420ABuffer( yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(), yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(), yuv_buffer->DataV(), yuv_buffer->StrideV(), axx_buffer->DataY(), @@ -151,11 +151,11 @@ rtc::scoped_refptr ScaleI420ABuffer( return merged_buffer; } -rtc::scoped_refptr ScaleVideoFrameBuffer( +scoped_refptr ScaleVideoFrameBuffer( const I420BufferInterface& source, int dst_width, int dst_height) { - rtc::scoped_refptr scaled_buffer = + scoped_refptr scaled_buffer = I420Buffer::Create(dst_width, dst_height); scaled_buffer->ScaleFrom(source); return scaled_buffer; @@ -190,7 +190,7 @@ double I420APSNR(const I420ABufferInterface& ref_buffer, RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height()); if ((ref_buffer.width() != test_buffer.width()) || (ref_buffer.height() != test_buffer.height())) { - rtc::scoped_refptr scaled_buffer = + scoped_refptr scaled_buffer = ScaleI420ABuffer(test_buffer, ref_buffer.width(), ref_buffer.height()); return I420APSNR(ref_buffer, *scaled_buffer); } @@ -236,7 +236,7 @@ double I420PSNR(const I420BufferInterface& ref_buffer, RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height()); if ((ref_buffer.width() != test_buffer.width()) || (ref_buffer.height() != test_buffer.height())) { - rtc::scoped_refptr scaled_buffer = + scoped_refptr scaled_buffer = I420Buffer::Create(ref_buffer.width(), ref_buffer.height()); scaled_buffer->ScaleFrom(test_buffer); return I420PSNR(ref_buffer, *scaled_buffer); @@ -266,7 +266,7 @@ double I420WeightedPSNR(const I420BufferInterface& ref_buffer, RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height()); if ((ref_buffer.width() != test_buffer.width()) || (ref_buffer.height() != test_buffer.height())) { - rtc::scoped_refptr scaled_ref_buffer = + scoped_refptr scaled_ref_buffer = 
I420Buffer::Create(test_buffer.width(), test_buffer.height()); scaled_ref_buffer->ScaleFrom(ref_buffer); return I420WeightedPSNR(*scaled_ref_buffer, test_buffer); @@ -306,7 +306,7 @@ double I420ASSIM(const I420ABufferInterface& ref_buffer, RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height()); if ((ref_buffer.width() != test_buffer.width()) || (ref_buffer.height() != test_buffer.height())) { - rtc::scoped_refptr scaled_buffer = + scoped_refptr scaled_buffer = ScaleI420ABuffer(test_buffer, ref_buffer.width(), ref_buffer.height()); return I420ASSIM(ref_buffer, *scaled_buffer); } @@ -341,7 +341,7 @@ double I420SSIM(const I420BufferInterface& ref_buffer, RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height()); if ((ref_buffer.width() != test_buffer.width()) || (ref_buffer.height() != test_buffer.height())) { - rtc::scoped_refptr scaled_buffer = + scoped_refptr scaled_buffer = I420Buffer::Create(ref_buffer.width(), ref_buffer.height()); scaled_buffer->ScaleFrom(test_buffer); return I420SSIM(ref_buffer, *scaled_buffer); diff --git a/common_video/video_frame_buffer.cc b/common_video/video_frame_buffer.cc index ca2916e580..e88cd0f3f0 100644 --- a/common_video/video_frame_buffer.cc +++ b/common_video/video_frame_buffer.cc @@ -60,7 +60,7 @@ class WrappedYuvBuffer : public Base { int StrideV() const override { return v_stride_; } private: - friend class rtc::RefCountedObject; + friend class RefCountedObject; const int width_; const int height_; @@ -110,12 +110,11 @@ class WrappedYuvaBuffer : public WrappedYuvBuffer { class I444BufferBase : public I444BufferInterface { public: - rtc::scoped_refptr ToI420() final; + scoped_refptr ToI420() final; }; -rtc::scoped_refptr I444BufferBase::ToI420() { - rtc::scoped_refptr i420_buffer = - I420Buffer::Create(width(), height()); +scoped_refptr I444BufferBase::ToI420() { + scoped_refptr i420_buffer = I420Buffer::Create(width(), height()); libyuv::I444ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), i420_buffer->MutableDataY(), i420_buffer->StrideY(), i420_buffer->MutableDataU(), i420_buffer->StrideU(), @@ -126,12 +125,11 @@ rtc::scoped_refptr I444BufferBase::ToI420() { class I422BufferBase : public I422BufferInterface { public: - rtc::scoped_refptr ToI420() final; + scoped_refptr ToI420() final; }; -rtc::scoped_refptr I422BufferBase::ToI420() { - rtc::scoped_refptr i420_buffer = - I420Buffer::Create(width(), height()); +scoped_refptr I422BufferBase::ToI420() { + scoped_refptr i420_buffer = I420Buffer::Create(width(), height()); libyuv::I422ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), i420_buffer->MutableDataY(), i420_buffer->StrideY(), i420_buffer->MutableDataU(), i420_buffer->StrideU(), @@ -182,7 +180,7 @@ class WrappedYuv16BBuffer : public Base { int StrideV() const override { return v_stride_; } private: - friend class rtc::RefCountedObject; + friend class RefCountedObject; const int width_; const int height_; @@ -197,12 +195,11 @@ class WrappedYuv16BBuffer : public Base { class I010BufferBase : public I010BufferInterface { public: - rtc::scoped_refptr ToI420() final; + scoped_refptr ToI420() final; }; -rtc::scoped_refptr I010BufferBase::ToI420() { - rtc::scoped_refptr i420_buffer = - I420Buffer::Create(width(), height()); +scoped_refptr I010BufferBase::ToI420() { + scoped_refptr i420_buffer = I420Buffer::Create(width(), height()); libyuv::I010ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), i420_buffer->MutableDataY(), i420_buffer->StrideY(), i420_buffer->MutableDataU(), i420_buffer->StrideU(), @@ 
-213,12 +210,11 @@ rtc::scoped_refptr I010BufferBase::ToI420() { class I210BufferBase : public I210BufferInterface { public: - rtc::scoped_refptr ToI420() final; + scoped_refptr ToI420() final; }; -rtc::scoped_refptr I210BufferBase::ToI420() { - rtc::scoped_refptr i420_buffer = - I420Buffer::Create(width(), height()); +scoped_refptr I210BufferBase::ToI420() { + scoped_refptr i420_buffer = I420Buffer::Create(width(), height()); libyuv::I210ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), i420_buffer->MutableDataY(), i420_buffer->StrideY(), i420_buffer->MutableDataU(), i420_buffer->StrideU(), @@ -229,12 +225,11 @@ rtc::scoped_refptr I210BufferBase::ToI420() { class I410BufferBase : public I410BufferInterface { public: - rtc::scoped_refptr ToI420() final; + scoped_refptr ToI420() final; }; -rtc::scoped_refptr I410BufferBase::ToI420() { - rtc::scoped_refptr i420_buffer = - I420Buffer::Create(width(), height()); +scoped_refptr I410BufferBase::ToI420() { + scoped_refptr i420_buffer = I420Buffer::Create(width(), height()); libyuv::I410ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), i420_buffer->MutableDataY(), i420_buffer->StrideY(), i420_buffer->MutableDataU(), i420_buffer->StrideU(), @@ -245,7 +240,7 @@ rtc::scoped_refptr I410BufferBase::ToI420() { } // namespace -rtc::scoped_refptr WrapI420Buffer( +scoped_refptr WrapI420Buffer( int width, int height, const uint8_t* y_plane, @@ -255,13 +250,13 @@ rtc::scoped_refptr WrapI420Buffer( const uint8_t* v_plane, int v_stride, std::function no_longer_used) { - return rtc::scoped_refptr( - rtc::make_ref_counted>( + return scoped_refptr( + make_ref_counted>( width, height, y_plane, y_stride, u_plane, u_stride, v_plane, v_stride, no_longer_used)); } -rtc::scoped_refptr WrapI420ABuffer( +scoped_refptr WrapI420ABuffer( int width, int height, const uint8_t* y_plane, @@ -273,13 +268,13 @@ rtc::scoped_refptr WrapI420ABuffer( const uint8_t* a_plane, int a_stride, std::function no_longer_used) { - return rtc::scoped_refptr( - rtc::make_ref_counted>( + return scoped_refptr( + make_ref_counted>( width, height, y_plane, y_stride, u_plane, u_stride, v_plane, v_stride, a_plane, a_stride, no_longer_used)); } -rtc::scoped_refptr WrapI422Buffer( +scoped_refptr WrapI422Buffer( int width, int height, const uint8_t* y_plane, @@ -289,13 +284,13 @@ rtc::scoped_refptr WrapI422Buffer( const uint8_t* v_plane, int v_stride, std::function no_longer_used) { - return rtc::scoped_refptr( - rtc::make_ref_counted>( + return scoped_refptr( + make_ref_counted>( width, height, y_plane, y_stride, u_plane, u_stride, v_plane, v_stride, no_longer_used)); } -rtc::scoped_refptr WrapI444Buffer( +scoped_refptr WrapI444Buffer( int width, int height, const uint8_t* y_plane, @@ -305,13 +300,13 @@ rtc::scoped_refptr WrapI444Buffer( const uint8_t* v_plane, int v_stride, std::function no_longer_used) { - return rtc::scoped_refptr( - rtc::make_ref_counted>( + return scoped_refptr( + make_ref_counted>( width, height, y_plane, y_stride, u_plane, u_stride, v_plane, v_stride, no_longer_used)); } -rtc::scoped_refptr WrapYuvBuffer( +scoped_refptr WrapYuvBuffer( VideoFrameBuffer::Type type, int width, int height, @@ -337,7 +332,7 @@ rtc::scoped_refptr WrapYuvBuffer( } } -rtc::scoped_refptr WrapI010Buffer( +scoped_refptr WrapI010Buffer( int width, int height, const uint16_t* y_plane, @@ -347,13 +342,13 @@ rtc::scoped_refptr WrapI010Buffer( const uint16_t* v_plane, int v_stride, std::function no_longer_used) { - return rtc::scoped_refptr( - rtc::make_ref_counted>( + return 
scoped_refptr( + make_ref_counted>( width, height, y_plane, y_stride, u_plane, u_stride, v_plane, v_stride, no_longer_used)); } -rtc::scoped_refptr WrapI210Buffer( +scoped_refptr WrapI210Buffer( int width, int height, const uint16_t* y_plane, @@ -363,13 +358,13 @@ rtc::scoped_refptr WrapI210Buffer( const uint16_t* v_plane, int v_stride, std::function no_longer_used) { - return rtc::scoped_refptr( - rtc::make_ref_counted>( + return scoped_refptr( + make_ref_counted>( width, height, y_plane, y_stride, u_plane, u_stride, v_plane, v_stride, no_longer_used)); } -rtc::scoped_refptr WrapI410Buffer( +scoped_refptr WrapI410Buffer( int width, int height, const uint16_t* y_plane, @@ -379,8 +374,8 @@ rtc::scoped_refptr WrapI410Buffer( const uint16_t* v_plane, int v_stride, std::function no_longer_used) { - return rtc::scoped_refptr( - rtc::make_ref_counted>( + return scoped_refptr( + make_ref_counted>( width, height, y_plane, y_stride, u_plane, u_stride, v_plane, v_stride, no_longer_used)); } diff --git a/common_video/video_frame_buffer_pool.cc b/common_video/video_frame_buffer_pool.cc index c0215110fd..d7349e1924 100644 --- a/common_video/video_frame_buffer_pool.cc +++ b/common_video/video_frame_buffer_pool.cc @@ -18,38 +18,38 @@ namespace webrtc { namespace { -bool HasOneRef(const rtc::scoped_refptr& buffer) { - // Cast to rtc::RefCountedObject is safe because this function is only called +bool HasOneRef(const scoped_refptr& buffer) { + // Cast to RefCountedObject is safe because this function is only called // on locally created VideoFrameBuffers, which are either - // `rtc::RefCountedObject`, `rtc::RefCountedObject` or - // `rtc::RefCountedObject`. + // `RefCountedObject`, `RefCountedObject` or + // `RefCountedObject`. switch (buffer->type()) { case VideoFrameBuffer::Type::kI420: { - return static_cast*>(buffer.get()) + return static_cast*>(buffer.get()) ->HasOneRef(); } case VideoFrameBuffer::Type::kI444: { - return static_cast*>(buffer.get()) + return static_cast*>(buffer.get()) ->HasOneRef(); } case VideoFrameBuffer::Type::kI422: { - return static_cast*>(buffer.get()) + return static_cast*>(buffer.get()) ->HasOneRef(); } case VideoFrameBuffer::Type::kI010: { - return static_cast*>(buffer.get()) + return static_cast*>(buffer.get()) ->HasOneRef(); } case VideoFrameBuffer::Type::kI210: { - return static_cast*>(buffer.get()) + return static_cast*>(buffer.get()) ->HasOneRef(); } case VideoFrameBuffer::Type::kI410: { - return static_cast*>(buffer.get()) + return static_cast*>(buffer.get()) ->HasOneRef(); } case VideoFrameBuffer::Type::kNV12: { - return static_cast*>(buffer.get()) + return static_cast*>(buffer.get()) ->HasOneRef(); } default: @@ -80,7 +80,7 @@ void VideoFrameBufferPool::Release() { bool VideoFrameBufferPool::Resize(size_t max_number_of_buffers) { RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); size_t used_buffers_count = 0; - for (const rtc::scoped_refptr& buffer : buffers_) { + for (const scoped_refptr& buffer : buffers_) { // If the buffer is in use, the ref count will be >= 2, one from the list we // are looping over and one from the application. 
If the ref count is 1, // then the list we are looping over holds the only reference and it's safe @@ -107,29 +107,28 @@ bool VideoFrameBufferPool::Resize(size_t max_number_of_buffers) { return true; } -rtc::scoped_refptr VideoFrameBufferPool::CreateI420Buffer( - int width, - int height) { +scoped_refptr VideoFrameBufferPool::CreateI420Buffer(int width, + int height) { RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - rtc::scoped_refptr existing_buffer = + scoped_refptr existing_buffer = GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI420); if (existing_buffer) { // Cast is safe because the only way kI420 buffer is created is // in the same function below, where `RefCountedObject` is // created. - rtc::RefCountedObject* raw_buffer = - static_cast*>(existing_buffer.get()); + RefCountedObject* raw_buffer = + static_cast*>(existing_buffer.get()); // Creates a new scoped_refptr, which is also pointing to the same // RefCountedObject as buffer, increasing ref count. - return rtc::scoped_refptr(raw_buffer); + return scoped_refptr(raw_buffer); } if (buffers_.size() >= max_number_of_buffers_) return nullptr; // Allocate new buffer. - rtc::scoped_refptr buffer = - rtc::make_ref_counted(width, height); + scoped_refptr buffer = + make_ref_counted(width, height); if (zero_initialize_) buffer->InitializeData(); @@ -138,29 +137,28 @@ rtc::scoped_refptr VideoFrameBufferPool::CreateI420Buffer( return buffer; } -rtc::scoped_refptr VideoFrameBufferPool::CreateI444Buffer( - int width, - int height) { +scoped_refptr VideoFrameBufferPool::CreateI444Buffer(int width, + int height) { RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - rtc::scoped_refptr existing_buffer = + scoped_refptr existing_buffer = GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI444); if (existing_buffer) { // Cast is safe because the only way kI444 buffer is created is // in the same function below, where |RefCountedObject| // is created. - rtc::RefCountedObject* raw_buffer = - static_cast*>(existing_buffer.get()); + RefCountedObject* raw_buffer = + static_cast*>(existing_buffer.get()); // Creates a new scoped_refptr, which is also pointing to the same // RefCountedObject as buffer, increasing ref count. - return rtc::scoped_refptr(raw_buffer); + return scoped_refptr(raw_buffer); } if (buffers_.size() >= max_number_of_buffers_) return nullptr; // Allocate new buffer. - rtc::scoped_refptr buffer = - rtc::make_ref_counted(width, height); + scoped_refptr buffer = + make_ref_counted(width, height); if (zero_initialize_) buffer->InitializeData(); @@ -169,29 +167,28 @@ rtc::scoped_refptr VideoFrameBufferPool::CreateI444Buffer( return buffer; } -rtc::scoped_refptr VideoFrameBufferPool::CreateI422Buffer( - int width, - int height) { +scoped_refptr VideoFrameBufferPool::CreateI422Buffer(int width, + int height) { RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - rtc::scoped_refptr existing_buffer = + scoped_refptr existing_buffer = GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI422); if (existing_buffer) { // Cast is safe because the only way kI422 buffer is created is // in the same function below, where |RefCountedObject| // is created. - rtc::RefCountedObject* raw_buffer = - static_cast*>(existing_buffer.get()); + RefCountedObject* raw_buffer = + static_cast*>(existing_buffer.get()); // Creates a new scoped_refptr, which is also pointing to the same // RefCountedObject as buffer, increasing ref count. 
- return rtc::scoped_refptr(raw_buffer); + return scoped_refptr(raw_buffer); } if (buffers_.size() >= max_number_of_buffers_) return nullptr; // Allocate new buffer. - rtc::scoped_refptr buffer = - rtc::make_ref_counted(width, height); + scoped_refptr buffer = + make_ref_counted(width, height); if (zero_initialize_) buffer->InitializeData(); @@ -200,29 +197,28 @@ rtc::scoped_refptr VideoFrameBufferPool::CreateI422Buffer( return buffer; } -rtc::scoped_refptr VideoFrameBufferPool::CreateNV12Buffer( - int width, - int height) { +scoped_refptr VideoFrameBufferPool::CreateNV12Buffer(int width, + int height) { RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - rtc::scoped_refptr existing_buffer = + scoped_refptr existing_buffer = GetExistingBuffer(width, height, VideoFrameBuffer::Type::kNV12); if (existing_buffer) { // Cast is safe because the only way kI420 buffer is created is // in the same function below, where `RefCountedObject` is // created. - rtc::RefCountedObject* raw_buffer = - static_cast*>(existing_buffer.get()); + RefCountedObject* raw_buffer = + static_cast*>(existing_buffer.get()); // Creates a new scoped_refptr, which is also pointing to the same // RefCountedObject as buffer, increasing ref count. - return rtc::scoped_refptr(raw_buffer); + return scoped_refptr(raw_buffer); } if (buffers_.size() >= max_number_of_buffers_) return nullptr; // Allocate new buffer. - rtc::scoped_refptr buffer = - rtc::make_ref_counted(width, height); + scoped_refptr buffer = + make_ref_counted(width, height); if (zero_initialize_) buffer->InitializeData(); @@ -231,88 +227,85 @@ rtc::scoped_refptr VideoFrameBufferPool::CreateNV12Buffer( return buffer; } -rtc::scoped_refptr VideoFrameBufferPool::CreateI010Buffer( - int width, - int height) { +scoped_refptr VideoFrameBufferPool::CreateI010Buffer(int width, + int height) { RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - rtc::scoped_refptr existing_buffer = + scoped_refptr existing_buffer = GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI010); if (existing_buffer) { // Cast is safe because the only way kI010 buffer is created is // in the same function below, where |RefCountedObject| // is created. - rtc::RefCountedObject* raw_buffer = - static_cast*>(existing_buffer.get()); + RefCountedObject* raw_buffer = + static_cast*>(existing_buffer.get()); // Creates a new scoped_refptr, which is also pointing to the same // RefCountedObject as buffer, increasing ref count. - return rtc::scoped_refptr(raw_buffer); + return scoped_refptr(raw_buffer); } if (buffers_.size() >= max_number_of_buffers_) return nullptr; // Allocate new buffer. - rtc::scoped_refptr buffer = I010Buffer::Create(width, height); + scoped_refptr buffer = I010Buffer::Create(width, height); buffers_.push_back(buffer); return buffer; } -rtc::scoped_refptr VideoFrameBufferPool::CreateI210Buffer( - int width, - int height) { +scoped_refptr VideoFrameBufferPool::CreateI210Buffer(int width, + int height) { RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - rtc::scoped_refptr existing_buffer = + scoped_refptr existing_buffer = GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI210); if (existing_buffer) { // Cast is safe because the only way kI210 buffer is created is // in the same function below, where |RefCountedObject| // is created. 
- rtc::RefCountedObject* raw_buffer = - static_cast*>(existing_buffer.get()); + RefCountedObject* raw_buffer = + static_cast*>(existing_buffer.get()); // Creates a new scoped_refptr, which is also pointing to the same // RefCountedObject as buffer, increasing ref count. - return rtc::scoped_refptr(raw_buffer); + return scoped_refptr(raw_buffer); } if (buffers_.size() >= max_number_of_buffers_) return nullptr; // Allocate new buffer. - rtc::scoped_refptr buffer = I210Buffer::Create(width, height); + scoped_refptr buffer = I210Buffer::Create(width, height); buffers_.push_back(buffer); return buffer; } -rtc::scoped_refptr VideoFrameBufferPool::CreateI410Buffer( - int width, - int height) { +scoped_refptr VideoFrameBufferPool::CreateI410Buffer(int width, + int height) { RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - rtc::scoped_refptr existing_buffer = + scoped_refptr existing_buffer = GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI410); if (existing_buffer) { // Cast is safe because the only way kI410 buffer is created is // in the same function below, where |RefCountedObject| // is created. - rtc::RefCountedObject* raw_buffer = - static_cast*>(existing_buffer.get()); + RefCountedObject* raw_buffer = + static_cast*>(existing_buffer.get()); // Creates a new scoped_refptr, which is also pointing to the same // RefCountedObject as buffer, increasing ref count. - return rtc::scoped_refptr(raw_buffer); + return scoped_refptr(raw_buffer); } if (buffers_.size() >= max_number_of_buffers_) return nullptr; // Allocate new buffer. - rtc::scoped_refptr buffer = I410Buffer::Create(width, height); + scoped_refptr buffer = I410Buffer::Create(width, height); buffers_.push_back(buffer); return buffer; } -rtc::scoped_refptr VideoFrameBufferPool::GetExistingBuffer( +scoped_refptr VideoFrameBufferPool::GetExistingBuffer( int width, int height, VideoFrameBuffer::Type type) { @@ -327,7 +320,7 @@ rtc::scoped_refptr VideoFrameBufferPool::GetExistingBuffer( } } // Look for a free buffer. - for (const rtc::scoped_refptr& buffer : buffers_) { + for (const scoped_refptr& buffer : buffers_) { // If the buffer is in use, the ref count will be >= 2, one from the list we // are looping over and one from the application. If the ref count is 1, // then the list we are looping over holds the only reference and it's safe diff --git a/common_video/video_frame_buffer_pool_unittest.cc b/common_video/video_frame_buffer_pool_unittest.cc index f177468617..0d8a37bbd2 100644 --- a/common_video/video_frame_buffer_pool_unittest.cc +++ b/common_video/video_frame_buffer_pool_unittest.cc @@ -54,7 +54,7 @@ TEST(TestVideoFrameBufferPool, FailToReuseWrongSize) { } TEST(TestVideoFrameBufferPool, FrameValidAfterPoolDestruction) { - rtc::scoped_refptr buffer; + scoped_refptr buffer; { VideoFrameBufferPool pool; buffer = pool.CreateI420Buffer(16, 16); diff --git a/common_video/video_frame_unittest.cc b/common_video/video_frame_unittest.cc index ae8e54e7d3..c2611aa3ff 100644 --- a/common_video/video_frame_unittest.cc +++ b/common_video/video_frame_unittest.cc @@ -58,8 +58,8 @@ SubSampling SubSamplingForType(VideoFrameBuffer::Type type) { // Helper function to create a buffer and fill it with a gradient for // PlanarYuvBuffer based buffers. 
template -rtc::scoped_refptr CreateGradient(int width, int height) { - rtc::scoped_refptr buffer(T::Create(width, height)); +scoped_refptr CreateGradient(int width, int height) { + scoped_refptr buffer(T::Create(width, height)); // Initialize with gradient, Y = 128(x/w + y/h), U = 256 x/w, V = 256 y/h for (int x = 0; x < width; x++) { for (int y = 0; y < height; y++) { @@ -81,9 +81,8 @@ rtc::scoped_refptr CreateGradient(int width, int height) { } // Helper function to create a buffer and fill it with a gradient. -rtc::scoped_refptr CreateNV12Gradient(int width, - int height) { - rtc::scoped_refptr buffer(NV12Buffer::Create(width, height)); +scoped_refptr CreateNV12Gradient(int width, int height) { + scoped_refptr buffer(NV12Buffer::Create(width, height)); // Initialize with gradient, Y = 128(x/w + y/h), U = 256 x/w, V = 256 y/h for (int x = 0; x < width; x++) { for (int y = 0; y < height; y++) { @@ -211,8 +210,8 @@ TEST(TestVideoFrame, WidthHeightValues) { const int valid_value = 10; EXPECT_EQ(valid_value, frame.width()); EXPECT_EQ(valid_value, frame.height()); - frame.set_timestamp(123u); - EXPECT_EQ(123u, frame.timestamp()); + frame.set_rtp_timestamp(123u); + EXPECT_EQ(123u, frame.rtp_timestamp()); frame.set_ntp_time_ms(456); EXPECT_EQ(456, frame.ntp_time_ms()); EXPECT_EQ(789, frame.render_time_ms()); @@ -246,7 +245,7 @@ TEST(TestVideoFrame, ShallowCopy) { .set_rotation(kRotation) .set_timestamp_us(0) .build(); - frame1.set_timestamp(timestamp); + frame1.set_rtp_timestamp(timestamp); frame1.set_ntp_time_ms(ntp_time_ms); frame1.set_timestamp_us(timestamp_us); VideoFrame frame2(frame1); @@ -260,17 +259,17 @@ TEST(TestVideoFrame, ShallowCopy) { EXPECT_EQ(yuv1->DataU(), yuv2->DataU()); EXPECT_EQ(yuv1->DataV(), yuv2->DataV()); - EXPECT_EQ(frame2.timestamp(), frame1.timestamp()); + EXPECT_EQ(frame2.rtp_timestamp(), frame1.rtp_timestamp()); EXPECT_EQ(frame2.ntp_time_ms(), frame1.ntp_time_ms()); EXPECT_EQ(frame2.timestamp_us(), frame1.timestamp_us()); EXPECT_EQ(frame2.rotation(), frame1.rotation()); - frame2.set_timestamp(timestamp + 1); + frame2.set_rtp_timestamp(timestamp + 1); frame2.set_ntp_time_ms(ntp_time_ms + 1); frame2.set_timestamp_us(timestamp_us + 1); frame2.set_rotation(kVideoRotation_90); - EXPECT_NE(frame2.timestamp(), frame1.timestamp()); + EXPECT_NE(frame2.rtp_timestamp(), frame1.rtp_timestamp()); EXPECT_NE(frame2.ntp_time_ms(), frame1.ntp_time_ms()); EXPECT_NE(frame2.timestamp_us(), frame1.timestamp_us()); EXPECT_NE(frame2.rotation(), frame1.rotation()); @@ -281,14 +280,14 @@ TEST(TestVideoFrame, TextureInitialValues) { 640, 480, 100, 10, webrtc::kVideoRotation_0); EXPECT_EQ(640, frame.width()); EXPECT_EQ(480, frame.height()); - EXPECT_EQ(100u, frame.timestamp()); + EXPECT_EQ(100u, frame.rtp_timestamp()); EXPECT_EQ(10, frame.render_time_ms()); ASSERT_TRUE(frame.video_frame_buffer() != nullptr); EXPECT_TRUE(frame.video_frame_buffer()->type() == VideoFrameBuffer::Type::kNative); - frame.set_timestamp(200); - EXPECT_EQ(200u, frame.timestamp()); + frame.set_rtp_timestamp(200); + EXPECT_EQ(200u, frame.rtp_timestamp()); frame.set_timestamp_us(20); EXPECT_EQ(20, frame.timestamp_us()); } @@ -298,7 +297,7 @@ class TestPlanarYuvBuffer : public ::testing::Test {}; TYPED_TEST_SUITE_P(TestPlanarYuvBuffer); template -rtc::scoped_refptr CreateAndFillBuffer() { +scoped_refptr CreateAndFillBuffer() { auto buf = T::Create(20, 10); memset(buf->MutableDataY(), 1, 200); @@ -319,43 +318,43 @@ rtc::scoped_refptr CreateAndFillBuffer() { } TYPED_TEST_P(TestPlanarYuvBuffer, Copy) { - rtc::scoped_refptr 
buf1 = CreateAndFillBuffer(); - rtc::scoped_refptr buf2 = TypeParam::Copy(*buf1); + scoped_refptr buf1 = CreateAndFillBuffer(); + scoped_refptr buf2 = TypeParam::Copy(*buf1); EXPECT_TRUE(test::FrameBufsEqual(buf1, buf2)); } TYPED_TEST_P(TestPlanarYuvBuffer, CropXCenter) { - rtc::scoped_refptr buf = CreateGradient(200, 100); + scoped_refptr buf = CreateGradient(200, 100); // Pure center cropping, no scaling. - rtc::scoped_refptr scaled_buffer = TypeParam::Create(100, 100); + scoped_refptr scaled_buffer = TypeParam::Create(100, 100); scaled_buffer->CropAndScaleFrom(*buf, 50, 0, 100, 100); CheckCrop(*scaled_buffer, 0.25, 0.0, 0.5, 1.0); } TYPED_TEST_P(TestPlanarYuvBuffer, CropXNotCenter) { - rtc::scoped_refptr buf = CreateGradient(200, 100); + scoped_refptr buf = CreateGradient(200, 100); // Non-center cropping, no scaling. - rtc::scoped_refptr scaled_buffer = TypeParam::Create(100, 100); + scoped_refptr scaled_buffer = TypeParam::Create(100, 100); scaled_buffer->CropAndScaleFrom(*buf, 25, 0, 100, 100); CheckCrop(*scaled_buffer, 0.125, 0.0, 0.5, 1.0); } TYPED_TEST_P(TestPlanarYuvBuffer, CropYCenter) { - rtc::scoped_refptr buf = CreateGradient(100, 200); + scoped_refptr buf = CreateGradient(100, 200); // Pure center cropping, no scaling. - rtc::scoped_refptr scaled_buffer = TypeParam::Create(100, 100); + scoped_refptr scaled_buffer = TypeParam::Create(100, 100); scaled_buffer->CropAndScaleFrom(*buf, 0, 50, 100, 100); CheckCrop(*scaled_buffer, 0.0, 0.25, 1.0, 0.5); } TYPED_TEST_P(TestPlanarYuvBuffer, CropYNotCenter) { - rtc::scoped_refptr buf = CreateGradient(100, 200); + scoped_refptr buf = CreateGradient(100, 200); // Pure center cropping, no scaling. - rtc::scoped_refptr scaled_buffer = TypeParam::Create(100, 100); + scoped_refptr scaled_buffer = TypeParam::Create(100, 100); scaled_buffer->CropAndScaleFrom(*buf, 0, 25, 100, 100); CheckCrop(*scaled_buffer, 0.0, 0.125, 1.0, 0.5); } @@ -365,14 +364,14 @@ TYPED_TEST_P(TestPlanarYuvBuffer, CropAndScale16x9) { const int buffer_height = 480; const int crop_width = 320; const int crop_height = 180; - rtc::scoped_refptr buf = CreateGradient(640, 480); + scoped_refptr buf = CreateGradient(640, 480); // Pure center cropping, no scaling. const int out_width = std::min(buffer_width, crop_width * buffer_height / crop_height); const int out_height = std::min(buffer_height, crop_height * buffer_width / crop_width); - rtc::scoped_refptr scaled_buffer = + scoped_refptr scaled_buffer = TypeParam::Create(out_width, out_height); scaled_buffer->CropAndScaleFrom(*buf, (buffer_width - out_width) / 2, (buffer_height - out_height) / 2, out_width, @@ -401,10 +400,10 @@ class TestPlanarYuvBufferScale : public ::testing::Test {}; TYPED_TEST_SUITE_P(TestPlanarYuvBufferScale); TYPED_TEST_P(TestPlanarYuvBufferScale, Scale) { - rtc::scoped_refptr buf = CreateGradient(200, 100); + scoped_refptr buf = CreateGradient(200, 100); // Pure scaling, no cropping. 
- rtc::scoped_refptr scaled_buffer = TypeParam::Create(150, 75); + scoped_refptr scaled_buffer = TypeParam::Create(150, 75); scaled_buffer->ScaleFrom(*buf); CheckCrop(*scaled_buffer, 0.0, 0.0, 1.0, 1.0); } @@ -427,8 +426,8 @@ TYPED_TEST_SUITE_P(TestPlanarYuvBufferRotate); TYPED_TEST_P(TestPlanarYuvBufferRotate, Rotates) { for (const webrtc::VideoRotation& rotation : this->RotationParams) { - rtc::scoped_refptr buffer = CreateGradient(640, 480); - rtc::scoped_refptr rotated_buffer = + scoped_refptr buffer = CreateGradient(640, 480); + scoped_refptr rotated_buffer = TypeParam::Rotate(*buffer, rotation); CheckRotate(640, 480, rotation, *rotated_buffer); } @@ -452,10 +451,10 @@ TEST(TestNV12Buffer, CropAndScale) { const int kCropRight = 0; const int kCropBottom = 30; - rtc::scoped_refptr buf = + scoped_refptr buf = CreateNV12Gradient(kSourceWidth, kSourceHeight); - rtc::scoped_refptr scaled_buffer = buf->CropAndScale( + scoped_refptr scaled_buffer = buf->CropAndScale( kCropLeft, kCropTop, kSourceWidth - kCropLeft - kCropRight, kSourceHeight - kCropTop - kCropBottom, kScaledWidth, kScaledHeight); diff --git a/docs/bug-reporting.md b/docs/bug-reporting.md index 7948cda8b7..75b877289e 100644 --- a/docs/bug-reporting.md +++ b/docs/bug-reporting.md @@ -115,7 +115,7 @@ Anyone with a [Google account][1] can file bugs in the Chrome and WebRTC tracker ### Filing a Security Bug The WebRTC team takes security very seriously. If you find a vulnerability in -WebRTC, please file a [Chromium security bug][ChromeSecurity], even if the bug +WebRTC, please file a [Chromium security bug][ChromeSecurityBug], even if the bug only affects native WebRTC code and not Chromium. A history of fixed Chromium security bugs is best found via [security notes in @@ -161,13 +161,14 @@ page. [1]: https://accounts.google.com/ [2]: http://www.chromium.org/for-testers/bug-reporting-guidelines/reporting-crash-bug [3]: https://code.google.com/p/chromium/issues/entry?template=Audio/Video%20Issue -[4]: https://bugs.chromium.org/p/webrtc/issues/entry +[4]: https://issues.webrtc.org/issues/new?component=1363538&template=1986396 [5]: native-code/logging.md [ChromeSecurity]: https://www.chromium.org/Home/chromium-security/reporting-security-bugs [DiscussWebRTC]: https://groups.google.com/group/discuss-webrtc [ChromeSecurityBlog]: https://chromereleases.googleblog.com/search/label/Stable%20updates -[ChromeBugList]: https://bugs.chromium.org/p/chromium/issues/list?can=1&q=Type%3DBug-Security+component%3ABlink%3EWebRTC+-status%3ADuplicate%2CWontfix&sort=-closed&colspec=ID+Pri+M+Component+Status+Owner+Summary+OS+Closed&x=m&y=releaseblock&cells=ids -[WebRtcBugList]: https://bugs.chromium.org/p/webrtc/issues/list?q=Type%3DBug-Security&can=1 +[ChromeSecurityBug]: https://issues.chromium.org/issues/new?component=1363614&template=1922342 +[ChromeBugList]: https://issues.chromium.org/issues?q=type:vulnerability +[WebRtcBugList]: https://issues.chromium.org/issues?q=type:vulnerability%20componentid:1456096 [ChromeSecurity]: https://www.chromium.org/Home/chromium-security [SeverityGuidelines]: https://chromium.googlesource.com/chromium/src/+/main/docs/security/severity-guidelines.md [SecurityFaq]: https://chromium.googlesource.com/chromium/src/+/main/docs/security/faq.md diff --git a/docs/faq.md b/docs/faq.md index 67535857d3..3c48a399e4 100644 --- a/docs/faq.md +++ b/docs/faq.md @@ -56,15 +56,6 @@ millions of VoIP endpoints. This codec is included as part of the WebRTC project. -### What is the iLBC audio codec? 
- -iLBC is a free narrowband voice codec that was developed by Global IP -Solutions, and is used in many Voice over IP (VoIP) and streaming audio -applications. In 2004, the final IETF RFC versions of the iLBC codec -specification and the iLBC RTP Profile draft became available. This codec is -included as part of the WebRTC project. - - ### What is the VP8 video codec? VP8 is a highly-efficient video compression technology developed by the WebM Project. It is the video codec included with WebRTC. @@ -173,7 +164,7 @@ stem from the GIPS acquisition. ### What codecs are supported in WebRTC? -The currently supported voice codecs are G.711, G.722, iLBC, and iSAC, and VP8 +The currently supported voice codecs are G.711, G.722, and Opus, while VP8 is the supported video codec. The list of supported codecs may change in the future. diff --git a/docs/monorail-bug-tracker-migration/monorail-issue-tracker-map.csv b/docs/monorail-bug-tracker-migration/monorail-issue-tracker-map.csv new file mode 100644 index 0000000000..3713a418d4 --- /dev/null +++ b/docs/monorail-bug-tracker-migration/monorail-issue-tracker-map.csv @@ -0,0 +1,15789 @@ +monorail_uri,issue_tracker_url +https://crbug.com/webrtc/9501,https://issues.webrtc.org/issues/40009167 +https://crbug.com/webrtc/10428,https://issues.webrtc.org/issues/40096319 +https://crbug.com/webrtc/10922,https://issues.webrtc.org/issues/40096320 +https://crbug.com/webrtc/11290,https://issues.webrtc.org/issues/40096321 +https://crbug.com/webrtc/12497,https://issues.webrtc.org/issues/40096322 +https://crbug.com/webrtc/14175,https://issues.webrtc.org/issues/40096323 +https://crbug.com/webrtc/14419,https://issues.webrtc.org/issues/40096324 +https://crbug.com/webrtc/14578,https://issues.webrtc.org/issues/40096325 +https://crbug.com/webrtc/14605,https://issues.webrtc.org/issues/40096326 +https://crbug.com/webrtc/4624,https://issues.webrtc.org/issues/40096327 +https://crbug.com/webrtc/5305,https://issues.webrtc.org/issues/40096328 +https://crbug.com/webrtc/10047,https://issues.webrtc.org/issues/40644296 +https://crbug.com/webrtc/10083,https://issues.webrtc.org/issues/40644297 +https://crbug.com/webrtc/10165,https://issues.webrtc.org/issues/40644298 +https://crbug.com/webrtc/10188,https://issues.webrtc.org/issues/40644299 +https://crbug.com/webrtc/10261,https://issues.webrtc.org/issues/40644300 +https://crbug.com/webrtc/10277,https://issues.webrtc.org/issues/40644301 +https://crbug.com/webrtc/10352,https://issues.webrtc.org/issues/40644302 +https://crbug.com/webrtc/10358,https://issues.webrtc.org/issues/40644303 +https://crbug.com/webrtc/10399,https://issues.webrtc.org/issues/40644304 +https://crbug.com/webrtc/10419,https://issues.webrtc.org/issues/40644305 +https://crbug.com/webrtc/10421,https://issues.webrtc.org/issues/40644306 +https://crbug.com/webrtc/10422,https://issues.webrtc.org/issues/40644307 +https://crbug.com/webrtc/10423,https://issues.webrtc.org/issues/40644308 +https://crbug.com/webrtc/10507,https://issues.webrtc.org/issues/40644309 +https://crbug.com/webrtc/1053,https://issues.webrtc.org/issues/40644310 +https://crbug.com/webrtc/10753,https://issues.webrtc.org/issues/40644311 +https://crbug.com/webrtc/10849,https://issues.webrtc.org/issues/40644312 +https://crbug.com/webrtc/10944,https://issues.webrtc.org/issues/40644313 +https://crbug.com/webrtc/10967,https://issues.webrtc.org/issues/40644314 +https://crbug.com/webrtc/110,https://issues.webrtc.org/issues/40644315 +https://crbug.com/webrtc/11008,https://issues.webrtc.org/issues/40644316 
+https://crbug.com/webrtc/11011,https://issues.webrtc.org/issues/40644317 +https://crbug.com/webrtc/11019,https://issues.webrtc.org/issues/40644318 +https://crbug.com/webrtc/11031,https://issues.webrtc.org/issues/40644319 +https://crbug.com/webrtc/11057,https://issues.webrtc.org/issues/40644320 +https://crbug.com/webrtc/11084,https://issues.webrtc.org/issues/40644321 +https://crbug.com/webrtc/111,https://issues.webrtc.org/issues/40644322 +https://crbug.com/webrtc/11127,https://issues.webrtc.org/issues/40644323 +https://crbug.com/webrtc/11129,https://issues.webrtc.org/issues/40644324 +https://crbug.com/webrtc/11170,https://issues.webrtc.org/issues/40644325 +https://crbug.com/webrtc/112,https://issues.webrtc.org/issues/40644326 +https://crbug.com/webrtc/11249,https://issues.webrtc.org/issues/40644327 +https://crbug.com/webrtc/11306,https://issues.webrtc.org/issues/40644328 +https://crbug.com/webrtc/11404,https://issues.webrtc.org/issues/40644329 +https://crbug.com/webrtc/11485,https://issues.webrtc.org/issues/40644330 +https://crbug.com/webrtc/11501,https://issues.webrtc.org/issues/40644331 +https://crbug.com/webrtc/11580,https://issues.webrtc.org/issues/40644332 +https://crbug.com/webrtc/11628,https://issues.webrtc.org/issues/40644333 +https://crbug.com/webrtc/11679,https://issues.webrtc.org/issues/40644334 +https://crbug.com/webrtc/11720,https://issues.webrtc.org/issues/40644335 +https://crbug.com/webrtc/11748,https://issues.webrtc.org/issues/40644336 +https://crbug.com/webrtc/11763,https://issues.webrtc.org/issues/40644337 +https://crbug.com/webrtc/11819,https://issues.webrtc.org/issues/40644338 +https://crbug.com/webrtc/11908,https://issues.webrtc.org/issues/40644339 +https://crbug.com/webrtc/11945,https://issues.webrtc.org/issues/40644340 +https://crbug.com/webrtc/11977,https://issues.webrtc.org/issues/40644341 +https://crbug.com/webrtc/12023,https://issues.webrtc.org/issues/40644342 +https://crbug.com/webrtc/12066,https://issues.webrtc.org/issues/40644343 +https://crbug.com/webrtc/12072,https://issues.webrtc.org/issues/40644344 +https://crbug.com/webrtc/12101,https://issues.webrtc.org/issues/40644345 +https://crbug.com/webrtc/12197,https://issues.webrtc.org/issues/40644346 +https://crbug.com/webrtc/1220,https://issues.webrtc.org/issues/40644347 +https://crbug.com/webrtc/12258,https://issues.webrtc.org/issues/40644348 +https://crbug.com/webrtc/12299,https://issues.webrtc.org/issues/40644349 +https://crbug.com/webrtc/12328,https://issues.webrtc.org/issues/40644350 +https://crbug.com/webrtc/12404,https://issues.webrtc.org/issues/40644351 +https://crbug.com/webrtc/12451,https://issues.webrtc.org/issues/40644352 +https://crbug.com/webrtc/12461,https://issues.webrtc.org/issues/40644353 +https://crbug.com/webrtc/12469,https://issues.webrtc.org/issues/40644354 +https://crbug.com/webrtc/12515,https://issues.webrtc.org/issues/40644355 +https://crbug.com/webrtc/12598,https://issues.webrtc.org/issues/40644356 +https://crbug.com/webrtc/12603,https://issues.webrtc.org/issues/40644357 +https://crbug.com/webrtc/12616,https://issues.webrtc.org/issues/40644358 +https://crbug.com/webrtc/12741,https://issues.webrtc.org/issues/40644359 +https://crbug.com/webrtc/12796,https://issues.webrtc.org/issues/40644360 +https://crbug.com/webrtc/12924,https://issues.webrtc.org/issues/40644361 +https://crbug.com/webrtc/12950,https://issues.webrtc.org/issues/40644362 +https://crbug.com/webrtc/1316,https://issues.webrtc.org/issues/40644363 +https://crbug.com/webrtc/13197,https://issues.webrtc.org/issues/40644364 
+https://crbug.com/webrtc/13220,https://issues.webrtc.org/issues/40644365 +https://crbug.com/webrtc/13275,https://issues.webrtc.org/issues/40644366 +https://crbug.com/webrtc/1331,https://issues.webrtc.org/issues/40644367 +https://crbug.com/webrtc/13315,https://issues.webrtc.org/issues/40644368 +https://crbug.com/webrtc/13318,https://issues.webrtc.org/issues/40644369 +https://crbug.com/webrtc/13458,https://issues.webrtc.org/issues/40644370 +https://crbug.com/webrtc/13560,https://issues.webrtc.org/issues/40644371 +https://crbug.com/webrtc/13583,https://issues.webrtc.org/issues/40644372 +https://crbug.com/webrtc/13604,https://issues.webrtc.org/issues/40644373 +https://crbug.com/webrtc/13824,https://issues.webrtc.org/issues/40644374 +https://crbug.com/webrtc/13930,https://issues.webrtc.org/issues/40644375 +https://crbug.com/webrtc/13973,https://issues.webrtc.org/issues/40644376 +https://crbug.com/webrtc/14005,https://issues.webrtc.org/issues/40644377 +https://crbug.com/webrtc/14050,https://issues.webrtc.org/issues/40644378 +https://crbug.com/webrtc/14061,https://issues.webrtc.org/issues/40644379 +https://crbug.com/webrtc/14113,https://issues.webrtc.org/issues/40644380 +https://crbug.com/webrtc/1413,https://issues.webrtc.org/issues/40644381 +https://crbug.com/webrtc/14152,https://issues.webrtc.org/issues/40644382 +https://crbug.com/webrtc/14161,https://issues.webrtc.org/issues/40644383 +https://crbug.com/webrtc/14195,https://issues.webrtc.org/issues/40644384 +https://crbug.com/webrtc/14363,https://issues.webrtc.org/issues/40644385 +https://crbug.com/webrtc/14483,https://issues.webrtc.org/issues/40644386 +https://crbug.com/webrtc/14652,https://issues.webrtc.org/issues/40644387 +https://crbug.com/webrtc/14653,https://issues.webrtc.org/issues/40644388 +https://crbug.com/webrtc/14671,https://issues.webrtc.org/issues/40644389 +https://crbug.com/webrtc/14744,https://issues.webrtc.org/issues/40644390 +https://crbug.com/webrtc/14750,https://issues.webrtc.org/issues/40644391 +https://crbug.com/webrtc/14818,https://issues.webrtc.org/issues/40644392 +https://crbug.com/webrtc/14929,https://issues.webrtc.org/issues/40644393 +https://crbug.com/webrtc/14946,https://issues.webrtc.org/issues/40644394 +https://crbug.com/webrtc/1498,https://issues.webrtc.org/issues/40644395 +https://crbug.com/webrtc/15065,https://issues.webrtc.org/issues/40644396 +https://crbug.com/webrtc/15147,https://issues.webrtc.org/issues/40644397 +https://crbug.com/webrtc/15256,https://issues.webrtc.org/issues/40644398 +https://crbug.com/webrtc/15396,https://issues.webrtc.org/issues/40644399 +https://crbug.com/webrtc/15511,https://issues.webrtc.org/issues/40644400 +https://crbug.com/webrtc/1764,https://issues.webrtc.org/issues/40644401 +https://crbug.com/webrtc/1971,https://issues.webrtc.org/issues/40644402 +https://crbug.com/webrtc/2031,https://issues.webrtc.org/issues/40644403 +https://crbug.com/webrtc/2131,https://issues.webrtc.org/issues/40644404 +https://crbug.com/webrtc/2154,https://issues.webrtc.org/issues/40644405 +https://crbug.com/webrtc/2597,https://issues.webrtc.org/issues/40644406 +https://crbug.com/webrtc/2598,https://issues.webrtc.org/issues/40644407 +https://crbug.com/webrtc/2644,https://issues.webrtc.org/issues/40644408 +https://crbug.com/webrtc/2999,https://issues.webrtc.org/issues/40644409 +https://crbug.com/webrtc/3095,https://issues.webrtc.org/issues/40644410 +https://crbug.com/webrtc/3241,https://issues.webrtc.org/issues/40644411 +https://crbug.com/webrtc/3378,https://issues.webrtc.org/issues/40644412 
+https://crbug.com/webrtc/34,https://issues.webrtc.org/issues/40644413 +https://crbug.com/webrtc/3402,https://issues.webrtc.org/issues/40644414 +https://crbug.com/webrtc/3534,https://issues.webrtc.org/issues/40644415 +https://crbug.com/webrtc/3787,https://issues.webrtc.org/issues/40644416 +https://crbug.com/webrtc/3817,https://issues.webrtc.org/issues/40644417 +https://crbug.com/webrtc/3879,https://issues.webrtc.org/issues/40644418 +https://crbug.com/webrtc/3884,https://issues.webrtc.org/issues/40644419 +https://crbug.com/webrtc/3994,https://issues.webrtc.org/issues/40644420 +https://crbug.com/webrtc/4117,https://issues.webrtc.org/issues/40644421 +https://crbug.com/webrtc/4137,https://issues.webrtc.org/issues/40644422 +https://crbug.com/webrtc/4138,https://issues.webrtc.org/issues/40644423 +https://crbug.com/webrtc/4144,https://issues.webrtc.org/issues/40644424 +https://crbug.com/webrtc/4183,https://issues.webrtc.org/issues/40644425 +https://crbug.com/webrtc/4233,https://issues.webrtc.org/issues/40644426 +https://crbug.com/webrtc/4256,https://issues.webrtc.org/issues/40644427 +https://crbug.com/webrtc/4423,https://issues.webrtc.org/issues/40644428 +https://crbug.com/webrtc/4521,https://issues.webrtc.org/issues/40644429 +https://crbug.com/webrtc/4534,https://issues.webrtc.org/issues/40644430 +https://crbug.com/webrtc/4568,https://issues.webrtc.org/issues/40644431 +https://crbug.com/webrtc/4612,https://issues.webrtc.org/issues/40644432 +https://crbug.com/webrtc/4676,https://issues.webrtc.org/issues/40644433 +https://crbug.com/webrtc/4678,https://issues.webrtc.org/issues/40644434 +https://crbug.com/webrtc/4703,https://issues.webrtc.org/issues/40644435 +https://crbug.com/webrtc/4707,https://issues.webrtc.org/issues/40644436 +https://crbug.com/webrtc/4745,https://issues.webrtc.org/issues/40644437 +https://crbug.com/webrtc/4755,https://issues.webrtc.org/issues/40644438 +https://crbug.com/webrtc/4799,https://issues.webrtc.org/issues/40644439 +https://crbug.com/webrtc/4883,https://issues.webrtc.org/issues/40644440 +https://crbug.com/webrtc/4935,https://issues.webrtc.org/issues/40644441 +https://crbug.com/webrtc/496,https://issues.webrtc.org/issues/40644442 +https://crbug.com/webrtc/4986,https://issues.webrtc.org/issues/40644443 +https://crbug.com/webrtc/5006,https://issues.webrtc.org/issues/40644444 +https://crbug.com/webrtc/5121,https://issues.webrtc.org/issues/40644445 +https://crbug.com/webrtc/5124,https://issues.webrtc.org/issues/40644446 +https://crbug.com/webrtc/5208,https://issues.webrtc.org/issues/40644447 +https://crbug.com/webrtc/5361,https://issues.webrtc.org/issues/40644448 +https://crbug.com/webrtc/5506,https://issues.webrtc.org/issues/40644449 +https://crbug.com/webrtc/5596,https://issues.webrtc.org/issues/40644450 +https://crbug.com/webrtc/5628,https://issues.webrtc.org/issues/40644451 +https://crbug.com/webrtc/5708,https://issues.webrtc.org/issues/40644452 +https://crbug.com/webrtc/5713,https://issues.webrtc.org/issues/40644453 +https://crbug.com/webrtc/5757,https://issues.webrtc.org/issues/40644454 +https://crbug.com/webrtc/5763,https://issues.webrtc.org/issues/40644455 +https://crbug.com/webrtc/5772,https://issues.webrtc.org/issues/40644456 +https://crbug.com/webrtc/5774,https://issues.webrtc.org/issues/40644457 +https://crbug.com/webrtc/5777,https://issues.webrtc.org/issues/40644458 +https://crbug.com/webrtc/5778,https://issues.webrtc.org/issues/40644459 +https://crbug.com/webrtc/5785,https://issues.webrtc.org/issues/40644460 
+https://crbug.com/webrtc/5793,https://issues.webrtc.org/issues/40644461 +https://crbug.com/webrtc/5794,https://issues.webrtc.org/issues/40644462 +https://crbug.com/webrtc/5820,https://issues.webrtc.org/issues/40644463 +https://crbug.com/webrtc/5854,https://issues.webrtc.org/issues/40644464 +https://crbug.com/webrtc/5905,https://issues.webrtc.org/issues/40644465 +https://crbug.com/webrtc/5919,https://issues.webrtc.org/issues/40644466 +https://crbug.com/webrtc/5967,https://issues.webrtc.org/issues/40644467 +https://crbug.com/webrtc/5985,https://issues.webrtc.org/issues/40644468 +https://crbug.com/webrtc/6018,https://issues.webrtc.org/issues/40644469 +https://crbug.com/webrtc/6027,https://issues.webrtc.org/issues/40644470 +https://crbug.com/webrtc/6028,https://issues.webrtc.org/issues/40644471 +https://crbug.com/webrtc/6029,https://issues.webrtc.org/issues/40644472 +https://crbug.com/webrtc/6317,https://issues.webrtc.org/issues/40644473 +https://crbug.com/webrtc/6356,https://issues.webrtc.org/issues/40644474 +https://crbug.com/webrtc/6437,https://issues.webrtc.org/issues/40644475 +https://crbug.com/webrtc/6450,https://issues.webrtc.org/issues/40644476 +https://crbug.com/webrtc/6484,https://issues.webrtc.org/issues/40644477 +https://crbug.com/webrtc/6541,https://issues.webrtc.org/issues/40644478 +https://crbug.com/webrtc/6583,https://issues.webrtc.org/issues/40644479 +https://crbug.com/webrtc/6625,https://issues.webrtc.org/issues/40644480 +https://crbug.com/webrtc/6627,https://issues.webrtc.org/issues/40644481 +https://crbug.com/webrtc/6636,https://issues.webrtc.org/issues/40644482 +https://crbug.com/webrtc/6641,https://issues.webrtc.org/issues/40644483 +https://crbug.com/webrtc/6657,https://issues.webrtc.org/issues/40644484 +https://crbug.com/webrtc/6754,https://issues.webrtc.org/issues/40644485 +https://crbug.com/webrtc/6755,https://issues.webrtc.org/issues/40644486 +https://crbug.com/webrtc/6756,https://issues.webrtc.org/issues/40644487 +https://crbug.com/webrtc/6757,https://issues.webrtc.org/issues/40644488 +https://crbug.com/webrtc/6758,https://issues.webrtc.org/issues/40644489 +https://crbug.com/webrtc/6768,https://issues.webrtc.org/issues/40644490 +https://crbug.com/webrtc/6804,https://issues.webrtc.org/issues/40644491 +https://crbug.com/webrtc/6826,https://issues.webrtc.org/issues/40644492 +https://crbug.com/webrtc/6871,https://issues.webrtc.org/issues/40644493 +https://crbug.com/webrtc/6872,https://issues.webrtc.org/issues/40644494 +https://crbug.com/webrtc/6875,https://issues.webrtc.org/issues/40644495 +https://crbug.com/webrtc/6920,https://issues.webrtc.org/issues/40644496 +https://crbug.com/webrtc/6934,https://issues.webrtc.org/issues/40644497 +https://crbug.com/webrtc/6986,https://issues.webrtc.org/issues/40644498 +https://crbug.com/webrtc/7024,https://issues.webrtc.org/issues/40644499 +https://crbug.com/webrtc/7030,https://issues.webrtc.org/issues/40644500 +https://crbug.com/webrtc/7038,https://issues.webrtc.org/issues/40644501 +https://crbug.com/webrtc/7060,https://issues.webrtc.org/issues/40644502 +https://crbug.com/webrtc/7061,https://issues.webrtc.org/issues/40644503 +https://crbug.com/webrtc/7062,https://issues.webrtc.org/issues/40644504 +https://crbug.com/webrtc/7063,https://issues.webrtc.org/issues/40644505 +https://crbug.com/webrtc/7064,https://issues.webrtc.org/issues/40644506 +https://crbug.com/webrtc/7065,https://issues.webrtc.org/issues/40644507 +https://crbug.com/webrtc/7066,https://issues.webrtc.org/issues/40644508 
+https://crbug.com/webrtc/7067,https://issues.webrtc.org/issues/40644509 +https://crbug.com/webrtc/7161,https://issues.webrtc.org/issues/40644510 +https://crbug.com/webrtc/7229,https://issues.webrtc.org/issues/40644511 +https://crbug.com/webrtc/7307,https://issues.webrtc.org/issues/40644512 +https://crbug.com/webrtc/7314,https://issues.webrtc.org/issues/40644513 +https://crbug.com/webrtc/7355,https://issues.webrtc.org/issues/40644514 +https://crbug.com/webrtc/7361,https://issues.webrtc.org/issues/40644515 +https://crbug.com/webrtc/7429,https://issues.webrtc.org/issues/40644516 +https://crbug.com/webrtc/7553,https://issues.webrtc.org/issues/40644517 +https://crbug.com/webrtc/7565,https://issues.webrtc.org/issues/40644518 +https://crbug.com/webrtc/7567,https://issues.webrtc.org/issues/40644519 +https://crbug.com/webrtc/7600,https://issues.webrtc.org/issues/40644520 +https://crbug.com/webrtc/7602,https://issues.webrtc.org/issues/40644521 +https://crbug.com/webrtc/7634,https://issues.webrtc.org/issues/40644522 +https://crbug.com/webrtc/7700,https://issues.webrtc.org/issues/40644523 +https://crbug.com/webrtc/7774,https://issues.webrtc.org/issues/40644524 +https://crbug.com/webrtc/7815,https://issues.webrtc.org/issues/40644525 +https://crbug.com/webrtc/7844,https://issues.webrtc.org/issues/40644526 +https://crbug.com/webrtc/7855,https://issues.webrtc.org/issues/40644527 +https://crbug.com/webrtc/7885,https://issues.webrtc.org/issues/40644528 +https://crbug.com/webrtc/7932,https://issues.webrtc.org/issues/40644529 +https://crbug.com/webrtc/7933,https://issues.webrtc.org/issues/40644530 +https://crbug.com/webrtc/7940,https://issues.webrtc.org/issues/40644531 +https://crbug.com/webrtc/7978,https://issues.webrtc.org/issues/40644532 +https://crbug.com/webrtc/7979,https://issues.webrtc.org/issues/40644533 +https://crbug.com/webrtc/8030,https://issues.webrtc.org/issues/40644534 +https://crbug.com/webrtc/8099,https://issues.webrtc.org/issues/40644535 +https://crbug.com/webrtc/8107,https://issues.webrtc.org/issues/40644536 +https://crbug.com/webrtc/8109,https://issues.webrtc.org/issues/40644537 +https://crbug.com/webrtc/8221,https://issues.webrtc.org/issues/40644538 +https://crbug.com/webrtc/8315,https://issues.webrtc.org/issues/40644539 +https://crbug.com/webrtc/8356,https://issues.webrtc.org/issues/40644540 +https://crbug.com/webrtc/8365,https://issues.webrtc.org/issues/40644541 +https://crbug.com/webrtc/8366,https://issues.webrtc.org/issues/40644542 +https://crbug.com/webrtc/8377,https://issues.webrtc.org/issues/40644543 +https://crbug.com/webrtc/8425,https://issues.webrtc.org/issues/40644544 +https://crbug.com/webrtc/8473,https://issues.webrtc.org/issues/40644545 +https://crbug.com/webrtc/8506,https://issues.webrtc.org/issues/40644546 +https://crbug.com/webrtc/8530,https://issues.webrtc.org/issues/40644547 +https://crbug.com/webrtc/857,https://issues.webrtc.org/issues/40644548 +https://crbug.com/webrtc/8571,https://issues.webrtc.org/issues/40644549 +https://crbug.com/webrtc/8612,https://issues.webrtc.org/issues/40644550 +https://crbug.com/webrtc/8664,https://issues.webrtc.org/issues/40644551 +https://crbug.com/webrtc/8730,https://issues.webrtc.org/issues/40644552 +https://crbug.com/webrtc/8734,https://issues.webrtc.org/issues/40644553 +https://crbug.com/webrtc/8809,https://issues.webrtc.org/issues/40644554 +https://crbug.com/webrtc/8895,https://issues.webrtc.org/issues/40644555 +https://crbug.com/webrtc/8908,https://issues.webrtc.org/issues/40644556 
+https://crbug.com/webrtc/9017,https://issues.webrtc.org/issues/40644557 +https://crbug.com/webrtc/9037,https://issues.webrtc.org/issues/40644558 +https://crbug.com/webrtc/9071,https://issues.webrtc.org/issues/40644559 +https://crbug.com/webrtc/912,https://issues.webrtc.org/issues/40644560 +https://crbug.com/webrtc/9141,https://issues.webrtc.org/issues/40644561 +https://crbug.com/webrtc/9143,https://issues.webrtc.org/issues/40644562 +https://crbug.com/webrtc/9201,https://issues.webrtc.org/issues/40644563 +https://crbug.com/webrtc/9205,https://issues.webrtc.org/issues/40644564 +https://crbug.com/webrtc/9218,https://issues.webrtc.org/issues/40644565 +https://crbug.com/webrtc/9356,https://issues.webrtc.org/issues/40644566 +https://crbug.com/webrtc/9394,https://issues.webrtc.org/issues/40644567 +https://crbug.com/webrtc/9401,https://issues.webrtc.org/issues/40644568 +https://crbug.com/webrtc/9410,https://issues.webrtc.org/issues/40644569 +https://crbug.com/webrtc/9419,https://issues.webrtc.org/issues/40644570 +https://crbug.com/webrtc/9470,https://issues.webrtc.org/issues/40644571 +https://crbug.com/webrtc/9522,https://issues.webrtc.org/issues/40644572 +https://crbug.com/webrtc/9540,https://issues.webrtc.org/issues/40644573 +https://crbug.com/webrtc/9545,https://issues.webrtc.org/issues/40644574 +https://crbug.com/webrtc/9553,https://issues.webrtc.org/issues/40644575 +https://crbug.com/webrtc/9706,https://issues.webrtc.org/issues/40644576 +https://crbug.com/webrtc/9725,https://issues.webrtc.org/issues/40644577 +https://crbug.com/webrtc/9740,https://issues.webrtc.org/issues/40644578 +https://crbug.com/webrtc/9770,https://issues.webrtc.org/issues/40644579 +https://crbug.com/webrtc/9777,https://issues.webrtc.org/issues/40644580 +https://crbug.com/webrtc/9861,https://issues.webrtc.org/issues/40644581 +https://crbug.com/webrtc/9878,https://issues.webrtc.org/issues/40644582 +https://crbug.com/webrtc/9936,https://issues.webrtc.org/issues/40644583 +https://crbug.com/webrtc/9956,https://issues.webrtc.org/issues/40644584 +https://crbug.com/webrtc/10007,https://issues.webrtc.org/issues/41480825 +https://crbug.com/webrtc/10186,https://issues.webrtc.org/issues/41480826 +https://crbug.com/webrtc/10199,https://issues.webrtc.org/issues/41480827 +https://crbug.com/webrtc/10240,https://issues.webrtc.org/issues/41480828 +https://crbug.com/webrtc/10258,https://issues.webrtc.org/issues/41480829 +https://crbug.com/webrtc/10294,https://issues.webrtc.org/issues/41480830 +https://crbug.com/webrtc/10401,https://issues.webrtc.org/issues/41480831 +https://crbug.com/webrtc/10404,https://issues.webrtc.org/issues/41480832 +https://crbug.com/webrtc/10459,https://issues.webrtc.org/issues/41480833 +https://crbug.com/webrtc/10468,https://issues.webrtc.org/issues/41480834 +https://crbug.com/webrtc/10540,https://issues.webrtc.org/issues/41480835 +https://crbug.com/webrtc/10636,https://issues.webrtc.org/issues/41480836 +https://crbug.com/webrtc/10673,https://issues.webrtc.org/issues/41480837 +https://crbug.com/webrtc/1072,https://issues.webrtc.org/issues/41480838 +https://crbug.com/webrtc/10887,https://issues.webrtc.org/issues/41480839 +https://crbug.com/webrtc/10901,https://issues.webrtc.org/issues/41480840 +https://crbug.com/webrtc/10903,https://issues.webrtc.org/issues/41480841 +https://crbug.com/webrtc/10939,https://issues.webrtc.org/issues/41480842 +https://crbug.com/webrtc/10940,https://issues.webrtc.org/issues/41480843 +https://crbug.com/webrtc/11061,https://issues.webrtc.org/issues/41480844 
+https://crbug.com/webrtc/11066,https://issues.webrtc.org/issues/41480845 +https://crbug.com/webrtc/11096,https://issues.webrtc.org/issues/41480846 +https://crbug.com/webrtc/11141,https://issues.webrtc.org/issues/41480847 +https://crbug.com/webrtc/11147,https://issues.webrtc.org/issues/41480848 +https://crbug.com/webrtc/11155,https://issues.webrtc.org/issues/41480849 +https://crbug.com/webrtc/11162,https://issues.webrtc.org/issues/41480850 +https://crbug.com/webrtc/11212,https://issues.webrtc.org/issues/41480851 +https://crbug.com/webrtc/11244,https://issues.webrtc.org/issues/41480852 +https://crbug.com/webrtc/11349,https://issues.webrtc.org/issues/41480853 +https://crbug.com/webrtc/11429,https://issues.webrtc.org/issues/41480854 +https://crbug.com/webrtc/11437,https://issues.webrtc.org/issues/41480855 +https://crbug.com/webrtc/11568,https://issues.webrtc.org/issues/41480856 +https://crbug.com/webrtc/11604,https://issues.webrtc.org/issues/41480857 +https://crbug.com/webrtc/11620,https://issues.webrtc.org/issues/41480858 +https://crbug.com/webrtc/11653,https://issues.webrtc.org/issues/41480859 +https://crbug.com/webrtc/11686,https://issues.webrtc.org/issues/41480860 +https://crbug.com/webrtc/11737,https://issues.webrtc.org/issues/41480861 +https://crbug.com/webrtc/11840,https://issues.webrtc.org/issues/41480862 +https://crbug.com/webrtc/11870,https://issues.webrtc.org/issues/41480863 +https://crbug.com/webrtc/11904,https://issues.webrtc.org/issues/41480864 +https://crbug.com/webrtc/11941,https://issues.webrtc.org/issues/41480865 +https://crbug.com/webrtc/11982,https://issues.webrtc.org/issues/41480866 +https://crbug.com/webrtc/12035,https://issues.webrtc.org/issues/41480867 +https://crbug.com/webrtc/12036,https://issues.webrtc.org/issues/41480868 +https://crbug.com/webrtc/12050,https://issues.webrtc.org/issues/41480869 +https://crbug.com/webrtc/12063,https://issues.webrtc.org/issues/41480870 +https://crbug.com/webrtc/12070,https://issues.webrtc.org/issues/41480871 +https://crbug.com/webrtc/12079,https://issues.webrtc.org/issues/41480872 +https://crbug.com/webrtc/12123,https://issues.webrtc.org/issues/41480873 +https://crbug.com/webrtc/12136,https://issues.webrtc.org/issues/41480874 +https://crbug.com/webrtc/12176,https://issues.webrtc.org/issues/41480875 +https://crbug.com/webrtc/12191,https://issues.webrtc.org/issues/41480876 +https://crbug.com/webrtc/12238,https://issues.webrtc.org/issues/41480877 +https://crbug.com/webrtc/12268,https://issues.webrtc.org/issues/41480878 +https://crbug.com/webrtc/12368,https://issues.webrtc.org/issues/41480879 +https://crbug.com/webrtc/12378,https://issues.webrtc.org/issues/41480880 +https://crbug.com/webrtc/12386,https://issues.webrtc.org/issues/41480881 +https://crbug.com/webrtc/12393,https://issues.webrtc.org/issues/41480882 +https://crbug.com/webrtc/12458,https://issues.webrtc.org/issues/41480883 +https://crbug.com/webrtc/12487,https://issues.webrtc.org/issues/41480884 +https://crbug.com/webrtc/12536,https://issues.webrtc.org/issues/41480885 +https://crbug.com/webrtc/12653,https://issues.webrtc.org/issues/41480886 +https://crbug.com/webrtc/12660,https://issues.webrtc.org/issues/41480887 +https://crbug.com/webrtc/12695,https://issues.webrtc.org/issues/41480888 +https://crbug.com/webrtc/12723,https://issues.webrtc.org/issues/41480889 +https://crbug.com/webrtc/12731,https://issues.webrtc.org/issues/41480890 +https://crbug.com/webrtc/12746,https://issues.webrtc.org/issues/41480891 +https://crbug.com/webrtc/12747,https://issues.webrtc.org/issues/41480892 
+https://crbug.com/webrtc/12790,https://issues.webrtc.org/issues/41480893 +https://crbug.com/webrtc/12960,https://issues.webrtc.org/issues/41480894 +https://crbug.com/webrtc/12988,https://issues.webrtc.org/issues/41480895 +https://crbug.com/webrtc/13030,https://issues.webrtc.org/issues/41480896 +https://crbug.com/webrtc/13073,https://issues.webrtc.org/issues/41480897 +https://crbug.com/webrtc/13109,https://issues.webrtc.org/issues/41480898 +https://crbug.com/webrtc/13115,https://issues.webrtc.org/issues/41480899 +https://crbug.com/webrtc/13186,https://issues.webrtc.org/issues/41480900 +https://crbug.com/webrtc/13387,https://issues.webrtc.org/issues/41480901 +https://crbug.com/webrtc/13426,https://issues.webrtc.org/issues/41480902 +https://crbug.com/webrtc/13468,https://issues.webrtc.org/issues/41480903 +https://crbug.com/webrtc/13485,https://issues.webrtc.org/issues/41480904 +https://crbug.com/webrtc/13508,https://issues.webrtc.org/issues/41480905 +https://crbug.com/webrtc/13528,https://issues.webrtc.org/issues/41480906 +https://crbug.com/webrtc/13596,https://issues.webrtc.org/issues/41480907 +https://crbug.com/webrtc/13652,https://issues.webrtc.org/issues/41480908 +https://crbug.com/webrtc/13667,https://issues.webrtc.org/issues/41480909 +https://crbug.com/webrtc/13751,https://issues.webrtc.org/issues/41480910 +https://crbug.com/webrtc/13937,https://issues.webrtc.org/issues/41480911 +https://crbug.com/webrtc/13972,https://issues.webrtc.org/issues/41480912 +https://crbug.com/webrtc/14008,https://issues.webrtc.org/issues/41480913 +https://crbug.com/webrtc/14220,https://issues.webrtc.org/issues/41480914 +https://crbug.com/webrtc/14225,https://issues.webrtc.org/issues/41480915 +https://crbug.com/webrtc/14674,https://issues.webrtc.org/issues/41480916 +https://crbug.com/webrtc/147,https://issues.webrtc.org/issues/41480917 +https://crbug.com/webrtc/14773,https://issues.webrtc.org/issues/41480918 +https://crbug.com/webrtc/14782,https://issues.webrtc.org/issues/41480919 +https://crbug.com/webrtc/15034,https://issues.webrtc.org/issues/41480920 +https://crbug.com/webrtc/15041,https://issues.webrtc.org/issues/41480921 +https://crbug.com/webrtc/15060,https://issues.webrtc.org/issues/41480922 +https://crbug.com/webrtc/15073,https://issues.webrtc.org/issues/41480923 +https://crbug.com/webrtc/15128,https://issues.webrtc.org/issues/41480924 +https://crbug.com/webrtc/15173,https://issues.webrtc.org/issues/41480925 +https://crbug.com/webrtc/15223,https://issues.webrtc.org/issues/41480926 +https://crbug.com/webrtc/15397,https://issues.webrtc.org/issues/41480927 +https://crbug.com/webrtc/15512,https://issues.webrtc.org/issues/41480928 +https://crbug.com/webrtc/15629,https://issues.webrtc.org/issues/41480929 +https://crbug.com/webrtc/15635,https://issues.webrtc.org/issues/41480930 +https://crbug.com/webrtc/15660,https://issues.webrtc.org/issues/41480931 +https://crbug.com/webrtc/1600,https://issues.webrtc.org/issues/41480932 +https://crbug.com/webrtc/1628,https://issues.webrtc.org/issues/41480933 +https://crbug.com/webrtc/1667,https://issues.webrtc.org/issues/41480934 +https://crbug.com/webrtc/1757,https://issues.webrtc.org/issues/41480935 +https://crbug.com/webrtc/1958,https://issues.webrtc.org/issues/41480936 +https://crbug.com/webrtc/2076,https://issues.webrtc.org/issues/41480937 +https://crbug.com/webrtc/2143,https://issues.webrtc.org/issues/41480938 +https://crbug.com/webrtc/2157,https://issues.webrtc.org/issues/41480939 +https://crbug.com/webrtc/2243,https://issues.webrtc.org/issues/41480940 
+https://crbug.com/webrtc/2276,https://issues.webrtc.org/issues/41480941 +https://crbug.com/webrtc/2481,https://issues.webrtc.org/issues/41480942 +https://crbug.com/webrtc/2489,https://issues.webrtc.org/issues/41480943 +https://crbug.com/webrtc/2822,https://issues.webrtc.org/issues/41480944 +https://crbug.com/webrtc/2863,https://issues.webrtc.org/issues/41480945 +https://crbug.com/webrtc/3138,https://issues.webrtc.org/issues/41480946 +https://crbug.com/webrtc/3485,https://issues.webrtc.org/issues/41480947 +https://crbug.com/webrtc/3495,https://issues.webrtc.org/issues/41480948 +https://crbug.com/webrtc/3611,https://issues.webrtc.org/issues/41480949 +https://crbug.com/webrtc/3666,https://issues.webrtc.org/issues/41480950 +https://crbug.com/webrtc/3711,https://issues.webrtc.org/issues/41480951 +https://crbug.com/webrtc/3769,https://issues.webrtc.org/issues/41480952 +https://crbug.com/webrtc/3772,https://issues.webrtc.org/issues/41480953 +https://crbug.com/webrtc/3940,https://issues.webrtc.org/issues/41480954 +https://crbug.com/webrtc/3969,https://issues.webrtc.org/issues/41480955 +https://crbug.com/webrtc/3970,https://issues.webrtc.org/issues/41480956 +https://crbug.com/webrtc/4018,https://issues.webrtc.org/issues/41480957 +https://crbug.com/webrtc/4033,https://issues.webrtc.org/issues/41480958 +https://crbug.com/webrtc/4076,https://issues.webrtc.org/issues/41480959 +https://crbug.com/webrtc/4096,https://issues.webrtc.org/issues/41480960 +https://crbug.com/webrtc/4105,https://issues.webrtc.org/issues/41480961 +https://crbug.com/webrtc/4106,https://issues.webrtc.org/issues/41480962 +https://crbug.com/webrtc/4201,https://issues.webrtc.org/issues/41480963 +https://crbug.com/webrtc/4457,https://issues.webrtc.org/issues/41480964 +https://crbug.com/webrtc/4495,https://issues.webrtc.org/issues/41480965 +https://crbug.com/webrtc/4532,https://issues.webrtc.org/issues/41480966 +https://crbug.com/webrtc/4535,https://issues.webrtc.org/issues/41480967 +https://crbug.com/webrtc/4542,https://issues.webrtc.org/issues/41480968 +https://crbug.com/webrtc/4570,https://issues.webrtc.org/issues/41480969 +https://crbug.com/webrtc/4601,https://issues.webrtc.org/issues/41480970 +https://crbug.com/webrtc/4614,https://issues.webrtc.org/issues/41480971 +https://crbug.com/webrtc/4639,https://issues.webrtc.org/issues/41480972 +https://crbug.com/webrtc/4688,https://issues.webrtc.org/issues/41480973 +https://crbug.com/webrtc/4699,https://issues.webrtc.org/issues/41480974 +https://crbug.com/webrtc/4700,https://issues.webrtc.org/issues/41480975 +https://crbug.com/webrtc/4705,https://issues.webrtc.org/issues/41480976 +https://crbug.com/webrtc/4727,https://issues.webrtc.org/issues/41480977 +https://crbug.com/webrtc/4753,https://issues.webrtc.org/issues/41480978 +https://crbug.com/webrtc/4807,https://issues.webrtc.org/issues/41480979 +https://crbug.com/webrtc/4810,https://issues.webrtc.org/issues/41480980 +https://crbug.com/webrtc/4824,https://issues.webrtc.org/issues/41480981 +https://crbug.com/webrtc/4830,https://issues.webrtc.org/issues/41480982 +https://crbug.com/webrtc/4862,https://issues.webrtc.org/issues/41480983 +https://crbug.com/webrtc/4870,https://issues.webrtc.org/issues/41480984 +https://crbug.com/webrtc/4889,https://issues.webrtc.org/issues/41480985 +https://crbug.com/webrtc/4906,https://issues.webrtc.org/issues/41480986 +https://crbug.com/webrtc/4994,https://issues.webrtc.org/issues/41480987 +https://crbug.com/webrtc/5079,https://issues.webrtc.org/issues/41480988 
+https://crbug.com/webrtc/5092,https://issues.webrtc.org/issues/41480989 +https://crbug.com/webrtc/5150,https://issues.webrtc.org/issues/41480990 +https://crbug.com/webrtc/5187,https://issues.webrtc.org/issues/41480991 +https://crbug.com/webrtc/5222,https://issues.webrtc.org/issues/41480992 +https://crbug.com/webrtc/5231,https://issues.webrtc.org/issues/41480993 +https://crbug.com/webrtc/529,https://issues.webrtc.org/issues/41480994 +https://crbug.com/webrtc/5410,https://issues.webrtc.org/issues/41480995 +https://crbug.com/webrtc/5424,https://issues.webrtc.org/issues/41480996 +https://crbug.com/webrtc/5427,https://issues.webrtc.org/issues/41480997 +https://crbug.com/webrtc/5428,https://issues.webrtc.org/issues/41480998 +https://crbug.com/webrtc/5456,https://issues.webrtc.org/issues/41480999 +https://crbug.com/webrtc/5525,https://issues.webrtc.org/issues/41481000 +https://crbug.com/webrtc/5558,https://issues.webrtc.org/issues/41481001 +https://crbug.com/webrtc/5562,https://issues.webrtc.org/issues/41481002 +https://crbug.com/webrtc/5581,https://issues.webrtc.org/issues/41481003 +https://crbug.com/webrtc/5586,https://issues.webrtc.org/issues/41481004 +https://crbug.com/webrtc/5588,https://issues.webrtc.org/issues/41481005 +https://crbug.com/webrtc/5607,https://issues.webrtc.org/issues/41481006 +https://crbug.com/webrtc/5624,https://issues.webrtc.org/issues/41481007 +https://crbug.com/webrtc/5696,https://issues.webrtc.org/issues/41481008 +https://crbug.com/webrtc/5717,https://issues.webrtc.org/issues/41481009 +https://crbug.com/webrtc/5730,https://issues.webrtc.org/issues/41481010 +https://crbug.com/webrtc/5731,https://issues.webrtc.org/issues/41481011 +https://crbug.com/webrtc/5732,https://issues.webrtc.org/issues/41481012 +https://crbug.com/webrtc/5741,https://issues.webrtc.org/issues/41481013 +https://crbug.com/webrtc/5788,https://issues.webrtc.org/issues/41481014 +https://crbug.com/webrtc/5795,https://issues.webrtc.org/issues/41481015 +https://crbug.com/webrtc/5807,https://issues.webrtc.org/issues/41481016 +https://crbug.com/webrtc/5836,https://issues.webrtc.org/issues/41481017 +https://crbug.com/webrtc/5847,https://issues.webrtc.org/issues/41481018 +https://crbug.com/webrtc/5855,https://issues.webrtc.org/issues/41481019 +https://crbug.com/webrtc/5857,https://issues.webrtc.org/issues/41481020 +https://crbug.com/webrtc/5875,https://issues.webrtc.org/issues/41481021 +https://crbug.com/webrtc/5949,https://issues.webrtc.org/issues/41481022 +https://crbug.com/webrtc/6001,https://issues.webrtc.org/issues/41481023 +https://crbug.com/webrtc/6023,https://issues.webrtc.org/issues/41481024 +https://crbug.com/webrtc/6081,https://issues.webrtc.org/issues/41481025 +https://crbug.com/webrtc/6133,https://issues.webrtc.org/issues/41481026 +https://crbug.com/webrtc/6222,https://issues.webrtc.org/issues/41481027 +https://crbug.com/webrtc/6320,https://issues.webrtc.org/issues/41481028 +https://crbug.com/webrtc/6323,https://issues.webrtc.org/issues/41481029 +https://crbug.com/webrtc/6337,https://issues.webrtc.org/issues/41481030 +https://crbug.com/webrtc/6402,https://issues.webrtc.org/issues/41481031 +https://crbug.com/webrtc/6412,https://issues.webrtc.org/issues/41481032 +https://crbug.com/webrtc/6441,https://issues.webrtc.org/issues/41481033 +https://crbug.com/webrtc/6493,https://issues.webrtc.org/issues/41481034 +https://crbug.com/webrtc/6500,https://issues.webrtc.org/issues/41481035 +https://crbug.com/webrtc/6646,https://issues.webrtc.org/issues/41481036 
+https://crbug.com/webrtc/6820,https://issues.webrtc.org/issues/41481037 +https://crbug.com/webrtc/6972,https://issues.webrtc.org/issues/41481038 +https://crbug.com/webrtc/704,https://issues.webrtc.org/issues/41481039 +https://crbug.com/webrtc/7304,https://issues.webrtc.org/issues/41481040 +https://crbug.com/webrtc/7332,https://issues.webrtc.org/issues/41481041 +https://crbug.com/webrtc/743,https://issues.webrtc.org/issues/41481042 +https://crbug.com/webrtc/7443,https://issues.webrtc.org/issues/41481043 +https://crbug.com/webrtc/7445,https://issues.webrtc.org/issues/41481044 +https://crbug.com/webrtc/7493,https://issues.webrtc.org/issues/41481045 +https://crbug.com/webrtc/7551,https://issues.webrtc.org/issues/41481046 +https://crbug.com/webrtc/7578,https://issues.webrtc.org/issues/41481047 +https://crbug.com/webrtc/7706,https://issues.webrtc.org/issues/41481048 +https://crbug.com/webrtc/7712,https://issues.webrtc.org/issues/41481049 +https://crbug.com/webrtc/7770,https://issues.webrtc.org/issues/41481050 +https://crbug.com/webrtc/7916,https://issues.webrtc.org/issues/41481051 +https://crbug.com/webrtc/8102,https://issues.webrtc.org/issues/41481052 +https://crbug.com/webrtc/8133,https://issues.webrtc.org/issues/41481053 +https://crbug.com/webrtc/8183,https://issues.webrtc.org/issues/41481054 +https://crbug.com/webrtc/8191,https://issues.webrtc.org/issues/41481055 +https://crbug.com/webrtc/8232,https://issues.webrtc.org/issues/41481056 +https://crbug.com/webrtc/8239,https://issues.webrtc.org/issues/41481057 +https://crbug.com/webrtc/8317,https://issues.webrtc.org/issues/41481058 +https://crbug.com/webrtc/8339,https://issues.webrtc.org/issues/41481059 +https://crbug.com/webrtc/8536,https://issues.webrtc.org/issues/41481060 +https://crbug.com/webrtc/8538,https://issues.webrtc.org/issues/41481061 +https://crbug.com/webrtc/8562,https://issues.webrtc.org/issues/41481062 +https://crbug.com/webrtc/8589,https://issues.webrtc.org/issues/41481063 +https://crbug.com/webrtc/8652,https://issues.webrtc.org/issues/41481064 +https://crbug.com/webrtc/8690,https://issues.webrtc.org/issues/41481065 +https://crbug.com/webrtc/8698,https://issues.webrtc.org/issues/41481066 +https://crbug.com/webrtc/8742,https://issues.webrtc.org/issues/41481067 +https://crbug.com/webrtc/8860,https://issues.webrtc.org/issues/41481068 +https://crbug.com/webrtc/9093,https://issues.webrtc.org/issues/41481069 +https://crbug.com/webrtc/9096,https://issues.webrtc.org/issues/41481070 +https://crbug.com/webrtc/9273,https://issues.webrtc.org/issues/41481071 +https://crbug.com/webrtc/9369,https://issues.webrtc.org/issues/41481072 +https://crbug.com/webrtc/9402,https://issues.webrtc.org/issues/41481073 +https://crbug.com/webrtc/9442,https://issues.webrtc.org/issues/41481074 +https://crbug.com/webrtc/9476,https://issues.webrtc.org/issues/41481075 +https://crbug.com/webrtc/9547,https://issues.webrtc.org/issues/41481076 +https://crbug.com/webrtc/9624,https://issues.webrtc.org/issues/41481077 +https://crbug.com/webrtc/9696,https://issues.webrtc.org/issues/41481078 +https://crbug.com/webrtc/9775,https://issues.webrtc.org/issues/41481079 +https://crbug.com/webrtc/9922,https://issues.webrtc.org/issues/41481080 +https://crbug.com/webrtc/10134,https://issues.webrtc.org/issues/41488610 +https://crbug.com/webrtc/10356,https://issues.webrtc.org/issues/41488611 +https://crbug.com/webrtc/10467,https://issues.webrtc.org/issues/41488612 +https://crbug.com/webrtc/3333,https://issues.webrtc.org/issues/41488613 
+https://crbug.com/webrtc/4825,https://issues.webrtc.org/issues/41488614 +https://crbug.com/webrtc/4971,https://issues.webrtc.org/issues/41488615 +https://crbug.com/webrtc/5527,https://issues.webrtc.org/issues/41488616 +https://crbug.com/webrtc/6093,https://issues.webrtc.org/issues/41488617 +https://crbug.com/webrtc/6240,https://issues.webrtc.org/issues/41488618 +https://crbug.com/webrtc/7033,https://issues.webrtc.org/issues/41488619 +https://crbug.com/webrtc/7437,https://issues.webrtc.org/issues/41488620 +https://crbug.com/webrtc/7505,https://issues.webrtc.org/issues/41488621 +https://crbug.com/webrtc/7688,https://issues.webrtc.org/issues/41488622 +https://crbug.com/webrtc/7693,https://issues.webrtc.org/issues/41488623 +https://crbug.com/webrtc/7791,https://issues.webrtc.org/issues/41488624 +https://crbug.com/webrtc/8078,https://issues.webrtc.org/issues/41488625 +https://crbug.com/webrtc/8226,https://issues.webrtc.org/issues/41488626 +https://crbug.com/webrtc/8234,https://issues.webrtc.org/issues/41488627 +https://crbug.com/webrtc/8357,https://issues.webrtc.org/issues/41488628 +https://crbug.com/webrtc/839,https://issues.webrtc.org/issues/41488629 +https://crbug.com/webrtc/8537,https://issues.webrtc.org/issues/41488630 +https://crbug.com/webrtc/8539,https://issues.webrtc.org/issues/41488631 +https://crbug.com/webrtc/8574,https://issues.webrtc.org/issues/41488632 +https://crbug.com/webrtc/8586,https://issues.webrtc.org/issues/41488633 +https://crbug.com/webrtc/8824,https://issues.webrtc.org/issues/41488634 +https://crbug.com/webrtc/9234,https://issues.webrtc.org/issues/41488635 +https://crbug.com/webrtc/9814,https://issues.webrtc.org/issues/41488636 +https://crbug.com/webrtc/12318,https://issues.webrtc.org/issues/41497335 +https://crbug.com/webrtc/11890,https://issues.webrtc.org/issues/42182516 +https://crbug.com/webrtc/10,https://issues.webrtc.org/issues/42220019 +https://crbug.com/webrtc/100,https://issues.webrtc.org/issues/42220020 +https://crbug.com/webrtc/1000,https://issues.webrtc.org/issues/42220021 +https://crbug.com/webrtc/10000,https://issues.webrtc.org/issues/42220022 +https://crbug.com/webrtc/10001,https://issues.webrtc.org/issues/42220023 +https://crbug.com/webrtc/10002,https://issues.webrtc.org/issues/42220024 +https://crbug.com/webrtc/10003,https://issues.webrtc.org/issues/42220025 +https://crbug.com/webrtc/10004,https://issues.webrtc.org/issues/42220026 +https://crbug.com/webrtc/10005,https://issues.webrtc.org/issues/42220027 +https://crbug.com/webrtc/10006,https://issues.webrtc.org/issues/42220028 +https://crbug.com/webrtc/10008,https://issues.webrtc.org/issues/42220029 +https://crbug.com/webrtc/10009,https://issues.webrtc.org/issues/42220030 +https://crbug.com/webrtc/1001,https://issues.webrtc.org/issues/42220031 +https://crbug.com/webrtc/10010,https://issues.webrtc.org/issues/42220032 +https://crbug.com/webrtc/10011,https://issues.webrtc.org/issues/42220033 +https://crbug.com/webrtc/10012,https://issues.webrtc.org/issues/42220034 +https://crbug.com/webrtc/10013,https://issues.webrtc.org/issues/42220035 +https://crbug.com/webrtc/10014,https://issues.webrtc.org/issues/42220036 +https://crbug.com/webrtc/10015,https://issues.webrtc.org/issues/42220037 +https://crbug.com/webrtc/10016,https://issues.webrtc.org/issues/42220038 +https://crbug.com/webrtc/10017,https://issues.webrtc.org/issues/42220039 +https://crbug.com/webrtc/10018,https://issues.webrtc.org/issues/42220040 +https://crbug.com/webrtc/10019,https://issues.webrtc.org/issues/42220041 
+https://crbug.com/webrtc/1002,https://issues.webrtc.org/issues/42220042 +https://crbug.com/webrtc/10020,https://issues.webrtc.org/issues/42220043 +https://crbug.com/webrtc/10021,https://issues.webrtc.org/issues/42220044 +https://crbug.com/webrtc/10022,https://issues.webrtc.org/issues/42220045 +https://crbug.com/webrtc/10023,https://issues.webrtc.org/issues/42220046 +https://crbug.com/webrtc/10024,https://issues.webrtc.org/issues/42220047 +https://crbug.com/webrtc/10025,https://issues.webrtc.org/issues/42220048 +https://crbug.com/webrtc/10026,https://issues.webrtc.org/issues/42220049 +https://crbug.com/webrtc/10027,https://issues.webrtc.org/issues/42220050 +https://crbug.com/webrtc/10028,https://issues.webrtc.org/issues/42220051 +https://crbug.com/webrtc/10029,https://issues.webrtc.org/issues/42220052 +https://crbug.com/webrtc/1003,https://issues.webrtc.org/issues/42220053 +https://crbug.com/webrtc/10030,https://issues.webrtc.org/issues/42220054 +https://crbug.com/webrtc/10031,https://issues.webrtc.org/issues/42220055 +https://crbug.com/webrtc/10032,https://issues.webrtc.org/issues/42220056 +https://crbug.com/webrtc/10033,https://issues.webrtc.org/issues/42220057 +https://crbug.com/webrtc/10034,https://issues.webrtc.org/issues/42220058 +https://crbug.com/webrtc/10035,https://issues.webrtc.org/issues/42220059 +https://crbug.com/webrtc/10036,https://issues.webrtc.org/issues/42220060 +https://crbug.com/webrtc/10037,https://issues.webrtc.org/issues/42220061 +https://crbug.com/webrtc/10038,https://issues.webrtc.org/issues/42220062 +https://crbug.com/webrtc/10039,https://issues.webrtc.org/issues/42220063 +https://crbug.com/webrtc/1004,https://issues.webrtc.org/issues/42220064 +https://crbug.com/webrtc/10040,https://issues.webrtc.org/issues/42220065 +https://crbug.com/webrtc/10041,https://issues.webrtc.org/issues/42220066 +https://crbug.com/webrtc/10042,https://issues.webrtc.org/issues/42220067 +https://crbug.com/webrtc/10043,https://issues.webrtc.org/issues/42220068 +https://crbug.com/webrtc/10044,https://issues.webrtc.org/issues/42220069 +https://crbug.com/webrtc/10045,https://issues.webrtc.org/issues/42220070 +https://crbug.com/webrtc/10046,https://issues.webrtc.org/issues/42220071 +https://crbug.com/webrtc/10048,https://issues.webrtc.org/issues/42220072 +https://crbug.com/webrtc/10049,https://issues.webrtc.org/issues/42220073 +https://crbug.com/webrtc/1005,https://issues.webrtc.org/issues/42220074 +https://crbug.com/webrtc/10050,https://issues.webrtc.org/issues/42220075 +https://crbug.com/webrtc/10051,https://issues.webrtc.org/issues/42220076 +https://crbug.com/webrtc/10052,https://issues.webrtc.org/issues/42220077 +https://crbug.com/webrtc/10053,https://issues.webrtc.org/issues/42220078 +https://crbug.com/webrtc/10054,https://issues.webrtc.org/issues/42220079 +https://crbug.com/webrtc/10055,https://issues.webrtc.org/issues/42220080 +https://crbug.com/webrtc/10056,https://issues.webrtc.org/issues/42220081 +https://crbug.com/webrtc/10057,https://issues.webrtc.org/issues/42220082 +https://crbug.com/webrtc/10058,https://issues.webrtc.org/issues/42220083 +https://crbug.com/webrtc/10059,https://issues.webrtc.org/issues/42220084 +https://crbug.com/webrtc/1006,https://issues.webrtc.org/issues/42220085 +https://crbug.com/webrtc/10060,https://issues.webrtc.org/issues/42220086 +https://crbug.com/webrtc/10061,https://issues.webrtc.org/issues/42220087 +https://crbug.com/webrtc/10062,https://issues.webrtc.org/issues/42220088 +https://crbug.com/webrtc/10063,https://issues.webrtc.org/issues/42220089 
+https://crbug.com/webrtc/10064,https://issues.webrtc.org/issues/42220090 +https://crbug.com/webrtc/10065,https://issues.webrtc.org/issues/42220091 +https://crbug.com/webrtc/10066,https://issues.webrtc.org/issues/42220092 +https://crbug.com/webrtc/10067,https://issues.webrtc.org/issues/42220093 +https://crbug.com/webrtc/10068,https://issues.webrtc.org/issues/42220094 +https://crbug.com/webrtc/10069,https://issues.webrtc.org/issues/42220095 +https://crbug.com/webrtc/1007,https://issues.webrtc.org/issues/42220096 +https://crbug.com/webrtc/10070,https://issues.webrtc.org/issues/42220097 +https://crbug.com/webrtc/10071,https://issues.webrtc.org/issues/42220098 +https://crbug.com/webrtc/10072,https://issues.webrtc.org/issues/42220099 +https://crbug.com/webrtc/10073,https://issues.webrtc.org/issues/42220100 +https://crbug.com/webrtc/10074,https://issues.webrtc.org/issues/42220101 +https://crbug.com/webrtc/10075,https://issues.webrtc.org/issues/42220102 +https://crbug.com/webrtc/10076,https://issues.webrtc.org/issues/42220103 +https://crbug.com/webrtc/10077,https://issues.webrtc.org/issues/42220104 +https://crbug.com/webrtc/10078,https://issues.webrtc.org/issues/42220105 +https://crbug.com/webrtc/10079,https://issues.webrtc.org/issues/42220106 +https://crbug.com/webrtc/1008,https://issues.webrtc.org/issues/42220107 +https://crbug.com/webrtc/10080,https://issues.webrtc.org/issues/42220108 +https://crbug.com/webrtc/10081,https://issues.webrtc.org/issues/42220109 +https://crbug.com/webrtc/10082,https://issues.webrtc.org/issues/42220110 +https://crbug.com/webrtc/10084,https://issues.webrtc.org/issues/42220111 +https://crbug.com/webrtc/10085,https://issues.webrtc.org/issues/42220112 +https://crbug.com/webrtc/10087,https://issues.webrtc.org/issues/42220113 +https://crbug.com/webrtc/10088,https://issues.webrtc.org/issues/42220114 +https://crbug.com/webrtc/10089,https://issues.webrtc.org/issues/42220115 +https://crbug.com/webrtc/1009,https://issues.webrtc.org/issues/42220116 +https://crbug.com/webrtc/10090,https://issues.webrtc.org/issues/42220117 +https://crbug.com/webrtc/10091,https://issues.webrtc.org/issues/42220118 +https://crbug.com/webrtc/10092,https://issues.webrtc.org/issues/42220119 +https://crbug.com/webrtc/10093,https://issues.webrtc.org/issues/42220120 +https://crbug.com/webrtc/10094,https://issues.webrtc.org/issues/42220121 +https://crbug.com/webrtc/10095,https://issues.webrtc.org/issues/42220122 +https://crbug.com/webrtc/10096,https://issues.webrtc.org/issues/42220123 +https://crbug.com/webrtc/10097,https://issues.webrtc.org/issues/42220124 +https://crbug.com/webrtc/10098,https://issues.webrtc.org/issues/42220125 +https://crbug.com/webrtc/10099,https://issues.webrtc.org/issues/42220126 +https://crbug.com/webrtc/101,https://issues.webrtc.org/issues/42220127 +https://crbug.com/webrtc/1010,https://issues.webrtc.org/issues/42220128 +https://crbug.com/webrtc/10100,https://issues.webrtc.org/issues/42220129 +https://crbug.com/webrtc/10101,https://issues.webrtc.org/issues/42220130 +https://crbug.com/webrtc/10102,https://issues.webrtc.org/issues/42220131 +https://crbug.com/webrtc/10103,https://issues.webrtc.org/issues/42220132 +https://crbug.com/webrtc/10104,https://issues.webrtc.org/issues/42220133 +https://crbug.com/webrtc/10105,https://issues.webrtc.org/issues/42220134 +https://crbug.com/webrtc/10106,https://issues.webrtc.org/issues/42220135 +https://crbug.com/webrtc/10107,https://issues.webrtc.org/issues/42220136 +https://crbug.com/webrtc/10108,https://issues.webrtc.org/issues/42220137 
+https://crbug.com/webrtc/10109,https://issues.webrtc.org/issues/42220138 +https://crbug.com/webrtc/1011,https://issues.webrtc.org/issues/42220139 +https://crbug.com/webrtc/10110,https://issues.webrtc.org/issues/42220140 +https://crbug.com/webrtc/10111,https://issues.webrtc.org/issues/42220141 +https://crbug.com/webrtc/10112,https://issues.webrtc.org/issues/42220142 +https://crbug.com/webrtc/10113,https://issues.webrtc.org/issues/42220143 +https://crbug.com/webrtc/10114,https://issues.webrtc.org/issues/42220144 +https://crbug.com/webrtc/10115,https://issues.webrtc.org/issues/42220145 +https://crbug.com/webrtc/10116,https://issues.webrtc.org/issues/42220146 +https://crbug.com/webrtc/10117,https://issues.webrtc.org/issues/42220147 +https://crbug.com/webrtc/10118,https://issues.webrtc.org/issues/42220148 +https://crbug.com/webrtc/10119,https://issues.webrtc.org/issues/42220149 +https://crbug.com/webrtc/1012,https://issues.webrtc.org/issues/42220150 +https://crbug.com/webrtc/10120,https://issues.webrtc.org/issues/42220151 +https://crbug.com/webrtc/10121,https://issues.webrtc.org/issues/42220152 +https://crbug.com/webrtc/10122,https://issues.webrtc.org/issues/42220153 +https://crbug.com/webrtc/10124,https://issues.webrtc.org/issues/42220154 +https://crbug.com/webrtc/10125,https://issues.webrtc.org/issues/42220155 +https://crbug.com/webrtc/10126,https://issues.webrtc.org/issues/42220156 +https://crbug.com/webrtc/10127,https://issues.webrtc.org/issues/42220157 +https://crbug.com/webrtc/10128,https://issues.webrtc.org/issues/42220158 +https://crbug.com/webrtc/10129,https://issues.webrtc.org/issues/42220159 +https://crbug.com/webrtc/1013,https://issues.webrtc.org/issues/42220160 +https://crbug.com/webrtc/10130,https://issues.webrtc.org/issues/42220161 +https://crbug.com/webrtc/10131,https://issues.webrtc.org/issues/42220162 +https://crbug.com/webrtc/10132,https://issues.webrtc.org/issues/42220163 +https://crbug.com/webrtc/10133,https://issues.webrtc.org/issues/42220164 +https://crbug.com/webrtc/10135,https://issues.webrtc.org/issues/42220165 +https://crbug.com/webrtc/10136,https://issues.webrtc.org/issues/42220166 +https://crbug.com/webrtc/10137,https://issues.webrtc.org/issues/42220167 +https://crbug.com/webrtc/10138,https://issues.webrtc.org/issues/42220168 +https://crbug.com/webrtc/10139,https://issues.webrtc.org/issues/42220169 +https://crbug.com/webrtc/1014,https://issues.webrtc.org/issues/42220170 +https://crbug.com/webrtc/10140,https://issues.webrtc.org/issues/42220171 +https://crbug.com/webrtc/10141,https://issues.webrtc.org/issues/42220172 +https://crbug.com/webrtc/10142,https://issues.webrtc.org/issues/42220173 +https://crbug.com/webrtc/10143,https://issues.webrtc.org/issues/42220174 +https://crbug.com/webrtc/10144,https://issues.webrtc.org/issues/42220175 +https://crbug.com/webrtc/10145,https://issues.webrtc.org/issues/42220176 +https://crbug.com/webrtc/10146,https://issues.webrtc.org/issues/42220177 +https://crbug.com/webrtc/10147,https://issues.webrtc.org/issues/42220178 +https://crbug.com/webrtc/10148,https://issues.webrtc.org/issues/42220179 +https://crbug.com/webrtc/10149,https://issues.webrtc.org/issues/42220180 +https://crbug.com/webrtc/1015,https://issues.webrtc.org/issues/42220181 +https://crbug.com/webrtc/10150,https://issues.webrtc.org/issues/42220182 +https://crbug.com/webrtc/10151,https://issues.webrtc.org/issues/42220183 +https://crbug.com/webrtc/10152,https://issues.webrtc.org/issues/42220184 +https://crbug.com/webrtc/10153,https://issues.webrtc.org/issues/42220185 
+https://crbug.com/webrtc/10154,https://issues.webrtc.org/issues/42220186 +https://crbug.com/webrtc/10155,https://issues.webrtc.org/issues/42220187 +https://crbug.com/webrtc/10156,https://issues.webrtc.org/issues/42220188 +https://crbug.com/webrtc/10157,https://issues.webrtc.org/issues/42220189 +https://crbug.com/webrtc/10158,https://issues.webrtc.org/issues/42220190 +https://crbug.com/webrtc/10159,https://issues.webrtc.org/issues/42220191 +https://crbug.com/webrtc/1016,https://issues.webrtc.org/issues/42220192 +https://crbug.com/webrtc/10160,https://issues.webrtc.org/issues/42220193 +https://crbug.com/webrtc/10161,https://issues.webrtc.org/issues/42220194 +https://crbug.com/webrtc/10162,https://issues.webrtc.org/issues/42220195 +https://crbug.com/webrtc/10163,https://issues.webrtc.org/issues/42220196 +https://crbug.com/webrtc/10164,https://issues.webrtc.org/issues/42220197 +https://crbug.com/webrtc/10166,https://issues.webrtc.org/issues/42220198 +https://crbug.com/webrtc/10167,https://issues.webrtc.org/issues/42220199 +https://crbug.com/webrtc/10168,https://issues.webrtc.org/issues/42220200 +https://crbug.com/webrtc/10169,https://issues.webrtc.org/issues/42220201 +https://crbug.com/webrtc/1017,https://issues.webrtc.org/issues/42220202 +https://crbug.com/webrtc/10170,https://issues.webrtc.org/issues/42220203 +https://crbug.com/webrtc/10171,https://issues.webrtc.org/issues/42220204 +https://crbug.com/webrtc/10172,https://issues.webrtc.org/issues/42220205 +https://crbug.com/webrtc/10173,https://issues.webrtc.org/issues/42220206 +https://crbug.com/webrtc/10174,https://issues.webrtc.org/issues/42220207 +https://crbug.com/webrtc/10175,https://issues.webrtc.org/issues/42220208 +https://crbug.com/webrtc/10176,https://issues.webrtc.org/issues/42220209 +https://crbug.com/webrtc/10177,https://issues.webrtc.org/issues/42220210 +https://crbug.com/webrtc/10178,https://issues.webrtc.org/issues/42220211 +https://crbug.com/webrtc/10179,https://issues.webrtc.org/issues/42220212 +https://crbug.com/webrtc/1018,https://issues.webrtc.org/issues/42220213 +https://crbug.com/webrtc/10180,https://issues.webrtc.org/issues/42220214 +https://crbug.com/webrtc/10181,https://issues.webrtc.org/issues/42220215 +https://crbug.com/webrtc/10182,https://issues.webrtc.org/issues/42220216 +https://crbug.com/webrtc/10183,https://issues.webrtc.org/issues/42220217 +https://crbug.com/webrtc/10184,https://issues.webrtc.org/issues/42220218 +https://crbug.com/webrtc/10185,https://issues.webrtc.org/issues/42220219 +https://crbug.com/webrtc/10187,https://issues.webrtc.org/issues/42220220 +https://crbug.com/webrtc/10189,https://issues.webrtc.org/issues/42220221 +https://crbug.com/webrtc/1019,https://issues.webrtc.org/issues/42220222 +https://crbug.com/webrtc/10190,https://issues.webrtc.org/issues/42220223 +https://crbug.com/webrtc/10191,https://issues.webrtc.org/issues/42220224 +https://crbug.com/webrtc/10192,https://issues.webrtc.org/issues/42220225 +https://crbug.com/webrtc/10193,https://issues.webrtc.org/issues/42220226 +https://crbug.com/webrtc/10194,https://issues.webrtc.org/issues/42220227 +https://crbug.com/webrtc/10195,https://issues.webrtc.org/issues/42220228 +https://crbug.com/webrtc/10196,https://issues.webrtc.org/issues/42220229 +https://crbug.com/webrtc/10197,https://issues.webrtc.org/issues/42220230 +https://crbug.com/webrtc/10198,https://issues.webrtc.org/issues/42220231 +https://crbug.com/webrtc/102,https://issues.webrtc.org/issues/42220232 +https://crbug.com/webrtc/1020,https://issues.webrtc.org/issues/42220233 
+https://crbug.com/webrtc/10200,https://issues.webrtc.org/issues/42220234 +https://crbug.com/webrtc/10201,https://issues.webrtc.org/issues/42220235 +https://crbug.com/webrtc/10202,https://issues.webrtc.org/issues/42220236 +https://crbug.com/webrtc/10203,https://issues.webrtc.org/issues/42220237 +https://crbug.com/webrtc/10204,https://issues.webrtc.org/issues/42220238 +https://crbug.com/webrtc/10205,https://issues.webrtc.org/issues/42220239 +https://crbug.com/webrtc/10206,https://issues.webrtc.org/issues/42220240 +https://crbug.com/webrtc/10207,https://issues.webrtc.org/issues/42220241 +https://crbug.com/webrtc/10208,https://issues.webrtc.org/issues/42220242 +https://crbug.com/webrtc/10209,https://issues.webrtc.org/issues/42220243 +https://crbug.com/webrtc/1021,https://issues.webrtc.org/issues/42220244 +https://crbug.com/webrtc/10210,https://issues.webrtc.org/issues/42220245 +https://crbug.com/webrtc/10211,https://issues.webrtc.org/issues/42220246 +https://crbug.com/webrtc/10212,https://issues.webrtc.org/issues/42220247 +https://crbug.com/webrtc/10213,https://issues.webrtc.org/issues/42220248 +https://crbug.com/webrtc/10214,https://issues.webrtc.org/issues/42220249 +https://crbug.com/webrtc/10215,https://issues.webrtc.org/issues/42220250 +https://crbug.com/webrtc/10216,https://issues.webrtc.org/issues/42220251 +https://crbug.com/webrtc/10217,https://issues.webrtc.org/issues/42220252 +https://crbug.com/webrtc/10218,https://issues.webrtc.org/issues/42220253 +https://crbug.com/webrtc/10219,https://issues.webrtc.org/issues/42220254 +https://crbug.com/webrtc/1022,https://issues.webrtc.org/issues/42220255 +https://crbug.com/webrtc/10220,https://issues.webrtc.org/issues/42220256 +https://crbug.com/webrtc/10221,https://issues.webrtc.org/issues/42220257 +https://crbug.com/webrtc/10222,https://issues.webrtc.org/issues/42220258 +https://crbug.com/webrtc/10223,https://issues.webrtc.org/issues/42220259 +https://crbug.com/webrtc/10224,https://issues.webrtc.org/issues/42220260 +https://crbug.com/webrtc/10225,https://issues.webrtc.org/issues/42220261 +https://crbug.com/webrtc/10226,https://issues.webrtc.org/issues/42220262 +https://crbug.com/webrtc/10227,https://issues.webrtc.org/issues/42220263 +https://crbug.com/webrtc/10228,https://issues.webrtc.org/issues/42220264 +https://crbug.com/webrtc/10229,https://issues.webrtc.org/issues/42220265 +https://crbug.com/webrtc/1023,https://issues.webrtc.org/issues/42220266 +https://crbug.com/webrtc/10230,https://issues.webrtc.org/issues/42220267 +https://crbug.com/webrtc/10231,https://issues.webrtc.org/issues/42220268 +https://crbug.com/webrtc/10232,https://issues.webrtc.org/issues/42220269 +https://crbug.com/webrtc/10233,https://issues.webrtc.org/issues/42220270 +https://crbug.com/webrtc/10234,https://issues.webrtc.org/issues/42220271 +https://crbug.com/webrtc/10235,https://issues.webrtc.org/issues/42220272 +https://crbug.com/webrtc/10236,https://issues.webrtc.org/issues/42220273 +https://crbug.com/webrtc/10237,https://issues.webrtc.org/issues/42220274 +https://crbug.com/webrtc/10238,https://issues.webrtc.org/issues/42220275 +https://crbug.com/webrtc/10239,https://issues.webrtc.org/issues/42220276 +https://crbug.com/webrtc/1024,https://issues.webrtc.org/issues/42220277 +https://crbug.com/webrtc/10241,https://issues.webrtc.org/issues/42220278 +https://crbug.com/webrtc/10242,https://issues.webrtc.org/issues/42220279 +https://crbug.com/webrtc/10243,https://issues.webrtc.org/issues/42220280 +https://crbug.com/webrtc/10244,https://issues.webrtc.org/issues/42220281 
+https://crbug.com/webrtc/10245,https://issues.webrtc.org/issues/42220282 +https://crbug.com/webrtc/10246,https://issues.webrtc.org/issues/42220283 +https://crbug.com/webrtc/10247,https://issues.webrtc.org/issues/42220284 +https://crbug.com/webrtc/10248,https://issues.webrtc.org/issues/42220285 +https://crbug.com/webrtc/10249,https://issues.webrtc.org/issues/42220286 +https://crbug.com/webrtc/1025,https://issues.webrtc.org/issues/42220287 +https://crbug.com/webrtc/10250,https://issues.webrtc.org/issues/42220288 +https://crbug.com/webrtc/10251,https://issues.webrtc.org/issues/42220289 +https://crbug.com/webrtc/10252,https://issues.webrtc.org/issues/42220290 +https://crbug.com/webrtc/10253,https://issues.webrtc.org/issues/42220291 +https://crbug.com/webrtc/10254,https://issues.webrtc.org/issues/42220292 +https://crbug.com/webrtc/10255,https://issues.webrtc.org/issues/42220293 +https://crbug.com/webrtc/10256,https://issues.webrtc.org/issues/42220294 +https://crbug.com/webrtc/10257,https://issues.webrtc.org/issues/42220295 +https://crbug.com/webrtc/10259,https://issues.webrtc.org/issues/42220296 +https://crbug.com/webrtc/1026,https://issues.webrtc.org/issues/42220297 +https://crbug.com/webrtc/10260,https://issues.webrtc.org/issues/42220298 +https://crbug.com/webrtc/10262,https://issues.webrtc.org/issues/42220299 +https://crbug.com/webrtc/10263,https://issues.webrtc.org/issues/42220300 +https://crbug.com/webrtc/10264,https://issues.webrtc.org/issues/42220301 +https://crbug.com/webrtc/10265,https://issues.webrtc.org/issues/42220302 +https://crbug.com/webrtc/10266,https://issues.webrtc.org/issues/42220303 +https://crbug.com/webrtc/10267,https://issues.webrtc.org/issues/42220304 +https://crbug.com/webrtc/10268,https://issues.webrtc.org/issues/42220305 +https://crbug.com/webrtc/10269,https://issues.webrtc.org/issues/42220306 +https://crbug.com/webrtc/1027,https://issues.webrtc.org/issues/42220307 +https://crbug.com/webrtc/10270,https://issues.webrtc.org/issues/42220308 +https://crbug.com/webrtc/10271,https://issues.webrtc.org/issues/42220309 +https://crbug.com/webrtc/10272,https://issues.webrtc.org/issues/42220310 +https://crbug.com/webrtc/10273,https://issues.webrtc.org/issues/42220311 +https://crbug.com/webrtc/10274,https://issues.webrtc.org/issues/42220312 +https://crbug.com/webrtc/10275,https://issues.webrtc.org/issues/42220313 +https://crbug.com/webrtc/10276,https://issues.webrtc.org/issues/42220314 +https://crbug.com/webrtc/10278,https://issues.webrtc.org/issues/42220315 +https://crbug.com/webrtc/10279,https://issues.webrtc.org/issues/42220316 +https://crbug.com/webrtc/1028,https://issues.webrtc.org/issues/42220317 +https://crbug.com/webrtc/10280,https://issues.webrtc.org/issues/42220318 +https://crbug.com/webrtc/10281,https://issues.webrtc.org/issues/42220319 +https://crbug.com/webrtc/10282,https://issues.webrtc.org/issues/42220320 +https://crbug.com/webrtc/10283,https://issues.webrtc.org/issues/42220321 +https://crbug.com/webrtc/10284,https://issues.webrtc.org/issues/42220322 +https://crbug.com/webrtc/10285,https://issues.webrtc.org/issues/42220323 +https://crbug.com/webrtc/10286,https://issues.webrtc.org/issues/42220324 +https://crbug.com/webrtc/10287,https://issues.webrtc.org/issues/42220325 +https://crbug.com/webrtc/10288,https://issues.webrtc.org/issues/42220326 +https://crbug.com/webrtc/10289,https://issues.webrtc.org/issues/42220327 +https://crbug.com/webrtc/1029,https://issues.webrtc.org/issues/42220328 +https://crbug.com/webrtc/10290,https://issues.webrtc.org/issues/42220329 
+https://crbug.com/webrtc/10291,https://issues.webrtc.org/issues/42220330 +https://crbug.com/webrtc/10292,https://issues.webrtc.org/issues/42220331 +https://crbug.com/webrtc/10293,https://issues.webrtc.org/issues/42220332 +https://crbug.com/webrtc/10295,https://issues.webrtc.org/issues/42220333 +https://crbug.com/webrtc/10296,https://issues.webrtc.org/issues/42220334 +https://crbug.com/webrtc/10297,https://issues.webrtc.org/issues/42220335 +https://crbug.com/webrtc/10298,https://issues.webrtc.org/issues/42220336 +https://crbug.com/webrtc/10299,https://issues.webrtc.org/issues/42220337 +https://crbug.com/webrtc/103,https://issues.webrtc.org/issues/42220338 +https://crbug.com/webrtc/1030,https://issues.webrtc.org/issues/42220339 +https://crbug.com/webrtc/10300,https://issues.webrtc.org/issues/42220340 +https://crbug.com/webrtc/10301,https://issues.webrtc.org/issues/42220341 +https://crbug.com/webrtc/10302,https://issues.webrtc.org/issues/42220342 +https://crbug.com/webrtc/10303,https://issues.webrtc.org/issues/42220343 +https://crbug.com/webrtc/10304,https://issues.webrtc.org/issues/42220344 +https://crbug.com/webrtc/10305,https://issues.webrtc.org/issues/42220345 +https://crbug.com/webrtc/10306,https://issues.webrtc.org/issues/42220346 +https://crbug.com/webrtc/10307,https://issues.webrtc.org/issues/42220347 +https://crbug.com/webrtc/10308,https://issues.webrtc.org/issues/42220348 +https://crbug.com/webrtc/10309,https://issues.webrtc.org/issues/42220349 +https://crbug.com/webrtc/1031,https://issues.webrtc.org/issues/42220350 +https://crbug.com/webrtc/10310,https://issues.webrtc.org/issues/42220351 +https://crbug.com/webrtc/10311,https://issues.webrtc.org/issues/42220352 +https://crbug.com/webrtc/10312,https://issues.webrtc.org/issues/42220353 +https://crbug.com/webrtc/10313,https://issues.webrtc.org/issues/42220354 +https://crbug.com/webrtc/10314,https://issues.webrtc.org/issues/42220355 +https://crbug.com/webrtc/10315,https://issues.webrtc.org/issues/42220356 +https://crbug.com/webrtc/10316,https://issues.webrtc.org/issues/42220357 +https://crbug.com/webrtc/10317,https://issues.webrtc.org/issues/42220358 +https://crbug.com/webrtc/10318,https://issues.webrtc.org/issues/42220359 +https://crbug.com/webrtc/10319,https://issues.webrtc.org/issues/42220360 +https://crbug.com/webrtc/1032,https://issues.webrtc.org/issues/42220361 +https://crbug.com/webrtc/10320,https://issues.webrtc.org/issues/42220362 +https://crbug.com/webrtc/10321,https://issues.webrtc.org/issues/42220363 +https://crbug.com/webrtc/10322,https://issues.webrtc.org/issues/42220364 +https://crbug.com/webrtc/10323,https://issues.webrtc.org/issues/42220365 +https://crbug.com/webrtc/10324,https://issues.webrtc.org/issues/42220366 +https://crbug.com/webrtc/10325,https://issues.webrtc.org/issues/42220367 +https://crbug.com/webrtc/10326,https://issues.webrtc.org/issues/42220368 +https://crbug.com/webrtc/10327,https://issues.webrtc.org/issues/42220369 +https://crbug.com/webrtc/10328,https://issues.webrtc.org/issues/42220370 +https://crbug.com/webrtc/10329,https://issues.webrtc.org/issues/42220371 +https://crbug.com/webrtc/1033,https://issues.webrtc.org/issues/42220372 +https://crbug.com/webrtc/10330,https://issues.webrtc.org/issues/42220373 +https://crbug.com/webrtc/10331,https://issues.webrtc.org/issues/42220374 +https://crbug.com/webrtc/10332,https://issues.webrtc.org/issues/42220375 +https://crbug.com/webrtc/10333,https://issues.webrtc.org/issues/42220376 +https://crbug.com/webrtc/10334,https://issues.webrtc.org/issues/42220377 
+https://crbug.com/webrtc/10335,https://issues.webrtc.org/issues/42220378 +https://crbug.com/webrtc/10336,https://issues.webrtc.org/issues/42220379 +https://crbug.com/webrtc/10337,https://issues.webrtc.org/issues/42220380 +https://crbug.com/webrtc/10338,https://issues.webrtc.org/issues/42220381 +https://crbug.com/webrtc/10339,https://issues.webrtc.org/issues/42220382 +https://crbug.com/webrtc/1034,https://issues.webrtc.org/issues/42220383 +https://crbug.com/webrtc/10340,https://issues.webrtc.org/issues/42220384 +https://crbug.com/webrtc/10341,https://issues.webrtc.org/issues/42220385 +https://crbug.com/webrtc/10342,https://issues.webrtc.org/issues/42220386 +https://crbug.com/webrtc/10343,https://issues.webrtc.org/issues/42220387 +https://crbug.com/webrtc/10344,https://issues.webrtc.org/issues/42220388 +https://crbug.com/webrtc/10345,https://issues.webrtc.org/issues/42220389 +https://crbug.com/webrtc/10346,https://issues.webrtc.org/issues/42220390 +https://crbug.com/webrtc/10347,https://issues.webrtc.org/issues/42220391 +https://crbug.com/webrtc/10348,https://issues.webrtc.org/issues/42220392 +https://crbug.com/webrtc/10349,https://issues.webrtc.org/issues/42220393 +https://crbug.com/webrtc/1035,https://issues.webrtc.org/issues/42220394 +https://crbug.com/webrtc/10350,https://issues.webrtc.org/issues/42220395 +https://crbug.com/webrtc/10351,https://issues.webrtc.org/issues/42220396 +https://crbug.com/webrtc/10353,https://issues.webrtc.org/issues/42220397 +https://crbug.com/webrtc/10354,https://issues.webrtc.org/issues/42220398 +https://crbug.com/webrtc/10355,https://issues.webrtc.org/issues/42220399 +https://crbug.com/webrtc/10357,https://issues.webrtc.org/issues/42220400 +https://crbug.com/webrtc/10359,https://issues.webrtc.org/issues/42220401 +https://crbug.com/webrtc/1036,https://issues.webrtc.org/issues/42220402 +https://crbug.com/webrtc/10360,https://issues.webrtc.org/issues/42220403 +https://crbug.com/webrtc/10361,https://issues.webrtc.org/issues/42220404 +https://crbug.com/webrtc/10362,https://issues.webrtc.org/issues/42220405 +https://crbug.com/webrtc/10363,https://issues.webrtc.org/issues/42220406 +https://crbug.com/webrtc/10364,https://issues.webrtc.org/issues/42220407 +https://crbug.com/webrtc/10365,https://issues.webrtc.org/issues/42220408 +https://crbug.com/webrtc/10366,https://issues.webrtc.org/issues/42220409 +https://crbug.com/webrtc/10367,https://issues.webrtc.org/issues/42220410 +https://crbug.com/webrtc/10368,https://issues.webrtc.org/issues/42220411 +https://crbug.com/webrtc/10369,https://issues.webrtc.org/issues/42220412 +https://crbug.com/webrtc/1037,https://issues.webrtc.org/issues/42220413 +https://crbug.com/webrtc/10370,https://issues.webrtc.org/issues/42220414 +https://crbug.com/webrtc/10371,https://issues.webrtc.org/issues/42220415 +https://crbug.com/webrtc/10372,https://issues.webrtc.org/issues/42220416 +https://crbug.com/webrtc/10373,https://issues.webrtc.org/issues/42220417 +https://crbug.com/webrtc/10374,https://issues.webrtc.org/issues/42220418 +https://crbug.com/webrtc/10375,https://issues.webrtc.org/issues/42220419 +https://crbug.com/webrtc/10376,https://issues.webrtc.org/issues/42220420 +https://crbug.com/webrtc/10377,https://issues.webrtc.org/issues/42220421 +https://crbug.com/webrtc/10378,https://issues.webrtc.org/issues/42220422 +https://crbug.com/webrtc/10379,https://issues.webrtc.org/issues/42220423 +https://crbug.com/webrtc/1038,https://issues.webrtc.org/issues/42220424 +https://crbug.com/webrtc/10380,https://issues.webrtc.org/issues/42220425 
+https://crbug.com/webrtc/10381,https://issues.webrtc.org/issues/42220426 +https://crbug.com/webrtc/10382,https://issues.webrtc.org/issues/42220427 +https://crbug.com/webrtc/10383,https://issues.webrtc.org/issues/42220428 +https://crbug.com/webrtc/10384,https://issues.webrtc.org/issues/42220429 +https://crbug.com/webrtc/10385,https://issues.webrtc.org/issues/42220430 +https://crbug.com/webrtc/10386,https://issues.webrtc.org/issues/42220431 +https://crbug.com/webrtc/10387,https://issues.webrtc.org/issues/42220432 +https://crbug.com/webrtc/10388,https://issues.webrtc.org/issues/42220433 +https://crbug.com/webrtc/10389,https://issues.webrtc.org/issues/42220434 +https://crbug.com/webrtc/1039,https://issues.webrtc.org/issues/42220435 +https://crbug.com/webrtc/10390,https://issues.webrtc.org/issues/42220436 +https://crbug.com/webrtc/10391,https://issues.webrtc.org/issues/42220437 +https://crbug.com/webrtc/10392,https://issues.webrtc.org/issues/42220438 +https://crbug.com/webrtc/10393,https://issues.webrtc.org/issues/42220439 +https://crbug.com/webrtc/10394,https://issues.webrtc.org/issues/42220440 +https://crbug.com/webrtc/10395,https://issues.webrtc.org/issues/42220441 +https://crbug.com/webrtc/10396,https://issues.webrtc.org/issues/42220442 +https://crbug.com/webrtc/10397,https://issues.webrtc.org/issues/42220443 +https://crbug.com/webrtc/10398,https://issues.webrtc.org/issues/42220444 +https://crbug.com/webrtc/104,https://issues.webrtc.org/issues/42220445 +https://crbug.com/webrtc/1040,https://issues.webrtc.org/issues/42220446 +https://crbug.com/webrtc/10400,https://issues.webrtc.org/issues/42220447 +https://crbug.com/webrtc/10402,https://issues.webrtc.org/issues/42220448 +https://crbug.com/webrtc/10403,https://issues.webrtc.org/issues/42220449 +https://crbug.com/webrtc/10405,https://issues.webrtc.org/issues/42220450 +https://crbug.com/webrtc/10406,https://issues.webrtc.org/issues/42220451 +https://crbug.com/webrtc/10407,https://issues.webrtc.org/issues/42220452 +https://crbug.com/webrtc/10408,https://issues.webrtc.org/issues/42220453 +https://crbug.com/webrtc/10409,https://issues.webrtc.org/issues/42220454 +https://crbug.com/webrtc/1041,https://issues.webrtc.org/issues/42220455 +https://crbug.com/webrtc/10410,https://issues.webrtc.org/issues/42220456 +https://crbug.com/webrtc/10411,https://issues.webrtc.org/issues/42220457 +https://crbug.com/webrtc/10412,https://issues.webrtc.org/issues/42220458 +https://crbug.com/webrtc/10413,https://issues.webrtc.org/issues/42220459 +https://crbug.com/webrtc/10414,https://issues.webrtc.org/issues/42220460 +https://crbug.com/webrtc/10415,https://issues.webrtc.org/issues/42220461 +https://crbug.com/webrtc/10416,https://issues.webrtc.org/issues/42220462 +https://crbug.com/webrtc/10417,https://issues.webrtc.org/issues/42220463 +https://crbug.com/webrtc/10418,https://issues.webrtc.org/issues/42220464 +https://crbug.com/webrtc/1042,https://issues.webrtc.org/issues/42220465 +https://crbug.com/webrtc/10420,https://issues.webrtc.org/issues/42220466 +https://crbug.com/webrtc/10424,https://issues.webrtc.org/issues/42220467 +https://crbug.com/webrtc/10425,https://issues.webrtc.org/issues/42220468 +https://crbug.com/webrtc/10426,https://issues.webrtc.org/issues/42220469 +https://crbug.com/webrtc/10427,https://issues.webrtc.org/issues/42220470 +https://crbug.com/webrtc/10429,https://issues.webrtc.org/issues/42220471 +https://crbug.com/webrtc/1043,https://issues.webrtc.org/issues/42220472 +https://crbug.com/webrtc/10430,https://issues.webrtc.org/issues/42220473 
+https://crbug.com/webrtc/10431,https://issues.webrtc.org/issues/42220474 +https://crbug.com/webrtc/10432,https://issues.webrtc.org/issues/42220475 +https://crbug.com/webrtc/10433,https://issues.webrtc.org/issues/42220476 +https://crbug.com/webrtc/10434,https://issues.webrtc.org/issues/42220477 +https://crbug.com/webrtc/10435,https://issues.webrtc.org/issues/42220478 +https://crbug.com/webrtc/10436,https://issues.webrtc.org/issues/42220479 +https://crbug.com/webrtc/10437,https://issues.webrtc.org/issues/42220480 +https://crbug.com/webrtc/10438,https://issues.webrtc.org/issues/42220481 +https://crbug.com/webrtc/10439,https://issues.webrtc.org/issues/42220482 +https://crbug.com/webrtc/1044,https://issues.webrtc.org/issues/42220483 +https://crbug.com/webrtc/10440,https://issues.webrtc.org/issues/42220484 +https://crbug.com/webrtc/10441,https://issues.webrtc.org/issues/42220485 +https://crbug.com/webrtc/10442,https://issues.webrtc.org/issues/42220486 +https://crbug.com/webrtc/10443,https://issues.webrtc.org/issues/42220487 +https://crbug.com/webrtc/10444,https://issues.webrtc.org/issues/42220488 +https://crbug.com/webrtc/10445,https://issues.webrtc.org/issues/42220489 +https://crbug.com/webrtc/10446,https://issues.webrtc.org/issues/42220490 +https://crbug.com/webrtc/10447,https://issues.webrtc.org/issues/42220491 +https://crbug.com/webrtc/10448,https://issues.webrtc.org/issues/42220492 +https://crbug.com/webrtc/10449,https://issues.webrtc.org/issues/42220493 +https://crbug.com/webrtc/1045,https://issues.webrtc.org/issues/42220494 +https://crbug.com/webrtc/10450,https://issues.webrtc.org/issues/42220495 +https://crbug.com/webrtc/10451,https://issues.webrtc.org/issues/42220496 +https://crbug.com/webrtc/10452,https://issues.webrtc.org/issues/42220497 +https://crbug.com/webrtc/10453,https://issues.webrtc.org/issues/42220498 +https://crbug.com/webrtc/10454,https://issues.webrtc.org/issues/42220499 +https://crbug.com/webrtc/10455,https://issues.webrtc.org/issues/42220500 +https://crbug.com/webrtc/10456,https://issues.webrtc.org/issues/42220501 +https://crbug.com/webrtc/10457,https://issues.webrtc.org/issues/42220502 +https://crbug.com/webrtc/10458,https://issues.webrtc.org/issues/42220503 +https://crbug.com/webrtc/1046,https://issues.webrtc.org/issues/42220504 +https://crbug.com/webrtc/10460,https://issues.webrtc.org/issues/42220505 +https://crbug.com/webrtc/10461,https://issues.webrtc.org/issues/42220506 +https://crbug.com/webrtc/10462,https://issues.webrtc.org/issues/42220507 +https://crbug.com/webrtc/10463,https://issues.webrtc.org/issues/42220508 +https://crbug.com/webrtc/10464,https://issues.webrtc.org/issues/42220509 +https://crbug.com/webrtc/10465,https://issues.webrtc.org/issues/42220510 +https://crbug.com/webrtc/10466,https://issues.webrtc.org/issues/42220511 +https://crbug.com/webrtc/10469,https://issues.webrtc.org/issues/42220512 +https://crbug.com/webrtc/1047,https://issues.webrtc.org/issues/42220513 +https://crbug.com/webrtc/10470,https://issues.webrtc.org/issues/42220514 +https://crbug.com/webrtc/10471,https://issues.webrtc.org/issues/42220515 +https://crbug.com/webrtc/10472,https://issues.webrtc.org/issues/42220516 +https://crbug.com/webrtc/10473,https://issues.webrtc.org/issues/42220517 +https://crbug.com/webrtc/10474,https://issues.webrtc.org/issues/42220518 +https://crbug.com/webrtc/10475,https://issues.webrtc.org/issues/42220519 +https://crbug.com/webrtc/10476,https://issues.webrtc.org/issues/42220520 +https://crbug.com/webrtc/10477,https://issues.webrtc.org/issues/42220521 
+https://crbug.com/webrtc/10478,https://issues.webrtc.org/issues/42220522 +https://crbug.com/webrtc/10479,https://issues.webrtc.org/issues/42220523 +https://crbug.com/webrtc/1048,https://issues.webrtc.org/issues/42220524 +https://crbug.com/webrtc/10480,https://issues.webrtc.org/issues/42220525 +https://crbug.com/webrtc/10481,https://issues.webrtc.org/issues/42220526 +https://crbug.com/webrtc/10482,https://issues.webrtc.org/issues/42220527 +https://crbug.com/webrtc/10483,https://issues.webrtc.org/issues/42220528 +https://crbug.com/webrtc/10484,https://issues.webrtc.org/issues/42220529 +https://crbug.com/webrtc/10485,https://issues.webrtc.org/issues/42220530 +https://crbug.com/webrtc/10487,https://issues.webrtc.org/issues/42220531 +https://crbug.com/webrtc/10488,https://issues.webrtc.org/issues/42220532 +https://crbug.com/webrtc/10489,https://issues.webrtc.org/issues/42220533 +https://crbug.com/webrtc/1049,https://issues.webrtc.org/issues/42220534 +https://crbug.com/webrtc/10490,https://issues.webrtc.org/issues/42220535 +https://crbug.com/webrtc/10491,https://issues.webrtc.org/issues/42220536 +https://crbug.com/webrtc/10492,https://issues.webrtc.org/issues/42220537 +https://crbug.com/webrtc/10493,https://issues.webrtc.org/issues/42220538 +https://crbug.com/webrtc/10494,https://issues.webrtc.org/issues/42220539 +https://crbug.com/webrtc/10495,https://issues.webrtc.org/issues/42220540 +https://crbug.com/webrtc/10496,https://issues.webrtc.org/issues/42220541 +https://crbug.com/webrtc/10497,https://issues.webrtc.org/issues/42220542 +https://crbug.com/webrtc/10498,https://issues.webrtc.org/issues/42220543 +https://crbug.com/webrtc/10499,https://issues.webrtc.org/issues/42220544 +https://crbug.com/webrtc/105,https://issues.webrtc.org/issues/42220545 +https://crbug.com/webrtc/1050,https://issues.webrtc.org/issues/42220546 +https://crbug.com/webrtc/10500,https://issues.webrtc.org/issues/42220547 +https://crbug.com/webrtc/10501,https://issues.webrtc.org/issues/42220548 +https://crbug.com/webrtc/10502,https://issues.webrtc.org/issues/42220549 +https://crbug.com/webrtc/10503,https://issues.webrtc.org/issues/42220550 +https://crbug.com/webrtc/10504,https://issues.webrtc.org/issues/42220551 +https://crbug.com/webrtc/10505,https://issues.webrtc.org/issues/42220552 +https://crbug.com/webrtc/10506,https://issues.webrtc.org/issues/42220553 +https://crbug.com/webrtc/10508,https://issues.webrtc.org/issues/42220554 +https://crbug.com/webrtc/10509,https://issues.webrtc.org/issues/42220555 +https://crbug.com/webrtc/1051,https://issues.webrtc.org/issues/42220556 +https://crbug.com/webrtc/10510,https://issues.webrtc.org/issues/42220557 +https://crbug.com/webrtc/10511,https://issues.webrtc.org/issues/42220558 +https://crbug.com/webrtc/10512,https://issues.webrtc.org/issues/42220559 +https://crbug.com/webrtc/10513,https://issues.webrtc.org/issues/42220560 +https://crbug.com/webrtc/10514,https://issues.webrtc.org/issues/42220561 +https://crbug.com/webrtc/10515,https://issues.webrtc.org/issues/42220562 +https://crbug.com/webrtc/10516,https://issues.webrtc.org/issues/42220563 +https://crbug.com/webrtc/10517,https://issues.webrtc.org/issues/42220564 +https://crbug.com/webrtc/10518,https://issues.webrtc.org/issues/42220565 +https://crbug.com/webrtc/10519,https://issues.webrtc.org/issues/42220566 +https://crbug.com/webrtc/1052,https://issues.webrtc.org/issues/42220567 +https://crbug.com/webrtc/10520,https://issues.webrtc.org/issues/42220568 +https://crbug.com/webrtc/10521,https://issues.webrtc.org/issues/42220569 
+https://crbug.com/webrtc/10522,https://issues.webrtc.org/issues/42220570 +https://crbug.com/webrtc/10523,https://issues.webrtc.org/issues/42220571 +https://crbug.com/webrtc/10524,https://issues.webrtc.org/issues/42220572 +https://crbug.com/webrtc/10525,https://issues.webrtc.org/issues/42220573 +https://crbug.com/webrtc/10526,https://issues.webrtc.org/issues/42220574 +https://crbug.com/webrtc/10527,https://issues.webrtc.org/issues/42220575 +https://crbug.com/webrtc/10528,https://issues.webrtc.org/issues/42220576 +https://crbug.com/webrtc/10529,https://issues.webrtc.org/issues/42220577 +https://crbug.com/webrtc/10530,https://issues.webrtc.org/issues/42220578 +https://crbug.com/webrtc/10531,https://issues.webrtc.org/issues/42220579 +https://crbug.com/webrtc/10532,https://issues.webrtc.org/issues/42220580 +https://crbug.com/webrtc/10533,https://issues.webrtc.org/issues/42220581 +https://crbug.com/webrtc/10534,https://issues.webrtc.org/issues/42220582 +https://crbug.com/webrtc/10535,https://issues.webrtc.org/issues/42220583 +https://crbug.com/webrtc/10536,https://issues.webrtc.org/issues/42220584 +https://crbug.com/webrtc/10537,https://issues.webrtc.org/issues/42220585 +https://crbug.com/webrtc/10538,https://issues.webrtc.org/issues/42220586 +https://crbug.com/webrtc/10539,https://issues.webrtc.org/issues/42220587 +https://crbug.com/webrtc/1054,https://issues.webrtc.org/issues/42220588 +https://crbug.com/webrtc/10541,https://issues.webrtc.org/issues/42220589 +https://crbug.com/webrtc/10542,https://issues.webrtc.org/issues/42220590 +https://crbug.com/webrtc/10543,https://issues.webrtc.org/issues/42220591 +https://crbug.com/webrtc/10544,https://issues.webrtc.org/issues/42220592 +https://crbug.com/webrtc/10545,https://issues.webrtc.org/issues/42220593 +https://crbug.com/webrtc/10546,https://issues.webrtc.org/issues/42220594 +https://crbug.com/webrtc/10547,https://issues.webrtc.org/issues/42220595 +https://crbug.com/webrtc/10548,https://issues.webrtc.org/issues/42220596 +https://crbug.com/webrtc/10549,https://issues.webrtc.org/issues/42220597 +https://crbug.com/webrtc/1055,https://issues.webrtc.org/issues/42220598 +https://crbug.com/webrtc/10550,https://issues.webrtc.org/issues/42220599 +https://crbug.com/webrtc/10551,https://issues.webrtc.org/issues/42220600 +https://crbug.com/webrtc/10552,https://issues.webrtc.org/issues/42220601 +https://crbug.com/webrtc/10553,https://issues.webrtc.org/issues/42220602 +https://crbug.com/webrtc/10554,https://issues.webrtc.org/issues/42220603 +https://crbug.com/webrtc/10555,https://issues.webrtc.org/issues/42220604 +https://crbug.com/webrtc/10556,https://issues.webrtc.org/issues/42220605 +https://crbug.com/webrtc/10557,https://issues.webrtc.org/issues/42220606 +https://crbug.com/webrtc/10558,https://issues.webrtc.org/issues/42220607 +https://crbug.com/webrtc/10559,https://issues.webrtc.org/issues/42220608 +https://crbug.com/webrtc/1056,https://issues.webrtc.org/issues/42220609 +https://crbug.com/webrtc/10560,https://issues.webrtc.org/issues/42220610 +https://crbug.com/webrtc/10561,https://issues.webrtc.org/issues/42220611 +https://crbug.com/webrtc/10562,https://issues.webrtc.org/issues/42220612 +https://crbug.com/webrtc/10563,https://issues.webrtc.org/issues/42220613 +https://crbug.com/webrtc/10564,https://issues.webrtc.org/issues/42220614 +https://crbug.com/webrtc/10565,https://issues.webrtc.org/issues/42220615 +https://crbug.com/webrtc/10566,https://issues.webrtc.org/issues/42220616 +https://crbug.com/webrtc/10567,https://issues.webrtc.org/issues/42220617 
+https://crbug.com/webrtc/10568,https://issues.webrtc.org/issues/42220618 +https://crbug.com/webrtc/10569,https://issues.webrtc.org/issues/42220619 +https://crbug.com/webrtc/1057,https://issues.webrtc.org/issues/42220620 +https://crbug.com/webrtc/10570,https://issues.webrtc.org/issues/42220621 +https://crbug.com/webrtc/10571,https://issues.webrtc.org/issues/42220622 +https://crbug.com/webrtc/10572,https://issues.webrtc.org/issues/42220623 +https://crbug.com/webrtc/10573,https://issues.webrtc.org/issues/42220624 +https://crbug.com/webrtc/10574,https://issues.webrtc.org/issues/42220625 +https://crbug.com/webrtc/10575,https://issues.webrtc.org/issues/42220626 +https://crbug.com/webrtc/10576,https://issues.webrtc.org/issues/42220627 +https://crbug.com/webrtc/10577,https://issues.webrtc.org/issues/42220628 +https://crbug.com/webrtc/10578,https://issues.webrtc.org/issues/42220629 +https://crbug.com/webrtc/10579,https://issues.webrtc.org/issues/42220630 +https://crbug.com/webrtc/1058,https://issues.webrtc.org/issues/42220631 +https://crbug.com/webrtc/10580,https://issues.webrtc.org/issues/42220632 +https://crbug.com/webrtc/10581,https://issues.webrtc.org/issues/42220633 +https://crbug.com/webrtc/10582,https://issues.webrtc.org/issues/42220634 +https://crbug.com/webrtc/10583,https://issues.webrtc.org/issues/42220635 +https://crbug.com/webrtc/10584,https://issues.webrtc.org/issues/42220636 +https://crbug.com/webrtc/10585,https://issues.webrtc.org/issues/42220637 +https://crbug.com/webrtc/10586,https://issues.webrtc.org/issues/42220638 +https://crbug.com/webrtc/10587,https://issues.webrtc.org/issues/42220639 +https://crbug.com/webrtc/10588,https://issues.webrtc.org/issues/42220640 +https://crbug.com/webrtc/10589,https://issues.webrtc.org/issues/42220641 +https://crbug.com/webrtc/1059,https://issues.webrtc.org/issues/42220642 +https://crbug.com/webrtc/10590,https://issues.webrtc.org/issues/42220643 +https://crbug.com/webrtc/10591,https://issues.webrtc.org/issues/42220644 +https://crbug.com/webrtc/10592,https://issues.webrtc.org/issues/42220645 +https://crbug.com/webrtc/10593,https://issues.webrtc.org/issues/42220646 +https://crbug.com/webrtc/10594,https://issues.webrtc.org/issues/42220647 +https://crbug.com/webrtc/10595,https://issues.webrtc.org/issues/42220648 +https://crbug.com/webrtc/10596,https://issues.webrtc.org/issues/42220649 +https://crbug.com/webrtc/10597,https://issues.webrtc.org/issues/42220650 +https://crbug.com/webrtc/10598,https://issues.webrtc.org/issues/42220651 +https://crbug.com/webrtc/10599,https://issues.webrtc.org/issues/42220652 +https://crbug.com/webrtc/106,https://issues.webrtc.org/issues/42220653 +https://crbug.com/webrtc/1060,https://issues.webrtc.org/issues/42220654 +https://crbug.com/webrtc/10600,https://issues.webrtc.org/issues/42220655 +https://crbug.com/webrtc/10601,https://issues.webrtc.org/issues/42220656 +https://crbug.com/webrtc/10602,https://issues.webrtc.org/issues/42220657 +https://crbug.com/webrtc/10603,https://issues.webrtc.org/issues/42220658 +https://crbug.com/webrtc/10604,https://issues.webrtc.org/issues/42220659 +https://crbug.com/webrtc/10605,https://issues.webrtc.org/issues/42220660 +https://crbug.com/webrtc/10606,https://issues.webrtc.org/issues/42220661 +https://crbug.com/webrtc/10607,https://issues.webrtc.org/issues/42220662 +https://crbug.com/webrtc/10608,https://issues.webrtc.org/issues/42220663 +https://crbug.com/webrtc/10609,https://issues.webrtc.org/issues/42220664 +https://crbug.com/webrtc/1061,https://issues.webrtc.org/issues/42220665 
+https://crbug.com/webrtc/10610,https://issues.webrtc.org/issues/42220666 +https://crbug.com/webrtc/10611,https://issues.webrtc.org/issues/42220667 +https://crbug.com/webrtc/10612,https://issues.webrtc.org/issues/42220668 +https://crbug.com/webrtc/10613,https://issues.webrtc.org/issues/42220669 +https://crbug.com/webrtc/10614,https://issues.webrtc.org/issues/42220670 +https://crbug.com/webrtc/10615,https://issues.webrtc.org/issues/42220671 +https://crbug.com/webrtc/10616,https://issues.webrtc.org/issues/42220672 +https://crbug.com/webrtc/10617,https://issues.webrtc.org/issues/42220673 +https://crbug.com/webrtc/10618,https://issues.webrtc.org/issues/42220674 +https://crbug.com/webrtc/10619,https://issues.webrtc.org/issues/42220675 +https://crbug.com/webrtc/1062,https://issues.webrtc.org/issues/42220676 +https://crbug.com/webrtc/10620,https://issues.webrtc.org/issues/42220677 +https://crbug.com/webrtc/10621,https://issues.webrtc.org/issues/42220678 +https://crbug.com/webrtc/10622,https://issues.webrtc.org/issues/42220679 +https://crbug.com/webrtc/10623,https://issues.webrtc.org/issues/42220680 +https://crbug.com/webrtc/10624,https://issues.webrtc.org/issues/42220681 +https://crbug.com/webrtc/10625,https://issues.webrtc.org/issues/42220682 +https://crbug.com/webrtc/10626,https://issues.webrtc.org/issues/42220683 +https://crbug.com/webrtc/10627,https://issues.webrtc.org/issues/42220684 +https://crbug.com/webrtc/10628,https://issues.webrtc.org/issues/42220685 +https://crbug.com/webrtc/10629,https://issues.webrtc.org/issues/42220686 +https://crbug.com/webrtc/1063,https://issues.webrtc.org/issues/42220687 +https://crbug.com/webrtc/10630,https://issues.webrtc.org/issues/42220688 +https://crbug.com/webrtc/10631,https://issues.webrtc.org/issues/42220689 +https://crbug.com/webrtc/10632,https://issues.webrtc.org/issues/42220690 +https://crbug.com/webrtc/10633,https://issues.webrtc.org/issues/42220691 +https://crbug.com/webrtc/10634,https://issues.webrtc.org/issues/42220692 +https://crbug.com/webrtc/10635,https://issues.webrtc.org/issues/42220693 +https://crbug.com/webrtc/10637,https://issues.webrtc.org/issues/42220694 +https://crbug.com/webrtc/10638,https://issues.webrtc.org/issues/42220695 +https://crbug.com/webrtc/10639,https://issues.webrtc.org/issues/42220696 +https://crbug.com/webrtc/1064,https://issues.webrtc.org/issues/42220697 +https://crbug.com/webrtc/10640,https://issues.webrtc.org/issues/42220698 +https://crbug.com/webrtc/10641,https://issues.webrtc.org/issues/42220699 +https://crbug.com/webrtc/10642,https://issues.webrtc.org/issues/42220700 +https://crbug.com/webrtc/10643,https://issues.webrtc.org/issues/42220701 +https://crbug.com/webrtc/10644,https://issues.webrtc.org/issues/42220702 +https://crbug.com/webrtc/10645,https://issues.webrtc.org/issues/42220703 +https://crbug.com/webrtc/10646,https://issues.webrtc.org/issues/42220704 +https://crbug.com/webrtc/10647,https://issues.webrtc.org/issues/42220705 +https://crbug.com/webrtc/10648,https://issues.webrtc.org/issues/42220706 +https://crbug.com/webrtc/10649,https://issues.webrtc.org/issues/42220707 +https://crbug.com/webrtc/1065,https://issues.webrtc.org/issues/42220708 +https://crbug.com/webrtc/10650,https://issues.webrtc.org/issues/42220709 +https://crbug.com/webrtc/10651,https://issues.webrtc.org/issues/42220710 +https://crbug.com/webrtc/10652,https://issues.webrtc.org/issues/42220711 +https://crbug.com/webrtc/10653,https://issues.webrtc.org/issues/42220712 +https://crbug.com/webrtc/10654,https://issues.webrtc.org/issues/42220713 
+https://crbug.com/webrtc/10655,https://issues.webrtc.org/issues/42220714 +https://crbug.com/webrtc/10656,https://issues.webrtc.org/issues/42220715 +https://crbug.com/webrtc/10657,https://issues.webrtc.org/issues/42220716 +https://crbug.com/webrtc/10658,https://issues.webrtc.org/issues/42220717 +https://crbug.com/webrtc/10659,https://issues.webrtc.org/issues/42220718 +https://crbug.com/webrtc/1066,https://issues.webrtc.org/issues/42220719 +https://crbug.com/webrtc/10660,https://issues.webrtc.org/issues/42220720 +https://crbug.com/webrtc/10661,https://issues.webrtc.org/issues/42220721 +https://crbug.com/webrtc/10662,https://issues.webrtc.org/issues/42220722 +https://crbug.com/webrtc/10663,https://issues.webrtc.org/issues/42220723 +https://crbug.com/webrtc/10664,https://issues.webrtc.org/issues/42220724 +https://crbug.com/webrtc/10665,https://issues.webrtc.org/issues/42220725 +https://crbug.com/webrtc/10666,https://issues.webrtc.org/issues/42220726 +https://crbug.com/webrtc/10667,https://issues.webrtc.org/issues/42220727 +https://crbug.com/webrtc/10668,https://issues.webrtc.org/issues/42220728 +https://crbug.com/webrtc/10669,https://issues.webrtc.org/issues/42220729 +https://crbug.com/webrtc/1067,https://issues.webrtc.org/issues/42220730 +https://crbug.com/webrtc/10670,https://issues.webrtc.org/issues/42220731 +https://crbug.com/webrtc/10671,https://issues.webrtc.org/issues/42220732 +https://crbug.com/webrtc/10672,https://issues.webrtc.org/issues/42220733 +https://crbug.com/webrtc/10674,https://issues.webrtc.org/issues/42220734 +https://crbug.com/webrtc/10675,https://issues.webrtc.org/issues/42220735 +https://crbug.com/webrtc/10676,https://issues.webrtc.org/issues/42220736 +https://crbug.com/webrtc/10677,https://issues.webrtc.org/issues/42220737 +https://crbug.com/webrtc/10678,https://issues.webrtc.org/issues/42220738 +https://crbug.com/webrtc/10679,https://issues.webrtc.org/issues/42220739 +https://crbug.com/webrtc/1068,https://issues.webrtc.org/issues/42220740 +https://crbug.com/webrtc/10680,https://issues.webrtc.org/issues/42220741 +https://crbug.com/webrtc/10681,https://issues.webrtc.org/issues/42220742 +https://crbug.com/webrtc/10682,https://issues.webrtc.org/issues/42220743 +https://crbug.com/webrtc/10683,https://issues.webrtc.org/issues/42220744 +https://crbug.com/webrtc/10684,https://issues.webrtc.org/issues/42220745 +https://crbug.com/webrtc/10685,https://issues.webrtc.org/issues/42220746 +https://crbug.com/webrtc/10686,https://issues.webrtc.org/issues/42220747 +https://crbug.com/webrtc/10687,https://issues.webrtc.org/issues/42220748 +https://crbug.com/webrtc/10688,https://issues.webrtc.org/issues/42220749 +https://crbug.com/webrtc/10689,https://issues.webrtc.org/issues/42220750 +https://crbug.com/webrtc/1069,https://issues.webrtc.org/issues/42220751 +https://crbug.com/webrtc/10690,https://issues.webrtc.org/issues/42220752 +https://crbug.com/webrtc/10691,https://issues.webrtc.org/issues/42220753 +https://crbug.com/webrtc/10692,https://issues.webrtc.org/issues/42220754 +https://crbug.com/webrtc/10693,https://issues.webrtc.org/issues/42220755 +https://crbug.com/webrtc/10694,https://issues.webrtc.org/issues/42220756 +https://crbug.com/webrtc/10695,https://issues.webrtc.org/issues/42220757 +https://crbug.com/webrtc/10696,https://issues.webrtc.org/issues/42220758 +https://crbug.com/webrtc/10698,https://issues.webrtc.org/issues/42220759 +https://crbug.com/webrtc/10699,https://issues.webrtc.org/issues/42220760 +https://crbug.com/webrtc/107,https://issues.webrtc.org/issues/42220761 
+https://crbug.com/webrtc/1070,https://issues.webrtc.org/issues/42220762 +https://crbug.com/webrtc/10700,https://issues.webrtc.org/issues/42220763 +https://crbug.com/webrtc/10701,https://issues.webrtc.org/issues/42220764 +https://crbug.com/webrtc/10702,https://issues.webrtc.org/issues/42220765 +https://crbug.com/webrtc/10703,https://issues.webrtc.org/issues/42220766 +https://crbug.com/webrtc/10704,https://issues.webrtc.org/issues/42220767 +https://crbug.com/webrtc/10705,https://issues.webrtc.org/issues/42220768 +https://crbug.com/webrtc/10706,https://issues.webrtc.org/issues/42220769 +https://crbug.com/webrtc/10707,https://issues.webrtc.org/issues/42220770 +https://crbug.com/webrtc/10708,https://issues.webrtc.org/issues/42220771 +https://crbug.com/webrtc/10709,https://issues.webrtc.org/issues/42220772 +https://crbug.com/webrtc/1071,https://issues.webrtc.org/issues/42220773 +https://crbug.com/webrtc/10710,https://issues.webrtc.org/issues/42220774 +https://crbug.com/webrtc/10711,https://issues.webrtc.org/issues/42220775 +https://crbug.com/webrtc/10712,https://issues.webrtc.org/issues/42220776 +https://crbug.com/webrtc/10713,https://issues.webrtc.org/issues/42220777 +https://crbug.com/webrtc/10714,https://issues.webrtc.org/issues/42220778 +https://crbug.com/webrtc/10715,https://issues.webrtc.org/issues/42220779 +https://crbug.com/webrtc/10716,https://issues.webrtc.org/issues/42220780 +https://crbug.com/webrtc/10717,https://issues.webrtc.org/issues/42220781 +https://crbug.com/webrtc/10718,https://issues.webrtc.org/issues/42220782 +https://crbug.com/webrtc/10719,https://issues.webrtc.org/issues/42220783 +https://crbug.com/webrtc/10720,https://issues.webrtc.org/issues/42220784 +https://crbug.com/webrtc/10721,https://issues.webrtc.org/issues/42220785 +https://crbug.com/webrtc/10722,https://issues.webrtc.org/issues/42220786 +https://crbug.com/webrtc/10723,https://issues.webrtc.org/issues/42220787 +https://crbug.com/webrtc/10724,https://issues.webrtc.org/issues/42220788 +https://crbug.com/webrtc/10725,https://issues.webrtc.org/issues/42220789 +https://crbug.com/webrtc/10726,https://issues.webrtc.org/issues/42220790 +https://crbug.com/webrtc/10727,https://issues.webrtc.org/issues/42220791 +https://crbug.com/webrtc/10728,https://issues.webrtc.org/issues/42220792 +https://crbug.com/webrtc/10729,https://issues.webrtc.org/issues/42220793 +https://crbug.com/webrtc/1073,https://issues.webrtc.org/issues/42220794 +https://crbug.com/webrtc/10730,https://issues.webrtc.org/issues/42220795 +https://crbug.com/webrtc/10731,https://issues.webrtc.org/issues/42220796 +https://crbug.com/webrtc/10732,https://issues.webrtc.org/issues/42220797 +https://crbug.com/webrtc/10733,https://issues.webrtc.org/issues/42220798 +https://crbug.com/webrtc/10734,https://issues.webrtc.org/issues/42220799 +https://crbug.com/webrtc/10735,https://issues.webrtc.org/issues/42220800 +https://crbug.com/webrtc/10736,https://issues.webrtc.org/issues/42220801 +https://crbug.com/webrtc/10737,https://issues.webrtc.org/issues/42220802 +https://crbug.com/webrtc/10738,https://issues.webrtc.org/issues/42220803 +https://crbug.com/webrtc/10739,https://issues.webrtc.org/issues/42220804 +https://crbug.com/webrtc/1074,https://issues.webrtc.org/issues/42220805 +https://crbug.com/webrtc/10740,https://issues.webrtc.org/issues/42220806 +https://crbug.com/webrtc/10741,https://issues.webrtc.org/issues/42220807 +https://crbug.com/webrtc/10742,https://issues.webrtc.org/issues/42220808 +https://crbug.com/webrtc/10743,https://issues.webrtc.org/issues/42220809 
+https://crbug.com/webrtc/10744,https://issues.webrtc.org/issues/42220810 +https://crbug.com/webrtc/10745,https://issues.webrtc.org/issues/42220811 +https://crbug.com/webrtc/10746,https://issues.webrtc.org/issues/42220812 +https://crbug.com/webrtc/10747,https://issues.webrtc.org/issues/42220813 +https://crbug.com/webrtc/10748,https://issues.webrtc.org/issues/42220814 +https://crbug.com/webrtc/10749,https://issues.webrtc.org/issues/42220815 +https://crbug.com/webrtc/1075,https://issues.webrtc.org/issues/42220816 +https://crbug.com/webrtc/10750,https://issues.webrtc.org/issues/42220817 +https://crbug.com/webrtc/10751,https://issues.webrtc.org/issues/42220818 +https://crbug.com/webrtc/10752,https://issues.webrtc.org/issues/42220819 +https://crbug.com/webrtc/10754,https://issues.webrtc.org/issues/42220820 +https://crbug.com/webrtc/10755,https://issues.webrtc.org/issues/42220821 +https://crbug.com/webrtc/10756,https://issues.webrtc.org/issues/42220822 +https://crbug.com/webrtc/10757,https://issues.webrtc.org/issues/42220823 +https://crbug.com/webrtc/10758,https://issues.webrtc.org/issues/42220824 +https://crbug.com/webrtc/10759,https://issues.webrtc.org/issues/42220825 +https://crbug.com/webrtc/1076,https://issues.webrtc.org/issues/42220826 +https://crbug.com/webrtc/10760,https://issues.webrtc.org/issues/42220827 +https://crbug.com/webrtc/10761,https://issues.webrtc.org/issues/42220828 +https://crbug.com/webrtc/10762,https://issues.webrtc.org/issues/42220829 +https://crbug.com/webrtc/10763,https://issues.webrtc.org/issues/42220830 +https://crbug.com/webrtc/10764,https://issues.webrtc.org/issues/42220831 +https://crbug.com/webrtc/10765,https://issues.webrtc.org/issues/42220832 +https://crbug.com/webrtc/10766,https://issues.webrtc.org/issues/42220833 +https://crbug.com/webrtc/10767,https://issues.webrtc.org/issues/42220834 +https://crbug.com/webrtc/10768,https://issues.webrtc.org/issues/42220835 +https://crbug.com/webrtc/10769,https://issues.webrtc.org/issues/42220836 +https://crbug.com/webrtc/1077,https://issues.webrtc.org/issues/42220837 +https://crbug.com/webrtc/10770,https://issues.webrtc.org/issues/42220838 +https://crbug.com/webrtc/10771,https://issues.webrtc.org/issues/42220839 +https://crbug.com/webrtc/10772,https://issues.webrtc.org/issues/42220840 +https://crbug.com/webrtc/10773,https://issues.webrtc.org/issues/42220841 +https://crbug.com/webrtc/10774,https://issues.webrtc.org/issues/42220842 +https://crbug.com/webrtc/10775,https://issues.webrtc.org/issues/42220843 +https://crbug.com/webrtc/10776,https://issues.webrtc.org/issues/42220844 +https://crbug.com/webrtc/10777,https://issues.webrtc.org/issues/42220845 +https://crbug.com/webrtc/10778,https://issues.webrtc.org/issues/42220846 +https://crbug.com/webrtc/10779,https://issues.webrtc.org/issues/42220847 +https://crbug.com/webrtc/1078,https://issues.webrtc.org/issues/42220848 +https://crbug.com/webrtc/10780,https://issues.webrtc.org/issues/42220849 +https://crbug.com/webrtc/10782,https://issues.webrtc.org/issues/42220850 +https://crbug.com/webrtc/10783,https://issues.webrtc.org/issues/42220851 +https://crbug.com/webrtc/10784,https://issues.webrtc.org/issues/42220852 +https://crbug.com/webrtc/10785,https://issues.webrtc.org/issues/42220853 +https://crbug.com/webrtc/10786,https://issues.webrtc.org/issues/42220854 +https://crbug.com/webrtc/10787,https://issues.webrtc.org/issues/42220855 +https://crbug.com/webrtc/10788,https://issues.webrtc.org/issues/42220856 +https://crbug.com/webrtc/10789,https://issues.webrtc.org/issues/42220857 
+https://crbug.com/webrtc/1079,https://issues.webrtc.org/issues/42220858 +https://crbug.com/webrtc/10790,https://issues.webrtc.org/issues/42220859 +https://crbug.com/webrtc/10791,https://issues.webrtc.org/issues/42220860 +https://crbug.com/webrtc/10792,https://issues.webrtc.org/issues/42220861 +https://crbug.com/webrtc/10793,https://issues.webrtc.org/issues/42220862 +https://crbug.com/webrtc/10794,https://issues.webrtc.org/issues/42220863 +https://crbug.com/webrtc/10795,https://issues.webrtc.org/issues/42220864 +https://crbug.com/webrtc/10796,https://issues.webrtc.org/issues/42220865 +https://crbug.com/webrtc/10797,https://issues.webrtc.org/issues/42220866 +https://crbug.com/webrtc/10798,https://issues.webrtc.org/issues/42220867 +https://crbug.com/webrtc/10799,https://issues.webrtc.org/issues/42220868 +https://crbug.com/webrtc/108,https://issues.webrtc.org/issues/42220869 +https://crbug.com/webrtc/1080,https://issues.webrtc.org/issues/42220870 +https://crbug.com/webrtc/10800,https://issues.webrtc.org/issues/42220871 +https://crbug.com/webrtc/10801,https://issues.webrtc.org/issues/42220872 +https://crbug.com/webrtc/10802,https://issues.webrtc.org/issues/42220873 +https://crbug.com/webrtc/10803,https://issues.webrtc.org/issues/42220874 +https://crbug.com/webrtc/10804,https://issues.webrtc.org/issues/42220875 +https://crbug.com/webrtc/10805,https://issues.webrtc.org/issues/42220876 +https://crbug.com/webrtc/10806,https://issues.webrtc.org/issues/42220877 +https://crbug.com/webrtc/10807,https://issues.webrtc.org/issues/42220878 +https://crbug.com/webrtc/10808,https://issues.webrtc.org/issues/42220879 +https://crbug.com/webrtc/10809,https://issues.webrtc.org/issues/42220880 +https://crbug.com/webrtc/1081,https://issues.webrtc.org/issues/42220881 +https://crbug.com/webrtc/10810,https://issues.webrtc.org/issues/42220882 +https://crbug.com/webrtc/10811,https://issues.webrtc.org/issues/42220883 +https://crbug.com/webrtc/10812,https://issues.webrtc.org/issues/42220884 +https://crbug.com/webrtc/10813,https://issues.webrtc.org/issues/42220885 +https://crbug.com/webrtc/10814,https://issues.webrtc.org/issues/42220886 +https://crbug.com/webrtc/10815,https://issues.webrtc.org/issues/42220887 +https://crbug.com/webrtc/10816,https://issues.webrtc.org/issues/42220888 +https://crbug.com/webrtc/10817,https://issues.webrtc.org/issues/42220889 +https://crbug.com/webrtc/10818,https://issues.webrtc.org/issues/42220890 +https://crbug.com/webrtc/10819,https://issues.webrtc.org/issues/42220891 +https://crbug.com/webrtc/1082,https://issues.webrtc.org/issues/42220892 +https://crbug.com/webrtc/10820,https://issues.webrtc.org/issues/42220893 +https://crbug.com/webrtc/10821,https://issues.webrtc.org/issues/42220894 +https://crbug.com/webrtc/10822,https://issues.webrtc.org/issues/42220895 +https://crbug.com/webrtc/10823,https://issues.webrtc.org/issues/42220896 +https://crbug.com/webrtc/10824,https://issues.webrtc.org/issues/42220897 +https://crbug.com/webrtc/10825,https://issues.webrtc.org/issues/42220898 +https://crbug.com/webrtc/10826,https://issues.webrtc.org/issues/42220899 +https://crbug.com/webrtc/10827,https://issues.webrtc.org/issues/42220900 +https://crbug.com/webrtc/10828,https://issues.webrtc.org/issues/42220901 +https://crbug.com/webrtc/10829,https://issues.webrtc.org/issues/42220902 +https://crbug.com/webrtc/1083,https://issues.webrtc.org/issues/42220903 +https://crbug.com/webrtc/10830,https://issues.webrtc.org/issues/42220904 +https://crbug.com/webrtc/10831,https://issues.webrtc.org/issues/42220905 
+https://crbug.com/webrtc/10832,https://issues.webrtc.org/issues/42220906 +https://crbug.com/webrtc/10833,https://issues.webrtc.org/issues/42220907 +https://crbug.com/webrtc/10834,https://issues.webrtc.org/issues/42220908 +https://crbug.com/webrtc/10835,https://issues.webrtc.org/issues/42220909 +https://crbug.com/webrtc/10836,https://issues.webrtc.org/issues/42220910 +https://crbug.com/webrtc/10837,https://issues.webrtc.org/issues/42220911 +https://crbug.com/webrtc/10838,https://issues.webrtc.org/issues/42220912 +https://crbug.com/webrtc/10839,https://issues.webrtc.org/issues/42220913 +https://crbug.com/webrtc/1084,https://issues.webrtc.org/issues/42220914 +https://crbug.com/webrtc/10840,https://issues.webrtc.org/issues/42220915 +https://crbug.com/webrtc/10841,https://issues.webrtc.org/issues/42220916 +https://crbug.com/webrtc/10842,https://issues.webrtc.org/issues/42220917 +https://crbug.com/webrtc/10843,https://issues.webrtc.org/issues/42220918 +https://crbug.com/webrtc/10844,https://issues.webrtc.org/issues/42220919 +https://crbug.com/webrtc/10845,https://issues.webrtc.org/issues/42220920 +https://crbug.com/webrtc/10846,https://issues.webrtc.org/issues/42220921 +https://crbug.com/webrtc/10847,https://issues.webrtc.org/issues/42220922 +https://crbug.com/webrtc/10848,https://issues.webrtc.org/issues/42220923 +https://crbug.com/webrtc/1085,https://issues.webrtc.org/issues/42220924 +https://crbug.com/webrtc/10850,https://issues.webrtc.org/issues/42220925 +https://crbug.com/webrtc/10851,https://issues.webrtc.org/issues/42220926 +https://crbug.com/webrtc/10852,https://issues.webrtc.org/issues/42220927 +https://crbug.com/webrtc/10853,https://issues.webrtc.org/issues/42220928 +https://crbug.com/webrtc/10854,https://issues.webrtc.org/issues/42220929 +https://crbug.com/webrtc/10855,https://issues.webrtc.org/issues/42220930 +https://crbug.com/webrtc/10856,https://issues.webrtc.org/issues/42220931 +https://crbug.com/webrtc/10857,https://issues.webrtc.org/issues/42220932 +https://crbug.com/webrtc/10858,https://issues.webrtc.org/issues/42220933 +https://crbug.com/webrtc/10859,https://issues.webrtc.org/issues/42220934 +https://crbug.com/webrtc/1086,https://issues.webrtc.org/issues/42220935 +https://crbug.com/webrtc/10860,https://issues.webrtc.org/issues/42220936 +https://crbug.com/webrtc/10861,https://issues.webrtc.org/issues/42220937 +https://crbug.com/webrtc/10862,https://issues.webrtc.org/issues/42220938 +https://crbug.com/webrtc/10863,https://issues.webrtc.org/issues/42220939 +https://crbug.com/webrtc/10864,https://issues.webrtc.org/issues/42220940 +https://crbug.com/webrtc/10865,https://issues.webrtc.org/issues/42220941 +https://crbug.com/webrtc/10866,https://issues.webrtc.org/issues/42220942 +https://crbug.com/webrtc/10867,https://issues.webrtc.org/issues/42220943 +https://crbug.com/webrtc/10868,https://issues.webrtc.org/issues/42220944 +https://crbug.com/webrtc/10869,https://issues.webrtc.org/issues/42220945 +https://crbug.com/webrtc/1087,https://issues.webrtc.org/issues/42220946 +https://crbug.com/webrtc/10870,https://issues.webrtc.org/issues/42220947 +https://crbug.com/webrtc/10871,https://issues.webrtc.org/issues/42220948 +https://crbug.com/webrtc/10872,https://issues.webrtc.org/issues/42220949 +https://crbug.com/webrtc/10873,https://issues.webrtc.org/issues/42220950 +https://crbug.com/webrtc/10874,https://issues.webrtc.org/issues/42220951 +https://crbug.com/webrtc/10875,https://issues.webrtc.org/issues/42220952 +https://crbug.com/webrtc/10876,https://issues.webrtc.org/issues/42220953 
+https://crbug.com/webrtc/10877,https://issues.webrtc.org/issues/42220954 +https://crbug.com/webrtc/10878,https://issues.webrtc.org/issues/42220955 +https://crbug.com/webrtc/10879,https://issues.webrtc.org/issues/42220956 +https://crbug.com/webrtc/1088,https://issues.webrtc.org/issues/42220957 +https://crbug.com/webrtc/10880,https://issues.webrtc.org/issues/42220958 +https://crbug.com/webrtc/10881,https://issues.webrtc.org/issues/42220959 +https://crbug.com/webrtc/10882,https://issues.webrtc.org/issues/42220960 +https://crbug.com/webrtc/10883,https://issues.webrtc.org/issues/42220961 +https://crbug.com/webrtc/10884,https://issues.webrtc.org/issues/42220962 +https://crbug.com/webrtc/10885,https://issues.webrtc.org/issues/42220963 +https://crbug.com/webrtc/10886,https://issues.webrtc.org/issues/42220964 +https://crbug.com/webrtc/10888,https://issues.webrtc.org/issues/42220965 +https://crbug.com/webrtc/10889,https://issues.webrtc.org/issues/42220966 +https://crbug.com/webrtc/1089,https://issues.webrtc.org/issues/42220967 +https://crbug.com/webrtc/10890,https://issues.webrtc.org/issues/42220968 +https://crbug.com/webrtc/10891,https://issues.webrtc.org/issues/42220969 +https://crbug.com/webrtc/10892,https://issues.webrtc.org/issues/42220970 +https://crbug.com/webrtc/10893,https://issues.webrtc.org/issues/42220971 +https://crbug.com/webrtc/10894,https://issues.webrtc.org/issues/42220972 +https://crbug.com/webrtc/10896,https://issues.webrtc.org/issues/42220973 +https://crbug.com/webrtc/10897,https://issues.webrtc.org/issues/42220974 +https://crbug.com/webrtc/10898,https://issues.webrtc.org/issues/42220975 +https://crbug.com/webrtc/10899,https://issues.webrtc.org/issues/42220976 +https://crbug.com/webrtc/109,https://issues.webrtc.org/issues/42220977 +https://crbug.com/webrtc/1090,https://issues.webrtc.org/issues/42220978 +https://crbug.com/webrtc/10900,https://issues.webrtc.org/issues/42220979 +https://crbug.com/webrtc/10902,https://issues.webrtc.org/issues/42220980 +https://crbug.com/webrtc/10904,https://issues.webrtc.org/issues/42220981 +https://crbug.com/webrtc/10905,https://issues.webrtc.org/issues/42220982 +https://crbug.com/webrtc/10906,https://issues.webrtc.org/issues/42220983 +https://crbug.com/webrtc/10907,https://issues.webrtc.org/issues/42220984 +https://crbug.com/webrtc/10908,https://issues.webrtc.org/issues/42220985 +https://crbug.com/webrtc/10909,https://issues.webrtc.org/issues/42220986 +https://crbug.com/webrtc/1091,https://issues.webrtc.org/issues/42220987 +https://crbug.com/webrtc/10910,https://issues.webrtc.org/issues/42220988 +https://crbug.com/webrtc/10911,https://issues.webrtc.org/issues/42220989 +https://crbug.com/webrtc/10912,https://issues.webrtc.org/issues/42220990 +https://crbug.com/webrtc/10913,https://issues.webrtc.org/issues/42220991 +https://crbug.com/webrtc/10914,https://issues.webrtc.org/issues/42220992 +https://crbug.com/webrtc/10915,https://issues.webrtc.org/issues/42220993 +https://crbug.com/webrtc/10916,https://issues.webrtc.org/issues/42220994 +https://crbug.com/webrtc/10917,https://issues.webrtc.org/issues/42220995 +https://crbug.com/webrtc/10918,https://issues.webrtc.org/issues/42220996 +https://crbug.com/webrtc/10919,https://issues.webrtc.org/issues/42220997 +https://crbug.com/webrtc/1092,https://issues.webrtc.org/issues/42220998 +https://crbug.com/webrtc/10920,https://issues.webrtc.org/issues/42220999 +https://crbug.com/webrtc/10921,https://issues.webrtc.org/issues/42221000 +https://crbug.com/webrtc/10923,https://issues.webrtc.org/issues/42221001 
+https://crbug.com/webrtc/10924,https://issues.webrtc.org/issues/42221002 +https://crbug.com/webrtc/10925,https://issues.webrtc.org/issues/42221003 +https://crbug.com/webrtc/10926,https://issues.webrtc.org/issues/42221004 +https://crbug.com/webrtc/10927,https://issues.webrtc.org/issues/42221005 +https://crbug.com/webrtc/10928,https://issues.webrtc.org/issues/42221006 +https://crbug.com/webrtc/10929,https://issues.webrtc.org/issues/42221007 +https://crbug.com/webrtc/1093,https://issues.webrtc.org/issues/42221008 +https://crbug.com/webrtc/10930,https://issues.webrtc.org/issues/42221009 +https://crbug.com/webrtc/10931,https://issues.webrtc.org/issues/42221010 +https://crbug.com/webrtc/10932,https://issues.webrtc.org/issues/42221011 +https://crbug.com/webrtc/10933,https://issues.webrtc.org/issues/42221012 +https://crbug.com/webrtc/10934,https://issues.webrtc.org/issues/42221013 +https://crbug.com/webrtc/10935,https://issues.webrtc.org/issues/42221014 +https://crbug.com/webrtc/10936,https://issues.webrtc.org/issues/42221015 +https://crbug.com/webrtc/10937,https://issues.webrtc.org/issues/42221016 +https://crbug.com/webrtc/10938,https://issues.webrtc.org/issues/42221017 +https://crbug.com/webrtc/1094,https://issues.webrtc.org/issues/42221018 +https://crbug.com/webrtc/10941,https://issues.webrtc.org/issues/42221019 +https://crbug.com/webrtc/10942,https://issues.webrtc.org/issues/42221020 +https://crbug.com/webrtc/10943,https://issues.webrtc.org/issues/42221021 +https://crbug.com/webrtc/10945,https://issues.webrtc.org/issues/42221022 +https://crbug.com/webrtc/10946,https://issues.webrtc.org/issues/42221023 +https://crbug.com/webrtc/10947,https://issues.webrtc.org/issues/42221024 +https://crbug.com/webrtc/10948,https://issues.webrtc.org/issues/42221025 +https://crbug.com/webrtc/10949,https://issues.webrtc.org/issues/42221026 +https://crbug.com/webrtc/1095,https://issues.webrtc.org/issues/42221027 +https://crbug.com/webrtc/10950,https://issues.webrtc.org/issues/42221028 +https://crbug.com/webrtc/10951,https://issues.webrtc.org/issues/42221029 +https://crbug.com/webrtc/10952,https://issues.webrtc.org/issues/42221030 +https://crbug.com/webrtc/10953,https://issues.webrtc.org/issues/42221031 +https://crbug.com/webrtc/10954,https://issues.webrtc.org/issues/42221032 +https://crbug.com/webrtc/10955,https://issues.webrtc.org/issues/42221033 +https://crbug.com/webrtc/10956,https://issues.webrtc.org/issues/42221034 +https://crbug.com/webrtc/10957,https://issues.webrtc.org/issues/42221035 +https://crbug.com/webrtc/10958,https://issues.webrtc.org/issues/42221036 +https://crbug.com/webrtc/10959,https://issues.webrtc.org/issues/42221037 +https://crbug.com/webrtc/1096,https://issues.webrtc.org/issues/42221038 +https://crbug.com/webrtc/10960,https://issues.webrtc.org/issues/42221039 +https://crbug.com/webrtc/10961,https://issues.webrtc.org/issues/42221040 +https://crbug.com/webrtc/10962,https://issues.webrtc.org/issues/42221041 +https://crbug.com/webrtc/10963,https://issues.webrtc.org/issues/42221042 +https://crbug.com/webrtc/10964,https://issues.webrtc.org/issues/42221043 +https://crbug.com/webrtc/10965,https://issues.webrtc.org/issues/42221044 +https://crbug.com/webrtc/10966,https://issues.webrtc.org/issues/42221045 +https://crbug.com/webrtc/10968,https://issues.webrtc.org/issues/42221046 +https://crbug.com/webrtc/10969,https://issues.webrtc.org/issues/42221047 +https://crbug.com/webrtc/1097,https://issues.webrtc.org/issues/42221048 +https://crbug.com/webrtc/10970,https://issues.webrtc.org/issues/42221049 
+https://crbug.com/webrtc/10971,https://issues.webrtc.org/issues/42221050 +https://crbug.com/webrtc/10972,https://issues.webrtc.org/issues/42221051 +https://crbug.com/webrtc/10973,https://issues.webrtc.org/issues/42221052 +https://crbug.com/webrtc/10974,https://issues.webrtc.org/issues/42221053 +https://crbug.com/webrtc/10975,https://issues.webrtc.org/issues/42221054 +https://crbug.com/webrtc/10976,https://issues.webrtc.org/issues/42221055 +https://crbug.com/webrtc/10977,https://issues.webrtc.org/issues/42221056 +https://crbug.com/webrtc/10978,https://issues.webrtc.org/issues/42221057 +https://crbug.com/webrtc/10979,https://issues.webrtc.org/issues/42221058 +https://crbug.com/webrtc/1098,https://issues.webrtc.org/issues/42221059 +https://crbug.com/webrtc/10980,https://issues.webrtc.org/issues/42221060 +https://crbug.com/webrtc/10981,https://issues.webrtc.org/issues/42221061 +https://crbug.com/webrtc/10982,https://issues.webrtc.org/issues/42221062 +https://crbug.com/webrtc/10983,https://issues.webrtc.org/issues/42221063 +https://crbug.com/webrtc/10984,https://issues.webrtc.org/issues/42221064 +https://crbug.com/webrtc/10985,https://issues.webrtc.org/issues/42221065 +https://crbug.com/webrtc/10986,https://issues.webrtc.org/issues/42221066 +https://crbug.com/webrtc/10987,https://issues.webrtc.org/issues/42221067 +https://crbug.com/webrtc/10988,https://issues.webrtc.org/issues/42221068 +https://crbug.com/webrtc/10989,https://issues.webrtc.org/issues/42221069 +https://crbug.com/webrtc/1099,https://issues.webrtc.org/issues/42221070 +https://crbug.com/webrtc/10990,https://issues.webrtc.org/issues/42221071 +https://crbug.com/webrtc/10991,https://issues.webrtc.org/issues/42221072 +https://crbug.com/webrtc/10992,https://issues.webrtc.org/issues/42221073 +https://crbug.com/webrtc/10993,https://issues.webrtc.org/issues/42221074 +https://crbug.com/webrtc/10994,https://issues.webrtc.org/issues/42221075 +https://crbug.com/webrtc/10995,https://issues.webrtc.org/issues/42221076 +https://crbug.com/webrtc/10996,https://issues.webrtc.org/issues/42221077 +https://crbug.com/webrtc/10997,https://issues.webrtc.org/issues/42221078 +https://crbug.com/webrtc/10998,https://issues.webrtc.org/issues/42221079 +https://crbug.com/webrtc/10999,https://issues.webrtc.org/issues/42221080 +https://crbug.com/webrtc/11,https://issues.webrtc.org/issues/42221081 +https://crbug.com/webrtc/1100,https://issues.webrtc.org/issues/42221082 +https://crbug.com/webrtc/11000,https://issues.webrtc.org/issues/42221083 +https://crbug.com/webrtc/11001,https://issues.webrtc.org/issues/42221084 +https://crbug.com/webrtc/11002,https://issues.webrtc.org/issues/42221085 +https://crbug.com/webrtc/11003,https://issues.webrtc.org/issues/42221086 +https://crbug.com/webrtc/11004,https://issues.webrtc.org/issues/42221087 +https://crbug.com/webrtc/11005,https://issues.webrtc.org/issues/42221088 +https://crbug.com/webrtc/11006,https://issues.webrtc.org/issues/42221089 +https://crbug.com/webrtc/11007,https://issues.webrtc.org/issues/42221090 +https://crbug.com/webrtc/11009,https://issues.webrtc.org/issues/42221091 +https://crbug.com/webrtc/1101,https://issues.webrtc.org/issues/42221092 +https://crbug.com/webrtc/11010,https://issues.webrtc.org/issues/42221093 +https://crbug.com/webrtc/11012,https://issues.webrtc.org/issues/42221094 +https://crbug.com/webrtc/11013,https://issues.webrtc.org/issues/42221095 +https://crbug.com/webrtc/11014,https://issues.webrtc.org/issues/42221096 +https://crbug.com/webrtc/11015,https://issues.webrtc.org/issues/42221097 
+https://crbug.com/webrtc/11016,https://issues.webrtc.org/issues/42221098 +https://crbug.com/webrtc/11017,https://issues.webrtc.org/issues/42221099 +https://crbug.com/webrtc/11018,https://issues.webrtc.org/issues/42221100 +https://crbug.com/webrtc/1102,https://issues.webrtc.org/issues/42221101 +https://crbug.com/webrtc/11020,https://issues.webrtc.org/issues/42221102 +https://crbug.com/webrtc/11021,https://issues.webrtc.org/issues/42221103 +https://crbug.com/webrtc/11022,https://issues.webrtc.org/issues/42221104 +https://crbug.com/webrtc/11023,https://issues.webrtc.org/issues/42221105 +https://crbug.com/webrtc/11024,https://issues.webrtc.org/issues/42221106 +https://crbug.com/webrtc/11025,https://issues.webrtc.org/issues/42221107 +https://crbug.com/webrtc/11026,https://issues.webrtc.org/issues/42221108 +https://crbug.com/webrtc/11027,https://issues.webrtc.org/issues/42221109 +https://crbug.com/webrtc/11028,https://issues.webrtc.org/issues/42221110 +https://crbug.com/webrtc/11029,https://issues.webrtc.org/issues/42221111 +https://crbug.com/webrtc/1103,https://issues.webrtc.org/issues/42221112 +https://crbug.com/webrtc/11030,https://issues.webrtc.org/issues/42221113 +https://crbug.com/webrtc/11032,https://issues.webrtc.org/issues/42221114 +https://crbug.com/webrtc/11033,https://issues.webrtc.org/issues/42221115 +https://crbug.com/webrtc/11034,https://issues.webrtc.org/issues/42221116 +https://crbug.com/webrtc/11035,https://issues.webrtc.org/issues/42221117 +https://crbug.com/webrtc/11036,https://issues.webrtc.org/issues/42221118 +https://crbug.com/webrtc/11037,https://issues.webrtc.org/issues/42221119 +https://crbug.com/webrtc/11038,https://issues.webrtc.org/issues/42221120 +https://crbug.com/webrtc/11039,https://issues.webrtc.org/issues/42221121 +https://crbug.com/webrtc/1104,https://issues.webrtc.org/issues/42221122 +https://crbug.com/webrtc/11040,https://issues.webrtc.org/issues/42221123 +https://crbug.com/webrtc/11041,https://issues.webrtc.org/issues/42221124 +https://crbug.com/webrtc/11042,https://issues.webrtc.org/issues/42221125 +https://crbug.com/webrtc/11043,https://issues.webrtc.org/issues/42221126 +https://crbug.com/webrtc/11044,https://issues.webrtc.org/issues/42221127 +https://crbug.com/webrtc/11045,https://issues.webrtc.org/issues/42221128 +https://crbug.com/webrtc/11046,https://issues.webrtc.org/issues/42221129 +https://crbug.com/webrtc/11047,https://issues.webrtc.org/issues/42221130 +https://crbug.com/webrtc/11048,https://issues.webrtc.org/issues/42221131 +https://crbug.com/webrtc/11049,https://issues.webrtc.org/issues/42221132 +https://crbug.com/webrtc/1105,https://issues.webrtc.org/issues/42221133 +https://crbug.com/webrtc/11050,https://issues.webrtc.org/issues/42221134 +https://crbug.com/webrtc/11051,https://issues.webrtc.org/issues/42221135 +https://crbug.com/webrtc/11052,https://issues.webrtc.org/issues/42221136 +https://crbug.com/webrtc/11053,https://issues.webrtc.org/issues/42221137 +https://crbug.com/webrtc/11054,https://issues.webrtc.org/issues/42221138 +https://crbug.com/webrtc/11055,https://issues.webrtc.org/issues/42221139 +https://crbug.com/webrtc/11056,https://issues.webrtc.org/issues/42221140 +https://crbug.com/webrtc/11058,https://issues.webrtc.org/issues/42221141 +https://crbug.com/webrtc/11059,https://issues.webrtc.org/issues/42221142 +https://crbug.com/webrtc/1106,https://issues.webrtc.org/issues/42221143 +https://crbug.com/webrtc/11060,https://issues.webrtc.org/issues/42221144 +https://crbug.com/webrtc/11062,https://issues.webrtc.org/issues/42221145 
+https://crbug.com/webrtc/11063,https://issues.webrtc.org/issues/42221146 +https://crbug.com/webrtc/11064,https://issues.webrtc.org/issues/42221147 +https://crbug.com/webrtc/11065,https://issues.webrtc.org/issues/42221148 +https://crbug.com/webrtc/11067,https://issues.webrtc.org/issues/42221149 +https://crbug.com/webrtc/11068,https://issues.webrtc.org/issues/42221150 +https://crbug.com/webrtc/11069,https://issues.webrtc.org/issues/42221151 +https://crbug.com/webrtc/1107,https://issues.webrtc.org/issues/42221152 +https://crbug.com/webrtc/11070,https://issues.webrtc.org/issues/42221153 +https://crbug.com/webrtc/11071,https://issues.webrtc.org/issues/42221154 +https://crbug.com/webrtc/11072,https://issues.webrtc.org/issues/42221155 +https://crbug.com/webrtc/11073,https://issues.webrtc.org/issues/42221156 +https://crbug.com/webrtc/11074,https://issues.webrtc.org/issues/42221157 +https://crbug.com/webrtc/11075,https://issues.webrtc.org/issues/42221158 +https://crbug.com/webrtc/11076,https://issues.webrtc.org/issues/42221159 +https://crbug.com/webrtc/11077,https://issues.webrtc.org/issues/42221160 +https://crbug.com/webrtc/11078,https://issues.webrtc.org/issues/42221161 +https://crbug.com/webrtc/11079,https://issues.webrtc.org/issues/42221162 +https://crbug.com/webrtc/1108,https://issues.webrtc.org/issues/42221163 +https://crbug.com/webrtc/11080,https://issues.webrtc.org/issues/42221164 +https://crbug.com/webrtc/11081,https://issues.webrtc.org/issues/42221165 +https://crbug.com/webrtc/11082,https://issues.webrtc.org/issues/42221166 +https://crbug.com/webrtc/11083,https://issues.webrtc.org/issues/42221167 +https://crbug.com/webrtc/11085,https://issues.webrtc.org/issues/42221168 +https://crbug.com/webrtc/11086,https://issues.webrtc.org/issues/42221169 +https://crbug.com/webrtc/11087,https://issues.webrtc.org/issues/42221170 +https://crbug.com/webrtc/11088,https://issues.webrtc.org/issues/42221171 +https://crbug.com/webrtc/11089,https://issues.webrtc.org/issues/42221172 +https://crbug.com/webrtc/1109,https://issues.webrtc.org/issues/42221173 +https://crbug.com/webrtc/11090,https://issues.webrtc.org/issues/42221174 +https://crbug.com/webrtc/11091,https://issues.webrtc.org/issues/42221175 +https://crbug.com/webrtc/11092,https://issues.webrtc.org/issues/42221176 +https://crbug.com/webrtc/11093,https://issues.webrtc.org/issues/42221177 +https://crbug.com/webrtc/11094,https://issues.webrtc.org/issues/42221178 +https://crbug.com/webrtc/11095,https://issues.webrtc.org/issues/42221179 +https://crbug.com/webrtc/11097,https://issues.webrtc.org/issues/42221180 +https://crbug.com/webrtc/11098,https://issues.webrtc.org/issues/42221181 +https://crbug.com/webrtc/11099,https://issues.webrtc.org/issues/42221182 +https://crbug.com/webrtc/1110,https://issues.webrtc.org/issues/42221183 +https://crbug.com/webrtc/11100,https://issues.webrtc.org/issues/42221184 +https://crbug.com/webrtc/11101,https://issues.webrtc.org/issues/42221185 +https://crbug.com/webrtc/11102,https://issues.webrtc.org/issues/42221186 +https://crbug.com/webrtc/11103,https://issues.webrtc.org/issues/42221187 +https://crbug.com/webrtc/11104,https://issues.webrtc.org/issues/42221188 +https://crbug.com/webrtc/11105,https://issues.webrtc.org/issues/42221189 +https://crbug.com/webrtc/11106,https://issues.webrtc.org/issues/42221190 +https://crbug.com/webrtc/11107,https://issues.webrtc.org/issues/42221191 +https://crbug.com/webrtc/11108,https://issues.webrtc.org/issues/42221192 +https://crbug.com/webrtc/11109,https://issues.webrtc.org/issues/42221193 
+https://crbug.com/webrtc/1111,https://issues.webrtc.org/issues/42221194 +https://crbug.com/webrtc/11110,https://issues.webrtc.org/issues/42221195 +https://crbug.com/webrtc/11111,https://issues.webrtc.org/issues/42221196 +https://crbug.com/webrtc/11112,https://issues.webrtc.org/issues/42221197 +https://crbug.com/webrtc/11113,https://issues.webrtc.org/issues/42221198 +https://crbug.com/webrtc/11114,https://issues.webrtc.org/issues/42221199 +https://crbug.com/webrtc/11115,https://issues.webrtc.org/issues/42221200 +https://crbug.com/webrtc/11116,https://issues.webrtc.org/issues/42221201 +https://crbug.com/webrtc/11117,https://issues.webrtc.org/issues/42221202 +https://crbug.com/webrtc/11118,https://issues.webrtc.org/issues/42221203 +https://crbug.com/webrtc/11119,https://issues.webrtc.org/issues/42221204 +https://crbug.com/webrtc/1112,https://issues.webrtc.org/issues/42221205 +https://crbug.com/webrtc/11120,https://issues.webrtc.org/issues/42221206 +https://crbug.com/webrtc/11121,https://issues.webrtc.org/issues/42221207 +https://crbug.com/webrtc/11122,https://issues.webrtc.org/issues/42221208 +https://crbug.com/webrtc/11123,https://issues.webrtc.org/issues/42221209 +https://crbug.com/webrtc/11124,https://issues.webrtc.org/issues/42221210 +https://crbug.com/webrtc/11125,https://issues.webrtc.org/issues/42221211 +https://crbug.com/webrtc/11126,https://issues.webrtc.org/issues/42221212 +https://crbug.com/webrtc/11128,https://issues.webrtc.org/issues/42221213 +https://crbug.com/webrtc/1113,https://issues.webrtc.org/issues/42221214 +https://crbug.com/webrtc/11130,https://issues.webrtc.org/issues/42221215 +https://crbug.com/webrtc/11131,https://issues.webrtc.org/issues/42221216 +https://crbug.com/webrtc/11132,https://issues.webrtc.org/issues/42221217 +https://crbug.com/webrtc/11133,https://issues.webrtc.org/issues/42221218 +https://crbug.com/webrtc/11134,https://issues.webrtc.org/issues/42221219 +https://crbug.com/webrtc/11135,https://issues.webrtc.org/issues/42221220 +https://crbug.com/webrtc/11136,https://issues.webrtc.org/issues/42221221 +https://crbug.com/webrtc/11137,https://issues.webrtc.org/issues/42221222 +https://crbug.com/webrtc/11138,https://issues.webrtc.org/issues/42221223 +https://crbug.com/webrtc/11139,https://issues.webrtc.org/issues/42221224 +https://crbug.com/webrtc/1114,https://issues.webrtc.org/issues/42221225 +https://crbug.com/webrtc/11140,https://issues.webrtc.org/issues/42221226 +https://crbug.com/webrtc/11142,https://issues.webrtc.org/issues/42221227 +https://crbug.com/webrtc/11143,https://issues.webrtc.org/issues/42221228 +https://crbug.com/webrtc/11144,https://issues.webrtc.org/issues/42221229 +https://crbug.com/webrtc/11145,https://issues.webrtc.org/issues/42221230 +https://crbug.com/webrtc/11146,https://issues.webrtc.org/issues/42221231 +https://crbug.com/webrtc/11148,https://issues.webrtc.org/issues/42221232 +https://crbug.com/webrtc/11149,https://issues.webrtc.org/issues/42221233 +https://crbug.com/webrtc/1115,https://issues.webrtc.org/issues/42221234 +https://crbug.com/webrtc/11150,https://issues.webrtc.org/issues/42221235 +https://crbug.com/webrtc/11151,https://issues.webrtc.org/issues/42221236 +https://crbug.com/webrtc/11152,https://issues.webrtc.org/issues/42221237 +https://crbug.com/webrtc/11153,https://issues.webrtc.org/issues/42221238 +https://crbug.com/webrtc/11154,https://issues.webrtc.org/issues/42221239 +https://crbug.com/webrtc/11156,https://issues.webrtc.org/issues/42221240 +https://crbug.com/webrtc/11157,https://issues.webrtc.org/issues/42221241 
+https://crbug.com/webrtc/11158,https://issues.webrtc.org/issues/42221242 +https://crbug.com/webrtc/11159,https://issues.webrtc.org/issues/42221243 +https://crbug.com/webrtc/1116,https://issues.webrtc.org/issues/42221244 +https://crbug.com/webrtc/11160,https://issues.webrtc.org/issues/42221245 +https://crbug.com/webrtc/11161,https://issues.webrtc.org/issues/42221246 +https://crbug.com/webrtc/11163,https://issues.webrtc.org/issues/42221247 +https://crbug.com/webrtc/11164,https://issues.webrtc.org/issues/42221248 +https://crbug.com/webrtc/11165,https://issues.webrtc.org/issues/42221249 +https://crbug.com/webrtc/11166,https://issues.webrtc.org/issues/42221250 +https://crbug.com/webrtc/11167,https://issues.webrtc.org/issues/42221251 +https://crbug.com/webrtc/11168,https://issues.webrtc.org/issues/42221252 +https://crbug.com/webrtc/11169,https://issues.webrtc.org/issues/42221253 +https://crbug.com/webrtc/1117,https://issues.webrtc.org/issues/42221254 +https://crbug.com/webrtc/11171,https://issues.webrtc.org/issues/42221255 +https://crbug.com/webrtc/11172,https://issues.webrtc.org/issues/42221256 +https://crbug.com/webrtc/11173,https://issues.webrtc.org/issues/42221257 +https://crbug.com/webrtc/11174,https://issues.webrtc.org/issues/42221258 +https://crbug.com/webrtc/11175,https://issues.webrtc.org/issues/42221259 +https://crbug.com/webrtc/11176,https://issues.webrtc.org/issues/42221260 +https://crbug.com/webrtc/11177,https://issues.webrtc.org/issues/42221261 +https://crbug.com/webrtc/11178,https://issues.webrtc.org/issues/42221262 +https://crbug.com/webrtc/11179,https://issues.webrtc.org/issues/42221263 +https://crbug.com/webrtc/1118,https://issues.webrtc.org/issues/42221264 +https://crbug.com/webrtc/11180,https://issues.webrtc.org/issues/42221265 +https://crbug.com/webrtc/11181,https://issues.webrtc.org/issues/42221266 +https://crbug.com/webrtc/11182,https://issues.webrtc.org/issues/42221267 +https://crbug.com/webrtc/11183,https://issues.webrtc.org/issues/42221268 +https://crbug.com/webrtc/11184,https://issues.webrtc.org/issues/42221269 +https://crbug.com/webrtc/11185,https://issues.webrtc.org/issues/42221270 +https://crbug.com/webrtc/11186,https://issues.webrtc.org/issues/42221271 +https://crbug.com/webrtc/11187,https://issues.webrtc.org/issues/42221272 +https://crbug.com/webrtc/11188,https://issues.webrtc.org/issues/42221273 +https://crbug.com/webrtc/11189,https://issues.webrtc.org/issues/42221274 +https://crbug.com/webrtc/1119,https://issues.webrtc.org/issues/42221275 +https://crbug.com/webrtc/11190,https://issues.webrtc.org/issues/42221276 +https://crbug.com/webrtc/11191,https://issues.webrtc.org/issues/42221277 +https://crbug.com/webrtc/11192,https://issues.webrtc.org/issues/42221278 +https://crbug.com/webrtc/11193,https://issues.webrtc.org/issues/42221279 +https://crbug.com/webrtc/11194,https://issues.webrtc.org/issues/42221280 +https://crbug.com/webrtc/11195,https://issues.webrtc.org/issues/42221281 +https://crbug.com/webrtc/11196,https://issues.webrtc.org/issues/42221282 +https://crbug.com/webrtc/11197,https://issues.webrtc.org/issues/42221283 +https://crbug.com/webrtc/11198,https://issues.webrtc.org/issues/42221284 +https://crbug.com/webrtc/11199,https://issues.webrtc.org/issues/42221285 +https://crbug.com/webrtc/1120,https://issues.webrtc.org/issues/42221286 +https://crbug.com/webrtc/11200,https://issues.webrtc.org/issues/42221287 +https://crbug.com/webrtc/11201,https://issues.webrtc.org/issues/42221288 +https://crbug.com/webrtc/11202,https://issues.webrtc.org/issues/42221289 
+https://crbug.com/webrtc/11203,https://issues.webrtc.org/issues/42221290 +https://crbug.com/webrtc/11204,https://issues.webrtc.org/issues/42221291 +https://crbug.com/webrtc/11205,https://issues.webrtc.org/issues/42221292 +https://crbug.com/webrtc/11206,https://issues.webrtc.org/issues/42221293 +https://crbug.com/webrtc/11207,https://issues.webrtc.org/issues/42221294 +https://crbug.com/webrtc/11208,https://issues.webrtc.org/issues/42221295 +https://crbug.com/webrtc/11209,https://issues.webrtc.org/issues/42221296 +https://crbug.com/webrtc/1121,https://issues.webrtc.org/issues/42221297 +https://crbug.com/webrtc/11210,https://issues.webrtc.org/issues/42221298 +https://crbug.com/webrtc/11211,https://issues.webrtc.org/issues/42221299 +https://crbug.com/webrtc/11213,https://issues.webrtc.org/issues/42221300 +https://crbug.com/webrtc/11214,https://issues.webrtc.org/issues/42221301 +https://crbug.com/webrtc/11215,https://issues.webrtc.org/issues/42221302 +https://crbug.com/webrtc/11216,https://issues.webrtc.org/issues/42221303 +https://crbug.com/webrtc/11217,https://issues.webrtc.org/issues/42221304 +https://crbug.com/webrtc/11218,https://issues.webrtc.org/issues/42221305 +https://crbug.com/webrtc/11219,https://issues.webrtc.org/issues/42221306 +https://crbug.com/webrtc/1122,https://issues.webrtc.org/issues/42221307 +https://crbug.com/webrtc/11220,https://issues.webrtc.org/issues/42221308 +https://crbug.com/webrtc/11221,https://issues.webrtc.org/issues/42221309 +https://crbug.com/webrtc/11222,https://issues.webrtc.org/issues/42221310 +https://crbug.com/webrtc/11223,https://issues.webrtc.org/issues/42221311 +https://crbug.com/webrtc/11224,https://issues.webrtc.org/issues/42221312 +https://crbug.com/webrtc/11225,https://issues.webrtc.org/issues/42221313 +https://crbug.com/webrtc/11226,https://issues.webrtc.org/issues/42221314 +https://crbug.com/webrtc/11227,https://issues.webrtc.org/issues/42221315 +https://crbug.com/webrtc/11228,https://issues.webrtc.org/issues/42221316 +https://crbug.com/webrtc/11229,https://issues.webrtc.org/issues/42221317 +https://crbug.com/webrtc/1123,https://issues.webrtc.org/issues/42221318 +https://crbug.com/webrtc/11230,https://issues.webrtc.org/issues/42221319 +https://crbug.com/webrtc/11231,https://issues.webrtc.org/issues/42221320 +https://crbug.com/webrtc/11232,https://issues.webrtc.org/issues/42221321 +https://crbug.com/webrtc/11233,https://issues.webrtc.org/issues/42221322 +https://crbug.com/webrtc/11234,https://issues.webrtc.org/issues/42221323 +https://crbug.com/webrtc/11235,https://issues.webrtc.org/issues/42221324 +https://crbug.com/webrtc/11236,https://issues.webrtc.org/issues/42221325 +https://crbug.com/webrtc/11237,https://issues.webrtc.org/issues/42221326 +https://crbug.com/webrtc/11238,https://issues.webrtc.org/issues/42221327 +https://crbug.com/webrtc/11239,https://issues.webrtc.org/issues/42221328 +https://crbug.com/webrtc/1124,https://issues.webrtc.org/issues/42221329 +https://crbug.com/webrtc/11240,https://issues.webrtc.org/issues/42221330 +https://crbug.com/webrtc/11241,https://issues.webrtc.org/issues/42221331 +https://crbug.com/webrtc/11242,https://issues.webrtc.org/issues/42221332 +https://crbug.com/webrtc/11243,https://issues.webrtc.org/issues/42221333 +https://crbug.com/webrtc/11245,https://issues.webrtc.org/issues/42221334 +https://crbug.com/webrtc/11246,https://issues.webrtc.org/issues/42221335 +https://crbug.com/webrtc/11247,https://issues.webrtc.org/issues/42221336 +https://crbug.com/webrtc/11248,https://issues.webrtc.org/issues/42221337 
+https://crbug.com/webrtc/1125,https://issues.webrtc.org/issues/42221338 +https://crbug.com/webrtc/11250,https://issues.webrtc.org/issues/42221339 +https://crbug.com/webrtc/11251,https://issues.webrtc.org/issues/42221340 +https://crbug.com/webrtc/11252,https://issues.webrtc.org/issues/42221341 +https://crbug.com/webrtc/11253,https://issues.webrtc.org/issues/42221342 +https://crbug.com/webrtc/11254,https://issues.webrtc.org/issues/42221343 +https://crbug.com/webrtc/11255,https://issues.webrtc.org/issues/42221344 +https://crbug.com/webrtc/11256,https://issues.webrtc.org/issues/42221345 +https://crbug.com/webrtc/11257,https://issues.webrtc.org/issues/42221346 +https://crbug.com/webrtc/11258,https://issues.webrtc.org/issues/42221347 +https://crbug.com/webrtc/11259,https://issues.webrtc.org/issues/42221348 +https://crbug.com/webrtc/1126,https://issues.webrtc.org/issues/42221349 +https://crbug.com/webrtc/11260,https://issues.webrtc.org/issues/42221350 +https://crbug.com/webrtc/11261,https://issues.webrtc.org/issues/42221351 +https://crbug.com/webrtc/11262,https://issues.webrtc.org/issues/42221352 +https://crbug.com/webrtc/11263,https://issues.webrtc.org/issues/42221353 +https://crbug.com/webrtc/11264,https://issues.webrtc.org/issues/42221354 +https://crbug.com/webrtc/11265,https://issues.webrtc.org/issues/42221355 +https://crbug.com/webrtc/11266,https://issues.webrtc.org/issues/42221356 +https://crbug.com/webrtc/11267,https://issues.webrtc.org/issues/42221357 +https://crbug.com/webrtc/11268,https://issues.webrtc.org/issues/42221358 +https://crbug.com/webrtc/11269,https://issues.webrtc.org/issues/42221359 +https://crbug.com/webrtc/1127,https://issues.webrtc.org/issues/42221360 +https://crbug.com/webrtc/11270,https://issues.webrtc.org/issues/42221361 +https://crbug.com/webrtc/11271,https://issues.webrtc.org/issues/42221362 +https://crbug.com/webrtc/11273,https://issues.webrtc.org/issues/42221363 +https://crbug.com/webrtc/11274,https://issues.webrtc.org/issues/42221364 +https://crbug.com/webrtc/11275,https://issues.webrtc.org/issues/42221365 +https://crbug.com/webrtc/11276,https://issues.webrtc.org/issues/42221366 +https://crbug.com/webrtc/11277,https://issues.webrtc.org/issues/42221367 +https://crbug.com/webrtc/11278,https://issues.webrtc.org/issues/42221368 +https://crbug.com/webrtc/11279,https://issues.webrtc.org/issues/42221369 +https://crbug.com/webrtc/1128,https://issues.webrtc.org/issues/42221370 +https://crbug.com/webrtc/11280,https://issues.webrtc.org/issues/42221371 +https://crbug.com/webrtc/11281,https://issues.webrtc.org/issues/42221372 +https://crbug.com/webrtc/11282,https://issues.webrtc.org/issues/42221373 +https://crbug.com/webrtc/11283,https://issues.webrtc.org/issues/42221374 +https://crbug.com/webrtc/11284,https://issues.webrtc.org/issues/42221375 +https://crbug.com/webrtc/11285,https://issues.webrtc.org/issues/42221376 +https://crbug.com/webrtc/11286,https://issues.webrtc.org/issues/42221377 +https://crbug.com/webrtc/11287,https://issues.webrtc.org/issues/42221378 +https://crbug.com/webrtc/11288,https://issues.webrtc.org/issues/42221379 +https://crbug.com/webrtc/11289,https://issues.webrtc.org/issues/42221380 +https://crbug.com/webrtc/1129,https://issues.webrtc.org/issues/42221381 +https://crbug.com/webrtc/11291,https://issues.webrtc.org/issues/42221382 +https://crbug.com/webrtc/11292,https://issues.webrtc.org/issues/42221383 +https://crbug.com/webrtc/11293,https://issues.webrtc.org/issues/42221384 +https://crbug.com/webrtc/11294,https://issues.webrtc.org/issues/42221385 
+https://crbug.com/webrtc/11295,https://issues.webrtc.org/issues/42221386 +https://crbug.com/webrtc/11296,https://issues.webrtc.org/issues/42221387 +https://crbug.com/webrtc/11297,https://issues.webrtc.org/issues/42221388 +https://crbug.com/webrtc/11298,https://issues.webrtc.org/issues/42221389 +https://crbug.com/webrtc/113,https://issues.webrtc.org/issues/42221390 +https://crbug.com/webrtc/1130,https://issues.webrtc.org/issues/42221391 +https://crbug.com/webrtc/11300,https://issues.webrtc.org/issues/42221392 +https://crbug.com/webrtc/11301,https://issues.webrtc.org/issues/42221393 +https://crbug.com/webrtc/11302,https://issues.webrtc.org/issues/42221394 +https://crbug.com/webrtc/11303,https://issues.webrtc.org/issues/42221395 +https://crbug.com/webrtc/11304,https://issues.webrtc.org/issues/42221396 +https://crbug.com/webrtc/11305,https://issues.webrtc.org/issues/42221397 +https://crbug.com/webrtc/11307,https://issues.webrtc.org/issues/42221398 +https://crbug.com/webrtc/11308,https://issues.webrtc.org/issues/42221399 +https://crbug.com/webrtc/11309,https://issues.webrtc.org/issues/42221400 +https://crbug.com/webrtc/1131,https://issues.webrtc.org/issues/42221401 +https://crbug.com/webrtc/11310,https://issues.webrtc.org/issues/42221402 +https://crbug.com/webrtc/11311,https://issues.webrtc.org/issues/42221403 +https://crbug.com/webrtc/11312,https://issues.webrtc.org/issues/42221404 +https://crbug.com/webrtc/11313,https://issues.webrtc.org/issues/42221405 +https://crbug.com/webrtc/11314,https://issues.webrtc.org/issues/42221406 +https://crbug.com/webrtc/11315,https://issues.webrtc.org/issues/42221407 +https://crbug.com/webrtc/11316,https://issues.webrtc.org/issues/42221408 +https://crbug.com/webrtc/11317,https://issues.webrtc.org/issues/42221409 +https://crbug.com/webrtc/11318,https://issues.webrtc.org/issues/42221410 +https://crbug.com/webrtc/11319,https://issues.webrtc.org/issues/42221411 +https://crbug.com/webrtc/1132,https://issues.webrtc.org/issues/42221412 +https://crbug.com/webrtc/11320,https://issues.webrtc.org/issues/42221413 +https://crbug.com/webrtc/11321,https://issues.webrtc.org/issues/42221414 +https://crbug.com/webrtc/11322,https://issues.webrtc.org/issues/42221415 +https://crbug.com/webrtc/11323,https://issues.webrtc.org/issues/42221416 +https://crbug.com/webrtc/11324,https://issues.webrtc.org/issues/42221417 +https://crbug.com/webrtc/11325,https://issues.webrtc.org/issues/42221418 +https://crbug.com/webrtc/11326,https://issues.webrtc.org/issues/42221419 +https://crbug.com/webrtc/11327,https://issues.webrtc.org/issues/42221420 +https://crbug.com/webrtc/11328,https://issues.webrtc.org/issues/42221421 +https://crbug.com/webrtc/11329,https://issues.webrtc.org/issues/42221422 +https://crbug.com/webrtc/1133,https://issues.webrtc.org/issues/42221423 +https://crbug.com/webrtc/11330,https://issues.webrtc.org/issues/42221424 +https://crbug.com/webrtc/11331,https://issues.webrtc.org/issues/42221425 +https://crbug.com/webrtc/11332,https://issues.webrtc.org/issues/42221426 +https://crbug.com/webrtc/11333,https://issues.webrtc.org/issues/42221427 +https://crbug.com/webrtc/11334,https://issues.webrtc.org/issues/42221428 +https://crbug.com/webrtc/11335,https://issues.webrtc.org/issues/42221429 +https://crbug.com/webrtc/11336,https://issues.webrtc.org/issues/42221430 +https://crbug.com/webrtc/11337,https://issues.webrtc.org/issues/42221431 +https://crbug.com/webrtc/11338,https://issues.webrtc.org/issues/42221432 +https://crbug.com/webrtc/11339,https://issues.webrtc.org/issues/42221433 
+https://crbug.com/webrtc/1134,https://issues.webrtc.org/issues/42221434 +https://crbug.com/webrtc/11340,https://issues.webrtc.org/issues/42221435 +https://crbug.com/webrtc/11341,https://issues.webrtc.org/issues/42221436 +https://crbug.com/webrtc/11342,https://issues.webrtc.org/issues/42221437 +https://crbug.com/webrtc/11343,https://issues.webrtc.org/issues/42221438 +https://crbug.com/webrtc/11344,https://issues.webrtc.org/issues/42221439 +https://crbug.com/webrtc/11345,https://issues.webrtc.org/issues/42221440 +https://crbug.com/webrtc/11346,https://issues.webrtc.org/issues/42221441 +https://crbug.com/webrtc/11347,https://issues.webrtc.org/issues/42221442 +https://crbug.com/webrtc/11348,https://issues.webrtc.org/issues/42221443 +https://crbug.com/webrtc/1135,https://issues.webrtc.org/issues/42221444 +https://crbug.com/webrtc/11350,https://issues.webrtc.org/issues/42221445 +https://crbug.com/webrtc/11351,https://issues.webrtc.org/issues/42221446 +https://crbug.com/webrtc/11352,https://issues.webrtc.org/issues/42221447 +https://crbug.com/webrtc/11353,https://issues.webrtc.org/issues/42221448 +https://crbug.com/webrtc/11354,https://issues.webrtc.org/issues/42221449 +https://crbug.com/webrtc/11355,https://issues.webrtc.org/issues/42221450 +https://crbug.com/webrtc/11356,https://issues.webrtc.org/issues/42221451 +https://crbug.com/webrtc/11357,https://issues.webrtc.org/issues/42221452 +https://crbug.com/webrtc/11358,https://issues.webrtc.org/issues/42221453 +https://crbug.com/webrtc/11359,https://issues.webrtc.org/issues/42221454 +https://crbug.com/webrtc/1136,https://issues.webrtc.org/issues/42221455 +https://crbug.com/webrtc/11360,https://issues.webrtc.org/issues/42221456 +https://crbug.com/webrtc/11361,https://issues.webrtc.org/issues/42221457 +https://crbug.com/webrtc/11362,https://issues.webrtc.org/issues/42221458 +https://crbug.com/webrtc/11363,https://issues.webrtc.org/issues/42221459 +https://crbug.com/webrtc/11364,https://issues.webrtc.org/issues/42221460 +https://crbug.com/webrtc/11365,https://issues.webrtc.org/issues/42221461 +https://crbug.com/webrtc/11366,https://issues.webrtc.org/issues/42221462 +https://crbug.com/webrtc/11367,https://issues.webrtc.org/issues/42221463 +https://crbug.com/webrtc/11368,https://issues.webrtc.org/issues/42221464 +https://crbug.com/webrtc/11369,https://issues.webrtc.org/issues/42221465 +https://crbug.com/webrtc/1137,https://issues.webrtc.org/issues/42221466 +https://crbug.com/webrtc/11370,https://issues.webrtc.org/issues/42221467 +https://crbug.com/webrtc/11371,https://issues.webrtc.org/issues/42221468 +https://crbug.com/webrtc/11372,https://issues.webrtc.org/issues/42221469 +https://crbug.com/webrtc/11373,https://issues.webrtc.org/issues/42221470 +https://crbug.com/webrtc/11374,https://issues.webrtc.org/issues/42221471 +https://crbug.com/webrtc/11375,https://issues.webrtc.org/issues/42221472 +https://crbug.com/webrtc/11376,https://issues.webrtc.org/issues/42221473 +https://crbug.com/webrtc/11377,https://issues.webrtc.org/issues/42221474 +https://crbug.com/webrtc/11378,https://issues.webrtc.org/issues/42221475 +https://crbug.com/webrtc/11379,https://issues.webrtc.org/issues/42221476 +https://crbug.com/webrtc/1138,https://issues.webrtc.org/issues/42221477 +https://crbug.com/webrtc/11380,https://issues.webrtc.org/issues/42221478 +https://crbug.com/webrtc/11381,https://issues.webrtc.org/issues/42221479 +https://crbug.com/webrtc/11382,https://issues.webrtc.org/issues/42221480 +https://crbug.com/webrtc/11383,https://issues.webrtc.org/issues/42221481 
+https://crbug.com/webrtc/11384,https://issues.webrtc.org/issues/42221482 +https://crbug.com/webrtc/11385,https://issues.webrtc.org/issues/42221483 +https://crbug.com/webrtc/11386,https://issues.webrtc.org/issues/42221484 +https://crbug.com/webrtc/11387,https://issues.webrtc.org/issues/42221485 +https://crbug.com/webrtc/11388,https://issues.webrtc.org/issues/42221486 +https://crbug.com/webrtc/11389,https://issues.webrtc.org/issues/42221487 +https://crbug.com/webrtc/1139,https://issues.webrtc.org/issues/42221488 +https://crbug.com/webrtc/11390,https://issues.webrtc.org/issues/42221489 +https://crbug.com/webrtc/11391,https://issues.webrtc.org/issues/42221490 +https://crbug.com/webrtc/11392,https://issues.webrtc.org/issues/42221491 +https://crbug.com/webrtc/11393,https://issues.webrtc.org/issues/42221492 +https://crbug.com/webrtc/11394,https://issues.webrtc.org/issues/42221493 +https://crbug.com/webrtc/11395,https://issues.webrtc.org/issues/42221494 +https://crbug.com/webrtc/11396,https://issues.webrtc.org/issues/42221495 +https://crbug.com/webrtc/11398,https://issues.webrtc.org/issues/42221496 +https://crbug.com/webrtc/11399,https://issues.webrtc.org/issues/42221497 +https://crbug.com/webrtc/114,https://issues.webrtc.org/issues/42221498 +https://crbug.com/webrtc/1140,https://issues.webrtc.org/issues/42221499 +https://crbug.com/webrtc/11400,https://issues.webrtc.org/issues/42221500 +https://crbug.com/webrtc/11401,https://issues.webrtc.org/issues/42221501 +https://crbug.com/webrtc/11402,https://issues.webrtc.org/issues/42221502 +https://crbug.com/webrtc/11403,https://issues.webrtc.org/issues/42221503 +https://crbug.com/webrtc/11405,https://issues.webrtc.org/issues/42221504 +https://crbug.com/webrtc/11406,https://issues.webrtc.org/issues/42221505 +https://crbug.com/webrtc/11407,https://issues.webrtc.org/issues/42221506 +https://crbug.com/webrtc/11408,https://issues.webrtc.org/issues/42221507 +https://crbug.com/webrtc/11409,https://issues.webrtc.org/issues/42221508 +https://crbug.com/webrtc/1141,https://issues.webrtc.org/issues/42221509 +https://crbug.com/webrtc/11410,https://issues.webrtc.org/issues/42221510 +https://crbug.com/webrtc/11411,https://issues.webrtc.org/issues/42221511 +https://crbug.com/webrtc/11412,https://issues.webrtc.org/issues/42221512 +https://crbug.com/webrtc/11413,https://issues.webrtc.org/issues/42221513 +https://crbug.com/webrtc/11414,https://issues.webrtc.org/issues/42221514 +https://crbug.com/webrtc/11415,https://issues.webrtc.org/issues/42221515 +https://crbug.com/webrtc/11416,https://issues.webrtc.org/issues/42221516 +https://crbug.com/webrtc/11417,https://issues.webrtc.org/issues/42221517 +https://crbug.com/webrtc/11418,https://issues.webrtc.org/issues/42221518 +https://crbug.com/webrtc/11419,https://issues.webrtc.org/issues/42221519 +https://crbug.com/webrtc/1142,https://issues.webrtc.org/issues/42221520 +https://crbug.com/webrtc/11420,https://issues.webrtc.org/issues/42221521 +https://crbug.com/webrtc/11421,https://issues.webrtc.org/issues/42221522 +https://crbug.com/webrtc/11422,https://issues.webrtc.org/issues/42221523 +https://crbug.com/webrtc/11423,https://issues.webrtc.org/issues/42221524 +https://crbug.com/webrtc/11424,https://issues.webrtc.org/issues/42221525 +https://crbug.com/webrtc/11425,https://issues.webrtc.org/issues/42221526 +https://crbug.com/webrtc/11426,https://issues.webrtc.org/issues/42221527 +https://crbug.com/webrtc/11427,https://issues.webrtc.org/issues/42221528 +https://crbug.com/webrtc/11428,https://issues.webrtc.org/issues/42221529 
+https://crbug.com/webrtc/1143,https://issues.webrtc.org/issues/42221530 +https://crbug.com/webrtc/11430,https://issues.webrtc.org/issues/42221531 +https://crbug.com/webrtc/11431,https://issues.webrtc.org/issues/42221532 +https://crbug.com/webrtc/11432,https://issues.webrtc.org/issues/42221533 +https://crbug.com/webrtc/11433,https://issues.webrtc.org/issues/42221534 +https://crbug.com/webrtc/11434,https://issues.webrtc.org/issues/42221535 +https://crbug.com/webrtc/11435,https://issues.webrtc.org/issues/42221536 +https://crbug.com/webrtc/11436,https://issues.webrtc.org/issues/42221537 +https://crbug.com/webrtc/11438,https://issues.webrtc.org/issues/42221538 +https://crbug.com/webrtc/11439,https://issues.webrtc.org/issues/42221539 +https://crbug.com/webrtc/1144,https://issues.webrtc.org/issues/42221540 +https://crbug.com/webrtc/11440,https://issues.webrtc.org/issues/42221541 +https://crbug.com/webrtc/11441,https://issues.webrtc.org/issues/42221542 +https://crbug.com/webrtc/11442,https://issues.webrtc.org/issues/42221543 +https://crbug.com/webrtc/11443,https://issues.webrtc.org/issues/42221544 +https://crbug.com/webrtc/11444,https://issues.webrtc.org/issues/42221545 +https://crbug.com/webrtc/11445,https://issues.webrtc.org/issues/42221546 +https://crbug.com/webrtc/11446,https://issues.webrtc.org/issues/42221547 +https://crbug.com/webrtc/11447,https://issues.webrtc.org/issues/42221548 +https://crbug.com/webrtc/11448,https://issues.webrtc.org/issues/42221549 +https://crbug.com/webrtc/11449,https://issues.webrtc.org/issues/42221550 +https://crbug.com/webrtc/1145,https://issues.webrtc.org/issues/42221551 +https://crbug.com/webrtc/11450,https://issues.webrtc.org/issues/42221552 +https://crbug.com/webrtc/11451,https://issues.webrtc.org/issues/42221553 +https://crbug.com/webrtc/11452,https://issues.webrtc.org/issues/42221554 +https://crbug.com/webrtc/11453,https://issues.webrtc.org/issues/42221555 +https://crbug.com/webrtc/11454,https://issues.webrtc.org/issues/42221556 +https://crbug.com/webrtc/11455,https://issues.webrtc.org/issues/42221557 +https://crbug.com/webrtc/11456,https://issues.webrtc.org/issues/42221558 +https://crbug.com/webrtc/11457,https://issues.webrtc.org/issues/42221559 +https://crbug.com/webrtc/11458,https://issues.webrtc.org/issues/42221560 +https://crbug.com/webrtc/11459,https://issues.webrtc.org/issues/42221561 +https://crbug.com/webrtc/1146,https://issues.webrtc.org/issues/42221562 +https://crbug.com/webrtc/11460,https://issues.webrtc.org/issues/42221563 +https://crbug.com/webrtc/11461,https://issues.webrtc.org/issues/42221564 +https://crbug.com/webrtc/11462,https://issues.webrtc.org/issues/42221565 +https://crbug.com/webrtc/11463,https://issues.webrtc.org/issues/42221566 +https://crbug.com/webrtc/11464,https://issues.webrtc.org/issues/42221567 +https://crbug.com/webrtc/11465,https://issues.webrtc.org/issues/42221568 +https://crbug.com/webrtc/11466,https://issues.webrtc.org/issues/42221569 +https://crbug.com/webrtc/11467,https://issues.webrtc.org/issues/42221570 +https://crbug.com/webrtc/11468,https://issues.webrtc.org/issues/42221571 +https://crbug.com/webrtc/11469,https://issues.webrtc.org/issues/42221572 +https://crbug.com/webrtc/1147,https://issues.webrtc.org/issues/42221573 +https://crbug.com/webrtc/11470,https://issues.webrtc.org/issues/42221574 +https://crbug.com/webrtc/11471,https://issues.webrtc.org/issues/42221575 +https://crbug.com/webrtc/11473,https://issues.webrtc.org/issues/42221576 +https://crbug.com/webrtc/11474,https://issues.webrtc.org/issues/42221577 
+https://crbug.com/webrtc/11475,https://issues.webrtc.org/issues/42221578 +https://crbug.com/webrtc/11476,https://issues.webrtc.org/issues/42221579 +https://crbug.com/webrtc/11477,https://issues.webrtc.org/issues/42221580 +https://crbug.com/webrtc/11479,https://issues.webrtc.org/issues/42221581 +https://crbug.com/webrtc/1148,https://issues.webrtc.org/issues/42221582 +https://crbug.com/webrtc/11480,https://issues.webrtc.org/issues/42221583 +https://crbug.com/webrtc/11481,https://issues.webrtc.org/issues/42221584 +https://crbug.com/webrtc/11482,https://issues.webrtc.org/issues/42221585 +https://crbug.com/webrtc/11483,https://issues.webrtc.org/issues/42221586 +https://crbug.com/webrtc/11484,https://issues.webrtc.org/issues/42221587 +https://crbug.com/webrtc/11486,https://issues.webrtc.org/issues/42221588 +https://crbug.com/webrtc/11487,https://issues.webrtc.org/issues/42221589 +https://crbug.com/webrtc/11488,https://issues.webrtc.org/issues/42221590 +https://crbug.com/webrtc/11489,https://issues.webrtc.org/issues/42221591 +https://crbug.com/webrtc/1149,https://issues.webrtc.org/issues/42221592 +https://crbug.com/webrtc/11490,https://issues.webrtc.org/issues/42221593 +https://crbug.com/webrtc/11491,https://issues.webrtc.org/issues/42221594 +https://crbug.com/webrtc/11492,https://issues.webrtc.org/issues/42221595 +https://crbug.com/webrtc/11493,https://issues.webrtc.org/issues/42221596 +https://crbug.com/webrtc/11494,https://issues.webrtc.org/issues/42221597 +https://crbug.com/webrtc/11495,https://issues.webrtc.org/issues/42221598 +https://crbug.com/webrtc/11496,https://issues.webrtc.org/issues/42221599 +https://crbug.com/webrtc/11497,https://issues.webrtc.org/issues/42221600 +https://crbug.com/webrtc/11498,https://issues.webrtc.org/issues/42221601 +https://crbug.com/webrtc/11499,https://issues.webrtc.org/issues/42221602 +https://crbug.com/webrtc/115,https://issues.webrtc.org/issues/42221603 +https://crbug.com/webrtc/1150,https://issues.webrtc.org/issues/42221604 +https://crbug.com/webrtc/11500,https://issues.webrtc.org/issues/42221605 +https://crbug.com/webrtc/11502,https://issues.webrtc.org/issues/42221606 +https://crbug.com/webrtc/11503,https://issues.webrtc.org/issues/42221607 +https://crbug.com/webrtc/11504,https://issues.webrtc.org/issues/42221608 +https://crbug.com/webrtc/11505,https://issues.webrtc.org/issues/42221609 +https://crbug.com/webrtc/11506,https://issues.webrtc.org/issues/42221610 +https://crbug.com/webrtc/11507,https://issues.webrtc.org/issues/42221611 +https://crbug.com/webrtc/11508,https://issues.webrtc.org/issues/42221612 +https://crbug.com/webrtc/11509,https://issues.webrtc.org/issues/42221613 +https://crbug.com/webrtc/1151,https://issues.webrtc.org/issues/42221614 +https://crbug.com/webrtc/11510,https://issues.webrtc.org/issues/42221615 +https://crbug.com/webrtc/11511,https://issues.webrtc.org/issues/42221616 +https://crbug.com/webrtc/11512,https://issues.webrtc.org/issues/42221617 +https://crbug.com/webrtc/11513,https://issues.webrtc.org/issues/42221618 +https://crbug.com/webrtc/11514,https://issues.webrtc.org/issues/42221619 +https://crbug.com/webrtc/11516,https://issues.webrtc.org/issues/42221620 +https://crbug.com/webrtc/11517,https://issues.webrtc.org/issues/42221621 +https://crbug.com/webrtc/11518,https://issues.webrtc.org/issues/42221622 +https://crbug.com/webrtc/11519,https://issues.webrtc.org/issues/42221623 +https://crbug.com/webrtc/1152,https://issues.webrtc.org/issues/42221624 +https://crbug.com/webrtc/11520,https://issues.webrtc.org/issues/42221625 
+https://crbug.com/webrtc/11521,https://issues.webrtc.org/issues/42221626 +https://crbug.com/webrtc/11522,https://issues.webrtc.org/issues/42221627 +https://crbug.com/webrtc/11523,https://issues.webrtc.org/issues/42221628 +https://crbug.com/webrtc/11524,https://issues.webrtc.org/issues/42221629 +https://crbug.com/webrtc/11525,https://issues.webrtc.org/issues/42221630 +https://crbug.com/webrtc/11526,https://issues.webrtc.org/issues/42221631 +https://crbug.com/webrtc/11528,https://issues.webrtc.org/issues/42221632 +https://crbug.com/webrtc/1153,https://issues.webrtc.org/issues/42221633 +https://crbug.com/webrtc/11530,https://issues.webrtc.org/issues/42221634 +https://crbug.com/webrtc/11531,https://issues.webrtc.org/issues/42221635 +https://crbug.com/webrtc/11532,https://issues.webrtc.org/issues/42221636 +https://crbug.com/webrtc/11533,https://issues.webrtc.org/issues/42221637 +https://crbug.com/webrtc/11534,https://issues.webrtc.org/issues/42221638 +https://crbug.com/webrtc/11535,https://issues.webrtc.org/issues/42221639 +https://crbug.com/webrtc/11536,https://issues.webrtc.org/issues/42221640 +https://crbug.com/webrtc/11537,https://issues.webrtc.org/issues/42221641 +https://crbug.com/webrtc/11538,https://issues.webrtc.org/issues/42221642 +https://crbug.com/webrtc/11539,https://issues.webrtc.org/issues/42221643 +https://crbug.com/webrtc/1154,https://issues.webrtc.org/issues/42221644 +https://crbug.com/webrtc/11540,https://issues.webrtc.org/issues/42221645 +https://crbug.com/webrtc/11541,https://issues.webrtc.org/issues/42221646 +https://crbug.com/webrtc/11542,https://issues.webrtc.org/issues/42221647 +https://crbug.com/webrtc/11543,https://issues.webrtc.org/issues/42221648 +https://crbug.com/webrtc/11544,https://issues.webrtc.org/issues/42221649 +https://crbug.com/webrtc/11545,https://issues.webrtc.org/issues/42221650 +https://crbug.com/webrtc/11546,https://issues.webrtc.org/issues/42221651 +https://crbug.com/webrtc/11547,https://issues.webrtc.org/issues/42221652 +https://crbug.com/webrtc/11548,https://issues.webrtc.org/issues/42221653 +https://crbug.com/webrtc/11549,https://issues.webrtc.org/issues/42221654 +https://crbug.com/webrtc/1155,https://issues.webrtc.org/issues/42221655 +https://crbug.com/webrtc/11550,https://issues.webrtc.org/issues/42221656 +https://crbug.com/webrtc/11551,https://issues.webrtc.org/issues/42221657 +https://crbug.com/webrtc/11552,https://issues.webrtc.org/issues/42221658 +https://crbug.com/webrtc/11553,https://issues.webrtc.org/issues/42221659 +https://crbug.com/webrtc/11554,https://issues.webrtc.org/issues/42221660 +https://crbug.com/webrtc/11555,https://issues.webrtc.org/issues/42221661 +https://crbug.com/webrtc/11556,https://issues.webrtc.org/issues/42221662 +https://crbug.com/webrtc/11557,https://issues.webrtc.org/issues/42221663 +https://crbug.com/webrtc/11558,https://issues.webrtc.org/issues/42221664 +https://crbug.com/webrtc/11559,https://issues.webrtc.org/issues/42221665 +https://crbug.com/webrtc/1156,https://issues.webrtc.org/issues/42221666 +https://crbug.com/webrtc/11560,https://issues.webrtc.org/issues/42221667 +https://crbug.com/webrtc/11561,https://issues.webrtc.org/issues/42221668 +https://crbug.com/webrtc/11562,https://issues.webrtc.org/issues/42221669 +https://crbug.com/webrtc/11563,https://issues.webrtc.org/issues/42221670 +https://crbug.com/webrtc/11564,https://issues.webrtc.org/issues/42221671 +https://crbug.com/webrtc/11565,https://issues.webrtc.org/issues/42221672 +https://crbug.com/webrtc/11566,https://issues.webrtc.org/issues/42221673 
+https://crbug.com/webrtc/11567,https://issues.webrtc.org/issues/42221674 +https://crbug.com/webrtc/11569,https://issues.webrtc.org/issues/42221675 +https://crbug.com/webrtc/1157,https://issues.webrtc.org/issues/42221676 +https://crbug.com/webrtc/11570,https://issues.webrtc.org/issues/42221677 +https://crbug.com/webrtc/11571,https://issues.webrtc.org/issues/42221678 +https://crbug.com/webrtc/11572,https://issues.webrtc.org/issues/42221679 +https://crbug.com/webrtc/11573,https://issues.webrtc.org/issues/42221680 +https://crbug.com/webrtc/11574,https://issues.webrtc.org/issues/42221681 +https://crbug.com/webrtc/11575,https://issues.webrtc.org/issues/42221682 +https://crbug.com/webrtc/11576,https://issues.webrtc.org/issues/42221683 +https://crbug.com/webrtc/11577,https://issues.webrtc.org/issues/42221684 +https://crbug.com/webrtc/11579,https://issues.webrtc.org/issues/42221685 +https://crbug.com/webrtc/1158,https://issues.webrtc.org/issues/42221686 +https://crbug.com/webrtc/11581,https://issues.webrtc.org/issues/42221687 +https://crbug.com/webrtc/11582,https://issues.webrtc.org/issues/42221688 +https://crbug.com/webrtc/11583,https://issues.webrtc.org/issues/42221689 +https://crbug.com/webrtc/11584,https://issues.webrtc.org/issues/42221690 +https://crbug.com/webrtc/11585,https://issues.webrtc.org/issues/42221691 +https://crbug.com/webrtc/11586,https://issues.webrtc.org/issues/42221692 +https://crbug.com/webrtc/11587,https://issues.webrtc.org/issues/42221693 +https://crbug.com/webrtc/11588,https://issues.webrtc.org/issues/42221694 +https://crbug.com/webrtc/11589,https://issues.webrtc.org/issues/42221695 +https://crbug.com/webrtc/1159,https://issues.webrtc.org/issues/42221696 +https://crbug.com/webrtc/11590,https://issues.webrtc.org/issues/42221697 +https://crbug.com/webrtc/11591,https://issues.webrtc.org/issues/42221698 +https://crbug.com/webrtc/11592,https://issues.webrtc.org/issues/42221699 +https://crbug.com/webrtc/11593,https://issues.webrtc.org/issues/42221700 +https://crbug.com/webrtc/11594,https://issues.webrtc.org/issues/42221701 +https://crbug.com/webrtc/11595,https://issues.webrtc.org/issues/42221702 +https://crbug.com/webrtc/11596,https://issues.webrtc.org/issues/42221703 +https://crbug.com/webrtc/11597,https://issues.webrtc.org/issues/42221704 +https://crbug.com/webrtc/11598,https://issues.webrtc.org/issues/42221705 +https://crbug.com/webrtc/11599,https://issues.webrtc.org/issues/42221706 +https://crbug.com/webrtc/116,https://issues.webrtc.org/issues/42221707 +https://crbug.com/webrtc/1160,https://issues.webrtc.org/issues/42221708 +https://crbug.com/webrtc/11600,https://issues.webrtc.org/issues/42221709 +https://crbug.com/webrtc/11601,https://issues.webrtc.org/issues/42221710 +https://crbug.com/webrtc/11602,https://issues.webrtc.org/issues/42221711 +https://crbug.com/webrtc/11603,https://issues.webrtc.org/issues/42221712 +https://crbug.com/webrtc/11605,https://issues.webrtc.org/issues/42221713 +https://crbug.com/webrtc/11606,https://issues.webrtc.org/issues/42221714 +https://crbug.com/webrtc/11607,https://issues.webrtc.org/issues/42221715 +https://crbug.com/webrtc/11608,https://issues.webrtc.org/issues/42221716 +https://crbug.com/webrtc/11609,https://issues.webrtc.org/issues/42221717 +https://crbug.com/webrtc/1161,https://issues.webrtc.org/issues/42221718 +https://crbug.com/webrtc/11610,https://issues.webrtc.org/issues/42221719 +https://crbug.com/webrtc/11611,https://issues.webrtc.org/issues/42221720 +https://crbug.com/webrtc/11612,https://issues.webrtc.org/issues/42221721 
+https://crbug.com/webrtc/11613,https://issues.webrtc.org/issues/42221722 +https://crbug.com/webrtc/11614,https://issues.webrtc.org/issues/42221723 +https://crbug.com/webrtc/11615,https://issues.webrtc.org/issues/42221724 +https://crbug.com/webrtc/11616,https://issues.webrtc.org/issues/42221725 +https://crbug.com/webrtc/11617,https://issues.webrtc.org/issues/42221726 +https://crbug.com/webrtc/11618,https://issues.webrtc.org/issues/42221727 +https://crbug.com/webrtc/11619,https://issues.webrtc.org/issues/42221728 +https://crbug.com/webrtc/1162,https://issues.webrtc.org/issues/42221729 +https://crbug.com/webrtc/11621,https://issues.webrtc.org/issues/42221730 +https://crbug.com/webrtc/11622,https://issues.webrtc.org/issues/42221731 +https://crbug.com/webrtc/11623,https://issues.webrtc.org/issues/42221732 +https://crbug.com/webrtc/11624,https://issues.webrtc.org/issues/42221733 +https://crbug.com/webrtc/11625,https://issues.webrtc.org/issues/42221734 +https://crbug.com/webrtc/11626,https://issues.webrtc.org/issues/42221735 +https://crbug.com/webrtc/11627,https://issues.webrtc.org/issues/42221736 +https://crbug.com/webrtc/11629,https://issues.webrtc.org/issues/42221737 +https://crbug.com/webrtc/1163,https://issues.webrtc.org/issues/42221738 +https://crbug.com/webrtc/11630,https://issues.webrtc.org/issues/42221739 +https://crbug.com/webrtc/11631,https://issues.webrtc.org/issues/42221740 +https://crbug.com/webrtc/11632,https://issues.webrtc.org/issues/42221741 +https://crbug.com/webrtc/11633,https://issues.webrtc.org/issues/42221742 +https://crbug.com/webrtc/11634,https://issues.webrtc.org/issues/42221743 +https://crbug.com/webrtc/11635,https://issues.webrtc.org/issues/42221744 +https://crbug.com/webrtc/11636,https://issues.webrtc.org/issues/42221745 +https://crbug.com/webrtc/11637,https://issues.webrtc.org/issues/42221746 +https://crbug.com/webrtc/11638,https://issues.webrtc.org/issues/42221747 +https://crbug.com/webrtc/11639,https://issues.webrtc.org/issues/42221748 +https://crbug.com/webrtc/1164,https://issues.webrtc.org/issues/42221749 +https://crbug.com/webrtc/11640,https://issues.webrtc.org/issues/42221750 +https://crbug.com/webrtc/11641,https://issues.webrtc.org/issues/42221751 +https://crbug.com/webrtc/11642,https://issues.webrtc.org/issues/42221752 +https://crbug.com/webrtc/11643,https://issues.webrtc.org/issues/42221753 +https://crbug.com/webrtc/11644,https://issues.webrtc.org/issues/42221754 +https://crbug.com/webrtc/11645,https://issues.webrtc.org/issues/42221755 +https://crbug.com/webrtc/11646,https://issues.webrtc.org/issues/42221756 +https://crbug.com/webrtc/11647,https://issues.webrtc.org/issues/42221757 +https://crbug.com/webrtc/11648,https://issues.webrtc.org/issues/42221758 +https://crbug.com/webrtc/11649,https://issues.webrtc.org/issues/42221759 +https://crbug.com/webrtc/1165,https://issues.webrtc.org/issues/42221760 +https://crbug.com/webrtc/11650,https://issues.webrtc.org/issues/42221761 +https://crbug.com/webrtc/11651,https://issues.webrtc.org/issues/42221762 +https://crbug.com/webrtc/11652,https://issues.webrtc.org/issues/42221763 +https://crbug.com/webrtc/11654,https://issues.webrtc.org/issues/42221764 +https://crbug.com/webrtc/11655,https://issues.webrtc.org/issues/42221765 +https://crbug.com/webrtc/11656,https://issues.webrtc.org/issues/42221766 +https://crbug.com/webrtc/11657,https://issues.webrtc.org/issues/42221767 +https://crbug.com/webrtc/11658,https://issues.webrtc.org/issues/42221768 +https://crbug.com/webrtc/11659,https://issues.webrtc.org/issues/42221769 
+https://crbug.com/webrtc/1166,https://issues.webrtc.org/issues/42221770 +https://crbug.com/webrtc/11660,https://issues.webrtc.org/issues/42221771 +https://crbug.com/webrtc/11661,https://issues.webrtc.org/issues/42221772 +https://crbug.com/webrtc/11662,https://issues.webrtc.org/issues/42221773 +https://crbug.com/webrtc/11663,https://issues.webrtc.org/issues/42221774 +https://crbug.com/webrtc/11664,https://issues.webrtc.org/issues/42221775 +https://crbug.com/webrtc/11665,https://issues.webrtc.org/issues/42221776 +https://crbug.com/webrtc/11666,https://issues.webrtc.org/issues/42221777 +https://crbug.com/webrtc/11667,https://issues.webrtc.org/issues/42221778 +https://crbug.com/webrtc/11668,https://issues.webrtc.org/issues/42221779 +https://crbug.com/webrtc/11669,https://issues.webrtc.org/issues/42221780 +https://crbug.com/webrtc/1167,https://issues.webrtc.org/issues/42221781 +https://crbug.com/webrtc/11670,https://issues.webrtc.org/issues/42221782 +https://crbug.com/webrtc/11671,https://issues.webrtc.org/issues/42221783 +https://crbug.com/webrtc/11672,https://issues.webrtc.org/issues/42221784 +https://crbug.com/webrtc/11673,https://issues.webrtc.org/issues/42221785 +https://crbug.com/webrtc/11674,https://issues.webrtc.org/issues/42221786 +https://crbug.com/webrtc/11675,https://issues.webrtc.org/issues/42221787 +https://crbug.com/webrtc/11676,https://issues.webrtc.org/issues/42221788 +https://crbug.com/webrtc/11677,https://issues.webrtc.org/issues/42221789 +https://crbug.com/webrtc/11678,https://issues.webrtc.org/issues/42221790 +https://crbug.com/webrtc/1168,https://issues.webrtc.org/issues/42221791 +https://crbug.com/webrtc/11680,https://issues.webrtc.org/issues/42221792 +https://crbug.com/webrtc/11681,https://issues.webrtc.org/issues/42221793 +https://crbug.com/webrtc/11682,https://issues.webrtc.org/issues/42221794 +https://crbug.com/webrtc/11683,https://issues.webrtc.org/issues/42221795 +https://crbug.com/webrtc/11684,https://issues.webrtc.org/issues/42221796 +https://crbug.com/webrtc/11685,https://issues.webrtc.org/issues/42221797 +https://crbug.com/webrtc/11687,https://issues.webrtc.org/issues/42221798 +https://crbug.com/webrtc/11688,https://issues.webrtc.org/issues/42221799 +https://crbug.com/webrtc/11689,https://issues.webrtc.org/issues/42221800 +https://crbug.com/webrtc/1169,https://issues.webrtc.org/issues/42221801 +https://crbug.com/webrtc/11690,https://issues.webrtc.org/issues/42221802 +https://crbug.com/webrtc/11691,https://issues.webrtc.org/issues/42221803 +https://crbug.com/webrtc/11692,https://issues.webrtc.org/issues/42221804 +https://crbug.com/webrtc/11693,https://issues.webrtc.org/issues/42221805 +https://crbug.com/webrtc/11694,https://issues.webrtc.org/issues/42221806 +https://crbug.com/webrtc/11695,https://issues.webrtc.org/issues/42221807 +https://crbug.com/webrtc/11696,https://issues.webrtc.org/issues/42221808 +https://crbug.com/webrtc/11697,https://issues.webrtc.org/issues/42221809 +https://crbug.com/webrtc/11698,https://issues.webrtc.org/issues/42221810 +https://crbug.com/webrtc/11699,https://issues.webrtc.org/issues/42221811 +https://crbug.com/webrtc/1170,https://issues.webrtc.org/issues/42221812 +https://crbug.com/webrtc/11700,https://issues.webrtc.org/issues/42221813 +https://crbug.com/webrtc/11701,https://issues.webrtc.org/issues/42221814 +https://crbug.com/webrtc/11702,https://issues.webrtc.org/issues/42221815 +https://crbug.com/webrtc/11703,https://issues.webrtc.org/issues/42221816 +https://crbug.com/webrtc/11704,https://issues.webrtc.org/issues/42221817 
+https://crbug.com/webrtc/11705,https://issues.webrtc.org/issues/42221818 +https://crbug.com/webrtc/11706,https://issues.webrtc.org/issues/42221819 +https://crbug.com/webrtc/11707,https://issues.webrtc.org/issues/42221820 +https://crbug.com/webrtc/11708,https://issues.webrtc.org/issues/42221821 +https://crbug.com/webrtc/1171,https://issues.webrtc.org/issues/42221822 +https://crbug.com/webrtc/11710,https://issues.webrtc.org/issues/42221823 +https://crbug.com/webrtc/11711,https://issues.webrtc.org/issues/42221824 +https://crbug.com/webrtc/11712,https://issues.webrtc.org/issues/42221825 +https://crbug.com/webrtc/11713,https://issues.webrtc.org/issues/42221826 +https://crbug.com/webrtc/11714,https://issues.webrtc.org/issues/42221827 +https://crbug.com/webrtc/11715,https://issues.webrtc.org/issues/42221828 +https://crbug.com/webrtc/11716,https://issues.webrtc.org/issues/42221829 +https://crbug.com/webrtc/11717,https://issues.webrtc.org/issues/42221830 +https://crbug.com/webrtc/11718,https://issues.webrtc.org/issues/42221831 +https://crbug.com/webrtc/11719,https://issues.webrtc.org/issues/42221832 +https://crbug.com/webrtc/1172,https://issues.webrtc.org/issues/42221833 +https://crbug.com/webrtc/11721,https://issues.webrtc.org/issues/42221834 +https://crbug.com/webrtc/11722,https://issues.webrtc.org/issues/42221835 +https://crbug.com/webrtc/11723,https://issues.webrtc.org/issues/42221836 +https://crbug.com/webrtc/11724,https://issues.webrtc.org/issues/42221837 +https://crbug.com/webrtc/11725,https://issues.webrtc.org/issues/42221838 +https://crbug.com/webrtc/11726,https://issues.webrtc.org/issues/42221839 +https://crbug.com/webrtc/11727,https://issues.webrtc.org/issues/42221840 +https://crbug.com/webrtc/11728,https://issues.webrtc.org/issues/42221841 +https://crbug.com/webrtc/11729,https://issues.webrtc.org/issues/42221842 +https://crbug.com/webrtc/1173,https://issues.webrtc.org/issues/42221843 +https://crbug.com/webrtc/11730,https://issues.webrtc.org/issues/42221844 +https://crbug.com/webrtc/11731,https://issues.webrtc.org/issues/42221845 +https://crbug.com/webrtc/11732,https://issues.webrtc.org/issues/42221846 +https://crbug.com/webrtc/11733,https://issues.webrtc.org/issues/42221847 +https://crbug.com/webrtc/11734,https://issues.webrtc.org/issues/42221848 +https://crbug.com/webrtc/11735,https://issues.webrtc.org/issues/42221849 +https://crbug.com/webrtc/11736,https://issues.webrtc.org/issues/42221850 +https://crbug.com/webrtc/11738,https://issues.webrtc.org/issues/42221851 +https://crbug.com/webrtc/11739,https://issues.webrtc.org/issues/42221852 +https://crbug.com/webrtc/1174,https://issues.webrtc.org/issues/42221853 +https://crbug.com/webrtc/11740,https://issues.webrtc.org/issues/42221854 +https://crbug.com/webrtc/11741,https://issues.webrtc.org/issues/42221855 +https://crbug.com/webrtc/11742,https://issues.webrtc.org/issues/42221856 +https://crbug.com/webrtc/11743,https://issues.webrtc.org/issues/42221857 +https://crbug.com/webrtc/11744,https://issues.webrtc.org/issues/42221858 +https://crbug.com/webrtc/11745,https://issues.webrtc.org/issues/42221859 +https://crbug.com/webrtc/11746,https://issues.webrtc.org/issues/42221860 +https://crbug.com/webrtc/11747,https://issues.webrtc.org/issues/42221861 +https://crbug.com/webrtc/11749,https://issues.webrtc.org/issues/42221862 +https://crbug.com/webrtc/1175,https://issues.webrtc.org/issues/42221863 +https://crbug.com/webrtc/11750,https://issues.webrtc.org/issues/42221864 +https://crbug.com/webrtc/11751,https://issues.webrtc.org/issues/42221865 
+https://crbug.com/webrtc/11752,https://issues.webrtc.org/issues/42221866 +https://crbug.com/webrtc/11753,https://issues.webrtc.org/issues/42221867 +https://crbug.com/webrtc/11754,https://issues.webrtc.org/issues/42221868 +https://crbug.com/webrtc/11755,https://issues.webrtc.org/issues/42221869 +https://crbug.com/webrtc/11756,https://issues.webrtc.org/issues/42221870 +https://crbug.com/webrtc/11757,https://issues.webrtc.org/issues/42221871 +https://crbug.com/webrtc/11758,https://issues.webrtc.org/issues/42221872 +https://crbug.com/webrtc/11759,https://issues.webrtc.org/issues/42221873 +https://crbug.com/webrtc/1176,https://issues.webrtc.org/issues/42221874 +https://crbug.com/webrtc/11760,https://issues.webrtc.org/issues/42221875 +https://crbug.com/webrtc/11761,https://issues.webrtc.org/issues/42221876 +https://crbug.com/webrtc/11762,https://issues.webrtc.org/issues/42221877 +https://crbug.com/webrtc/11764,https://issues.webrtc.org/issues/42221878 +https://crbug.com/webrtc/11765,https://issues.webrtc.org/issues/42221879 +https://crbug.com/webrtc/11766,https://issues.webrtc.org/issues/42221880 +https://crbug.com/webrtc/11767,https://issues.webrtc.org/issues/42221881 +https://crbug.com/webrtc/11768,https://issues.webrtc.org/issues/42221882 +https://crbug.com/webrtc/11769,https://issues.webrtc.org/issues/42221883 +https://crbug.com/webrtc/1177,https://issues.webrtc.org/issues/42221884 +https://crbug.com/webrtc/11770,https://issues.webrtc.org/issues/42221885 +https://crbug.com/webrtc/11771,https://issues.webrtc.org/issues/42221886 +https://crbug.com/webrtc/11772,https://issues.webrtc.org/issues/42221887 +https://crbug.com/webrtc/11773,https://issues.webrtc.org/issues/42221888 +https://crbug.com/webrtc/11774,https://issues.webrtc.org/issues/42221889 +https://crbug.com/webrtc/11776,https://issues.webrtc.org/issues/42221890 +https://crbug.com/webrtc/11777,https://issues.webrtc.org/issues/42221891 +https://crbug.com/webrtc/11778,https://issues.webrtc.org/issues/42221892 +https://crbug.com/webrtc/11779,https://issues.webrtc.org/issues/42221893 +https://crbug.com/webrtc/1178,https://issues.webrtc.org/issues/42221894 +https://crbug.com/webrtc/11780,https://issues.webrtc.org/issues/42221895 +https://crbug.com/webrtc/11781,https://issues.webrtc.org/issues/42221896 +https://crbug.com/webrtc/11782,https://issues.webrtc.org/issues/42221897 +https://crbug.com/webrtc/11783,https://issues.webrtc.org/issues/42221898 +https://crbug.com/webrtc/11784,https://issues.webrtc.org/issues/42221899 +https://crbug.com/webrtc/11785,https://issues.webrtc.org/issues/42221900 +https://crbug.com/webrtc/11786,https://issues.webrtc.org/issues/42221901 +https://crbug.com/webrtc/11787,https://issues.webrtc.org/issues/42221902 +https://crbug.com/webrtc/11788,https://issues.webrtc.org/issues/42221903 +https://crbug.com/webrtc/11789,https://issues.webrtc.org/issues/42221904 +https://crbug.com/webrtc/1179,https://issues.webrtc.org/issues/42221905 +https://crbug.com/webrtc/11790,https://issues.webrtc.org/issues/42221906 +https://crbug.com/webrtc/11791,https://issues.webrtc.org/issues/42221907 +https://crbug.com/webrtc/11792,https://issues.webrtc.org/issues/42221908 +https://crbug.com/webrtc/11793,https://issues.webrtc.org/issues/42221909 +https://crbug.com/webrtc/11794,https://issues.webrtc.org/issues/42221910 +https://crbug.com/webrtc/11795,https://issues.webrtc.org/issues/42221911 +https://crbug.com/webrtc/11796,https://issues.webrtc.org/issues/42221912 +https://crbug.com/webrtc/11797,https://issues.webrtc.org/issues/42221913 
+https://crbug.com/webrtc/11798,https://issues.webrtc.org/issues/42221914 +https://crbug.com/webrtc/11799,https://issues.webrtc.org/issues/42221915 +https://crbug.com/webrtc/118,https://issues.webrtc.org/issues/42221916 +https://crbug.com/webrtc/1180,https://issues.webrtc.org/issues/42221917 +https://crbug.com/webrtc/11800,https://issues.webrtc.org/issues/42221918 +https://crbug.com/webrtc/11801,https://issues.webrtc.org/issues/42221919 +https://crbug.com/webrtc/11802,https://issues.webrtc.org/issues/42221920 +https://crbug.com/webrtc/11803,https://issues.webrtc.org/issues/42221921 +https://crbug.com/webrtc/11804,https://issues.webrtc.org/issues/42221922 +https://crbug.com/webrtc/11805,https://issues.webrtc.org/issues/42221923 +https://crbug.com/webrtc/11806,https://issues.webrtc.org/issues/42221924 +https://crbug.com/webrtc/11807,https://issues.webrtc.org/issues/42221925 +https://crbug.com/webrtc/11808,https://issues.webrtc.org/issues/42221926 +https://crbug.com/webrtc/11809,https://issues.webrtc.org/issues/42221927 +https://crbug.com/webrtc/1181,https://issues.webrtc.org/issues/42221928 +https://crbug.com/webrtc/11810,https://issues.webrtc.org/issues/42221929 +https://crbug.com/webrtc/11811,https://issues.webrtc.org/issues/42221930 +https://crbug.com/webrtc/11812,https://issues.webrtc.org/issues/42221931 +https://crbug.com/webrtc/11813,https://issues.webrtc.org/issues/42221932 +https://crbug.com/webrtc/11814,https://issues.webrtc.org/issues/42221933 +https://crbug.com/webrtc/11815,https://issues.webrtc.org/issues/42221934 +https://crbug.com/webrtc/11816,https://issues.webrtc.org/issues/42221935 +https://crbug.com/webrtc/11817,https://issues.webrtc.org/issues/42221936 +https://crbug.com/webrtc/11818,https://issues.webrtc.org/issues/42221937 +https://crbug.com/webrtc/1182,https://issues.webrtc.org/issues/42221938 +https://crbug.com/webrtc/11820,https://issues.webrtc.org/issues/42221939 +https://crbug.com/webrtc/11821,https://issues.webrtc.org/issues/42221940 +https://crbug.com/webrtc/11822,https://issues.webrtc.org/issues/42221941 +https://crbug.com/webrtc/11823,https://issues.webrtc.org/issues/42221942 +https://crbug.com/webrtc/11824,https://issues.webrtc.org/issues/42221943 +https://crbug.com/webrtc/11825,https://issues.webrtc.org/issues/42221944 +https://crbug.com/webrtc/11826,https://issues.webrtc.org/issues/42221945 +https://crbug.com/webrtc/11827,https://issues.webrtc.org/issues/42221946 +https://crbug.com/webrtc/11828,https://issues.webrtc.org/issues/42221947 +https://crbug.com/webrtc/11829,https://issues.webrtc.org/issues/42221948 +https://crbug.com/webrtc/1183,https://issues.webrtc.org/issues/42221949 +https://crbug.com/webrtc/11830,https://issues.webrtc.org/issues/42221950 +https://crbug.com/webrtc/11831,https://issues.webrtc.org/issues/42221951 +https://crbug.com/webrtc/11832,https://issues.webrtc.org/issues/42221952 +https://crbug.com/webrtc/11833,https://issues.webrtc.org/issues/42221953 +https://crbug.com/webrtc/11834,https://issues.webrtc.org/issues/42221954 +https://crbug.com/webrtc/11835,https://issues.webrtc.org/issues/42221955 +https://crbug.com/webrtc/11836,https://issues.webrtc.org/issues/42221956 +https://crbug.com/webrtc/11837,https://issues.webrtc.org/issues/42221957 +https://crbug.com/webrtc/11838,https://issues.webrtc.org/issues/42221958 +https://crbug.com/webrtc/11839,https://issues.webrtc.org/issues/42221959 +https://crbug.com/webrtc/1184,https://issues.webrtc.org/issues/42221960 +https://crbug.com/webrtc/11841,https://issues.webrtc.org/issues/42221961 
+https://crbug.com/webrtc/11842,https://issues.webrtc.org/issues/42221962 +https://crbug.com/webrtc/11843,https://issues.webrtc.org/issues/42221963 +https://crbug.com/webrtc/11844,https://issues.webrtc.org/issues/42221964 +https://crbug.com/webrtc/11845,https://issues.webrtc.org/issues/42221965 +https://crbug.com/webrtc/11846,https://issues.webrtc.org/issues/42221966 +https://crbug.com/webrtc/11847,https://issues.webrtc.org/issues/42221967 +https://crbug.com/webrtc/11848,https://issues.webrtc.org/issues/42221968 +https://crbug.com/webrtc/11849,https://issues.webrtc.org/issues/42221969 +https://crbug.com/webrtc/1185,https://issues.webrtc.org/issues/42221970 +https://crbug.com/webrtc/11851,https://issues.webrtc.org/issues/42221971 +https://crbug.com/webrtc/11852,https://issues.webrtc.org/issues/42221972 +https://crbug.com/webrtc/11853,https://issues.webrtc.org/issues/42221973 +https://crbug.com/webrtc/11854,https://issues.webrtc.org/issues/42221974 +https://crbug.com/webrtc/11855,https://issues.webrtc.org/issues/42221975 +https://crbug.com/webrtc/11858,https://issues.webrtc.org/issues/42221976 +https://crbug.com/webrtc/11859,https://issues.webrtc.org/issues/42221977 +https://crbug.com/webrtc/1186,https://issues.webrtc.org/issues/42221978 +https://crbug.com/webrtc/11860,https://issues.webrtc.org/issues/42221979 +https://crbug.com/webrtc/11861,https://issues.webrtc.org/issues/42221980 +https://crbug.com/webrtc/11862,https://issues.webrtc.org/issues/42221981 +https://crbug.com/webrtc/11863,https://issues.webrtc.org/issues/42221982 +https://crbug.com/webrtc/11864,https://issues.webrtc.org/issues/42221983 +https://crbug.com/webrtc/11865,https://issues.webrtc.org/issues/42221984 +https://crbug.com/webrtc/11866,https://issues.webrtc.org/issues/42221985 +https://crbug.com/webrtc/11867,https://issues.webrtc.org/issues/42221986 +https://crbug.com/webrtc/11868,https://issues.webrtc.org/issues/42221987 +https://crbug.com/webrtc/11869,https://issues.webrtc.org/issues/42221988 +https://crbug.com/webrtc/1187,https://issues.webrtc.org/issues/42221989 +https://crbug.com/webrtc/11871,https://issues.webrtc.org/issues/42221990 +https://crbug.com/webrtc/11872,https://issues.webrtc.org/issues/42221991 +https://crbug.com/webrtc/11873,https://issues.webrtc.org/issues/42221992 +https://crbug.com/webrtc/11874,https://issues.webrtc.org/issues/42221993 +https://crbug.com/webrtc/11875,https://issues.webrtc.org/issues/42221994 +https://crbug.com/webrtc/11876,https://issues.webrtc.org/issues/42221995 +https://crbug.com/webrtc/11877,https://issues.webrtc.org/issues/42221996 +https://crbug.com/webrtc/11878,https://issues.webrtc.org/issues/42221997 +https://crbug.com/webrtc/11879,https://issues.webrtc.org/issues/42221998 +https://crbug.com/webrtc/1188,https://issues.webrtc.org/issues/42221999 +https://crbug.com/webrtc/11880,https://issues.webrtc.org/issues/42222000 +https://crbug.com/webrtc/11881,https://issues.webrtc.org/issues/42222001 +https://crbug.com/webrtc/11882,https://issues.webrtc.org/issues/42222002 +https://crbug.com/webrtc/11883,https://issues.webrtc.org/issues/42222003 +https://crbug.com/webrtc/11884,https://issues.webrtc.org/issues/42222004 +https://crbug.com/webrtc/11885,https://issues.webrtc.org/issues/42222005 +https://crbug.com/webrtc/11886,https://issues.webrtc.org/issues/42222006 +https://crbug.com/webrtc/11887,https://issues.webrtc.org/issues/42222007 +https://crbug.com/webrtc/11888,https://issues.webrtc.org/issues/42222008 +https://crbug.com/webrtc/11889,https://issues.webrtc.org/issues/42222009 
+https://crbug.com/webrtc/1189,https://issues.webrtc.org/issues/42222010 +https://crbug.com/webrtc/11891,https://issues.webrtc.org/issues/42222011 +https://crbug.com/webrtc/11892,https://issues.webrtc.org/issues/42222012 +https://crbug.com/webrtc/11893,https://issues.webrtc.org/issues/42222013 +https://crbug.com/webrtc/11894,https://issues.webrtc.org/issues/42222014 +https://crbug.com/webrtc/11895,https://issues.webrtc.org/issues/42222015 +https://crbug.com/webrtc/11896,https://issues.webrtc.org/issues/42222016 +https://crbug.com/webrtc/11897,https://issues.webrtc.org/issues/42222017 +https://crbug.com/webrtc/11898,https://issues.webrtc.org/issues/42222018 +https://crbug.com/webrtc/11899,https://issues.webrtc.org/issues/42222019 +https://crbug.com/webrtc/119,https://issues.webrtc.org/issues/42222020 +https://crbug.com/webrtc/1190,https://issues.webrtc.org/issues/42222021 +https://crbug.com/webrtc/11900,https://issues.webrtc.org/issues/42222022 +https://crbug.com/webrtc/11901,https://issues.webrtc.org/issues/42222023 +https://crbug.com/webrtc/11902,https://issues.webrtc.org/issues/42222024 +https://crbug.com/webrtc/11903,https://issues.webrtc.org/issues/42222025 +https://crbug.com/webrtc/11905,https://issues.webrtc.org/issues/42222026 +https://crbug.com/webrtc/11906,https://issues.webrtc.org/issues/42222027 +https://crbug.com/webrtc/11907,https://issues.webrtc.org/issues/42222028 +https://crbug.com/webrtc/11909,https://issues.webrtc.org/issues/42222029 +https://crbug.com/webrtc/1191,https://issues.webrtc.org/issues/42222030 +https://crbug.com/webrtc/11910,https://issues.webrtc.org/issues/42222031 +https://crbug.com/webrtc/11911,https://issues.webrtc.org/issues/42222032 +https://crbug.com/webrtc/11912,https://issues.webrtc.org/issues/42222033 +https://crbug.com/webrtc/11913,https://issues.webrtc.org/issues/42222034 +https://crbug.com/webrtc/11914,https://issues.webrtc.org/issues/42222035 +https://crbug.com/webrtc/11915,https://issues.webrtc.org/issues/42222036 +https://crbug.com/webrtc/11916,https://issues.webrtc.org/issues/42222037 +https://crbug.com/webrtc/11917,https://issues.webrtc.org/issues/42222038 +https://crbug.com/webrtc/11918,https://issues.webrtc.org/issues/42222039 +https://crbug.com/webrtc/11919,https://issues.webrtc.org/issues/42222040 +https://crbug.com/webrtc/1192,https://issues.webrtc.org/issues/42222041 +https://crbug.com/webrtc/11920,https://issues.webrtc.org/issues/42222042 +https://crbug.com/webrtc/11921,https://issues.webrtc.org/issues/42222043 +https://crbug.com/webrtc/11922,https://issues.webrtc.org/issues/42222044 +https://crbug.com/webrtc/11923,https://issues.webrtc.org/issues/42222045 +https://crbug.com/webrtc/11924,https://issues.webrtc.org/issues/42222046 +https://crbug.com/webrtc/11925,https://issues.webrtc.org/issues/42222047 +https://crbug.com/webrtc/11926,https://issues.webrtc.org/issues/42222048 +https://crbug.com/webrtc/11927,https://issues.webrtc.org/issues/42222049 +https://crbug.com/webrtc/11928,https://issues.webrtc.org/issues/42222050 +https://crbug.com/webrtc/11929,https://issues.webrtc.org/issues/42222051 +https://crbug.com/webrtc/1193,https://issues.webrtc.org/issues/42222052 +https://crbug.com/webrtc/11930,https://issues.webrtc.org/issues/42222053 +https://crbug.com/webrtc/11931,https://issues.webrtc.org/issues/42222054 +https://crbug.com/webrtc/11932,https://issues.webrtc.org/issues/42222055 +https://crbug.com/webrtc/11933,https://issues.webrtc.org/issues/42222056 +https://crbug.com/webrtc/11934,https://issues.webrtc.org/issues/42222057 
+https://crbug.com/webrtc/11935,https://issues.webrtc.org/issues/42222058 +https://crbug.com/webrtc/11936,https://issues.webrtc.org/issues/42222059 +https://crbug.com/webrtc/11937,https://issues.webrtc.org/issues/42222060 +https://crbug.com/webrtc/11938,https://issues.webrtc.org/issues/42222061 +https://crbug.com/webrtc/11939,https://issues.webrtc.org/issues/42222062 +https://crbug.com/webrtc/1194,https://issues.webrtc.org/issues/42222063 +https://crbug.com/webrtc/11940,https://issues.webrtc.org/issues/42222064 +https://crbug.com/webrtc/11942,https://issues.webrtc.org/issues/42222065 +https://crbug.com/webrtc/11943,https://issues.webrtc.org/issues/42222066 +https://crbug.com/webrtc/11944,https://issues.webrtc.org/issues/42222067 +https://crbug.com/webrtc/11946,https://issues.webrtc.org/issues/42222068 +https://crbug.com/webrtc/11947,https://issues.webrtc.org/issues/42222069 +https://crbug.com/webrtc/11948,https://issues.webrtc.org/issues/42222070 +https://crbug.com/webrtc/11949,https://issues.webrtc.org/issues/42222071 +https://crbug.com/webrtc/1195,https://issues.webrtc.org/issues/42222072 +https://crbug.com/webrtc/11950,https://issues.webrtc.org/issues/42222073 +https://crbug.com/webrtc/11951,https://issues.webrtc.org/issues/42222074 +https://crbug.com/webrtc/11952,https://issues.webrtc.org/issues/42222075 +https://crbug.com/webrtc/11953,https://issues.webrtc.org/issues/42222076 +https://crbug.com/webrtc/11954,https://issues.webrtc.org/issues/42222077 +https://crbug.com/webrtc/11955,https://issues.webrtc.org/issues/42222078 +https://crbug.com/webrtc/11956,https://issues.webrtc.org/issues/42222079 +https://crbug.com/webrtc/11957,https://issues.webrtc.org/issues/42222080 +https://crbug.com/webrtc/11958,https://issues.webrtc.org/issues/42222081 +https://crbug.com/webrtc/11959,https://issues.webrtc.org/issues/42222082 +https://crbug.com/webrtc/1196,https://issues.webrtc.org/issues/42222083 +https://crbug.com/webrtc/11961,https://issues.webrtc.org/issues/42222085 +https://crbug.com/webrtc/11962,https://issues.webrtc.org/issues/42222086 +https://crbug.com/webrtc/11963,https://issues.webrtc.org/issues/42222087 +https://crbug.com/webrtc/11964,https://issues.webrtc.org/issues/42222088 +https://crbug.com/webrtc/11965,https://issues.webrtc.org/issues/42222089 +https://crbug.com/webrtc/11966,https://issues.webrtc.org/issues/42222090 +https://crbug.com/webrtc/11967,https://issues.webrtc.org/issues/42222091 +https://crbug.com/webrtc/11968,https://issues.webrtc.org/issues/42222092 +https://crbug.com/webrtc/11969,https://issues.webrtc.org/issues/42222093 +https://crbug.com/webrtc/1197,https://issues.webrtc.org/issues/42222094 +https://crbug.com/webrtc/11970,https://issues.webrtc.org/issues/42222095 +https://crbug.com/webrtc/11971,https://issues.webrtc.org/issues/42222096 +https://crbug.com/webrtc/11972,https://issues.webrtc.org/issues/42222097 +https://crbug.com/webrtc/11973,https://issues.webrtc.org/issues/42222098 +https://crbug.com/webrtc/11974,https://issues.webrtc.org/issues/42222099 +https://crbug.com/webrtc/11975,https://issues.webrtc.org/issues/42222100 +https://crbug.com/webrtc/11976,https://issues.webrtc.org/issues/42222101 +https://crbug.com/webrtc/11978,https://issues.webrtc.org/issues/42222102 +https://crbug.com/webrtc/11979,https://issues.webrtc.org/issues/42222103 +https://crbug.com/webrtc/1198,https://issues.webrtc.org/issues/42222104 +https://crbug.com/webrtc/11980,https://issues.webrtc.org/issues/42222105 +https://crbug.com/webrtc/11981,https://issues.webrtc.org/issues/42222106 
+https://crbug.com/webrtc/11983,https://issues.webrtc.org/issues/42222107 +https://crbug.com/webrtc/11984,https://issues.webrtc.org/issues/42222108 +https://crbug.com/webrtc/11985,https://issues.webrtc.org/issues/42222109 +https://crbug.com/webrtc/11986,https://issues.webrtc.org/issues/42222110 +https://crbug.com/webrtc/11987,https://issues.webrtc.org/issues/42222111 +https://crbug.com/webrtc/11988,https://issues.webrtc.org/issues/42222112 +https://crbug.com/webrtc/11989,https://issues.webrtc.org/issues/42222113 +https://crbug.com/webrtc/1199,https://issues.webrtc.org/issues/42222114 +https://crbug.com/webrtc/11991,https://issues.webrtc.org/issues/42222115 +https://crbug.com/webrtc/11992,https://issues.webrtc.org/issues/42222116 +https://crbug.com/webrtc/11993,https://issues.webrtc.org/issues/42222117 +https://crbug.com/webrtc/11994,https://issues.webrtc.org/issues/42222118 +https://crbug.com/webrtc/11995,https://issues.webrtc.org/issues/42222119 +https://crbug.com/webrtc/11996,https://issues.webrtc.org/issues/42222120 +https://crbug.com/webrtc/11997,https://issues.webrtc.org/issues/42222121 +https://crbug.com/webrtc/11998,https://issues.webrtc.org/issues/42222122 +https://crbug.com/webrtc/11999,https://issues.webrtc.org/issues/42222123 +https://crbug.com/webrtc/12,https://issues.webrtc.org/issues/42222124 +https://crbug.com/webrtc/120,https://issues.webrtc.org/issues/42222125 +https://crbug.com/webrtc/1200,https://issues.webrtc.org/issues/42222126 +https://crbug.com/webrtc/12000,https://issues.webrtc.org/issues/42222127 +https://crbug.com/webrtc/12001,https://issues.webrtc.org/issues/42222128 +https://crbug.com/webrtc/12002,https://issues.webrtc.org/issues/42222129 +https://crbug.com/webrtc/12003,https://issues.webrtc.org/issues/42222130 +https://crbug.com/webrtc/12004,https://issues.webrtc.org/issues/42222131 +https://crbug.com/webrtc/12005,https://issues.webrtc.org/issues/42222132 +https://crbug.com/webrtc/12006,https://issues.webrtc.org/issues/42222133 +https://crbug.com/webrtc/12007,https://issues.webrtc.org/issues/42222134 +https://crbug.com/webrtc/12008,https://issues.webrtc.org/issues/42222135 +https://crbug.com/webrtc/12009,https://issues.webrtc.org/issues/42222136 +https://crbug.com/webrtc/1201,https://issues.webrtc.org/issues/42222137 +https://crbug.com/webrtc/12010,https://issues.webrtc.org/issues/42222138 +https://crbug.com/webrtc/12011,https://issues.webrtc.org/issues/42222139 +https://crbug.com/webrtc/12012,https://issues.webrtc.org/issues/42222140 +https://crbug.com/webrtc/12013,https://issues.webrtc.org/issues/42222141 +https://crbug.com/webrtc/12014,https://issues.webrtc.org/issues/42222142 +https://crbug.com/webrtc/12015,https://issues.webrtc.org/issues/42222143 +https://crbug.com/webrtc/12016,https://issues.webrtc.org/issues/42222144 +https://crbug.com/webrtc/12017,https://issues.webrtc.org/issues/42222145 +https://crbug.com/webrtc/12018,https://issues.webrtc.org/issues/42222146 +https://crbug.com/webrtc/12019,https://issues.webrtc.org/issues/42222147 +https://crbug.com/webrtc/1202,https://issues.webrtc.org/issues/42222148 +https://crbug.com/webrtc/12020,https://issues.webrtc.org/issues/42222149 +https://crbug.com/webrtc/12021,https://issues.webrtc.org/issues/42222150 +https://crbug.com/webrtc/12022,https://issues.webrtc.org/issues/42222151 +https://crbug.com/webrtc/12024,https://issues.webrtc.org/issues/42222152 +https://crbug.com/webrtc/12025,https://issues.webrtc.org/issues/42222153 +https://crbug.com/webrtc/12026,https://issues.webrtc.org/issues/42222154 
+https://crbug.com/webrtc/12027,https://issues.webrtc.org/issues/42222155 +https://crbug.com/webrtc/12028,https://issues.webrtc.org/issues/42222156 +https://crbug.com/webrtc/12029,https://issues.webrtc.org/issues/42222157 +https://crbug.com/webrtc/1203,https://issues.webrtc.org/issues/42222158 +https://crbug.com/webrtc/12030,https://issues.webrtc.org/issues/42222159 +https://crbug.com/webrtc/12031,https://issues.webrtc.org/issues/42222160 +https://crbug.com/webrtc/12032,https://issues.webrtc.org/issues/42222161 +https://crbug.com/webrtc/12033,https://issues.webrtc.org/issues/42222162 +https://crbug.com/webrtc/12034,https://issues.webrtc.org/issues/42222163 +https://crbug.com/webrtc/12037,https://issues.webrtc.org/issues/42222164 +https://crbug.com/webrtc/12038,https://issues.webrtc.org/issues/42222165 +https://crbug.com/webrtc/12039,https://issues.webrtc.org/issues/42222166 +https://crbug.com/webrtc/1204,https://issues.webrtc.org/issues/42222167 +https://crbug.com/webrtc/12040,https://issues.webrtc.org/issues/42222168 +https://crbug.com/webrtc/12041,https://issues.webrtc.org/issues/42222169 +https://crbug.com/webrtc/12042,https://issues.webrtc.org/issues/42222170 +https://crbug.com/webrtc/12043,https://issues.webrtc.org/issues/42222171 +https://crbug.com/webrtc/12044,https://issues.webrtc.org/issues/42222172 +https://crbug.com/webrtc/12045,https://issues.webrtc.org/issues/42222173 +https://crbug.com/webrtc/12046,https://issues.webrtc.org/issues/42222174 +https://crbug.com/webrtc/12047,https://issues.webrtc.org/issues/42222175 +https://crbug.com/webrtc/12048,https://issues.webrtc.org/issues/42222176 +https://crbug.com/webrtc/12049,https://issues.webrtc.org/issues/42222177 +https://crbug.com/webrtc/1205,https://issues.webrtc.org/issues/42222178 +https://crbug.com/webrtc/12051,https://issues.webrtc.org/issues/42222179 +https://crbug.com/webrtc/12052,https://issues.webrtc.org/issues/42222180 +https://crbug.com/webrtc/12053,https://issues.webrtc.org/issues/42222181 +https://crbug.com/webrtc/12054,https://issues.webrtc.org/issues/42222182 +https://crbug.com/webrtc/12055,https://issues.webrtc.org/issues/42222183 +https://crbug.com/webrtc/12056,https://issues.webrtc.org/issues/42222184 +https://crbug.com/webrtc/12057,https://issues.webrtc.org/issues/42222185 +https://crbug.com/webrtc/12058,https://issues.webrtc.org/issues/42222186 +https://crbug.com/webrtc/12059,https://issues.webrtc.org/issues/42222187 +https://crbug.com/webrtc/1206,https://issues.webrtc.org/issues/42222188 +https://crbug.com/webrtc/12060,https://issues.webrtc.org/issues/42222189 +https://crbug.com/webrtc/12061,https://issues.webrtc.org/issues/42222190 +https://crbug.com/webrtc/12062,https://issues.webrtc.org/issues/42222191 +https://crbug.com/webrtc/12064,https://issues.webrtc.org/issues/42222192 +https://crbug.com/webrtc/12065,https://issues.webrtc.org/issues/42222193 +https://crbug.com/webrtc/12067,https://issues.webrtc.org/issues/42222194 +https://crbug.com/webrtc/12068,https://issues.webrtc.org/issues/42222195 +https://crbug.com/webrtc/12069,https://issues.webrtc.org/issues/42222196 +https://crbug.com/webrtc/1207,https://issues.webrtc.org/issues/42222197 +https://crbug.com/webrtc/12071,https://issues.webrtc.org/issues/42222198 +https://crbug.com/webrtc/12073,https://issues.webrtc.org/issues/42222199 +https://crbug.com/webrtc/12074,https://issues.webrtc.org/issues/42222200 +https://crbug.com/webrtc/12075,https://issues.webrtc.org/issues/42222201 +https://crbug.com/webrtc/12076,https://issues.webrtc.org/issues/42222202 
+https://crbug.com/webrtc/12077,https://issues.webrtc.org/issues/42222203 +https://crbug.com/webrtc/12078,https://issues.webrtc.org/issues/42222204 +https://crbug.com/webrtc/1208,https://issues.webrtc.org/issues/42222205 +https://crbug.com/webrtc/12080,https://issues.webrtc.org/issues/42222206 +https://crbug.com/webrtc/12081,https://issues.webrtc.org/issues/42222207 +https://crbug.com/webrtc/12082,https://issues.webrtc.org/issues/42222208 +https://crbug.com/webrtc/12083,https://issues.webrtc.org/issues/42222209 +https://crbug.com/webrtc/12084,https://issues.webrtc.org/issues/42222210 +https://crbug.com/webrtc/12085,https://issues.webrtc.org/issues/42222211 +https://crbug.com/webrtc/12086,https://issues.webrtc.org/issues/42222212 +https://crbug.com/webrtc/12087,https://issues.webrtc.org/issues/42222213 +https://crbug.com/webrtc/12088,https://issues.webrtc.org/issues/42222214 +https://crbug.com/webrtc/12089,https://issues.webrtc.org/issues/42222215 +https://crbug.com/webrtc/1209,https://issues.webrtc.org/issues/42222216 +https://crbug.com/webrtc/12090,https://issues.webrtc.org/issues/42222217 +https://crbug.com/webrtc/12091,https://issues.webrtc.org/issues/42222218 +https://crbug.com/webrtc/12092,https://issues.webrtc.org/issues/42222219 +https://crbug.com/webrtc/12093,https://issues.webrtc.org/issues/42222220 +https://crbug.com/webrtc/12094,https://issues.webrtc.org/issues/42222221 +https://crbug.com/webrtc/12095,https://issues.webrtc.org/issues/42222222 +https://crbug.com/webrtc/12096,https://issues.webrtc.org/issues/42222223 +https://crbug.com/webrtc/12097,https://issues.webrtc.org/issues/42222224 +https://crbug.com/webrtc/12098,https://issues.webrtc.org/issues/42222225 +https://crbug.com/webrtc/12099,https://issues.webrtc.org/issues/42222226 +https://crbug.com/webrtc/121,https://issues.webrtc.org/issues/42222227 +https://crbug.com/webrtc/1210,https://issues.webrtc.org/issues/42222228 +https://crbug.com/webrtc/12100,https://issues.webrtc.org/issues/42222229 +https://crbug.com/webrtc/12102,https://issues.webrtc.org/issues/42222230 +https://crbug.com/webrtc/12103,https://issues.webrtc.org/issues/42222231 +https://crbug.com/webrtc/12104,https://issues.webrtc.org/issues/42222232 +https://crbug.com/webrtc/12105,https://issues.webrtc.org/issues/42222233 +https://crbug.com/webrtc/12106,https://issues.webrtc.org/issues/42222234 +https://crbug.com/webrtc/12107,https://issues.webrtc.org/issues/42222235 +https://crbug.com/webrtc/12108,https://issues.webrtc.org/issues/42222236 +https://crbug.com/webrtc/12109,https://issues.webrtc.org/issues/42222237 +https://crbug.com/webrtc/1211,https://issues.webrtc.org/issues/42222238 +https://crbug.com/webrtc/12110,https://issues.webrtc.org/issues/42222239 +https://crbug.com/webrtc/12111,https://issues.webrtc.org/issues/42222240 +https://crbug.com/webrtc/12112,https://issues.webrtc.org/issues/42222241 +https://crbug.com/webrtc/12113,https://issues.webrtc.org/issues/42222242 +https://crbug.com/webrtc/12114,https://issues.webrtc.org/issues/42222243 +https://crbug.com/webrtc/12115,https://issues.webrtc.org/issues/42222244 +https://crbug.com/webrtc/12116,https://issues.webrtc.org/issues/42222245 +https://crbug.com/webrtc/12117,https://issues.webrtc.org/issues/42222246 +https://crbug.com/webrtc/12118,https://issues.webrtc.org/issues/42222247 +https://crbug.com/webrtc/12119,https://issues.webrtc.org/issues/42222248 +https://crbug.com/webrtc/1212,https://issues.webrtc.org/issues/42222249 +https://crbug.com/webrtc/12120,https://issues.webrtc.org/issues/42222250 
+https://crbug.com/webrtc/12121,https://issues.webrtc.org/issues/42222251 +https://crbug.com/webrtc/12122,https://issues.webrtc.org/issues/42222252 +https://crbug.com/webrtc/12124,https://issues.webrtc.org/issues/42222253 +https://crbug.com/webrtc/12125,https://issues.webrtc.org/issues/42222254 +https://crbug.com/webrtc/12126,https://issues.webrtc.org/issues/42222255 +https://crbug.com/webrtc/12127,https://issues.webrtc.org/issues/42222256 +https://crbug.com/webrtc/12128,https://issues.webrtc.org/issues/42222257 +https://crbug.com/webrtc/12129,https://issues.webrtc.org/issues/42222258 +https://crbug.com/webrtc/1213,https://issues.webrtc.org/issues/42222259 +https://crbug.com/webrtc/12130,https://issues.webrtc.org/issues/42222260 +https://crbug.com/webrtc/12131,https://issues.webrtc.org/issues/42222261 +https://crbug.com/webrtc/12132,https://issues.webrtc.org/issues/42222262 +https://crbug.com/webrtc/12133,https://issues.webrtc.org/issues/42222263 +https://crbug.com/webrtc/12134,https://issues.webrtc.org/issues/42222264 +https://crbug.com/webrtc/12135,https://issues.webrtc.org/issues/42222265 +https://crbug.com/webrtc/12137,https://issues.webrtc.org/issues/42222266 +https://crbug.com/webrtc/12138,https://issues.webrtc.org/issues/42222267 +https://crbug.com/webrtc/12139,https://issues.webrtc.org/issues/42222268 +https://crbug.com/webrtc/1214,https://issues.webrtc.org/issues/42222269 +https://crbug.com/webrtc/12140,https://issues.webrtc.org/issues/42222270 +https://crbug.com/webrtc/12141,https://issues.webrtc.org/issues/42222271 +https://crbug.com/webrtc/12142,https://issues.webrtc.org/issues/42222272 +https://crbug.com/webrtc/12143,https://issues.webrtc.org/issues/42222273 +https://crbug.com/webrtc/12144,https://issues.webrtc.org/issues/42222274 +https://crbug.com/webrtc/12145,https://issues.webrtc.org/issues/42222275 +https://crbug.com/webrtc/12146,https://issues.webrtc.org/issues/42222276 +https://crbug.com/webrtc/12147,https://issues.webrtc.org/issues/42222277 +https://crbug.com/webrtc/12148,https://issues.webrtc.org/issues/42222278 +https://crbug.com/webrtc/12149,https://issues.webrtc.org/issues/42222279 +https://crbug.com/webrtc/1215,https://issues.webrtc.org/issues/42222280 +https://crbug.com/webrtc/12150,https://issues.webrtc.org/issues/42222281 +https://crbug.com/webrtc/12151,https://issues.webrtc.org/issues/42222282 +https://crbug.com/webrtc/12152,https://issues.webrtc.org/issues/42222283 +https://crbug.com/webrtc/12153,https://issues.webrtc.org/issues/42222284 +https://crbug.com/webrtc/12154,https://issues.webrtc.org/issues/42222285 +https://crbug.com/webrtc/12155,https://issues.webrtc.org/issues/42222286 +https://crbug.com/webrtc/12156,https://issues.webrtc.org/issues/42222287 +https://crbug.com/webrtc/12157,https://issues.webrtc.org/issues/42222288 +https://crbug.com/webrtc/12158,https://issues.webrtc.org/issues/42222289 +https://crbug.com/webrtc/12159,https://issues.webrtc.org/issues/42222290 +https://crbug.com/webrtc/1216,https://issues.webrtc.org/issues/42222291 +https://crbug.com/webrtc/12160,https://issues.webrtc.org/issues/42222292 +https://crbug.com/webrtc/12161,https://issues.webrtc.org/issues/42222293 +https://crbug.com/webrtc/12162,https://issues.webrtc.org/issues/42222294 +https://crbug.com/webrtc/12163,https://issues.webrtc.org/issues/42222295 +https://crbug.com/webrtc/12164,https://issues.webrtc.org/issues/42222296 +https://crbug.com/webrtc/12165,https://issues.webrtc.org/issues/42222297 +https://crbug.com/webrtc/12166,https://issues.webrtc.org/issues/42222298 
+https://crbug.com/webrtc/12167,https://issues.webrtc.org/issues/42222299 +https://crbug.com/webrtc/12168,https://issues.webrtc.org/issues/42222300 +https://crbug.com/webrtc/12169,https://issues.webrtc.org/issues/42222301 +https://crbug.com/webrtc/1217,https://issues.webrtc.org/issues/42222302 +https://crbug.com/webrtc/12170,https://issues.webrtc.org/issues/42222303 +https://crbug.com/webrtc/12171,https://issues.webrtc.org/issues/42222304 +https://crbug.com/webrtc/12172,https://issues.webrtc.org/issues/42222305 +https://crbug.com/webrtc/12173,https://issues.webrtc.org/issues/42222306 +https://crbug.com/webrtc/12174,https://issues.webrtc.org/issues/42222307 +https://crbug.com/webrtc/12175,https://issues.webrtc.org/issues/42222308 +https://crbug.com/webrtc/12177,https://issues.webrtc.org/issues/42222309 +https://crbug.com/webrtc/12178,https://issues.webrtc.org/issues/42222310 +https://crbug.com/webrtc/12179,https://issues.webrtc.org/issues/42222311 +https://crbug.com/webrtc/1218,https://issues.webrtc.org/issues/42222312 +https://crbug.com/webrtc/12180,https://issues.webrtc.org/issues/42222313 +https://crbug.com/webrtc/12181,https://issues.webrtc.org/issues/42222314 +https://crbug.com/webrtc/12182,https://issues.webrtc.org/issues/42222315 +https://crbug.com/webrtc/12183,https://issues.webrtc.org/issues/42222316 +https://crbug.com/webrtc/12184,https://issues.webrtc.org/issues/42222317 +https://crbug.com/webrtc/12185,https://issues.webrtc.org/issues/42222318 +https://crbug.com/webrtc/12186,https://issues.webrtc.org/issues/42222319 +https://crbug.com/webrtc/12187,https://issues.webrtc.org/issues/42222320 +https://crbug.com/webrtc/12188,https://issues.webrtc.org/issues/42222321 +https://crbug.com/webrtc/12189,https://issues.webrtc.org/issues/42222322 +https://crbug.com/webrtc/1219,https://issues.webrtc.org/issues/42222323 +https://crbug.com/webrtc/12190,https://issues.webrtc.org/issues/42222324 +https://crbug.com/webrtc/12192,https://issues.webrtc.org/issues/42222325 +https://crbug.com/webrtc/12193,https://issues.webrtc.org/issues/42222326 +https://crbug.com/webrtc/12194,https://issues.webrtc.org/issues/42222327 +https://crbug.com/webrtc/12195,https://issues.webrtc.org/issues/42222328 +https://crbug.com/webrtc/12196,https://issues.webrtc.org/issues/42222329 +https://crbug.com/webrtc/12198,https://issues.webrtc.org/issues/42222330 +https://crbug.com/webrtc/12199,https://issues.webrtc.org/issues/42222331 +https://crbug.com/webrtc/122,https://issues.webrtc.org/issues/42222332 +https://crbug.com/webrtc/12200,https://issues.webrtc.org/issues/42222333 +https://crbug.com/webrtc/12201,https://issues.webrtc.org/issues/42222334 +https://crbug.com/webrtc/12203,https://issues.webrtc.org/issues/42222335 +https://crbug.com/webrtc/12204,https://issues.webrtc.org/issues/42222336 +https://crbug.com/webrtc/12205,https://issues.webrtc.org/issues/42222337 +https://crbug.com/webrtc/12206,https://issues.webrtc.org/issues/42222338 +https://crbug.com/webrtc/12207,https://issues.webrtc.org/issues/42222339 +https://crbug.com/webrtc/12208,https://issues.webrtc.org/issues/42222340 +https://crbug.com/webrtc/12209,https://issues.webrtc.org/issues/42222341 +https://crbug.com/webrtc/1221,https://issues.webrtc.org/issues/42222342 +https://crbug.com/webrtc/12210,https://issues.webrtc.org/issues/42222343 +https://crbug.com/webrtc/12211,https://issues.webrtc.org/issues/42222344 +https://crbug.com/webrtc/12212,https://issues.webrtc.org/issues/42222345 +https://crbug.com/webrtc/12213,https://issues.webrtc.org/issues/42222346 
+https://crbug.com/webrtc/12214,https://issues.webrtc.org/issues/42222347 +https://crbug.com/webrtc/12215,https://issues.webrtc.org/issues/42222348 +https://crbug.com/webrtc/12216,https://issues.webrtc.org/issues/42222349 +https://crbug.com/webrtc/12217,https://issues.webrtc.org/issues/42222350 +https://crbug.com/webrtc/12218,https://issues.webrtc.org/issues/42222351 +https://crbug.com/webrtc/12219,https://issues.webrtc.org/issues/42222352 +https://crbug.com/webrtc/1222,https://issues.webrtc.org/issues/42222353 +https://crbug.com/webrtc/12220,https://issues.webrtc.org/issues/42222354 +https://crbug.com/webrtc/12221,https://issues.webrtc.org/issues/42222355 +https://crbug.com/webrtc/12222,https://issues.webrtc.org/issues/42222356 +https://crbug.com/webrtc/12223,https://issues.webrtc.org/issues/42222357 +https://crbug.com/webrtc/12224,https://issues.webrtc.org/issues/42222358 +https://crbug.com/webrtc/12225,https://issues.webrtc.org/issues/42222359 +https://crbug.com/webrtc/12226,https://issues.webrtc.org/issues/42222360 +https://crbug.com/webrtc/12227,https://issues.webrtc.org/issues/42222361 +https://crbug.com/webrtc/12228,https://issues.webrtc.org/issues/42222362 +https://crbug.com/webrtc/12229,https://issues.webrtc.org/issues/42222363 +https://crbug.com/webrtc/1223,https://issues.webrtc.org/issues/42222364 +https://crbug.com/webrtc/12230,https://issues.webrtc.org/issues/42222365 +https://crbug.com/webrtc/12231,https://issues.webrtc.org/issues/42222366 +https://crbug.com/webrtc/12232,https://issues.webrtc.org/issues/42222367 +https://crbug.com/webrtc/12233,https://issues.webrtc.org/issues/42222368 +https://crbug.com/webrtc/12234,https://issues.webrtc.org/issues/42222369 +https://crbug.com/webrtc/12235,https://issues.webrtc.org/issues/42222370 +https://crbug.com/webrtc/12236,https://issues.webrtc.org/issues/42222371 +https://crbug.com/webrtc/12237,https://issues.webrtc.org/issues/42222372 +https://crbug.com/webrtc/12239,https://issues.webrtc.org/issues/42222373 +https://crbug.com/webrtc/1224,https://issues.webrtc.org/issues/42222374 +https://crbug.com/webrtc/12240,https://issues.webrtc.org/issues/42222375 +https://crbug.com/webrtc/12241,https://issues.webrtc.org/issues/42222376 +https://crbug.com/webrtc/12242,https://issues.webrtc.org/issues/42222377 +https://crbug.com/webrtc/12243,https://issues.webrtc.org/issues/42222378 +https://crbug.com/webrtc/12244,https://issues.webrtc.org/issues/42222379 +https://crbug.com/webrtc/12245,https://issues.webrtc.org/issues/42222380 +https://crbug.com/webrtc/12246,https://issues.webrtc.org/issues/42222381 +https://crbug.com/webrtc/12247,https://issues.webrtc.org/issues/42222382 +https://crbug.com/webrtc/12248,https://issues.webrtc.org/issues/42222383 +https://crbug.com/webrtc/12249,https://issues.webrtc.org/issues/42222384 +https://crbug.com/webrtc/1225,https://issues.webrtc.org/issues/42222385 +https://crbug.com/webrtc/12250,https://issues.webrtc.org/issues/42222386 +https://crbug.com/webrtc/12251,https://issues.webrtc.org/issues/42222387 +https://crbug.com/webrtc/12252,https://issues.webrtc.org/issues/42222388 +https://crbug.com/webrtc/12253,https://issues.webrtc.org/issues/42222389 +https://crbug.com/webrtc/12254,https://issues.webrtc.org/issues/42222390 +https://crbug.com/webrtc/12255,https://issues.webrtc.org/issues/42222391 +https://crbug.com/webrtc/12256,https://issues.webrtc.org/issues/42222392 +https://crbug.com/webrtc/12257,https://issues.webrtc.org/issues/42222393 +https://crbug.com/webrtc/12259,https://issues.webrtc.org/issues/42222394 
+https://crbug.com/webrtc/1226,https://issues.webrtc.org/issues/42222395 +https://crbug.com/webrtc/12260,https://issues.webrtc.org/issues/42222396 +https://crbug.com/webrtc/12261,https://issues.webrtc.org/issues/42222397 +https://crbug.com/webrtc/12262,https://issues.webrtc.org/issues/42222398 +https://crbug.com/webrtc/12263,https://issues.webrtc.org/issues/42222399 +https://crbug.com/webrtc/12264,https://issues.webrtc.org/issues/42222400 +https://crbug.com/webrtc/12265,https://issues.webrtc.org/issues/42222401 +https://crbug.com/webrtc/12266,https://issues.webrtc.org/issues/42222402 +https://crbug.com/webrtc/12267,https://issues.webrtc.org/issues/42222403 +https://crbug.com/webrtc/12269,https://issues.webrtc.org/issues/42222404 +https://crbug.com/webrtc/1227,https://issues.webrtc.org/issues/42222405 +https://crbug.com/webrtc/12270,https://issues.webrtc.org/issues/42222406 +https://crbug.com/webrtc/12271,https://issues.webrtc.org/issues/42222407 +https://crbug.com/webrtc/12272,https://issues.webrtc.org/issues/42222408 +https://crbug.com/webrtc/12273,https://issues.webrtc.org/issues/42222409 +https://crbug.com/webrtc/12274,https://issues.webrtc.org/issues/42222410 +https://crbug.com/webrtc/12275,https://issues.webrtc.org/issues/42222411 +https://crbug.com/webrtc/12276,https://issues.webrtc.org/issues/42222412 +https://crbug.com/webrtc/12277,https://issues.webrtc.org/issues/42222413 +https://crbug.com/webrtc/12278,https://issues.webrtc.org/issues/42222414 +https://crbug.com/webrtc/12279,https://issues.webrtc.org/issues/42222415 +https://crbug.com/webrtc/1228,https://issues.webrtc.org/issues/42222416 +https://crbug.com/webrtc/12280,https://issues.webrtc.org/issues/42222417 +https://crbug.com/webrtc/12281,https://issues.webrtc.org/issues/42222418 +https://crbug.com/webrtc/12282,https://issues.webrtc.org/issues/42222419 +https://crbug.com/webrtc/12283,https://issues.webrtc.org/issues/42222420 +https://crbug.com/webrtc/12284,https://issues.webrtc.org/issues/42222421 +https://crbug.com/webrtc/12285,https://issues.webrtc.org/issues/42222422 +https://crbug.com/webrtc/12286,https://issues.webrtc.org/issues/42222423 +https://crbug.com/webrtc/12287,https://issues.webrtc.org/issues/42222424 +https://crbug.com/webrtc/12288,https://issues.webrtc.org/issues/42222425 +https://crbug.com/webrtc/12289,https://issues.webrtc.org/issues/42222426 +https://crbug.com/webrtc/1229,https://issues.webrtc.org/issues/42222427 +https://crbug.com/webrtc/12290,https://issues.webrtc.org/issues/42222428 +https://crbug.com/webrtc/12291,https://issues.webrtc.org/issues/42222429 +https://crbug.com/webrtc/12292,https://issues.webrtc.org/issues/42222430 +https://crbug.com/webrtc/12293,https://issues.webrtc.org/issues/42222431 +https://crbug.com/webrtc/12294,https://issues.webrtc.org/issues/42222432 +https://crbug.com/webrtc/12295,https://issues.webrtc.org/issues/42222433 +https://crbug.com/webrtc/12296,https://issues.webrtc.org/issues/42222434 +https://crbug.com/webrtc/12297,https://issues.webrtc.org/issues/42222435 +https://crbug.com/webrtc/12298,https://issues.webrtc.org/issues/42222436 +https://crbug.com/webrtc/123,https://issues.webrtc.org/issues/42222437 +https://crbug.com/webrtc/1230,https://issues.webrtc.org/issues/42222438 +https://crbug.com/webrtc/12300,https://issues.webrtc.org/issues/42222439 +https://crbug.com/webrtc/12301,https://issues.webrtc.org/issues/42222440 +https://crbug.com/webrtc/12302,https://issues.webrtc.org/issues/42222441 +https://crbug.com/webrtc/12303,https://issues.webrtc.org/issues/42222442 
+https://crbug.com/webrtc/12304,https://issues.webrtc.org/issues/42222443 +https://crbug.com/webrtc/12305,https://issues.webrtc.org/issues/42222444 +https://crbug.com/webrtc/12306,https://issues.webrtc.org/issues/42222445 +https://crbug.com/webrtc/12307,https://issues.webrtc.org/issues/42222446 +https://crbug.com/webrtc/12308,https://issues.webrtc.org/issues/42222447 +https://crbug.com/webrtc/12309,https://issues.webrtc.org/issues/42222448 +https://crbug.com/webrtc/1231,https://issues.webrtc.org/issues/42222449 +https://crbug.com/webrtc/12310,https://issues.webrtc.org/issues/42222450 +https://crbug.com/webrtc/12311,https://issues.webrtc.org/issues/42222451 +https://crbug.com/webrtc/12312,https://issues.webrtc.org/issues/42222452 +https://crbug.com/webrtc/12313,https://issues.webrtc.org/issues/42222453 +https://crbug.com/webrtc/12314,https://issues.webrtc.org/issues/42222454 +https://crbug.com/webrtc/12315,https://issues.webrtc.org/issues/42222455 +https://crbug.com/webrtc/12316,https://issues.webrtc.org/issues/42222456 +https://crbug.com/webrtc/12317,https://issues.webrtc.org/issues/42222457 +https://crbug.com/webrtc/12319,https://issues.webrtc.org/issues/42222458 +https://crbug.com/webrtc/1232,https://issues.webrtc.org/issues/42222459 +https://crbug.com/webrtc/12320,https://issues.webrtc.org/issues/42222460 +https://crbug.com/webrtc/12321,https://issues.webrtc.org/issues/42222461 +https://crbug.com/webrtc/12322,https://issues.webrtc.org/issues/42222462 +https://crbug.com/webrtc/12323,https://issues.webrtc.org/issues/42222463 +https://crbug.com/webrtc/12324,https://issues.webrtc.org/issues/42222464 +https://crbug.com/webrtc/12325,https://issues.webrtc.org/issues/42222465 +https://crbug.com/webrtc/12326,https://issues.webrtc.org/issues/42222466 +https://crbug.com/webrtc/12327,https://issues.webrtc.org/issues/42222467 +https://crbug.com/webrtc/12329,https://issues.webrtc.org/issues/42222468 +https://crbug.com/webrtc/1233,https://issues.webrtc.org/issues/42222469 +https://crbug.com/webrtc/12330,https://issues.webrtc.org/issues/42222470 +https://crbug.com/webrtc/12331,https://issues.webrtc.org/issues/42222471 +https://crbug.com/webrtc/12332,https://issues.webrtc.org/issues/42222472 +https://crbug.com/webrtc/12333,https://issues.webrtc.org/issues/42222473 +https://crbug.com/webrtc/12334,https://issues.webrtc.org/issues/42222474 +https://crbug.com/webrtc/12335,https://issues.webrtc.org/issues/42222475 +https://crbug.com/webrtc/12336,https://issues.webrtc.org/issues/42222476 +https://crbug.com/webrtc/12337,https://issues.webrtc.org/issues/42222477 +https://crbug.com/webrtc/12338,https://issues.webrtc.org/issues/42222478 +https://crbug.com/webrtc/12339,https://issues.webrtc.org/issues/42222479 +https://crbug.com/webrtc/1234,https://issues.webrtc.org/issues/42222480 +https://crbug.com/webrtc/12340,https://issues.webrtc.org/issues/42222481 +https://crbug.com/webrtc/12341,https://issues.webrtc.org/issues/42222482 +https://crbug.com/webrtc/12342,https://issues.webrtc.org/issues/42222483 +https://crbug.com/webrtc/12343,https://issues.webrtc.org/issues/42222484 +https://crbug.com/webrtc/12344,https://issues.webrtc.org/issues/42222485 +https://crbug.com/webrtc/12345,https://issues.webrtc.org/issues/42222486 +https://crbug.com/webrtc/12346,https://issues.webrtc.org/issues/42222487 +https://crbug.com/webrtc/12347,https://issues.webrtc.org/issues/42222488 +https://crbug.com/webrtc/12348,https://issues.webrtc.org/issues/42222489 +https://crbug.com/webrtc/12349,https://issues.webrtc.org/issues/42222490 
+https://crbug.com/webrtc/1235,https://issues.webrtc.org/issues/42222491 +https://crbug.com/webrtc/12350,https://issues.webrtc.org/issues/42222492 +https://crbug.com/webrtc/12351,https://issues.webrtc.org/issues/42222493 +https://crbug.com/webrtc/12352,https://issues.webrtc.org/issues/42222494 +https://crbug.com/webrtc/12353,https://issues.webrtc.org/issues/42222495 +https://crbug.com/webrtc/12354,https://issues.webrtc.org/issues/42222496 +https://crbug.com/webrtc/12355,https://issues.webrtc.org/issues/42222497 +https://crbug.com/webrtc/12356,https://issues.webrtc.org/issues/42222498 +https://crbug.com/webrtc/12357,https://issues.webrtc.org/issues/42222499 +https://crbug.com/webrtc/12358,https://issues.webrtc.org/issues/42222500 +https://crbug.com/webrtc/12359,https://issues.webrtc.org/issues/42222501 +https://crbug.com/webrtc/1236,https://issues.webrtc.org/issues/42222502 +https://crbug.com/webrtc/12360,https://issues.webrtc.org/issues/42222503 +https://crbug.com/webrtc/12361,https://issues.webrtc.org/issues/42222504 +https://crbug.com/webrtc/12362,https://issues.webrtc.org/issues/42222505 +https://crbug.com/webrtc/12363,https://issues.webrtc.org/issues/42222506 +https://crbug.com/webrtc/12365,https://issues.webrtc.org/issues/42222507 +https://crbug.com/webrtc/12366,https://issues.webrtc.org/issues/42222508 +https://crbug.com/webrtc/12367,https://issues.webrtc.org/issues/42222509 +https://crbug.com/webrtc/12369,https://issues.webrtc.org/issues/42222510 +https://crbug.com/webrtc/1237,https://issues.webrtc.org/issues/42222511 +https://crbug.com/webrtc/12370,https://issues.webrtc.org/issues/42222512 +https://crbug.com/webrtc/12371,https://issues.webrtc.org/issues/42222513 +https://crbug.com/webrtc/12372,https://issues.webrtc.org/issues/42222514 +https://crbug.com/webrtc/12373,https://issues.webrtc.org/issues/42222515 +https://crbug.com/webrtc/12374,https://issues.webrtc.org/issues/42222516 +https://crbug.com/webrtc/12375,https://issues.webrtc.org/issues/42222517 +https://crbug.com/webrtc/12376,https://issues.webrtc.org/issues/42222518 +https://crbug.com/webrtc/12377,https://issues.webrtc.org/issues/42222519 +https://crbug.com/webrtc/12379,https://issues.webrtc.org/issues/42222520 +https://crbug.com/webrtc/1238,https://issues.webrtc.org/issues/42222521 +https://crbug.com/webrtc/12380,https://issues.webrtc.org/issues/42222522 +https://crbug.com/webrtc/12381,https://issues.webrtc.org/issues/42222523 +https://crbug.com/webrtc/12382,https://issues.webrtc.org/issues/42222524 +https://crbug.com/webrtc/12383,https://issues.webrtc.org/issues/42222525 +https://crbug.com/webrtc/12384,https://issues.webrtc.org/issues/42222526 +https://crbug.com/webrtc/12385,https://issues.webrtc.org/issues/42222527 +https://crbug.com/webrtc/12387,https://issues.webrtc.org/issues/42222528 +https://crbug.com/webrtc/12388,https://issues.webrtc.org/issues/42222529 +https://crbug.com/webrtc/12389,https://issues.webrtc.org/issues/42222530 +https://crbug.com/webrtc/1239,https://issues.webrtc.org/issues/42222531 +https://crbug.com/webrtc/12390,https://issues.webrtc.org/issues/42222532 +https://crbug.com/webrtc/12391,https://issues.webrtc.org/issues/42222533 +https://crbug.com/webrtc/12392,https://issues.webrtc.org/issues/42222534 +https://crbug.com/webrtc/12394,https://issues.webrtc.org/issues/42222535 +https://crbug.com/webrtc/12395,https://issues.webrtc.org/issues/42222536 +https://crbug.com/webrtc/12396,https://issues.webrtc.org/issues/42222537 +https://crbug.com/webrtc/12397,https://issues.webrtc.org/issues/42222538 
+https://crbug.com/webrtc/12398,https://issues.webrtc.org/issues/42222539 +https://crbug.com/webrtc/12399,https://issues.webrtc.org/issues/42222540 +https://crbug.com/webrtc/124,https://issues.webrtc.org/issues/42222541 +https://crbug.com/webrtc/1240,https://issues.webrtc.org/issues/42222542 +https://crbug.com/webrtc/12400,https://issues.webrtc.org/issues/42222543 +https://crbug.com/webrtc/12401,https://issues.webrtc.org/issues/42222544 +https://crbug.com/webrtc/12402,https://issues.webrtc.org/issues/42222545 +https://crbug.com/webrtc/12403,https://issues.webrtc.org/issues/42222546 +https://crbug.com/webrtc/12405,https://issues.webrtc.org/issues/42222547 +https://crbug.com/webrtc/12406,https://issues.webrtc.org/issues/42222548 +https://crbug.com/webrtc/12407,https://issues.webrtc.org/issues/42222549 +https://crbug.com/webrtc/12408,https://issues.webrtc.org/issues/42222550 +https://crbug.com/webrtc/12409,https://issues.webrtc.org/issues/42222551 +https://crbug.com/webrtc/1241,https://issues.webrtc.org/issues/42222552 +https://crbug.com/webrtc/12410,https://issues.webrtc.org/issues/42222553 +https://crbug.com/webrtc/12411,https://issues.webrtc.org/issues/42222554 +https://crbug.com/webrtc/12412,https://issues.webrtc.org/issues/42222555 +https://crbug.com/webrtc/12413,https://issues.webrtc.org/issues/42222556 +https://crbug.com/webrtc/12414,https://issues.webrtc.org/issues/42222557 +https://crbug.com/webrtc/12415,https://issues.webrtc.org/issues/42222558 +https://crbug.com/webrtc/12416,https://issues.webrtc.org/issues/42222559 +https://crbug.com/webrtc/12417,https://issues.webrtc.org/issues/42222560 +https://crbug.com/webrtc/12418,https://issues.webrtc.org/issues/42222561 +https://crbug.com/webrtc/12419,https://issues.webrtc.org/issues/42222562 +https://crbug.com/webrtc/1242,https://issues.webrtc.org/issues/42222563 +https://crbug.com/webrtc/12420,https://issues.webrtc.org/issues/42222564 +https://crbug.com/webrtc/12421,https://issues.webrtc.org/issues/42222565 +https://crbug.com/webrtc/12422,https://issues.webrtc.org/issues/42222566 +https://crbug.com/webrtc/12423,https://issues.webrtc.org/issues/42222567 +https://crbug.com/webrtc/12424,https://issues.webrtc.org/issues/42222568 +https://crbug.com/webrtc/12425,https://issues.webrtc.org/issues/42222569 +https://crbug.com/webrtc/12426,https://issues.webrtc.org/issues/42222570 +https://crbug.com/webrtc/12427,https://issues.webrtc.org/issues/42222571 +https://crbug.com/webrtc/12428,https://issues.webrtc.org/issues/42222572 +https://crbug.com/webrtc/12429,https://issues.webrtc.org/issues/42222573 +https://crbug.com/webrtc/1243,https://issues.webrtc.org/issues/42222574 +https://crbug.com/webrtc/12430,https://issues.webrtc.org/issues/42222575 +https://crbug.com/webrtc/12431,https://issues.webrtc.org/issues/42222576 +https://crbug.com/webrtc/12432,https://issues.webrtc.org/issues/42222577 +https://crbug.com/webrtc/12433,https://issues.webrtc.org/issues/42222578 +https://crbug.com/webrtc/12434,https://issues.webrtc.org/issues/42222579 +https://crbug.com/webrtc/12435,https://issues.webrtc.org/issues/42222580 +https://crbug.com/webrtc/12436,https://issues.webrtc.org/issues/42222581 +https://crbug.com/webrtc/12437,https://issues.webrtc.org/issues/42222582 +https://crbug.com/webrtc/12438,https://issues.webrtc.org/issues/42222583 +https://crbug.com/webrtc/12439,https://issues.webrtc.org/issues/42222584 +https://crbug.com/webrtc/1244,https://issues.webrtc.org/issues/42222585 +https://crbug.com/webrtc/12441,https://issues.webrtc.org/issues/42222586 
+https://crbug.com/webrtc/12442,https://issues.webrtc.org/issues/42222587 +https://crbug.com/webrtc/12443,https://issues.webrtc.org/issues/42222588 +https://crbug.com/webrtc/12444,https://issues.webrtc.org/issues/42222589 +https://crbug.com/webrtc/12445,https://issues.webrtc.org/issues/42222590 +https://crbug.com/webrtc/12446,https://issues.webrtc.org/issues/42222591 +https://crbug.com/webrtc/12447,https://issues.webrtc.org/issues/42222592 +https://crbug.com/webrtc/12448,https://issues.webrtc.org/issues/42222593 +https://crbug.com/webrtc/12449,https://issues.webrtc.org/issues/42222594 +https://crbug.com/webrtc/1245,https://issues.webrtc.org/issues/42222595 +https://crbug.com/webrtc/12450,https://issues.webrtc.org/issues/42222596 +https://crbug.com/webrtc/12452,https://issues.webrtc.org/issues/42222597 +https://crbug.com/webrtc/12453,https://issues.webrtc.org/issues/42222598 +https://crbug.com/webrtc/12454,https://issues.webrtc.org/issues/42222599 +https://crbug.com/webrtc/12455,https://issues.webrtc.org/issues/42222600 +https://crbug.com/webrtc/12456,https://issues.webrtc.org/issues/42222601 +https://crbug.com/webrtc/12457,https://issues.webrtc.org/issues/42222602 +https://crbug.com/webrtc/12459,https://issues.webrtc.org/issues/42222603 +https://crbug.com/webrtc/1246,https://issues.webrtc.org/issues/42222604 +https://crbug.com/webrtc/12460,https://issues.webrtc.org/issues/42222605 +https://crbug.com/webrtc/12462,https://issues.webrtc.org/issues/42222606 +https://crbug.com/webrtc/12463,https://issues.webrtc.org/issues/42222607 +https://crbug.com/webrtc/12464,https://issues.webrtc.org/issues/42222608 +https://crbug.com/webrtc/12465,https://issues.webrtc.org/issues/42222609 +https://crbug.com/webrtc/12466,https://issues.webrtc.org/issues/42222610 +https://crbug.com/webrtc/12467,https://issues.webrtc.org/issues/42222611 +https://crbug.com/webrtc/12468,https://issues.webrtc.org/issues/42222612 +https://crbug.com/webrtc/1247,https://issues.webrtc.org/issues/42222613 +https://crbug.com/webrtc/12470,https://issues.webrtc.org/issues/42222614 +https://crbug.com/webrtc/12471,https://issues.webrtc.org/issues/42222615 +https://crbug.com/webrtc/12472,https://issues.webrtc.org/issues/42222616 +https://crbug.com/webrtc/12473,https://issues.webrtc.org/issues/42222617 +https://crbug.com/webrtc/12474,https://issues.webrtc.org/issues/42222618 +https://crbug.com/webrtc/12475,https://issues.webrtc.org/issues/42222619 +https://crbug.com/webrtc/12476,https://issues.webrtc.org/issues/42222620 +https://crbug.com/webrtc/12477,https://issues.webrtc.org/issues/42222621 +https://crbug.com/webrtc/12478,https://issues.webrtc.org/issues/42222622 +https://crbug.com/webrtc/12479,https://issues.webrtc.org/issues/42222623 +https://crbug.com/webrtc/1248,https://issues.webrtc.org/issues/42222624 +https://crbug.com/webrtc/12480,https://issues.webrtc.org/issues/42222625 +https://crbug.com/webrtc/12481,https://issues.webrtc.org/issues/42222626 +https://crbug.com/webrtc/12482,https://issues.webrtc.org/issues/42222627 +https://crbug.com/webrtc/12483,https://issues.webrtc.org/issues/42222628 +https://crbug.com/webrtc/12484,https://issues.webrtc.org/issues/42222629 +https://crbug.com/webrtc/12485,https://issues.webrtc.org/issues/42222630 +https://crbug.com/webrtc/12486,https://issues.webrtc.org/issues/42222631 +https://crbug.com/webrtc/12488,https://issues.webrtc.org/issues/42222632 +https://crbug.com/webrtc/12489,https://issues.webrtc.org/issues/42222633 +https://crbug.com/webrtc/1249,https://issues.webrtc.org/issues/42222634 
+https://crbug.com/webrtc/12490,https://issues.webrtc.org/issues/42222635 +https://crbug.com/webrtc/12491,https://issues.webrtc.org/issues/42222636 +https://crbug.com/webrtc/12492,https://issues.webrtc.org/issues/42222637 +https://crbug.com/webrtc/12493,https://issues.webrtc.org/issues/42222638 +https://crbug.com/webrtc/12494,https://issues.webrtc.org/issues/42222639 +https://crbug.com/webrtc/12495,https://issues.webrtc.org/issues/42222640 +https://crbug.com/webrtc/12496,https://issues.webrtc.org/issues/42222641 +https://crbug.com/webrtc/12498,https://issues.webrtc.org/issues/42222642 +https://crbug.com/webrtc/12499,https://issues.webrtc.org/issues/42222643 +https://crbug.com/webrtc/125,https://issues.webrtc.org/issues/42222644 +https://crbug.com/webrtc/1250,https://issues.webrtc.org/issues/42222645 +https://crbug.com/webrtc/12500,https://issues.webrtc.org/issues/42222646 +https://crbug.com/webrtc/12501,https://issues.webrtc.org/issues/42222647 +https://crbug.com/webrtc/12502,https://issues.webrtc.org/issues/42222648 +https://crbug.com/webrtc/12503,https://issues.webrtc.org/issues/42222649 +https://crbug.com/webrtc/12504,https://issues.webrtc.org/issues/42222650 +https://crbug.com/webrtc/12505,https://issues.webrtc.org/issues/42222651 +https://crbug.com/webrtc/12506,https://issues.webrtc.org/issues/42222652 +https://crbug.com/webrtc/12507,https://issues.webrtc.org/issues/42222653 +https://crbug.com/webrtc/12508,https://issues.webrtc.org/issues/42222654 +https://crbug.com/webrtc/12509,https://issues.webrtc.org/issues/42222655 +https://crbug.com/webrtc/1251,https://issues.webrtc.org/issues/42222656 +https://crbug.com/webrtc/12510,https://issues.webrtc.org/issues/42222657 +https://crbug.com/webrtc/12511,https://issues.webrtc.org/issues/42222658 +https://crbug.com/webrtc/12512,https://issues.webrtc.org/issues/42222659 +https://crbug.com/webrtc/12513,https://issues.webrtc.org/issues/42222660 +https://crbug.com/webrtc/12514,https://issues.webrtc.org/issues/42222661 +https://crbug.com/webrtc/12516,https://issues.webrtc.org/issues/42222662 +https://crbug.com/webrtc/12517,https://issues.webrtc.org/issues/42222663 +https://crbug.com/webrtc/12518,https://issues.webrtc.org/issues/42222664 +https://crbug.com/webrtc/12519,https://issues.webrtc.org/issues/42222665 +https://crbug.com/webrtc/1252,https://issues.webrtc.org/issues/42222666 +https://crbug.com/webrtc/12520,https://issues.webrtc.org/issues/42222667 +https://crbug.com/webrtc/12521,https://issues.webrtc.org/issues/42222668 +https://crbug.com/webrtc/12522,https://issues.webrtc.org/issues/42222669 +https://crbug.com/webrtc/12523,https://issues.webrtc.org/issues/42222670 +https://crbug.com/webrtc/12524,https://issues.webrtc.org/issues/42222671 +https://crbug.com/webrtc/12525,https://issues.webrtc.org/issues/42222672 +https://crbug.com/webrtc/12526,https://issues.webrtc.org/issues/42222673 +https://crbug.com/webrtc/12527,https://issues.webrtc.org/issues/42222674 +https://crbug.com/webrtc/12528,https://issues.webrtc.org/issues/42222675 +https://crbug.com/webrtc/12529,https://issues.webrtc.org/issues/42222676 +https://crbug.com/webrtc/1253,https://issues.webrtc.org/issues/42222677 +https://crbug.com/webrtc/12530,https://issues.webrtc.org/issues/42222678 +https://crbug.com/webrtc/12531,https://issues.webrtc.org/issues/42222679 +https://crbug.com/webrtc/12532,https://issues.webrtc.org/issues/42222680 +https://crbug.com/webrtc/12533,https://issues.webrtc.org/issues/42222681 +https://crbug.com/webrtc/12534,https://issues.webrtc.org/issues/42222682 
+https://crbug.com/webrtc/12535,https://issues.webrtc.org/issues/42222683 +https://crbug.com/webrtc/12537,https://issues.webrtc.org/issues/42222684 +https://crbug.com/webrtc/12538,https://issues.webrtc.org/issues/42222685 +https://crbug.com/webrtc/12539,https://issues.webrtc.org/issues/42222686 +https://crbug.com/webrtc/1254,https://issues.webrtc.org/issues/42222687 +https://crbug.com/webrtc/12540,https://issues.webrtc.org/issues/42222688 +https://crbug.com/webrtc/12541,https://issues.webrtc.org/issues/42222689 +https://crbug.com/webrtc/12542,https://issues.webrtc.org/issues/42222690 +https://crbug.com/webrtc/12543,https://issues.webrtc.org/issues/42222691 +https://crbug.com/webrtc/12544,https://issues.webrtc.org/issues/42222692 +https://crbug.com/webrtc/12545,https://issues.webrtc.org/issues/42222693 +https://crbug.com/webrtc/12546,https://issues.webrtc.org/issues/42222694 +https://crbug.com/webrtc/12547,https://issues.webrtc.org/issues/42222695 +https://crbug.com/webrtc/12548,https://issues.webrtc.org/issues/42222696 +https://crbug.com/webrtc/12549,https://issues.webrtc.org/issues/42222697 +https://crbug.com/webrtc/1255,https://issues.webrtc.org/issues/42222698 +https://crbug.com/webrtc/12550,https://issues.webrtc.org/issues/42222699 +https://crbug.com/webrtc/12551,https://issues.webrtc.org/issues/42222700 +https://crbug.com/webrtc/12552,https://issues.webrtc.org/issues/42222701 +https://crbug.com/webrtc/12553,https://issues.webrtc.org/issues/42222702 +https://crbug.com/webrtc/12554,https://issues.webrtc.org/issues/42222703 +https://crbug.com/webrtc/12555,https://issues.webrtc.org/issues/42222704 +https://crbug.com/webrtc/12556,https://issues.webrtc.org/issues/42222705 +https://crbug.com/webrtc/12557,https://issues.webrtc.org/issues/42222706 +https://crbug.com/webrtc/12558,https://issues.webrtc.org/issues/42222707 +https://crbug.com/webrtc/12559,https://issues.webrtc.org/issues/42222708 +https://crbug.com/webrtc/1256,https://issues.webrtc.org/issues/42222709 +https://crbug.com/webrtc/12560,https://issues.webrtc.org/issues/42222710 +https://crbug.com/webrtc/12561,https://issues.webrtc.org/issues/42222711 +https://crbug.com/webrtc/12562,https://issues.webrtc.org/issues/42222712 +https://crbug.com/webrtc/12563,https://issues.webrtc.org/issues/42222713 +https://crbug.com/webrtc/12564,https://issues.webrtc.org/issues/42222714 +https://crbug.com/webrtc/12565,https://issues.webrtc.org/issues/42222715 +https://crbug.com/webrtc/12566,https://issues.webrtc.org/issues/42222716 +https://crbug.com/webrtc/12567,https://issues.webrtc.org/issues/42222717 +https://crbug.com/webrtc/12568,https://issues.webrtc.org/issues/42222718 +https://crbug.com/webrtc/12569,https://issues.webrtc.org/issues/42222719 +https://crbug.com/webrtc/1257,https://issues.webrtc.org/issues/42222720 +https://crbug.com/webrtc/12570,https://issues.webrtc.org/issues/42222721 +https://crbug.com/webrtc/12571,https://issues.webrtc.org/issues/42222722 +https://crbug.com/webrtc/12572,https://issues.webrtc.org/issues/42222723 +https://crbug.com/webrtc/12573,https://issues.webrtc.org/issues/42222724 +https://crbug.com/webrtc/12574,https://issues.webrtc.org/issues/42222725 +https://crbug.com/webrtc/12575,https://issues.webrtc.org/issues/42222726 +https://crbug.com/webrtc/12576,https://issues.webrtc.org/issues/42222727 +https://crbug.com/webrtc/12577,https://issues.webrtc.org/issues/42222728 +https://crbug.com/webrtc/12578,https://issues.webrtc.org/issues/42222729 +https://crbug.com/webrtc/12579,https://issues.webrtc.org/issues/42222730 
+https://crbug.com/webrtc/1258,https://issues.webrtc.org/issues/42222731 +https://crbug.com/webrtc/12580,https://issues.webrtc.org/issues/42222732 +https://crbug.com/webrtc/12581,https://issues.webrtc.org/issues/42222733 +https://crbug.com/webrtc/12582,https://issues.webrtc.org/issues/42222734 +https://crbug.com/webrtc/12583,https://issues.webrtc.org/issues/42222735 +https://crbug.com/webrtc/12584,https://issues.webrtc.org/issues/42222736 +https://crbug.com/webrtc/12585,https://issues.webrtc.org/issues/42222737 +https://crbug.com/webrtc/12586,https://issues.webrtc.org/issues/42222738 +https://crbug.com/webrtc/12587,https://issues.webrtc.org/issues/42222739 +https://crbug.com/webrtc/12588,https://issues.webrtc.org/issues/42222740 +https://crbug.com/webrtc/12589,https://issues.webrtc.org/issues/42222741 +https://crbug.com/webrtc/1259,https://issues.webrtc.org/issues/42222742 +https://crbug.com/webrtc/12590,https://issues.webrtc.org/issues/42222743 +https://crbug.com/webrtc/12591,https://issues.webrtc.org/issues/42222744 +https://crbug.com/webrtc/12592,https://issues.webrtc.org/issues/42222745 +https://crbug.com/webrtc/12593,https://issues.webrtc.org/issues/42222746 +https://crbug.com/webrtc/12594,https://issues.webrtc.org/issues/42222747 +https://crbug.com/webrtc/12595,https://issues.webrtc.org/issues/42222748 +https://crbug.com/webrtc/12596,https://issues.webrtc.org/issues/42222749 +https://crbug.com/webrtc/12597,https://issues.webrtc.org/issues/42222750 +https://crbug.com/webrtc/12599,https://issues.webrtc.org/issues/42222751 +https://crbug.com/webrtc/126,https://issues.webrtc.org/issues/42222752 +https://crbug.com/webrtc/1260,https://issues.webrtc.org/issues/42222753 +https://crbug.com/webrtc/12600,https://issues.webrtc.org/issues/42222754 +https://crbug.com/webrtc/12601,https://issues.webrtc.org/issues/42222755 +https://crbug.com/webrtc/12602,https://issues.webrtc.org/issues/42222756 +https://crbug.com/webrtc/12604,https://issues.webrtc.org/issues/42222757 +https://crbug.com/webrtc/12605,https://issues.webrtc.org/issues/42222758 +https://crbug.com/webrtc/12606,https://issues.webrtc.org/issues/42222759 +https://crbug.com/webrtc/12607,https://issues.webrtc.org/issues/42222760 +https://crbug.com/webrtc/12608,https://issues.webrtc.org/issues/42222761 +https://crbug.com/webrtc/12609,https://issues.webrtc.org/issues/42222762 +https://crbug.com/webrtc/1261,https://issues.webrtc.org/issues/42222763 +https://crbug.com/webrtc/12610,https://issues.webrtc.org/issues/42222764 +https://crbug.com/webrtc/12611,https://issues.webrtc.org/issues/42222765 +https://crbug.com/webrtc/12612,https://issues.webrtc.org/issues/42222766 +https://crbug.com/webrtc/12613,https://issues.webrtc.org/issues/42222767 +https://crbug.com/webrtc/12614,https://issues.webrtc.org/issues/42222768 +https://crbug.com/webrtc/12615,https://issues.webrtc.org/issues/42222769 +https://crbug.com/webrtc/12617,https://issues.webrtc.org/issues/42222770 +https://crbug.com/webrtc/12618,https://issues.webrtc.org/issues/42222771 +https://crbug.com/webrtc/12619,https://issues.webrtc.org/issues/42222772 +https://crbug.com/webrtc/1262,https://issues.webrtc.org/issues/42222773 +https://crbug.com/webrtc/12620,https://issues.webrtc.org/issues/42222774 +https://crbug.com/webrtc/12621,https://issues.webrtc.org/issues/42222775 +https://crbug.com/webrtc/12623,https://issues.webrtc.org/issues/42222776 +https://crbug.com/webrtc/12624,https://issues.webrtc.org/issues/42222777 +https://crbug.com/webrtc/12625,https://issues.webrtc.org/issues/42222778 
+https://crbug.com/webrtc/12626,https://issues.webrtc.org/issues/42222779 +https://crbug.com/webrtc/12627,https://issues.webrtc.org/issues/42222780 +https://crbug.com/webrtc/12628,https://issues.webrtc.org/issues/42222781 +https://crbug.com/webrtc/12629,https://issues.webrtc.org/issues/42222782 +https://crbug.com/webrtc/1263,https://issues.webrtc.org/issues/42222783 +https://crbug.com/webrtc/12630,https://issues.webrtc.org/issues/42222784 +https://crbug.com/webrtc/12631,https://issues.webrtc.org/issues/42222785 +https://crbug.com/webrtc/12632,https://issues.webrtc.org/issues/42222786 +https://crbug.com/webrtc/12633,https://issues.webrtc.org/issues/42222787 +https://crbug.com/webrtc/12634,https://issues.webrtc.org/issues/42222788 +https://crbug.com/webrtc/12635,https://issues.webrtc.org/issues/42222789 +https://crbug.com/webrtc/12636,https://issues.webrtc.org/issues/42222790 +https://crbug.com/webrtc/12637,https://issues.webrtc.org/issues/42222791 +https://crbug.com/webrtc/12638,https://issues.webrtc.org/issues/42222792 +https://crbug.com/webrtc/12639,https://issues.webrtc.org/issues/42222793 +https://crbug.com/webrtc/1264,https://issues.webrtc.org/issues/42222794 +https://crbug.com/webrtc/12640,https://issues.webrtc.org/issues/42222795 +https://crbug.com/webrtc/12641,https://issues.webrtc.org/issues/42222796 +https://crbug.com/webrtc/12642,https://issues.webrtc.org/issues/42222797 +https://crbug.com/webrtc/12643,https://issues.webrtc.org/issues/42222798 +https://crbug.com/webrtc/12644,https://issues.webrtc.org/issues/42222799 +https://crbug.com/webrtc/12645,https://issues.webrtc.org/issues/42222800 +https://crbug.com/webrtc/12646,https://issues.webrtc.org/issues/42222801 +https://crbug.com/webrtc/12647,https://issues.webrtc.org/issues/42222802 +https://crbug.com/webrtc/12648,https://issues.webrtc.org/issues/42222803 +https://crbug.com/webrtc/12649,https://issues.webrtc.org/issues/42222804 +https://crbug.com/webrtc/1265,https://issues.webrtc.org/issues/42222805 +https://crbug.com/webrtc/12650,https://issues.webrtc.org/issues/42222806 +https://crbug.com/webrtc/12651,https://issues.webrtc.org/issues/42222807 +https://crbug.com/webrtc/12652,https://issues.webrtc.org/issues/42222808 +https://crbug.com/webrtc/12654,https://issues.webrtc.org/issues/42222809 +https://crbug.com/webrtc/12655,https://issues.webrtc.org/issues/42222810 +https://crbug.com/webrtc/12656,https://issues.webrtc.org/issues/42222811 +https://crbug.com/webrtc/12657,https://issues.webrtc.org/issues/42222812 +https://crbug.com/webrtc/12658,https://issues.webrtc.org/issues/42222813 +https://crbug.com/webrtc/12659,https://issues.webrtc.org/issues/42222814 +https://crbug.com/webrtc/1266,https://issues.webrtc.org/issues/42222815 +https://crbug.com/webrtc/12661,https://issues.webrtc.org/issues/42222816 +https://crbug.com/webrtc/12662,https://issues.webrtc.org/issues/42222817 +https://crbug.com/webrtc/12663,https://issues.webrtc.org/issues/42222818 +https://crbug.com/webrtc/12664,https://issues.webrtc.org/issues/42222819 +https://crbug.com/webrtc/12665,https://issues.webrtc.org/issues/42222820 +https://crbug.com/webrtc/12666,https://issues.webrtc.org/issues/42222821 +https://crbug.com/webrtc/12667,https://issues.webrtc.org/issues/42222822 +https://crbug.com/webrtc/12668,https://issues.webrtc.org/issues/42222823 +https://crbug.com/webrtc/12669,https://issues.webrtc.org/issues/42222824 +https://crbug.com/webrtc/1267,https://issues.webrtc.org/issues/42222825 +https://crbug.com/webrtc/12670,https://issues.webrtc.org/issues/42222826 
+https://crbug.com/webrtc/12671,https://issues.webrtc.org/issues/42222827 +https://crbug.com/webrtc/12672,https://issues.webrtc.org/issues/42222828 +https://crbug.com/webrtc/12673,https://issues.webrtc.org/issues/42222829 +https://crbug.com/webrtc/12674,https://issues.webrtc.org/issues/42222830 +https://crbug.com/webrtc/12675,https://issues.webrtc.org/issues/42222831 +https://crbug.com/webrtc/12676,https://issues.webrtc.org/issues/42222832 +https://crbug.com/webrtc/12677,https://issues.webrtc.org/issues/42222833 +https://crbug.com/webrtc/12679,https://issues.webrtc.org/issues/42222834 +https://crbug.com/webrtc/1268,https://issues.webrtc.org/issues/42222835 +https://crbug.com/webrtc/12680,https://issues.webrtc.org/issues/42222836 +https://crbug.com/webrtc/12681,https://issues.webrtc.org/issues/42222837 +https://crbug.com/webrtc/12682,https://issues.webrtc.org/issues/42222838 +https://crbug.com/webrtc/12683,https://issues.webrtc.org/issues/42222839 +https://crbug.com/webrtc/12684,https://issues.webrtc.org/issues/42222840 +https://crbug.com/webrtc/12685,https://issues.webrtc.org/issues/42222841 +https://crbug.com/webrtc/12686,https://issues.webrtc.org/issues/42222842 +https://crbug.com/webrtc/12687,https://issues.webrtc.org/issues/42222843 +https://crbug.com/webrtc/12688,https://issues.webrtc.org/issues/42222844 +https://crbug.com/webrtc/12689,https://issues.webrtc.org/issues/42222845 +https://crbug.com/webrtc/1269,https://issues.webrtc.org/issues/42222846 +https://crbug.com/webrtc/12690,https://issues.webrtc.org/issues/42222847 +https://crbug.com/webrtc/12691,https://issues.webrtc.org/issues/42222848 +https://crbug.com/webrtc/12692,https://issues.webrtc.org/issues/42222849 +https://crbug.com/webrtc/12693,https://issues.webrtc.org/issues/42222850 +https://crbug.com/webrtc/12694,https://issues.webrtc.org/issues/42222851 +https://crbug.com/webrtc/12696,https://issues.webrtc.org/issues/42222852 +https://crbug.com/webrtc/12697,https://issues.webrtc.org/issues/42222853 +https://crbug.com/webrtc/12698,https://issues.webrtc.org/issues/42222854 +https://crbug.com/webrtc/12699,https://issues.webrtc.org/issues/42222855 +https://crbug.com/webrtc/127,https://issues.webrtc.org/issues/42222856 +https://crbug.com/webrtc/1270,https://issues.webrtc.org/issues/42222857 +https://crbug.com/webrtc/12700,https://issues.webrtc.org/issues/42222858 +https://crbug.com/webrtc/12701,https://issues.webrtc.org/issues/42222859 +https://crbug.com/webrtc/12702,https://issues.webrtc.org/issues/42222860 +https://crbug.com/webrtc/12703,https://issues.webrtc.org/issues/42222861 +https://crbug.com/webrtc/12704,https://issues.webrtc.org/issues/42222862 +https://crbug.com/webrtc/12705,https://issues.webrtc.org/issues/42222863 +https://crbug.com/webrtc/12706,https://issues.webrtc.org/issues/42222864 +https://crbug.com/webrtc/12707,https://issues.webrtc.org/issues/42222865 +https://crbug.com/webrtc/12708,https://issues.webrtc.org/issues/42222866 +https://crbug.com/webrtc/1271,https://issues.webrtc.org/issues/42222867 +https://crbug.com/webrtc/12710,https://issues.webrtc.org/issues/42222868 +https://crbug.com/webrtc/12711,https://issues.webrtc.org/issues/42222869 +https://crbug.com/webrtc/12712,https://issues.webrtc.org/issues/42222870 +https://crbug.com/webrtc/12713,https://issues.webrtc.org/issues/42222871 +https://crbug.com/webrtc/12714,https://issues.webrtc.org/issues/42222872 +https://crbug.com/webrtc/12715,https://issues.webrtc.org/issues/42222873 +https://crbug.com/webrtc/12716,https://issues.webrtc.org/issues/42222874 
+https://crbug.com/webrtc/12717,https://issues.webrtc.org/issues/42222875 +https://crbug.com/webrtc/12718,https://issues.webrtc.org/issues/42222876 +https://crbug.com/webrtc/12719,https://issues.webrtc.org/issues/42222877 +https://crbug.com/webrtc/1272,https://issues.webrtc.org/issues/42222878 +https://crbug.com/webrtc/12720,https://issues.webrtc.org/issues/42222879 +https://crbug.com/webrtc/12721,https://issues.webrtc.org/issues/42222880 +https://crbug.com/webrtc/12722,https://issues.webrtc.org/issues/42222881 +https://crbug.com/webrtc/12724,https://issues.webrtc.org/issues/42222882 +https://crbug.com/webrtc/12725,https://issues.webrtc.org/issues/42222883 +https://crbug.com/webrtc/12726,https://issues.webrtc.org/issues/42222884 +https://crbug.com/webrtc/12727,https://issues.webrtc.org/issues/42222885 +https://crbug.com/webrtc/12728,https://issues.webrtc.org/issues/42222886 +https://crbug.com/webrtc/12729,https://issues.webrtc.org/issues/42222887 +https://crbug.com/webrtc/1273,https://issues.webrtc.org/issues/42222888 +https://crbug.com/webrtc/12730,https://issues.webrtc.org/issues/42222889 +https://crbug.com/webrtc/12732,https://issues.webrtc.org/issues/42222890 +https://crbug.com/webrtc/12733,https://issues.webrtc.org/issues/42222891 +https://crbug.com/webrtc/12734,https://issues.webrtc.org/issues/42222892 +https://crbug.com/webrtc/12735,https://issues.webrtc.org/issues/42222893 +https://crbug.com/webrtc/12736,https://issues.webrtc.org/issues/42222894 +https://crbug.com/webrtc/12737,https://issues.webrtc.org/issues/42222895 +https://crbug.com/webrtc/12738,https://issues.webrtc.org/issues/42222896 +https://crbug.com/webrtc/12739,https://issues.webrtc.org/issues/42222897 +https://crbug.com/webrtc/1274,https://issues.webrtc.org/issues/42222898 +https://crbug.com/webrtc/12740,https://issues.webrtc.org/issues/42222899 +https://crbug.com/webrtc/12742,https://issues.webrtc.org/issues/42222900 +https://crbug.com/webrtc/12743,https://issues.webrtc.org/issues/42222901 +https://crbug.com/webrtc/12744,https://issues.webrtc.org/issues/42222902 +https://crbug.com/webrtc/12745,https://issues.webrtc.org/issues/42222903 +https://crbug.com/webrtc/12748,https://issues.webrtc.org/issues/42222904 +https://crbug.com/webrtc/12749,https://issues.webrtc.org/issues/42222905 +https://crbug.com/webrtc/1275,https://issues.webrtc.org/issues/42222906 +https://crbug.com/webrtc/12750,https://issues.webrtc.org/issues/42222907 +https://crbug.com/webrtc/12751,https://issues.webrtc.org/issues/42222908 +https://crbug.com/webrtc/12752,https://issues.webrtc.org/issues/42222909 +https://crbug.com/webrtc/12753,https://issues.webrtc.org/issues/42222910 +https://crbug.com/webrtc/12754,https://issues.webrtc.org/issues/42222911 +https://crbug.com/webrtc/12755,https://issues.webrtc.org/issues/42222912 +https://crbug.com/webrtc/12756,https://issues.webrtc.org/issues/42222913 +https://crbug.com/webrtc/12757,https://issues.webrtc.org/issues/42222914 +https://crbug.com/webrtc/12758,https://issues.webrtc.org/issues/42222915 +https://crbug.com/webrtc/12759,https://issues.webrtc.org/issues/42222916 +https://crbug.com/webrtc/1276,https://issues.webrtc.org/issues/42222917 +https://crbug.com/webrtc/12760,https://issues.webrtc.org/issues/42222918 +https://crbug.com/webrtc/12761,https://issues.webrtc.org/issues/42222919 +https://crbug.com/webrtc/12762,https://issues.webrtc.org/issues/42222920 +https://crbug.com/webrtc/12763,https://issues.webrtc.org/issues/42222921 +https://crbug.com/webrtc/12764,https://issues.webrtc.org/issues/42222922 
+https://crbug.com/webrtc/12765,https://issues.webrtc.org/issues/42222923 +https://crbug.com/webrtc/12766,https://issues.webrtc.org/issues/42222924 +https://crbug.com/webrtc/12767,https://issues.webrtc.org/issues/42222925 +https://crbug.com/webrtc/12768,https://issues.webrtc.org/issues/42222926 +https://crbug.com/webrtc/12769,https://issues.webrtc.org/issues/42222927 +https://crbug.com/webrtc/1277,https://issues.webrtc.org/issues/42222928 +https://crbug.com/webrtc/12770,https://issues.webrtc.org/issues/42222929 +https://crbug.com/webrtc/12771,https://issues.webrtc.org/issues/42222930 +https://crbug.com/webrtc/12772,https://issues.webrtc.org/issues/42222931 +https://crbug.com/webrtc/12773,https://issues.webrtc.org/issues/42222932 +https://crbug.com/webrtc/12774,https://issues.webrtc.org/issues/42222933 +https://crbug.com/webrtc/12775,https://issues.webrtc.org/issues/42222934 +https://crbug.com/webrtc/12776,https://issues.webrtc.org/issues/42222935 +https://crbug.com/webrtc/12777,https://issues.webrtc.org/issues/42222936 +https://crbug.com/webrtc/12778,https://issues.webrtc.org/issues/42222937 +https://crbug.com/webrtc/12779,https://issues.webrtc.org/issues/42222938 +https://crbug.com/webrtc/1278,https://issues.webrtc.org/issues/42222939 +https://crbug.com/webrtc/12780,https://issues.webrtc.org/issues/42222940 +https://crbug.com/webrtc/12781,https://issues.webrtc.org/issues/42222941 +https://crbug.com/webrtc/12782,https://issues.webrtc.org/issues/42222942 +https://crbug.com/webrtc/12783,https://issues.webrtc.org/issues/42222943 +https://crbug.com/webrtc/12784,https://issues.webrtc.org/issues/42222944 +https://crbug.com/webrtc/12785,https://issues.webrtc.org/issues/42222945 +https://crbug.com/webrtc/12786,https://issues.webrtc.org/issues/42222946 +https://crbug.com/webrtc/12787,https://issues.webrtc.org/issues/42222947 +https://crbug.com/webrtc/12788,https://issues.webrtc.org/issues/42222948 +https://crbug.com/webrtc/12789,https://issues.webrtc.org/issues/42222949 +https://crbug.com/webrtc/1279,https://issues.webrtc.org/issues/42222950 +https://crbug.com/webrtc/12791,https://issues.webrtc.org/issues/42222951 +https://crbug.com/webrtc/12792,https://issues.webrtc.org/issues/42222952 +https://crbug.com/webrtc/12793,https://issues.webrtc.org/issues/42222953 +https://crbug.com/webrtc/12794,https://issues.webrtc.org/issues/42222954 +https://crbug.com/webrtc/12795,https://issues.webrtc.org/issues/42222955 +https://crbug.com/webrtc/12797,https://issues.webrtc.org/issues/42222956 +https://crbug.com/webrtc/12798,https://issues.webrtc.org/issues/42222957 +https://crbug.com/webrtc/12799,https://issues.webrtc.org/issues/42222958 +https://crbug.com/webrtc/128,https://issues.webrtc.org/issues/42222959 +https://crbug.com/webrtc/1280,https://issues.webrtc.org/issues/42222960 +https://crbug.com/webrtc/12800,https://issues.webrtc.org/issues/42222961 +https://crbug.com/webrtc/12801,https://issues.webrtc.org/issues/42222962 +https://crbug.com/webrtc/12802,https://issues.webrtc.org/issues/42222963 +https://crbug.com/webrtc/12803,https://issues.webrtc.org/issues/42222964 +https://crbug.com/webrtc/12804,https://issues.webrtc.org/issues/42222965 +https://crbug.com/webrtc/12805,https://issues.webrtc.org/issues/42222966 +https://crbug.com/webrtc/12806,https://issues.webrtc.org/issues/42222967 +https://crbug.com/webrtc/12807,https://issues.webrtc.org/issues/42222968 +https://crbug.com/webrtc/12808,https://issues.webrtc.org/issues/42222969 +https://crbug.com/webrtc/12809,https://issues.webrtc.org/issues/42222970 
+https://crbug.com/webrtc/1281,https://issues.webrtc.org/issues/42222971 +https://crbug.com/webrtc/12810,https://issues.webrtc.org/issues/42222972 +https://crbug.com/webrtc/12811,https://issues.webrtc.org/issues/42222973 +https://crbug.com/webrtc/12812,https://issues.webrtc.org/issues/42222974 +https://crbug.com/webrtc/12813,https://issues.webrtc.org/issues/42222975 +https://crbug.com/webrtc/12814,https://issues.webrtc.org/issues/42222976 +https://crbug.com/webrtc/12815,https://issues.webrtc.org/issues/42222977 +https://crbug.com/webrtc/12816,https://issues.webrtc.org/issues/42222978 +https://crbug.com/webrtc/12817,https://issues.webrtc.org/issues/42222979 +https://crbug.com/webrtc/12818,https://issues.webrtc.org/issues/42222980 +https://crbug.com/webrtc/12819,https://issues.webrtc.org/issues/42222981 +https://crbug.com/webrtc/1282,https://issues.webrtc.org/issues/42222982 +https://crbug.com/webrtc/12820,https://issues.webrtc.org/issues/42222983 +https://crbug.com/webrtc/12821,https://issues.webrtc.org/issues/42222984 +https://crbug.com/webrtc/12822,https://issues.webrtc.org/issues/42222985 +https://crbug.com/webrtc/12823,https://issues.webrtc.org/issues/42222986 +https://crbug.com/webrtc/12824,https://issues.webrtc.org/issues/42222987 +https://crbug.com/webrtc/12825,https://issues.webrtc.org/issues/42222988 +https://crbug.com/webrtc/12826,https://issues.webrtc.org/issues/42222989 +https://crbug.com/webrtc/12827,https://issues.webrtc.org/issues/42222990 +https://crbug.com/webrtc/12828,https://issues.webrtc.org/issues/42222991 +https://crbug.com/webrtc/12829,https://issues.webrtc.org/issues/42222992 +https://crbug.com/webrtc/1283,https://issues.webrtc.org/issues/42222993 +https://crbug.com/webrtc/12830,https://issues.webrtc.org/issues/42222994 +https://crbug.com/webrtc/12831,https://issues.webrtc.org/issues/42222995 +https://crbug.com/webrtc/12832,https://issues.webrtc.org/issues/42222996 +https://crbug.com/webrtc/12833,https://issues.webrtc.org/issues/42222997 +https://crbug.com/webrtc/12834,https://issues.webrtc.org/issues/42222998 +https://crbug.com/webrtc/12835,https://issues.webrtc.org/issues/42222999 +https://crbug.com/webrtc/12836,https://issues.webrtc.org/issues/42223000 +https://crbug.com/webrtc/12837,https://issues.webrtc.org/issues/42223001 +https://crbug.com/webrtc/12838,https://issues.webrtc.org/issues/42223002 +https://crbug.com/webrtc/12839,https://issues.webrtc.org/issues/42223003 +https://crbug.com/webrtc/1284,https://issues.webrtc.org/issues/42223004 +https://crbug.com/webrtc/12840,https://issues.webrtc.org/issues/42223005 +https://crbug.com/webrtc/12841,https://issues.webrtc.org/issues/42223006 +https://crbug.com/webrtc/12842,https://issues.webrtc.org/issues/42223007 +https://crbug.com/webrtc/12843,https://issues.webrtc.org/issues/42223008 +https://crbug.com/webrtc/12844,https://issues.webrtc.org/issues/42223009 +https://crbug.com/webrtc/12845,https://issues.webrtc.org/issues/42223010 +https://crbug.com/webrtc/12846,https://issues.webrtc.org/issues/42223011 +https://crbug.com/webrtc/12847,https://issues.webrtc.org/issues/42223012 +https://crbug.com/webrtc/12848,https://issues.webrtc.org/issues/42223013 +https://crbug.com/webrtc/12849,https://issues.webrtc.org/issues/42223014 +https://crbug.com/webrtc/1285,https://issues.webrtc.org/issues/42223015 +https://crbug.com/webrtc/12850,https://issues.webrtc.org/issues/42223016 +https://crbug.com/webrtc/12851,https://issues.webrtc.org/issues/42223017 +https://crbug.com/webrtc/12852,https://issues.webrtc.org/issues/42223018 
+https://crbug.com/webrtc/12853,https://issues.webrtc.org/issues/42223019 +https://crbug.com/webrtc/12854,https://issues.webrtc.org/issues/42223020 +https://crbug.com/webrtc/12855,https://issues.webrtc.org/issues/42223021 +https://crbug.com/webrtc/12856,https://issues.webrtc.org/issues/42223022 +https://crbug.com/webrtc/12857,https://issues.webrtc.org/issues/42223023 +https://crbug.com/webrtc/12858,https://issues.webrtc.org/issues/42223024 +https://crbug.com/webrtc/12859,https://issues.webrtc.org/issues/42223025 +https://crbug.com/webrtc/1286,https://issues.webrtc.org/issues/42223026 +https://crbug.com/webrtc/12860,https://issues.webrtc.org/issues/42223027 +https://crbug.com/webrtc/12861,https://issues.webrtc.org/issues/42223028 +https://crbug.com/webrtc/12862,https://issues.webrtc.org/issues/42223029 +https://crbug.com/webrtc/12863,https://issues.webrtc.org/issues/42223030 +https://crbug.com/webrtc/12864,https://issues.webrtc.org/issues/42223031 +https://crbug.com/webrtc/12865,https://issues.webrtc.org/issues/42223032 +https://crbug.com/webrtc/12866,https://issues.webrtc.org/issues/42223033 +https://crbug.com/webrtc/12867,https://issues.webrtc.org/issues/42223034 +https://crbug.com/webrtc/12868,https://issues.webrtc.org/issues/42223035 +https://crbug.com/webrtc/12869,https://issues.webrtc.org/issues/42223036 +https://crbug.com/webrtc/1287,https://issues.webrtc.org/issues/42223037 +https://crbug.com/webrtc/12870,https://issues.webrtc.org/issues/42223038 +https://crbug.com/webrtc/12871,https://issues.webrtc.org/issues/42223039 +https://crbug.com/webrtc/12872,https://issues.webrtc.org/issues/42223040 +https://crbug.com/webrtc/12873,https://issues.webrtc.org/issues/42223041 +https://crbug.com/webrtc/12874,https://issues.webrtc.org/issues/42223042 +https://crbug.com/webrtc/12875,https://issues.webrtc.org/issues/42223043 +https://crbug.com/webrtc/12876,https://issues.webrtc.org/issues/42223044 +https://crbug.com/webrtc/12877,https://issues.webrtc.org/issues/42223045 +https://crbug.com/webrtc/12878,https://issues.webrtc.org/issues/42223046 +https://crbug.com/webrtc/12879,https://issues.webrtc.org/issues/42223047 +https://crbug.com/webrtc/1288,https://issues.webrtc.org/issues/42223048 +https://crbug.com/webrtc/12880,https://issues.webrtc.org/issues/42223049 +https://crbug.com/webrtc/12881,https://issues.webrtc.org/issues/42223050 +https://crbug.com/webrtc/12882,https://issues.webrtc.org/issues/42223051 +https://crbug.com/webrtc/12883,https://issues.webrtc.org/issues/42223052 +https://crbug.com/webrtc/12884,https://issues.webrtc.org/issues/42223053 +https://crbug.com/webrtc/12885,https://issues.webrtc.org/issues/42223054 +https://crbug.com/webrtc/12886,https://issues.webrtc.org/issues/42223055 +https://crbug.com/webrtc/12887,https://issues.webrtc.org/issues/42223056 +https://crbug.com/webrtc/12888,https://issues.webrtc.org/issues/42223057 +https://crbug.com/webrtc/12889,https://issues.webrtc.org/issues/42223058 +https://crbug.com/webrtc/1289,https://issues.webrtc.org/issues/42223059 +https://crbug.com/webrtc/12890,https://issues.webrtc.org/issues/42223060 +https://crbug.com/webrtc/12891,https://issues.webrtc.org/issues/42223061 +https://crbug.com/webrtc/12892,https://issues.webrtc.org/issues/42223062 +https://crbug.com/webrtc/12893,https://issues.webrtc.org/issues/42223063 +https://crbug.com/webrtc/12894,https://issues.webrtc.org/issues/42223064 +https://crbug.com/webrtc/12895,https://issues.webrtc.org/issues/42223065 +https://crbug.com/webrtc/12896,https://issues.webrtc.org/issues/42223066 
+https://crbug.com/webrtc/12897,https://issues.webrtc.org/issues/42223067 +https://crbug.com/webrtc/12898,https://issues.webrtc.org/issues/42223068 +https://crbug.com/webrtc/129,https://issues.webrtc.org/issues/42223069 +https://crbug.com/webrtc/1290,https://issues.webrtc.org/issues/42223070 +https://crbug.com/webrtc/12900,https://issues.webrtc.org/issues/42223071 +https://crbug.com/webrtc/12901,https://issues.webrtc.org/issues/42223072 +https://crbug.com/webrtc/12902,https://issues.webrtc.org/issues/42223073 +https://crbug.com/webrtc/12903,https://issues.webrtc.org/issues/42223074 +https://crbug.com/webrtc/12904,https://issues.webrtc.org/issues/42223075 +https://crbug.com/webrtc/12905,https://issues.webrtc.org/issues/42223076 +https://crbug.com/webrtc/12906,https://issues.webrtc.org/issues/42223077 +https://crbug.com/webrtc/12907,https://issues.webrtc.org/issues/42223078 +https://crbug.com/webrtc/12908,https://issues.webrtc.org/issues/42223079 +https://crbug.com/webrtc/12909,https://issues.webrtc.org/issues/42223080 +https://crbug.com/webrtc/1291,https://issues.webrtc.org/issues/42223081 +https://crbug.com/webrtc/12910,https://issues.webrtc.org/issues/42223082 +https://crbug.com/webrtc/12911,https://issues.webrtc.org/issues/42223083 +https://crbug.com/webrtc/12912,https://issues.webrtc.org/issues/42223084 +https://crbug.com/webrtc/12913,https://issues.webrtc.org/issues/42223085 +https://crbug.com/webrtc/12914,https://issues.webrtc.org/issues/42223086 +https://crbug.com/webrtc/12915,https://issues.webrtc.org/issues/42223087 +https://crbug.com/webrtc/12916,https://issues.webrtc.org/issues/42223088 +https://crbug.com/webrtc/12917,https://issues.webrtc.org/issues/42223089 +https://crbug.com/webrtc/12918,https://issues.webrtc.org/issues/42223090 +https://crbug.com/webrtc/12919,https://issues.webrtc.org/issues/42223091 +https://crbug.com/webrtc/1292,https://issues.webrtc.org/issues/42223092 +https://crbug.com/webrtc/12920,https://issues.webrtc.org/issues/42223093 +https://crbug.com/webrtc/12921,https://issues.webrtc.org/issues/42223094 +https://crbug.com/webrtc/12922,https://issues.webrtc.org/issues/42223095 +https://crbug.com/webrtc/12923,https://issues.webrtc.org/issues/42223096 +https://crbug.com/webrtc/12925,https://issues.webrtc.org/issues/42223097 +https://crbug.com/webrtc/12926,https://issues.webrtc.org/issues/42223098 +https://crbug.com/webrtc/12927,https://issues.webrtc.org/issues/42223099 +https://crbug.com/webrtc/12928,https://issues.webrtc.org/issues/42223100 +https://crbug.com/webrtc/12929,https://issues.webrtc.org/issues/42223101 +https://crbug.com/webrtc/1293,https://issues.webrtc.org/issues/42223102 +https://crbug.com/webrtc/12930,https://issues.webrtc.org/issues/42223103 +https://crbug.com/webrtc/12931,https://issues.webrtc.org/issues/42223104 +https://crbug.com/webrtc/12932,https://issues.webrtc.org/issues/42223105 +https://crbug.com/webrtc/12933,https://issues.webrtc.org/issues/42223106 +https://crbug.com/webrtc/12934,https://issues.webrtc.org/issues/42223107 +https://crbug.com/webrtc/12935,https://issues.webrtc.org/issues/42223108 +https://crbug.com/webrtc/12936,https://issues.webrtc.org/issues/42223109 +https://crbug.com/webrtc/12937,https://issues.webrtc.org/issues/42223110 +https://crbug.com/webrtc/12938,https://issues.webrtc.org/issues/42223111 +https://crbug.com/webrtc/1294,https://issues.webrtc.org/issues/42223112 +https://crbug.com/webrtc/12940,https://issues.webrtc.org/issues/42223113 +https://crbug.com/webrtc/12941,https://issues.webrtc.org/issues/42223114 
+https://crbug.com/webrtc/12942,https://issues.webrtc.org/issues/42223115 +https://crbug.com/webrtc/12943,https://issues.webrtc.org/issues/42223116 +https://crbug.com/webrtc/12944,https://issues.webrtc.org/issues/42223117 +https://crbug.com/webrtc/12945,https://issues.webrtc.org/issues/42223118 +https://crbug.com/webrtc/12946,https://issues.webrtc.org/issues/42223119 +https://crbug.com/webrtc/12947,https://issues.webrtc.org/issues/42223120 +https://crbug.com/webrtc/12948,https://issues.webrtc.org/issues/42223121 +https://crbug.com/webrtc/12949,https://issues.webrtc.org/issues/42223122 +https://crbug.com/webrtc/1295,https://issues.webrtc.org/issues/42223123 +https://crbug.com/webrtc/12951,https://issues.webrtc.org/issues/42223124 +https://crbug.com/webrtc/12952,https://issues.webrtc.org/issues/42223125 +https://crbug.com/webrtc/12953,https://issues.webrtc.org/issues/42223126 +https://crbug.com/webrtc/12954,https://issues.webrtc.org/issues/42223127 +https://crbug.com/webrtc/12955,https://issues.webrtc.org/issues/42223128 +https://crbug.com/webrtc/12956,https://issues.webrtc.org/issues/42223129 +https://crbug.com/webrtc/12957,https://issues.webrtc.org/issues/42223130 +https://crbug.com/webrtc/12958,https://issues.webrtc.org/issues/42223131 +https://crbug.com/webrtc/12959,https://issues.webrtc.org/issues/42223132 +https://crbug.com/webrtc/1296,https://issues.webrtc.org/issues/42223133 +https://crbug.com/webrtc/12961,https://issues.webrtc.org/issues/42223134 +https://crbug.com/webrtc/12962,https://issues.webrtc.org/issues/42223135 +https://crbug.com/webrtc/12963,https://issues.webrtc.org/issues/42223136 +https://crbug.com/webrtc/12964,https://issues.webrtc.org/issues/42223137 +https://crbug.com/webrtc/12965,https://issues.webrtc.org/issues/42223138 +https://crbug.com/webrtc/12966,https://issues.webrtc.org/issues/42223139 +https://crbug.com/webrtc/12967,https://issues.webrtc.org/issues/42223140 +https://crbug.com/webrtc/12968,https://issues.webrtc.org/issues/42223141 +https://crbug.com/webrtc/12969,https://issues.webrtc.org/issues/42223142 +https://crbug.com/webrtc/1297,https://issues.webrtc.org/issues/42223143 +https://crbug.com/webrtc/12970,https://issues.webrtc.org/issues/42223144 +https://crbug.com/webrtc/12971,https://issues.webrtc.org/issues/42223145 +https://crbug.com/webrtc/12972,https://issues.webrtc.org/issues/42223146 +https://crbug.com/webrtc/12973,https://issues.webrtc.org/issues/42223147 +https://crbug.com/webrtc/12974,https://issues.webrtc.org/issues/42223148 +https://crbug.com/webrtc/12975,https://issues.webrtc.org/issues/42223149 +https://crbug.com/webrtc/12976,https://issues.webrtc.org/issues/42223150 +https://crbug.com/webrtc/12977,https://issues.webrtc.org/issues/42223151 +https://crbug.com/webrtc/12978,https://issues.webrtc.org/issues/42223152 +https://crbug.com/webrtc/12979,https://issues.webrtc.org/issues/42223153 +https://crbug.com/webrtc/1298,https://issues.webrtc.org/issues/42223154 +https://crbug.com/webrtc/12980,https://issues.webrtc.org/issues/42223155 +https://crbug.com/webrtc/12981,https://issues.webrtc.org/issues/42223156 +https://crbug.com/webrtc/12982,https://issues.webrtc.org/issues/42223157 +https://crbug.com/webrtc/12983,https://issues.webrtc.org/issues/42223158 +https://crbug.com/webrtc/12984,https://issues.webrtc.org/issues/42223159 +https://crbug.com/webrtc/12985,https://issues.webrtc.org/issues/42223160 +https://crbug.com/webrtc/12986,https://issues.webrtc.org/issues/42223161 +https://crbug.com/webrtc/12987,https://issues.webrtc.org/issues/42223162 
+https://crbug.com/webrtc/12989,https://issues.webrtc.org/issues/42223163 +https://crbug.com/webrtc/1299,https://issues.webrtc.org/issues/42223164 +https://crbug.com/webrtc/12990,https://issues.webrtc.org/issues/42223165 +https://crbug.com/webrtc/12991,https://issues.webrtc.org/issues/42223166 +https://crbug.com/webrtc/12992,https://issues.webrtc.org/issues/42223167 +https://crbug.com/webrtc/12993,https://issues.webrtc.org/issues/42223168 +https://crbug.com/webrtc/12994,https://issues.webrtc.org/issues/42223169 +https://crbug.com/webrtc/12995,https://issues.webrtc.org/issues/42223170 +https://crbug.com/webrtc/12996,https://issues.webrtc.org/issues/42223171 +https://crbug.com/webrtc/12997,https://issues.webrtc.org/issues/42223172 +https://crbug.com/webrtc/12998,https://issues.webrtc.org/issues/42223173 +https://crbug.com/webrtc/12999,https://issues.webrtc.org/issues/42223174 +https://crbug.com/webrtc/13,https://issues.webrtc.org/issues/42223175 +https://crbug.com/webrtc/130,https://issues.webrtc.org/issues/42223176 +https://crbug.com/webrtc/1300,https://issues.webrtc.org/issues/42223177 +https://crbug.com/webrtc/13000,https://issues.webrtc.org/issues/42223178 +https://crbug.com/webrtc/13001,https://issues.webrtc.org/issues/42223179 +https://crbug.com/webrtc/13002,https://issues.webrtc.org/issues/42223180 +https://crbug.com/webrtc/13003,https://issues.webrtc.org/issues/42223181 +https://crbug.com/webrtc/13004,https://issues.webrtc.org/issues/42223182 +https://crbug.com/webrtc/13005,https://issues.webrtc.org/issues/42223183 +https://crbug.com/webrtc/13006,https://issues.webrtc.org/issues/42223184 +https://crbug.com/webrtc/13007,https://issues.webrtc.org/issues/42223185 +https://crbug.com/webrtc/13008,https://issues.webrtc.org/issues/42223186 +https://crbug.com/webrtc/13009,https://issues.webrtc.org/issues/42223187 +https://crbug.com/webrtc/1301,https://issues.webrtc.org/issues/42223188 +https://crbug.com/webrtc/13010,https://issues.webrtc.org/issues/42223189 +https://crbug.com/webrtc/13011,https://issues.webrtc.org/issues/42223190 +https://crbug.com/webrtc/13012,https://issues.webrtc.org/issues/42223191 +https://crbug.com/webrtc/13013,https://issues.webrtc.org/issues/42223192 +https://crbug.com/webrtc/13014,https://issues.webrtc.org/issues/42223193 +https://crbug.com/webrtc/13015,https://issues.webrtc.org/issues/42223194 +https://crbug.com/webrtc/13016,https://issues.webrtc.org/issues/42223195 +https://crbug.com/webrtc/13017,https://issues.webrtc.org/issues/42223196 +https://crbug.com/webrtc/13018,https://issues.webrtc.org/issues/42223197 +https://crbug.com/webrtc/13019,https://issues.webrtc.org/issues/42223198 +https://crbug.com/webrtc/1302,https://issues.webrtc.org/issues/42223199 +https://crbug.com/webrtc/13020,https://issues.webrtc.org/issues/42223200 +https://crbug.com/webrtc/13021,https://issues.webrtc.org/issues/42223201 +https://crbug.com/webrtc/13022,https://issues.webrtc.org/issues/42223202 +https://crbug.com/webrtc/13023,https://issues.webrtc.org/issues/42223203 +https://crbug.com/webrtc/13024,https://issues.webrtc.org/issues/42223204 +https://crbug.com/webrtc/13025,https://issues.webrtc.org/issues/42223205 +https://crbug.com/webrtc/13026,https://issues.webrtc.org/issues/42223206 +https://crbug.com/webrtc/13027,https://issues.webrtc.org/issues/42223207 +https://crbug.com/webrtc/13028,https://issues.webrtc.org/issues/42223208 +https://crbug.com/webrtc/13029,https://issues.webrtc.org/issues/42223209 +https://crbug.com/webrtc/1303,https://issues.webrtc.org/issues/42223210 
+https://crbug.com/webrtc/13031,https://issues.webrtc.org/issues/42223211 +https://crbug.com/webrtc/13032,https://issues.webrtc.org/issues/42223212 +https://crbug.com/webrtc/13033,https://issues.webrtc.org/issues/42223213 +https://crbug.com/webrtc/13035,https://issues.webrtc.org/issues/42223214 +https://crbug.com/webrtc/13036,https://issues.webrtc.org/issues/42223215 +https://crbug.com/webrtc/13037,https://issues.webrtc.org/issues/42223216 +https://crbug.com/webrtc/13038,https://issues.webrtc.org/issues/42223217 +https://crbug.com/webrtc/13039,https://issues.webrtc.org/issues/42223218 +https://crbug.com/webrtc/1304,https://issues.webrtc.org/issues/42223219 +https://crbug.com/webrtc/13040,https://issues.webrtc.org/issues/42223220 +https://crbug.com/webrtc/13041,https://issues.webrtc.org/issues/42223221 +https://crbug.com/webrtc/13042,https://issues.webrtc.org/issues/42223222 +https://crbug.com/webrtc/13043,https://issues.webrtc.org/issues/42223223 +https://crbug.com/webrtc/13044,https://issues.webrtc.org/issues/42223224 +https://crbug.com/webrtc/13045,https://issues.webrtc.org/issues/42223225 +https://crbug.com/webrtc/13046,https://issues.webrtc.org/issues/42223226 +https://crbug.com/webrtc/13047,https://issues.webrtc.org/issues/42223227 +https://crbug.com/webrtc/13048,https://issues.webrtc.org/issues/42223228 +https://crbug.com/webrtc/13049,https://issues.webrtc.org/issues/42223229 +https://crbug.com/webrtc/1305,https://issues.webrtc.org/issues/42223230 +https://crbug.com/webrtc/13050,https://issues.webrtc.org/issues/42223231 +https://crbug.com/webrtc/13051,https://issues.webrtc.org/issues/42223232 +https://crbug.com/webrtc/13052,https://issues.webrtc.org/issues/42223233 +https://crbug.com/webrtc/13053,https://issues.webrtc.org/issues/42223234 +https://crbug.com/webrtc/13054,https://issues.webrtc.org/issues/42223235 +https://crbug.com/webrtc/13055,https://issues.webrtc.org/issues/42223236 +https://crbug.com/webrtc/13056,https://issues.webrtc.org/issues/42223237 +https://crbug.com/webrtc/13057,https://issues.webrtc.org/issues/42223238 +https://crbug.com/webrtc/13058,https://issues.webrtc.org/issues/42223239 +https://crbug.com/webrtc/13059,https://issues.webrtc.org/issues/42223240 +https://crbug.com/webrtc/1306,https://issues.webrtc.org/issues/42223241 +https://crbug.com/webrtc/13060,https://issues.webrtc.org/issues/42223242 +https://crbug.com/webrtc/13061,https://issues.webrtc.org/issues/42223243 +https://crbug.com/webrtc/13063,https://issues.webrtc.org/issues/42223244 +https://crbug.com/webrtc/13064,https://issues.webrtc.org/issues/42223245 +https://crbug.com/webrtc/13065,https://issues.webrtc.org/issues/42223246 +https://crbug.com/webrtc/13066,https://issues.webrtc.org/issues/42223247 +https://crbug.com/webrtc/13067,https://issues.webrtc.org/issues/42223248 +https://crbug.com/webrtc/13068,https://issues.webrtc.org/issues/42223249 +https://crbug.com/webrtc/13069,https://issues.webrtc.org/issues/42223250 +https://crbug.com/webrtc/1307,https://issues.webrtc.org/issues/42223251 +https://crbug.com/webrtc/13070,https://issues.webrtc.org/issues/42223252 +https://crbug.com/webrtc/13071,https://issues.webrtc.org/issues/42223253 +https://crbug.com/webrtc/13072,https://issues.webrtc.org/issues/42223254 +https://crbug.com/webrtc/13074,https://issues.webrtc.org/issues/42223255 +https://crbug.com/webrtc/13075,https://issues.webrtc.org/issues/42223256 +https://crbug.com/webrtc/13076,https://issues.webrtc.org/issues/42223257 +https://crbug.com/webrtc/13077,https://issues.webrtc.org/issues/42223258 
+https://crbug.com/webrtc/13078,https://issues.webrtc.org/issues/42223259 +https://crbug.com/webrtc/13079,https://issues.webrtc.org/issues/42223260 +https://crbug.com/webrtc/1308,https://issues.webrtc.org/issues/42223261 +https://crbug.com/webrtc/13080,https://issues.webrtc.org/issues/42223262 +https://crbug.com/webrtc/13081,https://issues.webrtc.org/issues/42223263 +https://crbug.com/webrtc/13082,https://issues.webrtc.org/issues/42223264 +https://crbug.com/webrtc/13083,https://issues.webrtc.org/issues/42223265 +https://crbug.com/webrtc/13084,https://issues.webrtc.org/issues/42223266 +https://crbug.com/webrtc/13085,https://issues.webrtc.org/issues/42223267 +https://crbug.com/webrtc/13086,https://issues.webrtc.org/issues/42223268 +https://crbug.com/webrtc/13087,https://issues.webrtc.org/issues/42223269 +https://crbug.com/webrtc/13088,https://issues.webrtc.org/issues/42223270 +https://crbug.com/webrtc/13089,https://issues.webrtc.org/issues/42223271 +https://crbug.com/webrtc/1309,https://issues.webrtc.org/issues/42223272 +https://crbug.com/webrtc/13090,https://issues.webrtc.org/issues/42223273 +https://crbug.com/webrtc/13091,https://issues.webrtc.org/issues/42223274 +https://crbug.com/webrtc/13092,https://issues.webrtc.org/issues/42223275 +https://crbug.com/webrtc/13093,https://issues.webrtc.org/issues/42223276 +https://crbug.com/webrtc/13094,https://issues.webrtc.org/issues/42223277 +https://crbug.com/webrtc/13095,https://issues.webrtc.org/issues/42223278 +https://crbug.com/webrtc/13096,https://issues.webrtc.org/issues/42223279 +https://crbug.com/webrtc/13097,https://issues.webrtc.org/issues/42223280 +https://crbug.com/webrtc/13098,https://issues.webrtc.org/issues/42223281 +https://crbug.com/webrtc/13099,https://issues.webrtc.org/issues/42223282 +https://crbug.com/webrtc/131,https://issues.webrtc.org/issues/42223283 +https://crbug.com/webrtc/1310,https://issues.webrtc.org/issues/42223284 +https://crbug.com/webrtc/13100,https://issues.webrtc.org/issues/42223285 +https://crbug.com/webrtc/13101,https://issues.webrtc.org/issues/42223286 +https://crbug.com/webrtc/13102,https://issues.webrtc.org/issues/42223287 +https://crbug.com/webrtc/13103,https://issues.webrtc.org/issues/42223288 +https://crbug.com/webrtc/13104,https://issues.webrtc.org/issues/42223289 +https://crbug.com/webrtc/13105,https://issues.webrtc.org/issues/42223290 +https://crbug.com/webrtc/13106,https://issues.webrtc.org/issues/42223291 +https://crbug.com/webrtc/13107,https://issues.webrtc.org/issues/42223292 +https://crbug.com/webrtc/13108,https://issues.webrtc.org/issues/42223293 +https://crbug.com/webrtc/1311,https://issues.webrtc.org/issues/42223294 +https://crbug.com/webrtc/13110,https://issues.webrtc.org/issues/42223295 +https://crbug.com/webrtc/13111,https://issues.webrtc.org/issues/42223296 +https://crbug.com/webrtc/13112,https://issues.webrtc.org/issues/42223297 +https://crbug.com/webrtc/13113,https://issues.webrtc.org/issues/42223298 +https://crbug.com/webrtc/13114,https://issues.webrtc.org/issues/42223299 +https://crbug.com/webrtc/13116,https://issues.webrtc.org/issues/42223300 +https://crbug.com/webrtc/13117,https://issues.webrtc.org/issues/42223301 +https://crbug.com/webrtc/13118,https://issues.webrtc.org/issues/42223302 +https://crbug.com/webrtc/13119,https://issues.webrtc.org/issues/42223303 +https://crbug.com/webrtc/1312,https://issues.webrtc.org/issues/42223304 +https://crbug.com/webrtc/13121,https://issues.webrtc.org/issues/42223305 +https://crbug.com/webrtc/13122,https://issues.webrtc.org/issues/42223306 
+https://crbug.com/webrtc/13123,https://issues.webrtc.org/issues/42223307 +https://crbug.com/webrtc/13124,https://issues.webrtc.org/issues/42223308 +https://crbug.com/webrtc/13125,https://issues.webrtc.org/issues/42223309 +https://crbug.com/webrtc/13126,https://issues.webrtc.org/issues/42223310 +https://crbug.com/webrtc/13127,https://issues.webrtc.org/issues/42223311 +https://crbug.com/webrtc/13128,https://issues.webrtc.org/issues/42223312 +https://crbug.com/webrtc/13129,https://issues.webrtc.org/issues/42223313 +https://crbug.com/webrtc/1313,https://issues.webrtc.org/issues/42223314 +https://crbug.com/webrtc/13130,https://issues.webrtc.org/issues/42223315 +https://crbug.com/webrtc/13131,https://issues.webrtc.org/issues/42223316 +https://crbug.com/webrtc/13132,https://issues.webrtc.org/issues/42223317 +https://crbug.com/webrtc/13133,https://issues.webrtc.org/issues/42223318 +https://crbug.com/webrtc/13134,https://issues.webrtc.org/issues/42223319 +https://crbug.com/webrtc/13135,https://issues.webrtc.org/issues/42223320 +https://crbug.com/webrtc/13136,https://issues.webrtc.org/issues/42223321 +https://crbug.com/webrtc/13137,https://issues.webrtc.org/issues/42223322 +https://crbug.com/webrtc/13138,https://issues.webrtc.org/issues/42223323 +https://crbug.com/webrtc/13139,https://issues.webrtc.org/issues/42223324 +https://crbug.com/webrtc/1314,https://issues.webrtc.org/issues/42223325 +https://crbug.com/webrtc/13140,https://issues.webrtc.org/issues/42223326 +https://crbug.com/webrtc/13141,https://issues.webrtc.org/issues/42223327 +https://crbug.com/webrtc/13142,https://issues.webrtc.org/issues/42223328 +https://crbug.com/webrtc/13143,https://issues.webrtc.org/issues/42223329 +https://crbug.com/webrtc/13144,https://issues.webrtc.org/issues/42223330 +https://crbug.com/webrtc/13145,https://issues.webrtc.org/issues/42223331 +https://crbug.com/webrtc/13146,https://issues.webrtc.org/issues/42223332 +https://crbug.com/webrtc/13147,https://issues.webrtc.org/issues/42223333 +https://crbug.com/webrtc/13148,https://issues.webrtc.org/issues/42223334 +https://crbug.com/webrtc/13149,https://issues.webrtc.org/issues/42223335 +https://crbug.com/webrtc/1315,https://issues.webrtc.org/issues/42223336 +https://crbug.com/webrtc/13150,https://issues.webrtc.org/issues/42223337 +https://crbug.com/webrtc/13151,https://issues.webrtc.org/issues/42223338 +https://crbug.com/webrtc/13152,https://issues.webrtc.org/issues/42223339 +https://crbug.com/webrtc/13153,https://issues.webrtc.org/issues/42223340 +https://crbug.com/webrtc/13154,https://issues.webrtc.org/issues/42223341 +https://crbug.com/webrtc/13155,https://issues.webrtc.org/issues/42223342 +https://crbug.com/webrtc/13156,https://issues.webrtc.org/issues/42223343 +https://crbug.com/webrtc/13157,https://issues.webrtc.org/issues/42223344 +https://crbug.com/webrtc/13158,https://issues.webrtc.org/issues/42223345 +https://crbug.com/webrtc/13159,https://issues.webrtc.org/issues/42223346 +https://crbug.com/webrtc/13160,https://issues.webrtc.org/issues/42223347 +https://crbug.com/webrtc/13161,https://issues.webrtc.org/issues/42223348 +https://crbug.com/webrtc/13162,https://issues.webrtc.org/issues/42223349 +https://crbug.com/webrtc/13163,https://issues.webrtc.org/issues/42223350 +https://crbug.com/webrtc/13164,https://issues.webrtc.org/issues/42223351 +https://crbug.com/webrtc/13165,https://issues.webrtc.org/issues/42223352 +https://crbug.com/webrtc/13166,https://issues.webrtc.org/issues/42223353 +https://crbug.com/webrtc/13167,https://issues.webrtc.org/issues/42223354 
+https://crbug.com/webrtc/13168,https://issues.webrtc.org/issues/42223355 +https://crbug.com/webrtc/13169,https://issues.webrtc.org/issues/42223356 +https://crbug.com/webrtc/1317,https://issues.webrtc.org/issues/42223357 +https://crbug.com/webrtc/13170,https://issues.webrtc.org/issues/42223358 +https://crbug.com/webrtc/13171,https://issues.webrtc.org/issues/42223359 +https://crbug.com/webrtc/13172,https://issues.webrtc.org/issues/42223360 +https://crbug.com/webrtc/13173,https://issues.webrtc.org/issues/42223361 +https://crbug.com/webrtc/13174,https://issues.webrtc.org/issues/42223362 +https://crbug.com/webrtc/13175,https://issues.webrtc.org/issues/42223363 +https://crbug.com/webrtc/13176,https://issues.webrtc.org/issues/42223364 +https://crbug.com/webrtc/13177,https://issues.webrtc.org/issues/42223365 +https://crbug.com/webrtc/13178,https://issues.webrtc.org/issues/42223366 +https://crbug.com/webrtc/13179,https://issues.webrtc.org/issues/42223367 +https://crbug.com/webrtc/1318,https://issues.webrtc.org/issues/42223368 +https://crbug.com/webrtc/13180,https://issues.webrtc.org/issues/42223369 +https://crbug.com/webrtc/13181,https://issues.webrtc.org/issues/42223370 +https://crbug.com/webrtc/13182,https://issues.webrtc.org/issues/42223371 +https://crbug.com/webrtc/13183,https://issues.webrtc.org/issues/42223372 +https://crbug.com/webrtc/13184,https://issues.webrtc.org/issues/42223373 +https://crbug.com/webrtc/13185,https://issues.webrtc.org/issues/42223374 +https://crbug.com/webrtc/13187,https://issues.webrtc.org/issues/42223375 +https://crbug.com/webrtc/13188,https://issues.webrtc.org/issues/42223376 +https://crbug.com/webrtc/13189,https://issues.webrtc.org/issues/42223377 +https://crbug.com/webrtc/1319,https://issues.webrtc.org/issues/42223378 +https://crbug.com/webrtc/13190,https://issues.webrtc.org/issues/42223379 +https://crbug.com/webrtc/13191,https://issues.webrtc.org/issues/42223380 +https://crbug.com/webrtc/13192,https://issues.webrtc.org/issues/42223381 +https://crbug.com/webrtc/13193,https://issues.webrtc.org/issues/42223382 +https://crbug.com/webrtc/13194,https://issues.webrtc.org/issues/42223383 +https://crbug.com/webrtc/13195,https://issues.webrtc.org/issues/42223384 +https://crbug.com/webrtc/13196,https://issues.webrtc.org/issues/42223385 +https://crbug.com/webrtc/13198,https://issues.webrtc.org/issues/42223386 +https://crbug.com/webrtc/13199,https://issues.webrtc.org/issues/42223387 +https://crbug.com/webrtc/132,https://issues.webrtc.org/issues/42223388 +https://crbug.com/webrtc/1320,https://issues.webrtc.org/issues/42223389 +https://crbug.com/webrtc/13200,https://issues.webrtc.org/issues/42223390 +https://crbug.com/webrtc/13201,https://issues.webrtc.org/issues/42223391 +https://crbug.com/webrtc/13202,https://issues.webrtc.org/issues/42223392 +https://crbug.com/webrtc/13203,https://issues.webrtc.org/issues/42223393 +https://crbug.com/webrtc/13204,https://issues.webrtc.org/issues/42223394 +https://crbug.com/webrtc/13205,https://issues.webrtc.org/issues/42223395 +https://crbug.com/webrtc/13206,https://issues.webrtc.org/issues/42223396 +https://crbug.com/webrtc/13207,https://issues.webrtc.org/issues/42223397 +https://crbug.com/webrtc/13208,https://issues.webrtc.org/issues/42223398 +https://crbug.com/webrtc/13209,https://issues.webrtc.org/issues/42223399 +https://crbug.com/webrtc/1321,https://issues.webrtc.org/issues/42223400 +https://crbug.com/webrtc/13210,https://issues.webrtc.org/issues/42223401 +https://crbug.com/webrtc/13211,https://issues.webrtc.org/issues/42223402 
+https://crbug.com/webrtc/13212,https://issues.webrtc.org/issues/42223403 +https://crbug.com/webrtc/13214,https://issues.webrtc.org/issues/42223404 +https://crbug.com/webrtc/13215,https://issues.webrtc.org/issues/42223405 +https://crbug.com/webrtc/13216,https://issues.webrtc.org/issues/42223406 +https://crbug.com/webrtc/13217,https://issues.webrtc.org/issues/42223407 +https://crbug.com/webrtc/13218,https://issues.webrtc.org/issues/42223408 +https://crbug.com/webrtc/13219,https://issues.webrtc.org/issues/42223409 +https://crbug.com/webrtc/1322,https://issues.webrtc.org/issues/42223410 +https://crbug.com/webrtc/13221,https://issues.webrtc.org/issues/42223411 +https://crbug.com/webrtc/13222,https://issues.webrtc.org/issues/42223412 +https://crbug.com/webrtc/13223,https://issues.webrtc.org/issues/42223413 +https://crbug.com/webrtc/13224,https://issues.webrtc.org/issues/42223414 +https://crbug.com/webrtc/13225,https://issues.webrtc.org/issues/42223415 +https://crbug.com/webrtc/13226,https://issues.webrtc.org/issues/42223416 +https://crbug.com/webrtc/13227,https://issues.webrtc.org/issues/42223417 +https://crbug.com/webrtc/13228,https://issues.webrtc.org/issues/42223418 +https://crbug.com/webrtc/13229,https://issues.webrtc.org/issues/42223419 +https://crbug.com/webrtc/1323,https://issues.webrtc.org/issues/42223420 +https://crbug.com/webrtc/13230,https://issues.webrtc.org/issues/42223421 +https://crbug.com/webrtc/13231,https://issues.webrtc.org/issues/42223422 +https://crbug.com/webrtc/13232,https://issues.webrtc.org/issues/42223423 +https://crbug.com/webrtc/13233,https://issues.webrtc.org/issues/42223424 +https://crbug.com/webrtc/13234,https://issues.webrtc.org/issues/42223425 +https://crbug.com/webrtc/13235,https://issues.webrtc.org/issues/42223426 +https://crbug.com/webrtc/13236,https://issues.webrtc.org/issues/42223427 +https://crbug.com/webrtc/13237,https://issues.webrtc.org/issues/42223428 +https://crbug.com/webrtc/13238,https://issues.webrtc.org/issues/42223429 +https://crbug.com/webrtc/13239,https://issues.webrtc.org/issues/42223430 +https://crbug.com/webrtc/1324,https://issues.webrtc.org/issues/42223431 +https://crbug.com/webrtc/13240,https://issues.webrtc.org/issues/42223432 +https://crbug.com/webrtc/13241,https://issues.webrtc.org/issues/42223433 +https://crbug.com/webrtc/13242,https://issues.webrtc.org/issues/42223434 +https://crbug.com/webrtc/13243,https://issues.webrtc.org/issues/42223435 +https://crbug.com/webrtc/13244,https://issues.webrtc.org/issues/42223436 +https://crbug.com/webrtc/13245,https://issues.webrtc.org/issues/42223437 +https://crbug.com/webrtc/13246,https://issues.webrtc.org/issues/42223438 +https://crbug.com/webrtc/13247,https://issues.webrtc.org/issues/42223439 +https://crbug.com/webrtc/13248,https://issues.webrtc.org/issues/42223440 +https://crbug.com/webrtc/13249,https://issues.webrtc.org/issues/42223441 +https://crbug.com/webrtc/1325,https://issues.webrtc.org/issues/42223442 +https://crbug.com/webrtc/13250,https://issues.webrtc.org/issues/42223443 +https://crbug.com/webrtc/13251,https://issues.webrtc.org/issues/42223444 +https://crbug.com/webrtc/13252,https://issues.webrtc.org/issues/42223445 +https://crbug.com/webrtc/13253,https://issues.webrtc.org/issues/42223446 +https://crbug.com/webrtc/13254,https://issues.webrtc.org/issues/42223447 +https://crbug.com/webrtc/13255,https://issues.webrtc.org/issues/42223448 +https://crbug.com/webrtc/13256,https://issues.webrtc.org/issues/42223449 +https://crbug.com/webrtc/13257,https://issues.webrtc.org/issues/42223450 
+https://crbug.com/webrtc/13258,https://issues.webrtc.org/issues/42223451 +https://crbug.com/webrtc/13259,https://issues.webrtc.org/issues/42223452 +https://crbug.com/webrtc/1326,https://issues.webrtc.org/issues/42223453 +https://crbug.com/webrtc/13260,https://issues.webrtc.org/issues/42223454 +https://crbug.com/webrtc/13261,https://issues.webrtc.org/issues/42223455 +https://crbug.com/webrtc/13262,https://issues.webrtc.org/issues/42223456 +https://crbug.com/webrtc/13263,https://issues.webrtc.org/issues/42223457 +https://crbug.com/webrtc/13264,https://issues.webrtc.org/issues/42223458 +https://crbug.com/webrtc/13265,https://issues.webrtc.org/issues/42223459 +https://crbug.com/webrtc/13266,https://issues.webrtc.org/issues/42223460 +https://crbug.com/webrtc/13267,https://issues.webrtc.org/issues/42223461 +https://crbug.com/webrtc/13268,https://issues.webrtc.org/issues/42223462 +https://crbug.com/webrtc/13269,https://issues.webrtc.org/issues/42223463 +https://crbug.com/webrtc/1327,https://issues.webrtc.org/issues/42223464 +https://crbug.com/webrtc/13270,https://issues.webrtc.org/issues/42223465 +https://crbug.com/webrtc/13271,https://issues.webrtc.org/issues/42223466 +https://crbug.com/webrtc/13272,https://issues.webrtc.org/issues/42223467 +https://crbug.com/webrtc/13273,https://issues.webrtc.org/issues/42223468 +https://crbug.com/webrtc/13274,https://issues.webrtc.org/issues/42223469 +https://crbug.com/webrtc/13276,https://issues.webrtc.org/issues/42223470 +https://crbug.com/webrtc/13277,https://issues.webrtc.org/issues/42223471 +https://crbug.com/webrtc/13278,https://issues.webrtc.org/issues/42223472 +https://crbug.com/webrtc/13279,https://issues.webrtc.org/issues/42223473 +https://crbug.com/webrtc/1328,https://issues.webrtc.org/issues/42223474 +https://crbug.com/webrtc/13280,https://issues.webrtc.org/issues/42223475 +https://crbug.com/webrtc/13281,https://issues.webrtc.org/issues/42223476 +https://crbug.com/webrtc/13282,https://issues.webrtc.org/issues/42223477 +https://crbug.com/webrtc/13283,https://issues.webrtc.org/issues/42223478 +https://crbug.com/webrtc/13284,https://issues.webrtc.org/issues/42223479 +https://crbug.com/webrtc/13285,https://issues.webrtc.org/issues/42223480 +https://crbug.com/webrtc/13286,https://issues.webrtc.org/issues/42223481 +https://crbug.com/webrtc/13287,https://issues.webrtc.org/issues/42223482 +https://crbug.com/webrtc/13288,https://issues.webrtc.org/issues/42223483 +https://crbug.com/webrtc/13289,https://issues.webrtc.org/issues/42223484 +https://crbug.com/webrtc/1329,https://issues.webrtc.org/issues/42223485 +https://crbug.com/webrtc/13290,https://issues.webrtc.org/issues/42223486 +https://crbug.com/webrtc/13291,https://issues.webrtc.org/issues/42223487 +https://crbug.com/webrtc/13292,https://issues.webrtc.org/issues/42223488 +https://crbug.com/webrtc/13293,https://issues.webrtc.org/issues/42223489 +https://crbug.com/webrtc/13294,https://issues.webrtc.org/issues/42223490 +https://crbug.com/webrtc/13295,https://issues.webrtc.org/issues/42223491 +https://crbug.com/webrtc/13296,https://issues.webrtc.org/issues/42223492 +https://crbug.com/webrtc/13297,https://issues.webrtc.org/issues/42223493 +https://crbug.com/webrtc/13298,https://issues.webrtc.org/issues/42223494 +https://crbug.com/webrtc/13299,https://issues.webrtc.org/issues/42223495 +https://crbug.com/webrtc/133,https://issues.webrtc.org/issues/42223496 +https://crbug.com/webrtc/1330,https://issues.webrtc.org/issues/42223497 +https://crbug.com/webrtc/13300,https://issues.webrtc.org/issues/42223498 
+https://crbug.com/webrtc/13301,https://issues.webrtc.org/issues/42223499 +https://crbug.com/webrtc/13302,https://issues.webrtc.org/issues/42223500 +https://crbug.com/webrtc/13303,https://issues.webrtc.org/issues/42223501 +https://crbug.com/webrtc/13304,https://issues.webrtc.org/issues/42223502 +https://crbug.com/webrtc/13305,https://issues.webrtc.org/issues/42223503 +https://crbug.com/webrtc/13306,https://issues.webrtc.org/issues/42223504 +https://crbug.com/webrtc/13308,https://issues.webrtc.org/issues/42223505 +https://crbug.com/webrtc/13309,https://issues.webrtc.org/issues/42223506 +https://crbug.com/webrtc/13310,https://issues.webrtc.org/issues/42223507 +https://crbug.com/webrtc/13311,https://issues.webrtc.org/issues/42223508 +https://crbug.com/webrtc/13312,https://issues.webrtc.org/issues/42223509 +https://crbug.com/webrtc/13313,https://issues.webrtc.org/issues/42223510 +https://crbug.com/webrtc/13314,https://issues.webrtc.org/issues/42223511 +https://crbug.com/webrtc/13316,https://issues.webrtc.org/issues/42223512 +https://crbug.com/webrtc/13317,https://issues.webrtc.org/issues/42223513 +https://crbug.com/webrtc/13319,https://issues.webrtc.org/issues/42223514 +https://crbug.com/webrtc/1332,https://issues.webrtc.org/issues/42223515 +https://crbug.com/webrtc/13320,https://issues.webrtc.org/issues/42223516 +https://crbug.com/webrtc/13321,https://issues.webrtc.org/issues/42223517 +https://crbug.com/webrtc/13322,https://issues.webrtc.org/issues/42223518 +https://crbug.com/webrtc/13323,https://issues.webrtc.org/issues/42223519 +https://crbug.com/webrtc/13324,https://issues.webrtc.org/issues/42223520 +https://crbug.com/webrtc/13325,https://issues.webrtc.org/issues/42223521 +https://crbug.com/webrtc/13326,https://issues.webrtc.org/issues/42223522 +https://crbug.com/webrtc/13327,https://issues.webrtc.org/issues/42223523 +https://crbug.com/webrtc/13328,https://issues.webrtc.org/issues/42223524 +https://crbug.com/webrtc/13329,https://issues.webrtc.org/issues/42223525 +https://crbug.com/webrtc/1333,https://issues.webrtc.org/issues/42223526 +https://crbug.com/webrtc/13330,https://issues.webrtc.org/issues/42223527 +https://crbug.com/webrtc/13331,https://issues.webrtc.org/issues/42223528 +https://crbug.com/webrtc/13332,https://issues.webrtc.org/issues/42223529 +https://crbug.com/webrtc/13333,https://issues.webrtc.org/issues/42223530 +https://crbug.com/webrtc/13334,https://issues.webrtc.org/issues/42223531 +https://crbug.com/webrtc/13335,https://issues.webrtc.org/issues/42223532 +https://crbug.com/webrtc/13336,https://issues.webrtc.org/issues/42223533 +https://crbug.com/webrtc/13337,https://issues.webrtc.org/issues/42223534 +https://crbug.com/webrtc/13338,https://issues.webrtc.org/issues/42223535 +https://crbug.com/webrtc/13339,https://issues.webrtc.org/issues/42223536 +https://crbug.com/webrtc/1334,https://issues.webrtc.org/issues/42223537 +https://crbug.com/webrtc/13340,https://issues.webrtc.org/issues/42223538 +https://crbug.com/webrtc/13341,https://issues.webrtc.org/issues/42223539 +https://crbug.com/webrtc/13342,https://issues.webrtc.org/issues/42223540 +https://crbug.com/webrtc/13343,https://issues.webrtc.org/issues/42223541 +https://crbug.com/webrtc/13344,https://issues.webrtc.org/issues/42223542 +https://crbug.com/webrtc/13345,https://issues.webrtc.org/issues/42223543 +https://crbug.com/webrtc/13346,https://issues.webrtc.org/issues/42223544 +https://crbug.com/webrtc/13347,https://issues.webrtc.org/issues/42223545 +https://crbug.com/webrtc/13348,https://issues.webrtc.org/issues/42223546 
+https://crbug.com/webrtc/13349,https://issues.webrtc.org/issues/42223547 +https://crbug.com/webrtc/1335,https://issues.webrtc.org/issues/42223548 +https://crbug.com/webrtc/13350,https://issues.webrtc.org/issues/42223549 +https://crbug.com/webrtc/13351,https://issues.webrtc.org/issues/42223550 +https://crbug.com/webrtc/13352,https://issues.webrtc.org/issues/42223551 +https://crbug.com/webrtc/13353,https://issues.webrtc.org/issues/42223552 +https://crbug.com/webrtc/13354,https://issues.webrtc.org/issues/42223553 +https://crbug.com/webrtc/13355,https://issues.webrtc.org/issues/42223554 +https://crbug.com/webrtc/13356,https://issues.webrtc.org/issues/42223555 +https://crbug.com/webrtc/13357,https://issues.webrtc.org/issues/42223556 +https://crbug.com/webrtc/13358,https://issues.webrtc.org/issues/42223557 +https://crbug.com/webrtc/13359,https://issues.webrtc.org/issues/42223558 +https://crbug.com/webrtc/1336,https://issues.webrtc.org/issues/42223559 +https://crbug.com/webrtc/13360,https://issues.webrtc.org/issues/42223560 +https://crbug.com/webrtc/13361,https://issues.webrtc.org/issues/42223561 +https://crbug.com/webrtc/13362,https://issues.webrtc.org/issues/42223562 +https://crbug.com/webrtc/13363,https://issues.webrtc.org/issues/42223563 +https://crbug.com/webrtc/13364,https://issues.webrtc.org/issues/42223564 +https://crbug.com/webrtc/13365,https://issues.webrtc.org/issues/42223565 +https://crbug.com/webrtc/13366,https://issues.webrtc.org/issues/42223566 +https://crbug.com/webrtc/13367,https://issues.webrtc.org/issues/42223567 +https://crbug.com/webrtc/13368,https://issues.webrtc.org/issues/42223568 +https://crbug.com/webrtc/13369,https://issues.webrtc.org/issues/42223569 +https://crbug.com/webrtc/1337,https://issues.webrtc.org/issues/42223570 +https://crbug.com/webrtc/13370,https://issues.webrtc.org/issues/42223571 +https://crbug.com/webrtc/13371,https://issues.webrtc.org/issues/42223572 +https://crbug.com/webrtc/13372,https://issues.webrtc.org/issues/42223573 +https://crbug.com/webrtc/13373,https://issues.webrtc.org/issues/42223574 +https://crbug.com/webrtc/13374,https://issues.webrtc.org/issues/42223575 +https://crbug.com/webrtc/13375,https://issues.webrtc.org/issues/42223576 +https://crbug.com/webrtc/13376,https://issues.webrtc.org/issues/42223577 +https://crbug.com/webrtc/13377,https://issues.webrtc.org/issues/42223578 +https://crbug.com/webrtc/13378,https://issues.webrtc.org/issues/42223579 +https://crbug.com/webrtc/13379,https://issues.webrtc.org/issues/42223580 +https://crbug.com/webrtc/1338,https://issues.webrtc.org/issues/42223581 +https://crbug.com/webrtc/13380,https://issues.webrtc.org/issues/42223582 +https://crbug.com/webrtc/13381,https://issues.webrtc.org/issues/42223583 +https://crbug.com/webrtc/13382,https://issues.webrtc.org/issues/42223584 +https://crbug.com/webrtc/13383,https://issues.webrtc.org/issues/42223585 +https://crbug.com/webrtc/13384,https://issues.webrtc.org/issues/42223586 +https://crbug.com/webrtc/13385,https://issues.webrtc.org/issues/42223587 +https://crbug.com/webrtc/13386,https://issues.webrtc.org/issues/42223588 +https://crbug.com/webrtc/13388,https://issues.webrtc.org/issues/42223589 +https://crbug.com/webrtc/13389,https://issues.webrtc.org/issues/42223590 +https://crbug.com/webrtc/1339,https://issues.webrtc.org/issues/42223591 +https://crbug.com/webrtc/13390,https://issues.webrtc.org/issues/42223592 +https://crbug.com/webrtc/13391,https://issues.webrtc.org/issues/42223593 +https://crbug.com/webrtc/13392,https://issues.webrtc.org/issues/42223594 
+https://crbug.com/webrtc/13393,https://issues.webrtc.org/issues/42223595 +https://crbug.com/webrtc/13394,https://issues.webrtc.org/issues/42223596 +https://crbug.com/webrtc/13395,https://issues.webrtc.org/issues/42223597 +https://crbug.com/webrtc/13396,https://issues.webrtc.org/issues/42223598 +https://crbug.com/webrtc/13397,https://issues.webrtc.org/issues/42223599 +https://crbug.com/webrtc/13398,https://issues.webrtc.org/issues/42223600 +https://crbug.com/webrtc/13399,https://issues.webrtc.org/issues/42223601 +https://crbug.com/webrtc/134,https://issues.webrtc.org/issues/42223602 +https://crbug.com/webrtc/1340,https://issues.webrtc.org/issues/42223603 +https://crbug.com/webrtc/13400,https://issues.webrtc.org/issues/42223604 +https://crbug.com/webrtc/13401,https://issues.webrtc.org/issues/42223605 +https://crbug.com/webrtc/13402,https://issues.webrtc.org/issues/42223606 +https://crbug.com/webrtc/13403,https://issues.webrtc.org/issues/42223607 +https://crbug.com/webrtc/13404,https://issues.webrtc.org/issues/42223608 +https://crbug.com/webrtc/13405,https://issues.webrtc.org/issues/42223609 +https://crbug.com/webrtc/13406,https://issues.webrtc.org/issues/42223610 +https://crbug.com/webrtc/13407,https://issues.webrtc.org/issues/42223611 +https://crbug.com/webrtc/13408,https://issues.webrtc.org/issues/42223612 +https://crbug.com/webrtc/13409,https://issues.webrtc.org/issues/42223613 +https://crbug.com/webrtc/1341,https://issues.webrtc.org/issues/42223614 +https://crbug.com/webrtc/13410,https://issues.webrtc.org/issues/42223615 +https://crbug.com/webrtc/13411,https://issues.webrtc.org/issues/42223616 +https://crbug.com/webrtc/13412,https://issues.webrtc.org/issues/42223617 +https://crbug.com/webrtc/13413,https://issues.webrtc.org/issues/42223618 +https://crbug.com/webrtc/13414,https://issues.webrtc.org/issues/42223619 +https://crbug.com/webrtc/13415,https://issues.webrtc.org/issues/42223620 +https://crbug.com/webrtc/13416,https://issues.webrtc.org/issues/42223621 +https://crbug.com/webrtc/13417,https://issues.webrtc.org/issues/42223622 +https://crbug.com/webrtc/13418,https://issues.webrtc.org/issues/42223623 +https://crbug.com/webrtc/13419,https://issues.webrtc.org/issues/42223624 +https://crbug.com/webrtc/1342,https://issues.webrtc.org/issues/42223625 +https://crbug.com/webrtc/13420,https://issues.webrtc.org/issues/42223626 +https://crbug.com/webrtc/13421,https://issues.webrtc.org/issues/42223627 +https://crbug.com/webrtc/13422,https://issues.webrtc.org/issues/42223628 +https://crbug.com/webrtc/13423,https://issues.webrtc.org/issues/42223629 +https://crbug.com/webrtc/13424,https://issues.webrtc.org/issues/42223630 +https://crbug.com/webrtc/13425,https://issues.webrtc.org/issues/42223631 +https://crbug.com/webrtc/13427,https://issues.webrtc.org/issues/42223632 +https://crbug.com/webrtc/13428,https://issues.webrtc.org/issues/42223633 +https://crbug.com/webrtc/13429,https://issues.webrtc.org/issues/42223634 +https://crbug.com/webrtc/1343,https://issues.webrtc.org/issues/42223635 +https://crbug.com/webrtc/13430,https://issues.webrtc.org/issues/42223636 +https://crbug.com/webrtc/13431,https://issues.webrtc.org/issues/42223637 +https://crbug.com/webrtc/13432,https://issues.webrtc.org/issues/42223638 +https://crbug.com/webrtc/13433,https://issues.webrtc.org/issues/42223639 +https://crbug.com/webrtc/13434,https://issues.webrtc.org/issues/42223640 +https://crbug.com/webrtc/13435,https://issues.webrtc.org/issues/42223641 +https://crbug.com/webrtc/13436,https://issues.webrtc.org/issues/42223642 
+https://crbug.com/webrtc/13437,https://issues.webrtc.org/issues/42223643 +https://crbug.com/webrtc/13438,https://issues.webrtc.org/issues/42223644 +https://crbug.com/webrtc/13439,https://issues.webrtc.org/issues/42223645 +https://crbug.com/webrtc/1344,https://issues.webrtc.org/issues/42223646 +https://crbug.com/webrtc/13440,https://issues.webrtc.org/issues/42223647 +https://crbug.com/webrtc/13441,https://issues.webrtc.org/issues/42223648 +https://crbug.com/webrtc/13442,https://issues.webrtc.org/issues/42223649 +https://crbug.com/webrtc/13443,https://issues.webrtc.org/issues/42223650 +https://crbug.com/webrtc/13444,https://issues.webrtc.org/issues/42223651 +https://crbug.com/webrtc/13445,https://issues.webrtc.org/issues/42223652 +https://crbug.com/webrtc/13446,https://issues.webrtc.org/issues/42223653 +https://crbug.com/webrtc/13447,https://issues.webrtc.org/issues/42223654 +https://crbug.com/webrtc/13448,https://issues.webrtc.org/issues/42223655 +https://crbug.com/webrtc/13449,https://issues.webrtc.org/issues/42223656 +https://crbug.com/webrtc/1345,https://issues.webrtc.org/issues/42223657 +https://crbug.com/webrtc/13450,https://issues.webrtc.org/issues/42223658 +https://crbug.com/webrtc/13451,https://issues.webrtc.org/issues/42223659 +https://crbug.com/webrtc/13452,https://issues.webrtc.org/issues/42223660 +https://crbug.com/webrtc/13453,https://issues.webrtc.org/issues/42223661 +https://crbug.com/webrtc/13454,https://issues.webrtc.org/issues/42223662 +https://crbug.com/webrtc/13455,https://issues.webrtc.org/issues/42223663 +https://crbug.com/webrtc/13456,https://issues.webrtc.org/issues/42223664 +https://crbug.com/webrtc/13457,https://issues.webrtc.org/issues/42223665 +https://crbug.com/webrtc/13459,https://issues.webrtc.org/issues/42223666 +https://crbug.com/webrtc/1346,https://issues.webrtc.org/issues/42223667 +https://crbug.com/webrtc/13460,https://issues.webrtc.org/issues/42223668 +https://crbug.com/webrtc/13461,https://issues.webrtc.org/issues/42223669 +https://crbug.com/webrtc/13462,https://issues.webrtc.org/issues/42223670 +https://crbug.com/webrtc/13463,https://issues.webrtc.org/issues/42223671 +https://crbug.com/webrtc/13464,https://issues.webrtc.org/issues/42223672 +https://crbug.com/webrtc/13465,https://issues.webrtc.org/issues/42223673 +https://crbug.com/webrtc/13466,https://issues.webrtc.org/issues/42223674 +https://crbug.com/webrtc/13467,https://issues.webrtc.org/issues/42223675 +https://crbug.com/webrtc/13469,https://issues.webrtc.org/issues/42223676 +https://crbug.com/webrtc/1347,https://issues.webrtc.org/issues/42223677 +https://crbug.com/webrtc/13470,https://issues.webrtc.org/issues/42223678 +https://crbug.com/webrtc/13471,https://issues.webrtc.org/issues/42223679 +https://crbug.com/webrtc/13472,https://issues.webrtc.org/issues/42223680 +https://crbug.com/webrtc/13474,https://issues.webrtc.org/issues/42223681 +https://crbug.com/webrtc/13475,https://issues.webrtc.org/issues/42223682 +https://crbug.com/webrtc/13476,https://issues.webrtc.org/issues/42223683 +https://crbug.com/webrtc/13477,https://issues.webrtc.org/issues/42223684 +https://crbug.com/webrtc/13478,https://issues.webrtc.org/issues/42223685 +https://crbug.com/webrtc/13479,https://issues.webrtc.org/issues/42223686 +https://crbug.com/webrtc/1348,https://issues.webrtc.org/issues/42223687 +https://crbug.com/webrtc/13480,https://issues.webrtc.org/issues/42223688 +https://crbug.com/webrtc/13481,https://issues.webrtc.org/issues/42223689 +https://crbug.com/webrtc/13482,https://issues.webrtc.org/issues/42223690 
+https://crbug.com/webrtc/13483,https://issues.webrtc.org/issues/42223691 +https://crbug.com/webrtc/13484,https://issues.webrtc.org/issues/42223692 +https://crbug.com/webrtc/13486,https://issues.webrtc.org/issues/42223693 +https://crbug.com/webrtc/13487,https://issues.webrtc.org/issues/42223694 +https://crbug.com/webrtc/13488,https://issues.webrtc.org/issues/42223695 +https://crbug.com/webrtc/13489,https://issues.webrtc.org/issues/42223696 +https://crbug.com/webrtc/1349,https://issues.webrtc.org/issues/42223697 +https://crbug.com/webrtc/13490,https://issues.webrtc.org/issues/42223698 +https://crbug.com/webrtc/13491,https://issues.webrtc.org/issues/42223699 +https://crbug.com/webrtc/13492,https://issues.webrtc.org/issues/42223700 +https://crbug.com/webrtc/13493,https://issues.webrtc.org/issues/42223701 +https://crbug.com/webrtc/13494,https://issues.webrtc.org/issues/42223702 +https://crbug.com/webrtc/13495,https://issues.webrtc.org/issues/42223703 +https://crbug.com/webrtc/13496,https://issues.webrtc.org/issues/42223704 +https://crbug.com/webrtc/13497,https://issues.webrtc.org/issues/42223705 +https://crbug.com/webrtc/13498,https://issues.webrtc.org/issues/42223706 +https://crbug.com/webrtc/13499,https://issues.webrtc.org/issues/42223707 +https://crbug.com/webrtc/135,https://issues.webrtc.org/issues/42223708 +https://crbug.com/webrtc/1350,https://issues.webrtc.org/issues/42223709 +https://crbug.com/webrtc/13500,https://issues.webrtc.org/issues/42223710 +https://crbug.com/webrtc/13501,https://issues.webrtc.org/issues/42223711 +https://crbug.com/webrtc/13502,https://issues.webrtc.org/issues/42223712 +https://crbug.com/webrtc/13503,https://issues.webrtc.org/issues/42223713 +https://crbug.com/webrtc/13504,https://issues.webrtc.org/issues/42223714 +https://crbug.com/webrtc/13505,https://issues.webrtc.org/issues/42223715 +https://crbug.com/webrtc/13506,https://issues.webrtc.org/issues/42223716 +https://crbug.com/webrtc/13507,https://issues.webrtc.org/issues/42223717 +https://crbug.com/webrtc/13509,https://issues.webrtc.org/issues/42223718 +https://crbug.com/webrtc/1351,https://issues.webrtc.org/issues/42223719 +https://crbug.com/webrtc/13510,https://issues.webrtc.org/issues/42223720 +https://crbug.com/webrtc/13511,https://issues.webrtc.org/issues/42223721 +https://crbug.com/webrtc/13512,https://issues.webrtc.org/issues/42223722 +https://crbug.com/webrtc/13513,https://issues.webrtc.org/issues/42223723 +https://crbug.com/webrtc/13514,https://issues.webrtc.org/issues/42223724 +https://crbug.com/webrtc/13515,https://issues.webrtc.org/issues/42223725 +https://crbug.com/webrtc/13516,https://issues.webrtc.org/issues/42223726 +https://crbug.com/webrtc/13517,https://issues.webrtc.org/issues/42223727 +https://crbug.com/webrtc/13518,https://issues.webrtc.org/issues/42223728 +https://crbug.com/webrtc/13519,https://issues.webrtc.org/issues/42223729 +https://crbug.com/webrtc/1352,https://issues.webrtc.org/issues/42223730 +https://crbug.com/webrtc/13520,https://issues.webrtc.org/issues/42223731 +https://crbug.com/webrtc/13521,https://issues.webrtc.org/issues/42223732 +https://crbug.com/webrtc/13522,https://issues.webrtc.org/issues/42223733 +https://crbug.com/webrtc/13523,https://issues.webrtc.org/issues/42223734 +https://crbug.com/webrtc/13524,https://issues.webrtc.org/issues/42223735 +https://crbug.com/webrtc/13525,https://issues.webrtc.org/issues/42223736 +https://crbug.com/webrtc/13526,https://issues.webrtc.org/issues/42223737 +https://crbug.com/webrtc/13527,https://issues.webrtc.org/issues/42223738 
+https://crbug.com/webrtc/13529,https://issues.webrtc.org/issues/42223739 +https://crbug.com/webrtc/1353,https://issues.webrtc.org/issues/42223740 +https://crbug.com/webrtc/13530,https://issues.webrtc.org/issues/42223741 +https://crbug.com/webrtc/13531,https://issues.webrtc.org/issues/42223742 +https://crbug.com/webrtc/13532,https://issues.webrtc.org/issues/42223743 +https://crbug.com/webrtc/13534,https://issues.webrtc.org/issues/42223744 +https://crbug.com/webrtc/13535,https://issues.webrtc.org/issues/42223745 +https://crbug.com/webrtc/13536,https://issues.webrtc.org/issues/42223746 +https://crbug.com/webrtc/13537,https://issues.webrtc.org/issues/42223747 +https://crbug.com/webrtc/13538,https://issues.webrtc.org/issues/42223748 +https://crbug.com/webrtc/13539,https://issues.webrtc.org/issues/42223749 +https://crbug.com/webrtc/1354,https://issues.webrtc.org/issues/42223750 +https://crbug.com/webrtc/13540,https://issues.webrtc.org/issues/42223751 +https://crbug.com/webrtc/13541,https://issues.webrtc.org/issues/42223752 +https://crbug.com/webrtc/13542,https://issues.webrtc.org/issues/42223753 +https://crbug.com/webrtc/13543,https://issues.webrtc.org/issues/42223754 +https://crbug.com/webrtc/13544,https://issues.webrtc.org/issues/42223755 +https://crbug.com/webrtc/13545,https://issues.webrtc.org/issues/42223756 +https://crbug.com/webrtc/13546,https://issues.webrtc.org/issues/42223757 +https://crbug.com/webrtc/13547,https://issues.webrtc.org/issues/42223758 +https://crbug.com/webrtc/13548,https://issues.webrtc.org/issues/42223759 +https://crbug.com/webrtc/13549,https://issues.webrtc.org/issues/42223760 +https://crbug.com/webrtc/1355,https://issues.webrtc.org/issues/42223761 +https://crbug.com/webrtc/13550,https://issues.webrtc.org/issues/42223762 +https://crbug.com/webrtc/13551,https://issues.webrtc.org/issues/42223763 +https://crbug.com/webrtc/13554,https://issues.webrtc.org/issues/42223764 +https://crbug.com/webrtc/13555,https://issues.webrtc.org/issues/42223765 +https://crbug.com/webrtc/13556,https://issues.webrtc.org/issues/42223766 +https://crbug.com/webrtc/13557,https://issues.webrtc.org/issues/42223767 +https://crbug.com/webrtc/13558,https://issues.webrtc.org/issues/42223768 +https://crbug.com/webrtc/13559,https://issues.webrtc.org/issues/42223769 +https://crbug.com/webrtc/1356,https://issues.webrtc.org/issues/42223770 +https://crbug.com/webrtc/13561,https://issues.webrtc.org/issues/42223771 +https://crbug.com/webrtc/13562,https://issues.webrtc.org/issues/42223772 +https://crbug.com/webrtc/13563,https://issues.webrtc.org/issues/42223773 +https://crbug.com/webrtc/13564,https://issues.webrtc.org/issues/42223774 +https://crbug.com/webrtc/13565,https://issues.webrtc.org/issues/42223775 +https://crbug.com/webrtc/13566,https://issues.webrtc.org/issues/42223776 +https://crbug.com/webrtc/13567,https://issues.webrtc.org/issues/42223777 +https://crbug.com/webrtc/13568,https://issues.webrtc.org/issues/42223778 +https://crbug.com/webrtc/13569,https://issues.webrtc.org/issues/42223779 +https://crbug.com/webrtc/1357,https://issues.webrtc.org/issues/42223780 +https://crbug.com/webrtc/13570,https://issues.webrtc.org/issues/42223781 +https://crbug.com/webrtc/13571,https://issues.webrtc.org/issues/42223782 +https://crbug.com/webrtc/13572,https://issues.webrtc.org/issues/42223783 +https://crbug.com/webrtc/13573,https://issues.webrtc.org/issues/42223784 +https://crbug.com/webrtc/13574,https://issues.webrtc.org/issues/42223785 +https://crbug.com/webrtc/13575,https://issues.webrtc.org/issues/42223786 
+https://crbug.com/webrtc/13576,https://issues.webrtc.org/issues/42223787 +https://crbug.com/webrtc/13577,https://issues.webrtc.org/issues/42223788 +https://crbug.com/webrtc/13578,https://issues.webrtc.org/issues/42223789 +https://crbug.com/webrtc/13579,https://issues.webrtc.org/issues/42223790 +https://crbug.com/webrtc/1358,https://issues.webrtc.org/issues/42223791 +https://crbug.com/webrtc/13580,https://issues.webrtc.org/issues/42223792 +https://crbug.com/webrtc/13581,https://issues.webrtc.org/issues/42223793 +https://crbug.com/webrtc/13582,https://issues.webrtc.org/issues/42223794 +https://crbug.com/webrtc/13584,https://issues.webrtc.org/issues/42223795 +https://crbug.com/webrtc/13585,https://issues.webrtc.org/issues/42223796 +https://crbug.com/webrtc/13586,https://issues.webrtc.org/issues/42223797 +https://crbug.com/webrtc/13587,https://issues.webrtc.org/issues/42223798 +https://crbug.com/webrtc/13588,https://issues.webrtc.org/issues/42223799 +https://crbug.com/webrtc/13589,https://issues.webrtc.org/issues/42223800 +https://crbug.com/webrtc/1359,https://issues.webrtc.org/issues/42223801 +https://crbug.com/webrtc/13590,https://issues.webrtc.org/issues/42223802 +https://crbug.com/webrtc/13591,https://issues.webrtc.org/issues/42223803 +https://crbug.com/webrtc/13592,https://issues.webrtc.org/issues/42223804 +https://crbug.com/webrtc/13593,https://issues.webrtc.org/issues/42223805 +https://crbug.com/webrtc/13594,https://issues.webrtc.org/issues/42223806 +https://crbug.com/webrtc/13595,https://issues.webrtc.org/issues/42223807 +https://crbug.com/webrtc/13597,https://issues.webrtc.org/issues/42223808 +https://crbug.com/webrtc/13598,https://issues.webrtc.org/issues/42223809 +https://crbug.com/webrtc/13599,https://issues.webrtc.org/issues/42223810 +https://crbug.com/webrtc/136,https://issues.webrtc.org/issues/42223811 +https://crbug.com/webrtc/1360,https://issues.webrtc.org/issues/42223812 +https://crbug.com/webrtc/13600,https://issues.webrtc.org/issues/42223813 +https://crbug.com/webrtc/13601,https://issues.webrtc.org/issues/42223814 +https://crbug.com/webrtc/13603,https://issues.webrtc.org/issues/42223815 +https://crbug.com/webrtc/13605,https://issues.webrtc.org/issues/42223816 +https://crbug.com/webrtc/13606,https://issues.webrtc.org/issues/42223817 +https://crbug.com/webrtc/13607,https://issues.webrtc.org/issues/42223818 +https://crbug.com/webrtc/13608,https://issues.webrtc.org/issues/42223819 +https://crbug.com/webrtc/13609,https://issues.webrtc.org/issues/42223820 +https://crbug.com/webrtc/1361,https://issues.webrtc.org/issues/42223821 +https://crbug.com/webrtc/13610,https://issues.webrtc.org/issues/42223822 +https://crbug.com/webrtc/13611,https://issues.webrtc.org/issues/42223823 +https://crbug.com/webrtc/13612,https://issues.webrtc.org/issues/42223824 +https://crbug.com/webrtc/13613,https://issues.webrtc.org/issues/42223825 +https://crbug.com/webrtc/13614,https://issues.webrtc.org/issues/42223826 +https://crbug.com/webrtc/13615,https://issues.webrtc.org/issues/42223827 +https://crbug.com/webrtc/13616,https://issues.webrtc.org/issues/42223828 +https://crbug.com/webrtc/13617,https://issues.webrtc.org/issues/42223829 +https://crbug.com/webrtc/13618,https://issues.webrtc.org/issues/42223830 +https://crbug.com/webrtc/13619,https://issues.webrtc.org/issues/42223831 +https://crbug.com/webrtc/1362,https://issues.webrtc.org/issues/42223832 +https://crbug.com/webrtc/13620,https://issues.webrtc.org/issues/42223833 +https://crbug.com/webrtc/13621,https://issues.webrtc.org/issues/42223834 
+https://crbug.com/webrtc/13622,https://issues.webrtc.org/issues/42223835 +https://crbug.com/webrtc/13623,https://issues.webrtc.org/issues/42223836 +https://crbug.com/webrtc/13624,https://issues.webrtc.org/issues/42223837 +https://crbug.com/webrtc/13625,https://issues.webrtc.org/issues/42223838 +https://crbug.com/webrtc/13626,https://issues.webrtc.org/issues/42223839 +https://crbug.com/webrtc/13627,https://issues.webrtc.org/issues/42223840 +https://crbug.com/webrtc/13628,https://issues.webrtc.org/issues/42223841 +https://crbug.com/webrtc/13629,https://issues.webrtc.org/issues/42223842 +https://crbug.com/webrtc/1363,https://issues.webrtc.org/issues/42223843 +https://crbug.com/webrtc/13630,https://issues.webrtc.org/issues/42223844 +https://crbug.com/webrtc/13631,https://issues.webrtc.org/issues/42223845 +https://crbug.com/webrtc/13632,https://issues.webrtc.org/issues/42223846 +https://crbug.com/webrtc/13633,https://issues.webrtc.org/issues/42223847 +https://crbug.com/webrtc/13634,https://issues.webrtc.org/issues/42223848 +https://crbug.com/webrtc/13635,https://issues.webrtc.org/issues/42223849 +https://crbug.com/webrtc/13636,https://issues.webrtc.org/issues/42223850 +https://crbug.com/webrtc/13637,https://issues.webrtc.org/issues/42223851 +https://crbug.com/webrtc/13638,https://issues.webrtc.org/issues/42223852 +https://crbug.com/webrtc/13639,https://issues.webrtc.org/issues/42223853 +https://crbug.com/webrtc/1364,https://issues.webrtc.org/issues/42223854 +https://crbug.com/webrtc/13640,https://issues.webrtc.org/issues/42223855 +https://crbug.com/webrtc/13641,https://issues.webrtc.org/issues/42223856 +https://crbug.com/webrtc/13642,https://issues.webrtc.org/issues/42223857 +https://crbug.com/webrtc/13643,https://issues.webrtc.org/issues/42223858 +https://crbug.com/webrtc/13644,https://issues.webrtc.org/issues/42223859 +https://crbug.com/webrtc/13645,https://issues.webrtc.org/issues/42223860 +https://crbug.com/webrtc/13646,https://issues.webrtc.org/issues/42223861 +https://crbug.com/webrtc/13647,https://issues.webrtc.org/issues/42223862 +https://crbug.com/webrtc/13648,https://issues.webrtc.org/issues/42223863 +https://crbug.com/webrtc/13649,https://issues.webrtc.org/issues/42223864 +https://crbug.com/webrtc/1365,https://issues.webrtc.org/issues/42223865 +https://crbug.com/webrtc/13650,https://issues.webrtc.org/issues/42223866 +https://crbug.com/webrtc/13651,https://issues.webrtc.org/issues/42223867 +https://crbug.com/webrtc/13653,https://issues.webrtc.org/issues/42223868 +https://crbug.com/webrtc/13654,https://issues.webrtc.org/issues/42223869 +https://crbug.com/webrtc/13655,https://issues.webrtc.org/issues/42223870 +https://crbug.com/webrtc/13656,https://issues.webrtc.org/issues/42223871 +https://crbug.com/webrtc/13657,https://issues.webrtc.org/issues/42223872 +https://crbug.com/webrtc/13658,https://issues.webrtc.org/issues/42223873 +https://crbug.com/webrtc/13659,https://issues.webrtc.org/issues/42223874 +https://crbug.com/webrtc/1366,https://issues.webrtc.org/issues/42223875 +https://crbug.com/webrtc/13660,https://issues.webrtc.org/issues/42223876 +https://crbug.com/webrtc/13661,https://issues.webrtc.org/issues/42223877 +https://crbug.com/webrtc/13662,https://issues.webrtc.org/issues/42223878 +https://crbug.com/webrtc/13663,https://issues.webrtc.org/issues/42223879 +https://crbug.com/webrtc/13664,https://issues.webrtc.org/issues/42223880 +https://crbug.com/webrtc/13665,https://issues.webrtc.org/issues/42223881 +https://crbug.com/webrtc/13666,https://issues.webrtc.org/issues/42223882 
+https://crbug.com/webrtc/13668,https://issues.webrtc.org/issues/42223883 +https://crbug.com/webrtc/13669,https://issues.webrtc.org/issues/42223884 +https://crbug.com/webrtc/1367,https://issues.webrtc.org/issues/42223885 +https://crbug.com/webrtc/13670,https://issues.webrtc.org/issues/42223886 +https://crbug.com/webrtc/13672,https://issues.webrtc.org/issues/42223887 +https://crbug.com/webrtc/13673,https://issues.webrtc.org/issues/42223888 +https://crbug.com/webrtc/13674,https://issues.webrtc.org/issues/42223889 +https://crbug.com/webrtc/13675,https://issues.webrtc.org/issues/42223890 +https://crbug.com/webrtc/13676,https://issues.webrtc.org/issues/42223891 +https://crbug.com/webrtc/13677,https://issues.webrtc.org/issues/42223892 +https://crbug.com/webrtc/13678,https://issues.webrtc.org/issues/42223893 +https://crbug.com/webrtc/13679,https://issues.webrtc.org/issues/42223894 +https://crbug.com/webrtc/1368,https://issues.webrtc.org/issues/42223895 +https://crbug.com/webrtc/13680,https://issues.webrtc.org/issues/42223896 +https://crbug.com/webrtc/13681,https://issues.webrtc.org/issues/42223897 +https://crbug.com/webrtc/13682,https://issues.webrtc.org/issues/42223898 +https://crbug.com/webrtc/13683,https://issues.webrtc.org/issues/42223899 +https://crbug.com/webrtc/13684,https://issues.webrtc.org/issues/42223900 +https://crbug.com/webrtc/13685,https://issues.webrtc.org/issues/42223901 +https://crbug.com/webrtc/13686,https://issues.webrtc.org/issues/42223902 +https://crbug.com/webrtc/13687,https://issues.webrtc.org/issues/42223903 +https://crbug.com/webrtc/13688,https://issues.webrtc.org/issues/42223904 +https://crbug.com/webrtc/13689,https://issues.webrtc.org/issues/42223905 +https://crbug.com/webrtc/1369,https://issues.webrtc.org/issues/42223906 +https://crbug.com/webrtc/13690,https://issues.webrtc.org/issues/42223907 +https://crbug.com/webrtc/13691,https://issues.webrtc.org/issues/42223908 +https://crbug.com/webrtc/13692,https://issues.webrtc.org/issues/42223909 +https://crbug.com/webrtc/13693,https://issues.webrtc.org/issues/42223910 +https://crbug.com/webrtc/13694,https://issues.webrtc.org/issues/42223911 +https://crbug.com/webrtc/13695,https://issues.webrtc.org/issues/42223912 +https://crbug.com/webrtc/13696,https://issues.webrtc.org/issues/42223913 +https://crbug.com/webrtc/13697,https://issues.webrtc.org/issues/42223914 +https://crbug.com/webrtc/13698,https://issues.webrtc.org/issues/42223915 +https://crbug.com/webrtc/13699,https://issues.webrtc.org/issues/42223916 +https://crbug.com/webrtc/137,https://issues.webrtc.org/issues/42223917 +https://crbug.com/webrtc/1370,https://issues.webrtc.org/issues/42223918 +https://crbug.com/webrtc/13700,https://issues.webrtc.org/issues/42223919 +https://crbug.com/webrtc/13701,https://issues.webrtc.org/issues/42223920 +https://crbug.com/webrtc/13702,https://issues.webrtc.org/issues/42223921 +https://crbug.com/webrtc/13703,https://issues.webrtc.org/issues/42223922 +https://crbug.com/webrtc/13704,https://issues.webrtc.org/issues/42223923 +https://crbug.com/webrtc/13705,https://issues.webrtc.org/issues/42223924 +https://crbug.com/webrtc/13706,https://issues.webrtc.org/issues/42223925 +https://crbug.com/webrtc/13707,https://issues.webrtc.org/issues/42223926 +https://crbug.com/webrtc/13708,https://issues.webrtc.org/issues/42223927 +https://crbug.com/webrtc/13709,https://issues.webrtc.org/issues/42223928 +https://crbug.com/webrtc/1371,https://issues.webrtc.org/issues/42223929 +https://crbug.com/webrtc/13710,https://issues.webrtc.org/issues/42223930 
+https://crbug.com/webrtc/13711,https://issues.webrtc.org/issues/42223931 +https://crbug.com/webrtc/13712,https://issues.webrtc.org/issues/42223932 +https://crbug.com/webrtc/13713,https://issues.webrtc.org/issues/42223933 +https://crbug.com/webrtc/13714,https://issues.webrtc.org/issues/42223934 +https://crbug.com/webrtc/13715,https://issues.webrtc.org/issues/42223935 +https://crbug.com/webrtc/13716,https://issues.webrtc.org/issues/42223936 +https://crbug.com/webrtc/13717,https://issues.webrtc.org/issues/42223937 +https://crbug.com/webrtc/13718,https://issues.webrtc.org/issues/42223938 +https://crbug.com/webrtc/13719,https://issues.webrtc.org/issues/42223939 +https://crbug.com/webrtc/1372,https://issues.webrtc.org/issues/42223940 +https://crbug.com/webrtc/13720,https://issues.webrtc.org/issues/42223941 +https://crbug.com/webrtc/13721,https://issues.webrtc.org/issues/42223942 +https://crbug.com/webrtc/13722,https://issues.webrtc.org/issues/42223943 +https://crbug.com/webrtc/13723,https://issues.webrtc.org/issues/42223944 +https://crbug.com/webrtc/13724,https://issues.webrtc.org/issues/42223945 +https://crbug.com/webrtc/13725,https://issues.webrtc.org/issues/42223946 +https://crbug.com/webrtc/13726,https://issues.webrtc.org/issues/42223947 +https://crbug.com/webrtc/13727,https://issues.webrtc.org/issues/42223948 +https://crbug.com/webrtc/13728,https://issues.webrtc.org/issues/42223949 +https://crbug.com/webrtc/13729,https://issues.webrtc.org/issues/42223950 +https://crbug.com/webrtc/1373,https://issues.webrtc.org/issues/42223951 +https://crbug.com/webrtc/13730,https://issues.webrtc.org/issues/42223952 +https://crbug.com/webrtc/13731,https://issues.webrtc.org/issues/42223953 +https://crbug.com/webrtc/13732,https://issues.webrtc.org/issues/42223954 +https://crbug.com/webrtc/13733,https://issues.webrtc.org/issues/42223955 +https://crbug.com/webrtc/13734,https://issues.webrtc.org/issues/42223956 +https://crbug.com/webrtc/13735,https://issues.webrtc.org/issues/42223957 +https://crbug.com/webrtc/13736,https://issues.webrtc.org/issues/42223958 +https://crbug.com/webrtc/13737,https://issues.webrtc.org/issues/42223959 +https://crbug.com/webrtc/13738,https://issues.webrtc.org/issues/42223960 +https://crbug.com/webrtc/13739,https://issues.webrtc.org/issues/42223961 +https://crbug.com/webrtc/1374,https://issues.webrtc.org/issues/42223962 +https://crbug.com/webrtc/13740,https://issues.webrtc.org/issues/42223963 +https://crbug.com/webrtc/13741,https://issues.webrtc.org/issues/42223964 +https://crbug.com/webrtc/13742,https://issues.webrtc.org/issues/42223965 +https://crbug.com/webrtc/13743,https://issues.webrtc.org/issues/42223966 +https://crbug.com/webrtc/13744,https://issues.webrtc.org/issues/42223967 +https://crbug.com/webrtc/13745,https://issues.webrtc.org/issues/42223968 +https://crbug.com/webrtc/13746,https://issues.webrtc.org/issues/42223969 +https://crbug.com/webrtc/13747,https://issues.webrtc.org/issues/42223970 +https://crbug.com/webrtc/13748,https://issues.webrtc.org/issues/42223971 +https://crbug.com/webrtc/13749,https://issues.webrtc.org/issues/42223972 +https://crbug.com/webrtc/1375,https://issues.webrtc.org/issues/42223973 +https://crbug.com/webrtc/13750,https://issues.webrtc.org/issues/42223974 +https://crbug.com/webrtc/13752,https://issues.webrtc.org/issues/42223975 +https://crbug.com/webrtc/13753,https://issues.webrtc.org/issues/42223976 +https://crbug.com/webrtc/13754,https://issues.webrtc.org/issues/42223977 +https://crbug.com/webrtc/13755,https://issues.webrtc.org/issues/42223978 
+https://crbug.com/webrtc/13756,https://issues.webrtc.org/issues/42223979 +https://crbug.com/webrtc/13757,https://issues.webrtc.org/issues/42223980 +https://crbug.com/webrtc/13758,https://issues.webrtc.org/issues/42223981 +https://crbug.com/webrtc/13759,https://issues.webrtc.org/issues/42223982 +https://crbug.com/webrtc/1376,https://issues.webrtc.org/issues/42223983 +https://crbug.com/webrtc/13760,https://issues.webrtc.org/issues/42223984 +https://crbug.com/webrtc/13761,https://issues.webrtc.org/issues/42223985 +https://crbug.com/webrtc/13762,https://issues.webrtc.org/issues/42223986 +https://crbug.com/webrtc/13763,https://issues.webrtc.org/issues/42223987 +https://crbug.com/webrtc/13764,https://issues.webrtc.org/issues/42223988 +https://crbug.com/webrtc/13765,https://issues.webrtc.org/issues/42223989 +https://crbug.com/webrtc/13766,https://issues.webrtc.org/issues/42223990 +https://crbug.com/webrtc/13767,https://issues.webrtc.org/issues/42223991 +https://crbug.com/webrtc/13768,https://issues.webrtc.org/issues/42223992 +https://crbug.com/webrtc/13769,https://issues.webrtc.org/issues/42223993 +https://crbug.com/webrtc/1377,https://issues.webrtc.org/issues/42223994 +https://crbug.com/webrtc/13770,https://issues.webrtc.org/issues/42223995 +https://crbug.com/webrtc/13771,https://issues.webrtc.org/issues/42223996 +https://crbug.com/webrtc/13772,https://issues.webrtc.org/issues/42223997 +https://crbug.com/webrtc/13773,https://issues.webrtc.org/issues/42223998 +https://crbug.com/webrtc/13774,https://issues.webrtc.org/issues/42223999 +https://crbug.com/webrtc/13775,https://issues.webrtc.org/issues/42224000 +https://crbug.com/webrtc/13776,https://issues.webrtc.org/issues/42224001 +https://crbug.com/webrtc/13777,https://issues.webrtc.org/issues/42224002 +https://crbug.com/webrtc/13778,https://issues.webrtc.org/issues/42224003 +https://crbug.com/webrtc/13779,https://issues.webrtc.org/issues/42224004 +https://crbug.com/webrtc/1378,https://issues.webrtc.org/issues/42224005 +https://crbug.com/webrtc/13780,https://issues.webrtc.org/issues/42224006 +https://crbug.com/webrtc/13781,https://issues.webrtc.org/issues/42224007 +https://crbug.com/webrtc/13782,https://issues.webrtc.org/issues/42224008 +https://crbug.com/webrtc/13783,https://issues.webrtc.org/issues/42224009 +https://crbug.com/webrtc/13784,https://issues.webrtc.org/issues/42224010 +https://crbug.com/webrtc/13785,https://issues.webrtc.org/issues/42224011 +https://crbug.com/webrtc/13786,https://issues.webrtc.org/issues/42224012 +https://crbug.com/webrtc/13787,https://issues.webrtc.org/issues/42224013 +https://crbug.com/webrtc/13788,https://issues.webrtc.org/issues/42224014 +https://crbug.com/webrtc/1379,https://issues.webrtc.org/issues/42224015 +https://crbug.com/webrtc/13790,https://issues.webrtc.org/issues/42224016 +https://crbug.com/webrtc/13791,https://issues.webrtc.org/issues/42224017 +https://crbug.com/webrtc/13792,https://issues.webrtc.org/issues/42224018 +https://crbug.com/webrtc/13793,https://issues.webrtc.org/issues/42224019 +https://crbug.com/webrtc/13794,https://issues.webrtc.org/issues/42224020 +https://crbug.com/webrtc/13795,https://issues.webrtc.org/issues/42224021 +https://crbug.com/webrtc/13796,https://issues.webrtc.org/issues/42224022 +https://crbug.com/webrtc/13797,https://issues.webrtc.org/issues/42224023 +https://crbug.com/webrtc/13798,https://issues.webrtc.org/issues/42224024 +https://crbug.com/webrtc/13799,https://issues.webrtc.org/issues/42224025 +https://crbug.com/webrtc/138,https://issues.webrtc.org/issues/42224026 
+https://crbug.com/webrtc/1380,https://issues.webrtc.org/issues/42224027 +https://crbug.com/webrtc/13800,https://issues.webrtc.org/issues/42224028 +https://crbug.com/webrtc/13801,https://issues.webrtc.org/issues/42224029 +https://crbug.com/webrtc/13802,https://issues.webrtc.org/issues/42224030 +https://crbug.com/webrtc/13803,https://issues.webrtc.org/issues/42224031 +https://crbug.com/webrtc/13804,https://issues.webrtc.org/issues/42224032 +https://crbug.com/webrtc/13805,https://issues.webrtc.org/issues/42224033 +https://crbug.com/webrtc/13806,https://issues.webrtc.org/issues/42224034 +https://crbug.com/webrtc/13807,https://issues.webrtc.org/issues/42224035 +https://crbug.com/webrtc/13808,https://issues.webrtc.org/issues/42224036 +https://crbug.com/webrtc/13809,https://issues.webrtc.org/issues/42224037 +https://crbug.com/webrtc/1381,https://issues.webrtc.org/issues/42224038 +https://crbug.com/webrtc/13810,https://issues.webrtc.org/issues/42224039 +https://crbug.com/webrtc/13811,https://issues.webrtc.org/issues/42224040 +https://crbug.com/webrtc/13812,https://issues.webrtc.org/issues/42224041 +https://crbug.com/webrtc/13813,https://issues.webrtc.org/issues/42224042 +https://crbug.com/webrtc/13814,https://issues.webrtc.org/issues/42224043 +https://crbug.com/webrtc/13815,https://issues.webrtc.org/issues/42224044 +https://crbug.com/webrtc/13816,https://issues.webrtc.org/issues/42224045 +https://crbug.com/webrtc/13817,https://issues.webrtc.org/issues/42224046 +https://crbug.com/webrtc/13818,https://issues.webrtc.org/issues/42224047 +https://crbug.com/webrtc/13819,https://issues.webrtc.org/issues/42224048 +https://crbug.com/webrtc/1382,https://issues.webrtc.org/issues/42224049 +https://crbug.com/webrtc/13820,https://issues.webrtc.org/issues/42224050 +https://crbug.com/webrtc/13821,https://issues.webrtc.org/issues/42224051 +https://crbug.com/webrtc/13822,https://issues.webrtc.org/issues/42224052 +https://crbug.com/webrtc/13823,https://issues.webrtc.org/issues/42224053 +https://crbug.com/webrtc/13825,https://issues.webrtc.org/issues/42224054 +https://crbug.com/webrtc/13826,https://issues.webrtc.org/issues/42224055 +https://crbug.com/webrtc/13827,https://issues.webrtc.org/issues/42224056 +https://crbug.com/webrtc/13828,https://issues.webrtc.org/issues/42224057 +https://crbug.com/webrtc/13829,https://issues.webrtc.org/issues/42224058 +https://crbug.com/webrtc/1383,https://issues.webrtc.org/issues/42224059 +https://crbug.com/webrtc/13830,https://issues.webrtc.org/issues/42224060 +https://crbug.com/webrtc/13831,https://issues.webrtc.org/issues/42224061 +https://crbug.com/webrtc/13832,https://issues.webrtc.org/issues/42224062 +https://crbug.com/webrtc/13833,https://issues.webrtc.org/issues/42224063 +https://crbug.com/webrtc/13834,https://issues.webrtc.org/issues/42224064 +https://crbug.com/webrtc/13835,https://issues.webrtc.org/issues/42224065 +https://crbug.com/webrtc/13836,https://issues.webrtc.org/issues/42224066 +https://crbug.com/webrtc/13837,https://issues.webrtc.org/issues/42224067 +https://crbug.com/webrtc/13838,https://issues.webrtc.org/issues/42224068 +https://crbug.com/webrtc/13839,https://issues.webrtc.org/issues/42224069 +https://crbug.com/webrtc/1384,https://issues.webrtc.org/issues/42224070 +https://crbug.com/webrtc/13840,https://issues.webrtc.org/issues/42224071 +https://crbug.com/webrtc/13841,https://issues.webrtc.org/issues/42224072 +https://crbug.com/webrtc/13842,https://issues.webrtc.org/issues/42224073 +https://crbug.com/webrtc/13843,https://issues.webrtc.org/issues/42224074 
+https://crbug.com/webrtc/13844,https://issues.webrtc.org/issues/42224075 +https://crbug.com/webrtc/13845,https://issues.webrtc.org/issues/42224076 +https://crbug.com/webrtc/13846,https://issues.webrtc.org/issues/42224077 +https://crbug.com/webrtc/13847,https://issues.webrtc.org/issues/42224078 +https://crbug.com/webrtc/13848,https://issues.webrtc.org/issues/42224079 +https://crbug.com/webrtc/13849,https://issues.webrtc.org/issues/42224080 +https://crbug.com/webrtc/1385,https://issues.webrtc.org/issues/42224081 +https://crbug.com/webrtc/13850,https://issues.webrtc.org/issues/42224082 +https://crbug.com/webrtc/13851,https://issues.webrtc.org/issues/42224083 +https://crbug.com/webrtc/13852,https://issues.webrtc.org/issues/42224084 +https://crbug.com/webrtc/13853,https://issues.webrtc.org/issues/42224085 +https://crbug.com/webrtc/13854,https://issues.webrtc.org/issues/42224086 +https://crbug.com/webrtc/13855,https://issues.webrtc.org/issues/42224087 +https://crbug.com/webrtc/13856,https://issues.webrtc.org/issues/42224088 +https://crbug.com/webrtc/13858,https://issues.webrtc.org/issues/42224089 +https://crbug.com/webrtc/13859,https://issues.webrtc.org/issues/42224090 +https://crbug.com/webrtc/1386,https://issues.webrtc.org/issues/42224091 +https://crbug.com/webrtc/13860,https://issues.webrtc.org/issues/42224092 +https://crbug.com/webrtc/13861,https://issues.webrtc.org/issues/42224093 +https://crbug.com/webrtc/13862,https://issues.webrtc.org/issues/42224094 +https://crbug.com/webrtc/13863,https://issues.webrtc.org/issues/42224095 +https://crbug.com/webrtc/13864,https://issues.webrtc.org/issues/42224096 +https://crbug.com/webrtc/13865,https://issues.webrtc.org/issues/42224097 +https://crbug.com/webrtc/13866,https://issues.webrtc.org/issues/42224098 +https://crbug.com/webrtc/13867,https://issues.webrtc.org/issues/42224099 +https://crbug.com/webrtc/13868,https://issues.webrtc.org/issues/42224100 +https://crbug.com/webrtc/13869,https://issues.webrtc.org/issues/42224101 +https://crbug.com/webrtc/1387,https://issues.webrtc.org/issues/42224102 +https://crbug.com/webrtc/13870,https://issues.webrtc.org/issues/42224103 +https://crbug.com/webrtc/13871,https://issues.webrtc.org/issues/42224104 +https://crbug.com/webrtc/13872,https://issues.webrtc.org/issues/42224105 +https://crbug.com/webrtc/13873,https://issues.webrtc.org/issues/42224106 +https://crbug.com/webrtc/13874,https://issues.webrtc.org/issues/42224107 +https://crbug.com/webrtc/13875,https://issues.webrtc.org/issues/42224108 +https://crbug.com/webrtc/13876,https://issues.webrtc.org/issues/42224109 +https://crbug.com/webrtc/13877,https://issues.webrtc.org/issues/42224110 +https://crbug.com/webrtc/13878,https://issues.webrtc.org/issues/42224111 +https://crbug.com/webrtc/13879,https://issues.webrtc.org/issues/42224112 +https://crbug.com/webrtc/1388,https://issues.webrtc.org/issues/42224113 +https://crbug.com/webrtc/13880,https://issues.webrtc.org/issues/42224114 +https://crbug.com/webrtc/13881,https://issues.webrtc.org/issues/42224115 +https://crbug.com/webrtc/13882,https://issues.webrtc.org/issues/42224116 +https://crbug.com/webrtc/13883,https://issues.webrtc.org/issues/42224117 +https://crbug.com/webrtc/13884,https://issues.webrtc.org/issues/42224118 +https://crbug.com/webrtc/13885,https://issues.webrtc.org/issues/42224119 +https://crbug.com/webrtc/13886,https://issues.webrtc.org/issues/42224120 +https://crbug.com/webrtc/13887,https://issues.webrtc.org/issues/42224121 +https://crbug.com/webrtc/13888,https://issues.webrtc.org/issues/42224122 
+https://crbug.com/webrtc/13889,https://issues.webrtc.org/issues/42224123 +https://crbug.com/webrtc/1389,https://issues.webrtc.org/issues/42224124 +https://crbug.com/webrtc/13890,https://issues.webrtc.org/issues/42224125 +https://crbug.com/webrtc/13891,https://issues.webrtc.org/issues/42224126 +https://crbug.com/webrtc/13892,https://issues.webrtc.org/issues/42224127 +https://crbug.com/webrtc/13893,https://issues.webrtc.org/issues/42224128 +https://crbug.com/webrtc/13894,https://issues.webrtc.org/issues/42224129 +https://crbug.com/webrtc/13895,https://issues.webrtc.org/issues/42224130 +https://crbug.com/webrtc/13896,https://issues.webrtc.org/issues/42224131 +https://crbug.com/webrtc/13897,https://issues.webrtc.org/issues/42224132 +https://crbug.com/webrtc/13898,https://issues.webrtc.org/issues/42224133 +https://crbug.com/webrtc/13899,https://issues.webrtc.org/issues/42224134 +https://crbug.com/webrtc/139,https://issues.webrtc.org/issues/42224135 +https://crbug.com/webrtc/1390,https://issues.webrtc.org/issues/42224136 +https://crbug.com/webrtc/13900,https://issues.webrtc.org/issues/42224137 +https://crbug.com/webrtc/13901,https://issues.webrtc.org/issues/42224138 +https://crbug.com/webrtc/13902,https://issues.webrtc.org/issues/42224139 +https://crbug.com/webrtc/13903,https://issues.webrtc.org/issues/42224140 +https://crbug.com/webrtc/13904,https://issues.webrtc.org/issues/42224141 +https://crbug.com/webrtc/13905,https://issues.webrtc.org/issues/42224142 +https://crbug.com/webrtc/13906,https://issues.webrtc.org/issues/42224143 +https://crbug.com/webrtc/13907,https://issues.webrtc.org/issues/42224144 +https://crbug.com/webrtc/13908,https://issues.webrtc.org/issues/42224145 +https://crbug.com/webrtc/13909,https://issues.webrtc.org/issues/42224146 +https://crbug.com/webrtc/1391,https://issues.webrtc.org/issues/42224147 +https://crbug.com/webrtc/13910,https://issues.webrtc.org/issues/42224148 +https://crbug.com/webrtc/13911,https://issues.webrtc.org/issues/42224149 +https://crbug.com/webrtc/13912,https://issues.webrtc.org/issues/42224150 +https://crbug.com/webrtc/13913,https://issues.webrtc.org/issues/42224151 +https://crbug.com/webrtc/13914,https://issues.webrtc.org/issues/42224152 +https://crbug.com/webrtc/13915,https://issues.webrtc.org/issues/42224153 +https://crbug.com/webrtc/13916,https://issues.webrtc.org/issues/42224154 +https://crbug.com/webrtc/13917,https://issues.webrtc.org/issues/42224155 +https://crbug.com/webrtc/13918,https://issues.webrtc.org/issues/42224156 +https://crbug.com/webrtc/13919,https://issues.webrtc.org/issues/42224157 +https://crbug.com/webrtc/1392,https://issues.webrtc.org/issues/42224158 +https://crbug.com/webrtc/13920,https://issues.webrtc.org/issues/42224159 +https://crbug.com/webrtc/13921,https://issues.webrtc.org/issues/42224160 +https://crbug.com/webrtc/13922,https://issues.webrtc.org/issues/42224161 +https://crbug.com/webrtc/13923,https://issues.webrtc.org/issues/42224162 +https://crbug.com/webrtc/13924,https://issues.webrtc.org/issues/42224163 +https://crbug.com/webrtc/13925,https://issues.webrtc.org/issues/42224164 +https://crbug.com/webrtc/13926,https://issues.webrtc.org/issues/42224165 +https://crbug.com/webrtc/13927,https://issues.webrtc.org/issues/42224166 +https://crbug.com/webrtc/13928,https://issues.webrtc.org/issues/42224167 +https://crbug.com/webrtc/13929,https://issues.webrtc.org/issues/42224168 +https://crbug.com/webrtc/1393,https://issues.webrtc.org/issues/42224169 +https://crbug.com/webrtc/13931,https://issues.webrtc.org/issues/42224170 
+https://crbug.com/webrtc/13932,https://issues.webrtc.org/issues/42224171 +https://crbug.com/webrtc/13933,https://issues.webrtc.org/issues/42224172 +https://crbug.com/webrtc/13934,https://issues.webrtc.org/issues/42224173 +https://crbug.com/webrtc/13935,https://issues.webrtc.org/issues/42224174 +https://crbug.com/webrtc/13936,https://issues.webrtc.org/issues/42224175 +https://crbug.com/webrtc/13938,https://issues.webrtc.org/issues/42224176 +https://crbug.com/webrtc/13939,https://issues.webrtc.org/issues/42224177 +https://crbug.com/webrtc/1394,https://issues.webrtc.org/issues/42224178 +https://crbug.com/webrtc/13940,https://issues.webrtc.org/issues/42224179 +https://crbug.com/webrtc/13941,https://issues.webrtc.org/issues/42224180 +https://crbug.com/webrtc/13942,https://issues.webrtc.org/issues/42224181 +https://crbug.com/webrtc/13943,https://issues.webrtc.org/issues/42224182 +https://crbug.com/webrtc/13944,https://issues.webrtc.org/issues/42224183 +https://crbug.com/webrtc/13945,https://issues.webrtc.org/issues/42224184 +https://crbug.com/webrtc/13946,https://issues.webrtc.org/issues/42224185 +https://crbug.com/webrtc/13947,https://issues.webrtc.org/issues/42224186 +https://crbug.com/webrtc/13948,https://issues.webrtc.org/issues/42224187 +https://crbug.com/webrtc/13949,https://issues.webrtc.org/issues/42224188 +https://crbug.com/webrtc/1395,https://issues.webrtc.org/issues/42224189 +https://crbug.com/webrtc/13950,https://issues.webrtc.org/issues/42224190 +https://crbug.com/webrtc/13951,https://issues.webrtc.org/issues/42224191 +https://crbug.com/webrtc/13952,https://issues.webrtc.org/issues/42224192 +https://crbug.com/webrtc/13953,https://issues.webrtc.org/issues/42224193 +https://crbug.com/webrtc/13954,https://issues.webrtc.org/issues/42224194 +https://crbug.com/webrtc/13955,https://issues.webrtc.org/issues/42224195 +https://crbug.com/webrtc/13956,https://issues.webrtc.org/issues/42224196 +https://crbug.com/webrtc/13957,https://issues.webrtc.org/issues/42224197 +https://crbug.com/webrtc/13958,https://issues.webrtc.org/issues/42224198 +https://crbug.com/webrtc/13959,https://issues.webrtc.org/issues/42224199 +https://crbug.com/webrtc/1396,https://issues.webrtc.org/issues/42224200 +https://crbug.com/webrtc/13960,https://issues.webrtc.org/issues/42224201 +https://crbug.com/webrtc/13961,https://issues.webrtc.org/issues/42224202 +https://crbug.com/webrtc/13962,https://issues.webrtc.org/issues/42224203 +https://crbug.com/webrtc/13963,https://issues.webrtc.org/issues/42224204 +https://crbug.com/webrtc/13964,https://issues.webrtc.org/issues/42224205 +https://crbug.com/webrtc/13965,https://issues.webrtc.org/issues/42224206 +https://crbug.com/webrtc/13966,https://issues.webrtc.org/issues/42224207 +https://crbug.com/webrtc/13967,https://issues.webrtc.org/issues/42224208 +https://crbug.com/webrtc/13968,https://issues.webrtc.org/issues/42224209 +https://crbug.com/webrtc/13969,https://issues.webrtc.org/issues/42224210 +https://crbug.com/webrtc/1397,https://issues.webrtc.org/issues/42224211 +https://crbug.com/webrtc/13970,https://issues.webrtc.org/issues/42224212 +https://crbug.com/webrtc/13971,https://issues.webrtc.org/issues/42224213 +https://crbug.com/webrtc/13974,https://issues.webrtc.org/issues/42224214 +https://crbug.com/webrtc/13975,https://issues.webrtc.org/issues/42224215 +https://crbug.com/webrtc/13976,https://issues.webrtc.org/issues/42224216 +https://crbug.com/webrtc/13977,https://issues.webrtc.org/issues/42224217 +https://crbug.com/webrtc/13978,https://issues.webrtc.org/issues/42224218 
+https://crbug.com/webrtc/13979,https://issues.webrtc.org/issues/42224219 +https://crbug.com/webrtc/1398,https://issues.webrtc.org/issues/42224220 +https://crbug.com/webrtc/13980,https://issues.webrtc.org/issues/42224221 +https://crbug.com/webrtc/13981,https://issues.webrtc.org/issues/42224222 +https://crbug.com/webrtc/13982,https://issues.webrtc.org/issues/42224223 +https://crbug.com/webrtc/13983,https://issues.webrtc.org/issues/42224224 +https://crbug.com/webrtc/13984,https://issues.webrtc.org/issues/42224225 +https://crbug.com/webrtc/13985,https://issues.webrtc.org/issues/42224226 +https://crbug.com/webrtc/13986,https://issues.webrtc.org/issues/42224227 +https://crbug.com/webrtc/13987,https://issues.webrtc.org/issues/42224228 +https://crbug.com/webrtc/13988,https://issues.webrtc.org/issues/42224229 +https://crbug.com/webrtc/13989,https://issues.webrtc.org/issues/42224230 +https://crbug.com/webrtc/1399,https://issues.webrtc.org/issues/42224231 +https://crbug.com/webrtc/13990,https://issues.webrtc.org/issues/42224232 +https://crbug.com/webrtc/13991,https://issues.webrtc.org/issues/42224233 +https://crbug.com/webrtc/13992,https://issues.webrtc.org/issues/42224234 +https://crbug.com/webrtc/13993,https://issues.webrtc.org/issues/42224235 +https://crbug.com/webrtc/13994,https://issues.webrtc.org/issues/42224236 +https://crbug.com/webrtc/13995,https://issues.webrtc.org/issues/42224237 +https://crbug.com/webrtc/13996,https://issues.webrtc.org/issues/42224238 +https://crbug.com/webrtc/13997,https://issues.webrtc.org/issues/42224239 +https://crbug.com/webrtc/13998,https://issues.webrtc.org/issues/42224240 +https://crbug.com/webrtc/13999,https://issues.webrtc.org/issues/42224241 +https://crbug.com/webrtc/14,https://issues.webrtc.org/issues/42224242 +https://crbug.com/webrtc/140,https://issues.webrtc.org/issues/42224243 +https://crbug.com/webrtc/1400,https://issues.webrtc.org/issues/42224244 +https://crbug.com/webrtc/14000,https://issues.webrtc.org/issues/42224245 +https://crbug.com/webrtc/14002,https://issues.webrtc.org/issues/42224246 +https://crbug.com/webrtc/14003,https://issues.webrtc.org/issues/42224247 +https://crbug.com/webrtc/14004,https://issues.webrtc.org/issues/42224248 +https://crbug.com/webrtc/14006,https://issues.webrtc.org/issues/42224249 +https://crbug.com/webrtc/14007,https://issues.webrtc.org/issues/42224250 +https://crbug.com/webrtc/14009,https://issues.webrtc.org/issues/42224251 +https://crbug.com/webrtc/1401,https://issues.webrtc.org/issues/42224252 +https://crbug.com/webrtc/14010,https://issues.webrtc.org/issues/42224253 +https://crbug.com/webrtc/14011,https://issues.webrtc.org/issues/42224254 +https://crbug.com/webrtc/14012,https://issues.webrtc.org/issues/42224255 +https://crbug.com/webrtc/14013,https://issues.webrtc.org/issues/42224256 +https://crbug.com/webrtc/14014,https://issues.webrtc.org/issues/42224257 +https://crbug.com/webrtc/14015,https://issues.webrtc.org/issues/42224258 +https://crbug.com/webrtc/14016,https://issues.webrtc.org/issues/42224259 +https://crbug.com/webrtc/14017,https://issues.webrtc.org/issues/42224260 +https://crbug.com/webrtc/14018,https://issues.webrtc.org/issues/42224261 +https://crbug.com/webrtc/14019,https://issues.webrtc.org/issues/42224262 +https://crbug.com/webrtc/1402,https://issues.webrtc.org/issues/42224263 +https://crbug.com/webrtc/14020,https://issues.webrtc.org/issues/42224264 +https://crbug.com/webrtc/14021,https://issues.webrtc.org/issues/42224265 +https://crbug.com/webrtc/14022,https://issues.webrtc.org/issues/42224266 
+https://crbug.com/webrtc/14023,https://issues.webrtc.org/issues/42224267 +https://crbug.com/webrtc/14024,https://issues.webrtc.org/issues/42224268 +https://crbug.com/webrtc/14025,https://issues.webrtc.org/issues/42224269 +https://crbug.com/webrtc/14026,https://issues.webrtc.org/issues/42224270 +https://crbug.com/webrtc/14027,https://issues.webrtc.org/issues/42224271 +https://crbug.com/webrtc/14028,https://issues.webrtc.org/issues/42224272 +https://crbug.com/webrtc/14029,https://issues.webrtc.org/issues/42224273 +https://crbug.com/webrtc/1403,https://issues.webrtc.org/issues/42224274 +https://crbug.com/webrtc/14030,https://issues.webrtc.org/issues/42224275 +https://crbug.com/webrtc/14031,https://issues.webrtc.org/issues/42224276 +https://crbug.com/webrtc/14032,https://issues.webrtc.org/issues/42224277 +https://crbug.com/webrtc/14033,https://issues.webrtc.org/issues/42224278 +https://crbug.com/webrtc/14034,https://issues.webrtc.org/issues/42224279 +https://crbug.com/webrtc/14035,https://issues.webrtc.org/issues/42224280 +https://crbug.com/webrtc/14036,https://issues.webrtc.org/issues/42224281 +https://crbug.com/webrtc/14037,https://issues.webrtc.org/issues/42224282 +https://crbug.com/webrtc/14038,https://issues.webrtc.org/issues/42224283 +https://crbug.com/webrtc/14039,https://issues.webrtc.org/issues/42224284 +https://crbug.com/webrtc/1404,https://issues.webrtc.org/issues/42224285 +https://crbug.com/webrtc/14040,https://issues.webrtc.org/issues/42224286 +https://crbug.com/webrtc/14041,https://issues.webrtc.org/issues/42224287 +https://crbug.com/webrtc/14042,https://issues.webrtc.org/issues/42224288 +https://crbug.com/webrtc/14043,https://issues.webrtc.org/issues/42224289 +https://crbug.com/webrtc/14044,https://issues.webrtc.org/issues/42224290 +https://crbug.com/webrtc/14045,https://issues.webrtc.org/issues/42224291 +https://crbug.com/webrtc/14046,https://issues.webrtc.org/issues/42224292 +https://crbug.com/webrtc/14047,https://issues.webrtc.org/issues/42224293 +https://crbug.com/webrtc/14048,https://issues.webrtc.org/issues/42224294 +https://crbug.com/webrtc/14049,https://issues.webrtc.org/issues/42224295 +https://crbug.com/webrtc/1405,https://issues.webrtc.org/issues/42224296 +https://crbug.com/webrtc/14051,https://issues.webrtc.org/issues/42224297 +https://crbug.com/webrtc/14052,https://issues.webrtc.org/issues/42224298 +https://crbug.com/webrtc/14053,https://issues.webrtc.org/issues/42224299 +https://crbug.com/webrtc/14054,https://issues.webrtc.org/issues/42224300 +https://crbug.com/webrtc/14056,https://issues.webrtc.org/issues/42224301 +https://crbug.com/webrtc/14057,https://issues.webrtc.org/issues/42224302 +https://crbug.com/webrtc/14058,https://issues.webrtc.org/issues/42224303 +https://crbug.com/webrtc/14059,https://issues.webrtc.org/issues/42224304 +https://crbug.com/webrtc/1406,https://issues.webrtc.org/issues/42224305 +https://crbug.com/webrtc/14060,https://issues.webrtc.org/issues/42224306 +https://crbug.com/webrtc/14062,https://issues.webrtc.org/issues/42224307 +https://crbug.com/webrtc/14063,https://issues.webrtc.org/issues/42224308 +https://crbug.com/webrtc/14064,https://issues.webrtc.org/issues/42224309 +https://crbug.com/webrtc/14065,https://issues.webrtc.org/issues/42224310 +https://crbug.com/webrtc/14066,https://issues.webrtc.org/issues/42224311 +https://crbug.com/webrtc/14067,https://issues.webrtc.org/issues/42224312 +https://crbug.com/webrtc/14068,https://issues.webrtc.org/issues/42224313 +https://crbug.com/webrtc/14069,https://issues.webrtc.org/issues/42224314 
+https://crbug.com/webrtc/1407,https://issues.webrtc.org/issues/42224315 +https://crbug.com/webrtc/14070,https://issues.webrtc.org/issues/42224316 +https://crbug.com/webrtc/14071,https://issues.webrtc.org/issues/42224317 +https://crbug.com/webrtc/14072,https://issues.webrtc.org/issues/42224318 +https://crbug.com/webrtc/14073,https://issues.webrtc.org/issues/42224319 +https://crbug.com/webrtc/14074,https://issues.webrtc.org/issues/42224320 +https://crbug.com/webrtc/14075,https://issues.webrtc.org/issues/42224321 +https://crbug.com/webrtc/14076,https://issues.webrtc.org/issues/42224322 +https://crbug.com/webrtc/14077,https://issues.webrtc.org/issues/42224323 +https://crbug.com/webrtc/14078,https://issues.webrtc.org/issues/42224324 +https://crbug.com/webrtc/14079,https://issues.webrtc.org/issues/42224325 +https://crbug.com/webrtc/1408,https://issues.webrtc.org/issues/42224326 +https://crbug.com/webrtc/14080,https://issues.webrtc.org/issues/42224327 +https://crbug.com/webrtc/14081,https://issues.webrtc.org/issues/42224328 +https://crbug.com/webrtc/14082,https://issues.webrtc.org/issues/42224329 +https://crbug.com/webrtc/14083,https://issues.webrtc.org/issues/42224330 +https://crbug.com/webrtc/14084,https://issues.webrtc.org/issues/42224331 +https://crbug.com/webrtc/14085,https://issues.webrtc.org/issues/42224332 +https://crbug.com/webrtc/14086,https://issues.webrtc.org/issues/42224333 +https://crbug.com/webrtc/14087,https://issues.webrtc.org/issues/42224334 +https://crbug.com/webrtc/14088,https://issues.webrtc.org/issues/42224335 +https://crbug.com/webrtc/14089,https://issues.webrtc.org/issues/42224336 +https://crbug.com/webrtc/1409,https://issues.webrtc.org/issues/42224337 +https://crbug.com/webrtc/14090,https://issues.webrtc.org/issues/42224338 +https://crbug.com/webrtc/14091,https://issues.webrtc.org/issues/42224339 +https://crbug.com/webrtc/14092,https://issues.webrtc.org/issues/42224340 +https://crbug.com/webrtc/14093,https://issues.webrtc.org/issues/42224341 +https://crbug.com/webrtc/14094,https://issues.webrtc.org/issues/42224342 +https://crbug.com/webrtc/14095,https://issues.webrtc.org/issues/42224343 +https://crbug.com/webrtc/14096,https://issues.webrtc.org/issues/42224344 +https://crbug.com/webrtc/14097,https://issues.webrtc.org/issues/42224345 +https://crbug.com/webrtc/14098,https://issues.webrtc.org/issues/42224346 +https://crbug.com/webrtc/14099,https://issues.webrtc.org/issues/42224347 +https://crbug.com/webrtc/141,https://issues.webrtc.org/issues/42224348 +https://crbug.com/webrtc/1410,https://issues.webrtc.org/issues/42224349 +https://crbug.com/webrtc/14100,https://issues.webrtc.org/issues/42224350 +https://crbug.com/webrtc/14101,https://issues.webrtc.org/issues/42224351 +https://crbug.com/webrtc/14102,https://issues.webrtc.org/issues/42224352 +https://crbug.com/webrtc/14103,https://issues.webrtc.org/issues/42224353 +https://crbug.com/webrtc/14104,https://issues.webrtc.org/issues/42224354 +https://crbug.com/webrtc/14105,https://issues.webrtc.org/issues/42224355 +https://crbug.com/webrtc/14106,https://issues.webrtc.org/issues/42224356 +https://crbug.com/webrtc/14107,https://issues.webrtc.org/issues/42224357 +https://crbug.com/webrtc/14108,https://issues.webrtc.org/issues/42224358 +https://crbug.com/webrtc/14109,https://issues.webrtc.org/issues/42224359 +https://crbug.com/webrtc/1411,https://issues.webrtc.org/issues/42224360 +https://crbug.com/webrtc/14110,https://issues.webrtc.org/issues/42224361 +https://crbug.com/webrtc/14111,https://issues.webrtc.org/issues/42224362 
+https://crbug.com/webrtc/14112,https://issues.webrtc.org/issues/42224363 +https://crbug.com/webrtc/14114,https://issues.webrtc.org/issues/42224364 +https://crbug.com/webrtc/14115,https://issues.webrtc.org/issues/42224365 +https://crbug.com/webrtc/14116,https://issues.webrtc.org/issues/42224366 +https://crbug.com/webrtc/14117,https://issues.webrtc.org/issues/42224367 +https://crbug.com/webrtc/14118,https://issues.webrtc.org/issues/42224368 +https://crbug.com/webrtc/14119,https://issues.webrtc.org/issues/42224369 +https://crbug.com/webrtc/1412,https://issues.webrtc.org/issues/42224370 +https://crbug.com/webrtc/14120,https://issues.webrtc.org/issues/42224371 +https://crbug.com/webrtc/14121,https://issues.webrtc.org/issues/42224372 +https://crbug.com/webrtc/14122,https://issues.webrtc.org/issues/42224373 +https://crbug.com/webrtc/14123,https://issues.webrtc.org/issues/42224374 +https://crbug.com/webrtc/14124,https://issues.webrtc.org/issues/42224375 +https://crbug.com/webrtc/14125,https://issues.webrtc.org/issues/42224376 +https://crbug.com/webrtc/14126,https://issues.webrtc.org/issues/42224377 +https://crbug.com/webrtc/14127,https://issues.webrtc.org/issues/42224378 +https://crbug.com/webrtc/14128,https://issues.webrtc.org/issues/42224379 +https://crbug.com/webrtc/14129,https://issues.webrtc.org/issues/42224380 +https://crbug.com/webrtc/14130,https://issues.webrtc.org/issues/42224381 +https://crbug.com/webrtc/14131,https://issues.webrtc.org/issues/42224382 +https://crbug.com/webrtc/14132,https://issues.webrtc.org/issues/42224383 +https://crbug.com/webrtc/14133,https://issues.webrtc.org/issues/42224384 +https://crbug.com/webrtc/14134,https://issues.webrtc.org/issues/42224385 +https://crbug.com/webrtc/14135,https://issues.webrtc.org/issues/42224386 +https://crbug.com/webrtc/14136,https://issues.webrtc.org/issues/42224387 +https://crbug.com/webrtc/14137,https://issues.webrtc.org/issues/42224388 +https://crbug.com/webrtc/14138,https://issues.webrtc.org/issues/42224389 +https://crbug.com/webrtc/14139,https://issues.webrtc.org/issues/42224390 +https://crbug.com/webrtc/1414,https://issues.webrtc.org/issues/42224391 +https://crbug.com/webrtc/14140,https://issues.webrtc.org/issues/42224392 +https://crbug.com/webrtc/14141,https://issues.webrtc.org/issues/42224393 +https://crbug.com/webrtc/14142,https://issues.webrtc.org/issues/42224394 +https://crbug.com/webrtc/14143,https://issues.webrtc.org/issues/42224395 +https://crbug.com/webrtc/14144,https://issues.webrtc.org/issues/42224396 +https://crbug.com/webrtc/14145,https://issues.webrtc.org/issues/42224397 +https://crbug.com/webrtc/14146,https://issues.webrtc.org/issues/42224398 +https://crbug.com/webrtc/14147,https://issues.webrtc.org/issues/42224399 +https://crbug.com/webrtc/14148,https://issues.webrtc.org/issues/42224400 +https://crbug.com/webrtc/14149,https://issues.webrtc.org/issues/42224401 +https://crbug.com/webrtc/1415,https://issues.webrtc.org/issues/42224402 +https://crbug.com/webrtc/14150,https://issues.webrtc.org/issues/42224403 +https://crbug.com/webrtc/14151,https://issues.webrtc.org/issues/42224404 +https://crbug.com/webrtc/14153,https://issues.webrtc.org/issues/42224405 +https://crbug.com/webrtc/14154,https://issues.webrtc.org/issues/42224406 +https://crbug.com/webrtc/14155,https://issues.webrtc.org/issues/42224407 +https://crbug.com/webrtc/14156,https://issues.webrtc.org/issues/42224408 +https://crbug.com/webrtc/14157,https://issues.webrtc.org/issues/42224409 +https://crbug.com/webrtc/14158,https://issues.webrtc.org/issues/42224410 
+https://crbug.com/webrtc/14159,https://issues.webrtc.org/issues/42224411 +https://crbug.com/webrtc/1416,https://issues.webrtc.org/issues/42224412 +https://crbug.com/webrtc/14160,https://issues.webrtc.org/issues/42224413 +https://crbug.com/webrtc/14162,https://issues.webrtc.org/issues/42224414 +https://crbug.com/webrtc/14163,https://issues.webrtc.org/issues/42224415 +https://crbug.com/webrtc/14164,https://issues.webrtc.org/issues/42224416 +https://crbug.com/webrtc/14165,https://issues.webrtc.org/issues/42224417 +https://crbug.com/webrtc/14166,https://issues.webrtc.org/issues/42224418 +https://crbug.com/webrtc/14167,https://issues.webrtc.org/issues/42224419 +https://crbug.com/webrtc/14168,https://issues.webrtc.org/issues/42224420 +https://crbug.com/webrtc/14169,https://issues.webrtc.org/issues/42224421 +https://crbug.com/webrtc/1417,https://issues.webrtc.org/issues/42224422 +https://crbug.com/webrtc/14170,https://issues.webrtc.org/issues/42224423 +https://crbug.com/webrtc/14171,https://issues.webrtc.org/issues/42224424 +https://crbug.com/webrtc/14172,https://issues.webrtc.org/issues/42224425 +https://crbug.com/webrtc/14173,https://issues.webrtc.org/issues/42224426 +https://crbug.com/webrtc/14174,https://issues.webrtc.org/issues/42224427 +https://crbug.com/webrtc/14176,https://issues.webrtc.org/issues/42224428 +https://crbug.com/webrtc/14177,https://issues.webrtc.org/issues/42224429 +https://crbug.com/webrtc/14178,https://issues.webrtc.org/issues/42224430 +https://crbug.com/webrtc/14179,https://issues.webrtc.org/issues/42224431 +https://crbug.com/webrtc/1418,https://issues.webrtc.org/issues/42224432 +https://crbug.com/webrtc/14180,https://issues.webrtc.org/issues/42224433 +https://crbug.com/webrtc/14181,https://issues.webrtc.org/issues/42224434 +https://crbug.com/webrtc/14182,https://issues.webrtc.org/issues/42224435 +https://crbug.com/webrtc/14183,https://issues.webrtc.org/issues/42224436 +https://crbug.com/webrtc/14184,https://issues.webrtc.org/issues/42224437 +https://crbug.com/webrtc/14185,https://issues.webrtc.org/issues/42224438 +https://crbug.com/webrtc/14186,https://issues.webrtc.org/issues/42224439 +https://crbug.com/webrtc/14187,https://issues.webrtc.org/issues/42224440 +https://crbug.com/webrtc/14188,https://issues.webrtc.org/issues/42224441 +https://crbug.com/webrtc/14189,https://issues.webrtc.org/issues/42224442 +https://crbug.com/webrtc/1419,https://issues.webrtc.org/issues/42224443 +https://crbug.com/webrtc/14190,https://issues.webrtc.org/issues/42224444 +https://crbug.com/webrtc/14191,https://issues.webrtc.org/issues/42224445 +https://crbug.com/webrtc/14192,https://issues.webrtc.org/issues/42224446 +https://crbug.com/webrtc/14193,https://issues.webrtc.org/issues/42224447 +https://crbug.com/webrtc/14194,https://issues.webrtc.org/issues/42224448 +https://crbug.com/webrtc/14196,https://issues.webrtc.org/issues/42224449 +https://crbug.com/webrtc/14197,https://issues.webrtc.org/issues/42224450 +https://crbug.com/webrtc/14198,https://issues.webrtc.org/issues/42224451 +https://crbug.com/webrtc/14199,https://issues.webrtc.org/issues/42224452 +https://crbug.com/webrtc/142,https://issues.webrtc.org/issues/42224453 +https://crbug.com/webrtc/1420,https://issues.webrtc.org/issues/42224454 +https://crbug.com/webrtc/14200,https://issues.webrtc.org/issues/42224455 +https://crbug.com/webrtc/14201,https://issues.webrtc.org/issues/42224456 +https://crbug.com/webrtc/14202,https://issues.webrtc.org/issues/42224457 +https://crbug.com/webrtc/14203,https://issues.webrtc.org/issues/42224458 
+https://crbug.com/webrtc/14204,https://issues.webrtc.org/issues/42224459 +https://crbug.com/webrtc/14205,https://issues.webrtc.org/issues/42224460 +https://crbug.com/webrtc/14206,https://issues.webrtc.org/issues/42224461 +https://crbug.com/webrtc/14207,https://issues.webrtc.org/issues/42224462 +https://crbug.com/webrtc/14208,https://issues.webrtc.org/issues/42224463 +https://crbug.com/webrtc/14209,https://issues.webrtc.org/issues/42224464 +https://crbug.com/webrtc/1421,https://issues.webrtc.org/issues/42224465 +https://crbug.com/webrtc/14211,https://issues.webrtc.org/issues/42224466 +https://crbug.com/webrtc/14212,https://issues.webrtc.org/issues/42224467 +https://crbug.com/webrtc/14213,https://issues.webrtc.org/issues/42224468 +https://crbug.com/webrtc/14214,https://issues.webrtc.org/issues/42224469 +https://crbug.com/webrtc/14215,https://issues.webrtc.org/issues/42224470 +https://crbug.com/webrtc/14216,https://issues.webrtc.org/issues/42224471 +https://crbug.com/webrtc/14217,https://issues.webrtc.org/issues/42224472 +https://crbug.com/webrtc/14218,https://issues.webrtc.org/issues/42224473 +https://crbug.com/webrtc/14219,https://issues.webrtc.org/issues/42224474 +https://crbug.com/webrtc/1422,https://issues.webrtc.org/issues/42224475 +https://crbug.com/webrtc/14221,https://issues.webrtc.org/issues/42224476 +https://crbug.com/webrtc/14222,https://issues.webrtc.org/issues/42224477 +https://crbug.com/webrtc/14223,https://issues.webrtc.org/issues/42224478 +https://crbug.com/webrtc/14224,https://issues.webrtc.org/issues/42224479 +https://crbug.com/webrtc/14226,https://issues.webrtc.org/issues/42224480 +https://crbug.com/webrtc/14227,https://issues.webrtc.org/issues/42224481 +https://crbug.com/webrtc/14228,https://issues.webrtc.org/issues/42224482 +https://crbug.com/webrtc/14229,https://issues.webrtc.org/issues/42224483 +https://crbug.com/webrtc/1423,https://issues.webrtc.org/issues/42224484 +https://crbug.com/webrtc/14230,https://issues.webrtc.org/issues/42224485 +https://crbug.com/webrtc/14231,https://issues.webrtc.org/issues/42224486 +https://crbug.com/webrtc/14232,https://issues.webrtc.org/issues/42224487 +https://crbug.com/webrtc/14233,https://issues.webrtc.org/issues/42224488 +https://crbug.com/webrtc/14234,https://issues.webrtc.org/issues/42224489 +https://crbug.com/webrtc/14235,https://issues.webrtc.org/issues/42224490 +https://crbug.com/webrtc/14236,https://issues.webrtc.org/issues/42224491 +https://crbug.com/webrtc/14237,https://issues.webrtc.org/issues/42224492 +https://crbug.com/webrtc/14238,https://issues.webrtc.org/issues/42224493 +https://crbug.com/webrtc/14239,https://issues.webrtc.org/issues/42224494 +https://crbug.com/webrtc/1424,https://issues.webrtc.org/issues/42224495 +https://crbug.com/webrtc/14240,https://issues.webrtc.org/issues/42224496 +https://crbug.com/webrtc/14241,https://issues.webrtc.org/issues/42224497 +https://crbug.com/webrtc/14242,https://issues.webrtc.org/issues/42224498 +https://crbug.com/webrtc/14243,https://issues.webrtc.org/issues/42224499 +https://crbug.com/webrtc/14244,https://issues.webrtc.org/issues/42224500 +https://crbug.com/webrtc/14245,https://issues.webrtc.org/issues/42224501 +https://crbug.com/webrtc/14246,https://issues.webrtc.org/issues/42224502 +https://crbug.com/webrtc/14247,https://issues.webrtc.org/issues/42224503 +https://crbug.com/webrtc/14248,https://issues.webrtc.org/issues/42224504 +https://crbug.com/webrtc/14249,https://issues.webrtc.org/issues/42224505 +https://crbug.com/webrtc/1425,https://issues.webrtc.org/issues/42224506 
+https://crbug.com/webrtc/14250,https://issues.webrtc.org/issues/42224507 +https://crbug.com/webrtc/14251,https://issues.webrtc.org/issues/42224508 +https://crbug.com/webrtc/14252,https://issues.webrtc.org/issues/42224509 +https://crbug.com/webrtc/14253,https://issues.webrtc.org/issues/42224510 +https://crbug.com/webrtc/14254,https://issues.webrtc.org/issues/42224511 +https://crbug.com/webrtc/14255,https://issues.webrtc.org/issues/42224512 +https://crbug.com/webrtc/14256,https://issues.webrtc.org/issues/42224513 +https://crbug.com/webrtc/14257,https://issues.webrtc.org/issues/42224514 +https://crbug.com/webrtc/14258,https://issues.webrtc.org/issues/42224515 +https://crbug.com/webrtc/14259,https://issues.webrtc.org/issues/42224516 +https://crbug.com/webrtc/1426,https://issues.webrtc.org/issues/42224517 +https://crbug.com/webrtc/14260,https://issues.webrtc.org/issues/42224518 +https://crbug.com/webrtc/14261,https://issues.webrtc.org/issues/42224519 +https://crbug.com/webrtc/14262,https://issues.webrtc.org/issues/42224520 +https://crbug.com/webrtc/14263,https://issues.webrtc.org/issues/42224521 +https://crbug.com/webrtc/14264,https://issues.webrtc.org/issues/42224522 +https://crbug.com/webrtc/14265,https://issues.webrtc.org/issues/42224523 +https://crbug.com/webrtc/14266,https://issues.webrtc.org/issues/42224524 +https://crbug.com/webrtc/14267,https://issues.webrtc.org/issues/42224525 +https://crbug.com/webrtc/14268,https://issues.webrtc.org/issues/42224526 +https://crbug.com/webrtc/14269,https://issues.webrtc.org/issues/42224527 +https://crbug.com/webrtc/1427,https://issues.webrtc.org/issues/42224528 +https://crbug.com/webrtc/14270,https://issues.webrtc.org/issues/42224529 +https://crbug.com/webrtc/14271,https://issues.webrtc.org/issues/42224530 +https://crbug.com/webrtc/14272,https://issues.webrtc.org/issues/42224531 +https://crbug.com/webrtc/14273,https://issues.webrtc.org/issues/42224532 +https://crbug.com/webrtc/14276,https://issues.webrtc.org/issues/42224533 +https://crbug.com/webrtc/14277,https://issues.webrtc.org/issues/42224534 +https://crbug.com/webrtc/14278,https://issues.webrtc.org/issues/42224535 +https://crbug.com/webrtc/14279,https://issues.webrtc.org/issues/42224536 +https://crbug.com/webrtc/1428,https://issues.webrtc.org/issues/42224537 +https://crbug.com/webrtc/14280,https://issues.webrtc.org/issues/42224538 +https://crbug.com/webrtc/14281,https://issues.webrtc.org/issues/42224539 +https://crbug.com/webrtc/14282,https://issues.webrtc.org/issues/42224540 +https://crbug.com/webrtc/14283,https://issues.webrtc.org/issues/42224541 +https://crbug.com/webrtc/14284,https://issues.webrtc.org/issues/42224542 +https://crbug.com/webrtc/14285,https://issues.webrtc.org/issues/42224543 +https://crbug.com/webrtc/14286,https://issues.webrtc.org/issues/42224544 +https://crbug.com/webrtc/14287,https://issues.webrtc.org/issues/42224545 +https://crbug.com/webrtc/14288,https://issues.webrtc.org/issues/42224546 +https://crbug.com/webrtc/14289,https://issues.webrtc.org/issues/42224547 +https://crbug.com/webrtc/1429,https://issues.webrtc.org/issues/42224548 +https://crbug.com/webrtc/14290,https://issues.webrtc.org/issues/42224549 +https://crbug.com/webrtc/14291,https://issues.webrtc.org/issues/42224550 +https://crbug.com/webrtc/14292,https://issues.webrtc.org/issues/42224551 +https://crbug.com/webrtc/14293,https://issues.webrtc.org/issues/42224552 +https://crbug.com/webrtc/14294,https://issues.webrtc.org/issues/42224553 +https://crbug.com/webrtc/14295,https://issues.webrtc.org/issues/42224554 
+https://crbug.com/webrtc/14296,https://issues.webrtc.org/issues/42224555 +https://crbug.com/webrtc/14297,https://issues.webrtc.org/issues/42224556 +https://crbug.com/webrtc/14298,https://issues.webrtc.org/issues/42224557 +https://crbug.com/webrtc/14299,https://issues.webrtc.org/issues/42224558 +https://crbug.com/webrtc/143,https://issues.webrtc.org/issues/42224559 +https://crbug.com/webrtc/1430,https://issues.webrtc.org/issues/42224560 +https://crbug.com/webrtc/14300,https://issues.webrtc.org/issues/42224561 +https://crbug.com/webrtc/14301,https://issues.webrtc.org/issues/42224562 +https://crbug.com/webrtc/14302,https://issues.webrtc.org/issues/42224563 +https://crbug.com/webrtc/14303,https://issues.webrtc.org/issues/42224564 +https://crbug.com/webrtc/14304,https://issues.webrtc.org/issues/42224565 +https://crbug.com/webrtc/14305,https://issues.webrtc.org/issues/42224566 +https://crbug.com/webrtc/14306,https://issues.webrtc.org/issues/42224567 +https://crbug.com/webrtc/14307,https://issues.webrtc.org/issues/42224568 +https://crbug.com/webrtc/14308,https://issues.webrtc.org/issues/42224569 +https://crbug.com/webrtc/14309,https://issues.webrtc.org/issues/42224570 +https://crbug.com/webrtc/1431,https://issues.webrtc.org/issues/42224571 +https://crbug.com/webrtc/14310,https://issues.webrtc.org/issues/42224572 +https://crbug.com/webrtc/14311,https://issues.webrtc.org/issues/42224573 +https://crbug.com/webrtc/14312,https://issues.webrtc.org/issues/42224574 +https://crbug.com/webrtc/14313,https://issues.webrtc.org/issues/42224575 +https://crbug.com/webrtc/14314,https://issues.webrtc.org/issues/42224576 +https://crbug.com/webrtc/14315,https://issues.webrtc.org/issues/42224577 +https://crbug.com/webrtc/14316,https://issues.webrtc.org/issues/42224578 +https://crbug.com/webrtc/14317,https://issues.webrtc.org/issues/42224579 +https://crbug.com/webrtc/14318,https://issues.webrtc.org/issues/42224580 +https://crbug.com/webrtc/14319,https://issues.webrtc.org/issues/42224581 +https://crbug.com/webrtc/1432,https://issues.webrtc.org/issues/42224582 +https://crbug.com/webrtc/14320,https://issues.webrtc.org/issues/42224583 +https://crbug.com/webrtc/14321,https://issues.webrtc.org/issues/42224584 +https://crbug.com/webrtc/14322,https://issues.webrtc.org/issues/42224585 +https://crbug.com/webrtc/14323,https://issues.webrtc.org/issues/42224586 +https://crbug.com/webrtc/14324,https://issues.webrtc.org/issues/42224587 +https://crbug.com/webrtc/14325,https://issues.webrtc.org/issues/42224588 +https://crbug.com/webrtc/14326,https://issues.webrtc.org/issues/42224589 +https://crbug.com/webrtc/14327,https://issues.webrtc.org/issues/42224590 +https://crbug.com/webrtc/14328,https://issues.webrtc.org/issues/42224591 +https://crbug.com/webrtc/14329,https://issues.webrtc.org/issues/42224592 +https://crbug.com/webrtc/1433,https://issues.webrtc.org/issues/42224593 +https://crbug.com/webrtc/14330,https://issues.webrtc.org/issues/42224594 +https://crbug.com/webrtc/14331,https://issues.webrtc.org/issues/42224595 +https://crbug.com/webrtc/14332,https://issues.webrtc.org/issues/42224596 +https://crbug.com/webrtc/14333,https://issues.webrtc.org/issues/42224597 +https://crbug.com/webrtc/14334,https://issues.webrtc.org/issues/42224598 +https://crbug.com/webrtc/14335,https://issues.webrtc.org/issues/42224599 +https://crbug.com/webrtc/14336,https://issues.webrtc.org/issues/42224600 +https://crbug.com/webrtc/14337,https://issues.webrtc.org/issues/42224601 +https://crbug.com/webrtc/14338,https://issues.webrtc.org/issues/42224602 
+https://crbug.com/webrtc/14339,https://issues.webrtc.org/issues/42224603 +https://crbug.com/webrtc/1434,https://issues.webrtc.org/issues/42224604 +https://crbug.com/webrtc/14340,https://issues.webrtc.org/issues/42224605 +https://crbug.com/webrtc/14341,https://issues.webrtc.org/issues/42224606 +https://crbug.com/webrtc/14342,https://issues.webrtc.org/issues/42224607 +https://crbug.com/webrtc/14343,https://issues.webrtc.org/issues/42224608 +https://crbug.com/webrtc/14344,https://issues.webrtc.org/issues/42224609 +https://crbug.com/webrtc/14345,https://issues.webrtc.org/issues/42224610 +https://crbug.com/webrtc/14346,https://issues.webrtc.org/issues/42224611 +https://crbug.com/webrtc/14347,https://issues.webrtc.org/issues/42224612 +https://crbug.com/webrtc/14348,https://issues.webrtc.org/issues/42224613 +https://crbug.com/webrtc/14349,https://issues.webrtc.org/issues/42224614 +https://crbug.com/webrtc/1435,https://issues.webrtc.org/issues/42224615 +https://crbug.com/webrtc/14350,https://issues.webrtc.org/issues/42224616 +https://crbug.com/webrtc/14351,https://issues.webrtc.org/issues/42224617 +https://crbug.com/webrtc/14352,https://issues.webrtc.org/issues/42224618 +https://crbug.com/webrtc/14353,https://issues.webrtc.org/issues/42224619 +https://crbug.com/webrtc/14354,https://issues.webrtc.org/issues/42224620 +https://crbug.com/webrtc/14355,https://issues.webrtc.org/issues/42224621 +https://crbug.com/webrtc/14356,https://issues.webrtc.org/issues/42224622 +https://crbug.com/webrtc/14357,https://issues.webrtc.org/issues/42224623 +https://crbug.com/webrtc/14358,https://issues.webrtc.org/issues/42224624 +https://crbug.com/webrtc/14359,https://issues.webrtc.org/issues/42224625 +https://crbug.com/webrtc/1436,https://issues.webrtc.org/issues/42224626 +https://crbug.com/webrtc/14360,https://issues.webrtc.org/issues/42224627 +https://crbug.com/webrtc/14361,https://issues.webrtc.org/issues/42224628 +https://crbug.com/webrtc/14362,https://issues.webrtc.org/issues/42224629 +https://crbug.com/webrtc/14364,https://issues.webrtc.org/issues/42224630 +https://crbug.com/webrtc/14365,https://issues.webrtc.org/issues/42224631 +https://crbug.com/webrtc/14366,https://issues.webrtc.org/issues/42224632 +https://crbug.com/webrtc/14367,https://issues.webrtc.org/issues/42224633 +https://crbug.com/webrtc/14368,https://issues.webrtc.org/issues/42224634 +https://crbug.com/webrtc/14369,https://issues.webrtc.org/issues/42224635 +https://crbug.com/webrtc/1437,https://issues.webrtc.org/issues/42224636 +https://crbug.com/webrtc/14370,https://issues.webrtc.org/issues/42224637 +https://crbug.com/webrtc/14371,https://issues.webrtc.org/issues/42224638 +https://crbug.com/webrtc/14375,https://issues.webrtc.org/issues/42224639 +https://crbug.com/webrtc/14376,https://issues.webrtc.org/issues/42224640 +https://crbug.com/webrtc/14377,https://issues.webrtc.org/issues/42224641 +https://crbug.com/webrtc/14378,https://issues.webrtc.org/issues/42224642 +https://crbug.com/webrtc/14379,https://issues.webrtc.org/issues/42224643 +https://crbug.com/webrtc/1438,https://issues.webrtc.org/issues/42224644 +https://crbug.com/webrtc/14380,https://issues.webrtc.org/issues/42224645 +https://crbug.com/webrtc/14381,https://issues.webrtc.org/issues/42224646 +https://crbug.com/webrtc/14382,https://issues.webrtc.org/issues/42224647 +https://crbug.com/webrtc/14383,https://issues.webrtc.org/issues/42224648 +https://crbug.com/webrtc/14384,https://issues.webrtc.org/issues/42224649 +https://crbug.com/webrtc/14385,https://issues.webrtc.org/issues/42224650 
+https://crbug.com/webrtc/14386,https://issues.webrtc.org/issues/42224651 +https://crbug.com/webrtc/14387,https://issues.webrtc.org/issues/42224652 +https://crbug.com/webrtc/14388,https://issues.webrtc.org/issues/42224653 +https://crbug.com/webrtc/14389,https://issues.webrtc.org/issues/42224654 +https://crbug.com/webrtc/1439,https://issues.webrtc.org/issues/42224655 +https://crbug.com/webrtc/14390,https://issues.webrtc.org/issues/42224656 +https://crbug.com/webrtc/14391,https://issues.webrtc.org/issues/42224657 +https://crbug.com/webrtc/14392,https://issues.webrtc.org/issues/42224658 +https://crbug.com/webrtc/14393,https://issues.webrtc.org/issues/42224659 +https://crbug.com/webrtc/14394,https://issues.webrtc.org/issues/42224660 +https://crbug.com/webrtc/14395,https://issues.webrtc.org/issues/42224661 +https://crbug.com/webrtc/14396,https://issues.webrtc.org/issues/42224662 +https://crbug.com/webrtc/14397,https://issues.webrtc.org/issues/42224663 +https://crbug.com/webrtc/14398,https://issues.webrtc.org/issues/42224664 +https://crbug.com/webrtc/14399,https://issues.webrtc.org/issues/42224665 +https://crbug.com/webrtc/144,https://issues.webrtc.org/issues/42224666 +https://crbug.com/webrtc/1440,https://issues.webrtc.org/issues/42224667 +https://crbug.com/webrtc/14400,https://issues.webrtc.org/issues/42224668 +https://crbug.com/webrtc/14401,https://issues.webrtc.org/issues/42224669 +https://crbug.com/webrtc/14402,https://issues.webrtc.org/issues/42224670 +https://crbug.com/webrtc/14403,https://issues.webrtc.org/issues/42224671 +https://crbug.com/webrtc/14404,https://issues.webrtc.org/issues/42224672 +https://crbug.com/webrtc/14405,https://issues.webrtc.org/issues/42224673 +https://crbug.com/webrtc/14406,https://issues.webrtc.org/issues/42224674 +https://crbug.com/webrtc/14407,https://issues.webrtc.org/issues/42224675 +https://crbug.com/webrtc/14408,https://issues.webrtc.org/issues/42224676 +https://crbug.com/webrtc/14409,https://issues.webrtc.org/issues/42224677 +https://crbug.com/webrtc/1441,https://issues.webrtc.org/issues/42224678 +https://crbug.com/webrtc/14410,https://issues.webrtc.org/issues/42224679 +https://crbug.com/webrtc/14411,https://issues.webrtc.org/issues/42224680 +https://crbug.com/webrtc/14412,https://issues.webrtc.org/issues/42224681 +https://crbug.com/webrtc/14413,https://issues.webrtc.org/issues/42224682 +https://crbug.com/webrtc/14414,https://issues.webrtc.org/issues/42224683 +https://crbug.com/webrtc/14415,https://issues.webrtc.org/issues/42224684 +https://crbug.com/webrtc/14416,https://issues.webrtc.org/issues/42224685 +https://crbug.com/webrtc/14417,https://issues.webrtc.org/issues/42224686 +https://crbug.com/webrtc/14418,https://issues.webrtc.org/issues/42224687 +https://crbug.com/webrtc/1442,https://issues.webrtc.org/issues/42224688 +https://crbug.com/webrtc/14420,https://issues.webrtc.org/issues/42224689 +https://crbug.com/webrtc/14421,https://issues.webrtc.org/issues/42224690 +https://crbug.com/webrtc/14422,https://issues.webrtc.org/issues/42224691 +https://crbug.com/webrtc/14423,https://issues.webrtc.org/issues/42224692 +https://crbug.com/webrtc/14424,https://issues.webrtc.org/issues/42224693 +https://crbug.com/webrtc/14425,https://issues.webrtc.org/issues/42224694 +https://crbug.com/webrtc/14426,https://issues.webrtc.org/issues/42224695 +https://crbug.com/webrtc/14427,https://issues.webrtc.org/issues/42224696 +https://crbug.com/webrtc/14428,https://issues.webrtc.org/issues/42224697 +https://crbug.com/webrtc/14429,https://issues.webrtc.org/issues/42224698 
+https://crbug.com/webrtc/1443,https://issues.webrtc.org/issues/42224699 +https://crbug.com/webrtc/14430,https://issues.webrtc.org/issues/42224700 +https://crbug.com/webrtc/14431,https://issues.webrtc.org/issues/42224701 +https://crbug.com/webrtc/14432,https://issues.webrtc.org/issues/42224702 +https://crbug.com/webrtc/14433,https://issues.webrtc.org/issues/42224703 +https://crbug.com/webrtc/14434,https://issues.webrtc.org/issues/42224704 +https://crbug.com/webrtc/14435,https://issues.webrtc.org/issues/42224705 +https://crbug.com/webrtc/14436,https://issues.webrtc.org/issues/42224706 +https://crbug.com/webrtc/14437,https://issues.webrtc.org/issues/42224707 +https://crbug.com/webrtc/14438,https://issues.webrtc.org/issues/42224708 +https://crbug.com/webrtc/14439,https://issues.webrtc.org/issues/42224709 +https://crbug.com/webrtc/1444,https://issues.webrtc.org/issues/42224710 +https://crbug.com/webrtc/14440,https://issues.webrtc.org/issues/42224711 +https://crbug.com/webrtc/14441,https://issues.webrtc.org/issues/42224712 +https://crbug.com/webrtc/14442,https://issues.webrtc.org/issues/42224713 +https://crbug.com/webrtc/14443,https://issues.webrtc.org/issues/42224714 +https://crbug.com/webrtc/14444,https://issues.webrtc.org/issues/42224715 +https://crbug.com/webrtc/14445,https://issues.webrtc.org/issues/42224716 +https://crbug.com/webrtc/14446,https://issues.webrtc.org/issues/42224717 +https://crbug.com/webrtc/14447,https://issues.webrtc.org/issues/42224718 +https://crbug.com/webrtc/14448,https://issues.webrtc.org/issues/42224719 +https://crbug.com/webrtc/14449,https://issues.webrtc.org/issues/42224720 +https://crbug.com/webrtc/1445,https://issues.webrtc.org/issues/42224721 +https://crbug.com/webrtc/14450,https://issues.webrtc.org/issues/42224722 +https://crbug.com/webrtc/14451,https://issues.webrtc.org/issues/42224723 +https://crbug.com/webrtc/14452,https://issues.webrtc.org/issues/42224724 +https://crbug.com/webrtc/14453,https://issues.webrtc.org/issues/42224725 +https://crbug.com/webrtc/14454,https://issues.webrtc.org/issues/42224726 +https://crbug.com/webrtc/14455,https://issues.webrtc.org/issues/42224727 +https://crbug.com/webrtc/14456,https://issues.webrtc.org/issues/42224728 +https://crbug.com/webrtc/14457,https://issues.webrtc.org/issues/42224729 +https://crbug.com/webrtc/14458,https://issues.webrtc.org/issues/42224730 +https://crbug.com/webrtc/14459,https://issues.webrtc.org/issues/42224731 +https://crbug.com/webrtc/1446,https://issues.webrtc.org/issues/42224732 +https://crbug.com/webrtc/14460,https://issues.webrtc.org/issues/42224733 +https://crbug.com/webrtc/14461,https://issues.webrtc.org/issues/42224734 +https://crbug.com/webrtc/14462,https://issues.webrtc.org/issues/42224735 +https://crbug.com/webrtc/14463,https://issues.webrtc.org/issues/42224736 +https://crbug.com/webrtc/14464,https://issues.webrtc.org/issues/42224737 +https://crbug.com/webrtc/14465,https://issues.webrtc.org/issues/42224738 +https://crbug.com/webrtc/14466,https://issues.webrtc.org/issues/42224739 +https://crbug.com/webrtc/14467,https://issues.webrtc.org/issues/42224740 +https://crbug.com/webrtc/14468,https://issues.webrtc.org/issues/42224741 +https://crbug.com/webrtc/14469,https://issues.webrtc.org/issues/42224742 +https://crbug.com/webrtc/1447,https://issues.webrtc.org/issues/42224743 +https://crbug.com/webrtc/14470,https://issues.webrtc.org/issues/42224744 +https://crbug.com/webrtc/14471,https://issues.webrtc.org/issues/42224745 +https://crbug.com/webrtc/14472,https://issues.webrtc.org/issues/42224746 
+https://crbug.com/webrtc/14473,https://issues.webrtc.org/issues/42224747 +https://crbug.com/webrtc/14474,https://issues.webrtc.org/issues/42224748 +https://crbug.com/webrtc/14475,https://issues.webrtc.org/issues/42224749 +https://crbug.com/webrtc/14476,https://issues.webrtc.org/issues/42224750 +https://crbug.com/webrtc/14477,https://issues.webrtc.org/issues/42224751 +https://crbug.com/webrtc/14478,https://issues.webrtc.org/issues/42224752 +https://crbug.com/webrtc/14479,https://issues.webrtc.org/issues/42224753 +https://crbug.com/webrtc/1448,https://issues.webrtc.org/issues/42224754 +https://crbug.com/webrtc/14480,https://issues.webrtc.org/issues/42224755 +https://crbug.com/webrtc/14481,https://issues.webrtc.org/issues/42224756 +https://crbug.com/webrtc/14482,https://issues.webrtc.org/issues/42224757 +https://crbug.com/webrtc/14484,https://issues.webrtc.org/issues/42224758 +https://crbug.com/webrtc/14485,https://issues.webrtc.org/issues/42224759 +https://crbug.com/webrtc/14486,https://issues.webrtc.org/issues/42224760 +https://crbug.com/webrtc/14487,https://issues.webrtc.org/issues/42224761 +https://crbug.com/webrtc/14488,https://issues.webrtc.org/issues/42224762 +https://crbug.com/webrtc/14489,https://issues.webrtc.org/issues/42224763 +https://crbug.com/webrtc/1449,https://issues.webrtc.org/issues/42224764 +https://crbug.com/webrtc/14490,https://issues.webrtc.org/issues/42224765 +https://crbug.com/webrtc/14491,https://issues.webrtc.org/issues/42224766 +https://crbug.com/webrtc/14492,https://issues.webrtc.org/issues/42224767 +https://crbug.com/webrtc/14493,https://issues.webrtc.org/issues/42224768 +https://crbug.com/webrtc/14494,https://issues.webrtc.org/issues/42224769 +https://crbug.com/webrtc/14495,https://issues.webrtc.org/issues/42224770 +https://crbug.com/webrtc/14496,https://issues.webrtc.org/issues/42224771 +https://crbug.com/webrtc/14497,https://issues.webrtc.org/issues/42224772 +https://crbug.com/webrtc/14498,https://issues.webrtc.org/issues/42224773 +https://crbug.com/webrtc/14499,https://issues.webrtc.org/issues/42224774 +https://crbug.com/webrtc/145,https://issues.webrtc.org/issues/42224775 +https://crbug.com/webrtc/1450,https://issues.webrtc.org/issues/42224776 +https://crbug.com/webrtc/14500,https://issues.webrtc.org/issues/42224777 +https://crbug.com/webrtc/14501,https://issues.webrtc.org/issues/42224778 +https://crbug.com/webrtc/14502,https://issues.webrtc.org/issues/42224779 +https://crbug.com/webrtc/14503,https://issues.webrtc.org/issues/42224780 +https://crbug.com/webrtc/14504,https://issues.webrtc.org/issues/42224781 +https://crbug.com/webrtc/14505,https://issues.webrtc.org/issues/42224782 +https://crbug.com/webrtc/14506,https://issues.webrtc.org/issues/42224783 +https://crbug.com/webrtc/14507,https://issues.webrtc.org/issues/42224784 +https://crbug.com/webrtc/14508,https://issues.webrtc.org/issues/42224785 +https://crbug.com/webrtc/14509,https://issues.webrtc.org/issues/42224786 +https://crbug.com/webrtc/1451,https://issues.webrtc.org/issues/42224787 +https://crbug.com/webrtc/14510,https://issues.webrtc.org/issues/42224788 +https://crbug.com/webrtc/14511,https://issues.webrtc.org/issues/42224789 +https://crbug.com/webrtc/14512,https://issues.webrtc.org/issues/42224790 +https://crbug.com/webrtc/14513,https://issues.webrtc.org/issues/42224791 +https://crbug.com/webrtc/14514,https://issues.webrtc.org/issues/42224792 +https://crbug.com/webrtc/14515,https://issues.webrtc.org/issues/42224793 +https://crbug.com/webrtc/14516,https://issues.webrtc.org/issues/42224794 
+https://crbug.com/webrtc/14517,https://issues.webrtc.org/issues/42224795 +https://crbug.com/webrtc/14518,https://issues.webrtc.org/issues/42224796 +https://crbug.com/webrtc/14519,https://issues.webrtc.org/issues/42224797 +https://crbug.com/webrtc/1452,https://issues.webrtc.org/issues/42224798 +https://crbug.com/webrtc/14520,https://issues.webrtc.org/issues/42224799 +https://crbug.com/webrtc/14521,https://issues.webrtc.org/issues/42224800 +https://crbug.com/webrtc/14522,https://issues.webrtc.org/issues/42224801 +https://crbug.com/webrtc/14523,https://issues.webrtc.org/issues/42224802 +https://crbug.com/webrtc/14524,https://issues.webrtc.org/issues/42224803 +https://crbug.com/webrtc/14525,https://issues.webrtc.org/issues/42224804 +https://crbug.com/webrtc/14526,https://issues.webrtc.org/issues/42224805 +https://crbug.com/webrtc/14527,https://issues.webrtc.org/issues/42224806 +https://crbug.com/webrtc/14528,https://issues.webrtc.org/issues/42224807 +https://crbug.com/webrtc/14529,https://issues.webrtc.org/issues/42224808 +https://crbug.com/webrtc/1453,https://issues.webrtc.org/issues/42224809 +https://crbug.com/webrtc/14530,https://issues.webrtc.org/issues/42224810 +https://crbug.com/webrtc/14531,https://issues.webrtc.org/issues/42224811 +https://crbug.com/webrtc/14532,https://issues.webrtc.org/issues/42224812 +https://crbug.com/webrtc/14533,https://issues.webrtc.org/issues/42224813 +https://crbug.com/webrtc/14534,https://issues.webrtc.org/issues/42224814 +https://crbug.com/webrtc/14535,https://issues.webrtc.org/issues/42224815 +https://crbug.com/webrtc/14536,https://issues.webrtc.org/issues/42224816 +https://crbug.com/webrtc/14537,https://issues.webrtc.org/issues/42224817 +https://crbug.com/webrtc/14538,https://issues.webrtc.org/issues/42224818 +https://crbug.com/webrtc/14539,https://issues.webrtc.org/issues/42224819 +https://crbug.com/webrtc/1454,https://issues.webrtc.org/issues/42224820 +https://crbug.com/webrtc/14540,https://issues.webrtc.org/issues/42224821 +https://crbug.com/webrtc/14541,https://issues.webrtc.org/issues/42224822 +https://crbug.com/webrtc/14542,https://issues.webrtc.org/issues/42224823 +https://crbug.com/webrtc/14543,https://issues.webrtc.org/issues/42224824 +https://crbug.com/webrtc/14544,https://issues.webrtc.org/issues/42224825 +https://crbug.com/webrtc/14545,https://issues.webrtc.org/issues/42224826 +https://crbug.com/webrtc/14546,https://issues.webrtc.org/issues/42224827 +https://crbug.com/webrtc/14547,https://issues.webrtc.org/issues/42224828 +https://crbug.com/webrtc/14548,https://issues.webrtc.org/issues/42224829 +https://crbug.com/webrtc/14549,https://issues.webrtc.org/issues/42224830 +https://crbug.com/webrtc/1455,https://issues.webrtc.org/issues/42224831 +https://crbug.com/webrtc/14550,https://issues.webrtc.org/issues/42224832 +https://crbug.com/webrtc/14551,https://issues.webrtc.org/issues/42224833 +https://crbug.com/webrtc/14552,https://issues.webrtc.org/issues/42224834 +https://crbug.com/webrtc/14553,https://issues.webrtc.org/issues/42224835 +https://crbug.com/webrtc/14554,https://issues.webrtc.org/issues/42224836 +https://crbug.com/webrtc/14555,https://issues.webrtc.org/issues/42224837 +https://crbug.com/webrtc/14556,https://issues.webrtc.org/issues/42224838 +https://crbug.com/webrtc/14557,https://issues.webrtc.org/issues/42224839 +https://crbug.com/webrtc/14558,https://issues.webrtc.org/issues/42224840 +https://crbug.com/webrtc/14559,https://issues.webrtc.org/issues/42224841 +https://crbug.com/webrtc/1456,https://issues.webrtc.org/issues/42224842 
+https://crbug.com/webrtc/14560,https://issues.webrtc.org/issues/42224843 +https://crbug.com/webrtc/14561,https://issues.webrtc.org/issues/42224844 +https://crbug.com/webrtc/14562,https://issues.webrtc.org/issues/42224845 +https://crbug.com/webrtc/14563,https://issues.webrtc.org/issues/42224846 +https://crbug.com/webrtc/14564,https://issues.webrtc.org/issues/42224847 +https://crbug.com/webrtc/14565,https://issues.webrtc.org/issues/42224848 +https://crbug.com/webrtc/14566,https://issues.webrtc.org/issues/42224849 +https://crbug.com/webrtc/14567,https://issues.webrtc.org/issues/42224850 +https://crbug.com/webrtc/14568,https://issues.webrtc.org/issues/42224851 +https://crbug.com/webrtc/14569,https://issues.webrtc.org/issues/42224852 +https://crbug.com/webrtc/1457,https://issues.webrtc.org/issues/42224853 +https://crbug.com/webrtc/14570,https://issues.webrtc.org/issues/42224854 +https://crbug.com/webrtc/14571,https://issues.webrtc.org/issues/42224855 +https://crbug.com/webrtc/14572,https://issues.webrtc.org/issues/42224856 +https://crbug.com/webrtc/14573,https://issues.webrtc.org/issues/42224857 +https://crbug.com/webrtc/14574,https://issues.webrtc.org/issues/42224858 +https://crbug.com/webrtc/14575,https://issues.webrtc.org/issues/42224859 +https://crbug.com/webrtc/14576,https://issues.webrtc.org/issues/42224860 +https://crbug.com/webrtc/14577,https://issues.webrtc.org/issues/42224861 +https://crbug.com/webrtc/14579,https://issues.webrtc.org/issues/42224862 +https://crbug.com/webrtc/1458,https://issues.webrtc.org/issues/42224863 +https://crbug.com/webrtc/14580,https://issues.webrtc.org/issues/42224864 +https://crbug.com/webrtc/14581,https://issues.webrtc.org/issues/42224865 +https://crbug.com/webrtc/14582,https://issues.webrtc.org/issues/42224866 +https://crbug.com/webrtc/14583,https://issues.webrtc.org/issues/42224867 +https://crbug.com/webrtc/14584,https://issues.webrtc.org/issues/42224868 +https://crbug.com/webrtc/14585,https://issues.webrtc.org/issues/42224869 +https://crbug.com/webrtc/14586,https://issues.webrtc.org/issues/42224870 +https://crbug.com/webrtc/14587,https://issues.webrtc.org/issues/42224871 +https://crbug.com/webrtc/14588,https://issues.webrtc.org/issues/42224872 +https://crbug.com/webrtc/14589,https://issues.webrtc.org/issues/42224873 +https://crbug.com/webrtc/1459,https://issues.webrtc.org/issues/42224874 +https://crbug.com/webrtc/14590,https://issues.webrtc.org/issues/42224875 +https://crbug.com/webrtc/14591,https://issues.webrtc.org/issues/42224876 +https://crbug.com/webrtc/14592,https://issues.webrtc.org/issues/42224877 +https://crbug.com/webrtc/14593,https://issues.webrtc.org/issues/42224878 +https://crbug.com/webrtc/14594,https://issues.webrtc.org/issues/42224879 +https://crbug.com/webrtc/14595,https://issues.webrtc.org/issues/42224880 +https://crbug.com/webrtc/14596,https://issues.webrtc.org/issues/42224881 +https://crbug.com/webrtc/14597,https://issues.webrtc.org/issues/42224882 +https://crbug.com/webrtc/14598,https://issues.webrtc.org/issues/42224883 +https://crbug.com/webrtc/14599,https://issues.webrtc.org/issues/42224884 +https://crbug.com/webrtc/146,https://issues.webrtc.org/issues/42224885 +https://crbug.com/webrtc/1460,https://issues.webrtc.org/issues/42224886 +https://crbug.com/webrtc/14600,https://issues.webrtc.org/issues/42224887 +https://crbug.com/webrtc/14601,https://issues.webrtc.org/issues/42224888 +https://crbug.com/webrtc/14602,https://issues.webrtc.org/issues/42224889 +https://crbug.com/webrtc/14603,https://issues.webrtc.org/issues/42224890 
+https://crbug.com/webrtc/14604,https://issues.webrtc.org/issues/42224891 +https://crbug.com/webrtc/14606,https://issues.webrtc.org/issues/42224892 +https://crbug.com/webrtc/14607,https://issues.webrtc.org/issues/42224893 +https://crbug.com/webrtc/14608,https://issues.webrtc.org/issues/42224894 +https://crbug.com/webrtc/14609,https://issues.webrtc.org/issues/42224895 +https://crbug.com/webrtc/1461,https://issues.webrtc.org/issues/42224896 +https://crbug.com/webrtc/14610,https://issues.webrtc.org/issues/42224897 +https://crbug.com/webrtc/14611,https://issues.webrtc.org/issues/42224898 +https://crbug.com/webrtc/14612,https://issues.webrtc.org/issues/42224899 +https://crbug.com/webrtc/14613,https://issues.webrtc.org/issues/42224900 +https://crbug.com/webrtc/14614,https://issues.webrtc.org/issues/42224901 +https://crbug.com/webrtc/14615,https://issues.webrtc.org/issues/42224902 +https://crbug.com/webrtc/14616,https://issues.webrtc.org/issues/42224903 +https://crbug.com/webrtc/14617,https://issues.webrtc.org/issues/42224904 +https://crbug.com/webrtc/14618,https://issues.webrtc.org/issues/42224905 +https://crbug.com/webrtc/14619,https://issues.webrtc.org/issues/42224906 +https://crbug.com/webrtc/1462,https://issues.webrtc.org/issues/42224907 +https://crbug.com/webrtc/14620,https://issues.webrtc.org/issues/42224908 +https://crbug.com/webrtc/14621,https://issues.webrtc.org/issues/42224909 +https://crbug.com/webrtc/14622,https://issues.webrtc.org/issues/42224910 +https://crbug.com/webrtc/14623,https://issues.webrtc.org/issues/42224911 +https://crbug.com/webrtc/14624,https://issues.webrtc.org/issues/42224912 +https://crbug.com/webrtc/14625,https://issues.webrtc.org/issues/42224913 +https://crbug.com/webrtc/14626,https://issues.webrtc.org/issues/42224914 +https://crbug.com/webrtc/14627,https://issues.webrtc.org/issues/42224915 +https://crbug.com/webrtc/14628,https://issues.webrtc.org/issues/42224916 +https://crbug.com/webrtc/14629,https://issues.webrtc.org/issues/42224917 +https://crbug.com/webrtc/1463,https://issues.webrtc.org/issues/42224918 +https://crbug.com/webrtc/14630,https://issues.webrtc.org/issues/42224919 +https://crbug.com/webrtc/14631,https://issues.webrtc.org/issues/42224920 +https://crbug.com/webrtc/14632,https://issues.webrtc.org/issues/42224921 +https://crbug.com/webrtc/14633,https://issues.webrtc.org/issues/42224922 +https://crbug.com/webrtc/14634,https://issues.webrtc.org/issues/42224923 +https://crbug.com/webrtc/14635,https://issues.webrtc.org/issues/42224924 +https://crbug.com/webrtc/14636,https://issues.webrtc.org/issues/42224925 +https://crbug.com/webrtc/14637,https://issues.webrtc.org/issues/42224926 +https://crbug.com/webrtc/14638,https://issues.webrtc.org/issues/42224927 +https://crbug.com/webrtc/14639,https://issues.webrtc.org/issues/42224928 +https://crbug.com/webrtc/1464,https://issues.webrtc.org/issues/42224929 +https://crbug.com/webrtc/14640,https://issues.webrtc.org/issues/42224930 +https://crbug.com/webrtc/14641,https://issues.webrtc.org/issues/42224931 +https://crbug.com/webrtc/14642,https://issues.webrtc.org/issues/42224932 +https://crbug.com/webrtc/14643,https://issues.webrtc.org/issues/42224933 +https://crbug.com/webrtc/14644,https://issues.webrtc.org/issues/42224934 +https://crbug.com/webrtc/14645,https://issues.webrtc.org/issues/42224935 +https://crbug.com/webrtc/14646,https://issues.webrtc.org/issues/42224936 +https://crbug.com/webrtc/14647,https://issues.webrtc.org/issues/42224937 +https://crbug.com/webrtc/14648,https://issues.webrtc.org/issues/42224938 
+https://crbug.com/webrtc/14649,https://issues.webrtc.org/issues/42224939 +https://crbug.com/webrtc/1465,https://issues.webrtc.org/issues/42224940 +https://crbug.com/webrtc/14650,https://issues.webrtc.org/issues/42224941 +https://crbug.com/webrtc/14651,https://issues.webrtc.org/issues/42224942 +https://crbug.com/webrtc/14654,https://issues.webrtc.org/issues/42224943 +https://crbug.com/webrtc/14655,https://issues.webrtc.org/issues/42224944 +https://crbug.com/webrtc/14656,https://issues.webrtc.org/issues/42224945 +https://crbug.com/webrtc/14657,https://issues.webrtc.org/issues/42224946 +https://crbug.com/webrtc/14658,https://issues.webrtc.org/issues/42224947 +https://crbug.com/webrtc/14659,https://issues.webrtc.org/issues/42224948 +https://crbug.com/webrtc/1466,https://issues.webrtc.org/issues/42224949 +https://crbug.com/webrtc/14660,https://issues.webrtc.org/issues/42224950 +https://crbug.com/webrtc/14661,https://issues.webrtc.org/issues/42224951 +https://crbug.com/webrtc/14662,https://issues.webrtc.org/issues/42224952 +https://crbug.com/webrtc/14663,https://issues.webrtc.org/issues/42224953 +https://crbug.com/webrtc/14664,https://issues.webrtc.org/issues/42224954 +https://crbug.com/webrtc/14665,https://issues.webrtc.org/issues/42224955 +https://crbug.com/webrtc/14666,https://issues.webrtc.org/issues/42224956 +https://crbug.com/webrtc/14667,https://issues.webrtc.org/issues/42224957 +https://crbug.com/webrtc/14668,https://issues.webrtc.org/issues/42224958 +https://crbug.com/webrtc/14669,https://issues.webrtc.org/issues/42224959 +https://crbug.com/webrtc/1467,https://issues.webrtc.org/issues/42224960 +https://crbug.com/webrtc/14670,https://issues.webrtc.org/issues/42224961 +https://crbug.com/webrtc/14672,https://issues.webrtc.org/issues/42224962 +https://crbug.com/webrtc/14673,https://issues.webrtc.org/issues/42224963 +https://crbug.com/webrtc/14675,https://issues.webrtc.org/issues/42224964 +https://crbug.com/webrtc/14676,https://issues.webrtc.org/issues/42224965 +https://crbug.com/webrtc/14677,https://issues.webrtc.org/issues/42224966 +https://crbug.com/webrtc/14678,https://issues.webrtc.org/issues/42224967 +https://crbug.com/webrtc/14679,https://issues.webrtc.org/issues/42224968 +https://crbug.com/webrtc/1468,https://issues.webrtc.org/issues/42224969 +https://crbug.com/webrtc/14680,https://issues.webrtc.org/issues/42224970 +https://crbug.com/webrtc/14681,https://issues.webrtc.org/issues/42224971 +https://crbug.com/webrtc/14682,https://issues.webrtc.org/issues/42224972 +https://crbug.com/webrtc/14683,https://issues.webrtc.org/issues/42224973 +https://crbug.com/webrtc/14684,https://issues.webrtc.org/issues/42224974 +https://crbug.com/webrtc/14685,https://issues.webrtc.org/issues/42224975 +https://crbug.com/webrtc/14686,https://issues.webrtc.org/issues/42224976 +https://crbug.com/webrtc/14687,https://issues.webrtc.org/issues/42224977 +https://crbug.com/webrtc/14688,https://issues.webrtc.org/issues/42224978 +https://crbug.com/webrtc/14689,https://issues.webrtc.org/issues/42224979 +https://crbug.com/webrtc/1469,https://issues.webrtc.org/issues/42224980 +https://crbug.com/webrtc/14690,https://issues.webrtc.org/issues/42224981 +https://crbug.com/webrtc/14691,https://issues.webrtc.org/issues/42224982 +https://crbug.com/webrtc/14692,https://issues.webrtc.org/issues/42224983 +https://crbug.com/webrtc/14693,https://issues.webrtc.org/issues/42224984 +https://crbug.com/webrtc/14694,https://issues.webrtc.org/issues/42224985 +https://crbug.com/webrtc/14695,https://issues.webrtc.org/issues/42224986 
+https://crbug.com/webrtc/14696,https://issues.webrtc.org/issues/42224987 +https://crbug.com/webrtc/14697,https://issues.webrtc.org/issues/42224988 +https://crbug.com/webrtc/14698,https://issues.webrtc.org/issues/42224989 +https://crbug.com/webrtc/14699,https://issues.webrtc.org/issues/42224990 +https://crbug.com/webrtc/1470,https://issues.webrtc.org/issues/42224991 +https://crbug.com/webrtc/14700,https://issues.webrtc.org/issues/42224992 +https://crbug.com/webrtc/14701,https://issues.webrtc.org/issues/42224993 +https://crbug.com/webrtc/14702,https://issues.webrtc.org/issues/42224994 +https://crbug.com/webrtc/14703,https://issues.webrtc.org/issues/42224995 +https://crbug.com/webrtc/14704,https://issues.webrtc.org/issues/42224996 +https://crbug.com/webrtc/14705,https://issues.webrtc.org/issues/42224997 +https://crbug.com/webrtc/14706,https://issues.webrtc.org/issues/42224998 +https://crbug.com/webrtc/14707,https://issues.webrtc.org/issues/42224999 +https://crbug.com/webrtc/14708,https://issues.webrtc.org/issues/42225000 +https://crbug.com/webrtc/14709,https://issues.webrtc.org/issues/42225001 +https://crbug.com/webrtc/1471,https://issues.webrtc.org/issues/42225002 +https://crbug.com/webrtc/14710,https://issues.webrtc.org/issues/42225003 +https://crbug.com/webrtc/14711,https://issues.webrtc.org/issues/42225004 +https://crbug.com/webrtc/14712,https://issues.webrtc.org/issues/42225005 +https://crbug.com/webrtc/14713,https://issues.webrtc.org/issues/42225006 +https://crbug.com/webrtc/14714,https://issues.webrtc.org/issues/42225007 +https://crbug.com/webrtc/14715,https://issues.webrtc.org/issues/42225008 +https://crbug.com/webrtc/14716,https://issues.webrtc.org/issues/42225009 +https://crbug.com/webrtc/14717,https://issues.webrtc.org/issues/42225010 +https://crbug.com/webrtc/14718,https://issues.webrtc.org/issues/42225011 +https://crbug.com/webrtc/14719,https://issues.webrtc.org/issues/42225012 +https://crbug.com/webrtc/1472,https://issues.webrtc.org/issues/42225013 +https://crbug.com/webrtc/14720,https://issues.webrtc.org/issues/42225014 +https://crbug.com/webrtc/14721,https://issues.webrtc.org/issues/42225015 +https://crbug.com/webrtc/14722,https://issues.webrtc.org/issues/42225016 +https://crbug.com/webrtc/14723,https://issues.webrtc.org/issues/42225017 +https://crbug.com/webrtc/14724,https://issues.webrtc.org/issues/42225018 +https://crbug.com/webrtc/14725,https://issues.webrtc.org/issues/42225019 +https://crbug.com/webrtc/14726,https://issues.webrtc.org/issues/42225020 +https://crbug.com/webrtc/14727,https://issues.webrtc.org/issues/42225021 +https://crbug.com/webrtc/14728,https://issues.webrtc.org/issues/42225022 +https://crbug.com/webrtc/14729,https://issues.webrtc.org/issues/42225023 +https://crbug.com/webrtc/1473,https://issues.webrtc.org/issues/42225024 +https://crbug.com/webrtc/14730,https://issues.webrtc.org/issues/42225025 +https://crbug.com/webrtc/14731,https://issues.webrtc.org/issues/42225026 +https://crbug.com/webrtc/14732,https://issues.webrtc.org/issues/42225027 +https://crbug.com/webrtc/14733,https://issues.webrtc.org/issues/42225028 +https://crbug.com/webrtc/14734,https://issues.webrtc.org/issues/42225029 +https://crbug.com/webrtc/14735,https://issues.webrtc.org/issues/42225030 +https://crbug.com/webrtc/14736,https://issues.webrtc.org/issues/42225031 +https://crbug.com/webrtc/14737,https://issues.webrtc.org/issues/42225032 +https://crbug.com/webrtc/14738,https://issues.webrtc.org/issues/42225033 +https://crbug.com/webrtc/14739,https://issues.webrtc.org/issues/42225034 
+https://crbug.com/webrtc/1474,https://issues.webrtc.org/issues/42225035 +https://crbug.com/webrtc/14741,https://issues.webrtc.org/issues/42225036 +https://crbug.com/webrtc/14742,https://issues.webrtc.org/issues/42225037 +https://crbug.com/webrtc/14743,https://issues.webrtc.org/issues/42225038 +https://crbug.com/webrtc/14745,https://issues.webrtc.org/issues/42225039 +https://crbug.com/webrtc/14746,https://issues.webrtc.org/issues/42225040 +https://crbug.com/webrtc/14747,https://issues.webrtc.org/issues/42225041 +https://crbug.com/webrtc/14748,https://issues.webrtc.org/issues/42225042 +https://crbug.com/webrtc/14749,https://issues.webrtc.org/issues/42225043 +https://crbug.com/webrtc/1475,https://issues.webrtc.org/issues/42225044 +https://crbug.com/webrtc/14751,https://issues.webrtc.org/issues/42225045 +https://crbug.com/webrtc/14752,https://issues.webrtc.org/issues/42225046 +https://crbug.com/webrtc/14753,https://issues.webrtc.org/issues/42225047 +https://crbug.com/webrtc/14754,https://issues.webrtc.org/issues/42225048 +https://crbug.com/webrtc/14755,https://issues.webrtc.org/issues/42225049 +https://crbug.com/webrtc/14756,https://issues.webrtc.org/issues/42225050 +https://crbug.com/webrtc/14757,https://issues.webrtc.org/issues/42225051 +https://crbug.com/webrtc/14758,https://issues.webrtc.org/issues/42225052 +https://crbug.com/webrtc/14759,https://issues.webrtc.org/issues/42225053 +https://crbug.com/webrtc/1476,https://issues.webrtc.org/issues/42225054 +https://crbug.com/webrtc/14760,https://issues.webrtc.org/issues/42225055 +https://crbug.com/webrtc/14761,https://issues.webrtc.org/issues/42225056 +https://crbug.com/webrtc/14762,https://issues.webrtc.org/issues/42225057 +https://crbug.com/webrtc/14763,https://issues.webrtc.org/issues/42225058 +https://crbug.com/webrtc/14764,https://issues.webrtc.org/issues/42225059 +https://crbug.com/webrtc/14765,https://issues.webrtc.org/issues/42225060 +https://crbug.com/webrtc/14766,https://issues.webrtc.org/issues/42225061 +https://crbug.com/webrtc/14767,https://issues.webrtc.org/issues/42225062 +https://crbug.com/webrtc/14768,https://issues.webrtc.org/issues/42225063 +https://crbug.com/webrtc/14769,https://issues.webrtc.org/issues/42225064 +https://crbug.com/webrtc/1477,https://issues.webrtc.org/issues/42225065 +https://crbug.com/webrtc/14770,https://issues.webrtc.org/issues/42225066 +https://crbug.com/webrtc/14771,https://issues.webrtc.org/issues/42225067 +https://crbug.com/webrtc/14772,https://issues.webrtc.org/issues/42225068 +https://crbug.com/webrtc/14774,https://issues.webrtc.org/issues/42225069 +https://crbug.com/webrtc/14775,https://issues.webrtc.org/issues/42225070 +https://crbug.com/webrtc/14776,https://issues.webrtc.org/issues/42225071 +https://crbug.com/webrtc/14777,https://issues.webrtc.org/issues/42225072 +https://crbug.com/webrtc/14778,https://issues.webrtc.org/issues/42225073 +https://crbug.com/webrtc/14779,https://issues.webrtc.org/issues/42225074 +https://crbug.com/webrtc/1478,https://issues.webrtc.org/issues/42225075 +https://crbug.com/webrtc/14780,https://issues.webrtc.org/issues/42225076 +https://crbug.com/webrtc/14781,https://issues.webrtc.org/issues/42225077 +https://crbug.com/webrtc/14783,https://issues.webrtc.org/issues/42225078 +https://crbug.com/webrtc/14784,https://issues.webrtc.org/issues/42225079 +https://crbug.com/webrtc/14785,https://issues.webrtc.org/issues/42225080 +https://crbug.com/webrtc/14786,https://issues.webrtc.org/issues/42225081 +https://crbug.com/webrtc/14787,https://issues.webrtc.org/issues/42225082 
+https://crbug.com/webrtc/14788,https://issues.webrtc.org/issues/42225083 +https://crbug.com/webrtc/14789,https://issues.webrtc.org/issues/42225084 +https://crbug.com/webrtc/1479,https://issues.webrtc.org/issues/42225085 +https://crbug.com/webrtc/14790,https://issues.webrtc.org/issues/42225086 +https://crbug.com/webrtc/14791,https://issues.webrtc.org/issues/42225087 +https://crbug.com/webrtc/14792,https://issues.webrtc.org/issues/42225088 +https://crbug.com/webrtc/14793,https://issues.webrtc.org/issues/42225089 +https://crbug.com/webrtc/14794,https://issues.webrtc.org/issues/42225090 +https://crbug.com/webrtc/14795,https://issues.webrtc.org/issues/42225091 +https://crbug.com/webrtc/14796,https://issues.webrtc.org/issues/42225092 +https://crbug.com/webrtc/14797,https://issues.webrtc.org/issues/42225093 +https://crbug.com/webrtc/14798,https://issues.webrtc.org/issues/42225094 +https://crbug.com/webrtc/14799,https://issues.webrtc.org/issues/42225095 +https://crbug.com/webrtc/148,https://issues.webrtc.org/issues/42225096 +https://crbug.com/webrtc/1480,https://issues.webrtc.org/issues/42225097 +https://crbug.com/webrtc/14800,https://issues.webrtc.org/issues/42225098 +https://crbug.com/webrtc/14801,https://issues.webrtc.org/issues/42225099 +https://crbug.com/webrtc/14802,https://issues.webrtc.org/issues/42225100 +https://crbug.com/webrtc/14803,https://issues.webrtc.org/issues/42225101 +https://crbug.com/webrtc/14804,https://issues.webrtc.org/issues/42225102 +https://crbug.com/webrtc/14805,https://issues.webrtc.org/issues/42225103 +https://crbug.com/webrtc/14806,https://issues.webrtc.org/issues/42225104 +https://crbug.com/webrtc/14807,https://issues.webrtc.org/issues/42225105 +https://crbug.com/webrtc/14808,https://issues.webrtc.org/issues/42225106 +https://crbug.com/webrtc/14809,https://issues.webrtc.org/issues/42225107 +https://crbug.com/webrtc/1481,https://issues.webrtc.org/issues/42225108 +https://crbug.com/webrtc/14810,https://issues.webrtc.org/issues/42225109 +https://crbug.com/webrtc/14811,https://issues.webrtc.org/issues/42225110 +https://crbug.com/webrtc/14812,https://issues.webrtc.org/issues/42225111 +https://crbug.com/webrtc/14813,https://issues.webrtc.org/issues/42225112 +https://crbug.com/webrtc/14814,https://issues.webrtc.org/issues/42225113 +https://crbug.com/webrtc/14815,https://issues.webrtc.org/issues/42225114 +https://crbug.com/webrtc/14816,https://issues.webrtc.org/issues/42225115 +https://crbug.com/webrtc/14817,https://issues.webrtc.org/issues/42225116 +https://crbug.com/webrtc/14819,https://issues.webrtc.org/issues/42225117 +https://crbug.com/webrtc/1482,https://issues.webrtc.org/issues/42225118 +https://crbug.com/webrtc/14820,https://issues.webrtc.org/issues/42225119 +https://crbug.com/webrtc/14821,https://issues.webrtc.org/issues/42225120 +https://crbug.com/webrtc/14822,https://issues.webrtc.org/issues/42225121 +https://crbug.com/webrtc/14823,https://issues.webrtc.org/issues/42225122 +https://crbug.com/webrtc/14824,https://issues.webrtc.org/issues/42225123 +https://crbug.com/webrtc/14825,https://issues.webrtc.org/issues/42225124 +https://crbug.com/webrtc/14826,https://issues.webrtc.org/issues/42225125 +https://crbug.com/webrtc/14828,https://issues.webrtc.org/issues/42225126 +https://crbug.com/webrtc/14829,https://issues.webrtc.org/issues/42225127 +https://crbug.com/webrtc/1483,https://issues.webrtc.org/issues/42225128 +https://crbug.com/webrtc/14830,https://issues.webrtc.org/issues/42225129 +https://crbug.com/webrtc/14831,https://issues.webrtc.org/issues/42225130 
+https://crbug.com/webrtc/14832,https://issues.webrtc.org/issues/42225131 +https://crbug.com/webrtc/14833,https://issues.webrtc.org/issues/42225132 +https://crbug.com/webrtc/14834,https://issues.webrtc.org/issues/42225133 +https://crbug.com/webrtc/14835,https://issues.webrtc.org/issues/42225134 +https://crbug.com/webrtc/14836,https://issues.webrtc.org/issues/42225135 +https://crbug.com/webrtc/14837,https://issues.webrtc.org/issues/42225136 +https://crbug.com/webrtc/14838,https://issues.webrtc.org/issues/42225137 +https://crbug.com/webrtc/14839,https://issues.webrtc.org/issues/42225138 +https://crbug.com/webrtc/1484,https://issues.webrtc.org/issues/42225139 +https://crbug.com/webrtc/14840,https://issues.webrtc.org/issues/42225140 +https://crbug.com/webrtc/14841,https://issues.webrtc.org/issues/42225141 +https://crbug.com/webrtc/14842,https://issues.webrtc.org/issues/42225142 +https://crbug.com/webrtc/14843,https://issues.webrtc.org/issues/42225143 +https://crbug.com/webrtc/14844,https://issues.webrtc.org/issues/42225144 +https://crbug.com/webrtc/14845,https://issues.webrtc.org/issues/42225145 +https://crbug.com/webrtc/14846,https://issues.webrtc.org/issues/42225146 +https://crbug.com/webrtc/14849,https://issues.webrtc.org/issues/42225147 +https://crbug.com/webrtc/1485,https://issues.webrtc.org/issues/42225148 +https://crbug.com/webrtc/14850,https://issues.webrtc.org/issues/42225149 +https://crbug.com/webrtc/14851,https://issues.webrtc.org/issues/42225150 +https://crbug.com/webrtc/14852,https://issues.webrtc.org/issues/42225151 +https://crbug.com/webrtc/14853,https://issues.webrtc.org/issues/42225152 +https://crbug.com/webrtc/14854,https://issues.webrtc.org/issues/42225153 +https://crbug.com/webrtc/14855,https://issues.webrtc.org/issues/42225154 +https://crbug.com/webrtc/14856,https://issues.webrtc.org/issues/42225155 +https://crbug.com/webrtc/14857,https://issues.webrtc.org/issues/42225156 +https://crbug.com/webrtc/14858,https://issues.webrtc.org/issues/42225157 +https://crbug.com/webrtc/14859,https://issues.webrtc.org/issues/42225158 +https://crbug.com/webrtc/1486,https://issues.webrtc.org/issues/42225159 +https://crbug.com/webrtc/14860,https://issues.webrtc.org/issues/42225160 +https://crbug.com/webrtc/14861,https://issues.webrtc.org/issues/42225161 +https://crbug.com/webrtc/14862,https://issues.webrtc.org/issues/42225162 +https://crbug.com/webrtc/14863,https://issues.webrtc.org/issues/42225163 +https://crbug.com/webrtc/14864,https://issues.webrtc.org/issues/42225164 +https://crbug.com/webrtc/14865,https://issues.webrtc.org/issues/42225165 +https://crbug.com/webrtc/14866,https://issues.webrtc.org/issues/42225166 +https://crbug.com/webrtc/14867,https://issues.webrtc.org/issues/42225167 +https://crbug.com/webrtc/14869,https://issues.webrtc.org/issues/42225168 +https://crbug.com/webrtc/1487,https://issues.webrtc.org/issues/42225169 +https://crbug.com/webrtc/14870,https://issues.webrtc.org/issues/42225170 +https://crbug.com/webrtc/14871,https://issues.webrtc.org/issues/42225171 +https://crbug.com/webrtc/14872,https://issues.webrtc.org/issues/42225172 +https://crbug.com/webrtc/14873,https://issues.webrtc.org/issues/42225173 +https://crbug.com/webrtc/14874,https://issues.webrtc.org/issues/42225174 +https://crbug.com/webrtc/14875,https://issues.webrtc.org/issues/42225175 +https://crbug.com/webrtc/14876,https://issues.webrtc.org/issues/42225176 +https://crbug.com/webrtc/14877,https://issues.webrtc.org/issues/42225177 +https://crbug.com/webrtc/14878,https://issues.webrtc.org/issues/42225178 
+https://crbug.com/webrtc/1488,https://issues.webrtc.org/issues/42225179 +https://crbug.com/webrtc/14881,https://issues.webrtc.org/issues/42225180 +https://crbug.com/webrtc/14882,https://issues.webrtc.org/issues/42225181 +https://crbug.com/webrtc/14883,https://issues.webrtc.org/issues/42225182 +https://crbug.com/webrtc/14884,https://issues.webrtc.org/issues/42225183 +https://crbug.com/webrtc/14885,https://issues.webrtc.org/issues/42225184 +https://crbug.com/webrtc/14886,https://issues.webrtc.org/issues/42225185 +https://crbug.com/webrtc/14887,https://issues.webrtc.org/issues/42225186 +https://crbug.com/webrtc/14888,https://issues.webrtc.org/issues/42225187 +https://crbug.com/webrtc/14889,https://issues.webrtc.org/issues/42225188 +https://crbug.com/webrtc/1489,https://issues.webrtc.org/issues/42225189 +https://crbug.com/webrtc/14890,https://issues.webrtc.org/issues/42225190 +https://crbug.com/webrtc/14891,https://issues.webrtc.org/issues/42225191 +https://crbug.com/webrtc/14892,https://issues.webrtc.org/issues/42225192 +https://crbug.com/webrtc/14893,https://issues.webrtc.org/issues/42225193 +https://crbug.com/webrtc/14894,https://issues.webrtc.org/issues/42225194 +https://crbug.com/webrtc/14895,https://issues.webrtc.org/issues/42225195 +https://crbug.com/webrtc/14897,https://issues.webrtc.org/issues/42225196 +https://crbug.com/webrtc/14898,https://issues.webrtc.org/issues/42225197 +https://crbug.com/webrtc/14899,https://issues.webrtc.org/issues/42225198 +https://crbug.com/webrtc/149,https://issues.webrtc.org/issues/42225199 +https://crbug.com/webrtc/1490,https://issues.webrtc.org/issues/42225200 +https://crbug.com/webrtc/14900,https://issues.webrtc.org/issues/42225201 +https://crbug.com/webrtc/14901,https://issues.webrtc.org/issues/42225202 +https://crbug.com/webrtc/14902,https://issues.webrtc.org/issues/42225203 +https://crbug.com/webrtc/14903,https://issues.webrtc.org/issues/42225204 +https://crbug.com/webrtc/14904,https://issues.webrtc.org/issues/42225205 +https://crbug.com/webrtc/14905,https://issues.webrtc.org/issues/42225206 +https://crbug.com/webrtc/14906,https://issues.webrtc.org/issues/42225207 +https://crbug.com/webrtc/14907,https://issues.webrtc.org/issues/42225208 +https://crbug.com/webrtc/14908,https://issues.webrtc.org/issues/42225209 +https://crbug.com/webrtc/14909,https://issues.webrtc.org/issues/42225210 +https://crbug.com/webrtc/1491,https://issues.webrtc.org/issues/42225211 +https://crbug.com/webrtc/14910,https://issues.webrtc.org/issues/42225212 +https://crbug.com/webrtc/14911,https://issues.webrtc.org/issues/42225213 +https://crbug.com/webrtc/14912,https://issues.webrtc.org/issues/42225214 +https://crbug.com/webrtc/14913,https://issues.webrtc.org/issues/42225215 +https://crbug.com/webrtc/14914,https://issues.webrtc.org/issues/42225216 +https://crbug.com/webrtc/14915,https://issues.webrtc.org/issues/42225217 +https://crbug.com/webrtc/14916,https://issues.webrtc.org/issues/42225218 +https://crbug.com/webrtc/14917,https://issues.webrtc.org/issues/42225219 +https://crbug.com/webrtc/14918,https://issues.webrtc.org/issues/42225220 +https://crbug.com/webrtc/14919,https://issues.webrtc.org/issues/42225221 +https://crbug.com/webrtc/1492,https://issues.webrtc.org/issues/42225222 +https://crbug.com/webrtc/14920,https://issues.webrtc.org/issues/42225223 +https://crbug.com/webrtc/14921,https://issues.webrtc.org/issues/42225224 +https://crbug.com/webrtc/14922,https://issues.webrtc.org/issues/42225225 +https://crbug.com/webrtc/14923,https://issues.webrtc.org/issues/42225226 
+https://crbug.com/webrtc/14924,https://issues.webrtc.org/issues/42225227 +https://crbug.com/webrtc/14925,https://issues.webrtc.org/issues/42225228 +https://crbug.com/webrtc/14926,https://issues.webrtc.org/issues/42225229 +https://crbug.com/webrtc/14927,https://issues.webrtc.org/issues/42225230 +https://crbug.com/webrtc/14928,https://issues.webrtc.org/issues/42225231 +https://crbug.com/webrtc/1493,https://issues.webrtc.org/issues/42225232 +https://crbug.com/webrtc/14930,https://issues.webrtc.org/issues/42225233 +https://crbug.com/webrtc/14931,https://issues.webrtc.org/issues/42225234 +https://crbug.com/webrtc/14932,https://issues.webrtc.org/issues/42225235 +https://crbug.com/webrtc/14933,https://issues.webrtc.org/issues/42225236 +https://crbug.com/webrtc/14934,https://issues.webrtc.org/issues/42225237 +https://crbug.com/webrtc/14935,https://issues.webrtc.org/issues/42225238 +https://crbug.com/webrtc/14936,https://issues.webrtc.org/issues/42225239 +https://crbug.com/webrtc/14937,https://issues.webrtc.org/issues/42225240 +https://crbug.com/webrtc/14938,https://issues.webrtc.org/issues/42225241 +https://crbug.com/webrtc/14939,https://issues.webrtc.org/issues/42225242 +https://crbug.com/webrtc/1494,https://issues.webrtc.org/issues/42225243 +https://crbug.com/webrtc/14940,https://issues.webrtc.org/issues/42225244 +https://crbug.com/webrtc/14941,https://issues.webrtc.org/issues/42225245 +https://crbug.com/webrtc/14942,https://issues.webrtc.org/issues/42225246 +https://crbug.com/webrtc/14943,https://issues.webrtc.org/issues/42225247 +https://crbug.com/webrtc/14944,https://issues.webrtc.org/issues/42225248 +https://crbug.com/webrtc/14945,https://issues.webrtc.org/issues/42225249 +https://crbug.com/webrtc/14947,https://issues.webrtc.org/issues/42225250 +https://crbug.com/webrtc/14948,https://issues.webrtc.org/issues/42225251 +https://crbug.com/webrtc/14949,https://issues.webrtc.org/issues/42225252 +https://crbug.com/webrtc/1495,https://issues.webrtc.org/issues/42225253 +https://crbug.com/webrtc/14950,https://issues.webrtc.org/issues/42225254 +https://crbug.com/webrtc/14951,https://issues.webrtc.org/issues/42225255 +https://crbug.com/webrtc/14952,https://issues.webrtc.org/issues/42225256 +https://crbug.com/webrtc/14953,https://issues.webrtc.org/issues/42225257 +https://crbug.com/webrtc/14954,https://issues.webrtc.org/issues/42225258 +https://crbug.com/webrtc/14955,https://issues.webrtc.org/issues/42225259 +https://crbug.com/webrtc/14956,https://issues.webrtc.org/issues/42225260 +https://crbug.com/webrtc/14958,https://issues.webrtc.org/issues/42225261 +https://crbug.com/webrtc/14959,https://issues.webrtc.org/issues/42225262 +https://crbug.com/webrtc/1496,https://issues.webrtc.org/issues/42225263 +https://crbug.com/webrtc/14960,https://issues.webrtc.org/issues/42225264 +https://crbug.com/webrtc/14961,https://issues.webrtc.org/issues/42225265 +https://crbug.com/webrtc/14962,https://issues.webrtc.org/issues/42225266 +https://crbug.com/webrtc/14963,https://issues.webrtc.org/issues/42225267 +https://crbug.com/webrtc/14964,https://issues.webrtc.org/issues/42225268 +https://crbug.com/webrtc/14965,https://issues.webrtc.org/issues/42225269 +https://crbug.com/webrtc/14966,https://issues.webrtc.org/issues/42225270 +https://crbug.com/webrtc/14967,https://issues.webrtc.org/issues/42225271 +https://crbug.com/webrtc/14968,https://issues.webrtc.org/issues/42225272 +https://crbug.com/webrtc/14969,https://issues.webrtc.org/issues/42225273 +https://crbug.com/webrtc/1497,https://issues.webrtc.org/issues/42225274 
+https://crbug.com/webrtc/14970,https://issues.webrtc.org/issues/42225275 +https://crbug.com/webrtc/14971,https://issues.webrtc.org/issues/42225276 +https://crbug.com/webrtc/14972,https://issues.webrtc.org/issues/42225277 +https://crbug.com/webrtc/14973,https://issues.webrtc.org/issues/42225278 +https://crbug.com/webrtc/14974,https://issues.webrtc.org/issues/42225279 +https://crbug.com/webrtc/14975,https://issues.webrtc.org/issues/42225280 +https://crbug.com/webrtc/14976,https://issues.webrtc.org/issues/42225281 +https://crbug.com/webrtc/14977,https://issues.webrtc.org/issues/42225282 +https://crbug.com/webrtc/14978,https://issues.webrtc.org/issues/42225283 +https://crbug.com/webrtc/14979,https://issues.webrtc.org/issues/42225284 +https://crbug.com/webrtc/14980,https://issues.webrtc.org/issues/42225285 +https://crbug.com/webrtc/14981,https://issues.webrtc.org/issues/42225286 +https://crbug.com/webrtc/14982,https://issues.webrtc.org/issues/42225287 +https://crbug.com/webrtc/14983,https://issues.webrtc.org/issues/42225288 +https://crbug.com/webrtc/14984,https://issues.webrtc.org/issues/42225289 +https://crbug.com/webrtc/14985,https://issues.webrtc.org/issues/42225290 +https://crbug.com/webrtc/14986,https://issues.webrtc.org/issues/42225291 +https://crbug.com/webrtc/14987,https://issues.webrtc.org/issues/42225292 +https://crbug.com/webrtc/14988,https://issues.webrtc.org/issues/42225293 +https://crbug.com/webrtc/14989,https://issues.webrtc.org/issues/42225294 +https://crbug.com/webrtc/1499,https://issues.webrtc.org/issues/42225295 +https://crbug.com/webrtc/14990,https://issues.webrtc.org/issues/42225296 +https://crbug.com/webrtc/14991,https://issues.webrtc.org/issues/42225297 +https://crbug.com/webrtc/14992,https://issues.webrtc.org/issues/42225298 +https://crbug.com/webrtc/14993,https://issues.webrtc.org/issues/42225299 +https://crbug.com/webrtc/14994,https://issues.webrtc.org/issues/42225300 +https://crbug.com/webrtc/14995,https://issues.webrtc.org/issues/42225301 +https://crbug.com/webrtc/14996,https://issues.webrtc.org/issues/42225302 +https://crbug.com/webrtc/14997,https://issues.webrtc.org/issues/42225303 +https://crbug.com/webrtc/14998,https://issues.webrtc.org/issues/42225304 +https://crbug.com/webrtc/14999,https://issues.webrtc.org/issues/42225305 +https://crbug.com/webrtc/15,https://issues.webrtc.org/issues/42225306 +https://crbug.com/webrtc/150,https://issues.webrtc.org/issues/42225307 +https://crbug.com/webrtc/1500,https://issues.webrtc.org/issues/42225308 +https://crbug.com/webrtc/15000,https://issues.webrtc.org/issues/42225309 +https://crbug.com/webrtc/15001,https://issues.webrtc.org/issues/42225310 +https://crbug.com/webrtc/15002,https://issues.webrtc.org/issues/42225311 +https://crbug.com/webrtc/15003,https://issues.webrtc.org/issues/42225312 +https://crbug.com/webrtc/15004,https://issues.webrtc.org/issues/42225313 +https://crbug.com/webrtc/15005,https://issues.webrtc.org/issues/42225314 +https://crbug.com/webrtc/15006,https://issues.webrtc.org/issues/42225315 +https://crbug.com/webrtc/15007,https://issues.webrtc.org/issues/42225316 +https://crbug.com/webrtc/15008,https://issues.webrtc.org/issues/42225317 +https://crbug.com/webrtc/15009,https://issues.webrtc.org/issues/42225318 +https://crbug.com/webrtc/1501,https://issues.webrtc.org/issues/42225319 +https://crbug.com/webrtc/15010,https://issues.webrtc.org/issues/42225320 +https://crbug.com/webrtc/15011,https://issues.webrtc.org/issues/42225321 +https://crbug.com/webrtc/15012,https://issues.webrtc.org/issues/42225322 
+https://crbug.com/webrtc/15013,https://issues.webrtc.org/issues/42225323 +https://crbug.com/webrtc/15014,https://issues.webrtc.org/issues/42225324 +https://crbug.com/webrtc/15015,https://issues.webrtc.org/issues/42225325 +https://crbug.com/webrtc/15016,https://issues.webrtc.org/issues/42225326 +https://crbug.com/webrtc/15017,https://issues.webrtc.org/issues/42225327 +https://crbug.com/webrtc/15018,https://issues.webrtc.org/issues/42225328 +https://crbug.com/webrtc/15019,https://issues.webrtc.org/issues/42225329 +https://crbug.com/webrtc/1502,https://issues.webrtc.org/issues/42225330 +https://crbug.com/webrtc/15020,https://issues.webrtc.org/issues/42225331 +https://crbug.com/webrtc/15021,https://issues.webrtc.org/issues/42225332 +https://crbug.com/webrtc/15022,https://issues.webrtc.org/issues/42225333 +https://crbug.com/webrtc/15023,https://issues.webrtc.org/issues/42225334 +https://crbug.com/webrtc/15024,https://issues.webrtc.org/issues/42225335 +https://crbug.com/webrtc/15025,https://issues.webrtc.org/issues/42225336 +https://crbug.com/webrtc/15026,https://issues.webrtc.org/issues/42225337 +https://crbug.com/webrtc/15027,https://issues.webrtc.org/issues/42225338 +https://crbug.com/webrtc/15028,https://issues.webrtc.org/issues/42225339 +https://crbug.com/webrtc/15029,https://issues.webrtc.org/issues/42225340 +https://crbug.com/webrtc/1503,https://issues.webrtc.org/issues/42225341 +https://crbug.com/webrtc/15030,https://issues.webrtc.org/issues/42225342 +https://crbug.com/webrtc/15031,https://issues.webrtc.org/issues/42225343 +https://crbug.com/webrtc/15032,https://issues.webrtc.org/issues/42225344 +https://crbug.com/webrtc/15033,https://issues.webrtc.org/issues/42225345 +https://crbug.com/webrtc/15035,https://issues.webrtc.org/issues/42225346 +https://crbug.com/webrtc/15036,https://issues.webrtc.org/issues/42225347 +https://crbug.com/webrtc/15037,https://issues.webrtc.org/issues/42225348 +https://crbug.com/webrtc/15038,https://issues.webrtc.org/issues/42225349 +https://crbug.com/webrtc/15039,https://issues.webrtc.org/issues/42225350 +https://crbug.com/webrtc/1504,https://issues.webrtc.org/issues/42225351 +https://crbug.com/webrtc/15040,https://issues.webrtc.org/issues/42225352 +https://crbug.com/webrtc/15042,https://issues.webrtc.org/issues/42225353 +https://crbug.com/webrtc/15043,https://issues.webrtc.org/issues/42225354 +https://crbug.com/webrtc/15044,https://issues.webrtc.org/issues/42225355 +https://crbug.com/webrtc/15045,https://issues.webrtc.org/issues/42225356 +https://crbug.com/webrtc/15046,https://issues.webrtc.org/issues/42225357 +https://crbug.com/webrtc/15047,https://issues.webrtc.org/issues/42225358 +https://crbug.com/webrtc/15048,https://issues.webrtc.org/issues/42225359 +https://crbug.com/webrtc/15049,https://issues.webrtc.org/issues/42225360 +https://crbug.com/webrtc/1505,https://issues.webrtc.org/issues/42225361 +https://crbug.com/webrtc/15050,https://issues.webrtc.org/issues/42225362 +https://crbug.com/webrtc/15051,https://issues.webrtc.org/issues/42225363 +https://crbug.com/webrtc/15052,https://issues.webrtc.org/issues/42225364 +https://crbug.com/webrtc/15053,https://issues.webrtc.org/issues/42225365 +https://crbug.com/webrtc/15054,https://issues.webrtc.org/issues/42225366 +https://crbug.com/webrtc/15056,https://issues.webrtc.org/issues/42225367 +https://crbug.com/webrtc/15057,https://issues.webrtc.org/issues/42225368 +https://crbug.com/webrtc/15058,https://issues.webrtc.org/issues/42225369 +https://crbug.com/webrtc/15059,https://issues.webrtc.org/issues/42225370 
+https://crbug.com/webrtc/1506,https://issues.webrtc.org/issues/42225371 +https://crbug.com/webrtc/15061,https://issues.webrtc.org/issues/42225372 +https://crbug.com/webrtc/15062,https://issues.webrtc.org/issues/42225373 +https://crbug.com/webrtc/15063,https://issues.webrtc.org/issues/42225374 +https://crbug.com/webrtc/15064,https://issues.webrtc.org/issues/42225375 +https://crbug.com/webrtc/15066,https://issues.webrtc.org/issues/42225376 +https://crbug.com/webrtc/15067,https://issues.webrtc.org/issues/42225377 +https://crbug.com/webrtc/15068,https://issues.webrtc.org/issues/42225378 +https://crbug.com/webrtc/15069,https://issues.webrtc.org/issues/42225379 +https://crbug.com/webrtc/1507,https://issues.webrtc.org/issues/42225380 +https://crbug.com/webrtc/15070,https://issues.webrtc.org/issues/42225381 +https://crbug.com/webrtc/15071,https://issues.webrtc.org/issues/42225382 +https://crbug.com/webrtc/15074,https://issues.webrtc.org/issues/42225383 +https://crbug.com/webrtc/15075,https://issues.webrtc.org/issues/42225384 +https://crbug.com/webrtc/15076,https://issues.webrtc.org/issues/42225385 +https://crbug.com/webrtc/15077,https://issues.webrtc.org/issues/42225386 +https://crbug.com/webrtc/15078,https://issues.webrtc.org/issues/42225387 +https://crbug.com/webrtc/15079,https://issues.webrtc.org/issues/42225388 +https://crbug.com/webrtc/1508,https://issues.webrtc.org/issues/42225389 +https://crbug.com/webrtc/15080,https://issues.webrtc.org/issues/42225390 +https://crbug.com/webrtc/15081,https://issues.webrtc.org/issues/42225391 +https://crbug.com/webrtc/15082,https://issues.webrtc.org/issues/42225392 +https://crbug.com/webrtc/15083,https://issues.webrtc.org/issues/42225393 +https://crbug.com/webrtc/15084,https://issues.webrtc.org/issues/42225394 +https://crbug.com/webrtc/15085,https://issues.webrtc.org/issues/42225395 +https://crbug.com/webrtc/15086,https://issues.webrtc.org/issues/42225396 +https://crbug.com/webrtc/15087,https://issues.webrtc.org/issues/42225397 +https://crbug.com/webrtc/15088,https://issues.webrtc.org/issues/42225398 +https://crbug.com/webrtc/15089,https://issues.webrtc.org/issues/42225399 +https://crbug.com/webrtc/1509,https://issues.webrtc.org/issues/42225400 +https://crbug.com/webrtc/15090,https://issues.webrtc.org/issues/42225401 +https://crbug.com/webrtc/15091,https://issues.webrtc.org/issues/42225402 +https://crbug.com/webrtc/15092,https://issues.webrtc.org/issues/42225403 +https://crbug.com/webrtc/15093,https://issues.webrtc.org/issues/42225404 +https://crbug.com/webrtc/15094,https://issues.webrtc.org/issues/42225405 +https://crbug.com/webrtc/15095,https://issues.webrtc.org/issues/42225406 +https://crbug.com/webrtc/15096,https://issues.webrtc.org/issues/42225407 +https://crbug.com/webrtc/15097,https://issues.webrtc.org/issues/42225408 +https://crbug.com/webrtc/15098,https://issues.webrtc.org/issues/42225409 +https://crbug.com/webrtc/15099,https://issues.webrtc.org/issues/42225410 +https://crbug.com/webrtc/151,https://issues.webrtc.org/issues/42225411 +https://crbug.com/webrtc/1510,https://issues.webrtc.org/issues/42225412 +https://crbug.com/webrtc/15100,https://issues.webrtc.org/issues/42225413 +https://crbug.com/webrtc/15101,https://issues.webrtc.org/issues/42225414 +https://crbug.com/webrtc/15102,https://issues.webrtc.org/issues/42225415 +https://crbug.com/webrtc/15103,https://issues.webrtc.org/issues/42225416 +https://crbug.com/webrtc/15104,https://issues.webrtc.org/issues/42225417 +https://crbug.com/webrtc/15105,https://issues.webrtc.org/issues/42225418 
+https://crbug.com/webrtc/15106,https://issues.webrtc.org/issues/42225419 +https://crbug.com/webrtc/15107,https://issues.webrtc.org/issues/42225420 +https://crbug.com/webrtc/15108,https://issues.webrtc.org/issues/42225421 +https://crbug.com/webrtc/15109,https://issues.webrtc.org/issues/42225422 +https://crbug.com/webrtc/1511,https://issues.webrtc.org/issues/42225423 +https://crbug.com/webrtc/15110,https://issues.webrtc.org/issues/42225424 +https://crbug.com/webrtc/15111,https://issues.webrtc.org/issues/42225425 +https://crbug.com/webrtc/15112,https://issues.webrtc.org/issues/42225426 +https://crbug.com/webrtc/15113,https://issues.webrtc.org/issues/42225427 +https://crbug.com/webrtc/15114,https://issues.webrtc.org/issues/42225428 +https://crbug.com/webrtc/15115,https://issues.webrtc.org/issues/42225429 +https://crbug.com/webrtc/15116,https://issues.webrtc.org/issues/42225430 +https://crbug.com/webrtc/15117,https://issues.webrtc.org/issues/42225431 +https://crbug.com/webrtc/15118,https://issues.webrtc.org/issues/42225432 +https://crbug.com/webrtc/15119,https://issues.webrtc.org/issues/42225433 +https://crbug.com/webrtc/1512,https://issues.webrtc.org/issues/42225434 +https://crbug.com/webrtc/15120,https://issues.webrtc.org/issues/42225435 +https://crbug.com/webrtc/15121,https://issues.webrtc.org/issues/42225436 +https://crbug.com/webrtc/15122,https://issues.webrtc.org/issues/42225437 +https://crbug.com/webrtc/15123,https://issues.webrtc.org/issues/42225438 +https://crbug.com/webrtc/15125,https://issues.webrtc.org/issues/42225439 +https://crbug.com/webrtc/15126,https://issues.webrtc.org/issues/42225440 +https://crbug.com/webrtc/15127,https://issues.webrtc.org/issues/42225441 +https://crbug.com/webrtc/15129,https://issues.webrtc.org/issues/42225442 +https://crbug.com/webrtc/1513,https://issues.webrtc.org/issues/42225443 +https://crbug.com/webrtc/15130,https://issues.webrtc.org/issues/42225444 +https://crbug.com/webrtc/15131,https://issues.webrtc.org/issues/42225445 +https://crbug.com/webrtc/15132,https://issues.webrtc.org/issues/42225446 +https://crbug.com/webrtc/15133,https://issues.webrtc.org/issues/42225447 +https://crbug.com/webrtc/15134,https://issues.webrtc.org/issues/42225448 +https://crbug.com/webrtc/15135,https://issues.webrtc.org/issues/42225449 +https://crbug.com/webrtc/15136,https://issues.webrtc.org/issues/42225450 +https://crbug.com/webrtc/15137,https://issues.webrtc.org/issues/42225451 +https://crbug.com/webrtc/15138,https://issues.webrtc.org/issues/42225452 +https://crbug.com/webrtc/15139,https://issues.webrtc.org/issues/42225453 +https://crbug.com/webrtc/1514,https://issues.webrtc.org/issues/42225454 +https://crbug.com/webrtc/15140,https://issues.webrtc.org/issues/42225455 +https://crbug.com/webrtc/15141,https://issues.webrtc.org/issues/42225456 +https://crbug.com/webrtc/15142,https://issues.webrtc.org/issues/42225457 +https://crbug.com/webrtc/15143,https://issues.webrtc.org/issues/42225458 +https://crbug.com/webrtc/15144,https://issues.webrtc.org/issues/42225459 +https://crbug.com/webrtc/15145,https://issues.webrtc.org/issues/42225460 +https://crbug.com/webrtc/15146,https://issues.webrtc.org/issues/42225461 +https://crbug.com/webrtc/15148,https://issues.webrtc.org/issues/42225462 +https://crbug.com/webrtc/15149,https://issues.webrtc.org/issues/42225463 +https://crbug.com/webrtc/1515,https://issues.webrtc.org/issues/42225464 +https://crbug.com/webrtc/15150,https://issues.webrtc.org/issues/42225465 +https://crbug.com/webrtc/15151,https://issues.webrtc.org/issues/42225466 
+https://crbug.com/webrtc/15152,https://issues.webrtc.org/issues/42225467 +https://crbug.com/webrtc/15153,https://issues.webrtc.org/issues/42225468 +https://crbug.com/webrtc/15154,https://issues.webrtc.org/issues/42225469 +https://crbug.com/webrtc/15155,https://issues.webrtc.org/issues/42225470 +https://crbug.com/webrtc/15156,https://issues.webrtc.org/issues/42225471 +https://crbug.com/webrtc/15157,https://issues.webrtc.org/issues/42225472 +https://crbug.com/webrtc/15158,https://issues.webrtc.org/issues/42225473 +https://crbug.com/webrtc/15159,https://issues.webrtc.org/issues/42225474 +https://crbug.com/webrtc/1516,https://issues.webrtc.org/issues/42225475 +https://crbug.com/webrtc/15160,https://issues.webrtc.org/issues/42225476 +https://crbug.com/webrtc/15161,https://issues.webrtc.org/issues/42225477 +https://crbug.com/webrtc/15162,https://issues.webrtc.org/issues/42225478 +https://crbug.com/webrtc/15163,https://issues.webrtc.org/issues/42225479 +https://crbug.com/webrtc/15164,https://issues.webrtc.org/issues/42225480 +https://crbug.com/webrtc/15165,https://issues.webrtc.org/issues/42225481 +https://crbug.com/webrtc/15166,https://issues.webrtc.org/issues/42225482 +https://crbug.com/webrtc/15167,https://issues.webrtc.org/issues/42225483 +https://crbug.com/webrtc/15168,https://issues.webrtc.org/issues/42225484 +https://crbug.com/webrtc/15169,https://issues.webrtc.org/issues/42225485 +https://crbug.com/webrtc/1517,https://issues.webrtc.org/issues/42225486 +https://crbug.com/webrtc/15170,https://issues.webrtc.org/issues/42225487 +https://crbug.com/webrtc/15171,https://issues.webrtc.org/issues/42225488 +https://crbug.com/webrtc/15172,https://issues.webrtc.org/issues/42225489 +https://crbug.com/webrtc/15174,https://issues.webrtc.org/issues/42225490 +https://crbug.com/webrtc/15175,https://issues.webrtc.org/issues/42225491 +https://crbug.com/webrtc/15176,https://issues.webrtc.org/issues/42225492 +https://crbug.com/webrtc/15177,https://issues.webrtc.org/issues/42225493 +https://crbug.com/webrtc/15178,https://issues.webrtc.org/issues/42225494 +https://crbug.com/webrtc/1518,https://issues.webrtc.org/issues/42225495 +https://crbug.com/webrtc/15180,https://issues.webrtc.org/issues/42225496 +https://crbug.com/webrtc/15181,https://issues.webrtc.org/issues/42225497 +https://crbug.com/webrtc/15182,https://issues.webrtc.org/issues/42225498 +https://crbug.com/webrtc/15183,https://issues.webrtc.org/issues/42225499 +https://crbug.com/webrtc/15184,https://issues.webrtc.org/issues/42225500 +https://crbug.com/webrtc/15185,https://issues.webrtc.org/issues/42225501 +https://crbug.com/webrtc/15186,https://issues.webrtc.org/issues/42225502 +https://crbug.com/webrtc/15187,https://issues.webrtc.org/issues/42225503 +https://crbug.com/webrtc/15188,https://issues.webrtc.org/issues/42225504 +https://crbug.com/webrtc/15189,https://issues.webrtc.org/issues/42225505 +https://crbug.com/webrtc/1519,https://issues.webrtc.org/issues/42225506 +https://crbug.com/webrtc/15190,https://issues.webrtc.org/issues/42225507 +https://crbug.com/webrtc/15191,https://issues.webrtc.org/issues/42225508 +https://crbug.com/webrtc/15192,https://issues.webrtc.org/issues/42225509 +https://crbug.com/webrtc/15193,https://issues.webrtc.org/issues/42225510 +https://crbug.com/webrtc/15194,https://issues.webrtc.org/issues/42225511 +https://crbug.com/webrtc/15195,https://issues.webrtc.org/issues/42225512 +https://crbug.com/webrtc/15196,https://issues.webrtc.org/issues/42225513 +https://crbug.com/webrtc/15197,https://issues.webrtc.org/issues/42225514 
+https://crbug.com/webrtc/15198,https://issues.webrtc.org/issues/42225515 +https://crbug.com/webrtc/15199,https://issues.webrtc.org/issues/42225516 +https://crbug.com/webrtc/152,https://issues.webrtc.org/issues/42225517 +https://crbug.com/webrtc/1520,https://issues.webrtc.org/issues/42225518 +https://crbug.com/webrtc/15200,https://issues.webrtc.org/issues/42225519 +https://crbug.com/webrtc/15201,https://issues.webrtc.org/issues/42225520 +https://crbug.com/webrtc/15202,https://issues.webrtc.org/issues/42225521 +https://crbug.com/webrtc/15203,https://issues.webrtc.org/issues/42225522 +https://crbug.com/webrtc/15204,https://issues.webrtc.org/issues/42225523 +https://crbug.com/webrtc/15206,https://issues.webrtc.org/issues/42225524 +https://crbug.com/webrtc/15208,https://issues.webrtc.org/issues/42225525 +https://crbug.com/webrtc/15209,https://issues.webrtc.org/issues/42225526 +https://crbug.com/webrtc/1521,https://issues.webrtc.org/issues/42225527 +https://crbug.com/webrtc/15210,https://issues.webrtc.org/issues/42225528 +https://crbug.com/webrtc/15211,https://issues.webrtc.org/issues/42225529 +https://crbug.com/webrtc/15212,https://issues.webrtc.org/issues/42225530 +https://crbug.com/webrtc/15213,https://issues.webrtc.org/issues/42225531 +https://crbug.com/webrtc/15214,https://issues.webrtc.org/issues/42225532 +https://crbug.com/webrtc/15215,https://issues.webrtc.org/issues/42225533 +https://crbug.com/webrtc/15216,https://issues.webrtc.org/issues/42225534 +https://crbug.com/webrtc/15217,https://issues.webrtc.org/issues/42225535 +https://crbug.com/webrtc/15218,https://issues.webrtc.org/issues/42225536 +https://crbug.com/webrtc/15219,https://issues.webrtc.org/issues/42225537 +https://crbug.com/webrtc/1522,https://issues.webrtc.org/issues/42225538 +https://crbug.com/webrtc/15220,https://issues.webrtc.org/issues/42225539 +https://crbug.com/webrtc/15222,https://issues.webrtc.org/issues/42225540 +https://crbug.com/webrtc/15224,https://issues.webrtc.org/issues/42225541 +https://crbug.com/webrtc/15225,https://issues.webrtc.org/issues/42225542 +https://crbug.com/webrtc/15226,https://issues.webrtc.org/issues/42225543 +https://crbug.com/webrtc/15227,https://issues.webrtc.org/issues/42225544 +https://crbug.com/webrtc/15228,https://issues.webrtc.org/issues/42225545 +https://crbug.com/webrtc/15229,https://issues.webrtc.org/issues/42225546 +https://crbug.com/webrtc/1523,https://issues.webrtc.org/issues/42225547 +https://crbug.com/webrtc/15230,https://issues.webrtc.org/issues/42225548 +https://crbug.com/webrtc/15231,https://issues.webrtc.org/issues/42225549 +https://crbug.com/webrtc/15232,https://issues.webrtc.org/issues/42225550 +https://crbug.com/webrtc/15233,https://issues.webrtc.org/issues/42225551 +https://crbug.com/webrtc/15234,https://issues.webrtc.org/issues/42225552 +https://crbug.com/webrtc/15235,https://issues.webrtc.org/issues/42225553 +https://crbug.com/webrtc/15237,https://issues.webrtc.org/issues/42225554 +https://crbug.com/webrtc/15238,https://issues.webrtc.org/issues/42225555 +https://crbug.com/webrtc/1524,https://issues.webrtc.org/issues/42225556 +https://crbug.com/webrtc/15240,https://issues.webrtc.org/issues/42225557 +https://crbug.com/webrtc/15241,https://issues.webrtc.org/issues/42225558 +https://crbug.com/webrtc/15242,https://issues.webrtc.org/issues/42225559 +https://crbug.com/webrtc/15243,https://issues.webrtc.org/issues/42225560 +https://crbug.com/webrtc/15244,https://issues.webrtc.org/issues/42225561 +https://crbug.com/webrtc/15245,https://issues.webrtc.org/issues/42225562 
+https://crbug.com/webrtc/15246,https://issues.webrtc.org/issues/42225563 +https://crbug.com/webrtc/15247,https://issues.webrtc.org/issues/42225564 +https://crbug.com/webrtc/15248,https://issues.webrtc.org/issues/42225565 +https://crbug.com/webrtc/15249,https://issues.webrtc.org/issues/42225566 +https://crbug.com/webrtc/1525,https://issues.webrtc.org/issues/42225567 +https://crbug.com/webrtc/15250,https://issues.webrtc.org/issues/42225568 +https://crbug.com/webrtc/15251,https://issues.webrtc.org/issues/42225569 +https://crbug.com/webrtc/15252,https://issues.webrtc.org/issues/42225570 +https://crbug.com/webrtc/15253,https://issues.webrtc.org/issues/42225571 +https://crbug.com/webrtc/15254,https://issues.webrtc.org/issues/42225572 +https://crbug.com/webrtc/15255,https://issues.webrtc.org/issues/42225573 +https://crbug.com/webrtc/15257,https://issues.webrtc.org/issues/42225574 +https://crbug.com/webrtc/15258,https://issues.webrtc.org/issues/42225575 +https://crbug.com/webrtc/15259,https://issues.webrtc.org/issues/42225576 +https://crbug.com/webrtc/1526,https://issues.webrtc.org/issues/42225577 +https://crbug.com/webrtc/15260,https://issues.webrtc.org/issues/42225578 +https://crbug.com/webrtc/15261,https://issues.webrtc.org/issues/42225579 +https://crbug.com/webrtc/15262,https://issues.webrtc.org/issues/42225580 +https://crbug.com/webrtc/15263,https://issues.webrtc.org/issues/42225581 +https://crbug.com/webrtc/15264,https://issues.webrtc.org/issues/42225582 +https://crbug.com/webrtc/15265,https://issues.webrtc.org/issues/42225583 +https://crbug.com/webrtc/15266,https://issues.webrtc.org/issues/42225584 +https://crbug.com/webrtc/15267,https://issues.webrtc.org/issues/42225585 +https://crbug.com/webrtc/15268,https://issues.webrtc.org/issues/42225586 +https://crbug.com/webrtc/15269,https://issues.webrtc.org/issues/42225587 +https://crbug.com/webrtc/1527,https://issues.webrtc.org/issues/42225588 +https://crbug.com/webrtc/15270,https://issues.webrtc.org/issues/42225589 +https://crbug.com/webrtc/15271,https://issues.webrtc.org/issues/42225590 +https://crbug.com/webrtc/15272,https://issues.webrtc.org/issues/42225591 +https://crbug.com/webrtc/15273,https://issues.webrtc.org/issues/42225592 +https://crbug.com/webrtc/15274,https://issues.webrtc.org/issues/42225593 +https://crbug.com/webrtc/15275,https://issues.webrtc.org/issues/42225594 +https://crbug.com/webrtc/15276,https://issues.webrtc.org/issues/42225595 +https://crbug.com/webrtc/15277,https://issues.webrtc.org/issues/42225596 +https://crbug.com/webrtc/15278,https://issues.webrtc.org/issues/42225597 +https://crbug.com/webrtc/15279,https://issues.webrtc.org/issues/42225598 +https://crbug.com/webrtc/1528,https://issues.webrtc.org/issues/42225599 +https://crbug.com/webrtc/15280,https://issues.webrtc.org/issues/42225600 +https://crbug.com/webrtc/15281,https://issues.webrtc.org/issues/42225601 +https://crbug.com/webrtc/15282,https://issues.webrtc.org/issues/42225602 +https://crbug.com/webrtc/15283,https://issues.webrtc.org/issues/42225603 +https://crbug.com/webrtc/15284,https://issues.webrtc.org/issues/42225604 +https://crbug.com/webrtc/15285,https://issues.webrtc.org/issues/42225605 +https://crbug.com/webrtc/15286,https://issues.webrtc.org/issues/42225606 +https://crbug.com/webrtc/15287,https://issues.webrtc.org/issues/42225607 +https://crbug.com/webrtc/15288,https://issues.webrtc.org/issues/42225608 +https://crbug.com/webrtc/15289,https://issues.webrtc.org/issues/42225609 +https://crbug.com/webrtc/1529,https://issues.webrtc.org/issues/42225610 
+https://crbug.com/webrtc/15290,https://issues.webrtc.org/issues/42225611 +https://crbug.com/webrtc/15291,https://issues.webrtc.org/issues/42225612 +https://crbug.com/webrtc/15292,https://issues.webrtc.org/issues/42225613 +https://crbug.com/webrtc/15293,https://issues.webrtc.org/issues/42225614 +https://crbug.com/webrtc/15294,https://issues.webrtc.org/issues/42225615 +https://crbug.com/webrtc/15295,https://issues.webrtc.org/issues/42225616 +https://crbug.com/webrtc/15296,https://issues.webrtc.org/issues/42225617 +https://crbug.com/webrtc/15297,https://issues.webrtc.org/issues/42225618 +https://crbug.com/webrtc/15298,https://issues.webrtc.org/issues/42225619 +https://crbug.com/webrtc/15299,https://issues.webrtc.org/issues/42225620 +https://crbug.com/webrtc/153,https://issues.webrtc.org/issues/42225621 +https://crbug.com/webrtc/1530,https://issues.webrtc.org/issues/42225622 +https://crbug.com/webrtc/15300,https://issues.webrtc.org/issues/42225623 +https://crbug.com/webrtc/15301,https://issues.webrtc.org/issues/42225624 +https://crbug.com/webrtc/15302,https://issues.webrtc.org/issues/42225625 +https://crbug.com/webrtc/15303,https://issues.webrtc.org/issues/42225626 +https://crbug.com/webrtc/15304,https://issues.webrtc.org/issues/42225627 +https://crbug.com/webrtc/15305,https://issues.webrtc.org/issues/42225628 +https://crbug.com/webrtc/15306,https://issues.webrtc.org/issues/42225629 +https://crbug.com/webrtc/15307,https://issues.webrtc.org/issues/42225630 +https://crbug.com/webrtc/15308,https://issues.webrtc.org/issues/42225631 +https://crbug.com/webrtc/15309,https://issues.webrtc.org/issues/42225632 +https://crbug.com/webrtc/1531,https://issues.webrtc.org/issues/42225633 +https://crbug.com/webrtc/15310,https://issues.webrtc.org/issues/42225634 +https://crbug.com/webrtc/15311,https://issues.webrtc.org/issues/42225635 +https://crbug.com/webrtc/15312,https://issues.webrtc.org/issues/42225636 +https://crbug.com/webrtc/15313,https://issues.webrtc.org/issues/42225637 +https://crbug.com/webrtc/15314,https://issues.webrtc.org/issues/42225638 +https://crbug.com/webrtc/15315,https://issues.webrtc.org/issues/42225639 +https://crbug.com/webrtc/15316,https://issues.webrtc.org/issues/42225640 +https://crbug.com/webrtc/15317,https://issues.webrtc.org/issues/42225641 +https://crbug.com/webrtc/15318,https://issues.webrtc.org/issues/42225642 +https://crbug.com/webrtc/15319,https://issues.webrtc.org/issues/42225643 +https://crbug.com/webrtc/1532,https://issues.webrtc.org/issues/42225644 +https://crbug.com/webrtc/15320,https://issues.webrtc.org/issues/42225645 +https://crbug.com/webrtc/15321,https://issues.webrtc.org/issues/42225646 +https://crbug.com/webrtc/15322,https://issues.webrtc.org/issues/42225647 +https://crbug.com/webrtc/15323,https://issues.webrtc.org/issues/42225648 +https://crbug.com/webrtc/15324,https://issues.webrtc.org/issues/42225649 +https://crbug.com/webrtc/15325,https://issues.webrtc.org/issues/42225650 +https://crbug.com/webrtc/15326,https://issues.webrtc.org/issues/42225651 +https://crbug.com/webrtc/15327,https://issues.webrtc.org/issues/42225652 +https://crbug.com/webrtc/15328,https://issues.webrtc.org/issues/42225653 +https://crbug.com/webrtc/15329,https://issues.webrtc.org/issues/42225654 +https://crbug.com/webrtc/1533,https://issues.webrtc.org/issues/42225655 +https://crbug.com/webrtc/15330,https://issues.webrtc.org/issues/42225656 +https://crbug.com/webrtc/15331,https://issues.webrtc.org/issues/42225657 +https://crbug.com/webrtc/15332,https://issues.webrtc.org/issues/42225658 
+https://crbug.com/webrtc/15333,https://issues.webrtc.org/issues/42225659 +https://crbug.com/webrtc/15334,https://issues.webrtc.org/issues/42225660 +https://crbug.com/webrtc/15335,https://issues.webrtc.org/issues/42225661 +https://crbug.com/webrtc/15336,https://issues.webrtc.org/issues/42225662 +https://crbug.com/webrtc/15337,https://issues.webrtc.org/issues/42225663 +https://crbug.com/webrtc/15338,https://issues.webrtc.org/issues/42225664 +https://crbug.com/webrtc/15339,https://issues.webrtc.org/issues/42225665 +https://crbug.com/webrtc/1534,https://issues.webrtc.org/issues/42225666 +https://crbug.com/webrtc/15340,https://issues.webrtc.org/issues/42225667 +https://crbug.com/webrtc/15341,https://issues.webrtc.org/issues/42225668 +https://crbug.com/webrtc/15342,https://issues.webrtc.org/issues/42225669 +https://crbug.com/webrtc/15343,https://issues.webrtc.org/issues/42225670 +https://crbug.com/webrtc/15344,https://issues.webrtc.org/issues/42225671 +https://crbug.com/webrtc/15345,https://issues.webrtc.org/issues/42225672 +https://crbug.com/webrtc/15346,https://issues.webrtc.org/issues/42225673 +https://crbug.com/webrtc/15347,https://issues.webrtc.org/issues/42225674 +https://crbug.com/webrtc/15348,https://issues.webrtc.org/issues/42225675 +https://crbug.com/webrtc/15349,https://issues.webrtc.org/issues/42225676 +https://crbug.com/webrtc/1535,https://issues.webrtc.org/issues/42225677 +https://crbug.com/webrtc/15350,https://issues.webrtc.org/issues/42225678 +https://crbug.com/webrtc/15351,https://issues.webrtc.org/issues/42225679 +https://crbug.com/webrtc/15352,https://issues.webrtc.org/issues/42225680 +https://crbug.com/webrtc/15353,https://issues.webrtc.org/issues/42225681 +https://crbug.com/webrtc/15354,https://issues.webrtc.org/issues/42225682 +https://crbug.com/webrtc/15355,https://issues.webrtc.org/issues/42225683 +https://crbug.com/webrtc/15356,https://issues.webrtc.org/issues/42225684 +https://crbug.com/webrtc/15357,https://issues.webrtc.org/issues/42225685 +https://crbug.com/webrtc/15358,https://issues.webrtc.org/issues/42225686 +https://crbug.com/webrtc/15359,https://issues.webrtc.org/issues/42225687 +https://crbug.com/webrtc/1536,https://issues.webrtc.org/issues/42225688 +https://crbug.com/webrtc/15360,https://issues.webrtc.org/issues/42225689 +https://crbug.com/webrtc/15361,https://issues.webrtc.org/issues/42225690 +https://crbug.com/webrtc/15362,https://issues.webrtc.org/issues/42225691 +https://crbug.com/webrtc/15363,https://issues.webrtc.org/issues/42225692 +https://crbug.com/webrtc/15364,https://issues.webrtc.org/issues/42225693 +https://crbug.com/webrtc/15365,https://issues.webrtc.org/issues/42225694 +https://crbug.com/webrtc/15366,https://issues.webrtc.org/issues/42225695 +https://crbug.com/webrtc/15367,https://issues.webrtc.org/issues/42225696 +https://crbug.com/webrtc/15368,https://issues.webrtc.org/issues/42225697 +https://crbug.com/webrtc/15369,https://issues.webrtc.org/issues/42225698 +https://crbug.com/webrtc/1537,https://issues.webrtc.org/issues/42225699 +https://crbug.com/webrtc/15370,https://issues.webrtc.org/issues/42225700 +https://crbug.com/webrtc/15371,https://issues.webrtc.org/issues/42225701 +https://crbug.com/webrtc/15372,https://issues.webrtc.org/issues/42225702 +https://crbug.com/webrtc/15373,https://issues.webrtc.org/issues/42225703 +https://crbug.com/webrtc/15374,https://issues.webrtc.org/issues/42225704 +https://crbug.com/webrtc/15375,https://issues.webrtc.org/issues/42225705 +https://crbug.com/webrtc/15376,https://issues.webrtc.org/issues/42225706 
+https://crbug.com/webrtc/15377,https://issues.webrtc.org/issues/42225707 +https://crbug.com/webrtc/15378,https://issues.webrtc.org/issues/42225708 +https://crbug.com/webrtc/15379,https://issues.webrtc.org/issues/42225709 +https://crbug.com/webrtc/1538,https://issues.webrtc.org/issues/42225710 +https://crbug.com/webrtc/15380,https://issues.webrtc.org/issues/42225711 +https://crbug.com/webrtc/15381,https://issues.webrtc.org/issues/42225712 +https://crbug.com/webrtc/15382,https://issues.webrtc.org/issues/42225713 +https://crbug.com/webrtc/15383,https://issues.webrtc.org/issues/42225714 +https://crbug.com/webrtc/15384,https://issues.webrtc.org/issues/42225715 +https://crbug.com/webrtc/15385,https://issues.webrtc.org/issues/42225716 +https://crbug.com/webrtc/15386,https://issues.webrtc.org/issues/42225717 +https://crbug.com/webrtc/15388,https://issues.webrtc.org/issues/42225718 +https://crbug.com/webrtc/15389,https://issues.webrtc.org/issues/42225719 +https://crbug.com/webrtc/1539,https://issues.webrtc.org/issues/42225720 +https://crbug.com/webrtc/15390,https://issues.webrtc.org/issues/42225721 +https://crbug.com/webrtc/15391,https://issues.webrtc.org/issues/42225722 +https://crbug.com/webrtc/15392,https://issues.webrtc.org/issues/42225723 +https://crbug.com/webrtc/15393,https://issues.webrtc.org/issues/42225724 +https://crbug.com/webrtc/15394,https://issues.webrtc.org/issues/42225725 +https://crbug.com/webrtc/15398,https://issues.webrtc.org/issues/42225726 +https://crbug.com/webrtc/15399,https://issues.webrtc.org/issues/42225727 +https://crbug.com/webrtc/154,https://issues.webrtc.org/issues/42225728 +https://crbug.com/webrtc/1540,https://issues.webrtc.org/issues/42225729 +https://crbug.com/webrtc/15400,https://issues.webrtc.org/issues/42225730 +https://crbug.com/webrtc/15401,https://issues.webrtc.org/issues/42225731 +https://crbug.com/webrtc/15402,https://issues.webrtc.org/issues/42225732 +https://crbug.com/webrtc/15403,https://issues.webrtc.org/issues/42225733 +https://crbug.com/webrtc/15404,https://issues.webrtc.org/issues/42225734 +https://crbug.com/webrtc/15405,https://issues.webrtc.org/issues/42225735 +https://crbug.com/webrtc/15406,https://issues.webrtc.org/issues/42225736 +https://crbug.com/webrtc/15407,https://issues.webrtc.org/issues/42225737 +https://crbug.com/webrtc/15408,https://issues.webrtc.org/issues/42225738 +https://crbug.com/webrtc/15409,https://issues.webrtc.org/issues/42225739 +https://crbug.com/webrtc/1541,https://issues.webrtc.org/issues/42225740 +https://crbug.com/webrtc/15410,https://issues.webrtc.org/issues/42225741 +https://crbug.com/webrtc/15411,https://issues.webrtc.org/issues/42225742 +https://crbug.com/webrtc/15412,https://issues.webrtc.org/issues/42225743 +https://crbug.com/webrtc/15413,https://issues.webrtc.org/issues/42225744 +https://crbug.com/webrtc/15414,https://issues.webrtc.org/issues/42225745 +https://crbug.com/webrtc/15415,https://issues.webrtc.org/issues/42225746 +https://crbug.com/webrtc/15416,https://issues.webrtc.org/issues/42225747 +https://crbug.com/webrtc/15417,https://issues.webrtc.org/issues/42225748 +https://crbug.com/webrtc/15418,https://issues.webrtc.org/issues/42225749 +https://crbug.com/webrtc/15419,https://issues.webrtc.org/issues/42225750 +https://crbug.com/webrtc/1542,https://issues.webrtc.org/issues/42225751 +https://crbug.com/webrtc/15420,https://issues.webrtc.org/issues/42225752 +https://crbug.com/webrtc/15421,https://issues.webrtc.org/issues/42225753 +https://crbug.com/webrtc/15422,https://issues.webrtc.org/issues/42225754 
+https://crbug.com/webrtc/15423,https://issues.webrtc.org/issues/42225755 +https://crbug.com/webrtc/15424,https://issues.webrtc.org/issues/42225756 +https://crbug.com/webrtc/15425,https://issues.webrtc.org/issues/42225757 +https://crbug.com/webrtc/15426,https://issues.webrtc.org/issues/42225758 +https://crbug.com/webrtc/15427,https://issues.webrtc.org/issues/42225759 +https://crbug.com/webrtc/15428,https://issues.webrtc.org/issues/42225760 +https://crbug.com/webrtc/15429,https://issues.webrtc.org/issues/42225761 +https://crbug.com/webrtc/1543,https://issues.webrtc.org/issues/42225762 +https://crbug.com/webrtc/15430,https://issues.webrtc.org/issues/42225763 +https://crbug.com/webrtc/15431,https://issues.webrtc.org/issues/42225764 +https://crbug.com/webrtc/15432,https://issues.webrtc.org/issues/42225765 +https://crbug.com/webrtc/15433,https://issues.webrtc.org/issues/42225766 +https://crbug.com/webrtc/15434,https://issues.webrtc.org/issues/42225767 +https://crbug.com/webrtc/15435,https://issues.webrtc.org/issues/42225768 +https://crbug.com/webrtc/15436,https://issues.webrtc.org/issues/42225769 +https://crbug.com/webrtc/15437,https://issues.webrtc.org/issues/42225770 +https://crbug.com/webrtc/15438,https://issues.webrtc.org/issues/42225771 +https://crbug.com/webrtc/15439,https://issues.webrtc.org/issues/42225772 +https://crbug.com/webrtc/1544,https://issues.webrtc.org/issues/42225773 +https://crbug.com/webrtc/15440,https://issues.webrtc.org/issues/42225774 +https://crbug.com/webrtc/15441,https://issues.webrtc.org/issues/42225775 +https://crbug.com/webrtc/15442,https://issues.webrtc.org/issues/42225776 +https://crbug.com/webrtc/15443,https://issues.webrtc.org/issues/42225777 +https://crbug.com/webrtc/15444,https://issues.webrtc.org/issues/42225778 +https://crbug.com/webrtc/15445,https://issues.webrtc.org/issues/42225779 +https://crbug.com/webrtc/15446,https://issues.webrtc.org/issues/42225780 +https://crbug.com/webrtc/15447,https://issues.webrtc.org/issues/42225781 +https://crbug.com/webrtc/15448,https://issues.webrtc.org/issues/42225782 +https://crbug.com/webrtc/15449,https://issues.webrtc.org/issues/42225783 +https://crbug.com/webrtc/1545,https://issues.webrtc.org/issues/42225784 +https://crbug.com/webrtc/15450,https://issues.webrtc.org/issues/42225785 +https://crbug.com/webrtc/15451,https://issues.webrtc.org/issues/42225786 +https://crbug.com/webrtc/15452,https://issues.webrtc.org/issues/42225787 +https://crbug.com/webrtc/15453,https://issues.webrtc.org/issues/42225788 +https://crbug.com/webrtc/15454,https://issues.webrtc.org/issues/42225789 +https://crbug.com/webrtc/15455,https://issues.webrtc.org/issues/42225790 +https://crbug.com/webrtc/15456,https://issues.webrtc.org/issues/42225791 +https://crbug.com/webrtc/15457,https://issues.webrtc.org/issues/42225792 +https://crbug.com/webrtc/15458,https://issues.webrtc.org/issues/42225793 +https://crbug.com/webrtc/15459,https://issues.webrtc.org/issues/42225794 +https://crbug.com/webrtc/1546,https://issues.webrtc.org/issues/42225795 +https://crbug.com/webrtc/15460,https://issues.webrtc.org/issues/42225796 +https://crbug.com/webrtc/15461,https://issues.webrtc.org/issues/42225797 +https://crbug.com/webrtc/15462,https://issues.webrtc.org/issues/42225798 +https://crbug.com/webrtc/15463,https://issues.webrtc.org/issues/42225799 +https://crbug.com/webrtc/15464,https://issues.webrtc.org/issues/42225800 +https://crbug.com/webrtc/15465,https://issues.webrtc.org/issues/42225801 +https://crbug.com/webrtc/15466,https://issues.webrtc.org/issues/42225802 
+https://crbug.com/webrtc/15467,https://issues.webrtc.org/issues/42225803 +https://crbug.com/webrtc/15468,https://issues.webrtc.org/issues/42225804 +https://crbug.com/webrtc/15469,https://issues.webrtc.org/issues/42225805 +https://crbug.com/webrtc/1547,https://issues.webrtc.org/issues/42225806 +https://crbug.com/webrtc/15471,https://issues.webrtc.org/issues/42225807 +https://crbug.com/webrtc/15472,https://issues.webrtc.org/issues/42225808 +https://crbug.com/webrtc/15473,https://issues.webrtc.org/issues/42225809 +https://crbug.com/webrtc/15474,https://issues.webrtc.org/issues/42225810 +https://crbug.com/webrtc/15475,https://issues.webrtc.org/issues/42225811 +https://crbug.com/webrtc/15476,https://issues.webrtc.org/issues/42225812 +https://crbug.com/webrtc/15477,https://issues.webrtc.org/issues/42225813 +https://crbug.com/webrtc/15478,https://issues.webrtc.org/issues/42225814 +https://crbug.com/webrtc/15479,https://issues.webrtc.org/issues/42225815 +https://crbug.com/webrtc/1548,https://issues.webrtc.org/issues/42225816 +https://crbug.com/webrtc/15480,https://issues.webrtc.org/issues/42225817 +https://crbug.com/webrtc/15481,https://issues.webrtc.org/issues/42225818 +https://crbug.com/webrtc/15482,https://issues.webrtc.org/issues/42225819 +https://crbug.com/webrtc/15483,https://issues.webrtc.org/issues/42225820 +https://crbug.com/webrtc/15484,https://issues.webrtc.org/issues/42225821 +https://crbug.com/webrtc/15485,https://issues.webrtc.org/issues/42225822 +https://crbug.com/webrtc/15486,https://issues.webrtc.org/issues/42225823 +https://crbug.com/webrtc/15487,https://issues.webrtc.org/issues/42225824 +https://crbug.com/webrtc/15488,https://issues.webrtc.org/issues/42225825 +https://crbug.com/webrtc/15489,https://issues.webrtc.org/issues/42225826 +https://crbug.com/webrtc/1549,https://issues.webrtc.org/issues/42225827 +https://crbug.com/webrtc/15490,https://issues.webrtc.org/issues/42225828 +https://crbug.com/webrtc/15491,https://issues.webrtc.org/issues/42225829 +https://crbug.com/webrtc/15492,https://issues.webrtc.org/issues/42225830 +https://crbug.com/webrtc/15493,https://issues.webrtc.org/issues/42225831 +https://crbug.com/webrtc/15494,https://issues.webrtc.org/issues/42225832 +https://crbug.com/webrtc/15495,https://issues.webrtc.org/issues/42225833 +https://crbug.com/webrtc/15496,https://issues.webrtc.org/issues/42225834 +https://crbug.com/webrtc/15497,https://issues.webrtc.org/issues/42225835 +https://crbug.com/webrtc/15498,https://issues.webrtc.org/issues/42225836 +https://crbug.com/webrtc/15499,https://issues.webrtc.org/issues/42225837 +https://crbug.com/webrtc/155,https://issues.webrtc.org/issues/42225838 +https://crbug.com/webrtc/1550,https://issues.webrtc.org/issues/42225839 +https://crbug.com/webrtc/15500,https://issues.webrtc.org/issues/42225840 +https://crbug.com/webrtc/15501,https://issues.webrtc.org/issues/42225841 +https://crbug.com/webrtc/15502,https://issues.webrtc.org/issues/42225842 +https://crbug.com/webrtc/15503,https://issues.webrtc.org/issues/42225843 +https://crbug.com/webrtc/15504,https://issues.webrtc.org/issues/42225844 +https://crbug.com/webrtc/15505,https://issues.webrtc.org/issues/42225845 +https://crbug.com/webrtc/15506,https://issues.webrtc.org/issues/42225846 +https://crbug.com/webrtc/15507,https://issues.webrtc.org/issues/42225847 +https://crbug.com/webrtc/15508,https://issues.webrtc.org/issues/42225848 +https://crbug.com/webrtc/15509,https://issues.webrtc.org/issues/42225849 +https://crbug.com/webrtc/1551,https://issues.webrtc.org/issues/42225850 
+https://crbug.com/webrtc/15510,https://issues.webrtc.org/issues/42225851 +https://crbug.com/webrtc/15513,https://issues.webrtc.org/issues/42225852 +https://crbug.com/webrtc/15514,https://issues.webrtc.org/issues/42225853 +https://crbug.com/webrtc/15515,https://issues.webrtc.org/issues/42225854 +https://crbug.com/webrtc/15516,https://issues.webrtc.org/issues/42225855 +https://crbug.com/webrtc/15517,https://issues.webrtc.org/issues/42225856 +https://crbug.com/webrtc/15518,https://issues.webrtc.org/issues/42225857 +https://crbug.com/webrtc/1552,https://issues.webrtc.org/issues/42225858 +https://crbug.com/webrtc/15520,https://issues.webrtc.org/issues/42225859 +https://crbug.com/webrtc/15521,https://issues.webrtc.org/issues/42225860 +https://crbug.com/webrtc/15522,https://issues.webrtc.org/issues/42225861 +https://crbug.com/webrtc/15523,https://issues.webrtc.org/issues/42225862 +https://crbug.com/webrtc/15524,https://issues.webrtc.org/issues/42225863 +https://crbug.com/webrtc/15525,https://issues.webrtc.org/issues/42225864 +https://crbug.com/webrtc/15526,https://issues.webrtc.org/issues/42225865 +https://crbug.com/webrtc/15527,https://issues.webrtc.org/issues/42225866 +https://crbug.com/webrtc/15528,https://issues.webrtc.org/issues/42225867 +https://crbug.com/webrtc/15529,https://issues.webrtc.org/issues/42225868 +https://crbug.com/webrtc/1553,https://issues.webrtc.org/issues/42225869 +https://crbug.com/webrtc/15530,https://issues.webrtc.org/issues/42225870 +https://crbug.com/webrtc/15531,https://issues.webrtc.org/issues/42225871 +https://crbug.com/webrtc/15532,https://issues.webrtc.org/issues/42225872 +https://crbug.com/webrtc/15533,https://issues.webrtc.org/issues/42225873 +https://crbug.com/webrtc/15534,https://issues.webrtc.org/issues/42225874 +https://crbug.com/webrtc/15535,https://issues.webrtc.org/issues/42225875 +https://crbug.com/webrtc/15536,https://issues.webrtc.org/issues/42225876 +https://crbug.com/webrtc/15537,https://issues.webrtc.org/issues/42225877 +https://crbug.com/webrtc/15538,https://issues.webrtc.org/issues/42225878 +https://crbug.com/webrtc/15539,https://issues.webrtc.org/issues/42225879 +https://crbug.com/webrtc/1554,https://issues.webrtc.org/issues/42225880 +https://crbug.com/webrtc/15540,https://issues.webrtc.org/issues/42225881 +https://crbug.com/webrtc/15541,https://issues.webrtc.org/issues/42225882 +https://crbug.com/webrtc/15543,https://issues.webrtc.org/issues/42225883 +https://crbug.com/webrtc/15544,https://issues.webrtc.org/issues/42225884 +https://crbug.com/webrtc/15545,https://issues.webrtc.org/issues/42225885 +https://crbug.com/webrtc/15546,https://issues.webrtc.org/issues/42225886 +https://crbug.com/webrtc/15547,https://issues.webrtc.org/issues/42225887 +https://crbug.com/webrtc/15548,https://issues.webrtc.org/issues/42225888 +https://crbug.com/webrtc/15549,https://issues.webrtc.org/issues/42225889 +https://crbug.com/webrtc/1555,https://issues.webrtc.org/issues/42225890 +https://crbug.com/webrtc/15551,https://issues.webrtc.org/issues/42225891 +https://crbug.com/webrtc/15552,https://issues.webrtc.org/issues/42225892 +https://crbug.com/webrtc/15553,https://issues.webrtc.org/issues/42225893 +https://crbug.com/webrtc/15554,https://issues.webrtc.org/issues/42225894 +https://crbug.com/webrtc/15555,https://issues.webrtc.org/issues/42225895 +https://crbug.com/webrtc/15556,https://issues.webrtc.org/issues/42225896 +https://crbug.com/webrtc/15557,https://issues.webrtc.org/issues/42225897 +https://crbug.com/webrtc/15559,https://issues.webrtc.org/issues/42225898 
+https://crbug.com/webrtc/1556,https://issues.webrtc.org/issues/42225899 +https://crbug.com/webrtc/15560,https://issues.webrtc.org/issues/42225900 +https://crbug.com/webrtc/15561,https://issues.webrtc.org/issues/42225901 +https://crbug.com/webrtc/15562,https://issues.webrtc.org/issues/42225902 +https://crbug.com/webrtc/15563,https://issues.webrtc.org/issues/42225903 +https://crbug.com/webrtc/15564,https://issues.webrtc.org/issues/42225904 +https://crbug.com/webrtc/15565,https://issues.webrtc.org/issues/42225905 +https://crbug.com/webrtc/15566,https://issues.webrtc.org/issues/42225906 +https://crbug.com/webrtc/15567,https://issues.webrtc.org/issues/42225907 +https://crbug.com/webrtc/15568,https://issues.webrtc.org/issues/42225908 +https://crbug.com/webrtc/15569,https://issues.webrtc.org/issues/42225909 +https://crbug.com/webrtc/1557,https://issues.webrtc.org/issues/42225910 +https://crbug.com/webrtc/15570,https://issues.webrtc.org/issues/42225911 +https://crbug.com/webrtc/15571,https://issues.webrtc.org/issues/42225912 +https://crbug.com/webrtc/15572,https://issues.webrtc.org/issues/42225913 +https://crbug.com/webrtc/15573,https://issues.webrtc.org/issues/42225914 +https://crbug.com/webrtc/15574,https://issues.webrtc.org/issues/42225915 +https://crbug.com/webrtc/15575,https://issues.webrtc.org/issues/42225916 +https://crbug.com/webrtc/15576,https://issues.webrtc.org/issues/42225917 +https://crbug.com/webrtc/15577,https://issues.webrtc.org/issues/42225918 +https://crbug.com/webrtc/15578,https://issues.webrtc.org/issues/42225919 +https://crbug.com/webrtc/15579,https://issues.webrtc.org/issues/42225920 +https://crbug.com/webrtc/1558,https://issues.webrtc.org/issues/42225921 +https://crbug.com/webrtc/15580,https://issues.webrtc.org/issues/42225922 +https://crbug.com/webrtc/15581,https://issues.webrtc.org/issues/42225923 +https://crbug.com/webrtc/15582,https://issues.webrtc.org/issues/42225924 +https://crbug.com/webrtc/15583,https://issues.webrtc.org/issues/42225925 +https://crbug.com/webrtc/15584,https://issues.webrtc.org/issues/42225926 +https://crbug.com/webrtc/15585,https://issues.webrtc.org/issues/42225927 +https://crbug.com/webrtc/15586,https://issues.webrtc.org/issues/42225928 +https://crbug.com/webrtc/15587,https://issues.webrtc.org/issues/42225929 +https://crbug.com/webrtc/15588,https://issues.webrtc.org/issues/42225930 +https://crbug.com/webrtc/15589,https://issues.webrtc.org/issues/42225931 +https://crbug.com/webrtc/1559,https://issues.webrtc.org/issues/42225932 +https://crbug.com/webrtc/15590,https://issues.webrtc.org/issues/42225933 +https://crbug.com/webrtc/15591,https://issues.webrtc.org/issues/42225934 +https://crbug.com/webrtc/15592,https://issues.webrtc.org/issues/42225935 +https://crbug.com/webrtc/15593,https://issues.webrtc.org/issues/42225936 +https://crbug.com/webrtc/15594,https://issues.webrtc.org/issues/42225937 +https://crbug.com/webrtc/15595,https://issues.webrtc.org/issues/42225938 +https://crbug.com/webrtc/15596,https://issues.webrtc.org/issues/42225939 +https://crbug.com/webrtc/15597,https://issues.webrtc.org/issues/42225940 +https://crbug.com/webrtc/15598,https://issues.webrtc.org/issues/42225941 +https://crbug.com/webrtc/15599,https://issues.webrtc.org/issues/42225942 +https://crbug.com/webrtc/156,https://issues.webrtc.org/issues/42225943 +https://crbug.com/webrtc/1560,https://issues.webrtc.org/issues/42225944 +https://crbug.com/webrtc/15600,https://issues.webrtc.org/issues/42225945 +https://crbug.com/webrtc/15601,https://issues.webrtc.org/issues/42225946 
+https://crbug.com/webrtc/15602,https://issues.webrtc.org/issues/42225947 +https://crbug.com/webrtc/15603,https://issues.webrtc.org/issues/42225948 +https://crbug.com/webrtc/15604,https://issues.webrtc.org/issues/42225949 +https://crbug.com/webrtc/15605,https://issues.webrtc.org/issues/42225950 +https://crbug.com/webrtc/15606,https://issues.webrtc.org/issues/42225951 +https://crbug.com/webrtc/15607,https://issues.webrtc.org/issues/42225952 +https://crbug.com/webrtc/15608,https://issues.webrtc.org/issues/42225953 +https://crbug.com/webrtc/15609,https://issues.webrtc.org/issues/42225954 +https://crbug.com/webrtc/1561,https://issues.webrtc.org/issues/42225955 +https://crbug.com/webrtc/15610,https://issues.webrtc.org/issues/42225956 +https://crbug.com/webrtc/15611,https://issues.webrtc.org/issues/42225957 +https://crbug.com/webrtc/15612,https://issues.webrtc.org/issues/42225958 +https://crbug.com/webrtc/15613,https://issues.webrtc.org/issues/42225959 +https://crbug.com/webrtc/15614,https://issues.webrtc.org/issues/42225960 +https://crbug.com/webrtc/15615,https://issues.webrtc.org/issues/42225961 +https://crbug.com/webrtc/15616,https://issues.webrtc.org/issues/42225962 +https://crbug.com/webrtc/15617,https://issues.webrtc.org/issues/42225963 +https://crbug.com/webrtc/15618,https://issues.webrtc.org/issues/42225964 +https://crbug.com/webrtc/15619,https://issues.webrtc.org/issues/42225965 +https://crbug.com/webrtc/1562,https://issues.webrtc.org/issues/42225966 +https://crbug.com/webrtc/15620,https://issues.webrtc.org/issues/42225967 +https://crbug.com/webrtc/15621,https://issues.webrtc.org/issues/42225968 +https://crbug.com/webrtc/15622,https://issues.webrtc.org/issues/42225969 +https://crbug.com/webrtc/15623,https://issues.webrtc.org/issues/42225970 +https://crbug.com/webrtc/15624,https://issues.webrtc.org/issues/42225971 +https://crbug.com/webrtc/15625,https://issues.webrtc.org/issues/42225972 +https://crbug.com/webrtc/15626,https://issues.webrtc.org/issues/42225973 +https://crbug.com/webrtc/15627,https://issues.webrtc.org/issues/42225974 +https://crbug.com/webrtc/15628,https://issues.webrtc.org/issues/42225975 +https://crbug.com/webrtc/1563,https://issues.webrtc.org/issues/42225976 +https://crbug.com/webrtc/15630,https://issues.webrtc.org/issues/42225977 +https://crbug.com/webrtc/15631,https://issues.webrtc.org/issues/42225978 +https://crbug.com/webrtc/15632,https://issues.webrtc.org/issues/42225979 +https://crbug.com/webrtc/15634,https://issues.webrtc.org/issues/42225980 +https://crbug.com/webrtc/15636,https://issues.webrtc.org/issues/42225981 +https://crbug.com/webrtc/15637,https://issues.webrtc.org/issues/42225982 +https://crbug.com/webrtc/15638,https://issues.webrtc.org/issues/42225983 +https://crbug.com/webrtc/15639,https://issues.webrtc.org/issues/42225984 +https://crbug.com/webrtc/1564,https://issues.webrtc.org/issues/42225985 +https://crbug.com/webrtc/15640,https://issues.webrtc.org/issues/42225986 +https://crbug.com/webrtc/15641,https://issues.webrtc.org/issues/42225987 +https://crbug.com/webrtc/15642,https://issues.webrtc.org/issues/42225988 +https://crbug.com/webrtc/15643,https://issues.webrtc.org/issues/42225989 +https://crbug.com/webrtc/15644,https://issues.webrtc.org/issues/42225990 +https://crbug.com/webrtc/15645,https://issues.webrtc.org/issues/42225991 +https://crbug.com/webrtc/15646,https://issues.webrtc.org/issues/42225992 +https://crbug.com/webrtc/15649,https://issues.webrtc.org/issues/42225993 +https://crbug.com/webrtc/1565,https://issues.webrtc.org/issues/42225994 
+https://crbug.com/webrtc/15650,https://issues.webrtc.org/issues/42225995 +https://crbug.com/webrtc/15651,https://issues.webrtc.org/issues/42225996 +https://crbug.com/webrtc/15652,https://issues.webrtc.org/issues/42225997 +https://crbug.com/webrtc/15653,https://issues.webrtc.org/issues/42225998 +https://crbug.com/webrtc/15654,https://issues.webrtc.org/issues/42225999 +https://crbug.com/webrtc/15655,https://issues.webrtc.org/issues/42226000 +https://crbug.com/webrtc/15656,https://issues.webrtc.org/issues/42226001 +https://crbug.com/webrtc/15657,https://issues.webrtc.org/issues/42226002 +https://crbug.com/webrtc/15658,https://issues.webrtc.org/issues/42226003 +https://crbug.com/webrtc/15659,https://issues.webrtc.org/issues/42226004 +https://crbug.com/webrtc/1566,https://issues.webrtc.org/issues/42226005 +https://crbug.com/webrtc/15661,https://issues.webrtc.org/issues/42226006 +https://crbug.com/webrtc/15662,https://issues.webrtc.org/issues/42226007 +https://crbug.com/webrtc/15663,https://issues.webrtc.org/issues/42226008 +https://crbug.com/webrtc/15664,https://issues.webrtc.org/issues/42226009 +https://crbug.com/webrtc/15665,https://issues.webrtc.org/issues/42226010 +https://crbug.com/webrtc/15666,https://issues.webrtc.org/issues/42226011 +https://crbug.com/webrtc/15667,https://issues.webrtc.org/issues/42226012 +https://crbug.com/webrtc/15668,https://issues.webrtc.org/issues/42226013 +https://crbug.com/webrtc/15669,https://issues.webrtc.org/issues/42226014 +https://crbug.com/webrtc/1567,https://issues.webrtc.org/issues/42226015 +https://crbug.com/webrtc/15670,https://issues.webrtc.org/issues/42226016 +https://crbug.com/webrtc/15671,https://issues.webrtc.org/issues/42226017 +https://crbug.com/webrtc/15672,https://issues.webrtc.org/issues/42226018 +https://crbug.com/webrtc/15673,https://issues.webrtc.org/issues/42226019 +https://crbug.com/webrtc/15674,https://issues.webrtc.org/issues/42226020 +https://crbug.com/webrtc/15675,https://issues.webrtc.org/issues/42226021 +https://crbug.com/webrtc/15676,https://issues.webrtc.org/issues/42226022 +https://crbug.com/webrtc/15677,https://issues.webrtc.org/issues/42226023 +https://crbug.com/webrtc/15678,https://issues.webrtc.org/issues/42226024 +https://crbug.com/webrtc/15679,https://issues.webrtc.org/issues/42226025 +https://crbug.com/webrtc/1568,https://issues.webrtc.org/issues/42226026 +https://crbug.com/webrtc/15680,https://issues.webrtc.org/issues/42226027 +https://crbug.com/webrtc/15681,https://issues.webrtc.org/issues/42226028 +https://crbug.com/webrtc/15682,https://issues.webrtc.org/issues/42226029 +https://crbug.com/webrtc/15683,https://issues.webrtc.org/issues/42226030 +https://crbug.com/webrtc/15684,https://issues.webrtc.org/issues/42226031 +https://crbug.com/webrtc/15685,https://issues.webrtc.org/issues/42226032 +https://crbug.com/webrtc/15686,https://issues.webrtc.org/issues/42226033 +https://crbug.com/webrtc/15687,https://issues.webrtc.org/issues/42226034 +https://crbug.com/webrtc/15688,https://issues.webrtc.org/issues/42226035 +https://crbug.com/webrtc/15689,https://issues.webrtc.org/issues/42226036 +https://crbug.com/webrtc/1569,https://issues.webrtc.org/issues/42226037 +https://crbug.com/webrtc/15690,https://issues.webrtc.org/issues/42226038 +https://crbug.com/webrtc/15691,https://issues.webrtc.org/issues/42226039 +https://crbug.com/webrtc/15692,https://issues.webrtc.org/issues/42226040 +https://crbug.com/webrtc/15693,https://issues.webrtc.org/issues/42226041 +https://crbug.com/webrtc/15694,https://issues.webrtc.org/issues/42226042 
+https://crbug.com/webrtc/15695,https://issues.webrtc.org/issues/42226043 +https://crbug.com/webrtc/15696,https://issues.webrtc.org/issues/42226044 +https://crbug.com/webrtc/15697,https://issues.webrtc.org/issues/42226045 +https://crbug.com/webrtc/15698,https://issues.webrtc.org/issues/42226046 +https://crbug.com/webrtc/15699,https://issues.webrtc.org/issues/42226047 +https://crbug.com/webrtc/157,https://issues.webrtc.org/issues/42226048 +https://crbug.com/webrtc/1570,https://issues.webrtc.org/issues/42226049 +https://crbug.com/webrtc/15700,https://issues.webrtc.org/issues/42226050 +https://crbug.com/webrtc/15701,https://issues.webrtc.org/issues/42226051 +https://crbug.com/webrtc/15702,https://issues.webrtc.org/issues/42226052 +https://crbug.com/webrtc/15703,https://issues.webrtc.org/issues/42226053 +https://crbug.com/webrtc/15704,https://issues.webrtc.org/issues/42226054 +https://crbug.com/webrtc/15705,https://issues.webrtc.org/issues/42226055 +https://crbug.com/webrtc/15706,https://issues.webrtc.org/issues/42226056 +https://crbug.com/webrtc/15707,https://issues.webrtc.org/issues/42226057 +https://crbug.com/webrtc/15708,https://issues.webrtc.org/issues/42226058 +https://crbug.com/webrtc/15709,https://issues.webrtc.org/issues/42226059 +https://crbug.com/webrtc/1571,https://issues.webrtc.org/issues/42226060 +https://crbug.com/webrtc/15710,https://issues.webrtc.org/issues/42226061 +https://crbug.com/webrtc/15711,https://issues.webrtc.org/issues/42226062 +https://crbug.com/webrtc/15712,https://issues.webrtc.org/issues/42226063 +https://crbug.com/webrtc/15713,https://issues.webrtc.org/issues/42226064 +https://crbug.com/webrtc/15714,https://issues.webrtc.org/issues/42226065 +https://crbug.com/webrtc/15715,https://issues.webrtc.org/issues/42226066 +https://crbug.com/webrtc/15716,https://issues.webrtc.org/issues/42226067 +https://crbug.com/webrtc/15717,https://issues.webrtc.org/issues/42226068 +https://crbug.com/webrtc/15718,https://issues.webrtc.org/issues/42226069 +https://crbug.com/webrtc/15719,https://issues.webrtc.org/issues/42226070 +https://crbug.com/webrtc/1572,https://issues.webrtc.org/issues/42226071 +https://crbug.com/webrtc/15720,https://issues.webrtc.org/issues/42226072 +https://crbug.com/webrtc/15721,https://issues.webrtc.org/issues/42226073 +https://crbug.com/webrtc/15722,https://issues.webrtc.org/issues/42226074 +https://crbug.com/webrtc/15723,https://issues.webrtc.org/issues/42226075 +https://crbug.com/webrtc/15724,https://issues.webrtc.org/issues/42226076 +https://crbug.com/webrtc/15725,https://issues.webrtc.org/issues/42226077 +https://crbug.com/webrtc/15726,https://issues.webrtc.org/issues/42226078 +https://crbug.com/webrtc/15727,https://issues.webrtc.org/issues/42226079 +https://crbug.com/webrtc/15728,https://issues.webrtc.org/issues/42226080 +https://crbug.com/webrtc/15729,https://issues.webrtc.org/issues/42226081 +https://crbug.com/webrtc/1573,https://issues.webrtc.org/issues/42226082 +https://crbug.com/webrtc/15730,https://issues.webrtc.org/issues/42226083 +https://crbug.com/webrtc/15731,https://issues.webrtc.org/issues/42226084 +https://crbug.com/webrtc/15732,https://issues.webrtc.org/issues/42226085 +https://crbug.com/webrtc/15733,https://issues.webrtc.org/issues/42226086 +https://crbug.com/webrtc/15734,https://issues.webrtc.org/issues/42226087 +https://crbug.com/webrtc/15735,https://issues.webrtc.org/issues/42226088 +https://crbug.com/webrtc/15736,https://issues.webrtc.org/issues/42226089 +https://crbug.com/webrtc/15737,https://issues.webrtc.org/issues/42226090 
+https://crbug.com/webrtc/15738,https://issues.webrtc.org/issues/42226091 +https://crbug.com/webrtc/15739,https://issues.webrtc.org/issues/42226092 +https://crbug.com/webrtc/1574,https://issues.webrtc.org/issues/42226093 +https://crbug.com/webrtc/15740,https://issues.webrtc.org/issues/42226094 +https://crbug.com/webrtc/15741,https://issues.webrtc.org/issues/42226095 +https://crbug.com/webrtc/15742,https://issues.webrtc.org/issues/42226096 +https://crbug.com/webrtc/15743,https://issues.webrtc.org/issues/42226097 +https://crbug.com/webrtc/15744,https://issues.webrtc.org/issues/42226098 +https://crbug.com/webrtc/15745,https://issues.webrtc.org/issues/42226099 +https://crbug.com/webrtc/15746,https://issues.webrtc.org/issues/42226100 +https://crbug.com/webrtc/15747,https://issues.webrtc.org/issues/42226101 +https://crbug.com/webrtc/15748,https://issues.webrtc.org/issues/42226102 +https://crbug.com/webrtc/15749,https://issues.webrtc.org/issues/42226103 +https://crbug.com/webrtc/1575,https://issues.webrtc.org/issues/42226104 +https://crbug.com/webrtc/15750,https://issues.webrtc.org/issues/42226105 +https://crbug.com/webrtc/15751,https://issues.webrtc.org/issues/42226106 +https://crbug.com/webrtc/15752,https://issues.webrtc.org/issues/42226107 +https://crbug.com/webrtc/15753,https://issues.webrtc.org/issues/42226108 +https://crbug.com/webrtc/15754,https://issues.webrtc.org/issues/42226109 +https://crbug.com/webrtc/15755,https://issues.webrtc.org/issues/42226110 +https://crbug.com/webrtc/15756,https://issues.webrtc.org/issues/42226111 +https://crbug.com/webrtc/15757,https://issues.webrtc.org/issues/42226112 +https://crbug.com/webrtc/15758,https://issues.webrtc.org/issues/42226113 +https://crbug.com/webrtc/15759,https://issues.webrtc.org/issues/42226114 +https://crbug.com/webrtc/1576,https://issues.webrtc.org/issues/42226115 +https://crbug.com/webrtc/15760,https://issues.webrtc.org/issues/42226116 +https://crbug.com/webrtc/15761,https://issues.webrtc.org/issues/42226117 +https://crbug.com/webrtc/15762,https://issues.webrtc.org/issues/42226118 +https://crbug.com/webrtc/15763,https://issues.webrtc.org/issues/42226119 +https://crbug.com/webrtc/15764,https://issues.webrtc.org/issues/42226120 +https://crbug.com/webrtc/15765,https://issues.webrtc.org/issues/42226121 +https://crbug.com/webrtc/15766,https://issues.webrtc.org/issues/42226122 +https://crbug.com/webrtc/15767,https://issues.webrtc.org/issues/42226123 +https://crbug.com/webrtc/15768,https://issues.webrtc.org/issues/42226124 +https://crbug.com/webrtc/15769,https://issues.webrtc.org/issues/42226125 +https://crbug.com/webrtc/1577,https://issues.webrtc.org/issues/42226126 +https://crbug.com/webrtc/15770,https://issues.webrtc.org/issues/42226127 +https://crbug.com/webrtc/15771,https://issues.webrtc.org/issues/42226128 +https://crbug.com/webrtc/15772,https://issues.webrtc.org/issues/42226129 +https://crbug.com/webrtc/15773,https://issues.webrtc.org/issues/42226130 +https://crbug.com/webrtc/15774,https://issues.webrtc.org/issues/42226131 +https://crbug.com/webrtc/15775,https://issues.webrtc.org/issues/42226132 +https://crbug.com/webrtc/15776,https://issues.webrtc.org/issues/42226133 +https://crbug.com/webrtc/15777,https://issues.webrtc.org/issues/42226134 +https://crbug.com/webrtc/15778,https://issues.webrtc.org/issues/42226135 +https://crbug.com/webrtc/15779,https://issues.webrtc.org/issues/42226136 +https://crbug.com/webrtc/1578,https://issues.webrtc.org/issues/42226137 +https://crbug.com/webrtc/15780,https://issues.webrtc.org/issues/42226138 
+https://crbug.com/webrtc/15781,https://issues.webrtc.org/issues/42226139 +https://crbug.com/webrtc/15782,https://issues.webrtc.org/issues/42226140 +https://crbug.com/webrtc/15783,https://issues.webrtc.org/issues/42226141 +https://crbug.com/webrtc/15784,https://issues.webrtc.org/issues/42226142 +https://crbug.com/webrtc/15785,https://issues.webrtc.org/issues/42226143 +https://crbug.com/webrtc/15786,https://issues.webrtc.org/issues/42226144 +https://crbug.com/webrtc/15787,https://issues.webrtc.org/issues/42226145 +https://crbug.com/webrtc/15788,https://issues.webrtc.org/issues/42226146 +https://crbug.com/webrtc/15789,https://issues.webrtc.org/issues/42226147 +https://crbug.com/webrtc/1579,https://issues.webrtc.org/issues/42226148 +https://crbug.com/webrtc/15790,https://issues.webrtc.org/issues/42226149 +https://crbug.com/webrtc/15791,https://issues.webrtc.org/issues/42226150 +https://crbug.com/webrtc/15792,https://issues.webrtc.org/issues/42226151 +https://crbug.com/webrtc/15793,https://issues.webrtc.org/issues/42226152 +https://crbug.com/webrtc/15794,https://issues.webrtc.org/issues/42226153 +https://crbug.com/webrtc/15795,https://issues.webrtc.org/issues/42226154 +https://crbug.com/webrtc/15796,https://issues.webrtc.org/issues/42226155 +https://crbug.com/webrtc/15797,https://issues.webrtc.org/issues/42226156 +https://crbug.com/webrtc/15798,https://issues.webrtc.org/issues/42226157 +https://crbug.com/webrtc/15799,https://issues.webrtc.org/issues/42226158 +https://crbug.com/webrtc/158,https://issues.webrtc.org/issues/42226159 +https://crbug.com/webrtc/1580,https://issues.webrtc.org/issues/42226160 +https://crbug.com/webrtc/15800,https://issues.webrtc.org/issues/42226161 +https://crbug.com/webrtc/15801,https://issues.webrtc.org/issues/42226162 +https://crbug.com/webrtc/15802,https://issues.webrtc.org/issues/42226163 +https://crbug.com/webrtc/15803,https://issues.webrtc.org/issues/42226164 +https://crbug.com/webrtc/15804,https://issues.webrtc.org/issues/42226165 +https://crbug.com/webrtc/15805,https://issues.webrtc.org/issues/42226166 +https://crbug.com/webrtc/15806,https://issues.webrtc.org/issues/42226167 +https://crbug.com/webrtc/15807,https://issues.webrtc.org/issues/42226168 +https://crbug.com/webrtc/15808,https://issues.webrtc.org/issues/42226169 +https://crbug.com/webrtc/15809,https://issues.webrtc.org/issues/42226170 +https://crbug.com/webrtc/1581,https://issues.webrtc.org/issues/42226171 +https://crbug.com/webrtc/15810,https://issues.webrtc.org/issues/42226172 +https://crbug.com/webrtc/15811,https://issues.webrtc.org/issues/42226173 +https://crbug.com/webrtc/15812,https://issues.webrtc.org/issues/42226174 +https://crbug.com/webrtc/15813,https://issues.webrtc.org/issues/42226175 +https://crbug.com/webrtc/15814,https://issues.webrtc.org/issues/42226176 +https://crbug.com/webrtc/15815,https://issues.webrtc.org/issues/42226177 +https://crbug.com/webrtc/15816,https://issues.webrtc.org/issues/42226178 +https://crbug.com/webrtc/15817,https://issues.webrtc.org/issues/42226179 +https://crbug.com/webrtc/15818,https://issues.webrtc.org/issues/42226180 +https://crbug.com/webrtc/15819,https://issues.webrtc.org/issues/42226181 +https://crbug.com/webrtc/1582,https://issues.webrtc.org/issues/42226182 +https://crbug.com/webrtc/15820,https://issues.webrtc.org/issues/42226183 +https://crbug.com/webrtc/15821,https://issues.webrtc.org/issues/42226184 +https://crbug.com/webrtc/15822,https://issues.webrtc.org/issues/42226185 +https://crbug.com/webrtc/15823,https://issues.webrtc.org/issues/42226186 
+https://crbug.com/webrtc/15824,https://issues.webrtc.org/issues/42226187 +https://crbug.com/webrtc/15825,https://issues.webrtc.org/issues/42226188 +https://crbug.com/webrtc/15826,https://issues.webrtc.org/issues/42226189 +https://crbug.com/webrtc/15827,https://issues.webrtc.org/issues/42226190 +https://crbug.com/webrtc/15828,https://issues.webrtc.org/issues/42226191 +https://crbug.com/webrtc/15829,https://issues.webrtc.org/issues/42226192 +https://crbug.com/webrtc/1583,https://issues.webrtc.org/issues/42226193 +https://crbug.com/webrtc/15830,https://issues.webrtc.org/issues/42226194 +https://crbug.com/webrtc/15831,https://issues.webrtc.org/issues/42226195 +https://crbug.com/webrtc/15832,https://issues.webrtc.org/issues/42226196 +https://crbug.com/webrtc/15833,https://issues.webrtc.org/issues/42226197 +https://crbug.com/webrtc/15834,https://issues.webrtc.org/issues/42226198 +https://crbug.com/webrtc/15835,https://issues.webrtc.org/issues/42226199 +https://crbug.com/webrtc/15836,https://issues.webrtc.org/issues/42226200 +https://crbug.com/webrtc/15837,https://issues.webrtc.org/issues/42226201 +https://crbug.com/webrtc/15838,https://issues.webrtc.org/issues/42226202 +https://crbug.com/webrtc/15839,https://issues.webrtc.org/issues/42226203 +https://crbug.com/webrtc/1584,https://issues.webrtc.org/issues/42226204 +https://crbug.com/webrtc/15840,https://issues.webrtc.org/issues/42226205 +https://crbug.com/webrtc/15841,https://issues.webrtc.org/issues/42226206 +https://crbug.com/webrtc/15842,https://issues.webrtc.org/issues/42226207 +https://crbug.com/webrtc/15843,https://issues.webrtc.org/issues/42226208 +https://crbug.com/webrtc/15844,https://issues.webrtc.org/issues/42226209 +https://crbug.com/webrtc/15845,https://issues.webrtc.org/issues/42226210 +https://crbug.com/webrtc/15846,https://issues.webrtc.org/issues/42226211 +https://crbug.com/webrtc/15847,https://issues.webrtc.org/issues/42226212 +https://crbug.com/webrtc/15848,https://issues.webrtc.org/issues/42226213 +https://crbug.com/webrtc/15849,https://issues.webrtc.org/issues/42226214 +https://crbug.com/webrtc/1585,https://issues.webrtc.org/issues/42226215 +https://crbug.com/webrtc/15850,https://issues.webrtc.org/issues/42226216 +https://crbug.com/webrtc/15851,https://issues.webrtc.org/issues/42226217 +https://crbug.com/webrtc/15852,https://issues.webrtc.org/issues/42226218 +https://crbug.com/webrtc/15853,https://issues.webrtc.org/issues/42226219 +https://crbug.com/webrtc/15854,https://issues.webrtc.org/issues/42226220 +https://crbug.com/webrtc/15855,https://issues.webrtc.org/issues/42226221 +https://crbug.com/webrtc/15856,https://issues.webrtc.org/issues/42226222 +https://crbug.com/webrtc/15857,https://issues.webrtc.org/issues/42226223 +https://crbug.com/webrtc/15858,https://issues.webrtc.org/issues/42226224 +https://crbug.com/webrtc/15859,https://issues.webrtc.org/issues/42226225 +https://crbug.com/webrtc/1586,https://issues.webrtc.org/issues/42226226 +https://crbug.com/webrtc/15860,https://issues.webrtc.org/issues/42226227 +https://crbug.com/webrtc/15861,https://issues.webrtc.org/issues/42226228 +https://crbug.com/webrtc/15862,https://issues.webrtc.org/issues/42226229 +https://crbug.com/webrtc/15863,https://issues.webrtc.org/issues/42226230 +https://crbug.com/webrtc/15864,https://issues.webrtc.org/issues/42226231 +https://crbug.com/webrtc/15865,https://issues.webrtc.org/issues/42226232 +https://crbug.com/webrtc/15866,https://issues.webrtc.org/issues/42226233 +https://crbug.com/webrtc/15867,https://issues.webrtc.org/issues/42226234 
+https://crbug.com/webrtc/15868,https://issues.webrtc.org/issues/42226235 +https://crbug.com/webrtc/15869,https://issues.webrtc.org/issues/42226236 +https://crbug.com/webrtc/1587,https://issues.webrtc.org/issues/42226237 +https://crbug.com/webrtc/15870,https://issues.webrtc.org/issues/42226238 +https://crbug.com/webrtc/15871,https://issues.webrtc.org/issues/42226239 +https://crbug.com/webrtc/15872,https://issues.webrtc.org/issues/42226240 +https://crbug.com/webrtc/15873,https://issues.webrtc.org/issues/42226241 +https://crbug.com/webrtc/15874,https://issues.webrtc.org/issues/42226242 +https://crbug.com/webrtc/15875,https://issues.webrtc.org/issues/42226243 +https://crbug.com/webrtc/15876,https://issues.webrtc.org/issues/42226244 +https://crbug.com/webrtc/15877,https://issues.webrtc.org/issues/42226245 +https://crbug.com/webrtc/15878,https://issues.webrtc.org/issues/42226246 +https://crbug.com/webrtc/15879,https://issues.webrtc.org/issues/42226247 +https://crbug.com/webrtc/1588,https://issues.webrtc.org/issues/42226248 +https://crbug.com/webrtc/15880,https://issues.webrtc.org/issues/42226249 +https://crbug.com/webrtc/15881,https://issues.webrtc.org/issues/42226250 +https://crbug.com/webrtc/15882,https://issues.webrtc.org/issues/42226251 +https://crbug.com/webrtc/15883,https://issues.webrtc.org/issues/42226252 +https://crbug.com/webrtc/15884,https://issues.webrtc.org/issues/42226253 +https://crbug.com/webrtc/15885,https://issues.webrtc.org/issues/42226254 +https://crbug.com/webrtc/15886,https://issues.webrtc.org/issues/42226255 +https://crbug.com/webrtc/15887,https://issues.webrtc.org/issues/42226256 +https://crbug.com/webrtc/15888,https://issues.webrtc.org/issues/42226257 +https://crbug.com/webrtc/15889,https://issues.webrtc.org/issues/42226258 +https://crbug.com/webrtc/1589,https://issues.webrtc.org/issues/42226259 +https://crbug.com/webrtc/15890,https://issues.webrtc.org/issues/42226260 +https://crbug.com/webrtc/15891,https://issues.webrtc.org/issues/42226261 +https://crbug.com/webrtc/15892,https://issues.webrtc.org/issues/42226262 +https://crbug.com/webrtc/15893,https://issues.webrtc.org/issues/42226263 +https://crbug.com/webrtc/15894,https://issues.webrtc.org/issues/42226264 +https://crbug.com/webrtc/15895,https://issues.webrtc.org/issues/42226265 +https://crbug.com/webrtc/15896,https://issues.webrtc.org/issues/42226266 +https://crbug.com/webrtc/15897,https://issues.webrtc.org/issues/42226267 +https://crbug.com/webrtc/15898,https://issues.webrtc.org/issues/42226268 +https://crbug.com/webrtc/15899,https://issues.webrtc.org/issues/42226269 +https://crbug.com/webrtc/159,https://issues.webrtc.org/issues/42226270 +https://crbug.com/webrtc/1590,https://issues.webrtc.org/issues/42226271 +https://crbug.com/webrtc/15900,https://issues.webrtc.org/issues/42226272 +https://crbug.com/webrtc/15901,https://issues.webrtc.org/issues/42226273 +https://crbug.com/webrtc/15902,https://issues.webrtc.org/issues/42226274 +https://crbug.com/webrtc/15903,https://issues.webrtc.org/issues/42226275 +https://crbug.com/webrtc/15904,https://issues.webrtc.org/issues/42226276 +https://crbug.com/webrtc/15905,https://issues.webrtc.org/issues/42226277 +https://crbug.com/webrtc/15906,https://issues.webrtc.org/issues/42226278 +https://crbug.com/webrtc/15907,https://issues.webrtc.org/issues/42226279 +https://crbug.com/webrtc/15908,https://issues.webrtc.org/issues/42226280 +https://crbug.com/webrtc/15909,https://issues.webrtc.org/issues/42226281 +https://crbug.com/webrtc/1591,https://issues.webrtc.org/issues/42226282 
+https://crbug.com/webrtc/15910,https://issues.webrtc.org/issues/42226283 +https://crbug.com/webrtc/15911,https://issues.webrtc.org/issues/42226284 +https://crbug.com/webrtc/15912,https://issues.webrtc.org/issues/42226285 +https://crbug.com/webrtc/15913,https://issues.webrtc.org/issues/42226286 +https://crbug.com/webrtc/15914,https://issues.webrtc.org/issues/42226287 +https://crbug.com/webrtc/15915,https://issues.webrtc.org/issues/42226288 +https://crbug.com/webrtc/15916,https://issues.webrtc.org/issues/42226289 +https://crbug.com/webrtc/15917,https://issues.webrtc.org/issues/42226290 +https://crbug.com/webrtc/15918,https://issues.webrtc.org/issues/42226291 +https://crbug.com/webrtc/15919,https://issues.webrtc.org/issues/42226292 +https://crbug.com/webrtc/1592,https://issues.webrtc.org/issues/42226293 +https://crbug.com/webrtc/15920,https://issues.webrtc.org/issues/42226294 +https://crbug.com/webrtc/15921,https://issues.webrtc.org/issues/42226295 +https://crbug.com/webrtc/15922,https://issues.webrtc.org/issues/42226296 +https://crbug.com/webrtc/15923,https://issues.webrtc.org/issues/42226297 +https://crbug.com/webrtc/15924,https://issues.webrtc.org/issues/42226298 +https://crbug.com/webrtc/15925,https://issues.webrtc.org/issues/42226299 +https://crbug.com/webrtc/15926,https://issues.webrtc.org/issues/42226300 +https://crbug.com/webrtc/15927,https://issues.webrtc.org/issues/42226301 +https://crbug.com/webrtc/15928,https://issues.webrtc.org/issues/42226302 +https://crbug.com/webrtc/15929,https://issues.webrtc.org/issues/42226303 +https://crbug.com/webrtc/1593,https://issues.webrtc.org/issues/42226304 +https://crbug.com/webrtc/15930,https://issues.webrtc.org/issues/42226305 +https://crbug.com/webrtc/1594,https://issues.webrtc.org/issues/42226306 +https://crbug.com/webrtc/1595,https://issues.webrtc.org/issues/42226307 +https://crbug.com/webrtc/1596,https://issues.webrtc.org/issues/42226308 +https://crbug.com/webrtc/1597,https://issues.webrtc.org/issues/42226309 +https://crbug.com/webrtc/1598,https://issues.webrtc.org/issues/42226310 +https://crbug.com/webrtc/1599,https://issues.webrtc.org/issues/42226311 +https://crbug.com/webrtc/16,https://issues.webrtc.org/issues/42226312 +https://crbug.com/webrtc/160,https://issues.webrtc.org/issues/42226313 +https://crbug.com/webrtc/1601,https://issues.webrtc.org/issues/42226314 +https://crbug.com/webrtc/1602,https://issues.webrtc.org/issues/42226315 +https://crbug.com/webrtc/1603,https://issues.webrtc.org/issues/42226316 +https://crbug.com/webrtc/1604,https://issues.webrtc.org/issues/42226317 +https://crbug.com/webrtc/1605,https://issues.webrtc.org/issues/42226318 +https://crbug.com/webrtc/1606,https://issues.webrtc.org/issues/42226319 +https://crbug.com/webrtc/1607,https://issues.webrtc.org/issues/42226320 +https://crbug.com/webrtc/1608,https://issues.webrtc.org/issues/42226321 +https://crbug.com/webrtc/1609,https://issues.webrtc.org/issues/42226322 +https://crbug.com/webrtc/161,https://issues.webrtc.org/issues/42226323 +https://crbug.com/webrtc/1610,https://issues.webrtc.org/issues/42226324 +https://crbug.com/webrtc/1611,https://issues.webrtc.org/issues/42226325 +https://crbug.com/webrtc/1612,https://issues.webrtc.org/issues/42226326 +https://crbug.com/webrtc/1613,https://issues.webrtc.org/issues/42226327 +https://crbug.com/webrtc/1614,https://issues.webrtc.org/issues/42226328 +https://crbug.com/webrtc/1615,https://issues.webrtc.org/issues/42226329 +https://crbug.com/webrtc/1616,https://issues.webrtc.org/issues/42226330 
+https://crbug.com/webrtc/1617,https://issues.webrtc.org/issues/42226331 +https://crbug.com/webrtc/1618,https://issues.webrtc.org/issues/42226332 +https://crbug.com/webrtc/1619,https://issues.webrtc.org/issues/42226333 +https://crbug.com/webrtc/162,https://issues.webrtc.org/issues/42226334 +https://crbug.com/webrtc/1620,https://issues.webrtc.org/issues/42226335 +https://crbug.com/webrtc/1621,https://issues.webrtc.org/issues/42226336 +https://crbug.com/webrtc/1622,https://issues.webrtc.org/issues/42226337 +https://crbug.com/webrtc/1623,https://issues.webrtc.org/issues/42226338 +https://crbug.com/webrtc/1624,https://issues.webrtc.org/issues/42226339 +https://crbug.com/webrtc/1625,https://issues.webrtc.org/issues/42226340 +https://crbug.com/webrtc/1626,https://issues.webrtc.org/issues/42226341 +https://crbug.com/webrtc/1627,https://issues.webrtc.org/issues/42226342 +https://crbug.com/webrtc/1629,https://issues.webrtc.org/issues/42226343 +https://crbug.com/webrtc/163,https://issues.webrtc.org/issues/42226344 +https://crbug.com/webrtc/1630,https://issues.webrtc.org/issues/42226345 +https://crbug.com/webrtc/1631,https://issues.webrtc.org/issues/42226346 +https://crbug.com/webrtc/1632,https://issues.webrtc.org/issues/42226347 +https://crbug.com/webrtc/1633,https://issues.webrtc.org/issues/42226348 +https://crbug.com/webrtc/1634,https://issues.webrtc.org/issues/42226349 +https://crbug.com/webrtc/1635,https://issues.webrtc.org/issues/42226350 +https://crbug.com/webrtc/1636,https://issues.webrtc.org/issues/42226351 +https://crbug.com/webrtc/1637,https://issues.webrtc.org/issues/42226352 +https://crbug.com/webrtc/1638,https://issues.webrtc.org/issues/42226353 +https://crbug.com/webrtc/1639,https://issues.webrtc.org/issues/42226354 +https://crbug.com/webrtc/164,https://issues.webrtc.org/issues/42226355 +https://crbug.com/webrtc/1640,https://issues.webrtc.org/issues/42226356 +https://crbug.com/webrtc/1641,https://issues.webrtc.org/issues/42226357 +https://crbug.com/webrtc/1642,https://issues.webrtc.org/issues/42226358 +https://crbug.com/webrtc/1643,https://issues.webrtc.org/issues/42226359 +https://crbug.com/webrtc/1644,https://issues.webrtc.org/issues/42226360 +https://crbug.com/webrtc/1645,https://issues.webrtc.org/issues/42226361 +https://crbug.com/webrtc/1646,https://issues.webrtc.org/issues/42226362 +https://crbug.com/webrtc/1647,https://issues.webrtc.org/issues/42226363 +https://crbug.com/webrtc/1648,https://issues.webrtc.org/issues/42226364 +https://crbug.com/webrtc/1649,https://issues.webrtc.org/issues/42226365 +https://crbug.com/webrtc/165,https://issues.webrtc.org/issues/42226366 +https://crbug.com/webrtc/1650,https://issues.webrtc.org/issues/42226367 +https://crbug.com/webrtc/1651,https://issues.webrtc.org/issues/42226368 +https://crbug.com/webrtc/1652,https://issues.webrtc.org/issues/42226369 +https://crbug.com/webrtc/1653,https://issues.webrtc.org/issues/42226370 +https://crbug.com/webrtc/1654,https://issues.webrtc.org/issues/42226371 +https://crbug.com/webrtc/1655,https://issues.webrtc.org/issues/42226372 +https://crbug.com/webrtc/1656,https://issues.webrtc.org/issues/42226373 +https://crbug.com/webrtc/1657,https://issues.webrtc.org/issues/42226374 +https://crbug.com/webrtc/1658,https://issues.webrtc.org/issues/42226375 +https://crbug.com/webrtc/1659,https://issues.webrtc.org/issues/42226376 +https://crbug.com/webrtc/166,https://issues.webrtc.org/issues/42226377 +https://crbug.com/webrtc/1660,https://issues.webrtc.org/issues/42226378 
+https://crbug.com/webrtc/1661,https://issues.webrtc.org/issues/42226379 +https://crbug.com/webrtc/1662,https://issues.webrtc.org/issues/42226380 +https://crbug.com/webrtc/1663,https://issues.webrtc.org/issues/42226381 +https://crbug.com/webrtc/1664,https://issues.webrtc.org/issues/42226382 +https://crbug.com/webrtc/1665,https://issues.webrtc.org/issues/42226383 +https://crbug.com/webrtc/1666,https://issues.webrtc.org/issues/42226384 +https://crbug.com/webrtc/1668,https://issues.webrtc.org/issues/42226385 +https://crbug.com/webrtc/1669,https://issues.webrtc.org/issues/42226386 +https://crbug.com/webrtc/167,https://issues.webrtc.org/issues/42226387 +https://crbug.com/webrtc/1670,https://issues.webrtc.org/issues/42226388 +https://crbug.com/webrtc/1671,https://issues.webrtc.org/issues/42226389 +https://crbug.com/webrtc/1672,https://issues.webrtc.org/issues/42226390 +https://crbug.com/webrtc/1673,https://issues.webrtc.org/issues/42226391 +https://crbug.com/webrtc/1674,https://issues.webrtc.org/issues/42226392 +https://crbug.com/webrtc/1675,https://issues.webrtc.org/issues/42226393 +https://crbug.com/webrtc/1676,https://issues.webrtc.org/issues/42226394 +https://crbug.com/webrtc/1677,https://issues.webrtc.org/issues/42226395 +https://crbug.com/webrtc/1678,https://issues.webrtc.org/issues/42226396 +https://crbug.com/webrtc/1679,https://issues.webrtc.org/issues/42226397 +https://crbug.com/webrtc/168,https://issues.webrtc.org/issues/42226398 +https://crbug.com/webrtc/1680,https://issues.webrtc.org/issues/42226399 +https://crbug.com/webrtc/1681,https://issues.webrtc.org/issues/42226400 +https://crbug.com/webrtc/1682,https://issues.webrtc.org/issues/42226401 +https://crbug.com/webrtc/1683,https://issues.webrtc.org/issues/42226402 +https://crbug.com/webrtc/1684,https://issues.webrtc.org/issues/42226403 +https://crbug.com/webrtc/1685,https://issues.webrtc.org/issues/42226404 +https://crbug.com/webrtc/1686,https://issues.webrtc.org/issues/42226405 +https://crbug.com/webrtc/1687,https://issues.webrtc.org/issues/42226406 +https://crbug.com/webrtc/1688,https://issues.webrtc.org/issues/42226407 +https://crbug.com/webrtc/1689,https://issues.webrtc.org/issues/42226408 +https://crbug.com/webrtc/169,https://issues.webrtc.org/issues/42226409 +https://crbug.com/webrtc/1690,https://issues.webrtc.org/issues/42226410 +https://crbug.com/webrtc/1691,https://issues.webrtc.org/issues/42226411 +https://crbug.com/webrtc/1692,https://issues.webrtc.org/issues/42226412 +https://crbug.com/webrtc/1693,https://issues.webrtc.org/issues/42226413 +https://crbug.com/webrtc/1694,https://issues.webrtc.org/issues/42226414 +https://crbug.com/webrtc/1695,https://issues.webrtc.org/issues/42226415 +https://crbug.com/webrtc/1696,https://issues.webrtc.org/issues/42226416 +https://crbug.com/webrtc/1697,https://issues.webrtc.org/issues/42226417 +https://crbug.com/webrtc/1698,https://issues.webrtc.org/issues/42226418 +https://crbug.com/webrtc/1699,https://issues.webrtc.org/issues/42226419 +https://crbug.com/webrtc/17,https://issues.webrtc.org/issues/42226420 +https://crbug.com/webrtc/170,https://issues.webrtc.org/issues/42226421 +https://crbug.com/webrtc/1700,https://issues.webrtc.org/issues/42226422 +https://crbug.com/webrtc/1701,https://issues.webrtc.org/issues/42226423 +https://crbug.com/webrtc/1702,https://issues.webrtc.org/issues/42226424 +https://crbug.com/webrtc/1703,https://issues.webrtc.org/issues/42226425 +https://crbug.com/webrtc/1704,https://issues.webrtc.org/issues/42226426 
+https://crbug.com/webrtc/1705,https://issues.webrtc.org/issues/42226427 +https://crbug.com/webrtc/1706,https://issues.webrtc.org/issues/42226428 +https://crbug.com/webrtc/1707,https://issues.webrtc.org/issues/42226429 +https://crbug.com/webrtc/1708,https://issues.webrtc.org/issues/42226430 +https://crbug.com/webrtc/1709,https://issues.webrtc.org/issues/42226431 +https://crbug.com/webrtc/171,https://issues.webrtc.org/issues/42226432 +https://crbug.com/webrtc/1710,https://issues.webrtc.org/issues/42226433 +https://crbug.com/webrtc/1711,https://issues.webrtc.org/issues/42226434 +https://crbug.com/webrtc/1712,https://issues.webrtc.org/issues/42226435 +https://crbug.com/webrtc/1713,https://issues.webrtc.org/issues/42226436 +https://crbug.com/webrtc/1714,https://issues.webrtc.org/issues/42226437 +https://crbug.com/webrtc/1715,https://issues.webrtc.org/issues/42226438 +https://crbug.com/webrtc/1716,https://issues.webrtc.org/issues/42226439 +https://crbug.com/webrtc/1717,https://issues.webrtc.org/issues/42226440 +https://crbug.com/webrtc/1718,https://issues.webrtc.org/issues/42226441 +https://crbug.com/webrtc/1719,https://issues.webrtc.org/issues/42226442 +https://crbug.com/webrtc/172,https://issues.webrtc.org/issues/42226443 +https://crbug.com/webrtc/1720,https://issues.webrtc.org/issues/42226444 +https://crbug.com/webrtc/1721,https://issues.webrtc.org/issues/42226445 +https://crbug.com/webrtc/1722,https://issues.webrtc.org/issues/42226446 +https://crbug.com/webrtc/1723,https://issues.webrtc.org/issues/42226447 +https://crbug.com/webrtc/1724,https://issues.webrtc.org/issues/42226448 +https://crbug.com/webrtc/1725,https://issues.webrtc.org/issues/42226449 +https://crbug.com/webrtc/1726,https://issues.webrtc.org/issues/42226450 +https://crbug.com/webrtc/1727,https://issues.webrtc.org/issues/42226451 +https://crbug.com/webrtc/1728,https://issues.webrtc.org/issues/42226452 +https://crbug.com/webrtc/1729,https://issues.webrtc.org/issues/42226453 +https://crbug.com/webrtc/173,https://issues.webrtc.org/issues/42226454 +https://crbug.com/webrtc/1730,https://issues.webrtc.org/issues/42226455 +https://crbug.com/webrtc/1731,https://issues.webrtc.org/issues/42226456 +https://crbug.com/webrtc/1732,https://issues.webrtc.org/issues/42226457 +https://crbug.com/webrtc/1733,https://issues.webrtc.org/issues/42226458 +https://crbug.com/webrtc/1734,https://issues.webrtc.org/issues/42226459 +https://crbug.com/webrtc/1735,https://issues.webrtc.org/issues/42226460 +https://crbug.com/webrtc/1736,https://issues.webrtc.org/issues/42226461 +https://crbug.com/webrtc/1737,https://issues.webrtc.org/issues/42226462 +https://crbug.com/webrtc/1738,https://issues.webrtc.org/issues/42226463 +https://crbug.com/webrtc/1739,https://issues.webrtc.org/issues/42226464 +https://crbug.com/webrtc/174,https://issues.webrtc.org/issues/42226465 +https://crbug.com/webrtc/1740,https://issues.webrtc.org/issues/42226466 +https://crbug.com/webrtc/1741,https://issues.webrtc.org/issues/42226467 +https://crbug.com/webrtc/1742,https://issues.webrtc.org/issues/42226468 +https://crbug.com/webrtc/1743,https://issues.webrtc.org/issues/42226469 +https://crbug.com/webrtc/1744,https://issues.webrtc.org/issues/42226470 +https://crbug.com/webrtc/1745,https://issues.webrtc.org/issues/42226471 +https://crbug.com/webrtc/1746,https://issues.webrtc.org/issues/42226472 +https://crbug.com/webrtc/1747,https://issues.webrtc.org/issues/42226473 +https://crbug.com/webrtc/1748,https://issues.webrtc.org/issues/42226474 
+https://crbug.com/webrtc/1749,https://issues.webrtc.org/issues/42226475 +https://crbug.com/webrtc/175,https://issues.webrtc.org/issues/42226476 +https://crbug.com/webrtc/1750,https://issues.webrtc.org/issues/42226477 +https://crbug.com/webrtc/1751,https://issues.webrtc.org/issues/42226478 +https://crbug.com/webrtc/1752,https://issues.webrtc.org/issues/42226479 +https://crbug.com/webrtc/1753,https://issues.webrtc.org/issues/42226480 +https://crbug.com/webrtc/1754,https://issues.webrtc.org/issues/42226481 +https://crbug.com/webrtc/1755,https://issues.webrtc.org/issues/42226482 +https://crbug.com/webrtc/1756,https://issues.webrtc.org/issues/42226483 +https://crbug.com/webrtc/1758,https://issues.webrtc.org/issues/42226484 +https://crbug.com/webrtc/1759,https://issues.webrtc.org/issues/42226485 +https://crbug.com/webrtc/176,https://issues.webrtc.org/issues/42226486 +https://crbug.com/webrtc/1760,https://issues.webrtc.org/issues/42226487 +https://crbug.com/webrtc/1761,https://issues.webrtc.org/issues/42226488 +https://crbug.com/webrtc/1762,https://issues.webrtc.org/issues/42226489 +https://crbug.com/webrtc/1763,https://issues.webrtc.org/issues/42226490 +https://crbug.com/webrtc/1765,https://issues.webrtc.org/issues/42226491 +https://crbug.com/webrtc/1766,https://issues.webrtc.org/issues/42226492 +https://crbug.com/webrtc/1767,https://issues.webrtc.org/issues/42226493 +https://crbug.com/webrtc/1768,https://issues.webrtc.org/issues/42226494 +https://crbug.com/webrtc/1769,https://issues.webrtc.org/issues/42226495 +https://crbug.com/webrtc/177,https://issues.webrtc.org/issues/42226496 +https://crbug.com/webrtc/1770,https://issues.webrtc.org/issues/42226497 +https://crbug.com/webrtc/1771,https://issues.webrtc.org/issues/42226498 +https://crbug.com/webrtc/1772,https://issues.webrtc.org/issues/42226499 +https://crbug.com/webrtc/1773,https://issues.webrtc.org/issues/42226500 +https://crbug.com/webrtc/1774,https://issues.webrtc.org/issues/42226501 +https://crbug.com/webrtc/1775,https://issues.webrtc.org/issues/42226502 +https://crbug.com/webrtc/1776,https://issues.webrtc.org/issues/42226503 +https://crbug.com/webrtc/1777,https://issues.webrtc.org/issues/42226504 +https://crbug.com/webrtc/1778,https://issues.webrtc.org/issues/42226505 +https://crbug.com/webrtc/1779,https://issues.webrtc.org/issues/42226506 +https://crbug.com/webrtc/178,https://issues.webrtc.org/issues/42226507 +https://crbug.com/webrtc/1780,https://issues.webrtc.org/issues/42226508 +https://crbug.com/webrtc/1781,https://issues.webrtc.org/issues/42226509 +https://crbug.com/webrtc/1782,https://issues.webrtc.org/issues/42226510 +https://crbug.com/webrtc/1783,https://issues.webrtc.org/issues/42226511 +https://crbug.com/webrtc/1784,https://issues.webrtc.org/issues/42226512 +https://crbug.com/webrtc/1785,https://issues.webrtc.org/issues/42226513 +https://crbug.com/webrtc/1786,https://issues.webrtc.org/issues/42226514 +https://crbug.com/webrtc/1787,https://issues.webrtc.org/issues/42226515 +https://crbug.com/webrtc/1788,https://issues.webrtc.org/issues/42226516 +https://crbug.com/webrtc/1789,https://issues.webrtc.org/issues/42226517 +https://crbug.com/webrtc/179,https://issues.webrtc.org/issues/42226518 +https://crbug.com/webrtc/1790,https://issues.webrtc.org/issues/42226519 +https://crbug.com/webrtc/1791,https://issues.webrtc.org/issues/42226520 +https://crbug.com/webrtc/1792,https://issues.webrtc.org/issues/42226521 +https://crbug.com/webrtc/1793,https://issues.webrtc.org/issues/42226522 
+https://crbug.com/webrtc/1794,https://issues.webrtc.org/issues/42226523 +https://crbug.com/webrtc/1795,https://issues.webrtc.org/issues/42226524 +https://crbug.com/webrtc/1796,https://issues.webrtc.org/issues/42226525 +https://crbug.com/webrtc/1797,https://issues.webrtc.org/issues/42226526 +https://crbug.com/webrtc/1798,https://issues.webrtc.org/issues/42226527 +https://crbug.com/webrtc/1799,https://issues.webrtc.org/issues/42226528 +https://crbug.com/webrtc/18,https://issues.webrtc.org/issues/42226529 +https://crbug.com/webrtc/180,https://issues.webrtc.org/issues/42226530 +https://crbug.com/webrtc/1800,https://issues.webrtc.org/issues/42226531 +https://crbug.com/webrtc/1801,https://issues.webrtc.org/issues/42226532 +https://crbug.com/webrtc/1802,https://issues.webrtc.org/issues/42226533 +https://crbug.com/webrtc/1803,https://issues.webrtc.org/issues/42226534 +https://crbug.com/webrtc/1804,https://issues.webrtc.org/issues/42226535 +https://crbug.com/webrtc/1805,https://issues.webrtc.org/issues/42226536 +https://crbug.com/webrtc/1806,https://issues.webrtc.org/issues/42226537 +https://crbug.com/webrtc/1807,https://issues.webrtc.org/issues/42226538 +https://crbug.com/webrtc/1808,https://issues.webrtc.org/issues/42226539 +https://crbug.com/webrtc/1809,https://issues.webrtc.org/issues/42226540 +https://crbug.com/webrtc/181,https://issues.webrtc.org/issues/42226541 +https://crbug.com/webrtc/1810,https://issues.webrtc.org/issues/42226542 +https://crbug.com/webrtc/1811,https://issues.webrtc.org/issues/42226543 +https://crbug.com/webrtc/1812,https://issues.webrtc.org/issues/42226544 +https://crbug.com/webrtc/1813,https://issues.webrtc.org/issues/42226545 +https://crbug.com/webrtc/1814,https://issues.webrtc.org/issues/42226546 +https://crbug.com/webrtc/1815,https://issues.webrtc.org/issues/42226547 +https://crbug.com/webrtc/1816,https://issues.webrtc.org/issues/42226548 +https://crbug.com/webrtc/1817,https://issues.webrtc.org/issues/42226549 +https://crbug.com/webrtc/1818,https://issues.webrtc.org/issues/42226550 +https://crbug.com/webrtc/1819,https://issues.webrtc.org/issues/42226551 +https://crbug.com/webrtc/182,https://issues.webrtc.org/issues/42226552 +https://crbug.com/webrtc/1820,https://issues.webrtc.org/issues/42226553 +https://crbug.com/webrtc/1821,https://issues.webrtc.org/issues/42226554 +https://crbug.com/webrtc/1822,https://issues.webrtc.org/issues/42226555 +https://crbug.com/webrtc/1823,https://issues.webrtc.org/issues/42226556 +https://crbug.com/webrtc/1824,https://issues.webrtc.org/issues/42226557 +https://crbug.com/webrtc/1825,https://issues.webrtc.org/issues/42226558 +https://crbug.com/webrtc/1826,https://issues.webrtc.org/issues/42226559 +https://crbug.com/webrtc/1827,https://issues.webrtc.org/issues/42226560 +https://crbug.com/webrtc/1828,https://issues.webrtc.org/issues/42226561 +https://crbug.com/webrtc/1829,https://issues.webrtc.org/issues/42226562 +https://crbug.com/webrtc/183,https://issues.webrtc.org/issues/42226563 +https://crbug.com/webrtc/1830,https://issues.webrtc.org/issues/42226564 +https://crbug.com/webrtc/1831,https://issues.webrtc.org/issues/42226565 +https://crbug.com/webrtc/1832,https://issues.webrtc.org/issues/42226566 +https://crbug.com/webrtc/1833,https://issues.webrtc.org/issues/42226567 +https://crbug.com/webrtc/1834,https://issues.webrtc.org/issues/42226568 +https://crbug.com/webrtc/1835,https://issues.webrtc.org/issues/42226569 +https://crbug.com/webrtc/1836,https://issues.webrtc.org/issues/42226570 
+https://crbug.com/webrtc/1837,https://issues.webrtc.org/issues/42226571 +https://crbug.com/webrtc/1838,https://issues.webrtc.org/issues/42226572 +https://crbug.com/webrtc/1839,https://issues.webrtc.org/issues/42226573 +https://crbug.com/webrtc/184,https://issues.webrtc.org/issues/42226574 +https://crbug.com/webrtc/1840,https://issues.webrtc.org/issues/42226575 +https://crbug.com/webrtc/1841,https://issues.webrtc.org/issues/42226576 +https://crbug.com/webrtc/1842,https://issues.webrtc.org/issues/42226577 +https://crbug.com/webrtc/1843,https://issues.webrtc.org/issues/42226578 +https://crbug.com/webrtc/1844,https://issues.webrtc.org/issues/42226579 +https://crbug.com/webrtc/1845,https://issues.webrtc.org/issues/42226580 +https://crbug.com/webrtc/1846,https://issues.webrtc.org/issues/42226581 +https://crbug.com/webrtc/1847,https://issues.webrtc.org/issues/42226582 +https://crbug.com/webrtc/1848,https://issues.webrtc.org/issues/42226583 +https://crbug.com/webrtc/1849,https://issues.webrtc.org/issues/42226584 +https://crbug.com/webrtc/185,https://issues.webrtc.org/issues/42226585 +https://crbug.com/webrtc/1850,https://issues.webrtc.org/issues/42226586 +https://crbug.com/webrtc/1851,https://issues.webrtc.org/issues/42226587 +https://crbug.com/webrtc/1852,https://issues.webrtc.org/issues/42226588 +https://crbug.com/webrtc/1853,https://issues.webrtc.org/issues/42226589 +https://crbug.com/webrtc/1854,https://issues.webrtc.org/issues/42226590 +https://crbug.com/webrtc/1855,https://issues.webrtc.org/issues/42226591 +https://crbug.com/webrtc/1856,https://issues.webrtc.org/issues/42226592 +https://crbug.com/webrtc/1857,https://issues.webrtc.org/issues/42226593 +https://crbug.com/webrtc/1858,https://issues.webrtc.org/issues/42226594 +https://crbug.com/webrtc/1859,https://issues.webrtc.org/issues/42226595 +https://crbug.com/webrtc/186,https://issues.webrtc.org/issues/42226596 +https://crbug.com/webrtc/1860,https://issues.webrtc.org/issues/42226597 +https://crbug.com/webrtc/1861,https://issues.webrtc.org/issues/42226598 +https://crbug.com/webrtc/1862,https://issues.webrtc.org/issues/42226599 +https://crbug.com/webrtc/1863,https://issues.webrtc.org/issues/42226600 +https://crbug.com/webrtc/1864,https://issues.webrtc.org/issues/42226601 +https://crbug.com/webrtc/1865,https://issues.webrtc.org/issues/42226602 +https://crbug.com/webrtc/1866,https://issues.webrtc.org/issues/42226603 +https://crbug.com/webrtc/1867,https://issues.webrtc.org/issues/42226604 +https://crbug.com/webrtc/1868,https://issues.webrtc.org/issues/42226605 +https://crbug.com/webrtc/1869,https://issues.webrtc.org/issues/42226606 +https://crbug.com/webrtc/187,https://issues.webrtc.org/issues/42226607 +https://crbug.com/webrtc/1870,https://issues.webrtc.org/issues/42226608 +https://crbug.com/webrtc/1871,https://issues.webrtc.org/issues/42226609 +https://crbug.com/webrtc/1872,https://issues.webrtc.org/issues/42226610 +https://crbug.com/webrtc/1873,https://issues.webrtc.org/issues/42226611 +https://crbug.com/webrtc/1874,https://issues.webrtc.org/issues/42226612 +https://crbug.com/webrtc/1875,https://issues.webrtc.org/issues/42226613 +https://crbug.com/webrtc/1876,https://issues.webrtc.org/issues/42226614 +https://crbug.com/webrtc/1877,https://issues.webrtc.org/issues/42226615 +https://crbug.com/webrtc/1878,https://issues.webrtc.org/issues/42226616 +https://crbug.com/webrtc/1879,https://issues.webrtc.org/issues/42226617 +https://crbug.com/webrtc/188,https://issues.webrtc.org/issues/42226618 
+https://crbug.com/webrtc/1880,https://issues.webrtc.org/issues/42226619 +https://crbug.com/webrtc/1881,https://issues.webrtc.org/issues/42226620 +https://crbug.com/webrtc/1882,https://issues.webrtc.org/issues/42226621 +https://crbug.com/webrtc/1883,https://issues.webrtc.org/issues/42226622 +https://crbug.com/webrtc/1884,https://issues.webrtc.org/issues/42226623 +https://crbug.com/webrtc/1885,https://issues.webrtc.org/issues/42226624 +https://crbug.com/webrtc/1886,https://issues.webrtc.org/issues/42226625 +https://crbug.com/webrtc/1887,https://issues.webrtc.org/issues/42226626 +https://crbug.com/webrtc/1888,https://issues.webrtc.org/issues/42226627 +https://crbug.com/webrtc/1889,https://issues.webrtc.org/issues/42226628 +https://crbug.com/webrtc/189,https://issues.webrtc.org/issues/42226629 +https://crbug.com/webrtc/1890,https://issues.webrtc.org/issues/42226630 +https://crbug.com/webrtc/1891,https://issues.webrtc.org/issues/42226631 +https://crbug.com/webrtc/1892,https://issues.webrtc.org/issues/42226632 +https://crbug.com/webrtc/1893,https://issues.webrtc.org/issues/42226633 +https://crbug.com/webrtc/1894,https://issues.webrtc.org/issues/42226634 +https://crbug.com/webrtc/1895,https://issues.webrtc.org/issues/42226635 +https://crbug.com/webrtc/1896,https://issues.webrtc.org/issues/42226636 +https://crbug.com/webrtc/1897,https://issues.webrtc.org/issues/42226637 +https://crbug.com/webrtc/1898,https://issues.webrtc.org/issues/42226638 +https://crbug.com/webrtc/1899,https://issues.webrtc.org/issues/42226639 +https://crbug.com/webrtc/19,https://issues.webrtc.org/issues/42226640 +https://crbug.com/webrtc/190,https://issues.webrtc.org/issues/42226641 +https://crbug.com/webrtc/1900,https://issues.webrtc.org/issues/42226642 +https://crbug.com/webrtc/1901,https://issues.webrtc.org/issues/42226643 +https://crbug.com/webrtc/1902,https://issues.webrtc.org/issues/42226644 +https://crbug.com/webrtc/1903,https://issues.webrtc.org/issues/42226645 +https://crbug.com/webrtc/1904,https://issues.webrtc.org/issues/42226646 +https://crbug.com/webrtc/1905,https://issues.webrtc.org/issues/42226647 +https://crbug.com/webrtc/1906,https://issues.webrtc.org/issues/42226648 +https://crbug.com/webrtc/1907,https://issues.webrtc.org/issues/42226649 +https://crbug.com/webrtc/1908,https://issues.webrtc.org/issues/42226650 +https://crbug.com/webrtc/1909,https://issues.webrtc.org/issues/42226651 +https://crbug.com/webrtc/191,https://issues.webrtc.org/issues/42226652 +https://crbug.com/webrtc/1910,https://issues.webrtc.org/issues/42226653 +https://crbug.com/webrtc/1911,https://issues.webrtc.org/issues/42226654 +https://crbug.com/webrtc/1912,https://issues.webrtc.org/issues/42226655 +https://crbug.com/webrtc/1913,https://issues.webrtc.org/issues/42226656 +https://crbug.com/webrtc/1914,https://issues.webrtc.org/issues/42226657 +https://crbug.com/webrtc/1915,https://issues.webrtc.org/issues/42226658 +https://crbug.com/webrtc/1916,https://issues.webrtc.org/issues/42226659 +https://crbug.com/webrtc/1917,https://issues.webrtc.org/issues/42226660 +https://crbug.com/webrtc/1918,https://issues.webrtc.org/issues/42226661 +https://crbug.com/webrtc/1919,https://issues.webrtc.org/issues/42226662 +https://crbug.com/webrtc/192,https://issues.webrtc.org/issues/42226663 +https://crbug.com/webrtc/1920,https://issues.webrtc.org/issues/42226664 +https://crbug.com/webrtc/1921,https://issues.webrtc.org/issues/42226665 +https://crbug.com/webrtc/1922,https://issues.webrtc.org/issues/42226666 
+https://crbug.com/webrtc/1923,https://issues.webrtc.org/issues/42226667 +https://crbug.com/webrtc/1924,https://issues.webrtc.org/issues/42226668 +https://crbug.com/webrtc/1925,https://issues.webrtc.org/issues/42226669 +https://crbug.com/webrtc/1926,https://issues.webrtc.org/issues/42226670 +https://crbug.com/webrtc/1927,https://issues.webrtc.org/issues/42226671 +https://crbug.com/webrtc/1928,https://issues.webrtc.org/issues/42226672 +https://crbug.com/webrtc/1929,https://issues.webrtc.org/issues/42226673 +https://crbug.com/webrtc/193,https://issues.webrtc.org/issues/42226674 +https://crbug.com/webrtc/1930,https://issues.webrtc.org/issues/42226675 +https://crbug.com/webrtc/1931,https://issues.webrtc.org/issues/42226676 +https://crbug.com/webrtc/1932,https://issues.webrtc.org/issues/42226677 +https://crbug.com/webrtc/1933,https://issues.webrtc.org/issues/42226678 +https://crbug.com/webrtc/1934,https://issues.webrtc.org/issues/42226679 +https://crbug.com/webrtc/1935,https://issues.webrtc.org/issues/42226680 +https://crbug.com/webrtc/1936,https://issues.webrtc.org/issues/42226681 +https://crbug.com/webrtc/1937,https://issues.webrtc.org/issues/42226682 +https://crbug.com/webrtc/1938,https://issues.webrtc.org/issues/42226683 +https://crbug.com/webrtc/1939,https://issues.webrtc.org/issues/42226684 +https://crbug.com/webrtc/194,https://issues.webrtc.org/issues/42226685 +https://crbug.com/webrtc/1940,https://issues.webrtc.org/issues/42226686 +https://crbug.com/webrtc/1941,https://issues.webrtc.org/issues/42226687 +https://crbug.com/webrtc/1942,https://issues.webrtc.org/issues/42226688 +https://crbug.com/webrtc/1943,https://issues.webrtc.org/issues/42226689 +https://crbug.com/webrtc/1944,https://issues.webrtc.org/issues/42226690 +https://crbug.com/webrtc/1945,https://issues.webrtc.org/issues/42226691 +https://crbug.com/webrtc/1946,https://issues.webrtc.org/issues/42226692 +https://crbug.com/webrtc/1947,https://issues.webrtc.org/issues/42226693 +https://crbug.com/webrtc/1948,https://issues.webrtc.org/issues/42226694 +https://crbug.com/webrtc/1949,https://issues.webrtc.org/issues/42226695 +https://crbug.com/webrtc/195,https://issues.webrtc.org/issues/42226696 +https://crbug.com/webrtc/1950,https://issues.webrtc.org/issues/42226697 +https://crbug.com/webrtc/1951,https://issues.webrtc.org/issues/42226698 +https://crbug.com/webrtc/1952,https://issues.webrtc.org/issues/42226699 +https://crbug.com/webrtc/1953,https://issues.webrtc.org/issues/42226700 +https://crbug.com/webrtc/1954,https://issues.webrtc.org/issues/42226701 +https://crbug.com/webrtc/1955,https://issues.webrtc.org/issues/42226702 +https://crbug.com/webrtc/1956,https://issues.webrtc.org/issues/42226703 +https://crbug.com/webrtc/1957,https://issues.webrtc.org/issues/42226704 +https://crbug.com/webrtc/1959,https://issues.webrtc.org/issues/42226705 +https://crbug.com/webrtc/196,https://issues.webrtc.org/issues/42226706 +https://crbug.com/webrtc/1960,https://issues.webrtc.org/issues/42226707 +https://crbug.com/webrtc/1961,https://issues.webrtc.org/issues/42226708 +https://crbug.com/webrtc/1962,https://issues.webrtc.org/issues/42226709 +https://crbug.com/webrtc/1963,https://issues.webrtc.org/issues/42226710 +https://crbug.com/webrtc/1964,https://issues.webrtc.org/issues/42226711 +https://crbug.com/webrtc/1965,https://issues.webrtc.org/issues/42226712 +https://crbug.com/webrtc/1966,https://issues.webrtc.org/issues/42226713 +https://crbug.com/webrtc/1967,https://issues.webrtc.org/issues/42226714 
+https://crbug.com/webrtc/1968,https://issues.webrtc.org/issues/42226715 +https://crbug.com/webrtc/1969,https://issues.webrtc.org/issues/42226716 +https://crbug.com/webrtc/197,https://issues.webrtc.org/issues/42226717 +https://crbug.com/webrtc/1970,https://issues.webrtc.org/issues/42226718 +https://crbug.com/webrtc/1972,https://issues.webrtc.org/issues/42226719 +https://crbug.com/webrtc/1973,https://issues.webrtc.org/issues/42226720 +https://crbug.com/webrtc/1974,https://issues.webrtc.org/issues/42226721 +https://crbug.com/webrtc/1975,https://issues.webrtc.org/issues/42226722 +https://crbug.com/webrtc/1976,https://issues.webrtc.org/issues/42226723 +https://crbug.com/webrtc/1977,https://issues.webrtc.org/issues/42226724 +https://crbug.com/webrtc/1978,https://issues.webrtc.org/issues/42226725 +https://crbug.com/webrtc/1979,https://issues.webrtc.org/issues/42226726 +https://crbug.com/webrtc/198,https://issues.webrtc.org/issues/42226727 +https://crbug.com/webrtc/1980,https://issues.webrtc.org/issues/42226728 +https://crbug.com/webrtc/1981,https://issues.webrtc.org/issues/42226729 +https://crbug.com/webrtc/1982,https://issues.webrtc.org/issues/42226730 +https://crbug.com/webrtc/1983,https://issues.webrtc.org/issues/42226731 +https://crbug.com/webrtc/1984,https://issues.webrtc.org/issues/42226732 +https://crbug.com/webrtc/1985,https://issues.webrtc.org/issues/42226733 +https://crbug.com/webrtc/1986,https://issues.webrtc.org/issues/42226734 +https://crbug.com/webrtc/1987,https://issues.webrtc.org/issues/42226735 +https://crbug.com/webrtc/1988,https://issues.webrtc.org/issues/42226736 +https://crbug.com/webrtc/1989,https://issues.webrtc.org/issues/42226737 +https://crbug.com/webrtc/199,https://issues.webrtc.org/issues/42226738 +https://crbug.com/webrtc/1990,https://issues.webrtc.org/issues/42226739 +https://crbug.com/webrtc/1991,https://issues.webrtc.org/issues/42226740 +https://crbug.com/webrtc/1992,https://issues.webrtc.org/issues/42226741 +https://crbug.com/webrtc/1993,https://issues.webrtc.org/issues/42226742 +https://crbug.com/webrtc/1994,https://issues.webrtc.org/issues/42226743 +https://crbug.com/webrtc/1995,https://issues.webrtc.org/issues/42226744 +https://crbug.com/webrtc/1996,https://issues.webrtc.org/issues/42226745 +https://crbug.com/webrtc/1997,https://issues.webrtc.org/issues/42226746 +https://crbug.com/webrtc/1998,https://issues.webrtc.org/issues/42226747 +https://crbug.com/webrtc/1999,https://issues.webrtc.org/issues/42226748 +https://crbug.com/webrtc/2,https://issues.webrtc.org/issues/42226749 +https://crbug.com/webrtc/20,https://issues.webrtc.org/issues/42226750 +https://crbug.com/webrtc/200,https://issues.webrtc.org/issues/42226751 +https://crbug.com/webrtc/2000,https://issues.webrtc.org/issues/42226752 +https://crbug.com/webrtc/2001,https://issues.webrtc.org/issues/42226753 +https://crbug.com/webrtc/2002,https://issues.webrtc.org/issues/42226754 +https://crbug.com/webrtc/2003,https://issues.webrtc.org/issues/42226755 +https://crbug.com/webrtc/2004,https://issues.webrtc.org/issues/42226756 +https://crbug.com/webrtc/2005,https://issues.webrtc.org/issues/42226757 +https://crbug.com/webrtc/2006,https://issues.webrtc.org/issues/42226758 +https://crbug.com/webrtc/2007,https://issues.webrtc.org/issues/42226759 +https://crbug.com/webrtc/2008,https://issues.webrtc.org/issues/42226760 +https://crbug.com/webrtc/2009,https://issues.webrtc.org/issues/42226761 +https://crbug.com/webrtc/201,https://issues.webrtc.org/issues/42226762 
+https://crbug.com/webrtc/2010,https://issues.webrtc.org/issues/42226763 +https://crbug.com/webrtc/2011,https://issues.webrtc.org/issues/42226764 +https://crbug.com/webrtc/2012,https://issues.webrtc.org/issues/42226765 +https://crbug.com/webrtc/2013,https://issues.webrtc.org/issues/42226766 +https://crbug.com/webrtc/2014,https://issues.webrtc.org/issues/42226767 +https://crbug.com/webrtc/2015,https://issues.webrtc.org/issues/42226768 +https://crbug.com/webrtc/2016,https://issues.webrtc.org/issues/42226769 +https://crbug.com/webrtc/2017,https://issues.webrtc.org/issues/42226770 +https://crbug.com/webrtc/2018,https://issues.webrtc.org/issues/42226771 +https://crbug.com/webrtc/2019,https://issues.webrtc.org/issues/42226772 +https://crbug.com/webrtc/202,https://issues.webrtc.org/issues/42226773 +https://crbug.com/webrtc/2020,https://issues.webrtc.org/issues/42226774 +https://crbug.com/webrtc/2021,https://issues.webrtc.org/issues/42226775 +https://crbug.com/webrtc/2022,https://issues.webrtc.org/issues/42226776 +https://crbug.com/webrtc/2023,https://issues.webrtc.org/issues/42226777 +https://crbug.com/webrtc/2024,https://issues.webrtc.org/issues/42226778 +https://crbug.com/webrtc/2025,https://issues.webrtc.org/issues/42226779 +https://crbug.com/webrtc/2026,https://issues.webrtc.org/issues/42226780 +https://crbug.com/webrtc/2027,https://issues.webrtc.org/issues/42226781 +https://crbug.com/webrtc/2028,https://issues.webrtc.org/issues/42226782 +https://crbug.com/webrtc/2029,https://issues.webrtc.org/issues/42226783 +https://crbug.com/webrtc/203,https://issues.webrtc.org/issues/42226784 +https://crbug.com/webrtc/2030,https://issues.webrtc.org/issues/42226785 +https://crbug.com/webrtc/2032,https://issues.webrtc.org/issues/42226786 +https://crbug.com/webrtc/2033,https://issues.webrtc.org/issues/42226787 +https://crbug.com/webrtc/2034,https://issues.webrtc.org/issues/42226788 +https://crbug.com/webrtc/2035,https://issues.webrtc.org/issues/42226789 +https://crbug.com/webrtc/2036,https://issues.webrtc.org/issues/42226790 +https://crbug.com/webrtc/2037,https://issues.webrtc.org/issues/42226791 +https://crbug.com/webrtc/2038,https://issues.webrtc.org/issues/42226792 +https://crbug.com/webrtc/2039,https://issues.webrtc.org/issues/42226793 +https://crbug.com/webrtc/204,https://issues.webrtc.org/issues/42226794 +https://crbug.com/webrtc/2040,https://issues.webrtc.org/issues/42226795 +https://crbug.com/webrtc/2041,https://issues.webrtc.org/issues/42226796 +https://crbug.com/webrtc/2042,https://issues.webrtc.org/issues/42226797 +https://crbug.com/webrtc/2043,https://issues.webrtc.org/issues/42226798 +https://crbug.com/webrtc/2044,https://issues.webrtc.org/issues/42226799 +https://crbug.com/webrtc/2045,https://issues.webrtc.org/issues/42226800 +https://crbug.com/webrtc/2046,https://issues.webrtc.org/issues/42226801 +https://crbug.com/webrtc/2047,https://issues.webrtc.org/issues/42226802 +https://crbug.com/webrtc/2048,https://issues.webrtc.org/issues/42226803 +https://crbug.com/webrtc/2049,https://issues.webrtc.org/issues/42226804 +https://crbug.com/webrtc/205,https://issues.webrtc.org/issues/42226805 +https://crbug.com/webrtc/2050,https://issues.webrtc.org/issues/42226806 +https://crbug.com/webrtc/2051,https://issues.webrtc.org/issues/42226807 +https://crbug.com/webrtc/2052,https://issues.webrtc.org/issues/42226808 +https://crbug.com/webrtc/2053,https://issues.webrtc.org/issues/42226809 +https://crbug.com/webrtc/2054,https://issues.webrtc.org/issues/42226810 
+https://crbug.com/webrtc/2055,https://issues.webrtc.org/issues/42226811 +https://crbug.com/webrtc/2056,https://issues.webrtc.org/issues/42226812 +https://crbug.com/webrtc/2057,https://issues.webrtc.org/issues/42226813 +https://crbug.com/webrtc/2058,https://issues.webrtc.org/issues/42226814 +https://crbug.com/webrtc/2059,https://issues.webrtc.org/issues/42226815 +https://crbug.com/webrtc/206,https://issues.webrtc.org/issues/42226816 +https://crbug.com/webrtc/2060,https://issues.webrtc.org/issues/42226817 +https://crbug.com/webrtc/2061,https://issues.webrtc.org/issues/42226818 +https://crbug.com/webrtc/2062,https://issues.webrtc.org/issues/42226819 +https://crbug.com/webrtc/2063,https://issues.webrtc.org/issues/42226820 +https://crbug.com/webrtc/2064,https://issues.webrtc.org/issues/42226821 +https://crbug.com/webrtc/2065,https://issues.webrtc.org/issues/42226822 +https://crbug.com/webrtc/2066,https://issues.webrtc.org/issues/42226823 +https://crbug.com/webrtc/2067,https://issues.webrtc.org/issues/42226824 +https://crbug.com/webrtc/2068,https://issues.webrtc.org/issues/42226825 +https://crbug.com/webrtc/2069,https://issues.webrtc.org/issues/42226826 +https://crbug.com/webrtc/207,https://issues.webrtc.org/issues/42226827 +https://crbug.com/webrtc/2070,https://issues.webrtc.org/issues/42226828 +https://crbug.com/webrtc/2071,https://issues.webrtc.org/issues/42226829 +https://crbug.com/webrtc/2072,https://issues.webrtc.org/issues/42226830 +https://crbug.com/webrtc/2073,https://issues.webrtc.org/issues/42226831 +https://crbug.com/webrtc/2074,https://issues.webrtc.org/issues/42226832 +https://crbug.com/webrtc/2075,https://issues.webrtc.org/issues/42226833 +https://crbug.com/webrtc/2077,https://issues.webrtc.org/issues/42226834 +https://crbug.com/webrtc/2078,https://issues.webrtc.org/issues/42226835 +https://crbug.com/webrtc/2079,https://issues.webrtc.org/issues/42226836 +https://crbug.com/webrtc/208,https://issues.webrtc.org/issues/42226837 +https://crbug.com/webrtc/2080,https://issues.webrtc.org/issues/42226838 +https://crbug.com/webrtc/2081,https://issues.webrtc.org/issues/42226839 +https://crbug.com/webrtc/2082,https://issues.webrtc.org/issues/42226840 +https://crbug.com/webrtc/2083,https://issues.webrtc.org/issues/42226841 +https://crbug.com/webrtc/2084,https://issues.webrtc.org/issues/42226842 +https://crbug.com/webrtc/2085,https://issues.webrtc.org/issues/42226843 +https://crbug.com/webrtc/2086,https://issues.webrtc.org/issues/42226844 +https://crbug.com/webrtc/2087,https://issues.webrtc.org/issues/42226845 +https://crbug.com/webrtc/2088,https://issues.webrtc.org/issues/42226846 +https://crbug.com/webrtc/2089,https://issues.webrtc.org/issues/42226847 +https://crbug.com/webrtc/209,https://issues.webrtc.org/issues/42226848 +https://crbug.com/webrtc/2090,https://issues.webrtc.org/issues/42226849 +https://crbug.com/webrtc/2091,https://issues.webrtc.org/issues/42226850 +https://crbug.com/webrtc/2092,https://issues.webrtc.org/issues/42226851 +https://crbug.com/webrtc/2093,https://issues.webrtc.org/issues/42226852 +https://crbug.com/webrtc/2094,https://issues.webrtc.org/issues/42226853 +https://crbug.com/webrtc/2095,https://issues.webrtc.org/issues/42226854 +https://crbug.com/webrtc/2096,https://issues.webrtc.org/issues/42226855 +https://crbug.com/webrtc/2097,https://issues.webrtc.org/issues/42226856 +https://crbug.com/webrtc/2098,https://issues.webrtc.org/issues/42226857 +https://crbug.com/webrtc/2099,https://issues.webrtc.org/issues/42226858 
+https://crbug.com/webrtc/21,https://issues.webrtc.org/issues/42226859 +https://crbug.com/webrtc/210,https://issues.webrtc.org/issues/42226860 +https://crbug.com/webrtc/2100,https://issues.webrtc.org/issues/42226861 +https://crbug.com/webrtc/2101,https://issues.webrtc.org/issues/42226862 +https://crbug.com/webrtc/2102,https://issues.webrtc.org/issues/42226863 +https://crbug.com/webrtc/2103,https://issues.webrtc.org/issues/42226864 +https://crbug.com/webrtc/2104,https://issues.webrtc.org/issues/42226865 +https://crbug.com/webrtc/2105,https://issues.webrtc.org/issues/42226866 +https://crbug.com/webrtc/2106,https://issues.webrtc.org/issues/42226867 +https://crbug.com/webrtc/2107,https://issues.webrtc.org/issues/42226868 +https://crbug.com/webrtc/2108,https://issues.webrtc.org/issues/42226869 +https://crbug.com/webrtc/2109,https://issues.webrtc.org/issues/42226870 +https://crbug.com/webrtc/211,https://issues.webrtc.org/issues/42226871 +https://crbug.com/webrtc/2110,https://issues.webrtc.org/issues/42226872 +https://crbug.com/webrtc/2111,https://issues.webrtc.org/issues/42226873 +https://crbug.com/webrtc/2112,https://issues.webrtc.org/issues/42226874 +https://crbug.com/webrtc/2113,https://issues.webrtc.org/issues/42226875 +https://crbug.com/webrtc/2114,https://issues.webrtc.org/issues/42226876 +https://crbug.com/webrtc/2115,https://issues.webrtc.org/issues/42226877 +https://crbug.com/webrtc/2116,https://issues.webrtc.org/issues/42226878 +https://crbug.com/webrtc/2117,https://issues.webrtc.org/issues/42226879 +https://crbug.com/webrtc/2118,https://issues.webrtc.org/issues/42226880 +https://crbug.com/webrtc/2119,https://issues.webrtc.org/issues/42226881 +https://crbug.com/webrtc/212,https://issues.webrtc.org/issues/42226882 +https://crbug.com/webrtc/2120,https://issues.webrtc.org/issues/42226883 +https://crbug.com/webrtc/2121,https://issues.webrtc.org/issues/42226884 +https://crbug.com/webrtc/2122,https://issues.webrtc.org/issues/42226885 +https://crbug.com/webrtc/2123,https://issues.webrtc.org/issues/42226886 +https://crbug.com/webrtc/2124,https://issues.webrtc.org/issues/42226887 +https://crbug.com/webrtc/2125,https://issues.webrtc.org/issues/42226888 +https://crbug.com/webrtc/2126,https://issues.webrtc.org/issues/42226889 +https://crbug.com/webrtc/2127,https://issues.webrtc.org/issues/42226890 +https://crbug.com/webrtc/2128,https://issues.webrtc.org/issues/42226891 +https://crbug.com/webrtc/2129,https://issues.webrtc.org/issues/42226892 +https://crbug.com/webrtc/213,https://issues.webrtc.org/issues/42226893 +https://crbug.com/webrtc/2130,https://issues.webrtc.org/issues/42226894 +https://crbug.com/webrtc/2132,https://issues.webrtc.org/issues/42226895 +https://crbug.com/webrtc/2133,https://issues.webrtc.org/issues/42226896 +https://crbug.com/webrtc/2134,https://issues.webrtc.org/issues/42226897 +https://crbug.com/webrtc/2135,https://issues.webrtc.org/issues/42226898 +https://crbug.com/webrtc/2136,https://issues.webrtc.org/issues/42226899 +https://crbug.com/webrtc/2137,https://issues.webrtc.org/issues/42226900 +https://crbug.com/webrtc/2138,https://issues.webrtc.org/issues/42226901 +https://crbug.com/webrtc/2139,https://issues.webrtc.org/issues/42226902 +https://crbug.com/webrtc/214,https://issues.webrtc.org/issues/42226903 +https://crbug.com/webrtc/2140,https://issues.webrtc.org/issues/42226904 +https://crbug.com/webrtc/2141,https://issues.webrtc.org/issues/42226905 +https://crbug.com/webrtc/2142,https://issues.webrtc.org/issues/42226906 
+https://crbug.com/webrtc/2144,https://issues.webrtc.org/issues/42226907 +https://crbug.com/webrtc/2145,https://issues.webrtc.org/issues/42226908 +https://crbug.com/webrtc/2146,https://issues.webrtc.org/issues/42226909 +https://crbug.com/webrtc/2147,https://issues.webrtc.org/issues/42226910 +https://crbug.com/webrtc/2148,https://issues.webrtc.org/issues/42226911 +https://crbug.com/webrtc/2149,https://issues.webrtc.org/issues/42226912 +https://crbug.com/webrtc/215,https://issues.webrtc.org/issues/42226913 +https://crbug.com/webrtc/2150,https://issues.webrtc.org/issues/42226914 +https://crbug.com/webrtc/2151,https://issues.webrtc.org/issues/42226915 +https://crbug.com/webrtc/2152,https://issues.webrtc.org/issues/42226916 +https://crbug.com/webrtc/2153,https://issues.webrtc.org/issues/42226917 +https://crbug.com/webrtc/2155,https://issues.webrtc.org/issues/42226918 +https://crbug.com/webrtc/2156,https://issues.webrtc.org/issues/42226919 +https://crbug.com/webrtc/2158,https://issues.webrtc.org/issues/42226920 +https://crbug.com/webrtc/2159,https://issues.webrtc.org/issues/42226921 +https://crbug.com/webrtc/216,https://issues.webrtc.org/issues/42226922 +https://crbug.com/webrtc/2160,https://issues.webrtc.org/issues/42226923 +https://crbug.com/webrtc/2161,https://issues.webrtc.org/issues/42226924 +https://crbug.com/webrtc/2162,https://issues.webrtc.org/issues/42226925 +https://crbug.com/webrtc/2163,https://issues.webrtc.org/issues/42226926 +https://crbug.com/webrtc/2164,https://issues.webrtc.org/issues/42226927 +https://crbug.com/webrtc/2165,https://issues.webrtc.org/issues/42226928 +https://crbug.com/webrtc/2166,https://issues.webrtc.org/issues/42226929 +https://crbug.com/webrtc/2167,https://issues.webrtc.org/issues/42226930 +https://crbug.com/webrtc/2168,https://issues.webrtc.org/issues/42226931 +https://crbug.com/webrtc/2169,https://issues.webrtc.org/issues/42226932 +https://crbug.com/webrtc/217,https://issues.webrtc.org/issues/42226933 +https://crbug.com/webrtc/2170,https://issues.webrtc.org/issues/42226934 +https://crbug.com/webrtc/2171,https://issues.webrtc.org/issues/42226935 +https://crbug.com/webrtc/2172,https://issues.webrtc.org/issues/42226936 +https://crbug.com/webrtc/2173,https://issues.webrtc.org/issues/42226937 +https://crbug.com/webrtc/2174,https://issues.webrtc.org/issues/42226938 +https://crbug.com/webrtc/2175,https://issues.webrtc.org/issues/42226939 +https://crbug.com/webrtc/2176,https://issues.webrtc.org/issues/42226940 +https://crbug.com/webrtc/2177,https://issues.webrtc.org/issues/42226941 +https://crbug.com/webrtc/2178,https://issues.webrtc.org/issues/42226942 +https://crbug.com/webrtc/2179,https://issues.webrtc.org/issues/42226943 +https://crbug.com/webrtc/218,https://issues.webrtc.org/issues/42226944 +https://crbug.com/webrtc/2180,https://issues.webrtc.org/issues/42226945 +https://crbug.com/webrtc/2181,https://issues.webrtc.org/issues/42226946 +https://crbug.com/webrtc/2182,https://issues.webrtc.org/issues/42226947 +https://crbug.com/webrtc/2183,https://issues.webrtc.org/issues/42226948 +https://crbug.com/webrtc/2184,https://issues.webrtc.org/issues/42226949 +https://crbug.com/webrtc/2185,https://issues.webrtc.org/issues/42226950 +https://crbug.com/webrtc/2186,https://issues.webrtc.org/issues/42226951 +https://crbug.com/webrtc/2187,https://issues.webrtc.org/issues/42226952 +https://crbug.com/webrtc/2188,https://issues.webrtc.org/issues/42226953 +https://crbug.com/webrtc/2189,https://issues.webrtc.org/issues/42226954 
+https://crbug.com/webrtc/219,https://issues.webrtc.org/issues/42226955 +https://crbug.com/webrtc/2190,https://issues.webrtc.org/issues/42226956 +https://crbug.com/webrtc/2191,https://issues.webrtc.org/issues/42226957 +https://crbug.com/webrtc/2192,https://issues.webrtc.org/issues/42226958 +https://crbug.com/webrtc/2193,https://issues.webrtc.org/issues/42226959 +https://crbug.com/webrtc/2194,https://issues.webrtc.org/issues/42226960 +https://crbug.com/webrtc/2195,https://issues.webrtc.org/issues/42226961 +https://crbug.com/webrtc/2196,https://issues.webrtc.org/issues/42226962 +https://crbug.com/webrtc/2197,https://issues.webrtc.org/issues/42226963 +https://crbug.com/webrtc/2198,https://issues.webrtc.org/issues/42226964 +https://crbug.com/webrtc/2199,https://issues.webrtc.org/issues/42226965 +https://crbug.com/webrtc/22,https://issues.webrtc.org/issues/42226966 +https://crbug.com/webrtc/220,https://issues.webrtc.org/issues/42226967 +https://crbug.com/webrtc/2200,https://issues.webrtc.org/issues/42226968 +https://crbug.com/webrtc/2201,https://issues.webrtc.org/issues/42226969 +https://crbug.com/webrtc/2202,https://issues.webrtc.org/issues/42226970 +https://crbug.com/webrtc/2203,https://issues.webrtc.org/issues/42226971 +https://crbug.com/webrtc/2204,https://issues.webrtc.org/issues/42226972 +https://crbug.com/webrtc/2205,https://issues.webrtc.org/issues/42226973 +https://crbug.com/webrtc/2206,https://issues.webrtc.org/issues/42226974 +https://crbug.com/webrtc/2207,https://issues.webrtc.org/issues/42226975 +https://crbug.com/webrtc/2208,https://issues.webrtc.org/issues/42226976 +https://crbug.com/webrtc/2209,https://issues.webrtc.org/issues/42226977 +https://crbug.com/webrtc/221,https://issues.webrtc.org/issues/42226978 +https://crbug.com/webrtc/2210,https://issues.webrtc.org/issues/42226979 +https://crbug.com/webrtc/2211,https://issues.webrtc.org/issues/42226980 +https://crbug.com/webrtc/2212,https://issues.webrtc.org/issues/42226981 +https://crbug.com/webrtc/2213,https://issues.webrtc.org/issues/42226982 +https://crbug.com/webrtc/2214,https://issues.webrtc.org/issues/42226983 +https://crbug.com/webrtc/2215,https://issues.webrtc.org/issues/42226984 +https://crbug.com/webrtc/2216,https://issues.webrtc.org/issues/42226985 +https://crbug.com/webrtc/2217,https://issues.webrtc.org/issues/42226986 +https://crbug.com/webrtc/2218,https://issues.webrtc.org/issues/42226987 +https://crbug.com/webrtc/2219,https://issues.webrtc.org/issues/42226988 +https://crbug.com/webrtc/222,https://issues.webrtc.org/issues/42226989 +https://crbug.com/webrtc/2220,https://issues.webrtc.org/issues/42226990 +https://crbug.com/webrtc/2221,https://issues.webrtc.org/issues/42226991 +https://crbug.com/webrtc/2222,https://issues.webrtc.org/issues/42226992 +https://crbug.com/webrtc/2223,https://issues.webrtc.org/issues/42226993 +https://crbug.com/webrtc/2224,https://issues.webrtc.org/issues/42226994 +https://crbug.com/webrtc/2225,https://issues.webrtc.org/issues/42226995 +https://crbug.com/webrtc/2226,https://issues.webrtc.org/issues/42226996 +https://crbug.com/webrtc/2227,https://issues.webrtc.org/issues/42226997 +https://crbug.com/webrtc/2228,https://issues.webrtc.org/issues/42226998 +https://crbug.com/webrtc/2229,https://issues.webrtc.org/issues/42226999 +https://crbug.com/webrtc/223,https://issues.webrtc.org/issues/42227000 +https://crbug.com/webrtc/2230,https://issues.webrtc.org/issues/42227001 +https://crbug.com/webrtc/2231,https://issues.webrtc.org/issues/42227002 
+https://crbug.com/webrtc/2232,https://issues.webrtc.org/issues/42227003 +https://crbug.com/webrtc/2233,https://issues.webrtc.org/issues/42227004 +https://crbug.com/webrtc/2234,https://issues.webrtc.org/issues/42227005 +https://crbug.com/webrtc/2235,https://issues.webrtc.org/issues/42227006 +https://crbug.com/webrtc/2236,https://issues.webrtc.org/issues/42227007 +https://crbug.com/webrtc/2237,https://issues.webrtc.org/issues/42227008 +https://crbug.com/webrtc/2238,https://issues.webrtc.org/issues/42227009 +https://crbug.com/webrtc/2239,https://issues.webrtc.org/issues/42227010 +https://crbug.com/webrtc/224,https://issues.webrtc.org/issues/42227011 +https://crbug.com/webrtc/2240,https://issues.webrtc.org/issues/42227012 +https://crbug.com/webrtc/2241,https://issues.webrtc.org/issues/42227013 +https://crbug.com/webrtc/2242,https://issues.webrtc.org/issues/42227014 +https://crbug.com/webrtc/2244,https://issues.webrtc.org/issues/42227015 +https://crbug.com/webrtc/2245,https://issues.webrtc.org/issues/42227016 +https://crbug.com/webrtc/2246,https://issues.webrtc.org/issues/42227017 +https://crbug.com/webrtc/2247,https://issues.webrtc.org/issues/42227018 +https://crbug.com/webrtc/2248,https://issues.webrtc.org/issues/42227019 +https://crbug.com/webrtc/2249,https://issues.webrtc.org/issues/42227020 +https://crbug.com/webrtc/225,https://issues.webrtc.org/issues/42227021 +https://crbug.com/webrtc/2250,https://issues.webrtc.org/issues/42227022 +https://crbug.com/webrtc/2251,https://issues.webrtc.org/issues/42227023 +https://crbug.com/webrtc/2252,https://issues.webrtc.org/issues/42227024 +https://crbug.com/webrtc/2253,https://issues.webrtc.org/issues/42227025 +https://crbug.com/webrtc/2254,https://issues.webrtc.org/issues/42227026 +https://crbug.com/webrtc/2255,https://issues.webrtc.org/issues/42227027 +https://crbug.com/webrtc/2256,https://issues.webrtc.org/issues/42227028 +https://crbug.com/webrtc/2257,https://issues.webrtc.org/issues/42227029 +https://crbug.com/webrtc/2258,https://issues.webrtc.org/issues/42227030 +https://crbug.com/webrtc/2259,https://issues.webrtc.org/issues/42227031 +https://crbug.com/webrtc/226,https://issues.webrtc.org/issues/42227032 +https://crbug.com/webrtc/2260,https://issues.webrtc.org/issues/42227033 +https://crbug.com/webrtc/2261,https://issues.webrtc.org/issues/42227034 +https://crbug.com/webrtc/2262,https://issues.webrtc.org/issues/42227035 +https://crbug.com/webrtc/2263,https://issues.webrtc.org/issues/42227036 +https://crbug.com/webrtc/2264,https://issues.webrtc.org/issues/42227037 +https://crbug.com/webrtc/2265,https://issues.webrtc.org/issues/42227038 +https://crbug.com/webrtc/2266,https://issues.webrtc.org/issues/42227039 +https://crbug.com/webrtc/2267,https://issues.webrtc.org/issues/42227040 +https://crbug.com/webrtc/2268,https://issues.webrtc.org/issues/42227041 +https://crbug.com/webrtc/2269,https://issues.webrtc.org/issues/42227042 +https://crbug.com/webrtc/227,https://issues.webrtc.org/issues/42227043 +https://crbug.com/webrtc/2270,https://issues.webrtc.org/issues/42227044 +https://crbug.com/webrtc/2271,https://issues.webrtc.org/issues/42227045 +https://crbug.com/webrtc/2272,https://issues.webrtc.org/issues/42227046 +https://crbug.com/webrtc/2273,https://issues.webrtc.org/issues/42227047 +https://crbug.com/webrtc/2274,https://issues.webrtc.org/issues/42227048 +https://crbug.com/webrtc/2275,https://issues.webrtc.org/issues/42227049 +https://crbug.com/webrtc/2277,https://issues.webrtc.org/issues/42227050 
+https://crbug.com/webrtc/2278,https://issues.webrtc.org/issues/42227051 +https://crbug.com/webrtc/2279,https://issues.webrtc.org/issues/42227052 +https://crbug.com/webrtc/228,https://issues.webrtc.org/issues/42227053 +https://crbug.com/webrtc/2280,https://issues.webrtc.org/issues/42227054 +https://crbug.com/webrtc/2281,https://issues.webrtc.org/issues/42227055 +https://crbug.com/webrtc/2282,https://issues.webrtc.org/issues/42227056 +https://crbug.com/webrtc/2283,https://issues.webrtc.org/issues/42227057 +https://crbug.com/webrtc/2284,https://issues.webrtc.org/issues/42227058 +https://crbug.com/webrtc/2285,https://issues.webrtc.org/issues/42227059 +https://crbug.com/webrtc/2286,https://issues.webrtc.org/issues/42227060 +https://crbug.com/webrtc/2287,https://issues.webrtc.org/issues/42227061 +https://crbug.com/webrtc/2288,https://issues.webrtc.org/issues/42227062 +https://crbug.com/webrtc/2289,https://issues.webrtc.org/issues/42227063 +https://crbug.com/webrtc/229,https://issues.webrtc.org/issues/42227064 +https://crbug.com/webrtc/2290,https://issues.webrtc.org/issues/42227065 +https://crbug.com/webrtc/2291,https://issues.webrtc.org/issues/42227066 +https://crbug.com/webrtc/2292,https://issues.webrtc.org/issues/42227067 +https://crbug.com/webrtc/2293,https://issues.webrtc.org/issues/42227068 +https://crbug.com/webrtc/2294,https://issues.webrtc.org/issues/42227069 +https://crbug.com/webrtc/2295,https://issues.webrtc.org/issues/42227070 +https://crbug.com/webrtc/2296,https://issues.webrtc.org/issues/42227071 +https://crbug.com/webrtc/2297,https://issues.webrtc.org/issues/42227072 +https://crbug.com/webrtc/2298,https://issues.webrtc.org/issues/42227073 +https://crbug.com/webrtc/2299,https://issues.webrtc.org/issues/42227074 +https://crbug.com/webrtc/23,https://issues.webrtc.org/issues/42227075 +https://crbug.com/webrtc/230,https://issues.webrtc.org/issues/42227076 +https://crbug.com/webrtc/2300,https://issues.webrtc.org/issues/42227077 +https://crbug.com/webrtc/2301,https://issues.webrtc.org/issues/42227078 +https://crbug.com/webrtc/2302,https://issues.webrtc.org/issues/42227079 +https://crbug.com/webrtc/2303,https://issues.webrtc.org/issues/42227080 +https://crbug.com/webrtc/2304,https://issues.webrtc.org/issues/42227081 +https://crbug.com/webrtc/2305,https://issues.webrtc.org/issues/42227082 +https://crbug.com/webrtc/2306,https://issues.webrtc.org/issues/42227083 +https://crbug.com/webrtc/2307,https://issues.webrtc.org/issues/42227084 +https://crbug.com/webrtc/2308,https://issues.webrtc.org/issues/42227085 +https://crbug.com/webrtc/2309,https://issues.webrtc.org/issues/42227086 +https://crbug.com/webrtc/231,https://issues.webrtc.org/issues/42227087 +https://crbug.com/webrtc/2310,https://issues.webrtc.org/issues/42227088 +https://crbug.com/webrtc/2311,https://issues.webrtc.org/issues/42227089 +https://crbug.com/webrtc/2312,https://issues.webrtc.org/issues/42227090 +https://crbug.com/webrtc/2313,https://issues.webrtc.org/issues/42227091 +https://crbug.com/webrtc/2314,https://issues.webrtc.org/issues/42227092 +https://crbug.com/webrtc/2315,https://issues.webrtc.org/issues/42227093 +https://crbug.com/webrtc/2316,https://issues.webrtc.org/issues/42227094 +https://crbug.com/webrtc/2317,https://issues.webrtc.org/issues/42227095 +https://crbug.com/webrtc/2318,https://issues.webrtc.org/issues/42227096 +https://crbug.com/webrtc/2319,https://issues.webrtc.org/issues/42227097 +https://crbug.com/webrtc/232,https://issues.webrtc.org/issues/42227098 
+https://crbug.com/webrtc/2320,https://issues.webrtc.org/issues/42227099 +https://crbug.com/webrtc/2321,https://issues.webrtc.org/issues/42227100 +https://crbug.com/webrtc/2322,https://issues.webrtc.org/issues/42227101 +https://crbug.com/webrtc/2323,https://issues.webrtc.org/issues/42227102 +https://crbug.com/webrtc/2324,https://issues.webrtc.org/issues/42227103 +https://crbug.com/webrtc/2325,https://issues.webrtc.org/issues/42227104 +https://crbug.com/webrtc/2326,https://issues.webrtc.org/issues/42227105 +https://crbug.com/webrtc/2327,https://issues.webrtc.org/issues/42227106 +https://crbug.com/webrtc/2328,https://issues.webrtc.org/issues/42227107 +https://crbug.com/webrtc/2329,https://issues.webrtc.org/issues/42227108 +https://crbug.com/webrtc/233,https://issues.webrtc.org/issues/42227109 +https://crbug.com/webrtc/2330,https://issues.webrtc.org/issues/42227110 +https://crbug.com/webrtc/2331,https://issues.webrtc.org/issues/42227111 +https://crbug.com/webrtc/2332,https://issues.webrtc.org/issues/42227112 +https://crbug.com/webrtc/2333,https://issues.webrtc.org/issues/42227113 +https://crbug.com/webrtc/2334,https://issues.webrtc.org/issues/42227114 +https://crbug.com/webrtc/2335,https://issues.webrtc.org/issues/42227115 +https://crbug.com/webrtc/2336,https://issues.webrtc.org/issues/42227116 +https://crbug.com/webrtc/2337,https://issues.webrtc.org/issues/42227117 +https://crbug.com/webrtc/2338,https://issues.webrtc.org/issues/42227118 +https://crbug.com/webrtc/2339,https://issues.webrtc.org/issues/42227119 +https://crbug.com/webrtc/234,https://issues.webrtc.org/issues/42227120 +https://crbug.com/webrtc/2340,https://issues.webrtc.org/issues/42227121 +https://crbug.com/webrtc/2341,https://issues.webrtc.org/issues/42227122 +https://crbug.com/webrtc/2342,https://issues.webrtc.org/issues/42227123 +https://crbug.com/webrtc/2343,https://issues.webrtc.org/issues/42227124 +https://crbug.com/webrtc/2344,https://issues.webrtc.org/issues/42227125 +https://crbug.com/webrtc/2345,https://issues.webrtc.org/issues/42227126 +https://crbug.com/webrtc/2346,https://issues.webrtc.org/issues/42227127 +https://crbug.com/webrtc/2347,https://issues.webrtc.org/issues/42227128 +https://crbug.com/webrtc/2348,https://issues.webrtc.org/issues/42227129 +https://crbug.com/webrtc/2349,https://issues.webrtc.org/issues/42227130 +https://crbug.com/webrtc/235,https://issues.webrtc.org/issues/42227131 +https://crbug.com/webrtc/2350,https://issues.webrtc.org/issues/42227132 +https://crbug.com/webrtc/2351,https://issues.webrtc.org/issues/42227133 +https://crbug.com/webrtc/2352,https://issues.webrtc.org/issues/42227134 +https://crbug.com/webrtc/2353,https://issues.webrtc.org/issues/42227135 +https://crbug.com/webrtc/2354,https://issues.webrtc.org/issues/42227136 +https://crbug.com/webrtc/2355,https://issues.webrtc.org/issues/42227137 +https://crbug.com/webrtc/2356,https://issues.webrtc.org/issues/42227138 +https://crbug.com/webrtc/2357,https://issues.webrtc.org/issues/42227139 +https://crbug.com/webrtc/2358,https://issues.webrtc.org/issues/42227140 +https://crbug.com/webrtc/2359,https://issues.webrtc.org/issues/42227141 +https://crbug.com/webrtc/236,https://issues.webrtc.org/issues/42227142 +https://crbug.com/webrtc/2360,https://issues.webrtc.org/issues/42227143 +https://crbug.com/webrtc/2361,https://issues.webrtc.org/issues/42227144 +https://crbug.com/webrtc/2362,https://issues.webrtc.org/issues/42227145 +https://crbug.com/webrtc/2363,https://issues.webrtc.org/issues/42227146 
+https://crbug.com/webrtc/2364,https://issues.webrtc.org/issues/42227147 +https://crbug.com/webrtc/2365,https://issues.webrtc.org/issues/42227148 +https://crbug.com/webrtc/2366,https://issues.webrtc.org/issues/42227149 +https://crbug.com/webrtc/2367,https://issues.webrtc.org/issues/42227150 +https://crbug.com/webrtc/2368,https://issues.webrtc.org/issues/42227151 +https://crbug.com/webrtc/2369,https://issues.webrtc.org/issues/42227152 +https://crbug.com/webrtc/237,https://issues.webrtc.org/issues/42227153 +https://crbug.com/webrtc/2370,https://issues.webrtc.org/issues/42227154 +https://crbug.com/webrtc/2371,https://issues.webrtc.org/issues/42227155 +https://crbug.com/webrtc/2372,https://issues.webrtc.org/issues/42227156 +https://crbug.com/webrtc/2373,https://issues.webrtc.org/issues/42227157 +https://crbug.com/webrtc/2374,https://issues.webrtc.org/issues/42227158 +https://crbug.com/webrtc/2375,https://issues.webrtc.org/issues/42227159 +https://crbug.com/webrtc/2376,https://issues.webrtc.org/issues/42227160 +https://crbug.com/webrtc/2377,https://issues.webrtc.org/issues/42227161 +https://crbug.com/webrtc/2378,https://issues.webrtc.org/issues/42227162 +https://crbug.com/webrtc/2379,https://issues.webrtc.org/issues/42227163 +https://crbug.com/webrtc/238,https://issues.webrtc.org/issues/42227164 +https://crbug.com/webrtc/2380,https://issues.webrtc.org/issues/42227165 +https://crbug.com/webrtc/2381,https://issues.webrtc.org/issues/42227166 +https://crbug.com/webrtc/2382,https://issues.webrtc.org/issues/42227167 +https://crbug.com/webrtc/2383,https://issues.webrtc.org/issues/42227168 +https://crbug.com/webrtc/2384,https://issues.webrtc.org/issues/42227169 +https://crbug.com/webrtc/2385,https://issues.webrtc.org/issues/42227170 +https://crbug.com/webrtc/2386,https://issues.webrtc.org/issues/42227171 +https://crbug.com/webrtc/2387,https://issues.webrtc.org/issues/42227172 +https://crbug.com/webrtc/2388,https://issues.webrtc.org/issues/42227173 +https://crbug.com/webrtc/2389,https://issues.webrtc.org/issues/42227174 +https://crbug.com/webrtc/239,https://issues.webrtc.org/issues/42227175 +https://crbug.com/webrtc/2390,https://issues.webrtc.org/issues/42227176 +https://crbug.com/webrtc/2391,https://issues.webrtc.org/issues/42227177 +https://crbug.com/webrtc/2392,https://issues.webrtc.org/issues/42227178 +https://crbug.com/webrtc/2393,https://issues.webrtc.org/issues/42227179 +https://crbug.com/webrtc/2394,https://issues.webrtc.org/issues/42227180 +https://crbug.com/webrtc/2395,https://issues.webrtc.org/issues/42227181 +https://crbug.com/webrtc/2396,https://issues.webrtc.org/issues/42227182 +https://crbug.com/webrtc/2397,https://issues.webrtc.org/issues/42227183 +https://crbug.com/webrtc/2398,https://issues.webrtc.org/issues/42227184 +https://crbug.com/webrtc/2399,https://issues.webrtc.org/issues/42227185 +https://crbug.com/webrtc/24,https://issues.webrtc.org/issues/42227186 +https://crbug.com/webrtc/240,https://issues.webrtc.org/issues/42227187 +https://crbug.com/webrtc/2400,https://issues.webrtc.org/issues/42227188 +https://crbug.com/webrtc/2401,https://issues.webrtc.org/issues/42227189 +https://crbug.com/webrtc/2402,https://issues.webrtc.org/issues/42227190 +https://crbug.com/webrtc/2403,https://issues.webrtc.org/issues/42227191 +https://crbug.com/webrtc/2404,https://issues.webrtc.org/issues/42227192 +https://crbug.com/webrtc/2405,https://issues.webrtc.org/issues/42227193 +https://crbug.com/webrtc/2406,https://issues.webrtc.org/issues/42227194 
+https://crbug.com/webrtc/2407,https://issues.webrtc.org/issues/42227195 +https://crbug.com/webrtc/2408,https://issues.webrtc.org/issues/42227196 +https://crbug.com/webrtc/2409,https://issues.webrtc.org/issues/42227197 +https://crbug.com/webrtc/241,https://issues.webrtc.org/issues/42227198 +https://crbug.com/webrtc/2410,https://issues.webrtc.org/issues/42227199 +https://crbug.com/webrtc/2411,https://issues.webrtc.org/issues/42227200 +https://crbug.com/webrtc/2412,https://issues.webrtc.org/issues/42227201 +https://crbug.com/webrtc/2413,https://issues.webrtc.org/issues/42227202 +https://crbug.com/webrtc/2414,https://issues.webrtc.org/issues/42227203 +https://crbug.com/webrtc/2415,https://issues.webrtc.org/issues/42227204 +https://crbug.com/webrtc/2416,https://issues.webrtc.org/issues/42227205 +https://crbug.com/webrtc/2417,https://issues.webrtc.org/issues/42227206 +https://crbug.com/webrtc/2418,https://issues.webrtc.org/issues/42227207 +https://crbug.com/webrtc/2419,https://issues.webrtc.org/issues/42227208 +https://crbug.com/webrtc/242,https://issues.webrtc.org/issues/42227209 +https://crbug.com/webrtc/2420,https://issues.webrtc.org/issues/42227210 +https://crbug.com/webrtc/2421,https://issues.webrtc.org/issues/42227211 +https://crbug.com/webrtc/2422,https://issues.webrtc.org/issues/42227212 +https://crbug.com/webrtc/2423,https://issues.webrtc.org/issues/42227213 +https://crbug.com/webrtc/2424,https://issues.webrtc.org/issues/42227214 +https://crbug.com/webrtc/2425,https://issues.webrtc.org/issues/42227215 +https://crbug.com/webrtc/2426,https://issues.webrtc.org/issues/42227216 +https://crbug.com/webrtc/2427,https://issues.webrtc.org/issues/42227217 +https://crbug.com/webrtc/2428,https://issues.webrtc.org/issues/42227218 +https://crbug.com/webrtc/2429,https://issues.webrtc.org/issues/42227219 +https://crbug.com/webrtc/243,https://issues.webrtc.org/issues/42227220 +https://crbug.com/webrtc/2430,https://issues.webrtc.org/issues/42227221 +https://crbug.com/webrtc/2431,https://issues.webrtc.org/issues/42227222 +https://crbug.com/webrtc/2432,https://issues.webrtc.org/issues/42227223 +https://crbug.com/webrtc/2433,https://issues.webrtc.org/issues/42227224 +https://crbug.com/webrtc/2434,https://issues.webrtc.org/issues/42227225 +https://crbug.com/webrtc/2435,https://issues.webrtc.org/issues/42227226 +https://crbug.com/webrtc/2436,https://issues.webrtc.org/issues/42227227 +https://crbug.com/webrtc/2437,https://issues.webrtc.org/issues/42227228 +https://crbug.com/webrtc/2438,https://issues.webrtc.org/issues/42227229 +https://crbug.com/webrtc/2439,https://issues.webrtc.org/issues/42227230 +https://crbug.com/webrtc/244,https://issues.webrtc.org/issues/42227231 +https://crbug.com/webrtc/2440,https://issues.webrtc.org/issues/42227232 +https://crbug.com/webrtc/2441,https://issues.webrtc.org/issues/42227233 +https://crbug.com/webrtc/2442,https://issues.webrtc.org/issues/42227234 +https://crbug.com/webrtc/2443,https://issues.webrtc.org/issues/42227235 +https://crbug.com/webrtc/2444,https://issues.webrtc.org/issues/42227236 +https://crbug.com/webrtc/2445,https://issues.webrtc.org/issues/42227237 +https://crbug.com/webrtc/2446,https://issues.webrtc.org/issues/42227238 +https://crbug.com/webrtc/2447,https://issues.webrtc.org/issues/42227239 +https://crbug.com/webrtc/2448,https://issues.webrtc.org/issues/42227240 +https://crbug.com/webrtc/2449,https://issues.webrtc.org/issues/42227241 +https://crbug.com/webrtc/245,https://issues.webrtc.org/issues/42227242 
+https://crbug.com/webrtc/2450,https://issues.webrtc.org/issues/42227243 +https://crbug.com/webrtc/2451,https://issues.webrtc.org/issues/42227244 +https://crbug.com/webrtc/2452,https://issues.webrtc.org/issues/42227245 +https://crbug.com/webrtc/2453,https://issues.webrtc.org/issues/42227246 +https://crbug.com/webrtc/2454,https://issues.webrtc.org/issues/42227247 +https://crbug.com/webrtc/2455,https://issues.webrtc.org/issues/42227248 +https://crbug.com/webrtc/2456,https://issues.webrtc.org/issues/42227249 +https://crbug.com/webrtc/2457,https://issues.webrtc.org/issues/42227250 +https://crbug.com/webrtc/2458,https://issues.webrtc.org/issues/42227251 +https://crbug.com/webrtc/2459,https://issues.webrtc.org/issues/42227252 +https://crbug.com/webrtc/246,https://issues.webrtc.org/issues/42227253 +https://crbug.com/webrtc/2460,https://issues.webrtc.org/issues/42227254 +https://crbug.com/webrtc/2461,https://issues.webrtc.org/issues/42227255 +https://crbug.com/webrtc/2462,https://issues.webrtc.org/issues/42227256 +https://crbug.com/webrtc/2463,https://issues.webrtc.org/issues/42227257 +https://crbug.com/webrtc/2464,https://issues.webrtc.org/issues/42227258 +https://crbug.com/webrtc/2465,https://issues.webrtc.org/issues/42227259 +https://crbug.com/webrtc/2466,https://issues.webrtc.org/issues/42227260 +https://crbug.com/webrtc/2467,https://issues.webrtc.org/issues/42227261 +https://crbug.com/webrtc/2468,https://issues.webrtc.org/issues/42227262 +https://crbug.com/webrtc/2469,https://issues.webrtc.org/issues/42227263 +https://crbug.com/webrtc/247,https://issues.webrtc.org/issues/42227264 +https://crbug.com/webrtc/2470,https://issues.webrtc.org/issues/42227265 +https://crbug.com/webrtc/2471,https://issues.webrtc.org/issues/42227266 +https://crbug.com/webrtc/2472,https://issues.webrtc.org/issues/42227267 +https://crbug.com/webrtc/2473,https://issues.webrtc.org/issues/42227268 +https://crbug.com/webrtc/2474,https://issues.webrtc.org/issues/42227269 +https://crbug.com/webrtc/2475,https://issues.webrtc.org/issues/42227270 +https://crbug.com/webrtc/2476,https://issues.webrtc.org/issues/42227271 +https://crbug.com/webrtc/2477,https://issues.webrtc.org/issues/42227272 +https://crbug.com/webrtc/2478,https://issues.webrtc.org/issues/42227273 +https://crbug.com/webrtc/2479,https://issues.webrtc.org/issues/42227274 +https://crbug.com/webrtc/248,https://issues.webrtc.org/issues/42227275 +https://crbug.com/webrtc/2480,https://issues.webrtc.org/issues/42227276 +https://crbug.com/webrtc/2482,https://issues.webrtc.org/issues/42227277 +https://crbug.com/webrtc/2483,https://issues.webrtc.org/issues/42227278 +https://crbug.com/webrtc/2484,https://issues.webrtc.org/issues/42227279 +https://crbug.com/webrtc/2485,https://issues.webrtc.org/issues/42227280 +https://crbug.com/webrtc/2486,https://issues.webrtc.org/issues/42227281 +https://crbug.com/webrtc/2487,https://issues.webrtc.org/issues/42227282 +https://crbug.com/webrtc/2488,https://issues.webrtc.org/issues/42227283 +https://crbug.com/webrtc/249,https://issues.webrtc.org/issues/42227284 +https://crbug.com/webrtc/2490,https://issues.webrtc.org/issues/42227285 +https://crbug.com/webrtc/2491,https://issues.webrtc.org/issues/42227286 +https://crbug.com/webrtc/2492,https://issues.webrtc.org/issues/42227287 +https://crbug.com/webrtc/2493,https://issues.webrtc.org/issues/42227288 +https://crbug.com/webrtc/2494,https://issues.webrtc.org/issues/42227289 +https://crbug.com/webrtc/2495,https://issues.webrtc.org/issues/42227290 
+https://crbug.com/webrtc/2496,https://issues.webrtc.org/issues/42227291 +https://crbug.com/webrtc/2497,https://issues.webrtc.org/issues/42227292 +https://crbug.com/webrtc/2498,https://issues.webrtc.org/issues/42227293 +https://crbug.com/webrtc/2499,https://issues.webrtc.org/issues/42227294 +https://crbug.com/webrtc/25,https://issues.webrtc.org/issues/42227295 +https://crbug.com/webrtc/250,https://issues.webrtc.org/issues/42227296 +https://crbug.com/webrtc/2500,https://issues.webrtc.org/issues/42227297 +https://crbug.com/webrtc/2501,https://issues.webrtc.org/issues/42227298 +https://crbug.com/webrtc/2502,https://issues.webrtc.org/issues/42227299 +https://crbug.com/webrtc/2503,https://issues.webrtc.org/issues/42227300 +https://crbug.com/webrtc/2504,https://issues.webrtc.org/issues/42227301 +https://crbug.com/webrtc/2505,https://issues.webrtc.org/issues/42227302 +https://crbug.com/webrtc/2506,https://issues.webrtc.org/issues/42227303 +https://crbug.com/webrtc/2507,https://issues.webrtc.org/issues/42227304 +https://crbug.com/webrtc/2508,https://issues.webrtc.org/issues/42227305 +https://crbug.com/webrtc/2509,https://issues.webrtc.org/issues/42227306 +https://crbug.com/webrtc/251,https://issues.webrtc.org/issues/42227307 +https://crbug.com/webrtc/2510,https://issues.webrtc.org/issues/42227308 +https://crbug.com/webrtc/2511,https://issues.webrtc.org/issues/42227309 +https://crbug.com/webrtc/2512,https://issues.webrtc.org/issues/42227310 +https://crbug.com/webrtc/2513,https://issues.webrtc.org/issues/42227311 +https://crbug.com/webrtc/2514,https://issues.webrtc.org/issues/42227312 +https://crbug.com/webrtc/2515,https://issues.webrtc.org/issues/42227313 +https://crbug.com/webrtc/2516,https://issues.webrtc.org/issues/42227314 +https://crbug.com/webrtc/2517,https://issues.webrtc.org/issues/42227315 +https://crbug.com/webrtc/2518,https://issues.webrtc.org/issues/42227316 +https://crbug.com/webrtc/2519,https://issues.webrtc.org/issues/42227317 +https://crbug.com/webrtc/252,https://issues.webrtc.org/issues/42227318 +https://crbug.com/webrtc/2520,https://issues.webrtc.org/issues/42227319 +https://crbug.com/webrtc/2521,https://issues.webrtc.org/issues/42227320 +https://crbug.com/webrtc/2522,https://issues.webrtc.org/issues/42227321 +https://crbug.com/webrtc/2523,https://issues.webrtc.org/issues/42227322 +https://crbug.com/webrtc/2524,https://issues.webrtc.org/issues/42227323 +https://crbug.com/webrtc/2525,https://issues.webrtc.org/issues/42227324 +https://crbug.com/webrtc/2526,https://issues.webrtc.org/issues/42227325 +https://crbug.com/webrtc/2527,https://issues.webrtc.org/issues/42227326 +https://crbug.com/webrtc/2528,https://issues.webrtc.org/issues/42227327 +https://crbug.com/webrtc/2529,https://issues.webrtc.org/issues/42227328 +https://crbug.com/webrtc/253,https://issues.webrtc.org/issues/42227329 +https://crbug.com/webrtc/2530,https://issues.webrtc.org/issues/42227330 +https://crbug.com/webrtc/2531,https://issues.webrtc.org/issues/42227331 +https://crbug.com/webrtc/2532,https://issues.webrtc.org/issues/42227332 +https://crbug.com/webrtc/2533,https://issues.webrtc.org/issues/42227333 +https://crbug.com/webrtc/2534,https://issues.webrtc.org/issues/42227334 +https://crbug.com/webrtc/2535,https://issues.webrtc.org/issues/42227335 +https://crbug.com/webrtc/2536,https://issues.webrtc.org/issues/42227336 +https://crbug.com/webrtc/2537,https://issues.webrtc.org/issues/42227337 +https://crbug.com/webrtc/2538,https://issues.webrtc.org/issues/42227338 
+https://crbug.com/webrtc/2539,https://issues.webrtc.org/issues/42227339 +https://crbug.com/webrtc/254,https://issues.webrtc.org/issues/42227340 +https://crbug.com/webrtc/2540,https://issues.webrtc.org/issues/42227341 +https://crbug.com/webrtc/2541,https://issues.webrtc.org/issues/42227342 +https://crbug.com/webrtc/2542,https://issues.webrtc.org/issues/42227343 +https://crbug.com/webrtc/2543,https://issues.webrtc.org/issues/42227344 +https://crbug.com/webrtc/2544,https://issues.webrtc.org/issues/42227345 +https://crbug.com/webrtc/2545,https://issues.webrtc.org/issues/42227346 +https://crbug.com/webrtc/2546,https://issues.webrtc.org/issues/42227347 +https://crbug.com/webrtc/2547,https://issues.webrtc.org/issues/42227348 +https://crbug.com/webrtc/2548,https://issues.webrtc.org/issues/42227349 +https://crbug.com/webrtc/2549,https://issues.webrtc.org/issues/42227350 +https://crbug.com/webrtc/255,https://issues.webrtc.org/issues/42227351 +https://crbug.com/webrtc/2550,https://issues.webrtc.org/issues/42227352 +https://crbug.com/webrtc/2551,https://issues.webrtc.org/issues/42227353 +https://crbug.com/webrtc/2552,https://issues.webrtc.org/issues/42227354 +https://crbug.com/webrtc/2553,https://issues.webrtc.org/issues/42227355 +https://crbug.com/webrtc/2554,https://issues.webrtc.org/issues/42227356 +https://crbug.com/webrtc/2555,https://issues.webrtc.org/issues/42227357 +https://crbug.com/webrtc/2556,https://issues.webrtc.org/issues/42227358 +https://crbug.com/webrtc/2557,https://issues.webrtc.org/issues/42227359 +https://crbug.com/webrtc/2558,https://issues.webrtc.org/issues/42227360 +https://crbug.com/webrtc/2559,https://issues.webrtc.org/issues/42227361 +https://crbug.com/webrtc/256,https://issues.webrtc.org/issues/42227362 +https://crbug.com/webrtc/2560,https://issues.webrtc.org/issues/42227363 +https://crbug.com/webrtc/2561,https://issues.webrtc.org/issues/42227364 +https://crbug.com/webrtc/2562,https://issues.webrtc.org/issues/42227365 +https://crbug.com/webrtc/2563,https://issues.webrtc.org/issues/42227366 +https://crbug.com/webrtc/2564,https://issues.webrtc.org/issues/42227367 +https://crbug.com/webrtc/2565,https://issues.webrtc.org/issues/42227368 +https://crbug.com/webrtc/2566,https://issues.webrtc.org/issues/42227369 +https://crbug.com/webrtc/2567,https://issues.webrtc.org/issues/42227370 +https://crbug.com/webrtc/2568,https://issues.webrtc.org/issues/42227371 +https://crbug.com/webrtc/2569,https://issues.webrtc.org/issues/42227372 +https://crbug.com/webrtc/257,https://issues.webrtc.org/issues/42227373 +https://crbug.com/webrtc/2570,https://issues.webrtc.org/issues/42227374 +https://crbug.com/webrtc/2571,https://issues.webrtc.org/issues/42227375 +https://crbug.com/webrtc/2572,https://issues.webrtc.org/issues/42227376 +https://crbug.com/webrtc/2573,https://issues.webrtc.org/issues/42227377 +https://crbug.com/webrtc/2574,https://issues.webrtc.org/issues/42227378 +https://crbug.com/webrtc/2575,https://issues.webrtc.org/issues/42227379 +https://crbug.com/webrtc/2576,https://issues.webrtc.org/issues/42227380 +https://crbug.com/webrtc/2577,https://issues.webrtc.org/issues/42227381 +https://crbug.com/webrtc/2578,https://issues.webrtc.org/issues/42227382 +https://crbug.com/webrtc/2579,https://issues.webrtc.org/issues/42227383 +https://crbug.com/webrtc/258,https://issues.webrtc.org/issues/42227384 +https://crbug.com/webrtc/2580,https://issues.webrtc.org/issues/42227385 +https://crbug.com/webrtc/2581,https://issues.webrtc.org/issues/42227386 
+https://crbug.com/webrtc/2582,https://issues.webrtc.org/issues/42227387 +https://crbug.com/webrtc/2583,https://issues.webrtc.org/issues/42227388 +https://crbug.com/webrtc/2584,https://issues.webrtc.org/issues/42227389 +https://crbug.com/webrtc/2585,https://issues.webrtc.org/issues/42227390 +https://crbug.com/webrtc/2586,https://issues.webrtc.org/issues/42227391 +https://crbug.com/webrtc/2587,https://issues.webrtc.org/issues/42227392 +https://crbug.com/webrtc/2588,https://issues.webrtc.org/issues/42227393 +https://crbug.com/webrtc/2589,https://issues.webrtc.org/issues/42227394 +https://crbug.com/webrtc/259,https://issues.webrtc.org/issues/42227395 +https://crbug.com/webrtc/2590,https://issues.webrtc.org/issues/42227396 +https://crbug.com/webrtc/2591,https://issues.webrtc.org/issues/42227397 +https://crbug.com/webrtc/2592,https://issues.webrtc.org/issues/42227398 +https://crbug.com/webrtc/2593,https://issues.webrtc.org/issues/42227399 +https://crbug.com/webrtc/2594,https://issues.webrtc.org/issues/42227400 +https://crbug.com/webrtc/2595,https://issues.webrtc.org/issues/42227401 +https://crbug.com/webrtc/2596,https://issues.webrtc.org/issues/42227402 +https://crbug.com/webrtc/2599,https://issues.webrtc.org/issues/42227403 +https://crbug.com/webrtc/26,https://issues.webrtc.org/issues/42227404 +https://crbug.com/webrtc/260,https://issues.webrtc.org/issues/42227405 +https://crbug.com/webrtc/2600,https://issues.webrtc.org/issues/42227406 +https://crbug.com/webrtc/2601,https://issues.webrtc.org/issues/42227407 +https://crbug.com/webrtc/2602,https://issues.webrtc.org/issues/42227408 +https://crbug.com/webrtc/2603,https://issues.webrtc.org/issues/42227409 +https://crbug.com/webrtc/2604,https://issues.webrtc.org/issues/42227410 +https://crbug.com/webrtc/2605,https://issues.webrtc.org/issues/42227411 +https://crbug.com/webrtc/2606,https://issues.webrtc.org/issues/42227412 +https://crbug.com/webrtc/2607,https://issues.webrtc.org/issues/42227413 +https://crbug.com/webrtc/2608,https://issues.webrtc.org/issues/42227414 +https://crbug.com/webrtc/2609,https://issues.webrtc.org/issues/42227415 +https://crbug.com/webrtc/261,https://issues.webrtc.org/issues/42227416 +https://crbug.com/webrtc/2610,https://issues.webrtc.org/issues/42227417 +https://crbug.com/webrtc/2611,https://issues.webrtc.org/issues/42227418 +https://crbug.com/webrtc/2612,https://issues.webrtc.org/issues/42227419 +https://crbug.com/webrtc/2613,https://issues.webrtc.org/issues/42227420 +https://crbug.com/webrtc/2614,https://issues.webrtc.org/issues/42227421 +https://crbug.com/webrtc/2615,https://issues.webrtc.org/issues/42227422 +https://crbug.com/webrtc/2616,https://issues.webrtc.org/issues/42227423 +https://crbug.com/webrtc/2617,https://issues.webrtc.org/issues/42227424 +https://crbug.com/webrtc/2618,https://issues.webrtc.org/issues/42227425 +https://crbug.com/webrtc/2619,https://issues.webrtc.org/issues/42227426 +https://crbug.com/webrtc/262,https://issues.webrtc.org/issues/42227427 +https://crbug.com/webrtc/2620,https://issues.webrtc.org/issues/42227428 +https://crbug.com/webrtc/2621,https://issues.webrtc.org/issues/42227429 +https://crbug.com/webrtc/2622,https://issues.webrtc.org/issues/42227430 +https://crbug.com/webrtc/2623,https://issues.webrtc.org/issues/42227431 +https://crbug.com/webrtc/2624,https://issues.webrtc.org/issues/42227432 +https://crbug.com/webrtc/2625,https://issues.webrtc.org/issues/42227433 +https://crbug.com/webrtc/2626,https://issues.webrtc.org/issues/42227434 
+https://crbug.com/webrtc/2627,https://issues.webrtc.org/issues/42227435 +https://crbug.com/webrtc/2628,https://issues.webrtc.org/issues/42227436 +https://crbug.com/webrtc/2629,https://issues.webrtc.org/issues/42227437 +https://crbug.com/webrtc/263,https://issues.webrtc.org/issues/42227438 +https://crbug.com/webrtc/2630,https://issues.webrtc.org/issues/42227439 +https://crbug.com/webrtc/2631,https://issues.webrtc.org/issues/42227440 +https://crbug.com/webrtc/2632,https://issues.webrtc.org/issues/42227441 +https://crbug.com/webrtc/2633,https://issues.webrtc.org/issues/42227442 +https://crbug.com/webrtc/2634,https://issues.webrtc.org/issues/42227443 +https://crbug.com/webrtc/2635,https://issues.webrtc.org/issues/42227444 +https://crbug.com/webrtc/2636,https://issues.webrtc.org/issues/42227445 +https://crbug.com/webrtc/2637,https://issues.webrtc.org/issues/42227446 +https://crbug.com/webrtc/2638,https://issues.webrtc.org/issues/42227447 +https://crbug.com/webrtc/2639,https://issues.webrtc.org/issues/42227448 +https://crbug.com/webrtc/264,https://issues.webrtc.org/issues/42227449 +https://crbug.com/webrtc/2640,https://issues.webrtc.org/issues/42227450 +https://crbug.com/webrtc/2641,https://issues.webrtc.org/issues/42227451 +https://crbug.com/webrtc/2642,https://issues.webrtc.org/issues/42227452 +https://crbug.com/webrtc/2643,https://issues.webrtc.org/issues/42227453 +https://crbug.com/webrtc/2645,https://issues.webrtc.org/issues/42227454 +https://crbug.com/webrtc/2646,https://issues.webrtc.org/issues/42227455 +https://crbug.com/webrtc/2647,https://issues.webrtc.org/issues/42227456 +https://crbug.com/webrtc/2648,https://issues.webrtc.org/issues/42227457 +https://crbug.com/webrtc/2649,https://issues.webrtc.org/issues/42227458 +https://crbug.com/webrtc/265,https://issues.webrtc.org/issues/42227459 +https://crbug.com/webrtc/2650,https://issues.webrtc.org/issues/42227460 +https://crbug.com/webrtc/2651,https://issues.webrtc.org/issues/42227461 +https://crbug.com/webrtc/2652,https://issues.webrtc.org/issues/42227462 +https://crbug.com/webrtc/2653,https://issues.webrtc.org/issues/42227463 +https://crbug.com/webrtc/2654,https://issues.webrtc.org/issues/42227464 +https://crbug.com/webrtc/2655,https://issues.webrtc.org/issues/42227465 +https://crbug.com/webrtc/2656,https://issues.webrtc.org/issues/42227466 +https://crbug.com/webrtc/2657,https://issues.webrtc.org/issues/42227467 +https://crbug.com/webrtc/2658,https://issues.webrtc.org/issues/42227468 +https://crbug.com/webrtc/2659,https://issues.webrtc.org/issues/42227469 +https://crbug.com/webrtc/266,https://issues.webrtc.org/issues/42227470 +https://crbug.com/webrtc/2660,https://issues.webrtc.org/issues/42227471 +https://crbug.com/webrtc/2661,https://issues.webrtc.org/issues/42227472 +https://crbug.com/webrtc/2662,https://issues.webrtc.org/issues/42227473 +https://crbug.com/webrtc/2663,https://issues.webrtc.org/issues/42227474 +https://crbug.com/webrtc/2664,https://issues.webrtc.org/issues/42227475 +https://crbug.com/webrtc/2665,https://issues.webrtc.org/issues/42227476 +https://crbug.com/webrtc/2666,https://issues.webrtc.org/issues/42227477 +https://crbug.com/webrtc/2667,https://issues.webrtc.org/issues/42227478 +https://crbug.com/webrtc/2668,https://issues.webrtc.org/issues/42227479 +https://crbug.com/webrtc/2669,https://issues.webrtc.org/issues/42227480 +https://crbug.com/webrtc/267,https://issues.webrtc.org/issues/42227481 +https://crbug.com/webrtc/2670,https://issues.webrtc.org/issues/42227482 
+https://crbug.com/webrtc/2671,https://issues.webrtc.org/issues/42227483 +https://crbug.com/webrtc/2672,https://issues.webrtc.org/issues/42227484 +https://crbug.com/webrtc/2673,https://issues.webrtc.org/issues/42227485 +https://crbug.com/webrtc/2674,https://issues.webrtc.org/issues/42227486 +https://crbug.com/webrtc/2675,https://issues.webrtc.org/issues/42227487 +https://crbug.com/webrtc/2676,https://issues.webrtc.org/issues/42227488 +https://crbug.com/webrtc/2677,https://issues.webrtc.org/issues/42227489 +https://crbug.com/webrtc/2678,https://issues.webrtc.org/issues/42227490 +https://crbug.com/webrtc/2679,https://issues.webrtc.org/issues/42227491 +https://crbug.com/webrtc/268,https://issues.webrtc.org/issues/42227492 +https://crbug.com/webrtc/2680,https://issues.webrtc.org/issues/42227493 +https://crbug.com/webrtc/2681,https://issues.webrtc.org/issues/42227494 +https://crbug.com/webrtc/2682,https://issues.webrtc.org/issues/42227495 +https://crbug.com/webrtc/2683,https://issues.webrtc.org/issues/42227496 +https://crbug.com/webrtc/2684,https://issues.webrtc.org/issues/42227497 +https://crbug.com/webrtc/2685,https://issues.webrtc.org/issues/42227498 +https://crbug.com/webrtc/2686,https://issues.webrtc.org/issues/42227499 +https://crbug.com/webrtc/2687,https://issues.webrtc.org/issues/42227500 +https://crbug.com/webrtc/2688,https://issues.webrtc.org/issues/42227501 +https://crbug.com/webrtc/2689,https://issues.webrtc.org/issues/42227502 +https://crbug.com/webrtc/269,https://issues.webrtc.org/issues/42227503 +https://crbug.com/webrtc/2690,https://issues.webrtc.org/issues/42227504 +https://crbug.com/webrtc/2691,https://issues.webrtc.org/issues/42227505 +https://crbug.com/webrtc/2692,https://issues.webrtc.org/issues/42227506 +https://crbug.com/webrtc/2693,https://issues.webrtc.org/issues/42227507 +https://crbug.com/webrtc/2694,https://issues.webrtc.org/issues/42227508 +https://crbug.com/webrtc/2695,https://issues.webrtc.org/issues/42227509 +https://crbug.com/webrtc/2696,https://issues.webrtc.org/issues/42227510 +https://crbug.com/webrtc/2697,https://issues.webrtc.org/issues/42227511 +https://crbug.com/webrtc/2698,https://issues.webrtc.org/issues/42227512 +https://crbug.com/webrtc/2699,https://issues.webrtc.org/issues/42227513 +https://crbug.com/webrtc/27,https://issues.webrtc.org/issues/42227514 +https://crbug.com/webrtc/270,https://issues.webrtc.org/issues/42227515 +https://crbug.com/webrtc/2700,https://issues.webrtc.org/issues/42227516 +https://crbug.com/webrtc/2701,https://issues.webrtc.org/issues/42227517 +https://crbug.com/webrtc/2702,https://issues.webrtc.org/issues/42227518 +https://crbug.com/webrtc/2703,https://issues.webrtc.org/issues/42227519 +https://crbug.com/webrtc/2704,https://issues.webrtc.org/issues/42227520 +https://crbug.com/webrtc/2705,https://issues.webrtc.org/issues/42227521 +https://crbug.com/webrtc/2706,https://issues.webrtc.org/issues/42227522 +https://crbug.com/webrtc/2707,https://issues.webrtc.org/issues/42227523 +https://crbug.com/webrtc/2708,https://issues.webrtc.org/issues/42227524 +https://crbug.com/webrtc/2709,https://issues.webrtc.org/issues/42227525 +https://crbug.com/webrtc/271,https://issues.webrtc.org/issues/42227526 +https://crbug.com/webrtc/2710,https://issues.webrtc.org/issues/42227527 +https://crbug.com/webrtc/2711,https://issues.webrtc.org/issues/42227528 +https://crbug.com/webrtc/2712,https://issues.webrtc.org/issues/42227529 +https://crbug.com/webrtc/2713,https://issues.webrtc.org/issues/42227530 
+https://crbug.com/webrtc/2714,https://issues.webrtc.org/issues/42227531 +https://crbug.com/webrtc/2715,https://issues.webrtc.org/issues/42227532 +https://crbug.com/webrtc/2716,https://issues.webrtc.org/issues/42227533 +https://crbug.com/webrtc/2717,https://issues.webrtc.org/issues/42227534 +https://crbug.com/webrtc/2718,https://issues.webrtc.org/issues/42227535 +https://crbug.com/webrtc/2719,https://issues.webrtc.org/issues/42227536 +https://crbug.com/webrtc/272,https://issues.webrtc.org/issues/42227537 +https://crbug.com/webrtc/2720,https://issues.webrtc.org/issues/42227538 +https://crbug.com/webrtc/2721,https://issues.webrtc.org/issues/42227539 +https://crbug.com/webrtc/2722,https://issues.webrtc.org/issues/42227540 +https://crbug.com/webrtc/2723,https://issues.webrtc.org/issues/42227541 +https://crbug.com/webrtc/2724,https://issues.webrtc.org/issues/42227542 +https://crbug.com/webrtc/2725,https://issues.webrtc.org/issues/42227543 +https://crbug.com/webrtc/2726,https://issues.webrtc.org/issues/42227544 +https://crbug.com/webrtc/2727,https://issues.webrtc.org/issues/42227545 +https://crbug.com/webrtc/2728,https://issues.webrtc.org/issues/42227546 +https://crbug.com/webrtc/2729,https://issues.webrtc.org/issues/42227547 +https://crbug.com/webrtc/273,https://issues.webrtc.org/issues/42227548 +https://crbug.com/webrtc/2730,https://issues.webrtc.org/issues/42227549 +https://crbug.com/webrtc/2731,https://issues.webrtc.org/issues/42227550 +https://crbug.com/webrtc/2732,https://issues.webrtc.org/issues/42227551 +https://crbug.com/webrtc/2733,https://issues.webrtc.org/issues/42227552 +https://crbug.com/webrtc/2734,https://issues.webrtc.org/issues/42227553 +https://crbug.com/webrtc/2735,https://issues.webrtc.org/issues/42227554 +https://crbug.com/webrtc/2736,https://issues.webrtc.org/issues/42227555 +https://crbug.com/webrtc/2737,https://issues.webrtc.org/issues/42227556 +https://crbug.com/webrtc/2738,https://issues.webrtc.org/issues/42227557 +https://crbug.com/webrtc/2739,https://issues.webrtc.org/issues/42227558 +https://crbug.com/webrtc/274,https://issues.webrtc.org/issues/42227559 +https://crbug.com/webrtc/2740,https://issues.webrtc.org/issues/42227560 +https://crbug.com/webrtc/2741,https://issues.webrtc.org/issues/42227561 +https://crbug.com/webrtc/2742,https://issues.webrtc.org/issues/42227562 +https://crbug.com/webrtc/2743,https://issues.webrtc.org/issues/42227563 +https://crbug.com/webrtc/2744,https://issues.webrtc.org/issues/42227564 +https://crbug.com/webrtc/2745,https://issues.webrtc.org/issues/42227565 +https://crbug.com/webrtc/2746,https://issues.webrtc.org/issues/42227566 +https://crbug.com/webrtc/2747,https://issues.webrtc.org/issues/42227567 +https://crbug.com/webrtc/2748,https://issues.webrtc.org/issues/42227568 +https://crbug.com/webrtc/2749,https://issues.webrtc.org/issues/42227569 +https://crbug.com/webrtc/275,https://issues.webrtc.org/issues/42227570 +https://crbug.com/webrtc/2750,https://issues.webrtc.org/issues/42227571 +https://crbug.com/webrtc/2751,https://issues.webrtc.org/issues/42227572 +https://crbug.com/webrtc/2752,https://issues.webrtc.org/issues/42227573 +https://crbug.com/webrtc/2753,https://issues.webrtc.org/issues/42227574 +https://crbug.com/webrtc/2754,https://issues.webrtc.org/issues/42227575 +https://crbug.com/webrtc/2755,https://issues.webrtc.org/issues/42227576 +https://crbug.com/webrtc/2756,https://issues.webrtc.org/issues/42227577 +https://crbug.com/webrtc/2757,https://issues.webrtc.org/issues/42227578 
+https://crbug.com/webrtc/2758,https://issues.webrtc.org/issues/42227579 +https://crbug.com/webrtc/2759,https://issues.webrtc.org/issues/42227580 +https://crbug.com/webrtc/276,https://issues.webrtc.org/issues/42227581 +https://crbug.com/webrtc/2760,https://issues.webrtc.org/issues/42227582 +https://crbug.com/webrtc/2761,https://issues.webrtc.org/issues/42227583 +https://crbug.com/webrtc/2762,https://issues.webrtc.org/issues/42227584 +https://crbug.com/webrtc/2763,https://issues.webrtc.org/issues/42227585 +https://crbug.com/webrtc/2764,https://issues.webrtc.org/issues/42227586 +https://crbug.com/webrtc/2765,https://issues.webrtc.org/issues/42227587 +https://crbug.com/webrtc/2766,https://issues.webrtc.org/issues/42227588 +https://crbug.com/webrtc/2767,https://issues.webrtc.org/issues/42227589 +https://crbug.com/webrtc/2768,https://issues.webrtc.org/issues/42227590 +https://crbug.com/webrtc/2769,https://issues.webrtc.org/issues/42227591 +https://crbug.com/webrtc/277,https://issues.webrtc.org/issues/42227592 +https://crbug.com/webrtc/2770,https://issues.webrtc.org/issues/42227593 +https://crbug.com/webrtc/2771,https://issues.webrtc.org/issues/42227594 +https://crbug.com/webrtc/2772,https://issues.webrtc.org/issues/42227595 +https://crbug.com/webrtc/2773,https://issues.webrtc.org/issues/42227596 +https://crbug.com/webrtc/2774,https://issues.webrtc.org/issues/42227597 +https://crbug.com/webrtc/2775,https://issues.webrtc.org/issues/42227598 +https://crbug.com/webrtc/2776,https://issues.webrtc.org/issues/42227599 +https://crbug.com/webrtc/2777,https://issues.webrtc.org/issues/42227600 +https://crbug.com/webrtc/2778,https://issues.webrtc.org/issues/42227601 +https://crbug.com/webrtc/2779,https://issues.webrtc.org/issues/42227602 +https://crbug.com/webrtc/278,https://issues.webrtc.org/issues/42227603 +https://crbug.com/webrtc/2780,https://issues.webrtc.org/issues/42227604 +https://crbug.com/webrtc/2781,https://issues.webrtc.org/issues/42227605 +https://crbug.com/webrtc/2782,https://issues.webrtc.org/issues/42227606 +https://crbug.com/webrtc/2783,https://issues.webrtc.org/issues/42227607 +https://crbug.com/webrtc/2784,https://issues.webrtc.org/issues/42227608 +https://crbug.com/webrtc/2785,https://issues.webrtc.org/issues/42227609 +https://crbug.com/webrtc/2786,https://issues.webrtc.org/issues/42227610 +https://crbug.com/webrtc/2787,https://issues.webrtc.org/issues/42227611 +https://crbug.com/webrtc/2788,https://issues.webrtc.org/issues/42227612 +https://crbug.com/webrtc/2789,https://issues.webrtc.org/issues/42227613 +https://crbug.com/webrtc/279,https://issues.webrtc.org/issues/42227614 +https://crbug.com/webrtc/2790,https://issues.webrtc.org/issues/42227615 +https://crbug.com/webrtc/2791,https://issues.webrtc.org/issues/42227616 +https://crbug.com/webrtc/2792,https://issues.webrtc.org/issues/42227617 +https://crbug.com/webrtc/2793,https://issues.webrtc.org/issues/42227618 +https://crbug.com/webrtc/2794,https://issues.webrtc.org/issues/42227619 +https://crbug.com/webrtc/2795,https://issues.webrtc.org/issues/42227620 +https://crbug.com/webrtc/2796,https://issues.webrtc.org/issues/42227621 +https://crbug.com/webrtc/2797,https://issues.webrtc.org/issues/42227622 +https://crbug.com/webrtc/2798,https://issues.webrtc.org/issues/42227623 +https://crbug.com/webrtc/2799,https://issues.webrtc.org/issues/42227624 +https://crbug.com/webrtc/28,https://issues.webrtc.org/issues/42227625 +https://crbug.com/webrtc/280,https://issues.webrtc.org/issues/42227626 
+https://crbug.com/webrtc/2800,https://issues.webrtc.org/issues/42227627 +https://crbug.com/webrtc/2801,https://issues.webrtc.org/issues/42227628 +https://crbug.com/webrtc/2802,https://issues.webrtc.org/issues/42227629 +https://crbug.com/webrtc/2803,https://issues.webrtc.org/issues/42227630 +https://crbug.com/webrtc/2804,https://issues.webrtc.org/issues/42227631 +https://crbug.com/webrtc/2805,https://issues.webrtc.org/issues/42227632 +https://crbug.com/webrtc/2806,https://issues.webrtc.org/issues/42227633 +https://crbug.com/webrtc/2807,https://issues.webrtc.org/issues/42227634 +https://crbug.com/webrtc/2808,https://issues.webrtc.org/issues/42227635 +https://crbug.com/webrtc/2809,https://issues.webrtc.org/issues/42227636 +https://crbug.com/webrtc/281,https://issues.webrtc.org/issues/42227637 +https://crbug.com/webrtc/2810,https://issues.webrtc.org/issues/42227638 +https://crbug.com/webrtc/2811,https://issues.webrtc.org/issues/42227639 +https://crbug.com/webrtc/2812,https://issues.webrtc.org/issues/42227640 +https://crbug.com/webrtc/2813,https://issues.webrtc.org/issues/42227641 +https://crbug.com/webrtc/2814,https://issues.webrtc.org/issues/42227642 +https://crbug.com/webrtc/2815,https://issues.webrtc.org/issues/42227643 +https://crbug.com/webrtc/2816,https://issues.webrtc.org/issues/42227644 +https://crbug.com/webrtc/2817,https://issues.webrtc.org/issues/42227645 +https://crbug.com/webrtc/2818,https://issues.webrtc.org/issues/42227646 +https://crbug.com/webrtc/2819,https://issues.webrtc.org/issues/42227647 +https://crbug.com/webrtc/282,https://issues.webrtc.org/issues/42227648 +https://crbug.com/webrtc/2820,https://issues.webrtc.org/issues/42227649 +https://crbug.com/webrtc/2821,https://issues.webrtc.org/issues/42227650 +https://crbug.com/webrtc/2823,https://issues.webrtc.org/issues/42227651 +https://crbug.com/webrtc/2824,https://issues.webrtc.org/issues/42227652 +https://crbug.com/webrtc/2825,https://issues.webrtc.org/issues/42227653 +https://crbug.com/webrtc/2826,https://issues.webrtc.org/issues/42227654 +https://crbug.com/webrtc/2827,https://issues.webrtc.org/issues/42227655 +https://crbug.com/webrtc/2828,https://issues.webrtc.org/issues/42227656 +https://crbug.com/webrtc/2829,https://issues.webrtc.org/issues/42227657 +https://crbug.com/webrtc/283,https://issues.webrtc.org/issues/42227658 +https://crbug.com/webrtc/2830,https://issues.webrtc.org/issues/42227659 +https://crbug.com/webrtc/2831,https://issues.webrtc.org/issues/42227660 +https://crbug.com/webrtc/2832,https://issues.webrtc.org/issues/42227661 +https://crbug.com/webrtc/2833,https://issues.webrtc.org/issues/42227662 +https://crbug.com/webrtc/2834,https://issues.webrtc.org/issues/42227663 +https://crbug.com/webrtc/2835,https://issues.webrtc.org/issues/42227664 +https://crbug.com/webrtc/2836,https://issues.webrtc.org/issues/42227665 +https://crbug.com/webrtc/2837,https://issues.webrtc.org/issues/42227666 +https://crbug.com/webrtc/2838,https://issues.webrtc.org/issues/42227667 +https://crbug.com/webrtc/2839,https://issues.webrtc.org/issues/42227668 +https://crbug.com/webrtc/284,https://issues.webrtc.org/issues/42227669 +https://crbug.com/webrtc/2840,https://issues.webrtc.org/issues/42227670 +https://crbug.com/webrtc/2841,https://issues.webrtc.org/issues/42227671 +https://crbug.com/webrtc/2842,https://issues.webrtc.org/issues/42227672 +https://crbug.com/webrtc/2843,https://issues.webrtc.org/issues/42227673 +https://crbug.com/webrtc/2844,https://issues.webrtc.org/issues/42227674 
+https://crbug.com/webrtc/2845,https://issues.webrtc.org/issues/42227675 +https://crbug.com/webrtc/2846,https://issues.webrtc.org/issues/42227676 +https://crbug.com/webrtc/2847,https://issues.webrtc.org/issues/42227677 +https://crbug.com/webrtc/2848,https://issues.webrtc.org/issues/42227678 +https://crbug.com/webrtc/2849,https://issues.webrtc.org/issues/42227679 +https://crbug.com/webrtc/285,https://issues.webrtc.org/issues/42227680 +https://crbug.com/webrtc/2850,https://issues.webrtc.org/issues/42227681 +https://crbug.com/webrtc/2851,https://issues.webrtc.org/issues/42227682 +https://crbug.com/webrtc/2852,https://issues.webrtc.org/issues/42227683 +https://crbug.com/webrtc/2853,https://issues.webrtc.org/issues/42227684 +https://crbug.com/webrtc/2854,https://issues.webrtc.org/issues/42227685 +https://crbug.com/webrtc/2855,https://issues.webrtc.org/issues/42227686 +https://crbug.com/webrtc/2856,https://issues.webrtc.org/issues/42227687 +https://crbug.com/webrtc/2857,https://issues.webrtc.org/issues/42227688 +https://crbug.com/webrtc/2858,https://issues.webrtc.org/issues/42227689 +https://crbug.com/webrtc/2859,https://issues.webrtc.org/issues/42227690 +https://crbug.com/webrtc/286,https://issues.webrtc.org/issues/42227691 +https://crbug.com/webrtc/2860,https://issues.webrtc.org/issues/42227692 +https://crbug.com/webrtc/2861,https://issues.webrtc.org/issues/42227693 +https://crbug.com/webrtc/2862,https://issues.webrtc.org/issues/42227694 +https://crbug.com/webrtc/2864,https://issues.webrtc.org/issues/42227695 +https://crbug.com/webrtc/2865,https://issues.webrtc.org/issues/42227696 +https://crbug.com/webrtc/2866,https://issues.webrtc.org/issues/42227697 +https://crbug.com/webrtc/2867,https://issues.webrtc.org/issues/42227698 +https://crbug.com/webrtc/2868,https://issues.webrtc.org/issues/42227699 +https://crbug.com/webrtc/2869,https://issues.webrtc.org/issues/42227700 +https://crbug.com/webrtc/287,https://issues.webrtc.org/issues/42227701 +https://crbug.com/webrtc/2870,https://issues.webrtc.org/issues/42227702 +https://crbug.com/webrtc/2871,https://issues.webrtc.org/issues/42227703 +https://crbug.com/webrtc/2872,https://issues.webrtc.org/issues/42227704 +https://crbug.com/webrtc/2873,https://issues.webrtc.org/issues/42227705 +https://crbug.com/webrtc/2874,https://issues.webrtc.org/issues/42227706 +https://crbug.com/webrtc/2875,https://issues.webrtc.org/issues/42227707 +https://crbug.com/webrtc/2876,https://issues.webrtc.org/issues/42227708 +https://crbug.com/webrtc/2877,https://issues.webrtc.org/issues/42227709 +https://crbug.com/webrtc/2878,https://issues.webrtc.org/issues/42227710 +https://crbug.com/webrtc/2879,https://issues.webrtc.org/issues/42227711 +https://crbug.com/webrtc/288,https://issues.webrtc.org/issues/42227712 +https://crbug.com/webrtc/2880,https://issues.webrtc.org/issues/42227713 +https://crbug.com/webrtc/2881,https://issues.webrtc.org/issues/42227714 +https://crbug.com/webrtc/2882,https://issues.webrtc.org/issues/42227715 +https://crbug.com/webrtc/2883,https://issues.webrtc.org/issues/42227716 +https://crbug.com/webrtc/2884,https://issues.webrtc.org/issues/42227717 +https://crbug.com/webrtc/2885,https://issues.webrtc.org/issues/42227718 +https://crbug.com/webrtc/2886,https://issues.webrtc.org/issues/42227719 +https://crbug.com/webrtc/2887,https://issues.webrtc.org/issues/42227720 +https://crbug.com/webrtc/2888,https://issues.webrtc.org/issues/42227721 +https://crbug.com/webrtc/2889,https://issues.webrtc.org/issues/42227722 
+https://crbug.com/webrtc/289,https://issues.webrtc.org/issues/42227723 +https://crbug.com/webrtc/2890,https://issues.webrtc.org/issues/42227724 +https://crbug.com/webrtc/2891,https://issues.webrtc.org/issues/42227725 +https://crbug.com/webrtc/2892,https://issues.webrtc.org/issues/42227726 +https://crbug.com/webrtc/2893,https://issues.webrtc.org/issues/42227727 +https://crbug.com/webrtc/2894,https://issues.webrtc.org/issues/42227728 +https://crbug.com/webrtc/2895,https://issues.webrtc.org/issues/42227729 +https://crbug.com/webrtc/2896,https://issues.webrtc.org/issues/42227730 +https://crbug.com/webrtc/2897,https://issues.webrtc.org/issues/42227731 +https://crbug.com/webrtc/2898,https://issues.webrtc.org/issues/42227732 +https://crbug.com/webrtc/2899,https://issues.webrtc.org/issues/42227733 +https://crbug.com/webrtc/29,https://issues.webrtc.org/issues/42227734 +https://crbug.com/webrtc/290,https://issues.webrtc.org/issues/42227735 +https://crbug.com/webrtc/2900,https://issues.webrtc.org/issues/42227736 +https://crbug.com/webrtc/2901,https://issues.webrtc.org/issues/42227737 +https://crbug.com/webrtc/2902,https://issues.webrtc.org/issues/42227738 +https://crbug.com/webrtc/2903,https://issues.webrtc.org/issues/42227739 +https://crbug.com/webrtc/2904,https://issues.webrtc.org/issues/42227740 +https://crbug.com/webrtc/2905,https://issues.webrtc.org/issues/42227741 +https://crbug.com/webrtc/2906,https://issues.webrtc.org/issues/42227742 +https://crbug.com/webrtc/2907,https://issues.webrtc.org/issues/42227743 +https://crbug.com/webrtc/2908,https://issues.webrtc.org/issues/42227744 +https://crbug.com/webrtc/2909,https://issues.webrtc.org/issues/42227745 +https://crbug.com/webrtc/291,https://issues.webrtc.org/issues/42227746 +https://crbug.com/webrtc/2910,https://issues.webrtc.org/issues/42227747 +https://crbug.com/webrtc/2911,https://issues.webrtc.org/issues/42227748 +https://crbug.com/webrtc/2912,https://issues.webrtc.org/issues/42227749 +https://crbug.com/webrtc/2913,https://issues.webrtc.org/issues/42227750 +https://crbug.com/webrtc/2914,https://issues.webrtc.org/issues/42227751 +https://crbug.com/webrtc/2915,https://issues.webrtc.org/issues/42227752 +https://crbug.com/webrtc/2916,https://issues.webrtc.org/issues/42227753 +https://crbug.com/webrtc/2917,https://issues.webrtc.org/issues/42227754 +https://crbug.com/webrtc/2918,https://issues.webrtc.org/issues/42227755 +https://crbug.com/webrtc/2919,https://issues.webrtc.org/issues/42227756 +https://crbug.com/webrtc/292,https://issues.webrtc.org/issues/42227757 +https://crbug.com/webrtc/2920,https://issues.webrtc.org/issues/42227758 +https://crbug.com/webrtc/2921,https://issues.webrtc.org/issues/42227759 +https://crbug.com/webrtc/2922,https://issues.webrtc.org/issues/42227760 +https://crbug.com/webrtc/2923,https://issues.webrtc.org/issues/42227761 +https://crbug.com/webrtc/2924,https://issues.webrtc.org/issues/42227762 +https://crbug.com/webrtc/2925,https://issues.webrtc.org/issues/42227763 +https://crbug.com/webrtc/2926,https://issues.webrtc.org/issues/42227764 +https://crbug.com/webrtc/2927,https://issues.webrtc.org/issues/42227765 +https://crbug.com/webrtc/2928,https://issues.webrtc.org/issues/42227766 +https://crbug.com/webrtc/2929,https://issues.webrtc.org/issues/42227767 +https://crbug.com/webrtc/293,https://issues.webrtc.org/issues/42227768 +https://crbug.com/webrtc/2930,https://issues.webrtc.org/issues/42227769 +https://crbug.com/webrtc/2931,https://issues.webrtc.org/issues/42227770 
+https://crbug.com/webrtc/2932,https://issues.webrtc.org/issues/42227771 +https://crbug.com/webrtc/2933,https://issues.webrtc.org/issues/42227772 +https://crbug.com/webrtc/2934,https://issues.webrtc.org/issues/42227773 +https://crbug.com/webrtc/2935,https://issues.webrtc.org/issues/42227774 +https://crbug.com/webrtc/2936,https://issues.webrtc.org/issues/42227775 +https://crbug.com/webrtc/2937,https://issues.webrtc.org/issues/42227776 +https://crbug.com/webrtc/2938,https://issues.webrtc.org/issues/42227777 +https://crbug.com/webrtc/2939,https://issues.webrtc.org/issues/42227778 +https://crbug.com/webrtc/294,https://issues.webrtc.org/issues/42227779 +https://crbug.com/webrtc/2940,https://issues.webrtc.org/issues/42227780 +https://crbug.com/webrtc/2941,https://issues.webrtc.org/issues/42227781 +https://crbug.com/webrtc/2942,https://issues.webrtc.org/issues/42227782 +https://crbug.com/webrtc/2943,https://issues.webrtc.org/issues/42227783 +https://crbug.com/webrtc/2944,https://issues.webrtc.org/issues/42227784 +https://crbug.com/webrtc/2945,https://issues.webrtc.org/issues/42227785 +https://crbug.com/webrtc/2946,https://issues.webrtc.org/issues/42227786 +https://crbug.com/webrtc/2947,https://issues.webrtc.org/issues/42227787 +https://crbug.com/webrtc/2948,https://issues.webrtc.org/issues/42227788 +https://crbug.com/webrtc/2949,https://issues.webrtc.org/issues/42227789 +https://crbug.com/webrtc/295,https://issues.webrtc.org/issues/42227790 +https://crbug.com/webrtc/2950,https://issues.webrtc.org/issues/42227791 +https://crbug.com/webrtc/2951,https://issues.webrtc.org/issues/42227792 +https://crbug.com/webrtc/2952,https://issues.webrtc.org/issues/42227793 +https://crbug.com/webrtc/2953,https://issues.webrtc.org/issues/42227794 +https://crbug.com/webrtc/2954,https://issues.webrtc.org/issues/42227795 +https://crbug.com/webrtc/2955,https://issues.webrtc.org/issues/42227796 +https://crbug.com/webrtc/2956,https://issues.webrtc.org/issues/42227797 +https://crbug.com/webrtc/2957,https://issues.webrtc.org/issues/42227798 +https://crbug.com/webrtc/2958,https://issues.webrtc.org/issues/42227799 +https://crbug.com/webrtc/2959,https://issues.webrtc.org/issues/42227800 +https://crbug.com/webrtc/296,https://issues.webrtc.org/issues/42227801 +https://crbug.com/webrtc/2960,https://issues.webrtc.org/issues/42227802 +https://crbug.com/webrtc/2961,https://issues.webrtc.org/issues/42227803 +https://crbug.com/webrtc/2962,https://issues.webrtc.org/issues/42227804 +https://crbug.com/webrtc/2963,https://issues.webrtc.org/issues/42227805 +https://crbug.com/webrtc/2964,https://issues.webrtc.org/issues/42227806 +https://crbug.com/webrtc/2965,https://issues.webrtc.org/issues/42227807 +https://crbug.com/webrtc/2966,https://issues.webrtc.org/issues/42227808 +https://crbug.com/webrtc/2967,https://issues.webrtc.org/issues/42227809 +https://crbug.com/webrtc/2968,https://issues.webrtc.org/issues/42227810 +https://crbug.com/webrtc/2969,https://issues.webrtc.org/issues/42227811 +https://crbug.com/webrtc/297,https://issues.webrtc.org/issues/42227812 +https://crbug.com/webrtc/2970,https://issues.webrtc.org/issues/42227813 +https://crbug.com/webrtc/2971,https://issues.webrtc.org/issues/42227814 +https://crbug.com/webrtc/2972,https://issues.webrtc.org/issues/42227815 +https://crbug.com/webrtc/2973,https://issues.webrtc.org/issues/42227816 +https://crbug.com/webrtc/2974,https://issues.webrtc.org/issues/42227817 +https://crbug.com/webrtc/2975,https://issues.webrtc.org/issues/42227818 
+https://crbug.com/webrtc/2976,https://issues.webrtc.org/issues/42227819 +https://crbug.com/webrtc/2977,https://issues.webrtc.org/issues/42227820 +https://crbug.com/webrtc/2978,https://issues.webrtc.org/issues/42227821 +https://crbug.com/webrtc/2979,https://issues.webrtc.org/issues/42227822 +https://crbug.com/webrtc/298,https://issues.webrtc.org/issues/42227823 +https://crbug.com/webrtc/2980,https://issues.webrtc.org/issues/42227824 +https://crbug.com/webrtc/2981,https://issues.webrtc.org/issues/42227825 +https://crbug.com/webrtc/2982,https://issues.webrtc.org/issues/42227826 +https://crbug.com/webrtc/2983,https://issues.webrtc.org/issues/42227827 +https://crbug.com/webrtc/2984,https://issues.webrtc.org/issues/42227828 +https://crbug.com/webrtc/2985,https://issues.webrtc.org/issues/42227829 +https://crbug.com/webrtc/2986,https://issues.webrtc.org/issues/42227830 +https://crbug.com/webrtc/2987,https://issues.webrtc.org/issues/42227831 +https://crbug.com/webrtc/2988,https://issues.webrtc.org/issues/42227832 +https://crbug.com/webrtc/2989,https://issues.webrtc.org/issues/42227833 +https://crbug.com/webrtc/299,https://issues.webrtc.org/issues/42227834 +https://crbug.com/webrtc/2990,https://issues.webrtc.org/issues/42227835 +https://crbug.com/webrtc/2991,https://issues.webrtc.org/issues/42227836 +https://crbug.com/webrtc/2992,https://issues.webrtc.org/issues/42227837 +https://crbug.com/webrtc/2993,https://issues.webrtc.org/issues/42227838 +https://crbug.com/webrtc/2994,https://issues.webrtc.org/issues/42227839 +https://crbug.com/webrtc/2995,https://issues.webrtc.org/issues/42227840 +https://crbug.com/webrtc/2996,https://issues.webrtc.org/issues/42227841 +https://crbug.com/webrtc/2997,https://issues.webrtc.org/issues/42227842 +https://crbug.com/webrtc/2998,https://issues.webrtc.org/issues/42227843 +https://crbug.com/webrtc/3,https://issues.webrtc.org/issues/42227844 +https://crbug.com/webrtc/30,https://issues.webrtc.org/issues/42227845 +https://crbug.com/webrtc/300,https://issues.webrtc.org/issues/42227846 +https://crbug.com/webrtc/3000,https://issues.webrtc.org/issues/42227847 +https://crbug.com/webrtc/3001,https://issues.webrtc.org/issues/42227848 +https://crbug.com/webrtc/3002,https://issues.webrtc.org/issues/42227849 +https://crbug.com/webrtc/3003,https://issues.webrtc.org/issues/42227850 +https://crbug.com/webrtc/3004,https://issues.webrtc.org/issues/42227851 +https://crbug.com/webrtc/3005,https://issues.webrtc.org/issues/42227852 +https://crbug.com/webrtc/3006,https://issues.webrtc.org/issues/42227853 +https://crbug.com/webrtc/3007,https://issues.webrtc.org/issues/42227854 +https://crbug.com/webrtc/3008,https://issues.webrtc.org/issues/42227855 +https://crbug.com/webrtc/3009,https://issues.webrtc.org/issues/42227856 +https://crbug.com/webrtc/301,https://issues.webrtc.org/issues/42227857 +https://crbug.com/webrtc/3010,https://issues.webrtc.org/issues/42227858 +https://crbug.com/webrtc/3011,https://issues.webrtc.org/issues/42227859 +https://crbug.com/webrtc/3012,https://issues.webrtc.org/issues/42227860 +https://crbug.com/webrtc/3013,https://issues.webrtc.org/issues/42227861 +https://crbug.com/webrtc/3014,https://issues.webrtc.org/issues/42227862 +https://crbug.com/webrtc/3015,https://issues.webrtc.org/issues/42227863 +https://crbug.com/webrtc/3016,https://issues.webrtc.org/issues/42227864 +https://crbug.com/webrtc/3017,https://issues.webrtc.org/issues/42227865 +https://crbug.com/webrtc/3018,https://issues.webrtc.org/issues/42227866 
+https://crbug.com/webrtc/3019,https://issues.webrtc.org/issues/42227867 +https://crbug.com/webrtc/302,https://issues.webrtc.org/issues/42227868 +https://crbug.com/webrtc/3020,https://issues.webrtc.org/issues/42227869 +https://crbug.com/webrtc/3021,https://issues.webrtc.org/issues/42227870 +https://crbug.com/webrtc/3022,https://issues.webrtc.org/issues/42227871 +https://crbug.com/webrtc/3023,https://issues.webrtc.org/issues/42227872 +https://crbug.com/webrtc/3024,https://issues.webrtc.org/issues/42227873 +https://crbug.com/webrtc/3025,https://issues.webrtc.org/issues/42227874 +https://crbug.com/webrtc/3026,https://issues.webrtc.org/issues/42227875 +https://crbug.com/webrtc/3027,https://issues.webrtc.org/issues/42227876 +https://crbug.com/webrtc/3028,https://issues.webrtc.org/issues/42227877 +https://crbug.com/webrtc/3029,https://issues.webrtc.org/issues/42227878 +https://crbug.com/webrtc/303,https://issues.webrtc.org/issues/42227879 +https://crbug.com/webrtc/3030,https://issues.webrtc.org/issues/42227880 +https://crbug.com/webrtc/3031,https://issues.webrtc.org/issues/42227881 +https://crbug.com/webrtc/3032,https://issues.webrtc.org/issues/42227882 +https://crbug.com/webrtc/3033,https://issues.webrtc.org/issues/42227883 +https://crbug.com/webrtc/3034,https://issues.webrtc.org/issues/42227884 +https://crbug.com/webrtc/3035,https://issues.webrtc.org/issues/42227885 +https://crbug.com/webrtc/3036,https://issues.webrtc.org/issues/42227886 +https://crbug.com/webrtc/3037,https://issues.webrtc.org/issues/42227887 +https://crbug.com/webrtc/3038,https://issues.webrtc.org/issues/42227888 +https://crbug.com/webrtc/3039,https://issues.webrtc.org/issues/42227889 +https://crbug.com/webrtc/304,https://issues.webrtc.org/issues/42227890 +https://crbug.com/webrtc/3040,https://issues.webrtc.org/issues/42227891 +https://crbug.com/webrtc/3041,https://issues.webrtc.org/issues/42227892 +https://crbug.com/webrtc/3042,https://issues.webrtc.org/issues/42227893 +https://crbug.com/webrtc/3043,https://issues.webrtc.org/issues/42227894 +https://crbug.com/webrtc/3044,https://issues.webrtc.org/issues/42227895 +https://crbug.com/webrtc/3045,https://issues.webrtc.org/issues/42227896 +https://crbug.com/webrtc/3046,https://issues.webrtc.org/issues/42227897 +https://crbug.com/webrtc/3047,https://issues.webrtc.org/issues/42227898 +https://crbug.com/webrtc/3048,https://issues.webrtc.org/issues/42227899 +https://crbug.com/webrtc/3049,https://issues.webrtc.org/issues/42227900 +https://crbug.com/webrtc/305,https://issues.webrtc.org/issues/42227901 +https://crbug.com/webrtc/3050,https://issues.webrtc.org/issues/42227902 +https://crbug.com/webrtc/3051,https://issues.webrtc.org/issues/42227903 +https://crbug.com/webrtc/3052,https://issues.webrtc.org/issues/42227904 +https://crbug.com/webrtc/3053,https://issues.webrtc.org/issues/42227905 +https://crbug.com/webrtc/3054,https://issues.webrtc.org/issues/42227906 +https://crbug.com/webrtc/3055,https://issues.webrtc.org/issues/42227907 +https://crbug.com/webrtc/3056,https://issues.webrtc.org/issues/42227908 +https://crbug.com/webrtc/3057,https://issues.webrtc.org/issues/42227909 +https://crbug.com/webrtc/3058,https://issues.webrtc.org/issues/42227910 +https://crbug.com/webrtc/3059,https://issues.webrtc.org/issues/42227911 +https://crbug.com/webrtc/306,https://issues.webrtc.org/issues/42227912 +https://crbug.com/webrtc/3060,https://issues.webrtc.org/issues/42227913 +https://crbug.com/webrtc/3061,https://issues.webrtc.org/issues/42227914 
+https://crbug.com/webrtc/3062,https://issues.webrtc.org/issues/42227915 +https://crbug.com/webrtc/3063,https://issues.webrtc.org/issues/42227916 +https://crbug.com/webrtc/3064,https://issues.webrtc.org/issues/42227917 +https://crbug.com/webrtc/3065,https://issues.webrtc.org/issues/42227918 +https://crbug.com/webrtc/3066,https://issues.webrtc.org/issues/42227919 +https://crbug.com/webrtc/3067,https://issues.webrtc.org/issues/42227920 +https://crbug.com/webrtc/3068,https://issues.webrtc.org/issues/42227921 +https://crbug.com/webrtc/3069,https://issues.webrtc.org/issues/42227922 +https://crbug.com/webrtc/307,https://issues.webrtc.org/issues/42227923 +https://crbug.com/webrtc/3070,https://issues.webrtc.org/issues/42227924 +https://crbug.com/webrtc/3071,https://issues.webrtc.org/issues/42227925 +https://crbug.com/webrtc/3072,https://issues.webrtc.org/issues/42227926 +https://crbug.com/webrtc/3073,https://issues.webrtc.org/issues/42227927 +https://crbug.com/webrtc/3074,https://issues.webrtc.org/issues/42227928 +https://crbug.com/webrtc/3075,https://issues.webrtc.org/issues/42227929 +https://crbug.com/webrtc/3076,https://issues.webrtc.org/issues/42227930 +https://crbug.com/webrtc/3077,https://issues.webrtc.org/issues/42227931 +https://crbug.com/webrtc/3078,https://issues.webrtc.org/issues/42227932 +https://crbug.com/webrtc/3079,https://issues.webrtc.org/issues/42227933 +https://crbug.com/webrtc/308,https://issues.webrtc.org/issues/42227934 +https://crbug.com/webrtc/3080,https://issues.webrtc.org/issues/42227935 +https://crbug.com/webrtc/3081,https://issues.webrtc.org/issues/42227936 +https://crbug.com/webrtc/3082,https://issues.webrtc.org/issues/42227937 +https://crbug.com/webrtc/3083,https://issues.webrtc.org/issues/42227938 +https://crbug.com/webrtc/3084,https://issues.webrtc.org/issues/42227939 +https://crbug.com/webrtc/3085,https://issues.webrtc.org/issues/42227940 +https://crbug.com/webrtc/3086,https://issues.webrtc.org/issues/42227941 +https://crbug.com/webrtc/3087,https://issues.webrtc.org/issues/42227942 +https://crbug.com/webrtc/3088,https://issues.webrtc.org/issues/42227943 +https://crbug.com/webrtc/3089,https://issues.webrtc.org/issues/42227944 +https://crbug.com/webrtc/309,https://issues.webrtc.org/issues/42227945 +https://crbug.com/webrtc/3090,https://issues.webrtc.org/issues/42227946 +https://crbug.com/webrtc/3091,https://issues.webrtc.org/issues/42227947 +https://crbug.com/webrtc/3092,https://issues.webrtc.org/issues/42227948 +https://crbug.com/webrtc/3093,https://issues.webrtc.org/issues/42227949 +https://crbug.com/webrtc/3094,https://issues.webrtc.org/issues/42227950 +https://crbug.com/webrtc/3096,https://issues.webrtc.org/issues/42227951 +https://crbug.com/webrtc/3097,https://issues.webrtc.org/issues/42227952 +https://crbug.com/webrtc/3098,https://issues.webrtc.org/issues/42227953 +https://crbug.com/webrtc/3099,https://issues.webrtc.org/issues/42227954 +https://crbug.com/webrtc/31,https://issues.webrtc.org/issues/42227955 +https://crbug.com/webrtc/310,https://issues.webrtc.org/issues/42227956 +https://crbug.com/webrtc/3100,https://issues.webrtc.org/issues/42227957 +https://crbug.com/webrtc/3101,https://issues.webrtc.org/issues/42227958 +https://crbug.com/webrtc/3102,https://issues.webrtc.org/issues/42227959 +https://crbug.com/webrtc/3103,https://issues.webrtc.org/issues/42227960 +https://crbug.com/webrtc/3104,https://issues.webrtc.org/issues/42227961 +https://crbug.com/webrtc/3105,https://issues.webrtc.org/issues/42227962 
+https://crbug.com/webrtc/3106,https://issues.webrtc.org/issues/42227963 +https://crbug.com/webrtc/3107,https://issues.webrtc.org/issues/42227964 +https://crbug.com/webrtc/3108,https://issues.webrtc.org/issues/42227965 +https://crbug.com/webrtc/3109,https://issues.webrtc.org/issues/42227966 +https://crbug.com/webrtc/311,https://issues.webrtc.org/issues/42227967 +https://crbug.com/webrtc/3110,https://issues.webrtc.org/issues/42227968 +https://crbug.com/webrtc/3111,https://issues.webrtc.org/issues/42227969 +https://crbug.com/webrtc/3112,https://issues.webrtc.org/issues/42227970 +https://crbug.com/webrtc/3113,https://issues.webrtc.org/issues/42227971 +https://crbug.com/webrtc/3114,https://issues.webrtc.org/issues/42227972 +https://crbug.com/webrtc/3115,https://issues.webrtc.org/issues/42227973 +https://crbug.com/webrtc/3116,https://issues.webrtc.org/issues/42227974 +https://crbug.com/webrtc/3117,https://issues.webrtc.org/issues/42227975 +https://crbug.com/webrtc/3118,https://issues.webrtc.org/issues/42227976 +https://crbug.com/webrtc/3119,https://issues.webrtc.org/issues/42227977 +https://crbug.com/webrtc/312,https://issues.webrtc.org/issues/42227978 +https://crbug.com/webrtc/3120,https://issues.webrtc.org/issues/42227979 +https://crbug.com/webrtc/3121,https://issues.webrtc.org/issues/42227980 +https://crbug.com/webrtc/3122,https://issues.webrtc.org/issues/42227981 +https://crbug.com/webrtc/3123,https://issues.webrtc.org/issues/42227982 +https://crbug.com/webrtc/3124,https://issues.webrtc.org/issues/42227983 +https://crbug.com/webrtc/3125,https://issues.webrtc.org/issues/42227984 +https://crbug.com/webrtc/3126,https://issues.webrtc.org/issues/42227985 +https://crbug.com/webrtc/3127,https://issues.webrtc.org/issues/42227986 +https://crbug.com/webrtc/3128,https://issues.webrtc.org/issues/42227987 +https://crbug.com/webrtc/3129,https://issues.webrtc.org/issues/42227988 +https://crbug.com/webrtc/313,https://issues.webrtc.org/issues/42227989 +https://crbug.com/webrtc/3130,https://issues.webrtc.org/issues/42227990 +https://crbug.com/webrtc/3131,https://issues.webrtc.org/issues/42227991 +https://crbug.com/webrtc/3132,https://issues.webrtc.org/issues/42227992 +https://crbug.com/webrtc/3133,https://issues.webrtc.org/issues/42227993 +https://crbug.com/webrtc/3134,https://issues.webrtc.org/issues/42227994 +https://crbug.com/webrtc/3135,https://issues.webrtc.org/issues/42227995 +https://crbug.com/webrtc/3136,https://issues.webrtc.org/issues/42227996 +https://crbug.com/webrtc/3137,https://issues.webrtc.org/issues/42227997 +https://crbug.com/webrtc/3139,https://issues.webrtc.org/issues/42227998 +https://crbug.com/webrtc/314,https://issues.webrtc.org/issues/42227999 +https://crbug.com/webrtc/3140,https://issues.webrtc.org/issues/42228000 +https://crbug.com/webrtc/3141,https://issues.webrtc.org/issues/42228001 +https://crbug.com/webrtc/3142,https://issues.webrtc.org/issues/42228002 +https://crbug.com/webrtc/3143,https://issues.webrtc.org/issues/42228003 +https://crbug.com/webrtc/3144,https://issues.webrtc.org/issues/42228004 +https://crbug.com/webrtc/3145,https://issues.webrtc.org/issues/42228005 +https://crbug.com/webrtc/3146,https://issues.webrtc.org/issues/42228006 +https://crbug.com/webrtc/3147,https://issues.webrtc.org/issues/42228007 +https://crbug.com/webrtc/3148,https://issues.webrtc.org/issues/42228008 +https://crbug.com/webrtc/3149,https://issues.webrtc.org/issues/42228009 +https://crbug.com/webrtc/315,https://issues.webrtc.org/issues/42228010 
+https://crbug.com/webrtc/3150,https://issues.webrtc.org/issues/42228011 +https://crbug.com/webrtc/3151,https://issues.webrtc.org/issues/42228012 +https://crbug.com/webrtc/3152,https://issues.webrtc.org/issues/42228013 +https://crbug.com/webrtc/3153,https://issues.webrtc.org/issues/42228014 +https://crbug.com/webrtc/3154,https://issues.webrtc.org/issues/42228015 +https://crbug.com/webrtc/3155,https://issues.webrtc.org/issues/42228016 +https://crbug.com/webrtc/3156,https://issues.webrtc.org/issues/42228017 +https://crbug.com/webrtc/3157,https://issues.webrtc.org/issues/42228018 +https://crbug.com/webrtc/3158,https://issues.webrtc.org/issues/42228019 +https://crbug.com/webrtc/3159,https://issues.webrtc.org/issues/42228020 +https://crbug.com/webrtc/316,https://issues.webrtc.org/issues/42228021 +https://crbug.com/webrtc/3160,https://issues.webrtc.org/issues/42228022 +https://crbug.com/webrtc/3161,https://issues.webrtc.org/issues/42228023 +https://crbug.com/webrtc/3162,https://issues.webrtc.org/issues/42228024 +https://crbug.com/webrtc/3163,https://issues.webrtc.org/issues/42228025 +https://crbug.com/webrtc/3164,https://issues.webrtc.org/issues/42228026 +https://crbug.com/webrtc/3165,https://issues.webrtc.org/issues/42228027 +https://crbug.com/webrtc/3166,https://issues.webrtc.org/issues/42228028 +https://crbug.com/webrtc/3167,https://issues.webrtc.org/issues/42228029 +https://crbug.com/webrtc/3168,https://issues.webrtc.org/issues/42228030 +https://crbug.com/webrtc/3169,https://issues.webrtc.org/issues/42228031 +https://crbug.com/webrtc/317,https://issues.webrtc.org/issues/42228032 +https://crbug.com/webrtc/3170,https://issues.webrtc.org/issues/42228033 +https://crbug.com/webrtc/3171,https://issues.webrtc.org/issues/42228034 +https://crbug.com/webrtc/3172,https://issues.webrtc.org/issues/42228035 +https://crbug.com/webrtc/3173,https://issues.webrtc.org/issues/42228036 +https://crbug.com/webrtc/3174,https://issues.webrtc.org/issues/42228037 +https://crbug.com/webrtc/3175,https://issues.webrtc.org/issues/42228038 +https://crbug.com/webrtc/3176,https://issues.webrtc.org/issues/42228039 +https://crbug.com/webrtc/3177,https://issues.webrtc.org/issues/42228040 +https://crbug.com/webrtc/3178,https://issues.webrtc.org/issues/42228041 +https://crbug.com/webrtc/3179,https://issues.webrtc.org/issues/42228042 +https://crbug.com/webrtc/318,https://issues.webrtc.org/issues/42228043 +https://crbug.com/webrtc/3180,https://issues.webrtc.org/issues/42228044 +https://crbug.com/webrtc/3181,https://issues.webrtc.org/issues/42228045 +https://crbug.com/webrtc/3182,https://issues.webrtc.org/issues/42228046 +https://crbug.com/webrtc/3183,https://issues.webrtc.org/issues/42228047 +https://crbug.com/webrtc/3184,https://issues.webrtc.org/issues/42228048 +https://crbug.com/webrtc/3185,https://issues.webrtc.org/issues/42228049 +https://crbug.com/webrtc/3186,https://issues.webrtc.org/issues/42228050 +https://crbug.com/webrtc/3187,https://issues.webrtc.org/issues/42228051 +https://crbug.com/webrtc/3188,https://issues.webrtc.org/issues/42228052 +https://crbug.com/webrtc/3189,https://issues.webrtc.org/issues/42228053 +https://crbug.com/webrtc/319,https://issues.webrtc.org/issues/42228054 +https://crbug.com/webrtc/3190,https://issues.webrtc.org/issues/42228055 +https://crbug.com/webrtc/3191,https://issues.webrtc.org/issues/42228056 +https://crbug.com/webrtc/3192,https://issues.webrtc.org/issues/42228057 +https://crbug.com/webrtc/3193,https://issues.webrtc.org/issues/42228058 
+https://crbug.com/webrtc/3194,https://issues.webrtc.org/issues/42228059 +https://crbug.com/webrtc/3195,https://issues.webrtc.org/issues/42228060 +https://crbug.com/webrtc/3196,https://issues.webrtc.org/issues/42228061 +https://crbug.com/webrtc/3197,https://issues.webrtc.org/issues/42228062 +https://crbug.com/webrtc/3198,https://issues.webrtc.org/issues/42228063 +https://crbug.com/webrtc/3199,https://issues.webrtc.org/issues/42228064 +https://crbug.com/webrtc/32,https://issues.webrtc.org/issues/42228065 +https://crbug.com/webrtc/320,https://issues.webrtc.org/issues/42228066 +https://crbug.com/webrtc/3200,https://issues.webrtc.org/issues/42228067 +https://crbug.com/webrtc/3201,https://issues.webrtc.org/issues/42228068 +https://crbug.com/webrtc/3202,https://issues.webrtc.org/issues/42228069 +https://crbug.com/webrtc/3203,https://issues.webrtc.org/issues/42228070 +https://crbug.com/webrtc/3204,https://issues.webrtc.org/issues/42228071 +https://crbug.com/webrtc/3205,https://issues.webrtc.org/issues/42228072 +https://crbug.com/webrtc/3206,https://issues.webrtc.org/issues/42228073 +https://crbug.com/webrtc/3207,https://issues.webrtc.org/issues/42228074 +https://crbug.com/webrtc/3208,https://issues.webrtc.org/issues/42228075 +https://crbug.com/webrtc/3209,https://issues.webrtc.org/issues/42228076 +https://crbug.com/webrtc/321,https://issues.webrtc.org/issues/42228077 +https://crbug.com/webrtc/3210,https://issues.webrtc.org/issues/42228078 +https://crbug.com/webrtc/3211,https://issues.webrtc.org/issues/42228079 +https://crbug.com/webrtc/3212,https://issues.webrtc.org/issues/42228080 +https://crbug.com/webrtc/3213,https://issues.webrtc.org/issues/42228081 +https://crbug.com/webrtc/3214,https://issues.webrtc.org/issues/42228082 +https://crbug.com/webrtc/3215,https://issues.webrtc.org/issues/42228083 +https://crbug.com/webrtc/3216,https://issues.webrtc.org/issues/42228084 +https://crbug.com/webrtc/3217,https://issues.webrtc.org/issues/42228085 +https://crbug.com/webrtc/3218,https://issues.webrtc.org/issues/42228086 +https://crbug.com/webrtc/3219,https://issues.webrtc.org/issues/42228087 +https://crbug.com/webrtc/322,https://issues.webrtc.org/issues/42228088 +https://crbug.com/webrtc/3220,https://issues.webrtc.org/issues/42228089 +https://crbug.com/webrtc/3221,https://issues.webrtc.org/issues/42228090 +https://crbug.com/webrtc/3222,https://issues.webrtc.org/issues/42228091 +https://crbug.com/webrtc/3223,https://issues.webrtc.org/issues/42228092 +https://crbug.com/webrtc/3224,https://issues.webrtc.org/issues/42228093 +https://crbug.com/webrtc/3225,https://issues.webrtc.org/issues/42228094 +https://crbug.com/webrtc/3226,https://issues.webrtc.org/issues/42228095 +https://crbug.com/webrtc/3227,https://issues.webrtc.org/issues/42228096 +https://crbug.com/webrtc/3228,https://issues.webrtc.org/issues/42228097 +https://crbug.com/webrtc/3229,https://issues.webrtc.org/issues/42228098 +https://crbug.com/webrtc/323,https://issues.webrtc.org/issues/42228099 +https://crbug.com/webrtc/3230,https://issues.webrtc.org/issues/42228100 +https://crbug.com/webrtc/3231,https://issues.webrtc.org/issues/42228101 +https://crbug.com/webrtc/3232,https://issues.webrtc.org/issues/42228102 +https://crbug.com/webrtc/3233,https://issues.webrtc.org/issues/42228103 +https://crbug.com/webrtc/3234,https://issues.webrtc.org/issues/42228104 +https://crbug.com/webrtc/3235,https://issues.webrtc.org/issues/42228105 +https://crbug.com/webrtc/3236,https://issues.webrtc.org/issues/42228106 
+https://crbug.com/webrtc/3237,https://issues.webrtc.org/issues/42228107 +https://crbug.com/webrtc/3238,https://issues.webrtc.org/issues/42228108 +https://crbug.com/webrtc/3239,https://issues.webrtc.org/issues/42228109 +https://crbug.com/webrtc/324,https://issues.webrtc.org/issues/42228110 +https://crbug.com/webrtc/3240,https://issues.webrtc.org/issues/42228111 +https://crbug.com/webrtc/3242,https://issues.webrtc.org/issues/42228112 +https://crbug.com/webrtc/3243,https://issues.webrtc.org/issues/42228113 +https://crbug.com/webrtc/3244,https://issues.webrtc.org/issues/42228114 +https://crbug.com/webrtc/3245,https://issues.webrtc.org/issues/42228115 +https://crbug.com/webrtc/3246,https://issues.webrtc.org/issues/42228116 +https://crbug.com/webrtc/3247,https://issues.webrtc.org/issues/42228117 +https://crbug.com/webrtc/3248,https://issues.webrtc.org/issues/42228118 +https://crbug.com/webrtc/3249,https://issues.webrtc.org/issues/42228119 +https://crbug.com/webrtc/3250,https://issues.webrtc.org/issues/42228120 +https://crbug.com/webrtc/3251,https://issues.webrtc.org/issues/42228121 +https://crbug.com/webrtc/3252,https://issues.webrtc.org/issues/42228122 +https://crbug.com/webrtc/3253,https://issues.webrtc.org/issues/42228123 +https://crbug.com/webrtc/3254,https://issues.webrtc.org/issues/42228124 +https://crbug.com/webrtc/3255,https://issues.webrtc.org/issues/42228125 +https://crbug.com/webrtc/3256,https://issues.webrtc.org/issues/42228126 +https://crbug.com/webrtc/3257,https://issues.webrtc.org/issues/42228127 +https://crbug.com/webrtc/3258,https://issues.webrtc.org/issues/42228128 +https://crbug.com/webrtc/3259,https://issues.webrtc.org/issues/42228129 +https://crbug.com/webrtc/326,https://issues.webrtc.org/issues/42228130 +https://crbug.com/webrtc/3260,https://issues.webrtc.org/issues/42228131 +https://crbug.com/webrtc/3261,https://issues.webrtc.org/issues/42228132 +https://crbug.com/webrtc/3262,https://issues.webrtc.org/issues/42228133 +https://crbug.com/webrtc/3263,https://issues.webrtc.org/issues/42228134 +https://crbug.com/webrtc/3264,https://issues.webrtc.org/issues/42228135 +https://crbug.com/webrtc/3265,https://issues.webrtc.org/issues/42228136 +https://crbug.com/webrtc/3266,https://issues.webrtc.org/issues/42228137 +https://crbug.com/webrtc/3267,https://issues.webrtc.org/issues/42228138 +https://crbug.com/webrtc/3268,https://issues.webrtc.org/issues/42228139 +https://crbug.com/webrtc/3269,https://issues.webrtc.org/issues/42228140 +https://crbug.com/webrtc/327,https://issues.webrtc.org/issues/42228141 +https://crbug.com/webrtc/3270,https://issues.webrtc.org/issues/42228142 +https://crbug.com/webrtc/3271,https://issues.webrtc.org/issues/42228143 +https://crbug.com/webrtc/3272,https://issues.webrtc.org/issues/42228144 +https://crbug.com/webrtc/3273,https://issues.webrtc.org/issues/42228145 +https://crbug.com/webrtc/3274,https://issues.webrtc.org/issues/42228146 +https://crbug.com/webrtc/3275,https://issues.webrtc.org/issues/42228147 +https://crbug.com/webrtc/3276,https://issues.webrtc.org/issues/42228148 +https://crbug.com/webrtc/3277,https://issues.webrtc.org/issues/42228149 +https://crbug.com/webrtc/3278,https://issues.webrtc.org/issues/42228150 +https://crbug.com/webrtc/3279,https://issues.webrtc.org/issues/42228151 +https://crbug.com/webrtc/328,https://issues.webrtc.org/issues/42228152 +https://crbug.com/webrtc/3280,https://issues.webrtc.org/issues/42228153 +https://crbug.com/webrtc/3281,https://issues.webrtc.org/issues/42228154 
+https://crbug.com/webrtc/3282,https://issues.webrtc.org/issues/42228155 +https://crbug.com/webrtc/3283,https://issues.webrtc.org/issues/42228156 +https://crbug.com/webrtc/3284,https://issues.webrtc.org/issues/42228157 +https://crbug.com/webrtc/3285,https://issues.webrtc.org/issues/42228158 +https://crbug.com/webrtc/3286,https://issues.webrtc.org/issues/42228159 +https://crbug.com/webrtc/3287,https://issues.webrtc.org/issues/42228160 +https://crbug.com/webrtc/3288,https://issues.webrtc.org/issues/42228161 +https://crbug.com/webrtc/3289,https://issues.webrtc.org/issues/42228162 +https://crbug.com/webrtc/329,https://issues.webrtc.org/issues/42228163 +https://crbug.com/webrtc/3290,https://issues.webrtc.org/issues/42228164 +https://crbug.com/webrtc/3291,https://issues.webrtc.org/issues/42228165 +https://crbug.com/webrtc/3292,https://issues.webrtc.org/issues/42228166 +https://crbug.com/webrtc/3293,https://issues.webrtc.org/issues/42228167 +https://crbug.com/webrtc/3294,https://issues.webrtc.org/issues/42228168 +https://crbug.com/webrtc/3295,https://issues.webrtc.org/issues/42228169 +https://crbug.com/webrtc/3296,https://issues.webrtc.org/issues/42228170 +https://crbug.com/webrtc/3297,https://issues.webrtc.org/issues/42228171 +https://crbug.com/webrtc/3298,https://issues.webrtc.org/issues/42228172 +https://crbug.com/webrtc/3299,https://issues.webrtc.org/issues/42228173 +https://crbug.com/webrtc/33,https://issues.webrtc.org/issues/42228174 +https://crbug.com/webrtc/330,https://issues.webrtc.org/issues/42228175 +https://crbug.com/webrtc/3300,https://issues.webrtc.org/issues/42228176 +https://crbug.com/webrtc/3301,https://issues.webrtc.org/issues/42228177 +https://crbug.com/webrtc/3302,https://issues.webrtc.org/issues/42228178 +https://crbug.com/webrtc/3303,https://issues.webrtc.org/issues/42228179 +https://crbug.com/webrtc/3304,https://issues.webrtc.org/issues/42228180 +https://crbug.com/webrtc/3305,https://issues.webrtc.org/issues/42228181 +https://crbug.com/webrtc/3306,https://issues.webrtc.org/issues/42228182 +https://crbug.com/webrtc/3307,https://issues.webrtc.org/issues/42228183 +https://crbug.com/webrtc/3308,https://issues.webrtc.org/issues/42228184 +https://crbug.com/webrtc/3309,https://issues.webrtc.org/issues/42228185 +https://crbug.com/webrtc/331,https://issues.webrtc.org/issues/42228186 +https://crbug.com/webrtc/3310,https://issues.webrtc.org/issues/42228187 +https://crbug.com/webrtc/3311,https://issues.webrtc.org/issues/42228188 +https://crbug.com/webrtc/3312,https://issues.webrtc.org/issues/42228189 +https://crbug.com/webrtc/3313,https://issues.webrtc.org/issues/42228190 +https://crbug.com/webrtc/3314,https://issues.webrtc.org/issues/42228191 +https://crbug.com/webrtc/3315,https://issues.webrtc.org/issues/42228192 +https://crbug.com/webrtc/3316,https://issues.webrtc.org/issues/42228193 +https://crbug.com/webrtc/3317,https://issues.webrtc.org/issues/42228194 +https://crbug.com/webrtc/3318,https://issues.webrtc.org/issues/42228195 +https://crbug.com/webrtc/3319,https://issues.webrtc.org/issues/42228196 +https://crbug.com/webrtc/332,https://issues.webrtc.org/issues/42228197 +https://crbug.com/webrtc/3320,https://issues.webrtc.org/issues/42228198 +https://crbug.com/webrtc/3321,https://issues.webrtc.org/issues/42228199 +https://crbug.com/webrtc/3322,https://issues.webrtc.org/issues/42228200 +https://crbug.com/webrtc/3323,https://issues.webrtc.org/issues/42228201 +https://crbug.com/webrtc/3324,https://issues.webrtc.org/issues/42228202 
+https://crbug.com/webrtc/3325,https://issues.webrtc.org/issues/42228203 +https://crbug.com/webrtc/3326,https://issues.webrtc.org/issues/42228204 +https://crbug.com/webrtc/3327,https://issues.webrtc.org/issues/42228205 +https://crbug.com/webrtc/3328,https://issues.webrtc.org/issues/42228206 +https://crbug.com/webrtc/3329,https://issues.webrtc.org/issues/42228207 +https://crbug.com/webrtc/333,https://issues.webrtc.org/issues/42228208 +https://crbug.com/webrtc/3330,https://issues.webrtc.org/issues/42228209 +https://crbug.com/webrtc/3331,https://issues.webrtc.org/issues/42228210 +https://crbug.com/webrtc/3332,https://issues.webrtc.org/issues/42228211 +https://crbug.com/webrtc/3334,https://issues.webrtc.org/issues/42228212 +https://crbug.com/webrtc/3335,https://issues.webrtc.org/issues/42228213 +https://crbug.com/webrtc/3336,https://issues.webrtc.org/issues/42228214 +https://crbug.com/webrtc/3337,https://issues.webrtc.org/issues/42228215 +https://crbug.com/webrtc/3338,https://issues.webrtc.org/issues/42228216 +https://crbug.com/webrtc/3339,https://issues.webrtc.org/issues/42228217 +https://crbug.com/webrtc/334,https://issues.webrtc.org/issues/42228218 +https://crbug.com/webrtc/3340,https://issues.webrtc.org/issues/42228219 +https://crbug.com/webrtc/3341,https://issues.webrtc.org/issues/42228220 +https://crbug.com/webrtc/3342,https://issues.webrtc.org/issues/42228221 +https://crbug.com/webrtc/3343,https://issues.webrtc.org/issues/42228222 +https://crbug.com/webrtc/3344,https://issues.webrtc.org/issues/42228223 +https://crbug.com/webrtc/3345,https://issues.webrtc.org/issues/42228224 +https://crbug.com/webrtc/3346,https://issues.webrtc.org/issues/42228225 +https://crbug.com/webrtc/3347,https://issues.webrtc.org/issues/42228226 +https://crbug.com/webrtc/3348,https://issues.webrtc.org/issues/42228227 +https://crbug.com/webrtc/3349,https://issues.webrtc.org/issues/42228228 +https://crbug.com/webrtc/335,https://issues.webrtc.org/issues/42228229 +https://crbug.com/webrtc/3350,https://issues.webrtc.org/issues/42228230 +https://crbug.com/webrtc/3351,https://issues.webrtc.org/issues/42228231 +https://crbug.com/webrtc/3352,https://issues.webrtc.org/issues/42228232 +https://crbug.com/webrtc/3353,https://issues.webrtc.org/issues/42228233 +https://crbug.com/webrtc/3354,https://issues.webrtc.org/issues/42228234 +https://crbug.com/webrtc/3355,https://issues.webrtc.org/issues/42228235 +https://crbug.com/webrtc/3356,https://issues.webrtc.org/issues/42228236 +https://crbug.com/webrtc/3357,https://issues.webrtc.org/issues/42228237 +https://crbug.com/webrtc/3358,https://issues.webrtc.org/issues/42228238 +https://crbug.com/webrtc/3359,https://issues.webrtc.org/issues/42228239 +https://crbug.com/webrtc/336,https://issues.webrtc.org/issues/42228240 +https://crbug.com/webrtc/3360,https://issues.webrtc.org/issues/42228241 +https://crbug.com/webrtc/3361,https://issues.webrtc.org/issues/42228242 +https://crbug.com/webrtc/3362,https://issues.webrtc.org/issues/42228243 +https://crbug.com/webrtc/3363,https://issues.webrtc.org/issues/42228244 +https://crbug.com/webrtc/3364,https://issues.webrtc.org/issues/42228245 +https://crbug.com/webrtc/3365,https://issues.webrtc.org/issues/42228246 +https://crbug.com/webrtc/3366,https://issues.webrtc.org/issues/42228247 +https://crbug.com/webrtc/3367,https://issues.webrtc.org/issues/42228248 +https://crbug.com/webrtc/3368,https://issues.webrtc.org/issues/42228249 +https://crbug.com/webrtc/3369,https://issues.webrtc.org/issues/42228250 
+https://crbug.com/webrtc/337,https://issues.webrtc.org/issues/42228251 +https://crbug.com/webrtc/3370,https://issues.webrtc.org/issues/42228252 +https://crbug.com/webrtc/3371,https://issues.webrtc.org/issues/42228253 +https://crbug.com/webrtc/3372,https://issues.webrtc.org/issues/42228254 +https://crbug.com/webrtc/3373,https://issues.webrtc.org/issues/42228255 +https://crbug.com/webrtc/3374,https://issues.webrtc.org/issues/42228256 +https://crbug.com/webrtc/3375,https://issues.webrtc.org/issues/42228257 +https://crbug.com/webrtc/3376,https://issues.webrtc.org/issues/42228258 +https://crbug.com/webrtc/3377,https://issues.webrtc.org/issues/42228259 +https://crbug.com/webrtc/3379,https://issues.webrtc.org/issues/42228260 +https://crbug.com/webrtc/338,https://issues.webrtc.org/issues/42228261 +https://crbug.com/webrtc/3380,https://issues.webrtc.org/issues/42228262 +https://crbug.com/webrtc/3381,https://issues.webrtc.org/issues/42228263 +https://crbug.com/webrtc/3382,https://issues.webrtc.org/issues/42228264 +https://crbug.com/webrtc/3383,https://issues.webrtc.org/issues/42228265 +https://crbug.com/webrtc/3384,https://issues.webrtc.org/issues/42228266 +https://crbug.com/webrtc/3385,https://issues.webrtc.org/issues/42228267 +https://crbug.com/webrtc/3386,https://issues.webrtc.org/issues/42228268 +https://crbug.com/webrtc/3387,https://issues.webrtc.org/issues/42228269 +https://crbug.com/webrtc/3388,https://issues.webrtc.org/issues/42228270 +https://crbug.com/webrtc/3389,https://issues.webrtc.org/issues/42228271 +https://crbug.com/webrtc/339,https://issues.webrtc.org/issues/42228272 +https://crbug.com/webrtc/3390,https://issues.webrtc.org/issues/42228273 +https://crbug.com/webrtc/3391,https://issues.webrtc.org/issues/42228274 +https://crbug.com/webrtc/3392,https://issues.webrtc.org/issues/42228275 +https://crbug.com/webrtc/3393,https://issues.webrtc.org/issues/42228276 +https://crbug.com/webrtc/3394,https://issues.webrtc.org/issues/42228277 +https://crbug.com/webrtc/3395,https://issues.webrtc.org/issues/42228278 +https://crbug.com/webrtc/3396,https://issues.webrtc.org/issues/42228279 +https://crbug.com/webrtc/3397,https://issues.webrtc.org/issues/42228280 +https://crbug.com/webrtc/3398,https://issues.webrtc.org/issues/42228281 +https://crbug.com/webrtc/3399,https://issues.webrtc.org/issues/42228282 +https://crbug.com/webrtc/340,https://issues.webrtc.org/issues/42228283 +https://crbug.com/webrtc/3400,https://issues.webrtc.org/issues/42228284 +https://crbug.com/webrtc/3401,https://issues.webrtc.org/issues/42228285 +https://crbug.com/webrtc/3403,https://issues.webrtc.org/issues/42228286 +https://crbug.com/webrtc/3404,https://issues.webrtc.org/issues/42228287 +https://crbug.com/webrtc/3405,https://issues.webrtc.org/issues/42228288 +https://crbug.com/webrtc/3406,https://issues.webrtc.org/issues/42228289 +https://crbug.com/webrtc/3407,https://issues.webrtc.org/issues/42228290 +https://crbug.com/webrtc/3408,https://issues.webrtc.org/issues/42228291 +https://crbug.com/webrtc/3409,https://issues.webrtc.org/issues/42228292 +https://crbug.com/webrtc/341,https://issues.webrtc.org/issues/42228293 +https://crbug.com/webrtc/3410,https://issues.webrtc.org/issues/42228294 +https://crbug.com/webrtc/3411,https://issues.webrtc.org/issues/42228295 +https://crbug.com/webrtc/3412,https://issues.webrtc.org/issues/42228296 +https://crbug.com/webrtc/3413,https://issues.webrtc.org/issues/42228297 +https://crbug.com/webrtc/3414,https://issues.webrtc.org/issues/42228298 
+https://crbug.com/webrtc/3415,https://issues.webrtc.org/issues/42228299 +https://crbug.com/webrtc/3416,https://issues.webrtc.org/issues/42228300 +https://crbug.com/webrtc/3417,https://issues.webrtc.org/issues/42228301 +https://crbug.com/webrtc/3418,https://issues.webrtc.org/issues/42228302 +https://crbug.com/webrtc/3419,https://issues.webrtc.org/issues/42228303 +https://crbug.com/webrtc/342,https://issues.webrtc.org/issues/42228304 +https://crbug.com/webrtc/3420,https://issues.webrtc.org/issues/42228305 +https://crbug.com/webrtc/3421,https://issues.webrtc.org/issues/42228306 +https://crbug.com/webrtc/3422,https://issues.webrtc.org/issues/42228307 +https://crbug.com/webrtc/3423,https://issues.webrtc.org/issues/42228308 +https://crbug.com/webrtc/3424,https://issues.webrtc.org/issues/42228309 +https://crbug.com/webrtc/3425,https://issues.webrtc.org/issues/42228310 +https://crbug.com/webrtc/3426,https://issues.webrtc.org/issues/42228311 +https://crbug.com/webrtc/3427,https://issues.webrtc.org/issues/42228312 +https://crbug.com/webrtc/3428,https://issues.webrtc.org/issues/42228313 +https://crbug.com/webrtc/3429,https://issues.webrtc.org/issues/42228314 +https://crbug.com/webrtc/343,https://issues.webrtc.org/issues/42228315 +https://crbug.com/webrtc/3430,https://issues.webrtc.org/issues/42228316 +https://crbug.com/webrtc/3431,https://issues.webrtc.org/issues/42228317 +https://crbug.com/webrtc/3432,https://issues.webrtc.org/issues/42228318 +https://crbug.com/webrtc/3433,https://issues.webrtc.org/issues/42228319 +https://crbug.com/webrtc/3434,https://issues.webrtc.org/issues/42228320 +https://crbug.com/webrtc/3435,https://issues.webrtc.org/issues/42228321 +https://crbug.com/webrtc/3436,https://issues.webrtc.org/issues/42228322 +https://crbug.com/webrtc/3437,https://issues.webrtc.org/issues/42228323 +https://crbug.com/webrtc/3438,https://issues.webrtc.org/issues/42228324 +https://crbug.com/webrtc/3439,https://issues.webrtc.org/issues/42228325 +https://crbug.com/webrtc/344,https://issues.webrtc.org/issues/42228326 +https://crbug.com/webrtc/3440,https://issues.webrtc.org/issues/42228327 +https://crbug.com/webrtc/3441,https://issues.webrtc.org/issues/42228328 +https://crbug.com/webrtc/3442,https://issues.webrtc.org/issues/42228329 +https://crbug.com/webrtc/3443,https://issues.webrtc.org/issues/42228330 +https://crbug.com/webrtc/3444,https://issues.webrtc.org/issues/42228331 +https://crbug.com/webrtc/3445,https://issues.webrtc.org/issues/42228332 +https://crbug.com/webrtc/3446,https://issues.webrtc.org/issues/42228333 +https://crbug.com/webrtc/3447,https://issues.webrtc.org/issues/42228334 +https://crbug.com/webrtc/3448,https://issues.webrtc.org/issues/42228335 +https://crbug.com/webrtc/3449,https://issues.webrtc.org/issues/42228336 +https://crbug.com/webrtc/345,https://issues.webrtc.org/issues/42228337 +https://crbug.com/webrtc/3450,https://issues.webrtc.org/issues/42228338 +https://crbug.com/webrtc/3451,https://issues.webrtc.org/issues/42228339 +https://crbug.com/webrtc/3452,https://issues.webrtc.org/issues/42228340 +https://crbug.com/webrtc/3453,https://issues.webrtc.org/issues/42228341 +https://crbug.com/webrtc/3454,https://issues.webrtc.org/issues/42228342 +https://crbug.com/webrtc/3455,https://issues.webrtc.org/issues/42228343 +https://crbug.com/webrtc/3456,https://issues.webrtc.org/issues/42228344 +https://crbug.com/webrtc/3457,https://issues.webrtc.org/issues/42228345 +https://crbug.com/webrtc/3458,https://issues.webrtc.org/issues/42228346 
+https://crbug.com/webrtc/3459,https://issues.webrtc.org/issues/42228347 +https://crbug.com/webrtc/346,https://issues.webrtc.org/issues/42228348 +https://crbug.com/webrtc/3460,https://issues.webrtc.org/issues/42228349 +https://crbug.com/webrtc/3461,https://issues.webrtc.org/issues/42228350 +https://crbug.com/webrtc/3462,https://issues.webrtc.org/issues/42228351 +https://crbug.com/webrtc/3463,https://issues.webrtc.org/issues/42228352 +https://crbug.com/webrtc/3464,https://issues.webrtc.org/issues/42228353 +https://crbug.com/webrtc/3465,https://issues.webrtc.org/issues/42228354 +https://crbug.com/webrtc/3466,https://issues.webrtc.org/issues/42228355 +https://crbug.com/webrtc/3467,https://issues.webrtc.org/issues/42228356 +https://crbug.com/webrtc/3468,https://issues.webrtc.org/issues/42228357 +https://crbug.com/webrtc/3469,https://issues.webrtc.org/issues/42228358 +https://crbug.com/webrtc/347,https://issues.webrtc.org/issues/42228359 +https://crbug.com/webrtc/3470,https://issues.webrtc.org/issues/42228360 +https://crbug.com/webrtc/3471,https://issues.webrtc.org/issues/42228361 +https://crbug.com/webrtc/3472,https://issues.webrtc.org/issues/42228362 +https://crbug.com/webrtc/3473,https://issues.webrtc.org/issues/42228363 +https://crbug.com/webrtc/3474,https://issues.webrtc.org/issues/42228364 +https://crbug.com/webrtc/3475,https://issues.webrtc.org/issues/42228365 +https://crbug.com/webrtc/3476,https://issues.webrtc.org/issues/42228366 +https://crbug.com/webrtc/3477,https://issues.webrtc.org/issues/42228367 +https://crbug.com/webrtc/3478,https://issues.webrtc.org/issues/42228368 +https://crbug.com/webrtc/3479,https://issues.webrtc.org/issues/42228369 +https://crbug.com/webrtc/348,https://issues.webrtc.org/issues/42228370 +https://crbug.com/webrtc/3480,https://issues.webrtc.org/issues/42228371 +https://crbug.com/webrtc/3481,https://issues.webrtc.org/issues/42228372 +https://crbug.com/webrtc/3482,https://issues.webrtc.org/issues/42228373 +https://crbug.com/webrtc/3483,https://issues.webrtc.org/issues/42228374 +https://crbug.com/webrtc/3484,https://issues.webrtc.org/issues/42228375 +https://crbug.com/webrtc/3486,https://issues.webrtc.org/issues/42228376 +https://crbug.com/webrtc/3487,https://issues.webrtc.org/issues/42228377 +https://crbug.com/webrtc/3488,https://issues.webrtc.org/issues/42228378 +https://crbug.com/webrtc/3489,https://issues.webrtc.org/issues/42228379 +https://crbug.com/webrtc/349,https://issues.webrtc.org/issues/42228380 +https://crbug.com/webrtc/3490,https://issues.webrtc.org/issues/42228381 +https://crbug.com/webrtc/3491,https://issues.webrtc.org/issues/42228382 +https://crbug.com/webrtc/3492,https://issues.webrtc.org/issues/42228383 +https://crbug.com/webrtc/3493,https://issues.webrtc.org/issues/42228384 +https://crbug.com/webrtc/3494,https://issues.webrtc.org/issues/42228385 +https://crbug.com/webrtc/3496,https://issues.webrtc.org/issues/42228386 +https://crbug.com/webrtc/3497,https://issues.webrtc.org/issues/42228387 +https://crbug.com/webrtc/3498,https://issues.webrtc.org/issues/42228388 +https://crbug.com/webrtc/3499,https://issues.webrtc.org/issues/42228389 +https://crbug.com/webrtc/35,https://issues.webrtc.org/issues/42228390 +https://crbug.com/webrtc/350,https://issues.webrtc.org/issues/42228391 +https://crbug.com/webrtc/3500,https://issues.webrtc.org/issues/42228392 +https://crbug.com/webrtc/3501,https://issues.webrtc.org/issues/42228393 +https://crbug.com/webrtc/3502,https://issues.webrtc.org/issues/42228394 
+https://crbug.com/webrtc/3503,https://issues.webrtc.org/issues/42228395 +https://crbug.com/webrtc/3504,https://issues.webrtc.org/issues/42228396 +https://crbug.com/webrtc/3505,https://issues.webrtc.org/issues/42228397 +https://crbug.com/webrtc/3506,https://issues.webrtc.org/issues/42228398 +https://crbug.com/webrtc/3507,https://issues.webrtc.org/issues/42228399 +https://crbug.com/webrtc/3508,https://issues.webrtc.org/issues/42228400 +https://crbug.com/webrtc/3509,https://issues.webrtc.org/issues/42228401 +https://crbug.com/webrtc/351,https://issues.webrtc.org/issues/42228402 +https://crbug.com/webrtc/3510,https://issues.webrtc.org/issues/42228403 +https://crbug.com/webrtc/3511,https://issues.webrtc.org/issues/42228404 +https://crbug.com/webrtc/3512,https://issues.webrtc.org/issues/42228405 +https://crbug.com/webrtc/3513,https://issues.webrtc.org/issues/42228406 +https://crbug.com/webrtc/3514,https://issues.webrtc.org/issues/42228407 +https://crbug.com/webrtc/3515,https://issues.webrtc.org/issues/42228408 +https://crbug.com/webrtc/3516,https://issues.webrtc.org/issues/42228409 +https://crbug.com/webrtc/3517,https://issues.webrtc.org/issues/42228410 +https://crbug.com/webrtc/3518,https://issues.webrtc.org/issues/42228411 +https://crbug.com/webrtc/3519,https://issues.webrtc.org/issues/42228412 +https://crbug.com/webrtc/352,https://issues.webrtc.org/issues/42228413 +https://crbug.com/webrtc/3520,https://issues.webrtc.org/issues/42228414 +https://crbug.com/webrtc/3521,https://issues.webrtc.org/issues/42228415 +https://crbug.com/webrtc/3522,https://issues.webrtc.org/issues/42228416 +https://crbug.com/webrtc/3523,https://issues.webrtc.org/issues/42228417 +https://crbug.com/webrtc/3524,https://issues.webrtc.org/issues/42228418 +https://crbug.com/webrtc/3525,https://issues.webrtc.org/issues/42228419 +https://crbug.com/webrtc/3526,https://issues.webrtc.org/issues/42228420 +https://crbug.com/webrtc/3527,https://issues.webrtc.org/issues/42228421 +https://crbug.com/webrtc/3528,https://issues.webrtc.org/issues/42228422 +https://crbug.com/webrtc/3529,https://issues.webrtc.org/issues/42228423 +https://crbug.com/webrtc/353,https://issues.webrtc.org/issues/42228424 +https://crbug.com/webrtc/3530,https://issues.webrtc.org/issues/42228425 +https://crbug.com/webrtc/3531,https://issues.webrtc.org/issues/42228426 +https://crbug.com/webrtc/3532,https://issues.webrtc.org/issues/42228427 +https://crbug.com/webrtc/3533,https://issues.webrtc.org/issues/42228428 +https://crbug.com/webrtc/3535,https://issues.webrtc.org/issues/42228429 +https://crbug.com/webrtc/3536,https://issues.webrtc.org/issues/42228430 +https://crbug.com/webrtc/3537,https://issues.webrtc.org/issues/42228431 +https://crbug.com/webrtc/3538,https://issues.webrtc.org/issues/42228432 +https://crbug.com/webrtc/3539,https://issues.webrtc.org/issues/42228433 +https://crbug.com/webrtc/354,https://issues.webrtc.org/issues/42228434 +https://crbug.com/webrtc/3540,https://issues.webrtc.org/issues/42228435 +https://crbug.com/webrtc/3541,https://issues.webrtc.org/issues/42228436 +https://crbug.com/webrtc/3542,https://issues.webrtc.org/issues/42228437 +https://crbug.com/webrtc/3543,https://issues.webrtc.org/issues/42228438 +https://crbug.com/webrtc/3544,https://issues.webrtc.org/issues/42228439 +https://crbug.com/webrtc/3545,https://issues.webrtc.org/issues/42228440 +https://crbug.com/webrtc/3546,https://issues.webrtc.org/issues/42228441 +https://crbug.com/webrtc/3547,https://issues.webrtc.org/issues/42228442 
+https://crbug.com/webrtc/3548,https://issues.webrtc.org/issues/42228443 +https://crbug.com/webrtc/3549,https://issues.webrtc.org/issues/42228444 +https://crbug.com/webrtc/355,https://issues.webrtc.org/issues/42228445 +https://crbug.com/webrtc/3550,https://issues.webrtc.org/issues/42228446 +https://crbug.com/webrtc/3551,https://issues.webrtc.org/issues/42228447 +https://crbug.com/webrtc/3552,https://issues.webrtc.org/issues/42228448 +https://crbug.com/webrtc/3553,https://issues.webrtc.org/issues/42228449 +https://crbug.com/webrtc/3554,https://issues.webrtc.org/issues/42228450 +https://crbug.com/webrtc/3555,https://issues.webrtc.org/issues/42228451 +https://crbug.com/webrtc/3556,https://issues.webrtc.org/issues/42228452 +https://crbug.com/webrtc/3557,https://issues.webrtc.org/issues/42228453 +https://crbug.com/webrtc/3558,https://issues.webrtc.org/issues/42228454 +https://crbug.com/webrtc/3559,https://issues.webrtc.org/issues/42228455 +https://crbug.com/webrtc/356,https://issues.webrtc.org/issues/42228456 +https://crbug.com/webrtc/3560,https://issues.webrtc.org/issues/42228457 +https://crbug.com/webrtc/3561,https://issues.webrtc.org/issues/42228458 +https://crbug.com/webrtc/3562,https://issues.webrtc.org/issues/42228459 +https://crbug.com/webrtc/3563,https://issues.webrtc.org/issues/42228460 +https://crbug.com/webrtc/3564,https://issues.webrtc.org/issues/42228461 +https://crbug.com/webrtc/3565,https://issues.webrtc.org/issues/42228462 +https://crbug.com/webrtc/3566,https://issues.webrtc.org/issues/42228463 +https://crbug.com/webrtc/3567,https://issues.webrtc.org/issues/42228464 +https://crbug.com/webrtc/3568,https://issues.webrtc.org/issues/42228465 +https://crbug.com/webrtc/3569,https://issues.webrtc.org/issues/42228466 +https://crbug.com/webrtc/357,https://issues.webrtc.org/issues/42228467 +https://crbug.com/webrtc/3570,https://issues.webrtc.org/issues/42228468 +https://crbug.com/webrtc/3571,https://issues.webrtc.org/issues/42228469 +https://crbug.com/webrtc/3572,https://issues.webrtc.org/issues/42228470 +https://crbug.com/webrtc/3573,https://issues.webrtc.org/issues/42228471 +https://crbug.com/webrtc/3574,https://issues.webrtc.org/issues/42228472 +https://crbug.com/webrtc/3575,https://issues.webrtc.org/issues/42228473 +https://crbug.com/webrtc/3576,https://issues.webrtc.org/issues/42228474 +https://crbug.com/webrtc/3577,https://issues.webrtc.org/issues/42228475 +https://crbug.com/webrtc/3578,https://issues.webrtc.org/issues/42228476 +https://crbug.com/webrtc/3579,https://issues.webrtc.org/issues/42228477 +https://crbug.com/webrtc/358,https://issues.webrtc.org/issues/42228478 +https://crbug.com/webrtc/3580,https://issues.webrtc.org/issues/42228479 +https://crbug.com/webrtc/3581,https://issues.webrtc.org/issues/42228480 +https://crbug.com/webrtc/3582,https://issues.webrtc.org/issues/42228481 +https://crbug.com/webrtc/3583,https://issues.webrtc.org/issues/42228482 +https://crbug.com/webrtc/3584,https://issues.webrtc.org/issues/42228483 +https://crbug.com/webrtc/3585,https://issues.webrtc.org/issues/42228484 +https://crbug.com/webrtc/3586,https://issues.webrtc.org/issues/42228485 +https://crbug.com/webrtc/3587,https://issues.webrtc.org/issues/42228486 +https://crbug.com/webrtc/3588,https://issues.webrtc.org/issues/42228487 +https://crbug.com/webrtc/3589,https://issues.webrtc.org/issues/42228488 +https://crbug.com/webrtc/359,https://issues.webrtc.org/issues/42228489 +https://crbug.com/webrtc/3590,https://issues.webrtc.org/issues/42228490 
+https://crbug.com/webrtc/3591,https://issues.webrtc.org/issues/42228491 +https://crbug.com/webrtc/3592,https://issues.webrtc.org/issues/42228492 +https://crbug.com/webrtc/3593,https://issues.webrtc.org/issues/42228493 +https://crbug.com/webrtc/3594,https://issues.webrtc.org/issues/42228494 +https://crbug.com/webrtc/3595,https://issues.webrtc.org/issues/42228495 +https://crbug.com/webrtc/3596,https://issues.webrtc.org/issues/42228496 +https://crbug.com/webrtc/3597,https://issues.webrtc.org/issues/42228497 +https://crbug.com/webrtc/3598,https://issues.webrtc.org/issues/42228498 +https://crbug.com/webrtc/3599,https://issues.webrtc.org/issues/42228499 +https://crbug.com/webrtc/36,https://issues.webrtc.org/issues/42228500 +https://crbug.com/webrtc/360,https://issues.webrtc.org/issues/42228501 +https://crbug.com/webrtc/3600,https://issues.webrtc.org/issues/42228502 +https://crbug.com/webrtc/3601,https://issues.webrtc.org/issues/42228503 +https://crbug.com/webrtc/3602,https://issues.webrtc.org/issues/42228504 +https://crbug.com/webrtc/3603,https://issues.webrtc.org/issues/42228505 +https://crbug.com/webrtc/3604,https://issues.webrtc.org/issues/42228506 +https://crbug.com/webrtc/3605,https://issues.webrtc.org/issues/42228507 +https://crbug.com/webrtc/3606,https://issues.webrtc.org/issues/42228508 +https://crbug.com/webrtc/3607,https://issues.webrtc.org/issues/42228509 +https://crbug.com/webrtc/3608,https://issues.webrtc.org/issues/42228510 +https://crbug.com/webrtc/3609,https://issues.webrtc.org/issues/42228511 +https://crbug.com/webrtc/361,https://issues.webrtc.org/issues/42228512 +https://crbug.com/webrtc/3610,https://issues.webrtc.org/issues/42228513 +https://crbug.com/webrtc/3612,https://issues.webrtc.org/issues/42228514 +https://crbug.com/webrtc/3613,https://issues.webrtc.org/issues/42228515 +https://crbug.com/webrtc/3614,https://issues.webrtc.org/issues/42228516 +https://crbug.com/webrtc/3615,https://issues.webrtc.org/issues/42228517 +https://crbug.com/webrtc/3616,https://issues.webrtc.org/issues/42228518 +https://crbug.com/webrtc/3617,https://issues.webrtc.org/issues/42228519 +https://crbug.com/webrtc/3618,https://issues.webrtc.org/issues/42228520 +https://crbug.com/webrtc/3619,https://issues.webrtc.org/issues/42228521 +https://crbug.com/webrtc/362,https://issues.webrtc.org/issues/42228522 +https://crbug.com/webrtc/3620,https://issues.webrtc.org/issues/42228523 +https://crbug.com/webrtc/3621,https://issues.webrtc.org/issues/42228524 +https://crbug.com/webrtc/3622,https://issues.webrtc.org/issues/42228525 +https://crbug.com/webrtc/3623,https://issues.webrtc.org/issues/42228526 +https://crbug.com/webrtc/3624,https://issues.webrtc.org/issues/42228527 +https://crbug.com/webrtc/3625,https://issues.webrtc.org/issues/42228528 +https://crbug.com/webrtc/3626,https://issues.webrtc.org/issues/42228529 +https://crbug.com/webrtc/3627,https://issues.webrtc.org/issues/42228530 +https://crbug.com/webrtc/3628,https://issues.webrtc.org/issues/42228531 +https://crbug.com/webrtc/3629,https://issues.webrtc.org/issues/42228532 +https://crbug.com/webrtc/363,https://issues.webrtc.org/issues/42228533 +https://crbug.com/webrtc/3630,https://issues.webrtc.org/issues/42228534 +https://crbug.com/webrtc/3631,https://issues.webrtc.org/issues/42228535 +https://crbug.com/webrtc/3632,https://issues.webrtc.org/issues/42228536 +https://crbug.com/webrtc/3633,https://issues.webrtc.org/issues/42228537 +https://crbug.com/webrtc/3634,https://issues.webrtc.org/issues/42228538 
+https://crbug.com/webrtc/3635,https://issues.webrtc.org/issues/42228539 +https://crbug.com/webrtc/3636,https://issues.webrtc.org/issues/42228540 +https://crbug.com/webrtc/3637,https://issues.webrtc.org/issues/42228541 +https://crbug.com/webrtc/3638,https://issues.webrtc.org/issues/42228542 +https://crbug.com/webrtc/3639,https://issues.webrtc.org/issues/42228543 +https://crbug.com/webrtc/364,https://issues.webrtc.org/issues/42228544 +https://crbug.com/webrtc/3640,https://issues.webrtc.org/issues/42228545 +https://crbug.com/webrtc/3641,https://issues.webrtc.org/issues/42228546 +https://crbug.com/webrtc/3642,https://issues.webrtc.org/issues/42228547 +https://crbug.com/webrtc/3643,https://issues.webrtc.org/issues/42228548 +https://crbug.com/webrtc/3644,https://issues.webrtc.org/issues/42228549 +https://crbug.com/webrtc/3645,https://issues.webrtc.org/issues/42228550 +https://crbug.com/webrtc/3646,https://issues.webrtc.org/issues/42228551 +https://crbug.com/webrtc/3647,https://issues.webrtc.org/issues/42228552 +https://crbug.com/webrtc/3648,https://issues.webrtc.org/issues/42228553 +https://crbug.com/webrtc/3649,https://issues.webrtc.org/issues/42228554 +https://crbug.com/webrtc/365,https://issues.webrtc.org/issues/42228555 +https://crbug.com/webrtc/3650,https://issues.webrtc.org/issues/42228556 +https://crbug.com/webrtc/3651,https://issues.webrtc.org/issues/42228557 +https://crbug.com/webrtc/3652,https://issues.webrtc.org/issues/42228558 +https://crbug.com/webrtc/3653,https://issues.webrtc.org/issues/42228559 +https://crbug.com/webrtc/3654,https://issues.webrtc.org/issues/42228560 +https://crbug.com/webrtc/3655,https://issues.webrtc.org/issues/42228561 +https://crbug.com/webrtc/3656,https://issues.webrtc.org/issues/42228562 +https://crbug.com/webrtc/3657,https://issues.webrtc.org/issues/42228563 +https://crbug.com/webrtc/3658,https://issues.webrtc.org/issues/42228564 +https://crbug.com/webrtc/3659,https://issues.webrtc.org/issues/42228565 +https://crbug.com/webrtc/366,https://issues.webrtc.org/issues/42228566 +https://crbug.com/webrtc/3660,https://issues.webrtc.org/issues/42228567 +https://crbug.com/webrtc/3661,https://issues.webrtc.org/issues/42228568 +https://crbug.com/webrtc/3662,https://issues.webrtc.org/issues/42228569 +https://crbug.com/webrtc/3663,https://issues.webrtc.org/issues/42228570 +https://crbug.com/webrtc/3664,https://issues.webrtc.org/issues/42228571 +https://crbug.com/webrtc/3665,https://issues.webrtc.org/issues/42228572 +https://crbug.com/webrtc/3667,https://issues.webrtc.org/issues/42228573 +https://crbug.com/webrtc/3668,https://issues.webrtc.org/issues/42228574 +https://crbug.com/webrtc/3669,https://issues.webrtc.org/issues/42228575 +https://crbug.com/webrtc/367,https://issues.webrtc.org/issues/42228576 +https://crbug.com/webrtc/3670,https://issues.webrtc.org/issues/42228577 +https://crbug.com/webrtc/3671,https://issues.webrtc.org/issues/42228578 +https://crbug.com/webrtc/3672,https://issues.webrtc.org/issues/42228579 +https://crbug.com/webrtc/3673,https://issues.webrtc.org/issues/42228580 +https://crbug.com/webrtc/3674,https://issues.webrtc.org/issues/42228581 +https://crbug.com/webrtc/3675,https://issues.webrtc.org/issues/42228582 +https://crbug.com/webrtc/3676,https://issues.webrtc.org/issues/42228583 +https://crbug.com/webrtc/3677,https://issues.webrtc.org/issues/42228584 +https://crbug.com/webrtc/3678,https://issues.webrtc.org/issues/42228585 +https://crbug.com/webrtc/3679,https://issues.webrtc.org/issues/42228586 
+https://crbug.com/webrtc/368,https://issues.webrtc.org/issues/42228587 +https://crbug.com/webrtc/3680,https://issues.webrtc.org/issues/42228588 +https://crbug.com/webrtc/3681,https://issues.webrtc.org/issues/42228589 +https://crbug.com/webrtc/3682,https://issues.webrtc.org/issues/42228590 +https://crbug.com/webrtc/3683,https://issues.webrtc.org/issues/42228591 +https://crbug.com/webrtc/3684,https://issues.webrtc.org/issues/42228592 +https://crbug.com/webrtc/3685,https://issues.webrtc.org/issues/42228593 +https://crbug.com/webrtc/3686,https://issues.webrtc.org/issues/42228594 +https://crbug.com/webrtc/3687,https://issues.webrtc.org/issues/42228595 +https://crbug.com/webrtc/3688,https://issues.webrtc.org/issues/42228596 +https://crbug.com/webrtc/3689,https://issues.webrtc.org/issues/42228597 +https://crbug.com/webrtc/369,https://issues.webrtc.org/issues/42228598 +https://crbug.com/webrtc/3690,https://issues.webrtc.org/issues/42228599 +https://crbug.com/webrtc/3691,https://issues.webrtc.org/issues/42228600 +https://crbug.com/webrtc/3692,https://issues.webrtc.org/issues/42228601 +https://crbug.com/webrtc/3693,https://issues.webrtc.org/issues/42228602 +https://crbug.com/webrtc/3694,https://issues.webrtc.org/issues/42228603 +https://crbug.com/webrtc/3695,https://issues.webrtc.org/issues/42228604 +https://crbug.com/webrtc/3696,https://issues.webrtc.org/issues/42228605 +https://crbug.com/webrtc/3697,https://issues.webrtc.org/issues/42228606 +https://crbug.com/webrtc/3698,https://issues.webrtc.org/issues/42228607 +https://crbug.com/webrtc/3699,https://issues.webrtc.org/issues/42228608 +https://crbug.com/webrtc/37,https://issues.webrtc.org/issues/42228609 +https://crbug.com/webrtc/370,https://issues.webrtc.org/issues/42228610 +https://crbug.com/webrtc/3700,https://issues.webrtc.org/issues/42228611 +https://crbug.com/webrtc/3701,https://issues.webrtc.org/issues/42228612 +https://crbug.com/webrtc/3702,https://issues.webrtc.org/issues/42228613 +https://crbug.com/webrtc/3703,https://issues.webrtc.org/issues/42228614 +https://crbug.com/webrtc/3704,https://issues.webrtc.org/issues/42228615 +https://crbug.com/webrtc/3705,https://issues.webrtc.org/issues/42228616 +https://crbug.com/webrtc/3706,https://issues.webrtc.org/issues/42228617 +https://crbug.com/webrtc/3707,https://issues.webrtc.org/issues/42228618 +https://crbug.com/webrtc/3708,https://issues.webrtc.org/issues/42228619 +https://crbug.com/webrtc/3709,https://issues.webrtc.org/issues/42228620 +https://crbug.com/webrtc/371,https://issues.webrtc.org/issues/42228621 +https://crbug.com/webrtc/3710,https://issues.webrtc.org/issues/42228622 +https://crbug.com/webrtc/3712,https://issues.webrtc.org/issues/42228623 +https://crbug.com/webrtc/3713,https://issues.webrtc.org/issues/42228624 +https://crbug.com/webrtc/3714,https://issues.webrtc.org/issues/42228625 +https://crbug.com/webrtc/3715,https://issues.webrtc.org/issues/42228626 +https://crbug.com/webrtc/3716,https://issues.webrtc.org/issues/42228627 +https://crbug.com/webrtc/3717,https://issues.webrtc.org/issues/42228628 +https://crbug.com/webrtc/3718,https://issues.webrtc.org/issues/42228629 +https://crbug.com/webrtc/3719,https://issues.webrtc.org/issues/42228630 +https://crbug.com/webrtc/372,https://issues.webrtc.org/issues/42228631 +https://crbug.com/webrtc/3720,https://issues.webrtc.org/issues/42228632 +https://crbug.com/webrtc/3721,https://issues.webrtc.org/issues/42228633 +https://crbug.com/webrtc/3722,https://issues.webrtc.org/issues/42228634 
+https://crbug.com/webrtc/3723,https://issues.webrtc.org/issues/42228635 +https://crbug.com/webrtc/3724,https://issues.webrtc.org/issues/42228636 +https://crbug.com/webrtc/3725,https://issues.webrtc.org/issues/42228637 +https://crbug.com/webrtc/3726,https://issues.webrtc.org/issues/42228638 +https://crbug.com/webrtc/3727,https://issues.webrtc.org/issues/42228639 +https://crbug.com/webrtc/3728,https://issues.webrtc.org/issues/42228640 +https://crbug.com/webrtc/3729,https://issues.webrtc.org/issues/42228641 +https://crbug.com/webrtc/373,https://issues.webrtc.org/issues/42228642 +https://crbug.com/webrtc/3730,https://issues.webrtc.org/issues/42228643 +https://crbug.com/webrtc/3731,https://issues.webrtc.org/issues/42228644 +https://crbug.com/webrtc/3732,https://issues.webrtc.org/issues/42228645 +https://crbug.com/webrtc/3733,https://issues.webrtc.org/issues/42228646 +https://crbug.com/webrtc/3734,https://issues.webrtc.org/issues/42228647 +https://crbug.com/webrtc/3735,https://issues.webrtc.org/issues/42228648 +https://crbug.com/webrtc/3736,https://issues.webrtc.org/issues/42228649 +https://crbug.com/webrtc/3737,https://issues.webrtc.org/issues/42228650 +https://crbug.com/webrtc/3738,https://issues.webrtc.org/issues/42228651 +https://crbug.com/webrtc/3739,https://issues.webrtc.org/issues/42228652 +https://crbug.com/webrtc/374,https://issues.webrtc.org/issues/42228653 +https://crbug.com/webrtc/3740,https://issues.webrtc.org/issues/42228654 +https://crbug.com/webrtc/3741,https://issues.webrtc.org/issues/42228655 +https://crbug.com/webrtc/3742,https://issues.webrtc.org/issues/42228656 +https://crbug.com/webrtc/3743,https://issues.webrtc.org/issues/42228657 +https://crbug.com/webrtc/3744,https://issues.webrtc.org/issues/42228658 +https://crbug.com/webrtc/3745,https://issues.webrtc.org/issues/42228659 +https://crbug.com/webrtc/3746,https://issues.webrtc.org/issues/42228660 +https://crbug.com/webrtc/3747,https://issues.webrtc.org/issues/42228661 +https://crbug.com/webrtc/3748,https://issues.webrtc.org/issues/42228662 +https://crbug.com/webrtc/3749,https://issues.webrtc.org/issues/42228663 +https://crbug.com/webrtc/375,https://issues.webrtc.org/issues/42228664 +https://crbug.com/webrtc/3750,https://issues.webrtc.org/issues/42228665 +https://crbug.com/webrtc/3751,https://issues.webrtc.org/issues/42228666 +https://crbug.com/webrtc/3752,https://issues.webrtc.org/issues/42228667 +https://crbug.com/webrtc/3753,https://issues.webrtc.org/issues/42228668 +https://crbug.com/webrtc/3754,https://issues.webrtc.org/issues/42228669 +https://crbug.com/webrtc/3755,https://issues.webrtc.org/issues/42228670 +https://crbug.com/webrtc/3756,https://issues.webrtc.org/issues/42228671 +https://crbug.com/webrtc/3757,https://issues.webrtc.org/issues/42228672 +https://crbug.com/webrtc/3758,https://issues.webrtc.org/issues/42228673 +https://crbug.com/webrtc/3759,https://issues.webrtc.org/issues/42228674 +https://crbug.com/webrtc/376,https://issues.webrtc.org/issues/42228675 +https://crbug.com/webrtc/3760,https://issues.webrtc.org/issues/42228676 +https://crbug.com/webrtc/3761,https://issues.webrtc.org/issues/42228677 +https://crbug.com/webrtc/3762,https://issues.webrtc.org/issues/42228678 +https://crbug.com/webrtc/3763,https://issues.webrtc.org/issues/42228679 +https://crbug.com/webrtc/3764,https://issues.webrtc.org/issues/42228680 +https://crbug.com/webrtc/3765,https://issues.webrtc.org/issues/42228681 +https://crbug.com/webrtc/3766,https://issues.webrtc.org/issues/42228682 
+https://crbug.com/webrtc/3767,https://issues.webrtc.org/issues/42228683 +https://crbug.com/webrtc/3768,https://issues.webrtc.org/issues/42228684 +https://crbug.com/webrtc/377,https://issues.webrtc.org/issues/42228685 +https://crbug.com/webrtc/3770,https://issues.webrtc.org/issues/42228686 +https://crbug.com/webrtc/3771,https://issues.webrtc.org/issues/42228687 +https://crbug.com/webrtc/3773,https://issues.webrtc.org/issues/42228688 +https://crbug.com/webrtc/3774,https://issues.webrtc.org/issues/42228689 +https://crbug.com/webrtc/3775,https://issues.webrtc.org/issues/42228690 +https://crbug.com/webrtc/3776,https://issues.webrtc.org/issues/42228691 +https://crbug.com/webrtc/3777,https://issues.webrtc.org/issues/42228692 +https://crbug.com/webrtc/3778,https://issues.webrtc.org/issues/42228693 +https://crbug.com/webrtc/3779,https://issues.webrtc.org/issues/42228694 +https://crbug.com/webrtc/378,https://issues.webrtc.org/issues/42228695 +https://crbug.com/webrtc/3780,https://issues.webrtc.org/issues/42228696 +https://crbug.com/webrtc/3781,https://issues.webrtc.org/issues/42228697 +https://crbug.com/webrtc/3782,https://issues.webrtc.org/issues/42228698 +https://crbug.com/webrtc/3783,https://issues.webrtc.org/issues/42228699 +https://crbug.com/webrtc/3784,https://issues.webrtc.org/issues/42228700 +https://crbug.com/webrtc/3785,https://issues.webrtc.org/issues/42228701 +https://crbug.com/webrtc/3786,https://issues.webrtc.org/issues/42228702 +https://crbug.com/webrtc/3788,https://issues.webrtc.org/issues/42228703 +https://crbug.com/webrtc/3789,https://issues.webrtc.org/issues/42228704 +https://crbug.com/webrtc/379,https://issues.webrtc.org/issues/42228705 +https://crbug.com/webrtc/3790,https://issues.webrtc.org/issues/42228706 +https://crbug.com/webrtc/3791,https://issues.webrtc.org/issues/42228707 +https://crbug.com/webrtc/3792,https://issues.webrtc.org/issues/42228708 +https://crbug.com/webrtc/3793,https://issues.webrtc.org/issues/42228709 +https://crbug.com/webrtc/3794,https://issues.webrtc.org/issues/42228710 +https://crbug.com/webrtc/3795,https://issues.webrtc.org/issues/42228711 +https://crbug.com/webrtc/3796,https://issues.webrtc.org/issues/42228712 +https://crbug.com/webrtc/3797,https://issues.webrtc.org/issues/42228713 +https://crbug.com/webrtc/3798,https://issues.webrtc.org/issues/42228714 +https://crbug.com/webrtc/3799,https://issues.webrtc.org/issues/42228715 +https://crbug.com/webrtc/38,https://issues.webrtc.org/issues/42228716 +https://crbug.com/webrtc/380,https://issues.webrtc.org/issues/42228717 +https://crbug.com/webrtc/3800,https://issues.webrtc.org/issues/42228718 +https://crbug.com/webrtc/3801,https://issues.webrtc.org/issues/42228719 +https://crbug.com/webrtc/3802,https://issues.webrtc.org/issues/42228720 +https://crbug.com/webrtc/3803,https://issues.webrtc.org/issues/42228721 +https://crbug.com/webrtc/3804,https://issues.webrtc.org/issues/42228722 +https://crbug.com/webrtc/3805,https://issues.webrtc.org/issues/42228723 +https://crbug.com/webrtc/3806,https://issues.webrtc.org/issues/42228724 +https://crbug.com/webrtc/3807,https://issues.webrtc.org/issues/42228725 +https://crbug.com/webrtc/3808,https://issues.webrtc.org/issues/42228726 +https://crbug.com/webrtc/3809,https://issues.webrtc.org/issues/42228727 +https://crbug.com/webrtc/381,https://issues.webrtc.org/issues/42228728 +https://crbug.com/webrtc/3810,https://issues.webrtc.org/issues/42228729 +https://crbug.com/webrtc/3811,https://issues.webrtc.org/issues/42228730 
+https://crbug.com/webrtc/3812,https://issues.webrtc.org/issues/42228731 +https://crbug.com/webrtc/3813,https://issues.webrtc.org/issues/42228732 +https://crbug.com/webrtc/3814,https://issues.webrtc.org/issues/42228733 +https://crbug.com/webrtc/3815,https://issues.webrtc.org/issues/42228734 +https://crbug.com/webrtc/3816,https://issues.webrtc.org/issues/42228735 +https://crbug.com/webrtc/3818,https://issues.webrtc.org/issues/42228736 +https://crbug.com/webrtc/3819,https://issues.webrtc.org/issues/42228737 +https://crbug.com/webrtc/382,https://issues.webrtc.org/issues/42228738 +https://crbug.com/webrtc/3820,https://issues.webrtc.org/issues/42228739 +https://crbug.com/webrtc/3821,https://issues.webrtc.org/issues/42228740 +https://crbug.com/webrtc/3822,https://issues.webrtc.org/issues/42228741 +https://crbug.com/webrtc/3823,https://issues.webrtc.org/issues/42228742 +https://crbug.com/webrtc/3824,https://issues.webrtc.org/issues/42228743 +https://crbug.com/webrtc/3825,https://issues.webrtc.org/issues/42228744 +https://crbug.com/webrtc/3826,https://issues.webrtc.org/issues/42228745 +https://crbug.com/webrtc/3827,https://issues.webrtc.org/issues/42228746 +https://crbug.com/webrtc/3828,https://issues.webrtc.org/issues/42228747 +https://crbug.com/webrtc/3829,https://issues.webrtc.org/issues/42228748 +https://crbug.com/webrtc/383,https://issues.webrtc.org/issues/42228749 +https://crbug.com/webrtc/3830,https://issues.webrtc.org/issues/42228750 +https://crbug.com/webrtc/3831,https://issues.webrtc.org/issues/42228751 +https://crbug.com/webrtc/3832,https://issues.webrtc.org/issues/42228752 +https://crbug.com/webrtc/3833,https://issues.webrtc.org/issues/42228753 +https://crbug.com/webrtc/3834,https://issues.webrtc.org/issues/42228754 +https://crbug.com/webrtc/3835,https://issues.webrtc.org/issues/42228755 +https://crbug.com/webrtc/3836,https://issues.webrtc.org/issues/42228756 +https://crbug.com/webrtc/3837,https://issues.webrtc.org/issues/42228757 +https://crbug.com/webrtc/3838,https://issues.webrtc.org/issues/42228758 +https://crbug.com/webrtc/3839,https://issues.webrtc.org/issues/42228759 +https://crbug.com/webrtc/384,https://issues.webrtc.org/issues/42228760 +https://crbug.com/webrtc/3840,https://issues.webrtc.org/issues/42228761 +https://crbug.com/webrtc/3841,https://issues.webrtc.org/issues/42228762 +https://crbug.com/webrtc/3842,https://issues.webrtc.org/issues/42228763 +https://crbug.com/webrtc/3843,https://issues.webrtc.org/issues/42228764 +https://crbug.com/webrtc/3844,https://issues.webrtc.org/issues/42228765 +https://crbug.com/webrtc/3845,https://issues.webrtc.org/issues/42228766 +https://crbug.com/webrtc/3846,https://issues.webrtc.org/issues/42228767 +https://crbug.com/webrtc/3847,https://issues.webrtc.org/issues/42228768 +https://crbug.com/webrtc/3848,https://issues.webrtc.org/issues/42228769 +https://crbug.com/webrtc/3849,https://issues.webrtc.org/issues/42228770 +https://crbug.com/webrtc/385,https://issues.webrtc.org/issues/42228771 +https://crbug.com/webrtc/3850,https://issues.webrtc.org/issues/42228772 +https://crbug.com/webrtc/3851,https://issues.webrtc.org/issues/42228773 +https://crbug.com/webrtc/3852,https://issues.webrtc.org/issues/42228774 +https://crbug.com/webrtc/3853,https://issues.webrtc.org/issues/42228775 +https://crbug.com/webrtc/3854,https://issues.webrtc.org/issues/42228776 +https://crbug.com/webrtc/3855,https://issues.webrtc.org/issues/42228777 +https://crbug.com/webrtc/3856,https://issues.webrtc.org/issues/42228778 
+https://crbug.com/webrtc/3857,https://issues.webrtc.org/issues/42228779 +https://crbug.com/webrtc/3858,https://issues.webrtc.org/issues/42228780 +https://crbug.com/webrtc/3859,https://issues.webrtc.org/issues/42228781 +https://crbug.com/webrtc/386,https://issues.webrtc.org/issues/42228782 +https://crbug.com/webrtc/3860,https://issues.webrtc.org/issues/42228783 +https://crbug.com/webrtc/3861,https://issues.webrtc.org/issues/42228784 +https://crbug.com/webrtc/3862,https://issues.webrtc.org/issues/42228785 +https://crbug.com/webrtc/3863,https://issues.webrtc.org/issues/42228786 +https://crbug.com/webrtc/3864,https://issues.webrtc.org/issues/42228787 +https://crbug.com/webrtc/3865,https://issues.webrtc.org/issues/42228788 +https://crbug.com/webrtc/3866,https://issues.webrtc.org/issues/42228789 +https://crbug.com/webrtc/3867,https://issues.webrtc.org/issues/42228790 +https://crbug.com/webrtc/3868,https://issues.webrtc.org/issues/42228791 +https://crbug.com/webrtc/3869,https://issues.webrtc.org/issues/42228792 +https://crbug.com/webrtc/387,https://issues.webrtc.org/issues/42228793 +https://crbug.com/webrtc/3870,https://issues.webrtc.org/issues/42228794 +https://crbug.com/webrtc/3871,https://issues.webrtc.org/issues/42228795 +https://crbug.com/webrtc/3872,https://issues.webrtc.org/issues/42228796 +https://crbug.com/webrtc/3873,https://issues.webrtc.org/issues/42228797 +https://crbug.com/webrtc/3874,https://issues.webrtc.org/issues/42228798 +https://crbug.com/webrtc/3875,https://issues.webrtc.org/issues/42228799 +https://crbug.com/webrtc/3876,https://issues.webrtc.org/issues/42228800 +https://crbug.com/webrtc/3877,https://issues.webrtc.org/issues/42228801 +https://crbug.com/webrtc/3878,https://issues.webrtc.org/issues/42228802 +https://crbug.com/webrtc/388,https://issues.webrtc.org/issues/42228803 +https://crbug.com/webrtc/3880,https://issues.webrtc.org/issues/42228804 +https://crbug.com/webrtc/3881,https://issues.webrtc.org/issues/42228805 +https://crbug.com/webrtc/3882,https://issues.webrtc.org/issues/42228806 +https://crbug.com/webrtc/3883,https://issues.webrtc.org/issues/42228807 +https://crbug.com/webrtc/3885,https://issues.webrtc.org/issues/42228808 +https://crbug.com/webrtc/3886,https://issues.webrtc.org/issues/42228809 +https://crbug.com/webrtc/3887,https://issues.webrtc.org/issues/42228810 +https://crbug.com/webrtc/3888,https://issues.webrtc.org/issues/42228811 +https://crbug.com/webrtc/3889,https://issues.webrtc.org/issues/42228812 +https://crbug.com/webrtc/389,https://issues.webrtc.org/issues/42228813 +https://crbug.com/webrtc/3890,https://issues.webrtc.org/issues/42228814 +https://crbug.com/webrtc/3891,https://issues.webrtc.org/issues/42228815 +https://crbug.com/webrtc/3892,https://issues.webrtc.org/issues/42228816 +https://crbug.com/webrtc/3893,https://issues.webrtc.org/issues/42228817 +https://crbug.com/webrtc/3894,https://issues.webrtc.org/issues/42228818 +https://crbug.com/webrtc/3895,https://issues.webrtc.org/issues/42228819 +https://crbug.com/webrtc/3896,https://issues.webrtc.org/issues/42228820 +https://crbug.com/webrtc/3897,https://issues.webrtc.org/issues/42228821 +https://crbug.com/webrtc/3898,https://issues.webrtc.org/issues/42228822 +https://crbug.com/webrtc/3899,https://issues.webrtc.org/issues/42228823 +https://crbug.com/webrtc/39,https://issues.webrtc.org/issues/42228824 +https://crbug.com/webrtc/390,https://issues.webrtc.org/issues/42228825 +https://crbug.com/webrtc/3900,https://issues.webrtc.org/issues/42228826 
+https://crbug.com/webrtc/3901,https://issues.webrtc.org/issues/42228827 +https://crbug.com/webrtc/3902,https://issues.webrtc.org/issues/42228828 +https://crbug.com/webrtc/3903,https://issues.webrtc.org/issues/42228829 +https://crbug.com/webrtc/3904,https://issues.webrtc.org/issues/42228830 +https://crbug.com/webrtc/3905,https://issues.webrtc.org/issues/42228831 +https://crbug.com/webrtc/3906,https://issues.webrtc.org/issues/42228832 +https://crbug.com/webrtc/3907,https://issues.webrtc.org/issues/42228833 +https://crbug.com/webrtc/3908,https://issues.webrtc.org/issues/42228834 +https://crbug.com/webrtc/3909,https://issues.webrtc.org/issues/42228835 +https://crbug.com/webrtc/391,https://issues.webrtc.org/issues/42228836 +https://crbug.com/webrtc/3910,https://issues.webrtc.org/issues/42228837 +https://crbug.com/webrtc/3911,https://issues.webrtc.org/issues/42228838 +https://crbug.com/webrtc/3912,https://issues.webrtc.org/issues/42228839 +https://crbug.com/webrtc/3913,https://issues.webrtc.org/issues/42228840 +https://crbug.com/webrtc/3914,https://issues.webrtc.org/issues/42228841 +https://crbug.com/webrtc/3915,https://issues.webrtc.org/issues/42228842 +https://crbug.com/webrtc/3916,https://issues.webrtc.org/issues/42228843 +https://crbug.com/webrtc/3917,https://issues.webrtc.org/issues/42228844 +https://crbug.com/webrtc/3918,https://issues.webrtc.org/issues/42228845 +https://crbug.com/webrtc/3919,https://issues.webrtc.org/issues/42228846 +https://crbug.com/webrtc/392,https://issues.webrtc.org/issues/42228847 +https://crbug.com/webrtc/3920,https://issues.webrtc.org/issues/42228848 +https://crbug.com/webrtc/3921,https://issues.webrtc.org/issues/42228849 +https://crbug.com/webrtc/3922,https://issues.webrtc.org/issues/42228850 +https://crbug.com/webrtc/3923,https://issues.webrtc.org/issues/42228851 +https://crbug.com/webrtc/3924,https://issues.webrtc.org/issues/42228852 +https://crbug.com/webrtc/3925,https://issues.webrtc.org/issues/42228853 +https://crbug.com/webrtc/3926,https://issues.webrtc.org/issues/42228854 +https://crbug.com/webrtc/3927,https://issues.webrtc.org/issues/42228855 +https://crbug.com/webrtc/3928,https://issues.webrtc.org/issues/42228856 +https://crbug.com/webrtc/3929,https://issues.webrtc.org/issues/42228857 +https://crbug.com/webrtc/393,https://issues.webrtc.org/issues/42228858 +https://crbug.com/webrtc/3930,https://issues.webrtc.org/issues/42228859 +https://crbug.com/webrtc/3931,https://issues.webrtc.org/issues/42228860 +https://crbug.com/webrtc/3932,https://issues.webrtc.org/issues/42228861 +https://crbug.com/webrtc/3933,https://issues.webrtc.org/issues/42228862 +https://crbug.com/webrtc/3934,https://issues.webrtc.org/issues/42228863 +https://crbug.com/webrtc/3935,https://issues.webrtc.org/issues/42228864 +https://crbug.com/webrtc/3936,https://issues.webrtc.org/issues/42228865 +https://crbug.com/webrtc/3937,https://issues.webrtc.org/issues/42228866 +https://crbug.com/webrtc/3938,https://issues.webrtc.org/issues/42228867 +https://crbug.com/webrtc/3939,https://issues.webrtc.org/issues/42228868 +https://crbug.com/webrtc/394,https://issues.webrtc.org/issues/42228869 +https://crbug.com/webrtc/3941,https://issues.webrtc.org/issues/42228870 +https://crbug.com/webrtc/3942,https://issues.webrtc.org/issues/42228871 +https://crbug.com/webrtc/3943,https://issues.webrtc.org/issues/42228872 +https://crbug.com/webrtc/3944,https://issues.webrtc.org/issues/42228873 +https://crbug.com/webrtc/3945,https://issues.webrtc.org/issues/42228874 
+https://crbug.com/webrtc/3946,https://issues.webrtc.org/issues/42228875 +https://crbug.com/webrtc/3947,https://issues.webrtc.org/issues/42228876 +https://crbug.com/webrtc/3948,https://issues.webrtc.org/issues/42228877 +https://crbug.com/webrtc/3949,https://issues.webrtc.org/issues/42228878 +https://crbug.com/webrtc/395,https://issues.webrtc.org/issues/42228879 +https://crbug.com/webrtc/3950,https://issues.webrtc.org/issues/42228880 +https://crbug.com/webrtc/3951,https://issues.webrtc.org/issues/42228881 +https://crbug.com/webrtc/3952,https://issues.webrtc.org/issues/42228882 +https://crbug.com/webrtc/3953,https://issues.webrtc.org/issues/42228883 +https://crbug.com/webrtc/3954,https://issues.webrtc.org/issues/42228884 +https://crbug.com/webrtc/3955,https://issues.webrtc.org/issues/42228885 +https://crbug.com/webrtc/3956,https://issues.webrtc.org/issues/42228886 +https://crbug.com/webrtc/3957,https://issues.webrtc.org/issues/42228887 +https://crbug.com/webrtc/3958,https://issues.webrtc.org/issues/42228888 +https://crbug.com/webrtc/3959,https://issues.webrtc.org/issues/42228889 +https://crbug.com/webrtc/396,https://issues.webrtc.org/issues/42228890 +https://crbug.com/webrtc/3960,https://issues.webrtc.org/issues/42228891 +https://crbug.com/webrtc/3961,https://issues.webrtc.org/issues/42228892 +https://crbug.com/webrtc/3962,https://issues.webrtc.org/issues/42228893 +https://crbug.com/webrtc/3963,https://issues.webrtc.org/issues/42228894 +https://crbug.com/webrtc/3964,https://issues.webrtc.org/issues/42228895 +https://crbug.com/webrtc/3965,https://issues.webrtc.org/issues/42228896 +https://crbug.com/webrtc/3966,https://issues.webrtc.org/issues/42228897 +https://crbug.com/webrtc/3967,https://issues.webrtc.org/issues/42228898 +https://crbug.com/webrtc/3968,https://issues.webrtc.org/issues/42228899 +https://crbug.com/webrtc/397,https://issues.webrtc.org/issues/42228900 +https://crbug.com/webrtc/3971,https://issues.webrtc.org/issues/42228901 +https://crbug.com/webrtc/3972,https://issues.webrtc.org/issues/42228902 +https://crbug.com/webrtc/3973,https://issues.webrtc.org/issues/42228903 +https://crbug.com/webrtc/3974,https://issues.webrtc.org/issues/42228904 +https://crbug.com/webrtc/3975,https://issues.webrtc.org/issues/42228905 +https://crbug.com/webrtc/3976,https://issues.webrtc.org/issues/42228906 +https://crbug.com/webrtc/3977,https://issues.webrtc.org/issues/42228907 +https://crbug.com/webrtc/3978,https://issues.webrtc.org/issues/42228908 +https://crbug.com/webrtc/3979,https://issues.webrtc.org/issues/42228909 +https://crbug.com/webrtc/398,https://issues.webrtc.org/issues/42228910 +https://crbug.com/webrtc/3980,https://issues.webrtc.org/issues/42228911 +https://crbug.com/webrtc/3981,https://issues.webrtc.org/issues/42228912 +https://crbug.com/webrtc/3982,https://issues.webrtc.org/issues/42228913 +https://crbug.com/webrtc/3983,https://issues.webrtc.org/issues/42228914 +https://crbug.com/webrtc/3984,https://issues.webrtc.org/issues/42228915 +https://crbug.com/webrtc/3985,https://issues.webrtc.org/issues/42228916 +https://crbug.com/webrtc/3986,https://issues.webrtc.org/issues/42228917 +https://crbug.com/webrtc/3987,https://issues.webrtc.org/issues/42228918 +https://crbug.com/webrtc/3988,https://issues.webrtc.org/issues/42228919 +https://crbug.com/webrtc/3989,https://issues.webrtc.org/issues/42228920 +https://crbug.com/webrtc/399,https://issues.webrtc.org/issues/42228921 +https://crbug.com/webrtc/3990,https://issues.webrtc.org/issues/42228922 
+https://crbug.com/webrtc/3991,https://issues.webrtc.org/issues/42228923 +https://crbug.com/webrtc/3992,https://issues.webrtc.org/issues/42228924 +https://crbug.com/webrtc/3993,https://issues.webrtc.org/issues/42228925 +https://crbug.com/webrtc/3995,https://issues.webrtc.org/issues/42228926 +https://crbug.com/webrtc/3996,https://issues.webrtc.org/issues/42228927 +https://crbug.com/webrtc/3997,https://issues.webrtc.org/issues/42228928 +https://crbug.com/webrtc/3998,https://issues.webrtc.org/issues/42228929 +https://crbug.com/webrtc/3999,https://issues.webrtc.org/issues/42228930 +https://crbug.com/webrtc/4,https://issues.webrtc.org/issues/42228931 +https://crbug.com/webrtc/40,https://issues.webrtc.org/issues/42228932 +https://crbug.com/webrtc/400,https://issues.webrtc.org/issues/42228933 +https://crbug.com/webrtc/4000,https://issues.webrtc.org/issues/42228934 +https://crbug.com/webrtc/4001,https://issues.webrtc.org/issues/42228935 +https://crbug.com/webrtc/4002,https://issues.webrtc.org/issues/42228936 +https://crbug.com/webrtc/4003,https://issues.webrtc.org/issues/42228937 +https://crbug.com/webrtc/4004,https://issues.webrtc.org/issues/42228938 +https://crbug.com/webrtc/4005,https://issues.webrtc.org/issues/42228939 +https://crbug.com/webrtc/4006,https://issues.webrtc.org/issues/42228940 +https://crbug.com/webrtc/4007,https://issues.webrtc.org/issues/42228941 +https://crbug.com/webrtc/4008,https://issues.webrtc.org/issues/42228942 +https://crbug.com/webrtc/4009,https://issues.webrtc.org/issues/42228943 +https://crbug.com/webrtc/401,https://issues.webrtc.org/issues/42228944 +https://crbug.com/webrtc/4010,https://issues.webrtc.org/issues/42228945 +https://crbug.com/webrtc/4011,https://issues.webrtc.org/issues/42228946 +https://crbug.com/webrtc/4012,https://issues.webrtc.org/issues/42228947 +https://crbug.com/webrtc/4013,https://issues.webrtc.org/issues/42228948 +https://crbug.com/webrtc/4014,https://issues.webrtc.org/issues/42228949 +https://crbug.com/webrtc/4015,https://issues.webrtc.org/issues/42228950 +https://crbug.com/webrtc/4016,https://issues.webrtc.org/issues/42228951 +https://crbug.com/webrtc/4017,https://issues.webrtc.org/issues/42228952 +https://crbug.com/webrtc/4019,https://issues.webrtc.org/issues/42228953 +https://crbug.com/webrtc/402,https://issues.webrtc.org/issues/42228954 +https://crbug.com/webrtc/4020,https://issues.webrtc.org/issues/42228955 +https://crbug.com/webrtc/4021,https://issues.webrtc.org/issues/42228956 +https://crbug.com/webrtc/4022,https://issues.webrtc.org/issues/42228957 +https://crbug.com/webrtc/4023,https://issues.webrtc.org/issues/42228958 +https://crbug.com/webrtc/4024,https://issues.webrtc.org/issues/42228959 +https://crbug.com/webrtc/4025,https://issues.webrtc.org/issues/42228960 +https://crbug.com/webrtc/4026,https://issues.webrtc.org/issues/42228961 +https://crbug.com/webrtc/4027,https://issues.webrtc.org/issues/42228962 +https://crbug.com/webrtc/4028,https://issues.webrtc.org/issues/42228963 +https://crbug.com/webrtc/4029,https://issues.webrtc.org/issues/42228964 +https://crbug.com/webrtc/403,https://issues.webrtc.org/issues/42228965 +https://crbug.com/webrtc/4030,https://issues.webrtc.org/issues/42228966 +https://crbug.com/webrtc/4031,https://issues.webrtc.org/issues/42228967 +https://crbug.com/webrtc/4032,https://issues.webrtc.org/issues/42228968 +https://crbug.com/webrtc/4034,https://issues.webrtc.org/issues/42228969 +https://crbug.com/webrtc/4035,https://issues.webrtc.org/issues/42228970 
+https://crbug.com/webrtc/4036,https://issues.webrtc.org/issues/42228971 +https://crbug.com/webrtc/4037,https://issues.webrtc.org/issues/42228972 +https://crbug.com/webrtc/4038,https://issues.webrtc.org/issues/42228973 +https://crbug.com/webrtc/4039,https://issues.webrtc.org/issues/42228974 +https://crbug.com/webrtc/404,https://issues.webrtc.org/issues/42228975 +https://crbug.com/webrtc/4040,https://issues.webrtc.org/issues/42228976 +https://crbug.com/webrtc/4041,https://issues.webrtc.org/issues/42228977 +https://crbug.com/webrtc/4042,https://issues.webrtc.org/issues/42228978 +https://crbug.com/webrtc/4043,https://issues.webrtc.org/issues/42228979 +https://crbug.com/webrtc/4044,https://issues.webrtc.org/issues/42228980 +https://crbug.com/webrtc/4045,https://issues.webrtc.org/issues/42228981 +https://crbug.com/webrtc/4046,https://issues.webrtc.org/issues/42228982 +https://crbug.com/webrtc/4047,https://issues.webrtc.org/issues/42228983 +https://crbug.com/webrtc/4048,https://issues.webrtc.org/issues/42228984 +https://crbug.com/webrtc/4049,https://issues.webrtc.org/issues/42228985 +https://crbug.com/webrtc/405,https://issues.webrtc.org/issues/42228986 +https://crbug.com/webrtc/4050,https://issues.webrtc.org/issues/42228987 +https://crbug.com/webrtc/4051,https://issues.webrtc.org/issues/42228988 +https://crbug.com/webrtc/4052,https://issues.webrtc.org/issues/42228989 +https://crbug.com/webrtc/4053,https://issues.webrtc.org/issues/42228990 +https://crbug.com/webrtc/4054,https://issues.webrtc.org/issues/42228991 +https://crbug.com/webrtc/4055,https://issues.webrtc.org/issues/42228992 +https://crbug.com/webrtc/4056,https://issues.webrtc.org/issues/42228993 +https://crbug.com/webrtc/4057,https://issues.webrtc.org/issues/42228994 +https://crbug.com/webrtc/4058,https://issues.webrtc.org/issues/42228995 +https://crbug.com/webrtc/4059,https://issues.webrtc.org/issues/42228996 +https://crbug.com/webrtc/406,https://issues.webrtc.org/issues/42228997 +https://crbug.com/webrtc/4060,https://issues.webrtc.org/issues/42228998 +https://crbug.com/webrtc/4061,https://issues.webrtc.org/issues/42228999 +https://crbug.com/webrtc/4062,https://issues.webrtc.org/issues/42229000 +https://crbug.com/webrtc/4063,https://issues.webrtc.org/issues/42229001 +https://crbug.com/webrtc/4064,https://issues.webrtc.org/issues/42229002 +https://crbug.com/webrtc/4065,https://issues.webrtc.org/issues/42229003 +https://crbug.com/webrtc/4066,https://issues.webrtc.org/issues/42229004 +https://crbug.com/webrtc/4067,https://issues.webrtc.org/issues/42229005 +https://crbug.com/webrtc/4068,https://issues.webrtc.org/issues/42229006 +https://crbug.com/webrtc/4069,https://issues.webrtc.org/issues/42229007 +https://crbug.com/webrtc/407,https://issues.webrtc.org/issues/42229008 +https://crbug.com/webrtc/4070,https://issues.webrtc.org/issues/42229009 +https://crbug.com/webrtc/4071,https://issues.webrtc.org/issues/42229010 +https://crbug.com/webrtc/4072,https://issues.webrtc.org/issues/42229011 +https://crbug.com/webrtc/4073,https://issues.webrtc.org/issues/42229012 +https://crbug.com/webrtc/4074,https://issues.webrtc.org/issues/42229013 +https://crbug.com/webrtc/4075,https://issues.webrtc.org/issues/42229014 +https://crbug.com/webrtc/4077,https://issues.webrtc.org/issues/42229015 +https://crbug.com/webrtc/4078,https://issues.webrtc.org/issues/42229016 +https://crbug.com/webrtc/4079,https://issues.webrtc.org/issues/42229017 +https://crbug.com/webrtc/408,https://issues.webrtc.org/issues/42229018 
+https://crbug.com/webrtc/4080,https://issues.webrtc.org/issues/42229019 +https://crbug.com/webrtc/4081,https://issues.webrtc.org/issues/42229020 +https://crbug.com/webrtc/4082,https://issues.webrtc.org/issues/42229021 +https://crbug.com/webrtc/4083,https://issues.webrtc.org/issues/42229022 +https://crbug.com/webrtc/4084,https://issues.webrtc.org/issues/42229023 +https://crbug.com/webrtc/4085,https://issues.webrtc.org/issues/42229024 +https://crbug.com/webrtc/4086,https://issues.webrtc.org/issues/42229025 +https://crbug.com/webrtc/4087,https://issues.webrtc.org/issues/42229026 +https://crbug.com/webrtc/4088,https://issues.webrtc.org/issues/42229027 +https://crbug.com/webrtc/4089,https://issues.webrtc.org/issues/42229028 +https://crbug.com/webrtc/409,https://issues.webrtc.org/issues/42229029 +https://crbug.com/webrtc/4090,https://issues.webrtc.org/issues/42229030 +https://crbug.com/webrtc/4091,https://issues.webrtc.org/issues/42229031 +https://crbug.com/webrtc/4092,https://issues.webrtc.org/issues/42229032 +https://crbug.com/webrtc/4093,https://issues.webrtc.org/issues/42229033 +https://crbug.com/webrtc/4094,https://issues.webrtc.org/issues/42229034 +https://crbug.com/webrtc/4095,https://issues.webrtc.org/issues/42229035 +https://crbug.com/webrtc/4097,https://issues.webrtc.org/issues/42229036 +https://crbug.com/webrtc/4098,https://issues.webrtc.org/issues/42229037 +https://crbug.com/webrtc/4099,https://issues.webrtc.org/issues/42229038 +https://crbug.com/webrtc/41,https://issues.webrtc.org/issues/42229039 +https://crbug.com/webrtc/410,https://issues.webrtc.org/issues/42229040 +https://crbug.com/webrtc/4100,https://issues.webrtc.org/issues/42229041 +https://crbug.com/webrtc/4101,https://issues.webrtc.org/issues/42229042 +https://crbug.com/webrtc/4102,https://issues.webrtc.org/issues/42229043 +https://crbug.com/webrtc/4103,https://issues.webrtc.org/issues/42229044 +https://crbug.com/webrtc/4104,https://issues.webrtc.org/issues/42229045 +https://crbug.com/webrtc/4107,https://issues.webrtc.org/issues/42229046 +https://crbug.com/webrtc/4108,https://issues.webrtc.org/issues/42229047 +https://crbug.com/webrtc/4109,https://issues.webrtc.org/issues/42229048 +https://crbug.com/webrtc/411,https://issues.webrtc.org/issues/42229049 +https://crbug.com/webrtc/4110,https://issues.webrtc.org/issues/42229050 +https://crbug.com/webrtc/4111,https://issues.webrtc.org/issues/42229051 +https://crbug.com/webrtc/4112,https://issues.webrtc.org/issues/42229052 +https://crbug.com/webrtc/4113,https://issues.webrtc.org/issues/42229053 +https://crbug.com/webrtc/4114,https://issues.webrtc.org/issues/42229054 +https://crbug.com/webrtc/4115,https://issues.webrtc.org/issues/42229055 +https://crbug.com/webrtc/4116,https://issues.webrtc.org/issues/42229056 +https://crbug.com/webrtc/4118,https://issues.webrtc.org/issues/42229057 +https://crbug.com/webrtc/4119,https://issues.webrtc.org/issues/42229058 +https://crbug.com/webrtc/412,https://issues.webrtc.org/issues/42229059 +https://crbug.com/webrtc/4120,https://issues.webrtc.org/issues/42229060 +https://crbug.com/webrtc/4121,https://issues.webrtc.org/issues/42229061 +https://crbug.com/webrtc/4122,https://issues.webrtc.org/issues/42229062 +https://crbug.com/webrtc/4123,https://issues.webrtc.org/issues/42229063 +https://crbug.com/webrtc/4124,https://issues.webrtc.org/issues/42229064 +https://crbug.com/webrtc/4125,https://issues.webrtc.org/issues/42229065 +https://crbug.com/webrtc/4126,https://issues.webrtc.org/issues/42229066 
+https://crbug.com/webrtc/4127,https://issues.webrtc.org/issues/42229067 +https://crbug.com/webrtc/4128,https://issues.webrtc.org/issues/42229068 +https://crbug.com/webrtc/4129,https://issues.webrtc.org/issues/42229069 +https://crbug.com/webrtc/413,https://issues.webrtc.org/issues/42229070 +https://crbug.com/webrtc/4130,https://issues.webrtc.org/issues/42229071 +https://crbug.com/webrtc/4131,https://issues.webrtc.org/issues/42229072 +https://crbug.com/webrtc/4132,https://issues.webrtc.org/issues/42229073 +https://crbug.com/webrtc/4133,https://issues.webrtc.org/issues/42229074 +https://crbug.com/webrtc/4134,https://issues.webrtc.org/issues/42229075 +https://crbug.com/webrtc/4135,https://issues.webrtc.org/issues/42229076 +https://crbug.com/webrtc/4136,https://issues.webrtc.org/issues/42229077 +https://crbug.com/webrtc/4139,https://issues.webrtc.org/issues/42229078 +https://crbug.com/webrtc/414,https://issues.webrtc.org/issues/42229079 +https://crbug.com/webrtc/4140,https://issues.webrtc.org/issues/42229080 +https://crbug.com/webrtc/4141,https://issues.webrtc.org/issues/42229081 +https://crbug.com/webrtc/4142,https://issues.webrtc.org/issues/42229082 +https://crbug.com/webrtc/4143,https://issues.webrtc.org/issues/42229083 +https://crbug.com/webrtc/4145,https://issues.webrtc.org/issues/42229084 +https://crbug.com/webrtc/4146,https://issues.webrtc.org/issues/42229085 +https://crbug.com/webrtc/4147,https://issues.webrtc.org/issues/42229086 +https://crbug.com/webrtc/4148,https://issues.webrtc.org/issues/42229087 +https://crbug.com/webrtc/4149,https://issues.webrtc.org/issues/42229088 +https://crbug.com/webrtc/415,https://issues.webrtc.org/issues/42229089 +https://crbug.com/webrtc/4150,https://issues.webrtc.org/issues/42229090 +https://crbug.com/webrtc/4151,https://issues.webrtc.org/issues/42229091 +https://crbug.com/webrtc/4152,https://issues.webrtc.org/issues/42229092 +https://crbug.com/webrtc/4153,https://issues.webrtc.org/issues/42229093 +https://crbug.com/webrtc/4154,https://issues.webrtc.org/issues/42229094 +https://crbug.com/webrtc/4155,https://issues.webrtc.org/issues/42229095 +https://crbug.com/webrtc/4156,https://issues.webrtc.org/issues/42229096 +https://crbug.com/webrtc/4157,https://issues.webrtc.org/issues/42229097 +https://crbug.com/webrtc/4158,https://issues.webrtc.org/issues/42229098 +https://crbug.com/webrtc/4159,https://issues.webrtc.org/issues/42229099 +https://crbug.com/webrtc/416,https://issues.webrtc.org/issues/42229100 +https://crbug.com/webrtc/4160,https://issues.webrtc.org/issues/42229101 +https://crbug.com/webrtc/4161,https://issues.webrtc.org/issues/42229102 +https://crbug.com/webrtc/4162,https://issues.webrtc.org/issues/42229103 +https://crbug.com/webrtc/4163,https://issues.webrtc.org/issues/42229104 +https://crbug.com/webrtc/4164,https://issues.webrtc.org/issues/42229105 +https://crbug.com/webrtc/4165,https://issues.webrtc.org/issues/42229106 +https://crbug.com/webrtc/4166,https://issues.webrtc.org/issues/42229107 +https://crbug.com/webrtc/4167,https://issues.webrtc.org/issues/42229108 +https://crbug.com/webrtc/4168,https://issues.webrtc.org/issues/42229109 +https://crbug.com/webrtc/4169,https://issues.webrtc.org/issues/42229110 +https://crbug.com/webrtc/417,https://issues.webrtc.org/issues/42229111 +https://crbug.com/webrtc/4170,https://issues.webrtc.org/issues/42229112 +https://crbug.com/webrtc/4171,https://issues.webrtc.org/issues/42229113 +https://crbug.com/webrtc/4172,https://issues.webrtc.org/issues/42229114 
+https://crbug.com/webrtc/4173,https://issues.webrtc.org/issues/42229115 +https://crbug.com/webrtc/4174,https://issues.webrtc.org/issues/42229116 +https://crbug.com/webrtc/4175,https://issues.webrtc.org/issues/42229117 +https://crbug.com/webrtc/4176,https://issues.webrtc.org/issues/42229118 +https://crbug.com/webrtc/4177,https://issues.webrtc.org/issues/42229119 +https://crbug.com/webrtc/4178,https://issues.webrtc.org/issues/42229120 +https://crbug.com/webrtc/4179,https://issues.webrtc.org/issues/42229121 +https://crbug.com/webrtc/418,https://issues.webrtc.org/issues/42229122 +https://crbug.com/webrtc/4180,https://issues.webrtc.org/issues/42229123 +https://crbug.com/webrtc/4181,https://issues.webrtc.org/issues/42229124 +https://crbug.com/webrtc/4182,https://issues.webrtc.org/issues/42229125 +https://crbug.com/webrtc/4184,https://issues.webrtc.org/issues/42229126 +https://crbug.com/webrtc/4185,https://issues.webrtc.org/issues/42229127 +https://crbug.com/webrtc/4186,https://issues.webrtc.org/issues/42229128 +https://crbug.com/webrtc/4187,https://issues.webrtc.org/issues/42229129 +https://crbug.com/webrtc/4188,https://issues.webrtc.org/issues/42229130 +https://crbug.com/webrtc/4189,https://issues.webrtc.org/issues/42229131 +https://crbug.com/webrtc/419,https://issues.webrtc.org/issues/42229132 +https://crbug.com/webrtc/4190,https://issues.webrtc.org/issues/42229133 +https://crbug.com/webrtc/4191,https://issues.webrtc.org/issues/42229134 +https://crbug.com/webrtc/4192,https://issues.webrtc.org/issues/42229135 +https://crbug.com/webrtc/4193,https://issues.webrtc.org/issues/42229136 +https://crbug.com/webrtc/4194,https://issues.webrtc.org/issues/42229137 +https://crbug.com/webrtc/4195,https://issues.webrtc.org/issues/42229138 +https://crbug.com/webrtc/4196,https://issues.webrtc.org/issues/42229139 +https://crbug.com/webrtc/4197,https://issues.webrtc.org/issues/42229140 +https://crbug.com/webrtc/4198,https://issues.webrtc.org/issues/42229141 +https://crbug.com/webrtc/4199,https://issues.webrtc.org/issues/42229142 +https://crbug.com/webrtc/42,https://issues.webrtc.org/issues/42229143 +https://crbug.com/webrtc/420,https://issues.webrtc.org/issues/42229144 +https://crbug.com/webrtc/4200,https://issues.webrtc.org/issues/42229145 +https://crbug.com/webrtc/4202,https://issues.webrtc.org/issues/42229146 +https://crbug.com/webrtc/4203,https://issues.webrtc.org/issues/42229147 +https://crbug.com/webrtc/4204,https://issues.webrtc.org/issues/42229148 +https://crbug.com/webrtc/4205,https://issues.webrtc.org/issues/42229149 +https://crbug.com/webrtc/4206,https://issues.webrtc.org/issues/42229150 +https://crbug.com/webrtc/4207,https://issues.webrtc.org/issues/42229151 +https://crbug.com/webrtc/4208,https://issues.webrtc.org/issues/42229152 +https://crbug.com/webrtc/4209,https://issues.webrtc.org/issues/42229153 +https://crbug.com/webrtc/421,https://issues.webrtc.org/issues/42229154 +https://crbug.com/webrtc/4210,https://issues.webrtc.org/issues/42229155 +https://crbug.com/webrtc/4211,https://issues.webrtc.org/issues/42229156 +https://crbug.com/webrtc/4212,https://issues.webrtc.org/issues/42229157 +https://crbug.com/webrtc/4213,https://issues.webrtc.org/issues/42229158 +https://crbug.com/webrtc/4214,https://issues.webrtc.org/issues/42229159 +https://crbug.com/webrtc/4215,https://issues.webrtc.org/issues/42229160 +https://crbug.com/webrtc/4216,https://issues.webrtc.org/issues/42229161 +https://crbug.com/webrtc/4217,https://issues.webrtc.org/issues/42229162 
+https://crbug.com/webrtc/4218,https://issues.webrtc.org/issues/42229163 +https://crbug.com/webrtc/4219,https://issues.webrtc.org/issues/42229164 +https://crbug.com/webrtc/422,https://issues.webrtc.org/issues/42229165 +https://crbug.com/webrtc/4220,https://issues.webrtc.org/issues/42229166 +https://crbug.com/webrtc/4221,https://issues.webrtc.org/issues/42229167 +https://crbug.com/webrtc/4222,https://issues.webrtc.org/issues/42229168 +https://crbug.com/webrtc/4223,https://issues.webrtc.org/issues/42229169 +https://crbug.com/webrtc/4224,https://issues.webrtc.org/issues/42229170 +https://crbug.com/webrtc/4225,https://issues.webrtc.org/issues/42229171 +https://crbug.com/webrtc/4226,https://issues.webrtc.org/issues/42229172 +https://crbug.com/webrtc/4227,https://issues.webrtc.org/issues/42229173 +https://crbug.com/webrtc/4228,https://issues.webrtc.org/issues/42229174 +https://crbug.com/webrtc/4229,https://issues.webrtc.org/issues/42229175 +https://crbug.com/webrtc/423,https://issues.webrtc.org/issues/42229176 +https://crbug.com/webrtc/4230,https://issues.webrtc.org/issues/42229177 +https://crbug.com/webrtc/4231,https://issues.webrtc.org/issues/42229178 +https://crbug.com/webrtc/4232,https://issues.webrtc.org/issues/42229179 +https://crbug.com/webrtc/4234,https://issues.webrtc.org/issues/42229180 +https://crbug.com/webrtc/4235,https://issues.webrtc.org/issues/42229181 +https://crbug.com/webrtc/4236,https://issues.webrtc.org/issues/42229182 +https://crbug.com/webrtc/4237,https://issues.webrtc.org/issues/42229183 +https://crbug.com/webrtc/4238,https://issues.webrtc.org/issues/42229184 +https://crbug.com/webrtc/4239,https://issues.webrtc.org/issues/42229185 +https://crbug.com/webrtc/424,https://issues.webrtc.org/issues/42229186 +https://crbug.com/webrtc/4240,https://issues.webrtc.org/issues/42229187 +https://crbug.com/webrtc/4241,https://issues.webrtc.org/issues/42229188 +https://crbug.com/webrtc/4242,https://issues.webrtc.org/issues/42229189 +https://crbug.com/webrtc/4243,https://issues.webrtc.org/issues/42229190 +https://crbug.com/webrtc/4244,https://issues.webrtc.org/issues/42229191 +https://crbug.com/webrtc/4245,https://issues.webrtc.org/issues/42229192 +https://crbug.com/webrtc/4246,https://issues.webrtc.org/issues/42229193 +https://crbug.com/webrtc/4247,https://issues.webrtc.org/issues/42229194 +https://crbug.com/webrtc/4248,https://issues.webrtc.org/issues/42229195 +https://crbug.com/webrtc/4249,https://issues.webrtc.org/issues/42229196 +https://crbug.com/webrtc/425,https://issues.webrtc.org/issues/42229197 +https://crbug.com/webrtc/4250,https://issues.webrtc.org/issues/42229198 +https://crbug.com/webrtc/4251,https://issues.webrtc.org/issues/42229199 +https://crbug.com/webrtc/4252,https://issues.webrtc.org/issues/42229200 +https://crbug.com/webrtc/4253,https://issues.webrtc.org/issues/42229201 +https://crbug.com/webrtc/4254,https://issues.webrtc.org/issues/42229202 +https://crbug.com/webrtc/4255,https://issues.webrtc.org/issues/42229203 +https://crbug.com/webrtc/4257,https://issues.webrtc.org/issues/42229204 +https://crbug.com/webrtc/4258,https://issues.webrtc.org/issues/42229205 +https://crbug.com/webrtc/4259,https://issues.webrtc.org/issues/42229206 +https://crbug.com/webrtc/426,https://issues.webrtc.org/issues/42229207 +https://crbug.com/webrtc/4260,https://issues.webrtc.org/issues/42229208 +https://crbug.com/webrtc/4261,https://issues.webrtc.org/issues/42229209 +https://crbug.com/webrtc/4262,https://issues.webrtc.org/issues/42229210 
+https://crbug.com/webrtc/4263,https://issues.webrtc.org/issues/42229211 +https://crbug.com/webrtc/4264,https://issues.webrtc.org/issues/42229212 +https://crbug.com/webrtc/4265,https://issues.webrtc.org/issues/42229213 +https://crbug.com/webrtc/4266,https://issues.webrtc.org/issues/42229214 +https://crbug.com/webrtc/4267,https://issues.webrtc.org/issues/42229215 +https://crbug.com/webrtc/4268,https://issues.webrtc.org/issues/42229216 +https://crbug.com/webrtc/4269,https://issues.webrtc.org/issues/42229217 +https://crbug.com/webrtc/427,https://issues.webrtc.org/issues/42229218 +https://crbug.com/webrtc/4270,https://issues.webrtc.org/issues/42229219 +https://crbug.com/webrtc/4271,https://issues.webrtc.org/issues/42229220 +https://crbug.com/webrtc/4272,https://issues.webrtc.org/issues/42229221 +https://crbug.com/webrtc/4273,https://issues.webrtc.org/issues/42229222 +https://crbug.com/webrtc/4274,https://issues.webrtc.org/issues/42229223 +https://crbug.com/webrtc/4275,https://issues.webrtc.org/issues/42229224 +https://crbug.com/webrtc/4276,https://issues.webrtc.org/issues/42229225 +https://crbug.com/webrtc/4277,https://issues.webrtc.org/issues/42229226 +https://crbug.com/webrtc/4278,https://issues.webrtc.org/issues/42229227 +https://crbug.com/webrtc/4279,https://issues.webrtc.org/issues/42229228 +https://crbug.com/webrtc/428,https://issues.webrtc.org/issues/42229229 +https://crbug.com/webrtc/4280,https://issues.webrtc.org/issues/42229230 +https://crbug.com/webrtc/4281,https://issues.webrtc.org/issues/42229231 +https://crbug.com/webrtc/4282,https://issues.webrtc.org/issues/42229232 +https://crbug.com/webrtc/4283,https://issues.webrtc.org/issues/42229233 +https://crbug.com/webrtc/4284,https://issues.webrtc.org/issues/42229234 +https://crbug.com/webrtc/4285,https://issues.webrtc.org/issues/42229235 +https://crbug.com/webrtc/4286,https://issues.webrtc.org/issues/42229236 +https://crbug.com/webrtc/4287,https://issues.webrtc.org/issues/42229237 +https://crbug.com/webrtc/4288,https://issues.webrtc.org/issues/42229238 +https://crbug.com/webrtc/4289,https://issues.webrtc.org/issues/42229239 +https://crbug.com/webrtc/429,https://issues.webrtc.org/issues/42229240 +https://crbug.com/webrtc/4290,https://issues.webrtc.org/issues/42229241 +https://crbug.com/webrtc/4291,https://issues.webrtc.org/issues/42229242 +https://crbug.com/webrtc/4292,https://issues.webrtc.org/issues/42229243 +https://crbug.com/webrtc/4293,https://issues.webrtc.org/issues/42229244 +https://crbug.com/webrtc/4294,https://issues.webrtc.org/issues/42229245 +https://crbug.com/webrtc/4295,https://issues.webrtc.org/issues/42229246 +https://crbug.com/webrtc/4296,https://issues.webrtc.org/issues/42229247 +https://crbug.com/webrtc/4297,https://issues.webrtc.org/issues/42229248 +https://crbug.com/webrtc/4298,https://issues.webrtc.org/issues/42229249 +https://crbug.com/webrtc/4299,https://issues.webrtc.org/issues/42229250 +https://crbug.com/webrtc/43,https://issues.webrtc.org/issues/42229251 +https://crbug.com/webrtc/430,https://issues.webrtc.org/issues/42229252 +https://crbug.com/webrtc/4300,https://issues.webrtc.org/issues/42229253 +https://crbug.com/webrtc/4301,https://issues.webrtc.org/issues/42229254 +https://crbug.com/webrtc/4302,https://issues.webrtc.org/issues/42229255 +https://crbug.com/webrtc/4303,https://issues.webrtc.org/issues/42229256 +https://crbug.com/webrtc/4304,https://issues.webrtc.org/issues/42229257 +https://crbug.com/webrtc/4305,https://issues.webrtc.org/issues/42229258 
+https://crbug.com/webrtc/4306,https://issues.webrtc.org/issues/42229259 +https://crbug.com/webrtc/4307,https://issues.webrtc.org/issues/42229260 +https://crbug.com/webrtc/4308,https://issues.webrtc.org/issues/42229261 +https://crbug.com/webrtc/4309,https://issues.webrtc.org/issues/42229262 +https://crbug.com/webrtc/431,https://issues.webrtc.org/issues/42229263 +https://crbug.com/webrtc/4310,https://issues.webrtc.org/issues/42229264 +https://crbug.com/webrtc/4311,https://issues.webrtc.org/issues/42229265 +https://crbug.com/webrtc/4312,https://issues.webrtc.org/issues/42229266 +https://crbug.com/webrtc/4313,https://issues.webrtc.org/issues/42229267 +https://crbug.com/webrtc/4314,https://issues.webrtc.org/issues/42229268 +https://crbug.com/webrtc/4315,https://issues.webrtc.org/issues/42229269 +https://crbug.com/webrtc/4316,https://issues.webrtc.org/issues/42229270 +https://crbug.com/webrtc/4317,https://issues.webrtc.org/issues/42229271 +https://crbug.com/webrtc/4318,https://issues.webrtc.org/issues/42229272 +https://crbug.com/webrtc/4319,https://issues.webrtc.org/issues/42229273 +https://crbug.com/webrtc/432,https://issues.webrtc.org/issues/42229274 +https://crbug.com/webrtc/4321,https://issues.webrtc.org/issues/42229275 +https://crbug.com/webrtc/4322,https://issues.webrtc.org/issues/42229276 +https://crbug.com/webrtc/4323,https://issues.webrtc.org/issues/42229277 +https://crbug.com/webrtc/4324,https://issues.webrtc.org/issues/42229278 +https://crbug.com/webrtc/4325,https://issues.webrtc.org/issues/42229279 +https://crbug.com/webrtc/4326,https://issues.webrtc.org/issues/42229280 +https://crbug.com/webrtc/4327,https://issues.webrtc.org/issues/42229281 +https://crbug.com/webrtc/4328,https://issues.webrtc.org/issues/42229282 +https://crbug.com/webrtc/4329,https://issues.webrtc.org/issues/42229283 +https://crbug.com/webrtc/433,https://issues.webrtc.org/issues/42229284 +https://crbug.com/webrtc/4330,https://issues.webrtc.org/issues/42229285 +https://crbug.com/webrtc/4331,https://issues.webrtc.org/issues/42229286 +https://crbug.com/webrtc/4332,https://issues.webrtc.org/issues/42229287 +https://crbug.com/webrtc/4333,https://issues.webrtc.org/issues/42229288 +https://crbug.com/webrtc/4334,https://issues.webrtc.org/issues/42229289 +https://crbug.com/webrtc/4335,https://issues.webrtc.org/issues/42229290 +https://crbug.com/webrtc/4336,https://issues.webrtc.org/issues/42229291 +https://crbug.com/webrtc/4337,https://issues.webrtc.org/issues/42229292 +https://crbug.com/webrtc/4338,https://issues.webrtc.org/issues/42229293 +https://crbug.com/webrtc/4339,https://issues.webrtc.org/issues/42229294 +https://crbug.com/webrtc/434,https://issues.webrtc.org/issues/42229295 +https://crbug.com/webrtc/4340,https://issues.webrtc.org/issues/42229296 +https://crbug.com/webrtc/4341,https://issues.webrtc.org/issues/42229297 +https://crbug.com/webrtc/4342,https://issues.webrtc.org/issues/42229298 +https://crbug.com/webrtc/4343,https://issues.webrtc.org/issues/42229299 +https://crbug.com/webrtc/4344,https://issues.webrtc.org/issues/42229300 +https://crbug.com/webrtc/4345,https://issues.webrtc.org/issues/42229301 +https://crbug.com/webrtc/4346,https://issues.webrtc.org/issues/42229302 +https://crbug.com/webrtc/4347,https://issues.webrtc.org/issues/42229303 +https://crbug.com/webrtc/4348,https://issues.webrtc.org/issues/42229304 +https://crbug.com/webrtc/4349,https://issues.webrtc.org/issues/42229305 +https://crbug.com/webrtc/435,https://issues.webrtc.org/issues/42229306 
+https://crbug.com/webrtc/4350,https://issues.webrtc.org/issues/42229307 +https://crbug.com/webrtc/4351,https://issues.webrtc.org/issues/42229308 +https://crbug.com/webrtc/4352,https://issues.webrtc.org/issues/42229309 +https://crbug.com/webrtc/4353,https://issues.webrtc.org/issues/42229310 +https://crbug.com/webrtc/4354,https://issues.webrtc.org/issues/42229311 +https://crbug.com/webrtc/4355,https://issues.webrtc.org/issues/42229312 +https://crbug.com/webrtc/4356,https://issues.webrtc.org/issues/42229313 +https://crbug.com/webrtc/4357,https://issues.webrtc.org/issues/42229314 +https://crbug.com/webrtc/4358,https://issues.webrtc.org/issues/42229315 +https://crbug.com/webrtc/4359,https://issues.webrtc.org/issues/42229316 +https://crbug.com/webrtc/436,https://issues.webrtc.org/issues/42229317 +https://crbug.com/webrtc/4360,https://issues.webrtc.org/issues/42229318 +https://crbug.com/webrtc/4361,https://issues.webrtc.org/issues/42229319 +https://crbug.com/webrtc/4362,https://issues.webrtc.org/issues/42229320 +https://crbug.com/webrtc/4363,https://issues.webrtc.org/issues/42229321 +https://crbug.com/webrtc/4364,https://issues.webrtc.org/issues/42229322 +https://crbug.com/webrtc/4365,https://issues.webrtc.org/issues/42229323 +https://crbug.com/webrtc/4366,https://issues.webrtc.org/issues/42229324 +https://crbug.com/webrtc/4367,https://issues.webrtc.org/issues/42229325 +https://crbug.com/webrtc/4368,https://issues.webrtc.org/issues/42229326 +https://crbug.com/webrtc/4369,https://issues.webrtc.org/issues/42229327 +https://crbug.com/webrtc/437,https://issues.webrtc.org/issues/42229328 +https://crbug.com/webrtc/4370,https://issues.webrtc.org/issues/42229329 +https://crbug.com/webrtc/4371,https://issues.webrtc.org/issues/42229330 +https://crbug.com/webrtc/4372,https://issues.webrtc.org/issues/42229331 +https://crbug.com/webrtc/4373,https://issues.webrtc.org/issues/42229332 +https://crbug.com/webrtc/4374,https://issues.webrtc.org/issues/42229333 +https://crbug.com/webrtc/4375,https://issues.webrtc.org/issues/42229334 +https://crbug.com/webrtc/4376,https://issues.webrtc.org/issues/42229335 +https://crbug.com/webrtc/4377,https://issues.webrtc.org/issues/42229336 +https://crbug.com/webrtc/4378,https://issues.webrtc.org/issues/42229337 +https://crbug.com/webrtc/4379,https://issues.webrtc.org/issues/42229338 +https://crbug.com/webrtc/438,https://issues.webrtc.org/issues/42229339 +https://crbug.com/webrtc/4380,https://issues.webrtc.org/issues/42229340 +https://crbug.com/webrtc/4381,https://issues.webrtc.org/issues/42229341 +https://crbug.com/webrtc/4382,https://issues.webrtc.org/issues/42229342 +https://crbug.com/webrtc/4383,https://issues.webrtc.org/issues/42229343 +https://crbug.com/webrtc/4384,https://issues.webrtc.org/issues/42229344 +https://crbug.com/webrtc/4385,https://issues.webrtc.org/issues/42229345 +https://crbug.com/webrtc/4386,https://issues.webrtc.org/issues/42229346 +https://crbug.com/webrtc/4387,https://issues.webrtc.org/issues/42229347 +https://crbug.com/webrtc/4388,https://issues.webrtc.org/issues/42229348 +https://crbug.com/webrtc/4389,https://issues.webrtc.org/issues/42229349 +https://crbug.com/webrtc/439,https://issues.webrtc.org/issues/42229350 +https://crbug.com/webrtc/4390,https://issues.webrtc.org/issues/42229351 +https://crbug.com/webrtc/4391,https://issues.webrtc.org/issues/42229352 +https://crbug.com/webrtc/4392,https://issues.webrtc.org/issues/42229353 +https://crbug.com/webrtc/4393,https://issues.webrtc.org/issues/42229354 
+https://crbug.com/webrtc/4394,https://issues.webrtc.org/issues/42229355 +https://crbug.com/webrtc/4395,https://issues.webrtc.org/issues/42229356 +https://crbug.com/webrtc/4396,https://issues.webrtc.org/issues/42229357 +https://crbug.com/webrtc/4397,https://issues.webrtc.org/issues/42229358 +https://crbug.com/webrtc/4398,https://issues.webrtc.org/issues/42229359 +https://crbug.com/webrtc/4399,https://issues.webrtc.org/issues/42229360 +https://crbug.com/webrtc/44,https://issues.webrtc.org/issues/42229361 +https://crbug.com/webrtc/440,https://issues.webrtc.org/issues/42229362 +https://crbug.com/webrtc/4400,https://issues.webrtc.org/issues/42229363 +https://crbug.com/webrtc/4401,https://issues.webrtc.org/issues/42229364 +https://crbug.com/webrtc/4402,https://issues.webrtc.org/issues/42229365 +https://crbug.com/webrtc/4403,https://issues.webrtc.org/issues/42229366 +https://crbug.com/webrtc/4404,https://issues.webrtc.org/issues/42229367 +https://crbug.com/webrtc/4405,https://issues.webrtc.org/issues/42229368 +https://crbug.com/webrtc/4406,https://issues.webrtc.org/issues/42229369 +https://crbug.com/webrtc/4407,https://issues.webrtc.org/issues/42229370 +https://crbug.com/webrtc/4408,https://issues.webrtc.org/issues/42229371 +https://crbug.com/webrtc/4409,https://issues.webrtc.org/issues/42229372 +https://crbug.com/webrtc/441,https://issues.webrtc.org/issues/42229373 +https://crbug.com/webrtc/4410,https://issues.webrtc.org/issues/42229374 +https://crbug.com/webrtc/4411,https://issues.webrtc.org/issues/42229375 +https://crbug.com/webrtc/4412,https://issues.webrtc.org/issues/42229376 +https://crbug.com/webrtc/4413,https://issues.webrtc.org/issues/42229377 +https://crbug.com/webrtc/4414,https://issues.webrtc.org/issues/42229378 +https://crbug.com/webrtc/4415,https://issues.webrtc.org/issues/42229379 +https://crbug.com/webrtc/4416,https://issues.webrtc.org/issues/42229380 +https://crbug.com/webrtc/4417,https://issues.webrtc.org/issues/42229381 +https://crbug.com/webrtc/4418,https://issues.webrtc.org/issues/42229382 +https://crbug.com/webrtc/4419,https://issues.webrtc.org/issues/42229383 +https://crbug.com/webrtc/442,https://issues.webrtc.org/issues/42229384 +https://crbug.com/webrtc/4420,https://issues.webrtc.org/issues/42229385 +https://crbug.com/webrtc/4421,https://issues.webrtc.org/issues/42229386 +https://crbug.com/webrtc/4422,https://issues.webrtc.org/issues/42229387 +https://crbug.com/webrtc/4424,https://issues.webrtc.org/issues/42229388 +https://crbug.com/webrtc/4425,https://issues.webrtc.org/issues/42229389 +https://crbug.com/webrtc/4426,https://issues.webrtc.org/issues/42229390 +https://crbug.com/webrtc/4427,https://issues.webrtc.org/issues/42229391 +https://crbug.com/webrtc/4428,https://issues.webrtc.org/issues/42229392 +https://crbug.com/webrtc/4429,https://issues.webrtc.org/issues/42229393 +https://crbug.com/webrtc/443,https://issues.webrtc.org/issues/42229394 +https://crbug.com/webrtc/4430,https://issues.webrtc.org/issues/42229395 +https://crbug.com/webrtc/4431,https://issues.webrtc.org/issues/42229396 +https://crbug.com/webrtc/4432,https://issues.webrtc.org/issues/42229397 +https://crbug.com/webrtc/4433,https://issues.webrtc.org/issues/42229398 +https://crbug.com/webrtc/4434,https://issues.webrtc.org/issues/42229399 +https://crbug.com/webrtc/4435,https://issues.webrtc.org/issues/42229400 +https://crbug.com/webrtc/4436,https://issues.webrtc.org/issues/42229401 +https://crbug.com/webrtc/4437,https://issues.webrtc.org/issues/42229402 
+https://crbug.com/webrtc/4438,https://issues.webrtc.org/issues/42229403 +https://crbug.com/webrtc/4439,https://issues.webrtc.org/issues/42229404 +https://crbug.com/webrtc/444,https://issues.webrtc.org/issues/42229405 +https://crbug.com/webrtc/4440,https://issues.webrtc.org/issues/42229406 +https://crbug.com/webrtc/4441,https://issues.webrtc.org/issues/42229407 +https://crbug.com/webrtc/4442,https://issues.webrtc.org/issues/42229408 +https://crbug.com/webrtc/4443,https://issues.webrtc.org/issues/42229409 +https://crbug.com/webrtc/4444,https://issues.webrtc.org/issues/42229410 +https://crbug.com/webrtc/4445,https://issues.webrtc.org/issues/42229411 +https://crbug.com/webrtc/4446,https://issues.webrtc.org/issues/42229412 +https://crbug.com/webrtc/4447,https://issues.webrtc.org/issues/42229413 +https://crbug.com/webrtc/4448,https://issues.webrtc.org/issues/42229414 +https://crbug.com/webrtc/4449,https://issues.webrtc.org/issues/42229415 +https://crbug.com/webrtc/445,https://issues.webrtc.org/issues/42229416 +https://crbug.com/webrtc/4450,https://issues.webrtc.org/issues/42229417 +https://crbug.com/webrtc/4451,https://issues.webrtc.org/issues/42229418 +https://crbug.com/webrtc/4452,https://issues.webrtc.org/issues/42229419 +https://crbug.com/webrtc/4453,https://issues.webrtc.org/issues/42229420 +https://crbug.com/webrtc/4454,https://issues.webrtc.org/issues/42229421 +https://crbug.com/webrtc/4455,https://issues.webrtc.org/issues/42229422 +https://crbug.com/webrtc/4456,https://issues.webrtc.org/issues/42229423 +https://crbug.com/webrtc/4458,https://issues.webrtc.org/issues/42229424 +https://crbug.com/webrtc/4459,https://issues.webrtc.org/issues/42229425 +https://crbug.com/webrtc/446,https://issues.webrtc.org/issues/42229426 +https://crbug.com/webrtc/4460,https://issues.webrtc.org/issues/42229427 +https://crbug.com/webrtc/4461,https://issues.webrtc.org/issues/42229428 +https://crbug.com/webrtc/4462,https://issues.webrtc.org/issues/42229429 +https://crbug.com/webrtc/4463,https://issues.webrtc.org/issues/42229430 +https://crbug.com/webrtc/4464,https://issues.webrtc.org/issues/42229431 +https://crbug.com/webrtc/4465,https://issues.webrtc.org/issues/42229432 +https://crbug.com/webrtc/4466,https://issues.webrtc.org/issues/42229433 +https://crbug.com/webrtc/4467,https://issues.webrtc.org/issues/42229434 +https://crbug.com/webrtc/4468,https://issues.webrtc.org/issues/42229435 +https://crbug.com/webrtc/4469,https://issues.webrtc.org/issues/42229436 +https://crbug.com/webrtc/447,https://issues.webrtc.org/issues/42229437 +https://crbug.com/webrtc/4470,https://issues.webrtc.org/issues/42229438 +https://crbug.com/webrtc/4471,https://issues.webrtc.org/issues/42229439 +https://crbug.com/webrtc/4472,https://issues.webrtc.org/issues/42229440 +https://crbug.com/webrtc/4473,https://issues.webrtc.org/issues/42229441 +https://crbug.com/webrtc/4474,https://issues.webrtc.org/issues/42229442 +https://crbug.com/webrtc/4475,https://issues.webrtc.org/issues/42229443 +https://crbug.com/webrtc/4476,https://issues.webrtc.org/issues/42229444 +https://crbug.com/webrtc/4477,https://issues.webrtc.org/issues/42229445 +https://crbug.com/webrtc/4478,https://issues.webrtc.org/issues/42229446 +https://crbug.com/webrtc/4479,https://issues.webrtc.org/issues/42229447 +https://crbug.com/webrtc/448,https://issues.webrtc.org/issues/42229448 +https://crbug.com/webrtc/4480,https://issues.webrtc.org/issues/42229449 +https://crbug.com/webrtc/4481,https://issues.webrtc.org/issues/42229450 
+https://crbug.com/webrtc/4482,https://issues.webrtc.org/issues/42229451 +https://crbug.com/webrtc/4483,https://issues.webrtc.org/issues/42229452 +https://crbug.com/webrtc/4484,https://issues.webrtc.org/issues/42229453 +https://crbug.com/webrtc/4485,https://issues.webrtc.org/issues/42229454 +https://crbug.com/webrtc/4486,https://issues.webrtc.org/issues/42229455 +https://crbug.com/webrtc/4487,https://issues.webrtc.org/issues/42229456 +https://crbug.com/webrtc/4488,https://issues.webrtc.org/issues/42229457 +https://crbug.com/webrtc/4489,https://issues.webrtc.org/issues/42229458 +https://crbug.com/webrtc/449,https://issues.webrtc.org/issues/42229459 +https://crbug.com/webrtc/4490,https://issues.webrtc.org/issues/42229460 +https://crbug.com/webrtc/4491,https://issues.webrtc.org/issues/42229461 +https://crbug.com/webrtc/4492,https://issues.webrtc.org/issues/42229462 +https://crbug.com/webrtc/4493,https://issues.webrtc.org/issues/42229463 +https://crbug.com/webrtc/4494,https://issues.webrtc.org/issues/42229464 +https://crbug.com/webrtc/4496,https://issues.webrtc.org/issues/42229465 +https://crbug.com/webrtc/4497,https://issues.webrtc.org/issues/42229466 +https://crbug.com/webrtc/4498,https://issues.webrtc.org/issues/42229467 +https://crbug.com/webrtc/4499,https://issues.webrtc.org/issues/42229468 +https://crbug.com/webrtc/45,https://issues.webrtc.org/issues/42229469 +https://crbug.com/webrtc/450,https://issues.webrtc.org/issues/42229470 +https://crbug.com/webrtc/4500,https://issues.webrtc.org/issues/42229471 +https://crbug.com/webrtc/4501,https://issues.webrtc.org/issues/42229472 +https://crbug.com/webrtc/4502,https://issues.webrtc.org/issues/42229473 +https://crbug.com/webrtc/4503,https://issues.webrtc.org/issues/42229474 +https://crbug.com/webrtc/4504,https://issues.webrtc.org/issues/42229475 +https://crbug.com/webrtc/4505,https://issues.webrtc.org/issues/42229476 +https://crbug.com/webrtc/4506,https://issues.webrtc.org/issues/42229477 +https://crbug.com/webrtc/4507,https://issues.webrtc.org/issues/42229478 +https://crbug.com/webrtc/4508,https://issues.webrtc.org/issues/42229479 +https://crbug.com/webrtc/4509,https://issues.webrtc.org/issues/42229480 +https://crbug.com/webrtc/451,https://issues.webrtc.org/issues/42229481 +https://crbug.com/webrtc/4510,https://issues.webrtc.org/issues/42229482 +https://crbug.com/webrtc/4511,https://issues.webrtc.org/issues/42229483 +https://crbug.com/webrtc/4512,https://issues.webrtc.org/issues/42229484 +https://crbug.com/webrtc/4513,https://issues.webrtc.org/issues/42229485 +https://crbug.com/webrtc/4514,https://issues.webrtc.org/issues/42229486 +https://crbug.com/webrtc/4515,https://issues.webrtc.org/issues/42229487 +https://crbug.com/webrtc/4516,https://issues.webrtc.org/issues/42229488 +https://crbug.com/webrtc/4517,https://issues.webrtc.org/issues/42229489 +https://crbug.com/webrtc/4518,https://issues.webrtc.org/issues/42229490 +https://crbug.com/webrtc/4519,https://issues.webrtc.org/issues/42229491 +https://crbug.com/webrtc/452,https://issues.webrtc.org/issues/42229492 +https://crbug.com/webrtc/4520,https://issues.webrtc.org/issues/42229493 +https://crbug.com/webrtc/4522,https://issues.webrtc.org/issues/42229494 +https://crbug.com/webrtc/4523,https://issues.webrtc.org/issues/42229495 +https://crbug.com/webrtc/4524,https://issues.webrtc.org/issues/42229496 +https://crbug.com/webrtc/4525,https://issues.webrtc.org/issues/42229497 +https://crbug.com/webrtc/4526,https://issues.webrtc.org/issues/42229498 
+https://crbug.com/webrtc/4527,https://issues.webrtc.org/issues/42229499 +https://crbug.com/webrtc/4528,https://issues.webrtc.org/issues/42229500 +https://crbug.com/webrtc/4529,https://issues.webrtc.org/issues/42229501 +https://crbug.com/webrtc/453,https://issues.webrtc.org/issues/42229502 +https://crbug.com/webrtc/4530,https://issues.webrtc.org/issues/42229503 +https://crbug.com/webrtc/4531,https://issues.webrtc.org/issues/42229504 +https://crbug.com/webrtc/4533,https://issues.webrtc.org/issues/42229505 +https://crbug.com/webrtc/4536,https://issues.webrtc.org/issues/42229506 +https://crbug.com/webrtc/4537,https://issues.webrtc.org/issues/42229507 +https://crbug.com/webrtc/4538,https://issues.webrtc.org/issues/42229508 +https://crbug.com/webrtc/4539,https://issues.webrtc.org/issues/42229509 +https://crbug.com/webrtc/454,https://issues.webrtc.org/issues/42229510 +https://crbug.com/webrtc/4540,https://issues.webrtc.org/issues/42229511 +https://crbug.com/webrtc/4541,https://issues.webrtc.org/issues/42229512 +https://crbug.com/webrtc/4543,https://issues.webrtc.org/issues/42229513 +https://crbug.com/webrtc/4544,https://issues.webrtc.org/issues/42229514 +https://crbug.com/webrtc/4545,https://issues.webrtc.org/issues/42229515 +https://crbug.com/webrtc/4546,https://issues.webrtc.org/issues/42229516 +https://crbug.com/webrtc/4547,https://issues.webrtc.org/issues/42229517 +https://crbug.com/webrtc/4548,https://issues.webrtc.org/issues/42229518 +https://crbug.com/webrtc/4549,https://issues.webrtc.org/issues/42229519 +https://crbug.com/webrtc/455,https://issues.webrtc.org/issues/42229520 +https://crbug.com/webrtc/4550,https://issues.webrtc.org/issues/42229521 +https://crbug.com/webrtc/4551,https://issues.webrtc.org/issues/42229522 +https://crbug.com/webrtc/4552,https://issues.webrtc.org/issues/42229523 +https://crbug.com/webrtc/4553,https://issues.webrtc.org/issues/42229524 +https://crbug.com/webrtc/4554,https://issues.webrtc.org/issues/42229525 +https://crbug.com/webrtc/4555,https://issues.webrtc.org/issues/42229526 +https://crbug.com/webrtc/4556,https://issues.webrtc.org/issues/42229527 +https://crbug.com/webrtc/4557,https://issues.webrtc.org/issues/42229528 +https://crbug.com/webrtc/4558,https://issues.webrtc.org/issues/42229529 +https://crbug.com/webrtc/4559,https://issues.webrtc.org/issues/42229530 +https://crbug.com/webrtc/456,https://issues.webrtc.org/issues/42229531 +https://crbug.com/webrtc/4560,https://issues.webrtc.org/issues/42229532 +https://crbug.com/webrtc/4561,https://issues.webrtc.org/issues/42229533 +https://crbug.com/webrtc/4562,https://issues.webrtc.org/issues/42229534 +https://crbug.com/webrtc/4563,https://issues.webrtc.org/issues/42229535 +https://crbug.com/webrtc/4564,https://issues.webrtc.org/issues/42229536 +https://crbug.com/webrtc/4565,https://issues.webrtc.org/issues/42229537 +https://crbug.com/webrtc/4566,https://issues.webrtc.org/issues/42229538 +https://crbug.com/webrtc/4567,https://issues.webrtc.org/issues/42229539 +https://crbug.com/webrtc/4569,https://issues.webrtc.org/issues/42229540 +https://crbug.com/webrtc/457,https://issues.webrtc.org/issues/42229541 +https://crbug.com/webrtc/4571,https://issues.webrtc.org/issues/42229542 +https://crbug.com/webrtc/4572,https://issues.webrtc.org/issues/42229543 +https://crbug.com/webrtc/4573,https://issues.webrtc.org/issues/42229544 +https://crbug.com/webrtc/4574,https://issues.webrtc.org/issues/42229545 +https://crbug.com/webrtc/4575,https://issues.webrtc.org/issues/42229546 
+https://crbug.com/webrtc/4576,https://issues.webrtc.org/issues/42229547 +https://crbug.com/webrtc/4577,https://issues.webrtc.org/issues/42229548 +https://crbug.com/webrtc/4578,https://issues.webrtc.org/issues/42229549 +https://crbug.com/webrtc/4579,https://issues.webrtc.org/issues/42229550 +https://crbug.com/webrtc/458,https://issues.webrtc.org/issues/42229551 +https://crbug.com/webrtc/4580,https://issues.webrtc.org/issues/42229552 +https://crbug.com/webrtc/4581,https://issues.webrtc.org/issues/42229553 +https://crbug.com/webrtc/4582,https://issues.webrtc.org/issues/42229554 +https://crbug.com/webrtc/4583,https://issues.webrtc.org/issues/42229555 +https://crbug.com/webrtc/4584,https://issues.webrtc.org/issues/42229556 +https://crbug.com/webrtc/4585,https://issues.webrtc.org/issues/42229557 +https://crbug.com/webrtc/4586,https://issues.webrtc.org/issues/42229558 +https://crbug.com/webrtc/4587,https://issues.webrtc.org/issues/42229559 +https://crbug.com/webrtc/4588,https://issues.webrtc.org/issues/42229560 +https://crbug.com/webrtc/4589,https://issues.webrtc.org/issues/42229561 +https://crbug.com/webrtc/459,https://issues.webrtc.org/issues/42229562 +https://crbug.com/webrtc/4590,https://issues.webrtc.org/issues/42229563 +https://crbug.com/webrtc/4591,https://issues.webrtc.org/issues/42229564 +https://crbug.com/webrtc/4592,https://issues.webrtc.org/issues/42229565 +https://crbug.com/webrtc/4593,https://issues.webrtc.org/issues/42229566 +https://crbug.com/webrtc/4594,https://issues.webrtc.org/issues/42229567 +https://crbug.com/webrtc/4595,https://issues.webrtc.org/issues/42229568 +https://crbug.com/webrtc/4596,https://issues.webrtc.org/issues/42229569 +https://crbug.com/webrtc/4597,https://issues.webrtc.org/issues/42229570 +https://crbug.com/webrtc/4598,https://issues.webrtc.org/issues/42229571 +https://crbug.com/webrtc/4599,https://issues.webrtc.org/issues/42229572 +https://crbug.com/webrtc/46,https://issues.webrtc.org/issues/42229573 +https://crbug.com/webrtc/460,https://issues.webrtc.org/issues/42229574 +https://crbug.com/webrtc/4600,https://issues.webrtc.org/issues/42229575 +https://crbug.com/webrtc/4602,https://issues.webrtc.org/issues/42229576 +https://crbug.com/webrtc/4603,https://issues.webrtc.org/issues/42229577 +https://crbug.com/webrtc/4604,https://issues.webrtc.org/issues/42229578 +https://crbug.com/webrtc/4605,https://issues.webrtc.org/issues/42229579 +https://crbug.com/webrtc/4606,https://issues.webrtc.org/issues/42229580 +https://crbug.com/webrtc/4607,https://issues.webrtc.org/issues/42229581 +https://crbug.com/webrtc/4608,https://issues.webrtc.org/issues/42229582 +https://crbug.com/webrtc/4609,https://issues.webrtc.org/issues/42229583 +https://crbug.com/webrtc/461,https://issues.webrtc.org/issues/42229584 +https://crbug.com/webrtc/4610,https://issues.webrtc.org/issues/42229585 +https://crbug.com/webrtc/4611,https://issues.webrtc.org/issues/42229586 +https://crbug.com/webrtc/4613,https://issues.webrtc.org/issues/42229587 +https://crbug.com/webrtc/4615,https://issues.webrtc.org/issues/42229588 +https://crbug.com/webrtc/4616,https://issues.webrtc.org/issues/42229589 +https://crbug.com/webrtc/4617,https://issues.webrtc.org/issues/42229590 +https://crbug.com/webrtc/4618,https://issues.webrtc.org/issues/42229591 +https://crbug.com/webrtc/4619,https://issues.webrtc.org/issues/42229592 +https://crbug.com/webrtc/462,https://issues.webrtc.org/issues/42229593 +https://crbug.com/webrtc/4620,https://issues.webrtc.org/issues/42229594 
+https://crbug.com/webrtc/4621,https://issues.webrtc.org/issues/42229595 +https://crbug.com/webrtc/4622,https://issues.webrtc.org/issues/42229596 +https://crbug.com/webrtc/4623,https://issues.webrtc.org/issues/42229597 +https://crbug.com/webrtc/4625,https://issues.webrtc.org/issues/42229598 +https://crbug.com/webrtc/4626,https://issues.webrtc.org/issues/42229599 +https://crbug.com/webrtc/4627,https://issues.webrtc.org/issues/42229600 +https://crbug.com/webrtc/4628,https://issues.webrtc.org/issues/42229601 +https://crbug.com/webrtc/4629,https://issues.webrtc.org/issues/42229602 +https://crbug.com/webrtc/463,https://issues.webrtc.org/issues/42229603 +https://crbug.com/webrtc/4630,https://issues.webrtc.org/issues/42229604 +https://crbug.com/webrtc/4631,https://issues.webrtc.org/issues/42229605 +https://crbug.com/webrtc/4632,https://issues.webrtc.org/issues/42229606 +https://crbug.com/webrtc/4633,https://issues.webrtc.org/issues/42229607 +https://crbug.com/webrtc/4634,https://issues.webrtc.org/issues/42229608 +https://crbug.com/webrtc/4635,https://issues.webrtc.org/issues/42229609 +https://crbug.com/webrtc/4636,https://issues.webrtc.org/issues/42229610 +https://crbug.com/webrtc/4637,https://issues.webrtc.org/issues/42229611 +https://crbug.com/webrtc/4638,https://issues.webrtc.org/issues/42229612 +https://crbug.com/webrtc/464,https://issues.webrtc.org/issues/42229613 +https://crbug.com/webrtc/4640,https://issues.webrtc.org/issues/42229614 +https://crbug.com/webrtc/4641,https://issues.webrtc.org/issues/42229615 +https://crbug.com/webrtc/4642,https://issues.webrtc.org/issues/42229616 +https://crbug.com/webrtc/4643,https://issues.webrtc.org/issues/42229617 +https://crbug.com/webrtc/4644,https://issues.webrtc.org/issues/42229618 +https://crbug.com/webrtc/4645,https://issues.webrtc.org/issues/42229619 +https://crbug.com/webrtc/4646,https://issues.webrtc.org/issues/42229620 +https://crbug.com/webrtc/4647,https://issues.webrtc.org/issues/42229621 +https://crbug.com/webrtc/4648,https://issues.webrtc.org/issues/42229622 +https://crbug.com/webrtc/4649,https://issues.webrtc.org/issues/42229623 +https://crbug.com/webrtc/465,https://issues.webrtc.org/issues/42229624 +https://crbug.com/webrtc/4650,https://issues.webrtc.org/issues/42229625 +https://crbug.com/webrtc/4651,https://issues.webrtc.org/issues/42229626 +https://crbug.com/webrtc/4652,https://issues.webrtc.org/issues/42229627 +https://crbug.com/webrtc/4653,https://issues.webrtc.org/issues/42229628 +https://crbug.com/webrtc/4654,https://issues.webrtc.org/issues/42229629 +https://crbug.com/webrtc/4655,https://issues.webrtc.org/issues/42229630 +https://crbug.com/webrtc/4656,https://issues.webrtc.org/issues/42229631 +https://crbug.com/webrtc/4657,https://issues.webrtc.org/issues/42229632 +https://crbug.com/webrtc/4658,https://issues.webrtc.org/issues/42229633 +https://crbug.com/webrtc/4659,https://issues.webrtc.org/issues/42229634 +https://crbug.com/webrtc/466,https://issues.webrtc.org/issues/42229635 +https://crbug.com/webrtc/4660,https://issues.webrtc.org/issues/42229636 +https://crbug.com/webrtc/4661,https://issues.webrtc.org/issues/42229637 +https://crbug.com/webrtc/4662,https://issues.webrtc.org/issues/42229638 +https://crbug.com/webrtc/4663,https://issues.webrtc.org/issues/42229639 +https://crbug.com/webrtc/4664,https://issues.webrtc.org/issues/42229640 +https://crbug.com/webrtc/4665,https://issues.webrtc.org/issues/42229641 +https://crbug.com/webrtc/4666,https://issues.webrtc.org/issues/42229642 
+https://crbug.com/webrtc/4667,https://issues.webrtc.org/issues/42229643 +https://crbug.com/webrtc/4668,https://issues.webrtc.org/issues/42229644 +https://crbug.com/webrtc/4669,https://issues.webrtc.org/issues/42229645 +https://crbug.com/webrtc/467,https://issues.webrtc.org/issues/42229646 +https://crbug.com/webrtc/4670,https://issues.webrtc.org/issues/42229647 +https://crbug.com/webrtc/4671,https://issues.webrtc.org/issues/42229648 +https://crbug.com/webrtc/4672,https://issues.webrtc.org/issues/42229649 +https://crbug.com/webrtc/4673,https://issues.webrtc.org/issues/42229650 +https://crbug.com/webrtc/4674,https://issues.webrtc.org/issues/42229651 +https://crbug.com/webrtc/4675,https://issues.webrtc.org/issues/42229652 +https://crbug.com/webrtc/4677,https://issues.webrtc.org/issues/42229653 +https://crbug.com/webrtc/4679,https://issues.webrtc.org/issues/42229654 +https://crbug.com/webrtc/468,https://issues.webrtc.org/issues/42229655 +https://crbug.com/webrtc/4680,https://issues.webrtc.org/issues/42229656 +https://crbug.com/webrtc/4681,https://issues.webrtc.org/issues/42229657 +https://crbug.com/webrtc/4682,https://issues.webrtc.org/issues/42229658 +https://crbug.com/webrtc/4683,https://issues.webrtc.org/issues/42229659 +https://crbug.com/webrtc/4684,https://issues.webrtc.org/issues/42229660 +https://crbug.com/webrtc/4685,https://issues.webrtc.org/issues/42229661 +https://crbug.com/webrtc/4686,https://issues.webrtc.org/issues/42229662 +https://crbug.com/webrtc/4687,https://issues.webrtc.org/issues/42229663 +https://crbug.com/webrtc/4689,https://issues.webrtc.org/issues/42229664 +https://crbug.com/webrtc/469,https://issues.webrtc.org/issues/42229665 +https://crbug.com/webrtc/4690,https://issues.webrtc.org/issues/42229666 +https://crbug.com/webrtc/4691,https://issues.webrtc.org/issues/42229667 +https://crbug.com/webrtc/4692,https://issues.webrtc.org/issues/42229668 +https://crbug.com/webrtc/4693,https://issues.webrtc.org/issues/42229669 +https://crbug.com/webrtc/4694,https://issues.webrtc.org/issues/42229670 +https://crbug.com/webrtc/4695,https://issues.webrtc.org/issues/42229671 +https://crbug.com/webrtc/4696,https://issues.webrtc.org/issues/42229672 +https://crbug.com/webrtc/4697,https://issues.webrtc.org/issues/42229673 +https://crbug.com/webrtc/4698,https://issues.webrtc.org/issues/42229674 +https://crbug.com/webrtc/47,https://issues.webrtc.org/issues/42229675 +https://crbug.com/webrtc/470,https://issues.webrtc.org/issues/42229676 +https://crbug.com/webrtc/4701,https://issues.webrtc.org/issues/42229677 +https://crbug.com/webrtc/4702,https://issues.webrtc.org/issues/42229678 +https://crbug.com/webrtc/4704,https://issues.webrtc.org/issues/42229679 +https://crbug.com/webrtc/4708,https://issues.webrtc.org/issues/42229680 +https://crbug.com/webrtc/4709,https://issues.webrtc.org/issues/42229681 +https://crbug.com/webrtc/471,https://issues.webrtc.org/issues/42229682 +https://crbug.com/webrtc/4710,https://issues.webrtc.org/issues/42229683 +https://crbug.com/webrtc/4711,https://issues.webrtc.org/issues/42229684 +https://crbug.com/webrtc/4712,https://issues.webrtc.org/issues/42229685 +https://crbug.com/webrtc/4713,https://issues.webrtc.org/issues/42229686 +https://crbug.com/webrtc/4714,https://issues.webrtc.org/issues/42229687 +https://crbug.com/webrtc/4715,https://issues.webrtc.org/issues/42229688 +https://crbug.com/webrtc/4716,https://issues.webrtc.org/issues/42229689 +https://crbug.com/webrtc/4717,https://issues.webrtc.org/issues/42229690 
+https://crbug.com/webrtc/4718,https://issues.webrtc.org/issues/42229691 +https://crbug.com/webrtc/4719,https://issues.webrtc.org/issues/42229692 +https://crbug.com/webrtc/472,https://issues.webrtc.org/issues/42229693 +https://crbug.com/webrtc/4720,https://issues.webrtc.org/issues/42229694 +https://crbug.com/webrtc/4721,https://issues.webrtc.org/issues/42229695 +https://crbug.com/webrtc/4722,https://issues.webrtc.org/issues/42229696 +https://crbug.com/webrtc/4723,https://issues.webrtc.org/issues/42229697 +https://crbug.com/webrtc/4724,https://issues.webrtc.org/issues/42229698 +https://crbug.com/webrtc/4725,https://issues.webrtc.org/issues/42229699 +https://crbug.com/webrtc/4726,https://issues.webrtc.org/issues/42229700 +https://crbug.com/webrtc/4728,https://issues.webrtc.org/issues/42229701 +https://crbug.com/webrtc/4729,https://issues.webrtc.org/issues/42229702 +https://crbug.com/webrtc/473,https://issues.webrtc.org/issues/42229703 +https://crbug.com/webrtc/4730,https://issues.webrtc.org/issues/42229704 +https://crbug.com/webrtc/4731,https://issues.webrtc.org/issues/42229705 +https://crbug.com/webrtc/4732,https://issues.webrtc.org/issues/42229706 +https://crbug.com/webrtc/4733,https://issues.webrtc.org/issues/42229707 +https://crbug.com/webrtc/4734,https://issues.webrtc.org/issues/42229708 +https://crbug.com/webrtc/4735,https://issues.webrtc.org/issues/42229709 +https://crbug.com/webrtc/4736,https://issues.webrtc.org/issues/42229710 +https://crbug.com/webrtc/4737,https://issues.webrtc.org/issues/42229711 +https://crbug.com/webrtc/4738,https://issues.webrtc.org/issues/42229712 +https://crbug.com/webrtc/4739,https://issues.webrtc.org/issues/42229713 +https://crbug.com/webrtc/474,https://issues.webrtc.org/issues/42229714 +https://crbug.com/webrtc/4740,https://issues.webrtc.org/issues/42229715 +https://crbug.com/webrtc/4741,https://issues.webrtc.org/issues/42229716 +https://crbug.com/webrtc/4742,https://issues.webrtc.org/issues/42229717 +https://crbug.com/webrtc/4743,https://issues.webrtc.org/issues/42229718 +https://crbug.com/webrtc/4744,https://issues.webrtc.org/issues/42229719 +https://crbug.com/webrtc/4746,https://issues.webrtc.org/issues/42229720 +https://crbug.com/webrtc/4747,https://issues.webrtc.org/issues/42229721 +https://crbug.com/webrtc/4748,https://issues.webrtc.org/issues/42229722 +https://crbug.com/webrtc/4749,https://issues.webrtc.org/issues/42229723 +https://crbug.com/webrtc/475,https://issues.webrtc.org/issues/42229724 +https://crbug.com/webrtc/4750,https://issues.webrtc.org/issues/42229725 +https://crbug.com/webrtc/4751,https://issues.webrtc.org/issues/42229726 +https://crbug.com/webrtc/4752,https://issues.webrtc.org/issues/42229727 +https://crbug.com/webrtc/4754,https://issues.webrtc.org/issues/42229728 +https://crbug.com/webrtc/4756,https://issues.webrtc.org/issues/42229729 +https://crbug.com/webrtc/4757,https://issues.webrtc.org/issues/42229730 +https://crbug.com/webrtc/4758,https://issues.webrtc.org/issues/42229731 +https://crbug.com/webrtc/4759,https://issues.webrtc.org/issues/42229732 +https://crbug.com/webrtc/476,https://issues.webrtc.org/issues/42229733 +https://crbug.com/webrtc/4760,https://issues.webrtc.org/issues/42229734 +https://crbug.com/webrtc/4761,https://issues.webrtc.org/issues/42229735 +https://crbug.com/webrtc/4762,https://issues.webrtc.org/issues/42229736 +https://crbug.com/webrtc/4763,https://issues.webrtc.org/issues/42229737 +https://crbug.com/webrtc/4764,https://issues.webrtc.org/issues/42229738 
+https://crbug.com/webrtc/4765,https://issues.webrtc.org/issues/42229739 +https://crbug.com/webrtc/4766,https://issues.webrtc.org/issues/42229740 +https://crbug.com/webrtc/4767,https://issues.webrtc.org/issues/42229741 +https://crbug.com/webrtc/4768,https://issues.webrtc.org/issues/42229742 +https://crbug.com/webrtc/4769,https://issues.webrtc.org/issues/42229743 +https://crbug.com/webrtc/477,https://issues.webrtc.org/issues/42229744 +https://crbug.com/webrtc/4770,https://issues.webrtc.org/issues/42229745 +https://crbug.com/webrtc/4771,https://issues.webrtc.org/issues/42229746 +https://crbug.com/webrtc/4772,https://issues.webrtc.org/issues/42229747 +https://crbug.com/webrtc/4773,https://issues.webrtc.org/issues/42229748 +https://crbug.com/webrtc/4774,https://issues.webrtc.org/issues/42229749 +https://crbug.com/webrtc/4775,https://issues.webrtc.org/issues/42229750 +https://crbug.com/webrtc/4776,https://issues.webrtc.org/issues/42229751 +https://crbug.com/webrtc/4777,https://issues.webrtc.org/issues/42229752 +https://crbug.com/webrtc/4778,https://issues.webrtc.org/issues/42229753 +https://crbug.com/webrtc/4779,https://issues.webrtc.org/issues/42229754 +https://crbug.com/webrtc/478,https://issues.webrtc.org/issues/42229755 +https://crbug.com/webrtc/4780,https://issues.webrtc.org/issues/42229756 +https://crbug.com/webrtc/4781,https://issues.webrtc.org/issues/42229757 +https://crbug.com/webrtc/4782,https://issues.webrtc.org/issues/42229758 +https://crbug.com/webrtc/4783,https://issues.webrtc.org/issues/42229759 +https://crbug.com/webrtc/4784,https://issues.webrtc.org/issues/42229760 +https://crbug.com/webrtc/4785,https://issues.webrtc.org/issues/42229761 +https://crbug.com/webrtc/4786,https://issues.webrtc.org/issues/42229762 +https://crbug.com/webrtc/4787,https://issues.webrtc.org/issues/42229763 +https://crbug.com/webrtc/4788,https://issues.webrtc.org/issues/42229764 +https://crbug.com/webrtc/4789,https://issues.webrtc.org/issues/42229765 +https://crbug.com/webrtc/479,https://issues.webrtc.org/issues/42229766 +https://crbug.com/webrtc/4790,https://issues.webrtc.org/issues/42229767 +https://crbug.com/webrtc/4791,https://issues.webrtc.org/issues/42229768 +https://crbug.com/webrtc/4792,https://issues.webrtc.org/issues/42229769 +https://crbug.com/webrtc/4793,https://issues.webrtc.org/issues/42229770 +https://crbug.com/webrtc/4794,https://issues.webrtc.org/issues/42229771 +https://crbug.com/webrtc/4795,https://issues.webrtc.org/issues/42229772 +https://crbug.com/webrtc/4796,https://issues.webrtc.org/issues/42229773 +https://crbug.com/webrtc/4797,https://issues.webrtc.org/issues/42229774 +https://crbug.com/webrtc/4798,https://issues.webrtc.org/issues/42229775 +https://crbug.com/webrtc/48,https://issues.webrtc.org/issues/42229776 +https://crbug.com/webrtc/480,https://issues.webrtc.org/issues/42229777 +https://crbug.com/webrtc/4800,https://issues.webrtc.org/issues/42229778 +https://crbug.com/webrtc/4801,https://issues.webrtc.org/issues/42229779 +https://crbug.com/webrtc/4802,https://issues.webrtc.org/issues/42229780 +https://crbug.com/webrtc/4803,https://issues.webrtc.org/issues/42229781 +https://crbug.com/webrtc/4804,https://issues.webrtc.org/issues/42229782 +https://crbug.com/webrtc/4805,https://issues.webrtc.org/issues/42229783 +https://crbug.com/webrtc/4806,https://issues.webrtc.org/issues/42229784 +https://crbug.com/webrtc/4808,https://issues.webrtc.org/issues/42229785 +https://crbug.com/webrtc/4809,https://issues.webrtc.org/issues/42229786 
+https://crbug.com/webrtc/481,https://issues.webrtc.org/issues/42229787 +https://crbug.com/webrtc/4811,https://issues.webrtc.org/issues/42229788 +https://crbug.com/webrtc/4812,https://issues.webrtc.org/issues/42229789 +https://crbug.com/webrtc/4813,https://issues.webrtc.org/issues/42229790 +https://crbug.com/webrtc/4814,https://issues.webrtc.org/issues/42229791 +https://crbug.com/webrtc/4815,https://issues.webrtc.org/issues/42229792 +https://crbug.com/webrtc/4816,https://issues.webrtc.org/issues/42229793 +https://crbug.com/webrtc/4817,https://issues.webrtc.org/issues/42229794 +https://crbug.com/webrtc/4818,https://issues.webrtc.org/issues/42229795 +https://crbug.com/webrtc/482,https://issues.webrtc.org/issues/42229796 +https://crbug.com/webrtc/4820,https://issues.webrtc.org/issues/42229797 +https://crbug.com/webrtc/4821,https://issues.webrtc.org/issues/42229798 +https://crbug.com/webrtc/4822,https://issues.webrtc.org/issues/42229799 +https://crbug.com/webrtc/4823,https://issues.webrtc.org/issues/42229800 +https://crbug.com/webrtc/4826,https://issues.webrtc.org/issues/42229801 +https://crbug.com/webrtc/4827,https://issues.webrtc.org/issues/42229802 +https://crbug.com/webrtc/4828,https://issues.webrtc.org/issues/42229803 +https://crbug.com/webrtc/4829,https://issues.webrtc.org/issues/42229804 +https://crbug.com/webrtc/483,https://issues.webrtc.org/issues/42229805 +https://crbug.com/webrtc/4831,https://issues.webrtc.org/issues/42229806 +https://crbug.com/webrtc/4832,https://issues.webrtc.org/issues/42229807 +https://crbug.com/webrtc/4833,https://issues.webrtc.org/issues/42229808 +https://crbug.com/webrtc/4834,https://issues.webrtc.org/issues/42229809 +https://crbug.com/webrtc/4835,https://issues.webrtc.org/issues/42229810 +https://crbug.com/webrtc/4836,https://issues.webrtc.org/issues/42229811 +https://crbug.com/webrtc/4837,https://issues.webrtc.org/issues/42229812 +https://crbug.com/webrtc/4838,https://issues.webrtc.org/issues/42229813 +https://crbug.com/webrtc/4839,https://issues.webrtc.org/issues/42229814 +https://crbug.com/webrtc/484,https://issues.webrtc.org/issues/42229815 +https://crbug.com/webrtc/4840,https://issues.webrtc.org/issues/42229816 +https://crbug.com/webrtc/4841,https://issues.webrtc.org/issues/42229817 +https://crbug.com/webrtc/4842,https://issues.webrtc.org/issues/42229818 +https://crbug.com/webrtc/4843,https://issues.webrtc.org/issues/42229819 +https://crbug.com/webrtc/4844,https://issues.webrtc.org/issues/42229820 +https://crbug.com/webrtc/4845,https://issues.webrtc.org/issues/42229821 +https://crbug.com/webrtc/4846,https://issues.webrtc.org/issues/42229822 +https://crbug.com/webrtc/4847,https://issues.webrtc.org/issues/42229823 +https://crbug.com/webrtc/4848,https://issues.webrtc.org/issues/42229824 +https://crbug.com/webrtc/4849,https://issues.webrtc.org/issues/42229825 +https://crbug.com/webrtc/485,https://issues.webrtc.org/issues/42229826 +https://crbug.com/webrtc/4850,https://issues.webrtc.org/issues/42229827 +https://crbug.com/webrtc/4851,https://issues.webrtc.org/issues/42229828 +https://crbug.com/webrtc/4852,https://issues.webrtc.org/issues/42229829 +https://crbug.com/webrtc/4853,https://issues.webrtc.org/issues/42229830 +https://crbug.com/webrtc/4854,https://issues.webrtc.org/issues/42229831 +https://crbug.com/webrtc/4855,https://issues.webrtc.org/issues/42229832 +https://crbug.com/webrtc/4856,https://issues.webrtc.org/issues/42229833 +https://crbug.com/webrtc/4857,https://issues.webrtc.org/issues/42229834 
+https://crbug.com/webrtc/4858,https://issues.webrtc.org/issues/42229835 +https://crbug.com/webrtc/4859,https://issues.webrtc.org/issues/42229836 +https://crbug.com/webrtc/486,https://issues.webrtc.org/issues/42229837 +https://crbug.com/webrtc/4860,https://issues.webrtc.org/issues/42229838 +https://crbug.com/webrtc/4861,https://issues.webrtc.org/issues/42229839 +https://crbug.com/webrtc/4863,https://issues.webrtc.org/issues/42229840 +https://crbug.com/webrtc/4864,https://issues.webrtc.org/issues/42229841 +https://crbug.com/webrtc/4865,https://issues.webrtc.org/issues/42229842 +https://crbug.com/webrtc/4866,https://issues.webrtc.org/issues/42229843 +https://crbug.com/webrtc/4867,https://issues.webrtc.org/issues/42229844 +https://crbug.com/webrtc/4868,https://issues.webrtc.org/issues/42229845 +https://crbug.com/webrtc/4869,https://issues.webrtc.org/issues/42229846 +https://crbug.com/webrtc/487,https://issues.webrtc.org/issues/42229847 +https://crbug.com/webrtc/4871,https://issues.webrtc.org/issues/42229848 +https://crbug.com/webrtc/4872,https://issues.webrtc.org/issues/42229849 +https://crbug.com/webrtc/4873,https://issues.webrtc.org/issues/42229850 +https://crbug.com/webrtc/4874,https://issues.webrtc.org/issues/42229851 +https://crbug.com/webrtc/4875,https://issues.webrtc.org/issues/42229852 +https://crbug.com/webrtc/4876,https://issues.webrtc.org/issues/42229853 +https://crbug.com/webrtc/4877,https://issues.webrtc.org/issues/42229854 +https://crbug.com/webrtc/4878,https://issues.webrtc.org/issues/42229855 +https://crbug.com/webrtc/4879,https://issues.webrtc.org/issues/42229856 +https://crbug.com/webrtc/488,https://issues.webrtc.org/issues/42229857 +https://crbug.com/webrtc/4880,https://issues.webrtc.org/issues/42229858 +https://crbug.com/webrtc/4881,https://issues.webrtc.org/issues/42229859 +https://crbug.com/webrtc/4882,https://issues.webrtc.org/issues/42229860 +https://crbug.com/webrtc/4884,https://issues.webrtc.org/issues/42229861 +https://crbug.com/webrtc/4885,https://issues.webrtc.org/issues/42229862 +https://crbug.com/webrtc/4886,https://issues.webrtc.org/issues/42229863 +https://crbug.com/webrtc/4887,https://issues.webrtc.org/issues/42229864 +https://crbug.com/webrtc/4888,https://issues.webrtc.org/issues/42229865 +https://crbug.com/webrtc/489,https://issues.webrtc.org/issues/42229866 +https://crbug.com/webrtc/4890,https://issues.webrtc.org/issues/42229867 +https://crbug.com/webrtc/4891,https://issues.webrtc.org/issues/42229868 +https://crbug.com/webrtc/4892,https://issues.webrtc.org/issues/42229869 +https://crbug.com/webrtc/4893,https://issues.webrtc.org/issues/42229870 +https://crbug.com/webrtc/4894,https://issues.webrtc.org/issues/42229871 +https://crbug.com/webrtc/4895,https://issues.webrtc.org/issues/42229872 +https://crbug.com/webrtc/4896,https://issues.webrtc.org/issues/42229873 +https://crbug.com/webrtc/4897,https://issues.webrtc.org/issues/42229874 +https://crbug.com/webrtc/4898,https://issues.webrtc.org/issues/42229875 +https://crbug.com/webrtc/4899,https://issues.webrtc.org/issues/42229876 +https://crbug.com/webrtc/49,https://issues.webrtc.org/issues/42229877 +https://crbug.com/webrtc/490,https://issues.webrtc.org/issues/42229878 +https://crbug.com/webrtc/4900,https://issues.webrtc.org/issues/42229879 +https://crbug.com/webrtc/4901,https://issues.webrtc.org/issues/42229880 +https://crbug.com/webrtc/4902,https://issues.webrtc.org/issues/42229881 +https://crbug.com/webrtc/4903,https://issues.webrtc.org/issues/42229882 
+https://crbug.com/webrtc/4904,https://issues.webrtc.org/issues/42229883 +https://crbug.com/webrtc/4905,https://issues.webrtc.org/issues/42229884 +https://crbug.com/webrtc/4907,https://issues.webrtc.org/issues/42229885 +https://crbug.com/webrtc/4908,https://issues.webrtc.org/issues/42229886 +https://crbug.com/webrtc/4909,https://issues.webrtc.org/issues/42229887 +https://crbug.com/webrtc/491,https://issues.webrtc.org/issues/42229888 +https://crbug.com/webrtc/4910,https://issues.webrtc.org/issues/42229889 +https://crbug.com/webrtc/4911,https://issues.webrtc.org/issues/42229890 +https://crbug.com/webrtc/4912,https://issues.webrtc.org/issues/42229891 +https://crbug.com/webrtc/4913,https://issues.webrtc.org/issues/42229892 +https://crbug.com/webrtc/4914,https://issues.webrtc.org/issues/42229893 +https://crbug.com/webrtc/4915,https://issues.webrtc.org/issues/42229894 +https://crbug.com/webrtc/4916,https://issues.webrtc.org/issues/42229895 +https://crbug.com/webrtc/4917,https://issues.webrtc.org/issues/42229896 +https://crbug.com/webrtc/4918,https://issues.webrtc.org/issues/42229897 +https://crbug.com/webrtc/4919,https://issues.webrtc.org/issues/42229898 +https://crbug.com/webrtc/492,https://issues.webrtc.org/issues/42229899 +https://crbug.com/webrtc/4920,https://issues.webrtc.org/issues/42229900 +https://crbug.com/webrtc/4921,https://issues.webrtc.org/issues/42229901 +https://crbug.com/webrtc/4922,https://issues.webrtc.org/issues/42229902 +https://crbug.com/webrtc/4923,https://issues.webrtc.org/issues/42229903 +https://crbug.com/webrtc/4924,https://issues.webrtc.org/issues/42229904 +https://crbug.com/webrtc/4925,https://issues.webrtc.org/issues/42229905 +https://crbug.com/webrtc/4926,https://issues.webrtc.org/issues/42229906 +https://crbug.com/webrtc/4927,https://issues.webrtc.org/issues/42229907 +https://crbug.com/webrtc/4928,https://issues.webrtc.org/issues/42229908 +https://crbug.com/webrtc/4929,https://issues.webrtc.org/issues/42229909 +https://crbug.com/webrtc/493,https://issues.webrtc.org/issues/42229910 +https://crbug.com/webrtc/4930,https://issues.webrtc.org/issues/42229911 +https://crbug.com/webrtc/4931,https://issues.webrtc.org/issues/42229912 +https://crbug.com/webrtc/4932,https://issues.webrtc.org/issues/42229913 +https://crbug.com/webrtc/4933,https://issues.webrtc.org/issues/42229914 +https://crbug.com/webrtc/4934,https://issues.webrtc.org/issues/42229915 +https://crbug.com/webrtc/4936,https://issues.webrtc.org/issues/42229916 +https://crbug.com/webrtc/4937,https://issues.webrtc.org/issues/42229917 +https://crbug.com/webrtc/4938,https://issues.webrtc.org/issues/42229918 +https://crbug.com/webrtc/4939,https://issues.webrtc.org/issues/42229919 +https://crbug.com/webrtc/494,https://issues.webrtc.org/issues/42229920 +https://crbug.com/webrtc/4940,https://issues.webrtc.org/issues/42229921 +https://crbug.com/webrtc/4941,https://issues.webrtc.org/issues/42229922 +https://crbug.com/webrtc/4942,https://issues.webrtc.org/issues/42229923 +https://crbug.com/webrtc/4943,https://issues.webrtc.org/issues/42229924 +https://crbug.com/webrtc/4944,https://issues.webrtc.org/issues/42229925 +https://crbug.com/webrtc/4945,https://issues.webrtc.org/issues/42229926 +https://crbug.com/webrtc/4946,https://issues.webrtc.org/issues/42229927 +https://crbug.com/webrtc/4947,https://issues.webrtc.org/issues/42229928 +https://crbug.com/webrtc/4948,https://issues.webrtc.org/issues/42229929 +https://crbug.com/webrtc/4949,https://issues.webrtc.org/issues/42229930 
+https://crbug.com/webrtc/495,https://issues.webrtc.org/issues/42229931 +https://crbug.com/webrtc/4950,https://issues.webrtc.org/issues/42229932 +https://crbug.com/webrtc/4951,https://issues.webrtc.org/issues/42229933 +https://crbug.com/webrtc/4952,https://issues.webrtc.org/issues/42229934 +https://crbug.com/webrtc/4953,https://issues.webrtc.org/issues/42229935 +https://crbug.com/webrtc/4954,https://issues.webrtc.org/issues/42229936 +https://crbug.com/webrtc/4955,https://issues.webrtc.org/issues/42229937 +https://crbug.com/webrtc/4956,https://issues.webrtc.org/issues/42229938 +https://crbug.com/webrtc/4957,https://issues.webrtc.org/issues/42229939 +https://crbug.com/webrtc/4958,https://issues.webrtc.org/issues/42229940 +https://crbug.com/webrtc/4959,https://issues.webrtc.org/issues/42229941 +https://crbug.com/webrtc/4960,https://issues.webrtc.org/issues/42229942 +https://crbug.com/webrtc/4961,https://issues.webrtc.org/issues/42229943 +https://crbug.com/webrtc/4962,https://issues.webrtc.org/issues/42229944 +https://crbug.com/webrtc/4963,https://issues.webrtc.org/issues/42229945 +https://crbug.com/webrtc/4964,https://issues.webrtc.org/issues/42229946 +https://crbug.com/webrtc/4965,https://issues.webrtc.org/issues/42229947 +https://crbug.com/webrtc/4966,https://issues.webrtc.org/issues/42229948 +https://crbug.com/webrtc/4967,https://issues.webrtc.org/issues/42229949 +https://crbug.com/webrtc/4968,https://issues.webrtc.org/issues/42229950 +https://crbug.com/webrtc/4969,https://issues.webrtc.org/issues/42229951 +https://crbug.com/webrtc/497,https://issues.webrtc.org/issues/42229952 +https://crbug.com/webrtc/4970,https://issues.webrtc.org/issues/42229953 +https://crbug.com/webrtc/4972,https://issues.webrtc.org/issues/42229954 +https://crbug.com/webrtc/4973,https://issues.webrtc.org/issues/42229955 +https://crbug.com/webrtc/4974,https://issues.webrtc.org/issues/42229956 +https://crbug.com/webrtc/4975,https://issues.webrtc.org/issues/42229957 +https://crbug.com/webrtc/4976,https://issues.webrtc.org/issues/42229958 +https://crbug.com/webrtc/4977,https://issues.webrtc.org/issues/42229959 +https://crbug.com/webrtc/4978,https://issues.webrtc.org/issues/42229960 +https://crbug.com/webrtc/4979,https://issues.webrtc.org/issues/42229961 +https://crbug.com/webrtc/498,https://issues.webrtc.org/issues/42229962 +https://crbug.com/webrtc/4980,https://issues.webrtc.org/issues/42229963 +https://crbug.com/webrtc/4981,https://issues.webrtc.org/issues/42229964 +https://crbug.com/webrtc/4982,https://issues.webrtc.org/issues/42229965 +https://crbug.com/webrtc/4983,https://issues.webrtc.org/issues/42229966 +https://crbug.com/webrtc/4984,https://issues.webrtc.org/issues/42229967 +https://crbug.com/webrtc/4985,https://issues.webrtc.org/issues/42229968 +https://crbug.com/webrtc/4987,https://issues.webrtc.org/issues/42229969 +https://crbug.com/webrtc/4988,https://issues.webrtc.org/issues/42229970 +https://crbug.com/webrtc/4989,https://issues.webrtc.org/issues/42229971 +https://crbug.com/webrtc/499,https://issues.webrtc.org/issues/42229972 +https://crbug.com/webrtc/4990,https://issues.webrtc.org/issues/42229973 +https://crbug.com/webrtc/4991,https://issues.webrtc.org/issues/42229974 +https://crbug.com/webrtc/4992,https://issues.webrtc.org/issues/42229975 +https://crbug.com/webrtc/4993,https://issues.webrtc.org/issues/42229976 +https://crbug.com/webrtc/4995,https://issues.webrtc.org/issues/42229977 +https://crbug.com/webrtc/4996,https://issues.webrtc.org/issues/42229978 
+https://crbug.com/webrtc/4997,https://issues.webrtc.org/issues/42229979 +https://crbug.com/webrtc/4998,https://issues.webrtc.org/issues/42229980 +https://crbug.com/webrtc/4999,https://issues.webrtc.org/issues/42229981 +https://crbug.com/webrtc/5,https://issues.webrtc.org/issues/42229982 +https://crbug.com/webrtc/50,https://issues.webrtc.org/issues/42229983 +https://crbug.com/webrtc/500,https://issues.webrtc.org/issues/42229984 +https://crbug.com/webrtc/5000,https://issues.webrtc.org/issues/42229985 +https://crbug.com/webrtc/5001,https://issues.webrtc.org/issues/42229986 +https://crbug.com/webrtc/5002,https://issues.webrtc.org/issues/42229987 +https://crbug.com/webrtc/5003,https://issues.webrtc.org/issues/42229988 +https://crbug.com/webrtc/5004,https://issues.webrtc.org/issues/42229989 +https://crbug.com/webrtc/5005,https://issues.webrtc.org/issues/42229990 +https://crbug.com/webrtc/5007,https://issues.webrtc.org/issues/42229991 +https://crbug.com/webrtc/5008,https://issues.webrtc.org/issues/42229992 +https://crbug.com/webrtc/5009,https://issues.webrtc.org/issues/42229993 +https://crbug.com/webrtc/501,https://issues.webrtc.org/issues/42229994 +https://crbug.com/webrtc/5010,https://issues.webrtc.org/issues/42229995 +https://crbug.com/webrtc/5011,https://issues.webrtc.org/issues/42229996 +https://crbug.com/webrtc/5012,https://issues.webrtc.org/issues/42229997 +https://crbug.com/webrtc/5013,https://issues.webrtc.org/issues/42229998 +https://crbug.com/webrtc/5014,https://issues.webrtc.org/issues/42229999 +https://crbug.com/webrtc/5015,https://issues.webrtc.org/issues/42230000 +https://crbug.com/webrtc/5016,https://issues.webrtc.org/issues/42230001 +https://crbug.com/webrtc/5017,https://issues.webrtc.org/issues/42230002 +https://crbug.com/webrtc/5018,https://issues.webrtc.org/issues/42230003 +https://crbug.com/webrtc/5019,https://issues.webrtc.org/issues/42230004 +https://crbug.com/webrtc/502,https://issues.webrtc.org/issues/42230005 +https://crbug.com/webrtc/5020,https://issues.webrtc.org/issues/42230006 +https://crbug.com/webrtc/5021,https://issues.webrtc.org/issues/42230007 +https://crbug.com/webrtc/5022,https://issues.webrtc.org/issues/42230008 +https://crbug.com/webrtc/5023,https://issues.webrtc.org/issues/42230009 +https://crbug.com/webrtc/5024,https://issues.webrtc.org/issues/42230010 +https://crbug.com/webrtc/5025,https://issues.webrtc.org/issues/42230011 +https://crbug.com/webrtc/5026,https://issues.webrtc.org/issues/42230012 +https://crbug.com/webrtc/5027,https://issues.webrtc.org/issues/42230013 +https://crbug.com/webrtc/5028,https://issues.webrtc.org/issues/42230014 +https://crbug.com/webrtc/5029,https://issues.webrtc.org/issues/42230015 +https://crbug.com/webrtc/503,https://issues.webrtc.org/issues/42230016 +https://crbug.com/webrtc/5030,https://issues.webrtc.org/issues/42230017 +https://crbug.com/webrtc/5031,https://issues.webrtc.org/issues/42230018 +https://crbug.com/webrtc/5032,https://issues.webrtc.org/issues/42230019 +https://crbug.com/webrtc/5033,https://issues.webrtc.org/issues/42230020 +https://crbug.com/webrtc/5034,https://issues.webrtc.org/issues/42230021 +https://crbug.com/webrtc/5035,https://issues.webrtc.org/issues/42230022 +https://crbug.com/webrtc/5036,https://issues.webrtc.org/issues/42230023 +https://crbug.com/webrtc/5037,https://issues.webrtc.org/issues/42230024 +https://crbug.com/webrtc/5038,https://issues.webrtc.org/issues/42230025 +https://crbug.com/webrtc/5039,https://issues.webrtc.org/issues/42230026 
+https://crbug.com/webrtc/504,https://issues.webrtc.org/issues/42230027 +https://crbug.com/webrtc/5040,https://issues.webrtc.org/issues/42230028 +https://crbug.com/webrtc/5041,https://issues.webrtc.org/issues/42230029 +https://crbug.com/webrtc/5042,https://issues.webrtc.org/issues/42230030 +https://crbug.com/webrtc/5043,https://issues.webrtc.org/issues/42230031 +https://crbug.com/webrtc/5044,https://issues.webrtc.org/issues/42230032 +https://crbug.com/webrtc/5045,https://issues.webrtc.org/issues/42230033 +https://crbug.com/webrtc/5046,https://issues.webrtc.org/issues/42230034 +https://crbug.com/webrtc/5047,https://issues.webrtc.org/issues/42230035 +https://crbug.com/webrtc/5048,https://issues.webrtc.org/issues/42230036 +https://crbug.com/webrtc/5049,https://issues.webrtc.org/issues/42230037 +https://crbug.com/webrtc/505,https://issues.webrtc.org/issues/42230038 +https://crbug.com/webrtc/5050,https://issues.webrtc.org/issues/42230039 +https://crbug.com/webrtc/5052,https://issues.webrtc.org/issues/42230040 +https://crbug.com/webrtc/5053,https://issues.webrtc.org/issues/42230041 +https://crbug.com/webrtc/5054,https://issues.webrtc.org/issues/42230042 +https://crbug.com/webrtc/5055,https://issues.webrtc.org/issues/42230043 +https://crbug.com/webrtc/5056,https://issues.webrtc.org/issues/42230044 +https://crbug.com/webrtc/5057,https://issues.webrtc.org/issues/42230045 +https://crbug.com/webrtc/5058,https://issues.webrtc.org/issues/42230046 +https://crbug.com/webrtc/5059,https://issues.webrtc.org/issues/42230047 +https://crbug.com/webrtc/506,https://issues.webrtc.org/issues/42230048 +https://crbug.com/webrtc/5060,https://issues.webrtc.org/issues/42230049 +https://crbug.com/webrtc/5061,https://issues.webrtc.org/issues/42230050 +https://crbug.com/webrtc/5062,https://issues.webrtc.org/issues/42230051 +https://crbug.com/webrtc/5063,https://issues.webrtc.org/issues/42230052 +https://crbug.com/webrtc/5064,https://issues.webrtc.org/issues/42230053 +https://crbug.com/webrtc/5065,https://issues.webrtc.org/issues/42230054 +https://crbug.com/webrtc/5066,https://issues.webrtc.org/issues/42230055 +https://crbug.com/webrtc/5067,https://issues.webrtc.org/issues/42230056 +https://crbug.com/webrtc/5068,https://issues.webrtc.org/issues/42230057 +https://crbug.com/webrtc/5069,https://issues.webrtc.org/issues/42230058 +https://crbug.com/webrtc/507,https://issues.webrtc.org/issues/42230059 +https://crbug.com/webrtc/5070,https://issues.webrtc.org/issues/42230060 +https://crbug.com/webrtc/5071,https://issues.webrtc.org/issues/42230061 +https://crbug.com/webrtc/5072,https://issues.webrtc.org/issues/42230062 +https://crbug.com/webrtc/5073,https://issues.webrtc.org/issues/42230063 +https://crbug.com/webrtc/5074,https://issues.webrtc.org/issues/42230064 +https://crbug.com/webrtc/5075,https://issues.webrtc.org/issues/42230065 +https://crbug.com/webrtc/5076,https://issues.webrtc.org/issues/42230066 +https://crbug.com/webrtc/5077,https://issues.webrtc.org/issues/42230067 +https://crbug.com/webrtc/5078,https://issues.webrtc.org/issues/42230068 +https://crbug.com/webrtc/508,https://issues.webrtc.org/issues/42230069 +https://crbug.com/webrtc/5080,https://issues.webrtc.org/issues/42230070 +https://crbug.com/webrtc/5081,https://issues.webrtc.org/issues/42230071 +https://crbug.com/webrtc/5082,https://issues.webrtc.org/issues/42230072 +https://crbug.com/webrtc/5083,https://issues.webrtc.org/issues/42230073 +https://crbug.com/webrtc/5084,https://issues.webrtc.org/issues/42230074 
+https://crbug.com/webrtc/5085,https://issues.webrtc.org/issues/42230075 +https://crbug.com/webrtc/5086,https://issues.webrtc.org/issues/42230076 +https://crbug.com/webrtc/5087,https://issues.webrtc.org/issues/42230077 +https://crbug.com/webrtc/5088,https://issues.webrtc.org/issues/42230078 +https://crbug.com/webrtc/5089,https://issues.webrtc.org/issues/42230079 +https://crbug.com/webrtc/509,https://issues.webrtc.org/issues/42230080 +https://crbug.com/webrtc/5090,https://issues.webrtc.org/issues/42230081 +https://crbug.com/webrtc/5091,https://issues.webrtc.org/issues/42230082 +https://crbug.com/webrtc/5093,https://issues.webrtc.org/issues/42230083 +https://crbug.com/webrtc/5094,https://issues.webrtc.org/issues/42230084 +https://crbug.com/webrtc/5095,https://issues.webrtc.org/issues/42230085 +https://crbug.com/webrtc/5096,https://issues.webrtc.org/issues/42230086 +https://crbug.com/webrtc/5097,https://issues.webrtc.org/issues/42230087 +https://crbug.com/webrtc/5098,https://issues.webrtc.org/issues/42230088 +https://crbug.com/webrtc/5099,https://issues.webrtc.org/issues/42230089 +https://crbug.com/webrtc/51,https://issues.webrtc.org/issues/42230090 +https://crbug.com/webrtc/510,https://issues.webrtc.org/issues/42230091 +https://crbug.com/webrtc/5100,https://issues.webrtc.org/issues/42230092 +https://crbug.com/webrtc/5101,https://issues.webrtc.org/issues/42230093 +https://crbug.com/webrtc/5102,https://issues.webrtc.org/issues/42230094 +https://crbug.com/webrtc/5103,https://issues.webrtc.org/issues/42230095 +https://crbug.com/webrtc/5104,https://issues.webrtc.org/issues/42230096 +https://crbug.com/webrtc/5105,https://issues.webrtc.org/issues/42230097 +https://crbug.com/webrtc/5106,https://issues.webrtc.org/issues/42230098 +https://crbug.com/webrtc/5107,https://issues.webrtc.org/issues/42230099 +https://crbug.com/webrtc/5108,https://issues.webrtc.org/issues/42230100 +https://crbug.com/webrtc/5109,https://issues.webrtc.org/issues/42230101 +https://crbug.com/webrtc/511,https://issues.webrtc.org/issues/42230102 +https://crbug.com/webrtc/5110,https://issues.webrtc.org/issues/42230103 +https://crbug.com/webrtc/5111,https://issues.webrtc.org/issues/42230104 +https://crbug.com/webrtc/5112,https://issues.webrtc.org/issues/42230105 +https://crbug.com/webrtc/5113,https://issues.webrtc.org/issues/42230106 +https://crbug.com/webrtc/5114,https://issues.webrtc.org/issues/42230107 +https://crbug.com/webrtc/5115,https://issues.webrtc.org/issues/42230108 +https://crbug.com/webrtc/5116,https://issues.webrtc.org/issues/42230109 +https://crbug.com/webrtc/5117,https://issues.webrtc.org/issues/42230110 +https://crbug.com/webrtc/5118,https://issues.webrtc.org/issues/42230111 +https://crbug.com/webrtc/5119,https://issues.webrtc.org/issues/42230112 +https://crbug.com/webrtc/512,https://issues.webrtc.org/issues/42230113 +https://crbug.com/webrtc/5120,https://issues.webrtc.org/issues/42230114 +https://crbug.com/webrtc/5122,https://issues.webrtc.org/issues/42230115 +https://crbug.com/webrtc/5123,https://issues.webrtc.org/issues/42230116 +https://crbug.com/webrtc/5125,https://issues.webrtc.org/issues/42230117 +https://crbug.com/webrtc/5126,https://issues.webrtc.org/issues/42230118 +https://crbug.com/webrtc/5127,https://issues.webrtc.org/issues/42230119 +https://crbug.com/webrtc/5128,https://issues.webrtc.org/issues/42230120 +https://crbug.com/webrtc/5129,https://issues.webrtc.org/issues/42230121 +https://crbug.com/webrtc/513,https://issues.webrtc.org/issues/42230122 
+https://crbug.com/webrtc/5130,https://issues.webrtc.org/issues/42230123 +https://crbug.com/webrtc/5131,https://issues.webrtc.org/issues/42230124 +https://crbug.com/webrtc/5132,https://issues.webrtc.org/issues/42230125 +https://crbug.com/webrtc/5133,https://issues.webrtc.org/issues/42230126 +https://crbug.com/webrtc/5134,https://issues.webrtc.org/issues/42230127 +https://crbug.com/webrtc/5135,https://issues.webrtc.org/issues/42230128 +https://crbug.com/webrtc/5136,https://issues.webrtc.org/issues/42230129 +https://crbug.com/webrtc/5137,https://issues.webrtc.org/issues/42230130 +https://crbug.com/webrtc/5138,https://issues.webrtc.org/issues/42230131 +https://crbug.com/webrtc/5139,https://issues.webrtc.org/issues/42230132 +https://crbug.com/webrtc/514,https://issues.webrtc.org/issues/42230133 +https://crbug.com/webrtc/5140,https://issues.webrtc.org/issues/42230134 +https://crbug.com/webrtc/5141,https://issues.webrtc.org/issues/42230135 +https://crbug.com/webrtc/5142,https://issues.webrtc.org/issues/42230136 +https://crbug.com/webrtc/5143,https://issues.webrtc.org/issues/42230137 +https://crbug.com/webrtc/5144,https://issues.webrtc.org/issues/42230138 +https://crbug.com/webrtc/5145,https://issues.webrtc.org/issues/42230139 +https://crbug.com/webrtc/5146,https://issues.webrtc.org/issues/42230140 +https://crbug.com/webrtc/5147,https://issues.webrtc.org/issues/42230141 +https://crbug.com/webrtc/5148,https://issues.webrtc.org/issues/42230142 +https://crbug.com/webrtc/5149,https://issues.webrtc.org/issues/42230143 +https://crbug.com/webrtc/515,https://issues.webrtc.org/issues/42230144 +https://crbug.com/webrtc/5151,https://issues.webrtc.org/issues/42230145 +https://crbug.com/webrtc/5152,https://issues.webrtc.org/issues/42230146 +https://crbug.com/webrtc/5153,https://issues.webrtc.org/issues/42230147 +https://crbug.com/webrtc/5154,https://issues.webrtc.org/issues/42230148 +https://crbug.com/webrtc/5155,https://issues.webrtc.org/issues/42230149 +https://crbug.com/webrtc/5156,https://issues.webrtc.org/issues/42230150 +https://crbug.com/webrtc/5157,https://issues.webrtc.org/issues/42230151 +https://crbug.com/webrtc/5158,https://issues.webrtc.org/issues/42230152 +https://crbug.com/webrtc/5159,https://issues.webrtc.org/issues/42230153 +https://crbug.com/webrtc/516,https://issues.webrtc.org/issues/42230154 +https://crbug.com/webrtc/5160,https://issues.webrtc.org/issues/42230155 +https://crbug.com/webrtc/5161,https://issues.webrtc.org/issues/42230156 +https://crbug.com/webrtc/5162,https://issues.webrtc.org/issues/42230157 +https://crbug.com/webrtc/5163,https://issues.webrtc.org/issues/42230158 +https://crbug.com/webrtc/5164,https://issues.webrtc.org/issues/42230159 +https://crbug.com/webrtc/5165,https://issues.webrtc.org/issues/42230160 +https://crbug.com/webrtc/5166,https://issues.webrtc.org/issues/42230161 +https://crbug.com/webrtc/5167,https://issues.webrtc.org/issues/42230162 +https://crbug.com/webrtc/5168,https://issues.webrtc.org/issues/42230163 +https://crbug.com/webrtc/5169,https://issues.webrtc.org/issues/42230164 +https://crbug.com/webrtc/517,https://issues.webrtc.org/issues/42230165 +https://crbug.com/webrtc/5170,https://issues.webrtc.org/issues/42230166 +https://crbug.com/webrtc/5171,https://issues.webrtc.org/issues/42230167 +https://crbug.com/webrtc/5172,https://issues.webrtc.org/issues/42230168 +https://crbug.com/webrtc/5173,https://issues.webrtc.org/issues/42230169 +https://crbug.com/webrtc/5174,https://issues.webrtc.org/issues/42230170 
+https://crbug.com/webrtc/5175,https://issues.webrtc.org/issues/42230171 +https://crbug.com/webrtc/5176,https://issues.webrtc.org/issues/42230172 +https://crbug.com/webrtc/5177,https://issues.webrtc.org/issues/42230173 +https://crbug.com/webrtc/5178,https://issues.webrtc.org/issues/42230174 +https://crbug.com/webrtc/5179,https://issues.webrtc.org/issues/42230175 +https://crbug.com/webrtc/518,https://issues.webrtc.org/issues/42230176 +https://crbug.com/webrtc/5180,https://issues.webrtc.org/issues/42230177 +https://crbug.com/webrtc/5181,https://issues.webrtc.org/issues/42230178 +https://crbug.com/webrtc/5182,https://issues.webrtc.org/issues/42230179 +https://crbug.com/webrtc/5183,https://issues.webrtc.org/issues/42230180 +https://crbug.com/webrtc/5184,https://issues.webrtc.org/issues/42230181 +https://crbug.com/webrtc/5185,https://issues.webrtc.org/issues/42230182 +https://crbug.com/webrtc/5186,https://issues.webrtc.org/issues/42230183 +https://crbug.com/webrtc/5188,https://issues.webrtc.org/issues/42230184 +https://crbug.com/webrtc/5189,https://issues.webrtc.org/issues/42230185 +https://crbug.com/webrtc/519,https://issues.webrtc.org/issues/42230186 +https://crbug.com/webrtc/5190,https://issues.webrtc.org/issues/42230187 +https://crbug.com/webrtc/5191,https://issues.webrtc.org/issues/42230188 +https://crbug.com/webrtc/5192,https://issues.webrtc.org/issues/42230189 +https://crbug.com/webrtc/5193,https://issues.webrtc.org/issues/42230190 +https://crbug.com/webrtc/5194,https://issues.webrtc.org/issues/42230191 +https://crbug.com/webrtc/5195,https://issues.webrtc.org/issues/42230192 +https://crbug.com/webrtc/5196,https://issues.webrtc.org/issues/42230193 +https://crbug.com/webrtc/5197,https://issues.webrtc.org/issues/42230194 +https://crbug.com/webrtc/5198,https://issues.webrtc.org/issues/42230195 +https://crbug.com/webrtc/5199,https://issues.webrtc.org/issues/42230196 +https://crbug.com/webrtc/52,https://issues.webrtc.org/issues/42230197 +https://crbug.com/webrtc/520,https://issues.webrtc.org/issues/42230198 +https://crbug.com/webrtc/5200,https://issues.webrtc.org/issues/42230199 +https://crbug.com/webrtc/5201,https://issues.webrtc.org/issues/42230200 +https://crbug.com/webrtc/5202,https://issues.webrtc.org/issues/42230201 +https://crbug.com/webrtc/5204,https://issues.webrtc.org/issues/42230202 +https://crbug.com/webrtc/5205,https://issues.webrtc.org/issues/42230203 +https://crbug.com/webrtc/5206,https://issues.webrtc.org/issues/42230204 +https://crbug.com/webrtc/5207,https://issues.webrtc.org/issues/42230205 +https://crbug.com/webrtc/5209,https://issues.webrtc.org/issues/42230206 +https://crbug.com/webrtc/521,https://issues.webrtc.org/issues/42230207 +https://crbug.com/webrtc/5210,https://issues.webrtc.org/issues/42230208 +https://crbug.com/webrtc/5211,https://issues.webrtc.org/issues/42230209 +https://crbug.com/webrtc/5212,https://issues.webrtc.org/issues/42230210 +https://crbug.com/webrtc/5213,https://issues.webrtc.org/issues/42230211 +https://crbug.com/webrtc/5214,https://issues.webrtc.org/issues/42230212 +https://crbug.com/webrtc/5215,https://issues.webrtc.org/issues/42230213 +https://crbug.com/webrtc/5216,https://issues.webrtc.org/issues/42230214 +https://crbug.com/webrtc/5217,https://issues.webrtc.org/issues/42230215 +https://crbug.com/webrtc/5218,https://issues.webrtc.org/issues/42230216 +https://crbug.com/webrtc/5219,https://issues.webrtc.org/issues/42230217 +https://crbug.com/webrtc/522,https://issues.webrtc.org/issues/42230218 
+https://crbug.com/webrtc/5220,https://issues.webrtc.org/issues/42230219 +https://crbug.com/webrtc/5221,https://issues.webrtc.org/issues/42230220 +https://crbug.com/webrtc/5223,https://issues.webrtc.org/issues/42230221 +https://crbug.com/webrtc/5224,https://issues.webrtc.org/issues/42230222 +https://crbug.com/webrtc/5225,https://issues.webrtc.org/issues/42230223 +https://crbug.com/webrtc/5226,https://issues.webrtc.org/issues/42230224 +https://crbug.com/webrtc/5227,https://issues.webrtc.org/issues/42230225 +https://crbug.com/webrtc/5228,https://issues.webrtc.org/issues/42230226 +https://crbug.com/webrtc/5229,https://issues.webrtc.org/issues/42230227 +https://crbug.com/webrtc/523,https://issues.webrtc.org/issues/42230228 +https://crbug.com/webrtc/5230,https://issues.webrtc.org/issues/42230229 +https://crbug.com/webrtc/5232,https://issues.webrtc.org/issues/42230230 +https://crbug.com/webrtc/5233,https://issues.webrtc.org/issues/42230231 +https://crbug.com/webrtc/5234,https://issues.webrtc.org/issues/42230232 +https://crbug.com/webrtc/5235,https://issues.webrtc.org/issues/42230233 +https://crbug.com/webrtc/5236,https://issues.webrtc.org/issues/42230234 +https://crbug.com/webrtc/5237,https://issues.webrtc.org/issues/42230235 +https://crbug.com/webrtc/5238,https://issues.webrtc.org/issues/42230236 +https://crbug.com/webrtc/5239,https://issues.webrtc.org/issues/42230237 +https://crbug.com/webrtc/524,https://issues.webrtc.org/issues/42230238 +https://crbug.com/webrtc/5240,https://issues.webrtc.org/issues/42230239 +https://crbug.com/webrtc/5241,https://issues.webrtc.org/issues/42230240 +https://crbug.com/webrtc/5242,https://issues.webrtc.org/issues/42230241 +https://crbug.com/webrtc/5243,https://issues.webrtc.org/issues/42230242 +https://crbug.com/webrtc/5244,https://issues.webrtc.org/issues/42230243 +https://crbug.com/webrtc/5245,https://issues.webrtc.org/issues/42230244 +https://crbug.com/webrtc/5246,https://issues.webrtc.org/issues/42230245 +https://crbug.com/webrtc/5247,https://issues.webrtc.org/issues/42230246 +https://crbug.com/webrtc/5248,https://issues.webrtc.org/issues/42230247 +https://crbug.com/webrtc/5249,https://issues.webrtc.org/issues/42230248 +https://crbug.com/webrtc/525,https://issues.webrtc.org/issues/42230249 +https://crbug.com/webrtc/5250,https://issues.webrtc.org/issues/42230250 +https://crbug.com/webrtc/5251,https://issues.webrtc.org/issues/42230251 +https://crbug.com/webrtc/5252,https://issues.webrtc.org/issues/42230252 +https://crbug.com/webrtc/5253,https://issues.webrtc.org/issues/42230253 +https://crbug.com/webrtc/5254,https://issues.webrtc.org/issues/42230254 +https://crbug.com/webrtc/5255,https://issues.webrtc.org/issues/42230255 +https://crbug.com/webrtc/5256,https://issues.webrtc.org/issues/42230256 +https://crbug.com/webrtc/5257,https://issues.webrtc.org/issues/42230257 +https://crbug.com/webrtc/5258,https://issues.webrtc.org/issues/42230258 +https://crbug.com/webrtc/5259,https://issues.webrtc.org/issues/42230259 +https://crbug.com/webrtc/526,https://issues.webrtc.org/issues/42230260 +https://crbug.com/webrtc/5260,https://issues.webrtc.org/issues/42230261 +https://crbug.com/webrtc/5261,https://issues.webrtc.org/issues/42230262 +https://crbug.com/webrtc/5262,https://issues.webrtc.org/issues/42230263 +https://crbug.com/webrtc/5263,https://issues.webrtc.org/issues/42230264 +https://crbug.com/webrtc/5264,https://issues.webrtc.org/issues/42230265 +https://crbug.com/webrtc/5265,https://issues.webrtc.org/issues/42230266 
+https://crbug.com/webrtc/5266,https://issues.webrtc.org/issues/42230267 +https://crbug.com/webrtc/5267,https://issues.webrtc.org/issues/42230268 +https://crbug.com/webrtc/5268,https://issues.webrtc.org/issues/42230269 +https://crbug.com/webrtc/5269,https://issues.webrtc.org/issues/42230270 +https://crbug.com/webrtc/527,https://issues.webrtc.org/issues/42230271 +https://crbug.com/webrtc/5270,https://issues.webrtc.org/issues/42230272 +https://crbug.com/webrtc/5271,https://issues.webrtc.org/issues/42230273 +https://crbug.com/webrtc/5272,https://issues.webrtc.org/issues/42230274 +https://crbug.com/webrtc/5273,https://issues.webrtc.org/issues/42230275 +https://crbug.com/webrtc/5274,https://issues.webrtc.org/issues/42230276 +https://crbug.com/webrtc/5275,https://issues.webrtc.org/issues/42230277 +https://crbug.com/webrtc/5276,https://issues.webrtc.org/issues/42230278 +https://crbug.com/webrtc/5277,https://issues.webrtc.org/issues/42230279 +https://crbug.com/webrtc/5278,https://issues.webrtc.org/issues/42230280 +https://crbug.com/webrtc/5279,https://issues.webrtc.org/issues/42230281 +https://crbug.com/webrtc/528,https://issues.webrtc.org/issues/42230282 +https://crbug.com/webrtc/5280,https://issues.webrtc.org/issues/42230283 +https://crbug.com/webrtc/5281,https://issues.webrtc.org/issues/42230284 +https://crbug.com/webrtc/5282,https://issues.webrtc.org/issues/42230285 +https://crbug.com/webrtc/5283,https://issues.webrtc.org/issues/42230286 +https://crbug.com/webrtc/5284,https://issues.webrtc.org/issues/42230287 +https://crbug.com/webrtc/5285,https://issues.webrtc.org/issues/42230288 +https://crbug.com/webrtc/5286,https://issues.webrtc.org/issues/42230289 +https://crbug.com/webrtc/5287,https://issues.webrtc.org/issues/42230290 +https://crbug.com/webrtc/5288,https://issues.webrtc.org/issues/42230291 +https://crbug.com/webrtc/5289,https://issues.webrtc.org/issues/42230292 +https://crbug.com/webrtc/5290,https://issues.webrtc.org/issues/42230293 +https://crbug.com/webrtc/5291,https://issues.webrtc.org/issues/42230294 +https://crbug.com/webrtc/5292,https://issues.webrtc.org/issues/42230295 +https://crbug.com/webrtc/5293,https://issues.webrtc.org/issues/42230296 +https://crbug.com/webrtc/5294,https://issues.webrtc.org/issues/42230297 +https://crbug.com/webrtc/5295,https://issues.webrtc.org/issues/42230298 +https://crbug.com/webrtc/5296,https://issues.webrtc.org/issues/42230299 +https://crbug.com/webrtc/5297,https://issues.webrtc.org/issues/42230300 +https://crbug.com/webrtc/5298,https://issues.webrtc.org/issues/42230301 +https://crbug.com/webrtc/5299,https://issues.webrtc.org/issues/42230302 +https://crbug.com/webrtc/53,https://issues.webrtc.org/issues/42230303 +https://crbug.com/webrtc/530,https://issues.webrtc.org/issues/42230304 +https://crbug.com/webrtc/5300,https://issues.webrtc.org/issues/42230305 +https://crbug.com/webrtc/5301,https://issues.webrtc.org/issues/42230306 +https://crbug.com/webrtc/5302,https://issues.webrtc.org/issues/42230307 +https://crbug.com/webrtc/5303,https://issues.webrtc.org/issues/42230308 +https://crbug.com/webrtc/5304,https://issues.webrtc.org/issues/42230309 +https://crbug.com/webrtc/5306,https://issues.webrtc.org/issues/42230310 +https://crbug.com/webrtc/5307,https://issues.webrtc.org/issues/42230311 +https://crbug.com/webrtc/5308,https://issues.webrtc.org/issues/42230312 +https://crbug.com/webrtc/5309,https://issues.webrtc.org/issues/42230313 +https://crbug.com/webrtc/531,https://issues.webrtc.org/issues/42230314 
+https://crbug.com/webrtc/5310,https://issues.webrtc.org/issues/42230315 +https://crbug.com/webrtc/5311,https://issues.webrtc.org/issues/42230316 +https://crbug.com/webrtc/5312,https://issues.webrtc.org/issues/42230317 +https://crbug.com/webrtc/5313,https://issues.webrtc.org/issues/42230318 +https://crbug.com/webrtc/5314,https://issues.webrtc.org/issues/42230319 +https://crbug.com/webrtc/5315,https://issues.webrtc.org/issues/42230320 +https://crbug.com/webrtc/5316,https://issues.webrtc.org/issues/42230321 +https://crbug.com/webrtc/5317,https://issues.webrtc.org/issues/42230322 +https://crbug.com/webrtc/5318,https://issues.webrtc.org/issues/42230323 +https://crbug.com/webrtc/5319,https://issues.webrtc.org/issues/42230324 +https://crbug.com/webrtc/532,https://issues.webrtc.org/issues/42230325 +https://crbug.com/webrtc/5320,https://issues.webrtc.org/issues/42230326 +https://crbug.com/webrtc/5321,https://issues.webrtc.org/issues/42230327 +https://crbug.com/webrtc/5322,https://issues.webrtc.org/issues/42230328 +https://crbug.com/webrtc/5323,https://issues.webrtc.org/issues/42230329 +https://crbug.com/webrtc/5324,https://issues.webrtc.org/issues/42230330 +https://crbug.com/webrtc/5325,https://issues.webrtc.org/issues/42230331 +https://crbug.com/webrtc/5326,https://issues.webrtc.org/issues/42230332 +https://crbug.com/webrtc/5327,https://issues.webrtc.org/issues/42230333 +https://crbug.com/webrtc/5328,https://issues.webrtc.org/issues/42230334 +https://crbug.com/webrtc/5329,https://issues.webrtc.org/issues/42230335 +https://crbug.com/webrtc/533,https://issues.webrtc.org/issues/42230337 +https://crbug.com/webrtc/5330,https://issues.webrtc.org/issues/42230338 +https://crbug.com/webrtc/5331,https://issues.webrtc.org/issues/42230339 +https://crbug.com/webrtc/5332,https://issues.webrtc.org/issues/42230340 +https://crbug.com/webrtc/5333,https://issues.webrtc.org/issues/42230341 +https://crbug.com/webrtc/5334,https://issues.webrtc.org/issues/42230342 +https://crbug.com/webrtc/5335,https://issues.webrtc.org/issues/42230343 +https://crbug.com/webrtc/5336,https://issues.webrtc.org/issues/42230344 +https://crbug.com/webrtc/5337,https://issues.webrtc.org/issues/42230345 +https://crbug.com/webrtc/5338,https://issues.webrtc.org/issues/42230346 +https://crbug.com/webrtc/5339,https://issues.webrtc.org/issues/42230347 +https://crbug.com/webrtc/534,https://issues.webrtc.org/issues/42230348 +https://crbug.com/webrtc/5340,https://issues.webrtc.org/issues/42230349 +https://crbug.com/webrtc/5341,https://issues.webrtc.org/issues/42230350 +https://crbug.com/webrtc/5342,https://issues.webrtc.org/issues/42230351 +https://crbug.com/webrtc/5343,https://issues.webrtc.org/issues/42230352 +https://crbug.com/webrtc/5344,https://issues.webrtc.org/issues/42230353 +https://crbug.com/webrtc/5345,https://issues.webrtc.org/issues/42230354 +https://crbug.com/webrtc/5346,https://issues.webrtc.org/issues/42230355 +https://crbug.com/webrtc/5347,https://issues.webrtc.org/issues/42230356 +https://crbug.com/webrtc/5348,https://issues.webrtc.org/issues/42230357 +https://crbug.com/webrtc/5349,https://issues.webrtc.org/issues/42230358 +https://crbug.com/webrtc/535,https://issues.webrtc.org/issues/42230359 +https://crbug.com/webrtc/5350,https://issues.webrtc.org/issues/42230360 +https://crbug.com/webrtc/5351,https://issues.webrtc.org/issues/42230361 +https://crbug.com/webrtc/5352,https://issues.webrtc.org/issues/42230362 +https://crbug.com/webrtc/5353,https://issues.webrtc.org/issues/42230363 
+https://crbug.com/webrtc/5354,https://issues.webrtc.org/issues/42230364 +https://crbug.com/webrtc/5355,https://issues.webrtc.org/issues/42230365 +https://crbug.com/webrtc/5356,https://issues.webrtc.org/issues/42230366 +https://crbug.com/webrtc/5357,https://issues.webrtc.org/issues/42230367 +https://crbug.com/webrtc/5358,https://issues.webrtc.org/issues/42230368 +https://crbug.com/webrtc/5359,https://issues.webrtc.org/issues/42230369 +https://crbug.com/webrtc/536,https://issues.webrtc.org/issues/42230370 +https://crbug.com/webrtc/5360,https://issues.webrtc.org/issues/42230371 +https://crbug.com/webrtc/5362,https://issues.webrtc.org/issues/42230372 +https://crbug.com/webrtc/5363,https://issues.webrtc.org/issues/42230373 +https://crbug.com/webrtc/5364,https://issues.webrtc.org/issues/42230374 +https://crbug.com/webrtc/5365,https://issues.webrtc.org/issues/42230375 +https://crbug.com/webrtc/5366,https://issues.webrtc.org/issues/42230376 +https://crbug.com/webrtc/5367,https://issues.webrtc.org/issues/42230377 +https://crbug.com/webrtc/5368,https://issues.webrtc.org/issues/42230378 +https://crbug.com/webrtc/5369,https://issues.webrtc.org/issues/42230379 +https://crbug.com/webrtc/537,https://issues.webrtc.org/issues/42230380 +https://crbug.com/webrtc/5370,https://issues.webrtc.org/issues/42230381 +https://crbug.com/webrtc/5371,https://issues.webrtc.org/issues/42230382 +https://crbug.com/webrtc/5372,https://issues.webrtc.org/issues/42230383 +https://crbug.com/webrtc/5373,https://issues.webrtc.org/issues/42230384 +https://crbug.com/webrtc/5374,https://issues.webrtc.org/issues/42230385 +https://crbug.com/webrtc/5375,https://issues.webrtc.org/issues/42230386 +https://crbug.com/webrtc/5376,https://issues.webrtc.org/issues/42230387 +https://crbug.com/webrtc/5377,https://issues.webrtc.org/issues/42230388 +https://crbug.com/webrtc/5378,https://issues.webrtc.org/issues/42230389 +https://crbug.com/webrtc/5379,https://issues.webrtc.org/issues/42230390 +https://crbug.com/webrtc/538,https://issues.webrtc.org/issues/42230391 +https://crbug.com/webrtc/5380,https://issues.webrtc.org/issues/42230392 +https://crbug.com/webrtc/5381,https://issues.webrtc.org/issues/42230393 +https://crbug.com/webrtc/5382,https://issues.webrtc.org/issues/42230394 +https://crbug.com/webrtc/5383,https://issues.webrtc.org/issues/42230395 +https://crbug.com/webrtc/5384,https://issues.webrtc.org/issues/42230396 +https://crbug.com/webrtc/5385,https://issues.webrtc.org/issues/42230397 +https://crbug.com/webrtc/5386,https://issues.webrtc.org/issues/42230398 +https://crbug.com/webrtc/5387,https://issues.webrtc.org/issues/42230399 +https://crbug.com/webrtc/5388,https://issues.webrtc.org/issues/42230400 +https://crbug.com/webrtc/5389,https://issues.webrtc.org/issues/42230401 +https://crbug.com/webrtc/539,https://issues.webrtc.org/issues/42230402 +https://crbug.com/webrtc/5390,https://issues.webrtc.org/issues/42230403 +https://crbug.com/webrtc/5391,https://issues.webrtc.org/issues/42230404 +https://crbug.com/webrtc/5392,https://issues.webrtc.org/issues/42230405 +https://crbug.com/webrtc/5393,https://issues.webrtc.org/issues/42230406 +https://crbug.com/webrtc/5394,https://issues.webrtc.org/issues/42230407 +https://crbug.com/webrtc/5395,https://issues.webrtc.org/issues/42230408 +https://crbug.com/webrtc/5396,https://issues.webrtc.org/issues/42230409 +https://crbug.com/webrtc/5397,https://issues.webrtc.org/issues/42230410 +https://crbug.com/webrtc/5398,https://issues.webrtc.org/issues/42230411 
+https://crbug.com/webrtc/5399,https://issues.webrtc.org/issues/42230412 +https://crbug.com/webrtc/54,https://issues.webrtc.org/issues/42230413 +https://crbug.com/webrtc/540,https://issues.webrtc.org/issues/42230414 +https://crbug.com/webrtc/5400,https://issues.webrtc.org/issues/42230415 +https://crbug.com/webrtc/5401,https://issues.webrtc.org/issues/42230416 +https://crbug.com/webrtc/5402,https://issues.webrtc.org/issues/42230417 +https://crbug.com/webrtc/5403,https://issues.webrtc.org/issues/42230418 +https://crbug.com/webrtc/5404,https://issues.webrtc.org/issues/42230419 +https://crbug.com/webrtc/5405,https://issues.webrtc.org/issues/42230420 +https://crbug.com/webrtc/5406,https://issues.webrtc.org/issues/42230421 +https://crbug.com/webrtc/5407,https://issues.webrtc.org/issues/42230422 +https://crbug.com/webrtc/5408,https://issues.webrtc.org/issues/42230423 +https://crbug.com/webrtc/5409,https://issues.webrtc.org/issues/42230424 +https://crbug.com/webrtc/541,https://issues.webrtc.org/issues/42230425 +https://crbug.com/webrtc/5411,https://issues.webrtc.org/issues/42230426 +https://crbug.com/webrtc/5412,https://issues.webrtc.org/issues/42230427 +https://crbug.com/webrtc/5413,https://issues.webrtc.org/issues/42230428 +https://crbug.com/webrtc/5414,https://issues.webrtc.org/issues/42230429 +https://crbug.com/webrtc/5415,https://issues.webrtc.org/issues/42230430 +https://crbug.com/webrtc/5416,https://issues.webrtc.org/issues/42230431 +https://crbug.com/webrtc/5417,https://issues.webrtc.org/issues/42230432 +https://crbug.com/webrtc/5418,https://issues.webrtc.org/issues/42230433 +https://crbug.com/webrtc/5419,https://issues.webrtc.org/issues/42230434 +https://crbug.com/webrtc/542,https://issues.webrtc.org/issues/42230435 +https://crbug.com/webrtc/5420,https://issues.webrtc.org/issues/42230436 +https://crbug.com/webrtc/5421,https://issues.webrtc.org/issues/42230437 +https://crbug.com/webrtc/5422,https://issues.webrtc.org/issues/42230438 +https://crbug.com/webrtc/5423,https://issues.webrtc.org/issues/42230439 +https://crbug.com/webrtc/5425,https://issues.webrtc.org/issues/42230440 +https://crbug.com/webrtc/5426,https://issues.webrtc.org/issues/42230441 +https://crbug.com/webrtc/5429,https://issues.webrtc.org/issues/42230442 +https://crbug.com/webrtc/543,https://issues.webrtc.org/issues/42230443 +https://crbug.com/webrtc/5430,https://issues.webrtc.org/issues/42230444 +https://crbug.com/webrtc/5431,https://issues.webrtc.org/issues/42230445 +https://crbug.com/webrtc/5432,https://issues.webrtc.org/issues/42230446 +https://crbug.com/webrtc/5433,https://issues.webrtc.org/issues/42230447 +https://crbug.com/webrtc/5434,https://issues.webrtc.org/issues/42230448 +https://crbug.com/webrtc/5435,https://issues.webrtc.org/issues/42230449 +https://crbug.com/webrtc/5436,https://issues.webrtc.org/issues/42230450 +https://crbug.com/webrtc/5437,https://issues.webrtc.org/issues/42230451 +https://crbug.com/webrtc/5438,https://issues.webrtc.org/issues/42230452 +https://crbug.com/webrtc/5439,https://issues.webrtc.org/issues/42230453 +https://crbug.com/webrtc/544,https://issues.webrtc.org/issues/42230454 +https://crbug.com/webrtc/5440,https://issues.webrtc.org/issues/42230455 +https://crbug.com/webrtc/5441,https://issues.webrtc.org/issues/42230456 +https://crbug.com/webrtc/5442,https://issues.webrtc.org/issues/42230457 +https://crbug.com/webrtc/5443,https://issues.webrtc.org/issues/42230458 +https://crbug.com/webrtc/5444,https://issues.webrtc.org/issues/42230459 
+https://crbug.com/webrtc/5445,https://issues.webrtc.org/issues/42230460 +https://crbug.com/webrtc/5446,https://issues.webrtc.org/issues/42230461 +https://crbug.com/webrtc/5447,https://issues.webrtc.org/issues/42230462 +https://crbug.com/webrtc/5448,https://issues.webrtc.org/issues/42230463 +https://crbug.com/webrtc/5449,https://issues.webrtc.org/issues/42230465 +https://crbug.com/webrtc/545,https://issues.webrtc.org/issues/42230466 +https://crbug.com/webrtc/5450,https://issues.webrtc.org/issues/42230467 +https://crbug.com/webrtc/5451,https://issues.webrtc.org/issues/42230468 +https://crbug.com/webrtc/5452,https://issues.webrtc.org/issues/42230469 +https://crbug.com/webrtc/5453,https://issues.webrtc.org/issues/42230470 +https://crbug.com/webrtc/5454,https://issues.webrtc.org/issues/42230471 +https://crbug.com/webrtc/5455,https://issues.webrtc.org/issues/42230472 +https://crbug.com/webrtc/5457,https://issues.webrtc.org/issues/42230473 +https://crbug.com/webrtc/5458,https://issues.webrtc.org/issues/42230474 +https://crbug.com/webrtc/5459,https://issues.webrtc.org/issues/42230475 +https://crbug.com/webrtc/546,https://issues.webrtc.org/issues/42230476 +https://crbug.com/webrtc/5460,https://issues.webrtc.org/issues/42230477 +https://crbug.com/webrtc/5461,https://issues.webrtc.org/issues/42230478 +https://crbug.com/webrtc/5462,https://issues.webrtc.org/issues/42230479 +https://crbug.com/webrtc/5463,https://issues.webrtc.org/issues/42230480 +https://crbug.com/webrtc/5464,https://issues.webrtc.org/issues/42230481 +https://crbug.com/webrtc/5465,https://issues.webrtc.org/issues/42230482 +https://crbug.com/webrtc/5466,https://issues.webrtc.org/issues/42230483 +https://crbug.com/webrtc/5467,https://issues.webrtc.org/issues/42230484 +https://crbug.com/webrtc/5468,https://issues.webrtc.org/issues/42230485 +https://crbug.com/webrtc/5469,https://issues.webrtc.org/issues/42230486 +https://crbug.com/webrtc/547,https://issues.webrtc.org/issues/42230487 +https://crbug.com/webrtc/5470,https://issues.webrtc.org/issues/42230488 +https://crbug.com/webrtc/5471,https://issues.webrtc.org/issues/42230489 +https://crbug.com/webrtc/5472,https://issues.webrtc.org/issues/42230490 +https://crbug.com/webrtc/5473,https://issues.webrtc.org/issues/42230491 +https://crbug.com/webrtc/5474,https://issues.webrtc.org/issues/42230492 +https://crbug.com/webrtc/5475,https://issues.webrtc.org/issues/42230493 +https://crbug.com/webrtc/5476,https://issues.webrtc.org/issues/42230494 +https://crbug.com/webrtc/5477,https://issues.webrtc.org/issues/42230495 +https://crbug.com/webrtc/5478,https://issues.webrtc.org/issues/42230496 +https://crbug.com/webrtc/5479,https://issues.webrtc.org/issues/42230497 +https://crbug.com/webrtc/548,https://issues.webrtc.org/issues/42230498 +https://crbug.com/webrtc/5480,https://issues.webrtc.org/issues/42230499 +https://crbug.com/webrtc/5481,https://issues.webrtc.org/issues/42230500 +https://crbug.com/webrtc/5482,https://issues.webrtc.org/issues/42230501 +https://crbug.com/webrtc/5483,https://issues.webrtc.org/issues/42230502 +https://crbug.com/webrtc/5484,https://issues.webrtc.org/issues/42230503 +https://crbug.com/webrtc/5485,https://issues.webrtc.org/issues/42230504 +https://crbug.com/webrtc/5486,https://issues.webrtc.org/issues/42230505 +https://crbug.com/webrtc/5487,https://issues.webrtc.org/issues/42230506 +https://crbug.com/webrtc/5488,https://issues.webrtc.org/issues/42230507 +https://crbug.com/webrtc/5489,https://issues.webrtc.org/issues/42230508 
+https://crbug.com/webrtc/549,https://issues.webrtc.org/issues/42230509 +https://crbug.com/webrtc/5490,https://issues.webrtc.org/issues/42230510 +https://crbug.com/webrtc/5491,https://issues.webrtc.org/issues/42230511 +https://crbug.com/webrtc/5492,https://issues.webrtc.org/issues/42230512 +https://crbug.com/webrtc/5493,https://issues.webrtc.org/issues/42230513 +https://crbug.com/webrtc/5494,https://issues.webrtc.org/issues/42230514 +https://crbug.com/webrtc/5495,https://issues.webrtc.org/issues/42230515 +https://crbug.com/webrtc/5496,https://issues.webrtc.org/issues/42230516 +https://crbug.com/webrtc/5497,https://issues.webrtc.org/issues/42230517 +https://crbug.com/webrtc/5498,https://issues.webrtc.org/issues/42230518 +https://crbug.com/webrtc/5499,https://issues.webrtc.org/issues/42230519 +https://crbug.com/webrtc/55,https://issues.webrtc.org/issues/42230520 +https://crbug.com/webrtc/550,https://issues.webrtc.org/issues/42230521 +https://crbug.com/webrtc/5500,https://issues.webrtc.org/issues/42230522 +https://crbug.com/webrtc/5501,https://issues.webrtc.org/issues/42230523 +https://crbug.com/webrtc/5502,https://issues.webrtc.org/issues/42230524 +https://crbug.com/webrtc/5503,https://issues.webrtc.org/issues/42230525 +https://crbug.com/webrtc/5504,https://issues.webrtc.org/issues/42230526 +https://crbug.com/webrtc/5505,https://issues.webrtc.org/issues/42230527 +https://crbug.com/webrtc/5507,https://issues.webrtc.org/issues/42230528 +https://crbug.com/webrtc/5508,https://issues.webrtc.org/issues/42230529 +https://crbug.com/webrtc/5509,https://issues.webrtc.org/issues/42230530 +https://crbug.com/webrtc/551,https://issues.webrtc.org/issues/42230531 +https://crbug.com/webrtc/5510,https://issues.webrtc.org/issues/42230532 +https://crbug.com/webrtc/5511,https://issues.webrtc.org/issues/42230533 +https://crbug.com/webrtc/5512,https://issues.webrtc.org/issues/42230534 +https://crbug.com/webrtc/5513,https://issues.webrtc.org/issues/42230535 +https://crbug.com/webrtc/5514,https://issues.webrtc.org/issues/42230536 +https://crbug.com/webrtc/5515,https://issues.webrtc.org/issues/42230537 +https://crbug.com/webrtc/5516,https://issues.webrtc.org/issues/42230538 +https://crbug.com/webrtc/5517,https://issues.webrtc.org/issues/42230539 +https://crbug.com/webrtc/5518,https://issues.webrtc.org/issues/42230540 +https://crbug.com/webrtc/5519,https://issues.webrtc.org/issues/42230541 +https://crbug.com/webrtc/552,https://issues.webrtc.org/issues/42230542 +https://crbug.com/webrtc/5520,https://issues.webrtc.org/issues/42230543 +https://crbug.com/webrtc/5521,https://issues.webrtc.org/issues/42230544 +https://crbug.com/webrtc/5522,https://issues.webrtc.org/issues/42230545 +https://crbug.com/webrtc/5523,https://issues.webrtc.org/issues/42230546 +https://crbug.com/webrtc/5524,https://issues.webrtc.org/issues/42230547 +https://crbug.com/webrtc/5526,https://issues.webrtc.org/issues/42230548 +https://crbug.com/webrtc/5528,https://issues.webrtc.org/issues/42230549 +https://crbug.com/webrtc/5529,https://issues.webrtc.org/issues/42230550 +https://crbug.com/webrtc/553,https://issues.webrtc.org/issues/42230551 +https://crbug.com/webrtc/5530,https://issues.webrtc.org/issues/42230552 +https://crbug.com/webrtc/5531,https://issues.webrtc.org/issues/42230553 +https://crbug.com/webrtc/5532,https://issues.webrtc.org/issues/42230554 +https://crbug.com/webrtc/5533,https://issues.webrtc.org/issues/42230555 +https://crbug.com/webrtc/5534,https://issues.webrtc.org/issues/42230556 
+https://crbug.com/webrtc/5535,https://issues.webrtc.org/issues/42230557 +https://crbug.com/webrtc/5536,https://issues.webrtc.org/issues/42230558 +https://crbug.com/webrtc/5537,https://issues.webrtc.org/issues/42230559 +https://crbug.com/webrtc/5538,https://issues.webrtc.org/issues/42230560 +https://crbug.com/webrtc/5539,https://issues.webrtc.org/issues/42230561 +https://crbug.com/webrtc/554,https://issues.webrtc.org/issues/42230562 +https://crbug.com/webrtc/5540,https://issues.webrtc.org/issues/42230563 +https://crbug.com/webrtc/5541,https://issues.webrtc.org/issues/42230564 +https://crbug.com/webrtc/5542,https://issues.webrtc.org/issues/42230565 +https://crbug.com/webrtc/5543,https://issues.webrtc.org/issues/42230566 +https://crbug.com/webrtc/5544,https://issues.webrtc.org/issues/42230567 +https://crbug.com/webrtc/5545,https://issues.webrtc.org/issues/42230568 +https://crbug.com/webrtc/5546,https://issues.webrtc.org/issues/42230569 +https://crbug.com/webrtc/5547,https://issues.webrtc.org/issues/42230570 +https://crbug.com/webrtc/5548,https://issues.webrtc.org/issues/42230571 +https://crbug.com/webrtc/5549,https://issues.webrtc.org/issues/42230572 +https://crbug.com/webrtc/555,https://issues.webrtc.org/issues/42230573 +https://crbug.com/webrtc/5550,https://issues.webrtc.org/issues/42230574 +https://crbug.com/webrtc/5551,https://issues.webrtc.org/issues/42230575 +https://crbug.com/webrtc/5552,https://issues.webrtc.org/issues/42230576 +https://crbug.com/webrtc/5553,https://issues.webrtc.org/issues/42230577 +https://crbug.com/webrtc/5554,https://issues.webrtc.org/issues/42230578 +https://crbug.com/webrtc/5555,https://issues.webrtc.org/issues/42230579 +https://crbug.com/webrtc/5556,https://issues.webrtc.org/issues/42230580 +https://crbug.com/webrtc/5557,https://issues.webrtc.org/issues/42230581 +https://crbug.com/webrtc/5559,https://issues.webrtc.org/issues/42230582 +https://crbug.com/webrtc/556,https://issues.webrtc.org/issues/42230583 +https://crbug.com/webrtc/5560,https://issues.webrtc.org/issues/42230584 +https://crbug.com/webrtc/5561,https://issues.webrtc.org/issues/42230585 +https://crbug.com/webrtc/5563,https://issues.webrtc.org/issues/42230586 +https://crbug.com/webrtc/5564,https://issues.webrtc.org/issues/42230587 +https://crbug.com/webrtc/5565,https://issues.webrtc.org/issues/42230588 +https://crbug.com/webrtc/5566,https://issues.webrtc.org/issues/42230589 +https://crbug.com/webrtc/5567,https://issues.webrtc.org/issues/42230590 +https://crbug.com/webrtc/5568,https://issues.webrtc.org/issues/42230591 +https://crbug.com/webrtc/5569,https://issues.webrtc.org/issues/42230592 +https://crbug.com/webrtc/557,https://issues.webrtc.org/issues/42230593 +https://crbug.com/webrtc/5570,https://issues.webrtc.org/issues/42230594 +https://crbug.com/webrtc/5571,https://issues.webrtc.org/issues/42230595 +https://crbug.com/webrtc/5572,https://issues.webrtc.org/issues/42230596 +https://crbug.com/webrtc/5573,https://issues.webrtc.org/issues/42230597 +https://crbug.com/webrtc/5574,https://issues.webrtc.org/issues/42230598 +https://crbug.com/webrtc/5575,https://issues.webrtc.org/issues/42230599 +https://crbug.com/webrtc/5576,https://issues.webrtc.org/issues/42230600 +https://crbug.com/webrtc/5577,https://issues.webrtc.org/issues/42230601 +https://crbug.com/webrtc/5578,https://issues.webrtc.org/issues/42230602 +https://crbug.com/webrtc/5579,https://issues.webrtc.org/issues/42230603 +https://crbug.com/webrtc/558,https://issues.webrtc.org/issues/42230604 
+https://crbug.com/webrtc/5580,https://issues.webrtc.org/issues/42230605 +https://crbug.com/webrtc/5582,https://issues.webrtc.org/issues/42230606 +https://crbug.com/webrtc/5583,https://issues.webrtc.org/issues/42230607 +https://crbug.com/webrtc/5584,https://issues.webrtc.org/issues/42230608 +https://crbug.com/webrtc/5585,https://issues.webrtc.org/issues/42230609 +https://crbug.com/webrtc/5587,https://issues.webrtc.org/issues/42230610 +https://crbug.com/webrtc/5589,https://issues.webrtc.org/issues/42230611 +https://crbug.com/webrtc/559,https://issues.webrtc.org/issues/42230612 +https://crbug.com/webrtc/5590,https://issues.webrtc.org/issues/42230613 +https://crbug.com/webrtc/5591,https://issues.webrtc.org/issues/42230614 +https://crbug.com/webrtc/5592,https://issues.webrtc.org/issues/42230615 +https://crbug.com/webrtc/5593,https://issues.webrtc.org/issues/42230616 +https://crbug.com/webrtc/5594,https://issues.webrtc.org/issues/42230617 +https://crbug.com/webrtc/5595,https://issues.webrtc.org/issues/42230618 +https://crbug.com/webrtc/5597,https://issues.webrtc.org/issues/42230619 +https://crbug.com/webrtc/5598,https://issues.webrtc.org/issues/42230620 +https://crbug.com/webrtc/5599,https://issues.webrtc.org/issues/42230621 +https://crbug.com/webrtc/56,https://issues.webrtc.org/issues/42230622 +https://crbug.com/webrtc/560,https://issues.webrtc.org/issues/42230623 +https://crbug.com/webrtc/5600,https://issues.webrtc.org/issues/42230624 +https://crbug.com/webrtc/5601,https://issues.webrtc.org/issues/42230625 +https://crbug.com/webrtc/5602,https://issues.webrtc.org/issues/42230626 +https://crbug.com/webrtc/5603,https://issues.webrtc.org/issues/42230627 +https://crbug.com/webrtc/5604,https://issues.webrtc.org/issues/42230628 +https://crbug.com/webrtc/5605,https://issues.webrtc.org/issues/42230629 +https://crbug.com/webrtc/5606,https://issues.webrtc.org/issues/42230630 +https://crbug.com/webrtc/5608,https://issues.webrtc.org/issues/42230631 +https://crbug.com/webrtc/5609,https://issues.webrtc.org/issues/42230632 +https://crbug.com/webrtc/561,https://issues.webrtc.org/issues/42230633 +https://crbug.com/webrtc/5610,https://issues.webrtc.org/issues/42230634 +https://crbug.com/webrtc/5611,https://issues.webrtc.org/issues/42230635 +https://crbug.com/webrtc/5612,https://issues.webrtc.org/issues/42230636 +https://crbug.com/webrtc/5613,https://issues.webrtc.org/issues/42230637 +https://crbug.com/webrtc/5614,https://issues.webrtc.org/issues/42230638 +https://crbug.com/webrtc/5615,https://issues.webrtc.org/issues/42230639 +https://crbug.com/webrtc/5616,https://issues.webrtc.org/issues/42230640 +https://crbug.com/webrtc/5617,https://issues.webrtc.org/issues/42230641 +https://crbug.com/webrtc/5618,https://issues.webrtc.org/issues/42230642 +https://crbug.com/webrtc/5619,https://issues.webrtc.org/issues/42230643 +https://crbug.com/webrtc/562,https://issues.webrtc.org/issues/42230644 +https://crbug.com/webrtc/5620,https://issues.webrtc.org/issues/42230645 +https://crbug.com/webrtc/5621,https://issues.webrtc.org/issues/42230646 +https://crbug.com/webrtc/5622,https://issues.webrtc.org/issues/42230647 +https://crbug.com/webrtc/5623,https://issues.webrtc.org/issues/42230648 +https://crbug.com/webrtc/5625,https://issues.webrtc.org/issues/42230649 +https://crbug.com/webrtc/5626,https://issues.webrtc.org/issues/42230650 +https://crbug.com/webrtc/5627,https://issues.webrtc.org/issues/42230651 +https://crbug.com/webrtc/5629,https://issues.webrtc.org/issues/42230652 
+https://crbug.com/webrtc/563,https://issues.webrtc.org/issues/42230653 +https://crbug.com/webrtc/5630,https://issues.webrtc.org/issues/42230654 +https://crbug.com/webrtc/5631,https://issues.webrtc.org/issues/42230655 +https://crbug.com/webrtc/5632,https://issues.webrtc.org/issues/42230656 +https://crbug.com/webrtc/5633,https://issues.webrtc.org/issues/42230657 +https://crbug.com/webrtc/5634,https://issues.webrtc.org/issues/42230658 +https://crbug.com/webrtc/5635,https://issues.webrtc.org/issues/42230659 +https://crbug.com/webrtc/5636,https://issues.webrtc.org/issues/42230660 +https://crbug.com/webrtc/5637,https://issues.webrtc.org/issues/42230661 +https://crbug.com/webrtc/5638,https://issues.webrtc.org/issues/42230662 +https://crbug.com/webrtc/5639,https://issues.webrtc.org/issues/42230663 +https://crbug.com/webrtc/564,https://issues.webrtc.org/issues/42230664 +https://crbug.com/webrtc/5640,https://issues.webrtc.org/issues/42230665 +https://crbug.com/webrtc/5641,https://issues.webrtc.org/issues/42230666 +https://crbug.com/webrtc/5642,https://issues.webrtc.org/issues/42230667 +https://crbug.com/webrtc/5643,https://issues.webrtc.org/issues/42230668 +https://crbug.com/webrtc/5644,https://issues.webrtc.org/issues/42230669 +https://crbug.com/webrtc/5645,https://issues.webrtc.org/issues/42230670 +https://crbug.com/webrtc/5646,https://issues.webrtc.org/issues/42230671 +https://crbug.com/webrtc/5647,https://issues.webrtc.org/issues/42230672 +https://crbug.com/webrtc/5648,https://issues.webrtc.org/issues/42230673 +https://crbug.com/webrtc/5649,https://issues.webrtc.org/issues/42230674 +https://crbug.com/webrtc/565,https://issues.webrtc.org/issues/42230675 +https://crbug.com/webrtc/5650,https://issues.webrtc.org/issues/42230676 +https://crbug.com/webrtc/5651,https://issues.webrtc.org/issues/42230677 +https://crbug.com/webrtc/5652,https://issues.webrtc.org/issues/42230678 +https://crbug.com/webrtc/5653,https://issues.webrtc.org/issues/42230679 +https://crbug.com/webrtc/5654,https://issues.webrtc.org/issues/42230680 +https://crbug.com/webrtc/5655,https://issues.webrtc.org/issues/42230681 +https://crbug.com/webrtc/5656,https://issues.webrtc.org/issues/42230682 +https://crbug.com/webrtc/5657,https://issues.webrtc.org/issues/42230683 +https://crbug.com/webrtc/5658,https://issues.webrtc.org/issues/42230684 +https://crbug.com/webrtc/5659,https://issues.webrtc.org/issues/42230685 +https://crbug.com/webrtc/566,https://issues.webrtc.org/issues/42230686 +https://crbug.com/webrtc/5660,https://issues.webrtc.org/issues/42230687 +https://crbug.com/webrtc/5661,https://issues.webrtc.org/issues/42230688 +https://crbug.com/webrtc/5662,https://issues.webrtc.org/issues/42230689 +https://crbug.com/webrtc/5663,https://issues.webrtc.org/issues/42230690 +https://crbug.com/webrtc/5664,https://issues.webrtc.org/issues/42230691 +https://crbug.com/webrtc/5665,https://issues.webrtc.org/issues/42230692 +https://crbug.com/webrtc/5666,https://issues.webrtc.org/issues/42230693 +https://crbug.com/webrtc/5667,https://issues.webrtc.org/issues/42230694 +https://crbug.com/webrtc/5668,https://issues.webrtc.org/issues/42230695 +https://crbug.com/webrtc/5669,https://issues.webrtc.org/issues/42230696 +https://crbug.com/webrtc/567,https://issues.webrtc.org/issues/42230697 +https://crbug.com/webrtc/5670,https://issues.webrtc.org/issues/42230698 +https://crbug.com/webrtc/5671,https://issues.webrtc.org/issues/42230699 +https://crbug.com/webrtc/5672,https://issues.webrtc.org/issues/42230700 
+https://crbug.com/webrtc/5673,https://issues.webrtc.org/issues/42230701 +https://crbug.com/webrtc/5674,https://issues.webrtc.org/issues/42230702 +https://crbug.com/webrtc/5675,https://issues.webrtc.org/issues/42230703 +https://crbug.com/webrtc/5676,https://issues.webrtc.org/issues/42230704 +https://crbug.com/webrtc/5677,https://issues.webrtc.org/issues/42230705 +https://crbug.com/webrtc/5678,https://issues.webrtc.org/issues/42230706 +https://crbug.com/webrtc/5679,https://issues.webrtc.org/issues/42230707 +https://crbug.com/webrtc/568,https://issues.webrtc.org/issues/42230708 +https://crbug.com/webrtc/5680,https://issues.webrtc.org/issues/42230709 +https://crbug.com/webrtc/5681,https://issues.webrtc.org/issues/42230710 +https://crbug.com/webrtc/5682,https://issues.webrtc.org/issues/42230711 +https://crbug.com/webrtc/5683,https://issues.webrtc.org/issues/42230712 +https://crbug.com/webrtc/5684,https://issues.webrtc.org/issues/42230713 +https://crbug.com/webrtc/5685,https://issues.webrtc.org/issues/42230714 +https://crbug.com/webrtc/5686,https://issues.webrtc.org/issues/42230715 +https://crbug.com/webrtc/5687,https://issues.webrtc.org/issues/42230716 +https://crbug.com/webrtc/5688,https://issues.webrtc.org/issues/42230717 +https://crbug.com/webrtc/5689,https://issues.webrtc.org/issues/42230718 +https://crbug.com/webrtc/569,https://issues.webrtc.org/issues/42230719 +https://crbug.com/webrtc/5690,https://issues.webrtc.org/issues/42230720 +https://crbug.com/webrtc/5691,https://issues.webrtc.org/issues/42230721 +https://crbug.com/webrtc/5692,https://issues.webrtc.org/issues/42230722 +https://crbug.com/webrtc/5693,https://issues.webrtc.org/issues/42230723 +https://crbug.com/webrtc/5694,https://issues.webrtc.org/issues/42230724 +https://crbug.com/webrtc/5695,https://issues.webrtc.org/issues/42230725 +https://crbug.com/webrtc/5697,https://issues.webrtc.org/issues/42230726 +https://crbug.com/webrtc/5698,https://issues.webrtc.org/issues/42230727 +https://crbug.com/webrtc/5699,https://issues.webrtc.org/issues/42230728 +https://crbug.com/webrtc/57,https://issues.webrtc.org/issues/42230729 +https://crbug.com/webrtc/570,https://issues.webrtc.org/issues/42230730 +https://crbug.com/webrtc/5700,https://issues.webrtc.org/issues/42230731 +https://crbug.com/webrtc/5701,https://issues.webrtc.org/issues/42230732 +https://crbug.com/webrtc/5702,https://issues.webrtc.org/issues/42230733 +https://crbug.com/webrtc/5703,https://issues.webrtc.org/issues/42230734 +https://crbug.com/webrtc/5704,https://issues.webrtc.org/issues/42230735 +https://crbug.com/webrtc/5705,https://issues.webrtc.org/issues/42230736 +https://crbug.com/webrtc/5706,https://issues.webrtc.org/issues/42230737 +https://crbug.com/webrtc/5707,https://issues.webrtc.org/issues/42230738 +https://crbug.com/webrtc/5709,https://issues.webrtc.org/issues/42230739 +https://crbug.com/webrtc/571,https://issues.webrtc.org/issues/42230740 +https://crbug.com/webrtc/5710,https://issues.webrtc.org/issues/42230741 +https://crbug.com/webrtc/5711,https://issues.webrtc.org/issues/42230742 +https://crbug.com/webrtc/5712,https://issues.webrtc.org/issues/42230743 +https://crbug.com/webrtc/5714,https://issues.webrtc.org/issues/42230744 +https://crbug.com/webrtc/5715,https://issues.webrtc.org/issues/42230745 +https://crbug.com/webrtc/5716,https://issues.webrtc.org/issues/42230746 +https://crbug.com/webrtc/5718,https://issues.webrtc.org/issues/42230747 +https://crbug.com/webrtc/5719,https://issues.webrtc.org/issues/42230748 
+https://crbug.com/webrtc/572,https://issues.webrtc.org/issues/42230749 +https://crbug.com/webrtc/5720,https://issues.webrtc.org/issues/42230750 +https://crbug.com/webrtc/5721,https://issues.webrtc.org/issues/42230751 +https://crbug.com/webrtc/5722,https://issues.webrtc.org/issues/42230752 +https://crbug.com/webrtc/5723,https://issues.webrtc.org/issues/42230753 +https://crbug.com/webrtc/5724,https://issues.webrtc.org/issues/42230754 +https://crbug.com/webrtc/5725,https://issues.webrtc.org/issues/42230755 +https://crbug.com/webrtc/5726,https://issues.webrtc.org/issues/42230756 +https://crbug.com/webrtc/5727,https://issues.webrtc.org/issues/42230757 +https://crbug.com/webrtc/5728,https://issues.webrtc.org/issues/42230758 +https://crbug.com/webrtc/5729,https://issues.webrtc.org/issues/42230759 +https://crbug.com/webrtc/573,https://issues.webrtc.org/issues/42230760 +https://crbug.com/webrtc/5733,https://issues.webrtc.org/issues/42230761 +https://crbug.com/webrtc/5734,https://issues.webrtc.org/issues/42230762 +https://crbug.com/webrtc/5735,https://issues.webrtc.org/issues/42230763 +https://crbug.com/webrtc/5736,https://issues.webrtc.org/issues/42230764 +https://crbug.com/webrtc/5737,https://issues.webrtc.org/issues/42230765 +https://crbug.com/webrtc/5738,https://issues.webrtc.org/issues/42230766 +https://crbug.com/webrtc/5739,https://issues.webrtc.org/issues/42230767 +https://crbug.com/webrtc/574,https://issues.webrtc.org/issues/42230768 +https://crbug.com/webrtc/5740,https://issues.webrtc.org/issues/42230769 +https://crbug.com/webrtc/5742,https://issues.webrtc.org/issues/42230770 +https://crbug.com/webrtc/5743,https://issues.webrtc.org/issues/42230771 +https://crbug.com/webrtc/5744,https://issues.webrtc.org/issues/42230772 +https://crbug.com/webrtc/5745,https://issues.webrtc.org/issues/42230773 +https://crbug.com/webrtc/5746,https://issues.webrtc.org/issues/42230774 +https://crbug.com/webrtc/5747,https://issues.webrtc.org/issues/42230775 +https://crbug.com/webrtc/5748,https://issues.webrtc.org/issues/42230776 +https://crbug.com/webrtc/5749,https://issues.webrtc.org/issues/42230777 +https://crbug.com/webrtc/575,https://issues.webrtc.org/issues/42230778 +https://crbug.com/webrtc/5750,https://issues.webrtc.org/issues/42230779 +https://crbug.com/webrtc/5751,https://issues.webrtc.org/issues/42230780 +https://crbug.com/webrtc/5752,https://issues.webrtc.org/issues/42230781 +https://crbug.com/webrtc/5753,https://issues.webrtc.org/issues/42230782 +https://crbug.com/webrtc/5754,https://issues.webrtc.org/issues/42230783 +https://crbug.com/webrtc/5755,https://issues.webrtc.org/issues/42230784 +https://crbug.com/webrtc/5756,https://issues.webrtc.org/issues/42230785 +https://crbug.com/webrtc/5758,https://issues.webrtc.org/issues/42230786 +https://crbug.com/webrtc/5759,https://issues.webrtc.org/issues/42230787 +https://crbug.com/webrtc/576,https://issues.webrtc.org/issues/42230788 +https://crbug.com/webrtc/5760,https://issues.webrtc.org/issues/42230789 +https://crbug.com/webrtc/5761,https://issues.webrtc.org/issues/42230790 +https://crbug.com/webrtc/5762,https://issues.webrtc.org/issues/42230791 +https://crbug.com/webrtc/5764,https://issues.webrtc.org/issues/42230792 +https://crbug.com/webrtc/5765,https://issues.webrtc.org/issues/42230793 +https://crbug.com/webrtc/5766,https://issues.webrtc.org/issues/42230794 +https://crbug.com/webrtc/5767,https://issues.webrtc.org/issues/42230795 +https://crbug.com/webrtc/5768,https://issues.webrtc.org/issues/42230796 
+https://crbug.com/webrtc/5769,https://issues.webrtc.org/issues/42230797 +https://crbug.com/webrtc/577,https://issues.webrtc.org/issues/42230798 +https://crbug.com/webrtc/5770,https://issues.webrtc.org/issues/42230799 +https://crbug.com/webrtc/5771,https://issues.webrtc.org/issues/42230800 +https://crbug.com/webrtc/5773,https://issues.webrtc.org/issues/42230801 +https://crbug.com/webrtc/5775,https://issues.webrtc.org/issues/42230802 +https://crbug.com/webrtc/5776,https://issues.webrtc.org/issues/42230803 +https://crbug.com/webrtc/5779,https://issues.webrtc.org/issues/42230804 +https://crbug.com/webrtc/578,https://issues.webrtc.org/issues/42230805 +https://crbug.com/webrtc/5780,https://issues.webrtc.org/issues/42230806 +https://crbug.com/webrtc/5781,https://issues.webrtc.org/issues/42230807 +https://crbug.com/webrtc/5782,https://issues.webrtc.org/issues/42230808 +https://crbug.com/webrtc/5783,https://issues.webrtc.org/issues/42230809 +https://crbug.com/webrtc/5784,https://issues.webrtc.org/issues/42230810 +https://crbug.com/webrtc/5786,https://issues.webrtc.org/issues/42230811 +https://crbug.com/webrtc/5787,https://issues.webrtc.org/issues/42230812 +https://crbug.com/webrtc/5789,https://issues.webrtc.org/issues/42230813 +https://crbug.com/webrtc/579,https://issues.webrtc.org/issues/42230814 +https://crbug.com/webrtc/5790,https://issues.webrtc.org/issues/42230815 +https://crbug.com/webrtc/5791,https://issues.webrtc.org/issues/42230816 +https://crbug.com/webrtc/5792,https://issues.webrtc.org/issues/42230817 +https://crbug.com/webrtc/5796,https://issues.webrtc.org/issues/42230818 +https://crbug.com/webrtc/5797,https://issues.webrtc.org/issues/42230819 +https://crbug.com/webrtc/5798,https://issues.webrtc.org/issues/42230820 +https://crbug.com/webrtc/5799,https://issues.webrtc.org/issues/42230821 +https://crbug.com/webrtc/58,https://issues.webrtc.org/issues/42230822 +https://crbug.com/webrtc/580,https://issues.webrtc.org/issues/42230823 +https://crbug.com/webrtc/5800,https://issues.webrtc.org/issues/42230824 +https://crbug.com/webrtc/5801,https://issues.webrtc.org/issues/42230825 +https://crbug.com/webrtc/5802,https://issues.webrtc.org/issues/42230826 +https://crbug.com/webrtc/5803,https://issues.webrtc.org/issues/42230827 +https://crbug.com/webrtc/5804,https://issues.webrtc.org/issues/42230828 +https://crbug.com/webrtc/5805,https://issues.webrtc.org/issues/42230829 +https://crbug.com/webrtc/5806,https://issues.webrtc.org/issues/42230830 +https://crbug.com/webrtc/5808,https://issues.webrtc.org/issues/42230831 +https://crbug.com/webrtc/5809,https://issues.webrtc.org/issues/42230832 +https://crbug.com/webrtc/581,https://issues.webrtc.org/issues/42230833 +https://crbug.com/webrtc/5810,https://issues.webrtc.org/issues/42230834 +https://crbug.com/webrtc/5811,https://issues.webrtc.org/issues/42230835 +https://crbug.com/webrtc/5812,https://issues.webrtc.org/issues/42230836 +https://crbug.com/webrtc/5813,https://issues.webrtc.org/issues/42230837 +https://crbug.com/webrtc/5814,https://issues.webrtc.org/issues/42230838 +https://crbug.com/webrtc/5815,https://issues.webrtc.org/issues/42230839 +https://crbug.com/webrtc/5816,https://issues.webrtc.org/issues/42230840 +https://crbug.com/webrtc/5817,https://issues.webrtc.org/issues/42230841 +https://crbug.com/webrtc/5818,https://issues.webrtc.org/issues/42230842 +https://crbug.com/webrtc/5819,https://issues.webrtc.org/issues/42230843 +https://crbug.com/webrtc/582,https://issues.webrtc.org/issues/42230844 
+https://crbug.com/webrtc/5821,https://issues.webrtc.org/issues/42230845 +https://crbug.com/webrtc/5822,https://issues.webrtc.org/issues/42230846 +https://crbug.com/webrtc/5823,https://issues.webrtc.org/issues/42230847 +https://crbug.com/webrtc/5824,https://issues.webrtc.org/issues/42230848 +https://crbug.com/webrtc/5825,https://issues.webrtc.org/issues/42230849 +https://crbug.com/webrtc/5826,https://issues.webrtc.org/issues/42230850 +https://crbug.com/webrtc/5827,https://issues.webrtc.org/issues/42230851 +https://crbug.com/webrtc/5828,https://issues.webrtc.org/issues/42230852 +https://crbug.com/webrtc/5829,https://issues.webrtc.org/issues/42230853 +https://crbug.com/webrtc/583,https://issues.webrtc.org/issues/42230854 +https://crbug.com/webrtc/5830,https://issues.webrtc.org/issues/42230855 +https://crbug.com/webrtc/5831,https://issues.webrtc.org/issues/42230856 +https://crbug.com/webrtc/5832,https://issues.webrtc.org/issues/42230857 +https://crbug.com/webrtc/5833,https://issues.webrtc.org/issues/42230858 +https://crbug.com/webrtc/5834,https://issues.webrtc.org/issues/42230859 +https://crbug.com/webrtc/5835,https://issues.webrtc.org/issues/42230860 +https://crbug.com/webrtc/5837,https://issues.webrtc.org/issues/42230861 +https://crbug.com/webrtc/5838,https://issues.webrtc.org/issues/42230862 +https://crbug.com/webrtc/5839,https://issues.webrtc.org/issues/42230863 +https://crbug.com/webrtc/584,https://issues.webrtc.org/issues/42230864 +https://crbug.com/webrtc/5840,https://issues.webrtc.org/issues/42230865 +https://crbug.com/webrtc/5841,https://issues.webrtc.org/issues/42230866 +https://crbug.com/webrtc/5842,https://issues.webrtc.org/issues/42230867 +https://crbug.com/webrtc/5843,https://issues.webrtc.org/issues/42230868 +https://crbug.com/webrtc/5844,https://issues.webrtc.org/issues/42230869 +https://crbug.com/webrtc/5845,https://issues.webrtc.org/issues/42230870 +https://crbug.com/webrtc/5846,https://issues.webrtc.org/issues/42230871 +https://crbug.com/webrtc/5848,https://issues.webrtc.org/issues/42230872 +https://crbug.com/webrtc/5849,https://issues.webrtc.org/issues/42230873 +https://crbug.com/webrtc/585,https://issues.webrtc.org/issues/42230874 +https://crbug.com/webrtc/5850,https://issues.webrtc.org/issues/42230875 +https://crbug.com/webrtc/5851,https://issues.webrtc.org/issues/42230876 +https://crbug.com/webrtc/5852,https://issues.webrtc.org/issues/42230877 +https://crbug.com/webrtc/5853,https://issues.webrtc.org/issues/42230878 +https://crbug.com/webrtc/5856,https://issues.webrtc.org/issues/42230879 +https://crbug.com/webrtc/5858,https://issues.webrtc.org/issues/42230880 +https://crbug.com/webrtc/5859,https://issues.webrtc.org/issues/42230881 +https://crbug.com/webrtc/586,https://issues.webrtc.org/issues/42230882 +https://crbug.com/webrtc/5860,https://issues.webrtc.org/issues/42230883 +https://crbug.com/webrtc/5861,https://issues.webrtc.org/issues/42230884 +https://crbug.com/webrtc/5862,https://issues.webrtc.org/issues/42230885 +https://crbug.com/webrtc/5863,https://issues.webrtc.org/issues/42230886 +https://crbug.com/webrtc/5864,https://issues.webrtc.org/issues/42230887 +https://crbug.com/webrtc/5865,https://issues.webrtc.org/issues/42230888 +https://crbug.com/webrtc/5866,https://issues.webrtc.org/issues/42230889 +https://crbug.com/webrtc/5867,https://issues.webrtc.org/issues/42230890 +https://crbug.com/webrtc/5868,https://issues.webrtc.org/issues/42230891 +https://crbug.com/webrtc/5869,https://issues.webrtc.org/issues/42230892 
+https://crbug.com/webrtc/587,https://issues.webrtc.org/issues/42230893 +https://crbug.com/webrtc/5870,https://issues.webrtc.org/issues/42230894 +https://crbug.com/webrtc/5871,https://issues.webrtc.org/issues/42230895 +https://crbug.com/webrtc/5872,https://issues.webrtc.org/issues/42230896 +https://crbug.com/webrtc/5873,https://issues.webrtc.org/issues/42230897 +https://crbug.com/webrtc/5874,https://issues.webrtc.org/issues/42230898 +https://crbug.com/webrtc/5876,https://issues.webrtc.org/issues/42230899 +https://crbug.com/webrtc/5877,https://issues.webrtc.org/issues/42230900 +https://crbug.com/webrtc/5878,https://issues.webrtc.org/issues/42230901 +https://crbug.com/webrtc/5879,https://issues.webrtc.org/issues/42230902 +https://crbug.com/webrtc/588,https://issues.webrtc.org/issues/42230903 +https://crbug.com/webrtc/5880,https://issues.webrtc.org/issues/42230904 +https://crbug.com/webrtc/5881,https://issues.webrtc.org/issues/42230905 +https://crbug.com/webrtc/5882,https://issues.webrtc.org/issues/42230906 +https://crbug.com/webrtc/5883,https://issues.webrtc.org/issues/42230907 +https://crbug.com/webrtc/5884,https://issues.webrtc.org/issues/42230908 +https://crbug.com/webrtc/5885,https://issues.webrtc.org/issues/42230909 +https://crbug.com/webrtc/5886,https://issues.webrtc.org/issues/42230911 +https://crbug.com/webrtc/5887,https://issues.webrtc.org/issues/42230912 +https://crbug.com/webrtc/5888,https://issues.webrtc.org/issues/42230913 +https://crbug.com/webrtc/5889,https://issues.webrtc.org/issues/42230914 +https://crbug.com/webrtc/589,https://issues.webrtc.org/issues/42230915 +https://crbug.com/webrtc/5890,https://issues.webrtc.org/issues/42230916 +https://crbug.com/webrtc/5891,https://issues.webrtc.org/issues/42230917 +https://crbug.com/webrtc/5892,https://issues.webrtc.org/issues/42230918 +https://crbug.com/webrtc/5893,https://issues.webrtc.org/issues/42230919 +https://crbug.com/webrtc/5894,https://issues.webrtc.org/issues/42230920 +https://crbug.com/webrtc/5895,https://issues.webrtc.org/issues/42230921 +https://crbug.com/webrtc/5896,https://issues.webrtc.org/issues/42230922 +https://crbug.com/webrtc/5897,https://issues.webrtc.org/issues/42230923 +https://crbug.com/webrtc/5898,https://issues.webrtc.org/issues/42230924 +https://crbug.com/webrtc/5899,https://issues.webrtc.org/issues/42230925 +https://crbug.com/webrtc/59,https://issues.webrtc.org/issues/42230926 +https://crbug.com/webrtc/590,https://issues.webrtc.org/issues/42230927 +https://crbug.com/webrtc/5900,https://issues.webrtc.org/issues/42230928 +https://crbug.com/webrtc/5901,https://issues.webrtc.org/issues/42230929 +https://crbug.com/webrtc/5902,https://issues.webrtc.org/issues/42230930 +https://crbug.com/webrtc/5903,https://issues.webrtc.org/issues/42230931 +https://crbug.com/webrtc/5904,https://issues.webrtc.org/issues/42230932 +https://crbug.com/webrtc/5906,https://issues.webrtc.org/issues/42230933 +https://crbug.com/webrtc/5907,https://issues.webrtc.org/issues/42230934 +https://crbug.com/webrtc/5908,https://issues.webrtc.org/issues/42230935 +https://crbug.com/webrtc/5909,https://issues.webrtc.org/issues/42230936 +https://crbug.com/webrtc/591,https://issues.webrtc.org/issues/42230937 +https://crbug.com/webrtc/5910,https://issues.webrtc.org/issues/42230938 +https://crbug.com/webrtc/5911,https://issues.webrtc.org/issues/42230939 +https://crbug.com/webrtc/5912,https://issues.webrtc.org/issues/42230940 +https://crbug.com/webrtc/5913,https://issues.webrtc.org/issues/42230941 
+https://crbug.com/webrtc/5914,https://issues.webrtc.org/issues/42230942 +https://crbug.com/webrtc/5915,https://issues.webrtc.org/issues/42230943 +https://crbug.com/webrtc/5916,https://issues.webrtc.org/issues/42230944 +https://crbug.com/webrtc/5917,https://issues.webrtc.org/issues/42230945 +https://crbug.com/webrtc/5918,https://issues.webrtc.org/issues/42230946 +https://crbug.com/webrtc/592,https://issues.webrtc.org/issues/42230947 +https://crbug.com/webrtc/5920,https://issues.webrtc.org/issues/42230948 +https://crbug.com/webrtc/5921,https://issues.webrtc.org/issues/42230949 +https://crbug.com/webrtc/5922,https://issues.webrtc.org/issues/42230950 +https://crbug.com/webrtc/5923,https://issues.webrtc.org/issues/42230951 +https://crbug.com/webrtc/5924,https://issues.webrtc.org/issues/42230952 +https://crbug.com/webrtc/5925,https://issues.webrtc.org/issues/42230953 +https://crbug.com/webrtc/5926,https://issues.webrtc.org/issues/42230954 +https://crbug.com/webrtc/5927,https://issues.webrtc.org/issues/42230955 +https://crbug.com/webrtc/5928,https://issues.webrtc.org/issues/42230956 +https://crbug.com/webrtc/5929,https://issues.webrtc.org/issues/42230957 +https://crbug.com/webrtc/593,https://issues.webrtc.org/issues/42230958 +https://crbug.com/webrtc/5930,https://issues.webrtc.org/issues/42230959 +https://crbug.com/webrtc/5931,https://issues.webrtc.org/issues/42230960 +https://crbug.com/webrtc/5932,https://issues.webrtc.org/issues/42230961 +https://crbug.com/webrtc/5933,https://issues.webrtc.org/issues/42230962 +https://crbug.com/webrtc/5934,https://issues.webrtc.org/issues/42230963 +https://crbug.com/webrtc/5935,https://issues.webrtc.org/issues/42230964 +https://crbug.com/webrtc/5936,https://issues.webrtc.org/issues/42230965 +https://crbug.com/webrtc/5937,https://issues.webrtc.org/issues/42230966 +https://crbug.com/webrtc/5938,https://issues.webrtc.org/issues/42230967 +https://crbug.com/webrtc/5939,https://issues.webrtc.org/issues/42230968 +https://crbug.com/webrtc/594,https://issues.webrtc.org/issues/42230969 +https://crbug.com/webrtc/5940,https://issues.webrtc.org/issues/42230970 +https://crbug.com/webrtc/5941,https://issues.webrtc.org/issues/42230971 +https://crbug.com/webrtc/5942,https://issues.webrtc.org/issues/42230972 +https://crbug.com/webrtc/5943,https://issues.webrtc.org/issues/42230973 +https://crbug.com/webrtc/5944,https://issues.webrtc.org/issues/42230974 +https://crbug.com/webrtc/5945,https://issues.webrtc.org/issues/42230975 +https://crbug.com/webrtc/5946,https://issues.webrtc.org/issues/42230976 +https://crbug.com/webrtc/5947,https://issues.webrtc.org/issues/42230977 +https://crbug.com/webrtc/5948,https://issues.webrtc.org/issues/42230978 +https://crbug.com/webrtc/595,https://issues.webrtc.org/issues/42230979 +https://crbug.com/webrtc/5950,https://issues.webrtc.org/issues/42230980 +https://crbug.com/webrtc/5951,https://issues.webrtc.org/issues/42230981 +https://crbug.com/webrtc/5952,https://issues.webrtc.org/issues/42230982 +https://crbug.com/webrtc/5953,https://issues.webrtc.org/issues/42230983 +https://crbug.com/webrtc/5954,https://issues.webrtc.org/issues/42230984 +https://crbug.com/webrtc/5955,https://issues.webrtc.org/issues/42230985 +https://crbug.com/webrtc/5956,https://issues.webrtc.org/issues/42230986 +https://crbug.com/webrtc/5957,https://issues.webrtc.org/issues/42230987 +https://crbug.com/webrtc/5958,https://issues.webrtc.org/issues/42230988 +https://crbug.com/webrtc/5959,https://issues.webrtc.org/issues/42230989 
+https://crbug.com/webrtc/596,https://issues.webrtc.org/issues/42230990 +https://crbug.com/webrtc/5960,https://issues.webrtc.org/issues/42230991 +https://crbug.com/webrtc/5961,https://issues.webrtc.org/issues/42230992 +https://crbug.com/webrtc/5962,https://issues.webrtc.org/issues/42230993 +https://crbug.com/webrtc/5963,https://issues.webrtc.org/issues/42230994 +https://crbug.com/webrtc/5964,https://issues.webrtc.org/issues/42230995 +https://crbug.com/webrtc/5965,https://issues.webrtc.org/issues/42230996 +https://crbug.com/webrtc/5966,https://issues.webrtc.org/issues/42230997 +https://crbug.com/webrtc/5968,https://issues.webrtc.org/issues/42230998 +https://crbug.com/webrtc/597,https://issues.webrtc.org/issues/42230999 +https://crbug.com/webrtc/5970,https://issues.webrtc.org/issues/42231000 +https://crbug.com/webrtc/5971,https://issues.webrtc.org/issues/42231001 +https://crbug.com/webrtc/5972,https://issues.webrtc.org/issues/42231002 +https://crbug.com/webrtc/5973,https://issues.webrtc.org/issues/42231003 +https://crbug.com/webrtc/5974,https://issues.webrtc.org/issues/42231004 +https://crbug.com/webrtc/5975,https://issues.webrtc.org/issues/42231005 +https://crbug.com/webrtc/5976,https://issues.webrtc.org/issues/42231006 +https://crbug.com/webrtc/5977,https://issues.webrtc.org/issues/42231007 +https://crbug.com/webrtc/5978,https://issues.webrtc.org/issues/42231008 +https://crbug.com/webrtc/5979,https://issues.webrtc.org/issues/42231009 +https://crbug.com/webrtc/598,https://issues.webrtc.org/issues/42231010 +https://crbug.com/webrtc/5980,https://issues.webrtc.org/issues/42231011 +https://crbug.com/webrtc/5981,https://issues.webrtc.org/issues/42231012 +https://crbug.com/webrtc/5982,https://issues.webrtc.org/issues/42231013 +https://crbug.com/webrtc/5983,https://issues.webrtc.org/issues/42231014 +https://crbug.com/webrtc/5984,https://issues.webrtc.org/issues/42231015 +https://crbug.com/webrtc/5986,https://issues.webrtc.org/issues/42231016 +https://crbug.com/webrtc/5987,https://issues.webrtc.org/issues/42231017 +https://crbug.com/webrtc/5988,https://issues.webrtc.org/issues/42231018 +https://crbug.com/webrtc/5989,https://issues.webrtc.org/issues/42231019 +https://crbug.com/webrtc/599,https://issues.webrtc.org/issues/42231020 +https://crbug.com/webrtc/5990,https://issues.webrtc.org/issues/42231021 +https://crbug.com/webrtc/5991,https://issues.webrtc.org/issues/42231022 +https://crbug.com/webrtc/5992,https://issues.webrtc.org/issues/42231023 +https://crbug.com/webrtc/5993,https://issues.webrtc.org/issues/42231024 +https://crbug.com/webrtc/5994,https://issues.webrtc.org/issues/42231025 +https://crbug.com/webrtc/5995,https://issues.webrtc.org/issues/42231026 +https://crbug.com/webrtc/5996,https://issues.webrtc.org/issues/42231027 +https://crbug.com/webrtc/5997,https://issues.webrtc.org/issues/42231028 +https://crbug.com/webrtc/5998,https://issues.webrtc.org/issues/42231029 +https://crbug.com/webrtc/5999,https://issues.webrtc.org/issues/42231030 +https://crbug.com/webrtc/6,https://issues.webrtc.org/issues/42231031 +https://crbug.com/webrtc/60,https://issues.webrtc.org/issues/42231032 +https://crbug.com/webrtc/600,https://issues.webrtc.org/issues/42231033 +https://crbug.com/webrtc/6000,https://issues.webrtc.org/issues/42231034 +https://crbug.com/webrtc/6002,https://issues.webrtc.org/issues/42231035 +https://crbug.com/webrtc/6003,https://issues.webrtc.org/issues/42231036 +https://crbug.com/webrtc/6004,https://issues.webrtc.org/issues/42231037 
+https://crbug.com/webrtc/6005,https://issues.webrtc.org/issues/42231038 +https://crbug.com/webrtc/6006,https://issues.webrtc.org/issues/42231039 +https://crbug.com/webrtc/6007,https://issues.webrtc.org/issues/42231040 +https://crbug.com/webrtc/6008,https://issues.webrtc.org/issues/42231041 +https://crbug.com/webrtc/6009,https://issues.webrtc.org/issues/42231042 +https://crbug.com/webrtc/601,https://issues.webrtc.org/issues/42231043 +https://crbug.com/webrtc/6010,https://issues.webrtc.org/issues/42231044 +https://crbug.com/webrtc/6011,https://issues.webrtc.org/issues/42231045 +https://crbug.com/webrtc/6012,https://issues.webrtc.org/issues/42231046 +https://crbug.com/webrtc/6013,https://issues.webrtc.org/issues/42231047 +https://crbug.com/webrtc/6014,https://issues.webrtc.org/issues/42231048 +https://crbug.com/webrtc/6015,https://issues.webrtc.org/issues/42231049 +https://crbug.com/webrtc/6016,https://issues.webrtc.org/issues/42231050 +https://crbug.com/webrtc/6017,https://issues.webrtc.org/issues/42231051 +https://crbug.com/webrtc/6019,https://issues.webrtc.org/issues/42231052 +https://crbug.com/webrtc/602,https://issues.webrtc.org/issues/42231053 +https://crbug.com/webrtc/6020,https://issues.webrtc.org/issues/42231054 +https://crbug.com/webrtc/6021,https://issues.webrtc.org/issues/42231055 +https://crbug.com/webrtc/6022,https://issues.webrtc.org/issues/42231056 +https://crbug.com/webrtc/6024,https://issues.webrtc.org/issues/42231057 +https://crbug.com/webrtc/6025,https://issues.webrtc.org/issues/42231058 +https://crbug.com/webrtc/6026,https://issues.webrtc.org/issues/42231059 +https://crbug.com/webrtc/603,https://issues.webrtc.org/issues/42231060 +https://crbug.com/webrtc/6030,https://issues.webrtc.org/issues/42231061 +https://crbug.com/webrtc/6031,https://issues.webrtc.org/issues/42231062 +https://crbug.com/webrtc/6032,https://issues.webrtc.org/issues/42231063 +https://crbug.com/webrtc/6033,https://issues.webrtc.org/issues/42231064 +https://crbug.com/webrtc/6034,https://issues.webrtc.org/issues/42231065 +https://crbug.com/webrtc/6035,https://issues.webrtc.org/issues/42231066 +https://crbug.com/webrtc/6036,https://issues.webrtc.org/issues/42231067 +https://crbug.com/webrtc/6037,https://issues.webrtc.org/issues/42231068 +https://crbug.com/webrtc/6038,https://issues.webrtc.org/issues/42231069 +https://crbug.com/webrtc/6039,https://issues.webrtc.org/issues/42231070 +https://crbug.com/webrtc/604,https://issues.webrtc.org/issues/42231071 +https://crbug.com/webrtc/6040,https://issues.webrtc.org/issues/42231072 +https://crbug.com/webrtc/6041,https://issues.webrtc.org/issues/42231073 +https://crbug.com/webrtc/6042,https://issues.webrtc.org/issues/42231074 +https://crbug.com/webrtc/6043,https://issues.webrtc.org/issues/42231075 +https://crbug.com/webrtc/6044,https://issues.webrtc.org/issues/42231076 +https://crbug.com/webrtc/6045,https://issues.webrtc.org/issues/42231077 +https://crbug.com/webrtc/6046,https://issues.webrtc.org/issues/42231078 +https://crbug.com/webrtc/6047,https://issues.webrtc.org/issues/42231079 +https://crbug.com/webrtc/6048,https://issues.webrtc.org/issues/42231080 +https://crbug.com/webrtc/6049,https://issues.webrtc.org/issues/42231081 +https://crbug.com/webrtc/605,https://issues.webrtc.org/issues/42231082 +https://crbug.com/webrtc/6050,https://issues.webrtc.org/issues/42231083 +https://crbug.com/webrtc/6051,https://issues.webrtc.org/issues/42231084 +https://crbug.com/webrtc/6052,https://issues.webrtc.org/issues/42231085 
+https://crbug.com/webrtc/6053,https://issues.webrtc.org/issues/42231086 +https://crbug.com/webrtc/6054,https://issues.webrtc.org/issues/42231087 +https://crbug.com/webrtc/6055,https://issues.webrtc.org/issues/42231088 +https://crbug.com/webrtc/6056,https://issues.webrtc.org/issues/42231089 +https://crbug.com/webrtc/6057,https://issues.webrtc.org/issues/42231090 +https://crbug.com/webrtc/606,https://issues.webrtc.org/issues/42231091 +https://crbug.com/webrtc/6062,https://issues.webrtc.org/issues/42231092 +https://crbug.com/webrtc/6063,https://issues.webrtc.org/issues/42231093 +https://crbug.com/webrtc/6064,https://issues.webrtc.org/issues/42231094 +https://crbug.com/webrtc/6065,https://issues.webrtc.org/issues/42231095 +https://crbug.com/webrtc/6066,https://issues.webrtc.org/issues/42231096 +https://crbug.com/webrtc/6067,https://issues.webrtc.org/issues/42231097 +https://crbug.com/webrtc/6068,https://issues.webrtc.org/issues/42231098 +https://crbug.com/webrtc/6069,https://issues.webrtc.org/issues/42231099 +https://crbug.com/webrtc/607,https://issues.webrtc.org/issues/42231100 +https://crbug.com/webrtc/6070,https://issues.webrtc.org/issues/42231101 +https://crbug.com/webrtc/6071,https://issues.webrtc.org/issues/42231102 +https://crbug.com/webrtc/6072,https://issues.webrtc.org/issues/42231103 +https://crbug.com/webrtc/6073,https://issues.webrtc.org/issues/42231104 +https://crbug.com/webrtc/6074,https://issues.webrtc.org/issues/42231105 +https://crbug.com/webrtc/6075,https://issues.webrtc.org/issues/42231106 +https://crbug.com/webrtc/6076,https://issues.webrtc.org/issues/42231107 +https://crbug.com/webrtc/6077,https://issues.webrtc.org/issues/42231108 +https://crbug.com/webrtc/6078,https://issues.webrtc.org/issues/42231109 +https://crbug.com/webrtc/6079,https://issues.webrtc.org/issues/42231110 +https://crbug.com/webrtc/608,https://issues.webrtc.org/issues/42231111 +https://crbug.com/webrtc/6080,https://issues.webrtc.org/issues/42231112 +https://crbug.com/webrtc/6082,https://issues.webrtc.org/issues/42231113 +https://crbug.com/webrtc/6083,https://issues.webrtc.org/issues/42231114 +https://crbug.com/webrtc/6084,https://issues.webrtc.org/issues/42231115 +https://crbug.com/webrtc/6085,https://issues.webrtc.org/issues/42231116 +https://crbug.com/webrtc/6086,https://issues.webrtc.org/issues/42231117 +https://crbug.com/webrtc/6087,https://issues.webrtc.org/issues/42231118 +https://crbug.com/webrtc/6088,https://issues.webrtc.org/issues/42231119 +https://crbug.com/webrtc/6089,https://issues.webrtc.org/issues/42231120 +https://crbug.com/webrtc/609,https://issues.webrtc.org/issues/42231121 +https://crbug.com/webrtc/6090,https://issues.webrtc.org/issues/42231122 +https://crbug.com/webrtc/6091,https://issues.webrtc.org/issues/42231123 +https://crbug.com/webrtc/6092,https://issues.webrtc.org/issues/42231124 +https://crbug.com/webrtc/6094,https://issues.webrtc.org/issues/42231125 +https://crbug.com/webrtc/6095,https://issues.webrtc.org/issues/42231126 +https://crbug.com/webrtc/6096,https://issues.webrtc.org/issues/42231127 +https://crbug.com/webrtc/6097,https://issues.webrtc.org/issues/42231128 +https://crbug.com/webrtc/6098,https://issues.webrtc.org/issues/42231129 +https://crbug.com/webrtc/6099,https://issues.webrtc.org/issues/42231130 +https://crbug.com/webrtc/61,https://issues.webrtc.org/issues/42231131 +https://crbug.com/webrtc/610,https://issues.webrtc.org/issues/42231132 +https://crbug.com/webrtc/6100,https://issues.webrtc.org/issues/42231133 
+https://crbug.com/webrtc/6101,https://issues.webrtc.org/issues/42231134 +https://crbug.com/webrtc/6102,https://issues.webrtc.org/issues/42231135 +https://crbug.com/webrtc/6103,https://issues.webrtc.org/issues/42231136 +https://crbug.com/webrtc/6104,https://issues.webrtc.org/issues/42231137 +https://crbug.com/webrtc/6105,https://issues.webrtc.org/issues/42231138 +https://crbug.com/webrtc/6106,https://issues.webrtc.org/issues/42231139 +https://crbug.com/webrtc/6107,https://issues.webrtc.org/issues/42231140 +https://crbug.com/webrtc/6108,https://issues.webrtc.org/issues/42231141 +https://crbug.com/webrtc/6109,https://issues.webrtc.org/issues/42231142 +https://crbug.com/webrtc/611,https://issues.webrtc.org/issues/42231143 +https://crbug.com/webrtc/6110,https://issues.webrtc.org/issues/42231144 +https://crbug.com/webrtc/6111,https://issues.webrtc.org/issues/42231145 +https://crbug.com/webrtc/6112,https://issues.webrtc.org/issues/42231146 +https://crbug.com/webrtc/6113,https://issues.webrtc.org/issues/42231147 +https://crbug.com/webrtc/6114,https://issues.webrtc.org/issues/42231148 +https://crbug.com/webrtc/6115,https://issues.webrtc.org/issues/42231149 +https://crbug.com/webrtc/6116,https://issues.webrtc.org/issues/42231150 +https://crbug.com/webrtc/6117,https://issues.webrtc.org/issues/42231151 +https://crbug.com/webrtc/6118,https://issues.webrtc.org/issues/42231152 +https://crbug.com/webrtc/6119,https://issues.webrtc.org/issues/42231153 +https://crbug.com/webrtc/612,https://issues.webrtc.org/issues/42231154 +https://crbug.com/webrtc/6120,https://issues.webrtc.org/issues/42231155 +https://crbug.com/webrtc/6121,https://issues.webrtc.org/issues/42231156 +https://crbug.com/webrtc/6122,https://issues.webrtc.org/issues/42231157 +https://crbug.com/webrtc/6123,https://issues.webrtc.org/issues/42231158 +https://crbug.com/webrtc/6124,https://issues.webrtc.org/issues/42231159 +https://crbug.com/webrtc/6125,https://issues.webrtc.org/issues/42231160 +https://crbug.com/webrtc/6126,https://issues.webrtc.org/issues/42231161 +https://crbug.com/webrtc/6127,https://issues.webrtc.org/issues/42231162 +https://crbug.com/webrtc/6128,https://issues.webrtc.org/issues/42231163 +https://crbug.com/webrtc/6129,https://issues.webrtc.org/issues/42231164 +https://crbug.com/webrtc/613,https://issues.webrtc.org/issues/42231165 +https://crbug.com/webrtc/6130,https://issues.webrtc.org/issues/42231166 +https://crbug.com/webrtc/6131,https://issues.webrtc.org/issues/42231167 +https://crbug.com/webrtc/6132,https://issues.webrtc.org/issues/42231168 +https://crbug.com/webrtc/6134,https://issues.webrtc.org/issues/42231169 +https://crbug.com/webrtc/6135,https://issues.webrtc.org/issues/42231170 +https://crbug.com/webrtc/6136,https://issues.webrtc.org/issues/42231171 +https://crbug.com/webrtc/6137,https://issues.webrtc.org/issues/42231172 +https://crbug.com/webrtc/6138,https://issues.webrtc.org/issues/42231173 +https://crbug.com/webrtc/6139,https://issues.webrtc.org/issues/42231174 +https://crbug.com/webrtc/614,https://issues.webrtc.org/issues/42231175 +https://crbug.com/webrtc/6140,https://issues.webrtc.org/issues/42231176 +https://crbug.com/webrtc/6141,https://issues.webrtc.org/issues/42231177 +https://crbug.com/webrtc/6142,https://issues.webrtc.org/issues/42231178 +https://crbug.com/webrtc/6143,https://issues.webrtc.org/issues/42231179 +https://crbug.com/webrtc/6144,https://issues.webrtc.org/issues/42231180 +https://crbug.com/webrtc/6145,https://issues.webrtc.org/issues/42231181 
+https://crbug.com/webrtc/6146,https://issues.webrtc.org/issues/42231182 +https://crbug.com/webrtc/6147,https://issues.webrtc.org/issues/42231183 +https://crbug.com/webrtc/6148,https://issues.webrtc.org/issues/42231184 +https://crbug.com/webrtc/6149,https://issues.webrtc.org/issues/42231185 +https://crbug.com/webrtc/615,https://issues.webrtc.org/issues/42231186 +https://crbug.com/webrtc/6150,https://issues.webrtc.org/issues/42231187 +https://crbug.com/webrtc/6151,https://issues.webrtc.org/issues/42231188 +https://crbug.com/webrtc/6152,https://issues.webrtc.org/issues/42231189 +https://crbug.com/webrtc/6153,https://issues.webrtc.org/issues/42231190 +https://crbug.com/webrtc/6154,https://issues.webrtc.org/issues/42231191 +https://crbug.com/webrtc/6155,https://issues.webrtc.org/issues/42231192 +https://crbug.com/webrtc/6156,https://issues.webrtc.org/issues/42231193 +https://crbug.com/webrtc/6157,https://issues.webrtc.org/issues/42231194 +https://crbug.com/webrtc/6158,https://issues.webrtc.org/issues/42231195 +https://crbug.com/webrtc/6159,https://issues.webrtc.org/issues/42231196 +https://crbug.com/webrtc/616,https://issues.webrtc.org/issues/42231197 +https://crbug.com/webrtc/6160,https://issues.webrtc.org/issues/42231198 +https://crbug.com/webrtc/6161,https://issues.webrtc.org/issues/42231199 +https://crbug.com/webrtc/6162,https://issues.webrtc.org/issues/42231200 +https://crbug.com/webrtc/6163,https://issues.webrtc.org/issues/42231201 +https://crbug.com/webrtc/6164,https://issues.webrtc.org/issues/42231202 +https://crbug.com/webrtc/6165,https://issues.webrtc.org/issues/42231203 +https://crbug.com/webrtc/6166,https://issues.webrtc.org/issues/42231204 +https://crbug.com/webrtc/6167,https://issues.webrtc.org/issues/42231205 +https://crbug.com/webrtc/6168,https://issues.webrtc.org/issues/42231206 +https://crbug.com/webrtc/6169,https://issues.webrtc.org/issues/42231207 +https://crbug.com/webrtc/617,https://issues.webrtc.org/issues/42231208 +https://crbug.com/webrtc/6170,https://issues.webrtc.org/issues/42231209 +https://crbug.com/webrtc/6171,https://issues.webrtc.org/issues/42231210 +https://crbug.com/webrtc/6172,https://issues.webrtc.org/issues/42231211 +https://crbug.com/webrtc/6173,https://issues.webrtc.org/issues/42231212 +https://crbug.com/webrtc/6174,https://issues.webrtc.org/issues/42231213 +https://crbug.com/webrtc/6175,https://issues.webrtc.org/issues/42231214 +https://crbug.com/webrtc/6176,https://issues.webrtc.org/issues/42231215 +https://crbug.com/webrtc/6177,https://issues.webrtc.org/issues/42231216 +https://crbug.com/webrtc/6178,https://issues.webrtc.org/issues/42231217 +https://crbug.com/webrtc/6179,https://issues.webrtc.org/issues/42231218 +https://crbug.com/webrtc/618,https://issues.webrtc.org/issues/42231219 +https://crbug.com/webrtc/6180,https://issues.webrtc.org/issues/42231220 +https://crbug.com/webrtc/6181,https://issues.webrtc.org/issues/42231221 +https://crbug.com/webrtc/6182,https://issues.webrtc.org/issues/42231222 +https://crbug.com/webrtc/6183,https://issues.webrtc.org/issues/42231223 +https://crbug.com/webrtc/6184,https://issues.webrtc.org/issues/42231224 +https://crbug.com/webrtc/6185,https://issues.webrtc.org/issues/42231225 +https://crbug.com/webrtc/6186,https://issues.webrtc.org/issues/42231226 +https://crbug.com/webrtc/6187,https://issues.webrtc.org/issues/42231227 +https://crbug.com/webrtc/6188,https://issues.webrtc.org/issues/42231228 +https://crbug.com/webrtc/6189,https://issues.webrtc.org/issues/42231229 
+https://crbug.com/webrtc/619,https://issues.webrtc.org/issues/42231230 +https://crbug.com/webrtc/6190,https://issues.webrtc.org/issues/42231231 +https://crbug.com/webrtc/6191,https://issues.webrtc.org/issues/42231232 +https://crbug.com/webrtc/6192,https://issues.webrtc.org/issues/42231233 +https://crbug.com/webrtc/6193,https://issues.webrtc.org/issues/42231234 +https://crbug.com/webrtc/6194,https://issues.webrtc.org/issues/42231235 +https://crbug.com/webrtc/6195,https://issues.webrtc.org/issues/42231236 +https://crbug.com/webrtc/6196,https://issues.webrtc.org/issues/42231237 +https://crbug.com/webrtc/6197,https://issues.webrtc.org/issues/42231238 +https://crbug.com/webrtc/6198,https://issues.webrtc.org/issues/42231239 +https://crbug.com/webrtc/6199,https://issues.webrtc.org/issues/42231240 +https://crbug.com/webrtc/62,https://issues.webrtc.org/issues/42231241 +https://crbug.com/webrtc/620,https://issues.webrtc.org/issues/42231242 +https://crbug.com/webrtc/6200,https://issues.webrtc.org/issues/42231243 +https://crbug.com/webrtc/6201,https://issues.webrtc.org/issues/42231244 +https://crbug.com/webrtc/6202,https://issues.webrtc.org/issues/42231245 +https://crbug.com/webrtc/6203,https://issues.webrtc.org/issues/42231246 +https://crbug.com/webrtc/6204,https://issues.webrtc.org/issues/42231247 +https://crbug.com/webrtc/6205,https://issues.webrtc.org/issues/42231248 +https://crbug.com/webrtc/6206,https://issues.webrtc.org/issues/42231249 +https://crbug.com/webrtc/6207,https://issues.webrtc.org/issues/42231250 +https://crbug.com/webrtc/6208,https://issues.webrtc.org/issues/42231251 +https://crbug.com/webrtc/6209,https://issues.webrtc.org/issues/42231252 +https://crbug.com/webrtc/621,https://issues.webrtc.org/issues/42231253 +https://crbug.com/webrtc/6210,https://issues.webrtc.org/issues/42231254 +https://crbug.com/webrtc/6211,https://issues.webrtc.org/issues/42231255 +https://crbug.com/webrtc/6212,https://issues.webrtc.org/issues/42231256 +https://crbug.com/webrtc/6213,https://issues.webrtc.org/issues/42231257 +https://crbug.com/webrtc/6214,https://issues.webrtc.org/issues/42231258 +https://crbug.com/webrtc/6215,https://issues.webrtc.org/issues/42231259 +https://crbug.com/webrtc/6216,https://issues.webrtc.org/issues/42231260 +https://crbug.com/webrtc/6217,https://issues.webrtc.org/issues/42231261 +https://crbug.com/webrtc/6218,https://issues.webrtc.org/issues/42231262 +https://crbug.com/webrtc/6219,https://issues.webrtc.org/issues/42231263 +https://crbug.com/webrtc/622,https://issues.webrtc.org/issues/42231264 +https://crbug.com/webrtc/6220,https://issues.webrtc.org/issues/42231265 +https://crbug.com/webrtc/6221,https://issues.webrtc.org/issues/42231266 +https://crbug.com/webrtc/6223,https://issues.webrtc.org/issues/42231267 +https://crbug.com/webrtc/6224,https://issues.webrtc.org/issues/42231268 +https://crbug.com/webrtc/6225,https://issues.webrtc.org/issues/42231269 +https://crbug.com/webrtc/6226,https://issues.webrtc.org/issues/42231270 +https://crbug.com/webrtc/6227,https://issues.webrtc.org/issues/42231271 +https://crbug.com/webrtc/6228,https://issues.webrtc.org/issues/42231272 +https://crbug.com/webrtc/6229,https://issues.webrtc.org/issues/42231273 +https://crbug.com/webrtc/623,https://issues.webrtc.org/issues/42231274 +https://crbug.com/webrtc/6230,https://issues.webrtc.org/issues/42231275 +https://crbug.com/webrtc/6231,https://issues.webrtc.org/issues/42231276 +https://crbug.com/webrtc/6232,https://issues.webrtc.org/issues/42231277 
+https://crbug.com/webrtc/6233,https://issues.webrtc.org/issues/42231278 +https://crbug.com/webrtc/6234,https://issues.webrtc.org/issues/42231279 +https://crbug.com/webrtc/6235,https://issues.webrtc.org/issues/42231280 +https://crbug.com/webrtc/6236,https://issues.webrtc.org/issues/42231281 +https://crbug.com/webrtc/6237,https://issues.webrtc.org/issues/42231282 +https://crbug.com/webrtc/6238,https://issues.webrtc.org/issues/42231283 +https://crbug.com/webrtc/6239,https://issues.webrtc.org/issues/42231284 +https://crbug.com/webrtc/624,https://issues.webrtc.org/issues/42231285 +https://crbug.com/webrtc/6241,https://issues.webrtc.org/issues/42231286 +https://crbug.com/webrtc/6242,https://issues.webrtc.org/issues/42231287 +https://crbug.com/webrtc/6243,https://issues.webrtc.org/issues/42231288 +https://crbug.com/webrtc/6244,https://issues.webrtc.org/issues/42231289 +https://crbug.com/webrtc/6245,https://issues.webrtc.org/issues/42231290 +https://crbug.com/webrtc/6246,https://issues.webrtc.org/issues/42231291 +https://crbug.com/webrtc/6247,https://issues.webrtc.org/issues/42231292 +https://crbug.com/webrtc/6248,https://issues.webrtc.org/issues/42231293 +https://crbug.com/webrtc/6249,https://issues.webrtc.org/issues/42231294 +https://crbug.com/webrtc/625,https://issues.webrtc.org/issues/42231295 +https://crbug.com/webrtc/6250,https://issues.webrtc.org/issues/42231296 +https://crbug.com/webrtc/6251,https://issues.webrtc.org/issues/42231297 +https://crbug.com/webrtc/6252,https://issues.webrtc.org/issues/42231298 +https://crbug.com/webrtc/6253,https://issues.webrtc.org/issues/42231299 +https://crbug.com/webrtc/6254,https://issues.webrtc.org/issues/42231300 +https://crbug.com/webrtc/6255,https://issues.webrtc.org/issues/42231301 +https://crbug.com/webrtc/6256,https://issues.webrtc.org/issues/42231302 +https://crbug.com/webrtc/6257,https://issues.webrtc.org/issues/42231303 +https://crbug.com/webrtc/6258,https://issues.webrtc.org/issues/42231304 +https://crbug.com/webrtc/6259,https://issues.webrtc.org/issues/42231305 +https://crbug.com/webrtc/626,https://issues.webrtc.org/issues/42231306 +https://crbug.com/webrtc/6260,https://issues.webrtc.org/issues/42231307 +https://crbug.com/webrtc/6261,https://issues.webrtc.org/issues/42231308 +https://crbug.com/webrtc/6262,https://issues.webrtc.org/issues/42231309 +https://crbug.com/webrtc/6263,https://issues.webrtc.org/issues/42231310 +https://crbug.com/webrtc/6264,https://issues.webrtc.org/issues/42231311 +https://crbug.com/webrtc/6265,https://issues.webrtc.org/issues/42231312 +https://crbug.com/webrtc/6266,https://issues.webrtc.org/issues/42231313 +https://crbug.com/webrtc/6267,https://issues.webrtc.org/issues/42231314 +https://crbug.com/webrtc/6268,https://issues.webrtc.org/issues/42231315 +https://crbug.com/webrtc/6269,https://issues.webrtc.org/issues/42231316 +https://crbug.com/webrtc/627,https://issues.webrtc.org/issues/42231317 +https://crbug.com/webrtc/6270,https://issues.webrtc.org/issues/42231318 +https://crbug.com/webrtc/6271,https://issues.webrtc.org/issues/42231319 +https://crbug.com/webrtc/6272,https://issues.webrtc.org/issues/42231320 +https://crbug.com/webrtc/6273,https://issues.webrtc.org/issues/42231321 +https://crbug.com/webrtc/6274,https://issues.webrtc.org/issues/42231322 +https://crbug.com/webrtc/6275,https://issues.webrtc.org/issues/42231323 +https://crbug.com/webrtc/6276,https://issues.webrtc.org/issues/42231324 +https://crbug.com/webrtc/6277,https://issues.webrtc.org/issues/42231325 
+https://crbug.com/webrtc/6278,https://issues.webrtc.org/issues/42231326 +https://crbug.com/webrtc/6279,https://issues.webrtc.org/issues/42231327 +https://crbug.com/webrtc/628,https://issues.webrtc.org/issues/42231328 +https://crbug.com/webrtc/6280,https://issues.webrtc.org/issues/42231329 +https://crbug.com/webrtc/6281,https://issues.webrtc.org/issues/42231330 +https://crbug.com/webrtc/6282,https://issues.webrtc.org/issues/42231331 +https://crbug.com/webrtc/6283,https://issues.webrtc.org/issues/42231332 +https://crbug.com/webrtc/6284,https://issues.webrtc.org/issues/42231333 +https://crbug.com/webrtc/6285,https://issues.webrtc.org/issues/42231334 +https://crbug.com/webrtc/6286,https://issues.webrtc.org/issues/42231335 +https://crbug.com/webrtc/6287,https://issues.webrtc.org/issues/42231336 +https://crbug.com/webrtc/6288,https://issues.webrtc.org/issues/42231337 +https://crbug.com/webrtc/6289,https://issues.webrtc.org/issues/42231338 +https://crbug.com/webrtc/629,https://issues.webrtc.org/issues/42231339 +https://crbug.com/webrtc/6290,https://issues.webrtc.org/issues/42231340 +https://crbug.com/webrtc/6291,https://issues.webrtc.org/issues/42231341 +https://crbug.com/webrtc/6292,https://issues.webrtc.org/issues/42231342 +https://crbug.com/webrtc/6293,https://issues.webrtc.org/issues/42231343 +https://crbug.com/webrtc/6294,https://issues.webrtc.org/issues/42231344 +https://crbug.com/webrtc/6295,https://issues.webrtc.org/issues/42231345 +https://crbug.com/webrtc/6296,https://issues.webrtc.org/issues/42231346 +https://crbug.com/webrtc/6297,https://issues.webrtc.org/issues/42231347 +https://crbug.com/webrtc/6298,https://issues.webrtc.org/issues/42231348 +https://crbug.com/webrtc/6299,https://issues.webrtc.org/issues/42231349 +https://crbug.com/webrtc/63,https://issues.webrtc.org/issues/42231350 +https://crbug.com/webrtc/630,https://issues.webrtc.org/issues/42231351 +https://crbug.com/webrtc/6300,https://issues.webrtc.org/issues/42231352 +https://crbug.com/webrtc/6301,https://issues.webrtc.org/issues/42231353 +https://crbug.com/webrtc/6302,https://issues.webrtc.org/issues/42231354 +https://crbug.com/webrtc/6303,https://issues.webrtc.org/issues/42231355 +https://crbug.com/webrtc/6304,https://issues.webrtc.org/issues/42231356 +https://crbug.com/webrtc/6305,https://issues.webrtc.org/issues/42231357 +https://crbug.com/webrtc/6306,https://issues.webrtc.org/issues/42231358 +https://crbug.com/webrtc/6307,https://issues.webrtc.org/issues/42231359 +https://crbug.com/webrtc/6308,https://issues.webrtc.org/issues/42231360 +https://crbug.com/webrtc/6309,https://issues.webrtc.org/issues/42231361 +https://crbug.com/webrtc/631,https://issues.webrtc.org/issues/42231362 +https://crbug.com/webrtc/6310,https://issues.webrtc.org/issues/42231363 +https://crbug.com/webrtc/6311,https://issues.webrtc.org/issues/42231364 +https://crbug.com/webrtc/6312,https://issues.webrtc.org/issues/42231365 +https://crbug.com/webrtc/6313,https://issues.webrtc.org/issues/42231366 +https://crbug.com/webrtc/6314,https://issues.webrtc.org/issues/42231367 +https://crbug.com/webrtc/6315,https://issues.webrtc.org/issues/42231368 +https://crbug.com/webrtc/6316,https://issues.webrtc.org/issues/42231369 +https://crbug.com/webrtc/6318,https://issues.webrtc.org/issues/42231370 +https://crbug.com/webrtc/6319,https://issues.webrtc.org/issues/42231371 +https://crbug.com/webrtc/632,https://issues.webrtc.org/issues/42231372 +https://crbug.com/webrtc/6321,https://issues.webrtc.org/issues/42231373 
+https://crbug.com/webrtc/6322,https://issues.webrtc.org/issues/42231374 +https://crbug.com/webrtc/6324,https://issues.webrtc.org/issues/42231375 +https://crbug.com/webrtc/6325,https://issues.webrtc.org/issues/42231376 +https://crbug.com/webrtc/6326,https://issues.webrtc.org/issues/42231377 +https://crbug.com/webrtc/6327,https://issues.webrtc.org/issues/42231378 +https://crbug.com/webrtc/6328,https://issues.webrtc.org/issues/42231379 +https://crbug.com/webrtc/6329,https://issues.webrtc.org/issues/42231380 +https://crbug.com/webrtc/633,https://issues.webrtc.org/issues/42231381 +https://crbug.com/webrtc/6330,https://issues.webrtc.org/issues/42231382 +https://crbug.com/webrtc/6331,https://issues.webrtc.org/issues/42231383 +https://crbug.com/webrtc/6332,https://issues.webrtc.org/issues/42231384 +https://crbug.com/webrtc/6333,https://issues.webrtc.org/issues/42231385 +https://crbug.com/webrtc/6334,https://issues.webrtc.org/issues/42231386 +https://crbug.com/webrtc/6335,https://issues.webrtc.org/issues/42231387 +https://crbug.com/webrtc/6336,https://issues.webrtc.org/issues/42231388 +https://crbug.com/webrtc/6338,https://issues.webrtc.org/issues/42231389 +https://crbug.com/webrtc/6339,https://issues.webrtc.org/issues/42231390 +https://crbug.com/webrtc/634,https://issues.webrtc.org/issues/42231391 +https://crbug.com/webrtc/6340,https://issues.webrtc.org/issues/42231392 +https://crbug.com/webrtc/6341,https://issues.webrtc.org/issues/42231393 +https://crbug.com/webrtc/6342,https://issues.webrtc.org/issues/42231394 +https://crbug.com/webrtc/6343,https://issues.webrtc.org/issues/42231395 +https://crbug.com/webrtc/6344,https://issues.webrtc.org/issues/42231396 +https://crbug.com/webrtc/6345,https://issues.webrtc.org/issues/42231397 +https://crbug.com/webrtc/6346,https://issues.webrtc.org/issues/42231398 +https://crbug.com/webrtc/6347,https://issues.webrtc.org/issues/42231399 +https://crbug.com/webrtc/6348,https://issues.webrtc.org/issues/42231400 +https://crbug.com/webrtc/6349,https://issues.webrtc.org/issues/42231401 +https://crbug.com/webrtc/635,https://issues.webrtc.org/issues/42231402 +https://crbug.com/webrtc/6350,https://issues.webrtc.org/issues/42231403 +https://crbug.com/webrtc/6351,https://issues.webrtc.org/issues/42231404 +https://crbug.com/webrtc/6352,https://issues.webrtc.org/issues/42231405 +https://crbug.com/webrtc/6353,https://issues.webrtc.org/issues/42231406 +https://crbug.com/webrtc/6354,https://issues.webrtc.org/issues/42231407 +https://crbug.com/webrtc/6355,https://issues.webrtc.org/issues/42231408 +https://crbug.com/webrtc/6357,https://issues.webrtc.org/issues/42231409 +https://crbug.com/webrtc/6358,https://issues.webrtc.org/issues/42231410 +https://crbug.com/webrtc/6359,https://issues.webrtc.org/issues/42231411 +https://crbug.com/webrtc/636,https://issues.webrtc.org/issues/42231412 +https://crbug.com/webrtc/6360,https://issues.webrtc.org/issues/42231413 +https://crbug.com/webrtc/6361,https://issues.webrtc.org/issues/42231414 +https://crbug.com/webrtc/6362,https://issues.webrtc.org/issues/42231415 +https://crbug.com/webrtc/6363,https://issues.webrtc.org/issues/42231416 +https://crbug.com/webrtc/6364,https://issues.webrtc.org/issues/42231417 +https://crbug.com/webrtc/6365,https://issues.webrtc.org/issues/42231418 +https://crbug.com/webrtc/6366,https://issues.webrtc.org/issues/42231419 +https://crbug.com/webrtc/6367,https://issues.webrtc.org/issues/42231420 +https://crbug.com/webrtc/6368,https://issues.webrtc.org/issues/42231421 
+https://crbug.com/webrtc/6369,https://issues.webrtc.org/issues/42231422 +https://crbug.com/webrtc/637,https://issues.webrtc.org/issues/42231423 +https://crbug.com/webrtc/6370,https://issues.webrtc.org/issues/42231424 +https://crbug.com/webrtc/6371,https://issues.webrtc.org/issues/42231425 +https://crbug.com/webrtc/6372,https://issues.webrtc.org/issues/42231426 +https://crbug.com/webrtc/6373,https://issues.webrtc.org/issues/42231427 +https://crbug.com/webrtc/6375,https://issues.webrtc.org/issues/42231428 +https://crbug.com/webrtc/6376,https://issues.webrtc.org/issues/42231429 +https://crbug.com/webrtc/6377,https://issues.webrtc.org/issues/42231430 +https://crbug.com/webrtc/6378,https://issues.webrtc.org/issues/42231431 +https://crbug.com/webrtc/6379,https://issues.webrtc.org/issues/42231432 +https://crbug.com/webrtc/638,https://issues.webrtc.org/issues/42231433 +https://crbug.com/webrtc/6380,https://issues.webrtc.org/issues/42231434 +https://crbug.com/webrtc/6381,https://issues.webrtc.org/issues/42231435 +https://crbug.com/webrtc/6382,https://issues.webrtc.org/issues/42231436 +https://crbug.com/webrtc/6383,https://issues.webrtc.org/issues/42231437 +https://crbug.com/webrtc/6384,https://issues.webrtc.org/issues/42231438 +https://crbug.com/webrtc/6385,https://issues.webrtc.org/issues/42231439 +https://crbug.com/webrtc/6386,https://issues.webrtc.org/issues/42231440 +https://crbug.com/webrtc/6387,https://issues.webrtc.org/issues/42231441 +https://crbug.com/webrtc/6388,https://issues.webrtc.org/issues/42231442 +https://crbug.com/webrtc/6389,https://issues.webrtc.org/issues/42231443 +https://crbug.com/webrtc/639,https://issues.webrtc.org/issues/42231444 +https://crbug.com/webrtc/6390,https://issues.webrtc.org/issues/42231445 +https://crbug.com/webrtc/6391,https://issues.webrtc.org/issues/42231446 +https://crbug.com/webrtc/6392,https://issues.webrtc.org/issues/42231447 +https://crbug.com/webrtc/6393,https://issues.webrtc.org/issues/42231448 +https://crbug.com/webrtc/6394,https://issues.webrtc.org/issues/42231449 +https://crbug.com/webrtc/6395,https://issues.webrtc.org/issues/42231450 +https://crbug.com/webrtc/6396,https://issues.webrtc.org/issues/42231451 +https://crbug.com/webrtc/6397,https://issues.webrtc.org/issues/42231452 +https://crbug.com/webrtc/6398,https://issues.webrtc.org/issues/42231453 +https://crbug.com/webrtc/6399,https://issues.webrtc.org/issues/42231454 +https://crbug.com/webrtc/64,https://issues.webrtc.org/issues/42231455 +https://crbug.com/webrtc/640,https://issues.webrtc.org/issues/42231456 +https://crbug.com/webrtc/6400,https://issues.webrtc.org/issues/42231457 +https://crbug.com/webrtc/6401,https://issues.webrtc.org/issues/42231458 +https://crbug.com/webrtc/6403,https://issues.webrtc.org/issues/42231459 +https://crbug.com/webrtc/6404,https://issues.webrtc.org/issues/42231460 +https://crbug.com/webrtc/6405,https://issues.webrtc.org/issues/42231461 +https://crbug.com/webrtc/6406,https://issues.webrtc.org/issues/42231462 +https://crbug.com/webrtc/6407,https://issues.webrtc.org/issues/42231463 +https://crbug.com/webrtc/6408,https://issues.webrtc.org/issues/42231464 +https://crbug.com/webrtc/6409,https://issues.webrtc.org/issues/42231465 +https://crbug.com/webrtc/641,https://issues.webrtc.org/issues/42231466 +https://crbug.com/webrtc/6410,https://issues.webrtc.org/issues/42231467 +https://crbug.com/webrtc/6411,https://issues.webrtc.org/issues/42231468 +https://crbug.com/webrtc/6413,https://issues.webrtc.org/issues/42231469 
+https://crbug.com/webrtc/6414,https://issues.webrtc.org/issues/42231470 +https://crbug.com/webrtc/6415,https://issues.webrtc.org/issues/42231471 +https://crbug.com/webrtc/6416,https://issues.webrtc.org/issues/42231472 +https://crbug.com/webrtc/6417,https://issues.webrtc.org/issues/42231473 +https://crbug.com/webrtc/6418,https://issues.webrtc.org/issues/42231474 +https://crbug.com/webrtc/6419,https://issues.webrtc.org/issues/42231475 +https://crbug.com/webrtc/642,https://issues.webrtc.org/issues/42231476 +https://crbug.com/webrtc/6420,https://issues.webrtc.org/issues/42231477 +https://crbug.com/webrtc/6421,https://issues.webrtc.org/issues/42231478 +https://crbug.com/webrtc/6422,https://issues.webrtc.org/issues/42231479 +https://crbug.com/webrtc/6423,https://issues.webrtc.org/issues/42231480 +https://crbug.com/webrtc/6424,https://issues.webrtc.org/issues/42231481 +https://crbug.com/webrtc/6425,https://issues.webrtc.org/issues/42231482 +https://crbug.com/webrtc/6426,https://issues.webrtc.org/issues/42231483 +https://crbug.com/webrtc/6427,https://issues.webrtc.org/issues/42231484 +https://crbug.com/webrtc/6428,https://issues.webrtc.org/issues/42231485 +https://crbug.com/webrtc/6429,https://issues.webrtc.org/issues/42231486 +https://crbug.com/webrtc/643,https://issues.webrtc.org/issues/42231487 +https://crbug.com/webrtc/6430,https://issues.webrtc.org/issues/42231488 +https://crbug.com/webrtc/6431,https://issues.webrtc.org/issues/42231489 +https://crbug.com/webrtc/6432,https://issues.webrtc.org/issues/42231490 +https://crbug.com/webrtc/6433,https://issues.webrtc.org/issues/42231491 +https://crbug.com/webrtc/6434,https://issues.webrtc.org/issues/42231492 +https://crbug.com/webrtc/6435,https://issues.webrtc.org/issues/42231493 +https://crbug.com/webrtc/6436,https://issues.webrtc.org/issues/42231494 +https://crbug.com/webrtc/6438,https://issues.webrtc.org/issues/42231495 +https://crbug.com/webrtc/6439,https://issues.webrtc.org/issues/42231496 +https://crbug.com/webrtc/644,https://issues.webrtc.org/issues/42231497 +https://crbug.com/webrtc/6440,https://issues.webrtc.org/issues/42231498 +https://crbug.com/webrtc/6442,https://issues.webrtc.org/issues/42231499 +https://crbug.com/webrtc/6443,https://issues.webrtc.org/issues/42231500 +https://crbug.com/webrtc/6444,https://issues.webrtc.org/issues/42231501 +https://crbug.com/webrtc/6445,https://issues.webrtc.org/issues/42231502 +https://crbug.com/webrtc/6446,https://issues.webrtc.org/issues/42231503 +https://crbug.com/webrtc/6447,https://issues.webrtc.org/issues/42231504 +https://crbug.com/webrtc/6448,https://issues.webrtc.org/issues/42231505 +https://crbug.com/webrtc/6449,https://issues.webrtc.org/issues/42231506 +https://crbug.com/webrtc/645,https://issues.webrtc.org/issues/42231507 +https://crbug.com/webrtc/6451,https://issues.webrtc.org/issues/42231508 +https://crbug.com/webrtc/6452,https://issues.webrtc.org/issues/42231509 +https://crbug.com/webrtc/6453,https://issues.webrtc.org/issues/42231510 +https://crbug.com/webrtc/6454,https://issues.webrtc.org/issues/42231511 +https://crbug.com/webrtc/6455,https://issues.webrtc.org/issues/42231512 +https://crbug.com/webrtc/6456,https://issues.webrtc.org/issues/42231513 +https://crbug.com/webrtc/6457,https://issues.webrtc.org/issues/42231514 +https://crbug.com/webrtc/6458,https://issues.webrtc.org/issues/42231515 +https://crbug.com/webrtc/6459,https://issues.webrtc.org/issues/42231516 +https://crbug.com/webrtc/646,https://issues.webrtc.org/issues/42231517 
+https://crbug.com/webrtc/6460,https://issues.webrtc.org/issues/42231518 +https://crbug.com/webrtc/6461,https://issues.webrtc.org/issues/42231519 +https://crbug.com/webrtc/6462,https://issues.webrtc.org/issues/42231520 +https://crbug.com/webrtc/6463,https://issues.webrtc.org/issues/42231521 +https://crbug.com/webrtc/6464,https://issues.webrtc.org/issues/42231522 +https://crbug.com/webrtc/6465,https://issues.webrtc.org/issues/42231523 +https://crbug.com/webrtc/6466,https://issues.webrtc.org/issues/42231524 +https://crbug.com/webrtc/6467,https://issues.webrtc.org/issues/42231525 +https://crbug.com/webrtc/6468,https://issues.webrtc.org/issues/42231526 +https://crbug.com/webrtc/6469,https://issues.webrtc.org/issues/42231527 +https://crbug.com/webrtc/647,https://issues.webrtc.org/issues/42231528 +https://crbug.com/webrtc/6470,https://issues.webrtc.org/issues/42231529 +https://crbug.com/webrtc/6471,https://issues.webrtc.org/issues/42231530 +https://crbug.com/webrtc/6472,https://issues.webrtc.org/issues/42231531 +https://crbug.com/webrtc/6473,https://issues.webrtc.org/issues/42231532 +https://crbug.com/webrtc/6474,https://issues.webrtc.org/issues/42231533 +https://crbug.com/webrtc/6475,https://issues.webrtc.org/issues/42231534 +https://crbug.com/webrtc/6476,https://issues.webrtc.org/issues/42231535 +https://crbug.com/webrtc/6477,https://issues.webrtc.org/issues/42231536 +https://crbug.com/webrtc/6478,https://issues.webrtc.org/issues/42231537 +https://crbug.com/webrtc/6479,https://issues.webrtc.org/issues/42231538 +https://crbug.com/webrtc/648,https://issues.webrtc.org/issues/42231539 +https://crbug.com/webrtc/6480,https://issues.webrtc.org/issues/42231540 +https://crbug.com/webrtc/6481,https://issues.webrtc.org/issues/42231541 +https://crbug.com/webrtc/6482,https://issues.webrtc.org/issues/42231542 +https://crbug.com/webrtc/6483,https://issues.webrtc.org/issues/42231543 +https://crbug.com/webrtc/6485,https://issues.webrtc.org/issues/42231544 +https://crbug.com/webrtc/6486,https://issues.webrtc.org/issues/42231545 +https://crbug.com/webrtc/6487,https://issues.webrtc.org/issues/42231546 +https://crbug.com/webrtc/6488,https://issues.webrtc.org/issues/42231547 +https://crbug.com/webrtc/6489,https://issues.webrtc.org/issues/42231548 +https://crbug.com/webrtc/649,https://issues.webrtc.org/issues/42231549 +https://crbug.com/webrtc/6490,https://issues.webrtc.org/issues/42231550 +https://crbug.com/webrtc/6491,https://issues.webrtc.org/issues/42231551 +https://crbug.com/webrtc/6492,https://issues.webrtc.org/issues/42231552 +https://crbug.com/webrtc/6494,https://issues.webrtc.org/issues/42231553 +https://crbug.com/webrtc/6495,https://issues.webrtc.org/issues/42231554 +https://crbug.com/webrtc/6496,https://issues.webrtc.org/issues/42231555 +https://crbug.com/webrtc/6497,https://issues.webrtc.org/issues/42231556 +https://crbug.com/webrtc/6498,https://issues.webrtc.org/issues/42231557 +https://crbug.com/webrtc/6499,https://issues.webrtc.org/issues/42231558 +https://crbug.com/webrtc/65,https://issues.webrtc.org/issues/42231559 +https://crbug.com/webrtc/650,https://issues.webrtc.org/issues/42231560 +https://crbug.com/webrtc/6501,https://issues.webrtc.org/issues/42231561 +https://crbug.com/webrtc/6502,https://issues.webrtc.org/issues/42231562 +https://crbug.com/webrtc/6503,https://issues.webrtc.org/issues/42231563 +https://crbug.com/webrtc/6504,https://issues.webrtc.org/issues/42231564 +https://crbug.com/webrtc/6505,https://issues.webrtc.org/issues/42231565 
+https://crbug.com/webrtc/6506,https://issues.webrtc.org/issues/42231566 +https://crbug.com/webrtc/6507,https://issues.webrtc.org/issues/42231567 +https://crbug.com/webrtc/6508,https://issues.webrtc.org/issues/42231568 +https://crbug.com/webrtc/6509,https://issues.webrtc.org/issues/42231569 +https://crbug.com/webrtc/651,https://issues.webrtc.org/issues/42231570 +https://crbug.com/webrtc/6510,https://issues.webrtc.org/issues/42231571 +https://crbug.com/webrtc/6511,https://issues.webrtc.org/issues/42231572 +https://crbug.com/webrtc/6512,https://issues.webrtc.org/issues/42231573 +https://crbug.com/webrtc/6513,https://issues.webrtc.org/issues/42231574 +https://crbug.com/webrtc/6514,https://issues.webrtc.org/issues/42231575 +https://crbug.com/webrtc/6515,https://issues.webrtc.org/issues/42231576 +https://crbug.com/webrtc/6516,https://issues.webrtc.org/issues/42231577 +https://crbug.com/webrtc/6517,https://issues.webrtc.org/issues/42231578 +https://crbug.com/webrtc/6518,https://issues.webrtc.org/issues/42231579 +https://crbug.com/webrtc/6519,https://issues.webrtc.org/issues/42231580 +https://crbug.com/webrtc/652,https://issues.webrtc.org/issues/42231581 +https://crbug.com/webrtc/6520,https://issues.webrtc.org/issues/42231582 +https://crbug.com/webrtc/6521,https://issues.webrtc.org/issues/42231583 +https://crbug.com/webrtc/6522,https://issues.webrtc.org/issues/42231584 +https://crbug.com/webrtc/6523,https://issues.webrtc.org/issues/42231585 +https://crbug.com/webrtc/6524,https://issues.webrtc.org/issues/42231586 +https://crbug.com/webrtc/6525,https://issues.webrtc.org/issues/42231587 +https://crbug.com/webrtc/6526,https://issues.webrtc.org/issues/42231588 +https://crbug.com/webrtc/6527,https://issues.webrtc.org/issues/42231589 +https://crbug.com/webrtc/6528,https://issues.webrtc.org/issues/42231590 +https://crbug.com/webrtc/6529,https://issues.webrtc.org/issues/42231591 +https://crbug.com/webrtc/653,https://issues.webrtc.org/issues/42231592 +https://crbug.com/webrtc/6530,https://issues.webrtc.org/issues/42231593 +https://crbug.com/webrtc/6531,https://issues.webrtc.org/issues/42231594 +https://crbug.com/webrtc/6532,https://issues.webrtc.org/issues/42231595 +https://crbug.com/webrtc/6533,https://issues.webrtc.org/issues/42231596 +https://crbug.com/webrtc/6534,https://issues.webrtc.org/issues/42231597 +https://crbug.com/webrtc/6535,https://issues.webrtc.org/issues/42231598 +https://crbug.com/webrtc/6536,https://issues.webrtc.org/issues/42231599 +https://crbug.com/webrtc/6537,https://issues.webrtc.org/issues/42231600 +https://crbug.com/webrtc/6538,https://issues.webrtc.org/issues/42231601 +https://crbug.com/webrtc/6539,https://issues.webrtc.org/issues/42231602 +https://crbug.com/webrtc/654,https://issues.webrtc.org/issues/42231603 +https://crbug.com/webrtc/6540,https://issues.webrtc.org/issues/42231604 +https://crbug.com/webrtc/6542,https://issues.webrtc.org/issues/42231605 +https://crbug.com/webrtc/6543,https://issues.webrtc.org/issues/42231606 +https://crbug.com/webrtc/6544,https://issues.webrtc.org/issues/42231607 +https://crbug.com/webrtc/6545,https://issues.webrtc.org/issues/42231608 +https://crbug.com/webrtc/6546,https://issues.webrtc.org/issues/42231609 +https://crbug.com/webrtc/6547,https://issues.webrtc.org/issues/42231610 +https://crbug.com/webrtc/6548,https://issues.webrtc.org/issues/42231611 +https://crbug.com/webrtc/6549,https://issues.webrtc.org/issues/42231612 +https://crbug.com/webrtc/655,https://issues.webrtc.org/issues/42231613 
+https://crbug.com/webrtc/6550,https://issues.webrtc.org/issues/42231614 +https://crbug.com/webrtc/6551,https://issues.webrtc.org/issues/42231615 +https://crbug.com/webrtc/6552,https://issues.webrtc.org/issues/42231616 +https://crbug.com/webrtc/6553,https://issues.webrtc.org/issues/42231617 +https://crbug.com/webrtc/6554,https://issues.webrtc.org/issues/42231618 +https://crbug.com/webrtc/6555,https://issues.webrtc.org/issues/42231619 +https://crbug.com/webrtc/6556,https://issues.webrtc.org/issues/42231620 +https://crbug.com/webrtc/6557,https://issues.webrtc.org/issues/42231621 +https://crbug.com/webrtc/6558,https://issues.webrtc.org/issues/42231622 +https://crbug.com/webrtc/6559,https://issues.webrtc.org/issues/42231623 +https://crbug.com/webrtc/656,https://issues.webrtc.org/issues/42231624 +https://crbug.com/webrtc/6560,https://issues.webrtc.org/issues/42231625 +https://crbug.com/webrtc/6561,https://issues.webrtc.org/issues/42231626 +https://crbug.com/webrtc/6562,https://issues.webrtc.org/issues/42231627 +https://crbug.com/webrtc/6563,https://issues.webrtc.org/issues/42231628 +https://crbug.com/webrtc/6564,https://issues.webrtc.org/issues/42231629 +https://crbug.com/webrtc/6565,https://issues.webrtc.org/issues/42231630 +https://crbug.com/webrtc/6566,https://issues.webrtc.org/issues/42231631 +https://crbug.com/webrtc/6567,https://issues.webrtc.org/issues/42231632 +https://crbug.com/webrtc/6568,https://issues.webrtc.org/issues/42231633 +https://crbug.com/webrtc/6569,https://issues.webrtc.org/issues/42231634 +https://crbug.com/webrtc/657,https://issues.webrtc.org/issues/42231635 +https://crbug.com/webrtc/6570,https://issues.webrtc.org/issues/42231636 +https://crbug.com/webrtc/6571,https://issues.webrtc.org/issues/42231637 +https://crbug.com/webrtc/6572,https://issues.webrtc.org/issues/42231638 +https://crbug.com/webrtc/6573,https://issues.webrtc.org/issues/42231639 +https://crbug.com/webrtc/6574,https://issues.webrtc.org/issues/42231640 +https://crbug.com/webrtc/6575,https://issues.webrtc.org/issues/42231641 +https://crbug.com/webrtc/6576,https://issues.webrtc.org/issues/42231642 +https://crbug.com/webrtc/6577,https://issues.webrtc.org/issues/42231643 +https://crbug.com/webrtc/6578,https://issues.webrtc.org/issues/42231644 +https://crbug.com/webrtc/6579,https://issues.webrtc.org/issues/42231645 +https://crbug.com/webrtc/658,https://issues.webrtc.org/issues/42231646 +https://crbug.com/webrtc/6580,https://issues.webrtc.org/issues/42231647 +https://crbug.com/webrtc/6581,https://issues.webrtc.org/issues/42231648 +https://crbug.com/webrtc/6582,https://issues.webrtc.org/issues/42231649 +https://crbug.com/webrtc/6584,https://issues.webrtc.org/issues/42231650 +https://crbug.com/webrtc/6585,https://issues.webrtc.org/issues/42231651 +https://crbug.com/webrtc/6586,https://issues.webrtc.org/issues/42231652 +https://crbug.com/webrtc/6587,https://issues.webrtc.org/issues/42231653 +https://crbug.com/webrtc/6588,https://issues.webrtc.org/issues/42231654 +https://crbug.com/webrtc/6589,https://issues.webrtc.org/issues/42231655 +https://crbug.com/webrtc/659,https://issues.webrtc.org/issues/42231656 +https://crbug.com/webrtc/6590,https://issues.webrtc.org/issues/42231657 +https://crbug.com/webrtc/6591,https://issues.webrtc.org/issues/42231658 +https://crbug.com/webrtc/6592,https://issues.webrtc.org/issues/42231659 +https://crbug.com/webrtc/6593,https://issues.webrtc.org/issues/42231660 +https://crbug.com/webrtc/6594,https://issues.webrtc.org/issues/42231661 
+https://crbug.com/webrtc/6595,https://issues.webrtc.org/issues/42231662 +https://crbug.com/webrtc/6596,https://issues.webrtc.org/issues/42231663 +https://crbug.com/webrtc/6597,https://issues.webrtc.org/issues/42231664 +https://crbug.com/webrtc/6598,https://issues.webrtc.org/issues/42231665 +https://crbug.com/webrtc/6599,https://issues.webrtc.org/issues/42231666 +https://crbug.com/webrtc/66,https://issues.webrtc.org/issues/42231667 +https://crbug.com/webrtc/660,https://issues.webrtc.org/issues/42231668 +https://crbug.com/webrtc/6600,https://issues.webrtc.org/issues/42231669 +https://crbug.com/webrtc/6601,https://issues.webrtc.org/issues/42231670 +https://crbug.com/webrtc/6602,https://issues.webrtc.org/issues/42231671 +https://crbug.com/webrtc/6603,https://issues.webrtc.org/issues/42231672 +https://crbug.com/webrtc/6604,https://issues.webrtc.org/issues/42231673 +https://crbug.com/webrtc/6605,https://issues.webrtc.org/issues/42231674 +https://crbug.com/webrtc/6606,https://issues.webrtc.org/issues/42231675 +https://crbug.com/webrtc/6607,https://issues.webrtc.org/issues/42231676 +https://crbug.com/webrtc/6608,https://issues.webrtc.org/issues/42231677 +https://crbug.com/webrtc/6609,https://issues.webrtc.org/issues/42231678 +https://crbug.com/webrtc/661,https://issues.webrtc.org/issues/42231679 +https://crbug.com/webrtc/6610,https://issues.webrtc.org/issues/42231680 +https://crbug.com/webrtc/6611,https://issues.webrtc.org/issues/42231681 +https://crbug.com/webrtc/6612,https://issues.webrtc.org/issues/42231682 +https://crbug.com/webrtc/6613,https://issues.webrtc.org/issues/42231683 +https://crbug.com/webrtc/6614,https://issues.webrtc.org/issues/42231684 +https://crbug.com/webrtc/6615,https://issues.webrtc.org/issues/42231685 +https://crbug.com/webrtc/6616,https://issues.webrtc.org/issues/42231686 +https://crbug.com/webrtc/6617,https://issues.webrtc.org/issues/42231687 +https://crbug.com/webrtc/6618,https://issues.webrtc.org/issues/42231688 +https://crbug.com/webrtc/6619,https://issues.webrtc.org/issues/42231689 +https://crbug.com/webrtc/662,https://issues.webrtc.org/issues/42231690 +https://crbug.com/webrtc/6620,https://issues.webrtc.org/issues/42231691 +https://crbug.com/webrtc/6621,https://issues.webrtc.org/issues/42231692 +https://crbug.com/webrtc/6622,https://issues.webrtc.org/issues/42231693 +https://crbug.com/webrtc/6623,https://issues.webrtc.org/issues/42231694 +https://crbug.com/webrtc/6624,https://issues.webrtc.org/issues/42231695 +https://crbug.com/webrtc/6626,https://issues.webrtc.org/issues/42231696 +https://crbug.com/webrtc/6628,https://issues.webrtc.org/issues/42231697 +https://crbug.com/webrtc/6629,https://issues.webrtc.org/issues/42231698 +https://crbug.com/webrtc/663,https://issues.webrtc.org/issues/42231699 +https://crbug.com/webrtc/6630,https://issues.webrtc.org/issues/42231700 +https://crbug.com/webrtc/6631,https://issues.webrtc.org/issues/42231701 +https://crbug.com/webrtc/6632,https://issues.webrtc.org/issues/42231702 +https://crbug.com/webrtc/6633,https://issues.webrtc.org/issues/42231703 +https://crbug.com/webrtc/6634,https://issues.webrtc.org/issues/42231704 +https://crbug.com/webrtc/6635,https://issues.webrtc.org/issues/42231705 +https://crbug.com/webrtc/6637,https://issues.webrtc.org/issues/42231706 +https://crbug.com/webrtc/6638,https://issues.webrtc.org/issues/42231707 +https://crbug.com/webrtc/6639,https://issues.webrtc.org/issues/42231708 +https://crbug.com/webrtc/664,https://issues.webrtc.org/issues/42231709 
+https://crbug.com/webrtc/6640,https://issues.webrtc.org/issues/42231710 +https://crbug.com/webrtc/6642,https://issues.webrtc.org/issues/42231711 +https://crbug.com/webrtc/6643,https://issues.webrtc.org/issues/42231712 +https://crbug.com/webrtc/6644,https://issues.webrtc.org/issues/42231713 +https://crbug.com/webrtc/6645,https://issues.webrtc.org/issues/42231714 +https://crbug.com/webrtc/6647,https://issues.webrtc.org/issues/42231715 +https://crbug.com/webrtc/6648,https://issues.webrtc.org/issues/42231716 +https://crbug.com/webrtc/6649,https://issues.webrtc.org/issues/42231717 +https://crbug.com/webrtc/665,https://issues.webrtc.org/issues/42231718 +https://crbug.com/webrtc/6650,https://issues.webrtc.org/issues/42231719 +https://crbug.com/webrtc/6651,https://issues.webrtc.org/issues/42231720 +https://crbug.com/webrtc/6652,https://issues.webrtc.org/issues/42231721 +https://crbug.com/webrtc/6653,https://issues.webrtc.org/issues/42231722 +https://crbug.com/webrtc/6654,https://issues.webrtc.org/issues/42231723 +https://crbug.com/webrtc/6655,https://issues.webrtc.org/issues/42231724 +https://crbug.com/webrtc/6656,https://issues.webrtc.org/issues/42231725 +https://crbug.com/webrtc/6658,https://issues.webrtc.org/issues/42231726 +https://crbug.com/webrtc/6659,https://issues.webrtc.org/issues/42231727 +https://crbug.com/webrtc/666,https://issues.webrtc.org/issues/42231728 +https://crbug.com/webrtc/6660,https://issues.webrtc.org/issues/42231729 +https://crbug.com/webrtc/6661,https://issues.webrtc.org/issues/42231730 +https://crbug.com/webrtc/6662,https://issues.webrtc.org/issues/42231731 +https://crbug.com/webrtc/6663,https://issues.webrtc.org/issues/42231732 +https://crbug.com/webrtc/6664,https://issues.webrtc.org/issues/42231733 +https://crbug.com/webrtc/6665,https://issues.webrtc.org/issues/42231734 +https://crbug.com/webrtc/6666,https://issues.webrtc.org/issues/42231735 +https://crbug.com/webrtc/6667,https://issues.webrtc.org/issues/42231736 +https://crbug.com/webrtc/6668,https://issues.webrtc.org/issues/42231737 +https://crbug.com/webrtc/6669,https://issues.webrtc.org/issues/42231738 +https://crbug.com/webrtc/667,https://issues.webrtc.org/issues/42231739 +https://crbug.com/webrtc/6670,https://issues.webrtc.org/issues/42231740 +https://crbug.com/webrtc/6671,https://issues.webrtc.org/issues/42231741 +https://crbug.com/webrtc/6672,https://issues.webrtc.org/issues/42231742 +https://crbug.com/webrtc/6673,https://issues.webrtc.org/issues/42231743 +https://crbug.com/webrtc/6674,https://issues.webrtc.org/issues/42231744 +https://crbug.com/webrtc/6675,https://issues.webrtc.org/issues/42231745 +https://crbug.com/webrtc/6676,https://issues.webrtc.org/issues/42231746 +https://crbug.com/webrtc/6677,https://issues.webrtc.org/issues/42231747 +https://crbug.com/webrtc/6678,https://issues.webrtc.org/issues/42231748 +https://crbug.com/webrtc/6679,https://issues.webrtc.org/issues/42231749 +https://crbug.com/webrtc/668,https://issues.webrtc.org/issues/42231750 +https://crbug.com/webrtc/6680,https://issues.webrtc.org/issues/42231751 +https://crbug.com/webrtc/6681,https://issues.webrtc.org/issues/42231752 +https://crbug.com/webrtc/6682,https://issues.webrtc.org/issues/42231753 +https://crbug.com/webrtc/6683,https://issues.webrtc.org/issues/42231754 +https://crbug.com/webrtc/6684,https://issues.webrtc.org/issues/42231755 +https://crbug.com/webrtc/6685,https://issues.webrtc.org/issues/42231756 +https://crbug.com/webrtc/6686,https://issues.webrtc.org/issues/42231757 
+https://crbug.com/webrtc/6687,https://issues.webrtc.org/issues/42231758 +https://crbug.com/webrtc/6688,https://issues.webrtc.org/issues/42231759 +https://crbug.com/webrtc/6689,https://issues.webrtc.org/issues/42231760 +https://crbug.com/webrtc/669,https://issues.webrtc.org/issues/42231761 +https://crbug.com/webrtc/6690,https://issues.webrtc.org/issues/42231762 +https://crbug.com/webrtc/6691,https://issues.webrtc.org/issues/42231763 +https://crbug.com/webrtc/6692,https://issues.webrtc.org/issues/42231764 +https://crbug.com/webrtc/6693,https://issues.webrtc.org/issues/42231765 +https://crbug.com/webrtc/6694,https://issues.webrtc.org/issues/42231766 +https://crbug.com/webrtc/6695,https://issues.webrtc.org/issues/42231767 +https://crbug.com/webrtc/6696,https://issues.webrtc.org/issues/42231768 +https://crbug.com/webrtc/6697,https://issues.webrtc.org/issues/42231769 +https://crbug.com/webrtc/6698,https://issues.webrtc.org/issues/42231770 +https://crbug.com/webrtc/6699,https://issues.webrtc.org/issues/42231771 +https://crbug.com/webrtc/67,https://issues.webrtc.org/issues/42231772 +https://crbug.com/webrtc/670,https://issues.webrtc.org/issues/42231773 +https://crbug.com/webrtc/6700,https://issues.webrtc.org/issues/42231774 +https://crbug.com/webrtc/6701,https://issues.webrtc.org/issues/42231775 +https://crbug.com/webrtc/6702,https://issues.webrtc.org/issues/42231776 +https://crbug.com/webrtc/6703,https://issues.webrtc.org/issues/42231777 +https://crbug.com/webrtc/6704,https://issues.webrtc.org/issues/42231778 +https://crbug.com/webrtc/6705,https://issues.webrtc.org/issues/42231779 +https://crbug.com/webrtc/6706,https://issues.webrtc.org/issues/42231780 +https://crbug.com/webrtc/6707,https://issues.webrtc.org/issues/42231781 +https://crbug.com/webrtc/6708,https://issues.webrtc.org/issues/42231782 +https://crbug.com/webrtc/6709,https://issues.webrtc.org/issues/42231783 +https://crbug.com/webrtc/671,https://issues.webrtc.org/issues/42231784 +https://crbug.com/webrtc/6710,https://issues.webrtc.org/issues/42231785 +https://crbug.com/webrtc/6711,https://issues.webrtc.org/issues/42231786 +https://crbug.com/webrtc/6712,https://issues.webrtc.org/issues/42231787 +https://crbug.com/webrtc/6713,https://issues.webrtc.org/issues/42231788 +https://crbug.com/webrtc/6714,https://issues.webrtc.org/issues/42231789 +https://crbug.com/webrtc/6715,https://issues.webrtc.org/issues/42231790 +https://crbug.com/webrtc/6716,https://issues.webrtc.org/issues/42231791 +https://crbug.com/webrtc/6717,https://issues.webrtc.org/issues/42231792 +https://crbug.com/webrtc/6718,https://issues.webrtc.org/issues/42231793 +https://crbug.com/webrtc/6719,https://issues.webrtc.org/issues/42231794 +https://crbug.com/webrtc/672,https://issues.webrtc.org/issues/42231795 +https://crbug.com/webrtc/6720,https://issues.webrtc.org/issues/42231796 +https://crbug.com/webrtc/6721,https://issues.webrtc.org/issues/42231797 +https://crbug.com/webrtc/6722,https://issues.webrtc.org/issues/42231798 +https://crbug.com/webrtc/6723,https://issues.webrtc.org/issues/42231799 +https://crbug.com/webrtc/6724,https://issues.webrtc.org/issues/42231800 +https://crbug.com/webrtc/6725,https://issues.webrtc.org/issues/42231801 +https://crbug.com/webrtc/6726,https://issues.webrtc.org/issues/42231802 +https://crbug.com/webrtc/6727,https://issues.webrtc.org/issues/42231803 +https://crbug.com/webrtc/6728,https://issues.webrtc.org/issues/42231804 +https://crbug.com/webrtc/6729,https://issues.webrtc.org/issues/42231805 
+https://crbug.com/webrtc/673,https://issues.webrtc.org/issues/42231806 +https://crbug.com/webrtc/6730,https://issues.webrtc.org/issues/42231807 +https://crbug.com/webrtc/6731,https://issues.webrtc.org/issues/42231808 +https://crbug.com/webrtc/6732,https://issues.webrtc.org/issues/42231809 +https://crbug.com/webrtc/6733,https://issues.webrtc.org/issues/42231810 +https://crbug.com/webrtc/6734,https://issues.webrtc.org/issues/42231811 +https://crbug.com/webrtc/6735,https://issues.webrtc.org/issues/42231812 +https://crbug.com/webrtc/6736,https://issues.webrtc.org/issues/42231813 +https://crbug.com/webrtc/6737,https://issues.webrtc.org/issues/42231814 +https://crbug.com/webrtc/6738,https://issues.webrtc.org/issues/42231815 +https://crbug.com/webrtc/6739,https://issues.webrtc.org/issues/42231816 +https://crbug.com/webrtc/674,https://issues.webrtc.org/issues/42231817 +https://crbug.com/webrtc/6740,https://issues.webrtc.org/issues/42231818 +https://crbug.com/webrtc/6741,https://issues.webrtc.org/issues/42231819 +https://crbug.com/webrtc/6742,https://issues.webrtc.org/issues/42231820 +https://crbug.com/webrtc/6743,https://issues.webrtc.org/issues/42231821 +https://crbug.com/webrtc/6744,https://issues.webrtc.org/issues/42231822 +https://crbug.com/webrtc/6745,https://issues.webrtc.org/issues/42231823 +https://crbug.com/webrtc/6746,https://issues.webrtc.org/issues/42231824 +https://crbug.com/webrtc/6747,https://issues.webrtc.org/issues/42231825 +https://crbug.com/webrtc/6748,https://issues.webrtc.org/issues/42231826 +https://crbug.com/webrtc/6749,https://issues.webrtc.org/issues/42231827 +https://crbug.com/webrtc/675,https://issues.webrtc.org/issues/42231828 +https://crbug.com/webrtc/6750,https://issues.webrtc.org/issues/42231829 +https://crbug.com/webrtc/6751,https://issues.webrtc.org/issues/42231830 +https://crbug.com/webrtc/6752,https://issues.webrtc.org/issues/42231831 +https://crbug.com/webrtc/6753,https://issues.webrtc.org/issues/42231832 +https://crbug.com/webrtc/6759,https://issues.webrtc.org/issues/42231833 +https://crbug.com/webrtc/676,https://issues.webrtc.org/issues/42231834 +https://crbug.com/webrtc/6760,https://issues.webrtc.org/issues/42231835 +https://crbug.com/webrtc/6761,https://issues.webrtc.org/issues/42231836 +https://crbug.com/webrtc/6762,https://issues.webrtc.org/issues/42231837 +https://crbug.com/webrtc/6763,https://issues.webrtc.org/issues/42231838 +https://crbug.com/webrtc/6764,https://issues.webrtc.org/issues/42231839 +https://crbug.com/webrtc/6765,https://issues.webrtc.org/issues/42231840 +https://crbug.com/webrtc/6766,https://issues.webrtc.org/issues/42231841 +https://crbug.com/webrtc/6767,https://issues.webrtc.org/issues/42231842 +https://crbug.com/webrtc/6769,https://issues.webrtc.org/issues/42231843 +https://crbug.com/webrtc/677,https://issues.webrtc.org/issues/42231844 +https://crbug.com/webrtc/6770,https://issues.webrtc.org/issues/42231845 +https://crbug.com/webrtc/6771,https://issues.webrtc.org/issues/42231846 +https://crbug.com/webrtc/6772,https://issues.webrtc.org/issues/42231847 +https://crbug.com/webrtc/6773,https://issues.webrtc.org/issues/42231848 +https://crbug.com/webrtc/6774,https://issues.webrtc.org/issues/42231849 +https://crbug.com/webrtc/6775,https://issues.webrtc.org/issues/42231850 +https://crbug.com/webrtc/6776,https://issues.webrtc.org/issues/42231851 +https://crbug.com/webrtc/6777,https://issues.webrtc.org/issues/42231852 +https://crbug.com/webrtc/6778,https://issues.webrtc.org/issues/42231853 
+https://crbug.com/webrtc/6779,https://issues.webrtc.org/issues/42231854 +https://crbug.com/webrtc/678,https://issues.webrtc.org/issues/42231855 +https://crbug.com/webrtc/6780,https://issues.webrtc.org/issues/42231856 +https://crbug.com/webrtc/6781,https://issues.webrtc.org/issues/42231857 +https://crbug.com/webrtc/6782,https://issues.webrtc.org/issues/42231858 +https://crbug.com/webrtc/6783,https://issues.webrtc.org/issues/42231859 +https://crbug.com/webrtc/6784,https://issues.webrtc.org/issues/42231860 +https://crbug.com/webrtc/6785,https://issues.webrtc.org/issues/42231861 +https://crbug.com/webrtc/6786,https://issues.webrtc.org/issues/42231862 +https://crbug.com/webrtc/6787,https://issues.webrtc.org/issues/42231863 +https://crbug.com/webrtc/6788,https://issues.webrtc.org/issues/42231864 +https://crbug.com/webrtc/6789,https://issues.webrtc.org/issues/42231865 +https://crbug.com/webrtc/679,https://issues.webrtc.org/issues/42231866 +https://crbug.com/webrtc/6790,https://issues.webrtc.org/issues/42231867 +https://crbug.com/webrtc/6791,https://issues.webrtc.org/issues/42231868 +https://crbug.com/webrtc/6792,https://issues.webrtc.org/issues/42231869 +https://crbug.com/webrtc/6793,https://issues.webrtc.org/issues/42231870 +https://crbug.com/webrtc/6794,https://issues.webrtc.org/issues/42231871 +https://crbug.com/webrtc/6795,https://issues.webrtc.org/issues/42231872 +https://crbug.com/webrtc/6796,https://issues.webrtc.org/issues/42231873 +https://crbug.com/webrtc/6797,https://issues.webrtc.org/issues/42231874 +https://crbug.com/webrtc/6798,https://issues.webrtc.org/issues/42231875 +https://crbug.com/webrtc/6799,https://issues.webrtc.org/issues/42231876 +https://crbug.com/webrtc/68,https://issues.webrtc.org/issues/42231877 +https://crbug.com/webrtc/680,https://issues.webrtc.org/issues/42231878 +https://crbug.com/webrtc/6800,https://issues.webrtc.org/issues/42231879 +https://crbug.com/webrtc/6801,https://issues.webrtc.org/issues/42231880 +https://crbug.com/webrtc/6802,https://issues.webrtc.org/issues/42231881 +https://crbug.com/webrtc/6803,https://issues.webrtc.org/issues/42231882 +https://crbug.com/webrtc/6805,https://issues.webrtc.org/issues/42231883 +https://crbug.com/webrtc/6806,https://issues.webrtc.org/issues/42231884 +https://crbug.com/webrtc/6807,https://issues.webrtc.org/issues/42231885 +https://crbug.com/webrtc/6808,https://issues.webrtc.org/issues/42231886 +https://crbug.com/webrtc/6809,https://issues.webrtc.org/issues/42231887 +https://crbug.com/webrtc/681,https://issues.webrtc.org/issues/42231888 +https://crbug.com/webrtc/6810,https://issues.webrtc.org/issues/42231889 +https://crbug.com/webrtc/6811,https://issues.webrtc.org/issues/42231890 +https://crbug.com/webrtc/6812,https://issues.webrtc.org/issues/42231891 +https://crbug.com/webrtc/6813,https://issues.webrtc.org/issues/42231892 +https://crbug.com/webrtc/6814,https://issues.webrtc.org/issues/42231893 +https://crbug.com/webrtc/6815,https://issues.webrtc.org/issues/42231894 +https://crbug.com/webrtc/6816,https://issues.webrtc.org/issues/42231895 +https://crbug.com/webrtc/6817,https://issues.webrtc.org/issues/42231896 +https://crbug.com/webrtc/6818,https://issues.webrtc.org/issues/42231897 +https://crbug.com/webrtc/6819,https://issues.webrtc.org/issues/42231898 +https://crbug.com/webrtc/682,https://issues.webrtc.org/issues/42231899 +https://crbug.com/webrtc/6821,https://issues.webrtc.org/issues/42231900 +https://crbug.com/webrtc/6822,https://issues.webrtc.org/issues/42231901 
+https://crbug.com/webrtc/6823,https://issues.webrtc.org/issues/42231902 +https://crbug.com/webrtc/6824,https://issues.webrtc.org/issues/42231903 +https://crbug.com/webrtc/6825,https://issues.webrtc.org/issues/42231904 +https://crbug.com/webrtc/6827,https://issues.webrtc.org/issues/42231905 +https://crbug.com/webrtc/6828,https://issues.webrtc.org/issues/42231906 +https://crbug.com/webrtc/6829,https://issues.webrtc.org/issues/42231907 +https://crbug.com/webrtc/683,https://issues.webrtc.org/issues/42231908 +https://crbug.com/webrtc/6830,https://issues.webrtc.org/issues/42231909 +https://crbug.com/webrtc/6831,https://issues.webrtc.org/issues/42231910 +https://crbug.com/webrtc/6832,https://issues.webrtc.org/issues/42231911 +https://crbug.com/webrtc/6833,https://issues.webrtc.org/issues/42231912 +https://crbug.com/webrtc/6834,https://issues.webrtc.org/issues/42231913 +https://crbug.com/webrtc/6835,https://issues.webrtc.org/issues/42231914 +https://crbug.com/webrtc/6836,https://issues.webrtc.org/issues/42231915 +https://crbug.com/webrtc/6837,https://issues.webrtc.org/issues/42231916 +https://crbug.com/webrtc/6838,https://issues.webrtc.org/issues/42231917 +https://crbug.com/webrtc/6839,https://issues.webrtc.org/issues/42231918 +https://crbug.com/webrtc/684,https://issues.webrtc.org/issues/42231919 +https://crbug.com/webrtc/6840,https://issues.webrtc.org/issues/42231920 +https://crbug.com/webrtc/6841,https://issues.webrtc.org/issues/42231921 +https://crbug.com/webrtc/6842,https://issues.webrtc.org/issues/42231922 +https://crbug.com/webrtc/6843,https://issues.webrtc.org/issues/42231923 +https://crbug.com/webrtc/6844,https://issues.webrtc.org/issues/42231924 +https://crbug.com/webrtc/6845,https://issues.webrtc.org/issues/42231925 +https://crbug.com/webrtc/6846,https://issues.webrtc.org/issues/42231926 +https://crbug.com/webrtc/6847,https://issues.webrtc.org/issues/42231927 +https://crbug.com/webrtc/6848,https://issues.webrtc.org/issues/42231928 +https://crbug.com/webrtc/6849,https://issues.webrtc.org/issues/42231929 +https://crbug.com/webrtc/685,https://issues.webrtc.org/issues/42231930 +https://crbug.com/webrtc/6850,https://issues.webrtc.org/issues/42231931 +https://crbug.com/webrtc/6851,https://issues.webrtc.org/issues/42231932 +https://crbug.com/webrtc/6852,https://issues.webrtc.org/issues/42231933 +https://crbug.com/webrtc/6853,https://issues.webrtc.org/issues/42231934 +https://crbug.com/webrtc/6854,https://issues.webrtc.org/issues/42231935 +https://crbug.com/webrtc/6855,https://issues.webrtc.org/issues/42231936 +https://crbug.com/webrtc/6856,https://issues.webrtc.org/issues/42231937 +https://crbug.com/webrtc/6857,https://issues.webrtc.org/issues/42231938 +https://crbug.com/webrtc/6858,https://issues.webrtc.org/issues/42231939 +https://crbug.com/webrtc/6859,https://issues.webrtc.org/issues/42231940 +https://crbug.com/webrtc/686,https://issues.webrtc.org/issues/42231941 +https://crbug.com/webrtc/6860,https://issues.webrtc.org/issues/42231942 +https://crbug.com/webrtc/6861,https://issues.webrtc.org/issues/42231943 +https://crbug.com/webrtc/6862,https://issues.webrtc.org/issues/42231944 +https://crbug.com/webrtc/6863,https://issues.webrtc.org/issues/42231945 +https://crbug.com/webrtc/6864,https://issues.webrtc.org/issues/42231946 +https://crbug.com/webrtc/6865,https://issues.webrtc.org/issues/42231947 +https://crbug.com/webrtc/6866,https://issues.webrtc.org/issues/42231948 +https://crbug.com/webrtc/6867,https://issues.webrtc.org/issues/42231949 
+https://crbug.com/webrtc/6868,https://issues.webrtc.org/issues/42231950 +https://crbug.com/webrtc/6869,https://issues.webrtc.org/issues/42231951 +https://crbug.com/webrtc/687,https://issues.webrtc.org/issues/42231952 +https://crbug.com/webrtc/6870,https://issues.webrtc.org/issues/42231953 +https://crbug.com/webrtc/6873,https://issues.webrtc.org/issues/42231954 +https://crbug.com/webrtc/6874,https://issues.webrtc.org/issues/42231955 +https://crbug.com/webrtc/6876,https://issues.webrtc.org/issues/42231956 +https://crbug.com/webrtc/6877,https://issues.webrtc.org/issues/42231957 +https://crbug.com/webrtc/6878,https://issues.webrtc.org/issues/42231958 +https://crbug.com/webrtc/6879,https://issues.webrtc.org/issues/42231959 +https://crbug.com/webrtc/688,https://issues.webrtc.org/issues/42231960 +https://crbug.com/webrtc/6880,https://issues.webrtc.org/issues/42231961 +https://crbug.com/webrtc/6881,https://issues.webrtc.org/issues/42231962 +https://crbug.com/webrtc/6882,https://issues.webrtc.org/issues/42231963 +https://crbug.com/webrtc/6883,https://issues.webrtc.org/issues/42231964 +https://crbug.com/webrtc/6884,https://issues.webrtc.org/issues/42231965 +https://crbug.com/webrtc/6885,https://issues.webrtc.org/issues/42231966 +https://crbug.com/webrtc/6886,https://issues.webrtc.org/issues/42231967 +https://crbug.com/webrtc/6887,https://issues.webrtc.org/issues/42231968 +https://crbug.com/webrtc/6888,https://issues.webrtc.org/issues/42231969 +https://crbug.com/webrtc/6889,https://issues.webrtc.org/issues/42231970 +https://crbug.com/webrtc/689,https://issues.webrtc.org/issues/42231971 +https://crbug.com/webrtc/6890,https://issues.webrtc.org/issues/42231972 +https://crbug.com/webrtc/6891,https://issues.webrtc.org/issues/42231973 +https://crbug.com/webrtc/6892,https://issues.webrtc.org/issues/42231974 +https://crbug.com/webrtc/6893,https://issues.webrtc.org/issues/42231975 +https://crbug.com/webrtc/6894,https://issues.webrtc.org/issues/42231976 +https://crbug.com/webrtc/6895,https://issues.webrtc.org/issues/42231977 +https://crbug.com/webrtc/6896,https://issues.webrtc.org/issues/42231978 +https://crbug.com/webrtc/6897,https://issues.webrtc.org/issues/42231979 +https://crbug.com/webrtc/6898,https://issues.webrtc.org/issues/42231980 +https://crbug.com/webrtc/6899,https://issues.webrtc.org/issues/42231981 +https://crbug.com/webrtc/69,https://issues.webrtc.org/issues/42231982 +https://crbug.com/webrtc/690,https://issues.webrtc.org/issues/42231983 +https://crbug.com/webrtc/6900,https://issues.webrtc.org/issues/42231984 +https://crbug.com/webrtc/6901,https://issues.webrtc.org/issues/42231985 +https://crbug.com/webrtc/6902,https://issues.webrtc.org/issues/42231986 +https://crbug.com/webrtc/6903,https://issues.webrtc.org/issues/42231987 +https://crbug.com/webrtc/6904,https://issues.webrtc.org/issues/42231988 +https://crbug.com/webrtc/6905,https://issues.webrtc.org/issues/42231989 +https://crbug.com/webrtc/6906,https://issues.webrtc.org/issues/42231990 +https://crbug.com/webrtc/6907,https://issues.webrtc.org/issues/42231991 +https://crbug.com/webrtc/6908,https://issues.webrtc.org/issues/42231992 +https://crbug.com/webrtc/6909,https://issues.webrtc.org/issues/42231993 +https://crbug.com/webrtc/691,https://issues.webrtc.org/issues/42231994 +https://crbug.com/webrtc/6910,https://issues.webrtc.org/issues/42231995 +https://crbug.com/webrtc/6911,https://issues.webrtc.org/issues/42231996 +https://crbug.com/webrtc/6912,https://issues.webrtc.org/issues/42231997 
+https://crbug.com/webrtc/6913,https://issues.webrtc.org/issues/42231998 +https://crbug.com/webrtc/6914,https://issues.webrtc.org/issues/42231999 +https://crbug.com/webrtc/6915,https://issues.webrtc.org/issues/42232000 +https://crbug.com/webrtc/6916,https://issues.webrtc.org/issues/42232001 +https://crbug.com/webrtc/6917,https://issues.webrtc.org/issues/42232002 +https://crbug.com/webrtc/6918,https://issues.webrtc.org/issues/42232003 +https://crbug.com/webrtc/6919,https://issues.webrtc.org/issues/42232004 +https://crbug.com/webrtc/692,https://issues.webrtc.org/issues/42232005 +https://crbug.com/webrtc/6921,https://issues.webrtc.org/issues/42232006 +https://crbug.com/webrtc/6922,https://issues.webrtc.org/issues/42232007 +https://crbug.com/webrtc/6923,https://issues.webrtc.org/issues/42232008 +https://crbug.com/webrtc/6924,https://issues.webrtc.org/issues/42232009 +https://crbug.com/webrtc/6925,https://issues.webrtc.org/issues/42232010 +https://crbug.com/webrtc/6926,https://issues.webrtc.org/issues/42232011 +https://crbug.com/webrtc/6927,https://issues.webrtc.org/issues/42232012 +https://crbug.com/webrtc/6928,https://issues.webrtc.org/issues/42232013 +https://crbug.com/webrtc/6929,https://issues.webrtc.org/issues/42232014 +https://crbug.com/webrtc/693,https://issues.webrtc.org/issues/42232015 +https://crbug.com/webrtc/6930,https://issues.webrtc.org/issues/42232016 +https://crbug.com/webrtc/6931,https://issues.webrtc.org/issues/42232017 +https://crbug.com/webrtc/6932,https://issues.webrtc.org/issues/42232018 +https://crbug.com/webrtc/6933,https://issues.webrtc.org/issues/42232019 +https://crbug.com/webrtc/6935,https://issues.webrtc.org/issues/42232020 +https://crbug.com/webrtc/6936,https://issues.webrtc.org/issues/42232021 +https://crbug.com/webrtc/6937,https://issues.webrtc.org/issues/42232022 +https://crbug.com/webrtc/6938,https://issues.webrtc.org/issues/42232023 +https://crbug.com/webrtc/6939,https://issues.webrtc.org/issues/42232024 +https://crbug.com/webrtc/694,https://issues.webrtc.org/issues/42232025 +https://crbug.com/webrtc/6940,https://issues.webrtc.org/issues/42232026 +https://crbug.com/webrtc/6941,https://issues.webrtc.org/issues/42232027 +https://crbug.com/webrtc/6942,https://issues.webrtc.org/issues/42232028 +https://crbug.com/webrtc/6943,https://issues.webrtc.org/issues/42232029 +https://crbug.com/webrtc/6944,https://issues.webrtc.org/issues/42232030 +https://crbug.com/webrtc/6945,https://issues.webrtc.org/issues/42232031 +https://crbug.com/webrtc/6946,https://issues.webrtc.org/issues/42232032 +https://crbug.com/webrtc/6947,https://issues.webrtc.org/issues/42232033 +https://crbug.com/webrtc/6948,https://issues.webrtc.org/issues/42232034 +https://crbug.com/webrtc/6949,https://issues.webrtc.org/issues/42232035 +https://crbug.com/webrtc/695,https://issues.webrtc.org/issues/42232036 +https://crbug.com/webrtc/6950,https://issues.webrtc.org/issues/42232037 +https://crbug.com/webrtc/6951,https://issues.webrtc.org/issues/42232038 +https://crbug.com/webrtc/6952,https://issues.webrtc.org/issues/42232039 +https://crbug.com/webrtc/6953,https://issues.webrtc.org/issues/42232040 +https://crbug.com/webrtc/6954,https://issues.webrtc.org/issues/42232041 +https://crbug.com/webrtc/6955,https://issues.webrtc.org/issues/42232042 +https://crbug.com/webrtc/6956,https://issues.webrtc.org/issues/42232043 +https://crbug.com/webrtc/6957,https://issues.webrtc.org/issues/42232044 +https://crbug.com/webrtc/6958,https://issues.webrtc.org/issues/42232045 
+https://crbug.com/webrtc/6959,https://issues.webrtc.org/issues/42232046 +https://crbug.com/webrtc/696,https://issues.webrtc.org/issues/42232047 +https://crbug.com/webrtc/6960,https://issues.webrtc.org/issues/42232048 +https://crbug.com/webrtc/6961,https://issues.webrtc.org/issues/42232049 +https://crbug.com/webrtc/6962,https://issues.webrtc.org/issues/42232050 +https://crbug.com/webrtc/6963,https://issues.webrtc.org/issues/42232051 +https://crbug.com/webrtc/6964,https://issues.webrtc.org/issues/42232052 +https://crbug.com/webrtc/6965,https://issues.webrtc.org/issues/42232053 +https://crbug.com/webrtc/6966,https://issues.webrtc.org/issues/42232054 +https://crbug.com/webrtc/6967,https://issues.webrtc.org/issues/42232055 +https://crbug.com/webrtc/6968,https://issues.webrtc.org/issues/42232056 +https://crbug.com/webrtc/6969,https://issues.webrtc.org/issues/42232057 +https://crbug.com/webrtc/697,https://issues.webrtc.org/issues/42232058 +https://crbug.com/webrtc/6970,https://issues.webrtc.org/issues/42232059 +https://crbug.com/webrtc/6971,https://issues.webrtc.org/issues/42232060 +https://crbug.com/webrtc/6973,https://issues.webrtc.org/issues/42232061 +https://crbug.com/webrtc/6974,https://issues.webrtc.org/issues/42232062 +https://crbug.com/webrtc/6975,https://issues.webrtc.org/issues/42232063 +https://crbug.com/webrtc/6976,https://issues.webrtc.org/issues/42232064 +https://crbug.com/webrtc/6977,https://issues.webrtc.org/issues/42232065 +https://crbug.com/webrtc/6978,https://issues.webrtc.org/issues/42232066 +https://crbug.com/webrtc/698,https://issues.webrtc.org/issues/42232067 +https://crbug.com/webrtc/6980,https://issues.webrtc.org/issues/42232068 +https://crbug.com/webrtc/6981,https://issues.webrtc.org/issues/42232069 +https://crbug.com/webrtc/6982,https://issues.webrtc.org/issues/42232070 +https://crbug.com/webrtc/6983,https://issues.webrtc.org/issues/42232071 +https://crbug.com/webrtc/6984,https://issues.webrtc.org/issues/42232072 +https://crbug.com/webrtc/6985,https://issues.webrtc.org/issues/42232073 +https://crbug.com/webrtc/6987,https://issues.webrtc.org/issues/42232074 +https://crbug.com/webrtc/6988,https://issues.webrtc.org/issues/42232075 +https://crbug.com/webrtc/6989,https://issues.webrtc.org/issues/42232076 +https://crbug.com/webrtc/699,https://issues.webrtc.org/issues/42232077 +https://crbug.com/webrtc/6990,https://issues.webrtc.org/issues/42232078 +https://crbug.com/webrtc/6991,https://issues.webrtc.org/issues/42232079 +https://crbug.com/webrtc/6992,https://issues.webrtc.org/issues/42232080 +https://crbug.com/webrtc/6993,https://issues.webrtc.org/issues/42232081 +https://crbug.com/webrtc/6994,https://issues.webrtc.org/issues/42232082 +https://crbug.com/webrtc/6995,https://issues.webrtc.org/issues/42232083 +https://crbug.com/webrtc/6996,https://issues.webrtc.org/issues/42232084 +https://crbug.com/webrtc/6997,https://issues.webrtc.org/issues/42232085 +https://crbug.com/webrtc/6998,https://issues.webrtc.org/issues/42232086 +https://crbug.com/webrtc/6999,https://issues.webrtc.org/issues/42232087 +https://crbug.com/webrtc/7,https://issues.webrtc.org/issues/42232088 +https://crbug.com/webrtc/70,https://issues.webrtc.org/issues/42232089 +https://crbug.com/webrtc/700,https://issues.webrtc.org/issues/42232090 +https://crbug.com/webrtc/7000,https://issues.webrtc.org/issues/42232091 +https://crbug.com/webrtc/7001,https://issues.webrtc.org/issues/42232092 +https://crbug.com/webrtc/7002,https://issues.webrtc.org/issues/42232093 
+https://crbug.com/webrtc/7003,https://issues.webrtc.org/issues/42232094 +https://crbug.com/webrtc/7004,https://issues.webrtc.org/issues/42232095 +https://crbug.com/webrtc/7005,https://issues.webrtc.org/issues/42232096 +https://crbug.com/webrtc/7006,https://issues.webrtc.org/issues/42232097 +https://crbug.com/webrtc/7007,https://issues.webrtc.org/issues/42232098 +https://crbug.com/webrtc/7008,https://issues.webrtc.org/issues/42232099 +https://crbug.com/webrtc/7009,https://issues.webrtc.org/issues/42232100 +https://crbug.com/webrtc/701,https://issues.webrtc.org/issues/42232101 +https://crbug.com/webrtc/7010,https://issues.webrtc.org/issues/42232102 +https://crbug.com/webrtc/7011,https://issues.webrtc.org/issues/42232103 +https://crbug.com/webrtc/7012,https://issues.webrtc.org/issues/42232104 +https://crbug.com/webrtc/7013,https://issues.webrtc.org/issues/42232105 +https://crbug.com/webrtc/7014,https://issues.webrtc.org/issues/42232106 +https://crbug.com/webrtc/7015,https://issues.webrtc.org/issues/42232107 +https://crbug.com/webrtc/7016,https://issues.webrtc.org/issues/42232108 +https://crbug.com/webrtc/7017,https://issues.webrtc.org/issues/42232109 +https://crbug.com/webrtc/7018,https://issues.webrtc.org/issues/42232110 +https://crbug.com/webrtc/7019,https://issues.webrtc.org/issues/42232111 +https://crbug.com/webrtc/702,https://issues.webrtc.org/issues/42232112 +https://crbug.com/webrtc/7020,https://issues.webrtc.org/issues/42232113 +https://crbug.com/webrtc/7021,https://issues.webrtc.org/issues/42232114 +https://crbug.com/webrtc/7022,https://issues.webrtc.org/issues/42232115 +https://crbug.com/webrtc/7023,https://issues.webrtc.org/issues/42232116 +https://crbug.com/webrtc/7025,https://issues.webrtc.org/issues/42232117 +https://crbug.com/webrtc/7026,https://issues.webrtc.org/issues/42232118 +https://crbug.com/webrtc/7027,https://issues.webrtc.org/issues/42232119 +https://crbug.com/webrtc/7028,https://issues.webrtc.org/issues/42232120 +https://crbug.com/webrtc/7029,https://issues.webrtc.org/issues/42232121 +https://crbug.com/webrtc/703,https://issues.webrtc.org/issues/42232122 +https://crbug.com/webrtc/7031,https://issues.webrtc.org/issues/42232123 +https://crbug.com/webrtc/7032,https://issues.webrtc.org/issues/42232124 +https://crbug.com/webrtc/7034,https://issues.webrtc.org/issues/42232125 +https://crbug.com/webrtc/7035,https://issues.webrtc.org/issues/42232126 +https://crbug.com/webrtc/7036,https://issues.webrtc.org/issues/42232127 +https://crbug.com/webrtc/7037,https://issues.webrtc.org/issues/42232128 +https://crbug.com/webrtc/7039,https://issues.webrtc.org/issues/42232129 +https://crbug.com/webrtc/7040,https://issues.webrtc.org/issues/42232130 +https://crbug.com/webrtc/7041,https://issues.webrtc.org/issues/42232131 +https://crbug.com/webrtc/7042,https://issues.webrtc.org/issues/42232132 +https://crbug.com/webrtc/7043,https://issues.webrtc.org/issues/42232133 +https://crbug.com/webrtc/7044,https://issues.webrtc.org/issues/42232134 +https://crbug.com/webrtc/7045,https://issues.webrtc.org/issues/42232135 +https://crbug.com/webrtc/7046,https://issues.webrtc.org/issues/42232136 +https://crbug.com/webrtc/7047,https://issues.webrtc.org/issues/42232137 +https://crbug.com/webrtc/7048,https://issues.webrtc.org/issues/42232138 +https://crbug.com/webrtc/7049,https://issues.webrtc.org/issues/42232139 +https://crbug.com/webrtc/705,https://issues.webrtc.org/issues/42232140 +https://crbug.com/webrtc/7050,https://issues.webrtc.org/issues/42232141 
+https://crbug.com/webrtc/7052,https://issues.webrtc.org/issues/42232142 +https://crbug.com/webrtc/7053,https://issues.webrtc.org/issues/42232143 +https://crbug.com/webrtc/7054,https://issues.webrtc.org/issues/42232144 +https://crbug.com/webrtc/7055,https://issues.webrtc.org/issues/42232145 +https://crbug.com/webrtc/7056,https://issues.webrtc.org/issues/42232146 +https://crbug.com/webrtc/7057,https://issues.webrtc.org/issues/42232147 +https://crbug.com/webrtc/7058,https://issues.webrtc.org/issues/42232148 +https://crbug.com/webrtc/7059,https://issues.webrtc.org/issues/42232149 +https://crbug.com/webrtc/706,https://issues.webrtc.org/issues/42232150 +https://crbug.com/webrtc/7068,https://issues.webrtc.org/issues/42232151 +https://crbug.com/webrtc/7069,https://issues.webrtc.org/issues/42232152 +https://crbug.com/webrtc/707,https://issues.webrtc.org/issues/42232153 +https://crbug.com/webrtc/7070,https://issues.webrtc.org/issues/42232154 +https://crbug.com/webrtc/7071,https://issues.webrtc.org/issues/42232155 +https://crbug.com/webrtc/7072,https://issues.webrtc.org/issues/42232156 +https://crbug.com/webrtc/7073,https://issues.webrtc.org/issues/42232157 +https://crbug.com/webrtc/7075,https://issues.webrtc.org/issues/42232158 +https://crbug.com/webrtc/7076,https://issues.webrtc.org/issues/42232159 +https://crbug.com/webrtc/7077,https://issues.webrtc.org/issues/42232160 +https://crbug.com/webrtc/7078,https://issues.webrtc.org/issues/42232161 +https://crbug.com/webrtc/7079,https://issues.webrtc.org/issues/42232162 +https://crbug.com/webrtc/708,https://issues.webrtc.org/issues/42232163 +https://crbug.com/webrtc/7080,https://issues.webrtc.org/issues/42232164 +https://crbug.com/webrtc/7081,https://issues.webrtc.org/issues/42232165 +https://crbug.com/webrtc/7082,https://issues.webrtc.org/issues/42232166 +https://crbug.com/webrtc/7083,https://issues.webrtc.org/issues/42232167 +https://crbug.com/webrtc/7084,https://issues.webrtc.org/issues/42232168 +https://crbug.com/webrtc/7085,https://issues.webrtc.org/issues/42232169 +https://crbug.com/webrtc/7086,https://issues.webrtc.org/issues/42232170 +https://crbug.com/webrtc/7087,https://issues.webrtc.org/issues/42232171 +https://crbug.com/webrtc/7088,https://issues.webrtc.org/issues/42232172 +https://crbug.com/webrtc/7089,https://issues.webrtc.org/issues/42232173 +https://crbug.com/webrtc/709,https://issues.webrtc.org/issues/42232174 +https://crbug.com/webrtc/7090,https://issues.webrtc.org/issues/42232175 +https://crbug.com/webrtc/7091,https://issues.webrtc.org/issues/42232176 +https://crbug.com/webrtc/7092,https://issues.webrtc.org/issues/42232177 +https://crbug.com/webrtc/7093,https://issues.webrtc.org/issues/42232178 +https://crbug.com/webrtc/7094,https://issues.webrtc.org/issues/42232179 +https://crbug.com/webrtc/7095,https://issues.webrtc.org/issues/42232180 +https://crbug.com/webrtc/7096,https://issues.webrtc.org/issues/42232181 +https://crbug.com/webrtc/7097,https://issues.webrtc.org/issues/42232182 +https://crbug.com/webrtc/7099,https://issues.webrtc.org/issues/42232183 +https://crbug.com/webrtc/71,https://issues.webrtc.org/issues/42232184 +https://crbug.com/webrtc/710,https://issues.webrtc.org/issues/42232185 +https://crbug.com/webrtc/7100,https://issues.webrtc.org/issues/42232186 +https://crbug.com/webrtc/7101,https://issues.webrtc.org/issues/42232187 +https://crbug.com/webrtc/7102,https://issues.webrtc.org/issues/42232188 +https://crbug.com/webrtc/7103,https://issues.webrtc.org/issues/42232189 
+https://crbug.com/webrtc/7104,https://issues.webrtc.org/issues/42232190 +https://crbug.com/webrtc/7105,https://issues.webrtc.org/issues/42232191 +https://crbug.com/webrtc/7106,https://issues.webrtc.org/issues/42232192 +https://crbug.com/webrtc/7107,https://issues.webrtc.org/issues/42232193 +https://crbug.com/webrtc/7108,https://issues.webrtc.org/issues/42232194 +https://crbug.com/webrtc/7109,https://issues.webrtc.org/issues/42232195 +https://crbug.com/webrtc/711,https://issues.webrtc.org/issues/42232196 +https://crbug.com/webrtc/7110,https://issues.webrtc.org/issues/42232197 +https://crbug.com/webrtc/7111,https://issues.webrtc.org/issues/42232198 +https://crbug.com/webrtc/7112,https://issues.webrtc.org/issues/42232199 +https://crbug.com/webrtc/7113,https://issues.webrtc.org/issues/42232200 +https://crbug.com/webrtc/7114,https://issues.webrtc.org/issues/42232201 +https://crbug.com/webrtc/7115,https://issues.webrtc.org/issues/42232202 +https://crbug.com/webrtc/7116,https://issues.webrtc.org/issues/42232203 +https://crbug.com/webrtc/7117,https://issues.webrtc.org/issues/42232204 +https://crbug.com/webrtc/7118,https://issues.webrtc.org/issues/42232205 +https://crbug.com/webrtc/7119,https://issues.webrtc.org/issues/42232206 +https://crbug.com/webrtc/712,https://issues.webrtc.org/issues/42232207 +https://crbug.com/webrtc/7120,https://issues.webrtc.org/issues/42232208 +https://crbug.com/webrtc/7121,https://issues.webrtc.org/issues/42232209 +https://crbug.com/webrtc/7122,https://issues.webrtc.org/issues/42232210 +https://crbug.com/webrtc/7123,https://issues.webrtc.org/issues/42232211 +https://crbug.com/webrtc/7124,https://issues.webrtc.org/issues/42232212 +https://crbug.com/webrtc/7125,https://issues.webrtc.org/issues/42232213 +https://crbug.com/webrtc/7126,https://issues.webrtc.org/issues/42232214 +https://crbug.com/webrtc/7127,https://issues.webrtc.org/issues/42232215 +https://crbug.com/webrtc/7128,https://issues.webrtc.org/issues/42232216 +https://crbug.com/webrtc/7129,https://issues.webrtc.org/issues/42232217 +https://crbug.com/webrtc/713,https://issues.webrtc.org/issues/42232218 +https://crbug.com/webrtc/7130,https://issues.webrtc.org/issues/42232219 +https://crbug.com/webrtc/7131,https://issues.webrtc.org/issues/42232220 +https://crbug.com/webrtc/7132,https://issues.webrtc.org/issues/42232221 +https://crbug.com/webrtc/7133,https://issues.webrtc.org/issues/42232222 +https://crbug.com/webrtc/7134,https://issues.webrtc.org/issues/42232223 +https://crbug.com/webrtc/7135,https://issues.webrtc.org/issues/42232224 +https://crbug.com/webrtc/7136,https://issues.webrtc.org/issues/42232225 +https://crbug.com/webrtc/7137,https://issues.webrtc.org/issues/42232226 +https://crbug.com/webrtc/7138,https://issues.webrtc.org/issues/42232227 +https://crbug.com/webrtc/7139,https://issues.webrtc.org/issues/42232228 +https://crbug.com/webrtc/714,https://issues.webrtc.org/issues/42232229 +https://crbug.com/webrtc/7140,https://issues.webrtc.org/issues/42232230 +https://crbug.com/webrtc/7141,https://issues.webrtc.org/issues/42232231 +https://crbug.com/webrtc/7142,https://issues.webrtc.org/issues/42232232 +https://crbug.com/webrtc/7143,https://issues.webrtc.org/issues/42232233 +https://crbug.com/webrtc/7144,https://issues.webrtc.org/issues/42232234 +https://crbug.com/webrtc/7145,https://issues.webrtc.org/issues/42232235 +https://crbug.com/webrtc/7146,https://issues.webrtc.org/issues/42232236 +https://crbug.com/webrtc/7147,https://issues.webrtc.org/issues/42232237 
+https://crbug.com/webrtc/7148,https://issues.webrtc.org/issues/42232238 +https://crbug.com/webrtc/7149,https://issues.webrtc.org/issues/42232239 +https://crbug.com/webrtc/715,https://issues.webrtc.org/issues/42232240 +https://crbug.com/webrtc/7150,https://issues.webrtc.org/issues/42232241 +https://crbug.com/webrtc/7151,https://issues.webrtc.org/issues/42232242 +https://crbug.com/webrtc/7152,https://issues.webrtc.org/issues/42232243 +https://crbug.com/webrtc/7153,https://issues.webrtc.org/issues/42232244 +https://crbug.com/webrtc/7154,https://issues.webrtc.org/issues/42232245 +https://crbug.com/webrtc/7155,https://issues.webrtc.org/issues/42232246 +https://crbug.com/webrtc/7156,https://issues.webrtc.org/issues/42232247 +https://crbug.com/webrtc/7157,https://issues.webrtc.org/issues/42232248 +https://crbug.com/webrtc/7158,https://issues.webrtc.org/issues/42232249 +https://crbug.com/webrtc/7159,https://issues.webrtc.org/issues/42232250 +https://crbug.com/webrtc/716,https://issues.webrtc.org/issues/42232251 +https://crbug.com/webrtc/7160,https://issues.webrtc.org/issues/42232252 +https://crbug.com/webrtc/7162,https://issues.webrtc.org/issues/42232253 +https://crbug.com/webrtc/7163,https://issues.webrtc.org/issues/42232254 +https://crbug.com/webrtc/7164,https://issues.webrtc.org/issues/42232255 +https://crbug.com/webrtc/7165,https://issues.webrtc.org/issues/42232256 +https://crbug.com/webrtc/7166,https://issues.webrtc.org/issues/42232257 +https://crbug.com/webrtc/7167,https://issues.webrtc.org/issues/42232258 +https://crbug.com/webrtc/7168,https://issues.webrtc.org/issues/42232259 +https://crbug.com/webrtc/7169,https://issues.webrtc.org/issues/42232260 +https://crbug.com/webrtc/717,https://issues.webrtc.org/issues/42232261 +https://crbug.com/webrtc/7170,https://issues.webrtc.org/issues/42232262 +https://crbug.com/webrtc/7171,https://issues.webrtc.org/issues/42232263 +https://crbug.com/webrtc/7172,https://issues.webrtc.org/issues/42232264 +https://crbug.com/webrtc/7173,https://issues.webrtc.org/issues/42232265 +https://crbug.com/webrtc/7174,https://issues.webrtc.org/issues/42232266 +https://crbug.com/webrtc/7175,https://issues.webrtc.org/issues/42232267 +https://crbug.com/webrtc/7176,https://issues.webrtc.org/issues/42232268 +https://crbug.com/webrtc/7177,https://issues.webrtc.org/issues/42232269 +https://crbug.com/webrtc/7178,https://issues.webrtc.org/issues/42232270 +https://crbug.com/webrtc/7179,https://issues.webrtc.org/issues/42232271 +https://crbug.com/webrtc/718,https://issues.webrtc.org/issues/42232272 +https://crbug.com/webrtc/7180,https://issues.webrtc.org/issues/42232273 +https://crbug.com/webrtc/7181,https://issues.webrtc.org/issues/42232274 +https://crbug.com/webrtc/7182,https://issues.webrtc.org/issues/42232275 +https://crbug.com/webrtc/7183,https://issues.webrtc.org/issues/42232276 +https://crbug.com/webrtc/7184,https://issues.webrtc.org/issues/42232277 +https://crbug.com/webrtc/7185,https://issues.webrtc.org/issues/42232278 +https://crbug.com/webrtc/7186,https://issues.webrtc.org/issues/42232279 +https://crbug.com/webrtc/7187,https://issues.webrtc.org/issues/42232280 +https://crbug.com/webrtc/7188,https://issues.webrtc.org/issues/42232281 +https://crbug.com/webrtc/7189,https://issues.webrtc.org/issues/42232282 +https://crbug.com/webrtc/719,https://issues.webrtc.org/issues/42232283 +https://crbug.com/webrtc/7190,https://issues.webrtc.org/issues/42232284 +https://crbug.com/webrtc/7191,https://issues.webrtc.org/issues/42232285 
+https://crbug.com/webrtc/7192,https://issues.webrtc.org/issues/42232286 +https://crbug.com/webrtc/7193,https://issues.webrtc.org/issues/42232287 +https://crbug.com/webrtc/7194,https://issues.webrtc.org/issues/42232288 +https://crbug.com/webrtc/7195,https://issues.webrtc.org/issues/42232289 +https://crbug.com/webrtc/7196,https://issues.webrtc.org/issues/42232290 +https://crbug.com/webrtc/7197,https://issues.webrtc.org/issues/42232291 +https://crbug.com/webrtc/7198,https://issues.webrtc.org/issues/42232292 +https://crbug.com/webrtc/7199,https://issues.webrtc.org/issues/42232293 +https://crbug.com/webrtc/72,https://issues.webrtc.org/issues/42232294 +https://crbug.com/webrtc/720,https://issues.webrtc.org/issues/42232295 +https://crbug.com/webrtc/7200,https://issues.webrtc.org/issues/42232296 +https://crbug.com/webrtc/7201,https://issues.webrtc.org/issues/42232297 +https://crbug.com/webrtc/7202,https://issues.webrtc.org/issues/42232298 +https://crbug.com/webrtc/7203,https://issues.webrtc.org/issues/42232299 +https://crbug.com/webrtc/7204,https://issues.webrtc.org/issues/42232300 +https://crbug.com/webrtc/7205,https://issues.webrtc.org/issues/42232301 +https://crbug.com/webrtc/7206,https://issues.webrtc.org/issues/42232302 +https://crbug.com/webrtc/7207,https://issues.webrtc.org/issues/42232303 +https://crbug.com/webrtc/7208,https://issues.webrtc.org/issues/42232304 +https://crbug.com/webrtc/7209,https://issues.webrtc.org/issues/42232305 +https://crbug.com/webrtc/721,https://issues.webrtc.org/issues/42232306 +https://crbug.com/webrtc/7210,https://issues.webrtc.org/issues/42232307 +https://crbug.com/webrtc/7211,https://issues.webrtc.org/issues/42232308 +https://crbug.com/webrtc/7212,https://issues.webrtc.org/issues/42232309 +https://crbug.com/webrtc/7213,https://issues.webrtc.org/issues/42232310 +https://crbug.com/webrtc/7214,https://issues.webrtc.org/issues/42232311 +https://crbug.com/webrtc/7215,https://issues.webrtc.org/issues/42232312 +https://crbug.com/webrtc/7216,https://issues.webrtc.org/issues/42232313 +https://crbug.com/webrtc/7217,https://issues.webrtc.org/issues/42232314 +https://crbug.com/webrtc/7218,https://issues.webrtc.org/issues/42232315 +https://crbug.com/webrtc/7219,https://issues.webrtc.org/issues/42232316 +https://crbug.com/webrtc/722,https://issues.webrtc.org/issues/42232317 +https://crbug.com/webrtc/7220,https://issues.webrtc.org/issues/42232318 +https://crbug.com/webrtc/7221,https://issues.webrtc.org/issues/42232319 +https://crbug.com/webrtc/7222,https://issues.webrtc.org/issues/42232320 +https://crbug.com/webrtc/7223,https://issues.webrtc.org/issues/42232321 +https://crbug.com/webrtc/7224,https://issues.webrtc.org/issues/42232322 +https://crbug.com/webrtc/7225,https://issues.webrtc.org/issues/42232323 +https://crbug.com/webrtc/7226,https://issues.webrtc.org/issues/42232324 +https://crbug.com/webrtc/7227,https://issues.webrtc.org/issues/42232325 +https://crbug.com/webrtc/7228,https://issues.webrtc.org/issues/42232326 +https://crbug.com/webrtc/723,https://issues.webrtc.org/issues/42232327 +https://crbug.com/webrtc/7230,https://issues.webrtc.org/issues/42232328 +https://crbug.com/webrtc/7231,https://issues.webrtc.org/issues/42232329 +https://crbug.com/webrtc/7232,https://issues.webrtc.org/issues/42232330 +https://crbug.com/webrtc/7233,https://issues.webrtc.org/issues/42232331 +https://crbug.com/webrtc/7234,https://issues.webrtc.org/issues/42232332 +https://crbug.com/webrtc/7235,https://issues.webrtc.org/issues/42232333 
+https://crbug.com/webrtc/7236,https://issues.webrtc.org/issues/42232334 +https://crbug.com/webrtc/7237,https://issues.webrtc.org/issues/42232335 +https://crbug.com/webrtc/7238,https://issues.webrtc.org/issues/42232336 +https://crbug.com/webrtc/7239,https://issues.webrtc.org/issues/42232337 +https://crbug.com/webrtc/724,https://issues.webrtc.org/issues/42232338 +https://crbug.com/webrtc/7240,https://issues.webrtc.org/issues/42232339 +https://crbug.com/webrtc/7241,https://issues.webrtc.org/issues/42232340 +https://crbug.com/webrtc/7242,https://issues.webrtc.org/issues/42232341 +https://crbug.com/webrtc/7243,https://issues.webrtc.org/issues/42232342 +https://crbug.com/webrtc/7244,https://issues.webrtc.org/issues/42232343 +https://crbug.com/webrtc/7245,https://issues.webrtc.org/issues/42232344 +https://crbug.com/webrtc/7246,https://issues.webrtc.org/issues/42232345 +https://crbug.com/webrtc/7247,https://issues.webrtc.org/issues/42232346 +https://crbug.com/webrtc/7248,https://issues.webrtc.org/issues/42232347 +https://crbug.com/webrtc/7249,https://issues.webrtc.org/issues/42232348 +https://crbug.com/webrtc/725,https://issues.webrtc.org/issues/42232349 +https://crbug.com/webrtc/7250,https://issues.webrtc.org/issues/42232350 +https://crbug.com/webrtc/7251,https://issues.webrtc.org/issues/42232351 +https://crbug.com/webrtc/7252,https://issues.webrtc.org/issues/42232352 +https://crbug.com/webrtc/7253,https://issues.webrtc.org/issues/42232353 +https://crbug.com/webrtc/7254,https://issues.webrtc.org/issues/42232354 +https://crbug.com/webrtc/7255,https://issues.webrtc.org/issues/42232355 +https://crbug.com/webrtc/7256,https://issues.webrtc.org/issues/42232356 +https://crbug.com/webrtc/7257,https://issues.webrtc.org/issues/42232357 +https://crbug.com/webrtc/7258,https://issues.webrtc.org/issues/42232358 +https://crbug.com/webrtc/7259,https://issues.webrtc.org/issues/42232359 +https://crbug.com/webrtc/726,https://issues.webrtc.org/issues/42232360 +https://crbug.com/webrtc/7260,https://issues.webrtc.org/issues/42232361 +https://crbug.com/webrtc/7261,https://issues.webrtc.org/issues/42232362 +https://crbug.com/webrtc/7262,https://issues.webrtc.org/issues/42232363 +https://crbug.com/webrtc/7263,https://issues.webrtc.org/issues/42232364 +https://crbug.com/webrtc/7264,https://issues.webrtc.org/issues/42232365 +https://crbug.com/webrtc/7265,https://issues.webrtc.org/issues/42232366 +https://crbug.com/webrtc/7266,https://issues.webrtc.org/issues/42232367 +https://crbug.com/webrtc/7267,https://issues.webrtc.org/issues/42232368 +https://crbug.com/webrtc/7268,https://issues.webrtc.org/issues/42232369 +https://crbug.com/webrtc/7269,https://issues.webrtc.org/issues/42232370 +https://crbug.com/webrtc/727,https://issues.webrtc.org/issues/42232371 +https://crbug.com/webrtc/7270,https://issues.webrtc.org/issues/42232372 +https://crbug.com/webrtc/7271,https://issues.webrtc.org/issues/42232373 +https://crbug.com/webrtc/7272,https://issues.webrtc.org/issues/42232374 +https://crbug.com/webrtc/7273,https://issues.webrtc.org/issues/42232375 +https://crbug.com/webrtc/7274,https://issues.webrtc.org/issues/42232376 +https://crbug.com/webrtc/7275,https://issues.webrtc.org/issues/42232377 +https://crbug.com/webrtc/7276,https://issues.webrtc.org/issues/42232378 +https://crbug.com/webrtc/7277,https://issues.webrtc.org/issues/42232379 +https://crbug.com/webrtc/7278,https://issues.webrtc.org/issues/42232380 +https://crbug.com/webrtc/7279,https://issues.webrtc.org/issues/42232381 
+https://crbug.com/webrtc/728,https://issues.webrtc.org/issues/42232382 +https://crbug.com/webrtc/7280,https://issues.webrtc.org/issues/42232383 +https://crbug.com/webrtc/7281,https://issues.webrtc.org/issues/42232384 +https://crbug.com/webrtc/7282,https://issues.webrtc.org/issues/42232385 +https://crbug.com/webrtc/7283,https://issues.webrtc.org/issues/42232386 +https://crbug.com/webrtc/7284,https://issues.webrtc.org/issues/42232387 +https://crbug.com/webrtc/7285,https://issues.webrtc.org/issues/42232388 +https://crbug.com/webrtc/7286,https://issues.webrtc.org/issues/42232389 +https://crbug.com/webrtc/7287,https://issues.webrtc.org/issues/42232390 +https://crbug.com/webrtc/7288,https://issues.webrtc.org/issues/42232391 +https://crbug.com/webrtc/7289,https://issues.webrtc.org/issues/42232392 +https://crbug.com/webrtc/729,https://issues.webrtc.org/issues/42232393 +https://crbug.com/webrtc/7290,https://issues.webrtc.org/issues/42232394 +https://crbug.com/webrtc/7291,https://issues.webrtc.org/issues/42232395 +https://crbug.com/webrtc/7292,https://issues.webrtc.org/issues/42232396 +https://crbug.com/webrtc/7293,https://issues.webrtc.org/issues/42232397 +https://crbug.com/webrtc/7294,https://issues.webrtc.org/issues/42232398 +https://crbug.com/webrtc/7295,https://issues.webrtc.org/issues/42232399 +https://crbug.com/webrtc/7296,https://issues.webrtc.org/issues/42232400 +https://crbug.com/webrtc/7297,https://issues.webrtc.org/issues/42232401 +https://crbug.com/webrtc/7298,https://issues.webrtc.org/issues/42232402 +https://crbug.com/webrtc/7299,https://issues.webrtc.org/issues/42232403 +https://crbug.com/webrtc/73,https://issues.webrtc.org/issues/42232404 +https://crbug.com/webrtc/730,https://issues.webrtc.org/issues/42232405 +https://crbug.com/webrtc/7300,https://issues.webrtc.org/issues/42232406 +https://crbug.com/webrtc/7301,https://issues.webrtc.org/issues/42232407 +https://crbug.com/webrtc/7302,https://issues.webrtc.org/issues/42232408 +https://crbug.com/webrtc/7303,https://issues.webrtc.org/issues/42232409 +https://crbug.com/webrtc/7305,https://issues.webrtc.org/issues/42232410 +https://crbug.com/webrtc/7306,https://issues.webrtc.org/issues/42232411 +https://crbug.com/webrtc/7308,https://issues.webrtc.org/issues/42232412 +https://crbug.com/webrtc/7309,https://issues.webrtc.org/issues/42232413 +https://crbug.com/webrtc/731,https://issues.webrtc.org/issues/42232414 +https://crbug.com/webrtc/7310,https://issues.webrtc.org/issues/42232415 +https://crbug.com/webrtc/7311,https://issues.webrtc.org/issues/42232416 +https://crbug.com/webrtc/7312,https://issues.webrtc.org/issues/42232417 +https://crbug.com/webrtc/7313,https://issues.webrtc.org/issues/42232418 +https://crbug.com/webrtc/7315,https://issues.webrtc.org/issues/42232419 +https://crbug.com/webrtc/7316,https://issues.webrtc.org/issues/42232420 +https://crbug.com/webrtc/7318,https://issues.webrtc.org/issues/42232421 +https://crbug.com/webrtc/7319,https://issues.webrtc.org/issues/42232422 +https://crbug.com/webrtc/732,https://issues.webrtc.org/issues/42232423 +https://crbug.com/webrtc/7320,https://issues.webrtc.org/issues/42232424 +https://crbug.com/webrtc/7321,https://issues.webrtc.org/issues/42232425 +https://crbug.com/webrtc/7322,https://issues.webrtc.org/issues/42232426 +https://crbug.com/webrtc/7323,https://issues.webrtc.org/issues/42232427 +https://crbug.com/webrtc/7324,https://issues.webrtc.org/issues/42232428 +https://crbug.com/webrtc/7325,https://issues.webrtc.org/issues/42232429 
+https://crbug.com/webrtc/7326,https://issues.webrtc.org/issues/42232430 +https://crbug.com/webrtc/7327,https://issues.webrtc.org/issues/42232431 +https://crbug.com/webrtc/7328,https://issues.webrtc.org/issues/42232432 +https://crbug.com/webrtc/7329,https://issues.webrtc.org/issues/42232433 +https://crbug.com/webrtc/733,https://issues.webrtc.org/issues/42232434 +https://crbug.com/webrtc/7330,https://issues.webrtc.org/issues/42232435 +https://crbug.com/webrtc/7331,https://issues.webrtc.org/issues/42232436 +https://crbug.com/webrtc/7333,https://issues.webrtc.org/issues/42232437 +https://crbug.com/webrtc/7334,https://issues.webrtc.org/issues/42232438 +https://crbug.com/webrtc/7335,https://issues.webrtc.org/issues/42232439 +https://crbug.com/webrtc/7336,https://issues.webrtc.org/issues/42232440 +https://crbug.com/webrtc/7337,https://issues.webrtc.org/issues/42232441 +https://crbug.com/webrtc/7338,https://issues.webrtc.org/issues/42232442 +https://crbug.com/webrtc/7339,https://issues.webrtc.org/issues/42232443 +https://crbug.com/webrtc/734,https://issues.webrtc.org/issues/42232444 +https://crbug.com/webrtc/7340,https://issues.webrtc.org/issues/42232445 +https://crbug.com/webrtc/7341,https://issues.webrtc.org/issues/42232446 +https://crbug.com/webrtc/7342,https://issues.webrtc.org/issues/42232447 +https://crbug.com/webrtc/7343,https://issues.webrtc.org/issues/42232448 +https://crbug.com/webrtc/7344,https://issues.webrtc.org/issues/42232449 +https://crbug.com/webrtc/7345,https://issues.webrtc.org/issues/42232450 +https://crbug.com/webrtc/7346,https://issues.webrtc.org/issues/42232451 +https://crbug.com/webrtc/7347,https://issues.webrtc.org/issues/42232452 +https://crbug.com/webrtc/7348,https://issues.webrtc.org/issues/42232453 +https://crbug.com/webrtc/7349,https://issues.webrtc.org/issues/42232454 +https://crbug.com/webrtc/735,https://issues.webrtc.org/issues/42232455 +https://crbug.com/webrtc/7350,https://issues.webrtc.org/issues/42232456 +https://crbug.com/webrtc/7351,https://issues.webrtc.org/issues/42232457 +https://crbug.com/webrtc/7352,https://issues.webrtc.org/issues/42232458 +https://crbug.com/webrtc/7353,https://issues.webrtc.org/issues/42232459 +https://crbug.com/webrtc/7354,https://issues.webrtc.org/issues/42232460 +https://crbug.com/webrtc/7356,https://issues.webrtc.org/issues/42232461 +https://crbug.com/webrtc/7357,https://issues.webrtc.org/issues/42232462 +https://crbug.com/webrtc/7358,https://issues.webrtc.org/issues/42232463 +https://crbug.com/webrtc/7359,https://issues.webrtc.org/issues/42232464 +https://crbug.com/webrtc/736,https://issues.webrtc.org/issues/42232465 +https://crbug.com/webrtc/7360,https://issues.webrtc.org/issues/42232466 +https://crbug.com/webrtc/7362,https://issues.webrtc.org/issues/42232467 +https://crbug.com/webrtc/7363,https://issues.webrtc.org/issues/42232468 +https://crbug.com/webrtc/7364,https://issues.webrtc.org/issues/42232469 +https://crbug.com/webrtc/7365,https://issues.webrtc.org/issues/42232470 +https://crbug.com/webrtc/7366,https://issues.webrtc.org/issues/42232471 +https://crbug.com/webrtc/7367,https://issues.webrtc.org/issues/42232472 +https://crbug.com/webrtc/7368,https://issues.webrtc.org/issues/42232473 +https://crbug.com/webrtc/7369,https://issues.webrtc.org/issues/42232474 +https://crbug.com/webrtc/737,https://issues.webrtc.org/issues/42232475 +https://crbug.com/webrtc/7370,https://issues.webrtc.org/issues/42232476 +https://crbug.com/webrtc/7371,https://issues.webrtc.org/issues/42232477 
+https://crbug.com/webrtc/7372,https://issues.webrtc.org/issues/42232478 +https://crbug.com/webrtc/7373,https://issues.webrtc.org/issues/42232479 +https://crbug.com/webrtc/7374,https://issues.webrtc.org/issues/42232480 +https://crbug.com/webrtc/7375,https://issues.webrtc.org/issues/42232481 +https://crbug.com/webrtc/7376,https://issues.webrtc.org/issues/42232482 +https://crbug.com/webrtc/7377,https://issues.webrtc.org/issues/42232483 +https://crbug.com/webrtc/7378,https://issues.webrtc.org/issues/42232484 +https://crbug.com/webrtc/7379,https://issues.webrtc.org/issues/42232485 +https://crbug.com/webrtc/738,https://issues.webrtc.org/issues/42232486 +https://crbug.com/webrtc/7380,https://issues.webrtc.org/issues/42232487 +https://crbug.com/webrtc/7381,https://issues.webrtc.org/issues/42232488 +https://crbug.com/webrtc/7382,https://issues.webrtc.org/issues/42232489 +https://crbug.com/webrtc/7383,https://issues.webrtc.org/issues/42232490 +https://crbug.com/webrtc/7384,https://issues.webrtc.org/issues/42232491 +https://crbug.com/webrtc/7385,https://issues.webrtc.org/issues/42232492 +https://crbug.com/webrtc/7386,https://issues.webrtc.org/issues/42232493 +https://crbug.com/webrtc/7387,https://issues.webrtc.org/issues/42232494 +https://crbug.com/webrtc/7388,https://issues.webrtc.org/issues/42232495 +https://crbug.com/webrtc/7389,https://issues.webrtc.org/issues/42232496 +https://crbug.com/webrtc/739,https://issues.webrtc.org/issues/42232497 +https://crbug.com/webrtc/7390,https://issues.webrtc.org/issues/42232498 +https://crbug.com/webrtc/7391,https://issues.webrtc.org/issues/42232499 +https://crbug.com/webrtc/7392,https://issues.webrtc.org/issues/42232500 +https://crbug.com/webrtc/7393,https://issues.webrtc.org/issues/42232501 +https://crbug.com/webrtc/7394,https://issues.webrtc.org/issues/42232502 +https://crbug.com/webrtc/7395,https://issues.webrtc.org/issues/42232503 +https://crbug.com/webrtc/7396,https://issues.webrtc.org/issues/42232504 +https://crbug.com/webrtc/7397,https://issues.webrtc.org/issues/42232505 +https://crbug.com/webrtc/7398,https://issues.webrtc.org/issues/42232506 +https://crbug.com/webrtc/7399,https://issues.webrtc.org/issues/42232507 +https://crbug.com/webrtc/74,https://issues.webrtc.org/issues/42232508 +https://crbug.com/webrtc/740,https://issues.webrtc.org/issues/42232509 +https://crbug.com/webrtc/7400,https://issues.webrtc.org/issues/42232510 +https://crbug.com/webrtc/7401,https://issues.webrtc.org/issues/42232511 +https://crbug.com/webrtc/7402,https://issues.webrtc.org/issues/42232512 +https://crbug.com/webrtc/7403,https://issues.webrtc.org/issues/42232513 +https://crbug.com/webrtc/7404,https://issues.webrtc.org/issues/42232514 +https://crbug.com/webrtc/7405,https://issues.webrtc.org/issues/42232515 +https://crbug.com/webrtc/7406,https://issues.webrtc.org/issues/42232516 +https://crbug.com/webrtc/7407,https://issues.webrtc.org/issues/42232517 +https://crbug.com/webrtc/7408,https://issues.webrtc.org/issues/42232518 +https://crbug.com/webrtc/7409,https://issues.webrtc.org/issues/42232519 +https://crbug.com/webrtc/741,https://issues.webrtc.org/issues/42232520 +https://crbug.com/webrtc/7410,https://issues.webrtc.org/issues/42232521 +https://crbug.com/webrtc/7411,https://issues.webrtc.org/issues/42232522 +https://crbug.com/webrtc/7412,https://issues.webrtc.org/issues/42232523 +https://crbug.com/webrtc/7413,https://issues.webrtc.org/issues/42232524 +https://crbug.com/webrtc/7414,https://issues.webrtc.org/issues/42232525 
+https://crbug.com/webrtc/7415,https://issues.webrtc.org/issues/42232526 +https://crbug.com/webrtc/7416,https://issues.webrtc.org/issues/42232527 +https://crbug.com/webrtc/7417,https://issues.webrtc.org/issues/42232528 +https://crbug.com/webrtc/7418,https://issues.webrtc.org/issues/42232529 +https://crbug.com/webrtc/7419,https://issues.webrtc.org/issues/42232530 +https://crbug.com/webrtc/742,https://issues.webrtc.org/issues/42232531 +https://crbug.com/webrtc/7420,https://issues.webrtc.org/issues/42232532 +https://crbug.com/webrtc/7421,https://issues.webrtc.org/issues/42232533 +https://crbug.com/webrtc/7422,https://issues.webrtc.org/issues/42232534 +https://crbug.com/webrtc/7423,https://issues.webrtc.org/issues/42232535 +https://crbug.com/webrtc/7424,https://issues.webrtc.org/issues/42232536 +https://crbug.com/webrtc/7425,https://issues.webrtc.org/issues/42232537 +https://crbug.com/webrtc/7426,https://issues.webrtc.org/issues/42232538 +https://crbug.com/webrtc/7427,https://issues.webrtc.org/issues/42232539 +https://crbug.com/webrtc/7428,https://issues.webrtc.org/issues/42232540 +https://crbug.com/webrtc/7430,https://issues.webrtc.org/issues/42232541 +https://crbug.com/webrtc/7431,https://issues.webrtc.org/issues/42232542 +https://crbug.com/webrtc/7432,https://issues.webrtc.org/issues/42232543 +https://crbug.com/webrtc/7433,https://issues.webrtc.org/issues/42232544 +https://crbug.com/webrtc/7434,https://issues.webrtc.org/issues/42232545 +https://crbug.com/webrtc/7435,https://issues.webrtc.org/issues/42232546 +https://crbug.com/webrtc/7436,https://issues.webrtc.org/issues/42232547 +https://crbug.com/webrtc/7438,https://issues.webrtc.org/issues/42232548 +https://crbug.com/webrtc/7439,https://issues.webrtc.org/issues/42232549 +https://crbug.com/webrtc/744,https://issues.webrtc.org/issues/42232550 +https://crbug.com/webrtc/7440,https://issues.webrtc.org/issues/42232551 +https://crbug.com/webrtc/7441,https://issues.webrtc.org/issues/42232552 +https://crbug.com/webrtc/7442,https://issues.webrtc.org/issues/42232553 +https://crbug.com/webrtc/7444,https://issues.webrtc.org/issues/42232554 +https://crbug.com/webrtc/7446,https://issues.webrtc.org/issues/42232555 +https://crbug.com/webrtc/7447,https://issues.webrtc.org/issues/42232556 +https://crbug.com/webrtc/7448,https://issues.webrtc.org/issues/42232557 +https://crbug.com/webrtc/7449,https://issues.webrtc.org/issues/42232558 +https://crbug.com/webrtc/745,https://issues.webrtc.org/issues/42232559 +https://crbug.com/webrtc/7450,https://issues.webrtc.org/issues/42232560 +https://crbug.com/webrtc/7451,https://issues.webrtc.org/issues/42232561 +https://crbug.com/webrtc/7452,https://issues.webrtc.org/issues/42232562 +https://crbug.com/webrtc/7454,https://issues.webrtc.org/issues/42232563 +https://crbug.com/webrtc/7455,https://issues.webrtc.org/issues/42232564 +https://crbug.com/webrtc/7456,https://issues.webrtc.org/issues/42232565 +https://crbug.com/webrtc/7457,https://issues.webrtc.org/issues/42232566 +https://crbug.com/webrtc/7458,https://issues.webrtc.org/issues/42232567 +https://crbug.com/webrtc/7459,https://issues.webrtc.org/issues/42232568 +https://crbug.com/webrtc/746,https://issues.webrtc.org/issues/42232569 +https://crbug.com/webrtc/7460,https://issues.webrtc.org/issues/42232570 +https://crbug.com/webrtc/7461,https://issues.webrtc.org/issues/42232571 +https://crbug.com/webrtc/7462,https://issues.webrtc.org/issues/42232572 +https://crbug.com/webrtc/7463,https://issues.webrtc.org/issues/42232573 
+https://crbug.com/webrtc/7464,https://issues.webrtc.org/issues/42232574 +https://crbug.com/webrtc/7465,https://issues.webrtc.org/issues/42232575 +https://crbug.com/webrtc/7466,https://issues.webrtc.org/issues/42232576 +https://crbug.com/webrtc/7467,https://issues.webrtc.org/issues/42232577 +https://crbug.com/webrtc/7468,https://issues.webrtc.org/issues/42232578 +https://crbug.com/webrtc/7469,https://issues.webrtc.org/issues/42232579 +https://crbug.com/webrtc/747,https://issues.webrtc.org/issues/42232580 +https://crbug.com/webrtc/7470,https://issues.webrtc.org/issues/42232581 +https://crbug.com/webrtc/7471,https://issues.webrtc.org/issues/42232582 +https://crbug.com/webrtc/7472,https://issues.webrtc.org/issues/42232583 +https://crbug.com/webrtc/7473,https://issues.webrtc.org/issues/42232584 +https://crbug.com/webrtc/7474,https://issues.webrtc.org/issues/42232585 +https://crbug.com/webrtc/7475,https://issues.webrtc.org/issues/42232586 +https://crbug.com/webrtc/7476,https://issues.webrtc.org/issues/42232587 +https://crbug.com/webrtc/7477,https://issues.webrtc.org/issues/42232588 +https://crbug.com/webrtc/7479,https://issues.webrtc.org/issues/42232589 +https://crbug.com/webrtc/748,https://issues.webrtc.org/issues/42232590 +https://crbug.com/webrtc/7480,https://issues.webrtc.org/issues/42232591 +https://crbug.com/webrtc/7481,https://issues.webrtc.org/issues/42232592 +https://crbug.com/webrtc/7482,https://issues.webrtc.org/issues/42232593 +https://crbug.com/webrtc/7483,https://issues.webrtc.org/issues/42232594 +https://crbug.com/webrtc/7484,https://issues.webrtc.org/issues/42232595 +https://crbug.com/webrtc/7485,https://issues.webrtc.org/issues/42232596 +https://crbug.com/webrtc/7486,https://issues.webrtc.org/issues/42232597 +https://crbug.com/webrtc/7487,https://issues.webrtc.org/issues/42232598 +https://crbug.com/webrtc/7488,https://issues.webrtc.org/issues/42232599 +https://crbug.com/webrtc/7489,https://issues.webrtc.org/issues/42232600 +https://crbug.com/webrtc/749,https://issues.webrtc.org/issues/42232601 +https://crbug.com/webrtc/7490,https://issues.webrtc.org/issues/42232602 +https://crbug.com/webrtc/7491,https://issues.webrtc.org/issues/42232603 +https://crbug.com/webrtc/7492,https://issues.webrtc.org/issues/42232604 +https://crbug.com/webrtc/7494,https://issues.webrtc.org/issues/42232605 +https://crbug.com/webrtc/7496,https://issues.webrtc.org/issues/42232606 +https://crbug.com/webrtc/7498,https://issues.webrtc.org/issues/42232607 +https://crbug.com/webrtc/7499,https://issues.webrtc.org/issues/42232608 +https://crbug.com/webrtc/75,https://issues.webrtc.org/issues/42232609 +https://crbug.com/webrtc/750,https://issues.webrtc.org/issues/42232610 +https://crbug.com/webrtc/7500,https://issues.webrtc.org/issues/42232611 +https://crbug.com/webrtc/7501,https://issues.webrtc.org/issues/42232612 +https://crbug.com/webrtc/7502,https://issues.webrtc.org/issues/42232613 +https://crbug.com/webrtc/7503,https://issues.webrtc.org/issues/42232614 +https://crbug.com/webrtc/7504,https://issues.webrtc.org/issues/42232615 +https://crbug.com/webrtc/7506,https://issues.webrtc.org/issues/42232616 +https://crbug.com/webrtc/7507,https://issues.webrtc.org/issues/42232617 +https://crbug.com/webrtc/7508,https://issues.webrtc.org/issues/42232618 +https://crbug.com/webrtc/7509,https://issues.webrtc.org/issues/42232619 +https://crbug.com/webrtc/751,https://issues.webrtc.org/issues/42232620 +https://crbug.com/webrtc/7510,https://issues.webrtc.org/issues/42232621 
+https://crbug.com/webrtc/7511,https://issues.webrtc.org/issues/42232622 +https://crbug.com/webrtc/7512,https://issues.webrtc.org/issues/42232623 +https://crbug.com/webrtc/7513,https://issues.webrtc.org/issues/42232624 +https://crbug.com/webrtc/7514,https://issues.webrtc.org/issues/42232625 +https://crbug.com/webrtc/7515,https://issues.webrtc.org/issues/42232626 +https://crbug.com/webrtc/7516,https://issues.webrtc.org/issues/42232627 +https://crbug.com/webrtc/7517,https://issues.webrtc.org/issues/42232628 +https://crbug.com/webrtc/7518,https://issues.webrtc.org/issues/42232629 +https://crbug.com/webrtc/7519,https://issues.webrtc.org/issues/42232630 +https://crbug.com/webrtc/752,https://issues.webrtc.org/issues/42232631 +https://crbug.com/webrtc/7520,https://issues.webrtc.org/issues/42232632 +https://crbug.com/webrtc/7521,https://issues.webrtc.org/issues/42232633 +https://crbug.com/webrtc/7522,https://issues.webrtc.org/issues/42232634 +https://crbug.com/webrtc/7523,https://issues.webrtc.org/issues/42232635 +https://crbug.com/webrtc/7524,https://issues.webrtc.org/issues/42232636 +https://crbug.com/webrtc/7525,https://issues.webrtc.org/issues/42232637 +https://crbug.com/webrtc/7526,https://issues.webrtc.org/issues/42232638 +https://crbug.com/webrtc/7527,https://issues.webrtc.org/issues/42232639 +https://crbug.com/webrtc/7528,https://issues.webrtc.org/issues/42232640 +https://crbug.com/webrtc/7529,https://issues.webrtc.org/issues/42232641 +https://crbug.com/webrtc/753,https://issues.webrtc.org/issues/42232642 +https://crbug.com/webrtc/7530,https://issues.webrtc.org/issues/42232643 +https://crbug.com/webrtc/7531,https://issues.webrtc.org/issues/42232644 +https://crbug.com/webrtc/7532,https://issues.webrtc.org/issues/42232645 +https://crbug.com/webrtc/7533,https://issues.webrtc.org/issues/42232646 +https://crbug.com/webrtc/7534,https://issues.webrtc.org/issues/42232647 +https://crbug.com/webrtc/7535,https://issues.webrtc.org/issues/42232648 +https://crbug.com/webrtc/7536,https://issues.webrtc.org/issues/42232649 +https://crbug.com/webrtc/7537,https://issues.webrtc.org/issues/42232650 +https://crbug.com/webrtc/7538,https://issues.webrtc.org/issues/42232651 +https://crbug.com/webrtc/7539,https://issues.webrtc.org/issues/42232652 +https://crbug.com/webrtc/754,https://issues.webrtc.org/issues/42232653 +https://crbug.com/webrtc/7540,https://issues.webrtc.org/issues/42232654 +https://crbug.com/webrtc/7541,https://issues.webrtc.org/issues/42232655 +https://crbug.com/webrtc/7542,https://issues.webrtc.org/issues/42232656 +https://crbug.com/webrtc/7543,https://issues.webrtc.org/issues/42232657 +https://crbug.com/webrtc/7544,https://issues.webrtc.org/issues/42232658 +https://crbug.com/webrtc/7545,https://issues.webrtc.org/issues/42232659 +https://crbug.com/webrtc/7547,https://issues.webrtc.org/issues/42232660 +https://crbug.com/webrtc/7548,https://issues.webrtc.org/issues/42232661 +https://crbug.com/webrtc/7549,https://issues.webrtc.org/issues/42232662 +https://crbug.com/webrtc/755,https://issues.webrtc.org/issues/42232663 +https://crbug.com/webrtc/7552,https://issues.webrtc.org/issues/42232664 +https://crbug.com/webrtc/7554,https://issues.webrtc.org/issues/42232665 +https://crbug.com/webrtc/7555,https://issues.webrtc.org/issues/42232666 +https://crbug.com/webrtc/7556,https://issues.webrtc.org/issues/42232667 +https://crbug.com/webrtc/7557,https://issues.webrtc.org/issues/42232668 +https://crbug.com/webrtc/7558,https://issues.webrtc.org/issues/42232669 
+https://crbug.com/webrtc/7559,https://issues.webrtc.org/issues/42232670 +https://crbug.com/webrtc/756,https://issues.webrtc.org/issues/42232671 +https://crbug.com/webrtc/7560,https://issues.webrtc.org/issues/42232672 +https://crbug.com/webrtc/7561,https://issues.webrtc.org/issues/42232673 +https://crbug.com/webrtc/7562,https://issues.webrtc.org/issues/42232674 +https://crbug.com/webrtc/7563,https://issues.webrtc.org/issues/42232675 +https://crbug.com/webrtc/7566,https://issues.webrtc.org/issues/42232676 +https://crbug.com/webrtc/7568,https://issues.webrtc.org/issues/42232677 +https://crbug.com/webrtc/7569,https://issues.webrtc.org/issues/42232678 +https://crbug.com/webrtc/757,https://issues.webrtc.org/issues/42232679 +https://crbug.com/webrtc/7570,https://issues.webrtc.org/issues/42232680 +https://crbug.com/webrtc/7571,https://issues.webrtc.org/issues/42232681 +https://crbug.com/webrtc/7572,https://issues.webrtc.org/issues/42232682 +https://crbug.com/webrtc/7573,https://issues.webrtc.org/issues/42232683 +https://crbug.com/webrtc/7574,https://issues.webrtc.org/issues/42232684 +https://crbug.com/webrtc/7575,https://issues.webrtc.org/issues/42232685 +https://crbug.com/webrtc/7576,https://issues.webrtc.org/issues/42232686 +https://crbug.com/webrtc/7577,https://issues.webrtc.org/issues/42232687 +https://crbug.com/webrtc/7579,https://issues.webrtc.org/issues/42232688 +https://crbug.com/webrtc/758,https://issues.webrtc.org/issues/42232689 +https://crbug.com/webrtc/7580,https://issues.webrtc.org/issues/42232690 +https://crbug.com/webrtc/7581,https://issues.webrtc.org/issues/42232691 +https://crbug.com/webrtc/7582,https://issues.webrtc.org/issues/42232692 +https://crbug.com/webrtc/7583,https://issues.webrtc.org/issues/42232693 +https://crbug.com/webrtc/7584,https://issues.webrtc.org/issues/42232694 +https://crbug.com/webrtc/7585,https://issues.webrtc.org/issues/42232695 +https://crbug.com/webrtc/7586,https://issues.webrtc.org/issues/42232696 +https://crbug.com/webrtc/7587,https://issues.webrtc.org/issues/42232697 +https://crbug.com/webrtc/7588,https://issues.webrtc.org/issues/42232698 +https://crbug.com/webrtc/7589,https://issues.webrtc.org/issues/42232699 +https://crbug.com/webrtc/759,https://issues.webrtc.org/issues/42232700 +https://crbug.com/webrtc/7590,https://issues.webrtc.org/issues/42232701 +https://crbug.com/webrtc/7591,https://issues.webrtc.org/issues/42232702 +https://crbug.com/webrtc/7592,https://issues.webrtc.org/issues/42232703 +https://crbug.com/webrtc/7593,https://issues.webrtc.org/issues/42232704 +https://crbug.com/webrtc/7594,https://issues.webrtc.org/issues/42232705 +https://crbug.com/webrtc/7595,https://issues.webrtc.org/issues/42232706 +https://crbug.com/webrtc/7596,https://issues.webrtc.org/issues/42232707 +https://crbug.com/webrtc/7597,https://issues.webrtc.org/issues/42232708 +https://crbug.com/webrtc/7598,https://issues.webrtc.org/issues/42232709 +https://crbug.com/webrtc/76,https://issues.webrtc.org/issues/42232710 +https://crbug.com/webrtc/760,https://issues.webrtc.org/issues/42232711 +https://crbug.com/webrtc/7601,https://issues.webrtc.org/issues/42232712 +https://crbug.com/webrtc/7603,https://issues.webrtc.org/issues/42232713 +https://crbug.com/webrtc/7604,https://issues.webrtc.org/issues/42232714 +https://crbug.com/webrtc/7605,https://issues.webrtc.org/issues/42232715 +https://crbug.com/webrtc/7606,https://issues.webrtc.org/issues/42232716 +https://crbug.com/webrtc/7607,https://issues.webrtc.org/issues/42232717 
+https://crbug.com/webrtc/7608,https://issues.webrtc.org/issues/42232718 +https://crbug.com/webrtc/7609,https://issues.webrtc.org/issues/42232719 +https://crbug.com/webrtc/761,https://issues.webrtc.org/issues/42232720 +https://crbug.com/webrtc/7610,https://issues.webrtc.org/issues/42232721 +https://crbug.com/webrtc/7611,https://issues.webrtc.org/issues/42232722 +https://crbug.com/webrtc/7612,https://issues.webrtc.org/issues/42232723 +https://crbug.com/webrtc/7613,https://issues.webrtc.org/issues/42232724 +https://crbug.com/webrtc/7615,https://issues.webrtc.org/issues/42232725 +https://crbug.com/webrtc/7616,https://issues.webrtc.org/issues/42232726 +https://crbug.com/webrtc/7617,https://issues.webrtc.org/issues/42232727 +https://crbug.com/webrtc/7618,https://issues.webrtc.org/issues/42232728 +https://crbug.com/webrtc/7619,https://issues.webrtc.org/issues/42232729 +https://crbug.com/webrtc/762,https://issues.webrtc.org/issues/42232730 +https://crbug.com/webrtc/7620,https://issues.webrtc.org/issues/42232731 +https://crbug.com/webrtc/7621,https://issues.webrtc.org/issues/42232732 +https://crbug.com/webrtc/7622,https://issues.webrtc.org/issues/42232733 +https://crbug.com/webrtc/7623,https://issues.webrtc.org/issues/42232734 +https://crbug.com/webrtc/7624,https://issues.webrtc.org/issues/42232735 +https://crbug.com/webrtc/7625,https://issues.webrtc.org/issues/42232736 +https://crbug.com/webrtc/7626,https://issues.webrtc.org/issues/42232737 +https://crbug.com/webrtc/7627,https://issues.webrtc.org/issues/42232738 +https://crbug.com/webrtc/7628,https://issues.webrtc.org/issues/42232739 +https://crbug.com/webrtc/7629,https://issues.webrtc.org/issues/42232740 +https://crbug.com/webrtc/763,https://issues.webrtc.org/issues/42232741 +https://crbug.com/webrtc/7630,https://issues.webrtc.org/issues/42232742 +https://crbug.com/webrtc/7631,https://issues.webrtc.org/issues/42232743 +https://crbug.com/webrtc/7632,https://issues.webrtc.org/issues/42232744 +https://crbug.com/webrtc/7635,https://issues.webrtc.org/issues/42232745 +https://crbug.com/webrtc/7636,https://issues.webrtc.org/issues/42232746 +https://crbug.com/webrtc/7637,https://issues.webrtc.org/issues/42232747 +https://crbug.com/webrtc/764,https://issues.webrtc.org/issues/42232748 +https://crbug.com/webrtc/7640,https://issues.webrtc.org/issues/42232749 +https://crbug.com/webrtc/7641,https://issues.webrtc.org/issues/42232750 +https://crbug.com/webrtc/7642,https://issues.webrtc.org/issues/42232751 +https://crbug.com/webrtc/7643,https://issues.webrtc.org/issues/42232752 +https://crbug.com/webrtc/7644,https://issues.webrtc.org/issues/42232753 +https://crbug.com/webrtc/7645,https://issues.webrtc.org/issues/42232754 +https://crbug.com/webrtc/7646,https://issues.webrtc.org/issues/42232755 +https://crbug.com/webrtc/7647,https://issues.webrtc.org/issues/42232756 +https://crbug.com/webrtc/7648,https://issues.webrtc.org/issues/42232757 +https://crbug.com/webrtc/7649,https://issues.webrtc.org/issues/42232758 +https://crbug.com/webrtc/765,https://issues.webrtc.org/issues/42232759 +https://crbug.com/webrtc/7650,https://issues.webrtc.org/issues/42232760 +https://crbug.com/webrtc/7651,https://issues.webrtc.org/issues/42232761 +https://crbug.com/webrtc/7652,https://issues.webrtc.org/issues/42232762 +https://crbug.com/webrtc/7653,https://issues.webrtc.org/issues/42232763 +https://crbug.com/webrtc/7655,https://issues.webrtc.org/issues/42232764 +https://crbug.com/webrtc/7656,https://issues.webrtc.org/issues/42232765 
+https://crbug.com/webrtc/7658,https://issues.webrtc.org/issues/42232766 +https://crbug.com/webrtc/7659,https://issues.webrtc.org/issues/42232767 +https://crbug.com/webrtc/766,https://issues.webrtc.org/issues/42232768 +https://crbug.com/webrtc/7660,https://issues.webrtc.org/issues/42232769 +https://crbug.com/webrtc/7661,https://issues.webrtc.org/issues/42232770 +https://crbug.com/webrtc/7662,https://issues.webrtc.org/issues/42232771 +https://crbug.com/webrtc/7663,https://issues.webrtc.org/issues/42232772 +https://crbug.com/webrtc/7664,https://issues.webrtc.org/issues/42232773 +https://crbug.com/webrtc/7665,https://issues.webrtc.org/issues/42232774 +https://crbug.com/webrtc/7666,https://issues.webrtc.org/issues/42232775 +https://crbug.com/webrtc/7667,https://issues.webrtc.org/issues/42232776 +https://crbug.com/webrtc/7668,https://issues.webrtc.org/issues/42232777 +https://crbug.com/webrtc/7669,https://issues.webrtc.org/issues/42232778 +https://crbug.com/webrtc/767,https://issues.webrtc.org/issues/42232779 +https://crbug.com/webrtc/7670,https://issues.webrtc.org/issues/42232780 +https://crbug.com/webrtc/7671,https://issues.webrtc.org/issues/42232781 +https://crbug.com/webrtc/7672,https://issues.webrtc.org/issues/42232782 +https://crbug.com/webrtc/7673,https://issues.webrtc.org/issues/42232783 +https://crbug.com/webrtc/7674,https://issues.webrtc.org/issues/42232784 +https://crbug.com/webrtc/7675,https://issues.webrtc.org/issues/42232785 +https://crbug.com/webrtc/7676,https://issues.webrtc.org/issues/42232786 +https://crbug.com/webrtc/7677,https://issues.webrtc.org/issues/42232787 +https://crbug.com/webrtc/7678,https://issues.webrtc.org/issues/42232788 +https://crbug.com/webrtc/7679,https://issues.webrtc.org/issues/42232789 +https://crbug.com/webrtc/768,https://issues.webrtc.org/issues/42232790 +https://crbug.com/webrtc/7680,https://issues.webrtc.org/issues/42232791 +https://crbug.com/webrtc/7681,https://issues.webrtc.org/issues/42232792 +https://crbug.com/webrtc/7682,https://issues.webrtc.org/issues/42232793 +https://crbug.com/webrtc/7683,https://issues.webrtc.org/issues/42232794 +https://crbug.com/webrtc/7684,https://issues.webrtc.org/issues/42232795 +https://crbug.com/webrtc/7685,https://issues.webrtc.org/issues/42232796 +https://crbug.com/webrtc/7686,https://issues.webrtc.org/issues/42232797 +https://crbug.com/webrtc/7687,https://issues.webrtc.org/issues/42232798 +https://crbug.com/webrtc/7689,https://issues.webrtc.org/issues/42232799 +https://crbug.com/webrtc/769,https://issues.webrtc.org/issues/42232800 +https://crbug.com/webrtc/7690,https://issues.webrtc.org/issues/42232801 +https://crbug.com/webrtc/7691,https://issues.webrtc.org/issues/42232802 +https://crbug.com/webrtc/7692,https://issues.webrtc.org/issues/42232803 +https://crbug.com/webrtc/7694,https://issues.webrtc.org/issues/42232804 +https://crbug.com/webrtc/7695,https://issues.webrtc.org/issues/42232805 +https://crbug.com/webrtc/7696,https://issues.webrtc.org/issues/42232806 +https://crbug.com/webrtc/7697,https://issues.webrtc.org/issues/42232807 +https://crbug.com/webrtc/7698,https://issues.webrtc.org/issues/42232808 +https://crbug.com/webrtc/7699,https://issues.webrtc.org/issues/42232809 +https://crbug.com/webrtc/77,https://issues.webrtc.org/issues/42232810 +https://crbug.com/webrtc/770,https://issues.webrtc.org/issues/42232811 +https://crbug.com/webrtc/7701,https://issues.webrtc.org/issues/42232812 +https://crbug.com/webrtc/7702,https://issues.webrtc.org/issues/42232813 
+https://crbug.com/webrtc/7703,https://issues.webrtc.org/issues/42232814 +https://crbug.com/webrtc/7704,https://issues.webrtc.org/issues/42232815 +https://crbug.com/webrtc/7705,https://issues.webrtc.org/issues/42232816 +https://crbug.com/webrtc/7707,https://issues.webrtc.org/issues/42232817 +https://crbug.com/webrtc/7708,https://issues.webrtc.org/issues/42232818 +https://crbug.com/webrtc/7709,https://issues.webrtc.org/issues/42232819 +https://crbug.com/webrtc/771,https://issues.webrtc.org/issues/42232820 +https://crbug.com/webrtc/7710,https://issues.webrtc.org/issues/42232821 +https://crbug.com/webrtc/7711,https://issues.webrtc.org/issues/42232822 +https://crbug.com/webrtc/7713,https://issues.webrtc.org/issues/42232823 +https://crbug.com/webrtc/7714,https://issues.webrtc.org/issues/42232824 +https://crbug.com/webrtc/7715,https://issues.webrtc.org/issues/42232825 +https://crbug.com/webrtc/7716,https://issues.webrtc.org/issues/42232826 +https://crbug.com/webrtc/7717,https://issues.webrtc.org/issues/42232827 +https://crbug.com/webrtc/7718,https://issues.webrtc.org/issues/42232828 +https://crbug.com/webrtc/7719,https://issues.webrtc.org/issues/42232829 +https://crbug.com/webrtc/772,https://issues.webrtc.org/issues/42232830 +https://crbug.com/webrtc/7720,https://issues.webrtc.org/issues/42232831 +https://crbug.com/webrtc/7721,https://issues.webrtc.org/issues/42232832 +https://crbug.com/webrtc/7722,https://issues.webrtc.org/issues/42232833 +https://crbug.com/webrtc/7723,https://issues.webrtc.org/issues/42232834 +https://crbug.com/webrtc/7724,https://issues.webrtc.org/issues/42232835 +https://crbug.com/webrtc/7725,https://issues.webrtc.org/issues/42232836 +https://crbug.com/webrtc/7726,https://issues.webrtc.org/issues/42232837 +https://crbug.com/webrtc/7727,https://issues.webrtc.org/issues/42232838 +https://crbug.com/webrtc/7728,https://issues.webrtc.org/issues/42232839 +https://crbug.com/webrtc/7729,https://issues.webrtc.org/issues/42232840 +https://crbug.com/webrtc/773,https://issues.webrtc.org/issues/42232841 +https://crbug.com/webrtc/7730,https://issues.webrtc.org/issues/42232842 +https://crbug.com/webrtc/7731,https://issues.webrtc.org/issues/42232843 +https://crbug.com/webrtc/7732,https://issues.webrtc.org/issues/42232844 +https://crbug.com/webrtc/7733,https://issues.webrtc.org/issues/42232845 +https://crbug.com/webrtc/7734,https://issues.webrtc.org/issues/42232846 +https://crbug.com/webrtc/7735,https://issues.webrtc.org/issues/42232847 +https://crbug.com/webrtc/7736,https://issues.webrtc.org/issues/42232848 +https://crbug.com/webrtc/7737,https://issues.webrtc.org/issues/42232849 +https://crbug.com/webrtc/7738,https://issues.webrtc.org/issues/42232850 +https://crbug.com/webrtc/7739,https://issues.webrtc.org/issues/42232851 +https://crbug.com/webrtc/774,https://issues.webrtc.org/issues/42232852 +https://crbug.com/webrtc/7740,https://issues.webrtc.org/issues/42232853 +https://crbug.com/webrtc/7741,https://issues.webrtc.org/issues/42232854 +https://crbug.com/webrtc/7742,https://issues.webrtc.org/issues/42232855 +https://crbug.com/webrtc/7743,https://issues.webrtc.org/issues/42232856 +https://crbug.com/webrtc/7744,https://issues.webrtc.org/issues/42232857 +https://crbug.com/webrtc/7745,https://issues.webrtc.org/issues/42232858 +https://crbug.com/webrtc/7746,https://issues.webrtc.org/issues/42232859 +https://crbug.com/webrtc/7747,https://issues.webrtc.org/issues/42232860 +https://crbug.com/webrtc/7748,https://issues.webrtc.org/issues/42232861 
+https://crbug.com/webrtc/7749,https://issues.webrtc.org/issues/42232862 +https://crbug.com/webrtc/775,https://issues.webrtc.org/issues/42232863 +https://crbug.com/webrtc/7750,https://issues.webrtc.org/issues/42232864 +https://crbug.com/webrtc/7751,https://issues.webrtc.org/issues/42232865 +https://crbug.com/webrtc/7752,https://issues.webrtc.org/issues/42232866 +https://crbug.com/webrtc/7753,https://issues.webrtc.org/issues/42232867 +https://crbug.com/webrtc/7754,https://issues.webrtc.org/issues/42232868 +https://crbug.com/webrtc/7755,https://issues.webrtc.org/issues/42232869 +https://crbug.com/webrtc/7756,https://issues.webrtc.org/issues/42232870 +https://crbug.com/webrtc/7757,https://issues.webrtc.org/issues/42232871 +https://crbug.com/webrtc/7758,https://issues.webrtc.org/issues/42232872 +https://crbug.com/webrtc/7759,https://issues.webrtc.org/issues/42232873 +https://crbug.com/webrtc/776,https://issues.webrtc.org/issues/42232874 +https://crbug.com/webrtc/7760,https://issues.webrtc.org/issues/42232875 +https://crbug.com/webrtc/7761,https://issues.webrtc.org/issues/42232876 +https://crbug.com/webrtc/7762,https://issues.webrtc.org/issues/42232877 +https://crbug.com/webrtc/7763,https://issues.webrtc.org/issues/42232878 +https://crbug.com/webrtc/7764,https://issues.webrtc.org/issues/42232879 +https://crbug.com/webrtc/7765,https://issues.webrtc.org/issues/42232880 +https://crbug.com/webrtc/7766,https://issues.webrtc.org/issues/42232881 +https://crbug.com/webrtc/7767,https://issues.webrtc.org/issues/42232882 +https://crbug.com/webrtc/7768,https://issues.webrtc.org/issues/42232883 +https://crbug.com/webrtc/7769,https://issues.webrtc.org/issues/42232884 +https://crbug.com/webrtc/777,https://issues.webrtc.org/issues/42232885 +https://crbug.com/webrtc/7771,https://issues.webrtc.org/issues/42232886 +https://crbug.com/webrtc/7772,https://issues.webrtc.org/issues/42232887 +https://crbug.com/webrtc/7773,https://issues.webrtc.org/issues/42232888 +https://crbug.com/webrtc/7775,https://issues.webrtc.org/issues/42232889 +https://crbug.com/webrtc/7776,https://issues.webrtc.org/issues/42232890 +https://crbug.com/webrtc/7777,https://issues.webrtc.org/issues/42232891 +https://crbug.com/webrtc/7778,https://issues.webrtc.org/issues/42232892 +https://crbug.com/webrtc/7779,https://issues.webrtc.org/issues/42232893 +https://crbug.com/webrtc/778,https://issues.webrtc.org/issues/42232894 +https://crbug.com/webrtc/7780,https://issues.webrtc.org/issues/42232895 +https://crbug.com/webrtc/7781,https://issues.webrtc.org/issues/42232896 +https://crbug.com/webrtc/7782,https://issues.webrtc.org/issues/42232897 +https://crbug.com/webrtc/7783,https://issues.webrtc.org/issues/42232898 +https://crbug.com/webrtc/7784,https://issues.webrtc.org/issues/42232899 +https://crbug.com/webrtc/7785,https://issues.webrtc.org/issues/42232900 +https://crbug.com/webrtc/7786,https://issues.webrtc.org/issues/42232901 +https://crbug.com/webrtc/7787,https://issues.webrtc.org/issues/42232902 +https://crbug.com/webrtc/7788,https://issues.webrtc.org/issues/42232903 +https://crbug.com/webrtc/7789,https://issues.webrtc.org/issues/42232904 +https://crbug.com/webrtc/779,https://issues.webrtc.org/issues/42232905 +https://crbug.com/webrtc/7790,https://issues.webrtc.org/issues/42232906 +https://crbug.com/webrtc/7792,https://issues.webrtc.org/issues/42232907 +https://crbug.com/webrtc/7793,https://issues.webrtc.org/issues/42232908 +https://crbug.com/webrtc/7794,https://issues.webrtc.org/issues/42232909 
+https://crbug.com/webrtc/7796,https://issues.webrtc.org/issues/42232910 +https://crbug.com/webrtc/7797,https://issues.webrtc.org/issues/42232911 +https://crbug.com/webrtc/7798,https://issues.webrtc.org/issues/42232912 +https://crbug.com/webrtc/78,https://issues.webrtc.org/issues/42232913 +https://crbug.com/webrtc/780,https://issues.webrtc.org/issues/42232914 +https://crbug.com/webrtc/7800,https://issues.webrtc.org/issues/42232915 +https://crbug.com/webrtc/7801,https://issues.webrtc.org/issues/42232916 +https://crbug.com/webrtc/7802,https://issues.webrtc.org/issues/42232917 +https://crbug.com/webrtc/7803,https://issues.webrtc.org/issues/42232918 +https://crbug.com/webrtc/7804,https://issues.webrtc.org/issues/42232919 +https://crbug.com/webrtc/7805,https://issues.webrtc.org/issues/42232920 +https://crbug.com/webrtc/7806,https://issues.webrtc.org/issues/42232921 +https://crbug.com/webrtc/7807,https://issues.webrtc.org/issues/42232922 +https://crbug.com/webrtc/7808,https://issues.webrtc.org/issues/42232923 +https://crbug.com/webrtc/7809,https://issues.webrtc.org/issues/42232924 +https://crbug.com/webrtc/781,https://issues.webrtc.org/issues/42232925 +https://crbug.com/webrtc/7810,https://issues.webrtc.org/issues/42232926 +https://crbug.com/webrtc/7811,https://issues.webrtc.org/issues/42232927 +https://crbug.com/webrtc/7812,https://issues.webrtc.org/issues/42232928 +https://crbug.com/webrtc/7813,https://issues.webrtc.org/issues/42232929 +https://crbug.com/webrtc/7816,https://issues.webrtc.org/issues/42232930 +https://crbug.com/webrtc/7818,https://issues.webrtc.org/issues/42232931 +https://crbug.com/webrtc/7819,https://issues.webrtc.org/issues/42232932 +https://crbug.com/webrtc/782,https://issues.webrtc.org/issues/42232933 +https://crbug.com/webrtc/7820,https://issues.webrtc.org/issues/42232934 +https://crbug.com/webrtc/7821,https://issues.webrtc.org/issues/42232935 +https://crbug.com/webrtc/7822,https://issues.webrtc.org/issues/42232936 +https://crbug.com/webrtc/7823,https://issues.webrtc.org/issues/42232937 +https://crbug.com/webrtc/7824,https://issues.webrtc.org/issues/42232938 +https://crbug.com/webrtc/7825,https://issues.webrtc.org/issues/42232939 +https://crbug.com/webrtc/7826,https://issues.webrtc.org/issues/42232940 +https://crbug.com/webrtc/7828,https://issues.webrtc.org/issues/42232941 +https://crbug.com/webrtc/7829,https://issues.webrtc.org/issues/42232942 +https://crbug.com/webrtc/783,https://issues.webrtc.org/issues/42232943 +https://crbug.com/webrtc/7830,https://issues.webrtc.org/issues/42232944 +https://crbug.com/webrtc/7831,https://issues.webrtc.org/issues/42232945 +https://crbug.com/webrtc/7832,https://issues.webrtc.org/issues/42232946 +https://crbug.com/webrtc/7833,https://issues.webrtc.org/issues/42232947 +https://crbug.com/webrtc/7834,https://issues.webrtc.org/issues/42232948 +https://crbug.com/webrtc/7835,https://issues.webrtc.org/issues/42232949 +https://crbug.com/webrtc/7836,https://issues.webrtc.org/issues/42232950 +https://crbug.com/webrtc/7837,https://issues.webrtc.org/issues/42232951 +https://crbug.com/webrtc/7838,https://issues.webrtc.org/issues/42232952 +https://crbug.com/webrtc/7839,https://issues.webrtc.org/issues/42232953 +https://crbug.com/webrtc/784,https://issues.webrtc.org/issues/42232954 +https://crbug.com/webrtc/7840,https://issues.webrtc.org/issues/42232955 +https://crbug.com/webrtc/7841,https://issues.webrtc.org/issues/42232956 +https://crbug.com/webrtc/7842,https://issues.webrtc.org/issues/42232957 
+https://crbug.com/webrtc/7843,https://issues.webrtc.org/issues/42232958 +https://crbug.com/webrtc/7845,https://issues.webrtc.org/issues/42232959 +https://crbug.com/webrtc/7846,https://issues.webrtc.org/issues/42232960 +https://crbug.com/webrtc/7847,https://issues.webrtc.org/issues/42232961 +https://crbug.com/webrtc/7848,https://issues.webrtc.org/issues/42232962 +https://crbug.com/webrtc/7849,https://issues.webrtc.org/issues/42232963 +https://crbug.com/webrtc/785,https://issues.webrtc.org/issues/42232964 +https://crbug.com/webrtc/7850,https://issues.webrtc.org/issues/42232965 +https://crbug.com/webrtc/7851,https://issues.webrtc.org/issues/42232966 +https://crbug.com/webrtc/7852,https://issues.webrtc.org/issues/42232967 +https://crbug.com/webrtc/7853,https://issues.webrtc.org/issues/42232968 +https://crbug.com/webrtc/7854,https://issues.webrtc.org/issues/42232969 +https://crbug.com/webrtc/7856,https://issues.webrtc.org/issues/42232970 +https://crbug.com/webrtc/7857,https://issues.webrtc.org/issues/42232971 +https://crbug.com/webrtc/7858,https://issues.webrtc.org/issues/42232972 +https://crbug.com/webrtc/7859,https://issues.webrtc.org/issues/42232973 +https://crbug.com/webrtc/786,https://issues.webrtc.org/issues/42232974 +https://crbug.com/webrtc/7860,https://issues.webrtc.org/issues/42232975 +https://crbug.com/webrtc/7861,https://issues.webrtc.org/issues/42232976 +https://crbug.com/webrtc/7862,https://issues.webrtc.org/issues/42232977 +https://crbug.com/webrtc/7863,https://issues.webrtc.org/issues/42232978 +https://crbug.com/webrtc/7864,https://issues.webrtc.org/issues/42232979 +https://crbug.com/webrtc/7865,https://issues.webrtc.org/issues/42232980 +https://crbug.com/webrtc/7866,https://issues.webrtc.org/issues/42232981 +https://crbug.com/webrtc/7867,https://issues.webrtc.org/issues/42232982 +https://crbug.com/webrtc/7868,https://issues.webrtc.org/issues/42232983 +https://crbug.com/webrtc/7869,https://issues.webrtc.org/issues/42232984 +https://crbug.com/webrtc/787,https://issues.webrtc.org/issues/42232985 +https://crbug.com/webrtc/7870,https://issues.webrtc.org/issues/42232986 +https://crbug.com/webrtc/7872,https://issues.webrtc.org/issues/42232987 +https://crbug.com/webrtc/7873,https://issues.webrtc.org/issues/42232988 +https://crbug.com/webrtc/7874,https://issues.webrtc.org/issues/42232989 +https://crbug.com/webrtc/7875,https://issues.webrtc.org/issues/42232990 +https://crbug.com/webrtc/7876,https://issues.webrtc.org/issues/42232991 +https://crbug.com/webrtc/7877,https://issues.webrtc.org/issues/42232992 +https://crbug.com/webrtc/7878,https://issues.webrtc.org/issues/42232993 +https://crbug.com/webrtc/7879,https://issues.webrtc.org/issues/42232994 +https://crbug.com/webrtc/788,https://issues.webrtc.org/issues/42232995 +https://crbug.com/webrtc/7880,https://issues.webrtc.org/issues/42232996 +https://crbug.com/webrtc/7881,https://issues.webrtc.org/issues/42232997 +https://crbug.com/webrtc/7882,https://issues.webrtc.org/issues/42232998 +https://crbug.com/webrtc/7883,https://issues.webrtc.org/issues/42232999 +https://crbug.com/webrtc/7884,https://issues.webrtc.org/issues/42233000 +https://crbug.com/webrtc/7886,https://issues.webrtc.org/issues/42233001 +https://crbug.com/webrtc/7887,https://issues.webrtc.org/issues/42233002 +https://crbug.com/webrtc/7888,https://issues.webrtc.org/issues/42233003 +https://crbug.com/webrtc/7889,https://issues.webrtc.org/issues/42233004 +https://crbug.com/webrtc/789,https://issues.webrtc.org/issues/42233005 
+https://crbug.com/webrtc/7890,https://issues.webrtc.org/issues/42233006 +https://crbug.com/webrtc/7891,https://issues.webrtc.org/issues/42233007 +https://crbug.com/webrtc/7892,https://issues.webrtc.org/issues/42233008 +https://crbug.com/webrtc/7893,https://issues.webrtc.org/issues/42233009 +https://crbug.com/webrtc/7894,https://issues.webrtc.org/issues/42233010 +https://crbug.com/webrtc/7895,https://issues.webrtc.org/issues/42233011 +https://crbug.com/webrtc/7896,https://issues.webrtc.org/issues/42233012 +https://crbug.com/webrtc/7897,https://issues.webrtc.org/issues/42233013 +https://crbug.com/webrtc/7898,https://issues.webrtc.org/issues/42233014 +https://crbug.com/webrtc/7899,https://issues.webrtc.org/issues/42233015 +https://crbug.com/webrtc/79,https://issues.webrtc.org/issues/42233016 +https://crbug.com/webrtc/790,https://issues.webrtc.org/issues/42233017 +https://crbug.com/webrtc/7900,https://issues.webrtc.org/issues/42233018 +https://crbug.com/webrtc/7901,https://issues.webrtc.org/issues/42233019 +https://crbug.com/webrtc/7902,https://issues.webrtc.org/issues/42233020 +https://crbug.com/webrtc/7903,https://issues.webrtc.org/issues/42233021 +https://crbug.com/webrtc/7904,https://issues.webrtc.org/issues/42233022 +https://crbug.com/webrtc/7905,https://issues.webrtc.org/issues/42233023 +https://crbug.com/webrtc/7906,https://issues.webrtc.org/issues/42233024 +https://crbug.com/webrtc/7907,https://issues.webrtc.org/issues/42233025 +https://crbug.com/webrtc/7908,https://issues.webrtc.org/issues/42233026 +https://crbug.com/webrtc/7909,https://issues.webrtc.org/issues/42233027 +https://crbug.com/webrtc/791,https://issues.webrtc.org/issues/42233028 +https://crbug.com/webrtc/7910,https://issues.webrtc.org/issues/42233029 +https://crbug.com/webrtc/7911,https://issues.webrtc.org/issues/42233030 +https://crbug.com/webrtc/7912,https://issues.webrtc.org/issues/42233031 +https://crbug.com/webrtc/7913,https://issues.webrtc.org/issues/42233032 +https://crbug.com/webrtc/7914,https://issues.webrtc.org/issues/42233033 +https://crbug.com/webrtc/7915,https://issues.webrtc.org/issues/42233034 +https://crbug.com/webrtc/7917,https://issues.webrtc.org/issues/42233035 +https://crbug.com/webrtc/7918,https://issues.webrtc.org/issues/42233036 +https://crbug.com/webrtc/7919,https://issues.webrtc.org/issues/42233037 +https://crbug.com/webrtc/792,https://issues.webrtc.org/issues/42233038 +https://crbug.com/webrtc/7920,https://issues.webrtc.org/issues/42233039 +https://crbug.com/webrtc/7921,https://issues.webrtc.org/issues/42233040 +https://crbug.com/webrtc/7922,https://issues.webrtc.org/issues/42233041 +https://crbug.com/webrtc/7923,https://issues.webrtc.org/issues/42233042 +https://crbug.com/webrtc/7924,https://issues.webrtc.org/issues/42233043 +https://crbug.com/webrtc/7925,https://issues.webrtc.org/issues/42233044 +https://crbug.com/webrtc/7926,https://issues.webrtc.org/issues/42233045 +https://crbug.com/webrtc/7927,https://issues.webrtc.org/issues/42233046 +https://crbug.com/webrtc/7928,https://issues.webrtc.org/issues/42233047 +https://crbug.com/webrtc/7929,https://issues.webrtc.org/issues/42233048 +https://crbug.com/webrtc/793,https://issues.webrtc.org/issues/42233049 +https://crbug.com/webrtc/7930,https://issues.webrtc.org/issues/42233050 +https://crbug.com/webrtc/7931,https://issues.webrtc.org/issues/42233051 +https://crbug.com/webrtc/7935,https://issues.webrtc.org/issues/42233052 +https://crbug.com/webrtc/7936,https://issues.webrtc.org/issues/42233053 
+https://crbug.com/webrtc/7937,https://issues.webrtc.org/issues/42233054 +https://crbug.com/webrtc/7938,https://issues.webrtc.org/issues/42233055 +https://crbug.com/webrtc/7939,https://issues.webrtc.org/issues/42233056 +https://crbug.com/webrtc/794,https://issues.webrtc.org/issues/42233057 +https://crbug.com/webrtc/7941,https://issues.webrtc.org/issues/42233058 +https://crbug.com/webrtc/7942,https://issues.webrtc.org/issues/42233059 +https://crbug.com/webrtc/7943,https://issues.webrtc.org/issues/42233060 +https://crbug.com/webrtc/7944,https://issues.webrtc.org/issues/42233061 +https://crbug.com/webrtc/7945,https://issues.webrtc.org/issues/42233062 +https://crbug.com/webrtc/7946,https://issues.webrtc.org/issues/42233063 +https://crbug.com/webrtc/7947,https://issues.webrtc.org/issues/42233064 +https://crbug.com/webrtc/7948,https://issues.webrtc.org/issues/42233065 +https://crbug.com/webrtc/7949,https://issues.webrtc.org/issues/42233066 +https://crbug.com/webrtc/795,https://issues.webrtc.org/issues/42233067 +https://crbug.com/webrtc/7950,https://issues.webrtc.org/issues/42233068 +https://crbug.com/webrtc/7951,https://issues.webrtc.org/issues/42233069 +https://crbug.com/webrtc/7952,https://issues.webrtc.org/issues/42233070 +https://crbug.com/webrtc/7953,https://issues.webrtc.org/issues/42233071 +https://crbug.com/webrtc/7954,https://issues.webrtc.org/issues/42233072 +https://crbug.com/webrtc/7955,https://issues.webrtc.org/issues/42233073 +https://crbug.com/webrtc/7956,https://issues.webrtc.org/issues/42233074 +https://crbug.com/webrtc/7957,https://issues.webrtc.org/issues/42233075 +https://crbug.com/webrtc/7958,https://issues.webrtc.org/issues/42233076 +https://crbug.com/webrtc/7959,https://issues.webrtc.org/issues/42233077 +https://crbug.com/webrtc/796,https://issues.webrtc.org/issues/42233078 +https://crbug.com/webrtc/7960,https://issues.webrtc.org/issues/42233079 +https://crbug.com/webrtc/7961,https://issues.webrtc.org/issues/42233080 +https://crbug.com/webrtc/7962,https://issues.webrtc.org/issues/42233081 +https://crbug.com/webrtc/7963,https://issues.webrtc.org/issues/42233082 +https://crbug.com/webrtc/7964,https://issues.webrtc.org/issues/42233083 +https://crbug.com/webrtc/7965,https://issues.webrtc.org/issues/42233084 +https://crbug.com/webrtc/7966,https://issues.webrtc.org/issues/42233085 +https://crbug.com/webrtc/7967,https://issues.webrtc.org/issues/42233086 +https://crbug.com/webrtc/7968,https://issues.webrtc.org/issues/42233087 +https://crbug.com/webrtc/7969,https://issues.webrtc.org/issues/42233088 +https://crbug.com/webrtc/797,https://issues.webrtc.org/issues/42233089 +https://crbug.com/webrtc/7970,https://issues.webrtc.org/issues/42233090 +https://crbug.com/webrtc/7971,https://issues.webrtc.org/issues/42233091 +https://crbug.com/webrtc/7973,https://issues.webrtc.org/issues/42233092 +https://crbug.com/webrtc/7974,https://issues.webrtc.org/issues/42233093 +https://crbug.com/webrtc/7975,https://issues.webrtc.org/issues/42233094 +https://crbug.com/webrtc/7976,https://issues.webrtc.org/issues/42233095 +https://crbug.com/webrtc/7977,https://issues.webrtc.org/issues/42233096 +https://crbug.com/webrtc/798,https://issues.webrtc.org/issues/42233097 +https://crbug.com/webrtc/7980,https://issues.webrtc.org/issues/42233098 +https://crbug.com/webrtc/7981,https://issues.webrtc.org/issues/42233099 +https://crbug.com/webrtc/7982,https://issues.webrtc.org/issues/42233100 +https://crbug.com/webrtc/7985,https://issues.webrtc.org/issues/42233101 
+https://crbug.com/webrtc/7986,https://issues.webrtc.org/issues/42233102 +https://crbug.com/webrtc/7987,https://issues.webrtc.org/issues/42233103 +https://crbug.com/webrtc/7988,https://issues.webrtc.org/issues/42233104 +https://crbug.com/webrtc/7989,https://issues.webrtc.org/issues/42233105 +https://crbug.com/webrtc/799,https://issues.webrtc.org/issues/42233106 +https://crbug.com/webrtc/7990,https://issues.webrtc.org/issues/42233107 +https://crbug.com/webrtc/7991,https://issues.webrtc.org/issues/42233108 +https://crbug.com/webrtc/7992,https://issues.webrtc.org/issues/42233109 +https://crbug.com/webrtc/7993,https://issues.webrtc.org/issues/42233110 +https://crbug.com/webrtc/7994,https://issues.webrtc.org/issues/42233111 +https://crbug.com/webrtc/7995,https://issues.webrtc.org/issues/42233112 +https://crbug.com/webrtc/7996,https://issues.webrtc.org/issues/42233113 +https://crbug.com/webrtc/7997,https://issues.webrtc.org/issues/42233114 +https://crbug.com/webrtc/7998,https://issues.webrtc.org/issues/42233115 +https://crbug.com/webrtc/7999,https://issues.webrtc.org/issues/42233116 +https://crbug.com/webrtc/8,https://issues.webrtc.org/issues/42233117 +https://crbug.com/webrtc/80,https://issues.webrtc.org/issues/42233118 +https://crbug.com/webrtc/800,https://issues.webrtc.org/issues/42233119 +https://crbug.com/webrtc/8000,https://issues.webrtc.org/issues/42233120 +https://crbug.com/webrtc/8001,https://issues.webrtc.org/issues/42233121 +https://crbug.com/webrtc/8002,https://issues.webrtc.org/issues/42233122 +https://crbug.com/webrtc/8003,https://issues.webrtc.org/issues/42233123 +https://crbug.com/webrtc/8004,https://issues.webrtc.org/issues/42233124 +https://crbug.com/webrtc/8005,https://issues.webrtc.org/issues/42233125 +https://crbug.com/webrtc/8006,https://issues.webrtc.org/issues/42233126 +https://crbug.com/webrtc/8007,https://issues.webrtc.org/issues/42233127 +https://crbug.com/webrtc/8008,https://issues.webrtc.org/issues/42233128 +https://crbug.com/webrtc/8009,https://issues.webrtc.org/issues/42233129 +https://crbug.com/webrtc/801,https://issues.webrtc.org/issues/42233130 +https://crbug.com/webrtc/8010,https://issues.webrtc.org/issues/42233131 +https://crbug.com/webrtc/8011,https://issues.webrtc.org/issues/42233132 +https://crbug.com/webrtc/8012,https://issues.webrtc.org/issues/42233133 +https://crbug.com/webrtc/8013,https://issues.webrtc.org/issues/42233134 +https://crbug.com/webrtc/8014,https://issues.webrtc.org/issues/42233135 +https://crbug.com/webrtc/8015,https://issues.webrtc.org/issues/42233136 +https://crbug.com/webrtc/8016,https://issues.webrtc.org/issues/42233137 +https://crbug.com/webrtc/8017,https://issues.webrtc.org/issues/42233138 +https://crbug.com/webrtc/8018,https://issues.webrtc.org/issues/42233139 +https://crbug.com/webrtc/8019,https://issues.webrtc.org/issues/42233140 +https://crbug.com/webrtc/802,https://issues.webrtc.org/issues/42233141 +https://crbug.com/webrtc/8020,https://issues.webrtc.org/issues/42233142 +https://crbug.com/webrtc/8021,https://issues.webrtc.org/issues/42233143 +https://crbug.com/webrtc/8022,https://issues.webrtc.org/issues/42233144 +https://crbug.com/webrtc/8023,https://issues.webrtc.org/issues/42233145 +https://crbug.com/webrtc/8024,https://issues.webrtc.org/issues/42233146 +https://crbug.com/webrtc/8025,https://issues.webrtc.org/issues/42233147 +https://crbug.com/webrtc/8026,https://issues.webrtc.org/issues/42233148 +https://crbug.com/webrtc/8027,https://issues.webrtc.org/issues/42233149 
+https://crbug.com/webrtc/8028,https://issues.webrtc.org/issues/42233150 +https://crbug.com/webrtc/8029,https://issues.webrtc.org/issues/42233151 +https://crbug.com/webrtc/803,https://issues.webrtc.org/issues/42233152 +https://crbug.com/webrtc/8031,https://issues.webrtc.org/issues/42233153 +https://crbug.com/webrtc/8032,https://issues.webrtc.org/issues/42233154 +https://crbug.com/webrtc/8033,https://issues.webrtc.org/issues/42233155 +https://crbug.com/webrtc/8034,https://issues.webrtc.org/issues/42233156 +https://crbug.com/webrtc/8035,https://issues.webrtc.org/issues/42233157 +https://crbug.com/webrtc/8036,https://issues.webrtc.org/issues/42233158 +https://crbug.com/webrtc/8037,https://issues.webrtc.org/issues/42233159 +https://crbug.com/webrtc/8038,https://issues.webrtc.org/issues/42233160 +https://crbug.com/webrtc/8039,https://issues.webrtc.org/issues/42233161 +https://crbug.com/webrtc/804,https://issues.webrtc.org/issues/42233162 +https://crbug.com/webrtc/8040,https://issues.webrtc.org/issues/42233163 +https://crbug.com/webrtc/8041,https://issues.webrtc.org/issues/42233164 +https://crbug.com/webrtc/8042,https://issues.webrtc.org/issues/42233165 +https://crbug.com/webrtc/8043,https://issues.webrtc.org/issues/42233166 +https://crbug.com/webrtc/8044,https://issues.webrtc.org/issues/42233167 +https://crbug.com/webrtc/8045,https://issues.webrtc.org/issues/42233168 +https://crbug.com/webrtc/8046,https://issues.webrtc.org/issues/42233169 +https://crbug.com/webrtc/8047,https://issues.webrtc.org/issues/42233170 +https://crbug.com/webrtc/8048,https://issues.webrtc.org/issues/42233171 +https://crbug.com/webrtc/8049,https://issues.webrtc.org/issues/42233172 +https://crbug.com/webrtc/805,https://issues.webrtc.org/issues/42233173 +https://crbug.com/webrtc/8050,https://issues.webrtc.org/issues/42233174 +https://crbug.com/webrtc/8051,https://issues.webrtc.org/issues/42233175 +https://crbug.com/webrtc/8052,https://issues.webrtc.org/issues/42233176 +https://crbug.com/webrtc/8053,https://issues.webrtc.org/issues/42233177 +https://crbug.com/webrtc/8054,https://issues.webrtc.org/issues/42233178 +https://crbug.com/webrtc/8055,https://issues.webrtc.org/issues/42233179 +https://crbug.com/webrtc/8056,https://issues.webrtc.org/issues/42233180 +https://crbug.com/webrtc/8057,https://issues.webrtc.org/issues/42233181 +https://crbug.com/webrtc/8058,https://issues.webrtc.org/issues/42233182 +https://crbug.com/webrtc/8059,https://issues.webrtc.org/issues/42233183 +https://crbug.com/webrtc/806,https://issues.webrtc.org/issues/42233184 +https://crbug.com/webrtc/8060,https://issues.webrtc.org/issues/42233185 +https://crbug.com/webrtc/8061,https://issues.webrtc.org/issues/42233186 +https://crbug.com/webrtc/8062,https://issues.webrtc.org/issues/42233187 +https://crbug.com/webrtc/8063,https://issues.webrtc.org/issues/42233188 +https://crbug.com/webrtc/8064,https://issues.webrtc.org/issues/42233189 +https://crbug.com/webrtc/8065,https://issues.webrtc.org/issues/42233190 +https://crbug.com/webrtc/8066,https://issues.webrtc.org/issues/42233191 +https://crbug.com/webrtc/8067,https://issues.webrtc.org/issues/42233192 +https://crbug.com/webrtc/8068,https://issues.webrtc.org/issues/42233193 +https://crbug.com/webrtc/8069,https://issues.webrtc.org/issues/42233194 +https://crbug.com/webrtc/807,https://issues.webrtc.org/issues/42233195 +https://crbug.com/webrtc/8070,https://issues.webrtc.org/issues/42233196 +https://crbug.com/webrtc/8071,https://issues.webrtc.org/issues/42233197 
+https://crbug.com/webrtc/8072,https://issues.webrtc.org/issues/42233198 +https://crbug.com/webrtc/8073,https://issues.webrtc.org/issues/42233199 +https://crbug.com/webrtc/8074,https://issues.webrtc.org/issues/42233200 +https://crbug.com/webrtc/8075,https://issues.webrtc.org/issues/42233201 +https://crbug.com/webrtc/8076,https://issues.webrtc.org/issues/42233202 +https://crbug.com/webrtc/8077,https://issues.webrtc.org/issues/42233203 +https://crbug.com/webrtc/8079,https://issues.webrtc.org/issues/42233204 +https://crbug.com/webrtc/808,https://issues.webrtc.org/issues/42233205 +https://crbug.com/webrtc/8080,https://issues.webrtc.org/issues/42233206 +https://crbug.com/webrtc/8081,https://issues.webrtc.org/issues/42233207 +https://crbug.com/webrtc/8082,https://issues.webrtc.org/issues/42233208 +https://crbug.com/webrtc/8083,https://issues.webrtc.org/issues/42233209 +https://crbug.com/webrtc/8084,https://issues.webrtc.org/issues/42233210 +https://crbug.com/webrtc/8085,https://issues.webrtc.org/issues/42233211 +https://crbug.com/webrtc/8086,https://issues.webrtc.org/issues/42233212 +https://crbug.com/webrtc/8087,https://issues.webrtc.org/issues/42233213 +https://crbug.com/webrtc/8088,https://issues.webrtc.org/issues/42233214 +https://crbug.com/webrtc/8089,https://issues.webrtc.org/issues/42233215 +https://crbug.com/webrtc/809,https://issues.webrtc.org/issues/42233216 +https://crbug.com/webrtc/8090,https://issues.webrtc.org/issues/42233217 +https://crbug.com/webrtc/8091,https://issues.webrtc.org/issues/42233218 +https://crbug.com/webrtc/8092,https://issues.webrtc.org/issues/42233219 +https://crbug.com/webrtc/8093,https://issues.webrtc.org/issues/42233220 +https://crbug.com/webrtc/8094,https://issues.webrtc.org/issues/42233221 +https://crbug.com/webrtc/8095,https://issues.webrtc.org/issues/42233222 +https://crbug.com/webrtc/8096,https://issues.webrtc.org/issues/42233223 +https://crbug.com/webrtc/8097,https://issues.webrtc.org/issues/42233224 +https://crbug.com/webrtc/8098,https://issues.webrtc.org/issues/42233225 +https://crbug.com/webrtc/81,https://issues.webrtc.org/issues/42233226 +https://crbug.com/webrtc/810,https://issues.webrtc.org/issues/42233227 +https://crbug.com/webrtc/8100,https://issues.webrtc.org/issues/42233228 +https://crbug.com/webrtc/8101,https://issues.webrtc.org/issues/42233229 +https://crbug.com/webrtc/8103,https://issues.webrtc.org/issues/42233230 +https://crbug.com/webrtc/8104,https://issues.webrtc.org/issues/42233231 +https://crbug.com/webrtc/8105,https://issues.webrtc.org/issues/42233232 +https://crbug.com/webrtc/8106,https://issues.webrtc.org/issues/42233233 +https://crbug.com/webrtc/8108,https://issues.webrtc.org/issues/42233234 +https://crbug.com/webrtc/811,https://issues.webrtc.org/issues/42233235 +https://crbug.com/webrtc/8110,https://issues.webrtc.org/issues/42233236 +https://crbug.com/webrtc/8111,https://issues.webrtc.org/issues/42233237 +https://crbug.com/webrtc/8112,https://issues.webrtc.org/issues/42233238 +https://crbug.com/webrtc/8113,https://issues.webrtc.org/issues/42233239 +https://crbug.com/webrtc/8114,https://issues.webrtc.org/issues/42233240 +https://crbug.com/webrtc/8115,https://issues.webrtc.org/issues/42233241 +https://crbug.com/webrtc/8116,https://issues.webrtc.org/issues/42233242 +https://crbug.com/webrtc/8117,https://issues.webrtc.org/issues/42233243 +https://crbug.com/webrtc/8118,https://issues.webrtc.org/issues/42233244 +https://crbug.com/webrtc/8119,https://issues.webrtc.org/issues/42233245 
+https://crbug.com/webrtc/812,https://issues.webrtc.org/issues/42233246 +https://crbug.com/webrtc/8120,https://issues.webrtc.org/issues/42233247 +https://crbug.com/webrtc/8121,https://issues.webrtc.org/issues/42233248 +https://crbug.com/webrtc/8122,https://issues.webrtc.org/issues/42233249 +https://crbug.com/webrtc/8123,https://issues.webrtc.org/issues/42233250 +https://crbug.com/webrtc/8124,https://issues.webrtc.org/issues/42233251 +https://crbug.com/webrtc/8125,https://issues.webrtc.org/issues/42233252 +https://crbug.com/webrtc/8126,https://issues.webrtc.org/issues/42233253 +https://crbug.com/webrtc/8127,https://issues.webrtc.org/issues/42233254 +https://crbug.com/webrtc/8128,https://issues.webrtc.org/issues/42233255 +https://crbug.com/webrtc/8129,https://issues.webrtc.org/issues/42233256 +https://crbug.com/webrtc/813,https://issues.webrtc.org/issues/42233257 +https://crbug.com/webrtc/8130,https://issues.webrtc.org/issues/42233258 +https://crbug.com/webrtc/8131,https://issues.webrtc.org/issues/42233259 +https://crbug.com/webrtc/8132,https://issues.webrtc.org/issues/42233260 +https://crbug.com/webrtc/8134,https://issues.webrtc.org/issues/42233261 +https://crbug.com/webrtc/8135,https://issues.webrtc.org/issues/42233262 +https://crbug.com/webrtc/8136,https://issues.webrtc.org/issues/42233263 +https://crbug.com/webrtc/8137,https://issues.webrtc.org/issues/42233264 +https://crbug.com/webrtc/8138,https://issues.webrtc.org/issues/42233265 +https://crbug.com/webrtc/8139,https://issues.webrtc.org/issues/42233266 +https://crbug.com/webrtc/814,https://issues.webrtc.org/issues/42233267 +https://crbug.com/webrtc/8140,https://issues.webrtc.org/issues/42233268 +https://crbug.com/webrtc/8141,https://issues.webrtc.org/issues/42233269 +https://crbug.com/webrtc/8142,https://issues.webrtc.org/issues/42233270 +https://crbug.com/webrtc/8143,https://issues.webrtc.org/issues/42233271 +https://crbug.com/webrtc/8144,https://issues.webrtc.org/issues/42233272 +https://crbug.com/webrtc/8145,https://issues.webrtc.org/issues/42233273 +https://crbug.com/webrtc/8146,https://issues.webrtc.org/issues/42233274 +https://crbug.com/webrtc/8147,https://issues.webrtc.org/issues/42233275 +https://crbug.com/webrtc/8148,https://issues.webrtc.org/issues/42233276 +https://crbug.com/webrtc/8149,https://issues.webrtc.org/issues/42233277 +https://crbug.com/webrtc/815,https://issues.webrtc.org/issues/42233278 +https://crbug.com/webrtc/8150,https://issues.webrtc.org/issues/42233279 +https://crbug.com/webrtc/8151,https://issues.webrtc.org/issues/42233280 +https://crbug.com/webrtc/8153,https://issues.webrtc.org/issues/42233281 +https://crbug.com/webrtc/8154,https://issues.webrtc.org/issues/42233282 +https://crbug.com/webrtc/8155,https://issues.webrtc.org/issues/42233283 +https://crbug.com/webrtc/8156,https://issues.webrtc.org/issues/42233284 +https://crbug.com/webrtc/8157,https://issues.webrtc.org/issues/42233285 +https://crbug.com/webrtc/8158,https://issues.webrtc.org/issues/42233286 +https://crbug.com/webrtc/8159,https://issues.webrtc.org/issues/42233287 +https://crbug.com/webrtc/816,https://issues.webrtc.org/issues/42233288 +https://crbug.com/webrtc/8160,https://issues.webrtc.org/issues/42233289 +https://crbug.com/webrtc/8161,https://issues.webrtc.org/issues/42233290 +https://crbug.com/webrtc/8162,https://issues.webrtc.org/issues/42233291 +https://crbug.com/webrtc/8163,https://issues.webrtc.org/issues/42233292 +https://crbug.com/webrtc/8165,https://issues.webrtc.org/issues/42233293 
+https://crbug.com/webrtc/8166,https://issues.webrtc.org/issues/42233294 +https://crbug.com/webrtc/8167,https://issues.webrtc.org/issues/42233295 +https://crbug.com/webrtc/8168,https://issues.webrtc.org/issues/42233296 +https://crbug.com/webrtc/8169,https://issues.webrtc.org/issues/42233297 +https://crbug.com/webrtc/817,https://issues.webrtc.org/issues/42233298 +https://crbug.com/webrtc/8170,https://issues.webrtc.org/issues/42233299 +https://crbug.com/webrtc/8171,https://issues.webrtc.org/issues/42233300 +https://crbug.com/webrtc/8172,https://issues.webrtc.org/issues/42233301 +https://crbug.com/webrtc/8173,https://issues.webrtc.org/issues/42233302 +https://crbug.com/webrtc/8174,https://issues.webrtc.org/issues/42233303 +https://crbug.com/webrtc/8175,https://issues.webrtc.org/issues/42233304 +https://crbug.com/webrtc/8176,https://issues.webrtc.org/issues/42233305 +https://crbug.com/webrtc/8177,https://issues.webrtc.org/issues/42233306 +https://crbug.com/webrtc/8178,https://issues.webrtc.org/issues/42233307 +https://crbug.com/webrtc/8179,https://issues.webrtc.org/issues/42233308 +https://crbug.com/webrtc/818,https://issues.webrtc.org/issues/42233309 +https://crbug.com/webrtc/8180,https://issues.webrtc.org/issues/42233310 +https://crbug.com/webrtc/8181,https://issues.webrtc.org/issues/42233311 +https://crbug.com/webrtc/8182,https://issues.webrtc.org/issues/42233312 +https://crbug.com/webrtc/8184,https://issues.webrtc.org/issues/42233313 +https://crbug.com/webrtc/8185,https://issues.webrtc.org/issues/42233314 +https://crbug.com/webrtc/8187,https://issues.webrtc.org/issues/42233315 +https://crbug.com/webrtc/8188,https://issues.webrtc.org/issues/42233316 +https://crbug.com/webrtc/8189,https://issues.webrtc.org/issues/42233317 +https://crbug.com/webrtc/819,https://issues.webrtc.org/issues/42233318 +https://crbug.com/webrtc/8190,https://issues.webrtc.org/issues/42233319 +https://crbug.com/webrtc/8193,https://issues.webrtc.org/issues/42233320 +https://crbug.com/webrtc/8194,https://issues.webrtc.org/issues/42233321 +https://crbug.com/webrtc/8195,https://issues.webrtc.org/issues/42233322 +https://crbug.com/webrtc/8196,https://issues.webrtc.org/issues/42233323 +https://crbug.com/webrtc/8197,https://issues.webrtc.org/issues/42233324 +https://crbug.com/webrtc/8198,https://issues.webrtc.org/issues/42233325 +https://crbug.com/webrtc/8199,https://issues.webrtc.org/issues/42233326 +https://crbug.com/webrtc/82,https://issues.webrtc.org/issues/42233327 +https://crbug.com/webrtc/820,https://issues.webrtc.org/issues/42233328 +https://crbug.com/webrtc/8200,https://issues.webrtc.org/issues/42233329 +https://crbug.com/webrtc/8201,https://issues.webrtc.org/issues/42233330 +https://crbug.com/webrtc/8202,https://issues.webrtc.org/issues/42233331 +https://crbug.com/webrtc/8203,https://issues.webrtc.org/issues/42233332 +https://crbug.com/webrtc/8204,https://issues.webrtc.org/issues/42233333 +https://crbug.com/webrtc/8205,https://issues.webrtc.org/issues/42233334 +https://crbug.com/webrtc/8206,https://issues.webrtc.org/issues/42233335 +https://crbug.com/webrtc/8207,https://issues.webrtc.org/issues/42233336 +https://crbug.com/webrtc/8208,https://issues.webrtc.org/issues/42233337 +https://crbug.com/webrtc/8209,https://issues.webrtc.org/issues/42233338 +https://crbug.com/webrtc/821,https://issues.webrtc.org/issues/42233339 +https://crbug.com/webrtc/8210,https://issues.webrtc.org/issues/42233340 +https://crbug.com/webrtc/8211,https://issues.webrtc.org/issues/42233341 
+https://crbug.com/webrtc/8212,https://issues.webrtc.org/issues/42233342 +https://crbug.com/webrtc/8213,https://issues.webrtc.org/issues/42233343 +https://crbug.com/webrtc/8214,https://issues.webrtc.org/issues/42233344 +https://crbug.com/webrtc/8215,https://issues.webrtc.org/issues/42233345 +https://crbug.com/webrtc/8216,https://issues.webrtc.org/issues/42233346 +https://crbug.com/webrtc/8217,https://issues.webrtc.org/issues/42233347 +https://crbug.com/webrtc/8218,https://issues.webrtc.org/issues/42233348 +https://crbug.com/webrtc/8219,https://issues.webrtc.org/issues/42233349 +https://crbug.com/webrtc/822,https://issues.webrtc.org/issues/42233350 +https://crbug.com/webrtc/8220,https://issues.webrtc.org/issues/42233351 +https://crbug.com/webrtc/8222,https://issues.webrtc.org/issues/42233352 +https://crbug.com/webrtc/8223,https://issues.webrtc.org/issues/42233353 +https://crbug.com/webrtc/8224,https://issues.webrtc.org/issues/42233354 +https://crbug.com/webrtc/8225,https://issues.webrtc.org/issues/42233355 +https://crbug.com/webrtc/8228,https://issues.webrtc.org/issues/42233356 +https://crbug.com/webrtc/8229,https://issues.webrtc.org/issues/42233357 +https://crbug.com/webrtc/823,https://issues.webrtc.org/issues/42233358 +https://crbug.com/webrtc/8230,https://issues.webrtc.org/issues/42233359 +https://crbug.com/webrtc/8231,https://issues.webrtc.org/issues/42233360 +https://crbug.com/webrtc/8233,https://issues.webrtc.org/issues/42233361 +https://crbug.com/webrtc/8235,https://issues.webrtc.org/issues/42233362 +https://crbug.com/webrtc/8236,https://issues.webrtc.org/issues/42233363 +https://crbug.com/webrtc/8237,https://issues.webrtc.org/issues/42233364 +https://crbug.com/webrtc/8238,https://issues.webrtc.org/issues/42233365 +https://crbug.com/webrtc/824,https://issues.webrtc.org/issues/42233366 +https://crbug.com/webrtc/8240,https://issues.webrtc.org/issues/42233367 +https://crbug.com/webrtc/8241,https://issues.webrtc.org/issues/42233368 +https://crbug.com/webrtc/8242,https://issues.webrtc.org/issues/42233369 +https://crbug.com/webrtc/8243,https://issues.webrtc.org/issues/42233370 +https://crbug.com/webrtc/8244,https://issues.webrtc.org/issues/42233371 +https://crbug.com/webrtc/8245,https://issues.webrtc.org/issues/42233372 +https://crbug.com/webrtc/8246,https://issues.webrtc.org/issues/42233373 +https://crbug.com/webrtc/8247,https://issues.webrtc.org/issues/42233374 +https://crbug.com/webrtc/8248,https://issues.webrtc.org/issues/42233375 +https://crbug.com/webrtc/8249,https://issues.webrtc.org/issues/42233376 +https://crbug.com/webrtc/825,https://issues.webrtc.org/issues/42233377 +https://crbug.com/webrtc/8250,https://issues.webrtc.org/issues/42233378 +https://crbug.com/webrtc/8251,https://issues.webrtc.org/issues/42233379 +https://crbug.com/webrtc/8252,https://issues.webrtc.org/issues/42233380 +https://crbug.com/webrtc/8253,https://issues.webrtc.org/issues/42233381 +https://crbug.com/webrtc/8254,https://issues.webrtc.org/issues/42233382 +https://crbug.com/webrtc/8255,https://issues.webrtc.org/issues/42233383 +https://crbug.com/webrtc/8256,https://issues.webrtc.org/issues/42233384 +https://crbug.com/webrtc/8257,https://issues.webrtc.org/issues/42233385 +https://crbug.com/webrtc/8258,https://issues.webrtc.org/issues/42233386 +https://crbug.com/webrtc/8259,https://issues.webrtc.org/issues/42233387 +https://crbug.com/webrtc/826,https://issues.webrtc.org/issues/42233388 +https://crbug.com/webrtc/8260,https://issues.webrtc.org/issues/42233389 
+https://crbug.com/webrtc/8261,https://issues.webrtc.org/issues/42233390 +https://crbug.com/webrtc/8262,https://issues.webrtc.org/issues/42233391 +https://crbug.com/webrtc/8263,https://issues.webrtc.org/issues/42233392 +https://crbug.com/webrtc/8265,https://issues.webrtc.org/issues/42233393 +https://crbug.com/webrtc/8266,https://issues.webrtc.org/issues/42233394 +https://crbug.com/webrtc/8267,https://issues.webrtc.org/issues/42233395 +https://crbug.com/webrtc/8268,https://issues.webrtc.org/issues/42233396 +https://crbug.com/webrtc/8269,https://issues.webrtc.org/issues/42233397 +https://crbug.com/webrtc/827,https://issues.webrtc.org/issues/42233398 +https://crbug.com/webrtc/8270,https://issues.webrtc.org/issues/42233399 +https://crbug.com/webrtc/8271,https://issues.webrtc.org/issues/42233400 +https://crbug.com/webrtc/8272,https://issues.webrtc.org/issues/42233401 +https://crbug.com/webrtc/8273,https://issues.webrtc.org/issues/42233402 +https://crbug.com/webrtc/8274,https://issues.webrtc.org/issues/42233403 +https://crbug.com/webrtc/8275,https://issues.webrtc.org/issues/42233404 +https://crbug.com/webrtc/8276,https://issues.webrtc.org/issues/42233405 +https://crbug.com/webrtc/8277,https://issues.webrtc.org/issues/42233406 +https://crbug.com/webrtc/8278,https://issues.webrtc.org/issues/42233407 +https://crbug.com/webrtc/8279,https://issues.webrtc.org/issues/42233408 +https://crbug.com/webrtc/828,https://issues.webrtc.org/issues/42233409 +https://crbug.com/webrtc/8280,https://issues.webrtc.org/issues/42233410 +https://crbug.com/webrtc/8281,https://issues.webrtc.org/issues/42233411 +https://crbug.com/webrtc/8282,https://issues.webrtc.org/issues/42233412 +https://crbug.com/webrtc/8283,https://issues.webrtc.org/issues/42233413 +https://crbug.com/webrtc/8284,https://issues.webrtc.org/issues/42233414 +https://crbug.com/webrtc/8285,https://issues.webrtc.org/issues/42233415 +https://crbug.com/webrtc/8286,https://issues.webrtc.org/issues/42233416 +https://crbug.com/webrtc/8287,https://issues.webrtc.org/issues/42233417 +https://crbug.com/webrtc/8288,https://issues.webrtc.org/issues/42233418 +https://crbug.com/webrtc/8289,https://issues.webrtc.org/issues/42233419 +https://crbug.com/webrtc/829,https://issues.webrtc.org/issues/42233420 +https://crbug.com/webrtc/8290,https://issues.webrtc.org/issues/42233421 +https://crbug.com/webrtc/8291,https://issues.webrtc.org/issues/42233422 +https://crbug.com/webrtc/8292,https://issues.webrtc.org/issues/42233423 +https://crbug.com/webrtc/8293,https://issues.webrtc.org/issues/42233424 +https://crbug.com/webrtc/8294,https://issues.webrtc.org/issues/42233425 +https://crbug.com/webrtc/8295,https://issues.webrtc.org/issues/42233426 +https://crbug.com/webrtc/8296,https://issues.webrtc.org/issues/42233427 +https://crbug.com/webrtc/8297,https://issues.webrtc.org/issues/42233428 +https://crbug.com/webrtc/8298,https://issues.webrtc.org/issues/42233429 +https://crbug.com/webrtc/8299,https://issues.webrtc.org/issues/42233430 +https://crbug.com/webrtc/83,https://issues.webrtc.org/issues/42233431 +https://crbug.com/webrtc/830,https://issues.webrtc.org/issues/42233432 +https://crbug.com/webrtc/8300,https://issues.webrtc.org/issues/42233433 +https://crbug.com/webrtc/8301,https://issues.webrtc.org/issues/42233434 +https://crbug.com/webrtc/8302,https://issues.webrtc.org/issues/42233435 +https://crbug.com/webrtc/8303,https://issues.webrtc.org/issues/42233436 +https://crbug.com/webrtc/8305,https://issues.webrtc.org/issues/42233437 
+https://crbug.com/webrtc/8306,https://issues.webrtc.org/issues/42233438 +https://crbug.com/webrtc/8307,https://issues.webrtc.org/issues/42233439 +https://crbug.com/webrtc/8308,https://issues.webrtc.org/issues/42233440 +https://crbug.com/webrtc/8309,https://issues.webrtc.org/issues/42233441 +https://crbug.com/webrtc/831,https://issues.webrtc.org/issues/42233442 +https://crbug.com/webrtc/8310,https://issues.webrtc.org/issues/42233443 +https://crbug.com/webrtc/8311,https://issues.webrtc.org/issues/42233444 +https://crbug.com/webrtc/8312,https://issues.webrtc.org/issues/42233445 +https://crbug.com/webrtc/8313,https://issues.webrtc.org/issues/42233446 +https://crbug.com/webrtc/8314,https://issues.webrtc.org/issues/42233447 +https://crbug.com/webrtc/8316,https://issues.webrtc.org/issues/42233448 +https://crbug.com/webrtc/8318,https://issues.webrtc.org/issues/42233449 +https://crbug.com/webrtc/8319,https://issues.webrtc.org/issues/42233450 +https://crbug.com/webrtc/832,https://issues.webrtc.org/issues/42233451 +https://crbug.com/webrtc/8320,https://issues.webrtc.org/issues/42233452 +https://crbug.com/webrtc/8321,https://issues.webrtc.org/issues/42233453 +https://crbug.com/webrtc/8322,https://issues.webrtc.org/issues/42233454 +https://crbug.com/webrtc/8323,https://issues.webrtc.org/issues/42233455 +https://crbug.com/webrtc/8324,https://issues.webrtc.org/issues/42233456 +https://crbug.com/webrtc/8325,https://issues.webrtc.org/issues/42233457 +https://crbug.com/webrtc/8326,https://issues.webrtc.org/issues/42233458 +https://crbug.com/webrtc/8327,https://issues.webrtc.org/issues/42233459 +https://crbug.com/webrtc/8328,https://issues.webrtc.org/issues/42233460 +https://crbug.com/webrtc/8329,https://issues.webrtc.org/issues/42233461 +https://crbug.com/webrtc/833,https://issues.webrtc.org/issues/42233462 +https://crbug.com/webrtc/8330,https://issues.webrtc.org/issues/42233463 +https://crbug.com/webrtc/8331,https://issues.webrtc.org/issues/42233464 +https://crbug.com/webrtc/8332,https://issues.webrtc.org/issues/42233465 +https://crbug.com/webrtc/8333,https://issues.webrtc.org/issues/42233466 +https://crbug.com/webrtc/8334,https://issues.webrtc.org/issues/42233467 +https://crbug.com/webrtc/8335,https://issues.webrtc.org/issues/42233468 +https://crbug.com/webrtc/8336,https://issues.webrtc.org/issues/42233469 +https://crbug.com/webrtc/8337,https://issues.webrtc.org/issues/42233470 +https://crbug.com/webrtc/8338,https://issues.webrtc.org/issues/42233471 +https://crbug.com/webrtc/834,https://issues.webrtc.org/issues/42233472 +https://crbug.com/webrtc/8340,https://issues.webrtc.org/issues/42233473 +https://crbug.com/webrtc/8341,https://issues.webrtc.org/issues/42233474 +https://crbug.com/webrtc/8342,https://issues.webrtc.org/issues/42233475 +https://crbug.com/webrtc/8343,https://issues.webrtc.org/issues/42233476 +https://crbug.com/webrtc/8344,https://issues.webrtc.org/issues/42233477 +https://crbug.com/webrtc/8345,https://issues.webrtc.org/issues/42233478 +https://crbug.com/webrtc/8346,https://issues.webrtc.org/issues/42233479 +https://crbug.com/webrtc/8347,https://issues.webrtc.org/issues/42233480 +https://crbug.com/webrtc/8348,https://issues.webrtc.org/issues/42233481 +https://crbug.com/webrtc/8349,https://issues.webrtc.org/issues/42233482 +https://crbug.com/webrtc/835,https://issues.webrtc.org/issues/42233483 +https://crbug.com/webrtc/8350,https://issues.webrtc.org/issues/42233484 +https://crbug.com/webrtc/8351,https://issues.webrtc.org/issues/42233485 
+https://crbug.com/webrtc/8352,https://issues.webrtc.org/issues/42233486 +https://crbug.com/webrtc/8353,https://issues.webrtc.org/issues/42233487 +https://crbug.com/webrtc/8354,https://issues.webrtc.org/issues/42233488 +https://crbug.com/webrtc/8355,https://issues.webrtc.org/issues/42233489 +https://crbug.com/webrtc/8358,https://issues.webrtc.org/issues/42233490 +https://crbug.com/webrtc/8359,https://issues.webrtc.org/issues/42233491 +https://crbug.com/webrtc/836,https://issues.webrtc.org/issues/42233492 +https://crbug.com/webrtc/8360,https://issues.webrtc.org/issues/42233493 +https://crbug.com/webrtc/8361,https://issues.webrtc.org/issues/42233494 +https://crbug.com/webrtc/8362,https://issues.webrtc.org/issues/42233495 +https://crbug.com/webrtc/8363,https://issues.webrtc.org/issues/42233496 +https://crbug.com/webrtc/8364,https://issues.webrtc.org/issues/42233497 +https://crbug.com/webrtc/8367,https://issues.webrtc.org/issues/42233498 +https://crbug.com/webrtc/8368,https://issues.webrtc.org/issues/42233499 +https://crbug.com/webrtc/8369,https://issues.webrtc.org/issues/42233500 +https://crbug.com/webrtc/837,https://issues.webrtc.org/issues/42233501 +https://crbug.com/webrtc/8370,https://issues.webrtc.org/issues/42233502 +https://crbug.com/webrtc/8372,https://issues.webrtc.org/issues/42233503 +https://crbug.com/webrtc/8373,https://issues.webrtc.org/issues/42233504 +https://crbug.com/webrtc/8374,https://issues.webrtc.org/issues/42233505 +https://crbug.com/webrtc/8375,https://issues.webrtc.org/issues/42233506 +https://crbug.com/webrtc/8376,https://issues.webrtc.org/issues/42233507 +https://crbug.com/webrtc/8378,https://issues.webrtc.org/issues/42233508 +https://crbug.com/webrtc/8379,https://issues.webrtc.org/issues/42233509 +https://crbug.com/webrtc/838,https://issues.webrtc.org/issues/42233510 +https://crbug.com/webrtc/8380,https://issues.webrtc.org/issues/42233511 +https://crbug.com/webrtc/8381,https://issues.webrtc.org/issues/42233512 +https://crbug.com/webrtc/8382,https://issues.webrtc.org/issues/42233513 +https://crbug.com/webrtc/8383,https://issues.webrtc.org/issues/42233514 +https://crbug.com/webrtc/8384,https://issues.webrtc.org/issues/42233515 +https://crbug.com/webrtc/8385,https://issues.webrtc.org/issues/42233516 +https://crbug.com/webrtc/8386,https://issues.webrtc.org/issues/42233517 +https://crbug.com/webrtc/8387,https://issues.webrtc.org/issues/42233518 +https://crbug.com/webrtc/8388,https://issues.webrtc.org/issues/42233519 +https://crbug.com/webrtc/8389,https://issues.webrtc.org/issues/42233520 +https://crbug.com/webrtc/8390,https://issues.webrtc.org/issues/42233521 +https://crbug.com/webrtc/8391,https://issues.webrtc.org/issues/42233522 +https://crbug.com/webrtc/8392,https://issues.webrtc.org/issues/42233523 +https://crbug.com/webrtc/8393,https://issues.webrtc.org/issues/42233524 +https://crbug.com/webrtc/8394,https://issues.webrtc.org/issues/42233525 +https://crbug.com/webrtc/8395,https://issues.webrtc.org/issues/42233526 +https://crbug.com/webrtc/8396,https://issues.webrtc.org/issues/42233527 +https://crbug.com/webrtc/8397,https://issues.webrtc.org/issues/42233528 +https://crbug.com/webrtc/8398,https://issues.webrtc.org/issues/42233529 +https://crbug.com/webrtc/8399,https://issues.webrtc.org/issues/42233530 +https://crbug.com/webrtc/84,https://issues.webrtc.org/issues/42233531 +https://crbug.com/webrtc/840,https://issues.webrtc.org/issues/42233532 +https://crbug.com/webrtc/8400,https://issues.webrtc.org/issues/42233533 
+https://crbug.com/webrtc/8401,https://issues.webrtc.org/issues/42233534 +https://crbug.com/webrtc/8402,https://issues.webrtc.org/issues/42233535 +https://crbug.com/webrtc/8403,https://issues.webrtc.org/issues/42233536 +https://crbug.com/webrtc/8404,https://issues.webrtc.org/issues/42233537 +https://crbug.com/webrtc/8405,https://issues.webrtc.org/issues/42233538 +https://crbug.com/webrtc/8406,https://issues.webrtc.org/issues/42233539 +https://crbug.com/webrtc/8407,https://issues.webrtc.org/issues/42233540 +https://crbug.com/webrtc/8408,https://issues.webrtc.org/issues/42233541 +https://crbug.com/webrtc/841,https://issues.webrtc.org/issues/42233542 +https://crbug.com/webrtc/8410,https://issues.webrtc.org/issues/42233543 +https://crbug.com/webrtc/8411,https://issues.webrtc.org/issues/42233544 +https://crbug.com/webrtc/8412,https://issues.webrtc.org/issues/42233545 +https://crbug.com/webrtc/8413,https://issues.webrtc.org/issues/42233546 +https://crbug.com/webrtc/8414,https://issues.webrtc.org/issues/42233547 +https://crbug.com/webrtc/8415,https://issues.webrtc.org/issues/42233548 +https://crbug.com/webrtc/8416,https://issues.webrtc.org/issues/42233549 +https://crbug.com/webrtc/8417,https://issues.webrtc.org/issues/42233550 +https://crbug.com/webrtc/8418,https://issues.webrtc.org/issues/42233551 +https://crbug.com/webrtc/8419,https://issues.webrtc.org/issues/42233552 +https://crbug.com/webrtc/842,https://issues.webrtc.org/issues/42233553 +https://crbug.com/webrtc/8420,https://issues.webrtc.org/issues/42233554 +https://crbug.com/webrtc/8421,https://issues.webrtc.org/issues/42233555 +https://crbug.com/webrtc/8422,https://issues.webrtc.org/issues/42233556 +https://crbug.com/webrtc/8423,https://issues.webrtc.org/issues/42233557 +https://crbug.com/webrtc/8424,https://issues.webrtc.org/issues/42233558 +https://crbug.com/webrtc/8426,https://issues.webrtc.org/issues/42233559 +https://crbug.com/webrtc/8427,https://issues.webrtc.org/issues/42233560 +https://crbug.com/webrtc/8428,https://issues.webrtc.org/issues/42233561 +https://crbug.com/webrtc/8429,https://issues.webrtc.org/issues/42233562 +https://crbug.com/webrtc/843,https://issues.webrtc.org/issues/42233563 +https://crbug.com/webrtc/8430,https://issues.webrtc.org/issues/42233564 +https://crbug.com/webrtc/8431,https://issues.webrtc.org/issues/42233565 +https://crbug.com/webrtc/8432,https://issues.webrtc.org/issues/42233566 +https://crbug.com/webrtc/8433,https://issues.webrtc.org/issues/42233567 +https://crbug.com/webrtc/8434,https://issues.webrtc.org/issues/42233568 +https://crbug.com/webrtc/8435,https://issues.webrtc.org/issues/42233569 +https://crbug.com/webrtc/8436,https://issues.webrtc.org/issues/42233570 +https://crbug.com/webrtc/8437,https://issues.webrtc.org/issues/42233571 +https://crbug.com/webrtc/8438,https://issues.webrtc.org/issues/42233572 +https://crbug.com/webrtc/8439,https://issues.webrtc.org/issues/42233573 +https://crbug.com/webrtc/844,https://issues.webrtc.org/issues/42233574 +https://crbug.com/webrtc/8440,https://issues.webrtc.org/issues/42233575 +https://crbug.com/webrtc/8441,https://issues.webrtc.org/issues/42233576 +https://crbug.com/webrtc/8442,https://issues.webrtc.org/issues/42233577 +https://crbug.com/webrtc/8443,https://issues.webrtc.org/issues/42233578 +https://crbug.com/webrtc/8444,https://issues.webrtc.org/issues/42233579 +https://crbug.com/webrtc/8445,https://issues.webrtc.org/issues/42233580 +https://crbug.com/webrtc/8446,https://issues.webrtc.org/issues/42233581 
+https://crbug.com/webrtc/8447,https://issues.webrtc.org/issues/42233582 +https://crbug.com/webrtc/8448,https://issues.webrtc.org/issues/42233583 +https://crbug.com/webrtc/8449,https://issues.webrtc.org/issues/42233584 +https://crbug.com/webrtc/845,https://issues.webrtc.org/issues/42233585 +https://crbug.com/webrtc/8450,https://issues.webrtc.org/issues/42233586 +https://crbug.com/webrtc/8451,https://issues.webrtc.org/issues/42233587 +https://crbug.com/webrtc/8452,https://issues.webrtc.org/issues/42233588 +https://crbug.com/webrtc/8453,https://issues.webrtc.org/issues/42233589 +https://crbug.com/webrtc/8454,https://issues.webrtc.org/issues/42233590 +https://crbug.com/webrtc/8455,https://issues.webrtc.org/issues/42233591 +https://crbug.com/webrtc/8456,https://issues.webrtc.org/issues/42233592 +https://crbug.com/webrtc/8457,https://issues.webrtc.org/issues/42233593 +https://crbug.com/webrtc/8458,https://issues.webrtc.org/issues/42233594 +https://crbug.com/webrtc/8459,https://issues.webrtc.org/issues/42233595 +https://crbug.com/webrtc/846,https://issues.webrtc.org/issues/42233596 +https://crbug.com/webrtc/8460,https://issues.webrtc.org/issues/42233597 +https://crbug.com/webrtc/8461,https://issues.webrtc.org/issues/42233598 +https://crbug.com/webrtc/8462,https://issues.webrtc.org/issues/42233599 +https://crbug.com/webrtc/8463,https://issues.webrtc.org/issues/42233600 +https://crbug.com/webrtc/8464,https://issues.webrtc.org/issues/42233601 +https://crbug.com/webrtc/8465,https://issues.webrtc.org/issues/42233602 +https://crbug.com/webrtc/8466,https://issues.webrtc.org/issues/42233603 +https://crbug.com/webrtc/8467,https://issues.webrtc.org/issues/42233604 +https://crbug.com/webrtc/8468,https://issues.webrtc.org/issues/42233605 +https://crbug.com/webrtc/8469,https://issues.webrtc.org/issues/42233606 +https://crbug.com/webrtc/847,https://issues.webrtc.org/issues/42233607 +https://crbug.com/webrtc/8470,https://issues.webrtc.org/issues/42233608 +https://crbug.com/webrtc/8471,https://issues.webrtc.org/issues/42233609 +https://crbug.com/webrtc/8472,https://issues.webrtc.org/issues/42233610 +https://crbug.com/webrtc/8474,https://issues.webrtc.org/issues/42233611 +https://crbug.com/webrtc/8475,https://issues.webrtc.org/issues/42233612 +https://crbug.com/webrtc/8476,https://issues.webrtc.org/issues/42233613 +https://crbug.com/webrtc/8477,https://issues.webrtc.org/issues/42233614 +https://crbug.com/webrtc/8478,https://issues.webrtc.org/issues/42233615 +https://crbug.com/webrtc/8479,https://issues.webrtc.org/issues/42233616 +https://crbug.com/webrtc/848,https://issues.webrtc.org/issues/42233617 +https://crbug.com/webrtc/8480,https://issues.webrtc.org/issues/42233618 +https://crbug.com/webrtc/8481,https://issues.webrtc.org/issues/42233619 +https://crbug.com/webrtc/8482,https://issues.webrtc.org/issues/42233620 +https://crbug.com/webrtc/8483,https://issues.webrtc.org/issues/42233621 +https://crbug.com/webrtc/8484,https://issues.webrtc.org/issues/42233622 +https://crbug.com/webrtc/8485,https://issues.webrtc.org/issues/42233623 +https://crbug.com/webrtc/8486,https://issues.webrtc.org/issues/42233624 +https://crbug.com/webrtc/8487,https://issues.webrtc.org/issues/42233625 +https://crbug.com/webrtc/8488,https://issues.webrtc.org/issues/42233626 +https://crbug.com/webrtc/8489,https://issues.webrtc.org/issues/42233627 +https://crbug.com/webrtc/849,https://issues.webrtc.org/issues/42233628 +https://crbug.com/webrtc/8490,https://issues.webrtc.org/issues/42233629 
+https://crbug.com/webrtc/8491,https://issues.webrtc.org/issues/42233630 +https://crbug.com/webrtc/8492,https://issues.webrtc.org/issues/42233631 +https://crbug.com/webrtc/8493,https://issues.webrtc.org/issues/42233632 +https://crbug.com/webrtc/8494,https://issues.webrtc.org/issues/42233633 +https://crbug.com/webrtc/8495,https://issues.webrtc.org/issues/42233634 +https://crbug.com/webrtc/8496,https://issues.webrtc.org/issues/42233635 +https://crbug.com/webrtc/8497,https://issues.webrtc.org/issues/42233636 +https://crbug.com/webrtc/8498,https://issues.webrtc.org/issues/42233637 +https://crbug.com/webrtc/8499,https://issues.webrtc.org/issues/42233638 +https://crbug.com/webrtc/85,https://issues.webrtc.org/issues/42233639 +https://crbug.com/webrtc/850,https://issues.webrtc.org/issues/42233640 +https://crbug.com/webrtc/8500,https://issues.webrtc.org/issues/42233641 +https://crbug.com/webrtc/8501,https://issues.webrtc.org/issues/42233642 +https://crbug.com/webrtc/8502,https://issues.webrtc.org/issues/42233643 +https://crbug.com/webrtc/8503,https://issues.webrtc.org/issues/42233644 +https://crbug.com/webrtc/8504,https://issues.webrtc.org/issues/42233645 +https://crbug.com/webrtc/8505,https://issues.webrtc.org/issues/42233646 +https://crbug.com/webrtc/8507,https://issues.webrtc.org/issues/42233647 +https://crbug.com/webrtc/8508,https://issues.webrtc.org/issues/42233648 +https://crbug.com/webrtc/8509,https://issues.webrtc.org/issues/42233649 +https://crbug.com/webrtc/851,https://issues.webrtc.org/issues/42233650 +https://crbug.com/webrtc/8510,https://issues.webrtc.org/issues/42233651 +https://crbug.com/webrtc/8511,https://issues.webrtc.org/issues/42233652 +https://crbug.com/webrtc/8512,https://issues.webrtc.org/issues/42233653 +https://crbug.com/webrtc/8513,https://issues.webrtc.org/issues/42233654 +https://crbug.com/webrtc/8514,https://issues.webrtc.org/issues/42233655 +https://crbug.com/webrtc/8515,https://issues.webrtc.org/issues/42233656 +https://crbug.com/webrtc/8516,https://issues.webrtc.org/issues/42233657 +https://crbug.com/webrtc/8517,https://issues.webrtc.org/issues/42233658 +https://crbug.com/webrtc/8518,https://issues.webrtc.org/issues/42233659 +https://crbug.com/webrtc/8519,https://issues.webrtc.org/issues/42233660 +https://crbug.com/webrtc/852,https://issues.webrtc.org/issues/42233661 +https://crbug.com/webrtc/8520,https://issues.webrtc.org/issues/42233662 +https://crbug.com/webrtc/8521,https://issues.webrtc.org/issues/42233663 +https://crbug.com/webrtc/8522,https://issues.webrtc.org/issues/42233664 +https://crbug.com/webrtc/8523,https://issues.webrtc.org/issues/42233665 +https://crbug.com/webrtc/8524,https://issues.webrtc.org/issues/42233666 +https://crbug.com/webrtc/8525,https://issues.webrtc.org/issues/42233667 +https://crbug.com/webrtc/8526,https://issues.webrtc.org/issues/42233668 +https://crbug.com/webrtc/8527,https://issues.webrtc.org/issues/42233669 +https://crbug.com/webrtc/8528,https://issues.webrtc.org/issues/42233670 +https://crbug.com/webrtc/8529,https://issues.webrtc.org/issues/42233671 +https://crbug.com/webrtc/853,https://issues.webrtc.org/issues/42233672 +https://crbug.com/webrtc/8531,https://issues.webrtc.org/issues/42233673 +https://crbug.com/webrtc/8532,https://issues.webrtc.org/issues/42233674 +https://crbug.com/webrtc/8533,https://issues.webrtc.org/issues/42233675 +https://crbug.com/webrtc/8534,https://issues.webrtc.org/issues/42233676 +https://crbug.com/webrtc/8535,https://issues.webrtc.org/issues/42233677 
+https://crbug.com/webrtc/854,https://issues.webrtc.org/issues/42233678 +https://crbug.com/webrtc/8540,https://issues.webrtc.org/issues/42233679 +https://crbug.com/webrtc/8541,https://issues.webrtc.org/issues/42233680 +https://crbug.com/webrtc/8542,https://issues.webrtc.org/issues/42233681 +https://crbug.com/webrtc/8543,https://issues.webrtc.org/issues/42233682 +https://crbug.com/webrtc/8544,https://issues.webrtc.org/issues/42233683 +https://crbug.com/webrtc/8545,https://issues.webrtc.org/issues/42233684 +https://crbug.com/webrtc/8546,https://issues.webrtc.org/issues/42233685 +https://crbug.com/webrtc/8547,https://issues.webrtc.org/issues/42233686 +https://crbug.com/webrtc/8548,https://issues.webrtc.org/issues/42233687 +https://crbug.com/webrtc/8549,https://issues.webrtc.org/issues/42233688 +https://crbug.com/webrtc/855,https://issues.webrtc.org/issues/42233689 +https://crbug.com/webrtc/8550,https://issues.webrtc.org/issues/42233690 +https://crbug.com/webrtc/8551,https://issues.webrtc.org/issues/42233691 +https://crbug.com/webrtc/8552,https://issues.webrtc.org/issues/42233692 +https://crbug.com/webrtc/8553,https://issues.webrtc.org/issues/42233693 +https://crbug.com/webrtc/8554,https://issues.webrtc.org/issues/42233694 +https://crbug.com/webrtc/8555,https://issues.webrtc.org/issues/42233695 +https://crbug.com/webrtc/8556,https://issues.webrtc.org/issues/42233696 +https://crbug.com/webrtc/8557,https://issues.webrtc.org/issues/42233697 +https://crbug.com/webrtc/8558,https://issues.webrtc.org/issues/42233698 +https://crbug.com/webrtc/8559,https://issues.webrtc.org/issues/42233699 +https://crbug.com/webrtc/856,https://issues.webrtc.org/issues/42233700 +https://crbug.com/webrtc/8560,https://issues.webrtc.org/issues/42233701 +https://crbug.com/webrtc/8561,https://issues.webrtc.org/issues/42233702 +https://crbug.com/webrtc/8563,https://issues.webrtc.org/issues/42233703 +https://crbug.com/webrtc/8564,https://issues.webrtc.org/issues/42233704 +https://crbug.com/webrtc/8565,https://issues.webrtc.org/issues/42233705 +https://crbug.com/webrtc/8566,https://issues.webrtc.org/issues/42233706 +https://crbug.com/webrtc/8567,https://issues.webrtc.org/issues/42233707 +https://crbug.com/webrtc/8568,https://issues.webrtc.org/issues/42233708 +https://crbug.com/webrtc/8569,https://issues.webrtc.org/issues/42233709 +https://crbug.com/webrtc/8570,https://issues.webrtc.org/issues/42233710 +https://crbug.com/webrtc/8572,https://issues.webrtc.org/issues/42233711 +https://crbug.com/webrtc/8573,https://issues.webrtc.org/issues/42233712 +https://crbug.com/webrtc/8575,https://issues.webrtc.org/issues/42233713 +https://crbug.com/webrtc/8576,https://issues.webrtc.org/issues/42233714 +https://crbug.com/webrtc/8577,https://issues.webrtc.org/issues/42233715 +https://crbug.com/webrtc/8578,https://issues.webrtc.org/issues/42233716 +https://crbug.com/webrtc/8579,https://issues.webrtc.org/issues/42233717 +https://crbug.com/webrtc/858,https://issues.webrtc.org/issues/42233718 +https://crbug.com/webrtc/8580,https://issues.webrtc.org/issues/42233719 +https://crbug.com/webrtc/8581,https://issues.webrtc.org/issues/42233720 +https://crbug.com/webrtc/8582,https://issues.webrtc.org/issues/42233721 +https://crbug.com/webrtc/8583,https://issues.webrtc.org/issues/42233722 +https://crbug.com/webrtc/8584,https://issues.webrtc.org/issues/42233723 +https://crbug.com/webrtc/8585,https://issues.webrtc.org/issues/42233724 +https://crbug.com/webrtc/8587,https://issues.webrtc.org/issues/42233725 
+https://crbug.com/webrtc/8588,https://issues.webrtc.org/issues/42233726 +https://crbug.com/webrtc/859,https://issues.webrtc.org/issues/42233727 +https://crbug.com/webrtc/8590,https://issues.webrtc.org/issues/42233728 +https://crbug.com/webrtc/8591,https://issues.webrtc.org/issues/42233729 +https://crbug.com/webrtc/8592,https://issues.webrtc.org/issues/42233730 +https://crbug.com/webrtc/8593,https://issues.webrtc.org/issues/42233731 +https://crbug.com/webrtc/8594,https://issues.webrtc.org/issues/42233732 +https://crbug.com/webrtc/8595,https://issues.webrtc.org/issues/42233733 +https://crbug.com/webrtc/8596,https://issues.webrtc.org/issues/42233734 +https://crbug.com/webrtc/8597,https://issues.webrtc.org/issues/42233735 +https://crbug.com/webrtc/8598,https://issues.webrtc.org/issues/42233736 +https://crbug.com/webrtc/8599,https://issues.webrtc.org/issues/42233737 +https://crbug.com/webrtc/86,https://issues.webrtc.org/issues/42233738 +https://crbug.com/webrtc/860,https://issues.webrtc.org/issues/42233739 +https://crbug.com/webrtc/8600,https://issues.webrtc.org/issues/42233740 +https://crbug.com/webrtc/8601,https://issues.webrtc.org/issues/42233741 +https://crbug.com/webrtc/8602,https://issues.webrtc.org/issues/42233742 +https://crbug.com/webrtc/8603,https://issues.webrtc.org/issues/42233743 +https://crbug.com/webrtc/8604,https://issues.webrtc.org/issues/42233744 +https://crbug.com/webrtc/8605,https://issues.webrtc.org/issues/42233745 +https://crbug.com/webrtc/8606,https://issues.webrtc.org/issues/42233746 +https://crbug.com/webrtc/8607,https://issues.webrtc.org/issues/42233747 +https://crbug.com/webrtc/8608,https://issues.webrtc.org/issues/42233748 +https://crbug.com/webrtc/8609,https://issues.webrtc.org/issues/42233749 +https://crbug.com/webrtc/861,https://issues.webrtc.org/issues/42233750 +https://crbug.com/webrtc/8610,https://issues.webrtc.org/issues/42233751 +https://crbug.com/webrtc/8611,https://issues.webrtc.org/issues/42233752 +https://crbug.com/webrtc/8613,https://issues.webrtc.org/issues/42233753 +https://crbug.com/webrtc/8614,https://issues.webrtc.org/issues/42233754 +https://crbug.com/webrtc/8615,https://issues.webrtc.org/issues/42233755 +https://crbug.com/webrtc/8616,https://issues.webrtc.org/issues/42233756 +https://crbug.com/webrtc/8617,https://issues.webrtc.org/issues/42233757 +https://crbug.com/webrtc/8618,https://issues.webrtc.org/issues/42233758 +https://crbug.com/webrtc/8619,https://issues.webrtc.org/issues/42233759 +https://crbug.com/webrtc/862,https://issues.webrtc.org/issues/42233760 +https://crbug.com/webrtc/8620,https://issues.webrtc.org/issues/42233761 +https://crbug.com/webrtc/8621,https://issues.webrtc.org/issues/42233762 +https://crbug.com/webrtc/8622,https://issues.webrtc.org/issues/42233763 +https://crbug.com/webrtc/8623,https://issues.webrtc.org/issues/42233764 +https://crbug.com/webrtc/8624,https://issues.webrtc.org/issues/42233765 +https://crbug.com/webrtc/8625,https://issues.webrtc.org/issues/42233766 +https://crbug.com/webrtc/8626,https://issues.webrtc.org/issues/42233767 +https://crbug.com/webrtc/8627,https://issues.webrtc.org/issues/42233768 +https://crbug.com/webrtc/8628,https://issues.webrtc.org/issues/42233769 +https://crbug.com/webrtc/8629,https://issues.webrtc.org/issues/42233770 +https://crbug.com/webrtc/863,https://issues.webrtc.org/issues/42233771 +https://crbug.com/webrtc/8630,https://issues.webrtc.org/issues/42233772 +https://crbug.com/webrtc/8631,https://issues.webrtc.org/issues/42233773 
+https://crbug.com/webrtc/8632,https://issues.webrtc.org/issues/42233774 +https://crbug.com/webrtc/8633,https://issues.webrtc.org/issues/42233775 +https://crbug.com/webrtc/8634,https://issues.webrtc.org/issues/42233776 +https://crbug.com/webrtc/8635,https://issues.webrtc.org/issues/42233777 +https://crbug.com/webrtc/8636,https://issues.webrtc.org/issues/42233778 +https://crbug.com/webrtc/8637,https://issues.webrtc.org/issues/42233779 +https://crbug.com/webrtc/8638,https://issues.webrtc.org/issues/42233780 +https://crbug.com/webrtc/8639,https://issues.webrtc.org/issues/42233781 +https://crbug.com/webrtc/864,https://issues.webrtc.org/issues/42233782 +https://crbug.com/webrtc/8640,https://issues.webrtc.org/issues/42233783 +https://crbug.com/webrtc/8641,https://issues.webrtc.org/issues/42233784 +https://crbug.com/webrtc/8642,https://issues.webrtc.org/issues/42233785 +https://crbug.com/webrtc/8643,https://issues.webrtc.org/issues/42233786 +https://crbug.com/webrtc/8644,https://issues.webrtc.org/issues/42233787 +https://crbug.com/webrtc/8645,https://issues.webrtc.org/issues/42233788 +https://crbug.com/webrtc/8646,https://issues.webrtc.org/issues/42233789 +https://crbug.com/webrtc/8647,https://issues.webrtc.org/issues/42233790 +https://crbug.com/webrtc/8648,https://issues.webrtc.org/issues/42233791 +https://crbug.com/webrtc/8649,https://issues.webrtc.org/issues/42233792 +https://crbug.com/webrtc/865,https://issues.webrtc.org/issues/42233793 +https://crbug.com/webrtc/8650,https://issues.webrtc.org/issues/42233794 +https://crbug.com/webrtc/8651,https://issues.webrtc.org/issues/42233795 +https://crbug.com/webrtc/8653,https://issues.webrtc.org/issues/42233796 +https://crbug.com/webrtc/8654,https://issues.webrtc.org/issues/42233797 +https://crbug.com/webrtc/8655,https://issues.webrtc.org/issues/42233798 +https://crbug.com/webrtc/8656,https://issues.webrtc.org/issues/42233799 +https://crbug.com/webrtc/8657,https://issues.webrtc.org/issues/42233800 +https://crbug.com/webrtc/8658,https://issues.webrtc.org/issues/42233801 +https://crbug.com/webrtc/8659,https://issues.webrtc.org/issues/42233802 +https://crbug.com/webrtc/866,https://issues.webrtc.org/issues/42233803 +https://crbug.com/webrtc/8660,https://issues.webrtc.org/issues/42233804 +https://crbug.com/webrtc/8661,https://issues.webrtc.org/issues/42233805 +https://crbug.com/webrtc/8662,https://issues.webrtc.org/issues/42233806 +https://crbug.com/webrtc/8663,https://issues.webrtc.org/issues/42233807 +https://crbug.com/webrtc/8665,https://issues.webrtc.org/issues/42233808 +https://crbug.com/webrtc/8666,https://issues.webrtc.org/issues/42233809 +https://crbug.com/webrtc/8667,https://issues.webrtc.org/issues/42233810 +https://crbug.com/webrtc/8668,https://issues.webrtc.org/issues/42233811 +https://crbug.com/webrtc/8669,https://issues.webrtc.org/issues/42233812 +https://crbug.com/webrtc/867,https://issues.webrtc.org/issues/42233813 +https://crbug.com/webrtc/8670,https://issues.webrtc.org/issues/42233814 +https://crbug.com/webrtc/8671,https://issues.webrtc.org/issues/42233815 +https://crbug.com/webrtc/8672,https://issues.webrtc.org/issues/42233816 +https://crbug.com/webrtc/8673,https://issues.webrtc.org/issues/42233817 +https://crbug.com/webrtc/8674,https://issues.webrtc.org/issues/42233818 +https://crbug.com/webrtc/8675,https://issues.webrtc.org/issues/42233819 +https://crbug.com/webrtc/8676,https://issues.webrtc.org/issues/42233820 +https://crbug.com/webrtc/8677,https://issues.webrtc.org/issues/42233821 
+https://crbug.com/webrtc/8678,https://issues.webrtc.org/issues/42233822 +https://crbug.com/webrtc/8679,https://issues.webrtc.org/issues/42233823 +https://crbug.com/webrtc/868,https://issues.webrtc.org/issues/42233824 +https://crbug.com/webrtc/8680,https://issues.webrtc.org/issues/42233825 +https://crbug.com/webrtc/8681,https://issues.webrtc.org/issues/42233826 +https://crbug.com/webrtc/8682,https://issues.webrtc.org/issues/42233827 +https://crbug.com/webrtc/8683,https://issues.webrtc.org/issues/42233828 +https://crbug.com/webrtc/8684,https://issues.webrtc.org/issues/42233829 +https://crbug.com/webrtc/8685,https://issues.webrtc.org/issues/42233830 +https://crbug.com/webrtc/8686,https://issues.webrtc.org/issues/42233831 +https://crbug.com/webrtc/8687,https://issues.webrtc.org/issues/42233832 +https://crbug.com/webrtc/8688,https://issues.webrtc.org/issues/42233833 +https://crbug.com/webrtc/8689,https://issues.webrtc.org/issues/42233834 +https://crbug.com/webrtc/869,https://issues.webrtc.org/issues/42233835 +https://crbug.com/webrtc/8691,https://issues.webrtc.org/issues/42233836 +https://crbug.com/webrtc/8692,https://issues.webrtc.org/issues/42233837 +https://crbug.com/webrtc/8693,https://issues.webrtc.org/issues/42233838 +https://crbug.com/webrtc/8694,https://issues.webrtc.org/issues/42233839 +https://crbug.com/webrtc/8695,https://issues.webrtc.org/issues/42233840 +https://crbug.com/webrtc/8696,https://issues.webrtc.org/issues/42233841 +https://crbug.com/webrtc/8697,https://issues.webrtc.org/issues/42233842 +https://crbug.com/webrtc/8699,https://issues.webrtc.org/issues/42233843 +https://crbug.com/webrtc/87,https://issues.webrtc.org/issues/42233844 +https://crbug.com/webrtc/870,https://issues.webrtc.org/issues/42233845 +https://crbug.com/webrtc/8700,https://issues.webrtc.org/issues/42233846 +https://crbug.com/webrtc/8701,https://issues.webrtc.org/issues/42233847 +https://crbug.com/webrtc/8702,https://issues.webrtc.org/issues/42233848 +https://crbug.com/webrtc/8703,https://issues.webrtc.org/issues/42233849 +https://crbug.com/webrtc/8704,https://issues.webrtc.org/issues/42233850 +https://crbug.com/webrtc/8705,https://issues.webrtc.org/issues/42233851 +https://crbug.com/webrtc/8706,https://issues.webrtc.org/issues/42233852 +https://crbug.com/webrtc/8707,https://issues.webrtc.org/issues/42233853 +https://crbug.com/webrtc/8708,https://issues.webrtc.org/issues/42233854 +https://crbug.com/webrtc/8709,https://issues.webrtc.org/issues/42233855 +https://crbug.com/webrtc/871,https://issues.webrtc.org/issues/42233856 +https://crbug.com/webrtc/8710,https://issues.webrtc.org/issues/42233857 +https://crbug.com/webrtc/8711,https://issues.webrtc.org/issues/42233858 +https://crbug.com/webrtc/8712,https://issues.webrtc.org/issues/42233859 +https://crbug.com/webrtc/8713,https://issues.webrtc.org/issues/42233860 +https://crbug.com/webrtc/8714,https://issues.webrtc.org/issues/42233861 +https://crbug.com/webrtc/8715,https://issues.webrtc.org/issues/42233862 +https://crbug.com/webrtc/8716,https://issues.webrtc.org/issues/42233863 +https://crbug.com/webrtc/8717,https://issues.webrtc.org/issues/42233864 +https://crbug.com/webrtc/8718,https://issues.webrtc.org/issues/42233865 +https://crbug.com/webrtc/8719,https://issues.webrtc.org/issues/42233866 +https://crbug.com/webrtc/872,https://issues.webrtc.org/issues/42233867 +https://crbug.com/webrtc/8720,https://issues.webrtc.org/issues/42233868 +https://crbug.com/webrtc/8721,https://issues.webrtc.org/issues/42233869 
+https://crbug.com/webrtc/8722,https://issues.webrtc.org/issues/42233870 +https://crbug.com/webrtc/8723,https://issues.webrtc.org/issues/42233871 +https://crbug.com/webrtc/8724,https://issues.webrtc.org/issues/42233872 +https://crbug.com/webrtc/8725,https://issues.webrtc.org/issues/42233873 +https://crbug.com/webrtc/8726,https://issues.webrtc.org/issues/42233874 +https://crbug.com/webrtc/8727,https://issues.webrtc.org/issues/42233875 +https://crbug.com/webrtc/8728,https://issues.webrtc.org/issues/42233876 +https://crbug.com/webrtc/8729,https://issues.webrtc.org/issues/42233877 +https://crbug.com/webrtc/873,https://issues.webrtc.org/issues/42233878 +https://crbug.com/webrtc/8731,https://issues.webrtc.org/issues/42233879 +https://crbug.com/webrtc/8732,https://issues.webrtc.org/issues/42233880 +https://crbug.com/webrtc/8733,https://issues.webrtc.org/issues/42233881 +https://crbug.com/webrtc/8735,https://issues.webrtc.org/issues/42233882 +https://crbug.com/webrtc/8736,https://issues.webrtc.org/issues/42233883 +https://crbug.com/webrtc/8737,https://issues.webrtc.org/issues/42233884 +https://crbug.com/webrtc/8738,https://issues.webrtc.org/issues/42233885 +https://crbug.com/webrtc/8739,https://issues.webrtc.org/issues/42233886 +https://crbug.com/webrtc/874,https://issues.webrtc.org/issues/42233887 +https://crbug.com/webrtc/8740,https://issues.webrtc.org/issues/42233888 +https://crbug.com/webrtc/8741,https://issues.webrtc.org/issues/42233889 +https://crbug.com/webrtc/8743,https://issues.webrtc.org/issues/42233890 +https://crbug.com/webrtc/8744,https://issues.webrtc.org/issues/42233891 +https://crbug.com/webrtc/8745,https://issues.webrtc.org/issues/42233892 +https://crbug.com/webrtc/8746,https://issues.webrtc.org/issues/42233893 +https://crbug.com/webrtc/8747,https://issues.webrtc.org/issues/42233894 +https://crbug.com/webrtc/8748,https://issues.webrtc.org/issues/42233895 +https://crbug.com/webrtc/8749,https://issues.webrtc.org/issues/42233896 +https://crbug.com/webrtc/875,https://issues.webrtc.org/issues/42233897 +https://crbug.com/webrtc/8750,https://issues.webrtc.org/issues/42233898 +https://crbug.com/webrtc/8751,https://issues.webrtc.org/issues/42233899 +https://crbug.com/webrtc/8752,https://issues.webrtc.org/issues/42233900 +https://crbug.com/webrtc/8753,https://issues.webrtc.org/issues/42233901 +https://crbug.com/webrtc/8754,https://issues.webrtc.org/issues/42233902 +https://crbug.com/webrtc/8755,https://issues.webrtc.org/issues/42233903 +https://crbug.com/webrtc/8756,https://issues.webrtc.org/issues/42233904 +https://crbug.com/webrtc/8757,https://issues.webrtc.org/issues/42233905 +https://crbug.com/webrtc/8758,https://issues.webrtc.org/issues/42233906 +https://crbug.com/webrtc/8759,https://issues.webrtc.org/issues/42233907 +https://crbug.com/webrtc/876,https://issues.webrtc.org/issues/42233908 +https://crbug.com/webrtc/8760,https://issues.webrtc.org/issues/42233909 +https://crbug.com/webrtc/8761,https://issues.webrtc.org/issues/42233910 +https://crbug.com/webrtc/8762,https://issues.webrtc.org/issues/42233911 +https://crbug.com/webrtc/8763,https://issues.webrtc.org/issues/42233912 +https://crbug.com/webrtc/8764,https://issues.webrtc.org/issues/42233913 +https://crbug.com/webrtc/8765,https://issues.webrtc.org/issues/42233914 +https://crbug.com/webrtc/8766,https://issues.webrtc.org/issues/42233915 +https://crbug.com/webrtc/8767,https://issues.webrtc.org/issues/42233916 +https://crbug.com/webrtc/8768,https://issues.webrtc.org/issues/42233917 
+https://crbug.com/webrtc/8769,https://issues.webrtc.org/issues/42233918 +https://crbug.com/webrtc/877,https://issues.webrtc.org/issues/42233919 +https://crbug.com/webrtc/8770,https://issues.webrtc.org/issues/42233920 +https://crbug.com/webrtc/8771,https://issues.webrtc.org/issues/42233921 +https://crbug.com/webrtc/8772,https://issues.webrtc.org/issues/42233922 +https://crbug.com/webrtc/8773,https://issues.webrtc.org/issues/42233923 +https://crbug.com/webrtc/8774,https://issues.webrtc.org/issues/42233924 +https://crbug.com/webrtc/8775,https://issues.webrtc.org/issues/42233925 +https://crbug.com/webrtc/8776,https://issues.webrtc.org/issues/42233926 +https://crbug.com/webrtc/8777,https://issues.webrtc.org/issues/42233927 +https://crbug.com/webrtc/8778,https://issues.webrtc.org/issues/42233928 +https://crbug.com/webrtc/8779,https://issues.webrtc.org/issues/42233929 +https://crbug.com/webrtc/878,https://issues.webrtc.org/issues/42233930 +https://crbug.com/webrtc/8780,https://issues.webrtc.org/issues/42233931 +https://crbug.com/webrtc/8781,https://issues.webrtc.org/issues/42233932 +https://crbug.com/webrtc/8782,https://issues.webrtc.org/issues/42233933 +https://crbug.com/webrtc/8783,https://issues.webrtc.org/issues/42233934 +https://crbug.com/webrtc/8784,https://issues.webrtc.org/issues/42233935 +https://crbug.com/webrtc/8785,https://issues.webrtc.org/issues/42233936 +https://crbug.com/webrtc/8786,https://issues.webrtc.org/issues/42233937 +https://crbug.com/webrtc/8787,https://issues.webrtc.org/issues/42233938 +https://crbug.com/webrtc/8788,https://issues.webrtc.org/issues/42233939 +https://crbug.com/webrtc/8789,https://issues.webrtc.org/issues/42233940 +https://crbug.com/webrtc/879,https://issues.webrtc.org/issues/42233941 +https://crbug.com/webrtc/8790,https://issues.webrtc.org/issues/42233942 +https://crbug.com/webrtc/8791,https://issues.webrtc.org/issues/42233943 +https://crbug.com/webrtc/8792,https://issues.webrtc.org/issues/42233944 +https://crbug.com/webrtc/8793,https://issues.webrtc.org/issues/42233945 +https://crbug.com/webrtc/8794,https://issues.webrtc.org/issues/42233946 +https://crbug.com/webrtc/8795,https://issues.webrtc.org/issues/42233947 +https://crbug.com/webrtc/8796,https://issues.webrtc.org/issues/42233948 +https://crbug.com/webrtc/8797,https://issues.webrtc.org/issues/42233949 +https://crbug.com/webrtc/8798,https://issues.webrtc.org/issues/42233950 +https://crbug.com/webrtc/8799,https://issues.webrtc.org/issues/42233951 +https://crbug.com/webrtc/88,https://issues.webrtc.org/issues/42233952 +https://crbug.com/webrtc/880,https://issues.webrtc.org/issues/42233953 +https://crbug.com/webrtc/8800,https://issues.webrtc.org/issues/42233954 +https://crbug.com/webrtc/8801,https://issues.webrtc.org/issues/42233955 +https://crbug.com/webrtc/8802,https://issues.webrtc.org/issues/42233956 +https://crbug.com/webrtc/8803,https://issues.webrtc.org/issues/42233957 +https://crbug.com/webrtc/8804,https://issues.webrtc.org/issues/42233958 +https://crbug.com/webrtc/8805,https://issues.webrtc.org/issues/42233959 +https://crbug.com/webrtc/8806,https://issues.webrtc.org/issues/42233960 +https://crbug.com/webrtc/8807,https://issues.webrtc.org/issues/42233961 +https://crbug.com/webrtc/8808,https://issues.webrtc.org/issues/42233962 +https://crbug.com/webrtc/881,https://issues.webrtc.org/issues/42233963 +https://crbug.com/webrtc/8810,https://issues.webrtc.org/issues/42233964 +https://crbug.com/webrtc/8811,https://issues.webrtc.org/issues/42233965 
+https://crbug.com/webrtc/8812,https://issues.webrtc.org/issues/42233966 +https://crbug.com/webrtc/8813,https://issues.webrtc.org/issues/42233967 +https://crbug.com/webrtc/8814,https://issues.webrtc.org/issues/42233968 +https://crbug.com/webrtc/8815,https://issues.webrtc.org/issues/42233969 +https://crbug.com/webrtc/8816,https://issues.webrtc.org/issues/42233970 +https://crbug.com/webrtc/8817,https://issues.webrtc.org/issues/42233971 +https://crbug.com/webrtc/8818,https://issues.webrtc.org/issues/42233972 +https://crbug.com/webrtc/8819,https://issues.webrtc.org/issues/42233973 +https://crbug.com/webrtc/882,https://issues.webrtc.org/issues/42233974 +https://crbug.com/webrtc/8820,https://issues.webrtc.org/issues/42233975 +https://crbug.com/webrtc/8821,https://issues.webrtc.org/issues/42233976 +https://crbug.com/webrtc/8822,https://issues.webrtc.org/issues/42233977 +https://crbug.com/webrtc/8823,https://issues.webrtc.org/issues/42233978 +https://crbug.com/webrtc/8825,https://issues.webrtc.org/issues/42233979 +https://crbug.com/webrtc/8826,https://issues.webrtc.org/issues/42233980 +https://crbug.com/webrtc/8827,https://issues.webrtc.org/issues/42233981 +https://crbug.com/webrtc/8828,https://issues.webrtc.org/issues/42233982 +https://crbug.com/webrtc/8829,https://issues.webrtc.org/issues/42233983 +https://crbug.com/webrtc/883,https://issues.webrtc.org/issues/42233984 +https://crbug.com/webrtc/8830,https://issues.webrtc.org/issues/42233985 +https://crbug.com/webrtc/8831,https://issues.webrtc.org/issues/42233986 +https://crbug.com/webrtc/8832,https://issues.webrtc.org/issues/42233987 +https://crbug.com/webrtc/8833,https://issues.webrtc.org/issues/42233988 +https://crbug.com/webrtc/8834,https://issues.webrtc.org/issues/42233989 +https://crbug.com/webrtc/8835,https://issues.webrtc.org/issues/42233990 +https://crbug.com/webrtc/8836,https://issues.webrtc.org/issues/42233991 +https://crbug.com/webrtc/8837,https://issues.webrtc.org/issues/42233992 +https://crbug.com/webrtc/8838,https://issues.webrtc.org/issues/42233993 +https://crbug.com/webrtc/8839,https://issues.webrtc.org/issues/42233994 +https://crbug.com/webrtc/884,https://issues.webrtc.org/issues/42233995 +https://crbug.com/webrtc/8840,https://issues.webrtc.org/issues/42233996 +https://crbug.com/webrtc/8841,https://issues.webrtc.org/issues/42233997 +https://crbug.com/webrtc/8842,https://issues.webrtc.org/issues/42233998 +https://crbug.com/webrtc/8843,https://issues.webrtc.org/issues/42233999 +https://crbug.com/webrtc/8844,https://issues.webrtc.org/issues/42234000 +https://crbug.com/webrtc/8845,https://issues.webrtc.org/issues/42234001 +https://crbug.com/webrtc/8846,https://issues.webrtc.org/issues/42234002 +https://crbug.com/webrtc/8847,https://issues.webrtc.org/issues/42234003 +https://crbug.com/webrtc/8848,https://issues.webrtc.org/issues/42234004 +https://crbug.com/webrtc/8849,https://issues.webrtc.org/issues/42234005 +https://crbug.com/webrtc/885,https://issues.webrtc.org/issues/42234006 +https://crbug.com/webrtc/8850,https://issues.webrtc.org/issues/42234007 +https://crbug.com/webrtc/8851,https://issues.webrtc.org/issues/42234008 +https://crbug.com/webrtc/8852,https://issues.webrtc.org/issues/42234009 +https://crbug.com/webrtc/8853,https://issues.webrtc.org/issues/42234010 +https://crbug.com/webrtc/8854,https://issues.webrtc.org/issues/42234011 +https://crbug.com/webrtc/8855,https://issues.webrtc.org/issues/42234012 +https://crbug.com/webrtc/8856,https://issues.webrtc.org/issues/42234013 
+https://crbug.com/webrtc/8857,https://issues.webrtc.org/issues/42234014 +https://crbug.com/webrtc/8858,https://issues.webrtc.org/issues/42234015 +https://crbug.com/webrtc/8859,https://issues.webrtc.org/issues/42234016 +https://crbug.com/webrtc/886,https://issues.webrtc.org/issues/42234017 +https://crbug.com/webrtc/8861,https://issues.webrtc.org/issues/42234018 +https://crbug.com/webrtc/8862,https://issues.webrtc.org/issues/42234019 +https://crbug.com/webrtc/8863,https://issues.webrtc.org/issues/42234020 +https://crbug.com/webrtc/8864,https://issues.webrtc.org/issues/42234021 +https://crbug.com/webrtc/8865,https://issues.webrtc.org/issues/42234022 +https://crbug.com/webrtc/8866,https://issues.webrtc.org/issues/42234023 +https://crbug.com/webrtc/8867,https://issues.webrtc.org/issues/42234024 +https://crbug.com/webrtc/8868,https://issues.webrtc.org/issues/42234025 +https://crbug.com/webrtc/8869,https://issues.webrtc.org/issues/42234026 +https://crbug.com/webrtc/887,https://issues.webrtc.org/issues/42234027 +https://crbug.com/webrtc/8870,https://issues.webrtc.org/issues/42234028 +https://crbug.com/webrtc/8871,https://issues.webrtc.org/issues/42234029 +https://crbug.com/webrtc/8872,https://issues.webrtc.org/issues/42234030 +https://crbug.com/webrtc/8873,https://issues.webrtc.org/issues/42234031 +https://crbug.com/webrtc/8874,https://issues.webrtc.org/issues/42234032 +https://crbug.com/webrtc/8875,https://issues.webrtc.org/issues/42234033 +https://crbug.com/webrtc/8876,https://issues.webrtc.org/issues/42234034 +https://crbug.com/webrtc/8877,https://issues.webrtc.org/issues/42234035 +https://crbug.com/webrtc/8878,https://issues.webrtc.org/issues/42234036 +https://crbug.com/webrtc/8879,https://issues.webrtc.org/issues/42234037 +https://crbug.com/webrtc/888,https://issues.webrtc.org/issues/42234038 +https://crbug.com/webrtc/8880,https://issues.webrtc.org/issues/42234039 +https://crbug.com/webrtc/8881,https://issues.webrtc.org/issues/42234040 +https://crbug.com/webrtc/8882,https://issues.webrtc.org/issues/42234041 +https://crbug.com/webrtc/8883,https://issues.webrtc.org/issues/42234042 +https://crbug.com/webrtc/8884,https://issues.webrtc.org/issues/42234043 +https://crbug.com/webrtc/8885,https://issues.webrtc.org/issues/42234044 +https://crbug.com/webrtc/8886,https://issues.webrtc.org/issues/42234045 +https://crbug.com/webrtc/8887,https://issues.webrtc.org/issues/42234046 +https://crbug.com/webrtc/8888,https://issues.webrtc.org/issues/42234047 +https://crbug.com/webrtc/8889,https://issues.webrtc.org/issues/42234048 +https://crbug.com/webrtc/889,https://issues.webrtc.org/issues/42234049 +https://crbug.com/webrtc/8890,https://issues.webrtc.org/issues/42234050 +https://crbug.com/webrtc/8891,https://issues.webrtc.org/issues/42234051 +https://crbug.com/webrtc/8892,https://issues.webrtc.org/issues/42234052 +https://crbug.com/webrtc/8893,https://issues.webrtc.org/issues/42234053 +https://crbug.com/webrtc/8894,https://issues.webrtc.org/issues/42234054 +https://crbug.com/webrtc/8896,https://issues.webrtc.org/issues/42234055 +https://crbug.com/webrtc/8897,https://issues.webrtc.org/issues/42234056 +https://crbug.com/webrtc/8898,https://issues.webrtc.org/issues/42234057 +https://crbug.com/webrtc/8899,https://issues.webrtc.org/issues/42234058 +https://crbug.com/webrtc/89,https://issues.webrtc.org/issues/42234059 +https://crbug.com/webrtc/890,https://issues.webrtc.org/issues/42234060 +https://crbug.com/webrtc/8900,https://issues.webrtc.org/issues/42234061 
+https://crbug.com/webrtc/8901,https://issues.webrtc.org/issues/42234062 +https://crbug.com/webrtc/8902,https://issues.webrtc.org/issues/42234063 +https://crbug.com/webrtc/8903,https://issues.webrtc.org/issues/42234064 +https://crbug.com/webrtc/8904,https://issues.webrtc.org/issues/42234065 +https://crbug.com/webrtc/8905,https://issues.webrtc.org/issues/42234066 +https://crbug.com/webrtc/8906,https://issues.webrtc.org/issues/42234067 +https://crbug.com/webrtc/8907,https://issues.webrtc.org/issues/42234068 +https://crbug.com/webrtc/8909,https://issues.webrtc.org/issues/42234069 +https://crbug.com/webrtc/891,https://issues.webrtc.org/issues/42234070 +https://crbug.com/webrtc/8910,https://issues.webrtc.org/issues/42234071 +https://crbug.com/webrtc/8911,https://issues.webrtc.org/issues/42234072 +https://crbug.com/webrtc/8912,https://issues.webrtc.org/issues/42234073 +https://crbug.com/webrtc/8913,https://issues.webrtc.org/issues/42234074 +https://crbug.com/webrtc/8914,https://issues.webrtc.org/issues/42234075 +https://crbug.com/webrtc/8915,https://issues.webrtc.org/issues/42234076 +https://crbug.com/webrtc/8916,https://issues.webrtc.org/issues/42234077 +https://crbug.com/webrtc/8917,https://issues.webrtc.org/issues/42234078 +https://crbug.com/webrtc/8918,https://issues.webrtc.org/issues/42234079 +https://crbug.com/webrtc/8919,https://issues.webrtc.org/issues/42234080 +https://crbug.com/webrtc/892,https://issues.webrtc.org/issues/42234081 +https://crbug.com/webrtc/8920,https://issues.webrtc.org/issues/42234082 +https://crbug.com/webrtc/8921,https://issues.webrtc.org/issues/42234083 +https://crbug.com/webrtc/8922,https://issues.webrtc.org/issues/42234084 +https://crbug.com/webrtc/8923,https://issues.webrtc.org/issues/42234085 +https://crbug.com/webrtc/8924,https://issues.webrtc.org/issues/42234086 +https://crbug.com/webrtc/8925,https://issues.webrtc.org/issues/42234087 +https://crbug.com/webrtc/8927,https://issues.webrtc.org/issues/42234088 +https://crbug.com/webrtc/8928,https://issues.webrtc.org/issues/42234089 +https://crbug.com/webrtc/8929,https://issues.webrtc.org/issues/42234090 +https://crbug.com/webrtc/893,https://issues.webrtc.org/issues/42234091 +https://crbug.com/webrtc/8930,https://issues.webrtc.org/issues/42234092 +https://crbug.com/webrtc/8931,https://issues.webrtc.org/issues/42234093 +https://crbug.com/webrtc/8932,https://issues.webrtc.org/issues/42234094 +https://crbug.com/webrtc/8933,https://issues.webrtc.org/issues/42234095 +https://crbug.com/webrtc/8934,https://issues.webrtc.org/issues/42234096 +https://crbug.com/webrtc/8935,https://issues.webrtc.org/issues/42234097 +https://crbug.com/webrtc/8936,https://issues.webrtc.org/issues/42234098 +https://crbug.com/webrtc/8937,https://issues.webrtc.org/issues/42234099 +https://crbug.com/webrtc/8938,https://issues.webrtc.org/issues/42234100 +https://crbug.com/webrtc/8939,https://issues.webrtc.org/issues/42234101 +https://crbug.com/webrtc/894,https://issues.webrtc.org/issues/42234102 +https://crbug.com/webrtc/8940,https://issues.webrtc.org/issues/42234103 +https://crbug.com/webrtc/8941,https://issues.webrtc.org/issues/42234104 +https://crbug.com/webrtc/8942,https://issues.webrtc.org/issues/42234105 +https://crbug.com/webrtc/8943,https://issues.webrtc.org/issues/42234106 +https://crbug.com/webrtc/8944,https://issues.webrtc.org/issues/42234107 +https://crbug.com/webrtc/8945,https://issues.webrtc.org/issues/42234108 +https://crbug.com/webrtc/8946,https://issues.webrtc.org/issues/42234109 
+https://crbug.com/webrtc/8947,https://issues.webrtc.org/issues/42234110 +https://crbug.com/webrtc/8948,https://issues.webrtc.org/issues/42234111 +https://crbug.com/webrtc/8949,https://issues.webrtc.org/issues/42234112 +https://crbug.com/webrtc/895,https://issues.webrtc.org/issues/42234113 +https://crbug.com/webrtc/8950,https://issues.webrtc.org/issues/42234114 +https://crbug.com/webrtc/8951,https://issues.webrtc.org/issues/42234115 +https://crbug.com/webrtc/8952,https://issues.webrtc.org/issues/42234116 +https://crbug.com/webrtc/8953,https://issues.webrtc.org/issues/42234117 +https://crbug.com/webrtc/8954,https://issues.webrtc.org/issues/42234118 +https://crbug.com/webrtc/8955,https://issues.webrtc.org/issues/42234119 +https://crbug.com/webrtc/8956,https://issues.webrtc.org/issues/42234120 +https://crbug.com/webrtc/8957,https://issues.webrtc.org/issues/42234121 +https://crbug.com/webrtc/8958,https://issues.webrtc.org/issues/42234122 +https://crbug.com/webrtc/8959,https://issues.webrtc.org/issues/42234123 +https://crbug.com/webrtc/896,https://issues.webrtc.org/issues/42234124 +https://crbug.com/webrtc/8960,https://issues.webrtc.org/issues/42234125 +https://crbug.com/webrtc/8961,https://issues.webrtc.org/issues/42234126 +https://crbug.com/webrtc/8962,https://issues.webrtc.org/issues/42234127 +https://crbug.com/webrtc/8963,https://issues.webrtc.org/issues/42234128 +https://crbug.com/webrtc/8964,https://issues.webrtc.org/issues/42234129 +https://crbug.com/webrtc/8965,https://issues.webrtc.org/issues/42234130 +https://crbug.com/webrtc/8966,https://issues.webrtc.org/issues/42234131 +https://crbug.com/webrtc/8967,https://issues.webrtc.org/issues/42234132 +https://crbug.com/webrtc/8968,https://issues.webrtc.org/issues/42234133 +https://crbug.com/webrtc/8969,https://issues.webrtc.org/issues/42234134 +https://crbug.com/webrtc/897,https://issues.webrtc.org/issues/42234135 +https://crbug.com/webrtc/8970,https://issues.webrtc.org/issues/42234136 +https://crbug.com/webrtc/8971,https://issues.webrtc.org/issues/42234137 +https://crbug.com/webrtc/8972,https://issues.webrtc.org/issues/42234138 +https://crbug.com/webrtc/8973,https://issues.webrtc.org/issues/42234139 +https://crbug.com/webrtc/8974,https://issues.webrtc.org/issues/42234140 +https://crbug.com/webrtc/8975,https://issues.webrtc.org/issues/42234141 +https://crbug.com/webrtc/8976,https://issues.webrtc.org/issues/42234142 +https://crbug.com/webrtc/8977,https://issues.webrtc.org/issues/42234143 +https://crbug.com/webrtc/8978,https://issues.webrtc.org/issues/42234144 +https://crbug.com/webrtc/8979,https://issues.webrtc.org/issues/42234145 +https://crbug.com/webrtc/898,https://issues.webrtc.org/issues/42234146 +https://crbug.com/webrtc/8980,https://issues.webrtc.org/issues/42234147 +https://crbug.com/webrtc/8981,https://issues.webrtc.org/issues/42234148 +https://crbug.com/webrtc/8982,https://issues.webrtc.org/issues/42234149 +https://crbug.com/webrtc/8983,https://issues.webrtc.org/issues/42234150 +https://crbug.com/webrtc/8984,https://issues.webrtc.org/issues/42234151 +https://crbug.com/webrtc/8985,https://issues.webrtc.org/issues/42234152 +https://crbug.com/webrtc/8986,https://issues.webrtc.org/issues/42234153 +https://crbug.com/webrtc/8987,https://issues.webrtc.org/issues/42234154 +https://crbug.com/webrtc/8988,https://issues.webrtc.org/issues/42234155 +https://crbug.com/webrtc/8989,https://issues.webrtc.org/issues/42234156 +https://crbug.com/webrtc/899,https://issues.webrtc.org/issues/42234157 
+https://crbug.com/webrtc/8990,https://issues.webrtc.org/issues/42234158 +https://crbug.com/webrtc/8991,https://issues.webrtc.org/issues/42234159 +https://crbug.com/webrtc/8992,https://issues.webrtc.org/issues/42234160 +https://crbug.com/webrtc/8993,https://issues.webrtc.org/issues/42234161 +https://crbug.com/webrtc/8994,https://issues.webrtc.org/issues/42234162 +https://crbug.com/webrtc/8995,https://issues.webrtc.org/issues/42234163 +https://crbug.com/webrtc/8996,https://issues.webrtc.org/issues/42234164 +https://crbug.com/webrtc/8997,https://issues.webrtc.org/issues/42234165 +https://crbug.com/webrtc/8998,https://issues.webrtc.org/issues/42234166 +https://crbug.com/webrtc/8999,https://issues.webrtc.org/issues/42234167 +https://crbug.com/webrtc/9,https://issues.webrtc.org/issues/42234168 +https://crbug.com/webrtc/90,https://issues.webrtc.org/issues/42234169 +https://crbug.com/webrtc/900,https://issues.webrtc.org/issues/42234170 +https://crbug.com/webrtc/9000,https://issues.webrtc.org/issues/42234171 +https://crbug.com/webrtc/9001,https://issues.webrtc.org/issues/42234172 +https://crbug.com/webrtc/9002,https://issues.webrtc.org/issues/42234173 +https://crbug.com/webrtc/9003,https://issues.webrtc.org/issues/42234174 +https://crbug.com/webrtc/9004,https://issues.webrtc.org/issues/42234175 +https://crbug.com/webrtc/9005,https://issues.webrtc.org/issues/42234176 +https://crbug.com/webrtc/9006,https://issues.webrtc.org/issues/42234177 +https://crbug.com/webrtc/9007,https://issues.webrtc.org/issues/42234178 +https://crbug.com/webrtc/9008,https://issues.webrtc.org/issues/42234179 +https://crbug.com/webrtc/9009,https://issues.webrtc.org/issues/42234180 +https://crbug.com/webrtc/901,https://issues.webrtc.org/issues/42234181 +https://crbug.com/webrtc/9010,https://issues.webrtc.org/issues/42234182 +https://crbug.com/webrtc/9011,https://issues.webrtc.org/issues/42234183 +https://crbug.com/webrtc/9012,https://issues.webrtc.org/issues/42234184 +https://crbug.com/webrtc/9013,https://issues.webrtc.org/issues/42234185 +https://crbug.com/webrtc/9014,https://issues.webrtc.org/issues/42234186 +https://crbug.com/webrtc/9015,https://issues.webrtc.org/issues/42234187 +https://crbug.com/webrtc/9016,https://issues.webrtc.org/issues/42234188 +https://crbug.com/webrtc/9018,https://issues.webrtc.org/issues/42234189 +https://crbug.com/webrtc/9019,https://issues.webrtc.org/issues/42234190 +https://crbug.com/webrtc/902,https://issues.webrtc.org/issues/42234191 +https://crbug.com/webrtc/9020,https://issues.webrtc.org/issues/42234192 +https://crbug.com/webrtc/9021,https://issues.webrtc.org/issues/42234193 +https://crbug.com/webrtc/9022,https://issues.webrtc.org/issues/42234194 +https://crbug.com/webrtc/9023,https://issues.webrtc.org/issues/42234195 +https://crbug.com/webrtc/9024,https://issues.webrtc.org/issues/42234196 +https://crbug.com/webrtc/9025,https://issues.webrtc.org/issues/42234197 +https://crbug.com/webrtc/9026,https://issues.webrtc.org/issues/42234198 +https://crbug.com/webrtc/9027,https://issues.webrtc.org/issues/42234199 +https://crbug.com/webrtc/9028,https://issues.webrtc.org/issues/42234200 +https://crbug.com/webrtc/9029,https://issues.webrtc.org/issues/42234201 +https://crbug.com/webrtc/903,https://issues.webrtc.org/issues/42234202 +https://crbug.com/webrtc/9030,https://issues.webrtc.org/issues/42234203 +https://crbug.com/webrtc/9031,https://issues.webrtc.org/issues/42234204 +https://crbug.com/webrtc/9032,https://issues.webrtc.org/issues/42234205 
+https://crbug.com/webrtc/9033,https://issues.webrtc.org/issues/42234206 +https://crbug.com/webrtc/9034,https://issues.webrtc.org/issues/42234207 +https://crbug.com/webrtc/9035,https://issues.webrtc.org/issues/42234208 +https://crbug.com/webrtc/9036,https://issues.webrtc.org/issues/42234209 +https://crbug.com/webrtc/9038,https://issues.webrtc.org/issues/42234210 +https://crbug.com/webrtc/9039,https://issues.webrtc.org/issues/42234211 +https://crbug.com/webrtc/904,https://issues.webrtc.org/issues/42234212 +https://crbug.com/webrtc/9040,https://issues.webrtc.org/issues/42234213 +https://crbug.com/webrtc/9041,https://issues.webrtc.org/issues/42234214 +https://crbug.com/webrtc/9042,https://issues.webrtc.org/issues/42234215 +https://crbug.com/webrtc/9043,https://issues.webrtc.org/issues/42234216 +https://crbug.com/webrtc/9044,https://issues.webrtc.org/issues/42234217 +https://crbug.com/webrtc/9045,https://issues.webrtc.org/issues/42234218 +https://crbug.com/webrtc/9046,https://issues.webrtc.org/issues/42234219 +https://crbug.com/webrtc/9047,https://issues.webrtc.org/issues/42234220 +https://crbug.com/webrtc/9048,https://issues.webrtc.org/issues/42234221 +https://crbug.com/webrtc/9049,https://issues.webrtc.org/issues/42234222 +https://crbug.com/webrtc/905,https://issues.webrtc.org/issues/42234223 +https://crbug.com/webrtc/9050,https://issues.webrtc.org/issues/42234224 +https://crbug.com/webrtc/9051,https://issues.webrtc.org/issues/42234225 +https://crbug.com/webrtc/9052,https://issues.webrtc.org/issues/42234226 +https://crbug.com/webrtc/9053,https://issues.webrtc.org/issues/42234227 +https://crbug.com/webrtc/9054,https://issues.webrtc.org/issues/42234228 +https://crbug.com/webrtc/9055,https://issues.webrtc.org/issues/42234229 +https://crbug.com/webrtc/9056,https://issues.webrtc.org/issues/42234230 +https://crbug.com/webrtc/9057,https://issues.webrtc.org/issues/42234231 +https://crbug.com/webrtc/9058,https://issues.webrtc.org/issues/42234232 +https://crbug.com/webrtc/9059,https://issues.webrtc.org/issues/42234233 +https://crbug.com/webrtc/906,https://issues.webrtc.org/issues/42234234 +https://crbug.com/webrtc/9060,https://issues.webrtc.org/issues/42234235 +https://crbug.com/webrtc/9061,https://issues.webrtc.org/issues/42234236 +https://crbug.com/webrtc/9062,https://issues.webrtc.org/issues/42234237 +https://crbug.com/webrtc/9063,https://issues.webrtc.org/issues/42234238 +https://crbug.com/webrtc/9064,https://issues.webrtc.org/issues/42234239 +https://crbug.com/webrtc/9065,https://issues.webrtc.org/issues/42234240 +https://crbug.com/webrtc/9066,https://issues.webrtc.org/issues/42234241 +https://crbug.com/webrtc/9067,https://issues.webrtc.org/issues/42234242 +https://crbug.com/webrtc/9068,https://issues.webrtc.org/issues/42234243 +https://crbug.com/webrtc/9069,https://issues.webrtc.org/issues/42234244 +https://crbug.com/webrtc/907,https://issues.webrtc.org/issues/42234245 +https://crbug.com/webrtc/9070,https://issues.webrtc.org/issues/42234246 +https://crbug.com/webrtc/9072,https://issues.webrtc.org/issues/42234247 +https://crbug.com/webrtc/9073,https://issues.webrtc.org/issues/42234248 +https://crbug.com/webrtc/9074,https://issues.webrtc.org/issues/42234249 +https://crbug.com/webrtc/9075,https://issues.webrtc.org/issues/42234250 +https://crbug.com/webrtc/9076,https://issues.webrtc.org/issues/42234251 +https://crbug.com/webrtc/9077,https://issues.webrtc.org/issues/42234252 +https://crbug.com/webrtc/9078,https://issues.webrtc.org/issues/42234253 
+https://crbug.com/webrtc/9079,https://issues.webrtc.org/issues/42234254 +https://crbug.com/webrtc/908,https://issues.webrtc.org/issues/42234255 +https://crbug.com/webrtc/9080,https://issues.webrtc.org/issues/42234256 +https://crbug.com/webrtc/9081,https://issues.webrtc.org/issues/42234257 +https://crbug.com/webrtc/9082,https://issues.webrtc.org/issues/42234258 +https://crbug.com/webrtc/9083,https://issues.webrtc.org/issues/42234259 +https://crbug.com/webrtc/9084,https://issues.webrtc.org/issues/42234260 +https://crbug.com/webrtc/9085,https://issues.webrtc.org/issues/42234261 +https://crbug.com/webrtc/9086,https://issues.webrtc.org/issues/42234262 +https://crbug.com/webrtc/9087,https://issues.webrtc.org/issues/42234263 +https://crbug.com/webrtc/9088,https://issues.webrtc.org/issues/42234264 +https://crbug.com/webrtc/9089,https://issues.webrtc.org/issues/42234265 +https://crbug.com/webrtc/909,https://issues.webrtc.org/issues/42234266 +https://crbug.com/webrtc/9090,https://issues.webrtc.org/issues/42234267 +https://crbug.com/webrtc/9091,https://issues.webrtc.org/issues/42234268 +https://crbug.com/webrtc/9092,https://issues.webrtc.org/issues/42234269 +https://crbug.com/webrtc/9094,https://issues.webrtc.org/issues/42234270 +https://crbug.com/webrtc/9095,https://issues.webrtc.org/issues/42234271 +https://crbug.com/webrtc/9097,https://issues.webrtc.org/issues/42234272 +https://crbug.com/webrtc/9098,https://issues.webrtc.org/issues/42234273 +https://crbug.com/webrtc/9099,https://issues.webrtc.org/issues/42234274 +https://crbug.com/webrtc/91,https://issues.webrtc.org/issues/42234275 +https://crbug.com/webrtc/910,https://issues.webrtc.org/issues/42234276 +https://crbug.com/webrtc/9100,https://issues.webrtc.org/issues/42234277 +https://crbug.com/webrtc/9101,https://issues.webrtc.org/issues/42234278 +https://crbug.com/webrtc/9102,https://issues.webrtc.org/issues/42234279 +https://crbug.com/webrtc/9103,https://issues.webrtc.org/issues/42234280 +https://crbug.com/webrtc/9104,https://issues.webrtc.org/issues/42234281 +https://crbug.com/webrtc/9105,https://issues.webrtc.org/issues/42234282 +https://crbug.com/webrtc/9106,https://issues.webrtc.org/issues/42234283 +https://crbug.com/webrtc/9107,https://issues.webrtc.org/issues/42234284 +https://crbug.com/webrtc/9108,https://issues.webrtc.org/issues/42234285 +https://crbug.com/webrtc/9109,https://issues.webrtc.org/issues/42234286 +https://crbug.com/webrtc/911,https://issues.webrtc.org/issues/42234287 +https://crbug.com/webrtc/9110,https://issues.webrtc.org/issues/42234288 +https://crbug.com/webrtc/9111,https://issues.webrtc.org/issues/42234289 +https://crbug.com/webrtc/9112,https://issues.webrtc.org/issues/42234290 +https://crbug.com/webrtc/9113,https://issues.webrtc.org/issues/42234291 +https://crbug.com/webrtc/9114,https://issues.webrtc.org/issues/42234292 +https://crbug.com/webrtc/9115,https://issues.webrtc.org/issues/42234293 +https://crbug.com/webrtc/9116,https://issues.webrtc.org/issues/42234294 +https://crbug.com/webrtc/9117,https://issues.webrtc.org/issues/42234295 +https://crbug.com/webrtc/9118,https://issues.webrtc.org/issues/42234296 +https://crbug.com/webrtc/9119,https://issues.webrtc.org/issues/42234297 +https://crbug.com/webrtc/9120,https://issues.webrtc.org/issues/42234298 +https://crbug.com/webrtc/9121,https://issues.webrtc.org/issues/42234299 +https://crbug.com/webrtc/9122,https://issues.webrtc.org/issues/42234300 +https://crbug.com/webrtc/9123,https://issues.webrtc.org/issues/42234301 
+https://crbug.com/webrtc/9124,https://issues.webrtc.org/issues/42234302 +https://crbug.com/webrtc/9125,https://issues.webrtc.org/issues/42234303 +https://crbug.com/webrtc/9126,https://issues.webrtc.org/issues/42234304 +https://crbug.com/webrtc/9127,https://issues.webrtc.org/issues/42234305 +https://crbug.com/webrtc/9128,https://issues.webrtc.org/issues/42234306 +https://crbug.com/webrtc/9129,https://issues.webrtc.org/issues/42234307 +https://crbug.com/webrtc/913,https://issues.webrtc.org/issues/42234308 +https://crbug.com/webrtc/9130,https://issues.webrtc.org/issues/42234309 +https://crbug.com/webrtc/9131,https://issues.webrtc.org/issues/42234310 +https://crbug.com/webrtc/9132,https://issues.webrtc.org/issues/42234311 +https://crbug.com/webrtc/9133,https://issues.webrtc.org/issues/42234312 +https://crbug.com/webrtc/9134,https://issues.webrtc.org/issues/42234313 +https://crbug.com/webrtc/9135,https://issues.webrtc.org/issues/42234314 +https://crbug.com/webrtc/9136,https://issues.webrtc.org/issues/42234315 +https://crbug.com/webrtc/9137,https://issues.webrtc.org/issues/42234316 +https://crbug.com/webrtc/9138,https://issues.webrtc.org/issues/42234317 +https://crbug.com/webrtc/9139,https://issues.webrtc.org/issues/42234318 +https://crbug.com/webrtc/914,https://issues.webrtc.org/issues/42234319 +https://crbug.com/webrtc/9142,https://issues.webrtc.org/issues/42234320 +https://crbug.com/webrtc/9144,https://issues.webrtc.org/issues/42234321 +https://crbug.com/webrtc/9145,https://issues.webrtc.org/issues/42234322 +https://crbug.com/webrtc/9146,https://issues.webrtc.org/issues/42234323 +https://crbug.com/webrtc/9147,https://issues.webrtc.org/issues/42234324 +https://crbug.com/webrtc/9148,https://issues.webrtc.org/issues/42234325 +https://crbug.com/webrtc/9149,https://issues.webrtc.org/issues/42234326 +https://crbug.com/webrtc/915,https://issues.webrtc.org/issues/42234327 +https://crbug.com/webrtc/9150,https://issues.webrtc.org/issues/42234328 +https://crbug.com/webrtc/9151,https://issues.webrtc.org/issues/42234329 +https://crbug.com/webrtc/9152,https://issues.webrtc.org/issues/42234330 +https://crbug.com/webrtc/9153,https://issues.webrtc.org/issues/42234331 +https://crbug.com/webrtc/9154,https://issues.webrtc.org/issues/42234332 +https://crbug.com/webrtc/9155,https://issues.webrtc.org/issues/42234333 +https://crbug.com/webrtc/9156,https://issues.webrtc.org/issues/42234334 +https://crbug.com/webrtc/9157,https://issues.webrtc.org/issues/42234335 +https://crbug.com/webrtc/9158,https://issues.webrtc.org/issues/42234336 +https://crbug.com/webrtc/9159,https://issues.webrtc.org/issues/42234337 +https://crbug.com/webrtc/916,https://issues.webrtc.org/issues/42234338 +https://crbug.com/webrtc/9160,https://issues.webrtc.org/issues/42234339 +https://crbug.com/webrtc/9161,https://issues.webrtc.org/issues/42234340 +https://crbug.com/webrtc/9162,https://issues.webrtc.org/issues/42234341 +https://crbug.com/webrtc/9163,https://issues.webrtc.org/issues/42234342 +https://crbug.com/webrtc/9164,https://issues.webrtc.org/issues/42234343 +https://crbug.com/webrtc/9165,https://issues.webrtc.org/issues/42234344 +https://crbug.com/webrtc/9166,https://issues.webrtc.org/issues/42234345 +https://crbug.com/webrtc/9167,https://issues.webrtc.org/issues/42234346 +https://crbug.com/webrtc/9168,https://issues.webrtc.org/issues/42234347 +https://crbug.com/webrtc/9169,https://issues.webrtc.org/issues/42234348 +https://crbug.com/webrtc/917,https://issues.webrtc.org/issues/42234349 
+https://crbug.com/webrtc/9170,https://issues.webrtc.org/issues/42234350 +https://crbug.com/webrtc/9171,https://issues.webrtc.org/issues/42234351 +https://crbug.com/webrtc/9172,https://issues.webrtc.org/issues/42234352 +https://crbug.com/webrtc/9173,https://issues.webrtc.org/issues/42234353 +https://crbug.com/webrtc/9174,https://issues.webrtc.org/issues/42234354 +https://crbug.com/webrtc/9175,https://issues.webrtc.org/issues/42234355 +https://crbug.com/webrtc/9176,https://issues.webrtc.org/issues/42234356 +https://crbug.com/webrtc/9177,https://issues.webrtc.org/issues/42234357 +https://crbug.com/webrtc/9178,https://issues.webrtc.org/issues/42234358 +https://crbug.com/webrtc/9179,https://issues.webrtc.org/issues/42234359 +https://crbug.com/webrtc/918,https://issues.webrtc.org/issues/42234360 +https://crbug.com/webrtc/9180,https://issues.webrtc.org/issues/42234361 +https://crbug.com/webrtc/9181,https://issues.webrtc.org/issues/42234362 +https://crbug.com/webrtc/9182,https://issues.webrtc.org/issues/42234363 +https://crbug.com/webrtc/9183,https://issues.webrtc.org/issues/42234364 +https://crbug.com/webrtc/9184,https://issues.webrtc.org/issues/42234365 +https://crbug.com/webrtc/9185,https://issues.webrtc.org/issues/42234366 +https://crbug.com/webrtc/9186,https://issues.webrtc.org/issues/42234367 +https://crbug.com/webrtc/9187,https://issues.webrtc.org/issues/42234368 +https://crbug.com/webrtc/9188,https://issues.webrtc.org/issues/42234369 +https://crbug.com/webrtc/9189,https://issues.webrtc.org/issues/42234370 +https://crbug.com/webrtc/919,https://issues.webrtc.org/issues/42234371 +https://crbug.com/webrtc/9190,https://issues.webrtc.org/issues/42234372 +https://crbug.com/webrtc/9191,https://issues.webrtc.org/issues/42234373 +https://crbug.com/webrtc/9192,https://issues.webrtc.org/issues/42234374 +https://crbug.com/webrtc/9193,https://issues.webrtc.org/issues/42234375 +https://crbug.com/webrtc/9194,https://issues.webrtc.org/issues/42234376 +https://crbug.com/webrtc/9195,https://issues.webrtc.org/issues/42234377 +https://crbug.com/webrtc/9196,https://issues.webrtc.org/issues/42234378 +https://crbug.com/webrtc/9197,https://issues.webrtc.org/issues/42234379 +https://crbug.com/webrtc/9198,https://issues.webrtc.org/issues/42234380 +https://crbug.com/webrtc/9199,https://issues.webrtc.org/issues/42234381 +https://crbug.com/webrtc/92,https://issues.webrtc.org/issues/42234382 +https://crbug.com/webrtc/920,https://issues.webrtc.org/issues/42234383 +https://crbug.com/webrtc/9200,https://issues.webrtc.org/issues/42234384 +https://crbug.com/webrtc/9202,https://issues.webrtc.org/issues/42234385 +https://crbug.com/webrtc/9203,https://issues.webrtc.org/issues/42234386 +https://crbug.com/webrtc/9204,https://issues.webrtc.org/issues/42234387 +https://crbug.com/webrtc/9206,https://issues.webrtc.org/issues/42234388 +https://crbug.com/webrtc/9207,https://issues.webrtc.org/issues/42234389 +https://crbug.com/webrtc/9208,https://issues.webrtc.org/issues/42234390 +https://crbug.com/webrtc/921,https://issues.webrtc.org/issues/42234391 +https://crbug.com/webrtc/9210,https://issues.webrtc.org/issues/42234392 +https://crbug.com/webrtc/9211,https://issues.webrtc.org/issues/42234393 +https://crbug.com/webrtc/9212,https://issues.webrtc.org/issues/42234394 +https://crbug.com/webrtc/9213,https://issues.webrtc.org/issues/42234395 +https://crbug.com/webrtc/9214,https://issues.webrtc.org/issues/42234396 +https://crbug.com/webrtc/9215,https://issues.webrtc.org/issues/42234397 
+https://crbug.com/webrtc/9216,https://issues.webrtc.org/issues/42234398 +https://crbug.com/webrtc/9217,https://issues.webrtc.org/issues/42234399 +https://crbug.com/webrtc/9219,https://issues.webrtc.org/issues/42234400 +https://crbug.com/webrtc/922,https://issues.webrtc.org/issues/42234401 +https://crbug.com/webrtc/9220,https://issues.webrtc.org/issues/42234402 +https://crbug.com/webrtc/9221,https://issues.webrtc.org/issues/42234403 +https://crbug.com/webrtc/9222,https://issues.webrtc.org/issues/42234404 +https://crbug.com/webrtc/9223,https://issues.webrtc.org/issues/42234405 +https://crbug.com/webrtc/9225,https://issues.webrtc.org/issues/42234406 +https://crbug.com/webrtc/9226,https://issues.webrtc.org/issues/42234407 +https://crbug.com/webrtc/9227,https://issues.webrtc.org/issues/42234408 +https://crbug.com/webrtc/9228,https://issues.webrtc.org/issues/42234409 +https://crbug.com/webrtc/9229,https://issues.webrtc.org/issues/42234410 +https://crbug.com/webrtc/923,https://issues.webrtc.org/issues/42234411 +https://crbug.com/webrtc/9230,https://issues.webrtc.org/issues/42234412 +https://crbug.com/webrtc/9231,https://issues.webrtc.org/issues/42234413 +https://crbug.com/webrtc/9232,https://issues.webrtc.org/issues/42234414 +https://crbug.com/webrtc/9235,https://issues.webrtc.org/issues/42234415 +https://crbug.com/webrtc/9236,https://issues.webrtc.org/issues/42234416 +https://crbug.com/webrtc/9237,https://issues.webrtc.org/issues/42234417 +https://crbug.com/webrtc/9238,https://issues.webrtc.org/issues/42234418 +https://crbug.com/webrtc/9239,https://issues.webrtc.org/issues/42234419 +https://crbug.com/webrtc/924,https://issues.webrtc.org/issues/42234420 +https://crbug.com/webrtc/9240,https://issues.webrtc.org/issues/42234421 +https://crbug.com/webrtc/9241,https://issues.webrtc.org/issues/42234422 +https://crbug.com/webrtc/9242,https://issues.webrtc.org/issues/42234423 +https://crbug.com/webrtc/9243,https://issues.webrtc.org/issues/42234424 +https://crbug.com/webrtc/9244,https://issues.webrtc.org/issues/42234425 +https://crbug.com/webrtc/9245,https://issues.webrtc.org/issues/42234426 +https://crbug.com/webrtc/9246,https://issues.webrtc.org/issues/42234427 +https://crbug.com/webrtc/9247,https://issues.webrtc.org/issues/42234428 +https://crbug.com/webrtc/9248,https://issues.webrtc.org/issues/42234429 +https://crbug.com/webrtc/9249,https://issues.webrtc.org/issues/42234430 +https://crbug.com/webrtc/925,https://issues.webrtc.org/issues/42234431 +https://crbug.com/webrtc/9250,https://issues.webrtc.org/issues/42234432 +https://crbug.com/webrtc/9251,https://issues.webrtc.org/issues/42234433 +https://crbug.com/webrtc/9252,https://issues.webrtc.org/issues/42234434 +https://crbug.com/webrtc/9253,https://issues.webrtc.org/issues/42234435 +https://crbug.com/webrtc/9254,https://issues.webrtc.org/issues/42234436 +https://crbug.com/webrtc/9255,https://issues.webrtc.org/issues/42234437 +https://crbug.com/webrtc/9256,https://issues.webrtc.org/issues/42234438 +https://crbug.com/webrtc/9257,https://issues.webrtc.org/issues/42234439 +https://crbug.com/webrtc/9258,https://issues.webrtc.org/issues/42234440 +https://crbug.com/webrtc/9259,https://issues.webrtc.org/issues/42234441 +https://crbug.com/webrtc/926,https://issues.webrtc.org/issues/42234442 +https://crbug.com/webrtc/9260,https://issues.webrtc.org/issues/42234443 +https://crbug.com/webrtc/9261,https://issues.webrtc.org/issues/42234444 +https://crbug.com/webrtc/9262,https://issues.webrtc.org/issues/42234445 
+https://crbug.com/webrtc/9263,https://issues.webrtc.org/issues/42234446 +https://crbug.com/webrtc/9264,https://issues.webrtc.org/issues/42234447 +https://crbug.com/webrtc/9265,https://issues.webrtc.org/issues/42234448 +https://crbug.com/webrtc/9266,https://issues.webrtc.org/issues/42234449 +https://crbug.com/webrtc/9267,https://issues.webrtc.org/issues/42234450 +https://crbug.com/webrtc/9268,https://issues.webrtc.org/issues/42234451 +https://crbug.com/webrtc/9269,https://issues.webrtc.org/issues/42234452 +https://crbug.com/webrtc/927,https://issues.webrtc.org/issues/42234453 +https://crbug.com/webrtc/9270,https://issues.webrtc.org/issues/42234454 +https://crbug.com/webrtc/9271,https://issues.webrtc.org/issues/42234455 +https://crbug.com/webrtc/9272,https://issues.webrtc.org/issues/42234456 +https://crbug.com/webrtc/9274,https://issues.webrtc.org/issues/42234457 +https://crbug.com/webrtc/9275,https://issues.webrtc.org/issues/42234458 +https://crbug.com/webrtc/9276,https://issues.webrtc.org/issues/42234459 +https://crbug.com/webrtc/9277,https://issues.webrtc.org/issues/42234460 +https://crbug.com/webrtc/9278,https://issues.webrtc.org/issues/42234461 +https://crbug.com/webrtc/9279,https://issues.webrtc.org/issues/42234462 +https://crbug.com/webrtc/928,https://issues.webrtc.org/issues/42234463 +https://crbug.com/webrtc/9280,https://issues.webrtc.org/issues/42234464 +https://crbug.com/webrtc/9281,https://issues.webrtc.org/issues/42234465 +https://crbug.com/webrtc/9282,https://issues.webrtc.org/issues/42234466 +https://crbug.com/webrtc/9283,https://issues.webrtc.org/issues/42234467 +https://crbug.com/webrtc/9284,https://issues.webrtc.org/issues/42234468 +https://crbug.com/webrtc/9285,https://issues.webrtc.org/issues/42234469 +https://crbug.com/webrtc/9286,https://issues.webrtc.org/issues/42234470 +https://crbug.com/webrtc/9287,https://issues.webrtc.org/issues/42234471 +https://crbug.com/webrtc/9288,https://issues.webrtc.org/issues/42234472 +https://crbug.com/webrtc/9289,https://issues.webrtc.org/issues/42234473 +https://crbug.com/webrtc/929,https://issues.webrtc.org/issues/42234474 +https://crbug.com/webrtc/9290,https://issues.webrtc.org/issues/42234475 +https://crbug.com/webrtc/9291,https://issues.webrtc.org/issues/42234476 +https://crbug.com/webrtc/9292,https://issues.webrtc.org/issues/42234477 +https://crbug.com/webrtc/9293,https://issues.webrtc.org/issues/42234478 +https://crbug.com/webrtc/9294,https://issues.webrtc.org/issues/42234479 +https://crbug.com/webrtc/9295,https://issues.webrtc.org/issues/42234480 +https://crbug.com/webrtc/9296,https://issues.webrtc.org/issues/42234481 +https://crbug.com/webrtc/9297,https://issues.webrtc.org/issues/42234482 +https://crbug.com/webrtc/9298,https://issues.webrtc.org/issues/42234483 +https://crbug.com/webrtc/9299,https://issues.webrtc.org/issues/42234484 +https://crbug.com/webrtc/93,https://issues.webrtc.org/issues/42234485 +https://crbug.com/webrtc/930,https://issues.webrtc.org/issues/42234486 +https://crbug.com/webrtc/9300,https://issues.webrtc.org/issues/42234487 +https://crbug.com/webrtc/9301,https://issues.webrtc.org/issues/42234488 +https://crbug.com/webrtc/9302,https://issues.webrtc.org/issues/42234489 +https://crbug.com/webrtc/9303,https://issues.webrtc.org/issues/42234490 +https://crbug.com/webrtc/9304,https://issues.webrtc.org/issues/42234491 +https://crbug.com/webrtc/9305,https://issues.webrtc.org/issues/42234492 +https://crbug.com/webrtc/9306,https://issues.webrtc.org/issues/42234493 
+https://crbug.com/webrtc/9307,https://issues.webrtc.org/issues/42234494 +https://crbug.com/webrtc/9308,https://issues.webrtc.org/issues/42234495 +https://crbug.com/webrtc/9309,https://issues.webrtc.org/issues/42234496 +https://crbug.com/webrtc/931,https://issues.webrtc.org/issues/42234497 +https://crbug.com/webrtc/9310,https://issues.webrtc.org/issues/42234498 +https://crbug.com/webrtc/9311,https://issues.webrtc.org/issues/42234499 +https://crbug.com/webrtc/9312,https://issues.webrtc.org/issues/42234500 +https://crbug.com/webrtc/9313,https://issues.webrtc.org/issues/42234501 +https://crbug.com/webrtc/9314,https://issues.webrtc.org/issues/42234502 +https://crbug.com/webrtc/9315,https://issues.webrtc.org/issues/42234503 +https://crbug.com/webrtc/9316,https://issues.webrtc.org/issues/42234504 +https://crbug.com/webrtc/9317,https://issues.webrtc.org/issues/42234505 +https://crbug.com/webrtc/9318,https://issues.webrtc.org/issues/42234506 +https://crbug.com/webrtc/9319,https://issues.webrtc.org/issues/42234507 +https://crbug.com/webrtc/932,https://issues.webrtc.org/issues/42234508 +https://crbug.com/webrtc/9320,https://issues.webrtc.org/issues/42234509 +https://crbug.com/webrtc/9321,https://issues.webrtc.org/issues/42234510 +https://crbug.com/webrtc/9322,https://issues.webrtc.org/issues/42234511 +https://crbug.com/webrtc/9323,https://issues.webrtc.org/issues/42234512 +https://crbug.com/webrtc/9324,https://issues.webrtc.org/issues/42234513 +https://crbug.com/webrtc/9325,https://issues.webrtc.org/issues/42234514 +https://crbug.com/webrtc/9326,https://issues.webrtc.org/issues/42234515 +https://crbug.com/webrtc/9327,https://issues.webrtc.org/issues/42234516 +https://crbug.com/webrtc/9328,https://issues.webrtc.org/issues/42234517 +https://crbug.com/webrtc/9329,https://issues.webrtc.org/issues/42234518 +https://crbug.com/webrtc/933,https://issues.webrtc.org/issues/42234519 +https://crbug.com/webrtc/9330,https://issues.webrtc.org/issues/42234520 +https://crbug.com/webrtc/9331,https://issues.webrtc.org/issues/42234521 +https://crbug.com/webrtc/9332,https://issues.webrtc.org/issues/42234522 +https://crbug.com/webrtc/9333,https://issues.webrtc.org/issues/42234523 +https://crbug.com/webrtc/9334,https://issues.webrtc.org/issues/42234524 +https://crbug.com/webrtc/9335,https://issues.webrtc.org/issues/42234525 +https://crbug.com/webrtc/9336,https://issues.webrtc.org/issues/42234526 +https://crbug.com/webrtc/9337,https://issues.webrtc.org/issues/42234527 +https://crbug.com/webrtc/9338,https://issues.webrtc.org/issues/42234528 +https://crbug.com/webrtc/9339,https://issues.webrtc.org/issues/42234529 +https://crbug.com/webrtc/934,https://issues.webrtc.org/issues/42234530 +https://crbug.com/webrtc/9340,https://issues.webrtc.org/issues/42234531 +https://crbug.com/webrtc/9341,https://issues.webrtc.org/issues/42234532 +https://crbug.com/webrtc/9342,https://issues.webrtc.org/issues/42234533 +https://crbug.com/webrtc/9343,https://issues.webrtc.org/issues/42234534 +https://crbug.com/webrtc/9344,https://issues.webrtc.org/issues/42234535 +https://crbug.com/webrtc/9345,https://issues.webrtc.org/issues/42234536 +https://crbug.com/webrtc/9346,https://issues.webrtc.org/issues/42234537 +https://crbug.com/webrtc/9347,https://issues.webrtc.org/issues/42234538 +https://crbug.com/webrtc/9348,https://issues.webrtc.org/issues/42234539 +https://crbug.com/webrtc/9349,https://issues.webrtc.org/issues/42234540 +https://crbug.com/webrtc/935,https://issues.webrtc.org/issues/42234541 
+https://crbug.com/webrtc/9350,https://issues.webrtc.org/issues/42234542 +https://crbug.com/webrtc/9351,https://issues.webrtc.org/issues/42234543 +https://crbug.com/webrtc/9352,https://issues.webrtc.org/issues/42234544 +https://crbug.com/webrtc/9353,https://issues.webrtc.org/issues/42234545 +https://crbug.com/webrtc/9354,https://issues.webrtc.org/issues/42234546 +https://crbug.com/webrtc/9355,https://issues.webrtc.org/issues/42234547 +https://crbug.com/webrtc/9357,https://issues.webrtc.org/issues/42234548 +https://crbug.com/webrtc/9358,https://issues.webrtc.org/issues/42234549 +https://crbug.com/webrtc/9359,https://issues.webrtc.org/issues/42234550 +https://crbug.com/webrtc/936,https://issues.webrtc.org/issues/42234551 +https://crbug.com/webrtc/9360,https://issues.webrtc.org/issues/42234552 +https://crbug.com/webrtc/9361,https://issues.webrtc.org/issues/42234553 +https://crbug.com/webrtc/9362,https://issues.webrtc.org/issues/42234554 +https://crbug.com/webrtc/9363,https://issues.webrtc.org/issues/42234555 +https://crbug.com/webrtc/9364,https://issues.webrtc.org/issues/42234556 +https://crbug.com/webrtc/9365,https://issues.webrtc.org/issues/42234557 +https://crbug.com/webrtc/9366,https://issues.webrtc.org/issues/42234558 +https://crbug.com/webrtc/9367,https://issues.webrtc.org/issues/42234559 +https://crbug.com/webrtc/9368,https://issues.webrtc.org/issues/42234560 +https://crbug.com/webrtc/937,https://issues.webrtc.org/issues/42234561 +https://crbug.com/webrtc/9370,https://issues.webrtc.org/issues/42234562 +https://crbug.com/webrtc/9371,https://issues.webrtc.org/issues/42234563 +https://crbug.com/webrtc/9372,https://issues.webrtc.org/issues/42234564 +https://crbug.com/webrtc/9373,https://issues.webrtc.org/issues/42234565 +https://crbug.com/webrtc/9374,https://issues.webrtc.org/issues/42234566 +https://crbug.com/webrtc/9375,https://issues.webrtc.org/issues/42234567 +https://crbug.com/webrtc/9376,https://issues.webrtc.org/issues/42234568 +https://crbug.com/webrtc/9377,https://issues.webrtc.org/issues/42234569 +https://crbug.com/webrtc/9378,https://issues.webrtc.org/issues/42234570 +https://crbug.com/webrtc/9379,https://issues.webrtc.org/issues/42234571 +https://crbug.com/webrtc/938,https://issues.webrtc.org/issues/42234572 +https://crbug.com/webrtc/9380,https://issues.webrtc.org/issues/42234573 +https://crbug.com/webrtc/9381,https://issues.webrtc.org/issues/42234574 +https://crbug.com/webrtc/9382,https://issues.webrtc.org/issues/42234575 +https://crbug.com/webrtc/9383,https://issues.webrtc.org/issues/42234576 +https://crbug.com/webrtc/9384,https://issues.webrtc.org/issues/42234577 +https://crbug.com/webrtc/9385,https://issues.webrtc.org/issues/42234578 +https://crbug.com/webrtc/9386,https://issues.webrtc.org/issues/42234579 +https://crbug.com/webrtc/9387,https://issues.webrtc.org/issues/42234580 +https://crbug.com/webrtc/9388,https://issues.webrtc.org/issues/42234581 +https://crbug.com/webrtc/9389,https://issues.webrtc.org/issues/42234582 +https://crbug.com/webrtc/939,https://issues.webrtc.org/issues/42234583 +https://crbug.com/webrtc/9390,https://issues.webrtc.org/issues/42234584 +https://crbug.com/webrtc/9391,https://issues.webrtc.org/issues/42234585 +https://crbug.com/webrtc/9392,https://issues.webrtc.org/issues/42234586 +https://crbug.com/webrtc/9393,https://issues.webrtc.org/issues/42234587 +https://crbug.com/webrtc/9395,https://issues.webrtc.org/issues/42234588 +https://crbug.com/webrtc/9396,https://issues.webrtc.org/issues/42234589 
+https://crbug.com/webrtc/9397,https://issues.webrtc.org/issues/42234590 +https://crbug.com/webrtc/9398,https://issues.webrtc.org/issues/42234591 +https://crbug.com/webrtc/9399,https://issues.webrtc.org/issues/42234592 +https://crbug.com/webrtc/94,https://issues.webrtc.org/issues/42234593 +https://crbug.com/webrtc/940,https://issues.webrtc.org/issues/42234594 +https://crbug.com/webrtc/9400,https://issues.webrtc.org/issues/42234595 +https://crbug.com/webrtc/9403,https://issues.webrtc.org/issues/42234596 +https://crbug.com/webrtc/9404,https://issues.webrtc.org/issues/42234597 +https://crbug.com/webrtc/9405,https://issues.webrtc.org/issues/42234598 +https://crbug.com/webrtc/9406,https://issues.webrtc.org/issues/42234599 +https://crbug.com/webrtc/9407,https://issues.webrtc.org/issues/42234600 +https://crbug.com/webrtc/9408,https://issues.webrtc.org/issues/42234601 +https://crbug.com/webrtc/9409,https://issues.webrtc.org/issues/42234602 +https://crbug.com/webrtc/941,https://issues.webrtc.org/issues/42234603 +https://crbug.com/webrtc/9411,https://issues.webrtc.org/issues/42234604 +https://crbug.com/webrtc/9412,https://issues.webrtc.org/issues/42234605 +https://crbug.com/webrtc/9413,https://issues.webrtc.org/issues/42234606 +https://crbug.com/webrtc/9414,https://issues.webrtc.org/issues/42234607 +https://crbug.com/webrtc/9415,https://issues.webrtc.org/issues/42234608 +https://crbug.com/webrtc/9416,https://issues.webrtc.org/issues/42234609 +https://crbug.com/webrtc/9417,https://issues.webrtc.org/issues/42234610 +https://crbug.com/webrtc/9418,https://issues.webrtc.org/issues/42234611 +https://crbug.com/webrtc/942,https://issues.webrtc.org/issues/42234612 +https://crbug.com/webrtc/9420,https://issues.webrtc.org/issues/42234613 +https://crbug.com/webrtc/9421,https://issues.webrtc.org/issues/42234614 +https://crbug.com/webrtc/9422,https://issues.webrtc.org/issues/42234615 +https://crbug.com/webrtc/9423,https://issues.webrtc.org/issues/42234616 +https://crbug.com/webrtc/9424,https://issues.webrtc.org/issues/42234617 +https://crbug.com/webrtc/9425,https://issues.webrtc.org/issues/42234618 +https://crbug.com/webrtc/9426,https://issues.webrtc.org/issues/42234619 +https://crbug.com/webrtc/9427,https://issues.webrtc.org/issues/42234620 +https://crbug.com/webrtc/9428,https://issues.webrtc.org/issues/42234621 +https://crbug.com/webrtc/9429,https://issues.webrtc.org/issues/42234622 +https://crbug.com/webrtc/943,https://issues.webrtc.org/issues/42234623 +https://crbug.com/webrtc/9430,https://issues.webrtc.org/issues/42234624 +https://crbug.com/webrtc/9431,https://issues.webrtc.org/issues/42234625 +https://crbug.com/webrtc/9432,https://issues.webrtc.org/issues/42234626 +https://crbug.com/webrtc/9433,https://issues.webrtc.org/issues/42234627 +https://crbug.com/webrtc/9434,https://issues.webrtc.org/issues/42234628 +https://crbug.com/webrtc/9435,https://issues.webrtc.org/issues/42234629 +https://crbug.com/webrtc/9437,https://issues.webrtc.org/issues/42234630 +https://crbug.com/webrtc/9438,https://issues.webrtc.org/issues/42234631 +https://crbug.com/webrtc/9439,https://issues.webrtc.org/issues/42234632 +https://crbug.com/webrtc/944,https://issues.webrtc.org/issues/42234633 +https://crbug.com/webrtc/9440,https://issues.webrtc.org/issues/42234634 +https://crbug.com/webrtc/9441,https://issues.webrtc.org/issues/42234635 +https://crbug.com/webrtc/9443,https://issues.webrtc.org/issues/42234636 +https://crbug.com/webrtc/9444,https://issues.webrtc.org/issues/42234637 
+https://crbug.com/webrtc/9445,https://issues.webrtc.org/issues/42234638 +https://crbug.com/webrtc/9446,https://issues.webrtc.org/issues/42234639 +https://crbug.com/webrtc/9447,https://issues.webrtc.org/issues/42234640 +https://crbug.com/webrtc/9448,https://issues.webrtc.org/issues/42234641 +https://crbug.com/webrtc/9449,https://issues.webrtc.org/issues/42234642 +https://crbug.com/webrtc/945,https://issues.webrtc.org/issues/42234643 +https://crbug.com/webrtc/9450,https://issues.webrtc.org/issues/42234644 +https://crbug.com/webrtc/9451,https://issues.webrtc.org/issues/42234645 +https://crbug.com/webrtc/9452,https://issues.webrtc.org/issues/42234646 +https://crbug.com/webrtc/9453,https://issues.webrtc.org/issues/42234647 +https://crbug.com/webrtc/9454,https://issues.webrtc.org/issues/42234648 +https://crbug.com/webrtc/9455,https://issues.webrtc.org/issues/42234649 +https://crbug.com/webrtc/9456,https://issues.webrtc.org/issues/42234650 +https://crbug.com/webrtc/9457,https://issues.webrtc.org/issues/42234651 +https://crbug.com/webrtc/9458,https://issues.webrtc.org/issues/42234652 +https://crbug.com/webrtc/946,https://issues.webrtc.org/issues/42234653 +https://crbug.com/webrtc/9460,https://issues.webrtc.org/issues/42234654 +https://crbug.com/webrtc/9461,https://issues.webrtc.org/issues/42234655 +https://crbug.com/webrtc/9462,https://issues.webrtc.org/issues/42234656 +https://crbug.com/webrtc/9463,https://issues.webrtc.org/issues/42234657 +https://crbug.com/webrtc/9464,https://issues.webrtc.org/issues/42234658 +https://crbug.com/webrtc/9465,https://issues.webrtc.org/issues/42234659 +https://crbug.com/webrtc/9466,https://issues.webrtc.org/issues/42234660 +https://crbug.com/webrtc/9467,https://issues.webrtc.org/issues/42234661 +https://crbug.com/webrtc/9468,https://issues.webrtc.org/issues/42234662 +https://crbug.com/webrtc/9469,https://issues.webrtc.org/issues/42234663 +https://crbug.com/webrtc/947,https://issues.webrtc.org/issues/42234664 +https://crbug.com/webrtc/9471,https://issues.webrtc.org/issues/42234665 +https://crbug.com/webrtc/9472,https://issues.webrtc.org/issues/42234666 +https://crbug.com/webrtc/9473,https://issues.webrtc.org/issues/42234667 +https://crbug.com/webrtc/9474,https://issues.webrtc.org/issues/42234668 +https://crbug.com/webrtc/9475,https://issues.webrtc.org/issues/42234669 +https://crbug.com/webrtc/9477,https://issues.webrtc.org/issues/42234670 +https://crbug.com/webrtc/9478,https://issues.webrtc.org/issues/42234671 +https://crbug.com/webrtc/9479,https://issues.webrtc.org/issues/42234672 +https://crbug.com/webrtc/948,https://issues.webrtc.org/issues/42234673 +https://crbug.com/webrtc/9480,https://issues.webrtc.org/issues/42234674 +https://crbug.com/webrtc/9481,https://issues.webrtc.org/issues/42234675 +https://crbug.com/webrtc/9482,https://issues.webrtc.org/issues/42234676 +https://crbug.com/webrtc/9483,https://issues.webrtc.org/issues/42234677 +https://crbug.com/webrtc/9484,https://issues.webrtc.org/issues/42234678 +https://crbug.com/webrtc/9485,https://issues.webrtc.org/issues/42234679 +https://crbug.com/webrtc/9486,https://issues.webrtc.org/issues/42234680 +https://crbug.com/webrtc/9487,https://issues.webrtc.org/issues/42234681 +https://crbug.com/webrtc/9488,https://issues.webrtc.org/issues/42234682 +https://crbug.com/webrtc/9489,https://issues.webrtc.org/issues/42234683 +https://crbug.com/webrtc/949,https://issues.webrtc.org/issues/42234684 +https://crbug.com/webrtc/9490,https://issues.webrtc.org/issues/42234685 
+https://crbug.com/webrtc/9491,https://issues.webrtc.org/issues/42234686 +https://crbug.com/webrtc/9492,https://issues.webrtc.org/issues/42234687 +https://crbug.com/webrtc/9493,https://issues.webrtc.org/issues/42234688 +https://crbug.com/webrtc/9494,https://issues.webrtc.org/issues/42234689 +https://crbug.com/webrtc/9495,https://issues.webrtc.org/issues/42234690 +https://crbug.com/webrtc/9496,https://issues.webrtc.org/issues/42234691 +https://crbug.com/webrtc/9497,https://issues.webrtc.org/issues/42234692 +https://crbug.com/webrtc/9498,https://issues.webrtc.org/issues/42234693 +https://crbug.com/webrtc/9499,https://issues.webrtc.org/issues/42234694 +https://crbug.com/webrtc/95,https://issues.webrtc.org/issues/42234695 +https://crbug.com/webrtc/950,https://issues.webrtc.org/issues/42234696 +https://crbug.com/webrtc/9500,https://issues.webrtc.org/issues/42234697 +https://crbug.com/webrtc/9502,https://issues.webrtc.org/issues/42234698 +https://crbug.com/webrtc/9503,https://issues.webrtc.org/issues/42234699 +https://crbug.com/webrtc/9504,https://issues.webrtc.org/issues/42234700 +https://crbug.com/webrtc/9505,https://issues.webrtc.org/issues/42234701 +https://crbug.com/webrtc/9506,https://issues.webrtc.org/issues/42234702 +https://crbug.com/webrtc/9507,https://issues.webrtc.org/issues/42234703 +https://crbug.com/webrtc/9508,https://issues.webrtc.org/issues/42234704 +https://crbug.com/webrtc/9509,https://issues.webrtc.org/issues/42234705 +https://crbug.com/webrtc/951,https://issues.webrtc.org/issues/42234706 +https://crbug.com/webrtc/9510,https://issues.webrtc.org/issues/42234707 +https://crbug.com/webrtc/9511,https://issues.webrtc.org/issues/42234708 +https://crbug.com/webrtc/9512,https://issues.webrtc.org/issues/42234709 +https://crbug.com/webrtc/9513,https://issues.webrtc.org/issues/42234710 +https://crbug.com/webrtc/9514,https://issues.webrtc.org/issues/42234711 +https://crbug.com/webrtc/9515,https://issues.webrtc.org/issues/42234712 +https://crbug.com/webrtc/9516,https://issues.webrtc.org/issues/42234713 +https://crbug.com/webrtc/9517,https://issues.webrtc.org/issues/42234714 +https://crbug.com/webrtc/9518,https://issues.webrtc.org/issues/42234715 +https://crbug.com/webrtc/9519,https://issues.webrtc.org/issues/42234716 +https://crbug.com/webrtc/952,https://issues.webrtc.org/issues/42234717 +https://crbug.com/webrtc/9520,https://issues.webrtc.org/issues/42234718 +https://crbug.com/webrtc/9521,https://issues.webrtc.org/issues/42234719 +https://crbug.com/webrtc/9523,https://issues.webrtc.org/issues/42234720 +https://crbug.com/webrtc/9524,https://issues.webrtc.org/issues/42234721 +https://crbug.com/webrtc/9525,https://issues.webrtc.org/issues/42234722 +https://crbug.com/webrtc/9526,https://issues.webrtc.org/issues/42234723 +https://crbug.com/webrtc/9527,https://issues.webrtc.org/issues/42234724 +https://crbug.com/webrtc/9528,https://issues.webrtc.org/issues/42234725 +https://crbug.com/webrtc/9529,https://issues.webrtc.org/issues/42234726 +https://crbug.com/webrtc/953,https://issues.webrtc.org/issues/42234727 +https://crbug.com/webrtc/9530,https://issues.webrtc.org/issues/42234728 +https://crbug.com/webrtc/9531,https://issues.webrtc.org/issues/42234729 +https://crbug.com/webrtc/9532,https://issues.webrtc.org/issues/42234730 +https://crbug.com/webrtc/9533,https://issues.webrtc.org/issues/42234731 +https://crbug.com/webrtc/9534,https://issues.webrtc.org/issues/42234732 +https://crbug.com/webrtc/9535,https://issues.webrtc.org/issues/42234733 
+https://crbug.com/webrtc/9536,https://issues.webrtc.org/issues/42234734 +https://crbug.com/webrtc/9537,https://issues.webrtc.org/issues/42234735 +https://crbug.com/webrtc/9538,https://issues.webrtc.org/issues/42234736 +https://crbug.com/webrtc/9539,https://issues.webrtc.org/issues/42234737 +https://crbug.com/webrtc/954,https://issues.webrtc.org/issues/42234738 +https://crbug.com/webrtc/9541,https://issues.webrtc.org/issues/42234739 +https://crbug.com/webrtc/9543,https://issues.webrtc.org/issues/42234740 +https://crbug.com/webrtc/9544,https://issues.webrtc.org/issues/42234741 +https://crbug.com/webrtc/9546,https://issues.webrtc.org/issues/42234742 +https://crbug.com/webrtc/9548,https://issues.webrtc.org/issues/42234743 +https://crbug.com/webrtc/9549,https://issues.webrtc.org/issues/42234744 +https://crbug.com/webrtc/955,https://issues.webrtc.org/issues/42234745 +https://crbug.com/webrtc/9550,https://issues.webrtc.org/issues/42234746 +https://crbug.com/webrtc/9551,https://issues.webrtc.org/issues/42234747 +https://crbug.com/webrtc/9552,https://issues.webrtc.org/issues/42234748 +https://crbug.com/webrtc/9554,https://issues.webrtc.org/issues/42234749 +https://crbug.com/webrtc/9555,https://issues.webrtc.org/issues/42234750 +https://crbug.com/webrtc/9556,https://issues.webrtc.org/issues/42234751 +https://crbug.com/webrtc/9557,https://issues.webrtc.org/issues/42234752 +https://crbug.com/webrtc/9558,https://issues.webrtc.org/issues/42234753 +https://crbug.com/webrtc/9559,https://issues.webrtc.org/issues/42234754 +https://crbug.com/webrtc/956,https://issues.webrtc.org/issues/42234755 +https://crbug.com/webrtc/9560,https://issues.webrtc.org/issues/42234756 +https://crbug.com/webrtc/9561,https://issues.webrtc.org/issues/42234757 +https://crbug.com/webrtc/9562,https://issues.webrtc.org/issues/42234758 +https://crbug.com/webrtc/9563,https://issues.webrtc.org/issues/42234759 +https://crbug.com/webrtc/9564,https://issues.webrtc.org/issues/42234760 +https://crbug.com/webrtc/9565,https://issues.webrtc.org/issues/42234761 +https://crbug.com/webrtc/9566,https://issues.webrtc.org/issues/42234762 +https://crbug.com/webrtc/9567,https://issues.webrtc.org/issues/42234763 +https://crbug.com/webrtc/9568,https://issues.webrtc.org/issues/42234764 +https://crbug.com/webrtc/9569,https://issues.webrtc.org/issues/42234765 +https://crbug.com/webrtc/957,https://issues.webrtc.org/issues/42234766 +https://crbug.com/webrtc/9570,https://issues.webrtc.org/issues/42234767 +https://crbug.com/webrtc/9571,https://issues.webrtc.org/issues/42234768 +https://crbug.com/webrtc/9572,https://issues.webrtc.org/issues/42234769 +https://crbug.com/webrtc/9573,https://issues.webrtc.org/issues/42234770 +https://crbug.com/webrtc/9574,https://issues.webrtc.org/issues/42234771 +https://crbug.com/webrtc/9575,https://issues.webrtc.org/issues/42234772 +https://crbug.com/webrtc/9576,https://issues.webrtc.org/issues/42234773 +https://crbug.com/webrtc/9577,https://issues.webrtc.org/issues/42234774 +https://crbug.com/webrtc/9578,https://issues.webrtc.org/issues/42234775 +https://crbug.com/webrtc/9579,https://issues.webrtc.org/issues/42234776 +https://crbug.com/webrtc/958,https://issues.webrtc.org/issues/42234777 +https://crbug.com/webrtc/9580,https://issues.webrtc.org/issues/42234778 +https://crbug.com/webrtc/9581,https://issues.webrtc.org/issues/42234779 +https://crbug.com/webrtc/9582,https://issues.webrtc.org/issues/42234780 +https://crbug.com/webrtc/9583,https://issues.webrtc.org/issues/42234781 
+https://crbug.com/webrtc/9584,https://issues.webrtc.org/issues/42234782 +https://crbug.com/webrtc/9585,https://issues.webrtc.org/issues/42234783 +https://crbug.com/webrtc/9586,https://issues.webrtc.org/issues/42234784 +https://crbug.com/webrtc/9587,https://issues.webrtc.org/issues/42234785 +https://crbug.com/webrtc/9588,https://issues.webrtc.org/issues/42234786 +https://crbug.com/webrtc/9589,https://issues.webrtc.org/issues/42234787 +https://crbug.com/webrtc/959,https://issues.webrtc.org/issues/42234788 +https://crbug.com/webrtc/9590,https://issues.webrtc.org/issues/42234789 +https://crbug.com/webrtc/9591,https://issues.webrtc.org/issues/42234790 +https://crbug.com/webrtc/9592,https://issues.webrtc.org/issues/42234791 +https://crbug.com/webrtc/9593,https://issues.webrtc.org/issues/42234792 +https://crbug.com/webrtc/9594,https://issues.webrtc.org/issues/42234793 +https://crbug.com/webrtc/9595,https://issues.webrtc.org/issues/42234794 +https://crbug.com/webrtc/9596,https://issues.webrtc.org/issues/42234795 +https://crbug.com/webrtc/9597,https://issues.webrtc.org/issues/42234796 +https://crbug.com/webrtc/9598,https://issues.webrtc.org/issues/42234797 +https://crbug.com/webrtc/9599,https://issues.webrtc.org/issues/42234798 +https://crbug.com/webrtc/96,https://issues.webrtc.org/issues/42234799 +https://crbug.com/webrtc/960,https://issues.webrtc.org/issues/42234800 +https://crbug.com/webrtc/9600,https://issues.webrtc.org/issues/42234801 +https://crbug.com/webrtc/9601,https://issues.webrtc.org/issues/42234802 +https://crbug.com/webrtc/9602,https://issues.webrtc.org/issues/42234803 +https://crbug.com/webrtc/9603,https://issues.webrtc.org/issues/42234804 +https://crbug.com/webrtc/9604,https://issues.webrtc.org/issues/42234805 +https://crbug.com/webrtc/9605,https://issues.webrtc.org/issues/42234806 +https://crbug.com/webrtc/9606,https://issues.webrtc.org/issues/42234807 +https://crbug.com/webrtc/9607,https://issues.webrtc.org/issues/42234808 +https://crbug.com/webrtc/9608,https://issues.webrtc.org/issues/42234809 +https://crbug.com/webrtc/9609,https://issues.webrtc.org/issues/42234810 +https://crbug.com/webrtc/961,https://issues.webrtc.org/issues/42234811 +https://crbug.com/webrtc/9610,https://issues.webrtc.org/issues/42234812 +https://crbug.com/webrtc/9611,https://issues.webrtc.org/issues/42234813 +https://crbug.com/webrtc/9612,https://issues.webrtc.org/issues/42234814 +https://crbug.com/webrtc/9613,https://issues.webrtc.org/issues/42234815 +https://crbug.com/webrtc/9614,https://issues.webrtc.org/issues/42234816 +https://crbug.com/webrtc/9615,https://issues.webrtc.org/issues/42234817 +https://crbug.com/webrtc/9616,https://issues.webrtc.org/issues/42234818 +https://crbug.com/webrtc/9617,https://issues.webrtc.org/issues/42234819 +https://crbug.com/webrtc/9618,https://issues.webrtc.org/issues/42234820 +https://crbug.com/webrtc/9619,https://issues.webrtc.org/issues/42234821 +https://crbug.com/webrtc/962,https://issues.webrtc.org/issues/42234822 +https://crbug.com/webrtc/9620,https://issues.webrtc.org/issues/42234823 +https://crbug.com/webrtc/9621,https://issues.webrtc.org/issues/42234824 +https://crbug.com/webrtc/9622,https://issues.webrtc.org/issues/42234825 +https://crbug.com/webrtc/9623,https://issues.webrtc.org/issues/42234826 +https://crbug.com/webrtc/9625,https://issues.webrtc.org/issues/42234827 +https://crbug.com/webrtc/9626,https://issues.webrtc.org/issues/42234828 +https://crbug.com/webrtc/9627,https://issues.webrtc.org/issues/42234829 
+https://crbug.com/webrtc/9628,https://issues.webrtc.org/issues/42234830 +https://crbug.com/webrtc/9629,https://issues.webrtc.org/issues/42234831 +https://crbug.com/webrtc/963,https://issues.webrtc.org/issues/42234832 +https://crbug.com/webrtc/9630,https://issues.webrtc.org/issues/42234833 +https://crbug.com/webrtc/9631,https://issues.webrtc.org/issues/42234834 +https://crbug.com/webrtc/9632,https://issues.webrtc.org/issues/42234835 +https://crbug.com/webrtc/9633,https://issues.webrtc.org/issues/42234836 +https://crbug.com/webrtc/9634,https://issues.webrtc.org/issues/42234837 +https://crbug.com/webrtc/9635,https://issues.webrtc.org/issues/42234838 +https://crbug.com/webrtc/9636,https://issues.webrtc.org/issues/42234839 +https://crbug.com/webrtc/9637,https://issues.webrtc.org/issues/42234840 +https://crbug.com/webrtc/9638,https://issues.webrtc.org/issues/42234841 +https://crbug.com/webrtc/9639,https://issues.webrtc.org/issues/42234842 +https://crbug.com/webrtc/964,https://issues.webrtc.org/issues/42234843 +https://crbug.com/webrtc/9640,https://issues.webrtc.org/issues/42234844 +https://crbug.com/webrtc/9641,https://issues.webrtc.org/issues/42234845 +https://crbug.com/webrtc/9642,https://issues.webrtc.org/issues/42234846 +https://crbug.com/webrtc/9643,https://issues.webrtc.org/issues/42234847 +https://crbug.com/webrtc/9644,https://issues.webrtc.org/issues/42234848 +https://crbug.com/webrtc/9645,https://issues.webrtc.org/issues/42234849 +https://crbug.com/webrtc/9646,https://issues.webrtc.org/issues/42234850 +https://crbug.com/webrtc/9647,https://issues.webrtc.org/issues/42234851 +https://crbug.com/webrtc/9648,https://issues.webrtc.org/issues/42234852 +https://crbug.com/webrtc/9649,https://issues.webrtc.org/issues/42234853 +https://crbug.com/webrtc/965,https://issues.webrtc.org/issues/42234854 +https://crbug.com/webrtc/9650,https://issues.webrtc.org/issues/42234855 +https://crbug.com/webrtc/9651,https://issues.webrtc.org/issues/42234856 +https://crbug.com/webrtc/9652,https://issues.webrtc.org/issues/42234857 +https://crbug.com/webrtc/9653,https://issues.webrtc.org/issues/42234858 +https://crbug.com/webrtc/9654,https://issues.webrtc.org/issues/42234859 +https://crbug.com/webrtc/9655,https://issues.webrtc.org/issues/42234860 +https://crbug.com/webrtc/9656,https://issues.webrtc.org/issues/42234861 +https://crbug.com/webrtc/9657,https://issues.webrtc.org/issues/42234862 +https://crbug.com/webrtc/9658,https://issues.webrtc.org/issues/42234863 +https://crbug.com/webrtc/9659,https://issues.webrtc.org/issues/42234864 +https://crbug.com/webrtc/966,https://issues.webrtc.org/issues/42234865 +https://crbug.com/webrtc/9660,https://issues.webrtc.org/issues/42234866 +https://crbug.com/webrtc/9661,https://issues.webrtc.org/issues/42234867 +https://crbug.com/webrtc/9662,https://issues.webrtc.org/issues/42234868 +https://crbug.com/webrtc/9663,https://issues.webrtc.org/issues/42234869 +https://crbug.com/webrtc/9664,https://issues.webrtc.org/issues/42234870 +https://crbug.com/webrtc/9665,https://issues.webrtc.org/issues/42234871 +https://crbug.com/webrtc/9666,https://issues.webrtc.org/issues/42234872 +https://crbug.com/webrtc/9667,https://issues.webrtc.org/issues/42234873 +https://crbug.com/webrtc/9668,https://issues.webrtc.org/issues/42234874 +https://crbug.com/webrtc/9669,https://issues.webrtc.org/issues/42234875 +https://crbug.com/webrtc/967,https://issues.webrtc.org/issues/42234876 +https://crbug.com/webrtc/9670,https://issues.webrtc.org/issues/42234877 
+https://crbug.com/webrtc/9671,https://issues.webrtc.org/issues/42234878 +https://crbug.com/webrtc/9672,https://issues.webrtc.org/issues/42234879 +https://crbug.com/webrtc/9673,https://issues.webrtc.org/issues/42234880 +https://crbug.com/webrtc/9674,https://issues.webrtc.org/issues/42234881 +https://crbug.com/webrtc/9675,https://issues.webrtc.org/issues/42234882 +https://crbug.com/webrtc/9676,https://issues.webrtc.org/issues/42234883 +https://crbug.com/webrtc/9677,https://issues.webrtc.org/issues/42234884 +https://crbug.com/webrtc/9678,https://issues.webrtc.org/issues/42234885 +https://crbug.com/webrtc/9679,https://issues.webrtc.org/issues/42234886 +https://crbug.com/webrtc/968,https://issues.webrtc.org/issues/42234887 +https://crbug.com/webrtc/9680,https://issues.webrtc.org/issues/42234888 +https://crbug.com/webrtc/9681,https://issues.webrtc.org/issues/42234889 +https://crbug.com/webrtc/9682,https://issues.webrtc.org/issues/42234890 +https://crbug.com/webrtc/9683,https://issues.webrtc.org/issues/42234891 +https://crbug.com/webrtc/9684,https://issues.webrtc.org/issues/42234892 +https://crbug.com/webrtc/9685,https://issues.webrtc.org/issues/42234893 +https://crbug.com/webrtc/9686,https://issues.webrtc.org/issues/42234894 +https://crbug.com/webrtc/9687,https://issues.webrtc.org/issues/42234895 +https://crbug.com/webrtc/9688,https://issues.webrtc.org/issues/42234896 +https://crbug.com/webrtc/9689,https://issues.webrtc.org/issues/42234897 +https://crbug.com/webrtc/969,https://issues.webrtc.org/issues/42234898 +https://crbug.com/webrtc/9690,https://issues.webrtc.org/issues/42234899 +https://crbug.com/webrtc/9691,https://issues.webrtc.org/issues/42234900 +https://crbug.com/webrtc/9692,https://issues.webrtc.org/issues/42234901 +https://crbug.com/webrtc/9693,https://issues.webrtc.org/issues/42234902 +https://crbug.com/webrtc/9694,https://issues.webrtc.org/issues/42234903 +https://crbug.com/webrtc/9695,https://issues.webrtc.org/issues/42234904 +https://crbug.com/webrtc/9697,https://issues.webrtc.org/issues/42234905 +https://crbug.com/webrtc/9698,https://issues.webrtc.org/issues/42234906 +https://crbug.com/webrtc/9699,https://issues.webrtc.org/issues/42234907 +https://crbug.com/webrtc/97,https://issues.webrtc.org/issues/42234908 +https://crbug.com/webrtc/970,https://issues.webrtc.org/issues/42234909 +https://crbug.com/webrtc/9700,https://issues.webrtc.org/issues/42234910 +https://crbug.com/webrtc/9701,https://issues.webrtc.org/issues/42234911 +https://crbug.com/webrtc/9702,https://issues.webrtc.org/issues/42234912 +https://crbug.com/webrtc/9703,https://issues.webrtc.org/issues/42234913 +https://crbug.com/webrtc/9704,https://issues.webrtc.org/issues/42234914 +https://crbug.com/webrtc/9705,https://issues.webrtc.org/issues/42234915 +https://crbug.com/webrtc/9707,https://issues.webrtc.org/issues/42234916 +https://crbug.com/webrtc/9708,https://issues.webrtc.org/issues/42234917 +https://crbug.com/webrtc/9709,https://issues.webrtc.org/issues/42234918 +https://crbug.com/webrtc/971,https://issues.webrtc.org/issues/42234919 +https://crbug.com/webrtc/9710,https://issues.webrtc.org/issues/42234920 +https://crbug.com/webrtc/9711,https://issues.webrtc.org/issues/42234921 +https://crbug.com/webrtc/9712,https://issues.webrtc.org/issues/42234922 +https://crbug.com/webrtc/9713,https://issues.webrtc.org/issues/42234923 +https://crbug.com/webrtc/9714,https://issues.webrtc.org/issues/42234924 +https://crbug.com/webrtc/9715,https://issues.webrtc.org/issues/42234925 
+https://crbug.com/webrtc/9716,https://issues.webrtc.org/issues/42234926 +https://crbug.com/webrtc/9717,https://issues.webrtc.org/issues/42234927 +https://crbug.com/webrtc/9718,https://issues.webrtc.org/issues/42234928 +https://crbug.com/webrtc/9719,https://issues.webrtc.org/issues/42234929 +https://crbug.com/webrtc/972,https://issues.webrtc.org/issues/42234930 +https://crbug.com/webrtc/9720,https://issues.webrtc.org/issues/42234931 +https://crbug.com/webrtc/9721,https://issues.webrtc.org/issues/42234932 +https://crbug.com/webrtc/9722,https://issues.webrtc.org/issues/42234933 +https://crbug.com/webrtc/9723,https://issues.webrtc.org/issues/42234934 +https://crbug.com/webrtc/9724,https://issues.webrtc.org/issues/42234935 +https://crbug.com/webrtc/9726,https://issues.webrtc.org/issues/42234936 +https://crbug.com/webrtc/9727,https://issues.webrtc.org/issues/42234937 +https://crbug.com/webrtc/9728,https://issues.webrtc.org/issues/42234938 +https://crbug.com/webrtc/9729,https://issues.webrtc.org/issues/42234939 +https://crbug.com/webrtc/973,https://issues.webrtc.org/issues/42234940 +https://crbug.com/webrtc/9730,https://issues.webrtc.org/issues/42234941 +https://crbug.com/webrtc/9731,https://issues.webrtc.org/issues/42234942 +https://crbug.com/webrtc/9732,https://issues.webrtc.org/issues/42234943 +https://crbug.com/webrtc/9733,https://issues.webrtc.org/issues/42234944 +https://crbug.com/webrtc/9734,https://issues.webrtc.org/issues/42234945 +https://crbug.com/webrtc/9735,https://issues.webrtc.org/issues/42234946 +https://crbug.com/webrtc/9736,https://issues.webrtc.org/issues/42234947 +https://crbug.com/webrtc/9737,https://issues.webrtc.org/issues/42234948 +https://crbug.com/webrtc/9738,https://issues.webrtc.org/issues/42234949 +https://crbug.com/webrtc/9739,https://issues.webrtc.org/issues/42234950 +https://crbug.com/webrtc/974,https://issues.webrtc.org/issues/42234951 +https://crbug.com/webrtc/9741,https://issues.webrtc.org/issues/42234952 +https://crbug.com/webrtc/9742,https://issues.webrtc.org/issues/42234953 +https://crbug.com/webrtc/9743,https://issues.webrtc.org/issues/42234954 +https://crbug.com/webrtc/9744,https://issues.webrtc.org/issues/42234955 +https://crbug.com/webrtc/9745,https://issues.webrtc.org/issues/42234956 +https://crbug.com/webrtc/9746,https://issues.webrtc.org/issues/42234957 +https://crbug.com/webrtc/9747,https://issues.webrtc.org/issues/42234958 +https://crbug.com/webrtc/9748,https://issues.webrtc.org/issues/42234959 +https://crbug.com/webrtc/9749,https://issues.webrtc.org/issues/42234960 +https://crbug.com/webrtc/975,https://issues.webrtc.org/issues/42234961 +https://crbug.com/webrtc/9750,https://issues.webrtc.org/issues/42234962 +https://crbug.com/webrtc/9751,https://issues.webrtc.org/issues/42234963 +https://crbug.com/webrtc/9752,https://issues.webrtc.org/issues/42234964 +https://crbug.com/webrtc/9753,https://issues.webrtc.org/issues/42234965 +https://crbug.com/webrtc/9754,https://issues.webrtc.org/issues/42234966 +https://crbug.com/webrtc/9755,https://issues.webrtc.org/issues/42234967 +https://crbug.com/webrtc/9756,https://issues.webrtc.org/issues/42234968 +https://crbug.com/webrtc/9757,https://issues.webrtc.org/issues/42234969 +https://crbug.com/webrtc/9758,https://issues.webrtc.org/issues/42234970 +https://crbug.com/webrtc/9759,https://issues.webrtc.org/issues/42234971 +https://crbug.com/webrtc/976,https://issues.webrtc.org/issues/42234972 +https://crbug.com/webrtc/9760,https://issues.webrtc.org/issues/42234973 
+https://crbug.com/webrtc/9761,https://issues.webrtc.org/issues/42234974 +https://crbug.com/webrtc/9762,https://issues.webrtc.org/issues/42234975 +https://crbug.com/webrtc/9763,https://issues.webrtc.org/issues/42234976 +https://crbug.com/webrtc/9764,https://issues.webrtc.org/issues/42234977 +https://crbug.com/webrtc/9765,https://issues.webrtc.org/issues/42234978 +https://crbug.com/webrtc/9766,https://issues.webrtc.org/issues/42234979 +https://crbug.com/webrtc/9767,https://issues.webrtc.org/issues/42234980 +https://crbug.com/webrtc/9768,https://issues.webrtc.org/issues/42234981 +https://crbug.com/webrtc/9769,https://issues.webrtc.org/issues/42234982 +https://crbug.com/webrtc/977,https://issues.webrtc.org/issues/42234983 +https://crbug.com/webrtc/9771,https://issues.webrtc.org/issues/42234984 +https://crbug.com/webrtc/9772,https://issues.webrtc.org/issues/42234985 +https://crbug.com/webrtc/9773,https://issues.webrtc.org/issues/42234986 +https://crbug.com/webrtc/9774,https://issues.webrtc.org/issues/42234987 +https://crbug.com/webrtc/9776,https://issues.webrtc.org/issues/42234988 +https://crbug.com/webrtc/9778,https://issues.webrtc.org/issues/42234989 +https://crbug.com/webrtc/9779,https://issues.webrtc.org/issues/42234990 +https://crbug.com/webrtc/978,https://issues.webrtc.org/issues/42234991 +https://crbug.com/webrtc/9780,https://issues.webrtc.org/issues/42234992 +https://crbug.com/webrtc/9781,https://issues.webrtc.org/issues/42234993 +https://crbug.com/webrtc/9782,https://issues.webrtc.org/issues/42234994 +https://crbug.com/webrtc/9783,https://issues.webrtc.org/issues/42234995 +https://crbug.com/webrtc/9784,https://issues.webrtc.org/issues/42234996 +https://crbug.com/webrtc/9785,https://issues.webrtc.org/issues/42234997 +https://crbug.com/webrtc/9786,https://issues.webrtc.org/issues/42234998 +https://crbug.com/webrtc/9787,https://issues.webrtc.org/issues/42234999 +https://crbug.com/webrtc/9788,https://issues.webrtc.org/issues/42235000 +https://crbug.com/webrtc/9789,https://issues.webrtc.org/issues/42235001 +https://crbug.com/webrtc/979,https://issues.webrtc.org/issues/42235002 +https://crbug.com/webrtc/9790,https://issues.webrtc.org/issues/42235003 +https://crbug.com/webrtc/9791,https://issues.webrtc.org/issues/42235004 +https://crbug.com/webrtc/9792,https://issues.webrtc.org/issues/42235005 +https://crbug.com/webrtc/9793,https://issues.webrtc.org/issues/42235006 +https://crbug.com/webrtc/9794,https://issues.webrtc.org/issues/42235007 +https://crbug.com/webrtc/9795,https://issues.webrtc.org/issues/42235008 +https://crbug.com/webrtc/9796,https://issues.webrtc.org/issues/42235009 +https://crbug.com/webrtc/9797,https://issues.webrtc.org/issues/42235010 +https://crbug.com/webrtc/9798,https://issues.webrtc.org/issues/42235011 +https://crbug.com/webrtc/9799,https://issues.webrtc.org/issues/42235012 +https://crbug.com/webrtc/98,https://issues.webrtc.org/issues/42235013 +https://crbug.com/webrtc/980,https://issues.webrtc.org/issues/42235014 +https://crbug.com/webrtc/9800,https://issues.webrtc.org/issues/42235015 +https://crbug.com/webrtc/9801,https://issues.webrtc.org/issues/42235016 +https://crbug.com/webrtc/9802,https://issues.webrtc.org/issues/42235017 +https://crbug.com/webrtc/9803,https://issues.webrtc.org/issues/42235018 +https://crbug.com/webrtc/9804,https://issues.webrtc.org/issues/42235019 +https://crbug.com/webrtc/9805,https://issues.webrtc.org/issues/42235020 +https://crbug.com/webrtc/9806,https://issues.webrtc.org/issues/42235021 
+https://crbug.com/webrtc/9807,https://issues.webrtc.org/issues/42235022 +https://crbug.com/webrtc/9808,https://issues.webrtc.org/issues/42235023 +https://crbug.com/webrtc/9809,https://issues.webrtc.org/issues/42235024 +https://crbug.com/webrtc/981,https://issues.webrtc.org/issues/42235025 +https://crbug.com/webrtc/9810,https://issues.webrtc.org/issues/42235026 +https://crbug.com/webrtc/9811,https://issues.webrtc.org/issues/42235027 +https://crbug.com/webrtc/9812,https://issues.webrtc.org/issues/42235028 +https://crbug.com/webrtc/9813,https://issues.webrtc.org/issues/42235029 +https://crbug.com/webrtc/9815,https://issues.webrtc.org/issues/42235030 +https://crbug.com/webrtc/9816,https://issues.webrtc.org/issues/42235031 +https://crbug.com/webrtc/9817,https://issues.webrtc.org/issues/42235032 +https://crbug.com/webrtc/9818,https://issues.webrtc.org/issues/42235033 +https://crbug.com/webrtc/9819,https://issues.webrtc.org/issues/42235034 +https://crbug.com/webrtc/982,https://issues.webrtc.org/issues/42235035 +https://crbug.com/webrtc/9820,https://issues.webrtc.org/issues/42235036 +https://crbug.com/webrtc/9821,https://issues.webrtc.org/issues/42235037 +https://crbug.com/webrtc/9822,https://issues.webrtc.org/issues/42235038 +https://crbug.com/webrtc/9823,https://issues.webrtc.org/issues/42235039 +https://crbug.com/webrtc/9824,https://issues.webrtc.org/issues/42235040 +https://crbug.com/webrtc/9825,https://issues.webrtc.org/issues/42235041 +https://crbug.com/webrtc/9826,https://issues.webrtc.org/issues/42235042 +https://crbug.com/webrtc/9827,https://issues.webrtc.org/issues/42235043 +https://crbug.com/webrtc/9828,https://issues.webrtc.org/issues/42235044 +https://crbug.com/webrtc/9829,https://issues.webrtc.org/issues/42235045 +https://crbug.com/webrtc/983,https://issues.webrtc.org/issues/42235046 +https://crbug.com/webrtc/9830,https://issues.webrtc.org/issues/42235047 +https://crbug.com/webrtc/9831,https://issues.webrtc.org/issues/42235048 +https://crbug.com/webrtc/9832,https://issues.webrtc.org/issues/42235049 +https://crbug.com/webrtc/9833,https://issues.webrtc.org/issues/42235050 +https://crbug.com/webrtc/9834,https://issues.webrtc.org/issues/42235051 +https://crbug.com/webrtc/9835,https://issues.webrtc.org/issues/42235052 +https://crbug.com/webrtc/9836,https://issues.webrtc.org/issues/42235053 +https://crbug.com/webrtc/9837,https://issues.webrtc.org/issues/42235054 +https://crbug.com/webrtc/9838,https://issues.webrtc.org/issues/42235055 +https://crbug.com/webrtc/9839,https://issues.webrtc.org/issues/42235056 +https://crbug.com/webrtc/984,https://issues.webrtc.org/issues/42235057 +https://crbug.com/webrtc/9840,https://issues.webrtc.org/issues/42235058 +https://crbug.com/webrtc/9841,https://issues.webrtc.org/issues/42235059 +https://crbug.com/webrtc/9842,https://issues.webrtc.org/issues/42235060 +https://crbug.com/webrtc/9843,https://issues.webrtc.org/issues/42235061 +https://crbug.com/webrtc/9844,https://issues.webrtc.org/issues/42235062 +https://crbug.com/webrtc/9845,https://issues.webrtc.org/issues/42235063 +https://crbug.com/webrtc/9846,https://issues.webrtc.org/issues/42235064 +https://crbug.com/webrtc/9847,https://issues.webrtc.org/issues/42235065 +https://crbug.com/webrtc/9848,https://issues.webrtc.org/issues/42235066 +https://crbug.com/webrtc/9849,https://issues.webrtc.org/issues/42235067 +https://crbug.com/webrtc/985,https://issues.webrtc.org/issues/42235068 +https://crbug.com/webrtc/9850,https://issues.webrtc.org/issues/42235069 
+https://crbug.com/webrtc/9851,https://issues.webrtc.org/issues/42235070 +https://crbug.com/webrtc/9852,https://issues.webrtc.org/issues/42235071 +https://crbug.com/webrtc/9853,https://issues.webrtc.org/issues/42235072 +https://crbug.com/webrtc/9854,https://issues.webrtc.org/issues/42235073 +https://crbug.com/webrtc/9855,https://issues.webrtc.org/issues/42235074 +https://crbug.com/webrtc/9856,https://issues.webrtc.org/issues/42235075 +https://crbug.com/webrtc/9857,https://issues.webrtc.org/issues/42235076 +https://crbug.com/webrtc/9858,https://issues.webrtc.org/issues/42235077 +https://crbug.com/webrtc/9859,https://issues.webrtc.org/issues/42235078 +https://crbug.com/webrtc/986,https://issues.webrtc.org/issues/42235079 +https://crbug.com/webrtc/9860,https://issues.webrtc.org/issues/42235080 +https://crbug.com/webrtc/9862,https://issues.webrtc.org/issues/42235081 +https://crbug.com/webrtc/9863,https://issues.webrtc.org/issues/42235082 +https://crbug.com/webrtc/9864,https://issues.webrtc.org/issues/42235083 +https://crbug.com/webrtc/9865,https://issues.webrtc.org/issues/42235084 +https://crbug.com/webrtc/9866,https://issues.webrtc.org/issues/42235085 +https://crbug.com/webrtc/9867,https://issues.webrtc.org/issues/42235086 +https://crbug.com/webrtc/9868,https://issues.webrtc.org/issues/42235087 +https://crbug.com/webrtc/9869,https://issues.webrtc.org/issues/42235088 +https://crbug.com/webrtc/987,https://issues.webrtc.org/issues/42235089 +https://crbug.com/webrtc/9870,https://issues.webrtc.org/issues/42235090 +https://crbug.com/webrtc/9871,https://issues.webrtc.org/issues/42235091 +https://crbug.com/webrtc/9872,https://issues.webrtc.org/issues/42235092 +https://crbug.com/webrtc/9873,https://issues.webrtc.org/issues/42235093 +https://crbug.com/webrtc/9874,https://issues.webrtc.org/issues/42235094 +https://crbug.com/webrtc/9875,https://issues.webrtc.org/issues/42235095 +https://crbug.com/webrtc/9876,https://issues.webrtc.org/issues/42235096 +https://crbug.com/webrtc/9879,https://issues.webrtc.org/issues/42235097 +https://crbug.com/webrtc/988,https://issues.webrtc.org/issues/42235098 +https://crbug.com/webrtc/9880,https://issues.webrtc.org/issues/42235099 +https://crbug.com/webrtc/9881,https://issues.webrtc.org/issues/42235100 +https://crbug.com/webrtc/9882,https://issues.webrtc.org/issues/42235101 +https://crbug.com/webrtc/9883,https://issues.webrtc.org/issues/42235102 +https://crbug.com/webrtc/9884,https://issues.webrtc.org/issues/42235103 +https://crbug.com/webrtc/9885,https://issues.webrtc.org/issues/42235104 +https://crbug.com/webrtc/9886,https://issues.webrtc.org/issues/42235105 +https://crbug.com/webrtc/9887,https://issues.webrtc.org/issues/42235106 +https://crbug.com/webrtc/9888,https://issues.webrtc.org/issues/42235107 +https://crbug.com/webrtc/9889,https://issues.webrtc.org/issues/42235108 +https://crbug.com/webrtc/989,https://issues.webrtc.org/issues/42235109 +https://crbug.com/webrtc/9890,https://issues.webrtc.org/issues/42235110 +https://crbug.com/webrtc/9891,https://issues.webrtc.org/issues/42235111 +https://crbug.com/webrtc/9892,https://issues.webrtc.org/issues/42235112 +https://crbug.com/webrtc/9893,https://issues.webrtc.org/issues/42235113 +https://crbug.com/webrtc/9894,https://issues.webrtc.org/issues/42235114 +https://crbug.com/webrtc/9895,https://issues.webrtc.org/issues/42235115 +https://crbug.com/webrtc/9896,https://issues.webrtc.org/issues/42235116 +https://crbug.com/webrtc/9897,https://issues.webrtc.org/issues/42235117 
+https://crbug.com/webrtc/9898,https://issues.webrtc.org/issues/42235118 +https://crbug.com/webrtc/99,https://issues.webrtc.org/issues/42235119 +https://crbug.com/webrtc/990,https://issues.webrtc.org/issues/42235120 +https://crbug.com/webrtc/9901,https://issues.webrtc.org/issues/42235121 +https://crbug.com/webrtc/9902,https://issues.webrtc.org/issues/42235122 +https://crbug.com/webrtc/9903,https://issues.webrtc.org/issues/42235123 +https://crbug.com/webrtc/9904,https://issues.webrtc.org/issues/42235124 +https://crbug.com/webrtc/9905,https://issues.webrtc.org/issues/42235125 +https://crbug.com/webrtc/9906,https://issues.webrtc.org/issues/42235126 +https://crbug.com/webrtc/9907,https://issues.webrtc.org/issues/42235127 +https://crbug.com/webrtc/9908,https://issues.webrtc.org/issues/42235128 +https://crbug.com/webrtc/9909,https://issues.webrtc.org/issues/42235129 +https://crbug.com/webrtc/991,https://issues.webrtc.org/issues/42235130 +https://crbug.com/webrtc/9910,https://issues.webrtc.org/issues/42235131 +https://crbug.com/webrtc/9911,https://issues.webrtc.org/issues/42235132 +https://crbug.com/webrtc/9912,https://issues.webrtc.org/issues/42235133 +https://crbug.com/webrtc/9913,https://issues.webrtc.org/issues/42235134 +https://crbug.com/webrtc/9914,https://issues.webrtc.org/issues/42235135 +https://crbug.com/webrtc/9915,https://issues.webrtc.org/issues/42235136 +https://crbug.com/webrtc/9916,https://issues.webrtc.org/issues/42235137 +https://crbug.com/webrtc/9917,https://issues.webrtc.org/issues/42235138 +https://crbug.com/webrtc/9918,https://issues.webrtc.org/issues/42235139 +https://crbug.com/webrtc/9919,https://issues.webrtc.org/issues/42235140 +https://crbug.com/webrtc/992,https://issues.webrtc.org/issues/42235141 +https://crbug.com/webrtc/9920,https://issues.webrtc.org/issues/42235142 +https://crbug.com/webrtc/9921,https://issues.webrtc.org/issues/42235143 +https://crbug.com/webrtc/9923,https://issues.webrtc.org/issues/42235144 +https://crbug.com/webrtc/9924,https://issues.webrtc.org/issues/42235145 +https://crbug.com/webrtc/9925,https://issues.webrtc.org/issues/42235146 +https://crbug.com/webrtc/9926,https://issues.webrtc.org/issues/42235147 +https://crbug.com/webrtc/9927,https://issues.webrtc.org/issues/42235148 +https://crbug.com/webrtc/9928,https://issues.webrtc.org/issues/42235149 +https://crbug.com/webrtc/9929,https://issues.webrtc.org/issues/42235150 +https://crbug.com/webrtc/993,https://issues.webrtc.org/issues/42235151 +https://crbug.com/webrtc/9930,https://issues.webrtc.org/issues/42235152 +https://crbug.com/webrtc/9931,https://issues.webrtc.org/issues/42235153 +https://crbug.com/webrtc/9932,https://issues.webrtc.org/issues/42235154 +https://crbug.com/webrtc/9933,https://issues.webrtc.org/issues/42235155 +https://crbug.com/webrtc/9934,https://issues.webrtc.org/issues/42235156 +https://crbug.com/webrtc/9935,https://issues.webrtc.org/issues/42235157 +https://crbug.com/webrtc/9937,https://issues.webrtc.org/issues/42235158 +https://crbug.com/webrtc/9938,https://issues.webrtc.org/issues/42235159 +https://crbug.com/webrtc/9939,https://issues.webrtc.org/issues/42235160 +https://crbug.com/webrtc/994,https://issues.webrtc.org/issues/42235161 +https://crbug.com/webrtc/9940,https://issues.webrtc.org/issues/42235162 +https://crbug.com/webrtc/9941,https://issues.webrtc.org/issues/42235163 +https://crbug.com/webrtc/9942,https://issues.webrtc.org/issues/42235164 +https://crbug.com/webrtc/9943,https://issues.webrtc.org/issues/42235165 
+https://crbug.com/webrtc/9944,https://issues.webrtc.org/issues/42235166 +https://crbug.com/webrtc/9945,https://issues.webrtc.org/issues/42235167 +https://crbug.com/webrtc/9946,https://issues.webrtc.org/issues/42235168 +https://crbug.com/webrtc/9947,https://issues.webrtc.org/issues/42235169 +https://crbug.com/webrtc/9948,https://issues.webrtc.org/issues/42235170 +https://crbug.com/webrtc/9949,https://issues.webrtc.org/issues/42235171 +https://crbug.com/webrtc/995,https://issues.webrtc.org/issues/42235172 +https://crbug.com/webrtc/9950,https://issues.webrtc.org/issues/42235173 +https://crbug.com/webrtc/9951,https://issues.webrtc.org/issues/42235174 +https://crbug.com/webrtc/9952,https://issues.webrtc.org/issues/42235175 +https://crbug.com/webrtc/9953,https://issues.webrtc.org/issues/42235176 +https://crbug.com/webrtc/9954,https://issues.webrtc.org/issues/42235177 +https://crbug.com/webrtc/9955,https://issues.webrtc.org/issues/42235178 +https://crbug.com/webrtc/9957,https://issues.webrtc.org/issues/42235179 +https://crbug.com/webrtc/9958,https://issues.webrtc.org/issues/42235180 +https://crbug.com/webrtc/9959,https://issues.webrtc.org/issues/42235181 +https://crbug.com/webrtc/996,https://issues.webrtc.org/issues/42235182 +https://crbug.com/webrtc/9960,https://issues.webrtc.org/issues/42235183 +https://crbug.com/webrtc/9961,https://issues.webrtc.org/issues/42235184 +https://crbug.com/webrtc/9962,https://issues.webrtc.org/issues/42235185 +https://crbug.com/webrtc/9963,https://issues.webrtc.org/issues/42235186 +https://crbug.com/webrtc/9964,https://issues.webrtc.org/issues/42235187 +https://crbug.com/webrtc/9965,https://issues.webrtc.org/issues/42235188 +https://crbug.com/webrtc/9966,https://issues.webrtc.org/issues/42235189 +https://crbug.com/webrtc/9967,https://issues.webrtc.org/issues/42235190 +https://crbug.com/webrtc/9968,https://issues.webrtc.org/issues/42235191 +https://crbug.com/webrtc/9969,https://issues.webrtc.org/issues/42235192 +https://crbug.com/webrtc/997,https://issues.webrtc.org/issues/42235193 +https://crbug.com/webrtc/9970,https://issues.webrtc.org/issues/42235194 +https://crbug.com/webrtc/9971,https://issues.webrtc.org/issues/42235195 +https://crbug.com/webrtc/9972,https://issues.webrtc.org/issues/42235196 +https://crbug.com/webrtc/9973,https://issues.webrtc.org/issues/42235197 +https://crbug.com/webrtc/9974,https://issues.webrtc.org/issues/42235198 +https://crbug.com/webrtc/9975,https://issues.webrtc.org/issues/42235199 +https://crbug.com/webrtc/9976,https://issues.webrtc.org/issues/42235200 +https://crbug.com/webrtc/9977,https://issues.webrtc.org/issues/42235201 +https://crbug.com/webrtc/9978,https://issues.webrtc.org/issues/42235202 +https://crbug.com/webrtc/9979,https://issues.webrtc.org/issues/42235203 +https://crbug.com/webrtc/998,https://issues.webrtc.org/issues/42235204 +https://crbug.com/webrtc/9980,https://issues.webrtc.org/issues/42235205 +https://crbug.com/webrtc/9981,https://issues.webrtc.org/issues/42235206 +https://crbug.com/webrtc/9982,https://issues.webrtc.org/issues/42235207 +https://crbug.com/webrtc/9983,https://issues.webrtc.org/issues/42235208 +https://crbug.com/webrtc/9984,https://issues.webrtc.org/issues/42235209 +https://crbug.com/webrtc/9985,https://issues.webrtc.org/issues/42235210 +https://crbug.com/webrtc/9986,https://issues.webrtc.org/issues/42235211 +https://crbug.com/webrtc/9987,https://issues.webrtc.org/issues/42235212 +https://crbug.com/webrtc/9988,https://issues.webrtc.org/issues/42235213 
+https://crbug.com/webrtc/9989,https://issues.webrtc.org/issues/42235214
+https://crbug.com/webrtc/999,https://issues.webrtc.org/issues/42235215
+https://crbug.com/webrtc/9990,https://issues.webrtc.org/issues/42235216
+https://crbug.com/webrtc/9991,https://issues.webrtc.org/issues/42235217
+https://crbug.com/webrtc/9992,https://issues.webrtc.org/issues/42235218
+https://crbug.com/webrtc/9993,https://issues.webrtc.org/issues/42235219
+https://crbug.com/webrtc/9994,https://issues.webrtc.org/issues/42235220
+https://crbug.com/webrtc/9995,https://issues.webrtc.org/issues/42235221
+https://crbug.com/webrtc/9996,https://issues.webrtc.org/issues/42235222
+https://crbug.com/webrtc/9997,https://issues.webrtc.org/issues/42235223
+https://crbug.com/webrtc/9998,https://issues.webrtc.org/issues/42235224
+https://crbug.com/webrtc/9999,https://issues.webrtc.org/issues/42235225
diff --git a/docs/native-code/development/README.md b/docs/native-code/development/README.md
index 8a2678e6cf..02d148c7d7 100644
--- a/docs/native-code/development/README.md
+++ b/docs/native-code/development/README.md
@@ -98,11 +98,7 @@ configuration untouched (stored in the args.gn file), do:
 $ gn clean out/Default
 ```
-To build the fuzzers residing in the [test/fuzzers][fuzzers] directory, use
-```
-$ gn gen out/fuzzers --args='use_libfuzzer=true optimize_for_fuzzing=true'
-```
-Depending on the fuzzer additional arguments like `is_asan`, `is_msan` or `is_ubsan_security` might be required.
+To build the fuzzers residing in the [test/fuzzers][fuzzers-dir] directory, read the instructions at the [fuzzers][fuzzers] page.
 
 See the [GN][gn-doc] documentation for all available options. There are also
 more platform specific tips on the [Android][webrtc-android-development] and
@@ -129,6 +125,11 @@ $ autoninja all -C out/Default
 See [Ninja build rules][ninja-build-rules] to read more about difference between
 `ninja` and `ninja all`.
 
+To build a particular target (like a fuzzer, which is not included in the main target), use
+
+```
+autoninja -C out/Default h264_depacketizer_fuzzer
+```
 
 ## Using Another Build System
@@ -288,4 +289,5 @@ Target name `turnserver`. Used for unit tests.
 [rfc-5766]: https://tools.ietf.org/html/rfc5766
 [m80-log]: https://webrtc.googlesource.com/src/+log/branch-heads/3987
 [m80]: https://webrtc.googlesource.com/src/+/branch-heads/3987
-[fuzzers]: https://webrtc.googlesource.com/src/+/main/test/fuzzers/
+[fuzzers-dir]: https://webrtc.googlesource.com/src/+/main/test/fuzzers/
+[fuzzers]: https://webrtc.googlesource.com/src/+/main/docs/native-code/development/fuzzers/
diff --git a/docs/native-code/development/fuzzers/README.md b/docs/native-code/development/fuzzers/README.md
new file mode 100644
index 0000000000..ce7941243f
--- /dev/null
+++ b/docs/native-code/development/fuzzers/README.md
@@ -0,0 +1,89 @@
+# Fuzzing in WebRTC
+
+## Intro
+WebRTC currently uses libfuzzer for fuzz testing. FuzzTest is a newer approach
+that we have not yet looked into but plan to evaluate in the future.
+
+Before continuing, read the [libfuzzer][libfuzzer-getting-started]
+and [FuzzTest][fuzztest-getting-started] getting started docs to get familiar.
+You will also need to download libfuzzer-specific libraries, which are not
+downloaded by default. The easiest way to do this is to set the `checkout_fuzzer`
+custom variable in your .gclient file and then run `gclient runhooks`.
+```
+  "custom_vars": {
+    "checkout_fuzzer": True,
+  },
+```
+
+## Compiling locally
+To build the fuzzers residing in the [test/fuzzers][fuzzers] directory, use
+```
+$ gn gen out/fuzzers --args='enable_rust=true enable_rust_cxx=true optimize_for_fuzzing=true use_fuzztest_wrapper=true use_libfuzzer=true'
+```
+Depending on the fuzzer, additional arguments like `is_asan`, `is_msan` or
+`is_ubsan_security` might be required.
+
+See the [GN][gn-doc] documentation for all available options. There are also
+more platform-specific tips in the [Android][webrtc-android-development] and
+[iOS][webrtc-ios-development] instructions.
+
+## Add new fuzzers
+Create a new `.cc` file in the [test/fuzzers][fuzzers] directory, using existing
+files as a guide (a minimal sketch is also shown further down this page).
+
+Add a new `webrtc_fuzzers_test` build rule in the [test/fuzzers/BUILD.gn][BUILD.gn],
+using existing rules as a guide.
+
+Ensure it compiles and executes locally, then add it to a Gerrit CL and upload
+it for review, e.g.
+
+```
+$ autoninja -C out/fuzzers test/fuzzers:h264_depacketizer_fuzzer
+```
+
+It can then be executed like so:
+```
+$ out/fuzzers/bin/run_h264_depacketizer_fuzzer
+```
+
+## Running fuzzers automatically
+All fuzzer tests in the [test/fuzzers/BUILD.gn][BUILD.gn] file are compiled per
+CL on the [libfuzzer bot][libfuzzer-bot]. This only verifies that they compile;
+this bot does not do any fuzz testing.
+
+When WebRTC is [rolled][webrtc-autoroller] into Chromium, the libfuzzer bots on
+the [chromium.fuzz][chromium-fuzz] waterfall will compile it, zip it and then
+upload it to https://clusterfuzz.com for execution.
+
+You can verify that the fuzz test is being executed as follows:
+ - Navigate to a bot in the [chromium.fuzz][chromium-fuzz] libfuzzer waterfall, e.g. the [Libfuzzer Upload Linux ASan][linux-bot] bot.
+ - Click on the latest `build#` link.
+ - Search for `//third_party/webrtc/test/fuzzers` in the `raw_io.output_text_refs_` file in the `calculate_all_fuzzers` step.
+ - Verify that the new fuzzer (as it's named in the `webrtc_fuzzers_test` build rule) is present.
+ - Also verify that it's _NOT_ in the `no_clusterfuzz` file in the `calculate_no_clusterfuzz` step. If it is, file a bug at https://bugs.webrtc.org.
+
+Bugs are filed automatically in https://crbug.com in the Blink > WebRTC component
+and assigned based on the [test/fuzzers/OWNERS][OWNERS] file or the commit history.
+
+If you are a non-Googler, you can only view data from https://clusterfuzz.com
+if your account is CC'ed on the reported bug.
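+
+As a rough illustration of the "Add new fuzzers" steps above, a minimal fuzz
+target might look like the sketch below. The file name and the fuzzed call are
+hypothetical, and only the generic libFuzzer entry point is shown; the existing
+files in [test/fuzzers][fuzzers] remain the authoritative guide.
+
+```
+// test/fuzzers/my_new_fuzzer.cc (hypothetical example)
+#include <cstddef>
+#include <cstdint>
+
+// Generic libFuzzer entry point. Existing WebRTC fuzzers may route the input
+// through shared helpers instead, so mirror the files in test/fuzzers.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if (data == nullptr || size == 0) {
+    return 0;
+  }
+  // Feed `data`/`size` into the API under test here.
+  return 0;
+}
+```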
+ +## Additional reading + +[Libfuzzer in Chromium][libfuzzer-chromium] + + +[libfuzzer-chromium]: https://chromium.googlesource.com/chromium/src/+/HEAD/testing/libfuzzer/README.md +[libfuzzer-bot]: https://ci.chromium.org/ui/p/webrtc/builders/luci.webrtc.ci/Linux64%20Release%20%28Libfuzzer%29 +[fuzzers]: https://webrtc.googlesource.com/src/+/main/test/fuzzers/ +[OWNERS]: https://webrtc.googlesource.com/src/+/main/test/fuzzers/OWNERS +[BUILD.gn]: https://webrtc.googlesource.com/src/+/main/test/fuzzers/BUILD.gn +[gn]: https://gn.googlesource.com/gn/+/main/README.md +[gn-doc]: https://gn.googlesource.com/gn/+/main/docs/reference.md#IDE-options +[webrtc-android-development]: https://webrtc.googlesource.com/src/+/main/docs/native-code/android/ +[webrtc-ios-development]: https://webrtc.googlesource.com/src/+/main/docs/native-code/ios/ +[chromium-fuzz]: https://ci.chromium.org/p/chromium/g/chromium.fuzz/console +[linux-bot]: https://ci.chromium.org/ui/p/chromium/builders/ci/Libfuzzer%20Upload%20Linux%20ASan/ +[libfuzzer-getting-started]: https://chromium.googlesource.com/chromium/src/+/main/testing/libfuzzer/getting_started_with_libfuzzer.md +[fuzztest-getting-started]: https://chromium.googlesource.com/chromium/src/+/main/testing/libfuzzer/getting_started.md +[webrtc-autoroller]: https://autoroll.skia.org/r/webrtc-chromium-autoroll diff --git a/docs/native-code/ios/README.md b/docs/native-code/ios/README.md index d10fcf1022..b27d59475a 100644 --- a/docs/native-code/ios/README.md +++ b/docs/native-code/ios/README.md @@ -78,7 +78,7 @@ $ # debug build for 64-bit iOS $ gn gen out/ios_64 --args='target_os="ios" target_cpu="arm64"' $ # debug build for simulator -$ gn gen out/ios_sim --args='target_os="ios" target_cpu="x64"' +$ gn gen out/ios_sim --args='target_os="ios" target_cpu="x64" enable_run_ios_unittests_with_xctest=true' ``` ## Compiling with ninja diff --git a/docs/native-code/logging.md b/docs/native-code/logging.md index 1daadbe2b5..0af216c7cc 100644 --- a/docs/native-code/logging.md +++ b/docs/native-code/logging.md @@ -6,7 +6,7 @@ To enable native logs for a native application, you can either: * Use a debug build of WebRTC (a build where `NDEBUG` is not defined), which will enable `INFO` logging by default. - * Call `rtc::LogMessage::LogToDebug(rtc::LS_INFO)` within your application. + * Call `webrtc::LogMessage::LogToDebug(webrtc::LS_INFO)` within your application. Or use `LS_VERBOSE` to enable `VERBOSE` logging. For the location of the log output on different platforms, see below. 
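+
+As a minimal sketch of the second option above (assuming the standard
+`rtc_base/logging.h` macros; adapt to your application):
+
+```
+#include "rtc_base/logging.h"
+
+int main() {
+  // Enable INFO logging (use webrtc::LS_VERBOSE for VERBOSE logging).
+  webrtc::LogMessage::LogToDebug(webrtc::LS_INFO);
+  RTC_LOG(LS_INFO) << "Native WebRTC logging enabled";
+  return 0;
+}
+```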
diff --git a/docs/native-code/rtp-hdrext/README.md b/docs/native-code/rtp-hdrext/README.md index 081a727c59..bde7349189 100644 --- a/docs/native-code/rtp-hdrext/README.md +++ b/docs/native-code/rtp-hdrext/README.md @@ -1,13 +1,15 @@ # Experimental RTP header extensions -The following subpages define experiemental RTP header extensions: +The following subpages define experimental RTP header extensions: - * [abs-send-time](abs-send-time/README.md) * [abs-capture-time](abs-capture-time/README.md) + * [abs-send-time](abs-send-time/README.md) * [color-space](color-space/README.md) + * [corruption-detection](corruption-detection/README.md) + * [inband-cn](inband-cn/README.md) * [playout-delay](playout-delay/README.md) * [transport-wide-cc-02](transport-wide-cc-02/README.md) * [video-content-type](video-content-type/README.md) + * [video-frame-tracking-id](video-frame-tracking-id/README.md) + * [video-layers-allocation00](video-layers-allocation00/README.md) * [video-timing](video-timing/README.md) - * [inband-cn](inband-cn/README.md) - * [video-layers-allocation00](video-layes-allocation00/README.md) diff --git a/docs/native-code/rtp-hdrext/corruption-detection/README.md b/docs/native-code/rtp-hdrext/corruption-detection/README.md new file mode 100644 index 0000000000..0265c9a76a --- /dev/null +++ b/docs/native-code/rtp-hdrext/corruption-detection/README.md @@ -0,0 +1,354 @@ +# Corruption Detection + +**Name:** +"Corruption Detection"; "Extension for Automatic Detection of Video Corruptions" + +**Formal name:** + + +**Status:** This extension is defined here to allow for experimentation. + +**Contact:** + +NOTE: This explainer is a work in progress and may change without notice. + +The Corruption Detection (sometimes referred to as automatic corruption +detection or ACD) extension is intended to be a part of a system that allows +estimating a likelihood that a video transmission is in a valid state. That is, +the input to the video encoder on the send side corresponds to the output of the +video decoder on the receive side with the only difference being the expected +distortions from lossy compression. + +The goal is to be able to detect outright coding errors caused by things such as +bugs in encoder/decoders, malformed packetization data, incorrect relay +decisions in SFU-type servers, incorrect handling of packet loss/reordering, and +so forth. We want to accomplish this with a high signal-to-noise ratio while +consuming a minimum of resources in terms of bandwidth and/or computation. It +should be noted that it is _not_ a goal to be able to e.g. gauge general video +quality using this method. + +This explainer contains two parts: + +1) A definition of the RTP header extension itself and how it is to be parsed. +2) The intended usage and implementation details for a WebRTC sender and + receiver respectively. + +If this extension has been negotiated, all the client behavior outlined in this +doc MUST be adhered to. 
+
+## RTP Header Extension Format
+
+### Data Layout Overview
+
+The message format of the header extension:
+
+     0                   1                   2                   3
+     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |B|  seq index  |    std dev    | Y err | UV err|    sample 0   |
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |    sample 1   |    sample 2   | … up to sample <=12
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+### Data Layout Details
+
+* B (1 bit): Whether the seq index field should be interpreted as the MSBs or
+  LSBs of the full 14-bit sequence index described in the next point.
+* seq index (7 bits): The index into the Halton sequence (used to locate
+  where the samples should be drawn from).
+  * If B is set: the 7 most significant bits of the true index. The 7 least
+    significant bits of the true index shall be interpreted as 0. This is the
+    point at which we can guarantee that the sender and receiver have the same
+    full index. B MUST be set on keyframes. On droppable frames B MUST NOT be
+    set.
+  * If B is not set: the 7 least significant bits of the true index. The 7 most
+    significant bits should be inferred from the most recent message.
+* std dev (8 bits): The standard deviation of the Gaussian filter used
+  to weigh the samples. The value is scaled using a linear map:
+  0 = 0.0 to 255 = 40.0. A std dev of 0 is interpreted as directly using
+  just the sample value at the desired coordinate, without any weighting.
+* Y err (4 bits): The allowed error for the luma channel.
+* UV err (4 bits): The allowed error for the chroma channels.
+* Sample N (8 bits): The N:th filtered sample from the input image. Each
+  sample represents a new point in one of the image planes, the plane and
+  coordinates being determined by the index into the Halton sequence (starting
+  at the seq index and incrementing by one for each sample). Each sample has
+  gone through a Gaussian filter with the std dev specified above. The samples
+  are floored to the nearest integer.
+
+A special case is the so-called "synchronization" message. Such a message
+only contains the first byte. They are used to keep the sender and receiver in
+sync even if no "full" message has been received for a while. Such messages
+MUST NOT be sent on droppable frames.
+
+### A note on encryption
+
+Privacy and security are core parts of nearly every WebRTC-based application,
+which means that some sort of encryption needs to be present. The most common
+form of encryption is SRTP, defined in RFC 3711. However, as mentioned in
+section 9.4 of that RFC, RTP header extensions are considered part of the header
+and are thus not encrypted.
+
+The automatic corruption detection header extension is different from most other
+header extensions in that it provides not only metadata about the media stream
+being transmitted but in practice comprises an extremely sparse representation
+of the actual video stream itself. Given a static scene and enough time, a crude
+image of the encrypted video can rather trivially be constructed.
+
+As such, most applications should use this extension with SRTP only if
+additional security is present to protect it. That could be, for example, in the
+form of explicit header extension encryption provided by RFC 6904/RFC 9335, or
+by encapsulating the entire RTP stream in an additional layer such as IPsec.
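+
+To make the layout above concrete, a non-normative sketch of unpacking the
+metadata fields is shown below. The struct and function names are illustrative
+only and are not taken from the WebRTC source; the assumed bit order follows
+the diagram (most significant bit first).
+
+```
+#include <cstddef>
+#include <cstdint>
+
+struct AcdHeader {
+  bool b;             // True if `seq_index` holds the 7 MSBs of the full index.
+  uint8_t seq_index;  // 7-bit sequence index (MSBs or LSBs, depending on B).
+  double std_dev;     // Gaussian std dev, mapped linearly from 0..255 to 0.0..40.0.
+  uint8_t y_err;      // Allowed error, luma.
+  uint8_t uv_err;     // Allowed error, chroma.
+};
+
+// `data`/`size` is the raw payload of the header extension.
+bool ParseAcd(const uint8_t* data, size_t size, AcdHeader* out) {
+  if (size < 1) return false;
+  out->b = (data[0] & 0x80) != 0;
+  out->seq_index = data[0] & 0x7F;
+  if (size == 1) return true;  // Synchronization message: first byte only.
+  if (size < 3) return false;
+  out->std_dev = data[1] * 40.0 / 255.0;
+  out->y_err = data[2] >> 4;
+  out->uv_err = data[2] & 0x0F;
+  // data[3] .. data[size - 1] are the filtered samples.
+  return true;
+}
+```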
+
+## Usage & Guidelines
+
+In this section we’ll first look at a general overview of the intended usage of
+this header extension, followed by more details around the expected
+implementation.
+
+### Overview
+
+The premise of the extension described here is that we can validate the state of
+the video pipeline by quasi-randomly selecting a few samples from the raw input
+frame to an encoder, and then checking them against the output of a decoder.
+Assuming that a lossless codec is used, we can follow these steps:
+
+1) In an image that is to be encoded, quasi-randomly select N sampling positions
+   and store the sample values for those positions from the raw input image.
+2) Encode the image, and attach the selected sample values to the RTP packets
+   containing the encoded bitstream of that image.
+3) Transmit the RTP packets to a remote receiver.
+4) At the receiver, collect the attached sample values from the RTP packets when
+   assembling the frame, and then pass the bitstream to a decoder.
+5) Using the same quasi-random sequence as in (1), calculate the corresponding N
+   sampling positions.
+6) Take the output of the decoder and check the values of the samples from the
+   RTP packets. If they differ significantly, it is likely that an image
+   corruption has occurred.
+
+Lossless encoding is, however, rarely used in practice, and that introduces
+problems for the above algorithm:
+
+* Quantization causes values to be different from the desired value.
+* Whole blocks of pixels might be shifted somewhat due to inaccuracies in motion
+  vectors.
+* Inaccuracies caused by in-loop or post-process filtering.
+* etc.
+
+We must therefore take these distortions into consideration, as they are merely
+a natural side-effect of the compression and their effect is not to be
+considered an “invalid state”. We aim to accomplish this using two tools.
+
+First, instead of a sample being a single raw sample value, let it be a filtered
+one: a weighted average of samples in the vicinity of the desired location, with
+the weights being a 2D Gaussian centered at that location and the variance
+adjusted depending on the magnitude of the expected distortions
+(higher distortion => higher variance). This smoothes out inaccuracies caused by
+both quantization and motion compensation.
+
+Secondly, even with a very large filter kernel the new sample might not converge
+towards the exact desired value. For that reason, set an “allowed error
+threshold” that removes small magnitude differences. Since chroma and luma
+channels have different scales, separate error thresholds are available for
+them.
+
+### Sequence Index Handling
+
+The quasi-random sequence of choice for this extension is a 2D
+[Halton Sequence](https://en.wikipedia.org/wiki/Halton_sequence).
+
+The index into the Halton Sequence is indicated by the header extension and is a
+14-bit unsigned integer which wraps around back to 0 on overflow.
+
+For each sample contained within the extension, the sequence index should be
+considered to be incremented by one. Thus the sequence index at the start of the
+header should be considered “the sequence index for the next sample to be
+drawn”.
+
+The ACD extension may be sent containing either the 7 most significant bits
+(B = true) or the 7 least significant bits (B = false) of the sequence index.
+
+Key-frames MUST be populated with the ACD extension, and those MUST use B = true,
+indicating that only the 7 most significant bits are transmitted.
+
+The sender may choose any arbitrary starting point.
+The biggest reason not to always start with (B = true, seq index = 0) is that
+with frequent/periodic keyframes you might end up always sampling the same small
+subset of image locations over and over.
+
+If B = false and the LSB seq index + number of samples exceeds the capacity of
+the 7-bit field (i.e. > 0x7F), then the most significant bits of the 14-bit
+sequence counter should be considered to be implicitly incremented by the
+overflow.
+
+Delta-frames may be encoded as “droppable” or “non-droppable”. Consider for
+example temporal layering using the
+[L1T3](https://www.w3.org/TR/webrtc-svc/#L1T3*) mode. In that scenario,
+key-frames and all T0 frames are non-droppable, while all T1 and T2 frames are
+droppable.
+
+For non-droppable frames, B MAY be set to true even though there is often little
+utility for it.
+For droppable frames, B MUST NOT be set to true, since a receiver could otherwise
+easily end up out of sync with the sender.
+
+A receiver must store a state containing the last sequence index used. If an ACD
+extension is received with B = false but the LSB does not match the last known
+sequence index state, this indicates that an instrumented frame has been
+dropped. The receiver should recover from this by incrementing the last known
+sequence index until the 7 least significant bits match.
+
+Because of this, the sender MUST send ACD messages on non-droppable frames such
+that the delta between their sequence indices (from the last sample of the
+previous packet to the first of the next) does not exceed 0x7F. A
+synchronization message may be used for this purpose if there is no wish to
+instrument the non-droppable frame.
+
+It is not required to add the ACD extension to every frame. Indeed, for
+performance reasons it may be reasonable to only instrument a small subset of
+frames, for example using only one frame per second.
+
+Additionally, when encoding a structure that has independent decode targets
+(e.g. L3T3_KEY), the sender should generate an independent ACD sequence per
+target resolution so that a receiver can validate the state of the sub-stream
+they receive.
+
+// TODO: Add concrete examples.
+
+### Sample Selection
+
+As mentioned above, a Halton Sequence is used to generate sampling coordinates.
+Base 2 is used for selecting the rows, and base 3 is used for selecting columns.
+
+Each sample in the ACD extension represents a single image sample, meaning it
+belongs to a single channel rather than e.g. being an RGB pixel.
+
+The initial version of the ACD extension supports only the I420 chroma
+subsampling format. When determining which plane a location belongs to, it is
+easiest to visualize it as the chroma planes being “stacked” to the side of the
+luma plane:
+
+    +------+---+
+    |      | U |
+    +  Y   +---+
+    |      | V |
+    +------+---+
+
+In pseudo code:
+```
+  row = GetHaltonSequence(seq_index, /*base=*/2) * image_height;
+  col = GetHaltonSequence(seq_index, /*base=*/3) * image_width * 1.5;
+
+  if (col < image_width) {
+    HandleSample(Y_PLANE, row, col);
+  } else if (row < image_height / 2) {
+    HandleSample(U_PLANE, row, col - image_width);
+  } else {
+    HandleSample(V_PLANE, row - (image_height / 2), col - image_width);
+  }
+
+  seq_index++;
+```
+Support for other layout types may be added in later versions of this extension.
+
+Note that the image dimensions are not explicitly a part of the ACD extension -
+they have to be inferred from the raw image itself.
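+
+For reference, `GetHaltonSequence` in the pseudo code above corresponds to the
+standard radical-inverse construction. A minimal sketch (illustrative, not the
+actual WebRTC implementation) is:
+
+```
+// Returns the `index`:th element of the Halton sequence for `base`,
+// always in the range [0, 1).
+double GetHaltonSequence(int index, int base) {
+  double result = 0.0;
+  double fraction = 1.0 / base;
+  while (index > 0) {
+    result += fraction * (index % base);
+    index /= base;
+    fraction /= base;
+  }
+  return result;
+}
+```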
+
+### Sample Filtering
+
+As mentioned above, when filtering a sample we create a weighted average around
+the desired location. Only samples in the same plane are considered. The
+weighting consists of a 2D Gaussian centered on the desired location, with the
+standard deviation specified in the ACD extension header.
+
+If the standard deviation is specified as 0.0, we consider only a single
+sample. Otherwise, we first determine a cutoff distance below which the weights
+are considered too small to matter. For now, we have set the weight cutoff to
+0.2, meaning the maximum distance from the center sample we need to consider is
+max_d = ceil(sqrt(-2.0 * ln(0.2) * stddev^2) - 1).
+
+Any samples outside the plane are considered to have weight 0.
+
+In pseudo-code, that means we get the following:
+```
+  sample_sum = 0;
+  weight_sum = 0;
+  for (y = max(0, row - max_d) to min(plane_height, row + max_d)) {
+    for (x = max(0, col - max_d) to min(plane_width, col + max_d)) {
+      weight = e^(-1 * ((y - row)^2 + (x - col)^2) / (2 * stddev^2));
+      sample_sum += SampleAt(x, y) * weight;
+      weight_sum += weight;
+    }
+  }
+  filtered_sample = sample_sum / weight_sum;
+```
+
+### Receive Side Considerations
+
+When a frame has been decoded and an ACD message is present, the receiver
+performs the following steps:
+
+* Update the sequence index so that it is consistent with the ACD message.
+* Calculate the sample positions from the Halton sequence.
+* Filter each sample of the decoded image using the standard deviation provided
+  in the ACD message.
+
+We then need to compare the actual samples present in the ACD message and the
+samples generated from the locally decoded frame, and take the allowed error
+into account:
+
+```
+for (i = 0 to num_samples) {
+  // Allowed error from ACD message, depending on which plane sample i is in.
+  allowed_error = SampleType(i) == Y_PLANE ? Y_ERR : UV_ERR;
+  delta_i = max(0, abs(RemoteSample(i) - LocalSample(i)) - allowed_error);
+}
+```
+
+It is then up to the receiver how to interpret these deltas. A suggested method
+is to calculate a “corruption score” by calculating sum(delta(i)^2), where
+delta(i) is the delta for the i:th sample in the message, and then scaling and
+capping that result to a maximum of 1.0. By squaring the deltas, we make sure
+that even individual samples that are far outside their expected values cause a
+noticeable shift in the score. Another possible way is to calculate the distance
+and cap it using a sigmoid function.
+
+This extension message format does not make recommendations about what a
+receiver should do with the corruption scores, but some possibilities are:
+
+* Expose it as a statistic connected to the video receive stream. Let the
+  application decide what to do with the information.
+* Let the WebRTC application use a corruption signal to take proactive measures,
+  e.g. request a key-frame in order to recover, or try to switch to another
+  codec type or implementation.
+
+### Determining Filter Settings & Error Thresholds
+
+It is up to the sender to estimate how large the filter kernel and the allowed
+error thresholds should be.
+
+One method to do this is to analyze example outputs from different encoders and
+map the average frame QP to suitable settings. There will of course have to be
+different such mappings for e.g. AV1 compared to VP8, but it’s also possible to
+get “tighter” values with knowledge of the exact implementation used, e.g. a
+mapping designed just for libaom encoder version X running with speed setting Y.
+
+Another method is to use the actual reconstructed state from the encoder. That
+of course means the encoder has to expose that state, which is not common.
+A benefit of doing it that way is that the filter size and allowed error can be
+very small (really only post-processing could introduce distortions in that
+scenario). A drawback is that if the reconstructed state already contains
+corruption due to an encoder bug, then we would not be able to detect that
+corruption at all.
+
+There are also possibly more accurate, but probably much more costly,
+alternatives, such as training an ML model to determine the settings based on
+both the content of the source frame and any metadata present in the encoded
+bitstream.
+
+Regardless of method, the implementation at the send side SHOULD strive to set
+the filter size and error thresholds such that 99.5% of filtered samples end up
+with a delta <= the error threshold for that plane, based on a representative
+set of test clips and bandwidth constraints.
+
+Note: the extension must not be present in more than one packet per video frame.
diff --git a/docs/native-code/rtp-hdrext/video-layers-allocation00/README.md b/docs/native-code/rtp-hdrext/video-layers-allocation00/README.md
index c4454d8ee1..cfa8c4cb0d 100644
--- a/docs/native-code/rtp-hdrext/video-layers-allocation00/README.md
+++ b/docs/native-code/rtp-hdrext/video-layers-allocation00/README.md
@@ -22,6 +22,10 @@ rtp stream (SVC), or independent spatial layers sent on multiple rtp streams
 
 ## RTP header extension format
 
+Note: when including the optional width, height and maximum framerate
+fields, the total data length of the extension can exceed 16 bytes and the
+extension is then sent as a two-byte header extension [1].
+
 ### Data layout
 
 ```
@@ -68,7 +72,7 @@ alignment.
 layers. Values are stored in ascending order of spatial id. Zero-padded to byte
 alignment.
 
-Target bitrate in kbps. Values are stored using leb128 encoding [1]. One value per
+Target bitrate in kbps. Values are stored using leb128 encoding [2]. One value per
 temporal layer. Values are stored in (RTP stream id, spatial id, temporal id)
 ascending order. All bitrates are total required bitrate to receive the
 corresponding layer, i.e. in simulcast mode they include only corresponding
@@ -78,9 +82,11 @@ temporal layers are also included.
 Resolution and framerate. Optional. Presence is inferred from the rtp header
 extension size. Encoded (width - 1), 16-bit, (height - 1), 16-bit, max frame
 rate 8-bit per spatial layer per RTP stream. Values are stored in (RTP stream
-id, spatial id) ascending order.
+id, spatial id) ascending order. Only sent when the resolution differs from the
+last sent values, when the framerate has changed by more than 5 fps, and on key
+frames.
 
 An empty layer allocation (i.e nothing sent on ssrc) is encoded as special case
 with a single 0 byte.
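+
+For reference, the leb128 encoding used for the target bitrates above stores 7
+bits per byte, with the most significant bit marking that more bytes follow; a
+minimal encoder sketch (illustrative only, see [2] for the normative
+definition):
+
+```
+#include <cstdint>
+#include <vector>
+
+std::vector<uint8_t> EncodeLeb128(uint64_t value) {
+  std::vector<uint8_t> out;
+  do {
+    uint8_t byte = value & 0x7F;
+    value >>= 7;
+    if (value != 0) byte |= 0x80;  // More bytes follow.
+    out.push_back(byte);
+  } while (value != 0);
+  return out;
+}
+```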
-[1] https://aomediacodec.github.io/av1-spec/#leb128
+[1] https://www.rfc-editor.org/rfc/rfc8285#section-4.3
+[2] https://aomediacodec.github.io/av1-spec/#leb128
diff --git a/docs/native-code/sdp-ext/fmtp-x-google-per-layer-pli.md b/docs/native-code/sdp-ext/fmtp-x-google-per-layer-pli.md
new file mode 100644
index 0000000000..6823d6501d
--- /dev/null
+++ b/docs/native-code/sdp-ext/fmtp-x-google-per-layer-pli.md
@@ -0,0 +1,36 @@
+# x-google-per-layer-pli FMTP parameter
+
+The x-google-per-layer-pli FMTP parameter is a format-specific parameter
+that can be added to a remote description in the `a=fmtp:` line:
+    a=fmtp:96 x-google-per-layer-pli=1
+
+When using simulcast with more than a single SSRC, it will change how the
+simulcast encoder reacts to Picture Loss Indication (PLI) and Full Intra
+Request (FIR) RTCP feedback.
+
+When the parameter value is 1, a PLI requests the generation of a key frame
+for the spatial layer associated with the SSRC of the media source, and a
+FIR does the same for the SSRC value of the media sender.
+
+When the value is 0 or the parameter is missing, a keyframe is generated
+on all spatial layers for backward compatibility.
+
+## Experimentation
+
+This parameter allows for large-scale A/B testing and opting in to the
+new behavior. For multiparty calls, enabling it should reduce the number of
+key frames sent by the client and the number of key frames received by the
+receivers, which results in better bandwidth utilization.
+
+This parameter is experimental and may be removed again in the future.
+
+## IANA considerations
+
+Since the current behavior of reacting to a PLI for a specific SSRC with
+key frames on all spatial layers can be considered an implementation bug,
+this parameter is not registered with the IANA.
+
+If experimentation shows that the current behavior is better for some
+codecs, like VP8, which can share encoding parameters with synchronous
+keyframes, a standardized variant of this parameter shall be registered
+with the IANA.
diff --git a/examples/BUILD.gn b/examples/BUILD.gn index 6ae2b71d85..69022724e4 100644 --- a/examples/BUILD.gn +++ b/examples/BUILD.gn @@ -11,6 +11,7 @@ import("../webrtc.gni") if (is_android) { import("//build/config/android/config.gni") import("//build/config/android/rules.gni") + import("//third_party/jni_zero/jni_zero.gni") } else if (is_mac) { import("//build/config/mac/rules.gni") } else if (is_ios) { @@ -41,10 +42,6 @@ group("examples") { } } - if (!build_with_chromium) { - deps += [ ":stun_prober" ] - } - if (is_ios || (is_mac && target_cpu != "x86")) { deps += [ ":AppRTCMobile" ] } @@ -59,10 +56,6 @@ group("examples") { deps += [ ":peerconnection_client" ] } } - - if (is_android || is_win) { - deps += [ ":webrtc_unity_plugin" ] - } } rtc_library("read_auth_file") { @@ -74,8 +67,8 @@ rtc_library("read_auth_file") { deps = [ "../api:array_view", "../rtc_base:stringutils", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings" ] } if (rtc_include_tests) { @@ -158,6 +151,7 @@ if (is_android) { "../sdk/android:video_api_java", "../sdk/android:video_java", "androidapp/third_party/autobanh:autobanh_java", + "//build/android:build_java", "//third_party/androidx:androidx_annotation_annotation_java", ] } @@ -488,16 +482,17 @@ if (is_ios || (is_mac && target_cpu != "x86")) { ] deps = [ + "../api:enable_media", "../api:libjingle_peerconnection_api", "../api:scoped_refptr", "../api:sequence_checker", + "../api/audio:audio_processing", + "../api/audio:builtin_audio_processing_builder", "../api/audio_codecs:builtin_audio_decoder_factory", "../api/audio_codecs:builtin_audio_encoder_factory", "../api/rtc_event_log:rtc_event_log_factory", "../api/task_queue:default_task_queue_factory", - "../media:rtc_audio_video", "../modules/audio_processing", - "../modules/audio_processing:api", "../pc:libjingle_peerconnection", "../rtc_base/synchronization:mutex", "../sdk:base_objc", @@ -635,7 +630,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) { deps = [ ":AppRTCMobile_lib", ":apprtc_signaling", - "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../sdk:mediaconstraints_objc", "../sdk:peerconnectionfactory_base_objc", "../sdk:videocapture_objc", @@ -685,39 +680,55 @@ if (is_linux || is_chromeos || is_win) { ] deps = [ + "../api:array_view", + "../api:async_dns_resolver", "../api:audio_options_api", + "../api:create_frame_generator", "../api:create_peerconnection_factory", + "../api:enable_media", "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:media_stream_interface", + "../api:rtc_error", "../api:rtp_sender_interface", "../api:scoped_refptr", + "../api/audio:audio_device", "../api/audio:audio_mixer_api", + "../api/audio:audio_processing", "../api/audio_codecs:audio_codecs_api", + "../api/task_queue", + "../api/task_queue:default_task_queue_factory", "../api/task_queue:pending_task_safety_flag", "../api/units:time_delta", "../api/video:video_frame", "../api/video:video_rtp_headers", "../api/video_codecs:video_codecs_api", "../media:media_channel", - "../media:rtc_media_base", - "../p2p:rtc_p2p", + "../media:video_common", + "../p2p:connection", + "../p2p:port_allocator", "../pc:video_track_source", + "../rtc_base:async_dns_resolver", + "../rtc_base:buffer", "../rtc_base:checks", "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:net_helpers", "../rtc_base:refcount", "../rtc_base:rtc_certificate_generator", - "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:stringutils", 
"../rtc_base:threading", "../rtc_base/third_party/sigslot", + "../system_wrappers", "../system_wrappers:field_trial", "../test:field_trial", + "../test:frame_generator_capturer", "../test:platform_video_capturer", "../test:rtp_test_utils", + "../test:test_video_capturer", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/jsoncpp", ] if (is_win) { sources += [ @@ -728,7 +739,6 @@ if (is_linux || is_chromeos || is_win) { ] configs += [ "//build/config/win:windowed" ] deps += [ - "../media:rtc_media_base", "../rtc_base:win32", "../rtc_base:win32_socket_init", ] @@ -768,7 +778,6 @@ if (is_linux || is_chromeos || is_win) { "../media:rtc_audio_video", "../modules/audio_device", "../modules/audio_processing", - "../modules/audio_processing:api", "../modules/video_capture:video_capture_module", "../pc:libjingle_peerconnection", "../rtc_base:rtc_json", @@ -792,6 +801,8 @@ if (is_linux || is_chromeos || is_win) { ] deps = [ "../rtc_base:checks", + "../rtc_base:ip_address", + "../rtc_base:net_helpers", "../rtc_base:stringutils", "../system_wrappers:field_trial", "../test:field_trial", @@ -805,8 +816,9 @@ if (is_linux || is_chromeos || is_win) { sources = [ "turnserver/turnserver_main.cc" ] deps = [ ":read_auth_file", + "../p2p:basic_packet_socket_factory", "../p2p:p2p_server_utils", - "../p2p:rtc_p2p", + "../p2p:port_interface", "../pc:rtc_pc", "../rtc_base:async_udp_socket", "../rtc_base:ip_address", @@ -821,7 +833,6 @@ if (is_linux || is_chromeos || is_win) { sources = [ "stunserver/stunserver_main.cc" ] deps = [ "../p2p:p2p_server_utils", - "../p2p:rtc_p2p", "../pc:rtc_pc", "../rtc_base:async_udp_socket", "../rtc_base:socket_address", @@ -831,94 +842,7 @@ if (is_linux || is_chromeos || is_win) { } } -if (is_win || is_android) { - rtc_shared_library("webrtc_unity_plugin") { - testonly = true - sources = [ - "unityplugin/simple_peer_connection.cc", - "unityplugin/simple_peer_connection.h", - "unityplugin/unity_plugin_apis.cc", - "unityplugin/unity_plugin_apis.h", - "unityplugin/video_observer.cc", - "unityplugin/video_observer.h", - ] - - if (is_android) { - sources += [ - "unityplugin/class_reference_holder.cc", - "unityplugin/class_reference_holder.h", - "unityplugin/jni_onload.cc", - ] - suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ] - } - - if (is_win) { - configs += [ "//build/config/win:windowed" ] - } - deps = [ - "../api:create_peerconnection_factory", - "../api:libjingle_peerconnection_api", - "../api:media_stream_interface", - "../api/audio_codecs:builtin_audio_decoder_factory", - "../api/audio_codecs:builtin_audio_encoder_factory", - "../api/video:video_frame", - "../api/video:video_rtp_headers", - "../media:rtc_audio_video", - "../media:rtc_internal_video_codecs", - "../media:rtc_media", - "../media:rtc_media_base", - "../modules/audio_device", - "../modules/audio_processing", - "../modules/audio_processing:api", - "../modules/video_capture:video_capture_module", - "../pc:libjingle_peerconnection", - "../pc:video_track_source", - "../rtc_base:ssl", - "../test:platform_video_capturer", - "../test:video_test_common", - "//third_party/abseil-cpp/absl/memory", - ] - if (is_android) { - deps += [ - "../modules/utility", - "../sdk/android:libjingle_peerconnection_jni", - "../sdk/android:native_api_jni", - ] - } - } -} - if (is_android) { - rtc_android_library("webrtc_unity_java") { - sources = [ "unityplugin/java/src/org/webrtc/UnityUtility.java" ] - deps = [ - "../rtc_base:base_java", - 
"../sdk/android:camera_java", - "../sdk/android:libjingle_peerconnection_java", - "../sdk/android:peerconnection_java", - "../sdk/android:video_api_java", - "../sdk/android:video_java", - "//third_party/androidx:androidx_annotation_annotation_java", - ] - } - - # TODO(https://bugs.webrtc.org/15095) - Fix or remove this target. - #dist_jar("libwebrtc_unity") { - # _target_dir_name = get_label_info(":$target_name", "dir") - # output = "${root_out_dir}/lib.java${_target_dir_name}/${target_name}.jar" - # direct_deps_only = false - # use_interface_jars = false - # use_unprocessed_jars = false - # requires_android = true - # deps = [ - # ":webrtc_unity_java", - # "../rtc_base:base_java", - # "../sdk/android:libjingle_peerconnection_java", - # "../sdk/android:libjingle_peerconnection_metrics_default_java", - # "//third_party/androidx:androidx_annotation_annotation_java", - # ] - #} - robolectric_binary("android_examples_junit_tests") { sources = [ "androidjunit/src/org/appspot/apprtc/BluetoothManagerTest.java", @@ -932,32 +856,5 @@ if (is_android) { "//third_party/androidx:androidx_test_core_java", "//third_party/google-truth:google_truth_java", ] - - additional_jar_files = [ [ - "../sdk/android/tests/resources/robolectric.properties", - "robolectric.properties", - ] ] - } -} - -if (!build_with_chromium) { - # Doesn't build within Chrome on Win. - rtc_executable("stun_prober") { - testonly = true - sources = [ "stunprober/main.cc" ] - deps = [ - "../p2p:libstunprober", - "../p2p:rtc_p2p", - "../rtc_base:checks", - "../rtc_base:logging", - "../rtc_base:network", - "../rtc_base:socket_address", - "../rtc_base:ssl", - "../rtc_base:threading", - "../rtc_base:timeutils", - "../test:scoped_key_value_config", - "//third_party/abseil-cpp/absl/flags:flag", - "//third_party/abseil-cpp/absl/flags:parse", - ] } } diff --git a/examples/DEPS b/examples/DEPS index 114cda384b..8d512c2146 100644 --- a/examples/DEPS +++ b/examples/DEPS @@ -1,5 +1,6 @@ include_rules = [ "+common_video", + "+json", "+logging/rtc_event_log/rtc_event_log_factory.h", "+media", "+modules/audio_device", diff --git a/examples/androidapp/src/org/appspot/apprtc/AppRTCBluetoothManager.java b/examples/androidapp/src/org/appspot/apprtc/AppRTCBluetoothManager.java index e9077d8bd6..1415f4724f 100644 --- a/examples/androidapp/src/org/appspot/apprtc/AppRTCBluetoothManager.java +++ b/examples/androidapp/src/org/appspot/apprtc/AppRTCBluetoothManager.java @@ -111,7 +111,7 @@ public void onServiceConnected(int profile, BluetoothProfile proxy) { } @Override - /** Notifies the client when the proxy object has been disconnected from the service. */ + // Notifies the client when the proxy object has been disconnected from the service. public void onServiceDisconnected(int profile) { if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) { return; diff --git a/examples/androidapp/src/org/appspot/apprtc/CallActivity.java b/examples/androidapp/src/org/appspot/apprtc/CallActivity.java index eb5ee8289e..3a4452a87b 100644 --- a/examples/androidapp/src/org/appspot/apprtc/CallActivity.java +++ b/examples/androidapp/src/org/appspot/apprtc/CallActivity.java @@ -360,7 +360,7 @@ public void onClick(View view) { // For command line execution run connection for and exit. 
if (commandLineRun && runTimeMs > 0) { - (new Handler()).postDelayed(new Runnable() { + new Handler().postDelayed(new Runnable() { @Override public void run() { disconnect(); diff --git a/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java b/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java index 7206c88498..8de3d60243 100644 --- a/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java +++ b/examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java @@ -360,7 +360,7 @@ private void connectToRoom(String roomId, boolean commandLineRun, boolean loopba // roomId is random for loopback. if (loopback) { - roomId = Integer.toString((new Random()).nextInt(100000000)); + roomId = Integer.toString(new Random().nextInt(100000000)); } String roomUrl = sharedPref.getString( diff --git a/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java b/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java index 398b0c3b5e..fd838119ff 100644 --- a/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java +++ b/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java @@ -1269,7 +1269,7 @@ public void onIceConnectionReceivingChange(boolean receiving) { @Override public void onSelectedCandidatePairChanged(CandidatePairChangeEvent event) { - Log.d(TAG, "Selected candidate pair changed because: " + event); + Log.d(TAG, "Selected candidate pair changed because: " + event.reason); } @Override diff --git a/examples/androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java b/examples/androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java index 6a0f235528..b00589439b 100644 --- a/examples/androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java +++ b/examples/androidapp/src/org/appspot/apprtc/RoomParametersFetcher.java @@ -98,7 +98,7 @@ private void roomHttpResponseParse(String response) { String clientId = roomJson.getString("client_id"); String wssUrl = roomJson.getString("wss_url"); String wssPostUrl = roomJson.getString("wss_post_url"); - boolean initiator = (roomJson.getBoolean("is_initiator")); + boolean initiator = roomJson.getBoolean("is_initiator"); if (!initiator) { iceCandidates = new ArrayList<>(); String messagesString = roomJson.getString("messages"); diff --git a/examples/androidjunit/src/org/appspot/apprtc/BluetoothManagerTest.java b/examples/androidjunit/src/org/appspot/apprtc/BluetoothManagerTest.java index d7c190518c..dcc79b1ec2 100644 --- a/examples/androidjunit/src/org/appspot/apprtc/BluetoothManagerTest.java +++ b/examples/androidjunit/src/org/appspot/apprtc/BluetoothManagerTest.java @@ -148,9 +148,9 @@ public void testBluetoothBroadcastReceiversAreRegistered() { @Test public void testBluetoothDefaultStartStopStates() { bluetoothManager.start(); - assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE); + assertEquals(State.HEADSET_UNAVAILABLE, bluetoothManager.getState()); bluetoothManager.stop(); - assertEquals(bluetoothManager.getState(), State.UNINITIALIZED); + assertEquals(State.UNINITIALIZED, bluetoothManager.getState()); } // Verify correct state after receiving BluetoothServiceListener.onServiceConnected() @@ -158,10 +158,10 @@ public void testBluetoothDefaultStartStopStates() { @Test public void testBluetoothServiceListenerConnectedWithNoHeadset() { bluetoothManager.start(); - assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE); + assertEquals(State.HEADSET_UNAVAILABLE, bluetoothManager.getState()); simulateBluetoothServiceConnectedWithNoConnectedHeadset(); 
verify(mockedAppRtcAudioManager, times(1)).updateAudioDeviceState(); - assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE); + assertEquals(State.HEADSET_UNAVAILABLE, bluetoothManager.getState()); } // Verify correct state after receiving BluetoothServiceListener.onServiceConnected() @@ -170,20 +170,20 @@ public void testBluetoothServiceListenerConnectedWithNoHeadset() { @Test public void testBluetoothServiceListenerConnectedWithHeadset() { bluetoothManager.start(); - assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE); + assertEquals(State.HEADSET_UNAVAILABLE, bluetoothManager.getState()); simulateBluetoothServiceConnectedWithConnectedHeadset(); verify(mockedAppRtcAudioManager, times(1)).updateAudioDeviceState(); - assertEquals(bluetoothManager.getState(), State.HEADSET_AVAILABLE); + assertEquals(State.HEADSET_AVAILABLE, bluetoothManager.getState()); } // Verify correct state after receiving BluetoothProfile.ServiceListener.onServiceDisconnected(). @Test public void testBluetoothServiceListenerDisconnected() { bluetoothManager.start(); - assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE); + assertEquals(State.HEADSET_UNAVAILABLE, bluetoothManager.getState()); simulateBluetoothServiceDisconnected(); verify(mockedAppRtcAudioManager, times(1)).updateAudioDeviceState(); - assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE); + assertEquals(State.HEADSET_UNAVAILABLE, bluetoothManager.getState()); } // Verify correct state after BluetoothServiceListener.onServiceConnected() and @@ -193,11 +193,11 @@ public void testBluetoothServiceListenerDisconnected() { @Test public void testBluetoothHeadsetConnected() { bluetoothManager.start(); - assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE); + assertEquals(State.HEADSET_UNAVAILABLE, bluetoothManager.getState()); simulateBluetoothServiceConnectedWithConnectedHeadset(); simulateBluetoothHeadsetConnected(); verify(mockedAppRtcAudioManager, times(2)).updateAudioDeviceState(); - assertEquals(bluetoothManager.getState(), State.HEADSET_AVAILABLE); + assertEquals(State.HEADSET_AVAILABLE, bluetoothManager.getState()); } // Verify correct state sequence for a case when a BT headset is available, @@ -205,18 +205,18 @@ public void testBluetoothHeadsetConnected() { @Test public void testBluetoothScoAudioStartAndStop() { bluetoothManager.start(); - assertEquals(bluetoothManager.getState(), State.HEADSET_UNAVAILABLE); + assertEquals(State.HEADSET_UNAVAILABLE, bluetoothManager.getState()); simulateBluetoothServiceConnectedWithConnectedHeadset(); - assertEquals(bluetoothManager.getState(), State.HEADSET_AVAILABLE); + assertEquals(State.HEADSET_AVAILABLE, bluetoothManager.getState()); bluetoothManager.startScoAudio(); - assertEquals(bluetoothManager.getState(), State.SCO_CONNECTING); + assertEquals(State.SCO_CONNECTING, bluetoothManager.getState()); simulateBluetoothScoConnectionConnected(); - assertEquals(bluetoothManager.getState(), State.SCO_CONNECTED); + assertEquals(State.SCO_CONNECTED, bluetoothManager.getState()); bluetoothManager.stopScoAudio(); simulateBluetoothScoConnectionDisconnected(); - assertEquals(bluetoothManager.getState(), State.SCO_DISCONNECTING); + assertEquals(State.SCO_DISCONNECTING,bluetoothManager.getState()); bluetoothManager.stop(); - assertEquals(bluetoothManager.getState(), State.UNINITIALIZED); + assertEquals(State.UNINITIALIZED, bluetoothManager.getState()); verify(mockedAppRtcAudioManager, times(3)).updateAudioDeviceState(); } diff --git 
a/examples/androidjunit/src/org/appspot/apprtc/TCPChannelClientTest.java b/examples/androidjunit/src/org/appspot/apprtc/TCPChannelClientTest.java index ce550b35e4..8bc1de3686 100644 --- a/examples/androidjunit/src/org/appspot/apprtc/TCPChannelClientTest.java +++ b/examples/androidjunit/src/org/appspot/apprtc/TCPChannelClientTest.java @@ -17,6 +17,7 @@ import org.junit.After; import org.junit.Before; +import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; @@ -96,7 +97,9 @@ public void testConnectIPv4() { verify(clientEvents, timeout(CONNECT_TIMEOUT)).onTCPConnected(false); } + // TODO:b/389829614 - Below test is failing. @Test + @Ignore public void testConnectIPv6() { setUpIPv6Server(); try { diff --git a/examples/androidnativeapi/BUILD.gn b/examples/androidnativeapi/BUILD.gn index 9aba1fbd92..65ab875893 100644 --- a/examples/androidnativeapi/BUILD.gn +++ b/examples/androidnativeapi/BUILD.gn @@ -1,6 +1,7 @@ import("//webrtc.gni") if (is_android) { + import("//third_party/jni_zero/jni_zero.gni") rtc_android_apk("androidnativeapi") { testonly = true apk_name = "androidnativeapi" @@ -46,16 +47,17 @@ if (is_android) { deps = [ ":generated_jni", + "../../api:enable_media_with_defaults", "../../api:scoped_refptr", "../../api:sequence_checker", "../../rtc_base:ssl", + "../../rtc_base:ssl_adapter", "../../rtc_base/synchronization:mutex", "//api:libjingle_peerconnection_api", "//api/rtc_event_log:rtc_event_log_factory", "//api/task_queue:default_task_queue_factory", "//media:rtc_audio_video", "//media:rtc_internal_video_codecs", - "//media:rtc_media_engine_defaults", "//modules/utility", "//pc:libjingle_peerconnection", "//sdk/android:native_api_base", diff --git a/examples/androidnativeapi/jni/android_call_client.cc b/examples/androidnativeapi/jni/android_call_client.cc index 2713a563cd..b61af374d4 100644 --- a/examples/androidnativeapi/jni/android_call_client.cc +++ b/examples/androidnativeapi/jni/android_call_client.cc @@ -13,6 +13,7 @@ #include #include +#include "api/enable_media_with_defaults.h" #include "api/peer_connection_interface.h" #include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/task_queue/default_task_queue_factory.h" @@ -20,7 +21,6 @@ #include "media/engine/internal_decoder_factory.h" #include "media/engine/internal_encoder_factory.h" #include "media/engine/webrtc_media_engine.h" -#include "media/engine/webrtc_media_engine_defaults.h" #include "sdk/android/native_api/jni/java_types.h" #include "sdk/android/native_api/video/wrapper.h" @@ -32,8 +32,8 @@ class AndroidCallClient::PCObserver : public webrtc::PeerConnectionObserver { void OnSignalingChange( webrtc::PeerConnectionInterface::SignalingState new_state) override; - void OnDataChannel( - rtc::scoped_refptr data_channel) override; + void OnDataChannel(webrtc::scoped_refptr + data_channel) override; void OnRenegotiationNeeded() override; void OnIceConnectionChange( webrtc::PeerConnectionInterface::IceConnectionState new_state) override; @@ -50,13 +50,13 @@ namespace { class CreateOfferObserver : public webrtc::CreateSessionDescriptionObserver { public: explicit CreateOfferObserver( - rtc::scoped_refptr pc); + webrtc::scoped_refptr pc); void OnSuccess(webrtc::SessionDescriptionInterface* desc) override; void OnFailure(webrtc::RTCError error) override; private: - const rtc::scoped_refptr pc_; + const webrtc::scoped_refptr pc_; }; class SetRemoteSessionDescriptionObserver @@ -137,15 +137,15 @@ AndroidCallClient::GetJavaVideoCapturerObserver(JNIEnv* env) { } void 
AndroidCallClient::CreatePeerConnectionFactory() { - network_thread_ = rtc::Thread::CreateWithSocketServer(); + network_thread_ = webrtc::Thread::CreateWithSocketServer(); network_thread_->SetName("network_thread", nullptr); RTC_CHECK(network_thread_->Start()) << "Failed to start thread"; - worker_thread_ = rtc::Thread::Create(); + worker_thread_ = webrtc::Thread::Create(); worker_thread_->SetName("worker_thread", nullptr); RTC_CHECK(worker_thread_->Start()) << "Failed to start thread"; - signaling_thread_ = rtc::Thread::Create(); + signaling_thread_ = webrtc::Thread::Create(); signaling_thread_->SetName("signaling_thread", nullptr); RTC_CHECK(signaling_thread_->Start()) << "Failed to start thread"; @@ -154,19 +154,13 @@ void AndroidCallClient::CreatePeerConnectionFactory() { pcf_deps.worker_thread = worker_thread_.get(); pcf_deps.signaling_thread = signaling_thread_.get(); pcf_deps.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory(); - pcf_deps.call_factory = webrtc::CreateCallFactory(); - pcf_deps.event_log_factory = std::make_unique( - pcf_deps.task_queue_factory.get()); + pcf_deps.event_log_factory = std::make_unique(); - cricket::MediaEngineDependencies media_deps; - media_deps.task_queue_factory = pcf_deps.task_queue_factory.get(); - media_deps.video_encoder_factory = + pcf_deps.video_encoder_factory = std::make_unique(); - media_deps.video_decoder_factory = + pcf_deps.video_decoder_factory = std::make_unique(); - webrtc::SetMediaEngineDefaults(&media_deps); - pcf_deps.media_engine = cricket::CreateMediaEngine(std::move(media_deps)); - RTC_LOG(LS_INFO) << "Media engine created: " << pcf_deps.media_engine.get(); + EnableMediaWithDefaults(pcf_deps); pcf_ = CreateModularPeerConnectionFactory(std::move(pcf_deps)); RTC_LOG(LS_INFO) << "PeerConnectionFactory created: " << pcf_.get(); @@ -185,20 +179,21 @@ void AndroidCallClient::CreatePeerConnection() { RTC_LOG(LS_INFO) << "PeerConnection created: " << pc_.get(); - rtc::scoped_refptr local_video_track = + webrtc::scoped_refptr local_video_track = pcf_->CreateVideoTrack(video_source_, "video"); - local_video_track->AddOrUpdateSink(local_sink_.get(), rtc::VideoSinkWants()); + local_video_track->AddOrUpdateSink(local_sink_.get(), + webrtc::VideoSinkWants()); pc_->AddTransceiver(local_video_track); RTC_LOG(LS_INFO) << "Local video sink set up: " << local_video_track.get(); - for (const rtc::scoped_refptr& tranceiver : - pc_->GetTransceivers()) { - rtc::scoped_refptr track = + for (const webrtc::scoped_refptr& + tranceiver : pc_->GetTransceivers()) { + webrtc::scoped_refptr track = tranceiver->receiver()->track(); if (track && track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) { static_cast(track.get()) - ->AddOrUpdateSink(remote_sink_.get(), rtc::VideoSinkWants()); + ->AddOrUpdateSink(remote_sink_.get(), webrtc::VideoSinkWants()); RTC_LOG(LS_INFO) << "Remote video sink set up: " << track.get(); break; } @@ -207,7 +202,7 @@ void AndroidCallClient::CreatePeerConnection() { void AndroidCallClient::Connect() { webrtc::MutexLock lock(&pc_mutex_); - pc_->CreateOffer(rtc::make_ref_counted(pc_).get(), + pc_->CreateOffer(webrtc::make_ref_counted(pc_).get(), webrtc::PeerConnectionInterface::RTCOfferAnswerOptions()); } @@ -220,7 +215,7 @@ void AndroidCallClient::PCObserver::OnSignalingChange( } void AndroidCallClient::PCObserver::OnDataChannel( - rtc::scoped_refptr data_channel) { + webrtc::scoped_refptr data_channel) { RTC_LOG(LS_INFO) << "OnDataChannel"; } @@ -247,7 +242,7 @@ void AndroidCallClient::PCObserver::OnIceCandidate( } 
CreateOfferObserver::CreateOfferObserver( - rtc::scoped_refptr pc) + webrtc::scoped_refptr pc) : pc_(pc) {} void CreateOfferObserver::OnSuccess(webrtc::SessionDescriptionInterface* desc) { @@ -257,14 +252,15 @@ void CreateOfferObserver::OnSuccess(webrtc::SessionDescriptionInterface* desc) { // Ownership of desc was transferred to us, now we transfer it forward. pc_->SetLocalDescription( - rtc::make_ref_counted().get(), desc); + webrtc::make_ref_counted().get(), + desc); // Generate a fake answer. std::unique_ptr answer( webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, sdp)); pc_->SetRemoteDescription( std::move(answer), - rtc::make_ref_counted()); + webrtc::make_ref_counted()); } void CreateOfferObserver::OnFailure(webrtc::RTCError error) { diff --git a/examples/androidnativeapi/jni/android_call_client.h b/examples/androidnativeapi/jni/android_call_client.h index c9153d09bd..d50c58b3d1 100644 --- a/examples/androidnativeapi/jni/android_call_client.h +++ b/examples/androidnativeapi/jni/android_call_client.h @@ -52,22 +52,24 @@ class AndroidCallClient { const std::unique_ptr pc_observer_; - rtc::scoped_refptr pcf_ + webrtc::scoped_refptr pcf_ RTC_GUARDED_BY(thread_checker_); - std::unique_ptr network_thread_ RTC_GUARDED_BY(thread_checker_); - std::unique_ptr worker_thread_ RTC_GUARDED_BY(thread_checker_); - std::unique_ptr signaling_thread_ + std::unique_ptr network_thread_ + RTC_GUARDED_BY(thread_checker_); + std::unique_ptr worker_thread_ + RTC_GUARDED_BY(thread_checker_); + std::unique_ptr signaling_thread_ RTC_GUARDED_BY(thread_checker_); - std::unique_ptr> local_sink_ + std::unique_ptr> local_sink_ RTC_GUARDED_BY(thread_checker_); - std::unique_ptr> remote_sink_ + std::unique_ptr> remote_sink_ RTC_GUARDED_BY(thread_checker_); - rtc::scoped_refptr video_source_ + webrtc::scoped_refptr video_source_ RTC_GUARDED_BY(thread_checker_); webrtc::Mutex pc_mutex_; - rtc::scoped_refptr pc_ + webrtc::scoped_refptr pc_ RTC_GUARDED_BY(pc_mutex_); }; diff --git a/examples/androidnativeapi/jni/onload.cc b/examples/androidnativeapi/jni/onload.cc index 6ea5275d2a..e85bc629ef 100644 --- a/examples/androidnativeapi/jni/onload.cc +++ b/examples/androidnativeapi/jni/onload.cc @@ -19,12 +19,12 @@ namespace webrtc_examples { extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* jvm, void* reserved) { webrtc::InitAndroid(jvm); webrtc::JVM::Initialize(jvm); - RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()"; + RTC_CHECK(webrtc::InitializeSSL()) << "Failed to InitializeSSL()"; return JNI_VERSION_1_6; } extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM* jvm, void* reserved) { - RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()"; + RTC_CHECK(webrtc::CleanupSSL()) << "Failed to CleanupSSL()"; } } // namespace webrtc_examples diff --git a/examples/androidvoip/BUILD.gn b/examples/androidvoip/BUILD.gn index b4d53f81be..05bccd7f77 100644 --- a/examples/androidvoip/BUILD.gn +++ b/examples/androidvoip/BUILD.gn @@ -9,6 +9,7 @@ import("//webrtc.gni") if (is_android) { + import("//third_party/jni_zero/jni_zero.gni") rtc_android_apk("androidvoip") { testonly = true apk_name = "androidvoip" @@ -55,22 +56,24 @@ if (is_android) { deps = [ ":generated_jni", + "../../api/audio:builtin_audio_processing_builder", "../../rtc_base:async_packet_socket", "../../rtc_base:async_udp_socket", "../../rtc_base:logging", "../../rtc_base:network", "../../rtc_base:socket_address", "../../rtc_base:socket_server", - "../../rtc_base:ssl", + "../../rtc_base:ssl_adapter", "../../rtc_base:threading", 
"//api:transport_api", "//api/audio_codecs:audio_codecs_api", "//api/audio_codecs:builtin_audio_decoder_factory", "//api/audio_codecs:builtin_audio_encoder_factory", "//api/task_queue:default_task_queue_factory", + "//api/units:time_delta", "//api/voip:voip_api", "//api/voip:voip_engine_factory", - "//rtc_base/third_party/sigslot:sigslot", + "//rtc_base/network:received_packet", "//sdk/android:native_api_audio_device_module", "//sdk/android:native_api_base", "//sdk/android:native_api_jni", diff --git a/examples/androidvoip/DEPS b/examples/androidvoip/DEPS index edb714dd44..2431116806 100644 --- a/examples/androidvoip/DEPS +++ b/examples/androidvoip/DEPS @@ -1,3 +1,4 @@ include_rules = [ "+sdk/android/native_api", + "+third_party/jni_zero", ] diff --git a/examples/androidvoip/jni/android_voip_client.cc b/examples/androidvoip/jni/android_voip_client.cc index 92fad221d8..f15355d748 100644 --- a/examples/androidvoip/jni/android_voip_client.cc +++ b/examples/androidvoip/jni/android_voip_client.cc @@ -11,23 +11,25 @@ #include "examples/androidvoip/jni/android_voip_client.h" #include -#include +#include // no-presubmit-check #include #include #include -#include -#include #include #include #include "absl/memory/memory.h" +#include "api/audio/builtin_audio_processing_builder.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/task_queue/default_task_queue_factory.h" +#include "api/units/time_delta.h" +#include "api/voip/voip_base.h" #include "api/voip/voip_codec.h" #include "api/voip/voip_engine_factory.h" #include "api/voip/voip_network.h" +#include "api/voip/voip_statistics.h" #include "examples/androidvoip/generated_jni/VoipClient_jni.h" #include "rtc_base/logging.h" #include "rtc_base/network.h" @@ -52,29 +54,30 @@ namespace { // internally, it returns the default local address on a multi-homed // endpoint. Implementation copied from // BasicNetworkManager::QueryDefaultLocalAddress. -rtc::IPAddress QueryDefaultLocalAddress(int family) { +webrtc::IPAddress QueryDefaultLocalAddress(int family) { const char kPublicIPv4Host[] = "8.8.8.8"; const char kPublicIPv6Host[] = "2001:4860:4860::8888"; const int kPublicPort = 53; - std::unique_ptr thread = rtc::Thread::CreateWithSocketServer(); + std::unique_ptr thread = + webrtc::Thread::CreateWithSocketServer(); RTC_DCHECK(thread->socketserver() != nullptr); RTC_DCHECK(family == AF_INET || family == AF_INET6); - std::unique_ptr socket( + std::unique_ptr socket( thread->socketserver()->CreateSocket(family, SOCK_DGRAM)); if (!socket) { RTC_LOG_ERR(LS_ERROR) << "Socket creation failed"; - return rtc::IPAddress(); + return webrtc::IPAddress(); } auto host = family == AF_INET ? 
kPublicIPv4Host : kPublicIPv6Host; - if (socket->Connect(rtc::SocketAddress(host, kPublicPort)) < 0) { + if (socket->Connect(webrtc::SocketAddress(host, kPublicPort)) < 0) { if (socket->GetError() != ENETUNREACH && socket->GetError() != EHOSTUNREACH) { RTC_LOG(LS_INFO) << "Connect failed with " << socket->GetError(); } - return rtc::IPAddress(); + return webrtc::IPAddress(); } return socket->GetLocalAddress().ipaddr(); } @@ -108,8 +111,6 @@ int GetPayloadType(const std::string& codec_name) { return static_cast(PayloadType::kOpus); } else if (codec_name == "ISAC") { return static_cast(PayloadType::kIsac); - } else if (codec_name == "ILBC") { - return static_cast(PayloadType::kIlbc); } RTC_DCHECK_NOTREACHED(); @@ -122,14 +123,15 @@ namespace webrtc_examples { void AndroidVoipClient::Init( JNIEnv* env, - const webrtc::JavaParamRef& application_context) { + const jni_zero::JavaParamRef& application_context) { webrtc::VoipEngineConfig config; config.encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory(); config.decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory(); config.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory(); config.audio_device_module = webrtc::CreateJavaAudioDeviceModule(env, application_context.obj()); - config.audio_processing = webrtc::AudioProcessingBuilder().Create(); + config.audio_processing_builder = + std::make_unique(); voip_thread_->Start(); @@ -166,8 +168,8 @@ AndroidVoipClient::~AndroidVoipClient() { AndroidVoipClient* AndroidVoipClient::Create( JNIEnv* env, - const webrtc::JavaParamRef& application_context, - const webrtc::JavaParamRef& j_voip_client) { + const jni_zero::JavaParamRef& application_context, + const jni_zero::JavaParamRef& j_voip_client) { // Using `new` to access a non-public constructor. 
auto voip_client = absl::WrapUnique(new AndroidVoipClient(env, j_voip_client)); @@ -182,21 +184,22 @@ void AndroidVoipClient::GetSupportedCodecs(JNIEnv* env) { for (const webrtc::AudioCodecSpec& spec : supported_codecs_) { names.push_back(spec.format.name); } - webrtc::ScopedJavaLocalRef (*convert_function)( + jni_zero::ScopedJavaLocalRef (*convert_function)( JNIEnv*, const std::string&) = &webrtc::NativeToJavaString; Java_VoipClient_onGetSupportedCodecsCompleted( - env_, j_voip_client_, NativeToJavaList(env_, names, convert_function)); + env_, j_voip_client_, + webrtc::NativeToJavaList(env_, names, convert_function)); } void AndroidVoipClient::GetLocalIPAddress(JNIEnv* env) { RUN_ON_VOIP_THREAD(GetLocalIPAddress, env); std::string local_ip_address; - rtc::IPAddress ipv4_address = QueryDefaultLocalAddress(AF_INET); + webrtc::IPAddress ipv4_address = QueryDefaultLocalAddress(AF_INET); if (!ipv4_address.IsNil()) { local_ip_address = ipv4_address.ToString(); } else { - rtc::IPAddress ipv6_address = QueryDefaultLocalAddress(AF_INET6); + webrtc::IPAddress ipv6_address = QueryDefaultLocalAddress(AF_INET6); if (!ipv6_address.IsNil()) { local_ip_address = ipv6_address.ToString(); } @@ -224,7 +227,7 @@ void AndroidVoipClient::SetEncoder(const std::string& encoder) { void AndroidVoipClient::SetEncoder( JNIEnv* env, - const webrtc::JavaParamRef& j_encoder_string) { + const jni_zero::JavaParamRef& j_encoder_string) { const std::string& chosen_encoder = webrtc::JavaToNativeString(env, j_encoder_string); voip_thread_->PostTask( @@ -253,7 +256,7 @@ void AndroidVoipClient::SetDecoders(const std::vector& decoders) { void AndroidVoipClient::SetDecoders( JNIEnv* env, - const webrtc::JavaParamRef& j_decoder_strings) { + const jni_zero::JavaParamRef& j_decoder_strings) { const std::vector& chosen_decoders = webrtc::JavaListToNativeVector( env, j_decoder_strings, &webrtc::JavaToNativeString); @@ -265,13 +268,13 @@ void AndroidVoipClient::SetLocalAddress(const std::string& ip_address, const int port_number) { RTC_DCHECK_RUN_ON(voip_thread_.get()); - rtp_local_address_ = rtc::SocketAddress(ip_address, port_number); - rtcp_local_address_ = rtc::SocketAddress(ip_address, port_number + 1); + rtp_local_address_ = webrtc::SocketAddress(ip_address, port_number); + rtcp_local_address_ = webrtc::SocketAddress(ip_address, port_number + 1); } void AndroidVoipClient::SetLocalAddress( JNIEnv* env, - const webrtc::JavaParamRef& j_ip_address_string, + const jni_zero::JavaParamRef& j_ip_address_string, jint j_port_number_int) { const std::string& ip_address = webrtc::JavaToNativeString(env, j_ip_address_string); @@ -284,13 +287,13 @@ void AndroidVoipClient::SetRemoteAddress(const std::string& ip_address, const int port_number) { RTC_DCHECK_RUN_ON(voip_thread_.get()); - rtp_remote_address_ = rtc::SocketAddress(ip_address, port_number); - rtcp_remote_address_ = rtc::SocketAddress(ip_address, port_number + 1); + rtp_remote_address_ = webrtc::SocketAddress(ip_address, port_number); + rtcp_remote_address_ = webrtc::SocketAddress(ip_address, port_number + 1); } void AndroidVoipClient::SetRemoteAddress( JNIEnv* env, - const webrtc::JavaParamRef& j_ip_address_string, + const jni_zero::JavaParamRef& j_ip_address_string, jint j_port_number_int) { const std::string& ip_address = webrtc::JavaToNativeString(env, j_ip_address_string); @@ -303,31 +306,59 @@ void AndroidVoipClient::StartSession(JNIEnv* env) { RUN_ON_VOIP_THREAD(StartSession, env); // CreateChannel guarantees to return valid channel id. 
- channel_ = voip_engine_->Base().CreateChannel(this, absl::nullopt); + channel_ = voip_engine_->Base().CreateChannel(this, std::nullopt); - rtp_socket_.reset(rtc::AsyncUDPSocket::Create(voip_thread_->socketserver(), - rtp_local_address_)); + rtp_socket_.reset(webrtc::AsyncUDPSocket::Create(voip_thread_->socketserver(), + rtp_local_address_)); if (!rtp_socket_) { RTC_LOG_ERR(LS_ERROR) << "Socket creation failed"; Java_VoipClient_onStartSessionCompleted(env_, j_voip_client_, /*isSuccessful=*/false); return; } - rtp_socket_->SignalReadPacket.connect( - this, &AndroidVoipClient::OnSignalReadRTPPacket); - - rtcp_socket_.reset(rtc::AsyncUDPSocket::Create(voip_thread_->socketserver(), - rtcp_local_address_)); + rtp_socket_->RegisterReceivedPacketCallback( + [&](webrtc::AsyncPacketSocket* socket, + const webrtc::ReceivedIpPacket& packet) { + OnSignalReadRTPPacket(socket, packet); + }); + + rtcp_socket_.reset(webrtc::AsyncUDPSocket::Create( + voip_thread_->socketserver(), rtcp_local_address_)); if (!rtcp_socket_) { RTC_LOG_ERR(LS_ERROR) << "Socket creation failed"; Java_VoipClient_onStartSessionCompleted(env_, j_voip_client_, /*isSuccessful=*/false); return; } - rtcp_socket_->SignalReadPacket.connect( - this, &AndroidVoipClient::OnSignalReadRTCPPacket); + rtcp_socket_->RegisterReceivedPacketCallback( + [&](webrtc::AsyncPacketSocket* socket, + const webrtc::ReceivedIpPacket& packet) { + OnSignalReadRTCPPacket(socket, packet); + }); Java_VoipClient_onStartSessionCompleted(env_, j_voip_client_, /*isSuccessful=*/true); + voip_thread_->PostTask([this, env] { LogChannelStatistics(env); }); +} + +void AndroidVoipClient::LogChannelStatistics(JNIEnv* env) { + RUN_ON_VOIP_THREAD(LogChannelStatistics, env) + + if (!channel_) + return; + webrtc::ChannelStatistics stats; + if (voip_engine_->Statistics().GetChannelStatistics(*channel_, stats) == + webrtc::VoipResult::kInvalidArgument) + return; + + RTC_LOG(LS_INFO) << "PACKETS SENT: " << stats.packets_sent + << " BYTES SENT: " << stats.bytes_sent + << " PACKETS RECV: " << stats.packets_received + << " BYTES RECV: " << stats.bytes_received + << " JITTER: " << stats.jitter + << " PACKETS LOST: " << stats.packets_lost; + + voip_thread_->PostDelayedTask([this, env] { LogChannelStatistics(env); }, + webrtc::TimeDelta::Seconds(1)); } void AndroidVoipClient::StopSession(JNIEnv* env) { @@ -352,7 +383,7 @@ void AndroidVoipClient::StopSession(JNIEnv* env) { webrtc::VoipResult result = voip_engine_->Base().ReleaseChannel(*channel_); RTC_CHECK(result == webrtc::VoipResult::kOk); - channel_ = absl::nullopt; + channel_ = std::nullopt; Java_VoipClient_onStopSessionCompleted(env_, j_voip_client_, /*isSuccessful=*/true); } @@ -422,15 +453,15 @@ void AndroidVoipClient::SendRtpPacket(const std::vector& packet_copy) { RTC_DCHECK_RUN_ON(voip_thread_.get()); if (!rtp_socket_->SendTo(packet_copy.data(), packet_copy.size(), - rtp_remote_address_, rtc::PacketOptions())) { + rtp_remote_address_, + webrtc::AsyncSocketPacketOptions())) { RTC_LOG(LS_ERROR) << "Failed to send RTP packet"; } } -bool AndroidVoipClient::SendRtp(const uint8_t* packet, - size_t length, +bool AndroidVoipClient::SendRtp(webrtc::ArrayView packet, const webrtc::PacketOptions& options) { - std::vector packet_copy(packet, packet + length); + std::vector packet_copy(packet.begin(), packet.end()); voip_thread_->PostTask([this, packet_copy = std::move(packet_copy)] { SendRtpPacket(packet_copy); }); @@ -442,13 +473,14 @@ void AndroidVoipClient::SendRtcpPacket( RTC_DCHECK_RUN_ON(voip_thread_.get()); if 
(!rtcp_socket_->SendTo(packet_copy.data(), packet_copy.size(), - rtcp_remote_address_, rtc::PacketOptions())) { + rtcp_remote_address_, + webrtc::AsyncSocketPacketOptions())) { RTC_LOG(LS_ERROR) << "Failed to send RTCP packet"; } } -bool AndroidVoipClient::SendRtcp(const uint8_t* packet, size_t length) { - std::vector packet_copy(packet, packet + length); +bool AndroidVoipClient::SendRtcp(webrtc::ArrayView packet) { + std::vector packet_copy(packet.begin(), packet.end()); voip_thread_->PostTask([this, packet_copy = std::move(packet_copy)] { SendRtcpPacket(packet_copy); }); @@ -464,16 +496,15 @@ void AndroidVoipClient::ReadRTPPacket(const std::vector& packet_copy) { } webrtc::VoipResult result = voip_engine_->Network().ReceivedRTPPacket( *channel_, - rtc::ArrayView(packet_copy.data(), packet_copy.size())); + webrtc::ArrayView(packet_copy.data(), packet_copy.size())); RTC_CHECK(result == webrtc::VoipResult::kOk); } -void AndroidVoipClient::OnSignalReadRTPPacket(rtc::AsyncPacketSocket* socket, - const char* rtp_packet, - size_t size, - const rtc::SocketAddress& addr, - const int64_t& timestamp) { - std::vector packet_copy(rtp_packet, rtp_packet + size); +void AndroidVoipClient::OnSignalReadRTPPacket( + webrtc::AsyncPacketSocket* socket, + const webrtc::ReceivedIpPacket& packet) { + std::vector packet_copy(packet.payload().begin(), + packet.payload().end()); voip_thread_->PostTask([this, packet_copy = std::move(packet_copy)] { ReadRTPPacket(packet_copy); }); @@ -489,16 +520,15 @@ void AndroidVoipClient::ReadRTCPPacket( } webrtc::VoipResult result = voip_engine_->Network().ReceivedRTCPPacket( *channel_, - rtc::ArrayView(packet_copy.data(), packet_copy.size())); + webrtc::ArrayView(packet_copy.data(), packet_copy.size())); RTC_CHECK(result == webrtc::VoipResult::kOk); } -void AndroidVoipClient::OnSignalReadRTCPPacket(rtc::AsyncPacketSocket* socket, - const char* rtcp_packet, - size_t size, - const rtc::SocketAddress& addr, - const int64_t& timestamp) { - std::vector packet_copy(rtcp_packet, rtcp_packet + size); +void AndroidVoipClient::OnSignalReadRTCPPacket( + webrtc::AsyncPacketSocket* socket, + const webrtc::ReceivedIpPacket& packet) { + std::vector packet_copy(packet.payload().begin(), + packet.payload().end()); voip_thread_->PostTask([this, packet_copy = std::move(packet_copy)] { ReadRTCPPacket(packet_copy); }); @@ -506,8 +536,8 @@ void AndroidVoipClient::OnSignalReadRTCPPacket(rtc::AsyncPacketSocket* socket, static jlong JNI_VoipClient_CreateClient( JNIEnv* env, - const webrtc::JavaParamRef& application_context, - const webrtc::JavaParamRef& j_voip_client) { + const jni_zero::JavaParamRef& application_context, + const jni_zero::JavaParamRef& j_voip_client) { return webrtc::NativeToJavaPointer( AndroidVoipClient::Create(env, application_context, j_voip_client)); } diff --git a/examples/androidvoip/jni/android_voip_client.h b/examples/androidvoip/jni/android_voip_client.h index 8e1edd5ef9..29a0fc82e5 100644 --- a/examples/androidvoip/jni/android_voip_client.h +++ b/examples/androidvoip/jni/android_voip_client.h @@ -23,8 +23,8 @@ #include "api/voip/voip_engine.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/async_udp_socket.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/socket_address.h" -#include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "sdk/android/native_api/jni/scoped_java_ref.h" @@ -36,12 +36,11 @@ namespace webrtc_examples { // webrtc::Transport to send RTP/RTCP packets to the remote endpoint. 
// It also creates methods (slots) for sockets to connect to in // order to receive RTP/RTCP packets. AndroidVoipClient does all -// operations with rtc::Thread (voip_thread_), this is to comply +// operations with webrtc::Thread (voip_thread_), this is to comply // with consistent thread usage requirement with ProcessThread used // within VoipEngine, as well as providing asynchronicity to the // caller. AndroidVoipClient is meant to be used by Java through JNI. -class AndroidVoipClient : public webrtc::Transport, - public sigslot::has_slots<> { +class AndroidVoipClient : public webrtc::Transport { public: // Returns a pointer to an AndroidVoipClient object. Clients should // use this factory method to create AndroidVoipClient objects. The @@ -50,8 +49,8 @@ class AndroidVoipClient : public webrtc::Transport, // they are done with it (this class provides a Delete() method). static AndroidVoipClient* Create( JNIEnv* env, - const webrtc::JavaParamRef& application_context, - const webrtc::JavaParamRef& j_voip_client); + const jni_zero::JavaParamRef& application_context, + const jni_zero::JavaParamRef& j_voip_client); ~AndroidVoipClient() override; @@ -67,22 +66,23 @@ class AndroidVoipClient : public webrtc::Transport, // Sets the encoder used by the VoIP API. void SetEncoder(JNIEnv* env, - const webrtc::JavaParamRef& j_encoder_string); + const jni_zero::JavaParamRef& j_encoder_string); // Sets the decoders used by the VoIP API. void SetDecoders(JNIEnv* env, - const webrtc::JavaParamRef& j_decoder_strings); + const jni_zero::JavaParamRef& j_decoder_strings); // Sets two local/remote addresses, one for RTP packets, and another for // RTCP packets. The RTP address will have IP address j_ip_address_string // and port number j_port_number_int, the RTCP address will have IP address // j_ip_address_string and port number j_port_number_int+1. - void SetLocalAddress(JNIEnv* env, - const webrtc::JavaParamRef& j_ip_address_string, - jint j_port_number_int); + void SetLocalAddress( + JNIEnv* env, + const jni_zero::JavaParamRef& j_ip_address_string, + jint j_port_number_int); void SetRemoteAddress( JNIEnv* env, - const webrtc::JavaParamRef& j_ip_address_string, + const jni_zero::JavaParamRef& j_ip_address_string, jint j_port_number_int); // Starts a VoIP session, then calls a callback method with a boolean @@ -118,31 +118,23 @@ class AndroidVoipClient : public webrtc::Transport, void Delete(JNIEnv* env); // Implementation for Transport. - bool SendRtp(const uint8_t* packet, - size_t length, + bool SendRtp(webrtc::ArrayView packet, const webrtc::PacketOptions& options) override; - bool SendRtcp(const uint8_t* packet, size_t length) override; - - // Slots for sockets to connect to. 
- void OnSignalReadRTPPacket(rtc::AsyncPacketSocket* socket, - const char* rtp_packet, - size_t size, - const rtc::SocketAddress& addr, - const int64_t& timestamp); - void OnSignalReadRTCPPacket(rtc::AsyncPacketSocket* socket, - const char* rtcp_packet, - size_t size, - const rtc::SocketAddress& addr, - const int64_t& timestamp); + bool SendRtcp(webrtc::ArrayView packet) override; + + void OnSignalReadRTPPacket(webrtc::AsyncPacketSocket* socket, + const webrtc::ReceivedIpPacket& packet); + void OnSignalReadRTCPPacket(webrtc::AsyncPacketSocket* socket, + const webrtc::ReceivedIpPacket& packet); private: AndroidVoipClient(JNIEnv* env, - const webrtc::JavaParamRef& j_voip_client) - : voip_thread_(rtc::Thread::CreateWithSocketServer()), + const jni_zero::JavaParamRef& j_voip_client) + : voip_thread_(webrtc::Thread::CreateWithSocketServer()), j_voip_client_(env, j_voip_client) {} void Init(JNIEnv* env, - const webrtc::JavaParamRef& application_context); + const jni_zero::JavaParamRef& application_context); // Overloaded methods having native C++ variables as arguments. void SetEncoder(const std::string& encoder); @@ -158,11 +150,14 @@ class AndroidVoipClient : public webrtc::Transport, void ReadRTPPacket(const std::vector& packet_copy); void ReadRTCPPacket(const std::vector& packet_copy); + // Method to print out ChannelStatistics + void LogChannelStatistics(JNIEnv* env); + // Used to invoke operations and send/receive RTP/RTCP packets. - std::unique_ptr voip_thread_; + std::unique_ptr voip_thread_; // Reference to the VoipClient java instance used to // invoke callbacks when operations are finished. - webrtc::ScopedJavaGlobalRef j_voip_client_ + jni_zero::ScopedJavaGlobalRef j_voip_client_ RTC_GUARDED_BY(voip_thread_); // A list of AudioCodecSpec supported by the built-in // encoder/decoder factories. @@ -173,15 +168,16 @@ class AndroidVoipClient : public webrtc::Transport, // The entry point to all VoIP APIs. std::unique_ptr voip_engine_ RTC_GUARDED_BY(voip_thread_); // Used by the VoIP API to facilitate a VoIP session. - absl::optional channel_ RTC_GUARDED_BY(voip_thread_); + std::optional channel_ RTC_GUARDED_BY(voip_thread_); // Members below are used for network related operations. 
- std::unique_ptr rtp_socket_ RTC_GUARDED_BY(voip_thread_); - std::unique_ptr rtcp_socket_ + std::unique_ptr rtp_socket_ + RTC_GUARDED_BY(voip_thread_); + std::unique_ptr rtcp_socket_ RTC_GUARDED_BY(voip_thread_); - rtc::SocketAddress rtp_local_address_ RTC_GUARDED_BY(voip_thread_); - rtc::SocketAddress rtcp_local_address_ RTC_GUARDED_BY(voip_thread_); - rtc::SocketAddress rtp_remote_address_ RTC_GUARDED_BY(voip_thread_); - rtc::SocketAddress rtcp_remote_address_ RTC_GUARDED_BY(voip_thread_); + webrtc::SocketAddress rtp_local_address_ RTC_GUARDED_BY(voip_thread_); + webrtc::SocketAddress rtcp_local_address_ RTC_GUARDED_BY(voip_thread_); + webrtc::SocketAddress rtp_remote_address_ RTC_GUARDED_BY(voip_thread_); + webrtc::SocketAddress rtcp_remote_address_ RTC_GUARDED_BY(voip_thread_); }; } // namespace webrtc_examples diff --git a/examples/androidvoip/jni/onload.cc b/examples/androidvoip/jni/onload.cc index b952de348b..9c1de12629 100644 --- a/examples/androidvoip/jni/onload.cc +++ b/examples/androidvoip/jni/onload.cc @@ -17,12 +17,12 @@ namespace webrtc_examples { extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* jvm, void* reserved) { webrtc::InitAndroid(jvm); - RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()"; + RTC_CHECK(webrtc::InitializeSSL()) << "Failed to InitializeSSL()"; return JNI_VERSION_1_6; } extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM* jvm, void* reserved) { - RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()"; + RTC_CHECK(webrtc::CleanupSSL()) << "Failed to CleanupSSL()"; } } // namespace webrtc_examples diff --git a/examples/objc/AppRTCMobile/ARDAppClient+Internal.h b/examples/objc/AppRTCMobile/ARDAppClient+Internal.h index 31e0e4dd7c..4a0f9dc669 100644 --- a/examples/objc/AppRTCMobile/ARDAppClient+Internal.h +++ b/examples/objc/AppRTCMobile/ARDAppClient+Internal.h @@ -18,7 +18,8 @@ @class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -@interface ARDAppClient () +@interface ARDAppClient () // All properties should only be mutated from the main queue. @property(nonatomic, strong) id roomServerClient; @@ -42,7 +43,8 @@ @property(nonatomic, strong) NSURL *webSocketRestURL; @property(nonatomic, readonly) BOOL isLoopback; -@property(nonatomic, strong) RTC_OBJC_TYPE(RTCMediaConstraints) * defaultPeerConnectionConstraints; +@property(nonatomic, strong) RTC_OBJC_TYPE(RTCMediaConstraints) * + defaultPeerConnectionConstraints; - (instancetype)initWithRoomServerClient:(id)rsClient signalingChannel:(id)channel diff --git a/examples/objc/AppRTCMobile/ARDAppClient.h b/examples/objc/AppRTCMobile/ARDAppClient.h index 91d2cef1ce..49571d3a2f 100644 --- a/examples/objc/AppRTCMobile/ARDAppClient.h +++ b/examples/objc/AppRTCMobile/ARDAppClient.h @@ -33,12 +33,15 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) { // main queue. 
@protocol ARDAppClientDelegate -- (void)appClient:(ARDAppClient *)client didChangeState:(ARDAppClientState)state; +- (void)appClient:(ARDAppClient *)client + didChangeState:(ARDAppClientState)state; -- (void)appClient:(ARDAppClient *)client didChangeConnectionState:(RTCIceConnectionState)state; +- (void)appClient:(ARDAppClient *)client + didChangeConnectionState:(RTCIceConnectionState)state; - (void)appClient:(ARDAppClient *)client - didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer; + didCreateLocalCapturer: + (RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer; - (void)appClient:(ARDAppClient *)client didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack; @@ -48,14 +51,17 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) { - (void)appClient:(ARDAppClient *)client didError:(NSError *)error; -- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats; +- (void)appClient:(ARDAppClient *)client + didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats; @optional - (void)appClient:(ARDAppClient *)client - didCreateLocalFileCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer; + didCreateLocalFileCapturer: + (RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer; - (void)appClient:(ARDAppClient *)client - didCreateLocalExternalSampleCapturer:(ARDExternalSampleCapturer *)externalSampleCapturer; + didCreateLocalExternalSampleCapturer: + (ARDExternalSampleCapturer *)externalSampleCapturer; @end diff --git a/examples/objc/AppRTCMobile/ARDAppClient.m b/examples/objc/AppRTCMobile/ARDAppClient.m index 4420972598..72961abad9 100644 --- a/examples/objc/AppRTCMobile/ARDAppClient.m +++ b/examples/objc/AppRTCMobile/ARDAppClient.m @@ -41,25 +41,25 @@ #import "RTCIceCandidate+JSON.h" #import "RTCSessionDescription+JSON.h" -static NSString * const kARDIceServerRequestUrl = @"https://appr.tc/params"; +static NSString *const kARDIceServerRequestUrl = @"https://appr.tc/params"; -static NSString * const kARDAppClientErrorDomain = @"ARDAppClient"; +static NSString *const kARDAppClientErrorDomain = @"ARDAppClient"; static NSInteger const kARDAppClientErrorUnknown = -1; static NSInteger const kARDAppClientErrorRoomFull = -2; static NSInteger const kARDAppClientErrorCreateSDP = -3; static NSInteger const kARDAppClientErrorSetSDP = -4; static NSInteger const kARDAppClientErrorInvalidClient = -5; static NSInteger const kARDAppClientErrorInvalidRoom = -6; -static NSString * const kARDMediaStreamId = @"ARDAMS"; -static NSString * const kARDAudioTrackId = @"ARDAMSa0"; -static NSString * const kARDVideoTrackId = @"ARDAMSv0"; -static NSString * const kARDVideoTrackKind = @"video"; +static NSString *const kARDMediaStreamId = @"ARDAMS"; +static NSString *const kARDAudioTrackId = @"ARDAMSa0"; +static NSString *const kARDVideoTrackId = @"ARDAMSv0"; +static NSString *const kARDVideoTrackKind = @"video"; // TODO(tkchin): Add these as UI options. #if defined(WEBRTC_IOS) static BOOL const kARDAppClientEnableTracing = NO; static BOOL const kARDAppClientEnableRtcEventLog = YES; -static int64_t const kARDAppClientAecDumpMaxSizeInBytes = 5e6; // 5 MB. +static int64_t const kARDAppClientAecDumpMaxSizeInBytes = 5e6; // 5 MB. static int64_t const kARDAppClientRtcEventLogMaxSizeInBytes = 5e6; // 5 MB. 
#endif static int const kKbpsMultiplier = 1000; @@ -84,7 +84,8 @@ - (instancetype)initWithInterval:(NSTimeInterval)interval repeats:(BOOL)repeats timerHandler:(void (^)(void))timerHandler { NSParameterAssert(timerHandler); - if (self = [super init]) { + self = [super init]; + if (self) { _timerHandler = timerHandler; _timer = [NSTimer scheduledTimerWithTimeInterval:interval target:self @@ -123,7 +124,7 @@ @implementation ARDAppClient { @synthesize factory = _factory; @synthesize messageQueue = _messageQueue; @synthesize isTurnComplete = _isTurnComplete; -@synthesize hasReceivedSdp = _hasReceivedSdp; +@synthesize hasReceivedSdp = _hasReceivedSdp; @synthesize roomId = _roomId; @synthesize clientId = _clientId; @synthesize isInitiator = _isInitiator; @@ -140,7 +141,8 @@ - (instancetype)init { } - (instancetype)initWithDelegate:(id)delegate { - if (self = [super init]) { + self = [super init]; + if (self) { _roomServerClient = [[ARDAppEngineClient alloc] init]; _delegate = delegate; NSURL *turnRequestURL = [NSURL URLWithString:kARDIceServerRequestUrl]; @@ -160,7 +162,8 @@ - (instancetype)initWithRoomServerClient:(id)rsClient NSParameterAssert(rsClient); NSParameterAssert(channel); NSParameterAssert(turnClient); - if (self = [super init]) { + self = [super init]; + if (self) { _roomServerClient = rsClient; _channel = channel; _turnClient = turnClient; @@ -188,18 +191,21 @@ - (void)setShouldGetStats:(BOOL)shouldGetStats { } if (shouldGetStats) { __weak ARDAppClient *weakSelf = self; - _statsTimer = [[ARDTimerProxy alloc] initWithInterval:1 - repeats:YES - timerHandler:^{ - ARDAppClient *strongSelf = weakSelf; - [strongSelf.peerConnection statisticsWithCompletionHandler:^( - RTC_OBJC_TYPE(RTCStatisticsReport) * stats) { - dispatch_async(dispatch_get_main_queue(), ^{ - ARDAppClient *strongSelf = weakSelf; - [strongSelf.delegate appClient:strongSelf didGetStats:stats]; - }); - }]; - }]; + _statsTimer = [[ARDTimerProxy alloc] + initWithInterval:1 + repeats:YES + timerHandler:^{ + ARDAppClient *strongSelf = weakSelf; + [strongSelf.peerConnection + statisticsWithCompletionHandler:^( + RTC_OBJC_TYPE(RTCStatisticsReport) * stats) { + dispatch_async(dispatch_get_main_queue(), ^{ + ARDAppClient *strongSelf = weakSelf; + [strongSelf.delegate appClient:strongSelf + didGetStats:stats]; + }); + }]; + }]; } else { [_statsTimer invalidate]; _statsTimer = nil; @@ -229,13 +235,14 @@ - (void)connectToRoomWithId:(NSString *)roomId RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) *encoderFactory = [[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init]; encoderFactory.preferredCodec = [settings currentVideoCodecSettingFromStore]; - _factory = - [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] initWithEncoderFactory:encoderFactory - decoderFactory:decoderFactory]; + _factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] + initWithEncoderFactory:encoderFactory + decoderFactory:decoderFactory]; #if defined(WEBRTC_IOS) if (kARDAppClientEnableTracing) { - NSString *filePath = [self documentsFilePathForFileName:@"webrtc-trace.txt"]; + NSString *filePath = + [self documentsFilePathForFileName:@"webrtc-trace.txt"]; RTCStartInternalCapture(filePath); } #endif @@ -245,7 +252,8 @@ - (void)connectToRoomWithId:(NSString *)roomId [_turnClient requestServersWithCompletionHandler:^(NSArray *turnServers, NSError *error) { if (error) { - RTCLogError(@"Error retrieving TURN servers: %@", error.localizedDescription); + RTCLogError(@"Error retrieving TURN servers: %@", + error.localizedDescription); } ARDAppClient *strongSelf = 
weakSelf; [strongSelf.iceServers addObjectsFromArray:turnServers]; @@ -254,40 +262,41 @@ - (void)connectToRoomWithId:(NSString *)roomId }]; // Join room on room server. - [_roomServerClient joinRoomWithRoomId:roomId - isLoopback:isLoopback - completionHandler:^(ARDJoinResponse *response, NSError *error) { - ARDAppClient *strongSelf = weakSelf; - if (error) { - [strongSelf.delegate appClient:strongSelf didError:error]; - return; - } - NSError *joinError = - [[strongSelf class] errorForJoinResultType:response.result]; - if (joinError) { - RTCLogError(@"Failed to join room:%@ on room server.", roomId); - [strongSelf disconnect]; - [strongSelf.delegate appClient:strongSelf didError:joinError]; - return; - } - RTCLog(@"Joined room:%@ on room server.", roomId); - strongSelf.roomId = response.roomId; - strongSelf.clientId = response.clientId; - strongSelf.isInitiator = response.isInitiator; - for (ARDSignalingMessage *message in response.messages) { - if (message.type == kARDSignalingMessageTypeOffer || - message.type == kARDSignalingMessageTypeAnswer) { - strongSelf.hasReceivedSdp = YES; - [strongSelf.messageQueue insertObject:message atIndex:0]; - } else { - [strongSelf.messageQueue addObject:message]; - } - } - strongSelf.webSocketURL = response.webSocketURL; - strongSelf.webSocketRestURL = response.webSocketRestURL; - [strongSelf registerWithColliderIfReady]; - [strongSelf startSignalingIfReady]; - }]; + [_roomServerClient + joinRoomWithRoomId:roomId + isLoopback:isLoopback + completionHandler:^(ARDJoinResponse *response, NSError *error) { + ARDAppClient *strongSelf = weakSelf; + if (error) { + [strongSelf.delegate appClient:strongSelf didError:error]; + return; + } + NSError *joinError = + [[strongSelf class] errorForJoinResultType:response.result]; + if (joinError) { + RTCLogError(@"Failed to join room:%@ on room server.", roomId); + [strongSelf disconnect]; + [strongSelf.delegate appClient:strongSelf didError:joinError]; + return; + } + RTCLog(@"Joined room:%@ on room server.", roomId); + strongSelf.roomId = response.roomId; + strongSelf.clientId = response.clientId; + strongSelf.isInitiator = response.isInitiator; + for (ARDSignalingMessage *message in response.messages) { + if (message.type == kARDSignalingMessageTypeOffer || + message.type == kARDSignalingMessageTypeAnswer) { + strongSelf.hasReceivedSdp = YES; + [strongSelf.messageQueue insertObject:message atIndex:0]; + } else { + [strongSelf.messageQueue addObject:message]; + } + } + strongSelf.webSocketURL = response.webSocketURL; + strongSelf.webSocketRestURL = response.webSocketRestURL; + [strongSelf registerWithColliderIfReady]; + [strongSelf startSignalingIfReady]; + }]; } - (void)disconnect { @@ -385,7 +394,8 @@ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection } - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection - didStartReceivingOnTransceiver:(RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver { + didStartReceivingOnTransceiver: + (RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver { RTC_OBJC_TYPE(RTCMediaStreamTrack) *track = transceiver.receiver.track; RTCLog(@"Now receiving %@ on track %@.", track.kind, track.trackId); } @@ -395,7 +405,8 @@ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection RTCLog(@"Stream was removed."); } -- (void)peerConnectionShouldNegotiate:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection { +- (void)peerConnectionShouldNegotiate: + (RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection { RTCLog(@"WARNING: Renegotiation needed but 
unimplemented."); } @@ -427,8 +438,10 @@ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection } - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection - didFailToGatherIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidateErrorEvent) *)event { - RTCLog(@"Failed to gather ICE candidate. address: %@, port: %d, url: %@, errorCode: %d, " + didFailToGatherIceCandidate: + (RTC_OBJC_TYPE(RTCIceCandidateErrorEvent) *)event { + RTCLog(@"Failed to gather ICE candidate. address: %@, port: %d, url: %@, " + @"errorCode: %d, " @"errorText: %@", event.address, event.port, @@ -438,7 +451,8 @@ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection } - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection - didRemoveIceCandidates:(NSArray *)candidates { + didRemoveIceCandidates: + (NSArray *)candidates { dispatch_async(dispatch_get_main_queue(), ^{ ARDICECandidateRemovalMessage *message = [[ARDICECandidateRemovalMessage alloc] @@ -471,7 +485,7 @@ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection RTCLogError(@"Failed to create session description. Error: %@", error); [self disconnect]; NSDictionary *userInfo = @{ - NSLocalizedDescriptionKey: @"Failed to create session description.", + NSLocalizedDescriptionKey : @"Failed to create session description.", }; NSError *sdpError = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain @@ -481,12 +495,13 @@ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection return; } __weak ARDAppClient *weakSelf = self; - [self.peerConnection setLocalDescription:sdp - completionHandler:^(NSError *error) { - ARDAppClient *strongSelf = weakSelf; - [strongSelf peerConnection:strongSelf.peerConnection - didSetSessionDescriptionWithError:error]; - }]; + [self.peerConnection + setLocalDescription:sdp + completionHandler:^(NSError *error) { + ARDAppClient *strongSelf = weakSelf; + [strongSelf peerConnection:strongSelf.peerConnection + didSetSessionDescriptionWithError:error]; + }]; ARDSessionDescriptionMessage *message = [[ARDSessionDescriptionMessage alloc] initWithDescription:sdp]; [self sendSignalingMessage:message]; @@ -501,7 +516,7 @@ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection RTCLogError(@"Failed to set session description. Error: %@", error); [self disconnect]; NSDictionary *userInfo = @{ - NSLocalizedDescriptionKey: @"Failed to set session description.", + NSLocalizedDescriptionKey : @"Failed to set session description.", }; NSError *sdpError = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain @@ -513,11 +528,13 @@ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection // If we're answering and we've just set the remote offer we need to create // an answer and set the local description. 
if (!self.isInitiator && !self.peerConnection.localDescription) { - RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultAnswerConstraints]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [self defaultAnswerConstraints]; __weak ARDAppClient *weakSelf = self; [self.peerConnection answerForConstraints:constraints - completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, NSError * error) { + completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, + NSError * error) { ARDAppClient *strongSelf = weakSelf; [strongSelf peerConnection:strongSelf.peerConnection didCreateSessionDescription:sdp @@ -559,10 +576,15 @@ - (void)startSignalingIfReady { self.state = kARDAppClientStateConnected; // Create peer connection. - RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultPeerConnectionConstraints]; - RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; - RTC_OBJC_TYPE(RTCCertificate) *pcert = [RTC_OBJC_TYPE(RTCCertificate) - generateCertificateWithParams:@{@"expires" : @100000, @"name" : @"RSASSA-PKCS1-v1_5"}]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [self defaultPeerConnectionConstraints]; + RTC_OBJC_TYPE(RTCConfiguration) *config = + [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + RTC_OBJC_TYPE(RTCCertificate) *pcert = + [RTC_OBJC_TYPE(RTCCertificate) generateCertificateWithParams:@{ + @"expires" : @100000, + @"name" : @"RSASSA-PKCS1-v1_5" + }]; config.iceServers = _iceServers; config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; config.certificate = pcert; @@ -577,7 +599,8 @@ - (void)startSignalingIfReady { __weak ARDAppClient *weakSelf = self; [_peerConnection offerForConstraints:[self defaultOfferConstraints] - completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, NSError * error) { + completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, + NSError * error) { ARDAppClient *strongSelf = weakSelf; [strongSelf peerConnection:strongSelf.peerConnection didCreateSessionDescription:sdp @@ -590,18 +613,23 @@ - (void)startSignalingIfReady { #if defined(WEBRTC_IOS) // Start event log. if (kARDAppClientEnableRtcEventLog) { - NSString *filePath = [self documentsFilePathForFileName:@"webrtc-rtceventlog"]; - if (![_peerConnection startRtcEventLogWithFilePath:filePath - maxSizeInBytes:kARDAppClientRtcEventLogMaxSizeInBytes]) { + NSString *filePath = + [self documentsFilePathForFileName:@"webrtc-rtceventlog"]; + if (![_peerConnection + startRtcEventLogWithFilePath:filePath + maxSizeInBytes: + kARDAppClientRtcEventLogMaxSizeInBytes]) { RTCLogError(@"Failed to start event logging."); } } // Start aecdump diagnostic recording. if ([_settings currentCreateAecDumpSettingFromStore]) { - NSString *filePath = [self documentsFilePathForFileName:@"webrtc-audio.aecdump"]; - if (![_factory startAecDumpWithFilePath:filePath - maxSizeInBytes:kARDAppClientAecDumpMaxSizeInBytes]) { + NSString *filePath = + [self documentsFilePathForFileName:@"webrtc-audio.aecdump"]; + if (![_factory + startAecDumpWithFilePath:filePath + maxSizeInBytes:kARDAppClientAecDumpMaxSizeInBytes]) { RTCLogError(@"Failed to start aec dump."); } } @@ -626,13 +654,14 @@ - (void)drainMessageQueueIfReady { // Processes the given signaling message based on its type. 
- (void)processSignalingMessage:(ARDSignalingMessage *)message { NSParameterAssert(_peerConnection || - message.type == kARDSignalingMessageTypeBye); + message.type == kARDSignalingMessageTypeBye); switch (message.type) { case kARDSignalingMessageTypeOffer: case kARDSignalingMessageTypeAnswer: { ARDSessionDescriptionMessage *sdpMessage = (ARDSessionDescriptionMessage *)message; - RTC_OBJC_TYPE(RTCSessionDescription) *description = sdpMessage.sessionDescription; + RTC_OBJC_TYPE(RTCSessionDescription) *description = + sdpMessage.sessionDescription; __weak ARDAppClient *weakSelf = self; [_peerConnection setRemoteDescription:description completionHandler:^(NSError *error) { @@ -650,7 +679,8 @@ - (void)processSignalingMessage:(ARDSignalingMessage *)message { completionHandler:^(NSError *error) { ARDAppClient *strongSelf = weakSelf; if (error) { - [strongSelf.delegate appClient:strongSelf didError:error]; + [strongSelf.delegate appClient:strongSelf + didError:error]; } }]; break; @@ -676,23 +706,23 @@ - (void)processSignalingMessage:(ARDSignalingMessage *)message { - (void)sendSignalingMessage:(ARDSignalingMessage *)message { if (_isInitiator) { __weak ARDAppClient *weakSelf = self; - [_roomServerClient sendMessage:message - forRoomId:_roomId - clientId:_clientId - completionHandler:^(ARDMessageResponse *response, - NSError *error) { - ARDAppClient *strongSelf = weakSelf; - if (error) { - [strongSelf.delegate appClient:strongSelf didError:error]; - return; - } - NSError *messageError = - [[strongSelf class] errorForMessageResultType:response.result]; - if (messageError) { - [strongSelf.delegate appClient:strongSelf didError:messageError]; - return; - } - }]; + [_roomServerClient + sendMessage:message + forRoomId:_roomId + clientId:_clientId + completionHandler:^(ARDMessageResponse *response, NSError *error) { + ARDAppClient *strongSelf = weakSelf; + if (error) { + [strongSelf.delegate appClient:strongSelf didError:error]; + return; + } + NSError *messageError = + [[strongSelf class] errorForMessageResultType:response.result]; + if (messageError) { + [strongSelf.delegate appClient:strongSelf didError:messageError]; + return; + } + }]; } else { [_channel sendMessage:message]; } @@ -702,26 +732,30 @@ - (void)setMaxBitrateForPeerConnectionVideoSender { for (RTC_OBJC_TYPE(RTCRtpSender) * sender in _peerConnection.senders) { if (sender.track != nil) { if ([sender.track.kind isEqualToString:kARDVideoTrackKind]) { - [self setMaxBitrate:[_settings currentMaxBitrateSettingFromStore] forVideoSender:sender]; + [self setMaxBitrate:[_settings currentMaxBitrateSettingFromStore] + forVideoSender:sender]; } } } } -- (void)setMaxBitrate:(NSNumber *)maxBitrate forVideoSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender { +- (void)setMaxBitrate:(NSNumber *)maxBitrate + forVideoSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender { if (maxBitrate.intValue <= 0) { return; } RTC_OBJC_TYPE(RTCRtpParameters) *parametersToModify = sender.parameters; - for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * encoding in parametersToModify.encodings) { + for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * + encoding in parametersToModify.encodings) { encoding.maxBitrateBps = @(maxBitrate.intValue * kKbpsMultiplier); } [sender setParameters:parametersToModify]; } - (RTC_OBJC_TYPE(RTCRtpTransceiver) *)videoTransceiver { - for (RTC_OBJC_TYPE(RTCRtpTransceiver) * transceiver in _peerConnection.transceivers) { + for (RTC_OBJC_TYPE(RTCRtpTransceiver) * + transceiver in _peerConnection.transceivers) { if (transceiver.mediaType == RTCRtpMediaTypeVideo) { 
return transceiver; } @@ -730,20 +764,24 @@ - (void)setMaxBitrate:(NSNumber *)maxBitrate forVideoSender:(RTC_OBJC_TYPE(RTCRt } - (void)createMediaSenders { - RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultMediaAudioConstraints]; - RTC_OBJC_TYPE(RTCAudioSource) *source = [_factory audioSourceWithConstraints:constraints]; - RTC_OBJC_TYPE(RTCAudioTrack) *track = [_factory audioTrackWithSource:source - trackId:kARDAudioTrackId]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [self defaultMediaAudioConstraints]; + RTC_OBJC_TYPE(RTCAudioSource) *source = + [_factory audioSourceWithConstraints:constraints]; + RTC_OBJC_TYPE(RTCAudioTrack) *track = + [_factory audioTrackWithSource:source trackId:kARDAudioTrackId]; [_peerConnection addTrack:track streamIds:@[ kARDMediaStreamId ]]; _localVideoTrack = [self createLocalVideoTrack]; if (_localVideoTrack) { - [_peerConnection addTrack:_localVideoTrack streamIds:@[ kARDMediaStreamId ]]; + [_peerConnection addTrack:_localVideoTrack + streamIds:@[ kARDMediaStreamId ]]; [_delegate appClient:self didReceiveLocalVideoTrack:_localVideoTrack]; - // We can set up rendering for the remote track right away since the transceiver already has an - // RTC_OBJC_TYPE(RTCRtpReceiver) with a track. The track will automatically get unmuted and - // produce frames once RTP is received. - RTC_OBJC_TYPE(RTCVideoTrack) *track = - (RTC_OBJC_TYPE(RTCVideoTrack) *)([self videoTransceiver].receiver.track); + // We can set up rendering for the remote track right away since the + // transceiver already has an RTC_OBJC_TYPE(RTCRtpReceiver) with a track. + // The track will automatically get unmuted and produce frames once RTP is + // received. + RTC_OBJC_TYPE(RTCVideoTrack) *track = (RTC_OBJC_TYPE(RTCVideoTrack) *)( + [self videoTransceiver].receiver.track); [_delegate appClient:self didReceiveRemoteVideoTrack:track]; } } @@ -786,10 +824,9 @@ - (void)registerWithColliderIfReady { } // Open WebSocket connection. 
if (!_channel) { - _channel = - [[ARDWebSocketChannel alloc] initWithURL:_websocketURL - restURL:_websocketRestURL - delegate:self]; + _channel = [[ARDWebSocketChannel alloc] initWithURL:_websocketURL + restURL:_websocketRestURL + delegate:self]; if (_isLoopback) { _loopbackChannel = [[ARDLoopbackWebSocketChannel alloc] initWithURL:_websocketURL @@ -807,8 +844,9 @@ - (void)registerWithColliderIfReady { - (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultMediaAudioConstraints { NSDictionary *mandatoryConstraints = @{}; RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = - [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatoryConstraints - optionalConstraints:nil]; + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] + initWithMandatoryConstraints:mandatoryConstraints + optionalConstraints:nil]; return constraints; } @@ -817,13 +855,12 @@ - (void)registerWithColliderIfReady { } - (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultOfferConstraints { - NSDictionary *mandatoryConstraints = @{ - @"OfferToReceiveAudio" : @"true", - @"OfferToReceiveVideo" : @"true" - }; + NSDictionary *mandatoryConstraints = + @{@"OfferToReceiveAudio" : @"true", @"OfferToReceiveVideo" : @"true"}; RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = - [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatoryConstraints - optionalConstraints:nil]; + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] + initWithMandatoryConstraints:mandatoryConstraints + optionalConstraints:nil]; return constraints; } @@ -832,10 +869,11 @@ - (void)registerWithColliderIfReady { return _defaultPeerConnectionConstraints; } NSString *value = _isLoopback ? @"false" : @"true"; - NSDictionary *optionalConstraints = @{ @"DtlsSrtpKeyAgreement" : value }; + NSDictionary *optionalConstraints = @{@"DtlsSrtpKeyAgreement" : value}; RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = - [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil - optionalConstraints:optionalConstraints]; + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] + initWithMandatoryConstraints:nil + optionalConstraints:optionalConstraints]; return constraints; } @@ -847,19 +885,21 @@ + (NSError *)errorForJoinResultType:(ARDJoinResultType)resultType { case kARDJoinResultTypeSuccess: break; case kARDJoinResultTypeUnknown: { - error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain - code:kARDAppClientErrorUnknown - userInfo:@{ - NSLocalizedDescriptionKey: @"Unknown error.", - }]; + error = [[NSError alloc] + initWithDomain:kARDAppClientErrorDomain + code:kARDAppClientErrorUnknown + userInfo:@{ + NSLocalizedDescriptionKey : @"Unknown error.", + }]; break; } case kARDJoinResultTypeFull: { - error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain - code:kARDAppClientErrorRoomFull - userInfo:@{ - NSLocalizedDescriptionKey: @"Room is full.", - }]; + error = + [[NSError alloc] initWithDomain:kARDAppClientErrorDomain + code:kARDAppClientErrorRoomFull + userInfo:@{ + NSLocalizedDescriptionKey : @"Room is full.", + }]; break; } } @@ -872,25 +912,28 @@ + (NSError *)errorForMessageResultType:(ARDMessageResultType)resultType { case kARDMessageResultTypeSuccess: break; case kARDMessageResultTypeUnknown: - error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain - code:kARDAppClientErrorUnknown - userInfo:@{ - NSLocalizedDescriptionKey: @"Unknown error.", - }]; + error = [[NSError alloc] + initWithDomain:kARDAppClientErrorDomain + code:kARDAppClientErrorUnknown + userInfo:@{ + NSLocalizedDescriptionKey : @"Unknown error.", + }]; 
break; case kARDMessageResultTypeInvalidClient: - error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain - code:kARDAppClientErrorInvalidClient - userInfo:@{ - NSLocalizedDescriptionKey: @"Invalid client.", - }]; + error = [[NSError alloc] + initWithDomain:kARDAppClientErrorDomain + code:kARDAppClientErrorInvalidClient + userInfo:@{ + NSLocalizedDescriptionKey : @"Invalid client.", + }]; break; case kARDMessageResultTypeInvalidRoom: - error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain - code:kARDAppClientErrorInvalidRoom - userInfo:@{ - NSLocalizedDescriptionKey: @"Invalid room.", - }]; + error = + [[NSError alloc] initWithDomain:kARDAppClientErrorDomain + code:kARDAppClientErrorInvalidRoom + userInfo:@{ + NSLocalizedDescriptionKey : @"Invalid room.", + }]; break; } return error; diff --git a/examples/objc/AppRTCMobile/ARDAppEngineClient.m b/examples/objc/AppRTCMobile/ARDAppEngineClient.m index 5139de60d6..b7f8dcf784 100644 --- a/examples/objc/AppRTCMobile/ARDAppEngineClient.m +++ b/examples/objc/AppRTCMobile/ARDAppEngineClient.m @@ -18,18 +18,16 @@ #import "ARDUtilities.h" // TODO(tkchin): move these to a configuration object. -static NSString * const kARDRoomServerHostUrl = - @"https://appr.tc"; -static NSString * const kARDRoomServerJoinFormat = - @"https://appr.tc/join/%@"; -static NSString * const kARDRoomServerJoinFormatLoopback = +static NSString *const kARDRoomServerHostUrl = @"https://appr.tc"; +static NSString *const kARDRoomServerJoinFormat = @"https://appr.tc/join/%@"; +static NSString *const kARDRoomServerJoinFormatLoopback = @"https://appr.tc/join/%@?debug=loopback"; -static NSString * const kARDRoomServerMessageFormat = +static NSString *const kARDRoomServerMessageFormat = @"https://appr.tc/message/%@/%@"; -static NSString * const kARDRoomServerLeaveFormat = +static NSString *const kARDRoomServerLeaveFormat = @"https://appr.tc/leave/%@/%@"; -static NSString * const kARDAppEngineClientErrorDomain = @"ARDAppEngineClient"; +static NSString *const kARDAppEngineClientErrorDomain = @"ARDAppEngineClient"; static NSInteger const kARDAppEngineClientErrorBadResponse = -1; @implementation ARDAppEngineClient @@ -47,34 +45,36 @@ - (void)joinRoomWithRoomId:(NSString *)roomId urlString = [NSString stringWithFormat:kARDRoomServerJoinFormatLoopback, roomId]; } else { - urlString = - [NSString stringWithFormat:kARDRoomServerJoinFormat, roomId]; + urlString = [NSString stringWithFormat:kARDRoomServerJoinFormat, roomId]; } NSURL *roomURL = [NSURL URLWithString:urlString]; RTCLog(@"Joining room:%@ on room server.", roomId); NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:roomURL]; request.HTTPMethod = @"POST"; - [NSURLConnection sendAsyncRequest:request - completionHandler:^(NSURLResponse *response, NSData *data, NSError *error) { - if (error) { - if (completionHandler) { - completionHandler(nil, error); - } - return; - } - ARDJoinResponse *joinResponse = [ARDJoinResponse responseFromJSONData:data]; - if (!joinResponse) { - if (completionHandler) { - NSError *error = [[self class] badResponseError]; - completionHandler(nil, error); - } - return; - } - if (completionHandler) { - completionHandler(joinResponse, nil); - } - }]; + [NSURLConnection + sendAsyncRequest:request + completionHandler:^( + NSURLResponse *response __unused, NSData *data, NSError *error) { + if (error) { + if (completionHandler) { + completionHandler(nil, error); + } + return; + } + ARDJoinResponse *joinResponse = + [ARDJoinResponse responseFromJSONData:data]; + if (!joinResponse) { 
+ if (completionHandler) { + NSError *error = [[self class] badResponseError]; + completionHandler(nil, error); + } + return; + } + if (completionHandler) { + completionHandler(joinResponse, nil); + } + }]; } - (void)sendMessage:(ARDSignalingMessage *)message @@ -88,36 +88,35 @@ - (void)sendMessage:(ARDSignalingMessage *)message NSData *data = [message JSONData]; NSString *urlString = - [NSString stringWithFormat: - kARDRoomServerMessageFormat, roomId, clientId]; + [NSString stringWithFormat:kARDRoomServerMessageFormat, roomId, clientId]; NSURL *url = [NSURL URLWithString:urlString]; RTCLog(@"C->RS POST: %@", message); NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url]; request.HTTPMethod = @"POST"; request.HTTPBody = data; - [NSURLConnection sendAsyncRequest:request - completionHandler:^(NSURLResponse *response, - NSData *data, - NSError *error) { - if (error) { - if (completionHandler) { - completionHandler(nil, error); - } - return; - } - ARDMessageResponse *messageResponse = - [ARDMessageResponse responseFromJSONData:data]; - if (!messageResponse) { - if (completionHandler) { - NSError *error = [[self class] badResponseError]; - completionHandler(nil, error); - } - return; - } - if (completionHandler) { - completionHandler(messageResponse, nil); - } - }]; + [NSURLConnection + sendAsyncRequest:request + completionHandler:^( + NSURLResponse *response __unused, NSData *data, NSError *error) { + if (error) { + if (completionHandler) { + completionHandler(nil, error); + } + return; + } + ARDMessageResponse *messageResponse = + [ARDMessageResponse responseFromJSONData:data]; + if (!messageResponse) { + if (completionHandler) { + NSError *error = [[self class] badResponseError]; + completionHandler(nil, error); + } + return; + } + if (completionHandler) { + completionHandler(messageResponse, nil); + } + }]; } - (void)leaveRoomWithRoomId:(NSString *)roomId @@ -138,17 +137,21 @@ - (void)leaveRoomWithRoomId:(NSString *)roomId // We want a synchronous request so that we know that we've left the room on // room server before we do any further work. 
dispatch_semaphore_t sem = dispatch_semaphore_create(0); - [NSURLConnection sendAsyncRequest:request - completionHandler:^(NSURLResponse *response, NSData *data, NSError *e) { - if (e) { - error = e; - } - dispatch_semaphore_signal(sem); - }]; + [NSURLConnection + sendAsyncRequest:request + completionHandler:^( + NSURLResponse *response __unused, NSData *data __unused, NSError *e) { + if (e) { + error = e; + } + dispatch_semaphore_signal(sem); + }]; dispatch_semaphore_wait(sem, DISPATCH_TIME_FOREVER); if (error) { - RTCLogError(@"Error leaving room %@ on room server: %@", roomId, error.localizedDescription); + RTCLogError(@"Error leaving room %@ on room server: %@", + roomId, + error.localizedDescription); if (completionHandler) { completionHandler(error); } @@ -163,12 +166,12 @@ - (void)leaveRoomWithRoomId:(NSString *)roomId #pragma mark - Private + (NSError *)badResponseError { - NSError *error = - [[NSError alloc] initWithDomain:kARDAppEngineClientErrorDomain - code:kARDAppEngineClientErrorBadResponse - userInfo:@{ - NSLocalizedDescriptionKey: @"Error parsing response.", - }]; + NSError *error = [[NSError alloc] + initWithDomain:kARDAppEngineClientErrorDomain + code:kARDAppEngineClientErrorBadResponse + userInfo:@{ + NSLocalizedDescriptionKey : @"Error parsing response.", + }]; return error; } diff --git a/examples/objc/AppRTCMobile/ARDCaptureController.h b/examples/objc/AppRTCMobile/ARDCaptureController.h index 4febccee96..a94f460af6 100644 --- a/examples/objc/AppRTCMobile/ARDCaptureController.h +++ b/examples/objc/AppRTCMobile/ARDCaptureController.h @@ -15,7 +15,8 @@ // Controls the camera. Handles starting the capture, switching cameras etc. @interface ARDCaptureController : NSObject -- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer +- (instancetype)initWithCapturer: + (RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer settings:(ARDSettingsModel *)settings; - (void)startCapture; - (void)startCapture:(void (^)(NSError *))completion; diff --git a/examples/objc/AppRTCMobile/ARDCaptureController.m b/examples/objc/AppRTCMobile/ARDCaptureController.m index 26cce9fdaa..8e782fdddc 100644 --- a/examples/objc/AppRTCMobile/ARDCaptureController.m +++ b/examples/objc/AppRTCMobile/ARDCaptureController.m @@ -22,14 +22,15 @@ @implementation ARDCaptureController { BOOL _usingFrontCamera; } -- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer +- (instancetype)initWithCapturer: + (RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer settings:(ARDSettingsModel *)settings { - if (self = [super init]) { + self = [super init]; + if (self) { _capturer = capturer; _settings = settings; _usingFrontCamera = YES; } - return self; } @@ -38,8 +39,9 @@ - (void)startCapture { } - (void)startCapture:(void (^)(NSError *))completion { - AVCaptureDevicePosition position = - _usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack; + AVCaptureDevicePosition position = _usingFrontCamera ? 
+ AVCaptureDevicePositionFront : + AVCaptureDevicePositionBack; AVCaptureDevice *device = [self findDeviceForPosition:position]; AVCaptureDeviceFormat *format = [self selectFormatForDevice:device]; @@ -52,7 +54,10 @@ - (void)startCapture:(void (^)(NSError *))completion { NSInteger fps = [self selectFpsForFormat:format]; - [_capturer startCaptureWithDevice:device format:format fps:fps completionHandler:completion]; + [_capturer startCaptureWithDevice:device + format:format + fps:fps + completionHandler:completion]; } - (void)stopCapture { @@ -91,13 +96,17 @@ - (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device { int currentDiff = INT_MAX; for (AVCaptureDeviceFormat *format in formats) { - CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription); - int diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height); + CMVideoDimensions dimension = + CMVideoFormatDescriptionGetDimensions(format.formatDescription); + FourCharCode pixelFormat = + CMFormatDescriptionGetMediaSubType(format.formatDescription); + int diff = abs(targetWidth - dimension.width) + + abs(targetHeight - dimension.height); if (diff < currentDiff) { selectedFormat = format; currentDiff = diff; - } else if (diff == currentDiff && pixelFormat == [_capturer preferredOutputPixelFormat]) { + } else if (diff == currentDiff && + pixelFormat == [_capturer preferredOutputPixelFormat]) { selectedFormat = format; } } diff --git a/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.m b/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.m index 8bf6716ddb..95cc6cf5bd 100644 --- a/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.m +++ b/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.m @@ -21,14 +21,16 @@ @implementation ARDExternalSampleCapturer -- (instancetype)initWithDelegate:(__weak id)delegate { +- (instancetype)initWithDelegate: + (__weak id)delegate { return [super initWithDelegate:delegate]; } #pragma mark - ARDExternalSampleDelegate - (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer { - if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) || + if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || + !CMSampleBufferIsValid(sampleBuffer) || !CMSampleBufferDataIsReady(sampleBuffer)) { return; } @@ -41,7 +43,8 @@ - (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer { RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer]; int64_t timeStampNs = - CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC; + CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * + NSEC_PER_SEC; RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer rotation:RTCVideoRotation_0 diff --git a/examples/objc/AppRTCMobile/ARDRoomServerClient.h b/examples/objc/AppRTCMobile/ARDRoomServerClient.h index 3a5818d6d6..70694a8c9f 100644 --- a/examples/objc/AppRTCMobile/ARDRoomServerClient.h +++ b/examples/objc/AppRTCMobile/ARDRoomServerClient.h @@ -18,12 +18,14 @@ - (void)joinRoomWithRoomId:(NSString *)roomId isLoopback:(BOOL)isLoopback - completionHandler:(void (^)(ARDJoinResponse *response, NSError *error))completionHandler; + completionHandler:(void (^)(ARDJoinResponse *response, + NSError *error))completionHandler; - (void)sendMessage:(ARDSignalingMessage 
*)message forRoomId:(NSString *)roomId clientId:(NSString *)clientId - completionHandler:(void (^)(ARDMessageResponse *response, NSError *error))completionHandler; + completionHandler:(void (^)(ARDMessageResponse *response, + NSError *error))completionHandler; - (void)leaveRoomWithRoomId:(NSString *)roomId clientId:(NSString *)clientId diff --git a/examples/objc/AppRTCMobile/ARDSettingsModel.h b/examples/objc/AppRTCMobile/ARDSettingsModel.h index 47c7defacd..117040a1c2 100644 --- a/examples/objc/AppRTCMobile/ARDSettingsModel.h +++ b/examples/objc/AppRTCMobile/ARDSettingsModel.h @@ -34,7 +34,8 @@ NS_ASSUME_NONNULL_BEGIN /** * Returns current video resolution string. * If no resolution is in store, default value of 640x480 is returned. - * When defaulting to value, the default is saved in store for consistency reasons. + * When defaulting to value, the default is saved in store for consistency + * reasons. */ - (NSString *)currentVideoResolutionSettingFromStore; - (int)currentVideoResolutionWidthFromStore; @@ -56,7 +57,8 @@ NS_ASSUME_NONNULL_BEGIN - (NSArray *)availableVideoCodecs; /** - * Returns current video codec setting from store if present or default (H264) otherwise. + * Returns current video codec setting from store if present or default (H264) + * otherwise. */ - (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)currentVideoCodecSettingFromStore; @@ -83,7 +85,8 @@ NS_ASSUME_NONNULL_BEGIN - (void)storeMaxBitrateSetting:(nullable NSNumber *)bitrate; /** - * Returns current audio only setting from store if present or default (NO) otherwise. + * Returns current audio only setting from store if present or default (NO) + * otherwise. */ - (BOOL)currentAudioOnlySettingFromStore; @@ -95,7 +98,8 @@ NS_ASSUME_NONNULL_BEGIN - (void)storeAudioOnlySetting:(BOOL)audioOnly; /** - * Returns current create AecDump setting from store if present or default (NO) otherwise. + * Returns current create AecDump setting from store if present or default (NO) + * otherwise. */ - (BOOL)currentCreateAecDumpSettingFromStore; @@ -107,8 +111,8 @@ NS_ASSUME_NONNULL_BEGIN - (void)storeCreateAecDumpSetting:(BOOL)createAecDump; /** - * Returns current setting whether to use manual audio config from store if present or default (YES) - * otherwise. + * Returns current setting whether to use manual audio config from store if + * present or default (YES) otherwise. 
*/ - (BOOL)currentUseManualAudioConfigSettingFromStore; diff --git a/examples/objc/AppRTCMobile/ARDSettingsModel.m b/examples/objc/AppRTCMobile/ARDSettingsModel.m index 9e709b0553..e37ccbdb92 100644 --- a/examples/objc/AppRTCMobile/ARDSettingsModel.m +++ b/examples/objc/AppRTCMobile/ARDSettingsModel.m @@ -27,19 +27,21 @@ @implementation ARDSettingsModel - (NSArray *)availableVideoResolutions { NSMutableSet *> *resolutions = [[NSMutableSet *> alloc] init]; - for (AVCaptureDevice *device in [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices]) { - for (AVCaptureDeviceFormat *format in - [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device]) { + for (AVCaptureDevice *device in + [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices]) { + for (AVCaptureDeviceFormat *format in [RTC_OBJC_TYPE(RTCCameraVideoCapturer) + supportedFormatsForDevice:device]) { CMVideoDimensions resolution = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - NSArray *resolutionObject = @[ @(resolution.width), @(resolution.height) ]; + NSArray *resolutionObject = + @[ @(resolution.width), @(resolution.height) ]; [resolutions addObject:resolutionObject]; } } - NSArray *> *sortedResolutions = - [[resolutions allObjects] sortedArrayUsingComparator:^NSComparisonResult( - NSArray *obj1, NSArray *obj2) { + NSArray *> *sortedResolutions = [[resolutions allObjects] + sortedArrayUsingComparator:^NSComparisonResult( + NSArray *obj1, NSArray *obj2) { NSComparisonResult cmp = [obj1.firstObject compare:obj2.firstObject]; if (cmp != NSOrderedSame) { return cmp; @@ -47,10 +49,13 @@ @implementation ARDSettingsModel return [obj1.lastObject compare:obj2.lastObject]; }]; - NSMutableArray *resolutionStrings = [[NSMutableArray alloc] init]; + NSMutableArray *resolutionStrings = + [[NSMutableArray alloc] init]; for (NSArray *resolution in sortedResolutions) { NSString *resolutionString = - [NSString stringWithFormat:@"%@x%@", resolution.firstObject, resolution.lastObject]; + [NSString stringWithFormat:@"%@x%@", + resolution.firstObject, + resolution.lastObject]; [resolutionStrings addObject:resolutionString]; } @@ -81,7 +86,9 @@ - (BOOL)storeVideoResolutionSetting:(NSString *)resolution { Class expectedClass = [RTC_OBJC_TYPE(RTCVideoCodecInfo) class]; NSError *error; RTC_OBJC_TYPE(RTCVideoCodecInfo) *videoCodecSetting = - [NSKeyedUnarchiver unarchivedObjectOfClass:expectedClass fromData:codecData error:&error]; + [NSKeyedUnarchiver unarchivedObjectOfClass:expectedClass + fromData:codecData + error:&error]; if (!error) { return videoCodecSetting; } @@ -176,11 +183,13 @@ - (NSString *)defaultVideoResolutionSetting { return [self availableVideoCodecs].firstObject; } -- (int)videoResolutionComponentAtIndex:(int)index inString:(NSString *)resolution { +- (int)videoResolutionComponentAtIndex:(int)index + inString:(NSString *)resolution { if (index != 0 && index != 1) { return 0; } - NSArray *components = [resolution componentsSeparatedByString:@"x"]; + NSArray *components = + [resolution componentsSeparatedByString:@"x"]; if (components.count != 2) { return 0; } @@ -190,22 +199,25 @@ - (int)videoResolutionComponentAtIndex:(int)index inString:(NSString *)resolutio - (void)registerStoreDefaults { #if defined(WEBRTC_IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_13 NSError *error; - NSData *codecData = [NSKeyedArchiver archivedDataWithRootObject:[self defaultVideoCodecSetting] - requiringSecureCoding:NO - error:&error]; + NSData *codecData = [NSKeyedArchiver + archivedDataWithRootObject:[self 
defaultVideoCodecSetting] + requiringSecureCoding:NO + error:&error]; if (error) { return; } #else - NSData *codecData = [NSKeyedArchiver archivedDataWithRootObject:[self defaultVideoCodecSetting]]; + NSData *codecData = [NSKeyedArchiver + archivedDataWithRootObject:[self defaultVideoCodecSetting]]; #endif - [ARDSettingsStore setDefaultsForVideoResolution:[self defaultVideoResolutionSetting] - videoCodec:codecData - bitrate:nil - audioOnly:NO - createAecDump:NO - useManualAudioConfig:YES]; + [ARDSettingsStore + setDefaultsForVideoResolution:[self defaultVideoResolutionSetting] + videoCodec:codecData + bitrate:nil + audioOnly:NO + createAecDump:NO + useManualAudioConfig:YES]; } @end NS_ASSUME_NONNULL_END diff --git a/examples/objc/AppRTCMobile/ARDSettingsStore.m b/examples/objc/AppRTCMobile/ARDSettingsStore.m index a3713e2f0e..6983a4477e 100644 --- a/examples/objc/AppRTCMobile/ARDSettingsStore.m +++ b/examples/objc/AppRTCMobile/ARDSettingsStore.m @@ -15,7 +15,8 @@ static NSString *const kBitrateKey = @"rtc_max_bitrate_key"; static NSString *const kAudioOnlyKey = @"rtc_audio_only_key"; static NSString *const kCreateAecDumpKey = @"rtc_create_aec_dump_key"; -static NSString *const kUseManualAudioConfigKey = @"rtc_use_manual_audio_config_key"; +static NSString *const kUseManualAudioConfigKey = + @"rtc_use_manual_audio_config_key"; NS_ASSUME_NONNULL_BEGIN @interface ARDSettingsStore () { diff --git a/examples/objc/AppRTCMobile/ARDSignalingChannel.h b/examples/objc/AppRTCMobile/ARDSignalingChannel.h index 396b117b17..c484d71e54 100644 --- a/examples/objc/AppRTCMobile/ARDSignalingChannel.h +++ b/examples/objc/AppRTCMobile/ARDSignalingChannel.h @@ -26,9 +26,11 @@ typedef NS_ENUM(NSInteger, ARDSignalingChannelState) { @protocol ARDSignalingChannel; @protocol ARDSignalingChannelDelegate -- (void)channel:(id)channel didChangeState:(ARDSignalingChannelState)state; +- (void)channel:(id)channel + didChangeState:(ARDSignalingChannelState)state; -- (void)channel:(id)channel didReceiveMessage:(ARDSignalingMessage *)message; +- (void)channel:(id)channel + didReceiveMessage:(ARDSignalingMessage *)message; @end diff --git a/examples/objc/AppRTCMobile/ARDSignalingMessage.h b/examples/objc/AppRTCMobile/ARDSignalingMessage.h index ac19e8fba7..b9c75f27f6 100644 --- a/examples/objc/AppRTCMobile/ARDSignalingMessage.h +++ b/examples/objc/AppRTCMobile/ARDSignalingMessage.h @@ -40,17 +40,21 @@ typedef enum { @interface ARDICECandidateRemovalMessage : ARDSignalingMessage -@property(nonatomic, readonly) NSArray *candidates; +@property(nonatomic, readonly) + NSArray *candidates; -- (instancetype)initWithRemovedCandidates:(NSArray *)candidates; +- (instancetype)initWithRemovedCandidates: + (NSArray *)candidates; @end @interface ARDSessionDescriptionMessage : ARDSignalingMessage -@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCSessionDescription) * + sessionDescription; -- (instancetype)initWithDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)description; +- (instancetype)initWithDescription: + (RTC_OBJC_TYPE(RTCSessionDescription) *)description; @end diff --git a/examples/objc/AppRTCMobile/ARDSignalingMessage.m b/examples/objc/AppRTCMobile/ARDSignalingMessage.m index 049c0f5b0a..b1a07b071f 100644 --- a/examples/objc/AppRTCMobile/ARDSignalingMessage.m +++ b/examples/objc/AppRTCMobile/ARDSignalingMessage.m @@ -16,15 +16,16 @@ #import "RTCIceCandidate+JSON.h" #import "RTCSessionDescription+JSON.h" -static NSString * const 
kARDSignalingMessageTypeKey = @"type"; -static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates"; +static NSString *const kARDSignalingMessageTypeKey = @"type"; +static NSString *const kARDTypeValueRemoveCandidates = @"remove-candidates"; @implementation ARDSignalingMessage @synthesize type = _type; - (instancetype)initWithType:(ARDSignalingMessageType)type { - if (self = [super init]) { + self = [super init]; + if (self) { _type = type; } return self; @@ -53,11 +54,12 @@ + (ARDSignalingMessage *)messageFromJSONString:(NSString *)jsonString { NSArray *candidates = [RTC_OBJC_TYPE(RTCIceCandidate) candidatesFromJSONDictionary:values]; message = [[ARDICECandidateRemovalMessage alloc] - initWithRemovedCandidates:candidates]; + initWithRemovedCandidates:candidates]; } else if ([typeString isEqualToString:@"offer"] || [typeString isEqualToString:@"answer"]) { RTC_OBJC_TYPE(RTCSessionDescription) *description = - [RTC_OBJC_TYPE(RTCSessionDescription) descriptionFromJSONDictionary:values]; + [RTC_OBJC_TYPE(RTCSessionDescription) + descriptionFromJSONDictionary:values]; message = [[ARDSessionDescriptionMessage alloc] initWithDescription:description]; } else if ([typeString isEqualToString:@"bye"]) { @@ -79,7 +81,8 @@ @implementation ARDICECandidateMessage @synthesize candidate = _candidate; - (instancetype)initWithCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate { - if (self = [super initWithType:kARDSignalingMessageTypeCandidate]) { + self = [super initWithType:kARDSignalingMessageTypeCandidate]; + if (self) { _candidate = candidate; } return self; @@ -95,17 +98,20 @@ @implementation ARDICECandidateRemovalMessage @synthesize candidates = _candidates; -- (instancetype)initWithRemovedCandidates:(NSArray *)candidates { +- (instancetype)initWithRemovedCandidates: + (NSArray *)candidates { NSParameterAssert(candidates.count); - if (self = [super initWithType:kARDSignalingMessageTypeCandidateRemoval]) { + self = [super initWithType:kARDSignalingMessageTypeCandidateRemoval]; + if (self) { _candidates = candidates; } return self; } - (NSData *)JSONData { - return [RTC_OBJC_TYPE(RTCIceCandidate) JSONDataForIceCandidates:_candidates - withType:kARDTypeValueRemoveCandidates]; + return [RTC_OBJC_TYPE(RTCIceCandidate) + JSONDataForIceCandidates:_candidates + withType:kARDTypeValueRemoveCandidates]; } @end @@ -114,7 +120,8 @@ @implementation ARDSessionDescriptionMessage @synthesize sessionDescription = _sessionDescription; -- (instancetype)initWithDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)description { +- (instancetype)initWithDescription: + (RTC_OBJC_TYPE(RTCSessionDescription) *)description { ARDSignalingMessageType messageType = kARDSignalingMessageTypeOffer; RTCSdpType sdpType = description.type; switch (sdpType) { @@ -126,11 +133,13 @@ - (instancetype)initWithDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)desc break; case RTCSdpTypePrAnswer: case RTCSdpTypeRollback: - NSAssert( - NO, @"Unexpected type: %@", [RTC_OBJC_TYPE(RTCSessionDescription) stringForType:sdpType]); + NSAssert(NO, + @"Unexpected type: %@", + [RTC_OBJC_TYPE(RTCSessionDescription) stringForType:sdpType]); break; } - if (self = [super initWithType:messageType]) { + self = [super initWithType:messageType]; + if (self) { _sessionDescription = description; } return self; @@ -149,9 +158,7 @@ - (instancetype)init { } - (NSData *)JSONData { - NSDictionary *message = @{ - @"type": @"bye" - }; + NSDictionary *message = @{@"type" : @"bye"}; return [NSJSONSerialization dataWithJSONObject:message 
options:NSJSONWritingPrettyPrinted error:NULL]; diff --git a/examples/objc/AppRTCMobile/ARDStatsBuilder.m b/examples/objc/AppRTCMobile/ARDStatsBuilder.m index 7ebf9fb1c7..b411756b8b 100644 --- a/examples/objc/AppRTCMobile/ARDStatsBuilder.m +++ b/examples/objc/AppRTCMobile/ARDStatsBuilder.m @@ -33,4 +33,3 @@ - (NSString *)statsString { } @end - diff --git a/examples/objc/AppRTCMobile/ARDTURNClient.h b/examples/objc/AppRTCMobile/ARDTURNClient.h index 0399736f03..13368a108d 100644 --- a/examples/objc/AppRTCMobile/ARDTURNClient.h +++ b/examples/objc/AppRTCMobile/ARDTURNClient.h @@ -17,7 +17,7 @@ @protocol ARDTURNClient // Returns TURN server urls if successful. -- (void)requestServersWithCompletionHandler:(void (^)(NSArray *turnServers, - NSError *error))completionHandler; +- (void)requestServersWithCompletionHandler: + (void (^)(NSArray *turnServers, NSError *error))completionHandler; @end diff --git a/examples/objc/AppRTCMobile/ARDTURNClient.m b/examples/objc/AppRTCMobile/ARDTURNClient.m index 069231cd7e..9c1173518d 100644 --- a/examples/objc/AppRTCMobile/ARDTURNClient.m +++ b/examples/objc/AppRTCMobile/ARDTURNClient.m @@ -24,7 +24,8 @@ @implementation ARDTURNClient { - (instancetype)initWithURL:(NSURL *)url { NSParameterAssert([url absoluteString].length); - if (self = [super init]) { + self = [super init]; + if (self) { _url = url; } return self; @@ -32,19 +33,20 @@ - (instancetype)initWithURL:(NSURL *)url { - (void)requestServersWithCompletionHandler: (void (^)(NSArray *turnServers, NSError *error))completionHandler { - NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:_url]; - [NSURLConnection sendAsyncRequest:request - completionHandler:^(NSURLResponse *response, NSData *data, NSError *error) { - if (error) { - completionHandler(nil, error); - return; - } - NSDictionary *responseDict = [NSDictionary dictionaryWithJSONData:data]; - NSString *iceServerUrl = responseDict[@"ice_server_url"]; - [self makeTurnServerRequestToURL:[NSURL URLWithString:iceServerUrl] - WithCompletionHandler:completionHandler]; - }]; + [NSURLConnection + sendAsyncRequest:request + completionHandler:^( + NSURLResponse *response __unused, NSData *data, NSError *error) { + if (error) { + completionHandler(nil, error); + return; + } + NSDictionary *responseDict = [NSDictionary dictionaryWithJSONData:data]; + NSString *iceServerUrl = responseDict[@"ice_server_url"]; + [self makeTurnServerRequestToURL:[NSURL URLWithString:iceServerUrl] + WithCompletionHandler:completionHandler]; + }]; } #pragma mark - Private @@ -52,35 +54,41 @@ - (void)requestServersWithCompletionHandler: - (void)makeTurnServerRequestToURL:(NSURL *)url WithCompletionHandler:(void (^)(NSArray *turnServers, NSError *error))completionHandler { - NSMutableURLRequest *iceServerRequest = [NSMutableURLRequest requestWithURL:url]; + NSMutableURLRequest *iceServerRequest = + [NSMutableURLRequest requestWithURL:url]; iceServerRequest.HTTPMethod = @"POST"; - [iceServerRequest addValue:kTURNRefererURLString forHTTPHeaderField:@"referer"]; - [NSURLConnection sendAsyncRequest:iceServerRequest - completionHandler:^(NSURLResponse *response, - NSData *data, - NSError *error) { - if (error) { - completionHandler(nil, error); - return; - } - NSDictionary *turnResponseDict = [NSDictionary dictionaryWithJSONData:data]; - NSMutableArray *turnServers = [NSMutableArray array]; - [turnResponseDict[@"iceServers"] - enumerateObjectsUsingBlock:^(NSDictionary *obj, NSUInteger idx, BOOL *stop) { - [turnServers addObject:[RTC_OBJC_TYPE(RTCIceServer) 
serverFromJSONDictionary:obj]]; - }]; - if (!turnServers) { - NSError *responseError = - [[NSError alloc] initWithDomain:kARDTURNClientErrorDomain - code:kARDTURNClientErrorBadResponse - userInfo:@{ - NSLocalizedDescriptionKey: @"Bad TURN response.", + [iceServerRequest addValue:kTURNRefererURLString + forHTTPHeaderField:@"referer"]; + [NSURLConnection + sendAsyncRequest:iceServerRequest + completionHandler:^( + NSURLResponse *response __unused, NSData *data, NSError *error) { + if (error) { + completionHandler(nil, error); + return; + } + NSDictionary *turnResponseDict = + [NSDictionary dictionaryWithJSONData:data]; + NSMutableArray *turnServers = [NSMutableArray array]; + [turnResponseDict[@"iceServers"] + enumerateObjectsUsingBlock:^(NSDictionary *obj, + NSUInteger idx __unused, + BOOL *stop __unused) { + [turnServers addObject:[RTC_OBJC_TYPE(RTCIceServer) + serverFromJSONDictionary:obj]]; }]; - completionHandler(nil, responseError); - return; - } - completionHandler(turnServers, nil); - }]; + if (!turnServers) { + NSError *responseError = [[NSError alloc] + initWithDomain:kARDTURNClientErrorDomain + code:kARDTURNClientErrorBadResponse + userInfo:@{ + NSLocalizedDescriptionKey : @"Bad TURN response.", + }]; + completionHandler(nil, responseError); + return; + } + completionHandler(turnServers, nil); + }]; } @end diff --git a/examples/objc/AppRTCMobile/ARDWebSocketChannel.m b/examples/objc/AppRTCMobile/ARDWebSocketChannel.m index bbb0bf87f8..37260d63ba 100644 --- a/examples/objc/AppRTCMobile/ARDWebSocketChannel.m +++ b/examples/objc/AppRTCMobile/ARDWebSocketChannel.m @@ -38,7 +38,8 @@ @implementation ARDWebSocketChannel { - (instancetype)initWithURL:(NSURL *)url restURL:(NSURL *)restURL delegate:(id)delegate { - if (self = [super init]) { + self = [super init]; + if (self) { _url = url; _restURL = restURL; _delegate = delegate; @@ -62,8 +63,7 @@ - (void)setState:(ARDSignalingChannelState)state { [_delegate channel:self didChangeState:_state]; } -- (void)registerForRoomId:(NSString *)roomId - clientId:(NSString *)clientId { +- (void)registerForRoomId:(NSString *)roomId clientId:(NSString *)clientId { NSParameterAssert(roomId.length); NSParameterAssert(clientId.length); _roomId = roomId; @@ -78,11 +78,11 @@ - (void)sendMessage:(ARDSignalingMessage *)message { NSParameterAssert(_roomId.length); NSData *data = [message JSONData]; if (_state == kARDSignalingChannelStateRegistered) { - NSString *payload = - [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding]; + NSString *payload = [[NSString alloc] initWithData:data + encoding:NSUTF8StringEncoding]; NSDictionary *message = @{ - @"cmd": @"send", - @"msg": payload, + @"cmd" : @"send", + @"msg" : payload, }; NSData *messageJSONObject = [NSJSONSerialization dataWithJSONObject:message @@ -94,12 +94,13 @@ - (void)sendMessage:(ARDSignalingMessage *)message { RTCLog(@"C->WSS: %@", messageString); [_socket send:messageString]; } else { - NSString *dataString = - [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding]; + NSString *dataString = [[NSString alloc] initWithData:data + encoding:NSUTF8StringEncoding]; RTCLog(@"C->WSS POST: %@", dataString); - NSString *urlString = - [NSString stringWithFormat:@"%@/%@/%@", - [_restURL absoluteString], _roomId, _clientId]; + NSString *urlString = [NSString stringWithFormat:@"%@/%@/%@", + [_restURL absoluteString], + _roomId, + _clientId]; NSURL *url = [NSURL URLWithString:urlString]; [NSURLConnection sendAsyncPostToURL:url withData:data @@ -114,9 +115,10 @@ - (void)disconnect { } 
[_socket close]; RTCLog(@"C->WSS DELETE rid:%@ cid:%@", _roomId, _clientId); - NSString *urlString = - [NSString stringWithFormat:@"%@/%@/%@", - [_restURL absoluteString], _roomId, _clientId]; + NSString *urlString = [NSString stringWithFormat:@"%@/%@/%@", + [_restURL absoluteString], + _roomId, + _clientId]; NSURL *url = [NSURL URLWithString:urlString]; NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url]; request.HTTPMethod = @"DELETE"; @@ -167,7 +169,9 @@ - (void)webSocket:(SRWebSocket *)webSocket reason:(NSString *)reason wasClean:(BOOL)wasClean { RTCLog(@"WebSocket closed with code: %ld reason:%@ wasClean:%d", - (long)code, reason, wasClean); + (long)code, + reason, + wasClean); NSParameterAssert(_state != kARDSignalingChannelStateError); self.state = kARDSignalingChannelStateClosed; } @@ -181,7 +185,7 @@ - (void)registerWithCollider { NSParameterAssert(_roomId.length); NSParameterAssert(_clientId.length); NSDictionary *registerMessage = @{ - @"cmd": @"register", + @"cmd" : @"register", @"roomid" : _roomId, @"clientid" : _clientId, }; @@ -218,15 +222,17 @@ - (void)channel:(id)channel // Change message to answer, send back to server. ARDSessionDescriptionMessage *sdpMessage = (ARDSessionDescriptionMessage *)message; - RTC_OBJC_TYPE(RTCSessionDescription) *description = sdpMessage.sessionDescription; + RTC_OBJC_TYPE(RTCSessionDescription) *description = + sdpMessage.sessionDescription; NSString *dsc = description.sdp; dsc = [dsc stringByReplacingOccurrencesOfString:@"offer" withString:@"answer"]; RTC_OBJC_TYPE(RTCSessionDescription) *answerDescription = - [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer sdp:dsc]; - ARDSignalingMessage *answer = - [[ARDSessionDescriptionMessage alloc] - initWithDescription:answerDescription]; + [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] + initWithType:RTCSdpTypeAnswer + sdp:dsc]; + ARDSignalingMessage *answer = [[ARDSessionDescriptionMessage alloc] + initWithDescription:answerDescription]; [self sendMessage:answer]; break; } @@ -249,4 +255,3 @@ - (void)channel:(id)channel } @end - diff --git a/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.h b/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.h index 5fd823f2de..7f02710ef3 100644 --- a/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.h +++ b/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.h @@ -13,10 +13,12 @@ @interface RTC_OBJC_TYPE (RTCIceCandidate) (JSON) - + (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary : (NSDictionary *)dictionary; + + (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary + : (NSDictionary *)dictionary; + (NSArray *)candidatesFromJSONDictionary: (NSDictionary *)dictionary; -+ (NSData *)JSONDataForIceCandidates:(NSArray *)candidates ++ (NSData *)JSONDataForIceCandidates: + (NSArray *)candidates withType:(NSString *)typeValue; - (NSData *)JSONData; diff --git a/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.m b/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.m index 99cefbff0b..a7c9ea5b22 100644 --- a/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.m +++ b/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.m @@ -22,7 +22,8 @@ @implementation RTC_OBJC_TYPE (RTCIceCandidate) (JSON) - + (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary : (NSDictionary *)dictionary { + + (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary + : (NSDictionary *)dictionary { NSString *mid = dictionary[kRTCICECandidateMidKey]; NSString *sdp = dictionary[kRTCICECandidateSdpKey]; NSNumber *num = 
dictionary[kRTCICECandidateMLineIndexKey]; @@ -32,7 +33,8 @@ @implementation RTC_OBJC_TYPE (RTCIceCandidate) sdpMid:mid]; } -+ (NSData *)JSONDataForIceCandidates:(NSArray *)candidates ++ (NSData *)JSONDataForIceCandidates: + (NSArray *)candidates withType:(NSString *)typeValue { NSMutableArray *jsonCandidates = [NSMutableArray arrayWithCapacity:candidates.count]; @@ -62,8 +64,8 @@ + (NSData *)JSONDataForIceCandidates:(NSArray NSMutableArray *candidates = [NSMutableArray arrayWithCapacity:jsonCandidates.count]; for (NSDictionary *jsonCandidate in jsonCandidates) { - RTC_OBJC_TYPE(RTCIceCandidate) *candidate = - [RTC_OBJC_TYPE(RTCIceCandidate) candidateFromJSONDictionary:jsonCandidate]; + RTC_OBJC_TYPE(RTCIceCandidate) *candidate = [RTC_OBJC_TYPE(RTCIceCandidate) + candidateFromJSONDictionary:jsonCandidate]; [candidates addObject:candidate]; } return candidates; @@ -88,7 +90,7 @@ - (NSData *)JSONData { return data; } -- (NSDictionary *)JSONDictionary{ +- (NSDictionary *)JSONDictionary { NSDictionary *json = @{ kRTCICECandidateMLineIndexKey : @(self.sdpMLineIndex), kRTCICECandidateMidKey : self.sdpMid, diff --git a/examples/objc/AppRTCMobile/RTCIceServer+JSON.h b/examples/objc/AppRTCMobile/RTCIceServer+JSON.h index 35f6af7583..4106470c48 100644 --- a/examples/objc/AppRTCMobile/RTCIceServer+JSON.h +++ b/examples/objc/AppRTCMobile/RTCIceServer+JSON.h @@ -13,6 +13,7 @@ @interface RTC_OBJC_TYPE (RTCIceServer) (JSON) - + (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary : (NSDictionary *)dictionary; + + (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary + : (NSDictionary *)dictionary; @end diff --git a/examples/objc/AppRTCMobile/RTCIceServer+JSON.m b/examples/objc/AppRTCMobile/RTCIceServer+JSON.m index b5272a2f64..08dceadc4c 100644 --- a/examples/objc/AppRTCMobile/RTCIceServer+JSON.m +++ b/examples/objc/AppRTCMobile/RTCIceServer+JSON.m @@ -13,7 +13,8 @@ @implementation RTC_OBJC_TYPE (RTCIceServer) (JSON) - + (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary : (NSDictionary *)dictionary { + + (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary + : (NSDictionary *)dictionary { NSArray *turnUrls = dictionary[@"urls"]; NSString *username = dictionary[@"username"] ?: @""; NSString *credential = dictionary[@"credential"] ?: @""; diff --git a/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.m b/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.m index 28268faa84..17888c9e49 100644 --- a/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.m +++ b/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.m @@ -21,7 +21,8 @@ @implementation RTC_OBJC_TYPE (RTCSessionDescription) NSString *typeString = dictionary[kRTCSessionDescriptionTypeKey]; RTCSdpType type = [[self class] typeForString:typeString]; NSString *sdp = dictionary[kRTCSessionDescriptionSdpKey]; - return [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:type sdp:sdp]; + return [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:type + sdp:sdp]; } - (NSData *)JSONData { diff --git a/examples/objc/AppRTCMobile/common/ARDUtilities.h b/examples/objc/AppRTCMobile/common/ARDUtilities.h index 5f0d7dbef7..25f786017d 100644 --- a/examples/objc/AppRTCMobile/common/ARDUtilities.h +++ b/examples/objc/AppRTCMobile/common/ARDUtilities.h @@ -22,13 +22,15 @@ // Issues an asynchronous request that calls back on main queue. 
+ (void)sendAsyncRequest:(NSURLRequest *)request - completionHandler: - (void (^)(NSURLResponse *response, NSData *data, NSError *error))completionHandler; + completionHandler:(void (^)(NSURLResponse *response, + NSData *data, + NSError *error))completionHandler; // Posts data to the specified URL. + (void)sendAsyncPostToURL:(NSURL *)url withData:(NSData *)data - completionHandler:(void (^)(BOOL succeeded, NSData *data))completionHandler; + completionHandler: + (void (^)(BOOL succeeded, NSData *data))completionHandler; @end diff --git a/examples/objc/AppRTCMobile/common/ARDUtilities.m b/examples/objc/AppRTCMobile/common/ARDUtilities.m index e0674f5210..9ac5508a51 100644 --- a/examples/objc/AppRTCMobile/common/ARDUtilities.m +++ b/examples/objc/AppRTCMobile/common/ARDUtilities.m @@ -20,8 +20,9 @@ + (NSDictionary *)dictionaryWithJSONString:(NSString *)jsonString { NSParameterAssert(jsonString.length > 0); NSData *data = [jsonString dataUsingEncoding:NSUTF8StringEncoding]; NSError *error = nil; - NSDictionary *dict = - [NSJSONSerialization JSONObjectWithData:data options:0 error:&error]; + NSDictionary *dict = [NSJSONSerialization JSONObjectWithData:data + options:0 + error:&error]; if (error) { RTCLogError(@"Error parsing JSON: %@", error.localizedDescription); } @@ -30,8 +31,9 @@ + (NSDictionary *)dictionaryWithJSONString:(NSString *)jsonString { + (NSDictionary *)dictionaryWithJSONData:(NSData *)jsonData { NSError *error = nil; - NSDictionary *dict = - [NSJSONSerialization JSONObjectWithData:jsonData options:0 error:&error]; + NSDictionary *dict = [NSJSONSerialization JSONObjectWithData:jsonData + options:0 + error:&error]; if (error) { RTCLogError(@"Error parsing JSON: %@", error.localizedDescription); } @@ -49,7 +51,8 @@ + (void)sendAsyncRequest:(NSURLRequest *)request // Kick off an async request which will call back on main thread. NSURLSession *session = [NSURLSession sharedSession]; [[session dataTaskWithRequest:request - completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) { + completionHandler:^( + NSData *data, NSURLResponse *response, NSError *error) { if (completionHandler) { completionHandler(response, data, error); } @@ -59,37 +62,38 @@ + (void)sendAsyncRequest:(NSURLRequest *)request // Posts data to the specified URL. + (void)sendAsyncPostToURL:(NSURL *)url withData:(NSData *)data - completionHandler:(void (^)(BOOL succeeded, - NSData *data))completionHandler { + completionHandler: + (void (^)(BOOL succeeded, NSData *data))completionHandler { NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url]; request.HTTPMethod = @"POST"; request.HTTPBody = data; - [[self class] sendAsyncRequest:request - completionHandler:^(NSURLResponse *response, - NSData *data, - NSError *error) { - if (error) { - RTCLogError(@"Error posting data: %@", error.localizedDescription); - if (completionHandler) { - completionHandler(NO, data); - } - return; - } - NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *)response; - if (httpResponse.statusCode != 200) { - NSString *serverResponse = data.length > 0 ? 
- [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding] : - nil; - RTCLogError(@"Received bad response: %@", serverResponse); - if (completionHandler) { - completionHandler(NO, data); - } - return; - } - if (completionHandler) { - completionHandler(YES, data); - } - }]; + [[self class] + sendAsyncRequest:request + completionHandler:^( + NSURLResponse *response, NSData *data, NSError *error) { + if (error) { + RTCLogError(@"Error posting data: %@", error.localizedDescription); + if (completionHandler) { + completionHandler(NO, data); + } + return; + } + NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *)response; + if (httpResponse.statusCode != 200) { + NSString *serverResponse = data.length > 0 ? + [[NSString alloc] initWithData:data + encoding:NSUTF8StringEncoding] : + nil; + RTCLogError(@"Received bad response: %@", serverResponse); + if (completionHandler) { + completionHandler(NO, data); + } + return; + } + if (completionHandler) { + completionHandler(YES, data); + } + }]; } @end @@ -120,7 +124,7 @@ NSInteger ARDGetCpuUsagePercentage(void) { } // Dealloc the created array. - vm_deallocate(task, (vm_address_t)thread_array, - sizeof(thread_act_t) * thread_count); + vm_deallocate( + task, (vm_address_t)thread_array, sizeof(thread_act_t) * thread_count); return lroundf(cpu_usage_percentage); } diff --git a/examples/objc/AppRTCMobile/ios/ARDAppDelegate.m b/examples/objc/AppRTCMobile/ios/ARDAppDelegate.m index 51e9910b87..9a3b0d561a 100644 --- a/examples/objc/AppRTCMobile/ios/ARDAppDelegate.m +++ b/examples/objc/AppRTCMobile/ios/ARDAppDelegate.m @@ -29,12 +29,12 @@ - (BOOL)application:(UIApplication *)application RTCInitFieldTrialDictionary(fieldTrials); RTCInitializeSSL(); RTCSetupInternalTracer(); - _window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]]; + _window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]]; [_window makeKeyAndVisible]; ARDMainViewController *viewController = [[ARDMainViewController alloc] init]; - UINavigationController *root = - [[UINavigationController alloc] initWithRootViewController:viewController]; + UINavigationController *root = [[UINavigationController alloc] + initWithRootViewController:viewController]; root.navigationBar.translucent = NO; _window.rootViewController = root; diff --git a/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.h b/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.h index 82f8fcdd1b..b20aec283c 100644 --- a/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.h +++ b/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.h @@ -25,7 +25,8 @@ NS_CLASS_AVAILABLE_IOS(10) * * @param capturer The capturer to be controlled. */ -- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer; +- (instancetype)initWithCapturer: + (RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer; /** * Starts the file capturer. 
diff --git a/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.m b/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.m index 2ddde6dd59..98c947b894 100644 --- a/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.m +++ b/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.m @@ -21,8 +21,10 @@ @interface ARDFileCaptureController () @implementation ARDFileCaptureController @synthesize fileCapturer = _fileCapturer; -- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer { - if (self = [super init]) { +- (instancetype)initWithCapturer: + (RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer { + self = [super init]; + if (self) { _fileCapturer = capturer; } return self; diff --git a/examples/objc/AppRTCMobile/ios/ARDMainView.h b/examples/objc/AppRTCMobile/ios/ARDMainView.h index c6691c2d84..f9404a2952 100644 --- a/examples/objc/AppRTCMobile/ios/ARDMainView.h +++ b/examples/objc/AppRTCMobile/ios/ARDMainView.h @@ -14,7 +14,9 @@ @protocol ARDMainViewDelegate -- (void)mainView:(ARDMainView *)mainView didInputRoom:(NSString *)room isLoopback:(BOOL)isLoopback; +- (void)mainView:(ARDMainView *)mainView + didInputRoom:(NSString *)room + isLoopback:(BOOL)isLoopback; - (void)mainViewDidToggleAudioLoop:(ARDMainView *)mainView; @end diff --git a/examples/objc/AppRTCMobile/ios/ARDMainView.m b/examples/objc/AppRTCMobile/ios/ARDMainView.m index d9521060eb..80b0c74d61 100644 --- a/examples/objc/AppRTCMobile/ios/ARDMainView.m +++ b/examples/objc/AppRTCMobile/ios/ARDMainView.m @@ -26,7 +26,8 @@ @implementation ARDRoomTextField { } - (instancetype)initWithFrame:(CGRect)frame { - if (self = [super initWithFrame:frame]) { + self = [super initWithFrame:frame]; + if (self) { _roomText = [[UITextField alloc] initWithFrame:CGRectZero]; _roomText.borderStyle = UITextBorderStyleNone; _roomText.font = [UIFont systemFontOfSize:12]; @@ -47,7 +48,9 @@ - (instancetype)initWithFrame:(CGRect)frame { - (void)layoutSubviews { _roomText.frame = - CGRectMake(kRoomTextFieldMargin, 0, CGRectGetWidth(self.bounds) - kRoomTextFieldMargin, + CGRectMake(kRoomTextFieldMargin, + 0, + CGRectGetWidth(self.bounds) - kRoomTextFieldMargin, kRoomTextFieldHeight); } @@ -82,7 +85,8 @@ @implementation ARDMainView { @synthesize isAudioLoopPlaying = _isAudioLoopPlaying; - (instancetype)initWithFrame:(CGRect)frame { - if (self = [super initWithFrame:frame]) { + self = [super initWithFrame:frame]; + if (self) { _roomText = [[ARDRoomTextField alloc] initWithFrame:CGRectZero]; [self addSubview:_roomText]; @@ -91,10 +95,15 @@ - (instancetype)initWithFrame:(CGRect)frame { _startRegularCallButton = [UIButton buttonWithType:UIButtonTypeSystem]; _startRegularCallButton.titleLabel.font = controlFont; - [_startRegularCallButton setTitleColor:controlFontColor forState:UIControlStateNormal]; - _startRegularCallButton.backgroundColor - = [UIColor colorWithRed:66.0/255.0 green:200.0/255.0 blue:90.0/255.0 alpha:1.0]; - [_startRegularCallButton setTitle:@"Call room" forState:UIControlStateNormal]; + [_startRegularCallButton setTitleColor:controlFontColor + forState:UIControlStateNormal]; + _startRegularCallButton.backgroundColor = + [UIColor colorWithRed:66.0 / 255.0 + green:200.0 / 255.0 + blue:90.0 / 255.0 + alpha:1.0]; + [_startRegularCallButton setTitle:@"Call room" + forState:UIControlStateNormal]; [_startRegularCallButton addTarget:self action:@selector(onStartRegularCall:) forControlEvents:UIControlEventTouchUpInside]; @@ -102,22 +111,26 @@ - (instancetype)initWithFrame:(CGRect)frame { _startLoopbackCallButton = [UIButton 
buttonWithType:UIButtonTypeSystem]; _startLoopbackCallButton.titleLabel.font = controlFont; - [_startLoopbackCallButton setTitleColor:controlFontColor forState:UIControlStateNormal]; + [_startLoopbackCallButton setTitleColor:controlFontColor + forState:UIControlStateNormal]; _startLoopbackCallButton.backgroundColor = - [UIColor colorWithRed:0.0 green:122.0/255.0 blue:1.0 alpha:1.0]; - [_startLoopbackCallButton setTitle:@"Loopback call" forState:UIControlStateNormal]; + [UIColor colorWithRed:0.0 green:122.0 / 255.0 blue:1.0 alpha:1.0]; + [_startLoopbackCallButton setTitle:@"Loopback call" + forState:UIControlStateNormal]; [_startLoopbackCallButton addTarget:self action:@selector(onStartLoopbackCall:) forControlEvents:UIControlEventTouchUpInside]; [self addSubview:_startLoopbackCallButton]; - // Used to test what happens to sounds when calls are in progress. _audioLoopButton = [UIButton buttonWithType:UIButtonTypeSystem]; _audioLoopButton.titleLabel.font = controlFont; - [_audioLoopButton setTitleColor:controlFontColor forState:UIControlStateNormal]; - _audioLoopButton.backgroundColor = - [UIColor colorWithRed:1.0 green:149.0/255.0 blue:0.0 alpha:1.0]; + [_audioLoopButton setTitleColor:controlFontColor + forState:UIControlStateNormal]; + _audioLoopButton.backgroundColor = [UIColor colorWithRed:1.0 + green:149.0 / 255.0 + blue:0.0 + alpha:1.0]; [self updateAudioLoopButton]; [_audioLoopButton addTarget:self action:@selector(onToggleAudioLoop:) @@ -141,29 +154,36 @@ - (void)layoutSubviews { CGRect bounds = self.bounds; CGFloat roomTextWidth = bounds.size.width - 2 * kRoomTextFieldMargin; CGFloat roomTextHeight = [_roomText sizeThatFits:bounds.size].height; - _roomText.frame = - CGRectMake(kRoomTextFieldMargin, kRoomTextFieldMargin, roomTextWidth, - roomTextHeight); + _roomText.frame = CGRectMake(kRoomTextFieldMargin, + kRoomTextFieldMargin, + roomTextWidth, + roomTextHeight); CGFloat buttonHeight = - (CGRectGetMaxY(self.bounds) - CGRectGetMaxY(_roomText.frame) - kCallControlMargin * 4) / 3; - - CGFloat regularCallFrameTop = CGRectGetMaxY(_roomText.frame) + kCallControlMargin; - CGRect regularCallFrame = CGRectMake(kCallControlMargin, - regularCallFrameTop, - bounds.size.width - 2*kCallControlMargin, - buttonHeight); - - CGFloat loopbackCallFrameTop = CGRectGetMaxY(regularCallFrame) + kCallControlMargin; - CGRect loopbackCallFrame = CGRectMake(kCallControlMargin, - loopbackCallFrameTop, - bounds.size.width - 2*kCallControlMargin, - buttonHeight); + (CGRectGetMaxY(self.bounds) - CGRectGetMaxY(_roomText.frame) - + kCallControlMargin * 4) / + 3; + + CGFloat regularCallFrameTop = + CGRectGetMaxY(_roomText.frame) + kCallControlMargin; + CGRect regularCallFrame = + CGRectMake(kCallControlMargin, + regularCallFrameTop, + bounds.size.width - 2 * kCallControlMargin, + buttonHeight); + + CGFloat loopbackCallFrameTop = + CGRectGetMaxY(regularCallFrame) + kCallControlMargin; + CGRect loopbackCallFrame = + CGRectMake(kCallControlMargin, + loopbackCallFrameTop, + bounds.size.width - 2 * kCallControlMargin, + buttonHeight); CGFloat audioLoopTop = CGRectGetMaxY(loopbackCallFrame) + kCallControlMargin; CGRect audioLoopFrame = CGRectMake(kCallControlMargin, audioLoopTop, - bounds.size.width - 2*kCallControlMargin, + bounds.size.width - 2 * kCallControlMargin, buttonHeight); _startRegularCallButton.frame = regularCallFrame; diff --git a/examples/objc/AppRTCMobile/ios/ARDMainViewController.m b/examples/objc/AppRTCMobile/ios/ARDMainViewController.m index e8b8112e41..4de38a002e 100644 --- 
a/examples/objc/AppRTCMobile/ios/ARDMainViewController.m +++ b/examples/objc/AppRTCMobile/ios/ARDMainViewController.m @@ -25,7 +25,8 @@ static NSString *const barButtonImageString = @"ic_settings_black_24dp.png"; -// Launch argument to be passed to indicate that the app should start loopback immediatly +// Launch argument to be passed to indicate that the app should start loopback +// immediatly static NSString *const loopbackLaunchProcessArgument = @"loopback"; @interface ARDMainViewController () *remoteVideoView; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCCameraPreviewView) * + localVideoView; +@property(nonatomic, readonly) + __kindof UIView *remoteVideoView; @property(nonatomic, readonly) ARDStatsView *statsView; @property(nonatomic, weak) id delegate; diff --git a/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m b/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m index 437aea8d56..f1832642ff 100644 --- a/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m +++ b/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m @@ -39,13 +39,15 @@ @implementation ARDVideoCallView { @synthesize delegate = _delegate; - (instancetype)initWithFrame:(CGRect)frame { - if (self = [super initWithFrame:frame]) { - - _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero]; + self = [super initWithFrame:frame]; + if (self) { + _remoteVideoView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero]; [self addSubview:_remoteVideoView]; - _localVideoView = [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero]; + _localVideoView = + [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero]; [self addSubview:_localVideoView]; _statsView = [[ARDStatsView alloc] initWithFrame:CGRectZero]; @@ -69,11 +71,12 @@ - (instancetype)initWithFrame:(CGRect)frame { _cameraSwitchButton.backgroundColor = [UIColor grayColor]; _cameraSwitchButton.layer.cornerRadius = kButtonSize / 2; _cameraSwitchButton.layer.masksToBounds = YES; - image = [UIImage imageForName:@"ic_switch_video_black_24dp.png" color:[UIColor whiteColor]]; + image = [UIImage imageForName:@"ic_switch_video_black_24dp.png" + color:[UIColor whiteColor]]; [_cameraSwitchButton setImage:image forState:UIControlStateNormal]; [_cameraSwitchButton addTarget:self - action:@selector(onCameraSwitch:) - forControlEvents:UIControlEventTouchUpInside]; + action:@selector(onCameraSwitch:) + forControlEvents:UIControlEventTouchUpInside]; [self addSubview:_cameraSwitchButton]; _hangupButton = [UIButton buttonWithType:UIButtonTypeCustom]; @@ -93,10 +96,9 @@ - (instancetype)initWithFrame:(CGRect)frame { _statusLabel.textColor = [UIColor whiteColor]; [self addSubview:_statusLabel]; - UITapGestureRecognizer *tapRecognizer = - [[UITapGestureRecognizer alloc] - initWithTarget:self - action:@selector(didTripleTap:)]; + UITapGestureRecognizer *tapRecognizer = [[UITapGestureRecognizer alloc] + initWithTarget:self + action:@selector(didTripleTap:)]; tapRecognizer.numberOfTapsRequired = 3; [self addGestureRecognizer:tapRecognizer]; } @@ -130,23 +132,23 @@ - (void)layoutSubviews { CGRect localVideoFrame = CGRectMake(0, 0, kLocalVideoViewSize, kLocalVideoViewSize); // Place the view in the bottom right. 
- localVideoFrame.origin.x = CGRectGetMaxX(bounds) - - localVideoFrame.size.width - kLocalVideoViewPadding; - localVideoFrame.origin.y = CGRectGetMaxY(bounds) - - localVideoFrame.size.height - kLocalVideoViewPadding; + localVideoFrame.origin.x = CGRectGetMaxX(bounds) - + localVideoFrame.size.width - kLocalVideoViewPadding; + localVideoFrame.origin.y = CGRectGetMaxY(bounds) - + localVideoFrame.size.height - kLocalVideoViewPadding; _localVideoView.frame = localVideoFrame; // Place stats at the top. CGSize statsSize = [_statsView sizeThatFits:bounds.size]; _statsView.frame = CGRectMake(CGRectGetMinX(bounds), CGRectGetMinY(bounds) + kStatusBarHeight, - statsSize.width, statsSize.height); + statsSize.width, + statsSize.height); // Place hangup button in the bottom left. _hangupButton.frame = CGRectMake(CGRectGetMinX(bounds) + kButtonPadding, - CGRectGetMaxY(bounds) - kButtonPadding - - kButtonSize, + CGRectGetMaxY(bounds) - kButtonPadding - kButtonSize, kButtonSize, kButtonSize); @@ -158,8 +160,7 @@ - (void)layoutSubviews { // Place route button to the right of camera button. CGRect routeChangeFrame = _cameraSwitchButton.frame; - routeChangeFrame.origin.x = - CGRectGetMaxX(routeChangeFrame) + kButtonPadding; + routeChangeFrame.origin.x = CGRectGetMaxX(routeChangeFrame) + kButtonPadding; _routeChangeButton.frame = routeChangeFrame; [_statusLabel sizeToFit]; @@ -169,7 +170,8 @@ - (void)layoutSubviews { #pragma mark - RTC_OBJC_TYPE(RTCVideoViewDelegate) -- (void)videoView:(id)videoView didChangeVideoSize:(CGSize)size { +- (void)videoView:(id)videoView + didChangeVideoSize:(CGSize)size { if (videoView == _remoteVideoView) { _remoteVideoSize = size; } diff --git a/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m b/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m index a82d90b290..9b308f2355 100644 --- a/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m +++ b/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m @@ -22,9 +22,10 @@ #import "ARDSettingsModel.h" #import "ARDVideoCallView.h" -@interface ARDVideoCallViewController () +@interface ARDVideoCallViewController () < + ARDAppClientDelegate, + ARDVideoCallViewDelegate, + RTC_OBJC_TYPE (RTCAudioSessionDelegate)> @property(nonatomic, strong) RTC_OBJC_TYPE(RTCVideoTrack) * remoteVideoTrack; @property(nonatomic, readonly) ARDVideoCallView *videoCallView; @property(nonatomic, assign) AVAudioSessionPortOverride portOverride; @@ -45,12 +46,15 @@ @implementation ARDVideoCallViewController { - (instancetype)initForRoom:(NSString *)room isLoopback:(BOOL)isLoopback delegate:(id)delegate { - if (self = [super init]) { + self = [super init]; + if (self) { ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init]; _delegate = delegate; _client = [[ARDAppClient alloc] initWithDelegate:self]; - [_client connectToRoomWithId:room settings:settingsModel isLoopback:isLoopback]; + [_client connectToRoomWithId:room + settings:settingsModel + isLoopback:isLoopback]; } return self; } @@ -62,7 +66,8 @@ - (void)loadView { [self statusTextForState:RTCIceConnectionStateNew]; self.view = _videoCallView; - RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session addDelegate:self]; } @@ -100,19 +105,23 @@ - (void)appClient:(ARDAppClient *)client } - (void)appClient:(ARDAppClient *)client - didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer { + didCreateLocalCapturer: + 
(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer { _videoCallView.localVideoView.captureSession = localCapturer.captureSession; ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init]; _captureController = - [[ARDCaptureController alloc] initWithCapturer:localCapturer settings:settingsModel]; + [[ARDCaptureController alloc] initWithCapturer:localCapturer + settings:settingsModel]; [_captureController startCapture]; } - (void)appClient:(ARDAppClient *)client - didCreateLocalFileCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer { + didCreateLocalFileCapturer: + (RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer { #if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0) if (@available(iOS 10, *)) { - _fileCaptureController = [[ARDFileCaptureController alloc] initWithCapturer:fileCapturer]; + _fileCaptureController = + [[ARDFileCaptureController alloc] initWithCapturer:fileCapturer]; [_fileCaptureController startCapture]; } #endif @@ -123,7 +132,8 @@ - (void)appClient:(ARDAppClient *)client } - (void)appClient:(ARDAppClient *)client - didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack { + didReceiveRemoteVideoTrack: + (RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack { self.remoteVideoTrack = remoteVideoTrack; __weak ARDVideoCallViewController *weakSelf = self; dispatch_async(dispatch_get_main_queue(), ^{ @@ -132,13 +142,13 @@ - (void)appClient:(ARDAppClient *)client }); } -- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats { +- (void)appClient:(ARDAppClient *)client + didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats { _videoCallView.statsView.stats = stats; [_videoCallView setNeedsLayout]; } -- (void)appClient:(ARDAppClient *)client - didError:(NSError *)error { +- (void)appClient:(ARDAppClient *)client didError:(NSError *)error { NSString *message = [NSString stringWithFormat:@"%@", error.localizedDescription]; [self hangup]; @@ -163,22 +173,23 @@ - (void)videoCallView:(ARDVideoCallView *)view if (_portOverride == AVAudioSessionPortOverrideNone) { override = AVAudioSessionPortOverrideSpeaker; } - [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeAudioSession - block:^{ - RTC_OBJC_TYPE(RTCAudioSession) *session = - [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; - [session lockForConfiguration]; - NSError *error = nil; - if ([session overrideOutputAudioPort:override - error:&error]) { - self.portOverride = override; - } else { - RTCLogError(@"Error overriding output port: %@", - error.localizedDescription); - } - [session unlockForConfiguration]; - completion(); - }]; + [RTC_OBJC_TYPE(RTCDispatcher) + dispatchAsyncOnType:RTCDispatcherTypeAudioSession + block:^{ + RTC_OBJC_TYPE(RTCAudioSession) *session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + [session lockForConfiguration]; + NSError *error = nil; + if ([session overrideOutputAudioPort:override + error:&error]) { + self.portOverride = override; + } else { + RTCLogError(@"Error overriding output port: %@", + error.localizedDescription); + } + [session unlockForConfiguration]; + completion(); + }]; } - (void)videoCallViewDidEnableStats:(ARDVideoCallView *)view { @@ -232,16 +243,17 @@ - (NSString *)statusTextForState:(RTCIceConnectionState)state { } } -- (void)showAlertWithMessage:(NSString*)message { +- (void)showAlertWithMessage:(NSString *)message { UIAlertController *alert = [UIAlertController alertControllerWithTitle:nil message:message preferredStyle:UIAlertControllerStyleAlert]; - 
UIAlertAction *defaultAction = [UIAlertAction actionWithTitle:@"OK" - style:UIAlertActionStyleDefault - handler:^(UIAlertAction *action){ - }]; + UIAlertAction *defaultAction = + [UIAlertAction actionWithTitle:@"OK" + style:UIAlertActionStyleDefault + handler:^(UIAlertAction *action){ + }]; [alert addAction:defaultAction]; [self presentViewController:alert animated:YES completion:nil]; diff --git a/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m b/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m index 5e0c52c5c4..58af9b489d 100644 --- a/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m +++ b/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m @@ -19,7 +19,8 @@ - (NSString *)humanReadableDescription { if ([self.name isEqualToString:@"H264"]) { NSString *profileId = self.parameters[@"profile-level-id"]; RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId = - [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:profileId]; + [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] + initWithHexString:profileId]; if (profileLevelId.profile == RTCH264ProfileConstrainedHigh || profileLevelId.profile == RTCH264ProfileHigh) { return @"H264 (High)"; diff --git a/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.h b/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.h index 2c4a56368a..cfc92b1e67 100644 --- a/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.h +++ b/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.h @@ -17,7 +17,8 @@ @protocol ARDExternalSampleDelegate; API_AVAILABLE(ios(10.0)) -@interface ARDBroadcastSampleHandler : RPBroadcastSampleHandler +@interface ARDBroadcastSampleHandler + : RPBroadcastSampleHandler @property(nonatomic, strong) id capturer; diff --git a/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m b/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m index 1c276d965f..bdbf61c20f 100644 --- a/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m +++ b/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m @@ -26,19 +26,23 @@ @implementation ARDBroadcastSampleHandler { @synthesize capturer = _capturer; - (instancetype)init { - if (self = [super init]) { + self = [super init]; + if (self) { _callbackLogger = [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init]; os_log_t rtc_os_log = os_log_create("com.google.AppRTCMobile", "RTCLog"); [_callbackLogger start:^(NSString *logMessage) { - os_log(rtc_os_log, "%{public}s", [logMessage cStringUsingEncoding:NSUTF8StringEncoding]); + os_log(rtc_os_log, + "%{public}s", + [logMessage cStringUsingEncoding:NSUTF8StringEncoding]); }]; } return self; } -- (void)broadcastStartedWithSetupInfo:(NSDictionary *)setupInfo { - // User has requested to start the broadcast. Setup info from the UI extension can be supplied but - // optional. +- (void)broadcastStartedWithSetupInfo: + (NSDictionary *)setupInfo { + // User has requested to start the broadcast. Setup info from the UI extension + // can be supplied but optional. ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init]; _client = [[ARDAppClient alloc] initWithDelegate:self]; @@ -56,7 +60,8 @@ - (void)broadcastStartedWithSetupInfo:(NSDictionary *)se } - (void)broadcastPaused { - // User has requested to pause the broadcast. Samples will stop being delivered. + // User has requested to pause the broadcast. 
Samples will stop being + // delivered. } - (void)broadcastResumed { @@ -85,7 +90,8 @@ - (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer #pragma mark - ARDAppClientDelegate -- (void)appClient:(ARDAppClient *)client didChangeState:(ARDAppClientState)state { +- (void)appClient:(ARDAppClient *)client + didChangeState:(ARDAppClientState)state { switch (state) { case kARDAppClientStateConnected: RTCLog(@"Client connected."); @@ -99,16 +105,19 @@ - (void)appClient:(ARDAppClient *)client didChangeState:(ARDAppClientState)state } } -- (void)appClient:(ARDAppClient *)client didChangeConnectionState:(RTCIceConnectionState)state { +- (void)appClient:(ARDAppClient *)client + didChangeConnectionState:(RTCIceConnectionState)state { RTCLog(@"ICE state changed: %ld", (long)state); } - (void)appClient:(ARDAppClient *)client - didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer { + didCreateLocalCapturer: + (RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer { } - (void)appClient:(ARDAppClient *)client - didCreateLocalExternalSampleCapturer:(ARDExternalSampleCapturer *)externalSampleCapturer { + didCreateLocalExternalSampleCapturer: + (ARDExternalSampleCapturer *)externalSampleCapturer { self.capturer = externalSampleCapturer; } @@ -117,10 +126,12 @@ - (void)appClient:(ARDAppClient *)client } - (void)appClient:(ARDAppClient *)client - didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack { + didReceiveRemoteVideoTrack: + (RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack { } -- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats { +- (void)appClient:(ARDAppClient *)client + didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats { } - (void)appClient:(ARDAppClient *)client didError:(NSError *)error { diff --git a/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.h b/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.h index bbf397d8a9..aba4dbe971 100644 --- a/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.h +++ b/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.h @@ -12,6 +12,7 @@ #import API_AVAILABLE(ios(11.0)) -@interface ARDBroadcastSetupViewController : UIViewController +@interface ARDBroadcastSetupViewController + : UIViewController @end diff --git a/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.m b/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.m index 55438f17d8..e299c8d75b 100644 --- a/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.m +++ b/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSetupViewController.m @@ -18,7 +18,8 @@ - (void)loadView { UIView *view = [[UIView alloc] initWithFrame:CGRectZero]; view.backgroundColor = [UIColor colorWithWhite:1.0 alpha:0.7]; - UIImageView *imageView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"Icon-180"]]; + UIImageView *imageView = + [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"Icon-180"]]; imageView.translatesAutoresizingMaskIntoConstraints = NO; [view addSubview:imageView]; @@ -52,22 +53,34 @@ - (void)loadView { UILayoutGuide *margin = view.layoutMarginsGuide; [imageView.widthAnchor constraintEqualToConstant:60.0].active = YES; [imageView.heightAnchor constraintEqualToConstant:60.0].active = YES; - [imageView.topAnchor constraintEqualToAnchor:margin.topAnchor 
constant:20].active = YES; - [imageView.centerXAnchor constraintEqualToAnchor:view.centerXAnchor].active = YES; - - [_roomNameField.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES; - [_roomNameField.topAnchor constraintEqualToAnchor:imageView.bottomAnchor constant:20].active = + [imageView.topAnchor constraintEqualToAnchor:margin.topAnchor constant:20] + .active = YES; + [imageView.centerXAnchor constraintEqualToAnchor:view.centerXAnchor].active = YES; - [_roomNameField.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES; - - [doneButton.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES; - [doneButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:-20].active = YES; - [cancelButton.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES; - [cancelButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:-20].active = YES; + [_roomNameField.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor] + .active = YES; + [_roomNameField.topAnchor constraintEqualToAnchor:imageView.bottomAnchor + constant:20] + .active = YES; + [_roomNameField.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor] + .active = YES; + + [doneButton.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor] + .active = YES; + [doneButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor + constant:-20] + .active = YES; + + [cancelButton.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor] + .active = YES; + [cancelButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor + constant:-20] + .active = YES; UITapGestureRecognizer *tgr = - [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(didTap:)]; + [[UITapGestureRecognizer alloc] initWithTarget:self + action:@selector(didTap:)]; [view addGestureRecognizer:tgr]; self.view = view; @@ -78,23 +91,28 @@ - (IBAction)didTap:(id)sender { } - (void)userDidFinishSetup { - // URL of the resource where broadcast can be viewed that will be returned to the application - NSURL *broadcastURL = [NSURL - URLWithString:[NSString stringWithFormat:@"https://appr.tc/r/%@", _roomNameField.text]]; - - // Dictionary with setup information that will be provided to broadcast extension when broadcast - // is started + // URL of the resource where broadcast can be viewed that will be returned to + // the application + NSURL *broadcastURL = + [NSURL URLWithString:[NSString stringWithFormat:@"https://appr.tc/r/%@", + _roomNameField.text]]; + + // Dictionary with setup information that will be provided to broadcast + // extension when broadcast is started NSDictionary *setupInfo = @{@"roomName" : _roomNameField.text}; - // Tell ReplayKit that the extension is finished setting up and can begin broadcasting - [self.extensionContext completeRequestWithBroadcastURL:broadcastURL setupInfo:setupInfo]; + // Tell ReplayKit that the extension is finished setting up and can begin + // broadcasting + [self.extensionContext completeRequestWithBroadcastURL:broadcastURL + setupInfo:setupInfo]; } - (void)userDidCancelSetup { // Tell ReplayKit that the extension was cancelled by the user - [self.extensionContext cancelRequestWithError:[NSError errorWithDomain:@"com.google.AppRTCMobile" - code:-1 - userInfo:nil]]; + [self.extensionContext + cancelRequestWithError:[NSError errorWithDomain:@"com.google.AppRTCMobile" + code:-1 + userInfo:nil]]; } #pragma mark - UITextFieldDelegate diff --git a/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.m 
b/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.m index 36a470021d..4e73f06b9d 100644 --- a/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.m +++ b/examples/objc/AppRTCMobile/mac/APPRTCAppDelegate.m @@ -26,10 +26,8 @@ - (void)applicationDidFinishLaunching:(NSNotification*)notification { RTCInitializeSSL(); NSScreen* screen = [NSScreen mainScreen]; NSRect visibleRect = [screen visibleFrame]; - NSRect windowRect = NSMakeRect(NSMidX(visibleRect), - NSMidY(visibleRect), - 1320, - 1140); + NSRect windowRect = + NSMakeRect(NSMidX(visibleRect), NSMidY(visibleRect), 1320, 1140); NSUInteger styleMask = NSWindowStyleMaskTitled | NSWindowStyleMaskClosable; _window = [[NSWindow alloc] initWithContentRect:windowRect styleMask:styleMask @@ -52,4 +50,3 @@ - (void)windowWillClose:(NSNotification*)notification { } @end - diff --git a/examples/objc/AppRTCMobile/mac/APPRTCViewController.m b/examples/objc/AppRTCMobile/mac/APPRTCViewController.m index 982fa56b43..cf87b49ac7 100644 --- a/examples/objc/AppRTCMobile/mac/APPRTCViewController.m +++ b/examples/objc/AppRTCMobile/mac/APPRTCViewController.m @@ -36,17 +36,20 @@ - (void)appRTCMainView:(APPRTCMainView*)mainView @interface APPRTCMainView : NSView @property(nonatomic, weak) id delegate; -@property(nonatomic, readonly) NSView* localVideoView; -@property(nonatomic, readonly) NSView* remoteVideoView; +@property(nonatomic, readonly) + NSView* localVideoView; +@property(nonatomic, readonly) + NSView* remoteVideoView; @property(nonatomic, readonly) NSTextView* logView; - (void)displayLogMessage:(NSString*)message; @end -@interface APPRTCMainView () +@interface APPRTCMainView () @end -@implementation APPRTCMainView { +@implementation APPRTCMainView { NSScrollView* _scrollView; NSView* _actionItemsView; NSButton* _connectButton; @@ -61,9 +64,10 @@ @implementation APPRTCMainView { @synthesize remoteVideoView = _remoteVideoView; @synthesize logView = _logView; -- (void)displayLogMessage:(NSString *)message { +- (void)displayLogMessage:(NSString*)message { dispatch_async(dispatch_get_main_queue(), ^{ - self.logView.string = [NSString stringWithFormat:@"%@%@\n", self.logView.string, message]; + self.logView.string = + [NSString stringWithFormat:@"%@%@\n", self.logView.string, message]; NSRange range = NSMakeRange(self.logView.string.length, 0); [self.logView scrollRangeToVisible:range]; }); @@ -72,7 +76,8 @@ - (void)displayLogMessage:(NSString *)message { #pragma mark - Private - (instancetype)initWithFrame:(NSRect)frame { - if (self = [super initWithFrame:frame]) { + self = [super initWithFrame:frame]; + if (self) { [self setupViews]; } return self; @@ -83,14 +88,10 @@ + (BOOL)requiresConstraintBasedLayout { } - (void)updateConstraints { - NSParameterAssert( - _roomField != nil && - _scrollView != nil && - _remoteVideoView != nil && - _localVideoView != nil && - _actionItemsView!= nil && - _connectButton != nil && - _loopbackButton != nil); + NSParameterAssert(_roomField != nil && _scrollView != nil && + _remoteVideoView != nil && _localVideoView != nil && + _actionItemsView != nil && _connectButton != nil && + _loopbackButton != nil); [self removeConstraints:[self constraints]]; NSDictionary* viewsDictionary = @@ -114,25 +115,26 @@ - (void)updateConstraints { }; // Declare this separately to avoid compiler warning about splitting string // within an NSArray expression. 
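// Editor's note: illustrative sketch only, not part of the patch. The
// reformatted lines below split each Visual Format string by relying on
// adjacent @"" literals being concatenated at compile time, which avoids the
// NSArray-splitting warning mentioned above. A minimal standalone version of
// that idiom, with hypothetical view names and metrics:

#import <AppKit/AppKit.h>

static void EXAddStackedConstraints(NSView* container,
                                    NSView* topView,
                                    NSView* bottomView) {
  // Adjacent literals form one format string:
  // "V:|-[topView(topHeight)]-[bottomView(bottomHeight)]-|".
  NSString* format = @"V:|-[topView(topHeight)]"
                     @"-[bottomView(bottomHeight)]-|";
  NSArray* constraints = [NSLayoutConstraint
      constraintsWithVisualFormat:format
                          options:0
                          metrics:@{@"topHeight" : @240, @"bottomHeight" : @120}
                            views:@{@"topView" : topView,
                                    @"bottomView" : bottomView}];
  [container addConstraints:constraints];
}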
- NSString* verticalConstraintLeft = - @"V:|-[_remoteVideoView(remoteViewHeight)]-[_scrollView(kBottomViewHeight)]-|"; + NSString* verticalConstraintLeft = @"V:|-[_remoteVideoView(remoteViewHeight)]" + @"-[_scrollView(kBottomViewHeight)]-|"; NSString* verticalConstraintRight = - @"V:|-[_remoteVideoView(remoteViewHeight)]-[_actionItemsView(kBottomViewHeight)]-|"; + @"V:|-[_remoteVideoView(remoteViewHeight)]-[_actionItemsView(" + @"kBottomViewHeight)]-|"; NSArray* constraintFormats = @[ - verticalConstraintLeft, - verticalConstraintRight, - @"H:|-[_remoteVideoView(remoteViewWidth)]-|", - @"V:|-[_localVideoView(localViewHeight)]", - @"H:|-[_localVideoView(localViewWidth)]", - @"H:|-[_scrollView(==_actionItemsView)]-[_actionItemsView]-|" + verticalConstraintLeft, + verticalConstraintRight, + @"H:|-[_remoteVideoView(remoteViewWidth)]-|", + @"V:|-[_localVideoView(localViewHeight)]", + @"H:|-[_localVideoView(localViewWidth)]", + @"H:|-[_scrollView(==_actionItemsView)]-[_actionItemsView]-|" ]; NSArray* actionItemsConstraints = @[ - @"H:|-[_roomField(kRoomFieldWidth)]-[_loopbackButton(kRoomFieldWidth)]", - @"H:|-[_connectButton(kRoomFieldWidth)]", - @"V:|-[_roomField(kActionItemHeight)]-[_connectButton(kActionItemHeight)]", - @"V:|-[_loopbackButton(kActionItemHeight)]", - ]; + @"H:|-[_roomField(kRoomFieldWidth)]-[_loopbackButton(kRoomFieldWidth)]", + @"H:|-[_connectButton(kRoomFieldWidth)]", + @"V:|-[_roomField(kActionItemHeight)]-[_connectButton(kActionItemHeight)]", + @"V:|-[_loopbackButton(kActionItemHeight)]", + ]; [APPRTCMainView addConstraints:constraintFormats toView:self @@ -147,15 +149,16 @@ - (void)updateConstraints { #pragma mark - Constraints helper -+ (void)addConstraints:(NSArray*)constraints toView:(NSView*)view ++ (void)addConstraints:(NSArray*)constraints + toView:(NSView*)view viewsDictionary:(NSDictionary*)viewsDictionary metrics:(NSDictionary*)metrics { for (NSString* constraintFormat in constraints) { NSArray* constraints = - [NSLayoutConstraint constraintsWithVisualFormat:constraintFormat - options:0 - metrics:metrics - views:viewsDictionary]; + [NSLayoutConstraint constraintsWithVisualFormat:constraintFormat + options:0 + metrics:metrics + views:viewsDictionary]; for (NSLayoutConstraint* constraint in constraints) { [view addConstraint:constraint]; } @@ -169,7 +172,8 @@ - (void)startCall:(id)sender { // Generate room id for loopback options. 
if (_loopbackButton.intValue && [roomString isEqualToString:@""]) { roomString = [NSUUID UUID].UUIDString; - roomString = [roomString stringByReplacingOccurrencesOfString:@"-" withString:@""]; + roomString = [roomString stringByReplacingOccurrencesOfString:@"-" + withString:@""]; } [self.delegate appRTCMainView:self didEnterRoomId:roomString @@ -179,7 +183,8 @@ - (void)startCall:(id)sender { #pragma mark - RTCVideoViewDelegate -- (void)videoView:(id)videoView didChangeVideoSize:(CGSize)size { +- (void)videoView:(id)videoView + didChangeVideoSize:(CGSize)size { if (videoView == _remoteVideoView) { _remoteVideoSize = size; } else if (videoView == _localVideoView) { @@ -215,8 +220,10 @@ - (void)setupViews { [_scrollView setDocumentView:_logView]; [self addSubview:_scrollView]; - _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect]; - _localVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect]; + _remoteVideoView = + [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect]; + _localVideoView = + [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect]; [_remoteVideoView setTranslatesAutoresizingMaskIntoConstraints:NO]; [self addSubview:_remoteVideoView]; @@ -231,7 +238,7 @@ - (void)setupActionItemsView { _roomField = [[NSTextField alloc] initWithFrame:NSZeroRect]; [_roomField setTranslatesAutoresizingMaskIntoConstraints:NO]; - [[_roomField cell] setPlaceholderString: @"Enter AppRTC room id"]; + [[_roomField cell] setPlaceholderString:@"Enter AppRTC room id"]; [_actionItemsView addSubview:_roomField]; [_roomField setEditable:YES]; @@ -255,14 +262,14 @@ - (NSSize)remoteVideoViewSize { return NSMakeSize(kContentWidth, 0); } NSInteger width = MAX(_remoteVideoView.bounds.size.width, kContentWidth); - NSInteger height = (width/16) * 9; + NSInteger height = (width / 16) * 9; return NSMakeSize(width, height); } @end -@interface APPRTCViewController () - +@interface APPRTCViewController () @property(nonatomic, readonly) APPRTCMainView* mainView; @end @@ -297,14 +304,14 @@ - (void)windowWillClose:(NSNotification*)notification { - (void)displayUsageInstructions { [self.mainView displayLogMessage: - @"To start call:\n" - @"• Enter AppRTC room id (not neccessary for loopback)\n" - @"• Start call"]; + @"To start call:\n" + @"• Enter AppRTC room id (not neccessary for loopback)\n" + @"• Start call"]; } #pragma mark - ARDAppClientDelegate -- (void)appClient:(ARDAppClient *)client +- (void)appClient:(ARDAppClient*)client didChangeState:(ARDAppClientState)state { switch (state) { case kARDAppClientStateConnected: @@ -321,15 +328,16 @@ - (void)appClient:(ARDAppClient *)client } } -- (void)appClient:(ARDAppClient *)client +- (void)appClient:(ARDAppClient*)client didChangeConnectionState:(RTCIceConnectionState)state { } - (void)appClient:(ARDAppClient*)client - didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer { - _captureController = - [[ARDCaptureController alloc] initWithCapturer:localCapturer - settings:[[ARDSettingsModel alloc] init]]; + didCreateLocalCapturer: + (RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer { + _captureController = [[ARDCaptureController alloc] + initWithCapturer:localCapturer + settings:[[ARDSettingsModel alloc] init]]; [_captureController startCapture]; } @@ -340,19 +348,18 @@ - (void)appClient:(ARDAppClient*)client } - (void)appClient:(ARDAppClient*)client - didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack { + didReceiveRemoteVideoTrack: + 
(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack { _remoteVideoTrack = remoteVideoTrack; [_remoteVideoTrack addRenderer:self.mainView.remoteVideoView]; } -- (void)appClient:(ARDAppClient *)client - didError:(NSError *)error { +- (void)appClient:(ARDAppClient*)client didError:(NSError*)error { [self showAlertWithMessage:[NSString stringWithFormat:@"%@", error]]; [self disconnect]; } -- (void)appClient:(ARDAppClient *)client - didGetStats:(NSArray *)stats { +- (void)appClient:(ARDAppClient*)client didGetStats:(NSArray*)stats { } #pragma mark - APPRTCMainViewDelegate @@ -360,7 +367,6 @@ - (void)appClient:(ARDAppClient *)client - (void)appRTCMainView:(APPRTCMainView*)mainView didEnterRoomId:(NSString*)roomId loopback:(BOOL)isLoopback { - if ([roomId isEqualToString:@""]) { [self.mainView displayLogMessage:@"Missing room id"]; return; @@ -369,7 +375,8 @@ - (void)appRTCMainView:(APPRTCMainView*)mainView [self disconnect]; ARDAppClient* client = [[ARDAppClient alloc] initWithDelegate:self]; [client connectToRoomWithId:roomId - settings:[[ARDSettingsModel alloc] init] // Use default settings. + settings:[[ARDSettingsModel alloc] + init] // Use default settings. isLoopback:isLoopback]; _client = client; } diff --git a/examples/objc/AppRTCMobile/tests/ARDAppClient_xctest.mm b/examples/objc/AppRTCMobile/tests/ARDAppClient_xctest.mm index 2694e49914..e480a2451b 100644 --- a/examples/objc/AppRTCMobile/tests/ARDAppClient_xctest.mm +++ b/examples/objc/AppRTCMobile/tests/ARDAppClient_xctest.mm @@ -35,7 +35,7 @@ - (id)mockRoomServerClientForRoomId:(NSString *)roomId isInitiator:(BOOL)isInitiator messages:(NSArray *)messages messageHandler: - (void (^)(ARDSignalingMessage *))messageHandler { + (void (^)(ARDSignalingMessage *))messageHandler { id mockRoomServerClient = [OCMockObject mockForProtocol:@protocol(ARDRoomServerClient)]; @@ -69,9 +69,9 @@ __unsafe_unretained void (^completionHandler)(ARDMessageResponse *response, messageHandler(message); completionHandler(messageResponse, nil); }] sendMessage:[OCMArg any] - forRoomId:roomId - clientId:clientId - completionHandler:[OCMArg any]]; + forRoomId:roomId + clientId:clientId + completionHandler:[OCMArg any]]; // Do nothing on leave. [[[mockRoomServerClient stub] andDo:^(NSInvocation *invocation) { @@ -89,8 +89,8 @@ __unsafe_unretained void (^completionHandler)(ARDMessageResponse *response, - (id)mockSignalingChannelForRoomId:(NSString *)roomId clientId:(NSString *)clientId - messageHandler: - (void (^)(ARDSignalingMessage *message))messageHandler { + messageHandler:(void (^)(ARDSignalingMessage *message)) + messageHandler { id mockSignalingChannel = [OCMockObject niceMockForProtocol:@protocol(ARDSignalingChannel)]; [[mockSignalingChannel stub] registerForRoomId:roomId clientId:clientId]; @@ -103,8 +103,7 @@ - (id)mockSignalingChannelForRoomId:(NSString *)roomId } - (id)mockTURNClient { - id mockTURNClient = - [OCMockObject mockForProtocol:@protocol(ARDTURNClient)]; + id mockTURNClient = [OCMockObject mockForProtocol:@protocol(ARDTURNClient)]; [[[mockTURNClient stub] andDo:^(NSInvocation *invocation) { // Don't return anything in TURN response. 
__unsafe_unretained void (^completionHandler)(NSArray *turnServers, @@ -124,24 +123,24 @@ - (id)mockSettingsModel { return model; } -- (ARDAppClient *)createAppClientForRoomId:(NSString *)roomId - clientId:(NSString *)clientId - isInitiator:(BOOL)isInitiator - messages:(NSArray *)messages - messageHandler: - (void (^)(ARDSignalingMessage *message))messageHandler - connectedHandler:(void (^)(void))connectedHandler - localVideoTrackHandler:(void (^)(void))localVideoTrackHandler { +- (ARDAppClient *) + createAppClientForRoomId:(NSString *)roomId + clientId:(NSString *)clientId + isInitiator:(BOOL)isInitiator + messages:(NSArray *)messages + messageHandler: + (void (^)(ARDSignalingMessage *message))messageHandler + connectedHandler:(void (^)(void))connectedHandler + localVideoTrackHandler:(void (^)(void))localVideoTrackHandler { id turnClient = [self mockTURNClient]; id signalingChannel = [self mockSignalingChannelForRoomId:roomId clientId:clientId messageHandler:messageHandler]; - id roomServerClient = - [self mockRoomServerClientForRoomId:roomId - clientId:clientId - isInitiator:isInitiator - messages:messages - messageHandler:messageHandler]; + id roomServerClient = [self mockRoomServerClientForRoomId:roomId + clientId:clientId + isInitiator:isInitiator + messages:messages + messageHandler:messageHandler]; id delegate = [OCMockObject niceMockForProtocol:@protocol(ARDAppClientDelegate)]; [[[delegate stub] andDo:^(NSInvocation *invocation) { @@ -150,8 +149,7 @@ - (ARDAppClient *)createAppClientForRoomId:(NSString *)roomId didChangeConnectionState:RTCIceConnectionStateConnected]; [[[delegate stub] andDo:^(NSInvocation *invocation) { localVideoTrackHandler(); - }] appClient:[OCMArg any] - didReceiveLocalVideoTrack:[OCMArg any]]; + }] appClient:[OCMArg any] didReceiveLocalVideoTrack:[OCMArg any]]; return [[ARDAppClient alloc] initWithRoomServerClient:roomServerClient signalingChannel:signalingChannel @@ -183,55 +181,68 @@ - (void)testSession { [self expectationWithDescription:@"Answerer PC connected"]; caller = [self createAppClientForRoomId:roomId - clientId:callerId - isInitiator:YES - messages:[NSArray array] - messageHandler:^(ARDSignalingMessage *message) { - ARDAppClient *strongAnswerer = weakAnswerer; - [strongAnswerer channel:strongAnswerer.channel didReceiveMessage:message]; - } connectedHandler:^{ - [callerConnectionExpectation fulfill]; - } localVideoTrackHandler:^{ - }]; + clientId:callerId + isInitiator:YES + messages:[NSArray array] + messageHandler:^(ARDSignalingMessage *message) { + ARDAppClient *strongAnswerer = weakAnswerer; + [strongAnswerer channel:strongAnswerer.channel + didReceiveMessage:message]; + } + connectedHandler:^{ + [callerConnectionExpectation fulfill]; + } + localVideoTrackHandler:^{ + }]; // TODO(tkchin): Figure out why DTLS-SRTP constraint causes thread assertion // crash in Debug. 
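// Editor's note: illustrative sketch only, not part of the patch. The mock
// helpers earlier in this file all use the classic-OCMock `andDo:` pattern:
// pull the completion block off the NSInvocation and invoke it directly, so
// the code under test gets an immediate callback without any network traffic.
// A self-contained version with a hypothetical protocol and helper:

#import <Foundation/Foundation.h>
#import <OCMock/OCMock.h>

@protocol EXServerListClient <NSObject>
- (void)fetchServersWithCompletionHandler:
    (void (^)(NSArray *servers, NSError *error))completionHandler;
@end

static id EXMockServerListClient(void) {
  id mock = [OCMockObject mockForProtocol:@protocol(EXServerListClient)];
  [[[mock stub] andDo:^(NSInvocation *invocation) {
    // Index 0 is self and index 1 is _cmd, so the handler sits at index 2.
    __unsafe_unretained void (^completionHandler)(NSArray *, NSError *);
    [invocation getArgument:&completionHandler atIndex:2];
    completionHandler(@[], nil);  // Report an empty server list, no error.
  }] fetchServersWithCompletionHandler:[OCMArg any]];
  return mock;
}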
- caller.defaultPeerConnectionConstraints = - [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil - optionalConstraints:nil]; + caller.defaultPeerConnectionConstraints = [[RTC_OBJC_TYPE(RTCMediaConstraints) + alloc] initWithMandatoryConstraints:nil optionalConstraints:nil]; weakCaller = caller; answerer = [self createAppClientForRoomId:roomId - clientId:answererId - isInitiator:NO - messages:[NSArray array] - messageHandler:^(ARDSignalingMessage *message) { - ARDAppClient *strongCaller = weakCaller; - [strongCaller channel:strongCaller.channel didReceiveMessage:message]; - } connectedHandler:^{ - [answererConnectionExpectation fulfill]; - } localVideoTrackHandler:^{ - }]; + clientId:answererId + isInitiator:NO + messages:[NSArray array] + messageHandler:^(ARDSignalingMessage *message) { + ARDAppClient *strongCaller = weakCaller; + [strongCaller channel:strongCaller.channel didReceiveMessage:message]; + } + connectedHandler:^{ + [answererConnectionExpectation fulfill]; + } + localVideoTrackHandler:^{ + }]; // TODO(tkchin): Figure out why DTLS-SRTP constraint causes thread assertion // crash in Debug. answerer.defaultPeerConnectionConstraints = - [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil - optionalConstraints:nil]; + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] + initWithMandatoryConstraints:nil + optionalConstraints:nil]; weakAnswerer = answerer; // Kick off connection. - [caller connectToRoomWithId:roomId settings:[self mockSettingsModel] isLoopback:NO]; - [answerer connectToRoomWithId:roomId settings:[self mockSettingsModel] isLoopback:NO]; - [self waitForExpectationsWithTimeout:20 handler:^(NSError *error) { - if (error) { - XCTFail(@"Expectation failed with error %@.", error); - } - }]; + [caller connectToRoomWithId:roomId + settings:[self mockSettingsModel] + isLoopback:NO]; + [answerer connectToRoomWithId:roomId + settings:[self mockSettingsModel] + isLoopback:NO]; + [self waitForExpectationsWithTimeout:20 + handler:^(NSError *error) { + if (error) { + XCTFail(@"Expectation failed with error %@.", + error); + } + }]; } // Test to see that we get a local video connection // Note this will currently pass even when no camera is connected as a local -// video track is created regardless (Perhaps there should be a test for that...) -#if !TARGET_IPHONE_SIMULATOR // Expect to fail on simulator due to no camera support +// video track is created regardless (Perhaps there should be a test for +// that...) 
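// Editor's note: illustrative sketch only, not part of the patch. The tests
// in this file use XCTest's asynchronous-expectation pattern — create an
// expectation, fulfill it from a callback, then block until it fires or the
// timeout elapses. A minimal standalone version (the test class and queue
// here are hypothetical):

#import <XCTest/XCTest.h>

@interface EXAsyncPatternTest : XCTestCase
@end

@implementation EXAsyncPatternTest

- (void)testCallbackFulfillsExpectation {
  XCTestExpectation *callbackExpectation =
      [self expectationWithDescription:@"Callback fired."];
  dispatch_async(dispatch_get_global_queue(QOS_CLASS_DEFAULT, 0), ^{
    // Stands in for the delegate callback the real tests wait on.
    [callbackExpectation fulfill];
  });
  [self waitForExpectationsWithTimeout:20
                               handler:^(NSError *error) {
                                 if (error) {
                                   XCTFail(@"Expectation failed: %@.", error);
                                 }
                               }];
}

@end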
+#if !TARGET_IPHONE_SIMULATOR // Expect to fail on simulator due to no camera + // support - (void)testSessionShouldGetLocalVideoTrackCallback { ARDAppClient *caller = nil; NSString *roomId = @"testRoom"; @@ -241,25 +252,31 @@ - (void)testSessionShouldGetLocalVideoTrackCallback { [self expectationWithDescription:@"Caller got local video."]; caller = [self createAppClientForRoomId:roomId - clientId:callerId - isInitiator:YES - messages:[NSArray array] - messageHandler:^(ARDSignalingMessage *message) {} - connectedHandler:^{} - localVideoTrackHandler:^{ [localVideoTrackExpectation fulfill]; }]; - caller.defaultPeerConnectionConstraints = - [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil - optionalConstraints:nil]; + clientId:callerId + isInitiator:YES + messages:[NSArray array] + messageHandler:^(ARDSignalingMessage *message) { + } + connectedHandler:^{ + } + localVideoTrackHandler:^{ + [localVideoTrackExpectation fulfill]; + }]; + caller.defaultPeerConnectionConstraints = [[RTC_OBJC_TYPE(RTCMediaConstraints) + alloc] initWithMandatoryConstraints:nil optionalConstraints:nil]; // Kick off connection. [caller connectToRoomWithId:roomId settings:[self mockSettingsModel] isLoopback:NO]; - [self waitForExpectationsWithTimeout:20 handler:^(NSError *error) { - if (error) { - XCTFail("Expectation timed out with error: %@.", error); - } - }]; + [self waitForExpectationsWithTimeout:20 + handler:^(NSError *error) { + if (error) { + XCTFail( + "Expectation timed out with error: %@.", + error); + } + }]; } #endif diff --git a/examples/objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm b/examples/objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm index 2e39834190..0a057cb20f 100644 --- a/examples/objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm +++ b/examples/objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm @@ -31,7 +31,8 @@ @implementation ARDFileCaptureControllerTests - (void)setUp { [super setUp]; - self.fileCapturerMock = OCMClassMock([RTC_OBJC_TYPE(RTCFileVideoCapturer) class]); + self.fileCapturerMock = + OCMClassMock([RTC_OBJC_TYPE(RTCFileVideoCapturer) class]); self.fileCaptureController = [[ARDFileCaptureController alloc] initWithCapturer:self.fileCapturerMock]; } @@ -44,7 +45,8 @@ - (void)tearDown { } - (void)testCaptureIsStarted { - [[self.fileCapturerMock expect] startCapturingFromFileNamed:[OCMArg any] onError:[OCMArg any]]; + [[self.fileCapturerMock expect] startCapturingFromFileNamed:[OCMArg any] + onError:[OCMArg any]]; [self.fileCaptureController startCapture]; diff --git a/examples/objc/AppRTCMobile/tests/ARDSettingsModel_xctest.mm b/examples/objc/AppRTCMobile/tests/ARDSettingsModel_xctest.mm index dc62798963..c435d65c7d 100644 --- a/examples/objc/AppRTCMobile/tests/ARDSettingsModel_xctest.mm +++ b/examples/objc/AppRTCMobile/tests/ARDSettingsModel_xctest.mm @@ -17,7 +17,6 @@ #import "ARDSettingsModel+Private.h" #import "ARDSettingsStore.h" - @interface ARDSettingsModelTests : XCTestCase { ARDSettingsModel *_model; } @@ -50,13 +49,13 @@ - (void)testRetrievingSetting { - (void)testStoringInvalidConstraintReturnsNo { id storeMock = [self setupMockStore]; - [([[storeMock stub] andReturn:@"960x480"])videoResolution]; + [([[storeMock stub] andReturn:@"960x480"]) videoResolution]; XCTAssertFalse([_model storeVideoResolutionSetting:@"960x480"]); } - (void)testWidthConstraintFromStore { id storeMock = [self setupMockStore]; - [([[storeMock stub] andReturn:@"1270x480"])videoResolution]; + [([[storeMock stub] andReturn:@"1270x480"]) 
videoResolution]; int width = [_model currentVideoResolutionWidthFromStore]; XCTAssertEqual(width, 1270); @@ -64,7 +63,7 @@ - (void)testWidthConstraintFromStore { - (void)testHeightConstraintFromStore { id storeMock = [self setupMockStore]; - [([[storeMock stub] andReturn:@"960x540"])videoResolution]; + [([[storeMock stub] andReturn:@"960x540"]) videoResolution]; int height = [_model currentVideoResolutionHeightFromStore]; XCTAssertEqual(height, 540); @@ -72,7 +71,7 @@ - (void)testHeightConstraintFromStore { - (void)testConstraintComponentIsNilWhenInvalidConstraintString { id storeMock = [self setupMockStore]; - [([[storeMock stub] andReturn:@"invalid"])videoResolution]; + [([[storeMock stub] andReturn:@"invalid"]) videoResolution]; int width = [_model currentVideoResolutionWidthFromStore]; XCTAssertEqual(width, 0); diff --git a/examples/objc/AppRTCMobile/tests/main.mm b/examples/objc/AppRTCMobile/tests/main.mm index 3625ffd7bf..fdac0eaf1c 100644 --- a/examples/objc/AppRTCMobile/tests/main.mm +++ b/examples/objc/AppRTCMobile/tests/main.mm @@ -13,7 +13,7 @@ #include "test/ios/coverage_util_ios.h" int main(int argc, char* argv[]) { - rtc::test::ConfigureCoverageReportPath(); + webrtc::test::ConfigureCoverageReportPath(); @autoreleasepool { return UIApplicationMain(argc, argv, nil, nil); diff --git a/examples/objc/AppRTCMobile/third_party/.clang-format b/examples/objc/AppRTCMobile/third_party/.clang-format new file mode 100644 index 0000000000..e3845288a2 --- /dev/null +++ b/examples/objc/AppRTCMobile/third_party/.clang-format @@ -0,0 +1 @@ +DisableFormat: true diff --git a/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.h b/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.h index a230646073..06d745a0fa 100644 --- a/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.h +++ b/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.h @@ -59,7 +59,8 @@ extern NSString *const SRHTTPResponseErrorKey; // It will be nil until after the handshake completes. @property(nonatomic, readonly, copy) NSString *protocol; -// Protocols should be an array of strings that turn into Sec-WebSocket-Protocol. +// Protocols should be an array of strings that turn into +// Sec-WebSocket-Protocol. - (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols; - (id)initWithURLRequest:(NSURLRequest *)request; @@ -72,11 +73,13 @@ extern NSString *const SRHTTPResponseErrorKey; - (void)setDelegateOperationQueue:(NSOperationQueue *)queue; - (void)setDelegateDispatchQueue:(dispatch_queue_t)queue; -// By default, it will schedule itself on +[NSRunLoop SR_networkRunLoop] using defaultModes. +// By default, it will schedule itself on +[NSRunLoop SR_networkRunLoop] using +// defaultModes. - (void)scheduleInRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode; - (void)unscheduleFromRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode; -// SRWebSockets are intended for one-time-use only. Open should be called once and only once. +// SRWebSockets are intended for one-time-use only. Open should be called once +// and only once. 
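// Editor's note: illustrative usage sketch only, not part of the patch. It
// strings together the declarations in this header — initialize with a
// request, open exactly once, and close when done. The `delegate` property is
// assumed from upstream SocketRocket rather than shown in this excerpt.

#import <Foundation/Foundation.h>

static SRWebSocket *EXOpenExampleSocket(id delegate) {
  NSURL *url = [NSURL URLWithString:@"wss://example.invalid/socket"];
  NSURLRequest *request = [NSURLRequest requestWithURL:url];
  SRWebSocket *socket = [[SRWebSocket alloc] initWithURLRequest:request];
  socket.delegate = delegate;  // Messages arrive via the delegate callbacks.
  [socket open];               // One-time use: open once and only once.
  return socket;               // Caller keeps the reference and later -close.
}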
- (void)open; - (void)close; diff --git a/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m b/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m index ab0d1b89bc..68f3e93f7f 100644 --- a/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m +++ b/examples/objc/AppRTCMobile/third_party/SocketRocket/SRWebSocket.m @@ -527,9 +527,11 @@ - (void)didConnect CFHTTPMessageSetHeaderFieldValue(request, CFSTR("Sec-WebSocket-Protocol"), (__bridge CFStringRef)[_requestedProtocols componentsJoinedByString:@", "]); } - [_urlRequest.allHTTPHeaderFields enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop) { - CFHTTPMessageSetHeaderFieldValue(request, (__bridge CFStringRef)key, (__bridge CFStringRef)obj); - }]; + [_urlRequest.allHTTPHeaderFields + enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop __unused) { + CFHTTPMessageSetHeaderFieldValue( + request, (__bridge CFStringRef)key, (__bridge CFStringRef)obj); + }]; NSData *message = CFBridgingRelease(CFHTTPMessageCopySerializedMessage(request)); @@ -1078,12 +1080,19 @@ - (void)_pumpWriting; NSUInteger dataLength = _outputBuffer.length; if (dataLength - _outputBufferOffset > 0 && _outputStream.hasSpaceAvailable) { - NSInteger bytesWritten = [_outputStream write:_outputBuffer.bytes + _outputBufferOffset maxLength:dataLength - _outputBufferOffset]; - if (bytesWritten == -1) { - [self _failWithError:[NSError errorWithDomain:SRWebSocketErrorDomain code:2145 userInfo:[NSDictionary dictionaryWithObject:@"Error writing to stream" forKey:NSLocalizedDescriptionKey]]]; - return; - } - + NSInteger bytesWritten = + [_outputStream write:(const unsigned char *)_outputBuffer.bytes + _outputBufferOffset + maxLength:dataLength - _outputBufferOffset]; + if (bytesWritten == -1) { + [self _failWithError: + [NSError errorWithDomain:SRWebSocketErrorDomain + code:2145 + userInfo:[NSDictionary + dictionaryWithObject:@"Error writing to stream" + forKey:NSLocalizedDescriptionKey]]]; + return; + } + _outputBufferOffset += bytesWritten; if (_outputBufferOffset > 4096 && _outputBufferOffset > (_outputBuffer.length >> 1)) { @@ -1627,7 +1636,7 @@ - (NSString *)SR_origin; //#define SR_ENABLE_LOG -static inline void SRFastLog(NSString *format, ...) { +static inline void SRFastLog(NSString *format, ...) { #ifdef SR_ENABLE_LOG __block va_list arg_list; va_start (arg_list, format); @@ -1637,10 +1646,12 @@ static inline void SRFastLog(NSString *format, ...) { va_end(arg_list); NSLog(@"[SR] %@", formattedString); +#else + // Cannot use [[maybe_unused]] here since this file might compile with GCC in objc context. + (void)format; #endif } - #ifdef HAS_ICU static inline int32_t validate_dispatch_data_partial_string(NSData *data) { diff --git a/examples/objcnativeapi/objc/NADAppDelegate.m b/examples/objcnativeapi/objc/NADAppDelegate.m index 254dd3be76..27b6917e12 100644 --- a/examples/objcnativeapi/objc/NADAppDelegate.m +++ b/examples/objcnativeapi/objc/NADAppDelegate.m @@ -31,33 +31,36 @@ - (BOOL)application:(UIApplication *)application } - (void)applicationWillResignActive:(UIApplication *)application { - // Sent when the application is about to move from active to inactive state. This can occur for - // certain types of temporary interruptions (such as an incoming phone call or SMS message) or - // when the user quits the application and it begins the transition to the background state. 
Use - // this method to pause ongoing tasks, disable timers, and invalidate graphics rendering + // Sent when the application is about to move from active to inactive state. + // This can occur for certain types of temporary interruptions (such as an + // incoming phone call or SMS message) or when the user quits the application + // and it begins the transition to the background state. Use this method to + // pause ongoing tasks, disable timers, and invalidate graphics rendering // callbacks. Games should use this method to pause the game. } - (void)applicationDidEnterBackground:(UIApplication *)application { - // Use this method to release shared resources, save user data, invalidate timers, and store - // enough application state information to restore your application to its current state in case - // it is terminated later. If your application supports background execution, this method is - // called instead of applicationWillTerminate: when the user quits. + // Use this method to release shared resources, save user data, invalidate + // timers, and store enough application state information to restore your + // application to its current state in case it is terminated later. If your + // application supports background execution, this method is called instead of + // applicationWillTerminate: when the user quits. } - (void)applicationWillEnterForeground:(UIApplication *)application { - // Called as part of the transition from the background to the active state; here you can undo - // many of the changes made on entering the background. + // Called as part of the transition from the background to the active state; + // here you can undo many of the changes made on entering the background. } - (void)applicationDidBecomeActive:(UIApplication *)application { - // Restart any tasks that were paused (or not yet started) while the application was inactive. If - // the application was previously in the background, optionally refresh the user interface. + // Restart any tasks that were paused (or not yet started) while the + // application was inactive. If the application was previously in the + // background, optionally refresh the user interface. } - (void)applicationWillTerminate:(UIApplication *)application { - // Called when the application is about to terminate. Save data if appropriate. See also - // applicationDidEnterBackground:. + // Called when the application is about to terminate. Save data if + // appropriate. See also applicationDidEnterBackground:. 
} @end diff --git a/examples/objcnativeapi/objc/NADViewController.mm b/examples/objcnativeapi/objc/NADViewController.mm index fd244799f8..679949b47d 100644 --- a/examples/objcnativeapi/objc/NADViewController.mm +++ b/examples/objcnativeapi/objc/NADViewController.mm @@ -23,7 +23,8 @@ @interface NADViewController () @property(nonatomic) RTC_OBJC_TYPE(RTCCameraVideoCapturer) * capturer; @property(nonatomic) RTC_OBJC_TYPE(RTCCameraPreviewView) * localVideoView; -@property(nonatomic) __kindof UIView *remoteVideoView; +@property(nonatomic) + __kindof UIView *remoteVideoView; @property(nonatomic) UIButton *callButton; @property(nonatomic) UIButton *hangUpButton; @@ -46,18 +47,22 @@ @implementation NADViewController { - (void)loadView { _view = [[UIView alloc] initWithFrame:CGRectZero]; - _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero]; + _remoteVideoView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero]; _remoteVideoView.translatesAutoresizingMaskIntoConstraints = NO; [_view addSubview:_remoteVideoView]; - _localVideoView = [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero]; + _localVideoView = + [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero]; _localVideoView.translatesAutoresizingMaskIntoConstraints = NO; [_view addSubview:_localVideoView]; _callButton = [UIButton buttonWithType:UIButtonTypeSystem]; _callButton.translatesAutoresizingMaskIntoConstraints = NO; [_callButton setTitle:@"Call" forState:UIControlStateNormal]; - [_callButton addTarget:self action:@selector(call:) forControlEvents:UIControlEventTouchUpInside]; + [_callButton addTarget:self + action:@selector(call:) + forControlEvents:UIControlEventTouchUpInside]; [_view addSubview:_callButton]; _hangUpButton = [UIButton buttonWithType:UIButtonTypeSystem]; @@ -69,27 +74,40 @@ - (void)loadView { [_view addSubview:_hangUpButton]; UILayoutGuide *margin = _view.layoutMarginsGuide; - [_remoteVideoView.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES; - [_remoteVideoView.topAnchor constraintEqualToAnchor:margin.topAnchor].active = YES; - [_remoteVideoView.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES; - [_remoteVideoView.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor].active = YES; - - [_localVideoView.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor constant:8.0].active = + [_remoteVideoView.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor] + .active = YES; + [_remoteVideoView.topAnchor constraintEqualToAnchor:margin.topAnchor].active = YES; - [_localVideoView.topAnchor constraintEqualToAnchor:margin.topAnchor constant:8.0].active = YES; + [_remoteVideoView.trailingAnchor + constraintEqualToAnchor:margin.trailingAnchor] + .active = YES; + [_remoteVideoView.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor] + .active = YES; + + [_localVideoView.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor + constant:8.0] + .active = YES; + [_localVideoView.topAnchor constraintEqualToAnchor:margin.topAnchor + constant:8.0] + .active = YES; [_localVideoView.widthAnchor constraintEqualToConstant:60].active = YES; [_localVideoView.heightAnchor constraintEqualToConstant:60].active = YES; - [_callButton.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor constant:8.0].active = - YES; - [_callButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:8.0].active = YES; + [_callButton.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor + 
constant:8.0] + .active = YES; + [_callButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor + constant:8.0] + .active = YES; [_callButton.widthAnchor constraintEqualToConstant:100].active = YES; [_callButton.heightAnchor constraintEqualToConstant:40].active = YES; - [_hangUpButton.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor constant:8.0].active = - YES; - [_hangUpButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:8.0].active = - YES; + [_hangUpButton.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor + constant:8.0] + .active = YES; + [_hangUpButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor + constant:8.0] + .active = YES; [_hangUpButton.widthAnchor constraintEqualToConstant:100].active = YES; [_hangUpButton.heightAnchor constraintEqualToConstant:40].active = YES; @@ -120,20 +138,27 @@ - (void)viewDidLoad { int targetHeight = 480; int currentDiff = INT_MAX; NSArray *formats = - [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:selectedDevice]; + [RTC_OBJC_TYPE(RTCCameraVideoCapturer) + supportedFormatsForDevice:selectedDevice]; for (AVCaptureDeviceFormat *format in formats) { - CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription); - int diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height); + CMVideoDimensions dimension = + CMVideoFormatDescriptionGetDimensions(format.formatDescription); + FourCharCode pixelFormat = + CMFormatDescriptionGetMediaSubType(format.formatDescription); + int diff = abs(targetWidth - dimension.width) + + abs(targetHeight - dimension.height); if (diff < currentDiff) { selectedFormat = format; currentDiff = diff; - } else if (diff == currentDiff && pixelFormat == [_capturer preferredOutputPixelFormat]) { + } else if (diff == currentDiff && + pixelFormat == [_capturer preferredOutputPixelFormat]) { selectedFormat = format; } } - [self.capturer startCaptureWithDevice:selectedDevice format:selectedFormat fps:30]; + [self.capturer startCaptureWithDevice:selectedDevice + format:selectedFormat + fps:30]; } - (void)didReceiveMemoryWarning { diff --git a/examples/objcnativeapi/objc/main.m b/examples/objcnativeapi/objc/main.m index 2c3b5fbbfb..07877d81c7 100644 --- a/examples/objcnativeapi/objc/main.m +++ b/examples/objcnativeapi/objc/main.m @@ -13,6 +13,7 @@ int main(int argc, char* argv[]) { @autoreleasepool { - return UIApplicationMain(argc, argv, nil, NSStringFromClass([NADAppDelegate class])); + return UIApplicationMain( + argc, argv, nil, NSStringFromClass([NADAppDelegate class])); } } diff --git a/examples/objcnativeapi/objc/objc_call_client.h b/examples/objcnativeapi/objc/objc_call_client.h index cb8501d9ce..658500d1ac 100644 --- a/examples/objcnativeapi/objc/objc_call_client.h +++ b/examples/objcnativeapi/objc/objc_call_client.h @@ -40,14 +40,17 @@ class ObjCCallClient { public: explicit PCObserver(ObjCCallClient* client); - void OnSignalingChange(webrtc::PeerConnectionInterface::SignalingState new_state) override; - void OnDataChannel(rtc::scoped_refptr data_channel) override; + void OnSignalingChange( + webrtc::PeerConnectionInterface::SignalingState new_state) override; + void OnDataChannel(webrtc::scoped_refptr + data_channel) override; void OnRenegotiationNeeded() override; void OnIceConnectionChange( webrtc::PeerConnectionInterface::IceConnectionState new_state) override; void OnIceGatheringChange( 
webrtc::PeerConnectionInterface::IceGatheringState new_state) override; - void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override; + void OnIceCandidate( + const webrtc::IceCandidateInterface* candidate) override; private: ObjCCallClient* const client_; @@ -63,18 +66,23 @@ class ObjCCallClient { const std::unique_ptr pc_observer_; - rtc::scoped_refptr pcf_ RTC_GUARDED_BY(thread_checker_); - std::unique_ptr network_thread_ RTC_GUARDED_BY(thread_checker_); - std::unique_ptr worker_thread_ RTC_GUARDED_BY(thread_checker_); - std::unique_ptr signaling_thread_ RTC_GUARDED_BY(thread_checker_); + webrtc::scoped_refptr pcf_ + RTC_GUARDED_BY(thread_checker_); + std::unique_ptr network_thread_ + RTC_GUARDED_BY(thread_checker_); + std::unique_ptr worker_thread_ + RTC_GUARDED_BY(thread_checker_); + std::unique_ptr signaling_thread_ + RTC_GUARDED_BY(thread_checker_); - std::unique_ptr> remote_sink_ + std::unique_ptr> remote_sink_ RTC_GUARDED_BY(thread_checker_); - rtc::scoped_refptr video_source_ + webrtc::scoped_refptr video_source_ RTC_GUARDED_BY(thread_checker_); webrtc::Mutex pc_mutex_; - rtc::scoped_refptr pc_ RTC_GUARDED_BY(pc_mutex_); + webrtc::scoped_refptr pc_ + RTC_GUARDED_BY(pc_mutex_); }; } // namespace webrtc_examples diff --git a/examples/objcnativeapi/objc/objc_call_client.mm b/examples/objcnativeapi/objc/objc_call_client.mm index 90bcfcc35b..15406ad7d0 100644 --- a/examples/objcnativeapi/objc/objc_call_client.mm +++ b/examples/objcnativeapi/objc/objc_call_client.mm @@ -18,13 +18,14 @@ #import "sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h" #import "sdk/objc/helpers/RTCCameraPreviewView.h" +#include "api/audio/audio_processing.h" +#include "api/audio/builtin_audio_processing_builder.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/enable_media.h" #include "api/peer_connection_interface.h" #include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/task_queue/default_task_queue_factory.h" -#include "media/engine/webrtc_media_engine.h" -#include "modules/audio_processing/include/audio_processing.h" #include "sdk/objc/native/api/video_capturer.h" #include "sdk/objc/native/api/video_decoder_factory.h" #include "sdk/objc/native/api/video_encoder_factory.h" @@ -36,21 +37,24 @@ class CreateOfferObserver : public webrtc::CreateSessionDescriptionObserver { public: - explicit CreateOfferObserver(rtc::scoped_refptr pc); + explicit CreateOfferObserver( + webrtc::scoped_refptr pc); void OnSuccess(webrtc::SessionDescriptionInterface* desc) override; void OnFailure(webrtc::RTCError error) override; private: - const rtc::scoped_refptr pc_; + const webrtc::scoped_refptr pc_; }; -class SetRemoteSessionDescriptionObserver : public webrtc::SetRemoteDescriptionObserverInterface { +class SetRemoteSessionDescriptionObserver + : public webrtc::SetRemoteDescriptionObserverInterface { public: void OnSetRemoteDescriptionComplete(webrtc::RTCError error) override; }; -class SetLocalSessionDescriptionObserver : public webrtc::SetLocalDescriptionObserverInterface { +class SetLocalSessionDescriptionObserver + : public webrtc::SetLocalDescriptionObserverInterface { public: void OnSetLocalDescriptionComplete(webrtc::RTCError error) override; }; @@ -76,8 +80,8 @@ remote_sink_ = webrtc::ObjCToNativeVideoRenderer(remote_renderer); - video_source_ = - webrtc::ObjCToNativeVideoCapturer(capturer, signaling_thread_.get(), worker_thread_.get()); + video_source_ = webrtc::ObjCToNativeVideoCapturer( 
+ capturer, signaling_thread_.get(), worker_thread_.get()); CreatePeerConnection(); Connect(); @@ -101,15 +105,15 @@ } void ObjCCallClient::CreatePeerConnectionFactory() { - network_thread_ = rtc::Thread::CreateWithSocketServer(); + network_thread_ = webrtc::Thread::CreateWithSocketServer(); network_thread_->SetName("network_thread", nullptr); RTC_CHECK(network_thread_->Start()) << "Failed to start thread"; - worker_thread_ = rtc::Thread::Create(); + worker_thread_ = webrtc::Thread::Create(); worker_thread_->SetName("worker_thread", nullptr); RTC_CHECK(worker_thread_->Start()) << "Failed to start thread"; - signaling_thread_ = rtc::Thread::Create(); + signaling_thread_ = webrtc::Thread::Create(); signaling_thread_->SetName("signaling_thread", nullptr); RTC_CHECK(signaling_thread_->Start()) << "Failed to start thread"; @@ -118,20 +122,19 @@ dependencies.worker_thread = worker_thread_.get(); dependencies.signaling_thread = signaling_thread_.get(); dependencies.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory(); - cricket::MediaEngineDependencies media_deps; - media_deps.task_queue_factory = dependencies.task_queue_factory.get(); - media_deps.audio_encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory(); - media_deps.audio_decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory(); - media_deps.video_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory( + dependencies.audio_encoder_factory = + webrtc::CreateBuiltinAudioEncoderFactory(); + dependencies.audio_decoder_factory = + webrtc::CreateBuiltinAudioDecoderFactory(); + dependencies.video_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory( [[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init]); - media_deps.video_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory( + dependencies.video_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory( [[RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) alloc] init]); - media_deps.audio_processing = webrtc::AudioProcessingBuilder().Create(); - dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_deps)); - RTC_LOG(LS_INFO) << "Media engine created: " << dependencies.media_engine.get(); - dependencies.call_factory = webrtc::CreateCallFactory(); + dependencies.audio_processing_builder = + std::make_unique(); + webrtc::EnableMedia(dependencies); dependencies.event_log_factory = - std::make_unique(dependencies.task_queue_factory.get()); + std::make_unique(); pcf_ = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies)); RTC_LOG(LS_INFO) << "PeerConnectionFactory created: " << pcf_.get(); } @@ -145,20 +148,23 @@ options.disable_encryption = true; pcf_->SetOptions(options); webrtc::PeerConnectionDependencies pc_dependencies(pc_observer_.get()); - pc_ = pcf_->CreatePeerConnectionOrError(config, std::move(pc_dependencies)).MoveValue(); + pc_ = pcf_->CreatePeerConnectionOrError(config, std::move(pc_dependencies)) + .MoveValue(); RTC_LOG(LS_INFO) << "PeerConnection created: " << pc_.get(); - rtc::scoped_refptr local_video_track = + webrtc::scoped_refptr local_video_track = pcf_->CreateVideoTrack(video_source_, "video"); pc_->AddTransceiver(local_video_track); RTC_LOG(LS_INFO) << "Local video sink set up: " << local_video_track.get(); - for (const rtc::scoped_refptr& tranceiver : - pc_->GetTransceivers()) { - rtc::scoped_refptr track = tranceiver->receiver()->track(); - if (track && track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) { + for (const webrtc::scoped_refptr& + tranceiver : pc_->GetTransceivers()) { + 
webrtc::scoped_refptr track = + tranceiver->receiver()->track(); + if (track && + track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) { static_cast(track.get()) - ->AddOrUpdateSink(remote_sink_.get(), rtc::VideoSinkWants()); + ->AddOrUpdateSink(remote_sink_.get(), webrtc::VideoSinkWants()); RTC_LOG(LS_INFO) << "Remote video sink set up: " << track.get(); break; } @@ -167,11 +173,12 @@ void ObjCCallClient::Connect() { webrtc::MutexLock lock(&pc_mutex_); - pc_->CreateOffer(rtc::make_ref_counted(pc_).get(), + pc_->CreateOffer(webrtc::make_ref_counted(pc_).get(), webrtc::PeerConnectionInterface::RTCOfferAnswerOptions()); } -ObjCCallClient::PCObserver::PCObserver(ObjCCallClient* client) : client_(client) {} +ObjCCallClient::PCObserver::PCObserver(ObjCCallClient* client) + : client_(client) {} void ObjCCallClient::PCObserver::OnSignalingChange( webrtc::PeerConnectionInterface::SignalingState new_state) { @@ -179,7 +186,7 @@ } void ObjCCallClient::PCObserver::OnDataChannel( - rtc::scoped_refptr data_channel) { + webrtc::scoped_refptr data_channel) { RTC_LOG(LS_INFO) << "OnDataChannel"; } @@ -197,14 +204,16 @@ RTC_LOG(LS_INFO) << "OnIceGatheringChange: " << new_state; } -void ObjCCallClient::PCObserver::OnIceCandidate(const webrtc::IceCandidateInterface* candidate) { +void ObjCCallClient::PCObserver::OnIceCandidate( + const webrtc::IceCandidateInterface* candidate) { RTC_LOG(LS_INFO) << "OnIceCandidate: " << candidate->server_url(); webrtc::MutexLock lock(&client_->pc_mutex_); RTC_DCHECK(client_->pc_ != nullptr); client_->pc_->AddIceCandidate(candidate); } -CreateOfferObserver::CreateOfferObserver(rtc::scoped_refptr pc) +CreateOfferObserver::CreateOfferObserver( + webrtc::scoped_refptr pc) : pc_(pc) {} void CreateOfferObserver::OnSuccess(webrtc::SessionDescriptionInterface* desc) { @@ -213,25 +222,29 @@ RTC_LOG(LS_INFO) << "Created offer: " << sdp; // Ownership of desc was transferred to us, now we transfer it forward. - pc_->SetLocalDescription(absl::WrapUnique(desc), - rtc::make_ref_counted()); + pc_->SetLocalDescription( + absl::WrapUnique(desc), + webrtc::make_ref_counted()); // Generate a fake answer. 
std::unique_ptr answer( webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, sdp)); - pc_->SetRemoteDescription(std::move(answer), - rtc::make_ref_counted()); + pc_->SetRemoteDescription( + std::move(answer), + webrtc::make_ref_counted()); } void CreateOfferObserver::OnFailure(webrtc::RTCError error) { RTC_LOG(LS_INFO) << "Failed to create offer: " << error.message(); } -void SetRemoteSessionDescriptionObserver::OnSetRemoteDescriptionComplete(webrtc::RTCError error) { +void SetRemoteSessionDescriptionObserver::OnSetRemoteDescriptionComplete( + webrtc::RTCError error) { RTC_LOG(LS_INFO) << "Set remote description: " << error.message(); } -void SetLocalSessionDescriptionObserver::OnSetLocalDescriptionComplete(webrtc::RTCError error) { +void SetLocalSessionDescriptionObserver::OnSetLocalDescriptionComplete( + webrtc::RTCError error) { RTC_LOG(LS_INFO) << "Set local description: " << error.message(); } diff --git a/examples/peerconnection/client/conductor.cc b/examples/peerconnection/client/conductor.cc index f94a981a75..8356484207 100644 --- a/examples/peerconnection/client/conductor.cc +++ b/examples/peerconnection/client/conductor.cc @@ -11,48 +11,62 @@ #include "examples/peerconnection/client/conductor.h" #include -#include #include +#include +#include #include #include #include "absl/memory/memory.h" -#include "absl/types/optional.h" -#include "api/audio/audio_mixer.h" -#include "api/audio_codecs/audio_decoder_factory.h" -#include "api/audio_codecs/audio_encoder_factory.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/audio_options.h" #include "api/create_peerconnection_factory.h" +#include "api/enable_media.h" +#include "api/jsep.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_receiver_interface.h" #include "api/rtp_sender_interface.h" -#include "api/video_codecs/video_decoder_factory.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/test/create_frame_generator.h" +#include "api/video/video_frame.h" +#include "api/video/video_source_interface.h" #include "api/video_codecs/video_decoder_factory_template.h" #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h" -#include "api/video_codecs/video_encoder_factory.h" #include "api/video_codecs/video_encoder_factory_template.h" #include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h" #include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h" #include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" #include "examples/peerconnection/client/defaults.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" +#include "examples/peerconnection/client/main_wnd.h" +#include "examples/peerconnection/client/peer_connection_client.h" +#include "json/reader.h" +#include "json/value.h" +#include "json/writer.h" #include "modules/video_capture/video_capture.h" #include 
"modules/video_capture/video_capture_factory.h" -#include "p2p/base/port_allocator.h" #include "pc/video_track_source.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/strings/json.h" -#include "test/vcm_capturer.h" +#include "system_wrappers/include/clock.h" +#include "test/frame_generator_capturer.h" +#include "test/platform_video_capturer.h" +#include "test/test_video_capturer.h" namespace { +using webrtc::test::TestVideoCapturer; + // Names used for a IceCandidate JSON object. const char kCandidateSdpMidName[] = "sdpMid"; const char kCandidateSdpMlineIndexName[] = "sdpMLineIndex"; @@ -65,8 +79,8 @@ const char kSessionDescriptionSdpName[] = "sdp"; class DummySetSessionDescriptionObserver : public webrtc::SetSessionDescriptionObserver { public: - static rtc::scoped_refptr Create() { - return rtc::make_ref_counted(); + static webrtc::scoped_refptr Create() { + return webrtc::make_ref_counted(); } virtual void OnSuccess() { RTC_LOG(LS_INFO) << __FUNCTION__; } virtual void OnFailure(webrtc::RTCError error) { @@ -75,40 +89,53 @@ class DummySetSessionDescriptionObserver } }; +std::unique_ptr CreateCapturer( + webrtc::TaskQueueFactory& task_queue_factory) { + const size_t kWidth = 640; + const size_t kHeight = 480; + const size_t kFps = 30; + std::unique_ptr info( + webrtc::VideoCaptureFactory::CreateDeviceInfo()); + if (!info) { + return nullptr; + } + int num_devices = info->NumberOfDevices(); + for (int i = 0; i < num_devices; ++i) { + std::unique_ptr capturer = + webrtc::test::CreateVideoCapturer(kWidth, kHeight, kFps, i); + if (capturer) { + return capturer; + } + } + auto frame_generator = webrtc::test::CreateSquareFrameGenerator( + kWidth, kHeight, std::nullopt, std::nullopt); + return std::make_unique( + webrtc::Clock::GetRealTimeClock(), std::move(frame_generator), kFps, + task_queue_factory); +} class CapturerTrackSource : public webrtc::VideoTrackSource { public: - static rtc::scoped_refptr Create() { - const size_t kWidth = 640; - const size_t kHeight = 480; - const size_t kFps = 30; - std::unique_ptr capturer; - std::unique_ptr info( - webrtc::VideoCaptureFactory::CreateDeviceInfo()); - if (!info) { - return nullptr; + static webrtc::scoped_refptr Create( + webrtc::TaskQueueFactory& task_queue_factory) { + std::unique_ptr capturer = + CreateCapturer(task_queue_factory); + if (capturer) { + capturer->Start(); + return webrtc::make_ref_counted(std::move(capturer)); } - int num_devices = info->NumberOfDevices(); - for (int i = 0; i < num_devices; ++i) { - capturer = absl::WrapUnique( - webrtc::test::VcmCapturer::Create(kWidth, kHeight, kFps, i)); - if (capturer) { - return rtc::make_ref_counted(std::move(capturer)); - } - } - return nullptr; } protected: - explicit CapturerTrackSource( - std::unique_ptr capturer) + explicit CapturerTrackSource(std::unique_ptr capturer) : VideoTrackSource(/*remote=*/false), capturer_(std::move(capturer)) {} private: - rtc::VideoSourceInterface* source() override { + webrtc::VideoSourceInterface* source() override { return capturer_.get(); } - std::unique_ptr capturer_; + + std::unique_ptr capturer_; }; } // namespace @@ -137,25 +164,31 @@ bool Conductor::InitializePeerConnection() { RTC_DCHECK(!peer_connection_); if (!signaling_thread_.get()) { - signaling_thread_ = rtc::Thread::CreateWithSocketServer(); + signaling_thread_ = webrtc::Thread::CreateWithSocketServer(); signaling_thread_->Start(); } - peer_connection_factory_ = webrtc::CreatePeerConnectionFactory( - nullptr 
/* network_thread */, nullptr /* worker_thread */, - signaling_thread_.get(), nullptr /* default_adm */, - webrtc::CreateBuiltinAudioEncoderFactory(), - webrtc::CreateBuiltinAudioDecoderFactory(), + + webrtc::PeerConnectionFactoryDependencies deps; + deps.signaling_thread = signaling_thread_.get(); + deps.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory(), + deps.audio_encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory(); + deps.audio_decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory(); + deps.video_encoder_factory = std::make_unique>(), + webrtc::LibaomAv1EncoderTemplateAdapter>>(); + deps.video_decoder_factory = std::make_unique>(), - nullptr /* audio_mixer */, nullptr /* audio_processing */); + webrtc::Dav1dDecoderTemplateAdapter>>(); + webrtc::EnableMedia(deps); + task_queue_factory_ = deps.task_queue_factory.get(); + peer_connection_factory_ = + webrtc::CreateModularPeerConnectionFactory(std::move(deps)); if (!peer_connection_factory_) { main_wnd_->MessageBox("Error", "Failed to initialize PeerConnectionFactory", @@ -176,7 +209,7 @@ bool Conductor::InitializePeerConnection() { bool Conductor::ReinitializePeerConnectionForLoopback() { loopback_ = true; - std::vector> senders = + std::vector> senders = peer_connection_->GetSenders(); peer_connection_ = nullptr; // Loopback is only possible if encryption is disabled. @@ -237,8 +270,8 @@ void Conductor::EnsureStreamingUI() { // void Conductor::OnAddTrack( - rtc::scoped_refptr receiver, - const std::vector>& + webrtc::scoped_refptr receiver, + const std::vector>& streams) { RTC_LOG(LS_INFO) << __FUNCTION__ << " " << receiver->id(); main_wnd_->QueueUIThreadCallback(NEW_TRACK_ADDED, @@ -246,7 +279,7 @@ void Conductor::OnAddTrack( } void Conductor::OnRemoveTrack( - rtc::scoped_refptr receiver) { + webrtc::scoped_refptr receiver) { RTC_LOG(LS_INFO) << __FUNCTION__ << " " << receiver->id(); main_wnd_->QueueUIThreadCallback(TRACK_REMOVED, receiver->track().release()); } @@ -345,8 +378,8 @@ void Conductor::OnMessageFromPeer(int peer_id, const std::string& message) { std::string type_str; std::string json_object; - rtc::GetStringFromJsonObject(jmessage, kSessionDescriptionTypeName, - &type_str); + webrtc::GetStringFromJsonObject(jmessage, kSessionDescriptionTypeName, + &type_str); if (!type_str.empty()) { if (type_str == "offer-loopback") { // This is a loopback call. 
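The InitializePeerConnection() hunk above replaces the old all-in-one webrtc::CreatePeerConnectionFactory() call with an explicitly populated webrtc::PeerConnectionFactoryDependencies, template-based codec factories, webrtc::EnableMedia(), and webrtc::CreateModularPeerConnectionFactory(). As a reading aid only (not part of the patch), here is a minimal self-contained sketch of that wiring: the helper name MakeFactory is invented for the example, the template parameter lists are filled in from the adapter headers this diff adds to conductor.cc, and the network/worker threads are left at their defaults as the example client does.

#include <memory>
#include <utility>

#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "api/enable_media.h"
#include "api/peer_connection_interface.h"
#include "api/scoped_refptr.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "api/video_codecs/video_decoder_factory_template.h"
#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h"
#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h"
#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h"
#include "api/video_codecs/video_encoder_factory_template.h"
#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h"
#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h"
#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h"
#include "rtc_base/thread.h"

// Illustrative helper; the name is invented for this sketch.
webrtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> MakeFactory(
    webrtc::Thread* signaling_thread) {
  webrtc::PeerConnectionFactoryDependencies deps;
  deps.signaling_thread = signaling_thread;
  deps.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory();
  deps.audio_encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory();
  deps.audio_decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory();
  // Codec support is chosen at compile time through the template adapters.
  deps.video_encoder_factory =
      std::make_unique<webrtc::VideoEncoderFactoryTemplate<
          webrtc::LibvpxVp8EncoderTemplateAdapter,
          webrtc::LibvpxVp9EncoderTemplateAdapter,
          webrtc::OpenH264EncoderTemplateAdapter,
          webrtc::LibaomAv1EncoderTemplateAdapter>>();
  deps.video_decoder_factory =
      std::make_unique<webrtc::VideoDecoderFactoryTemplate<
          webrtc::LibvpxVp8DecoderTemplateAdapter,
          webrtc::LibvpxVp9DecoderTemplateAdapter,
          webrtc::OpenH264DecoderTemplateAdapter,
          webrtc::Dav1dDecoderTemplateAdapter>>();
  // EnableMedia() attaches the media-engine pieces that the removed
  // webrtc::CreatePeerConnectionFactory() call used to wire up implicitly.
  webrtc::EnableMedia(deps);
  return webrtc::CreateModularPeerConnectionFactory(std::move(deps));
}

The design intent behind this split, as far as the patch shows, is that media support is only linked into the binary when EnableMedia() is actually called, while the rest of the factory setup stays explicit and testable.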
@@ -358,7 +391,7 @@ void Conductor::OnMessageFromPeer(int peer_id, const std::string& message) { } return; } - absl::optional type_maybe = + std::optional type_maybe = webrtc::SdpTypeFromString(type_str); if (!type_maybe) { RTC_LOG(LS_ERROR) << "Unknown SDP type: " << type_str; @@ -366,8 +399,8 @@ void Conductor::OnMessageFromPeer(int peer_id, const std::string& message) { } webrtc::SdpType type = *type_maybe; std::string sdp; - if (!rtc::GetStringFromJsonObject(jmessage, kSessionDescriptionSdpName, - &sdp)) { + if (!webrtc::GetStringFromJsonObject(jmessage, kSessionDescriptionSdpName, + &sdp)) { RTC_LOG(LS_WARNING) << "Can't parse received session description message."; return; @@ -394,11 +427,11 @@ void Conductor::OnMessageFromPeer(int peer_id, const std::string& message) { std::string sdp_mid; int sdp_mlineindex = 0; std::string sdp; - if (!rtc::GetStringFromJsonObject(jmessage, kCandidateSdpMidName, - &sdp_mid) || - !rtc::GetIntFromJsonObject(jmessage, kCandidateSdpMlineIndexName, - &sdp_mlineindex) || - !rtc::GetStringFromJsonObject(jmessage, kCandidateSdpName, &sdp)) { + if (!webrtc::GetStringFromJsonObject(jmessage, kCandidateSdpMidName, + &sdp_mid) || + !webrtc::GetIntFromJsonObject(jmessage, kCandidateSdpMlineIndexName, + &sdp_mlineindex) || + !webrtc::GetStringFromJsonObject(jmessage, kCandidateSdpName, &sdp)) { RTC_LOG(LS_WARNING) << "Can't parse received message."; return; } @@ -469,10 +502,10 @@ void Conductor::AddTracks() { return; // Already added tracks. } - rtc::scoped_refptr audio_track( + webrtc::scoped_refptr audio_track( peer_connection_factory_->CreateAudioTrack( kAudioLabel, - peer_connection_factory_->CreateAudioSource(cricket::AudioOptions()) + peer_connection_factory_->CreateAudioSource(webrtc::AudioOptions()) .get())); auto result_or_error = peer_connection_->AddTrack(audio_track, {kStreamId}); if (!result_or_error.ok()) { @@ -480,10 +513,10 @@ void Conductor::AddTracks() { << result_or_error.error().message(); } - rtc::scoped_refptr video_device = - CapturerTrackSource::Create(); + webrtc::scoped_refptr video_device = + CapturerTrackSource::Create(*task_queue_factory_); if (video_device) { - rtc::scoped_refptr video_track_( + webrtc::scoped_refptr video_track_( peer_connection_factory_->CreateVideoTrack(video_device, kVideoLabel)); main_wnd_->StartLocalRenderer(video_track_.get()); diff --git a/examples/peerconnection/client/conductor.h b/examples/peerconnection/client/conductor.h index 80617d3cf4..ffc5c27012 100644 --- a/examples/peerconnection/client/conductor.h +++ b/examples/peerconnection/client/conductor.h @@ -12,13 +12,18 @@ #define EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_ #include -#include #include #include #include +#include "api/data_channel_interface.h" +#include "api/jsep.h" #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_receiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_factory.h" #include "examples/peerconnection/client/main_wnd.h" #include "examples/peerconnection/client/peer_connection_client.h" #include "rtc_base/thread.h" @@ -27,10 +32,6 @@ namespace webrtc { class VideoCaptureModule; } // namespace webrtc -namespace cricket { -class VideoRenderer; -} // namespace cricket - class Conductor : public webrtc::PeerConnectionObserver, public webrtc::CreateSessionDescriptionObserver, public PeerConnectionClientObserver, @@ -66,13 +67,13 @@ class Conductor : public webrtc::PeerConnectionObserver, void OnSignalingChange( 
webrtc::PeerConnectionInterface::SignalingState new_state) override {} void OnAddTrack( - rtc::scoped_refptr receiver, - const std::vector>& + webrtc::scoped_refptr receiver, + const std::vector>& streams) override; void OnRemoveTrack( - rtc::scoped_refptr receiver) override; + webrtc::scoped_refptr receiver) override; void OnDataChannel( - rtc::scoped_refptr channel) override {} + webrtc::scoped_refptr channel) override {} void OnRenegotiationNeeded() override {} void OnIceConnectionChange( webrtc::PeerConnectionInterface::IceConnectionState new_state) override {} @@ -123,9 +124,10 @@ class Conductor : public webrtc::PeerConnectionObserver, int peer_id_; bool loopback_; - std::unique_ptr signaling_thread_; - rtc::scoped_refptr peer_connection_; - rtc::scoped_refptr + std::unique_ptr signaling_thread_; + webrtc::TaskQueueFactory* task_queue_factory_ = nullptr; + webrtc::scoped_refptr peer_connection_; + webrtc::scoped_refptr peer_connection_factory_; PeerConnectionClient* client_; MainWindow* main_wnd_; diff --git a/examples/peerconnection/client/linux/main.cc b/examples/peerconnection/client/linux/main.cc index ad3d671073..3278e9d0fd 100644 --- a/examples/peerconnection/client/linux/main.cc +++ b/examples/peerconnection/client/linux/main.cc @@ -24,13 +24,15 @@ #include "system_wrappers/include/field_trial.h" #include "test/field_trial.h" -class CustomSocketServer : public rtc::PhysicalSocketServer { +class CustomSocketServer : public webrtc::PhysicalSocketServer { public: explicit CustomSocketServer(GtkMainWnd* wnd) : wnd_(wnd), conductor_(NULL), client_(NULL) {} virtual ~CustomSocketServer() {} - void SetMessageQueue(rtc::Thread* queue) override { message_queue_ = queue; } + void SetMessageQueue(webrtc::Thread* queue) override { + message_queue_ = queue; + } void set_client(PeerConnectionClient* client) { client_ = client; } void set_conductor(Conductor* conductor) { conductor_ = conductor; } @@ -50,12 +52,12 @@ class CustomSocketServer : public rtc::PhysicalSocketServer { client_ != NULL && !client_->is_connected()) { message_queue_->Quit(); } - return rtc::PhysicalSocketServer::Wait(webrtc::TimeDelta::Zero(), - process_io); + return webrtc::PhysicalSocketServer::Wait(webrtc::TimeDelta::Zero(), + process_io); } protected: - rtc::Thread* message_queue_; + webrtc::Thread* message_queue_; GtkMainWnd* wnd_; Conductor* conductor_; PeerConnectionClient* client_; @@ -96,12 +98,12 @@ int main(int argc, char* argv[]) { wnd.Create(); CustomSocketServer socket_server(&wnd); - rtc::AutoSocketServerThread thread(&socket_server); + webrtc::AutoSocketServerThread thread(&socket_server); - rtc::InitializeSSL(); + webrtc::InitializeSSL(); // Must be constructed after we set the socketserver. 
PeerConnectionClient client; - auto conductor = rtc::make_ref_counted(&client, &wnd); + auto conductor = webrtc::make_ref_counted(&client, &wnd); socket_server.set_client(&client); socket_server.set_conductor(conductor.get()); @@ -116,6 +118,6 @@ int main(int argc, char* argv[]) { gtk_main_iteration(); } */ - rtc::CleanupSSL(); + webrtc::CleanupSSL(); return 0; } diff --git a/examples/peerconnection/client/linux/main_wnd.cc b/examples/peerconnection/client/linux/main_wnd.cc index 2be75d8f8d..c750365dbc 100644 --- a/examples/peerconnection/client/linux/main_wnd.cc +++ b/examples/peerconnection/client/linux/main_wnd.cc @@ -15,6 +15,7 @@ #include #include #include +#include #include #include #include @@ -22,17 +23,19 @@ #include #include -#include #include -#include +#include "api/media_stream_interface.h" +#include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" #include "api/video/video_frame_buffer.h" #include "api/video/video_rotation.h" #include "api/video/video_source_interface.h" +#include "examples/peerconnection/client/main_wnd.h" +#include "examples/peerconnection/client/peer_connection_client.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "third_party/libyuv/include/libyuv/convert.h" #include "third_party/libyuv/include/libyuv/convert_from.h" namespace { @@ -312,7 +315,7 @@ void GtkMainWnd::SwitchToPeerList(const Peers& peers) { } else if (draw_area_) { gtk_widget_destroy(draw_area_); draw_area_ = NULL; - draw_buffer_.reset(); + draw_buffer_.SetSize(0); } peer_list_ = gtk_tree_view_new(); @@ -424,68 +427,29 @@ void GtkMainWnd::OnRedraw() { gdk_threads_enter(); VideoRenderer* remote_renderer = remote_renderer_.get(); - if (remote_renderer && remote_renderer->image() != NULL && + if (remote_renderer && !remote_renderer->image().empty() && draw_area_ != NULL) { - width_ = remote_renderer->width(); - height_ = remote_renderer->height(); - - if (!draw_buffer_.get()) { - draw_buffer_size_ = (width_ * height_ * 4) * 4; - draw_buffer_.reset(new uint8_t[draw_buffer_size_]); - gtk_widget_set_size_request(draw_area_, width_ * 2, height_ * 2); - } - - const uint32_t* image = - reinterpret_cast(remote_renderer->image()); - uint32_t* scaled = reinterpret_cast(draw_buffer_.get()); - for (int r = 0; r < height_; ++r) { - for (int c = 0; c < width_; ++c) { - int x = c * 2; - scaled[x] = scaled[x + 1] = image[c]; - } - - uint32_t* prev_line = scaled; - scaled += width_ * 2; - memcpy(scaled, prev_line, (width_ * 2) * 4); - - image += width_; - scaled += width_ * 2; - } - - VideoRenderer* local_renderer = local_renderer_.get(); - if (local_renderer && local_renderer->image()) { - image = reinterpret_cast(local_renderer->image()); - scaled = reinterpret_cast(draw_buffer_.get()); - // Position the local preview on the right side. - scaled += (width_ * 2) - (local_renderer->width() / 2); - // right margin... - scaled -= 10; - // ... towards the bottom. - scaled += (height_ * width_ * 4) - ((local_renderer->height() / 2) * - (local_renderer->width() / 2) * 4); - // bottom margin... 
- scaled -= (width_ * 2) * 5; - for (int r = 0; r < local_renderer->height(); r += 2) { - for (int c = 0; c < local_renderer->width(); c += 2) { - scaled[c / 2] = image[c + r * local_renderer->width()]; - } - scaled += width_ * 2; - } + if (width_ != remote_renderer->width() || + height_ != remote_renderer->height()) { + width_ = remote_renderer->width(); + height_ = remote_renderer->height(); + gtk_widget_set_size_request(draw_area_, remote_renderer->width(), + remote_renderer->height()); } - + draw_buffer_.SetData(remote_renderer->image()); gtk_widget_queue_draw(draw_area_); } - + // Here we can draw the local preview as well if we want.... gdk_threads_leave(); } void GtkMainWnd::Draw(GtkWidget* widget, cairo_t* cr) { cairo_format_t format = CAIRO_FORMAT_ARGB32; cairo_surface_t* surface = cairo_image_surface_create_for_data( - draw_buffer_.get(), format, width_ * 2, height_ * 2, - cairo_format_stride_for_width(format, width_ * 2)); + draw_buffer_.data(), format, width_, height_, + cairo_format_stride_for_width(format, width_)); cairo_set_source_surface(cr, surface, 0, 0); - cairo_rectangle(cr, 0, 0, width_ * 2, height_ * 2); + cairo_rectangle(cr, 0, 0, width_, height_); cairo_fill(cr); cairo_surface_destroy(surface); } @@ -497,7 +461,7 @@ GtkMainWnd::VideoRenderer::VideoRenderer( height_(0), main_wnd_(main_wnd), rendered_track_(track_to_render) { - rendered_track_->AddOrUpdateSink(this, rtc::VideoSinkWants()); + rendered_track_->AddOrUpdateSink(this, webrtc::VideoSinkWants()); } GtkMainWnd::VideoRenderer::~VideoRenderer() { @@ -513,14 +477,15 @@ void GtkMainWnd::VideoRenderer::SetSize(int width, int height) { width_ = width; height_ = height; - image_.reset(new uint8_t[width * height * 4]); + // ARGB + image_.SetSize(width * height * 4); gdk_threads_leave(); } void GtkMainWnd::VideoRenderer::OnFrame(const webrtc::VideoFrame& video_frame) { gdk_threads_enter(); - rtc::scoped_refptr buffer( + webrtc::scoped_refptr buffer( video_frame.video_frame_buffer()->ToI420()); if (video_frame.rotation() != webrtc::kVideoRotation_0) { buffer = webrtc::I420Buffer::Rotate(*buffer, video_frame.rotation()); @@ -536,7 +501,7 @@ void GtkMainWnd::VideoRenderer::OnFrame(const webrtc::VideoFrame& video_frame) { // native endianness. libyuv::I420ToARGB(buffer->DataY(), buffer->StrideY(), buffer->DataU(), buffer->StrideU(), buffer->DataV(), buffer->StrideV(), - image_.get(), width_ * 4, buffer->width(), + image_.data(), width_ * 4, buffer->width(), buffer->height()); gdk_threads_leave(); diff --git a/examples/peerconnection/client/linux/main_wnd.h b/examples/peerconnection/client/linux/main_wnd.h index 3b31e1be3b..8c13a1db35 100644 --- a/examples/peerconnection/client/linux/main_wnd.h +++ b/examples/peerconnection/client/linux/main_wnd.h @@ -16,12 +16,14 @@ #include #include +#include "api/array_view.h" #include "api/media_stream_interface.h" #include "api/scoped_refptr.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "examples/peerconnection/client/main_wnd.h" #include "examples/peerconnection/client/peer_connection_client.h" +#include "rtc_base/buffer.h" // Forward declarations. 
typedef struct _GtkWidget GtkWidget; @@ -81,7 +83,7 @@ class GtkMainWnd : public MainWindow { void Draw(GtkWidget* widget, cairo_t* cr); protected: - class VideoRenderer : public rtc::VideoSinkInterface { + class VideoRenderer : public webrtc::VideoSinkInterface { public: VideoRenderer(GtkMainWnd* main_wnd, webrtc::VideoTrackInterface* track_to_render); @@ -90,7 +92,7 @@ class GtkMainWnd : public MainWindow { // VideoSinkInterface implementation void OnFrame(const webrtc::VideoFrame& frame) override; - const uint8_t* image() const { return image_.get(); } + webrtc::ArrayView image() const { return image_; } int width() const { return width_; } @@ -98,11 +100,11 @@ class GtkMainWnd : public MainWindow { protected: void SetSize(int width, int height); - std::unique_ptr image_; + webrtc::Buffer image_; int width_; int height_; GtkMainWnd* main_wnd_; - rtc::scoped_refptr rendered_track_; + webrtc::scoped_refptr rendered_track_; }; protected: @@ -119,9 +121,9 @@ class GtkMainWnd : public MainWindow { bool autocall_; std::unique_ptr local_renderer_; std::unique_ptr remote_renderer_; - int width_; - int height_; - std::unique_ptr draw_buffer_; + int width_ = 0; + int height_ = 0; + webrtc::Buffer draw_buffer_; int draw_buffer_size_; }; diff --git a/examples/peerconnection/client/main.cc b/examples/peerconnection/client/main.cc index 32bc52bda4..c5ac647f72 100644 --- a/examples/peerconnection/client/main.cc +++ b/examples/peerconnection/client/main.cc @@ -61,7 +61,7 @@ WindowsCommandLineArguments::WindowsCommandLineArguments() { // iterate over the returned wide strings; for (int i = 0; i < argc; ++i) { - args_.push_back(rtc::ToUtf8(wide_argv[i], wcslen(wide_argv[i]))); + args_.push_back(webrtc::ToUtf8(wide_argv[i], wcslen(wide_argv[i]))); // make sure the argv array points to the string data. argv_.push_back(const_cast(args_.back().c_str())); } @@ -73,9 +73,9 @@ int PASCAL wWinMain(HINSTANCE instance, HINSTANCE prev_instance, wchar_t* cmd_line, int cmd_show) { - rtc::WinsockInitializer winsock_init; - rtc::PhysicalSocketServer ss; - rtc::AutoSocketServerThread main_thread(&ss); + webrtc::WinsockInitializer winsock_init; + webrtc::PhysicalSocketServer ss; + webrtc::AutoSocketServerThread main_thread(&ss); WindowsCommandLineArguments win_args; int argc = win_args.argc(); @@ -104,9 +104,9 @@ int PASCAL wWinMain(HINSTANCE instance, return -1; } - rtc::InitializeSSL(); + webrtc::InitializeSSL(); PeerConnectionClient client; - auto conductor = rtc::make_ref_counted(&client, &wnd); + auto conductor = webrtc::make_ref_counted(&client, &wnd); // Main loop. 
MSG msg; @@ -128,6 +128,6 @@ int PASCAL wWinMain(HINSTANCE instance, } } - rtc::CleanupSSL(); + webrtc::CleanupSSL(); return 0; } diff --git a/examples/peerconnection/client/main_wnd.cc b/examples/peerconnection/client/main_wnd.cc index afafa621b3..59854875fb 100644 --- a/examples/peerconnection/client/main_wnd.cc +++ b/examples/peerconnection/client/main_wnd.cc @@ -588,7 +588,7 @@ MainWnd::VideoRenderer::VideoRenderer( bmi_.bmiHeader.biHeight = -height; bmi_.bmiHeader.biSizeImage = width * height * (bmi_.bmiHeader.biBitCount >> 3); - rendered_track_->AddOrUpdateSink(this, rtc::VideoSinkWants()); + rendered_track_->AddOrUpdateSink(this, webrtc::VideoSinkWants()); } MainWnd::VideoRenderer::~VideoRenderer() { @@ -614,7 +614,7 @@ void MainWnd::VideoRenderer::OnFrame(const webrtc::VideoFrame& video_frame) { { AutoLock lock(this); - rtc::scoped_refptr buffer( + webrtc::scoped_refptr buffer( video_frame.video_frame_buffer()->ToI420()); if (video_frame.rotation() != webrtc::kVideoRotation_0) { buffer = webrtc::I420Buffer::Rotate(*buffer, video_frame.rotation()); diff --git a/examples/peerconnection/client/main_wnd.h b/examples/peerconnection/client/main_wnd.h index 898fea9d92..8f8ce7ee12 100644 --- a/examples/peerconnection/client/main_wnd.h +++ b/examples/peerconnection/client/main_wnd.h @@ -104,7 +104,7 @@ class MainWnd : public MainWindow { HWND handle() const { return wnd_; } - class VideoRenderer : public rtc::VideoSinkInterface { + class VideoRenderer : public webrtc::VideoSinkInterface { public: VideoRenderer(HWND wnd, int width, @@ -134,7 +134,7 @@ class MainWnd : public MainWindow { BITMAPINFO bmi_; std::unique_ptr image_; CRITICAL_SECTION buffer_lock_; - rtc::scoped_refptr rendered_track_; + webrtc::scoped_refptr rendered_track_; }; // A little helper class to make sure we always to proper locking and diff --git a/examples/peerconnection/client/peer_connection_client.cc b/examples/peerconnection/client/peer_connection_client.cc index 2746752d80..b646c58226 100644 --- a/examples/peerconnection/client/peer_connection_client.cc +++ b/examples/peerconnection/client/peer_connection_client.cc @@ -12,9 +12,11 @@ #include "api/units/time_delta.h" #include "examples/peerconnection/client/defaults.h" +#include "rtc_base/async_dns_resolver.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/net_helpers.h" +#include "rtc_base/thread.h" namespace { @@ -23,8 +25,8 @@ constexpr char kByeMessage[] = "BYE"; // Delay between server connection retries, in milliseconds constexpr webrtc::TimeDelta kReconnectDelay = webrtc::TimeDelta::Seconds(2); -rtc::Socket* CreateClientSocket(int family) { - rtc::Thread* thread = rtc::Thread::Current(); +webrtc::Socket* CreateClientSocket(int family) { + webrtc::Thread* thread = webrtc::Thread::Current(); RTC_DCHECK(thread != NULL); return thread->socketserver()->CreateSocket(family, SOCK_STREAM); } @@ -32,7 +34,7 @@ rtc::Socket* CreateClientSocket(int family) { } // namespace PeerConnectionClient::PeerConnectionClient() - : callback_(NULL), resolver_(NULL), state_(NOT_CONNECTED), my_id_(-1) {} + : callback_(NULL), resolver_(nullptr), state_(NOT_CONNECTED), my_id_(-1) {} PeerConnectionClient::~PeerConnectionClient() = default; @@ -95,26 +97,32 @@ void PeerConnectionClient::Connect(const std::string& server, client_name_ = client_name; if (server_address_.IsUnresolvedIP()) { + RTC_DCHECK_NE(state_, RESOLVING); + RTC_DCHECK(!resolver_); state_ = RESOLVING; - resolver_ = new rtc::AsyncResolver(); - resolver_->SignalDone.connect(this, 
&PeerConnectionClient::OnResolveResult); - resolver_->Start(server_address_); + resolver_ = std::make_unique(); + resolver_->Start(server_address_, + [this] { OnResolveResult(resolver_->result()); }); } else { DoConnect(); } } void PeerConnectionClient::OnResolveResult( - rtc::AsyncResolverInterface* resolver) { - if (resolver_->GetError() != 0) { + const webrtc::AsyncDnsResolverResult& result) { + if (result.GetError() != 0) { callback_->OnServerConnectionFailure(); - resolver_->Destroy(false); - resolver_ = NULL; + resolver_.reset(); state_ = NOT_CONNECTED; - } else { - server_address_ = resolver_->address(); - DoConnect(); + return; + } + if (!result.GetResolvedAddress(AF_INET, &server_address_)) { + callback_->OnServerConnectionFailure(); + resolver_.reset(); + state_ = NOT_CONNECTED; + return; } + DoConnect(); } void PeerConnectionClient::DoConnect() { @@ -139,7 +147,7 @@ bool PeerConnectionClient::SendToPeer(int peer_id, const std::string& message) { return false; RTC_DCHECK(is_connected()); - RTC_DCHECK(control_socket_->GetState() == rtc::Socket::CS_CLOSED); + RTC_DCHECK(control_socket_->GetState() == webrtc::Socket::CS_CLOSED); if (!is_connected() || peer_id == -1) return false; @@ -161,17 +169,17 @@ bool PeerConnectionClient::SendHangUp(int peer_id) { bool PeerConnectionClient::IsSendingMessage() { return state_ == CONNECTED && - control_socket_->GetState() != rtc::Socket::CS_CLOSED; + control_socket_->GetState() != webrtc::Socket::CS_CLOSED; } bool PeerConnectionClient::SignOut() { if (state_ == NOT_CONNECTED || state_ == SIGNING_OUT) return true; - if (hanging_get_->GetState() != rtc::Socket::CS_CLOSED) + if (hanging_get_->GetState() != webrtc::Socket::CS_CLOSED) hanging_get_->Close(); - if (control_socket_->GetState() == rtc::Socket::CS_CLOSED) { + if (control_socket_->GetState() == webrtc::Socket::CS_CLOSED) { state_ = SIGNING_OUT; if (my_id_ != -1) { @@ -196,16 +204,13 @@ void PeerConnectionClient::Close() { hanging_get_->Close(); onconnect_data_.clear(); peers_.clear(); - if (resolver_ != NULL) { - resolver_->Destroy(false); - resolver_ = NULL; - } + resolver_.reset(); my_id_ = -1; state_ = NOT_CONNECTED; } bool PeerConnectionClient::ConnectControlSocket() { - RTC_DCHECK(control_socket_->GetState() == rtc::Socket::CS_CLOSED); + RTC_DCHECK(control_socket_->GetState() == webrtc::Socket::CS_CLOSED); int err = control_socket_->Connect(server_address_); if (err == SOCKET_ERROR) { Close(); @@ -214,14 +219,14 @@ bool PeerConnectionClient::ConnectControlSocket() { return true; } -void PeerConnectionClient::OnConnect(rtc::Socket* socket) { +void PeerConnectionClient::OnConnect(webrtc::Socket* socket) { RTC_DCHECK(!onconnect_data_.empty()); size_t sent = socket->Send(onconnect_data_.c_str(), onconnect_data_.length()); RTC_DCHECK(sent == onconnect_data_.length()); onconnect_data_.clear(); } -void PeerConnectionClient::OnHangingGetConnect(rtc::Socket* socket) { +void PeerConnectionClient::OnHangingGetConnect(webrtc::Socket* socket) { char buffer[1024]; snprintf(buffer, sizeof(buffer), "GET /wait?peer_id=%i HTTP/1.0\r\n\r\n", my_id_); @@ -270,7 +275,7 @@ bool PeerConnectionClient::GetHeaderValue(const std::string& data, return false; } -bool PeerConnectionClient::ReadIntoBuffer(rtc::Socket* socket, +bool PeerConnectionClient::ReadIntoBuffer(webrtc::Socket* socket, std::string* data, size_t* content_length) { char buffer[0xffff]; @@ -308,7 +313,7 @@ bool PeerConnectionClient::ReadIntoBuffer(rtc::Socket* socket, return ret; } -void PeerConnectionClient::OnRead(rtc::Socket* socket) { +void 
PeerConnectionClient::OnRead(webrtc::Socket* socket) { size_t content_length = 0; if (ReadIntoBuffer(socket, &control_data_, &content_length)) { size_t peer_id = 0, eoh = 0; @@ -353,14 +358,14 @@ void PeerConnectionClient::OnRead(rtc::Socket* socket) { control_data_.clear(); if (state_ == SIGNING_IN) { - RTC_DCHECK(hanging_get_->GetState() == rtc::Socket::CS_CLOSED); + RTC_DCHECK(hanging_get_->GetState() == webrtc::Socket::CS_CLOSED); state_ = CONNECTED; hanging_get_->Connect(server_address_); } } } -void PeerConnectionClient::OnHangingGetRead(rtc::Socket* socket) { +void PeerConnectionClient::OnHangingGetRead(webrtc::Socket* socket) { RTC_LOG(LS_INFO) << __FUNCTION__; size_t content_length = 0; if (ReadIntoBuffer(socket, ¬ification_data_, &content_length)) { @@ -397,7 +402,7 @@ void PeerConnectionClient::OnHangingGetRead(rtc::Socket* socket) { notification_data_.clear(); } - if (hanging_get_->GetState() == rtc::Socket::CS_CLOSED && + if (hanging_get_->GetState() == webrtc::Socket::CS_CLOSED && state_ == CONNECTED) { hanging_get_->Connect(server_address_); } @@ -458,7 +463,7 @@ bool PeerConnectionClient::ParseServerResponse(const std::string& response, return true; } -void PeerConnectionClient::OnClose(rtc::Socket* socket, int err) { +void PeerConnectionClient::OnClose(webrtc::Socket* socket, int err) { RTC_LOG(LS_INFO) << __FUNCTION__; socket->Close(); @@ -479,7 +484,7 @@ void PeerConnectionClient::OnClose(rtc::Socket* socket, int err) { } else { if (socket == control_socket_.get()) { RTC_LOG(LS_WARNING) << "Connection refused; retrying in 2 seconds"; - rtc::Thread::Current()->PostDelayedTask( + webrtc::Thread::Current()->PostDelayedTask( SafeTask(safety_.flag(), [this] { DoConnect(); }), kReconnectDelay); } else { Close(); diff --git a/examples/peerconnection/client/peer_connection_client.h b/examples/peerconnection/client/peer_connection_client.h index 8f9c5b6a75..bc82184ebe 100644 --- a/examples/peerconnection/client/peer_connection_client.h +++ b/examples/peerconnection/client/peer_connection_client.h @@ -15,6 +15,7 @@ #include #include +#include "api/async_dns_resolver.h" #include "api/task_queue/pending_task_safety_flag.h" #include "rtc_base/net_helpers.h" #include "rtc_base/physical_socket_server.h" @@ -70,8 +71,8 @@ class PeerConnectionClient : public sigslot::has_slots<> { void Close(); void InitSocketSignals(); bool ConnectControlSocket(); - void OnConnect(rtc::Socket* socket); - void OnHangingGetConnect(rtc::Socket* socket); + void OnConnect(webrtc::Socket* socket); + void OnHangingGetConnect(webrtc::Socket* socket); void OnMessageFromPeer(int peer_id, const std::string& message); // Quick and dirty support for parsing HTTP header values. @@ -86,13 +87,13 @@ class PeerConnectionClient : public sigslot::has_slots<> { std::string* value); // Returns true if the whole response has been read. 
- bool ReadIntoBuffer(rtc::Socket* socket, + bool ReadIntoBuffer(webrtc::Socket* socket, std::string* data, size_t* content_length); - void OnRead(rtc::Socket* socket); + void OnRead(webrtc::Socket* socket); - void OnHangingGetRead(rtc::Socket* socket); + void OnHangingGetRead(webrtc::Socket* socket); // Parses a single line entry in the form ",," bool ParseEntry(const std::string& entry, @@ -107,15 +108,15 @@ class PeerConnectionClient : public sigslot::has_slots<> { size_t* peer_id, size_t* eoh); - void OnClose(rtc::Socket* socket, int err); + void OnClose(webrtc::Socket* socket, int err); - void OnResolveResult(rtc::AsyncResolverInterface* resolver); + void OnResolveResult(const webrtc::AsyncDnsResolverResult& result); PeerConnectionClientObserver* callback_; - rtc::SocketAddress server_address_; - rtc::AsyncResolver* resolver_; - std::unique_ptr control_socket_; - std::unique_ptr hanging_get_; + webrtc::SocketAddress server_address_; + std::unique_ptr resolver_; + std::unique_ptr control_socket_; + std::unique_ptr hanging_get_; std::string onconnect_data_; std::string control_data_; std::string notification_data_; diff --git a/examples/peerconnection/server/data_socket.h b/examples/peerconnection/server/data_socket.h index 57ad5b9aee..326ecc075b 100644 --- a/examples/peerconnection/server/data_socket.h +++ b/examples/peerconnection/server/data_socket.h @@ -11,14 +11,16 @@ #ifndef EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_ #define EXAMPLES_PEERCONNECTION_SERVER_DATA_SOCKET_H_ +#include + +#include "rtc_base/ip_address.h" +#include "rtc_base/net_helpers.h" + #ifdef WIN32 -#include typedef int socklen_t; typedef SOCKET NativeSocket; #else -#include #include -#include #define closesocket close typedef int NativeSocket; @@ -31,8 +33,6 @@ typedef int NativeSocket; #endif #endif -#include - class SocketBase { public: SocketBase() : socket_(INVALID_SOCKET) {} diff --git a/examples/peerconnection/server/utils.cc b/examples/peerconnection/server/utils.cc index 5e61e601d9..4cdaa0af15 100644 --- a/examples/peerconnection/server/utils.cc +++ b/examples/peerconnection/server/utils.cc @@ -14,12 +14,12 @@ #include "rtc_base/string_encode.h" -using rtc::ToString; +using absl::StrCat; std::string int2str(int i) { - return ToString(i); + return absl::StrCat(i); } std::string size_t2str(size_t i) { - return ToString(i); + return absl::StrCat(i); } diff --git a/examples/stunprober/main.cc b/examples/stunprober/main.cc deleted file mode 100644 index 3b3c06be8f..0000000000 --- a/examples/stunprober/main.cc +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Copyright 2015 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include -#include -#include -#include -#include - -#include "absl/flags/flag.h" -#include "absl/flags/parse.h" -#include "p2p/base/basic_packet_socket_factory.h" -#include "p2p/stunprober/stun_prober.h" -#include "rtc_base/helpers.h" -#include "rtc_base/logging.h" -#include "rtc_base/network.h" -#include "rtc_base/physical_socket_server.h" -#include "rtc_base/socket_address.h" -#include "rtc_base/ssl_adapter.h" -#include "rtc_base/thread.h" -#include "rtc_base/time_utils.h" -#include "test/scoped_key_value_config.h" - -using stunprober::AsyncCallback; -using stunprober::StunProber; - -ABSL_FLAG(int, - interval, - 10, - "Interval of consecutive stun pings in milliseconds"); -ABSL_FLAG(bool, - shared_socket, - false, - "Share socket mode for different remote IPs"); -ABSL_FLAG(int, - pings_per_ip, - 10, - "Number of consecutive stun pings to send for each IP"); -ABSL_FLAG(int, - timeout, - 1000, - "Milliseconds of wait after the last ping sent before exiting"); -ABSL_FLAG( - std::string, - servers, - "stun.l.google.com:19302,stun1.l.google.com:19302,stun2.l.google.com:19302", - "Comma separated STUN server addresses with ports"); - -namespace { - -const char* PrintNatType(stunprober::NatType type) { - switch (type) { - case stunprober::NATTYPE_NONE: - return "Not behind a NAT"; - case stunprober::NATTYPE_UNKNOWN: - return "Unknown NAT type"; - case stunprober::NATTYPE_SYMMETRIC: - return "Symmetric NAT"; - case stunprober::NATTYPE_NON_SYMMETRIC: - return "Non-Symmetric NAT"; - default: - return "Invalid"; - } -} - -void PrintStats(StunProber* prober) { - StunProber::Stats stats; - if (!prober->GetStats(&stats)) { - RTC_LOG(LS_WARNING) << "Results are inconclusive."; - return; - } - - RTC_LOG(LS_INFO) << "Shared Socket Mode: " << stats.shared_socket_mode; - RTC_LOG(LS_INFO) << "Requests sent: " << stats.num_request_sent; - RTC_LOG(LS_INFO) << "Responses received: " << stats.num_response_received; - RTC_LOG(LS_INFO) << "Target interval (ns): " - << stats.target_request_interval_ns; - RTC_LOG(LS_INFO) << "Actual interval (ns): " - << stats.actual_request_interval_ns; - RTC_LOG(LS_INFO) << "NAT Type: " << PrintNatType(stats.nat_type); - RTC_LOG(LS_INFO) << "Host IP: " << stats.host_ip; - RTC_LOG(LS_INFO) << "Server-reflexive ips: "; - for (auto& ip : stats.srflx_addrs) { - RTC_LOG(LS_INFO) << "\t" << ip; - } - - RTC_LOG(LS_INFO) << "Success Precent: " << stats.success_percent; - RTC_LOG(LS_INFO) << "Response Latency:" << stats.average_rtt_ms; -} - -void StopTrial(rtc::Thread* thread, StunProber* prober, int result) { - thread->Quit(); - if (prober) { - RTC_LOG(LS_INFO) << "Result: " << result; - if (result == StunProber::SUCCESS) { - PrintStats(prober); - } - } -} - -} // namespace - -int main(int argc, char* argv[]) { - absl::ParseCommandLine(argc, argv); - - std::vector server_addresses; - std::istringstream servers(absl::GetFlag(FLAGS_servers)); - std::string server; - while (getline(servers, server, ',')) { - rtc::SocketAddress addr; - if (!addr.FromString(server)) { - RTC_LOG(LS_ERROR) << "Parsing " << server << " failed."; - return -1; - } - server_addresses.push_back(addr); - } - - rtc::InitializeSSL(); - rtc::InitRandom(rtc::Time32()); - webrtc::test::ScopedKeyValueConfig field_trials; - rtc::PhysicalSocketServer socket_server; - rtc::AutoSocketServerThread thread(&socket_server); - auto socket_factory = - std::make_unique(&socket_server); - std::unique_ptr network_manager( - new rtc::BasicNetworkManager(&socket_server, &field_trials)); - std::vector networks = 
network_manager->GetNetworks(); - auto prober = std::make_unique(socket_factory.get(), - rtc::Thread::Current(), networks); - auto finish_callback = [&thread](StunProber* prober, int result) { - StopTrial(&thread, prober, result); - }; - prober->Start(server_addresses, absl::GetFlag(FLAGS_shared_socket), - absl::GetFlag(FLAGS_interval), - absl::GetFlag(FLAGS_pings_per_ip), absl::GetFlag(FLAGS_timeout), - AsyncCallback(finish_callback)); - thread.Run(); - return 0; -} diff --git a/examples/stunserver/stunserver_main.cc b/examples/stunserver/stunserver_main.cc index 8180069bf0..1c8f505dcc 100644 --- a/examples/stunserver/stunserver_main.cc +++ b/examples/stunserver/stunserver_main.cc @@ -9,13 +9,13 @@ */ #include -#include "p2p/base/stun_server.h" +#include "p2p/test/stun_server.h" #include "rtc_base/async_udp_socket.h" #include "rtc_base/socket_address.h" #include "rtc_base/socket_server.h" #include "rtc_base/thread.h" -using cricket::StunServer; +using ::webrtc::StunServer; int main(int argc, char* argv[]) { if (argc != 2) { @@ -23,16 +23,18 @@ int main(int argc, char* argv[]) { return 1; } - rtc::SocketAddress server_addr; + webrtc::SocketAddress server_addr; if (!server_addr.FromString(argv[1])) { std::cerr << "Unable to parse IP address: " << argv[1]; return 1; } - rtc::Thread* pthMain = rtc::Thread::Current(); + webrtc::Thread* pthMain = + webrtc::ThreadManager::Instance()->WrapCurrentThread(); + RTC_DCHECK(pthMain); - rtc::AsyncUDPSocket* server_socket = - rtc::AsyncUDPSocket::Create(pthMain->socketserver(), server_addr); + webrtc::AsyncUDPSocket* server_socket = + webrtc::AsyncUDPSocket::Create(pthMain->socketserver(), server_addr); if (!server_socket) { std::cerr << "Failed to create a UDP socket" << std::endl; return 1; diff --git a/examples/turnserver/read_auth_file.cc b/examples/turnserver/read_auth_file.cc index 4b0b21b8ae..d4cae5636f 100644 --- a/examples/turnserver/read_auth_file.cc +++ b/examples/turnserver/read_auth_file.cc @@ -25,8 +25,8 @@ std::map ReadAuthFile(std::istream* s) { if (sep == std::string::npos) continue; char buf[32]; - size_t len = rtc::hex_decode(rtc::ArrayView(buf), - absl::string_view(line).substr(sep + 1)); + size_t len = webrtc::hex_decode(webrtc::ArrayView(buf), + absl::string_view(line).substr(sep + 1)); if (len > 0) { name_to_key.emplace(line.substr(0, sep), std::string(buf, len)); } diff --git a/examples/turnserver/turnserver_main.cc b/examples/turnserver/turnserver_main.cc index 8db6162306..9248ea2e73 100644 --- a/examples/turnserver/turnserver_main.cc +++ b/examples/turnserver/turnserver_main.cc @@ -18,7 +18,7 @@ #include "examples/turnserver/read_auth_file.h" #include "p2p/base/basic_packet_socket_factory.h" #include "p2p/base/port_interface.h" -#include "p2p/base/turn_server.h" +#include "p2p/test/turn_server.h" #include "rtc_base/async_udp_socket.h" #include "rtc_base/ip_address.h" #include "rtc_base/physical_socket_server.h" @@ -28,7 +28,7 @@ namespace { const char kSoftware[] = "libjingle TurnServer"; -class TurnFileAuth : public cricket::TurnAuthInterface { +class TurnFileAuth : public webrtc::TurnAuthInterface { public: explicit TurnFileAuth(std::map name_to_key) : name_to_key_(std::move(name_to_key)) {} @@ -58,29 +58,29 @@ int main(int argc, char* argv[]) { return 1; } - rtc::SocketAddress int_addr; + webrtc::SocketAddress int_addr; if (!int_addr.FromString(argv[1])) { std::cerr << "Unable to parse IP address: " << argv[1] << std::endl; return 1; } - rtc::IPAddress ext_addr; - if (!IPFromString(argv[2], &ext_addr)) { + webrtc::IPAddress 
ext_addr; + if (!webrtc::IPFromString(argv[2], &ext_addr)) { std::cerr << "Unable to parse IP address: " << argv[2] << std::endl; return 1; } - rtc::PhysicalSocketServer socket_server; - rtc::AutoSocketServerThread main(&socket_server); - rtc::AsyncUDPSocket* int_socket = - rtc::AsyncUDPSocket::Create(&socket_server, int_addr); + webrtc::PhysicalSocketServer socket_server; + webrtc::AutoSocketServerThread main(&socket_server); + webrtc::AsyncUDPSocket* int_socket = + webrtc::AsyncUDPSocket::Create(&socket_server, int_addr); if (!int_socket) { std::cerr << "Failed to create a UDP socket bound at" << int_addr.ToString() << std::endl; return 1; } - cricket::TurnServer server(&main); + webrtc::TurnServer server(&main); std::fstream auth_file(argv[4], std::fstream::in); TurnFileAuth auth(auth_file.is_open() @@ -89,10 +89,10 @@ int main(int argc, char* argv[]) { server.set_realm(argv[3]); server.set_software(kSoftware); server.set_auth_hook(&auth); - server.AddInternalSocket(int_socket, cricket::PROTO_UDP); + server.AddInternalSocket(int_socket, webrtc::PROTO_UDP); server.SetExternalSocketFactory( - new rtc::BasicPacketSocketFactory(&socket_server), - rtc::SocketAddress(ext_addr, 0)); + new webrtc::BasicPacketSocketFactory(&socket_server), + webrtc::SocketAddress(ext_addr, 0)); std::cout << "Listening internally at " << int_addr.ToString() << std::endl; diff --git a/examples/unityplugin/ANDROID_INSTRUCTION b/examples/unityplugin/ANDROID_INSTRUCTION deleted file mode 100644 index d5f7399bca..0000000000 --- a/examples/unityplugin/ANDROID_INSTRUCTION +++ /dev/null @@ -1,33 +0,0 @@ -Instruction of running webrtc_unity_plugin on Android Unity - -1. On Linux machine, compile target webrtc_unity_plugin. - Checkout WebRTC codebase: fetch --nohooks webrtc_android - If you already have a checkout for linux, add target_os=”android” into .gclient file. - Run gclient sync - Run gn args out/Android, and again set target_os=”android” in the args.gn - Run ninja -C out/Android webrtc_unity_plugin - -2. On Linux machine, build target libwebrtc_unity under webrtc checkout. This is the java code for webrtc to work on Android. - -3. Copy libwebrtc_unity.jar and libwebrtc_unity_plugin.so into Unity project folder, under Assets/Plugins/Android folder. - -4. Rename libwebrtc_unity_plugin.so to libjingle_peerconnection_so.so. This is hacky, and the purpose is to let the java code in libwebrtc_unity.jar to find their JNI implementations. Simultaneously, in your C# wrapper script for the native plugin libjingle_peerconnection_so.so, the dll_path should be set to “jingle_peerconnection_so”. - -5. In the Unity Main Scene’s Start method, write the following code to initialize the Java environment for webrtc (otherwise, webrtc will not be able to access audio device or camera from C++ code): - -#if UNITY_ANDROID - AndroidJavaClass playerClass = new AndroidJavaClass("com.unity3d.player.UnityPlayer"); - AndroidJavaObject activity = playerClass.GetStatic("currentActivity"); - AndroidJavaClass utilityClass = new AndroidJavaClass("org.webrtc.UnityUtility"); - utilityClass.CallStatic("InitializePeerConncectionFactory", new object[1] { activity }); -#endif - -6. Compile the unity project into an APK, and decompile the apk using apktool that you can download from https://ibotpeaches.github.io/Apktool/ - Run apktool d apkname.apk. -Then copy the AndroidManifest.xml in the decompiled folder to the Assets/Plugins/Android folder, and add two lines: - - - -The purpose of using apktool is to get a well-written android manifest xml file. 
If you know how to write manifest file from scratch, you can skip using apktool. - -7. Compile the unity project into an APK again and deploy it to an android device. diff --git a/examples/unityplugin/DEPS b/examples/unityplugin/DEPS deleted file mode 100644 index 604005ac73..0000000000 --- a/examples/unityplugin/DEPS +++ /dev/null @@ -1,4 +0,0 @@ -include_rules = [ - "+modules/utility", - "+sdk", -] diff --git a/examples/unityplugin/README b/examples/unityplugin/README deleted file mode 100644 index da8f07aa11..0000000000 --- a/examples/unityplugin/README +++ /dev/null @@ -1,309 +0,0 @@ -This directory contains an example Unity native plugin for Windows OS and Android. - -The APIs use Platform Invoke (P/Invoke) technology as required by Unity native plugin. -This plugin dll can also be used by Windows C# applications other than Unity. - -For detailed build instruction on Android, see ANDROID_INSTRUCTION - -An example of wrapping native plugin into a C# managed class in Unity is given as following: - -using System; -using System.Collections.Generic; -using System.Runtime.InteropServices; - -namespace SimplePeerConnectionM { - // A class for ice candidate. - public class IceCandidate { - public IceCandidate(string candidate, int sdpMlineIndex, string sdpMid) { - mCandidate = candidate; - mSdpMlineIndex = sdpMlineIndex; - mSdpMid = sdpMid; - } - string mCandidate; - int mSdpMlineIndex; - string mSdpMid; - - public string Candidate { - get { return mCandidate; } - set { mCandidate = value; } - } - - public int SdpMlineIndex { - get { return mSdpMlineIndex; } - set { mSdpMlineIndex = value; } - } - - public string SdpMid { - get { return mSdpMid; } - set { mSdpMid = value; } - } - } - - // A managed wrapper up class for the native c style peer connection APIs. 
- public class PeerConnectionM { - private const string dllPath = "webrtc_unity_plugin"; - - //create a peerconnection with turn servers - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern int CreatePeerConnection(string[] turnUrls, int noOfUrls, - string username, string credential); - - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool ClosePeerConnection(int peerConnectionId); - - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool AddStream(int peerConnectionId, bool audioOnly); - - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool AddDataChannel(int peerConnectionId); - - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool CreateOffer(int peerConnectionId); - - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool CreateAnswer(int peerConnectionId); - - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool SendDataViaDataChannel(int peerConnectionId, string data); - - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool SetAudioControl(int peerConnectionId, bool isMute, bool isRecord); - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - private delegate void LocalDataChannelReadyInternalDelegate(); - public delegate void LocalDataChannelReadyDelegate(int id); - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool RegisterOnLocalDataChannelReady( - int peerConnectionId, LocalDataChannelReadyInternalDelegate callback); - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - private delegate void DataFromDataChannelReadyInternalDelegate(string s); - public delegate void DataFromDataChannelReadyDelegate(int id, string s); - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool RegisterOnDataFromDataChannelReady( - int peerConnectionId, DataFromDataChannelReadyInternalDelegate callback); - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - private delegate void FailureMessageInternalDelegate(string msg); - public delegate void FailureMessageDelegate(int id, string msg); - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool RegisterOnFailure(int peerConnectionId, - FailureMessageInternalDelegate callback); - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - private delegate void AudioBusReadyInternalDelegate(IntPtr data, int bitsPerSample, - int sampleRate, int numberOfChannels, int numberOfFrames); - public delegate void AudioBusReadyDelegate(int id, IntPtr data, int bitsPerSample, - int sampleRate, int numberOfChannels, int numberOfFrames); - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool RegisterOnAudioBusReady(int peerConnectionId, - AudioBusReadyInternalDelegate callback); - - // Video callbacks. 
- [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - private delegate void I420FrameReadyInternalDelegate( - IntPtr dataY, IntPtr dataU, IntPtr dataV, - int strideY, int strideU, int strideV, - uint width, uint height); - public delegate void I420FrameReadyDelegate(int id, - IntPtr dataY, IntPtr dataU, IntPtr dataV, - int strideY, int strideU, int strideV, - uint width, uint height); - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool RegisterOnLocalI420FrameReady(int peerConnectionId, - I420FrameReadyInternalDelegate callback); - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool RegisterOnRemoteI420FrameReady(int peerConnectionId, - I420FrameReadyInternalDelegate callback); - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - private delegate void LocalSdpReadytoSendInternalDelegate(string type, string sdp); - public delegate void LocalSdpReadytoSendDelegate(int id, string type, string sdp); - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool RegisterOnLocalSdpReadytoSend(int peerConnectionId, - LocalSdpReadytoSendInternalDelegate callback); - - [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - private delegate void IceCandidateReadytoSendInternalDelegate( - string candidate, int sdpMlineIndex, string sdpMid); - public delegate void IceCandidateReadytoSendDelegate( - int id, string candidate, int sdpMlineIndex, string sdpMid); - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool RegisterOnIceCandidateReadytoSend( - int peerConnectionId, IceCandidateReadytoSendInternalDelegate callback); - - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool SetRemoteDescription(int peerConnectionId, string type, string sdp); - - [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)] - private static extern bool AddIceCandidate(int peerConnectionId, string sdp, - int sdpMlineindex, string sdpMid); - - public PeerConnectionM(List turnUrls, string username, string credential) { - string[] urls = turnUrls != null ? turnUrls.ToArray() : null; - int length = turnUrls != null ? turnUrls.Count : 0; - mPeerConnectionId = CreatePeerConnection(urls, length, username, credential); - RegisterCallbacks(); - } - - public void ClosePeerConnection() { - ClosePeerConnection(mPeerConnectionId); - mPeerConnectionId = -1; - } - - // Return -1 if Peerconnection is not available. 
- public int GetUniqueId() { - return mPeerConnectionId; - } - - public void AddStream(bool audioOnly) { - AddStream(mPeerConnectionId, audioOnly); - } - - public void AddDataChannel() { - AddDataChannel(mPeerConnectionId); - } - - public void CreateOffer() { - CreateOffer(mPeerConnectionId); - } - - public void CreateAnswer() { - CreateAnswer(mPeerConnectionId); - } - - public void SendDataViaDataChannel(string data) { - SendDataViaDataChannel(mPeerConnectionId, data); - } - - public void SetAudioControl(bool isMute, bool isRecord) { - SetAudioControl(mPeerConnectionId, isMute, isRecord); - } - - public void SetRemoteDescription(string type, string sdp) { - SetRemoteDescription(mPeerConnectionId, type, sdp); - } - - public void AddIceCandidate(string candidate, int sdpMlineindex, string sdpMid) { - AddIceCandidate(mPeerConnectionId, candidate, sdpMlineindex, sdpMid); - } - - private void RegisterCallbacks() { - localDataChannelReadyDelegate = new LocalDataChannelReadyInternalDelegate( - RaiseLocalDataChannelReady); - RegisterOnLocalDataChannelReady(mPeerConnectionId, localDataChannelReadyDelegate); - - dataFromDataChannelReadyDelegate = new DataFromDataChannelReadyInternalDelegate( - RaiseDataFromDataChannelReady); - RegisterOnDataFromDataChannelReady(mPeerConnectionId, dataFromDataChannelReadyDelegate); - - failureMessageDelegate = new FailureMessageInternalDelegate(RaiseFailureMessage); - RegisterOnFailure(mPeerConnectionId, failureMessageDelegate); - - audioBusReadyDelegate = new AudioBusReadyInternalDelegate(RaiseAudioBusReady); - RegisterOnAudioBusReady(mPeerConnectionId, audioBusReadyDelegate); - - localI420FrameReadyDelegate = new I420FrameReadyInternalDelegate( - RaiseLocalVideoFrameReady); - RegisterOnLocalI420FrameReady(mPeerConnectionId, localI420FrameReadyDelegate); - - remoteI420FrameReadyDelegate = new I420FrameReadyInternalDelegate( - RaiseRemoteVideoFrameReady); - RegisterOnRemoteI420FrameReady(mPeerConnectionId, remoteI420FrameReadyDelegate); - - localSdpReadytoSendDelegate = new LocalSdpReadytoSendInternalDelegate( - RaiseLocalSdpReadytoSend); - RegisterOnLocalSdpReadytoSend(mPeerConnectionId, localSdpReadytoSendDelegate); - - iceCandidateReadytoSendDelegate = - new IceCandidateReadytoSendInternalDelegate(RaiseIceCandidateReadytoSend); - RegisterOnIceCandidateReadytoSend( - mPeerConnectionId, iceCandidateReadytoSendDelegate); - } - - private void RaiseLocalDataChannelReady() { - if (OnLocalDataChannelReady != null) - OnLocalDataChannelReady(mPeerConnectionId); - } - - private void RaiseDataFromDataChannelReady(string data) { - if (OnDataFromDataChannelReady != null) - OnDataFromDataChannelReady(mPeerConnectionId, data); - } - - private void RaiseFailureMessage(string msg) { - if (OnFailureMessage != null) - OnFailureMessage(mPeerConnectionId, msg); - } - - private void RaiseAudioBusReady(IntPtr data, int bitsPerSample, - int sampleRate, int numberOfChannels, int numberOfFrames) { - if (OnAudioBusReady != null) - OnAudioBusReady(mPeerConnectionId, data, bitsPerSample, sampleRate, - numberOfChannels, numberOfFrames); - } - - private void RaiseLocalVideoFrameReady( - IntPtr dataY, IntPtr dataU, IntPtr dataV, - int strideY, int strideU, int strideV, - uint width, uint height) { - if (OnLocalVideoFrameReady != null) - OnLocalVideoFrameReady(mPeerConnectionId, dataY, dataU, dataV, strideY, strideU, strideV, - width, height); - } - - private void RaiseRemoteVideoFrameReady( - IntPtr dataY, IntPtr dataU, IntPtr dataV, - int strideY, int strideU, int strideV, - uint width, uint 
height) { - if (OnRemoteVideoFrameReady != null) - OnRemoteVideoFrameReady(mPeerConnectionId, dataY, dataU, dataV, strideY, strideU, strideV, - width, height); - } - - - private void RaiseLocalSdpReadytoSend(string type, string sdp) { - if (OnLocalSdpReadytoSend != null) - OnLocalSdpReadytoSend(mPeerConnectionId, type, sdp); - } - - private void RaiseIceCandidateReadytoSend(string candidate, int sdpMlineIndex, string sdpMid) { - if (OnIceCandidateReadytoSend != null) - OnIceCandidateReadytoSend(mPeerConnectionId, candidate, sdpMlineIndex, sdpMid); - } - - public void AddQueuedIceCandidate(List iceCandidateQueue) { - if (iceCandidateQueue != null) { - foreach (IceCandidate ic in iceCandidateQueue) { - AddIceCandidate(mPeerConnectionId, ic.Candidate, ic.SdpMlineIndex, ic.SdpMid); - } - } - } - - private LocalDataChannelReadyInternalDelegate localDataChannelReadyDelegate = null; - public event LocalDataChannelReadyDelegate OnLocalDataChannelReady; - - private DataFromDataChannelReadyInternalDelegate dataFromDataChannelReadyDelegate = null; - public event DataFromDataChannelReadyDelegate OnDataFromDataChannelReady; - - private FailureMessageInternalDelegate failureMessageDelegate = null; - public event FailureMessageDelegate OnFailureMessage; - - private AudioBusReadyInternalDelegate audioBusReadyDelegate = null; - public event AudioBusReadyDelegate OnAudioBusReady; - - private I420FrameReadyInternalDelegate localI420FrameReadyDelegate = null; - public event I420FrameReadyDelegate OnLocalVideoFrameReady; - - private I420FrameReadyInternalDelegate remoteI420FrameReadyDelegate = null; - public event I420FrameReadyDelegate OnRemoteVideoFrameReady; - - private LocalSdpReadytoSendInternalDelegate localSdpReadytoSendDelegate = null; - public event LocalSdpReadytoSendDelegate OnLocalSdpReadytoSend; - - private IceCandidateReadytoSendInternalDelegate iceCandidateReadytoSendDelegate = null; - public event IceCandidateReadytoSendDelegate OnIceCandidateReadytoSend; - - private int mPeerConnectionId = -1; - } -} diff --git a/examples/unityplugin/class_reference_holder.cc b/examples/unityplugin/class_reference_holder.cc deleted file mode 100644 index 00ca772e76..0000000000 --- a/examples/unityplugin/class_reference_holder.cc +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "examples/unityplugin/class_reference_holder.h" - -#include - -#include "sdk/android/src/jni/jni_helpers.h" - -namespace unity_plugin { - -// ClassReferenceHolder holds global reference to Java classes in app/webrtc. -class ClassReferenceHolder { - public: - explicit ClassReferenceHolder(JNIEnv* jni); - ~ClassReferenceHolder(); - - void FreeReferences(JNIEnv* jni); - jclass GetClass(const std::string& name); - - void LoadClass(JNIEnv* jni, const std::string& name); - - private: - std::map classes_; -}; - -// Allocated in LoadGlobalClassReferenceHolder(), -// freed in FreeGlobalClassReferenceHolder(). 
-static ClassReferenceHolder* g_class_reference_holder = nullptr; - -void LoadGlobalClassReferenceHolder() { - RTC_CHECK(g_class_reference_holder == nullptr); - g_class_reference_holder = new ClassReferenceHolder(webrtc::jni::GetEnv()); -} - -void FreeGlobalClassReferenceHolder() { - g_class_reference_holder->FreeReferences( - webrtc::jni::AttachCurrentThreadIfNeeded()); - delete g_class_reference_holder; - g_class_reference_holder = nullptr; -} - -ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) { - LoadClass(jni, "org/webrtc/UnityUtility"); -} - -ClassReferenceHolder::~ClassReferenceHolder() { - RTC_CHECK(classes_.empty()) << "Must call FreeReferences() before dtor!"; -} - -void ClassReferenceHolder::FreeReferences(JNIEnv* jni) { - for (std::map::const_iterator it = classes_.begin(); - it != classes_.end(); ++it) { - jni->DeleteGlobalRef(it->second); - } - classes_.clear(); -} - -jclass ClassReferenceHolder::GetClass(const std::string& name) { - std::map::iterator it = classes_.find(name); - RTC_CHECK(it != classes_.end()) << "Unexpected GetClass() call for: " << name; - return it->second; -} - -void ClassReferenceHolder::LoadClass(JNIEnv* jni, const std::string& name) { - jclass localRef = jni->FindClass(name.c_str()); - CHECK_EXCEPTION(jni) << "error during FindClass: " << name; - RTC_CHECK(localRef) << name; - jclass globalRef = reinterpret_cast(jni->NewGlobalRef(localRef)); - CHECK_EXCEPTION(jni) << "error during NewGlobalRef: " << name; - RTC_CHECK(globalRef) << name; - bool inserted = classes_.insert(std::make_pair(name, globalRef)).second; - RTC_CHECK(inserted) << "Duplicate class name: " << name; -} - -// Returns a global reference guaranteed to be valid for the lifetime of the -// process. -jclass FindClass(JNIEnv* jni, const char* name) { - return g_class_reference_holder->GetClass(name); -} - -} // namespace unity_plugin diff --git a/examples/unityplugin/class_reference_holder.h b/examples/unityplugin/class_reference_holder.h deleted file mode 100644 index 884d471ceb..0000000000 --- a/examples/unityplugin/class_reference_holder.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This is a supplement of webrtc::jni::ClassReferenceHolder. -// The purpose of this ClassReferenceHolder is to load the example -// specific java class into JNI c++ side, so that our c++ code can -// call those java functions. - -#ifndef EXAMPLES_UNITYPLUGIN_CLASS_REFERENCE_HOLDER_H_ -#define EXAMPLES_UNITYPLUGIN_CLASS_REFERENCE_HOLDER_H_ - -#include - -#include -#include -#include - -namespace unity_plugin { - -// LoadGlobalClassReferenceHolder must be called in JNI_OnLoad. -void LoadGlobalClassReferenceHolder(); -// FreeGlobalClassReferenceHolder must be called in JNI_UnLoad. -void FreeGlobalClassReferenceHolder(); - -// Returns a global reference guaranteed to be valid for the lifetime of the -// process. 
-jclass FindClass(JNIEnv* jni, const char* name); - -} // namespace unity_plugin - -#endif // EXAMPLES_UNITYPLUGIN_CLASS_REFERENCE_HOLDER_H_ diff --git a/examples/unityplugin/java/src/org/webrtc/UnityUtility.java b/examples/unityplugin/java/src/org/webrtc/UnityUtility.java deleted file mode 100644 index bd8bbfa449..0000000000 --- a/examples/unityplugin/java/src/org/webrtc/UnityUtility.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -package org.webrtc; - -import android.content.Context; -import androidx.annotation.Nullable; - -public class UnityUtility { - private static final String VIDEO_CAPTURER_THREAD_NAME = "VideoCapturerThread"; - - public static SurfaceTextureHelper LoadSurfaceTextureHelper() { - final SurfaceTextureHelper surfaceTextureHelper = - SurfaceTextureHelper.create(VIDEO_CAPTURER_THREAD_NAME, null); - return surfaceTextureHelper; - } - - private static boolean useCamera2() { - return Camera2Enumerator.isSupported(ContextUtils.getApplicationContext()); - } - - private static @Nullable VideoCapturer createCameraCapturer(CameraEnumerator enumerator) { - final String[] deviceNames = enumerator.getDeviceNames(); - - for (String deviceName : deviceNames) { - if (enumerator.isFrontFacing(deviceName)) { - VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null); - - if (videoCapturer != null) { - return videoCapturer; - } - } - } - - return null; - } - - public static VideoCapturer LinkCamera( - long nativeTrackSource, SurfaceTextureHelper surfaceTextureHelper) { - VideoCapturer capturer = - createCameraCapturer(new Camera2Enumerator(ContextUtils.getApplicationContext())); - - VideoSource videoSource = new VideoSource(nativeTrackSource); - - capturer.initialize(surfaceTextureHelper, ContextUtils.getApplicationContext(), - videoSource.getCapturerObserver()); - - capturer.startCapture(720, 480, 30); - return capturer; - } - - public static void StopCamera(VideoCapturer camera) throws InterruptedException { - camera.stopCapture(); - camera.dispose(); - } - - public static void InitializePeerConncectionFactory(Context context) throws InterruptedException { - PeerConnectionFactory.initialize( - PeerConnectionFactory.InitializationOptions.builder(context).createInitializationOptions()); - } -} diff --git a/examples/unityplugin/jni_onload.cc b/examples/unityplugin/jni_onload.cc deleted file mode 100644 index b9c92d5ef4..0000000000 --- a/examples/unityplugin/jni_onload.cc +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include -#undef JNIEXPORT -#define JNIEXPORT __attribute__((visibility("default"))) - -#include "examples/unityplugin/class_reference_holder.h" -#include "rtc_base/ssl_adapter.h" -#include "sdk/android/native_api/jni/class_loader.h" -#include "sdk/android/src/jni/jni_helpers.h" - -namespace webrtc { -namespace jni { - -extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* jvm, void* reserved) { - jint ret = InitGlobalJniVariables(jvm); - RTC_DCHECK_GE(ret, 0); - if (ret < 0) - return -1; - - RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()"; - webrtc::InitClassLoader(GetEnv()); - unity_plugin::LoadGlobalClassReferenceHolder(); - - return ret; -} - -extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM* jvm, void* reserved) { - unity_plugin::FreeGlobalClassReferenceHolder(); - RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()"; -} - -} // namespace jni -} // namespace webrtc diff --git a/examples/unityplugin/simple_peer_connection.cc b/examples/unityplugin/simple_peer_connection.cc deleted file mode 100644 index de49d5cd07..0000000000 --- a/examples/unityplugin/simple_peer_connection.cc +++ /dev/null @@ -1,586 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "examples/unityplugin/simple_peer_connection.h" - -#include - -#include "absl/memory/memory.h" -#include "api/audio_codecs/builtin_audio_decoder_factory.h" -#include "api/audio_codecs/builtin_audio_encoder_factory.h" -#include "api/create_peerconnection_factory.h" -#include "media/engine/internal_decoder_factory.h" -#include "media/engine/internal_encoder_factory.h" -#include "media/engine/multiplex_codec_factory.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "modules/video_capture/video_capture_factory.h" -#include "pc/video_track_source.h" -#include "test/vcm_capturer.h" - -#if defined(WEBRTC_ANDROID) -#include "examples/unityplugin/class_reference_holder.h" -#include "modules/utility/include/helpers_android.h" -#include "sdk/android/src/jni/android_video_track_source.h" -#include "sdk/android/src/jni/jni_helpers.h" -#endif - -// Names used for media stream ids. -const char kAudioLabel[] = "audio_label"; -const char kVideoLabel[] = "video_label"; -const char kStreamId[] = "stream_id"; - -namespace { -static int g_peer_count = 0; -static std::unique_ptr g_worker_thread; -static std::unique_ptr g_signaling_thread; -static rtc::scoped_refptr - g_peer_connection_factory; -#if defined(WEBRTC_ANDROID) -// Android case: the video track does not own the capturer, and it -// relies on the app to dispose the capturer when the peerconnection -// shuts down. 
-static jobject g_camera = nullptr; -#else -class CapturerTrackSource : public webrtc::VideoTrackSource { - public: - static rtc::scoped_refptr Create() { - const size_t kWidth = 640; - const size_t kHeight = 480; - const size_t kFps = 30; - const size_t kDeviceIndex = 0; - std::unique_ptr capturer = absl::WrapUnique( - webrtc::test::VcmCapturer::Create(kWidth, kHeight, kFps, kDeviceIndex)); - if (!capturer) { - return nullptr; - } - return rtc::make_ref_counted(std::move(capturer)); - } - - protected: - explicit CapturerTrackSource( - std::unique_ptr capturer) - : VideoTrackSource(/*remote=*/false), capturer_(std::move(capturer)) {} - - private: - rtc::VideoSourceInterface* source() override { - return capturer_.get(); - } - std::unique_ptr capturer_; -}; - -#endif - -std::string GetEnvVarOrDefault(const char* env_var_name, - const char* default_value) { - std::string value; - const char* env_var = getenv(env_var_name); - if (env_var) - value = env_var; - - if (value.empty()) - value = default_value; - - return value; -} - -std::string GetPeerConnectionString() { - return GetEnvVarOrDefault("WEBRTC_CONNECT", "stun:stun.l.google.com:19302"); -} - -class DummySetSessionDescriptionObserver - : public webrtc::SetSessionDescriptionObserver { - public: - static rtc::scoped_refptr Create() { - return rtc::make_ref_counted(); - } - virtual void OnSuccess() { RTC_LOG(LS_INFO) << __FUNCTION__; } - virtual void OnFailure(webrtc::RTCError error) { - RTC_LOG(LS_INFO) << __FUNCTION__ << " " << ToString(error.type()) << ": " - << error.message(); - } - - protected: - DummySetSessionDescriptionObserver() {} - ~DummySetSessionDescriptionObserver() {} -}; - -} // namespace - -bool SimplePeerConnection::InitializePeerConnection(const char** turn_urls, - const int no_of_urls, - const char* username, - const char* credential, - bool is_receiver) { - RTC_DCHECK(peer_connection_.get() == nullptr); - - if (g_peer_connection_factory == nullptr) { - g_worker_thread = rtc::Thread::Create(); - g_worker_thread->Start(); - g_signaling_thread = rtc::Thread::Create(); - g_signaling_thread->Start(); - - g_peer_connection_factory = webrtc::CreatePeerConnectionFactory( - g_worker_thread.get(), g_worker_thread.get(), g_signaling_thread.get(), - nullptr, webrtc::CreateBuiltinAudioEncoderFactory(), - webrtc::CreateBuiltinAudioDecoderFactory(), - std::unique_ptr( - new webrtc::MultiplexEncoderFactory( - std::make_unique())), - std::unique_ptr( - new webrtc::MultiplexDecoderFactory( - std::make_unique())), - nullptr, nullptr); - } - if (!g_peer_connection_factory.get()) { - DeletePeerConnection(); - return false; - } - - g_peer_count++; - if (!CreatePeerConnection(turn_urls, no_of_urls, username, credential)) { - DeletePeerConnection(); - return false; - } - - mandatory_receive_ = is_receiver; - return peer_connection_.get() != nullptr; -} - -bool SimplePeerConnection::CreatePeerConnection(const char** turn_urls, - const int no_of_urls, - const char* username, - const char* credential) { - RTC_DCHECK(g_peer_connection_factory.get() != nullptr); - RTC_DCHECK(peer_connection_.get() == nullptr); - - local_video_observer_.reset(new VideoObserver()); - remote_video_observer_.reset(new VideoObserver()); - - // Add the turn server. 
- if (turn_urls != nullptr) { - if (no_of_urls > 0) { - webrtc::PeerConnectionInterface::IceServer turn_server; - for (int i = 0; i < no_of_urls; i++) { - std::string url(turn_urls[i]); - if (url.length() > 0) - turn_server.urls.push_back(turn_urls[i]); - } - - std::string user_name(username); - if (user_name.length() > 0) - turn_server.username = username; - - std::string password(credential); - if (password.length() > 0) - turn_server.password = credential; - - config_.servers.push_back(turn_server); - } - } - - // Add the stun server. - webrtc::PeerConnectionInterface::IceServer stun_server; - stun_server.uri = GetPeerConnectionString(); - config_.servers.push_back(stun_server); - - auto result = g_peer_connection_factory->CreatePeerConnectionOrError( - config_, webrtc::PeerConnectionDependencies(this)); - if (!result.ok()) { - peer_connection_ = nullptr; - return false; - } - peer_connection_ = result.MoveValue(); - return true; -} - -void SimplePeerConnection::DeletePeerConnection() { - g_peer_count--; - -#if defined(WEBRTC_ANDROID) - if (g_camera) { - JNIEnv* env = webrtc::jni::GetEnv(); - jclass pc_factory_class = - unity_plugin::FindClass(env, "org/webrtc/UnityUtility"); - jmethodID stop_camera_method = webrtc::GetStaticMethodID( - env, pc_factory_class, "StopCamera", "(Lorg/webrtc/VideoCapturer;)V"); - - env->CallStaticVoidMethod(pc_factory_class, stop_camera_method, g_camera); - CHECK_EXCEPTION(env); - - g_camera = nullptr; - } -#endif - - CloseDataChannel(); - peer_connection_ = nullptr; - active_streams_.clear(); - - if (g_peer_count == 0) { - g_peer_connection_factory = nullptr; - g_signaling_thread.reset(); - g_worker_thread.reset(); - } -} - -bool SimplePeerConnection::CreateOffer() { - if (!peer_connection_.get()) - return false; - - webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options; - if (mandatory_receive_) { - options.offer_to_receive_audio = true; - options.offer_to_receive_video = true; - } - peer_connection_->CreateOffer(this, options); - return true; -} - -bool SimplePeerConnection::CreateAnswer() { - if (!peer_connection_.get()) - return false; - - webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options; - if (mandatory_receive_) { - options.offer_to_receive_audio = true; - options.offer_to_receive_video = true; - } - peer_connection_->CreateAnswer(this, options); - return true; -} - -void SimplePeerConnection::OnSuccess( - webrtc::SessionDescriptionInterface* desc) { - peer_connection_->SetLocalDescription( - DummySetSessionDescriptionObserver::Create().get(), desc); - - std::string sdp; - desc->ToString(&sdp); - - if (OnLocalSdpReady) - OnLocalSdpReady(desc->type().c_str(), sdp.c_str()); -} - -void SimplePeerConnection::OnFailure(webrtc::RTCError error) { - RTC_LOG(LS_ERROR) << ToString(error.type()) << ": " << error.message(); - - // TODO(hta): include error.type in the message - if (OnFailureMessage) - OnFailureMessage(error.message()); -} - -void SimplePeerConnection::OnIceCandidate( - const webrtc::IceCandidateInterface* candidate) { - RTC_LOG(LS_INFO) << __FUNCTION__ << " " << candidate->sdp_mline_index(); - - std::string sdp; - if (!candidate->ToString(&sdp)) { - RTC_LOG(LS_ERROR) << "Failed to serialize candidate"; - return; - } - - if (OnIceCandidateReady) - OnIceCandidateReady(sdp.c_str(), candidate->sdp_mline_index(), - candidate->sdp_mid().c_str()); -} - -void SimplePeerConnection::RegisterOnLocalI420FrameReady( - I420FRAMEREADY_CALLBACK callback) { - if (local_video_observer_) - local_video_observer_->SetVideoCallback(callback); -} 
- -void SimplePeerConnection::RegisterOnRemoteI420FrameReady( - I420FRAMEREADY_CALLBACK callback) { - if (remote_video_observer_) - remote_video_observer_->SetVideoCallback(callback); -} - -void SimplePeerConnection::RegisterOnLocalDataChannelReady( - LOCALDATACHANNELREADY_CALLBACK callback) { - OnLocalDataChannelReady = callback; -} - -void SimplePeerConnection::RegisterOnDataFromDataChannelReady( - DATAFROMEDATECHANNELREADY_CALLBACK callback) { - OnDataFromDataChannelReady = callback; -} - -void SimplePeerConnection::RegisterOnFailure(FAILURE_CALLBACK callback) { - OnFailureMessage = callback; -} - -void SimplePeerConnection::RegisterOnAudioBusReady( - AUDIOBUSREADY_CALLBACK callback) { - OnAudioReady = callback; -} - -void SimplePeerConnection::RegisterOnLocalSdpReadytoSend( - LOCALSDPREADYTOSEND_CALLBACK callback) { - OnLocalSdpReady = callback; -} - -void SimplePeerConnection::RegisterOnIceCandidateReadytoSend( - ICECANDIDATEREADYTOSEND_CALLBACK callback) { - OnIceCandidateReady = callback; -} - -bool SimplePeerConnection::SetRemoteDescription(const char* type, - const char* sdp) { - if (!peer_connection_) - return false; - - std::string remote_desc(sdp); - std::string desc_type(type); - webrtc::SdpParseError error; - webrtc::SessionDescriptionInterface* session_description( - webrtc::CreateSessionDescription(desc_type, remote_desc, &error)); - if (!session_description) { - RTC_LOG(LS_WARNING) << "Can't parse received session description message. " - "SdpParseError was: " - << error.description; - return false; - } - RTC_LOG(LS_INFO) << " Received session description :" << remote_desc; - peer_connection_->SetRemoteDescription( - DummySetSessionDescriptionObserver::Create().get(), session_description); - - return true; -} - -bool SimplePeerConnection::AddIceCandidate(const char* candidate, - const int sdp_mlineindex, - const char* sdp_mid) { - if (!peer_connection_) - return false; - - webrtc::SdpParseError error; - std::unique_ptr ice_candidate( - webrtc::CreateIceCandidate(sdp_mid, sdp_mlineindex, candidate, &error)); - if (!ice_candidate.get()) { - RTC_LOG(LS_WARNING) << "Can't parse received candidate message. 
" - "SdpParseError was: " - << error.description; - return false; - } - if (!peer_connection_->AddIceCandidate(ice_candidate.get())) { - RTC_LOG(LS_WARNING) << "Failed to apply the received candidate"; - return false; - } - RTC_LOG(LS_INFO) << " Received candidate :" << candidate; - return true; -} - -void SimplePeerConnection::SetAudioControl(bool is_mute, bool is_record) { - is_mute_audio_ = is_mute; - is_record_audio_ = is_record; - - SetAudioControl(); -} - -void SimplePeerConnection::SetAudioControl() { - if (!remote_stream_) - return; - webrtc::AudioTrackVector tracks = remote_stream_->GetAudioTracks(); - if (tracks.empty()) - return; - - rtc::scoped_refptr& audio_track = tracks[0]; - if (is_record_audio_) - audio_track->AddSink(this); - else - audio_track->RemoveSink(this); - - for (auto& track : tracks) { - if (is_mute_audio_) - track->set_enabled(false); - else - track->set_enabled(true); - } -} - -void SimplePeerConnection::OnAddStream( - rtc::scoped_refptr stream) { - RTC_LOG(LS_INFO) << __FUNCTION__ << " " << stream->id(); - remote_stream_ = stream; - if (remote_video_observer_ && !remote_stream_->GetVideoTracks().empty()) { - remote_stream_->GetVideoTracks()[0]->AddOrUpdateSink( - remote_video_observer_.get(), rtc::VideoSinkWants()); - } - SetAudioControl(); -} - -void SimplePeerConnection::AddStreams(bool audio_only) { - if (active_streams_.find(kStreamId) != active_streams_.end()) - return; // Already added. - - rtc::scoped_refptr stream = - g_peer_connection_factory->CreateLocalMediaStream(kStreamId); - - rtc::scoped_refptr audio_track( - g_peer_connection_factory->CreateAudioTrack( - kAudioLabel, - g_peer_connection_factory->CreateAudioSource(cricket::AudioOptions()) - .get())); - stream->AddTrack(audio_track); - - if (!audio_only) { -#if defined(WEBRTC_ANDROID) - JNIEnv* env = webrtc::jni::GetEnv(); - jclass pc_factory_class = - unity_plugin::FindClass(env, "org/webrtc/UnityUtility"); - jmethodID load_texture_helper_method = webrtc::GetStaticMethodID( - env, pc_factory_class, "LoadSurfaceTextureHelper", - "()Lorg/webrtc/SurfaceTextureHelper;"); - jobject texture_helper = env->CallStaticObjectMethod( - pc_factory_class, load_texture_helper_method); - CHECK_EXCEPTION(env); - RTC_DCHECK(texture_helper != nullptr) - << "Cannot get the Surface Texture Helper."; - - auto source = rtc::make_ref_counted( - g_signaling_thread.get(), env, /*is_screencast=*/false, - /*align_timestamps=*/true); - - // link with VideoCapturer (Camera); - jmethodID link_camera_method = webrtc::GetStaticMethodID( - env, pc_factory_class, "LinkCamera", - "(JLorg/webrtc/SurfaceTextureHelper;)Lorg/webrtc/VideoCapturer;"); - jobject camera_tmp = - env->CallStaticObjectMethod(pc_factory_class, link_camera_method, - (jlong)source.get(), texture_helper); - CHECK_EXCEPTION(env); - g_camera = (jobject)env->NewGlobalRef(camera_tmp); - - rtc::scoped_refptr video_track( - g_peer_connection_factory->CreateVideoTrack(source, kVideoLabel)); - stream->AddTrack(video_track); -#else - rtc::scoped_refptr video_device = - CapturerTrackSource::Create(); - if (video_device) { - rtc::scoped_refptr video_track( - g_peer_connection_factory->CreateVideoTrack(video_device, - kVideoLabel)); - - stream->AddTrack(video_track); - } -#endif - if (local_video_observer_ && !stream->GetVideoTracks().empty()) { - stream->GetVideoTracks()[0]->AddOrUpdateSink(local_video_observer_.get(), - rtc::VideoSinkWants()); - } - } - - if (!peer_connection_->AddStream(stream.get())) { - RTC_LOG(LS_ERROR) << "Adding stream to PeerConnection failed"; - 
} - - typedef std::pair> - MediaStreamPair; - active_streams_.insert(MediaStreamPair(stream->id(), stream)); -} - -bool SimplePeerConnection::CreateDataChannel() { - struct webrtc::DataChannelInit init; - init.ordered = true; - init.reliable = true; - auto result = peer_connection_->CreateDataChannelOrError("Hello", &init); - if (result.ok()) { - data_channel_ = result.MoveValue(); - data_channel_->RegisterObserver(this); - RTC_LOG(LS_INFO) << "Succeeds to create data channel"; - return true; - } else { - RTC_LOG(LS_INFO) << "Fails to create data channel"; - return false; - } -} - -void SimplePeerConnection::CloseDataChannel() { - if (data_channel_.get()) { - data_channel_->UnregisterObserver(); - data_channel_->Close(); - } - data_channel_ = nullptr; -} - -bool SimplePeerConnection::SendDataViaDataChannel(const std::string& data) { - if (!data_channel_.get()) { - RTC_LOG(LS_INFO) << "Data channel is not established"; - return false; - } - webrtc::DataBuffer buffer(data); - data_channel_->Send(buffer); - return true; -} - -// Peerconnection observer -void SimplePeerConnection::OnDataChannel( - rtc::scoped_refptr channel) { - channel->RegisterObserver(this); -} - -void SimplePeerConnection::OnStateChange() { - if (data_channel_) { - webrtc::DataChannelInterface::DataState state = data_channel_->state(); - if (state == webrtc::DataChannelInterface::kOpen) { - if (OnLocalDataChannelReady) - OnLocalDataChannelReady(); - RTC_LOG(LS_INFO) << "Data channel is open"; - } - } -} - -// A data buffer was successfully received. -void SimplePeerConnection::OnMessage(const webrtc::DataBuffer& buffer) { - size_t size = buffer.data.size(); - char* msg = new char[size + 1]; - memcpy(msg, buffer.data.data(), size); - msg[size] = 0; - if (OnDataFromDataChannelReady) - OnDataFromDataChannelReady(msg); - delete[] msg; -} - -// AudioTrackSinkInterface implementation. -void SimplePeerConnection::OnData(const void* audio_data, - int bits_per_sample, - int sample_rate, - size_t number_of_channels, - size_t number_of_frames) { - if (OnAudioReady) - OnAudioReady(audio_data, bits_per_sample, sample_rate, - static_cast(number_of_channels), - static_cast(number_of_frames)); -} - -std::vector SimplePeerConnection::GetRemoteAudioTrackSsrcs() { - std::vector> receivers = - peer_connection_->GetReceivers(); - - std::vector ssrcs; - for (const auto& receiver : receivers) { - if (receiver->media_type() != cricket::MEDIA_TYPE_AUDIO) - continue; - - std::vector params = - receiver->GetParameters().encodings; - - for (const auto& param : params) { - uint32_t ssrc = param.ssrc.value_or(0); - if (ssrc > 0) - ssrcs.push_back(ssrc); - } - } - - return ssrcs; -} diff --git a/examples/unityplugin/simple_peer_connection.h b/examples/unityplugin/simple_peer_connection.h deleted file mode 100644 index de652ef118..0000000000 --- a/examples/unityplugin/simple_peer_connection.h +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef EXAMPLES_UNITYPLUGIN_SIMPLE_PEER_CONNECTION_H_ -#define EXAMPLES_UNITYPLUGIN_SIMPLE_PEER_CONNECTION_H_ - -#include -#include -#include -#include - -#include "api/data_channel_interface.h" -#include "api/media_stream_interface.h" -#include "api/peer_connection_interface.h" -#include "examples/unityplugin/unity_plugin_apis.h" -#include "examples/unityplugin/video_observer.h" - -class SimplePeerConnection : public webrtc::PeerConnectionObserver, - public webrtc::CreateSessionDescriptionObserver, - public webrtc::DataChannelObserver, - public webrtc::AudioTrackSinkInterface { - public: - SimplePeerConnection() {} - ~SimplePeerConnection() {} - - bool InitializePeerConnection(const char** turn_urls, - int no_of_urls, - const char* username, - const char* credential, - bool is_receiver); - void DeletePeerConnection(); - void AddStreams(bool audio_only); - bool CreateDataChannel(); - bool CreateOffer(); - bool CreateAnswer(); - bool SendDataViaDataChannel(const std::string& data); - void SetAudioControl(bool is_mute, bool is_record); - - // Register callback functions. - void RegisterOnLocalI420FrameReady(I420FRAMEREADY_CALLBACK callback); - void RegisterOnRemoteI420FrameReady(I420FRAMEREADY_CALLBACK callback); - void RegisterOnLocalDataChannelReady(LOCALDATACHANNELREADY_CALLBACK callback); - void RegisterOnDataFromDataChannelReady( - DATAFROMEDATECHANNELREADY_CALLBACK callback); - void RegisterOnFailure(FAILURE_CALLBACK callback); - void RegisterOnAudioBusReady(AUDIOBUSREADY_CALLBACK callback); - void RegisterOnLocalSdpReadytoSend(LOCALSDPREADYTOSEND_CALLBACK callback); - void RegisterOnIceCandidateReadytoSend( - ICECANDIDATEREADYTOSEND_CALLBACK callback); - bool SetRemoteDescription(const char* type, const char* sdp); - bool AddIceCandidate(const char* sdp, - int sdp_mlineindex, - const char* sdp_mid); - - protected: - // create a peerconneciton and add the turn servers info to the configuration. - bool CreatePeerConnection(const char** turn_urls, - int no_of_urls, - const char* username, - const char* credential); - void CloseDataChannel(); - void SetAudioControl(); - - // PeerConnectionObserver implementation. - void OnSignalingChange( - webrtc::PeerConnectionInterface::SignalingState new_state) override {} - void OnAddStream( - rtc::scoped_refptr stream) override; - void OnRemoveStream( - rtc::scoped_refptr stream) override {} - void OnDataChannel( - rtc::scoped_refptr channel) override; - void OnRenegotiationNeeded() override {} - void OnIceConnectionChange( - webrtc::PeerConnectionInterface::IceConnectionState new_state) override {} - void OnIceGatheringChange( - webrtc::PeerConnectionInterface::IceGatheringState new_state) override {} - void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override; - void OnIceConnectionReceivingChange(bool receiving) override {} - - // CreateSessionDescriptionObserver implementation. - void OnSuccess(webrtc::SessionDescriptionInterface* desc) override; - void OnFailure(webrtc::RTCError error) override; - - // DataChannelObserver implementation. - void OnStateChange() override; - void OnMessage(const webrtc::DataBuffer& buffer) override; - - // AudioTrackSinkInterface implementation. - void OnData(const void* audio_data, - int bits_per_sample, - int sample_rate, - size_t number_of_channels, - size_t number_of_frames) override; - - // Get remote audio tracks ssrcs. 
- std::vector GetRemoteAudioTrackSsrcs(); - - private: - rtc::scoped_refptr peer_connection_; - rtc::scoped_refptr data_channel_; - std::map > - active_streams_; - - std::unique_ptr local_video_observer_; - std::unique_ptr remote_video_observer_; - - rtc::scoped_refptr remote_stream_ = nullptr; - webrtc::PeerConnectionInterface::RTCConfiguration config_; - - LOCALDATACHANNELREADY_CALLBACK OnLocalDataChannelReady = nullptr; - DATAFROMEDATECHANNELREADY_CALLBACK OnDataFromDataChannelReady = nullptr; - FAILURE_CALLBACK OnFailureMessage = nullptr; - AUDIOBUSREADY_CALLBACK OnAudioReady = nullptr; - - LOCALSDPREADYTOSEND_CALLBACK OnLocalSdpReady = nullptr; - ICECANDIDATEREADYTOSEND_CALLBACK OnIceCandidateReady = nullptr; - - bool is_mute_audio_ = false; - bool is_record_audio_ = false; - bool mandatory_receive_ = false; - - // disallow copy-and-assign - SimplePeerConnection(const SimplePeerConnection&) = delete; - SimplePeerConnection& operator=(const SimplePeerConnection&) = delete; -}; - -#endif // EXAMPLES_UNITYPLUGIN_SIMPLE_PEER_CONNECTION_H_ diff --git a/examples/unityplugin/unity_plugin_apis.cc b/examples/unityplugin/unity_plugin_apis.cc deleted file mode 100644 index 6e34d7e1e0..0000000000 --- a/examples/unityplugin/unity_plugin_apis.cc +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "examples/unityplugin/unity_plugin_apis.h" - -#include -#include - -#include "examples/unityplugin/simple_peer_connection.h" - -namespace { -static int g_peer_connection_id = 1; -static std::map> - g_peer_connection_map; -} // namespace - -int CreatePeerConnection(const char** turn_urls, - const int no_of_urls, - const char* username, - const char* credential, - bool mandatory_receive_video) { - g_peer_connection_map[g_peer_connection_id] = - rtc::make_ref_counted(); - - if (!g_peer_connection_map[g_peer_connection_id]->InitializePeerConnection( - turn_urls, no_of_urls, username, credential, mandatory_receive_video)) - return -1; - - return g_peer_connection_id++; -} - -bool ClosePeerConnection(int peer_connection_id) { - if (!g_peer_connection_map.count(peer_connection_id)) - return false; - - g_peer_connection_map[peer_connection_id]->DeletePeerConnection(); - g_peer_connection_map.erase(peer_connection_id); - return true; -} - -bool AddStream(int peer_connection_id, bool audio_only) { - if (!g_peer_connection_map.count(peer_connection_id)) - return false; - - g_peer_connection_map[peer_connection_id]->AddStreams(audio_only); - return true; -} - -bool AddDataChannel(int peer_connection_id) { - if (!g_peer_connection_map.count(peer_connection_id)) - return false; - - return g_peer_connection_map[peer_connection_id]->CreateDataChannel(); -} - -bool CreateOffer(int peer_connection_id) { - if (!g_peer_connection_map.count(peer_connection_id)) - return false; - - return g_peer_connection_map[peer_connection_id]->CreateOffer(); -} - -bool CreateAnswer(int peer_connection_id) { - if (!g_peer_connection_map.count(peer_connection_id)) - return false; - - return g_peer_connection_map[peer_connection_id]->CreateAnswer(); -} - -bool SendDataViaDataChannel(int peer_connection_id, const char* data) { - if 
(!g_peer_connection_map.count(peer_connection_id)) - return false; - - std::string s(data); - g_peer_connection_map[peer_connection_id]->SendDataViaDataChannel(s); - - return true; -} - -bool SetAudioControl(int peer_connection_id, bool is_mute, bool is_record) { - if (!g_peer_connection_map.count(peer_connection_id)) - return false; - - g_peer_connection_map[peer_connection_id]->SetAudioControl(is_mute, - is_record); - return true; -} - -bool SetRemoteDescription(int peer_connection_id, - const char* type, - const char* sdp) { - if (!g_peer_connection_map.count(peer_connection_id)) - return false; - - return g_peer_connection_map[peer_connection_id]->SetRemoteDescription(type, - sdp); -} - -bool AddIceCandidate(const int peer_connection_id, - const char* candidate, - const int sdp_mlineindex, - const char* sdp_mid) { - if (!g_peer_connection_map.count(peer_connection_id)) - return false; - - return g_peer_connection_map[peer_connection_id]->AddIceCandidate( - candidate, sdp_mlineindex, sdp_mid); -} - -// Register callback functions. -bool RegisterOnLocalI420FrameReady(int peer_connection_id, - I420FRAMEREADY_CALLBACK callback) { - if (!g_peer_connection_map.count(peer_connection_id)) - return false; - - g_peer_connection_map[peer_connection_id]->RegisterOnLocalI420FrameReady( - callback); - return true; -} - -bool RegisterOnRemoteI420FrameReady(int peer_connection_id, - I420FRAMEREADY_CALLBACK callback) { - if (!g_peer_connection_map.count(peer_connection_id)) - return false; - - g_peer_connection_map[peer_connection_id]->RegisterOnRemoteI420FrameReady( - callback); - return true; -} - -bool RegisterOnLocalDataChannelReady(int peer_connection_id, - LOCALDATACHANNELREADY_CALLBACK callback) { - if (!g_peer_connection_map.count(peer_connection_id)) - return false; - - g_peer_connection_map[peer_connection_id]->RegisterOnLocalDataChannelReady( - callback); - return true; -} - -bool RegisterOnDataFromDataChannelReady( - int peer_connection_id, - DATAFROMEDATECHANNELREADY_CALLBACK callback) { - if (!g_peer_connection_map.count(peer_connection_id)) - return false; - - g_peer_connection_map[peer_connection_id]->RegisterOnDataFromDataChannelReady( - callback); - return true; -} - -bool RegisterOnFailure(int peer_connection_id, FAILURE_CALLBACK callback) { - if (!g_peer_connection_map.count(peer_connection_id)) - return false; - - g_peer_connection_map[peer_connection_id]->RegisterOnFailure(callback); - return true; -} - -bool RegisterOnAudioBusReady(int peer_connection_id, - AUDIOBUSREADY_CALLBACK callback) { - if (!g_peer_connection_map.count(peer_connection_id)) - return false; - - g_peer_connection_map[peer_connection_id]->RegisterOnAudioBusReady(callback); - return true; -} - -// Singnaling channel related functions. 
-bool RegisterOnLocalSdpReadytoSend(int peer_connection_id, - LOCALSDPREADYTOSEND_CALLBACK callback) { - if (!g_peer_connection_map.count(peer_connection_id)) - return false; - - g_peer_connection_map[peer_connection_id]->RegisterOnLocalSdpReadytoSend( - callback); - return true; -} - -bool RegisterOnIceCandidateReadytoSend( - int peer_connection_id, - ICECANDIDATEREADYTOSEND_CALLBACK callback) { - if (!g_peer_connection_map.count(peer_connection_id)) - return false; - - g_peer_connection_map[peer_connection_id]->RegisterOnIceCandidateReadytoSend( - callback); - return true; -} diff --git a/examples/unityplugin/unity_plugin_apis.h b/examples/unityplugin/unity_plugin_apis.h deleted file mode 100644 index 9790dc57b9..0000000000 --- a/examples/unityplugin/unity_plugin_apis.h +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This file provides an example of unity native plugin APIs. - -#ifndef EXAMPLES_UNITYPLUGIN_UNITY_PLUGIN_APIS_H_ -#define EXAMPLES_UNITYPLUGIN_UNITY_PLUGIN_APIS_H_ - -#include - -// Definitions of callback functions. -typedef void (*I420FRAMEREADY_CALLBACK)(const uint8_t* data_y, - const uint8_t* data_u, - const uint8_t* data_v, - const uint8_t* data_a, - int stride_y, - int stride_u, - int stride_v, - int stride_a, - uint32_t width, - uint32_t height); -typedef void (*LOCALDATACHANNELREADY_CALLBACK)(); -typedef void (*DATAFROMEDATECHANNELREADY_CALLBACK)(const char* msg); -typedef void (*FAILURE_CALLBACK)(const char* msg); -typedef void (*LOCALSDPREADYTOSEND_CALLBACK)(const char* type, const char* sdp); -typedef void (*ICECANDIDATEREADYTOSEND_CALLBACK)(const char* candidate, - int sdp_mline_index, - const char* sdp_mid); -typedef void (*AUDIOBUSREADY_CALLBACK)(const void* audio_data, - int bits_per_sample, - int sample_rate, - int number_of_channels, - int number_of_frames); - -#if defined(WEBRTC_WIN) -#define WEBRTC_PLUGIN_API __declspec(dllexport) -#elif defined(WEBRTC_ANDROID) -#define WEBRTC_PLUGIN_API __attribute__((visibility("default"))) -#endif -extern "C" { -// Create a peerconnection and return a unique peer connection id. -WEBRTC_PLUGIN_API int CreatePeerConnection(const char** turn_urls, - int no_of_urls, - const char* username, - const char* credential, - bool mandatory_receive_video); -// Close a peerconnection. -WEBRTC_PLUGIN_API bool ClosePeerConnection(int peer_connection_id); -// Add a audio stream. If audio_only is true, the stream only has an audio -// track and no video track. -WEBRTC_PLUGIN_API bool AddStream(int peer_connection_id, bool audio_only); -// Add a data channel to peer connection. -WEBRTC_PLUGIN_API bool AddDataChannel(int peer_connection_id); -// Create a peer connection offer. -WEBRTC_PLUGIN_API bool CreateOffer(int peer_connection_id); -// Create a peer connection answer. -WEBRTC_PLUGIN_API bool CreateAnswer(int peer_connection_id); -// Send data through data channel. -WEBRTC_PLUGIN_API bool SendDataViaDataChannel(int peer_connection_id, - const char* data); -// Set audio control. If is_mute=true, no audio will playout. If is_record=true, -// AUDIOBUSREADY_CALLBACK will be called every 10 ms. 
-WEBRTC_PLUGIN_API bool SetAudioControl(int peer_connection_id, - bool is_mute, - bool is_record); -// Set remote sdp. -WEBRTC_PLUGIN_API bool SetRemoteDescription(int peer_connection_id, - const char* type, - const char* sdp); -// Add ice candidate. -WEBRTC_PLUGIN_API bool AddIceCandidate(int peer_connection_id, - const char* candidate, - int sdp_mlineindex, - const char* sdp_mid); - -// Register callback functions. -WEBRTC_PLUGIN_API bool RegisterOnLocalI420FrameReady( - int peer_connection_id, - I420FRAMEREADY_CALLBACK callback); -WEBRTC_PLUGIN_API bool RegisterOnRemoteI420FrameReady( - int peer_connection_id, - I420FRAMEREADY_CALLBACK callback); -WEBRTC_PLUGIN_API bool RegisterOnLocalDataChannelReady( - int peer_connection_id, - LOCALDATACHANNELREADY_CALLBACK callback); -WEBRTC_PLUGIN_API bool RegisterOnDataFromDataChannelReady( - int peer_connection_id, - DATAFROMEDATECHANNELREADY_CALLBACK callback); -WEBRTC_PLUGIN_API bool RegisterOnFailure(int peer_connection_id, - FAILURE_CALLBACK callback); -WEBRTC_PLUGIN_API bool RegisterOnAudioBusReady(int peer_connection_id, - AUDIOBUSREADY_CALLBACK callback); -WEBRTC_PLUGIN_API bool RegisterOnLocalSdpReadytoSend( - int peer_connection_id, - LOCALSDPREADYTOSEND_CALLBACK callback); -WEBRTC_PLUGIN_API bool RegisterOnIceCandidateReadytoSend( - int peer_connection_id, - ICECANDIDATEREADYTOSEND_CALLBACK callback); -} - -#endif // EXAMPLES_UNITYPLUGIN_UNITY_PLUGIN_APIS_H_ diff --git a/examples/unityplugin/video_observer.cc b/examples/unityplugin/video_observer.cc deleted file mode 100644 index 7e33b08e27..0000000000 --- a/examples/unityplugin/video_observer.cc +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "examples/unityplugin/video_observer.h" - -void VideoObserver::SetVideoCallback(I420FRAMEREADY_CALLBACK callback) { - std::lock_guard lock(mutex); - OnI420FrameReady = callback; -} - -void VideoObserver::OnFrame(const webrtc::VideoFrame& frame) { - std::unique_lock lock(mutex); - if (!OnI420FrameReady) - return; - - rtc::scoped_refptr buffer( - frame.video_frame_buffer()); - - if (buffer->type() != webrtc::VideoFrameBuffer::Type::kI420A) { - rtc::scoped_refptr i420_buffer = - buffer->ToI420(); - OnI420FrameReady(i420_buffer->DataY(), i420_buffer->DataU(), - i420_buffer->DataV(), nullptr, i420_buffer->StrideY(), - i420_buffer->StrideU(), i420_buffer->StrideV(), 0, - frame.width(), frame.height()); - - } else { - // The buffer has alpha channel. - const webrtc::I420ABufferInterface* i420a_buffer = buffer->GetI420A(); - - OnI420FrameReady(i420a_buffer->DataY(), i420a_buffer->DataU(), - i420a_buffer->DataV(), i420a_buffer->DataA(), - i420a_buffer->StrideY(), i420a_buffer->StrideU(), - i420a_buffer->StrideV(), i420a_buffer->StrideA(), - frame.width(), frame.height()); - } -} diff --git a/examples/unityplugin/video_observer.h b/examples/unityplugin/video_observer.h deleted file mode 100644 index 01ccd2191a..0000000000 --- a/examples/unityplugin/video_observer.h +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef EXAMPLES_UNITYPLUGIN_VIDEO_OBSERVER_H_ -#define EXAMPLES_UNITYPLUGIN_VIDEO_OBSERVER_H_ - -#include - -#include "api/media_stream_interface.h" -#include "api/video/video_sink_interface.h" -#include "examples/unityplugin/unity_plugin_apis.h" - -class VideoObserver : public rtc::VideoSinkInterface { - public: - VideoObserver() {} - ~VideoObserver() {} - void SetVideoCallback(I420FRAMEREADY_CALLBACK callback); - - protected: - // VideoSinkInterface implementation - void OnFrame(const webrtc::VideoFrame& frame) override; - - private: - I420FRAMEREADY_CALLBACK OnI420FrameReady = nullptr; - std::mutex mutex; -}; - -#endif // EXAMPLES_UNITYPLUGIN_VIDEO_OBSERVER_H_ diff --git a/experiments/OWNERS b/experiments/OWNERS new file mode 100644 index 0000000000..116a65328a --- /dev/null +++ b/experiments/OWNERS @@ -0,0 +1,2 @@ +# Allow anyone to update the field trial experiment list. +per-file field_trials.py=* diff --git a/experiments/field_trials.py b/experiments/field_trials.py index c9a73ce1f3..b23b72457b 100755 --- a/experiments/field_trials.py +++ b/experiments/field_trials.py @@ -8,107 +8,1122 @@ # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. +import datetime +from datetime import date +import hashlib import sys -from typing import Set +from typing import FrozenSet, List, Set import argparse import dataclasses -# TODO(bugs.webrtc.org/14154): End date and bug should also be stored. @dataclasses.dataclass(frozen=True) class FieldTrial: - """Representation of all attributes associated with a field trial. + """Representation of all attributes associated with a field trial. - Attributes: - key: Field trial key. - """ - key: str + Attributes: + key: Field trial key. + bug_id: Associated open bug containing more context. + end_date: Date when the field trial expires and must be deleted. + """ + key: str + bug_id: int + end_date: date + + def bug_url(self) -> str: + if self.bug_id <= 0: + return '' + return f'https://issues.webrtc.org/issues/{self.bug_id}' # As per the policy in `g3doc/field-trials.md`, all field trials should be -# registered in the container below. Please keep the keys sorted. -REGISTERED_FIELD_TRIALS: Set[FieldTrial] = { - FieldTrial(''), # TODO(bugs.webrtc.org/14154): Populate -} - - -def RegistryHeader(field_trials: Set[FieldTrial] = None) -> str: - """Generates a C++ header with all field trial keys. - - Args: - field_trials: Field trials to include in the header. - - Returns: - String representation of a C++ header file containing all field trial keys. - - >>> trials = {FieldTrial('B'), FieldTrial('A'), FieldTrial('B')} - >>> print(RegistryHeader(trials)) - // This file was automatically generated. Do not edit. 
- - #ifndef GEN_REGISTERED_FIELD_TRIALS_H_ - #define GEN_REGISTERED_FIELD_TRIALS_H_ - - #include "absl/strings/string_view.h" - - namespace webrtc { - - inline constexpr absl::string_view kRegisteredFieldTrials[] = { - "A", - "B", - }; - - } // namespace webrtc - - #endif // GEN_REGISTERED_FIELD_TRIALS_H_ - - """ - if not field_trials: - field_trials = REGISTERED_FIELD_TRIALS - registered_keys = [f.key for f in field_trials] - keys = '\n'.join(f' "{k}",' for k in sorted(registered_keys)) - return ('// This file was automatically generated. Do not edit.\n' - '\n' - '#ifndef GEN_REGISTERED_FIELD_TRIALS_H_\n' - '#define GEN_REGISTERED_FIELD_TRIALS_H_\n' - '\n' - '#include "absl/strings/string_view.h"\n' - '\n' - 'namespace webrtc {\n' - '\n' - 'inline constexpr absl::string_view kRegisteredFieldTrials[] = {\n' - f'{keys}\n' - '};\n' - '\n' - '} // namespace webrtc\n' - '\n' - '#endif // GEN_REGISTERED_FIELD_TRIALS_H_\n') - - -def CmdHeader(args: argparse.Namespace) -> None: - args.output.write(RegistryHeader()) +# registered in the container below. +ACTIVE_FIELD_TRIALS: FrozenSet[FieldTrial] = frozenset([ + # keep-sorted start + FieldTrial('WebRTC-Aec3BufferingMaxAllowedExcessRenderBlocksOverride', + 337900458, + date(2024, 9, 1)), + FieldTrial('WebRTC-Audio-GainController2', + 42232605, + date(2024, 4, 1)), + FieldTrial('WebRTC-Audio-OpusDecodeStereoByDefault', + 379996136, + date(2025, 11, 15)), + FieldTrial('WebRTC-Audio-OpusGeneratePlc', + 42223518, + date(2025, 4, 1)), + FieldTrial('WebRTC-Audio-PriorityBitrate', + 42226125, + date(2024, 4, 1)), + FieldTrial('WebRTC-AV1-OverridePriorityBitrate', + 42226119, + date(2024, 4, 1)), + FieldTrial('WebRTC-Av1-GetEncoderInfoOverride', + 42225234, + date(2024, 4, 1)), + FieldTrial('WebRTC-BitrateAdjusterUseNewfangledHeadroomAdjustment', + 349561566, + date(2025, 8, 26)), + FieldTrial('WebRTC-Bwe-LimitPacingFactorByUpperLinkCapacityEstimate', + 42220543, + date(2025, 1, 1)), + FieldTrial('WebRTC-Bwe-ResetOnAdapterIdChange', + 42225231, + date(2025, 5, 30)), + FieldTrial('WebRTC-DataChannelMessageInterleaving', + 41481008, + date(2024, 10, 1)), + FieldTrial('WebRTC-Dav1dDecoder-CropToRenderResolution', + 405341160, + date(2026, 3, 21)), + FieldTrial('WebRTC-DisableRtxRateLimiter', + 42225500, + date(2024, 4, 1)), + FieldTrial('WebRTC-ElasticBitrateAllocation', + 350555527, + date(2025, 3, 1)), + FieldTrial('WebRTC-EncoderDataDumpDirectory', + 296242528, + date(2024, 4, 1)), + FieldTrial('WebRTC-ForceDtls13', + 383141571, + date(2024,9,1)), + FieldTrial('WebRTC-EnableDtlsPqc', + 404763475, + date(2026,6,1)), + FieldTrial('WebRTC-FrameCadenceAdapter-UseVideoFrameTimestamp', + 42226256, + date(2024, 10, 1)), + FieldTrial('WebRTC-IPv6NetworkResolutionFixes', + 42224598, + date(2024, 4, 1)), + FieldTrial('WebRTC-IncomingTimestampOnMarkerBitOnly', + 42224805, + date(2024, 4, 1)), + FieldTrial('WebRTC-IncreaseIceCandidatePriorityHostSrflx', + 42225331, + date(2024, 4, 1)), + FieldTrial('WebRTC-JitterEstimatorConfig', + 42224404, + date(2024, 4, 1)), + FieldTrial('WebRTC-LibaomAv1Encoder-PostEncodeFrameDrop', + 351644568, + date(2026, 1, 30)), + FieldTrial('WebRTC-LibvpxVp8Encoder-AndroidSpecificThreadingSettings', + 42226191, + date(2024, 9, 1)), + FieldTrial('WebRTC-MixedCodecSimulcast', + 362277533, + date(2025, 9, 1)), + FieldTrial('WebRTC-NoSdpMangleUfrag', + 375571816, + date(2025, 10, 11)), + FieldTrial('WebRTC-NoSdpMangleUfragRestrictedAddresses', + 409713509, + date(2025, 10, 11)), + FieldTrial('WebRTC-Pacer-FastRetransmissions', + 40235589, + 
date(2024, 4, 1)), + FieldTrial('WebRTC-Pacer-KeyframeFlushing', + 42221435, + date(2024, 4, 1)), + FieldTrial('WebRTC-QCM-Dynamic-AV1', + 349860657, + date(2025, 7, 1)), + FieldTrial('WebRTC-QCM-Dynamic-VP8', + 349860657, + date(2025, 7, 1)), + FieldTrial('WebRTC-QCM-Dynamic-VP9', + 349860657, + date(2025, 7, 1)), + FieldTrial('WebRTC-QCM-Static-AV1', + 349860657, + date(2025, 7, 1)), + FieldTrial('WebRTC-QCM-Static-VP8', + 349860657, + date(2025, 7, 1)), + FieldTrial('WebRTC-QCM-Static-VP9', + 349860657, + date(2025, 7, 1)), + FieldTrial('WebRTC-ReceiveBufferSize', + 42225927, + date(2024, 4, 1)), + FieldTrial('WebRTC-RFC8888CongestionControlFeedback', + 42225697, + date(2025, 1, 30)), + FieldTrial('WebRTC-RtcEventLogEncodeDependencyDescriptor', + 42225280, + date(2024, 4, 1)), + FieldTrial('WebRTC-RtcEventLogEncodeNetEqSetMinimumDelayKillSwitch', + 42225058, + date(2024, 4, 1)), + FieldTrial('WebRTC-SetReadyToSendFalseIfSendFail', + 361124449, + date(2024, 12, 1)), + FieldTrial('WebRTC-SrtpRemoveReceiveStream', + 42225949, + date(2024, 10, 1)), + FieldTrial('WebRTC-SwitchEncoderFollowCodecPreferenceOrder', + 378566918, + date(2025, 5, 1)), + FieldTrial('WebRTC-PayloadTypesInTransport', + 360058654, + date(2025, 9, 11)), + FieldTrial('WebRTC-UseAbsCapTimeForG2gMetric', + 401512883, + date(2025, 9, 10)), + FieldTrial('WebRTC-UseNtpTimeAbsoluteSendTime', + 42226305, + date(2024, 9, 1)), + FieldTrial('WebRTC-VP8-MaxFrameInterval', + 42225870, + date(2024, 4, 1)), + FieldTrial('WebRTC-VP9-SvcForSimulcast', + 347737882, + date(2024, 10, 1)), + FieldTrial('WebRTC-Video-EnableRetransmitAllLayers', + 42225262, + date(2024, 4, 1)), + FieldTrial('WebRTC-Video-EncoderFallbackSettings', + 42231704, + date(2024, 4, 1)), + FieldTrial('WebRTC-VideoEncoderSettings', + 40252667, + date(2024, 4, 1)), + FieldTrial('WebRTC-ZeroHertzQueueOverload', + 42225879, + date(2024, 7, 1)), + FieldTrial('WebRTC-Video-H26xPacketBuffer', + 41480904, + date(2024, 6, 1)), + FieldTrial('WebRTC-Video-Vp9FlexibleMode', + 329396373, + date(2025, 6, 26)), + FieldTrial('WebRTC-IceHandshakeDtls', + 367395350, + date(2026, 1, 1)), + FieldTrial('WebRTC-H265-QualityScaling', + 402154973, + date(2026, 1, 1)), + # keep-sorted end +]) # yapf: disable +NO_BUG = -1 +INDEFINITE = date(datetime.MAXYEAR, 1, 1) -def main() -> None: - parser = argparse.ArgumentParser() - subcommand = parser.add_subparsers(dest='cmd') - parser_header = subcommand.add_parser( - 'header', - help='generate C++ header file containing registered field trial keys') - parser_header.add_argument('--output', - default=sys.stdout, - type=argparse.FileType('w'), - required=False, - help='output file') - parser_header.set_defaults(cmd=CmdHeader) - args = parser.parse_args() - - if not args.cmd: - parser.print_help(sys.stderr) +# These field trials precedes the policy in `g3doc/field-trials.md` and are +# therefore not required to follow it. Do not add any new field trials here. +# If you remove an entry you should also update +# POLICY_EXEMPT_FIELD_TRIALS_DIGEST. 
+POLICY_EXEMPT_FIELD_TRIALS: FrozenSet[FieldTrial] = frozenset([ + # keep-sorted start + FieldTrial('WebRTC-AddNetworkCostToVpn', + 42223280, + date(2024, 4, 1)), + FieldTrial('WebRTC-AddPacingToCongestionWindowPushback', + 42220204, + date(2024, 4, 1)), + FieldTrial('WebRTC-AdjustOpusBandwidth', + 42233664, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3AecStateFullResetKillSwitch', + 42221578, + INDEFINITE), + FieldTrial('WebRTC-Aec3AecStateSubtractorAnalyzerResetKillSwitch', + 42221578, + INDEFINITE), + FieldTrial('WebRTC-Aec3AntiHowlingMinimizationKillSwitch', + 150764764, + INDEFINITE), + FieldTrial('WebRTC-Aec3ClampInstQualityToOneKillSwitch', + 42220991, + INDEFINITE), + FieldTrial('WebRTC-Aec3ClampInstQualityToZeroKillSwitch', + 42220991, + INDEFINITE), + FieldTrial('WebRTC-Aec3CoarseFilterResetHangoverKillSwitch', + 42222401, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3ConservativeTailFreqResponse', + 42223361, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3DeactivateInitialStateResetKillSwitch', + 42221578, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3DelayEstimateSmoothingDelayFoundOverride', + 42222934, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3DelayEstimateSmoothingOverride', + 42222934, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3EchoSaturationDetectionKillSwitch', + 42221578, + INDEFINITE), + FieldTrial('WebRTC-Aec3EnforceCaptureDelayEstimationDownmixing', + 42221238, + INDEFINITE), + FieldTrial( + 'WebRTC-Aec3EnforceCaptureDelayEstimationLeftRightPrioritization', + 42221238, + INDEFINITE), + FieldTrial('WebRTC-Aec3EnforceConservativeHfSuppression', + 42222109, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3EnforceLowActiveRenderLimit', + 42221578, + INDEFINITE), + FieldTrial('WebRTC-Aec3EnforceMoreTransparentNearendSuppressorHfTuning', + 42221589, + INDEFINITE), + FieldTrial('WebRTC-Aec3EnforceMoreTransparentNearendSuppressorTuning', + 42221578, + INDEFINITE), + FieldTrial('WebRTC-Aec3EnforceMoreTransparentNormalSuppressorHfTuning', + 42221589, + INDEFINITE), + FieldTrial('WebRTC-Aec3EnforceMoreTransparentNormalSuppressorTuning', + 42221578, + INDEFINITE), + FieldTrial('WebRTC-Aec3EnforceRapidlyAdjustingNearendSuppressorTunings', + 42221578, + INDEFINITE), + FieldTrial('WebRTC-Aec3EnforceRapidlyAdjustingNormalSuppressorTunings', + 42221578, + INDEFINITE), + FieldTrial('WebRTC-Aec3EnforceRenderDelayEstimationDownmixing', + 42221238, + INDEFINITE), + FieldTrial('WebRTC-Aec3EnforceSlowlyAdjustingNearendSuppressorTunings', + 42221578, + INDEFINITE), + FieldTrial('WebRTC-Aec3EnforceSlowlyAdjustingNormalSuppressorTunings', + 42221578, + INDEFINITE), + FieldTrial('WebRTC-Aec3EnforceStationarityProperties', + 42221578, + INDEFINITE), + FieldTrial('WebRTC-Aec3EnforceStationarityPropertiesAtInit', + 42221578, + INDEFINITE), + FieldTrial('WebRTC-Aec3EnforceVeryLowActiveRenderLimit', + 42221578, + INDEFINITE), + FieldTrial('WebRTC-Aec3HighPassFilterEchoReference', + 42222401, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3MinErleDuringOnsetsKillSwitch', + 42220385, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3NonlinearModeReverbKillSwitch', + 42222109, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3OnsetDetectionKillSwitch', + 42221578, + INDEFINITE), + FieldTrial( + 'WebRTC-Aec3RenderDelayEstimationLeftRightPrioritizationKillSwitch', + 42221238, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3SensitiveDominantNearendActivation', + 42221578, + INDEFINITE), + FieldTrial('WebRTC-Aec3SetupSpecificDefaultConfigDefaultsKillSwitch', + 42221236, + INDEFINITE), + 
FieldTrial('WebRTC-Aec3ShortHeadroomKillSwitch', + 42220385, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3StereoContentDetectionKillSwitch', + 42225201, + INDEFINITE), + FieldTrial('WebRTC-Aec3SuppressorAntiHowlingGainOverride', + 42221589, + INDEFINITE), + FieldTrial('WebRTC-Aec3SuppressorDominantNearendEnrExitThresholdOverride', + 42221589, + INDEFINITE), + FieldTrial('WebRTC-Aec3SuppressorDominantNearendEnrThresholdOverride', + 42221589, + INDEFINITE), + FieldTrial('WebRTC-Aec3SuppressorDominantNearendHoldDurationOverride', + 42221589, + INDEFINITE), + FieldTrial('WebRTC-Aec3SuppressorDominantNearendSnrThresholdOverride', + 42221589, + INDEFINITE), + FieldTrial('WebRTC-Aec3SuppressorDominantNearendTriggerThresholdOverride', + 42221589, + INDEFINITE), + FieldTrial('WebRTC-Aec3SuppressorNearendHfMaskSuppressOverride', + 42221589, + INDEFINITE), + FieldTrial('WebRTC-Aec3SuppressorNearendHfMaskTransparentOverride', + 42221589, + INDEFINITE), + FieldTrial('WebRTC-Aec3SuppressorNearendLfMaskSuppressOverride', + 42221589, + INDEFINITE), + FieldTrial('WebRTC-Aec3SuppressorNearendLfMaskTransparentOverride', + 42221589, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3SuppressorNearendMaxDecFactorLfOverride', + 42221589, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3SuppressorNearendMaxIncFactorOverride', + 42221589, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3SuppressorNormalHfMaskSuppressOverride', + 42221589, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3SuppressorNormalHfMaskTransparentOverride', + 42221589, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3SuppressorNormalLfMaskSuppressOverride', + 42221589, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3SuppressorNormalLfMaskTransparentOverride', + 42221589, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3SuppressorNormalMaxDecFactorLfOverride', + 42221589, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3SuppressorNormalMaxIncFactorOverride', + 42221589, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3SuppressorTuningOverride', + 42221589, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3TransparentAntiHowlingGain', + 42221578, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3TransparentModeHmm', + 42222401, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3TransparentModeKillSwitch', + 42234438, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3Use1Dot2SecondsInitialStateDuration', + 42221578, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3Use1Dot6SecondsInitialStateDuration', + 42221578, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3Use2Dot0SecondsInitialStateDuration', + 42221578, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3UseDot1SecondsInitialStateDuration', + 42221578, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3UseDot2SecondsInitialStateDuration', + 42221578, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3UseDot3SecondsInitialStateDuration', + 42221578, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3UseDot6SecondsInitialStateDuration', + 42221578, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3UseDot9SecondsInitialStateDuration', + 42221578, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3UseErleOnsetCompensationInDominantNearend', + 42222842, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3UseLowEarlyReflectionsDefaultGain', + 42221578, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3UseLowLateReflectionsDefaultGain', + 42221578, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3UseNearendReverbLen', + 42223329, + INDEFINITE), + FieldTrial('WebRTC-Aec3UseShortConfigChangeDuration', + 42221578, + date(2024, 4, 1)), + 
FieldTrial('WebRTC-Aec3UseZeroInitialStateDuration', + 42221578, + date(2024, 4, 1)), + FieldTrial('WebRTC-Aec3VerySensitiveDominantNearendActivation', + 42221578, + date(2024, 4, 1)), + FieldTrial('WebRTC-Agc2SimdAvx2KillSwitch', + 42232605, + date(2024, 4, 1)), + FieldTrial('WebRTC-Agc2SimdNeonKillSwitch', + 42232605, + date(2024, 4, 1)), + FieldTrial('WebRTC-Agc2SimdSse2KillSwitch', + 42232605, + date(2024, 4, 1)), + FieldTrial('WebRTC-AllowMACBasedIPv6', + 41480878, + date(2024, 4, 1)), + FieldTrial('WebRTC-AlrDetectorParameters', + 42220590, + INDEFINITE), + FieldTrial('WebRTC-AndroidNetworkMonitor-IsAdapterAvailable', + 42223964, + date(2024, 4, 1)), + FieldTrial('WebRTC-ApmExperimentalMultiChannelCaptureKillSwitch', + 42225202, + INDEFINITE), + FieldTrial('WebRTC-ApmExperimentalMultiChannelRenderKillSwitch', + 42225203, + INDEFINITE), + FieldTrial('WebRTC-Audio-2ndAgcMinMicLevelExperiment', + 40207112, + date(2024, 4, 1)), + FieldTrial('WebRTC-Audio-ABWENoTWCC', + 42233370, + INDEFINITE), + FieldTrial('WebRTC-Audio-AdaptivePtime', + 40694579, + date(2024, 4, 1)), + FieldTrial('WebRTC-Audio-Allocation', + 42220324, + INDEFINITE), + FieldTrial('WebRTC-Audio-AlrProbing', + 42220234, + date(2024, 4, 1)), + FieldTrial('WebRTC-Audio-FecAdaptation', + 42233254, + date(2024, 4, 1)), + FieldTrial('WebRTC-Audio-LegacyOverhead', + 42221084, + INDEFINITE), + FieldTrial('WebRTC-Audio-MinimizeResamplingOnMobile', + 42231221, + date(2024, 4, 1)), + FieldTrial('WebRTC-Audio-NetEqDecisionLogicConfig', + 42223518, + date(2024, 4, 1)), + FieldTrial('WebRTC-Audio-NetEqDelayManagerConfig', + 42220376, + date(2024, 4, 1)), + FieldTrial('WebRTC-Audio-NetEqNackTrackerConfig', + 42220211, + date(2024, 4, 1)), + FieldTrial('WebRTC-Audio-NetEqSmartFlushing', + 42222334, + date(2024, 4, 1)), + FieldTrial('WebRTC-Audio-OpusBitrateMultipliers', + 42221139, + date(2024, 4, 1)), + FieldTrial('WebRTC-Audio-Red-For-Opus', + 42221750, + date(2024, 4, 1)), + FieldTrial('WebRTC-Audio-StableTargetAdaptation', + 42221061, + INDEFINITE), + FieldTrial('WebRTC-Audio-iOS-Holding', + 42233253, + date(2024, 4, 1)), + FieldTrial('WebRTC-AudioDevicePlayoutBufferSizeFactor', + 42221006, + date(2024, 4, 1)), + FieldTrial('WebRTC-BindUsingInterfaceName', + 42220770, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-AllocationProbing', + 42220440, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-AlrProbing', + 42220440, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-EstimateBoundedIncrease', + 42220543, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-ExponentialProbing', + 42220440, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-IgnoreProbesLowerThanNetworkStateEstimate', + 42220543, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-InitialProbing', + 42220440, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-InjectedCongestionController', + 'webrtc:8415', + INDEFINITE), + FieldTrial('WebRTC-Bwe-LimitProbesLowerThanThroughputEstimate', + 42221601, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-LossBasedBweV2', + 42222865, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-LossBasedControl', + NO_BUG, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-MaxRttLimit', + 42234928, + INDEFINITE), + FieldTrial('WebRTC-Bwe-MinAllocAsLowerBound', + NO_BUG, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-NetworkRouteConstraints', + 42221535, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-NoFeedbackReset', + 42234928, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-ProbingBehavior', + 42220440, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-ProbingConfiguration', + 42220440, + 
date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-ReceiveTimeFix', + 42234228, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-ReceiverLimitCapsOnly', + 42222445, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-RobustThroughputEstimatorSettings', + 42220312, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-SafeResetOnRouteChange', + 42234928, + INDEFINITE), + FieldTrial('WebRTC-Bwe-SeparateAudioPackets', + 42221011, + date(2024, 4, 1)), + FieldTrial('WebRTC-Bwe-TrendlineEstimatorSettings', + 42221011, + date(2024, 4, 1)), + FieldTrial('WebRTC-BweBackOffFactor', + 42233342, + INDEFINITE), + FieldTrial('WebRTC-BweLossExperiment', + 42230863, + date(2024, 4, 1)), + FieldTrial('WebRTC-BweRapidRecoveryExperiment', + 42233136, + date(2024, 4, 1)), + FieldTrial('WebRTC-BweThroughputWindowConfig', + 42220312, + date(2024, 4, 1)), + FieldTrial('WebRTC-BweWindowSizeInPackets', + 42233342, + INDEFINITE), + FieldTrial('WebRTC-CongestionWindow', + 42225197, + INDEFINITE), + FieldTrial('WebRTC-CpuLoadEstimator', + 42233645, + date(2024, 4, 1)), + FieldTrial('WebRTC-Debugging-RtpDump', + 42220735, + INDEFINITE), + FieldTrial('WebRTC-DecoderDataDumpDirectory', + 42224491, + INDEFINITE), + FieldTrial('WebRTC-DefaultBitrateLimitsKillSwitch', + NO_BUG, + date(2024, 4, 1)), + FieldTrial('WebRTC-DependencyDescriptorAdvertised', + 42220386, + date(2024, 4, 1)), + FieldTrial('WebRTC-DisableUlpFecExperiment', + NO_BUG, + date(2024, 4, 1)), + FieldTrial('WebRTC-DontIncreaseDelayBasedBweInAlr', + 42220590, + date(2024, 4, 1)), + FieldTrial('WebRTC-DscpFieldTrial', + 42223835, + date(2024, 4, 1)), + FieldTrial('WebRTC-ExtraICEPing', + 42220311, + date(2024, 4, 1)), + FieldTrial('WebRTC-FakeNetworkReceiveConfig', + 42224493, + INDEFINITE), + FieldTrial('WebRTC-FakeNetworkSendConfig', + 42224493, + INDEFINITE), + FieldTrial('WebRTC-FilterAbsSendTimeExtension', + 42220271, + INDEFINITE), + FieldTrial('WebRTC-FindNetworkHandleWithoutIpv6TemporaryPart', + 42221149, + date(2024, 4, 1)), + FieldTrial('WebRTC-FlexFEC-03', + 42230680, + date(2024, 4, 1)), + FieldTrial('WebRTC-FlexFEC-03-Advertised', + 42230680, + date(2024, 4, 1)), + FieldTrial('WebRTC-ForcePlayoutDelay', + 42222016, + INDEFINITE), + FieldTrial('WebRTC-ForceSendPlayoutDelay', + 42222016, + INDEFINITE), + FieldTrial('WebRTC-ForceSimulatedOveruseIntervalMs', + 42224494, + INDEFINITE), + FieldTrial('WebRTC-FrameDropper', + 42234921, + INDEFINITE), + FieldTrial('WebRTC-FullBandHpfKillSwitch', + 42221279, + date(2024, 4, 1)), + FieldTrial('WebRTC-GenericCodecDependencyDescriptor', + 42225273, + INDEFINITE), + FieldTrial('WebRTC-GenericDescriptorAdvertised', + 42234553, + INDEFINITE), + FieldTrial('WebRTC-GenericDescriptorAuth', + 42220132, + INDEFINITE), + FieldTrial('WebRTC-GenericPictureId', + 42234553, + INDEFINITE), + FieldTrial('WebRTC-GetEncoderInfoOverride', + NO_BUG, + date(2024, 4, 1)), + FieldTrial('WebRTC-H264HighProfile', + 41481030, + date(2024, 4, 1)), + FieldTrial('WebRTC-IPv6Default', + NO_BUG, # chromium:413437 + date(2024, 4, 1)), + FieldTrial('WebRTC-IceControllerFieldTrials', + 40658968, + INDEFINITE), + FieldTrial('WebRTC-IceFieldTrials', + 42221103, + INDEFINITE), + FieldTrial('WebRTC-KeyframeInterval', + 42220470, + INDEFINITE), + FieldTrial('WebRTC-LegacyFrameIdJumpBehavior', + 42223541, + date(2024, 4, 1)), + FieldTrial('WebRTC-LegacySimulcastLayerLimit', + 42233936, + INDEFINITE), + FieldTrial('WebRTC-LowresSimulcastBitrateInterpolation', + 42222558, + date(2024, 4, 1)), + FieldTrial('WebRTC-MutedStateKillSwitch', + 177830919, + date(2024, 4, 1)), + 
FieldTrial('WebRTC-Network-UseNWPathMonitor', + 42221045, + date(2024, 4, 1)), + FieldTrial('WebRTC-NetworkMonitorAutoDetect', + 42223964, + date(2024, 4, 1)), + FieldTrial('WebRTC-NormalizeSimulcastResolution', + NO_BUG, + INDEFINITE), + FieldTrial('WebRTC-Pacer-BlockAudio', + 42233548, + INDEFINITE), + FieldTrial('WebRTC-Pacer-DrainQueue', + 42233548, + date(2024, 4, 1)), + FieldTrial('WebRTC-Pacer-IgnoreTransportOverhead', + 42235102, + INDEFINITE), + FieldTrial('WebRTC-Pacer-PadInSilence', + 42233548, + date(2024, 4, 1)), + FieldTrial('WebRTC-PacketBufferMaxSize', + 42235070, + INDEFINITE), + FieldTrial('WebRTC-PcFactoryDefaultBitrates', + 42220941, + date(2024, 4, 1)), + FieldTrial('WebRTC-PiggybackIceCheckAcknowledgement', + NO_BUG, + date(2024, 4, 1)), + FieldTrial('WebRTC-PixelLimitResource', + 42222397, + INDEFINITE), + FieldTrial('WebRTC-ProbingScreenshareBwe', + 42232804, + date(2024, 4, 1)), + FieldTrial('WebRTC-ProtectionOverheadRateThreshold', + 42225198, + INDEFINITE), + FieldTrial('WebRTC-QpParsingKillSwitch', + 42222690, + date(2024, 4, 1)), + FieldTrial('WebRTC-RtcEventLogKillSwitch', + 42222210, + INDEFINITE), + FieldTrial('WebRTC-RtcEventLogNewFormat', + 42233237, + date(2024, 4, 1)), + FieldTrial('WebRTC-RtcpLossNotification', + 42220379, + date(2024, 4, 1)), + FieldTrial('WebRTC-SendBufferSizeBytes', + 42222026, + date(2024, 4, 1)), + FieldTrial('WebRTC-SendNackDelayMs', + 42235176, + date(2024, 4, 1)), + FieldTrial('WebRTC-SetSocketReceiveBuffer', + 42223976, + date(2024, 4, 1)), + FieldTrial('WebRTC-SimulcastEncoderAdapter-GetEncoderInfoOverride', + NO_BUG, + date(2024, 4, 1)), + FieldTrial('WebRTC-SimulcastLayerLimitRoundUp', + NO_BUG, + date(2024, 4, 1)), + FieldTrial('WebRTC-SpsPpsIdrIsH264Keyframe', + 42233557, + INDEFINITE), + FieldTrial('WebRTC-StableTargetRate', + 42220156, + date(2024, 4, 1)), + FieldTrial('WebRTC-StrictPacingAndProbing', + 42233198, + date(2024, 4, 1)), + FieldTrial('WebRTC-StunInterPacketDelay', + NO_BUG, + date(2024, 4, 1)), + FieldTrial('WebRTC-SurfaceCellularTypes', + 42221576, + date(2024, 4, 1)), + FieldTrial('WebRTC-SwitchEncoderOnInitializationFailures', + 42223783, + date(2024, 4, 1)), + FieldTrial('WebRTC-Target-Bitrate-Rtcp', + 42235192, + INDEFINITE), + FieldTrial('WebRTC-TransientSuppressorForcedOff', + 40172597, + INDEFINITE), + FieldTrial('WebRTC-UseBaseHeavyVP8TL3RateAllocation', + 42234670, + date(2024, 4, 1)), + FieldTrial('WebRTC-UseDifferentiatedCellularCosts', + 42221576, + date(2024, 4, 1)), + FieldTrial('WebRTC-UseStandardBytesStats', + 42220573, + date(2024, 4, 1)), + FieldTrial('WebRTC-UseTurnServerAsStunServer', + 42221142, + date(2024, 4, 1)), + FieldTrial('WebRTC-VP8-ForcePartitionResilience', + 42221952, + INDEFINITE), + FieldTrial('WebRTC-VP8-Forced-Fallback-Encoder-v2', + 42231704, + INDEFINITE), + FieldTrial('WebRTC-VP8-GetEncoderInfoOverride', + 42221952, + INDEFINITE), + FieldTrial('WebRTC-VP8-Postproc-Config', + 42221657, + date(2024, 4, 1)), + FieldTrial('WebRTC-VP8-Postproc-Config-Arm', + 42231704, + INDEFINITE), + FieldTrial('WebRTC-VP8IosMaxNumberOfThread', + 42220027, + date(2024, 4, 1)), + FieldTrial('WebRTC-VP9-GetEncoderInfoOverride', + NO_BUG, + date(2024, 4, 1)), + FieldTrial('WebRTC-VP9-LowTierOptimizations', + 42224122, + date(2024, 4, 1)), + FieldTrial('WebRTC-VP9-PerformanceFlags', + 42221657, + date(2024, 4, 1)), + FieldTrial('WebRTC-VP9QualityScaler', + 42221411, + date(2024, 4, 1)), + FieldTrial('WebRTC-Video-BalancedDegradation', + 42232717, + INDEFINITE), + 
FieldTrial('WebRTC-Video-BalancedDegradationSettings', + NO_BUG, + INDEFINITE), + FieldTrial('WebRTC-Video-DisableAutomaticResize', + 42221931, + date(2024, 4, 1)), + FieldTrial('WebRTC-Video-DiscardPacketsWithUnknownSsrc', + 42235091, + INDEFINITE), + FieldTrial('WebRTC-Video-ForcedSwDecoderFallback', + NO_BUG, + INDEFINITE), + FieldTrial('WebRTC-Video-InitialDecoderResolution', + 42222018, + date(2024, 4, 1)), + FieldTrial('WebRTC-Video-MinVideoBitrate', + 42220993, + date(2024, 4, 1)), + FieldTrial('WebRTC-Video-Pacing', + 42220062, + date(2024, 4, 1)), + FieldTrial('WebRTC-Video-PreferTemporalSupportOnBaseLayer', + 42221417, + INDEFINITE), + FieldTrial('WebRTC-Video-QualityScalerSettings', + NO_BUG, + INDEFINITE), + FieldTrial('WebRTC-Video-QualityScaling', + 42234348, + INDEFINITE), + FieldTrial('WebRTC-Video-UseFrameRateForOverhead', + 166341943, + date(2024, 4, 1)), + FieldTrial('WebRTC-VideoFrameTrackingIdAdvertised', + 42222747, + INDEFINITE), + FieldTrial('WebRTC-VideoLayersAllocationAdvertised', + 42222126, + INDEFINITE), + FieldTrial('WebRTC-VideoRateControl', + 42220259, + INDEFINITE), + FieldTrial('WebRTC-Vp9InterLayerPred', + NO_BUG, # chromium:949536 + INDEFINITE), + FieldTrial('WebRTC-Vp9IssueKeyFrameOnLayerDeactivation', + 40595338, + date(2024, 4, 1)), + FieldTrial('WebRTC-ZeroPlayoutDelay', + 40228487, + date(2024, 4, 1)), + # keep-sorted end +]) # yapf: disable + +POLICY_EXEMPT_FIELD_TRIALS_DIGEST: str = \ + 'e10a1af33d0a4cf81a6dc87d963b82cf6092ada3' + +REGISTERED_FIELD_TRIALS: FrozenSet[FieldTrial] = ACTIVE_FIELD_TRIALS.union( + POLICY_EXEMPT_FIELD_TRIALS) + + +def todays_date() -> date: + now = datetime.datetime.now(datetime.timezone.utc) + return date(now.year, now.month, now.day) + + +def registry_header( + field_trials: FrozenSet[FieldTrial] = REGISTERED_FIELD_TRIALS) -> str: + """Generates a C++ header with all field trial keys. + + Args: + field_trials: Field trials to include in the header. + + Returns: + String representation of a C++ header file containing all field trial + keys. + + >>> trials = { + ... FieldTrial('B', '', date(1, 1, 1)), + ... FieldTrial('A', '', date(1, 1, 1)), + ... FieldTrial('B', '', date(2, 2, 2)), + ... } + >>> print(registry_header(trials)) + // This file was automatically generated. Do not edit. + + #ifndef GEN_REGISTERED_FIELD_TRIALS_H_ + #define GEN_REGISTERED_FIELD_TRIALS_H_ + + #include "absl/strings/string_view.h" + + namespace webrtc { + + inline constexpr absl::string_view kRegisteredFieldTrials[] = { + "A", + "B", + }; + + } // namespace webrtc + + #endif // GEN_REGISTERED_FIELD_TRIALS_H_ + + """ + registered_keys = {f.key for f in field_trials} + keys = '\n'.join(f' "{k}",' for k in sorted(registered_keys)) + return ('// This file was automatically generated. Do not edit.\n' + '\n' + '#ifndef GEN_REGISTERED_FIELD_TRIALS_H_\n' + '#define GEN_REGISTERED_FIELD_TRIALS_H_\n' + '\n' + '#include "absl/strings/string_view.h"\n' + '\n' + 'namespace webrtc {\n' + '\n' + 'inline constexpr absl::string_view kRegisteredFieldTrials[] = {\n' + f'{keys}\n' + '};\n' + '\n' + '} // namespace webrtc\n' + '\n' + '#endif // GEN_REGISTERED_FIELD_TRIALS_H_\n') + + +def expired_field_trials( + threshold: date, + field_trials: FrozenSet[FieldTrial] = REGISTERED_FIELD_TRIALS +) -> Set[FieldTrial]: + """Obtains expired field trials. + + Args: + threshold: Date from which to check end date. + field_trials: Field trials to validate. + + Returns: + All expired field trials. + + >>> trials = { + ... FieldTrial('Expired', '', date(1, 1, 1)), + ... 
FieldTrial('Not-Expired', '', date(1, 1, 2)), + ... } + >>> expired_field_trials(date(1, 1, 1), trials) + {FieldTrial(key='Expired', bug='', end_date=datetime.date(1, 1, 1))} + """ + return {f for f in field_trials if f.end_date <= threshold} + + +def validate_field_trials( + field_trials: FrozenSet[FieldTrial] = ACTIVE_FIELD_TRIALS +) -> List[str]: + """Validate that field trials conform to the policy. + + Args: + field_trials: Field trials to validate. + + Returns: + A list of explanations for invalid field trials. + """ + invalid = [] + + sha1 = hashlib.sha1() + for trial in sorted(POLICY_EXEMPT_FIELD_TRIALS, key=lambda f: f.key): + sha1.update(trial.key.encode('ascii')) + if sha1.hexdigest() != POLICY_EXEMPT_FIELD_TRIALS_DIGEST: + invalid.append( + 'POLICY_EXEMPT_FIELD_TRIALS has been modified. Please note that ' + 'you must not add any new entries there. If you removed an entry ' + 'you should also update POLICY_EXEMPT_FIELD_TRIALS_DIGEST. The ' + f'new digest is "{sha1.hexdigest()}".') + + for trial in field_trials: + if not trial.key.startswith('WebRTC-'): + invalid.append(f'{trial.key} does not start with "WebRTC-".') + if trial.bug_id <= 0: + invalid.append(f'{trial.key} must have an associated bug.') + if trial.end_date >= INDEFINITE: + invalid.append(f'{trial.key} must have an end date.') + + return invalid + + +def cmd_header(args: argparse.Namespace) -> None: + if not args.no_validation: + if errors := validate_field_trials(): + print('\n'.join(sorted(errors))) + sys.exit(1) + + args.output.write(registry_header()) + + +def cmd_expired(args: argparse.Namespace) -> None: + today = todays_date() + diff = datetime.timedelta(days=args.in_days) + expired = expired_field_trials( + today + diff, + ACTIVE_FIELD_TRIALS if args.no_exempt else REGISTERED_FIELD_TRIALS) + + if len(expired) <= 0: + return + + expired_by_date = sorted(expired, key=lambda f: (f.end_date, f.key)) + print('\n'.join( + f'{f.key} ' + f'{f"<{f.bug_url()}> " if f.bug_url() else ""}' + f'{"expired" if f.end_date <= today else "expires"} on {f.end_date}' + for f in expired_by_date)) + if any(f.end_date <= today for f in expired_by_date): + sys.exit(1) + + +def cmd_validate(args: argparse.Namespace) -> None: + del args + invalid = validate_field_trials() + + if len(invalid) <= 0: + return + + print('\n'.join(sorted(invalid))) sys.exit(1) - args.cmd(args) + +def main() -> None: + parser = argparse.ArgumentParser() + subcommand = parser.add_subparsers(dest='cmd') + + parser_header = subcommand.add_parser( + 'header', + help='generate C++ header file containing registered field trial keys') + parser_header.add_argument('--output', + default=sys.stdout, + type=argparse.FileType('w'), + required=False, + help='output file') + parser_header.add_argument( + '--no-validation', + default=False, + action='store_true', + required=False, + help='whether to validate the field trials before writing') + parser_header.set_defaults(cmd=cmd_header) + + parser_expired = subcommand.add_parser( + 'expired', + help='lists all expired field trials', + description=''' + Lists all expired field trials. Exits with a non-zero exit status if + any field trial has expired, ignoring the --in-days argument.
+ ''') + parser_expired.add_argument( + '--no-exempt', + default=False, + action='store_true', + required=False, + help='whether to include policy exempt field trials') + parser_expired.add_argument( + '--in-days', + default=0, + type=int, + required=False, + help='number of days relative to today to check') + parser_expired.set_defaults(cmd=cmd_expired) + + parser_validate = subcommand.add_parser( + 'validate', + help='validates that all field trials conform to the policy.', + description=''' + Validates that all field trials conform to the policy. Exits with a + non-zero exit status if any field trial does not. + ''') + parser_validate.set_defaults(cmd=cmd_validate) + + args = parser.parse_args() + + if not args.cmd: + parser.print_help(sys.stderr) + sys.exit(1) + + args.cmd(args) if __name__ == '__main__': - main() + main() diff --git a/g3doc/abseil-in-webrtc.md b/g3doc/abseil-in-webrtc.md index 32fc42d953..9bc07436e2 100644 --- a/g3doc/abseil-in-webrtc.md +++ b/g3doc/abseil-in-webrtc.md @@ -1,5 +1,5 @@ - + # Using Abseil in WebRTC @@ -16,36 +16,35 @@ adds the first use. ## How to depend on Abseil For build targets of type `rtc_library`, `rtc_source_set` and -`rtc_static_library`, dependencies on Abseil need to be listed in `absl_deps` -instead of `deps`. +`rtc_static_library`, dependencies on Abseil need to be listed in `deps`. -This is needed in order to support the Abseil component build in Chromium. In -that build mode, WebRTC will depend on a monolithic Abseil build target that -will generate a shared library. +The GN templates will take care of generating the proper dependency when +used within Chromium or standalone. In that build mode, WebRTC will depend +on a monolithic Abseil build target that will generate a shared library. ## **Allowed** * `absl::AnyInvocable` * `absl::bind_front` * `absl::Cleanup` +* [Hash tables and B-tree ordered][abseil-containers] containers * `absl::InlinedVector` +* `absl_nonnull` and `absl_nullable` * `absl::WrapUnique` -* `absl::optional` and related stuff from `absl/types/optional.h`. * `absl::string_view` * The functions in `absl/strings/ascii.h`, `absl/strings/match.h`, and `absl/strings/str_replace.h`. * The functions in `absl/strings/escaping.h`. -* `absl::is_trivially_copy_constructible`, - `absl::is_trivially_copy_assignable`, and - `absl::is_trivially_destructible` from `absl/meta/type_traits.h`. -* `absl::variant` and related stuff from `absl/types/variant.h`. * The functions in `absl/algorithm/algorithm.h` and `absl/algorithm/container.h`. -* `absl/base/const_init.h` for mutex initialization. * The macros in `absl/base/attributes.h`, `absl/base/config.h` and `absl/base/macros.h`. * `absl/numeric/bits.h` +* Single-argument absl::StrCat +* ABSL_FLAG is allowed in tests and tools, but disallowed in non-test code. + +[abseil-containers]: https://abseil.io/docs/cpp/guides/container ## **Disallowed** @@ -57,28 +56,27 @@ will generate a shared library. *Use `webrtc::Mutex` instead.* -Chromium has a ban on new static initializers, and `absl::Mutex` uses -one. To make `absl::Mutex` available, we would need to nicely ask the -Abseil team to remove that initializer (like they already did for a -spinlock initializer). Additionally, `absl::Mutex` handles time in a -way that may not be compatible with the rest of WebRTC.
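Since the remaining guidance above is simply "use `webrtc::Mutex` instead", here is a minimal illustrative sketch of what a call site typically looks like. It is not part of this change; it assumes the `webrtc::Mutex`/`webrtc::MutexLock` API declared in `rtc_base/synchronization/mutex.h` and the `RTC_GUARDED_BY` annotation from `rtc_base/thread_annotations.h`:

```cpp
// Illustrative sketch only, not part of this CL. Assumes the webrtc::Mutex API
// from rtc_base/synchronization/mutex.h and the thread-safety annotations from
// rtc_base/thread_annotations.h.
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"

class PacketCounter {
 public:
  void Increment() {
    webrtc::MutexLock lock(&mutex_);  // RAII guard; unlocks at end of scope.
    ++count_;
  }

  int count() const {
    webrtc::MutexLock lock(&mutex_);
    return count_;
  }

 private:
  mutable webrtc::Mutex mutex_;
  int count_ RTC_GUARDED_BY(mutex_) = 0;
};
```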
+### `absl::optional` and `absl::variant` + +*Use `std::optional` and `std::variant` directly.* ### `absl::Span` -*Use `rtc::ArrayView` instead.* +*Use `webrtc::ArrayView` instead.* -`absl::Span` differs from `rtc::ArrayView` on several points, and both -of them differ from the `std::span` that was voted into -C++20—and `std::span` is likely to undergo further changes -before C++20 is finalized. We should just keep using `rtc::ArrayView` -and avoid `absl::Span` until C++20 is finalized and the Abseil team -has decided if they will change `absl::Span` to match. -[Bug](https://bugs.webrtc.org/9214). +`absl::Span` differs from `webrtc::ArrayView` on several points, and both +of them differ from the `std::span` introduced in C++20. We should just keep +using `webrtc::ArrayView` and avoid `absl::Span`. When WebRTC switches to C++20, +we will consider replacing `webrtc::ArrayView` with `std::span`. ### `absl::StrCat`, `absl::StrAppend`, `absl::StrJoin`, `absl::StrSplit` -*Use `rtc::SimpleStringBuilder` to build strings.* +*Use `webrtc::SimpleStringBuilder` to build strings.* These are optimized for speed, not binary size. Even `StrCat` calls with a modest number of arguments can easily add several hundred bytes to the binary. + +Exception: Single-argument absl::StrCat is allowed in order to make it +easy to use AbslStringify. See [TOTW #215](https://abseil.io/tips/215) for +details on AbslStringify. diff --git a/g3doc/become_a_committer.md b/g3doc/become_a_committer.md index b2f49721e1..fcd1849b6e 100644 --- a/g3doc/become_a_committer.md +++ b/g3doc/become_a_committer.md @@ -1,5 +1,5 @@ - + # How to get tryjob access or become WebRTC committer @@ -75,6 +75,14 @@ recommended to apply for WebRTC committer rights obtaining process. 6. Also as any contributor you must sign and return the [Contributor License Agreement][4] +## WebRTC Committers expiration + +If a WebRTC committer isn't active on Gerrit (e.g. by uploading a CL, or +participating in code reviews) for more than 1 year, they will lose their status +as a WebRTC Committer and be removed from the list of committers. An email will be +sent one week before the expiration date, giving the committer 7 days to +demonstrate activity and maintain their WebRTC Committer status. + [1]: https://webrtc.googlesource.com/src/+/refs/heads/main/docs/faq.md#to-be-a-contributor_do-i-need-to-sign-any-agreements [2]: https://chromium.googlesource.com/chromium/src/+/refs/heads/main/styleguide/c++/c++.md [3]: https://webrtc.googlesource.com/src/+/refs/heads/main/g3doc/style-guide.md diff --git a/g3doc/field-trials.md b/g3doc/field-trials.md index e4f946a175..724a302fe1 100644 --- a/g3doc/field-trials.md +++ b/g3doc/field-trials.md @@ -39,7 +39,32 @@ The policy for field trials is: - The field trial must have an end date. The end date may be pushed back if necessary, but should not be pushed back indefinitely. - A field trial must be associated with a bug that - - reserves the field trial key, - - is open, - - is assigned to an owner, and - - has the end date specified. + - reserves the field trial key, and + - is assigned to an owner. + +## Creating a field trial + +Before creating a new field trial, make sure to read the [policy](#policy). + +Either create a new bug or reuse an existing one, and make sure it is assigned to the +correct owner. Take note of the bug ID. Next, decide how long you need the field +trial to last. It should be rare to have field trials lasting more than 12 +months.
You can use the `NextAction` field in the bug to help you remember the +end date. + +Using this information, add a new entry to `ACTIVE_FIELD_TRIALS` in +`experiments/field_trials.py`. You may not add new items to +`POLICY_EXEMPT_FIELD_TRIALS` since it is reserved for field trials that were +created before the policy was in place. + +## Removing a field trial + +Any field trial that has expired or otherwise is not needed anymore may be +removed by following these steps: + +- Remove all references from the code base. You can find these by, e.g., + grepping for the field trial key. +- Clean up potential glue code that might have been added. +- Remove the field trial from `ACTIVE_FIELD_TRIALS` in + `experiments/field_trials.py`. +- If all work is finished, also close the associated bug. diff --git a/g3doc/how_to_write_documentation.md b/g3doc/how_to_write_documentation.md index 6c6a4902ee..8e00909dc0 100644 --- a/g3doc/how_to_write_documentation.md +++ b/g3doc/how_to_write_documentation.md @@ -1,5 +1,5 @@ - + # How to write WebRTC documentation diff --git a/g3doc/implementation_basics.md b/g3doc/implementation_basics.md index ae1f199b68..6984589377 100644 --- a/g3doc/implementation_basics.md +++ b/g3doc/implementation_basics.md @@ -1,5 +1,5 @@ - + # Basic concepts and primitives @@ -32,7 +32,7 @@ varies by platform, but they all have the [webrtc::TaskQueueBase][3] API. This API offers primitives for posting tasks, with or without delay. -Some core parts use the [rtc::Thread][2], which is a subclass of TaskQueueBase. +Some core parts use the [webrtc::Thread][2], which is a subclass of TaskQueueBase. This may contain a SocketServer for processing I/O, and is used for policing certain calling pattern between a few core threads (the NetworkThread cannot do Invoke on the Worker thread, for instance). @@ -47,18 +47,18 @@ behave as follows: * FooFactory: Has a Create function that creates a Foo object and returns the object or an owning reference to it (for instance std::unique_ptr or - rtc::scoped_refptr). The Create function should NOT alter the factory + webrtc::scoped_refptr). The Create function should NOT alter the factory state; ideally, it is marked const. Ownership of the returned object is only with the caller. * FooBuilder: Has a Build function that returns ownership of a Foo object (as above). The Builder can only be used once, and resources given to the Builder before the Build function is called are either released or owned by the Foo - object. The Create function may be reference-qualified (declared as ```Foo + object. The Build function may be reference-qualified (declared as ```Foo Build() &&```), which means it is invoked as ```std::move(builder).Build()```, and C++ will ensure that it is not used again. -* FooManager: Has a Create function that returns an rtc::scoped_refptr (if +* FooManager: Has a Create function that returns a webrtc::scoped_refptr (if shared ownership) or a Foo* (if the Manager retains sole ownership). If Create() cannot fail, consider returning a Foo&. The Manager is responsible for keeping track of the object; if the Create function returns a Foo*, the @@ -104,7 +104,7 @@ associated classes. ### Synchronization primitives to be used when needed When it is absolutely necessary to let one thread wait for another thread -to do something, Thread::Invoke can be used. This function is DISCOURAGED, +to do something, Thread::BlockingCall can be used. This function is DISCOURAGED, since it leads to performance issues, but is currently still widespread.
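As a rough illustration of the discouraged-but-still-widespread pattern described above, here is a sketch only; it is not part of this change and assumes the `webrtc::Thread` API declared in `rtc_base/thread.h`:

```cpp
// Sketch of a blocking cross-thread call. Per the guidance above this pattern
// is DISCOURAGED; prefer posting a task and handling the result asynchronously.
#include "rtc_base/thread.h"

int QueryStateOnNetworkThread(webrtc::Thread* network_thread) {
  // Runs the lambda on `network_thread` and blocks the calling thread until it
  // has completed, returning the lambda's result.
  return network_thread->BlockingCall([] {
    int state = 0;
    // ... read state that must only be touched on the network thread ...
    return state;
  });
}
```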
When it is absolutely necessary to access one variable from multiple threads, diff --git a/g3doc/index.md b/g3doc/index.md index 8016054d3c..b73b1a713c 100644 --- a/g3doc/index.md +++ b/g3doc/index.md @@ -1,5 +1,5 @@ - + # WebRTC C++ library diff --git a/g3doc/org-contributions.md b/g3doc/org-contributions.md new file mode 100644 index 0000000000..4e94599570 --- /dev/null +++ b/g3doc/org-contributions.md @@ -0,0 +1,72 @@ + + + +# Organizational contributors to WebRTC + +This document outlines procedures for the relationship with longer-term +organizational contributors to WebRTC. + +Note that this is not covering the case of individual, one-off contributions; +those are adequately covered in other documents. + +## Background: Individuals making multiple contributions + +The contribution guidelines can be summarized as: + +* First, contribute something to show understanding of the codebase +* Then, get bot start rights, so that one can test the contributions before + asking for review (this right applies only to bots that operate on the open + source repo) +* After a number of commits, and demonstrating adequate knowledge of the + project’s style and structure, one can ask for committer rights, which will + give the ability to submit code after adequate review (current policy: + review by two WebRTC project members). + +## Organizations making multiple contributions + +At the moment, primary management of the WebRTC code repository and CI infrastructure is being provided by Google. This means that certain actions require cooperation with the responsible team at Google - here we refer to the people working on this at Google as “the WebRTC project”. + +Sometimes, organizations take on a commitment to contribute to WebRTC on a +longer term basis. In these cases, it is good for all parties to have some +guidelines on how the relationship between the WebRTC project and the +organization is managed. + +We should have the following roles in place: + +* A contact person at the contributing organization \ + This person will be responsible for knowing where the organization is making + contributions, and why. All contributors from that organization need to be + known by that contact person; the WebRTC project may redirect queries from + other people in the org to that person if not already CCed. +* At least one person with committer rights (or working towards such rights). + \ + This person will also be a primary reviewer for incoming CLs from the + organization, ensuring a review is done before the WebRTC project members + are asked for review. \ + This can be the same as the contact person, or someone different. + +The WebRTC project will offer to host a contact mailing list, if desirable, and name a point of contact for the relationship. + +When making small contributions like bug fixes, normal review is sufficient. + +When asking to add significant functionality (new CC, new codecs, other new +features), the process should include: + +* Specifying why the feature is needed (requirements, conditions for saying + “it works”, value to the larger community). This should normally be done + by filing a bug on the [issues.webrtc.org](https://issues.webrtc.org) bugtracker + asking for the feature. 
+* A design document showing how the feature will be implemented and how it + will interact with the rest of the WebRTC implementation +* A plan for who will do the work, and when it’s expected to happen +* A “match list” of the areas affected by the project and the WebRTC project + members available to review contributions in those areas. (This can be + created collaboratively). +* If the work involves field trials and rollouts on Google properties like + Meet and Chrome, there + must be a plan for managing these aspects. + +Normally, an ongoing relationship will require some regular cadence of meetings; +a minimum of one hour per quarter should be aimed for, with other meetings as +needed. + diff --git a/g3doc/sitemap.md b/g3doc/sitemap.md index c1221e5335..1bba1070f0 100644 --- a/g3doc/sitemap.md +++ b/g3doc/sitemap.md @@ -43,7 +43,8 @@ * [Logging](/logging/g3doc/rtc_event_log.md) * Testing * Media Quality and performance - * [PeerConnection Framework](/test/pc/e2e/g3doc/index.md) + * PeerConnection Framework + * [Overview](/test/pc/e2e/g3doc/index.md) * [Architecture](/test/pc/e2e/g3doc/architecture.md) * [Video analyzer](/test/pc/e2e/g3doc/default_video_quality_analyzer.md) * Call framework diff --git a/g3doc/style-guide.md b/g3doc/style-guide.md index 71d1196df2..dd4e2cbcac 100644 --- a/g3doc/style-guide.md +++ b/g3doc/style-guide.md @@ -1,5 +1,5 @@ - + # WebRTC coding style guide @@ -14,19 +14,21 @@ If making large changes to such code, consider first cleaning it up in a WebRTC follows the [Chromium C++ style guide][chr-style] and the [Google C++ style guide][goog-style]. In cases where they conflict, the Chromium style guide trumps the Google style guide, and the rules in this file trump them -both. +both. In addition to style guides it is recommended to follow +[best practices][goog-best-practice] when applicable. [chr-style]: https://chromium.googlesource.com/chromium/src/+/main/styleguide/c++/c++.md [goog-style]: https://google.github.io/styleguide/cppguide.html +[goog-best-practice]: https://abseil.io/tips ### C++ version -WebRTC is written in C++17, but with some restrictions: +WebRTC is written in C++20, but with some restrictions: -* We only allow the subset of C++17 (language and library) that is not banned by +* We only allow the subset of C++20 (language and library) that is not banned by Chromium; see the [list of banned C++ features in Chromium][chr-style-cpp]. -* We only allow the subset of C++17 that is also valid C++20; otherwise, users - would not be able to compile WebRTC in C++20 mode. +* We only allow the subset of C++20 that is also valid C++23; otherwise, users + would not be able to compile WebRTC in C++23 mode. [chr-style-cpp]: https://chromium.googlesource.com/chromium/src/+/main/styleguide/c++/c++-features.md @@ -53,7 +55,7 @@ file type suffix), in the same directory, in the same build target. test `.cc` files, and with `.cc` files that define `main`.) See also the -[examples and exceptions on how to treat `.h` and `.cpp` files](style-guide/h-cc-pairs.md). +[examples and exceptions on how to treat `.h` and `.cc` files](style-guide/h-cc-pairs.md). This makes the source code easier to navigate and organize, and precludes some questionable build system practices such as having build targets that don't pull @@ -65,7 +67,7 @@ Follow the [Google styleguide for `TODO` comments][goog-style-todo]. 
When referencing a WebRTC bug, prefer using the URL form (excluding the scheme part): ```cpp -// TODO(bugs.webrtc.org/12345): Delete the hack when blocking bugs are resolved. +// TODO: bugs.webrtc.org/12345 - Delete the hack when blocking bugs are resolved. ``` The short form used in commit messages, e.g. `webrtc:12345`, is discouraged. @@ -84,6 +86,17 @@ Like so: std::pony PonyPlz(const std::pony_spec& ps); ``` +Prefer [ABSL_DEPRECATE_AND_INLINE] to deprecate an inline function definition +or a type alias. This macro makes it possible to automate inlining the function's body or +replacing the type where it is used downstream, e.g.: + +```cpp +ABSL_DEPRECATE_AND_INLINE() inline int OldFunc(int x) { + return NewFunc(x, 0); +} +using OldTypeName ABSL_DEPRECATE_AND_INLINE() = NewTypeName; +``` + NOTE 1: The annotation goes on the declaration in the `.h` file, not the definition in the `.cc` file! @@ -97,6 +110,14 @@ inline std::pony PonyPlz(const std::pony_spec& ps) { return DEPRECATED_PonyPlz(ps); } ``` +or wrap the test with + +```cpp +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + TEST_... +#pragma clang diagnostic pop +``` In other words, rename the existing function, and provide an inline wrapper using the original name that calls it. That way, callers who are willing to @@ -109,10 +130,11 @@ readable way. [DEPRECATED]: https://en.cppreference.com/w/cpp/language/attributes/deprecated [ABSL_DEPRECATED]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/abseil-cpp/absl/base/attributes.h?q=ABSL_DEPRECATED +[ABSL_DEPRECATE_AND_INLINE]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/abseil-cpp/absl/base/macros.h?q=ABSL_DEPRECATE_AND_INLINE ### ArrayView -When passing an array of values to a function, use `rtc::ArrayView` +When passing an array of values to a function, use `webrtc::ArrayView` whenever possible—that is, whenever you're not passing ownership of the array, and don't allow the callee to change the array size. @@ -124,9 +146,23 @@ For example, | `const T* ptr, size_t num_elements` | `ArrayView<const T>` | | `T* ptr, size_t num_elements` | `ArrayView<T>` | -See the [source code for `rtc::ArrayView`](api/array_view.h) for more detailed +See the [source code for `webrtc::ArrayView`](api/array_view.h) for more detailed docs. +### Strings + +WebRTC uses std::string, with content assumed to be UTF-8. Note that this +has to be verified whenever accepting external input. + +For concatenation of strings, use webrtc::StrJoin or webrtc::SimpleStringBuilder +directly. + +The following string building tools are NOT recommended: +* The + operator. See https://abseil.io/tips/3 for why not. +* absl::StrCat, absl::StrAppend, absl::StrJoin. These are optimized for + speed, not code size, and have significant code size overhead. +* strcat. It is too easy to create buffer overflows. + ### sigslot SIGSLOT IS DEPRECATED. @@ -138,7 +174,7 @@ Prefer `webrtc::CallbackList`, and manage thread safety yourself. The following smart pointer types are recommended: * `std::unique_ptr` for all singly-owned objects - * `rtc::scoped_refptr` for all objects with shared ownership + * `webrtc::scoped_refptr` for all objects with shared ownership Use of `std::shared_ptr` is *not permitted*. It is banned in the Chromium style guide (overriding the Google style guide). See the @@ -148,21 +184,24 @@ information.
In most cases, one will want to explicitly control lifetimes, and therefore use `std::unique_ptr`, but in some cases, for instance where references have to exist both from the API users and internally, with no way to invalidate pointers -held by the API user, `rtc::scoped_refptr` can be appropriate. +held by the API user, `scoped_refptr` can be appropriate. [chr-std-shared-ptr]: https://chromium.googlesource.com/chromium/src/+/main/styleguide/c++/c++-features.md#shared-pointers-banned ### `std::bind` Don't use `std::bind`—there are pitfalls, and lambdas are almost as succinct and -already familiar to modern C++ programmers. +already familiar to modern C++ programmers. See [Avoid std::bind][totw-108] for more. + +[totw-108]: https://abseil.io/tips/108 ### `std::function` `std::function` is allowed, but remember that it's not the right tool for every occasion. Prefer to use interfaces when that makes sense, and consider -`rtc::FunctionView` for cases where the callee will not save the function -object. +`webrtc::FunctionView` for cases where the callee will not save the function +object. Prefer `absl::AnyInvocable` over `std::function` when you can accomplish + the task by moving the callable instead of copying it. ### Forward declarations @@ -208,7 +247,12 @@ WebRTC follows the WebRTC follows [Chromium's Python style][chr-py-style]. +Chromium's Python style now follows PEP-8, but not all WebRTC Python code has been migrated yet. +For this reason, running presubmit on older WebRTC Python scripts might trigger failures. +The failures can either be fixed or ignored by adding the script to the [PYLINT_OLD_STYLE][old-style-lint] list. + [chr-py-style]: https://chromium.googlesource.com/chromium/src/+/main/styleguide/python/python.md +[old-style-lint]: https://source.chromium.org/chromium/_/webrtc/src/+/9b81d2c954128831c62d8a0657c7f955b3c02d32:PRESUBMIT.py;l=50 ## Build files diff --git a/g3doc/style-guide/h-cc-pairs.md b/g3doc/style-guide/h-cc-pairs.md index 08eed85c23..593bbe3399 100644 --- a/g3doc/style-guide/h-cc-pairs.md +++ b/g3doc/style-guide/h-cc-pairs.md @@ -1,5 +1,5 @@ - + # `.h` and `.cc` files come in pairs diff --git a/g3doc/supported-platforms-and-compilers.md b/g3doc/supported-platforms-and-compilers.md index 4e65767499..69a1d1f68c 100644 --- a/g3doc/supported-platforms-and-compilers.md +++ b/g3doc/supported-platforms-and-compilers.md @@ -1,5 +1,5 @@ - + # WebRTC supported plaftorms and compilers @@ -10,7 +10,7 @@ The list of officially supported operating systems and CPUs is: * Android: armeabi-v7a, arm64-v8a, x86, x86_64. * iOS: arm64, x86_64. * Linux: armeabi-v7a, arm64-v8a, x86, x86_64. -* macOS: x86_64, arm64 (M1). +* macOS: x86_64, arm64. * Windows: x86_64. Other platforms are not officially supported (which means there is no CI diff --git a/infra/OWNERS b/infra/OWNERS index eae8171db5..4a9e6f5856 100644 --- a/infra/OWNERS +++ b/infra/OWNERS @@ -3,4 +3,3 @@ jleconte@webrtc.org titovartem@webrtc.org jansson@webrtc.org terelius@webrtc.org -landrey@webrtc.org diff --git a/infra/config/commit-queue.cfg b/infra/config/commit-queue.cfg index 162590f0b4..1038a5189d 100644 --- a/infra/config/commit-queue.cfg +++ b/infra/config/commit-queue.cfg @@ -2,7 +2,7 @@ # Do not modify manually.
# # For the schema of this file, see Config message: -# https://luci-config.appspot.com/schemas/projects:commit-queue.cfg +# https://config.luci.app/schemas/projects:commit-queue.cfg cq_status_host: "chromium-cq-status.appspot.com" submit_options { @@ -22,7 +22,7 @@ config_groups { } verifiers { gerrit_cq_ability { - committer_list: "project-webrtc-committers" + committer_list: "project-webrtc-submit-access" dry_run_access_list: "project-webrtc-tryjob-access" allow_owner_if_submittable: DRY_RUN } @@ -30,10 +30,20 @@ config_groups { url: "https://webrtc-status.appspot.com" } tryjob { + builders { + name: "webrtc-internal/g3.webrtc-internal.try/internal_compile" + includable_only: true + owner_whitelist_group: "project-webrtc-internal-tryjob-access" + } builders { name: "webrtc-internal/g3.webrtc-internal.try/internal_compile_lite" owner_whitelist_group: "project-webrtc-internal-tryjob-access" } + builders { + name: "webrtc-internal/g3.webrtc-internal.try/internal_tests" + includable_only: true + owner_whitelist_group: "project-webrtc-internal-tryjob-access" + } builders { name: "webrtc/try/android_arm64_rel" } @@ -83,6 +93,9 @@ config_groups { builders { name: "webrtc/try/ios_dbg_simulator" } + builders { + name: "webrtc/try/iwyu_verifier" + } builders { name: "webrtc/try/linux_asan" } @@ -213,7 +226,7 @@ config_groups { } verifiers { gerrit_cq_ability { - committer_list: "project-webrtc-committers" + committer_list: "project-webrtc-submit-access" dry_run_access_list: "project-webrtc-tryjob-access" } tryjob { @@ -263,6 +276,9 @@ config_groups { builders { name: "webrtc/try/ios_dbg_simulator" } + builders { + name: "webrtc/try/iwyu_verifier" + } builders { name: "webrtc/try/linux_asan" } diff --git a/infra/config/config.star b/infra/config/config.star index 103b4ae318..ec925311d5 100755 --- a/infra/config/config.star +++ b/infra/config/config.star @@ -13,17 +13,18 @@ lucicfg.check_version("1.30.9") WEBRTC_GIT = "https://webrtc.googlesource.com/src" WEBRTC_GERRIT = "https://webrtc-review.googlesource.com/src" WEBRTC_TROOPER_EMAIL = "webrtc-troopers-robots@google.com" -WEBRTC_XCODE13 = "13c100" +WEBRTC_XCODE = "15f31d" DEFAULT_CPU = "x86-64" # Helpers: -def make_reclient_properties(instance, jobs = None): - """Makes a default reclient property with the specified argument. +def make_rbe_properties(instance, jobs = None, use_siso = None): + """Makes a default RBE property with the specified argument. Args: instance: RBE insatnce name. jobs: Number of jobs to be used by the builder. + use_siso: Add $build/siso properties to switch from Ninja to Siso. Returns: A dictonary with the reclient properties. """ @@ -31,9 +32,22 @@ def make_reclient_properties(instance, jobs = None): "instance": instance, "metrics_project": "chromium-reclient-metrics", } + siso_props = { + "project": instance, + "configs": ["builder"], + "enable_cloud_profiler": True, + "enable_cloud_trace": True, + "enable_monitoring": True, + } if jobs: reclient_props["jobs"] = jobs - return {"$build/reclient": reclient_props} + siso_props["remote_jobs"] = jobs + props = { + "$build/reclient": reclient_props, + } + if use_siso: + props["$build/siso"] = siso_props + return props def os_from_name(name): """Returns the 'os' dimension based on a builder name. 
@@ -218,6 +232,10 @@ luci.realm(name = "pools/try-tests", bindings = [ ), ]) luci.realm(name = "try", bindings = [ + luci.binding( + roles = "role/buildbucket.creator", + groups = "project-webrtc-led-users", + ), luci.binding( roles = "role/swarming.taskTriggerer", groups = "project-webrtc-led-users", @@ -237,6 +255,10 @@ luci.realm(name = "pools/perf", bindings = [ ), ]) luci.realm(name = "perf", bindings = [ + luci.binding( + roles = "role/buildbucket.creator", + groups = "project-webrtc-led-users", + ), luci.binding( roles = "role/swarming.taskTriggerer", groups = "project-webrtc-led-users", @@ -249,6 +271,10 @@ luci.realm(name = "@root", bindings = [ roles = "role/swarming.poolUser", groups = "project-webrtc-admins", ), + luci.binding( + roles = "role/buildbucket.creator", + groups = "project-webrtc-admins", + ), luci.binding( roles = "role/swarming.taskTriggerer", groups = "project-webrtc-admins", @@ -265,6 +291,10 @@ luci.bucket( "project-webrtc-tryjob-access", ]), ], + constraints = luci.bucket_constraints( + pools = ["luci.webrtc.try"], + service_accounts = ["webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com"], + ), ) luci.bucket( @@ -274,6 +304,10 @@ luci.bucket( "project-webrtc-ci-schedulers", ]), ], + constraints = luci.bucket_constraints( + pools = ["luci.webrtc.ci"], + service_accounts = ["webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com"], + ), ) luci.bucket( @@ -287,6 +321,10 @@ luci.bucket( "service-account-chromeperf", ]), ], + constraints = luci.bucket_constraints( + pools = ["luci.webrtc.perf"], + service_accounts = ["webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com"], + ), ) luci.bucket( @@ -300,7 +338,7 @@ luci.cq_group( tree_status_host = "webrtc-status.appspot.com", watch = [cq.refset(repo = WEBRTC_GERRIT, refs = ["refs/heads/main"])], acls = [ - acl.entry(acl.CQ_COMMITTER, groups = ["project-webrtc-committers"]), + acl.entry(acl.CQ_COMMITTER, groups = ["project-webrtc-submit-access"]), acl.entry(acl.CQ_DRY_RUNNER, groups = ["project-webrtc-tryjob-access"]), ], allow_owner_if_submittable = cq.ACTION_DRY_RUN, @@ -312,7 +350,7 @@ luci.cq_group( name = "cq_branch", watch = [cq.refset(repo = WEBRTC_GERRIT, refs = ["refs/branch-heads/.+"])], acls = [ - acl.entry(acl.CQ_COMMITTER, groups = ["project-webrtc-committers"]), + acl.entry(acl.CQ_COMMITTER, groups = ["project-webrtc-submit-access"]), acl.entry(acl.CQ_DRY_RUNNER, groups = ["project-webrtc-tryjob-access"]), ], retry_config = cq.RETRY_ALL_FAILURES, @@ -335,12 +373,29 @@ luci.cq_tryjob_verifier( cq_group = "cq_infra", ) +# Internal-only tryjob always included into CQ: luci.cq_tryjob_verifier( builder = "webrtc-internal:g3.webrtc-internal.try/internal_compile_lite", owner_whitelist = ["project-webrtc-internal-tryjob-access"], cq_group = "cq", ) +# Includable via `Cq-Include-Trybots: webrtc-internal/g3.webrtc-internal.try:internal_compile`: +luci.cq_tryjob_verifier( + builder = "webrtc-internal:g3.webrtc-internal.try/internal_compile", + owner_whitelist = ["project-webrtc-internal-tryjob-access"], + cq_group = "cq", + includable_only = True, +) + +# Includable via `Cq-Include-Trybots: webrtc-internal/g3.webrtc-internal.try:internal_tests`: +luci.cq_tryjob_verifier( + builder = "webrtc-internal:g3.webrtc-internal.try/internal_tests", + owner_whitelist = ["project-webrtc-internal-tryjob-access"], + cq_group = "cq", + includable_only = True, +) + # Notifier definitions: luci.notifier( @@ -374,6 +429,11 @@ luci.notifier( ), ) +# Notify findit about completed builds for code 
coverage purposes +luci.buildbucket_notification_topic( + name = "projects/findit-for-me/topics/buildbucket_notification", +) + # Tree closer definitions: luci.tree_closer( @@ -511,6 +571,7 @@ def ci_builder( perf_cat = None, prioritized = False, enabled = True, + use_siso = None, **kwargs): """Add a post-submit builder. @@ -521,6 +582,7 @@ def ci_builder( perf_cat: the category + name for the /perf/ console, or None to omit from the console. prioritized: True to make this builder have a higher priority and never batch builds. enabled: False to exclude this builder from consoles and failure notifications. + use_siso: True to switch build system from Ninja to Siso. **kwargs: Pass on to webrtc_builder / luci.builder. Returns: A luci.builder. @@ -542,7 +604,7 @@ def ci_builder( dimensions["builderless"] = "1" properties = properties or {} properties["builder_group"] = "client.webrtc" - properties.update(make_reclient_properties("rbe-webrtc-trusted")) + properties.update(make_rbe_properties("rbe-webrtc-trusted", use_siso = use_siso)) notifies = ["post_submit_failure_notifier", "infra_failure_notifier"] notifies += ["webrtc_tree_closer"] if name not in skipped_lkgr_bots else [] @@ -565,6 +627,7 @@ def try_builder( cq = {}, branch_cq = True, builder = None, + use_siso = None, **kwargs): """Add a pre-submit builder. @@ -575,6 +638,7 @@ def try_builder( cq: None to exclude this from all commit queues, or a dict of kwargs for cq_tryjob_verifier. branch_cq: False to exclude this builder just from the release-branch CQ. builder: builder to set in the dimensions, if None, builderless:1 is used. + use_siso: True to switch build system from Ninja to Siso. **kwargs: Pass on to webrtc_builder / luci.builder. Returns: A luci.builder. @@ -587,7 +651,7 @@ def try_builder( dimensions["builderless"] = "1" properties = properties or {} properties["builder_group"] = "tryserver.webrtc" - properties.update(make_reclient_properties("rbe-webrtc-untrusted")) + properties.update(make_rbe_properties("rbe-webrtc-untrusted", use_siso = use_siso)) if cq != None: luci.cq_tryjob_verifier(name, cq_group = "cq", **cq) if branch_cq: @@ -616,13 +680,9 @@ def perf_builder(name, perf_cat, **kwargs): Notifications are also disabled. """ add_milo(name, {"perf": perf_cat}) - properties = make_reclient_properties("rbe-webrtc-trusted") + properties = make_rbe_properties("rbe-webrtc-trusted") properties["builder_group"] = "client.webrtc.perf" - dimensions = {"pool": "luci.webrtc.perf", "os": "Linux", "cores": "2"} - if "Android" in name or "Fuchsia" in name: - # Android perf testers require more performant bots to finish under 3 hours. - # Fuchsia perf testers encountered "no space left on device" error on multiple runs. 
- dimensions["cores"] = "8" + dimensions = {"pool": "luci.webrtc.perf", "os": "Linux"} return webrtc_builder( name = name, dimensions = dimensions, @@ -660,6 +720,7 @@ def chromium_try_builder(name, **kwargs): recipe = "chromium_trybot", branch_cq = False, execution_timeout = 3 * time.hour, + use_siso = True, **kwargs ) @@ -677,10 +738,10 @@ def normal_builder_factory(**common_kwargs): # Mixins: ios_builder, ios_try_job = normal_builder_factory( - properties = {"xcode_build_version": WEBRTC_XCODE13}, + properties = {"xcode_build_version": WEBRTC_XCODE}, caches = [swarming.cache( - name = "xcode_ios_" + WEBRTC_XCODE13, - path = "xcode_ios_" + WEBRTC_XCODE13 + ".app", + name = "xcode_ios_" + WEBRTC_XCODE, + path = "xcode_ios_" + WEBRTC_XCODE + ".app", )], ) @@ -717,6 +778,7 @@ ios_try_job("ios_compile_arm64_dbg") ios_builder("iOS64 Release", "iOS|arm64|rel") ios_try_job("ios_compile_arm64_rel") ios_builder("iOS Debug (simulator)", "iOS|x64|sim") + ios_try_job("ios_dbg_simulator") ios_builder("iOS API Framework Builder", "iOS|fat|size", recipe = "ios_api_framework", prioritized = True) ios_try_job("ios_api_framework", recipe = "ios_api_framework") @@ -800,9 +862,13 @@ ci_builder("Win64 ASan", "Win Clang|x64|asan") try_builder("win_asan") ci_builder("Win (more configs)", "Win Clang|x86|more") try_builder("win_x86_more_configs") +try_builder("win11_release", cq = None) +try_builder("win11_debug", cq = None) chromium_try_builder("win_chromium_compile") chromium_try_builder("win_chromium_compile_dbg") +try_builder("iwyu_verifier") + try_builder( "presubmit", recipe = "run_presubmit", diff --git a/infra/config/cr-buildbucket.cfg b/infra/config/cr-buildbucket.cfg index e9ab652b45..9e2c14b094 100644 --- a/infra/config/cr-buildbucket.cfg +++ b/infra/config/cr-buildbucket.cfg @@ -2,7 +2,7 @@ # Do not modify manually. 
# # For the schema of this file, see BuildbucketCfg message: -# https://luci-config.appspot.com/schemas/projects:buildbucket.cfg +# https://config.luci.app/schemas/projects:buildbucket.cfg buckets { name: "ci" @@ -1593,13 +1593,13 @@ buckets { ' },' ' "builder_group": "client.webrtc",' ' "recipe": "webrtc/ios_api_framework",' - ' "xcode_build_version": "13c100"' + ' "xcode_build_version": "15f31d"' '}' priority: 29 execution_timeout_secs: 7200 caches { - name: "xcode_ios_13c100" - path: "xcode_ios_13c100.app" + name: "xcode_ios_15f31d" + path: "xcode_ios_15f31d.app" } build_numbers: YES service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" @@ -1645,13 +1645,13 @@ buckets { ' },' ' "builder_group": "client.webrtc",' ' "recipe": "webrtc/standalone",' - ' "xcode_build_version": "13c100"' + ' "xcode_build_version": "15f31d"' '}' priority: 30 execution_timeout_secs: 7200 caches { - name: "xcode_ios_13c100" - path: "xcode_ios_13c100.app" + name: "xcode_ios_15f31d" + path: "xcode_ios_15f31d.app" } build_numbers: YES service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" @@ -1697,13 +1697,13 @@ buckets { ' },' ' "builder_group": "client.webrtc",' ' "recipe": "webrtc/standalone",' - ' "xcode_build_version": "13c100"' + ' "xcode_build_version": "15f31d"' '}' priority: 30 execution_timeout_secs: 7200 caches { - name: "xcode_ios_13c100" - path: "xcode_ios_13c100.app" + name: "xcode_ios_15f31d" + path: "xcode_ios_15f31d.app" } build_numbers: YES service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" @@ -1749,13 +1749,13 @@ buckets { ' },' ' "builder_group": "client.webrtc",' ' "recipe": "webrtc/standalone",' - ' "xcode_build_version": "13c100"' + ' "xcode_build_version": "15f31d"' '}' priority: 30 execution_timeout_secs: 7200 caches { - name: "xcode_ios_13c100" - path: "xcode_ios_13c100.app" + name: "xcode_ios_15f31d" + path: "xcode_ios_15f31d.app" } build_numbers: YES service_account: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" @@ -1774,6 +1774,10 @@ buckets { } } } + constraints { + pools: "luci.webrtc.ci" + service_accounts: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" + } } buckets { name: "cron" @@ -2281,7 +2285,6 @@ buckets { name: "Perf Android32 (R Pixel5)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" - dimensions: "cores:8" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2327,7 +2330,6 @@ buckets { name: "Perf Android64 (R Pixel5)" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" - dimensions: "cores:8" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2373,7 +2375,6 @@ buckets { name: "Perf Fuchsia" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" - dimensions: "cores:8" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2419,7 +2420,6 @@ buckets { name: "Perf Linux Bionic" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" - dimensions: "cores:2" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2465,7 +2465,6 @@ buckets { name: "Perf Mac 11" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" - dimensions: "cores:2" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2511,7 +2510,6 @@ buckets { name: "Perf Mac M1 Arm64 12" swarming_host: 
"chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" - dimensions: "cores:2" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2557,7 +2555,6 @@ buckets { name: "Perf Win 10" swarming_host: "chromium-swarm.appspot.com" swarming_tags: "vpython:native-python-wrapper" - dimensions: "cores:2" dimensions: "os:Linux" dimensions: "pool:luci.webrtc.perf" exe { @@ -2647,6 +2644,10 @@ buckets { } } } + constraints { + pools: "luci.webrtc.perf" + service_accounts: "webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" + } } buckets { name: "try" @@ -2916,6 +2917,15 @@ buckets { ' "instance": "rbe-webrtc-untrusted",' ' "metrics_project": "chromium-reclient-metrics"' ' },' + ' "$build/siso": {' + ' "configs": [' + ' "builder"' + ' ],' + ' "enable_cloud_profiler": true,' + ' "enable_cloud_trace": true,' + ' "enable_monitoring": true,' + ' "project": "rbe-webrtc-untrusted"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3395,13 +3405,13 @@ buckets { ' },' ' "builder_group": "tryserver.webrtc",' ' "recipe": "webrtc/ios_api_framework",' - ' "xcode_build_version": "13c100"' + ' "xcode_build_version": "15f31d"' '}' priority: 30 execution_timeout_secs: 7200 caches { - name: "xcode_ios_13c100" - path: "xcode_ios_13c100.app" + name: "xcode_ios_15f31d" + path: "xcode_ios_15f31d.app" } build_numbers: YES service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" @@ -3447,13 +3457,13 @@ buckets { ' },' ' "builder_group": "tryserver.webrtc",' ' "recipe": "webrtc/standalone",' - ' "xcode_build_version": "13c100"' + ' "xcode_build_version": "15f31d"' '}' priority: 30 execution_timeout_secs: 7200 caches { - name: "xcode_ios_13c100" - path: "xcode_ios_13c100.app" + name: "xcode_ios_15f31d" + path: "xcode_ios_15f31d.app" } build_numbers: YES service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" @@ -3499,13 +3509,13 @@ buckets { ' },' ' "builder_group": "tryserver.webrtc",' ' "recipe": "webrtc/standalone",' - ' "xcode_build_version": "13c100"' + ' "xcode_build_version": "15f31d"' '}' priority: 30 execution_timeout_secs: 7200 caches { - name: "xcode_ios_13c100" - path: "xcode_ios_13c100.app" + name: "xcode_ios_15f31d" + path: "xcode_ios_15f31d.app" } build_numbers: YES service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" @@ -3551,13 +3561,13 @@ buckets { ' },' ' "builder_group": "tryserver.webrtc",' ' "recipe": "webrtc/standalone",' - ' "xcode_build_version": "13c100"' + ' "xcode_build_version": "15f31d"' '}' priority: 30 execution_timeout_secs: 7200 caches { - name: "xcode_ios_13c100" - path: "xcode_ios_13c100.app" + name: "xcode_ios_15f31d" + path: "xcode_ios_15f31d.app" } build_numbers: YES service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" @@ -3575,6 +3585,53 @@ buckets { } } } + builders { + name: "iwyu_verifier" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "builderless:1" + dimensions: "cpu:x86-64" + dimensions: "os:Linux" + dimensions: "pool:luci.webrtc.try" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": 
[],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "tryserver.webrtc",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 7200 + build_numbers: YES + service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "try_test_results" + test_results {} + } + } + } builders { name: "linux_asan" swarming_host: "chromium-swarm.appspot.com" @@ -3641,6 +3698,15 @@ buckets { ' "instance": "rbe-webrtc-untrusted",' ' "metrics_project": "chromium-reclient-metrics"' ' },' + ' "$build/siso": {' + ' "configs": [' + ' "builder"' + ' ],' + ' "enable_cloud_profiler": true,' + ' "enable_cloud_trace": true,' + ' "enable_monitoring": true,' + ' "project": "rbe-webrtc-untrusted"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -3688,6 +3754,15 @@ buckets { ' "instance": "rbe-webrtc-untrusted",' ' "metrics_project": "chromium-reclient-metrics"' ' },' + ' "$build/siso": {' + ' "configs": [' + ' "builder"' + ' ],' + ' "enable_cloud_profiler": true,' + ' "enable_cloud_trace": true,' + ' "enable_monitoring": true,' + ' "project": "rbe-webrtc-untrusted"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4581,6 +4656,15 @@ buckets { ' "instance": "rbe-webrtc-untrusted",' ' "metrics_project": "chromium-reclient-metrics"' ' },' + ' "$build/siso": {' + ' "configs": [' + ' "builder"' + ' ],' + ' "enable_cloud_profiler": true,' + ' "enable_cloud_trace": true,' + ' "enable_monitoring": true,' + ' "project": "rbe-webrtc-untrusted"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4959,6 +5043,15 @@ buckets { ' "instance": "rbe-webrtc-untrusted",' ' "metrics_project": "chromium-reclient-metrics"' ' },' + ' "$build/siso": {' + ' "configs": [' + ' "builder"' + ' ],' + ' "enable_cloud_profiler": true,' + ' "enable_cloud_trace": true,' + ' "enable_monitoring": true,' + ' "project": "rbe-webrtc-untrusted"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -4987,6 +5080,100 @@ buckets { } } } + builders { + name: "win11_debug" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "builderless:1" + dimensions: "cpu:x86-64" + dimensions: "os:Windows" + dimensions: "pool:luci.webrtc.try" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "tryserver.webrtc",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 7200 + build_numbers: YES + service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "try_test_results" + test_results {} + } + } + } + builders { + name: 
"win11_release" + swarming_host: "chromium-swarm.appspot.com" + swarming_tags: "vpython:native-python-wrapper" + dimensions: "builderless:1" + dimensions: "cpu:x86-64" + dimensions: "os:Windows" + dimensions: "pool:luci.webrtc.try" + exe { + cipd_package: "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build" + cipd_version: "refs/heads/main" + cmd: "luciexe" + } + properties: + '{' + ' "$build/reclient": {' + ' "instance": "rbe-webrtc-untrusted",' + ' "metrics_project": "chromium-reclient-metrics"' + ' },' + ' "$recipe_engine/resultdb/test_presentation": {' + ' "column_keys": [],' + ' "grouping_keys": [' + ' "status",' + ' "v.test_suite"' + ' ]' + ' },' + ' "builder_group": "tryserver.webrtc",' + ' "recipe": "webrtc/standalone"' + '}' + priority: 30 + execution_timeout_secs: 7200 + build_numbers: YES + service_account: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" + experiments { + key: "luci.recipes.use_python3" + value: 100 + } + resultdb { + enable: true + bq_exports { + project: "webrtc-ci" + dataset: "resultdb" + table: "try_test_results" + test_results {} + } + } + } builders { name: "win_asan" swarming_host: "chromium-swarm.appspot.com" @@ -5053,6 +5240,15 @@ buckets { ' "instance": "rbe-webrtc-untrusted",' ' "metrics_project": "chromium-reclient-metrics"' ' },' + ' "$build/siso": {' + ' "configs": [' + ' "builder"' + ' ],' + ' "enable_cloud_profiler": true,' + ' "enable_cloud_trace": true,' + ' "enable_monitoring": true,' + ' "project": "rbe-webrtc-untrusted"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5100,6 +5296,15 @@ buckets { ' "instance": "rbe-webrtc-untrusted",' ' "metrics_project": "chromium-reclient-metrics"' ' },' + ' "$build/siso": {' + ' "configs": [' + ' "builder"' + ' ],' + ' "enable_cloud_profiler": true,' + ' "enable_cloud_trace": true,' + ' "enable_monitoring": true,' + ' "project": "rbe-webrtc-untrusted"' + ' },' ' "$recipe_engine/resultdb/test_presentation": {' ' "column_keys": [],' ' "grouping_keys": [' @@ -5552,4 +5757,13 @@ buckets { } } } + constraints { + pools: "luci.webrtc.try" + service_accounts: "webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" + } +} +common_config { + builds_notification_topics { + name: "projects/findit-for-me/topics/buildbucket_notification" + } } diff --git a/infra/config/luci-analysis.cfg b/infra/config/luci-analysis.cfg index 3a08bae367..6150da33f7 100644 --- a/infra/config/luci-analysis.cfg +++ b/infra/config/luci-analysis.cfg @@ -1,20 +1,18 @@ # Schema for this config file: ProjectConfig in: # https://luci-config.appspot.com/schemas/projects:luci-analysis.cfg -bug_filing_thresholds { - metric_id: "human-cls-failed-presubmit" - threshold { - # clusters blocking developers should have bugs filed. - one_day: 3 - } -} -bug_filing_thresholds { - metric_id: "test-runs-failed" - threshold { - # clusters that aren't blocking developers but are failing a significant - # amount of tasks should have bugs filed to look into optimizing machine - # resource usage. - one_day: 500 +bug_management { + default_bug_system: MONORAIL + monorail { + project: "webrtc" + default_field_values { + # Type field. + field_id: 38 + value: "Bug" + } + priority_field_id: 36 + monorail_hostname: "bugs.chromium.org" + display_prefix: "bugs.webrtc.org" } } @@ -30,107 +28,4 @@ clustering { like_template: "ninja:${target}/%${suite}.${case}%" } } -monorail { - project: "webrtc" - default_field_values { - # Type field. 
- field_id: 38 - value: "Bug" - } - priority_field_id: 36 - priorities { - priority: "0" - thresholds { - metric_id: "human-cls-failed-presubmit" - threshold { - one_day: 20 - } - } - } - priorities { - priority: "1" - thresholds { - metric_id: "human-cls-failed-presubmit" - threshold { - one_day: 10 - } - } - } - priorities { - priority: "2" - thresholds { - metric_id: "human-cls-failed-presubmit" - threshold { - one_day: 2 - } - } - } - priorities { - priority: "3" - # Clusters which fail to meet this threshold will be closed. - thresholds { - metric_id: "test-runs-failed" - threshold { - one_day: 2 - } - } - thresholds { - metric_id: "human-cls-failed-presubmit" - threshold { - one_day: 1 - seven_day: 1 - } - } - } - priority_hysteresis_percent: 50 - monorail_hostname: "bugs.chromium.org" - display_prefix: "bugs.webrtc.org" -} -realms { - name: "ci" - test_variant_analysis { - update_test_variant_task { - update_test_variant_task_interval { - seconds: 3600 # 1 hour - } - test_variant_status_update_duration { - seconds: 86400 # 24 hours - } - } - bq_exports { - table { - cloud_project: "webrtc-ci" - dataset: "weetbix" - table: "ci_flaky_test_variants" - } - predicate { - status: FLAKY - } - } - } -} - -realms { - name: "try" - test_variant_analysis { - update_test_variant_task { - update_test_variant_task_interval { - seconds: 3600 # 1 hour - } - test_variant_status_update_duration { - seconds: 86400 # 24 hours - } - } - bq_exports { - table { - cloud_project: "webrtc-ci" - dataset: "weetbix" - table: "try_flaky_test_variants" - } - predicate { - status: FLAKY - } - } - } -} diff --git a/infra/config/luci-logdog.cfg b/infra/config/luci-logdog.cfg index adc75bef49..01a391261d 100644 --- a/infra/config/luci-logdog.cfg +++ b/infra/config/luci-logdog.cfg @@ -2,7 +2,7 @@ # Do not modify manually. # # For the schema of this file, see ProjectConfig message: -# https://luci-config.appspot.com/schemas/projects:luci-logdog.cfg +# https://config.luci.app/schemas/projects:luci-logdog.cfg reader_auth_groups: "all" writer_auth_groups: "luci-logdog-chromium-writers" diff --git a/infra/config/luci-milo.cfg b/infra/config/luci-milo.cfg index c3319ddb63..35cc997e42 100644 --- a/infra/config/luci-milo.cfg +++ b/infra/config/luci-milo.cfg @@ -2,7 +2,7 @@ # Do not modify manually. # # For the schema of this file, see Project message: -# https://luci-config.appspot.com/schemas/projects:luci-milo.cfg +# https://config.luci.app/schemas/projects:luci-milo.cfg consoles { id: "ci" @@ -593,12 +593,21 @@ consoles { builders { name: "buildbucket/luci.webrtc.try/win_x86_more_configs" } + builders { + name: "buildbucket/luci.webrtc.try/win11_release" + } + builders { + name: "buildbucket/luci.webrtc.try/win11_debug" + } builders { name: "buildbucket/luci.webrtc.try/win_chromium_compile" } builders { name: "buildbucket/luci.webrtc.try/win_chromium_compile_dbg" } + builders { + name: "buildbucket/luci.webrtc.try/iwyu_verifier" + } builders { name: "buildbucket/luci.webrtc.try/presubmit" } diff --git a/infra/config/luci-notify.cfg b/infra/config/luci-notify.cfg index 0c22a488fa..53a6c5eb3b 100644 --- a/infra/config/luci-notify.cfg +++ b/infra/config/luci-notify.cfg @@ -2,7 +2,7 @@ # Do not modify manually. 
# # For the schema of this file, see ProjectConfig message: -# https://luci-config.appspot.com/schemas/projects:luci-notify.cfg +# https://config.luci.app/schemas/projects:luci-notify.cfg notifiers { notifications { @@ -1635,6 +1635,19 @@ notifiers { name: "ios_dbg_simulator" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + builders { + bucket: "try" + name: "iwyu_verifier" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE @@ -2025,6 +2038,32 @@ notifiers { name: "webrtc_linux_chromium" } } +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + builders { + bucket: "try" + name: "win11_debug" + } +} +notifiers { + notifications { + on_new_status: INFRA_FAILURE + email { + recipients: "webrtc-troopers-robots@google.com" + } + template: "infra_failure" + } + builders { + bucket: "try" + name: "win11_release" + } +} notifiers { notifications { on_new_status: INFRA_FAILURE diff --git a/infra/config/luci-scheduler.cfg b/infra/config/luci-scheduler.cfg index c1bafa7a66..275b169bd9 100644 --- a/infra/config/luci-scheduler.cfg +++ b/infra/config/luci-scheduler.cfg @@ -2,7 +2,7 @@ # Do not modify manually. # # For the schema of this file, see ProjectConfig message: -# https://luci-config.appspot.com/schemas/projects:luci-scheduler.cfg +# https://config.luci.app/schemas/projects:luci-scheduler.cfg job { id: "Android32" diff --git a/infra/config/project.cfg b/infra/config/project.cfg index f4a6bbb47c..fae10a3043 100644 --- a/infra/config/project.cfg +++ b/infra/config/project.cfg @@ -2,12 +2,12 @@ # Do not modify manually. # # For the schema of this file, see ProjectCfg message: -# https://luci-config.appspot.com/schemas/projects:project.cfg +# https://config.luci.app/schemas/projects:project.cfg name: "webrtc" access: "group:all" lucicfg { - version: "1.39.11" + version: "1.44.1" package_dir: "." config_dir: "." entry_point: "config.star" diff --git a/infra/config/realms.cfg b/infra/config/realms.cfg index 6d5937a89b..409a7506c5 100644 --- a/infra/config/realms.cfg +++ b/infra/config/realms.cfg @@ -2,7 +2,7 @@ # Do not modify manually. 
# # For the schema of this file, see RealmsCfg message: -# https://luci-config.appspot.com/schemas/projects:realms.cfg +# https://config.luci.app/schemas/projects:realms.cfg realms { name: "@root" @@ -18,6 +18,10 @@ realms { role: "role/analysis.reader" principals: "group:all" } + bindings { + role: "role/buildbucket.creator" + principals: "group:project-webrtc-admins" + } bindings { role: "role/buildbucket.reader" principals: "group:all" @@ -96,6 +100,10 @@ realms { role: "role/buildbucket.builderServiceAccount" principals: "user:webrtc-ci-builder@chops-service-accounts.iam.gserviceaccount.com" } + bindings { + role: "role/buildbucket.creator" + principals: "group:project-webrtc-led-users" + } bindings { role: "role/buildbucket.triggerer" principals: "group:service-account-chromeperf" @@ -174,6 +182,10 @@ realms { role: "role/buildbucket.builderServiceAccount" principals: "user:webrtc-try-builder@chops-service-accounts.iam.gserviceaccount.com" } + bindings { + role: "role/buildbucket.creator" + principals: "group:project-webrtc-led-users" + } bindings { role: "role/buildbucket.triggerer" principals: "group:project-webrtc-tryjob-access" diff --git a/infra/specs/PRESUBMIT.py b/infra/specs/PRESUBMIT.py index f064cacaf8..a38bc8d566 100644 --- a/infra/specs/PRESUBMIT.py +++ b/infra/specs/PRESUBMIT.py @@ -8,6 +8,7 @@ # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. +import difflib import os import shlex @@ -15,26 +16,38 @@ USE_PYTHON3 = True -def _HasLocalChanges(input_api): - ret = input_api.subprocess.call(['git', 'diff', '--quiet']) - return ret != 0 - - def CheckPatchFormatted(input_api, output_api): results = [] file_filter = lambda x: x.LocalPath().endswith('.pyl') - affected_files = input_api.AffectedFiles(include_deletes=False, - file_filter=file_filter) + affected_files = input_api.AffectedFiles( + include_deletes=False, file_filter=file_filter + ) + diffs = [] for f in affected_files: + # NewContents just reads the file. + prev_content = f.NewContents() + cmd = ['yapf', '-i', f.AbsoluteLocalPath()] if input_api.subprocess.call(cmd): results.append( - output_api.PresubmitError('Error calling "' + shlex.join(cmd) + '"')) - - if _HasLocalChanges(input_api): - msg = ('Diff found after running "yapf -i" on modified .pyl files.\n' - 'Please commit or discard the new changes.') + output_api.PresubmitError('Error calling "' + shlex.join(cmd) + '"') + ) + + # Make sure NewContents reads the updated files from disk and not cache. + new_content = f.NewContents(flush_cache=True) + if new_content != prev_content: + path = f.LocalPath() + diff = difflib.unified_diff(prev_content, new_content, path, path, lineterm='') + diffs.append('\n'.join(diff)) + + if diffs: + combined_diffs = '\n'.join(diffs) + msg = ( + 'Diff found after running "yapf -i" on modified .pyl files:\n\n' + f'{combined_diffs}\n\n' + 'Please commit or discard the new changes.' 
+ ) results.append(output_api.PresubmitError(msg)) return results diff --git a/infra/specs/client.webrtc.json b/infra/specs/client.webrtc.json index 68632f1ad0..31cb220344 100644 --- a/infra/specs/client.webrtc.json +++ b/infra/specs/client.webrtc.json @@ -7,6 +7,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "AppRTCMobile_test_apk", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -26,6 +27,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "android_instrumentation_test_apk", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -45,6 +47,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "audio_decoder_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -64,6 +67,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "common_audio_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -83,6 +87,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "common_video_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -102,6 +107,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "dcsctp_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -121,6 +127,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "modules_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -141,6 +148,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "modules_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -161,6 +169,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "peerconnection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -181,6 +190,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_media_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -200,6 +210,27 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_p2p_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_gtest_merge.py" + }, + "name": "rtc_pc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -219,6 +250,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_stats_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -238,6 +270,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -258,6 +291,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "slow_peer_connection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -277,6 +311,7 @@ "merge": { "script": 
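Back on the CheckPatchFormatted change in infra/specs/PRESUBMIT.py a few hunks above: difflib.unified_diff() takes the before/after file contents as sequences of lines and yields unified-diff lines, which is what gets joined into the presubmit error message. A self-contained sketch of that pattern, with plain lists standing in for the AffectedFile NewContents() values and an illustrative .pyl path:

import difflib

# Hypothetical before/after contents of a .pyl file, as lists of lines
# (mirroring how the presubmit code treats f.NewContents()).
prev_content = ["{", "  'foo':1,", "}"]
new_content = ["{", "    'foo': 1,", "}"]

path = "infra/specs/example.pyl"  # illustrative path only
diff = difflib.unified_diff(prev_content, new_content, path, path, lineterm='')
msg = ('Diff found after running "yapf -i" on modified .pyl files:\n\n'
       + '\n'.join(diff)
       + '\n\nPlease commit or discard the new changes.')
print(msg)  # roughly what the resulting PresubmitError message would contain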
"//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "svc_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -284,11 +319,11 @@ "swarming": { "dimensions": { "android_devices": "1", - "device_type": "walleye", + "device_type": "crosshatch", "os": "Android" }, "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", - "shards": 4 + "shards": 8 }, "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" @@ -297,6 +332,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "system_wrappers_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -316,6 +352,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "test_support_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -335,6 +372,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "tools_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -354,6 +392,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "video_engine_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -374,6 +413,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "voip_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -393,6 +433,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "webrtc_nonparallel_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -409,25 +450,47 @@ "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ], - "junit_tests": [ + "isolated_scripts": [ { "args": [ "--shards=1" ], + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, "name": "android_examples_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, "test": "android_examples_junit_tests", "test_id_prefix": "ninja://examples:android_examples_junit_tests/" }, { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, "name": "android_sdk_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, "test": "android_sdk_junit_tests", "test_id_prefix": "ninja://sdk/android:android_sdk_junit_tests/" } @@ -439,6 +502,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "AppRTCMobile_test_apk", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -458,6 +522,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "android_instrumentation_test_apk", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -477,6 +542,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "audio_decoder_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -496,6 +562,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": 
"common_audio_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -515,6 +582,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "common_video_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -534,6 +602,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "dcsctp_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -553,6 +622,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "modules_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -573,6 +643,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "modules_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -593,6 +664,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "peerconnection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -613,6 +685,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_media_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -632,6 +705,27 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_p2p_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_gtest_merge.py" + }, + "name": "rtc_pc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -651,6 +745,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_stats_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -670,6 +765,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -690,6 +786,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "slow_peer_connection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -709,6 +806,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "svc_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -716,11 +814,11 @@ "swarming": { "dimensions": { "android_devices": "1", - "device_type": "walleye", + "device_type": "crosshatch", "os": "Android" }, "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", - "shards": 4 + "shards": 8 }, "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" @@ -729,6 +827,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "system_wrappers_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -748,6 +847,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "test_support_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -767,6 +867,7 @@ "merge": { "script": 
"//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "tools_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -786,6 +887,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "video_engine_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -806,6 +908,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "voip_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -825,6 +928,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "webrtc_nonparallel_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -841,25 +945,47 @@ "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ], - "junit_tests": [ + "isolated_scripts": [ { "args": [ "--shards=1" ], + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, "name": "android_examples_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, "test": "android_examples_junit_tests", "test_id_prefix": "ninja://examples:android_examples_junit_tests/" }, { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, "name": "android_sdk_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, "test": "android_sdk_junit_tests", "test_id_prefix": "ninja://sdk/android:android_sdk_junit_tests/" } @@ -871,6 +997,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "peerconnection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -898,6 +1025,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "AppRTCMobile_test_apk", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -917,6 +1045,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "android_instrumentation_test_apk", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -936,6 +1065,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "audio_decoder_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -955,6 +1085,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "common_audio_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -974,6 +1105,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "common_video_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -993,6 +1125,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "dcsctp_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1012,6 +1145,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "modules_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1032,6 +1166,7 @@ "merge": { 
"script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "modules_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1052,6 +1187,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "peerconnection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1072,6 +1208,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_media_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1091,6 +1228,27 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_p2p_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_gtest_merge.py" + }, + "name": "rtc_pc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1110,6 +1268,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_stats_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1129,6 +1288,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1149,6 +1309,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "slow_peer_connection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1168,6 +1329,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "svc_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1175,11 +1337,11 @@ "swarming": { "dimensions": { "android_devices": "1", - "device_type": "walleye", + "device_type": "crosshatch", "os": "Android" }, "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", - "shards": 4 + "shards": 8 }, "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" @@ -1188,6 +1350,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "system_wrappers_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1207,6 +1370,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "test_support_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1226,6 +1390,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "tools_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1245,6 +1410,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "video_engine_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1265,6 +1431,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "voip_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1284,6 +1451,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "webrtc_nonparallel_tests", "resultdb": { "enable": true, 
"has_native_resultdb_integration": true @@ -1300,25 +1468,47 @@ "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ], - "junit_tests": [ + "isolated_scripts": [ { "args": [ "--shards=1" ], + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, "name": "android_examples_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, "test": "android_examples_junit_tests", "test_id_prefix": "ninja://examples:android_examples_junit_tests/" }, { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, "name": "android_sdk_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, "test": "android_sdk_junit_tests", "test_id_prefix": "ninja://sdk/android:android_sdk_junit_tests/" } @@ -1330,6 +1520,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "AppRTCMobile_test_apk", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1349,6 +1540,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "android_instrumentation_test_apk", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1368,6 +1560,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "audio_decoder_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1387,6 +1580,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "common_audio_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1406,6 +1600,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "common_video_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1425,6 +1620,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "dcsctp_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1444,6 +1640,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "modules_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1464,6 +1661,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "modules_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1484,6 +1682,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "peerconnection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1504,6 +1703,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_media_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1523,6 +1723,27 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_p2p_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + 
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_gtest_merge.py" + }, + "name": "rtc_pc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1542,6 +1763,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_stats_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1561,6 +1783,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1581,6 +1804,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "slow_peer_connection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1600,6 +1824,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "svc_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1607,11 +1832,11 @@ "swarming": { "dimensions": { "android_devices": "1", - "device_type": "walleye", + "device_type": "crosshatch", "os": "Android" }, "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", - "shards": 4 + "shards": 8 }, "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" @@ -1620,6 +1845,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "system_wrappers_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1639,6 +1865,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "test_support_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1658,6 +1885,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "tools_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1677,6 +1905,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "video_engine_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1697,6 +1926,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "voip_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1716,6 +1946,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "webrtc_nonparallel_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1732,25 +1963,47 @@ "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ], - "junit_tests": [ + "isolated_scripts": [ { "args": [ "--shards=1" ], + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, "name": "android_examples_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, "test": "android_examples_junit_tests", "test_id_prefix": "ninja://examples:android_examples_junit_tests/" }, { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, "name": "android_sdk_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, + 
"swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, "test": "android_sdk_junit_tests", "test_id_prefix": "ninja://sdk/android:android_sdk_junit_tests/" } @@ -1768,7 +2021,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -1780,9 +2032,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { @@ -1792,7 +2045,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -1804,9 +2056,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { @@ -1816,7 +2069,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -1828,9 +2080,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { @@ -1840,7 +2093,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -1852,9 +2104,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { @@ -1864,7 +2117,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -1876,9 +2128,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { @@ -1888,11 +2141,10 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "rtc_pc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_pc_unittests", + "name": "rtc_p2p_unittests", "resultdb": { "result_file": "${ISOLATED_OUTDIR}/gtest_output.json", "result_format": "gtest_json" @@ -1900,10 +2152,11 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" }, { "args": [ @@ -1912,11 +2165,10 @@ "--test-arg=--undefok=test_launcher_summary_output", 
"--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "svc_tests", + "name": "rtc_pc_unittests", "resultdb": { "result_file": "${ISOLATED_OUTDIR}/gtest_output.json", "result_format": "gtest_json" @@ -1924,10 +2176,35 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" - }, - "shards": 4 + "os": "Ubuntu-22.04" + } + }, + "test": "rtc_pc_unittests", + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + }, + { + "args": [ + "--test-launcher-summary-output=${ISOLATED_OUTDIR}/gtest_output.json", + "--test-arg=--gtest_output=json:/custom_artifacts/gtest_output.json", + "--test-arg=--undefok=test_launcher_summary_output", + "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" + ], + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "svc_tests", + "resultdb": { + "result_file": "${ISOLATED_OUTDIR}/gtest_output.json", + "result_format": "gtest_json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-22.04" + }, + "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { @@ -1937,7 +2214,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -1949,9 +2225,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { @@ -1961,7 +2238,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -1973,10 +2249,11 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { @@ -1986,7 +2263,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -1998,9 +2274,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { @@ -2010,7 +2287,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2022,9 +2298,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -2032,7 +2309,6 @@ "Linux (more configs)": { "isolated_scripts": [ { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2043,10 +2319,11 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": 
"ninja://pc:peerconnection_unittests/" } ] @@ -2054,7 +2331,6 @@ "Linux Asan": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2065,13 +2341,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2082,13 +2358,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2099,13 +2375,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2116,13 +2392,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2133,14 +2409,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2151,14 +2427,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2169,14 +2445,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2187,13 +2463,30 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2204,13 +2497,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_pc_unittests", "test_id_prefix": 
"ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2221,13 +2514,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2238,14 +2531,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "shared_screencast_stream_test", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2256,13 +2549,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "shared_screencast_stream_test", "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2273,13 +2566,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2290,14 +2583,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2308,13 +2601,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2325,13 +2618,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2342,13 +2635,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2359,14 +2652,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2377,13 +2670,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { 
"script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2394,9 +2687,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -2404,7 +2698,6 @@ "Linux MSan": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2418,10 +2711,10 @@ "os": "Ubuntu-20.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2435,10 +2728,10 @@ "os": "Ubuntu-20.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2452,10 +2745,10 @@ "os": "Ubuntu-20.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2469,10 +2762,10 @@ "os": "Ubuntu-20.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2487,10 +2780,10 @@ }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2505,10 +2798,10 @@ }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2523,10 +2816,10 @@ }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2540,10 +2833,27 @@ "os": "Ubuntu-20.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2557,10 +2867,10 @@ "os": "Ubuntu-20.04" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2574,10 +2884,10 @@ "os": "Ubuntu-20.04" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2592,10 +2902,10 @@ }, "shards": 6 }, + 
"test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2609,10 +2919,10 @@ "os": "Ubuntu-20.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2627,10 +2937,10 @@ }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2644,10 +2954,10 @@ "os": "Ubuntu-20.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2661,10 +2971,10 @@ "os": "Ubuntu-20.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2678,10 +2988,10 @@ "os": "Ubuntu-20.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2696,10 +3006,10 @@ }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2713,10 +3023,10 @@ "os": "Ubuntu-20.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2730,6 +3040,7 @@ "os": "Ubuntu-20.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -2737,7 +3048,6 @@ "Linux Tsan v2": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2748,13 +3058,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2765,13 +3075,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2782,13 +3092,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2799,13 +3109,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": 
"Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2816,14 +3126,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2834,14 +3144,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2852,14 +3162,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2870,13 +3180,30 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2887,13 +3214,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2904,13 +3231,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2921,14 +3248,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2939,13 +3266,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2956,14 +3283,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, 
{ - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2974,13 +3301,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2991,13 +3318,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3008,13 +3335,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3025,14 +3352,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3043,13 +3370,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3060,9 +3387,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -3070,7 +3398,6 @@ "Linux UBSan": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3081,13 +3408,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3098,13 +3425,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3115,13 +3442,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3132,13 +3459,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - 
"isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3149,14 +3476,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3167,14 +3494,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3185,14 +3512,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3203,13 +3530,30 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3220,13 +3564,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3237,13 +3581,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3254,14 +3598,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "shared_screencast_stream_test", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3272,13 +3616,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "shared_screencast_stream_test", "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3289,13 +3633,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3306,14 +3650,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3324,13 +3668,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3341,13 +3685,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3358,13 +3702,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3375,14 +3719,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3393,13 +3737,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3410,9 +3754,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -3420,7 +3765,6 @@ "Linux UBSan vptr": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3431,13 +3775,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3448,13 +3792,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3465,13 +3809,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3482,13 +3826,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3499,14 +3843,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3517,14 +3861,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3535,14 +3879,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3553,65 +3897,82 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_pc_unittests", + "name": "rtc_p2p_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_stats_unittests", + "name": "rtc_pc_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, - "test_id_prefix": "ninja://stats:rtc_stats_unittests/" + "test": "rtc_pc_unittests", + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_unittests", + "name": "rtc_stats_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" - }, - "shards": 6 + "os": "Ubuntu-20.04" + } }, - "test_id_prefix": "ninja://:rtc_unittests/" + "test": "rtc_stats_unittests", + "test_id_prefix": "ninja://stats:rtc_stats_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + }, + "shards": 6 + }, + "test": "rtc_unittests", + "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "shared_screencast_stream_test", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" 
}, @@ -3622,13 +3983,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "shared_screencast_stream_test", "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3639,13 +4000,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3656,14 +4017,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3674,13 +4035,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3691,13 +4052,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3708,13 +4069,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3725,14 +4086,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3743,13 +4104,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3760,9 +4121,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -3770,7 +4132,6 @@ "Linux32 Debug": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3781,13 +4142,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3798,13 +4159,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3815,13 +4176,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3832,13 +4193,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3849,14 +4210,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3867,14 +4228,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3885,14 +4246,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3903,13 +4264,30 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-22.04" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3920,13 +4298,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3937,13 +4315,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3954,14 +4332,14 @@ "swarming": { 
"dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3972,13 +4350,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -3989,14 +4367,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4007,13 +4385,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4024,13 +4402,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4041,13 +4419,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4058,14 +4436,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4076,13 +4454,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4093,9 +4471,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -4104,7 +4483,6 @@ "Linux32 Release": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4115,13 +4493,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4132,13 +4510,13 @@ "swarming": { "dimensions": { "cpu": 
"x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4149,13 +4527,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4166,13 +4544,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4183,14 +4561,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4201,14 +4579,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4219,14 +4597,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4237,13 +4615,30 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-22.04" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4254,13 +4649,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4271,13 +4666,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4288,14 +4683,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 6 }, + "test": "rtc_unittests", 
"test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4306,13 +4701,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4323,14 +4718,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4341,13 +4736,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4358,13 +4753,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4375,13 +4770,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4392,14 +4787,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4410,13 +4805,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4427,9 +4822,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -4439,7 +4835,6 @@ "Linux64 Debug": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4450,13 +4845,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4467,13 +4862,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_audio_unittests", "test_id_prefix": 
"ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4484,13 +4879,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4501,13 +4896,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4518,14 +4913,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4536,14 +4931,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4554,14 +4949,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4572,13 +4967,30 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-22.04" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4589,13 +5001,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4606,13 +5018,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4623,14 +5035,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "shared_screencast_stream_test", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4641,13 +5053,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "shared_screencast_stream_test", "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4658,13 +5070,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4675,14 +5087,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4693,13 +5105,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4710,13 +5122,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4727,13 +5139,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4744,14 +5156,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4762,13 +5174,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4779,9 +5191,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -4790,7 +5203,6 @@ "Linux64 Release": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4801,13 +5213,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": 
{ "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4818,13 +5230,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4835,13 +5247,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4852,13 +5264,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4869,14 +5281,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4887,14 +5299,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4905,14 +5317,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4923,13 +5335,30 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-22.04" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4940,13 +5369,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4957,13 +5386,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4974,14 +5403,14 @@ "swarming": { 
"dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "shared_screencast_stream_test", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -4992,13 +5421,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "shared_screencast_stream_test", "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5009,13 +5438,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5026,14 +5455,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5044,13 +5473,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5061,13 +5490,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5078,31 +5507,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_capture_tests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_capture_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "dimensions": { - "cpu": "x86-64", - "os": "Ubuntu-18.04", - "pool": "WebRTC-baremetal" - } - }, - "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/" - }, - { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5113,14 +5524,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5131,13 +5542,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5148,9 +5559,10 @@ "swarming": { "dimensions": { 
"cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -5160,7 +5572,6 @@ "Mac Asan": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5172,13 +5583,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5190,13 +5601,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5208,13 +5619,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5226,13 +5637,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5244,14 +5655,14 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5263,14 +5674,14 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5282,14 +5693,14 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5301,13 +5712,31 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cores": "12", + "cpu": "x86-64", + "os": "Mac-14" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5319,13 +5748,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": 
"rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5337,13 +5766,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5355,14 +5784,14 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5374,13 +5803,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5392,14 +5821,14 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5411,13 +5840,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5429,13 +5858,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5447,13 +5876,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5465,14 +5894,14 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5484,13 +5913,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5502,9 +5931,10 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -5513,7 +5943,6 @@ "Mac64 Debug": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5525,13 +5954,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5543,13 +5972,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5561,13 +5990,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5579,13 +6008,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5597,14 +6026,14 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5616,14 +6045,14 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5635,14 +6064,14 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5654,13 +6083,31 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cores": "12", + "cpu": "x86-64", + "os": "Mac-14" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5672,13 +6119,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5690,13 +6137,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" 
} }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5708,14 +6155,14 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5727,13 +6174,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5745,14 +6192,14 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5764,13 +6211,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5782,13 +6229,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5800,13 +6247,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5818,14 +6265,14 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5837,13 +6284,13 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5855,9 +6302,10 @@ "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -5865,7 +6313,6 @@ "Mac64 Release": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5876,13 +6323,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { 
"script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5893,13 +6340,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5910,13 +6357,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5927,13 +6374,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5944,14 +6391,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5962,14 +6409,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5980,14 +6427,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5998,47 +6445,64 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_pc_unittests", + "name": "rtc_p2p_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_stats_unittests", + "name": "rtc_pc_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, - "test_id_prefix": "ninja://stats:rtc_stats_unittests/" - }, + "test": "rtc_pc_unittests", + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_stats_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Mac-14" + } + }, + "test": "rtc_stats_unittests", + "test_id_prefix": "ninja://stats:rtc_stats_unittests/" + 
}, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6049,14 +6513,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6067,13 +6531,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6084,14 +6548,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6102,13 +6566,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6119,13 +6583,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6136,31 +6600,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_capture_tests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_capture_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "dimensions": { - "cpu": "x86-64", - "os": "Mac-12", - "pool": "WebRTC-baremetal" - } - }, - "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/" - }, - { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6171,14 +6617,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6189,13 +6635,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6206,9 +6652,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -6216,7 +6663,6 @@ "MacARM64 M1 Release": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6227,13 +6673,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6244,13 +6690,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6261,13 +6707,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6278,13 +6724,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6295,14 +6741,14 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6313,14 +6759,14 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6331,14 +6777,14 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6349,13 +6795,30 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6366,13 +6829,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6383,13 +6846,13 
@@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6400,14 +6863,14 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6418,13 +6881,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6435,14 +6898,14 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6453,13 +6916,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6470,13 +6933,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6487,13 +6950,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6504,14 +6967,14 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6522,13 +6985,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6539,9 +7002,10 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -6550,7 +7014,6 @@ "Win (more configs)": { "isolated_scripts": [ { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6565,6 +7028,7 @@ }, "shards": 4 }, + "test": "peerconnection_unittests", 
"test_id_prefix": "ninja://pc:peerconnection_unittests/" } ] @@ -6574,7 +7038,6 @@ "Win32 Release (Clang)": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6588,10 +7051,10 @@ "os": "Windows-10-19045" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6605,10 +7068,10 @@ "os": "Windows-10-19045" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6622,10 +7085,10 @@ "os": "Windows-10-19045" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6639,10 +7102,10 @@ "os": "Windows-10-19045" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6657,10 +7120,10 @@ }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6675,10 +7138,10 @@ }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6693,10 +7156,10 @@ }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6710,10 +7173,27 @@ "os": "Windows-10-19045" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Windows-10-19045" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6727,10 +7207,10 @@ "os": "Windows-10-19045" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6744,10 +7224,10 @@ "os": "Windows-10-19045" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6762,10 +7242,10 @@ }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6779,10 +7259,10 @@ "os": "Windows-10-19045" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6797,10 +7277,10 @@ }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6814,10 +7294,10 @@ "os": "Windows-10-19045" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6831,10 +7311,10 @@ "os": "Windows-10-19045" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6848,10 +7328,10 @@ "os": "Windows-10-19045" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6866,10 +7346,10 @@ }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6883,10 +7363,10 @@ "os": "Windows-10-19045" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6900,6 +7380,7 @@ "os": "Windows-10-19045" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -6907,7 +7388,6 @@ "Win64 ASan": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6921,10 +7401,10 @@ "os": "Windows-10-19045" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6938,10 +7418,10 @@ "os": "Windows-10-19045" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6955,10 +7435,10 @@ "os": "Windows-10-19045" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6972,10 +7452,10 @@ "os": "Windows-10-19045" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6990,10 +7470,10 @@ }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": 
"modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7008,10 +7488,10 @@ }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7026,10 +7506,10 @@ }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7043,10 +7523,27 @@ "os": "Windows-10-19045" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Windows-10-19045" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7060,10 +7557,10 @@ "os": "Windows-10-19045" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7077,10 +7574,10 @@ "os": "Windows-10-19045" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7095,10 +7592,10 @@ }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7112,10 +7609,10 @@ "os": "Windows-10-19045" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7130,10 +7627,10 @@ }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7147,10 +7644,10 @@ "os": "Windows-10-19045" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7164,10 +7661,10 @@ "os": "Windows-10-19045" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7181,10 +7678,10 @@ "os": "Windows-10-19045" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7199,10 +7696,10 @@ }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": 
"voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7216,10 +7713,10 @@ "os": "Windows-10-19045" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7233,6 +7730,7 @@ "os": "Windows-10-19045" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -7240,7 +7738,6 @@ "Win64 Debug (Clang)": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7254,10 +7751,10 @@ "os": "Windows-10-19045" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7271,10 +7768,10 @@ "os": "Windows-10-19045" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7288,10 +7785,10 @@ "os": "Windows-10-19045" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7305,10 +7802,10 @@ "os": "Windows-10-19045" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7323,10 +7820,10 @@ }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7341,10 +7838,10 @@ }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7359,10 +7856,10 @@ }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7376,10 +7873,27 @@ "os": "Windows-10-19045" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Windows-10-19045" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7393,10 +7907,10 @@ "os": "Windows-10-19045" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7410,10 +7924,10 @@ "os": "Windows-10-19045" } 
}, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7428,10 +7942,10 @@ }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7445,10 +7959,10 @@ "os": "Windows-10-19045" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7463,10 +7977,10 @@ }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7480,10 +7994,10 @@ "os": "Windows-10-19045" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7497,10 +8011,10 @@ "os": "Windows-10-19045" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7514,10 +8028,10 @@ "os": "Windows-10-19045" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7532,10 +8046,10 @@ }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7549,10 +8063,10 @@ "os": "Windows-10-19045" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7566,6 +8080,7 @@ "os": "Windows-10-19045" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -7573,7 +8088,6 @@ "Win64 Release (Clang)": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7587,10 +8101,10 @@ "os": "Windows-10-19045" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7604,10 +8118,10 @@ "os": "Windows-10-19045" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7621,10 +8135,10 @@ "os": "Windows-10-19045" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7638,10 +8152,10 @@ "os": "Windows-10-19045" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7656,10 +8170,10 @@ }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7674,10 +8188,10 @@ }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7692,10 +8206,10 @@ }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7709,10 +8223,27 @@ "os": "Windows-10-19045" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Windows-10-19045" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7726,10 +8257,10 @@ "os": "Windows-10-19045" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7743,10 +8274,10 @@ "os": "Windows-10-19045" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7761,10 +8292,10 @@ }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7778,10 +8309,10 @@ "os": "Windows-10-19045" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7796,10 +8327,10 @@ }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7813,10 +8344,10 @@ "os": "Windows-10-19045" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7830,10 +8361,10 @@ "os": "Windows-10-19045" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7847,28 +8378,10 @@ "os": "Windows-10-19045" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_capture_tests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_capture_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "dimensions": { - "cpu": "x86-64", - "os": "Windows-10-19045", - "pool": "WebRTC-baremetal" - } - }, - "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/" - }, - { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7883,10 +8396,10 @@ }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7900,10 +8413,10 @@ "os": "Windows-10-19045" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7917,6 +8430,7 @@ "os": "Windows-10-19045" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -7925,22 +8439,21 @@ "isolated_scripts": [ { "args": [ + "--xcodebuild-sim-runner", "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "apprtcmobile_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "apprtcmobile_tests iPhone X 14.5", + "name": "apprtcmobile_tests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -7950,46 +8463,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "apprtcmobile_tests", "test_id_prefix": "ninja://examples:apprtcmobile_tests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ + "--xcodebuild-sim-runner", "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "apprtcmobile_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "apprtcmobile_tests iPhone X 15.5", + "name": "apprtcmobile_tests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -7999,46 +8512,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": 
"git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "apprtcmobile_tests", "test_id_prefix": "ninja://examples:apprtcmobile_tests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ + "--xcodebuild-sim-runner", "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "apprtcmobile_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "apprtcmobile_tests iPhone X 16.2", + "name": "apprtcmobile_tests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8048,45 +8561,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "apprtcmobile_tests", "test_id_prefix": "ninja://examples:apprtcmobile_tests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "audio_decoder_unittests iPhone X 14.5", + "name": "audio_decoder_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8096,45 +8609,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - 
"--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "audio_decoder_unittests iPhone X 15.5", + "name": "audio_decoder_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8144,45 +8657,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "audio_decoder_unittests iPhone X 16.2", + "name": "audio_decoder_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8192,45 +8705,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "common_audio_unittests iPhone X 14.5", + "name": "common_audio_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8240,45 +8753,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": 
"Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "common_audio_unittests iPhone X 15.5", + "name": "common_audio_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8288,45 +8801,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "common_audio_unittests iPhone X 16.2", + "name": "common_audio_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8336,45 +8849,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "common_video_unittests iPhone X 14.5", + "name": "common_video_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8384,45 +8897,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", 
- "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "common_video_unittests iPhone X 15.5", + "name": "common_video_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8432,45 +8945,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "common_video_unittests iPhone X 16.2", + "name": "common_video_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8480,45 +8993,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - 
"13c100" + "--xctest" ], - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "dcsctp_unittests iPhone X 14.5", + "name": "dcsctp_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8528,45 +9041,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "dcsctp_unittests iPhone X 15.5", + "name": "dcsctp_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8576,45 +9089,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "dcsctp_unittests iPhone X 16.2", + "name": "dcsctp_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8624,45 +9137,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "dcsctp_unittests", 
"test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "modules_tests iPhone X 14.5", + "name": "modules_tests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8672,46 +9185,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "modules_tests iPhone X 15.5", + "name": "modules_tests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8721,46 +9234,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "modules_tests iPhone X 16.2", + "name": "modules_tests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8770,46 +9283,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": 
"xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "modules_unittests iPhone X 14.5", + "name": "modules_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8819,47 +9332,47 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "modules_unittests iPhone X 15.5", + "name": "modules_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8869,47 +9382,47 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "modules_unittests iPhone X 16.2", + "name": "modules_unittests iPhone X 16.4", "resultdb": { 
"enable": true, "has_native_resultdb_integration": true @@ -8919,47 +9432,47 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_media_unittests iPhone X 14.5", + "name": "rtc_media_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -8969,45 +9482,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_media_unittests iPhone X 15.5", + "name": "rtc_media_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9017,45 +9530,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + 
"16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_media_unittests iPhone X 16.2", + "name": "rtc_media_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9065,45 +9578,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "rtc_pc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_pc_unittests iPhone X 14.5", + "name": "rtc_p2p_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9113,45 +9626,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/", - "variant_id": "iPhone X 14.5" + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/", + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "rtc_pc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_pc_unittests iPhone X 15.5", + "name": "rtc_p2p_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9161,45 +9674,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": 
"Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/", - "variant_id": "iPhone X 15.5" + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/", + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "rtc_pc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_pc_unittests iPhone X 16.2", + "name": "rtc_p2p_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9209,45 +9722,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/", - "variant_id": "iPhone X 16.2" + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/", + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_stats_unittests iPhone X 14.5", + "name": "rtc_pc_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9257,45 +9770,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://stats:rtc_stats_unittests/", - "variant_id": "iPhone X 14.5" + "test": "rtc_pc_unittests", + "test_id_prefix": "ninja://pc:rtc_pc_unittests/", + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_stats_unittests iPhone X 15.5", + "name": "rtc_pc_unittests iPhone 15 18.0", "resultdb": { "enable": true, 
"has_native_resultdb_integration": true @@ -9305,45 +9818,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://stats:rtc_stats_unittests/", - "variant_id": "iPhone X 15.5" + "test": "rtc_pc_unittests", + "test_id_prefix": "ninja://pc:rtc_pc_unittests/", + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_stats_unittests iPhone X 16.2", + "name": "rtc_pc_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9353,45 +9866,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://stats:rtc_stats_unittests/", - "variant_id": "iPhone X 16.2" + "test": "rtc_pc_unittests", + "test_id_prefix": "ninja://pc:rtc_pc_unittests/", + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_unittests iPhone X 14.5", + "name": "rtc_stats_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9401,46 +9914,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", - "shards": 6 + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://:rtc_unittests/", - "variant_id": "iPhone X 14.5" + "test": 
"rtc_stats_unittests", + "test_id_prefix": "ninja://stats:rtc_stats_unittests/", + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_unittests iPhone X 15.5", + "name": "rtc_stats_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9450,46 +9962,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", - "shards": 6 + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://:rtc_unittests/", - "variant_id": "iPhone X 15.5" + "test": "rtc_stats_unittests", + "test_id_prefix": "ninja://stats:rtc_stats_unittests/", + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_unittests iPhone X 16.2", + "name": "rtc_stats_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9499,47 +10010,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", - "shards": 6 + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://:rtc_unittests/", - "variant_id": "iPhone X 16.2" + "test": "rtc_stats_unittests", + "test_id_prefix": "ninja://stats:rtc_stats_unittests/", + "variant_id": "iPhone X 16.4" }, { "args": [ + "--xcodebuild-sim-runner", "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "sdk_framework_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "sdk_framework_unittests iPhone X 14.5", + "name": "sdk_framework_unittests iPhone 14 17.5", "resultdb": { "enable": true, 
"has_native_resultdb_integration": true @@ -9549,46 +10059,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "sdk_framework_unittests", "test_id_prefix": "ninja://sdk:sdk_framework_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ + "--xcodebuild-sim-runner", "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "sdk_framework_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "sdk_framework_unittests iPhone X 15.5", + "name": "sdk_framework_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9598,46 +10108,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "sdk_framework_unittests", "test_id_prefix": "ninja://sdk:sdk_framework_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ + "--xcodebuild-sim-runner", "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "sdk_framework_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "sdk_framework_unittests iPhone X 16.2", + "name": "sdk_framework_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9647,46 +10157,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "sdk_framework_unittests", "test_id_prefix": "ninja://sdk:sdk_framework_unittests/", - "variant_id": "iPhone X 16.2" 
+ "variant_id": "iPhone X 16.4" }, { "args": [ + "--xcodebuild-sim-runner", "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "sdk_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "sdk_unittests iPhone X 14.5", + "name": "sdk_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9696,46 +10206,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "sdk_unittests", "test_id_prefix": "ninja://sdk:sdk_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ + "--xcodebuild-sim-runner", "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "sdk_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "sdk_unittests iPhone X 15.5", + "name": "sdk_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9745,46 +10255,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "sdk_unittests", "test_id_prefix": "ninja://sdk:sdk_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ + "--xcodebuild-sim-runner", "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "sdk_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "sdk_unittests iPhone X 16.2", + "name": "sdk_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9794,45 +10304,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + 
"os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "sdk_unittests", "test_id_prefix": "ninja://sdk:sdk_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "svc_tests iPhone X 14.5", + "name": "svc_tests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9842,47 +10352,47 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "svc_tests iPhone X 15.5", + "name": "svc_tests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9892,47 +10402,47 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "svc_tests iPhone X 16.2", + "name": "svc_tests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9942,47 +10452,47 @@ { 
"cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "system_wrappers_unittests iPhone X 14.5", + "name": "system_wrappers_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -9992,45 +10502,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "system_wrappers_unittests iPhone X 15.5", + "name": "system_wrappers_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10040,45 +10550,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + 
"--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "system_wrappers_unittests iPhone X 16.2", + "name": "system_wrappers_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10088,45 +10598,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "test_support_unittests iPhone X 14.5", + "name": "test_support_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10136,45 +10646,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "test_support_unittests iPhone X 15.5", + "name": "test_support_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10184,45 +10694,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": 
"runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "test_support_unittests iPhone X 16.2", + "name": "test_support_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10232,93 +10742,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", - "--out-dir", - "${ISOLATED_OUTDIR}", - "--xctest", + "17.5", "--xcode-build-version", - "13c100" - ], - "isolate_name": "tools_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "tools_unittests iPhone X 14.5", - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "cipd_packages": [ - { - "cipd_package": "infra/tools/mac_toolchain/${platform}", - "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" - } - ], - "dimensions": { - "cpu": "x86-64", - "os": "Mac-12" - }, - "named_caches": [ - { - "name": "xcode_ios_13c100", - "path": "Xcode.app" - }, - { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" - } - ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test_id_prefix": "ninja://rtc_tools:tools_unittests/", - "variant_id": "iPhone X 14.5" - }, - { - "args": [ - "--platform", - "iPhone X", - "--version", - "15.5", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "tools_unittests iPhone X 15.5", + "name": "tools_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10328,45 +10790,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": 
"xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "16.2", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "tools_unittests iPhone X 16.2", + "name": "tools_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10376,93 +10838,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "14.5", - "--out-dir", - "${ISOLATED_OUTDIR}", - "--xctest", + "16.4", "--xcode-build-version", - "13c100" - ], - "isolate_name": "video_capture_tests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_capture_tests iPhone X 14.5", - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "cipd_packages": [ - { - "cipd_package": "infra/tools/mac_toolchain/${platform}", - "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" - } - ], - "dimensions": { - "cpu": "x86-64", - "os": "Mac-12" - }, - "named_caches": [ - { - "name": "xcode_ios_13c100", - "path": "Xcode.app" - }, - { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" - } - ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/", - "variant_id": "iPhone X 14.5" - }, - { - "args": [ - "--platform", - "iPhone X", - "--version", - "15.5", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "video_capture_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "video_capture_tests iPhone X 15.5", + "name": "tools_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10472,93 +10886,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - 
"name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/", - "variant_id": "iPhone X 15.5" + "test": "tools_unittests", + "test_id_prefix": "ninja://rtc_tools:tools_unittests/", + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "16.2", - "--out-dir", - "${ISOLATED_OUTDIR}", - "--xctest", + "17.5", "--xcode-build-version", - "14c18" - ], - "isolate_name": "video_capture_tests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_capture_tests iPhone X 16.2", - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "cipd_packages": [ - { - "cipd_package": "infra/tools/mac_toolchain/${platform}", - "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" - } - ], - "dimensions": { - "cpu": "x86-64", - "os": "Mac-12" - }, - "named_caches": [ - { - "name": "xcode_ios_14c18", - "path": "Xcode.app" - }, - { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" - } - ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/", - "variant_id": "iPhone X 16.2" - }, - { - "args": [ - "--platform", - "iPhone X", - "--version", - "14.5", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "video_engine_tests iPhone X 14.5", + "name": "video_engine_tests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10568,46 +10934,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "video_engine_tests iPhone X 15.5", + "name": "video_engine_tests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10617,46 +10983,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": 
"git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "video_engine_tests iPhone X 16.2", + "name": "video_engine_tests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10666,46 +11032,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "voip_unittests iPhone X 14.5", + "name": "voip_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10715,45 +11081,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": 
"voip_unittests iPhone X 15.5", + "name": "voip_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10763,45 +11129,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "voip_unittests iPhone X 16.2", + "name": "voip_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10811,45 +11177,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "webrtc_nonparallel_tests iPhone X 14.5", + "name": "webrtc_nonparallel_tests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10859,45 +11225,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", 
+ "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "webrtc_nonparallel_tests iPhone X 15.5", + "name": "webrtc_nonparallel_tests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10907,45 +11273,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "webrtc_nonparallel_tests iPhone X 16.2", + "name": "webrtc_nonparallel_tests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -10955,27 +11321,28 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" } ] }, diff --git a/infra/specs/client.webrtc.perf.json b/infra/specs/client.webrtc.perf.json index 80a1bc21b7..6015ac0407 100644 --- a/infra/specs/client.webrtc.perf.json +++ b/infra/specs/client.webrtc.perf.json @@ -14,6 +14,7 @@ ], "script": "//tools_webrtc/perf/process_perf_results.py" }, + "name": "video_codec_perf_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -37,8 +38,8 @@ }, { "args": [ - "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb", - "--nologs" + "--nologs", + "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb" ], "merge": { "args": [ @@ -47,6 +48,7 @@ ], "script": "//tools_webrtc/perf/process_perf_results.py" }, + "name": "webrtc_perf_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -83,6 +85,7 @@ ], "script": "//tools_webrtc/perf/process_perf_results.py" }, + "name": "video_codec_perf_tests", "resultdb": { 
"enable": true, "has_native_resultdb_integration": true @@ -106,8 +109,8 @@ }, { "args": [ - "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb", - "--nologs" + "--nologs", + "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb" ], "merge": { "args": [ @@ -116,6 +119,7 @@ ], "script": "//tools_webrtc/perf/process_perf_results.py" }, + "name": "webrtc_perf_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -149,7 +153,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb" ], - "isolate_name": "video_codec_perf_tests", "merge": { "args": [ "--test-suite", @@ -166,7 +169,7 @@ "dimensions": { "cpu": "x86-64", "gce": "1", - "os": "Ubuntu-18.04", + "os": "Ubuntu-22.04", "pool": "WebRTC-perf" }, "expiration": 10800, @@ -174,18 +177,18 @@ "idempotent": false, "io_timeout": 10800 }, + "test": "video_codec_perf_tests", "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" }, { "args": [ + "--nologs", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json", "--test-launcher-summary-output=${ISOLATED_OUTDIR}/gtest_output.json", "--test-arg=--gtest_output=json:/custom_artifacts/gtest_output.json", "--test-arg=--undefok=test_launcher_summary_output", - "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb", - "--nologs" + "--isolated-script-test-perf-output=${ISOLATED_OUTDIR}/perftest-output.pb" ], - "isolate_name": "webrtc_perf_tests", "merge": { "args": [ "--test-suite", @@ -202,7 +205,7 @@ "dimensions": { "cpu": "x86-64", "gce": "1", - "os": "Ubuntu-18.04", + "os": "Ubuntu-22.04", "pool": "WebRTC-perf" }, "expiration": 10800, @@ -210,6 +213,7 @@ "idempotent": false, "io_timeout": 10800 }, + "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" } ] @@ -220,7 +224,6 @@ "args": [ "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "video_codec_perf_tests", "merge": { "args": [ "--test-suite", @@ -244,16 +247,16 @@ "idempotent": false, "io_timeout": 10800 }, + "test": "video_codec_perf_tests", "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" }, { "args": [ "--test_artifacts_dir=${ISOLATED_OUTDIR}", "--save_worst_frame", - "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json", - "--nologs" + "--nologs", + "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "webrtc_perf_tests", "merge": { "args": [ "--test-suite", @@ -277,6 +280,7 @@ "idempotent": false, "io_timeout": 10800 }, + "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" } ] @@ -287,7 +291,6 @@ "args": [ "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "video_codec_perf_tests", "merge": { "args": [ "--test-suite", @@ -312,16 +315,16 @@ "idempotent": false, "io_timeout": 10800 }, + "test": "video_codec_perf_tests", "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" }, { "args": [ "--test_artifacts_dir=${ISOLATED_OUTDIR}", "--save_worst_frame", - "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json", - "--nologs" + "--nologs", + "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "webrtc_perf_tests", "merge": { "args": [ "--test-suite", @@ -346,6 +349,7 @@ "idempotent": false, "io_timeout": 10800 }, + "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" } ] @@ -356,7 +360,6 @@ "args": [ 
"--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "video_codec_perf_tests", "merge": { "args": [ "--test-suite", @@ -381,16 +384,16 @@ "idempotent": false, "io_timeout": 10800 }, + "test": "video_codec_perf_tests", "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" }, { "args": [ "--test_artifacts_dir=${ISOLATED_OUTDIR}", "--save_worst_frame", - "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json", - "--nologs" + "--nologs", + "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "webrtc_perf_tests", "merge": { "args": [ "--test-suite", @@ -415,6 +418,7 @@ "idempotent": false, "io_timeout": 10800 }, + "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" } ] @@ -425,7 +429,6 @@ "args": [ "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "video_codec_perf_tests", "merge": { "args": [ "--test-suite", @@ -449,16 +452,16 @@ "idempotent": false, "io_timeout": 10800 }, + "test": "video_codec_perf_tests", "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" }, { "args": [ "--test_artifacts_dir=${ISOLATED_OUTDIR}", "--save_worst_frame", - "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json", - "--nologs" + "--nologs", + "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "webrtc_perf_tests", "merge": { "args": [ "--test-suite", @@ -482,6 +485,7 @@ "idempotent": false, "io_timeout": 10800 }, + "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" } ] diff --git a/infra/specs/generate_buildbot_json.py b/infra/specs/generate_buildbot_json.py index 43ae366cc8..c45c4079cf 100755 --- a/infra/specs/generate_buildbot_json.py +++ b/infra/specs/generate_buildbot_json.py @@ -17,8 +17,9 @@ _SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) _SRC_DIR = os.path.dirname(os.path.dirname(_SCRIPT_DIR)) +_TESTING_BBOT_DIR = os.path.join(_SRC_DIR, 'testing', 'buildbot') sys.path.insert(0, _SRC_DIR) -sys.path.insert(0, os.path.join(_SRC_DIR, 'testing', 'buildbot')) +sys.path.insert(0, _TESTING_BBOT_DIR) from testing.buildbot import generate_buildbot_json @@ -43,53 +44,54 @@ def generate_mixins_file_from_used_mixins(generator): - chromium_args = generate_buildbot_json.BBJSONGenerator.parse_args(argv=None) - chromium_generator = generate_buildbot_json.BBJSONGenerator(chromium_args) - chromium_generator.load_configuration_files() - - seen_mixins = set() - for waterfall in generator.waterfalls: - seen_mixins = seen_mixins.union(waterfall.get('mixins', set())) - for bot_name, tester in waterfall['machines'].items(): - del bot_name - seen_mixins = seen_mixins.union(tester.get('mixins', set())) - for suite in generator.test_suites.values(): - for test in suite.values(): - if isinstance(test, list): - # This is for mixins defined in variants.pyl. 
- for variant in test: - seen_mixins = seen_mixins.union(variant.get('mixins', set())) - else: - seen_mixins = seen_mixins.union(test.get('mixins', set())) - - found_mixins = ast.literal_eval(open(WEBRTC_MIXIN_FILE_NAME).read()) - for mixin in seen_mixins: - if mixin not in found_mixins: - found_mixins[mixin] = chromium_generator.mixins[mixin] - elif mixin in chromium_generator.mixins: - assert False, '"%s" is already defined in Chromium\'s mixins.pyl' % mixin - - format_data = { - 'script_name': os.path.basename(__file__), - 'data_source': 'mixins_webrtc.pyl and Chromium\'s mixins.pyl', - 'mixin_data': dict(sorted(found_mixins.items())), - } - with open(MIXIN_FILE_NAME, 'w') as f: - f.write(MIXINS_PYL_TEMPLATE.format(**format_data)) - - return subprocess.call(['yapf', '-i', MIXIN_FILE_NAME]) + chromium_mixins = generator.load_pyl_file( + os.path.join(_TESTING_BBOT_DIR, 'mixins.pyl')) + seen_mixins = set() + for waterfall in generator.waterfalls: + seen_mixins = seen_mixins.union(waterfall.get('mixins', set())) + for bot_name, tester in waterfall['machines'].items(): + del bot_name + seen_mixins = seen_mixins.union(tester.get('mixins', set())) + for suite in generator.test_suites.values(): + for test in suite.values(): + if isinstance(test, list): + # This is for mixins defined in variants.pyl. + for variant in test: + seen_mixins = seen_mixins.union( + variant.get('mixins', set())) + else: + seen_mixins = seen_mixins.union(test.get('mixins', set())) + + found_mixins = ast.literal_eval(open(WEBRTC_MIXIN_FILE_NAME).read()) + for mixin in seen_mixins: + if mixin not in found_mixins: + found_mixins[mixin] = chromium_mixins[mixin] + elif mixin in chromium_mixins: + assert False, ( + '"%s" is already defined in Chromium\'s mixins.pyl' % mixin) + + format_data = { + 'script_name': os.path.basename(__file__), + 'data_source': 'mixins_webrtc.pyl and Chromium\'s mixins.pyl', + 'mixin_data': dict(sorted(found_mixins.items())), + } + with open(MIXIN_FILE_NAME, 'w') as f: + f.write(MIXINS_PYL_TEMPLATE.format(**format_data)) + + return subprocess.call(['yapf', '-i', MIXIN_FILE_NAME]) def main(): - override_args = ['--pyl-files-dir', _SCRIPT_DIR] - webrtc_args = generate_buildbot_json.BBJSONGenerator.parse_args(override_args) - webrtc_generator = generate_buildbot_json.BBJSONGenerator(webrtc_args) - webrtc_generator.load_configuration_files() - webrtc_generator.resolve_configuration_files() + override_args = ['--pyl-files-dir', _SCRIPT_DIR] + webrtc_args = generate_buildbot_json.BBJSONGenerator.parse_args( + override_args) + webrtc_generator = generate_buildbot_json.BBJSONGenerator(webrtc_args) + webrtc_generator.load_configuration_files() + webrtc_generator.resolve_configuration_files() - generate_mixins_file_from_used_mixins(webrtc_generator) - return webrtc_generator.main() + generate_mixins_file_from_used_mixins(webrtc_generator) + return webrtc_generator.main() if __name__ == '__main__': # pragma: no cover - sys.exit(main()) + sys.exit(main()) diff --git a/infra/specs/gn_isolate_map.pyl b/infra/specs/gn_isolate_map.pyl index 7e31965b4e..20de759b01 100644 --- a/infra/specs/gn_isolate_map.pyl +++ b/infra/specs/gn_isolate_map.pyl @@ -25,15 +25,15 @@ }, "android_junit_tests": { "label": "//:android_junit_tests", - "type": "junit_test", + "type": "raw", }, "android_examples_junit_tests": { "label": "//examples:android_examples_junit_tests", - "type": "junit_test", + "type": "raw", }, "android_sdk_junit_tests": { "label": "//sdk/android:android_sdk_junit_tests", - "type": "junit_test", + "type": "raw", 
}, "apprtcmobile_tests": { "label": "//examples:apprtcmobile_tests", @@ -84,6 +84,10 @@ "label": "//media:rtc_media_unittests", "type": "console_test_launcher", }, + "rtc_p2p_unittests": { + "label": "//:rtc_p2p_unittests", + "type": "console_test_launcher", + }, "rtc_pc_unittests": { "label": "//pc:rtc_pc_unittests", "type": "console_test_launcher", @@ -126,9 +130,7 @@ }, "video_capture_tests": { "label": "//modules/video_capture:video_capture_tests", - "type": "non_parallel_console_test_launcher", - # TODO(bugs.webrtc.org/9292): remove use_webcam and the ensure script. - "use_webcam": True, + "type": "console_test_launcher", }, "video_codec_perf_tests": { "label": "//modules/video_coding:video_codec_perf_tests", diff --git a/infra/specs/internal.client.webrtc.json b/infra/specs/internal.client.webrtc.json index eff9fe0704..87ae7fc306 100644 --- a/infra/specs/internal.client.webrtc.json +++ b/infra/specs/internal.client.webrtc.json @@ -7,11 +7,10 @@ "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -25,32 +24,32 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -64,21 +63,22 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { @@ -86,11 +86,10 @@ "--readline-timeout=1200", "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -104,35 +103,35 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "hard_timeout": 7200, "io_timeout": 7200, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, + "test": "modules_tests", 
"test_id_prefix": "ninja://modules:modules_tests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -146,37 +145,37 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "rtc_pc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_pc_unittests", + "name": "rtc_p2p_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -186,36 +185,36 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_stats_unittests", + "name": "rtc_pc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -225,36 +224,36 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://stats:rtc_stats_unittests/" + "test": "rtc_pc_unittests", + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "system_wrappers_unittests", + "name": "rtc_stats_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -264,36 +263,36 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": 
"git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" + "test": "rtc_stats_unittests", + "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "test_support_unittests", + "name": "system_wrappers_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -303,36 +302,36 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://test:test_support_unittests/" + "test": "system_wrappers_unittests", + "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "tools_unittests", + "name": "test_support_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -342,36 +341,36 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://rtc_tools:tools_unittests/" + "test": "test_support_unittests", + "test_id_prefix": "ninja://test:test_support_unittests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "video_capture_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "video_capture_tests", + "name": "tools_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -381,32 +380,32 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": 
"ninja://modules/video_capture:video_capture_tests/" + "test": "tools_unittests", + "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -420,22 +419,23 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" } ] @@ -444,15 +444,14 @@ "isolated_scripts": [ { "args": [ + "--nologs", "--write_perf_output_on_ios", "--xctest", "--xcode-build-version", - "13c100", + "15f31d", "--out-dir", - "${ISOLATED_OUTDIR}", - "--nologs" + "${ISOLATED_OUTDIR}" ], - "isolate_name": "webrtc_perf_tests", "merge": { "args": [ "--test-suite", @@ -470,12 +469,12 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "id": "mac-254-e504", - "os": "iOS-12.4.1", + "cpu": "arm64", + "os": "iOS-17.6.1", "pool": "WebRTC" }, "hard_timeout": 10800, @@ -483,12 +482,13 @@ "io_timeout": 10800, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" } ] @@ -499,11 +499,10 @@ "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -517,32 +516,32 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -556,21 +555,22 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } 
], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { @@ -578,11 +578,10 @@ "--readline-timeout=1200", "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -596,35 +595,35 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "hard_timeout": 7200, "io_timeout": 7200, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -638,37 +637,37 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "rtc_pc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_pc_unittests", + "name": "rtc_p2p_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -678,36 +677,36 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_stats_unittests", + "name": "rtc_pc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -717,36 +716,36 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": 
"git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://stats:rtc_stats_unittests/" + "test": "rtc_pc_unittests", + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "system_wrappers_unittests", + "name": "rtc_stats_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -756,36 +755,36 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" + "test": "rtc_stats_unittests", + "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "test_support_unittests", + "name": "system_wrappers_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -795,36 +794,36 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://test:test_support_unittests/" + "test": "system_wrappers_unittests", + "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "tools_unittests", + "name": "test_support_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -834,36 +833,36 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://rtc_tools:tools_unittests/" 
+ "test": "test_support_unittests", + "test_id_prefix": "ninja://test:test_support_unittests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "video_capture_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "video_capture_tests", + "name": "tools_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -873,32 +872,32 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/" + "test": "tools_unittests", + "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { "args": [ "--xctest", "--xcode-build-version", - "15a5229h", + "15f31d", "--out-dir", "${ISOLATED_OUTDIR}" ], - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -912,22 +911,23 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { - "os": "iOS-16.6", + "os": "iOS-18", "pool": "chrome.tests" }, "named_caches": [ { - "name": "xcode_ios_15a5229h", + "name": "xcode_ios_15f31d", "path": "Xcode.app" } ], "service_account": "chrome-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" } ] diff --git a/infra/specs/mixins.pyl b/infra/specs/mixins.pyl index 055fbeacda..3bba887503 100644 --- a/infra/specs/mixins.pyl +++ b/infra/specs/mixins.pyl @@ -18,17 +18,10 @@ } } }, - 'baremetal-pool': { + 'arm64': { 'swarming': { 'dimensions': { - 'pool': 'WebRTC-baremetal' - } - } - }, - 'baremetal-try-pool': { - 'swarming': { - 'dimensions': { - 'pool': 'WebRTC-baremetal-try' + 'cpu': 'arm64' } } }, @@ -39,6 +32,7 @@ } }, 'chromium-tester-service-account': { + 'fail_if_unused': False, 'swarming': { 'service_account': 'chromium-tester@chops-service-accounts.iam.gserviceaccount.com' @@ -51,6 +45,14 @@ } } }, + 'crosshatch': { + 'swarming': { + 'dimensions': { + 'device_type': 'crosshatch', + 'os': 'Android' + } + } + }, 'fuchsia-gtest-output': { 'args': [ '--test-launcher-summary-output=${ISOLATED_OUTDIR}/gtest_output.json', @@ -64,10 +66,10 @@ 'has_native_resultdb_integration': True } }, - 'ios-device-16.6': { + 'ios-device-18': { 'swarming': { 'dimensions': { - 'os': 'iOS-16.6', + 'os': 'iOS-18', 'pool': 'chrome.tests' } } @@ -76,33 +78,32 @@ 'swarming': { 'idempotent': False, 'dimensions': { - 'os': 'iOS-12.4.1', - 'pool': 'WebRTC', - 'id': 'mac-254-e504' + 'os': 'iOS-17.6.1', + 'pool': 'WebRTC' } } }, - 'ios_runtime_cache_14_5': { + 'ios_runtime_cache_16_4': { 'swarming': { 'named_caches': [{ - 'name': 'runtime_ios_14_5', - 'path': 'Runtime-ios-14.5' + 'name': 'runtime_ios_16_4', + 'path': 'Runtime-ios-16.4' }] } }, - 'ios_runtime_cache_15_5': { + 'ios_runtime_cache_17_5': { 'swarming': { 'named_caches': [{ - 'name': 
'runtime_ios_15_5', - 'path': 'Runtime-ios-15.5' + 'name': 'runtime_ios_17_5', + 'path': 'Runtime-ios-17.5' }] } }, - 'ios_runtime_cache_16_2': { + 'ios_runtime_cache_18_0': { 'swarming': { 'named_caches': [{ - 'name': 'runtime_ios_16_2', - 'path': 'Runtime-ios-16.2' + 'name': 'runtime_ios_18_0', + 'path': 'Runtime-ios-18.0' }] } }, @@ -128,6 +129,14 @@ } } }, + 'linux-jammy': { + 'fail_if_unused': False, + 'swarming': { + 'dimensions': { + 'os': 'Ubuntu-22.04' + } + } + }, 'mac-m1-cpu': { 'swarming': { 'dimensions': { @@ -150,11 +159,19 @@ } } }, - 'mac_12_x64': { + 'mac_14_arm64': { + 'swarming': { + 'dimensions': { + 'cpu': 'arm64', + 'os': 'Mac-14' + } + } + }, + 'mac_14_x64': { 'swarming': { 'dimensions': { 'cpu': 'x86-64', - 'os': 'Mac-12' + 'os': 'Mac-14' } } }, @@ -166,7 +183,7 @@ 'location': '.', 'revision': - 'git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb' + 'git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d' }] } }, @@ -210,7 +227,7 @@ 'args': ['--nologs'] }, 'quick-perf-tests': { - 'args': ['--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/', '--nologs'] + 'args': ['--webrtc_quick_perf_test', '--nologs'] }, 'redfin': { 'swarming': { @@ -247,6 +264,11 @@ 'shards': 6 } }, + 'shards-8': { + 'swarming': { + 'shards': 8 + } + }, 'timeout-2h': { 'swarming': { 'hard_timeout': 7200, @@ -284,41 +306,40 @@ } } }, - 'x86-64': { + 'win11': { 'swarming': { 'dimensions': { - 'cpu': 'x86-64' + 'os': 'Windows-11-22000' } } }, - 'xcode_13_main': { - 'args': ['--xcode-build-version', '13c100'], + 'x86-64': { + 'fail_if_unused': False, 'swarming': { - 'named_caches': [{ - 'name': 'xcode_ios_13c100', - 'path': 'Xcode.app' - }] + 'dimensions': { + 'cpu': 'x86-64' + } } }, - 'xcode_14_main': { - 'args': ['--xcode-build-version', '14c18'], + 'xcode_15_main': { + 'args': ['--xcode-build-version', '15f31d'], 'swarming': { 'named_caches': [{ - 'name': 'xcode_ios_14c18', + 'name': 'xcode_ios_15f31d', 'path': 'Xcode.app' }] } }, - 'xcode_15_main': { - 'args': ['--xcode-build-version', '15a5229h'], + 'xcode_16_main': { + 'args': ['--xcode-build-version', '16a5230g'], 'swarming': { 'named_caches': [{ - 'name': 'xcode_ios_15a5229h', + 'name': 'xcode_ios_16a5230g', 'path': 'Xcode.app' }] } }, - 'xcode_parallelization': { - 'args': ['--xcode-parallelization'] + 'xcodebuild_sim_runner': { + 'args': ['--xcodebuild-sim-runner'] } } diff --git a/infra/specs/mixins_webrtc.pyl b/infra/specs/mixins_webrtc.pyl index c6d8e5dc75..c9fcc40948 100644 --- a/infra/specs/mixins_webrtc.pyl +++ b/infra/specs/mixins_webrtc.pyl @@ -14,19 +14,12 @@ }, }, }, - 'baremetal-pool': { + 'arm64': { 'swarming': { 'dimensions': { - 'pool': 'WebRTC-baremetal', - }, - }, - }, - 'baremetal-try-pool': { - 'swarming': { - 'dimensions': { - 'pool': 'WebRTC-baremetal-try', - }, - }, + 'cpu': 'arm64' + } + } }, 'cores-12': { 'swarming': { @@ -35,6 +28,14 @@ } } }, + 'crosshatch': { + 'swarming': { + 'dimensions': { + 'device_type': 'crosshatch', + 'os': 'Android', + }, + }, + }, # Hack to use the test-launcher-summary-output flag + emulator folders for gtest-output # but it's currently the only way to get the file out of the emulator. 
'fuchsia-gtest-output': { @@ -44,11 +45,11 @@ '--test-arg=--undefok=test_launcher_summary_output' ], }, - 'ios-device-16.6': { + 'ios-device-18': { 'swarming': { 'dimensions': { - 'os': 'iOS-16.6', - 'pool': 'chrome.tests' + 'os': 'iOS-18', + 'pool': 'chrome.tests', } } }, @@ -56,27 +57,41 @@ 'swarming': { 'idempotent': False, 'dimensions': { - 'os': 'iOS-12.4.1', + 'os': 'iOS-17.6.1', 'pool': 'WebRTC', - 'id': 'mac-254-e504', + #'device_status': 'available' }, }, }, - 'ios_runtime_cache_14_5': { + 'ios_runtime_cache_16_4': { 'swarming': { - 'named_caches': [{ - 'name': 'runtime_ios_14_5', - 'path': 'Runtime-ios-14.5' - }] - } + 'named_caches': [ + { + 'name': 'runtime_ios_16_4', + 'path': 'Runtime-ios-16.4', + }, + ], + }, }, - 'ios_runtime_cache_16_2': { + 'ios_runtime_cache_17_5': { 'swarming': { - 'named_caches': [{ - 'name': 'runtime_ios_16_2', - 'path': 'Runtime-ios-16.2' - }] - } + 'named_caches': [ + { + 'name': 'runtime_ios_17_5', + 'path': 'Runtime-ios-17.5', + }, + ], + }, + }, + 'ios_runtime_cache_18_0': { + 'swarming': { + 'named_caches': [ + { + 'name': 'runtime_ios_18_0', + 'path': 'Runtime-ios-18.0', + }, + ], + }, }, 'limited-capacity': { # Sometimes there are multiple tests that can be run only on one machine. @@ -86,6 +101,13 @@ 'expiration': 10800, }, }, + 'linux-focal': { + 'swarming': { + 'dimensions': { + 'os': 'Ubuntu-20.04' + } + } + }, 'mac-m1-cpu': { 'swarming': { 'dimensions': { @@ -93,6 +115,22 @@ } } }, + 'mac_12_arm64': { + 'swarming': { + 'dimensions': { + 'cpu': 'arm64', + 'os': 'Mac-12' + } + } + }, + 'mac_14_arm64': { + 'swarming': { + 'dimensions': { + 'cpu': 'arm64', + 'os': 'Mac-14' + } + } + }, 'mac11': { 'swarming': { 'dimensions': { @@ -148,7 +186,7 @@ }, 'quick-perf-tests': { 'args': [ - '--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/', + '--webrtc_quick_perf_test', '--nologs', ], }, @@ -189,6 +227,11 @@ 'shards': 6, }, }, + 'shards-8': { + 'swarming': { + 'shards': 8, + }, + }, 'timeout-2h': { 'swarming': { 'hard_timeout': 7200, @@ -201,27 +244,50 @@ 'io_timeout': 10800, }, }, + 'walleye': { + 'swarming': { + 'dimensions': { + 'device_type': 'walleye', + 'os': 'Android' + } + } + }, + 'win11': { + 'swarming': { + 'dimensions': { + 'os': 'Windows-11-22000' + } + } + }, 'webrtc-xctest': { 'args': [ '--xctest', ], }, - 'xcode_13_main': { - 'args': ['--xcode-build-version', '13c100'], + 'xcode_15_main': { + 'args': ['--xcode-build-version', '15f31d'], 'swarming': { 'named_caches': [{ - 'name': 'xcode_ios_13c100', + 'name': 'xcode_ios_15f31d', 'path': 'Xcode.app' }] } }, - 'xcode_14_main': { - 'args': ['--xcode-build-version', '14c18'], + 'xcode_16_main': { + 'args': [ + '--xcode-build-version', + '16a5230g', + ], 'swarming': { - 'named_caches': [{ - 'name': 'xcode_ios_14c18', - 'path': 'Xcode.app' - }] - } + 'named_caches': [ + { + 'name': 'xcode_ios_16a5230g', + 'path': 'Xcode.app', + }, + ], + }, }, + 'xcodebuild_sim_runner': { + 'args': ['--xcodebuild-sim-runner'] + } } diff --git a/infra/specs/test_suites.pyl b/infra/specs/test_suites.pyl index 9430d45a39..180f789d3e 100644 --- a/infra/specs/test_suites.pyl +++ b/infra/specs/test_suites.pyl @@ -35,6 +35,7 @@ 'mixins': ['shards-4'], }, 'rtc_media_unittests': {}, + 'rtc_p2p_unittests': {}, 'rtc_pc_unittests': {}, 'rtc_stats_unittests': {}, 'rtc_unittests': { @@ -42,7 +43,8 @@ }, 'slow_peer_connection_unittests': {}, 'svc_tests': { - 'mixins': ['shards-4'], + 'remove_mixins': ['walleye'], + 'mixins': ['shards-8', 'crosshatch'], }, 'system_wrappers_unittests': {}, 'test_support_unittests': {}, @@ 
-54,6 +56,9 @@ 'webrtc_nonparallel_tests': {}, }, 'android_tests_tryserver_specific': { + 'video_codec_perf_tests': { + 'mixins': ['shards-2', 'quick-perf-tests'], + }, 'webrtc_perf_tests': { 'mixins': ['quick-perf-tests'], } @@ -73,6 +78,7 @@ 'mixins': ['shards-4'], }, 'rtc_media_unittests': {}, + 'rtc_p2p_unittests': {}, 'rtc_pc_unittests': {}, 'rtc_stats_unittests': {}, 'rtc_unittests': { @@ -95,13 +101,12 @@ 'shared_screencast_stream_test': {}, }, 'desktop_tests_try_server_specific': { - 'video_capture_tests': { - 'mixins': ['baremetal-try-pool'], - }, 'video_codec_perf_tests': { + 'remove_mixins': ['resultdb-json-format'], 'mixins': ['quick-perf-tests', 'resultdb-gtest-json-format'], }, 'webrtc_perf_tests': { + 'remove_mixins': ['resultdb-json-format'], 'mixins': ['quick-perf-tests', 'resultdb-gtest-json-format'], } }, @@ -114,6 +119,7 @@ # TODO(bugs.webrtc.org/14705): Enable when NonGlobalFieldTrialsInstanceDoesNotModifyGlobalString is fixed. # TODO(bugs.webrtc.org/14700): Enable when NetworkTest tests are fixed. # 'rtc_unittests': {}, + 'rtc_p2p_unittests': {}, 'rtc_pc_unittests': {}, 'svc_tests': { 'mixins': ['shards-4'], @@ -147,6 +153,7 @@ 'modules_unittests': { 'mixins': ['shards-6'], }, + 'rtc_p2p_unittests': {}, 'rtc_pc_unittests': {}, 'rtc_stats_unittests': {}, # TODO(bugs.webrtc.org/11362): Real XCTests fail to start on devices. @@ -155,14 +162,13 @@ 'system_wrappers_unittests': {}, 'test_support_unittests': {}, 'tools_unittests': {}, - 'video_capture_tests': {}, 'video_engine_tests': { 'mixins': ['shards-4'], }, }, 'ios_simulator_tests': { 'apprtcmobile_tests': { - 'mixins': ['xcode_parallelization'] + 'mixins': ['xcodebuild_sim_runner'] }, 'audio_decoder_unittests': {}, 'common_audio_unittests': {}, @@ -175,16 +181,18 @@ 'mixins': ['shards-6', 'cores-12'], }, 'rtc_media_unittests': {}, + 'rtc_p2p_unittests': {}, 'rtc_pc_unittests': {}, 'rtc_stats_unittests': {}, - 'rtc_unittests': { - 'mixins': ['shards-6'], - }, + # TODO: b/402029695 - Re-enable these tests. 
+ # 'rtc_unittests': { + # 'mixins': ['shards-6'], + # }, 'sdk_framework_unittests': { - 'mixins': ['xcode_parallelization'] + 'mixins': ['xcodebuild_sim_runner'] }, 'sdk_unittests': { - 'mixins': ['xcode_parallelization'] + 'mixins': ['xcodebuild_sim_runner'] }, 'svc_tests': { 'mixins': ['shards-4', 'cores-12'], @@ -192,7 +200,6 @@ 'system_wrappers_unittests': {}, 'test_support_unittests': {}, 'tools_unittests': {}, - 'video_capture_tests': {}, 'video_engine_tests': { 'mixins': ['shards-4'], }, @@ -231,11 +238,6 @@ ], }, }, - 'video_capture_tests': { - 'video_capture_tests': { - 'mixins': ['baremetal-pool'], - } - }, }, ############################################################################## @@ -250,20 +252,11 @@ 'desktop_tests', 'desktop_tests_try_server_specific', ], - 'desktop_tests_with_video_capture': [ - 'desktop_tests', - 'video_capture_tests', - ], 'linux_desktop_tests_tryserver': [ 'desktop_tests', 'desktop_tests_linux_specific', 'desktop_tests_try_server_specific', ], - 'linux_desktop_tests_with_video_capture': [ - 'desktop_tests', - 'desktop_tests_linux_specific', - 'video_capture_tests', - ], 'linux_tests': [ 'desktop_tests', 'desktop_tests_linux_specific', @@ -277,9 +270,9 @@ 'ios_simulator_tests_matrix': { 'ios_simulator_tests': { 'variants': [ - 'SIM_IPHONE_X_14_5', - 'SIM_IPHONE_X_15_5', - 'SIM_IPHONE_X_16_2', + 'SIM_IPHONE_X_16_4', + 'SIM_IPHONE_14_17_5', + 'SIM_IPHONE_15_18_0', ], }, }, diff --git a/infra/specs/tryserver.webrtc.json b/infra/specs/tryserver.webrtc.json index b89ee5f731..02f0450505 100644 --- a/infra/specs/tryserver.webrtc.json +++ b/infra/specs/tryserver.webrtc.json @@ -7,6 +7,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "AppRTCMobile_test_apk", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -26,6 +27,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "android_instrumentation_test_apk", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -45,6 +47,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "audio_decoder_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -64,6 +67,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "common_audio_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -83,6 +87,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "common_video_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -102,6 +107,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "dcsctp_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -121,6 +127,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "modules_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -141,6 +148,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "modules_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -161,6 +169,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "peerconnection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -181,6 +190,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_media_unittests", "resultdb": { 
"enable": true, "has_native_resultdb_integration": true @@ -200,6 +210,27 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_p2p_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_gtest_merge.py" + }, + "name": "rtc_pc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -219,6 +250,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_stats_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -238,6 +270,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -258,6 +291,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "slow_peer_connection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -277,6 +311,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "svc_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -284,11 +319,11 @@ "swarming": { "dimensions": { "android_devices": "1", - "device_type": "walleye", + "device_type": "crosshatch", "os": "Android" }, "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", - "shards": 4 + "shards": 8 }, "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" @@ -297,6 +332,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "system_wrappers_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -316,6 +352,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "test_support_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -335,6 +372,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "tools_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -350,10 +388,36 @@ "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, + { + "args": [ + "--webrtc_quick_perf_test", + "--nologs" + ], + "merge": { + "script": "//testing/merge_scripts/standard_gtest_merge.py" + }, + "name": "video_codec_perf_tests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", + "shards": 2 + }, + "test": "video_codec_perf_tests", + "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" + }, { "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "video_engine_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -374,6 +438,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "voip_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -393,6 +458,7 @@ "merge": { 
"script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "webrtc_nonparallel_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -410,12 +476,13 @@ }, { "args": [ - "--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/", + "--webrtc_quick_perf_test", "--nologs" ], "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "webrtc_perf_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -432,25 +499,47 @@ "test_id_prefix": "ninja://:webrtc_perf_tests/" } ], - "junit_tests": [ + "isolated_scripts": [ { "args": [ "--shards=1" ], + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, "name": "android_examples_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, "test": "android_examples_junit_tests", "test_id_prefix": "ninja://examples:android_examples_junit_tests/" }, { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, "name": "android_sdk_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, "test": "android_sdk_junit_tests", "test_id_prefix": "ninja://sdk/android:android_sdk_junit_tests/" } @@ -462,6 +551,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "AppRTCMobile_test_apk", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -481,6 +571,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "android_instrumentation_test_apk", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -500,6 +591,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "audio_decoder_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -519,6 +611,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "common_audio_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -538,6 +631,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "common_video_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -557,6 +651,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "dcsctp_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -576,6 +671,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "modules_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -596,6 +692,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "modules_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -616,6 +713,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "peerconnection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -636,6 +734,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": 
"rtc_media_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -655,6 +754,27 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_p2p_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_gtest_merge.py" + }, + "name": "rtc_pc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -674,6 +794,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_stats_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -693,6 +814,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -713,6 +835,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "slow_peer_connection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -732,6 +855,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "svc_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -739,11 +863,11 @@ "swarming": { "dimensions": { "android_devices": "1", - "device_type": "walleye", + "device_type": "crosshatch", "os": "Android" }, "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", - "shards": 4 + "shards": 8 }, "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" @@ -752,6 +876,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "system_wrappers_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -771,6 +896,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "test_support_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -790,6 +916,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "tools_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -805,10 +932,36 @@ "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, + { + "args": [ + "--webrtc_quick_perf_test", + "--nologs" + ], + "merge": { + "script": "//testing/merge_scripts/standard_gtest_merge.py" + }, + "name": "video_codec_perf_tests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", + "shards": 2 + }, + "test": "video_codec_perf_tests", + "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" + }, { "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "video_engine_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -829,6 +982,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "voip_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true 
@@ -848,6 +1002,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "webrtc_nonparallel_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -865,12 +1020,13 @@ }, { "args": [ - "--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/", + "--webrtc_quick_perf_test", "--nologs" ], "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "webrtc_perf_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -887,25 +1043,47 @@ "test_id_prefix": "ninja://:webrtc_perf_tests/" } ], - "junit_tests": [ + "isolated_scripts": [ { "args": [ "--shards=1" ], + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, "name": "android_examples_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, "test": "android_examples_junit_tests", "test_id_prefix": "ninja://examples:android_examples_junit_tests/" }, { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, "name": "android_sdk_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, "test": "android_sdk_junit_tests", "test_id_prefix": "ninja://sdk/android:android_sdk_junit_tests/" } @@ -917,6 +1095,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "AppRTCMobile_test_apk", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -936,6 +1115,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "android_instrumentation_test_apk", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -955,6 +1135,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "audio_decoder_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -974,6 +1155,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "common_audio_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -993,6 +1175,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "common_video_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1012,6 +1195,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "dcsctp_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1031,6 +1215,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "modules_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1051,6 +1236,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "modules_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1071,6 +1257,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "peerconnection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1091,6 +1278,7 @@ "merge": { "script": 
"//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_media_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1110,6 +1298,27 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_p2p_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_gtest_merge.py" + }, + "name": "rtc_pc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1129,6 +1338,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_stats_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1148,6 +1358,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1168,6 +1379,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "slow_peer_connection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1187,6 +1399,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "svc_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1194,11 +1407,11 @@ "swarming": { "dimensions": { "android_devices": "1", - "device_type": "walleye", + "device_type": "crosshatch", "os": "Android" }, "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", - "shards": 4 + "shards": 8 }, "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" @@ -1207,6 +1420,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "system_wrappers_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1226,6 +1440,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "test_support_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1245,6 +1460,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "tools_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1260,10 +1476,36 @@ "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, + { + "args": [ + "--webrtc_quick_perf_test", + "--nologs" + ], + "merge": { + "script": "//testing/merge_scripts/standard_gtest_merge.py" + }, + "name": "video_codec_perf_tests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", + "shards": 2 + }, + "test": "video_codec_perf_tests", + "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" + }, { "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "video_engine_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1284,6 +1526,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": 
"voip_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1303,6 +1546,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "webrtc_nonparallel_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1320,12 +1564,13 @@ }, { "args": [ - "--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/", + "--webrtc_quick_perf_test", "--nologs" ], "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "webrtc_perf_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1342,25 +1587,47 @@ "test_id_prefix": "ninja://:webrtc_perf_tests/" } ], - "junit_tests": [ + "isolated_scripts": [ { "args": [ "--shards=1" ], + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, "name": "android_examples_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, "test": "android_examples_junit_tests", "test_id_prefix": "ninja://examples:android_examples_junit_tests/" }, { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, "name": "android_sdk_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, "test": "android_sdk_junit_tests", "test_id_prefix": "ninja://sdk/android:android_sdk_junit_tests/" } @@ -1372,6 +1639,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "peerconnection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1396,6 +1664,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "AppRTCMobile_test_apk", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1415,6 +1684,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "android_instrumentation_test_apk", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1434,6 +1704,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "audio_decoder_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1453,6 +1724,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "common_audio_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1472,6 +1744,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "common_video_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1491,6 +1764,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "dcsctp_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1510,6 +1784,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "modules_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1530,6 +1805,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "modules_unittests", "resultdb": { "enable": true, 
"has_native_resultdb_integration": true @@ -1550,6 +1826,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "peerconnection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1570,6 +1847,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_media_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1589,6 +1867,27 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_p2p_unittests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_gtest_merge.py" + }, + "name": "rtc_pc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1608,6 +1907,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_stats_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1627,6 +1927,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "rtc_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1647,6 +1948,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "slow_peer_connection_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1666,6 +1968,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "svc_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1673,11 +1976,11 @@ "swarming": { "dimensions": { "android_devices": "1", - "device_type": "walleye", + "device_type": "crosshatch", "os": "Android" }, "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", - "shards": 4 + "shards": 8 }, "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" @@ -1686,6 +1989,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "system_wrappers_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1705,6 +2009,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "test_support_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1724,6 +2029,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "tools_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1739,10 +2045,36 @@ "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, + { + "args": [ + "--webrtc_quick_perf_test", + "--nologs" + ], + "merge": { + "script": "//testing/merge_scripts/standard_gtest_merge.py" + }, + "name": "video_codec_perf_tests", + "resultdb": { + "enable": true, + "has_native_resultdb_integration": true + }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", + "shards": 2 + }, + "test": "video_codec_perf_tests", + "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" + }, 
{ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "video_engine_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1763,6 +2095,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "voip_unittests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1782,6 +2115,7 @@ "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "webrtc_nonparallel_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1799,12 +2133,13 @@ }, { "args": [ - "--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/", + "--webrtc_quick_perf_test", "--nologs" ], "merge": { "script": "//testing/merge_scripts/standard_gtest_merge.py" }, + "name": "webrtc_perf_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -1821,25 +2156,47 @@ "test_id_prefix": "ninja://:webrtc_perf_tests/" } ], - "junit_tests": [ + "isolated_scripts": [ { "args": [ "--shards=1" ], + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, "name": "android_examples_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, "test": "android_examples_junit_tests", "test_id_prefix": "ninja://examples:android_examples_junit_tests/" }, { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, "name": "android_sdk_junit_tests", "resultdb": { "enable": true, "has_native_resultdb_integration": true }, + "swarming": { + "dimensions": { + "android_devices": "1", + "device_type": "walleye", + "os": "Android" + }, + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" + }, "test": "android_sdk_junit_tests", "test_id_prefix": "ninja://sdk/android:android_sdk_junit_tests/" } @@ -1862,7 +2219,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -1874,9 +2230,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { @@ -1886,7 +2243,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -1898,9 +2254,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { @@ -1910,7 +2267,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -1922,9 +2278,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { @@ -1934,7 
+2291,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -1946,9 +2302,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { @@ -1958,7 +2315,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -1970,9 +2326,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { @@ -1982,7 +2339,30 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_file": "${ISOLATED_OUTDIR}/gtest_output.json", + "result_format": "gtest_json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-22.04" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { + "args": [ + "--test-launcher-summary-output=${ISOLATED_OUTDIR}/gtest_output.json", + "--test-arg=--gtest_output=json:/custom_artifacts/gtest_output.json", + "--test-arg=--undefok=test_launcher_summary_output", + "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" + ], "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -1994,9 +2374,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { @@ -2006,7 +2387,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2018,10 +2398,11 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { @@ -2031,7 +2412,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2043,9 +2423,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { @@ -2055,7 +2436,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2067,10 +2447,11 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { @@ -2080,7 +2461,6 @@ 
"--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2092,9 +2472,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { @@ -2104,7 +2485,6 @@ "--test-arg=--undefok=test_launcher_summary_output", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -2116,9 +2496,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -2129,22 +2510,21 @@ "isolated_scripts": [ { "args": [ + "--xcodebuild-sim-runner", "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "apprtcmobile_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "apprtcmobile_tests iPhone X 14.5", + "name": "apprtcmobile_tests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2154,46 +2534,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "apprtcmobile_tests", "test_id_prefix": "ninja://examples:apprtcmobile_tests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ + "--xcodebuild-sim-runner", "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "apprtcmobile_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "apprtcmobile_tests iPhone X 15.5", + "name": "apprtcmobile_tests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2203,46 +2583,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": 
"apprtcmobile_tests", "test_id_prefix": "ninja://examples:apprtcmobile_tests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ + "--xcodebuild-sim-runner", "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "apprtcmobile_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "apprtcmobile_tests iPhone X 16.2", + "name": "apprtcmobile_tests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2252,45 +2632,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "apprtcmobile_tests", "test_id_prefix": "ninja://examples:apprtcmobile_tests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "audio_decoder_unittests iPhone X 14.5", + "name": "audio_decoder_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2300,45 +2680,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "audio_decoder_unittests iPhone X 15.5", + "name": "audio_decoder_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2348,45 +2728,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + 
"revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "audio_decoder_unittests iPhone X 16.2", + "name": "audio_decoder_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2396,45 +2776,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "common_audio_unittests iPhone X 14.5", + "name": "common_audio_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2444,45 +2824,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": 
"common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "common_audio_unittests iPhone X 15.5", + "name": "common_audio_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2492,45 +2872,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "common_audio_unittests iPhone X 16.2", + "name": "common_audio_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2540,45 +2920,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "common_video_unittests iPhone X 14.5", + "name": "common_video_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2588,45 +2968,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" 
}, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "common_video_unittests iPhone X 15.5", + "name": "common_video_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2636,45 +3016,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "common_video_unittests iPhone X 16.2", + "name": "common_video_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2684,45 +3064,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "dcsctp_unittests iPhone X 14.5", + "name": "dcsctp_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2732,45 +3112,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": 
"git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "dcsctp_unittests iPhone X 15.5", + "name": "dcsctp_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2780,45 +3160,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "dcsctp_unittests iPhone X 16.2", + "name": "dcsctp_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2828,45 +3208,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": 
"modules_tests iPhone X 14.5", + "name": "modules_tests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2876,46 +3256,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "modules_tests iPhone X 15.5", + "name": "modules_tests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2925,46 +3305,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "modules_tests iPhone X 16.2", + "name": "modules_tests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -2974,46 +3354,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 
14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "modules_unittests iPhone X 14.5", + "name": "modules_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3023,47 +3403,47 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "modules_unittests iPhone X 15.5", + "name": "modules_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3073,47 +3453,47 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "modules_unittests iPhone X 16.2", + "name": "modules_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3123,47 +3503,47 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": 
"runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_media_unittests iPhone X 14.5", + "name": "rtc_media_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3173,45 +3553,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_media_unittests iPhone X 15.5", + "name": "rtc_media_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3221,45 +3601,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_media_unittests iPhone X 16.2", + "name": "rtc_media_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3269,45 +3649,45 @@ { "cipd_package": 
"infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "rtc_pc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_pc_unittests iPhone X 14.5", + "name": "rtc_p2p_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3317,45 +3697,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/", - "variant_id": "iPhone X 14.5" + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/", + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "rtc_pc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_pc_unittests iPhone X 15.5", + "name": "rtc_p2p_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3365,45 +3745,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/", - "variant_id": "iPhone X 15.5" + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/", + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", 
"--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "rtc_pc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_pc_unittests iPhone X 16.2", + "name": "rtc_p2p_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3413,45 +3793,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/", - "variant_id": "iPhone X 16.2" + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/", + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_stats_unittests iPhone X 14.5", + "name": "rtc_pc_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3461,45 +3841,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://stats:rtc_stats_unittests/", - "variant_id": "iPhone X 14.5" + "test": "rtc_pc_unittests", + "test_id_prefix": "ninja://pc:rtc_pc_unittests/", + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_stats_unittests iPhone X 15.5", + "name": "rtc_pc_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3509,45 +3889,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": 
"Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://stats:rtc_stats_unittests/", - "variant_id": "iPhone X 15.5" + "test": "rtc_pc_unittests", + "test_id_prefix": "ninja://pc:rtc_pc_unittests/", + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_stats_unittests iPhone X 16.2", + "name": "rtc_pc_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3557,45 +3937,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://stats:rtc_stats_unittests/", - "variant_id": "iPhone X 16.2" + "test": "rtc_pc_unittests", + "test_id_prefix": "ninja://pc:rtc_pc_unittests/", + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_unittests iPhone X 14.5", + "name": "rtc_stats_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3605,46 +3985,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", - "shards": 6 + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://:rtc_unittests/", - "variant_id": "iPhone X 14.5" + "test": "rtc_stats_unittests", + "test_id_prefix": "ninja://stats:rtc_stats_unittests/", + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_unittests iPhone X 
15.5", + "name": "rtc_stats_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3654,46 +4033,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", - "shards": 6 + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://:rtc_unittests/", - "variant_id": "iPhone X 15.5" + "test": "rtc_stats_unittests", + "test_id_prefix": "ninja://stats:rtc_stats_unittests/", + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_unittests iPhone X 16.2", + "name": "rtc_stats_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3703,47 +4081,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", - "shards": 6 + "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://:rtc_unittests/", - "variant_id": "iPhone X 16.2" + "test": "rtc_stats_unittests", + "test_id_prefix": "ninja://stats:rtc_stats_unittests/", + "variant_id": "iPhone X 16.4" }, { "args": [ + "--xcodebuild-sim-runner", "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "sdk_framework_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "sdk_framework_unittests iPhone X 14.5", + "name": "sdk_framework_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3753,46 +4130,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": 
"Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "sdk_framework_unittests", "test_id_prefix": "ninja://sdk:sdk_framework_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ + "--xcodebuild-sim-runner", "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "sdk_framework_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "sdk_framework_unittests iPhone X 15.5", + "name": "sdk_framework_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3802,46 +4179,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "sdk_framework_unittests", "test_id_prefix": "ninja://sdk:sdk_framework_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ + "--xcodebuild-sim-runner", "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "sdk_framework_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "sdk_framework_unittests iPhone X 16.2", + "name": "sdk_framework_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3851,46 +4228,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "sdk_framework_unittests", "test_id_prefix": "ninja://sdk:sdk_framework_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ + "--xcodebuild-sim-runner", "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "sdk_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "sdk_unittests iPhone X 14.5", + "name": 
"sdk_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3900,46 +4277,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "sdk_unittests", "test_id_prefix": "ninja://sdk:sdk_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ + "--xcodebuild-sim-runner", "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "sdk_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "sdk_unittests iPhone X 15.5", + "name": "sdk_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3949,46 +4326,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "sdk_unittests", "test_id_prefix": "ninja://sdk:sdk_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ + "--xcodebuild-sim-runner", "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-parallelization", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "sdk_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "sdk_unittests iPhone X 16.2", + "name": "sdk_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -3998,45 +4375,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "sdk_unittests", "test_id_prefix": "ninja://sdk:sdk_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", 
- "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "svc_tests iPhone X 14.5", + "name": "svc_tests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4046,47 +4423,47 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "svc_tests iPhone X 15.5", + "name": "svc_tests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4096,47 +4473,47 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "svc_tests iPhone X 16.2", + "name": "svc_tests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4146,47 +4523,47 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": 
"Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "system_wrappers_unittests iPhone X 14.5", + "name": "system_wrappers_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4196,45 +4573,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "system_wrappers_unittests iPhone X 15.5", + "name": "system_wrappers_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4244,45 +4621,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "system_wrappers_unittests iPhone X 16.2", + "name": "system_wrappers_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4292,45 +4669,45 @@ { "cipd_package": 
"infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "test_support_unittests iPhone X 14.5", + "name": "test_support_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4340,45 +4717,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "test_support_unittests iPhone X 15.5", + "name": "test_support_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4388,45 +4765,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", 
"${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "test_support_unittests iPhone X 16.2", + "name": "test_support_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4436,93 +4813,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", - "--out-dir", - "${ISOLATED_OUTDIR}", - "--xctest", + "17.5", "--xcode-build-version", - "13c100" - ], - "isolate_name": "tools_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "tools_unittests iPhone X 14.5", - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "cipd_packages": [ - { - "cipd_package": "infra/tools/mac_toolchain/${platform}", - "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" - } - ], - "dimensions": { - "cpu": "x86-64", - "os": "Mac-12" - }, - "named_caches": [ - { - "name": "xcode_ios_13c100", - "path": "Xcode.app" - }, - { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" - } - ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test_id_prefix": "ninja://rtc_tools:tools_unittests/", - "variant_id": "iPhone X 14.5" - }, - { - "args": [ - "--platform", - "iPhone X", - "--version", - "15.5", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "tools_unittests iPhone X 15.5", + "name": "tools_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4532,45 +4861,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "16.2", + "18.0", + 
"--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "tools_unittests iPhone X 16.2", + "name": "tools_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4580,93 +4909,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "14.5", - "--out-dir", - "${ISOLATED_OUTDIR}", - "--xctest", + "16.4", "--xcode-build-version", - "13c100" - ], - "isolate_name": "video_capture_tests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_capture_tests iPhone X 14.5", - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "cipd_packages": [ - { - "cipd_package": "infra/tools/mac_toolchain/${platform}", - "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" - } - ], - "dimensions": { - "cpu": "x86-64", - "os": "Mac-12" - }, - "named_caches": [ - { - "name": "xcode_ios_13c100", - "path": "Xcode.app" - }, - { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" - } - ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/", - "variant_id": "iPhone X 14.5" - }, - { - "args": [ - "--platform", - "iPhone X", - "--version", - "15.5", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "video_capture_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "video_capture_tests iPhone X 15.5", + "name": "tools_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4676,93 +4957,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, - "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/", - "variant_id": "iPhone X 15.5" + "test": "tools_unittests", + "test_id_prefix": "ninja://rtc_tools:tools_unittests/", + "variant_id": 
"iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "16.2", - "--out-dir", - "${ISOLATED_OUTDIR}", - "--xctest", + "17.5", "--xcode-build-version", - "14c18" - ], - "isolate_name": "video_capture_tests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_capture_tests iPhone X 16.2", - "resultdb": { - "enable": true, - "has_native_resultdb_integration": true - }, - "swarming": { - "cipd_packages": [ - { - "cipd_package": "infra/tools/mac_toolchain/${platform}", - "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" - } - ], - "dimensions": { - "cpu": "x86-64", - "os": "Mac-12" - }, - "named_caches": [ - { - "name": "xcode_ios_14c18", - "path": "Xcode.app" - }, - { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" - } - ], - "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" - }, - "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/", - "variant_id": "iPhone X 16.2" - }, - { - "args": [ - "--platform", - "iPhone X", - "--version", - "14.5", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "video_engine_tests iPhone X 14.5", + "name": "video_engine_tests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4772,46 +5005,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "video_engine_tests iPhone X 15.5", + "name": "video_engine_tests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4821,46 +5054,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": 
"ninja://:video_engine_tests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "video_engine_tests iPhone X 16.2", + "name": "video_engine_tests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4870,46 +5103,46 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com", "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "voip_unittests iPhone X 14.5", + "name": "voip_unittests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4919,45 +5152,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "voip_unittests iPhone X 15.5", + "name": "voip_unittests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -4967,45 +5200,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": 
"xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "voip_unittests iPhone X 16.2", + "name": "voip_unittests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -5015,45 +5248,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" }, { "args": [ "--platform", - "iPhone X", + "iPhone 14", "--version", - "14.5", + "17.5", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "13c100" + "--xctest" ], - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "webrtc_nonparallel_tests iPhone X 14.5", + "name": "webrtc_nonparallel_tests iPhone 14 17.5", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -5063,45 +5296,45 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_13c100", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_14_5", - "path": "Runtime-ios-14.5" + "name": "runtime_ios_17_5", + "path": "Runtime-ios-17.5" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/", - "variant_id": "iPhone X 14.5" + "variant_id": "iPhone 14 17.5" }, { "args": [ "--platform", - "iPhone X", + "iPhone 15", "--version", - "15.5", + "18.0", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "webrtc_nonparallel_tests iPhone X 15.5", + "name": "webrtc_nonparallel_tests iPhone 15 18.0", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -5111,45 +5344,45 @@ { 
"cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_15_5", - "path": "Runtime-ios-15.5" + "name": "runtime_ios_18_0", + "path": "Runtime-ios-18.0" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/", - "variant_id": "iPhone X 15.5" + "variant_id": "iPhone 15 18.0" }, { "args": [ "--platform", "iPhone X", "--version", - "16.2", + "16.4", + "--xcode-build-version", + "16a5230g", "--out-dir", "${ISOLATED_OUTDIR}", - "--xctest", - "--xcode-build-version", - "14c18" + "--xctest" ], - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "webrtc_nonparallel_tests iPhone X 16.2", + "name": "webrtc_nonparallel_tests iPhone X 16.4", "resultdb": { "enable": true, "has_native_resultdb_integration": true @@ -5159,34 +5392,35 @@ { "cipd_package": "infra/tools/mac_toolchain/${platform}", "location": ".", - "revision": "git_revision:59ddedfe3849abf560cbe0b41bb8e431041cd2bb" + "revision": "git_revision:a18b7d95d26f3c6bf9591978b19cf0ca8268ac7d" } ], "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "named_caches": [ { - "name": "xcode_ios_14c18", + "name": "xcode_ios_16a5230g", "path": "Xcode.app" }, { - "name": "runtime_ios_16_2", - "path": "Runtime-ios-16.2" + "name": "runtime_ios_16_4", + "path": "Runtime-ios-16.4" } ], "service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com" }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/", - "variant_id": "iPhone X 16.2" + "variant_id": "iPhone X 16.4" } ] }, + "iwyu_verifier": {}, "linux_asan": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5197,13 +5431,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5214,13 +5448,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5231,13 +5465,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5248,13 +5482,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" 
}, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5265,14 +5499,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5283,14 +5517,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5301,14 +5535,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5319,13 +5553,30 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5336,13 +5587,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5353,13 +5604,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5370,14 +5621,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "shared_screencast_stream_test", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5388,13 +5639,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "shared_screencast_stream_test", "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5405,13 +5656,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5422,14 +5673,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5440,13 +5691,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5457,13 +5708,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5474,13 +5725,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5491,14 +5742,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5509,13 +5760,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5526,9 +5777,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -5542,7 +5794,6 @@ "linux_coverage": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5554,13 +5805,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5572,13 +5823,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5590,13 +5841,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { 
- "isolate_name": "dcsctp_unittests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5608,13 +5859,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5626,14 +5877,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5645,14 +5896,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5664,14 +5915,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5683,31 +5934,49 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_pc_unittests", + "name": "rtc_p2p_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { + "isolate_profile_data": true, + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_pc_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-22.04" + } + }, + "test": "rtc_pc_unittests", + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5719,13 +5988,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5737,14 +6006,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "shared_screencast_stream_test", "isolate_profile_data": true, "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5756,13 +6025,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "shared_screencast_stream_test", "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" }, { - "isolate_name": "slow_peer_connection_unittests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5774,13 +6043,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5792,14 +6061,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5811,13 +6080,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5829,13 +6098,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5847,37 +6116,18 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, - { - "isolate_name": "video_capture_tests", - "isolate_profile_data": true, - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_capture_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "dimensions": { - "cpu": "x86-64", - "os": "Ubuntu-18.04", - "pool": "WebRTC-baremetal-try" - } - }, - "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/" - }, { "args": [ - "--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/", + "--webrtc_quick_perf_test", "--nologs", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "video_codec_perf_tests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5890,13 +6140,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "video_codec_perf_tests", "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" }, { - "isolate_name": "video_engine_tests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5908,14 +6158,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": 
"ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5927,13 +6177,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5945,18 +6195,18 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" }, { "args": [ - "--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/", + "--webrtc_quick_perf_test", "--nologs", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "webrtc_perf_tests", "isolate_profile_data": true, "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" @@ -5969,9 +6219,10 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" } ] @@ -5979,7 +6230,6 @@ "linux_dbg": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -5990,13 +6240,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6007,13 +6257,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6024,13 +6274,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6041,13 +6291,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6058,14 +6308,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6076,14 +6326,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6094,14 +6344,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6112,13 +6362,30 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-22.04" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6129,13 +6396,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6146,13 +6413,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6163,14 +6430,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "shared_screencast_stream_test", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6181,13 +6448,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "shared_screencast_stream_test", "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6198,13 +6465,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6215,14 +6482,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6233,13 +6500,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, 
@@ -6250,13 +6517,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6267,13 +6534,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6284,14 +6551,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6302,13 +6569,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6319,18 +6586,40 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] }, "linux_libfuzzer_rel": {}, - "linux_memcheck": { + "linux_more_configs": { + "isolated_scripts": [ + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "peerconnection_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-22.04" + }, + "shards": 4 + }, + "test": "peerconnection_unittests", + "test_id_prefix": "ninja://pc:peerconnection_unittests/" + } + ] + }, + "linux_msan": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6341,13 +6630,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6358,13 +6647,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6375,13 +6664,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6392,13 +6681,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", 
"merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6409,14 +6698,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6427,14 +6716,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6445,14 +6734,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6463,82 +6752,82 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_pc_unittests", + "name": "rtc_p2p_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_stats_unittests", + "name": "rtc_pc_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, - "test_id_prefix": "ninja://stats:rtc_stats_unittests/" + "test": "rtc_pc_unittests", + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_unittests", + "name": "rtc_stats_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" - }, - "shards": 6 + "os": "Ubuntu-20.04" + } }, - "test_id_prefix": "ninja://:rtc_unittests/" + "test": "rtc_stats_unittests", + "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "shared_screencast_stream_test", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "shared_screencast_stream_test", + "name": "rtc_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" - } + "os": "Ubuntu-20.04" + }, + "shards": 6 }, - "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + "test": "rtc_unittests", + "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6549,13 +6838,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + 
"test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6566,14 +6855,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6584,13 +6873,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6601,13 +6890,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6618,13 +6907,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6635,14 +6924,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6653,13 +6942,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6670,39 +6959,17 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] }, - "linux_more_configs": { - "isolated_scripts": [ - { - "isolate_name": "peerconnection_unittests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "peerconnection_unittests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "dimensions": { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - }, - "shards": 4 - }, - "test_id_prefix": "ninja://pc:peerconnection_unittests/" - } - ] - }, - "linux_msan": { + "linux_rel": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6713,13 +6980,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" + "os": "Ubuntu-22.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6730,13 +6997,13 @@ "swarming": { "dimensions": { 
"cpu": "x86-64", - "os": "Ubuntu-20.04" + "os": "Ubuntu-22.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6747,13 +7014,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" + "os": "Ubuntu-22.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6764,13 +7031,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" + "os": "Ubuntu-22.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6781,14 +7048,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" + "os": "Ubuntu-22.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6799,14 +7066,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" + "os": "Ubuntu-22.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6817,14 +7084,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6835,13 +7102,30 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-22.04" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6852,13 +7136,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6869,13 +7153,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6886,14 +7170,31 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" + "os": "Ubuntu-22.04" }, "shards": 6 }, + "test": "rtc_unittests", 
"test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "shared_screencast_stream_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-22.04" + } + }, + "test": "shared_screencast_stream_test", + "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6904,13 +7205,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" + "os": "Ubuntu-22.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6921,14 +7222,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6939,13 +7240,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" + "os": "Ubuntu-22.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6956,13 +7257,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" + "os": "Ubuntu-22.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -6973,48 +7274,444 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" + "os": "Ubuntu-22.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", + "args": [ + "--webrtc_quick_perf_test", + "--nologs", + "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" + ], "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "video_engine_tests", + "name": "video_codec_perf_tests", "resultdb": { - "result_format": "json" + "result_file": "${ISOLATED_OUTDIR}/gtest_output.json", + "result_format": "gtest_json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" - }, - "shards": 4 + "os": "Ubuntu-22.04" + } }, - "test_id_prefix": "ninja://:video_engine_tests/" + "test": "video_codec_perf_tests", + "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "voip_unittests", + "name": "video_engine_tests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-20.04" - } + "os": "Ubuntu-22.04" + }, + "shards": 4 + }, + "test": "video_engine_tests", + "test_id_prefix": "ninja://:video_engine_tests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "voip_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { 
+ "cpu": "x86-64", + "os": "Ubuntu-22.04" + } + }, + "test": "voip_unittests", + "test_id_prefix": "ninja://:voip_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "webrtc_nonparallel_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-22.04" + } + }, + "test": "webrtc_nonparallel_tests", + "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" + }, + { + "args": [ + "--webrtc_quick_perf_test", + "--nologs", + "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" + ], + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "webrtc_perf_tests", + "resultdb": { + "result_file": "${ISOLATED_OUTDIR}/gtest_output.json", + "result_format": "gtest_json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-22.04" + } + }, + "test": "webrtc_perf_tests", + "test_id_prefix": "ninja://:webrtc_perf_tests/" + } + ] + }, + "linux_tsan2": { + "isolated_scripts": [ + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "audio_decoder_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "audio_decoder_unittests", + "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "common_audio_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "common_audio_unittests", + "test_id_prefix": "ninja://common_audio:common_audio_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "common_video_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "common_video_unittests", + "test_id_prefix": "ninja://common_video:common_video_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "dcsctp_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "dcsctp_unittests", + "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "modules_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + }, + "shards": 2 + }, + "test": "modules_tests", + "test_id_prefix": "ninja://modules:modules_tests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "modules_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + }, + "shards": 6 + }, + "test": "modules_unittests", + "test_id_prefix": "ninja://modules:modules_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "peerconnection_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + }, + "shards": 4 + }, + "test": 
"peerconnection_unittests", + "test_id_prefix": "ninja://pc:peerconnection_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_media_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "rtc_media_unittests", + "test_id_prefix": "ninja://media:rtc_media_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_pc_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "rtc_pc_unittests", + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_stats_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "rtc_stats_unittests", + "test_id_prefix": "ninja://stats:rtc_stats_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + }, + "shards": 6 + }, + "test": "rtc_unittests", + "test_id_prefix": "ninja://:rtc_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "slow_peer_connection_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "slow_peer_connection_unittests", + "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "svc_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + }, + "shards": 4 + }, + "test": "svc_tests", + "test_id_prefix": "ninja://pc:svc_tests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "system_wrappers_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "system_wrappers_unittests", + "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "test_support_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "test_support_unittests", + "test_id_prefix": "ninja://test:test_support_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "tools_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + 
"test": "tools_unittests", + "test_id_prefix": "ninja://rtc_tools:tools_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "video_engine_tests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + }, + "shards": 4 + }, + "test": "video_engine_tests", + "test_id_prefix": "ninja://:video_engine_tests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "voip_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7028,14 +7725,14 @@ "os": "Ubuntu-20.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] }, - "linux_rel": { + "linux_ubsan": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7046,13 +7743,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7063,13 +7760,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7080,13 +7777,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7097,13 +7794,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7114,14 +7811,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7132,14 +7829,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7150,14 +7847,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": 
"ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7168,13 +7865,30 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7185,13 +7899,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7202,13 +7916,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7219,14 +7933,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "shared_screencast_stream_test", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7237,13 +7951,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "shared_screencast_stream_test", "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7254,13 +7968,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7271,14 +7985,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7289,13 +8003,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7306,13 +8020,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": 
"tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7323,54 +8037,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_capture_tests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_capture_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "dimensions": { - "cpu": "x86-64", - "os": "Ubuntu-18.04", - "pool": "WebRTC-baremetal-try" - } - }, - "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/" - }, - { - "args": [ - "--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/", - "--nologs", - "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" - ], - "isolate_name": "video_codec_perf_tests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_codec_perf_tests", - "resultdb": { - "result_file": "${ISOLATED_OUTDIR}/gtest_output.json", - "result_format": "gtest_json" - }, - "swarming": { - "dimensions": { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - }, - "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" - }, - { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7381,14 +8054,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7399,13 +8072,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7416,40 +8089,17 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" - }, - { - "args": [ - "--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/", - "--nologs", - "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" - ], - "isolate_name": "webrtc_perf_tests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "webrtc_perf_tests", - "resultdb": { - "result_file": "${ISOLATED_OUTDIR}/gtest_output.json", - "result_format": "gtest_json" - }, - "swarming": { - "dimensions": { - "cpu": "x86-64", - "os": "Ubuntu-18.04" - } - }, - "test_id_prefix": "ninja://:webrtc_perf_tests/" } ] }, - "linux_tsan2": { + "linux_ubsan_vptr": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7460,13 +8110,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7477,13 +8127,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": 
"Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7494,13 +8144,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7511,13 +8161,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7528,14 +8178,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7546,14 +8196,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7564,14 +8214,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7582,13 +8232,30 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7599,13 +8266,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7616,13 +8283,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7633,14 +8300,31 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": 
"ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "shared_screencast_stream_test", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Ubuntu-20.04" + } + }, + "test": "shared_screencast_stream_test", + "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7651,13 +8335,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7668,14 +8352,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7686,13 +8370,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7703,13 +8387,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7720,13 +8404,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7737,14 +8421,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7755,13 +8439,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7772,17 +8456,17 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-20.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] }, - "linux_ubsan": { + "linux_x86_dbg": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7793,13 +8477,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "audio_decoder_unittests", 
"test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7810,13 +8494,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7827,13 +8511,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7844,13 +8528,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7861,14 +8545,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7879,14 +8563,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7897,14 +8581,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -7915,82 +8599,82 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_pc_unittests", + "name": "rtc_p2p_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_stats_unittests", + "name": "rtc_pc_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, - "test_id_prefix": "ninja://stats:rtc_stats_unittests/" + "test": "rtc_pc_unittests", + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_unittests", + "name": "rtc_stats_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" - }, - "shards": 6 + "os": "Ubuntu-22.04" + } }, - "test_id_prefix": "ninja://:rtc_unittests/" + "test": "rtc_stats_unittests", + "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "shared_screencast_stream_test", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "shared_screencast_stream_test", + "name": "rtc_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" - } + "os": "Ubuntu-22.04" + }, + "shards": 6 }, - "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + "test": "rtc_unittests", + "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8001,13 +8685,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8018,14 +8702,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8036,13 +8720,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8053,13 +8737,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8070,13 +8754,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8087,14 +8771,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8105,13 +8789,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8122,17 +8806,17 @@ "swarming": { "dimensions": { "cpu": 
"x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] }, - "linux_ubsan_vptr": { + "linux_x86_rel": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8143,13 +8827,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8160,13 +8844,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8177,13 +8861,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8194,13 +8878,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8211,14 +8895,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8229,14 +8913,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8247,14 +8931,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8265,82 +8949,82 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_pc_unittests", + "name": "rtc_p2p_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, - "test_id_prefix": "ninja://pc:rtc_pc_unittests/" + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { 
"script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_stats_unittests", + "name": "rtc_pc_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, - "test_id_prefix": "ninja://stats:rtc_stats_unittests/" + "test": "rtc_pc_unittests", + "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "rtc_unittests", + "name": "rtc_stats_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" - }, - "shards": 6 + "os": "Ubuntu-22.04" + } }, - "test_id_prefix": "ninja://:rtc_unittests/" + "test": "rtc_stats_unittests", + "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "shared_screencast_stream_test", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, - "name": "shared_screencast_stream_test", + "name": "rtc_unittests", "resultdb": { "result_format": "json" }, "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" - } + "os": "Ubuntu-22.04" + }, + "shards": 6 }, - "test_id_prefix": "ninja://modules/desktop_capture:shared_screencast_stream_test/" + "test": "rtc_unittests", + "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8351,13 +9035,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8368,14 +9052,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8386,13 +9070,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8403,13 +9087,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8420,13 +9104,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8437,14 +9121,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": 
{ "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8455,13 +9139,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8472,17 +9156,17 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Ubuntu-22.04" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] }, - "linux_x86_dbg": { + "mac_asan": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8492,14 +9176,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8509,14 +9194,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8526,14 +9212,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8543,14 +9230,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8560,15 +9248,16 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8578,15 +9267,16 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8596,15 +9286,16 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8614,14 +9305,33 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "rtc_media_unittests", 
"test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cores": "12", + "cpu": "x86-64", + "os": "Mac-14" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8631,14 +9341,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8648,14 +9359,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8665,15 +9377,16 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8683,14 +9396,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8700,15 +9414,16 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8718,14 +9433,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8735,14 +9451,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8752,14 +9469,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8769,15 +9487,16 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" }, "shards": 4 }, + "test": 
"video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8787,14 +9506,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8804,18 +9524,21 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] }, - "linux_x86_rel": { + "mac_compile_dbg": {}, + "mac_compile_rel": {}, + "mac_dbg": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8825,14 +9548,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8842,14 +9566,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8859,14 +9584,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8876,14 +9602,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8893,15 +9620,16 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8911,15 +9639,16 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8929,15 +9658,16 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8947,14 +9677,33 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cores": "12", + "cpu": "x86-64", + "os": "Mac-14" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8964,14 +9713,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8981,14 +9731,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -8998,15 +9749,16 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9016,14 +9768,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9033,15 +9786,16 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9051,14 +9805,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9068,14 +9823,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9085,14 +9841,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9102,15 +9859,16 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9120,14 +9878,15 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9137,18 +9896,19 @@ }, "swarming": { "dimensions": { + "cores": "12", "cpu": "x86-64", - "os": "Ubuntu-18.04" + "os": "Mac-14" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] }, - "mac_asan": { + "mac_dbg_m1": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9158,15 +9918,14 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9176,15 +9935,14 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9194,15 +9952,14 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9212,15 +9969,14 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9230,16 +9986,15 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9249,16 +10004,15 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9268,16 +10022,15 @@ 
}, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9287,15 +10040,31 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9305,15 +10074,14 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9323,15 +10091,14 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9341,16 +10108,15 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9360,15 +10126,14 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9378,16 +10143,15 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9397,15 +10161,14 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9415,15 +10178,14 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + 
"os": "Mac-14" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9433,15 +10195,14 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9451,16 +10212,15 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9470,15 +10230,14 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9488,21 +10247,18 @@ }, "swarming": { "dimensions": { - "cores": "12", - "cpu": "x86-64", - "os": "Mac-12" + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] }, - "mac_compile_dbg": {}, - "mac_compile_rel": {}, - "mac_dbg": { + "mac_rel": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9512,15 +10268,14 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9530,15 +10285,14 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9548,15 +10302,14 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9566,15 +10319,14 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9584,16 +10336,15 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": 
"modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9603,16 +10354,15 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9622,16 +10372,15 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9641,15 +10390,31 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Mac-14" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9659,15 +10424,14 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9677,15 +10441,14 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9695,16 +10458,15 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9714,15 +10476,14 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9732,16 +10493,15 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9751,15 +10511,14 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", 
"merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9769,15 +10528,14 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9787,15 +10545,37 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", + "args": [ + "--webrtc_quick_perf_test", + "--nologs", + "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" + ], + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "video_codec_perf_tests", + "resultdb": { + "result_file": "${ISOLATED_OUTDIR}/gtest_output.json", + "result_format": "gtest_json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Mac-14" + } + }, + "test": "video_codec_perf_tests", + "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9805,16 +10585,15 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9824,15 +10603,14 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9842,19 +10620,41 @@ }, "swarming": { "dimensions": { - "cores": "12", "cpu": "x86-64", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" + }, + { + "args": [ + "--webrtc_quick_perf_test", + "--nologs", + "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" + ], + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "webrtc_perf_tests", + "resultdb": { + "result_file": "${ISOLATED_OUTDIR}/gtest_output.json", + "result_format": "gtest_json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Mac-14" + } + }, + "test": "webrtc_perf_tests", + "test_id_prefix": "ninja://:webrtc_perf_tests/" } ] }, - "mac_dbg_m1": { + "mac_rel_m1": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9865,13 +10665,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9882,13 +10682,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" 
}, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9899,13 +10699,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9916,13 +10716,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9933,14 +10733,14 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9951,14 +10751,14 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9969,14 +10769,14 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -9987,13 +10787,30 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "arm64-64-Apple_M1", + "os": "Mac-14" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10004,13 +10821,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10021,13 +10838,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10038,14 +10855,14 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10056,13 +10873,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10073,14 +10890,14 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10091,13 +10908,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10108,13 +10925,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10125,13 +10942,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10142,14 +10959,14 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10160,13 +10977,13 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10177,17 +10994,17 @@ "swarming": { "dimensions": { "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "os": "Mac-14" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] }, - "mac_rel": { + "win11_debug": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10198,13 +11015,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10215,13 +11032,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10232,13 +11049,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10249,13 +11066,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10266,14 +11083,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10284,14 +11101,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10302,14 +11119,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10320,13 +11137,30 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Windows-11-22000" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10337,13 +11171,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10354,13 +11188,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10371,14 +11205,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10389,13 +11223,13 @@ "swarming": { "dimensions": { 
"cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10406,14 +11240,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10424,13 +11258,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10441,13 +11275,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10458,54 +11292,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_capture_tests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_capture_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "dimensions": { - "cpu": "x86-64", - "os": "Mac-12", - "pool": "WebRTC-baremetal-try" - } - }, - "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/" - }, - { - "args": [ - "--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/", - "--nologs", - "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" - ], - "isolate_name": "video_codec_perf_tests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_codec_perf_tests", - "resultdb": { - "result_file": "${ISOLATED_OUTDIR}/gtest_output.json", - "result_format": "gtest_json" - }, - "swarming": { - "dimensions": { - "cpu": "x86-64", - "os": "Mac-12" - } - }, - "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" - }, - { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10516,14 +11309,14 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10534,13 +11327,13 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10551,40 +11344,17 @@ "swarming": { "dimensions": { "cpu": "x86-64", - "os": "Mac-12" + "os": "Windows-11-22000" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" - 
}, - { - "args": [ - "--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/", - "--nologs", - "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" - ], - "isolate_name": "webrtc_perf_tests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "webrtc_perf_tests", - "resultdb": { - "result_file": "${ISOLATED_OUTDIR}/gtest_output.json", - "result_format": "gtest_json" - }, - "swarming": { - "dimensions": { - "cpu": "x86-64", - "os": "Mac-12" - } - }, - "test_id_prefix": "ninja://:webrtc_perf_tests/" } ] }, - "mac_rel_m1": { + "win11_release": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10594,14 +11364,14 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10611,14 +11381,14 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10628,14 +11398,14 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10645,14 +11415,14 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10662,15 +11432,15 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10680,15 +11450,15 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10698,15 +11468,15 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10716,14 +11486,31 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - 
"os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" + } + }, + "test": "rtc_media_unittests", + "test_id_prefix": "ninja://media:rtc_media_unittests/" + }, + { + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Windows-11-22000" } }, - "test_id_prefix": "ninja://media:rtc_media_unittests/" + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" }, { - "isolate_name": "rtc_pc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10733,14 +11520,14 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10750,14 +11537,14 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10767,15 +11554,15 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10785,14 +11572,14 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10802,15 +11589,15 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10820,14 +11607,14 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10837,14 +11624,14 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10854,14 +11641,37 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" } }, + "test": "tools_unittests", "test_id_prefix": 
"ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", + "args": [ + "--webrtc_quick_perf_test", + "--nologs", + "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" + ], + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "video_codec_perf_tests", + "resultdb": { + "result_file": "${ISOLATED_OUTDIR}/gtest_output.json", + "result_format": "gtest_json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Windows-11-22000" + } + }, + "test": "video_codec_perf_tests", + "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10871,15 +11681,15 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10889,14 +11699,14 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10906,18 +11716,41 @@ }, "swarming": { "dimensions": { - "cpu": "arm64-64-Apple_M1", - "os": "Mac-12" + "cpu": "x86-64", + "os": "Windows-11-22000" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" + }, + { + "args": [ + "--webrtc_quick_perf_test", + "--nologs", + "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" + ], + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "webrtc_perf_tests", + "resultdb": { + "result_file": "${ISOLATED_OUTDIR}/gtest_output.json", + "result_format": "gtest_json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Windows-11-22000" + } + }, + "test": "webrtc_perf_tests", + "test_id_prefix": "ninja://:webrtc_perf_tests/" } ] }, "win_asan": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10931,10 +11764,10 @@ "os": "Windows-10-19045" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10948,10 +11781,10 @@ "os": "Windows-10-19045" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10965,10 +11798,10 @@ "os": "Windows-10-19045" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -10982,10 +11815,10 @@ "os": "Windows-10-19045" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11000,10 +11833,10 @@ }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11018,10 +11851,10 @@ }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11036,10 +11869,10 @@ }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11053,10 +11886,27 @@ "os": "Windows-10-19045" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Windows-10-19045" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11070,10 +11920,10 @@ "os": "Windows-10-19045" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11087,10 +11937,10 @@ "os": "Windows-10-19045" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11105,10 +11955,10 @@ }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11122,10 +11972,10 @@ "os": "Windows-10-19045" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11140,10 +11990,10 @@ }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11157,10 +12007,10 @@ "os": "Windows-10-19045" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11174,10 +12024,10 @@ "os": "Windows-10-19045" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11191,10 +12041,10 @@ "os": "Windows-10-19045" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { 
"script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11209,10 +12059,10 @@ }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11226,10 +12076,10 @@ "os": "Windows-10-19045" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11243,6 +12093,7 @@ "os": "Windows-10-19045" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -11254,7 +12105,6 @@ "win_x64_clang_dbg": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11268,10 +12118,10 @@ "os": "Windows-10-19045" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11285,10 +12135,10 @@ "os": "Windows-10-19045" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11302,10 +12152,10 @@ "os": "Windows-10-19045" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11319,10 +12169,10 @@ "os": "Windows-10-19045" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11337,10 +12187,10 @@ }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11355,10 +12205,10 @@ }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11373,10 +12223,10 @@ }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11390,10 +12240,27 @@ "os": "Windows-10-19045" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Windows-10-19045" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11407,10 +12274,10 @@ "os": "Windows-10-19045" } }, + "test": 
"rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11424,10 +12291,10 @@ "os": "Windows-10-19045" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11442,10 +12309,10 @@ }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11459,10 +12326,10 @@ "os": "Windows-10-19045" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11477,10 +12344,10 @@ }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11494,10 +12361,10 @@ "os": "Windows-10-19045" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11511,10 +12378,10 @@ "os": "Windows-10-19045" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11528,10 +12395,10 @@ "os": "Windows-10-19045" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11546,10 +12413,10 @@ }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11563,10 +12430,10 @@ "os": "Windows-10-19045" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11580,6 +12447,7 @@ "os": "Windows-10-19045" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -11587,7 +12455,6 @@ "win_x64_clang_rel": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11601,10 +12468,10 @@ "os": "Windows-10-19045" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11618,10 +12485,10 @@ "os": "Windows-10-19045" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ 
-11635,10 +12502,10 @@ "os": "Windows-10-19045" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11652,10 +12519,10 @@ "os": "Windows-10-19045" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11670,10 +12537,10 @@ }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11688,10 +12555,10 @@ }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11706,10 +12573,10 @@ }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11723,10 +12590,27 @@ "os": "Windows-10-19045" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Windows-10-19045" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11740,10 +12624,10 @@ "os": "Windows-10-19045" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11757,10 +12641,10 @@ "os": "Windows-10-19045" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11775,10 +12659,10 @@ }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11792,10 +12676,10 @@ "os": "Windows-10-19045" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11810,10 +12694,10 @@ }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11827,10 +12711,10 @@ "os": "Windows-10-19045" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": 
"//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11844,10 +12728,10 @@ "os": "Windows-10-19045" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11861,10 +12745,10 @@ "os": "Windows-10-19045" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11879,10 +12763,10 @@ }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11896,10 +12780,10 @@ "os": "Windows-10-19045" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11913,6 +12797,7 @@ "os": "Windows-10-19045" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -11920,7 +12805,6 @@ "win_x86_clang_dbg": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11934,10 +12818,10 @@ "os": "Windows-10-19045" } }, + "test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11951,10 +12835,10 @@ "os": "Windows-10-19045" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11968,10 +12852,10 @@ "os": "Windows-10-19045" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -11985,10 +12869,10 @@ "os": "Windows-10-19045" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12003,10 +12887,10 @@ }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12021,10 +12905,10 @@ }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12039,10 +12923,10 @@ }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12056,10 +12940,27 @@ "os": "Windows-10-19045" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - 
"isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Windows-10-19045" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12073,10 +12974,10 @@ "os": "Windows-10-19045" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12090,10 +12991,10 @@ "os": "Windows-10-19045" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12108,10 +13009,10 @@ }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12125,10 +13026,10 @@ "os": "Windows-10-19045" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12143,10 +13044,10 @@ }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12160,10 +13061,10 @@ "os": "Windows-10-19045" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12177,10 +13078,10 @@ "os": "Windows-10-19045" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12194,10 +13095,10 @@ "os": "Windows-10-19045" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12212,10 +13113,10 @@ }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12229,10 +13130,10 @@ "os": "Windows-10-19045" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12246,6 +13147,7 @@ "os": "Windows-10-19045" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" } ] @@ -12253,7 +13155,6 @@ "win_x86_clang_rel": { "isolated_scripts": [ { - "isolate_name": "audio_decoder_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12267,10 +13168,10 @@ "os": "Windows-10-19045" } }, + 
"test": "audio_decoder_unittests", "test_id_prefix": "ninja://modules/audio_coding:audio_decoder_unittests/" }, { - "isolate_name": "common_audio_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12284,10 +13185,10 @@ "os": "Windows-10-19045" } }, + "test": "common_audio_unittests", "test_id_prefix": "ninja://common_audio:common_audio_unittests/" }, { - "isolate_name": "common_video_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12301,10 +13202,10 @@ "os": "Windows-10-19045" } }, + "test": "common_video_unittests", "test_id_prefix": "ninja://common_video:common_video_unittests/" }, { - "isolate_name": "dcsctp_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12318,10 +13219,10 @@ "os": "Windows-10-19045" } }, + "test": "dcsctp_unittests", "test_id_prefix": "ninja://net/dcsctp:dcsctp_unittests/" }, { - "isolate_name": "modules_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12336,10 +13237,10 @@ }, "shards": 2 }, + "test": "modules_tests", "test_id_prefix": "ninja://modules:modules_tests/" }, { - "isolate_name": "modules_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12354,10 +13255,10 @@ }, "shards": 6 }, + "test": "modules_unittests", "test_id_prefix": "ninja://modules:modules_unittests/" }, { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12372,10 +13273,10 @@ }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" }, { - "isolate_name": "rtc_media_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12389,10 +13290,27 @@ "os": "Windows-10-19045" } }, + "test": "rtc_media_unittests", "test_id_prefix": "ninja://media:rtc_media_unittests/" }, { - "isolate_name": "rtc_pc_unittests", + "merge": { + "script": "//testing/merge_scripts/standard_isolated_script_merge.py" + }, + "name": "rtc_p2p_unittests", + "resultdb": { + "result_format": "json" + }, + "swarming": { + "dimensions": { + "cpu": "x86-64", + "os": "Windows-10-19045" + } + }, + "test": "rtc_p2p_unittests", + "test_id_prefix": "ninja://:rtc_p2p_unittests/" + }, + { "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12406,10 +13324,10 @@ "os": "Windows-10-19045" } }, + "test": "rtc_pc_unittests", "test_id_prefix": "ninja://pc:rtc_pc_unittests/" }, { - "isolate_name": "rtc_stats_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12423,10 +13341,10 @@ "os": "Windows-10-19045" } }, + "test": "rtc_stats_unittests", "test_id_prefix": "ninja://stats:rtc_stats_unittests/" }, { - "isolate_name": "rtc_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12441,10 +13359,10 @@ }, "shards": 6 }, + "test": "rtc_unittests", "test_id_prefix": "ninja://:rtc_unittests/" }, { - "isolate_name": "slow_peer_connection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12458,10 +13376,10 @@ "os": "Windows-10-19045" } }, + "test": "slow_peer_connection_unittests", "test_id_prefix": "ninja://pc:slow_peer_connection_unittests/" }, { - "isolate_name": "svc_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" 
}, @@ -12476,10 +13394,10 @@ }, "shards": 4 }, + "test": "svc_tests", "test_id_prefix": "ninja://pc:svc_tests/" }, { - "isolate_name": "system_wrappers_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12493,10 +13411,10 @@ "os": "Windows-10-19045" } }, + "test": "system_wrappers_unittests", "test_id_prefix": "ninja://system_wrappers:system_wrappers_unittests/" }, { - "isolate_name": "test_support_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12510,10 +13428,10 @@ "os": "Windows-10-19045" } }, + "test": "test_support_unittests", "test_id_prefix": "ninja://test:test_support_unittests/" }, { - "isolate_name": "tools_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12527,33 +13445,15 @@ "os": "Windows-10-19045" } }, + "test": "tools_unittests", "test_id_prefix": "ninja://rtc_tools:tools_unittests/" }, - { - "isolate_name": "video_capture_tests", - "merge": { - "script": "//testing/merge_scripts/standard_isolated_script_merge.py" - }, - "name": "video_capture_tests", - "resultdb": { - "result_format": "json" - }, - "swarming": { - "dimensions": { - "cpu": "x86-64", - "os": "Windows-10-19045", - "pool": "WebRTC-baremetal-try" - } - }, - "test_id_prefix": "ninja://modules/video_capture:video_capture_tests/" - }, { "args": [ - "--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/", + "--webrtc_quick_perf_test", "--nologs", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "video_codec_perf_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12568,10 +13468,10 @@ "os": "Windows-10-19045" } }, + "test": "video_codec_perf_tests", "test_id_prefix": "ninja://modules/video_coding:video_codec_perf_tests/" }, { - "isolate_name": "video_engine_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12586,10 +13486,10 @@ }, "shards": 4 }, + "test": "video_engine_tests", "test_id_prefix": "ninja://:video_engine_tests/" }, { - "isolate_name": "voip_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12603,10 +13503,10 @@ "os": "Windows-10-19045" } }, + "test": "voip_unittests", "test_id_prefix": "ninja://:voip_unittests/" }, { - "isolate_name": "webrtc_nonparallel_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12620,15 +13520,15 @@ "os": "Windows-10-19045" } }, + "test": "webrtc_nonparallel_tests", "test_id_prefix": "ninja://:webrtc_nonparallel_tests/" }, { "args": [ - "--force_fieldtrials=WebRTC-QuickPerfTest/Enabled/", + "--webrtc_quick_perf_test", "--nologs", "--gtest_output=json:${ISOLATED_OUTDIR}/gtest_output.json" ], - "isolate_name": "webrtc_perf_tests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12643,6 +13543,7 @@ "os": "Windows-10-19045" } }, + "test": "webrtc_perf_tests", "test_id_prefix": "ninja://:webrtc_perf_tests/" } ] @@ -12650,7 +13551,6 @@ "win_x86_more_configs": { "isolated_scripts": [ { - "isolate_name": "peerconnection_unittests", "merge": { "script": "//testing/merge_scripts/standard_isolated_script_merge.py" }, @@ -12665,6 +13565,7 @@ }, "shards": 4 }, + "test": "peerconnection_unittests", "test_id_prefix": "ninja://pc:peerconnection_unittests/" } ] diff --git a/infra/specs/variants.pyl b/infra/specs/variants.pyl index cf050c671e..46be4051e1 100644 --- a/infra/specs/variants.pyl +++ 
b/infra/specs/variants.pyl @@ -7,43 +7,34 @@ # be found in the AUTHORS file in the root of the source tree. { - 'SIM_IPHONE_X_14_5': { + 'SIM_IPHONE_X_16_4': { 'args': [ '--platform', 'iPhone X', '--version', - '14.5', - ], - 'identifier': 'iPhone X 14.5', - 'mixins': [ - 'xcode_13_main', - 'ios_runtime_cache_14_5', + '16.4', ], + 'identifier': 'iPhone X 16.4', + 'mixins': ['xcode_16_main', 'ios_runtime_cache_16_4'], }, - 'SIM_IPHONE_X_15_5': { + 'SIM_IPHONE_14_17_5': { 'args': [ '--platform', - 'iPhone X', + 'iPhone 14', '--version', - '15.5', - ], - 'identifier': 'iPhone X 15.5', - 'mixins': [ - 'xcode_14_main', - 'ios_runtime_cache_15_5', + '17.5', ], + 'identifier': 'iPhone 14 17.5', + 'mixins': ['xcode_16_main', 'ios_runtime_cache_17_5'], }, - 'SIM_IPHONE_X_16_2': { + 'SIM_IPHONE_15_18_0': { 'args': [ '--platform', - 'iPhone X', + 'iPhone 15', '--version', - '16.2', - ], - 'identifier': 'iPhone X 16.2', - 'mixins': [ - 'xcode_14_main', - 'ios_runtime_cache_16_2', + '18.0', ], + 'identifier': 'iPhone 15 18.0', + 'mixins': ['xcode_16_main', 'ios_runtime_cache_18_0'], }, } diff --git a/infra/specs/waterfalls.pyl b/infra/specs/waterfalls.pyl index 79e8cdd860..dacac43947 100644 --- a/infra/specs/waterfalls.pyl +++ b/infra/specs/waterfalls.pyl @@ -18,7 +18,7 @@ ], 'test_suites': { 'gtest_tests': 'android_tests', - 'junit_tests': 'android_junit_tests', + 'isolated_scripts': 'android_junit_tests', }, }, 'Android32 (dbg)': { @@ -28,7 +28,7 @@ ], 'test_suites': { 'gtest_tests': 'android_tests', - 'junit_tests': 'android_junit_tests', + 'isolated_scripts': 'android_junit_tests', }, }, 'Android32 (more configs)': { @@ -50,7 +50,7 @@ ], 'test_suites': { 'gtest_tests': 'android_tests', - 'junit_tests': 'android_junit_tests', + 'isolated_scripts': 'android_junit_tests', }, }, 'Android64 (dbg)': { @@ -60,7 +60,7 @@ ], 'test_suites': { 'gtest_tests': 'android_tests', - 'junit_tests': 'android_junit_tests', + 'isolated_scripts': 'android_junit_tests', }, }, 'Android64 Builder arm64': {}, @@ -70,7 +70,7 @@ 'os_type': 'linux', 'mixins': [ - 'linux-bionic', 'x86-64', 'fuchsia-gtest-output', + 'linux-jammy', 'x86-64', 'fuchsia-gtest-output', 'resultdb-gtest-json-format' ], 'test_suites': { @@ -79,14 +79,14 @@ }, 'Linux (more configs)': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-jammy', 'x86-64', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'more_configs_tests', }, }, 'Linux Asan': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-focal', 'x86-64', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'linux_tests', }, @@ -103,7 +103,7 @@ }, 'Linux Tsan v2': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-focal', 'x86-64', 'resultdb-json-format'], 'test_suites': { # TODO(crbug.com/webrtc/14568): Using 'linux_tests' # fails on "ThreadSanitizer: data race on vptr (ctor/dtor vs @@ -113,21 +113,21 @@ }, 'Linux UBSan': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-focal', 'x86-64', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'linux_tests', }, }, 'Linux UBSan vptr': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-focal', 'x86-64', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'linux_tests', }, }, 'Linux32 Debug': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 
'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-jammy', 'x86-64', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'desktop_tests', }, @@ -135,7 +135,7 @@ 'Linux32 Debug (ARM)': {}, 'Linux32 Release': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-jammy', 'x86-64', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'desktop_tests', }, @@ -144,7 +144,7 @@ 'Linux64 Builder': {}, 'Linux64 Debug': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-jammy', 'x86-64', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'linux_tests', }, @@ -152,16 +152,16 @@ 'Linux64 Debug (ARM)': {}, 'Linux64 Release': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-jammy', 'x86-64', 'resultdb-json-format'], 'test_suites': { - 'isolated_scripts': 'linux_desktop_tests_with_video_capture', + 'isolated_scripts': 'linux_tests', }, }, 'Linux64 Release (ARM)': {}, 'Linux64 Release (Libfuzzer)': {}, 'Mac Asan': { 'os_type': 'mac', - 'mixins': ['mac_12_x64', 'cores-12', 'resultdb-json-format'], + 'mixins': ['mac_14_x64', 'cores-12', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'desktop_tests', }, @@ -169,23 +169,21 @@ 'Mac64 Builder': {}, 'Mac64 Debug': { 'os_type': 'mac', - 'mixins': ['mac_12_x64', 'cores-12', 'resultdb-json-format'], + 'mixins': ['mac_14_x64', 'cores-12', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'desktop_tests', }, }, 'Mac64 Release': { 'os_type': 'mac', - 'mixins': ['mac_12_x64', 'resultdb-json-format'], + 'mixins': ['mac_14_x64', 'resultdb-json-format'], 'test_suites': { - 'isolated_scripts': 'desktop_tests_with_video_capture', + 'isolated_scripts': 'desktop_tests', }, }, 'MacARM64 M1 Release': { 'os_type': 'mac', - 'mixins': ['mac_12_arm64', 'mac-m1-cpu', 'resultdb-json-format'], - # TODO(b/228171565): Replace desktop_tests by desktop_tests_with_video_capture when - # there is a camera available for the baremetal m1 machines. 
+ 'mixins': ['mac_14_arm64', 'mac-m1-cpu', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'desktop_tests', }, @@ -225,12 +223,12 @@ 'os_type': 'win', 'mixins': ['win10', 'x86-64', 'resultdb-json-format'], 'test_suites': { - 'isolated_scripts': 'desktop_tests_with_video_capture', + 'isolated_scripts': 'desktop_tests', }, }, 'iOS Debug (simulator)': { 'mixins': [ - 'mac_12_x64', 'chromium-tester-service-account', 'mac_toolchain', + 'mac_14_x64', 'chromium-tester-service-account', 'mac_toolchain', 'has_native_resultdb_integration', 'out_dir_arg', 'webrtc-xctest' ], 'test_suites': { @@ -267,7 +265,7 @@ 'os_type': 'linux', 'mixins': [ - 'linux-bionic', 'x86-64', 'perf-pool-vm', 'timeout-3h', + 'linux-jammy', 'x86-64', 'perf-pool-vm', 'timeout-3h', 'resultdb-gtest-json-format', 'fuchsia-gtest-output', 'perf-output' ], 'test_suites': { @@ -324,7 +322,7 @@ 'machines': { 'iOS64 Debug': { 'mixins': [ - 'ios-device-16.6', 'webrtc-xctest', 'chrome-tester-service-account', + 'ios-device-18', 'webrtc-xctest', 'chrome-tester-service-account', 'xcode_15_main', 'mac_toolchain', 'has_native_resultdb_integration', 'out_dir_arg' ], @@ -334,8 +332,8 @@ }, 'iOS64 Perf': { 'mixins': [ - 'ios-device-perf', 'webrtc-xctest', 'timeout-3h', - 'chrome-tester-service-account', 'xcode_13_main', 'mac_toolchain', + 'arm64', 'ios-device-perf', 'webrtc-xctest', 'timeout-3h', + 'chrome-tester-service-account', 'xcode_15_main', 'mac_toolchain', 'has_native_resultdb_integration', 'out_dir_arg' ], 'test_suites': { @@ -347,7 +345,7 @@ }, 'iOS64 Release': { 'mixins': [ - 'ios-device-16.6', 'webrtc-xctest', 'chrome-tester-service-account', + 'ios-device-18', 'webrtc-xctest', 'chrome-tester-service-account', 'xcode_15_main', 'mac_toolchain', 'has_native_resultdb_integration', 'out_dir_arg' ], @@ -368,7 +366,7 @@ ], 'test_suites': { 'gtest_tests': 'android_tests_tryserver', - 'junit_tests': 'android_junit_tests', + 'isolated_scripts': 'android_junit_tests', }, }, 'android_arm64_rel': { @@ -378,7 +376,7 @@ ], 'test_suites': { 'gtest_tests': 'android_tests_tryserver', - 'junit_tests': 'android_junit_tests', + 'isolated_scripts': 'android_junit_tests', }, }, 'android_arm_dbg': { @@ -388,7 +386,7 @@ ], 'test_suites': { 'gtest_tests': 'android_tests_tryserver', - 'junit_tests': 'android_junit_tests', + 'isolated_scripts': 'android_junit_tests', }, }, 'android_arm_more_configs': { @@ -407,7 +405,7 @@ ], 'test_suites': { 'gtest_tests': 'android_tests_tryserver', - 'junit_tests': 'android_junit_tests', + 'isolated_scripts': 'android_junit_tests', }, }, 'android_compile_arm64_dbg': {}, @@ -422,7 +420,7 @@ 'os_type': 'linux', 'mixins': [ - 'linux-bionic', 'x86-64', 'fuchsia-gtest-output', + 'linux-jammy', 'x86-64', 'fuchsia-gtest-output', 'resultdb-gtest-json-format' ], 'test_suites': { @@ -433,16 +431,17 @@ 'ios_compile_arm64_rel': {}, 'ios_dbg_simulator': { 'mixins': [ - 'mac_12_x64', 'chromium-tester-service-account', 'mac_toolchain', + 'mac_14_x64', 'chromium-tester-service-account', 'mac_toolchain', 'has_native_resultdb_integration', 'out_dir_arg', 'webrtc-xctest' ], 'test_suites': { 'isolated_scripts': 'ios_simulator_tests_matrix', }, }, + 'iwyu_verifier': {}, 'linux_asan': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-focal', 'x86-64', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'linux_tests', }, @@ -457,7 +456,7 @@ 'os_type': 'linux', 'mixins': [ - 'linux-bionic', 'x86-64', 'resultdb-json-format', + 'linux-jammy', 'x86-64', 
'resultdb-json-format', 'isolate_profile_data' ], 'test_suites': { @@ -466,22 +465,15 @@ }, 'linux_dbg': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-jammy', 'x86-64', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'linux_tests', }, }, 'linux_libfuzzer_rel': {}, - 'linux_memcheck': { - 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], - 'test_suites': { - 'isolated_scripts': 'linux_tests', - }, - }, 'linux_more_configs': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-jammy', 'x86-64', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'more_configs_tests', }, @@ -498,14 +490,14 @@ }, 'linux_rel': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-jammy', 'x86-64', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'linux_desktop_tests_tryserver', }, }, 'linux_tsan2': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-focal', 'x86-64', 'resultdb-json-format'], 'test_suites': { # TODO(crbug.com/webrtc/14568): Using 'linux_tests' # fails on "ThreadSanitizer: data race on vptr (ctor/dtor vs @@ -515,35 +507,35 @@ }, 'linux_ubsan': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-focal', 'x86-64', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'linux_tests', }, }, 'linux_ubsan_vptr': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-focal', 'x86-64', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'linux_tests', }, }, 'linux_x86_dbg': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-jammy', 'x86-64', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'desktop_tests', }, }, 'linux_x86_rel': { 'os_type': 'linux', - 'mixins': ['linux-bionic', 'x86-64', 'resultdb-json-format'], + 'mixins': ['linux-jammy', 'x86-64', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'desktop_tests', }, }, 'mac_asan': { 'os_type': 'mac', - 'mixins': ['mac_12_x64', 'resultdb-json-format', 'cores-12'], + 'mixins': ['mac_14_x64', 'resultdb-json-format', 'cores-12'], 'test_suites': { 'isolated_scripts': 'desktop_tests', }, @@ -552,34 +544,46 @@ 'mac_compile_rel': {}, 'mac_dbg': { 'os_type': 'mac', - 'mixins': ['mac_12_x64', 'resultdb-json-format', 'cores-12'], + 'mixins': ['mac_14_x64', 'resultdb-json-format', 'cores-12'], 'test_suites': { 'isolated_scripts': 'desktop_tests', }, }, 'mac_dbg_m1': { 'os_type': 'mac', - 'mixins': ['mac_12_arm64', 'mac-m1-cpu', 'resultdb-json-format'], + 'mixins': ['mac_14_arm64', 'mac-m1-cpu', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'desktop_tests', }, }, 'mac_rel': { 'os_type': 'mac', - 'mixins': ['mac_12_x64', 'resultdb-json-format'], + 'mixins': ['mac_14_x64', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'desktop_tests_tryserver', }, }, 'mac_rel_m1': { 'os_type': 'mac', - 'mixins': ['mac_12_arm64', 'mac-m1-cpu', 'resultdb-json-format'], - # TODO(b/228171565): Replace desktop_tests by desktop_tests_tryserver when - # there is a camera available for the baremetal-try m1 machines. 
+ 'mixins': ['mac_14_arm64', 'mac-m1-cpu', 'resultdb-json-format'], 'test_suites': { 'isolated_scripts': 'desktop_tests', }, }, + 'win11_debug': { + 'os_type': 'win', + 'mixins': ['win11', 'x86-64', 'resultdb-json-format'], + 'test_suites': { + 'isolated_scripts': 'desktop_tests', + }, + }, + 'win11_release': { + 'os_type': 'win', + 'mixins': ['win11', 'x86-64', 'resultdb-json-format'], + 'test_suites': { + 'isolated_scripts': 'desktop_tests_tryserver', + }, + }, 'win_asan': { 'os_type': 'win', 'mixins': ['win10', 'x86-64', 'resultdb-json-format'], diff --git a/logging/BUILD.gn b/logging/BUILD.gn index ab1fbbc52b..ea483b29a1 100644 --- a/logging/BUILD.gn +++ b/logging/BUILD.gn @@ -35,8 +35,11 @@ rtc_source_set("rtc_event_log_api") { rtc_source_set("rtc_event_log_parse_status") { sources = [ "rtc_event_log/events/rtc_event_log_parse_status.h" ] - deps = [ "../rtc_base:checks" ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + deps = [ + "../rtc_base:checks", + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/strings:string_view", + ] } rtc_library("rtc_event_field") { @@ -61,10 +64,8 @@ rtc_library("rtc_event_field") { "../rtc_base:bitstream_reader", "../rtc_base:checks", "../rtc_base:logging", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -77,11 +78,12 @@ rtc_library("rtc_stream_config") { deps = [ "../api:rtp_headers", "../api:rtp_parameters", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("rtc_event_pacing") { + visibility = [ "*" ] sources = [ "rtc_event_log/events/rtc_event_alr_state.cc", "rtc_event_log/events/rtc_event_alr_state.h", @@ -89,16 +91,17 @@ rtc_library("rtc_event_pacing") { deps = [ ":rtc_event_field", + ":rtc_event_log_parse_status", + "../api:array_view", "../api/rtc_event_log", "../api/units:timestamp", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } rtc_library("rtc_event_audio") { + visibility = [ "*" ] sources = [ "rtc_event_log/events/rtc_event_audio_network_adaptation.cc", "rtc_event_log/events/rtc_event_audio_network_adaptation.h", @@ -114,19 +117,20 @@ rtc_library("rtc_event_audio") { deps = [ ":rtc_event_field", + ":rtc_event_log_parse_status", ":rtc_stream_config", + "../api:array_view", "../api/rtc_event_log", "../api/units:timestamp", "../modules/audio_coding:audio_network_adaptor_config", "../rtc_base:checks", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } rtc_library("rtc_event_begin_end") { + visibility = [ "*" ] sources = [ "rtc_event_log/events/rtc_event_begin_log.cc", "rtc_event_log/events/rtc_event_begin_log.h", @@ -135,14 +139,16 @@ rtc_library("rtc_event_begin_end") { ] deps = [ ":rtc_event_field", + ":rtc_event_log_parse_status", "../api:array_view", "../api/rtc_event_log", "../api/units:timestamp", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("rtc_event_bwe") { + visibility = [ "*" ] sources = [ "rtc_event_log/events/rtc_event_bwe_update_delay_based.cc", "rtc_event_log/events/rtc_event_bwe_update_delay_based.h", @@ -161,34 +167,34 @@ 
rtc_library("rtc_event_bwe") { deps = [ ":rtc_event_field", - "../api:network_state_predictor_api", + ":rtc_event_log_parse_status", + "../api:array_view", "../api/rtc_event_log", + "../api/transport:bandwidth_usage", "../api/units:data_rate", "../api/units:timestamp", - ] - absl_deps = [ + "../rtc_base:checks", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } rtc_library("rtc_event_frame_events") { + visibility = [ "*" ] sources = [ "rtc_event_log/events/rtc_event_frame_decoded.cc", "rtc_event_log/events/rtc_event_frame_decoded.h", ] deps = [ ":rtc_event_field", + ":rtc_event_log_parse_status", + "../api:array_view", "../api/rtc_event_log", "../api/units:timestamp", "../api/video:video_frame", "../rtc_base:timeutils", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -204,18 +210,18 @@ rtc_library("rtc_event_generic_packet_events") { ] deps = [ ":rtc_event_field", + ":rtc_event_log_parse_status", + "../api:array_view", "../api/rtc_event_log", "../api/units:timestamp", "../rtc_base:timeutils", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } rtc_library("rtc_event_rtp_rtcp") { + visibility = [ "*" ] sources = [ "rtc_event_log/events/logged_rtp_rtcp.h", "rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc", @@ -230,6 +236,7 @@ rtc_library("rtc_event_rtp_rtcp") { deps = [ ":rtc_event_field", + ":rtc_event_log_parse_status", "../api:array_view", "../api:rtp_headers", "../api/rtc_event_log", @@ -237,14 +244,13 @@ rtc_library("rtc_event_rtp_rtcp") { "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:buffer", "../rtc_base:checks", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } rtc_library("rtc_event_video") { + visibility = [ "*" ] sources = [ "rtc_event_log/events/rtc_event_video_receive_stream_config.cc", "rtc_event_log/events/rtc_event_video_receive_stream_config.h", @@ -254,14 +260,14 @@ rtc_library("rtc_event_video") { deps = [ ":rtc_event_field", + ":rtc_event_log_parse_status", ":rtc_stream_config", + "../api:array_view", "../api/rtc_event_log", "../api/units:timestamp", "../rtc_base:checks", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -281,13 +287,8 @@ rtc_library("rtc_event_number_encodings") { "../rtc_base:bit_buffer", "../rtc_base:bitstream_reader", "../rtc_base:checks", - "../rtc_base:ignore_wundef", "../rtc_base:macromagic", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -300,8 +301,8 @@ rtc_library("rtc_event_log_blob_encoding") { ":rtc_event_number_encodings", "../rtc_base:checks", "../rtc_base:logging", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("rtc_event_log_optional_blob_encoding") { @@ -314,10 +315,7 @@ 
rtc_library("rtc_event_log_optional_blob_encoding") { "../rtc_base:bitstream_reader", "../rtc_base:checks", "../rtc_base:logging", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -333,11 +331,8 @@ rtc_library("rtc_event_log_delta_encoding") { "../rtc_base:checks", "../rtc_base:logging", "../rtc_base:safe_conversions", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -348,6 +343,7 @@ rtc_library("rtc_event_log_impl_encoder") { deps = [ ":rtc_event_number_encodings", + "../api:field_trials_view", "../api:rtp_headers", "../api:rtp_parameters", "../api/transport:network_control", @@ -355,15 +351,8 @@ rtc_library("rtc_event_log_impl_encoder") { "../rtc_base:bitstream_reader", "../rtc_base:buffer", "../rtc_base:checks", - "../rtc_base:ignore_wundef", "../rtc_base:logging", "../rtc_base:safe_conversions", - "../system_wrappers:field_trial", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] if (rtc_enable_protobuf) { @@ -386,7 +375,13 @@ rtc_library("rtc_event_log_impl_encoder") { ":rtc_event_video", ":rtc_stream_config", "../api:array_view", - "../api:network_state_predictor_api", + "../api:candidate", + "../api:dtls_transport_interface", + "../api/rtc_event_log:rtc_event_log", + "../api/transport:bandwidth_usage", + "../api/units:timestamp", + "../api/video:video_frame", + "../api/video:video_rtp_headers", "../modules/audio_coding:audio_network_adaptor", "../modules/rtp_rtcp:rtp_rtcp_format", ] @@ -413,13 +408,11 @@ if (rtc_enable_protobuf) { ":rtc_event_log_optional_blob_encoding", ":rtc_event_log_parse_status", ":rtc_event_log_proto", # Why does this need to be included here? + "../api:array_view", "../rtc_base:bitstream_reader", "../rtc_base:checks", "../rtc_base:logging", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } } @@ -427,20 +420,11 @@ if (rtc_enable_protobuf) { if (rtc_enable_protobuf) { rtc_source_set("rtc_event_log2_proto_include") { sources = [ "rtc_event_log/rtc_event_log2_proto_include.h" ] - deps = [ - ":rtc_event_log2_proto", - "../rtc_base:ignore_wundef", - ] + public_deps += # no-presubmit-check TODO(webrtc:8603) + [ ":rtc_event_log2_proto" ] } } -# TODO(bugs.webrtc.org/6463): For backwards compatibility; delete as -# soon as downstream dependencies are updated. 
-rtc_source_set("rtc_event_log_impl_output") { - sources = [ "rtc_event_log/output/rtc_event_log_output_file.h" ] - deps = [ "../api:rtc_event_log_output_file" ] -} - if (rtc_enable_protobuf) { rtc_library("rtc_event_log_impl") { visibility = [ @@ -455,8 +439,10 @@ if (rtc_enable_protobuf) { ":ice_log", ":rtc_event_log_api", ":rtc_event_log_impl_encoder", + "../api:field_trials_view", "../api:libjingle_logging_api", "../api:sequence_checker", + "../api/environment", "../api/rtc_event_log", "../api/task_queue", "../api/units:time_delta", @@ -464,15 +450,12 @@ if (rtc_enable_protobuf) { "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:rtc_event", - "../rtc_base:rtc_task_queue", "../rtc_base:safe_conversions", "../rtc_base:safe_minmax", "../rtc_base:timeutils", + "../rtc_base/synchronization:mutex", "../rtc_base/system:no_unique_address", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } } @@ -487,9 +470,13 @@ rtc_library("fake_rtc_event_log") { ] deps = [ + "../api:libjingle_logging_api", + "../api/environment", "../api/rtc_event_log", + "../api/rtc_event_log:rtc_event_log_factory_interface", "../rtc_base:macromagic", "../rtc_base/synchronization:mutex", + "//third_party/abseil-cpp/absl/base:nullability", ] } @@ -528,39 +515,37 @@ if (rtc_enable_protobuf) { ":rtc_event_log_blob_encoding", ":rtc_event_log_delta_encoding", ":rtc_event_log_impl_encoder", + ":rtc_event_log_parse_status", ":rtc_event_log_proto", ":rtc_event_number_encodings", ":rtc_event_pacing", ":rtc_event_rtp_rtcp", ":rtc_event_video", ":rtc_stream_config", + "../api:candidate", + "../api:dtls_transport_interface", "../api:function_view", - "../api:network_state_predictor_api", "../api:rtp_headers", "../api:rtp_parameters", "../api/rtc_event_log", + "../api/transport:bandwidth_usage", "../api/units:data_rate", "../api/units:time_delta", "../api/units:timestamp", "../api/video:video_frame", - "../call:video_stream_api", "../modules:module_api_public", "../modules/audio_coding:audio_network_adaptor", "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:checks", "../rtc_base:copy_on_write_buffer", - "../rtc_base:ignore_wundef", "../rtc_base:logging", "../rtc_base:protobuf_utils", "../rtc_base:rtc_numerics", "../rtc_base:safe_conversions", "../rtc_base/system:file_wrapper", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -587,12 +572,14 @@ if (rtc_enable_protobuf) { ":dependency_descriptor_encoder_decoder", ":ice_log", ":rtc_event_audio", + ":rtc_event_begin_end", ":rtc_event_bwe", ":rtc_event_field", ":rtc_event_frame_events", ":rtc_event_generic_packet_events", ":rtc_event_log2_proto", ":rtc_event_log2_proto_include", + ":rtc_event_log_api", ":rtc_event_log_blob_encoding", ":rtc_event_log_delta_encoding", ":rtc_event_log_impl", @@ -606,38 +593,44 @@ if (rtc_enable_protobuf) { ":rtc_event_video", ":rtc_stream_config", "../api:array_view", - "../api:network_state_predictor_api", + "../api:candidate", + "../api:dtls_transport_interface", + "../api:field_trials_view", + "../api:libjingle_logging_api", "../api:rtc_event_log_output_file", "../api:rtp_headers", "../api:rtp_parameters", + "../api/environment", + "../api/environment:environment_factory", "../api/rtc_event_log", 
"../api/rtc_event_log:rtc_event_log_factory", - "../api/task_queue:default_task_queue_factory", + "../api/transport:bandwidth_usage", + "../api/units:data_rate", "../api/units:time_delta", "../api/units:timestamp", + "../api/video:video_frame", "../call", "../call:call_interfaces", "../modules/audio_coding:audio_network_adaptor", "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:buffer", "../rtc_base:checks", + "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:random", "../rtc_base:rtc_base_tests_utils", + "../rtc_base:safe_conversions", "../rtc_base:timeutils", "../system_wrappers", - "../system_wrappers:field_trial", - "../test:field_trial", + "../test:explicit_key_value_config", "../test:fileutils", "../test:test_support", "../test/logging:log_writer", "../test/time_controller", "//testing/gtest", - ] - absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -647,6 +640,7 @@ if (rtc_enable_protobuf) { sources = [ "rtc_event_log/rtc_event_log2rtp_dump.cc" ] deps = [ ":rtc_event_log_parser", + ":rtc_event_rtp_rtcp", "../api:array_view", "../api:rtp_headers", "../api/rtc_event_log", @@ -654,13 +648,13 @@ if (rtc_enable_protobuf) { "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:checks", "../rtc_base:protobuf_utils", + "../rtc_base:stringutils", "../test:rtp_test_utils", "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", "//third_party/abseil-cpp/absl/flags:usage", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } } @@ -668,6 +662,7 @@ if (rtc_enable_protobuf) { } rtc_library("ice_log") { + visibility = [ "*" ] sources = [ "rtc_event_log/events/rtc_event_dtls_transport_state.cc", "rtc_event_log/events/rtc_event_dtls_transport_state.h", @@ -683,14 +678,15 @@ rtc_library("ice_log") { deps = [ ":rtc_event_field", + ":rtc_event_log_parse_status", + "../api:array_view", + "../api:candidate", "../api:dtls_transport_interface", "../api:libjingle_logging_api", "../api/rtc_event_log", "../api/units:timestamp", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -702,6 +698,7 @@ if (rtc_include_tests) { "rtc_event_log/mock/mock_rtc_event_log.h", ] deps = [ + "../api:libjingle_logging_api", "../api/rtc_event_log", "../test:test_support", ] diff --git a/logging/rtc_event_log/dependency_descriptor_encoder_decoder.cc b/logging/rtc_event_log/dependency_descriptor_encoder_decoder.cc index c74c8c4b63..9dba1076cb 100644 --- a/logging/rtc_event_log/dependency_descriptor_encoder_decoder.cc +++ b/logging/rtc_event_log/dependency_descriptor_encoder_decoder.cc @@ -10,21 +10,27 @@ #include "logging/rtc_event_log/dependency_descriptor_encoder_decoder.h" +#include +#include +#include #include #include +#include "absl/strings/string_view.h" +#include "api/array_view.h" #include "logging/rtc_event_log/encoder/delta_encoding.h" #include "logging/rtc_event_log/encoder/optional_blob_encoding.h" #include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" #include "logging/rtc_event_log/rtc_event_log2_proto_include.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { // static -absl::optional +std::optional 
RtcEventLogDependencyDescriptorEncoderDecoder::Encode( - const std::vector>& raw_dd_data) { + const std::vector>& raw_dd_data) { if (raw_dd_data.empty()) { return {}; } @@ -37,19 +43,19 @@ RtcEventLogDependencyDescriptorEncoderDecoder::Encode( } rtclog2::DependencyDescriptorsWireInfo res; - const rtc::ArrayView& base_dd = raw_dd_data[0]; + const ArrayView& base_dd = raw_dd_data[0]; auto delta_dds = - rtc::MakeArrayView(raw_dd_data.data(), raw_dd_data.size()).subview(1); + MakeArrayView(raw_dd_data.data(), raw_dd_data.size()).subview(1); // Start and end bit. { - absl::optional start_end_bit; + std::optional start_end_bit; if (!base_dd.empty()) { start_end_bit = (base_dd[0] >> 6); res.set_start_end_bit(*start_end_bit); } if (!delta_dds.empty()) { - std::vector> values(delta_dds.size()); + std::vector> values(delta_dds.size()); for (size_t i = 0; i < delta_dds.size(); ++i) { if (!delta_dds[i].empty()) { values[i] = delta_dds[i][0] >> 6; @@ -64,14 +70,14 @@ RtcEventLogDependencyDescriptorEncoderDecoder::Encode( // Template IDs. { - absl::optional template_id; + std::optional template_id; if (!base_dd.empty()) { template_id = (base_dd[0] & 0b0011'1111); res.set_template_id(*template_id); } if (!delta_dds.empty()) { - std::vector> values(delta_dds.size()); + std::vector> values(delta_dds.size()); for (size_t i = 0; i < delta_dds.size(); ++i) { if (!delta_dds[i].empty()) { values[i] = delta_dds[i][0] & 0b0011'1111; @@ -86,14 +92,14 @@ RtcEventLogDependencyDescriptorEncoderDecoder::Encode( // Frame IDs. { - absl::optional frame_id; + std::optional frame_id; if (!base_dd.empty()) { frame_id = (uint16_t{base_dd[1]} << 8) + base_dd[2]; res.set_frame_id(*frame_id); } if (!delta_dds.empty()) { - std::vector> values(delta_dds.size()); + std::vector> values(delta_dds.size()); for (size_t i = 0; i < delta_dds.size(); ++i) { if (!delta_dds[i].empty()) { values[i] = (uint16_t{delta_dds[i][1]} << 8) + delta_dds[i][2]; @@ -108,7 +114,7 @@ RtcEventLogDependencyDescriptorEncoderDecoder::Encode( // Extended info { - std::vector> values(raw_dd_data.size()); + std::vector> values(raw_dd_data.size()); for (size_t i = 0; i < raw_dd_data.size(); ++i) { if (raw_dd_data[i].size() > 3) { auto extended_info = raw_dd_data[i].subview(3); @@ -137,49 +143,49 @@ RtcEventLogDependencyDescriptorEncoderDecoder::Decode( std::vector> res(num_packets); - absl::optional start_end_bit_base; + std::optional start_end_bit_base; if (dd_wire_info.has_start_end_bit()) { start_end_bit_base = dd_wire_info.start_end_bit(); } - absl::optional template_id_base; + std::optional template_id_base; if (dd_wire_info.has_template_id()) { template_id_base = dd_wire_info.template_id(); } - absl::optional frame_id_base; + std::optional frame_id_base; if (dd_wire_info.has_frame_id()) { frame_id_base = dd_wire_info.frame_id(); } - std::vector> start_end_bit_deltas; + std::vector> start_end_bit_deltas; if (dd_wire_info.has_start_end_bit_deltas()) { start_end_bit_deltas = DecodeDeltas(dd_wire_info.start_end_bit_deltas(), start_end_bit_base, num_packets - 1); RTC_DCHECK(start_end_bit_deltas.empty() || start_end_bit_deltas.size() == (num_packets - 1)); } - std::vector> template_id_deltas; + std::vector> template_id_deltas; if (dd_wire_info.has_template_id_deltas()) { template_id_deltas = DecodeDeltas(dd_wire_info.template_id_deltas(), template_id_base, num_packets - 1); RTC_DCHECK(template_id_deltas.empty() || template_id_deltas.size() == (num_packets - 1)); } - std::vector> frame_id_deltas; + std::vector> frame_id_deltas; if 
(dd_wire_info.has_frame_id_deltas()) { frame_id_deltas = DecodeDeltas(dd_wire_info.frame_id_deltas(), frame_id_base, num_packets - 1); RTC_DCHECK(frame_id_deltas.empty() || frame_id_deltas.size() == (num_packets - 1)); } - std::vector> extended_infos; + std::vector> extended_infos; if (dd_wire_info.has_extended_infos()) { extended_infos = DecodeOptionalBlobs(dd_wire_info.extended_infos(), num_packets); } - auto recreate_raw_dd = [&](int i, const absl::optional& be, - const absl::optional& tid, - const absl::optional& fid) { + auto recreate_raw_dd = [&](int i, const std::optional& be, + const std::optional& tid, + const std::optional& fid) { absl::string_view ext; if (!extended_infos.empty() && extended_infos[i].has_value()) { ext = *extended_infos[i]; diff --git a/logging/rtc_event_log/dependency_descriptor_encoder_decoder.h b/logging/rtc_event_log/dependency_descriptor_encoder_decoder.h index 6729a38b6d..9c914eb0f2 100644 --- a/logging/rtc_event_log/dependency_descriptor_encoder_decoder.h +++ b/logging/rtc_event_log/dependency_descriptor_encoder_decoder.h @@ -11,20 +11,21 @@ #ifndef LOGGING_RTC_EVENT_LOG_DEPENDENCY_DESCRIPTOR_ENCODER_DECODER_H_ #define LOGGING_RTC_EVENT_LOG_DEPENDENCY_DESCRIPTOR_ENCODER_DECODER_H_ +#include +#include +#include #include -#include "absl/types/optional.h" +#include "api/array_view.h" #include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" #include "logging/rtc_event_log/rtc_event_log2_proto_include.h" -#include "rtc_base/bitstream_reader.h" -#include "rtc_base/checks.h" namespace webrtc { class RtcEventLogDependencyDescriptorEncoderDecoder { public: - static absl::optional Encode( - const std::vector>& raw_dd_data); + static std::optional Encode( + const std::vector>& raw_dd_data); static RtcEventLogParseStatusOr>> Decode( const rtclog2::DependencyDescriptorsWireInfo& dd_wire_info, size_t num_packets); diff --git a/logging/rtc_event_log/dependency_descriptor_encoder_decoder_unittest.cc b/logging/rtc_event_log/dependency_descriptor_encoder_decoder_unittest.cc index 04c7147e28..90f608e4d5 100644 --- a/logging/rtc_event_log/dependency_descriptor_encoder_decoder_unittest.cc +++ b/logging/rtc_event_log/dependency_descriptor_encoder_decoder_unittest.cc @@ -10,12 +10,14 @@ #include "logging/rtc_event_log/dependency_descriptor_encoder_decoder.h" +#include #include #include #include "logging/rtc_event_log/encoder/delta_encoding.h" #include "logging/rtc_event_log/encoder/optional_blob_encoding.h" #include "logging/rtc_event_log/rtc_event_log2_proto_include.h" +#include "rtc_base/checks.h" #include "test/gmock.h" #include "test/gtest.h" diff --git a/logging/rtc_event_log/encoder/bit_writer.cc b/logging/rtc_event_log/encoder/bit_writer.cc index e8748d3db3..fd77d3d962 100644 --- a/logging/rtc_event_log/encoder/bit_writer.cc +++ b/logging/rtc_event_log/encoder/bit_writer.cc @@ -10,6 +10,15 @@ #include "logging/rtc_event_log/encoder/bit_writer.h" +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "rtc_base/checks.h" + namespace webrtc { namespace { diff --git a/logging/rtc_event_log/encoder/bit_writer.h b/logging/rtc_event_log/encoder/bit_writer.h index 421e7c4370..449c07fa24 100644 --- a/logging/rtc_event_log/encoder/bit_writer.h +++ b/logging/rtc_event_log/encoder/bit_writer.h @@ -15,7 +15,6 @@ #include #include -#include #include "absl/strings/string_view.h" #include "rtc_base/bit_buffer.h" @@ -48,7 +47,7 @@ class BitWriter final { private: std::string buffer_; - rtc::BitBufferWriter bit_writer_; + 
BitBufferWriter bit_writer_; // Note: Counting bits instead of bytes wraps around earlier than it has to, // which means the maximum length is lower than it could be. We don't expect // to go anywhere near the limit, though, so this is good enough. diff --git a/logging/rtc_event_log/encoder/blob_encoding.cc b/logging/rtc_event_log/encoder/blob_encoding.cc index 96699dc96a..e2417ad248 100644 --- a/logging/rtc_event_log/encoder/blob_encoding.cc +++ b/logging/rtc_event_log/encoder/blob_encoding.cc @@ -10,8 +10,13 @@ #include "logging/rtc_event_log/encoder/blob_encoding.h" +#include #include +#include +#include +#include +#include "absl/strings/string_view.h" #include "logging/rtc_event_log/encoder/var_int.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" diff --git a/logging/rtc_event_log/encoder/blob_encoding_unittest.cc b/logging/rtc_event_log/encoder/blob_encoding_unittest.cc index a25923f22d..0d8517170a 100644 --- a/logging/rtc_event_log/encoder/blob_encoding_unittest.cc +++ b/logging/rtc_event_log/encoder/blob_encoding_unittest.cc @@ -10,9 +10,11 @@ #include "logging/rtc_event_log/encoder/blob_encoding.h" +#include #include #include +#include "absl/strings/string_view.h" #include "logging/rtc_event_log/encoder/var_int.h" #include "rtc_base/checks.h" #include "test/gtest.h" diff --git a/logging/rtc_event_log/encoder/delta_encoding.cc b/logging/rtc_event_log/encoder/delta_encoding.cc index c80424574c..70c488c0e7 100644 --- a/logging/rtc_event_log/encoder/delta_encoding.cc +++ b/logging/rtc_event_log/encoder/delta_encoding.cc @@ -11,19 +11,21 @@ #include "logging/rtc_event_log/encoder/delta_encoding.h" #include +#include +#include #include #include -#include +#include +#include +#include #include "absl/memory/memory.h" #include "absl/strings/string_view.h" #include "logging/rtc_event_log/encoder/bit_writer.h" #include "logging/rtc_event_log/encoder/var_int.h" -#include "rtc_base/bit_buffer.h" #include "rtc_base/bitstream_reader.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_conversions.h" namespace webrtc { namespace { @@ -113,8 +115,7 @@ constexpr uint64_t kDefaultValueWidthBits = 64; class FixedLengthEncodingParameters final { public: static bool ValidParameters(uint64_t delta_width_bits, - bool signed_deltas, - bool values_optional, + bool /* signed_deltas */, uint64_t value_width_bits) { return (1 <= delta_width_bits && delta_width_bits <= 64 && 1 <= value_width_bits && value_width_bits <= 64 && @@ -131,8 +132,8 @@ class FixedLengthEncodingParameters final { value_width_bits_(value_width_bits), delta_mask_(MaxUnsignedValueOfBitWidth(delta_width_bits_)), value_mask_(MaxUnsignedValueOfBitWidth(value_width_bits_)) { - RTC_DCHECK(ValidParameters(delta_width_bits, signed_deltas, values_optional, - value_width_bits)); + RTC_DCHECK( + ValidParameters(delta_width_bits, signed_deltas, value_width_bits)); } // Number of bits necessary to hold the widest(*) of the deltas between the @@ -184,8 +185,8 @@ class FixedLengthDeltaEncoder final { // therefore be decoded by FixedLengthDeltaDecoder, or whether it was produced // by a different encoder. 
static std::string EncodeDeltas( - absl::optional base, - const std::vector>& values); + std::optional base, + const std::vector>& values); FixedLengthDeltaEncoder(const FixedLengthDeltaEncoder&) = delete; FixedLengthDeltaEncoder& operator=(const FixedLengthDeltaEncoder&) = delete; @@ -194,8 +195,8 @@ class FixedLengthDeltaEncoder final { // Calculate min/max values of unsigned/signed deltas, given the bit width // of all the values in the series. static void CalculateMinAndMaxDeltas( - absl::optional base, - const std::vector>& values, + std::optional base, + const std::vector>& values, uint64_t bit_width, uint64_t* max_unsigned_delta, uint64_t* max_pos_signed_delta, @@ -213,8 +214,8 @@ class FixedLengthDeltaEncoder final { // Therefore, it was deemed acceptable to let them have a reference to // `values`, whose lifetime must exceed the lifetime of `this`. FixedLengthDeltaEncoder(const FixedLengthEncodingParameters& params, - absl::optional base, - const std::vector>& values, + std::optional base, + const std::vector>& values, size_t existent_values_count); // Perform delta-encoding using the parameters given to the ctor on the @@ -241,11 +242,11 @@ class FixedLengthDeltaEncoder final { // The encoding scheme assumes that at least one value is transmitted OOB, // so that the first value can be encoded as a delta from that OOB value, // which is `base_`. - const absl::optional base_; + const std::optional base_; // The values to be encoded. // Note: This is a non-owning reference. See comment above ctor for details. - const std::vector>& values_; + const std::vector>& values_; // Buffer into which encoded values will be written. // This is created dynmically as a way to enforce that the rest of the @@ -256,8 +257,8 @@ class FixedLengthDeltaEncoder final { // TODO(eladalon): Reduce the number of passes. std::string FixedLengthDeltaEncoder::EncodeDeltas( - absl::optional base, - const std::vector>& values) { + std::optional base, + const std::vector>& values) { RTC_DCHECK(!values.empty()); // As a special case, if all of the elements are identical to the base, @@ -265,7 +266,7 @@ std::string FixedLengthDeltaEncoder::EncodeDeltas( // the empty string is used to signal that. if (std::all_of( values.cbegin(), values.cend(), - [base](absl::optional val) { return val == base; })) { + [base](std::optional val) { return val == base; })) { return std::string(); } @@ -323,8 +324,8 @@ std::string FixedLengthDeltaEncoder::EncodeDeltas( } void FixedLengthDeltaEncoder::CalculateMinAndMaxDeltas( - absl::optional base, - const std::vector>& values, + std::optional base, + const std::vector>& values, uint64_t bit_width, uint64_t* max_unsigned_delta_out, uint64_t* max_pos_signed_delta_out, @@ -340,7 +341,7 @@ void FixedLengthDeltaEncoder::CalculateMinAndMaxDeltas( uint64_t max_pos_signed_delta = 0; uint64_t min_neg_signed_delta = 0; - absl::optional prev = base; + std::optional prev = base; for (size_t i = 0; i < values.size(); ++i) { if (!values[i].has_value()) { continue; @@ -392,8 +393,8 @@ void FixedLengthDeltaEncoder::ConsiderTestOverrides( FixedLengthDeltaEncoder::FixedLengthDeltaEncoder( const FixedLengthEncodingParameters& params, - absl::optional base, - const std::vector>& values, + std::optional base, + const std::vector>& values, size_t existent_values_count) : params_(params), base_(base), values_(values) { RTC_DCHECK(!values_.empty()); @@ -406,13 +407,13 @@ std::string FixedLengthDeltaEncoder::Encode() { if (params_.values_optional()) { // Encode which values exist and which don't. 
- for (absl::optional value : values_) { + for (std::optional value : values_) { writer_->WriteBits(value.has_value() ? 1u : 0u, 1); } } - absl::optional previous = base_; - for (absl::optional value : values_) { + std::optional previous = base_; + for (std::optional value : values_) { if (!value.has_value()) { RTC_DCHECK(params_.values_optional()); continue; @@ -457,7 +458,7 @@ size_t FixedLengthDeltaEncoder::EncodedDeltasLengthBits( return values_.size() * params_.delta_width_bits(); } else { RTC_DCHECK_EQ(std::count_if(values_.begin(), values_.end(), - [](absl::optional val) { + [](std::optional val) { return val.has_value(); }), existent_values_count); @@ -562,9 +563,9 @@ class FixedLengthDeltaDecoder final { // original values, this will return the sequence of original values. // If an error occurs (can happen if `input` is corrupt), an empty // vector will be returned. - static std::vector> DecodeDeltas( + static std::vector> DecodeDeltas( absl::string_view input, - absl::optional base, + std::optional base, size_t num_of_deltas); FixedLengthDeltaDecoder(const FixedLengthDeltaDecoder&) = delete; @@ -581,7 +582,7 @@ class FixedLengthDeltaDecoder final { // examined and guaranteed. static std::unique_ptr Create( absl::string_view input, - absl::optional base, + std::optional base, size_t num_of_deltas); // FixedLengthDeltaDecoder objects are to be created by DecodeDeltas() and @@ -592,11 +593,11 @@ class FixedLengthDeltaDecoder final { // of `reader`'s underlying buffer. FixedLengthDeltaDecoder(BitstreamReader reader, const FixedLengthEncodingParameters& params, - absl::optional base, + std::optional base, size_t num_of_deltas); // Perform the decoding using the parameters given to the ctor. - std::vector> Decode(); + std::vector> Decode(); // Add `delta` to `base` to produce the next value in a sequence. // The delta is applied as signed/unsigned depending on the parameters @@ -619,7 +620,7 @@ class FixedLengthDeltaDecoder final { // The encoding scheme assumes that at least one value is transmitted OOB, // so that the first value can be encoded as a delta from that OOB value, // which is `base_`. - const absl::optional base_; + const std::optional base_; // The number of values to be known to be decoded. const size_t num_of_deltas_; @@ -639,13 +640,13 @@ bool FixedLengthDeltaDecoder::IsSuitableDecoderFor(absl::string_view input) { EncodingType::kFixedSizeSignedDeltasEarlyWrapAndOptSupported; } -std::vector> FixedLengthDeltaDecoder::DecodeDeltas( +std::vector> FixedLengthDeltaDecoder::DecodeDeltas( absl::string_view input, - absl::optional base, + std::optional base, size_t num_of_deltas) { auto decoder = FixedLengthDeltaDecoder::Create(input, base, num_of_deltas); if (!decoder) { - return std::vector>(); + return std::vector>(); } return decoder->Decode(); @@ -653,7 +654,7 @@ std::vector> FixedLengthDeltaDecoder::DecodeDeltas( std::unique_ptr FixedLengthDeltaDecoder::Create( absl::string_view input, - absl::optional base, + std::optional base, size_t num_of_deltas) { BitstreamReader reader(input); // Encoding type @@ -699,7 +700,7 @@ std::unique_ptr FixedLengthDeltaDecoder::Create( // for illegal values to be read. We check nevertheless, in case the code // changes in the future in a way that breaks this promise. 
if (!FixedLengthEncodingParameters::ValidParameters( - delta_width_bits, signed_deltas, values_optional, value_width_bits)) { + delta_width_bits, signed_deltas, value_width_bits)) { RTC_LOG(LS_WARNING) << "Corrupt log; illegal encoding parameters."; return nullptr; } @@ -713,7 +714,7 @@ std::unique_ptr FixedLengthDeltaDecoder::Create( FixedLengthDeltaDecoder::FixedLengthDeltaDecoder( BitstreamReader reader, const FixedLengthEncodingParameters& params, - absl::optional base, + std::optional base, size_t num_of_deltas) : reader_(reader), params_(params), @@ -722,7 +723,7 @@ FixedLengthDeltaDecoder::FixedLengthDeltaDecoder( RTC_DCHECK(reader_.Ok()); } -std::vector> FixedLengthDeltaDecoder::Decode() { +std::vector> FixedLengthDeltaDecoder::Decode() { RTC_DCHECK(reader_.Ok()); std::vector existing_values(num_of_deltas_); if (params_.values_optional()) { @@ -733,8 +734,8 @@ std::vector> FixedLengthDeltaDecoder::Decode() { std::fill(existing_values.begin(), existing_values.end(), true); } - absl::optional previous = base_; - std::vector> values(num_of_deltas_); + std::optional previous = base_; + std::vector> values(num_of_deltas_); for (size_t i = 0; i < num_of_deltas_; ++i) { if (!existing_values[i]) { @@ -798,22 +799,21 @@ uint64_t FixedLengthDeltaDecoder::ApplySignedDelta(uint64_t base, } // namespace -std::string EncodeDeltas(absl::optional base, - const std::vector>& values) { +std::string EncodeDeltas(std::optional base, + const std::vector>& values) { // TODO(eladalon): Support additional encodings. return FixedLengthDeltaEncoder::EncodeDeltas(base, values); } -std::vector> DecodeDeltas( - absl::string_view input, - absl::optional base, - size_t num_of_deltas) { +std::vector> DecodeDeltas(absl::string_view input, + std::optional base, + size_t num_of_deltas) { RTC_DCHECK_GT(num_of_deltas, 0); // Allows empty vector to indicate error. // The empty string is a special case indicating that all values were equal // to the base. if (input.empty()) { - std::vector> result(num_of_deltas); + std::vector> result(num_of_deltas); std::fill(result.begin(), result.end(), base); return result; } @@ -823,7 +823,7 @@ std::vector> DecodeDeltas( } RTC_LOG(LS_WARNING) << "Could not decode delta-encoded stream."; - return std::vector>(); + return std::vector>(); } void SetFixedLengthEncoderDeltaSignednessForTesting(bool signedness) { diff --git a/logging/rtc_event_log/encoder/delta_encoding.h b/logging/rtc_event_log/encoder/delta_encoding.h index 779cdc6b2f..d097de761f 100644 --- a/logging/rtc_event_log/encoder/delta_encoding.h +++ b/logging/rtc_event_log/encoder/delta_encoding.h @@ -14,11 +14,11 @@ #include #include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" namespace webrtc { @@ -29,8 +29,8 @@ namespace webrtc { // be provided separately to the decoder. // This function never fails. // TODO(eladalon): Split into optional and non-optional variants (efficiency). -std::string EncodeDeltas(absl::optional base, - const std::vector>& values); +std::string EncodeDeltas(std::optional base, + const std::vector>& values); // EncodeDeltas() and DecodeDeltas() are inverse operations; // invoking DecodeDeltas() over the output of EncodeDeltas(), will return @@ -39,10 +39,9 @@ std::string EncodeDeltas(absl::optional base, // of `num_of_deltas` elements based on `base`, the function returns an empty // vector, which signals an error. // TODO(eladalon): Split into optional and non-optional variants (efficiency). 
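For reference, a minimal round-trip sketch of the EncodeDeltas()/DecodeDeltas() API documented above, using the std::optional-based signatures this patch introduces; the wrapper function and the concrete values are illustrative assumptions, not part of the change:

#include <cstdint>
#include <optional>
#include <string>
#include <vector>

#include "logging/rtc_event_log/encoder/delta_encoding.h"
#include "rtc_base/checks.h"

namespace webrtc {

void DeltaEncodingRoundTripSketch() {
  // `base` is transmitted out-of-band; each later value is stored as a
  // (possibly wrapping) delta from its predecessor.
  const std::optional<uint64_t> base = 1000;
  const std::vector<std::optional<uint64_t>> values = {
      1001, std::nullopt /* value missing for this event */, 1010};

  // Never fails; returns the empty string when every value equals `base`.
  const std::string encoded = EncodeDeltas(base, values);

  // An empty result vector signals corrupt input; otherwise the original
  // sequence, including the missing entries, is reproduced.
  const std::vector<std::optional<uint64_t>> decoded =
      DecodeDeltas(encoded, base, values.size());
  RTC_CHECK(decoded == values);
}

}  // namespace webrtc

The wrap-around tests further down in this patch rely on the same contract: deltas are applied modulo 2^64, so a sequence may legally wrap past std::numeric_limits<uint64_t>::max().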
-std::vector> DecodeDeltas( - absl::string_view input, - absl::optional base, - size_t num_of_deltas); +std::vector> DecodeDeltas(absl::string_view input, + std::optional base, + size_t num_of_deltas); } // namespace webrtc diff --git a/logging/rtc_event_log/encoder/delta_encoding_unittest.cc b/logging/rtc_event_log/encoder/delta_encoding_unittest.cc index d0f7fb93db..6f46ff9c3c 100644 --- a/logging/rtc_event_log/encoder/delta_encoding_unittest.cc +++ b/logging/rtc_event_log/encoder/delta_encoding_unittest.cc @@ -11,13 +11,16 @@ #include "logging/rtc_event_log/encoder/delta_encoding.h" #include +#include +#include +#include #include #include +#include #include #include #include -#include "absl/types/optional.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" #include "rtc_base/random.h" @@ -68,53 +71,52 @@ uint64_t RandomWithMaxBitWidth(Random* prng, uint64_t max_width) { // that it is equal to the original input. // If `encoded_string` is non-null, the encoded result will also be written // into it. -void TestEncodingAndDecoding( - absl::optional base, - const std::vector>& values, - std::string* encoded_string = nullptr) { +void TestEncodingAndDecoding(std::optional base, + const std::vector>& values, + std::string* encoded_string = nullptr) { const std::string encoded = EncodeDeltas(base, values); if (encoded_string) { *encoded_string = encoded; } - const std::vector> decoded = + const std::vector> decoded = DecodeDeltas(encoded, base, values.size()); EXPECT_EQ(decoded, values); } -std::vector> CreateSequenceByFirstValue( +std::vector> CreateSequenceByFirstValue( uint64_t first, size_t sequence_length) { - std::vector> sequence(sequence_length); + std::vector> sequence(sequence_length); std::iota(sequence.begin(), sequence.end(), first); return sequence; } -std::vector> CreateSequenceByLastValue( +std::vector> CreateSequenceByLastValue( uint64_t last, size_t num_values) { const uint64_t first = last - num_values + 1; - std::vector> result(num_values); + std::vector> result(num_values); std::iota(result.begin(), result.end(), first); return result; } // If `sequence_length` is greater than the number of deltas, the sequence of // deltas will wrap around. -std::vector> CreateSequenceByOptionalDeltas( +std::vector> CreateSequenceByOptionalDeltas( uint64_t first, - const std::vector>& deltas, + const std::vector>& deltas, size_t sequence_length) { RTC_DCHECK_GE(sequence_length, 1); - std::vector> sequence(sequence_length); + std::vector> sequence(sequence_length); uint64_t previous = first; for (size_t i = 0, next_delta_index = 0; i < sequence.size(); ++i) { if (deltas[next_delta_index].has_value()) { sequence[i] = - absl::optional(previous + deltas[next_delta_index].value()); + std::optional(previous + deltas[next_delta_index].value()); previous = sequence[i].value(); } next_delta_index = (next_delta_index + 1) % deltas.size(); @@ -126,7 +128,7 @@ std::vector> CreateSequenceByOptionalDeltas( size_t EncodingLengthUpperBound(size_t delta_max_bit_width, size_t num_of_deltas, DeltaSignedness signedness_override) { - absl::optional smallest_header_size_bytes; + std::optional smallest_header_size_bytes; switch (signedness_override) { case DeltaSignedness::kNoOverride: case DeltaSignedness::kForceUnsigned: @@ -143,14 +145,14 @@ size_t EncodingLengthUpperBound(size_t delta_max_bit_width, // If `sequence_length` is greater than the number of deltas, the sequence of // deltas will wrap around. 
-std::vector> CreateSequenceByDeltas( +std::vector> CreateSequenceByDeltas( uint64_t first, const std::vector& deltas, size_t sequence_length) { RTC_DCHECK(!deltas.empty()); - std::vector> optional_deltas(deltas.size()); + std::vector> optional_deltas(deltas.size()); for (size_t i = 0; i < deltas.size(); ++i) { - optional_deltas[i] = absl::optional(deltas[i]); + optional_deltas[i] = std::optional(deltas[i]); } return CreateSequenceByOptionalDeltas(first, optional_deltas, sequence_length); @@ -187,8 +189,8 @@ class DeltaEncodingTest }; TEST_P(DeltaEncodingTest, AllValuesEqualToExistentBaseValue) { - const absl::optional base(3432); - std::vector> values(num_of_values_); + const std::optional base(3432); + std::vector> values(num_of_values_); std::fill(values.begin(), values.end(), base); std::string encoded; TestEncodingAndDecoding(base, values, &encoded); @@ -203,8 +205,8 @@ TEST_P(DeltaEncodingTest, AllValuesEqualToNonExistentBaseValue) { return; // Test irrelevant for this case. } - const absl::optional base; - std::vector> values(num_of_values_); + const std::optional base; + std::vector> values(num_of_values_); std::fill(values.begin(), values.end(), base); std::string encoded; TestEncodingAndDecoding(base, values, &encoded); @@ -219,8 +221,8 @@ TEST_P(DeltaEncodingTest, BaseNonExistentButSomeOtherValuesExist) { return; // Test irrelevant for this case. } - const absl::optional base; - std::vector> values(num_of_values_); + const std::optional base; + std::vector> values(num_of_values_); Random prng(Seed()); @@ -245,7 +247,7 @@ TEST_P(DeltaEncodingTest, BaseNonExistentButSomeOtherValuesExist) { } TEST_P(DeltaEncodingTest, MinDeltaNoWrapAround) { - const absl::optional base(3432); + const std::optional base(3432); auto values = CreateSequenceByFirstValue(base.value() + 1, num_of_values_); ASSERT_GT(values[values.size() - 1], base) << "Sanity; must not wrap around"; @@ -253,7 +255,7 @@ TEST_P(DeltaEncodingTest, MinDeltaNoWrapAround) { if (optional_values_) { // Arbitrarily make one of the values non-existent, to force // optional-supporting encoding. - values[0] = absl::optional(); + values[0] = std::optional(); } TestEncodingAndDecoding(base, values); @@ -261,7 +263,7 @@ TEST_P(DeltaEncodingTest, MinDeltaNoWrapAround) { TEST_P(DeltaEncodingTest, BigDeltaNoWrapAround) { const uint64_t kBigDelta = 132828; - const absl::optional base(3432); + const std::optional base(3432); auto values = CreateSequenceByFirstValue(base.value() + kBigDelta, num_of_values_); @@ -270,14 +272,14 @@ TEST_P(DeltaEncodingTest, BigDeltaNoWrapAround) { if (optional_values_) { // Arbitrarily make one of the values non-existent, to force // optional-supporting encoding. - values[0] = absl::optional(); + values[0] = std::optional(); } TestEncodingAndDecoding(base, values); } TEST_P(DeltaEncodingTest, MaxDeltaNoWrapAround) { - const absl::optional base(3432); + const std::optional base(3432); auto values = CreateSequenceByLastValue(std::numeric_limits::max(), num_of_values_); @@ -286,7 +288,7 @@ TEST_P(DeltaEncodingTest, MaxDeltaNoWrapAround) { if (optional_values_) { // Arbitrarily make one of the values non-existent, to force // optional-supporting encoding. 
- values[0] = absl::optional(); + values[0] = std::optional(); } TestEncodingAndDecoding(base, values); @@ -297,7 +299,7 @@ TEST_P(DeltaEncodingTest, SmallDeltaWithWrapAroundComparedToBase) { return; // Inapplicable } - const absl::optional base(std::numeric_limits::max()); + const std::optional base(std::numeric_limits::max()); auto values = CreateSequenceByDeltas(*base, {1, 10, 3}, num_of_values_); ASSERT_LT(values[0], base) << "Sanity; must wrap around"; @@ -305,7 +307,7 @@ TEST_P(DeltaEncodingTest, SmallDeltaWithWrapAroundComparedToBase) { if (optional_values_) { // Arbitrarily make one of the values non-existent, to force // optional-supporting encoding. - values[1] = absl::optional(); + values[1] = std::optional(); } TestEncodingAndDecoding(base, values); @@ -316,7 +318,7 @@ TEST_P(DeltaEncodingTest, SmallDeltaWithWrapAroundInValueSequence) { return; // Inapplicable. } - const absl::optional base(std::numeric_limits::max() - 2); + const std::optional base(std::numeric_limits::max() - 2); auto values = CreateSequenceByDeltas(*base, {1, 10, 3}, num_of_values_); ASSERT_LT(values[values.size() - 1], values[0]) << "Sanity; must wrap around"; @@ -325,7 +327,7 @@ TEST_P(DeltaEncodingTest, SmallDeltaWithWrapAroundInValueSequence) { // Arbitrarily make one of the values non-existent, to force // optional-supporting encoding. RTC_DCHECK_GT(values.size() - 1, 1u); // Wrap around not cancelled. - values[1] = absl::optional(); + values[1] = std::optional(); } TestEncodingAndDecoding(base, values); @@ -342,8 +344,8 @@ TEST_P(DeltaEncodingTest, BigDeltaWithWrapAroundComparedToBase) { } const uint64_t kBigDelta = 132828; - const absl::optional base(std::numeric_limits::max() - - kBigDelta + 3); + const std::optional base(std::numeric_limits::max() - + kBigDelta + 3); auto values = CreateSequenceByFirstValue(base.value() + kBigDelta, num_of_values_); @@ -352,7 +354,7 @@ TEST_P(DeltaEncodingTest, BigDeltaWithWrapAroundComparedToBase) { if (optional_values_) { // Arbitrarily make one of the values non-existent, to force // optional-supporting encoding. - values[1] = absl::optional(); + values[1] = std::optional(); } TestEncodingAndDecoding(base, values); @@ -364,8 +366,8 @@ TEST_P(DeltaEncodingTest, BigDeltaWithWrapAroundInValueSequence) { } const uint64_t kBigDelta = 132828; - const absl::optional base(std::numeric_limits::max() - - kBigDelta + 3); + const std::optional base(std::numeric_limits::max() - + kBigDelta + 3); auto values = CreateSequenceByFirstValue(std::numeric_limits::max(), num_of_values_); @@ -375,7 +377,7 @@ TEST_P(DeltaEncodingTest, BigDeltaWithWrapAroundInValueSequence) { // Arbitrarily make one of the values non-existent, to force // optional-supporting encoding. RTC_DCHECK_GT(values.size() - 1, 1u); // Wrap around not cancelled. - values[1] = absl::optional(); + values[1] = std::optional(); } TestEncodingAndDecoding(base, values); @@ -389,13 +391,13 @@ TEST_P(DeltaEncodingTest, MaxDeltaWithWrapAroundComparedToBase) { return; // Inapplicable } - const absl::optional base(3432); + const std::optional base(3432); auto values = CreateSequenceByFirstValue(*base - 1, num_of_values_); if (optional_values_) { // Arbitrarily make one of the values non-existent, to force // optional-supporting encoding. - values[1] = absl::optional(); + values[1] = std::optional(); } TestEncodingAndDecoding(base, values); @@ -406,7 +408,7 @@ TEST_P(DeltaEncodingTest, MaxDeltaWithWrapAroundInValueSequence) { return; // Inapplicable. 
} - const absl::optional base(3432); + const std::optional base(3432); auto values = CreateSequenceByDeltas( *base, {0, std::numeric_limits::max(), 3}, num_of_values_); @@ -416,7 +418,7 @@ TEST_P(DeltaEncodingTest, MaxDeltaWithWrapAroundInValueSequence) { // Arbitrarily make one of the values non-existent, to force // optional-supporting encoding. RTC_DCHECK_GT(values.size() - 1, 1u); // Wrap around not cancelled. - values[1] = absl::optional(); + values[1] = std::optional(); } TestEncodingAndDecoding(base, values); @@ -426,7 +428,7 @@ TEST_P(DeltaEncodingTest, MaxDeltaWithWrapAroundInValueSequence) { // already covered by AllValuesEqualToExistentBaseValue, but it doesn't hurt to // test again. For all other cases, we have a new test. TEST_P(DeltaEncodingTest, ZeroDelta) { - const absl::optional base(3432); + const std::optional base(3432); // Arbitrary sequence of deltas with intentional zero deltas, as well as // consecutive zeros. @@ -437,7 +439,7 @@ TEST_P(DeltaEncodingTest, ZeroDelta) { if (optional_values_) { // Arbitrarily make one of the values non-existent, to force // optional-supporting encoding. - values[0] = absl::optional(); + values[0] = std::optional(); } TestEncodingAndDecoding(base, values); @@ -601,10 +603,10 @@ class DeltaEncodingFuzzerLikeTest }; TEST_P(DeltaEncodingFuzzerLikeTest, Test) { - const absl::optional base(3432); + const std::optional base(3432); Random prng(Seed()); - std::vector> deltas(num_of_values_); + std::vector> deltas(num_of_values_); for (size_t i = 0; i < deltas.size(); ++i) { if (!optional_values_ || prng.Rand()) { deltas[i] = RandomWithMaxBitWidth(&prng, delta_max_bit_width_); @@ -643,10 +645,10 @@ class DeltaEncodingSpecificEdgeCasesTest TEST_F(DeltaEncodingSpecificEdgeCasesTest, SignedDeltaWithOnlyTopBitOn) { MaybeSetSignedness(DeltaSignedness::kForceSigned); - const absl::optional base(3432); + const std::optional base(3432); const uint64_t delta = static_cast(1) << 63; - const std::vector> values = {base.value() + delta}; + const std::vector> values = {base.value() + delta}; TestEncodingAndDecoding(base, values); } @@ -654,9 +656,9 @@ TEST_F(DeltaEncodingSpecificEdgeCasesTest, SignedDeltaWithOnlyTopBitOn) { TEST_F(DeltaEncodingSpecificEdgeCasesTest, MaximumUnsignedDelta) { MaybeSetSignedness(DeltaSignedness::kForceUnsigned); - const absl::optional base((static_cast(1) << 63) + 0x123); + const std::optional base((static_cast(1) << 63) + 0x123); - const std::vector> values = {base.value() - 1}; + const std::vector> values = {base.value() - 1}; TestEncodingAndDecoding(base, values); } @@ -672,7 +674,7 @@ TEST_P(DeltaEncodingSpecificEdgeCasesTest, ReverseSequence) { : ((static_cast(1) << width) - 1); const uint64_t base = wrap_around ? 
1u : (0xf82d3 & value_mask); - const std::vector> values = { + const std::vector> values = { (base - 1u) & value_mask, (base - 2u) & value_mask, (base - 3u) & value_mask}; diff --git a/logging/rtc_event_log/encoder/optional_blob_encoding.cc b/logging/rtc_event_log/encoder/optional_blob_encoding.cc index 81d2c0625b..3d225ba882 100644 --- a/logging/rtc_event_log/encoder/optional_blob_encoding.cc +++ b/logging/rtc_event_log/encoder/optional_blob_encoding.cc @@ -10,17 +10,21 @@ #include "logging/rtc_event_log/encoder/optional_blob_encoding.h" +#include #include +#include +#include +#include +#include "absl/strings/string_view.h" #include "rtc_base/bit_buffer.h" #include "rtc_base/bitstream_reader.h" #include "rtc_base/checks.h" -#include "rtc_base/logging.h" namespace webrtc { std::string EncodeOptionalBlobs( - const std::vector>& blobs) { + const std::vector>& blobs) { if (blobs.empty()) { return {}; } @@ -31,7 +35,7 @@ std::string EncodeOptionalBlobs( if (blob.has_value()) { ++num_blobs_present; reserve_size_bits += - (rtc::BitBufferWriter::kMaxLeb128Length.bytes() + blob->size()) * 8; + (BitBufferWriter::kMaxLeb128Length.bytes() + blob->size()) * 8; } } @@ -45,7 +49,7 @@ std::string EncodeOptionalBlobs( } std::vector buffer((reserve_size_bits + 7) / 8); - rtc::BitBufferWriter writer(buffer.data(), buffer.size()); + BitBufferWriter writer(buffer.data(), buffer.size()); // Write present bits if all blobs are not present. writer.WriteBits(all_blobs_present, 1); @@ -75,10 +79,10 @@ std::string EncodeOptionalBlobs( return std::string(buffer.data(), buffer.data() + bytes_written); } -std::vector> DecodeOptionalBlobs( +std::vector> DecodeOptionalBlobs( absl::string_view encoded_blobs, size_t num_of_blobs) { - std::vector> res(num_of_blobs); + std::vector> res(num_of_blobs); if (encoded_blobs.empty() || num_of_blobs == 0) { return res; } diff --git a/logging/rtc_event_log/encoder/optional_blob_encoding.h b/logging/rtc_event_log/encoder/optional_blob_encoding.h index 32f52785c6..cd40523c30 100644 --- a/logging/rtc_event_log/encoder/optional_blob_encoding.h +++ b/logging/rtc_event_log/encoder/optional_blob_encoding.h @@ -13,11 +13,11 @@ #include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" namespace webrtc { @@ -26,12 +26,12 @@ namespace webrtc { // in a way that would allow us to separate them again on the decoding side. // EncodeOptionalBlobs() may not fail but may return an empty string std::string EncodeOptionalBlobs( - const std::vector>& blobs); + const std::vector>& blobs); // Calling DecodeOptionalBlobs() on an empty string, or with `num_of_blobs` set // to 0, is an error. DecodeOptionalBlobs() returns an empty vector if it fails, // which can happen if `encoded_blobs` is corrupted. 
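Likewise, a minimal round-trip sketch for EncodeOptionalBlobs()/DecodeOptionalBlobs() as documented above; the element type std::optional<std::string> matches the unit tests in this patch, and the wrapper function is an illustrative assumption:

#include <optional>
#include <string>
#include <vector>

#include "logging/rtc_event_log/encoder/optional_blob_encoding.h"
#include "rtc_base/checks.h"

namespace webrtc {

void OptionalBlobRoundTripSketch() {
  const std::vector<std::optional<std::string>> blobs = {
      "payload-a", std::nullopt /* absent blob */, "" /* present but empty */};

  // Never fails, but may return an empty string when no blob is present.
  const std::string encoded = EncodeOptionalBlobs(blobs);

  // The blob count is not stored in the encoding and must be supplied by the
  // caller; a corrupted input yields an empty vector.
  const std::vector<std::optional<std::string>> decoded =
      DecodeOptionalBlobs(encoded, blobs.size());
  RTC_CHECK(decoded == blobs);
}

}  // namespace webrtc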
-std::vector> DecodeOptionalBlobs( +std::vector> DecodeOptionalBlobs( absl::string_view encoded_blobs, size_t num_of_blobs); diff --git a/logging/rtc_event_log/encoder/optional_blob_encoding_unittest.cc b/logging/rtc_event_log/encoder/optional_blob_encoding_unittest.cc index bdb876f707..7e7bccd126 100644 --- a/logging/rtc_event_log/encoder/optional_blob_encoding_unittest.cc +++ b/logging/rtc_event_log/encoder/optional_blob_encoding_unittest.cc @@ -10,6 +10,8 @@ #include "logging/rtc_event_log/encoder/optional_blob_encoding.h" +#include +#include #include #include @@ -70,7 +72,7 @@ TEST(OptionalBlobEncoding, AllBlobsPresent) { } TEST(OptionalBlobEncoding, SomeBlobsPresent) { - std::string encoded = EncodeOptionalBlobs({"a", absl::nullopt, "c"}); + std::string encoded = EncodeOptionalBlobs({"a", std::nullopt, "c"}); std::string expected = BitBuilder() .Bit(0) .Bit(1) @@ -85,12 +87,12 @@ TEST(OptionalBlobEncoding, SomeBlobsPresent) { TEST(OptionalBlobEncoding, NoBlobsPresent) { std::string encoded = - EncodeOptionalBlobs({absl::nullopt, absl::nullopt, absl::nullopt}); + EncodeOptionalBlobs({std::nullopt, std::nullopt, std::nullopt}); EXPECT_THAT(encoded, IsEmpty()); } TEST(OptionalBlobEncoding, EmptyBlobsPresent) { - std::string encoded = EncodeOptionalBlobs({absl::nullopt, "", absl::nullopt}); + std::string encoded = EncodeOptionalBlobs({std::nullopt, "", std::nullopt}); std::string expected = BitBuilder() .Bit(0) .Bit(0) @@ -146,13 +148,12 @@ TEST(OptionalBlobDecoding, SomeBlobsPresent) { .Bytes({0x01, 'c'}) .AsString(); auto decoded = DecodeOptionalBlobs(encoded, 3); - EXPECT_THAT(decoded, ElementsAre("a", absl::nullopt, "c")); + EXPECT_THAT(decoded, ElementsAre("a", std::nullopt, "c")); } TEST(OptionalBlobDecoding, NoBlobsPresent) { auto decoded = DecodeOptionalBlobs("", 3); - EXPECT_THAT(decoded, - ElementsAre(absl::nullopt, absl::nullopt, absl::nullopt)); + EXPECT_THAT(decoded, ElementsAre(std::nullopt, std::nullopt, std::nullopt)); } TEST(OptionalBlobDecoding, EmptyBlobsPresent) { @@ -165,7 +166,7 @@ TEST(OptionalBlobDecoding, EmptyBlobsPresent) { .Bytes({0x0}) .AsString(); auto decoded = DecodeOptionalBlobs(encoded, 3); - EXPECT_THAT(decoded, ElementsAre(absl::nullopt, "", absl::nullopt)); + EXPECT_THAT(decoded, ElementsAre(std::nullopt, "", std::nullopt)); } TEST(OptionalBlobDecoding, ZeroBlobs) { diff --git a/logging/rtc_event_log/encoder/rtc_event_log_encoder.h b/logging/rtc_event_log/encoder/rtc_event_log_encoder.h index 3c3dc78990..633663a1e9 100644 --- a/logging/rtc_event_log/encoder/rtc_event_log_encoder.h +++ b/logging/rtc_event_log/encoder/rtc_event_log_encoder.h @@ -11,6 +11,7 @@ #ifndef LOGGING_RTC_EVENT_LOG_ENCODER_RTC_EVENT_LOG_ENCODER_H_ #define LOGGING_RTC_EVENT_LOG_ENCODER_RTC_EVENT_LOG_ENCODER_H_ +#include #include #include #include diff --git a/logging/rtc_event_log/encoder/rtc_event_log_encoder_common.cc b/logging/rtc_event_log/encoder/rtc_event_log_encoder_common.cc index 7aea47611d..54900997e4 100644 --- a/logging/rtc_event_log/encoder/rtc_event_log_encoder_common.cc +++ b/logging/rtc_event_log/encoder/rtc_event_log_encoder_common.cc @@ -10,6 +10,8 @@ #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_common.h" +#include + #include "rtc_base/checks.h" namespace webrtc { diff --git a/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc b/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc index 5619827246..05da112b0f 100644 --- a/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc +++ 
b/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc @@ -12,13 +12,19 @@ #include +#include +#include +#include +#include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" -#include "api/network_state_predictor.h" +#include "api/candidate.h" +#include "api/rtc_event_log/rtc_event.h" #include "api/rtp_headers.h" #include "api/rtp_parameters.h" +#include "api/transport/bandwidth_usage.h" #include "api/transport/network_types.h" #include "logging/rtc_event_log/events/rtc_event_alr_state.h" #include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h" @@ -41,7 +47,6 @@ #include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" #include "logging/rtc_event_log/rtc_stream_config.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet/app.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" @@ -51,19 +56,16 @@ #include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" #include "modules/rtp_rtcp/source/rtcp_packet/sdes.h" #include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" -#include "modules/rtp_rtcp/source/rtp_packet.h" +#include "rtc_base/buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/ignore_wundef.h" #include "rtc_base/logging.h" // *.pb.h files are generated at build-time by the protobuf compiler. -RTC_PUSH_IGNORING_WUNDEF() #ifdef WEBRTC_ANDROID_PLATFORM_BUILD #include "external/webrtc/webrtc/logging/rtc_event_log/rtc_event_log.pb.h" #else #include "logging/rtc_event_log/rtc_event_log.pb.h" #endif -RTC_POP_IGNORING_WUNDEF() namespace webrtc { @@ -134,21 +136,15 @@ ConvertIceCandidatePairConfigType(IceCandidatePairConfigType type) { rtclog::IceCandidatePairConfig::IceCandidateType ConvertIceCandidateType( IceCandidateType type) { switch (type) { - case IceCandidateType::kUnknown: - return rtclog::IceCandidatePairConfig::UNKNOWN_CANDIDATE_TYPE; - case IceCandidateType::kLocal: + case IceCandidateType::kHost: return rtclog::IceCandidatePairConfig::LOCAL; - case IceCandidateType::kStun: + case IceCandidateType::kSrflx: return rtclog::IceCandidatePairConfig::STUN; case IceCandidateType::kPrflx: return rtclog::IceCandidatePairConfig::PRFLX; case IceCandidateType::kRelay: return rtclog::IceCandidatePairConfig::RELAY; - case IceCandidateType::kNumValues: - RTC_DCHECK_NOTREACHED(); } - RTC_DCHECK_NOTREACHED(); - return rtclog::IceCandidatePairConfig::UNKNOWN_CANDIDATE_TYPE; } rtclog::IceCandidatePairConfig::Protocol ConvertIceCandidatePairProtocol( @@ -205,6 +201,7 @@ rtclog::IceCandidatePairConfig::NetworkType ConvertIceCandidateNetworkType( return rtclog::IceCandidatePairConfig::CELLULAR; case IceCandidateNetworkType::kNumValues: RTC_DCHECK_NOTREACHED(); + break; } RTC_DCHECK_NOTREACHED(); return rtclog::IceCandidatePairConfig::UNKNOWN_NETWORK_TYPE; @@ -230,8 +227,9 @@ ConvertIceCandidatePairEventType(IceCandidatePairEventType type) { } // namespace -std::string RtcEventLogEncoderLegacy::EncodeLogStart(int64_t timestamp_us, - int64_t utc_time_us) { +std::string RtcEventLogEncoderLegacy::EncodeLogStart( + int64_t timestamp_us, + int64_t /* utc_time_us */) { rtclog::Event rtclog_event; rtclog_event.set_timestamp_us(timestamp_us); rtclog_event.set_type(rtclog::Event::LOG_START); @@ -717,10 +715,9 @@ std::string RtcEventLogEncoderLegacy::EncodeVideoSendStreamConfig( return Serialize(&rtclog_event); } 
-std::string RtcEventLogEncoderLegacy::EncodeRtcpPacket( - int64_t timestamp_us, - const rtc::Buffer& packet, - bool is_incoming) { +std::string RtcEventLogEncoderLegacy::EncodeRtcpPacket(int64_t timestamp_us, + const Buffer& packet, + bool is_incoming) { rtclog::Event rtclog_event; rtclog_event.set_timestamp_us(timestamp_us); rtclog_event.set_type(rtclog::Event::RTCP_EVENT); @@ -767,7 +764,7 @@ std::string RtcEventLogEncoderLegacy::EncodeRtcpPacket( std::string RtcEventLogEncoderLegacy::EncodeRtpPacket( int64_t timestamp_us, - rtc::ArrayView header, + ArrayView header, size_t packet_length, int probe_cluster_id, bool is_incoming) { diff --git a/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h b/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h index 33c530789b..c03d3dbb4a 100644 --- a/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h +++ b/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h @@ -11,11 +11,14 @@ #ifndef LOGGING_RTC_EVENT_LOG_ENCODER_RTC_EVENT_LOG_ENCODER_LEGACY_H_ #define LOGGING_RTC_EVENT_LOG_ENCODER_RTC_EVENT_LOG_ENCODER_LEGACY_H_ +#include +#include #include #include #include #include "api/array_view.h" +#include "api/rtc_event_log/rtc_event.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder.h" #include "rtc_base/buffer.h" @@ -94,10 +97,10 @@ class RtcEventLogEncoderLegacy final : public RtcEventLogEncoder { // RTCP/RTP are handled similarly for incoming/outgoing. std::string EncodeRtcpPacket(int64_t timestamp_us, - const rtc::Buffer& packet, + const Buffer& packet, bool is_incoming); std::string EncodeRtpPacket(int64_t timestamp_us, - rtc::ArrayView header, + ArrayView header, size_t packet_length, int probe_cluster_id, bool is_incoming); diff --git a/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc b/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc index 5d83c72a0b..d6fce461d0 100644 --- a/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc +++ b/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc @@ -10,9 +10,27 @@ #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.h" -#include "absl/types/optional.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + #include "api/array_view.h" -#include "api/network_state_predictor.h" +#include "api/candidate.h" +#include "api/dtls_transport_interface.h" +#include "api/field_trials_view.h" +#include "api/rtc_event_log/rtc_event.h" +#include "api/rtp_headers.h" +#include "api/rtp_parameters.h" +#include "api/transport/bandwidth_usage.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_rotation.h" #include "logging/rtc_event_log/dependency_descriptor_encoder_decoder.h" #include "logging/rtc_event_log/encoder/blob_encoding.h" #include "logging/rtc_event_log/encoder/delta_encoding.h" @@ -47,7 +65,6 @@ #include "logging/rtc_event_log/rtc_stream_config.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" #include "modules/rtp_rtcp/include/rtp_cvo.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet/app.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" @@ -59,20 +76,15 @@ #include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" 
-#include "modules/rtp_rtcp/source/rtp_packet.h" +#include "rtc_base/buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/ignore_wundef.h" -#include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" // *.pb.h files are generated at build-time by the protobuf compiler. -RTC_PUSH_IGNORING_WUNDEF() #ifdef WEBRTC_ANDROID_PLATFORM_BUILD #include "external/webrtc/webrtc/logging/rtc_event_log/rtc_event_log2.pb.h" #else #include "logging/rtc_event_log/rtc_event_log2.pb.h" #endif -RTC_POP_IGNORING_WUNDEF() using webrtc_event_logging::ToUnsigned; @@ -107,9 +119,8 @@ rtclog2::FrameDecodedEvents::Codec ConvertToProtoFormat(VideoCodecType codec) { return rtclog2::FrameDecodedEvents::CODEC_AV1; case VideoCodecType::kVideoCodecH264: return rtclog2::FrameDecodedEvents::CODEC_H264; - case VideoCodecType::kVideoCodecMultiplex: - // This codec type is afaik not used. - return rtclog2::FrameDecodedEvents::CODEC_UNKNOWN; + case VideoCodecType::kVideoCodecH265: + return rtclog2::FrameDecodedEvents::CODEC_H265; } RTC_DCHECK_NOTREACHED(); return rtclog2::FrameDecodedEvents::CODEC_UNKNOWN; @@ -199,18 +210,14 @@ ConvertToProtoFormat(IceCandidatePairConfigType type) { rtclog2::IceCandidatePairConfig::IceCandidateType ConvertToProtoFormat( IceCandidateType type) { switch (type) { - case IceCandidateType::kUnknown: - return rtclog2::IceCandidatePairConfig::UNKNOWN_CANDIDATE_TYPE; - case IceCandidateType::kLocal: + case IceCandidateType::kHost: return rtclog2::IceCandidatePairConfig::LOCAL; - case IceCandidateType::kStun: + case IceCandidateType::kSrflx: return rtclog2::IceCandidatePairConfig::STUN; case IceCandidateType::kPrflx: return rtclog2::IceCandidatePairConfig::PRFLX; case IceCandidateType::kRelay: return rtclog2::IceCandidatePairConfig::RELAY; - case IceCandidateType::kNumValues: - RTC_DCHECK_NOTREACHED(); } RTC_DCHECK_NOTREACHED(); return rtclog2::IceCandidatePairConfig::UNKNOWN_CANDIDATE_TYPE; @@ -294,8 +301,7 @@ rtclog2::IceCandidatePairEvent::IceCandidatePairEventType ConvertToProtoFormat( // Copies all RTCP blocks except APP, SDES and unknown from `packet` to // `buffer`. `buffer` must have space for at least `packet.size()` bytes. -size_t RemoveNonAllowlistedRtcpBlocks(const rtc::Buffer& packet, - uint8_t* buffer) { +size_t RemoveNonAllowlistedRtcpBlocks(const Buffer& packet, uint8_t* buffer) { RTC_DCHECK(buffer != nullptr); rtcp::CommonHeader header; const uint8_t* block_begin = packet.data(); @@ -337,7 +343,7 @@ size_t RemoveNonAllowlistedRtcpBlocks(const rtc::Buffer& packet, } template -void EncodeRtcpPacket(rtc::ArrayView batch, +void EncodeRtcpPacket(ArrayView batch, ProtoType* proto_batch) { if (batch.empty()) { return; @@ -359,7 +365,7 @@ void EncodeRtcpPacket(rtc::ArrayView batch, // Delta encoding proto_batch->set_number_of_deltas(batch.size() - 1); - std::vector> values(batch.size() - 1); + std::vector> values(batch.size() - 1); std::string encoded_deltas; // timestamp_ms @@ -386,10 +392,12 @@ void EncodeRtcpPacket(rtc::ArrayView batch, } proto_batch->set_raw_packet_blobs(EncodeBlobs(scrubed_packets)); } +} // namespace -template -void EncodeRtpPacket(const std::vector& batch, - ProtoType* proto_batch) { +template +void RtcEventLogEncoderNewFormat::EncodeRtpPacket(const Batch& batch, + ProtoType* proto_batch) { + using EventType = std::remove_pointer_t; if (batch.empty()) { return; } @@ -408,7 +416,7 @@ void EncodeRtpPacket(const std::vector& batch, proto_batch->set_padding_size(base_event->padding_length()); // Add header extensions (base event). 
- absl::optional base_transport_sequence_number; + std::optional base_transport_sequence_number; { uint16_t seqnum; if (base_event->template GetExtension(&seqnum)) { @@ -417,7 +425,7 @@ void EncodeRtpPacket(const std::vector& batch, } } - absl::optional unsigned_base_transmission_time_offset; + std::optional unsigned_base_transmission_time_offset; { int32_t offset; if (base_event->template GetExtension(&offset)) { @@ -426,7 +434,7 @@ void EncodeRtpPacket(const std::vector& batch, } } - absl::optional base_absolute_send_time; + std::optional base_absolute_send_time; { uint32_t sendtime; if (base_event->template GetExtension(&sendtime)) { @@ -435,7 +443,7 @@ void EncodeRtpPacket(const std::vector& batch, } } - absl::optional base_video_rotation; + std::optional base_video_rotation; { VideoRotation video_rotation; if (base_event->template GetExtension(&video_rotation)) { @@ -445,28 +453,25 @@ void EncodeRtpPacket(const std::vector& batch, } } - absl::optional base_audio_level; - absl::optional base_voice_activity; + std::optional base_audio_level; + std::optional base_voice_activity; { - bool voice_activity; - uint8_t audio_level; - if (base_event->template GetExtension(&voice_activity, - &audio_level)) { - RTC_DCHECK_LE(audio_level, 0x7Fu); - base_audio_level = audio_level; - proto_batch->set_audio_level(audio_level); - - base_voice_activity = voice_activity; - proto_batch->set_voice_activity(voice_activity); + AudioLevel audio_level; + if (base_event->template GetExtension(&audio_level)) { + RTC_DCHECK_LE(audio_level.level(), 0x7Fu); + base_audio_level = audio_level.level(); + proto_batch->set_audio_level(audio_level.level()); + + base_voice_activity = audio_level.voice_activity(); + proto_batch->set_voice_activity(audio_level.voice_activity()); } } { // TODO(webrtc:14975) Remove this kill switch after DD in RTC event log has // been rolled out. 
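
The audio-level hunk above replaces the old two-out-parameter read (a bool voice-activity flag plus a uint8_t level) with a single AudioLevel value object that carries both fields; AudioLevel comes from api/rtp_headers.h, which the patch adds to the include list. A minimal sketch of the new read pattern follows. The function name, the generic Packet type, and the extension trait (left as a template parameter) are illustrative assumptions, not taken from this patch.

#include <cstdint>
#include <optional>

#include "api/rtp_headers.h"  // webrtc::AudioLevel
#include "rtc_base/checks.h"

// Sketch only: returns the 7-bit -dBov audio level if the extension is set.
template <typename AudioLevelExtension, typename Packet>
std::optional<uint8_t> ReadAudioLevelDbov(const Packet& packet) {
  webrtc::AudioLevel audio_level;
  if (!packet.template GetExtension<AudioLevelExtension>(&audio_level)) {
    return std::nullopt;  // Extension absent on this packet.
  }
  RTC_DCHECK_LE(audio_level.level(), 0x7F);
  // audio_level.voice_activity() carries the VAD bit that used to be a
  // separate out-parameter in the pre-patch API.
  return static_cast<uint8_t>(audio_level.level());
}
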
- if (!webrtc::field_trial::IsDisabled( - "WebRTC-RtcEventLogEncodeDependencyDescriptor")) { - std::vector> raw_dds(batch.size()); + if (encode_dependency_descriptor_) { + std::vector> raw_dds(batch.size()); bool has_dd = false; for (size_t i = 0; i < batch.size(); ++i) { raw_dds[i] = @@ -490,7 +495,7 @@ void EncodeRtpPacket(const std::vector& batch, // Delta encoding proto_batch->set_number_of_deltas(batch.size() - 1); - std::vector> values(batch.size() - 1); + std::vector> values(batch.size() - 1); std::string encoded_deltas; // timestamp_ms (event) @@ -646,12 +651,10 @@ void EncodeRtpPacket(const std::vector& batch, // audio_level (RTP extension) for (size_t i = 0; i < values.size(); ++i) { const EventType* event = batch[i + 1]; - bool voice_activity; - uint8_t audio_level; - if (event->template GetExtension(&voice_activity, - &audio_level)) { - RTC_DCHECK_LE(audio_level, 0x7Fu); - values[i] = audio_level; + AudioLevel audio_level; + if (event->template GetExtension(&audio_level)) { + RTC_DCHECK_LE(audio_level.level(), 0x7F); + values[i] = audio_level.level(); } else { values[i].reset(); } @@ -664,12 +667,10 @@ void EncodeRtpPacket(const std::vector& batch, // voice_activity (RTP extension) for (size_t i = 0; i < values.size(); ++i) { const EventType* event = batch[i + 1]; - bool voice_activity; - uint8_t audio_level; - if (event->template GetExtension(&voice_activity, - &audio_level)) { - RTC_DCHECK_LE(audio_level, 0x7Fu); - values[i] = voice_activity; + AudioLevel audio_level; + if (event->template GetExtension(&audio_level)) { + RTC_DCHECK_LE(audio_level.level(), 0x7F); + values[i] = audio_level.voice_activity(); } else { values[i].reset(); } @@ -679,15 +680,13 @@ void EncodeRtpPacket(const std::vector& batch, proto_batch->set_voice_activity_deltas(encoded_deltas); } } -} // namespace -RtcEventLogEncoderNewFormat::RtcEventLogEncoderNewFormat() { - encode_neteq_set_minimum_delay_kill_switch_ = false; - if (webrtc::field_trial::IsEnabled( - "WebRTC-RtcEventLogEncodeNetEqSetMinimumDelayKillSwitch")) { - encode_neteq_set_minimum_delay_kill_switch_ = true; - } -} +RtcEventLogEncoderNewFormat::RtcEventLogEncoderNewFormat( + const FieldTrialsView& field_trials) + : encode_neteq_set_minimum_delay_kill_switch_(field_trials.IsEnabled( + "WebRTC-RtcEventLogEncodeNetEqSetMinimumDelayKillSwitch")), + encode_dependency_descriptor_(!field_trials.IsDisabled( + "WebRTC-RtcEventLogEncodeDependencyDescriptor")) {} std::string RtcEventLogEncoderNewFormat::EncodeLogStart(int64_t timestamp_us, int64_t utc_time_us) { @@ -971,7 +970,7 @@ std::string RtcEventLogEncoderNewFormat::EncodeBatch( } void RtcEventLogEncoderNewFormat::EncodeAlrState( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { for (const RtcEventAlrState* base_event : batch) { rtclog2::AlrState* proto_batch = event_stream->add_alr_states(); @@ -982,7 +981,7 @@ void RtcEventLogEncoderNewFormat::EncodeAlrState( } void RtcEventLogEncoderNewFormat::EncodeAudioNetworkAdaptation( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { if (batch.empty()) return; @@ -998,7 +997,7 @@ void RtcEventLogEncoderNewFormat::EncodeAudioNetworkAdaptation( proto_batch->set_frame_length_ms( base_event->config().frame_length_ms.value()); } - absl::optional base_uplink_packet_loss_fraction; + std::optional base_uplink_packet_loss_fraction; if (base_event->config().uplink_packet_loss_fraction.has_value()) { base_uplink_packet_loss_fraction = ConvertPacketLossFractionToProtoFormat( 
base_event->config().uplink_packet_loss_fraction.value()); @@ -1020,7 +1019,7 @@ void RtcEventLogEncoderNewFormat::EncodeAudioNetworkAdaptation( // Delta encoding proto_batch->set_number_of_deltas(batch.size() - 1); - std::vector> values(batch.size() - 1); + std::vector> values(batch.size() - 1); std::string encoded_deltas; // timestamp_ms @@ -1042,10 +1041,10 @@ void RtcEventLogEncoderNewFormat::EncodeAudioNetworkAdaptation( values[i].reset(); } } - const absl::optional unsigned_base_bitrate_bps = + const std::optional unsigned_base_bitrate_bps = base_event->config().bitrate_bps.has_value() ? ToUnsigned(base_event->config().bitrate_bps.value()) - : absl::optional(); + : std::optional(); encoded_deltas = EncodeDeltas(unsigned_base_bitrate_bps, values); if (!encoded_deltas.empty()) { proto_batch->set_bitrate_bps_deltas(encoded_deltas); @@ -1060,10 +1059,10 @@ void RtcEventLogEncoderNewFormat::EncodeAudioNetworkAdaptation( values[i].reset(); } } - const absl::optional unsigned_base_frame_length_ms = + const std::optional unsigned_base_frame_length_ms = base_event->config().frame_length_ms.has_value() ? ToUnsigned(base_event->config().frame_length_ms.value()) - : absl::optional(); + : std::optional(); encoded_deltas = EncodeDeltas(unsigned_base_frame_length_ms, values); if (!encoded_deltas.empty()) { proto_batch->set_frame_length_ms_deltas(encoded_deltas); @@ -1107,7 +1106,7 @@ void RtcEventLogEncoderNewFormat::EncodeAudioNetworkAdaptation( // num_channels for (size_t i = 0; i < values.size(); ++i) { const RtcEventAudioNetworkAdaptation* event = batch[i + 1]; - const absl::optional num_channels = event->config().num_channels; + const std::optional num_channels = event->config().num_channels; if (num_channels.has_value()) { // Since the number of channels is always greater than 0, we can encode // N channels as N-1, thereby making sure that we get smaller deltas. @@ -1122,7 +1121,7 @@ void RtcEventLogEncoderNewFormat::EncodeAudioNetworkAdaptation( } // In the base event, N channels encoded as N channels, but for delta // compression purposes, also shifted down by 1. 
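
The comment above explains why channel counts are shifted down by one before delta encoding (a count is always at least 1, so storing N-1 keeps the deltas small), and the hunk that follows applies the same shift to the base event. As a self-contained illustration (sketch only; the helper name is not part of the patch): for counts {2, 1, 2}, the base event's 2 is written to the proto unchanged, while the values handed to the delta encoder become base 1 and subsequent values {0, 1}.

#include <cstddef>
#include <cstdint>
#include <optional>
#include <vector>

// Shift each present channel count down by one; absent values stay absent.
std::vector<std::optional<uint64_t>> ShiftChannelCounts(
    const std::vector<std::optional<uint64_t>>& num_channels) {
  std::vector<std::optional<uint64_t>> shifted(num_channels.size());
  for (size_t i = 0; i < num_channels.size(); ++i) {
    if (num_channels[i].has_value()) {
      shifted[i] = *num_channels[i] - 1;  // 2 channels -> 1, 1 channel -> 0.
    }
  }
  return shifted;
}
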
- absl::optional shifted_base_num_channels; + std::optional shifted_base_num_channels; if (base_event->config().num_channels.has_value()) { RTC_DCHECK_GT(base_event->config().num_channels.value(), 0u); shifted_base_num_channels = base_event->config().num_channels.value() - 1; @@ -1134,7 +1133,7 @@ void RtcEventLogEncoderNewFormat::EncodeAudioNetworkAdaptation( } void RtcEventLogEncoderNewFormat::EncodeAudioPlayout( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { if (batch.empty()) return; @@ -1151,7 +1150,7 @@ void RtcEventLogEncoderNewFormat::EncodeAudioPlayout( // Delta encoding proto_batch->set_number_of_deltas(batch.size() - 1); - std::vector> values(batch.size() - 1); + std::vector> values(batch.size() - 1); std::string encoded_deltas; // timestamp_ms @@ -1176,7 +1175,7 @@ void RtcEventLogEncoderNewFormat::EncodeAudioPlayout( } void RtcEventLogEncoderNewFormat::EncodeNetEqSetMinimumDelay( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { if (encode_neteq_set_minimum_delay_kill_switch_) { return; @@ -1198,7 +1197,7 @@ void RtcEventLogEncoderNewFormat::EncodeNetEqSetMinimumDelay( // Delta encoding proto_batch->set_number_of_deltas(batch.size() - 1); - std::vector> values(batch.size() - 1); + std::vector> values(batch.size() - 1); std::string encoded_deltas; // timestamp_ms @@ -1234,7 +1233,7 @@ void RtcEventLogEncoderNewFormat::EncodeNetEqSetMinimumDelay( } void RtcEventLogEncoderNewFormat::EncodeAudioRecvStreamConfig( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { for (const RtcEventAudioReceiveStreamConfig* base_event : batch) { rtclog2::AudioRecvStreamConfig* proto_batch = @@ -1253,7 +1252,7 @@ void RtcEventLogEncoderNewFormat::EncodeAudioRecvStreamConfig( } void RtcEventLogEncoderNewFormat::EncodeAudioSendStreamConfig( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { for (const RtcEventAudioSendStreamConfig* base_event : batch) { rtclog2::AudioSendStreamConfig* proto_batch = @@ -1271,7 +1270,7 @@ void RtcEventLogEncoderNewFormat::EncodeAudioSendStreamConfig( } void RtcEventLogEncoderNewFormat::EncodeBweUpdateDelayBased( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { if (batch.empty()) return; @@ -1290,7 +1289,7 @@ void RtcEventLogEncoderNewFormat::EncodeBweUpdateDelayBased( // Delta encoding proto_batch->set_number_of_deltas(batch.size() - 1); - std::vector> values(batch.size() - 1); + std::vector> values(batch.size() - 1); std::string encoded_deltas; // timestamp_ms @@ -1328,7 +1327,7 @@ void RtcEventLogEncoderNewFormat::EncodeBweUpdateDelayBased( } void RtcEventLogEncoderNewFormat::EncodeBweUpdateLossBased( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { if (batch.empty()) return; @@ -1347,7 +1346,7 @@ void RtcEventLogEncoderNewFormat::EncodeBweUpdateLossBased( // Delta encoding proto_batch->set_number_of_deltas(batch.size() - 1); - std::vector> values(batch.size() - 1); + std::vector> values(batch.size() - 1); std::string encoded_deltas; // timestamp_ms @@ -1392,7 +1391,7 @@ void RtcEventLogEncoderNewFormat::EncodeBweUpdateLossBased( } void RtcEventLogEncoderNewFormat::EncodeDtlsTransportState( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { for (const RtcEventDtlsTransportState* base_event : batch) { rtclog2::DtlsTransportStateEvent* proto_batch = @@ -1404,7 +1403,7 @@ void RtcEventLogEncoderNewFormat::EncodeDtlsTransportState( } void 
RtcEventLogEncoderNewFormat::EncodeDtlsWritableState( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { for (const RtcEventDtlsWritableState* base_event : batch) { rtclog2::DtlsWritableState* proto_batch = @@ -1415,7 +1414,7 @@ void RtcEventLogEncoderNewFormat::EncodeDtlsWritableState( } void RtcEventLogEncoderNewFormat::EncodeProbeClusterCreated( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { for (const RtcEventProbeClusterCreated* base_event : batch) { rtclog2::BweProbeCluster* proto_batch = event_stream->add_probe_clusters(); @@ -1428,7 +1427,7 @@ void RtcEventLogEncoderNewFormat::EncodeProbeClusterCreated( } void RtcEventLogEncoderNewFormat::EncodeProbeResultFailure( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { for (const RtcEventProbeResultFailure* base_event : batch) { rtclog2::BweProbeResultFailure* proto_batch = @@ -1442,7 +1441,7 @@ void RtcEventLogEncoderNewFormat::EncodeProbeResultFailure( } void RtcEventLogEncoderNewFormat::EncodeProbeResultSuccess( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { for (const RtcEventProbeResultSuccess* base_event : batch) { rtclog2::BweProbeResultSuccess* proto_batch = @@ -1455,7 +1454,7 @@ void RtcEventLogEncoderNewFormat::EncodeProbeResultSuccess( } void RtcEventLogEncoderNewFormat::EncodeRouteChange( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { for (const RtcEventRouteChange* base_event : batch) { rtclog2::RouteChange* proto_batch = event_stream->add_route_changes(); @@ -1467,7 +1466,7 @@ void RtcEventLogEncoderNewFormat::EncodeRouteChange( } void RtcEventLogEncoderNewFormat::EncodeRemoteEstimate( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { if (batch.empty()) return; @@ -1478,13 +1477,13 @@ void RtcEventLogEncoderNewFormat::EncodeRemoteEstimate( proto_batch->set_timestamp_ms(base_event->timestamp_ms()); - absl::optional base_link_capacity_lower; + std::optional base_link_capacity_lower; if (base_event->link_capacity_lower_.IsFinite()) { base_link_capacity_lower = base_event->link_capacity_lower_.kbps(); proto_batch->set_link_capacity_lower_kbps(*base_link_capacity_lower); } - absl::optional base_link_capacity_upper; + std::optional base_link_capacity_upper; if (base_event->link_capacity_upper_.IsFinite()) { base_link_capacity_upper = base_event->link_capacity_upper_.kbps(); @@ -1496,7 +1495,7 @@ void RtcEventLogEncoderNewFormat::EncodeRemoteEstimate( // Delta encoding proto_batch->set_number_of_deltas(batch.size() - 1); - std::vector> values(batch.size() - 1); + std::vector> values(batch.size() - 1); std::string encoded_deltas; // timestamp_ms @@ -1539,7 +1538,7 @@ void RtcEventLogEncoderNewFormat::EncodeRemoteEstimate( } void RtcEventLogEncoderNewFormat::EncodeRtcpPacketIncoming( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { if (batch.empty()) { return; @@ -1548,7 +1547,7 @@ void RtcEventLogEncoderNewFormat::EncodeRtcpPacketIncoming( } void RtcEventLogEncoderNewFormat::EncodeRtcpPacketOutgoing( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { if (batch.empty()) { return; @@ -1567,7 +1566,7 @@ void RtcEventLogEncoderNewFormat::EncodeRtpPacketIncoming( } void RtcEventLogEncoderNewFormat::EncodeFramesDecoded( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { if (batch.empty()) { return; @@ -1589,7 +1588,7 @@ void 
RtcEventLogEncoderNewFormat::EncodeFramesDecoded( // Delta encoding proto_batch->set_number_of_deltas(batch.size() - 1); - std::vector> values(batch.size() - 1); + std::vector> values(batch.size() - 1); std::string encoded_deltas; // timestamp_ms @@ -1666,7 +1665,7 @@ void RtcEventLogEncoderNewFormat::EncodeFramesDecoded( } void RtcEventLogEncoderNewFormat::EncodeGenericPacketsSent( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { if (batch.empty()) { return; @@ -1682,7 +1681,7 @@ void RtcEventLogEncoderNewFormat::EncodeGenericPacketsSent( // Delta encoding proto_batch->set_number_of_deltas(batch.size() - 1); - std::vector> values(batch.size() - 1); + std::vector> values(batch.size() - 1); std::string encoded_deltas; if (batch.size() == 1) { @@ -1742,7 +1741,7 @@ void RtcEventLogEncoderNewFormat::EncodeGenericPacketsSent( } void RtcEventLogEncoderNewFormat::EncodeGenericPacketsReceived( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { if (batch.empty()) { return; @@ -1756,7 +1755,7 @@ void RtcEventLogEncoderNewFormat::EncodeGenericPacketsReceived( // Delta encoding proto_batch->set_number_of_deltas(batch.size() - 1); - std::vector> values(batch.size() - 1); + std::vector> values(batch.size() - 1); std::string encoded_deltas; if (batch.size() == 1) { @@ -1796,7 +1795,7 @@ void RtcEventLogEncoderNewFormat::EncodeGenericPacketsReceived( } void RtcEventLogEncoderNewFormat::EncodeGenericAcksReceived( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { if (batch.empty()) { return; @@ -1807,7 +1806,7 @@ void RtcEventLogEncoderNewFormat::EncodeGenericAcksReceived( proto_batch->set_timestamp_ms(base_event->timestamp_ms()); proto_batch->set_packet_number(base_event->packet_number()); proto_batch->set_acked_packet_number(base_event->acked_packet_number()); - absl::optional base_receive_timestamp; + std::optional base_receive_timestamp; if (base_event->receive_acked_packet_time_ms()) { int64_t receive_acked_packet_time_ms = base_event->receive_acked_packet_time_ms().value(); @@ -1817,7 +1816,7 @@ void RtcEventLogEncoderNewFormat::EncodeGenericAcksReceived( // Delta encoding proto_batch->set_number_of_deltas(batch.size() - 1); - std::vector> values(batch.size() - 1); + std::vector> values(batch.size() - 1); std::string encoded_deltas; if (batch.size() == 1) { @@ -1862,7 +1861,7 @@ void RtcEventLogEncoderNewFormat::EncodeGenericAcksReceived( if (event->receive_acked_packet_time_ms()) { values[i] = ToUnsigned(event->receive_acked_packet_time_ms().value()); } else { - values[i] = absl::nullopt; + values[i] = std::nullopt; } } encoded_deltas = EncodeDeltas(base_receive_timestamp, values); @@ -1882,7 +1881,7 @@ void RtcEventLogEncoderNewFormat::EncodeRtpPacketOutgoing( } void RtcEventLogEncoderNewFormat::EncodeVideoRecvStreamConfig( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { for (const RtcEventVideoReceiveStreamConfig* base_event : batch) { rtclog2::VideoRecvStreamConfig* proto_batch = @@ -1902,7 +1901,7 @@ void RtcEventLogEncoderNewFormat::EncodeVideoRecvStreamConfig( } void RtcEventLogEncoderNewFormat::EncodeVideoSendStreamConfig( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { for (const RtcEventVideoSendStreamConfig* base_event : batch) { rtclog2::VideoSendStreamConfig* proto_batch = @@ -1921,7 +1920,7 @@ void RtcEventLogEncoderNewFormat::EncodeVideoSendStreamConfig( } void RtcEventLogEncoderNewFormat::EncodeIceCandidatePairConfig( - 
rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { for (const RtcEventIceCandidatePairConfig* base_event : batch) { rtclog2::IceCandidatePairConfig* proto_batch = @@ -1950,7 +1949,7 @@ void RtcEventLogEncoderNewFormat::EncodeIceCandidatePairConfig( } void RtcEventLogEncoderNewFormat::EncodeIceCandidatePairEvent( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream) { for (const RtcEventIceCandidatePair* base_event : batch) { rtclog2::IceCandidatePairEvent* proto_batch = diff --git a/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.h b/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.h index 6747f41f07..d726915382 100644 --- a/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.h +++ b/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.h @@ -11,6 +11,7 @@ #ifndef LOGGING_RTC_EVENT_LOG_ENCODER_RTC_EVENT_LOG_ENCODER_NEW_FORMAT_H_ #define LOGGING_RTC_EVENT_LOG_ENCODER_RTC_EVENT_LOG_ENCODER_NEW_FORMAT_H_ +#include #include #include #include @@ -18,6 +19,8 @@ #include #include "api/array_view.h" +#include "api/field_trials_view.h" +#include "api/rtc_event_log/rtc_event.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder.h" namespace webrtc { @@ -59,7 +62,7 @@ class RtcEventGenericPacketSent; class RtcEventLogEncoderNewFormat final : public RtcEventLogEncoder { public: - RtcEventLogEncoderNewFormat(); + explicit RtcEventLogEncoderNewFormat(const FieldTrialsView& field_trials); ~RtcEventLogEncoderNewFormat() override = default; std::string EncodeBatch( @@ -71,77 +74,74 @@ class RtcEventLogEncoderNewFormat final : public RtcEventLogEncoder { std::string EncodeLogEnd(int64_t timestamp_us) override; private: - bool encode_neteq_set_minimum_delay_kill_switch_ = false; - // Encoding entry-point for the various RtcEvent subclasses. 
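
This header hunk mirrors the earlier .cc change: the default-initialized kill-switch member is removed, two const flags initialized from an injected FieldTrialsView are added at the bottom of the class, and EncodeRtpPacket becomes a private member template so it can consult the cached encode_dependency_descriptor_ flag. A minimal sketch of the injection pattern, with the trial names copied from this patch (ExampleEncoder is a stand-in class, not the real one):

#include "api/field_trials_view.h"

// Sketch: cache the field-trial lookups once at construction instead of
// querying the global field_trial registry on every encode call.
class ExampleEncoder {
 public:
  explicit ExampleEncoder(const webrtc::FieldTrialsView& field_trials)
      : encode_neteq_set_minimum_delay_kill_switch_(field_trials.IsEnabled(
            "WebRTC-RtcEventLogEncodeNetEqSetMinimumDelayKillSwitch")),
        encode_dependency_descriptor_(!field_trials.IsDisabled(
            "WebRTC-RtcEventLogEncodeDependencyDescriptor")) {}

 private:
  const bool encode_neteq_set_minimum_delay_kill_switch_;
  const bool encode_dependency_descriptor_;
};
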
- void EncodeAlrState(rtc::ArrayView batch, + void EncodeAlrState(ArrayView batch, rtclog2::EventStream* event_stream); void EncodeAudioNetworkAdaptation( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); - void EncodeAudioPlayout(rtc::ArrayView batch, + void EncodeAudioPlayout(ArrayView batch, rtclog2::EventStream* event_stream); void EncodeAudioRecvStreamConfig( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); void EncodeAudioSendStreamConfig( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); void EncodeBweUpdateDelayBased( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); void EncodeBweUpdateLossBased( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); void EncodeDtlsTransportState( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); void EncodeDtlsWritableState( - rtc::ArrayView batch, - rtclog2::EventStream* event_stream); - void EncodeFramesDecoded( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); + void EncodeFramesDecoded(ArrayView batch, + rtclog2::EventStream* event_stream); void EncodeGenericAcksReceived( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); void EncodeGenericPacketsReceived( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); void EncodeGenericPacketsSent( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); void EncodeIceCandidatePairConfig( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); void EncodeIceCandidatePairEvent( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); - void EncodeLoggingStarted(rtc::ArrayView batch, + void EncodeLoggingStarted(ArrayView batch, rtclog2::EventStream* event_stream); - void EncodeLoggingStopped(rtc::ArrayView batch, + void EncodeLoggingStopped(ArrayView batch, rtclog2::EventStream* event_stream); void EncodeNetEqSetMinimumDelay( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); void EncodeProbeClusterCreated( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); void EncodeProbeResultFailure( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); void EncodeProbeResultSuccess( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); - void EncodeRouteChange(rtc::ArrayView batch, + void EncodeRouteChange(ArrayView batch, rtclog2::EventStream* event_stream); - void EncodeRemoteEstimate(rtc::ArrayView batch, + void EncodeRemoteEstimate(ArrayView batch, rtclog2::EventStream* event_stream); void EncodeRtcpPacketIncoming( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); void EncodeRtcpPacketOutgoing( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); void EncodeRtpPacketIncoming( const std::map>& @@ -152,11 +152,16 @@ class RtcEventLogEncoderNewFormat final : public RtcEventLogEncoder { batch, rtclog2::EventStream* event_stream); void EncodeVideoRecvStreamConfig( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); void EncodeVideoSendStreamConfig( - rtc::ArrayView batch, + ArrayView batch, rtclog2::EventStream* event_stream); + template + void EncodeRtpPacket(const Batch& batch, ProtoType* proto_batch); + + const bool encode_neteq_set_minimum_delay_kill_switch_; + const bool 
encode_dependency_descriptor_; }; } // namespace webrtc diff --git a/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc b/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc index 612f85bf61..a83dc382d0 100644 --- a/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc +++ b/logging/rtc_event_log/encoder/rtc_event_log_encoder_unittest.cc @@ -8,15 +8,30 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include "logging/rtc_event_log/encoder/rtc_event_log_encoder.h" + +#include +#include +#include +#include #include #include +#include #include #include #include - +#include +#include + +#include "api/field_trials_view.h" +#include "api/rtc_event_log/rtc_event.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.h" +#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" #include "logging/rtc_event_log/events/rtc_event_alr_state.h" #include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h" #include "logging/rtc_event_log/events/rtc_event_audio_playout.h" @@ -24,6 +39,7 @@ #include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" +#include "logging/rtc_event_log/events/rtc_event_frame_decoded.h" #include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" #include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h" #include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" @@ -36,14 +52,31 @@ #include "logging/rtc_event_log/rtc_event_log_parser.h" #include "logging/rtc_event_log/rtc_event_log_unittest_helper.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" -#include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" +#include "modules/rtp_rtcp/source/rtcp_packet/fir.h" +#include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" +#include "modules/rtp_rtcp/source/rtcp_packet/nack.h" +#include "modules/rtp_rtcp/source/rtcp_packet/pli.h" +#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/remb.h" +#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" +#include "rtc_base/buffer.h" #include "rtc_base/fake_clock.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/random.h" -#include "test/field_trial.h" +#include "rtc_base/time_utils.h" +#include "test/explicit_key_value_config.h" #include "test/gtest.h" namespace webrtc { + +using test::ExplicitKeyValueConfig; + class RtcEventLogEncoderTest : public ::testing::TestWithParam< std::tuple> { @@ -55,22 +88,26 @@ class RtcEventLogEncoderTest event_count_(std::get<2>(GetParam())), force_repeated_fields_(std::get<3>(GetParam())), gen_(seed_ * 880001UL), - verifier_(encoding_type_) { + verifier_(encoding_type_) {} + 
~RtcEventLogEncoderTest() override = default; + + std::unique_ptr CreateEncoder( + const FieldTrialsView& field_trials = ExplicitKeyValueConfig("")) { + std::unique_ptr encoder; switch (encoding_type_) { case RtcEventLog::EncodingType::Legacy: - encoder_ = std::make_unique(); + encoder = std::make_unique(); break; case RtcEventLog::EncodingType::NewFormat: - encoder_ = std::make_unique(); + encoder = std::make_unique(field_trials); break; case RtcEventLog::EncodingType::ProtoFree: - encoder_ = std::make_unique(); + encoder = std::make_unique(); break; } - encoded_ = - encoder_->EncodeLogStart(rtc::TimeMillis(), rtc::TimeUTCMillis()); + encoded_ = encoder->EncodeLogStart(TimeMillis(), TimeUTCMillis()); + return encoder; } - ~RtcEventLogEncoderTest() override = default; // ANA events have some optional fields, so we want to make sure that we get // correct behavior both when all of the values are there, as well as when @@ -89,10 +126,9 @@ class RtcEventLogEncoderTest uint32_t ssrc); template - void TestRtpPackets(); + void TestRtpPackets(RtcEventLogEncoder& encoder); std::deque> history_; - std::unique_ptr encoder_; ParsedRtcEventLog parsed_log_; const uint64_t seed_; Random prng_; @@ -108,12 +144,13 @@ void RtcEventLogEncoderTest::TestRtcEventAudioNetworkAdaptation( const std::vector>& events) { ASSERT_TRUE(history_.empty()) << "Function should be called once per test."; + std::unique_ptr encoder = CreateEncoder(); for (auto& event : events) { history_.push_back(event->Copy()); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& ana_configs = parsed_log_.audio_network_adaptation_events(); @@ -165,7 +202,7 @@ RtcEventLogEncoderTest::GetRtpPacketsBySsrc(const ParsedRtcEventLog* parsed_log, } template -void RtcEventLogEncoderTest::TestRtpPackets() { +void RtcEventLogEncoderTest::TestRtpPackets(RtcEventLogEncoder& encoder) { // SSRCs will be randomly assigned out of this small pool, significant only // in that it also covers such edge cases as SSRC = 0 and SSRC = 0xffffffff. // The pool is intentionally small, so as to produce collisions. @@ -193,7 +230,7 @@ void RtcEventLogEncoderTest::TestRtpPackets() { } // Encode and parse. - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder.EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); // For each SSRC, make sure the RTP packets associated with it to have been @@ -213,6 +250,7 @@ void RtcEventLogEncoderTest::TestRtpPackets() { } TEST_P(RtcEventLogEncoderTest, RtcEventAlrState) { + std::unique_ptr encoder = CreateEncoder(); std::vector> events(event_count_); for (size_t i = 0; i < event_count_; ++i) { events[i] = (i == 0 || !force_repeated_fields_) ? 
gen_.NewAlrState() @@ -220,7 +258,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventAlrState) { history_.push_back(events[i]->Copy()); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& alr_state_events = parsed_log_.alr_state_events(); @@ -234,6 +272,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRouteChange) { if (encoding_type_ == RtcEventLog::EncodingType::Legacy) { return; } + std::unique_ptr encoder = CreateEncoder(); std::vector> events(event_count_); for (size_t i = 0; i < event_count_; ++i) { events[i] = (i == 0 || !force_repeated_fields_) ? gen_.NewRouteChange() @@ -241,7 +280,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRouteChange) { history_.push_back(events[i]->Copy()); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& route_change_events = parsed_log_.route_change_events(); @@ -252,6 +291,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRouteChange) { } TEST_P(RtcEventLogEncoderTest, RtcEventRemoteEstimate) { + std::unique_ptr encoder = CreateEncoder(); std::vector> events(event_count_); for (size_t i = 0; i < event_count_; ++i) { events[i] = (i == 0 || !force_repeated_fields_) @@ -260,7 +300,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRemoteEstimate) { history_.push_back(std::make_unique(*events[i])); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& parsed_events = parsed_log_.remote_estimate_events(); @@ -276,8 +316,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventAudioNetworkAdaptationBitrate) { for (size_t i = 0; i < event_count_; ++i) { if (i == 0 || !force_repeated_fields_) { auto runtime_config = std::make_unique(); - const int bitrate_bps = rtc::checked_cast( - prng_.Rand(0, std::numeric_limits::max())); + const int bitrate_bps = + checked_cast(prng_.Rand(0, std::numeric_limits::max())); runtime_config->bitrate_bps = bitrate_bps; events[i] = std::make_unique( std::move(runtime_config)); @@ -377,8 +417,8 @@ TEST_P(RtcEventLogEncoderTest, RtcEventAudioNetworkAdaptationAll) { for (size_t i = 0; i < event_count_; ++i) { if (i == 0 || !force_repeated_fields_) { auto runtime_config = std::make_unique(); - runtime_config->bitrate_bps = rtc::checked_cast( - prng_.Rand(0, std::numeric_limits::max())); + runtime_config->bitrate_bps = + checked_cast(prng_.Rand(0, std::numeric_limits::max())); runtime_config->frame_length_ms = prng_.Rand(1, 1000); runtime_config->uplink_packet_loss_fraction = std::pow(0.5f, prng_.Rand(1, 8)); @@ -395,6 +435,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventAudioNetworkAdaptationAll) { } TEST_P(RtcEventLogEncoderTest, RtcEventAudioPlayout) { + std::unique_ptr encoder = CreateEncoder(); // SSRCs will be randomly assigned out of this small pool, significant only // in that it also covers such edge cases as SSRC = 0 and SSRC = 0xffffffff. // The pool is intentionally small, so as to produce collisions. 
@@ -414,7 +455,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventAudioPlayout) { original_events_by_ssrc[ssrc].push_back(std::move(event)); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& parsed_playout_events_by_ssrc = @@ -443,6 +484,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventAudioPlayout) { } TEST_P(RtcEventLogEncoderTest, RtcEventNetEqSetMinimumDelayDecoded) { + std::unique_ptr encoder = CreateEncoder(); // SSRCs will be randomly assigned out of this small pool, significant only // in that it also covers such edge cases as SSRC = 0 and SSRC = 0xffffffff. // The pool is intentionally small, so as to produce collisions. @@ -461,7 +503,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventNetEqSetMinimumDelayDecoded) { original_events_by_ssrc[ssrc].push_back(std::move(event)); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& parsed_neteq_set_minimum_delay_events_by_ssrc = @@ -502,13 +544,14 @@ TEST_P(RtcEventLogEncoderTest, RtcEventNetEqSetMinimumDelayDecoded) { // TODO(eladalon/terelius): Test with multiple events in the batch. TEST_P(RtcEventLogEncoderTest, RtcEventAudioReceiveStreamConfig) { + std::unique_ptr encoder = CreateEncoder(); uint32_t ssrc = prng_.Rand(); RtpHeaderExtensionMap extensions = gen_.NewRtpHeaderExtensionMap(); std::unique_ptr event = gen_.NewAudioReceiveStreamConfig(ssrc, extensions); history_.push_back(event->Copy()); - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& audio_recv_configs = parsed_log_.audio_recv_configs(); @@ -518,13 +561,14 @@ TEST_P(RtcEventLogEncoderTest, RtcEventAudioReceiveStreamConfig) { // TODO(eladalon/terelius): Test with multiple events in the batch. 
TEST_P(RtcEventLogEncoderTest, RtcEventAudioSendStreamConfig) { + std::unique_ptr encoder = CreateEncoder(); uint32_t ssrc = prng_.Rand(); RtpHeaderExtensionMap extensions = gen_.NewRtpHeaderExtensionMap(); std::unique_ptr event = gen_.NewAudioSendStreamConfig(ssrc, extensions); history_.push_back(event->Copy()); - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& audio_send_configs = parsed_log_.audio_send_configs(); @@ -533,6 +577,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventAudioSendStreamConfig) { } TEST_P(RtcEventLogEncoderTest, RtcEventBweUpdateDelayBased) { + std::unique_ptr encoder = CreateEncoder(); std::vector> events( event_count_); for (size_t i = 0; i < event_count_; ++i) { @@ -542,7 +587,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventBweUpdateDelayBased) { history_.push_back(events[i]->Copy()); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& bwe_delay_updates = parsed_log_.bwe_delay_updates(); @@ -554,6 +599,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventBweUpdateDelayBased) { } TEST_P(RtcEventLogEncoderTest, RtcEventBweUpdateLossBased) { + std::unique_ptr encoder = CreateEncoder(); std::vector> events(event_count_); for (size_t i = 0; i < event_count_; ++i) { events[i] = (i == 0 || !force_repeated_fields_) @@ -562,7 +608,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventBweUpdateLossBased) { history_.push_back(events[i]->Copy()); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& bwe_loss_updates = parsed_log_.bwe_loss_updates(); @@ -577,6 +623,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventGenericPacketReceived) { if (encoding_type_ == RtcEventLog::EncodingType::Legacy) { return; } + std::unique_ptr encoder = CreateEncoder(); std::vector> events( event_count_); for (size_t i = 0; i < event_count_; ++i) { @@ -586,7 +633,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventGenericPacketReceived) { history_.push_back(events[i]->Copy()); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& packets_received = parsed_log_.generic_packets_received(); @@ -602,6 +649,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventGenericPacketSent) { if (encoding_type_ == RtcEventLog::EncodingType::Legacy) { return; } + std::unique_ptr encoder = CreateEncoder(); std::vector> events(event_count_); for (size_t i = 0; i < event_count_; ++i) { events[i] = (i == 0 || !force_repeated_fields_) @@ -610,7 +658,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventGenericPacketSent) { history_.push_back(events[i]->Copy()); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& packets_sent = parsed_log_.generic_packets_sent(); @@ -625,6 +673,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventGenericAcksReceived) { if (encoding_type_ == RtcEventLog::EncodingType::Legacy) { return; } + std::unique_ptr encoder = CreateEncoder(); std::vector> events(event_count_); for (size_t i = 0; i < 
event_count_; ++i) { events[i] = (i == 0 || !force_repeated_fields_) @@ -633,7 +682,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventGenericAcksReceived) { history_.push_back(events[i]->Copy()); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& decoded_events = parsed_log_.generic_acks_received(); @@ -645,6 +694,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventGenericAcksReceived) { } TEST_P(RtcEventLogEncoderTest, RtcEventDtlsTransportState) { + std::unique_ptr encoder = CreateEncoder(); std::vector> events(event_count_); for (size_t i = 0; i < event_count_; ++i) { events[i] = (i == 0 || !force_repeated_fields_) @@ -653,7 +703,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventDtlsTransportState) { history_.push_back(events[i]->Copy()); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& dtls_transport_states = parsed_log_.dtls_transport_states(); @@ -670,6 +720,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventDtlsTransportState) { } TEST_P(RtcEventLogEncoderTest, RtcEventDtlsWritableState) { + std::unique_ptr encoder = CreateEncoder(); std::vector> events(event_count_); for (size_t i = 0; i < event_count_; ++i) { events[i] = (i == 0 || !force_repeated_fields_) @@ -678,7 +729,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventDtlsWritableState) { history_.push_back(events[i]->Copy()); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& dtls_writable_states = parsed_log_.dtls_writable_states(); @@ -696,6 +747,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventDtlsWritableState) { } TEST_P(RtcEventLogEncoderTest, RtcEventFrameDecoded) { + std::unique_ptr encoder = CreateEncoder(); // SSRCs will be randomly assigned out of this small pool, significant only // in that it also covers such edge cases as SSRC = 0 and SSRC = 0xffffffff. // The pool is intentionally small, so as to produce collisions. @@ -715,7 +767,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventFrameDecoded) { original_events_by_ssrc[ssrc].push_back(std::move(event)); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); auto status = parsed_log_.ParseString(encoded_); if (!status.ok()) RTC_LOG(LS_ERROR) << status.message(); @@ -751,11 +803,12 @@ TEST_P(RtcEventLogEncoderTest, RtcEventFrameDecoded) { // TODO(eladalon/terelius): Test with multiple events in the batch. TEST_P(RtcEventLogEncoderTest, RtcEventIceCandidatePairConfig) { + std::unique_ptr encoder = CreateEncoder(); std::unique_ptr event = gen_.NewIceCandidatePairConfig(); history_.push_back(event->Copy()); - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& ice_candidate_pair_configs = parsed_log_.ice_candidate_pair_configs(); @@ -767,10 +820,11 @@ TEST_P(RtcEventLogEncoderTest, RtcEventIceCandidatePairConfig) { // TODO(eladalon/terelius): Test with multiple events in the batch. 
TEST_P(RtcEventLogEncoderTest, RtcEventIceCandidatePair) { + std::unique_ptr encoder = CreateEncoder(); std::unique_ptr event = gen_.NewIceCandidatePair(); history_.push_back(event->Copy()); - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& ice_candidate_pair_events = parsed_log_.ice_candidate_pair_events(); @@ -781,11 +835,12 @@ TEST_P(RtcEventLogEncoderTest, RtcEventIceCandidatePair) { } TEST_P(RtcEventLogEncoderTest, RtcEventLoggingStarted) { + std::unique_ptr encoder = CreateEncoder(); const int64_t timestamp_ms = prng_.Rand(1'000'000'000); const int64_t utc_time_ms = prng_.Rand(1'000'000'000); // Overwrite the previously encoded LogStart event. - encoded_ = encoder_->EncodeLogStart(timestamp_ms * 1000, utc_time_ms * 1000); + encoded_ = encoder->EncodeLogStart(timestamp_ms * 1000, utc_time_ms * 1000); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& start_log_events = parsed_log_.start_log_events(); @@ -795,16 +850,17 @@ TEST_P(RtcEventLogEncoderTest, RtcEventLoggingStarted) { } TEST_P(RtcEventLogEncoderTest, RtcEventLoggingStopped) { + std::unique_ptr encoder = CreateEncoder(); const int64_t start_timestamp_ms = prng_.Rand(1'000'000'000); const int64_t start_utc_time_ms = prng_.Rand(1'000'000'000); // Overwrite the previously encoded LogStart event. - encoded_ = encoder_->EncodeLogStart(start_timestamp_ms * 1000, - start_utc_time_ms * 1000); + encoded_ = encoder->EncodeLogStart(start_timestamp_ms * 1000, + start_utc_time_ms * 1000); const int64_t stop_timestamp_ms = prng_.Rand(start_timestamp_ms, 2'000'000'000); - encoded_ += encoder_->EncodeLogEnd(stop_timestamp_ms * 1000); + encoded_ += encoder->EncodeLogEnd(stop_timestamp_ms * 1000); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& stop_log_events = parsed_log_.stop_log_events(); @@ -814,11 +870,12 @@ TEST_P(RtcEventLogEncoderTest, RtcEventLoggingStopped) { // TODO(eladalon/terelius): Test with multiple events in the batch. TEST_P(RtcEventLogEncoderTest, RtcEventProbeClusterCreated) { + std::unique_ptr encoder = CreateEncoder(); std::unique_ptr event = gen_.NewProbeClusterCreated(); history_.push_back(event->Copy()); - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& bwe_probe_cluster_created_events = parsed_log_.bwe_probe_cluster_created_events(); @@ -830,11 +887,12 @@ TEST_P(RtcEventLogEncoderTest, RtcEventProbeClusterCreated) { // TODO(eladalon/terelius): Test with multiple events in the batch. TEST_P(RtcEventLogEncoderTest, RtcEventProbeResultFailure) { + std::unique_ptr encoder = CreateEncoder(); std::unique_ptr event = gen_.NewProbeResultFailure(); history_.push_back(event->Copy()); - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& bwe_probe_failure_events = parsed_log_.bwe_probe_failure_events(); @@ -845,11 +903,12 @@ TEST_P(RtcEventLogEncoderTest, RtcEventProbeResultFailure) { // TODO(eladalon/terelius): Test with multiple events in the batch. 
TEST_P(RtcEventLogEncoderTest, RtcEventProbeResultSuccess) { + std::unique_ptr encoder = CreateEncoder(); std::unique_ptr event = gen_.NewProbeResultSuccess(); history_.push_back(event->Copy()); - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& bwe_probe_success_events = parsed_log_.bwe_probe_success_events(); @@ -864,6 +923,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpPacketIncoming) { // As a work around, we're removing duplicates in the parser. return; } + std::unique_ptr encoder = CreateEncoder(); std::vector> events(event_count_); for (size_t i = 0; i < event_count_; ++i) { @@ -873,7 +933,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpPacketIncoming) { history_.push_back(events[i]->Copy()); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& incoming_rtcp_packets = parsed_log_.incoming_rtcp_packets(); @@ -886,6 +946,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpPacketIncoming) { } TEST_P(RtcEventLogEncoderTest, RtcEventRtcpPacketOutgoing) { + std::unique_ptr encoder = CreateEncoder(); std::vector> events(event_count_); for (size_t i = 0; i < event_count_; ++i) { events[i] = (i == 0 || !force_repeated_fields_) @@ -894,7 +955,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpPacketOutgoing) { history_.push_back(events[i]->Copy()); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& outgoing_rtcp_packets = parsed_log_.outgoing_rtcp_packets(); @@ -911,16 +972,18 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpReceiverReport) { return; } - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; fake_clock.SetTime(Timestamp::Millis(prng_.Rand())); + std::unique_ptr encoder = CreateEncoder(); + for (auto direction : {kIncomingPacket, kOutgoingPacket}) { std::vector events(event_count_); std::vector timestamps_ms(event_count_); for (size_t i = 0; i < event_count_; ++i) { - timestamps_ms[i] = rtc::TimeMillis(); + timestamps_ms[i] = TimeMillis(); events[i] = gen_.NewReceiverReport(); - rtc::Buffer buffer = events[i].Build(); + Buffer buffer = events[i].Build(); if (direction == kIncomingPacket) { history_.push_back( std::make_unique(buffer)); @@ -931,7 +994,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpReceiverReport) { fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000))); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& receiver_reports = parsed_log_.receiver_reports(direction); @@ -949,16 +1012,18 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpSenderReport) { return; } - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; fake_clock.SetTime(Timestamp::Millis(prng_.Rand())); + std::unique_ptr encoder = CreateEncoder(); + for (auto direction : {kIncomingPacket, kOutgoingPacket}) { std::vector events(event_count_); std::vector timestamps_ms(event_count_); for (size_t i = 0; i < event_count_; ++i) { - timestamps_ms[i] = rtc::TimeMillis(); + timestamps_ms[i] = TimeMillis(); events[i] = gen_.NewSenderReport(); - rtc::Buffer buffer = events[i].Build(); + 
Buffer buffer = events[i].Build(); if (direction == kIncomingPacket) { history_.push_back( std::make_unique(buffer)); @@ -969,7 +1034,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpSenderReport) { fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000))); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& sender_reports = parsed_log_.sender_reports(direction); @@ -987,16 +1052,18 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpExtendedReports) { return; } - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; fake_clock.SetTime(Timestamp::Millis(prng_.Rand())); + std::unique_ptr encoder = CreateEncoder(); + for (auto direction : {kIncomingPacket, kOutgoingPacket}) { std::vector events(event_count_); std::vector timestamps_ms(event_count_); for (size_t i = 0; i < event_count_; ++i) { - timestamps_ms[i] = rtc::TimeMillis(); + timestamps_ms[i] = TimeMillis(); events[i] = gen_.NewExtendedReports(); - rtc::Buffer buffer = events[i].Build(); + Buffer buffer = events[i].Build(); if (direction == kIncomingPacket) { history_.push_back( std::make_unique(buffer)); @@ -1007,7 +1074,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpExtendedReports) { fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000))); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& extended_reports = parsed_log_.extended_reports(direction); @@ -1025,16 +1092,18 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpFir) { return; } - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; fake_clock.SetTime(Timestamp::Millis(prng_.Rand())); + std::unique_ptr encoder = CreateEncoder(); + for (auto direction : {kIncomingPacket, kOutgoingPacket}) { std::vector events(event_count_); std::vector timestamps_ms(event_count_); for (size_t i = 0; i < event_count_; ++i) { - timestamps_ms[i] = rtc::TimeMillis(); + timestamps_ms[i] = TimeMillis(); events[i] = gen_.NewFir(); - rtc::Buffer buffer = events[i].Build(); + Buffer buffer = events[i].Build(); if (direction == kIncomingPacket) { history_.push_back( std::make_unique(buffer)); @@ -1045,7 +1114,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpFir) { fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000))); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& firs = parsed_log_.firs(direction); @@ -1062,16 +1131,18 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpPli) { return; } - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; fake_clock.SetTime(Timestamp::Millis(prng_.Rand())); + std::unique_ptr encoder = CreateEncoder(); + for (auto direction : {kIncomingPacket, kOutgoingPacket}) { std::vector events(event_count_); std::vector timestamps_ms(event_count_); for (size_t i = 0; i < event_count_; ++i) { - timestamps_ms[i] = rtc::TimeMillis(); + timestamps_ms[i] = TimeMillis(); events[i] = gen_.NewPli(); - rtc::Buffer buffer = events[i].Build(); + Buffer buffer = events[i].Build(); if (direction == kIncomingPacket) { history_.push_back( std::make_unique(buffer)); @@ -1082,7 +1153,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpPli) { fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 
1000))); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& plis = parsed_log_.plis(direction); @@ -1099,16 +1170,18 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpBye) { return; } - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; fake_clock.SetTime(Timestamp::Millis(prng_.Rand())); + std::unique_ptr encoder = CreateEncoder(); + for (auto direction : {kIncomingPacket, kOutgoingPacket}) { std::vector events(event_count_); std::vector timestamps_ms(event_count_); for (size_t i = 0; i < event_count_; ++i) { - timestamps_ms[i] = rtc::TimeMillis(); + timestamps_ms[i] = TimeMillis(); events[i] = gen_.NewBye(); - rtc::Buffer buffer = events[i].Build(); + Buffer buffer = events[i].Build(); if (direction == kIncomingPacket) { history_.push_back( std::make_unique(buffer)); @@ -1119,7 +1192,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpBye) { fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000))); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& byes = parsed_log_.byes(direction); @@ -1136,16 +1209,18 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpNack) { return; } - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; fake_clock.SetTime(Timestamp::Millis(prng_.Rand())); + std::unique_ptr encoder = CreateEncoder(); + for (auto direction : {kIncomingPacket, kOutgoingPacket}) { std::vector events(event_count_); std::vector timestamps_ms(event_count_); for (size_t i = 0; i < event_count_; ++i) { - timestamps_ms[i] = rtc::TimeMillis(); + timestamps_ms[i] = TimeMillis(); events[i] = gen_.NewNack(); - rtc::Buffer buffer = events[i].Build(); + Buffer buffer = events[i].Build(); if (direction == kIncomingPacket) { history_.push_back( std::make_unique(buffer)); @@ -1156,7 +1231,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpNack) { fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000))); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& nacks = parsed_log_.nacks(direction); @@ -1173,16 +1248,18 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpRemb) { return; } - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; fake_clock.SetTime(Timestamp::Millis(prng_.Rand())); + std::unique_ptr encoder = CreateEncoder(); + for (auto direction : {kIncomingPacket, kOutgoingPacket}) { std::vector events(event_count_); std::vector timestamps_ms(event_count_); for (size_t i = 0; i < event_count_; ++i) { - timestamps_ms[i] = rtc::TimeMillis(); + timestamps_ms[i] = TimeMillis(); events[i] = gen_.NewRemb(); - rtc::Buffer buffer = events[i].Build(); + Buffer buffer = events[i].Build(); if (direction == kIncomingPacket) { history_.push_back( std::make_unique(buffer)); @@ -1193,7 +1270,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpRemb) { fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000))); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& rembs = parsed_log_.rembs(direction); @@ -1210,17 +1287,19 @@ TEST_P(RtcEventLogEncoderTest, 
RtcEventRtcpTransportFeedback) { return; } - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; fake_clock.SetTime(Timestamp::Millis(prng_.Rand())); + std::unique_ptr encoder = CreateEncoder(); + for (auto direction : {kIncomingPacket, kOutgoingPacket}) { std::vector events; events.reserve(event_count_); std::vector timestamps_ms(event_count_); for (size_t i = 0; i < event_count_; ++i) { - timestamps_ms[i] = rtc::TimeMillis(); + timestamps_ms[i] = TimeMillis(); events.emplace_back(gen_.NewTransportFeedback()); - rtc::Buffer buffer = events[i].Build(); + Buffer buffer = events[i].Build(); if (direction == kIncomingPacket) { history_.push_back( std::make_unique(buffer)); @@ -1231,7 +1310,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpTransportFeedback) { fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000))); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& transport_feedbacks = @@ -1250,17 +1329,19 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpLossNotification) { return; } - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; fake_clock.SetTime(Timestamp::Millis(prng_.Rand())); + std::unique_ptr encoder = CreateEncoder(); + for (auto direction : {kIncomingPacket, kOutgoingPacket}) { std::vector events; events.reserve(event_count_); std::vector timestamps_ms(event_count_); for (size_t i = 0; i < event_count_; ++i) { - timestamps_ms[i] = rtc::TimeMillis(); + timestamps_ms[i] = TimeMillis(); events.emplace_back(gen_.NewLossNotification()); - rtc::Buffer buffer = events[i].Build(); + Buffer buffer = events[i].Build(); if (direction == kIncomingPacket) { history_.push_back( std::make_unique(buffer)); @@ -1271,7 +1352,7 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpLossNotification) { fake_clock.AdvanceTime(TimeDelta::Millis(prng_.Rand(0, 1000))); } - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& loss_notifications = parsed_log_.loss_notifications(direction); @@ -1285,36 +1366,43 @@ TEST_P(RtcEventLogEncoderTest, RtcEventRtcpLossNotification) { } TEST_P(RtcEventLogEncoderTest, RtcEventRtpPacketIncoming) { - TestRtpPackets(); + std::unique_ptr encoder = CreateEncoder(); + TestRtpPackets(*encoder); } TEST_P(RtcEventLogEncoderTest, RtcEventRtpPacketOutgoing) { - TestRtpPackets(); + std::unique_ptr encoder = CreateEncoder(); + TestRtpPackets(*encoder); } TEST_P(RtcEventLogEncoderTest, RtcEventRtpPacketIncomingNoDependencyDescriptor) { - test::ScopedFieldTrials no_dd( + ExplicitKeyValueConfig no_dd( "WebRTC-RtcEventLogEncodeDependencyDescriptor/Disabled/"); - TestRtpPackets(); + std::unique_ptr encoder = CreateEncoder(no_dd); + verifier_.ExpectDependencyDescriptorExtensionIsSet(false); + TestRtpPackets(*encoder); } TEST_P(RtcEventLogEncoderTest, RtcEventRtpPacketOutgoingNoDependencyDescriptor) { - test::ScopedFieldTrials no_dd( + ExplicitKeyValueConfig no_dd( "WebRTC-RtcEventLogEncodeDependencyDescriptor/Disabled/"); - TestRtpPackets(); + std::unique_ptr encoder = CreateEncoder(no_dd); + verifier_.ExpectDependencyDescriptorExtensionIsSet(false); + TestRtpPackets(*encoder); } // TODO(eladalon/terelius): Test with multiple events in the batch. 
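
The two tests above drop the process-wide test::ScopedFieldTrials override in favor of a test::ExplicitKeyValueConfig passed directly to the new encoder constructor. A minimal usage sketch with the headers and types named in this patch (the helper function is illustrative); the config only needs to outlive the constructor call, since the encoder copies the two flags it reads from it:

#include <memory>

#include "logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.h"
#include "test/explicit_key_value_config.h"

// Sketch: build an encoder with dependency-descriptor encoding disabled via
// an explicit trial string instead of a global field-trial override.
std::unique_ptr<webrtc::RtcEventLogEncoder> MakeEncoderWithoutDd() {
  webrtc::test::ExplicitKeyValueConfig no_dd(
      "WebRTC-RtcEventLogEncodeDependencyDescriptor/Disabled/");
  return std::make_unique<webrtc::RtcEventLogEncoderNewFormat>(no_dd);
}
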
TEST_P(RtcEventLogEncoderTest, RtcEventVideoReceiveStreamConfig) { + std::unique_ptr encoder = CreateEncoder(); uint32_t ssrc = prng_.Rand(); RtpHeaderExtensionMap extensions = gen_.NewRtpHeaderExtensionMap(); std::unique_ptr event = gen_.NewVideoReceiveStreamConfig(ssrc, extensions); history_.push_back(event->Copy()); - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& video_recv_configs = parsed_log_.video_recv_configs(); @@ -1324,13 +1412,14 @@ TEST_P(RtcEventLogEncoderTest, RtcEventVideoReceiveStreamConfig) { // TODO(eladalon/terelius): Test with multiple events in the batch. TEST_P(RtcEventLogEncoderTest, RtcEventVideoSendStreamConfig) { + std::unique_ptr encoder = CreateEncoder(); uint32_t ssrc = prng_.Rand(); RtpHeaderExtensionMap extensions = gen_.NewRtpHeaderExtensionMap(); std::unique_ptr event = gen_.NewVideoSendStreamConfig(ssrc, extensions); history_.push_back(event->Copy()); - encoded_ += encoder_->EncodeBatch(history_.begin(), history_.end()); + encoded_ += encoder->EncodeBatch(history_.begin(), history_.end()); ASSERT_TRUE(parsed_log_.ParseString(encoded_).ok()); const auto& video_send_configs = parsed_log_.video_send_configs(); @@ -1357,14 +1446,14 @@ class RtcEventLogEncoderSimpleTest encoder_ = std::make_unique(); break; case RtcEventLog::EncodingType::NewFormat: - encoder_ = std::make_unique(); + encoder_ = std::make_unique( + ExplicitKeyValueConfig("")); break; case RtcEventLog::EncodingType::ProtoFree: encoder_ = std::make_unique(); break; } - encoded_ = - encoder_->EncodeLogStart(rtc::TimeMillis(), rtc::TimeUTCMillis()); + encoded_ = encoder_->EncodeLogStart(TimeMillis(), TimeUTCMillis()); } ~RtcEventLogEncoderSimpleTest() override = default; @@ -1377,7 +1466,7 @@ class RtcEventLogEncoderSimpleTest TEST_P(RtcEventLogEncoderSimpleTest, RtcEventLargeCompoundRtcpPacketIncoming) { // Create a compound packet containing multiple Bye messages. 
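The RtcEventLargeCompoundRtcpPacketIncoming test whose body continues below builds one large compound packet by serializing several rtcp::Bye messages into a single Buffer at a running write index. A minimal standalone version of that append-at-offset pattern, with a fake payload standing in for real RTCP serialization, could look like this (illustration only, not the actual test code):

```cpp
// Sketch only: append several independently serialized blocks into one
// contiguous "compound" buffer, tracking the running write offset. The
// payload below is a stand-in; the real test serializes rtcp::Bye packets.
#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

// Stand-in for a serialized RTCP sub-packet.
std::vector<uint8_t> SerializeFakeSubPacket(uint32_t ssrc) {
  std::vector<uint8_t> out(8);
  out[0] = 0x80;  // placeholder header byte
  out[1] = 203;   // RTCP BYE payload type, for flavor
  std::memcpy(&out[4], &ssrc, sizeof(ssrc));
  return out;
}

int main() {
  std::vector<uint8_t> compound;
  size_t index = 0;  // running write offset, mirrors the test's `index`
  for (int i = 0; i < 8; ++i) {
    std::vector<uint8_t> sub = SerializeFakeSubPacket(/*ssrc=*/1000 + i);
    compound.resize(index + sub.size());
    std::memcpy(compound.data() + index, sub.data(), sub.size());
    index += sub.size();
  }
  std::cout << "compound packet size: " << compound.size() << " bytes\n";  // 64
}
```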
- rtc::Buffer packet; + Buffer packet; size_t index = 0; for (int i = 0; i < 8; i++) { rtcp::Bye bye; diff --git a/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.cc b/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.cc index 131aae1de8..2a5ed41278 100644 --- a/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.cc +++ b/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.cc @@ -10,12 +10,15 @@ #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.h" +#include +#include +#include +#include #include #include -#include "absl/types/optional.h" -#include "logging/rtc_event_log/encoder/rtc_event_log_encoder_common.h" -#include "logging/rtc_event_log/encoder/var_int.h" +#include "api/rtc_event_log/rtc_event.h" +#include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_alr_state.h" #include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h" #include "logging/rtc_event_log/events/rtc_event_audio_playout.h" @@ -45,7 +48,6 @@ #include "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h" #include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" #include "rtc_base/checks.h" -#include "rtc_base/logging.h" namespace webrtc { diff --git a/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.h b/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.h index cb796ec562..d5d2b6e342 100644 --- a/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.h +++ b/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.h @@ -11,14 +11,16 @@ #ifndef LOGGING_RTC_EVENT_LOG_ENCODER_RTC_EVENT_LOG_ENCODER_V3_H_ #define LOGGING_RTC_EVENT_LOG_ENCODER_RTC_EVENT_LOG_ENCODER_V3_H_ +#include #include +#include #include #include #include #include "api/array_view.h" +#include "api/rtc_event_log/rtc_event.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder.h" -#include "logging/rtc_event_log/events/rtc_event_definition.h" namespace webrtc { @@ -37,7 +39,7 @@ class RtcEventLogEncoderV3 final : public RtcEventLogEncoder { private: std::map)>> + std::function)>> encoders_; }; diff --git a/logging/rtc_event_log/encoder/var_int.cc b/logging/rtc_event_log/encoder/var_int.cc index a84a233d6b..da88bd9ccc 100644 --- a/logging/rtc_event_log/encoder/var_int.cc +++ b/logging/rtc_event_log/encoder/var_int.cc @@ -10,6 +10,12 @@ #include "logging/rtc_event_log/encoder/var_int.h" +#include +#include +#include +#include + +#include "absl/strings/string_view.h" #include "rtc_base/bitstream_reader.h" #include "rtc_base/checks.h" diff --git a/logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.cc b/logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.cc index 0c93e6226d..fa3fadde9b 100644 --- a/logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.cc +++ b/logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.cc @@ -11,10 +11,12 @@ #include "logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h" #include +#include +#include -#include "absl/types/optional.h" #include "api/array_view.h" #include "logging/rtc_event_log/events/rtc_event_field_extraction.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" using webrtc_event_logging::MaxUnsignedValueOfBitWidth; @@ -27,7 +29,7 @@ namespace webrtc { FixedLengthEncodingParametersV3 FixedLengthEncodingParametersV3::CalculateParameters( uint64_t base, - const rtc::ArrayView values, + const ArrayView values, uint64_t value_bit_width, bool values_optional) { // As a special case, if all of the elements are identical to the 
base @@ -89,8 +91,8 @@ FixedLengthEncodingParametersV3::CalculateParameters( // equal". RTC_DCHECK(!use_signed_deltas || delta_bit_width < 64); - RTC_DCHECK(ValidParameters(delta_bit_width, use_signed_deltas, - values_optional, value_bit_width)); + RTC_DCHECK( + ValidParameters(delta_bit_width, use_signed_deltas, value_bit_width)); return FixedLengthEncodingParametersV3(delta_bit_width, use_signed_deltas, values_optional, value_bit_width); } @@ -108,7 +110,7 @@ uint64_t FixedLengthEncodingParametersV3::DeltaHeaderAsInt() const { return header; } -absl::optional +std::optional FixedLengthEncodingParametersV3::ParseDeltaHeader(uint64_t header, uint64_t value_bit_width) { uint64_t delta_bit_width = (header & ((1u << 6) - 1)) + 1; @@ -117,17 +119,16 @@ FixedLengthEncodingParametersV3::ParseDeltaHeader(uint64_t header, if (header >= (1u << 8)) { RTC_LOG(LS_ERROR) << "Failed to parse delta header; unread bits remaining."; - return absl::nullopt; + return std::nullopt; } - if (!ValidParameters(delta_bit_width, signed_deltas, values_optional, - value_bit_width)) { + if (!ValidParameters(delta_bit_width, signed_deltas, value_bit_width)) { RTC_LOG(LS_ERROR) << "Failed to parse delta header. Invalid combination of " "values: delta_bit_width=" << delta_bit_width << " signed_deltas=" << signed_deltas << " values_optional=" << values_optional << " value_bit_width=" << value_bit_width; - return absl::nullopt; + return std::nullopt; } return FixedLengthEncodingParametersV3(delta_bit_width, signed_deltas, diff --git a/logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h b/logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h index 666fae1c63..9e4f5298f4 100644 --- a/logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h +++ b/logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h @@ -11,7 +11,9 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_FIXED_LENGTH_ENCODING_PARAMETERS_V3_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_FIXED_LENGTH_ENCODING_PARAMETERS_V3_H_ -#include "absl/types/optional.h" +#include +#include + #include "api/array_view.h" #include "logging/rtc_event_log/events/rtc_event_field_extraction.h" @@ -23,7 +25,6 @@ class FixedLengthEncodingParametersV3 final { public: static bool ValidParameters(uint64_t delta_bit_width, bool signed_deltas, - bool values_optional, uint64_t value_bit_width) { return (1 <= delta_bit_width && delta_bit_width <= 64 && 1 <= value_bit_width && value_bit_width <= 64 && @@ -33,10 +34,10 @@ class FixedLengthEncodingParametersV3 final { static FixedLengthEncodingParametersV3 CalculateParameters( uint64_t base, - rtc::ArrayView values, + ArrayView values, uint64_t value_bit_width, bool values_optional); - static absl::optional ParseDeltaHeader( + static std::optional ParseDeltaHeader( uint64_t header, uint64_t value_bit_width); diff --git a/logging/rtc_event_log/events/logged_rtp_rtcp.h b/logging/rtc_event_log/events/logged_rtp_rtcp.h index fe5fcfd765..f52664e37b 100644 --- a/logging/rtc_event_log/events/logged_rtp_rtcp.h +++ b/logging/rtc_event_log/events/logged_rtp_rtcp.h @@ -11,13 +11,16 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_LOGGED_RTP_RTCP_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_LOGGED_RTP_RTCP_H_ -#include +#include +#include +#include #include #include "absl/strings/string_view.h" #include "api/rtp_headers.h" #include "api/units/timestamp.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" #include 
"modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" #include "modules/rtp_rtcp/source/rtcp_packet/fir.h" #include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" @@ -230,6 +233,20 @@ struct LoggedRtcpPacketTransportFeedback { rtcp::TransportFeedback transport_feedback; }; +struct LoggedRtcpCongestionControlFeedback { + LoggedRtcpCongestionControlFeedback( + Timestamp timestamp, + const rtcp::CongestionControlFeedback& congestion_feedback) + : timestamp(timestamp), congestion_feedback(congestion_feedback) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + Timestamp log_time() const { return timestamp; } + + Timestamp timestamp; + rtcp::CongestionControlFeedback congestion_feedback; +}; + struct LoggedRtcpPacketLossNotification { LoggedRtcpPacketLossNotification() = default; LoggedRtcpPacketLossNotification( diff --git a/logging/rtc_event_log/events/rtc_event_alr_state.cc b/logging/rtc_event_log/events/rtc_event_alr_state.cc index 25941eb16b..20fd1fd5d2 100644 --- a/logging/rtc_event_log/events/rtc_event_alr_state.cc +++ b/logging/rtc_event_log/events/rtc_event_alr_state.cc @@ -10,7 +10,14 @@ #include "logging/rtc_event_log/events/rtc_event_alr_state.h" +#include +#include + #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "api/rtc_event_log/rtc_event.h" +#include "logging/rtc_event_log/events/rtc_event_definition.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { constexpr RtcEvent::Type RtcEventAlrState::kType; diff --git a/logging/rtc_event_log/events/rtc_event_alr_state.h b/logging/rtc_event_log/events/rtc_event_alr_state.h index 9f595ecd90..17aa2c5ed6 100644 --- a/logging/rtc_event_log/events/rtc_event_alr_state.h +++ b/logging/rtc_event_log/events/rtc_event_alr_state.h @@ -11,17 +11,18 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ALR_STATE_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ALR_STATE_H_ +#include #include #include #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_definition.h" #include "logging/rtc_event_log/events/rtc_event_field_encoding.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" -#include "logging/rtc_event_log/events/rtc_event_field_extraction.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -52,7 +53,7 @@ class RtcEventAlrState final : public RtcEvent { bool in_alr() const { return in_alr_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView batch) { return RtcEventAlrState::definition_.EncodeBatch(batch); } diff --git a/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.cc b/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.cc index 5f2d55c357..a05e4b4247 100644 --- a/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.cc +++ b/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.cc @@ -10,9 +10,11 @@ #include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h" +#include #include #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" #include "rtc_base/checks.h" diff --git a/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h 
b/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h index d4cae3abfa..7127a94bc9 100644 --- a/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h +++ b/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h @@ -11,14 +11,16 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_NETWORK_ADAPTATION_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_NETWORK_ADAPTATION_H_ +#include #include #include #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" namespace webrtc { @@ -54,15 +56,15 @@ class RtcEventAudioNetworkAdaptation final : public RtcEvent { const AudioEncoderRuntimeConfig& config() const { return *config_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_audio_playout.cc b/logging/rtc_event_log/events/rtc_event_audio_playout.cc index 21a3f9266c..2bc0d71d4c 100644 --- a/logging/rtc_event_log/events/rtc_event_audio_playout.cc +++ b/logging/rtc_event_log/events/rtc_event_audio_playout.cc @@ -10,7 +10,12 @@ #include "logging/rtc_event_log/events/rtc_event_audio_playout.h" +#include +#include + #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" +#include "logging/rtc_event_log/events/rtc_event_definition.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_audio_playout.h b/logging/rtc_event_log/events/rtc_event_audio_playout.h index 196c3ca247..12643f36a4 100644 --- a/logging/rtc_event_log/events/rtc_event_audio_playout.h +++ b/logging/rtc_event_log/events/rtc_event_audio_playout.h @@ -19,9 +19,12 @@ #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_definition.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -52,7 +55,7 @@ class RtcEventAudioPlayout final : public RtcEvent { uint32_t ssrc() const { return ssrc_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView batch) { return RtcEventAudioPlayout::definition_.EncodeBatch(batch); } diff --git a/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.cc b/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.cc index 87caaff098..0025d3f4f6 100644 --- a/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.cc +++ b/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.cc @@ -10,9 +10,11 @@ #include "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h" +#include #include #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" #include "logging/rtc_event_log/rtc_stream_config.h" 
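Stepping back to the ParseDeltaHeader() change in fixed_length_encoding_parameters_v3.cc a few files above: the visible code reads a delta bit width from the low 6 bits of an 8-bit header and rejects anything with bits set above bit 7. The sketch below packs and unpacks such a header. The placement of the signed-deltas and values-optional flags in bits 6 and 7 is an assumption consistent with that 8-bit check, not something the diff spells out.

```cpp
// Sketch of an 8-bit delta header of the kind ParseDeltaHeader() reads:
// the low 6 bits store (delta_bit_width - 1); the remaining two bits are
// taken here to be the signed-deltas and values-optional flags. The flag
// positions are an assumption for illustration; only the 6-bit width field
// and the "fits in 8 bits" check are visible in the diff.
#include <cstdint>
#include <iostream>
#include <optional>

struct DeltaHeader {
  uint64_t delta_bit_width;  // 1..64
  bool signed_deltas;
  bool values_optional;
};

uint64_t PackDeltaHeader(const DeltaHeader& h) {
  uint64_t header = (h.delta_bit_width - 1) & ((1u << 6) - 1);
  header |= static_cast<uint64_t>(h.signed_deltas) << 6;    // assumed bit
  header |= static_cast<uint64_t>(h.values_optional) << 7;  // assumed bit
  return header;
}

std::optional<DeltaHeader> ParseDeltaHeader(uint64_t header) {
  if (header >= (1u << 8)) {
    // Unread bits remaining; mirrors the error path in the real parser.
    return std::nullopt;
  }
  DeltaHeader h;
  h.delta_bit_width = (header & ((1u << 6) - 1)) + 1;
  h.signed_deltas = (header >> 6) & 1;
  h.values_optional = (header >> 7) & 1;
  return h;
}

int main() {
  DeltaHeader in{/*delta_bit_width=*/17, /*signed_deltas=*/true,
                 /*values_optional=*/false};
  std::optional<DeltaHeader> out = ParseDeltaHeader(PackDeltaHeader(in));
  std::cout << out->delta_bit_width << " " << out->signed_deltas << " "
            << out->values_optional << "\n";  // 17 1 0
}
```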
#include "rtc_base/checks.h" diff --git a/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h b/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h index 9863e235af..5e74754972 100644 --- a/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h +++ b/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h @@ -11,14 +11,16 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_RECEIVE_STREAM_CONFIG_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_RECEIVE_STREAM_CONFIG_H_ +#include #include #include #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" #include "logging/rtc_event_log/rtc_stream_config.h" namespace webrtc { @@ -51,15 +53,15 @@ class RtcEventAudioReceiveStreamConfig final : public RtcEvent { const rtclog::StreamConfig& config() const { return *config_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.cc b/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.cc index 681ae11e63..36c2695e7a 100644 --- a/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.cc +++ b/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.cc @@ -10,9 +10,11 @@ #include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" +#include #include #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" #include "logging/rtc_event_log/rtc_stream_config.h" #include "rtc_base/checks.h" diff --git a/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h b/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h index 550723bcf0..861d067b03 100644 --- a/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h +++ b/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h @@ -11,13 +11,16 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_SEND_STREAM_CONFIG_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_SEND_STREAM_CONFIG_H_ +#include #include #include #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" #include "logging/rtc_event_log/rtc_stream_config.h" namespace webrtc { @@ -50,15 +53,15 @@ class RtcEventAudioSendStreamConfig final : public RtcEvent { const rtclog::StreamConfig& config() const { return *config_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // 
TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_begin_log.cc b/logging/rtc_event_log/events/rtc_event_begin_log.cc index 49b9effa9e..2bc4d7c64d 100644 --- a/logging/rtc_event_log/events/rtc_event_begin_log.cc +++ b/logging/rtc_event_log/events/rtc_event_begin_log.cc @@ -10,7 +10,17 @@ #include "logging/rtc_event_log/events/rtc_event_begin_log.h" +#include +#include +#include + #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/rtc_event_log/rtc_event.h" +#include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { constexpr RtcEvent::Type RtcEventBeginLog::kType; @@ -26,7 +36,7 @@ RtcEventBeginLog::RtcEventBeginLog(const RtcEventBeginLog& other) RtcEventBeginLog::~RtcEventBeginLog() = default; -std::string RtcEventBeginLog::Encode(rtc::ArrayView batch) { +std::string RtcEventBeginLog::Encode(ArrayView batch) { EventEncoder encoder(event_params_, batch); encoder.EncodeField( @@ -45,12 +55,12 @@ RtcEventLogParseStatus RtcEventBeginLog::Parse( if (!status.ok()) return status; - rtc::ArrayView output_batch = + ArrayView output_batch = ExtendLoggedBatch(output, parser.NumEventsInBatch()); constexpr FieldParameters timestamp_params{ "timestamp_ms", FieldParameters::kTimestampField, FieldType::kVarInt, 64}; - RtcEventLogParseStatusOr> result = + RtcEventLogParseStatusOr> result = parser.ParseNumericField(timestamp_params); if (!result.ok()) return result.status(); diff --git a/logging/rtc_event_log/events/rtc_event_begin_log.h b/logging/rtc_event_log/events/rtc_event_begin_log.h index f3b74c117e..b89fcb1506 100644 --- a/logging/rtc_event_log/events/rtc_event_begin_log.h +++ b/logging/rtc_event_log/events/rtc_event_begin_log.h @@ -11,6 +11,7 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_BEGIN_LOG_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_BEGIN_LOG_H_ +#include #include #include @@ -19,8 +20,7 @@ #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_field_encoding.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" -#include "logging/rtc_event_log/events/rtc_event_field_extraction.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -53,7 +53,7 @@ class RtcEventBeginLog final : public RtcEvent { Type GetType() const override { return kType; } bool IsConfigEvent() const override { return false; } - static std::string Encode(rtc::ArrayView batch); + static std::string Encode(ArrayView batch); static RtcEventLogParseStatus Parse(absl::string_view encoded_bytes, bool batched, diff --git a/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc b/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc index 0e98b2ff11..44c0e43ade 100644 --- a/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc +++ b/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc @@ -10,8 +10,13 @@ #include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" +#include +#include + #include "absl/memory/memory.h" -#include "api/network_state_predictor.h" +#include "api/rtc_event_log/rtc_event.h" +#include "api/transport/bandwidth_usage.h" +#include 
"logging/rtc_event_log/events/rtc_event_definition.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h b/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h index 796f119388..30c6e6b0e7 100644 --- a/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h +++ b/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h @@ -19,10 +19,15 @@ #include #include "absl/strings/string_view.h" -#include "api/network_state_predictor.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" +#include "api/transport/bandwidth_usage.h" #include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_definition.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding.h" +#include "logging/rtc_event_log/events/rtc_event_field_extraction.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" +#include "rtc_base/checks.h" namespace webrtc { @@ -99,7 +104,7 @@ class RtcEventBweUpdateDelayBased final : public RtcEvent { int32_t bitrate_bps() const { return bitrate_bps_; } BandwidthUsage detector_state() const { return detector_state_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView batch) { return RtcEventBweUpdateDelayBased::definition_.EncodeBatch(batch); } diff --git a/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.cc b/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.cc index 44524ab033..dc11471956 100644 --- a/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.cc +++ b/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.cc @@ -10,7 +10,11 @@ #include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" +#include +#include + #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h b/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h index fd41b316e0..44da6df95c 100644 --- a/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h +++ b/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h @@ -18,9 +18,10 @@ #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -63,15 +64,15 @@ class RtcEventBweUpdateLossBased final : public RtcEvent { uint8_t fraction_loss() const { return fraction_loss_; } int32_t total_packets() const { return total_packets_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_definition.h b/logging/rtc_event_log/events/rtc_event_definition.h index 8688c5fc7b..86f64d32db 100644 --- a/logging/rtc_event_log/events/rtc_event_definition.h +++ b/logging/rtc_event_log/events/rtc_event_definition.h @@ -11,18 +11,16 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DEFINITION_H_ #define 
LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DEFINITION_H_ -#include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" -#include "api/units/timestamp.h" +#include "api/rtc_event_log/rtc_event.h" #include "logging/rtc_event_log/events/rtc_event_field_encoding.h" #include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" -#include "logging/rtc_event_log/events/rtc_event_field_extraction.h" -#include "rtc_base/logging.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -37,9 +35,8 @@ struct RtcEventFieldDefinition { template class RtcEventDefinitionImpl { public: - void EncodeImpl(EventEncoder&, rtc::ArrayView) const {} - RtcEventLogParseStatus ParseImpl(EventParser&, - rtc::ArrayView) const { + void EncodeImpl(EventEncoder&, ArrayView) const {} + RtcEventLogParseStatus ParseImpl(EventParser&, ArrayView) const { return RtcEventLogParseStatus::Success(); } }; @@ -54,16 +51,15 @@ class RtcEventDefinitionImpl { : field_(field), rest_(rest...) {} void EncodeImpl(EventEncoder& encoder, - rtc::ArrayView batch) const { + ArrayView batch) const { auto values = ExtractRtcEventMember(batch, field_.event_member); encoder.EncodeField(field_.params, values); rest_.EncodeImpl(encoder, batch); } - RtcEventLogParseStatus ParseImpl( - EventParser& parser, - rtc::ArrayView output_batch) const { - RtcEventLogParseStatusOr> result = + RtcEventLogParseStatus ParseImpl(EventParser& parser, + ArrayView output_batch) const { + RtcEventLogParseStatusOr> result = parser.ParseNumericField(field_.params); if (!result.ok()) return result.status(); @@ -110,7 +106,7 @@ class RtcEventDefinition { RtcEventFieldDefinition... fields) : params_(params), fields_(fields...) 
{} - std::string EncodeBatch(rtc::ArrayView batch) const { + std::string EncodeBatch(ArrayView batch) const { EventEncoder encoder(params_, batch); fields_.EncodeImpl(encoder, batch); return encoder.AsString(); @@ -124,13 +120,13 @@ class RtcEventDefinition { if (!status.ok()) return status; - rtc::ArrayView output_batch = + ArrayView output_batch = ExtendLoggedBatch(output, parser.NumEventsInBatch()); constexpr FieldParameters timestamp_params{"timestamp_ms", FieldParameters::kTimestampField, FieldType::kVarInt, 64}; - RtcEventLogParseStatusOr> result = + RtcEventLogParseStatusOr> result = parser.ParseNumericField(timestamp_params); if (!result.ok()) return result.status(); diff --git a/logging/rtc_event_log/events/rtc_event_dtls_transport_state.cc b/logging/rtc_event_log/events/rtc_event_dtls_transport_state.cc index f00342df72..87bbe59c89 100644 --- a/logging/rtc_event_log/events/rtc_event_dtls_transport_state.cc +++ b/logging/rtc_event_log/events/rtc_event_dtls_transport_state.cc @@ -10,7 +10,11 @@ #include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" +#include + #include "absl/memory/memory.h" +#include "api/dtls_transport_interface.h" +#include "api/rtc_event_log/rtc_event.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h b/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h index b9af213256..94176f78e4 100644 --- a/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h +++ b/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h @@ -11,15 +11,17 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DTLS_TRANSPORT_STATE_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DTLS_TRANSPORT_STATE_H_ +#include #include #include #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/dtls_transport_interface.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -48,15 +50,15 @@ class RtcEventDtlsTransportState : public RtcEvent { return dtls_transport_state_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_dtls_writable_state.cc b/logging/rtc_event_log/events/rtc_event_dtls_writable_state.cc index d4cb093ce6..4fb50ebcaa 100644 --- a/logging/rtc_event_log/events/rtc_event_dtls_writable_state.cc +++ b/logging/rtc_event_log/events/rtc_event_dtls_writable_state.cc @@ -10,7 +10,10 @@ #include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h" +#include + #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h b/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h index c820f184d7..d3c42aa34a 100644 --- a/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h +++ b/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h @@ -11,14 +11,16 @@ #ifndef 
LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DTLS_WRITABLE_STATE_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DTLS_WRITABLE_STATE_H_ +#include #include #include #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -48,15 +50,15 @@ class RtcEventDtlsWritableState : public RtcEvent { bool writable() const { return writable_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_end_log.cc b/logging/rtc_event_log/events/rtc_event_end_log.cc index 52abf9e842..99c53e4dcd 100644 --- a/logging/rtc_event_log/events/rtc_event_end_log.cc +++ b/logging/rtc_event_log/events/rtc_event_end_log.cc @@ -10,7 +10,17 @@ #include "logging/rtc_event_log/events/rtc_event_end_log.h" +#include +#include +#include + #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/rtc_event_log/rtc_event.h" +#include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding.h" +#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { constexpr RtcEvent::Type RtcEventEndLog::kType; @@ -24,7 +34,7 @@ RtcEventEndLog::RtcEventEndLog(const RtcEventEndLog& other) RtcEventEndLog::~RtcEventEndLog() = default; -std::string RtcEventEndLog::Encode(rtc::ArrayView batch) { +std::string RtcEventEndLog::Encode(ArrayView batch) { EventEncoder encoder(event_params_, batch); return encoder.AsString(); } @@ -38,12 +48,12 @@ RtcEventLogParseStatus RtcEventEndLog::Parse( if (!status.ok()) return status; - rtc::ArrayView output_batch = + ArrayView output_batch = ExtendLoggedBatch(output, parser.NumEventsInBatch()); constexpr FieldParameters timestamp_params{ "timestamp_ms", FieldParameters::kTimestampField, FieldType::kVarInt, 64}; - RtcEventLogParseStatusOr> result = + RtcEventLogParseStatusOr> result = parser.ParseNumericField(timestamp_params); if (!result.ok()) return result.status(); diff --git a/logging/rtc_event_log/events/rtc_event_end_log.h b/logging/rtc_event_log/events/rtc_event_end_log.h index 79648bdb8d..6a080001fb 100644 --- a/logging/rtc_event_log/events/rtc_event_end_log.h +++ b/logging/rtc_event_log/events/rtc_event_end_log.h @@ -11,7 +11,7 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_END_LOG_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_END_LOG_H_ -#include +#include #include #include @@ -20,8 +20,7 @@ #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_field_encoding.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" -#include "logging/rtc_event_log/events/rtc_event_field_extraction.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -47,7 +46,7 @@ class RtcEventEndLog final : public RtcEvent { Type GetType() 
const override { return kType; } bool IsConfigEvent() const override { return false; } - static std::string Encode(rtc::ArrayView batch); + static std::string Encode(ArrayView batch); static RtcEventLogParseStatus Parse(absl::string_view encoded_bytes, bool batched, diff --git a/logging/rtc_event_log/events/rtc_event_field_encoding.cc b/logging/rtc_event_log/events/rtc_event_field_encoding.cc index 68188ce856..781c3927aa 100644 --- a/logging/rtc_event_log/events/rtc_event_field_encoding.cc +++ b/logging/rtc_event_log/events/rtc_event_field_encoding.cc @@ -10,16 +10,21 @@ #include "logging/rtc_event_log/events/rtc_event_field_encoding.h" -#include +#include +#include #include -#include -#include +#include +#include +#include +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/rtc_event_log/rtc_event.h" #include "logging/rtc_event_log/encoder/bit_writer.h" #include "logging/rtc_event_log/encoder/var_int.h" +#include "logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h" #include "logging/rtc_event_log/events/rtc_event_field_extraction.h" #include "rtc_base/checks.h" -#include "rtc_base/logging.h" using webrtc_event_logging::UnsignedDelta; @@ -83,7 +88,7 @@ std::string EncodeSingleValue(uint64_t value, FieldType field_type) { return std::string(); } -absl::optional ConvertFieldType(uint64_t value) { +std::optional ConvertFieldType(uint64_t value) { switch (value) { case static_cast(FieldType::kFixed8): return FieldType::kFixed8; @@ -96,13 +101,13 @@ absl::optional ConvertFieldType(uint64_t value) { case static_cast(FieldType::kString): return FieldType::kString; default: - return absl::nullopt; + return std::nullopt; } } std::string EncodeDeltasV3(FixedLengthEncodingParametersV3 params, uint64_t base, - rtc::ArrayView values) { + ArrayView values) { size_t outputbound = (values.size() * params.delta_bit_width() + 7) / 8; BitWriter writer(outputbound); @@ -137,7 +142,7 @@ std::string EncodeDeltasV3(FixedLengthEncodingParametersV3 params, } EventEncoder::EventEncoder(EventParameters params, - rtc::ArrayView batch) { + ArrayView batch) { batch_size_ = batch.size(); if (!batch.empty()) { // Encode event type. @@ -180,7 +185,7 @@ void EventEncoder::EncodeField(const FieldParameters& params, RTC_DCHECK_EQ(values.size(), batch_size_); } - if (values.size() == 0) { + if (values.empty()) { // If all values for a particular field is empty/nullopt, // then we completely skip the field even if the the batch is non-empty. return; @@ -205,9 +210,9 @@ void EventEncoder::EncodeField(const FieldParameters& params, const bool values_optional = values.size() != batch_size_; // Compute delta parameters - rtc::ArrayView all_values(values); + ArrayView all_values(values); uint64_t base = values[0]; - rtc::ArrayView remaining_values(all_values.subview(1)); + ArrayView remaining_values(all_values.subview(1)); FixedLengthEncodingParametersV3 delta_params = FixedLengthEncodingParametersV3::CalculateParameters( @@ -235,7 +240,7 @@ void EventEncoder::EncodeField(const FieldParameters& params, const std::vector& values) { RTC_DCHECK_EQ(values.size(), batch_size_); - if (values.size() == 0) { + if (values.empty()) { // If all values for a particular field is empty/nullopt, // then we completely skip the field even if the the batch is non-empty. 
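For context on the EncodeDeltasV3() hunk in rtc_event_field_encoding.cc above, which sizes its output as (values.size() * delta_bit_width + 7) / 8 and packs each delta with a bit writer: the sketch below shows the plain fixed-bit-width delta idea in isolation. The real encoder additionally handles signed deltas, optional (skipped) values and value wrap-around, none of which is modeled here, and MiniBitWriter is not rtc_base's BitWriter.

```cpp
// Sketch of fixed-bit-width delta encoding in the spirit of EncodeDeltasV3():
// each value is stored as an unsigned delta from its predecessor, packed
// MSB-first with a fixed number of bits per delta.
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

class MiniBitWriter {  // illustration only
 public:
  void WriteBits(uint64_t value, uint64_t bit_count) {
    for (uint64_t i = 0; i < bit_count; ++i) {
      // Emit MSB first.
      bits_.push_back((value >> (bit_count - 1 - i)) & 1);
    }
  }
  std::string AsString() const {
    std::string out((bits_.size() + 7) / 8, '\0');
    for (size_t i = 0; i < bits_.size(); ++i) {
      if (bits_[i]) out[i / 8] |= static_cast<char>(0x80 >> (i % 8));
    }
    return out;
  }

 private:
  std::vector<bool> bits_;
};

std::string EncodeDeltas(uint64_t base,
                         const std::vector<uint64_t>& values,
                         uint64_t delta_bit_width) {
  MiniBitWriter writer;
  uint64_t previous = base;
  for (uint64_t value : values) {
    writer.WriteBits(value - previous, delta_bit_width);  // unsigned delta
    previous = value;
  }
  return writer.AsString();
}

int main() {
  // Four values, each 1 larger than the last: 4 deltas of 1 bit each = 1 byte,
  // matching the (count * width + 7) / 8 output bound used by the real code.
  std::string encoded =
      EncodeDeltas(100, {101, 102, 103, 104}, /*delta_bit_width=*/1);
  std::cout << "encoded size: " << encoded.size() << " byte(s)\n";  // 1
}
```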
return; diff --git a/logging/rtc_event_log/events/rtc_event_field_encoding.h b/logging/rtc_event_log/events/rtc_event_field_encoding.h index 33b77b80f5..baa50b2e6d 100644 --- a/logging/rtc_event_log/events/rtc_event_field_encoding.h +++ b/logging/rtc_event_log/events/rtc_event_field_encoding.h @@ -11,16 +11,19 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_FIELD_ENCODING_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_FIELD_ENCODING_H_ +#include +#include +#include #include +#include #include -#include "absl/types/optional.h" +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" -#include "logging/rtc_event_log/encoder/rtc_event_log_encoder_common.h" #include "logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h" #include "logging/rtc_event_log/events/rtc_event_field_extraction.h" -#include "rtc_base/logging.h" +#include "rtc_base/checks.h" namespace webrtc { @@ -68,7 +71,7 @@ struct FieldParameters { // The EventEncoder is used to encode a batch of events. class EventEncoder { public: - EventEncoder(EventParameters params, rtc::ArrayView batch); + EventEncoder(EventParameters params, ArrayView batch); void EncodeField(const FieldParameters& params, const std::vector& values, @@ -91,7 +94,7 @@ class EventEncoder { std::string EncodeSingleValue(uint64_t value, FieldType field_type); std::string EncodeDeltasV3(FixedLengthEncodingParametersV3 params, uint64_t base, - rtc::ArrayView values); + ArrayView values); // Given a batch of RtcEvents and a member pointer, extract that // member from each event in the batch. Signed integer members are @@ -104,9 +107,8 @@ std::string EncodeDeltasV3(FixedLengthEncodingParametersV3 params, template ::value, bool> = true> -std::vector ExtractRtcEventMember( - rtc::ArrayView batch, - const T E::*member) { +std::vector ExtractRtcEventMember(ArrayView batch, + const T E::* member) { std::vector values; values.reserve(batch.size()); for (const RtcEvent* event : batch) { @@ -126,14 +128,14 @@ std::vector ExtractRtcEventMember( template ::value, bool> = true> -ValuesWithPositions ExtractRtcEventMember(rtc::ArrayView batch, - const absl::optional E::*member) { +ValuesWithPositions ExtractRtcEventMember(ArrayView batch, + const std::optional E::* member) { ValuesWithPositions result; result.position_mask.reserve(batch.size()); result.values.reserve(batch.size()); for (const RtcEvent* event : batch) { RTC_CHECK_EQ(event->GetType(), E::kType); - absl::optional field = static_cast(event)->*member; + std::optional field = static_cast(event)->*member; result.position_mask.push_back(field.has_value()); if (field.has_value()) { result.values.push_back(EncodeAsUnsigned(field.value())); @@ -147,9 +149,8 @@ ValuesWithPositions ExtractRtcEventMember(rtc::ArrayView batch, template ::value, bool> = true> -std::vector ExtractRtcEventMember( - rtc::ArrayView batch, - const T E::*member) { +std::vector ExtractRtcEventMember(ArrayView batch, + const T E::* member) { std::vector values; values.reserve(batch.size()); for (const RtcEvent* event : batch) { @@ -163,8 +164,8 @@ std::vector ExtractRtcEventMember( // Extract a string field from a batch of RtcEvents. 
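The ExtractRtcEventMember() overloads being touched in rtc_event_field_encoding.h all follow the same pointer-to-member pattern: a batch of base-class pointers, all known to be of one concrete event type, is walked and a single member is pulled out of each event. The standalone sketch below reproduces just that pattern with made-up class names; it is not the WebRTC event hierarchy.

```cpp
// Sketch of the pointer-to-member extraction pattern used by
// ExtractRtcEventMember(): check the dynamic type against E::kType, downcast,
// then read the member through `const T E::*`.
#include <cstdint>
#include <iostream>
#include <vector>

struct FakeEventBase {  // stand-in for RtcEvent
  enum class Type { kPlayout };
  virtual ~FakeEventBase() = default;
  virtual Type GetType() const = 0;
};

struct FakePlayoutEvent : FakeEventBase {  // stand-in for a concrete event
  static constexpr Type kType = Type::kPlayout;
  explicit FakePlayoutEvent(uint32_t ssrc) : ssrc_(ssrc) {}
  Type GetType() const override { return kType; }
  const uint32_t ssrc_;
};

// Extract `member` from every event in `batch`.
template <typename E, typename T>
std::vector<T> ExtractMember(const std::vector<const FakeEventBase*>& batch,
                             const T E::* member) {
  std::vector<T> values;
  values.reserve(batch.size());
  for (const FakeEventBase* event : batch) {
    if (event->GetType() != E::kType) continue;  // the real helper CHECKs here
    values.push_back(static_cast<const E*>(event)->*member);
  }
  return values;
}

int main() {
  FakePlayoutEvent a(111), b(222);
  std::vector<const FakeEventBase*> batch = {&a, &b};
  std::vector<uint32_t> ssrcs = ExtractMember(batch, &FakePlayoutEvent::ssrc_);
  for (uint32_t ssrc : ssrcs) std::cout << ssrc << "\n";  // 111, 222
}
```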
template std::vector ExtractRtcEventMember( - rtc::ArrayView batch, - const std::string E::*member) { + ArrayView batch, + const std::string E::* member) { std::vector values; values.reserve(batch.size()); for (const RtcEvent* event : batch) { diff --git a/logging/rtc_event_log/events/rtc_event_field_encoding_parser.cc b/logging/rtc_event_log/events/rtc_event_field_encoding_parser.cc index f0cdf8a0f7..6d5649a9ec 100644 --- a/logging/rtc_event_log/events/rtc_event_field_encoding_parser.cc +++ b/logging/rtc_event_log/events/rtc_event_field_encoding_parser.cc @@ -11,16 +11,24 @@ #include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include +#include +#include +#include +#include + #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/array_view.h" #include "logging/rtc_event_log/encoder/var_int.h" +#include "logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h" #include "logging/rtc_event_log/events/rtc_event_field_encoding.h" +#include "logging/rtc_event_log/events/rtc_event_field_extraction.h" #include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" #include "rtc_base/bitstream_reader.h" #include "rtc_base/checks.h" namespace { -absl::optional ConvertFieldType(uint64_t value) { +std::optional ConvertFieldType(uint64_t value) { switch (value) { case static_cast(webrtc::FieldType::kFixed8): return webrtc::FieldType::kFixed8; @@ -33,7 +41,7 @@ absl::optional ConvertFieldType(uint64_t value) { case static_cast(webrtc::FieldType::kString): return webrtc::FieldType::kString; default: - return absl::nullopt; + return std::nullopt; } } } // namespace @@ -201,7 +209,7 @@ RtcEventLogParseStatus EventParser::ParseNumericFieldInternal( __FILE__, __LINE__); // NB: value_bit_width may be incorrect for the field, if this isn't the // field we are looking for. - absl::optional delta_header = + std::optional delta_header = FixedLengthEncodingParametersV3::ParseDeltaHeader(header_value, value_bit_width); if (!delta_header.has_value()) { @@ -310,7 +318,7 @@ RtcEventLogParseStatus EventParser::ParseField(const FieldParameters& params) { __FILE__, __LINE__); // Split tag into field ID and field type. 
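The ParseField() hunk that follows splits a field tag as field_id = tag >> 3 and field type = tag & 7. The sketch below shows that bit layout on its own. The numeric values chosen for the enum are assumptions for illustration; only the shift-by-3 / mask-by-7 layout is taken from the diff.

```cpp
// Sketch of the field-tag layout the parser decodes: a field ID in the high
// bits and a 3-bit field type in the low bits.
#include <cstdint>
#include <iostream>
#include <optional>

enum class MiniFieldType : uint64_t {  // illustrative, numeric values assumed
  kFixed8 = 0,
  kFixed32 = 1,
  kFixed64 = 2,
  kVarInt = 3,
  kString = 4,
};

uint64_t MakeFieldTag(uint64_t field_id, MiniFieldType type) {
  return (field_id << 3) | static_cast<uint64_t>(type);
}

std::optional<MiniFieldType> ConvertFieldType(uint64_t value) {
  if (value > static_cast<uint64_t>(MiniFieldType::kString)) {
    return std::nullopt;  // unknown 3-bit type code
  }
  return static_cast<MiniFieldType>(value);
}

int main() {
  uint64_t tag = MakeFieldTag(/*field_id=*/5, MiniFieldType::kVarInt);
  uint64_t field_id = tag >> 3;                                    // 5
  std::optional<MiniFieldType> type = ConvertFieldType(tag & 7u);  // kVarInt
  std::cout << "id=" << field_id << " type=" << static_cast<uint64_t>(*type)
            << "\n";
}
```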
field_id = field_tag >> 3; - absl::optional conversion = ConvertFieldType(field_tag & 7u); + std::optional conversion = ConvertFieldType(field_tag & 7u); if (!conversion.has_value()) return RtcEventLogParseStatus::Error("Failed to parse field type", __FILE__, __LINE__); @@ -348,15 +356,15 @@ RtcEventLogParseStatus EventParser::ParseField(const FieldParameters& params) { return RtcEventLogParseStatus::Success(); } -RtcEventLogParseStatusOr> +RtcEventLogParseStatusOr> EventParser::ParseStringField(const FieldParameters& params, bool required_field) { - using StatusOr = RtcEventLogParseStatusOr>; + using StatusOr = RtcEventLogParseStatusOr>; RTC_DCHECK_EQ(params.field_type, FieldType::kString); auto status = ParseField(params); if (!status.ok()) return StatusOr(status); - rtc::ArrayView strings = GetStrings(); + ArrayView strings = GetStrings(); if (required_field && strings.size() != NumEventsInBatch()) { return StatusOr::Error("Required string field not found", __FILE__, __LINE__); @@ -364,15 +372,15 @@ EventParser::ParseStringField(const FieldParameters& params, return StatusOr(strings); } -RtcEventLogParseStatusOr> -EventParser::ParseNumericField(const FieldParameters& params, - bool required_field) { - using StatusOr = RtcEventLogParseStatusOr>; +RtcEventLogParseStatusOr> EventParser::ParseNumericField( + const FieldParameters& params, + bool required_field) { + using StatusOr = RtcEventLogParseStatusOr>; RTC_DCHECK_NE(params.field_type, FieldType::kString); auto status = ParseField(params); if (!status.ok()) return StatusOr(status); - rtc::ArrayView values = GetValues(); + ArrayView values = GetValues(); if (required_field && values.size() != NumEventsInBatch()) { return StatusOr::Error("Required numerical field not found", __FILE__, __LINE__); diff --git a/logging/rtc_event_log/events/rtc_event_field_encoding_parser.h b/logging/rtc_event_log/events/rtc_event_field_encoding_parser.h index 89dbb19298..d46c6ac5ac 100644 --- a/logging/rtc_event_log/events/rtc_event_field_encoding_parser.h +++ b/logging/rtc_event_log/events/rtc_event_field_encoding_parser.h @@ -11,20 +11,30 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_FIELD_ENCODING_PARSER_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_FIELD_ENCODING_PARSER_H_ +#include +#include +#include #include +#include #include +#include "absl/base/attributes.h" #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h" #include "logging/rtc_event_log/events/rtc_event_field_encoding.h" +#include "logging/rtc_event_log/events/rtc_event_field_extraction.h" #include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" +#include "rtc_base/checks.h" namespace webrtc { class EventParser { public: struct ValueAndPostionView { - rtc::ArrayView values; - rtc::ArrayView positions; + ArrayView values; + ArrayView positions; }; EventParser() = default; @@ -38,10 +48,10 @@ class EventParser { // other fields that may occur before it. If 'required_field == true', // then failing to find the field is an error, otherwise the functions // return success, but with an empty view of values. 
- RtcEventLogParseStatusOr> ParseStringField( + RtcEventLogParseStatusOr> ParseStringField( const FieldParameters& params, bool required_field = true); - RtcEventLogParseStatusOr> ParseNumericField( + RtcEventLogParseStatusOr> ParseNumericField( const FieldParameters& params, bool required_field = true); RtcEventLogParseStatusOr ParseOptionalNumericField( @@ -80,9 +90,9 @@ class EventParser { void SetError() { error_ = true; } bool Ok() const { return !error_; } - rtc::ArrayView GetValues() { return values_; } - rtc::ArrayView GetPositions() { return positions_; } - rtc::ArrayView GetStrings() { return strings_; } + ArrayView GetValues() { return values_; } + ArrayView GetPositions() { return positions_; } + ArrayView GetStrings() { return strings_; } void ClearTemporaries() { positions_.clear(); @@ -112,9 +122,9 @@ template ::value, bool> = true> ABSL_MUST_USE_RESULT RtcEventLogParseStatus -PopulateRtcEventMember(const rtc::ArrayView values, - T E::*member, - rtc::ArrayView output) { +PopulateRtcEventMember(const ArrayView values, + T E::* member, + ArrayView output) { size_t batch_size = values.size(); RTC_CHECK_EQ(output.size(), batch_size); for (size_t i = 0; i < batch_size; ++i) { @@ -128,10 +138,10 @@ template ::value, bool> = true> ABSL_MUST_USE_RESULT RtcEventLogParseStatus -PopulateRtcEventMember(const rtc::ArrayView positions, - const rtc::ArrayView values, - absl::optional E::*member, - rtc::ArrayView output) { +PopulateRtcEventMember(const ArrayView positions, + const ArrayView values, + std::optional E::* member, + ArrayView output) { size_t batch_size = positions.size(); RTC_CHECK_EQ(output.size(), batch_size); RTC_CHECK_LE(values.size(), batch_size); @@ -142,7 +152,7 @@ PopulateRtcEventMember(const rtc::ArrayView positions, output[i].*member = DecodeFromUnsignedToType(value_it); ++value_it; } else { - output[i].*member = absl::nullopt; + output[i].*member = std::nullopt; } } RTC_CHECK(value_it == values.end()); @@ -154,9 +164,9 @@ template ::value, bool> = true> ABSL_MUST_USE_RESULT RtcEventLogParseStatus -PopulateRtcEventMember(const rtc::ArrayView values, - T E::*member, - rtc::ArrayView output) { +PopulateRtcEventMember(const ArrayView values, + T E::* member, + ArrayView output) { size_t batch_size = values.size(); RTC_CHECK_EQ(output.size(), batch_size); for (size_t i = 0; i < batch_size; ++i) { @@ -172,9 +182,9 @@ PopulateRtcEventMember(const rtc::ArrayView values, // Same as above, but for string fields. template ABSL_MUST_USE_RESULT RtcEventLogParseStatus -PopulateRtcEventMember(const rtc::ArrayView values, - std::string E::*member, - rtc::ArrayView output) { +PopulateRtcEventMember(const ArrayView values, + std::string E::* member, + ArrayView output) { size_t batch_size = values.size(); RTC_CHECK_EQ(output.size(), batch_size); for (size_t i = 0; i < batch_size; ++i) { @@ -187,9 +197,9 @@ PopulateRtcEventMember(const rtc::ArrayView values, // N.B. Assumes that the encoded value uses millisecond precision. 
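The optional-field helpers in this header (ValueAndPostionView, the PopulateRtcEventMember overload taking positions and values) encode an optional column as a bool per batch element plus a packed vector containing only the values that were present. The standalone sketch below shows the scatter step in isolation; the real code additionally decodes each packed value back to the member's signed type and CHECKs that every value is consumed.

```cpp
// Sketch of the "positions + packed values" representation used for optional
// fields, and the scatter back into std::optional members.
#include <cstdint>
#include <iostream>
#include <optional>
#include <string>
#include <vector>

std::vector<std::optional<int64_t>> ScatterOptional(
    const std::vector<bool>& positions,
    const std::vector<uint64_t>& values) {
  std::vector<std::optional<int64_t>> output(positions.size());
  auto value_it = values.begin();
  for (size_t i = 0; i < positions.size(); ++i) {
    if (positions[i]) {
      // The real code decodes from the unsigned on-the-wire form here.
      output[i] = static_cast<int64_t>(*value_it);
      ++value_it;
    } else {
      output[i] = std::nullopt;
    }
  }
  return output;  // every packed value has been consumed at this point
}

int main() {
  std::vector<bool> positions = {true, false, true};
  std::vector<uint64_t> values = {7, 9};  // only the present elements
  for (const auto& v : ScatterOptional(positions, values)) {
    std::cout << (v ? std::to_string(*v) : "nullopt") << "\n";
  }
}
```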
template ABSL_MUST_USE_RESULT RtcEventLogParseStatus -PopulateRtcEventTimestamp(const rtc::ArrayView& values, - Timestamp E::*timestamp, - rtc::ArrayView output) { +PopulateRtcEventTimestamp(const ArrayView& values, + Timestamp E::* timestamp, + ArrayView output) { size_t batch_size = values.size(); RTC_CHECK_EQ(batch_size, output.size()); for (size_t i = 0; i < batch_size; ++i) { @@ -200,11 +210,10 @@ PopulateRtcEventTimestamp(const rtc::ArrayView& values, } template -rtc::ArrayView ExtendLoggedBatch(std::vector& output, - size_t new_elements) { +ArrayView ExtendLoggedBatch(std::vector& output, size_t new_elements) { size_t old_size = output.size(); output.insert(output.end(), old_size + new_elements, E()); - rtc::ArrayView output_batch = output; + ArrayView output_batch = output; output_batch.subview(old_size); RTC_DCHECK_EQ(output_batch.size(), new_elements); return output_batch; diff --git a/logging/rtc_event_log/events/rtc_event_field_encoding_unittest.cc b/logging/rtc_event_log/events/rtc_event_field_encoding_unittest.cc index 18beda1417..edeab13ebb 100644 --- a/logging/rtc_event_log/events/rtc_event_field_encoding_unittest.cc +++ b/logging/rtc_event_log/events/rtc_event_field_encoding_unittest.cc @@ -6,16 +6,25 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ + #include "logging/rtc_event_log/events/rtc_event_field_encoding.h" +#include +#include +#include #include -#include +#include #include +#include +#include +#include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "logging/rtc_event_log/encoder/var_int.h" #include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "rtc_base/checks.h" #include "test/gtest.h" namespace webrtc { @@ -85,12 +94,12 @@ size_t ExpectedEncodingSize(const FieldParameters& params, template ::value, bool> = true> size_t ExpectedEncodingSize(const FieldParameters& params, - const std::vector>& v, + const std::vector>& v, size_t expected_bits_per_delta) { size_t num_existing_values = - v.size() - std::count(v.begin(), v.end(), absl::nullopt); + v.size() - std::count(v.begin(), v.end(), std::nullopt); auto first_existing_value = std::find_if( - v.begin(), v.end(), [](absl::optional x) { return x.has_value(); }); + v.begin(), v.end(), [](std::optional x) { return x.has_value(); }); if (num_existing_values == 0) return 0; @@ -108,7 +117,7 @@ size_t ExpectedEncodingSize(const FieldParameters& params, (num_existing_values == v.size() ? 0 : (v.size() + 7) / 8); // Check if there is an element *not* equal to base. 
if (std::all_of(v.begin(), v.end(), - [base](absl::optional x) { return x == base; })) { + [base](std::optional x) { return x == base; })) { return tag_size + base_size + delta_header_size + positions_size; } @@ -156,8 +165,8 @@ class RtcTestEvent final : public RtcEvent { uint32_t unsigned32, int64_t signed64, uint64_t unsigned64, - absl::optional optional_signed32, - absl::optional optional_signed64, + std::optional optional_signed32, + std::optional optional_signed64, uint32_t wrapping21, absl::string_view string) : b_(b), @@ -203,8 +212,8 @@ class RtcTestEvent final : public RtcEvent { const uint32_t unsigned32_; const int64_t signed64_; const uint64_t unsigned64_; - const absl::optional optional_signed32_ = absl::nullopt; - const absl::optional optional_signed64_ = absl::nullopt; + const std::optional optional_signed32_ = std::nullopt; + const std::optional optional_signed64_ = std::nullopt; const uint32_t wrapping21_ = 0; const std::string string_; }; @@ -234,8 +243,8 @@ class RtcEventFieldTest : public ::testing::Test { const std::vector& unsigned32_values, const std::vector& signed64_values, const std::vector& unsigned64_values, - const std::vector>& optional32_values, - const std::vector>& optional64_values, + const std::vector>& optional32_values, + const std::vector>& optional64_values, const std::vector& wrapping21_values, const std::vector& string_values) { size_t size = bool_values.size(); @@ -332,7 +341,7 @@ class RtcEventFieldTest : public ::testing::Test { template void ParseAndVerifyOptionalField( const FieldParameters& params, - const std::vector>& expected_values, + const std::vector>& expected_values, size_t expected_bits_per_delta, size_t expected_skipped_bytes = 0) { size_t expected_size = @@ -341,8 +350,8 @@ class RtcEventFieldTest : public ::testing::Test { size_t size_before = parser_.RemainingBytes(); auto result = parser_.ParseOptionalNumericField(params); ASSERT_TRUE(result.ok()) << result.message().c_str(); - rtc::ArrayView values = result.value().values; - rtc::ArrayView positions = result.value().positions; + ArrayView values = result.value().values; + ArrayView positions = result.value().positions; ASSERT_EQ(positions.size(), expected_values.size()); auto value_it = values.begin(); for (size_t i = 0; i < expected_values.size(); i++) { @@ -353,7 +362,7 @@ class RtcEventFieldTest : public ::testing::Test { expected_values[i].value()); ++value_it; } else { - EXPECT_EQ(absl::nullopt, expected_values[i]); + EXPECT_EQ(std::nullopt, expected_values[i]); } } EXPECT_EQ(value_it, values.end()); @@ -371,8 +380,8 @@ class RtcEventFieldTest : public ::testing::Test { auto result = parser_.ParseOptionalNumericField(params, /*required_field=*/false); ASSERT_TRUE(result.ok()) << result.message().c_str(); - rtc::ArrayView values = result.value().values; - rtc::ArrayView positions = result.value().positions; + ArrayView values = result.value().values; + ArrayView positions = result.value().positions; EXPECT_EQ(positions.size(), 0u); EXPECT_EQ(values.size(), 0u); } @@ -401,8 +410,8 @@ TEST_F(RtcEventFieldTest, Singleton) { std::vector unsigned32_values = {123456789}; std::vector signed64_values = {-9876543210}; std::vector unsigned64_values = {9876543210}; - std::vector> optional32_values = {kInt32Min}; - std::vector> optional64_values = {kInt64Max}; + std::vector> optional32_values = {kInt32Min}; + std::vector> optional64_values = {kInt64Max}; std::vector wrapping21_values = {(1 << 21) - 1}; std::vector string_values = {"foo"}; @@ -470,9 +479,9 @@ TEST_F(RtcEventFieldTest, 
EqualElements) { -9876543210}; std::vector unsigned64_values = {9876543210, 9876543210, 9876543210, 9876543210}; - std::vector> optional32_values = { + std::vector> optional32_values = { kInt32Min, kInt32Min, kInt32Min, kInt32Min}; - std::vector> optional64_values = { + std::vector> optional64_values = { kInt64Max, kInt64Max, kInt64Max, kInt64Max}; std::vector wrapping21_values = {(1 << 21) - 1, (1 << 21) - 1, (1 << 21) - 1, (1 << 21) - 1}; @@ -539,9 +548,9 @@ TEST_F(RtcEventFieldTest, Increasing) { std::vector signed64_values = {kInt64Max - 1, kInt64Max, kInt64Min, kInt64Min + 1}; std::vector unsigned64_values = {kUint64Max - 1, kUint64Max, 0, 1}; - std::vector> optional32_values = { + std::vector> optional32_values = { kInt32Max - 1, kInt32Max, kInt32Min, kInt32Min + 1}; - std::vector> optional64_values = { + std::vector> optional64_values = { kInt64Max - 1, kInt64Max, kInt64Min, kInt64Min + 1}; std::vector wrapping21_values = {(1 << 21) - 2, (1 << 21) - 1, 0, 1}; @@ -584,21 +593,21 @@ TEST_F(RtcEventFieldTest, Increasing) { ParseEventHeader(s); ParseAndVerifyTimestamps(); ParseAndVerifyField(RtcTestEvent::bool_params, bool_values, - /*delta bits*/ 1); + /*expected_bits_per_delta=*/1); ParseAndVerifyField(RtcTestEvent::signed32_params, signed32_values, - /*delta bits*/ 1); + /*expected_bits_per_delta=*/1); ParseAndVerifyField(RtcTestEvent::unsigned32_params, unsigned32_values, - /*delta bits*/ 1); + /*expected_bits_per_delta=*/1); ParseAndVerifyField(RtcTestEvent::signed64_params, signed64_values, - /*delta bits*/ 1); + /*expected_bits_per_delta=*/1); ParseAndVerifyField(RtcTestEvent::unsigned64_params, unsigned64_values, - /*delta bits*/ 1); + /*expected_bits_per_delta=*/1); ParseAndVerifyOptionalField(RtcTestEvent::optional32_params, - optional32_values, /*delta bits*/ 1); + optional32_values, /*expected_bits_per_delta=*/1); ParseAndVerifyOptionalField(RtcTestEvent::optional64_params, - optional64_values, /*delta bits*/ 1); + optional64_values, /*expected_bits_per_delta=*/1); ParseAndVerifyField(RtcTestEvent::wrapping21_params, wrapping21_values, - /*delta bits*/ 1); + /*expected_bits_per_delta=*/1); ParseAndVerifyStringField(RtcTestEvent::string_params, string_values); EXPECT_EQ(parser_.RemainingBytes(), 0u); } @@ -610,9 +619,9 @@ TEST_F(RtcEventFieldTest, Decreasing) { std::vector signed64_values = {kInt64Min + 1, kInt64Min, kInt64Max, kInt64Max - 1}; std::vector unsigned64_values = {1, 0, kUint64Max, kUint64Max - 1}; - std::vector> optional32_values = { + std::vector> optional32_values = { kInt32Min + 1, kInt32Min, kInt32Max, kInt32Max - 1}; - std::vector> optional64_values = { + std::vector> optional64_values = { kInt64Min + 1, kInt64Min, kInt64Max, kInt64Max - 1}; std::vector wrapping21_values = {1, 0, (1 << 21) - 1, (1 << 21) - 2}; @@ -655,21 +664,21 @@ TEST_F(RtcEventFieldTest, Decreasing) { ParseEventHeader(s); ParseAndVerifyTimestamps(); ParseAndVerifyField(RtcTestEvent::bool_params, bool_values, - /*delta bits*/ 1); + /*expected_bits_per_delta=*/1); ParseAndVerifyField(RtcTestEvent::signed32_params, signed32_values, - /*delta bits*/ 1); + /*expected_bits_per_delta=*/1); ParseAndVerifyField(RtcTestEvent::unsigned32_params, unsigned32_values, - /*delta bits*/ 1); + /*expected_bits_per_delta=*/1); ParseAndVerifyField(RtcTestEvent::signed64_params, signed64_values, - /*delta bits*/ 1); + /*expected_bits_per_delta=*/1); ParseAndVerifyField(RtcTestEvent::unsigned64_params, unsigned64_values, - /*delta bits*/ 1); + /*expected_bits_per_delta=*/1); 
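The wrapping21 test data in these hunks (an increasing sequence that runs (1 << 21) - 2, (1 << 21) - 1, 0, 1 yet still expects one bit per delta) relies on deltas being taken modulo 2^21 so that wrap-around stays cheap to encode. The sketch below shows that modular-delta idea in isolation; it is an illustration of the arithmetic, not the WebRTC codec.

```cpp
// Sketch of wrap-around deltas for a counter that lives modulo 2^k: a wrap
// from 2^21-1 to 0 still produces a delta of 1.
#include <cstdint>
#include <iostream>
#include <vector>

constexpr uint64_t kBitWidth = 21;
constexpr uint64_t kModulus = uint64_t{1} << kBitWidth;

uint64_t WrappingDelta(uint64_t previous, uint64_t current) {
  return (current + kModulus - previous) % kModulus;
}

uint64_t ApplyDelta(uint64_t previous, uint64_t delta) {
  return (previous + delta) % kModulus;
}

int main() {
  // Mirrors the "Increasing" test data: the counter wraps from 2^21-1 to 0.
  std::vector<uint64_t> values = {kModulus - 2, kModulus - 1, 0, 1};
  uint64_t previous = values[0];
  for (size_t i = 1; i < values.size(); ++i) {
    uint64_t delta = WrappingDelta(previous, values[i]);
    std::cout << "delta=" << delta
              << " decoded=" << ApplyDelta(previous, delta) << "\n";
    previous = values[i];
  }  // every delta is 1, so one bit per delta suffices
}
```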
ParseAndVerifyOptionalField(RtcTestEvent::optional32_params, - optional32_values, /*delta bits*/ 1); + optional32_values, /*expected_bits_per_delta=*/1); ParseAndVerifyOptionalField(RtcTestEvent::optional64_params, - optional64_values, /*delta bits*/ 1); + optional64_values, /*expected_bits_per_delta=*/1); ParseAndVerifyField(RtcTestEvent::wrapping21_params, wrapping21_values, - /*delta bits*/ 1); + /*expected_bits_per_delta=*/1); ParseAndVerifyStringField(RtcTestEvent::string_params, string_values); EXPECT_EQ(parser_.RemainingBytes(), 0u); } @@ -682,10 +691,10 @@ TEST_F(RtcEventFieldTest, SkipsDeprecatedFields) { std::vector unsigned32_values = {0, kUint32Max / 2}; std::vector signed64_values = {kInt64Min / 2, kInt64Max / 2}; std::vector unsigned64_values = {0, kUint64Max / 2}; - std::vector> optional32_values = {kInt32Max / 2, - kInt32Min / 2}; - std::vector> optional64_values = {kInt64Min / 2, - kInt64Max / 2}; + std::vector> optional32_values = {kInt32Max / 2, + kInt32Min / 2}; + std::vector> optional64_values = {kInt64Min / 2, + kInt64Max / 2}; std::vector wrapping21_values = {0, 1 << 20}; std::vector string_values = {"foo", "bar"}; @@ -732,23 +741,23 @@ TEST_F(RtcEventFieldTest, SkipsDeprecatedFields) { ParseEventHeader(s); ParseAndVerifyTimestamps(); ParseAndVerifyField(RtcTestEvent::bool_params, bool_values, - /*delta_bits=*/1); + /*expected_bits_per_delta=*/1); // Skips parsing the `signed32_values`. The following unsigned fields should // still be found. ParseAndVerifyField(RtcTestEvent::unsigned32_params, unsigned32_values, - /*delta_bits=*/31, + /*expected_bits_per_delta=*/31, /*expected_skipped_bytes=*/signed32_encoding_size); // Skips parsing the `signed64_values`. The following unsigned fields should // still be found. ParseAndVerifyField(RtcTestEvent::unsigned64_params, unsigned64_values, - /*delta_bits=*/63, signed64_encoding_size); + /*expected_bits_per_delta=*/63, signed64_encoding_size); // Skips parsing the `optional32_values`. The following unsigned fields should // still be found. 
- ParseAndVerifyOptionalField(RtcTestEvent::optional64_params, - optional64_values, - /*delta_bits=*/63, optional32_encoding_size); + ParseAndVerifyOptionalField( + RtcTestEvent::optional64_params, optional64_values, + /*expected_bits_per_delta=*/63, optional32_encoding_size); ParseAndVerifyField(RtcTestEvent::wrapping21_params, wrapping21_values, - /*delta_bits=*/20); + /*expected_bits_per_delta=*/20); ParseAndVerifyStringField(RtcTestEvent::string_params, string_values); EXPECT_EQ(parser_.RemainingBytes(), 0u); } @@ -761,10 +770,10 @@ TEST_F(RtcEventFieldTest, SkipsMissingFields) { std::vector unsigned32_values = {0, kUint32Max / 2}; std::vector signed64_values = {kInt64Min / 2, kInt64Max / 2}; std::vector unsigned64_values = {0, kUint64Max / 2}; - std::vector> optional32_values = {kInt32Max / 2, - kInt32Min / 2}; - std::vector> optional64_values = {kInt64Min / 2, - kInt64Max / 2}; + std::vector> optional32_values = {kInt32Max / 2, + kInt32Min / 2}; + std::vector> optional64_values = {kInt64Min / 2, + kInt64Max / 2}; std::vector wrapping21_values = {0, 1 << 20}; std::vector string_values = {"foo", "foo"}; @@ -798,25 +807,26 @@ TEST_F(RtcEventFieldTest, SkipsMissingFields) { ParseAndVerifyTimestamps(); ParseAndVerifyMissingField(RtcTestEvent::bool_params); ParseAndVerifyField(RtcTestEvent::signed32_params, signed32_values, - /*delta_bits=*/31); + /*expected_bits_per_delta=*/31); ParseAndVerifyMissingField(RtcTestEvent::unsigned32_params); ParseAndVerifyField(RtcTestEvent::signed64_params, signed64_values, - /*delta_bits=*/63); + /*expected_bits_per_delta=*/63); ParseAndVerifyMissingField(RtcTestEvent::unsigned64_params); ParseAndVerifyOptionalField(RtcTestEvent::optional32_params, - optional32_values, /*delta_bits=*/31); + optional32_values, + /*expected_bits_per_delta=*/31); ParseAndVerifyMissingOptionalField(RtcTestEvent::optional64_params); ParseAndVerifyField(RtcTestEvent::wrapping21_params, wrapping21_values, - /*delta_bits=*/20); + /*expected_bits_per_delta=*/20); ParseAndVerifyStringField(RtcTestEvent::string_params, string_values); EXPECT_EQ(parser_.RemainingBytes(), 0u); } TEST_F(RtcEventFieldTest, OptionalFields) { - std::vector> optional32_values = { - 2, absl::nullopt, 4, absl::nullopt, 6, absl::nullopt}; - std::vector> optional64_values = { - absl::nullopt, 1024, absl::nullopt, 1025, absl::nullopt, 1026}; + std::vector> optional32_values = { + 2, std::nullopt, 4, std::nullopt, 6, std::nullopt}; + std::vector> optional64_values = { + std::nullopt, 1024, std::nullopt, 1025, std::nullopt, 1026}; std::vector wrapping21_values = {(1 << 21) - 3, 0, 2, 5, 5, 6}; for (size_t i = 0; i < optional32_values.size(); i++) { @@ -843,20 +853,20 @@ TEST_F(RtcEventFieldTest, OptionalFields) { ParseEventHeader(s); ParseAndVerifyTimestamps(); ParseAndVerifyOptionalField(RtcTestEvent::optional32_params, - optional32_values, /*delta bits*/ 2); + optional32_values, /*expected_bits_per_delta=*/2); ParseAndVerifyOptionalField(RtcTestEvent::optional64_params, - optional64_values, /*delta bits*/ 1); + optional64_values, /*expected_bits_per_delta=*/1); ParseAndVerifyField(RtcTestEvent::wrapping21_params, wrapping21_values, - /*delta bits*/ 2); + /*expected_bits_per_delta=*/2); EXPECT_EQ(parser_.RemainingBytes(), 0u); } TEST_F(RtcEventFieldTest, AllNulloptTreatedAsMissing) { - std::vector> optional32_values = { - absl::nullopt, absl::nullopt, absl::nullopt, - absl::nullopt, absl::nullopt, absl::nullopt}; - std::vector> optional64_values = { - absl::nullopt, 1024, absl::nullopt, 1025, absl::nullopt, 
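Note: the /*delta bits*/ annotations are rewritten as argument-name comments that spell out the parameter they bind to. A short sketch of the convention, using a hypothetical helper (not the real test fixture):

#include <cstddef>

// Hypothetical helper, shown only to illustrate the /*name=*/ style: the
// comment must match the declared parameter name exactly so that tooling
// (for example clang-tidy's bugprone-argument-comment check) can verify
// the call site.
void ParseAndVerify(size_t expected_bits_per_delta,
                    size_t expected_skipped_bytes) {}

void Caller() {
  ParseAndVerify(/*expected_bits_per_delta=*/1, /*expected_skipped_bytes=*/0);
}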
1026}; + std::vector> optional32_values = { + std::nullopt, std::nullopt, std::nullopt, + std::nullopt, std::nullopt, std::nullopt}; + std::vector> optional64_values = { + std::nullopt, 1024, std::nullopt, 1025, std::nullopt, 1026}; for (size_t i = 0; i < optional32_values.size(); i++) { batch_.push_back(new RtcTestEvent(0, 0, 0, 0, 0, optional32_values[i], @@ -879,7 +889,7 @@ TEST_F(RtcEventFieldTest, AllNulloptTreatedAsMissing) { ParseAndVerifyTimestamps(); ParseAndVerifyMissingOptionalField(RtcTestEvent::optional32_params); ParseAndVerifyOptionalField(RtcTestEvent::optional64_params, - optional64_values, /*delta_bits=*/1); + optional64_values, /*expected_bits_per_delta=*/1); EXPECT_EQ(parser_.RemainingBytes(), 0u); } diff --git a/logging/rtc_event_log/events/rtc_event_field_extraction.cc b/logging/rtc_event_log/events/rtc_event_field_extraction.cc index 99f0b3697c..987a9abe10 100644 --- a/logging/rtc_event_log/events/rtc_event_field_extraction.cc +++ b/logging/rtc_event_log/events/rtc_event_field_extraction.cc @@ -11,6 +11,7 @@ #include "logging/rtc_event_log/events/rtc_event_field_extraction.h" #include +#include #include #include "rtc_base/checks.h" diff --git a/logging/rtc_event_log/events/rtc_event_field_extraction.h b/logging/rtc_event_log/events/rtc_event_field_extraction.h index eb9d67f1c2..9a8a1740e2 100644 --- a/logging/rtc_event_log/events/rtc_event_field_extraction.h +++ b/logging/rtc_event_log/events/rtc_event_field_extraction.h @@ -11,13 +11,10 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_FIELD_EXTRACTION_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_FIELD_EXTRACTION_H_ -#include +#include +#include #include -#include "absl/types/optional.h" -#include "api/array_view.h" -#include "api/rtc_event_log/rtc_event.h" -#include "api/units/timestamp.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_common.h" #include "rtc_base/logging.h" diff --git a/logging/rtc_event_log/events/rtc_event_field_extraction_unittest.cc b/logging/rtc_event_log/events/rtc_event_field_extraction_unittest.cc index f9fb993af0..09f924a532 100644 --- a/logging/rtc_event_log/events/rtc_event_field_extraction_unittest.cc +++ b/logging/rtc_event_log/events/rtc_event_field_extraction_unittest.cc @@ -9,6 +9,8 @@ #include "logging/rtc_event_log/events/rtc_event_field_extraction.h" +#include + #include "rtc_base/random.h" #include "test/gtest.h" diff --git a/logging/rtc_event_log/events/rtc_event_frame_decoded.cc b/logging/rtc_event_log/events/rtc_event_frame_decoded.cc index cde412e6c4..e5604c87ae 100644 --- a/logging/rtc_event_log/events/rtc_event_frame_decoded.cc +++ b/logging/rtc_event_log/events/rtc_event_frame_decoded.cc @@ -10,7 +10,12 @@ #include "logging/rtc_event_log/events/rtc_event_frame_decoded.h" +#include +#include + #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" +#include "api/video/video_codec_type.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_frame_decoded.h b/logging/rtc_event_log/events/rtc_event_frame_decoded.h index 91190faea9..ea12be7817 100644 --- a/logging/rtc_event_log/events/rtc_event_frame_decoded.h +++ b/logging/rtc_event_log/events/rtc_event_frame_decoded.h @@ -19,10 +19,11 @@ #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" #include "api/video/video_codec_type.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" 
namespace webrtc { @@ -64,15 +65,15 @@ class RtcEventFrameDecoded final : public RtcEvent { VideoCodecType codec() const { return codec_; } uint8_t qp() const { return qp_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::map>& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::map>& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_generic_ack_received.cc b/logging/rtc_event_log/events/rtc_event_generic_ack_received.cc index ba18d50ab6..4ba44e7b29 100644 --- a/logging/rtc_event_log/events/rtc_event_generic_ack_received.cc +++ b/logging/rtc_event_log/events/rtc_event_generic_ack_received.cc @@ -10,9 +10,13 @@ #include "logging/rtc_event_log/events/rtc_event_generic_ack_received.h" +#include +#include +#include #include #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" #include "rtc_base/time_utils.h" namespace webrtc { @@ -22,7 +26,7 @@ RtcEventGenericAckReceived::CreateLogs( int64_t packet_number, const std::vector& acked_packets) { std::vector> result; - int64_t time_us = rtc::TimeMicros(); + int64_t time_us = TimeMicros(); result.reserve(acked_packets.size()); for (const AckedPacket& packet : acked_packets) { result.emplace_back(new RtcEventGenericAckReceived( @@ -36,7 +40,7 @@ RtcEventGenericAckReceived::RtcEventGenericAckReceived( int64_t timestamp_us, int64_t packet_number, int64_t acked_packet_number, - absl::optional receive_acked_packet_time_ms) + std::optional receive_acked_packet_time_ms) : RtcEvent(timestamp_us), packet_number_(packet_number), acked_packet_number_(acked_packet_number), diff --git a/logging/rtc_event_log/events/rtc_event_generic_ack_received.h b/logging/rtc_event_log/events/rtc_event_generic_ack_received.h index 57fd7cd9a6..eaa9a82eff 100644 --- a/logging/rtc_event_log/events/rtc_event_generic_ack_received.h +++ b/logging/rtc_event_log/events/rtc_event_generic_ack_received.h @@ -11,15 +11,17 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_ACK_RECEIVED_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_ACK_RECEIVED_H_ +#include #include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -28,7 +30,7 @@ struct LoggedGenericAckReceived { LoggedGenericAckReceived(Timestamp timestamp, int64_t packet_number, int64_t acked_packet_number, - absl::optional receive_acked_packet_time_ms) + std::optional receive_acked_packet_time_ms) : timestamp(timestamp), packet_number(packet_number), acked_packet_number(acked_packet_number), @@ -41,7 +43,7 @@ struct LoggedGenericAckReceived { Timestamp timestamp = Timestamp::MinusInfinity(); int64_t packet_number; int64_t acked_packet_number; - absl::optional receive_acked_packet_time_ms; + std::optional receive_acked_packet_time_ms; }; struct AckedPacket { @@ -50,7 +52,7 @@ struct AckedPacket { // The time where the packet was received. Not every ACK will // include the receive timestamp. 
- absl::optional receive_acked_packet_time_ms; + std::optional receive_acked_packet_time_ms; }; class RtcEventGenericAckReceived final : public RtcEvent { @@ -77,19 +79,19 @@ class RtcEventGenericAckReceived final : public RtcEvent { int64_t acked_packet_number() const { return acked_packet_number_; } // Timestamp when the `acked_packet_number` was received by the remote side. - absl::optional receive_acked_packet_time_ms() const { + std::optional receive_acked_packet_time_ms() const { return receive_acked_packet_time_ms_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } @@ -106,11 +108,11 @@ class RtcEventGenericAckReceived final : public RtcEvent { int64_t timestamp_us, int64_t packet_number, int64_t acked_packet_number, - absl::optional receive_acked_packet_time_ms); + std::optional receive_acked_packet_time_ms); const int64_t packet_number_; const int64_t acked_packet_number_; - const absl::optional receive_acked_packet_time_ms_; + const std::optional receive_acked_packet_time_ms_; }; } // namespace webrtc diff --git a/logging/rtc_event_log/events/rtc_event_generic_packet_received.cc b/logging/rtc_event_log/events/rtc_event_generic_packet_received.cc index 0bdc4dd505..19f9d8fdf4 100644 --- a/logging/rtc_event_log/events/rtc_event_generic_packet_received.cc +++ b/logging/rtc_event_log/events/rtc_event_generic_packet_received.cc @@ -10,6 +10,10 @@ #include "logging/rtc_event_log/events/rtc_event_generic_packet_received.h" +#include +#include +#include + #include "absl/memory/memory.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_generic_packet_received.h b/logging/rtc_event_log/events/rtc_event_generic_packet_received.h index a6006ca4d4..d34574aed0 100644 --- a/logging/rtc_event_log/events/rtc_event_generic_packet_received.h +++ b/logging/rtc_event_log/events/rtc_event_generic_packet_received.h @@ -11,14 +11,17 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_PACKET_RECEIVED_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_PACKET_RECEIVED_H_ +#include +#include #include #include #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -59,15 +62,15 @@ class RtcEventGenericPacketReceived final : public RtcEvent { // including ICE/TURN/IP overheads. 
size_t packet_length() const { return packet_length_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_generic_packet_sent.cc b/logging/rtc_event_log/events/rtc_event_generic_packet_sent.cc index e8335624b1..e7c5046cec 100644 --- a/logging/rtc_event_log/events/rtc_event_generic_packet_sent.cc +++ b/logging/rtc_event_log/events/rtc_event_generic_packet_sent.cc @@ -10,6 +10,10 @@ #include "logging/rtc_event_log/events/rtc_event_generic_packet_sent.h" +#include +#include +#include + #include "absl/memory/memory.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h b/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h index 903950a398..d35962f7ab 100644 --- a/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h +++ b/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h @@ -11,14 +11,17 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_PACKET_SENT_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_PACKET_SENT_H_ +#include +#include #include #include #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -83,15 +86,15 @@ class RtcEventGenericPacketSent final : public RtcEvent { size_t padding_length() const { return padding_length_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.cc b/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.cc index 2b4b5ba762..15f1351408 100644 --- a/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.cc +++ b/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.cc @@ -10,7 +10,11 @@ #include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" +#include +#include + #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h b/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h index bdacf15a59..5460ec40b4 100644 --- a/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h +++ b/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h @@ -18,9 +18,10 @@ #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" 
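Note: the not-yet-implemented Encode()/Parse() stubs keep their full signatures but comment out the parameter names. A minimal sketch of the pattern with a placeholder event type (illustrative only):

#include <string>
#include <vector>

struct PlaceholderEvent {};  // Stand-in type for illustration only.

// Commenting out the name documents the intended argument while silencing
// -Wunused-parameter until the TODO is resolved.
std::string Encode(const std::vector<const PlaceholderEvent*>& /* batch */) {
  // TODO: Implement.
  return "";
}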
namespace webrtc { @@ -72,15 +73,15 @@ class RtcEventIceCandidatePair final : public RtcEvent { uint32_t candidate_pair_id() const { return candidate_pair_id_; } uint32_t transaction_id() const { return transaction_id_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.cc b/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.cc index eb458c4640..fa3984ae68 100644 --- a/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.cc +++ b/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.cc @@ -10,16 +10,23 @@ #include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" +#include +#include + #include "absl/memory/memory.h" +#include "api/candidate.h" +#include "api/rtc_event_log/rtc_event.h" namespace webrtc { -IceCandidatePairDescription::IceCandidatePairDescription() { - local_candidate_type = IceCandidateType::kUnknown; +IceCandidatePairDescription::IceCandidatePairDescription( + IceCandidateType local_candidate_type, + IceCandidateType remote_candidate_type) + : local_candidate_type(local_candidate_type), + remote_candidate_type(remote_candidate_type) { local_relay_protocol = IceCandidatePairProtocol::kUnknown; local_network_type = IceCandidateNetworkType::kUnknown; local_address_family = IceCandidatePairAddressFamily::kUnknown; - remote_candidate_type = IceCandidateType::kUnknown; remote_address_family = IceCandidatePairAddressFamily::kUnknown; candidate_pair_protocol = IceCandidatePairProtocol::kUnknown; } diff --git a/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h b/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h index e72d999cff..b1c6b0cdc5 100644 --- a/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h +++ b/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h @@ -18,9 +18,11 @@ #include #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/candidate.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -32,17 +34,6 @@ enum class IceCandidatePairConfigType { kNumValues, }; -// TODO(qingsi): Change the names of candidate types to "host", "srflx", "prflx" -// and "relay" after the naming is spec-compliant in the signaling part -enum class IceCandidateType { - kUnknown, - kLocal, - kStun, - kPrflx, - kRelay, - kNumValues, -}; - enum class IceCandidatePairProtocol { kUnknown, kUdp, @@ -88,7 +79,8 @@ struct LoggedIceCandidatePairConfig { class IceCandidatePairDescription { public: - IceCandidatePairDescription(); + IceCandidatePairDescription(IceCandidateType local_candidate_type, + IceCandidateType remote_candidate_type); explicit IceCandidatePairDescription( const IceCandidatePairDescription& other); @@ -126,15 +118,15 @@ class RtcEventIceCandidatePairConfig final : public RtcEvent { return candidate_pair_desc_; } - static std::string Encode(rtc::ArrayView batch) { + 
static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_log_parse_status.h b/logging/rtc_event_log/events/rtc_event_log_parse_status.h index e7d6de7d60..6c41a97086 100644 --- a/logging/rtc_event_log/events/rtc_event_log_parse_status.h +++ b/logging/rtc_event_log/events/rtc_event_log_parse_status.h @@ -12,8 +12,8 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_LOG_PARSE_STATUS_H_ #include -#include +#include "absl/base/attributes.h" #include "absl/strings/string_view.h" #include "rtc_base/checks.h" diff --git a/logging/rtc_event_log/events/rtc_event_neteq_set_minimum_delay.cc b/logging/rtc_event_log/events/rtc_event_neteq_set_minimum_delay.cc index 7b958c181b..b30461c969 100644 --- a/logging/rtc_event_log/events/rtc_event_neteq_set_minimum_delay.cc +++ b/logging/rtc_event_log/events/rtc_event_neteq_set_minimum_delay.cc @@ -12,15 +12,6 @@ #include -#include -#include -#include -#include - -#include "api/rtc_event_log/rtc_event.h" -#include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_definition.h" - namespace webrtc { RtcEventNetEqSetMinimumDelay::RtcEventNetEqSetMinimumDelay(uint32_t remote_ssrc, diff --git a/logging/rtc_event_log/events/rtc_event_neteq_set_minimum_delay.h b/logging/rtc_event_log/events/rtc_event_neteq_set_minimum_delay.h index 2e49adb36e..7e0f0be255 100644 --- a/logging/rtc_event_log/events/rtc_event_neteq_set_minimum_delay.h +++ b/logging/rtc_event_log/events/rtc_event_neteq_set_minimum_delay.h @@ -13,16 +13,11 @@ #include -#include #include -#include -#include #include "absl/memory/memory.h" -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_definition.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_probe_cluster_created.cc b/logging/rtc_event_log/events/rtc_event_probe_cluster_created.cc index c3d9e59b47..a3bb8246e6 100644 --- a/logging/rtc_event_log/events/rtc_event_probe_cluster_created.cc +++ b/logging/rtc_event_log/events/rtc_event_probe_cluster_created.cc @@ -10,7 +10,11 @@ #include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" +#include +#include + #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h b/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h index ae6810c39d..0008c1590d 100644 --- a/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h +++ b/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h @@ -18,9 +18,10 @@ #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -68,15 +69,15 @@ class RtcEventProbeClusterCreated final : public RtcEvent { uint32_t min_probes() const { return min_probes_; } uint32_t min_bytes() const { return min_bytes_; } - static std::string 
Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_probe_result_failure.cc b/logging/rtc_event_log/events/rtc_event_probe_result_failure.cc index a79b0c173d..9e77a01c6c 100644 --- a/logging/rtc_event_log/events/rtc_event_probe_result_failure.cc +++ b/logging/rtc_event_log/events/rtc_event_probe_result_failure.cc @@ -10,7 +10,11 @@ #include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h" +#include +#include + #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_probe_result_failure.h b/logging/rtc_event_log/events/rtc_event_probe_result_failure.h index 1aa6e75cb7..7de3e7ffbc 100644 --- a/logging/rtc_event_log/events/rtc_event_probe_result_failure.h +++ b/logging/rtc_event_log/events/rtc_event_probe_result_failure.h @@ -18,9 +18,10 @@ #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -62,15 +63,15 @@ class RtcEventProbeResultFailure final : public RtcEvent { int32_t id() const { return id_; } ProbeFailureReason failure_reason() const { return failure_reason_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_probe_result_success.cc b/logging/rtc_event_log/events/rtc_event_probe_result_success.cc index e7bc7c25da..80dc70f75c 100644 --- a/logging/rtc_event_log/events/rtc_event_probe_result_success.cc +++ b/logging/rtc_event_log/events/rtc_event_probe_result_success.cc @@ -10,7 +10,11 @@ #include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" +#include +#include + #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_probe_result_success.h b/logging/rtc_event_log/events/rtc_event_probe_result_success.h index 49d1abec5a..340760469e 100644 --- a/logging/rtc_event_log/events/rtc_event_probe_result_success.h +++ b/logging/rtc_event_log/events/rtc_event_probe_result_success.h @@ -18,9 +18,10 @@ #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -55,15 +56,15 @@ class RtcEventProbeResultSuccess final : public RtcEvent { int32_t id() const { return id_; } int32_t bitrate_bps() 
const { return bitrate_bps_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_remote_estimate.h b/logging/rtc_event_log/events/rtc_event_remote_estimate.h index 4a39ecc597..c1de1c6fa2 100644 --- a/logging/rtc_event_log/events/rtc_event_remote_estimate.h +++ b/logging/rtc_event_log/events/rtc_event_remote_estimate.h @@ -10,16 +10,17 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_REMOTE_ESTIMATE_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_REMOTE_ESTIMATE_H_ -#include +#include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/data_rate.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -31,8 +32,8 @@ struct LoggedRemoteEstimateEvent { Timestamp log_time() const { return timestamp; } Timestamp timestamp = Timestamp::MinusInfinity(); - absl::optional link_capacity_lower; - absl::optional link_capacity_upper; + std::optional link_capacity_lower; + std::optional link_capacity_upper; }; class RtcEventRemoteEstimate final : public RtcEvent { @@ -47,15 +48,15 @@ class RtcEventRemoteEstimate final : public RtcEvent { Type GetType() const override { return kType; } bool IsConfigEvent() const override { return false; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_route_change.cc b/logging/rtc_event_log/events/rtc_event_route_change.cc index 71bd78b346..78312288b6 100644 --- a/logging/rtc_event_log/events/rtc_event_route_change.cc +++ b/logging/rtc_event_log/events/rtc_event_route_change.cc @@ -10,7 +10,11 @@ #include "logging/rtc_event_log/events/rtc_event_route_change.h" +#include +#include + #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_route_change.h b/logging/rtc_event_log/events/rtc_event_route_change.h index bc1461d7bb..7e4e71cfc9 100644 --- a/logging/rtc_event_log/events/rtc_event_route_change.h +++ b/logging/rtc_event_log/events/rtc_event_route_change.h @@ -11,14 +11,16 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ROUTE_CHANGE_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ROUTE_CHANGE_H_ +#include #include #include #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include 
"logging/rtc_event_log/events/rtc_event_log_parse_status.h" namespace webrtc { @@ -51,15 +53,15 @@ class RtcEventRouteChange final : public RtcEvent { bool connected() const { return connected_; } uint32_t overhead() const { return overhead_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc b/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc index 0ea700a024..5c74bd101f 100644 --- a/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc +++ b/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc @@ -10,12 +10,17 @@ #include "logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h" +#include +#include + #include "absl/memory/memory.h" +#include "api/array_view.h" +#include "api/rtc_event_log/rtc_event.h" namespace webrtc { RtcEventRtcpPacketIncoming::RtcEventRtcpPacketIncoming( - rtc::ArrayView packet) + ArrayView packet) : packet_(packet.data(), packet.size()) {} RtcEventRtcpPacketIncoming::RtcEventRtcpPacketIncoming( diff --git a/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h b/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h index 84fe398e08..69c2e6b852 100644 --- a/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h +++ b/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h @@ -21,7 +21,7 @@ #include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "logging/rtc_event_log/events/logged_rtp_rtcp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" #include "rtc_base/buffer.h" namespace webrtc { @@ -30,7 +30,7 @@ class RtcEventRtcpPacketIncoming final : public RtcEvent { public: static constexpr Type kType = Type::RtcpPacketIncoming; - explicit RtcEventRtcpPacketIncoming(rtc::ArrayView packet); + explicit RtcEventRtcpPacketIncoming(ArrayView packet); ~RtcEventRtcpPacketIncoming() override; Type GetType() const override { return kType; } @@ -38,17 +38,17 @@ class RtcEventRtcpPacketIncoming final : public RtcEvent { std::unique_ptr Copy() const; - const rtc::Buffer& packet() const { return packet_; } + const Buffer& packet() const { return packet_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } @@ -56,7 +56,7 @@ class RtcEventRtcpPacketIncoming final : public RtcEvent { private: RtcEventRtcpPacketIncoming(const RtcEventRtcpPacketIncoming& other); - rtc::Buffer packet_; + Buffer packet_; }; } // namespace webrtc diff --git a/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.cc b/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.cc index b6a41ac034..28071803c3 100644 --- 
a/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.cc +++ b/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.cc @@ -10,12 +10,17 @@ #include "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h" +#include +#include + #include "absl/memory/memory.h" +#include "api/array_view.h" +#include "api/rtc_event_log/rtc_event.h" namespace webrtc { RtcEventRtcpPacketOutgoing::RtcEventRtcpPacketOutgoing( - rtc::ArrayView packet) + ArrayView packet) : packet_(packet.data(), packet.size()) {} RtcEventRtcpPacketOutgoing::RtcEventRtcpPacketOutgoing( diff --git a/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h b/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h index 687bd319b4..9916c94211 100644 --- a/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h +++ b/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h @@ -21,7 +21,7 @@ #include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "logging/rtc_event_log/events/logged_rtp_rtcp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" #include "rtc_base/buffer.h" namespace webrtc { @@ -30,7 +30,7 @@ class RtcEventRtcpPacketOutgoing final : public RtcEvent { public: static constexpr Type kType = Type::RtcpPacketOutgoing; - explicit RtcEventRtcpPacketOutgoing(rtc::ArrayView packet); + explicit RtcEventRtcpPacketOutgoing(ArrayView packet); ~RtcEventRtcpPacketOutgoing() override; Type GetType() const override { return kType; } @@ -38,17 +38,17 @@ class RtcEventRtcpPacketOutgoing final : public RtcEvent { std::unique_ptr Copy() const; - const rtc::Buffer& packet() const { return packet_; } + const Buffer& packet() const { return packet_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } @@ -56,7 +56,7 @@ class RtcEventRtcpPacketOutgoing final : public RtcEvent { private: RtcEventRtcpPacketOutgoing(const RtcEventRtcpPacketOutgoing& other); - rtc::Buffer packet_; + Buffer packet_; }; } // namespace webrtc diff --git a/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc b/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc index 4cf33a238f..978713abb7 100644 --- a/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc +++ b/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc @@ -10,9 +10,11 @@ #include "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h" +#include + #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "rtc_base/checks.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h b/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h index 66ea167a1c..f3b647f18b 100644 --- a/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h +++ b/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h @@ -23,7 +23,7 @@ #include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "logging/rtc_event_log/events/logged_rtp_rtcp.h" -#include 
"logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" #include "modules/rtp_rtcp/source/rtp_packet.h" namespace webrtc { @@ -44,8 +44,8 @@ class RtcEventRtpPacketIncoming final : public RtcEvent { size_t packet_length() const { return packet_.size(); } - rtc::ArrayView RawHeader() const { - return rtc::MakeArrayView(packet_.data(), header_length()); + ArrayView RawHeader() const { + return MakeArrayView(packet_.data(), header_length()); } uint32_t Ssrc() const { return packet_.Ssrc(); } uint32_t Timestamp() const { return packet_.Timestamp(); } @@ -57,7 +57,7 @@ class RtcEventRtpPacketIncoming final : public RtcEvent { return packet_.GetExtension(std::forward(args)...); } template - rtc::ArrayView GetRawExtension() const { + ArrayView GetRawExtension() const { return packet_.GetRawExtension(); } template @@ -69,15 +69,15 @@ class RtcEventRtpPacketIncoming final : public RtcEvent { size_t header_length() const { return packet_.headers_size(); } size_t padding_length() const { return packet_.padding_size(); } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::map>& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::map>& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc b/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc index a6a4d99702..4094dca2d8 100644 --- a/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc +++ b/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc @@ -10,9 +10,11 @@ #include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" +#include + #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "rtc_base/checks.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h b/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h index 4f4be04445..74e7ad49b8 100644 --- a/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h +++ b/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h @@ -23,7 +23,7 @@ #include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "logging/rtc_event_log/events/logged_rtp_rtcp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" #include "modules/rtp_rtcp/source/rtp_packet.h" namespace webrtc { @@ -45,8 +45,8 @@ class RtcEventRtpPacketOutgoing final : public RtcEvent { size_t packet_length() const { return packet_.size(); } - rtc::ArrayView RawHeader() const { - return rtc::MakeArrayView(packet_.data(), header_length()); + ArrayView RawHeader() const { + return MakeArrayView(packet_.data(), header_length()); } uint32_t Ssrc() const { return packet_.Ssrc(); } uint32_t Timestamp() const { return packet_.Timestamp(); } @@ -58,7 +58,7 @@ class RtcEventRtpPacketOutgoing final : public RtcEvent { return packet_.GetExtension(std::forward(args)...); } template - rtc::ArrayView GetRawExtension() const { + ArrayView GetRawExtension() const { return packet_.GetRawExtension(); } template @@ -71,15 +71,15 @@ class 
RtcEventRtpPacketOutgoing final : public RtcEvent { size_t padding_length() const { return packet_.padding_size(); } int probe_cluster_id() const { return probe_cluster_id_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::map>& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::map>& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.cc b/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.cc index 90ab8185a3..c6b4686863 100644 --- a/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.cc +++ b/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.cc @@ -10,9 +10,12 @@ #include "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h" +#include #include #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" +#include "logging/rtc_event_log/rtc_stream_config.h" #include "rtc_base/checks.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h b/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h index 0be56c2065..f8c222f88c 100644 --- a/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h +++ b/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h @@ -11,14 +11,16 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_VIDEO_RECEIVE_STREAM_CONFIG_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_VIDEO_RECEIVE_STREAM_CONFIG_H_ +#include #include #include #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" #include "logging/rtc_event_log/rtc_stream_config.h" namespace webrtc { @@ -51,15 +53,15 @@ class RtcEventVideoReceiveStreamConfig final : public RtcEvent { const rtclog::StreamConfig& config() const { return *config_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/events/rtc_event_video_send_stream_config.cc b/logging/rtc_event_log/events/rtc_event_video_send_stream_config.cc index c28a476d01..05f729cbf4 100644 --- a/logging/rtc_event_log/events/rtc_event_video_send_stream_config.cc +++ b/logging/rtc_event_log/events/rtc_event_video_send_stream_config.cc @@ -10,9 +10,12 @@ #include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" +#include #include #include "absl/memory/memory.h" +#include "api/rtc_event_log/rtc_event.h" +#include "logging/rtc_event_log/rtc_stream_config.h" namespace webrtc { diff --git a/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h b/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h index 
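Note: the RTP/RTCP packet events now spell ArrayView, MakeArrayView and Buffer without the rtc:: prefix. The copy-into-owned-storage pattern behind the packet constructors is sketched below with standard types; SpanLike and OwnedPacket are placeholders for the real ArrayView/Buffer classes.

#include <cstddef>
#include <cstdint>
#include <vector>

// Placeholder view type; the real code uses ArrayView<const uint8_t>.
struct SpanLike {
  const uint8_t* data;
  size_t size;
};

// Placeholder owned storage; the real code uses webrtc::Buffer.
class OwnedPacket {
 public:
  OwnedPacket(const uint8_t* data, size_t size) : bytes_(data, data + size) {}
  size_t size() const { return bytes_.size(); }

 private:
  std::vector<uint8_t> bytes_;
};

// Mirrors the constructor pattern `packet_(packet.data(), packet.size())`:
// the non-owning view is copied into owned storage so the logged event can
// outlive the caller's buffer.
OwnedPacket CopyPacket(SpanLike packet) {
  return OwnedPacket(packet.data, packet.size);
}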
f1717b19ea..2d5b85c56f 100644 --- a/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h +++ b/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h @@ -11,14 +11,16 @@ #ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_VIDEO_SEND_STREAM_CONFIG_H_ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_VIDEO_SEND_STREAM_CONFIG_H_ +#include #include #include #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" #include "logging/rtc_event_log/rtc_stream_config.h" namespace webrtc { @@ -51,15 +53,15 @@ class RtcEventVideoSendStreamConfig final : public RtcEvent { const rtclog::StreamConfig& config() const { return *config_; } - static std::string Encode(rtc::ArrayView batch) { + static std::string Encode(ArrayView /* batch */) { // TODO(terelius): Implement return ""; } static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { + absl::string_view /* encoded_bytes */, + bool /* batched */, + std::vector& /* output */) { // TODO(terelius): Implement return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); } diff --git a/logging/rtc_event_log/fake_rtc_event_log.cc b/logging/rtc_event_log/fake_rtc_event_log.cc index 5a44b00694..ca6b1c4242 100644 --- a/logging/rtc_event_log/fake_rtc_event_log.cc +++ b/logging/rtc_event_log/fake_rtc_event_log.cc @@ -10,16 +10,19 @@ #include "logging/rtc_event_log/fake_rtc_event_log.h" +#include #include #include -#include "api/rtc_event_log/rtc_event_log.h" +#include "api/rtc_event_log/rtc_event.h" +#include "api/rtc_event_log_output.h" #include "rtc_base/synchronization/mutex.h" namespace webrtc { -bool FakeRtcEventLog::StartLogging(std::unique_ptr output, - int64_t output_period_ms) { +bool FakeRtcEventLog::StartLogging( + std::unique_ptr /* output */, + int64_t /* output_period_ms */) { return true; } diff --git a/logging/rtc_event_log/fake_rtc_event_log.h b/logging/rtc_event_log/fake_rtc_event_log.h index effa7507f1..bdeff8aa2b 100644 --- a/logging/rtc_event_log/fake_rtc_event_log.h +++ b/logging/rtc_event_log/fake_rtc_event_log.h @@ -11,11 +11,13 @@ #ifndef LOGGING_RTC_EVENT_LOG_FAKE_RTC_EVENT_LOG_H_ #define LOGGING_RTC_EVENT_LOG_FAKE_RTC_EVENT_LOG_H_ +#include #include #include #include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/rtc_event_log_output.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" diff --git a/logging/rtc_event_log/fake_rtc_event_log_factory.cc b/logging/rtc_event_log/fake_rtc_event_log_factory.cc index 47db40c9f4..d5be8fd4e0 100644 --- a/logging/rtc_event_log/fake_rtc_event_log_factory.cc +++ b/logging/rtc_event_log/fake_rtc_event_log_factory.cc @@ -12,22 +12,18 @@ #include +#include "absl/base/nullability.h" +#include "api/environment/environment.h" #include "api/rtc_event_log/rtc_event_log.h" #include "logging/rtc_event_log/fake_rtc_event_log.h" namespace webrtc { -std::unique_ptr FakeRtcEventLogFactory::Create( - RtcEventLog::EncodingType /*encoding_type*/) const { +absl_nonnull std::unique_ptr FakeRtcEventLogFactory::Create( + const Environment& /*env*/) const { auto fake_event_log = std::make_unique(); - const_cast(this)->last_log_created_ = - fake_event_log.get(); + const_cast(last_log_created_) = fake_event_log.get(); return 
fake_event_log; } -std::unique_ptr FakeRtcEventLogFactory::CreateRtcEventLog( - RtcEventLog::EncodingType encoding_type) { - return Create(encoding_type); -} - } // namespace webrtc diff --git a/logging/rtc_event_log/fake_rtc_event_log_factory.h b/logging/rtc_event_log/fake_rtc_event_log_factory.h index c7ff33dee4..71489cc44a 100644 --- a/logging/rtc_event_log/fake_rtc_event_log_factory.h +++ b/logging/rtc_event_log/fake_rtc_event_log_factory.h @@ -13,6 +13,8 @@ #include +#include "absl/base/nullability.h" +#include "api/environment/environment.h" #include "api/rtc_event_log/rtc_event_log_factory_interface.h" #include "logging/rtc_event_log/fake_rtc_event_log.h" @@ -23,16 +25,13 @@ class FakeRtcEventLogFactory : public RtcEventLogFactoryInterface { FakeRtcEventLogFactory() = default; ~FakeRtcEventLogFactory() override = default; - std::unique_ptr Create( - RtcEventLog::EncodingType encoding_type) const override; + absl_nonnull std::unique_ptr Create( + const Environment& env) const override; - std::unique_ptr CreateRtcEventLog( - RtcEventLog::EncodingType encoding_type) override; - - webrtc::FakeRtcEventLog* last_log_created() { return last_log_created_; } + FakeRtcEventLog* last_log_created() { return last_log_created_; } private: - webrtc::FakeRtcEventLog* last_log_created_; + FakeRtcEventLog* last_log_created_ = nullptr; }; } // namespace webrtc diff --git a/logging/rtc_event_log/ice_logger.cc b/logging/rtc_event_log/ice_logger.cc index 390deda953..a0ddfd92f3 100644 --- a/logging/rtc_event_log/ice_logger.cc +++ b/logging/rtc_event_log/ice_logger.cc @@ -10,9 +10,12 @@ #include "logging/rtc_event_log/ice_logger.h" +#include #include #include "api/rtc_event_log/rtc_event_log.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" namespace webrtc { @@ -26,7 +29,8 @@ void IceEventLog::LogCandidatePairConfig( if (event_log_ == nullptr) { return; } - candidate_pair_desc_by_id_[candidate_pair_id] = candidate_pair_desc; + + candidate_pair_desc_by_id_.emplace(candidate_pair_id, candidate_pair_desc); event_log_->Log(std::make_unique( type, candidate_pair_id, candidate_pair_desc)); } diff --git a/logging/rtc_event_log/ice_logger.h b/logging/rtc_event_log/ice_logger.h index 0dea43bf9d..43857c716e 100644 --- a/logging/rtc_event_log/ice_logger.h +++ b/logging/rtc_event_log/ice_logger.h @@ -11,6 +11,7 @@ #ifndef LOGGING_RTC_EVENT_LOG_ICE_LOGGER_H_ #define LOGGING_RTC_EVENT_LOG_ICE_LOGGER_H_ +#include #include #include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" diff --git a/logging/rtc_event_log/mock/mock_rtc_event_log.h b/logging/rtc_event_log/mock/mock_rtc_event_log.h index 646831de27..4a41572dfa 100644 --- a/logging/rtc_event_log/mock/mock_rtc_event_log.h +++ b/logging/rtc_event_log/mock/mock_rtc_event_log.h @@ -11,9 +11,12 @@ #ifndef LOGGING_RTC_EVENT_LOG_MOCK_MOCK_RTC_EVENT_LOG_H_ #define LOGGING_RTC_EVENT_LOG_MOCK_MOCK_RTC_EVENT_LOG_H_ +#include #include +#include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/rtc_event_log_output.h" #include "test/gmock.h" namespace webrtc { diff --git a/logging/rtc_event_log/output/rtc_event_log_output_file.h b/logging/rtc_event_log/output/rtc_event_log_output_file.h deleted file mode 100644 index 86be01d884..0000000000 --- a/logging/rtc_event_log/output/rtc_event_log_output_file.h +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. 
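Note: FakeRtcEventLogFactory::Create() is now const and records the most recently created log by casting away constness on the cached pointer, which is also initialized to nullptr. A sketch of that pattern with placeholder types (FakeLog and Factory are illustrative, not the real classes):

#include <memory>

struct FakeLog {};  // Placeholder for FakeRtcEventLog.

class Factory {
 public:
  std::unique_ptr<FakeLog> Create() const {
    auto log = std::make_unique<FakeLog>();
    // The factory interface is const, so the bookkeeping pointer is written
    // through a const_cast, as in the patch above. Initializing the member
    // to nullptr avoids reading an indeterminate value before the first
    // Create() call.
    const_cast<FakeLog*&>(last_log_created_) = log.get();
    return log;
  }
  FakeLog* last_log_created() const { return last_log_created_; }

 private:
  FakeLog* last_log_created_ = nullptr;
};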
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef LOGGING_RTC_EVENT_LOG_OUTPUT_RTC_EVENT_LOG_OUTPUT_FILE_H_ -#define LOGGING_RTC_EVENT_LOG_OUTPUT_RTC_EVENT_LOG_OUTPUT_FILE_H_ - -// TODO(bugs.webrtc.org/6463): For backwards compatibility; delete as soon as -// downstream dependencies are updated. - -#include "api/rtc_event_log_output_file.h" - -#endif // LOGGING_RTC_EVENT_LOG_OUTPUT_RTC_EVENT_LOG_OUTPUT_FILE_H_ diff --git a/logging/rtc_event_log/rtc_event_log2.proto b/logging/rtc_event_log/rtc_event_log2.proto index 658df6b6ff..a417ded706 100644 --- a/logging/rtc_event_log/rtc_event_log2.proto +++ b/logging/rtc_event_log/rtc_event_log2.proto @@ -339,6 +339,7 @@ message FrameDecodedEvents { CODEC_VP9 = 3; CODEC_AV1 = 4; CODEC_H264 = 5; + CODEC_H265 = 6; } // required diff --git a/logging/rtc_event_log/rtc_event_log2_proto_include.h b/logging/rtc_event_log/rtc_event_log2_proto_include.h index 3e43103897..43b914fd0c 100644 --- a/logging/rtc_event_log/rtc_event_log2_proto_include.h +++ b/logging/rtc_event_log/rtc_event_log2_proto_include.h @@ -11,15 +11,11 @@ #ifndef LOGGING_RTC_EVENT_LOG_RTC_EVENT_LOG2_PROTO_INCLUDE_H_ #define LOGGING_RTC_EVENT_LOG_RTC_EVENT_LOG2_PROTO_INCLUDE_H_ -#include "rtc_base/ignore_wundef.h" - // *.pb.h files are generated at build-time by the protobuf compiler. -RTC_PUSH_IGNORING_WUNDEF() #ifdef WEBRTC_ANDROID_PLATFORM_BUILD #include "external/webrtc/webrtc/logging/rtc_event_log/rtc_event_log2.pb.h" #else #include "logging/rtc_event_log/rtc_event_log2.pb.h" #endif -RTC_POP_IGNORING_WUNDEF() #endif // LOGGING_RTC_EVENT_LOG_RTC_EVENT_LOG2_PROTO_INCLUDE_H_ diff --git a/logging/rtc_event_log/rtc_event_log2rtp_dump.cc b/logging/rtc_event_log/rtc_event_log2rtp_dump.cc index a0514259aa..facffeea02 100644 --- a/logging/rtc_event_log/rtc_event_log2rtp_dump.cc +++ b/logging/rtc_event_log/rtc_event_log2rtp_dump.cc @@ -13,25 +13,24 @@ #include #include +#include #include -#include #include #include "absl/flags/flag.h" #include "absl/flags/parse.h" #include "absl/flags/usage.h" -#include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" -#include "api/rtc_event_log/rtc_event_log.h" #include "api/rtp_headers.h" +#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" #include "logging/rtc_event_log/rtc_event_log_parser.h" #include "logging/rtc_event_log/rtc_event_processor.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "rtc_base/checks.h" +#include "rtc_base/string_to_number.h" #include "test/rtp_file_reader.h" #include "test/rtp_file_writer.h" @@ -76,25 +75,14 @@ using MediaType = webrtc::ParsedRtcEventLog::MediaType; // The empty string must be validated as true, because it is the default value // of the command-line flag. In this case, no value is written to the output // variable. -absl::optional ParseSsrc(absl::string_view str) { - // If the input string starts with 0x or 0X it indicates a hexadecimal number. 
- uint32_t ssrc; - auto read_mode = std::dec; - if (str.size() > 2 && - (str.substr(0, 2) == "0x" || str.substr(0, 2) == "0X")) { - read_mode = std::hex; - str = str.substr(2); - } - std::stringstream ss(std::string{str}); - ss >> read_mode >> ssrc; - if (str.empty() || (!ss.fail() && ss.eof())) - return ssrc; - return absl::nullopt; +std::optional ParseSsrc(absl::string_view str) { + // Set `base` to 0 to allow detection of the "0x" prefix in case hex is used. + return webrtc::StringToNumber(str, 0); } bool ShouldSkipStream(MediaType media_type, uint32_t ssrc, - absl::optional ssrc_filter) { + std::optional ssrc_filter) { if (!absl::GetFlag(FLAGS_audio) && media_type == MediaType::AUDIO) return true; if (!absl::GetFlag(FLAGS_video) && media_type == MediaType::VIDEO) @@ -121,7 +109,7 @@ void ConvertRtpPacket( reconstructed_packet.SetTimestamp(incoming.rtp.header.timestamp); reconstructed_packet.SetSsrc(incoming.rtp.header.ssrc); if (incoming.rtp.header.numCSRCs > 0) { - reconstructed_packet.SetCsrcs(rtc::ArrayView( + reconstructed_packet.SetCsrcs(webrtc::ArrayView( incoming.rtp.header.arrOfCSRCs, incoming.rtp.header.numCSRCs)); } @@ -135,10 +123,9 @@ void ConvertRtpPacket( if (incoming.rtp.header.extension.hasTransportSequenceNumber) reconstructed_packet.SetExtension( incoming.rtp.header.extension.transportSequenceNumber); - if (incoming.rtp.header.extension.hasAudioLevel) - reconstructed_packet.SetExtension( - incoming.rtp.header.extension.voiceActivity, - incoming.rtp.header.extension.audioLevel); + if (incoming.rtp.header.extension.audio_level()) + reconstructed_packet.SetExtension( + *incoming.rtp.header.extension.audio_level()); if (incoming.rtp.header.extension.hasVideoRotation) reconstructed_packet.SetExtension( incoming.rtp.header.extension.videoRotation); @@ -180,7 +167,7 @@ int main(int argc, char* argv[]) { std::string input_file = args[1]; std::string output_file = args[2]; - absl::optional ssrc_filter; + std::optional ssrc_filter; if (!absl::GetFlag(FLAGS_ssrc).empty()) { ssrc_filter = ParseSsrc(absl::GetFlag(FLAGS_ssrc)); RTC_CHECK(ssrc_filter.has_value()) << "Failed to read SSRC filter flag."; diff --git a/logging/rtc_event_log/rtc_event_log_impl.cc b/logging/rtc_event_log/rtc_event_log_impl.cc index f2b3f22d6a..aece126bc4 100644 --- a/logging/rtc_event_log/rtc_event_log_impl.cc +++ b/logging/rtc_event_log/rtc_event_log_impl.cc @@ -10,43 +10,51 @@ #include "logging/rtc_event_log/rtc_event_log_impl.h" +#include +#include #include -#include +#include #include +#include #include -#include #include "absl/strings/string_view.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" +#include "api/rtc_event_log/rtc_event.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/rtc_event_log_output.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" +#include "logging/rtc_event_log/encoder/rtc_event_log_encoder.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/numerics/safe_minmax.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" namespace webrtc { +namespace { -std::unique_ptr RtcEventLogImpl::CreateEncoder( - RtcEventLog::EncodingType type) { - switch (type) { - case RtcEventLog::EncodingType::Legacy: - 
RTC_DLOG(LS_INFO) << "Creating legacy encoder for RTC event log.";
-      return std::make_unique<RtcEventLogEncoderLegacy>();
-    case RtcEventLog::EncodingType::NewFormat:
-      RTC_DLOG(LS_INFO) << "Creating new format encoder for RTC event log.";
-      return std::make_unique<RtcEventLogEncoderNewFormat>();
-    default:
-      RTC_LOG(LS_ERROR) << "Unknown RtcEventLog encoder type (" << int(type)
-                        << ")";
-      RTC_DCHECK_NOTREACHED();
-      return std::unique_ptr<RtcEventLogEncoder>(nullptr);
+std::unique_ptr<RtcEventLogEncoder> CreateEncoder(const Environment& env) {
+  if (env.field_trials().IsDisabled("WebRTC-RtcEventLogNewFormat")) {
+    RTC_DLOG(LS_INFO) << "Creating legacy encoder for RTC event log.";
+    return std::make_unique<RtcEventLogEncoderLegacy>();
+  } else {
+    RTC_DLOG(LS_INFO) << "Creating new format encoder for RTC event log.";
+    return std::make_unique<RtcEventLogEncoderNewFormat>(env.field_trials());
   }
 }
+}  // namespace
+
+RtcEventLogImpl::RtcEventLogImpl(const Environment& env)
+    : RtcEventLogImpl(CreateEncoder(env), &env.task_queue_factory()) {}
+
 RtcEventLogImpl::RtcEventLogImpl(std::unique_ptr<RtcEventLogEncoder> encoder,
                                  TaskQueueFactory* task_queue_factory,
                                  size_t max_events_in_history,
@@ -54,11 +62,10 @@ RtcEventLogImpl::RtcEventLogImpl(std::unique_ptr<RtcEventLogEncoder> encoder,
     : max_events_in_history_(max_events_in_history),
       max_config_events_in_history_(max_config_events_in_history),
       event_encoder_(std::move(encoder)),
-      last_output_ms_(rtc::TimeMillis()),
-      task_queue_(
-          std::make_unique<rtc::TaskQueue>(task_queue_factory->CreateTaskQueue(
-              "rtc_event_log",
-              TaskQueueFactory::Priority::NORMAL))) {}
+      last_output_ms_(TimeMillis()),
+      task_queue_(task_queue_factory->CreateTaskQueue(
+          "rtc_event_log",
+          TaskQueueFactory::Priority::NORMAL)) {}
 
 RtcEventLogImpl::~RtcEventLogImpl() {
   // If we're logging to the output, this will stop that. Blocking function.
@@ -71,10 +78,12 @@ RtcEventLogImpl::~RtcEventLogImpl() {
     StopLogging();
   }
 
-  // We want to block on any executing task by invoking ~TaskQueue() before
+  // Since we are posting tasks bound to `this`, it is critical that the event
+  // log and its members outlive `task_queue_`. Destruct `task_queue_` first
+  // to ensure tasks living on the queue can access other members.
+  // We want to block on any executing task by deleting TaskQueue before
   // we set unique_ptr's internal pointer to null.
-  rtc::TaskQueue* tq = task_queue_.get();
-  delete tq;
+  task_queue_.get_deleter()(task_queue_.get());
   task_queue_.release();
 }
 
@@ -89,8 +98,8 @@ bool RtcEventLogImpl::StartLogging(std::unique_ptr<RtcEventLogOutput> output,
     return false;
   }
 
-  const int64_t timestamp_us = rtc::TimeMillis() * 1000;
-  const int64_t utc_time_us = rtc::TimeUTCMillis() * 1000;
+  const int64_t timestamp_us = TimeMillis() * 1000;
+  const int64_t utc_time_us = TimeUTCMillis() * 1000;
   RTC_LOG(LS_INFO) << "Starting WebRTC event log. (Timestamp, UTC) = ("
                    << timestamp_us << ", " << utc_time_us << ").";
 
@@ -138,9 +147,9 @@ void RtcEventLogImpl::StopLogging() {
   // TODO(bugs.webrtc.org/14449): Do not block current thread waiting on the
   // task queue. It might work for now, for current callers, but disallows
   // caller to share threads with the `task_queue_`.
- rtc::Event output_stopped; + Event output_stopped; StopLogging([&output_stopped]() { output_stopped.Set(); }); - output_stopped.Wait(rtc::Event::kForever); + output_stopped.Wait(Event::kForever); RTC_DLOG(LS_INFO) << "WebRTC event log successfully stopped."; } @@ -224,10 +233,10 @@ void RtcEventLogImpl::ScheduleOutput() { LogEventsToOutput(std::move(histories)); } }; - const int64_t now_ms = rtc::TimeMillis(); + const int64_t now_ms = TimeMillis(); const int64_t time_since_output_ms = now_ms - last_output_ms_; - const int32_t delay = rtc::SafeClamp(output_period_ms_ - time_since_output_ms, - 0, output_period_ms_); + const int32_t delay = + SafeClamp(output_period_ms_ - time_since_output_ms, 0, output_period_ms_); task_queue_->PostDelayedTask(std::move(output_task), TimeDelta::Millis(delay)); } @@ -247,7 +256,7 @@ void RtcEventLogImpl::LogToMemory(std::unique_ptr event) { } void RtcEventLogImpl::LogEventsToOutput(EventHistories histories) { - last_output_ms_ = rtc::TimeMillis(); + last_output_ms_ = TimeMillis(); // Serialize the stream configurations. std::string encoded_configs = event_encoder_->EncodeBatch( @@ -308,7 +317,7 @@ void RtcEventLogImpl::StopOutput() { void RtcEventLogImpl::StopLoggingInternal() { if (event_output_) { RTC_DCHECK(event_output_->IsActive()); - const int64_t timestamp_us = rtc::TimeMillis() * 1000; + const int64_t timestamp_us = TimeMillis() * 1000; event_output_->Write(event_encoder_->EncodeLogEnd(timestamp_us)); } StopOutput(); diff --git a/logging/rtc_event_log/rtc_event_log_impl.h b/logging/rtc_event_log/rtc_event_log_impl.h index 3187a7fe87..3070034528 100644 --- a/logging/rtc_event_log/rtc_event_log_impl.h +++ b/logging/rtc_event_log/rtc_event_log_impl.h @@ -14,18 +14,20 @@ #include #include #include +#include #include -#include #include "absl/strings/string_view.h" +#include "api/environment/environment.h" #include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/rtc_event_log_output.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -39,6 +41,7 @@ class RtcEventLogImpl final : public RtcEventLog { // bound to prevent an attack via unreasonable memory use. static constexpr size_t kMaxEventsInConfigHistory = 1000; + explicit RtcEventLogImpl(const Environment& env); RtcEventLogImpl( std::unique_ptr encoder, TaskQueueFactory* task_queue_factory, @@ -49,9 +52,6 @@ class RtcEventLogImpl final : public RtcEventLog { ~RtcEventLogImpl() override; - static std::unique_ptr CreateEncoder( - EncodingType encoding_type); - // TODO(eladalon): We should change these name to reflect that what we're // actually starting/stopping is the output of the log, not the log itself. bool StartLogging(std::unique_ptr output, @@ -114,11 +114,7 @@ class RtcEventLogImpl final : public RtcEventLog { bool immediately_output_mode_ RTC_GUARDED_BY(mutex_) = false; bool need_schedule_output_ RTC_GUARDED_BY(mutex_) = false; - // Since we are posting tasks bound to `this`, it is critical that the event - // log and its members outlive `task_queue_`. Keep the `task_queue_` - // last to ensure it destructs first, or else tasks living on the queue might - // access other members after they've been torn down. 
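// [Editor's illustration, not part of the patch] The destructor change in
// rtc_event_log_impl.cc above invokes the task queue's deleter by hand and
// only then release()s the unique_ptr. Per the comments in the patch, the
// goal is to block on queued work while `task_queue_` still holds a live
// pointer; a plain reset() would swap the stored pointer to null before the
// blocking delete completes. A stripped-down sketch of the idiom with a
// hypothetical blocking deleter (FakeQueue/BlockingDeleter are placeholders,
// not WebRTC types):
#include <memory>

struct FakeQueue {
  void Drain() {}  // stands in for "run all queued tasks, blocking"
};

struct BlockingDeleter {
  void operator()(FakeQueue* q) const {
    q->Drain();  // queued tasks may still read the owner's members here
    delete q;
  }
};

struct Owner {
  std::unique_ptr<FakeQueue, BlockingDeleter> queue_{new FakeQueue};
  ~Owner() {
    // Delete while queue_ still points at the object, then detach it so the
    // unique_ptr's own destructor does not delete it a second time.
    queue_.get_deleter()(queue_.get());
    queue_.release();
  }
};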
- std::unique_ptr task_queue_; + std::unique_ptr task_queue_; Mutex mutex_; }; diff --git a/logging/rtc_event_log/rtc_event_log_impl_unittest.cc b/logging/rtc_event_log/rtc_event_log_impl_unittest.cc index 1e8799bdd6..f2bde7534e 100644 --- a/logging/rtc_event_log/rtc_event_log_impl_unittest.cc +++ b/logging/rtc_event_log/rtc_event_log_impl_unittest.cc @@ -10,9 +10,20 @@ #include "logging/rtc_event_log/rtc_event_log_impl.h" +#include +#include +#include +#include +#include #include -#include +#include "absl/strings/string_view.h" +#include "api/rtc_event_log/rtc_event.h" +#include "api/rtc_event_log_output.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "logging/rtc_event_log/encoder/rtc_event_log_encoder.h" +#include "rtc_base/checks.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/time_controller/simulated_time_controller.h" @@ -27,7 +38,6 @@ using ::testing::Mock; using ::testing::Property; using ::testing::Ref; using ::testing::Return; -using ::testing::StrEq; class MockEventEncoder : public RtcEventLogEncoder { public: diff --git a/logging/rtc_event_log/rtc_event_log_parser.cc b/logging/rtc_event_log/rtc_event_log_parser.cc index 0ea96431a7..aff018197a 100644 --- a/logging/rtc_event_log/rtc_event_log_parser.cc +++ b/logging/rtc_event_log/rtc_event_log_parser.cc @@ -16,26 +16,78 @@ #include #include #include +#include +#include +#include +#include #include +#include -#include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/network_state_predictor.h" +#include "api/candidate.h" +#include "api/dtls_transport_interface.h" +#include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/rtp_headers.h" #include "api/rtp_parameters.h" +#include "api/transport/bandwidth_usage.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/video_codec_type.h" #include "logging/rtc_event_log/dependency_descriptor_encoder_decoder.h" #include "logging/rtc_event_log/encoder/blob_encoding.h" #include "logging/rtc_event_log/encoder/delta_encoding.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_common.h" #include "logging/rtc_event_log/encoder/var_int.h" #include "logging/rtc_event_log/events/logged_rtp_rtcp.h" +#include "logging/rtc_event_log/events/rtc_event_alr_state.h" +#include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h" +#include "logging/rtc_event_log/events/rtc_event_audio_playout.h" +#include "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_begin_log.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" +#include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" +#include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h" +#include "logging/rtc_event_log/events/rtc_event_end_log.h" +#include "logging/rtc_event_log/events/rtc_event_frame_decoded.h" +#include "logging/rtc_event_log/events/rtc_event_generic_ack_received.h" +#include "logging/rtc_event_log/events/rtc_event_generic_packet_received.h" +#include "logging/rtc_event_log/events/rtc_event_generic_packet_sent.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" +#include 
"logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" +#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h" +#include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" +#include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h" +#include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" +#include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" +#include "logging/rtc_event_log/events/rtc_event_route_change.h" +#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h" +#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h" +#include "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h" +#include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" +#include "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" +#include "logging/rtc_event_log/rtc_event_log.pb.h" +#include "logging/rtc_event_log/rtc_event_log2.pb.h" #include "logging/rtc_event_log/rtc_event_processor.h" -#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h" +#include "logging/rtc_event_log/rtc_stream_config.h" +#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" #include "modules/rtp_rtcp/include/rtp_cvo.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/rtcp_packet/bye.h" +#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" +#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" +#include "modules/rtp_rtcp/source/rtcp_packet/fir.h" +#include "modules/rtp_rtcp/source/rtcp_packet/nack.h" +#include "modules/rtp_rtcp/source/rtcp_packet/pli.h" +#include "modules/rtp_rtcp/source/rtcp_packet/psfb.h" +#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" @@ -53,7 +105,7 @@ using webrtc_event_logging::ToUnsigned; namespace webrtc { namespace { -constexpr int64_t kMaxLogSize = 250000000; +constexpr size_t kMaxLogSize = 250000000; constexpr size_t kIpv4Overhead = 20; constexpr size_t kIpv6Overhead = 40; @@ -163,22 +215,29 @@ IceCandidatePairConfigType GetRuntimeIceCandidatePairConfigType( return IceCandidatePairConfigType::kAdded; } -IceCandidateType GetRuntimeIceCandidateType( - rtclog::IceCandidatePairConfig::IceCandidateType type) { - switch (type) { +// Converts a log type (proto based) to a matching `IceCandidateType` value +// and checks for validity of the log type (since the enums aren't a perfect +// match). 
+bool GetRuntimeIceCandidateType( + rtclog::IceCandidatePairConfig::IceCandidateType log_type, + IceCandidateType& parsed_type) { + switch (log_type) { case rtclog::IceCandidatePairConfig::LOCAL: - return IceCandidateType::kLocal; + parsed_type = IceCandidateType::kHost; + break; case rtclog::IceCandidatePairConfig::STUN: - return IceCandidateType::kStun; + parsed_type = IceCandidateType::kSrflx; + break; case rtclog::IceCandidatePairConfig::PRFLX: - return IceCandidateType::kPrflx; + parsed_type = IceCandidateType::kPrflx; + break; case rtclog::IceCandidatePairConfig::RELAY: - return IceCandidateType::kRelay; - case rtclog::IceCandidatePairConfig::UNKNOWN_CANDIDATE_TYPE: - return IceCandidateType::kUnknown; + parsed_type = IceCandidateType::kRelay; + break; + default: + return false; } - RTC_DCHECK_NOTREACHED(); - return IceCandidateType::kUnknown; + return true; } IceCandidatePairProtocol GetRuntimeIceCandidatePairProtocol( @@ -261,13 +320,14 @@ VideoCodecType GetRuntimeCodecType(rtclog2::FrameDecodedEvents::Codec codec) { return VideoCodecType::kVideoCodecAV1; case rtclog2::FrameDecodedEvents::CODEC_H264: return VideoCodecType::kVideoCodecH264; + case rtclog2::FrameDecodedEvents::CODEC_H265: + return VideoCodecType::kVideoCodecH265; case rtclog2::FrameDecodedEvents::CODEC_UNKNOWN: - RTC_LOG(LS_ERROR) << "Unknown codec type. Assuming " - "VideoCodecType::kVideoCodecMultiplex"; - return VideoCodecType::kVideoCodecMultiplex; + RTC_LOG(LS_ERROR) << "Unknown codec type. Returning generic."; + return VideoCodecType::kVideoCodecGeneric; } RTC_DCHECK_NOTREACHED(); - return VideoCodecType::kVideoCodecMultiplex; + return VideoCodecType::kVideoCodecGeneric; } ParsedRtcEventLog::ParseStatus GetHeaderExtensions( @@ -318,45 +378,68 @@ ParsedRtcEventLog::ParseStatus StoreRtpPackets( // Base event { RTPHeader header; - header.markerBit = rtc::checked_cast(proto.marker()); - header.payloadType = rtc::checked_cast(proto.payload_type()); - header.sequenceNumber = - rtc::checked_cast(proto.sequence_number()); - header.timestamp = rtc::checked_cast(proto.rtp_timestamp()); - header.ssrc = rtc::checked_cast(proto.ssrc()); + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(proto.marker())); + header.markerBit = static_cast(proto.marker()); + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(proto.payload_type())); + header.payloadType = static_cast(proto.payload_type()); + + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(proto.sequence_number())); + header.sequenceNumber = static_cast(proto.sequence_number()); + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(proto.rtp_timestamp())); + header.timestamp = static_cast(proto.rtp_timestamp()); + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(proto.ssrc())); + header.ssrc = static_cast(proto.ssrc()); header.numCSRCs = 0; // TODO(terelius): Implement CSRC. - header.paddingLength = rtc::checked_cast(proto.padding_size()); - header.headerLength = rtc::checked_cast(proto.header_size()); + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(proto.padding_size())); + header.paddingLength = static_cast(proto.padding_size()); + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(proto.header_size())); + header.headerLength = static_cast(proto.header_size()); // TODO(terelius): Should we implement payload_type_frequency? 
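// [Editor's illustration, not part of the patch] Throughout this hunk,
// rtc::checked_cast<T>() (which treats out-of-range input as a fatal error)
// is replaced by an explicit IsValueInRangeForNumericType<T>() check followed
// by a plain static_cast<T>(), so a malformed log produces a parse failure
// instead of a crash. The same check can be sketched with C++20's
// std::in_range, used here only as a stand-in for the helper in the patch:
#include <cstdint>
#include <utility>

// Returns false (a parse error) instead of crashing when `raw` does not fit
// the 8-bit RTP payload type field.
bool StorePayloadTypeSketch(uint64_t raw, uint8_t& out) {
  if (!std::in_range<uint8_t>(raw)) {
    return false;
  }
  out = static_cast<uint8_t>(raw);
  return true;
}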
if (proto.has_transport_sequence_number()) { header.extension.hasTransportSequenceNumber = true; + RTC_PARSE_CHECK_OR_RETURN(IsValueInRangeForNumericType( + proto.transport_sequence_number())); header.extension.transportSequenceNumber = - rtc::checked_cast(proto.transport_sequence_number()); + static_cast(proto.transport_sequence_number()); } if (proto.has_transmission_time_offset()) { header.extension.hasTransmissionTimeOffset = true; + RTC_PARSE_CHECK_OR_RETURN(IsValueInRangeForNumericType( + proto.transmission_time_offset())); header.extension.transmissionTimeOffset = - rtc::checked_cast(proto.transmission_time_offset()); + static_cast(proto.transmission_time_offset()); } if (proto.has_absolute_send_time()) { header.extension.hasAbsoluteSendTime = true; + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(proto.absolute_send_time())); header.extension.absoluteSendTime = - rtc::checked_cast(proto.absolute_send_time()); + static_cast(proto.absolute_send_time()); } if (proto.has_video_rotation()) { header.extension.hasVideoRotation = true; + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(proto.video_rotation())); header.extension.videoRotation = ConvertCVOByteToVideoRotation( - rtc::checked_cast(proto.video_rotation())); + static_cast(proto.video_rotation())); } if (proto.has_audio_level()) { RTC_PARSE_CHECK_OR_RETURN(proto.has_voice_activity()); - header.extension.hasAudioLevel = true; - header.extension.voiceActivity = - rtc::checked_cast(proto.voice_activity()); - const uint8_t audio_level = - rtc::checked_cast(proto.audio_level()); - RTC_PARSE_CHECK_OR_RETURN_LE(audio_level, 0x7Fu); - header.extension.audioLevel = audio_level; + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(proto.voice_activity())); + bool voice_activity = static_cast(proto.voice_activity()); + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(proto.audio_level())); + int audio_level = static_cast(proto.audio_level()); + RTC_PARSE_CHECK_OR_RETURN_LE(audio_level, 0x7F); + header.extension.set_audio_level(AudioLevel(voice_activity, audio_level)); } else { RTC_PARSE_CHECK_OR_RETURN(!proto.has_voice_activity()); } @@ -375,59 +458,59 @@ ParsedRtcEventLog::ParseStatus StoreRtpPackets( } // timestamp_ms (event) - std::vector> timestamp_ms_values = + std::vector> timestamp_ms_values = DecodeDeltas(proto.timestamp_ms_deltas(), ToUnsigned(proto.timestamp_ms()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(timestamp_ms_values.size(), number_of_deltas); // marker (RTP base) - std::vector> marker_values = + std::vector> marker_values = DecodeDeltas(proto.marker_deltas(), proto.marker(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(marker_values.size(), number_of_deltas); // payload_type (RTP base) - std::vector> payload_type_values = DecodeDeltas( + std::vector> payload_type_values = DecodeDeltas( proto.payload_type_deltas(), proto.payload_type(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(payload_type_values.size(), number_of_deltas); // sequence_number (RTP base) - std::vector> sequence_number_values = + std::vector> sequence_number_values = DecodeDeltas(proto.sequence_number_deltas(), proto.sequence_number(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(sequence_number_values.size(), number_of_deltas); // rtp_timestamp (RTP base) - std::vector> rtp_timestamp_values = DecodeDeltas( + std::vector> rtp_timestamp_values = DecodeDeltas( proto.rtp_timestamp_deltas(), proto.rtp_timestamp(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(rtp_timestamp_values.size(), number_of_deltas); 
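// [Editor's illustration, not part of the patch] The
// set_audio_level(AudioLevel(voice_activity, audio_level)) calls in this
// function replace three parallel RTPHeaderExtension fields (hasAudioLevel,
// voiceActivity, audioLevel) with a single optional value object. A
// simplified, self-contained sketch of that shape (names approximate the API
// used by the patch but are not copied from it):
#include <optional>

class AudioLevelSketch {
 public:
  AudioLevelSketch(bool voice_activity, int level)
      : voice_activity_(voice_activity), level_(level) {}
  bool voice_activity() const { return voice_activity_; }
  int level() const { return level_; }  // 0..127 per RFC 6464

 private:
  bool voice_activity_;
  int level_;
};

struct HeaderExtensionSketch {
  // One field replaces the has/flag/value triple; "not present" is encoded
  // by std::nullopt rather than a separate boolean.
  std::optional<AudioLevelSketch> audio_level;
};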
// ssrc (RTP base) - std::vector> ssrc_values = + std::vector> ssrc_values = DecodeDeltas(proto.ssrc_deltas(), proto.ssrc(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(ssrc_values.size(), number_of_deltas); // payload_size (RTP base) - std::vector> payload_size_values = DecodeDeltas( + std::vector> payload_size_values = DecodeDeltas( proto.payload_size_deltas(), proto.payload_size(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(payload_size_values.size(), number_of_deltas); // header_size (RTP base) - std::vector> header_size_values = DecodeDeltas( + std::vector> header_size_values = DecodeDeltas( proto.header_size_deltas(), proto.header_size(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(header_size_values.size(), number_of_deltas); // padding_size (RTP base) - std::vector> padding_size_values = DecodeDeltas( + std::vector> padding_size_values = DecodeDeltas( proto.padding_size_deltas(), proto.padding_size(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(padding_size_values.size(), number_of_deltas); // transport_sequence_number (RTP extension) - std::vector> transport_sequence_number_values; + std::vector> transport_sequence_number_values; { - const absl::optional base_transport_sequence_number = + const std::optional base_transport_sequence_number = proto.has_transport_sequence_number() ? proto.transport_sequence_number() - : absl::optional(); + : std::optional(); transport_sequence_number_values = DecodeDeltas(proto.transport_sequence_number_deltas(), base_transport_sequence_number, number_of_deltas); @@ -436,12 +519,12 @@ ParsedRtcEventLog::ParseStatus StoreRtpPackets( } // transmission_time_offset (RTP extension) - std::vector> transmission_time_offset_values; + std::vector> transmission_time_offset_values; { - const absl::optional unsigned_base_transmission_time_offset = + const std::optional unsigned_base_transmission_time_offset = proto.has_transmission_time_offset() ? ToUnsigned(proto.transmission_time_offset()) - : absl::optional(); + : std::optional(); transmission_time_offset_values = DecodeDeltas(proto.transmission_time_offset_deltas(), unsigned_base_transmission_time_offset, number_of_deltas); @@ -450,11 +533,11 @@ ParsedRtcEventLog::ParseStatus StoreRtpPackets( } // absolute_send_time (RTP extension) - std::vector> absolute_send_time_values; + std::vector> absolute_send_time_values; { - const absl::optional base_absolute_send_time = + const std::optional base_absolute_send_time = proto.has_absolute_send_time() ? proto.absolute_send_time() - : absl::optional(); + : std::optional(); absolute_send_time_values = DecodeDeltas(proto.absolute_send_time_deltas(), base_absolute_send_time, number_of_deltas); @@ -463,11 +546,11 @@ ParsedRtcEventLog::ParseStatus StoreRtpPackets( } // video_rotation (RTP extension) - std::vector> video_rotation_values; + std::vector> video_rotation_values; { - const absl::optional base_video_rotation = + const std::optional base_video_rotation = proto.has_video_rotation() ? proto.video_rotation() - : absl::optional(); + : std::optional(); video_rotation_values = DecodeDeltas(proto.video_rotation_deltas(), base_video_rotation, number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(video_rotation_values.size(), @@ -475,22 +558,22 @@ ParsedRtcEventLog::ParseStatus StoreRtpPackets( } // audio_level (RTP extension) - std::vector> audio_level_values; + std::vector> audio_level_values; { - const absl::optional base_audio_level = + const std::optional base_audio_level = proto.has_audio_level() ? 
proto.audio_level() - : absl::optional(); + : std::optional(); audio_level_values = DecodeDeltas(proto.audio_level_deltas(), base_audio_level, number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(audio_level_values.size(), number_of_deltas); } // voice_activity (RTP extension) - std::vector> voice_activity_values; + std::vector> voice_activity_values; { - const absl::optional base_voice_activity = + const std::optional base_voice_activity = proto.has_voice_activity() ? proto.voice_activity() - : absl::optional(); + : std::optional(); voice_activity_values = DecodeDeltas(proto.voice_activity_deltas(), base_voice_activity, number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(voice_activity_values.size(), @@ -514,21 +597,36 @@ ParsedRtcEventLog::ParseStatus StoreRtpPackets( ToSigned(timestamp_ms_values[i].value(), ×tamp_ms)); RTPHeader header; - header.markerBit = rtc::checked_cast(*marker_values[i]); - header.payloadType = rtc::checked_cast(*payload_type_values[i]); - header.sequenceNumber = - rtc::checked_cast(*sequence_number_values[i]); - header.timestamp = rtc::checked_cast(*rtp_timestamp_values[i]); - header.ssrc = rtc::checked_cast(*ssrc_values[i]); + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(*marker_values[i])); + header.markerBit = static_cast(*marker_values[i]); + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(*payload_type_values[i])); + header.payloadType = static_cast(*payload_type_values[i]); + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(*sequence_number_values[i])); + header.sequenceNumber = static_cast(*sequence_number_values[i]); + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(*rtp_timestamp_values[i])); + header.timestamp = static_cast(*rtp_timestamp_values[i]); + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(*ssrc_values[i])); + header.ssrc = static_cast(*ssrc_values[i]); header.numCSRCs = 0; // TODO(terelius): Implement CSRC. - header.paddingLength = rtc::checked_cast(*padding_size_values[i]); - header.headerLength = rtc::checked_cast(*header_size_values[i]); + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(*padding_size_values[i])); + header.paddingLength = static_cast(*padding_size_values[i]); + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(*header_size_values[i])); + header.headerLength = static_cast(*header_size_values[i]); // TODO(terelius): Should we implement payload_type_frequency? 
if (transport_sequence_number_values.size() > i && transport_sequence_number_values[i].has_value()) { header.extension.hasTransportSequenceNumber = true; - header.extension.transportSequenceNumber = rtc::checked_cast( - transport_sequence_number_values[i].value()); + RTC_PARSE_CHECK_OR_RETURN(IsValueInRangeForNumericType( + transport_sequence_number_values[i].value())); + header.extension.transportSequenceNumber = + static_cast(transport_sequence_number_values[i].value()); } if (transmission_time_offset_values.size() > i && transmission_time_offset_values[i].has_value()) { @@ -542,25 +640,30 @@ ParsedRtcEventLog::ParseStatus StoreRtpPackets( if (absolute_send_time_values.size() > i && absolute_send_time_values[i].has_value()) { header.extension.hasAbsoluteSendTime = true; + RTC_PARSE_CHECK_OR_RETURN(IsValueInRangeForNumericType( + absolute_send_time_values[i].value())); header.extension.absoluteSendTime = - rtc::checked_cast(absolute_send_time_values[i].value()); + static_cast(absolute_send_time_values[i].value()); } if (video_rotation_values.size() > i && video_rotation_values[i].has_value()) { header.extension.hasVideoRotation = true; + RTC_PARSE_CHECK_OR_RETURN(IsValueInRangeForNumericType( + video_rotation_values[i].value())); header.extension.videoRotation = ConvertCVOByteToVideoRotation( - rtc::checked_cast(video_rotation_values[i].value())); + static_cast(video_rotation_values[i].value())); } if (audio_level_values.size() > i && audio_level_values[i].has_value()) { RTC_PARSE_CHECK_OR_RETURN(voice_activity_values.size() > i && voice_activity_values[i].has_value()); - header.extension.hasAudioLevel = true; - header.extension.voiceActivity = - rtc::checked_cast(voice_activity_values[i].value()); - const uint8_t audio_level = - rtc::checked_cast(audio_level_values[i].value()); - RTC_PARSE_CHECK_OR_RETURN_LE(audio_level, 0x7Fu); - header.extension.audioLevel = audio_level; + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(voice_activity_values[i].value())); + bool voice_activity = static_cast(voice_activity_values[i].value()); + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(audio_level_values[i].value())); + int audio_level = static_cast(audio_level_values[i].value()); + RTC_PARSE_CHECK_OR_RETURN_LE(audio_level, 0x7F); + header.extension.set_audio_level(AudioLevel(voice_activity, audio_level)); } else { RTC_PARSE_CHECK_OR_RETURN(voice_activity_values.size() <= i || !voice_activity_values[i].has_value()); @@ -605,7 +708,7 @@ ParsedRtcEventLog::ParseStatus StoreRtcpPackets( } // timestamp_ms - std::vector> timestamp_ms_values = + std::vector> timestamp_ms_values = DecodeDeltas(proto.timestamp_ms_deltas(), ToUnsigned(proto.timestamp_ms()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(timestamp_ms_values.size(), number_of_deltas); @@ -651,6 +754,7 @@ ParsedRtcEventLog::ParseStatus StoreRtcpBlocks( std::vector* pli_list, std::vector* bye_list, std::vector* transport_feedback_list, + std::vector* congestion_feedback_list, std::vector* loss_notification_list) { Timestamp timestamp = Timestamp::Micros(timestamp_us); rtcp::CommonHeader header; @@ -663,6 +767,12 @@ ParsedRtcEventLog::ParseStatus StoreRtcpBlocks( parsed_block.timestamp = timestamp; RTC_PARSE_CHECK_OR_RETURN(parsed_block.transport_feedback.Parse(header)); transport_feedback_list->push_back(std::move(parsed_block)); + } else if (header.type() == rtcp::Rtpfb::kPacketType && + header.fmt() == + rtcp::CongestionControlFeedback::kFeedbackMessageType) { + rtcp::CongestionControlFeedback feedback; + 
RTC_PARSE_CHECK_OR_RETURN(feedback.Parse(header)); + congestion_feedback_list->emplace_back(timestamp, std::move(feedback)); } else if (header.type() == rtcp::SenderReport::kPacketType) { LoggedRtcpPacketSenderReport parsed_block; parsed_block.timestamp = timestamp; @@ -804,18 +914,39 @@ IceCandidateType GetRuntimeIceCandidateType( rtclog2::IceCandidatePairConfig::IceCandidateType type) { switch (type) { case rtclog2::IceCandidatePairConfig::LOCAL: - return IceCandidateType::kLocal; + return IceCandidateType::kHost; case rtclog2::IceCandidatePairConfig::STUN: - return IceCandidateType::kStun; + return IceCandidateType::kSrflx; case rtclog2::IceCandidatePairConfig::PRFLX: return IceCandidateType::kPrflx; case rtclog2::IceCandidatePairConfig::RELAY: return IceCandidateType::kRelay; - case rtclog2::IceCandidatePairConfig::UNKNOWN_CANDIDATE_TYPE: - return IceCandidateType::kUnknown; + default: + RTC_DCHECK_NOTREACHED(); + return IceCandidateType::kHost; } - RTC_DCHECK_NOTREACHED(); - return IceCandidateType::kUnknown; +} + +bool GetRuntimeIceCandidateType( + rtclog2::IceCandidatePairConfig::IceCandidateType log_type, + IceCandidateType& parsed_type) { + switch (log_type) { + case rtclog2::IceCandidatePairConfig::LOCAL: + parsed_type = IceCandidateType::kHost; + break; + case rtclog2::IceCandidatePairConfig::STUN: + parsed_type = IceCandidateType::kSrflx; + break; + case rtclog2::IceCandidatePairConfig::PRFLX: + parsed_type = IceCandidateType::kPrflx; + break; + case rtclog2::IceCandidatePairConfig::RELAY: + parsed_type = IceCandidateType::kRelay; + break; + default: + return false; + } + return true; } IceCandidatePairProtocol GetRuntimeIceCandidatePairProtocol( @@ -1006,7 +1137,7 @@ ParsedRtcEventLog::GetDefaultHeaderExtensionMap() { constexpr int kDependencyDescriptorDefaultId = 9; webrtc::RtpHeaderExtensionMap default_map(/*extmap_allow_mixed=*/true); - default_map.Register(kAudioLevelDefaultId); + default_map.Register(kAudioLevelDefaultId); default_map.Register(kTimestampOffsetDefaultId); default_map.Register(kAbsSendTimeDefaultId); default_map.Register(kVideoRotationDefaultId); @@ -1103,17 +1234,17 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseFile( } // Compute file size. - long signed_filesize = file.FileSize(); // NOLINT(runtime/int) - RTC_PARSE_CHECK_OR_RETURN_GE(signed_filesize, 0); - RTC_PARSE_CHECK_OR_RETURN_LE(signed_filesize, kMaxLogSize); - size_t filesize = rtc::checked_cast(signed_filesize); + std::optional file_size = file.FileSize(); + RTC_PARSE_CHECK_OR_RETURN(file_size.has_value()); + RTC_PARSE_CHECK_OR_RETURN_GE(*file_size, 0u); + RTC_PARSE_CHECK_OR_RETURN_LE(*file_size, kMaxLogSize); // Read file into memory. 
- std::string buffer(filesize, '\0'); + std::string buffer(*file_size, '\0'); size_t bytes_read = file.Read(&buffer[0], buffer.size()); - if (bytes_read != filesize) { + if (bytes_read != *file_size) { RTC_LOG(LS_WARNING) << "Failed to read file " << filename; - RTC_PARSE_CHECK_OR_RETURN_EQ(bytes_read, filesize); + RTC_PARSE_CHECK_OR_RETURN_EQ(bytes_read, *file_size); } return ParseStream(buffer); @@ -1186,7 +1317,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream( timestamp_us, packet_begin, packet_end, &incoming_sr_, &incoming_rr_, &incoming_xr_, &incoming_remb_, &incoming_nack_, &incoming_fir_, &incoming_pli_, &incoming_bye_, &incoming_transport_feedback_, - &incoming_loss_notification_); + &incoming_congestion_feedback_, &incoming_loss_notification_); RTC_RETURN_IF_ERROR(store_rtcp_status); } @@ -1198,7 +1329,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream( timestamp_us, packet_begin, packet_end, &outgoing_sr_, &outgoing_rr_, &outgoing_xr_, &outgoing_remb_, &outgoing_nack_, &outgoing_fir_, &outgoing_pli_, &outgoing_bye_, &outgoing_transport_feedback_, - &outgoing_loss_notification_); + &outgoing_congestion_feedback_, &outgoing_loss_notification_); RTC_RETURN_IF_ERROR(store_rtcp_status); } @@ -1617,7 +1748,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreParsedLegacyEvent( // has a buildin convertion to RTPHeader. RtpPacketReceived rtp_header; RTC_PARSE_CHECK_OR_RETURN( - rtp_header.Parse(rtc::CopyOnWriteBuffer(rtp_packet.header()))); + rtp_header.Parse(CopyOnWriteBuffer(rtp_packet.header()))); if (const RtpHeaderExtensionMap* extension_map = GetRtpHeaderExtensionMap( rtp_packet.incoming(), rtp_header.Ssrc())) { @@ -1708,8 +1839,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreParsedLegacyEvent( case rtclog::Event::AUDIO_NETWORK_ADAPTATION_EVENT: { auto status_or_value = GetAudioNetworkAdaptation(event); RTC_RETURN_IF_ERROR(status_or_value.status()); - LoggedAudioNetworkAdaptationEvent ana_event = status_or_value.value(); - audio_network_adaptation_events_.push_back(ana_event); + audio_network_adaptation_events_.push_back(status_or_value.value()); break; } case rtclog::Event::BWE_PROBE_CLUSTER_CREATED_EVENT: { @@ -2140,8 +2270,8 @@ ParsedRtcEventLog::GetIceCandidatePairConfig( RTC_PARSE_CHECK_OR_RETURN(config.has_candidate_pair_id()); res.candidate_pair_id = config.candidate_pair_id(); RTC_PARSE_CHECK_OR_RETURN(config.has_local_candidate_type()); - res.local_candidate_type = - GetRuntimeIceCandidateType(config.local_candidate_type()); + RTC_PARSE_CHECK_OR_RETURN(GetRuntimeIceCandidateType( + config.local_candidate_type(), res.local_candidate_type)); RTC_PARSE_CHECK_OR_RETURN(config.has_local_relay_protocol()); res.local_relay_protocol = GetRuntimeIceCandidatePairProtocol(config.local_relay_protocol()); @@ -2152,8 +2282,8 @@ ParsedRtcEventLog::GetIceCandidatePairConfig( res.local_address_family = GetRuntimeIceCandidatePairAddressFamily(config.local_address_family()); RTC_PARSE_CHECK_OR_RETURN(config.has_remote_candidate_type()); - res.remote_candidate_type = - GetRuntimeIceCandidateType(config.remote_candidate_type()); + RTC_PARSE_CHECK_OR_RETURN(GetRuntimeIceCandidateType( + config.remote_candidate_type(), res.remote_candidate_type)); RTC_PARSE_CHECK_OR_RETURN(config.has_remote_address_family()); res.remote_address_family = GetRuntimeIceCandidatePairAddressFamily(config.remote_address_family()); @@ -2240,13 +2370,13 @@ std::vector ParsedRtcEventLog::GetRouteChanges() if (candidate.remote_address_family == 
IceCandidatePairAddressFamily::kIpv6) route.send_overhead += kIpv6Overhead - kIpv4Overhead; - if (candidate.remote_candidate_type != IceCandidateType::kLocal) + if (candidate.remote_candidate_type != IceCandidateType::kHost) route.send_overhead += kStunOverhead; route.return_overhead = kUdpOverhead + kSrtpOverhead + kIpv4Overhead; if (candidate.remote_address_family == IceCandidatePairAddressFamily::kIpv6) route.return_overhead += kIpv6Overhead - kIpv4Overhead; - if (candidate.remote_candidate_type != IceCandidateType::kLocal) + if (candidate.remote_candidate_type != IceCandidateType::kHost) route.return_overhead += kStunOverhead; route_changes.push_back(route); } @@ -2318,10 +2448,11 @@ std::vector ParsedRtcEventLog::GetPacketInfos( int64_t unwrapped_seq_num = seq_num_unwrapper.Unwrap(logged.transport_seq_no); if (indices.find(unwrapped_seq_num) != indices.end()) { - auto prev = packets[indices[unwrapped_seq_num]]; + Timestamp prev_log_packet_time = + packets[indices[unwrapped_seq_num]].log_packet_time; RTC_LOG(LS_WARNING) << "Repeated sent packet sequence number: " << unwrapped_seq_num - << " Packet time:" << prev.log_packet_time.seconds() << "s vs " + << " Packet time:" << prev_log_packet_time.seconds() << "s vs " << logged.log_packet_time.seconds() << "s at:" << rtp.log_time_ms() / 1000; } @@ -2463,7 +2594,7 @@ std::vector ParsedRtcEventLog::GetIceEvents() const { return log_events; } -const std::vector GetNetworkTrace( +std::vector GetNetworkTrace( const ParsedRtcEventLog& parsed_log) { std::vector rtp_rtcp_matched; for (auto& packet : @@ -2610,14 +2741,14 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreRemoteEstimateEvent( LoggedRemoteEstimateEvent base_event; base_event.timestamp = Timestamp::Millis(proto.timestamp_ms()); - absl::optional base_link_capacity_lower_kbps; + std::optional base_link_capacity_lower_kbps; if (proto.has_link_capacity_lower_kbps()) { base_link_capacity_lower_kbps = proto.link_capacity_lower_kbps(); base_event.link_capacity_lower = DataRate::KilobitsPerSec(proto.link_capacity_lower_kbps()); } - absl::optional base_link_capacity_upper_kbps; + std::optional base_link_capacity_upper_kbps; if (proto.has_link_capacity_upper_kbps()) { base_link_capacity_upper_kbps = proto.link_capacity_upper_kbps(); base_event.link_capacity_upper = @@ -2684,13 +2815,13 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreAudioPlayoutEvent( } // timestamp_ms - std::vector> timestamp_ms_values = + std::vector> timestamp_ms_values = DecodeDeltas(proto.timestamp_ms_deltas(), ToUnsigned(proto.timestamp_ms()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(timestamp_ms_values.size(), number_of_deltas); // local_ssrc - std::vector> local_ssrc_values = DecodeDeltas( + std::vector> local_ssrc_values = DecodeDeltas( proto.local_ssrc_deltas(), proto.local_ssrc(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(local_ssrc_values.size(), number_of_deltas); @@ -2731,18 +2862,18 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreNetEqSetMinimumDelay( } // timestamp_ms - std::vector> timestamp_ms_values = + std::vector> timestamp_ms_values = DecodeDeltas(proto.timestamp_ms_deltas(), ToUnsigned(proto.timestamp_ms()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(timestamp_ms_values.size(), number_of_deltas); // remote_ssrc - std::vector> remote_ssrc_values = DecodeDeltas( + std::vector> remote_ssrc_values = DecodeDeltas( proto.remote_ssrc_deltas(), proto.remote_ssrc(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(remote_ssrc_values.size(), number_of_deltas); // 
minimum_delay_ms - std::vector> minimum_delay_ms_values = + std::vector> minimum_delay_ms_values = DecodeDeltas(proto.minimum_delay_ms_deltas(), ToUnsigned(proto.minimum_delay_ms()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(minimum_delay_ms_values.size(), @@ -2835,23 +2966,23 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreBweLossBasedUpdate( } // timestamp_ms - std::vector> timestamp_ms_values = + std::vector> timestamp_ms_values = DecodeDeltas(proto.timestamp_ms_deltas(), ToUnsigned(proto.timestamp_ms()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(timestamp_ms_values.size(), number_of_deltas); // bitrate_bps - std::vector> bitrate_bps_values = DecodeDeltas( + std::vector> bitrate_bps_values = DecodeDeltas( proto.bitrate_bps_deltas(), proto.bitrate_bps(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(bitrate_bps_values.size(), number_of_deltas); // fraction_loss - std::vector> fraction_loss_values = DecodeDeltas( + std::vector> fraction_loss_values = DecodeDeltas( proto.fraction_loss_deltas(), proto.fraction_loss(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(fraction_loss_values.size(), number_of_deltas); // total_packets - std::vector> total_packets_values = DecodeDeltas( + std::vector> total_packets_values = DecodeDeltas( proto.total_packets_deltas(), proto.total_packets(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(total_packets_values.size(), number_of_deltas); @@ -2905,18 +3036,18 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreBweDelayBasedUpdate( } // timestamp_ms - std::vector> timestamp_ms_values = + std::vector> timestamp_ms_values = DecodeDeltas(proto.timestamp_ms_deltas(), ToUnsigned(proto.timestamp_ms()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(timestamp_ms_values.size(), number_of_deltas); // bitrate_bps - std::vector> bitrate_bps_values = DecodeDeltas( + std::vector> bitrate_bps_values = DecodeDeltas( proto.bitrate_bps_deltas(), proto.bitrate_bps(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(bitrate_bps_values.size(), number_of_deltas); // detector_state - std::vector> detector_state_values = DecodeDeltas( + std::vector> detector_state_values = DecodeDeltas( proto.detector_state_deltas(), static_cast(proto.detector_state()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(detector_state_values.size(), number_of_deltas); @@ -3028,40 +3159,40 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreFrameDecodedEvents( } // timestamp_ms - std::vector> timestamp_ms_values = + std::vector> timestamp_ms_values = DecodeDeltas(proto.timestamp_ms_deltas(), ToUnsigned(proto.timestamp_ms()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(timestamp_ms_values.size(), number_of_deltas); // SSRC - std::vector> ssrc_values = + std::vector> ssrc_values = DecodeDeltas(proto.ssrc_deltas(), proto.ssrc(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(ssrc_values.size(), number_of_deltas); // render_time_ms - std::vector> render_time_ms_values = + std::vector> render_time_ms_values = DecodeDeltas(proto.render_time_ms_deltas(), ToUnsigned(proto.render_time_ms()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(render_time_ms_values.size(), number_of_deltas); // width - std::vector> width_values = DecodeDeltas( + std::vector> width_values = DecodeDeltas( proto.width_deltas(), ToUnsigned(proto.width()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(width_values.size(), number_of_deltas); // height - std::vector> height_values = DecodeDeltas( + std::vector> height_values = DecodeDeltas( proto.height_deltas(), ToUnsigned(proto.height()), 
number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(height_values.size(), number_of_deltas); // codec - std::vector> codec_values = + std::vector> codec_values = DecodeDeltas(proto.codec_deltas(), static_cast(proto.codec()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(codec_values.size(), number_of_deltas); // qp - std::vector> qp_values = + std::vector> qp_values = DecodeDeltas(proto.qp_deltas(), proto.qp(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(qp_values.size(), number_of_deltas); @@ -3112,7 +3243,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreGenericAckReceivedEvent( RTC_PARSE_CHECK_OR_RETURN(proto.has_acked_packet_number()); // receive_acked_packet_time_ms is optional. - absl::optional base_receive_acked_packet_time_ms; + std::optional base_receive_acked_packet_time_ms; if (proto.has_receive_acked_packet_time_ms()) { base_receive_acked_packet_time_ms = proto.receive_acked_packet_time_ms(); } @@ -3127,31 +3258,31 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreGenericAckReceivedEvent( } // timestamp_ms - std::vector> timestamp_ms_values = + std::vector> timestamp_ms_values = DecodeDeltas(proto.timestamp_ms_deltas(), ToUnsigned(proto.timestamp_ms()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(timestamp_ms_values.size(), number_of_deltas); // packet_number - std::vector> packet_number_values = + std::vector> packet_number_values = DecodeDeltas(proto.packet_number_deltas(), ToUnsigned(proto.packet_number()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(packet_number_values.size(), number_of_deltas); // acked_packet_number - std::vector> acked_packet_number_values = + std::vector> acked_packet_number_values = DecodeDeltas(proto.acked_packet_number_deltas(), ToUnsigned(proto.acked_packet_number()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(acked_packet_number_values.size(), number_of_deltas); // optional receive_acked_packet_time_ms - const absl::optional unsigned_receive_acked_packet_time_ms_base = + const std::optional unsigned_receive_acked_packet_time_ms_base = proto.has_receive_acked_packet_time_ms() - ? absl::optional( + ? 
std::optional( ToUnsigned(proto.receive_acked_packet_time_ms())) - : absl::optional(); - std::vector> receive_acked_packet_time_ms_values = + : std::optional(); + std::vector> receive_acked_packet_time_ms_values = DecodeDeltas(proto.receive_acked_packet_time_ms_deltas(), unsigned_receive_acked_packet_time_ms_base, number_of_deltas); @@ -3168,7 +3299,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreGenericAckReceivedEvent( int64_t acked_packet_number; RTC_PARSE_CHECK_OR_RETURN( ToSigned(acked_packet_number_values[i].value(), &acked_packet_number)); - absl::optional receive_acked_packet_time_ms; + std::optional receive_acked_packet_time_ms; if (receive_acked_packet_time_ms_values[i].has_value()) { int64_t value; @@ -3206,27 +3337,27 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreGenericPacketSentEvent( } // timestamp_ms - std::vector> timestamp_ms_values = + std::vector> timestamp_ms_values = DecodeDeltas(proto.timestamp_ms_deltas(), ToUnsigned(proto.timestamp_ms()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(timestamp_ms_values.size(), number_of_deltas); // packet_number - std::vector> packet_number_values = + std::vector> packet_number_values = DecodeDeltas(proto.packet_number_deltas(), ToUnsigned(proto.packet_number()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(packet_number_values.size(), number_of_deltas); - std::vector> overhead_length_values = + std::vector> overhead_length_values = DecodeDeltas(proto.overhead_length_deltas(), proto.overhead_length(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(overhead_length_values.size(), number_of_deltas); - std::vector> payload_length_values = DecodeDeltas( + std::vector> payload_length_values = DecodeDeltas( proto.payload_length_deltas(), proto.payload_length(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(payload_length_values.size(), number_of_deltas); - std::vector> padding_length_values = DecodeDeltas( + std::vector> padding_length_values = DecodeDeltas( proto.padding_length_deltas(), proto.padding_length(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(padding_length_values.size(), number_of_deltas); @@ -3269,18 +3400,18 @@ ParsedRtcEventLog::StoreGenericPacketReceivedEvent( } // timestamp_ms - std::vector> timestamp_ms_values = + std::vector> timestamp_ms_values = DecodeDeltas(proto.timestamp_ms_deltas(), ToUnsigned(proto.timestamp_ms()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(timestamp_ms_values.size(), number_of_deltas); // packet_number - std::vector> packet_number_values = + std::vector> packet_number_values = DecodeDeltas(proto.packet_number_deltas(), ToUnsigned(proto.packet_number()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(packet_number_values.size(), number_of_deltas); - std::vector> packet_length_values = DecodeDeltas( + std::vector> packet_length_values = DecodeDeltas( proto.packet_length_deltas(), proto.packet_length(), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(packet_length_values.size(), number_of_deltas); @@ -3342,54 +3473,54 @@ ParsedRtcEventLog::StoreAudioNetworkAdaptationEvent( } // timestamp_ms - std::vector> timestamp_ms_values = + std::vector> timestamp_ms_values = DecodeDeltas(proto.timestamp_ms_deltas(), ToUnsigned(proto.timestamp_ms()), number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(timestamp_ms_values.size(), number_of_deltas); // bitrate_bps - const absl::optional unsigned_base_bitrate_bps = + const std::optional unsigned_base_bitrate_bps = proto.has_bitrate_bps() - ? 
absl::optional(ToUnsigned(proto.bitrate_bps())) - : absl::optional(); - std::vector> bitrate_bps_values = DecodeDeltas( + ? std::optional(ToUnsigned(proto.bitrate_bps())) + : std::optional(); + std::vector> bitrate_bps_values = DecodeDeltas( proto.bitrate_bps_deltas(), unsigned_base_bitrate_bps, number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(bitrate_bps_values.size(), number_of_deltas); // frame_length_ms - const absl::optional unsigned_base_frame_length_ms = + const std::optional unsigned_base_frame_length_ms = proto.has_frame_length_ms() - ? absl::optional(ToUnsigned(proto.frame_length_ms())) - : absl::optional(); - std::vector> frame_length_ms_values = + ? std::optional(ToUnsigned(proto.frame_length_ms())) + : std::optional(); + std::vector> frame_length_ms_values = DecodeDeltas(proto.frame_length_ms_deltas(), unsigned_base_frame_length_ms, number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(frame_length_ms_values.size(), number_of_deltas); // uplink_packet_loss_fraction - const absl::optional uplink_packet_loss_fraction = + const std::optional uplink_packet_loss_fraction = proto.has_uplink_packet_loss_fraction() - ? absl::optional(proto.uplink_packet_loss_fraction()) - : absl::optional(); - std::vector> uplink_packet_loss_fraction_values = + ? std::optional(proto.uplink_packet_loss_fraction()) + : std::optional(); + std::vector> uplink_packet_loss_fraction_values = DecodeDeltas(proto.uplink_packet_loss_fraction_deltas(), uplink_packet_loss_fraction, number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(uplink_packet_loss_fraction_values.size(), number_of_deltas); // enable_fec - const absl::optional enable_fec = - proto.has_enable_fec() ? absl::optional(proto.enable_fec()) - : absl::optional(); - std::vector> enable_fec_values = + const std::optional enable_fec = + proto.has_enable_fec() ? std::optional(proto.enable_fec()) + : std::optional(); + std::vector> enable_fec_values = DecodeDeltas(proto.enable_fec_deltas(), enable_fec, number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(enable_fec_values.size(), number_of_deltas); // enable_dtx - const absl::optional enable_dtx = - proto.has_enable_dtx() ? absl::optional(proto.enable_dtx()) - : absl::optional(); - std::vector> enable_dtx_values = + const std::optional enable_dtx = + proto.has_enable_dtx() ? std::optional(proto.enable_dtx()) + : std::optional(); + std::vector> enable_dtx_values = DecodeDeltas(proto.enable_dtx_deltas(), enable_dtx, number_of_deltas); RTC_PARSE_CHECK_OR_RETURN_EQ(enable_dtx_values.size(), number_of_deltas); @@ -3399,12 +3530,12 @@ ParsedRtcEventLog::StoreAudioNetworkAdaptationEvent( // We likewise shift the base event down by one, to get the same base as // encoding had, but then shift all of the values (except the base) back up // to their original value. 
- absl::optional shifted_base_num_channels; + std::optional shifted_base_num_channels; if (proto.has_num_channels()) { shifted_base_num_channels = - absl::optional(proto.num_channels() - 1); + std::optional(proto.num_channels() - 1); } - std::vector> num_channels_values = DecodeDeltas( + std::vector> num_channels_values = DecodeDeltas( proto.num_channels_deltas(), shifted_base_num_channels, number_of_deltas); for (size_t i = 0; i < num_channels_values.size(); ++i) { if (num_channels_values[i].has_value()) { @@ -3435,23 +3566,30 @@ ParsedRtcEventLog::StoreAudioNetworkAdaptationEvent( } if (uplink_packet_loss_fraction_values[i].has_value()) { float uplink_packet_loss_fraction2; + RTC_PARSE_CHECK_OR_RETURN(IsValueInRangeForNumericType( + uplink_packet_loss_fraction_values[i].value())); RTC_PARSE_CHECK_OR_RETURN(ParsePacketLossFractionFromProtoFormat( - rtc::checked_cast( - uplink_packet_loss_fraction_values[i].value()), + static_cast(uplink_packet_loss_fraction_values[i].value()), &uplink_packet_loss_fraction2)); runtime_config.uplink_packet_loss_fraction = uplink_packet_loss_fraction2; } if (enable_fec_values[i].has_value()) { + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(enable_fec_values[i].value())); runtime_config.enable_fec = - rtc::checked_cast(enable_fec_values[i].value()); + static_cast(enable_fec_values[i].value()); } if (enable_dtx_values[i].has_value()) { + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(enable_dtx_values[i].value())); runtime_config.enable_dtx = - rtc::checked_cast(enable_dtx_values[i].value()); + static_cast(enable_dtx_values[i].value()); } if (num_channels_values[i].has_value()) { + RTC_PARSE_CHECK_OR_RETURN( + IsValueInRangeForNumericType(num_channels_values[i].value())); runtime_config.num_channels = - rtc::checked_cast(num_channels_values[i].value()); + static_cast(num_channels_values[i].value()); } audio_network_adaptation_events_.emplace_back( Timestamp::Millis(timestamp_ms), runtime_config); @@ -3496,8 +3634,8 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreIceCandidatePairConfig( RTC_PARSE_CHECK_OR_RETURN(proto.has_candidate_pair_id()); ice_config.candidate_pair_id = proto.candidate_pair_id(); RTC_PARSE_CHECK_OR_RETURN(proto.has_local_candidate_type()); - ice_config.local_candidate_type = - GetRuntimeIceCandidateType(proto.local_candidate_type()); + RTC_PARSE_CHECK_OR_RETURN(GetRuntimeIceCandidateType( + proto.local_candidate_type(), ice_config.local_candidate_type)); RTC_PARSE_CHECK_OR_RETURN(proto.has_local_relay_protocol()); ice_config.local_relay_protocol = GetRuntimeIceCandidatePairProtocol(proto.local_relay_protocol()); @@ -3508,8 +3646,8 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreIceCandidatePairConfig( ice_config.local_address_family = GetRuntimeIceCandidatePairAddressFamily(proto.local_address_family()); RTC_PARSE_CHECK_OR_RETURN(proto.has_remote_candidate_type()); - ice_config.remote_candidate_type = - GetRuntimeIceCandidateType(proto.remote_candidate_type()); + RTC_PARSE_CHECK_OR_RETURN(GetRuntimeIceCandidateType( + proto.remote_candidate_type(), ice_config.remote_candidate_type)); RTC_PARSE_CHECK_OR_RETURN(proto.has_remote_address_family()); ice_config.remote_address_family = GetRuntimeIceCandidatePairAddressFamily(proto.remote_address_family()); diff --git a/logging/rtc_event_log/rtc_event_log_parser.h b/logging/rtc_event_log/rtc_event_log_parser.h index 9ad96274d3..492fc10b17 100644 --- a/logging/rtc_event_log/rtc_event_log_parser.h +++ b/logging/rtc_event_log/rtc_event_log_parser.h @@ -10,18 
 #ifndef LOGGING_RTC_EVENT_LOG_RTC_EVENT_LOG_PARSER_H_
 #define LOGGING_RTC_EVENT_LOG_RTC_EVENT_LOG_PARSER_H_
 
+#include
+#include
 #include
 #include
 #include
 #include
-#include
+#include
 #include
 
 #include "absl/base/attributes.h"
 #include "absl/strings/string_view.h"
-#include "api/rtc_event_log/rtc_event_log.h"
-#include "call/video_receive_stream.h"
-#include "call/video_send_stream.h"
+#include "api/candidate.h"
+#include "api/dtls_transport_interface.h"
+#include "api/rtp_parameters.h"
+#include "api/transport/bandwidth_usage.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
 #include "logging/rtc_event_log/events/logged_rtp_rtcp.h"
 #include "logging/rtc_event_log/events/rtc_event_alr_state.h"
 #include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h"
@@ -40,24 +45,20 @@
 #include "logging/rtc_event_log/events/rtc_event_generic_packet_sent.h"
 #include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h"
 #include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h"
+#include "logging/rtc_event_log/events/rtc_event_log_parse_status.h"
 #include "logging/rtc_event_log/events/rtc_event_neteq_set_minimum_delay.h"
 #include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h"
 #include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h"
 #include "logging/rtc_event_log/events/rtc_event_probe_result_success.h"
 #include "logging/rtc_event_log/events/rtc_event_remote_estimate.h"
 #include "logging/rtc_event_log/events/rtc_event_route_change.h"
-#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h"
-#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h"
-#include "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h"
-#include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h"
 #include "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h"
 #include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h"
+#include "logging/rtc_event_log/rtc_stream_config.h"
 #include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
-#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h"
-#include "rtc_base/ignore_wundef.h"
+#include "rtc_base/checks.h"
 
 // Files generated at build-time by the protobuf compiler.
-RTC_PUSH_IGNORING_WUNDEF()
 #ifdef WEBRTC_ANDROID_PLATFORM_BUILD
 #include "external/webrtc/webrtc/logging/rtc_event_log/rtc_event_log.pb.h"
 #include "external/webrtc/webrtc/logging/rtc_event_log/rtc_event_log2.pb.h"
@@ -65,7 +66,6 @@ RTC_PUSH_IGNORING_WUNDEF()
 #include "logging/rtc_event_log/rtc_event_log.pb.h"
 #include "logging/rtc_event_log/rtc_event_log2.pb.h"
 #endif
-RTC_POP_IGNORING_WUNDEF()
 
 namespace webrtc {
 
@@ -74,12 +74,15 @@ enum PacketDirection { kIncomingPacket = 0, kOutgoingPacket };
 enum class LoggedMediaType : uint8_t { kUnknown, kAudio, kVideo };
 
 struct LoggedPacketInfo {
+  static LoggedPacketInfo CreateEmptyForTesting() { return LoggedPacketInfo(); }
+
   LoggedPacketInfo(const LoggedRtpPacket& rtp,
                    LoggedMediaType media_type,
                    bool rtx,
                    Timestamp capture_time);
   LoggedPacketInfo(const LoggedPacketInfo&);
   ~LoggedPacketInfo();
+  int64_t log_time_ms() const { return log_packet_time.ms(); }
   int64_t log_time_us() const { return log_packet_time.us(); }
 
   uint32_t ssrc;
@@ -117,6 +120,12 @@ struct LoggedPacketInfo {
   // time, and this is instead calculated as the difference in reported receive
   // time between this packet and the last packet in the same feedback message.
   TimeDelta feedback_hold_duration = TimeDelta::MinusInfinity();
+
+ private:
+  LoggedPacketInfo()
+      : capture_time(Timestamp::MinusInfinity()),
+        log_packet_time(Timestamp::MinusInfinity()),
+        reported_send_time(Timestamp::MinusInfinity()) {}
 };
 
 struct InferredRouteChangeEvent {
@@ -374,6 +383,9 @@ class ParsedRtcEventLog {
                            UnconfiguredHeaderExtensions::kDontParse,
                        bool allow_incomplete_log = false);
 
+  ParsedRtcEventLog(const ParsedRtcEventLog&) = delete;
+  ParsedRtcEventLog& operator=(const ParsedRtcEventLog&) = delete;
+
   ~ParsedRtcEventLog();
 
   // Clears previously parsed events and resets the ParsedRtcEventLogNew to an
@@ -381,7 +393,7 @@ class ParsedRtcEventLog {
   void Clear();
 
   // Reads an RtcEventLog file and returns success if parsing was successful.
-  ParseStatus ParseFile(absl::string_view file_name);
+  ParseStatus ParseFile(absl::string_view filename);
 
   // Reads an RtcEventLog from a string and returns success if successful.
   ParseStatus ParseString(absl::string_view s);
@@ -623,6 +635,15 @@ class ParsedRtcEventLog {
     }
   }
 
+  const std::vector<LoggedRtcpCongestionControlFeedback>& congestion_feedback(
+      PacketDirection direction) const {
+    if (direction == kIncomingPacket) {
+      return incoming_congestion_feedback_;
+    } else {
+      return outgoing_congestion_feedback_;
+    }
+  }
+
   const std::vector<LoggedRtcpPacketLossNotification>& loss_notifications(
       PacketDirection direction) {
     if (direction == kIncomingPacket) {
@@ -774,7 +795,7 @@ class ParsedRtcEventLog {
   ParseStatus StoreOutgoingRtcpPackets(
       const rtclog2::OutgoingRtcpPackets& proto);
   ParseStatus StoreOutgoingRtpPackets(const rtclog2::OutgoingRtpPackets& proto);
-  ParseStatus StoreParsedNewFormatEvent(const rtclog2::EventStream& event);
+  ParseStatus StoreParsedNewFormatEvent(const rtclog2::EventStream& stream);
   ParseStatus StoreRouteChangeEvent(const rtclog2::RouteChange& proto);
   ParseStatus StoreRemoteEstimateEvent(const rtclog2::RemoteEstimates& proto);
   ParseStatus StoreStartEvent(const rtclog2::BeginLogEvent& proto);
@@ -856,6 +877,10 @@ class ParsedRtcEventLog {
   std::vector<LoggedRtcpPacketBye> outgoing_bye_;
   std::vector<LoggedRtcpPacketTransportFeedback> incoming_transport_feedback_;
   std::vector<LoggedRtcpPacketTransportFeedback> outgoing_transport_feedback_;
+  std::vector<LoggedRtcpCongestionControlFeedback>
+      incoming_congestion_feedback_;
+  std::vector<LoggedRtcpCongestionControlFeedback>
+      outgoing_congestion_feedback_;
   std::vector<LoggedRtcpPacketLossNotification> incoming_loss_notification_;
   std::vector<LoggedRtcpPacketLossNotification> outgoing_loss_notification_;
 
@@ -933,7 +958,8 @@ struct MatchedSendArrivalTimes {
   int64_t arrival_time_ms;  // kNotReceived for lost packets.
   int64_t payload_size;
 };
 
-const std::vector<MatchedSendArrivalTimes> GetNetworkTrace(
+
+std::vector<MatchedSendArrivalTimes> GetNetworkTrace(
     const ParsedRtcEventLog& parsed_log);
 
 }  // namespace webrtc
diff --git a/logging/rtc_event_log/rtc_event_log_unittest.cc b/logging/rtc_event_log/rtc_event_log_unittest.cc
index 3730a080dd..a181a85891 100644
--- a/logging/rtc_event_log/rtc_event_log_unittest.cc
+++ b/logging/rtc_event_log/rtc_event_log_unittest.cc
@@ -11,6 +11,8 @@
 #include "api/rtc_event_log/rtc_event_log.h"
 
 #include
+#include
+#include
 #include
 #include
 #include
@@ -19,8 +21,12 @@
 #include
 #include
 
+#include "api/environment/environment.h"
+#include "api/environment/environment_factory.h"
 #include "api/rtc_event_log/rtc_event_log_factory.h"
-#include "api/task_queue/default_task_queue_factory.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "logging/rtc_event_log/events/rtc_event_alr_state.h"
 #include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h"
 #include "logging/rtc_event_log/events/rtc_event_audio_playout.h"
 #include "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h"
@@ -29,12 +35,17 @@
 #include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h"
 #include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h"
 #include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h"
+#include "logging/rtc_event_log/events/rtc_event_frame_decoded.h"
 #include "logging/rtc_event_log/events/rtc_event_generic_ack_received.h"
 #include "logging/rtc_event_log/events/rtc_event_generic_packet_received.h"
 #include "logging/rtc_event_log/events/rtc_event_generic_packet_sent.h"
+#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h"
+#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h"
 #include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h"
 #include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h"
 #include "logging/rtc_event_log/events/rtc_event_probe_result_success.h"
+#include "logging/rtc_event_log/events/rtc_event_remote_estimate.h"
+#include "logging/rtc_event_log/events/rtc_event_route_change.h"
 #include "logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h"
 #include "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h"
 #include "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h"
@@ -43,14 +54,16 @@
 #include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h"
 #include "logging/rtc_event_log/rtc_event_log_parser.h"
 #include "logging/rtc_event_log/rtc_event_log_unittest_helper.h"
-#include "logging/rtc_event_log/rtc_stream_config.h"
 #include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
 #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
-#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
 #include "rtc_base/checks.h"
 #include "rtc_base/fake_clock.h"
 #include "rtc_base/random.h"
+#include "rtc_base/time_utils.h"
+#include "test/explicit_key_value_config.h"
 #include "test/gtest.h"
+#include "test/logging/log_writer.h"
 #include "test/logging/memory_log_writer.h"
 #include "test/testsupport/file_utils.h"
 
@@ -58,6 +71,8 @@ namespace webrtc {
 
 namespace {
 
+using test::ExplicitKeyValueConfig;
+
 struct EventCounts {
   size_t audio_send_streams = 0;
   size_t audio_recv_streams = 0;
@@ -105,6 +120,21 @@ struct EventCounts {
   }
 };
 
+std::unique_ptr<FieldTrialsView> CreateFieldTrialsFor(
+    RtcEventLog::EncodingType encoding_type) {
+  switch (encoding_type) {
+    case
RtcEventLog::EncodingType::Legacy: + return std::make_unique( + "WebRTC-RtcEventLogNewFormat/Disabled/"); + case RtcEventLog::EncodingType::NewFormat: + return std::make_unique( + "WebRTC-RtcEventLogNewFormat/Enabled/"); + case RtcEventLog::EncodingType::ProtoFree: + RTC_CHECK(false); + return nullptr; + } +} + class RtcEventLogSession : public ::testing::TestWithParam< std::tuple> { @@ -128,10 +158,10 @@ class RtcEventLogSession temp_filename_ = test::OutputPath() + test_name; } - // Create and buffer the config events and `num_events_before_log_start` + // Create and buffer the config events and `num_events_before_start` // randomized non-config events. Then call StartLogging and finally create and // write the remaining non-config events. - void WriteLog(EventCounts count, size_t num_events_before_log_start); + void WriteLog(EventCounts count, size_t num_events_before_start); void ReadAndVerifyLog(); bool IsNewFormat() { @@ -204,7 +234,7 @@ class RtcEventLogSession const RtcEventLog::EncodingType encoding_type_; test::EventGenerator gen_; test::EventVerifier verifier_; - rtc::ScopedFakeClock clock_; + ScopedFakeClock clock_; std::string temp_filename_; MemoryLogStorage log_storage_; std::unique_ptr log_output_factory_; @@ -313,9 +343,9 @@ void RtcEventLogSession::WriteVideoSendConfigs(size_t video_send_streams, clock_.AdvanceTime(TimeDelta::Millis(prng_.Rand(20))); uint32_t ssrc = prng_.Rand(); outgoing_extensions_.emplace_back(ssrc, all_extensions); - auto event = gen_.NewVideoSendStreamConfig(ssrc, all_extensions); - event_log->Log(event->Copy()); - video_send_config_list_.push_back(std::move(event)); + auto first_event = gen_.NewVideoSendStreamConfig(ssrc, all_extensions); + event_log->Log(first_event->Copy()); + video_send_config_list_.push_back(std::move(first_event)); for (size_t i = 1; i < video_send_streams; i++) { clock_.AdvanceTime(TimeDelta::Millis(prng_.Rand(20))); do { @@ -336,14 +366,13 @@ void RtcEventLogSession::WriteVideoSendConfigs(size_t video_send_streams, void RtcEventLogSession::WriteLog(EventCounts count, size_t num_events_before_start) { - // TODO(terelius): Allow test to run with either a real or a fake clock_. - // Maybe always use the ScopedFakeClock, but conditionally SleepMs()? + // TODO(terelius): Allow test to run with either a real or a fake clock_ + // e.g. by using clock and task_queue_factory from TimeController + // when RtcEventLogImpl switches to use injected clock from the environment. - auto task_queue_factory = CreateDefaultTaskQueueFactory(); - RtcEventLogFactory rtc_event_log_factory(task_queue_factory.get()); // The log will be flushed to output when the event_log goes out of scope. - std::unique_ptr event_log = - rtc_event_log_factory.CreateRtcEventLog(encoding_type_); + std::unique_ptr event_log = RtcEventLogFactory().Create( + CreateEnvironment(CreateFieldTrialsFor(encoding_type_))); // We can't send or receive packets without configured streams. 
RTC_CHECK_GE(count.video_recv_streams, 1); @@ -362,14 +391,14 @@ void RtcEventLogSession::WriteLog(EventCounts count, clock_.AdvanceTime(TimeDelta::Millis(prng_.Rand(20))); event_log->StartLogging(log_output_factory_->Create(temp_filename_), output_period_ms_); - start_time_us_ = rtc::TimeMicros(); - utc_start_time_us_ = rtc::TimeUTCMicros(); + start_time_us_ = TimeMicros(); + utc_start_time_us_ = TimeUTCMicros(); } clock_.AdvanceTime(TimeDelta::Millis(prng_.Rand(20))); size_t selection = prng_.Rand(remaining_events - 1); - first_timestamp_ms_ = std::min(first_timestamp_ms_, rtc::TimeMillis()); - last_timestamp_ms_ = std::max(last_timestamp_ms_, rtc::TimeMillis()); + first_timestamp_ms_ = std::min(first_timestamp_ms_, TimeMillis()); + last_timestamp_ms_ = std::max(last_timestamp_ms_, TimeMillis()); if (selection < count.alr_states) { auto event = gen_.NewAlrState(); @@ -576,7 +605,7 @@ void RtcEventLogSession::WriteLog(EventCounts count, } event_log->StopLogging(); - stop_time_us_ = rtc::TimeMicros(); + stop_time_us_ = TimeMicros(); ASSERT_EQ(count.total_nonconfig_events(), static_cast(0)); } @@ -923,8 +952,8 @@ TEST_P(RtcEventLogCircularBufferTest, KeepsMostRecentEvents) { std::replace(test_name.begin(), test_name.end(), '/', '_'); const std::string temp_filename = test::OutputPath() + test_name; - std::unique_ptr fake_clock = - std::make_unique(); + std::unique_ptr fake_clock = + std::make_unique(); fake_clock->SetTime(Timestamp::Seconds(kStartTimeSeconds)); // Create a scope for the TQ and event log factories. @@ -934,12 +963,9 @@ TEST_P(RtcEventLogCircularBufferTest, KeepsMostRecentEvents) { int64_t start_time_us, utc_start_time_us, stop_time_us; { - auto task_queue_factory = CreateDefaultTaskQueueFactory(); - RtcEventLogFactory rtc_event_log_factory(task_queue_factory.get()); - // When `log` goes out of scope, the contents are flushed - // to the output. - std::unique_ptr log = - rtc_event_log_factory.CreateRtcEventLog(encoding_type_); + // When `log` goes out of scope, the contents are flushed to the output. + std::unique_ptr log = RtcEventLogFactory().Create( + CreateEnvironment(CreateFieldTrialsFor(encoding_type_))); for (size_t i = 0; i < kNumEvents; i++) { // The purpose of the test is to verify that the log can handle @@ -952,12 +978,12 @@ TEST_P(RtcEventLogCircularBufferTest, KeepsMostRecentEvents) { i, kStartBitrate + i * 1000)); fake_clock->AdvanceTime(TimeDelta::Millis(10)); } - start_time_us = rtc::TimeMicros(); - utc_start_time_us = rtc::TimeUTCMicros(); + start_time_us = TimeMicros(); + utc_start_time_us = TimeUTCMicros(); log->StartLogging(log_output_factory_->Create(temp_filename), RtcEventLog::kImmediateOutput); fake_clock->AdvanceTime(TimeDelta::Millis(10)); - stop_time_us = rtc::TimeMicros(); + stop_time_us = TimeMicros(); log->StopLogging(); } @@ -991,7 +1017,7 @@ TEST_P(RtcEventLogCircularBufferTest, KeepsMostRecentEvents) { // recreate the clock. However we must ensure that the old fake_clock is // destroyed before the new one is created, so we have to reset() first. 
fake_clock.reset(); - fake_clock = std::make_unique(); + fake_clock = std::make_unique(); fake_clock->SetTime(Timestamp::Millis(first_timestamp_ms)); for (size_t i = 1; i < probe_success_events.size(); i++) { fake_clock->AdvanceTime(TimeDelta::Millis(10)); diff --git a/logging/rtc_event_log/rtc_event_log_unittest_helper.cc b/logging/rtc_event_log/rtc_event_log_unittest_helper.cc index b378e8ff48..0f5b265182 100644 --- a/logging/rtc_event_log/rtc_event_log_unittest_helper.cc +++ b/logging/rtc_event_log/rtc_event_log_unittest_helper.cc @@ -17,23 +17,70 @@ #include #include #include -#include +#include #include #include +#include "absl/algorithm/container.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" -#include "api/network_state_predictor.h" +#include "api/candidate.h" +#include "api/dtls_transport_interface.h" +#include "api/rtc_event_log/rtc_event_log.h" #include "api/rtp_headers.h" #include "api/rtp_parameters.h" +#include "api/transport/bandwidth_usage.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "api/video/video_codec_type.h" +#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" +#include "logging/rtc_event_log/events/rtc_event_alr_state.h" +#include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h" +#include "logging/rtc_event_log/events/rtc_event_audio_playout.h" +#include "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_begin_log.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" +#include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" +#include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h" +#include "logging/rtc_event_log/events/rtc_event_end_log.h" +#include "logging/rtc_event_log/events/rtc_event_frame_decoded.h" +#include "logging/rtc_event_log/events/rtc_event_generic_ack_received.h" +#include "logging/rtc_event_log/events/rtc_event_generic_packet_received.h" +#include "logging/rtc_event_log/events/rtc_event_generic_packet_sent.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" +#include "logging/rtc_event_log/events/rtc_event_neteq_set_minimum_delay.h" +#include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" +#include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h" +#include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" +#include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" +#include "logging/rtc_event_log/events/rtc_event_route_change.h" +#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h" +#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h" +#include "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h" +#include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" +#include "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" +#include "logging/rtc_event_log/rtc_stream_config.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" #include "modules/rtp_rtcp/include/rtp_cvo.h" +#include 
"modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtcp_packet/bye.h" #include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" +#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" +#include "modules/rtp_rtcp/source/rtcp_packet/fir.h" +#include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" +#include "modules/rtp_rtcp/source/rtcp_packet/nack.h" +#include "modules/rtp_rtcp/source/rtcp_packet/pli.h" +#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/remb.h" +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "modules/rtp_rtcp/source/rtcp_packet/rrtr.h" +#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" #include "modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" @@ -41,9 +88,10 @@ #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "rtc_base/buffer.h" #include "rtc_base/checks.h" +#include "rtc_base/random.h" #include "rtc_base/time_utils.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/ntp_time.h" +#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { @@ -52,6 +100,9 @@ namespace test { namespace { +using ::testing::ElementsAreArray; +using ::testing::IsEmpty; + struct ExtensionPair { RTPExtensionType type; const char* name; @@ -77,7 +128,7 @@ constexpr ExtensionPair kExtensions[kMaxNumExtensions] = { RtpExtension::kDependencyDescriptorUri}}; template -void ShuffleInPlace(Random* prng, rtc::ArrayView array) { +void ShuffleInPlace(Random* prng, ArrayView array) { RTC_DCHECK_LE(array.size(), std::numeric_limits::max()); for (uint32_t i = 0; i + 1 < array.size(); i++) { uint32_t other = prng->Rand(i, static_cast(array.size() - 1)); @@ -85,13 +136,13 @@ void ShuffleInPlace(Random* prng, rtc::ArrayView array) { } } -absl::optional GetExtensionId(const std::vector& extensions, - absl::string_view uri) { +std::optional GetExtensionId(const std::vector& extensions, + absl::string_view uri) { for (const auto& extension : extensions) { if (extension.uri == uri) return extension.id; } - return absl::nullopt; + return std::nullopt; } } // namespace @@ -169,13 +220,13 @@ std::unique_ptr EventGenerator::NewFrameDecodedEvent( constexpr int kMaxHeight = 8640; constexpr int kMinWidth = 16; constexpr int kMinHeight = 16; - constexpr int kNumCodecTypes = 5; + constexpr int kNumCodecTypes = 6; constexpr VideoCodecType kCodecList[kNumCodecTypes] = { - kVideoCodecGeneric, kVideoCodecVP8, kVideoCodecVP9, kVideoCodecAV1, - kVideoCodecH264}; + kVideoCodecGeneric, kVideoCodecVP8, kVideoCodecVP9, + kVideoCodecAV1, kVideoCodecH264, kVideoCodecH265}; const int64_t render_time_ms = - rtc::TimeMillis() + prng_.Rand(kMinRenderDelayMs, kMaxRenderDelayMs); + TimeMillis() + prng_.Rand(kMinRenderDelayMs, kMaxRenderDelayMs); const int width = prng_.Rand(kMinWidth, kMaxWidth); const int height = prng_.Rand(kMinHeight, kMaxHeight); const VideoCodecType codec = kCodecList[prng_.Rand(0, kNumCodecTypes - 1)]; @@ -217,10 +268,27 @@ EventGenerator::NewProbeResultSuccess() { return std::make_unique(id, bitrate_bps); } +constexpr uint32_t CandidateTypeCount() { + // This switch statement only exists to catch changes to the IceCandidateType + // enumeration. 
If you get an error here, please update the switch statement + // and the return value. + IceCandidateType type = IceCandidateType::kHost; + switch (type) { + case IceCandidateType::kHost: + case IceCandidateType::kSrflx: + case IceCandidateType::kPrflx: + case IceCandidateType::kRelay: + break; + } + return 4u; +} + std::unique_ptr EventGenerator::NewIceCandidatePairConfig() { - IceCandidateType local_candidate_type = static_cast( - prng_.Rand(static_cast(IceCandidateType::kNumValues) - 1)); + static_assert(static_cast(IceCandidateType::kHost) == 0, + "Expect kLocal to be the first enum value, equal to 0"); + IceCandidateType local_candidate_type = + static_cast(prng_.Rand(CandidateTypeCount() - 1)); IceCandidateNetworkType local_network_type = static_cast(prng_.Rand( static_cast(IceCandidateNetworkType::kNumValues) - 1)); @@ -228,8 +296,8 @@ EventGenerator::NewIceCandidatePairConfig() { static_cast(prng_.Rand( static_cast(IceCandidatePairAddressFamily::kNumValues) - 1)); - IceCandidateType remote_candidate_type = static_cast( - prng_.Rand(static_cast(IceCandidateType::kNumValues) - 1)); + IceCandidateType remote_candidate_type = + static_cast(prng_.Rand(CandidateTypeCount() - 1)); IceCandidatePairAddressFamily remote_address_family = static_cast(prng_.Rand( static_cast(IceCandidatePairAddressFamily::kNumValues) - @@ -238,12 +306,10 @@ EventGenerator::NewIceCandidatePairConfig() { static_cast(prng_.Rand( static_cast(IceCandidatePairProtocol::kNumValues) - 1)); - IceCandidatePairDescription desc; - desc.local_candidate_type = local_candidate_type; + IceCandidatePairDescription desc(local_candidate_type, remote_candidate_type); desc.local_relay_protocol = protocol_type; desc.local_network_type = local_network_type; desc.local_address_family = local_address_family; - desc.remote_candidate_type = remote_candidate_type; desc.remote_address_family = remote_address_family; desc.candidate_pair_protocol = protocol_type; @@ -431,52 +497,52 @@ EventGenerator::NewRtcpPacketIncoming() { switch (type) { case SupportedRtcpTypes::kSenderReport: { rtcp::SenderReport sender_report = NewSenderReport(); - rtc::Buffer buffer = sender_report.Build(); + Buffer buffer = sender_report.Build(); return std::make_unique(buffer); } case SupportedRtcpTypes::kReceiverReport: { rtcp::ReceiverReport receiver_report = NewReceiverReport(); - rtc::Buffer buffer = receiver_report.Build(); + Buffer buffer = receiver_report.Build(); return std::make_unique(buffer); } case SupportedRtcpTypes::kExtendedReports: { rtcp::ExtendedReports extended_report = NewExtendedReports(); - rtc::Buffer buffer = extended_report.Build(); + Buffer buffer = extended_report.Build(); return std::make_unique(buffer); } case SupportedRtcpTypes::kFir: { rtcp::Fir fir = NewFir(); - rtc::Buffer buffer = fir.Build(); + Buffer buffer = fir.Build(); return std::make_unique(buffer); } case SupportedRtcpTypes::kPli: { rtcp::Pli pli = NewPli(); - rtc::Buffer buffer = pli.Build(); + Buffer buffer = pli.Build(); return std::make_unique(buffer); } case SupportedRtcpTypes::kNack: { rtcp::Nack nack = NewNack(); - rtc::Buffer buffer = nack.Build(); + Buffer buffer = nack.Build(); return std::make_unique(buffer); } case SupportedRtcpTypes::kRemb: { rtcp::Remb remb = NewRemb(); - rtc::Buffer buffer = remb.Build(); + Buffer buffer = remb.Build(); return std::make_unique(buffer); } case SupportedRtcpTypes::kBye: { rtcp::Bye bye = NewBye(); - rtc::Buffer buffer = bye.Build(); + Buffer buffer = bye.Build(); return std::make_unique(buffer); } case 
SupportedRtcpTypes::kTransportFeedback: { rtcp::TransportFeedback transport_feedback = NewTransportFeedback(); - rtc::Buffer buffer = transport_feedback.Build(); + Buffer buffer = transport_feedback.Build(); return std::make_unique(buffer); } default: RTC_DCHECK_NOTREACHED(); - rtc::Buffer buffer; + Buffer buffer; return std::make_unique(buffer); } } @@ -500,52 +566,52 @@ EventGenerator::NewRtcpPacketOutgoing() { switch (type) { case SupportedRtcpTypes::kSenderReport: { rtcp::SenderReport sender_report = NewSenderReport(); - rtc::Buffer buffer = sender_report.Build(); + Buffer buffer = sender_report.Build(); return std::make_unique(buffer); } case SupportedRtcpTypes::kReceiverReport: { rtcp::ReceiverReport receiver_report = NewReceiverReport(); - rtc::Buffer buffer = receiver_report.Build(); + Buffer buffer = receiver_report.Build(); return std::make_unique(buffer); } case SupportedRtcpTypes::kExtendedReports: { rtcp::ExtendedReports extended_report = NewExtendedReports(); - rtc::Buffer buffer = extended_report.Build(); + Buffer buffer = extended_report.Build(); return std::make_unique(buffer); } case SupportedRtcpTypes::kFir: { rtcp::Fir fir = NewFir(); - rtc::Buffer buffer = fir.Build(); + Buffer buffer = fir.Build(); return std::make_unique(buffer); } case SupportedRtcpTypes::kPli: { rtcp::Pli pli = NewPli(); - rtc::Buffer buffer = pli.Build(); + Buffer buffer = pli.Build(); return std::make_unique(buffer); } case SupportedRtcpTypes::kNack: { rtcp::Nack nack = NewNack(); - rtc::Buffer buffer = nack.Build(); + Buffer buffer = nack.Build(); return std::make_unique(buffer); } case SupportedRtcpTypes::kRemb: { rtcp::Remb remb = NewRemb(); - rtc::Buffer buffer = remb.Build(); + Buffer buffer = remb.Build(); return std::make_unique(buffer); } case SupportedRtcpTypes::kBye: { rtcp::Bye bye = NewBye(); - rtc::Buffer buffer = bye.Build(); + Buffer buffer = bye.Build(); return std::make_unique(buffer); } case SupportedRtcpTypes::kTransportFeedback: { rtcp::TransportFeedback transport_feedback = NewTransportFeedback(); - rtc::Buffer buffer = transport_feedback.Build(); + Buffer buffer = transport_feedback.Build(); return std::make_unique(buffer); } default: RTC_DCHECK_NOTREACHED(); - rtc::Buffer buffer; + Buffer buffer; return std::make_unique(buffer); } } @@ -563,7 +629,7 @@ EventGenerator::NewGenericPacketReceived() { } std::unique_ptr EventGenerator::NewGenericAckReceived() { - absl::optional receive_timestamp = absl::nullopt; + std::optional receive_timestamp = std::nullopt; if (prng_.Rand(0, 2) > 0) { receive_timestamp = prng_.Rand(0, 100000); } @@ -598,9 +664,10 @@ void EventGenerator::RandomizeRtpPacket( rtp_packet->SetExtension(prng_.Rand(0x00ffffff)); } - if (extension_map.IsRegistered(AudioLevel::kId) && + if (extension_map.IsRegistered(AudioLevelExtension::kId) && (all_configured_exts || prng_.Rand())) { - rtp_packet->SetExtension(prng_.Rand(), prng_.Rand(127)); + rtp_packet->SetExtension( + AudioLevel(prng_.Rand(), prng_.Rand(127))); } if (extension_map.IsRegistered(AbsoluteSendTime::kId) && @@ -704,14 +771,15 @@ RtpHeaderExtensionMap EventGenerator::NewRtpHeaderExtensionMap( std::vector id(RtpExtension::kOneByteHeaderExtensionMaxId - RtpExtension::kMinId + 1); std::iota(id.begin(), id.end(), RtpExtension::kMinId); - ShuffleInPlace(&prng_, rtc::ArrayView(id)); + ShuffleInPlace(&prng_, ArrayView(id)); auto not_excluded = [&](RTPExtensionType type) -> bool { return !absl::c_linear_search(excluded_extensions, type); }; - if (not_excluded(AudioLevel::kId) && (configure_all || 
prng_.Rand())) { - extension_map.Register(id[0]); + if (not_excluded(AudioLevelExtension::kId) && + (configure_all || prng_.Rand())) { + extension_map.Register(id[0]); } if (not_excluded(TransmissionOffset::kId) && (configure_all || prng_.Rand())) { @@ -1008,15 +1076,16 @@ void VerifyLoggedRtpHeader(const Event& original_header, } // AudioLevel header extension. - ASSERT_EQ(original_header.template HasExtension(), - logged_header.extension.hasAudioLevel); - if (logged_header.extension.hasAudioLevel) { - bool voice_activity; - uint8_t audio_level; - ASSERT_TRUE(original_header.template GetExtension( - &voice_activity, &audio_level)); - EXPECT_EQ(voice_activity, logged_header.extension.voiceActivity); - EXPECT_EQ(audio_level, logged_header.extension.audioLevel); + ASSERT_EQ(original_header.template HasExtension(), + logged_header.extension.audio_level().has_value()); + if (logged_header.extension.audio_level()) { + AudioLevel audio_level; + ASSERT_TRUE(original_header.template GetExtension( + &audio_level)); + EXPECT_EQ(audio_level.voice_activity(), + logged_header.extension.audio_level()->voice_activity()); + EXPECT_EQ(audio_level.level(), + logged_header.extension.audio_level()->level()); } // VideoOrientation header extension. @@ -1032,23 +1101,15 @@ void VerifyLoggedRtpHeader(const Event& original_header, } template -void VerifyLoggedDependencyDescriptor(const Event& packet, - const std::vector& logged_dd) { - if (webrtc::field_trial::IsDisabled( - "WebRTC-RtcEventLogEncodeDependencyDescriptor")) { - EXPECT_TRUE(logged_dd.empty()); - } else { - rtc::ArrayView original = +void EventVerifier::VerifyLoggedDependencyDescriptor( + const Event& packet, + const std::vector& logged_dd) const { + if (expect_dependency_descriptor_rtp_header_extension_is_set_) { + ArrayView original = packet.template GetRawExtension(); - EXPECT_EQ(logged_dd.size(), original.size()); - bool dd_is_same = true; - for (size_t i = 0; i < logged_dd.size(); ++i) { - dd_is_same = logged_dd[i] == original[i]; - if (!dd_is_same) { - break; - } - } - EXPECT_TRUE(dd_is_same); + EXPECT_THAT(logged_dd, ElementsAreArray(original)); + } else { + EXPECT_THAT(logged_dd, IsEmpty()); } } diff --git a/logging/rtc_event_log/rtc_event_log_unittest_helper.h b/logging/rtc_event_log/rtc_event_log_unittest_helper.h index 950a622f8b..0bcffdb489 100644 --- a/logging/rtc_event_log/rtc_event_log_unittest_helper.h +++ b/logging/rtc_event_log/rtc_event_log_unittest_helper.h @@ -17,21 +17,26 @@ #include #include +#include "api/rtc_event_log/rtc_event_log.h" +#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" #include "logging/rtc_event_log/events/rtc_event_alr_state.h" #include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h" #include "logging/rtc_event_log/events/rtc_event_audio_playout.h" #include "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h" #include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_begin_log.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" #include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" #include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h" +#include "logging/rtc_event_log/events/rtc_event_end_log.h" #include "logging/rtc_event_log/events/rtc_event_frame_decoded.h" #include "logging/rtc_event_log/events/rtc_event_generic_ack_received.h" #include 
"logging/rtc_event_log/events/rtc_event_generic_packet_received.h" #include "logging/rtc_event_log/events/rtc_event_generic_packet_sent.h" #include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" #include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" +#include "logging/rtc_event_log/events/rtc_event_neteq_set_minimum_delay.h" #include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" #include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h" #include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" @@ -43,9 +48,8 @@ #include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" #include "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h" #include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" -#include "logging/rtc_event_log/rtc_event_log_parser.h" -#include "logging/rtc_event_log/rtc_stream_config.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" #include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" #include "modules/rtp_rtcp/source/rtcp_packet/fir.h" @@ -56,6 +60,7 @@ #include "modules/rtp_rtcp/source/rtcp_packet/remb.h" #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "rtc_base/random.h" @@ -160,6 +165,10 @@ class EventVerifier { explicit EventVerifier(RtcEventLog::EncodingType encoding_type) : encoding_type_(encoding_type) {} + void ExpectDependencyDescriptorExtensionIsSet(bool value) { + expect_dependency_descriptor_rtp_header_extension_is_set_ = value; + } + void VerifyLoggedAlrStateEvent(const RtcEventAlrState& original_event, const LoggedAlrStateEvent& logged_event) const; @@ -239,8 +248,8 @@ class EventVerifier { const LoggedGenericAckReceived& logged_event) const; template - void VerifyLoggedRtpPacket(const EventType& original_event, - const ParsedType& logged_event) { + void VerifyLoggedRtpPacket(const EventType& /* original_event */, + const ParsedType& /* logged_event */) { static_assert(sizeof(ParsedType) == 0, "You have to use one of the two defined template " "specializations of VerifyLoggedRtpPacket"); @@ -331,7 +340,13 @@ class EventVerifier { void VerifyReportBlock(const rtcp::ReportBlock& original_report_block, const rtcp::ReportBlock& logged_report_block); + template + void VerifyLoggedDependencyDescriptor( + const Event& packet, + const std::vector& logged_dd) const; + RtcEventLog::EncodingType encoding_type_; + bool expect_dependency_descriptor_rtp_header_extension_is_set_ = true; }; } // namespace test diff --git a/logging/rtc_event_log/rtc_event_processor.cc b/logging/rtc_event_log/rtc_event_processor.cc index e1024b1b22..07671cc422 100644 --- a/logging/rtc_event_log/rtc_event_processor.cc +++ b/logging/rtc_event_log/rtc_event_processor.cc @@ -9,6 +9,10 @@ */ #include "logging/rtc_event_log/rtc_event_processor.h" +#include +#include +#include + #include "rtc_base/numerics/sequence_number_util.h" namespace webrtc { @@ -43,8 +47,8 @@ bool RtcEventProcessor::Cmp(const RtcEventProcessor::ListPtrType& a, if (a->GetTypeOrder() != b->GetTypeOrder()) return a->GetTypeOrder() > b->GetTypeOrder(); - absl::optional wrapped_seq_num_a = a->GetTransportSeqNum(); - absl::optional wrapped_seq_num_b = 
b->GetTransportSeqNum(); + std::optional wrapped_seq_num_a = a->GetTransportSeqNum(); + std::optional wrapped_seq_num_b = b->GetTransportSeqNum(); if (wrapped_seq_num_a && wrapped_seq_num_b) { return AheadOf(*wrapped_seq_num_a, *wrapped_seq_num_b); } else if (wrapped_seq_num_a.has_value() != wrapped_seq_num_b.has_value()) { diff --git a/logging/rtc_event_log/rtc_event_processor.h b/logging/rtc_event_log/rtc_event_processor.h index a46f390862..846fbb842c 100644 --- a/logging/rtc_event_log/rtc_event_processor.h +++ b/logging/rtc_event_log/rtc_event_processor.h @@ -13,12 +13,13 @@ #include -#include +#include #include -#include +#include +#include #include -#include "api/function_view.h" +#include "logging/rtc_event_log/rtc_event_log_parser.h" #include "logging/rtc_event_log/rtc_event_processor_order.h" #include "rtc_base/checks.h" @@ -41,7 +42,7 @@ class ProcessableEventListInterface { virtual bool IsEmpty() const = 0; virtual int64_t GetNextTime() const = 0; virtual int GetTypeOrder() const = 0; - virtual absl::optional GetTransportSeqNum() const = 0; + virtual std::optional GetTransportSeqNum() const = 0; virtual int GetInsertionOrder() const = 0; }; @@ -54,7 +55,7 @@ class ProcessableEventList : public ProcessableEventListInterface { Iterator end, std::function f, int type_order, - std::function(const T&)> + std::function(const T&)> transport_seq_num_accessor, int insertion_order) : begin_(begin), @@ -79,7 +80,7 @@ class ProcessableEventList : public ProcessableEventListInterface { int GetTypeOrder() const override { return type_order_; } - absl::optional GetTransportSeqNum() const override { + std::optional GetTransportSeqNum() const override { RTC_DCHECK(!IsEmpty()); return transport_seq_num_accessor_(*begin_); } @@ -91,7 +92,7 @@ class ProcessableEventList : public ProcessableEventListInterface { Iterator end_; std::function f_; int type_order_; - std::function(const T&)> transport_seq_num_accessor_; + std::function(const T&)> transport_seq_num_accessor_; int insertion_order_; }; @@ -151,7 +152,7 @@ class RtcEventProcessor { const Iterable& iterable, std::function handler, int type_order, - std::function( + std::function( const typename Iterable::value_type&)> transport_seq_num_accessor, int insertion_order) { if (iterable.begin() == iterable.end()) diff --git a/logging/rtc_event_log/rtc_event_processor_order.h b/logging/rtc_event_log/rtc_event_processor_order.h index 47ef346d5f..7c2c7e8ea5 100644 --- a/logging/rtc_event_log/rtc_event_processor_order.h +++ b/logging/rtc_event_log/rtc_event_processor_order.h @@ -13,9 +13,34 @@ #include -#include "absl/types/optional.h" -#include "api/function_view.h" +#include + #include "logging/rtc_event_log/events/logged_rtp_rtcp.h" +#include "logging/rtc_event_log/events/rtc_event_alr_state.h" +#include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h" +#include "logging/rtc_event_log/events/rtc_event_audio_playout.h" +#include "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_begin_log.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" +#include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" +#include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h" +#include "logging/rtc_event_log/events/rtc_event_end_log.h" +#include 
"logging/rtc_event_log/events/rtc_event_frame_decoded.h" +#include "logging/rtc_event_log/events/rtc_event_generic_ack_received.h" +#include "logging/rtc_event_log/events/rtc_event_generic_packet_received.h" +#include "logging/rtc_event_log/events/rtc_event_generic_packet_sent.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" +#include "logging/rtc_event_log/events/rtc_event_neteq_set_minimum_delay.h" +#include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" +#include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h" +#include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" +#include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" +#include "logging/rtc_event_log/events/rtc_event_route_change.h" +#include "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h" +#include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" #include "logging/rtc_event_log/rtc_event_log_parser.h" namespace webrtc { @@ -77,9 +102,9 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::Start); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedStartEvent&) { - return absl::optional(); + return std::optional(); } }; @@ -87,9 +112,9 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::Stop); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedStopEvent&) { - return absl::optional(); + return std::optional(); } }; @@ -97,9 +122,9 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::StreamConfig); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedAudioRecvConfig&) { - return absl::optional(); + return std::optional(); } }; @@ -107,9 +132,9 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::StreamConfig); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedAudioSendConfig&) { - return absl::optional(); + return std::optional(); } }; @@ -117,9 +142,9 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::StreamConfig); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedVideoRecvConfig&) { - return absl::optional(); + return std::optional(); } }; @@ -127,9 +152,9 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::StreamConfig); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedVideoSendConfig&) { - return absl::optional(); + return std::optional(); } }; @@ -138,9 +163,9 @@ class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::IceCondidateConfig); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedIceCandidatePairConfig&) { - return absl::optional(); + return std::optional(); } }; @@ -149,9 +174,9 @@ class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::IceCandidateEvent); - static absl::optional transport_seq_num_accessor( + static std::optional 
transport_seq_num_accessor( const LoggedIceCandidatePairEvent&) { - return absl::optional(); + return std::optional(); } }; @@ -160,9 +185,9 @@ class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::DtlsTransportState); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedDtlsTransportState&) { - return absl::optional(); + return std::optional(); } }; @@ -170,9 +195,9 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::DtlsWritable); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedDtlsWritableState&) { - return absl::optional(); + return std::optional(); } }; @@ -180,9 +205,9 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::RouteChange); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedRouteChangeEvent&) { - return absl::optional(); + return std::optional(); } }; @@ -191,9 +216,9 @@ class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::BweRemoteEstimate); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedRemoteEstimateEvent&) { - return absl::optional(); + return std::optional(); } }; @@ -202,9 +227,9 @@ class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::BweProbeFailure); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedBweProbeFailureEvent&) { - return absl::optional(); + return std::optional(); } }; @@ -213,9 +238,9 @@ class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::BweProbeSuccess); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedBweProbeSuccessEvent&) { - return absl::optional(); + return std::optional(); } }; @@ -223,9 +248,9 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::BweDelayBased); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedBweDelayBasedUpdate&) { - return absl::optional(); + return std::optional(); } }; @@ -233,9 +258,9 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::BweLossBased); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedBweLossBasedUpdate&) { - return absl::optional(); + return std::optional(); } }; @@ -244,9 +269,9 @@ class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::BweProbeCreated); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedBweProbeClusterCreatedEvent&) { - return absl::optional(); + return std::optional(); } }; @@ -255,9 +280,9 @@ class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::AudioNetworkAdaptation); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedAudioNetworkAdaptationEvent&) { - return absl::optional(); + return std::optional(); } }; @@ -266,9 +291,9 @@ class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::NetEqSetMinDelay); - static absl::optional transport_seq_num_accessor( + 
static std::optional transport_seq_num_accessor( const LoggedNetEqSetMinimumDelayEvent&) { - return absl::optional(); + return std::optional(); } }; @@ -276,9 +301,9 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::AudioPlayout); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedAudioPlayoutEvent&) { - return absl::optional(); + return std::optional(); } }; @@ -286,9 +311,9 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::FrameDecoded); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedFrameDecoded&) { - return absl::optional(); + return std::optional(); } }; @@ -297,9 +322,9 @@ class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::GenericPacketIn); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedGenericPacketReceived&) { - return absl::optional(); + return std::optional(); } }; @@ -307,9 +332,9 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::GenericAckIn); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedGenericAckReceived&) { - return absl::optional(); + return std::optional(); } }; @@ -318,9 +343,9 @@ class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::GenericPacketOut); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedGenericPacketSent&) { - return absl::optional(); + return std::optional(); } }; @@ -332,11 +357,11 @@ class TieBreaker { ? TypeOrder::RtpIn : TypeOrder::RtpOut); } - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedRtpPacket& p) { return p.header.extension.hasTransportSequenceNumber ? p.header.extension.transportSequenceNumber - : absl::optional(); + : std::optional(); } }; @@ -348,10 +373,10 @@ class TieBreaker { ? TypeOrder::RtpIn : TypeOrder::RtpOut); } - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedPacketInfo& p) { return p.has_transport_seq_no ? p.transport_seq_no - : absl::optional(); + : std::optional(); } }; @@ -359,11 +384,11 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::RtpIn); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedRtpPacketIncoming& p) { return p.rtp.header.extension.hasTransportSequenceNumber ? p.rtp.header.extension.transportSequenceNumber - : absl::optional(); + : std::optional(); } }; @@ -371,11 +396,11 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::RtpOut); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedRtpPacketOutgoing& p) { return p.rtp.header.extension.hasTransportSequenceNumber ? 
p.rtp.header.extension.transportSequenceNumber - : absl::optional(); + : std::optional(); } }; @@ -383,9 +408,9 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::RtcpIn); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedRtcpPacketIncoming&) { - return absl::optional(); + return std::optional(); } }; @@ -393,9 +418,9 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::RtcpOut); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedRtcpPacketOutgoing&) { - return absl::optional(); + return std::optional(); } }; @@ -407,9 +432,23 @@ class TieBreaker { ? TypeOrder::RtcpIn : TypeOrder::RtcpOut); } - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedRtcpPacketTransportFeedback&) { - return absl::optional(); + return std::optional(); + } +}; + +template <> +class TieBreaker { + public: + static constexpr int type_order(PacketDirection direction) { + return static_cast(direction == PacketDirection::kIncomingPacket + ? TypeOrder::RtcpIn + : TypeOrder::RtcpOut); + } + static std::optional transport_seq_num_accessor( + const LoggedRtcpPacketSenderReport&) { + return std::optional(); } }; @@ -421,9 +460,9 @@ class TieBreaker { ? TypeOrder::RtcpIn : TypeOrder::RtcpOut); } - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedRtcpPacketReceiverReport&) { - return absl::optional(); + return std::optional(); } }; @@ -431,9 +470,9 @@ template <> class TieBreaker { public: static constexpr int type_order = static_cast(TypeOrder::AlrState); - static absl::optional transport_seq_num_accessor( + static std::optional transport_seq_num_accessor( const LoggedAlrStateEvent&) { - return absl::optional(); + return std::optional(); } }; diff --git a/logging/rtc_event_log/rtc_event_processor_unittest.cc b/logging/rtc_event_log/rtc_event_processor_unittest.cc index d57408d1e6..2427f15c1a 100644 --- a/logging/rtc_event_log/rtc_event_processor_unittest.cc +++ b/logging/rtc_event_log/rtc_event_processor_unittest.cc @@ -15,9 +15,24 @@ #include #include #include - -#include "absl/memory/memory.h" +#include +#include +#include +#include + +#include "api/rtp_headers.h" +#include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" +#include "logging/rtc_event_log/events/rtc_event_begin_log.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" +#include "logging/rtc_event_log/events/rtc_event_end_log.h" +#include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" +#include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h" +#include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" +#include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" #include "logging/rtc_event_log/rtc_event_log_parser.h" +#include "logging/rtc_event_log/rtc_event_processor_order.h" #include "rtc_base/checks.h" #include "rtc_base/random.h" #include "test/gmock.h" @@ -54,7 +69,7 @@ CreateRandomEventLists(size_t num_lists, size_t num_elements, uint64_t seed) { LoggedRtpPacket CreateRtpPacket(int64_t time_ms, uint32_t ssrc, - absl::optional transport_seq_num) { + std::optional transport_seq_num) { RTPHeader header; header.ssrc = ssrc; 
header.timestamp = static_cast(time_ms); @@ -222,8 +237,8 @@ TEST(RtcEventProcessor, IncomingFeedbackBeforeBwe) { TEST(RtcEventProcessor, RtpPacketsInTransportSeqNumOrder) { std::vector ssrc_1234{ - CreateRtpPacket(1, 1234, absl::nullopt), - CreateRtpPacket(1, 1234, absl::nullopt)}; + CreateRtpPacket(1, 1234, std::nullopt), + CreateRtpPacket(1, 1234, std::nullopt)}; std::vector ssrc_2345{CreateRtpPacket(1, 2345, 2), CreateRtpPacket(1, 2345, 3), CreateRtpPacket(1, 2345, 6)}; @@ -232,9 +247,9 @@ TEST(RtcEventProcessor, RtpPacketsInTransportSeqNumOrder) { CreateRtpPacket(1, 3456, 5)}; // Store SSRC and transport sequence number for each processed packet. - std::vector>> results; + std::vector>> results; auto get_packet = [&results](const LoggedRtpPacket& packet) { - absl::optional transport_seq_num; + std::optional transport_seq_num; if (packet.header.extension.hasTransportSequenceNumber) transport_seq_num = packet.header.extension.transportSequenceNumber; results.emplace_back(packet.header.ssrc, transport_seq_num); @@ -246,9 +261,9 @@ TEST(RtcEventProcessor, RtpPacketsInTransportSeqNumOrder) { processor.AddEvents(ssrc_3456, get_packet, PacketDirection::kIncomingPacket); processor.ProcessEventsInOrder(); - std::vector>> expected{ - {1234, absl::nullopt}, - {1234, absl::nullopt}, + std::vector>> expected{ + {1234, std::nullopt}, + {1234, std::nullopt}, {3456, 1}, {2345, 2}, {2345, 3}, @@ -267,9 +282,9 @@ TEST(RtcEventProcessor, TransportSeqNumOrderHandlesWrapAround) { CreateRtpPacket(1, 2345, 0), CreateRtpPacket(1, 2345, 3)}; // Store SSRC and transport sequence number for each processed packet. - std::vector>> results; + std::vector>> results; auto get_packet = [&results](const LoggedRtpPacket& packet) { - absl::optional transport_seq_num; + std::optional transport_seq_num; if (packet.header.extension.hasTransportSequenceNumber) transport_seq_num = packet.header.extension.transportSequenceNumber; results.emplace_back(packet.header.ssrc, transport_seq_num); @@ -280,7 +295,7 @@ TEST(RtcEventProcessor, TransportSeqNumOrderHandlesWrapAround) { processor.AddEvents(ssrc_2345, get_packet, PacketDirection::kOutgoingPacket); processor.ProcessEventsInOrder(); - std::vector>> expected{ + std::vector>> expected{ {1234, std::numeric_limits::max() - 1}, {2345, std::numeric_limits::max()}, {2345, 0}, diff --git a/media/BUILD.gn b/media/BUILD.gn index 22eb02ab4f..8042a7ce34 100644 --- a/media/BUILD.gn +++ b/media/BUILD.gn @@ -37,18 +37,27 @@ rtc_library("rtc_sdp_video_format_utils") { ] deps = [ + ":media_constants", + "../api:rtp_parameters", "../api/video_codecs:video_codecs_api", "../rtc_base:checks", "../rtc_base:stringutils", + "//third_party/abseil-cpp/absl/algorithm:container", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("rtc_media_base") { visibility = [ "*" ] defines = [] libs = [] + sources = [ + "base/adapted_video_track_source.h", # Used downstream + "base/video_adapter.h", # Used once downstream + "base/video_broadcaster.h", # Used downstream + "base/video_common.h", # Used downstream + ] deps = [ + ":adapted_video_track_source", ":codec", ":media_channel", ":media_channel_impl", @@ -56,6 +65,10 @@ rtc_library("rtc_media_base") { ":rtc_media_config", ":rtp_utils", ":stream_params", + ":video_adapter", + ":video_broadcaster", + ":video_common", + ":video_source_base", "../api:array_view", "../api:audio_options_api", "../api:call_api", @@ -68,6 +81,7 @@ rtc_library("rtc_media_base") { "../api:scoped_refptr", "../api:sequence_checker", "../api:transport_api", + 
"../api:video_track_source_constraints", "../api/audio:audio_frame_processor", "../api/audio_codecs:audio_codecs_api", "../api/crypto:frame_decryptor_interface", @@ -80,6 +94,7 @@ rtc_library("rtc_media_base") { "../api/transport/rtp:rtp_source", "../api/units:time_delta", "../api/video:recordable_encoded_frame", + "../api/video:resolution", "../api/video:video_bitrate_allocation", "../api/video:video_bitrate_allocator_factory", "../api/video:video_frame", @@ -87,11 +102,9 @@ rtc_library("rtc_media_base") { "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", "../call:call_interfaces", - "../call:video_stream_api", "../common_video", "../modules/async_audio_processing", "../modules/audio_device", - "../modules/audio_processing:audio_processing_statistics", "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:async_packet_socket", "../rtc_base:buffer", @@ -102,7 +115,6 @@ rtc_library("rtc_media_base") { "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:network_route", - "../rtc_base:rtc_task_queue", "../rtc_base:sanitizer", "../rtc_base:socket", "../rtc_base:stringutils", @@ -114,30 +126,142 @@ rtc_library("rtc_media_base") { "../rtc_base/system:no_unique_address", "../rtc_base/system:rtc_export", "../rtc_base/third_party/sigslot", - "../system_wrappers:field_trial", "../video/config:encoder_config", + "//third_party/abseil-cpp/absl/base:core_headers", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] +} + +rtc_library("adapted_video_track_source") { sources = [ "base/adapted_video_track_source.cc", "base/adapted_video_track_source.h", - "base/audio_source.h", - "base/media_engine.cc", - "base/media_engine.h", + ] + deps = [ + ":video_adapter", + ":video_broadcaster", + "../api:media_stream_interface", + "../api:scoped_refptr", + "../api:video_track_source_constraints", + "../api/video:recordable_encoded_frame", + "../api/video:video_frame", + "../api/video:video_rtp_headers", + "../rtc_base:checks", + "../rtc_base:macromagic", + "../rtc_base:timeutils", + "../rtc_base/synchronization:mutex", + "../rtc_base/system:rtc_export", + ] +} + +rtc_source_set("audio_source") { + sources = [ "base/audio_source.h" ] +} + +rtc_library("video_adapter") { + sources = [ "base/video_adapter.cc", "base/video_adapter.h", + ] + deps = [ + ":video_common", + "../api/video:resolution", + "../api/video:video_frame", + "../common_video", + "../rtc_base:checks", + "../rtc_base:logging", + "../rtc_base:macromagic", + "../rtc_base:stringutils", + "../rtc_base:timeutils", + "../rtc_base/synchronization:mutex", + "../rtc_base/system:rtc_export", + "../system_wrappers:field_trial", + ] +} + +rtc_library("video_source_base") { + sources = [ + "base/video_source_base.cc", + "base/video_source_base.h", + ] + deps = [ + "../api:sequence_checker", + "../api/video:video_frame", + "../rtc_base:checks", + "../rtc_base:macromagic", + "../rtc_base/system:no_unique_address", + "//third_party/abseil-cpp/absl/algorithm:container", + ] +} + +rtc_library("video_broadcaster") { + sources = [ "base/video_broadcaster.cc", - "base/video_broadcaster.h", + "base/video_broadcaster.h", # Used downstream + ] + deps = [ + ":video_common", + ":video_source_base", + "../api:media_stream_interface", + "../api:scoped_refptr", + "../api:sequence_checker", + 
"../api:video_track_source_constraints", + "../api/video:video_frame", + "../api/video:video_rtp_headers", + "../rtc_base:checks", + "../rtc_base:logging", + "../rtc_base:macromagic", + "../rtc_base/synchronization:mutex", + ] +} + +rtc_library("video_common") { + sources = [ "base/video_common.cc", "base/video_common.h", - "base/video_source_base.cc", - "base/video_source_base.h", + ] + deps = [ + "../api:array_view", + "../rtc_base:checks", + "../rtc_base:macromagic", + "../rtc_base:stringutils", + "../rtc_base:timeutils", + "../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/base:core_headers", + ] +} + +rtc_library("media_engine") { + sources = [ + "base/media_engine.cc", + "base/media_engine.h", + ] + deps = [ + ":codec", + ":media_channel", + ":media_channel_impl", + ":rid_description", + ":rtc_media_config", + ":stream_params", + ":video_common", + "../api:array_view", + "../api:audio_options_api", + "../api:field_trials_view", + "../api:rtc_error", + "../api:rtp_parameters", + "../api:rtp_transceiver_direction", + "../api:scoped_refptr", + "../api/audio:audio_device", + "../api/audio_codecs:audio_codecs_api", + "../api/crypto:options", + "../api/video:video_bitrate_allocation", + "../api/video:video_bitrate_allocator_factory", + "../api/video:video_codec_constants", + "../api/video_codecs:scalability_mode", + "../call:call_interfaces", + "../rtc_base:checks", + "../rtc_base:stringutils", + "../rtc_base/system:file_wrapper", + "//third_party/abseil-cpp/absl/algorithm:container", ] } @@ -151,6 +275,7 @@ rtc_library("media_channel_impl") { ":media_channel", ":rtp_utils", ":stream_params", + "../api:array_view", "../api:audio_options_api", "../api:call_api", "../api:frame_transformer_interface", @@ -184,17 +309,15 @@ rtc_library("media_channel_impl") { "../rtc_base:network_route", "../rtc_base:socket", "../rtc_base/network:sent_packet", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } rtc_source_set("media_channel") { sources = [ "base/media_channel.h" ] deps = [ + ":audio_source", ":codec", ":media_constants", ":rtp_utils", @@ -208,12 +331,14 @@ rtc_source_set("media_channel") { "../api:rtp_parameters", "../api:rtp_sender_interface", "../api:scoped_refptr", + "../api/audio:audio_processing_statistics", "../api/audio_codecs:audio_codecs_api", "../api/crypto:frame_decryptor_interface", "../api/crypto:frame_encryptor_interface", "../api/task_queue:pending_task_safety_flag", "../api/transport:datagram_transport_interface", "../api/transport/rtp:rtp_source", + "../api/units:data_rate", "../api/units:time_delta", "../api/units:timestamp", "../api/video:recordable_encoded_frame", @@ -221,9 +346,7 @@ rtc_source_set("media_channel") { "../api/video:video_rtp_headers", "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", - "../call:video_stream_api", "../common_video", - "../modules/audio_processing:audio_processing_statistics", "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:async_packet_socket", "../rtc_base:buffer", @@ -236,11 +359,8 @@ rtc_source_set("media_channel") { "../rtc_base:stringutils", "../rtc_base/network:sent_packet", "../video/config:encoder_config", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + 
"//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -248,23 +368,44 @@ rtc_library("codec") { sources = [ "base/codec.cc", "base/codec.h", + + # Because Codec::Matches uses a function from codec_comparators, + # there's a mutual dependency between these two files. + "base/codec_comparators.cc", + "base/codec_comparators.h", ] deps = [ ":media_constants", - "../api:field_trials_view", "../api:rtp_parameters", "../api/audio_codecs:audio_codecs_api", + "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", "../rtc_base:checks", "../rtc_base:logging", "../rtc_base:stringutils", "../rtc_base/system:rtc_export", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:str_format", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("codec_list") { + sources = [ + "base/codec_list.cc", + "base/codec_list.h", + ] + deps = [ + ":codec", + ":media_constants", + "../api:rtc_error", + "../rtc_base:checks", + "../rtc_base:logging", + "../rtc_base:stringutils", ] } @@ -280,10 +421,10 @@ rtc_library("rtp_utils") { "../rtc_base:async_packet_socket", "../rtc_base:byte_order", "../rtc_base:checks", - "../rtc_base:ssl", + "../rtc_base:digest", "../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("stream_params") { @@ -294,10 +435,11 @@ rtc_library("stream_params") { deps = [ ":rid_description", "../api:array_view", + "../rtc_base:checks", "../rtc_base:stringutils", "../rtc_base:unique_id_generator", + "//third_party/abseil-cpp/absl/algorithm:container", ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } rtc_library("media_constants") { @@ -325,7 +467,7 @@ rtc_library("rid_description") { "base/rid_description.cc", "base/rid_description.h", ] - deps = [] + deps = [ ":codec" ] } rtc_library("rtc_simulcast_encoder_adapter") { @@ -337,31 +479,40 @@ rtc_library("rtc_simulcast_encoder_adapter") { "engine/simulcast_encoder_adapter.h", ] deps = [ - ":rtc_media_base", + ":rtc_sdp_video_format_utils", + ":video_common", + "../api:array_view", "../api:fec_controller_api", + "../api:field_trials_view", "../api:scoped_refptr", "../api:sequence_checker", + "../api/environment", + "../api/units:data_rate", + "../api/units:timestamp", + "../api/video:encoded_image", + "../api/video:video_bitrate_allocation", + "../api/video:video_bitrate_allocator", "../api/video:video_codec_constants", "../api/video:video_frame", + "../api/video:video_frame_type", "../api/video:video_rtp_headers", "../api/video_codecs:rtc_software_fallback_wrappers", + "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", - "../call:video_stream_api", "../common_video", + "../media:media_constants", "../modules/video_coding:video_codec_interface", "../modules/video_coding:video_coding_utility", "../rtc_base:checks", "../rtc_base:logging", + "../rtc_base:stringutils", "../rtc_base/experiments:encoder_info_settings", "../rtc_base/experiments:rate_control_settings", "../rtc_base/system:no_unique_address", "../rtc_base/system:rtc_export", "../system_wrappers", - "../system_wrappers:field_trial", - ] - absl_deps = [ 
"//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/base:nullability", ] } @@ -373,23 +524,22 @@ rtc_library("rtc_internal_video_codecs") { deps = [ ":codec", ":media_constants", - ":rtc_media_base", ":rtc_simulcast_encoder_adapter", + "../api/environment", "../api/video:encoded_image", "../api/video:video_bitrate_allocation", "../api/video:video_frame", "../api/video:video_rtp_headers", "../api/video_codecs:rtc_software_fallback_wrappers", + "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", "../api/video_codecs:video_encoder_factory_template", "../api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter", "../api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter", "../api/video_codecs:video_encoder_factory_template_open_h264_adapter", "../call:call_interfaces", - "../call:video_stream_api", "../modules/video_coding:video_codec_interface", "../modules/video_coding:webrtc_h264", - "../modules/video_coding:webrtc_multiplex", "../modules/video_coding:webrtc_vp8", "../modules/video_coding:webrtc_vp9", "../rtc_base:checks", @@ -397,6 +547,8 @@ rtc_library("rtc_internal_video_codecs") { "../rtc_base/system:rtc_export", "../system_wrappers:field_trial", "../test:fake_video_codecs", + "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/strings", ] if (enable_libaom) { @@ -409,10 +561,6 @@ rtc_library("rtc_internal_video_codecs") { if (rtc_include_dav1d_in_internal_decoder_factory) { deps += [ "../modules/video_coding/codecs/av1:dav1d_decoder" ] } - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] sources = [ "engine/fake_video_codec_factory.cc", "engine/fake_video_codec_factory.h", @@ -420,8 +568,6 @@ rtc_library("rtc_internal_video_codecs") { "engine/internal_decoder_factory.h", "engine/internal_encoder_factory.cc", "engine/internal_encoder_factory.h", - "engine/multiplex_codec_factory.cc", - "engine/multiplex_codec_factory.h", ] } @@ -431,12 +577,13 @@ rtc_library("rtc_audio_video") { defines = [] libs = [] deps = [ + ":audio_source", ":codec", ":media_channel", ":media_channel_impl", ":media_constants", + ":media_engine", ":rid_description", - ":rtc_media_base", ":rtc_media_config", ":rtp_utils", ":stream_params", @@ -445,114 +592,82 @@ rtc_library("rtc_audio_video") { "../api:call_api", "../api:field_trials_view", "../api:frame_transformer_interface", - "../api:libjingle_peerconnection_api", "../api:make_ref_counted", "../api:media_stream_interface", "../api:priority", "../api:rtc_error", "../api:rtp_headers", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api:sequence_checker", - "../api:transport_api", + "../api/audio:audio_device", "../api/audio:audio_frame_api", "../api/audio:audio_frame_processor", "../api/audio:audio_mixer_api", + "../api/audio:audio_processing", + "../api/audio:audio_processing_statistics", "../api/audio_codecs:audio_codecs_api", "../api/crypto:frame_decryptor_interface", "../api/crypto:frame_encryptor_interface", "../api/crypto:options", + "../api/environment", "../api/task_queue", "../api/task_queue:pending_task_safety_flag", "../api/transport:bitrate_settings", - "../api/transport:field_trial_based_config", "../api/transport/rtp:rtp_source", "../api/units:data_rate", "../api/units:time_delta", "../api/units:timestamp", "../api/video:recordable_encoded_frame", 
- "../api/video:resolution", - "../api/video:video_bitrate_allocation", "../api/video:video_bitrate_allocator_factory", - "../api/video:video_codec_constants", "../api/video:video_frame", - "../api/video:video_rtp_headers", "../api/video:video_stream_encoder", - "../api/video_codecs:rtc_software_fallback_wrappers", "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", "../call", "../call:call_interfaces", + "../call:payload_type", + "../call:payload_type_picker", "../call:receive_stream_interface", "../call:rtp_interfaces", - "../call:video_stream_api", - "../common_video", + "../call:video_receive_stream_api", + "../call:video_send_stream_api", "../common_video:frame_counts", - "../modules/async_audio_processing:async_audio_processing", - "../modules/audio_device", - "../modules/audio_device:audio_device_impl", + "../modules/async_audio_processing", "../modules/audio_mixer:audio_mixer_impl", - "../modules/audio_processing:api", - "../modules/audio_processing:audio_processing_statistics", - "../modules/audio_processing/aec_dump", - "../modules/audio_processing/agc:gain_control_interface", "../modules/rtp_rtcp", "../modules/rtp_rtcp:rtp_rtcp_format", - "../modules/video_coding", - "../modules/video_coding:video_codec_interface", - "../modules/video_coding:video_coding_utility", - "../modules/video_coding:webrtc_vp9_helpers", "../modules/video_coding/svc:scalability_mode_util", - "../rtc_base:audio_format_to_string", - "../rtc_base:buffer", - "../rtc_base:byte_order", "../rtc_base:checks", - "../rtc_base:copy_on_write_buffer", "../rtc_base:dscp", "../rtc_base:event_tracer", - "../rtc_base:ignore_wundef", "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:network_route", "../rtc_base:race_checker", - "../rtc_base:rtc_task_queue", - "../rtc_base:safe_conversions", "../rtc_base:socket", - "../rtc_base:ssl", "../rtc_base:stringutils", - "../rtc_base:threading", "../rtc_base:timeutils", "../rtc_base/experiments:field_trial_parser", - "../rtc_base/experiments:min_video_bitrate_experiment", - "../rtc_base/experiments:normalize_simulcast_size_experiment", - "../rtc_base/experiments:rate_control_settings", "../rtc_base/network:sent_packet", "../rtc_base/synchronization:mutex", "../rtc_base/system:file_wrapper", "../rtc_base/system:no_unique_address", - "../rtc_base/system:rtc_export", - "../rtc_base/third_party/base64", - "../system_wrappers", "../system_wrappers:metrics", "../video/config:encoder_config", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm", "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/functional:bind_front", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "engine/adm_helpers.cc", "engine/adm_helpers.h", - "engine/null_webrtc_video_engine.h", - "engine/payload_type_mapper.cc", - "engine/payload_type_mapper.h", "engine/webrtc_media_engine.cc", "engine/webrtc_media_engine.h", "engine/webrtc_video_engine.cc", @@ -576,41 +691,16 @@ rtc_library("rtc_audio_video") { } } -# Heavy but optional helper for unittests and webrtc users who prefer to use -# defaults factories or do not worry about extra dependencies and binary size. 
-rtc_library("rtc_media_engine_defaults") { - visibility = [ "*" ] - allow_poison = [ - "audio_codecs", - "default_task_queue", - "software_video_codecs", - ] - sources = [ - "engine/webrtc_media_engine_defaults.cc", - "engine/webrtc_media_engine_defaults.h", - ] - deps = [ - ":rtc_audio_video", - "../api/audio_codecs:builtin_audio_decoder_factory", - "../api/audio_codecs:builtin_audio_encoder_factory", - "../api/task_queue:default_task_queue_factory", - "../api/video:builtin_video_bitrate_allocator_factory", - "../api/video_codecs:builtin_video_decoder_factory", - "../api/video_codecs:builtin_video_encoder_factory", - "../modules/audio_processing:api", - "../rtc_base:checks", - "../rtc_base/system:rtc_export", - ] -} - rtc_source_set("rtc_data_sctp_transport_internal") { sources = [ "sctp/sctp_transport_internal.h" ] deps = [ ":media_channel", + "../api:libjingle_peerconnection_api", + "../api:priority", "../api:rtc_error", "../api/transport:datagram_transport_interface", - "../media:rtc_media_base", - "../p2p:rtc_p2p", + "../p2p:dtls_transport_internal", + "../p2p:packet_transport_internal", "../rtc_base:copy_on_write_buffer", "../rtc_base:threading", ] @@ -626,15 +716,22 @@ if (rtc_build_dcsctp) { ":media_channel", ":rtc_data_sctp_transport_internal", "../api:array_view", + "../api:dtls_transport_interface", + "../api:libjingle_peerconnection_api", + "../api:priority", + "../api:rtc_error", + "../api:sequence_checker", + "../api/environment", "../api/task_queue:pending_task_safety_flag", "../api/task_queue:task_queue", - "../media:rtc_media_base", + "../api/transport:datagram_transport_interface", "../net/dcsctp/public:factory", "../net/dcsctp/public:socket", "../net/dcsctp/public:types", "../net/dcsctp/public:utils", "../net/dcsctp/timer:task_queue_timeout", - "../p2p:rtc_p2p", + "../p2p:dtls_transport_internal", + "../p2p:packet_transport_internal", "../rtc_base:checks", "../rtc_base:copy_on_write_buffer", "../rtc_base:event_tracer", @@ -645,12 +742,10 @@ if (rtc_build_dcsctp) { "../rtc_base:stringutils", "../rtc_base:threading", "../rtc_base/containers:flat_map", + "../rtc_base/network:received_packet", "../rtc_base/third_party/sigslot:sigslot", "../system_wrappers", - ] - absl_deps += [ "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", ] } } @@ -663,7 +758,9 @@ rtc_library("rtc_data_sctp_transport_factory") { ] deps = [ ":rtc_data_sctp_transport_internal", + "../api/environment", "../api/transport:sctp_transport_factory_interface", + "../p2p:dtls_transport_internal", "../rtc_base:threading", "../rtc_base/system:unused", ] @@ -694,66 +791,100 @@ if (rtc_include_tests) { defines = [] deps = [ + ":audio_source", ":codec", ":media_channel", ":media_channel_impl", ":media_constants", + ":media_engine", ":rtc_audio_video", ":rtc_internal_video_codecs", ":rtc_media", - ":rtc_media_base", + ":rtc_media_config", ":rtc_simulcast_encoder_adapter", ":rtp_utils", ":stream_params", + ":video_common", + "../api:audio_options_api", "../api:call_api", "../api:fec_controller_api", + "../api:frame_transformer_interface", + "../api:make_ref_counted", + "../api:rtc_error", + "../api:rtp_headers", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:scoped_refptr", + "../api/adaptation:resource_adaptation_api", + "../api/audio:audio_device", + "../api/audio:audio_frame_api", + "../api/audio:audio_mixer_api", + "../api/audio:audio_processing", + "../api/audio_codecs:audio_codecs_api", + "../api/crypto:frame_decryptor_interface", + 
"../api/crypto:frame_encryptor_interface", + "../api/crypto:options", + "../api/environment", "../api/task_queue", "../api/task_queue:pending_task_safety_flag", - "../api/transport:field_trial_based_config", + "../api/transport:bitrate_settings", + "../api/transport/rtp:rtp_source", + "../api/units:time_delta", "../api/units:timestamp", "../api/video:encoded_image", + "../api/video:recordable_encoded_frame", "../api/video:video_bitrate_allocation", + "../api/video:video_bitrate_allocator_factory", "../api/video:video_frame", + "../api/video:video_frame_type", "../api/video:video_rtp_headers", + "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", "../call:call_interfaces", + "../call:fake_payload_type_suggester", "../call:mock_rtp_interfaces", - "../call:video_stream_api", + "../call:payload_type", + "../call:payload_type_picker", + "../call:rtp_interfaces", + "../call:video_receive_stream_api", + "../call:video_send_stream_api", "../common_video", "../modules/audio_processing", - "../modules/audio_processing:api", + "../modules/rtp_rtcp", "../modules/rtp_rtcp:rtp_rtcp_format", "../modules/video_coding:video_codec_interface", "../modules/video_coding:video_coding_utility", "../modules/video_coding/svc:scalability_mode_util", - "../p2p:rtc_p2p", + "../rtc_base:async_packet_socket", "../rtc_base:buffer", "../rtc_base:byte_order", "../rtc_base:checks", "../rtc_base:copy_on_write_buffer", "../rtc_base:dscp", "../rtc_base:gunit_helpers", + "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:network_route", "../rtc_base:rtc_event", - "../rtc_base:rtc_task_queue", + "../rtc_base:socket", "../rtc_base:stringutils", "../rtc_base:threading", "../rtc_base:timeutils", + "../rtc_base/network:sent_packet", "../rtc_base/synchronization:mutex", + "../rtc_base/system:file_wrapper", "../rtc_base/third_party/sigslot", + "../test:explicit_key_value_config", "../test:scoped_key_value_config", "../test:test_support", + "../video/config:encoder_config", "../video/config:streams_config", "//testing/gtest", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/base:nullability", "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "base/fake_frame_source.cc", @@ -798,45 +929,74 @@ if (rtc_include_tests) { defines = [] deps = [ + ":audio_source", ":codec", + ":codec_list", ":media_channel", ":media_constants", + ":media_engine", + ":rid_description", ":rtc_audio_video", ":rtc_internal_video_codecs", - ":rtc_media", ":rtc_media_base", - ":rtc_media_engine_defaults", + ":rtc_media_config", ":rtc_media_tests_utils", ":rtc_sdp_video_format_utils", ":rtc_simulcast_encoder_adapter", ":rtp_utils", ":stream_params", ":turn_utils", + ":video_common", + "../api:array_view", + "../api:audio_options_api", + "../api:call_api", "../api:create_simulcast_test_fixture_api", - "../api:libjingle_peerconnection_api", + "../api:fec_controller_api", + "../api:make_ref_counted", "../api:mock_encoder_selector", "../api:mock_video_bitrate_allocator", "../api:mock_video_bitrate_allocator_factory", "../api:mock_video_codec_factory", - "../api:mock_video_encoder", + "../api:mock_video_decoder", + "../api:priority", + "../api:ref_count", + "../api:rtc_error", + "../api:rtp_headers", "../api:rtp_parameters", + "../api:rtp_transceiver_direction", "../api:scoped_refptr", 
"../api:simulcast_test_fixture_api", + "../api:transport_api", + "../api:video_track_source_constraints", + "../api/audio:audio_device", + "../api/audio:audio_processing", + "../api/audio:builtin_audio_processing_builder", + "../api/audio_codecs:audio_codecs_api", "../api/audio_codecs:builtin_audio_decoder_factory", "../api/audio_codecs:builtin_audio_encoder_factory", - "../api/rtc_event_log", - "../api/task_queue", - "../api/task_queue:default_task_queue_factory", + "../api/crypto:options", + "../api/environment", + "../api/environment:environment_factory", "../api/test/video:function_video_factory", + "../api/transport:bitrate_settings", + "../api/transport:datagram_transport_interface", "../api/transport:field_trial_based_config", + "../api/transport/rtp:rtp_source", + "../api/units:data_rate", "../api/units:time_delta", "../api/units:timestamp", "../api/video:builtin_video_bitrate_allocator_factory", + "../api/video:encoded_image", + "../api/video:recordable_encoded_frame", "../api/video:resolution", "../api/video:video_bitrate_allocation", + "../api/video:video_bitrate_allocator", + "../api/video:video_bitrate_allocator_factory", "../api/video:video_codec_constants", "../api/video:video_frame", + "../api/video:video_frame_type", "../api/video:video_rtp_headers", + "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", "../api/video_codecs:video_decoder_factory_template", "../api/video_codecs:video_decoder_factory_template_dav1d_adapter", @@ -848,38 +1008,37 @@ if (rtc_include_tests) { "../api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter", "../api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter", "../api/video_codecs:video_encoder_factory_template_open_h264_adapter", - "../audio", "../call:call_interfaces", + "../call:payload_type_picker", + "../call:video_receive_stream_api", + "../call:video_send_stream_api", "../common_video", "../modules/audio_device:mock_audio_device", "../modules/audio_mixer:audio_mixer_impl", - "../modules/audio_processing", - "../modules/audio_processing:api", "../modules/audio_processing:mocks", - "../modules/rtp_rtcp", "../modules/rtp_rtcp:rtp_rtcp_format", "../modules/video_coding:simulcast_test_fixture_impl", "../modules/video_coding:video_codec_interface", + "../modules/video_coding:video_coding_utility", "../modules/video_coding:webrtc_h264", "../modules/video_coding:webrtc_vp8", "../modules/video_coding/svc:scalability_mode_util", + "../net/dcsctp/public:types", "../p2p:p2p_test_utils", "../rtc_base:async_packet_socket", "../rtc_base:byte_order", "../rtc_base:checks", - "../rtc_base:gunit_helpers", - "../rtc_base:logging", + "../rtc_base:copy_on_write_buffer", + "../rtc_base:dscp", "../rtc_base:macromagic", - "../rtc_base:rtc_base_tests_utils", - "../rtc_base:rtc_event", - "../rtc_base:rtc_task_queue", "../rtc_base:safe_conversions", - "../rtc_base:stringutils", + "../rtc_base:socket", "../rtc_base:threading", "../rtc_base:timeutils", + "../rtc_base:unique_id_generator", "../rtc_base/experiments:min_video_bitrate_experiment", - "../rtc_base/synchronization:mutex", - "../rtc_base/third_party/sigslot", + "../rtc_base/system:file_wrapper", + "../system_wrappers", "../system_wrappers:field_trial", "../test:audio_codec_mocks", "../test:fake_video_codecs", @@ -890,20 +1049,21 @@ if (rtc_include_tests) { "../test:test_support", "../test:video_test_common", "../test/time_controller", + "../video/config:encoder_config", "../video/config:streams_config", + 
"//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (enable_libaom) { defines += [ "RTC_USE_LIBAOM_AV1_ENCODER" ] } - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] sources = [ + "base/codec_comparators_unittest.cc", + "base/codec_list_unittest.cc", "base/codec_unittest.cc", "base/media_engine_unittest.cc", "base/rtp_utils_unittest.cc", @@ -915,9 +1075,6 @@ if (rtc_include_tests) { "base/video_common_unittest.cc", "engine/internal_decoder_factory_unittest.cc", "engine/internal_encoder_factory_unittest.cc", - "engine/multiplex_codec_factory_unittest.cc", - "engine/null_webrtc_video_engine_unittest.cc", - "engine/payload_type_mapper_unittest.cc", "engine/simulcast_encoder_adapter_unittest.cc", "engine/webrtc_media_engine_unittest.cc", "engine/webrtc_video_engine_unittest.cc", @@ -938,7 +1095,6 @@ if (rtc_include_tests) { data = rtc_media_unittests_resources if (is_android) { - deps += [ "//testing/android/native_test:native_test_support" ] shard_timeout = 900 } diff --git a/media/DEPS b/media/DEPS index 7fbbc0fd47..10636cc5af 100644 --- a/media/DEPS +++ b/media/DEPS @@ -25,6 +25,9 @@ specific_include_rules = { ".*webrtc_video_engine\.h": [ "+video/config", ], + ".*webrtc_video_engine\.cc": [ + "+video/config", + ], ".*media_channel\.h": [ "+video/config", ], @@ -34,4 +37,10 @@ specific_include_rules = { ".*fake_webrtc_call\.cc": [ "+video/config", ], + ".*fake_webrtc_call\.h": [ + "+video/config", + ], + ".*codec\.h": [ + "+absl/strings/str_format.h", + ], } diff --git a/media/OWNERS b/media/OWNERS index 5d8ec5aba6..cf40b1c25d 100644 --- a/media/OWNERS +++ b/media/OWNERS @@ -8,6 +8,3 @@ perkj@webrtc.org # Audio-related changes: peah@webrtc.org saza@webrtc.org - -# Datachannel-related changes: -orphis@webrtc.org diff --git a/media/base/adapted_video_track_source.cc b/media/base/adapted_video_track_source.cc index 816ada5f16..f015956d19 100644 --- a/media/base/adapted_video_track_source.cc +++ b/media/base/adapted_video_track_source.cc @@ -10,14 +10,20 @@ #include "media/base/adapted_video_track_source.h" +#include + #include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" #include "api/video/video_frame_buffer.h" #include "api/video/video_rotation.h" -#include "rtc_base/checks.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" +#include "api/video_track_source_constraints.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" -namespace rtc { +namespace webrtc { AdaptedVideoTrackSource::AdaptedVideoTrackSource() = default; @@ -27,7 +33,7 @@ AdaptedVideoTrackSource::AdaptedVideoTrackSource(int required_alignment) AdaptedVideoTrackSource::~AdaptedVideoTrackSource() = default; bool AdaptedVideoTrackSource::GetStats(Stats* stats) { - webrtc::MutexLock lock(&stats_mutex_); + MutexLock lock(&stats_mutex_); if (!stats_) { return false; @@ -37,9 +43,8 @@ bool AdaptedVideoTrackSource::GetStats(Stats* stats) { return true; } -void AdaptedVideoTrackSource::OnFrame(const webrtc::VideoFrame& frame) { - rtc::scoped_refptr buffer( - frame.video_frame_buffer()); +void AdaptedVideoTrackSource::OnFrame(const VideoFrame& frame) { + scoped_refptr 
buffer(frame.video_frame_buffer()); /* Note that this is a "best effort" approach to wants.rotation_applied; apply_rotation_ can change from false to true between the check of apply_rotation() and the call to @@ -49,11 +54,11 @@ void AdaptedVideoTrackSource::OnFrame(const webrtc::VideoFrame& frame) { synchronization for us in this case, by not passing the frame on to sinks which don't want it. */ if (apply_rotation() && frame.rotation() != webrtc::kVideoRotation_0 && - buffer->type() == webrtc::VideoFrameBuffer::Type::kI420) { + buffer->type() == VideoFrameBuffer::Type::kI420) { /* Apply pending rotation. */ - webrtc::VideoFrame rotated_frame(frame); + VideoFrame rotated_frame(frame); rotated_frame.set_video_frame_buffer( - webrtc::I420Buffer::Rotate(*buffer->GetI420(), frame.rotation())); + I420Buffer::Rotate(*buffer->GetI420(), frame.rotation())); rotated_frame.set_rotation(webrtc::kVideoRotation_0); broadcaster_.OnFrame(rotated_frame); } else { @@ -66,14 +71,13 @@ void AdaptedVideoTrackSource::OnFrameDropped() { } void AdaptedVideoTrackSource::AddOrUpdateSink( - rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { + VideoSinkInterface* sink, + const VideoSinkWants& wants) { broadcaster_.AddOrUpdateSink(sink, wants); OnSinkWantsChanged(broadcaster_.wants()); } -void AdaptedVideoTrackSource::RemoveSink( - rtc::VideoSinkInterface* sink) { +void AdaptedVideoTrackSource::RemoveSink(VideoSinkInterface* sink) { broadcaster_.RemoveSink(sink); OnSinkWantsChanged(broadcaster_.wants()); } @@ -82,8 +86,7 @@ bool AdaptedVideoTrackSource::apply_rotation() { return broadcaster_.wants().rotation_applied; } -void AdaptedVideoTrackSource::OnSinkWantsChanged( - const rtc::VideoSinkWants& wants) { +void AdaptedVideoTrackSource::OnSinkWantsChanged(const VideoSinkWants& wants) { video_adapter_.OnSinkWants(wants); } @@ -97,7 +100,7 @@ bool AdaptedVideoTrackSource::AdaptFrame(int width, int* crop_x, int* crop_y) { { - webrtc::MutexLock lock(&stats_mutex_); + MutexLock lock(&stats_mutex_); stats_ = Stats{width, height}; } @@ -106,7 +109,7 @@ bool AdaptedVideoTrackSource::AdaptFrame(int width, } if (!video_adapter_.AdaptFrameResolution( - width, height, time_us * rtc::kNumNanosecsPerMicrosec, crop_width, + width, height, time_us * webrtc::kNumNanosecsPerMicrosec, crop_width, crop_height, out_width, out_height)) { broadcaster_.OnDiscardedFrame(); // VideoAdapter dropped the frame. 
@@ -119,8 +122,8 @@ bool AdaptedVideoTrackSource::AdaptFrame(int width, } void AdaptedVideoTrackSource::ProcessConstraints( - const webrtc::VideoTrackSourceConstraints& constraints) { + const VideoTrackSourceConstraints& constraints) { broadcaster_.ProcessConstraints(constraints); } -} // namespace rtc +} // namespace webrtc diff --git a/media/base/adapted_video_track_source.h b/media/base/adapted_video_track_source.h index 1c3e0b68d3..74b3de4761 100644 --- a/media/base/adapted_video_track_source.h +++ b/media/base/adapted_video_track_source.h @@ -13,26 +13,29 @@ #include -#include "absl/types/optional.h" +#include + #include "api/media_stream_interface.h" #include "api/notifier.h" +#include "api/video/recordable_encoded_frame.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" +#include "api/video_track_source_constraints.h" #include "media/base/video_adapter.h" #include "media/base/video_broadcaster.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/thread_annotations.h" -namespace rtc { +namespace webrtc { // Base class for sources which needs video adaptation, e.g., video // capture sources. Sinks must be added and removed on one and only // one thread, while AdaptFrame and OnFrame may be called on any // thread. class RTC_EXPORT AdaptedVideoTrackSource - : public webrtc::Notifier { + : public Notifier { public: AdaptedVideoTrackSource(); ~AdaptedVideoTrackSource() override; @@ -44,7 +47,7 @@ class RTC_EXPORT AdaptedVideoTrackSource // Checks the apply_rotation() flag. If the frame needs rotation, and it is a // plain memory frame, it is rotated. Subclasses producing native frames must // handle apply_rotation() themselves. - void OnFrame(const webrtc::VideoFrame& frame); + void OnFrame(const VideoFrame& frame); // Indication from source that a frame was dropped. void OnFrameDropped(); @@ -68,37 +71,45 @@ class RTC_EXPORT AdaptedVideoTrackSource // become stale before it is used. bool apply_rotation(); - cricket::VideoAdapter* video_adapter() { return &video_adapter_; } + VideoAdapter* video_adapter() { return &video_adapter_; } private: - // Implements rtc::VideoSourceInterface. - void AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override; - void RemoveSink(rtc::VideoSinkInterface* sink) override; + // Implements webrtc::VideoSourceInterface. + void AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) override; + void RemoveSink(VideoSinkInterface* sink) override; // Part of VideoTrackSourceInterface. bool GetStats(Stats* stats) override; - void OnSinkWantsChanged(const rtc::VideoSinkWants& wants); + void OnSinkWantsChanged(const VideoSinkWants& wants); // Encoded sinks not implemented for AdaptedVideoTrackSource. 
bool SupportsEncodedOutput() const override { return false; } void GenerateKeyFrame() override {} void AddEncodedSink( - rtc::VideoSinkInterface* sink) override {} + VideoSinkInterface* /* sink */) override {} void RemoveEncodedSink( - rtc::VideoSinkInterface* sink) override {} + VideoSinkInterface* /* sink */) override {} void ProcessConstraints( - const webrtc::VideoTrackSourceConstraints& constraints) override; + const VideoTrackSourceConstraints& constraints) override; - cricket::VideoAdapter video_adapter_; + VideoAdapter video_adapter_; - webrtc::Mutex stats_mutex_; - absl::optional stats_ RTC_GUARDED_BY(stats_mutex_); + Mutex stats_mutex_; + std::optional stats_ RTC_GUARDED_BY(stats_mutex_); VideoBroadcaster broadcaster_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::AdaptedVideoTrackSource; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_ADAPTED_VIDEO_TRACK_SOURCE_H_ diff --git a/media/base/audio_source.h b/media/base/audio_source.h index 51fe0e13e1..ee05352f2d 100644 --- a/media/base/audio_source.h +++ b/media/base/audio_source.h @@ -12,10 +12,10 @@ #define MEDIA_BASE_AUDIO_SOURCE_H_ #include +#include +#include -#include "absl/types/optional.h" - -namespace cricket { +namespace webrtc { // Abstract interface for providing the audio data. // TODO(deadbeef): Rename this to AudioSourceInterface, and rename @@ -31,7 +31,7 @@ class AudioSource { int sample_rate, size_t number_of_channels, size_t number_of_frames, - absl::optional absolute_capture_timestamp_ms) = 0; + std::optional absolute_capture_timestamp_ms) = 0; // Called when the AudioSource is going away. virtual void OnClose() = 0; @@ -53,6 +53,14 @@ class AudioSource { virtual ~AudioSource() {} }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::AudioSource; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_AUDIO_SOURCE_H_ diff --git a/media/base/codec.cc b/media/base/codec.cc index 7ecf383d9f..b0b6a81cb9 100644 --- a/media/base/codec.cc +++ b/media/base/codec.cc @@ -10,59 +10,32 @@ #include "media/base/codec.h" +#include +#include +#include +#include +#include +#include + #include "absl/algorithm/container.h" #include "absl/strings/match.h" +#include "absl/strings/str_cat.h" #include "api/audio_codecs/audio_format.h" -#include "api/video_codecs/av1_profile.h" +#include "api/media_types.h" +#include "api/rtp_parameters.h" #include "api/video_codecs/h264_profile_level_id.h" -#include "api/video_codecs/vp9_profile.h" +#include "api/video_codecs/sdp_video_format.h" +#ifdef RTC_ENABLE_H265 +#include "api/video_codecs/h265_profile_tier_level.h" // IWYU pragma: keep +#endif +#include "media/base/codec_comparators.h" #include "media/base/media_constants.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" -namespace cricket { -namespace { - -std::string GetH264PacketizationModeOrDefault(const CodecParameterMap& params) { - auto it = params.find(kH264FmtpPacketizationMode); - if (it != params.end()) { - return it->second; - } - // If packetization-mode is not present, default to "0". - // https://tools.ietf.org/html/rfc6184#section-6.2 - return "0"; -} - -bool IsSameH264PacketizationMode(const CodecParameterMap& left, - const CodecParameterMap& right) { - return GetH264PacketizationModeOrDefault(left) == - GetH264PacketizationModeOrDefault(right); -} - -// Some (video) codecs are actually families of codecs and rely on parameters -// to distinguish different incompatible family members. -bool IsSameCodecSpecific(const std::string& name1, - const CodecParameterMap& params1, - const std::string& name2, - const CodecParameterMap& params2) { - // The names might not necessarily match, so check both. 
- auto either_name_matches = [&](const std::string name) { - return absl::EqualsIgnoreCase(name, name1) || - absl::EqualsIgnoreCase(name, name2); - }; - if (either_name_matches(kH264CodecName)) - return webrtc::H264IsSameProfile(params1, params2) && - IsSameH264PacketizationMode(params1, params2); - if (either_name_matches(kVp9CodecName)) - return webrtc::VP9IsSameProfile(params1, params2); - if (either_name_matches(kAv1CodecName)) - return webrtc::AV1IsSameProfile(params1, params2); - return true; -} - -} // namespace +namespace webrtc { FeedbackParams::FeedbackParams() = default; FeedbackParams::~FeedbackParams() = default; @@ -92,6 +65,15 @@ void FeedbackParams::Add(const FeedbackParam& param) { RTC_CHECK(!HasDuplicateEntries()); } +bool FeedbackParams::Remove(const FeedbackParam& param) { + if (!Has(param)) { + return false; + } + params_.erase(std::remove(params_.begin(), params_.end(), param), + params_.end()); + return true; +} + void FeedbackParams::Intersect(const FeedbackParams& from) { std::vector::iterator iter_to = params_.begin(); while (iter_to != params_.end()) { @@ -130,15 +112,15 @@ Codec::Codec(Type type, bitrate(0), channels(channels) {} -Codec::Codec(Type type) : Codec(type, 0, "", 0) {} +Codec::Codec(Type type) : Codec(type, kIdNotSet, "", 0) {} Codec::Codec(const webrtc::SdpAudioFormat& c) - : Codec(Type::kAudio, 0, c.name, c.clockrate_hz, c.num_channels) { + : Codec(Type::kAudio, kIdNotSet, c.name, c.clockrate_hz, c.num_channels) { params = c.parameters; } Codec::Codec(const webrtc::SdpVideoFormat& c) - : Codec(Type::kVideo, 0, c.name, kVideoCodecClockrate) { + : Codec(Type::kVideo, kIdNotSet, c.name, kVideoCodecClockrate) { params = c.parameters; scalability_modes = c.scalability_modes; } @@ -158,57 +140,8 @@ bool Codec::operator==(const Codec& c) const { : (packetization == c.packetization)); } -bool Codec::Matches(const Codec& codec, - const webrtc::FieldTrialsView* field_trials) const { - // Match the codec id/name based on the typical static/dynamic name rules. - // Matching is case-insensitive. - - // We support the ranges [96, 127] and more recently [35, 65]. - // https://www.iana.org/assignments/rtp-parameters/rtp-parameters.xhtml#rtp-parameters-1 - // Within those ranges we match by codec name, outside by codec id. - // Since no codecs are assigned an id in the range [66, 95] by us, these will - // never match. - const int kLowerDynamicRangeMin = 35; - const int kLowerDynamicRangeMax = 65; - const int kUpperDynamicRangeMin = 96; - const int kUpperDynamicRangeMax = 127; - const bool is_id_in_dynamic_range = - (id >= kLowerDynamicRangeMin && id <= kLowerDynamicRangeMax) || - (id >= kUpperDynamicRangeMin && id <= kUpperDynamicRangeMax); - const bool is_codec_id_in_dynamic_range = - (codec.id >= kLowerDynamicRangeMin && - codec.id <= kLowerDynamicRangeMax) || - (codec.id >= kUpperDynamicRangeMin && codec.id <= kUpperDynamicRangeMax); - bool matches_id = is_id_in_dynamic_range && is_codec_id_in_dynamic_range - ? (absl::EqualsIgnoreCase(name, codec.name)) - : (id == codec.id); - - auto matches_type_specific = [&]() { - switch (type) { - case Type::kAudio: - // If a nonzero clockrate is specified, it must match the actual - // clockrate. If a nonzero bitrate is specified, it must match the - // actual bitrate, unless the codec is VBR (0), where we just force the - // supplied value. 
The number of channels must match exactly, with the - // exception that channels=0 is treated synonymously as channels=1, per - // RFC 4566 section 6: " [The channels] parameter is OPTIONAL and may be - // omitted if the number of channels is one." - // Preference is ignored. - // TODO(juberti): Treat a zero clockrate as 8000Hz, the RTP default - // clockrate. - return ((codec.clockrate == 0 /*&& clockrate == 8000*/) || - clockrate == codec.clockrate) && - (codec.bitrate == 0 || bitrate <= 0 || - bitrate == codec.bitrate) && - ((codec.channels < 2 && channels < 2) || - channels == codec.channels); - - case Type::kVideo: - return IsSameCodecSpecific(name, params, codec.name, codec.params); - } - }; - - return matches_id && matches_type_specific(); +bool Codec::Matches(const Codec& codec) const { + return webrtc::MatchesWithCodecRules(*this, codec); } bool Codec::MatchesRtpCodec(const webrtc::RtpCodec& codec_capability) const { @@ -216,37 +149,37 @@ bool Codec::MatchesRtpCodec(const webrtc::RtpCodec& codec_capability) const { return codec_parameters.name == codec_capability.name && codec_parameters.kind == codec_capability.kind && - (codec_parameters.name == cricket::kRtxCodecName || - (codec_parameters.num_channels == codec_capability.num_channels && - codec_parameters.clock_rate == codec_capability.clock_rate && - codec_parameters.parameters == codec_capability.parameters)); + codec_parameters.num_channels == codec_capability.num_channels && + codec_parameters.clock_rate == codec_capability.clock_rate && + (codec_parameters.name == kRtxCodecName || + codec_parameters.parameters == codec_capability.parameters); } -bool Codec::GetParam(const std::string& name, std::string* out) const { - CodecParameterMap::const_iterator iter = params.find(name); +bool Codec::GetParam(const std::string& key, std::string* out) const { + webrtc::CodecParameterMap::const_iterator iter = params.find(key); if (iter == params.end()) return false; *out = iter->second; return true; } -bool Codec::GetParam(const std::string& name, int* out) const { - CodecParameterMap::const_iterator iter = params.find(name); +bool Codec::GetParam(const std::string& key, int* out) const { + webrtc::CodecParameterMap::const_iterator iter = params.find(key); if (iter == params.end()) return false; - return rtc::FromString(iter->second, out); + return webrtc::FromString(iter->second, out); } -void Codec::SetParam(const std::string& name, const std::string& value) { - params[name] = value; +void Codec::SetParam(const std::string& key, const std::string& value) { + params[key] = value; } -void Codec::SetParam(const std::string& name, int value) { - params[name] = rtc::ToString(value); +void Codec::SetParam(const std::string& key, int value) { + params[key] = absl::StrCat(value); } -bool Codec::RemoveParam(const std::string& name) { - return params.erase(name) == 1; +bool Codec::RemoveParam(const std::string& key) { + return params.erase(key) == 1; } void Codec::AddFeedbackParam(const FeedbackParam& param) { @@ -271,11 +204,11 @@ webrtc::RtpCodecParameters Codec::ToCodecParameters() const { switch (type) { case Type::kAudio: { codec_params.num_channels = static_cast(channels); - codec_params.kind = MEDIA_TYPE_AUDIO; + codec_params.kind = webrtc::MediaType::AUDIO; break; } case Type::kVideo: { - codec_params.kind = MEDIA_TYPE_VIDEO; + codec_params.kind = webrtc::MediaType::VIDEO; break; } } @@ -284,7 +217,8 @@ webrtc::RtpCodecParameters Codec::ToCodecParameters() const { } bool Codec::IsMediaCodec() const { - return !IsResiliencyCodec(); + 
return !IsResiliencyCodec() && + !absl::EqualsIgnoreCase(name, kComfortNoiseCodecName); } bool Codec::IsResiliencyCodec() const { @@ -331,7 +265,7 @@ bool Codec::ValidateCodecFormat() const { std::string Codec::ToString() const { char buf[256]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); switch (type) { case Type::kAudio: { sb << "AudioCodec[" << id << ":" << name << ":" << clockrate << ":" @@ -362,6 +296,14 @@ Codec CreateVideoRtxCodec(int rtx_payload_type, int associated_payload_type) { return rtx_codec; } +const Codec* FindCodecById(const std::vector& codecs, int payload_type) { + for (const auto& codec : codecs) { + if (codec.id == payload_type) + return &codec; + } + return nullptr; +} + bool HasLntf(const Codec& codec) { return codec.HasFeedbackParam( FeedbackParam(kRtcpFbParamLntf, kParamValueEmpty)); @@ -382,16 +324,10 @@ bool HasRrtr(const Codec& codec) { FeedbackParam(kRtcpFbParamRrtr, kParamValueEmpty)); } -bool HasTransportCc(const Codec& codec) { - return codec.HasFeedbackParam( - FeedbackParam(kRtcpFbParamTransportCc, kParamValueEmpty)); -} - -const VideoCodec* FindMatchingCodec( - const std::vector& supported_codecs, - const VideoCodec& codec) { +const Codec* FindMatchingVideoCodec(const std::vector& supported_codecs, + const Codec& codec) { webrtc::SdpVideoFormat sdp_video_format{codec.name, codec.params}; - for (const VideoCodec& supported_codec : supported_codecs) { + for (const Codec& supported_codec : supported_codecs) { if (sdp_video_format.IsSameCodec( {supported_codec.name, supported_codec.params})) { return &supported_codec; @@ -400,6 +336,19 @@ const VideoCodec* FindMatchingCodec( return nullptr; } +std::vector FindAllMatchingCodecs( + const std::vector& supported_codecs, + const Codec& codec) { + std::vector result; + webrtc::SdpVideoFormat sdp(codec.name, codec.params); + for (const Codec& supported_codec : supported_codecs) { + if (sdp.IsSameCodec({supported_codec.name, supported_codec.params})) { + result.push_back(&supported_codec); + } + } + return result; +} + // If a decoder supports any H264 profile, it is implicitly assumed to also // support constrained base line even though it's not explicitly listed. void AddH264ConstrainedBaselineProfileToSupportedFormats( @@ -410,8 +359,8 @@ void AddH264ConstrainedBaselineProfileToSupportedFormats( // profile. 
for (auto it = supported_formats->cbegin(); it != supported_formats->cend(); ++it) { - if (it->name == cricket::kH264CodecName) { - const absl::optional profile_level_id = + if (it->name == kH264CodecName) { + const std::optional profile_level_id = webrtc::ParseSdpForH264ProfileLevelId(it->parameters); if (profile_level_id && profile_level_id->profile != @@ -419,7 +368,7 @@ void AddH264ConstrainedBaselineProfileToSupportedFormats( webrtc::SdpVideoFormat cbp_format = *it; webrtc::H264ProfileLevelId cbp_profile = *profile_level_id; cbp_profile.profile = webrtc::H264Profile::kProfileConstrainedBaseline; - cbp_format.parameters[cricket::kH264FmtpProfileLevelId] = + cbp_format.parameters[kH264FmtpProfileLevelId] = *webrtc::H264ProfileLevelIdToString(cbp_profile); cbr_supported_formats.push_back(cbp_format); } @@ -435,8 +384,8 @@ void AddH264ConstrainedBaselineProfileToSupportedFormats( }); if (supported_formats->size() > original_size) { - RTC_LOG(LS_WARNING) << "Explicitly added H264 constrained baseline to list " - "of supported formats."; + RTC_LOG(LS_INFO) << "Explicitly added H264 constrained baseline to list " + "of supported formats."; } } @@ -452,7 +401,7 @@ Codec CreateAudioCodec(const webrtc::SdpAudioFormat& c) { } Codec CreateVideoCodec(const std::string& name) { - return CreateVideoCodec(0, name); + return CreateVideoCodec(Codec::kIdNotSet, name); } Codec CreateVideoCodec(int id, const std::string& name) { @@ -471,4 +420,10 @@ Codec CreateVideoCodec(const webrtc::SdpVideoFormat& c) { return Codec(c); } -} // namespace cricket +Codec CreateVideoCodec(int id, const webrtc::SdpVideoFormat& sdp) { + Codec c = CreateVideoCodec(sdp); + c.id = id; + return c; +} + +} // namespace webrtc diff --git a/media/base/codec.h b/media/base/codec.h index 5595708cfa..029b93c6ae 100644 --- a/media/base/codec.h +++ b/media/base/codec.h @@ -11,24 +11,23 @@ #ifndef MEDIA_BASE_CODEC_H_ #define MEDIA_BASE_CODEC_H_ -#include -#include +#include +#include #include #include +#include "absl/base/macros.h" #include "absl/container/inlined_vector.h" +#include "absl/strings/str_format.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/audio_codecs/audio_format.h" -#include "api/field_trials_view.h" #include "api/rtp_parameters.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/sdp_video_format.h" #include "media/base/media_constants.h" #include "rtc_base/system/rtc_export.h" -namespace cricket { - -typedef std::map CodecParameterMap; +namespace webrtc { class FeedbackParam { public: @@ -58,6 +57,7 @@ class FeedbackParams { bool Has(const FeedbackParam& param) const; void Add(const FeedbackParam& param); + bool Remove(const FeedbackParam& param); void Intersect(const FeedbackParams& from); @@ -82,6 +82,8 @@ struct RTC_EXPORT Codec { kFlexfec, kRtx, }; + // Value of "id" if it's not explicitly set. Exposed for tests. + static const int kIdNotSet = -1; Type type; int id; @@ -95,13 +97,18 @@ struct RTC_EXPORT Codec { size_t channels; // Video only - absl::optional packetization; + std::optional packetization; absl::InlinedVector scalability_modes; + // H.265 only + std::optional tx_mode; + // Non key-value parameters such as the telephone-event "0‐15" are // represented using an empty string as key, i.e. {"": "0-15"}. - CodecParameterMap params; + // The equivalent of fmtp in SDP. + webrtc::CodecParameterMap params; + // The equivalent of rtcp-fb in SDP. 
FeedbackParams feedback_params; Codec(const Codec& c); @@ -111,21 +118,37 @@ struct RTC_EXPORT Codec { // Indicates if this codec is compatible with the specified codec by // checking the assigned id and profile values for the relevant video codecs. - // H264 levels are not compared. - bool Matches(const Codec& codec, - const webrtc::FieldTrialsView* field_trials = nullptr) const; + // The rules for this comparison, in particular the parameters are + // codec-specific as described in RFC 3264 6.1: + // https://www.rfc-editor.org/rfc/rfc3264#section-6.1 + // For H.264, packetization modes will be compared. + // If H.265 is enabled, TxModes will be compared. + // H.264 (and H.265, if enabled) levels are not compared. + // In all other cases, parameters do not need to match. + // This is used in SDP offer/answer codec matching. + bool Matches(const Codec& codec) const; + + // This is an exact match similar to what is described in + // https://w3c.github.io/webrtc-pc/#dfn-codec-match + // with two differences: + // - rtx which is included in capabilities without the apt parameter + // so number of channels, clock rate or the equality of the parameters + // are not compared. + // - parameters is compared element-wise, not as a string comparison. + // This method should only be used to compare input on our end to something we + // generated, done e.g. by setCodecPreferences or setParameters. bool MatchesRtpCodec(const webrtc::RtpCodec& capability) const; - // Find the parameter for `name` and write the value to `out`. - bool GetParam(const std::string& name, std::string* out) const; - bool GetParam(const std::string& name, int* out) const; + // Find the parameter for `key` and write the value to `out`. + bool GetParam(const std::string& key, std::string* out) const; + bool GetParam(const std::string& key, int* out) const; - void SetParam(const std::string& name, const std::string& value); - void SetParam(const std::string& name, int value); + void SetParam(const std::string& key, const std::string& value); + void SetParam(const std::string& key, int value); // It is safe to input a non-existent parameter. // Returns true if the parameter existed, false if it did not exist. - bool RemoveParam(const std::string& name); + bool RemoveParam(const std::string& key); bool HasFeedbackParam(const FeedbackParam& param) const; void AddFeedbackParam(const FeedbackParam& param); @@ -156,6 +179,26 @@ struct RTC_EXPORT Codec { bool operator!=(const Codec& c) const { return !(*this == c); } + template + friend void AbslStringify(Sink& sink, const Codec& c) { + absl::Format(&sink, "[%d:", c.id); + switch (c.type) { + case Codec::Type::kAudio: + sink.Append("audio/"); + break; + case Codec::Type::kVideo: + sink.Append("video/"); + } + absl::Format(&sink, "%s/%d/%d", c.name, c.clockrate, c.channels); + for (auto param : c.params) { + sink.Append(";"); + sink.Append(param.first); + sink.Append("="); + sink.Append(param.second); + } + sink.Append("]"); + } + protected: // Creates an empty codec. explicit Codec(Type type); @@ -183,9 +226,7 @@ struct RTC_EXPORT Codec { int associated_payload_type); }; -// TODO(webrtc:15214): Compatibility names, to be migrated away and removed. 
-using VideoCodec = Codec; -using AudioCodec = Codec; +using Codecs = std::vector; Codec CreateAudioCodec(int id, const std::string& name, @@ -196,33 +237,123 @@ Codec CreateAudioRtxCodec(int rtx_payload_type, int associated_payload_type); Codec CreateVideoCodec(const std::string& name); Codec CreateVideoCodec(int id, const std::string& name); Codec CreateVideoCodec(const webrtc::SdpVideoFormat& c); +Codec CreateVideoCodec(int id, const webrtc::SdpVideoFormat& sdp); Codec CreateVideoRtxCodec(int rtx_payload_type, int associated_payload_type); // Get the codec setting associated with `payload_type`. If there // is no codec associated with that payload type it returns nullptr. -template -const Codec* FindCodecById(const std::vector& codecs, int payload_type) { - for (const auto& codec : codecs) { - if (codec.id == payload_type) - return &codec; - } - return nullptr; -} +const Codec* FindCodecById(const std::vector& codecs, int payload_type); bool HasLntf(const Codec& codec); bool HasNack(const Codec& codec); bool HasRemb(const Codec& codec); bool HasRrtr(const Codec& codec); -bool HasTransportCc(const Codec& codec); + // Returns the first codec in `supported_codecs` that matches `codec`, or // nullptr if no codec matches. -const VideoCodec* FindMatchingCodec( - const std::vector& supported_codecs, - const VideoCodec& codec); +const Codec* FindMatchingVideoCodec(const std::vector& supported_codecs, + const Codec& codec); + +// Returns all codecs in `supported_codecs` that matches `codec`. +std::vector FindAllMatchingCodecs( + const std::vector& supported_codecs, + const Codec& codec); RTC_EXPORT void AddH264ConstrainedBaselineProfileToSupportedFormats( std::vector* supported_formats); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { + +using FeedbackParam ABSL_DEPRECATE_AND_INLINE() = webrtc::FeedbackParam; +using FeedbackParams ABSL_DEPRECATE_AND_INLINE() = webrtc::FeedbackParams; +using Codec ABSL_DEPRECATE_AND_INLINE() = webrtc::Codec; +using Codecs ABSL_DEPRECATE_AND_INLINE() = webrtc::Codecs; + +// TODO(webrtc:15214): Compatibility names, to be migrated away and removed. 
+using VideoCodec ABSL_DEPRECATE_AND_INLINE() = webrtc::Codec; +using AudioCodec ABSL_DEPRECATE_AND_INLINE() = webrtc::Codec; +using VideoCodecs ABSL_DEPRECATE_AND_INLINE() = webrtc::Codecs; +using AudioCodecs ABSL_DEPRECATE_AND_INLINE() = webrtc::Codecs; +using ::webrtc::AddH264ConstrainedBaselineProfileToSupportedFormats; + +ABSL_DEPRECATE_AND_INLINE() +inline webrtc::Codec CreateAudioCodec(int id, + const std::string& name, + int clockrate, + size_t channels) { + return webrtc::CreateAudioCodec(id, name, clockrate, channels); +} +ABSL_DEPRECATE_AND_INLINE() +inline webrtc::Codec CreateAudioCodec(const webrtc::SdpAudioFormat& c) { + return webrtc::CreateAudioCodec(c); +} +ABSL_DEPRECATE_AND_INLINE() +inline webrtc::Codec CreateAudioRtxCodec(int rtx_payload_type, + int associated_payload_type) { + return webrtc::CreateAudioRtxCodec(rtx_payload_type, associated_payload_type); +} +ABSL_DEPRECATE_AND_INLINE() +inline webrtc::Codec CreateVideoCodec(const std::string& name) { + return webrtc::CreateVideoCodec(name); +} +ABSL_DEPRECATE_AND_INLINE() +inline webrtc::Codec CreateVideoCodec(int id, const std::string& name) { + return webrtc::CreateVideoCodec(id, name); +} +ABSL_DEPRECATE_AND_INLINE() +inline webrtc::Codec CreateVideoCodec(const webrtc::SdpVideoFormat& c) { + return webrtc::CreateVideoCodec(c); +} +ABSL_DEPRECATE_AND_INLINE() +inline webrtc::Codec CreateVideoCodec(int id, const webrtc::SdpVideoFormat& c) { + return webrtc::CreateVideoCodec(id, c); +} +ABSL_DEPRECATE_AND_INLINE() +inline webrtc::Codec CreateVideoRtxCodec(int rtx_payload_type, + int associated_payload_type) { + return webrtc::CreateVideoRtxCodec(rtx_payload_type, associated_payload_type); +} + +ABSL_DEPRECATE_AND_INLINE() +inline const webrtc::Codec* FindCodecById( + const std::vector& codecs, + int payload_type) { + return webrtc::FindCodecById(codecs, payload_type); +} + +ABSL_DEPRECATE_AND_INLINE() +inline bool HasLntf(const webrtc::Codec& codec) { + return webrtc::HasLntf(codec); +} +ABSL_DEPRECATE_AND_INLINE() +inline bool HasNack(const webrtc::Codec& codec) { + return webrtc::HasNack(codec); +} +ABSL_DEPRECATE_AND_INLINE() +inline bool HasRemb(const webrtc::Codec& codec) { + return webrtc::HasRemb(codec); +} +ABSL_DEPRECATE_AND_INLINE() inline bool HasRrtr(const webrtc::Codec& codec) { + return webrtc::HasRrtr(codec); +} +ABSL_DEPRECATE_AND_INLINE() +inline const webrtc::Codec* FindMatchingVideoCodec( + const webrtc::Codecs& supported_codecs, + const webrtc::Codec& codec) { + return webrtc::FindMatchingVideoCodec(supported_codecs, codec); +} +ABSL_DEPRECATE_AND_INLINE() +inline std::vector FindAllMatchingCodecs( + const webrtc::Codecs& supported_codecs, + const webrtc::Codec& codec) { + return webrtc::FindAllMatchingCodecs(supported_codecs, codec); +} } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_CODEC_H_ diff --git a/media/base/codec_comparators.cc b/media/base/codec_comparators.cc new file mode 100644 index 0000000000..3dbf316ebf --- /dev/null +++ b/media/base/codec_comparators.cc @@ -0,0 +1,415 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "media/base/codec_comparators.h" + +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/functional/any_invocable.h" +#include "absl/strings/match.h" +#include "absl/strings/string_view.h" +#include "api/media_types.h" +#include "api/rtp_parameters.h" +#include "api/video_codecs/av1_profile.h" +#include "api/video_codecs/h264_profile_level_id.h" +#ifdef RTC_ENABLE_H265 +#include "api/video_codecs/h265_profile_tier_level.h" +#endif +#include "api/video_codecs/vp9_profile.h" +#include "media/base/codec.h" +#include "media/base/media_constants.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/string_encode.h" + +namespace webrtc { + +namespace { + +// TODO(bugs.webrtc.org/15847): remove code duplication of IsSameCodecSpecific +// in api/video_codecs/sdp_video_format.cc +std::string GetFmtpParameterOrDefault(const CodecParameterMap& params, + const std::string& name, + const std::string& default_value) { + const auto it = params.find(name); + if (it != params.end()) { + return it->second; + } + return default_value; +} + +bool HasParameter(const CodecParameterMap& params, const std::string& name) { + return params.find(name) != params.end(); +} + +std::string H264GetPacketizationModeOrDefault(const CodecParameterMap& params) { + // If packetization-mode is not present, default to "0". + // https://tools.ietf.org/html/rfc6184#section-6.2 + return GetFmtpParameterOrDefault(params, kH264FmtpPacketizationMode, "0"); +} + +bool H264IsSamePacketizationMode(const CodecParameterMap& left, + const CodecParameterMap& right) { + return H264GetPacketizationModeOrDefault(left) == + H264GetPacketizationModeOrDefault(right); +} + +std::string AV1GetTierOrDefault(const CodecParameterMap& params) { + // If the parameter is not present, the tier MUST be inferred to be 0. + // https://aomediacodec.github.io/av1-rtp-spec/#72-sdp-parameters + return GetFmtpParameterOrDefault(params, kAv1FmtpTier, "0"); +} + +bool AV1IsSameTier(const CodecParameterMap& left, + const CodecParameterMap& right) { + return AV1GetTierOrDefault(left) == AV1GetTierOrDefault(right); +} + +std::string AV1GetLevelIdxOrDefault(const CodecParameterMap& params) { + // If the parameter is not present, it MUST be inferred to be 5 (level 3.1). + // https://aomediacodec.github.io/av1-rtp-spec/#72-sdp-parameters + return GetFmtpParameterOrDefault(params, kAv1FmtpLevelIdx, "5"); +} + +bool AV1IsSameLevelIdx(const CodecParameterMap& left, + const CodecParameterMap& right) { + return AV1GetLevelIdxOrDefault(left) == AV1GetLevelIdxOrDefault(right); +} + +#ifdef RTC_ENABLE_H265 +std::string GetH265TxModeOrDefault(const CodecParameterMap& params) { + // If TxMode is not present, a value of "SRST" must be inferred. + // https://tools.ietf.org/html/rfc7798@section-7.1 + return GetFmtpParameterOrDefault(params, kH265FmtpTxMode, "SRST"); +} + +bool IsSameH265TxMode(const CodecParameterMap& left, + const CodecParameterMap& right) { + return absl::EqualsIgnoreCase(GetH265TxModeOrDefault(left), + GetH265TxModeOrDefault(right)); +} +#endif + +// Some (video) codecs are actually families of codecs and rely on parameters +// to distinguish different incompatible family members. +bool IsSameCodecSpecific(const std::string& name1, + const CodecParameterMap& params1, + const std::string& name2, + const CodecParameterMap& params2) { + // The names might not necessarily match, so check both. 
+ auto either_name_matches = [&](const std::string name) { + return absl::EqualsIgnoreCase(name, name1) || + absl::EqualsIgnoreCase(name, name2); + }; + if (either_name_matches(kH264CodecName)) + return H264IsSameProfile(params1, params2) && + H264IsSamePacketizationMode(params1, params2); + if (either_name_matches(kVp9CodecName)) + return VP9IsSameProfile(params1, params2); + if (either_name_matches(kAv1CodecName)) + return AV1IsSameProfile(params1, params2) && + AV1IsSameTier(params1, params2) && + AV1IsSameLevelIdx(params1, params2); +#ifdef RTC_ENABLE_H265 + if (either_name_matches(kH265CodecName)) { + return H265IsSameProfile(params1, params2) && + H265IsSameTier(params1, params2) && + IsSameH265TxMode(params1, params2); + } +#endif + return true; +} + +bool ReferencedCodecsMatch(const std::vector& codecs1, + const int codec1_id, + const std::vector& codecs2, + const int codec2_id) { + const Codec* codec1 = FindCodecById(codecs1, codec1_id); + const Codec* codec2 = FindCodecById(codecs2, codec2_id); + return codec1 != nullptr && codec2 != nullptr && codec1->Matches(*codec2); +} + +bool MatchesWithReferenceAttributesAndComparator( + const Codec& codec_to_match, + const Codec& potential_match, + absl::AnyInvocable reference_comparator) { + if (!MatchesWithCodecRules(codec_to_match, potential_match)) { + return false; + } + Codec::ResiliencyType resiliency_type = codec_to_match.GetResiliencyType(); + if (resiliency_type == Codec::ResiliencyType::kRtx) { + int apt_value_1 = 0; + int apt_value_2 = 0; + if (!codec_to_match.GetParam(kCodecParamAssociatedPayloadType, + &apt_value_1) || + !potential_match.GetParam(kCodecParamAssociatedPayloadType, + &apt_value_2)) { + RTC_LOG(LS_WARNING) << "RTX missing associated payload type."; + return false; + } + if (reference_comparator(apt_value_1, apt_value_2)) { + return true; + } + return false; + } + if (resiliency_type == Codec::ResiliencyType::kRed) { + auto red_parameters_1 = + codec_to_match.params.find(kCodecParamNotInNameValueFormat); + auto red_parameters_2 = + potential_match.params.find(kCodecParamNotInNameValueFormat); + bool has_parameters_1 = red_parameters_1 != codec_to_match.params.end(); + bool has_parameters_2 = red_parameters_2 != potential_match.params.end(); + // If codec_to_match has unassigned PT and no parameter, + // we assume that it'll be assigned later and return a match. + // Note - this should be deleted. It's untidy. + if (potential_match.id == Codec::kIdNotSet && !has_parameters_2) { + return true; + } + if (codec_to_match.id == Codec::kIdNotSet && !has_parameters_1) { + return true; + } + if (has_parameters_1 && has_parameters_2) { + // Different levels of redundancy between offer and answer are OK + // since RED is considered to be declarative. + std::vector redundant_payloads_1 = + split(red_parameters_1->second, '/'); + std::vector redundant_payloads_2 = + split(red_parameters_2->second, '/'); + // note: webrtc::split returns at least 1 string even on empty strings. + size_t smallest_size = + std::min(redundant_payloads_1.size(), redundant_payloads_2.size()); + // If the smaller list is equivalent to the longer list, we consider them + // equivalent even if size differs. + for (size_t i = 0; i < smallest_size; i++) { + int red_value_1; + int red_value_2; + if (FromString(redundant_payloads_1[i], &red_value_1) && + FromString(redundant_payloads_2[i], &red_value_2)) { + if (!reference_comparator(red_value_1, red_value_2)) { + return false; + } + } else { + // At least one parameter was not an integer. 
+ // This is a syntax error, but we allow it here if the whole parameter + // equals the other parameter, in order to not generate more errors + // by duplicating the bad parameter. + return red_parameters_1->second == red_parameters_2->second; + } + } + return true; + } + if (!has_parameters_1 && !has_parameters_2) { + // Both parameters are missing. Happens for video RED. + return true; + } + return false; + } + return true; // Not a codec with a PT-valued reference. +} + +CodecParameterMap InsertDefaultParams(const std::string& name, + const CodecParameterMap& params) { + CodecParameterMap updated_params = params; + if (absl::EqualsIgnoreCase(name, kVp9CodecName)) { + if (!HasParameter(params, kVP9FmtpProfileId)) { + if (std::optional default_profile = + ParseSdpForVP9Profile({})) { + updated_params.insert( + {kVP9FmtpProfileId, VP9ProfileToString(*default_profile)}); + } + } + } + if (absl::EqualsIgnoreCase(name, kAv1CodecName)) { + if (!HasParameter(params, kAv1FmtpProfile)) { + if (std::optional default_profile = + ParseSdpForAV1Profile({})) { + updated_params.insert( + {kAv1FmtpProfile, AV1ProfileToString(*default_profile).data()}); + } + } + if (!HasParameter(params, kAv1FmtpTier)) { + updated_params.insert({kAv1FmtpTier, AV1GetTierOrDefault({})}); + } + if (!HasParameter(params, kAv1FmtpLevelIdx)) { + updated_params.insert({kAv1FmtpLevelIdx, AV1GetLevelIdxOrDefault({})}); + } + } + if (absl::EqualsIgnoreCase(name, kH264CodecName)) { + if (!HasParameter(params, kH264FmtpPacketizationMode)) { + updated_params.insert( + {kH264FmtpPacketizationMode, H264GetPacketizationModeOrDefault({})}); + } + } +#ifdef RTC_ENABLE_H265 + if (absl::EqualsIgnoreCase(name, kH265CodecName)) { + if (std::optional default_params = + ParseSdpForH265ProfileTierLevel({})) { + if (!HasParameter(params, kH265FmtpProfileId)) { + updated_params.insert( + {kH265FmtpProfileId, H265ProfileToString(default_params->profile)}); + } + if (!HasParameter(params, kH265FmtpLevelId)) { + updated_params.insert( + {kH265FmtpLevelId, H265LevelToString(default_params->level)}); + } + if (!HasParameter(params, kH265FmtpTierFlag)) { + updated_params.insert( + {kH265FmtpTierFlag, H265TierToString(default_params->tier)}); + } + } + if (!HasParameter(params, kH265FmtpTxMode)) { + updated_params.insert({kH265FmtpTxMode, GetH265TxModeOrDefault({})}); + } + } +#endif + return updated_params; +} + +} // namespace + +bool MatchesWithCodecRules(const Codec& left_codec, const Codec& right_codec) { + // Match the codec id/name based on the typical static/dynamic name rules. + // Matching is case-insensitive. + + // We support the ranges [96, 127] and more recently [35, 65]. + // https://www.iana.org/assignments/rtp-parameters/rtp-parameters.xhtml#rtp-parameters-1 + // Within those ranges we match by codec name, outside by codec id. + // We also match by name if either ID is unassigned. + // Since no codecs are assigned an id in the range [66, 95] by us, these will + // never match. 
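+  // For example, two codecs with dynamic ids 100 and 101 match if both are
+  // named "VP8" (case-insensitively), while codecs with static ids such as
+  // 0 or 34 are matched on the id itself and the name is ignored.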
+ const int kLowerDynamicRangeMin = 35; + const int kLowerDynamicRangeMax = 65; + const int kUpperDynamicRangeMin = 96; + const int kUpperDynamicRangeMax = 127; + const bool is_id_in_dynamic_range = + (left_codec.id >= kLowerDynamicRangeMin && + left_codec.id <= kLowerDynamicRangeMax) || + (left_codec.id >= kUpperDynamicRangeMin && + left_codec.id <= kUpperDynamicRangeMax); + const bool is_codec_id_in_dynamic_range = + (right_codec.id >= kLowerDynamicRangeMin && + right_codec.id <= kLowerDynamicRangeMax) || + (right_codec.id >= kUpperDynamicRangeMin && + right_codec.id <= kUpperDynamicRangeMax); + bool matches_id; + if ((is_id_in_dynamic_range && is_codec_id_in_dynamic_range) || + left_codec.id == Codec::kIdNotSet || right_codec.id == Codec::kIdNotSet) { + matches_id = absl::EqualsIgnoreCase(left_codec.name, right_codec.name); + } else { + matches_id = (left_codec.id == right_codec.id); + } + + auto matches_type_specific = [&]() { + switch (left_codec.type) { + case Codec::Type::kAudio: + // If a nonzero clockrate is specified, it must match the actual + // clockrate. If a nonzero bitrate is specified, it must match the + // actual bitrate, unless the codec is VBR (0), where we just force the + // supplied value. The number of channels must match exactly, with the + // exception that channels=0 is treated synonymously as channels=1, per + // RFC 4566 section 6: " [The channels] parameter is OPTIONAL and may be + // omitted if the number of channels is one." + // Preference is ignored. + // TODO(juberti): Treat a zero clockrate as 8000Hz, the RTP default + // clockrate. + return ((right_codec.clockrate == 0 /*&& clockrate == 8000*/) || + left_codec.clockrate == right_codec.clockrate) && + (right_codec.bitrate == 0 || left_codec.bitrate <= 0 || + left_codec.bitrate == right_codec.bitrate) && + ((right_codec.channels < 2 && left_codec.channels < 2) || + left_codec.channels == right_codec.channels); + + case Codec::Type::kVideo: + return IsSameCodecSpecific(left_codec.name, left_codec.params, + right_codec.name, right_codec.params); + } + }; + + return matches_id && matches_type_specific(); +} + +bool MatchesWithReferenceAttributes(const Codec& codec1, const Codec& codec2) { + return MatchesWithReferenceAttributesAndComparator( + codec1, codec2, [](int a, int b) { return a == b; }); +} + +// Finds a codec in `codecs2` that matches `codec_to_match`, which is +// a member of `codecs1`. If `codec_to_match` is an RED or RTX codec, both +// the codecs themselves and their associated codecs must match. +std::optional FindMatchingCodec(const std::vector& codecs1, + const std::vector& codecs2, + const Codec& codec_to_match) { + // `codec_to_match` should be a member of `codecs1`, in order to look up + // RED/RTX codecs' associated codecs correctly. If not, that's a programming + // error. 
+ RTC_DCHECK(absl::c_any_of(codecs1, [&codec_to_match](const Codec& codec) { + return &codec == &codec_to_match; + })); + for (const Codec& potential_match : codecs2) { + if (MatchesWithReferenceAttributesAndComparator( + codec_to_match, potential_match, + [&codecs1, &codecs2](int a, int b) { + return ReferencedCodecsMatch(codecs1, a, codecs2, b); + })) { + return potential_match; + } + } + return std::nullopt; +} + +bool IsSameRtpCodec(const Codec& codec, const RtpCodec& rtp_codec) { + RtpCodecParameters rtp_codec2 = codec.ToCodecParameters(); + + return absl::EqualsIgnoreCase(rtp_codec.name, rtp_codec2.name) && + rtp_codec.kind == rtp_codec2.kind && + rtp_codec.num_channels == rtp_codec2.num_channels && + rtp_codec.clock_rate == rtp_codec2.clock_rate && + InsertDefaultParams(rtp_codec.name, rtp_codec.parameters) == + InsertDefaultParams(rtp_codec2.name, rtp_codec2.parameters); +} + +bool IsSameRtpCodecIgnoringLevel(const Codec& codec, + const RtpCodec& rtp_codec) { + RtpCodecParameters rtp_codec2 = codec.ToCodecParameters(); + + if (!absl::EqualsIgnoreCase(rtp_codec.name, rtp_codec2.name) || + rtp_codec.kind != rtp_codec2.kind || + rtp_codec.num_channels != rtp_codec2.num_channels || + rtp_codec.clock_rate != rtp_codec2.clock_rate) { + return false; + } + + CodecParameterMap params1 = + InsertDefaultParams(rtp_codec.name, rtp_codec.parameters); + CodecParameterMap params2 = + InsertDefaultParams(rtp_codec2.name, rtp_codec2.parameters); + + // Some video codecs are compatible with others (e.g. same profile but + // different level). This comparison looks at the relevant parameters, + // ignoring ones that are either irrelevant or unrecognized. + if (rtp_codec.kind == webrtc::MediaType::VIDEO && rtp_codec.IsMediaCodec()) { + return IsSameCodecSpecific(rtp_codec.name, params1, rtp_codec2.name, + params2); + } + // audio/RED should ignore the parameters which specify payload types so + // can not be compared. + if (rtp_codec.kind == webrtc::MediaType::AUDIO && + rtp_codec.name == kRedCodecName) { + return true; + } + + return params1 == params2; +} + +} // namespace webrtc diff --git a/media/base/codec_comparators.h b/media/base/codec_comparators.h new file mode 100644 index 0000000000..1d242b119d --- /dev/null +++ b/media/base/codec_comparators.h @@ -0,0 +1,49 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MEDIA_BASE_CODEC_COMPARATORS_H_ +#define MEDIA_BASE_CODEC_COMPARATORS_H_ + +#include +#include + +#include "api/rtp_parameters.h" +#include "media/base/codec.h" + +namespace webrtc { + +// Comparison used for the Codec::Matches function +bool MatchesWithCodecRules(const Codec& left_codec, const Codec& codec); + +// Comparison that also checks on codecs referenced by PT in the +// fmtp line, as used with RED and RTX "codecs". +bool MatchesWithReferenceAttributes(const Codec& left_codec, + const Codec& right_codec); + +// Finds a codec in `codecs2` that matches `codec_to_match`, which is +// a member of `codecs1`. If `codec_to_match` is an RED or RTX codec, both +// the codecs themselves and their associated codecs must match. 
+// The purpose of this function is that codecs1 and codecs2 are different +// PT numbering spaces, and it is trying to find the codec in codecs2 +// that has the same functionality as `codec_to_match` so that its PT +// can be used in place of the original. +std::optional FindMatchingCodec(const std::vector& codecs1, + const std::vector& codecs2, + const Codec& codec_to_match); + +// Similar to `Codec::MatchesRtpCodec` but not an exact match of parameters. +// Unspecified parameters are treated as default. +bool IsSameRtpCodec(const Codec& codec, const RtpCodec& rtp_codec); + +// Similar to `IsSameRtpCodec` but ignoring the level related parameter. +bool IsSameRtpCodecIgnoringLevel(const Codec& codec, const RtpCodec& rtp_codec); +} // namespace webrtc + +#endif // MEDIA_BASE_CODEC_COMPARATORS_H_ diff --git a/media/base/codec_comparators_unittest.cc b/media/base/codec_comparators_unittest.cc new file mode 100644 index 0000000000..214bff85a7 --- /dev/null +++ b/media/base/codec_comparators_unittest.cc @@ -0,0 +1,633 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "media/base/codec_comparators.h" + +#include + +#include "api/audio_codecs/audio_format.h" +#include "api/rtp_parameters.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/vp9_profile.h" +#include "media/base/codec.h" +#include "media/base/media_constants.h" +#include "test/gtest.h" + +namespace webrtc { + +using ::testing::TestWithParam; +using ::testing::ValuesIn; + +TEST(CodecComparatorsTest, CodecMatchesItself) { + Codec codec = CreateVideoCodec("custom"); + EXPECT_TRUE(MatchesWithCodecRules(codec, codec)); +} + +TEST(CodecComparatorsTest, MismatchedBasicParameters) { + Codec codec = CreateAudioCodec(SdpAudioFormat("opus", 48000, 2)); + Codec nonmatch_codec = codec; + nonmatch_codec.name = "g711"; + EXPECT_FALSE(MatchesWithCodecRules(nonmatch_codec, codec)); + nonmatch_codec = codec; + nonmatch_codec.clockrate = 8000; + EXPECT_FALSE(MatchesWithCodecRules(nonmatch_codec, codec)); + nonmatch_codec = codec; + nonmatch_codec.channels = 1; + EXPECT_FALSE(MatchesWithCodecRules(nonmatch_codec, codec)); +} + +TEST(CodecComparatorsTest, H264PacketizationModeMismatch) { + Codec pt_mode_1 = CreateVideoCodec(kH264CodecName); + Codec pt_mode_0 = pt_mode_1; + pt_mode_0.SetParam(kH264FmtpPacketizationMode, "0"); + EXPECT_FALSE(MatchesWithCodecRules(pt_mode_1, pt_mode_0)); + EXPECT_FALSE(MatchesWithCodecRules(pt_mode_0, pt_mode_1)); + Codec no_pt_mode = pt_mode_1; + no_pt_mode.RemoveParam(kH264FmtpPacketizationMode); + EXPECT_TRUE(MatchesWithCodecRules(pt_mode_0, no_pt_mode)); + EXPECT_TRUE(MatchesWithCodecRules(no_pt_mode, pt_mode_0)); + EXPECT_FALSE(MatchesWithCodecRules(no_pt_mode, pt_mode_1)); +} + +TEST(CodecComparatorsTest, AudioParametersIgnored) { + // Currently, all parameters on audio codecs are ignored for matching. 
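+  // Parameter-sensitive audio comparisons are instead done at the RtpCodec
+  // level, e.g. by MatchesRtpCodec and IsSameRtpCodec.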
+ Codec basic_opus = CreateAudioCodec(SdpAudioFormat("opus", 48000, 2)); + Codec opus_with_parameters = basic_opus; + opus_with_parameters.SetParam("stereo", "0"); + EXPECT_TRUE(MatchesWithCodecRules(basic_opus, opus_with_parameters)); + EXPECT_TRUE(MatchesWithCodecRules(opus_with_parameters, basic_opus)); + opus_with_parameters.SetParam("nonsense", "stuff"); + EXPECT_TRUE(MatchesWithCodecRules(basic_opus, opus_with_parameters)); + EXPECT_TRUE(MatchesWithCodecRules(opus_with_parameters, basic_opus)); +} + +TEST(CodecComparatorsTest, StaticPayloadTypesIgnoreName) { + // This is the IANA registered format for PT 8 + Codec codec_1 = CreateAudioCodec(8, "pcma", 8000, 1); + Codec codec_2 = CreateAudioCodec(8, "nonsense", 8000, 1); + EXPECT_TRUE(MatchesWithCodecRules(codec_1, codec_2)); +} + +TEST(CodecComparatorsTest, MatchesWithReferenceAttributesRed) { + // Test that RED codecs' reference attributes get parsed correctly. + Codec codec_1 = CreateAudioCodec(101, kRedCodecName, 48000, 2); + codec_1.SetParam(kCodecParamNotInNameValueFormat, "100/100"); + Codec codec_2 = CreateAudioCodec(102, kRedCodecName, 48000, 2); + codec_2.SetParam(kCodecParamNotInNameValueFormat, "101/101"); + // Mixed codecs in RED + Codec codec_3 = CreateAudioCodec(103, kRedCodecName, 48000, 2); + codec_3.SetParam(kCodecParamNotInNameValueFormat, "100/101"); + // Identical codecs always match. + EXPECT_TRUE(MatchesWithReferenceAttributes(codec_1, codec_1)); + EXPECT_TRUE(MatchesWithReferenceAttributes(codec_2, codec_2)); + EXPECT_TRUE(MatchesWithReferenceAttributes(codec_3, codec_3)); + // Mismatched reference codec lists. + EXPECT_FALSE(MatchesWithReferenceAttributes(codec_1, codec_2)); + EXPECT_FALSE(MatchesWithReferenceAttributes(codec_1, codec_3)); + EXPECT_FALSE(MatchesWithReferenceAttributes(codec_2, codec_3)); + // Overflow of longer lists are ignored. + // Overlong list - overflow should be ignored. + Codec codec_4 = CreateAudioCodec(103, kRedCodecName, 48000, 2); + codec_4.SetParam(kCodecParamNotInNameValueFormat, "100/100/101/102"); + EXPECT_TRUE(MatchesWithReferenceAttributes(codec_4, codec_4)); + EXPECT_TRUE(MatchesWithReferenceAttributes(codec_1, codec_4)); + // Broken syntax will cause a non-match with anything except itself. 
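+  // (With an empty fmtp value the payload type list cannot be parsed, so the
+  // comparator falls back to an exact string comparison of the parameter.)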
+ Codec codec_5 = CreateAudioCodec(103, kRedCodecName, 48000, 2); + codec_5.SetParam(kCodecParamNotInNameValueFormat, ""); + EXPECT_TRUE(MatchesWithReferenceAttributes(codec_5, codec_5)); + EXPECT_FALSE(MatchesWithReferenceAttributes(codec_1, codec_5)); +} + +struct TestParams { + std::string name; + SdpVideoFormat codec1; + SdpVideoFormat codec2; + bool expected_result; +}; + +using IsSameRtpCodecTest = TestWithParam; + +TEST_P(IsSameRtpCodecTest, IsSameRtpCodec) { + TestParams param = GetParam(); + Codec codec1 = CreateVideoCodec(param.codec1); + Codec codec2 = CreateVideoCodec(param.codec2); + + EXPECT_EQ(IsSameRtpCodec(codec1, codec2.ToCodecParameters()), + param.expected_result); +} + +INSTANTIATE_TEST_SUITE_P( + CodecTest, + IsSameRtpCodecTest, + ValuesIn({ + {.name = "CodecWithDifferentName", + .codec1 = {"VP9", {}}, + .codec2 = {"VP8", {}}, + .expected_result = false}, + {.name = "Vp8WithoutParameters", + .codec1 = {"vp8", {}}, + .codec2 = {"VP8", {}}, + .expected_result = true}, + {.name = "Vp8WithSameParameters", + .codec1 = {"VP8", {{"x", "1"}}}, + .codec2 = {"VP8", {{"x", "1"}}}, + .expected_result = true}, + {.name = "Vp8WithDifferentParameters", + .codec1 = {"VP8", {}}, + .codec2 = {"VP8", {{"x", "1"}}}, + .expected_result = false}, + {.name = "Av1WithoutParameters", + .codec1 = {"AV1", {}}, + .codec2 = {"AV1", {}}, + .expected_result = true}, + {.name = "Av1WithSameProfile", + .codec1 = {"AV1", SdpVideoFormat::AV1Profile0().parameters}, + .codec2 = {"AV1", SdpVideoFormat::AV1Profile0().parameters}, + .expected_result = true}, + {.name = "Av1WithoutParametersTreatedAsProfile0", + .codec1 = {"AV1", SdpVideoFormat::AV1Profile0().parameters}, + .codec2 = {"AV1", {}}, + .expected_result = true}, + {.name = "Av1WithoutProfileTreatedAsProfile0", + .codec1 = {"AV1", {{kAv1FmtpProfile, "0"}, {"x", "1"}}}, + .codec2 = {"AV1", {{"x", "1"}}}, + .expected_result = true}, + {.name = "Av1WithDifferentProfile", + .codec1 = {"AV1", SdpVideoFormat::AV1Profile0().parameters}, + .codec2 = {"AV1", SdpVideoFormat::AV1Profile1().parameters}, + .expected_result = false}, + {.name = "Av1WithDifferentParameters", + .codec1 = {"AV1", {{kAv1FmtpProfile, "0"}, {"x", "1"}}}, + .codec2 = {"AV1", {{kAv1FmtpProfile, "0"}, {"x", "2"}}}, + .expected_result = false}, + {.name = "Vp9WithSameProfile", + .codec1 = {"VP9", SdpVideoFormat::VP9Profile0().parameters}, + .codec2 = {"VP9", SdpVideoFormat::VP9Profile0().parameters}, + .expected_result = true}, + {.name = "Vp9WithoutProfileTreatedAsProfile0", + .codec1 = {"VP9", {{kVP9FmtpProfileId, "0"}, {"x", "1"}}}, + .codec2 = {"VP9", {{"x", "1"}}}, + .expected_result = true}, + {.name = "Vp9WithDifferentProfile", + .codec1 = {"VP9", SdpVideoFormat::VP9Profile0().parameters}, + .codec2 = {"VP9", SdpVideoFormat::VP9Profile1().parameters}, + .expected_result = false}, + {.name = "H264WithSamePacketizationMode", + .codec1 = {"H264", {{kH264FmtpPacketizationMode, "0"}}}, + .codec2 = {"H264", {{kH264FmtpPacketizationMode, "0"}}}, + .expected_result = true}, + {.name = "H264WithoutPacketizationModeTreatedAsMode0", + .codec1 = {"H264", {{kH264FmtpPacketizationMode, "0"}, {"x", "1"}}}, + .codec2 = {"H264", {{"x", "1"}}}, + .expected_result = true}, + {.name = "H264WithDifferentPacketizationMode", + .codec1 = {"H264", {{kH264FmtpPacketizationMode, "0"}}}, + .codec2 = {"H264", {{kH264FmtpPacketizationMode, "1"}}}, + .expected_result = false}, +#ifdef RTC_ENABLE_H265 + {.name = "H265WithSameProfile", + .codec1 = {"H265", + {{kH265FmtpProfileId, "1"}, + {kH265FmtpTierFlag, "0"}, 
+ {kH265FmtpLevelId, "93"}, + {kH265FmtpTxMode, "SRST"}}}, + .codec2 = {"H265", + {{kH265FmtpProfileId, "1"}, + {kH265FmtpTierFlag, "0"}, + {kH265FmtpLevelId, "93"}, + {kH265FmtpTxMode, "SRST"}}}, + .expected_result = true}, + {.name = "H265WithoutParametersTreatedAsDefault", + .codec1 = {"H265", + {{kH265FmtpProfileId, "1"}, + {kH265FmtpTierFlag, "0"}, + {kH265FmtpLevelId, "93"}, + {kH265FmtpTxMode, "SRST"}}}, + .codec2 = {"H265", {}}, + .expected_result = true}, + {.name = "H265WithDifferentProfile", + .codec1 = {"H265", + {{kH265FmtpProfileId, "1"}, + {kH265FmtpTierFlag, "0"}, + {kH265FmtpLevelId, "93"}, + {kH265FmtpTxMode, "SRST"}}}, + .codec2 = {"H265", + {{kH265FmtpProfileId, "1"}, + {kH265FmtpTierFlag, "1"}, + {kH265FmtpLevelId, "93"}, + {kH265FmtpTxMode, "SRST"}}}, + .expected_result = false}, +#endif + }), + [](const testing::TestParamInfo& info) { + return info.param.name; + }); + +// For H264, the profile and level IDs are entangled into a single +// "profile-level-id" attribute, so let's test many different versions. +// See https://cconcolato.github.io/media-mime-support/ for inspiration. +TEST(IsSameRtpCodecIgnoringLevelTest, IgnoresH264Levels) { + // AVC Baseline Level 3.1 + Codec baseline_3_1 = + CreateVideoCodec(SdpVideoFormat("H264", + {{kH264FmtpLevelAsymmetryAllowed, "1"}, + {kH264FmtpPacketizationMode, "1"}, + {kH264FmtpProfileLevelId, "42001f"}}, + {ScalabilityMode::kL1T1})); + // AVC Baseline Level 5.2 + Codec baseline_5_2 = + CreateVideoCodec(SdpVideoFormat("H264", + {{kH264FmtpLevelAsymmetryAllowed, "1"}, + {kH264FmtpPacketizationMode, "1"}, + {kH264FmtpProfileLevelId, "420034"}}, + {ScalabilityMode::kL1T1})); + // AVC High Level 3.1 + Codec high_3_1 = + CreateVideoCodec(SdpVideoFormat("H264", + {{kH264FmtpLevelAsymmetryAllowed, "1"}, + {kH264FmtpPacketizationMode, "1"}, + {kH264FmtpProfileLevelId, "64001f"}}, + {ScalabilityMode::kL1T1})); + // AVC High Level 5.2 + Codec high_5_2 = + CreateVideoCodec(SdpVideoFormat("H264", + {{kH264FmtpLevelAsymmetryAllowed, "1"}, + {kH264FmtpPacketizationMode, "1"}, + {kH264FmtpProfileLevelId, "640034"}}, + {ScalabilityMode::kL1T1})); + // AVC High 4:4:4 Predictive Level 3.1 + Codec high_444_predictive_3_1 = + CreateVideoCodec(SdpVideoFormat("H264", + {{kH264FmtpLevelAsymmetryAllowed, "1"}, + {kH264FmtpPacketizationMode, "1"}, + {kH264FmtpProfileLevelId, "f4001f"}}, + {ScalabilityMode::kL1T1})); + + // AVC Baseline Level 5.2 is compatible with AVC Baseline Level 3.1. + EXPECT_TRUE(IsSameRtpCodecIgnoringLevel(baseline_5_2, + baseline_3_1.ToCodecParameters())); + // AVC High is NOT compatible with AVC Baseline. + EXPECT_FALSE( + IsSameRtpCodecIgnoringLevel(baseline_3_1, high_3_1.ToCodecParameters())); + EXPECT_FALSE( + IsSameRtpCodecIgnoringLevel(baseline_3_1, high_5_2.ToCodecParameters())); + EXPECT_FALSE( + IsSameRtpCodecIgnoringLevel(baseline_5_2, high_3_1.ToCodecParameters())); + EXPECT_FALSE( + IsSameRtpCodecIgnoringLevel(baseline_5_2, high_5_2.ToCodecParameters())); + // AVC High 5.2 is compatible with AVC High 3.1 + EXPECT_TRUE( + IsSameRtpCodecIgnoringLevel(high_5_2, high_3_1.ToCodecParameters())); + // 4:4:4 Predictive is NOT compatible with either High or Baseline. 
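+  // (Its profile-level-id starts with f4 rather than 42 or 64.)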
+ EXPECT_FALSE(IsSameRtpCodecIgnoringLevel(high_444_predictive_3_1, + high_3_1.ToCodecParameters())); + EXPECT_FALSE(IsSameRtpCodecIgnoringLevel(high_444_predictive_3_1, + high_5_2.ToCodecParameters())); + EXPECT_FALSE(IsSameRtpCodecIgnoringLevel(high_444_predictive_3_1, + baseline_3_1.ToCodecParameters())); + EXPECT_FALSE(IsSameRtpCodecIgnoringLevel(high_444_predictive_3_1, + baseline_3_1.ToCodecParameters())); +} + +#ifdef RTC_ENABLE_H265 +// For H265, the "profile-id" and "level-id" are separate so test can be simple. +// The level-id value for Level X.Y is calculated as (X * 10 + Y) * 3. +// The lowest Level, 1.0, is thus (1 * 10 + 0) * 3 = 30. +TEST(IsSameRtpCodecIgnoringLevelTest, IgnoresH265Levels) { + // Profile 1, Level 5.2 + Codec profile_1_level_5_2 = + CreateVideoCodec(SdpVideoFormat("H265", + {{kH265FmtpProfileId, "1"}, + {kH265FmtpTierFlag, "0"}, + {kH265FmtpLevelId, "156"}, + {kH265FmtpTxMode, "SRST"}}, + {ScalabilityMode::kL1T1})); + // Profile 1, Level 6.0 + Codec profile_1_level_6_0 = + CreateVideoCodec(SdpVideoFormat("H265", + {{kH265FmtpProfileId, "1"}, + {kH265FmtpTierFlag, "0"}, + {kH265FmtpLevelId, "180"}, + {kH265FmtpTxMode, "SRST"}}, + {ScalabilityMode::kL1T1})); + // Profile 2, Level 6.0 + Codec profile_2_level_6_0 = + CreateVideoCodec(SdpVideoFormat("H265", + {{kH265FmtpProfileId, "2"}, + {kH265FmtpTierFlag, "0"}, + {kH265FmtpLevelId, "180"}, + {kH265FmtpTxMode, "SRST"}}, + {ScalabilityMode::kL1T1})); + // Profile 1 codecs are compatible with each other. + EXPECT_TRUE(IsSameRtpCodecIgnoringLevel( + profile_1_level_5_2, profile_1_level_6_0.ToCodecParameters())); + // Profile 2 codecs are NOT compatible with profile 1 codecs. + EXPECT_FALSE(IsSameRtpCodecIgnoringLevel( + profile_2_level_6_0, profile_1_level_5_2.ToCodecParameters())); + EXPECT_FALSE(IsSameRtpCodecIgnoringLevel( + profile_2_level_6_0, profile_1_level_6_0.ToCodecParameters())); +} +#endif // RTC_ENABLE_H265 + +TEST(CodecTest, TestCodecMatches) { + // Test a codec with a static payload type. + Codec c0 = CreateAudioCodec(34, "A", 44100, 1); + EXPECT_TRUE(c0.Matches(CreateAudioCodec(34, "", 44100, 1))); + EXPECT_TRUE(c0.Matches(CreateAudioCodec(34, "", 44100, 0))); + EXPECT_TRUE(c0.Matches(CreateAudioCodec(34, "", 44100, 0))); + EXPECT_TRUE(c0.Matches(CreateAudioCodec(34, "", 0, 0))); + EXPECT_FALSE(c0.Matches(CreateAudioCodec(96, "A", 44100, 1))); + EXPECT_FALSE(c0.Matches(CreateAudioCodec(96, "", 44100, 1))); + EXPECT_FALSE(c0.Matches(CreateAudioCodec(95, "", 55100, 1))); + EXPECT_FALSE(c0.Matches(CreateAudioCodec(95, "", 44100, 1))); + EXPECT_FALSE(c0.Matches(CreateAudioCodec(95, "", 44100, 2))); + EXPECT_FALSE(c0.Matches(CreateAudioCodec(95, "", 55100, 2))); + + // Test a codec with a dynamic payload type. + Codec c1 = CreateAudioCodec(96, "A", 44100, 1); + EXPECT_TRUE(c1.Matches(CreateAudioCodec(96, "A", 0, 0))); + EXPECT_TRUE(c1.Matches(CreateAudioCodec(97, "A", 0, 0))); + EXPECT_TRUE(c1.Matches(CreateAudioCodec(96, "a", 0, 0))); + EXPECT_TRUE(c1.Matches(CreateAudioCodec(97, "a", 0, 0))); + EXPECT_TRUE(c1.Matches(CreateAudioCodec(35, "a", 0, 0))); + EXPECT_TRUE(c1.Matches(CreateAudioCodec(42, "a", 0, 0))); + EXPECT_TRUE(c1.Matches(CreateAudioCodec(65, "a", 0, 0))); + EXPECT_FALSE(c1.Matches(CreateAudioCodec(95, "A", 0, 0))); + EXPECT_FALSE(c1.Matches(CreateAudioCodec(34, "A", 0, 0))); + EXPECT_FALSE(c1.Matches(CreateAudioCodec(96, "", 44100, 2))); + EXPECT_FALSE(c1.Matches(CreateAudioCodec(96, "A", 55100, 1))); + + // Test a codec with a dynamic payload type, and auto bitrate. 
+ Codec c2 = CreateAudioCodec(97, "A", 16000, 1); + // Use default bitrate. + EXPECT_TRUE(c2.Matches(CreateAudioCodec(97, "A", 16000, 1))); + EXPECT_TRUE(c2.Matches(CreateAudioCodec(97, "A", 16000, 0))); + // Use explicit bitrate. + EXPECT_TRUE(c2.Matches(CreateAudioCodec(97, "A", 16000, 1))); + // Backward compatibility with clients that might send "-1" (for default). + EXPECT_TRUE(c2.Matches(CreateAudioCodec(97, "A", 16000, 1))); + + // Stereo doesn't match channels = 0. + Codec c3 = CreateAudioCodec(96, "A", 44100, 2); + EXPECT_TRUE(c3.Matches(CreateAudioCodec(96, "A", 44100, 2))); + EXPECT_FALSE(c3.Matches(CreateAudioCodec(96, "A", 44100, 1))); + EXPECT_FALSE(c3.Matches(CreateAudioCodec(96, "A", 44100, 0))); +} + +TEST(CodecTest, TestOpusAudioCodecWithDifferentParameters) { + Codec opus_with_fec = CreateAudioCodec(96, "opus", 48000, 2); + opus_with_fec.params["useinbandfec"] = "1"; + Codec opus_without_fec = CreateAudioCodec(96, "opus", 48000, 2); + + EXPECT_TRUE(opus_with_fec != opus_without_fec); + // Matches does not compare parameters for audio. + EXPECT_TRUE(opus_with_fec.Matches(opus_without_fec)); + + webrtc::RtpCodecParameters rtp_opus_with_fec = + opus_with_fec.ToCodecParameters(); + // MatchesRtpCodec takes parameters into account. + EXPECT_TRUE(opus_with_fec.MatchesRtpCodec(rtp_opus_with_fec)); + EXPECT_FALSE(opus_without_fec.MatchesRtpCodec(rtp_opus_with_fec)); +} + +TEST(CodecTest, TestVideoCodecMatches) { + // Test a codec with a static payload type. + Codec c0 = CreateVideoCodec(34, "V"); + EXPECT_TRUE(c0.Matches(CreateVideoCodec(34, ""))); + EXPECT_FALSE(c0.Matches(CreateVideoCodec(96, ""))); + EXPECT_FALSE(c0.Matches(CreateVideoCodec(96, "V"))); + + // Test a codec with a dynamic payload type. + Codec c1 = CreateVideoCodec(96, "V"); + EXPECT_TRUE(c1.Matches(CreateVideoCodec(96, "V"))); + EXPECT_TRUE(c1.Matches(CreateVideoCodec(97, "V"))); + EXPECT_TRUE(c1.Matches(CreateVideoCodec(96, "v"))); + EXPECT_TRUE(c1.Matches(CreateVideoCodec(97, "v"))); + EXPECT_TRUE(c1.Matches(CreateVideoCodec(35, "v"))); + EXPECT_TRUE(c1.Matches(CreateVideoCodec(42, "v"))); + EXPECT_TRUE(c1.Matches(CreateVideoCodec(65, "v"))); + EXPECT_FALSE(c1.Matches(CreateVideoCodec(96, ""))); + EXPECT_FALSE(c1.Matches(CreateVideoCodec(95, "V"))); + EXPECT_FALSE(c1.Matches(CreateVideoCodec(34, "V"))); +} + +TEST(CodecTest, TestVideoCodecMatchesWithDifferentPacketization) { + Codec c0 = CreateVideoCodec(100, kVp8CodecName); + Codec c1 = CreateVideoCodec(101, kVp8CodecName); + c1.packetization = "raw"; + + EXPECT_TRUE(c0.Matches(c1)); + EXPECT_TRUE(c1.Matches(c0)); +} + +// AV1 codecs compare profile information. +TEST(CodecTest, TestAV1CodecMatches) { + const char kProfile0[] = "0"; + const char kProfile1[] = "1"; + const char kProfile2[] = "2"; + + Codec c_no_profile = CreateVideoCodec(95, kAv1CodecName); + Codec c_profile0 = CreateVideoCodec(95, kAv1CodecName); + c_profile0.params[kAv1FmtpProfile] = kProfile0; + Codec c_profile1 = CreateVideoCodec(95, kAv1CodecName); + c_profile1.params[kAv1FmtpProfile] = kProfile1; + Codec c_profile2 = CreateVideoCodec(95, kAv1CodecName); + c_profile2.params[kAv1FmtpProfile] = kProfile2; + + // An AV1 entry with no profile specified should be treated as profile-0. + EXPECT_TRUE(c_profile0.Matches(c_no_profile)); + + { + // Two AV1 entries without a profile specified are treated as duplicates. 
+ Codec c_no_profile_eq = CreateVideoCodec(95, kAv1CodecName); + EXPECT_TRUE(c_no_profile.Matches(c_no_profile_eq)); + } + + { + // Two AV1 entries with profile 0 specified are treated as duplicates. + Codec c_profile0_eq = CreateVideoCodec(95, kAv1CodecName); + c_profile0_eq.params[kAv1FmtpProfile] = kProfile0; + EXPECT_TRUE(c_profile0.Matches(c_profile0_eq)); + } + + { + // Two AV1 entries with profile 1 specified are treated as duplicates. + Codec c_profile1_eq = CreateVideoCodec(95, kAv1CodecName); + c_profile1_eq.params[kAv1FmtpProfile] = kProfile1; + EXPECT_TRUE(c_profile1.Matches(c_profile1_eq)); + } + + // AV1 entries with different profiles (0 and 1) are seen as distinct. + EXPECT_FALSE(c_profile0.Matches(c_profile1)); + EXPECT_FALSE(c_no_profile.Matches(c_profile1)); + + // AV1 entries with different profiles (0 and 2) are seen as distinct. + EXPECT_FALSE(c_profile0.Matches(c_profile2)); + EXPECT_FALSE(c_no_profile.Matches(c_profile2)); +} + +// VP9 codecs compare profile information. +TEST(CodecTest, TestVP9CodecMatches) { + const char kProfile0[] = "0"; + const char kProfile2[] = "2"; + + Codec c_no_profile = CreateVideoCodec(95, kVp9CodecName); + Codec c_profile0 = CreateVideoCodec(95, kVp9CodecName); + c_profile0.params[webrtc::kVP9FmtpProfileId] = kProfile0; + + EXPECT_TRUE(c_profile0.Matches(c_no_profile)); + + { + Codec c_profile0_eq = CreateVideoCodec(95, kVp9CodecName); + c_profile0_eq.params[webrtc::kVP9FmtpProfileId] = kProfile0; + EXPECT_TRUE(c_profile0.Matches(c_profile0_eq)); + } + + { + Codec c_profile2 = CreateVideoCodec(95, kVp9CodecName); + c_profile2.params[webrtc::kVP9FmtpProfileId] = kProfile2; + EXPECT_FALSE(c_profile0.Matches(c_profile2)); + EXPECT_FALSE(c_no_profile.Matches(c_profile2)); + } + + { + Codec c_no_profile_eq = CreateVideoCodec(95, kVp9CodecName); + EXPECT_TRUE(c_no_profile.Matches(c_no_profile_eq)); + } +} + +// Matching H264 codecs also need to have matching profile-level-id and +// packetization-mode. +TEST(CodecTest, TestH264CodecMatches) { + const char kProfileLevelId1[] = "42e01f"; + const char kProfileLevelId2[] = "42a01e"; + const char kProfileLevelId3[] = "42e01e"; + + Codec pli_1_pm_0 = CreateVideoCodec(95, "H264"); + pli_1_pm_0.params[kH264FmtpProfileLevelId] = kProfileLevelId1; + pli_1_pm_0.params[kH264FmtpPacketizationMode] = "0"; + + { + Codec pli_1_pm_blank = CreateVideoCodec(95, "H264"); + pli_1_pm_blank.params[kH264FmtpProfileLevelId] = kProfileLevelId1; + pli_1_pm_blank.params.erase( + pli_1_pm_blank.params.find(kH264FmtpPacketizationMode)); + + // Matches since if packetization-mode is not specified it defaults to "0". + EXPECT_TRUE(pli_1_pm_0.Matches(pli_1_pm_blank)); + + // MatchesRtpCodec does exact comparison of parameters. + EXPECT_FALSE( + pli_1_pm_0.MatchesRtpCodec(pli_1_pm_blank.ToCodecParameters())); + } + + { + Codec pli_1_pm_1 = CreateVideoCodec(95, "H264"); + pli_1_pm_1.params[kH264FmtpProfileLevelId] = kProfileLevelId1; + pli_1_pm_1.params[kH264FmtpPacketizationMode] = "1"; + + // Does not match since packetization-mode is different. + EXPECT_FALSE(pli_1_pm_0.Matches(pli_1_pm_1)); + + EXPECT_FALSE(pli_1_pm_0.MatchesRtpCodec(pli_1_pm_1.ToCodecParameters())); + } + + { + Codec pli_2_pm_0 = CreateVideoCodec(95, "H264"); + pli_2_pm_0.params[kH264FmtpProfileLevelId] = kProfileLevelId2; + pli_2_pm_0.params[kH264FmtpPacketizationMode] = "0"; + + // Does not match since profile-level-id is different. 
+ EXPECT_FALSE(pli_1_pm_0.Matches(pli_2_pm_0)); + + EXPECT_FALSE(pli_1_pm_0.MatchesRtpCodec(pli_2_pm_0.ToCodecParameters())); + } + + { + Codec pli_3_pm_0_asym = CreateVideoCodec(95, "H264"); + pli_3_pm_0_asym.params[kH264FmtpProfileLevelId] = kProfileLevelId3; + pli_3_pm_0_asym.params[kH264FmtpPacketizationMode] = "0"; + + // Does match, profile-level-id is different but the level is not compared. + // and the profile matches. + EXPECT_TRUE(pli_1_pm_0.Matches(pli_3_pm_0_asym)); + + EXPECT_FALSE( + pli_1_pm_0.MatchesRtpCodec(pli_3_pm_0_asym.ToCodecParameters())); + + // + } +} + +#ifdef RTC_ENABLE_H265 +// Matching H.265 codecs should have matching profile/tier/level and tx-mode. +TEST(CodecTest, TestH265CodecMatches) { + constexpr char kProfile1[] = "1"; + constexpr char kTier1[] = "1"; + constexpr char kLevel3_1[] = "93"; + constexpr char kLevel4[] = "120"; + constexpr char kTxMrst[] = "MRST"; + + Codec c_ptl_blank = CreateVideoCodec(95, kH265CodecName); + + { + Codec c_profile_1 = CreateVideoCodec(95, kH265CodecName); + c_profile_1.params[kH265FmtpProfileId] = kProfile1; + + // Matches since profile-id unspecified defaults to "1". + EXPECT_TRUE(c_ptl_blank.Matches(c_profile_1)); + } + + { + Codec c_tier_flag_1 = CreateVideoCodec(95, kH265CodecName); + c_tier_flag_1.params[kH265FmtpTierFlag] = kTier1; + + // Does not match since profile-space unspecified defaults to "0". + EXPECT_FALSE(c_ptl_blank.Matches(c_tier_flag_1)); + } + + { + Codec c_level_id_3_1 = CreateVideoCodec(95, kH265CodecName); + c_level_id_3_1.params[kH265FmtpLevelId] = kLevel3_1; + + // Matches since level-id unspecified defaults to "93". + EXPECT_TRUE(c_ptl_blank.Matches(c_level_id_3_1)); + } + + { + Codec c_level_id_4 = CreateVideoCodec(95, kH265CodecName); + c_level_id_4.params[kH265FmtpLevelId] = kLevel4; + + // Matches since we ignore level-id when matching H.265 codecs. + EXPECT_TRUE(c_ptl_blank.Matches(c_level_id_4)); + } + + { + Codec c_tx_mode_mrst = CreateVideoCodec(95, kH265CodecName); + c_tx_mode_mrst.params[kH265FmtpTxMode] = kTxMrst; + + // Does not match since tx-mode implies to "SRST" and must be not specified + // when it is the only mode supported: + // https://datatracker.ietf.org/doc/html/draft-ietf-avtcore-hevc-webrtc + EXPECT_FALSE(c_ptl_blank.Matches(c_tx_mode_mrst)); + } +} +#endif + +TEST(CodecTest, TestMatchesRtpCodecRtx) { + const Codec rtx_codec_1 = CreateVideoRtxCodec(96, 120); + const Codec rtx_codec_2 = CreateVideoRtxCodec(96, 121); + EXPECT_TRUE(rtx_codec_1.Matches(rtx_codec_2)); + // MatchesRtpCodec ignores the different associated payload type (apt) for + // RTX. + EXPECT_TRUE(rtx_codec_1.MatchesRtpCodec(rtx_codec_2.ToCodecParameters())); +} + +} // namespace webrtc diff --git a/media/base/codec_list.cc b/media/base/codec_list.cc new file mode 100644 index 0000000000..14f8fbde54 --- /dev/null +++ b/media/base/codec_list.cc @@ -0,0 +1,124 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "media/base/codec_list.h" + +#include +#include +#include + +#include "api/rtc_error.h" +#include "media/base/codec.h" +#include "media/base/media_constants.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/string_encode.h" + +namespace webrtc { + +using webrtc::RTCError; +using webrtc::RTCErrorOr; +using webrtc::RTCErrorType; + +namespace { + +RTCError CheckInputConsistency(const std::vector& codecs) { + std::map pt_to_index; + // Create a map of payload type to index, and ensure + // that there are no duplicates. + for (size_t i = 0; i < codecs.size(); i++) { + const Codec& codec = codecs[i]; + if (codec.id != Codec::kIdNotSet) { + bool inserted = pt_to_index.insert({codec.id, i}).second; + if (!inserted) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "Duplicate payload type in codec list"); + } + } + } + for (const webrtc::Codec& codec : codecs) { + switch (codec.GetResiliencyType()) { + case Codec::ResiliencyType::kRed: + // Check that the target codec exists + break; + case Codec::ResiliencyType::kRtx: { + // Check that the target codec exists + const auto apt_it = codec.params.find(kCodecParamAssociatedPayloadType); + // Not true - there's a test that deliberately injects a wrong + // RTX codec (MediaSessionDescriptionFactoryTest.RtxWithoutApt) + // TODO: https://issues.webrtc.org/384756622 - reject codec earlier and + // enable check. RTC_DCHECK(apt_it != codec.params.end()); Until that is + // fixed: + if (codec.id == Codec::kIdNotSet) { + // Should not have an apt parameter. + if (apt_it != codec.params.end()) { + RTC_LOG(LS_WARNING) << "Surprising condition: RTX codec without " + << "PT has an apt parameter"; + } + // Stop checking the associated PT. + break; + } + if (apt_it == codec.params.end()) { + RTC_LOG(LS_WARNING) << "Surprising condition: RTX codec without" + << " apt parameter: " << codec; + break; + } + int associated_pt; + if (!(webrtc::FromString(apt_it->second, &associated_pt))) { + RTC_LOG(LS_ERROR) << "Non-numeric argument to rtx apt: " << codec + << " apt=" << apt_it->second; + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "Non-numeric argument to rtx apt parameter"); + } + if (codec.id != Codec::kIdNotSet && + pt_to_index.count(associated_pt) != 1) { + RTC_LOG(LS_WARNING) + << "Surprising condition: RTX codec APT not found: " << codec + << " points to a PT that occurs " + << pt_to_index.count(associated_pt) << " times"; + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_PARAMETER, + "PT pointed to by rtx apt parameter does not exist"); + } + // const Codec& referred_codec = codecs[pt_to_index[associated_pt]]; + // Not true: + // RTC_DCHECK(referred_codec.type == Codec::Type::kVideo); + // Not true: + // RTC_DCHECK(referred_codec.GetResiliencyType() == + // Codec::ResiliencyType::kNone); + // TODO: https://issues.webrtc.org/384756623 - figure out if this is + // expected or not. 
+ break; + } + case Codec::ResiliencyType::kNone: + break; // nothing to see here + default: + break; // don't know what to check yet + } + } + return RTCError::OK(); +} + +} // namespace + +// static +RTCErrorOr CodecList::Create(const std::vector& codecs) { + RTCError error = CheckInputConsistency(codecs); + if (!error.ok()) { + return error; + } + return CodecList(codecs); +} + +void CodecList::CheckConsistency() { + RTC_DCHECK(CheckInputConsistency(codecs_).ok()); +} + +} // namespace webrtc diff --git a/media/base/codec_list.h b/media/base/codec_list.h new file mode 100644 index 0000000000..ccfa3ed98b --- /dev/null +++ b/media/base/codec_list.h @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MEDIA_BASE_CODEC_LIST_H_ +#define MEDIA_BASE_CODEC_LIST_H_ + +#include +#include + +#include "api/rtc_error.h" +#include "media/base/codec.h" + +namespace webrtc { + +class CodecList { + public: + using iterator = std::vector::iterator; + using const_iterator = std::vector::const_iterator; + using value_type = Codec; + + CodecList() = default; + // Copy and assign are available. + CodecList(const CodecList&) = default; + CodecList& operator=(const CodecList&) = default; + CodecList(CodecList&&) = default; + CodecList& operator=(CodecList&&) = default; + bool operator==(const CodecList& o) const { return codecs_ == o.codecs_; } + + // Creates a codec list on untrusted data. If successful, the + // resulting CodecList satisfies all the CodecList invariants. + static RTCErrorOr Create(const std::vector& codecs); + // Creates a codec list on trusted data. Only for use when + // the codec list is generated from internal code. + static CodecList CreateFromTrustedData(const std::vector& codecs) { + return CodecList(codecs); + } + // Vector-compatible API to access the codecs. + iterator begin() { return codecs_.begin(); } + iterator end() { return codecs_.end(); } + const_iterator begin() const { return codecs_.begin(); } + const_iterator end() const { return codecs_.end(); } + const Codec& operator[](size_t i) const { return codecs_[i]; } + Codec& operator[](size_t i) { return codecs_[i]; } + void push_back(const Codec& codec) { + codecs_.push_back(codec); + CheckConsistency(); + } + bool empty() const { return codecs_.empty(); } + void clear() { codecs_.clear(); } + size_t size() const { return codecs_.size(); } + // Access to the whole codec list + const std::vector& codecs() const { return codecs_; } + std::vector& writable_codecs() { return codecs_; } + // Verify consistency of the codec list. + // Examples: checking that all RTX codecs have APT pointing + // to a codec in the list. + // The function will CHECK or DCHECK on inconsistencies. + void CheckConsistency(); + + template + friend void AbslStringify(Sink& sink, const CodecList& list) { + absl::Format(&sink, "\n--- Codec list of size %d\n", list.size()); + for (Codec codec : list) { + absl::Format(&sink, "%v\n", codec); + } + sink.Append("--- End\n"); + } + + private: + // Creates a codec list on trusted data. 
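+  // The constructor only DCHECKs the invariants via CheckConsistency(), so
+  // untrusted input must go through Create() instead.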
+ explicit CodecList(const std::vector& codecs) { + codecs_ = codecs; + CheckConsistency(); + } + + std::vector codecs_; +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::CodecList; +} // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // MEDIA_BASE_CODEC_LIST_H_ diff --git a/media/base/codec_list_unittest.cc b/media/base/codec_list_unittest.cc new file mode 100644 index 0000000000..161b59d788 --- /dev/null +++ b/media/base/codec_list_unittest.cc @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2025 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "media/base/codec_list.h" + +#include + +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/video_codecs/sdp_video_format.h" +#include "media/base/codec.h" +#include "rtc_base/checks.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using webrtc::RTCErrorOr; +using webrtc::RTCErrorType; + +TEST(CodecList, StoreAndRecall) { + CodecList empty_list = CodecList::CreateFromTrustedData(std::vector{}); + EXPECT_TRUE(empty_list.empty()); + EXPECT_TRUE(empty_list.codecs().empty()); + Codec video_codec = CreateVideoCodec({SdpVideoFormat{"VP8"}}); + CodecList one_codec = CodecList::CreateFromTrustedData({{video_codec}}); + EXPECT_EQ(one_codec.size(), 1U); + EXPECT_EQ(one_codec.codecs()[0], video_codec); +} + +TEST(CodecList, RejectIllegalConstructorArguments) { + std::vector apt_without_number{webrtc::CreateVideoCodec( + {SdpVideoFormat{"rtx", CodecParameterMap{{"apt", "not-a-number"}}}})}; + apt_without_number[0].id = 96; + RTCErrorOr checked_codec_list = + CodecList::Create(apt_without_number); + EXPECT_FALSE(checked_codec_list.ok()); + EXPECT_EQ(checked_codec_list.error().type(), RTCErrorType::INVALID_PARAMETER); +} + +#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) +TEST(CodecList, CrashOnIllegalConstructorArguments) { + // This tests initializing a CodecList with a sequence that doesn't + // satisfy its expected invariants. + // Those invariants are only checked in debug mode. + // See CodecList::CheckInputConsistency for what checks are enabled. + // Checks that can't be enabled log things instead. + // Note: DCHECK is on in some release builds, so we can't use + // EXPECT_DEBUG_DEATH here. + std::vector apt_without_number{webrtc::CreateVideoCodec( + {SdpVideoFormat{"rtx", CodecParameterMap{{"apt", "not-a-number"}}}})}; + apt_without_number[0].id = 96; +#if RTC_DCHECK_IS_ON + EXPECT_DEATH( + CodecList bad = CodecList::CreateFromTrustedData(apt_without_number), + "CheckInputConsistency"); +#else + // Expect initialization to succeed. 
+ CodecList bad = CodecList::CreateFromTrustedData(apt_without_number); + EXPECT_EQ(bad.size(), 1U); +#endif +} +#endif + +} // namespace +} // namespace webrtc diff --git a/media/base/codec_unittest.cc b/media/base/codec_unittest.cc index eb34530c38..e00b08d8c2 100644 --- a/media/base/codec_unittest.cc +++ b/media/base/codec_unittest.cc @@ -10,21 +10,22 @@ #include "media/base/codec.h" -#include +#include +#include +#include -#include "api/video_codecs/av1_profile.h" +#include "absl/strings/str_cat.h" +#include "api/media_types.h" +#include "api/rtp_parameters.h" #include "api/video_codecs/h264_profile_level_id.h" +#include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/vp9_profile.h" +#include "media/base/media_constants.h" #include "modules/video_coding/codecs/h264/include/h264.h" -#include "rtc_base/gunit.h" +#include "test/gtest.h" -using cricket::AudioCodec; -using cricket::Codec; -using cricket::FeedbackParam; -using cricket::kCodecParamAssociatedPayloadType; -using cricket::kCodecParamMaxBitrate; -using cricket::kCodecParamMinBitrate; -using cricket::VideoCodec; +namespace webrtc { +namespace { class TestCodec : public Codec { public: @@ -64,32 +65,32 @@ TEST(CodecTest, TestCodecOperators) { EXPECT_TRUE(c0 != c1); TestCodec c5; - TestCodec c6(0, "", 0); + TestCodec c6(Codec::kIdNotSet, "", 0); EXPECT_TRUE(c5 == c6); } TEST(CodecTest, TestAudioCodecOperators) { - AudioCodec c0 = cricket::CreateAudioCodec(96, "A", 44100, 2); - AudioCodec c1 = cricket::CreateAudioCodec(95, "A", 44100, 2); - AudioCodec c2 = cricket::CreateAudioCodec(96, "x", 44100, 2); - AudioCodec c3 = cricket::CreateAudioCodec(96, "A", 48000, 2); - AudioCodec c4 = cricket::CreateAudioCodec(96, "A", 44100, 2); + Codec c0 = CreateAudioCodec(96, "A", 44100, 2); + Codec c1 = CreateAudioCodec(95, "A", 44100, 2); + Codec c2 = CreateAudioCodec(96, "x", 44100, 2); + Codec c3 = CreateAudioCodec(96, "A", 48000, 2); + Codec c4 = CreateAudioCodec(96, "A", 44100, 2); c4.bitrate = 10000; - AudioCodec c5 = cricket::CreateAudioCodec(96, "A", 44100, 1); + Codec c5 = CreateAudioCodec(96, "A", 44100, 1); EXPECT_NE(c0, c1); EXPECT_NE(c0, c2); EXPECT_NE(c0, c3); EXPECT_NE(c0, c4); EXPECT_NE(c0, c5); - AudioCodec c8 = cricket::CreateAudioCodec(0, "", 0, 0); - AudioCodec c9 = c0; + Codec c8 = CreateAudioCodec(0, "", 0, 0); + Codec c9 = c0; EXPECT_EQ(c9, c0); - AudioCodec c10(c0); - AudioCodec c11(c0); - AudioCodec c12(c0); - AudioCodec c13(c0); + Codec c10(c0); + Codec c11(c0); + Codec c12(c0); + Codec c13(c0); c10.params["x"] = "abc"; c11.params["x"] = "def"; c12.params["y"] = "abc"; @@ -103,67 +104,22 @@ TEST(CodecTest, TestAudioCodecOperators) { EXPECT_EQ(c13, c10); } -TEST(CodecTest, TestAudioCodecMatches) { - // Test a codec with a static payload type. 
- AudioCodec c0 = cricket::CreateAudioCodec(34, "A", 44100, 1); - EXPECT_TRUE(c0.Matches(cricket::CreateAudioCodec(34, "", 44100, 1))); - EXPECT_TRUE(c0.Matches(cricket::CreateAudioCodec(34, "", 44100, 0))); - EXPECT_TRUE(c0.Matches(cricket::CreateAudioCodec(34, "", 44100, 0))); - EXPECT_TRUE(c0.Matches(cricket::CreateAudioCodec(34, "", 0, 0))); - EXPECT_FALSE(c0.Matches(cricket::CreateAudioCodec(96, "A", 44100, 1))); - EXPECT_FALSE(c0.Matches(cricket::CreateAudioCodec(96, "", 44100, 1))); - EXPECT_FALSE(c0.Matches(cricket::CreateAudioCodec(95, "", 55100, 1))); - EXPECT_FALSE(c0.Matches(cricket::CreateAudioCodec(95, "", 44100, 1))); - EXPECT_FALSE(c0.Matches(cricket::CreateAudioCodec(95, "", 44100, 2))); - EXPECT_FALSE(c0.Matches(cricket::CreateAudioCodec(95, "", 55100, 2))); - - // Test a codec with a dynamic payload type. - AudioCodec c1 = cricket::CreateAudioCodec(96, "A", 44100, 1); - EXPECT_TRUE(c1.Matches(cricket::CreateAudioCodec(96, "A", 0, 0))); - EXPECT_TRUE(c1.Matches(cricket::CreateAudioCodec(97, "A", 0, 0))); - EXPECT_TRUE(c1.Matches(cricket::CreateAudioCodec(96, "a", 0, 0))); - EXPECT_TRUE(c1.Matches(cricket::CreateAudioCodec(97, "a", 0, 0))); - EXPECT_TRUE(c1.Matches(cricket::CreateAudioCodec(35, "a", 0, 0))); - EXPECT_TRUE(c1.Matches(cricket::CreateAudioCodec(42, "a", 0, 0))); - EXPECT_TRUE(c1.Matches(cricket::CreateAudioCodec(65, "a", 0, 0))); - EXPECT_FALSE(c1.Matches(cricket::CreateAudioCodec(95, "A", 0, 0))); - EXPECT_FALSE(c1.Matches(cricket::CreateAudioCodec(34, "A", 0, 0))); - EXPECT_FALSE(c1.Matches(cricket::CreateAudioCodec(96, "", 44100, 2))); - EXPECT_FALSE(c1.Matches(cricket::CreateAudioCodec(96, "A", 55100, 1))); - - // Test a codec with a dynamic payload type, and auto bitrate. - AudioCodec c2 = cricket::CreateAudioCodec(97, "A", 16000, 1); - // Use default bitrate. - EXPECT_TRUE(c2.Matches(cricket::CreateAudioCodec(97, "A", 16000, 1))); - EXPECT_TRUE(c2.Matches(cricket::CreateAudioCodec(97, "A", 16000, 0))); - // Use explicit bitrate. - EXPECT_TRUE(c2.Matches(cricket::CreateAudioCodec(97, "A", 16000, 1))); - // Backward compatibility with clients that might send "-1" (for default). - EXPECT_TRUE(c2.Matches(cricket::CreateAudioCodec(97, "A", 16000, 1))); - - // Stereo doesn't match channels = 0. 
- AudioCodec c3 = cricket::CreateAudioCodec(96, "A", 44100, 2); - EXPECT_TRUE(c3.Matches(cricket::CreateAudioCodec(96, "A", 44100, 2))); - EXPECT_FALSE(c3.Matches(cricket::CreateAudioCodec(96, "A", 44100, 1))); - EXPECT_FALSE(c3.Matches(cricket::CreateAudioCodec(96, "A", 44100, 0))); -} - TEST(CodecTest, TestVideoCodecOperators) { - VideoCodec c0 = cricket::CreateVideoCodec(96, "V"); - VideoCodec c1 = cricket::CreateVideoCodec(95, "V"); - VideoCodec c2 = cricket::CreateVideoCodec(96, "x"); + Codec c0 = CreateVideoCodec(96, "V"); + Codec c1 = CreateVideoCodec(95, "V"); + Codec c2 = CreateVideoCodec(96, "x"); EXPECT_TRUE(c0 != c1); EXPECT_TRUE(c0 != c2); - VideoCodec c8 = cricket::CreateVideoCodec(0, ""); - VideoCodec c9 = c0; + Codec c8 = CreateVideoCodec(0, ""); + Codec c9 = c0; EXPECT_TRUE(c9 == c0); - VideoCodec c10(c0); - VideoCodec c11(c0); - VideoCodec c12(c0); - VideoCodec c13(c0); + Codec c10(c0); + Codec c11(c0); + Codec c12(c0); + Codec c13(c0); c10.params["x"] = "abc"; c11.params["x"] = "def"; c12.params["y"] = "abc"; @@ -178,9 +134,9 @@ TEST(CodecTest, TestVideoCodecOperators) { } TEST(CodecTest, TestVideoCodecEqualsWithDifferentPacketization) { - VideoCodec c0 = cricket::CreateVideoCodec(100, cricket::kVp8CodecName); - VideoCodec c1 = cricket::CreateVideoCodec(100, cricket::kVp8CodecName); - VideoCodec c2 = cricket::CreateVideoCodec(100, cricket::kVp8CodecName); + Codec c0 = CreateVideoCodec(100, kVp8CodecName); + Codec c1 = CreateVideoCodec(100, kVp8CodecName); + Codec c2 = CreateVideoCodec(100, kVp8CodecName); c2.packetization = "raw"; EXPECT_EQ(c0, c1); @@ -189,161 +145,8 @@ TEST(CodecTest, TestVideoCodecEqualsWithDifferentPacketization) { EXPECT_EQ(c2, c2); } -TEST(CodecTest, TestVideoCodecMatches) { - // Test a codec with a static payload type. - VideoCodec c0 = cricket::CreateVideoCodec(34, "V"); - EXPECT_TRUE(c0.Matches(cricket::CreateVideoCodec(34, ""))); - EXPECT_FALSE(c0.Matches(cricket::CreateVideoCodec(96, ""))); - EXPECT_FALSE(c0.Matches(cricket::CreateVideoCodec(96, "V"))); - - // Test a codec with a dynamic payload type. - VideoCodec c1 = cricket::CreateVideoCodec(96, "V"); - EXPECT_TRUE(c1.Matches(cricket::CreateVideoCodec(96, "V"))); - EXPECT_TRUE(c1.Matches(cricket::CreateVideoCodec(97, "V"))); - EXPECT_TRUE(c1.Matches(cricket::CreateVideoCodec(96, "v"))); - EXPECT_TRUE(c1.Matches(cricket::CreateVideoCodec(97, "v"))); - EXPECT_TRUE(c1.Matches(cricket::CreateVideoCodec(35, "v"))); - EXPECT_TRUE(c1.Matches(cricket::CreateVideoCodec(42, "v"))); - EXPECT_TRUE(c1.Matches(cricket::CreateVideoCodec(65, "v"))); - EXPECT_FALSE(c1.Matches(cricket::CreateVideoCodec(96, ""))); - EXPECT_FALSE(c1.Matches(cricket::CreateVideoCodec(95, "V"))); - EXPECT_FALSE(c1.Matches(cricket::CreateVideoCodec(34, "V"))); -} - -TEST(CodecTest, TestVideoCodecMatchesWithDifferentPacketization) { - VideoCodec c0 = cricket::CreateVideoCodec(100, cricket::kVp8CodecName); - VideoCodec c1 = cricket::CreateVideoCodec(101, cricket::kVp8CodecName); - c1.packetization = "raw"; - - EXPECT_TRUE(c0.Matches(c1)); - EXPECT_TRUE(c1.Matches(c0)); -} - -// AV1 codecs compare profile information. 
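// Illustrative sketch, not part of the patch: the profile-matching tests being
// removed above keyed on SDP fmtp entries stored in Codec::params. Setting and
// reading such a parameter with the webrtc-namespace API looks like this; the
// payload type and profile value are arbitrary examples, and the same pattern
// applies to the AV1 profile fmtp.
#include <string>

#include "api/video_codecs/vp9_profile.h"
#include "media/base/codec.h"
#include "media/base/media_constants.h"

bool Vp9ProfileParamSketch() {
  webrtc::Codec profile0 = webrtc::CreateVideoCodec(98, webrtc::kVp9CodecName);
  webrtc::Codec profile2 = profile0;
  profile2.SetParam(webrtc::kVP9FmtpProfileId, "2");
  std::string value;
  bool has_profile = profile2.GetParam(webrtc::kVP9FmtpProfileId, &value);
  // Codecs that differ only in their params compare unequal.
  return has_profile && value == "2" && !(profile0 == profile2);
}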
-TEST(CodecTest, TestAV1CodecMatches) { - const char kProfile0[] = "0"; - const char kProfile1[] = "1"; - const char kProfile2[] = "2"; - - VideoCodec c_no_profile = - cricket::CreateVideoCodec(95, cricket::kAv1CodecName); - VideoCodec c_profile0 = cricket::CreateVideoCodec(95, cricket::kAv1CodecName); - c_profile0.params[webrtc::kAV1FmtpProfile] = kProfile0; - VideoCodec c_profile1 = cricket::CreateVideoCodec(95, cricket::kAv1CodecName); - c_profile1.params[webrtc::kAV1FmtpProfile] = kProfile1; - VideoCodec c_profile2 = cricket::CreateVideoCodec(95, cricket::kAv1CodecName); - c_profile2.params[webrtc::kAV1FmtpProfile] = kProfile2; - - // An AV1 entry with no profile specified should be treated as profile-0. - EXPECT_TRUE(c_profile0.Matches(c_no_profile)); - - { - // Two AV1 entries without a profile specified are treated as duplicates. - VideoCodec c_no_profile_eq = - cricket::CreateVideoCodec(95, cricket::kAv1CodecName); - EXPECT_TRUE(c_no_profile.Matches(c_no_profile_eq)); - } - - { - // Two AV1 entries with profile 0 specified are treated as duplicates. - VideoCodec c_profile0_eq = - cricket::CreateVideoCodec(95, cricket::kAv1CodecName); - c_profile0_eq.params[webrtc::kAV1FmtpProfile] = kProfile0; - EXPECT_TRUE(c_profile0.Matches(c_profile0_eq)); - } - - { - // Two AV1 entries with profile 1 specified are treated as duplicates. - VideoCodec c_profile1_eq = - cricket::CreateVideoCodec(95, cricket::kAv1CodecName); - c_profile1_eq.params[webrtc::kAV1FmtpProfile] = kProfile1; - EXPECT_TRUE(c_profile1.Matches(c_profile1_eq)); - } - - // AV1 entries with different profiles (0 and 1) are seen as distinct. - EXPECT_FALSE(c_profile0.Matches(c_profile1)); - EXPECT_FALSE(c_no_profile.Matches(c_profile1)); - - // AV1 entries with different profiles (0 and 2) are seen as distinct. - EXPECT_FALSE(c_profile0.Matches(c_profile2)); - EXPECT_FALSE(c_no_profile.Matches(c_profile2)); -} - -// VP9 codecs compare profile information. -TEST(CodecTest, TestVP9CodecMatches) { - const char kProfile0[] = "0"; - const char kProfile2[] = "2"; - - VideoCodec c_no_profile = - cricket::CreateVideoCodec(95, cricket::kVp9CodecName); - VideoCodec c_profile0 = cricket::CreateVideoCodec(95, cricket::kVp9CodecName); - c_profile0.params[webrtc::kVP9FmtpProfileId] = kProfile0; - - EXPECT_TRUE(c_profile0.Matches(c_no_profile)); - - { - VideoCodec c_profile0_eq = - cricket::CreateVideoCodec(95, cricket::kVp9CodecName); - c_profile0_eq.params[webrtc::kVP9FmtpProfileId] = kProfile0; - EXPECT_TRUE(c_profile0.Matches(c_profile0_eq)); - } - - { - VideoCodec c_profile2 = - cricket::CreateVideoCodec(95, cricket::kVp9CodecName); - c_profile2.params[webrtc::kVP9FmtpProfileId] = kProfile2; - EXPECT_FALSE(c_profile0.Matches(c_profile2)); - EXPECT_FALSE(c_no_profile.Matches(c_profile2)); - } - - { - VideoCodec c_no_profile_eq = - cricket::CreateVideoCodec(95, cricket::kVp9CodecName); - EXPECT_TRUE(c_no_profile.Matches(c_no_profile_eq)); - } -} - -// Matching H264 codecs also need to have matching profile-level-id and -// packetization-mode. 
-TEST(CodecTest, TestH264CodecMatches) { - const char kProfileLevelId1[] = "42e01f"; - const char kProfileLevelId2[] = "42a01e"; - - VideoCodec pli_1_pm_0 = cricket::CreateVideoCodec(95, "H264"); - pli_1_pm_0.params[cricket::kH264FmtpProfileLevelId] = kProfileLevelId1; - pli_1_pm_0.params[cricket::kH264FmtpPacketizationMode] = "0"; - - { - VideoCodec pli_1_pm_blank = cricket::CreateVideoCodec(95, "H264"); - pli_1_pm_blank.params[cricket::kH264FmtpProfileLevelId] = kProfileLevelId1; - pli_1_pm_blank.params.erase( - pli_1_pm_blank.params.find(cricket::kH264FmtpPacketizationMode)); - - // Matches since if packetization-mode is not specified it defaults to "0". - EXPECT_TRUE(pli_1_pm_0.Matches(pli_1_pm_blank)); - } - - { - VideoCodec pli_1_pm_1 = cricket::CreateVideoCodec(95, "H264"); - pli_1_pm_1.params[cricket::kH264FmtpProfileLevelId] = kProfileLevelId1; - pli_1_pm_1.params[cricket::kH264FmtpPacketizationMode] = "1"; - - // Does not match since packetization-mode is different. - EXPECT_FALSE(pli_1_pm_0.Matches(pli_1_pm_1)); - } - - { - VideoCodec pli_2_pm_0 = cricket::CreateVideoCodec(95, "H264"); - pli_2_pm_0.params[cricket::kH264FmtpProfileLevelId] = kProfileLevelId2; - pli_2_pm_0.params[cricket::kH264FmtpPacketizationMode] = "0"; - - // Does not match since profile-level-id is different. - EXPECT_FALSE(pli_1_pm_0.Matches(pli_2_pm_0)); - } -} - TEST(CodecTest, TestSetParamGetParamAndRemoveParam) { - AudioCodec codec = cricket::CreateAudioCodec(0, "foo", 22222, 2); + Codec codec = CreateAudioCodec(0, "foo", 22222, 2); codec.SetParam("a", "1"); codec.SetParam("b", "x"); @@ -383,12 +186,12 @@ TEST(CodecTest, TestIntersectFeedbackParams) { } TEST(CodecTest, TestGetCodecType) { - // Codec type comparison should be case insenstive on names. - const VideoCodec codec = cricket::CreateVideoCodec(96, "V"); - const VideoCodec rtx_codec = cricket::CreateVideoCodec(96, "rTx"); - const VideoCodec ulpfec_codec = cricket::CreateVideoCodec(96, "ulpFeC"); - const VideoCodec flexfec_codec = cricket::CreateVideoCodec(96, "FlExFeC-03"); - const VideoCodec red_codec = cricket::CreateVideoCodec(96, "ReD"); + // Codec type comparison should be case insensitive on names. + const Codec codec = CreateVideoCodec(96, "V"); + const Codec rtx_codec = CreateVideoCodec(96, "rTx"); + const Codec ulpfec_codec = CreateVideoCodec(96, "ulpFeC"); + const Codec flexfec_codec = CreateVideoCodec(96, "FlExFeC-03"); + const Codec red_codec = CreateVideoCodec(96, "ReD"); EXPECT_TRUE(codec.IsMediaCodec()); EXPECT_EQ(codec.GetResiliencyType(), Codec::ResiliencyType::kNone); EXPECT_EQ(rtx_codec.GetResiliencyType(), Codec::ResiliencyType::kRtx); @@ -398,7 +201,7 @@ TEST(CodecTest, TestGetCodecType) { } TEST(CodecTest, TestCreateRtxCodec) { - VideoCodec rtx_codec = cricket::CreateVideoRtxCodec(96, 120); + const Codec rtx_codec = CreateVideoRtxCodec(96, 120); EXPECT_EQ(96, rtx_codec.id); EXPECT_EQ(rtx_codec.GetResiliencyType(), Codec::ResiliencyType::kRtx); int associated_payload_type; @@ -408,64 +211,64 @@ TEST(CodecTest, TestCreateRtxCodec) { } TEST(CodecTest, TestValidateCodecFormat) { - const VideoCodec codec = cricket::CreateVideoCodec(96, "V"); + const Codec codec = CreateVideoCodec(96, "V"); ASSERT_TRUE(codec.ValidateCodecFormat()); // Accept 0-127 as payload types. 
- VideoCodec low_payload_type = codec; + Codec low_payload_type = codec; low_payload_type.id = 0; - VideoCodec high_payload_type = codec; + Codec high_payload_type = codec; high_payload_type.id = 127; ASSERT_TRUE(low_payload_type.ValidateCodecFormat()); EXPECT_TRUE(high_payload_type.ValidateCodecFormat()); // Reject negative payloads. - VideoCodec negative_payload_type = codec; + Codec negative_payload_type = codec; negative_payload_type.id = -1; EXPECT_FALSE(negative_payload_type.ValidateCodecFormat()); // Reject too-high payloads. - VideoCodec too_high_payload_type = codec; + Codec too_high_payload_type = codec; too_high_payload_type.id = 128; EXPECT_FALSE(too_high_payload_type.ValidateCodecFormat()); // Reject codecs with min bitrate > max bitrate. - VideoCodec incorrect_bitrates = codec; + Codec incorrect_bitrates = codec; incorrect_bitrates.params[kCodecParamMinBitrate] = "100"; incorrect_bitrates.params[kCodecParamMaxBitrate] = "80"; EXPECT_FALSE(incorrect_bitrates.ValidateCodecFormat()); // Accept min bitrate == max bitrate. - VideoCodec equal_bitrates = codec; + Codec equal_bitrates = codec; equal_bitrates.params[kCodecParamMinBitrate] = "100"; equal_bitrates.params[kCodecParamMaxBitrate] = "100"; EXPECT_TRUE(equal_bitrates.ValidateCodecFormat()); // Accept min bitrate < max bitrate. - VideoCodec different_bitrates = codec; + Codec different_bitrates = codec; different_bitrates.params[kCodecParamMinBitrate] = "99"; different_bitrates.params[kCodecParamMaxBitrate] = "100"; EXPECT_TRUE(different_bitrates.ValidateCodecFormat()); } TEST(CodecTest, TestToCodecParameters) { - VideoCodec v = cricket::CreateVideoCodec(96, "V"); + Codec v = CreateVideoCodec(96, "V"); v.SetParam("p1", "v1"); webrtc::RtpCodecParameters codec_params_1 = v.ToCodecParameters(); EXPECT_EQ(96, codec_params_1.payload_type); - EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, codec_params_1.kind); + EXPECT_EQ(webrtc::MediaType::VIDEO, codec_params_1.kind); EXPECT_EQ("V", codec_params_1.name); - EXPECT_EQ(cricket::kVideoCodecClockrate, codec_params_1.clock_rate); - EXPECT_EQ(absl::nullopt, codec_params_1.num_channels); + EXPECT_EQ(kVideoCodecClockrate, codec_params_1.clock_rate); + EXPECT_EQ(std::nullopt, codec_params_1.num_channels); ASSERT_EQ(1u, codec_params_1.parameters.size()); EXPECT_EQ("p1", codec_params_1.parameters.begin()->first); EXPECT_EQ("v1", codec_params_1.parameters.begin()->second); - AudioCodec a = cricket::CreateAudioCodec(97, "A", 44100, 2); + Codec a = CreateAudioCodec(97, "A", 44100, 2); a.SetParam("p1", "a1"); webrtc::RtpCodecParameters codec_params_2 = a.ToCodecParameters(); EXPECT_EQ(97, codec_params_2.payload_type); - EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, codec_params_2.kind); + EXPECT_EQ(webrtc::MediaType::AUDIO, codec_params_2.kind); EXPECT_EQ("A", codec_params_2.name); EXPECT_EQ(44100, codec_params_2.clock_rate); EXPECT_EQ(2, codec_params_2.num_channels); @@ -483,8 +286,7 @@ TEST(CodecTest, H264CostrainedBaselineIsAddedIfH264IsSupported) { std::vector supported_formats = kExplicitlySupportedFormats; - cricket::AddH264ConstrainedBaselineProfileToSupportedFormats( - &supported_formats); + AddH264ConstrainedBaselineProfileToSupportedFormats(&supported_formats); const webrtc::SdpVideoFormat kH264ConstrainedBasedlinePacketization1 = webrtc::CreateH264Format(webrtc::H264Profile::kProfileConstrainedBaseline, @@ -501,14 +303,13 @@ TEST(CodecTest, H264CostrainedBaselineIsAddedIfH264IsSupported) { TEST(CodecTest, H264CostrainedBaselineIsNotAddedIfH264IsUnsupported) { const std::vector kExplicitlySupportedFormats 
= { - {cricket::kVp9CodecName, + {kVp9CodecName, {{webrtc::kVP9FmtpProfileId, VP9ProfileToString(webrtc::VP9Profile::kProfile0)}}}}; std::vector supported_formats = kExplicitlySupportedFormats; - cricket::AddH264ConstrainedBaselineProfileToSupportedFormats( - &supported_formats); + AddH264ConstrainedBaselineProfileToSupportedFormats(&supported_formats); EXPECT_EQ(supported_formats[0], kExplicitlySupportedFormats[0]); EXPECT_EQ(supported_formats.size(), kExplicitlySupportedFormats.size()); @@ -527,8 +328,7 @@ TEST(CodecTest, H264CostrainedBaselineNotAddedIfAlreadySpecified) { std::vector supported_formats = kExplicitlySupportedFormats; - cricket::AddH264ConstrainedBaselineProfileToSupportedFormats( - &supported_formats); + AddH264ConstrainedBaselineProfileToSupportedFormats(&supported_formats); EXPECT_EQ(supported_formats[0], kExplicitlySupportedFormats[0]); EXPECT_EQ(supported_formats[1], kExplicitlySupportedFormats[1]); @@ -536,3 +336,13 @@ TEST(CodecTest, H264CostrainedBaselineNotAddedIfAlreadySpecified) { EXPECT_EQ(supported_formats[3], kExplicitlySupportedFormats[3]); EXPECT_EQ(supported_formats.size(), kExplicitlySupportedFormats.size()); } + +TEST(CodecTest, AbslStringify) { + Codec codec = CreateAudioCodec(47, "custom-audio", 48000, 2); + EXPECT_EQ(absl::StrCat(codec), "[47:audio/custom-audio/48000/2]"); + codec.params["key"] = "value"; + EXPECT_EQ(absl::StrCat(codec), "[47:audio/custom-audio/48000/2;key=value]"); +} + +} // namespace +} // namespace webrtc diff --git a/media/base/fake_frame_source.cc b/media/base/fake_frame_source.cc index 61bc5857d9..41a014313f 100644 --- a/media/base/fake_frame_source.cc +++ b/media/base/fake_frame_source.cc @@ -10,13 +10,16 @@ #include "media/base/fake_frame_source.h" +#include + #include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" -#include "api/video/video_frame_buffer.h" +#include "api/video/video_frame.h" +#include "api/video/video_rotation.h" #include "rtc_base/checks.h" #include "rtc_base/time_utils.h" -namespace cricket { +namespace webrtc { FakeFrameSource::FakeFrameSource(int width, int height, @@ -33,17 +36,17 @@ FakeFrameSource::FakeFrameSource(int width, } FakeFrameSource::FakeFrameSource(int width, int height, int interval_us) - : FakeFrameSource(width, height, interval_us, rtc::TimeMicros()) {} + : FakeFrameSource(width, height, interval_us, TimeMicros()) {} -webrtc::VideoRotation FakeFrameSource::GetRotation() const { +VideoRotation FakeFrameSource::GetRotation() const { return rotation_; } -void FakeFrameSource::SetRotation(webrtc::VideoRotation rotation) { +void FakeFrameSource::SetRotation(VideoRotation rotation) { rotation_ = rotation; } -webrtc::VideoFrame FakeFrameSource::GetFrameRotationApplied() { +VideoFrame FakeFrameSource::GetFrameRotationApplied() { switch (rotation_) { case webrtc::kVideoRotation_0: case webrtc::kVideoRotation_180: @@ -59,30 +62,29 @@ webrtc::VideoFrame FakeFrameSource::GetFrameRotationApplied() { return GetFrame(); } -webrtc::VideoFrame FakeFrameSource::GetFrame() { +VideoFrame FakeFrameSource::GetFrame() { return GetFrame(width_, height_, rotation_, interval_us_); } -webrtc::VideoFrame FakeFrameSource::GetFrame(int width, - int height, - webrtc::VideoRotation rotation, - int interval_us) { +VideoFrame FakeFrameSource::GetFrame(int width, + int height, + VideoRotation rotation, + int interval_us) { RTC_CHECK_GT(width, 0); RTC_CHECK_GT(height, 0); RTC_CHECK_GT(interval_us, 0); - rtc::scoped_refptr buffer( - webrtc::I420Buffer::Create(width, height)); + scoped_refptr 
buffer(I420Buffer::Create(width, height)); buffer->InitializeData(); - webrtc::VideoFrame frame = webrtc::VideoFrame::Builder() - .set_video_frame_buffer(buffer) - .set_rotation(rotation) - .set_timestamp_us(next_timestamp_us_) - .build(); + VideoFrame frame = VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_rotation(rotation) + .set_timestamp_us(next_timestamp_us_) + .build(); next_timestamp_us_ += interval_us; return frame; } -} // namespace cricket +} // namespace webrtc diff --git a/media/base/fake_frame_source.h b/media/base/fake_frame_source.h index 4c56204e69..f87473c4b1 100644 --- a/media/base/fake_frame_source.h +++ b/media/base/fake_frame_source.h @@ -11,10 +11,12 @@ #ifndef MEDIA_BASE_FAKE_FRAME_SOURCE_H_ #define MEDIA_BASE_FAKE_FRAME_SOURCE_H_ +#include + #include "api/video/video_frame.h" -#include "rtc_base/time_utils.h" +#include "api/video/video_rotation.h" -namespace cricket { +namespace webrtc { class FakeFrameSource { public: @@ -24,27 +26,35 @@ class FakeFrameSource { int64_t timestamp_offset_us); FakeFrameSource(int width, int height, int interval_us); - webrtc::VideoRotation GetRotation() const; - void SetRotation(webrtc::VideoRotation rotation); + VideoRotation GetRotation() const; + void SetRotation(VideoRotation rotation); - webrtc::VideoFrame GetFrame(); - webrtc::VideoFrame GetFrameRotationApplied(); + VideoFrame GetFrame(); + VideoFrame GetFrameRotationApplied(); // Override configuration. - webrtc::VideoFrame GetFrame(int width, - int height, - webrtc::VideoRotation rotation, - int interval_us); + VideoFrame GetFrame(int width, + int height, + VideoRotation rotation, + int interval_us); private: const int width_; const int height_; const int interval_us_; - webrtc::VideoRotation rotation_ = webrtc::kVideoRotation_0; + VideoRotation rotation_ = webrtc::kVideoRotation_0; int64_t next_timestamp_us_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
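// Illustrative sketch, not part of the patch: basic use of the fake after the
// namespace move, assuming the usual width/height swap for 90/270 degree
// rotations; the frame size and interval are arbitrary examples. Legacy code
// can keep spelling it cricket::FakeFrameSource through the alias guarded by
// WEBRTC_ALLOW_DEPRECATED_NAMESPACES below.
#include "api/video/video_frame.h"
#include "api/video/video_rotation.h"
#include "media/base/fake_frame_source.h"

bool FakeFrameSourceSketch() {
  // 640x480 frames, one every 33333 us (roughly 30 fps); timestamps start at
  // TimeMicros() per the three-argument constructor.
  webrtc::FakeFrameSource source(640, 480, /*interval_us=*/33333);
  source.SetRotation(webrtc::kVideoRotation_90);
  webrtc::VideoFrame tagged = source.GetFrame();
  webrtc::VideoFrame rotated = source.GetFrameRotationApplied();
  // GetFrame() keeps the configured size and only tags the rotation, while
  // GetFrameRotationApplied() folds the rotation into the buffer dimensions.
  return tagged.width() == 640 && rotated.width() == 480;
}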
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::FakeFrameSource; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_FAKE_FRAME_SOURCE_H_ diff --git a/media/base/fake_media_engine.cc b/media/base/fake_media_engine.cc index 5a1da3326e..5485c02441 100644 --- a/media/base/fake_media_engine.cc +++ b/media/base/fake_media_engine.cc @@ -10,16 +10,43 @@ #include "media/base/fake_media_engine.h" +#include +#include +#include +#include #include +#include +#include #include +#include -#include "absl/algorithm/container.h" #include "absl/strings/match.h" -#include "absl/types/optional.h" +#include "api/audio/audio_device.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_options.h" +#include "api/call/audio_sink.h" +#include "api/crypto/crypto_options.h" +#include "api/make_ref_counted.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" +#include "api/transport/rtp/rtp_source.h" +#include "api/video/recordable_encoded_frame.h" +#include "api/video/video_bitrate_allocator_factory.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" +#include "call/audio_state.h" +#include "call/call.h" +#include "media/base/audio_source.h" +#include "media/base/codec.h" #include "media/base/media_channel.h" +#include "media/base/media_config.h" +#include "media/base/media_engine.h" +#include "media/base/stream_params.h" #include "rtc_base/checks.h" +#include "rtc_base/system/file_wrapper.h" -namespace cricket { +namespace webrtc { using webrtc::TaskQueueBase; FakeVoiceMediaReceiveChannel::DtmfInfo::DtmfInfo(uint32_t ssrc, @@ -38,12 +65,12 @@ FakeVoiceMediaReceiveChannel::VoiceChannelAudioSink::~VoiceChannelAudioSink() { } } void FakeVoiceMediaReceiveChannel::VoiceChannelAudioSink::OnData( - const void* audio_data, - int bits_per_sample, - int sample_rate, - size_t number_of_channels, - size_t number_of_frames, - absl::optional absolute_capture_timestamp_ms) {} + const void* /* audio_data */, + int /* bits_per_sample */, + int /* sample_rate */, + size_t /* number_of_channels */, + size_t /* number_of_frames */, + std::optional /* absolute_capture_timestamp_ms */) {} void FakeVoiceMediaReceiveChannel::VoiceChannelAudioSink::OnClose() { source_ = nullptr; } @@ -62,8 +89,7 @@ FakeVoiceMediaReceiveChannel::FakeVoiceMediaReceiveChannel( SetOptions(options); } FakeVoiceMediaReceiveChannel::~FakeVoiceMediaReceiveChannel() = default; -const std::vector& FakeVoiceMediaReceiveChannel::recv_codecs() - const { +const std::vector& FakeVoiceMediaReceiveChannel::recv_codecs() const { return recv_codecs_; } const std::vector& @@ -134,33 +160,34 @@ bool FakeVoiceMediaReceiveChannel::SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, return true; } } -absl::optional FakeVoiceMediaReceiveChannel::GetBaseMinimumPlayoutDelayMs( +std::optional FakeVoiceMediaReceiveChannel::GetBaseMinimumPlayoutDelayMs( uint32_t ssrc) const { const auto it = output_delays_.find(ssrc); if (it != output_delays_.end()) { return it->second; } - return absl::nullopt; + return std::nullopt; } -bool FakeVoiceMediaReceiveChannel::GetStats(VoiceMediaReceiveInfo* info, - bool get_and_clear_legacy_stats) { +bool FakeVoiceMediaReceiveChannel::GetStats( + VoiceMediaReceiveInfo* /* info */, + bool /* get_and_clear_legacy_stats */) { return false; } void FakeVoiceMediaReceiveChannel::SetRawAudioSink( - uint32_t ssrc, - std::unique_ptr sink) { + uint32_t /* ssrc */, + std::unique_ptr 
sink) { sink_ = std::move(sink); } void FakeVoiceMediaReceiveChannel::SetDefaultRawAudioSink( - std::unique_ptr sink) { + std::unique_ptr sink) { sink_ = std::move(sink); } -std::vector FakeVoiceMediaReceiveChannel::GetSources( - uint32_t ssrc) const { - return std::vector(); +std::vector FakeVoiceMediaReceiveChannel::GetSources( + uint32_t /* ssrc */) const { + return std::vector(); } bool FakeVoiceMediaReceiveChannel::SetRecvCodecs( - const std::vector& codecs) { + const std::vector& codecs) { if (fail_set_recv_codecs()) { // Fake the failure in SetRecvCodecs. return false; @@ -194,12 +221,12 @@ FakeVoiceMediaSendChannel::VoiceChannelAudioSink::~VoiceChannelAudioSink() { } } void FakeVoiceMediaSendChannel::VoiceChannelAudioSink::OnData( - const void* audio_data, - int bits_per_sample, - int sample_rate, - size_t number_of_channels, - size_t number_of_frames, - absl::optional absolute_capture_timestamp_ms) {} + const void* /* audio_data */, + int /* bits_per_sample */, + int /* sample_rate */, + size_t /* number_of_channels */, + size_t /* number_of_frames */, + std::optional /* absolute_capture_timestamp_ms */) {} void FakeVoiceMediaSendChannel::VoiceChannelAudioSink::OnClose() { source_ = nullptr; } @@ -216,14 +243,14 @@ FakeVoiceMediaSendChannel::FakeVoiceMediaSendChannel( SetOptions(options); } FakeVoiceMediaSendChannel::~FakeVoiceMediaSendChannel() = default; -const std::vector& FakeVoiceMediaSendChannel::send_codecs() const { +const std::vector& FakeVoiceMediaSendChannel::send_codecs() const { return send_codecs_; } -absl::optional FakeVoiceMediaSendChannel::GetSendCodec() const { +std::optional FakeVoiceMediaSendChannel::GetSendCodec() const { if (!send_codecs_.empty()) { return send_codecs_.front(); } - return absl::nullopt; + return std::nullopt; } const std::vector& FakeVoiceMediaSendChannel::dtmf_info_queue() const { @@ -267,7 +294,7 @@ bool FakeVoiceMediaSendChannel::HasSource(uint32_t ssrc) const { return local_sinks_.find(ssrc) != local_sinks_.end(); } bool FakeVoiceMediaSendChannel::CanInsertDtmf() { - for (std::vector::const_iterator it = send_codecs_.begin(); + for (std::vector::const_iterator it = send_codecs_.begin(); it != send_codecs_.end(); ++it) { // Find the DTMF telephone event "codec". if (absl::EqualsIgnoreCase(it->name, "telephone-event")) { @@ -288,11 +315,11 @@ bool FakeVoiceMediaSendChannel::GetOutputVolume(uint32_t ssrc, double* volume) { *volume = output_scalings_[ssrc]; return true; } -bool FakeVoiceMediaSendChannel::GetStats(VoiceMediaSendInfo* info) { +bool FakeVoiceMediaSendChannel::GetStats(VoiceMediaSendInfo* /* info */) { return false; } bool FakeVoiceMediaSendChannel::SetSendCodecs( - const std::vector& codecs) { + const std::vector& codecs) { if (fail_set_send_codecs()) { // Fake the failure in SetSendCodecs. 
return false; @@ -343,10 +370,10 @@ FakeVideoMediaSendChannel::FakeVideoMediaSendChannel( SetOptions(options); } FakeVideoMediaSendChannel::~FakeVideoMediaSendChannel() = default; -const std::vector& FakeVideoMediaSendChannel::send_codecs() const { +const std::vector& FakeVideoMediaSendChannel::send_codecs() const { return send_codecs_; } -const std::vector& FakeVideoMediaSendChannel::codecs() const { +const std::vector& FakeVideoMediaSendChannel::codecs() const { return send_codecs(); } const VideoOptions& FakeVideoMediaSendChannel::options() const { @@ -363,9 +390,9 @@ bool FakeVideoMediaSendChannel::SetSenderParameters( SetSendRtpHeaderExtensions(params.extensions) && SetMaxSendBandwidth(params.max_bandwidth_bps)); } -absl::optional FakeVideoMediaSendChannel::GetSendCodec() const { +std::optional FakeVideoMediaSendChannel::GetSendCodec() const { if (send_codecs_.empty()) { - return absl::nullopt; + return std::nullopt; } return send_codecs_[0]; } @@ -375,7 +402,7 @@ bool FakeVideoMediaSendChannel::SetSend(bool send) { bool FakeVideoMediaSendChannel::SetVideoSend( uint32_t ssrc, const VideoOptions* options, - rtc::VideoSourceInterface* source) { + VideoSourceInterface* source) { if (options) { if (!SetOptions(*options)) { return false; @@ -388,12 +415,12 @@ bool FakeVideoMediaSendChannel::HasSource(uint32_t ssrc) const { return sources_.find(ssrc) != sources_.end() && sources_.at(ssrc) != nullptr; } void FakeVideoMediaSendChannel::FillBitrateInfo( - BandwidthEstimationInfo* bwe_info) {} -bool FakeVideoMediaSendChannel::GetStats(VideoMediaSendInfo* info) { + BandwidthEstimationInfo* /* bwe_info */) {} +bool FakeVideoMediaSendChannel::GetStats(VideoMediaSendInfo* /* info */) { return false; } bool FakeVideoMediaSendChannel::SetSendCodecs( - const std::vector& codecs) { + const std::vector& codecs) { if (fail_set_send_codecs()) { // Fake the failure in SetSendCodecs. 
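// Illustrative sketch, not part of the patch: handing codecs to the fake video
// send channel and reading the selected send codec back. The caller supplies
// the network thread (for example the test's own thread), and the payload type
// and codec name are arbitrary examples.
#include <optional>

#include "api/task_queue/task_queue_base.h"
#include "media/base/codec.h"
#include "media/base/fake_media_engine.h"

bool FakeVideoSendChannelSketch(webrtc::TaskQueueBase* network_thread) {
  webrtc::FakeVideoMediaSendChannel channel(webrtc::VideoOptions(),
                                            network_thread);
  webrtc::VideoSenderParameters params;
  params.codecs.push_back(webrtc::CreateVideoCodec(111, "fake_video_codec"));
  channel.SetSenderParameters(params);
  // GetSendCodec() reports the first configured send codec, if any.
  std::optional<webrtc::Codec> send_codec = channel.GetSendCodec();
  return send_codec.has_value() && send_codec->id == 111;
}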
return false; @@ -412,8 +439,8 @@ bool FakeVideoMediaSendChannel::SetMaxSendBandwidth(int bps) { return true; } void FakeVideoMediaSendChannel::GenerateSendKeyFrame( - uint32_t ssrc, - const std::vector& rids) {} + uint32_t /* ssrc */, + const std::vector& /* rids */) {} FakeVideoMediaReceiveChannel::FakeVideoMediaReceiveChannel( const VideoOptions& options, @@ -424,8 +451,7 @@ FakeVideoMediaReceiveChannel::FakeVideoMediaReceiveChannel( SetOptions(options); } FakeVideoMediaReceiveChannel::~FakeVideoMediaReceiveChannel() = default; -const std::vector& FakeVideoMediaReceiveChannel::recv_codecs() - const { +const std::vector& FakeVideoMediaReceiveChannel::recv_codecs() const { return recv_codecs_; } bool FakeVideoMediaReceiveChannel::rendering() const { @@ -434,7 +460,7 @@ bool FakeVideoMediaReceiveChannel::rendering() const { const VideoOptions& FakeVideoMediaReceiveChannel::options() const { return options_; } -const std::map*>& +const std::map*>& FakeVideoMediaReceiveChannel::sinks() const { return sinks_; } @@ -449,7 +475,7 @@ bool FakeVideoMediaReceiveChannel::SetReceiverParameters( } bool FakeVideoMediaReceiveChannel::SetSink( uint32_t ssrc, - rtc::VideoSinkInterface* sink) { + VideoSinkInterface* sink) { auto it = sinks_.find(ssrc); if (it == sinks_.end()) { return false; @@ -458,7 +484,7 @@ bool FakeVideoMediaReceiveChannel::SetSink( return true; } void FakeVideoMediaReceiveChannel::SetDefaultSink( - rtc::VideoSinkInterface* sink) {} + VideoSinkInterface* /* sink */) {} bool FakeVideoMediaReceiveChannel::HasSink(uint32_t ssrc) const { return sinks_.find(ssrc) != sinks_.end() && sinks_.at(ssrc) != nullptr; } @@ -481,8 +507,8 @@ bool FakeVideoMediaReceiveChannel::RemoveRecvStream(uint32_t ssrc) { output_delays_.erase(ssrc); return true; } -std::vector FakeVideoMediaReceiveChannel::GetSources( - uint32_t ssrc) const { +std::vector FakeVideoMediaReceiveChannel::GetSources( + uint32_t /* ssrc */) const { return {}; } bool FakeVideoMediaReceiveChannel::SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, @@ -494,16 +520,16 @@ bool FakeVideoMediaReceiveChannel::SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, return true; } } -absl::optional FakeVideoMediaReceiveChannel::GetBaseMinimumPlayoutDelayMs( +std::optional FakeVideoMediaReceiveChannel::GetBaseMinimumPlayoutDelayMs( uint32_t ssrc) const { const auto it = output_delays_.find(ssrc); if (it != output_delays_.end()) { return it->second; } - return absl::nullopt; + return std::nullopt; } bool FakeVideoMediaReceiveChannel::SetRecvCodecs( - const std::vector& codecs) { + const std::vector& codecs) { if (fail_set_recv_codecs()) { // Fake the failure in SetRecvCodecs. 
return false; @@ -522,95 +548,95 @@ bool FakeVideoMediaReceiveChannel::SetMaxSendBandwidth(int bps) { } void FakeVideoMediaReceiveChannel::SetRecordableEncodedFrameCallback( - uint32_t ssrc, - std::function callback) {} + uint32_t /* ssrc */, + std::function /* callback */) {} void FakeVideoMediaReceiveChannel::ClearRecordableEncodedFrameCallback( - uint32_t ssrc) {} + uint32_t /* ssrc */) {} -void FakeVideoMediaReceiveChannel::RequestRecvKeyFrame(uint32_t ssrc) {} +void FakeVideoMediaReceiveChannel::RequestRecvKeyFrame(uint32_t /* ssrc */) {} -bool FakeVideoMediaReceiveChannel::GetStats(VideoMediaReceiveInfo* info) { +bool FakeVideoMediaReceiveChannel::GetStats(VideoMediaReceiveInfo* /* info */) { return false; } -FakeVoiceEngine::FakeVoiceEngine() : fail_create_channel_(false) { +FakeVoiceEngine::FakeVoiceEngine() + : encoder_factory_(webrtc::make_ref_counted(this)), + decoder_factory_( + webrtc::make_ref_counted(this)) { // Add a fake audio codec. Note that the name must not be "" as there are // sanity checks against that. - SetCodecs({cricket::CreateAudioCodec(101, "fake_audio_codec", 8000, 1)}); + SetCodecs({webrtc::CreateAudioCodec(101, "fake_audio_codec", 8000, 1)}); } void FakeVoiceEngine::Init() {} -rtc::scoped_refptr FakeVoiceEngine::GetAudioState() const { - return rtc::scoped_refptr(); +scoped_refptr FakeVoiceEngine::GetAudioState() const { + return scoped_refptr(); } std::unique_ptr -FakeVoiceEngine::CreateSendChannel(webrtc::Call* call, - const MediaConfig& config, +FakeVoiceEngine::CreateSendChannel(Call* call, + const MediaConfig& /* config */, const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::AudioCodecPairId codec_pair_id) { + const CryptoOptions& /* crypto_options */, + AudioCodecPairId /* codec_pair_id */) { std::unique_ptr ch = std::make_unique(options, call->network_thread()); return ch; } std::unique_ptr -FakeVoiceEngine::CreateReceiveChannel( - webrtc::Call* call, - const MediaConfig& config, - const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::AudioCodecPairId codec_pair_id) { +FakeVoiceEngine::CreateReceiveChannel(Call* call, + const MediaConfig& /* config */, + const AudioOptions& options, + const CryptoOptions& /* crypto_options */, + AudioCodecPairId /* codec_pair_id */) { std::unique_ptr ch = std::make_unique(options, call->network_thread()); return ch; } -const std::vector& FakeVoiceEngine::send_codecs() const { +const std::vector& FakeVoiceEngine::LegacySendCodecs() const { return send_codecs_; } -const std::vector& FakeVoiceEngine::recv_codecs() const { +const std::vector& FakeVoiceEngine::LegacyRecvCodecs() const { return recv_codecs_; } -void FakeVoiceEngine::SetCodecs(const std::vector& codecs) { +void FakeVoiceEngine::SetCodecs(const std::vector& codecs) { send_codecs_ = codecs; recv_codecs_ = codecs; } -void FakeVoiceEngine::SetRecvCodecs(const std::vector& codecs) { +void FakeVoiceEngine::SetRecvCodecs(const std::vector& codecs) { recv_codecs_ = codecs; } -void FakeVoiceEngine::SetSendCodecs(const std::vector& codecs) { +void FakeVoiceEngine::SetSendCodecs(const std::vector& codecs) { send_codecs_ = codecs; } int FakeVoiceEngine::GetInputLevel() { return 0; } -bool FakeVoiceEngine::StartAecDump(webrtc::FileWrapper file, - int64_t max_size_bytes) { +bool FakeVoiceEngine::StartAecDump(FileWrapper /* file */, + int64_t /* max_size_bytes */) { return false; } -absl::optional -FakeVoiceEngine::GetAudioDeviceStats() { - return absl::nullopt; +std::optional 
FakeVoiceEngine::GetAudioDeviceStats() { + return std::nullopt; } void FakeVoiceEngine::StopAecDump() {} -std::vector +std::vector FakeVoiceEngine::GetRtpHeaderExtensions() const { return header_extensions_; } void FakeVoiceEngine::SetRtpHeaderExtensions( - std::vector header_extensions) { + std::vector header_extensions) { header_extensions_ = std::move(header_extensions); } -FakeVideoEngine::FakeVideoEngine() - : capture_(false), fail_create_channel_(false) { +FakeVideoEngine::FakeVideoEngine() : capture_(false) { // Add a fake video codec. Note that the name must not be "" as there are // sanity checks against that. - send_codecs_.push_back(cricket::CreateVideoCodec(111, "fake_video_codec")); - recv_codecs_.push_back(cricket::CreateVideoCodec(111, "fake_video_codec")); + send_codecs_.push_back(webrtc::CreateVideoCodec(111, "fake_video_codec")); + recv_codecs_.push_back(webrtc::CreateVideoCodec(111, "fake_video_codec")); } bool FakeVideoEngine::SetOptions(const VideoOptions& options) { options_ = options; @@ -618,15 +644,11 @@ bool FakeVideoEngine::SetOptions(const VideoOptions& options) { } std::unique_ptr FakeVideoEngine::CreateSendChannel( - webrtc::Call* call, - const MediaConfig& config, + Call* call, + const MediaConfig& /* config */, const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) { - if (fail_create_channel_) { - return nullptr; - } - + const CryptoOptions& /* crypto_options */, + VideoBitrateAllocatorFactory* /* video_bitrate_allocator_factory */) { std::unique_ptr ch = std::make_unique(options, call->network_thread()); @@ -634,32 +656,38 @@ FakeVideoEngine::CreateSendChannel( } std::unique_ptr FakeVideoEngine::CreateReceiveChannel( - webrtc::Call* call, - const MediaConfig& config, + Call* call, + const MediaConfig& /* config */, const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options) { - if (fail_create_channel_) { - return nullptr; - } - + const CryptoOptions& /* crypto_options */) { std::unique_ptr ch = std::make_unique(options, call->network_thread()); return ch; } -std::vector FakeVideoEngine::send_codecs(bool use_rtx) const { - return send_codecs_; +std::vector FakeVideoEngine::LegacySendCodecs(bool use_rtx) const { + if (use_rtx) { + return send_codecs_; + } else { + std::vector non_rtx_codecs; + for (auto& codec : send_codecs_) { + if (codec.name != "rtx") { + non_rtx_codecs.push_back(codec); + } + } + return non_rtx_codecs; + } } -std::vector FakeVideoEngine::recv_codecs(bool use_rtx) const { +std::vector FakeVideoEngine::LegacyRecvCodecs(bool /* use_rtx */) const { return recv_codecs_; } -void FakeVideoEngine::SetSendCodecs(const std::vector& codecs) { +void FakeVideoEngine::SetSendCodecs(const std::vector& codecs) { send_codecs_ = codecs; } -void FakeVideoEngine::SetRecvCodecs(const std::vector& codecs) { +void FakeVideoEngine::SetRecvCodecs(const std::vector& codecs) { recv_codecs_ = codecs; } @@ -667,12 +695,12 @@ bool FakeVideoEngine::SetCapture(bool capture) { capture_ = capture; return true; } -std::vector +std::vector FakeVideoEngine::GetRtpHeaderExtensions() const { return header_extensions_; } void FakeVideoEngine::SetRtpHeaderExtensions( - std::vector header_extensions) { + std::vector header_extensions) { header_extensions_ = std::move(header_extensions); } @@ -682,24 +710,24 @@ FakeMediaEngine::FakeMediaEngine() voice_(static_cast(&voice())), video_(static_cast(&video())) {} FakeMediaEngine::~FakeMediaEngine() {} -void 
FakeMediaEngine::SetAudioCodecs(const std::vector& codecs) { +void FakeMediaEngine::SetAudioCodecs(const std::vector& codecs) { voice_->SetCodecs(codecs); } -void FakeMediaEngine::SetAudioRecvCodecs( - const std::vector& codecs) { +void FakeMediaEngine::SetAudioRecvCodecs(const std::vector& codecs) { voice_->SetRecvCodecs(codecs); } -void FakeMediaEngine::SetAudioSendCodecs( - const std::vector& codecs) { +void FakeMediaEngine::SetAudioSendCodecs(const std::vector& codecs) { voice_->SetSendCodecs(codecs); } -void FakeMediaEngine::SetVideoCodecs(const std::vector& codecs) { +void FakeMediaEngine::SetVideoCodecs(const std::vector& codecs) { video_->SetSendCodecs(codecs); video_->SetRecvCodecs(codecs); } -void FakeMediaEngine::set_fail_create_channel(bool fail) { - voice_->fail_create_channel_ = fail; - video_->fail_create_channel_ = fail; +void FakeMediaEngine::SetVideoRecvCodecs(const std::vector& codecs) { + video_->SetRecvCodecs(codecs); +} +void FakeMediaEngine::SetVideoSendCodecs(const std::vector& codecs) { + video_->SetSendCodecs(codecs); } -} // namespace cricket +} // namespace webrtc diff --git a/media/base/fake_media_engine.h b/media/base/fake_media_engine.h index 51828c3535..132205817c 100644 --- a/media/base/fake_media_engine.h +++ b/media/base/fake_media_engine.h @@ -12,35 +12,67 @@ #define MEDIA_BASE_FAKE_MEDIA_ENGINE_H_ #include +#include +#include +#include #include #include #include +#include #include #include -#include #include #include #include "absl/algorithm/container.h" +#include "absl/base/nullability.h" #include "absl/functional/any_invocable.h" +#include "absl/strings/string_view.h" +#include "api/audio/audio_device.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/audio_codecs/audio_format.h" +#include "api/audio_options.h" #include "api/call/audio_sink.h" +#include "api/crypto/crypto_options.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/crypto/frame_encryptor_interface.h" +#include "api/environment/environment.h" +#include "api/frame_transformer_interface.h" #include "api/media_types.h" +#include "api/rtc_error.h" +#include "api/rtp_headers.h" +#include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" +#include "api/transport/rtp/rtp_source.h" +#include "api/video/recordable_encoded_frame.h" +#include "api/video/video_bitrate_allocator_factory.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" +#include "call/audio_state.h" #include "media/base/audio_source.h" +#include "media/base/codec.h" #include "media/base/media_channel.h" #include "media/base/media_channel_impl.h" +#include "media/base/media_config.h" #include "media/base/media_engine.h" #include "media/base/rtp_utils.h" #include "media/base/stream_params.h" -#include "media/engine/webrtc_video_engine.h" -#include "modules/audio_processing/include/audio_processing.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" -#include "rtc_base/thread.h" +#include "rtc_base/system/file_wrapper.h" +#include "test/explicit_key_value_config.h" -using 
webrtc::RtpExtension; - -namespace cricket { +namespace webrtc { class FakeMediaEngine; class FakeVideoEngine; @@ -50,7 +82,7 @@ class FakeVoiceEngine; template class RtpReceiveChannelHelper : public Base, public MediaChannelUtil { public: - explicit RtpReceiveChannelHelper(webrtc::TaskQueueBase* network_thread) + explicit RtpReceiveChannelHelper(TaskQueueBase* network_thread) : MediaChannelUtil(network_thread), playout_(false), fail_set_recv_codecs_(false), @@ -65,9 +97,9 @@ class RtpReceiveChannelHelper : public Base, public MediaChannelUtil { const std::list& rtcp_packets() const { return rtcp_packets_; } bool SendRtcp(const void* data, size_t len) { - rtc::CopyOnWriteBuffer packet(reinterpret_cast(data), len, - kMaxRtpPacketLen); - return Base::SendRtcp(&packet, rtc::PacketOptions()); + CopyOnWriteBuffer packet(reinterpret_cast(data), len, + kMaxRtpPacketLen); + return Base::SendRtcp(&packet, AsyncSocketPacketOptions()); } bool CheckRtp(const void* data, size_t len) { @@ -92,12 +124,13 @@ class RtpReceiveChannelHelper : public Base, public MediaChannelUtil { bool CheckNoRtcp() { return rtcp_packets_.empty(); } void set_fail_set_recv_codecs(bool fail) { fail_set_recv_codecs_ = fail; } void ResetUnsignaledRecvStream() override {} - absl::optional GetUnsignaledSsrc() const override { - return absl::nullopt; + std::optional GetUnsignaledSsrc() const override { + return std::nullopt; } - void ChooseReceiverReportSsrc(const std::set& choices) override {} + void ChooseReceiverReportSsrc( + const std::set& /* choices */) override {} - virtual bool SetLocalSsrc(const StreamParams& sp) { return true; } + virtual bool SetLocalSsrc(const StreamParams& /* sp */) { return true; } void OnDemuxerCriteriaUpdatePending() override {} void OnDemuxerCriteriaUpdateComplete() override {} @@ -118,48 +151,50 @@ class RtpReceiveChannelHelper : public Base, public MediaChannelUtil { return RemoveStreamBySsrc(&receive_streams_, ssrc); } - webrtc::RtpParameters GetRtpReceiverParameters(uint32_t ssrc) const override { + RtpParameters GetRtpReceiverParameters(uint32_t ssrc) const override { auto parameters_iterator = rtp_receive_parameters_.find(ssrc); if (parameters_iterator != rtp_receive_parameters_.end()) { return parameters_iterator->second; } - return webrtc::RtpParameters(); + return RtpParameters(); } - webrtc::RtpParameters GetDefaultRtpReceiveParameters() const override { - return webrtc::RtpParameters(); + RtpParameters GetDefaultRtpReceiveParameters() const override { + return RtpParameters(); } - const std::vector& recv_streams() const { + const std::vector& recv_streams() const { return receive_streams_; } bool HasRecvStream(uint32_t ssrc) const { return GetStreamBySsrc(receive_streams_, ssrc) != nullptr; } - const RtcpParameters& recv_rtcp_parameters() { return recv_rtcp_parameters_; } + const MediaChannelParameters::RtcpParameters& recv_rtcp_parameters() { + return recv_rtcp_parameters_; + } int transport_overhead_per_packet() const { return transport_overhead_per_packet_; } - rtc::NetworkRoute last_network_route() const { return last_network_route_; } + NetworkRoute last_network_route() const { return last_network_route_; } int num_network_route_changes() const { return num_network_route_changes_; } void set_num_network_route_changes(int changes) { num_network_route_changes_ = changes; } - void OnRtcpPacketReceived(rtc::CopyOnWriteBuffer* packet, - int64_t packet_time_us) { + void OnRtcpPacketReceived(CopyOnWriteBuffer* packet, + int64_t /* packet_time_us */) { 
rtcp_packets_.push_back(std::string(packet->cdata(), packet->size())); } - void SetFrameDecryptor(uint32_t ssrc, - rtc::scoped_refptr - frame_decryptor) override {} + void SetFrameDecryptor(uint32_t /* ssrc */, + scoped_refptr + /* frame_decryptor */) override {} void SetDepacketizerToDecoderFrameTransformer( - uint32_t ssrc, - rtc::scoped_refptr frame_transformer) + uint32_t /* ssrc */, + scoped_refptr /* frame_transformer */) override {} void SetInterface(MediaChannelNetworkInterface* iface) override { @@ -173,10 +208,11 @@ class RtpReceiveChannelHelper : public Base, public MediaChannelUtil { recv_extensions_ = extensions; return true; } - void set_recv_rtcp_parameters(const RtcpParameters& params) { + void set_recv_rtcp_parameters( + const MediaChannelParameters::RtcpParameters& params) { recv_rtcp_parameters_ = params; } - void OnPacketReceived(const webrtc::RtpPacketReceived& packet) override { + void OnPacketReceived(const RtpPacketReceived& packet) override { rtp_packets_.push_back( std::string(packet.Buffer().cdata(), packet.size())); } @@ -188,12 +224,12 @@ class RtpReceiveChannelHelper : public Base, public MediaChannelUtil { std::list rtp_packets_; std::list rtcp_packets_; std::vector receive_streams_; - RtcpParameters recv_rtcp_parameters_; - std::map rtp_receive_parameters_; + MediaChannelParameters::RtcpParameters recv_rtcp_parameters_; + std::map rtp_receive_parameters_; bool fail_set_recv_codecs_; std::string rtcp_cname_; int transport_overhead_per_packet_; - rtc::NetworkRoute last_network_route_; + NetworkRoute last_network_route_; int num_network_route_changes_; MediaChannelNetworkInterface* network_interface_ = nullptr; }; @@ -202,7 +238,7 @@ class RtpReceiveChannelHelper : public Base, public MediaChannelUtil { template class RtpSendChannelHelper : public Base, public MediaChannelUtil { public: - explicit RtpSendChannelHelper(webrtc::TaskQueueBase* network_thread) + explicit RtpSendChannelHelper(TaskQueueBase* network_thread) : MediaChannelUtil(network_thread), sending_(false), fail_set_send_codecs_(false), @@ -220,18 +256,18 @@ class RtpSendChannelHelper : public Base, public MediaChannelUtil { bool SendPacket(const void* data, size_t len, - const rtc::PacketOptions& options) { + const AsyncSocketPacketOptions& options) { if (!sending_) { return false; } - rtc::CopyOnWriteBuffer packet(reinterpret_cast(data), len, - kMaxRtpPacketLen); + CopyOnWriteBuffer packet(reinterpret_cast(data), len, + kMaxRtpPacketLen); return MediaChannelUtil::SendPacket(&packet, options); } bool SendRtcp(const void* data, size_t len) { - rtc::CopyOnWriteBuffer packet(reinterpret_cast(data), len, - kMaxRtpPacketLen); - return MediaChannelUtil::SendRtcp(&packet, rtc::PacketOptions()); + CopyOnWriteBuffer packet(reinterpret_cast(data), len, + kMaxRtpPacketLen); + return MediaChannelUtil::SendRtcp(&packet, AsyncSocketPacketOptions()); } bool CheckRtp(const void* data, size_t len) { @@ -292,34 +328,33 @@ class RtpSendChannelHelper : public Base, public MediaChannelUtil { return MediaChannelUtil::ExtmapAllowMixed(); } - webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override { + RtpParameters GetRtpSendParameters(uint32_t ssrc) const override { auto parameters_iterator = rtp_send_parameters_.find(ssrc); if (parameters_iterator != rtp_send_parameters_.end()) { return parameters_iterator->second; } - return webrtc::RtpParameters(); + return RtpParameters(); } - webrtc::RTCError SetRtpSendParameters( - uint32_t ssrc, - const webrtc::RtpParameters& parameters, - 
webrtc::SetParametersCallback callback) override { + RTCError SetRtpSendParameters(uint32_t ssrc, + const RtpParameters& parameters, + SetParametersCallback callback) override { auto parameters_iterator = rtp_send_parameters_.find(ssrc); if (parameters_iterator != rtp_send_parameters_.end()) { auto result = CheckRtpParametersInvalidModificationAndValues( - parameters_iterator->second, parameters); + parameters_iterator->second, parameters, + test::ExplicitKeyValueConfig("")); if (!result.ok()) { return webrtc::InvokeSetParametersCallback(callback, result); } parameters_iterator->second = parameters; - return webrtc::InvokeSetParametersCallback(callback, - webrtc::RTCError::OK()); + return webrtc::InvokeSetParametersCallback(callback, RTCError::OK()); } // Replicate the behavior of the real media channel: return false // when setting parameters for unknown SSRCs. - return InvokeSetParametersCallback( - callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); + return InvokeSetParametersCallback(callback, + RTCError(RTCErrorType::INTERNAL_ERROR)); } bool IsStreamMuted(uint32_t ssrc) const { @@ -331,7 +366,7 @@ class RtpSendChannelHelper : public Base, public MediaChannelUtil { } return ret; } - const std::vector& send_streams() const { + const std::vector& send_streams() const { return send_streams_; } bool HasSendStream(uint32_t ssrc) const { @@ -345,7 +380,9 @@ class RtpSendChannelHelper : public Base, public MediaChannelUtil { return send_streams_[0].first_ssrc(); } - const RtcpParameters& send_rtcp_parameters() { return send_rtcp_parameters_; } + const MediaChannelParameters::RtcpParameters& send_rtcp_parameters() { + return send_rtcp_parameters_; + } bool ready_to_send() const { return ready_to_send_; } @@ -353,24 +390,24 @@ class RtpSendChannelHelper : public Base, public MediaChannelUtil { return transport_overhead_per_packet_; } - rtc::NetworkRoute last_network_route() const { return last_network_route_; } + NetworkRoute last_network_route() const { return last_network_route_; } int num_network_route_changes() const { return num_network_route_changes_; } void set_num_network_route_changes(int changes) { num_network_route_changes_ = changes; } - void OnRtcpPacketReceived(rtc::CopyOnWriteBuffer* packet, - int64_t packet_time_us) { + void OnRtcpPacketReceived(CopyOnWriteBuffer* packet, + int64_t /* packet_time_us */) { rtcp_packets_.push_back(std::string(packet->cdata(), packet->size())); } // Stuff that deals with encryptors, transformers and the like - void SetFrameEncryptor(uint32_t ssrc, - rtc::scoped_refptr - frame_encryptor) override {} + void SetFrameEncryptor(uint32_t /* ssrc */, + scoped_refptr + /* frame_encryptor */) override {} void SetEncoderToPacketizerFrameTransformer( - uint32_t ssrc, - rtc::scoped_refptr frame_transformer) + uint32_t /* ssrc */, + scoped_refptr /* frame_transformer */) override {} void SetInterface(MediaChannelNetworkInterface* iface) override { @@ -401,13 +438,14 @@ class RtpSendChannelHelper : public Base, public MediaChannelUtil { send_extensions_ = extensions; return true; } - void set_send_rtcp_parameters(const RtcpParameters& params) { + void set_send_rtcp_parameters( + const MediaChannelParameters::RtcpParameters& params) { send_rtcp_parameters_ = params; } - void OnPacketSent(const rtc::SentPacket& sent_packet) override {} + void OnPacketSent(const SentPacketInfo& /* sent_packet */) override {} void OnReadyToSend(bool ready) override { ready_to_send_ = ready; } - void OnNetworkRouteChanged(absl::string_view transport_name, - const 
rtc::NetworkRoute& network_route) override { + void OnNetworkRouteChanged(absl::string_view /* transport_name */, + const NetworkRoute& network_route) override { last_network_route_ = network_route; ++num_network_route_changes_; transport_overhead_per_packet_ = network_route.packet_overhead; @@ -423,15 +461,15 @@ class RtpSendChannelHelper : public Base, public MediaChannelUtil { std::list rtp_packets_; std::list rtcp_packets_; std::vector send_streams_; - RtcpParameters send_rtcp_parameters_; + MediaChannelParameters::RtcpParameters send_rtcp_parameters_; std::set muted_streams_; - std::map rtp_send_parameters_; + std::map rtp_send_parameters_; bool fail_set_send_codecs_; uint32_t send_ssrc_; std::string rtcp_cname_; bool ready_to_send_; int transport_overhead_per_packet_; - rtc::NetworkRoute last_network_route_; + NetworkRoute last_network_route_; int num_network_route_changes_; MediaChannelNetworkInterface* network_interface_ = nullptr; absl::AnyInvocable&)> @@ -448,11 +486,11 @@ class FakeVoiceMediaReceiveChannel int duration; }; FakeVoiceMediaReceiveChannel(const AudioOptions& options, - webrtc::TaskQueueBase* network_thread); + TaskQueueBase* network_thread); virtual ~FakeVoiceMediaReceiveChannel(); // Test methods - const std::vector& recv_codecs() const; + const std::vector& recv_codecs() const; const std::vector& dtmf_info_queue() const; const AudioOptions& options() const; int max_bps() const; @@ -465,9 +503,7 @@ class FakeVoiceMediaReceiveChannel VoiceMediaReceiveChannelInterface* AsVoiceReceiveChannel() override { return this; } - cricket::MediaType media_type() const override { - return cricket::MEDIA_TYPE_AUDIO; - } + MediaType media_type() const override { return MediaType::AUDIO; } bool SetReceiverParameters(const AudioReceiverParameters& params) override; void SetPlayout(bool playout) override; @@ -481,21 +517,21 @@ class FakeVoiceMediaReceiveChannel bool GetOutputVolume(uint32_t ssrc, double* volume); bool SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) override; - absl::optional GetBaseMinimumPlayoutDelayMs( - uint32_t ssrc) const override; + std::optional GetBaseMinimumPlayoutDelayMs(uint32_t ssrc) const override; bool GetStats(VoiceMediaReceiveInfo* info, bool get_and_clear_legacy_stats) override; - void SetRawAudioSink( - uint32_t ssrc, - std::unique_ptr sink) override; + void SetRawAudioSink(uint32_t ssrc, + std::unique_ptr sink) override; void SetDefaultRawAudioSink( - std::unique_ptr sink) override; + std::unique_ptr sink) override; - std::vector GetSources(uint32_t ssrc) const override; - void SetReceiveNackEnabled(bool enabled) override {} - void SetReceiveNonSenderRttEnabled(bool enabled) override {} + ::webrtc::RtcpMode RtcpMode() const override { return recv_rtcp_mode_; } + void SetRtcpMode(::webrtc::RtcpMode mode) override { recv_rtcp_mode_ = mode; } + std::vector GetSources(uint32_t ssrc) const override; + void SetReceiveNackEnabled(bool /* enabled */) override {} + void SetReceiveNonSenderRttEnabled(bool /* enabled */) override {} private: class VoiceChannelAudioSink : public AudioSource::Sink { @@ -507,7 +543,7 @@ class FakeVoiceMediaReceiveChannel int sample_rate, size_t number_of_channels, size_t number_of_frames, - absl::optional absolute_capture_timestamp_ms) override; + std::optional absolute_capture_timestamp_ms) override; void OnClose() override; int NumPreferredChannels() const override { return -1; } AudioSource* source() const; @@ -516,18 +552,19 @@ class FakeVoiceMediaReceiveChannel AudioSource* source_; }; - bool SetRecvCodecs(const 
std::vector& codecs); + bool SetRecvCodecs(const std::vector& codecs); bool SetMaxSendBandwidth(int bps); bool SetOptions(const AudioOptions& options); - std::vector recv_codecs_; + std::vector recv_codecs_; std::map output_scalings_; std::map output_delays_; std::vector dtmf_info_queue_; AudioOptions options_; std::map> local_sinks_; - std::unique_ptr sink_; + std::unique_ptr sink_; int max_bps_; + ::webrtc::RtcpMode recv_rtcp_mode_ = RtcpMode::kCompound; }; class FakeVoiceMediaSendChannel @@ -540,10 +577,10 @@ class FakeVoiceMediaSendChannel int duration; }; FakeVoiceMediaSendChannel(const AudioOptions& options, - webrtc::TaskQueueBase* network_thread); + TaskQueueBase* network_thread); ~FakeVoiceMediaSendChannel() override; - const std::vector& send_codecs() const; + const std::vector& send_codecs() const; const std::vector& dtmf_info_queue() const; const AudioOptions& options() const; int max_bps() const; @@ -555,9 +592,7 @@ class FakeVoiceMediaSendChannel return nullptr; } VoiceMediaSendChannelInterface* AsVoiceSendChannel() override { return this; } - cricket::MediaType media_type() const override { - return cricket::MEDIA_TYPE_AUDIO; - } + MediaType media_type() const override { return MediaType::AUDIO; } bool SetSenderParameters(const AudioSenderParameter& params) override; void SetSend(bool send) override; @@ -571,12 +606,12 @@ class FakeVoiceMediaSendChannel bool SenderNackEnabled() const override { return false; } bool SenderNonSenderRttEnabled() const override { return false; } - void SetReceiveNackEnabled(bool enabled) {} - void SetReceiveNonSenderRttEnabled(bool enabled) {} + void SetReceiveNackEnabled(bool /* enabled */) {} + void SetReceiveNonSenderRttEnabled(bool /* enabled */) {} bool SendCodecHasNack() const override { return false; } void SetSendCodecChangedCallback( - absl::AnyInvocable callback) override {} - absl::optional GetSendCodec() const override; + absl::AnyInvocable /* callback */) override {} + std::optional GetSendCodec() const override; bool GetStats(VoiceMediaSendInfo* stats) override; @@ -590,7 +625,7 @@ class FakeVoiceMediaSendChannel int sample_rate, size_t number_of_channels, size_t number_of_frames, - absl::optional absolute_capture_timestamp_ms) override; + std::optional absolute_capture_timestamp_ms) override; void OnClose() override; int NumPreferredChannels() const override { return -1; } AudioSource* source() const; @@ -599,12 +634,12 @@ class FakeVoiceMediaSendChannel AudioSource* source_; }; - bool SetSendCodecs(const std::vector& codecs); + bool SetSendCodecs(const std::vector& codecs); bool SetMaxSendBandwidth(int bps); bool SetOptions(const AudioOptions& options); bool SetLocalSource(uint32_t ssrc, AudioSource* source); - std::vector send_codecs_; + std::vector send_codecs_; std::map output_scalings_; std::map output_delays_; std::vector dtmf_info_queue_; @@ -623,7 +658,7 @@ class FakeVideoMediaReceiveChannel : public RtpReceiveChannelHelper { public: FakeVideoMediaReceiveChannel(const VideoOptions& options, - webrtc::TaskQueueBase* network_thread); + TaskQueueBase* network_thread); virtual ~FakeVideoMediaReceiveChannel(); @@ -633,63 +668,57 @@ class FakeVideoMediaReceiveChannel VoiceMediaReceiveChannelInterface* AsVoiceReceiveChannel() override { return nullptr; } - cricket::MediaType media_type() const override { - return cricket::MEDIA_TYPE_VIDEO; - } + MediaType media_type() const override { return MediaType::VIDEO; } - const std::vector& recv_codecs() const; - const std::vector& send_codecs() const; + const std::vector& recv_codecs() 
const; + const std::vector& send_codecs() const; bool rendering() const; const VideoOptions& options() const; - const std::map*>& - sinks() const; + const std::map*>& sinks() const; int max_bps() const; bool SetReceiverParameters(const VideoReceiverParameters& params) override; - bool SetSink(uint32_t ssrc, - rtc::VideoSinkInterface* sink) override; - void SetDefaultSink( - rtc::VideoSinkInterface* sink) override; + bool SetSink(uint32_t ssrc, VideoSinkInterface* sink) override; + void SetDefaultSink(VideoSinkInterface* sink) override; bool HasSink(uint32_t ssrc) const; - void SetReceive(bool receive) override {} + void SetReceive(bool /* receive */) override {} bool HasSource(uint32_t ssrc) const; bool AddRecvStream(const StreamParams& sp) override; bool RemoveRecvStream(uint32_t ssrc) override; - std::vector GetSources(uint32_t ssrc) const override; + std::vector GetSources(uint32_t ssrc) const override; bool SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) override; - absl::optional GetBaseMinimumPlayoutDelayMs( - uint32_t ssrc) const override; + std::optional GetBaseMinimumPlayoutDelayMs(uint32_t ssrc) const override; void SetRecordableEncodedFrameCallback( uint32_t ssrc, - std::function callback) - override; + std::function callback) override; void ClearRecordableEncodedFrameCallback(uint32_t ssrc) override; void RequestRecvKeyFrame(uint32_t ssrc) override; - void SetReceiverFeedbackParameters(bool lntf_enabled, - bool nack_enabled, - webrtc::RtcpMode rtcp_mode, - absl::optional rtx_time) override {} + void SetReceiverFeedbackParameters( + bool /* lntf_enabled */, + bool /* nack_enabled */, + RtcpMode /* rtcp_mode */, + std::optional /* rtx_time */) override {} bool GetStats(VideoMediaReceiveInfo* info) override; - bool AddDefaultRecvStreamForTesting(const StreamParams& sp) override { + bool AddDefaultRecvStreamForTesting(const StreamParams& /* sp */) override { RTC_CHECK_NOTREACHED(); return false; } private: - bool SetRecvCodecs(const std::vector& codecs); - bool SetSendCodecs(const std::vector& codecs); + bool SetRecvCodecs(const std::vector& codecs); + bool SetSendCodecs(const std::vector& codecs); bool SetOptions(const VideoOptions& options); bool SetMaxSendBandwidth(int bps); - std::vector recv_codecs_; - std::map*> sinks_; - std::map*> sources_; + std::vector recv_codecs_; + std::map*> sinks_; + std::map*> sources_; std::map output_delays_; VideoOptions options_; int max_bps_; @@ -699,7 +728,7 @@ class FakeVideoMediaSendChannel : public RtpSendChannelHelper { public: FakeVideoMediaSendChannel(const VideoOptions& options, - webrtc::TaskQueueBase* network_thread); + TaskQueueBase* network_thread); virtual ~FakeVideoMediaSendChannel(); @@ -707,25 +736,21 @@ class FakeVideoMediaSendChannel VoiceMediaSendChannelInterface* AsVoiceSendChannel() override { return nullptr; } - cricket::MediaType media_type() const override { - return cricket::MEDIA_TYPE_VIDEO; - } + MediaType media_type() const override { return MediaType::VIDEO; } - const std::vector& send_codecs() const; - const std::vector& codecs() const; + const std::vector& send_codecs() const; + const std::vector& codecs() const; const VideoOptions& options() const; - const std::map*>& - sinks() const; + const std::map*>& sinks() const; int max_bps() const; bool SetSenderParameters(const VideoSenderParameters& params) override; - absl::optional GetSendCodec() const override; + std::optional GetSendCodec() const override; bool SetSend(bool send) override; - bool SetVideoSend( - uint32_t ssrc, - const VideoOptions* 
options, - rtc::VideoSourceInterface* source) override; + bool SetVideoSend(uint32_t ssrc, + const VideoOptions* options, + VideoSourceInterface* source) override; bool HasSource(uint32_t ssrc) const; @@ -733,29 +758,25 @@ class FakeVideoMediaSendChannel void GenerateSendKeyFrame(uint32_t ssrc, const std::vector& rids) override; - webrtc::RtcpMode SendCodecRtcpMode() const override { - return webrtc::RtcpMode::kCompound; - } + RtcpMode SendCodecRtcpMode() const override { return RtcpMode::kCompound; } void SetSendCodecChangedCallback( - absl::AnyInvocable callback) override {} + absl::AnyInvocable /* callback */) override {} void SetSsrcListChangedCallback( - absl::AnyInvocable&)> callback) override {} + absl::AnyInvocable&)> /* callback */) + override {} - void SetVideoCodecSwitchingEnabled(bool enabled) override {} bool SendCodecHasLntf() const override { return false; } bool SendCodecHasNack() const override { return false; } - absl::optional SendCodecRtxTime() const override { - return absl::nullopt; - } + std::optional SendCodecRtxTime() const override { return std::nullopt; } bool GetStats(VideoMediaSendInfo* info) override; private: - bool SetSendCodecs(const std::vector& codecs); + bool SetSendCodecs(const std::vector& codecs); bool SetOptions(const VideoOptions& options); bool SetMaxSendBandwidth(int bps); - std::vector send_codecs_; - std::map*> sources_; + std::vector send_codecs_; + std::map*> sources_; VideoOptions options_; int max_bps_; }; @@ -764,43 +785,106 @@ class FakeVoiceEngine : public VoiceEngineInterface { public: FakeVoiceEngine(); void Init() override; - rtc::scoped_refptr GetAudioState() const override; + scoped_refptr GetAudioState() const override; std::unique_ptr CreateSendChannel( - webrtc::Call* call, + Call* call, const MediaConfig& config, const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::AudioCodecPairId codec_pair_id) override; + const CryptoOptions& crypto_options, + AudioCodecPairId codec_pair_id) override; std::unique_ptr CreateReceiveChannel( - webrtc::Call* call, + Call* call, const MediaConfig& config, const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::AudioCodecPairId codec_pair_id) override; + const CryptoOptions& crypto_options, + AudioCodecPairId codec_pair_id) override; // TODO(ossu): For proper testing, These should either individually settable // or the voice engine should reference mockable factories. - const std::vector& send_codecs() const override; - const std::vector& recv_codecs() const override; - void SetCodecs(const std::vector& codecs); - void SetRecvCodecs(const std::vector& codecs); - void SetSendCodecs(const std::vector& codecs); + // TODO: https://issues.webrtc.org/360058654 - stop faking codecs here. 
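For context, a minimal sketch of how a test might exercise the fake codec plumbing in this class: the SetSendCodecs/SetRecvCodecs setters above and the LegacySendCodecs()/encoder_factory() accessors declared just below. This is not part of the patch; it assumes the elided element type of the codec vectors is webrtc::Codec (the factories below read name/clockrate/channels/bitrate from it) and that the test links the usual gtest target.

#include "media/base/fake_media_engine.h"
#include "test/gtest.h"

namespace webrtc {
namespace {

TEST(FakeVoiceEngineSketch, PushedSendCodecsShowUpInEncoderFactory) {
  FakeVoiceEngine engine;
  // A real test would populate this with concrete (hypothetical) codec
  // entries; the properties checked below hold for whatever list is pushed.
  std::vector<Codec> codecs;
  engine.SetSendCodecs(codecs);
  // The legacy accessor reflects exactly what was pushed in...
  EXPECT_EQ(codecs.size(), engine.LegacySendCodecs().size());
  // ...and the fake encoder factory re-maps the same list into
  // AudioCodecSpec entries, one per pushed codec.
  EXPECT_EQ(codecs.size(),
            engine.encoder_factory()->GetSupportedEncoders().size());
}

}  // namespace
}  // namespace webrtc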
+ const std::vector& LegacySendCodecs() const override; + const std::vector& LegacyRecvCodecs() const override; + AudioEncoderFactory* encoder_factory() const override { + return encoder_factory_.get(); + } + AudioDecoderFactory* decoder_factory() const override { + return decoder_factory_.get(); + } + void SetCodecs(const std::vector& codecs); + void SetRecvCodecs(const std::vector& codecs); + void SetSendCodecs(const std::vector& codecs); int GetInputLevel(); - bool StartAecDump(webrtc::FileWrapper file, int64_t max_size_bytes) override; + bool StartAecDump(FileWrapper file, int64_t max_size_bytes) override; void StopAecDump() override; - absl::optional GetAudioDeviceStats() - override; - std::vector GetRtpHeaderExtensions() + std::optional GetAudioDeviceStats() override; + std::vector GetRtpHeaderExtensions() const override; void SetRtpHeaderExtensions( - std::vector header_extensions); + std::vector header_extensions); private: - std::vector recv_codecs_; - std::vector send_codecs_; - bool fail_create_channel_; - std::vector header_extensions_; + class FakeVoiceEncoderFactory : public AudioEncoderFactory { + public: + explicit FakeVoiceEncoderFactory(FakeVoiceEngine* owner) : owner_(owner) {} + std::vector GetSupportedEncoders() override { + // The reason for this convoluted mapping is because there are + // too many tests that expect to push codecs into the fake voice + // engine's "send_codecs/recv_codecs" and have them show up later. + std::vector specs; + for (const auto& codec : owner_->send_codecs_) { + specs.push_back( + AudioCodecSpec{{codec.name, codec.clockrate, codec.channels}, + {codec.clockrate, codec.channels, codec.bitrate}}); + } + return specs; + } + std::optional QueryAudioEncoder( + const SdpAudioFormat& format) override { + return std::nullopt; + } + absl_nullable std::unique_ptr Create( + const Environment& env, + const SdpAudioFormat& format, + Options options) override { + return nullptr; + } + FakeVoiceEngine* owner_; + }; + class FakeVoiceDecoderFactory : public AudioDecoderFactory { + public: + explicit FakeVoiceDecoderFactory(FakeVoiceEngine* owner) : owner_(owner) {} + std::vector GetSupportedDecoders() override { + // The reason for this convoluted mapping is because there are + // too many tests that expect to push codecs into the fake voice + // engine's "send_codecs/recv_codecs" and have them show up later. 
+ std::vector specs; + for (const auto& codec : owner_->recv_codecs_) { + specs.push_back( + AudioCodecSpec{{codec.name, codec.clockrate, codec.channels}, + {codec.clockrate, codec.channels, codec.bitrate}}); + } + return specs; + } + bool IsSupportedDecoder(const SdpAudioFormat& format) override { + return false; + } + absl_nullable std::unique_ptr Create( + const Environment& env, + const SdpAudioFormat& format, + std::optional codec_pair_id) override { + return nullptr; + } + + private: + FakeVoiceEngine* owner_; + }; + + std::vector recv_codecs_; + std::vector send_codecs_; + scoped_refptr encoder_factory_; + scoped_refptr decoder_factory_; + std::vector header_extensions_; friend class FakeMediaEngine; }; @@ -810,43 +894,41 @@ class FakeVideoEngine : public VideoEngineInterface { FakeVideoEngine(); bool SetOptions(const VideoOptions& options); std::unique_ptr CreateSendChannel( - webrtc::Call* call, + Call* call, const MediaConfig& config, const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) - override; + const CryptoOptions& crypto_options, + VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) override; std::unique_ptr CreateReceiveChannel( - webrtc::Call* call, + Call* call, const MediaConfig& config, const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options) override; + const CryptoOptions& crypto_options) override; FakeVideoMediaSendChannel* GetSendChannel(size_t index); FakeVideoMediaReceiveChannel* GetReceiveChannel(size_t index); - std::vector send_codecs() const override { - return send_codecs(true); + std::vector LegacySendCodecs() const override { + return LegacySendCodecs(true); } - std::vector recv_codecs() const override { - return recv_codecs(true); + std::vector LegacyRecvCodecs() const override { + return LegacyRecvCodecs(true); } - std::vector send_codecs(bool include_rtx) const override; - std::vector recv_codecs(bool include_rtx) const override; - void SetSendCodecs(const std::vector& codecs); - void SetRecvCodecs(const std::vector& codecs); + std::vector LegacySendCodecs(bool include_rtx) const override; + std::vector LegacyRecvCodecs(bool include_rtx) const override; + void SetSendCodecs(const std::vector& codecs); + void SetRecvCodecs(const std::vector& codecs); bool SetCapture(bool capture); - std::vector GetRtpHeaderExtensions() + std::vector GetRtpHeaderExtensions() const override; void SetRtpHeaderExtensions( - std::vector header_extensions); + std::vector header_extensions); private: - std::vector send_codecs_; - std::vector recv_codecs_; + std::vector send_codecs_; + std::vector recv_codecs_; bool capture_; VideoOptions options_; - bool fail_create_channel_; - std::vector header_extensions_; + std::vector header_extensions_; friend class FakeMediaEngine; }; @@ -857,12 +939,12 @@ class FakeMediaEngine : public CompositeMediaEngine { ~FakeMediaEngine() override; - void SetAudioCodecs(const std::vector& codecs); - void SetAudioRecvCodecs(const std::vector& codecs); - void SetAudioSendCodecs(const std::vector& codecs); - void SetVideoCodecs(const std::vector& codecs); - - void set_fail_create_channel(bool fail); + void SetAudioCodecs(const std::vector& codecs); + void SetAudioRecvCodecs(const std::vector& codecs); + void SetAudioSendCodecs(const std::vector& codecs); + void SetVideoCodecs(const std::vector& codecs); + void SetVideoRecvCodecs(const std::vector& codecs); + void SetVideoSendCodecs(const std::vector& codecs); FakeVoiceEngine* 
fake_voice_engine() { return voice_; } FakeVideoEngine* fake_video_engine() { return video_; } @@ -872,6 +954,23 @@ class FakeMediaEngine : public CompositeMediaEngine { FakeVideoEngine* const video_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::CompareDtmfInfo; +using ::webrtc::FakeMediaEngine; +using ::webrtc::FakeVideoEngine; +using ::webrtc::FakeVideoMediaReceiveChannel; +using ::webrtc::FakeVideoMediaSendChannel; +using ::webrtc::FakeVoiceEngine; +using ::webrtc::FakeVoiceMediaReceiveChannel; +using ::webrtc::FakeVoiceMediaSendChannel; +using ::webrtc::RtpReceiveChannelHelper; +using ::webrtc::RtpSendChannelHelper; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_FAKE_MEDIA_ENGINE_H_ diff --git a/media/base/fake_network_interface.h b/media/base/fake_network_interface.h index d0763fe533..1ef126ffa7 100644 --- a/media/base/fake_network_interface.h +++ b/media/base/fake_network_interface.h @@ -11,6 +11,8 @@ #ifndef MEDIA_BASE_FAKE_NETWORK_INTERFACE_H_ #define MEDIA_BASE_FAKE_NETWORK_INTERFACE_H_ +#include +#include #include #include #include @@ -18,30 +20,34 @@ #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" +#include "api/units/timestamp.h" #include "media/base/media_channel.h" -#include "media/base/rtp_utils.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_util.h" +#include "rtc_base/async_packet_socket.h" #include "rtc_base/byte_order.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/dscp.h" +#include "rtc_base/logging.h" +#include "rtc_base/socket.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" -namespace cricket { +namespace webrtc { // Fake NetworkInterface that sends/receives RTP/RTCP packets. class FakeNetworkInterface : public MediaChannelNetworkInterface { public: FakeNetworkInterface() - : thread_(rtc::Thread::Current()), + : thread_(Thread::Current()), dest_(NULL), conf_(false), sendbuf_size_(-1), recvbuf_size_(-1), - dscp_(rtc::DSCP_NO_CHANGE) {} + dscp_(DSCP_NO_CHANGE) {} void SetDestination(MediaReceiveChannelInterface* dest) { dest_ = dest; } @@ -50,13 +56,13 @@ class FakeNetworkInterface : public MediaChannelNetworkInterface { // SSRCs. This allows us to simulate receiving media from multiple sources. 
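As an aside, a hedged usage sketch for the conference-mode hook documented above and declared just below (illustrative only; the SSRC values are made up, and the vector element type, elided in this header, is assumed to be uint32_t):

// Sketch: emulate three remote senders feeding one receive channel.
void ConferenceModeSketch(webrtc::FakeVoiceMediaReceiveChannel& receive_channel) {
  webrtc::FakeNetworkInterface network;
  network.SetDestination(&receive_channel);
  network.SetConferenceMode(/*conf=*/true, {1111u, 2222u, 3333u});
  // From here on, RTP pushed through `network` is replicated per listed SSRC
  // before being posted to `receive_channel`, so the channel appears to
  // receive media from three different sources.
}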
void SetConferenceMode(bool conf, const std::vector& ssrcs) RTC_LOCKS_EXCLUDED(mutex_) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); conf_ = conf; conf_sent_ssrcs_ = ssrcs; } int NumRtpBytes() RTC_LOCKS_EXCLUDED(mutex_) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); int bytes = 0; for (size_t i = 0; i < rtp_packets_.size(); ++i) { bytes += static_cast(rtp_packets_[i].size()); @@ -65,31 +71,31 @@ class FakeNetworkInterface : public MediaChannelNetworkInterface { } int NumRtpBytes(uint32_t ssrc) RTC_LOCKS_EXCLUDED(mutex_) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); int bytes = 0; GetNumRtpBytesAndPackets(ssrc, &bytes, NULL); return bytes; } int NumRtpPackets() RTC_LOCKS_EXCLUDED(mutex_) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return static_cast(rtp_packets_.size()); } int NumRtpPackets(uint32_t ssrc) RTC_LOCKS_EXCLUDED(mutex_) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); int packets = 0; GetNumRtpBytesAndPackets(ssrc, NULL, &packets); return packets; } int NumSentSsrcs() RTC_LOCKS_EXCLUDED(mutex_) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return static_cast(sent_ssrcs_.size()); } - rtc::CopyOnWriteBuffer GetRtpPacket(int index) RTC_LOCKS_EXCLUDED(mutex_) { - webrtc::MutexLock lock(&mutex_); + CopyOnWriteBuffer GetRtpPacket(int index) RTC_LOCKS_EXCLUDED(mutex_) { + MutexLock lock(&mutex_); if (index >= static_cast(rtp_packets_.size())) { return {}; } @@ -97,34 +103,33 @@ class FakeNetworkInterface : public MediaChannelNetworkInterface { } int NumRtcpPackets() RTC_LOCKS_EXCLUDED(mutex_) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return static_cast(rtcp_packets_.size()); } // Note: callers are responsible for deleting the returned buffer. 
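A small sketch of the ownership contract noted above: GetRtcpPacket(), declared just below, hands back a heap-allocated copy, so callers should take ownership immediately (illustrative, not part of the patch):

#include <memory>

#include "media/base/fake_network_interface.h"

void InspectFirstRtcpPacket(webrtc::FakeNetworkInterface& network) {
  if (network.NumRtcpPackets() == 0) {
    return;
  }
  // Wrap the raw pointer right away so the buffer is freed on every path.
  std::unique_ptr<const webrtc::CopyOnWriteBuffer> rtcp(
      network.GetRtcpPacket(0));
  RTC_LOG(LS_INFO) << "First RTCP packet: " << rtcp->size() << " bytes";
}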
- const rtc::CopyOnWriteBuffer* GetRtcpPacket(int index) - RTC_LOCKS_EXCLUDED(mutex_) { - webrtc::MutexLock lock(&mutex_); + const CopyOnWriteBuffer* GetRtcpPacket(int index) RTC_LOCKS_EXCLUDED(mutex_) { + MutexLock lock(&mutex_); if (index >= static_cast(rtcp_packets_.size())) { return NULL; } - return new rtc::CopyOnWriteBuffer(rtcp_packets_[index]); + return new CopyOnWriteBuffer(rtcp_packets_[index]); } int sendbuf_size() const { return sendbuf_size_; } int recvbuf_size() const { return recvbuf_size_; } - rtc::DiffServCodePoint dscp() const { return dscp_; } - rtc::PacketOptions options() const { return options_; } + DiffServCodePoint dscp() const { return dscp_; } + AsyncSocketPacketOptions options() const { return options_; } protected: - virtual bool SendPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options) + virtual bool SendPacket(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options) RTC_LOCKS_EXCLUDED(mutex_) { if (!webrtc::IsRtpPacket(*packet)) { return false; } - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); sent_ssrcs_[webrtc::ParseRtpSsrc(*packet)]++; options_ = options; @@ -140,10 +145,10 @@ class FakeNetworkInterface : public MediaChannelNetworkInterface { return true; } - virtual bool SendRtcp(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options) + virtual bool SendRtcp(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options) RTC_LOCKS_EXCLUDED(mutex_) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); rtcp_packets_.push_back(*packet); options_ = options; if (!conf_) { @@ -154,37 +159,36 @@ class FakeNetworkInterface : public MediaChannelNetworkInterface { return true; } - virtual int SetOption(SocketType type, rtc::Socket::Option opt, int option) { - if (opt == rtc::Socket::OPT_SNDBUF) { + virtual int SetOption(SocketType /* type */, Socket::Option opt, int option) { + if (opt == Socket::OPT_SNDBUF) { sendbuf_size_ = option; - } else if (opt == rtc::Socket::OPT_RCVBUF) { + } else if (opt == Socket::OPT_RCVBUF) { recvbuf_size_ = option; - } else if (opt == rtc::Socket::OPT_DSCP) { - dscp_ = static_cast(option); + } else if (opt == Socket::OPT_DSCP) { + dscp_ = static_cast(option); } return 0; } - void PostPacket(rtc::CopyOnWriteBuffer packet) { - thread_->PostTask( - SafeTask(safety_.flag(), [this, packet = std::move(packet)]() mutable { - if (dest_) { - webrtc::RtpPacketReceived parsed_packet; - if (parsed_packet.Parse(packet)) { - parsed_packet.set_arrival_time( - webrtc::Timestamp::Micros(rtc::TimeMicros())); - dest_->OnPacketReceived(std::move(parsed_packet)); - } else { - RTC_DCHECK_NOTREACHED(); - } - } - })); + void PostPacket(CopyOnWriteBuffer packet) { + thread_->PostTask(SafeTask(safety_.flag(), [this, packet = std::move( + packet)]() mutable { + if (dest_) { + RtpPacketReceived parsed_packet; + if (parsed_packet.Parse(packet)) { + parsed_packet.set_arrival_time(Timestamp::Micros(TimeMicros())); + dest_->OnPacketReceived(std::move(parsed_packet)); + } else { + RTC_DCHECK_NOTREACHED(); + } + } + })); } private: - void SetRtpSsrc(uint32_t ssrc, rtc::CopyOnWriteBuffer& buffer) { + void SetRtpSsrc(uint32_t ssrc, CopyOnWriteBuffer& buffer) { RTC_CHECK_GE(buffer.size(), 12); - rtc::SetBE32(buffer.MutableData() + 8, ssrc); + webrtc::SetBE32(buffer.MutableData() + 8, ssrc); } void GetNumRtpBytesAndPackets(uint32_t ssrc, int* bytes, int* packets) { @@ -206,7 +210,7 @@ class FakeNetworkInterface : public MediaChannelNetworkInterface { } } - webrtc::TaskQueueBase* thread_; + 
TaskQueueBase* thread_; MediaReceiveChannelInterface* dest_; bool conf_; // The ssrcs used in sending out packets in conference mode. @@ -216,17 +220,25 @@ class FakeNetworkInterface : public MediaChannelNetworkInterface { std::map sent_ssrcs_; // Map to track packet-number that needs to be dropped per ssrc. std::map > drop_map_; - webrtc::Mutex mutex_; - std::vector rtp_packets_; - std::vector rtcp_packets_; + Mutex mutex_; + std::vector rtp_packets_; + std::vector rtcp_packets_; int sendbuf_size_; int recvbuf_size_; - rtc::DiffServCodePoint dscp_; + DiffServCodePoint dscp_; // Options of the most recently sent packet. - rtc::PacketOptions options_; - webrtc::ScopedTaskSafety safety_; + AsyncSocketPacketOptions options_; + ScopedTaskSafety safety_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::FakeNetworkInterface; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_FAKE_NETWORK_INTERFACE_H_ diff --git a/media/base/fake_rtp.cc b/media/base/fake_rtp.cc index 21322419e1..f76600a9cd 100644 --- a/media/base/fake_rtp.cc +++ b/media/base/fake_rtp.cc @@ -13,6 +13,8 @@ #include #include +#include + #include "absl/algorithm/container.h" #include "rtc_base/checks.h" #include "test/gtest.h" diff --git a/media/base/fake_video_renderer.cc b/media/base/fake_video_renderer.cc index b235738d24..6d190fad52 100644 --- a/media/base/fake_video_renderer.cc +++ b/media/base/fake_video_renderer.cc @@ -10,9 +10,16 @@ #include "media/base/fake_video_renderer.h" -namespace cricket { +#include + +#include "api/scoped_refptr.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { namespace { -bool CheckFrameColorYuv(const webrtc::VideoFrame& frame) { +bool CheckFrameColorYuv(const VideoFrame& frame) { // TODO(zhurunz) Check with VP8 team to see if we can remove this // tolerance on Y values. Some unit tests produce Y values close // to 16 rather than close to zero, for supposedly black frames. 
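For reference, a hedged sketch of driving the renamed FakeVideoRenderer in a test. It is not part of the patch; it assumes the standard I420Buffer and VideoFrame::Builder helpers are available to the test target, and it relies on the Y-value tolerance described above to classify an all-black I420 buffer as a black frame.

#include "api/video/i420_buffer.h"
#include "api/video/video_frame.h"
#include "media/base/fake_video_renderer.h"
#include "test/gtest.h"

TEST(FakeVideoRendererSketch, CountsAndClassifiesBlackFrames) {
  webrtc::FakeVideoRenderer renderer;
  webrtc::scoped_refptr<webrtc::I420Buffer> buffer =
      webrtc::I420Buffer::Create(640, 360);
  webrtc::I420Buffer::SetBlack(buffer.get());
  webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
                                 .set_video_frame_buffer(buffer)
                                 .set_timestamp_us(0)
                                 .build();
  renderer.OnFrame(frame);
  EXPECT_EQ(1, renderer.num_rendered_frames());
  EXPECT_EQ(640, renderer.width());
  EXPECT_EQ(360, renderer.height());
  EXPECT_TRUE(renderer.black_frame());
}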
@@ -28,7 +35,7 @@ bool CheckFrameColorYuv(const webrtc::VideoFrame& frame) { if (!frame.video_frame_buffer()) { return false; } - rtc::scoped_refptr i420_buffer = + scoped_refptr i420_buffer = frame.video_frame_buffer()->ToI420(); // Y int y_width = frame.width(); @@ -74,8 +81,8 @@ bool CheckFrameColorYuv(const webrtc::VideoFrame& frame) { FakeVideoRenderer::FakeVideoRenderer() = default; -void FakeVideoRenderer::OnFrame(const webrtc::VideoFrame& frame) { - webrtc::MutexLock lock(&mutex_); +void FakeVideoRenderer::OnFrame(const VideoFrame& frame) { + MutexLock lock(&mutex_); black_frame_ = CheckFrameColorYuv(frame); ++num_rendered_frames_; width_ = frame.width(); @@ -84,4 +91,4 @@ void FakeVideoRenderer::OnFrame(const webrtc::VideoFrame& frame) { timestamp_us_ = frame.timestamp_us(); } -} // namespace cricket +} // namespace webrtc diff --git a/media/base/fake_video_renderer.h b/media/base/fake_video_renderer.h index 33d99a2668..ad894883f5 100644 --- a/media/base/fake_video_renderer.h +++ b/media/base/fake_video_renderer.h @@ -13,61 +13,67 @@ #include -#include "api/scoped_refptr.h" #include "api/video/video_frame.h" -#include "api/video/video_frame_buffer.h" #include "api/video/video_rotation.h" #include "api/video/video_sink_interface.h" #include "rtc_base/synchronization/mutex.h" -namespace cricket { +namespace webrtc { // Faked video renderer that has a callback for actions on rendering. -class FakeVideoRenderer : public rtc::VideoSinkInterface { +class FakeVideoRenderer : public VideoSinkInterface { public: FakeVideoRenderer(); - void OnFrame(const webrtc::VideoFrame& frame) override; + void OnFrame(const VideoFrame& frame) override; int width() const { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return width_; } int height() const { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return height_; } - webrtc::VideoRotation rotation() const { - webrtc::MutexLock lock(&mutex_); + VideoRotation rotation() const { + MutexLock lock(&mutex_); return rotation_; } int64_t timestamp_us() const { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return timestamp_us_; } int num_rendered_frames() const { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return num_rendered_frames_; } bool black_frame() const { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return black_frame_; } private: int width_ = 0; int height_ = 0; - webrtc::VideoRotation rotation_ = webrtc::kVideoRotation_0; + VideoRotation rotation_ = webrtc::kVideoRotation_0; int64_t timestamp_us_ = 0; int num_rendered_frames_ = 0; bool black_frame_ = false; - mutable webrtc::Mutex mutex_; + mutable Mutex mutex_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::FakeVideoRenderer; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_FAKE_VIDEO_RENDERER_H_ diff --git a/media/base/media_channel.h b/media/base/media_channel.h index 2282b57d7e..b1f026372a 100644 --- a/media/base/media_channel.h +++ b/media/base/media_channel.h @@ -11,14 +11,20 @@ #ifndef MEDIA_BASE_MEDIA_CHANNEL_H_ #define MEDIA_BASE_MEDIA_CHANNEL_H_ +#include +#include +#include #include #include +#include #include #include #include #include -#include "absl/types/optional.h" +#include "absl/functional/any_invocable.h" +#include "absl/strings/string_view.h" +#include "api/audio/audio_processing_statistics.h" #include "api/audio_codecs/audio_encoder.h" #include "api/audio_options.h" #include "api/call/audio_sink.h" @@ -26,57 +32,44 @@ #include "api/crypto/frame_encryptor_interface.h" #include "api/frame_transformer_interface.h" #include "api/media_stream_interface.h" +#include "api/media_types.h" #include "api/rtc_error.h" +#include "api/rtp_headers.h" #include "api/rtp_parameters.h" #include "api/rtp_sender_interface.h" -#include "api/task_queue/pending_task_safety_flag.h" -#include "api/transport/data_channel_transport_interface.h" +#include "api/scoped_refptr.h" #include "api/transport/rtp/rtp_source.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "api/video/recordable_encoded_frame.h" #include "api/video/video_content_type.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "api/video/video_timing.h" #include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_encoder_factory.h" -#include "call/video_receive_stream.h" #include "common_video/include/quality_limitation_reason.h" +#include "media/base/audio_source.h" #include "media/base/codec.h" -#include "media/base/media_constants.h" #include "media/base/stream_params.h" -#include "modules/audio_processing/include/audio_processing_statistics.h" #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/async_packet_socket.h" -#include "rtc_base/buffer.h" #include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/dscp.h" -#include "rtc_base/logging.h" +#include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" #include "rtc_base/socket.h" #include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" -#include "video/config/video_encoder_config.h" - -namespace rtc { -class Timing; -} namespace webrtc { class VideoFrame; +struct VideoFormat; webrtc::RTCError InvokeSetParametersCallback(SetParametersCallback& callback, RTCError error); -} // namespace webrtc - -namespace cricket { - -class AudioSource; -class VideoCapturer; -struct RtpHeader; -struct VideoFormat; class VideoMediaSendChannelInterface; class VideoMediaReceiveChannelInterface; class VoiceMediaSendChannelInterface; @@ -85,13 +78,12 @@ class VoiceMediaReceiveChannelInterface; const int kScreencastDefaultFps = 5; template -static std::string ToStringIfSet(const char* key, - const absl::optional& val) { +static std::string ToStringIfSet(const char* key, const std::optional& val) { std::string str; if (val) { str = key; str += ": "; - str += val ? rtc::ToString(*val) : ""; + str += val ? 
absl::StrCat(*val) : ""; str += ", "; } return str; @@ -99,7 +91,7 @@ static std::string ToStringIfSet(const char* key, template static std::string VectorToString(const std::vector& vals) { - rtc::StringBuilder ost; // no-presubmit-check TODO(webrtc:8982) + StringBuilder ost; ost << "["; for (size_t i = 0; i < vals.size(); ++i) { if (i > 0) { @@ -133,7 +125,7 @@ struct VideoOptions { bool operator!=(const VideoOptions& o) const { return !(*this == o); } std::string ToString() const { - rtc::StringBuilder ost; + StringBuilder ost; ost << "VideoOptions {"; ost << ToStringIfSet("noise reduction", video_noise_reduction); ost << ToStringIfSet("screencast min bitrate kbps", @@ -146,23 +138,23 @@ struct VideoOptions { // Enable denoising? This flag comes from the getUserMedia // constraint 'googNoiseReduction', and WebRtcVideoEngine passes it // on to the codec options. Disabled by default. - absl::optional video_noise_reduction; + std::optional video_noise_reduction; // Force screencast to use a minimum bitrate. This flag comes from // the PeerConnection constraint 'googScreencastMinBitrate'. It is // copied to the encoder config by WebRtcVideoChannel. // TODO(https://crbug.com/1315155): Remove the ability to set it in Chromium // and delete this flag (it should default to 100 kbps). - absl::optional screencast_min_bitrate_kbps; + std::optional screencast_min_bitrate_kbps; // Set by screencast sources. Implies selection of encoding settings // suitable for screencast. Most likely not the right way to do // things, e.g., screencast of a text document and screencast of a // youtube video have different needs. - absl::optional is_screencast; + std::optional is_screencast; webrtc::VideoTrackInterface::ContentHint content_hint; private: template - static void SetFrom(absl::optional* s, const absl::optional& o) { + static void SetFrom(std::optional* s, const std::optional& o) { if (o) { *s = o; } @@ -172,12 +164,12 @@ struct VideoOptions { class MediaChannelNetworkInterface { public: enum SocketType { ST_RTP, ST_RTCP }; - virtual bool SendPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options) = 0; - virtual bool SendRtcp(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options) = 0; + virtual bool SendPacket(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options) = 0; + virtual bool SendRtcp(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options) = 0; virtual int SetOption(SocketType type, - rtc::Socket::Option opt, + webrtc::Socket::Option opt, int option) = 0; virtual ~MediaChannelNetworkInterface() {} }; @@ -189,14 +181,14 @@ class MediaSendChannelInterface { virtual VideoMediaSendChannelInterface* AsVideoSendChannel() = 0; virtual VoiceMediaSendChannelInterface* AsVoiceSendChannel() = 0; - virtual cricket::MediaType media_type() const = 0; + virtual webrtc::MediaType media_type() const = 0; // Gets the currently set codecs/payload types to be used for outgoing media. - virtual absl::optional GetSendCodec() const = 0; + virtual std::optional GetSendCodec() const = 0; // Creates a new outgoing media stream with SSRCs and CNAME as described // by sp. - virtual bool AddSendStream(const StreamParams& sp) = 0; + virtual bool AddSendStream(const webrtc::StreamParams& sp) = 0; // Removes an outgoing media stream. // SSRC must be the first SSRC of the media stream if the stream uses // multiple SSRCs. 
In the case of an ssrc of 0, the possibly cached @@ -204,13 +196,13 @@ class MediaSendChannelInterface { virtual bool RemoveSendStream(uint32_t ssrc) = 0; // Called on the network thread after a transport has finished sending a // packet. - virtual void OnPacketSent(const rtc::SentPacket& sent_packet) = 0; + virtual void OnPacketSent(const SentPacketInfo& sent_packet) = 0; // Called when the socket's ability to send has changed. virtual void OnReadyToSend(bool ready) = 0; // Called when the network route used for sending packets changed. virtual void OnNetworkRouteChanged( absl::string_view transport_name, - const rtc::NetworkRoute& network_route) = 0; + const webrtc::NetworkRoute& network_route) = 0; // Sets the abstract interface class for sending RTP/RTCP data. virtual void SetInterface(MediaChannelNetworkInterface* iface) = 0; @@ -230,7 +222,7 @@ class MediaSendChannelInterface { // to. virtual void SetFrameEncryptor( uint32_t ssrc, - rtc::scoped_refptr frame_encryptor) = 0; + scoped_refptr frame_encryptor) = 0; virtual webrtc::RTCError SetRtpSendParameters( uint32_t ssrc, @@ -239,15 +231,14 @@ class MediaSendChannelInterface { virtual void SetEncoderToPacketizerFrameTransformer( uint32_t ssrc, - rtc::scoped_refptr - frame_transformer) = 0; + scoped_refptr frame_transformer) = 0; // note: The encoder_selector object must remain valid for the lifetime of the // MediaChannel, unless replaced. virtual void SetEncoderSelector( - uint32_t ssrc, - webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { - } + uint32_t /* ssrc */, + webrtc::VideoEncoderFactory:: + EncoderSelectorInterface* /* encoder_selector */) {} virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0; virtual bool SendCodecHasNack() const = 0; // Called whenever the list of sending SSRCs changes. @@ -265,11 +256,11 @@ class MediaReceiveChannelInterface { virtual VideoMediaReceiveChannelInterface* AsVideoReceiveChannel() = 0; virtual VoiceMediaReceiveChannelInterface* AsVoiceReceiveChannel() = 0; - virtual cricket::MediaType media_type() const = 0; + virtual webrtc::MediaType media_type() const = 0; // Creates a new incoming media stream with SSRCs, CNAME as described // by sp. In the case of a sp without SSRCs, the unsignaled sp is cached // to be used later for unsignaled streams received. - virtual bool AddRecvStream(const StreamParams& sp) = 0; + virtual bool AddRecvStream(const webrtc::StreamParams& sp) = 0; // Removes an incoming media stream. // ssrc must be the first SSRC of the media stream if the stream uses // multiple SSRCs. @@ -282,7 +273,7 @@ class MediaReceiveChannelInterface { // Called on the network when an RTP packet is received. virtual void OnPacketReceived(const webrtc::RtpPacketReceived& packet) = 0; // Gets the current unsignaled receive stream's SSRC, if there is one. - virtual absl::optional GetUnsignaledSsrc() const = 0; + virtual std::optional GetUnsignaledSsrc() const = 0; // Sets the local SSRC for listening to incoming RTCP reports. virtual void ChooseReceiverReportSsrc(const std::set& choices) = 0; // This is currently a workaround because of the demuxer state being managed @@ -304,12 +295,11 @@ class MediaReceiveChannelInterface { // attached to. 
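The MediaSendChannelInterface changes earlier in this file rename rtc::SentPacket to SentPacketInfo and drop the rtc:: qualification from NetworkRoute and scoped_refptr. To make that concrete for downstream implementers, a partial, declaration-only sketch (MySendChannel is hypothetical and its remaining pure-virtual overrides are elided):

class MySendChannel : public webrtc::MediaSendChannelInterface {
  // Was: void OnPacketSent(const rtc::SentPacket& sent_packet) override;
  void OnPacketSent(const webrtc::SentPacketInfo& sent_packet) override;
  // Was: void OnNetworkRouteChanged(absl::string_view transport_name,
  //                                 const rtc::NetworkRoute& network_route);
  void OnNetworkRouteChanged(absl::string_view transport_name,
                             const webrtc::NetworkRoute& network_route) override;
  // Was: SetFrameEncryptor taking an rtc::scoped_refptr.
  void SetFrameEncryptor(uint32_t ssrc,
                         webrtc::scoped_refptr<webrtc::FrameEncryptorInterface>
                             frame_encryptor) override;
  // ... remaining overrides unchanged apart from the same spelling updates ...
};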
virtual void SetFrameDecryptor( uint32_t ssrc, - rtc::scoped_refptr frame_decryptor) = 0; + scoped_refptr frame_decryptor) = 0; virtual void SetDepacketizerToDecoderFrameTransformer( uint32_t ssrc, - rtc::scoped_refptr - frame_transformer) = 0; + scoped_refptr frame_transformer) = 0; // Set base minimum delay of the receive stream with specified ssrc. // Base minimum delay sets lower bound on minimum delay value which @@ -318,7 +308,7 @@ class MediaReceiveChannelInterface { virtual bool SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) = 0; // Returns current value of base minimum delay in milliseconds. - virtual absl::optional GetBaseMinimumPlayoutDelayMs( + virtual std::optional GetBaseMinimumPlayoutDelayMs( uint32_t ssrc) const = 0; }; @@ -385,12 +375,12 @@ struct MediaSenderInfo { // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-nackcount uint32_t nacks_received = 0; // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-targetbitrate - absl::optional target_bitrate; + std::optional target_bitrate; int packets_lost = 0; float fraction_lost = 0.0f; int64_t rtt_ms = 0; std::string codec_name; - absl::optional codec_payload_type; + std::optional codec_payload_type; std::vector local_stats; std::vector remote_stats; // A snapshot of the most recent Report Block with additional data of interest @@ -398,7 +388,7 @@ struct MediaSenderInfo { // this list, the `ReportBlockData::source_ssrc()`, which is the SSRC of the // corresponding outbound RTP stream, is unique. std::vector report_block_datas; - absl::optional active; + std::optional active; // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalpacketsenddelay webrtc::TimeDelta total_packet_send_delay = webrtc::TimeDelta::Zero(); }; @@ -445,9 +435,9 @@ struct MediaReceiverInfo { int packets_received = 0; int packets_lost = 0; - absl::optional retransmitted_bytes_received; - absl::optional retransmitted_packets_received; - absl::optional nacks_sent; + std::optional retransmitted_bytes_received; + std::optional retransmitted_packets_received; + std::optional nacks_sent; // Jitter (network-related) latency (cumulative). // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbufferdelay double jitter_buffer_delay_seconds = 0.0; @@ -463,19 +453,37 @@ struct MediaReceiverInfo { // The timestamp at which the last packet was received, i.e. the time of the // local clock when it was received - not the RTP timestamp of that packet. 
// https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-lastpacketreceivedtimestamp - absl::optional last_packet_received; + std::optional last_packet_received; // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-estimatedplayouttimestamp - absl::optional estimated_playout_ntp_timestamp_ms; + std::optional estimated_playout_ntp_timestamp_ms; std::string codec_name; - absl::optional codec_payload_type; + std::optional codec_payload_type; std::vector local_stats; std::vector remote_stats; // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-fecpacketsreceived - absl::optional fec_packets_received; + std::optional fec_packets_received; // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-fecpacketsdiscarded - absl::optional fec_packets_discarded; + std::optional fec_packets_discarded; // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-fecbytesreceived - absl::optional fec_bytes_received; + std::optional fec_bytes_received; + // https://www.w3.org/TR/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalprocessingdelay + double total_processing_delay_seconds = 0.0; + + // Remote outbound stats derived by the received RTCP sender reports. + // https://w3c.github.io/webrtc-stats/#remoteoutboundrtpstats-dict* + std::optional last_sender_report_timestamp; + // TODO: bugs.webrtc.org/370535296 - Remove the utc timestamp when linked + // issue is fixed. + std::optional last_sender_report_utc_timestamp; + std::optional last_sender_report_remote_utc_timestamp; + uint64_t sender_reports_packets_sent = 0; + uint64_t sender_reports_bytes_sent = 0; + uint64_t sender_reports_reports_count = 0; + // These require a DLRR block, see + // https://w3c.github.io/webrtc-stats/#dom-rtcremoteoutboundrtpstreamstats-roundtriptime + std::optional round_trip_time; + webrtc::TimeDelta total_round_trip_time = webrtc::TimeDelta::Zero(); + int round_trip_time_measurements = 0; }; struct VoiceSenderInfo : public MediaSenderInfo { @@ -551,23 +559,14 @@ struct VoiceReceiverInfo : public MediaReceiverInfo { // longer than 150 ms). int32_t interruption_count = 0; int32_t total_interruption_duration_ms = 0; - // Remote outbound stats derived by the received RTCP sender reports. 
- // https://w3c.github.io/webrtc-stats/#remoteoutboundrtpstats-dict* - absl::optional last_sender_report_timestamp_ms; - absl::optional last_sender_report_remote_timestamp_ms; - uint64_t sender_reports_packets_sent = 0; - uint64_t sender_reports_bytes_sent = 0; - uint64_t sender_reports_reports_count = 0; - absl::optional round_trip_time; - webrtc::TimeDelta total_round_trip_time = webrtc::TimeDelta::Zero(); - int round_trip_time_measurements = 0; }; struct VideoSenderInfo : public MediaSenderInfo { VideoSenderInfo(); ~VideoSenderInfo(); - std::vector ssrc_groups; - absl::optional encoder_implementation_name; + std::optional encoding_index; + std::vector ssrc_groups; + std::optional encoder_implementation_name; int firs_received = 0; int plis_received = 0; int send_frame_width = 0; @@ -596,23 +595,23 @@ struct VideoSenderInfo : public MediaSenderInfo { // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalencodedbytestarget uint64_t total_encoded_bytes_target = 0; bool has_entered_low_resolution = false; - absl::optional qp_sum; + std::optional qp_sum; webrtc::VideoContentType content_type = webrtc::VideoContentType::UNSPECIFIED; uint32_t frames_sent = 0; // https://w3c.github.io/webrtc-stats/#dom-rtcvideosenderstats-hugeframessent uint32_t huge_frames_sent = 0; uint32_t aggregated_huge_frames_sent = 0; - absl::optional rid; - absl::optional power_efficient_encoder; - absl::optional scalability_mode; + std::optional rid; + std::optional power_efficient_encoder; + std::optional scalability_mode; }; struct VideoReceiverInfo : public MediaReceiverInfo { VideoReceiverInfo(); ~VideoReceiverInfo(); - std::vector ssrc_groups; - absl::optional decoder_implementation_name; - absl::optional power_efficient_decoder; + std::vector ssrc_groups; + std::optional decoder_implementation_name; + std::optional power_efficient_decoder; int packets_concealed = 0; int firs_sent = 0; int plis_sent = 0; @@ -630,7 +629,18 @@ struct VideoReceiverInfo : public MediaReceiverInfo { uint32_t frames_decoded = 0; uint32_t key_frames_decoded = 0; uint32_t frames_rendered = 0; - absl::optional qp_sum; + std::optional qp_sum; + // Corruption score, indicating the probability of corruption. Its value is + // between 0 and 1, where 0 means no corruption and 1 means that the + // compressed frame is corrupted. + // However, note that the corruption score may not accurately reflect + // corruption. E.g. even if the corruption score is 0, the compressed frame + // may still be corrupted and vice versa. + std::optional corruption_score_sum; + std::optional corruption_score_squared_sum; + // Number of frames the `corruption_score` was calculated on. This is + // usually not the same as `frames_decoded` or `frames_rendered`. + uint32_t corruption_score_count = 0; // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totaldecodetime webrtc::TimeDelta total_decode_time = webrtc::TimeDelta::Zero(); // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalprocessingdelay @@ -676,7 +686,7 @@ struct VideoReceiverInfo : public MediaReceiverInfo { // Timing frame info: all important timestamps for a full lifetime of a // single 'timing frame'. 
- absl::optional timing_frame_info; + std::optional timing_frame_info; }; struct BandwidthEstimationInfo { @@ -806,25 +816,27 @@ struct VideoMediaInfo { RtpCodecParametersMap receive_codecs; }; -struct RtcpParameters { - bool reduced_size = false; - bool remote_estimate = false; -}; - struct MediaChannelParameters { virtual ~MediaChannelParameters() = default; + // This is the value to be sent in the MID RTP header extension (if the header + // extension in included in the list of extensions). + // It is also used as a key to map the channel to its transport. + std::string mid; - std::vector codecs; + std::vector codecs; std::vector extensions; - // For a send stream this is true if we've neogtiated a send direction, + // For a send stream this is true if we've negotiated a send direction, // for a receive stream this is true if we've negotiated a receive direction. bool is_stream_active = true; // TODO(pthatcher): Add streams. - RtcpParameters rtcp; + struct RtcpParameters { + bool reduced_size = false; + bool remote_estimate = false; + } rtcp; std::string ToString() const { - rtc::StringBuilder ost; + StringBuilder ost; ost << "{"; const char* separator = ""; for (const auto& entry : ToStringMap()) { @@ -838,21 +850,21 @@ struct MediaChannelParameters { protected: virtual std::map ToStringMap() const { return {{"codecs", VectorToString(codecs)}, - {"extensions", VectorToString(extensions)}}; + {"extensions", VectorToString(extensions)}, + {"rtcp", "{reduced_size:" + absl::StrCat(rtcp.reduced_size) + + ", remote_estimate:" + + absl::StrCat(rtcp.remote_estimate) + "}"}}; } }; struct SenderParameters : MediaChannelParameters { int max_bandwidth_bps = -1; - // This is the value to be sent in the MID RTP header extension (if the header - // extension in included in the list of extensions). - std::string mid; bool extmap_allow_mixed = false; protected: std::map ToStringMap() const override { auto params = MediaChannelParameters::ToStringMap(); - params["max_bandwidth_bps"] = rtc::ToString(max_bandwidth_bps); + params["max_bandwidth_bps"] = absl::StrCat(max_bandwidth_bps); params["mid"] = (mid.empty() ? "" : mid); params["extmap-allow-mixed"] = extmap_allow_mixed ? "true" : "false"; return params; @@ -862,7 +874,7 @@ struct SenderParameters : MediaChannelParameters { struct AudioSenderParameter : SenderParameters { AudioSenderParameter(); ~AudioSenderParameter() override; - AudioOptions options; + webrtc::AudioOptions options; protected: std::map ToStringMap() const override; @@ -878,8 +890,8 @@ class VoiceMediaSendChannelInterface : public MediaSendChannelInterface { // Configure stream for sending. virtual bool SetAudioSend(uint32_t ssrc, bool enable, - const AudioOptions* options, - AudioSource* source) = 0; + const webrtc::AudioOptions* options, + webrtc::AudioSource* source) = 0; // Returns if the telephone-event has been negotiated. virtual bool CanInsertDtmf() = 0; // Send a DTMF `event`. The DTMF out-of-band signal will be used. 
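The hunk that follows adds RtcpMode()/SetRtcpMode() to the voice receive-channel interface; the fake defined earlier in this patch already implements them with a kCompound default. A hedged test sketch (the constructor arguments and the current-thread setup are simplified assumptions):

TEST(FakeVoiceReceiveChannelSketch, RtcpModeRoundTrips) {
  webrtc::FakeVoiceMediaReceiveChannel channel(webrtc::AudioOptions(),
                                               webrtc::Thread::Current());
  // kCompound is the default this patch gives the fake.
  EXPECT_EQ(webrtc::RtcpMode::kCompound, channel.RtcpMode());
  channel.SetRtcpMode(webrtc::RtcpMode::kReducedSize);
  EXPECT_EQ(webrtc::RtcpMode::kReducedSize, channel.RtcpMode());
}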
@@ -914,6 +926,8 @@ class VoiceMediaReceiveChannelInterface : public MediaReceiveChannelInterface { virtual void SetDefaultRawAudioSink( std::unique_ptr sink) = 0; virtual bool GetStats(VoiceMediaReceiveInfo* stats, bool reset_legacy) = 0; + virtual webrtc::RtcpMode RtcpMode() const = 0; + virtual void SetRtcpMode(webrtc::RtcpMode mode) = 0; virtual void SetReceiveNackEnabled(bool enabled) = 0; virtual void SetReceiveNonSenderRttEnabled(bool enabled) = 0; }; @@ -945,12 +959,10 @@ class VideoMediaSendChannelInterface : public MediaSendChannelInterface { virtual bool SetVideoSend( uint32_t ssrc, const VideoOptions* options, - rtc::VideoSourceInterface* source) = 0; + webrtc::VideoSourceInterface* source) = 0; // Cause generation of a keyframe for `ssrc` on a sending channel. virtual void GenerateSendKeyFrame(uint32_t ssrc, const std::vector& rids) = 0; - // Enable network condition based codec switching. - virtual void SetVideoCodecSwitchingEnabled(bool enabled) = 0; virtual bool GetStats(VideoMediaSendInfo* stats) = 0; // This fills the "bitrate parts" (rtx, video bitrate) of the // BandwidthEstimationInfo, since that part that isn't possible to get @@ -964,7 +976,7 @@ class VideoMediaSendChannelInterface : public MediaSendChannelInterface { // Information queries to support SetReceiverFeedbackParameters virtual webrtc::RtcpMode SendCodecRtcpMode() const = 0; virtual bool SendCodecHasLntf() const = 0; - virtual absl::optional SendCodecRtxTime() const = 0; + virtual std::optional SendCodecRtxTime() const = 0; }; class VideoMediaReceiveChannelInterface : public MediaReceiveChannelInterface { @@ -979,11 +991,12 @@ class VideoMediaReceiveChannelInterface : public MediaReceiveChannelInterface { // stream, which is used when SSRCs are not signaled. virtual webrtc::RtpParameters GetDefaultRtpReceiveParameters() const = 0; // Sets the sink object to be used for the specified stream. - virtual bool SetSink(uint32_t ssrc, - rtc::VideoSinkInterface* sink) = 0; + virtual bool SetSink( + uint32_t ssrc, + webrtc::VideoSinkInterface* sink) = 0; // The sink is used for the 'default' stream. virtual void SetDefaultSink( - rtc::VideoSinkInterface* sink) = 0; + webrtc::VideoSinkInterface* sink) = 0; // Request generation of a keyframe for `ssrc` on a receiving channel via // RTCP feedback. virtual void RequestRecvKeyFrame(uint32_t ssrc) = 0; @@ -999,10 +1012,52 @@ class VideoMediaReceiveChannelInterface : public MediaReceiveChannelInterface { virtual void SetReceiverFeedbackParameters(bool lntf_enabled, bool nack_enabled, webrtc::RtcpMode rtcp_mode, - absl::optional rtx_time) = 0; - virtual bool AddDefaultRecvStreamForTesting(const StreamParams& sp) = 0; + std::optional rtx_time) = 0; + virtual bool AddDefaultRecvStreamForTesting( + const webrtc::StreamParams& sp) = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using RtcpParameters = ::webrtc::MediaChannelParameters::RtcpParameters; +using ::webrtc::AudioReceiverParameters; +using ::webrtc::AudioSenderParameter; +using ::webrtc::BandwidthEstimationInfo; +using ::webrtc::kScreencastDefaultFps; +using ::webrtc::MediaChannelNetworkInterface; +using ::webrtc::MediaChannelParameters; +using ::webrtc::MediaReceiveChannelInterface; +using ::webrtc::MediaReceiverInfo; +using ::webrtc::MediaSendChannelInterface; +using ::webrtc::MediaSenderInfo; +using ::webrtc::RtpCodecParametersMap; +using ::webrtc::SenderParameters; +using ::webrtc::SsrcReceiverInfo; +using ::webrtc::SsrcSenderInfo; +using ::webrtc::ToStringIfSet; +using ::webrtc::VectorToString; +using ::webrtc::VideoMediaInfo; +using ::webrtc::VideoMediaReceiveChannelInterface; +using ::webrtc::VideoMediaReceiveInfo; +using ::webrtc::VideoMediaSendChannelInterface; +using ::webrtc::VideoMediaSendInfo; +using ::webrtc::VideoOptions; +using ::webrtc::VideoReceiverInfo; +using ::webrtc::VideoReceiverParameters; +using ::webrtc::VideoSenderInfo; +using ::webrtc::VideoSenderParameters; +using ::webrtc::VoiceMediaInfo; +using ::webrtc::VoiceMediaReceiveChannelInterface; +using ::webrtc::VoiceMediaReceiveInfo; +using ::webrtc::VoiceMediaSendChannelInterface; +using ::webrtc::VoiceMediaSendInfo; +using ::webrtc::VoiceReceiverInfo; +using ::webrtc::VoiceSenderInfo; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_MEDIA_CHANNEL_H_ diff --git a/media/base/media_channel_impl.cc b/media/base/media_channel_impl.cc index e7e84c781c..40da627b4b 100644 --- a/media/base/media_channel_impl.cc +++ b/media/base/media_channel_impl.cc @@ -10,26 +10,31 @@ #include "media/base/media_channel_impl.h" +#include #include #include -#include #include #include "absl/functional/any_invocable.h" +#include "api/array_view.h" #include "api/audio_options.h" +#include "api/call/transport.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/crypto/frame_encryptor_interface.h" +#include "api/frame_transformer_interface.h" #include "api/media_stream_interface.h" #include "api/rtc_error.h" #include "api/rtp_sender_interface.h" -#include "api/units/time_delta.h" -#include "api/video/video_timing.h" -#include "api/video_codecs/scalability_mode.h" -#include "common_video/include/quality_limitation_reason.h" -#include "media/base/codec.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" #include "media/base/media_channel.h" #include "media/base/rtp_utils.h" -#include "media/base/stream_params.h" -#include "modules/rtp_rtcp/include/report_block_data.h" +#include "rtc_base/async_packet_socket.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/dscp.h" +#include "rtc_base/socket.h" namespace webrtc { @@ -42,17 +47,6 @@ webrtc::RTCError InvokeSetParametersCallback(SetParametersCallback& callback, return error; } -} // namespace webrtc - -namespace cricket { -using webrtc::FrameDecryptorInterface; -using webrtc::FrameEncryptorInterface; -using webrtc::FrameTransformerInterface; -using webrtc::PendingTaskSafetyFlag; -using webrtc::SafeTask; -using webrtc::TaskQueueBase; -using webrtc::VideoTrackInterface; - VideoOptions::VideoOptions() : content_hint(VideoTrackInterface::ContentHint::kNone) {} VideoOptions::~VideoOptions() = default; @@ -71,30 +65,18 @@ int MediaChannelUtil::GetRtpSendTimeExtnId() const { 
return -1; } -void MediaChannelUtil::SetFrameEncryptor( - uint32_t ssrc, - rtc::scoped_refptr frame_encryptor) { - // Placeholder should be pure virtual once internal supports it. -} - -void MediaChannelUtil::SetFrameDecryptor( - uint32_t ssrc, - rtc::scoped_refptr frame_decryptor) { - // Placeholder should be pure virtual once internal supports it. -} - -bool MediaChannelUtil::SendPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options) { +bool MediaChannelUtil::SendPacket(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options) { return transport_.DoSendPacket(packet, false, options); } -bool MediaChannelUtil::SendRtcp(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options) { +bool MediaChannelUtil::SendRtcp(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options) { return transport_.DoSendPacket(packet, true, options); } int MediaChannelUtil::SetOption(MediaChannelNetworkInterface::SocketType type, - rtc::Socket::Option opt, + webrtc::Socket::Option opt, int option) { return transport_.SetOption(type, opt, option); } @@ -115,19 +97,11 @@ bool MediaChannelUtil::HasNetworkInterface() const { return transport_.HasNetworkInterface(); } -void MediaChannelUtil::SetEncoderToPacketizerFrameTransformer( - uint32_t ssrc, - rtc::scoped_refptr frame_transformer) {} - -void MediaChannelUtil::SetDepacketizerToDecoderFrameTransformer( - uint32_t ssrc, - rtc::scoped_refptr frame_transformer) {} - bool MediaChannelUtil::DscpEnabled() const { return transport_.DscpEnabled(); } -void MediaChannelUtil::SetPreferredDscp(rtc::DiffServCodePoint new_dscp) { +void MediaChannelUtil::SetPreferredDscp(webrtc::DiffServCodePoint new_dscp) { transport_.SetPreferredDscp(new_dscp); } @@ -200,10 +174,10 @@ MediaChannelUtil::TransportForMediaChannels::~TransportForMediaChannels() { } bool MediaChannelUtil::TransportForMediaChannels::SendRtcp( - rtc::ArrayView packet) { - auto send = [this, packet = rtc::CopyOnWriteBuffer( - packet, kMaxRtpPacketLen)]() mutable { - rtc::PacketOptions rtc_options; + ArrayView packet) { + auto send = [this, packet = CopyOnWriteBuffer( + packet, webrtc::kMaxRtpPacketLen)]() mutable { + AsyncSocketPacketOptions rtc_options; if (DscpEnabled()) { rtc_options.dscp = PreferredDscp(); } @@ -219,28 +193,31 @@ bool MediaChannelUtil::TransportForMediaChannels::SendRtcp( } bool MediaChannelUtil::TransportForMediaChannels::SendRtp( - rtc::ArrayView packet, + ArrayView packet, const webrtc::PacketOptions& options) { - auto send = - [this, packet_id = options.packet_id, - included_in_feedback = options.included_in_feedback, - included_in_allocation = options.included_in_allocation, - batchable = options.batchable, - last_packet_in_batch = options.last_packet_in_batch, - packet = rtc::CopyOnWriteBuffer(packet, kMaxRtpPacketLen)]() mutable { - rtc::PacketOptions rtc_options; - rtc_options.packet_id = packet_id; - if (DscpEnabled()) { - rtc_options.dscp = PreferredDscp(); - } - rtc_options.info_signaled_after_sent.included_in_feedback = - included_in_feedback; - rtc_options.info_signaled_after_sent.included_in_allocation = - included_in_allocation; - rtc_options.batchable = batchable; - rtc_options.last_packet_in_batch = last_packet_in_batch; - DoSendPacket(&packet, false, rtc_options); - }; + auto send = [this, packet_id = options.packet_id, + included_in_feedback = options.included_in_feedback, + included_in_allocation = options.included_in_allocation, + batchable = options.batchable, + last_packet_in_batch = options.last_packet_in_batch, + 
is_media = options.is_media, ect_1 = options.send_as_ect1, + packet = CopyOnWriteBuffer(packet, + webrtc::kMaxRtpPacketLen)]() mutable { + AsyncSocketPacketOptions rtc_options; + rtc_options.packet_id = packet_id; + if (DscpEnabled()) { + rtc_options.dscp = PreferredDscp(); + } + rtc_options.info_signaled_after_sent.included_in_feedback = + included_in_feedback; + rtc_options.info_signaled_after_sent.included_in_allocation = + included_in_allocation; + rtc_options.info_signaled_after_sent.is_media = is_media; + rtc_options.ecn_1 = ect_1; + rtc_options.batchable = batchable; + rtc_options.last_packet_in_batch = last_packet_in_batch; + DoSendPacket(&packet, false, rtc_options); + }; // TODO(bugs.webrtc.org/11993): ModuleRtpRtcpImpl2 and related classes (e.g. // RTCPSender) aren't aware of the network thread and may trigger calls to @@ -263,19 +240,19 @@ void MediaChannelUtil::TransportForMediaChannels::SetInterface( } void MediaChannelUtil::TransportForMediaChannels::UpdateDscp() { - rtc::DiffServCodePoint value = - enable_dscp_ ? preferred_dscp_ : rtc::DSCP_DEFAULT; + webrtc::DiffServCodePoint value = + enable_dscp_ ? preferred_dscp_ : webrtc::DSCP_DEFAULT; int ret = SetOptionLocked(MediaChannelNetworkInterface::ST_RTP, - rtc::Socket::OPT_DSCP, value); + webrtc::Socket::OPT_DSCP, value); if (ret == 0) SetOptionLocked(MediaChannelNetworkInterface::ST_RTCP, - rtc::Socket::OPT_DSCP, value); + webrtc::Socket::OPT_DSCP, value); } bool MediaChannelUtil::TransportForMediaChannels::DoSendPacket( - rtc::CopyOnWriteBuffer* packet, + CopyOnWriteBuffer* packet, bool rtcp, - const rtc::PacketOptions& options) { + const AsyncSocketPacketOptions& options) { RTC_DCHECK_RUN_ON(network_thread_); if (!network_interface_) return false; @@ -286,7 +263,7 @@ bool MediaChannelUtil::TransportForMediaChannels::DoSendPacket( int MediaChannelUtil::TransportForMediaChannels::SetOption( MediaChannelNetworkInterface::SocketType type, - rtc::Socket::Option opt, + webrtc::Socket::Option opt, int option) { RTC_DCHECK_RUN_ON(network_thread_); return SetOptionLocked(type, opt, option); @@ -294,7 +271,7 @@ int MediaChannelUtil::TransportForMediaChannels::SetOption( int MediaChannelUtil::TransportForMediaChannels::SetOptionLocked( MediaChannelNetworkInterface::SocketType type, - rtc::Socket::Option opt, + webrtc::Socket::Option opt, int option) { if (!network_interface_) return -1; @@ -302,7 +279,7 @@ int MediaChannelUtil::TransportForMediaChannels::SetOptionLocked( } void MediaChannelUtil::TransportForMediaChannels::SetPreferredDscp( - rtc::DiffServCodePoint new_dscp) { + webrtc::DiffServCodePoint new_dscp) { if (!network_thread_->IsCurrent()) { // This is currently the common path as the derived channel classes // get called on the worker thread. 
There are still some tests though @@ -320,4 +297,4 @@ void MediaChannelUtil::TransportForMediaChannels::SetPreferredDscp( UpdateDscp(); } -} // namespace cricket +} // namespace webrtc diff --git a/media/base/media_channel_impl.h b/media/base/media_channel_impl.h index f8c8174efa..3c30e6bd1e 100644 --- a/media/base/media_channel_impl.h +++ b/media/base/media_channel_impl.h @@ -14,48 +14,16 @@ #include #include -#include -#include -#include -#include -#include -#include - -#include "absl/functional/any_invocable.h" -#include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/audio_options.h" -#include "api/call/audio_sink.h" +#include "api/array_view.h" #include "api/call/transport.h" -#include "api/crypto/frame_decryptor_interface.h" -#include "api/crypto/frame_encryptor_interface.h" -#include "api/frame_transformer_interface.h" -#include "api/media_types.h" -#include "api/rtc_error.h" -#include "api/rtp_headers.h" -#include "api/rtp_parameters.h" -#include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" -#include "api/transport/rtp/rtp_source.h" -#include "api/video/recordable_encoded_frame.h" -#include "api/video/video_frame.h" -#include "api/video/video_sink_interface.h" -#include "api/video/video_source_interface.h" -#include "api/video_codecs/video_encoder_factory.h" -#include "media/base/codec.h" #include "media/base/media_channel.h" -#include "media/base/stream_params.h" -#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/async_packet_socket.h" -#include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/dscp.h" -#include "rtc_base/logging.h" -#include "rtc_base/network/sent_packet.h" -#include "rtc_base/network_route.h" #include "rtc_base/socket.h" #include "rtc_base/thread_annotations.h" // This file contains the base classes for classes that implement @@ -63,31 +31,31 @@ // These implementation classes used to be the exposed interface names, // but this is in the process of being changed. -namespace cricket { +namespace webrtc { // The `MediaChannelUtil` class provides functionality that is used by // multiple MediaChannel-like objects, of both sending and receiving // types. class MediaChannelUtil { public: - MediaChannelUtil(webrtc::TaskQueueBase* network_thread, - bool enable_dscp = false); + explicit MediaChannelUtil(TaskQueueBase* network_thread, + bool enable_dscp = false); virtual ~MediaChannelUtil(); // Returns the absolute sendtime extension id value from media channel. virtual int GetRtpSendTimeExtnId() const; - webrtc::Transport* transport() { return &transport_; } + Transport* transport() { return &transport_; } // Base methods to send packet using MediaChannelNetworkInterface. // These methods are used by some tests only. - bool SendPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options); + bool SendPacket(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options); - bool SendRtcp(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options); + bool SendRtcp(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options); int SetOption(MediaChannelNetworkInterface::SocketType type, - rtc::Socket::Option opt, + Socket::Option opt, int option); // Functions that form part of one or more interface classes. @@ -106,50 +74,35 @@ class MediaChannelUtil { // Must be called on the network thread. 
bool HasNetworkInterface() const; - void SetFrameEncryptor( - uint32_t ssrc, - rtc::scoped_refptr frame_encryptor); - void SetFrameDecryptor( - uint32_t ssrc, - rtc::scoped_refptr frame_decryptor); - - void SetEncoderToPacketizerFrameTransformer( - uint32_t ssrc, - rtc::scoped_refptr frame_transformer); - void SetDepacketizerToDecoderFrameTransformer( - uint32_t ssrc, - rtc::scoped_refptr frame_transformer); - protected: bool DscpEnabled() const; - void SetPreferredDscp(rtc::DiffServCodePoint new_dscp); + void SetPreferredDscp(DiffServCodePoint new_dscp); private: // Implementation of the webrtc::Transport interface required // by Call(). - class TransportForMediaChannels : public webrtc::Transport { + class TransportForMediaChannels : public Transport { public: - TransportForMediaChannels(webrtc::TaskQueueBase* network_thread, - bool enable_dscp); + TransportForMediaChannels(TaskQueueBase* network_thread, bool enable_dscp); virtual ~TransportForMediaChannels(); // Implementation of webrtc::Transport - bool SendRtp(rtc::ArrayView packet, - const webrtc::PacketOptions& options) override; - bool SendRtcp(rtc::ArrayView packet) override; + bool SendRtp(ArrayView packet, + const PacketOptions& options) override; + bool SendRtcp(ArrayView packet) override; // Not implementation of webrtc::Transport void SetInterface(MediaChannelNetworkInterface* iface); int SetOption(MediaChannelNetworkInterface::SocketType type, - rtc::Socket::Option opt, + Socket::Option opt, int option); - bool DoSendPacket(rtc::CopyOnWriteBuffer* packet, + bool DoSendPacket(CopyOnWriteBuffer* packet, bool rtcp, - const rtc::PacketOptions& options); + const AsyncSocketPacketOptions& options); bool HasNetworkInterface() const { RTC_DCHECK_RUN_ON(network_thread_); @@ -157,12 +110,12 @@ class MediaChannelUtil { } bool DscpEnabled() const { return enable_dscp_; } - void SetPreferredDscp(rtc::DiffServCodePoint new_dscp); + void SetPreferredDscp(DiffServCodePoint new_dscp); private: // This is the DSCP value used for both RTP and RTCP channels if DSCP is // enabled. It can be changed at any time via `SetPreferredDscp`. 
- rtc::DiffServCodePoint PreferredDscp() const { + DiffServCodePoint PreferredDscp() const { RTC_DCHECK_RUN_ON(network_thread_); return preferred_dscp_; } @@ -173,23 +126,23 @@ class MediaChannelUtil { void UpdateDscp() RTC_RUN_ON(network_thread_); int SetOptionLocked(MediaChannelNetworkInterface::SocketType type, - rtc::Socket::Option opt, + Socket::Option opt, int option) RTC_RUN_ON(network_thread_); - const rtc::scoped_refptr network_safety_ + const scoped_refptr network_safety_ RTC_PT_GUARDED_BY(network_thread_); - webrtc::TaskQueueBase* const network_thread_; + TaskQueueBase* const network_thread_; const bool enable_dscp_; MediaChannelNetworkInterface* network_interface_ RTC_GUARDED_BY(network_thread_) = nullptr; - rtc::DiffServCodePoint preferred_dscp_ RTC_GUARDED_BY(network_thread_) = - rtc::DSCP_DEFAULT; + DiffServCodePoint preferred_dscp_ RTC_GUARDED_BY(network_thread_) = + webrtc::DSCP_DEFAULT; }; bool extmap_allow_mixed_ = false; TransportForMediaChannels transport_; }; -} // namespace cricket +} // namespace webrtc #endif // MEDIA_BASE_MEDIA_CHANNEL_IMPL_H_ diff --git a/media/base/media_config.h b/media/base/media_config.h index 782770569c..8d0a597d20 100644 --- a/media/base/media_config.h +++ b/media/base/media_config.h @@ -11,7 +11,7 @@ #ifndef MEDIA_BASE_MEDIA_CONFIG_H_ #define MEDIA_BASE_MEDIA_CONFIG_H_ -namespace cricket { +namespace webrtc { // Construction-time settings, passed on when creating // MediaChannels. @@ -22,6 +22,17 @@ struct MediaConfig { // and delete this flag. bool enable_dscp = true; + // If true, RTCStats timestamps are sourced from the monotonically increasing + // environment Clock, where the epoch is unspecified (i.e. up to the Clock + // implementation). If false, RTCStats timestamps are either sourced from + // system clock via webrtc::TimeUTCMicros() which is relative to 1970 but not + // necessarily monotonically increasing, or from a monotonic clock that is + // set to webrtc::TimeUTCMicros() at first call, and then proceeds to + // increase monotonically. + // TODO: bugs.webrtc.org/370535296 - Change default value to true and delete + // this flag once downstream projects have migrated. + bool stats_timestamp_with_environment_clock = false; + // Video-specific config. struct Video { // Enable WebRTC CPU Overuse Detection. This flag comes from the @@ -93,6 +104,14 @@ struct MediaConfig { bool operator!=(const MediaConfig& o) const { return !(*this == o); } }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated.
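To make the new MediaConfig flag concrete, here is a minimal sketch of how an embedder could opt into the environment-clock stats timestamps; it assumes only the struct shown in this hunk, and the helper name MakeMonotonicStatsConfig is illustrative, not part of the patch.

#include "media/base/media_config.h"

// Hypothetical helper: returns a config whose RTCStats timestamps come from
// the monotonically increasing environment Clock described above.
webrtc::MediaConfig MakeMonotonicStatsConfig() {
  webrtc::MediaConfig config;  // flag defaults to false
  config.stats_timestamp_with_environment_clock = true;
  return config;
}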
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::MediaConfig; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_MEDIA_CONFIG_H_ diff --git a/media/base/media_constants.cc b/media/base/media_constants.cc index 94ce3c7b21..966ac39eba 100644 --- a/media/base/media_constants.cc +++ b/media/base/media_constants.cc @@ -10,13 +10,15 @@ #include "media/base/media_constants.h" -namespace cricket { +#include + +namespace webrtc { const int kVideoCodecClockrate = 90000; const int kVideoMtu = 1200; const int kVideoRtpSendBufferSize = 262144; -const int kVideoRtpRecvBufferSize = 262144; +const int kVideoRtpRecvBufferSize = 1048576; const float kHighSystemCpuThreshold = 0.85f; const float kLowSystemCpuThreshold = 0.65f; @@ -24,7 +26,6 @@ const float kProcessCpuThreshold = 0.10f; const char kRedCodecName[] = "red"; const char kUlpfecCodecName[] = "ulpfec"; -const char kMultiplexCodecName[] = "multiplex"; // TODO(brandtr): Change this to 'flexfec' when we are confident that the // header format is not changing anymore. @@ -46,7 +47,6 @@ const char kCodecParamNotInNameValueFormat[] = ""; const char kOpusCodecName[] = "opus"; const char kL16CodecName[] = "L16"; const char kG722CodecName[] = "G722"; -const char kIlbcCodecName[] = "ILBC"; const char kPcmuCodecName[] = "PCMU"; const char kPcmaCodecName[] = "PCMA"; const char kCnCodecName[] = "CN"; @@ -60,6 +60,7 @@ const char kCodecParamSPropStereo[] = "sprop-stereo"; const char kCodecParamStereo[] = "stereo"; const char kCodecParamUseInbandFec[] = "useinbandfec"; const char kCodecParamUseDtx[] = "usedtx"; +const char kCodecParamCbr[] = "cbr"; const char kCodecParamMaxAverageBitrate[] = "maxaveragebitrate"; const char kCodecParamMaxPlaybackRate[] = "maxplaybackrate"; @@ -96,6 +97,8 @@ const char kCodecParamMaxBitrate[] = "x-google-max-bitrate"; const char kCodecParamMinBitrate[] = "x-google-min-bitrate"; const char kCodecParamStartBitrate[] = "x-google-start-bitrate"; const char kCodecParamMaxQuantization[] = "x-google-max-quantization"; +const char kCodecParamPerLayerPictureLossIndication[] = + "x-google-per-layer-pli"; const char kComfortNoiseCodecName[] = "CN"; @@ -124,9 +127,21 @@ const char kH265FmtpProfileCompatibilityIndicator[] = const char kH265FmtpInteropConstraints[] = "interop-constraints"; const char kH265FmtpTxMode[] = "tx-mode"; +// draft-ietf-payload-vp9 const char kVP9ProfileId[] = "profile-id"; +// https://aomediacodec.github.io/av1-rtp-spec/ +const char kAv1FmtpProfile[] = "profile"; +const char kAv1FmtpLevelIdx[] = "level-idx"; +const char kAv1FmtpTier[] = "tier"; + const int kDefaultVideoMaxFramerate = 60; +// Max encode quantizer for VP8/9 and AV1 encoders assuming libvpx/libaom API +// range [0, 63] +const int kDefaultVideoMaxQpVpx = 56; +const int kDefaultVideoMaxQpAv1 = 52; +// Max encode quantizer for H264/5 assuming the bitstream range [0, 51]. 
+const int kDefaultVideoMaxQpH26x = 51; const size_t kConferenceMaxNumSpatialLayers = 3; const size_t kConferenceMaxNumTemporalLayers = 3; @@ -135,4 +150,4 @@ const size_t kConferenceDefaultNumTemporalLayers = 3; // RFC 3556 and RFC 3890 const char kApplicationSpecificBandwidth[] = "AS"; const char kTransportSpecificBandwidth[] = "TIAS"; -} // namespace cricket +} // namespace webrtc diff --git a/media/base/media_constants.h b/media/base/media_constants.h index 3321aac41d..a7af2cc263 100644 --- a/media/base/media_constants.h +++ b/media/base/media_constants.h @@ -17,7 +17,7 @@ // This file contains constants related to media. -namespace cricket { +namespace webrtc { extern const int kVideoCodecClockrate; @@ -47,7 +47,6 @@ extern const char kCodecParamNotInNameValueFormat[]; extern const char kOpusCodecName[]; extern const char kL16CodecName[]; extern const char kG722CodecName[]; -extern const char kIlbcCodecName[]; extern const char kPcmuCodecName[]; extern const char kPcmaCodecName[]; extern const char kCnCodecName[]; @@ -62,8 +61,10 @@ extern const char kCodecParamSPropStereo[]; extern const char kCodecParamStereo[]; extern const char kCodecParamUseInbandFec[]; extern const char kCodecParamUseDtx[]; +extern const char kCodecParamCbr[]; extern const char kCodecParamMaxAverageBitrate[]; extern const char kCodecParamMaxPlaybackRate[]; +extern const char kCodecParamPerLayerPictureLossIndication[]; extern const char kParamValueTrue[]; // Parameters are stored as parameter/value pairs. For parameters who do not @@ -147,9 +148,18 @@ RTC_EXPORT extern const char kH265FmtpProfileCompatibilityIndicator[]; RTC_EXPORT extern const char kH265FmtpInteropConstraints[]; RTC_EXPORT extern const char kH265FmtpTxMode[]; +// draft-ietf-payload-vp9 extern const char kVP9ProfileId[]; +// https://aomediacodec.github.io/av1-rtp-spec/ +extern const char kAv1FmtpProfile[]; +extern const char kAv1FmtpLevelIdx[]; +extern const char kAv1FmtpTier[]; + extern const int kDefaultVideoMaxFramerate; +extern const int kDefaultVideoMaxQpVpx; +extern const int kDefaultVideoMaxQpAv1; +extern const int kDefaultVideoMaxQpH26x; extern const size_t kConferenceMaxNumSpatialLayers; extern const size_t kConferenceMaxNumTemporalLayers; @@ -157,6 +167,109 @@ extern const size_t kConferenceDefaultNumTemporalLayers; extern const char kApplicationSpecificBandwidth[]; extern const char kTransportSpecificBandwidth[]; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
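As a hedged illustration of the new per-codec QP ceilings, the sketch below maps a payload name to the matching constant; DefaultMaxQpFor is a hypothetical helper, and only constants visible in this patch plus absl::EqualsIgnoreCase are assumed.

#include "absl/strings/match.h"
#include "absl/strings/string_view.h"
#include "media/base/media_constants.h"

// Hypothetical helper: picks the default max encode QP added in
// media_constants.h (libvpx/libaom range [0, 63]; H.264/H.265 range [0, 51]).
int DefaultMaxQpFor(absl::string_view codec_name) {
  if (absl::EqualsIgnoreCase(codec_name, webrtc::kVp8CodecName) ||
      absl::EqualsIgnoreCase(codec_name, webrtc::kVp9CodecName)) {
    return webrtc::kDefaultVideoMaxQpVpx;
  }
  if (absl::EqualsIgnoreCase(codec_name, webrtc::kAv1CodecName)) {
    return webrtc::kDefaultVideoMaxQpAv1;
  }
  return webrtc::kDefaultVideoMaxQpH26x;
}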
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::kApplicationSpecificBandwidth; +using ::webrtc::kAv1CodecName; +using ::webrtc::kAv1FmtpLevelIdx; +using ::webrtc::kAv1FmtpProfile; +using ::webrtc::kAv1FmtpTier; +using ::webrtc::kCnCodecName; +using ::webrtc::kCodecParamAssociatedCodecName; +using ::webrtc::kCodecParamAssociatedPayloadType; +using ::webrtc::kCodecParamCbr; +using ::webrtc::kCodecParamMaxAverageBitrate; +using ::webrtc::kCodecParamMaxBitrate; +using ::webrtc::kCodecParamMaxPlaybackRate; +using ::webrtc::kCodecParamMaxPTime; +using ::webrtc::kCodecParamMaxQuantization; +using ::webrtc::kCodecParamMinBitrate; +using ::webrtc::kCodecParamMinPTime; +using ::webrtc::kCodecParamNotInNameValueFormat; +using ::webrtc::kCodecParamPerLayerPictureLossIndication; +using ::webrtc::kCodecParamPTime; +using ::webrtc::kCodecParamRtxTime; +using ::webrtc::kCodecParamSPropStereo; +using ::webrtc::kCodecParamStartBitrate; +using ::webrtc::kCodecParamStereo; +using ::webrtc::kCodecParamUseDtx; +using ::webrtc::kCodecParamUseInbandFec; +using ::webrtc::kComfortNoiseCodecName; +using ::webrtc::kConferenceDefaultNumTemporalLayers; +using ::webrtc::kConferenceMaxNumSpatialLayers; +using ::webrtc::kConferenceMaxNumTemporalLayers; +using ::webrtc::kDefaultVideoMaxFramerate; +using ::webrtc::kDefaultVideoMaxQpAv1; +using ::webrtc::kDefaultVideoMaxQpH26x; +using ::webrtc::kDefaultVideoMaxQpVpx; +using ::webrtc::kDtmfCodecName; +using ::webrtc::kFlexfecCodecName; +using ::webrtc::kFlexfecFmtpRepairWindow; +using ::webrtc::kG722CodecName; +using ::webrtc::kH264CodecName; +using ::webrtc::kH264FmtpLevelAsymmetryAllowed; +using ::webrtc::kH264FmtpPacketizationMode; +using ::webrtc::kH264FmtpProfileLevelId; +using ::webrtc::kH264FmtpSpropParameterSets; +using ::webrtc::kH264FmtpSpsPpsIdrInKeyframe; +using ::webrtc::kH264ProfileLevelConstrainedBaseline; +using ::webrtc::kH264ProfileLevelConstrainedHigh; +using ::webrtc::kH265CodecName; +using ::webrtc::kH265FmtpInteropConstraints; +using ::webrtc::kH265FmtpLevelId; +using ::webrtc::kH265FmtpProfileCompatibilityIndicator; +using ::webrtc::kH265FmtpProfileId; +using ::webrtc::kH265FmtpProfileSpace; +using ::webrtc::kH265FmtpTierFlag; +using ::webrtc::kH265FmtpTxMode; +using ::webrtc::kHighSystemCpuThreshold; +using ::webrtc::kL16CodecName; +using ::webrtc::kLowSystemCpuThreshold; +using ::webrtc::kMultiplexCodecName; +using ::webrtc::kOpusCodecName; +using ::webrtc::kOpusDefaultMaxPlaybackRate; +using ::webrtc::kOpusDefaultMaxPTime; +using ::webrtc::kOpusDefaultMinPTime; +using ::webrtc::kOpusDefaultPTime; +using ::webrtc::kOpusDefaultSPropStereo; +using ::webrtc::kOpusDefaultStereo; +using ::webrtc::kOpusDefaultUseDtx; +using ::webrtc::kOpusDefaultUseInbandFec; +using ::webrtc::kPacketizationParamRaw; +using ::webrtc::kParamValueEmpty; +using ::webrtc::kParamValueTrue; +using ::webrtc::kPcmaCodecName; +using ::webrtc::kPcmuCodecName; +using ::webrtc::kPreferredMaxPTime; +using ::webrtc::kPreferredMinPTime; +using ::webrtc::kPreferredSPropStereo; +using ::webrtc::kPreferredStereo; +using ::webrtc::kPreferredUseInbandFec; +using ::webrtc::kProcessCpuThreshold; +using ::webrtc::kRedCodecName; +using ::webrtc::kRtcpFbCcmParamFir; +using ::webrtc::kRtcpFbNackParamPli; +using ::webrtc::kRtcpFbParamCcm; +using ::webrtc::kRtcpFbParamLntf; +using ::webrtc::kRtcpFbParamNack; +using ::webrtc::kRtcpFbParamRemb; +using ::webrtc::kRtcpFbParamRrtr; +using ::webrtc::kRtcpFbParamTransportCc; +using ::webrtc::kRtxCodecName; +using 
::webrtc::kTransportSpecificBandwidth; +using ::webrtc::kUlpfecCodecName; +using ::webrtc::kVideoCodecClockrate; +using ::webrtc::kVideoMtu; +using ::webrtc::kVideoRtpRecvBufferSize; +using ::webrtc::kVideoRtpSendBufferSize; +using ::webrtc::kVp8CodecName; +using ::webrtc::kVp9CodecName; +using ::webrtc::kVP9ProfileId; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_MEDIA_CONSTANTS_H_ diff --git a/media/base/media_engine.cc b/media/base/media_engine.cc index 7304ab03d7..f1f5173027 100644 --- a/media/base/media_engine.cc +++ b/media/base/media_engine.cc @@ -13,32 +13,54 @@ #include #include +#include +#include #include #include +#include #include "absl/algorithm/container.h" -#include "api/video/video_bitrate_allocation.h" +#include "api/array_view.h" +#include "api/field_trials_view.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/rtp_transceiver_direction.h" +#include "api/video/video_codec_constants.h" +#include "api/video_codecs/scalability_mode.h" +#include "media/base/codec.h" +#include "media/base/codec_comparators.h" +#include "media/base/rid_description.h" +#include "media/base/stream_params.h" #include "rtc_base/checks.h" -#include "rtc_base/string_encode.h" -namespace cricket { +namespace webrtc { +namespace { +bool SupportsMode(const Codec& codec, + std::optional scalability_mode) { + if (!scalability_mode.has_value()) { + return true; + } + return absl::c_any_of( + codec.scalability_modes, [&](webrtc::ScalabilityMode mode) { + return ScalabilityModeToString(mode) == *scalability_mode; + }); +} -RtpCapabilities::RtpCapabilities() = default; -RtpCapabilities::~RtpCapabilities() = default; +} // namespace -webrtc::RtpParameters CreateRtpParametersWithOneEncoding() { - webrtc::RtpParameters parameters; - webrtc::RtpEncodingParameters encoding; +RtpParameters CreateRtpParametersWithOneEncoding() { + RtpParameters parameters; + RtpEncodingParameters encoding; parameters.encodings.push_back(encoding); return parameters; } -webrtc::RtpParameters CreateRtpParametersWithEncodings(StreamParams sp) { +RtpParameters CreateRtpParametersWithEncodings(StreamParams sp) { std::vector primary_ssrcs; sp.GetPrimarySsrcs(&primary_ssrcs); size_t encoding_count = primary_ssrcs.size(); - std::vector encodings(encoding_count); + std::vector encodings(encoding_count); for (size_t i = 0; i < encodings.size(); ++i) { encodings[i].ssrc = primary_ssrcs[i]; } @@ -49,39 +71,40 @@ webrtc::RtpParameters CreateRtpParametersWithEncodings(StreamParams sp) { encodings[i].rid = rids[i].rid; } - webrtc::RtpParameters parameters; + RtpParameters parameters; parameters.encodings = encodings; parameters.rtcp.cname = sp.cname; return parameters; } -std::vector GetDefaultEnabledRtpHeaderExtensions( +std::vector GetDefaultEnabledRtpHeaderExtensions( const RtpHeaderExtensionQueryInterface& query_interface) { - std::vector extensions; + std::vector extensions; for (const auto& entry : query_interface.GetRtpHeaderExtensions()) { - if (entry.direction != webrtc::RtpTransceiverDirection::kStopped) + if (entry.direction != RtpTransceiverDirection::kStopped) extensions.emplace_back(entry.uri, *entry.preferred_id); } return extensions; } -webrtc::RTCError CheckScalabilityModeValues( - const webrtc::RtpParameters& rtp_parameters, - rtc::ArrayView codec_preferences, - absl::optional send_codec) { +RTCError CheckScalabilityModeValues(const RtpParameters& rtp_parameters, + ArrayView send_codecs, + std::optional send_codec) { using webrtc::RTCErrorType; - if 
(codec_preferences.empty()) { + if (send_codecs.empty()) { // This is an audio sender or an extra check in the stack where the codec // list is not available and we can't check the scalability_mode values. - return webrtc::RTCError::OK(); + return RTCError::OK(); } for (size_t i = 0; i < rtp_parameters.encodings.size(); ++i) { if (rtp_parameters.encodings[i].codec) { bool codecFound = false; - for (const cricket::VideoCodec& codec : codec_preferences) { - if (codec.MatchesRtpCodec(*rtp_parameters.encodings[i].codec)) { + for (const webrtc::Codec& codec : send_codecs) { + if (IsSameRtpCodecIgnoringLevel(codec, + *rtp_parameters.encodings[i].codec) && + SupportsMode(codec, rtp_parameters.encodings[i].scalability_mode)) { codecFound = true; send_codec = codec; break; @@ -97,7 +120,7 @@ webrtc::RTCError CheckScalabilityModeValues( if (rtp_parameters.encodings[i].scalability_mode) { if (!send_codec) { bool scalabilityModeFound = false; - for (const cricket::VideoCodec& codec : codec_preferences) { + for (const webrtc::Codec& codec : send_codecs) { for (const auto& scalability_mode : codec.scalability_modes) { if (ScalabilityModeToString(scalability_mode) == *rtp_parameters.encodings[i].scalability_mode) { @@ -134,15 +157,16 @@ webrtc::RTCError CheckScalabilityModeValues( } } - return webrtc::RTCError::OK(); + return RTCError::OK(); } -webrtc::RTCError CheckRtpParametersValues( - const webrtc::RtpParameters& rtp_parameters, - rtc::ArrayView codec_preferences, - absl::optional send_codec) { +RTCError CheckRtpParametersValues(const RtpParameters& rtp_parameters, + ArrayView send_codecs, + std::optional send_codec, + const FieldTrialsView& field_trials) { using webrtc::RTCErrorType; + bool has_scale_resolution_down_to = false; for (size_t i = 0; i < rtp_parameters.encodings.size(); ++i) { if (rtp_parameters.encodings[i].bitrate_priority <= 0) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_RANGE, @@ -166,7 +190,7 @@ webrtc::RTCError CheckRtpParametersValues( rtp_parameters.encodings[i].max_bitrate_bps) { if (*rtp_parameters.encodings[i].max_bitrate_bps < *rtp_parameters.encodings[i].min_bitrate_bps) { - LOG_AND_RETURN_ERROR(webrtc::RTCErrorType::INVALID_RANGE, + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_RANGE, "Attempted to set RtpParameters min bitrate " "larger than max bitrate."); } @@ -181,37 +205,53 @@ webrtc::RTCError CheckRtpParametersValues( } } - if (rtp_parameters.encodings[i].requested_resolution && - rtp_parameters.encodings[i].scale_resolution_down_by) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_RANGE, - "Attempted to set scale_resolution_down_by and " - "requested_resolution simultaniously."); + if (rtp_parameters.encodings[i].scale_resolution_down_to.has_value()) { + has_scale_resolution_down_to = true; + if (rtp_parameters.encodings[i].scale_resolution_down_to->width <= 0 || + rtp_parameters.encodings[i].scale_resolution_down_to->height <= 0) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, + "The resolution dimensions must be positive."); + } } - if (i > 0 && rtp_parameters.encodings[i - 1].codec != - rtp_parameters.encodings[i].codec) { - LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, - "Attempted to use different codec values for " - "different encodings."); + if (!field_trials.IsEnabled("WebRTC-MixedCodecSimulcast")) { + if (i > 0 && rtp_parameters.encodings[i - 1].codec != + rtp_parameters.encodings[i].codec) { + LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, + "Attempted to use different codec values for " + "different encodings."); + } } } - 
return CheckScalabilityModeValues(rtp_parameters, codec_preferences, - send_codec); + if (has_scale_resolution_down_to && + absl::c_any_of(rtp_parameters.encodings, + [](const webrtc::RtpEncodingParameters& encoding) { + return encoding.active && + !encoding.scale_resolution_down_to.has_value(); + })) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, + "If a resolution is specified on any encoding then " + "it must be specified on all encodings."); + } + + return CheckScalabilityModeValues(rtp_parameters, send_codecs, send_codec); } -webrtc::RTCError CheckRtpParametersInvalidModificationAndValues( - const webrtc::RtpParameters& old_rtp_parameters, - const webrtc::RtpParameters& rtp_parameters) { +RTCError CheckRtpParametersInvalidModificationAndValues( + const RtpParameters& old_rtp_parameters, + const RtpParameters& rtp_parameters, + const FieldTrialsView& field_trials) { return CheckRtpParametersInvalidModificationAndValues( - old_rtp_parameters, rtp_parameters, {}, absl::nullopt); + old_rtp_parameters, rtp_parameters, {}, std::nullopt, field_trials); } -webrtc::RTCError CheckRtpParametersInvalidModificationAndValues( - const webrtc::RtpParameters& old_rtp_parameters, - const webrtc::RtpParameters& rtp_parameters, - rtc::ArrayView codec_preferences, - absl::optional send_codec) { +RTCError CheckRtpParametersInvalidModificationAndValues( + const RtpParameters& old_rtp_parameters, + const RtpParameters& rtp_parameters, + ArrayView send_codecs, + std::optional send_codec, + const FieldTrialsView& field_trials) { using webrtc::RTCErrorType; if (rtp_parameters.encodings.size() != old_rtp_parameters.encodings.size()) { LOG_AND_RETURN_ERROR( @@ -246,12 +286,12 @@ webrtc::RTCError CheckRtpParametersInvalidModificationAndValues( "Attempted to set RtpParameters with modified SSRC"); } - return CheckRtpParametersValues(rtp_parameters, codec_preferences, - send_codec); + return CheckRtpParametersValues(rtp_parameters, send_codecs, send_codec, + field_trials); } CompositeMediaEngine::CompositeMediaEngine( - std::unique_ptr trials, + std::unique_ptr trials, std::unique_ptr audio_engine, std::unique_ptr video_engine) : trials_(std::move(trials)), @@ -288,4 +328,4 @@ const VideoEngineInterface& CompositeMediaEngine::video() const { return *video_engine_.get(); } -} // namespace cricket +} // namespace webrtc diff --git a/media/base/media_engine.h b/media/base/media_engine.h index 428123516f..1cef38e9ec 100644 --- a/media/base/media_engine.h +++ b/media/base/media_engine.h @@ -11,67 +11,64 @@ #ifndef MEDIA_BASE_MEDIA_ENGINE_H_ #define MEDIA_BASE_MEDIA_ENGINE_H_ +#include #include -#include +#include #include +#include "api/array_view.h" +#include "api/audio/audio_device.h" +#include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_encoder_factory.h" +#include "api/audio_options.h" #include "api/crypto/crypto_options.h" #include "api/field_trials_view.h" +#include "api/rtc_error.h" #include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/video/video_bitrate_allocator_factory.h" #include "call/audio_state.h" #include "media/base/codec.h" #include "media/base/media_channel.h" -#include "media/base/media_channel_impl.h" #include "media/base/media_config.h" -#include "media/base/video_common.h" +#include "media/base/stream_params.h" #include "rtc_base/system/file_wrapper.h" namespace webrtc { -class AudioDeviceModule; + class AudioMixer; -class AudioProcessing; class Call; -} // namespace webrtc - 
-namespace cricket { // Checks that the scalability_mode value of each encoding is supported by at // least one video codec of the list. If the list is empty, no check is done. -webrtc::RTCError CheckScalabilityModeValues( - const webrtc::RtpParameters& new_parameters, - rtc::ArrayView codec_preferences, - absl::optional send_codec); +RTCError CheckScalabilityModeValues(const RtpParameters& new_parameters, + ArrayView send_codecs, + std::optional send_codec); // Checks the parameters have valid and supported values, and checks parameters // with CheckScalabilityModeValues(). -webrtc::RTCError CheckRtpParametersValues( - const webrtc::RtpParameters& new_parameters, - rtc::ArrayView codec_preferences, - absl::optional send_codec); +RTCError CheckRtpParametersValues(const RtpParameters& new_parameters, + ArrayView send_codecs, + std::optional send_codec, + const FieldTrialsView& field_trials); // Checks that the immutable values have not changed in new_parameters and // checks all parameters with CheckRtpParametersValues(). -webrtc::RTCError CheckRtpParametersInvalidModificationAndValues( - const webrtc::RtpParameters& old_parameters, - const webrtc::RtpParameters& new_parameters, - rtc::ArrayView codec_preferences, - absl::optional send_codec); +RTCError CheckRtpParametersInvalidModificationAndValues( + const RtpParameters& old_parameters, + const RtpParameters& new_parameters, + ArrayView send_codecs, + std::optional send_codec, + const FieldTrialsView& field_trials); // Checks that the immutable values have not changed in new_parameters and // checks parameters (except SVC) with CheckRtpParametersValues(). It should // usually be paired with a call to CheckScalabilityModeValues(). -webrtc::RTCError CheckRtpParametersInvalidModificationAndValues( - const webrtc::RtpParameters& old_parameters, - const webrtc::RtpParameters& new_parameters); - -struct RtpCapabilities { - RtpCapabilities(); - ~RtpCapabilities(); - std::vector header_extensions; -}; +RTCError CheckRtpParametersInvalidModificationAndValues( + const RtpParameters& old_parameters, + const RtpParameters& new_parameters, + const FieldTrialsView& field_trials); class RtpHeaderExtensionQueryInterface { public: @@ -79,8 +76,8 @@ class RtpHeaderExtensionQueryInterface { // Returns a vector of RtpHeaderExtensionCapability, whose direction is // kStopped if the extension is stopped (not used) by default. - virtual std::vector - GetRtpHeaderExtensions() const = 0; + virtual std::vector GetRtpHeaderExtensions() + const = 0; }; class VoiceEngineInterface : public RtpHeaderExtensionQueryInterface { @@ -96,44 +93,48 @@ class VoiceEngineInterface : public RtpHeaderExtensionQueryInterface { virtual void Init() = 0; // TODO(solenberg): Remove once VoE API refactoring is done. 
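For the parameter checks declared above, a small sketch of RtpParameters that pass the min/max bitrate validation; it uses CreateRtpParametersWithOneEncoding(), which this patch moves into the webrtc namespace, and the bitrate numbers are arbitrary.

#include "api/rtp_parameters.h"
#include "media/base/media_engine.h"

// Sketch only: one encoding whose min bitrate does not exceed its max, the
// ordering that CheckRtpParametersValues() rejects when violated.
webrtc::RtpParameters MakeBoundedBitrateParameters() {
  webrtc::RtpParameters parameters =
      webrtc::CreateRtpParametersWithOneEncoding();
  parameters.encodings[0].min_bitrate_bps = 300000;
  parameters.encodings[0].max_bitrate_bps = 1500000;
  return parameters;
}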
- virtual rtc::scoped_refptr GetAudioState() const = 0; + virtual scoped_refptr GetAudioState() const = 0; virtual std::unique_ptr CreateSendChannel( - webrtc::Call* call, - const MediaConfig& config, - const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::AudioCodecPairId codec_pair_id) { - // TODO(hta): Make pure virtual when all downstream has updated - RTC_CHECK_NOTREACHED(); - return nullptr; - } + Call* /* call */, + const MediaConfig& /* config */, + const AudioOptions& /* options */, + const CryptoOptions& /* crypto_options */, + AudioCodecPairId /* codec_pair_id */) = 0; virtual std::unique_ptr - CreateReceiveChannel(webrtc::Call* call, - const MediaConfig& config, - const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::AudioCodecPairId codec_pair_id) { - // TODO(hta): Make pure virtual when all downstream has updated - RTC_CHECK_NOTREACHED(); - return nullptr; + CreateReceiveChannel(Call* /* call */, + const MediaConfig& /* config */, + const AudioOptions& /* options */, + const CryptoOptions& /* crypto_options */, + AudioCodecPairId /* codec_pair_id */) = 0; + + // Legacy: Retrieve list of supported codecs. + // + protection codecs, and assigns PT numbers that may have to be + // reassigned. + // This function is being moved to CodecVendor + // TODO: https://issues.webrtc.org/360058654 - remove when all users updated. + [[deprecated]] inline const std::vector& send_codecs() const { + return LegacySendCodecs(); + } + [[deprecated]] inline const std::vector& recv_codecs() const { + return LegacyRecvCodecs(); } + virtual const std::vector& LegacySendCodecs() const = 0; + virtual const std::vector& LegacyRecvCodecs() const = 0; - virtual const std::vector& send_codecs() const = 0; - virtual const std::vector& recv_codecs() const = 0; + virtual AudioEncoderFactory* encoder_factory() const = 0; + virtual AudioDecoderFactory* decoder_factory() const = 0; // Starts AEC dump using existing file, a maximum file size in bytes can be // specified. Logging is stopped just before the size limit is exceeded. // If max_size_bytes is set to a value <= 0, no limit will be used. - virtual bool StartAecDump(webrtc::FileWrapper file, - int64_t max_size_bytes) = 0; + virtual bool StartAecDump(FileWrapper file, int64_t max_size_bytes) = 0; // Stops recording AEC dump. 
virtual void StopAecDump() = 0; - virtual absl::optional - GetAudioDeviceStats() = 0; + virtual std::optional GetAudioDeviceStats() = 0; }; class VideoEngineInterface : public RtpHeaderExtensionQueryInterface { @@ -145,39 +146,40 @@ class VideoEngineInterface : public RtpHeaderExtensionQueryInterface { VideoEngineInterface& operator=(const VideoEngineInterface&) = delete; virtual std::unique_ptr CreateSendChannel( - webrtc::Call* call, - const MediaConfig& config, - const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) { - // Default implementation, delete when all is updated - RTC_CHECK_NOTREACHED(); - return nullptr; - } + Call* /* call */, + const MediaConfig& /* config */, + const VideoOptions& /* options */, + const CryptoOptions& /* crypto_options */, + VideoBitrateAllocatorFactory* + /* video_bitrate_allocator_factory */) = 0; virtual std::unique_ptr - CreateReceiveChannel(webrtc::Call* call, - const MediaConfig& config, - const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options) { - // Default implementation, delete when all is updated - RTC_CHECK_NOTREACHED(); - return nullptr; + CreateReceiveChannel(Call* /* call */, + const MediaConfig& /* config */, + const VideoOptions& /* options */, + const CryptoOptions& /* crypto_options */) = 0; + + // Legacy: Retrieve list of supported codecs. + // + protection codecs, and assigns PT numbers that may have to be + // reassigned. + // This functionality is being moved to the CodecVendor class. + // TODO: https://issues.webrtc.org/360058654 - deprecate and remove. + [[deprecated]] inline std::vector send_codecs() const { + return LegacySendCodecs(); } - - // Retrieve list of supported codecs. - virtual std::vector send_codecs() const = 0; - virtual std::vector recv_codecs() const = 0; + [[deprecated]] inline std::vector recv_codecs() const { + return LegacyRecvCodecs(); + } + virtual std::vector LegacySendCodecs() const = 0; + virtual std::vector LegacyRecvCodecs() const = 0; // As above, but if include_rtx is false, don't include RTX codecs. - // TODO(bugs.webrtc.org/13931): Remove default implementation once - // upstream subclasses have converted. - virtual std::vector send_codecs(bool include_rtx) const { - RTC_DCHECK(include_rtx); - return send_codecs(); + [[deprecated]] inline std::vector send_codecs(bool include_rtx) const { + return LegacySendCodecs(include_rtx); } - virtual std::vector recv_codecs(bool include_rtx) const { - RTC_DCHECK(include_rtx); - return recv_codecs(); + virtual std::vector LegacySendCodecs(bool include_rtx) const = 0; + virtual std::vector LegacyRecvCodecs(bool include_rtx) const = 0; + [[deprecated]] inline std::vector recv_codecs(bool include_rtx) const { + return LegacyRecvCodecs(include_rtx); } }; @@ -203,7 +205,7 @@ class MediaEngineInterface { // Optionally owns a FieldTrialsView trials map. 
class CompositeMediaEngine : public MediaEngineInterface { public: - CompositeMediaEngine(std::unique_ptr trials, + CompositeMediaEngine(std::unique_ptr trials, std::unique_ptr audio_engine, std::unique_ptr video_engine); CompositeMediaEngine(std::unique_ptr audio_engine, @@ -219,21 +221,39 @@ class CompositeMediaEngine : public MediaEngineInterface { const VideoEngineInterface& video() const override; private: - const std::unique_ptr trials_; + const std::unique_ptr trials_; const std::unique_ptr voice_engine_; const std::unique_ptr video_engine_; }; -webrtc::RtpParameters CreateRtpParametersWithOneEncoding(); -webrtc::RtpParameters CreateRtpParametersWithEncodings(StreamParams sp); +RtpParameters CreateRtpParametersWithOneEncoding(); +RtpParameters CreateRtpParametersWithEncodings(StreamParams sp); // Returns a vector of RTP extensions as visible from RtpSender/Receiver // GetCapabilities(). The returned vector only shows what will definitely be // offered by default, i.e. the list of extensions returned from // GetRtpHeaderExtensions() that are not kStopped. -std::vector GetDefaultEnabledRtpHeaderExtensions( +std::vector GetDefaultEnabledRtpHeaderExtensions( const RtpHeaderExtensionQueryInterface& query_interface); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::CheckRtpParametersInvalidModificationAndValues; +using ::webrtc::CheckRtpParametersValues; +using ::webrtc::CheckScalabilityModeValues; +using ::webrtc::CompositeMediaEngine; +using ::webrtc::CreateRtpParametersWithEncodings; +using ::webrtc::CreateRtpParametersWithOneEncoding; +using ::webrtc::GetDefaultEnabledRtpHeaderExtensions; +using ::webrtc::MediaEngineInterface; +using ::webrtc::RtpHeaderExtensionQueryInterface; +using ::webrtc::VideoEngineInterface; +using ::webrtc::VoiceEngineInterface; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_MEDIA_ENGINE_H_ diff --git a/media/base/media_engine_unittest.cc b/media/base/media_engine_unittest.cc index b8db32a2d5..54cf713f42 100644 --- a/media/base/media_engine_unittest.cc +++ b/media/base/media_engine_unittest.cc @@ -10,6 +10,17 @@ #include "media/base/media_engine.h" +#include +#include +#include + +#include "api/audio/audio_device.h" +#include "api/rtp_parameters.h" +#include "api/rtp_transceiver_direction.h" +#include "api/scoped_refptr.h" +#include "call/audio_state.h" +#include "media/base/codec.h" +#include "rtc_base/system/file_wrapper.h" #include "test/gmock.h" #include "test/gtest.h" @@ -21,7 +32,7 @@ using ::webrtc::RtpExtension; using ::webrtc::RtpHeaderExtensionCapability; using ::webrtc::RtpTransceiverDirection; -namespace cricket { +namespace webrtc { namespace { class MockRtpHeaderExtensionQueryInterface @@ -61,26 +72,23 @@ TEST(MediaEngineTest, ReturnsNotStoppedHeaderExtensions) { // functions with default implementations are not mocked. 
class MostlyMockVoiceEngineInterface : public VoiceEngineInterface { public: - MOCK_METHOD(std::vector, + MOCK_METHOD(std::vector, GetRtpHeaderExtensions, (), (const, override)); MOCK_METHOD(void, Init, (), (override)); - MOCK_METHOD(rtc::scoped_refptr, - GetAudioState, - (), - (const, override)); - MOCK_METHOD(std::vector&, send_codecs, (), (const, override)); - MOCK_METHOD(std::vector&, recv_codecs, (), (const, override)); + MOCK_METHOD(scoped_refptr, GetAudioState, (), (const, override)); + MOCK_METHOD(std::vector&, LegacySendCodecs, (), (const, override)); + MOCK_METHOD(std::vector&, LegacyRecvCodecs, (), (const, override)); MOCK_METHOD(bool, StartAecDump, - (webrtc::FileWrapper file, int64_t max_size_bytes), + (FileWrapper file, int64_t max_size_bytes), (override)); MOCK_METHOD(void, StopAecDump, (), (override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, GetAudioDeviceStats, (), (override)); }; -} // namespace cricket +} // namespace webrtc diff --git a/media/base/rid_description.cc b/media/base/rid_description.cc index b3eae272f9..154529e137 100644 --- a/media/base/rid_description.cc +++ b/media/base/rid_description.cc @@ -10,7 +10,9 @@ #include "media/base/rid_description.h" -namespace cricket { +#include + +namespace webrtc { RidDescription::RidDescription() = default; RidDescription::RidDescription(const std::string& rid, RidDirection direction) @@ -21,8 +23,7 @@ RidDescription& RidDescription::operator=(const RidDescription& other) = default; bool RidDescription::operator==(const RidDescription& other) const { return rid == other.rid && direction == other.direction && - payload_types == other.payload_types && - restrictions == other.restrictions; + codecs == other.codecs && restrictions == other.restrictions; } -} // namespace cricket +} // namespace webrtc diff --git a/media/base/rid_description.h b/media/base/rid_description.h index 04c0f3d4bc..042e2840e4 100644 --- a/media/base/rid_description.h +++ b/media/base/rid_description.h @@ -15,7 +15,9 @@ #include #include -namespace cricket { +#include "media/base/codec.h" + +namespace webrtc { enum class RidDirection { kSend, kReceive }; @@ -73,9 +75,11 @@ struct RidDescription final { // the stream were changed to "sendrecv" or "recvonly". RidDirection direction; - // The list of codec payload types for this stream. - // It should be a subset of the payloads supported for the media section. - std::vector payload_types; + // The list of codecs for this stream. + // When the RID is serialized/deserialized, these codecs are mapped to/from + // the payload types listed in the media section, ensuring PT consistency in + // the SDP even when `codecs[i].id` cannot be trusted. + std::vector codecs; // Contains key-value pairs for restrictions. // The keys are not validated against a known set. @@ -88,6 +92,15 @@ struct RidDescription final { std::map restrictions; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
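A brief usage sketch of the updated RidDescription; the constructor and fields are the ones shown in this patch, the restriction keys follow RFC 8851 naming rather than anything this struct mandates, and MakeConstrainedSendRid is illustrative only.

#include "media/base/rid_description.h"

// Illustrative only: a send-direction RID with unvalidated key-value
// restrictions, as described by the struct comments above.
webrtc::RidDescription MakeConstrainedSendRid() {
  webrtc::RidDescription rid("hi", webrtc::RidDirection::kSend);
  rid.restrictions["max-width"] = "640";
  rid.restrictions["max-height"] = "360";
  return rid;
}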
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::RidDescription; +using ::webrtc::RidDirection; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_RID_DESCRIPTION_H_ diff --git a/media/base/rtp_utils.cc b/media/base/rtp_utils.cc index c630cbc7e4..4a905b2fe7 100644 --- a/media/base/rtp_utils.cc +++ b/media/base/rtp_utils.cc @@ -12,10 +12,13 @@ #include +#include #include // PacketTimeUpdateParams is defined in asyncpacketsocket.h. // TODO(sergeyu): Find more appropriate place for PacketTimeUpdateParams. +#include "absl/strings/string_view.h" +#include "api/array_view.h" #include "media/base/turn_utils.h" #include "modules/rtp_rtcp/source/rtp_util.h" #include "rtc_base/async_packet_socket.h" @@ -23,7 +26,7 @@ #include "rtc_base/checks.h" #include "rtc_base/message_digest.h" -namespace cricket { +namespace webrtc { static const size_t kRtcpPayloadTypeOffset = 1; static const size_t kRtpExtensionHeaderLen = 4; @@ -73,7 +76,7 @@ void UpdateAbsSendTimeExtensionValue(uint8_t* extension_data, // the RTP packet. void UpdateRtpAuthTag(uint8_t* rtp, size_t length, - const rtc::PacketTimeUpdateParams& packet_time_params) { + const PacketTimeUpdateParams& packet_time_params) { // If there is no key, return. if (packet_time_params.srtp_auth_key.empty()) { return; @@ -99,10 +102,10 @@ void UpdateRtpAuthTag(uint8_t* rtp, size_t auth_required_length = length - tag_length + kRocLength; uint8_t output[64]; - size_t result = - rtc::ComputeHmac(rtc::DIGEST_SHA_1, &packet_time_params.srtp_auth_key[0], - packet_time_params.srtp_auth_key.size(), rtp, - auth_required_length, output, sizeof(output)); + size_t result = webrtc::ComputeHmac( + webrtc::DIGEST_SHA_1, &packet_time_params.srtp_auth_key[0], + packet_time_params.srtp_auth_key.size(), rtp, auth_required_length, + output, sizeof(output)); if (result < tag_length) { RTC_DCHECK_NOTREACHED(); @@ -144,7 +147,7 @@ bool GetRtcpSsrc(const void* data, size_t len, uint32_t* value) { // SDES packet parsing is not supported. if (pl_type == kRtcpTypeSDES) return false; - *value = rtc::GetBE32(static_cast(data) + 4); + *value = webrtc::GetBE32(static_cast(data) + 4); return true; } @@ -172,12 +175,11 @@ absl::string_view RtpPacketTypeToString(RtpPacketType packet_type) { RTC_CHECK_NOTREACHED(); } -RtpPacketType InferRtpPacketType(rtc::ArrayView packet) { - if (webrtc::IsRtcpPacket( - rtc::reinterpret_array_view(packet))) { +RtpPacketType InferRtpPacketType(ArrayView packet) { + if (webrtc::IsRtcpPacket(packet)) { return RtpPacketType::kRtcp; } - if (webrtc::IsRtpPacket(rtc::reinterpret_array_view(packet))) { + if (webrtc::IsRtpPacket(packet)) { return RtpPacketType::kRtp; } return RtpPacketType::kUnknown; @@ -217,7 +219,7 @@ bool ValidateRtpHeader(const uint8_t* rtp, // Getting extension profile length. // Length is in 32 bit words. - uint16_t extension_length_in_32bits = rtc::GetBE16(rtp + 2); + uint16_t extension_length_in_32bits = webrtc::GetBE16(rtp + 2); size_t extension_length = extension_length_in_32bits * 4; size_t rtp_header_length = extension_length + @@ -238,7 +240,7 @@ bool ValidateRtpHeader(const uint8_t* rtp, // ValidateRtpHeader() must be called before this method to make sure, we have // a sane rtp packet. 
bool UpdateRtpAbsSendTimeExtension(uint8_t* rtp, - size_t length, + size_t /* length */, int extension_id, uint64_t time_us) { // 0 1 2 3 @@ -265,9 +267,9 @@ bool UpdateRtpAbsSendTimeExtension(uint8_t* rtp, rtp += header_length_without_extension; // Getting extension profile ID and length. - uint16_t profile_id = rtc::GetBE16(rtp); + uint16_t profile_id = webrtc::GetBE16(rtp); // Length is in 32 bit words. - uint16_t extension_length_in_32bits = rtc::GetBE16(rtp + 2); + uint16_t extension_length_in_32bits = webrtc::GetBE16(rtp + 2); size_t extension_length = extension_length_in_32bits * 4; rtp += kRtpExtensionHeaderLen; // Moving past extension header. @@ -354,7 +356,7 @@ bool UpdateRtpAbsSendTimeExtension(uint8_t* rtp, bool ApplyPacketOptions(uint8_t* data, size_t length, - const rtc::PacketTimeUpdateParams& packet_time_params, + const PacketTimeUpdateParams& packet_time_params, uint64_t time_us) { RTC_DCHECK(data); RTC_DCHECK(length); @@ -371,13 +373,13 @@ bool ApplyPacketOptions(uint8_t* data, // indication. size_t rtp_start_pos; size_t rtp_length; - if (!UnwrapTurnPacket(data, length, &rtp_start_pos, &rtp_length)) { + if (!webrtc::UnwrapTurnPacket(data, length, &rtp_start_pos, &rtp_length)) { RTC_DCHECK_NOTREACHED(); return false; } // Making sure we have a valid RTP packet at the end. - auto packet = rtc::MakeArrayView(data + rtp_start_pos, rtp_length); + auto packet = MakeArrayView(data + rtp_start_pos, rtp_length); if (!webrtc::IsRtpPacket(packet) || !ValidateRtpHeader(data + rtp_start_pos, rtp_length, nullptr)) { RTC_DCHECK_NOTREACHED(); @@ -398,4 +400,4 @@ bool ApplyPacketOptions(uint8_t* data, return true; } -} // namespace cricket +} // namespace webrtc diff --git a/media/base/rtp_utils.h b/media/base/rtp_utils.h index a501fd7af3..991af8d16b 100644 --- a/media/base/rtp_utils.h +++ b/media/base/rtp_utils.h @@ -11,16 +11,15 @@ #ifndef MEDIA_BASE_RTP_UTILS_H_ #define MEDIA_BASE_RTP_UTILS_H_ +#include +#include + #include "absl/strings/string_view.h" #include "api/array_view.h" -#include "rtc_base/byte_order.h" +#include "rtc_base/async_packet_socket.h" #include "rtc_base/system/rtc_export.h" -namespace rtc { -struct PacketTimeUpdateParams; -} // namespace rtc - -namespace cricket { +namespace webrtc { const size_t kMinRtpPacketLen = 12; const size_t kMaxRtpPacketLen = 2048; @@ -46,7 +45,7 @@ bool GetRtcpType(const void* data, size_t len, int* value); bool GetRtcpSsrc(const void* data, size_t len, uint32_t* value); // Checks the packet header to determine if it can be an RTP or RTCP packet. -RtpPacketType InferRtpPacketType(rtc::ArrayView packet); +RtpPacketType InferRtpPacketType(ArrayView packet); // True if |payload type| is 0-127. bool IsValidRtpPayloadType(int payload_type); @@ -72,9 +71,37 @@ bool UpdateRtpAbsSendTimeExtension(uint8_t* rtp, bool RTC_EXPORT ApplyPacketOptions(uint8_t* data, size_t length, - const rtc::PacketTimeUpdateParams& packet_time_params, + const PacketTimeUpdateParams& packet_time_params, uint64_t time_us); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
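A minimal sketch of the packet-classification helper declared above; the ArrayView element type (const uint8_t) is an assumption based on the IsRtpPacket/IsRtcpPacket calls in rtp_utils.cc, and LooksLikeRtcp is a hypothetical wrapper.

#include <cstdint>

#include "api/array_view.h"
#include "media/base/rtp_utils.h"

// Hypothetical wrapper: true when the buffer parses as RTCP rather than RTP.
bool LooksLikeRtcp(webrtc::ArrayView<const uint8_t> packet) {
  return webrtc::InferRtpPacketType(packet) == webrtc::RtpPacketType::kRtcp;
}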
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::ApplyPacketOptions; +using ::webrtc::GetRtcpSsrc; +using ::webrtc::GetRtcpType; +using ::webrtc::InferRtpPacketType; +using ::webrtc::IsValidRtpPacketSize; +using ::webrtc::IsValidRtpPayloadType; +using ::webrtc::kMaxRtpPacketLen; +using ::webrtc::kMinRtcpPacketLen; +using ::webrtc::kMinRtpPacketLen; +using ::webrtc::kRtcpTypeApp; +using ::webrtc::kRtcpTypeBye; +using ::webrtc::kRtcpTypePSFB; +using ::webrtc::kRtcpTypeRR; +using ::webrtc::kRtcpTypeRTPFB; +using ::webrtc::kRtcpTypeSDES; +using ::webrtc::kRtcpTypeSR; +using ::webrtc::RtcpTypes; +using ::webrtc::RtpPacketType; +using ::webrtc::RtpPacketTypeToString; +using ::webrtc::UpdateRtpAbsSendTimeExtension; +using ::webrtc::ValidateRtpHeader; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_RTP_UTILS_H_ diff --git a/media/base/rtp_utils_unittest.cc b/media/base/rtp_utils_unittest.cc index a594f944c0..b3c07b5da9 100644 --- a/media/base/rtp_utils_unittest.cc +++ b/media/base/rtp_utils_unittest.cc @@ -15,11 +15,12 @@ #include #include +#include "api/array_view.h" #include "media/base/fake_rtp.h" #include "rtc_base/async_packet_socket.h" #include "test/gtest.h" -namespace cricket { +namespace webrtc { static const uint8_t kInvalidPacket[] = {0x80, 0x00}; @@ -72,15 +73,12 @@ static const int kAstIndexInOneByteRtpMsg = 21; // and in message `kRtpMsgWithTwoByteAbsSendTimeExtension`. static const int kAstIndexInTwoByteRtpMsg = 21; -static const rtc::ArrayView kPcmuFrameArrayView = - rtc::MakeArrayView(reinterpret_cast(kPcmuFrame), - sizeof(kPcmuFrame)); -static const rtc::ArrayView kRtcpReportArrayView = - rtc::MakeArrayView(reinterpret_cast(kRtcpReport), - sizeof(kRtcpReport)); -static const rtc::ArrayView kInvalidPacketArrayView = - rtc::MakeArrayView(reinterpret_cast(kInvalidPacket), - sizeof(kInvalidPacket)); +static const ArrayView kPcmuFrameArrayView = + MakeArrayView(kPcmuFrame, sizeof(kPcmuFrame)); +static const ArrayView kRtcpReportArrayView = + MakeArrayView(kRtcpReport, sizeof(kRtcpReport)); +static const ArrayView kInvalidPacketArrayView = + MakeArrayView(kInvalidPacket, sizeof(kInvalidPacket)); TEST(RtpUtilsTest, GetRtcp) { int pt; @@ -189,7 +187,7 @@ TEST(RtpUtilsTest, UpdateAbsSendTimeExtensionInTurnSendIndication) { // Test without any packet options variables set. This method should return // without HMAC value in the packet. TEST(RtpUtilsTest, ApplyPacketOptionsWithDefaultValues) { - rtc::PacketTimeUpdateParams packet_time_params; + PacketTimeUpdateParams packet_time_params; std::vector rtp_packet( kRtpMsgWithOneByteAbsSendTimeExtension, kRtpMsgWithOneByteAbsSendTimeExtension + @@ -210,7 +208,7 @@ TEST(RtpUtilsTest, ApplyPacketOptionsWithDefaultValues) { // Veirfy HMAC is updated when packet option parameters are set. TEST(RtpUtilsTest, ApplyPacketOptionsWithAuthParams) { - rtc::PacketTimeUpdateParams packet_time_params; + PacketTimeUpdateParams packet_time_params; packet_time_params.srtp_auth_key.assign(kTestKey, kTestKey + sizeof(kTestKey)); packet_time_params.srtp_auth_tag_len = 4; @@ -267,7 +265,7 @@ TEST(RtpUtilsTest, UpdateTwoByteAbsSendTimeExtensionInRtpPacket) { // Verify we update both AbsSendTime extension header and HMAC. 
TEST(RtpUtilsTest, ApplyPacketOptionsWithAuthParamsAndAbsSendTime) { - rtc::PacketTimeUpdateParams packet_time_params; + PacketTimeUpdateParams packet_time_params; packet_time_params.srtp_auth_key.assign(kTestKey, kTestKey + sizeof(kTestKey)); packet_time_params.srtp_auth_tag_len = 4; @@ -300,4 +298,4 @@ TEST(RtpUtilsTest, InferRtpPacketType) { InferRtpPacketType(kInvalidPacketArrayView)); } -} // namespace cricket +} // namespace webrtc diff --git a/media/base/sdp_video_format_utils.cc b/media/base/sdp_video_format_utils.cc index a156afdc02..70d110509c 100644 --- a/media/base/sdp_video_format_utils.cc +++ b/media/base/sdp_video_format_utils.cc @@ -12,9 +12,17 @@ #include #include +#include +#include #include +#include "api/rtp_parameters.h" #include "api/video_codecs/h264_profile_level_id.h" +#ifdef RTC_ENABLE_H265 +#include "api/video_codecs/h265_profile_tier_level.h" +#endif +#include "absl/algorithm/container.h" +#include "media/base/media_constants.h" #include "rtc_base/checks.h" #include "rtc_base/string_to_number.h" @@ -27,8 +35,13 @@ const char kVPxFmtpMaxFrameRate[] = "max-fr"; // Max frame size for VP8 and VP9 video. const char kVPxFmtpMaxFrameSize[] = "max-fs"; const int kVPxFmtpFrameSizeSubBlockPixels = 256; +#ifdef RTC_ENABLE_H265 +constexpr char kH265ProfileId[] = "profile-id"; +constexpr char kH265TierFlag[] = "tier-flag"; +constexpr char kH265LevelId[] = "level-id"; +#endif -bool IsH264LevelAsymmetryAllowed(const SdpVideoFormat::Parameters& params) { +bool IsH264LevelAsymmetryAllowed(const CodecParameterMap& params) { const auto it = params.find(kH264LevelAsymmetryAllowed); return it != params.end() && strcmp(it->second.c_str(), "1") == 0; } @@ -46,27 +59,77 @@ H264Level H264LevelMin(H264Level a, H264Level b) { return H264LevelIsLess(a, b) ? a : b; } -absl::optional ParsePositiveNumberFromParams( - const SdpVideoFormat::Parameters& params, +std::optional ParsePositiveNumberFromParams( + const CodecParameterMap& params, const char* parameter_name) { const auto max_frame_rate_it = params.find(parameter_name); if (max_frame_rate_it == params.end()) - return absl::nullopt; + return std::nullopt; - const absl::optional i = - rtc::StringToNumber(max_frame_rate_it->second); + const std::optional i = StringToNumber(max_frame_rate_it->second); if (!i.has_value() || i.value() <= 0) - return absl::nullopt; + return std::nullopt; return i; } +#ifdef RTC_ENABLE_H265 +// Compares two H265Level and return the smaller. +H265Level H265LevelMin(H265Level a, H265Level b) { + return a <= b ? a : b; +} + +// Returns true if none of profile-id/tier-flag/level-id is specified +// explicitly in the param. +bool IsDefaultH265PTL(const CodecParameterMap& params) { + return !params.count(kH265ProfileId) && !params.count(kH265TierFlag) && + !params.count(kH265LevelId); +} +#endif + } // namespace +#ifdef RTC_ENABLE_H265 +// Set level according to https://tools.ietf.org/html/rfc7798#section-7.1 +void H265GenerateProfileTierLevelForAnswer( + const CodecParameterMap& local_supported_params, + const CodecParameterMap& remote_offered_params, + CodecParameterMap* answer_params) { + // If local and remote haven't set profile-id/tier-flag/level-id, they + // are both using the default PTL In this case, don't set PTL in answer + // either. + if (IsDefaultH265PTL(local_supported_params) && + IsDefaultH265PTL(remote_offered_params)) { + return; + } + + // Parse profile-tier-level. 
+ const std::optional local_profile_tier_level = + ParseSdpForH265ProfileTierLevel(local_supported_params); + const std::optional remote_profile_tier_level = + ParseSdpForH265ProfileTierLevel(remote_offered_params); + // Profile and tier for local and remote codec must be valid and equal. + RTC_DCHECK(local_profile_tier_level); + RTC_DCHECK(remote_profile_tier_level); + RTC_DCHECK_EQ(local_profile_tier_level->profile, + remote_profile_tier_level->profile); + RTC_DCHECK_EQ(local_profile_tier_level->tier, + remote_profile_tier_level->tier); + + const H265Level answer_level = H265LevelMin(local_profile_tier_level->level, + remote_profile_tier_level->level); + + // Level-id in answer is changable as long as the highest level indicated by + // the answer is not higher than that indicated by the offer. See + // https://tools.ietf.org/html/rfc7798#section-7.2.2, sub-clause 2. + (*answer_params)[kH265LevelId] = H265LevelToString(answer_level); +} +#endif + // Set level according to https://tools.ietf.org/html/rfc6184#section-8.2.2. void H264GenerateProfileLevelIdForAnswer( - const SdpVideoFormat::Parameters& local_supported_params, - const SdpVideoFormat::Parameters& remote_offered_params, - SdpVideoFormat::Parameters* answer_params) { + const CodecParameterMap& local_supported_params, + const CodecParameterMap& remote_offered_params, + CodecParameterMap* answer_params) { // If both local and remote haven't set profile-level-id, they are both using // the default profile. In this case, don't set profile-level-id in answer // either. @@ -76,9 +139,9 @@ void H264GenerateProfileLevelIdForAnswer( } // Parse profile-level-ids. - const absl::optional local_profile_level_id = + const std::optional local_profile_level_id = ParseSdpForH264ProfileLevelId(local_supported_params); - const absl::optional remote_profile_level_id = + const std::optional remote_profile_level_id = ParseSdpForH264ProfileLevelId(remote_offered_params); // The local and remote codec must have valid and equal H264 Profiles. RTC_DCHECK(local_profile_level_id); @@ -105,17 +168,23 @@ void H264GenerateProfileLevelIdForAnswer( H264ProfileLevelId(local_profile_level_id->profile, answer_level)); } -absl::optional ParseSdpForVPxMaxFrameRate( - const SdpVideoFormat::Parameters& params) { +std::optional ParseSdpForVPxMaxFrameRate(const CodecParameterMap& params) { return ParsePositiveNumberFromParams(params, kVPxFmtpMaxFrameRate); } -absl::optional ParseSdpForVPxMaxFrameSize( - const SdpVideoFormat::Parameters& params) { - const absl::optional i = +std::optional ParseSdpForVPxMaxFrameSize(const CodecParameterMap& params) { + const std::optional i = ParsePositiveNumberFromParams(params, kVPxFmtpMaxFrameSize); - return i ? absl::make_optional(i.value() * kVPxFmtpFrameSizeSubBlockPixels) - : absl::nullopt; + return i ? 
std::make_optional(i.value() * kVPxFmtpFrameSizeSubBlockPixels) + : std::nullopt; +} + +bool SupportsPerLayerPictureLossIndication(const CodecParameterMap& params) { + return absl::c_find_if( + params, [](const std::pair& kv) { + return kv.first == kCodecParamPerLayerPictureLossIndication && + kv.second == "1"; + }) != params.end(); } } // namespace webrtc diff --git a/media/base/sdp_video_format_utils.h b/media/base/sdp_video_format_utils.h index 80c1e4d501..65e83aed3a 100644 --- a/media/base/sdp_video_format_utils.h +++ b/media/base/sdp_video_format_utils.h @@ -11,8 +11,9 @@ #ifndef MEDIA_BASE_SDP_VIDEO_FORMAT_UTILS_H_ #define MEDIA_BASE_SDP_VIDEO_FORMAT_UTILS_H_ -#include "absl/types/optional.h" -#include "api/video_codecs/sdp_video_format.h" +#include + +#include "api/rtp_parameters.h" namespace webrtc { // Generate codec parameters that will be used as answer in an SDP negotiation @@ -32,20 +33,34 @@ namespace webrtc { // parameters that are used when negotiating are the level part of // profile-level-id and level-asymmetry-allowed. void H264GenerateProfileLevelIdForAnswer( - const SdpVideoFormat::Parameters& local_supported_params, - const SdpVideoFormat::Parameters& remote_offered_params, - SdpVideoFormat::Parameters* answer_params); + const CodecParameterMap& local_supported_params, + const CodecParameterMap& remote_offered_params, + CodecParameterMap* answer_params); + +#ifdef RTC_ENABLE_H265 +// Works similarly as H264GenerateProfileLevelIdForAnswer, but generates codec +// parameters that will be used as answer for H.265. +// Media configuration parameters, except level-id, must be used symmetrically. +// For level-id, the highest level indicated by the answer must not be higher +// than that indicated by the offer. +void H265GenerateProfileTierLevelForAnswer( + const CodecParameterMap& local_supported_params, + const CodecParameterMap& remote_offered_params, + CodecParameterMap* answer_params); +#endif -// Parse max frame rate from SDP FMTP line. absl::nullopt is returned if the +// Parse max frame rate from SDP FMTP line. std::nullopt is returned if the // field is missing or not a number. -absl::optional ParseSdpForVPxMaxFrameRate( - const SdpVideoFormat::Parameters& params); +std::optional ParseSdpForVPxMaxFrameRate(const CodecParameterMap& params); -// Parse max frame size from SDP FMTP line. absl::nullopt is returned if the +// Parse max frame size from SDP FMTP line. std::nullopt is returned if the // field is missing or not a number. Please note that the value is stored in sub // blocks but the returned value is in total number of pixels. -absl::optional ParseSdpForVPxMaxFrameSize( - const SdpVideoFormat::Parameters& params); +std::optional ParseSdpForVPxMaxFrameSize(const CodecParameterMap& params); + +// Determines whether the non-standard x-google-per-layer-pli fmtp is present +// in the parameters and has a value of "1". 
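// Editorial illustration (not part of this patch): the doc comment above names
// the non-standard fmtp key "x-google-per-layer-pli", so a parameter map such
// as
//   CodecParameterMap params = {{"x-google-per-layer-pli", "1"}};
// is expected to make SupportsPerLayerPictureLossIndication(params) return
// true, while a missing key or any other value (e.g. "0") yields false.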
+bool SupportsPerLayerPictureLossIndication(const CodecParameterMap& params); } // namespace webrtc diff --git a/media/base/sdp_video_format_utils_unittest.cc b/media/base/sdp_video_format_utils_unittest.cc index d8ef9ab827..cde1e30e1c 100644 --- a/media/base/sdp_video_format_utils_unittest.cc +++ b/media/base/sdp_video_format_utils_unittest.cc @@ -10,12 +10,10 @@ #include "media/base/sdp_video_format_utils.h" -#include - #include -#include +#include -#include "rtc_base/string_to_number.h" +#include "api/rtp_parameters.h" #include "test/gtest.h" namespace webrtc { @@ -24,32 +22,34 @@ namespace { const char kVPxFmtpMaxFrameRate[] = "max-fr"; // Max frame size for VP8 and VP9 video. const char kVPxFmtpMaxFrameSize[] = "max-fs"; +// Nonstandard per-layer PLI for video. +const char kCodecParamPerLayerPictureLossIndication[] = + "x-google-per-layer-pli"; } // namespace TEST(SdpVideoFormatUtilsTest, TestH264GenerateProfileLevelIdForAnswerEmpty) { - SdpVideoFormat::Parameters answer_params; - H264GenerateProfileLevelIdForAnswer(SdpVideoFormat::Parameters(), - SdpVideoFormat::Parameters(), + CodecParameterMap answer_params; + H264GenerateProfileLevelIdForAnswer(CodecParameterMap(), CodecParameterMap(), &answer_params); EXPECT_TRUE(answer_params.empty()); } TEST(SdpVideoFormatUtilsTest, TestH264GenerateProfileLevelIdForAnswerLevelSymmetryCapped) { - SdpVideoFormat::Parameters low_level; + CodecParameterMap low_level; low_level["profile-level-id"] = "42e015"; - SdpVideoFormat::Parameters high_level; + CodecParameterMap high_level; high_level["profile-level-id"] = "42e01f"; // Level asymmetry is not allowed; test that answer level is the lower of the // local and remote levels. - SdpVideoFormat::Parameters answer_params; + CodecParameterMap answer_params; H264GenerateProfileLevelIdForAnswer(low_level /* local_supported */, high_level /* remote_offered */, &answer_params); EXPECT_EQ("42e015", answer_params["profile-level-id"]); - SdpVideoFormat::Parameters answer_params2; + CodecParameterMap answer_params2; H264GenerateProfileLevelIdForAnswer(high_level /* local_supported */, low_level /* remote_offered */, &answer_params2); @@ -58,13 +58,13 @@ TEST(SdpVideoFormatUtilsTest, TEST(SdpVideoFormatUtilsTest, TestH264GenerateProfileLevelIdForAnswerConstrainedBaselineLevelAsymmetry) { - SdpVideoFormat::Parameters local_params; + CodecParameterMap local_params; local_params["profile-level-id"] = "42e01f"; local_params["level-asymmetry-allowed"] = "1"; - SdpVideoFormat::Parameters remote_params; + CodecParameterMap remote_params; remote_params["profile-level-id"] = "42e015"; remote_params["level-asymmetry-allowed"] = "1"; - SdpVideoFormat::Parameters answer_params; + CodecParameterMap answer_params; H264GenerateProfileLevelIdForAnswer(local_params, remote_params, &answer_params); // When level asymmetry is allowed, we can answer a higher level than what was @@ -72,9 +72,39 @@ TEST(SdpVideoFormatUtilsTest, EXPECT_EQ("42e01f", answer_params["profile-level-id"]); } +#ifdef RTC_ENABLE_H265 +// Answer should not include explicit PTL info if neither local nor remote set +// any of them. +TEST(SdpVideoFormatUtilsTest, H265GenerateProfileTierLevelEmpty) { + CodecParameterMap answer_params; + H265GenerateProfileTierLevelForAnswer(CodecParameterMap(), + CodecParameterMap(), &answer_params); + EXPECT_TRUE(answer_params.empty()); +} + +// Answer must use the minimum level as supported by both local and remote. 
+TEST(SdpVideoFormatUtilsTest, H265GenerateProfileTierLevelNoEmpty) { + constexpr char kLocallySupportedLevelId[] = "93"; + constexpr char kRemoteOfferedLevelId[] = "120"; + + CodecParameterMap local_params; + local_params["profile-id"] = "1"; + local_params["tier-flag"] = "0"; + local_params["level-id"] = kLocallySupportedLevelId; + CodecParameterMap remote_params; + remote_params["profile-id"] = "1"; + remote_params["tier-flag"] = "0"; + remote_params["level-id"] = kRemoteOfferedLevelId; + CodecParameterMap answer_params; + H265GenerateProfileTierLevelForAnswer(local_params, remote_params, + &answer_params); + EXPECT_EQ(kLocallySupportedLevelId, answer_params["level-id"]); +} +#endif + TEST(SdpVideoFormatUtilsTest, MaxFrameRateIsMissingOrInvalid) { - SdpVideoFormat::Parameters params; - absl::optional empty = ParseSdpForVPxMaxFrameRate(params); + CodecParameterMap params; + std::optional empty = ParseSdpForVPxMaxFrameRate(params); EXPECT_FALSE(empty); params[kVPxFmtpMaxFrameRate] = "-1"; EXPECT_FALSE(ParseSdpForVPxMaxFrameRate(params)); @@ -85,7 +115,7 @@ TEST(SdpVideoFormatUtilsTest, MaxFrameRateIsMissingOrInvalid) { } TEST(SdpVideoFormatUtilsTest, MaxFrameRateIsSpecified) { - SdpVideoFormat::Parameters params; + CodecParameterMap params; params[kVPxFmtpMaxFrameRate] = "30"; EXPECT_EQ(ParseSdpForVPxMaxFrameRate(params), 30); params[kVPxFmtpMaxFrameRate] = "60"; @@ -93,8 +123,8 @@ TEST(SdpVideoFormatUtilsTest, MaxFrameRateIsSpecified) { } TEST(SdpVideoFormatUtilsTest, MaxFrameSizeIsMissingOrInvalid) { - SdpVideoFormat::Parameters params; - absl::optional empty = ParseSdpForVPxMaxFrameSize(params); + CodecParameterMap params; + std::optional empty = ParseSdpForVPxMaxFrameSize(params); EXPECT_FALSE(empty); params[kVPxFmtpMaxFrameSize] = "-1"; EXPECT_FALSE(ParseSdpForVPxMaxFrameSize(params)); @@ -105,11 +135,22 @@ TEST(SdpVideoFormatUtilsTest, MaxFrameSizeIsMissingOrInvalid) { } TEST(SdpVideoFormatUtilsTest, MaxFrameSizeIsSpecified) { - SdpVideoFormat::Parameters params; + CodecParameterMap params; params[kVPxFmtpMaxFrameSize] = "8100"; // 1920 x 1080 / (16^2) EXPECT_EQ(ParseSdpForVPxMaxFrameSize(params), 1920 * 1080); params[kVPxFmtpMaxFrameSize] = "32400"; // 3840 x 2160 / (16^2) EXPECT_EQ(ParseSdpForVPxMaxFrameSize(params), 3840 * 2160); } +TEST(SdpVideoFormatUtilsTest, PerLayerPictureLossIndication) { + CodecParameterMap params; + EXPECT_FALSE(SupportsPerLayerPictureLossIndication(params)); + params[kCodecParamPerLayerPictureLossIndication] = "wrong"; + EXPECT_FALSE(SupportsPerLayerPictureLossIndication(params)); + params[kCodecParamPerLayerPictureLossIndication] = "0"; + EXPECT_FALSE(SupportsPerLayerPictureLossIndication(params)); + params[kCodecParamPerLayerPictureLossIndication] = "1"; + EXPECT_TRUE(SupportsPerLayerPictureLossIndication(params)); +} + } // namespace webrtc diff --git a/media/base/stream_params.cc b/media/base/stream_params.cc index ac9daee200..8346e4cd47 100644 --- a/media/base/stream_params.cc +++ b/media/base/stream_params.cc @@ -12,17 +12,21 @@ #include -#include +#include +#include +#include #include "absl/algorithm/container.h" #include "api/array_view.h" +#include "media/base/rid_description.h" +#include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/unique_id_generator.h" -namespace cricket { +namespace webrtc { namespace { -void AppendSsrcs(rtc::ArrayView ssrcs, - rtc::SimpleStringBuilder* sb) { +void AppendSsrcs(ArrayView ssrcs, SimpleStringBuilder* sb) { *sb << "ssrcs:["; const char* delimiter = ""; for (uint32_t ssrc : 
ssrcs) { @@ -32,8 +36,8 @@ void AppendSsrcs(rtc::ArrayView ssrcs, *sb << "]"; } -void AppendSsrcGroups(rtc::ArrayView ssrc_groups, - rtc::SimpleStringBuilder* sb) { +void AppendSsrcGroups(ArrayView ssrc_groups, + SimpleStringBuilder* sb) { *sb << "ssrc_groups:"; const char* delimiter = ""; for (const SsrcGroup& ssrc_group : ssrc_groups) { @@ -42,8 +46,8 @@ void AppendSsrcGroups(rtc::ArrayView ssrc_groups, } } -void AppendStreamIds(rtc::ArrayView stream_ids, - rtc::SimpleStringBuilder* sb) { +void AppendStreamIds(ArrayView stream_ids, + SimpleStringBuilder* sb) { *sb << "stream_ids:"; const char* delimiter = ""; for (const std::string& stream_id : stream_ids) { @@ -52,8 +56,7 @@ void AppendStreamIds(rtc::ArrayView stream_ids, } } -void AppendRids(rtc::ArrayView rids, - rtc::SimpleStringBuilder* sb) { +void AppendRids(ArrayView rids, SimpleStringBuilder* sb) { *sb << "rids:["; const char* delimiter = ""; for (const RidDescription& rid : rids) { @@ -95,7 +98,7 @@ bool SsrcGroup::has_semantics(const std::string& semantics_in) const { std::string SsrcGroup::ToString() const { char buf[1024]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); sb << "{"; sb << "semantics:" << semantics << ";"; AppendSsrcs(ssrcs, &sb); @@ -120,7 +123,7 @@ bool StreamParams::operator==(const StreamParams& other) const { std::string StreamParams::ToString() const { char buf[2 * 1024]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); sb << "{"; if (!id.empty()) { sb << "id:" << id << ";"; @@ -145,7 +148,7 @@ std::string StreamParams::ToString() const { void StreamParams::GenerateSsrcs(int num_layers, bool generate_fid, bool generate_fec_fr, - rtc::UniqueRandomIdGenerator* ssrc_generator) { + UniqueRandomIdGenerator* ssrc_generator) { RTC_DCHECK_GE(num_layers, 0); RTC_DCHECK(ssrc_generator); std::vector primary_ssrcs; @@ -173,13 +176,13 @@ void StreamParams::GenerateSsrcs(int num_layers, } } -void StreamParams::GetPrimarySsrcs(std::vector* ssrcs) const { +void StreamParams::GetPrimarySsrcs(std::vector* primary_ssrcs) const { const SsrcGroup* sim_group = get_ssrc_group(kSimSsrcGroupSemantics); if (sim_group == NULL) { - ssrcs->push_back(first_ssrc()); + primary_ssrcs->push_back(first_ssrc()); } else { - ssrcs->insert(ssrcs->end(), sim_group->ssrcs.begin(), - sim_group->ssrcs.end()); + primary_ssrcs->insert(primary_ssrcs->end(), sim_group->ssrcs.begin(), + sim_group->ssrcs.end()); } } @@ -237,4 +240,4 @@ std::string StreamParams::first_stream_id() const { return stream_ids_.empty() ? "" : stream_ids_[0]; } -} // namespace cricket +} // namespace webrtc diff --git a/media/base/stream_params.h b/media/base/stream_params.h index 89fc1554cc..354d66d263 100644 --- a/media/base/stream_params.h +++ b/media/base/stream_params.h @@ -56,7 +56,7 @@ #include "media/base/rid_description.h" #include "rtc_base/unique_id_generator.h" -namespace cricket { +namespace webrtc { extern const char kFecSsrcGroupSemantics[]; extern const char kFecFrSsrcGroupSemantics[]; @@ -80,7 +80,7 @@ struct SsrcGroup { std::string ToString() const; - std::string semantics; // e.g FIX, FEC, SIM. + std::string semantics; // e.g FID, FEC-FR, SIM. std::vector ssrcs; // SSRCs of this type. }; @@ -160,11 +160,11 @@ struct StreamParams { void GenerateSsrcs(int num_layers, bool generate_fid, bool generate_fec_fr, - rtc::UniqueRandomIdGenerator* ssrc_generator); + UniqueRandomIdGenerator* ssrc_generator); // Convenience to get all the SIM SSRCs if there are SIM ssrcs, or // the first SSRC otherwise. 
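  // Editorial illustration (not part of this patch): for a StreamParams whose
  // SIM ssrc-group is {10, 20, 30}, GetPrimarySsrcs() appends 10, 20 and 30 to
  // the output vector; without a SIM group it appends only first_ssrc().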
- void GetPrimarySsrcs(std::vector* ssrcs) const; + void GetPrimarySsrcs(std::vector* primary_ssrcs) const; // Convenience to get all the secondary SSRCs for the given primary ssrcs // of a particular semantic. @@ -316,6 +316,28 @@ inline bool RemoveStreamByIds(StreamParamsVec* streams, const std::string& id) { [&id](const StreamParams& sp) { return sp.id == id; }); } +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::GetStream; +using ::webrtc::GetStreamByIds; +using ::webrtc::GetStreamBySsrc; +using ::webrtc::HasStreamWithNoSsrcs; +using ::webrtc::kFecFrSsrcGroupSemantics; +using ::webrtc::kFecSsrcGroupSemantics; +using ::webrtc::kFidSsrcGroupSemantics; +using ::webrtc::kSimSsrcGroupSemantics; +using ::webrtc::RemoveStream; +using ::webrtc::RemoveStreamByIds; +using ::webrtc::RemoveStreamBySsrc; +using ::webrtc::SsrcGroup; +using ::webrtc::StreamParams; +using ::webrtc::StreamParamsVec; +using ::webrtc::StreamSelector; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_STREAM_PARAMS_H_ diff --git a/media/base/stream_params_unittest.cc b/media/base/stream_params_unittest.cc index 7adf0f517d..efb113a68b 100644 --- a/media/base/stream_params_unittest.cc +++ b/media/base/stream_params_unittest.cc @@ -12,8 +12,13 @@ #include +#include +#include +#include + #include "media/base/test_utils.h" #include "rtc_base/arraysize.h" +#include "rtc_base/unique_id_generator.h" #include "test/gmock.h" #include "test/gtest.h" @@ -23,24 +28,24 @@ using ::testing::Ne; static const uint32_t kSsrcs1[] = {1}; static const uint32_t kSsrcs2[] = {1, 2}; -static cricket::StreamParams CreateStreamParamsWithSsrcGroup( +static webrtc::StreamParams CreateStreamParamsWithSsrcGroup( const std::string& semantics, const uint32_t ssrcs_in[], size_t len) { - cricket::StreamParams stream; + webrtc::StreamParams stream; std::vector ssrcs(ssrcs_in, ssrcs_in + len); - cricket::SsrcGroup sg(semantics, ssrcs); + webrtc::SsrcGroup sg(semantics, ssrcs); stream.ssrcs = ssrcs; stream.ssrc_groups.push_back(sg); return stream; } TEST(SsrcGroup, EqualNotEqual) { - cricket::SsrcGroup ssrc_groups[] = { - cricket::SsrcGroup("ABC", MAKE_VECTOR(kSsrcs1)), - cricket::SsrcGroup("ABC", MAKE_VECTOR(kSsrcs2)), - cricket::SsrcGroup("Abc", MAKE_VECTOR(kSsrcs2)), - cricket::SsrcGroup("abc", MAKE_VECTOR(kSsrcs2)), + webrtc::SsrcGroup ssrc_groups[] = { + webrtc::SsrcGroup("ABC", MAKE_VECTOR(kSsrcs1)), + webrtc::SsrcGroup("ABC", MAKE_VECTOR(kSsrcs2)), + webrtc::SsrcGroup("Abc", MAKE_VECTOR(kSsrcs2)), + webrtc::SsrcGroup("abc", MAKE_VECTOR(kSsrcs2)), }; for (size_t i = 0; i < arraysize(ssrc_groups); ++i) { @@ -52,24 +57,24 @@ TEST(SsrcGroup, EqualNotEqual) { } TEST(SsrcGroup, HasSemantics) { - cricket::SsrcGroup sg1("ABC", MAKE_VECTOR(kSsrcs1)); + webrtc::SsrcGroup sg1("ABC", MAKE_VECTOR(kSsrcs1)); EXPECT_TRUE(sg1.has_semantics("ABC")); - cricket::SsrcGroup sg2("Abc", MAKE_VECTOR(kSsrcs1)); + webrtc::SsrcGroup sg2("Abc", MAKE_VECTOR(kSsrcs1)); EXPECT_FALSE(sg2.has_semantics("ABC")); - cricket::SsrcGroup sg3("abc", MAKE_VECTOR(kSsrcs1)); + webrtc::SsrcGroup sg3("abc", MAKE_VECTOR(kSsrcs1)); EXPECT_FALSE(sg3.has_semantics("ABC")); } TEST(SsrcGroup, ToString) { - cricket::SsrcGroup sg1("ABC", MAKE_VECTOR(kSsrcs1)); + webrtc::SsrcGroup sg1("ABC", MAKE_VECTOR(kSsrcs1)); EXPECT_STREQ("{semantics:ABC;ssrcs:[1]}", 
sg1.ToString().c_str()); } TEST(StreamParams, CreateLegacy) { const uint32_t ssrc = 7; - cricket::StreamParams one_sp = cricket::StreamParams::CreateLegacy(ssrc); + webrtc::StreamParams one_sp = webrtc::StreamParams::CreateLegacy(ssrc); EXPECT_EQ(1U, one_sp.ssrcs.size()); EXPECT_EQ(ssrc, one_sp.first_ssrc()); EXPECT_TRUE(one_sp.has_ssrcs()); @@ -80,7 +85,7 @@ TEST(StreamParams, CreateLegacy) { } TEST(StreamParams, HasSsrcGroup) { - cricket::StreamParams sp = + webrtc::StreamParams sp = CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, arraysize(kSsrcs2)); EXPECT_EQ(2U, sp.ssrcs.size()); EXPECT_EQ(kSsrcs2[0], sp.first_ssrc()); @@ -95,35 +100,35 @@ TEST(StreamParams, HasSsrcGroup) { } TEST(StreamParams, GetSsrcGroup) { - cricket::StreamParams sp = + webrtc::StreamParams sp = CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, arraysize(kSsrcs2)); EXPECT_EQ(NULL, sp.get_ssrc_group("xyz")); EXPECT_EQ(&sp.ssrc_groups[0], sp.get_ssrc_group("XYZ")); } TEST(StreamParams, HasStreamWithNoSsrcs) { - cricket::StreamParams sp_1 = cricket::StreamParams::CreateLegacy(kSsrcs1[0]); - cricket::StreamParams sp_2 = cricket::StreamParams::CreateLegacy(kSsrcs2[0]); - std::vector streams({sp_1, sp_2}); - EXPECT_FALSE(HasStreamWithNoSsrcs(streams)); + webrtc::StreamParams sp_1 = webrtc::StreamParams::CreateLegacy(kSsrcs1[0]); + webrtc::StreamParams sp_2 = webrtc::StreamParams::CreateLegacy(kSsrcs2[0]); + std::vector streams({sp_1, sp_2}); + EXPECT_FALSE(webrtc::HasStreamWithNoSsrcs(streams)); - cricket::StreamParams unsignaled_stream; + webrtc::StreamParams unsignaled_stream; streams.push_back(unsignaled_stream); - EXPECT_TRUE(HasStreamWithNoSsrcs(streams)); + EXPECT_TRUE(webrtc::HasStreamWithNoSsrcs(streams)); } TEST(StreamParams, EqualNotEqual) { - cricket::StreamParams l1 = cricket::StreamParams::CreateLegacy(1); - cricket::StreamParams l2 = cricket::StreamParams::CreateLegacy(2); - cricket::StreamParams sg1 = + webrtc::StreamParams l1 = webrtc::StreamParams::CreateLegacy(1); + webrtc::StreamParams l2 = webrtc::StreamParams::CreateLegacy(2); + webrtc::StreamParams sg1 = CreateStreamParamsWithSsrcGroup("ABC", kSsrcs1, arraysize(kSsrcs1)); - cricket::StreamParams sg2 = + webrtc::StreamParams sg2 = CreateStreamParamsWithSsrcGroup("ABC", kSsrcs2, arraysize(kSsrcs2)); - cricket::StreamParams sg3 = + webrtc::StreamParams sg3 = CreateStreamParamsWithSsrcGroup("Abc", kSsrcs2, arraysize(kSsrcs2)); - cricket::StreamParams sg4 = + webrtc::StreamParams sg4 = CreateStreamParamsWithSsrcGroup("abc", kSsrcs2, arraysize(kSsrcs2)); - cricket::StreamParams sps[] = {l1, l2, sg1, sg2, sg3, sg4}; + webrtc::StreamParams sps[] = {l1, l2, sg1, sg2, sg3, sg4}; for (size_t i = 0; i < arraysize(sps); ++i) { for (size_t j = 0; j < arraysize(sps); ++j) { @@ -136,7 +141,7 @@ TEST(StreamParams, EqualNotEqual) { TEST(StreamParams, FidFunctions) { uint32_t fid_ssrc; - cricket::StreamParams sp = cricket::StreamParams::CreateLegacy(1); + webrtc::StreamParams sp = webrtc::StreamParams::CreateLegacy(1); EXPECT_FALSE(sp.AddFidSsrc(10, 20)); EXPECT_TRUE(sp.AddFidSsrc(1, 2)); EXPECT_TRUE(sp.GetFidSsrc(1, &fid_ssrc)); @@ -153,16 +158,16 @@ TEST(StreamParams, FidFunctions) { // for this. 
std::vector fid_vector; fid_vector.push_back(13); - cricket::SsrcGroup invalid_fid_group(cricket::kFidSsrcGroupSemantics, - fid_vector); - cricket::StreamParams sp_invalid; + webrtc::SsrcGroup invalid_fid_group(webrtc::kFidSsrcGroupSemantics, + fid_vector); + webrtc::StreamParams sp_invalid; sp_invalid.add_ssrc(13); sp_invalid.ssrc_groups.push_back(invalid_fid_group); EXPECT_FALSE(sp_invalid.GetFidSsrc(13, &fid_ssrc)); } TEST(StreamParams, GetPrimaryAndFidSsrcs) { - cricket::StreamParams sp; + webrtc::StreamParams sp; sp.ssrcs.push_back(1); sp.ssrcs.push_back(2); sp.ssrcs.push_back(3); @@ -176,7 +181,7 @@ TEST(StreamParams, GetPrimaryAndFidSsrcs) { ASSERT_EQ(0u, fid_ssrcs.size()); sp.ssrc_groups.push_back( - cricket::SsrcGroup(cricket::kSimSsrcGroupSemantics, sp.ssrcs)); + webrtc::SsrcGroup(webrtc::kSimSsrcGroupSemantics, sp.ssrcs)); sp.AddFidSsrc(1, 10); sp.AddFidSsrc(2, 20); @@ -196,7 +201,7 @@ TEST(StreamParams, GetPrimaryAndFidSsrcs) { TEST(StreamParams, FecFrFunctions) { uint32_t fecfr_ssrc; - cricket::StreamParams sp = cricket::StreamParams::CreateLegacy(1); + webrtc::StreamParams sp = webrtc::StreamParams::CreateLegacy(1); EXPECT_FALSE(sp.AddFecFrSsrc(10, 20)); EXPECT_TRUE(sp.AddFecFrSsrc(1, 2)); EXPECT_TRUE(sp.GetFecFrSsrc(1, &fecfr_ssrc)); @@ -213,16 +218,16 @@ TEST(StreamParams, FecFrFunctions) { // for this. std::vector fecfr_vector; fecfr_vector.push_back(13); - cricket::SsrcGroup invalid_fecfr_group(cricket::kFecFrSsrcGroupSemantics, - fecfr_vector); - cricket::StreamParams sp_invalid; + webrtc::SsrcGroup invalid_fecfr_group(webrtc::kFecFrSsrcGroupSemantics, + fecfr_vector); + webrtc::StreamParams sp_invalid; sp_invalid.add_ssrc(13); sp_invalid.ssrc_groups.push_back(invalid_fecfr_group); EXPECT_FALSE(sp_invalid.GetFecFrSsrc(13, &fecfr_ssrc)); } TEST(StreamParams, ToString) { - cricket::StreamParams sp = + webrtc::StreamParams sp = CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, arraysize(kSsrcs2)); sp.set_stream_ids({"stream_id"}); EXPECT_STREQ( @@ -232,8 +237,8 @@ TEST(StreamParams, ToString) { } TEST(StreamParams, TestGenerateSsrcs_SingleStreamWithRtxAndFlex) { - rtc::UniqueRandomIdGenerator generator; - cricket::StreamParams stream; + webrtc::UniqueRandomIdGenerator generator; + webrtc::StreamParams stream; stream.GenerateSsrcs(1, true, true, &generator); uint32_t primary_ssrc = stream.first_ssrc(); ASSERT_NE(0u, primary_ssrc); @@ -244,14 +249,14 @@ TEST(StreamParams, TestGenerateSsrcs_SingleStreamWithRtxAndFlex) { EXPECT_NE(0u, rtx_ssrc); EXPECT_TRUE(stream.GetFecFrSsrc(primary_ssrc, &flex_ssrc)); EXPECT_NE(0u, flex_ssrc); - EXPECT_FALSE(stream.has_ssrc_group(cricket::kSimSsrcGroupSemantics)); - EXPECT_TRUE(stream.has_ssrc_group(cricket::kFidSsrcGroupSemantics)); - EXPECT_TRUE(stream.has_ssrc_group(cricket::kFecFrSsrcGroupSemantics)); + EXPECT_FALSE(stream.has_ssrc_group(webrtc::kSimSsrcGroupSemantics)); + EXPECT_TRUE(stream.has_ssrc_group(webrtc::kFidSsrcGroupSemantics)); + EXPECT_TRUE(stream.has_ssrc_group(webrtc::kFecFrSsrcGroupSemantics)); } TEST(StreamParams, TestGenerateSsrcs_SingleStreamWithRtx) { - rtc::UniqueRandomIdGenerator generator; - cricket::StreamParams stream; + webrtc::UniqueRandomIdGenerator generator; + webrtc::StreamParams stream; stream.GenerateSsrcs(1, true, false, &generator); uint32_t primary_ssrc = stream.first_ssrc(); ASSERT_NE(0u, primary_ssrc); @@ -262,13 +267,13 @@ TEST(StreamParams, TestGenerateSsrcs_SingleStreamWithRtx) { EXPECT_NE(0u, rtx_ssrc); EXPECT_FALSE(stream.GetFecFrSsrc(primary_ssrc, &flex_ssrc)); EXPECT_EQ(0u, flex_ssrc); - 
EXPECT_FALSE(stream.has_ssrc_group(cricket::kSimSsrcGroupSemantics)); - EXPECT_TRUE(stream.has_ssrc_group(cricket::kFidSsrcGroupSemantics)); + EXPECT_FALSE(stream.has_ssrc_group(webrtc::kSimSsrcGroupSemantics)); + EXPECT_TRUE(stream.has_ssrc_group(webrtc::kFidSsrcGroupSemantics)); } TEST(StreamParams, TestGenerateSsrcs_SingleStreamWithFlex) { - rtc::UniqueRandomIdGenerator generator; - cricket::StreamParams stream; + webrtc::UniqueRandomIdGenerator generator; + webrtc::StreamParams stream; stream.GenerateSsrcs(1, false, true, &generator); uint32_t primary_ssrc = stream.first_ssrc(); ASSERT_NE(0u, primary_ssrc); @@ -279,14 +284,14 @@ TEST(StreamParams, TestGenerateSsrcs_SingleStreamWithFlex) { EXPECT_EQ(0u, rtx_ssrc); EXPECT_TRUE(stream.GetFecFrSsrc(primary_ssrc, &flex_ssrc)); EXPECT_NE(0u, flex_ssrc); - EXPECT_FALSE(stream.has_ssrc_group(cricket::kSimSsrcGroupSemantics)); - EXPECT_TRUE(stream.has_ssrc_group(cricket::kFecFrSsrcGroupSemantics)); + EXPECT_FALSE(stream.has_ssrc_group(webrtc::kSimSsrcGroupSemantics)); + EXPECT_TRUE(stream.has_ssrc_group(webrtc::kFecFrSsrcGroupSemantics)); } TEST(StreamParams, TestGenerateSsrcs_SimulcastLayersAndRtx) { const size_t kNumStreams = 3; - rtc::UniqueRandomIdGenerator generator; - cricket::StreamParams stream; + webrtc::UniqueRandomIdGenerator generator; + webrtc::StreamParams stream; stream.GenerateSsrcs(kNumStreams, true, false, &generator); EXPECT_EQ(kNumStreams * 2, stream.ssrcs.size()); std::vector primary_ssrcs, rtx_ssrcs; @@ -296,6 +301,6 @@ TEST(StreamParams, TestGenerateSsrcs_SimulcastLayersAndRtx) { stream.GetFidSsrcs(primary_ssrcs, &rtx_ssrcs); EXPECT_EQ(kNumStreams, rtx_ssrcs.size()); EXPECT_THAT(rtx_ssrcs, Each(Ne(0u))); - EXPECT_TRUE(stream.has_ssrc_group(cricket::kSimSsrcGroupSemantics)); - EXPECT_TRUE(stream.has_ssrc_group(cricket::kFidSsrcGroupSemantics)); + EXPECT_TRUE(stream.has_ssrc_group(webrtc::kSimSsrcGroupSemantics)); + EXPECT_TRUE(stream.has_ssrc_group(webrtc::kFidSsrcGroupSemantics)); } diff --git a/media/base/test_utils.cc b/media/base/test_utils.cc index 1b288735be..b9479afe3c 100644 --- a/media/base/test_utils.cc +++ b/media/base/test_utils.cc @@ -10,18 +10,19 @@ #include "media/base/test_utils.h" +#include #include +#include +#include -#include "api/video/video_frame.h" -#include "api/video/video_source_interface.h" +#include "media/base/stream_params.h" -namespace cricket { +namespace webrtc { -cricket::StreamParams CreateSimStreamParams( - const std::string& cname, - const std::vector& ssrcs) { - cricket::StreamParams sp; - cricket::SsrcGroup sg(cricket::kSimSsrcGroupSemantics, ssrcs); +StreamParams CreateSimStreamParams(const std::string& cname, + const std::vector& ssrcs) { + StreamParams sp; + SsrcGroup sg(kSimSsrcGroupSemantics, ssrcs); sp.ssrcs = ssrcs; sp.ssrc_groups.push_back(sg); sp.cname = cname; @@ -29,11 +30,11 @@ cricket::StreamParams CreateSimStreamParams( } // There should be an rtx_ssrc per ssrc. -cricket::StreamParams CreateSimWithRtxStreamParams( +StreamParams CreateSimWithRtxStreamParams( const std::string& cname, const std::vector& ssrcs, const std::vector& rtx_ssrcs) { - cricket::StreamParams sp = CreateSimStreamParams(cname, ssrcs); + StreamParams sp = CreateSimStreamParams(cname, ssrcs); for (size_t i = 0; i < ssrcs.size(); ++i) { sp.AddFidSsrc(ssrcs[i], rtx_ssrcs[i]); } @@ -41,15 +42,14 @@ cricket::StreamParams CreateSimWithRtxStreamParams( } // There should be one fec ssrc per ssrc. 
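// Editorial illustration (not part of this patch): a call such as
//   webrtc::CreatePrimaryWithFecFrStreamParams("cname", /*primary_ssrc=*/11,
//                                              /*flexfec_ssrc=*/12)
// is expected to return a StreamParams with ssrcs {11}, cname "cname" and a
// FEC-FR ssrc-group pairing 11 with 12.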
-cricket::StreamParams CreatePrimaryWithFecFrStreamParams( - const std::string& cname, - uint32_t primary_ssrc, - uint32_t flexfec_ssrc) { - cricket::StreamParams sp; +StreamParams CreatePrimaryWithFecFrStreamParams(const std::string& cname, + uint32_t primary_ssrc, + uint32_t flexfec_ssrc) { + StreamParams sp; sp.ssrcs = {primary_ssrc}; sp.cname = cname; sp.AddFecFrSsrc(primary_ssrc, flexfec_ssrc); return sp; } -} // namespace cricket +} // namespace webrtc diff --git a/media/base/test_utils.h b/media/base/test_utils.h index fb18485d32..94f4492752 100644 --- a/media/base/test_utils.h +++ b/media/base/test_utils.h @@ -11,18 +11,19 @@ #ifndef MEDIA_BASE_TEST_UTILS_H_ #define MEDIA_BASE_TEST_UTILS_H_ +#include +#include #include #include -#include "media/base/media_channel.h" -#include "media/base/video_common.h" +#include "media/base/stream_params.h" #include "rtc_base/arraysize.h" namespace webrtc { class VideoFrame; } -namespace cricket { +namespace webrtc { // Returns size of 420 image with rounding on chroma for odd sizes. #define I420_SIZE(w, h) (w * h + (((w + 1) / 2) * ((h + 1) / 2)) * 2) @@ -33,38 +34,34 @@ template inline std::vector MakeVector(const T a[], size_t s) { return std::vector(a, a + s); } -#define MAKE_VECTOR(a) cricket::MakeVector(a, arraysize(a)) - -// Checks whether `codecs` contains `codec`; checks using Codec::Matches(). -template -bool ContainsMatchingCodec(const std::vector& codecs, - const C& codec, - const webrtc::FieldTrialsView* field_trials) { - typename std::vector::const_iterator it; - for (it = codecs.begin(); it != codecs.end(); ++it) { - if (it->Matches(codec, field_trials)) { - return true; - } - } - return false; -} +#define MAKE_VECTOR(a) webrtc::MakeVector(a, arraysize(a)) // Create Simulcast StreamParams with given `ssrcs` and `cname`. -cricket::StreamParams CreateSimStreamParams(const std::string& cname, - const std::vector& ssrcs); +StreamParams CreateSimStreamParams(const std::string& cname, + const std::vector& ssrcs); // Create Simulcast stream with given `ssrcs` and `rtx_ssrcs`. // The number of `rtx_ssrcs` must match number of `ssrcs`. -cricket::StreamParams CreateSimWithRtxStreamParams( +StreamParams CreateSimWithRtxStreamParams( const std::string& cname, const std::vector& ssrcs, const std::vector& rtx_ssrcs); // Create StreamParams with single primary SSRC and corresponding FlexFEC SSRC. -cricket::StreamParams CreatePrimaryWithFecFrStreamParams( - const std::string& cname, - uint32_t primary_ssrc, - uint32_t flexfec_ssrc); +StreamParams CreatePrimaryWithFecFrStreamParams(const std::string& cname, + uint32_t primary_ssrc, + uint32_t flexfec_ssrc); + +} // namespace webrtc +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
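// Editorial illustration (not part of this patch): with the block below in
// place, a legacy call site that still spells
//   cricket::CreateSimStreamParams("cname", ssrcs);
// keeps compiling, because the using-declarations make the name resolve to
// webrtc::CreateSimStreamParams.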
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::CreatePrimaryWithFecFrStreamParams; +using ::webrtc::CreateSimStreamParams; +using ::webrtc::CreateSimWithRtxStreamParams; +using ::webrtc::MakeVector; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_TEST_UTILS_H_ diff --git a/media/base/turn_utils.cc b/media/base/turn_utils.cc index c413117fb6..4e4783f965 100644 --- a/media/base/turn_utils.cc +++ b/media/base/turn_utils.cc @@ -10,10 +10,13 @@ #include "media/base/turn_utils.h" +#include +#include + #include "api/transport/stun.h" #include "rtc_base/byte_order.h" -namespace cricket { +namespace webrtc { namespace { @@ -28,7 +31,7 @@ bool IsTurnSendIndicationPacket(const uint8_t* data, size_t length) { return false; } - uint16_t type = rtc::GetBE16(data); + uint16_t type = webrtc::GetBE16(data); return (type == TURN_SEND_INDICATION); } @@ -49,7 +52,7 @@ bool UnwrapTurnPacket(const uint8_t* packet, // / Application Data / // / / // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ - size_t length = rtc::GetBE16(&packet[2]); + size_t length = webrtc::GetBE16(&packet[2]); if (length + kTurnChannelHeaderLength > packet_size) { return false; } @@ -61,7 +64,7 @@ bool UnwrapTurnPacket(const uint8_t* packet, if (IsTurnSendIndicationPacket(packet, packet_size)) { // Validate STUN message length. - const size_t stun_message_length = rtc::GetBE16(&packet[2]); + const size_t stun_message_length = webrtc::GetBE16(&packet[2]); if (stun_message_length + kStunHeaderSize != packet_size) { return false; } @@ -91,8 +94,8 @@ bool UnwrapTurnPacket(const uint8_t* packet, } // Getting attribute type and length. - attr_type = rtc::GetBE16(&packet[pos]); - attr_length = rtc::GetBE16(&packet[pos + sizeof(attr_type)]); + attr_type = webrtc::GetBE16(&packet[pos]); + attr_length = webrtc::GetBE16(&packet[pos + sizeof(attr_type)]); pos += kAttrHeaderLength; // Skip STUN_DATA_ATTR header. @@ -123,4 +126,4 @@ bool UnwrapTurnPacket(const uint8_t* packet, return true; } -} // namespace cricket +} // namespace webrtc diff --git a/media/base/turn_utils.h b/media/base/turn_utils.h index 82e492c028..689de19b23 100644 --- a/media/base/turn_utils.h +++ b/media/base/turn_utils.h @@ -16,7 +16,7 @@ #include "rtc_base/system/rtc_export.h" -namespace cricket { +namespace webrtc { // Finds data location within a TURN Channel Message or TURN Send Indication // message. @@ -25,6 +25,14 @@ bool RTC_EXPORT UnwrapTurnPacket(const uint8_t* packet, size_t* content_position, size_t* content_size); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::UnwrapTurnPacket; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_TURN_UTILS_H_ diff --git a/media/base/turn_utils_unittest.cc b/media/base/turn_utils_unittest.cc index f7bbf8b8d4..5ffaa0a671 100644 --- a/media/base/turn_utils_unittest.cc +++ b/media/base/turn_utils_unittest.cc @@ -10,9 +10,12 @@ #include "media/base/turn_utils.h" +#include +#include + #include "test/gtest.h" -namespace cricket { +namespace webrtc { // Invalid TURN send indication messages. Messages are proper STUN // messages with incorrect values in attributes. 
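// Editorial illustration (not part of this patch): a minimal, self-contained
// sketch of calling the renamed webrtc::UnwrapTurnPacket on a TURN ChannelData
// message. It assumes the four-argument signature
// (packet, packet_size, content_position, content_size) declared in
// media/base/turn_utils.h; the helper name ExtractChannelDataPayload is
// hypothetical and exists only for this example.
#include <cstddef>
#include <cstdint>

#include "media/base/turn_utils.h"

bool ExtractChannelDataPayload() {
  // ChannelData message: channel number 0x4001, payload length 4, followed by
  // 4 payload bytes.
  const uint8_t packet[] = {0x40, 0x01, 0x00, 0x04, 0xde, 0xad, 0xbe, 0xef};
  size_t content_position = 0;
  size_t content_size = 0;
  if (!webrtc::UnwrapTurnPacket(packet, sizeof(packet), &content_position,
                                &content_size)) {
    return false;
  }
  // For this packet the payload is expected to start right after the 4-byte
  // ChannelData header and to span the 4 bytes given by the length field.
  return content_position == 4 && content_size == 4;
}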
@@ -124,4 +127,4 @@ TEST(TurnUtilsTest, ChannelMessageZeroLength) { EXPECT_EQ(0u, content_size); } -} // namespace cricket +} // namespace webrtc diff --git a/media/base/video_adapter.cc b/media/base/video_adapter.cc index daac8cf856..a4f88145f4 100644 --- a/media/base/video_adapter.cc +++ b/media/base/video_adapter.cc @@ -15,15 +15,19 @@ #include #include #include +#include +#include +#include #include -#include "absl/types/optional.h" +#include "api/video/resolution.h" +#include "api/video/video_source_interface.h" #include "media/base/video_common.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" -#include "system_wrappers/include/field_trial.h" namespace { @@ -32,7 +36,7 @@ struct Fraction { int denominator; void DivideByGcd() { - int g = cricket::GreatestCommonDivisor(numerator, denominator); + int g = std::gcd(numerator, denominator); numerator /= g; denominator /= g; } @@ -59,8 +63,7 @@ int roundUp(int value_to_round, int multiple, int max_value) { Fraction FindScale(int input_width, int input_height, int target_pixels, - int max_pixels, - bool variable_start_scale_factor) { + int max_pixels) { // This function only makes sense for a positive target. RTC_DCHECK_GT(target_pixels, 0); RTC_DCHECK_GT(max_pixels, 0); @@ -75,16 +78,14 @@ Fraction FindScale(int input_width, Fraction current_scale = Fraction{1, 1}; Fraction best_scale = Fraction{1, 1}; - if (variable_start_scale_factor) { - // Start scaling down by 2/3 depending on `input_width` and `input_height`. - if (input_width % 3 == 0 && input_height % 3 == 0) { - // 2/3 (then alternates 3/4, 2/3, 3/4,...). - current_scale = Fraction{6, 6}; - } - if (input_width % 9 == 0 && input_height % 9 == 0) { - // 2/3, 2/3 (then alternates 3/4, 2/3, 3/4,...). - current_scale = Fraction{36, 36}; - } + // Start scaling down by 2/3 depending on `input_width` and `input_height`. + if (input_width % 3 == 0 && input_height % 3 == 0) { + // 2/3 (then alternates 3/4, 2/3, 3/4,...). + current_scale = Fraction{6, 6}; + } + if (input_width % 9 == 0 && input_height % 9 == 0) { + // 2/3, 2/3 (then alternates 3/4, 2/3, 3/4,...). 
+ current_scale = Fraction{36, 36}; } // The minimum (absolute) difference between the number of output pixels and @@ -125,17 +126,17 @@ Fraction FindScale(int input_width, return best_scale; } -absl::optional> Swap( - const absl::optional>& in) { +std::optional> Swap( + const std::optional>& in) { if (!in) { - return absl::nullopt; + return std::nullopt; } return std::make_pair(in->second, in->first); } } // namespace -namespace cricket { +namespace webrtc { VideoAdapter::VideoAdapter(int source_resolution_alignment) : frames_in_(0), @@ -144,8 +145,6 @@ VideoAdapter::VideoAdapter(int source_resolution_alignment) adaption_changes_(0), previous_width_(0), previous_height_(0), - variable_start_scale_factor_(!webrtc::field_trial::IsDisabled( - "WebRTC-Video-VariableStartScaleFactor")), source_resolution_alignment_(source_resolution_alignment), resolution_alignment_(source_resolution_alignment), resolution_request_target_pixel_count_(std::numeric_limits::max()), @@ -172,7 +171,7 @@ bool VideoAdapter::AdaptFrameResolution(int in_width, int* cropped_height, int* out_width, int* out_height) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); ++frames_in_; // The max output pixel count is the minimum of the requests from @@ -181,7 +180,7 @@ bool VideoAdapter::AdaptFrameResolution(int in_width, // Select target aspect ratio and max pixel count depending on input frame // orientation. - absl::optional> target_aspect_ratio; + std::optional> target_aspect_ratio; if (in_width > in_height) { target_aspect_ratio = output_format_request_.target_landscape_aspect_ratio; if (output_format_request_.max_landscape_pixel_count) @@ -231,9 +230,8 @@ bool VideoAdapter::AdaptFrameResolution(int in_width, *cropped_height = std::min(in_height, static_cast(in_width / requested_aspect)); } - const Fraction scale = - FindScale(*cropped_width, *cropped_height, target_pixel_count, - max_pixel_count, variable_start_scale_factor_); + const Fraction scale = FindScale(*cropped_width, *cropped_height, + target_pixel_count, max_pixel_count); // Adjust cropping slightly to get correctly aligned output size and a perfect // scale factor. *cropped_width = roundUp(*cropped_width, @@ -243,12 +241,41 @@ bool VideoAdapter::AdaptFrameResolution(int in_width, RTC_DCHECK_EQ(0, *cropped_width % scale.denominator); RTC_DCHECK_EQ(0, *cropped_height % scale.denominator); - // Calculate final output size. + // Calculate output size. *out_width = *cropped_width / scale.denominator * scale.numerator; *out_height = *cropped_height / scale.denominator * scale.numerator; RTC_DCHECK_EQ(0, *out_width % resolution_alignment_); RTC_DCHECK_EQ(0, *out_height % resolution_alignment_); + // Lastly, make the output size fit within the resolution restrictions as + // specified by `scale_resolution_down_to_`. This does not modify aspect ratio + // or cropping, only `out_width` and `out_height`. + if (scale_resolution_down_to_.has_value()) { + // Make frame and "scale to" have matching orientation. + Resolution scale_resolution_down_to = scale_resolution_down_to_.value(); + if ((*out_width < *out_height) != (scale_resolution_down_to_->width < + scale_resolution_down_to_->height)) { + scale_resolution_down_to = {.width = scale_resolution_down_to_->height, + .height = scale_resolution_down_to_->width}; + } + // Downscale by smallest scaling factor, if necessary. 
+ if (*out_width > 0 && *out_height > 0 && + (scale_resolution_down_to.width < *out_width || + scale_resolution_down_to.height < *out_height)) { + double scale_factor = std::min( + scale_resolution_down_to.width / static_cast(*out_width), + scale_resolution_down_to.height / static_cast(*out_height)); + *out_width = + roundUp(std::round(*out_width * scale_factor), resolution_alignment_, + scale_resolution_down_to.width); + *out_height = + roundUp(std::round(*out_height * scale_factor), resolution_alignment_, + scale_resolution_down_to.height); + RTC_DCHECK_EQ(0, *out_width % resolution_alignment_); + RTC_DCHECK_EQ(0, *out_height % resolution_alignment_); + } + } + ++frames_out_; if (scale.numerator != scale.denominator) ++frames_scaled_; @@ -274,25 +301,25 @@ bool VideoAdapter::AdaptFrameResolution(int in_width, } void VideoAdapter::OnOutputFormatRequest( - const absl::optional& format) { - absl::optional> target_aspect_ratio; - absl::optional max_pixel_count; - absl::optional max_fps; + const std::optional& format) { + std::optional> target_aspect_ratio; + std::optional max_pixel_count; + std::optional max_fps; if (format) { target_aspect_ratio = std::make_pair(format->width, format->height); max_pixel_count = format->width * format->height; if (format->interval > 0) - max_fps = rtc::kNumNanosecsPerSec / format->interval; + max_fps = webrtc::kNumNanosecsPerSec / format->interval; } OnOutputFormatRequest(target_aspect_ratio, max_pixel_count, max_fps); } void VideoAdapter::OnOutputFormatRequest( - const absl::optional>& target_aspect_ratio, - const absl::optional& max_pixel_count, - const absl::optional& max_fps) { - absl::optional> target_landscape_aspect_ratio; - absl::optional> target_portrait_aspect_ratio; + const std::optional>& target_aspect_ratio, + const std::optional& max_pixel_count, + const std::optional& max_fps) { + std::optional> target_landscape_aspect_ratio; + std::optional> target_portrait_aspect_ratio; if (target_aspect_ratio && target_aspect_ratio->first > 0 && target_aspect_ratio->second > 0) { // Maintain input orientation. @@ -308,12 +335,12 @@ void VideoAdapter::OnOutputFormatRequest( } void VideoAdapter::OnOutputFormatRequest( - const absl::optional>& target_landscape_aspect_ratio, - const absl::optional& max_landscape_pixel_count, - const absl::optional>& target_portrait_aspect_ratio, - const absl::optional& max_portrait_pixel_count, - const absl::optional& max_fps) { - webrtc::MutexLock lock(&mutex_); + const std::optional>& target_landscape_aspect_ratio, + const std::optional& max_landscape_pixel_count, + const std::optional>& target_portrait_aspect_ratio, + const std::optional& max_portrait_pixel_count, + const std::optional& max_fps) { + MutexLock lock(&mutex_); OutputFormatRequest request = { .target_landscape_aspect_ratio = target_landscape_aspect_ratio, @@ -325,7 +352,7 @@ void VideoAdapter::OnOutputFormatRequest( if (stashed_output_format_request_) { // Save the output format request for later use in case the encoder making // this call would become active, because currently all active encoders use - // requested_resolution instead. + // scale_resolution_down_to instead. 
stashed_output_format_request_ = request; RTC_LOG(LS_INFO) << "Stashing OnOutputFormatRequest: " << stashed_output_format_request_->ToString(); @@ -338,37 +365,33 @@ void VideoAdapter::OnOutputFormatRequest( framerate_controller_.Reset(); } -void VideoAdapter::OnSinkWants(const rtc::VideoSinkWants& sink_wants) { - webrtc::MutexLock lock(&mutex_); +void VideoAdapter::OnSinkWants(const VideoSinkWants& sink_wants) { + MutexLock lock(&mutex_); resolution_request_max_pixel_count_ = sink_wants.max_pixel_count; resolution_request_target_pixel_count_ = sink_wants.target_pixel_count.value_or( resolution_request_max_pixel_count_); max_framerate_request_ = sink_wants.max_framerate_fps; - resolution_alignment_ = cricket::LeastCommonMultiple( - source_resolution_alignment_, sink_wants.resolution_alignment); - - if (!sink_wants.aggregates) { - RTC_LOG(LS_WARNING) - << "These should always be created by VideoBroadcaster!"; - return; + resolution_alignment_ = + std::lcm(source_resolution_alignment_, sink_wants.resolution_alignment); + // Convert from std::optional to + // std::optional. Both are {int,int}. + scale_resolution_down_to_ = std::nullopt; + if (sink_wants.requested_resolution.has_value()) { + scale_resolution_down_to_ = { + .width = sink_wants.requested_resolution->width, + .height = sink_wants.requested_resolution->height}; } - // If requested_resolution is used, and there are no active encoders - // that are NOT using requested_resolution (aka newapi), then override - // calls to OnOutputFormatRequest and use values from requested_resolution + // If scale_resolution_down_to is used, and there are no active encoders + // that are NOT using scale_resolution_down_to (aka newapi), then override + // calls to OnOutputFormatRequest and use values from scale_resolution_down_to // instead (combined with qualityscaling based on pixel counts above). - if (webrtc::field_trial::IsDisabled( - "WebRTC-Video-RequestedResolutionOverrideOutputFormatRequest")) { - // kill-switch... - return; - } - if (!sink_wants.requested_resolution) { if (stashed_output_format_request_) { // because current active_output_format_request is based on - // requested_resolution logic, while current encoder(s) doesn't want that, - // we have to restore the stashed request. + // scale_resolution_down_to logic, while current encoder(s) doesn't want + // that, we have to restore the stashed request. RTC_LOG(LS_INFO) << "Unstashing OnOutputFormatRequest: " << stashed_output_format_request_->ToString(); output_format_request_ = *stashed_output_format_request_; @@ -377,12 +400,20 @@ void VideoAdapter::OnSinkWants(const rtc::VideoSinkWants& sink_wants) { return; } - if (sink_wants.aggregates->any_active_without_requested_resolution) { + // The code below is only needed when `scale_resolution_down_to` is signalled + // back to the video source which only happens if + // `VideoStreamEncoderSettings::use_standard_scale_resolution_down_to` is + // false. + // TODO(https://crbug.com/webrtc/366284861): Delete the code below as part of + // deleting this flag and only supporting the standard behavior. + + if (sink_wants.aggregates.has_value() && + sink_wants.aggregates->any_active_without_requested_resolution) { return; } if (!stashed_output_format_request_) { - // The active output format request is about to be rewritten by + // The active output format request is about to be cleared due to // request_resolution. We need to save it for later use in case the encoder // which doesn't use request_resolution logic become active in the future. 
stashed_output_format_request_ = output_format_request_; @@ -390,26 +421,18 @@ void VideoAdapter::OnSinkWants(const rtc::VideoSinkWants& sink_wants) { << stashed_output_format_request_->ToString(); } - auto res = *sink_wants.requested_resolution; - auto pixel_count = res.width * res.height; - output_format_request_.target_landscape_aspect_ratio = - std::make_pair(res.width, res.height); - output_format_request_.max_landscape_pixel_count = pixel_count; - output_format_request_.target_portrait_aspect_ratio = - std::make_pair(res.height, res.width); - output_format_request_.max_portrait_pixel_count = pixel_count; - output_format_request_.max_fps = max_framerate_request_; - RTC_LOG(LS_INFO) << "Setting output_format_request_ based on sink_wants: " - << output_format_request_.ToString(); + // Clear the output format request, `scale_resolution_down_to_` will be + // applied instead which happens inside AdaptFrameResolution(). + output_format_request_ = {}; } int VideoAdapter::GetTargetPixels() const { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return resolution_request_target_pixel_count_; } float VideoAdapter::GetMaxFramerate() const { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); // Minimum of `output_format_request_.max_fps` and `max_framerate_request_` is // used to throttle frame-rate. int framerate = @@ -423,7 +446,7 @@ float VideoAdapter::GetMaxFramerate() const { } std::string VideoAdapter::OutputFormatRequest::ToString() const { - rtc::StringBuilder oss; + StringBuilder oss; oss << "[ "; if (target_landscape_aspect_ratio == Swap(target_portrait_aspect_ratio) && max_landscape_pixel_count == max_portrait_pixel_count) { @@ -467,4 +490,4 @@ std::string VideoAdapter::OutputFormatRequest::ToString() const { return oss.Release(); } -} // namespace cricket +} // namespace webrtc diff --git a/media/base/video_adapter.h b/media/base/video_adapter.h index b3e69c492b..670984eb1c 100644 --- a/media/base/video_adapter.h +++ b/media/base/video_adapter.h @@ -13,10 +13,11 @@ #include +#include #include #include -#include "absl/types/optional.h" +#include "api/video/resolution.h" #include "api/video/video_source_interface.h" #include "common_video/framerate_controller.h" #include "media/base/video_common.h" @@ -24,7 +25,7 @@ #include "rtc_base/system/rtc_export.h" #include "rtc_base/thread_annotations.h" -namespace cricket { +namespace webrtc { // VideoAdapter adapts an input video frame to an output frame based on the // specified input and output formats. The adaptation includes dropping frames @@ -62,7 +63,7 @@ class RTC_EXPORT VideoAdapter { // maintain the input orientation, so it doesn't matter if e.g. 1280x720 or // 720x1280 is requested. // Note: Should be called from the source only. - void OnOutputFormatRequest(const absl::optional& format) + void OnOutputFormatRequest(const std::optional& format) RTC_LOCKS_EXCLUDED(mutex_); // Requests output frame size and frame interval from `AdaptFrameResolution`. @@ -74,20 +75,20 @@ class RTC_EXPORT VideoAdapter { // `max_fps`: The maximum output framerate. // Note: Should be called from the source only. 
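  // Editorial illustration (not part of this patch), assuming a VideoAdapter
  // instance named `adapter`: requesting 16:9 output capped at 1280x720 pixels
  // and 30 fps could look like
  //   adapter.OnOutputFormatRequest(std::make_pair(1280, 720), 1280 * 720, 30);
  // while passing std::nullopt for all three arguments clears these limits.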
void OnOutputFormatRequest( - const absl::optional>& target_aspect_ratio, - const absl::optional& max_pixel_count, - const absl::optional& max_fps) RTC_LOCKS_EXCLUDED(mutex_); + const std::optional>& target_aspect_ratio, + const std::optional& max_pixel_count, + const std::optional& max_fps) RTC_LOCKS_EXCLUDED(mutex_); // Same as above, but allows setting two different target aspect ratios // depending on incoming frame orientation. This gives more fine-grained // control and can e.g. be used to force landscape video to be cropped to // portrait video. void OnOutputFormatRequest( - const absl::optional>& target_landscape_aspect_ratio, - const absl::optional& max_landscape_pixel_count, - const absl::optional>& target_portrait_aspect_ratio, - const absl::optional& max_portrait_pixel_count, - const absl::optional& max_fps) RTC_LOCKS_EXCLUDED(mutex_); + const std::optional>& target_landscape_aspect_ratio, + const std::optional& max_landscape_pixel_count, + const std::optional>& target_portrait_aspect_ratio, + const std::optional& max_portrait_pixel_count, + const std::optional& max_fps) RTC_LOCKS_EXCLUDED(mutex_); // Requests the output frame size from `AdaptFrameResolution` to have as close // as possible to `sink_wants.target_pixel_count` pixels (if set) @@ -99,8 +100,7 @@ class RTC_EXPORT VideoAdapter { // The sink resolution alignment requirement is given by // `sink_wants.resolution_alignment`. // Note: Should be called from the sink only. - void OnSinkWants(const rtc::VideoSinkWants& sink_wants) - RTC_LOCKS_EXCLUDED(mutex_); + void OnSinkWants(const VideoSinkWants& sink_wants) RTC_LOCKS_EXCLUDED(mutex_); // Returns maximum image area, which shouldn't impose any adaptations. // Can return `numeric_limits::max()` if no limit is set. @@ -122,7 +122,6 @@ class RTC_EXPORT VideoAdapter { int previous_width_ RTC_GUARDED_BY(mutex_); // Previous adapter output width. int previous_height_ RTC_GUARDED_BY(mutex_); // Previous adapter output height. - const bool variable_start_scale_factor_; // The fixed source resolution alignment requirement. const int source_resolution_alignment_; @@ -135,11 +134,11 @@ class RTC_EXPORT VideoAdapter { // OnResolutionFramerateRequest respectively. // The adapted output format is the minimum of these. struct OutputFormatRequest { - absl::optional> target_landscape_aspect_ratio; - absl::optional max_landscape_pixel_count; - absl::optional> target_portrait_aspect_ratio; - absl::optional max_portrait_pixel_count; - absl::optional max_fps; + std::optional> target_landscape_aspect_ratio; + std::optional max_landscape_pixel_count; + std::optional> target_portrait_aspect_ratio; + std::optional max_portrait_pixel_count; + std::optional max_fps; // For logging. std::string ToString() const; @@ -149,24 +148,33 @@ class RTC_EXPORT VideoAdapter { int resolution_request_target_pixel_count_ RTC_GUARDED_BY(mutex_); int resolution_request_max_pixel_count_ RTC_GUARDED_BY(mutex_); int max_framerate_request_ RTC_GUARDED_BY(mutex_); + std::optional scale_resolution_down_to_ RTC_GUARDED_BY(mutex_); // Stashed OutputFormatRequest that is used to save value of // OnOutputFormatRequest in case all active encoders are using - // requested_resolution. I.e when all active encoders are using - // requested_resolution, the call to OnOutputFormatRequest is ignored - // and the value from requested_resolution is used instead (to scale/crop + // scale_resolution_down_to. 
I.e when all active encoders are using + // scale_resolution_down_to, the call to OnOutputFormatRequest is ignored + // and the value from scale_resolution_down_to is used instead (to scale/crop // frame). This allows for an application to only use // RtpEncodingParameters::request_resolution and get the same behavior as if // it had used VideoAdapter::OnOutputFormatRequest. - absl::optional stashed_output_format_request_ + std::optional stashed_output_format_request_ RTC_GUARDED_BY(mutex_); - webrtc::FramerateController framerate_controller_ RTC_GUARDED_BY(mutex_); + FramerateController framerate_controller_ RTC_GUARDED_BY(mutex_); // The critical section to protect the above variables. - mutable webrtc::Mutex mutex_; + mutable Mutex mutex_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::VideoAdapter; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_VIDEO_ADAPTER_H_ diff --git a/media/base/video_adapter_unittest.cc b/media/base/video_adapter_unittest.cc index 778e61e74c..135afa5f4a 100644 --- a/media/base/video_adapter_unittest.cc +++ b/media/base/video_adapter_unittest.cc @@ -10,8 +10,11 @@ #include "media/base/video_adapter.h" +#include +#include #include #include +#include #include #include @@ -19,55 +22,51 @@ #include "api/video/video_frame.h" #include "api/video/video_source_interface.h" #include "media/base/fake_frame_source.h" +#include "media/base/video_common.h" #include "rtc_base/arraysize.h" #include "rtc_base/time_utils.h" #include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" -namespace cricket { +namespace webrtc { namespace { -const int kWidth = 1280; -const int kHeight = 720; -const int kDefaultFps = 30; +constexpr int kWidth = 1280; +constexpr int kHeight = 720; +constexpr int kDefaultFps = 30; using ::testing::_; using ::testing::Eq; using ::testing::Pair; -using webrtc::Resolution; +using ::webrtc::test::ScopedFieldTrials; -rtc::VideoSinkWants BuildSinkWants(absl::optional target_pixel_count, - int max_pixel_count, - int max_framerate_fps, - int sink_alignment = 1) { - rtc::VideoSinkWants wants; +VideoSinkWants BuildSinkWants(std::optional target_pixel_count, + int max_pixel_count, + int max_framerate_fps, + int sink_alignment = 1) { + VideoSinkWants wants; wants.target_pixel_count = target_pixel_count; wants.max_pixel_count = max_pixel_count; wants.max_framerate_fps = max_framerate_fps; wants.resolution_alignment = sink_alignment; wants.is_active = true; - wants.aggregates.emplace(rtc::VideoSinkWants::Aggregates()); + wants.aggregates.emplace(VideoSinkWants::Aggregates()); wants.aggregates->any_active_without_requested_resolution = false; return wants; } -rtc::VideoSinkWants BuildSinkWants( - absl::optional requested_resolution, +VideoSinkWants BuildSinkWants( + std::optional scale_resolution_down_to, bool any_active_without_requested_resolution) { - rtc::VideoSinkWants wants; + VideoSinkWants wants; wants.max_framerate_fps = kDefaultFps; wants.resolution_alignment = 1; wants.is_active = true; - if (requested_resolution) { - wants.target_pixel_count = requested_resolution->PixelCount(); - wants.max_pixel_count = requested_resolution->PixelCount(); - wants.requested_resolution.emplace(rtc::VideoSinkWants::FrameSize( - requested_resolution->width, requested_resolution->height)); - } else { - 
wants.target_pixel_count = kWidth * kHeight; - wants.max_pixel_count = kWidth * kHeight; + if (scale_resolution_down_to) { + wants.requested_resolution.emplace(VideoSinkWants::FrameSize( + scale_resolution_down_to->width, scale_resolution_down_to->height)); } - wants.aggregates.emplace(rtc::VideoSinkWants::Aggregates()); + wants.aggregates.emplace(VideoSinkWants::Aggregates()); wants.aggregates->any_active_without_requested_resolution = any_active_without_requested_resolution; return wants; @@ -86,7 +85,7 @@ class VideoAdapterTest : public ::testing::Test, kWidth, kHeight, VideoFormat::FpsToInterval(kDefaultFps) / - rtc::kNumNanosecsPerMicrosec)), + webrtc::kNumNanosecsPerMicrosec)), adapter_(source_resolution_alignment), adapter_wrapper_(std::make_unique(&adapter_)), use_new_format_request_(GetParam()) {} @@ -109,7 +108,7 @@ class VideoAdapterTest : public ::testing::Test, explicit VideoAdapterWrapper(VideoAdapter* adapter) : video_adapter_(adapter) {} - void AdaptFrame(const webrtc::VideoFrame& frame) { + void AdaptFrame(const VideoFrame& frame) { const int in_width = frame.width(); const int in_height = frame.height(); int cropped_width; @@ -118,7 +117,7 @@ class VideoAdapterTest : public ::testing::Test, int out_height; if (video_adapter_->AdaptFrameResolution( in_width, in_height, - frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec, + frame.timestamp_us() * webrtc::kNumNanosecsPerMicrosec, &cropped_width, &cropped_height, &out_width, &out_height)) { stats_.cropped_width = cropped_width; stats_.cropped_height = cropped_height; @@ -153,26 +152,25 @@ class VideoAdapterTest : public ::testing::Test, void OnOutputFormatRequest(int width, int height, - const absl::optional& fps) { + const std::optional& fps) { if (use_new_format_request_) { - absl::optional> target_aspect_ratio = + std::optional> target_aspect_ratio = std::make_pair(width, height); - absl::optional max_pixel_count = width * height; - absl::optional max_fps = fps; + std::optional max_pixel_count = width * height; + std::optional max_fps = fps; adapter_.OnOutputFormatRequest(target_aspect_ratio, max_pixel_count, max_fps); return; } adapter_.OnOutputFormatRequest( VideoFormat(width, height, fps ? VideoFormat::FpsToInterval(*fps) : 0, - cricket::FOURCC_I420)); + webrtc::FOURCC_I420)); } // Return pair of - std::pair AdaptFrameResolution( - webrtc::Resolution res) { - webrtc::Resolution out; - webrtc::Resolution cropped; + std::pair AdaptFrameResolution(Resolution res) { + Resolution out; + Resolution cropped; timestamp_ns_ += 1000000000; EXPECT_TRUE(adapter_.AdaptFrameResolution( res.width, res.height, timestamp_ns_, &cropped.width, &cropped.height, @@ -180,7 +178,7 @@ class VideoAdapterTest : public ::testing::Test, return std::make_pair(out, cropped); } - webrtc::test::ScopedFieldTrials override_field_trials_; + ScopedFieldTrials override_field_trials_; const std::unique_ptr frame_source_; VideoAdapter adapter_; int64_t timestamp_ns_ = 0; @@ -211,7 +209,7 @@ TEST_P(VideoAdapterTest, AdaptNothing) { } TEST_P(VideoAdapterTest, AdaptZeroInterval) { - OnOutputFormatRequest(kWidth, kHeight, absl::nullopt); + OnOutputFormatRequest(kWidth, kHeight, std::nullopt); for (int i = 0; i < 40; ++i) adapter_wrapper_->AdaptFrame(frame_source_->GetFrame()); @@ -265,7 +263,7 @@ TEST_P(VideoAdapterTest, AdaptFramerateHighLimit) { // Expect the number of dropped frames to be half of the number the captured // frames. 
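As context for the fixture above, the two OnOutputFormatRequest overloads it exercises can also be driven directly. A minimal sketch using the same 640x360 @ 30 fps values that appear in these tests, assuming media/base/video_adapter.h and the post-rename webrtc namespace:

#include <utility>

#include "media/base/video_adapter.h"
#include "media/base/video_common.h"

void RequestVgaAt30Fps(webrtc::VideoAdapter& adapter) {
  // New-style request: target aspect ratio, pixel-count cap and fps cap,
  // each passed as an optional.
  adapter.OnOutputFormatRequest(std::make_pair(640, 360), 640 * 360, 30);
  // Legacy request carrying the same constraints in a single VideoFormat.
  adapter.OnOutputFormatRequest(webrtc::VideoFormat(
      640, 360, webrtc::VideoFormat::FpsToInterval(30), webrtc::FOURCC_I420));
}

The halved-frame-rate expectation described in the comment above is what the next test verifies.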
TEST_P(VideoAdapterTest, AdaptFramerateToHalfWithNoPixelLimit) { - adapter_.OnOutputFormatRequest(absl::nullopt, absl::nullopt, kDefaultFps / 2); + adapter_.OnOutputFormatRequest(std::nullopt, std::nullopt, kDefaultFps / 2); // Capture 10 frames and verify that every other frame is dropped. The first // frame should not be dropped. @@ -305,7 +303,7 @@ TEST_P(VideoAdapterTest, AdaptFramerateOntheFly) { // Do not adapt the frame rate or the resolution. Expect no frame drop, no // cropping, and no resolution change. TEST_P(VideoAdapterTest, AdaptFramerateRequestMax) { - adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, + adapter_.OnSinkWants(BuildSinkWants(std::nullopt, std::numeric_limits::max(), std::numeric_limits::max())); @@ -322,7 +320,7 @@ TEST_P(VideoAdapterTest, AdaptFramerateRequestMax) { TEST_P(VideoAdapterTest, AdaptFramerateRequestZero) { adapter_.OnSinkWants( - BuildSinkWants(absl::nullopt, std::numeric_limits::max(), 0)); + BuildSinkWants(std::nullopt, std::numeric_limits::max(), 0)); for (int i = 0; i < 10; ++i) adapter_wrapper_->AdaptFrame(frame_source_->GetFrame()); @@ -336,7 +334,7 @@ TEST_P(VideoAdapterTest, AdaptFramerateRequestZero) { // the number of dropped frames to be half of the number the captured frames. TEST_P(VideoAdapterTest, AdaptFramerateRequestHalf) { adapter_.OnSinkWants(BuildSinkWants( - absl::nullopt, std::numeric_limits::max(), kDefaultFps / 2)); + std::nullopt, std::numeric_limits::max(), kDefaultFps / 2)); for (int i = 0; i < 10; ++i) adapter_wrapper_->AdaptFrame(frame_source_->GetFrame()); @@ -451,7 +449,7 @@ TEST_P(VideoAdapterTest, TestOnOutputFormatRequest) { EXPECT_EQ(400, out_height_); // Format request 640x400. - OnOutputFormatRequest(640, 400, absl::nullopt); + OnOutputFormatRequest(640, 400, std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -462,7 +460,7 @@ TEST_P(VideoAdapterTest, TestOnOutputFormatRequest) { // Request 1280x720, higher than input, but aspect 16:9. Expect cropping but // no scaling. - OnOutputFormatRequest(1280, 720, absl::nullopt); + OnOutputFormatRequest(1280, 720, std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -472,13 +470,13 @@ TEST_P(VideoAdapterTest, TestOnOutputFormatRequest) { EXPECT_EQ(360, out_height_); // Request 0x0. - OnOutputFormatRequest(0, 0, absl::nullopt); + OnOutputFormatRequest(0, 0, std::nullopt); EXPECT_FALSE(adapter_.AdaptFrameResolution(640, 400, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); // Request 320x200. Expect scaling, but no cropping. - OnOutputFormatRequest(320, 200, absl::nullopt); + OnOutputFormatRequest(320, 200, std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -490,7 +488,7 @@ TEST_P(VideoAdapterTest, TestOnOutputFormatRequest) { // Request resolution close to 2/3 scale. Expect adapt down. Scaling to 2/3 // is not optimized and not allowed, therefore 1/2 scaling will be used // instead. - OnOutputFormatRequest(424, 265, absl::nullopt); + OnOutputFormatRequest(424, 265, std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -500,7 +498,7 @@ TEST_P(VideoAdapterTest, TestOnOutputFormatRequest) { EXPECT_EQ(200, out_height_); // Request resolution of 3 / 8. Expect adapt down. 
- OnOutputFormatRequest(640 * 3 / 8, 400 * 3 / 8, absl::nullopt); + OnOutputFormatRequest(640 * 3 / 8, 400 * 3 / 8, std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -510,7 +508,7 @@ TEST_P(VideoAdapterTest, TestOnOutputFormatRequest) { EXPECT_EQ(400 * 3 / 8, out_height_); // Switch back up. Expect adapt. - OnOutputFormatRequest(320, 200, absl::nullopt); + OnOutputFormatRequest(320, 200, std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -520,7 +518,7 @@ TEST_P(VideoAdapterTest, TestOnOutputFormatRequest) { EXPECT_EQ(200, out_height_); // Format request 480x300. - OnOutputFormatRequest(480, 300, absl::nullopt); + OnOutputFormatRequest(480, 300, std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -541,7 +539,7 @@ TEST_P(VideoAdapterTest, TestViewRequestPlusCameraSwitch) { EXPECT_EQ(720, out_height_); // Format request for VGA. - OnOutputFormatRequest(640, 360, absl::nullopt); + OnOutputFormatRequest(640, 360, std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -562,7 +560,7 @@ TEST_P(VideoAdapterTest, TestViewRequestPlusCameraSwitch) { // And another view request comes in for 640x360, which should have no // real impact. - OnOutputFormatRequest(640, 360, absl::nullopt); + OnOutputFormatRequest(640, 360, std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 360, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -574,7 +572,7 @@ TEST_P(VideoAdapterTest, TestViewRequestPlusCameraSwitch) { TEST_P(VideoAdapterTest, TestVgaWidth) { // Requested output format is 640x360. - OnOutputFormatRequest(640, 360, absl::nullopt); + OnOutputFormatRequest(640, 360, std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0, &cropped_width_, &cropped_height_, &out_width_, @@ -613,7 +611,7 @@ TEST_P(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) { EXPECT_EQ(720, out_height_); // Adapt down one step. - adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, 1280 * 720 - 1, + adapter_.OnSinkWants(BuildSinkWants(std::nullopt, 1280 * 720 - 1, std::numeric_limits::max())); EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0, &cropped_width_, &cropped_height_, &out_width_, @@ -624,7 +622,7 @@ TEST_P(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) { EXPECT_EQ(540, out_height_); // Adapt down one step more. - adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, 960 * 540 - 1, + adapter_.OnSinkWants(BuildSinkWants(std::nullopt, 960 * 540 - 1, std::numeric_limits::max())); EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0, &cropped_width_, &cropped_height_, &out_width_, @@ -635,7 +633,7 @@ TEST_P(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) { EXPECT_EQ(360, out_height_); // Adapt down one step more. 
- adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, 640 * 360 - 1, + adapter_.OnSinkWants(BuildSinkWants(std::nullopt, 640 * 360 - 1, std::numeric_limits::max())); EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0, &cropped_width_, &cropped_height_, &out_width_, @@ -689,7 +687,7 @@ TEST_P(VideoAdapterTest, TestOnResolutionRequestMaxZero) { EXPECT_EQ(720, out_height_); adapter_.OnSinkWants( - BuildSinkWants(absl::nullopt, 0, std::numeric_limits::max())); + BuildSinkWants(std::nullopt, 0, std::numeric_limits::max())); EXPECT_FALSE(adapter_.AdaptFrameResolution(1280, 720, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -697,7 +695,7 @@ TEST_P(VideoAdapterTest, TestOnResolutionRequestMaxZero) { TEST_P(VideoAdapterTest, TestOnResolutionRequestInLargeSteps) { // Large step down. - adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, 640 * 360 - 1, + adapter_.OnSinkWants(BuildSinkWants(std::nullopt, 640 * 360 - 1, std::numeric_limits::max())); EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0, &cropped_width_, &cropped_height_, &out_width_, @@ -720,7 +718,7 @@ TEST_P(VideoAdapterTest, TestOnResolutionRequestInLargeSteps) { } TEST_P(VideoAdapterTest, TestOnOutputFormatRequestCapsMaxResolution) { - adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, 640 * 360 - 1, + adapter_.OnSinkWants(BuildSinkWants(std::nullopt, 640 * 360 - 1, std::numeric_limits::max())); EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0, &cropped_width_, &cropped_height_, &out_width_, @@ -730,7 +728,7 @@ TEST_P(VideoAdapterTest, TestOnOutputFormatRequestCapsMaxResolution) { EXPECT_EQ(480, out_width_); EXPECT_EQ(270, out_height_); - OnOutputFormatRequest(640, 360, absl::nullopt); + OnOutputFormatRequest(640, 360, std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -739,8 +737,8 @@ TEST_P(VideoAdapterTest, TestOnOutputFormatRequestCapsMaxResolution) { EXPECT_EQ(480, out_width_); EXPECT_EQ(270, out_height_); - adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, 960 * 720, - std::numeric_limits::max())); + adapter_.OnSinkWants( + BuildSinkWants(std::nullopt, 960 * 720, std::numeric_limits::max())); EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -759,7 +757,7 @@ TEST_P(VideoAdapterTest, TestOnResolutionRequestReset) { EXPECT_EQ(1280, out_width_); EXPECT_EQ(720, out_height_); - adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, 640 * 360 - 1, + adapter_.OnSinkWants(BuildSinkWants(std::nullopt, 640 * 360 - 1, std::numeric_limits::max())); EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0, &cropped_width_, &cropped_height_, &out_width_, @@ -769,7 +767,7 @@ TEST_P(VideoAdapterTest, TestOnResolutionRequestReset) { EXPECT_EQ(480, out_width_); EXPECT_EQ(270, out_height_); - adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, + adapter_.OnSinkWants(BuildSinkWants(std::nullopt, std::numeric_limits::max(), std::numeric_limits::max())); EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0, &cropped_width_, @@ -790,7 +788,7 @@ TEST_P(VideoAdapterTest, TestOnOutputFormatRequestResolutionReset) { EXPECT_EQ(1280, out_width_); EXPECT_EQ(720, out_height_); - adapter_.OnOutputFormatRequest(absl::nullopt, 640 * 360 - 1, absl::nullopt); + adapter_.OnOutputFormatRequest(std::nullopt, 640 * 360 - 1, std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -799,7 +797,7 @@ 
TEST_P(VideoAdapterTest, TestOnOutputFormatRequestResolutionReset) { EXPECT_EQ(480, out_width_); EXPECT_EQ(270, out_height_); - adapter_.OnOutputFormatRequest(absl::nullopt, absl::nullopt, absl::nullopt); + adapter_.OnOutputFormatRequest(std::nullopt, std::nullopt, std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -819,7 +817,7 @@ TEST_P(VideoAdapterTest, TestOnOutputFormatRequestFpsReset) { EXPECT_GT(dropped_frames, 0); // Reset frame rate. - OnOutputFormatRequest(kWidth, kHeight, absl::nullopt); + OnOutputFormatRequest(kWidth, kHeight, std::nullopt); for (int i = 0; i < 20; ++i) adapter_wrapper_->AdaptFrame(frame_source_->GetFrame()); @@ -830,8 +828,8 @@ TEST_P(VideoAdapterTest, TestOnOutputFormatRequestFpsReset) { TEST_P(VideoAdapterTest, RequestAspectRatio) { // Request aspect ratio 320/180 (16:9), smaller than input, but no resolution // limit. Expect cropping but no scaling. - adapter_.OnOutputFormatRequest(std::make_pair(320, 180), absl::nullopt, - absl::nullopt); + adapter_.OnOutputFormatRequest(std::make_pair(320, 180), std::nullopt, + std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -841,7 +839,7 @@ TEST_P(VideoAdapterTest, RequestAspectRatio) { EXPECT_EQ(360, out_height_); adapter_.OnOutputFormatRequest(std::make_pair(1280, 720), 1280 * 720 - 1, - absl::nullopt); + std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(2592, 1944, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -854,7 +852,7 @@ TEST_P(VideoAdapterTest, RequestAspectRatio) { TEST_P(VideoAdapterTest, RequestAspectRatioWithDifferentOrientation) { // Request 720x1280, higher than input, but aspect 16:9. Orientation should // not matter, expect cropping but no scaling. - OnOutputFormatRequest(720, 1280, absl::nullopt); + OnOutputFormatRequest(720, 1280, std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -866,8 +864,8 @@ TEST_P(VideoAdapterTest, RequestAspectRatioWithDifferentOrientation) { TEST_P(VideoAdapterTest, InvalidAspectRatioIgnored) { // Request aspect ratio 320/0. Expect no cropping. - adapter_.OnOutputFormatRequest(std::make_pair(320, 0), absl::nullopt, - absl::nullopt); + adapter_.OnOutputFormatRequest(std::make_pair(320, 0), std::nullopt, + std::nullopt); EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); @@ -879,7 +877,7 @@ TEST_P(VideoAdapterTest, InvalidAspectRatioIgnored) { TEST_P(VideoAdapterTest, TestCroppingWithResolutionRequest) { // Ask for 640x360 (16:9 aspect). - OnOutputFormatRequest(640, 360, absl::nullopt); + OnOutputFormatRequest(640, 360, std::nullopt); // Send 640x480 (4:3 aspect). EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0, &cropped_width_, &cropped_height_, &out_width_, @@ -891,7 +889,7 @@ TEST_P(VideoAdapterTest, TestCroppingWithResolutionRequest) { EXPECT_EQ(360, out_height_); // Adapt down one step. - adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, 640 * 360 - 1, + adapter_.OnSinkWants(BuildSinkWants(std::nullopt, 640 * 360 - 1, std::numeric_limits::max())); // Expect cropping to 16:9 format and 3/4 scaling. EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0, &cropped_width_, @@ -903,7 +901,7 @@ TEST_P(VideoAdapterTest, TestCroppingWithResolutionRequest) { EXPECT_EQ(270, out_height_); // Adapt down one step more. 
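As these tests illustrate, a sink steps the adapter down one notch by capping max_pixel_count just below the current output. A rough sketch with values taken from the surrounding tests (a 640x360 output stepping to 480x270); the helper name is ours, not part of the API:

#include <limits>

#include "api/video/video_source_interface.h"
#include "media/base/video_adapter.h"

void StepDownOneNotch(webrtc::VideoAdapter& adapter) {
  webrtc::VideoSinkWants wants;
  wants.max_pixel_count = 640 * 360 - 1;  // just below the current output
  wants.max_framerate_fps = std::numeric_limits<int>::max();
  wants.resolution_alignment = 1;
  wants.is_active = true;
  adapter.OnSinkWants(wants);  // a 640x360 stream now adapts to 480x270
}

The OnSinkWants call that follows applies exactly this kind of one-step cap.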
- adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, 480 * 270 - 1, + adapter_.OnSinkWants(BuildSinkWants(std::nullopt, 480 * 270 - 1, std::numeric_limits::max())); // Expect cropping to 16:9 format and 1/2 scaling. EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0, &cropped_width_, @@ -953,9 +951,8 @@ TEST_P(VideoAdapterTest, TestCroppingWithResolutionRequest) { TEST_P(VideoAdapterTest, TestCroppingOddResolution) { // Ask for 640x360 (16:9 aspect), with 3/16 scaling. - OnOutputFormatRequest(640, 360, absl::nullopt); - adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, - 640 * 360 * 3 / 16 * 3 / 16, + OnOutputFormatRequest(640, 360, std::nullopt); + adapter_.OnSinkWants(BuildSinkWants(std::nullopt, 640 * 360 * 3 / 16 * 3 / 16, std::numeric_limits::max())); // Send 640x480 (4:3 aspect). @@ -975,8 +972,8 @@ TEST_P(VideoAdapterTest, TestAdaptToVerySmallResolution) { // Ask for 1920x1080 (16:9 aspect), with 1/16 scaling. const int w = 1920; const int h = 1080; - OnOutputFormatRequest(w, h, absl::nullopt); - adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, w * h * 1 / 16 * 1 / 16, + OnOutputFormatRequest(w, h, std::nullopt); + adapter_.OnSinkWants(BuildSinkWants(std::nullopt, w * h * 1 / 16 * 1 / 16, std::numeric_limits::max())); // Send 1920x1080 (16:9 aspect). @@ -1018,7 +1015,7 @@ TEST_P(VideoAdapterTest, AdaptFrameResolutionDropWithResolutionRequest) { &cropped_width_, &cropped_height_, &out_width_, &out_height_)); - adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, 640 * 480 - 1, + adapter_.OnSinkWants(BuildSinkWants(std::nullopt, 640 * 480 - 1, std::numeric_limits::max())); // Still expect all frames to be dropped @@ -1061,7 +1058,7 @@ TEST(VideoAdapterTestMultipleOrientation, TestNormal) { EXPECT_TRUE(video_adapter.AdaptFrameResolution( /* in_width= */ 480, /* in_height= */ 640, - /* in_timestamp_ns= */ rtc::kNumNanosecsPerSec / 30, &cropped_width, + /* in_timestamp_ns= */ webrtc::kNumNanosecsPerSec / 30, &cropped_width, &cropped_height, &out_width, &out_height)); EXPECT_EQ(360, cropped_width); EXPECT_EQ(640, cropped_height); @@ -1089,7 +1086,7 @@ TEST(VideoAdapterTestMultipleOrientation, TestForcePortrait) { EXPECT_TRUE(video_adapter.AdaptFrameResolution( /* in_width= */ 480, /* in_height= */ 640, - /* in_timestamp_ns= */ rtc::kNumNanosecsPerSec / 30, &cropped_width, + /* in_timestamp_ns= */ webrtc::kNumNanosecsPerSec / 30, &cropped_width, &cropped_height, &out_width, &out_height)); EXPECT_EQ(360, cropped_width); EXPECT_EQ(640, cropped_height); @@ -1098,9 +1095,7 @@ TEST(VideoAdapterTestMultipleOrientation, TestForcePortrait) { } TEST_P(VideoAdapterTest, AdaptResolutionInStepsFirst3_4) { - const int kWidth = 1280; - const int kHeight = 720; - OnOutputFormatRequest(kWidth, kHeight, absl::nullopt); // 16:9 aspect. + OnOutputFormatRequest(kWidth, kHeight, std::nullopt); // 16:9 aspect. // Scale factors: 3/4, 2/3, 3/4, 2/3, ... // Scale : 3/4, 1/2, 3/8, 1/4, 3/16, 1/8. @@ -1112,7 +1107,7 @@ TEST_P(VideoAdapterTest, AdaptResolutionInStepsFirst3_4) { for (size_t i = 0; i < arraysize(kExpectedWidths); ++i) { // Adapt down one step. 
- adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, + adapter_.OnSinkWants(BuildSinkWants(std::nullopt, request_width * request_height - 1, std::numeric_limits::max())); EXPECT_TRUE(adapter_.AdaptFrameResolution(kWidth, kHeight, 0, @@ -1126,24 +1121,25 @@ TEST_P(VideoAdapterTest, AdaptResolutionInStepsFirst3_4) { } TEST_P(VideoAdapterTest, AdaptResolutionInStepsFirst2_3) { - const int kWidth = 1920; - const int kHeight = 1080; - OnOutputFormatRequest(kWidth, kHeight, absl::nullopt); // 16:9 aspect. + const int kWidth1080p = 1920; + const int kHeight1080p = 1080; + OnOutputFormatRequest(kWidth1080p, kHeight1080p, + std::nullopt); // 16:9 aspect. // Scale factors: 2/3, 3/4, 2/3, 3/4, ... // Scale: 2/3, 1/2, 1/3, 1/4, 1/6, 1/8, 1/12. const int kExpectedWidths[] = {1280, 960, 640, 480, 320, 240, 160}; const int kExpectedHeights[] = {720, 540, 360, 270, 180, 135, 90}; - int request_width = kWidth; - int request_height = kHeight; + int request_width = kWidth1080p; + int request_height = kHeight1080p; for (size_t i = 0; i < arraysize(kExpectedWidths); ++i) { // Adapt down one step. - adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, + adapter_.OnSinkWants(BuildSinkWants(std::nullopt, request_width * request_height - 1, std::numeric_limits::max())); - EXPECT_TRUE(adapter_.AdaptFrameResolution(kWidth, kHeight, 0, + EXPECT_TRUE(adapter_.AdaptFrameResolution(kWidth1080p, kHeight1080p, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_)); EXPECT_EQ(kExpectedWidths[i], out_width_); @@ -1154,26 +1150,27 @@ TEST_P(VideoAdapterTest, AdaptResolutionInStepsFirst2_3) { } TEST_P(VideoAdapterTest, AdaptResolutionInStepsFirst2x2_3) { - const int kWidth = 1440; - const int kHeight = 1080; - OnOutputFormatRequest(kWidth, kHeight, absl::nullopt); // 4:3 aspect. + const int kWidth1080p4to3 = 1440; + const int kHeight1080p4to3 = 1080; + OnOutputFormatRequest(kWidth1080p4to3, kHeight1080p4to3, + std::nullopt); // 4:3 aspect. // Scale factors: 2/3, 2/3, 3/4, 2/3, 3/4, ... // Scale : 2/3, 4/9, 1/3, 2/9, 1/6, 1/9, 1/12, 1/18, 1/24, 1/36. const int kExpectedWidths[] = {960, 640, 480, 320, 240, 160, 120, 80, 60, 40}; const int kExpectedHeights[] = {720, 480, 360, 240, 180, 120, 90, 60, 45, 30}; - int request_width = kWidth; - int request_height = kHeight; + int request_width = kWidth1080p4to3; + int request_height = kHeight1080p4to3; for (size_t i = 0; i < arraysize(kExpectedWidths); ++i) { // Adapt down one step. 
- adapter_.OnSinkWants(BuildSinkWants(absl::nullopt, + adapter_.OnSinkWants(BuildSinkWants(std::nullopt, request_width * request_height - 1, std::numeric_limits::max())); - EXPECT_TRUE(adapter_.AdaptFrameResolution(kWidth, kHeight, 0, - &cropped_width_, &cropped_height_, - &out_width_, &out_height_)); + EXPECT_TRUE(adapter_.AdaptFrameResolution( + kWidth1080p4to3, kHeight1080p4to3, 0, &cropped_width_, &cropped_height_, + &out_width_, &out_height_)); EXPECT_EQ(kExpectedWidths[i], out_width_); EXPECT_EQ(kExpectedHeights[i], out_height_); request_width = out_width_; @@ -1194,12 +1191,12 @@ TEST_P(VideoAdapterTest, AdaptResolutionWithSinkAlignment) { int frame_num = 1; for (const int sink_alignment : {2, 3, 4, 5}) { adapter_.OnSinkWants( - BuildSinkWants(absl::nullopt, std::numeric_limits::max(), + BuildSinkWants(std::nullopt, std::numeric_limits::max(), std::numeric_limits::max(), sink_alignment)); EXPECT_TRUE(adapter_.AdaptFrameResolution( kSourceWidth, kSourceHeight, - frame_num * rtc::kNumNanosecsPerSec / kSourceFramerate, &cropped_width_, - &cropped_height_, &out_width_, &out_height_)); + frame_num * webrtc::kNumNanosecsPerSec / kSourceFramerate, + &cropped_width_, &cropped_height_, &out_width_, &out_height_)); EXPECT_EQ(out_width_ % sink_alignment, 0); EXPECT_EQ(out_height_ % sink_alignment, 0); @@ -1208,8 +1205,9 @@ TEST_P(VideoAdapterTest, AdaptResolutionWithSinkAlignment) { } // Verify the cases the OnOutputFormatRequest is ignored and -// requested_resolution is used instead. -TEST_P(VideoAdapterTest, UseRequestedResolutionInsteadOfOnOutputFormatRequest) { +// scale_resolution_down_to is used instead. +TEST_P(VideoAdapterTest, + UseScaleResolutionDownToInsteadOfOnOutputFormatRequest) { { // Both new and old API active => Use OnOutputFormatRequest OnOutputFormatRequest(640, 360, kDefaultFps); @@ -1224,7 +1222,7 @@ TEST_P(VideoAdapterTest, UseRequestedResolutionInsteadOfOnOutputFormatRequest) { } { // New API active, old API inactive, ignore OnOutputFormatRequest and use - // requested_resolution. + // scale_resolution_down_to. OnOutputFormatRequest(640, 360, kDefaultFps); adapter_.OnSinkWants( BuildSinkWants(Resolution{.width = 960, .height = 540}, @@ -1240,7 +1238,7 @@ TEST_P(VideoAdapterTest, UseRequestedResolutionInsteadOfOnOutputFormatRequest) { // New API inactive, old API inactive, use OnOutputFormatRequest. OnOutputFormatRequest(640, 360, kDefaultFps); adapter_.OnSinkWants( - BuildSinkWants(absl::nullopt, + BuildSinkWants(std::nullopt, /* any_active_without_requested_resolution= */ false)); EXPECT_THAT( @@ -1262,7 +1260,7 @@ TEST_P(VideoAdapterTest, UseRequestedResolutionInsteadOfOnOutputFormatRequest) { Eq(Resolution{.width = 960, .height = 540})); // This is ignored since there is not any active NOT using - // requested_resolution. + // scale_resolution_down_to. OnOutputFormatRequest(320, 180, kDefaultFps); EXPECT_THAT( @@ -1272,7 +1270,7 @@ TEST_P(VideoAdapterTest, UseRequestedResolutionInsteadOfOnOutputFormatRequest) { // Disable new API => fallback to last OnOutputFormatRequest. adapter_.OnSinkWants( - BuildSinkWants(absl::nullopt, + BuildSinkWants(std::nullopt, /* any_active_without_requested_resolution= */ false)); EXPECT_THAT( @@ -1282,6 +1280,48 @@ TEST_P(VideoAdapterTest, UseRequestedResolutionInsteadOfOnOutputFormatRequest) { } } +TEST_P(VideoAdapterTest, ScaleResolutionDownToIsOrientationAgnostic) { + // Request 1280x720 when frame is 720x1280. 
+ { + adapter_.OnSinkWants( + BuildSinkWants(Resolution{.width = 1280, .height = 720}, + /* any_active_without_requested_resolution= */ false)); + + EXPECT_THAT( + AdaptFrameResolution(/* input frame */ {.width = 720, .height = 1280}) + .first, + Eq(Resolution{.width = 720, .height = 1280})); + } + // Request 720x1280 when frame is 1280x720. + { + adapter_.OnSinkWants( + BuildSinkWants(Resolution{.width = 720, .height = 1280}, + /* any_active_without_requested_resolution= */ false)); + + EXPECT_THAT( + AdaptFrameResolution(/* input frame */ {.width = 1280, .height = 720}) + .first, + Eq(Resolution{.width = 1280, .height = 720})); + } +} + +TEST_P(VideoAdapterTest, ScaleResolutionDownToMaintainsAspectRatio) { + // Request 720x720. + adapter_.OnSinkWants( + BuildSinkWants(Resolution{.width = 720, .height = 720}, + /* any_active_without_requested_resolution= */ false)); + + // A 1280x720 frame restricted to 720x720 produces 720x405. + EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, /*in_timestamp_ns=*/0, + &cropped_width_, &cropped_height_, + &out_width_, &out_height_)); + EXPECT_EQ(out_width_, 720); + EXPECT_EQ(out_height_, 405); + // No cropping needed (coverage for https://crbug.com/webrtc/369865055). + EXPECT_EQ(cropped_width_, 1280); + EXPECT_EQ(cropped_height_, 720); +} + class VideoAdapterWithSourceAlignmentTest : public VideoAdapterTest { protected: static constexpr int kSourceResolutionAlignment = 7; @@ -1318,7 +1358,7 @@ TEST_P(VideoAdapterWithSourceAlignmentTest, AdaptResolutionWithSinkAlignment) { OnOutputFormatRequest(kRequestedWidth, kRequestedHeight, kRequestedFramerate); adapter_.OnSinkWants(BuildSinkWants( - absl::nullopt, std::numeric_limits::max(), + std::nullopt, std::numeric_limits::max(), std::numeric_limits::max(), kSinkResolutionAlignment)); EXPECT_TRUE(adapter_.AdaptFrameResolution( kSourceWidth, kSourceHeight, /*in_timestamp_ns=*/0, &cropped_width_, @@ -1329,8 +1369,27 @@ TEST_P(VideoAdapterWithSourceAlignmentTest, AdaptResolutionWithSinkAlignment) { EXPECT_EQ(out_height_ % kSinkResolutionAlignment, 0); } +TEST_P(VideoAdapterWithSourceAlignmentTest, + ScaleResolutionDownToMaintainsAspectRatioWithAlignment) { + // Request 720x720. + adapter_.OnSinkWants( + BuildSinkWants(Resolution{.width = 720, .height = 720}, + /* any_active_without_requested_resolution= */ false)); + + // A 1280x720 frame restricted to 720x720 produces 720x405 but this is not a + // multiple of `kSourceResolutionAlignment` (= 7), the rounded up multiple of + // this value that is less than the restrictions (720) is 714x406. 
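The comment above compresses two rounding steps. A hypothetical helper (not the adapter's real code) that reproduces the arithmetic, so the expected 714x406 can be checked by hand:

// Round to the nearest multiple of `alignment`, stepping down if that would
// exceed `limit` (the 720x720 restriction in this test).
constexpr int AlignWithinLimit(int value, int alignment, int limit) {
  int aligned = ((value + alignment / 2) / alignment) * alignment;
  return aligned > limit ? aligned - alignment : aligned;
}
static_assert(AlignWithinLimit(720, 7, 720) == 714, "width");
static_assert(AlignWithinLimit(405, 7, 720) == 406, "height");

These are exactly the values the assertions below expect.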
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, /*in_timestamp_ns=*/0, + &cropped_width_, &cropped_height_, + &out_width_, &out_height_)); + EXPECT_EQ(out_width_, 714); + EXPECT_EQ(out_height_, 406); + EXPECT_EQ(out_width_ % kSourceResolutionAlignment, 0); + EXPECT_EQ(out_height_ % kSourceResolutionAlignment, 0); +} + INSTANTIATE_TEST_SUITE_P(OnOutputFormatRequests, VideoAdapterWithSourceAlignmentTest, ::testing::Values(true, false)); -} // namespace cricket +} // namespace webrtc diff --git a/media/base/video_broadcaster.cc b/media/base/video_broadcaster.cc index 43c17734e3..7f5cf85958 100644 --- a/media/base/video_broadcaster.cc +++ b/media/base/video_broadcaster.cc @@ -11,25 +11,32 @@ #include "media/base/video_broadcaster.h" #include +#include +#include #include -#include "absl/types/optional.h" +#include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" #include "api/video/video_rotation.h" -#include "media/base/video_common.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" +#include "api/video_track_source_constraints.h" +#include "media/base/video_source_base.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" -namespace rtc { +namespace webrtc { VideoBroadcaster::VideoBroadcaster() = default; VideoBroadcaster::~VideoBroadcaster() = default; -void VideoBroadcaster::AddOrUpdateSink( - VideoSinkInterface* sink, - const VideoSinkWants& wants) { +void VideoBroadcaster::AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) { RTC_DCHECK(sink != nullptr); - webrtc::MutexLock lock(&sinks_and_wants_lock_); + MutexLock lock(&sinks_and_wants_lock_); if (!FindSinkPair(sink)) { // `Sink` is a new sink, which didn't receive previous frame. 
previous_frame_sent_to_all_sinks_ = false; @@ -45,26 +52,25 @@ void VideoBroadcaster::AddOrUpdateSink( UpdateWants(); } -void VideoBroadcaster::RemoveSink( - VideoSinkInterface* sink) { +void VideoBroadcaster::RemoveSink(VideoSinkInterface* sink) { RTC_DCHECK(sink != nullptr); - webrtc::MutexLock lock(&sinks_and_wants_lock_); + MutexLock lock(&sinks_and_wants_lock_); VideoSourceBase::RemoveSink(sink); UpdateWants(); } bool VideoBroadcaster::frame_wanted() const { - webrtc::MutexLock lock(&sinks_and_wants_lock_); + MutexLock lock(&sinks_and_wants_lock_); return !sink_pairs().empty(); } VideoSinkWants VideoBroadcaster::wants() const { - webrtc::MutexLock lock(&sinks_and_wants_lock_); + MutexLock lock(&sinks_and_wants_lock_); return current_wants_; } -void VideoBroadcaster::OnFrame(const webrtc::VideoFrame& frame) { - webrtc::MutexLock lock(&sinks_and_wants_lock_); +void VideoBroadcaster::OnFrame(const VideoFrame& frame) { + MutexLock lock(&sinks_and_wants_lock_); bool current_frame_was_discarded = false; for (auto& sink_pair : sink_pairs()) { if (sink_pair.wants.rotation_applied && @@ -79,19 +85,18 @@ void VideoBroadcaster::OnFrame(const webrtc::VideoFrame& frame) { continue; } if (sink_pair.wants.black_frames) { - webrtc::VideoFrame black_frame = - webrtc::VideoFrame::Builder() - .set_video_frame_buffer( - GetBlackFrameBuffer(frame.width(), frame.height())) - .set_rotation(frame.rotation()) - .set_timestamp_us(frame.timestamp_us()) - .set_id(frame.id()) - .build(); + VideoFrame black_frame = VideoFrame::Builder() + .set_video_frame_buffer(GetBlackFrameBuffer( + frame.width(), frame.height())) + .set_rotation(frame.rotation()) + .set_timestamp_us(frame.timestamp_us()) + .set_id(frame.id()) + .build(); sink_pair.sink->OnFrame(black_frame); } else if (!previous_frame_sent_to_all_sinks_ && frame.has_update_rect()) { // Since last frame was not sent to some sinks, no reliable update // information is available, so we need to clear the update rect. - webrtc::VideoFrame copy = frame; + VideoFrame copy = frame; copy.clear_update_rect(); sink_pair.sink->OnFrame(copy); } else { @@ -102,15 +107,15 @@ void VideoBroadcaster::OnFrame(const webrtc::VideoFrame& frame) { } void VideoBroadcaster::OnDiscardedFrame() { - webrtc::MutexLock lock(&sinks_and_wants_lock_); + MutexLock lock(&sinks_and_wants_lock_); for (auto& sink_pair : sink_pairs()) { sink_pair.sink->OnDiscardedFrame(); } } void VideoBroadcaster::ProcessConstraints( - const webrtc::VideoTrackSourceConstraints& constraints) { - webrtc::MutexLock lock(&sinks_and_wants_lock_); + const VideoTrackSourceConstraints& constraints) { + MutexLock lock(&sinks_and_wants_lock_); RTC_LOG(LS_INFO) << __func__ << " min_fps " << constraints.min_fps.value_or(-1) << " max_fps " << constraints.max_fps.value_or(-1) << " broadcasting to " @@ -131,7 +136,7 @@ void VideoBroadcaster::UpdateWants() { // "ignore" encoders that are not active. But that would // probably require a controlled roll out with a field trials? // To play it safe, only ignore inactive encoders is there is an - // active encoder using the new api (requested_resolution), + // active encoder using the new api (scale_resolution_down_to), // this means that there is only a behavioural change when using new // api. 
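To make the policy in the comment above concrete, here is a condensed, illustrative version of the aggregation the loop below performs over the remaining (non-ignored) sinks. Field names follow VideoSinkWants, whose defaults are permissive (maximum frame rate, alignment 1), but this is a sketch rather than the real UpdateWants:

#include <algorithm>
#include <numeric>
#include <vector>

#include "api/video/video_source_interface.h"

webrtc::VideoSinkWants Combine(const std::vector<webrtc::VideoSinkWants>& sinks) {
  webrtc::VideoSinkWants combined;
  for (const auto& w : sinks) {
    // The most restrictive frame-rate cap wins.
    combined.max_framerate_fps =
        std::min(combined.max_framerate_fps, w.max_framerate_fps);
    // Every sink's alignment must be honoured, hence the least common multiple.
    combined.resolution_alignment =
        std::lcm(combined.resolution_alignment, w.resolution_alignment);
  }
  return combined;
}

With that policy in mind, the ignore_inactive_encoders_old_api flag below decides which sinks take part in the aggregation at all.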
bool ignore_inactive_encoders_old_api = false; @@ -168,11 +173,11 @@ void VideoBroadcaster::UpdateWants() { if (sink.wants.max_framerate_fps < wants.max_framerate_fps) { wants.max_framerate_fps = sink.wants.max_framerate_fps; } - wants.resolution_alignment = cricket::LeastCommonMultiple( - wants.resolution_alignment, sink.wants.resolution_alignment); + wants.resolution_alignment = + std::lcm(wants.resolution_alignment, sink.wants.resolution_alignment); - // Pick MAX(requested_resolution) since the actual can be downscaled - // in encoder instead. + // Pick MAX(requested_resolution) since the actual can be downscaled in + // encoder instead. if (sink.wants.requested_resolution) { if (!wants.requested_resolution) { wants.requested_resolution = sink.wants.requested_resolution; @@ -198,17 +203,17 @@ void VideoBroadcaster::UpdateWants() { current_wants_ = wants; } -const rtc::scoped_refptr& -VideoBroadcaster::GetBlackFrameBuffer(int width, int height) { +const scoped_refptr& VideoBroadcaster::GetBlackFrameBuffer( + int width, + int height) { if (!black_frame_buffer_ || black_frame_buffer_->width() != width || black_frame_buffer_->height() != height) { - rtc::scoped_refptr buffer = - webrtc::I420Buffer::Create(width, height); - webrtc::I420Buffer::SetBlack(buffer.get()); + scoped_refptr buffer = I420Buffer::Create(width, height); + I420Buffer::SetBlack(buffer.get()); black_frame_buffer_ = buffer; } return black_frame_buffer_; } -} // namespace rtc +} // namespace webrtc diff --git a/media/base/video_broadcaster.h b/media/base/video_broadcaster.h index c253d44b09..9d6f7a7ecc 100644 --- a/media/base/video_broadcaster.h +++ b/media/base/video_broadcaster.h @@ -11,24 +11,27 @@ #ifndef MEDIA_BASE_VIDEO_BROADCASTER_H_ #define MEDIA_BASE_VIDEO_BROADCASTER_H_ -#include "api/media_stream_interface.h" +#include + #include "api/scoped_refptr.h" -#include "api/sequence_checker.h" +#include "api/video/video_frame.h" #include "api/video/video_frame_buffer.h" +#include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" +#include "api/video_track_source_constraints.h" #include "media/base/video_source_base.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" -namespace rtc { +namespace webrtc { // VideoBroadcaster broadcast video frames to sinks and combines VideoSinkWants -// from its sinks. It does that by implementing rtc::VideoSourceInterface and -// rtc::VideoSinkInterface. The class is threadsafe; methods may be called on +// from its sinks. It does that by implementing webrtc::VideoSourceInterface and +// webrtc::VideoSinkInterface. The class is threadsafe; methods may be called on // any thread. This is needed because VideoStreamEncoder calls AddOrUpdateSink // both on the worker thread and on the encoder task queue. class VideoBroadcaster : public VideoSourceBase, - public VideoSinkInterface { + public VideoSinkInterface { public: VideoBroadcaster(); ~VideoBroadcaster() override; @@ -37,9 +40,9 @@ class VideoBroadcaster : public VideoSourceBase, // ProcessConstraints has been called previously, the new sink's // OnConstraintsCalled method will be invoked with the most recent // constraints. - void AddOrUpdateSink(VideoSinkInterface* sink, + void AddOrUpdateSink(VideoSinkInterface* sink, const VideoSinkWants& wants) override; - void RemoveSink(VideoSinkInterface* sink) override; + void RemoveSink(VideoSinkInterface* sink) override; // Returns true if the next frame will be delivered to at least one sink. 
bool frame_wanted() const; @@ -52,31 +55,38 @@ class VideoBroadcaster : public VideoSourceBase, // it will never receive a frame with pending rotation. Our caller // may pass in frames without precise synchronization with changes // to the VideoSinkWants. - void OnFrame(const webrtc::VideoFrame& frame) override; + void OnFrame(const VideoFrame& frame) override; void OnDiscardedFrame() override; // Called on the network thread when constraints change. Forwards the // constraints to sinks added with AddOrUpdateSink via OnConstraintsChanged. - void ProcessConstraints( - const webrtc::VideoTrackSourceConstraints& constraints); + void ProcessConstraints(const VideoTrackSourceConstraints& constraints); protected: void UpdateWants() RTC_EXCLUSIVE_LOCKS_REQUIRED(sinks_and_wants_lock_); - const rtc::scoped_refptr& GetBlackFrameBuffer( - int width, - int height) RTC_EXCLUSIVE_LOCKS_REQUIRED(sinks_and_wants_lock_); + const scoped_refptr& GetBlackFrameBuffer(int width, + int height) + RTC_EXCLUSIVE_LOCKS_REQUIRED(sinks_and_wants_lock_); - mutable webrtc::Mutex sinks_and_wants_lock_; + mutable Mutex sinks_and_wants_lock_; VideoSinkWants current_wants_ RTC_GUARDED_BY(sinks_and_wants_lock_); - rtc::scoped_refptr black_frame_buffer_; + scoped_refptr black_frame_buffer_; bool previous_frame_sent_to_all_sinks_ RTC_GUARDED_BY(sinks_and_wants_lock_) = true; - absl::optional last_constraints_ + std::optional last_constraints_ RTC_GUARDED_BY(sinks_and_wants_lock_); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::VideoBroadcaster; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_VIDEO_BROADCASTER_H_ diff --git a/media/base/video_broadcaster_unittest.cc b/media/base/video_broadcaster_unittest.cc index bb80c11930..7d0dfda200 100644 --- a/media/base/video_broadcaster_unittest.cc +++ b/media/base/video_broadcaster_unittest.cc @@ -11,20 +11,23 @@ #include "media/base/video_broadcaster.h" #include +#include -#include "absl/types/optional.h" +#include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" #include "api/video/video_frame.h" #include "api/video/video_rotation.h" +#include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" +#include "api/video_track_source_constraints.h" #include "media/base/fake_video_renderer.h" #include "test/gmock.h" #include "test/gtest.h" -using cricket::FakeVideoRenderer; -using rtc::VideoBroadcaster; -using rtc::VideoSinkWants; -using FrameSize = rtc::VideoSinkWants::FrameSize; +using ::webrtc::FakeVideoRenderer; +using ::webrtc::VideoBroadcaster; +using ::webrtc::VideoSinkWants; +using FrameSize = webrtc::VideoSinkWants::FrameSize; using ::testing::AllOf; using ::testing::Eq; @@ -32,7 +35,7 @@ using ::testing::Field; using ::testing::Mock; using ::testing::Optional; -class MockSink : public rtc::VideoSinkInterface { +class MockSink : public webrtc::VideoSinkInterface { public: void OnFrame(const webrtc::VideoFrame&) override {} @@ -47,7 +50,7 @@ TEST(VideoBroadcasterTest, frame_wanted) { EXPECT_FALSE(broadcaster.frame_wanted()); FakeVideoRenderer sink; - broadcaster.AddOrUpdateSink(&sink, rtc::VideoSinkWants()); + broadcaster.AddOrUpdateSink(&sink, webrtc::VideoSinkWants()); EXPECT_TRUE(broadcaster.frame_wanted()); broadcaster.RemoveSink(&sink); @@ -59,12 +62,12 @@ TEST(VideoBroadcasterTest, OnFrame) { 
FakeVideoRenderer sink1; FakeVideoRenderer sink2; - broadcaster.AddOrUpdateSink(&sink1, rtc::VideoSinkWants()); - broadcaster.AddOrUpdateSink(&sink2, rtc::VideoSinkWants()); + broadcaster.AddOrUpdateSink(&sink1, webrtc::VideoSinkWants()); + broadcaster.AddOrUpdateSink(&sink2, webrtc::VideoSinkWants()); static int kWidth = 100; static int kHeight = 50; - rtc::scoped_refptr buffer( + webrtc::scoped_refptr buffer( webrtc::I420Buffer::Create(kWidth, kHeight)); // Initialize, to avoid warnings on use of initialized values. webrtc::I420Buffer::SetBlack(buffer.get()); @@ -84,7 +87,7 @@ TEST(VideoBroadcasterTest, OnFrame) { EXPECT_EQ(1, sink1.num_rendered_frames()); EXPECT_EQ(2, sink2.num_rendered_frames()); - broadcaster.AddOrUpdateSink(&sink1, rtc::VideoSinkWants()); + broadcaster.AddOrUpdateSink(&sink1, webrtc::VideoSinkWants()); broadcaster.OnFrame(frame); EXPECT_EQ(2, sink1.num_rendered_frames()); EXPECT_EQ(3, sink2.num_rendered_frames()); @@ -218,7 +221,7 @@ TEST(VideoBroadcasterTest, SinkWantsBlackFrames) { wants2.black_frames = false; broadcaster.AddOrUpdateSink(&sink2, wants2); - rtc::scoped_refptr buffer( + webrtc::scoped_refptr buffer( webrtc::I420Buffer::Create(100, 200)); // Makes it not all black. buffer->InitializeData(); @@ -297,21 +300,21 @@ TEST(VideoBroadcasterTest, ForwardsConstraintsToSink) { EXPECT_CALL(sink, OnConstraintsChanged(AllOf( Field(&webrtc::VideoTrackSourceConstraints::min_fps, - Eq(absl::nullopt)), + Eq(std::nullopt)), Field(&webrtc::VideoTrackSourceConstraints::max_fps, - Eq(absl::nullopt))))); + Eq(std::nullopt))))); broadcaster.ProcessConstraints( - webrtc::VideoTrackSourceConstraints{absl::nullopt, absl::nullopt}); + webrtc::VideoTrackSourceConstraints{std::nullopt, std::nullopt}); Mock::VerifyAndClearExpectations(&sink); EXPECT_CALL( sink, OnConstraintsChanged(AllOf( Field(&webrtc::VideoTrackSourceConstraints::min_fps, - Eq(absl::nullopt)), + Eq(std::nullopt)), Field(&webrtc::VideoTrackSourceConstraints::max_fps, Optional(3))))); broadcaster.ProcessConstraints( - webrtc::VideoTrackSourceConstraints{absl::nullopt, 3}); + webrtc::VideoTrackSourceConstraints{std::nullopt, 3}); Mock::VerifyAndClearExpectations(&sink); EXPECT_CALL( @@ -319,9 +322,9 @@ TEST(VideoBroadcasterTest, ForwardsConstraintsToSink) { OnConstraintsChanged(AllOf( Field(&webrtc::VideoTrackSourceConstraints::min_fps, Optional(2)), Field(&webrtc::VideoTrackSourceConstraints::max_fps, - Eq(absl::nullopt))))); + Eq(std::nullopt))))); broadcaster.ProcessConstraints( - webrtc::VideoTrackSourceConstraints{2, absl::nullopt}); + webrtc::VideoTrackSourceConstraints{2, std::nullopt}); Mock::VerifyAndClearExpectations(&sink); EXPECT_CALL( @@ -332,7 +335,7 @@ TEST(VideoBroadcasterTest, ForwardsConstraintsToSink) { broadcaster.ProcessConstraints(webrtc::VideoTrackSourceConstraints{2, 3}); } -TEST(VideoBroadcasterTest, AppliesMaxOfSinkWantsRequestedResolution) { +TEST(VideoBroadcasterTest, AppliesMaxOfSinkWantsScaleResolutionDownTo) { VideoBroadcaster broadcaster; FakeVideoRenderer sink1; @@ -374,7 +377,7 @@ TEST(VideoBroadcasterTest, AnyActive) { EXPECT_EQ(false, broadcaster.wants().is_active); } -TEST(VideoBroadcasterTest, AnyActiveWithoutRequestedResolution) { +TEST(VideoBroadcasterTest, AnyActiveWithoutScaleResolutionDownTo) { VideoBroadcaster broadcaster; FakeVideoRenderer sink1; @@ -402,8 +405,9 @@ TEST(VideoBroadcasterTest, AnyActiveWithoutRequestedResolution) { } // This verifies that the VideoSinkWants from a Sink that is_active = false -// is ignored IF there is an active sink using new api 
(Requested_Resolution). -// The uses resolution_alignment for verification. +// is ignored IF there is an active sink using requested_resolution (controlled +// via new API scale_resolution_down_to). The uses resolution_alignment for +// verification. TEST(VideoBroadcasterTest, IgnoreInactiveSinkIfNewApiUsed) { VideoBroadcaster broadcaster; diff --git a/media/base/video_common.cc b/media/base/video_common.cc index 0ac3b3790e..bc18d85b70 100644 --- a/media/base/video_common.cc +++ b/media/base/video_common.cc @@ -10,12 +10,16 @@ #include "media/base/video_common.h" +#include +#include +#include + #include "api/array_view.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" -namespace cricket { +namespace webrtc { struct FourCCAliasEntry { uint32_t alias; @@ -70,7 +74,7 @@ std::string VideoFormat::ToString() const { } char buf[256]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); sb << fourcc_name << width << "x" << height << "x" << IntervalToFpsFloat(interval); return sb.str(); @@ -79,19 +83,13 @@ std::string VideoFormat::ToString() const { int GreatestCommonDivisor(int a, int b) { RTC_DCHECK_GE(a, 0); RTC_DCHECK_GT(b, 0); - int c = a % b; - while (c != 0) { - a = b; - b = c; - c = a % b; - } - return b; + return std::gcd(a, b); } int LeastCommonMultiple(int a, int b) { RTC_DCHECK_GT(a, 0); RTC_DCHECK_GT(b, 0); - return a * (b / GreatestCommonDivisor(a, b)); + return std::lcm(a, b); } -} // namespace cricket +} // namespace webrtc diff --git a/media/base/video_common.h b/media/base/video_common.h index f27e008d26..38219a8d60 100644 --- a/media/base/video_common.h +++ b/media/base/video_common.h @@ -17,10 +17,11 @@ #include +#include "absl/base/macros.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/time_utils.h" -namespace cricket { +namespace webrtc { ////////////////////////////////////////////////////////////////////////////// // Definition of FourCC codes @@ -141,7 +142,7 @@ struct VideoFormatPod { struct RTC_EXPORT VideoFormat : VideoFormatPod { static const int64_t kMinimumInterval = - rtc::kNumNanosecsPerSec / 10000; // 10k fps. + webrtc::kNumNanosecsPerSec / 10000; // 10k fps. VideoFormat() { Construct(0, 0, 0, 0); } @@ -161,21 +162,21 @@ struct RTC_EXPORT VideoFormat : VideoFormatPod { } static int64_t FpsToInterval(int fps) { - return fps ? rtc::kNumNanosecsPerSec / fps : kMinimumInterval; + return fps ? webrtc::kNumNanosecsPerSec / fps : kMinimumInterval; } static int IntervalToFps(int64_t interval) { if (!interval) { return 0; } - return static_cast(rtc::kNumNanosecsPerSec / interval); + return static_cast(webrtc::kNumNanosecsPerSec / interval); } static float IntervalToFpsFloat(int64_t interval) { if (!interval) { return 0.f; } - return static_cast(rtc::kNumNanosecsPerSec) / + return static_cast(webrtc::kNumNanosecsPerSec) / static_cast(interval); } @@ -214,11 +215,71 @@ struct RTC_EXPORT VideoFormat : VideoFormatPod { }; // Returns the largest positive integer that divides both `a` and `b`. -int GreatestCommonDivisor(int a, int b); +ABSL_DEPRECATE_AND_INLINE() int GreatestCommonDivisor(int a, int b); // Returns the smallest positive integer that is divisible by both `a` and `b`. -int LeastCommonMultiple(int a, int b); +ABSL_DEPRECATE_AND_INLINE() int LeastCommonMultiple(int a, int b); + +} // namespace webrtc +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
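One detail worth noting from the video_common.cc hunk above: the hand-written Euclidean loop is replaced by std::gcd, and LeastCommonMultiple by std::lcm, both provided by <numeric> since C++17. A quick sanity sketch, independent of WebRTC:

#include <numeric>

static_assert(std::gcd(24, 54) == 6);
static_assert(std::lcm(2, 3) == 6);
static_assert(std::lcm(16, 32) == 32);

The values mirror the GreatestCommonDivisor/LeastCommonMultiple unit tests that this change deletes further down. The compatibility aliases gated below keep the old cricket:: spellings compiling while callers migrate.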
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::CanonicalFourCC; +using ::webrtc::FourCC; +using ::webrtc::FOURCC_24BG; +using ::webrtc::FOURCC_2VUY; +using ::webrtc::FOURCC_ABGR; +using ::webrtc::FOURCC_ANY; +using ::webrtc::FOURCC_ARGB; +using ::webrtc::FOURCC_BA81; +using ::webrtc::FOURCC_BGGR; +using ::webrtc::FOURCC_BGR3; +using ::webrtc::FOURCC_BGRA; +using ::webrtc::FOURCC_CM24; +using ::webrtc::FOURCC_CM32; +using ::webrtc::FOURCC_DMB1; +using ::webrtc::FOURCC_GBRG; +using ::webrtc::FOURCC_GRBG; +using ::webrtc::FOURCC_H264; +using ::webrtc::FOURCC_HDYC; +using ::webrtc::FOURCC_I400; +using ::webrtc::FOURCC_I411; +using ::webrtc::FOURCC_I420; +using ::webrtc::FOURCC_I422; +using ::webrtc::FOURCC_I444; +using ::webrtc::FOURCC_IYUV; +using ::webrtc::FOURCC_J400; +using ::webrtc::FOURCC_J420; +using ::webrtc::FOURCC_JPEG; +using ::webrtc::FOURCC_M420; +using ::webrtc::FOURCC_MJPG; +using ::webrtc::FOURCC_NV12; +using ::webrtc::FOURCC_NV21; +using ::webrtc::FOURCC_R444; +using ::webrtc::FOURCC_RAW; +using ::webrtc::FOURCC_RGB3; +using ::webrtc::FOURCC_RGBA; +using ::webrtc::FOURCC_RGBO; +using ::webrtc::FOURCC_RGBP; +using ::webrtc::FOURCC_RGGB; +using ::webrtc::FOURCC_UYVY; +using ::webrtc::FOURCC_YU12; +using ::webrtc::FOURCC_YU16; +using ::webrtc::FOURCC_YU24; +using ::webrtc::FOURCC_YUVS; +using ::webrtc::FOURCC_YUY2; +using ::webrtc::FOURCC_YUYV; +using ::webrtc::FOURCC_YV12; +using ::webrtc::FOURCC_YV16; +using ::webrtc::FOURCC_YV24; +using ::webrtc::GetFourccName; +using ::webrtc::GreatestCommonDivisor; +using ::webrtc::LeastCommonMultiple; +using ::webrtc::VideoFormat; +using ::webrtc::VideoFormatPod; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_VIDEO_COMMON_H_ diff --git a/media/base/video_common_unittest.cc b/media/base/video_common_unittest.cc index 3f445c7769..a813aa3fa7 100644 --- a/media/base/video_common_unittest.cc +++ b/media/base/video_common_unittest.cc @@ -10,9 +10,10 @@ #include "media/base/video_common.h" +#include "rtc_base/time_utils.h" #include "test/gtest.h" -namespace cricket { +namespace webrtc { TEST(VideoCommonTest, TestCanonicalFourCC) { // Canonical fourccs are not changed. 
@@ -39,8 +40,8 @@ TEST(VideoCommonTest, TestCanonicalFourCC) { // Test conversion between interval and fps TEST(VideoCommonTest, TestVideoFormatFps) { EXPECT_EQ(VideoFormat::kMinimumInterval, VideoFormat::FpsToInterval(0)); - EXPECT_EQ(rtc::kNumNanosecsPerSec / 20, VideoFormat::FpsToInterval(20)); - EXPECT_EQ(20, VideoFormat::IntervalToFps(rtc::kNumNanosecsPerSec / 20)); + EXPECT_EQ(webrtc::kNumNanosecsPerSec / 20, VideoFormat::FpsToInterval(20)); + EXPECT_EQ(20, VideoFormat::IntervalToFps(webrtc::kNumNanosecsPerSec / 20)); EXPECT_EQ(0, VideoFormat::IntervalToFps(0)); } @@ -92,17 +93,4 @@ TEST(VideoCommonTest, TestVideoFormatCompare) { EXPECT_TRUE(format.IsPixelRateLess(format2)); } -TEST(VideoCommonTest, GreatestCommonDivisor) { - EXPECT_EQ(GreatestCommonDivisor(0, 1000), 1000); - EXPECT_EQ(GreatestCommonDivisor(1, 1), 1); - EXPECT_EQ(GreatestCommonDivisor(8, 12), 4); - EXPECT_EQ(GreatestCommonDivisor(24, 54), 6); -} - -TEST(VideoCommonTest, LeastCommonMultiple) { - EXPECT_EQ(LeastCommonMultiple(1, 1), 1); - EXPECT_EQ(LeastCommonMultiple(2, 3), 6); - EXPECT_EQ(LeastCommonMultiple(16, 32), 32); -} - -} // namespace cricket +} // namespace webrtc diff --git a/media/base/video_source_base.cc b/media/base/video_source_base.cc index 2454902069..24f9ca4f31 100644 --- a/media/base/video_source_base.cc +++ b/media/base/video_source_base.cc @@ -11,18 +11,22 @@ #include "media/base/video_source_base.h" #include +#include #include "absl/algorithm/container.h" +#include "api/sequence_checker.h" +#include "api/video/video_frame.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" #include "rtc_base/checks.h" -namespace rtc { +namespace webrtc { VideoSourceBase::VideoSourceBase() = default; VideoSourceBase::~VideoSourceBase() = default; -void VideoSourceBase::AddOrUpdateSink( - VideoSinkInterface* sink, - const VideoSinkWants& wants) { +void VideoSourceBase::AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) { RTC_DCHECK(sink != nullptr); SinkPair* sink_pair = FindSinkPair(sink); @@ -33,7 +37,7 @@ void VideoSourceBase::AddOrUpdateSink( } } -void VideoSourceBase::RemoveSink(VideoSinkInterface* sink) { +void VideoSourceBase::RemoveSink(VideoSinkInterface* sink) { RTC_DCHECK(sink != nullptr); RTC_DCHECK(FindSinkPair(sink)); sinks_.erase(std::remove_if(sinks_.begin(), sinks_.end(), @@ -44,7 +48,7 @@ void VideoSourceBase::RemoveSink(VideoSinkInterface* sink) { } VideoSourceBase::SinkPair* VideoSourceBase::FindSinkPair( - const VideoSinkInterface* sink) { + const VideoSinkInterface* sink) { auto sink_pair_it = absl::c_find_if( sinks_, [sink](const SinkPair& sink_pair) { return sink_pair.sink == sink; }); @@ -58,7 +62,7 @@ VideoSourceBaseGuarded::VideoSourceBaseGuarded() = default; VideoSourceBaseGuarded::~VideoSourceBaseGuarded() = default; void VideoSourceBaseGuarded::AddOrUpdateSink( - VideoSinkInterface* sink, + VideoSinkInterface* sink, const VideoSinkWants& wants) { RTC_DCHECK_RUN_ON(&source_sequence_); RTC_DCHECK(sink != nullptr); @@ -71,8 +75,7 @@ void VideoSourceBaseGuarded::AddOrUpdateSink( } } -void VideoSourceBaseGuarded::RemoveSink( - VideoSinkInterface* sink) { +void VideoSourceBaseGuarded::RemoveSink(VideoSinkInterface* sink) { RTC_DCHECK_RUN_ON(&source_sequence_); RTC_DCHECK(sink != nullptr); RTC_DCHECK(FindSinkPair(sink)); @@ -84,7 +87,7 @@ void VideoSourceBaseGuarded::RemoveSink( } VideoSourceBaseGuarded::SinkPair* VideoSourceBaseGuarded::FindSinkPair( - const VideoSinkInterface* sink) { + const VideoSinkInterface* sink) { 
RTC_DCHECK_RUN_ON(&source_sequence_); auto sink_pair_it = absl::c_find_if( sinks_, @@ -101,4 +104,4 @@ VideoSourceBaseGuarded::sink_pairs() const { return sinks_; } -} // namespace rtc +} // namespace webrtc diff --git a/media/base/video_source_base.h b/media/base/video_source_base.h index 2644723aa7..068719cd75 100644 --- a/media/base/video_source_base.h +++ b/media/base/video_source_base.h @@ -18,29 +18,30 @@ #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" -namespace rtc { +namespace webrtc { // VideoSourceBase is not thread safe. Before using this class, consider using // VideoSourceBaseGuarded below instead, which is an identical implementation // but applies a sequence checker to help protect internal state. // TODO(bugs.webrtc.org/12780): Delete this class. -class VideoSourceBase : public VideoSourceInterface { +class VideoSourceBase : public VideoSourceInterface { public: VideoSourceBase(); ~VideoSourceBase() override; - void AddOrUpdateSink(VideoSinkInterface* sink, + void AddOrUpdateSink(VideoSinkInterface* sink, const VideoSinkWants& wants) override; - void RemoveSink(VideoSinkInterface* sink) override; + void RemoveSink(VideoSinkInterface* sink) override; protected: struct SinkPair { - SinkPair(VideoSinkInterface* sink, VideoSinkWants wants) + SinkPair(VideoSinkInterface* sink, VideoSinkWants wants) : sink(sink), wants(wants) {} - VideoSinkInterface* sink; + VideoSinkInterface* sink; VideoSinkWants wants; }; - SinkPair* FindSinkPair(const VideoSinkInterface* sink); + SinkPair* FindSinkPair(const VideoSinkInterface* sink); const std::vector& sink_pairs() const { return sinks_; } @@ -50,34 +51,43 @@ class VideoSourceBase : public VideoSourceInterface { // VideoSourceBaseGuarded assumes that operations related to sinks, occur on the // same TQ/thread that the object was constructed on. -class VideoSourceBaseGuarded : public VideoSourceInterface { +class VideoSourceBaseGuarded : public VideoSourceInterface { public: VideoSourceBaseGuarded(); ~VideoSourceBaseGuarded() override; - void AddOrUpdateSink(VideoSinkInterface* sink, + void AddOrUpdateSink(VideoSinkInterface* sink, const VideoSinkWants& wants) override; - void RemoveSink(VideoSinkInterface* sink) override; + void RemoveSink(VideoSinkInterface* sink) override; protected: struct SinkPair { - SinkPair(VideoSinkInterface* sink, VideoSinkWants wants) + SinkPair(VideoSinkInterface* sink, VideoSinkWants wants) : sink(sink), wants(wants) {} - VideoSinkInterface* sink; + VideoSinkInterface* sink; VideoSinkWants wants; }; - SinkPair* FindSinkPair(const VideoSinkInterface* sink); + SinkPair* FindSinkPair(const VideoSinkInterface* sink); const std::vector& sink_pairs() const; // Keep the `source_sequence_` checker protected to allow sub classes the // ability to call Detach() if/when appropriate. - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker source_sequence_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker source_sequence_; private: std::vector sinks_ RTC_GUARDED_BY(&source_sequence_); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
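The same backwards-compatibility pattern recurs at the bottom of each migrated header. Purely as an illustration of what it buys downstream code (all names taken from this patch, the function itself is hypothetical), legacy call sites keep compiling unchanged when the macro is defined:

#include "media/base/video_source_base.h"

#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES
// Old spelling; valid because the header re-exports the name into rtc::,
// so rtc::VideoSourceBase and webrtc::VideoSourceBase are the same type.
rtc::VideoSourceBase* AsLegacyPointer(webrtc::VideoSourceBase* source) {
  return source;
}
#endif

The #ifdef block that follows is the video_source_base.h instance of that re-export.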
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::VideoSourceBase; +using ::webrtc::VideoSourceBaseGuarded; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_BASE_VIDEO_SOURCE_BASE_H_ diff --git a/media/engine/adm_helpers.cc b/media/engine/adm_helpers.cc index c349b7ce06..12dfbc9bbe 100644 --- a/media/engine/adm_helpers.cc +++ b/media/engine/adm_helpers.cc @@ -10,7 +10,7 @@ #include "media/engine/adm_helpers.h" -#include "modules/audio_device/include/audio_device.h" +#include "api/audio/audio_device.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" diff --git a/media/engine/fake_video_codec_factory.cc b/media/engine/fake_video_codec_factory.cc index 6f4f796b16..70811f9568 100644 --- a/media/engine/fake_video_codec_factory.cc +++ b/media/engine/fake_video_codec_factory.cc @@ -11,14 +11,14 @@ #include "media/engine/fake_video_codec_factory.h" #include +#include +#include "absl/container/inlined_vector.h" +#include "api/environment/environment.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_encoder.h" -#include "modules/video_coding/include/video_codec_interface.h" -#include "modules/video_coding/include/video_error_codes.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" #include "test/fake_decoder.h" #include "test/fake_encoder.h" @@ -30,22 +30,23 @@ static const char kFakeCodecFactoryCodecName[] = "FakeCodec"; namespace webrtc { -FakeVideoEncoderFactory::FakeVideoEncoderFactory() = default; - -// static -std::unique_ptr FakeVideoEncoderFactory::CreateVideoEncoder() { - return std::make_unique(Clock::GetRealTimeClock()); -} - std::vector FakeVideoEncoderFactory::GetSupportedFormats() const { + const absl::InlinedVector + kSupportedScalabilityModes = {webrtc::ScalabilityMode::kL1T1, + webrtc::ScalabilityMode::kL1T2, + webrtc::ScalabilityMode::kL1T3}; + return std::vector( - 1, SdpVideoFormat(kFakeCodecFactoryCodecName)); + 1, SdpVideoFormat(kFakeCodecFactoryCodecName, {}, + kSupportedScalabilityModes)); } -std::unique_ptr FakeVideoEncoderFactory::CreateVideoEncoder( - const SdpVideoFormat& format) { - return std::make_unique(Clock::GetRealTimeClock()); +std::unique_ptr FakeVideoEncoderFactory::Create( + const Environment& env, + const SdpVideoFormat& /* format */) { + return std::make_unique(env); } FakeVideoDecoderFactory::FakeVideoDecoderFactory() = default; @@ -61,8 +62,9 @@ std::vector FakeVideoDecoderFactory::GetSupportedFormats() 1, SdpVideoFormat(kFakeCodecFactoryCodecName)); } -std::unique_ptr FakeVideoDecoderFactory::CreateVideoDecoder( - const SdpVideoFormat& format) { +std::unique_ptr FakeVideoDecoderFactory::Create( + const Environment& /* env */, + const SdpVideoFormat& /* format */) { return std::make_unique(); } diff --git a/media/engine/fake_video_codec_factory.h b/media/engine/fake_video_codec_factory.h index 4a99120467..77a5cc7aea 100644 --- a/media/engine/fake_video_codec_factory.h +++ b/media/engine/fake_video_codec_factory.h @@ -14,7 +14,11 @@ #include #include +#include "api/environment/environment.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" #include "rtc_base/system/rtc_export.h" @@ -24,14 +28,12 @@ namespace webrtc { // the given bitrate constraints. 
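// A minimal usage sketch of the Environment-based factory API (assuming
// webrtc::CreateEnvironment() from api/environment/environment_factory.h is
// the usual way to obtain an Environment in tests):
//   FakeVideoEncoderFactory factory;
//   const Environment env = CreateEnvironment();
//   std::unique_ptr<VideoEncoder> encoder =
//       factory.Create(env, SdpVideoFormat("FakeCodec"));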
class RTC_EXPORT FakeVideoEncoderFactory : public VideoEncoderFactory { public: - FakeVideoEncoderFactory(); - - static std::unique_ptr CreateVideoEncoder(); + FakeVideoEncoderFactory() = default; // VideoEncoderFactory implementation std::vector GetSupportedFormats() const override; - std::unique_ptr CreateVideoEncoder( - const SdpVideoFormat& format) override; + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override; }; // Provides a fake video decoder instance that ignores the given bitstream and @@ -44,8 +46,8 @@ class RTC_EXPORT FakeVideoDecoderFactory : public VideoDecoderFactory { // VideoDecoderFactory implementation std::vector GetSupportedFormats() const override; - std::unique_ptr CreateVideoDecoder( - const SdpVideoFormat& format) override; + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override; }; } // namespace webrtc diff --git a/media/engine/fake_webrtc_call.cc b/media/engine/fake_webrtc_call.cc index 846be4d7ae..fb8d985a14 100644 --- a/media/engine/fake_webrtc_call.cc +++ b/media/engine/fake_webrtc_call.cc @@ -10,43 +10,72 @@ #include "media/engine/fake_webrtc_call.h" +#include #include +#include +#include #include +#include #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" +#include "api/adaptation/resource.h" +#include "api/audio_codecs/audio_format.h" #include "api/call/audio_sink.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/environment/environment.h" +#include "api/frame_transformer_interface.h" +#include "api/make_ref_counted.h" +#include "api/media_types.h" +#include "api/rtc_error.h" +#include "api/rtp_headers.h" +#include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" #include "api/units/timestamp.h" +#include "api/video/video_source_interface.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" +#include "call/audio_receive_stream.h" +#include "call/audio_send_stream.h" +#include "call/call.h" +#include "call/flexfec_receive_stream.h" #include "call/packet_receiver.h" +#include "call/video_receive_stream.h" +#include "call/video_send_stream.h" #include "media/base/media_channel.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_util.h" +#include "rtc_base/buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/gunit.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/network/sent_packet.h" #include "rtc_base/thread.h" +#include "test/gtest.h" #include "video/config/encoder_stream_factory.h" +#include "video/config/video_encoder_config.h" -namespace cricket { +namespace webrtc { +using ::webrtc::Environment; using ::webrtc::ParseRtpSsrc; -FakeAudioSendStream::FakeAudioSendStream( - int id, - const webrtc::AudioSendStream::Config& config) +FakeAudioSendStream::FakeAudioSendStream(int id, + const AudioSendStream::Config& config) : id_(id), config_(config) {} -void FakeAudioSendStream::Reconfigure( - const webrtc::AudioSendStream::Config& config, - webrtc::SetParametersCallback callback) { +void FakeAudioSendStream::Reconfigure(const AudioSendStream::Config& config, + SetParametersCallback callback) { config_ = config; - webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); + webrtc::InvokeSetParametersCallback(callback, RTCError::OK()); } -const webrtc::AudioSendStream::Config& FakeAudioSendStream::GetConfig() const { +const 
AudioSendStream::Config& FakeAudioSendStream::GetConfig() const { return config_; } -void FakeAudioSendStream::SetStats( - const webrtc::AudioSendStream::Stats& stats) { +void FakeAudioSendStream::SetStats(const AudioSendStream::Stats& stats) { stats_ = stats; } @@ -70,33 +99,33 @@ void FakeAudioSendStream::SetMuted(bool muted) { muted_ = muted; } -webrtc::AudioSendStream::Stats FakeAudioSendStream::GetStats() const { +AudioSendStream::Stats FakeAudioSendStream::GetStats() const { return stats_; } -webrtc::AudioSendStream::Stats FakeAudioSendStream::GetStats( +AudioSendStream::Stats FakeAudioSendStream::GetStats( bool /*has_remote_tracks*/) const { return stats_; } FakeAudioReceiveStream::FakeAudioReceiveStream( int id, - const webrtc::AudioReceiveStreamInterface::Config& config) + const AudioReceiveStreamInterface::Config& config) : id_(id), config_(config) {} -const webrtc::AudioReceiveStreamInterface::Config& -FakeAudioReceiveStream::GetConfig() const { +const AudioReceiveStreamInterface::Config& FakeAudioReceiveStream::GetConfig() + const { return config_; } void FakeAudioReceiveStream::SetStats( - const webrtc::AudioReceiveStreamInterface::Stats& stats) { + const AudioReceiveStreamInterface::Stats& stats) { stats_ = stats; } bool FakeAudioReceiveStream::VerifyLastPacket(const uint8_t* data, size_t length) const { - return last_packet_ == rtc::Buffer(data, length); + return last_packet_ == Buffer(data, length); } bool FakeAudioReceiveStream::DeliverRtp(const uint8_t* packet, @@ -108,12 +137,12 @@ bool FakeAudioReceiveStream::DeliverRtp(const uint8_t* packet, } void FakeAudioReceiveStream::SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) { + scoped_refptr frame_transformer) { config_.frame_transformer = std::move(frame_transformer); } void FakeAudioReceiveStream::SetDecoderMap( - std::map decoder_map) { + std::map decoder_map) { config_.decoder_map = std::move(decoder_map); } @@ -121,21 +150,25 @@ void FakeAudioReceiveStream::SetNackHistory(int history_ms) { config_.rtp.nack.rtp_history_ms = history_ms; } +void FakeAudioReceiveStream::SetRtcpMode(RtcpMode mode) { + config_.rtp.rtcp_mode = mode; +} + void FakeAudioReceiveStream::SetNonSenderRttMeasurement(bool enabled) { config_.enable_non_sender_rtt = enabled; } void FakeAudioReceiveStream::SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) { + scoped_refptr frame_decryptor) { config_.frame_decryptor = std::move(frame_decryptor); } -webrtc::AudioReceiveStreamInterface::Stats FakeAudioReceiveStream::GetStats( - bool get_and_clear_legacy_stats) const { +AudioReceiveStreamInterface::Stats FakeAudioReceiveStream::GetStats( + bool /* get_and_clear_legacy_stats */) const { return stats_; } -void FakeAudioReceiveStream::SetSink(webrtc::AudioSinkInterface* sink) { +void FakeAudioReceiveStream::SetSink(AudioSinkInterface* sink) { sink_ = sink; } @@ -143,10 +176,11 @@ void FakeAudioReceiveStream::SetGain(float gain) { gain_ = gain; } -FakeVideoSendStream::FakeVideoSendStream( - webrtc::VideoSendStream::Config config, - webrtc::VideoEncoderConfig encoder_config) - : sending_(false), +FakeVideoSendStream::FakeVideoSendStream(const Environment& env, + VideoSendStream::Config config, + VideoEncoderConfig encoder_config) + : env_(env), + sending_(false), config_(std::move(config)), codec_settings_set_(false), resolution_scaling_enabled_(false), @@ -163,17 +197,15 @@ FakeVideoSendStream::~FakeVideoSendStream() { source_->RemoveSink(this); } -const webrtc::VideoSendStream::Config& FakeVideoSendStream::GetConfig() 
const { +const VideoSendStream::Config& FakeVideoSendStream::GetConfig() const { return config_; } -const webrtc::VideoEncoderConfig& FakeVideoSendStream::GetEncoderConfig() - const { +const VideoEncoderConfig& FakeVideoSendStream::GetEncoderConfig() const { return encoder_config_; } -const std::vector& FakeVideoSendStream::GetVideoStreams() - const { +const std::vector& FakeVideoSendStream::GetVideoStreams() const { return video_streams_; } @@ -181,8 +213,7 @@ bool FakeVideoSendStream::IsSending() const { return sending_; } -bool FakeVideoSendStream::GetVp8Settings( - webrtc::VideoCodecVP8* settings) const { +bool FakeVideoSendStream::GetVp8Settings(VideoCodecVP8* settings) const { if (!codec_settings_set_) { return false; } @@ -191,8 +222,7 @@ bool FakeVideoSendStream::GetVp8Settings( return true; } -bool FakeVideoSendStream::GetVp9Settings( - webrtc::VideoCodecVP9* settings) const { +bool FakeVideoSendStream::GetVp9Settings(VideoCodecVP9* settings) const { if (!codec_settings_set_) { return false; } @@ -201,8 +231,7 @@ bool FakeVideoSendStream::GetVp9Settings( return true; } -bool FakeVideoSendStream::GetH264Settings( - webrtc::VideoCodecH264* settings) const { +bool FakeVideoSendStream::GetH264Settings(VideoCodecH264* settings) const { if (!codec_settings_set_) { return false; } @@ -211,6 +240,15 @@ bool FakeVideoSendStream::GetH264Settings( return true; } +bool FakeVideoSendStream::GetAv1Settings(VideoCodecAV1* settings) const { + if (!codec_settings_set_) { + return false; + } + + *settings = codec_specific_settings_.av1; + return true; +} + int FakeVideoSendStream::GetNumberOfSwappedFrames() const { return num_swapped_frames_; } @@ -228,7 +266,7 @@ int64_t FakeVideoSendStream::GetLastTimestamp() const { return last_frame_->render_time_ms(); } -void FakeVideoSendStream::OnFrame(const webrtc::VideoFrame& frame) { +void FakeVideoSendStream::OnFrame(const VideoFrame& frame) { ++num_swapped_frames_; if (!last_frame_ || frame.width() != last_frame_->width() || frame.height() != last_frame_->height() || @@ -237,41 +275,34 @@ void FakeVideoSendStream::OnFrame(const webrtc::VideoFrame& frame) { // Note: only tests set their own EncoderStreamFactory... 
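// (Sketch of the updated call shape used in OnFrame() and
// ReconfigureVideoEncoder() below: the default stream factory is now built
// from just the EncoderInfo, and the field trials are passed explicitly from
// the injected Environment, e.g.
//   auto factory = make_ref_counted<EncoderStreamFactory>(encoder_info);
//   factory->CreateEncoderStreams(env_.field_trials(), width, height, config);
// )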
video_streams_ = encoder_config_.video_stream_factory->CreateEncoderStreams( - frame.width(), frame.height(), encoder_config_); + env_.field_trials(), frame.width(), frame.height(), + encoder_config_); } else { - webrtc::VideoEncoder::EncoderInfo encoder_info; - rtc::scoped_refptr< - webrtc::VideoEncoderConfig::VideoStreamFactoryInterface> - factory = rtc::make_ref_counted( - encoder_config_.video_format.name, encoder_config_.max_qp, - encoder_config_.content_type == - webrtc::VideoEncoderConfig::ContentType::kScreen, - encoder_config_.legacy_conference_mode, encoder_info); + VideoEncoder::EncoderInfo encoder_info; + auto factory = make_ref_counted(encoder_info); video_streams_ = factory->CreateEncoderStreams( - frame.width(), frame.height(), encoder_config_); + env_.field_trials(), frame.width(), frame.height(), encoder_config_); } } last_frame_ = frame; } -void FakeVideoSendStream::SetStats( - const webrtc::VideoSendStream::Stats& stats) { +void FakeVideoSendStream::SetStats(const VideoSendStream::Stats& stats) { stats_ = stats; } -webrtc::VideoSendStream::Stats FakeVideoSendStream::GetStats() { +VideoSendStream::Stats FakeVideoSendStream::GetStats() { return stats_; } -void FakeVideoSendStream::ReconfigureVideoEncoder( - webrtc::VideoEncoderConfig config) { +void FakeVideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) { ReconfigureVideoEncoder(std::move(config), nullptr); } void FakeVideoSendStream::ReconfigureVideoEncoder( - webrtc::VideoEncoderConfig config, - webrtc::SetParametersCallback callback) { + VideoEncoderConfig config, + SetParametersCallback callback) { int width, height; if (last_frame_) { width = last_frame_->width(); @@ -282,17 +313,13 @@ void FakeVideoSendStream::ReconfigureVideoEncoder( if (config.video_stream_factory) { // Note: only tests set their own EncoderStreamFactory... 
video_streams_ = config.video_stream_factory->CreateEncoderStreams( - width, height, config); + env_.field_trials(), width, height, config); } else { - webrtc::VideoEncoder::EncoderInfo encoder_info; - rtc::scoped_refptr - factory = rtc::make_ref_counted( - config.video_format.name, config.max_qp, - config.content_type == - webrtc::VideoEncoderConfig::ContentType::kScreen, - config.legacy_conference_mode, encoder_info); - - video_streams_ = factory->CreateEncoderStreams(width, height, config); + VideoEncoder::EncoderInfo encoder_info; + auto factory = make_ref_counted(encoder_info); + + video_streams_ = factory->CreateEncoderStreams(env_.field_trials(), width, + height, config); } if (config.encoder_specific_settings != nullptr) { @@ -315,6 +342,9 @@ void FakeVideoSendStream::ReconfigureVideoEncoder( } else if (config_.rtp.payload_name == "H264") { codec_specific_settings_.h264.numberOfTemporalLayers = num_temporal_layers; + } else if (config_.rtp.payload_name == "AV1") { + config.encoder_specific_settings->FillVideoCodecAv1( + &codec_specific_settings_.av1); } else { ADD_FAILURE() << "Unsupported encoder payload: " << config_.rtp.payload_name; @@ -323,18 +353,7 @@ void FakeVideoSendStream::ReconfigureVideoEncoder( codec_settings_set_ = config.encoder_specific_settings != nullptr; encoder_config_ = std::move(config); ++num_encoder_reconfigurations_; - webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); -} - -void FakeVideoSendStream::StartPerRtpStream( - const std::vector active_layers) { - sending_ = false; - for (const bool active_layer : active_layers) { - if (active_layer) { - sending_ = true; - break; - } - } + webrtc::InvokeSetParametersCallback(callback, RTCError::OK()); } void FakeVideoSendStream::Start() { @@ -346,41 +365,40 @@ void FakeVideoSendStream::Stop() { } void FakeVideoSendStream::AddAdaptationResource( - rtc::scoped_refptr resource) {} + scoped_refptr /* resource */) {} -std::vector> +std::vector> FakeVideoSendStream::GetAdaptationResources() { return {}; } void FakeVideoSendStream::SetSource( - rtc::VideoSourceInterface* source, - const webrtc::DegradationPreference& degradation_preference) { + VideoSourceInterface* source, + const DegradationPreference& degradation_preference) { if (source_) source_->RemoveSink(this); source_ = source; switch (degradation_preference) { - case webrtc::DegradationPreference::MAINTAIN_FRAMERATE: + case DegradationPreference::MAINTAIN_FRAMERATE: resolution_scaling_enabled_ = true; framerate_scaling_enabled_ = false; break; - case webrtc::DegradationPreference::MAINTAIN_RESOLUTION: + case DegradationPreference::MAINTAIN_RESOLUTION: resolution_scaling_enabled_ = false; framerate_scaling_enabled_ = true; break; - case webrtc::DegradationPreference::BALANCED: + case DegradationPreference::BALANCED: resolution_scaling_enabled_ = true; framerate_scaling_enabled_ = true; break; - case webrtc::DegradationPreference::DISABLED: + case DegradationPreference::DISABLED: resolution_scaling_enabled_ = false; framerate_scaling_enabled_ = false; break; } if (source) - source->AddOrUpdateSink(this, resolution_scaling_enabled_ - ? sink_wants_ - : rtc::VideoSinkWants()); + source->AddOrUpdateSink( + this, resolution_scaling_enabled_ ? 
sink_wants_ : VideoSinkWants()); } void FakeVideoSendStream::GenerateKeyFrame( @@ -388,18 +406,17 @@ void FakeVideoSendStream::GenerateKeyFrame( keyframes_requested_by_rid_ = rids; } -void FakeVideoSendStream::InjectVideoSinkWants( - const rtc::VideoSinkWants& wants) { +void FakeVideoSendStream::InjectVideoSinkWants(const VideoSinkWants& wants) { sink_wants_ = wants; source_->AddOrUpdateSink(this, wants); } FakeVideoReceiveStream::FakeVideoReceiveStream( - webrtc::VideoReceiveStreamInterface::Config config) + VideoReceiveStreamInterface::Config config) : config_(std::move(config)), receiving_(false) {} -const webrtc::VideoReceiveStreamInterface::Config& -FakeVideoReceiveStream::GetConfig() const { +const VideoReceiveStreamInterface::Config& FakeVideoReceiveStream::GetConfig() + const { return config_; } @@ -407,12 +424,11 @@ bool FakeVideoReceiveStream::IsReceiving() const { return receiving_; } -void FakeVideoReceiveStream::InjectFrame(const webrtc::VideoFrame& frame) { +void FakeVideoReceiveStream::InjectFrame(const VideoFrame& frame) { config_.renderer->OnFrame(frame); } -webrtc::VideoReceiveStreamInterface::Stats FakeVideoReceiveStream::GetStats() - const { +VideoReceiveStreamInterface::Stats FakeVideoReceiveStream::GetStats() const { return stats_; } @@ -425,36 +441,36 @@ void FakeVideoReceiveStream::Stop() { } void FakeVideoReceiveStream::SetStats( - const webrtc::VideoReceiveStreamInterface::Stats& stats) { + const VideoReceiveStreamInterface::Stats& stats) { stats_ = stats; } FakeFlexfecReceiveStream::FakeFlexfecReceiveStream( - const webrtc::FlexfecReceiveStream::Config config) + const FlexfecReceiveStream::Config config) : config_(std::move(config)) {} -const webrtc::FlexfecReceiveStream::Config& -FakeFlexfecReceiveStream::GetConfig() const { +const FlexfecReceiveStream::Config& FakeFlexfecReceiveStream::GetConfig() + const { return config_; } -void FakeFlexfecReceiveStream::OnRtpPacket(const webrtc::RtpPacketReceived&) { +void FakeFlexfecReceiveStream::OnRtpPacket(const RtpPacketReceived&) { RTC_DCHECK_NOTREACHED() << "Not implemented."; } -FakeCall::FakeCall(webrtc::test::ScopedKeyValueConfig* field_trials) - : FakeCall(rtc::Thread::Current(), rtc::Thread::Current(), field_trials) {} +FakeCall::FakeCall(const Environment& env) + : FakeCall(env, Thread::Current(), Thread::Current()) {} -FakeCall::FakeCall(webrtc::TaskQueueBase* worker_thread, - webrtc::TaskQueueBase* network_thread, - webrtc::test::ScopedKeyValueConfig* field_trials) - : network_thread_(network_thread), +FakeCall::FakeCall(const Environment& env, + TaskQueueBase* worker_thread, + TaskQueueBase* network_thread) + : env_(env), + network_thread_(network_thread), worker_thread_(worker_thread), audio_network_state_(webrtc::kNetworkUp), video_network_state_(webrtc::kNetworkUp), num_created_send_streams_(0), - num_created_receive_streams_(0), - trials_(field_trials ? 
field_trials : &fallback_trials_) {} + num_created_receive_streams_(0) {} FakeCall::~FakeCall() { EXPECT_EQ(0u, video_send_streams_.size()); @@ -511,14 +527,15 @@ FakeCall::GetFlexfecReceiveStreams() { return flexfec_receive_streams_; } -webrtc::NetworkState FakeCall::GetNetworkState(webrtc::MediaType media) const { +NetworkState FakeCall::GetNetworkState(MediaType media) const { switch (media) { - case webrtc::MediaType::AUDIO: + case MediaType::AUDIO: return audio_network_state_; - case webrtc::MediaType::VIDEO: + case MediaType::VIDEO: return video_network_state_; - case webrtc::MediaType::DATA: - case webrtc::MediaType::ANY: + case MediaType::DATA: + case MediaType::ANY: + case MediaType::UNSUPPORTED: ADD_FAILURE() << "GetNetworkState called with unknown parameter."; return webrtc::kNetworkDown; } @@ -529,8 +546,8 @@ webrtc::NetworkState FakeCall::GetNetworkState(webrtc::MediaType media) const { return webrtc::kNetworkDown; } -webrtc::AudioSendStream* FakeCall::CreateAudioSendStream( - const webrtc::AudioSendStream::Config& config) { +AudioSendStream* FakeCall::CreateAudioSendStream( + const AudioSendStream::Config& config) { FakeAudioSendStream* fake_stream = new FakeAudioSendStream(next_stream_id_++, config); audio_send_streams_.push_back(fake_stream); @@ -538,7 +555,7 @@ webrtc::AudioSendStream* FakeCall::CreateAudioSendStream( return fake_stream; } -void FakeCall::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) { +void FakeCall::DestroyAudioSendStream(AudioSendStream* send_stream) { auto it = absl::c_find(audio_send_streams_, static_cast(send_stream)); if (it == audio_send_streams_.end()) { @@ -549,8 +566,8 @@ void FakeCall::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) { } } -webrtc::AudioReceiveStreamInterface* FakeCall::CreateAudioReceiveStream( - const webrtc::AudioReceiveStreamInterface::Config& config) { +AudioReceiveStreamInterface* FakeCall::CreateAudioReceiveStream( + const AudioReceiveStreamInterface::Config& config) { audio_receive_streams_.push_back( new FakeAudioReceiveStream(next_stream_id_++, config)); ++num_created_receive_streams_; @@ -558,7 +575,7 @@ webrtc::AudioReceiveStreamInterface* FakeCall::CreateAudioReceiveStream( } void FakeCall::DestroyAudioReceiveStream( - webrtc::AudioReceiveStreamInterface* receive_stream) { + AudioReceiveStreamInterface* receive_stream) { auto it = absl::c_find(audio_receive_streams_, static_cast(receive_stream)); if (it == audio_receive_streams_.end()) { @@ -569,17 +586,17 @@ void FakeCall::DestroyAudioReceiveStream( } } -webrtc::VideoSendStream* FakeCall::CreateVideoSendStream( - webrtc::VideoSendStream::Config config, - webrtc::VideoEncoderConfig encoder_config) { - FakeVideoSendStream* fake_stream = - new FakeVideoSendStream(std::move(config), std::move(encoder_config)); +VideoSendStream* FakeCall::CreateVideoSendStream( + VideoSendStream::Config config, + VideoEncoderConfig encoder_config) { + FakeVideoSendStream* fake_stream = new FakeVideoSendStream( + env_, std::move(config), std::move(encoder_config)); video_send_streams_.push_back(fake_stream); ++num_created_send_streams_; return fake_stream; } -void FakeCall::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) { +void FakeCall::DestroyVideoSendStream(VideoSendStream* send_stream) { auto it = absl::c_find(video_send_streams_, static_cast(send_stream)); if (it == video_send_streams_.end()) { @@ -590,8 +607,8 @@ void FakeCall::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) { } } -webrtc::VideoReceiveStreamInterface* 
FakeCall::CreateVideoReceiveStream( - webrtc::VideoReceiveStreamInterface::Config config) { +VideoReceiveStreamInterface* FakeCall::CreateVideoReceiveStream( + VideoReceiveStreamInterface::Config config) { video_receive_streams_.push_back( new FakeVideoReceiveStream(std::move(config))); ++num_created_receive_streams_; @@ -599,7 +616,7 @@ webrtc::VideoReceiveStreamInterface* FakeCall::CreateVideoReceiveStream( } void FakeCall::DestroyVideoReceiveStream( - webrtc::VideoReceiveStreamInterface* receive_stream) { + VideoReceiveStreamInterface* receive_stream) { auto it = absl::c_find(video_receive_streams_, static_cast(receive_stream)); if (it == video_receive_streams_.end()) { @@ -610,8 +627,8 @@ void FakeCall::DestroyVideoReceiveStream( } } -webrtc::FlexfecReceiveStream* FakeCall::CreateFlexfecReceiveStream( - const webrtc::FlexfecReceiveStream::Config config) { +FlexfecReceiveStream* FakeCall::CreateFlexfecReceiveStream( + const FlexfecReceiveStream::Config config) { FakeFlexfecReceiveStream* fake_stream = new FakeFlexfecReceiveStream(std::move(config)); flexfec_receive_streams_.push_back(fake_stream); @@ -620,7 +637,7 @@ webrtc::FlexfecReceiveStream* FakeCall::CreateFlexfecReceiveStream( } void FakeCall::DestroyFlexfecReceiveStream( - webrtc::FlexfecReceiveStream* receive_stream) { + FlexfecReceiveStream* receive_stream) { auto it = absl::c_find(flexfec_receive_streams_, static_cast(receive_stream)); @@ -633,16 +650,15 @@ void FakeCall::DestroyFlexfecReceiveStream( } } -void FakeCall::AddAdaptationResource( - rtc::scoped_refptr resource) {} +void FakeCall::AddAdaptationResource(scoped_refptr /* resource */) {} -webrtc::PacketReceiver* FakeCall::Receiver() { +PacketReceiver* FakeCall::Receiver() { return this; } void FakeCall::DeliverRtpPacket( - webrtc::MediaType media_type, - webrtc::RtpPacketReceived packet, + MediaType media_type, + RtpPacketReceived packet, OnUndemuxablePacketHandler undemuxable_packet_handler) { if (!DeliverPacketInternal(media_type, packet.Ssrc(), packet.Buffer(), packet.arrival_time())) { @@ -654,16 +670,15 @@ void FakeCall::DeliverRtpPacket( last_received_rtp_packet_ = packet; } -bool FakeCall::DeliverPacketInternal(webrtc::MediaType media_type, +bool FakeCall::DeliverPacketInternal(MediaType media_type, uint32_t ssrc, - const rtc::CopyOnWriteBuffer& packet, - webrtc::Timestamp arrival_time) { + const CopyOnWriteBuffer& packet, + Timestamp arrival_time) { EXPECT_GE(packet.size(), 12u); RTC_DCHECK(arrival_time.IsFinite()); - RTC_DCHECK(media_type == webrtc::MediaType::AUDIO || - media_type == webrtc::MediaType::VIDEO); + RTC_DCHECK(media_type == MediaType::AUDIO || media_type == MediaType::VIDEO); - if (media_type == webrtc::MediaType::VIDEO) { + if (media_type == MediaType::VIDEO) { for (auto receiver : video_receive_streams_) { if (receiver->GetConfig().rtp.remote_ssrc == ssrc || receiver->GetConfig().rtp.rtx_ssrc == ssrc) { @@ -672,7 +687,7 @@ bool FakeCall::DeliverPacketInternal(webrtc::MediaType media_type, } } } - if (media_type == webrtc::MediaType::AUDIO) { + if (media_type == MediaType::AUDIO) { for (auto receiver : audio_receive_streams_) { if (receiver->GetConfig().rtp.remote_ssrc == ssrc) { receiver->DeliverRtp(packet.cdata(), packet.size(), arrival_time.us()); @@ -684,7 +699,7 @@ bool FakeCall::DeliverPacketInternal(webrtc::MediaType media_type, return false; } -void FakeCall::SetStats(const webrtc::Call::Stats& stats) { +void FakeCall::SetStats(const Call::Stats& stats) { stats_ = stats; } @@ -696,66 +711,66 @@ int 
FakeCall::GetNumCreatedReceiveStreams() const { return num_created_receive_streams_; } -webrtc::Call::Stats FakeCall::GetStats() const { +Call::Stats FakeCall::GetStats() const { return stats_; } -webrtc::TaskQueueBase* FakeCall::network_thread() const { +TaskQueueBase* FakeCall::network_thread() const { return network_thread_; } -webrtc::TaskQueueBase* FakeCall::worker_thread() const { +TaskQueueBase* FakeCall::worker_thread() const { return worker_thread_; } -void FakeCall::SignalChannelNetworkState(webrtc::MediaType media, - webrtc::NetworkState state) { +void FakeCall::SignalChannelNetworkState(MediaType media, NetworkState state) { switch (media) { - case webrtc::MediaType::AUDIO: + case MediaType::AUDIO: audio_network_state_ = state; break; - case webrtc::MediaType::VIDEO: + case MediaType::VIDEO: video_network_state_ = state; break; - case webrtc::MediaType::DATA: - case webrtc::MediaType::ANY: + case MediaType::DATA: + case MediaType::ANY: + case MediaType::UNSUPPORTED: ADD_FAILURE() << "SignalChannelNetworkState called with unknown parameter."; } } void FakeCall::OnAudioTransportOverheadChanged( - int transport_overhead_per_packet) {} + int /* transport_overhead_per_packet */) {} -void FakeCall::OnLocalSsrcUpdated(webrtc::AudioReceiveStreamInterface& stream, +void FakeCall::OnLocalSsrcUpdated(AudioReceiveStreamInterface& stream, uint32_t local_ssrc) { auto& fake_stream = static_cast(stream); fake_stream.SetLocalSsrc(local_ssrc); } -void FakeCall::OnLocalSsrcUpdated(webrtc::VideoReceiveStreamInterface& stream, +void FakeCall::OnLocalSsrcUpdated(VideoReceiveStreamInterface& stream, uint32_t local_ssrc) { auto& fake_stream = static_cast(stream); fake_stream.SetLocalSsrc(local_ssrc); } -void FakeCall::OnLocalSsrcUpdated(webrtc::FlexfecReceiveStream& stream, +void FakeCall::OnLocalSsrcUpdated(FlexfecReceiveStream& stream, uint32_t local_ssrc) { auto& fake_stream = static_cast(stream); fake_stream.SetLocalSsrc(local_ssrc); } -void FakeCall::OnUpdateSyncGroup(webrtc::AudioReceiveStreamInterface& stream, +void FakeCall::OnUpdateSyncGroup(AudioReceiveStreamInterface& stream, absl::string_view sync_group) { auto& fake_stream = static_cast(stream); fake_stream.SetSyncGroup(sync_group); } -void FakeCall::OnSentPacket(const rtc::SentPacket& sent_packet) { +void FakeCall::OnSentPacket(const SentPacketInfo& sent_packet) { last_sent_packet_ = sent_packet; if (sent_packet.packet_id >= 0) { last_sent_nonnegative_packet_id_ = sent_packet.packet_id; } } -} // namespace cricket +} // namespace webrtc diff --git a/media/engine/fake_webrtc_call.h b/media/engine/fake_webrtc_call.h index 8d9ecb6396..aec1fc1626 100644 --- a/media/engine/fake_webrtc_call.h +++ b/media/engine/fake_webrtc_call.h @@ -20,28 +20,58 @@ #ifndef MEDIA_ENGINE_FAKE_WEBRTC_CALL_H_ #define MEDIA_ENGINE_FAKE_WEBRTC_CALL_H_ +#include +#include #include #include +#include #include #include #include #include "absl/strings/string_view.h" -#include "api/transport/field_trial_based_config.h" +#include "api/adaptation/resource.h" +#include "api/audio/audio_frame.h" +#include "api/audio/audio_mixer.h" +#include "api/audio_codecs/audio_format.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/environment/environment.h" +#include "api/frame_transformer_interface.h" +#include "api/media_types.h" +#include "api/rtp_headers.h" +#include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" +#include "api/transport/bitrate_settings.h" +#include 
"api/transport/rtp/rtp_source.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/video_frame.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" +#include "api/video_codecs/video_codec.h" #include "call/audio_receive_stream.h" #include "call/audio_send_stream.h" #include "call/call.h" +#include "call/fake_payload_type_suggester.h" #include "call/flexfec_receive_stream.h" +#include "call/packet_receiver.h" +#include "call/payload_type.h" +#include "call/rtp_transport_controller_send_interface.h" #include "call/test/mock_rtp_transport_controller_send.h" #include "call/video_receive_stream.h" #include "call/video_send_stream.h" +#include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/buffer.h" -#include "test/scoped_key_value_config.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/network/sent_packet.h" +#include "test/gmock.h" +#include "video/config/video_encoder_config.h" -namespace cricket { -class FakeAudioSendStream final : public webrtc::AudioSendStream { +namespace webrtc { +class FakeAudioSendStream final : public AudioSendStream { public: struct TelephoneEvent { int payload_type = -1; @@ -50,54 +80,50 @@ class FakeAudioSendStream final : public webrtc::AudioSendStream { int duration_ms = 0; }; - explicit FakeAudioSendStream(int id, - const webrtc::AudioSendStream::Config& config); + explicit FakeAudioSendStream(int id, const AudioSendStream::Config& config); int id() const { return id_; } - const webrtc::AudioSendStream::Config& GetConfig() const override; - void SetStats(const webrtc::AudioSendStream::Stats& stats); + const AudioSendStream::Config& GetConfig() const override; + void SetStats(const AudioSendStream::Stats& stats); TelephoneEvent GetLatestTelephoneEvent() const; bool IsSending() const { return sending_; } bool muted() const { return muted_; } private: // webrtc::AudioSendStream implementation. 
- void Reconfigure(const webrtc::AudioSendStream::Config& config, - webrtc::SetParametersCallback callback) override; + void Reconfigure(const AudioSendStream::Config& config, + SetParametersCallback callback) override; void Start() override { sending_ = true; } void Stop() override { sending_ = false; } - void SendAudioData(std::unique_ptr audio_frame) override { - } + void SendAudioData(std::unique_ptr /* audio_frame */) override {} bool SendTelephoneEvent(int payload_type, int payload_frequency, int event, int duration_ms) override; void SetMuted(bool muted) override; - webrtc::AudioSendStream::Stats GetStats() const override; - webrtc::AudioSendStream::Stats GetStats( - bool has_remote_tracks) const override; + AudioSendStream::Stats GetStats() const override; + AudioSendStream::Stats GetStats(bool has_remote_tracks) const override; int id_ = -1; TelephoneEvent latest_telephone_event_; - webrtc::AudioSendStream::Config config_; - webrtc::AudioSendStream::Stats stats_; + AudioSendStream::Config config_; + AudioSendStream::Stats stats_; bool sending_ = false; bool muted_ = false; }; -class FakeAudioReceiveStream final - : public webrtc::AudioReceiveStreamInterface { +class FakeAudioReceiveStream final : public AudioReceiveStreamInterface { public: explicit FakeAudioReceiveStream( int id, - const webrtc::AudioReceiveStreamInterface::Config& config); + const AudioReceiveStreamInterface::Config& config); int id() const { return id_; } - const webrtc::AudioReceiveStreamInterface::Config& GetConfig() const; - void SetStats(const webrtc::AudioReceiveStreamInterface::Stats& stats); + const AudioReceiveStreamInterface::Config& GetConfig() const; + void SetStats(const AudioReceiveStreamInterface::Stats& stats); int received_packets() const { return received_packets_; } bool VerifyLastPacket(const uint8_t* data, size_t length) const; - const webrtc::AudioSinkInterface* sink() const { return sink_; } + const AudioSinkInterface* sink() const { return sink_; } float gain() const { return gain_; } bool DeliverRtp(const uint8_t* packet, size_t length, int64_t packet_time_us); bool started() const { return started_; } @@ -118,18 +144,17 @@ class FakeAudioReceiveStream final void Stop() override { started_ = false; } bool IsRunning() const override { return started_; } void SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) - override; - void SetDecoderMap( - std::map decoder_map) override; + scoped_refptr frame_transformer) override; + void SetDecoderMap(std::map decoder_map) override; void SetNackHistory(int history_ms) override; + void SetRtcpMode(RtcpMode mode) override; void SetNonSenderRttMeasurement(bool enabled) override; - void SetFrameDecryptor(rtc::scoped_refptr - frame_decryptor) override; + void SetFrameDecryptor( + scoped_refptr frame_decryptor) override; - webrtc::AudioReceiveStreamInterface::Stats GetStats( + AudioReceiveStreamInterface::Stats GetStats( bool get_and_clear_legacy_stats) const override; - void SetSink(webrtc::AudioSinkInterface* sink) override; + void SetSink(AudioSinkInterface* sink) override; void SetGain(float gain) override; bool SetBaseMinimumPlayoutDelayMs(int delay_ms) override { base_mininum_playout_delay_ms_ = delay_ms; @@ -138,43 +163,48 @@ class FakeAudioReceiveStream final int GetBaseMinimumPlayoutDelayMs() const override { return base_mininum_playout_delay_ms_; } - std::vector GetSources() const override { - return std::vector(); + std::vector GetSources() const override { + return std::vector(); + } + AudioMixer::Source* source() 
override { + // TODO(b/397376626): Add a Fake AudioMixer::Source + return nullptr; } private: int id_ = -1; - webrtc::AudioReceiveStreamInterface::Config config_; - webrtc::AudioReceiveStreamInterface::Stats stats_; + AudioReceiveStreamInterface::Config config_; + AudioReceiveStreamInterface::Stats stats_; int received_packets_ = 0; - webrtc::AudioSinkInterface* sink_ = nullptr; + AudioSinkInterface* sink_ = nullptr; float gain_ = 1.0f; - rtc::Buffer last_packet_; + Buffer last_packet_; bool started_ = false; int base_mininum_playout_delay_ms_ = 0; }; -class FakeVideoSendStream final - : public webrtc::VideoSendStream, - public rtc::VideoSinkInterface { +class FakeVideoSendStream final : public VideoSendStream, + public VideoSinkInterface { public: - FakeVideoSendStream(webrtc::VideoSendStream::Config config, - webrtc::VideoEncoderConfig encoder_config); + FakeVideoSendStream(const Environment& env, + VideoSendStream::Config config, + VideoEncoderConfig encoder_config); ~FakeVideoSendStream() override; - const webrtc::VideoSendStream::Config& GetConfig() const; - const webrtc::VideoEncoderConfig& GetEncoderConfig() const; - const std::vector& GetVideoStreams() const; + const VideoSendStream::Config& GetConfig() const; + const VideoEncoderConfig& GetEncoderConfig() const; + const std::vector& GetVideoStreams() const; bool IsSending() const; - bool GetVp8Settings(webrtc::VideoCodecVP8* settings) const; - bool GetVp9Settings(webrtc::VideoCodecVP9* settings) const; - bool GetH264Settings(webrtc::VideoCodecH264* settings) const; + bool GetVp8Settings(VideoCodecVP8* settings) const; + bool GetVp9Settings(VideoCodecVP9* settings) const; + bool GetH264Settings(VideoCodecH264* settings) const; + bool GetAv1Settings(VideoCodecAV1* settings) const; int GetNumberOfSwappedFrames() const; int GetLastWidth() const; int GetLastHeight() const; int64_t GetLastTimestamp() const; - void SetStats(const webrtc::VideoSendStream::Stats& stats); + void SetStats(const VideoSendStream::Stats& stats); int num_encoder_reconfigurations() const { return num_encoder_reconfigurations_; } @@ -183,76 +213,70 @@ class FakeVideoSendStream final return resolution_scaling_enabled_; } bool framerate_scaling_enabled() const { return framerate_scaling_enabled_; } - void InjectVideoSinkWants(const rtc::VideoSinkWants& wants); + void InjectVideoSinkWants(const VideoSinkWants& wants); - rtc::VideoSourceInterface* source() const { - return source_; - } + VideoSourceInterface* source() const { return source_; } void GenerateKeyFrame(const std::vector& rids); const std::vector& GetKeyFramesRequested() const { return keyframes_requested_by_rid_; } private: - // rtc::VideoSinkInterface implementation. - void OnFrame(const webrtc::VideoFrame& frame) override; + // webrtc::VideoSinkInterface implementation. + void OnFrame(const VideoFrame& frame) override; // webrtc::VideoSendStream implementation. 
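// For example, a test that reconfigures the stream with an AV1 payload can
// read the codec-specific settings back through the new accessor (a sketch):
//   VideoCodecAV1 av1;
//   ASSERT_TRUE(stream->GetAv1Settings(&av1));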
- void StartPerRtpStream(std::vector active_layers) override; void Start() override; void Stop() override; bool started() override { return IsSending(); } - void AddAdaptationResource( - rtc::scoped_refptr resource) override; - std::vector> GetAdaptationResources() - override; - void SetSource( - rtc::VideoSourceInterface* source, - const webrtc::DegradationPreference& degradation_preference) override; - webrtc::VideoSendStream::Stats GetStats() override; - - void ReconfigureVideoEncoder(webrtc::VideoEncoderConfig config) override; - void ReconfigureVideoEncoder(webrtc::VideoEncoderConfig config, - webrtc::SetParametersCallback callback) override; + void AddAdaptationResource(scoped_refptr resource) override; + std::vector> GetAdaptationResources() override; + void SetSource(VideoSourceInterface* source, + const DegradationPreference& degradation_preference) override; + VideoSendStream::Stats GetStats() override; + void ReconfigureVideoEncoder(VideoEncoderConfig config) override; + void ReconfigureVideoEncoder(VideoEncoderConfig config, + SetParametersCallback callback) override; + + const Environment env_; bool sending_; - webrtc::VideoSendStream::Config config_; - webrtc::VideoEncoderConfig encoder_config_; - std::vector video_streams_; - rtc::VideoSinkWants sink_wants_; + VideoSendStream::Config config_; + VideoEncoderConfig encoder_config_; + std::vector video_streams_; + VideoSinkWants sink_wants_; bool codec_settings_set_; union CodecSpecificSettings { - webrtc::VideoCodecVP8 vp8; - webrtc::VideoCodecVP9 vp9; - webrtc::VideoCodecH264 h264; + VideoCodecVP8 vp8; + VideoCodecVP9 vp9; + VideoCodecH264 h264; + VideoCodecAV1 av1; } codec_specific_settings_; bool resolution_scaling_enabled_; bool framerate_scaling_enabled_; - rtc::VideoSourceInterface* source_; + VideoSourceInterface* source_; int num_swapped_frames_; - absl::optional last_frame_; - webrtc::VideoSendStream::Stats stats_; + std::optional last_frame_; + VideoSendStream::Stats stats_; int num_encoder_reconfigurations_ = 0; std::vector keyframes_requested_by_rid_; }; -class FakeVideoReceiveStream final - : public webrtc::VideoReceiveStreamInterface { +class FakeVideoReceiveStream final : public VideoReceiveStreamInterface { public: - explicit FakeVideoReceiveStream( - webrtc::VideoReceiveStreamInterface::Config config); + explicit FakeVideoReceiveStream(VideoReceiveStreamInterface::Config config); - const webrtc::VideoReceiveStreamInterface::Config& GetConfig() const; + const VideoReceiveStreamInterface::Config& GetConfig() const; bool IsReceiving() const; - void InjectFrame(const webrtc::VideoFrame& frame); + void InjectFrame(const VideoFrame& frame); - void SetStats(const webrtc::VideoReceiveStreamInterface::Stats& stats); + void SetStats(const VideoReceiveStreamInterface::Stats& stats); - std::vector GetSources() const override { - return std::vector(); + std::vector GetSources() const override { + return std::vector(); } int base_mininum_playout_delay_ms() const { @@ -265,24 +289,23 @@ class FakeVideoReceiveStream final void UpdateRtxSsrc(uint32_t ssrc) { config_.rtp.rtx_ssrc = ssrc; } - void SetFrameDecryptor(rtc::scoped_refptr - frame_decryptor) override {} + void SetFrameDecryptor(scoped_refptr + /* frame_decryptor */) override {} void SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) + scoped_refptr /* frame_transformer */) override {} - RecordingState SetAndGetRecordingState(RecordingState state, - bool generate_key_frame) override { + RecordingState SetAndGetRecordingState( + 
RecordingState /* state */, + bool /* generate_key_frame */) override { return RecordingState(); } void GenerateKeyFrame() override {} - void SetRtcpMode(webrtc::RtcpMode mode) override { - config_.rtp.rtcp_mode = mode; - } + void SetRtcpMode(RtcpMode mode) override { config_.rtp.rtcp_mode = mode; } - void SetFlexFecProtection(webrtc::RtpPacketSinkInterface* sink) override { + void SetFlexFecProtection(RtpPacketSinkInterface* sink) override { config_.rtp.packet_sink_ = sink; config_.rtp.protected_by_flexfec = (sink != nullptr); } @@ -291,7 +314,7 @@ class FakeVideoReceiveStream final config_.rtp.lntf.enabled = enabled; } - void SetNackHistory(webrtc::TimeDelta history) override { + void SetNackHistory(TimeDelta history) override { config_.rtp.nack.rtp_history_ms = history.ms(); } @@ -313,7 +336,7 @@ class FakeVideoReceiveStream final void Start() override; void Stop() override; - webrtc::VideoReceiveStreamInterface::Stats GetStats() const override; + VideoReceiveStreamInterface::Stats GetStats() const override; bool SetBaseMinimumPlayoutDelayMs(int delay_ms) override { base_mininum_playout_delay_ms_ = delay_ms; @@ -325,50 +348,51 @@ class FakeVideoReceiveStream final } private: - webrtc::VideoReceiveStreamInterface::Config config_; + VideoReceiveStreamInterface::Config config_; bool receiving_; - webrtc::VideoReceiveStreamInterface::Stats stats_; + VideoReceiveStreamInterface::Stats stats_; int base_mininum_playout_delay_ms_ = 0; }; -class FakeFlexfecReceiveStream final : public webrtc::FlexfecReceiveStream { +class FakeFlexfecReceiveStream final : public FlexfecReceiveStream { public: - explicit FakeFlexfecReceiveStream( - const webrtc::FlexfecReceiveStream::Config config); + explicit FakeFlexfecReceiveStream(const FlexfecReceiveStream::Config config); void SetLocalSsrc(uint32_t local_ssrc) { config_.rtp.local_ssrc = local_ssrc; } - void SetRtcpMode(webrtc::RtcpMode mode) override { config_.rtcp_mode = mode; } + void SetRtcpMode(RtcpMode mode) override { config_.rtcp_mode = mode; } int payload_type() const override { return config_.payload_type; } void SetPayloadType(int payload_type) override { config_.payload_type = payload_type; } - const webrtc::FlexfecReceiveStream::Config& GetConfig() const; + const FlexfecReceiveStream::Config& GetConfig() const; uint32_t remote_ssrc() const { return config_.rtp.remote_ssrc; } - const webrtc::ReceiveStatistics* GetStats() const override { return nullptr; } + const ReceiveStatistics* GetStats() const override { return nullptr; } private: - void OnRtpPacket(const webrtc::RtpPacketReceived& packet) override; + void OnRtpPacket(const RtpPacketReceived& packet) override; - webrtc::FlexfecReceiveStream::Config config_; + FlexfecReceiveStream::Config config_; }; -class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver { +class FakeCall final : public Call, public PacketReceiver { public: - explicit FakeCall(webrtc::test::ScopedKeyValueConfig* field_trials = nullptr); - FakeCall(webrtc::TaskQueueBase* worker_thread, - webrtc::TaskQueueBase* network_thread, - webrtc::test::ScopedKeyValueConfig* field_trials = nullptr); + explicit FakeCall(const Environment& env); + FakeCall(const Environment& env, + TaskQueueBase* worker_thread, + TaskQueueBase* network_thread); ~FakeCall() override; - webrtc::MockRtpTransportControllerSend* GetMockTransportControllerSend() { + PayloadTypeSuggester* GetPayloadTypeSuggester() { return &pt_suggester_; } + + MockRtpTransportControllerSend* GetMockTransportControllerSend() { return &transport_controller_send_; 
} @@ -383,8 +407,8 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver { const std::vector& GetFlexfecReceiveStreams(); - rtc::SentPacket last_sent_packet() const { return last_sent_packet_; } - const webrtc::RtpPacketReceived& last_received_rtp_packet() const { + SentPacketInfo last_sent_packet() const { return last_sent_packet_; } + const RtpPacketReceived& last_received_rtp_packet() const { return last_received_rtp_packet_; } size_t GetDeliveredPacketsForSsrc(uint32_t ssrc) const { @@ -398,100 +422,95 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver { return last_sent_nonnegative_packet_id_; } - webrtc::NetworkState GetNetworkState(webrtc::MediaType media) const; + NetworkState GetNetworkState(MediaType media) const; int GetNumCreatedSendStreams() const; int GetNumCreatedReceiveStreams() const; - void SetStats(const webrtc::Call::Stats& stats); + void SetStats(const Call::Stats& stats); void SetClientBitratePreferences( - const webrtc::BitrateSettings& preferences) override {} - - void SetFieldTrial(const std::string& field_trial_string) { - trials_overrides_ = std::make_unique( - *trials_, field_trial_string); - } - - const webrtc::FieldTrialsView& trials() const override { return *trials_; } + const BitrateSettings& /* preferences */) override {} + const FieldTrialsView& trials() const override { return env_.field_trials(); } + void EnableSendCongestionControlFeedbackAccordingToRfc8888() override {} + int FeedbackAccordingToRfc8888Count() { return 0; } + int FeedbackAccordingToTransportCcCount() { return 0; } private: - webrtc::AudioSendStream* CreateAudioSendStream( - const webrtc::AudioSendStream::Config& config) override; - void DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) override; + AudioSendStream* CreateAudioSendStream( + const AudioSendStream::Config& config) override; + void DestroyAudioSendStream(AudioSendStream* send_stream) override; - webrtc::AudioReceiveStreamInterface* CreateAudioReceiveStream( - const webrtc::AudioReceiveStreamInterface::Config& config) override; + AudioReceiveStreamInterface* CreateAudioReceiveStream( + const AudioReceiveStreamInterface::Config& config) override; void DestroyAudioReceiveStream( - webrtc::AudioReceiveStreamInterface* receive_stream) override; + AudioReceiveStreamInterface* receive_stream) override; - webrtc::VideoSendStream* CreateVideoSendStream( - webrtc::VideoSendStream::Config config, - webrtc::VideoEncoderConfig encoder_config) override; - void DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) override; + VideoSendStream* CreateVideoSendStream( + VideoSendStream::Config config, + VideoEncoderConfig encoder_config) override; + void DestroyVideoSendStream(VideoSendStream* send_stream) override; - webrtc::VideoReceiveStreamInterface* CreateVideoReceiveStream( - webrtc::VideoReceiveStreamInterface::Config config) override; + VideoReceiveStreamInterface* CreateVideoReceiveStream( + VideoReceiveStreamInterface::Config config) override; void DestroyVideoReceiveStream( - webrtc::VideoReceiveStreamInterface* receive_stream) override; + VideoReceiveStreamInterface* receive_stream) override; - webrtc::FlexfecReceiveStream* CreateFlexfecReceiveStream( - const webrtc::FlexfecReceiveStream::Config config) override; + FlexfecReceiveStream* CreateFlexfecReceiveStream( + const FlexfecReceiveStream::Config config) override; void DestroyFlexfecReceiveStream( - webrtc::FlexfecReceiveStream* receive_stream) override; + FlexfecReceiveStream* receive_stream) override; - void 
AddAdaptationResource( - rtc::scoped_refptr resource) override; + void AddAdaptationResource(scoped_refptr resource) override; - webrtc::PacketReceiver* Receiver() override; + PacketReceiver* Receiver() override; - void DeliverRtcpPacket(rtc::CopyOnWriteBuffer packet) override {} + void DeliverRtcpPacket(CopyOnWriteBuffer /* packet */) override {} void DeliverRtpPacket( - webrtc::MediaType media_type, - webrtc::RtpPacketReceived packet, + MediaType media_type, + RtpPacketReceived packet, OnUndemuxablePacketHandler un_demuxable_packet_handler) override; - bool DeliverPacketInternal(webrtc::MediaType media_type, + bool DeliverPacketInternal(MediaType media_type, uint32_t ssrc, - const rtc::CopyOnWriteBuffer& packet, - webrtc::Timestamp arrival_time); + const CopyOnWriteBuffer& packet, + Timestamp arrival_time); - webrtc::RtpTransportControllerSendInterface* GetTransportControllerSend() - override { + RtpTransportControllerSendInterface* GetTransportControllerSend() override { return &transport_controller_send_; } - webrtc::Call::Stats GetStats() const override; + Call::Stats GetStats() const override; - webrtc::TaskQueueBase* network_thread() const override; - webrtc::TaskQueueBase* worker_thread() const override; + TaskQueueBase* network_thread() const override; + TaskQueueBase* worker_thread() const override; - void SignalChannelNetworkState(webrtc::MediaType media, - webrtc::NetworkState state) override; + void SignalChannelNetworkState(MediaType media, NetworkState state) override; void OnAudioTransportOverheadChanged( int transport_overhead_per_packet) override; - void OnLocalSsrcUpdated(webrtc::AudioReceiveStreamInterface& stream, + void OnLocalSsrcUpdated(AudioReceiveStreamInterface& stream, uint32_t local_ssrc) override; - void OnLocalSsrcUpdated(webrtc::VideoReceiveStreamInterface& stream, + void OnLocalSsrcUpdated(VideoReceiveStreamInterface& stream, uint32_t local_ssrc) override; - void OnLocalSsrcUpdated(webrtc::FlexfecReceiveStream& stream, + void OnLocalSsrcUpdated(FlexfecReceiveStream& stream, uint32_t local_ssrc) override; - void OnUpdateSyncGroup(webrtc::AudioReceiveStreamInterface& stream, + void OnUpdateSyncGroup(AudioReceiveStreamInterface& stream, absl::string_view sync_group) override; - void OnSentPacket(const rtc::SentPacket& sent_packet) override; + void OnSentPacket(const SentPacketInfo& sent_packet) override; - webrtc::TaskQueueBase* const network_thread_; - webrtc::TaskQueueBase* const worker_thread_; + const Environment env_; + TaskQueueBase* const network_thread_; + TaskQueueBase* const worker_thread_; - ::testing::NiceMock + ::testing::NiceMock transport_controller_send_; - webrtc::NetworkState audio_network_state_; - webrtc::NetworkState video_network_state_; - rtc::SentPacket last_sent_packet_; - webrtc::RtpPacketReceived last_received_rtp_packet_; + NetworkState audio_network_state_; + NetworkState video_network_state_; + SentPacketInfo last_sent_packet_; + RtpPacketReceived last_received_rtp_packet_; int last_sent_nonnegative_packet_id_ = -1; int next_stream_id_ = 665; - webrtc::Call::Stats stats_; + Call::Stats stats_; std::vector video_send_streams_; std::vector audio_send_streams_; std::vector video_receive_streams_; @@ -502,16 +521,21 @@ class FakeCall final : public webrtc::Call, public webrtc::PacketReceiver { int num_created_send_streams_; int num_created_receive_streams_; - // The field trials that are in use, either supplied by caller - // or pointer to &fallback_trials_. 
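// With the Environment-based constructors, field trials now come from the
// injected Environment (see trials() above) rather than from a locally owned
// fallback. A minimal construction sketch, assuming CreateEnvironment() from
// api/environment/environment_factory.h:
//   FakeCall call(CreateEnvironment());
//   EXPECT_EQ(0, call.GetNumCreatedSendStreams());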
- webrtc::test::ScopedKeyValueConfig* trials_; - - // fallback_trials_ is used if caller does not provide any field trials. - webrtc::test::ScopedKeyValueConfig fallback_trials_; - - // An extra field trial that can be set using SetFieldTrial. - std::unique_ptr trials_overrides_; + FakePayloadTypeSuggester pt_suggester_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::FakeAudioReceiveStream; +using ::webrtc::FakeAudioSendStream; +using ::webrtc::FakeCall; +using ::webrtc::FakeFlexfecReceiveStream; +using ::webrtc::FakeVideoReceiveStream; +using ::webrtc::FakeVideoSendStream; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_ENGINE_FAKE_WEBRTC_CALL_H_ diff --git a/media/engine/fake_webrtc_video_engine.cc b/media/engine/fake_webrtc_video_engine.cc index adbaf6cce3..89a536d449 100644 --- a/media/engine/fake_webrtc_video_engine.cc +++ b/media/engine/fake_webrtc_video_engine.cc @@ -11,25 +11,43 @@ #include "media/engine/fake_webrtc_video_engine.h" #include +#include #include +#include +#include +#include #include "absl/strings/match.h" +#include "api/environment/environment.h" +#include "api/fec_controller_override.h" +#include "api/units/time_delta.h" +#include "api/video/encoded_image.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_decoder.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_factory.h" #include "media/base/codec.h" #include "media/base/media_constants.h" #include "media/engine/simulcast_encoder_adapter.h" #include "modules/video_coding/include/video_error_codes.h" +#include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" -namespace cricket { +namespace webrtc { namespace { -static constexpr webrtc::TimeDelta kEventTimeout = - webrtc::TimeDelta::Seconds(10); +using ::webrtc::Environment; -bool IsScalabilityModeSupported( - const std::vector& formats, - absl::optional scalability_mode) { +static constexpr TimeDelta kEventTimeout = TimeDelta::Seconds(10); + +bool IsScalabilityModeSupported(const std::vector& formats, + std::optional scalability_mode) { if (!scalability_mode.has_value()) { return true; } @@ -55,18 +73,17 @@ FakeWebRtcVideoDecoder::~FakeWebRtcVideoDecoder() { } } -bool FakeWebRtcVideoDecoder::Configure(const Settings& settings) { +bool FakeWebRtcVideoDecoder::Configure(const Settings& /* settings */) { return true; } -int32_t FakeWebRtcVideoDecoder::Decode(const webrtc::EncodedImage&, - int64_t) { +int32_t FakeWebRtcVideoDecoder::Decode(const EncodedImage&, int64_t) { num_frames_received_++; return WEBRTC_VIDEO_CODEC_OK; } int32_t FakeWebRtcVideoDecoder::RegisterDecodeCompleteCallback( - webrtc::DecodedImageCallback*) { + DecodedImageCallback*) { return WEBRTC_VIDEO_CODEC_OK; } @@ -82,22 +99,26 @@ int FakeWebRtcVideoDecoder::GetNumFramesReceived() const { FakeWebRtcVideoDecoderFactory::FakeWebRtcVideoDecoderFactory() : num_created_decoders_(0) {} -std::vector -FakeWebRtcVideoDecoderFactory::GetSupportedFormats() const { - std::vector formats; +std::vector FakeWebRtcVideoDecoderFactory::GetSupportedFormats() + const { + std::vector 
formats; for (const webrtc::SdpVideoFormat& format : supported_codec_formats_) { - // Don't add same codec twice. - if (!format.IsCodecInList(formats)) - formats.push_back(format); + // We need to test erroneous scenarios, so just warn if there's + // a duplicate. + if (format.IsCodecInList(formats)) { + RTC_LOG(LS_WARNING) << "GetSupportedFormats found a duplicate format: " + << format << ", check that this is expected."; + } + formats.push_back(format); } return formats; } -std::unique_ptr -FakeWebRtcVideoDecoderFactory::CreateVideoDecoder( - const webrtc::SdpVideoFormat& format) { +std::unique_ptr FakeWebRtcVideoDecoderFactory::Create( + const Environment& /* env */, + const SdpVideoFormat& format) { if (format.IsCodecInList(supported_codec_formats_)) { num_created_decoders_++; std::unique_ptr decoder = @@ -115,12 +136,17 @@ void FakeWebRtcVideoDecoderFactory::DecoderDestroyed( decoders_.end()); } +void FakeWebRtcVideoDecoderFactory::AddSupportedVideoCodec( + const SdpVideoFormat& format) { + supported_codec_formats_.push_back(format); +} + void FakeWebRtcVideoDecoderFactory::AddSupportedVideoCodecType( const std::string& name) { - // This is to match the default H264 params of cricket::VideoCodec. - cricket::VideoCodec video_codec = cricket::CreateVideoCodec(name); + // This is to match the default H264 params of Codec. + Codec video_codec = CreateVideoCodec(name); supported_codec_formats_.push_back( - webrtc::SdpVideoFormat(video_codec.name, video_codec.params)); + SdpVideoFormat(video_codec.name, video_codec.params)); } int FakeWebRtcVideoDecoderFactory::GetNumCreatedDecoders() { @@ -144,30 +170,30 @@ FakeWebRtcVideoEncoder::~FakeWebRtcVideoEncoder() { } void FakeWebRtcVideoEncoder::SetFecControllerOverride( - webrtc::FecControllerOverride* fec_controller_override) { + FecControllerOverride* /* fec_controller_override */) { // Ignored. 
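As the change above shows, FakeWebRtcVideoDecoderFactory::GetSupportedFormats() no longer de-duplicates registered formats; it keeps duplicates and only logs a warning, so tests can exercise erroneous configurations. A hedged sketch of what a test can now do:

    // Sketch: duplicate registrations are preserved verbatim.
    webrtc::FakeWebRtcVideoDecoderFactory factory;
    factory.AddSupportedVideoCodecType("VP8");
    factory.AddSupportedVideoCodecType("VP8");
    // Two entries are expected; a warning is logged when they are listed.
    RTC_CHECK(factory.GetSupportedFormats().size() == 2);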
} int32_t FakeWebRtcVideoEncoder::InitEncode( - const webrtc::VideoCodec* codecSettings, - const VideoEncoder::Settings& settings) { - webrtc::MutexLock lock(&mutex_); + const VideoCodec* codecSettings, + const VideoEncoder::Settings& /* settings */) { + MutexLock lock(&mutex_); codec_settings_ = *codecSettings; init_encode_event_.Set(); return WEBRTC_VIDEO_CODEC_OK; } int32_t FakeWebRtcVideoEncoder::Encode( - const webrtc::VideoFrame& inputImage, - const std::vector* frame_types) { - webrtc::MutexLock lock(&mutex_); + const VideoFrame& /* inputImage */, + const std::vector* /* frame_types */) { + MutexLock lock(&mutex_); ++num_frames_encoded_; init_encode_event_.Set(); return WEBRTC_VIDEO_CODEC_OK; } int32_t FakeWebRtcVideoEncoder::RegisterEncodeCompleteCallback( - webrtc::EncodedImageCallback* callback) { + EncodedImageCallback* /* callback */) { return WEBRTC_VIDEO_CODEC_OK; } @@ -175,11 +201,10 @@ int32_t FakeWebRtcVideoEncoder::Release() { return WEBRTC_VIDEO_CODEC_OK; } -void FakeWebRtcVideoEncoder::SetRates(const RateControlParameters& parameters) { -} +void FakeWebRtcVideoEncoder::SetRates( + const RateControlParameters& /* parameters */) {} -webrtc::VideoEncoder::EncoderInfo FakeWebRtcVideoEncoder::GetEncoderInfo() - const { +VideoEncoder::EncoderInfo FakeWebRtcVideoEncoder::GetEncoderInfo() const { EncoderInfo info; info.is_hardware_accelerated = true; return info; @@ -189,13 +214,13 @@ bool FakeWebRtcVideoEncoder::WaitForInitEncode() { return init_encode_event_.Wait(kEventTimeout); } -webrtc::VideoCodec FakeWebRtcVideoEncoder::GetCodecSettings() { - webrtc::MutexLock lock(&mutex_); +VideoCodec FakeWebRtcVideoEncoder::GetCodecSettings() { + MutexLock lock(&mutex_); return codec_settings_; } int FakeWebRtcVideoEncoder::GetNumEncodedFrames() { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return num_frames_encoded_; } @@ -203,9 +228,9 @@ int FakeWebRtcVideoEncoder::GetNumEncodedFrames() { FakeWebRtcVideoEncoderFactory::FakeWebRtcVideoEncoderFactory() : num_created_encoders_(0), vp8_factory_mode_(false) {} -std::vector -FakeWebRtcVideoEncoderFactory::GetSupportedFormats() const { - std::vector formats; +std::vector FakeWebRtcVideoEncoderFactory::GetSupportedFormats() + const { + std::vector formats; for (const webrtc::SdpVideoFormat& format : formats_) { // Don't add same codec twice. @@ -216,11 +241,11 @@ FakeWebRtcVideoEncoderFactory::GetSupportedFormats() const { return formats; } -webrtc::VideoEncoderFactory::CodecSupport +VideoEncoderFactory::CodecSupport FakeWebRtcVideoEncoderFactory::QueryCodecSupport( - const webrtc::SdpVideoFormat& format, - absl::optional scalability_mode) const { - std::vector supported_formats; + const SdpVideoFormat& format, + std::optional scalability_mode) const { + std::vector supported_formats; for (const auto& f : formats_) { if (format.IsSameCodec(f)) supported_formats.push_back(f); @@ -232,11 +257,11 @@ FakeWebRtcVideoEncoderFactory::QueryCodecSupport( return {.is_supported = false}; } -std::unique_ptr -FakeWebRtcVideoEncoderFactory::CreateVideoEncoder( - const webrtc::SdpVideoFormat& format) { - webrtc::MutexLock lock(&mutex_); - std::unique_ptr encoder; +std::unique_ptr FakeWebRtcVideoEncoderFactory::Create( + const Environment& env, + const SdpVideoFormat& format) { + MutexLock lock(&mutex_); + std::unique_ptr encoder; if (format.IsCodecInList(formats_)) { if (absl::EqualsIgnoreCase(format.name, kVp8CodecName) && !vp8_factory_mode_) { @@ -244,7 +269,8 @@ FakeWebRtcVideoEncoderFactory::CreateVideoEncoder( // encoders. 
Enter vp8_factory_mode so that we now create these encoders // instead of more adapters. vp8_factory_mode_ = true; - encoder = std::make_unique(this, format); + encoder = std::make_unique( + env, /*primary_factory=*/this, /*fallback_factory=*/nullptr, format); } else { num_created_encoders_++; created_video_encoder_event_.Set(); @@ -257,46 +283,48 @@ FakeWebRtcVideoEncoderFactory::CreateVideoEncoder( bool FakeWebRtcVideoEncoderFactory::WaitForCreatedVideoEncoders( int num_encoders) { - int64_t start_offset_ms = rtc::TimeMillis(); + int64_t start_offset_ms = TimeMillis(); int64_t wait_time = kEventTimeout.ms(); do { if (GetNumCreatedEncoders() >= num_encoders) return true; - wait_time = kEventTimeout.ms() - (rtc::TimeMillis() - start_offset_ms); - } while (wait_time > 0 && created_video_encoder_event_.Wait( - webrtc::TimeDelta::Millis(wait_time))); + wait_time = kEventTimeout.ms() - (TimeMillis() - start_offset_ms); + } while (wait_time > 0 && + created_video_encoder_event_.Wait(TimeDelta::Millis(wait_time))); return false; } void FakeWebRtcVideoEncoderFactory::EncoderDestroyed( FakeWebRtcVideoEncoder* encoder) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); encoders_.erase(std::remove(encoders_.begin(), encoders_.end(), encoder), encoders_.end()); } void FakeWebRtcVideoEncoderFactory::AddSupportedVideoCodec( - const webrtc::SdpVideoFormat& format) { + const SdpVideoFormat& format) { formats_.push_back(format); } void FakeWebRtcVideoEncoderFactory::AddSupportedVideoCodecType( - const std::string& name) { - // This is to match the default H264 params of cricket::VideoCodec. - cricket::VideoCodec video_codec = cricket::CreateVideoCodec(name); + const std::string& name, + const std::vector& scalability_modes) { + // This is to match the default H264 params of Codec. 
+ Codec video_codec = CreateVideoCodec(name); formats_.push_back( - webrtc::SdpVideoFormat(video_codec.name, video_codec.params)); + SdpVideoFormat(video_codec.name, video_codec.params, + {scalability_modes.begin(), scalability_modes.end()})); } int FakeWebRtcVideoEncoderFactory::GetNumCreatedEncoders() { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return num_created_encoders_; } const std::vector FakeWebRtcVideoEncoderFactory::encoders() { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return encoders_; } -} // namespace cricket +} // namespace webrtc diff --git a/media/engine/fake_webrtc_video_engine.h b/media/engine/fake_webrtc_video_engine.h index 87d107ac37..fcc75b55c0 100644 --- a/media/engine/fake_webrtc_video_engine.h +++ b/media/engine/fake_webrtc_video_engine.h @@ -15,39 +15,40 @@ #include #include +#include #include #include +#include "api/environment/environment.h" #include "api/fec_controller_override.h" #include "api/video/encoded_image.h" -#include "api/video/video_bitrate_allocation.h" #include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" -#include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/event.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" -namespace cricket { +namespace webrtc { class FakeWebRtcVideoDecoderFactory; class FakeWebRtcVideoEncoderFactory; // Fake class for mocking out webrtc::VideoDecoder -class FakeWebRtcVideoDecoder : public webrtc::VideoDecoder { +class FakeWebRtcVideoDecoder : public VideoDecoder { public: explicit FakeWebRtcVideoDecoder(FakeWebRtcVideoDecoderFactory* factory); ~FakeWebRtcVideoDecoder(); bool Configure(const Settings& settings) override; - int32_t Decode(const webrtc::EncodedImage&, int64_t) override; - int32_t RegisterDecodeCompleteCallback( - webrtc::DecodedImageCallback*) override; + int32_t Decode(const EncodedImage&, int64_t) override; + int32_t RegisterDecodeCompleteCallback(DecodedImageCallback*) override; int32_t Release() override; int GetNumFramesReceived() const; @@ -58,85 +59,98 @@ class FakeWebRtcVideoDecoder : public webrtc::VideoDecoder { }; // Fake class for mocking out webrtc::VideoDecoderFactory. 
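The new optional scalability_modes argument above lets a test advertise per-codec scalability support in the SdpVideoFormat it registers. A small sketch, assuming the ScalabilityMode enum from api/video_codecs/scalability_mode.h:

    // Sketch: advertise VP9 with two scalability modes so the fake factory's
    // QueryCodecSupport() can report them as supported in tests.
    webrtc::FakeWebRtcVideoEncoderFactory encoder_factory;
    encoder_factory.AddSupportedVideoCodecType(
        "VP9",
        {webrtc::ScalabilityMode::kL1T3, webrtc::ScalabilityMode::kL3T3});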
-class FakeWebRtcVideoDecoderFactory : public webrtc::VideoDecoderFactory { +class FakeWebRtcVideoDecoderFactory : public VideoDecoderFactory { public: FakeWebRtcVideoDecoderFactory(); - std::vector GetSupportedFormats() const override; - std::unique_ptr CreateVideoDecoder( - const webrtc::SdpVideoFormat& format) override; + std::vector GetSupportedFormats() const override; + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override; void DecoderDestroyed(FakeWebRtcVideoDecoder* decoder); + void AddSupportedVideoCodec(const SdpVideoFormat& format); void AddSupportedVideoCodecType(const std::string& name); int GetNumCreatedDecoders(); const std::vector& decoders(); private: - std::vector supported_codec_formats_; + std::vector supported_codec_formats_; std::vector decoders_; int num_created_decoders_; }; // Fake class for mocking out webrtc::VideoEnoder -class FakeWebRtcVideoEncoder : public webrtc::VideoEncoder { +class FakeWebRtcVideoEncoder : public VideoEncoder { public: explicit FakeWebRtcVideoEncoder(FakeWebRtcVideoEncoderFactory* factory); ~FakeWebRtcVideoEncoder(); void SetFecControllerOverride( - webrtc::FecControllerOverride* fec_controller_override) override; - int32_t InitEncode(const webrtc::VideoCodec* codecSettings, + FecControllerOverride* fec_controller_override) override; + int32_t InitEncode(const VideoCodec* codecSettings, const VideoEncoder::Settings& settings) override; - int32_t Encode( - const webrtc::VideoFrame& inputImage, - const std::vector* frame_types) override; + int32_t Encode(const VideoFrame& inputImage, + const std::vector* frame_types) override; int32_t RegisterEncodeCompleteCallback( - webrtc::EncodedImageCallback* callback) override; + EncodedImageCallback* callback) override; int32_t Release() override; void SetRates(const RateControlParameters& parameters) override; - webrtc::VideoEncoder::EncoderInfo GetEncoderInfo() const override; + VideoEncoder::EncoderInfo GetEncoderInfo() const override; bool WaitForInitEncode(); - webrtc::VideoCodec GetCodecSettings(); + VideoCodec GetCodecSettings(); int GetNumEncodedFrames(); private: - webrtc::Mutex mutex_; - rtc::Event init_encode_event_; + Mutex mutex_; + Event init_encode_event_; int num_frames_encoded_ RTC_GUARDED_BY(mutex_); - webrtc::VideoCodec codec_settings_ RTC_GUARDED_BY(mutex_); + VideoCodec codec_settings_ RTC_GUARDED_BY(mutex_); FakeWebRtcVideoEncoderFactory* factory_; }; // Fake class for mocking out webrtc::VideoEncoderFactory. 
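The factory entry point is now Create(const Environment&, const SdpVideoFormat&) rather than CreateVideoDecoder(format). A minimal call-site sketch, assuming CreateEnvironment() from api/environment/environment_factory.h as used by the unit tests later in this change:

    webrtc::FakeWebRtcVideoDecoderFactory decoder_factory;
    decoder_factory.AddSupportedVideoCodec(webrtc::SdpVideoFormat::VP8());

    const webrtc::Environment env = webrtc::CreateEnvironment();
    std::unique_ptr<webrtc::VideoDecoder> decoder =
        decoder_factory.Create(env, webrtc::SdpVideoFormat::VP8());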
-class FakeWebRtcVideoEncoderFactory : public webrtc::VideoEncoderFactory { +class FakeWebRtcVideoEncoderFactory : public VideoEncoderFactory { public: FakeWebRtcVideoEncoderFactory(); - std::vector GetSupportedFormats() const override; - webrtc::VideoEncoderFactory::CodecSupport QueryCodecSupport( - const webrtc::SdpVideoFormat& format, - absl::optional scalability_mode) const override; - std::unique_ptr CreateVideoEncoder( - const webrtc::SdpVideoFormat& format) override; + std::vector GetSupportedFormats() const override; + VideoEncoderFactory::CodecSupport QueryCodecSupport( + const SdpVideoFormat& format, + std::optional scalability_mode) const override; + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override; bool WaitForCreatedVideoEncoders(int num_encoders); void EncoderDestroyed(FakeWebRtcVideoEncoder* encoder); void set_encoders_have_internal_sources(bool internal_source); - void AddSupportedVideoCodec(const webrtc::SdpVideoFormat& format); - void AddSupportedVideoCodecType(const std::string& name); + void AddSupportedVideoCodec(const SdpVideoFormat& format); + void AddSupportedVideoCodecType( + const std::string& name, + const std::vector& scalability_modes = {}); int GetNumCreatedEncoders(); const std::vector encoders(); private: - webrtc::Mutex mutex_; - rtc::Event created_video_encoder_event_; - std::vector formats_; + Mutex mutex_; + Event created_video_encoder_event_; + std::vector formats_; std::vector encoders_ RTC_GUARDED_BY(mutex_); int num_created_encoders_ RTC_GUARDED_BY(mutex_); bool vp8_factory_mode_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::FakeWebRtcVideoDecoder; +using ::webrtc::FakeWebRtcVideoDecoderFactory; +using ::webrtc::FakeWebRtcVideoEncoder; +using ::webrtc::FakeWebRtcVideoEncoderFactory; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_ENGINE_FAKE_WEBRTC_VIDEO_ENGINE_H_ diff --git a/media/engine/internal_decoder_factory.cc b/media/engine/internal_decoder_factory.cc index e761fd60c8..b87fe2f2c3 100644 --- a/media/engine/internal_decoder_factory.cc +++ b/media/engine/internal_decoder_factory.cc @@ -10,18 +10,22 @@ #include "media/engine/internal_decoder_factory.h" +#include +#include + #include "absl/strings/match.h" -#include "api/video_codecs/av1_profile.h" +#include "api/environment/environment.h" +#include "api/video/video_codec_type.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_codec.h" -#include "media/base/codec.h" +#include "api/video_codecs/video_decoder.h" +#include "api/video_codecs/video_decoder_factory.h" #include "media/base/media_constants.h" #include "modules/video_coding/codecs/h264/include/h264.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" #if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY) #include "modules/video_coding/codecs/av1/dav1d_decoder.h" // nogncheck @@ -33,7 +37,7 @@ namespace { constexpr bool kDav1dIsIncluded = true; #else constexpr bool kDav1dIsIncluded = false; -std::unique_ptr CreateDav1dDecoder() { +std::unique_ptr CreateDav1dDecoder(const Environment& env) { return nullptr; } #endif @@ -43,17 +47,15 @@ std::unique_ptr 
CreateDav1dDecoder() { std::vector InternalDecoderFactory::GetSupportedFormats() const { std::vector formats; - formats.push_back(SdpVideoFormat(cricket::kVp8CodecName)); + formats.push_back(SdpVideoFormat::VP8()); for (const SdpVideoFormat& format : SupportedVP9DecoderCodecs()) formats.push_back(format); for (const SdpVideoFormat& h264_format : SupportedH264DecoderCodecs()) formats.push_back(h264_format); if (kDav1dIsIncluded) { - formats.push_back(SdpVideoFormat(cricket::kAv1CodecName)); - formats.push_back(SdpVideoFormat( - cricket::kAv1CodecName, - {{kAV1FmtpProfile, AV1ProfileToString(AV1Profile::kProfile1).data()}})); + formats.push_back(SdpVideoFormat::AV1Profile0()); + formats.push_back(SdpVideoFormat::AV1Profile1()); } return formats; @@ -77,7 +79,8 @@ VideoDecoderFactory::CodecSupport InternalDecoderFactory::QueryCodecSupport( return codec_support; } -std::unique_ptr InternalDecoderFactory::CreateVideoDecoder( +std::unique_ptr InternalDecoderFactory::Create( + const Environment& env, const SdpVideoFormat& format) { if (!format.IsCodecInList(GetSupportedFormats())) { RTC_LOG(LS_WARNING) << "Trying to create decoder for unsupported format. " @@ -85,16 +88,15 @@ std::unique_ptr InternalDecoderFactory::CreateVideoDecoder( return nullptr; } - if (absl::EqualsIgnoreCase(format.name, cricket::kVp8CodecName)) - return VP8Decoder::Create(); - if (absl::EqualsIgnoreCase(format.name, cricket::kVp9CodecName)) + if (absl::EqualsIgnoreCase(format.name, kVp8CodecName)) + return CreateVp8Decoder(env); + if (absl::EqualsIgnoreCase(format.name, kVp9CodecName)) return VP9Decoder::Create(); - if (absl::EqualsIgnoreCase(format.name, cricket::kH264CodecName)) + if (absl::EqualsIgnoreCase(format.name, kH264CodecName)) return H264Decoder::Create(); - if (absl::EqualsIgnoreCase(format.name, cricket::kAv1CodecName) && - kDav1dIsIncluded) { - return CreateDav1dDecoder(); + if (absl::EqualsIgnoreCase(format.name, kAv1CodecName) && kDav1dIsIncluded) { + return CreateDav1dDecoder(env); } RTC_DCHECK_NOTREACHED(); diff --git a/media/engine/internal_decoder_factory.h b/media/engine/internal_decoder_factory.h index 0129fb2173..a2cd362069 100644 --- a/media/engine/internal_decoder_factory.h +++ b/media/engine/internal_decoder_factory.h @@ -14,6 +14,7 @@ #include #include +#include "api/environment/environment.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_decoder_factory.h" @@ -26,8 +27,8 @@ class RTC_EXPORT InternalDecoderFactory : public VideoDecoderFactory { std::vector GetSupportedFormats() const override; CodecSupport QueryCodecSupport(const SdpVideoFormat& format, bool reference_scaling) const override; - std::unique_ptr CreateVideoDecoder( - const SdpVideoFormat& format) override; + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override; }; } // namespace webrtc diff --git a/media/engine/internal_decoder_factory_unittest.cc b/media/engine/internal_decoder_factory_unittest.cc index bb2e24d5d8..a7cd2762dc 100644 --- a/media/engine/internal_decoder_factory_unittest.cc +++ b/media/engine/internal_decoder_factory_unittest.cc @@ -10,13 +10,15 @@ #include "media/engine/internal_decoder_factory.h" -#include "api/video_codecs/av1_profile.h" +#include + +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_decoder.h" -#include "api/video_codecs/vp9_profile.h" +#include 
"api/video_codecs/video_decoder_factory.h" #include "media/base/media_constants.h" #include "system_wrappers/include/field_trial.h" -#include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" @@ -43,6 +45,8 @@ constexpr bool kDav1dIsIncluded = true; #else constexpr bool kDav1dIsIncluded = false; #endif +constexpr bool kH265Enabled = false; + constexpr VideoDecoderFactory::CodecSupport kSupported = { /*is_supported=*/true, /*is_power_efficient=*/false}; constexpr VideoDecoderFactory::CodecSupport kUnsupported = { @@ -54,107 +58,115 @@ MATCHER_P(Support, expected, "") { } TEST(InternalDecoderFactoryTest, Vp8) { + const Environment env = CreateEnvironment(); InternalDecoderFactory factory; std::unique_ptr decoder = - factory.CreateVideoDecoder(SdpVideoFormat(cricket::kVp8CodecName)); + factory.Create(env, SdpVideoFormat::VP8()); EXPECT_TRUE(decoder); } TEST(InternalDecoderFactoryTest, Vp9Profile0) { + const Environment env = CreateEnvironment(); InternalDecoderFactory factory; std::unique_ptr decoder = - factory.CreateVideoDecoder(SdpVideoFormat( - cricket::kVp9CodecName, - {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})); + factory.Create(env, SdpVideoFormat::VP9Profile0()); EXPECT_EQ(static_cast(decoder), kVp9Enabled); } TEST(InternalDecoderFactoryTest, Vp9Profile1) { + const Environment env = CreateEnvironment(); InternalDecoderFactory factory; std::unique_ptr decoder = - factory.CreateVideoDecoder(SdpVideoFormat( - cricket::kVp9CodecName, - {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile1)}})); + factory.Create(env, SdpVideoFormat::VP9Profile1()); EXPECT_EQ(static_cast(decoder), kVp9Enabled); } TEST(InternalDecoderFactoryTest, H264) { + const Environment env = CreateEnvironment(); InternalDecoderFactory factory; std::unique_ptr decoder = - factory.CreateVideoDecoder(SdpVideoFormat(cricket::kH264CodecName)); + factory.Create(env, SdpVideoFormat::H264()); EXPECT_EQ(static_cast(decoder), kH264Enabled); } TEST(InternalDecoderFactoryTest, Av1Profile0) { + const Environment env = CreateEnvironment(); InternalDecoderFactory factory; if (kDav1dIsIncluded) { EXPECT_THAT(factory.GetSupportedFormats(), - Contains(Field(&SdpVideoFormat::name, cricket::kAv1CodecName))); - EXPECT_TRUE( - factory.CreateVideoDecoder(SdpVideoFormat(cricket::kAv1CodecName))); + Contains(Field(&SdpVideoFormat::name, kAv1CodecName))); + EXPECT_TRUE(factory.Create(env, SdpVideoFormat::AV1Profile0())); } else { - EXPECT_THAT( - factory.GetSupportedFormats(), - Not(Contains(Field(&SdpVideoFormat::name, cricket::kAv1CodecName)))); + EXPECT_THAT(factory.GetSupportedFormats(), + Not(Contains(Field(&SdpVideoFormat::name, kAv1CodecName)))); } } +// At current stage since internal H.265 decoder is not implemented, +TEST(InternalDecoderFactoryTest, H265IsNotEnabled) { + const Environment env = CreateEnvironment(); + InternalDecoderFactory factory; + std::unique_ptr decoder = + factory.Create(env, SdpVideoFormat(kH265CodecName)); + EXPECT_EQ(static_cast(decoder), kH265Enabled); +} + #if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY) TEST(InternalDecoderFactoryTest, Av1) { InternalDecoderFactory factory; EXPECT_THAT(factory.GetSupportedFormats(), - Contains(Field(&SdpVideoFormat::name, cricket::kAv1CodecName))); + Contains(Field(&SdpVideoFormat::name, webrtc::kAv1CodecName))); } #endif TEST(InternalDecoderFactoryTest, Av1Profile1_Dav1dDecoderTrialEnabled) { + const Environment env = CreateEnvironment(); InternalDecoderFactory factory; - std::unique_ptr decoder = 
factory.CreateVideoDecoder( - SdpVideoFormat(cricket::kAv1CodecName, - {{kAV1FmtpProfile, - AV1ProfileToString(AV1Profile::kProfile1).data()}})); + std::unique_ptr decoder = + factory.Create(env, SdpVideoFormat::AV1Profile1()); EXPECT_EQ(static_cast(decoder), kDav1dIsIncluded); } TEST(InternalDecoderFactoryTest, QueryCodecSupportNoReferenceScaling) { InternalDecoderFactory factory; - EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat(cricket::kVp8CodecName), + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::VP8(), /*reference_scaling=*/false), Support(kSupported)); - EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat(cricket::kVp9CodecName), + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::VP9Profile0(), /*reference_scaling=*/false), Support(kVp9Enabled ? kSupported : kUnsupported)); - EXPECT_THAT(factory.QueryCodecSupport( - SdpVideoFormat(cricket::kVp9CodecName, - {{kVP9FmtpProfileId, - VP9ProfileToString(VP9Profile::kProfile1)}}), - /*reference_scaling=*/false), + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::VP9Profile1(), + /*reference_scaling=*/false), Support(kVp9Enabled ? kSupported : kUnsupported)); #if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY) - EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat(cricket::kAv1CodecName), + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::AV1Profile0(), /*reference_scaling=*/false), Support(kSupported)); + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::AV1Profile1(), + /*reference_scaling=*/false), + Support(kSupported)); + #endif } TEST(InternalDecoderFactoryTest, QueryCodecSupportReferenceScaling) { InternalDecoderFactory factory; // VP9 and AV1 support for spatial layers. - EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat(cricket::kVp9CodecName), + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::VP9Profile0(), /*reference_scaling=*/true), Support(kVp9Enabled ? kSupported : kUnsupported)); #if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY) - EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat(cricket::kAv1CodecName), + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::AV1Profile0(), /*reference_scaling=*/true), Support(kSupported)); #endif // Invalid config even though VP8 and H264 are supported. 
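The tests above switch to SdpVideoFormat's named constructors; judging by the replaced code, they are shorthand for the manual fmtp construction used before (kVP9FmtpProfileId and VP9ProfileToString come from api/video_codecs/vp9_profile.h, which the updated test no longer needs):

    // Equivalent constructions, per the before/after in the test diff above.
    webrtc::SdpVideoFormat explicit_profile(
        webrtc::kVp9CodecName,
        {{webrtc::kVP9FmtpProfileId,
          webrtc::VP9ProfileToString(webrtc::VP9Profile::kProfile0)}});
    webrtc::SdpVideoFormat shorthand = webrtc::SdpVideoFormat::VP9Profile0();
    RTC_DCHECK(explicit_profile.IsSameCodec(shorthand));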
- EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat(cricket::kH264CodecName), + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::H264(), /*reference_scaling=*/true), Support(kUnsupported)); - EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat(cricket::kVp8CodecName), + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::VP8(), /*reference_scaling=*/true), Support(kUnsupported)); } diff --git a/media/engine/internal_encoder_factory.cc b/media/engine/internal_encoder_factory.cc index 7b5fc24e0a..a00c056f0a 100644 --- a/media/engine/internal_encoder_factory.cc +++ b/media/engine/internal_encoder_factory.cc @@ -11,10 +11,13 @@ #include "media/engine/internal_encoder_factory.h" #include +#include #include #include -#include "absl/strings/match.h" +#include "api/environment/environment.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" #include "api/video_codecs/video_encoder_factory_template.h" #if defined(RTC_USE_LIBAOM_AV1_ENCODER) @@ -45,17 +48,17 @@ std::vector InternalEncoderFactory::GetSupportedFormats() return Factory().GetSupportedFormats(); } -std::unique_ptr InternalEncoderFactory::CreateVideoEncoder( +std::unique_ptr InternalEncoderFactory::Create( + const Environment& env, const SdpVideoFormat& format) { auto original_format = FuzzyMatchSdpVideoFormat(Factory().GetSupportedFormats(), format); - return original_format ? Factory().CreateVideoEncoder(*original_format) - : nullptr; + return original_format ? Factory().Create(env, *original_format) : nullptr; } VideoEncoderFactory::CodecSupport InternalEncoderFactory::QueryCodecSupport( const SdpVideoFormat& format, - absl::optional scalability_mode) const { + std::optional scalability_mode) const { auto original_format = FuzzyMatchSdpVideoFormat(Factory().GetSupportedFormats(), format); return original_format diff --git a/media/engine/internal_encoder_factory.h b/media/engine/internal_encoder_factory.h index 25480d088f..01c5ef7cfe 100644 --- a/media/engine/internal_encoder_factory.h +++ b/media/engine/internal_encoder_factory.h @@ -12,9 +12,13 @@ #define MEDIA_ENGINE_INTERNAL_ENCODER_FACTORY_H_ #include +#include #include #include +#include "api/environment/environment.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" #include "rtc_base/system/rtc_export.h" @@ -24,9 +28,9 @@ class RTC_EXPORT InternalEncoderFactory : public VideoEncoderFactory { std::vector GetSupportedFormats() const override; CodecSupport QueryCodecSupport( const SdpVideoFormat& format, - absl::optional scalability_mode) const override; - std::unique_ptr CreateVideoEncoder( - const SdpVideoFormat& format) override; + std::optional scalability_mode) const override; + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override; }; } // namespace webrtc diff --git a/media/engine/internal_encoder_factory_unittest.cc b/media/engine/internal_encoder_factory_unittest.cc index a1c90b8cf4..9a079d6bc2 100644 --- a/media/engine/internal_encoder_factory_unittest.cc +++ b/media/engine/internal_encoder_factory_unittest.cc @@ -10,9 +10,13 @@ #include "media/engine/internal_encoder_factory.h" +#include +#include + +#include "api/environment/environment_factory.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/vp9_profile.h" +#include "api/video_codecs/video_encoder_factory.h" 
#include "media/base/media_constants.h" #include "test/gmock.h" #include "test/gtest.h" @@ -33,6 +37,8 @@ constexpr bool kH264Enabled = true; #else constexpr bool kH264Enabled = false; #endif +constexpr bool kH265Enabled = false; + constexpr VideoEncoderFactory::CodecSupport kSupported = { /*is_supported=*/true, /*is_power_efficient=*/false}; constexpr VideoEncoderFactory::CodecSupport kUnsupported = { @@ -46,7 +52,7 @@ MATCHER_P(Support, expected, "") { TEST(InternalEncoderFactoryTest, Vp8) { InternalEncoderFactory factory; std::unique_ptr encoder = - factory.CreateVideoEncoder(SdpVideoFormat(cricket::kVp8CodecName)); + factory.Create(CreateEnvironment(), SdpVideoFormat::VP8()); EXPECT_TRUE(encoder); } @@ -54,14 +60,11 @@ TEST(InternalEncoderFactoryTest, Vp9Profile0) { InternalEncoderFactory factory; if (kVp9Enabled) { std::unique_ptr encoder = - factory.CreateVideoEncoder(SdpVideoFormat( - cricket::kVp9CodecName, - {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})); + factory.Create(CreateEnvironment(), SdpVideoFormat::VP9Profile0()); EXPECT_TRUE(encoder); } else { - EXPECT_THAT( - factory.GetSupportedFormats(), - Not(Contains(Field(&SdpVideoFormat::name, cricket::kVp9CodecName)))); + EXPECT_THAT(factory.GetSupportedFormats(), + Not(Contains(Field(&SdpVideoFormat::name, kVp9CodecName)))); } } @@ -69,70 +72,73 @@ TEST(InternalEncoderFactoryTest, H264) { InternalEncoderFactory factory; if (kH264Enabled) { std::unique_ptr encoder = - factory.CreateVideoEncoder(SdpVideoFormat(cricket::kH264CodecName)); + factory.Create(CreateEnvironment(), SdpVideoFormat::H264()); EXPECT_TRUE(encoder); } else { - EXPECT_THAT( - factory.GetSupportedFormats(), - Not(Contains(Field(&SdpVideoFormat::name, cricket::kH264CodecName)))); + EXPECT_THAT(factory.GetSupportedFormats(), + Not(Contains(Field(&SdpVideoFormat::name, kH264CodecName)))); } } +// At current stage H.265 is not supported by internal encoder factory. +TEST(InternalEncoderFactoryTest, H265IsNotEnabled) { + InternalEncoderFactory factory; + std::unique_ptr encoder = + factory.Create(CreateEnvironment(), SdpVideoFormat(kH265CodecName)); + EXPECT_EQ(static_cast(encoder), kH265Enabled); + EXPECT_THAT(factory.GetSupportedFormats(), + Not(Contains(Field(&SdpVideoFormat::name, kH265CodecName)))); +} + TEST(InternalEncoderFactoryTest, QueryCodecSupportWithScalabilityMode) { InternalEncoderFactory factory; // VP8 and VP9 supported for singles spatial layers. - EXPECT_THAT( - factory.QueryCodecSupport(SdpVideoFormat(cricket::kVp8CodecName), "L1T2"), - Support(kSupported)); - EXPECT_THAT( - factory.QueryCodecSupport(SdpVideoFormat(cricket::kVp9CodecName), "L1T3"), - Support(kVp9Enabled ? kSupported : kUnsupported)); + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::VP8(), "L1T2"), + Support(kSupported)); + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::VP9Profile0(), "L1T3"), + Support(kVp9Enabled ? kSupported : kUnsupported)); // VP9 support for spatial layers. - EXPECT_THAT( - factory.QueryCodecSupport(SdpVideoFormat(cricket::kVp9CodecName), "L3T3"), - Support(kVp9Enabled ? kSupported : kUnsupported)); + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::VP9Profile0(), "L3T3"), + Support(kVp9Enabled ? kSupported : kUnsupported)); // Invalid scalability modes even though VP8 and H264 are supported. 
- EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat(cricket::kH264CodecName), - "L2T2"), + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::H264(), "L2T2"), + Support(kUnsupported)); + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::VP8(), "L3T3"), Support(kUnsupported)); - EXPECT_THAT( - factory.QueryCodecSupport(SdpVideoFormat(cricket::kVp8CodecName), "L3T3"), - Support(kUnsupported)); } #if defined(RTC_USE_LIBAOM_AV1_ENCODER) TEST(InternalEncoderFactoryTest, Av1) { InternalEncoderFactory factory; EXPECT_THAT(factory.GetSupportedFormats(), - Contains(Field(&SdpVideoFormat::name, cricket::kAv1CodecName))); + Contains(Field(&SdpVideoFormat::name, webrtc::kAv1CodecName))); EXPECT_TRUE( - factory.CreateVideoEncoder(SdpVideoFormat(cricket::kAv1CodecName))); + factory.Create(CreateEnvironment(), SdpVideoFormat::AV1Profile0())); } TEST(InternalEncoderFactoryTest, QueryCodecSupportNoScalabilityModeAv1) { InternalEncoderFactory factory; - EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat(cricket::kAv1CodecName), - /*scalability_mode=*/absl::nullopt), + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::AV1Profile0(), + /*scalability_mode=*/std::nullopt), Support(kSupported)); } TEST(InternalEncoderFactoryTest, QueryCodecSupportNoScalabilityMode) { InternalEncoderFactory factory; - EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat(cricket::kVp8CodecName), - /*scalability_mode=*/absl::nullopt), + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::VP8(), + /*scalability_mode=*/std::nullopt), Support(kSupported)); - EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat(cricket::kVp9CodecName), - /*scalability_mode=*/absl::nullopt), + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::VP9Profile0(), + /*scalability_mode=*/std::nullopt), Support(kVp9Enabled ? kSupported : kUnsupported)); } TEST(InternalEncoderFactoryTest, QueryCodecSupportWithScalabilityModeAv1) { InternalEncoderFactory factory; - EXPECT_THAT( - factory.QueryCodecSupport(SdpVideoFormat(cricket::kAv1CodecName), "L2T1"), - Support(kSupported)); + EXPECT_THAT(factory.QueryCodecSupport(SdpVideoFormat::AV1Profile0(), "L2T1"), + Support(kSupported)); } #endif // defined(RTC_USE_LIBAOM_AV1_ENCODER) diff --git a/media/engine/multiplex_codec_factory.cc b/media/engine/multiplex_codec_factory.cc deleted file mode 100644 index 90df02a77e..0000000000 --- a/media/engine/multiplex_codec_factory.cc +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "media/engine/multiplex_codec_factory.h" - -#include -#include -#include - -#include "absl/strings/match.h" -#include "api/video_codecs/sdp_video_format.h" -#include "media/base/codec.h" -#include "media/base/media_constants.h" -#include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h" -#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h" -#include "rtc_base/logging.h" - -namespace { - -bool IsMultiplexCodec(const cricket::VideoCodec& codec) { - return absl::EqualsIgnoreCase(codec.name.c_str(), - cricket::kMultiplexCodecName); -} - -} // anonymous namespace - -namespace webrtc { - -constexpr const char* kMultiplexAssociatedCodecName = cricket::kVp9CodecName; - -MultiplexEncoderFactory::MultiplexEncoderFactory( - std::unique_ptr factory, - bool supports_augmenting_data) - : factory_(std::move(factory)), - supports_augmenting_data_(supports_augmenting_data) {} - -std::vector MultiplexEncoderFactory::GetSupportedFormats() - const { - std::vector formats = factory_->GetSupportedFormats(); - for (const auto& format : formats) { - if (absl::EqualsIgnoreCase(format.name, kMultiplexAssociatedCodecName)) { - SdpVideoFormat multiplex_format = format; - multiplex_format.parameters[cricket::kCodecParamAssociatedCodecName] = - format.name; - multiplex_format.name = cricket::kMultiplexCodecName; - formats.push_back(multiplex_format); - break; - } - } - return formats; -} - -std::unique_ptr MultiplexEncoderFactory::CreateVideoEncoder( - const SdpVideoFormat& format) { - if (!IsMultiplexCodec(cricket::CreateVideoCodec(format))) - return factory_->CreateVideoEncoder(format); - const auto& it = - format.parameters.find(cricket::kCodecParamAssociatedCodecName); - if (it == format.parameters.end()) { - RTC_LOG(LS_ERROR) << "No assicated codec for multiplex."; - return nullptr; - } - SdpVideoFormat associated_format = format; - associated_format.name = it->second; - return std::unique_ptr(new MultiplexEncoderAdapter( - factory_.get(), associated_format, supports_augmenting_data_)); -} - -MultiplexDecoderFactory::MultiplexDecoderFactory( - std::unique_ptr factory, - bool supports_augmenting_data) - : factory_(std::move(factory)), - supports_augmenting_data_(supports_augmenting_data) {} - -std::vector MultiplexDecoderFactory::GetSupportedFormats() - const { - std::vector formats = factory_->GetSupportedFormats(); - std::vector augmented_formats = formats; - for (const auto& format : formats) { - if (absl::EqualsIgnoreCase(format.name, kMultiplexAssociatedCodecName)) { - SdpVideoFormat multiplex_format = format; - multiplex_format.parameters[cricket::kCodecParamAssociatedCodecName] = - format.name; - multiplex_format.name = cricket::kMultiplexCodecName; - augmented_formats.push_back(multiplex_format); - } - } - return augmented_formats; -} - -std::unique_ptr MultiplexDecoderFactory::CreateVideoDecoder( - const SdpVideoFormat& format) { - if (!IsMultiplexCodec(cricket::CreateVideoCodec(format))) - return factory_->CreateVideoDecoder(format); - const auto& it = - format.parameters.find(cricket::kCodecParamAssociatedCodecName); - if (it == format.parameters.end()) { - RTC_LOG(LS_ERROR) << "No assicated codec for multiplex."; - return nullptr; - } - SdpVideoFormat associated_format = format; - associated_format.name = it->second; - return std::unique_ptr(new MultiplexDecoderAdapter( - factory_.get(), associated_format, supports_augmenting_data_)); -} - -} // namespace webrtc diff --git a/media/engine/multiplex_codec_factory.h 
b/media/engine/multiplex_codec_factory.h deleted file mode 100644 index a4272a2eb2..0000000000 --- a/media/engine/multiplex_codec_factory.h +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MEDIA_ENGINE_MULTIPLEX_CODEC_FACTORY_H_ -#define MEDIA_ENGINE_MULTIPLEX_CODEC_FACTORY_H_ - -#include -#include - -#include "api/video_codecs/sdp_video_format.h" -#include "api/video_codecs/video_decoder.h" -#include "api/video_codecs/video_decoder_factory.h" -#include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/video_encoder_factory.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { -// Multiplex codec is a completely modular/optional codec that allows users to -// send more than a frame's opaque content(RGB/YUV) over video channels. -// - Allows sending Alpha channel over the wire iff input is -// I420ABufferInterface. Users can expect to receive I420ABufferInterface as the -// decoded video frame buffer. I420A data is split into YUV/AXX portions, -// encoded/decoded seperately and bitstreams are concatanated. -// - Allows sending augmenting data over the wire attached to the frame. This -// attached data portion is not encoded in any way and sent as it is. Users can -// input AugmentedVideoFrameBuffer and can expect the same interface as the -// decoded video frame buffer. -// - Showcases an example of how to add a custom codec in webrtc video channel. -// How to use it end-to-end: -// - Wrap your existing VideoEncoderFactory implemention with -// MultiplexEncoderFactory and VideoDecoderFactory implemention with -// MultiplexDecoderFactory below. For actual coding, multiplex creates encoder -// and decoder instance(s) using these factories. -// - Use Multiplex*coderFactory classes in CreatePeerConnectionFactory() calls. -// - Select "multiplex" codec in SDP negotiation. -class RTC_EXPORT MultiplexEncoderFactory : public VideoEncoderFactory { - public: - // `supports_augmenting_data` defines if the encoder would support augmenting - // data. If set, the encoder expects to receive video frame buffers of type - // AugmentedVideoFrameBuffer. - MultiplexEncoderFactory(std::unique_ptr factory, - bool supports_augmenting_data = false); - - std::vector GetSupportedFormats() const override; - std::unique_ptr CreateVideoEncoder( - const SdpVideoFormat& format) override; - - private: - std::unique_ptr factory_; - const bool supports_augmenting_data_; -}; - -class RTC_EXPORT MultiplexDecoderFactory : public VideoDecoderFactory { - public: - // `supports_augmenting_data` defines if the decoder would support augmenting - // data. If set, the decoder is expected to output video frame buffers of type - // AugmentedVideoFrameBuffer. 
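For reference, the end-to-end usage that the removed comment above describes looked roughly like the deleted unit test further below: wrap an existing factory and request the multiplex codec with an associated-codec parameter (removed API, pre-migration cricket:: spellings):

    // Removed API, shown only to document what this change deletes.
    auto inner = std::make_unique<webrtc::InternalDecoderFactory>();
    webrtc::MultiplexDecoderFactory multiplex(std::move(inner));
    std::unique_ptr<webrtc::VideoDecoder> decoder =
        multiplex.CreateVideoDecoder(webrtc::SdpVideoFormat(
            cricket::kMultiplexCodecName,
            {{cricket::kCodecParamAssociatedCodecName,
              cricket::kVp9CodecName}}));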
- MultiplexDecoderFactory(std::unique_ptr factory, - bool supports_augmenting_data = false); - - std::vector GetSupportedFormats() const override; - std::unique_ptr CreateVideoDecoder( - const SdpVideoFormat& format) override; - - private: - std::unique_ptr factory_; - const bool supports_augmenting_data_; -}; - -} // namespace webrtc - -#endif // MEDIA_ENGINE_MULTIPLEX_CODEC_FACTORY_H_ diff --git a/media/engine/multiplex_codec_factory_unittest.cc b/media/engine/multiplex_codec_factory_unittest.cc deleted file mode 100644 index 1cde2f37d8..0000000000 --- a/media/engine/multiplex_codec_factory_unittest.cc +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "media/engine/multiplex_codec_factory.h" - -#include - -#include "api/video_codecs/sdp_video_format.h" -#include "api/video_codecs/video_decoder.h" -#include "api/video_codecs/video_encoder.h" -#include "media/base/media_constants.h" -#include "media/engine/internal_decoder_factory.h" -#include "media/engine/internal_encoder_factory.h" -#include "test/gtest.h" - -namespace webrtc { - -TEST(MultiplexDecoderFactory, CreateVideoDecoder) { - std::unique_ptr internal_factory( - new InternalDecoderFactory()); - MultiplexDecoderFactory factory(std::move(internal_factory)); - std::unique_ptr decoder = - factory.CreateVideoDecoder(SdpVideoFormat( - cricket::kMultiplexCodecName, - {{cricket::kCodecParamAssociatedCodecName, cricket::kVp9CodecName}})); - EXPECT_TRUE(decoder); -} - -TEST(MultiplexEncoderFactory, CreateVideoEncoder) { - std::unique_ptr internal_factory( - new InternalEncoderFactory()); - MultiplexEncoderFactory factory(std::move(internal_factory)); - std::unique_ptr encoder = - factory.CreateVideoEncoder(SdpVideoFormat( - cricket::kMultiplexCodecName, - {{cricket::kCodecParamAssociatedCodecName, cricket::kVp9CodecName}})); - EXPECT_TRUE(encoder); -} - -} // namespace webrtc diff --git a/media/engine/null_webrtc_video_engine.h b/media/engine/null_webrtc_video_engine.h deleted file mode 100644 index f94cb43e75..0000000000 --- a/media/engine/null_webrtc_video_engine.h +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MEDIA_ENGINE_NULL_WEBRTC_VIDEO_ENGINE_H_ -#define MEDIA_ENGINE_NULL_WEBRTC_VIDEO_ENGINE_H_ - -#include - -#include "media/base/media_channel.h" -#include "media/base/media_engine.h" - -namespace webrtc { - -class Call; - -} // namespace webrtc - -namespace cricket { - -// Video engine implementation that does nothing and can be used in -// CompositeMediaEngine. 
-class NullWebRtcVideoEngine : public VideoEngineInterface { - public: - std::vector send_codecs(bool) const override { - return std::vector(); - } - - std::vector recv_codecs(bool) const override { - return std::vector(); - } - std::vector send_codecs() const override { - return std::vector(); - } - - std::vector recv_codecs() const override { - return std::vector(); - } - - std::vector GetRtpHeaderExtensions() - const override { - return {}; - } -}; - -} // namespace cricket - -#endif // MEDIA_ENGINE_NULL_WEBRTC_VIDEO_ENGINE_H_ diff --git a/media/engine/null_webrtc_video_engine_unittest.cc b/media/engine/null_webrtc_video_engine_unittest.cc deleted file mode 100644 index 31c442d53d..0000000000 --- a/media/engine/null_webrtc_video_engine_unittest.cc +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "media/engine/null_webrtc_video_engine.h" - -#include -#include - -#include "api/task_queue/default_task_queue_factory.h" -#include "api/task_queue/task_queue_factory.h" -#include "api/transport/field_trial_based_config.h" -#include "media/engine/webrtc_voice_engine.h" -#include "modules/audio_device/include/mock_audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "test/gtest.h" -#include "test/mock_audio_decoder_factory.h" -#include "test/mock_audio_encoder_factory.h" - -namespace cricket { - -// Simple test to check if NullWebRtcVideoEngine implements the methods -// required by CompositeMediaEngine. -TEST(NullWebRtcVideoEngineTest, CheckInterface) { - std::unique_ptr task_queue_factory = - webrtc::CreateDefaultTaskQueueFactory(); - rtc::scoped_refptr adm = - webrtc::test::MockAudioDeviceModule::CreateNice(); - webrtc::FieldTrialBasedConfig trials; - auto audio_engine = std::make_unique( - task_queue_factory.get(), adm.get(), - webrtc::MockAudioEncoderFactory::CreateUnusedFactory(), - webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, - webrtc::AudioProcessingBuilder().Create(), nullptr, nullptr, trials); - - CompositeMediaEngine engine(std::move(audio_engine), - std::make_unique()); - engine.Init(); -} - -} // namespace cricket diff --git a/media/engine/payload_type_mapper.cc b/media/engine/payload_type_mapper.cc deleted file mode 100644 index bd86453b1c..0000000000 --- a/media/engine/payload_type_mapper.cc +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "media/engine/payload_type_mapper.h" - -#include - -#include "absl/strings/ascii.h" -#include "api/audio_codecs/audio_format.h" -#include "media/base/codec.h" -#include "media/base/media_constants.h" - -namespace cricket { - -webrtc::SdpAudioFormat AudioCodecToSdpAudioFormat(const AudioCodec& ac) { - return webrtc::SdpAudioFormat(ac.name, ac.clockrate, ac.channels, ac.params); -} - -PayloadTypeMapper::PayloadTypeMapper() - // RFC 3551 reserves payload type numbers in the range 96-127 exclusively - // for dynamic assignment. Once those are used up, it is recommended that - // payload types unassigned by the RFC are used for dynamic payload type - // mapping, before any static payload ids. At this point, we only support - // mapping within the exclusive range. - : next_unused_payload_type_(96), - max_payload_type_(127), - mappings_( - {// Static payload type assignments according to RFC 3551. - {{kPcmuCodecName, 8000, 1}, 0}, - {{"GSM", 8000, 1}, 3}, - {{"G723", 8000, 1}, 4}, - {{"DVI4", 8000, 1}, 5}, - {{"DVI4", 16000, 1}, 6}, - {{"LPC", 8000, 1}, 7}, - {{kPcmaCodecName, 8000, 1}, 8}, - {{kG722CodecName, 8000, 1}, 9}, - {{kL16CodecName, 44100, 2}, 10}, - {{kL16CodecName, 44100, 1}, 11}, - {{"QCELP", 8000, 1}, 12}, - {{kCnCodecName, 8000, 1}, 13}, - // RFC 4566 is a bit ambiguous on the contents of the "encoding - // parameters" field, which, for audio, encodes the number of - // channels. It is "optional and may be omitted if the number of - // channels is one". Does that necessarily imply that an omitted - // encoding parameter means one channel? Since RFC 3551 doesn't - // specify a value for this parameter for MPA, I've included both 0 - // and 1 here, to increase the chances it will be correctly used if - // someone implements an MPEG audio encoder/decoder. - {{"MPA", 90000, 0}, 14}, - {{"MPA", 90000, 1}, 14}, - {{"G728", 8000, 1}, 15}, - {{"DVI4", 11025, 1}, 16}, - {{"DVI4", 22050, 1}, 17}, - {{"G729", 8000, 1}, 18}, - - // Payload type assignments currently used by WebRTC. - // Includes data to reduce collisions (and thus reassignments) - {{kIlbcCodecName, 8000, 1}, 102}, - {{kCnCodecName, 16000, 1}, 105}, - {{kCnCodecName, 32000, 1}, 106}, - {{kOpusCodecName, - 48000, - 2, - {{kCodecParamMinPTime, "10"}, - {kCodecParamUseInbandFec, kParamValueTrue}}}, - 111}, - // RED for opus is assigned in the lower range, starting at the top. - // Note that the FMTP refers to the opus payload type. - {{kRedCodecName, - 48000, - 2, - {{kCodecParamNotInNameValueFormat, "111/111"}}}, - 63}, - // TODO(solenberg): Remove the hard coded 16k,32k,48k DTMF once we - // assign payload types dynamically for send side as well. - {{kDtmfCodecName, 48000, 1}, 110}, - {{kDtmfCodecName, 32000, 1}, 112}, - {{kDtmfCodecName, 16000, 1}, 113}, - {{kDtmfCodecName, 8000, 1}, 126}}) { - // TODO(ossu): Try to keep this as change-proof as possible until we're able - // to remove the payload type constants from everywhere in the code. 
- for (const auto& mapping : mappings_) { - used_payload_types_.insert(mapping.second); - } -} - -PayloadTypeMapper::~PayloadTypeMapper() = default; - -absl::optional PayloadTypeMapper::GetMappingFor( - const webrtc::SdpAudioFormat& format) { - auto iter = mappings_.find(format); - if (iter != mappings_.end()) - return iter->second; - - for (; next_unused_payload_type_ <= max_payload_type_; - ++next_unused_payload_type_) { - int payload_type = next_unused_payload_type_; - if (used_payload_types_.find(payload_type) == used_payload_types_.end()) { - used_payload_types_.insert(payload_type); - mappings_[format] = payload_type; - ++next_unused_payload_type_; - return payload_type; - } - } - - return absl::nullopt; -} - -absl::optional PayloadTypeMapper::FindMappingFor( - const webrtc::SdpAudioFormat& format) const { - auto iter = mappings_.find(format); - if (iter != mappings_.end()) - return iter->second; - - return absl::nullopt; -} - -absl::optional PayloadTypeMapper::ToAudioCodec( - const webrtc::SdpAudioFormat& format) { - // TODO(ossu): We can safely set bitrate to zero here, since that field is - // not presented in the SDP. It is used to ferry around some target bitrate - // values for certain codecs (ISAC and Opus) and in ways it really - // shouldn't. It should be removed once we no longer use CodecInsts in the - // ACM or NetEq. - auto opt_payload_type = GetMappingFor(format); - if (opt_payload_type) { - AudioCodec codec = - cricket::CreateAudioCodec(*opt_payload_type, format.name, - format.clockrate_hz, format.num_channels); - codec.params = format.parameters; - return std::move(codec); - } - - return absl::nullopt; -} - -bool PayloadTypeMapper::SdpAudioFormatOrdering::operator()( - const webrtc::SdpAudioFormat& a, - const webrtc::SdpAudioFormat& b) const { - if (a.clockrate_hz == b.clockrate_hz) { - if (a.num_channels == b.num_channels) { - int name_cmp = - absl::AsciiStrToLower(a.name).compare(absl::AsciiStrToLower(b.name)); - if (name_cmp == 0) - return a.parameters < b.parameters; - return name_cmp < 0; - } - return a.num_channels < b.num_channels; - } - return a.clockrate_hz < b.clockrate_hz; -} - -} // namespace cricket diff --git a/media/engine/payload_type_mapper.h b/media/engine/payload_type_mapper.h deleted file mode 100644 index 1d5cd7198f..0000000000 --- a/media/engine/payload_type_mapper.h +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MEDIA_ENGINE_PAYLOAD_TYPE_MAPPER_H_ -#define MEDIA_ENGINE_PAYLOAD_TYPE_MAPPER_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_format.h" -#include "media/base/codec.h" - -namespace cricket { - -webrtc::SdpAudioFormat AudioCodecToSdpAudioFormat(const AudioCodec& ac); - -class PayloadTypeMapper { - public: - PayloadTypeMapper(); - ~PayloadTypeMapper(); - - // Finds the current payload type for `format` or assigns a new one, if no - // current mapping exists. Will return an empty value if it was unable to - // create a mapping, i.e. if all dynamic payload type ids have been used up. 
- absl::optional GetMappingFor(const webrtc::SdpAudioFormat& format); - - // Finds the current payload type for `format`, if any. Returns an empty value - // if no payload type mapping exists for the format. - absl::optional FindMappingFor( - const webrtc::SdpAudioFormat& format) const; - - // Like GetMappingFor, but fills in an AudioCodec structure with the necessary - // information instead. - absl::optional ToAudioCodec(const webrtc::SdpAudioFormat& format); - - private: - struct SdpAudioFormatOrdering { - bool operator()(const webrtc::SdpAudioFormat& a, - const webrtc::SdpAudioFormat& b) const; - }; - - int next_unused_payload_type_; - int max_payload_type_; - std::map mappings_; - std::set used_payload_types_; -}; - -} // namespace cricket -#endif // MEDIA_ENGINE_PAYLOAD_TYPE_MAPPER_H_ diff --git a/media/engine/payload_type_mapper_unittest.cc b/media/engine/payload_type_mapper_unittest.cc deleted file mode 100644 index 92253a0f5d..0000000000 --- a/media/engine/payload_type_mapper_unittest.cc +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "media/engine/payload_type_mapper.h" - -#include -#include - -#include "absl/strings/string_view.h" -#include "media/base/media_constants.h" -#include "test/gmock.h" -#include "test/gtest.h" - -namespace cricket { - -class PayloadTypeMapperTest : public ::testing::Test { - protected: - PayloadTypeMapper mapper_; -}; - -TEST_F(PayloadTypeMapperTest, StaticPayloadTypes) { - EXPECT_EQ(0, mapper_.FindMappingFor({"pcmu", 8000, 1})); - EXPECT_EQ(3, mapper_.FindMappingFor({"gsm", 8000, 1})); - EXPECT_EQ(4, mapper_.FindMappingFor({"g723", 8000, 1})); - EXPECT_EQ(5, mapper_.FindMappingFor({"dvi4", 8000, 1})); - EXPECT_EQ(6, mapper_.FindMappingFor({"dvi4", 16000, 1})); - EXPECT_EQ(7, mapper_.FindMappingFor({"lpc", 8000, 1})); - EXPECT_EQ(8, mapper_.FindMappingFor({"pcma", 8000, 1})); - EXPECT_EQ(9, mapper_.FindMappingFor({"g722", 8000, 1})); - EXPECT_EQ(10, mapper_.FindMappingFor({"l16", 44100, 2})); - EXPECT_EQ(11, mapper_.FindMappingFor({"l16", 44100, 1})); - EXPECT_EQ(12, mapper_.FindMappingFor({"qcelp", 8000, 1})); - EXPECT_EQ(13, mapper_.FindMappingFor({"cn", 8000, 1})); - EXPECT_EQ(14, mapper_.FindMappingFor({"mpa", 90000, 0})); - EXPECT_EQ(14, mapper_.FindMappingFor({"mpa", 90000, 1})); - EXPECT_EQ(15, mapper_.FindMappingFor({"g728", 8000, 1})); - EXPECT_EQ(16, mapper_.FindMappingFor({"dvi4", 11025, 1})); - EXPECT_EQ(17, mapper_.FindMappingFor({"dvi4", 22050, 1})); - EXPECT_EQ(18, mapper_.FindMappingFor({"g729", 8000, 1})); -} - -TEST_F(PayloadTypeMapperTest, WebRTCPayloadTypes) { - // Tests that the payload mapper knows about the audio formats we've - // been using in WebRTC, with their hard coded values. 
- EXPECT_EQ(102, mapper_.FindMappingFor({kIlbcCodecName, 8000, 1})); - EXPECT_EQ(105, mapper_.FindMappingFor({kCnCodecName, 16000, 1})); - EXPECT_EQ(106, mapper_.FindMappingFor({kCnCodecName, 32000, 1})); - EXPECT_EQ(111, mapper_.FindMappingFor( - {kOpusCodecName, - 48000, - 2, - {{"minptime", "10"}, {"useinbandfec", "1"}}})); - EXPECT_EQ( - 63, mapper_.FindMappingFor({kRedCodecName, 48000, 2, {{"", "111/111"}}})); - // TODO(solenberg): Remove 16k, 32k, 48k DTMF checks once these payload types - // are dynamically assigned. - EXPECT_EQ(110, mapper_.FindMappingFor({kDtmfCodecName, 48000, 1})); - EXPECT_EQ(112, mapper_.FindMappingFor({kDtmfCodecName, 32000, 1})); - EXPECT_EQ(113, mapper_.FindMappingFor({kDtmfCodecName, 16000, 1})); - EXPECT_EQ(126, mapper_.FindMappingFor({kDtmfCodecName, 8000, 1})); -} - -TEST_F(PayloadTypeMapperTest, ValidDynamicPayloadTypes) { - // RFC 3551 says: - // "This profile reserves payload type numbers in the range 96-127 - // exclusively for dynamic assignment. Applications SHOULD first use - // values in this range for dynamic payload types. Those applications - // which need to define more than 32 dynamic payload types MAY bind - // codes below 96, in which case it is RECOMMENDED that unassigned - // payload type numbers be used first. However, the statically assigned - // payload types are default bindings and MAY be dynamically bound to - // new encodings if needed." - - // Tests that the payload mapper uses values in the dynamic payload type range - // (96 - 127) before any others and that the values returned are all valid. - bool has_been_below_96 = false; - std::set used_payload_types; - for (int i = 0; i != 256; ++i) { - std::string format_name = "unknown_format_" + std::to_string(i); - webrtc::SdpAudioFormat format(format_name.c_str(), i * 100, (i % 2) + 1); - auto opt_payload_type = mapper_.GetMappingFor(format); - bool mapper_is_full = false; - - // There's a limited number of slots for payload types. We're fine with not - // being able to map them all. - if (opt_payload_type) { - int payload_type = *opt_payload_type; - EXPECT_FALSE(mapper_is_full) << "Mapping should not fail sporadically"; - EXPECT_EQ(used_payload_types.find(payload_type), used_payload_types.end()) - << "Payload types must not be reused"; - used_payload_types.insert(payload_type); - EXPECT_GE(payload_type, 0) << "Negative payload types are invalid"; - EXPECT_LE(payload_type, 127) << "Payload types above 127 are invalid"; - EXPECT_FALSE(payload_type >= 96 && has_been_below_96); - if (payload_type < 96) - has_been_below_96 = true; - - EXPECT_EQ(payload_type, mapper_.FindMappingFor(format)) - << "Mapping must be permanent after successful call to " - "GetMappingFor"; - EXPECT_EQ(payload_type, mapper_.GetMappingFor(format)) - << "Subsequent calls to GetMappingFor must return the same value"; - } else { - mapper_is_full = true; - } - } - - // Also, we must've been able to map at least one dynamic payload type. 
- EXPECT_FALSE(used_payload_types.empty()) - << "Mapper must support at least one user-defined payload type"; -} - -TEST_F(PayloadTypeMapperTest, ToAudioCodec) { - webrtc::SdpAudioFormat format("unknown_format", 4711, 17); - auto opt_payload_type = mapper_.GetMappingFor(format); - EXPECT_TRUE(opt_payload_type); - auto opt_audio_codec = mapper_.ToAudioCodec(format); - EXPECT_TRUE(opt_audio_codec); - - if (opt_payload_type && opt_audio_codec) { - int payload_type = *opt_payload_type; - const AudioCodec& codec = *opt_audio_codec; - - EXPECT_EQ(codec.id, payload_type); - EXPECT_EQ(codec.name, format.name); - EXPECT_EQ(codec.clockrate, format.clockrate_hz); - EXPECT_EQ(codec.channels, format.num_channels); - EXPECT_THAT(codec.params, ::testing::ContainerEq(format.parameters)); - } -} - -} // namespace cricket diff --git a/media/engine/simulcast_encoder_adapter.cc b/media/engine/simulcast_encoder_adapter.cc index 7ee95b1a30..73040a485f 100644 --- a/media/engine/simulcast_encoder_adapter.cc +++ b/media/engine/simulcast_encoder_adapter.cc @@ -15,45 +15,59 @@ #include #include +#include +#include +#include +#include #include +#include #include +#include #include "absl/algorithm/container.h" +#include "absl/base/nullability.h" +#include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/fec_controller_override.h" +#include "api/field_trials_view.h" #include "api/scoped_refptr.h" -#include "api/video/i420_buffer.h" +#include "api/sequence_checker.h" +#include "api/units/data_rate.h" +#include "api/units/timestamp.h" +#include "api/video/encoded_image.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator.h" #include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" #include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" #include "api/video/video_rotation.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/simulcast_stream.h" +#include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" #include "api/video_codecs/video_encoder_software_fallback_wrapper.h" -#include "media/base/video_common.h" +#include "common_video/framerate_controller.h" +#include "media/base/sdp_video_format_utils.h" #include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/include/video_error_codes_utils.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" +#include "rtc_base/strings/str_join.h" +#include "rtc_base/strings/string_builder.h" +namespace webrtc { namespace { -const unsigned int kDefaultMinQp = 2; -const unsigned int kDefaultMaxQp = 56; // Max qp for lowest spatial resolution when doing simulcast. 
const unsigned int kLowestResMaxQp = 45; -absl::optional GetScreenshareBoostedQpValue() { - std::string experiment_group = - webrtc::field_trial::FindFullName("WebRTC-BoostedScreenshareQp"); - unsigned int qp; - if (sscanf(experiment_group.c_str(), "%u", &qp) != 1) - return absl::nullopt; - qp = std::min(qp, 63u); - qp = std::max(qp, 1u); - return qp; -} - -uint32_t SumStreamMaxBitrate(int streams, const webrtc::VideoCodec& codec) { +uint32_t SumStreamMaxBitrate(int streams, const VideoCodec& codec) { uint32_t bitrate_sum = 0; for (int i = 0; i < streams; ++i) { bitrate_sum += codec.simulcastStream[i].maxBitrate; @@ -61,7 +75,7 @@ uint32_t SumStreamMaxBitrate(int streams, const webrtc::VideoCodec& codec) { return bitrate_sum; } -int CountAllStreams(const webrtc::VideoCodec& codec) { +int CountAllStreams(const VideoCodec& codec) { int total_streams_count = codec.numberOfSimulcastStreams < 1 ? 1 : codec.numberOfSimulcastStreams; uint32_t simulcast_max_bitrate = @@ -72,7 +86,7 @@ int CountAllStreams(const webrtc::VideoCodec& codec) { return total_streams_count; } -int CountActiveStreams(const webrtc::VideoCodec& codec) { +int CountActiveStreams(const VideoCodec& codec) { if (codec.numberOfSimulcastStreams < 1) { return 1; } @@ -86,7 +100,7 @@ int CountActiveStreams(const webrtc::VideoCodec& codec) { return active_streams_count; } -int VerifyCodec(const webrtc::VideoCodec* codec_settings) { +int VerifyCodec(const VideoCodec* codec_settings) { if (codec_settings == nullptr) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } @@ -109,14 +123,13 @@ int VerifyCodec(const webrtc::VideoCodec* codec_settings) { return WEBRTC_VIDEO_CODEC_OK; } -bool StreamQualityCompare(const webrtc::SimulcastStream& a, - const webrtc::SimulcastStream& b) { +bool StreamQualityCompare(const SimulcastStream& a, const SimulcastStream& b) { return std::tie(a.height, a.width, a.maxBitrate, a.maxFramerate) < std::tie(b.height, b.width, b.maxBitrate, b.maxFramerate); } void GetLowestAndHighestQualityStreamIndixes( - rtc::ArrayView streams, + ArrayView streams, int* lowest_quality_stream_idx, int* highest_quality_stream_idx) { const auto lowest_highest_quality_streams = @@ -127,14 +140,13 @@ void GetLowestAndHighestQualityStreamIndixes( std::distance(streams.begin(), lowest_highest_quality_streams.second); } -std::vector GetStreamStartBitratesKbps( - const webrtc::VideoCodec& codec) { +std::vector GetStreamStartBitratesKbps(const Environment& env, + const VideoCodec& codec) { std::vector start_bitrates; - std::unique_ptr rate_allocator = - std::make_unique(codec); - webrtc::VideoBitrateAllocation allocation = - rate_allocator->Allocate(webrtc::VideoBitrateAllocationParameters( - codec.startBitrate * 1000, codec.maxFramerate)); + VideoBitrateAllocation allocation = + SimulcastRateAllocator(env, codec) + .Allocate(VideoBitrateAllocationParameters(codec.startBitrate * 1000, + codec.maxFramerate)); int total_streams_count = CountAllStreams(codec); for (int i = 0; i < total_streams_count; ++i) { @@ -146,8 +158,6 @@ std::vector GetStreamStartBitratesKbps( } // namespace -namespace webrtc { - SimulcastEncoderAdapter::EncoderContext::EncoderContext( std::unique_ptr encoder, bool prefer_temporal_support, @@ -242,26 +252,25 @@ void SimulcastEncoderAdapter::StreamContext::OnDroppedFrame( parent_->OnDroppedFrame(stream_idx_); } -SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory, - const SdpVideoFormat& format) - : SimulcastEncoderAdapter(factory, nullptr, format) {} - SimulcastEncoderAdapter::SimulcastEncoderAdapter( 
- VideoEncoderFactory* primary_factory, - VideoEncoderFactory* fallback_factory, + const Environment& env, + VideoEncoderFactory* absl_nonnull primary_factory, + VideoEncoderFactory* absl_nullable fallback_factory, const SdpVideoFormat& format) - : inited_(0), + : env_(env), + inited_(0), primary_encoder_factory_(primary_factory), fallback_encoder_factory_(fallback_factory), video_format_(format), total_streams_count_(0), bypass_mode_(false), encoded_complete_callback_(nullptr), - experimental_boosted_screenshare_qp_(GetScreenshareBoostedQpValue()), - boost_base_layer_quality_(RateControlSettings::ParseFromFieldTrials() - .Vp8BoostBaseLayerQuality()), - prefer_temporal_support_on_base_layer_(field_trial::IsEnabled( - "WebRTC-Video-PreferTemporalSupportOnBaseLayer")) { + boost_base_layer_quality_( + RateControlSettings(env_.field_trials()).Vp8BoostBaseLayerQuality()), + prefer_temporal_support_on_base_layer_(env_.field_trials().IsEnabled( + "WebRTC-Video-PreferTemporalSupportOnBaseLayer")), + per_layer_pli_(SupportsPerLayerPictureLossIndication(format.parameters)), + encoder_info_override_(env.field_trials()) { RTC_DCHECK(primary_factory); // The adapter is typically created on the worker thread, but operated on @@ -270,6 +279,7 @@ SimulcastEncoderAdapter::SimulcastEncoderAdapter( } SimulcastEncoderAdapter::~SimulcastEncoderAdapter() { + RTC_DCHECK_RUN_ON(&encoder_queue_); RTC_DCHECK(!Initialized()); DestroyStoredEncoders(); } @@ -319,18 +329,13 @@ int SimulcastEncoderAdapter::InitEncode( codec_ = *codec_settings; total_streams_count_ = CountAllStreams(*codec_settings); - // TODO(ronghuawu): Remove once this is handled in LibvpxVp8Encoder. - if (codec_.qpMax < kDefaultMinQp) { - codec_.qpMax = kDefaultMaxQp; - } - bool is_legacy_singlecast = codec_.numberOfSimulcastStreams == 0; int lowest_quality_stream_idx = 0; int highest_quality_stream_idx = 0; if (!is_legacy_singlecast) { GetLowestAndHighestQualityStreamIndixes( - rtc::ArrayView(codec_.simulcastStream, - total_streams_count_), + ArrayView(codec_.simulcastStream, + total_streams_count_), &lowest_quality_stream_idx, &highest_quality_stream_idx); } @@ -355,13 +360,22 @@ int SimulcastEncoderAdapter::InitEncode( // If we only have a single active layer it is better to create an encoder // with only one configured layer than creating it with all-but-one disabled // layers because that way we control scaling. + // The use of the nonstandard x-google-per-layer-pli fmtp parameter also + // forces the use of SEA with separate encoders to support per-layer + // handling of PLIs. bool separate_encoders_needed = !encoder_context->encoder().GetEncoderInfo().supports_simulcast || - active_streams_count == 1; + active_streams_count == 1 || per_layer_pli_; + RTC_LOG(LS_INFO) << "[SEA] InitEncode: total_streams_count: " + << total_streams_count_ + << ", active_streams_count: " << active_streams_count + << ", separate_encoders_needed: " + << (separate_encoders_needed ? "true" : "false"); // Singlecast or simulcast with simulcast-capable underlaying encoder. 
   if (total_streams_count_ == 1 || !separate_encoders_needed) {
+    RTC_LOG(LS_INFO) << "[SEA] InitEncode: Single-encoder mode";
     int ret = encoder_context->encoder().InitEncode(&codec_, settings);
     if (ret >= 0) {
       stream_contexts_.emplace_back(
           /*parent=*/nullptr, std::move(encoder_context),
           /*framerate_controller=*/nullptr, /*stream_idx=*/0, codec_.width,
@@ -374,15 +388,21 @@ int SimulcastEncoderAdapter::InitEncode(
     }
 
     encoder_context->Release();
+    encoder_context->encoder().RegisterEncodeCompleteCallback(
+        encoded_complete_callback_);
     if (total_streams_count_ == 1) {
-      // Failed to initialize singlecast encoder.
+      RTC_LOG(LS_ERROR) << "[SEA] InitEncode: failed with error code: "
+                        << WebRtcVideoCodecErrorToString(ret);
       return ret;
     }
+    RTC_LOG(LS_WARNING) << "[SEA] InitEncode: failed with error code: "
+                        << WebRtcVideoCodecErrorToString(ret)
+                        << ". Falling back to multi-encoder mode.";
   }
 
   // Multi-encoder simulcast or singlecast (deactivated layers).
   std::vector<int> stream_start_bitrate_kbps =
-      GetStreamStartBitratesKbps(codec_);
+      GetStreamStartBitratesKbps(env_, codec_);
 
   for (int stream_idx = 0; stream_idx < total_streams_count_; ++stream_idx) {
     if (!is_legacy_singlecast && !codec_.simulcastStream[stream_idx].active) {
@@ -403,11 +423,17 @@ int SimulcastEncoderAdapter::InitEncode(
         /*is_lowest_quality_stream=*/stream_idx == lowest_quality_stream_idx,
         /*is_highest_quality_stream=*/stream_idx == highest_quality_stream_idx);
 
+    RTC_LOG(LS_INFO) << "[SEA] Multi-encoder mode: initializing stream: "
+                     << stream_idx << ", active: "
+                     << (codec_.simulcastStream[stream_idx].active ? "true"
+                                                                   : "false");
     int ret = encoder_context->encoder().InitEncode(&stream_codec, settings);
     if (ret < 0) {
       encoder_context.reset();
       Release();
+      RTC_LOG(LS_ERROR) << "[SEA] InitEncode: failed with error code: "
+                        << WebRtcVideoCodecErrorToString(ret);
       return ret;
     }
 
     // Intercept frame encode complete callback only for upper streams, where
@@ -420,6 +446,7 @@ int SimulcastEncoderAdapter::InitEncode(
         parent, std::move(encoder_context),
         std::make_unique<FramerateController>(stream_codec.maxFramerate),
         stream_idx, stream_codec.width, stream_codec.height, is_paused);
+    encoder_context = nullptr;
   }
 
   // To save memory, don't store encoders that we don't use.
@@ -475,7 +502,7 @@ int SimulcastEncoderAdapter::Encode(
   }
 
   // Temporary thay may hold the result of texture to i420 buffer conversion.
-  rtc::scoped_refptr<VideoFrameBuffer> src_buffer;
+  scoped_refptr<VideoFrameBuffer> src_buffer;
   int src_width = input_image.width();
   int src_height = input_image.height();
 
@@ -487,7 +514,7 @@ int SimulcastEncoderAdapter::Encode(
 
   // Convert timestamp from RTP 90kHz clock.
   const Timestamp frame_timestamp =
-      Timestamp::Micros((1000 * input_image.timestamp()) / 90);
+      Timestamp::Micros((1000 * input_image.rtp_timestamp()) / 90);
 
   // If adapter is passed through and only one sw encoder does simulcast,
   // frame types for all streams should be passed to the encoder unchanged.
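Editorial note on the timestamp change in the hunk above: the adapter now reads the frame's RTP timestamp via rtp_timestamp() and maps the 90 kHz RTP clock to microseconds with (1000 * rtp_timestamp) / 90. The following standalone sanity check is not part of the patch (the helper name is invented for illustration) but exercises the same arithmetic:

#include <cstdint>
#include <cstdio>

// Sketch: 90'000 RTP ticks at 90 kHz correspond to one second (1'000'000 us).
int64_t RtpTicksToMicros(uint32_t rtp_ticks) {
  // Same arithmetic as Timestamp::Micros((1000 * rtp_timestamp) / 90),
  // widened to 64 bits so this standalone check cannot wrap around.
  return (int64_t{1000} * rtp_ticks) / 90;
}

int main() {
  std::printf("%lld\n", static_cast<long long>(RtpTicksToMicros(90000)));  // 1000000
  std::printf("%lld\n", static_cast<long long>(RtpTicksToMicros(4500)));   // 50000
  return 0;
}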
@@ -549,7 +576,7 @@ int SimulcastEncoderAdapter::Encode( if (src_buffer == nullptr) { src_buffer = input_image.video_frame_buffer(); } - rtc::scoped_refptr dst_buffer = + scoped_refptr dst_buffer = src_buffer->Scale(layer.width(), layer.height()); if (!dst_buffer) { RTC_LOG(LS_ERROR) << "Failed to scale video frame"; @@ -685,7 +712,7 @@ EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage( &stream_codec_specific); } -void SimulcastEncoderAdapter::OnDroppedFrame(size_t stream_idx) { +void SimulcastEncoderAdapter::OnDroppedFrame(size_t /* stream_idx */) { // Not yet implemented. } @@ -694,6 +721,7 @@ bool SimulcastEncoderAdapter::Initialized() const { } void SimulcastEncoderAdapter::DestroyStoredEncoders() { + RTC_DCHECK_RUN_ON(&encoder_queue_); while (!cached_encoder_contexts_.empty()) { cached_encoder_contexts_.pop_back(); } @@ -702,6 +730,7 @@ void SimulcastEncoderAdapter::DestroyStoredEncoders() { std::unique_ptr SimulcastEncoderAdapter::FetchOrCreateEncoderContext( bool is_lowest_quality_stream) const { + RTC_DCHECK_RUN_ON(&encoder_queue_); bool prefer_temporal_support = fallback_encoder_factory_ != nullptr && is_lowest_quality_stream && prefer_temporal_support_on_base_layer_; @@ -722,12 +751,11 @@ SimulcastEncoderAdapter::FetchOrCreateEncoderContext( cached_encoder_contexts_.erase(encoder_context_iter); } else { std::unique_ptr primary_encoder = - primary_encoder_factory_->CreateVideoEncoder(video_format_); + primary_encoder_factory_->Create(env_, video_format_); std::unique_ptr fallback_encoder; if (fallback_encoder_factory_ != nullptr) { - fallback_encoder = - fallback_encoder_factory_->CreateVideoEncoder(video_format_); + fallback_encoder = fallback_encoder_factory_->Create(env_, video_format_); } std::unique_ptr encoder; @@ -742,7 +770,7 @@ SimulcastEncoderAdapter::FetchOrCreateEncoderContext( encoder = std::move(primary_encoder); } else { encoder = CreateVideoEncoderSoftwareFallbackWrapper( - std::move(fallback_encoder), std::move(primary_encoder), + env_, std::move(fallback_encoder), std::move(primary_encoder), prefer_temporal_support); } } else if (fallback_encoder != nullptr) { @@ -787,31 +815,29 @@ webrtc::VideoCodec SimulcastEncoderAdapter::MakeStreamCodec( // By default, `scalability_mode` comes from SimulcastStream when // SimulcastEncoderAdapter is used. This allows multiple encodings of L1Tx, // but SimulcastStream currently does not support multiple spatial layers. - ScalabilityMode scalability_mode = stream_params.GetScalabilityMode(); + std::optional scalability_mode = + stream_params.GetScalabilityMode(); // To support the full set of scalability modes in the event that this is the // only active encoding, prefer VideoCodec::GetScalabilityMode() if all other // encodings are inactive. 
- if (codec.GetScalabilityMode().has_value()) { - bool only_active_stream = true; - for (int i = 0; i < codec.numberOfSimulcastStreams; ++i) { - if (i != stream_idx && codec.simulcastStream[i].active) { - only_active_stream = false; - break; - } - } - if (only_active_stream) { - scalability_mode = codec.GetScalabilityMode().value(); + bool only_active_stream = true; + for (int i = 0; i < codec.numberOfSimulcastStreams; ++i) { + if (i != stream_idx && codec.simulcastStream[i].active) { + only_active_stream = false; + break; } } - codec_params.SetScalabilityMode(scalability_mode); + if (codec.GetScalabilityMode().has_value() && only_active_stream) { + scalability_mode = codec.GetScalabilityMode(); + } + if (scalability_mode.has_value()) { + codec_params.SetScalabilityMode(*scalability_mode); + } // Settings that are based on stream/resolution. if (is_lowest_quality_stream) { // Settings for lowest spatial resolutions. - if (codec.mode == VideoCodecMode::kScreensharing) { - if (experimental_boosted_screenshare_qp_) { - codec_params.qpMax = *experimental_boosted_screenshare_qp_; - } - } else if (boost_base_layer_quality_) { + if (codec.mode == VideoCodecMode::kRealtimeVideo && + boost_base_layer_quality_) { codec_params.qpMax = kLowestResMaxQp; } } @@ -832,6 +858,15 @@ webrtc::VideoCodec SimulcastEncoderAdapter::MakeStreamCodec( } else if (codec.codecType == webrtc::kVideoCodecH264) { codec_params.H264()->numberOfTemporalLayers = stream_params.numberOfTemporalLayers; + } else if (codec.codecType == webrtc::kVideoCodecVP9 && + scalability_mode.has_value() && !only_active_stream) { + // If VP9 simulcast then explicitly set a single spatial layer for each + // simulcast stream. + codec_params.VP9()->numberOfSpatialLayers = 1; + codec_params.VP9()->numberOfTemporalLayers = + stream_params.GetNumberOfTemporalLayers(); + codec_params.VP9()->interLayerPred = InterLayerPredMode::kOff; + codec_params.spatialLayers[0] = stream_params; } // Cap start bitrate to the min bitrate in order to avoid strange codec @@ -849,9 +884,9 @@ webrtc::VideoCodec SimulcastEncoderAdapter::MakeStreamCodec( void SimulcastEncoderAdapter::OverrideFromFieldTrial( VideoEncoder::EncoderInfo* info) const { if (encoder_info_override_.requested_resolution_alignment()) { - info->requested_resolution_alignment = cricket::LeastCommonMultiple( - info->requested_resolution_alignment, - *encoder_info_override_.requested_resolution_alignment()); + info->requested_resolution_alignment = + std::lcm(info->requested_resolution_alignment, + *encoder_info_override_.requested_resolution_alignment()); info->apply_alignment_to_all_simulcast_layers = info->apply_alignment_to_all_simulcast_layers || encoder_info_override_.apply_alignment_to_all_simulcast_layers(); @@ -878,7 +913,7 @@ VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const { encoder_info.requested_resolution_alignment = 1; encoder_info.apply_alignment_to_all_simulcast_layers = false; encoder_info.supports_native_handle = true; - encoder_info.scaling_settings.thresholds = absl::nullopt; + encoder_info.scaling_settings.thresholds = std::nullopt; if (stream_contexts_.empty()) { // GetEncoderInfo queried before InitEncode. 
Only alignment info is needed @@ -896,9 +931,9 @@ VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const { const VideoEncoder::EncoderInfo& fallback_info = encoder_context->FallbackInfo(); - encoder_info.requested_resolution_alignment = cricket::LeastCommonMultiple( - primary_info.requested_resolution_alignment, - fallback_info.requested_resolution_alignment); + encoder_info.requested_resolution_alignment = + std::lcm(primary_info.requested_resolution_alignment, + fallback_info.requested_resolution_alignment); encoder_info.apply_alignment_to_all_simulcast_layers = primary_info.apply_alignment_to_all_simulcast_layers || @@ -915,15 +950,17 @@ VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const { } encoder_info.scaling_settings = VideoEncoder::ScalingSettings::kOff; + std::vector encoder_names; for (size_t i = 0; i < stream_contexts_.size(); ++i) { VideoEncoder::EncoderInfo encoder_impl_info = stream_contexts_[i].encoder().GetEncoderInfo(); - if (i == 0) { - // Encoder name indicates names of all sub-encoders. - encoder_info.implementation_name += " ("; - encoder_info.implementation_name += encoder_impl_info.implementation_name; + // Encoder name indicates names of all active sub-encoders. + if (!stream_contexts_[i].is_paused()) { + encoder_names.push_back(encoder_impl_info.implementation_name); + } + if (i == 0) { encoder_info.supports_native_handle = encoder_impl_info.supports_native_handle; encoder_info.has_trusted_rate_controller = @@ -932,9 +969,6 @@ VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const { encoder_impl_info.is_hardware_accelerated; encoder_info.is_qp_trusted = encoder_impl_info.is_qp_trusted; } else { - encoder_info.implementation_name += ", "; - encoder_info.implementation_name += encoder_impl_info.implementation_name; - // Native handle supported if any encoder supports it. encoder_info.supports_native_handle |= encoder_impl_info.supports_native_handle; @@ -957,9 +991,9 @@ VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const { encoder_impl_info.is_qp_trusted.value_or(true); } encoder_info.fps_allocation[i] = encoder_impl_info.fps_allocation[0]; - encoder_info.requested_resolution_alignment = cricket::LeastCommonMultiple( - encoder_info.requested_resolution_alignment, - encoder_impl_info.requested_resolution_alignment); + encoder_info.requested_resolution_alignment = + std::lcm(encoder_info.requested_resolution_alignment, + encoder_impl_info.requested_resolution_alignment); // request alignment on all layers if any of the encoders may need it, or // if any non-top layer encoder requests a non-trivial alignment. 
if (encoder_impl_info.apply_alignment_to_all_simulcast_layers || @@ -969,7 +1003,13 @@ VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const { encoder_info.apply_alignment_to_all_simulcast_layers = true; } } - encoder_info.implementation_name += ")"; + + if (!encoder_names.empty()) { + StringBuilder implementation_name_builder(" ("); + implementation_name_builder << StrJoin(encoder_names, ", "); + implementation_name_builder << ")"; + encoder_info.implementation_name += implementation_name_builder.Release(); + } OverrideFromFieldTrial(&encoder_info); diff --git a/media/engine/simulcast_encoder_adapter.h b/media/engine/simulcast_encoder_adapter.h index ef8205e91a..1461416c4c 100644 --- a/media/engine/simulcast_encoder_adapter.h +++ b/media/engine/simulcast_encoder_adapter.h @@ -13,17 +13,23 @@ #define MEDIA_ENGINE_SIMULCAST_ENCODER_ADAPTER_H_ #include +#include +#include #include #include -#include -#include -#include +#include #include -#include "absl/types/optional.h" +#include "absl/base/nullability.h" +#include "api/environment/environment.h" #include "api/fec_controller_override.h" #include "api/sequence_checker.h" +#include "api/units/timestamp.h" +#include "api/video/encoded_image.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" #include "common_video/framerate_controller.h" @@ -40,15 +46,14 @@ namespace webrtc { // interfaces should be called from the encoder task queue. class RTC_EXPORT SimulcastEncoderAdapter : public VideoEncoder { public: - // TODO(bugs.webrtc.org/11000): Remove when downstream usage is gone. - SimulcastEncoderAdapter(VideoEncoderFactory* primarty_factory, - const SdpVideoFormat& format); // `primary_factory` produces the first-choice encoders to use. // `fallback_factory`, if non-null, is used to create fallback encoder that // will be used if InitEncode() fails for the primary encoder. - SimulcastEncoderAdapter(VideoEncoderFactory* primary_factory, - VideoEncoderFactory* fallback_factory, + SimulcastEncoderAdapter(const Environment& env, + VideoEncoderFactory* absl_nonnull primary_factory, + VideoEncoderFactory* absl_nullable fallback_factory, const SdpVideoFormat& format); + ~SimulcastEncoderAdapter() override; // Implements VideoEncoder. @@ -67,6 +72,9 @@ class RTC_EXPORT SimulcastEncoderAdapter : public VideoEncoder { EncoderInfo GetEncoderInfo() const override; + protected: + void DestroyStoredEncoders(); + private: class EncoderContext { public: @@ -120,10 +128,10 @@ class RTC_EXPORT SimulcastEncoderAdapter : public VideoEncoder { void set_is_keyframe_needed() { is_keyframe_needed_ = true; } bool is_paused() const { return is_paused_; } void set_is_paused(bool is_paused) { is_paused_ = is_paused; } - absl::optional target_fps() const { + std::optional target_fps() const { return framerate_controller_ == nullptr - ? absl::nullopt - : absl::optional( + ? std::nullopt + : std::optional( framerate_controller_->GetMaxFramerate()); } @@ -144,8 +152,6 @@ class RTC_EXPORT SimulcastEncoderAdapter : public VideoEncoder { bool Initialized() const; - void DestroyStoredEncoders(); - // This method creates encoder. May reuse previously created encoders from // `cached_encoder_contexts_`. It's const because it's used from // const GetEncoderInfo(). 
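For callers migrating off the removed two-argument constructor shown in the header hunk above, a minimal construction sketch against the new Environment-based signature could look as follows. This is an illustration, not part of the patch: the factory pointers are assumed to be long-lived objects owned by the caller, the fallback factory may be null, and CreateEnvironment() is used here the same way the updated unit test uses it.

#include <memory>

#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "media/engine/simulcast_encoder_adapter.h"

// Sketch only: wraps a primary (e.g. hardware) encoder factory and an
// optional software fallback factory in a VP8 simulcast adapter.
std::unique_ptr<webrtc::VideoEncoder> MakeVp8SimulcastEncoder(
    webrtc::VideoEncoderFactory* primary_factory,
    webrtc::VideoEncoderFactory* fallback_factory /* may be null */) {
  webrtc::Environment env = webrtc::CreateEnvironment();
  return std::make_unique<webrtc::SimulcastEncoderAdapter>(
      env, primary_factory, fallback_factory, webrtc::SdpVideoFormat::VP8());
}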
@@ -167,6 +173,7 @@ class RTC_EXPORT SimulcastEncoderAdapter : public VideoEncoder { void OverrideFromFieldTrial(VideoEncoder::EncoderInfo* info) const; + const Environment env_; std::atomic inited_; VideoEncoderFactory* const primary_encoder_factory_; VideoEncoderFactory* const fallback_encoder_factory_; @@ -186,9 +193,9 @@ class RTC_EXPORT SimulcastEncoderAdapter : public VideoEncoder { // GetEncoderInfo(), which is const. mutable std::list> cached_encoder_contexts_; - const absl::optional experimental_boosted_screenshare_qp_; const bool boost_base_layer_quality_; const bool prefer_temporal_support_on_base_layer_; + const bool per_layer_pli_; const SimulcastEncoderAdapterEncoderInfoSettings encoder_info_override_; }; diff --git a/media/engine/simulcast_encoder_adapter_unittest.cc b/media/engine/simulcast_encoder_adapter_unittest.cc index 7d86dcc4f9..a332003054 100644 --- a/media/engine/simulcast_encoder_adapter_unittest.cc +++ b/media/engine/simulcast_encoder_adapter_unittest.cc @@ -11,27 +11,53 @@ #include "media/engine/simulcast_encoder_adapter.h" #include +#include +#include #include +#include +#include +#include #include +#include "absl/container/inlined_vector.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/fec_controller_override.h" +#include "api/make_ref_counted.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/test/create_simulcast_test_fixture.h" +#include "api/test/mock_video_decoder.h" #include "api/test/simulcast_test_fixture.h" #include "api/test/video/function_video_decoder_factory.h" #include "api/test/video/function_video_encoder_factory.h" +#include "api/units/data_rate.h" +#include "api/video/encoded_image.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator.h" #include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" -#include "common_video/include/video_frame_buffer.h" -#include "media/base/media_constants.h" #include "media/engine/internal_encoder_factory.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "modules/video_coding/utility/simulcast_test_fixture_impl.h" #include "rtc_base/checks.h" -#include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/scoped_key_value_config.h" using ::testing::_; using ::testing::Return; @@ -54,17 +80,19 @@ std::unique_ptr CreateSpecificSimulcastTestFixture( VideoEncoderFactory* internal_encoder_factory) { std::unique_ptr encoder_factory = std::make_unique( - [internal_encoder_factory]() { + [internal_encoder_factory](const Environment& env, + const SdpVideoFormat& /* format */) { return std::make_unique( - internal_encoder_factory, - SdpVideoFormat(cricket::kVp8CodecName)); + env, internal_encoder_factory, nullptr, SdpVideoFormat::VP8()); }); 
std::unique_ptr decoder_factory = std::make_unique( - []() { return VP8Decoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& /* format */) { + return CreateVp8Decoder(env); + }); return CreateSimulcastTestFixture(std::move(encoder_factory), std::move(decoder_factory), - SdpVideoFormat(cricket::kVp8CodecName)); + SdpVideoFormat::VP8()); } } // namespace @@ -166,8 +194,8 @@ class MockVideoEncoderFactory : public VideoEncoderFactory { public: std::vector GetSupportedFormats() const override; - std::unique_ptr CreateVideoEncoder( - const SdpVideoFormat& format) override; + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override; const std::vector& encoders() const; void SetEncoderNames(const std::vector& encoder_names); @@ -186,12 +214,16 @@ class MockVideoEncoderFactory : public VideoEncoderFactory { std::vector limits) { resolution_bitrate_limits_ = limits; } + void set_fallback_from_simulcast(std::optional return_value) { + fallback_from_simulcast_ = return_value; + } void DestroyVideoEncoder(VideoEncoder* encoder); private: bool create_video_encoder_return_nullptr_ = false; int32_t init_encode_return_value_ = 0; + std::optional fallback_from_simulcast_; std::vector encoders_; std::vector encoder_names_; // Keep number of entries in sync with `kMaxSimulcastStreams`. @@ -214,8 +246,11 @@ class MockVideoEncoder : public VideoEncoder { (override)); int32_t InitEncode(const VideoCodec* codecSettings, - const VideoEncoder::Settings& settings) override { + const VideoEncoder::Settings& /* settings */) override { codec_ = *codecSettings; + if (codec_.numberOfSimulcastStreams > 1 && fallback_from_simulcast_) { + return *fallback_from_simulcast_; + } return init_encode_return_value_; } @@ -258,6 +293,8 @@ class MockVideoEncoder : public VideoEncoder { const VideoCodec& codec() const { return codec_; } + EncodedImageCallback* callback() const { return callback_; } + void SendEncodedImage(int width, int height) { // Sends a fake image of the given width/height. 
EncodedImage image; @@ -280,6 +317,10 @@ class MockVideoEncoder : public VideoEncoder { init_encode_return_value_ = value; } + void set_fallback_from_simulcast(std::optional value) { + fallback_from_simulcast_ = value; + } + void set_scaling_settings(const VideoEncoder::ScalingSettings& settings) { scaling_settings_ = settings; } @@ -315,7 +356,7 @@ class MockVideoEncoder : public VideoEncoder { video_format_ = video_format; } - void set_is_qp_trusted(absl::optional is_qp_trusted) { + void set_is_qp_trusted(std::optional is_qp_trusted) { is_qp_trusted_ = is_qp_trusted; } @@ -338,10 +379,11 @@ class MockVideoEncoder : public VideoEncoder { bool has_trusted_rate_controller_ = false; bool is_hardware_accelerated_ = false; int32_t init_encode_return_value_ = 0; + std::optional fallback_from_simulcast_; VideoEncoder::RateControlParameters last_set_rates_; FramerateFractions fps_allocation_; bool supports_simulcast_ = false; - absl::optional is_qp_trusted_; + std::optional is_qp_trusted_; SdpVideoFormat video_format_; std::vector resolution_bitrate_limits; @@ -351,11 +393,11 @@ class MockVideoEncoder : public VideoEncoder { std::vector MockVideoEncoderFactory::GetSupportedFormats() const { - std::vector formats = {SdpVideoFormat("VP8")}; - return formats; + return {SdpVideoFormat::VP8()}; } -std::unique_ptr MockVideoEncoderFactory::CreateVideoEncoder( +std::unique_ptr MockVideoEncoderFactory::Create( + const Environment& /* env */, const SdpVideoFormat& format) { if (create_video_encoder_return_nullptr_) { return nullptr; @@ -363,6 +405,7 @@ std::unique_ptr MockVideoEncoderFactory::CreateVideoEncoder( auto encoder = std::make_unique<::testing::NiceMock>(this); encoder->set_init_encode_return_value(init_encode_return_value_); + encoder->set_fallback_from_simulcast(fallback_from_simulcast_); const char* encoder_name = encoder_names_.empty() ? "codec_implementation_name" : encoder_names_[encoders_.size()]; @@ -401,27 +444,28 @@ void MockVideoEncoderFactory::set_init_encode_return_value(int32_t value) { class TestSimulcastEncoderAdapterFakeHelper { public: explicit TestSimulcastEncoderAdapterFakeHelper( + const Environment& env, bool use_fallback_factory, const SdpVideoFormat& video_format) - : primary_factory_(new MockVideoEncoderFactory()), - fallback_factory_(use_fallback_factory ? new MockVideoEncoderFactory() - : nullptr), + : env_(env), + fallback_factory_(use_fallback_factory + ? std::make_unique() + : nullptr), video_format_(video_format) {} - // Can only be called once as the SimulcastEncoderAdapter will take the - // ownership of `factory_`. 
- VideoEncoder* CreateMockEncoderAdapter() { - return new SimulcastEncoderAdapter(primary_factory_.get(), - fallback_factory_.get(), video_format_); + std::unique_ptr CreateMockEncoderAdapter() { + return std::make_unique( + env_, &primary_factory_, fallback_factory_.get(), video_format_); } - MockVideoEncoderFactory* factory() { return primary_factory_.get(); } + MockVideoEncoderFactory* factory() { return &primary_factory_; } MockVideoEncoderFactory* fallback_factory() { return fallback_factory_.get(); } private: - std::unique_ptr primary_factory_; + const Environment env_; + MockVideoEncoderFactory primary_factory_; std::unique_ptr fallback_factory_; SdpVideoFormat video_format_; }; @@ -440,12 +484,13 @@ class TestSimulcastEncoderAdapterFake : public ::testing::Test, } void SetUp() override { - helper_.reset(new TestSimulcastEncoderAdapterFakeHelper( - use_fallback_factory_, SdpVideoFormat("VP8", sdp_video_parameters_))); - adapter_.reset(helper_->CreateMockEncoderAdapter()); - last_encoded_image_width_ = absl::nullopt; - last_encoded_image_height_ = absl::nullopt; - last_encoded_image_simulcast_index_ = absl::nullopt; + helper_ = std::make_unique( + env_, use_fallback_factory_, + SdpVideoFormat("VP8", sdp_video_parameters_)); + adapter_ = helper_->CreateMockEncoderAdapter(); + last_encoded_image_width_ = std::nullopt; + last_encoded_image_height_ = std::nullopt; + last_encoded_image_simulcast_index_ = std::nullopt; } void ReSetUp() { @@ -458,18 +503,19 @@ class TestSimulcastEncoderAdapterFake : public ::testing::Test, SetUp(); } - Result OnEncodedImage(const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info) override { + Result OnEncodedImage( + const EncodedImage& encoded_image, + const CodecSpecificInfo* /* codec_specific_info */) override { last_encoded_image_width_ = encoded_image._encodedWidth; last_encoded_image_height_ = encoded_image._encodedHeight; last_encoded_image_simulcast_index_ = encoded_image.SimulcastIndex(); - return Result(Result::OK, encoded_image.Timestamp()); + return Result(Result::OK, encoded_image.RtpTimestamp()); } - bool GetLastEncodedImageInfo(absl::optional* out_width, - absl::optional* out_height, - absl::optional* out_simulcast_index) { + bool GetLastEncodedImageInfo(std::optional* out_width, + std::optional* out_height, + std::optional* out_simulcast_index) { if (!last_encoded_image_width_.has_value()) { return false; } @@ -496,11 +542,33 @@ class TestSimulcastEncoderAdapterFake : public ::testing::Test, codec_.simulcastStream[stream_idx].active = active_streams[stream_idx]; } } - rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + rate_allocator_ = std::make_unique(env_, codec_); EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); adapter_->RegisterEncodeCompleteCallback(this); } + void SetupCodecWithEarlyEncodeCompleteCallback( + std::vector active_streams) { + SimulcastTestFixtureImpl::DefaultSettings( + &codec_, static_cast(kTestTemporalLayerProfile), + kVideoCodecVP8); + ASSERT_LE(active_streams.size(), codec_.numberOfSimulcastStreams); + codec_.numberOfSimulcastStreams = active_streams.size(); + for (size_t stream_idx = 0; stream_idx < kMaxSimulcastStreams; + ++stream_idx) { + if (stream_idx >= codec_.numberOfSimulcastStreams) { + // Reset parameters of unspecified stream. + codec_.simulcastStream[stream_idx] = {0}; + } else { + codec_.simulcastStream[stream_idx].active = active_streams[stream_idx]; + } + } + rate_allocator_ = std::make_unique(env_, codec_); + // Register the callback before the InitEncode(). 
+ adapter_->RegisterEncodeCompleteCallback(this); + EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); + } + void VerifyCodec(const VideoCodec& ref, int stream_index) { const VideoCodec& target = helper_->factory()->encoders()[stream_index]->codec(); @@ -572,15 +640,17 @@ class TestSimulcastEncoderAdapterFake : public ::testing::Test, } protected: + test::ScopedKeyValueConfig field_trials_; + const Environment env_ = CreateEnvironment(&field_trials_); std::unique_ptr helper_; std::unique_ptr adapter_; VideoCodec codec_; - absl::optional last_encoded_image_width_; - absl::optional last_encoded_image_height_; - absl::optional last_encoded_image_simulcast_index_; + std::optional last_encoded_image_width_; + std::optional last_encoded_image_height_; + std::optional last_encoded_image_simulcast_index_; std::unique_ptr rate_allocator_; bool use_fallback_factory_; - SdpVideoFormat::Parameters sdp_video_parameters_; + CodecParameterMap sdp_video_parameters_; }; TEST_F(TestSimulcastEncoderAdapterFake, InitEncode) { @@ -588,6 +658,18 @@ TEST_F(TestSimulcastEncoderAdapterFake, InitEncode) { VerifyCodecSettings(); } +TEST_F(TestSimulcastEncoderAdapterFake, EarlyCallbackSetupNotLost) { + helper_->factory()->set_supports_simulcast(true); + helper_->factory()->set_fallback_from_simulcast( + WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE); + SetupCodecWithEarlyEncodeCompleteCallback( + /*active_streams=*/{true, true, true}); + for (size_t idx = 0; idx < 3; ++idx) { + auto callback = helper_->factory()->encoders()[idx]->callback(); + EXPECT_NE(callback, nullptr); + } +} + TEST_F(TestSimulcastEncoderAdapterFake, ReleaseWithoutInitEncode) { EXPECT_EQ(0, adapter_->Release()); } @@ -615,9 +697,9 @@ TEST_F(TestSimulcastEncoderAdapterFake, EncodedCallbackForDifferentEncoders) { std::vector encoders = helper_->factory()->encoders(); ASSERT_EQ(3u, encoders.size()); encoders[0]->SendEncodedImage(1152, 704); - absl::optional width; - absl::optional height; - absl::optional simulcast_index; + std::optional width; + std::optional height; + std::optional simulcast_index; EXPECT_TRUE(GetLastEncodedImageInfo(&width, &height, &simulcast_index)); ASSERT_TRUE(width.has_value()); EXPECT_EQ(1152, width.value()); @@ -654,7 +736,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, ReusesEncodersInOrder) { SimulcastTestFixtureImpl::DefaultSettings( &codec_, static_cast(kTestTemporalLayerProfile), kVideoCodecVP8); - rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + rate_allocator_ = std::make_unique(env_, codec_); adapter_->RegisterEncodeCompleteCallback(this); const uint32_t target_bitrate = 1000 * (codec_.simulcastStream[0].targetBitrate + @@ -662,10 +744,10 @@ TEST_F(TestSimulcastEncoderAdapterFake, ReusesEncodersInOrder) { codec_.simulcastStream[2].minBitrate); // Input data. 
- rtc::scoped_refptr buffer(I420Buffer::Create(1280, 720)); + scoped_refptr buffer(I420Buffer::Create(1280, 720)); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(100) + .set_rtp_timestamp(100) .set_timestamp_ms(1000) .set_rotation(kVideoRotation_180) .build(); @@ -842,9 +924,9 @@ TEST_F(TestSimulcastEncoderAdapterFake, ReinitDoesNotReorderFrameSimulcastIdx) { std::vector encoders = helper_->factory()->encoders(); ASSERT_EQ(3u, encoders.size()); encoders[0]->SendEncodedImage(1152, 704); - absl::optional width; - absl::optional height; - absl::optional simulcast_index; + std::optional width; + std::optional height; + std::optional simulcast_index; EXPECT_TRUE(GetLastEncodedImageInfo(&width, &height, &simulcast_index)); // SEA doesn't intercept frame encode complete callback for the lowest stream. EXPECT_FALSE(simulcast_index.has_value()); @@ -905,7 +987,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, SetRatesUnderMinBitrate) { codec_.minBitrate = 50; codec_.numberOfSimulcastStreams = 1; EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); - rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + rate_allocator_ = std::make_unique(env_, codec_); // Above min should be respected. VideoBitrateAllocation target_bitrate = rate_allocator_->Allocate( @@ -933,15 +1015,15 @@ TEST_F(TestSimulcastEncoderAdapterFake, SupportsImplementationName) { SimulcastTestFixtureImpl::DefaultSettings( &codec_, static_cast(kTestTemporalLayerProfile), kVideoCodecVP8); + codec_.numberOfSimulcastStreams = 2; std::vector encoder_names; encoder_names.push_back("codec1"); encoder_names.push_back("codec2"); - encoder_names.push_back("codec3"); helper_->factory()->SetEncoderNames(encoder_names); EXPECT_EQ("SimulcastEncoderAdapter", adapter_->GetEncoderInfo().implementation_name); EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); - EXPECT_EQ("SimulcastEncoderAdapter (codec1, codec2, codec3)", + EXPECT_EQ("SimulcastEncoderAdapter (codec1, codec2)", adapter_->GetEncoderInfo().implementation_name); // Single streams should not expose "SimulcastEncoderAdapter" in name. @@ -963,13 +1045,59 @@ TEST_F(TestSimulcastEncoderAdapterFake, RuntimeEncoderInfoUpdate) { encoder_names.push_back("codec3"); helper_->factory()->SetEncoderNames(encoder_names); EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); - EXPECT_EQ("SimulcastEncoderAdapter (codec1, codec2, codec3)", + EXPECT_EQ("SimulcastEncoderAdapter (codec1, codec2)", adapter_->GetEncoderInfo().implementation_name); // Change name of first encoder to indicate it has done a fallback to another // implementation. 
helper_->factory()->encoders().front()->set_implementation_name("fallback1"); - EXPECT_EQ("SimulcastEncoderAdapter (fallback1, codec2, codec3)", + EXPECT_EQ("SimulcastEncoderAdapter (fallback1, codec2)", + adapter_->GetEncoderInfo().implementation_name); +} + +TEST_F(TestSimulcastEncoderAdapterFake, EncoderInfoDeactiveLayersUpdatesName) { + SimulcastTestFixtureImpl::DefaultSettings( + &codec_, static_cast(kTestTemporalLayerProfile), + kVideoCodecVP8); + const DataRate target_bitrate = + DataRate::KilobitsPerSec(codec_.simulcastStream[0].targetBitrate + + codec_.simulcastStream[1].targetBitrate + + codec_.simulcastStream[2].targetBitrate); + const DataRate bandwidth_allocation = + target_bitrate + DataRate::KilobitsPerSec(600); + const DataRate target_bitrate_without_layer3 = + target_bitrate - + DataRate::KilobitsPerSec(codec_.simulcastStream[2].targetBitrate); + const DataRate bandwidth_allocation_without_layer3 = + target_bitrate + DataRate::KilobitsPerSec(300); + + std::vector encoder_names = {"codec1", "codec2", "codec3"}; + helper_->factory()->SetEncoderNames(encoder_names); + rate_allocator_ = std::make_unique(env_, codec_); + + EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); + adapter_->SetRates(VideoEncoder::RateControlParameters( + rate_allocator_->Allocate( + VideoBitrateAllocationParameters(target_bitrate, 30)), + 30.0, bandwidth_allocation)); + EXPECT_EQ("SimulcastEncoderAdapter (codec1, codec2, codec3)", + adapter_->GetEncoderInfo().implementation_name); + + // Disable the third encoder using bitrate allocation. + adapter_->SetRates(VideoEncoder::RateControlParameters( + rate_allocator_->Allocate( + VideoBitrateAllocationParameters(target_bitrate_without_layer3, 30)), + 30.0, bandwidth_allocation_without_layer3)); + EXPECT_EQ("SimulcastEncoderAdapter (codec1, codec2)", + adapter_->GetEncoderInfo().implementation_name); + + // Enable the third encoder again using bitrate allocation. 
+ rate_allocator_ = std::make_unique(env_, codec_); + adapter_->SetRates(VideoEncoder::RateControlParameters( + rate_allocator_->Allocate( + VideoBitrateAllocationParameters(target_bitrate, 30)), + 30.0, bandwidth_allocation)); + EXPECT_EQ("SimulcastEncoderAdapter (codec1, codec2, codec3)", adapter_->GetEncoderInfo().implementation_name); } @@ -1003,7 +1131,7 @@ class FakeNativeBufferI420 : public VideoFrameBuffer { int width() const override { return width_; } int height() const override { return height_; } - rtc::scoped_refptr ToI420() override { + scoped_refptr ToI420() override { if (allow_to_i420_) { return I420Buffer::Create(width_, height_); } else { @@ -1034,12 +1162,12 @@ TEST_F(TestSimulcastEncoderAdapterFake, EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); EXPECT_TRUE(adapter_->GetEncoderInfo().supports_native_handle); - rtc::scoped_refptr buffer( - rtc::make_ref_counted(1280, 720, - /*allow_to_i420=*/false)); + scoped_refptr buffer( + make_ref_counted(1280, 720, + /*allow_to_i420=*/false)); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(100) + .set_rtp_timestamp(100) .set_timestamp_ms(1000) .set_rotation(kVideoRotation_180) .build(); @@ -1071,12 +1199,12 @@ TEST_F(TestSimulcastEncoderAdapterFake, NativeHandleForwardingOnlyIfSupported) { EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); EXPECT_TRUE(adapter_->GetEncoderInfo().supports_native_handle); - rtc::scoped_refptr buffer( - rtc::make_ref_counted(1280, 720, - /*allow_to_i420=*/true)); + scoped_refptr buffer( + make_ref_counted(1280, 720, + /*allow_to_i420=*/true)); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(100) + .set_rtp_timestamp(100) .set_timestamp_ms(1000) .set_rotation(kVideoRotation_180) .build(); @@ -1088,7 +1216,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, NativeHandleForwardingOnlyIfSupported) { // ...the lowest one gets a software buffer. EXPECT_CALL(*encoders[0], Encode) .WillOnce([&](const VideoFrame& frame, - const std::vector* frame_types) { + const std::vector* /* frame_types */) { EXPECT_EQ(frame.video_frame_buffer()->type(), VideoFrameBuffer::Type::kI420); return 0; @@ -1102,11 +1230,11 @@ TEST_F(TestSimulcastEncoderAdapterFake, GeneratesKeyFramesOnRequestedLayers) { SimulcastTestFixtureImpl::DefaultSettings( &codec_, static_cast(kTestTemporalLayerProfile), kVideoCodecVP8); - rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + rate_allocator_ = std::make_unique(env_, codec_); adapter_->RegisterEncodeCompleteCallback(this); // Input data. - rtc::scoped_refptr buffer(I420Buffer::Create(1280, 720)); + scoped_refptr buffer(I420Buffer::Create(1280, 720)); // Encode with three streams. 
codec_.startBitrate = 3000; @@ -1134,7 +1262,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, GeneratesKeyFramesOnRequestedLayers) { .WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); VideoFrame first_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(0) + .set_rtp_timestamp(0) .set_timestamp_ms(0) .build(); EXPECT_EQ(0, adapter_->Encode(first_frame, &frame_types)); @@ -1154,7 +1282,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, GeneratesKeyFramesOnRequestedLayers) { frame_types[2] = VideoFrameType::kVideoFrameDelta; VideoFrame second_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(10000) + .set_rtp_timestamp(10000) .set_timestamp_ms(100000) .build(); EXPECT_EQ(0, adapter_->Encode(second_frame, &frame_types)); @@ -1174,7 +1302,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, GeneratesKeyFramesOnRequestedLayers) { frame_types[2] = VideoFrameType::kVideoFrameDelta; VideoFrame third_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(20000) + .set_rtp_timestamp(20000) .set_timestamp_ms(200000) .build(); EXPECT_EQ(0, adapter_->Encode(third_frame, &frame_types)); @@ -1193,12 +1321,12 @@ TEST_F(TestSimulcastEncoderAdapterFake, TestFailureReturnCodesFromEncodeCalls) { .WillOnce(Return(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE)); // Send a fake frame and assert the return is software fallback. - rtc::scoped_refptr input_buffer = + scoped_refptr input_buffer = I420Buffer::Create(kDefaultWidth, kDefaultHeight); input_buffer->InitializeData(); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(input_buffer) - .set_timestamp_rtp(0) + .set_rtp_timestamp(0) .set_timestamp_us(0) .set_rotation(kVideoRotation_0) .build(); @@ -1291,7 +1419,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, ActivatesCorrectStreamsInInitEncode) { SimulcastTestFixtureImpl::DefaultSettings( &codec_, static_cast(kTestTemporalLayerProfile), kVideoCodecVP8); - rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + rate_allocator_ = std::make_unique(env_, codec_); adapter_->RegisterEncodeCompleteCallback(this); // Only enough start bitrate for the lowest stream. @@ -1300,10 +1428,10 @@ TEST_F(TestSimulcastEncoderAdapterFake, ActivatesCorrectStreamsInInitEncode) { codec_.simulcastStream[1].minBitrate - 1; // Input data. - rtc::scoped_refptr buffer(I420Buffer::Create(1280, 720)); + scoped_refptr buffer(I420Buffer::Create(1280, 720)); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(100) + .set_rtp_timestamp(100) .set_timestamp_ms(1000) .set_rotation(kVideoRotation_180) .build(); @@ -1329,7 +1457,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, TrustedRateControl) { SimulcastTestFixtureImpl::DefaultSettings( &codec_, static_cast(kTestTemporalLayerProfile), kVideoCodecVP8); - rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + rate_allocator_ = std::make_unique(env_, codec_); adapter_->RegisterEncodeCompleteCallback(this); // Only enough start bitrate for the lowest stream. @@ -1338,10 +1466,10 @@ TEST_F(TestSimulcastEncoderAdapterFake, TrustedRateControl) { codec_.simulcastStream[1].minBitrate - 1; // Input data. 
- rtc::scoped_refptr buffer(I420Buffer::Create(1280, 720)); + scoped_refptr buffer(I420Buffer::Create(1280, 720)); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(100) + .set_rtp_timestamp(100) .set_timestamp_ms(1000) .set_rotation(kVideoRotation_180) .build(); @@ -1435,12 +1563,14 @@ TEST_F(TestSimulcastEncoderAdapterFake, TEST_F( TestSimulcastEncoderAdapterFake, EncoderInfoFromFieldTrialDoesNotOverrideExistingBitrateLimitsInSinglecast) { - test::ScopedFieldTrials field_trials( + test::ScopedKeyValueConfig field_trials( + field_trials_, "WebRTC-SimulcastEncoderAdapter-GetEncoderInfoOverride/" "frame_size_pixels:123|456|789," "min_start_bitrate_bps:11000|22000|33000," "min_bitrate_bps:44000|55000|66000," "max_bitrate_bps:77000|88000|99000/"); + SetUp(); std::vector bitrate_limits; bitrate_limits.push_back( @@ -1463,7 +1593,8 @@ TEST_F( } TEST_F(TestSimulcastEncoderAdapterFake, EncoderInfoFromFieldTrial) { - test::ScopedFieldTrials field_trials( + test::ScopedKeyValueConfig field_trials( + field_trials_, "WebRTC-SimulcastEncoderAdapter-GetEncoderInfoOverride/" "requested_resolution_alignment:8," "apply_alignment_to_all_simulcast_layers/"); @@ -1483,7 +1614,8 @@ TEST_F(TestSimulcastEncoderAdapterFake, EncoderInfoFromFieldTrial) { TEST_F(TestSimulcastEncoderAdapterFake, EncoderInfoFromFieldTrialForSingleStream) { - test::ScopedFieldTrials field_trials( + test::ScopedKeyValueConfig field_trials( + field_trials_, "WebRTC-SimulcastEncoderAdapter-GetEncoderInfoOverride/" "requested_resolution_alignment:9," "frame_size_pixels:123|456|789," @@ -1572,7 +1704,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, SetRateDistributesBandwithAllocation) { const DataRate bandwidth_allocation = target_bitrate + DataRate::KilobitsPerSec(600); - rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + rate_allocator_ = std::make_unique(env_, codec_); EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); adapter_->RegisterEncodeCompleteCallback(this); @@ -1608,7 +1740,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, CanSetZeroBitrateWithHeadroom) { kVideoCodecVP8); codec_.numberOfSimulcastStreams = 3; - rate_allocator_.reset(new SimulcastRateAllocator(codec_)); + rate_allocator_ = std::make_unique(env_, codec_); EXPECT_EQ(0, adapter_->InitEncode(&codec_, kSettings)); adapter_->RegisterEncodeCompleteCallback(this); @@ -1641,10 +1773,10 @@ TEST_F(TestSimulcastEncoderAdapterFake, SupportsSimulcast) { // Only one encoder should have been produced. ASSERT_EQ(1u, helper_->factory()->encoders().size()); - rtc::scoped_refptr buffer(I420Buffer::Create(1280, 720)); + scoped_refptr buffer(I420Buffer::Create(1280, 720)); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(100) + .set_rtp_timestamp(100) .set_timestamp_ms(1000) .set_rotation(kVideoRotation_180) .build(); @@ -1692,10 +1824,10 @@ TEST_F(TestSimulcastEncoderAdapterFake, SupportsFallback) { ASSERT_EQ(3u, fallback_encoders.size()); // Create frame to test with. 
- rtc::scoped_refptr buffer(I420Buffer::Create(1280, 720)); + scoped_refptr buffer(I420Buffer::Create(1280, 720)); VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(100) + .set_rtp_timestamp(100) .set_timestamp_ms(1000) .set_rotation(kVideoRotation_180) .build(); @@ -1747,6 +1879,113 @@ TEST_F(TestSimulcastEncoderAdapterFake, SupportsFallback) { EXPECT_EQ(0, adapter_->Encode(input_frame, &frame_types)); } +TEST_F(TestSimulcastEncoderAdapterFake, + SupportsHardwareSimulcastWithBadParametrs) { + SimulcastTestFixtureImpl::DefaultSettings( + &codec_, static_cast(kTestTemporalLayerProfile), + kVideoCodecVP8); + + // Enable support for fallback encoder factory and re-setup. + use_fallback_factory_ = true; + SetUp(); + + helper_->factory()->set_supports_simulcast(true); + // Make encoders reject the simulcast configuration despite supporting it + // because parameters are not good for simulcast (e.g. different temporal + // layers setting). + helper_->factory()->set_fallback_from_simulcast( + WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED); + + SetupCodec(); + + // Make sure we have bitrate for all layers. + DataRate max_bitrate = DataRate::Zero(); + for (int i = 0; i < 3; ++i) { + max_bitrate += + DataRate::KilobitsPerSec(codec_.simulcastStream[i].maxBitrate); + } + const auto rate_settings = VideoEncoder::RateControlParameters( + rate_allocator_->Allocate( + VideoBitrateAllocationParameters(max_bitrate.bps(), 30)), + 30.0, max_bitrate); + adapter_->SetRates(rate_settings); + + std::vector primary_encoders = + helper_->factory()->encoders(); + std::vector fallback_encoders = + helper_->fallback_factory()->encoders(); + + ASSERT_EQ(3u, primary_encoders.size()); + ASSERT_EQ(3u, fallback_encoders.size()); + + // Create frame to test with. + scoped_refptr buffer(I420Buffer::Create(1280, 720)); + VideoFrame input_frame = VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_rtp_timestamp(100) + .set_timestamp_ms(1000) + .set_rotation(kVideoRotation_180) + .build(); + std::vector frame_types(3, VideoFrameType::kVideoFrameKey); + + // All primary encoders must be used. + for (auto codec : primary_encoders) { + EXPECT_CALL(*codec, Encode).WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + } + EXPECT_EQ(0, adapter_->Encode(input_frame, &frame_types)); +} + +TEST_F(TestSimulcastEncoderAdapterFake, SupportsHardwareSimulcast) { + SimulcastTestFixtureImpl::DefaultSettings( + &codec_, static_cast(kTestTemporalLayerProfile), + kVideoCodecVP8); + + // Enable support for fallback encoder factory and re-setup. + use_fallback_factory_ = true; + SetUp(); + + helper_->factory()->set_supports_simulcast(true); + helper_->factory()->set_fallback_from_simulcast(std::nullopt); + + SetupCodec(); + + // Make sure we have bitrate for all layers. + DataRate max_bitrate = DataRate::Zero(); + for (int i = 0; i < 3; ++i) { + max_bitrate += + DataRate::KilobitsPerSec(codec_.simulcastStream[i].maxBitrate); + } + const auto rate_settings = VideoEncoder::RateControlParameters( + rate_allocator_->Allocate( + VideoBitrateAllocationParameters(max_bitrate.bps(), 30)), + 30.0, max_bitrate); + adapter_->SetRates(rate_settings); + + std::vector primary_encoders = + helper_->factory()->encoders(); + std::vector fallback_encoders = + helper_->fallback_factory()->encoders(); + + ASSERT_EQ(1u, primary_encoders.size()); + ASSERT_EQ(1u, fallback_encoders.size()); + + // Create frame to test with. 
+ scoped_refptr buffer(I420Buffer::Create(1280, 720)); + VideoFrame input_frame = VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_rtp_timestamp(100) + .set_timestamp_ms(1000) + .set_rotation(kVideoRotation_180) + .build(); + std::vector frame_types(3, VideoFrameType::kVideoFrameKey); + + // A primary encoders must be used. + for (auto codec : primary_encoders) { + EXPECT_CALL(*codec, Encode).WillOnce(Return(WEBRTC_VIDEO_CODEC_OK)); + } + EXPECT_EQ(0, adapter_->Encode(input_frame, &frame_types)); +} + TEST_F(TestSimulcastEncoderAdapterFake, SupportsPerSimulcastLayerMaxFramerate) { SimulcastTestFixtureImpl::DefaultSettings( &codec_, static_cast(kTestTemporalLayerProfile), @@ -1798,8 +2037,8 @@ TEST_F(TestSimulcastEncoderAdapterFake, // Normally SEA reuses encoders. But, when TL-based SW fallback is enabled, // the encoder which served the lowest stream should be recreated before it // can be used to process an upper layer and vice-versa. - test::ScopedFieldTrials field_trials( - "WebRTC-Video-PreferTemporalSupportOnBaseLayer/Enabled/"); + test::ScopedKeyValueConfig field_trials( + field_trials_, "WebRTC-Video-PreferTemporalSupportOnBaseLayer/Enabled/"); use_fallback_factory_ = true; ReSetUp(); diff --git a/media/engine/webrtc_media_engine.cc b/media/engine/webrtc_media_engine.cc index 99d7dd2704..60bf57f775 100644 --- a/media/engine/webrtc_media_engine.cc +++ b/media/engine/webrtc_media_engine.cc @@ -12,59 +12,27 @@ #include #include -#include #include -#include +#include #include "absl/algorithm/container.h" #include "absl/strings/match.h" -#include "api/transport/field_trial_based_config.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/field_trials_view.h" +#include "api/rtp_parameters.h" +#include "api/transport/bitrate_settings.h" +#include "media/base/codec.h" #include "media/base/media_constants.h" -#include "media/engine/webrtc_voice_engine.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#ifdef HAVE_WEBRTC_VIDEO -#include "media/engine/webrtc_video_engine.h" -#else -#include "media/engine/null_webrtc_video_engine.h" -#endif - -namespace cricket { - -std::unique_ptr CreateMediaEngine( - MediaEngineDependencies dependencies) { - // TODO(sprang): Make populating `dependencies.trials` mandatory and remove - // these fallbacks. - std::unique_ptr fallback_trials( - dependencies.trials ? nullptr : new webrtc::FieldTrialBasedConfig()); - const webrtc::FieldTrialsView& trials = - dependencies.trials ? *dependencies.trials : *fallback_trials; - auto audio_engine = std::make_unique( - dependencies.task_queue_factory, dependencies.adm.get(), - std::move(dependencies.audio_encoder_factory), - std::move(dependencies.audio_decoder_factory), - std::move(dependencies.audio_mixer), - std::move(dependencies.audio_processing), - dependencies.audio_frame_processor, - std::move(dependencies.owned_audio_frame_processor), trials); -#ifdef HAVE_WEBRTC_VIDEO - auto video_engine = std::make_unique( - std::move(dependencies.video_encoder_factory), - std::move(dependencies.video_decoder_factory), trials); -#else - auto video_engine = std::make_unique(); -#endif - return std::make_unique(std::move(fallback_trials), - std::move(audio_engine), - std::move(video_engine)); -} - +namespace webrtc { namespace { // Remove mutually exclusive extensions with lower priority. 
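The anonymous-namespace helper defined next walks a priority-ordered list of mutually exclusive extension URIs and keeps only the first one it finds; callers reach it through `FilterRtpExtensions()` with `filter_redundant_extensions` set. A hedged sketch of the observable effect (the same case is covered by `FilterRtpExtensionsRemoveRedundantBwe2` further down in this patch):

```cpp
// Sketch only: both toffset and abs-send-time are offered; after redundancy
// filtering only the higher-priority abs-send-time survives.
std::vector<webrtc::RtpExtension> offered = {
    webrtc::RtpExtension(webrtc::RtpExtension::kTimestampOffsetUri, 2),
    webrtc::RtpExtension(webrtc::RtpExtension::kAbsSendTimeUri, 3),
};
webrtc::test::ScopedKeyValueConfig trials;  // No trials needed for this case.
std::vector<webrtc::RtpExtension> filtered = webrtc::FilterRtpExtensions(
    offered, webrtc::RtpExtension::IsSupportedForVideo,
    /*filter_redundant_extensions=*/true, trials);
// `filtered` now holds a single entry whose uri is kAbsSendTimeUri.
```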
void DiscardRedundantExtensions( - std::vector* extensions, - rtc::ArrayView extensions_decreasing_prio) { + std::vector* extensions, + ArrayView extensions_decreasing_prio) { RTC_DCHECK(extensions); bool found = false; for (const char* uri : extensions_decreasing_prio) { @@ -81,13 +49,12 @@ void DiscardRedundantExtensions( } } // namespace -bool ValidateRtpExtensions( - rtc::ArrayView extensions, - rtc::ArrayView old_extensions) { - bool id_used[1 + webrtc::RtpExtension::kMaxId] = {false}; +bool ValidateRtpExtensions(ArrayView extensions, + ArrayView old_extensions) { + bool id_used[1 + RtpExtension::kMaxId] = {false}; for (const auto& extension : extensions) { - if (extension.id < webrtc::RtpExtension::kMinId || - extension.id > webrtc::RtpExtension::kMaxId) { + if (extension.id < RtpExtension::kMinId || + extension.id > RtpExtension::kMaxId) { RTC_LOG(LS_ERROR) << "Bad RTP extension ID: " << extension.ToString(); return false; } @@ -114,7 +81,7 @@ bool ValidateRtpExtensions( // // Getting at this seems like a hard slog. if (!old_extensions.empty()) { - absl::string_view urimap[1 + webrtc::RtpExtension::kMaxId]; + absl::string_view urimap[1 + RtpExtension::kMaxId]; std::map idmap; for (const auto& old_extension : old_extensions) { urimap[old_extension.id] = old_extension.uri; @@ -140,15 +107,15 @@ bool ValidateRtpExtensions( return true; } -std::vector FilterRtpExtensions( - const std::vector& extensions, +std::vector FilterRtpExtensions( + const std::vector& extensions, bool (*supported)(absl::string_view), bool filter_redundant_extensions, - const webrtc::FieldTrialsView& trials) { + const FieldTrialsView& trials) { // Don't check against old parameters; this should have been done earlier. RTC_DCHECK(ValidateRtpExtensions(extensions, {})); RTC_DCHECK(supported); - std::vector result; + std::vector result; // Ignore any extensions that we don't recognize. 
for (const auto& extension : extensions) { @@ -182,22 +149,20 @@ std::vector FilterRtpExtensions( if (absl::StartsWith(trials.Lookup("WebRTC-FilterAbsSendTimeExtension"), "Enabled")) { static const char* const kBweExtensionPriorities[] = { - webrtc::RtpExtension::kTransportSequenceNumberUri, - webrtc::RtpExtension::kAbsSendTimeUri, - webrtc::RtpExtension::kTimestampOffsetUri}; + RtpExtension::kTransportSequenceNumberUri, + RtpExtension::kAbsSendTimeUri, RtpExtension::kTimestampOffsetUri}; DiscardRedundantExtensions(&result, kBweExtensionPriorities); } else { static const char* const kBweExtensionPriorities[] = { - webrtc::RtpExtension::kAbsSendTimeUri, - webrtc::RtpExtension::kTimestampOffsetUri}; + RtpExtension::kAbsSendTimeUri, RtpExtension::kTimestampOffsetUri}; DiscardRedundantExtensions(&result, kBweExtensionPriorities); } } return result; } -webrtc::BitrateConstraints GetBitrateConfigForCodec(const Codec& codec) { - webrtc::BitrateConstraints config; +BitrateConstraints GetBitrateConfigForCodec(const Codec& codec) { + BitrateConstraints config; int bitrate_kbps = 0; if (codec.GetParam(kCodecParamMinBitrate, &bitrate_kbps) && bitrate_kbps > 0) { @@ -220,4 +185,4 @@ webrtc::BitrateConstraints GetBitrateConfigForCodec(const Codec& codec) { } return config; } -} // namespace cricket +} // namespace webrtc diff --git a/media/engine/webrtc_media_engine.h b/media/engine/webrtc_media_engine.h index 0f6dce35b5..15eb5427ff 100644 --- a/media/engine/webrtc_media_engine.h +++ b/media/engine/webrtc_media_engine.h @@ -11,79 +11,44 @@ #ifndef MEDIA_ENGINE_WEBRTC_MEDIA_ENGINE_H_ #define MEDIA_ENGINE_WEBRTC_MEDIA_ENGINE_H_ -#include #include #include "absl/strings/string_view.h" #include "api/array_view.h" -#include "api/audio/audio_frame_processor.h" -#include "api/audio/audio_mixer.h" -#include "api/audio_codecs/audio_decoder_factory.h" -#include "api/audio_codecs/audio_encoder_factory.h" #include "api/field_trials_view.h" #include "api/rtp_parameters.h" -#include "api/scoped_refptr.h" -#include "api/task_queue/task_queue_factory.h" #include "api/transport/bitrate_settings.h" -#include "api/video_codecs/video_decoder_factory.h" -#include "api/video_codecs/video_encoder_factory.h" #include "media/base/codec.h" -#include "media/base/media_engine.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "rtc_base/system/rtc_export.h" -namespace cricket { - -struct MediaEngineDependencies { - MediaEngineDependencies() = default; - MediaEngineDependencies(const MediaEngineDependencies&) = delete; - MediaEngineDependencies(MediaEngineDependencies&&) = default; - MediaEngineDependencies& operator=(const MediaEngineDependencies&) = delete; - MediaEngineDependencies& operator=(MediaEngineDependencies&&) = default; - ~MediaEngineDependencies() = default; - - webrtc::TaskQueueFactory* task_queue_factory = nullptr; - rtc::scoped_refptr adm; - rtc::scoped_refptr audio_encoder_factory; - rtc::scoped_refptr audio_decoder_factory; - rtc::scoped_refptr audio_mixer; - rtc::scoped_refptr audio_processing; - // TODO(bugs.webrtc.org/15111): - // Remove the raw AudioFrameProcessor pointer in the follow-up. 
- webrtc::AudioFrameProcessor* audio_frame_processor = nullptr; - std::unique_ptr owned_audio_frame_processor; - - std::unique_ptr video_encoder_factory; - std::unique_ptr video_decoder_factory; - - const webrtc::FieldTrialsView* trials = nullptr; -}; - -// CreateMediaEngine may be called on any thread, though the engine is -// only expected to be used on one thread, internally called the "worker -// thread". This is the thread Init must be called on. -RTC_EXPORT std::unique_ptr CreateMediaEngine( - MediaEngineDependencies dependencies); +namespace webrtc { // Verify that extension IDs are within 1-byte extension range and are not // overlapping, and that they form a legal change from previously registerd // extensions (if any). -bool ValidateRtpExtensions( - rtc::ArrayView extennsions, - rtc::ArrayView old_extensions); +bool ValidateRtpExtensions(ArrayView extennsions, + ArrayView old_extensions); // Discard any extensions not validated by the 'supported' predicate. Duplicate // extensions are removed if 'filter_redundant_extensions' is set, and also any // mutually exclusive extensions (see implementation for details) are removed. -std::vector FilterRtpExtensions( - const std::vector& extensions, +std::vector FilterRtpExtensions( + const std::vector& extensions, bool (*supported)(absl::string_view), bool filter_redundant_extensions, - const webrtc::FieldTrialsView& trials); + const FieldTrialsView& trials); -webrtc::BitrateConstraints GetBitrateConfigForCodec(const Codec& codec); +BitrateConstraints GetBitrateConfigForCodec(const Codec& codec); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::FilterRtpExtensions; +using ::webrtc::GetBitrateConfigForCodec; +using ::webrtc::ValidateRtpExtensions; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_ENGINE_WEBRTC_MEDIA_ENGINE_H_ diff --git a/media/engine/webrtc_media_engine_defaults.cc b/media/engine/webrtc_media_engine_defaults.cc deleted file mode 100644 index 1660873e8b..0000000000 --- a/media/engine/webrtc_media_engine_defaults.cc +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "media/engine/webrtc_media_engine_defaults.h" - -#include "api/audio_codecs/builtin_audio_decoder_factory.h" -#include "api/audio_codecs/builtin_audio_encoder_factory.h" -#include "api/task_queue/default_task_queue_factory.h" -#include "api/video/builtin_video_bitrate_allocator_factory.h" -#include "api/video_codecs/builtin_video_decoder_factory.h" -#include "api/video_codecs/builtin_video_encoder_factory.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -void SetMediaEngineDefaults(cricket::MediaEngineDependencies* deps) { - RTC_DCHECK(deps); - if (deps->task_queue_factory == nullptr) { - static TaskQueueFactory* const task_queue_factory = - CreateDefaultTaskQueueFactory().release(); - deps->task_queue_factory = task_queue_factory; - } - if (deps->audio_encoder_factory == nullptr) - deps->audio_encoder_factory = CreateBuiltinAudioEncoderFactory(); - if (deps->audio_decoder_factory == nullptr) - deps->audio_decoder_factory = CreateBuiltinAudioDecoderFactory(); - if (deps->audio_processing == nullptr) - deps->audio_processing = AudioProcessingBuilder().Create(); - - if (deps->video_encoder_factory == nullptr) - deps->video_encoder_factory = CreateBuiltinVideoEncoderFactory(); - if (deps->video_decoder_factory == nullptr) - deps->video_decoder_factory = CreateBuiltinVideoDecoderFactory(); -} - -} // namespace webrtc diff --git a/media/engine/webrtc_media_engine_defaults.h b/media/engine/webrtc_media_engine_defaults.h deleted file mode 100644 index 16b1d462e3..0000000000 --- a/media/engine/webrtc_media_engine_defaults.h +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MEDIA_ENGINE_WEBRTC_MEDIA_ENGINE_DEFAULTS_H_ -#define MEDIA_ENGINE_WEBRTC_MEDIA_ENGINE_DEFAULTS_H_ - -#include "media/engine/webrtc_media_engine.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// Sets required but null dependencies with default factories. 
-RTC_EXPORT void SetMediaEngineDefaults(cricket::MediaEngineDependencies* deps); - -} // namespace webrtc - -#endif // MEDIA_ENGINE_WEBRTC_MEDIA_ENGINE_DEFAULTS_H_ diff --git a/media/engine/webrtc_media_engine_unittest.cc b/media/engine/webrtc_media_engine_unittest.cc index 4615f03deb..91893fa08a 100644 --- a/media/engine/webrtc_media_engine_unittest.cc +++ b/media/engine/webrtc_media_engine_unittest.cc @@ -10,19 +10,20 @@ #include "media/engine/webrtc_media_engine.h" -#include #include #include +#include -#include "media/engine/webrtc_media_engine_defaults.h" +#include "absl/strings/string_view.h" +#include "api/rtp_parameters.h" #include "test/gtest.h" #include "test/scoped_key_value_config.h" -using webrtc::RtpExtension; - -namespace cricket { +namespace webrtc { namespace { +using webrtc::test::ScopedKeyValueConfig; + std::vector MakeUniqueExtensions() { std::vector result; char name[] = "a"; @@ -54,7 +55,7 @@ bool SupportedExtensions2(absl::string_view name) { return name != "a" && name != "n"; } -bool IsSorted(const std::vector& extensions) { +bool IsSorted(const std::vector& extensions) { const std::string* last = nullptr; for (const auto& extension : extensions) { if (last && *last > extension.uri) { @@ -129,16 +130,16 @@ TEST(WebRtcMediaEngineTest, ValidateRtpExtensionsChangeIdForUrl) { TEST(WebRtcMediaEngineTest, FilterRtpExtensionsEmptyList) { std::vector extensions; - webrtc::test::ScopedKeyValueConfig trials; - std::vector filtered = + ScopedKeyValueConfig trials; + std::vector filtered = FilterRtpExtensions(extensions, SupportedExtensions1, true, trials); EXPECT_EQ(0u, filtered.size()); } TEST(WebRtcMediaEngineTest, FilterRtpExtensionsIncludeOnlySupported) { std::vector extensions = MakeUniqueExtensions(); - webrtc::test::ScopedKeyValueConfig trials; - std::vector filtered = + ScopedKeyValueConfig trials; + std::vector filtered = FilterRtpExtensions(extensions, SupportedExtensions1, false, trials); EXPECT_EQ(2u, filtered.size()); EXPECT_EQ("c", filtered[0].uri); @@ -147,8 +148,8 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensionsIncludeOnlySupported) { TEST(WebRtcMediaEngineTest, FilterRtpExtensionsSortedByName1) { std::vector extensions = MakeUniqueExtensions(); - webrtc::test::ScopedKeyValueConfig trials; - std::vector filtered = + ScopedKeyValueConfig trials; + std::vector filtered = FilterRtpExtensions(extensions, SupportedExtensions2, false, trials); EXPECT_EQ(12u, filtered.size()); EXPECT_TRUE(IsSorted(filtered)); @@ -156,8 +157,8 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensionsSortedByName1) { TEST(WebRtcMediaEngineTest, FilterRtpExtensionsSortedByName2) { std::vector extensions = MakeUniqueExtensions(); - webrtc::test::ScopedKeyValueConfig trials; - std::vector filtered = + ScopedKeyValueConfig trials; + std::vector filtered = FilterRtpExtensions(extensions, SupportedExtensions2, true, trials); EXPECT_EQ(12u, filtered.size()); EXPECT_TRUE(IsSorted(filtered)); @@ -165,8 +166,8 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensionsSortedByName2) { TEST(WebRtcMediaEngineTest, FilterRtpExtensionsDontRemoveRedundant) { std::vector extensions = MakeRedundantExtensions(); - webrtc::test::ScopedKeyValueConfig trials; - std::vector filtered = + ScopedKeyValueConfig trials; + std::vector filtered = FilterRtpExtensions(extensions, SupportedExtensions2, false, trials); EXPECT_EQ(12u, filtered.size()); EXPECT_TRUE(IsSorted(filtered)); @@ -175,8 +176,8 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensionsDontRemoveRedundant) { TEST(WebRtcMediaEngineTest, 
FilterRtpExtensionsRemoveRedundant) { std::vector extensions = MakeRedundantExtensions(); - webrtc::test::ScopedKeyValueConfig trials; - std::vector filtered = + ScopedKeyValueConfig trials; + std::vector filtered = FilterRtpExtensions(extensions, SupportedExtensions2, true, trials); EXPECT_EQ(6u, filtered.size()); EXPECT_TRUE(IsSorted(filtered)); @@ -185,12 +186,12 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensionsRemoveRedundant) { TEST(WebRtcMediaEngineTest, FilterRtpExtensionsRemoveRedundantEncrypted1) { std::vector extensions; - extensions.push_back(webrtc::RtpExtension("b", 1)); - extensions.push_back(webrtc::RtpExtension("b", 2, true)); - extensions.push_back(webrtc::RtpExtension("c", 3)); - extensions.push_back(webrtc::RtpExtension("b", 4)); - webrtc::test::ScopedKeyValueConfig trials; - std::vector filtered = + extensions.push_back(RtpExtension("b", 1)); + extensions.push_back(RtpExtension("b", 2, true)); + extensions.push_back(RtpExtension("c", 3)); + extensions.push_back(RtpExtension("b", 4)); + ScopedKeyValueConfig trials; + std::vector filtered = FilterRtpExtensions(extensions, SupportedExtensions2, true, trials); EXPECT_EQ(3u, filtered.size()); EXPECT_TRUE(IsSorted(filtered)); @@ -202,12 +203,12 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensionsRemoveRedundantEncrypted1) { TEST(WebRtcMediaEngineTest, FilterRtpExtensionsRemoveRedundantEncrypted2) { std::vector extensions; - extensions.push_back(webrtc::RtpExtension("b", 1, true)); - extensions.push_back(webrtc::RtpExtension("b", 2)); - extensions.push_back(webrtc::RtpExtension("c", 3)); - extensions.push_back(webrtc::RtpExtension("b", 4)); - webrtc::test::ScopedKeyValueConfig trials; - std::vector filtered = + extensions.push_back(RtpExtension("b", 1, true)); + extensions.push_back(RtpExtension("b", 2)); + extensions.push_back(RtpExtension("c", 3)); + extensions.push_back(RtpExtension("b", 4)); + ScopedKeyValueConfig trials; + std::vector filtered = FilterRtpExtensions(extensions, SupportedExtensions2, true, trials); EXPECT_EQ(3u, filtered.size()); EXPECT_TRUE(IsSorted(filtered)); @@ -218,8 +219,7 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensionsRemoveRedundantEncrypted2) { } TEST(WebRtcMediaEngineTest, FilterRtpExtensionsRemoveRedundantBwe1) { - webrtc::test::ScopedKeyValueConfig trials( - "WebRTC-FilterAbsSendTimeExtension/Enabled/"); + ScopedKeyValueConfig trials("WebRTC-FilterAbsSendTimeExtension/Enabled/"); std::vector extensions; extensions.push_back( RtpExtension(RtpExtension::kTransportSequenceNumberUri, 3)); @@ -228,7 +228,7 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensionsRemoveRedundantBwe1) { extensions.push_back( RtpExtension(RtpExtension::kTransportSequenceNumberUri, 1)); extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 14)); - std::vector filtered = + std::vector filtered = FilterRtpExtensions(extensions, SupportedExtensions2, true, trials); EXPECT_EQ(1u, filtered.size()); EXPECT_EQ(RtpExtension::kTransportSequenceNumberUri, filtered[0].uri); @@ -244,8 +244,8 @@ TEST(WebRtcMediaEngineTest, extensions.push_back( RtpExtension(RtpExtension::kTransportSequenceNumberUri, 1)); extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 14)); - webrtc::test::ScopedKeyValueConfig trials; - std::vector filtered = + ScopedKeyValueConfig trials; + std::vector filtered = FilterRtpExtensions(extensions, SupportedExtensions2, true, trials); EXPECT_EQ(2u, filtered.size()); EXPECT_EQ(RtpExtension::kTransportSequenceNumberUri, filtered[0].uri); @@ -253,8 +253,7 @@ TEST(WebRtcMediaEngineTest, } 
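For reference, a test-shaped sketch (not part of this patch) of the uniqueness and range rules that `ValidateRtpExtensions()` enforces, written in the same style as the surrounding tests:

```cpp
TEST(WebRtcMediaEngineTest, ValidateRtpExtensionsRejectsBadIdsSketch) {
  std::vector<RtpExtension> extensions;
  extensions.push_back(RtpExtension(RtpExtension::kMidUri, 1));
  // The same ID reused for a different URI: IDs must not overlap.
  extensions.push_back(RtpExtension(RtpExtension::kAbsSendTimeUri, 1));
  EXPECT_FALSE(ValidateRtpExtensions(extensions, /*old_extensions=*/{}));

  // IDs must stay within [RtpExtension::kMinId, RtpExtension::kMaxId].
  extensions.clear();
  extensions.push_back(RtpExtension(RtpExtension::kMidUri, 0));
  EXPECT_FALSE(ValidateRtpExtensions(extensions, /*old_extensions=*/{}));
}
```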
TEST(WebRtcMediaEngineTest, FilterRtpExtensionsRemoveRedundantBweEncrypted1) { - webrtc::test::ScopedKeyValueConfig trials( - "WebRTC-FilterAbsSendTimeExtension/Enabled/"); + ScopedKeyValueConfig trials("WebRTC-FilterAbsSendTimeExtension/Enabled/"); std::vector extensions; extensions.push_back( RtpExtension(RtpExtension::kTransportSequenceNumberUri, 3)); @@ -267,7 +266,7 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensionsRemoveRedundantBweEncrypted1) { extensions.push_back( RtpExtension(RtpExtension::kTransportSequenceNumberUri, 2, true)); extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 14)); - std::vector filtered = + std::vector filtered = FilterRtpExtensions(extensions, SupportedExtensions2, true, trials); EXPECT_EQ(2u, filtered.size()); EXPECT_EQ(RtpExtension::kTransportSequenceNumberUri, filtered[0].uri); @@ -289,8 +288,8 @@ TEST(WebRtcMediaEngineTest, extensions.push_back( RtpExtension(RtpExtension::kTransportSequenceNumberUri, 2, true)); extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 14)); - webrtc::test::ScopedKeyValueConfig trials; - std::vector filtered = + ScopedKeyValueConfig trials; + std::vector filtered = FilterRtpExtensions(extensions, SupportedExtensions2, true, trials); EXPECT_EQ(3u, filtered.size()); EXPECT_EQ(RtpExtension::kTransportSequenceNumberUri, filtered[0].uri); @@ -304,8 +303,8 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensionsRemoveRedundantBwe2) { extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 1)); extensions.push_back(RtpExtension(RtpExtension::kAbsSendTimeUri, 14)); extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 7)); - webrtc::test::ScopedKeyValueConfig trials; - std::vector filtered = + ScopedKeyValueConfig trials; + std::vector filtered = FilterRtpExtensions(extensions, SupportedExtensions2, true, trials); EXPECT_EQ(1u, filtered.size()); EXPECT_EQ(RtpExtension::kAbsSendTimeUri, filtered[0].uri); @@ -315,23 +314,11 @@ TEST(WebRtcMediaEngineTest, FilterRtpExtensionsRemoveRedundantBwe3) { std::vector extensions; extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 2)); extensions.push_back(RtpExtension(RtpExtension::kTimestampOffsetUri, 14)); - webrtc::test::ScopedKeyValueConfig trials; - std::vector filtered = + ScopedKeyValueConfig trials; + std::vector filtered = FilterRtpExtensions(extensions, SupportedExtensions2, true, trials); EXPECT_EQ(1u, filtered.size()); EXPECT_EQ(RtpExtension::kTimestampOffsetUri, filtered[0].uri); } -TEST(WebRtcMediaEngineTest, Create) { - MediaEngineDependencies deps; - webrtc::SetMediaEngineDefaults(&deps); - webrtc::test::ScopedKeyValueConfig trials; - deps.trials = &trials; - - std::unique_ptr engine = - CreateMediaEngine(std::move(deps)); - - EXPECT_TRUE(engine); -} - -} // namespace cricket +} // namespace webrtc diff --git a/media/engine/webrtc_video_engine.cc b/media/engine/webrtc_video_engine.cc index e8bffc2d05..3645bd054f 100644 --- a/media/engine/webrtc_video_engine.cc +++ b/media/engine/webrtc_video_engine.cc @@ -14,26 +14,47 @@ #include #include +#include #include +#include +#include +#include #include #include -#include +#include #include +#include #include "absl/algorithm/container.h" -#include "absl/container/inlined_vector.h" #include "absl/functional/bind_front.h" #include "absl/strings/match.h" -#include "absl/types/optional.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/crypto/crypto_options.h" +#include "api/crypto/frame_decryptor_interface.h" +#include 
"api/field_trials_view.h" +#include "api/frame_transformer_interface.h" #include "api/make_ref_counted.h" #include "api/media_stream_interface.h" #include "api/media_types.h" #include "api/priority.h" +#include "api/rtc_error.h" +#include "api/rtp_headers.h" +#include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_direction.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "api/transport/rtp/rtp_source.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "api/video/resolution.h" +#include "api/video/recordable_encoded_frame.h" +#include "api/video/video_bitrate_allocator_factory.h" #include "api/video/video_codec_type.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" #include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_codec.h" @@ -41,43 +62,54 @@ #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" #include "call/call.h" +#include "call/flexfec_receive_stream.h" #include "call/packet_receiver.h" +#include "call/payload_type.h" +#include "call/payload_type_picker.h" #include "call/receive_stream.h" +#include "call/rtp_config.h" #include "call/rtp_transport_controller_send_interface.h" +#include "call/video_receive_stream.h" +#include "call/video_send_stream.h" #include "common_video/frame_counts.h" -#include "common_video/include/quality_limitation_reason.h" #include "media/base/codec.h" +#include "media/base/codec_comparators.h" +#include "media/base/media_channel.h" +#include "media/base/media_channel_impl.h" +#include "media/base/media_config.h" #include "media/base/media_constants.h" +#include "media/base/media_engine.h" #include "media/base/rid_description.h" #include "media/base/rtp_utils.h" +#include "media/base/stream_params.h" #include "media/engine/webrtc_media_engine.h" #include "modules/rtp_rtcp/include/receive_statistics.h" -#include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_util.h" #include "modules/video_coding/svc/scalability_mode_util.h" #include "rtc_base/checks.h" #include "rtc_base/dscp.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" +#include "rtc_base/network_route.h" #include "rtc_base/socket.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" +#include "video/config/video_encoder_config.h" -namespace cricket { +namespace webrtc { namespace { using ::webrtc::ParseRtpPayloadType; using ::webrtc::ParseRtpSsrc; -constexpr int64_t kUnsignaledSsrcCooldownMs = rtc::kNumMillisecsPerSec / 2; - -// TODO(bugs.webrtc.org/13166): Remove AV1X when backwards compatibility is not -// needed. -constexpr char kAv1xCodecName[] = "AV1X"; +constexpr int64_t kUnsignaledSsrcCooldownMs = webrtc::kNumMillisecsPerSec / 2; // This constant is really an on/off, lower-level configurable NACK history // duration hasn't been implemented. 
@@ -88,29 +120,27 @@ const int kDefaultRtcpReceiverReportSsrc = 1; // Minimum time interval for logging stats. const int64_t kStatsLogIntervalMs = 10000; -const char* StreamTypeToString( - webrtc::VideoSendStream::StreamStats::StreamType type) { +const char* StreamTypeToString(VideoSendStream::StreamStats::StreamType type) { switch (type) { - case webrtc::VideoSendStream::StreamStats::StreamType::kMedia: + case VideoSendStream::StreamStats::StreamType::kMedia: return "kMedia"; - case webrtc::VideoSendStream::StreamStats::StreamType::kRtx: + case VideoSendStream::StreamStats::StreamType::kRtx: return "kRtx"; - case webrtc::VideoSendStream::StreamStats::StreamType::kFlexfec: + case VideoSendStream::StreamStats::StreamType::kFlexfec: return "kFlexfec"; } return nullptr; } -bool IsEnabled(const webrtc::FieldTrialsView& trials, absl::string_view name) { +bool IsEnabled(const FieldTrialsView& trials, absl::string_view name) { return absl::StartsWith(trials.Lookup(name), "Enabled"); } -bool IsDisabled(const webrtc::FieldTrialsView& trials, absl::string_view name) { +bool IsDisabled(const FieldTrialsView& trials, absl::string_view name) { return absl::StartsWith(trials.Lookup(name), "Disabled"); } -void AddDefaultFeedbackParams(VideoCodec* codec, - const webrtc::FieldTrialsView& trials) { +void AddDefaultFeedbackParams(Codec* codec, const FieldTrialsView& trials) { // Don't add any feedback params for RED and ULPFEC. if (codec->name == kRedCodecName || codec->name == kUlpfecCodecName) return; @@ -129,74 +159,36 @@ void AddDefaultFeedbackParams(VideoCodec* codec, } } -// Helper function to determine whether a codec should use the [35, 63] range. -// Should be used when adding new codecs (or variants). -bool IsCodecValidForLowerRange(const VideoCodec& codec) { - if (absl::EqualsIgnoreCase(codec.name, kFlexfecCodecName) || - absl::EqualsIgnoreCase(codec.name, kAv1CodecName) || - absl::EqualsIgnoreCase(codec.name, kAv1xCodecName)) { - return true; - } else if (absl::EqualsIgnoreCase(codec.name, kH264CodecName)) { - std::string profile_level_id; - std::string packetization_mode; - - if (codec.GetParam(kH264FmtpProfileLevelId, &profile_level_id)) { - if (absl::StartsWithIgnoreCase(profile_level_id, "4d00")) { - if (codec.GetParam(kH264FmtpPacketizationMode, &packetization_mode)) { - return packetization_mode == "0"; - } - } - // H264 with YUV444. - return absl::StartsWithIgnoreCase(profile_level_id, "f400"); - } - } else if (absl::EqualsIgnoreCase(codec.name, kVp9CodecName)) { - std::string profile_id; - - if (codec.GetParam(kVP9ProfileId, &profile_id)) { - if (profile_id.compare("1") == 0 || profile_id.compare("3") == 0) { - return true; - } - } - } - return false; -} - -// This function will assign dynamic payload types (in the range [96, 127] -// and then [35, 63]) to the input codecs, and also add ULPFEC, RED, FlexFEC, -// and associated RTX codecs for recognized codecs (VP8, VP9, H264, and RED). -// It will also add default feedback params to the codecs. +// Get the default set of supported codecs. // is_decoder_factory is needed to keep track of the implict assumption that any // H264 decoder also supports constrained base line profile. // Also, is_decoder_factory is used to decide whether FlexFEC video format // should be advertised as supported. -// TODO(kron): Perhaps it is better to move the implicit knowledge to the place -// where codecs are negotiated. 
template -std::vector GetPayloadTypesAndDefaultCodecs( +std::vector GetDefaultSupportedFormats( const T* factory, bool is_decoder_factory, - bool include_rtx, - const webrtc::FieldTrialsView& trials) { + const FieldTrialsView& trials) { if (!factory) { return {}; } - std::vector supported_formats = + std::vector supported_formats = factory->GetSupportedFormats(); if (is_decoder_factory) { - AddH264ConstrainedBaselineProfileToSupportedFormats(&supported_formats); + webrtc::AddH264ConstrainedBaselineProfileToSupportedFormats( + &supported_formats); } if (supported_formats.empty()) - return std::vector(); - - supported_formats.push_back(webrtc::SdpVideoFormat(kRedCodecName)); - supported_formats.push_back(webrtc::SdpVideoFormat(kUlpfecCodecName)); + return supported_formats; + supported_formats.push_back(SdpVideoFormat(kRedCodecName)); + supported_formats.push_back(SdpVideoFormat(kUlpfecCodecName)); // flexfec-03 is always supported as receive codec and as send codec // only if WebRTC-FlexFEC-03-Advertised is enabled if (is_decoder_factory || IsEnabled(trials, "WebRTC-FlexFEC-03-Advertised")) { - webrtc::SdpVideoFormat flexfec_format(kFlexfecCodecName); + SdpVideoFormat flexfec_format(kFlexfecCodecName); // This value is currently arbitrarily set to 10 seconds. (The unit // is microseconds.) This parameter MUST be present in the SDP, but // we never use the actual value anywhere in our code however. @@ -204,72 +196,95 @@ std::vector GetPayloadTypesAndDefaultCodecs( flexfec_format.parameters = {{kFlexfecFmtpRepairWindow, "10000000"}}; supported_formats.push_back(flexfec_format); } + return supported_formats; +} - // Due to interoperability issues with old Chrome/WebRTC versions that - // ignore the [35, 63] range prefer the lower range for new codecs. - static const int kFirstDynamicPayloadTypeLowerRange = 35; - static const int kLastDynamicPayloadTypeLowerRange = 63; - - static const int kFirstDynamicPayloadTypeUpperRange = 96; - static const int kLastDynamicPayloadTypeUpperRange = 127; - int payload_type_upper = kFirstDynamicPayloadTypeUpperRange; - int payload_type_lower = kFirstDynamicPayloadTypeLowerRange; - - std::vector output_codecs; - for (const webrtc::SdpVideoFormat& format : supported_formats) { - VideoCodec codec = cricket::CreateVideoCodec(format); - bool isFecCodec = absl::EqualsIgnoreCase(codec.name, kUlpfecCodecName) || - absl::EqualsIgnoreCase(codec.name, kFlexfecCodecName); - - // Check if we ran out of payload types. - if (payload_type_lower > kLastDynamicPayloadTypeLowerRange) { - // TODO(https://bugs.chromium.org/p/webrtc/issues/detail?id=12248): - // return an error. - RTC_LOG(LS_ERROR) << "Out of dynamic payload types [35,63] after " - "fallback from [96, 127], skipping the rest."; - RTC_DCHECK_EQ(payload_type_upper, kLastDynamicPayloadTypeUpperRange); - break; - } +// This function will assign dynamic payload types (in the range [96, 127] +// and then [35, 63]) to the input codecs, and also add ULPFEC, RED, FlexFEC, +// It will also add default feedback params to the codecs. 
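The helpers introduced below report failures through `RTCErrorOr<Codec>` instead of silently truncating the codec list. A minimal consumption sketch, mirroring the loop further down (`trials` is assumed to be a `FieldTrialsView` in scope; illustrative only, these helpers stay internal to this file):

```cpp
// Sketch: suggest a payload type for one format and, on success, an RTX codec.
webrtc::PayloadTypePicker pt_mapper;
webrtc::SdpVideoFormat format("VP8");  // Hypothetical input format.
webrtc::RTCErrorOr<webrtc::Codec> assigned =
    AssignPayloadType(format, pt_mapper, trials);
if (!assigned.ok()) {
  RTC_LOG(LS_WARNING) << "No payload type available: "
                      << assigned.error().message();
} else {
  webrtc::Codec codec = assigned.MoveValue();  // codec.id is now assigned.
  webrtc::RTCErrorOr<webrtc::Codec> rtx = AddRtx(codec, pt_mapper);
  // When rtx.ok(), rtx.value() is an RTX codec with a fresh payload type and
  // its "apt" parameter pointing at codec.id.
}
```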
+RTCErrorOr AssignPayloadType(const SdpVideoFormat& format, + PayloadTypePicker& pt_mapper, + const FieldTrialsView& trials) { + Codec codec = webrtc::CreateVideoCodec(format); + RTCErrorOr result = + pt_mapper.SuggestMapping(codec, /* excluder= */ nullptr); + if (!result.ok()) { + return result.MoveError(); + } + codec.id = result.value(); + AddDefaultFeedbackParams(&codec, trials); + return codec; +} + +// This function will add a associated RTX codec for a recognized primary codecs +// (VP8, VP9, AV1, H264, and RED). +RTCErrorOr AddRtx(const Codec& primary_codec, + PayloadTypePicker& pt_mapper) { + Codec rtx_codec = + webrtc::CreateVideoRtxCodec(Codec::kIdNotSet, primary_codec.id); + RTCErrorOr result = + pt_mapper.SuggestMapping(rtx_codec, /* excluder= */ nullptr); + if (!result.ok()) { + return result.MoveError(); + } + rtx_codec.id = result.value(); + return rtx_codec; +} - // Lower range gets used for "new" codecs or when running out of payload - // types in the upper range. - if (IsCodecValidForLowerRange(codec) || - payload_type_upper >= kLastDynamicPayloadTypeUpperRange) { - codec.id = payload_type_lower++; - } else { - codec.id = payload_type_upper++; +// TODO(kron): Perhaps it is better to move the implicit knowledge to the place +// where codecs are negotiated. +template +std::vector GetPayloadTypesAndDefaultCodecs( + const T* factory, + bool is_decoder_factory, + bool include_rtx, + const FieldTrialsView& trials) { + auto supported_formats = + GetDefaultSupportedFormats(factory, is_decoder_factory, trials); + + PayloadTypePicker pt_mapper; + std::unordered_set used_payload_types; + std::vector output_codecs; + for (const auto& supported_format : supported_formats) { + RTCErrorOr result = + AssignPayloadType(supported_format, pt_mapper, trials); + if (!result.ok()) { + // TODO: https://issues.webrtc.org/360058654 - stop assigning PTs here. + // TODO: https://issues.webrtc.org/360058654 - Handle running out of IDs. + continue; } - AddDefaultFeedbackParams(&codec, trials); - output_codecs.push_back(codec); - - // Add associated RTX codec for non-FEC codecs. + bool inserted = used_payload_types.insert(result.value().id).second; + if (!inserted) { + RTC_LOG(LS_WARNING) << "Factory produced duplicate codecs, ignoring " + << result.value() << " produced from " + << supported_format; + continue; + } + output_codecs.push_back(result.value()); if (include_rtx) { - if (!isFecCodec) { - // Check if we ran out of payload types. - if (payload_type_lower > kLastDynamicPayloadTypeLowerRange) { - // TODO(https://bugs.chromium.org/p/webrtc/issues/detail?id=12248): - // return an error. - RTC_LOG(LS_ERROR) << "Out of dynamic payload types [35,63] after " - "fallback from [96, 127], skipping the rest."; - RTC_DCHECK_EQ(payload_type_upper, kLastDynamicPayloadTypeUpperRange); - break; - } - if (IsCodecValidForLowerRange(codec) || - payload_type_upper >= kLastDynamicPayloadTypeUpperRange) { - output_codecs.push_back( - cricket::CreateVideoRtxCodec(payload_type_lower++, codec.id)); - } else { - output_codecs.push_back( - cricket::CreateVideoRtxCodec(payload_type_upper++, codec.id)); - } + Codec::ResiliencyType resiliency_type = + result.value().GetResiliencyType(); + // FEC codecs do not use retransmission. + if (resiliency_type == Codec::ResiliencyType::kFlexfec || + resiliency_type == Codec::ResiliencyType::kUlpfec) { + continue; + } + + RTCErrorOr rtx_result = AddRtx(result.value(), pt_mapper); + if (!rtx_result.ok()) { + // TODO: https://issues.webrtc.org/360058654 - stop assigning PTs here. 
+ // TODO: https://issues.webrtc.org/360058654 - Handle running out of + // IDs. + continue; } + output_codecs.push_back(rtx_result.MoveValue()); } } return output_codecs; } -static std::string CodecVectorToString(const std::vector& codecs) { - rtc::StringBuilder out; +static std::string CodecVectorToString(const std::vector& codecs) { + StringBuilder out; out << "{"; for (size_t i = 0; i < codecs.size(); ++i) { out << codecs[i].ToString(); @@ -281,7 +296,7 @@ static std::string CodecVectorToString(const std::vector& codecs) { return out.Release(); } -static bool ValidateCodecFormats(const std::vector& codecs) { +static bool ValidateCodecFormats(const std::vector& codecs) { bool has_video = false; for (size_t i = 0; i < codecs.size(); ++i) { if (!codecs[i].ValidateCodecFormat()) { @@ -356,8 +371,11 @@ static bool ValidateStreamParams(const StreamParams& sp) { } } for (const auto& group : sp.ssrc_groups) { - if (group.semantics != kSimSsrcGroupSemantics) + if (!(group.semantics == kFidSsrcGroupSemantics || + group.semantics == kSimSsrcGroupSemantics || + group.semantics == kFecFrSsrcGroupSemantics)) { continue; + } for (uint32_t group_ssrc : group.ssrcs) { auto it = absl::c_find_if(sp.ssrcs, [&group_ssrc](uint32_t ssrc) { return ssrc == group_ssrc; @@ -365,7 +383,7 @@ static bool ValidateStreamParams(const StreamParams& sp) { if (it == sp.ssrcs.end()) { RTC_LOG(LS_ERROR) << "SSRC '" << group_ssrc << "' missing from StreamParams ssrcs with semantics " - << kSimSsrcGroupSemantics << ": " << sp.ToString(); + << group.semantics << ": " << sp.ToString(); return false; } } @@ -375,7 +393,7 @@ static bool ValidateStreamParams(const StreamParams& sp) { // Returns true if the given codec is disallowed from doing simulcast. bool IsCodecDisabledForSimulcast(bool legacy_scalability_mode, - webrtc::VideoCodecType codec_type) { + VideoCodecType codec_type) { if (legacy_scalability_mode && (codec_type == webrtc::kVideoCodecVP9 || codec_type == webrtc::kVideoCodecAV1)) { return true; @@ -384,13 +402,13 @@ bool IsCodecDisabledForSimulcast(bool legacy_scalability_mode, return false; } -bool IsLayerActive(const webrtc::RtpEncodingParameters& layer) { +bool IsLayerActive(const RtpEncodingParameters& layer) { return layer.active && (!layer.max_bitrate_bps || *layer.max_bitrate_bps > 0) && (!layer.max_framerate || *layer.max_framerate > 0); } -int NumActiveStreams(const webrtc::RtpParameters& rtp_parameters) { +int NumActiveStreams(const RtpParameters& rtp_parameters) { int res = 0; for (size_t i = 0; i < rtp_parameters.encodings.size(); ++i) { if (rtp_parameters.encodings[i].active) { @@ -400,35 +418,34 @@ int NumActiveStreams(const webrtc::RtpParameters& rtp_parameters) { return res; } -absl::optional NumSpatialLayersFromEncoding( - const webrtc::RtpParameters& rtp_parameters, +std::optional NumSpatialLayersFromEncoding( + const RtpParameters& rtp_parameters, size_t idx) { if (idx >= rtp_parameters.encodings.size()) - return absl::nullopt; + return std::nullopt; - absl::optional scalability_mode = + std::optional scalability_mode = webrtc::ScalabilityModeFromString( rtp_parameters.encodings[idx].scalability_mode.value_or("")); return scalability_mode - ? absl::optional( + ? std::optional( ScalabilityModeToNumSpatialLayers(*scalability_mode)) - : absl::nullopt; + : std::nullopt; } -std::map +std::map MergeInfoAboutOutboundRtpSubstreams( - const std::map& - substreams) { - std::map rtp_substreams; + const std::map& substreams) { + std::map rtp_substreams; // Add substreams for all RTP media streams. 
for (const auto& pair : substreams) { uint32_t ssrc = pair.first; - const webrtc::VideoSendStream::StreamStats& substream = pair.second; + const VideoSendStream::StreamStats& substream = pair.second; switch (substream.type) { - case webrtc::VideoSendStream::StreamStats::StreamType::kMedia: + case VideoSendStream::StreamStats::StreamType::kMedia: break; - case webrtc::VideoSendStream::StreamStats::StreamType::kRtx: - case webrtc::VideoSendStream::StreamStats::StreamType::kFlexfec: + case VideoSendStream::StreamStats::StreamType::kRtx: + case VideoSendStream::StreamStats::StreamType::kFlexfec: continue; } rtp_substreams.insert(std::make_pair(ssrc, substream)); @@ -437,16 +454,15 @@ MergeInfoAboutOutboundRtpSubstreams( // substream stats. for (const auto& pair : substreams) { switch (pair.second.type) { - case webrtc::VideoSendStream::StreamStats::StreamType::kMedia: + case VideoSendStream::StreamStats::StreamType::kMedia: continue; - case webrtc::VideoSendStream::StreamStats::StreamType::kRtx: - case webrtc::VideoSendStream::StreamStats::StreamType::kFlexfec: + case VideoSendStream::StreamStats::StreamType::kRtx: + case VideoSendStream::StreamStats::StreamType::kFlexfec: break; } // The associated substream is an RTX or FlexFEC substream that is // referencing an RTP media substream. - const webrtc::VideoSendStream::StreamStats& associated_substream = - pair.second; + const VideoSendStream::StreamStats& associated_substream = pair.second; RTC_DCHECK(associated_substream.referenced_media_ssrc.has_value()); uint32_t media_ssrc = associated_substream.referenced_media_ssrc.value(); if (substreams.find(media_ssrc) == substreams.end()) { @@ -457,8 +473,7 @@ MergeInfoAboutOutboundRtpSubstreams( << "RTP stats."; continue; } - webrtc::VideoSendStream::StreamStats& rtp_substream = - rtp_substreams[media_ssrc]; + VideoSendStream::StreamStats& rtp_substream = rtp_substreams[media_ssrc]; // We only merge `rtp_stats`. All other metrics are not applicable for RTX // and FlexFEC. @@ -470,8 +485,8 @@ MergeInfoAboutOutboundRtpSubstreams( } bool IsActiveFromEncodings( - absl::optional ssrc, - const std::vector& encodings) { + std::optional ssrc, + const std::vector& encodings) { if (ssrc.has_value()) { // Report the `active` value of a specific ssrc, or false if an encoding // with this ssrc does not exist. @@ -491,12 +506,11 @@ bool IsActiveFromEncodings( return false; } -bool IsScalabilityModeSupportedByCodec( - const VideoCodec& codec, - const std::string& scalability_mode, - const webrtc::VideoSendStream::Config& config) { +bool IsScalabilityModeSupportedByCodec(const Codec& codec, + const std::string& scalability_mode, + const VideoSendStream::Config& config) { return config.encoder_settings.encoder_factory - ->QueryCodecSupport(webrtc::SdpVideoFormat(codec.name, codec.params), + ->QueryCodecSupport(SdpVideoFormat(codec.name, codec.params), scalability_mode) .is_supported; } @@ -504,9 +518,9 @@ bool IsScalabilityModeSupportedByCodec( // Fallback to default value if the scalability mode is unset or unsupported by // the codec. 
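The rewritten fallback below degrades in two steps: keep the requested scalability mode when the encoder factory reports support for it, otherwise try the default mode, and only then drop to the no-layering mode. A hedged sketch of that decision using only the factory query this file already relies on (`ResolveScalabilityMode` is an illustrative name, not part of the patch; WebRTC headers as already included by webrtc_video_engine.cc):

```cpp
#include <optional>
#include <string>

// Illustrative only; mirrors FallbackToDefaultScalabilityModeIfNotSupported().
std::string ResolveScalabilityMode(webrtc::VideoEncoderFactory& factory,
                                   const webrtc::Codec& codec,
                                   std::optional<std::string> requested) {
  auto supported = [&](const std::string& mode) {
    return factory
        .QueryCodecSupport(webrtc::SdpVideoFormat(codec.name, codec.params),
                           mode)
        .is_supported;
  };
  if (requested && supported(*requested)) {
    return *requested;  // Keep what the application asked for.
  }
  if (supported(std::string(webrtc::kDefaultScalabilityModeStr))) {
    return std::string(webrtc::kDefaultScalabilityModeStr);
  }
  // Last resort: single stream without layering.
  return std::string(webrtc::kNoLayeringScalabilityModeStr);
}
```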
void FallbackToDefaultScalabilityModeIfNotSupported( - const VideoCodec& codec, - const webrtc::VideoSendStream::Config& config, - std::vector& encodings) { + const Codec& codec, + const VideoSendStream::Config& config, + std::vector& encodings) { if (!absl::c_any_of(encodings, [](const webrtc::RtpEncodingParameters& encoding) { return encoding.scalability_mode && @@ -527,10 +541,17 @@ void FallbackToDefaultScalabilityModeIfNotSupported( // scalability mode of the first encoding when the others are inactive. continue; } + if (!encoding.scalability_mode.has_value() || !IsScalabilityModeSupportedByCodec(codec, *encoding.scalability_mode, config)) { - encoding.scalability_mode = webrtc::kDefaultScalabilityModeStr; + encoding.scalability_mode = + (encoding.scalability_mode != + std::string(webrtc::kDefaultScalabilityModeStr) && + IsScalabilityModeSupportedByCodec( + codec, webrtc::kDefaultScalabilityModeStr, config)) + ? webrtc::kDefaultScalabilityModeStr + : webrtc::kNoLayeringScalabilityModeStr; RTC_LOG(LS_INFO) << " -> " << *encoding.scalability_mode; } } @@ -540,29 +561,39 @@ void FallbackToDefaultScalabilityModeIfNotSupported( // "codecs". Note that VideoCodecSettings correspond to concrete codecs like // VP8, VP9, H264 while VideoCodecs correspond also to "virtual" codecs like // RTX, ULPFEC, FLEXFEC. -std::vector MapCodecs( - const std::vector& codecs) { +RTCErrorOr> MapCodecs( + const std::vector& codecs) { + std::vector video_codecs; if (codecs.empty()) { - return {}; + return video_codecs; } - std::vector video_codecs; std::map payload_codec_type; // `rtx_mapping` maps video payload type to rtx payload type. std::map rtx_mapping; std::map rtx_time_mapping; + std::map defined_codecs; - webrtc::UlpfecConfig ulpfec_config; - absl::optional flexfec_payload_type; + UlpfecConfig ulpfec_config; + std::optional flexfec_payload_type; - for (const VideoCodec& in_codec : codecs) { + for (const webrtc::Codec& in_codec : codecs) { const int payload_type = in_codec.id; if (payload_codec_type.find(payload_type) != payload_codec_type.end()) { - RTC_LOG(LS_ERROR) << "Payload type already registered: " - << in_codec.ToString(); - return {}; + if (webrtc::MatchesWithCodecRules(defined_codecs.at(in_codec.id), + in_codec)) { + // Ignore second occurence of the same codec. + // This can happen with multiple H.264 profiles. 
+ continue; + } + RTC_LOG(LS_ERROR) << "Duplicate codec ID, rejecting " << in_codec + << " because " << defined_codecs.at(in_codec.id) + << " is earlier in the list."; + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Duplicate codec ID with non-matching codecs"); } + defined_codecs.insert({in_codec.id, in_codec}); payload_codec_type[payload_type] = in_codec.GetResiliencyType(); switch (in_codec.GetResiliencyType()) { @@ -610,7 +641,8 @@ std::vector MapCodecs( RTC_LOG(LS_ERROR) << "RTX codec with invalid or no associated payload type: " << in_codec.ToString(); - return {}; + return RTCError(RTCErrorType::INVALID_PARAMETER, + "RTX codec with invalid APT"); } int rtx_time; if (in_codec.GetParam(kCodecParamRtxTime, &rtx_time) && rtx_time > 0) { @@ -639,7 +671,8 @@ std::vector MapCodecs( RTC_LOG(LS_ERROR) << "RTX codec (PT=" << rtx_payload_type << ") mapped to PT=" << associated_payload_type << " which is not in the codec list."; - return {}; + return RTCError(RTCErrorType::INVALID_PARAMETER, + "RTX codec with unlisted APT"); } const Codec::ResiliencyType associated_codec_type = it->second; if (associated_codec_type != Codec::ResiliencyType::kNone && @@ -648,7 +681,8 @@ std::vector MapCodecs( << "RTX PT=" << rtx_payload_type << " not mapped to regular video codec or RED codec (PT=" << associated_payload_type << ")."; - return {}; + return RTCError(RTCErrorType::INVALID_PARAMETER, + "RTX codec with APT not video"); } if (associated_payload_type == ulpfec_config.red_payload_type) { @@ -706,7 +740,7 @@ bool NonFlexfecReceiveCodecsHaveChanged(std::vector before, std::string CodecSettingsVectorToString( const std::vector& codecs) { - rtc::StringBuilder out; + StringBuilder out; out << "{"; for (size_t i = 0; i < codecs.size(); ++i) { out << codecs[i].codec.ToString(); @@ -719,10 +753,10 @@ std::string CodecSettingsVectorToString( } void ExtractCodecInformation( - rtc::ArrayView recv_codecs, + ArrayView recv_codecs, std::map& rtx_associated_payload_types, std::set& raw_payload_types, - std::vector& decoders) { + std::vector& decoders) { RTC_DCHECK(!recv_codecs.empty()); RTC_DCHECK(rtx_associated_payload_types.empty()); RTC_DCHECK(raw_payload_types.empty()); @@ -730,7 +764,7 @@ void ExtractCodecInformation( for (const VideoCodecSettings& recv_codec : recv_codecs) { decoders.emplace_back( - webrtc::SdpVideoFormat(recv_codec.codec.name, recv_codec.codec.params), + SdpVideoFormat(recv_codec.codec.name, recv_codec.codec.params), recv_codec.codec.id); rtx_associated_payload_types.emplace(recv_codec.rtx_payload_type, recv_codec.codec.id); @@ -740,13 +774,64 @@ void ExtractCodecInformation( } } +int ParseReceiveBufferSize(const FieldTrialsView& trials) { + FieldTrialParameter size_bytes("size_bytes", kVideoRtpRecvBufferSize); + webrtc::ParseFieldTrial({&size_bytes}, + trials.Lookup("WebRTC-ReceiveBufferSize")); + if (size_bytes.Get() < 10'000 || size_bytes.Get() > 10'000'000) { + RTC_LOG(LS_WARNING) << "WebRTC-ReceiveBufferSize out of bounds: " + << size_bytes.Get(); + return kVideoRtpRecvBufferSize; + } + return size_bytes.Get(); +} + +RTCError ResolveSendCodecs( + const VideoCodecSettings& current_codec, + const std::vector& current_codecs, + const std::vector& encodings, + const std::vector negotiated_codecs, + std::vector* resolved_codecs) { + RTC_DCHECK(resolved_codecs); + resolved_codecs->clear(); + for (size_t i = 0; i < encodings.size(); i++) { + const std::optional& requested_codec = encodings[i].codec; + std::optional found_codec; + if (!requested_codec) { + found_codec = current_codec; + } 
else if (i < current_codecs.size()) { + const VideoCodecSettings& codec = current_codecs[i]; + if (IsSameRtpCodecIgnoringLevel(codec.codec, *requested_codec)) { + found_codec = codec; + } + } + if (!found_codec) { + RTC_DCHECK(requested_codec); + auto matched_codec = + absl::c_find_if(negotiated_codecs, [&](auto negotiated_codec) { + return IsSameRtpCodecIgnoringLevel(negotiated_codec.codec, + *requested_codec); + }); + if (matched_codec == negotiated_codecs.end()) { + return RTCError(RTCErrorType::INVALID_MODIFICATION, + "Attempted to use an unsupported codec for layer " + + std::to_string(i)); + } + found_codec = *matched_codec; + } + RTC_DCHECK(found_codec); + resolved_codecs->push_back(*found_codec); + } + return RTCError::OK(); +} + } // namespace // --------------- WebRtcVideoEngine --------------------------- WebRtcVideoEngine::WebRtcVideoEngine( - std::unique_ptr video_encoder_factory, - std::unique_ptr video_decoder_factory, - const webrtc::FieldTrialsView& trials) + std::unique_ptr video_encoder_factory, + std::unique_ptr video_decoder_factory, + const FieldTrialsView& trials) : decoder_factory_(std::move(video_decoder_factory)), encoder_factory_(std::move(video_encoder_factory)), trials_(trials) { @@ -759,40 +844,41 @@ WebRtcVideoEngine::~WebRtcVideoEngine() { std::unique_ptr WebRtcVideoEngine::CreateSendChannel( - webrtc::Call* call, + Call* call, const MediaConfig& config, const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) { + const CryptoOptions& crypto_options, + VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) { return std::make_unique( call, config, options, crypto_options, encoder_factory_.get(), decoder_factory_.get(), video_bitrate_allocator_factory); } std::unique_ptr -WebRtcVideoEngine::CreateReceiveChannel( - webrtc::Call* call, - const MediaConfig& config, - const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options) { +WebRtcVideoEngine::CreateReceiveChannel(Call* call, + const MediaConfig& config, + const VideoOptions& options, + const CryptoOptions& crypto_options) { return std::make_unique( call, config, options, crypto_options, decoder_factory_.get()); } -std::vector WebRtcVideoEngine::send_codecs(bool include_rtx) const { +std::vector WebRtcVideoEngine::LegacySendCodecs(bool include_rtx) const { return GetPayloadTypesAndDefaultCodecs(encoder_factory_.get(), /*is_decoder_factory=*/false, include_rtx, trials_); } -std::vector WebRtcVideoEngine::recv_codecs(bool include_rtx) const { +std::vector WebRtcVideoEngine::LegacyRecvCodecs(bool include_rtx) const { return GetPayloadTypesAndDefaultCodecs(decoder_factory_.get(), /*is_decoder_factory=*/true, include_rtx, trials_); } -std::vector +std::vector WebRtcVideoEngine::GetRtpHeaderExtensions() const { - std::vector result; + std::vector result; + // id is *not* incremented for non-default extensions, UsedIds needs to + // resolve conflicts. 
int id = 1; for (const auto& uri : {webrtc::RtpExtension::kTimestampOffsetUri, @@ -804,52 +890,53 @@ WebRtcVideoEngine::GetRtpHeaderExtensions() const { webrtc::RtpExtension::kVideoTimingUri, webrtc::RtpExtension::kColorSpaceUri, webrtc::RtpExtension::kMidUri, webrtc::RtpExtension::kRidUri, webrtc::RtpExtension::kRepairedRidUri}) { - result.emplace_back(uri, id++, webrtc::RtpTransceiverDirection::kSendRecv); + result.emplace_back(uri, id++, RtpTransceiverDirection::kSendRecv); } + result.emplace_back(RtpExtension::kCorruptionDetectionUri, id++, + /*preferred_encrypt=*/true, + RtpTransceiverDirection::kStopped); for (const auto& uri : {webrtc::RtpExtension::kAbsoluteCaptureTimeUri}) { - result.emplace_back(uri, id++, webrtc::RtpTransceiverDirection::kStopped); + result.emplace_back(uri, id, RtpTransceiverDirection::kStopped); } - result.emplace_back(webrtc::RtpExtension::kGenericFrameDescriptorUri00, id++, + result.emplace_back(RtpExtension::kGenericFrameDescriptorUri00, id, IsEnabled(trials_, "WebRTC-GenericDescriptorAdvertised") - ? webrtc::RtpTransceiverDirection::kSendRecv - : webrtc::RtpTransceiverDirection::kStopped); + ? RtpTransceiverDirection::kSendRecv + : RtpTransceiverDirection::kStopped); result.emplace_back( - webrtc::RtpExtension::kDependencyDescriptorUri, id++, + RtpExtension::kDependencyDescriptorUri, id, IsEnabled(trials_, "WebRTC-DependencyDescriptorAdvertised") - ? webrtc::RtpTransceiverDirection::kSendRecv - : webrtc::RtpTransceiverDirection::kStopped); - + ? RtpTransceiverDirection::kSendRecv + : RtpTransceiverDirection::kStopped); result.emplace_back( - webrtc::RtpExtension::kVideoLayersAllocationUri, id++, + RtpExtension::kVideoLayersAllocationUri, id, IsEnabled(trials_, "WebRTC-VideoLayersAllocationAdvertised") - ? webrtc::RtpTransceiverDirection::kSendRecv - : webrtc::RtpTransceiverDirection::kStopped); + ? RtpTransceiverDirection::kSendRecv + : RtpTransceiverDirection::kStopped); // VideoFrameTrackingId is a test-only extension. if (IsEnabled(trials_, "WebRTC-VideoFrameTrackingIdAdvertised")) { - result.emplace_back(webrtc::RtpExtension::kVideoFrameTrackingIdUri, id++, - webrtc::RtpTransceiverDirection::kSendRecv); + result.emplace_back(RtpExtension::kVideoFrameTrackingIdUri, id, + RtpTransceiverDirection::kSendRecv); } return result; } // Free function, exported for testing -std::map +std::map MergeInfoAboutOutboundRtpSubstreamsForTesting( - const std::map& - substreams) { + const std::map& substreams) { return MergeInfoAboutOutboundRtpSubstreams(substreams); } // --------------- WebRtcVideoSendChannel ---------------------- WebRtcVideoSendChannel::WebRtcVideoSendChannel( - webrtc::Call* call, + Call* call, const MediaConfig& config, const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::VideoEncoderFactory* encoder_factory, - webrtc::VideoDecoderFactory* decoder_factory, - webrtc::VideoBitrateAllocatorFactory* bitrate_allocator_factory) + const CryptoOptions& crypto_options, + VideoEncoderFactory* encoder_factory, + VideoDecoderFactory* decoder_factory, + VideoBitrateAllocatorFactory* bitrate_allocator_factory) : MediaChannelUtil(call->network_thread(), config.enable_dscp), worker_thread_(call->worker_thread()), sending_(false), @@ -869,9 +956,11 @@ WebRtcVideoSendChannel::WebRtcVideoSendChannel( crypto_options_(crypto_options) { RTC_DCHECK_RUN_ON(&thread_checker_); rtcp_receiver_report_ssrc_ = kDefaultRtcpReceiverReportSsrc; + // Crash if MapCodecs fails. 
recv_codecs_ = MapCodecs(GetPayloadTypesAndDefaultCodecs( - decoder_factory_, /*is_decoder_factory=*/true, - /*include_rtx=*/true, call_->trials())); + decoder_factory_, /*is_decoder_factory=*/true, + /*include_rtx=*/true, call_->trials())) + .value(); recv_flexfec_payload_type_ = recv_codecs_.empty() ? 0 : recv_codecs_.front().flexfec_payload_type; } @@ -881,9 +970,9 @@ WebRtcVideoSendChannel::~WebRtcVideoSendChannel() { delete kv.second; } -rtc::scoped_refptr +scoped_refptr WebRtcVideoSendChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( - const VideoCodec& codec) { + const Codec& codec) { RTC_DCHECK_RUN_ON(&thread_checker_); bool is_screencast = parameters_.options.is_screencast.value_or(false); // No automatic resizing when using simulcast or screencast, or when @@ -906,17 +995,15 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( return nullptr; } if (absl::EqualsIgnoreCase(codec.name, kVp8CodecName)) { - webrtc::VideoCodecVP8 vp8_settings = - webrtc::VideoEncoder::GetDefaultVp8Settings(); + VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings(); vp8_settings.automaticResizeOn = automatic_resize; // VP8 denoising is enabled by default. vp8_settings.denoisingOn = codec_default_denoising ? true : denoising; - return rtc::make_ref_counted< - webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); + return make_ref_counted( + vp8_settings); } if (absl::EqualsIgnoreCase(codec.name, kVp9CodecName)) { - webrtc::VideoCodecVP9 vp9_settings = - webrtc::VideoEncoder::GetDefaultVp9Settings(); + VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); vp9_settings.numberOfSpatialLayers = std::min( parameters_.config.rtp.ssrcs.size(), kConferenceMaxNumSpatialLayers); @@ -930,46 +1017,55 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( vp9_settings.denoisingOn = codec_default_denoising ? true : denoising; // Disable automatic resize if more than one spatial layer is requested. bool vp9_automatic_resize = automatic_resize; - absl::optional num_spatial_layers = + std::optional num_spatial_layers = NumSpatialLayersFromEncoding(rtp_parameters_, /*idx=*/0); if (num_spatial_layers && *num_spatial_layers > 1) { vp9_automatic_resize = false; } vp9_settings.automaticResizeOn = vp9_automatic_resize; if (!is_screencast) { - webrtc::FieldTrialFlag interlayer_pred_experiment_enabled("Enabled"); - webrtc::FieldTrialEnum inter_layer_pred_mode( - "inter_layer_pred_mode", webrtc::InterLayerPredMode::kOnKeyPic, - {{"off", webrtc::InterLayerPredMode::kOff}, - {"on", webrtc::InterLayerPredMode::kOn}, - {"onkeypic", webrtc::InterLayerPredMode::kOnKeyPic}}); - webrtc::FieldTrialFlag force_flexible_mode("FlexibleMode"); + FieldTrialFlag interlayer_pred_experiment_enabled("Enabled"); + FieldTrialEnum inter_layer_pred_mode( + "inter_layer_pred_mode", InterLayerPredMode::kOnKeyPic, + {{"off", InterLayerPredMode::kOff}, + {"on", InterLayerPredMode::kOn}, + {"onkeypic", InterLayerPredMode::kOnKeyPic}}); webrtc::ParseFieldTrial( - {&interlayer_pred_experiment_enabled, &inter_layer_pred_mode, - &force_flexible_mode}, + {&interlayer_pred_experiment_enabled, &inter_layer_pred_mode}, call_->trials().Lookup("WebRTC-Vp9InterLayerPred")); if (interlayer_pred_experiment_enabled) { vp9_settings.interLayerPred = inter_layer_pred_mode; } else { // Limit inter-layer prediction to key pictures by default. 
-        vp9_settings.interLayerPred = webrtc::InterLayerPredMode::kOnKeyPic;
+        vp9_settings.interLayerPred = InterLayerPredMode::kOnKeyPic;
       }
-      vp9_settings.flexibleMode = force_flexible_mode.Get();
+
+      // TODO(webrtc:329396373): Remove after flexible mode is fully deployed.
+      vp9_settings.flexibleMode =
+          !IsDisabled(call_->trials(), "WebRTC-Video-Vp9FlexibleMode");
     } else {
       // Multiple spatial layers vp9 screenshare needs flexible mode.
       vp9_settings.flexibleMode = vp9_settings.numberOfSpatialLayers > 1;
-      vp9_settings.interLayerPred = webrtc::InterLayerPredMode::kOn;
+      vp9_settings.interLayerPred = InterLayerPredMode::kOn;
     }
-    return rtc::make_ref_counted<
-        webrtc::VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
+    return make_ref_counted(
+        vp9_settings);
+  }
+  if (absl::EqualsIgnoreCase(codec.name, kAv1CodecName)) {
+    VideoCodecAV1 av1_settings = {.automatic_resize_on = automatic_resize};
+    if (NumSpatialLayersFromEncoding(rtp_parameters_, /*idx=*/0) > 1) {
+      av1_settings.automatic_resize_on = false;
+    }
+    return make_ref_counted(
+        av1_settings);
   }
   return nullptr;
 }
 std::vector WebRtcVideoSendChannel::SelectSendVideoCodecs(
     const std::vector& remote_mapped_codecs) const {
-  std::vector sdp_formats =
+  std::vector sdp_formats =
       encoder_factory_ ? encoder_factory_->GetImplementations()
-                       : std::vector();
+                       : std::vector();
   // The returned vector holds the VideoCodecSettings in terms of preference.
   // They are ordered by receive codec preference first and local implementation
@@ -983,6 +1079,8 @@ std::vector WebRtcVideoSendChannel::SelectSendVideoCodecs(
       // following the spec in https://tools.ietf.org/html/rfc6184#section-8.2.2
       // since we should limit the encode level to the lower of local and remote
      // level when level asymmetry is not allowed.
+      // For H.265, the level asymmetry is implicitly allowed. We need to make
+      // sure the encode level is set to the remote offered level.
       if (format_it->IsSameCodec(
               {remote_codec.codec.name, remote_codec.codec.params})) {
         encoders.push_back(remote_codec);
@@ -1011,13 +1109,20 @@ bool WebRtcVideoSendChannel::GetChangedSenderParameters(
     return false;
   }
+  auto result = MapCodecs(params.codecs);
+  if (!result.ok()) {
+    RTC_LOG(LS_ERROR) << "Failure in codec list, error = " << result.error();
+    return false;
+  }
+  std::vector mapped_codecs = result.value();
+
   std::vector negotiated_codecs =
-      SelectSendVideoCodecs(MapCodecs(params.codecs));
+      SelectSendVideoCodecs(mapped_codecs);
-  // We should only fail here if send direction is enabled.
   if (params.is_stream_active && negotiated_codecs.empty()) {
-    RTC_LOG(LS_ERROR) << "No video codecs supported.";
-    return false;
+    // This is not a failure but will lead to the answer being rejected.
+    RTC_LOG(LS_ERROR) << "No video codecs in common.";
+    return true;
   }
   // Never enable sending FlexFEC, unless we are in the experiment.
@@ -1026,7 +1131,7 @@ bool WebRtcVideoSendChannel::GetChangedSenderParameters(
     codec.flexfec_payload_type = -1;
   }
-  absl::optional force_codec;
+  std::optional force_codec;
   if (!send_streams_.empty()) {
     // Since we do not support mixed-codec simulcast yet,
     // all send streams must have the same codec value.
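Editor's illustration (not part of the patch): the hunks above change GetChangedSenderParameters() to parse the codec list first and reject malformed input with an error, while treating an empty codec intersection as a negotiation outcome ("No video codecs in common") rather than an internal failure. Below is a minimal, self-contained sketch of that parse-check-use flow; MapResult, MapPayloadTypes and GetChangedParameters are invented stand-ins, and only the ok()/error()/value() shape is taken from the diff.

#include <iostream>
#include <string>
#include <vector>

// Stand-in for the error-carrying type MapCodecs() returns in the diff.
struct MapResult {
  std::vector<int> codecs;
  std::string error;
  bool ok() const { return error.empty(); }
  const std::vector<int>& value() const { return codecs; }
};

MapResult MapPayloadTypes(const std::vector<int>& payload_types) {
  for (int pt : payload_types) {
    if (pt < 0 || pt > 127) {
      return {{}, "payload type " + std::to_string(pt) + " out of range"};
    }
  }
  return {payload_types, ""};
}

bool GetChangedParameters(const std::vector<int>& payload_types,
                          bool is_stream_active) {
  MapResult result = MapPayloadTypes(payload_types);
  if (!result.ok()) {
    // Malformed codec list: reject the parameters outright.
    std::cerr << "Failure in codec list, error = " << result.error() << "\n";
    return false;
  }
  if (is_stream_active && result.value().empty()) {
    // Not an internal failure; the answer will simply be rejected.
    std::cerr << "No video codecs in common.\n";
  }
  return true;
}

int main() {
  GetChangedParameters({96, 97}, /*is_stream_active=*/true);  // accepted
  GetChangedParameters({300}, /*is_stream_active=*/true);     // error path
}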
@@ -1034,8 +1139,8 @@ bool WebRtcVideoSendChannel::GetChangedSenderParameters( if (rtp_parameters.encodings[0].codec) { auto matched_codec = absl::c_find_if(negotiated_codecs, [&](auto negotiated_codec) { - return negotiated_codec.codec.MatchesRtpCodec( - *rtp_parameters.encodings[0].codec); + return IsSameRtpCodec(negotiated_codec.codec, + *rtp_parameters.encodings[0].codec); }); if (matched_codec != negotiated_codecs.end()) { force_codec = *matched_codec; @@ -1053,25 +1158,59 @@ bool WebRtcVideoSendChannel::GetChangedSenderParameters( if (negotiated_codecs_ != negotiated_codecs) { if (negotiated_codecs.empty()) { - changed_params->send_codec = absl::nullopt; + changed_params->send_codec = std::nullopt; } else if (force_codec) { changed_params->send_codec = force_codec; } else if (send_codec() != negotiated_codecs.front()) { changed_params->send_codec = negotiated_codecs.front(); } - changed_params->negotiated_codecs = std::move(negotiated_codecs); + changed_params->negotiated_codecs = negotiated_codecs; + } + + // For mixed-codec simulcast + std::vector send_codecs; + if (!send_streams_.empty() && !negotiated_codecs.empty()) { + bool needs_update = false; + auto rtp_parameters = send_streams_.begin()->second->GetRtpParameters(); + for (auto& encoding : rtp_parameters.encodings) { + if (encoding.codec) { + auto matched_codec = + absl::c_find_if(negotiated_codecs, [&](auto negotiated_codec) { + return IsSameRtpCodec(negotiated_codec.codec, *encoding.codec); + }); + if (matched_codec != negotiated_codecs.end()) { + send_codecs.push_back(*matched_codec); + } else { + // The requested codec has been negotiated away, we clear it from the + // parameters. + encoding.codec.reset(); + needs_update = true; + send_codecs.push_back(negotiated_codecs.front()); + } + } else { + send_codecs.push_back(negotiated_codecs.front()); + } + } + + if (needs_update) { + send_streams_.begin()->second->SetRtpParameters(rtp_parameters, nullptr); + } + } + + if (send_codecs_ != send_codecs) { + changed_params->send_codecs = send_codecs; } // Handle RTP header extensions. if (params.extmap_allow_mixed != ExtmapAllowMixed()) { changed_params->extmap_allow_mixed = params.extmap_allow_mixed; } - std::vector filtered_extensions = FilterRtpExtensions( - params.extensions, webrtc::RtpExtension::IsSupportedForVideo, true, - call_->trials()); + std::vector filtered_extensions = + FilterRtpExtensions(params.extensions, RtpExtension::IsSupportedForVideo, + true, call_->trials()); if (send_rtp_extensions_ != filtered_extensions) { changed_params->rtp_header_extensions = - absl::optional>(filtered_extensions); + std::optional>(filtered_extensions); } if (params.mid != send_params_.mid) { @@ -1095,9 +1234,8 @@ bool WebRtcVideoSendChannel::GetChangedSenderParameters( // Handle RTCP mode. if (params.rtcp.reduced_size != send_params_.rtcp.reduced_size) { - changed_params->rtcp_mode = params.rtcp.reduced_size - ? webrtc::RtcpMode::kReducedSize - : webrtc::RtcpMode::kCompound; + changed_params->rtcp_mode = + params.rtcp.reduced_size ? 
RtcpMode::kReducedSize : RtcpMode::kCompound; } return true; @@ -1142,9 +1280,8 @@ void WebRtcVideoSendChannel::RequestEncoderFallback() { ApplyChangedParams(params); } -void WebRtcVideoSendChannel::RequestEncoderSwitch( - const webrtc::SdpVideoFormat& format, - bool allow_default_fallback) { +void WebRtcVideoSendChannel::RequestEncoderSwitch(const SdpVideoFormat& format, + bool allow_default_fallback) { if (!worker_thread_->IsCurrent()) { worker_thread_->PostTask( SafeTask(task_safety_.flag(), [this, format, allow_default_fallback] { @@ -1193,6 +1330,12 @@ bool WebRtcVideoSendChannel::ApplyChangedParams( if (changed_params.send_codec) send_codec() = changed_params.send_codec; + if (changed_params.send_codecs) { + send_codecs_ = *changed_params.send_codecs; + } else { + send_codecs_.clear(); + } + if (changed_params.extmap_allow_mixed) { SetExtmapAllowMixed(*changed_params.extmap_allow_mixed); } @@ -1250,7 +1393,7 @@ bool WebRtcVideoSendChannel::ApplyChangedParams( return true; } -webrtc::RtpParameters WebRtcVideoSendChannel::GetRtpSendParameters( +RtpParameters WebRtcVideoSendChannel::GetRtpSendParameters( uint32_t ssrc) const { RTC_DCHECK_RUN_ON(&thread_checker_); auto it = send_streams_.find(ssrc); @@ -1258,13 +1401,13 @@ webrtc::RtpParameters WebRtcVideoSendChannel::GetRtpSendParameters( RTC_LOG(LS_WARNING) << "Attempting to get RTP send parameters for stream " "with ssrc " << ssrc << " which doesn't exist."; - return webrtc::RtpParameters(); + return RtpParameters(); } - webrtc::RtpParameters rtp_params = it->second->GetRtpParameters(); + RtpParameters rtp_params = it->second->GetRtpParameters(); // Need to add the common list of codecs to the send stream-specific // RTP parameters. - for (const VideoCodec& codec : send_params_.codecs) { + for (const webrtc::Codec& codec : send_params_.codecs) { if (send_codec() && send_codec()->codec.id == codec.id) { // Put the current send codec to the front of the codecs list. RTC_DCHECK_EQ(codec.name, send_codec()->codec.name); @@ -1278,10 +1421,10 @@ webrtc::RtpParameters WebRtcVideoSendChannel::GetRtpSendParameters( return rtp_params; } -webrtc::RTCError WebRtcVideoSendChannel::SetRtpSendParameters( +RTCError WebRtcVideoSendChannel::SetRtpSendParameters( uint32_t ssrc, - const webrtc::RtpParameters& parameters, - webrtc::SetParametersCallback callback) { + const RtpParameters& parameters, + SetParametersCallback callback) { RTC_DCHECK_RUN_ON(&thread_checker_); TRACE_EVENT0("webrtc", "WebRtcVideoSendChannel::SetRtpSendParameters"); auto it = send_streams_.find(ssrc); @@ -1290,17 +1433,17 @@ webrtc::RTCError WebRtcVideoSendChannel::SetRtpSendParameters( "with ssrc " << ssrc << " which doesn't exist."; return webrtc::InvokeSetParametersCallback( - callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); + callback, RTCError(RTCErrorType::INTERNAL_ERROR)); } // TODO(deadbeef): Handle setting parameters with a list of codecs in a // different order (which should change the send codec). 
- webrtc::RtpParameters current_parameters = GetRtpSendParameters(ssrc); + RtpParameters current_parameters = GetRtpSendParameters(ssrc); if (current_parameters.codecs != parameters.codecs) { RTC_DLOG(LS_ERROR) << "Using SetParameters to change the set of codecs " "is not currently supported."; return webrtc::InvokeSetParametersCallback( - callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); + callback, RTCError(RTCErrorType::INTERNAL_ERROR)); } if (!parameters.encodings.empty()) { @@ -1308,37 +1451,41 @@ webrtc::RTCError WebRtcVideoSendChannel::SetRtpSendParameters( // https://tools.ietf.org/html/draft-ietf-tsvwg-rtcweb-qos-16#section-5 // TODO(deadbeef): Change values depending on whether we are sending a // keyframe or non-keyframe. - rtc::DiffServCodePoint new_dscp = rtc::DSCP_DEFAULT; + DiffServCodePoint new_dscp = webrtc::DSCP_DEFAULT; switch (parameters.encodings[0].network_priority) { - case webrtc::Priority::kVeryLow: - new_dscp = rtc::DSCP_CS1; + case Priority::kVeryLow: + new_dscp = webrtc::DSCP_CS1; break; - case webrtc::Priority::kLow: - new_dscp = rtc::DSCP_DEFAULT; + case Priority::kLow: + new_dscp = webrtc::DSCP_DEFAULT; break; - case webrtc::Priority::kMedium: - new_dscp = rtc::DSCP_AF42; + case Priority::kMedium: + new_dscp = webrtc::DSCP_AF42; break; - case webrtc::Priority::kHigh: - new_dscp = rtc::DSCP_AF41; + case Priority::kHigh: + new_dscp = webrtc::DSCP_AF41; break; } - // TODO(orphis): Support mixed-codec simulcast - if (parameters.encodings[0].codec && send_codec_ && - !send_codec_->codec.MatchesRtpCodec(*parameters.encodings[0].codec)) { - RTC_LOG(LS_ERROR) << "Trying to change codec to " - << parameters.encodings[0].codec->name; - auto matched_codec = - absl::c_find_if(negotiated_codecs_, [&](auto negotiated_codec) { - return negotiated_codec.codec.MatchesRtpCodec( - *parameters.encodings[0].codec); - }); - RTC_CHECK(matched_codec != negotiated_codecs_.end()); + if (send_codec_ && + std::any_of(parameters.encodings.begin(), parameters.encodings.end(), + [](const auto& e) { return e.codec; })) { + std::vector send_codecs; + auto error = + ResolveSendCodecs(*send_codec_, send_codecs_, parameters.encodings, + negotiated_codecs_, &send_codecs); + if (!error.ok()) { + return webrtc::InvokeSetParametersCallback(callback, error); + } - ChangedSenderParameters params; - params.send_codec = *matched_codec; - ApplyChangedParams(params); + if (send_codecs_ != send_codecs) { + ChangedSenderParameters params; + if (!send_codecs.empty()) { + params.send_codec = send_codecs[0]; + } + params.send_codecs = send_codecs; + ApplyChangedParams(params); + } } SetPreferredDscp(new_dscp); @@ -1346,11 +1493,11 @@ webrtc::RTCError WebRtcVideoSendChannel::SetRtpSendParameters( return it->second->SetRtpParameters(parameters, std::move(callback)); } -absl::optional WebRtcVideoSendChannel::GetSendCodec() const { +std::optional WebRtcVideoSendChannel::GetSendCodec() const { RTC_DCHECK_RUN_ON(&thread_checker_); if (!send_codec()) { RTC_LOG(LS_VERBOSE) << "GetSendCodec: No send codec set."; - return absl::nullopt; + return std::nullopt; } return send_codec()->codec; } @@ -1373,7 +1520,7 @@ bool WebRtcVideoSendChannel::SetSend(bool send) { bool WebRtcVideoSendChannel::SetVideoSend( uint32_t ssrc, const VideoOptions* options, - rtc::VideoSourceInterface* source) { + VideoSourceInterface* source) { RTC_DCHECK_RUN_ON(&thread_checker_); TRACE_EVENT0("webrtc", "SetVideoSend"); RTC_DCHECK(ssrc != 0); @@ -1416,7 +1563,7 @@ bool WebRtcVideoSendChannel::AddSendStream(const StreamParams& 
sp) { for (uint32_t used_ssrc : sp.ssrcs) send_ssrcs_.insert(used_ssrc); - webrtc::VideoSendStream::Config config(transport()); + VideoSendStream::Config config(transport()); for (const RidDescription& rid : sp.rids()) { config.rtp.rids.push_back(rid.rid); @@ -1431,6 +1578,7 @@ bool WebRtcVideoSendChannel::AddSendStream(const StreamParams& sp) { config.encoder_settings.bitrate_allocator_factory = bitrate_allocator_factory_; config.encoder_settings.encoder_switch_request_callback = this; + config.crypto_options = crypto_options_; config.rtp.extmap_allow_mixed = ExtmapAllowMixed(); config.rtcp_report_interval_ms = video_config_.rtcp_report_interval_ms; @@ -1440,15 +1588,15 @@ bool WebRtcVideoSendChannel::AddSendStream(const StreamParams& sp) { WebRtcVideoSendStream* stream = new WebRtcVideoSendStream( call_, sp, std::move(config), default_send_options_, video_config_.enable_cpu_adaptation, bitrate_config_.max_bitrate_bps, - send_codec(), send_rtp_extensions_, send_params_); + send_codec(), send_codecs_, send_rtp_extensions_, send_params_); uint32_t ssrc = sp.first_ssrc(); RTC_DCHECK(ssrc != 0); send_streams_[ssrc] = stream; - if (ssrc_list_changed_callback_) { - ssrc_list_changed_callback_(send_ssrcs_); - } + if (ssrc_list_changed_callback_) { + ssrc_list_changed_callback_(send_ssrcs_); + } if (sending_) { stream->SetSend(true); @@ -1494,7 +1642,7 @@ bool WebRtcVideoSendChannel::GetStats(VideoMediaSendInfo* info) { // Log stats periodically. bool log_stats = false; - int64_t now_ms = rtc::TimeMillis(); + int64_t now_ms = webrtc::TimeMillis(); if (last_send_stats_log_ms_ == -1 || now_ms - last_send_stats_log_ms_ > kStatsLogIntervalMs) { last_send_stats_log_ms_ = now_ms; @@ -1506,7 +1654,7 @@ bool WebRtcVideoSendChannel::GetStats(VideoMediaSendInfo* info) { FillSendCodecStats(info); // TODO(holmer): We should either have rtt available as a metric on // VideoSend/ReceiveStreams, or we should remove rtt from VideoSenderInfo. - webrtc::Call::Stats stats = call_->GetStats(); + Call::Stats stats = call_->GetStats(); if (stats.rtt_ms != -1) { for (size_t i = 0; i < info->senders.size(); ++i) { info->senders[i].rtt_ms = stats.rtt_ms; @@ -1558,7 +1706,7 @@ void WebRtcVideoSendChannel::FillSendCodecStats( send_codec()->codec.id, send_codec()->codec.ToCodecParameters())); } -void WebRtcVideoSendChannel::OnPacketSent(const rtc::SentPacket& sent_packet) { +void WebRtcVideoSendChannel::OnPacketSent(const SentPacketInfo& sent_packet) { RTC_DCHECK_RUN_ON(&network_thread_checker_); // TODO(tommi): We shouldn't need to go through call_ to deliver this // notification. We should already have direct access to @@ -1574,19 +1722,18 @@ void WebRtcVideoSendChannel::OnReadyToSend(bool ready) { RTC_DCHECK_RUN_ON(&network_thread_checker_); RTC_LOG(LS_VERBOSE) << "OnReadyToSend: " << (ready ? "Ready." : "Not ready."); call_->SignalChannelNetworkState( - webrtc::MediaType::VIDEO, - ready ? webrtc::kNetworkUp : webrtc::kNetworkDown); + MediaType::VIDEO, ready ? 
webrtc::kNetworkUp : webrtc::kNetworkDown); } void WebRtcVideoSendChannel::OnNetworkRouteChanged( absl::string_view transport_name, - const rtc::NetworkRoute& network_route) { + const NetworkRoute& network_route) { RTC_DCHECK_RUN_ON(&network_thread_checker_); worker_thread_->PostTask(SafeTask( task_safety_.flag(), [this, name = std::string(transport_name), route = network_route] { RTC_DCHECK_RUN_ON(&thread_checker_); - webrtc::RtpTransportControllerSendInterface* transport = + RtpTransportControllerSendInterface* transport = call_->GetTransportControllerSend(); transport->OnNetworkRouteChanged(name, route); transport->OnTransportOverheadChanged(route.packet_overhead); @@ -1613,12 +1760,12 @@ void WebRtcVideoSendChannel::SetInterface(MediaChannelNetworkInterface* iface) { } MediaChannelUtil::SetOption(MediaChannelNetworkInterface::ST_RTP, - rtc::Socket::OPT_SNDBUF, send_buffer_size); + Socket::OPT_SNDBUF, send_buffer_size); } void WebRtcVideoSendChannel::SetFrameEncryptor( uint32_t ssrc, - rtc::scoped_refptr frame_encryptor) { + scoped_refptr frame_encryptor) { RTC_DCHECK_RUN_ON(&thread_checker_); auto matching_stream = send_streams_.find(ssrc); if (matching_stream != send_streams_.end()) { @@ -1630,7 +1777,7 @@ void WebRtcVideoSendChannel::SetFrameEncryptor( void WebRtcVideoSendChannel::SetEncoderSelector( uint32_t ssrc, - webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { + VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { RTC_DCHECK_RUN_ON(&thread_checker_); auto matching_stream = send_streams_.find(ssrc); if (matching_stream != send_streams_.end()) { @@ -1640,35 +1787,30 @@ void WebRtcVideoSendChannel::SetEncoderSelector( } } -void WebRtcVideoSendChannel::SetVideoCodecSwitchingEnabled(bool enabled) { - RTC_DCHECK_RUN_ON(&thread_checker_); - allow_codec_switching_ = enabled; - if (allow_codec_switching_) { - RTC_LOG(LS_INFO) << "Encoder switching enabled."; - } -} - WebRtcVideoSendChannel::WebRtcVideoSendStream::VideoSendStreamParameters:: VideoSendStreamParameters( - webrtc::VideoSendStream::Config config, + VideoSendStream::Config config, const VideoOptions& options, int max_bitrate_bps, - const absl::optional& codec_settings) + const std::optional& codec_settings, + const std::vector& codec_settings_list) : config(std::move(config)), options(options), max_bitrate_bps(max_bitrate_bps), conference_mode(false), - codec_settings(codec_settings) {} + codec_settings(codec_settings), + codec_settings_list(codec_settings_list) {} WebRtcVideoSendChannel::WebRtcVideoSendStream::WebRtcVideoSendStream( - webrtc::Call* call, + Call* call, const StreamParams& sp, - webrtc::VideoSendStream::Config config, + VideoSendStream::Config config, const VideoOptions& options, bool enable_cpu_overuse_detection, int max_bitrate_bps, - const absl::optional& codec_settings, - const absl::optional>& rtp_extensions, + const std::optional& codec_settings, + const std::vector& codec_settings_list, + const std::optional>& rtp_extensions, // TODO(deadbeef): Don't duplicate information between send_params, // rtp_extensions, options, etc. 
const VideoSenderParameters& send_params) @@ -1679,7 +1821,11 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::WebRtcVideoSendStream( enable_cpu_overuse_detection_(enable_cpu_overuse_detection), source_(nullptr), stream_(nullptr), - parameters_(std::move(config), options, max_bitrate_bps, codec_settings), + parameters_(std::move(config), + options, + max_bitrate_bps, + codec_settings, + codec_settings_list), rtp_parameters_(CreateRtpParametersWithEncodings(sp)), sending_(false), disable_automatic_resize_( @@ -1732,13 +1878,13 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::WebRtcVideoSendStream( rtp_parameters_.header_extensions = *rtp_extensions; } parameters_.config.rtp.rtcp_mode = send_params.rtcp.reduced_size - ? webrtc::RtcpMode::kReducedSize - : webrtc::RtcpMode::kCompound; + ? RtcpMode::kReducedSize + : RtcpMode::kCompound; parameters_.config.rtp.mid = send_params.mid; rtp_parameters_.rtcp.reduced_size = send_params.rtcp.reduced_size; if (codec_settings) { - SetCodec(*codec_settings); + SetCodec(*codec_settings, codec_settings_list); } } @@ -1750,10 +1896,11 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::~WebRtcVideoSendStream() { bool WebRtcVideoSendChannel::WebRtcVideoSendStream::SetVideoSend( const VideoOptions* options, - rtc::VideoSourceInterface* source) { + VideoSourceInterface* source) { TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::SetVideoSend"); RTC_DCHECK_RUN_ON(&thread_checker_); + bool reconfiguration_needed = false; if (options) { VideoOptions old_options = parameters_.options; parameters_.options.SetAll(*options); @@ -1763,19 +1910,27 @@ bool WebRtcVideoSendChannel::WebRtcVideoSendStream::SetVideoSend( // If screen content settings change, we may need to recreate the codec // instance so that the correct type is used. - SetCodec(*parameters_.codec_settings); + SetCodec(*parameters_.codec_settings, parameters_.codec_settings_list); // Mark screenshare parameter as being updated, then test for any other // changes that may require codec reconfiguration. old_options.is_screencast = options->is_screencast; } if (parameters_.options != old_options) { - ReconfigureEncoder(nullptr); + reconfiguration_needed = true; } } if (source_ && stream_) { - stream_->SetSource(nullptr, webrtc::DegradationPreference::DISABLED); + stream_->SetSource(nullptr, DegradationPreference::DISABLED); + if (source && source != source_) { + reconfiguration_needed = true; + } } + + if (reconfiguration_needed) { + ReconfigureEncoder(nullptr); + } + // Switch to the new source. source_ = source; if (source && stream_) { @@ -1784,7 +1939,7 @@ bool WebRtcVideoSendChannel::WebRtcVideoSendStream::SetVideoSend( return true; } -webrtc::DegradationPreference +DegradationPreference WebRtcVideoSendChannel::WebRtcVideoSendStream::GetDegradationPreference() const { // Do not adapt resolution for screen content as this will likely @@ -1792,32 +1947,29 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::GetDegradationPreference() // `this` acts like a VideoSource to make sure SinkWants are handled on the // correct thread. 
if (!enable_cpu_overuse_detection_) { - return webrtc::DegradationPreference::DISABLED; + return DegradationPreference::DISABLED; } - webrtc::DegradationPreference degradation_preference; + DegradationPreference degradation_preference; if (rtp_parameters_.degradation_preference.has_value()) { degradation_preference = *rtp_parameters_.degradation_preference; } else { if (parameters_.options.content_hint == - webrtc::VideoTrackInterface::ContentHint::kFluid) { - degradation_preference = - webrtc::DegradationPreference::MAINTAIN_FRAMERATE; + VideoTrackInterface::ContentHint::kFluid) { + degradation_preference = DegradationPreference::MAINTAIN_FRAMERATE; } else if (parameters_.options.is_screencast.value_or(false) || parameters_.options.content_hint == - webrtc::VideoTrackInterface::ContentHint::kDetailed || + VideoTrackInterface::ContentHint::kDetailed || parameters_.options.content_hint == - webrtc::VideoTrackInterface::ContentHint::kText) { - degradation_preference = - webrtc::DegradationPreference::MAINTAIN_RESOLUTION; + VideoTrackInterface::ContentHint::kText) { + degradation_preference = DegradationPreference::MAINTAIN_RESOLUTION; } else if (IsEnabled(call_->trials(), "WebRTC-Video-BalancedDegradation")) { // Standard wants balanced by default, but it needs to be tuned first. - degradation_preference = webrtc::DegradationPreference::BALANCED; + degradation_preference = DegradationPreference::BALANCED; } else { // Keep MAINTAIN_FRAMERATE by default until BALANCED has been tuned for // all codecs and launched. - degradation_preference = - webrtc::DegradationPreference::MAINTAIN_FRAMERATE; + degradation_preference = DegradationPreference::MAINTAIN_FRAMERATE; } } @@ -1830,7 +1982,8 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::GetSsrcs() const { } void WebRtcVideoSendChannel::WebRtcVideoSendStream::SetCodec( - const VideoCodecSettings& codec_settings) { + const VideoCodecSettings& codec_settings, + const std::vector& codec_settings_list) { RTC_DCHECK_RUN_ON(&thread_checker_); FallbackToDefaultScalabilityModeIfNotSupported( codec_settings.codec, parameters_.config, rtp_parameters_.encodings); @@ -1858,15 +2011,55 @@ void WebRtcVideoSendChannel::WebRtcVideoSendStream::SetCodec( } } - const bool has_lntf = HasLntf(codec_settings.codec); + const bool has_lntf = webrtc::HasLntf(codec_settings.codec); parameters_.config.rtp.lntf.enabled = has_lntf; parameters_.config.encoder_settings.capabilities.loss_notification = has_lntf; parameters_.config.rtp.nack.rtp_history_ms = - HasNack(codec_settings.codec) ? kNackHistoryMs : 0; + webrtc::HasNack(codec_settings.codec) ? kNackHistoryMs : 0; parameters_.codec_settings = codec_settings; + // Settings for mixed-codec simulcast. 
+ if (codec_settings_list.empty()) { + parameters_.config.rtp.stream_configs.clear(); + } else { + if (parameters_.config.rtp.ssrcs.size() == codec_settings_list.size()) { + parameters_.config.rtp.stream_configs.resize( + parameters_.config.rtp.ssrcs.size()); + for (size_t i = 0; i < codec_settings_list.size(); i++) { + auto& stream_config = parameters_.config.rtp.stream_configs[i]; + const auto& cs = codec_settings_list[i]; + stream_config.ssrc = parameters_.config.rtp.ssrcs[i]; + if (i < parameters_.config.rtp.rids.size()) { + stream_config.rid = parameters_.config.rtp.rids[i]; + } + stream_config.payload_name = cs.codec.name; + stream_config.payload_type = cs.codec.id; + stream_config.raw_payload = + cs.codec.packetization == kPacketizationParamRaw; + if (i < parameters_.config.rtp.rtx.ssrcs.size()) { + auto& rtx = stream_config.rtx.emplace( + decltype(stream_config.rtx)::value_type()); + rtx.ssrc = parameters_.config.rtp.rtx.ssrcs[i]; + rtx.payload_type = cs.rtx_payload_type; + } + } + } else { + // TODO(crbug.com/378724147): We need to investigate when it + // has mismatched sizes. + RTC_DCHECK_EQ(parameters_.config.rtp.ssrcs.size(), + codec_settings_list.size()); + + RTC_LOG(LS_ERROR) << "Mismatched sizes between codec_settings_list:" + << codec_settings_list.size() + << ", parameters_.config.rtp.ssrcs:" + << parameters_.config.rtp.ssrcs.size(); + } + } + + parameters_.codec_settings_list = codec_settings_list; + // TODO(bugs.webrtc.org/8830): Avoid recreation, it should be enough to call // ReconfigureEncoder. RTC_LOG(LS_INFO) << "RecreateWebRtcStream (send) because of SetCodec."; @@ -1882,7 +2075,7 @@ void WebRtcVideoSendChannel::WebRtcVideoSendStream::SetSenderParameters( if (params.rtcp_mode) { parameters_.config.rtp.rtcp_mode = *params.rtcp_mode; rtp_parameters_.rtcp.reduced_size = - parameters_.config.rtp.rtcp_mode == webrtc::RtcpMode::kReducedSize; + parameters_.config.rtp.rtcp_mode == RtcpMode::kReducedSize; recreate_stream = true; } if (params.extmap_allow_mixed) { @@ -1908,10 +2101,11 @@ void WebRtcVideoSendChannel::WebRtcVideoSendStream::SetSenderParameters( // Set codecs and options. if (params.send_codec) { - SetCodec(*params.send_codec); + SetCodec(*params.send_codec, + params.send_codecs.value_or(std::vector())); recreate_stream = false; // SetCodec has already recreated the stream. } else if (params.conference_mode && parameters_.codec_settings) { - SetCodec(*parameters_.codec_settings); + SetCodec(*parameters_.codec_settings, parameters_.codec_settings_list); recreate_stream = false; // SetCodec has already recreated the stream. } if (recreate_stream) { @@ -1921,20 +2115,18 @@ void WebRtcVideoSendChannel::WebRtcVideoSendStream::SetSenderParameters( } } -webrtc::RTCError -WebRtcVideoSendChannel::WebRtcVideoSendStream::SetRtpParameters( - const webrtc::RtpParameters& new_parameters, - webrtc::SetParametersCallback callback) { +RTCError WebRtcVideoSendChannel::WebRtcVideoSendStream::SetRtpParameters( + const RtpParameters& new_parameters, + SetParametersCallback callback) { RTC_DCHECK_RUN_ON(&thread_checker_); // This is checked higher in the stack (RtpSender), so this is only checking // for users accessing the private APIs or tests, not specification // conformance. 
// TODO(orphis): Migrate tests to later make this a DCHECK only - webrtc::RTCError error = CheckRtpParametersInvalidModificationAndValues( - rtp_parameters_, new_parameters); + RTCError error = CheckRtpParametersInvalidModificationAndValues( + rtp_parameters_, new_parameters, call_->trials()); if (!error.ok()) { - // Error is propagated to the callback at a higher level - return error; + return webrtc::InvokeSetParametersCallback(callback, error); } bool new_param = false; @@ -1949,10 +2141,12 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::SetRtpParameters( rtp_parameters_.encodings[i].scale_resolution_down_by) || (new_parameters.encodings[i].num_temporal_layers != rtp_parameters_.encodings[i].num_temporal_layers) || - (new_parameters.encodings[i].requested_resolution != - rtp_parameters_.encodings[i].requested_resolution) || + (new_parameters.encodings[i].scale_resolution_down_to != + rtp_parameters_.encodings[i].scale_resolution_down_to) || (new_parameters.encodings[i].scalability_mode != - rtp_parameters_.encodings[i].scalability_mode)) { + rtp_parameters_.encodings[i].scalability_mode) || + (new_parameters.encodings[i].codec != + rtp_parameters_.encodings[i].codec)) { new_param = true; break; } @@ -1981,6 +2175,7 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::SetRtpParameters( new_send_state = true; } } + rtp_parameters_ = new_parameters; // Codecs are currently handled at the WebRtcVideoSendChannel level. rtp_parameters_.codecs.clear(); @@ -1989,9 +2184,6 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::SetRtpParameters( ReconfigureEncoder(std::move(callback)); callback = nullptr; } - if (new_send_state) { - UpdateSendState(); - } if (new_degradation_preference) { if (source_ && stream_) { stream_->SetSource(source_, GetDegradationPreference()); @@ -2013,17 +2205,17 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::SetRtpParameters( } GenerateKeyFrame(key_frames_requested_by_rid); } - return webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); + return webrtc::InvokeSetParametersCallback(callback, RTCError::OK()); } -webrtc::RtpParameters -WebRtcVideoSendChannel::WebRtcVideoSendStream::GetRtpParameters() const { +RtpParameters WebRtcVideoSendChannel::WebRtcVideoSendStream::GetRtpParameters() + const { RTC_DCHECK_RUN_ON(&thread_checker_); return rtp_parameters_; } void WebRtcVideoSendChannel::WebRtcVideoSendStream::SetFrameEncryptor( - rtc::scoped_refptr frame_encryptor) { + scoped_refptr frame_encryptor) { RTC_DCHECK_RUN_ON(&thread_checker_); parameters_.config.frame_encryptor = frame_encryptor; if (stream_) { @@ -2035,7 +2227,7 @@ void WebRtcVideoSendChannel::WebRtcVideoSendStream::SetFrameEncryptor( } void WebRtcVideoSendChannel::WebRtcVideoSendStream::SetEncoderSelector( - webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { + VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { RTC_DCHECK_RUN_ON(&thread_checker_); parameters_.config.encoder_selector = encoder_selector; if (stream_) { @@ -2050,27 +2242,9 @@ void WebRtcVideoSendChannel::WebRtcVideoSendStream::UpdateSendState() { RTC_DCHECK_RUN_ON(&thread_checker_); if (sending_) { RTC_DCHECK(stream_ != nullptr); - size_t num_layers = rtp_parameters_.encodings.size(); - if (parameters_.encoder_config.number_of_streams == 1) { - // SVC is used. Only one simulcast layer is present. 
-      num_layers = 1;
-    }
-    std::vector active_layers(num_layers);
-    for (size_t i = 0; i < num_layers; ++i) {
-      active_layers[i] = IsLayerActive(rtp_parameters_.encodings[i]);
-    }
-    if (parameters_.encoder_config.number_of_streams == 1 &&
-        rtp_parameters_.encodings.size() > 1) {
-      // SVC is used.
-      // The only present simulcast layer should be active if any of the
-      // configured SVC layers is active.
-      active_layers[0] =
-          absl::c_any_of(rtp_parameters_.encodings,
-                         [](const auto& encoding) { return encoding.active; });
-    }
-    // This updates what simulcast layers are sending, and possibly starts
-    // or stops the VideoSendStream.
-    stream_->StartPerRtpStream(active_layers);
+    // This allows the Stream to be used. I.e., DTLS is connected and the
+    // RtpTransceiver direction allows sending.
+    stream_->Start();
   } else {
     if (stream_ != nullptr) {
       stream_->Stop();
@@ -2078,25 +2252,23 @@
   }
 }
-webrtc::VideoEncoderConfig
+VideoEncoderConfig
 WebRtcVideoSendChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig(
-    const VideoCodec& codec) const {
+    const Codec& codec) const {
   RTC_DCHECK_RUN_ON(&thread_checker_);
-  webrtc::VideoEncoderConfig encoder_config;
+  VideoEncoderConfig encoder_config;
   encoder_config.codec_type = webrtc::PayloadStringToCodecType(codec.name);
-  encoder_config.video_format =
-      webrtc::SdpVideoFormat(codec.name, codec.params);
+  encoder_config.video_format = SdpVideoFormat(codec.name, codec.params);
   bool is_screencast = parameters_.options.is_screencast.value_or(false);
   if (is_screencast) {
     encoder_config.min_transmit_bitrate_bps =
         1000 * parameters_.options.screencast_min_bitrate_kbps.value_or(0);
-    encoder_config.content_type =
-        webrtc::VideoEncoderConfig::ContentType::kScreen;
+    encoder_config.content_type = VideoEncoderConfig::ContentType::kScreen;
   } else {
     encoder_config.min_transmit_bitrate_bps = 0;
     encoder_config.content_type =
-        webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo;
+        VideoEncoderConfig::ContentType::kRealtimeVideo;
   }
   // By default, the stream count for the codec configuration should match the
@@ -2107,7 +2279,8 @@
   for (const webrtc::RtpEncodingParameters& encoding :
        rtp_parameters_.encodings) {
     if (encoding.scalability_mode.has_value() &&
-        encoding.scale_resolution_down_by.has_value()) {
+        (encoding.scale_resolution_down_by.has_value() ||
+         encoding.scale_resolution_down_to.has_value())) {
       legacy_scalability_mode = false;
       break;
     }
@@ -2188,8 +2361,8 @@
       encoder_config.simulcast_layers[i].num_temporal_layers =
           *rtp_parameters_.encodings[i].num_temporal_layers;
     }
-    encoder_config.simulcast_layers[i].requested_resolution =
-        rtp_parameters_.encodings[i].requested_resolution;
+    encoder_config.simulcast_layers[i].scale_resolution_down_to =
+        rtp_parameters_.encodings[i].scale_resolution_down_to;
   }
   encoder_config.legacy_conference_mode = parameters_.conference_mode;
@@ -2202,20 +2375,21 @@
   // Ensure frame dropping is always enabled.
   encoder_config.frame_drop_enabled = true;
-  int max_qp = kDefaultQpMax;
-  codec.GetParam(kCodecParamMaxQuantization, &max_qp);
-  encoder_config.max_qp = max_qp;
+  int max_qp = -1;
+  if (codec.GetParam(kCodecParamMaxQuantization, &max_qp) && max_qp > 0) {
+    encoder_config.max_qp = max_qp;
+  }
   return encoder_config;
 }
 void WebRtcVideoSendChannel::WebRtcVideoSendStream::ReconfigureEncoder(
-    webrtc::SetParametersCallback callback) {
+    SetParametersCallback callback) {
   RTC_DCHECK_RUN_ON(&thread_checker_);
   if (!stream_) {
     // The webrtc::VideoSendStream `stream_` has not yet been created but other
     // parameters have changed.
-    webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK());
+    webrtc::InvokeSetParametersCallback(callback, RTCError::OK());
     return;
   }
@@ -2228,12 +2402,11 @@ void WebRtcVideoSendChannel::WebRtcVideoSendStream::ReconfigureEncoder(
       codec_settings.codec, parameters_.config, rtp_parameters_.encodings);
   // Latest config, with and without encoder specific settings.
-  webrtc::VideoEncoderConfig encoder_config =
+  VideoEncoderConfig encoder_config =
       CreateVideoEncoderConfig(codec_settings.codec);
   encoder_config.encoder_specific_settings =
       ConfigureVideoEncoderSettings(codec_settings.codec);
-  webrtc::VideoEncoderConfig encoder_config_with_specifics =
-      encoder_config.Copy();
+  VideoEncoderConfig encoder_config_with_specifics = encoder_config.Copy();
   encoder_config.encoder_specific_settings = nullptr;
   // When switching between legacy SVC (3 encodings interpreted as 1 stream with
@@ -2247,7 +2420,7 @@
     // The app is switching between legacy and standard modes, recreate instead
     // of reconfiguring to avoid number of streams not matching in lower layers.
     RecreateWebRtcStream();
-    webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK());
+    webrtc::InvokeSetParametersCallback(callback, RTCError::OK());
     return;
   }
@@ -2271,7 +2444,7 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos(
     common_info.codec_payload_type = parameters_.codec_settings->codec.id;
   }
   std::vector infos;
-  webrtc::VideoSendStream::Stats stats;
+  VideoSendStream::Stats stats;
   if (stream_ == nullptr) {
     for (uint32_t ssrc : parameters_.config.rtp.ssrcs) {
       common_info.add_ssrc(ssrc);
@@ -2281,7 +2454,7 @@
   } else {
     stats = stream_->GetStats();
     if (log_stats)
-      RTC_LOG(LS_INFO) << stats.ToString(rtc::TimeMillis());
+      RTC_LOG(LS_INFO) << stats.ToString(webrtc::TimeMillis());
   // Metrics that are in common for all substreams.
common_info.adapt_changes = stats.number_of_cpu_adapt_changes; @@ -2302,7 +2475,6 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( common_info.quality_limitation_resolution_changes = stats.quality_limitation_resolution_changes; common_info.encoder_implementation_name = stats.encoder_implementation_name; - common_info.target_bitrate = stats.target_media_bitrate_bps; common_info.ssrc_groups = ssrc_groups_; common_info.frames = stats.frames; common_info.framerate_input = stats.input_frame_rate; @@ -2322,8 +2494,9 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( for (uint32_t ssrc : parameters_.config.rtp.ssrcs) { common_info.add_ssrc(ssrc); } + common_info.encoding_index = 0; common_info.active = - IsActiveFromEncodings(absl::nullopt, rtp_parameters_.encodings); + IsActiveFromEncodings(std::nullopt, rtp_parameters_.encodings); common_info.framerate_sent = stats.encode_frame_rate; common_info.frames_encoded = stats.frames_encoded; common_info.total_encode_time_ms = stats.total_encode_time_ms; @@ -2339,6 +2512,12 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( // with the outbound-rtp stats objects. auto outbound_rtp_substreams = MergeInfoAboutOutboundRtpSubstreams(stats.substreams); + // The streams are ordered by SSRC, but the SSRCs are randomly assigned so we + // need map for index lookup by SSRC. + std::map encoding_index_by_ssrc; + for (size_t i = 0; i < parameters_.config.rtp.ssrcs.size(); ++i) { + encoding_index_by_ssrc[parameters_.config.rtp.ssrcs[i]] = i; + } // If SVC is used, one stream is configured but multiple encodings exist. This // is not spec-compliant, but it is how we've implemented SVC so this affects // how the RTP stream's "active" value is determined. @@ -2349,12 +2528,15 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( uint32_t ssrc = pair.first; info.add_ssrc(ssrc); info.rid = parameters_.config.rtp.GetRidForSsrc(ssrc); + if (encoding_index_by_ssrc.find(ssrc) != encoding_index_by_ssrc.end()) { + info.encoding_index = encoding_index_by_ssrc[ssrc]; + } info.active = IsActiveFromEncodings( - !is_svc ? absl::optional(ssrc) : absl::nullopt, + !is_svc ? std::optional(ssrc) : std::nullopt, rtp_parameters_.encodings); auto stream_stats = pair.second; RTC_DCHECK_EQ(stream_stats.type, - webrtc::VideoSendStream::StreamStats::StreamType::kMedia); + VideoSendStream::StreamStats::StreamType::kMedia); info.payload_bytes_sent = stream_stats.rtp_stats.transmitted.payload_bytes; info.header_and_padding_bytes_sent = stream_stats.rtp_stats.transmitted.header_bytes + @@ -2385,6 +2567,7 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::GetPerLayerVideoSenderInfos( info.total_encoded_bytes_target = stream_stats.total_encoded_bytes_target; info.huge_frames_sent = stream_stats.huge_frames_sent; info.scalability_mode = stream_stats.scalability_mode; + info.target_bitrate = stream_stats.target_bitrate; infos.push_back(info); } return infos; @@ -2399,6 +2582,7 @@ WebRtcVideoSendChannel::WebRtcVideoSendStream::GetAggregatedVideoSenderInfo( return infos[0]; } VideoSenderInfo info = infos[0]; + info.encoding_index = std::nullopt; // An aggregated info has no index. 
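Editor's illustration (not part of the patch): the encoding_index bookkeeping added above records each SSRC's position in the configured SSRC list and then looks that position up for every reported substream, while the aggregated sender info deliberately carries no index. A small self-contained sketch under those assumptions; LayerInfo and BuildIndexBySsrc are invented names.

#include <cstdint>
#include <iostream>
#include <map>
#include <optional>
#include <string>
#include <vector>

struct LayerInfo {
  uint32_t ssrc = 0;
  std::optional<size_t> encoding_index;  // nullopt for the aggregated info.
};

// SSRCs are randomly assigned, so recover the layer order from each SSRC's
// position in the configured SSRC list.
std::map<uint32_t, size_t> BuildIndexBySsrc(
    const std::vector<uint32_t>& ssrcs) {
  std::map<uint32_t, size_t> index_by_ssrc;
  for (size_t i = 0; i < ssrcs.size(); ++i) {
    index_by_ssrc[ssrcs[i]] = i;
  }
  return index_by_ssrc;
}

int main() {
  const std::vector<uint32_t> configured = {0xCAFE, 0xBEEF, 0xF00D};
  const auto index_by_ssrc = BuildIndexBySsrc(configured);

  // Substream stats arrive keyed by SSRC, in map order rather than layer
  // order, so each one looks up its own encoding index.
  const std::vector<uint32_t> reported = {0xF00D, 0xCAFE};
  for (uint32_t ssrc : reported) {
    LayerInfo info;
    info.ssrc = ssrc;
    auto it = index_by_ssrc.find(ssrc);
    if (it != index_by_ssrc.end()) {
      info.encoding_index = it->second;
    }
    std::cout << std::hex << info.ssrc << " -> encoding index "
              << (info.encoding_index ? std::to_string(*info.encoding_index)
                                      : std::string("none"))
              << "\n";
  }
}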
info.local_stats.clear(); for (uint32_t ssrc : parameters_.config.rtp.ssrcs) { info.add_ssrc(ssrc); @@ -2445,7 +2629,7 @@ void WebRtcVideoSendChannel::WebRtcVideoSendStream::FillBitrateInfo( if (stream_ == NULL) { return; } - webrtc::VideoSendStream::Stats stats = stream_->GetStats(); + VideoSendStream::Stats stats = stream_->GetStats(); for (const auto& it : stats.substreams) { bwe_info->transmit_bitrate += it.second.total_bitrate_bps; bwe_info->retransmit_bitrate += it.second.retransmit_bitrate_bps; @@ -2456,8 +2640,7 @@ void WebRtcVideoSendChannel::WebRtcVideoSendStream::FillBitrateInfo( void WebRtcVideoSendChannel::WebRtcVideoSendStream:: SetEncoderToPacketizerFrameTransformer( - rtc::scoped_refptr - frame_transformer) { + scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&thread_checker_); parameters_.config.frame_transformer = std::move(frame_transformer); if (stream_) @@ -2466,19 +2649,16 @@ void WebRtcVideoSendChannel::WebRtcVideoSendStream:: void WebRtcVideoSendChannel::WebRtcVideoSendStream::RecreateWebRtcStream() { RTC_DCHECK_RUN_ON(&thread_checker_); - if (stream_ != NULL) { - call_->DestroyVideoSendStream(stream_); - } RTC_CHECK(parameters_.codec_settings); RTC_DCHECK_EQ((parameters_.encoder_config.content_type == - webrtc::VideoEncoderConfig::ContentType::kScreen), + VideoEncoderConfig::ContentType::kScreen), parameters_.options.is_screencast.value_or(false)) << "encoder content type inconsistent with screencast option"; parameters_.encoder_config.encoder_specific_settings = ConfigureVideoEncoderSettings(parameters_.codec_settings->codec); - webrtc::VideoSendStream::Config config = parameters_.config.Copy(); + VideoSendStream::Config config = parameters_.config.Copy(); if (!config.rtp.rtx.ssrcs.empty() && config.rtp.rtx.payload_type == -1) { RTC_LOG(LS_WARNING) << "RTX SSRCs configured but there's no configured RTX " "payload type the set codec. Ignoring RTX."; @@ -2493,8 +2673,25 @@ void WebRtcVideoSendChannel::WebRtcVideoSendStream::RecreateWebRtcStream() { } } } - stream_ = call_->CreateVideoSendStream(std::move(config), - parameters_.encoder_config.Copy()); + + if (RtpExtension::FindHeaderExtensionByUri( + config.rtp.extensions, RtpExtension::kCorruptionDetectionUri, + RtpExtension::kRequireEncryptedExtension)) { + config.encoder_settings.enable_frame_instrumentation_generator = true; + } + + if (stream_ != NULL) { + // TODO: webrtc:40644448 - Make sure the stats are not updated between + // GetStats and DestroyVideoSendStream. 
+ VideoSendStream::Stats stats = stream_->GetStats(); + call_->DestroyVideoSendStream(stream_); + stream_ = call_->CreateVideoSendStream(std::move(config), + parameters_.encoder_config.Copy()); + stream_->SetStats(stats); + } else { + stream_ = call_->CreateVideoSendStream(std::move(config), + parameters_.encoder_config.Copy()); + } parameters_.encoder_config.encoder_specific_settings = NULL; @@ -2536,7 +2733,7 @@ void WebRtcVideoSendChannel::GenerateSendKeyFrame( void WebRtcVideoSendChannel::SetEncoderToPacketizerFrameTransformer( uint32_t ssrc, - rtc::scoped_refptr frame_transformer) { + scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&thread_checker_); auto matching_stream = send_streams_.find(ssrc); if (matching_stream != send_streams_.end()) { @@ -2547,11 +2744,11 @@ void WebRtcVideoSendChannel::SetEncoderToPacketizerFrameTransformer( // ------------------------ WebRtcVideoReceiveChannel --------------------- WebRtcVideoReceiveChannel::WebRtcVideoReceiveChannel( - webrtc::Call* call, + Call* call, const MediaConfig& config, const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::VideoDecoderFactory* decoder_factory) + const CryptoOptions& crypto_options, + VideoDecoderFactory* decoder_factory) : MediaChannelUtil(call->network_thread(), config.enable_dscp), worker_thread_(call->worker_thread()), receiving_(false), @@ -2564,12 +2761,15 @@ WebRtcVideoReceiveChannel::WebRtcVideoReceiveChannel( discard_unknown_ssrc_packets_( IsEnabled(call_->trials(), "WebRTC-Video-DiscardPacketsWithUnknownSsrc")), - crypto_options_(crypto_options) { + crypto_options_(crypto_options), + receive_buffer_size_(ParseReceiveBufferSize(call_->trials())) { RTC_DCHECK_RUN_ON(&thread_checker_); rtcp_receiver_report_ssrc_ = kDefaultRtcpReceiverReportSsrc; + // Crash if MapCodecs fails. recv_codecs_ = MapCodecs(GetPayloadTypesAndDefaultCodecs( - decoder_factory_, /*is_decoder_factory=*/true, - /*include_rtx=*/true, call_->trials())); + decoder_factory_, /*is_decoder_factory=*/true, + /*include_rtx=*/true, call_->trials())) + .value(); recv_flexfec_payload_type_ = recv_codecs_.empty() ? 0 : recv_codecs_.front().flexfec_payload_type; } @@ -2582,8 +2782,8 @@ WebRtcVideoReceiveChannel::~WebRtcVideoReceiveChannel() { void WebRtcVideoReceiveChannel::SetReceiverFeedbackParameters( bool lntf_enabled, bool nack_enabled, - webrtc::RtcpMode rtcp_mode, - absl::optional rtx_time) { + RtcpMode rtcp_mode, + std::optional rtx_time) { RTC_DCHECK_RUN_ON(&thread_checker_); // Update receive feedback parameters from new codec or RTCP mode. @@ -2603,33 +2803,33 @@ void WebRtcVideoReceiveChannel::SetReceiverFeedbackParameters( // Note: There is no place in config to store rtx_time. } -webrtc::RtpParameters WebRtcVideoReceiveChannel::GetRtpReceiverParameters( +RtpParameters WebRtcVideoReceiveChannel::GetRtpReceiverParameters( uint32_t ssrc) const { RTC_DCHECK_RUN_ON(&thread_checker_); - webrtc::RtpParameters rtp_params; + RtpParameters rtp_params; auto it = receive_streams_.find(ssrc); if (it == receive_streams_.end()) { RTC_LOG(LS_WARNING) << "Attempting to get RTP receive parameters for stream " "with SSRC " << ssrc << " which doesn't exist."; - return webrtc::RtpParameters(); + return RtpParameters(); } rtp_params = it->second->GetRtpParameters(); rtp_params.header_extensions = recv_rtp_extensions_; // Add codecs, which any stream is prepared to receive. 
- for (const VideoCodec& codec : recv_params_.codecs) { + for (const webrtc::Codec& codec : recv_params_.codecs) { rtp_params.codecs.push_back(codec.ToCodecParameters()); } return rtp_params; } -webrtc::RtpParameters -WebRtcVideoReceiveChannel::GetDefaultRtpReceiveParameters() const { +RtpParameters WebRtcVideoReceiveChannel::GetDefaultRtpReceiveParameters() + const { RTC_DCHECK_RUN_ON(&thread_checker_); - webrtc::RtpParameters rtp_params; + RtpParameters rtp_params; if (!default_sink_) { // Getting parameters on a default, unsignaled video receive stream but // because we've not configured to receive such a stream, `encodings` is @@ -2639,7 +2839,7 @@ WebRtcVideoReceiveChannel::GetDefaultRtpReceiveParameters() const { rtp_params.encodings.emplace_back(); // Add codecs, which any stream is prepared to receive. - for (const VideoCodec& codec : recv_params_.codecs) { + for (const webrtc::Codec& codec : recv_params_.codecs) { rtp_params.codecs.push_back(codec.ToCodecParameters()); } @@ -2655,22 +2855,24 @@ bool WebRtcVideoReceiveChannel::GetChangedReceiverParameters( } // Handle receive codecs. - const std::vector mapped_codecs = - MapCodecs(params.codecs); - if (mapped_codecs.empty()) { - RTC_LOG(LS_ERROR) - << "GetChangedReceiverParameters called without any video codecs."; + auto result = MapCodecs(params.codecs); + if (!result.ok()) { + RTC_LOG(LS_ERROR) << "GetChangedReceiverParameters called without valid " + "video codecs, error =" + << result.error(); return false; } + const std::vector mapped_codecs = result.value(); // Verify that every mapped codec is supported locally. if (params.is_stream_active) { - const std::vector local_supported_codecs = + const std::vector local_supported_codecs = GetPayloadTypesAndDefaultCodecs(decoder_factory_, /*is_decoder_factory=*/true, /*include_rtx=*/true, call_->trials()); for (const VideoCodecSettings& mapped_codec : mapped_codecs) { - if (!FindMatchingCodec(local_supported_codecs, mapped_codec.codec)) { + if (!webrtc::FindMatchingVideoCodec(local_supported_codecs, + mapped_codec.codec)) { RTC_LOG(LS_ERROR) << "GetChangedReceiverParameters called with " "unsupported video codec: " << mapped_codec.codec.ToString(); @@ -2681,16 +2883,16 @@ bool WebRtcVideoReceiveChannel::GetChangedReceiverParameters( if (NonFlexfecReceiveCodecsHaveChanged(recv_codecs_, mapped_codecs)) { changed_params->codec_settings = - absl::optional>(mapped_codecs); + std::optional>(mapped_codecs); } // Handle RTP header extensions. 
- std::vector filtered_extensions = FilterRtpExtensions( - params.extensions, webrtc::RtpExtension::IsSupportedForVideo, false, - call_->trials()); + std::vector filtered_extensions = + FilterRtpExtensions(params.extensions, RtpExtension::IsSupportedForVideo, + false, call_->trials()); if (filtered_extensions != recv_rtp_extensions_) { changed_params->rtp_header_extensions = - absl::optional>(filtered_extensions); + std::optional>(filtered_extensions); } int flexfec_payload_type = mapped_codecs.front().flexfec_payload_type; @@ -2718,8 +2920,7 @@ bool WebRtcVideoReceiveChannel::SetReceiverParameters( } if (changed_params.rtp_header_extensions) { recv_rtp_extensions_ = *changed_params.rtp_header_extensions; - recv_rtp_extension_map_ = - webrtc::RtpHeaderExtensionMap(recv_rtp_extensions_); + recv_rtp_extension_map_ = RtpHeaderExtensionMap(recv_rtp_extensions_); } if (changed_params.codec_settings) { RTC_DLOG(LS_INFO) << "Changing recv codecs from " @@ -2836,9 +3037,8 @@ bool WebRtcVideoReceiveChannel::AddRecvStream(const StreamParams& sp, for (uint32_t used_ssrc : sp.ssrcs) receive_ssrcs_.insert(used_ssrc); - webrtc::VideoReceiveStreamInterface::Config config(transport(), - decoder_factory_); - webrtc::FlexfecReceiveStream::Config flexfec_config(transport()); + VideoReceiveStreamInterface::Config config(transport(), decoder_factory_); + FlexfecReceiveStream::Config flexfec_config(transport()); ConfigureReceiverRtp(&config, &flexfec_config, sp); config.crypto_options = crypto_options_; @@ -2862,8 +3062,8 @@ bool WebRtcVideoReceiveChannel::AddRecvStream(const StreamParams& sp, } void WebRtcVideoReceiveChannel::ConfigureReceiverRtp( - webrtc::VideoReceiveStreamInterface::Config* config, - webrtc::FlexfecReceiveStream::Config* flexfec_config, + VideoReceiveStreamInterface::Config* config, + FlexfecReceiveStream::Config* flexfec_config, const StreamParams& sp) const { uint32_t ssrc = sp.first_ssrc(); @@ -2917,7 +3117,7 @@ void WebRtcVideoReceiveChannel::ResetUnsignaledRecvStream() { RTC_DCHECK_RUN_ON(&thread_checker_); RTC_LOG(LS_INFO) << "ResetUnsignaledRecvStream."; unsignaled_stream_params_ = StreamParams(); - last_unsignalled_ssrc_creation_time_ms_ = absl::nullopt; + last_unsignalled_ssrc_creation_time_ms_ = std::nullopt; // Delete any created default streams. This is needed to avoid SSRC collisions // in Call's RtpDemuxer, in the case that `this` has created a default video @@ -2934,9 +3134,9 @@ void WebRtcVideoReceiveChannel::ResetUnsignaledRecvStream() { } } -absl::optional WebRtcVideoReceiveChannel::GetUnsignaledSsrc() const { +std::optional WebRtcVideoReceiveChannel::GetUnsignaledSsrc() const { RTC_DCHECK_RUN_ON(&thread_checker_); - absl::optional ssrc; + std::optional ssrc; for (auto it = receive_streams_.begin(); it != receive_streams_.end(); ++it) { if (it->second->IsDefaultStream()) { ssrc.emplace(it->first); @@ -2956,9 +3156,8 @@ void WebRtcVideoReceiveChannel::OnDemuxerCriteriaUpdateComplete() { ++demuxer_criteria_completed_id_; } -bool WebRtcVideoReceiveChannel::SetSink( - uint32_t ssrc, - rtc::VideoSinkInterface* sink) { +bool WebRtcVideoReceiveChannel::SetSink(uint32_t ssrc, + VideoSinkInterface* sink) { RTC_DCHECK_RUN_ON(&thread_checker_); RTC_LOG(LS_INFO) << "SetSink: ssrc:" << ssrc << " " << (sink ? 
"(ptr)" : "nullptr"); @@ -2973,7 +3172,7 @@ bool WebRtcVideoReceiveChannel::SetSink( } void WebRtcVideoReceiveChannel::SetDefaultSink( - rtc::VideoSinkInterface* sink) { + VideoSinkInterface* sink) { RTC_DCHECK_RUN_ON(&thread_checker_); RTC_LOG(LS_INFO) << "SetDefaultSink: " << (sink ? "(ptr)" : "nullptr"); default_sink_ = sink; @@ -2990,7 +3189,7 @@ bool WebRtcVideoReceiveChannel::GetStats(VideoMediaReceiveInfo* info) { // Log stats periodically. bool log_stats = false; - int64_t now_ms = rtc::TimeMillis(); + int64_t now_ms = webrtc::TimeMillis(); if (last_receive_stats_log_ms_ == -1 || now_ms - last_receive_stats_log_ms_ > kStatsLogIntervalMs) { last_receive_stats_log_ms_ = now_ms; @@ -3015,8 +3214,8 @@ void WebRtcVideoReceiveChannel::FillReceiverStats( void WebRtcVideoReceiveChannel::FillReceiveCodecStats( VideoMediaReceiveInfo* video_media_info) { for (const auto& receiver : video_media_info->receivers) { - auto codec = - absl::c_find_if(recv_params_.codecs, [&receiver](const VideoCodec& c) { + auto codec = absl::c_find_if( + recv_params_.codecs, [&receiver](const webrtc::Codec& c) { return receiver.codec_payload_type && *receiver.codec_payload_type == c.id; }); @@ -3028,7 +3227,7 @@ void WebRtcVideoReceiveChannel::FillReceiveCodecStats( } void WebRtcVideoReceiveChannel::OnPacketReceived( - const webrtc::RtpPacketReceived& packet) { + const RtpPacketReceived& packet) { // Note: the network_thread_checker may refer to the worker thread if the two // threads are combined, but this is either always true or always false // depending on configuration set at object initialization. @@ -3036,7 +3235,7 @@ void WebRtcVideoReceiveChannel::OnPacketReceived( // TODO(crbug.com/1373439): Stop posting to the worker thread when the // combined network/worker project launches. - if (webrtc::TaskQueueBase::Current() != worker_thread_) { + if (TaskQueueBase::Current() != worker_thread_) { worker_thread_->PostTask( SafeTask(task_safety_.flag(), [this, packet = packet]() mutable { RTC_DCHECK_RUN_ON(&thread_checker_); @@ -3049,7 +3248,7 @@ void WebRtcVideoReceiveChannel::OnPacketReceived( } bool WebRtcVideoReceiveChannel::MaybeCreateDefaultReceiveStream( - const webrtc::RtpPacketReceived& packet) { + const RtpPacketReceived& packet) { if (discard_unknown_ssrc_packets_) { return false; } @@ -3087,47 +3286,44 @@ bool WebRtcVideoReceiveChannel::MaybeCreateDefaultReceiveStream( if (is_rtx_payload) { // As we don't support receiving simulcast there can only be one RTX // stream, which will be associated with unsignaled media stream. - absl::optional current_default_ssrc = GetUnsignaledSsrc(); + std::optional current_default_ssrc = GetUnsignaledSsrc(); if (current_default_ssrc) { FindReceiveStream(*current_default_ssrc)->UpdateRtxSsrc(packet.Ssrc()); - } else { - // Received unsignaled RTX packet before a media packet. Create a default - // stream with a "random" SSRC and the RTX SSRC from the packet. The - // stream will be recreated on the first media packet, unless we are - // extremely lucky and used the right media SSRC. - ReCreateDefaultReceiveStream(/*ssrc =*/14795, /*rtx_ssrc=*/packet.Ssrc()); + return true; } - return true; - } else { - // Ignore unknown ssrcs if we recently created an unsignalled receive - // stream since this shouldn't happen frequently. Getting into a state - // of creating decoders on every packet eats up processing time (e.g. - // https://crbug.com/1069603) and this cooldown prevents that. 
- if (last_unsignalled_ssrc_creation_time_ms_.has_value()) { - int64_t now_ms = rtc::TimeMillis(); - if (now_ms - last_unsignalled_ssrc_creation_time_ms_.value() < - kUnsignaledSsrcCooldownMs) { - // We've already created an unsignalled ssrc stream within the last - // 0.5 s, ignore with a warning. - RTC_LOG(LS_WARNING) - << "Another unsignalled ssrc packet arrived shortly after the " - << "creation of an unsignalled ssrc stream. Dropping packet."; - return false; - } + // Default media SSRC not known yet. Drop the packet. + // BWE has already been notified of this received packet. + return false; + } + // Ignore unknown ssrcs if we recently created an unsignalled receive + // stream since this shouldn't happen frequently. Getting into a state + // of creating decoders on every packet eats up processing time (e.g. + // https://crbug.com/1069603) and this cooldown prevents that. + if (last_unsignalled_ssrc_creation_time_ms_.has_value()) { + int64_t now_ms = webrtc::TimeMillis(); + if (now_ms - last_unsignalled_ssrc_creation_time_ms_.value() < + kUnsignaledSsrcCooldownMs) { + // We've already created an unsignalled ssrc stream within the last + // 0.5 s, ignore with a warning. + RTC_LOG(LS_WARNING) + << "Another unsignalled ssrc packet arrived shortly after the " + << "creation of an unsignalled ssrc stream. Dropping packet."; + return false; } } + // RTX SSRC not yet known. - ReCreateDefaultReceiveStream(packet.Ssrc(), absl::nullopt); - last_unsignalled_ssrc_creation_time_ms_ = rtc::TimeMillis(); + ReCreateDefaultReceiveStream(packet.Ssrc(), std::nullopt); + last_unsignalled_ssrc_creation_time_ms_ = webrtc::TimeMillis(); return true; } void WebRtcVideoReceiveChannel::ReCreateDefaultReceiveStream( uint32_t ssrc, - absl::optional rtx_ssrc) { + std::optional rtx_ssrc) { RTC_DCHECK_RUN_ON(&thread_checker_); - absl::optional default_recv_ssrc = GetUnsignaledSsrc(); + std::optional default_recv_ssrc = GetUnsignaledSsrc(); if (default_recv_ssrc) { RTC_LOG(LS_INFO) << "Destroying old default receive stream for SSRC=" << ssrc << "."; @@ -3161,12 +3357,12 @@ void WebRtcVideoReceiveChannel::SetInterface( MediaChannelUtil::SetInterface(iface); // Set the RTP recv/send buffer to a bigger size. MediaChannelUtil::SetOption(MediaChannelNetworkInterface::ST_RTP, - rtc::Socket::OPT_RCVBUF, kVideoRtpRecvBufferSize); + Socket::OPT_RCVBUF, receive_buffer_size_); } void WebRtcVideoReceiveChannel::SetFrameDecryptor( uint32_t ssrc, - rtc::scoped_refptr frame_decryptor) { + scoped_refptr frame_decryptor) { RTC_DCHECK_RUN_ON(&thread_checker_); auto matching_stream = receive_streams_.find(ssrc); if (matching_stream != receive_streams_.end()) { @@ -3177,7 +3373,7 @@ void WebRtcVideoReceiveChannel::SetFrameDecryptor( bool WebRtcVideoReceiveChannel::SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) { RTC_DCHECK_RUN_ON(&thread_checker_); - absl::optional default_ssrc = GetUnsignaledSsrc(); + std::optional default_ssrc = GetUnsignaledSsrc(); // SSRC of 0 represents the default receive stream. if (ssrc == 0) { @@ -3202,7 +3398,7 @@ bool WebRtcVideoReceiveChannel::SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, } } -absl::optional WebRtcVideoReceiveChannel::GetBaseMinimumPlayoutDelayMs( +std::optional WebRtcVideoReceiveChannel::GetBaseMinimumPlayoutDelayMs( uint32_t ssrc) const { RTC_DCHECK_RUN_ON(&thread_checker_); // SSRC of 0 represents the default receive stream. 
@@ -3215,11 +3411,11 @@ absl::optional WebRtcVideoReceiveChannel::GetBaseMinimumPlayoutDelayMs( return stream->second->GetBaseMinimumPlayoutDelayMs(); } else { RTC_LOG(LS_ERROR) << "No stream found to get base minimum playout delay"; - return absl::nullopt; + return std::nullopt; } } -std::vector WebRtcVideoReceiveChannel::GetSources( +std::vector WebRtcVideoReceiveChannel::GetSources( uint32_t ssrc) const { RTC_DCHECK_RUN_ON(&thread_checker_); auto it = receive_streams_.find(ssrc); @@ -3234,12 +3430,12 @@ std::vector WebRtcVideoReceiveChannel::GetSources( } WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream( - webrtc::Call* call, + Call* call, const StreamParams& sp, - webrtc::VideoReceiveStreamInterface::Config config, + VideoReceiveStreamInterface::Config config, bool default_stream, const std::vector& recv_codecs, - const webrtc::FlexfecReceiveStream::Config& flexfec_config) + const FlexfecReceiveStream::Config& flexfec_config) : call_(call), stream_params_(sp), stream_(NULL), @@ -3260,13 +3456,15 @@ WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream( const VideoCodecSettings& codec = recv_codecs.front(); config_.rtp.ulpfec_payload_type = codec.ulpfec.ulpfec_payload_type; config_.rtp.red_payload_type = codec.ulpfec.red_payload_type; - config_.rtp.lntf.enabled = HasLntf(codec.codec); - config_.rtp.nack.rtp_history_ms = HasNack(codec.codec) ? kNackHistoryMs : 0; + config_.rtp.lntf.enabled = webrtc::HasLntf(codec.codec); + config_.rtp.nack.rtp_history_ms = + webrtc::HasNack(codec.codec) ? kNackHistoryMs : 0; if (codec.rtx_time && config_.rtp.nack.rtp_history_ms != 0) { config_.rtp.nack.rtp_history_ms = *codec.rtx_time; } - config_.rtp.rtcp_xr.receiver_reference_time_report = HasRrtr(codec.codec); + config_.rtp.rtcp_xr.receiver_reference_time_report = + webrtc::HasRrtr(codec.codec); if (codec.ulpfec.red_rtx_payload_type != -1) { config_.rtp @@ -3287,13 +3485,13 @@ WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream:: call_->DestroyFlexfecReceiveStream(flexfec_stream_); } -webrtc::VideoReceiveStreamInterface& +VideoReceiveStreamInterface& WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::stream() { RTC_DCHECK(stream_); return *stream_; } -webrtc::FlexfecReceiveStream* +FlexfecReceiveStream* WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::flexfec_stream() { return flexfec_stream_; } @@ -3303,15 +3501,15 @@ WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::GetSsrcs() const { return stream_params_.ssrcs; } -std::vector +std::vector WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::GetSources() { RTC_DCHECK(stream_); return stream_->GetSources(); } -webrtc::RtpParameters +RtpParameters WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::GetRtpParameters() const { - webrtc::RtpParameters rtp_parameters; + RtpParameters rtp_parameters; std::vector primary_ssrcs; stream_params_.GetPrimarySsrcs(&primary_ssrcs); @@ -3321,7 +3519,7 @@ WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::GetRtpParameters() const { } rtp_parameters.rtcp.reduced_size = - config_.rtp.rtcp_mode == webrtc::RtcpMode::kReducedSize; + config_.rtp.rtcp_mode == RtcpMode::kReducedSize; return rtp_parameters; } @@ -3333,7 +3531,7 @@ bool WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::ReconfigureCodecs( std::map rtx_associated_payload_types; std::set raw_payload_types; - std::vector decoders; + std::vector decoders; ExtractCodecInformation(recv_codecs, rtx_associated_payload_types, raw_payload_types, decoders); @@ -3347,14 +3545,14 @@ bool 
WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::ReconfigureCodecs( config_.rtp.ulpfec_payload_type); } - const bool has_lntf = HasLntf(codec.codec); + const bool has_lntf = webrtc::HasLntf(codec.codec); if (config_.rtp.lntf.enabled != has_lntf) { config_.rtp.lntf.enabled = has_lntf; stream_->SetLossNotificationEnabled(has_lntf); } int new_history_ms = config_.rtp.nack.rtp_history_ms; - const int rtp_history_ms = HasNack(codec.codec) ? kNackHistoryMs : 0; + const int rtp_history_ms = webrtc::HasNack(codec.codec) ? kNackHistoryMs : 0; if (rtp_history_ms != config_.rtp.nack.rtp_history_ms) { new_history_ms = rtp_history_ms; } @@ -3367,10 +3565,10 @@ bool WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::ReconfigureCodecs( if (config_.rtp.nack.rtp_history_ms != new_history_ms) { config_.rtp.nack.rtp_history_ms = new_history_ms; - stream_->SetNackHistory(webrtc::TimeDelta::Millis(new_history_ms)); + stream_->SetNackHistory(TimeDelta::Millis(new_history_ms)); } - const bool has_rtr = HasRrtr(codec.codec); + const bool has_rtr = webrtc::HasRrtr(codec.codec); if (has_rtr != config_.rtp.rtcp_xr.receiver_reference_time_report) { config_.rtp.rtcp_xr.receiver_reference_time_report = has_rtr; stream_->SetRtcpXr(config_.rtp.rtcp_xr); @@ -3405,8 +3603,8 @@ bool WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::ReconfigureCodecs( void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::SetFeedbackParameters( bool lntf_enabled, bool nack_enabled, - webrtc::RtcpMode rtcp_mode, - absl::optional rtx_time) { + RtcpMode rtcp_mode, + std::optional rtx_time) { RTC_DCHECK(stream_); if (config_.rtp.rtcp_mode != rtcp_mode) { @@ -3424,7 +3622,7 @@ void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::SetFeedbackParameters( int nack_history_ms = nack_enabled ? rtx_time.value_or(kNackHistoryMs) : 0; config_.rtp.nack.rtp_history_ms = nack_history_ms; - stream_->SetNackHistory(webrtc::TimeDelta::Millis(nack_history_ms)); + stream_->SetNackHistory(TimeDelta::Millis(nack_history_ms)); } void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::SetFlexFecPayload( @@ -3482,13 +3680,12 @@ void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream:: RecreateReceiveStream() { RTC_DCHECK_RUN_ON(&thread_checker_); RTC_DCHECK(stream_); - absl::optional base_minimum_playout_delay_ms; - absl::optional - recording_state; + std::optional base_minimum_playout_delay_ms; + std::optional recording_state; if (stream_) { base_minimum_playout_delay_ms = stream_->GetBaseMinimumPlayoutDelayMs(); recording_state = stream_->SetAndGetRecordingState( - webrtc::VideoReceiveStreamInterface::RecordingState(), + VideoReceiveStreamInterface::RecordingState(), /*generate_key_frame=*/false); call_->DestroyVideoReceiveStream(stream_); stream_ = nullptr; @@ -3522,7 +3719,7 @@ void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream:: flexfec_stream_ = call_->CreateFlexfecReceiveStream(flexfec_config_); } - webrtc::VideoReceiveStreamInterface::Config config = config_.Copy(); + VideoReceiveStreamInterface::Config config = config_.Copy(); config.rtp.protected_by_flexfec = (flexfec_stream_ != nullptr); config.rtp.packet_sink_ = flexfec_stream_; stream_ = call_->CreateVideoReceiveStream(std::move(config)); @@ -3542,10 +3739,10 @@ void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::StopReceiveStream() { } void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::OnFrame( - const webrtc::VideoFrame& frame) { - webrtc::MutexLock lock(&sink_lock_); + const VideoFrame& frame) { + MutexLock lock(&sink_lock_); - int64_t time_now_ms = 
rtc::TimeMillis(); + int64_t time_now_ms = webrtc::TimeMillis(); if (first_frame_timestamp_ < 0) first_frame_timestamp_ = time_now_ms; int64_t elapsed_time_ms = time_now_ms - first_frame_timestamp_; @@ -3567,7 +3764,7 @@ bool WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::IsDefaultStream() } void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) { + scoped_refptr frame_decryptor) { config_.frame_decryptor = frame_decryptor; if (stream_) { RTC_LOG(LS_INFO) @@ -3589,8 +3786,8 @@ int WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream:: } void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::SetSink( - rtc::VideoSinkInterface* sink) { - webrtc::MutexLock lock(&sink_lock_); + VideoSinkInterface* sink) { + MutexLock lock(&sink_lock_); sink_ = sink; } @@ -3600,7 +3797,7 @@ WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo( VideoReceiverInfo info; info.ssrc_groups = stream_params_.ssrc_groups; info.add_ssrc(config_.rtp.remote_ssrc); - webrtc::VideoReceiveStreamInterface::Stats stats = stream_->GetStats(); + VideoReceiveStreamInterface::Stats stats = stream_->GetStats(); info.decoder_implementation_name = stats.decoder_implementation_name; info.power_efficient_decoder = stats.power_efficient_decoder; if (stats.current_payload_type != -1) { @@ -3626,7 +3823,7 @@ WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo( info.frame_height = stats.height; { - webrtc::MutexLock frame_cs(&sink_lock_); + MutexLock frame_cs(&sink_lock_); info.capture_start_ntp_time_ms = estimated_remote_start_ntp_time_ms_; } @@ -3651,6 +3848,9 @@ WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo( info.key_frames_decoded = stats.frame_counts.key_frames; info.frames_rendered = stats.frames_rendered; info.qp_sum = stats.qp_sum; + info.corruption_score_sum = stats.corruption_score_sum; + info.corruption_score_squared_sum = stats.corruption_score_squared_sum; + info.corruption_score_count = stats.corruption_score_count; info.total_decode_time = stats.total_decode_time; info.total_processing_delay = stats.total_processing_delay; info.total_assembly_time = stats.total_assembly_time; @@ -3693,12 +3893,12 @@ WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo( } if (flexfec_stream_) { - const webrtc::ReceiveStatistics* fec_stats = flexfec_stream_->GetStats(); + const ReceiveStatistics* fec_stats = flexfec_stream_->GetStats(); if (fec_stats) { - const webrtc::StreamStatistician* statistican = + const StreamStatistician* statistican = fec_stats->GetStatistician(flexfec_config_.rtp.remote_ssrc); if (statistican) { - const webrtc::RtpReceiveStats fec_rtp_stats = statistican->GetStats(); + const RtpReceiveStats fec_rtp_stats = statistican->GetStats(); info.fec_packets_received = fec_rtp_stats.packet_counter.packets; // TODO(bugs.webrtc.org/15250): implement fecPacketsDiscarded. info.fec_bytes_received = fec_rtp_stats.packet_counter.payload_bytes; @@ -3715,19 +3915,30 @@ WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::GetVideoReceiverInfo( } } + // remote-outbound-rtp stats. 
+ info.last_sender_report_timestamp = stats.last_sender_report_timestamp; + info.last_sender_report_utc_timestamp = + stats.last_sender_report_utc_timestamp; + info.last_sender_report_remote_utc_timestamp = + stats.last_sender_report_remote_utc_timestamp; + info.sender_reports_packets_sent = stats.sender_reports_packets_sent; + info.sender_reports_bytes_sent = stats.sender_reports_bytes_sent; + info.sender_reports_reports_count = stats.sender_reports_reports_count; + // TODO(bugs.webrtc.org/12529): RTT-related fields are missing and can only be + // present if DLRR is enabled. + if (log_stats) - RTC_LOG(LS_INFO) << stats.ToString(rtc::TimeMillis()); + RTC_LOG(LS_INFO) << stats.ToString(webrtc::TimeMillis()); return info; } void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream:: SetRecordableEncodedFrameCallback( - std::function callback) { + std::function callback) { if (stream_) { stream_->SetAndGetRecordingState( - webrtc::VideoReceiveStreamInterface::RecordingState( - std::move(callback)), + VideoReceiveStreamInterface::RecordingState(std::move(callback)), /*generate_key_frame=*/true); } else { RTC_LOG(LS_ERROR) << "Absent receive stream; ignoring setting encoded " @@ -3739,7 +3950,7 @@ void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream:: ClearRecordableEncodedFrameCallback() { if (stream_) { stream_->SetAndGetRecordingState( - webrtc::VideoReceiveStreamInterface::RecordingState(), + VideoReceiveStreamInterface::RecordingState(), /*generate_key_frame=*/false); } else { RTC_LOG(LS_ERROR) << "Absent receive stream; ignoring clearing encoded " @@ -3758,8 +3969,7 @@ void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::GenerateKeyFrame() { void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream:: SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr - frame_transformer) { + scoped_refptr frame_transformer) { config_.frame_transformer = frame_transformer; if (stream_) stream_->SetDepacketizerToDecoderFrameTransformer(frame_transformer); @@ -3780,7 +3990,7 @@ void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::UpdateRtxSsrc( WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream* WebRtcVideoReceiveChannel::FindReceiveStream(uint32_t ssrc) { if (ssrc == 0) { - absl::optional default_ssrc = GetUnsignaledSsrc(); + std::optional default_ssrc = GetUnsignaledSsrc(); if (!default_ssrc) { return nullptr; } @@ -3795,7 +4005,7 @@ WebRtcVideoReceiveChannel::FindReceiveStream(uint32_t ssrc) { // RTC_RUN_ON(worker_thread_) void WebRtcVideoReceiveChannel::ProcessReceivedPacket( - webrtc::RtpPacketReceived packet) { + RtpPacketReceived packet) { // TODO(bugs.webrtc.org/11993): This code is very similar to what // WebRtcVoiceMediaChannel::OnPacketReceived does. 
For maintainability and // consistency it would be good to move the interaction with call_->Receiver() @@ -3810,18 +4020,18 @@ void WebRtcVideoReceiveChannel::ProcessReceivedPacket( packet.IdentifyExtensions(recv_rtp_extension_map_); packet.set_payload_type_frequency(webrtc::kVideoPayloadTypeFrequency); if (!packet.arrival_time().IsFinite()) { - packet.set_arrival_time(webrtc::Timestamp::Micros(rtc::TimeMicros())); + packet.set_arrival_time(Timestamp::Micros(webrtc::TimeMicros())); } call_->Receiver()->DeliverRtpPacket( - webrtc::MediaType::VIDEO, std::move(packet), + MediaType::VIDEO, std::move(packet), absl::bind_front( &WebRtcVideoReceiveChannel::MaybeCreateDefaultReceiveStream, this)); } void WebRtcVideoReceiveChannel::SetRecordableEncodedFrameCallback( uint32_t ssrc, - std::function callback) { + std::function callback) { RTC_DCHECK_RUN_ON(&thread_checker_); WebRtcVideoReceiveStream* stream = FindReceiveStream(ssrc); if (stream) { @@ -3860,7 +4070,7 @@ void WebRtcVideoReceiveChannel::RequestRecvKeyFrame(uint32_t ssrc) { void WebRtcVideoReceiveChannel::SetDepacketizerToDecoderFrameTransformer( uint32_t ssrc, - rtc::scoped_refptr frame_transformer) { + scoped_refptr frame_transformer) { RTC_DCHECK(frame_transformer); RTC_DCHECK_RUN_ON(&thread_checker_); if (ssrc == 0) { @@ -3879,7 +4089,7 @@ void WebRtcVideoReceiveChannel::SetDepacketizerToDecoderFrameTransformer( // ------------------------- VideoCodecSettings -------------------- -VideoCodecSettings::VideoCodecSettings(const VideoCodec& codec) +VideoCodecSettings::VideoCodecSettings(const Codec& codec) : codec(codec), flexfec_payload_type(-1), rtx_payload_type(-1) {} bool VideoCodecSettings::operator==(const VideoCodecSettings& other) const { @@ -3900,4 +4110,4 @@ bool VideoCodecSettings::operator!=(const VideoCodecSettings& other) const { return !(*this == other); } -} // namespace cricket +} // namespace webrtc diff --git a/media/engine/webrtc_video_engine.h b/media/engine/webrtc_video_engine.h index 11f1b99ac2..5d6bd6a847 100644 --- a/media/engine/webrtc_video_engine.h +++ b/media/engine/webrtc_video_engine.h @@ -17,6 +17,7 @@ #include #include #include +#include #include #include #include @@ -24,14 +25,13 @@ #include "absl/functional/any_invocable.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" -#include "api/call/transport.h" #include "api/crypto/crypto_options.h" #include "api/crypto/frame_decryptor_interface.h" #include "api/crypto/frame_encryptor_interface.h" #include "api/field_trials_view.h" #include "api/frame_transformer_interface.h" +#include "api/media_types.h" #include "api/rtc_error.h" #include "api/rtp_headers.h" #include "api/rtp_parameters.h" @@ -41,7 +41,6 @@ #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "api/transport/bitrate_settings.h" -#include "api/transport/field_trial_based_config.h" #include "api/transport/rtp/rtp_source.h" #include "api/video/recordable_encoded_frame.h" #include "api/video/video_bitrate_allocator_factory.h" @@ -64,6 +63,7 @@ #include "media/base/stream_params.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "rtc_base/checks.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" #include "rtc_base/synchronization/mutex.h" @@ -76,7 +76,7 @@ class VideoDecoderFactory; class VideoEncoderFactory; } // namespace webrtc -namespace cricket { +namespace webrtc { // Public for 
testing. // Inputs StreamStats for all types of substreams (kMedia, kRtx, kFlexfec) and @@ -87,56 +87,55 @@ namespace cricket { // objects ready to be turned into "outbound-rtp" stats objects for GetStats() // which does not create separate stream stats objects for complementary // streams. -std::map +std::map MergeInfoAboutOutboundRtpSubstreamsForTesting( - const std::map& substreams); + const std::map& substreams); // WebRtcVideoEngine is used for the new native WebRTC Video API (webrtc:1667). class WebRtcVideoEngine : public VideoEngineInterface { public: // These video codec factories represents all video codecs, i.e. both software // and external hardware codecs. - WebRtcVideoEngine( - std::unique_ptr video_encoder_factory, - std::unique_ptr video_decoder_factory, - const webrtc::FieldTrialsView& trials); + WebRtcVideoEngine(std::unique_ptr video_encoder_factory, + std::unique_ptr video_decoder_factory, + const FieldTrialsView& trials); ~WebRtcVideoEngine() override; std::unique_ptr CreateSendChannel( - webrtc::Call* call, + Call* call, const MediaConfig& config, const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) - override; + const CryptoOptions& crypto_options, + VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) override; std::unique_ptr CreateReceiveChannel( - webrtc::Call* call, + Call* call, const MediaConfig& config, const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options) override; + const CryptoOptions& crypto_options) override; - std::vector send_codecs() const override { - return send_codecs(true); + // TODO: https://issues.webrtc.org/360058654 - remove Legacy functions. + std::vector LegacySendCodecs() const override { + return LegacySendCodecs(true); } - std::vector recv_codecs() const override { - return recv_codecs(true); + std::vector LegacyRecvCodecs() const override { + return LegacyRecvCodecs(true); } - std::vector send_codecs(bool include_rtx) const override; - std::vector recv_codecs(bool include_rtx) const override; - std::vector GetRtpHeaderExtensions() + std::vector LegacySendCodecs(bool include_rtx) const override; + std::vector LegacyRecvCodecs(bool include_rtx) const override; + std::vector GetRtpHeaderExtensions() const override; private: - const std::unique_ptr decoder_factory_; - const std::unique_ptr encoder_factory_; - const std::unique_ptr + const std::unique_ptr decoder_factory_; + const std::unique_ptr encoder_factory_; + const std::unique_ptr bitrate_allocator_factory_; - const webrtc::FieldTrialsView& trials_; + const FieldTrialsView& trials_; }; struct VideoCodecSettings { - explicit VideoCodecSettings(const VideoCodec& codec); + explicit VideoCodecSettings(const Codec& codec); // Checks if all members of |*this| are equal to the corresponding members // of `other`. @@ -148,28 +147,28 @@ struct VideoCodecSettings { static bool EqualsDisregardingFlexfec(const VideoCodecSettings& a, const VideoCodecSettings& b); - VideoCodec codec; - webrtc::UlpfecConfig ulpfec; + Codec codec; + UlpfecConfig ulpfec; int flexfec_payload_type; // -1 if absent. int rtx_payload_type; // -1 if absent. 
- absl::optional rtx_time; + std::optional rtx_time; }; class WebRtcVideoSendChannel : public MediaChannelUtil, public VideoMediaSendChannelInterface, - public webrtc::EncoderSwitchRequestCallback { + public EncoderSwitchRequestCallback { public: WebRtcVideoSendChannel( - webrtc::Call* call, + Call* call, const MediaConfig& config, const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::VideoEncoderFactory* encoder_factory, - webrtc::VideoDecoderFactory* decoder_factory, - webrtc::VideoBitrateAllocatorFactory* bitrate_allocator_factory); + const CryptoOptions& crypto_options, + VideoEncoderFactory* encoder_factory, + VideoDecoderFactory* decoder_factory, + VideoBitrateAllocatorFactory* bitrate_allocator_factory); ~WebRtcVideoSendChannel() override; - MediaType media_type() const override { return MEDIA_TYPE_VIDEO; } + MediaType media_type() const override { return MediaType::VIDEO; } // Type manipulations VideoMediaSendChannelInterface* AsVideoSendChannel() override { return this; } VoiceMediaSendChannelInterface* AsVoiceSendChannel() override { @@ -191,41 +190,37 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, void SetInterface(MediaChannelNetworkInterface* iface) override; // VideoMediaSendChannelInterface implementation bool SetSenderParameters(const VideoSenderParameters& params) override; - webrtc::RTCError SetRtpSendParameters( - uint32_t ssrc, - const webrtc::RtpParameters& parameters, - webrtc::SetParametersCallback callback) override; - webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override; - absl::optional GetSendCodec() const override; + RTCError SetRtpSendParameters(uint32_t ssrc, + const RtpParameters& parameters, + SetParametersCallback callback) override; + RtpParameters GetRtpSendParameters(uint32_t ssrc) const override; + std::optional GetSendCodec() const override; bool SetSend(bool send) override; - bool SetVideoSend( - uint32_t ssrc, - const VideoOptions* options, - rtc::VideoSourceInterface* source) override; + bool SetVideoSend(uint32_t ssrc, + const VideoOptions* options, + VideoSourceInterface* source) override; bool AddSendStream(const StreamParams& sp) override; bool RemoveSendStream(uint32_t ssrc) override; void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) override; bool GetStats(VideoMediaSendInfo* info) override; - void OnPacketSent(const rtc::SentPacket& sent_packet) override; + void OnPacketSent(const SentPacketInfo& sent_packet) override; void OnReadyToSend(bool ready) override; void OnNetworkRouteChanged(absl::string_view transport_name, - const rtc::NetworkRoute& network_route) override; + const NetworkRoute& network_route) override; // Set a frame encryptor to a particular ssrc that will intercept all // outgoing video frames and attempt to encrypt them and forward the result // to the packetizer. - void SetFrameEncryptor(uint32_t ssrc, - rtc::scoped_refptr - frame_encryptor) override; + void SetFrameEncryptor( + uint32_t ssrc, + scoped_refptr frame_encryptor) override; // note: The encoder_selector object must remain valid for the lifetime of the // MediaChannel, unless replaced. 
- void SetEncoderSelector(uint32_t ssrc, - webrtc::VideoEncoderFactory::EncoderSelectorInterface* - encoder_selector) override; - - void SetVideoCodecSwitchingEnabled(bool enabled) override; + void SetEncoderSelector( + uint32_t ssrc, + VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) override; void SetSendCodecChangedCallback( absl::AnyInvocable callback) override { @@ -252,11 +247,9 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, ADAPTREASON_BANDWIDTH = 2, }; - static constexpr int kDefaultQpMax = 56; - // Implements webrtc::EncoderSwitchRequestCallback. void RequestEncoderFallback() override; - void RequestEncoderSwitch(const webrtc::SdpVideoFormat& format, + void RequestEncoderSwitch(const SdpVideoFormat& format, bool allow_default_fallback) override; void GenerateSendKeyFrame(uint32_t ssrc, @@ -264,13 +257,12 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, void SetEncoderToPacketizerFrameTransformer( uint32_t ssrc, - rtc::scoped_refptr frame_transformer) - override; + scoped_refptr frame_transformer) override; // Information queries to support SetReceiverFeedbackParameters - webrtc::RtcpMode SendCodecRtcpMode() const override { + RtcpMode SendCodecRtcpMode() const override { RTC_DCHECK_RUN_ON(&thread_checker_); - return send_params_.rtcp.reduced_size ? webrtc::RtcpMode::kReducedSize - : webrtc::RtcpMode::kCompound; + return send_params_.rtcp.reduced_size ? RtcpMode::kReducedSize + : RtcpMode::kCompound; } bool SendCodecHasLntf() const override { @@ -278,19 +270,19 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, if (!send_codec()) { return false; } - return HasLntf(send_codec()->codec); + return webrtc::HasLntf(send_codec()->codec); } bool SendCodecHasNack() const override { RTC_DCHECK_RUN_ON(&thread_checker_); if (!send_codec()) { return false; } - return HasNack(send_codec()->codec); + return webrtc::HasNack(send_codec()->codec); } - absl::optional SendCodecRtxTime() const override { + std::optional SendCodecRtxTime() const override { RTC_DCHECK_RUN_ON(&thread_checker_); if (!send_codec()) { - return absl::nullopt; + return std::nullopt; } return send_codec()->rtx_time; } @@ -298,14 +290,15 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, private: struct ChangedSenderParameters { // These optionals are unset if not changed. - absl::optional send_codec; - absl::optional> negotiated_codecs; - absl::optional> rtp_header_extensions; - absl::optional mid; - absl::optional extmap_allow_mixed; - absl::optional max_bandwidth_bps; - absl::optional conference_mode; - absl::optional rtcp_mode; + std::optional send_codec; + std::optional> negotiated_codecs; + std::optional> send_codecs; + std::optional> rtp_header_extensions; + std::optional mid; + std::optional extmap_allow_mixed; + std::optional max_bandwidth_bps; + std::optional conference_mode; + std::optional rtcp_mode; }; bool GetChangedSenderParameters(const VideoSenderParameters& params, @@ -319,42 +312,42 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, // `decoders` based on codec settings provided by `recv_codecs`. // `recv_codecs` must be non-empty and all other parameters must be empty. static void ExtractCodecInformation( - rtc::ArrayView recv_codecs, + ArrayView recv_codecs, std::map& rtx_associated_payload_types, std::set& raw_payload_types, - std::vector& decoders); + std::vector& decoders); // Wrapper for the sender part. 
class WebRtcVideoSendStream { public: WebRtcVideoSendStream( - webrtc::Call* call, + Call* call, const StreamParams& sp, - webrtc::VideoSendStream::Config config, + VideoSendStream::Config config, const VideoOptions& options, bool enable_cpu_overuse_detection, int max_bitrate_bps, - const absl::optional& codec_settings, - const absl::optional>& rtp_extensions, + const std::optional& codec_settings, + const std::vector& codec_settings_list, + const std::optional>& rtp_extensions, const VideoSenderParameters& send_params); ~WebRtcVideoSendStream(); void SetSenderParameters(const ChangedSenderParameters& send_params); - webrtc::RTCError SetRtpParameters(const webrtc::RtpParameters& parameters, - webrtc::SetParametersCallback callback); - webrtc::RtpParameters GetRtpParameters() const; + RTCError SetRtpParameters(const RtpParameters& parameters, + SetParametersCallback callback); + RtpParameters GetRtpParameters() const; void SetFrameEncryptor( - rtc::scoped_refptr frame_encryptor); + scoped_refptr frame_encryptor); bool SetVideoSend(const VideoOptions* options, - rtc::VideoSourceInterface* source); + VideoSourceInterface* source); // note: The encoder_selector object must remain valid for the lifetime of // the MediaChannel, unless replaced. void SetEncoderSelector( - webrtc::VideoEncoderFactory::EncoderSelectorInterface* - encoder_selector); + VideoEncoderFactory::EncoderSelectorInterface* encoder_selector); void SetSend(bool send); @@ -368,8 +361,7 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, void FillBitrateInfo(BandwidthEstimationInfo* bwe_info); void SetEncoderToPacketizerFrameTransformer( - rtc::scoped_refptr - frame_transformer); + scoped_refptr frame_transformer); void GenerateKeyFrame(const std::vector& rids); private: @@ -379,46 +371,46 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, // similar parameters depending on which options changed etc. struct VideoSendStreamParameters { VideoSendStreamParameters( - webrtc::VideoSendStream::Config config, + VideoSendStream::Config config, const VideoOptions& options, int max_bitrate_bps, - const absl::optional& codec_settings); - webrtc::VideoSendStream::Config config; + const std::optional& codec_settings, + const std::vector& codec_settings_list); + VideoSendStream::Config config; VideoOptions options; int max_bitrate_bps; bool conference_mode; - absl::optional codec_settings; + std::optional codec_settings; + std::vector codec_settings_list; // Sent resolutions + bitrates etc. by the underlying VideoSendStream, // typically changes when setting a new resolution or reconfiguring // bitrates. - webrtc::VideoEncoderConfig encoder_config; + VideoEncoderConfig encoder_config; }; - rtc::scoped_refptr - ConfigureVideoEncoderSettings(const VideoCodec& codec); - void SetCodec(const VideoCodecSettings& codec); + scoped_refptr + ConfigureVideoEncoderSettings(const Codec& codec); + void SetCodec(const VideoCodecSettings& codec, + const std::vector& codec_settings_list); void RecreateWebRtcStream(); - webrtc::VideoEncoderConfig CreateVideoEncoderConfig( - const VideoCodec& codec) const; - void ReconfigureEncoder(webrtc::SetParametersCallback callback); + VideoEncoderConfig CreateVideoEncoderConfig(const Codec& codec) const; + void ReconfigureEncoder(SetParametersCallback callback); - // Calls Start or Stop according to whether or not `sending_` is true, - // and whether or not the encoding in `rtp_parameters_` is active. + // Calls Start or Stop according to whether or not `sending_` is true. 
void UpdateSendState(); - webrtc::DegradationPreference GetDegradationPreference() const + DegradationPreference GetDegradationPreference() const RTC_EXCLUSIVE_LOCKS_REQUIRED(&thread_checker_); - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker thread_checker_; - webrtc::TaskQueueBase* const worker_thread_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker thread_checker_; + TaskQueueBase* const worker_thread_; const std::vector ssrcs_ RTC_GUARDED_BY(&thread_checker_); const std::vector ssrc_groups_ RTC_GUARDED_BY(&thread_checker_); - webrtc::Call* const call_; + Call* const call_; const bool enable_cpu_overuse_detection_; - rtc::VideoSourceInterface* source_ - RTC_GUARDED_BY(&thread_checker_); + VideoSourceInterface* source_ RTC_GUARDED_BY(&thread_checker_); - webrtc::VideoSendStream* stream_ RTC_GUARDED_BY(&thread_checker_); + VideoSendStream* stream_ RTC_GUARDED_BY(&thread_checker_); // Contains settings that are the same for all streams in the MediaChannel, // such as codecs, header extensions, and the global bitrate limit for the @@ -429,7 +421,7 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, // TODO(skvlad): Move ssrcs_ and ssrc_groups_ into rtp_parameters_. // TODO(skvlad): Combine parameters_ and rtp_parameters_ once we have only // one stream per MediaChannel. - webrtc::RtpParameters rtp_parameters_ RTC_GUARDED_BY(&thread_checker_); + RtpParameters rtp_parameters_ RTC_GUARDED_BY(&thread_checker_); bool sending_ RTC_GUARDED_BY(&thread_checker_); @@ -439,7 +431,7 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, const bool disable_automatic_resize_; }; - void Construct(webrtc::Call* call, WebRtcVideoEngine* engine); + void Construct(Call* call, WebRtcVideoEngine* engine); // Get all codecs that are compatible with the receiver. std::vector SelectSendVideoCodecs( @@ -448,7 +440,7 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, void FillSenderStats(VideoMediaSendInfo* info, bool log_stats) RTC_EXCLUSIVE_LOCKS_REQUIRED(thread_checker_); - void FillBandwidthEstimationStats(const webrtc::Call::Stats& stats, + void FillBandwidthEstimationStats(const Call::Stats& stats, VideoMediaInfo* info) RTC_EXCLUSIVE_LOCKS_REQUIRED(thread_checker_); void FillSendCodecStats(VideoMediaSendInfo* video_media_info) @@ -458,23 +450,22 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, // that a receive channel does not touch the send codec directly. // Can go away once these are different classes. 
// TODO(bugs.webrtc.org/13931): Remove this function - absl::optional& send_codec() { return send_codec_; } - const absl::optional& send_codec() const { + std::optional& send_codec() { return send_codec_; } + const std::optional& send_codec() const { return send_codec_; } - webrtc::TaskQueueBase* const worker_thread_; - webrtc::ScopedTaskSafety task_safety_; - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker network_thread_checker_{ - webrtc::SequenceChecker::kDetached}; - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker thread_checker_; + TaskQueueBase* const worker_thread_; + ScopedTaskSafety task_safety_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker network_thread_checker_{ + SequenceChecker::kDetached}; + RTC_NO_UNIQUE_ADDRESS SequenceChecker thread_checker_; uint32_t rtcp_receiver_report_ssrc_ RTC_GUARDED_BY(thread_checker_); bool sending_ RTC_GUARDED_BY(thread_checker_); bool receiving_ RTC_GUARDED_BY(&thread_checker_); - webrtc::Call* const call_; + Call* const call_; - rtc::VideoSinkInterface* default_sink_ - RTC_GUARDED_BY(thread_checker_); + VideoSinkInterface* default_sink_ RTC_GUARDED_BY(thread_checker_); // Delay for unsignaled streams, which may be set before the stream exists. int default_recv_base_minimum_delay_ms_ RTC_GUARDED_BY(thread_checker_) = 0; @@ -500,34 +491,31 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, // is a risk of receiving ssrcs for other, recently added m= sections. uint32_t demuxer_criteria_id_ RTC_GUARDED_BY(thread_checker_) = 0; uint32_t demuxer_criteria_completed_id_ RTC_GUARDED_BY(thread_checker_) = 0; - absl::optional last_unsignalled_ssrc_creation_time_ms_ + std::optional last_unsignalled_ssrc_creation_time_ms_ RTC_GUARDED_BY(thread_checker_); std::set send_ssrcs_ RTC_GUARDED_BY(thread_checker_); std::set receive_ssrcs_ RTC_GUARDED_BY(thread_checker_); - absl::optional send_codec_ - RTC_GUARDED_BY(thread_checker_); + std::optional send_codec_ RTC_GUARDED_BY(thread_checker_); std::vector negotiated_codecs_ RTC_GUARDED_BY(thread_checker_); + std::vector send_codecs_ RTC_GUARDED_BY(thread_checker_); - std::vector send_rtp_extensions_ + std::vector send_rtp_extensions_ RTC_GUARDED_BY(thread_checker_); - webrtc::VideoEncoderFactory* const encoder_factory_ - RTC_GUARDED_BY(thread_checker_); - webrtc::VideoDecoderFactory* const decoder_factory_ - RTC_GUARDED_BY(thread_checker_); - webrtc::VideoBitrateAllocatorFactory* const bitrate_allocator_factory_ + VideoEncoderFactory* const encoder_factory_ RTC_GUARDED_BY(thread_checker_); + VideoDecoderFactory* const decoder_factory_ RTC_GUARDED_BY(thread_checker_); + VideoBitrateAllocatorFactory* const bitrate_allocator_factory_ RTC_GUARDED_BY(thread_checker_); std::vector recv_codecs_ RTC_GUARDED_BY(thread_checker_); - webrtc::RtpHeaderExtensionMap recv_rtp_extension_map_ - RTC_GUARDED_BY(thread_checker_); - std::vector recv_rtp_extensions_ + RtpHeaderExtensionMap recv_rtp_extension_map_ RTC_GUARDED_BY(thread_checker_); + std::vector recv_rtp_extensions_ RTC_GUARDED_BY(thread_checker_); // See reason for keeping track of the FlexFEC payload type separately in // comment in WebRtcVideoChannel::ChangedReceiverParameters. int recv_flexfec_payload_type_ RTC_GUARDED_BY(thread_checker_); - webrtc::BitrateConstraints bitrate_config_ RTC_GUARDED_BY(thread_checker_); + BitrateConstraints bitrate_config_ RTC_GUARDED_BY(thread_checker_); // TODO(deadbeef): Don't duplicate information between // send_params/recv_params, rtp_extensions, options, etc. 
VideoSenderParameters send_params_ RTC_GUARDED_BY(thread_checker_); @@ -543,21 +531,16 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, StreamParams unsignaled_stream_params_ RTC_GUARDED_BY(thread_checker_); // Per peer connection crypto options that last for the lifetime of the peer // connection. - const webrtc::CryptoOptions crypto_options_ RTC_GUARDED_BY(thread_checker_); + const CryptoOptions crypto_options_ RTC_GUARDED_BY(thread_checker_); // Optional frame transformer set on unsignaled streams. - rtc::scoped_refptr - unsignaled_frame_transformer_ RTC_GUARDED_BY(thread_checker_); - - // TODO(bugs.webrtc.org/11341): Remove this and relevant PC API. Presence - // of multiple negotiated codecs allows generic encoder fallback on failures. - // Presence of EncoderSelector allows switching to specific encoders. - bool allow_codec_switching_ = false; + scoped_refptr unsignaled_frame_transformer_ + RTC_GUARDED_BY(thread_checker_); // RTP parameters that need to be set when creating a video receive stream. // Only used in Receiver mode - in Both mode, it reads those things from the // codec. - webrtc::VideoReceiveStreamInterface::Config::Rtp rtp_config_; + VideoReceiveStreamInterface::Config::Rtp rtp_config_; // Callback invoked whenever the send codec changes. // TODO(bugs.webrtc.org/13931): Remove again when coupling isn't needed. @@ -570,15 +553,15 @@ class WebRtcVideoSendChannel : public MediaChannelUtil, class WebRtcVideoReceiveChannel : public MediaChannelUtil, public VideoMediaReceiveChannelInterface { public: - WebRtcVideoReceiveChannel(webrtc::Call* call, + WebRtcVideoReceiveChannel(Call* call, const MediaConfig& config, const VideoOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::VideoDecoderFactory* decoder_factory); + const CryptoOptions& crypto_options, + VideoDecoderFactory* decoder_factory); ~WebRtcVideoReceiveChannel() override; public: - MediaType media_type() const override { return MEDIA_TYPE_VIDEO; } + MediaType media_type() const override { return MediaType::VIDEO; } VideoMediaReceiveChannelInterface* AsVideoReceiveChannel() override { return this; } @@ -591,8 +574,8 @@ class WebRtcVideoReceiveChannel : public MediaChannelUtil, void SetInterface(MediaChannelNetworkInterface* iface) override; // VideoMediaReceiveChannelInterface implementation bool SetReceiverParameters(const VideoReceiverParameters& params) override; - webrtc::RtpParameters GetRtpReceiverParameters(uint32_t ssrc) const override; - webrtc::RtpParameters GetDefaultRtpReceiveParameters() const override; + RtpParameters GetRtpReceiverParameters(uint32_t ssrc) const override; + RtpParameters GetDefaultRtpReceiveParameters() const override; void SetReceive(bool receive) override; bool AddRecvStream(const StreamParams& sp) override; bool AddDefaultRecvStreamForTesting(const StreamParams& sp) override { @@ -601,19 +584,16 @@ class WebRtcVideoReceiveChannel : public MediaChannelUtil, } bool RemoveRecvStream(uint32_t ssrc) override; void ResetUnsignaledRecvStream() override; - absl::optional GetUnsignaledSsrc() const override; + std::optional GetUnsignaledSsrc() const override; void OnDemuxerCriteriaUpdatePending() override; void OnDemuxerCriteriaUpdateComplete() override; - bool SetSink(uint32_t ssrc, - rtc::VideoSinkInterface* sink) override; - void SetDefaultSink( - rtc::VideoSinkInterface* sink) override; + bool SetSink(uint32_t ssrc, VideoSinkInterface* sink) override; + void SetDefaultSink(VideoSinkInterface* sink) override; bool GetStats(VideoMediaReceiveInfo* info) 
override; - void OnPacketReceived(const webrtc::RtpPacketReceived& packet) override; + void OnPacketReceived(const RtpPacketReceived& packet) override; bool SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) override; - absl::optional GetBaseMinimumPlayoutDelayMs( - uint32_t ssrc) const override; + std::optional GetBaseMinimumPlayoutDelayMs(uint32_t ssrc) const override; // Choose one of the available SSRCs (or default if none) as the current // receiver report SSRC. @@ -623,36 +603,34 @@ class WebRtcVideoReceiveChannel : public MediaChannelUtil, // Set a frame decryptor to a particular ssrc that will intercept all // incoming video frames and attempt to decrypt them before forwarding the // result. - void SetFrameDecryptor(uint32_t ssrc, - rtc::scoped_refptr - frame_decryptor) override; + void SetFrameDecryptor( + uint32_t ssrc, + scoped_refptr frame_decryptor) override; void SetRecordableEncodedFrameCallback( uint32_t ssrc, - std::function callback) - override; + std::function callback) override; void ClearRecordableEncodedFrameCallback(uint32_t ssrc) override; void RequestRecvKeyFrame(uint32_t ssrc) override; void SetDepacketizerToDecoderFrameTransformer( uint32_t ssrc, - rtc::scoped_refptr frame_transformer) - override; - std::vector GetSources(uint32_t ssrc) const override; + scoped_refptr frame_transformer) override; + std::vector GetSources(uint32_t ssrc) const override; void SetReceiverFeedbackParameters(bool lntf_enabled, bool nack_enabled, - webrtc::RtcpMode rtcp_mode, - absl::optional rtx_time) override; + RtcpMode rtcp_mode, + std::optional rtx_time) override; private: class WebRtcVideoReceiveStream; struct ChangedReceiverParameters { // These optionals are unset if not changed. - absl::optional> codec_settings; - absl::optional> rtp_header_extensions; + std::optional> codec_settings; + std::optional> rtp_header_extensions; // Keep track of the FlexFEC payload type separately from `codec_settings`. // This allows us to recreate the FlexfecReceiveStream separately from the // VideoReceiveStreamInterface when the FlexFEC payload type is changed. - absl::optional flexfec_payload_type; + std::optional flexfec_payload_type; }; // Finds VideoReceiveStreamInterface corresponding to ssrc. Aware of @@ -660,24 +638,22 @@ class WebRtcVideoReceiveChannel : public MediaChannelUtil, WebRtcVideoReceiveStream* FindReceiveStream(uint32_t ssrc) RTC_EXCLUSIVE_LOCKS_REQUIRED(thread_checker_); - void ProcessReceivedPacket(webrtc::RtpPacketReceived packet) + void ProcessReceivedPacket(RtpPacketReceived packet) RTC_RUN_ON(thread_checker_); // Expected to be invoked once per packet that belongs to this channel that // can not be demuxed. // Returns true if a new default stream has been created. - bool MaybeCreateDefaultReceiveStream( - const webrtc::RtpPacketReceived& parsed_packet) + bool MaybeCreateDefaultReceiveStream(const RtpPacketReceived& parsed_packet) RTC_EXCLUSIVE_LOCKS_REQUIRED(thread_checker_); void ReCreateDefaultReceiveStream(uint32_t ssrc, - absl::optional rtx_ssrc); + std::optional rtx_ssrc); // Add a receive stream. Used for testing. 
bool AddRecvStream(const StreamParams& sp, bool default_stream); - void ConfigureReceiverRtp( - webrtc::VideoReceiveStreamInterface::Config* config, - webrtc::FlexfecReceiveStream::Config* flexfec_config, - const StreamParams& sp) const + void ConfigureReceiverRtp(VideoReceiveStreamInterface::Config* config, + FlexfecReceiveStream::Config* flexfec_config, + const StreamParams& sp) const RTC_EXCLUSIVE_LOCKS_REQUIRED(thread_checker_); bool ValidateReceiveSsrcAvailability(const StreamParams& sp) const RTC_EXCLUSIVE_LOCKS_REQUIRED(thread_checker_); @@ -690,59 +666,57 @@ class WebRtcVideoReceiveChannel : public MediaChannelUtil, // Wrapper for the receiver part, contains configs etc. that are needed to // reconstruct the underlying VideoReceiveStreamInterface. - class WebRtcVideoReceiveStream - : public rtc::VideoSinkInterface { + class WebRtcVideoReceiveStream : public VideoSinkInterface { public: WebRtcVideoReceiveStream( - webrtc::Call* call, + Call* call, const StreamParams& sp, - webrtc::VideoReceiveStreamInterface::Config config, + VideoReceiveStreamInterface::Config config, bool default_stream, const std::vector& recv_codecs, - const webrtc::FlexfecReceiveStream::Config& flexfec_config); + const FlexfecReceiveStream::Config& flexfec_config); ~WebRtcVideoReceiveStream(); - webrtc::VideoReceiveStreamInterface& stream(); + VideoReceiveStreamInterface& stream(); // Return value may be nullptr. - webrtc::FlexfecReceiveStream* flexfec_stream(); + FlexfecReceiveStream* flexfec_stream(); const std::vector& GetSsrcs() const; - std::vector GetSources(); + std::vector GetSources(); // Does not return codecs, nor header extensions, they are filled by the // owning WebRtcVideoChannel. - webrtc::RtpParameters GetRtpParameters() const; + RtpParameters GetRtpParameters() const; // TODO(deadbeef): Move these feedback parameters into the recv parameters. void SetFeedbackParameters(bool lntf_enabled, bool nack_enabled, - webrtc::RtcpMode rtcp_mode, - absl::optional rtx_time); + RtcpMode rtcp_mode, + std::optional rtx_time); void SetReceiverParameters(const ChangedReceiverParameters& recv_params); - void OnFrame(const webrtc::VideoFrame& frame) override; + void OnFrame(const VideoFrame& frame) override; bool IsDefaultStream() const; void SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor); + scoped_refptr frame_decryptor); bool SetBaseMinimumPlayoutDelayMs(int delay_ms); int GetBaseMinimumPlayoutDelayMs() const; - void SetSink(rtc::VideoSinkInterface* sink); + void SetSink(VideoSinkInterface* sink); VideoReceiverInfo GetVideoReceiverInfo(bool log_stats); void SetRecordableEncodedFrameCallback( - std::function callback); + std::function callback); void ClearRecordableEncodedFrameCallback(); void GenerateKeyFrame(); void SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr - frame_transformer); + scoped_refptr frame_transformer); void SetLocalSsrc(uint32_t local_ssrc); void UpdateRtxSsrc(uint32_t ssrc); @@ -763,27 +737,26 @@ class WebRtcVideoReceiveChannel : public MediaChannelUtil, // were applied. bool ReconfigureCodecs(const std::vector& recv_codecs); - webrtc::Call* const call_; + Call* const call_; const StreamParams stream_params_; // Both `stream_` and `flexfec_stream_` are managed by `this`. They are // destroyed by calling call_->DestroyVideoReceiveStream and // call_->DestroyFlexfecReceiveStream, respectively. 
- webrtc::VideoReceiveStreamInterface* stream_; + VideoReceiveStreamInterface* stream_; const bool default_stream_; - webrtc::VideoReceiveStreamInterface::Config config_; - webrtc::FlexfecReceiveStream::Config flexfec_config_; - webrtc::FlexfecReceiveStream* flexfec_stream_; + VideoReceiveStreamInterface::Config config_; + FlexfecReceiveStream::Config flexfec_config_; + FlexfecReceiveStream* flexfec_stream_; - webrtc::Mutex sink_lock_; - rtc::VideoSinkInterface* sink_ - RTC_GUARDED_BY(sink_lock_); + Mutex sink_lock_; + VideoSinkInterface* sink_ RTC_GUARDED_BY(sink_lock_); int64_t first_frame_timestamp_ RTC_GUARDED_BY(sink_lock_); // Start NTP time is estimated as current remote NTP time (estimated from // RTCP) minus the elapsed time, as soon as remote NTP time is available. int64_t estimated_remote_start_ntp_time_ms_ RTC_GUARDED_BY(sink_lock_); - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker thread_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker thread_checker_; bool receiving_ RTC_GUARDED_BY(&thread_checker_); }; bool GetChangedReceiverParameters(const VideoReceiverParameters& params, @@ -802,18 +775,17 @@ class WebRtcVideoReceiveChannel : public MediaChannelUtil, return unsignaled_stream_params_; } // Variables. - webrtc::TaskQueueBase* const worker_thread_; - webrtc::ScopedTaskSafety task_safety_; - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker network_thread_checker_{ - webrtc::SequenceChecker::kDetached}; - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker thread_checker_; + TaskQueueBase* const worker_thread_; + ScopedTaskSafety task_safety_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker network_thread_checker_{ + SequenceChecker::kDetached}; + RTC_NO_UNIQUE_ADDRESS SequenceChecker thread_checker_; uint32_t rtcp_receiver_report_ssrc_ RTC_GUARDED_BY(thread_checker_); bool receiving_ RTC_GUARDED_BY(&thread_checker_); - webrtc::Call* const call_; + Call* const call_; - rtc::VideoSinkInterface* default_sink_ - RTC_GUARDED_BY(thread_checker_); + VideoSinkInterface* default_sink_ RTC_GUARDED_BY(thread_checker_); // Delay for unsignaled streams, which may be set before the stream exists. int default_recv_base_minimum_delay_ms_ RTC_GUARDED_BY(thread_checker_) = 0; @@ -836,30 +808,27 @@ class WebRtcVideoReceiveChannel : public MediaChannelUtil, // is a risk of receiving ssrcs for other, recently added m= sections. 
uint32_t demuxer_criteria_id_ RTC_GUARDED_BY(thread_checker_) = 0; uint32_t demuxer_criteria_completed_id_ RTC_GUARDED_BY(thread_checker_) = 0; - absl::optional last_unsignalled_ssrc_creation_time_ms_ + std::optional last_unsignalled_ssrc_creation_time_ms_ RTC_GUARDED_BY(thread_checker_); std::set send_ssrcs_ RTC_GUARDED_BY(thread_checker_); std::set receive_ssrcs_ RTC_GUARDED_BY(thread_checker_); - absl::optional send_codec_ - RTC_GUARDED_BY(thread_checker_); + std::optional send_codec_ RTC_GUARDED_BY(thread_checker_); std::vector negotiated_codecs_ RTC_GUARDED_BY(thread_checker_); - std::vector send_rtp_extensions_ + std::vector send_rtp_extensions_ RTC_GUARDED_BY(thread_checker_); - webrtc::VideoDecoderFactory* const decoder_factory_ - RTC_GUARDED_BY(thread_checker_); + VideoDecoderFactory* const decoder_factory_ RTC_GUARDED_BY(thread_checker_); std::vector recv_codecs_ RTC_GUARDED_BY(thread_checker_); - webrtc::RtpHeaderExtensionMap recv_rtp_extension_map_ - RTC_GUARDED_BY(thread_checker_); - std::vector recv_rtp_extensions_ + RtpHeaderExtensionMap recv_rtp_extension_map_ RTC_GUARDED_BY(thread_checker_); + std::vector recv_rtp_extensions_ RTC_GUARDED_BY(thread_checker_); // See reason for keeping track of the FlexFEC payload type separately in // comment in WebRtcVideoChannel::ChangedReceiverParameters. int recv_flexfec_payload_type_ RTC_GUARDED_BY(thread_checker_); - webrtc::BitrateConstraints bitrate_config_ RTC_GUARDED_BY(thread_checker_); + BitrateConstraints bitrate_config_ RTC_GUARDED_BY(thread_checker_); // TODO(deadbeef): Don't duplicate information between // send_params/recv_params, rtp_extensions, options, etc. VideoSenderParameters send_params_ RTC_GUARDED_BY(thread_checker_); @@ -874,16 +843,16 @@ class WebRtcVideoReceiveChannel : public MediaChannelUtil, StreamParams unsignaled_stream_params_ RTC_GUARDED_BY(thread_checker_); // Per peer connection crypto options that last for the lifetime of the peer // connection. - const webrtc::CryptoOptions crypto_options_ RTC_GUARDED_BY(thread_checker_); + const CryptoOptions crypto_options_ RTC_GUARDED_BY(thread_checker_); // Optional frame transformer set on unsignaled streams. - rtc::scoped_refptr - unsignaled_frame_transformer_ RTC_GUARDED_BY(thread_checker_); + scoped_refptr unsignaled_frame_transformer_ + RTC_GUARDED_BY(thread_checker_); // RTP parameters that need to be set when creating a video receive stream. // Only used in Receiver mode - in Both mode, it reads those things from the // codec. - webrtc::VideoReceiveStreamInterface::Config::Rtp rtp_config_; + VideoReceiveStreamInterface::Config::Rtp rtp_config_; // Callback invoked whenever the send codec changes. // TODO(bugs.webrtc.org/13931): Remove again when coupling isn't needed. @@ -891,10 +860,12 @@ class WebRtcVideoReceiveChannel : public MediaChannelUtil, // Callback invoked whenever the list of SSRCs changes. absl::AnyInvocable&)> ssrc_list_changed_callback_; + + const int receive_buffer_size_; }; // Keeping the old name "WebRtcVideoChannel" around because some external -// customers are using cricket::WebRtcVideoChannel::AdaptReason +// customers are using webrtc::WebRtcVideoChannel::AdaptReason // TODO(bugs.webrtc.org/15216): Move this enum to an interface class and // delete this workaround. 
class WebRtcVideoChannel : public WebRtcVideoSendChannel { @@ -904,6 +875,19 @@ class WebRtcVideoChannel : public WebRtcVideoSendChannel { using WebRtcVideoSendChannel::AdaptReason; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::MergeInfoAboutOutboundRtpSubstreamsForTesting; +using ::webrtc::VideoCodecSettings; +using ::webrtc::WebRtcVideoChannel; +using ::webrtc::WebRtcVideoEngine; +using ::webrtc::WebRtcVideoReceiveChannel; +using ::webrtc::WebRtcVideoSendChannel; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_ENGINE_WEBRTC_VIDEO_ENGINE_H_ diff --git a/media/engine/webrtc_video_engine_unittest.cc b/media/engine/webrtc_video_engine_unittest.cc index 4262aa03db..8e1029a2e1 100644 --- a/media/engine/webrtc_video_engine_unittest.cc +++ b/media/engine/webrtc_video_engine_unittest.cc @@ -11,33 +11,61 @@ #include "media/engine/webrtc_video_engine.h" #include +#include #include +#include +#include #include #include +#include +#include #include +#include +#include #include #include #include "absl/algorithm/container.h" -#include "absl/memory/memory.h" +#include "absl/container/inlined_vector.h" #include "absl/strings/match.h" -#include "api/rtc_event_log/rtc_event_log.h" +#include "api/call/transport.h" +#include "api/crypto/crypto_options.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/make_ref_counted.h" +#include "api/media_types.h" +#include "api/priority.h" +#include "api/rtc_error.h" +#include "api/rtp_headers.h" #include "api/rtp_parameters.h" -#include "api/task_queue/default_task_queue_factory.h" +#include "api/scoped_refptr.h" #include "api/test/mock_encoder_selector.h" #include "api/test/mock_video_bitrate_allocator.h" #include "api/test/mock_video_bitrate_allocator_factory.h" #include "api/test/mock_video_decoder_factory.h" #include "api/test/mock_video_encoder_factory.h" #include "api/test/video/function_video_decoder_factory.h" +#include "api/transport/bitrate_settings.h" #include "api/transport/field_trial_based_config.h" +#include "api/transport/rtp/rtp_source.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" #include "api/video/i420_buffer.h" -#include "api/video/video_bitrate_allocation.h" +#include "api/video/recordable_encoded_frame.h" +#include "api/video/resolution.h" +#include "api/video/video_bitrate_allocator_factory.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" #include "api/video_codecs/h264_profile_level_id.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_decoder_factory_template.h" #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" @@ -51,38 +79,56 @@ #include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h" #include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h" #include 
"api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" +#include "call/call.h" +#include "call/call_config.h" #include "call/flexfec_receive_stream.h" +#include "call/video_receive_stream.h" +#include "call/video_send_stream.h" +#include "common_video/include/quality_limitation_reason.h" +#include "media/base/codec.h" #include "media/base/fake_frame_source.h" #include "media/base/fake_network_interface.h" #include "media/base/fake_video_renderer.h" #include "media/base/media_channel.h" +#include "media/base/media_config.h" #include "media/base/media_constants.h" -#include "media/base/rtp_utils.h" +#include "media/base/media_engine.h" +#include "media/base/rid_description.h" +#include "media/base/stream_params.h" #include "media/base/test_utils.h" +#include "media/base/video_common.h" #include "media/engine/fake_webrtc_call.h" #include "media/engine/fake_webrtc_video_engine.h" -#include "media/engine/webrtc_voice_engine.h" +#include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" -#include "modules/rtp_rtcp/source/rtcp_packet.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/video_coding/svc/scalability_mode_util.h" #include "rtc_base/arraysize.h" -#include "rtc_base/event.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/dscp.h" #include "rtc_base/experiments/min_video_bitrate_experiment.h" -#include "rtc_base/fake_clock.h" -#include "rtc_base/gunit.h" #include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/socket.h" #include "rtc_base/time_utils.h" #include "test/fake_decoder.h" #include "test/frame_forwarder.h" #include "test/gmock.h" +#include "test/gtest.h" #include "test/rtcp_packet_parser.h" #include "test/scoped_key_value_config.h" #include "test/time_controller/simulated_time_controller.h" +#include "video/config/encoder_stream_factory.h" #include "video/config/simulcast.h" +#include "video/config/video_encoder_config.h" using ::testing::_; +using ::testing::Combine; using ::testing::Contains; using ::testing::Each; using ::testing::ElementsAre; @@ -98,18 +144,13 @@ using ::testing::SizeIs; using ::testing::StrNe; using ::testing::Values; using ::testing::WithArg; -using ::webrtc::BitrateConstraints; -using ::webrtc::kDefaultScalabilityModeStr; -using ::webrtc::RtpExtension; -using ::webrtc::RtpPacket; -using ::webrtc::RtpPacketReceived; -using ::webrtc::ScalabilityMode; -using ::webrtc::TimeDelta; -using ::webrtc::Timestamp; +using ::webrtc::test::FrameForwarder; +using ::webrtc::test::FunctionVideoDecoderFactory; using ::webrtc::test::RtcpPacketParser; +using ::webrtc::test::ScopedKeyValueConfig; +namespace webrtc { namespace { -static const int kDefaultQpMax = 56; static const uint8_t kRedRtxPayloadType = 125; @@ -134,36 +175,34 @@ constexpr size_t kNumSimulcastStreams = 3; static const char kUnsupportedExtensionName[] = "urn:ietf:params:rtp-hdrext:unsupported"; -cricket::VideoCodec RemoveFeedbackParams(cricket::VideoCodec&& codec) { - codec.feedback_params = cricket::FeedbackParams(); +webrtc::Codec RemoveFeedbackParams(webrtc::Codec&& codec) { + codec.feedback_params = webrtc::FeedbackParams(); return std::move(codec); } -void 
VerifyCodecHasDefaultFeedbackParams(const cricket::VideoCodec& codec, +void VerifyCodecHasDefaultFeedbackParams(const webrtc::Codec& codec, bool lntf_expected) { - EXPECT_EQ(lntf_expected, - codec.HasFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamLntf, cricket::kParamValueEmpty))); - EXPECT_TRUE(codec.HasFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamNack, cricket::kParamValueEmpty))); - EXPECT_TRUE(codec.HasFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamNack, cricket::kRtcpFbNackParamPli))); - EXPECT_TRUE(codec.HasFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamRemb, cricket::kParamValueEmpty))); - EXPECT_TRUE(codec.HasFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty))); - EXPECT_TRUE(codec.HasFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir))); + EXPECT_EQ(lntf_expected, codec.HasFeedbackParam(webrtc::FeedbackParam( + kRtcpFbParamLntf, kParamValueEmpty))); + EXPECT_TRUE(codec.HasFeedbackParam( + webrtc::FeedbackParam(kRtcpFbParamNack, kParamValueEmpty))); + EXPECT_TRUE(codec.HasFeedbackParam( + webrtc::FeedbackParam(kRtcpFbParamNack, kRtcpFbNackParamPli))); + EXPECT_TRUE(codec.HasFeedbackParam( + webrtc::FeedbackParam(kRtcpFbParamRemb, kParamValueEmpty))); + EXPECT_TRUE(codec.HasFeedbackParam( + webrtc::FeedbackParam(kRtcpFbParamTransportCc, kParamValueEmpty))); + EXPECT_TRUE(codec.HasFeedbackParam( + webrtc::FeedbackParam(kRtcpFbParamCcm, kRtcpFbCcmParamFir))); } // Return true if any codec in `codecs` is an RTX codec with associated // payload type `payload_type`. -bool HasRtxCodec(const std::vector& codecs, - int payload_type) { - for (const cricket::VideoCodec& codec : codecs) { +bool HasRtxCodec(const std::vector& codecs, int payload_type) { + for (const webrtc::Codec& codec : codecs) { int associated_payload_type; if (absl::EqualsIgnoreCase(codec.name.c_str(), "rtx") && - codec.GetParam(cricket::kCodecParamAssociatedPayloadType, + codec.GetParam(kCodecParamAssociatedPayloadType, &associated_payload_type) && associated_payload_type == payload_type) { return true; @@ -174,8 +213,8 @@ bool HasRtxCodec(const std::vector& codecs, // Return true if any codec in `codecs` is an RTX codec, independent of // payload type. 
-bool HasAnyRtxCodec(const std::vector& codecs) { - for (const cricket::VideoCodec& codec : codecs) { +bool HasAnyRtxCodec(const std::vector& codecs) { + for (const webrtc::Codec& codec : codecs) { if (absl::EqualsIgnoreCase(codec.name.c_str(), "rtx")) { return true; } @@ -208,10 +247,9 @@ bool VerifyRtxReceiveAssociations( return true; } -rtc::scoped_refptr CreateBlackFrameBuffer( - int width, - int height) { - rtc::scoped_refptr buffer = +scoped_refptr CreateBlackFrameBuffer(int width, + int height) { + scoped_refptr buffer = webrtc::I420Buffer::Create(width, height); webrtc::I420Buffer::SetBlack(buffer.get()); return buffer; @@ -231,8 +269,8 @@ void VerifySendStreamHasRtxTypes(const webrtc::VideoSendStream::Config& config, } } -cricket::MediaConfig GetMediaConfig() { - cricket::MediaConfig media_config; +MediaConfig GetMediaConfig() { + MediaConfig media_config; media_config.video.enable_cpu_adaptation = false; return media_config; } @@ -250,34 +288,35 @@ int GetMaxDefaultBitrateBps(size_t width, size_t height) { } } -class MockVideoSource : public rtc::VideoSourceInterface { +class MockVideoSource + : public webrtc::VideoSourceInterface { public: MOCK_METHOD(void, AddOrUpdateSink, - (rtc::VideoSinkInterface * sink, - const rtc::VideoSinkWants& wants), + (webrtc::VideoSinkInterface * sink, + const webrtc::VideoSinkWants& wants), (override)); MOCK_METHOD(void, RemoveSink, - (rtc::VideoSinkInterface * sink), + (webrtc::VideoSinkInterface * sink), (override)); }; -class MockNetworkInterface : public cricket::MediaChannelNetworkInterface { +class MockNetworkInterface : public MediaChannelNetworkInterface { public: MOCK_METHOD(bool, SendPacket, - (rtc::CopyOnWriteBuffer * packet, - const rtc::PacketOptions& options), + (webrtc::CopyOnWriteBuffer * packet, + const webrtc::AsyncSocketPacketOptions& options), (override)); MOCK_METHOD(bool, SendRtcp, - (rtc::CopyOnWriteBuffer * packet, - const rtc::PacketOptions& options), + (webrtc::CopyOnWriteBuffer * packet, + const webrtc::AsyncSocketPacketOptions& options), (override)); MOCK_METHOD(int, SetOption, - (SocketType type, rtc::Socket::Option opt, int option), + (SocketType type, webrtc::Socket::Option opt, int option), (override)); }; @@ -286,8 +325,7 @@ std::vector GetStreamResolutions( std::vector res; for (const auto& s : streams) { if (s.active) { - res.push_back( - {rtc::checked_cast(s.width), rtc::checked_cast(s.height)}); + res.push_back({checked_cast(s.width), checked_cast(s.height)}); } } return res; @@ -334,8 +372,6 @@ RtpPacketReceived BuildRtxPacket(uint32_t rtx_ssrc, return packet; } -} // namespace - // TODO(tommi): Consider replacing these macros with custom matchers. 
#define EXPECT_FRAME(c, w, h) \ EXPECT_EQ((c), renderer_.num_rendered_frames()); \ @@ -347,29 +383,24 @@ RtpPacketReceived BuildRtxPacket(uint32_t rtx_ssrc, EXPECT_EQ((w), (r).width()); \ EXPECT_EQ((h), (r).height()); -namespace cricket { class WebRtcVideoEngineTest : public ::testing::Test { public: WebRtcVideoEngineTest() : WebRtcVideoEngineTest("") {} explicit WebRtcVideoEngineTest(const std::string& field_trials) : field_trials_(field_trials), - time_controller_(webrtc::Timestamp::Millis(4711)), - task_queue_factory_(time_controller_.CreateTaskQueueFactory()), - call_(webrtc::Call::Create([&] { - webrtc::Call::Config call_config(&event_log_); - call_config.task_queue_factory = task_queue_factory_.get(); - call_config.trials = &field_trials_; - return call_config; - }())), - encoder_factory_(new cricket::FakeWebRtcVideoEncoderFactory), - decoder_factory_(new cricket::FakeWebRtcVideoDecoderFactory), + time_controller_(Timestamp::Millis(4711)), + env_(CreateEnvironment(&field_trials_, + time_controller_.CreateTaskQueueFactory(), + time_controller_.GetClock())), + call_(Call::Create(CallConfig(env_))), + encoder_factory_(new FakeWebRtcVideoEncoderFactory), + decoder_factory_(new FakeWebRtcVideoDecoderFactory), video_bitrate_allocator_factory_( webrtc::CreateBuiltinVideoBitrateAllocatorFactory()), - engine_(std::unique_ptr( - encoder_factory_), - std::unique_ptr( - decoder_factory_), - field_trials_) {} + engine_( + std::unique_ptr(encoder_factory_), + std::unique_ptr(decoder_factory_), + field_trials_) {} protected: void AssignDefaultAptRtxTypes(); @@ -381,31 +412,34 @@ class WebRtcVideoEngineTest : public ::testing::Test { // Find the codec in the engine with the given name. The codec must be // present. - cricket::VideoCodec GetEngineCodec(const std::string& name) const; - void AddSupportedVideoCodecType(const std::string& name); + Codec GetEngineCodec(const std::string& name) const; + void AddSupportedVideoCodecType( + const std::string& name, + const std::vector& scalability_modes = {}); + void AddSupportedVideoCodec(SdpVideoFormat format); + std::unique_ptr SetSendParamsWithAllSupportedCodecs(); std::unique_ptr SetRecvParamsWithAllSupportedCodecs(); std::unique_ptr - SetRecvParamsWithSupportedCodecs(const std::vector& codecs); + SetRecvParamsWithSupportedCodecs(const std::vector& codecs); void ExpectRtpCapabilitySupport(const char* uri, bool supported) const; - webrtc::test::ScopedKeyValueConfig field_trials_; - webrtc::GlobalSimulatedTimeController time_controller_; - webrtc::RtcEventLogNull event_log_; - std::unique_ptr task_queue_factory_; + ScopedKeyValueConfig field_trials_; + GlobalSimulatedTimeController time_controller_; + Environment env_; // Used in WebRtcVideoEngineVoiceTest, but defined here so it's properly // initialized when the constructor is called. 
- std::unique_ptr call_; - cricket::FakeWebRtcVideoEncoderFactory* encoder_factory_; - cricket::FakeWebRtcVideoDecoderFactory* decoder_factory_; - std::unique_ptr + std::unique_ptr call_; + FakeWebRtcVideoEncoderFactory* encoder_factory_; + FakeWebRtcVideoDecoderFactory* decoder_factory_; + std::unique_ptr video_bitrate_allocator_factory_; WebRtcVideoEngine engine_; - absl::optional default_codec_; + std::optional default_codec_; std::map default_apt_rtx_types_; }; @@ -413,7 +447,7 @@ TEST_F(WebRtcVideoEngineTest, DefaultRtxCodecHasAssociatedPayloadTypeSet) { encoder_factory_->AddSupportedVideoCodecType("VP8"); AssignDefaultCodec(); - std::vector engine_codecs = engine_.send_codecs(); + std::vector engine_codecs = engine_.LegacySendCodecs(); for (size_t i = 0; i < engine_codecs.size(); ++i) { if (engine_codecs[i].name != kRtxCodecName) continue; @@ -426,6 +460,60 @@ TEST_F(WebRtcVideoEngineTest, DefaultRtxCodecHasAssociatedPayloadTypeSet) { FAIL() << "No RTX codec found among default codecs."; } +// Test that we prefer to assign RTX payload types as "primary codec PT + 1". +// This is purely for backwards compatibility (see https://crbug.com/391132280). +// The spec does NOT mandate we do this and note that this is best-effort, if +// "PT + 1" is already in-use the PT suggester would pick a different PT. +TEST_F(WebRtcVideoEngineTest, + DefaultRtxCodecIsAssignedAssociatedPayloadTypePlusOne) { + AddSupportedVideoCodecType("VP8"); + AddSupportedVideoCodecType("VP9"); + AddSupportedVideoCodecType("AV1"); + AddSupportedVideoCodecType("H264"); + for (const webrtc::Codec& codec : engine_.LegacySendCodecs()) { + if (codec.name != kRtxCodecName) + continue; + int associated_payload_type; + ASSERT_TRUE(codec.GetParam(kCodecParamAssociatedPayloadType, + &associated_payload_type)); + EXPECT_EQ(codec.id, associated_payload_type + 1); + } + for (const webrtc::Codec& codec : engine_.LegacyRecvCodecs()) { + if (codec.name != kRtxCodecName) + continue; + int associated_payload_type; + ASSERT_TRUE(codec.GetParam(kCodecParamAssociatedPayloadType, + &associated_payload_type)); + EXPECT_EQ(codec.id, associated_payload_type + 1); + } +} + +MATCHER(HasUniquePtValues, "") { + std::unordered_set seen_ids; + for (const auto& codec : arg) { + if (seen_ids.count(codec.id) > 0) { + *result_listener << "Duplicate id for " << absl::StrCat(codec); + return false; + } + seen_ids.insert(codec.id); + } + return true; +} + +TEST_F(WebRtcVideoEngineTest, SupportingTwoKindsOfVp9IsOk) { + AddSupportedVideoCodecType("VP8"); + AddSupportedVideoCodec(SdpVideoFormat("VP9", {{"profile-id", "0"}})); + AddSupportedVideoCodec(SdpVideoFormat("VP9", {{"profile-id", "1"}})); + AddSupportedVideoCodec(SdpVideoFormat("VP9", {{"profile-id", "3"}})); + AddSupportedVideoCodec(SdpVideoFormat( + "AV1", {{"level-idx", "5"}, {"profile", "1"}, {"tier", "0"}})); + AddSupportedVideoCodec(SdpVideoFormat( + "AV1", {{"level-idx", "5"}, {"profile", "0"}, {"tier", "0"}})); + AddSupportedVideoCodec(SdpVideoFormat("VP9")); // No parameters + ASSERT_THAT(engine_.LegacySendCodecs(), HasUniquePtValues()); + ASSERT_THAT(engine_.LegacyRecvCodecs(), HasUniquePtValues()); +} + TEST_F(WebRtcVideoEngineTest, SupportsTimestampOffsetHeaderExtension) { ExpectRtpCapabilitySupport(RtpExtension::kTimestampOffsetUri, true); } @@ -462,6 +550,10 @@ TEST_F(WebRtcVideoEngineTest, AdvertiseGenericDescriptor00) { ExpectRtpCapabilitySupport(RtpExtension::kGenericFrameDescriptorUri00, false); } +TEST_F(WebRtcVideoEngineTest, SupportCorruptionDetectionHeaderExtension) { + 
ExpectRtpCapabilitySupport(RtpExtension::kCorruptionDetectionUri, false); +} + class WebRtcVideoEngineTestWithGenericDescriptor : public WebRtcVideoEngineTest { public: @@ -527,7 +619,7 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionBeforeCapturer) { // Add CVO extension. const int id = 1; - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.extensions.push_back( RtpExtension(RtpExtension::kVideoRotationUri, id)); @@ -535,7 +627,7 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionBeforeCapturer) { EXPECT_CALL( video_source, - AddOrUpdateSink(_, Field(&rtc::VideoSinkWants::rotation_applied, false))); + AddOrUpdateSink(_, Field(&VideoSinkWants::rotation_applied, false))); // Set capturer. EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &video_source)); @@ -546,7 +638,7 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionBeforeCapturer) { parameters.extensions.clear(); EXPECT_CALL( video_source, - AddOrUpdateSink(_, Field(&rtc::VideoSinkWants::rotation_applied, true))); + AddOrUpdateSink(_, Field(&VideoSinkWants::rotation_applied, true))); EXPECT_TRUE(send_channel->SetSenderParameters(parameters)); } @@ -561,7 +653,7 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionBeforeAddSendStream) { auto send_channel = SetSendParamsWithAllSupportedCodecs(); // Add CVO extension. const int id = 1; - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.extensions.push_back( RtpExtension(RtpExtension::kVideoRotationUri, id)); @@ -571,7 +663,7 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionBeforeAddSendStream) { // Set source. EXPECT_CALL( video_source, - AddOrUpdateSink(_, Field(&rtc::VideoSinkWants::rotation_applied, false))); + AddOrUpdateSink(_, Field(&VideoSinkWants::rotation_applied, false))); EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &video_source)); } @@ -588,7 +680,7 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionAfterCapturer) { // Set capturer. EXPECT_CALL( video_source, - AddOrUpdateSink(_, Field(&rtc::VideoSinkWants::rotation_applied, true))); + AddOrUpdateSink(_, Field(&VideoSinkWants::rotation_applied, true))); EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &video_source)); // Verify capturer has turned on applying rotation. @@ -596,7 +688,7 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionAfterCapturer) { // Add CVO extension. const int id = 1; - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); parameters.extensions.push_back( @@ -605,7 +697,7 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionAfterCapturer) { parameters.codecs.erase(parameters.codecs.begin()); EXPECT_CALL( video_source, - AddOrUpdateSink(_, Field(&rtc::VideoSinkWants::rotation_applied, false))); + AddOrUpdateSink(_, Field(&VideoSinkWants::rotation_applied, false))); EXPECT_TRUE(send_channel->SetSenderParameters(parameters)); // Verify capturer has turned off applying rotation. 
@@ -615,7 +707,7 @@ TEST_F(WebRtcVideoEngineTest, CVOSetHeaderExtensionAfterCapturer) { parameters.extensions.clear(); EXPECT_CALL( video_source, - AddOrUpdateSink(_, Field(&rtc::VideoSinkWants::rotation_applied, true))); + AddOrUpdateSink(_, Field(&VideoSinkWants::rotation_applied, true))); EXPECT_TRUE(send_channel->SetSenderParameters(parameters)); } @@ -624,7 +716,7 @@ TEST_F(WebRtcVideoEngineTest, SetSendFailsBeforeSettingCodecs) { std::unique_ptr send_channel = engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(), - webrtc::CryptoOptions(), + CryptoOptions(), video_bitrate_allocator_factory_.get()); EXPECT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(123))); @@ -640,7 +732,7 @@ TEST_F(WebRtcVideoEngineTest, GetStatsWithoutCodecsSetDoesNotCrash) { std::unique_ptr send_channel = engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(), - webrtc::CryptoOptions(), + CryptoOptions(), video_bitrate_allocator_factory_.get()); EXPECT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(123))); VideoMediaSendInfo send_info; @@ -648,7 +740,7 @@ TEST_F(WebRtcVideoEngineTest, GetStatsWithoutCodecsSetDoesNotCrash) { std::unique_ptr receive_channel = engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions()); + VideoOptions(), CryptoOptions()); EXPECT_TRUE(receive_channel->AddRecvStream(StreamParams::CreateLegacy(123))); VideoMediaReceiveInfo receive_info; receive_channel->GetStats(&receive_info); @@ -661,16 +753,14 @@ TEST_F(WebRtcVideoEngineTest, UseFactoryForVp8WhenSupported) { send_channel->OnReadyToSend(true); - EXPECT_TRUE( - send_channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(kSsrc))); EXPECT_EQ(0, encoder_factory_->GetNumCreatedEncoders()); EXPECT_TRUE(send_channel->SetSend(true)); - webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 30); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(1280, 720, webrtc::kNumMicrosecsPerSec / 30); EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); + time_controller_.AdvanceTime(TimeDelta::Zero()); // Sending one frame will have allocate the encoder. ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(1)); EXPECT_GT(encoder_factory_->encoders()[0]->GetNumEncodedFrames(), 0); @@ -680,7 +770,7 @@ TEST_F(WebRtcVideoEngineTest, UseFactoryForVp8WhenSupported) { // Setting codecs of the same type should not reallocate any encoders // (expecting a no-op). 
- cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); EXPECT_TRUE(send_channel->SetSenderParameters(parameters)); EXPECT_EQ(num_created_encoders, encoder_factory_->GetNumCreatedEncoders()); @@ -699,15 +789,15 @@ TEST_F(WebRtcVideoEngineTest, RtxCodecAddedForH264Codec) { using webrtc::H264Profile; using webrtc::H264ProfileLevelId; using webrtc::H264ProfileLevelIdToString; - webrtc::SdpVideoFormat h264_constrained_baseline("H264"); + SdpVideoFormat h264_constrained_baseline("H264"); h264_constrained_baseline.parameters[kH264FmtpProfileLevelId] = *H264ProfileLevelIdToString(H264ProfileLevelId( H264Profile::kProfileConstrainedBaseline, H264Level::kLevel1)); - webrtc::SdpVideoFormat h264_constrained_high("H264"); + SdpVideoFormat h264_constrained_high("H264"); h264_constrained_high.parameters[kH264FmtpProfileLevelId] = *H264ProfileLevelIdToString(H264ProfileLevelId( H264Profile::kProfileConstrainedHigh, H264Level::kLevel1)); - webrtc::SdpVideoFormat h264_high("H264"); + SdpVideoFormat h264_high("H264"); h264_high.parameters[kH264FmtpProfileLevelId] = *H264ProfileLevelIdToString( H264ProfileLevelId(H264Profile::kProfileHigh, H264Level::kLevel1)); @@ -716,20 +806,21 @@ TEST_F(WebRtcVideoEngineTest, RtxCodecAddedForH264Codec) { encoder_factory_->AddSupportedVideoCodec(h264_high); // First figure out what payload types the test codecs got assigned. - const std::vector codecs = engine_.send_codecs(); + const std::vector codecs = engine_.LegacySendCodecs(); // Now search for RTX codecs for them. Expect that they all have associated // RTX codecs. EXPECT_TRUE(HasRtxCodec( - codecs, FindMatchingCodec( - codecs, cricket::CreateVideoCodec(h264_constrained_baseline)) + codecs, webrtc::FindMatchingVideoCodec( + codecs, webrtc::CreateVideoCodec(h264_constrained_baseline)) ->id)); EXPECT_TRUE(HasRtxCodec( - codecs, FindMatchingCodec( - codecs, cricket::CreateVideoCodec(h264_constrained_high)) + codecs, webrtc::FindMatchingVideoCodec( + codecs, webrtc::CreateVideoCodec(h264_constrained_high)) ->id)); - EXPECT_TRUE(HasRtxCodec( - codecs, - FindMatchingCodec(codecs, cricket::CreateVideoCodec(h264_high))->id)); + EXPECT_TRUE( + HasRtxCodec(codecs, webrtc::FindMatchingVideoCodec( + codecs, webrtc::CreateVideoCodec(h264_high)) + ->id)); } #if defined(RTC_ENABLE_VP9) @@ -738,23 +829,21 @@ TEST_F(WebRtcVideoEngineTest, CanConstructDecoderForVp9EncoderFactory) { auto receive_channel = SetRecvParamsWithAllSupportedCodecs(); - EXPECT_TRUE(receive_channel->AddRecvStream( - cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE( + receive_channel->AddRecvStream(StreamParams::CreateLegacy(kSsrc))); } #endif // defined(RTC_ENABLE_VP9) TEST_F(WebRtcVideoEngineTest, PropagatesInputFrameTimestamp) { AddSupportedVideoCodecType("VP8"); - FakeCall* fake_call = new FakeCall(); + FakeCall* fake_call = new FakeCall(env_); call_.reset(fake_call); auto send_channel = SetSendParamsWithAllSupportedCodecs(); - EXPECT_TRUE( - send_channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(kSsrc))); - webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 60); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(1280, 720, webrtc::kNumMicrosecsPerSec / 60); EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &frame_forwarder)); send_channel->SetSend(true); @@ -769,25 +858,25 @@ 
TEST_F(WebRtcVideoEngineTest, PropagatesInputFrameTimestamp) { // Precision changes from nanosecond to millisecond. // Allow error to be no more than 1. - EXPECT_NEAR(cricket::VideoFormat::FpsToInterval(60) / 1E6, interval, 1); + EXPECT_NEAR(VideoFormat::FpsToInterval(60) / 1E6, interval, 1); last_timestamp = timestamp; } frame_forwarder.IncomingCapturedFrame( - frame_source.GetFrame(1280, 720, webrtc::VideoRotation::kVideoRotation_0, - rtc::kNumMicrosecsPerSec / 30)); + frame_source.GetFrame(1280, 720, VideoRotation::kVideoRotation_0, + webrtc::kNumMicrosecsPerSec / 30)); last_timestamp = stream->GetLastTimestamp(); for (int i = 0; i < 10; i++) { - frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame( - 1280, 720, webrtc::VideoRotation::kVideoRotation_0, - rtc::kNumMicrosecsPerSec / 30)); + frame_forwarder.IncomingCapturedFrame( + frame_source.GetFrame(1280, 720, VideoRotation::kVideoRotation_0, + webrtc::kNumMicrosecsPerSec / 30)); int64_t timestamp = stream->GetLastTimestamp(); int64_t interval = timestamp - last_timestamp; // Precision changes from nanosecond to millisecond. // Allow error to be no more than 1. - EXPECT_NEAR(cricket::VideoFormat::FpsToInterval(30) / 1E6, interval, 1); + EXPECT_NEAR(VideoFormat::FpsToInterval(30) / 1E6, interval, 1); last_timestamp = timestamp; } @@ -797,9 +886,9 @@ TEST_F(WebRtcVideoEngineTest, PropagatesInputFrameTimestamp) { } void WebRtcVideoEngineTest::AssignDefaultAptRtxTypes() { - std::vector engine_codecs = engine_.send_codecs(); + std::vector engine_codecs = engine_.LegacySendCodecs(); RTC_DCHECK(!engine_codecs.empty()); - for (const cricket::VideoCodec& codec : engine_codecs) { + for (const webrtc::Codec& codec : engine_codecs) { if (codec.name == "rtx") { int associated_payload_type; if (codec.GetParam(kCodecParamAssociatedPayloadType, @@ -811,10 +900,10 @@ void WebRtcVideoEngineTest::AssignDefaultAptRtxTypes() { } void WebRtcVideoEngineTest::AssignDefaultCodec() { - std::vector engine_codecs = engine_.send_codecs(); + std::vector engine_codecs = engine_.LegacySendCodecs(); RTC_DCHECK(!engine_codecs.empty()); bool codec_set = false; - for (const cricket::VideoCodec& codec : engine_codecs) { + for (const webrtc::Codec& codec : engine_codecs) { if (!codec_set && codec.name != "rtx" && codec.name != "red" && codec.name != "ulpfec" && codec.name != "flexfec-03") { default_codec_ = codec; @@ -827,18 +916,18 @@ void WebRtcVideoEngineTest::AssignDefaultCodec() { size_t WebRtcVideoEngineTest::GetEngineCodecIndex( const std::string& name) const { - const std::vector codecs = engine_.send_codecs(); + const std::vector codecs = engine_.LegacySendCodecs(); for (size_t i = 0; i < codecs.size(); ++i) { - const cricket::VideoCodec engine_codec = codecs[i]; + const Codec engine_codec = codecs[i]; if (!absl::EqualsIgnoreCase(name, engine_codec.name)) continue; // The tests only use H264 Constrained Baseline. Make sure we don't return // an internal H264 codec from the engine with a different H264 profile. 
if (absl::EqualsIgnoreCase(name.c_str(), kH264CodecName)) { - const absl::optional profile_level_id = + const std::optional profile_level_id = webrtc::ParseSdpForH264ProfileLevelId(engine_codec.params); if (profile_level_id->profile != - webrtc::H264Profile::kProfileConstrainedBaseline) { + H264Profile::kProfileConstrainedBaseline) { continue; } } @@ -849,28 +938,33 @@ size_t WebRtcVideoEngineTest::GetEngineCodecIndex( return -1; } -cricket::VideoCodec WebRtcVideoEngineTest::GetEngineCodec( - const std::string& name) const { - return engine_.send_codecs()[GetEngineCodecIndex(name)]; +Codec WebRtcVideoEngineTest::GetEngineCodec(const std::string& name) const { + return engine_.LegacySendCodecs()[GetEngineCodecIndex(name)]; } void WebRtcVideoEngineTest::AddSupportedVideoCodecType( - const std::string& name) { - encoder_factory_->AddSupportedVideoCodecType(name); + const std::string& name, + const std::vector& scalability_modes) { + encoder_factory_->AddSupportedVideoCodecType(name, scalability_modes); decoder_factory_->AddSupportedVideoCodecType(name); } +void WebRtcVideoEngineTest::AddSupportedVideoCodec(SdpVideoFormat format) { + encoder_factory_->AddSupportedVideoCodec(format); + decoder_factory_->AddSupportedVideoCodec(format); +} + std::unique_ptr WebRtcVideoEngineTest::SetSendParamsWithAllSupportedCodecs() { std::unique_ptr channel = engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(), - webrtc::CryptoOptions(), + CryptoOptions(), video_bitrate_allocator_factory_.get()); - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; // We need to look up the codec in the engine to get the correct payload type. for (const webrtc::SdpVideoFormat& format : encoder_factory_->GetSupportedFormats()) { - cricket::VideoCodec engine_codec = GetEngineCodec(format.name); + Codec engine_codec = GetEngineCodec(format.name); if (!absl::c_linear_search(parameters.codecs, engine_codec)) { parameters.codecs.push_back(engine_codec); } @@ -883,11 +977,11 @@ WebRtcVideoEngineTest::SetSendParamsWithAllSupportedCodecs() { std::unique_ptr WebRtcVideoEngineTest::SetRecvParamsWithSupportedCodecs( - const std::vector& codecs) { + const std::vector& codecs) { std::unique_ptr channel = engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions()); - cricket::VideoReceiverParameters parameters; + VideoOptions(), CryptoOptions()); + VideoReceiverParameters parameters; parameters.codecs = codecs; EXPECT_TRUE(channel->SetReceiverParameters(parameters)); @@ -896,10 +990,10 @@ WebRtcVideoEngineTest::SetRecvParamsWithSupportedCodecs( std::unique_ptr WebRtcVideoEngineTest::SetRecvParamsWithAllSupportedCodecs() { - std::vector codecs; + std::vector codecs; for (const webrtc::SdpVideoFormat& format : decoder_factory_->GetSupportedFormats()) { - cricket::VideoCodec engine_codec = GetEngineCodec(format.name); + Codec engine_codec = GetEngineCodec(format.name); if (!absl::c_linear_search(codecs, engine_codec)) { codecs.push_back(engine_codec); } @@ -910,7 +1004,7 @@ WebRtcVideoEngineTest::SetRecvParamsWithAllSupportedCodecs() { void WebRtcVideoEngineTest::ExpectRtpCapabilitySupport(const char* uri, bool supported) const { - const std::vector header_extensions = + const std::vector header_extensions = GetDefaultEnabledRtpHeaderExtensions(engine_); if (supported) { EXPECT_THAT(header_extensions, Contains(Field(&RtpExtension::uri, uri))); @@ -919,56 +1013,42 @@ void WebRtcVideoEngineTest::ExpectRtpCapabilitySupport(const char* uri, } } 
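The `ExpectRtpCapabilitySupport` helper above asserts header-extension support by composing gmock's `Contains` and `Field` matchers over the engine's default extension list. A standalone sketch of the same matcher shape, assuming a plain `std::vector<webrtc::RtpExtension>` in place of `GetDefaultEnabledRtpHeaderExtensions(engine_)` (the `kExampleUri` constant and test name are hypothetical stand-ins, not part of this patch):

#include <vector>

#include "api/rtp_parameters.h"  // webrtc::RtpExtension
#include "test/gmock.h"
#include "test/gtest.h"

namespace {

using ::testing::Contains;
using ::testing::Field;
using ::testing::Not;

// Hypothetical URI, used only to illustrate the matcher composition.
constexpr char kExampleUri[] = "urn:ietf:params:rtp-hdrext:toffset";

TEST(RtpCapabilityMatcherSketch, ContainsFieldComposition) {
  std::vector<webrtc::RtpExtension> extensions = {
      webrtc::RtpExtension(kExampleUri, /*id=*/1)};
  // Same assertion shape as ExpectRtpCapabilitySupport(uri, /*supported=*/true).
  EXPECT_THAT(extensions,
              Contains(Field(&webrtc::RtpExtension::uri, kExampleUri)));
  // And the negative form used when `supported` is false.
  EXPECT_THAT(extensions,
              Not(Contains(Field(&webrtc::RtpExtension::uri,
                                 "urn:ietf:params:rtp-hdrext:unsupported"))));
}

}  // namespace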
-TEST_F(WebRtcVideoEngineTest, SendsFeedbackAfterUnsignaledRtxPacket) { - // Setup a channel with VP8, RTX and transport sequence number header - // extension. Receive stream is not explicitly configured. - AddSupportedVideoCodecType("VP8"); - std::vector supported_codecs = - engine_.recv_codecs(/*include_rtx=*/true); - ASSERT_EQ(supported_codecs[1].name, "rtx"); - int rtx_payload_type = supported_codecs[1].id; - MockNetworkInterface network; - RtcpPacketParser rtcp_parser; - ON_CALL(network, SendRtcp) - .WillByDefault( - testing::DoAll(WithArg<0>([&](rtc::CopyOnWriteBuffer* packet) { - ASSERT_TRUE(rtcp_parser.Parse(*packet)); - }), - Return(true))); - std::unique_ptr send_channel = - engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(), - webrtc::CryptoOptions(), - video_bitrate_allocator_factory_.get()); +TEST_F(WebRtcVideoEngineTest, ReceiveBufferSizeViaFieldTrial) { + ScopedKeyValueConfig override_field_trials( + field_trials_, "WebRTC-ReceiveBufferSize/size_bytes:10000/"); std::unique_ptr receive_channel = engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions()); - cricket::VideoReceiverParameters parameters; - parameters.codecs = supported_codecs; - const int kTransportSeqExtensionId = 1; - parameters.extensions.push_back(RtpExtension( - RtpExtension::kTransportSequenceNumberUri, kTransportSeqExtensionId)); - ASSERT_TRUE(receive_channel->SetReceiverParameters(parameters)); - send_channel->SetInterface(&network); + VideoOptions(), CryptoOptions()); + FakeNetworkInterface network; receive_channel->SetInterface(&network); - send_channel->OnReadyToSend(true); - receive_channel->SetReceive(true); - - // Inject a RTX packet. - webrtc::RtpHeaderExtensionMap extension_map(parameters.extensions); - webrtc::RtpPacketReceived packet(&extension_map); - packet.SetMarker(true); - packet.SetPayloadType(rtx_payload_type); - packet.SetSsrc(999); - packet.SetExtension(7); - uint8_t* buf_ptr = packet.AllocatePayload(11); - memset(buf_ptr, 0, 11); // Pass MSAN (don't care about bytes 1-9) - receive_channel->OnPacketReceived(packet); + EXPECT_EQ(10000, network.recvbuf_size()); + receive_channel->SetInterface(nullptr); +} - // Expect that feedback is sent after a while. - time_controller_.AdvanceTime(webrtc::TimeDelta::Seconds(1)); - EXPECT_GT(rtcp_parser.transport_feedback()->num_packets(), 0); +TEST_F(WebRtcVideoEngineTest, TooLowReceiveBufferSizeViaFieldTrial) { + // 10000001 is too high, it will revert to the default + // kVideoRtpRecvBufferSize. + ScopedKeyValueConfig override_field_trials( + field_trials_, "WebRTC-ReceiveBufferSize/size_bytes:10000001/"); + std::unique_ptr receive_channel = + engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(), + VideoOptions(), CryptoOptions()); + FakeNetworkInterface network; + receive_channel->SetInterface(&network); + EXPECT_EQ(kVideoRtpRecvBufferSize, network.recvbuf_size()); + receive_channel->SetInterface(nullptr); +} - send_channel->SetInterface(nullptr); +TEST_F(WebRtcVideoEngineTest, TooHighReceiveBufferSizeViaFieldTrial) { + // 9999 is too low, it will revert to the default kVideoRtpRecvBufferSize. 
+ ScopedKeyValueConfig override_field_trials( + field_trials_, "WebRTC-ReceiveBufferSize/size_bytes:9999/"); + std::unique_ptr receive_channel = + engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(), + VideoOptions(), CryptoOptions()); + FakeNetworkInterface network; + receive_channel->SetInterface(&network); + EXPECT_EQ(kVideoRtpRecvBufferSize, network.recvbuf_size()); receive_channel->SetInterface(nullptr); } @@ -976,15 +1056,15 @@ TEST_F(WebRtcVideoEngineTest, UpdatesUnsignaledRtxSsrcAndRecoversPayload) { // Setup a channel with VP8, RTX and transport sequence number header // extension. Receive stream is not explicitly configured. AddSupportedVideoCodecType("VP8"); - std::vector supported_codecs = - engine_.recv_codecs(/*include_rtx=*/true); + std::vector supported_codecs = + engine_.LegacyRecvCodecs(/*include_rtx=*/true); ASSERT_EQ(supported_codecs[1].name, "rtx"); int rtx_payload_type = supported_codecs[1].id; std::unique_ptr receive_channel = engine_.CreateReceiveChannel(call_.get(), GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions()); - cricket::VideoReceiverParameters parameters; + VideoOptions(), CryptoOptions()); + VideoReceiverParameters parameters; parameters.codecs = supported_codecs; ASSERT_TRUE(receive_channel->SetReceiverParameters(parameters)); receive_channel->SetReceive(true); @@ -996,7 +1076,7 @@ TEST_F(WebRtcVideoEngineTest, UpdatesUnsignaledRtxSsrcAndRecoversPayload) { packet_1.SetMarker(false); receive_channel->OnPacketReceived(packet_1); - time_controller_.AdvanceTime(webrtc::TimeDelta::Millis(100)); + time_controller_.AdvanceTime(TimeDelta::Millis(100)); // No complete frame received. No decoder created yet. EXPECT_THAT(decoder_factory_->decoders(), IsEmpty()); @@ -1011,7 +1091,7 @@ TEST_F(WebRtcVideoEngineTest, UpdatesUnsignaledRtxSsrcAndRecoversPayload) { receive_channel->OnPacketReceived(rtx_packet); - time_controller_.AdvanceTime(webrtc::TimeDelta::Millis(0)); + time_controller_.AdvanceTime(TimeDelta::Millis(0)); ASSERT_THAT(decoder_factory_->decoders(), Not(IsEmpty())); EXPECT_EQ(decoder_factory_->decoders()[0]->GetNumFramesReceived(), 1); } @@ -1023,17 +1103,16 @@ TEST_F(WebRtcVideoEngineTest, UsesSimulcastAdapterForVp8Factories) { std::vector ssrcs = MAKE_VECTOR(kSsrcs3); - EXPECT_TRUE( - send_channel->AddSendStream(CreateSimStreamParams("cname", ssrcs))); + EXPECT_TRUE(send_channel->AddSendStream( + webrtc::CreateSimStreamParams("cname", ssrcs))); EXPECT_TRUE(send_channel->SetSend(true)); - webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 60); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(1280, 720, webrtc::kNumMicrosecsPerSec / 60); EXPECT_TRUE( send_channel->SetVideoSend(ssrcs.front(), nullptr, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); + time_controller_.AdvanceTime(TimeDelta::Zero()); ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(2)); // Verify that encoders are configured for simulcast through adapter @@ -1041,7 +1120,7 @@ TEST_F(WebRtcVideoEngineTest, UsesSimulcastAdapterForVp8Factories) { int prev_width = -1; for (size_t i = 0; i < encoder_factory_->encoders().size(); ++i) { ASSERT_TRUE(encoder_factory_->encoders()[i]->WaitForInitEncode()); - webrtc::VideoCodec codec_settings = + VideoCodec codec_settings = encoder_factory_->encoders()[i]->GetCodecSettings(); EXPECT_EQ(0, codec_settings.numberOfSimulcastStreams); 
EXPECT_GT(codec_settings.width, prev_width); @@ -1059,34 +1138,32 @@ TEST_F(WebRtcVideoEngineTest, ChannelWithH264CanChangeToVp8) { AddSupportedVideoCodecType("H264"); // Frame source. - webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 30); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(1280, 720, webrtc::kNumMicrosecsPerSec / 30); std::unique_ptr send_channel = engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(), - webrtc::CryptoOptions(), + CryptoOptions(), video_bitrate_allocator_factory_.get()); - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("H264")); EXPECT_TRUE(send_channel->SetSenderParameters(parameters)); - EXPECT_TRUE( - send_channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(kSsrc))); EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &frame_forwarder)); // Sending one frame will have allocate the encoder. frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); + time_controller_.AdvanceTime(TimeDelta::Zero()); ASSERT_EQ(1u, encoder_factory_->encoders().size()); - cricket::VideoSenderParameters new_parameters; + VideoSenderParameters new_parameters; new_parameters.codecs.push_back(GetEngineCodec("VP8")); EXPECT_TRUE(send_channel->SetSenderParameters(new_parameters)); // Sending one frame will switch encoder. frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); + time_controller_.AdvanceTime(TimeDelta::Zero()); EXPECT_EQ(1u, encoder_factory_->encoders().size()); } @@ -1098,28 +1175,27 @@ TEST_F(WebRtcVideoEngineTest, std::unique_ptr send_channel = engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(), - webrtc::CryptoOptions(), + CryptoOptions(), video_bitrate_allocator_factory_.get()); - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); EXPECT_TRUE(send_channel->SetSenderParameters(parameters)); std::vector ssrcs = MAKE_VECTOR(kSsrcs3); - EXPECT_TRUE( - send_channel->AddSendStream(CreateSimStreamParams("cname", ssrcs))); + EXPECT_TRUE(send_channel->AddSendStream( + webrtc::CreateSimStreamParams("cname", ssrcs))); EXPECT_TRUE(send_channel->SetSend(true)); // Send a fake frame, or else the media engine will configure the simulcast // encoder adapter at a low-enough size that it'll only create a single // encoder layer. 
- webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 30); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(1280, 720, webrtc::kNumMicrosecsPerSec / 30); EXPECT_TRUE( send_channel->SetVideoSend(ssrcs.front(), nullptr, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); + time_controller_.AdvanceTime(TimeDelta::Zero()); ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(2)); ASSERT_TRUE(encoder_factory_->encoders()[0]->WaitForInitEncode()); @@ -1138,22 +1214,20 @@ TEST_F(WebRtcVideoEngineTest, std::unique_ptr send_channel = engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(), - webrtc::CryptoOptions(), + CryptoOptions(), video_bitrate_allocator_factory_.get()); - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("H264")); EXPECT_TRUE(send_channel->SetSenderParameters(parameters)); - EXPECT_TRUE( - send_channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(kSsrc))); // Send a frame of 720p. This should trigger a "real" encoder initialization. - webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 30); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(1280, 720, webrtc::kNumMicrosecsPerSec / 30); EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, nullptr, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); + time_controller_.AdvanceTime(TimeDelta::Zero()); ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(1)); ASSERT_EQ(1u, encoder_factory_->encoders().size()); ASSERT_TRUE(encoder_factory_->encoders()[0]->WaitForInitEncode()); @@ -1170,24 +1244,23 @@ TEST_F(WebRtcVideoEngineTest, SimulcastEnabledForH264) { std::unique_ptr send_channel = engine_.CreateSendChannel(call_.get(), GetMediaConfig(), VideoOptions(), - webrtc::CryptoOptions(), + CryptoOptions(), video_bitrate_allocator_factory_.get()); - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("H264")); EXPECT_TRUE(send_channel->SetSenderParameters(parameters)); const std::vector ssrcs = MAKE_VECTOR(kSsrcs3); EXPECT_TRUE(send_channel->AddSendStream( - cricket::CreateSimStreamParams("cname", ssrcs))); + webrtc::CreateSimStreamParams("cname", ssrcs))); // Send a frame of 720p. This should trigger a "real" encoder initialization. 
- webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 30); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(1280, 720, webrtc::kNumMicrosecsPerSec / 30); EXPECT_TRUE(send_channel->SetVideoSend(ssrcs[0], nullptr, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); + time_controller_.AdvanceTime(TimeDelta::Zero()); ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(1)); ASSERT_EQ(1u, encoder_factory_->encoders().size()); @@ -1203,29 +1276,28 @@ TEST_F(WebRtcVideoEngineTest, SimulcastEnabledForH264) { TEST_F(WebRtcVideoEngineTest, Flexfec03SendCodecEnablesWithFieldTrial) { encoder_factory_->AddSupportedVideoCodecType("VP8"); - auto flexfec = Field("name", &VideoCodec::name, "flexfec-03"); + auto flexfec = Field("name", &Codec::name, "flexfec-03"); - EXPECT_THAT(engine_.send_codecs(), Not(Contains(flexfec))); + EXPECT_THAT(engine_.LegacySendCodecs(), Not(Contains(flexfec))); - webrtc::test::ScopedKeyValueConfig override_field_trials( + ScopedKeyValueConfig override_field_trials( field_trials_, "WebRTC-FlexFEC-03-Advertised/Enabled/"); - EXPECT_THAT(engine_.send_codecs(), Contains(flexfec)); + EXPECT_THAT(engine_.LegacySendCodecs(), Contains(flexfec)); } // Test that the FlexFEC "codec" gets assigned to the lower payload type range TEST_F(WebRtcVideoEngineTest, Flexfec03LowerPayloadTypeRange) { encoder_factory_->AddSupportedVideoCodecType("VP8"); - auto flexfec = Field("name", &VideoCodec::name, "flexfec-03"); + auto flexfec = Field("name", &Codec::name, "flexfec-03"); // FlexFEC is active with field trial. - webrtc::test::ScopedKeyValueConfig override_field_trials( + ScopedKeyValueConfig override_field_trials( field_trials_, "WebRTC-FlexFEC-03-Advertised/Enabled/"); - auto send_codecs = engine_.send_codecs(); - auto it = std::find_if(send_codecs.begin(), send_codecs.end(), - [](const cricket::VideoCodec& codec) { - return codec.name == "flexfec-03"; - }); + auto send_codecs = engine_.LegacySendCodecs(); + auto it = std::find_if( + send_codecs.begin(), send_codecs.end(), + [](const webrtc::Codec& codec) { return codec.name == "flexfec-03"; }); ASSERT_NE(it, send_codecs.end()); EXPECT_LE(35, it->id); EXPECT_GE(65, it->id); @@ -1252,11 +1324,11 @@ TEST_F(WebRtcVideoEngineTest, ReportSupportedAddedCodec) { // Set up external encoder factory with first codec, and initialize engine. encoder_factory_->AddSupportedVideoCodecType(kFakeExternalCodecName1); - std::vector codecs_before(engine_.send_codecs()); + std::vector codecs_before(engine_.LegacySendCodecs()); // Add second codec. encoder_factory_->AddSupportedVideoCodecType(kFakeExternalCodecName2); - std::vector codecs_after(engine_.send_codecs()); + std::vector codecs_after(engine_.LegacySendCodecs()); // The codec itself and RTX should have been added. 
EXPECT_EQ(codecs_before.size() + 2, codecs_after.size()); @@ -1272,20 +1344,20 @@ TEST_F(WebRtcVideoEngineTest, ReportRtxForExternalCodec) { encoder_factory_->AddSupportedVideoCodecType(kFakeCodecName); const size_t fake_codec_index = GetEngineCodecIndex(kFakeCodecName); - EXPECT_EQ("rtx", engine_.send_codecs().at(fake_codec_index + 1).name); + EXPECT_EQ("rtx", engine_.LegacySendCodecs().at(fake_codec_index + 1).name); } TEST_F(WebRtcVideoEngineTest, RegisterDecodersIfSupported) { AddSupportedVideoCodecType("VP8"); - cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); auto receive_channel = SetRecvParamsWithSupportedCodecs(parameters.codecs); - EXPECT_TRUE(receive_channel->AddRecvStream( - cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE( + receive_channel->AddRecvStream(StreamParams::CreateLegacy(kSsrc))); // Decoders are not created until they are used. - time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); + time_controller_.AdvanceTime(TimeDelta::Zero()); EXPECT_EQ(0u, decoder_factory_->decoders().size()); // Setting codecs of the same type should not reallocate the decoder. @@ -1304,15 +1376,15 @@ TEST_F(WebRtcVideoEngineTest, RegisterH264DecoderIfSupported) { // For now we add a FakeWebRtcVideoEncoderFactory to add H264 to supported // codecs. AddSupportedVideoCodecType("H264"); - std::vector codecs; + std::vector codecs; codecs.push_back(GetEngineCodec("H264")); auto receive_channel = SetRecvParamsWithSupportedCodecs(codecs); - EXPECT_TRUE(receive_channel->AddRecvStream( - cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE( + receive_channel->AddRecvStream(StreamParams::CreateLegacy(kSsrc))); // Decoders are not created until they are used. - time_controller_.AdvanceTime(webrtc::TimeDelta::Zero()); + time_controller_.AdvanceTime(TimeDelta::Zero()); ASSERT_EQ(0u, decoder_factory_->decoders().size()); } @@ -1321,44 +1393,41 @@ TEST_F(WebRtcVideoEngineTest, RegisterH264DecoderIfSupported) { TEST_F(WebRtcVideoEngineTest, GetSourcesWithNonExistingSsrc) { // Setup an recv stream with `kSsrc`. AddSupportedVideoCodecType("VP8"); - cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); auto receive_channel = SetRecvParamsWithSupportedCodecs(parameters.codecs); - EXPECT_TRUE(receive_channel->AddRecvStream( - cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE( + receive_channel->AddRecvStream(StreamParams::CreateLegacy(kSsrc))); // Call GetSources with |kSsrc + 1| which doesn't exist. - std::vector sources = - receive_channel->GetSources(kSsrc + 1); + std::vector sources = receive_channel->GetSources(kSsrc + 1); EXPECT_EQ(0u, sources.size()); } TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, NullFactories) { - std::unique_ptr encoder_factory; - std::unique_ptr decoder_factory; - webrtc::FieldTrialBasedConfig trials; + std::unique_ptr encoder_factory; + std::unique_ptr decoder_factory; + FieldTrialBasedConfig trials; WebRtcVideoEngine engine(std::move(encoder_factory), std::move(decoder_factory), trials); - EXPECT_EQ(0u, engine.send_codecs().size()); - EXPECT_EQ(0u, engine.recv_codecs().size()); + EXPECT_EQ(0u, engine.LegacySendCodecs().size()); + EXPECT_EQ(0u, engine.LegacyRecvCodecs().size()); } TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, EmptyFactories) { // `engine` take ownership of the factories. 
- webrtc::MockVideoEncoderFactory* encoder_factory = - new webrtc::MockVideoEncoderFactory(); - webrtc::MockVideoDecoderFactory* decoder_factory = - new webrtc::MockVideoDecoderFactory(); - webrtc::FieldTrialBasedConfig trials; + MockVideoEncoderFactory* encoder_factory = new MockVideoEncoderFactory(); + MockVideoDecoderFactory* decoder_factory = new MockVideoDecoderFactory(); + FieldTrialBasedConfig trials; WebRtcVideoEngine engine( - (std::unique_ptr(encoder_factory)), - (std::unique_ptr(decoder_factory)), trials); + (std::unique_ptr(encoder_factory)), + (std::unique_ptr(decoder_factory)), trials); // TODO(kron): Change to Times(1) once send and receive codecs are changed // to be treated independently. EXPECT_CALL(*encoder_factory, GetSupportedFormats()).Times(1); - EXPECT_EQ(0u, engine.send_codecs().size()); - EXPECT_EQ(0u, engine.recv_codecs().size()); + EXPECT_EQ(0u, engine.LegacySendCodecs().size()); + EXPECT_EQ(0u, engine.LegacyRecvCodecs().size()); EXPECT_CALL(*encoder_factory, Die()); EXPECT_CALL(*decoder_factory, Die()); } @@ -1369,31 +1438,26 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, EmptyFactories) { // new factories. TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) { // `engine` take ownership of the factories. - webrtc::MockVideoEncoderFactory* encoder_factory = - new webrtc::MockVideoEncoderFactory(); - webrtc::MockVideoDecoderFactory* decoder_factory = - new webrtc::MockVideoDecoderFactory(); - std::unique_ptr - rate_allocator_factory = - std::make_unique(); + MockVideoEncoderFactory* encoder_factory = new MockVideoEncoderFactory(); + MockVideoDecoderFactory* decoder_factory = new MockVideoDecoderFactory(); + std::unique_ptr rate_allocator_factory = + std::make_unique(); EXPECT_CALL(*rate_allocator_factory, - CreateVideoBitrateAllocator(Field(&webrtc::VideoCodec::codecType, - webrtc::kVideoCodecVP8))) - .WillOnce( - [] { return std::make_unique(); }); - webrtc::FieldTrialBasedConfig trials; + Create(_, Field(&VideoCodec::codecType, webrtc::kVideoCodecVP8))) + .WillOnce([] { return std::make_unique(); }); + FieldTrialBasedConfig trials; WebRtcVideoEngine engine( - (std::unique_ptr(encoder_factory)), - (std::unique_ptr(decoder_factory)), trials); - const webrtc::SdpVideoFormat vp8_format("VP8"); - const std::vector supported_formats = {vp8_format}; + (std::unique_ptr(encoder_factory)), + (std::unique_ptr(decoder_factory)), trials); + const SdpVideoFormat vp8_format("VP8"); + const std::vector supported_formats = {vp8_format}; EXPECT_CALL(*encoder_factory, GetSupportedFormats()) .WillRepeatedly(Return(supported_formats)); EXPECT_CALL(*decoder_factory, GetSupportedFormats()) .WillRepeatedly(Return(supported_formats)); // Verify the codecs from the engine. - const std::vector engine_codecs = engine.send_codecs(); + const std::vector engine_codecs = engine.LegacySendCodecs(); // Verify default codecs has been added correctly. EXPECT_EQ(5u, engine_codecs.size()); EXPECT_EQ("VP8", engine_codecs.at(0).name); @@ -1417,42 +1481,36 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) { EXPECT_EQ(kUlpfecCodecName, engine_codecs.at(4).name); int associated_payload_type; - EXPECT_TRUE(engine_codecs.at(1).GetParam( - cricket::kCodecParamAssociatedPayloadType, &associated_payload_type)); + EXPECT_TRUE(engine_codecs.at(1).GetParam(kCodecParamAssociatedPayloadType, + &associated_payload_type)); EXPECT_EQ(engine_codecs.at(0).id, associated_payload_type); // Verify default parameters has been added to the VP8 codec. 
VerifyCodecHasDefaultFeedbackParams(engine_codecs.at(0), /*lntf_expected=*/false); // Mock encoder creation. `engine` take ownership of the encoder. - const webrtc::SdpVideoFormat format("VP8"); - EXPECT_CALL(*encoder_factory, CreateVideoEncoder(format)).WillOnce([&] { + const SdpVideoFormat format("VP8"); + EXPECT_CALL(*encoder_factory, Create(_, format)).WillOnce([&] { return std::make_unique(nullptr); }); // Expect no decoder to be created at this point. The decoder will only be // created if we receive payload data. - EXPECT_CALL(*decoder_factory, CreateVideoDecoder(format)).Times(0); + EXPECT_CALL(*decoder_factory, Create).Times(0); // Create a call. - webrtc::RtcEventLogNull event_log; - webrtc::GlobalSimulatedTimeController time_controller( - webrtc::Timestamp::Millis(4711)); - auto task_queue_factory = time_controller.CreateTaskQueueFactory(); - webrtc::Call::Config call_config(&event_log); - webrtc::FieldTrialBasedConfig field_trials; - call_config.trials = &field_trials; - call_config.task_queue_factory = task_queue_factory.get(); - const auto call = absl::WrapUnique(webrtc::Call::Create(call_config)); + GlobalSimulatedTimeController time_controller(Timestamp::Millis(4711)); + CallConfig call_config(CreateEnvironment( + time_controller.CreateTaskQueueFactory(), time_controller.GetClock())); + const std::unique_ptr call = Call::Create(std::move(call_config)); // Create send channel. const int send_ssrc = 123; std::unique_ptr send_channel = engine.CreateSendChannel(call.get(), GetMediaConfig(), VideoOptions(), - webrtc::CryptoOptions(), - rate_allocator_factory.get()); + CryptoOptions(), rate_allocator_factory.get()); - cricket::VideoSenderParameters send_parameters; + VideoSenderParameters send_parameters; send_parameters.codecs.push_back(engine_codecs.at(0)); EXPECT_TRUE(send_channel->SetSenderParameters(send_parameters)); send_channel->OnReadyToSend(true); @@ -1461,25 +1519,24 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) { EXPECT_TRUE(send_channel->SetSend(true)); // Set capturer. - webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 30); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(1280, 720, webrtc::kNumMicrosecsPerSec / 30); EXPECT_TRUE(send_channel->SetVideoSend(send_ssrc, nullptr, &frame_forwarder)); // Sending one frame will allocate the encoder. frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - time_controller.AdvanceTime(webrtc::TimeDelta::Zero()); + time_controller.AdvanceTime(TimeDelta::Zero()); // Create recv channel. const int recv_ssrc = 321; std::unique_ptr receive_channel = engine.CreateReceiveChannel(call.get(), GetMediaConfig(), VideoOptions(), - webrtc::CryptoOptions()); + CryptoOptions()); - cricket::VideoReceiverParameters recv_parameters; + VideoReceiverParameters recv_parameters; recv_parameters.codecs.push_back(engine_codecs.at(0)); EXPECT_TRUE(receive_channel->SetReceiverParameters(recv_parameters)); - EXPECT_TRUE(receive_channel->AddRecvStream( - cricket::StreamParams::CreateLegacy(recv_ssrc))); + EXPECT_TRUE( + receive_channel->AddRecvStream(StreamParams::CreateLegacy(recv_ssrc))); // Remove streams previously added to free the encoder and decoder instance. 
EXPECT_CALL(*encoder_factory, Die()); @@ -1491,27 +1548,25 @@ TEST(WebRtcVideoEngineNewVideoCodecFactoryTest, Vp8) { TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) { encoder_factory_->AddSupportedVideoCodecType("VP8"); - std::unique_ptr fake_call(new FakeCall()); + auto fake_call = std::make_unique(env_); auto send_channel = SetSendParamsWithAllSupportedCodecs(); - ASSERT_TRUE( - send_channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrc))); - cricket::VideoCodec codec = GetEngineCodec("VP8"); - cricket::VideoSenderParameters parameters; + ASSERT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(kSsrc))); + Codec codec = GetEngineCodec("VP8"); + VideoSenderParameters parameters; parameters.codecs.push_back(codec); send_channel->OnReadyToSend(true); send_channel->SetSend(true); ASSERT_TRUE(send_channel->SetSenderParameters(parameters)); - webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 30); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(1280, 720, webrtc::kNumMicrosecsPerSec / 30); VideoOptions options; EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, &options, &frame_forwarder)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(1)); - EXPECT_EQ(webrtc::VideoCodecMode::kRealtimeVideo, + EXPECT_EQ(VideoCodecMode::kRealtimeVideo, encoder_factory_->encoders().back()->GetCodecSettings().mode); EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, &options, &frame_forwarder)); @@ -1526,7 +1581,7 @@ TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) { // adapter case, this will result in two calls since InitEncode triggers a // a new instance. ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(2)); - EXPECT_EQ(webrtc::VideoCodecMode::kScreensharing, + EXPECT_EQ(VideoCodecMode::kScreensharing, encoder_factory_->encoders().back()->GetCodecSettings().mode); EXPECT_TRUE(send_channel->SetVideoSend(kSsrc, &options, &frame_forwarder)); @@ -1541,7 +1596,7 @@ TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) { // a non `is_screencast` option just to verify it doesn't affect recreation. frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(3)); - EXPECT_EQ(webrtc::VideoCodecMode::kRealtimeVideo, + EXPECT_EQ(VideoCodecMode::kRealtimeVideo, encoder_factory_->encoders().back()->GetCodecSettings().mode); // Remove stream previously added to free the external encoder instance. 
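Several hunks in this file replace the old `Call::Config(&event_log_)` plus explicit task-queue-factory wiring with `Environment`-based construction. Condensed to its essentials, the new pattern used by the fixtures above looks roughly as follows (a sketch only, not code from this patch; `field_trials` and `time_controller` stand for the members already declared in the fixtures):

#include <memory>

#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"
#include "call/call.h"
#include "call/call_config.h"
#include "test/scoped_key_value_config.h"
#include "test/time_controller/simulated_time_controller.h"

std::unique_ptr<webrtc::Call> CreateTestCall(
    webrtc::test::ScopedKeyValueConfig& field_trials,
    webrtc::GlobalSimulatedTimeController& time_controller) {
  // The Environment bundles the field trials, task queue factory and clock
  // that previously had to be attached to Call::Config one by one.
  webrtc::Environment env = webrtc::CreateEnvironment(
      &field_trials, time_controller.CreateTaskQueueFactory(),
      time_controller.GetClock());
  // CallConfig now only needs the Environment; no RtcEventLogNull or
  // separate trials/task_queue_factory fields are set anymore.
  return webrtc::Call::Create(webrtc::CallConfig(env));
}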
@@ -1551,63 +1606,54 @@ TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) { TEST_F(WebRtcVideoEngineTest, SetVideoRtxEnabled) { AddSupportedVideoCodecType("VP8"); - std::vector send_codecs; - std::vector recv_codecs; + std::vector send_codecs; + std::vector recv_codecs; - webrtc::test::ScopedKeyValueConfig field_trials; + ScopedKeyValueConfig field_trials; // Don't want RTX - send_codecs = engine_.send_codecs(false); + send_codecs = engine_.LegacySendCodecs(false); EXPECT_FALSE(HasAnyRtxCodec(send_codecs)); - recv_codecs = engine_.recv_codecs(false); + recv_codecs = engine_.LegacyRecvCodecs(false); EXPECT_FALSE(HasAnyRtxCodec(recv_codecs)); // Want RTX - send_codecs = engine_.send_codecs(true); + send_codecs = engine_.LegacySendCodecs(true); EXPECT_TRUE(HasAnyRtxCodec(send_codecs)); - recv_codecs = engine_.recv_codecs(true); + recv_codecs = engine_.LegacyRecvCodecs(true); EXPECT_TRUE(HasAnyRtxCodec(recv_codecs)); } class WebRtcVideoChannelEncodedFrameCallbackTest : public ::testing::Test { protected: - webrtc::Call::Config GetCallConfig( - webrtc::RtcEventLogNull* event_log, - webrtc::TaskQueueFactory* task_queue_factory) { - webrtc::Call::Config call_config(event_log); - call_config.task_queue_factory = task_queue_factory; - call_config.trials = &field_trials_; - return call_config; - } - WebRtcVideoChannelEncodedFrameCallbackTest() - : task_queue_factory_(time_controller_.CreateTaskQueueFactory()), - call_(absl::WrapUnique(webrtc::Call::Create( - GetCallConfig(&event_log_, task_queue_factory_.get())))), + : env_(CreateEnvironment(&field_trials_, + time_controller_.CreateTaskQueueFactory(), + time_controller_.GetClock())), + call_(Call::Create(CallConfig(env_))), video_bitrate_allocator_factory_( webrtc::CreateBuiltinVideoBitrateAllocatorFactory()), engine_( - std::make_unique>(), - std::make_unique( - []() { return std::make_unique(); }, + std::make_unique< + VideoEncoderFactoryTemplate>(), + std::make_unique( + []() { return std::make_unique(); }, kSdpVideoFormats), field_trials_) { send_channel_ = engine_.CreateSendChannel( - call_.get(), cricket::MediaConfig(), cricket::VideoOptions(), - webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get()); + call_.get(), MediaConfig(), VideoOptions(), CryptoOptions(), + video_bitrate_allocator_factory_.get()); receive_channel_ = engine_.CreateReceiveChannel( - call_.get(), cricket::MediaConfig(), cricket::VideoOptions(), - webrtc::CryptoOptions()); + call_.get(), MediaConfig(), VideoOptions(), CryptoOptions()); network_interface_.SetDestination(receive_channel_.get()); send_channel_->SetInterface(&network_interface_); receive_channel_->SetInterface(&network_interface_); - cricket::VideoReceiverParameters parameters; - parameters.codecs = engine_.recv_codecs(); + VideoReceiverParameters parameters; + parameters.codecs = engine_.LegacyRecvCodecs(); receive_channel_->SetReceiverParameters(parameters); receive_channel_->SetReceive(true); } @@ -1629,32 +1675,30 @@ class WebRtcVideoChannelEncodedFrameCallbackTest : public ::testing::Test { EXPECT_EQ(renderer_.num_rendered_frames(), 1); } - static const std::vector kSdpVideoFormats; - webrtc::GlobalSimulatedTimeController time_controller_{ - Timestamp::Seconds(1000)}; - webrtc::test::ScopedKeyValueConfig field_trials_; - webrtc::RtcEventLogNull event_log_; - std::unique_ptr task_queue_factory_; - std::unique_ptr call_; - std::unique_ptr + static const std::vector kSdpVideoFormats; + GlobalSimulatedTimeController time_controller_{Timestamp::Seconds(1000)}; + 
ScopedKeyValueConfig field_trials_; + Environment env_; + std::unique_ptr call_; + std::unique_ptr video_bitrate_allocator_factory_; WebRtcVideoEngine engine_; std::unique_ptr send_channel_; std::unique_ptr receive_channel_; - cricket::FakeNetworkInterface network_interface_; - cricket::FakeVideoRenderer renderer_; + FakeNetworkInterface network_interface_; + FakeVideoRenderer renderer_; }; -const std::vector +const std::vector WebRtcVideoChannelEncodedFrameCallbackTest::kSdpVideoFormats = { - webrtc::SdpVideoFormat("VP8")}; + SdpVideoFormat::VP8()}; TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, SetEncodedFrameBufferFunction_DefaultStream) { - testing::MockFunction callback; + testing::MockFunction callback; EXPECT_CALL(callback, Call); EXPECT_TRUE(receive_channel_->AddDefaultRecvStreamForTesting( - cricket::StreamParams::CreateLegacy(kSsrc))); + StreamParams::CreateLegacy(kSsrc))); receive_channel_->SetRecordableEncodedFrameCallback(/*ssrc=*/0, callback.AsStdFunction()); EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_)); @@ -1666,10 +1710,10 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, SetEncodedFrameBufferFunction_MatchSsrcWithDefaultStream) { - testing::MockFunction callback; + testing::MockFunction callback; EXPECT_CALL(callback, Call); EXPECT_TRUE(receive_channel_->AddDefaultRecvStreamForTesting( - cricket::StreamParams::CreateLegacy(kSsrc))); + StreamParams::CreateLegacy(kSsrc))); EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_)); receive_channel_->SetRecordableEncodedFrameCallback(kSsrc, callback.AsStdFunction()); @@ -1681,10 +1725,10 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, SetEncodedFrameBufferFunction_MatchSsrc) { - testing::MockFunction callback; + testing::MockFunction callback; EXPECT_CALL(callback, Call); - EXPECT_TRUE(receive_channel_->AddRecvStream( - cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE( + receive_channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc))); EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_)); receive_channel_->SetRecordableEncodedFrameCallback(kSsrc, callback.AsStdFunction()); @@ -1697,10 +1741,10 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, SetEncodedFrameBufferFunction_MismatchSsrc) { testing::StrictMock< - testing::MockFunction> + testing::MockFunction> callback; - EXPECT_TRUE(receive_channel_->AddRecvStream( - cricket::StreamParams::CreateLegacy(kSsrc + 1))); + EXPECT_TRUE( + receive_channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc + 1))); EXPECT_TRUE(receive_channel_->SetSink(kSsrc + 1, &renderer_)); receive_channel_->SetRecordableEncodedFrameCallback(kSsrc, callback.AsStdFunction()); @@ -1712,10 +1756,10 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, SetEncodedFrameBufferFunction_MismatchSsrcWithDefaultStream) { testing::StrictMock< - testing::MockFunction> + testing::MockFunction> callback; EXPECT_TRUE(receive_channel_->AddDefaultRecvStreamForTesting( - cricket::StreamParams::CreateLegacy(kSsrc + 1))); + StreamParams::CreateLegacy(kSsrc + 1))); EXPECT_TRUE(receive_channel_->SetSink(kSsrc + 1, &renderer_)); receive_channel_->SetRecordableEncodedFrameCallback(kSsrc, callback.AsStdFunction()); @@ -1726,10 +1770,10 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, } TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, DoesNotDecodeWhenDisabled) { - 
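// The MockFunction declarations in these tests lost their template arguments
// in extraction; from the surrounding calls they are almost certainly
// MockFunction<void(const RecordableEncodedFrame&)>. A hedged sketch of the
// callback registration the tests exercise (helper name and the exact
// receive-channel interface are assumptions for illustration):
void RegisterEncodedFrameSinkSketch(
    VideoMediaReceiveChannelInterface* receive_channel,
    uint32_t ssrc,
    std::function<void(const RecordableEncodedFrame&)> on_frame) {
  // ssrc == 0 targets the default (unsignaled) receive stream; a non-zero
  // ssrc only sees frames if it matches an added receive stream.
  receive_channel->SetRecordableEncodedFrameCallback(ssrc,
                                                     std::move(on_frame));
}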
testing::MockFunction callback; + testing::MockFunction callback; EXPECT_CALL(callback, Call); EXPECT_TRUE(receive_channel_->AddDefaultRecvStreamForTesting( - cricket::StreamParams::CreateLegacy(kSsrc))); + StreamParams::CreateLegacy(kSsrc))); receive_channel_->SetRecordableEncodedFrameCallback(/*ssrc=*/0, callback.AsStdFunction()); EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_)); @@ -1753,54 +1797,53 @@ TEST_F(WebRtcVideoChannelEncodedFrameCallbackTest, DoesNotDecodeWhenDisabled) { class WebRtcVideoChannelBaseTest : public ::testing::Test { protected: WebRtcVideoChannelBaseTest() - : task_queue_factory_(time_controller_.CreateTaskQueueFactory()), + : env_(CreateEnvironment(&field_trials_, + time_controller_.CreateTaskQueueFactory(), + time_controller_.GetClock())), video_bitrate_allocator_factory_( webrtc::CreateBuiltinVideoBitrateAllocatorFactory()), - engine_(std::make_unique>(), - std::make_unique>(), - field_trials_) {} + engine_( + std::make_unique< + VideoEncoderFactoryTemplate>(), + std::make_unique< + VideoDecoderFactoryTemplate>(), + field_trials_) {} void SetUp() override { // One testcase calls SetUp in a loop, only create call_ once. if (!call_) { - webrtc::Call::Config call_config(&event_log_); - call_config.task_queue_factory = task_queue_factory_.get(); - call_config.trials = &field_trials_; - call_.reset(webrtc::Call::Create(call_config)); + call_ = Call::Create(CallConfig(env_)); } - cricket::MediaConfig media_config; + MediaConfig media_config; // Disabling cpu overuse detection actually disables quality scaling too; it // implies DegradationPreference kMaintainResolution. Automatic scaling // needs to be disabled, otherwise, tests which check the size of received // frames become flaky. media_config.video.enable_cpu_adaptation = false; send_channel_ = engine_.CreateSendChannel( - call_.get(), media_config, cricket::VideoOptions(), - webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get()); - receive_channel_ = engine_.CreateReceiveChannel(call_.get(), media_config, - cricket::VideoOptions(), - webrtc::CryptoOptions()); + call_.get(), media_config, VideoOptions(), CryptoOptions(), + video_bitrate_allocator_factory_.get()); + receive_channel_ = engine_.CreateReceiveChannel( + call_.get(), media_config, VideoOptions(), CryptoOptions()); send_channel_->OnReadyToSend(true); receive_channel_->SetReceive(true); network_interface_.SetDestination(receive_channel_.get()); send_channel_->SetInterface(&network_interface_); receive_channel_->SetInterface(&network_interface_); - cricket::VideoReceiverParameters parameters; - parameters.codecs = engine_.send_codecs(); + VideoReceiverParameters parameters; + parameters.codecs = engine_.LegacySendCodecs(); receive_channel_->SetReceiverParameters(parameters); EXPECT_TRUE(send_channel_->AddSendStream(DefaultSendStreamParams())); - frame_forwarder_ = std::make_unique(); - frame_source_ = std::make_unique( - 640, 480, rtc::kNumMicrosecsPerSec / kFramerate); + frame_forwarder_ = std::make_unique(); + frame_source_ = std::make_unique( + 640, 480, webrtc::kNumMicrosecsPerSec / kFramerate); EXPECT_TRUE( send_channel_->SetVideoSend(kSsrc, nullptr, frame_forwarder_.get())); } @@ -1809,7 +1852,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { WebRtcVideoSendChannel* SendImpl() { // Note that this function requires intimate knowledge of how the channel // was created. 
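// The VideoEncoderFactoryTemplate / VideoDecoderFactoryTemplate
// instantiations above lost their template-argument lists in extraction (the
// "<...>" adapter packs were stripped). As a hedged illustration only, such
// factories are built with one adapter per supported codec, for example a
// VP8-only pair; the adapter list actually used by this fixture is not
// recoverable from this hunk and may differ.
auto MakeVp8OnlyFactoriesSketch() {
  auto encoder_factory = std::make_unique<
      VideoEncoderFactoryTemplate<LibvpxVp8EncoderTemplateAdapter>>();
  auto decoder_factory = std::make_unique<
      VideoDecoderFactoryTemplate<LibvpxVp8DecoderTemplateAdapter>>();
  return std::make_pair(std::move(encoder_factory),
                        std::move(decoder_factory));
}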
- return static_cast(send_channel_.get()); + return static_cast(send_channel_.get()); } // Utility method to setup an additional stream to send and receive video. @@ -1817,8 +1860,8 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { void SetUpSecondStream() { SetUpSecondStreamWithNoRecv(); // Setup recv for second stream. - EXPECT_TRUE(receive_channel_->AddRecvStream( - cricket::StreamParams::CreateLegacy(kSsrc + 2))); + EXPECT_TRUE( + receive_channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc + 2))); // Make the second renderer available for use by a new stream. EXPECT_TRUE(receive_channel_->SetSink(kSsrc + 2, &renderer2_)); } @@ -1827,17 +1870,17 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { // This is required if you want to test unsignalled recv of video rtp packets. void SetUpSecondStreamWithNoRecv() { // SetUp() already added kSsrc make sure duplicate SSRCs cant be added. - EXPECT_TRUE(receive_channel_->AddRecvStream( - cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE( + receive_channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc))); EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_)); - EXPECT_FALSE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrc))); - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrc + 2))); + EXPECT_FALSE( + send_channel_->AddSendStream(StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE( + send_channel_->AddSendStream(StreamParams::CreateLegacy(kSsrc + 2))); // We dont add recv for the second stream. // Setup the receive and renderer for second stream after send. - frame_forwarder_2_ = std::make_unique(); + frame_forwarder_2_ = std::make_unique(); EXPECT_TRUE(send_channel_->SetVideoSend(kSsrc + 2, nullptr, frame_forwarder_2_.get())); } @@ -1856,14 +1899,14 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { bool SetDefaultCodec() { return SetOneCodec(DefaultCodec()); } - bool SetOneCodec(const cricket::VideoCodec& codec) { - frame_source_ = std::make_unique( - kVideoWidth, kVideoHeight, rtc::kNumMicrosecsPerSec / kFramerate); + bool SetOneCodec(const Codec& codec) { + frame_source_ = std::make_unique( + kVideoWidth, kVideoHeight, webrtc::kNumMicrosecsPerSec / kFramerate); bool sending = SendImpl()->sending(); bool success = SetSend(false); if (success) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(codec); success = send_channel_->SetSenderParameters(parameters); } @@ -1894,17 +1937,17 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { return network_interface_.NumRtpPackets(ssrc); } int NumSentSsrcs() { return network_interface_.NumSentSsrcs(); } - rtc::CopyOnWriteBuffer GetRtpPacket(int index) { + CopyOnWriteBuffer GetRtpPacket(int index) { return network_interface_.GetRtpPacket(index); } - static int GetPayloadType(rtc::CopyOnWriteBuffer p) { + static int GetPayloadType(CopyOnWriteBuffer p) { RtpPacket header; EXPECT_TRUE(header.Parse(std::move(p))); return header.PayloadType(); } // Tests that we can send and receive frames. 
- void SendAndReceive(const cricket::VideoCodec& codec) { + void SendAndReceive(const Codec& codec) { EXPECT_TRUE(SetOneCodec(codec)); EXPECT_TRUE(SetSend(true)); receive_channel_->SetDefaultSink(&renderer_); @@ -1914,7 +1957,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { EXPECT_EQ(codec.id, GetPayloadType(GetRtpPacket(0))); } - void SendReceiveManyAndGetStats(const cricket::VideoCodec& codec, + void SendReceiveManyAndGetStats(const Codec& codec, int duration_sec, int fps) { EXPECT_TRUE(SetOneCodec(codec)); @@ -1930,14 +1973,14 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { EXPECT_EQ(codec.id, GetPayloadType(GetRtpPacket(0))); } - cricket::VideoSenderInfo GetSenderStats(size_t i) { + VideoSenderInfo GetSenderStats(size_t i) { VideoMediaSendInfo send_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); return send_info.senders[i]; } - cricket::VideoReceiverInfo GetReceiverStats(size_t i) { - cricket::VideoMediaReceiveInfo info; + VideoReceiverInfo GetReceiverStats(size_t i) { + VideoMediaReceiveInfo info; EXPECT_TRUE(receive_channel_->GetStats(&info)); return info.receivers[i]; } @@ -1945,7 +1988,7 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { // Two streams one channel tests. // Tests that we can send and receive frames. - void TwoStreamsSendAndReceive(const cricket::VideoCodec& codec) { + void TwoStreamsSendAndReceive(const Codec& codec) { SetUpSecondStream(); // Test sending and receiving on first stream. SendAndReceive(codec); @@ -1954,45 +1997,43 @@ class WebRtcVideoChannelBaseTest : public ::testing::Test { EXPECT_GT(NumRtpPackets(), 0); } - cricket::VideoCodec GetEngineCodec(const std::string& name) { - for (const cricket::VideoCodec& engine_codec : engine_.send_codecs()) { + Codec GetEngineCodec(const std::string& name) { + for (const webrtc::Codec& engine_codec : engine_.LegacySendCodecs()) { if (absl::EqualsIgnoreCase(name, engine_codec.name)) return engine_codec; } // This point should never be reached. 
ADD_FAILURE() << "Unrecognized codec name: " << name; - return cricket::CreateVideoCodec(0, ""); + return webrtc::CreateVideoCodec(0, ""); } - cricket::VideoCodec DefaultCodec() { return GetEngineCodec("VP8"); } + Codec DefaultCodec() { return GetEngineCodec("VP8"); } - cricket::StreamParams DefaultSendStreamParams() { - return cricket::StreamParams::CreateLegacy(kSsrc); + StreamParams DefaultSendStreamParams() { + return StreamParams::CreateLegacy(kSsrc); } - webrtc::GlobalSimulatedTimeController time_controller_{ - Timestamp::Seconds(1000)}; + GlobalSimulatedTimeController time_controller_{Timestamp::Seconds(1000)}; - webrtc::RtcEventLogNull event_log_; - webrtc::test::ScopedKeyValueConfig field_trials_; - std::unique_ptr override_field_trials_; - std::unique_ptr task_queue_factory_; - std::unique_ptr call_; - std::unique_ptr + ScopedKeyValueConfig field_trials_; + std::unique_ptr override_field_trials_; + Environment env_; + std::unique_ptr call_; + std::unique_ptr video_bitrate_allocator_factory_; WebRtcVideoEngine engine_; - std::unique_ptr frame_source_; - std::unique_ptr frame_forwarder_; - std::unique_ptr frame_forwarder_2_; + std::unique_ptr frame_source_; + std::unique_ptr frame_forwarder_; + std::unique_ptr frame_forwarder_2_; std::unique_ptr send_channel_; std::unique_ptr receive_channel_; - cricket::FakeNetworkInterface network_interface_; - cricket::FakeVideoRenderer renderer_; + FakeNetworkInterface network_interface_; + FakeVideoRenderer renderer_; // Used by test cases where 2 streams are run on the same channel. - cricket::FakeVideoRenderer renderer2_; + FakeVideoRenderer renderer2_; }; // Test that SetSend works. @@ -2032,8 +2073,8 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStats) { const int kFps = 10; SendReceiveManyAndGetStats(DefaultCodec(), kDurationSec, kFps); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -2089,17 +2130,15 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStats) { // Test that stats work properly for a conf call with multiple recv streams. 
TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleRecvStreams) { - cricket::FakeVideoRenderer renderer1, renderer2; + FakeVideoRenderer renderer1, renderer2; EXPECT_TRUE(SetOneCodec(DefaultCodec())); - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(DefaultCodec()); parameters.conference_mode = true; EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); EXPECT_TRUE(SetSend(true)); - EXPECT_TRUE( - receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1))); - EXPECT_TRUE( - receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2))); + EXPECT_TRUE(receive_channel_->AddRecvStream(StreamParams::CreateLegacy(1))); + EXPECT_TRUE(receive_channel_->AddRecvStream(StreamParams::CreateLegacy(2))); EXPECT_TRUE(receive_channel_->SetSink(1, &renderer1)); EXPECT_TRUE(receive_channel_->SetSink(2, &renderer2)); EXPECT_EQ(0, renderer1.num_rendered_frames()); @@ -2114,8 +2153,8 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleRecvStreams) { EXPECT_TRUE(send_channel_->SetSend(false)); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -2145,12 +2184,12 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleSendStreams) { // Normal setup; note that we set the SSRC explicitly to ensure that // it will come first in the senders map. EXPECT_TRUE(SetOneCodec(DefaultCodec())); - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(DefaultCodec()); parameters.conference_mode = true; EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); - EXPECT_TRUE(receive_channel_->AddRecvStream( - cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE( + receive_channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc))); EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_)); EXPECT_TRUE(SetSend(true)); SendFrame(); @@ -2158,24 +2197,23 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleSendStreams) { EXPECT_FRAME(1, kVideoWidth, kVideoHeight); // Add an additional capturer, and hook up a renderer to receive it. 
- cricket::FakeVideoRenderer renderer2; - webrtc::test::FrameForwarder frame_forwarder; + FakeVideoRenderer renderer2; + FrameForwarder frame_forwarder; const int kTestWidth = 160; const int kTestHeight = 120; - cricket::FakeFrameSource frame_source(kTestWidth, kTestHeight, - rtc::kNumMicrosecsPerSec / 5); - EXPECT_TRUE( - send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(5678))); + FakeFrameSource frame_source(kTestWidth, kTestHeight, + webrtc::kNumMicrosecsPerSec / 5); + EXPECT_TRUE(send_channel_->AddSendStream(StreamParams::CreateLegacy(5678))); EXPECT_TRUE(send_channel_->SetVideoSend(5678, nullptr, &frame_forwarder)); - EXPECT_TRUE(receive_channel_->AddRecvStream( - cricket::StreamParams::CreateLegacy(5678))); + EXPECT_TRUE( + receive_channel_->AddRecvStream(StreamParams::CreateLegacy(5678))); EXPECT_TRUE(receive_channel_->SetSink(5678, &renderer2)); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); time_controller_.AdvanceTime(kFrameDuration); EXPECT_FRAME_ON_RENDERER(renderer2, 1, kTestWidth, kTestHeight); // Get stats, and make sure they are correct for two senders - cricket::VideoMediaSendInfo send_info; + VideoMediaSendInfo send_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); ASSERT_EQ(2U, send_info.senders.size()); @@ -2194,9 +2232,27 @@ TEST_F(WebRtcVideoChannelBaseTest, GetStatsMultipleSendStreams) { send_channel_->SetVideoSend(5678, nullptr, nullptr); } +TEST_F(WebRtcVideoChannelBaseTest, GetStatsDoesNotResetAfterCodecChange) { + const int kDurationSec = 3; + const int kFps = 10; + SendReceiveManyAndGetStats(GetEngineCodec("VP9"), kDurationSec, kFps); + + const Codec& new_codec = GetEngineCodec("VP8"); + EXPECT_TRUE(SetOneCodec(new_codec)); + EXPECT_TRUE(SetSend(true)); + VideoMediaSendInfo send_info; + EXPECT_TRUE(send_channel_->GetStats(&send_info)); + ASSERT_EQ(1U, send_info.senders.size()); + EXPECT_EQ(send_info.senders[0].payload_bytes_sent, + NumRtpBytes() - kRtpHeaderSize * NumRtpPackets()); + EXPECT_EQ(NumRtpPackets(), send_info.senders[0].packets_sent); + ASSERT_TRUE(send_info.senders[0].codec_payload_type); + EXPECT_EQ(new_codec.id, *send_info.senders[0].codec_payload_type); +} + // Test that we can set the bandwidth. TEST_F(WebRtcVideoChannelBaseTest, SetSendBandwidth) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(DefaultCodec()); parameters.max_bandwidth_bps = -1; // <= 0 means unlimited. EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); @@ -2228,8 +2284,7 @@ TEST_F(WebRtcVideoChannelBaseTest, SetSendSsrcAfterSetCodecs) { // Remove stream added in Setup. 
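// The payload-byte expectation in GetStatsDoesNotResetAfterCodecChange above
// is plain bookkeeping: every captured packet carries a fixed kRtpHeaderSize
// of header overhead, so the accumulated payload equals total RTP bytes minus
// that per-packet overhead. kRtpHeaderSize is defined elsewhere in this file;
// the 12 used below is the minimum RTP header size and is an assumption for
// illustration only.
int64_t ExpectedPayloadBytesSketch(int64_t total_rtp_bytes, int num_packets) {
  constexpr int64_t kAssumedRtpHeaderSize = 12;
  return total_rtp_bytes - kAssumedRtpHeaderSize * num_packets;
}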
EXPECT_TRUE(send_channel_->RemoveSendStream(kSsrc)); EXPECT_TRUE(SetDefaultCodec()); - EXPECT_TRUE( - send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(999))); + EXPECT_TRUE(send_channel_->AddSendStream(StreamParams::CreateLegacy(999))); EXPECT_TRUE( send_channel_->SetVideoSend(999u, nullptr, frame_forwarder_.get())); EXPECT_TRUE(SetSend(true)); @@ -2279,8 +2334,7 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveSendStreams) { EXPECT_TRUE(send_channel_->RemoveSendStream(kSsrc)); int rtp_packets = NumRtpPackets(); - EXPECT_TRUE( - send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(789u))); + EXPECT_TRUE(send_channel_->AddSendStream(StreamParams::CreateLegacy(789u))); EXPECT_TRUE( send_channel_->SetVideoSend(789u, nullptr, frame_forwarder_.get())); EXPECT_EQ(rtp_packets, NumRtpPackets()); @@ -2295,17 +2349,15 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveSendStreams) { // Tests the behavior of incoming streams in a conference scenario. TEST_F(WebRtcVideoChannelBaseTest, SimulateConference) { - cricket::FakeVideoRenderer renderer1, renderer2; + FakeVideoRenderer renderer1, renderer2; EXPECT_TRUE(SetDefaultCodec()); - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(DefaultCodec()); parameters.conference_mode = true; EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); EXPECT_TRUE(SetSend(true)); - EXPECT_TRUE( - receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1))); - EXPECT_TRUE( - receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2))); + EXPECT_TRUE(receive_channel_->AddRecvStream(StreamParams::CreateLegacy(1))); + EXPECT_TRUE(receive_channel_->AddRecvStream(StreamParams::CreateLegacy(2))); EXPECT_TRUE(receive_channel_->SetSink(1, &renderer1)); EXPECT_TRUE(receive_channel_->SetSink(2, &renderer2)); EXPECT_EQ(0, renderer1.num_rendered_frames()); @@ -2329,12 +2381,9 @@ TEST_F(WebRtcVideoChannelBaseTest, SimulateConference) { // Tests that we can add and remove capturers and frames are sent out properly TEST_F(WebRtcVideoChannelBaseTest, DISABLED_AddRemoveCapturer) { - using cricket::FOURCC_I420; - using cricket::VideoCodec; - using cricket::VideoFormat; - using cricket::VideoOptions; + using webrtc::Codec; - VideoCodec codec = DefaultCodec(); + Codec codec = DefaultCodec(); const int time_between_send_ms = VideoFormat::FpsToInterval(kFramerate); EXPECT_TRUE(SetOneCodec(codec)); EXPECT_TRUE(SetSend(true)); @@ -2343,9 +2392,9 @@ TEST_F(WebRtcVideoChannelBaseTest, DISABLED_AddRemoveCapturer) { SendFrame(); EXPECT_FRAME(1, kVideoWidth, kVideoHeight); - webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(480, 360, rtc::kNumMicrosecsPerSec / 30, - rtc::kNumMicrosecsPerSec / 30); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(480, 360, webrtc::kNumMicrosecsPerSec / 30, + webrtc::kNumMicrosecsPerSec / 30); // TODO(nisse): This testcase fails if we don't configure // screencast. It's unclear why, I see nothing obvious in this @@ -2416,34 +2465,30 @@ TEST_F(WebRtcVideoChannelBaseTest, RemoveCapturerWithoutAdd) { // Tests that we can add and remove capturer as unique sources. TEST_F(WebRtcVideoChannelBaseTest, AddRemoveCapturerMultipleSources) { // Set up the stream associated with the engine. 
- EXPECT_TRUE(receive_channel_->AddRecvStream( - cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE( + receive_channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc))); EXPECT_TRUE(receive_channel_->SetSink(kSsrc, &renderer_)); - cricket::VideoFormat capture_format( - kVideoWidth, kVideoHeight, - cricket::VideoFormat::FpsToInterval(kFramerate), cricket::FOURCC_I420); + VideoFormat capture_format(kVideoWidth, kVideoHeight, + VideoFormat::FpsToInterval(kFramerate), + FOURCC_I420); // Set up additional stream 1. - cricket::FakeVideoRenderer renderer1; + FakeVideoRenderer renderer1; EXPECT_FALSE(receive_channel_->SetSink(1, &renderer1)); - EXPECT_TRUE( - receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1))); + EXPECT_TRUE(receive_channel_->AddRecvStream(StreamParams::CreateLegacy(1))); EXPECT_TRUE(receive_channel_->SetSink(1, &renderer1)); - EXPECT_TRUE( - send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(1))); + EXPECT_TRUE(send_channel_->AddSendStream(StreamParams::CreateLegacy(1))); - webrtc::test::FrameForwarder frame_forwarder1; - cricket::FakeFrameSource frame_source(kVideoWidth, kVideoHeight, - rtc::kNumMicrosecsPerSec / kFramerate); + FrameForwarder frame_forwarder1; + FakeFrameSource frame_source(kVideoWidth, kVideoHeight, + webrtc::kNumMicrosecsPerSec / kFramerate); // Set up additional stream 2. - cricket::FakeVideoRenderer renderer2; + FakeVideoRenderer renderer2; EXPECT_FALSE(receive_channel_->SetSink(2, &renderer2)); - EXPECT_TRUE( - receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(2))); + EXPECT_TRUE(receive_channel_->AddRecvStream(StreamParams::CreateLegacy(2))); EXPECT_TRUE(receive_channel_->SetSink(2, &renderer2)); - EXPECT_TRUE( - send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(2))); - webrtc::test::FrameForwarder frame_forwarder2; + EXPECT_TRUE(send_channel_->AddSendStream(StreamParams::CreateLegacy(2))); + FrameForwarder frame_forwarder2; // State for all the streams. EXPECT_TRUE(SetOneCodec(DefaultCodec())); @@ -2457,14 +2502,14 @@ TEST_F(WebRtcVideoChannelBaseTest, AddRemoveCapturerMultipleSources) { const int kTestWidth = 160; const int kTestHeight = 120; frame_forwarder1.IncomingCapturedFrame(frame_source.GetFrame( - kTestWidth, kTestHeight, webrtc::VideoRotation::kVideoRotation_0, - rtc::kNumMicrosecsPerSec / kFramerate)); + kTestWidth, kTestHeight, VideoRotation::kVideoRotation_0, + webrtc::kNumMicrosecsPerSec / kFramerate)); time_controller_.AdvanceTime(kFrameDuration); EXPECT_FRAME_ON_RENDERER(renderer1, 1, kTestWidth, kTestHeight); // Capture a frame with additional capturer2, frames should be received frame_forwarder2.IncomingCapturedFrame(frame_source.GetFrame( - kTestWidth, kTestHeight, webrtc::VideoRotation::kVideoRotation_0, - rtc::kNumMicrosecsPerSec / kFramerate)); + kTestWidth, kTestHeight, VideoRotation::kVideoRotation_0, + webrtc::kNumMicrosecsPerSec / kFramerate)); time_controller_.AdvanceTime(kFrameDuration); EXPECT_FRAME_ON_RENDERER(renderer2, 1, kTestWidth, kTestHeight); // Successfully remove the capturer. @@ -2480,10 +2525,9 @@ TEST_F(WebRtcVideoChannelBaseTest, RejectEmptyStreamParams) { // Remove the send stream that was added during Setup. 
EXPECT_TRUE(send_channel_->RemoveSendStream(kSsrc)); - cricket::StreamParams empty; + StreamParams empty; EXPECT_FALSE(send_channel_->AddSendStream(empty)); - EXPECT_TRUE( - send_channel_->AddSendStream(cricket::StreamParams::CreateLegacy(789u))); + EXPECT_TRUE(send_channel_->AddSendStream(StreamParams::CreateLegacy(789u))); } // Test that multiple send streams can be created and deleted properly. @@ -2493,8 +2537,8 @@ TEST_F(WebRtcVideoChannelBaseTest, MultipleSendStreams) { EXPECT_TRUE(send_channel_->RemoveSendStream(kSsrc)); const unsigned int kSsrcsSize = sizeof(kSsrcs4) / sizeof(kSsrcs4[0]); for (unsigned int i = 0; i < kSsrcsSize; ++i) { - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrcs4[i]))); + EXPECT_TRUE( + send_channel_->AddSendStream(StreamParams::CreateLegacy(kSsrcs4[i]))); } // Delete one of the non default channel streams, let the destructor delete // the remaining ones. @@ -2521,20 +2565,52 @@ TEST_F(WebRtcVideoChannelBaseTest, TwoStreamsSendAndReceive) { // initially will use QVGA instead of VGA. // TODO(pbos): Set up the quality scaler so that both senders reliably start // at QVGA, then verify that instead. - cricket::VideoCodec codec = GetEngineCodec("VP8"); + Codec codec = GetEngineCodec("VP8"); codec.params[kCodecParamStartBitrate] = "1000000"; TwoStreamsSendAndReceive(codec); } +TEST_F(WebRtcVideoChannelBaseTest, + RequestEncoderFallbackNextCodecFollowNegotiatedOrder) { + VideoSenderParameters parameters; + parameters.codecs.push_back(GetEngineCodec("VP9")); + parameters.codecs.push_back(GetEngineCodec("AV1")); + parameters.codecs.push_back(GetEngineCodec("VP8")); + EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); + + std::optional codec = send_channel_->GetSendCodec(); + ASSERT_TRUE(codec); + EXPECT_EQ("VP9", codec->name); + + SendImpl()->RequestEncoderFallback(); + time_controller_.AdvanceTime(kFrameDuration); + codec = send_channel_->GetSendCodec(); + ASSERT_TRUE(codec); + EXPECT_EQ("AV1", codec->name); + + SendImpl()->RequestEncoderFallback(); + time_controller_.AdvanceTime(kFrameDuration); + codec = send_channel_->GetSendCodec(); + ASSERT_TRUE(codec); + EXPECT_EQ("VP8", codec->name); + + SendImpl()->RequestEncoderFallback(); + time_controller_.AdvanceTime(kFrameDuration); + + FrameForwarder frame_forwarder; + EXPECT_TRUE(send_channel_->SetVideoSend(kSsrc, nullptr, &frame_forwarder)); + EXPECT_TRUE(send_channel_->RemoveSendStream(kSsrc)); +} + #if defined(RTC_ENABLE_VP9) TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderFallback) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); parameters.codecs.push_back(GetEngineCodec("VP8")); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); - absl::optional codec = send_channel_->GetSendCodec(); + std::optional codec = send_channel_->GetSendCodec(); ASSERT_TRUE(codec); EXPECT_EQ("VP9", codec->name); @@ -2555,18 +2631,18 @@ TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderFallback) { } TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderSwitchDefaultFallback) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); parameters.codecs.push_back(GetEngineCodec("VP8")); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); - absl::optional codec = send_channel_->GetSendCodec(); + std::optional codec = send_channel_->GetSendCodec(); ASSERT_TRUE(codec); EXPECT_EQ("VP9", codec->name); // RequestEncoderSwitch 
will post a task to the worker thread (which is also // the current thread), hence the ProcessMessages call. - SendImpl()->RequestEncoderSwitch(webrtc::SdpVideoFormat("UnavailableCodec"), + SendImpl()->RequestEncoderSwitch(SdpVideoFormat("UnavailableCodec"), /*allow_default_fallback=*/true); time_controller_.AdvanceTime(kFrameDuration); @@ -2578,21 +2654,20 @@ TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderSwitchDefaultFallback) { } TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderSwitchStrictPreference) { - VideoCodec vp9 = GetEngineCodec("VP9"); + Codec vp9 = GetEngineCodec("VP9"); vp9.params["profile-id"] = "0"; - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(vp9); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); - absl::optional codec = send_channel_->GetSendCodec(); + std::optional codec = send_channel_->GetSendCodec(); ASSERT_TRUE(codec); EXPECT_EQ("VP8", codec->name); - SendImpl()->RequestEncoderSwitch( - webrtc::SdpVideoFormat("VP9", {{"profile-id", "1"}}), - /*allow_default_fallback=*/false); + SendImpl()->RequestEncoderSwitch(SdpVideoFormat::VP9Profile1(), + /*allow_default_fallback=*/false); time_controller_.AdvanceTime(kFrameDuration); // VP9 profile_id=1 is not available. Default fallback is not allowed. Switch @@ -2601,9 +2676,8 @@ TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderSwitchStrictPreference) { ASSERT_TRUE(codec); EXPECT_EQ("VP8", codec->name); - SendImpl()->RequestEncoderSwitch( - webrtc::SdpVideoFormat("VP9", {{"profile-id", "0"}}), - /*allow_default_fallback=*/false); + SendImpl()->RequestEncoderSwitch(SdpVideoFormat::VP9Profile0(), + /*allow_default_fallback=*/false); time_controller_.AdvanceTime(kFrameDuration); // VP9 profile_id=0 is available. Switch encoder. 
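// A sketch of the single fallback step that the RequestEncoderFallback tests
// repeat; the helper name and the reuse of the file-scope kFrameDuration
// constant are assumptions for illustration. As the comment above notes for
// RequestEncoderSwitch, the request is posted to the worker thread, so
// simulated time must advance before GetSendCodec() reflects the change.
std::optional<Codec> FallBackOnceSketch(
    WebRtcVideoSendChannel* send_impl,
    VideoMediaSendChannelInterface* send_channel,
    GlobalSimulatedTimeController& time_controller) {
  send_impl->RequestEncoderFallback();
  time_controller.AdvanceTime(kFrameDuration);  // Run the posted task.
  // The next codec in the negotiated order, or unchanged if none is left.
  return send_channel->GetSendCodec();
}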
@@ -2613,11 +2687,10 @@ TEST_F(WebRtcVideoChannelBaseTest, RequestEncoderSwitchStrictPreference) { } TEST_F(WebRtcVideoChannelBaseTest, SendCodecIsMovedToFrontInRtpParameters) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); parameters.codecs.push_back(GetEngineCodec("VP8")); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); - send_channel_->SetVideoCodecSwitchingEnabled(true); auto send_codecs = send_channel_->GetRtpSendParameters(kSsrc).codecs; ASSERT_EQ(send_codecs.size(), 2u); @@ -2640,22 +2713,23 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { WebRtcVideoChannelTest() : WebRtcVideoChannelTest("") {} explicit WebRtcVideoChannelTest(const char* field_trials) : WebRtcVideoEngineTest(field_trials), - frame_source_(1280, 720, rtc::kNumMicrosecsPerSec / 30), + frame_source_(1280, 720, webrtc::kNumMicrosecsPerSec / 30), last_ssrc_(0) {} void SetUp() override { AddSupportedVideoCodecType("VP8"); AddSupportedVideoCodecType("VP9"); + AddSupportedVideoCodecType( + "AV1", {ScalabilityMode::kL1T3, ScalabilityMode::kL2T3}); #if defined(WEBRTC_USE_H264) AddSupportedVideoCodecType("H264"); #endif - fake_call_.reset(new FakeCall(&field_trials_)); + fake_call_ = std::make_unique(env_); send_channel_ = engine_.CreateSendChannel( - fake_call_.get(), GetMediaConfig(), VideoOptions(), - webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get()); - receive_channel_ = - engine_.CreateReceiveChannel(fake_call_.get(), GetMediaConfig(), - VideoOptions(), webrtc::CryptoOptions()); + fake_call_.get(), GetMediaConfig(), VideoOptions(), CryptoOptions(), + video_bitrate_allocator_factory_.get()); + receive_channel_ = engine_.CreateReceiveChannel( + fake_call_.get(), GetMediaConfig(), VideoOptions(), CryptoOptions()); send_channel_->SetSsrcListChangedCallback( [receive_channel = receive_channel_.get()](const std::set& choices) { @@ -2670,8 +2744,8 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { send_channel_->OnReadyToSend(true); receive_channel_->SetReceive(true); last_ssrc_ = 123; - send_parameters_.codecs = engine_.send_codecs(); - recv_parameters_.codecs = engine_.recv_codecs(); + send_parameters_.codecs = engine_.LegacySendCodecs(); + recv_parameters_.codecs = engine_.LegacyRecvCodecs(); ASSERT_TRUE(send_channel_->SetSenderParameters(send_parameters_)); } @@ -2692,33 +2766,32 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { WebRtcVideoSendChannel* SendImpl() { // Note that this function requires intimate knowledge of how the channel // was created. - return static_cast(send_channel_.get()); + return static_cast(send_channel_.get()); } // Casts a shim channel to a webrtc::Transport. Used once. - webrtc::Transport* ChannelImplAsTransport( - cricket::VideoMediaSendChannelInterface* channel) { - return static_cast(channel)->transport(); + Transport* ChannelImplAsTransport(VideoMediaSendChannelInterface* channel) { + return static_cast(channel)->transport(); } - cricket::VideoCodec GetEngineCodec(const std::string& name) { - for (const cricket::VideoCodec& engine_codec : engine_.send_codecs()) { + Codec GetEngineCodec(const std::string& name) { + for (const webrtc::Codec& engine_codec : engine_.LegacySendCodecs()) { if (absl::EqualsIgnoreCase(name, engine_codec.name)) return engine_codec; } // This point should never be reached. 
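// The fake encoder factory above now advertises AV1 with ScalabilityMode
// kL1T3 and kL2T3; the new AV1 channel tests further down select one of them
// through RtpEncodingParameters using the matching string name ("L2T3"). A
// sketch of that selection, assuming the same send-channel interface the
// fixture stores (the helper name is illustrative only):
void SelectAv1L2T3Sketch(VideoMediaSendChannelInterface* send_channel,
                         uint32_t ssrc) {
  RtpParameters parameters = send_channel->GetRtpSendParameters(ssrc);
  ASSERT_EQ(parameters.encodings.size(), 1u);
  parameters.encodings[0].scalability_mode = "L2T3";
  EXPECT_TRUE(send_channel->SetRtpSendParameters(ssrc, parameters).ok());
}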
ADD_FAILURE() << "Unrecognized codec name: " << name; - return cricket::CreateVideoCodec(0, ""); + return webrtc::CreateVideoCodec(0, ""); } - cricket::VideoCodec DefaultCodec() { return GetEngineCodec("VP8"); } + Codec DefaultCodec() { return GetEngineCodec("VP8"); } // After receciving and processing the packet, enough time is advanced that // the unsignalled receive stream cooldown is no longer in effect. void ReceivePacketAndAdvanceTime(const RtpPacketReceived& packet) { receive_channel_->OnPacketReceived(packet); time_controller_.AdvanceTime( - webrtc::TimeDelta::Millis(kUnsignalledReceiveStreamCooldownMs)); + TimeDelta::Millis(kUnsignalledReceiveStreamCooldownMs)); } protected: @@ -2790,11 +2863,11 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { void TestExtmapAllowMixedCaller(bool extmap_allow_mixed) { // For a caller, the answer will be applied in set remote description // where SetSenderParameters() is called. - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrc))); + EXPECT_TRUE( + send_channel_->AddSendStream(StreamParams::CreateLegacy(kSsrc))); send_parameters_.extmap_allow_mixed = extmap_allow_mixed; EXPECT_TRUE(send_channel_->SetSenderParameters(send_parameters_)); - const webrtc::VideoSendStream::Config& config = + const VideoSendStream::Config& config = fake_call_->GetVideoSendStreams()[0]->GetConfig(); EXPECT_EQ(extmap_allow_mixed, config.rtp.extmap_allow_mixed); } @@ -2803,9 +2876,9 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { // For a callee, the answer will be applied in set local description // where SetExtmapAllowMixed() and AddSendStream() are called. send_channel_->SetExtmapAllowMixed(extmap_allow_mixed); - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrc))); - const webrtc::VideoSendStream::Config& config = + EXPECT_TRUE( + send_channel_->AddSendStream(StreamParams::CreateLegacy(kSsrc))); + const VideoSendStream::Config& config = fake_call_->GetVideoSendStreams()[0]->GetConfig(); EXPECT_EQ(extmap_allow_mixed, config.rtp.extmap_allow_mixed); } @@ -2813,11 +2886,11 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { void TestSetSendRtpHeaderExtensions(const std::string& ext_uri) { // Enable extension. const int id = 1; - cricket::VideoSenderParameters parameters = send_parameters_; + VideoSenderParameters parameters = send_parameters_; parameters.extensions.push_back(RtpExtension(ext_uri, id)); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); FakeVideoSendStream* send_stream = - AddSendStream(cricket::StreamParams::CreateLegacy(123)); + AddSendStream(StreamParams::CreateLegacy(123)); // Verify the send extension id. ASSERT_EQ(1u, send_stream->GetConfig().rtp.extensions.size()); @@ -2844,11 +2917,11 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { void TestSetRecvRtpHeaderExtensions(const std::string& ext_uri) { // Enable extension. 
const int id = 1; - cricket::VideoReceiverParameters parameters = recv_parameters_; + VideoReceiverParameters parameters = recv_parameters_; parameters.extensions.push_back(RtpExtension(ext_uri, id)); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); - AddRecvStream(cricket::StreamParams::CreateLegacy(123)); + AddRecvStream(StreamParams::CreateLegacy(123)); EXPECT_THAT( receive_channel_->GetRtpReceiverParameters(123).header_extensions, ElementsAre(RtpExtension(ext_uri, id))); @@ -2858,7 +2931,7 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { // Verify that SetRecvRtpHeaderExtensions doesn't implicitly add them for // senders. - EXPECT_TRUE(AddSendStream(cricket::StreamParams::CreateLegacy(123)) + EXPECT_TRUE(AddSendStream(StreamParams::CreateLegacy(123)) ->GetConfig() .rtp.extensions.empty()); @@ -2879,25 +2952,25 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { AssignDefaultCodec(); VerifyCodecHasDefaultFeedbackParams(*default_codec_, expect_lntf_enabled); - cricket::VideoSenderParameters parameters; - parameters.codecs = engine_.send_codecs(); + VideoSenderParameters parameters; + parameters.codecs = engine_.LegacySendCodecs(); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); EXPECT_TRUE(send_channel_->SetSend(true)); // Send side. FakeVideoSendStream* send_stream = - AddSendStream(cricket::StreamParams::CreateLegacy(1)); + AddSendStream(StreamParams::CreateLegacy(1)); EXPECT_EQ(send_stream->GetConfig().rtp.lntf.enabled, expect_lntf_enabled); // Receiver side. FakeVideoReceiveStream* recv_stream = - AddRecvStream(cricket::StreamParams::CreateLegacy(1)); + AddRecvStream(StreamParams::CreateLegacy(1)); EXPECT_EQ(recv_stream->GetConfig().rtp.lntf.enabled, expect_lntf_enabled); } void TestExtensionFilter(const std::vector& extensions, const std::string& expected_extension) { - cricket::VideoSenderParameters parameters = send_parameters_; + VideoSenderParameters parameters = send_parameters_; int expected_id = -1; int id = 1; for (const std::string& extension : extensions) { @@ -2907,7 +2980,7 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { } EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); FakeVideoSendStream* send_stream = - AddSendStream(cricket::StreamParams::CreateLegacy(123)); + AddSendStream(StreamParams::CreateLegacy(123)); // Verify that only one of them has been set, and that it is the one with // highest priority (transport sequence number). @@ -2925,11 +2998,10 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { void TestReceiveUnsignaledSsrcPacket(uint8_t payload_type, bool expect_created_receive_stream); - FakeVideoSendStream* SetDenoisingOption( - uint32_t ssrc, - webrtc::test::FrameForwarder* frame_forwarder, - bool enabled) { - cricket::VideoOptions options; + FakeVideoSendStream* SetDenoisingOption(uint32_t ssrc, + FrameForwarder* frame_forwarder, + bool enabled) { + VideoOptions options; options.video_noise_reduction = enabled; EXPECT_TRUE(send_channel_->SetVideoSend(ssrc, &options, frame_forwarder)); // Options only take effect on the next frame. 
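// The filter check above keeps exactly one bandwidth-estimation extension,
// the one with the highest priority (transport sequence number). The ordering
// below is a summary for illustration, reconstructed from the comments and
// test names in this file rather than quoted from the production filter:
const char* const kBweExtensionPrioritySketch[] = {
    RtpExtension::kTransportSequenceNumberUri,  // Kept whenever negotiated.
    RtpExtension::kAbsSendTimeUri,
    RtpExtension::kTimestampOffsetUri,
};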
@@ -2953,9 +3025,9 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { } if (with_rtx) { return AddSendStream( - cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs)); + webrtc::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs)); } - return AddSendStream(CreateSimStreamParams("cname", ssrcs)); + return AddSendStream(webrtc::CreateSimStreamParams("cname", ssrcs)); } int GetMaxEncoderBitrate() { @@ -2973,8 +3045,7 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { VideoSenderParameters limited_send_params = send_parameters_; limited_send_params.max_bandwidth_bps = global_max; EXPECT_TRUE(send_channel_->SetSenderParameters(limited_send_params)); - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); parameters.encodings[0].max_bitrate_bps = stream_max; EXPECT_TRUE( @@ -2988,8 +3059,8 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { } // Values from kSimulcastConfigs in simulcast.cc. - const std::vector GetSimulcastBitrates720p() const { - std::vector layers(3); + const std::vector GetSimulcastBitrates720p() const { + std::vector layers(3); layers[0].min_bitrate_bps = 30000; layers[0].target_bitrate_bps = 150000; layers[0].max_bitrate_bps = 200000; @@ -3002,12 +3073,12 @@ class WebRtcVideoChannelTest : public WebRtcVideoEngineTest { return layers; } - cricket::FakeFrameSource frame_source_; + FakeFrameSource frame_source_; std::unique_ptr fake_call_; std::unique_ptr send_channel_; std::unique_ptr receive_channel_; - cricket::VideoSenderParameters send_parameters_; - cricket::VideoReceiverParameters recv_parameters_; + VideoSenderParameters send_parameters_; + VideoReceiverParameters recv_parameters_; uint32_t last_ssrc_; }; @@ -3015,7 +3086,7 @@ TEST_F(WebRtcVideoChannelTest, SetsSyncGroupFromSyncLabel) { const uint32_t kVideoSsrc = 123; const std::string kSyncLabel = "AvSyncLabel"; - cricket::StreamParams sp = cricket::StreamParams::CreateLegacy(kVideoSsrc); + StreamParams sp = StreamParams::CreateLegacy(kVideoSsrc); sp.set_stream_ids({kSyncLabel}); EXPECT_TRUE(receive_channel_->AddRecvStream(sp)); @@ -3026,8 +3097,8 @@ TEST_F(WebRtcVideoChannelTest, SetsSyncGroupFromSyncLabel) { } TEST_F(WebRtcVideoChannelTest, RecvStreamWithSimAndRtx) { - cricket::VideoSenderParameters parameters; - parameters.codecs = engine_.send_codecs(); + VideoSenderParameters parameters; + parameters.codecs = engine_.LegacySendCodecs(); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); EXPECT_TRUE(send_channel_->SetSend(true)); parameters.conference_mode = true; @@ -3037,7 +3108,7 @@ TEST_F(WebRtcVideoChannelTest, RecvStreamWithSimAndRtx) { const std::vector ssrcs = MAKE_VECTOR(kSsrcs1); const std::vector rtx_ssrcs = MAKE_VECTOR(kRtxSsrcs1); FakeVideoSendStream* send_stream = AddSendStream( - cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs)); + webrtc::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs)); ASSERT_EQ(rtx_ssrcs.size(), send_stream->GetConfig().rtp.rtx.ssrcs.size()); for (size_t i = 0; i < rtx_ssrcs.size(); ++i) @@ -3045,7 +3116,7 @@ TEST_F(WebRtcVideoChannelTest, RecvStreamWithSimAndRtx) { // Receiver side. 
FakeVideoReceiveStream* recv_stream = AddRecvStream( - cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs)); + webrtc::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs)); EXPECT_FALSE( recv_stream->GetConfig().rtp.rtx_associated_payload_types.empty()); EXPECT_TRUE(VerifyRtxReceiveAssociations(recv_stream->GetConfig())) @@ -3059,8 +3130,7 @@ TEST_F(WebRtcVideoChannelTest, RecvStreamWithSimAndRtx) { TEST_F(WebRtcVideoChannelTest, RecvStreamWithRtx) { // Setup one channel with an associated RTX stream. - cricket::StreamParams params = - cricket::StreamParams::CreateLegacy(kSsrcs1[0]); + StreamParams params = StreamParams::CreateLegacy(kSsrcs1[0]); params.AddFidSsrc(kSsrcs1[0], kRtxSsrcs1[0]); FakeVideoReceiveStream* recv_stream = AddRecvStream(params); EXPECT_EQ(kRtxSsrcs1[0], recv_stream->GetConfig().rtp.rtx_ssrc); @@ -3074,8 +3144,7 @@ TEST_F(WebRtcVideoChannelTest, RecvStreamWithRtx) { TEST_F(WebRtcVideoChannelTest, RecvStreamNoRtx) { // Setup one channel without an associated RTX stream. - cricket::StreamParams params = - cricket::StreamParams::CreateLegacy(kSsrcs1[0]); + StreamParams params = StreamParams::CreateLegacy(kSsrcs1[0]); FakeVideoReceiveStream* recv_stream = AddRecvStream(params); ASSERT_EQ(0U, recv_stream->GetConfig().rtp.rtx_ssrc); } @@ -3096,10 +3165,10 @@ TEST_F(WebRtcVideoChannelTest, SetExtmapAllowMixedDisabledAsCallee) { TEST_F(WebRtcVideoChannelTest, NoHeaderExtesionsByDefault) { FakeVideoSendStream* send_stream = - AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcs1[0])); + AddSendStream(StreamParams::CreateLegacy(kSsrcs1[0])); ASSERT_TRUE(send_stream->GetConfig().rtp.extensions.empty()); - AddRecvStream(cricket::StreamParams::CreateLegacy(kSsrcs1[0])); + AddRecvStream(StreamParams::CreateLegacy(kSsrcs1[0])); ASSERT_TRUE(receive_channel_->GetRtpReceiverParameters(kSsrcs1[0]) .header_extensions.empty()); } @@ -3123,7 +3192,7 @@ TEST_F(WebRtcVideoChannelTest, RecvAbsoluteSendTimeHeaderExtensions) { } TEST_F(WebRtcVideoChannelTest, FiltersExtensionsPicksTransportSeqNum) { - webrtc::test::ScopedKeyValueConfig override_field_trials( + ScopedKeyValueConfig override_field_trials( field_trials_, "WebRTC-FilterAbsSendTimeExtension/Enabled/"); // Enable three redundant extensions. 
std::vector extensions; @@ -3157,6 +3226,47 @@ TEST_F(WebRtcVideoChannelTest, RecvVideoRotationHeaderExtensions) { TestSetRecvRtpHeaderExtensions(RtpExtension::kVideoRotationUri); } +TEST_F(WebRtcVideoChannelTest, SendCorruptionDetectionHeaderExtensions) { + TestSetSendRtpHeaderExtensions(RtpExtension::kCorruptionDetectionUri); +} +TEST_F(WebRtcVideoChannelTest, RecvCorruptionDetectionHeaderExtensions) { + TestSetRecvRtpHeaderExtensions(RtpExtension::kCorruptionDetectionUri); +} + +TEST_F(WebRtcVideoChannelTest, DisableFrameInstrumentationByDefault) { + EXPECT_TRUE(send_channel_->SetSenderParameters(send_parameters_)); + FakeVideoSendStream* send_stream = + AddSendStream(StreamParams::CreateLegacy(123)); + EXPECT_FALSE(send_stream->GetConfig() + .encoder_settings.enable_frame_instrumentation_generator); +} + +TEST_F(WebRtcVideoChannelTest, + EnableFrameInstrumentationWhenEncryptedExtensionIsPresent) { + VideoSenderParameters parameters = send_parameters_; + parameters.extensions.push_back(RtpExtension( + RtpExtension::kCorruptionDetectionUri, /*id=*/1, /*encrypt=*/true)); + EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); + + FakeVideoSendStream* send_stream = + AddSendStream(StreamParams::CreateLegacy(123)); + EXPECT_TRUE(send_stream->GetConfig() + .encoder_settings.enable_frame_instrumentation_generator); +} + +TEST_F(WebRtcVideoChannelTest, + DisableFrameInstrumentationWhenNoEncryptedExtensionIsPresent) { + VideoSenderParameters parameters = send_parameters_; + parameters.extensions.push_back(RtpExtension( + RtpExtension::kCorruptionDetectionUri, /*id=*/1, /*encrypt=*/false)); + EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); + + FakeVideoSendStream* send_stream = + AddSendStream(StreamParams::CreateLegacy(123)); + EXPECT_FALSE(send_stream->GetConfig() + .encoder_settings.enable_frame_instrumentation_generator); +} + TEST_F(WebRtcVideoChannelTest, IdenticalSendExtensionsDoesntRecreateStream) { const int kAbsSendTimeId = 1; const int kVideoRotationId = 2; @@ -3167,7 +3277,7 @@ TEST_F(WebRtcVideoChannelTest, IdenticalSendExtensionsDoesntRecreateStream) { EXPECT_TRUE(send_channel_->SetSenderParameters(send_parameters_)); FakeVideoSendStream* send_stream = - AddSendStream(cricket::StreamParams::CreateLegacy(123)); + AddSendStream(StreamParams::CreateLegacy(123)); EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams()); ASSERT_EQ(2u, send_stream->GetConfig().rtp.extensions.size()); @@ -3197,7 +3307,7 @@ TEST_F(WebRtcVideoChannelTest, RtpExtension(RtpExtension::kTimestampOffsetUri, kTOffsetId)); EXPECT_TRUE(send_channel_->SetSenderParameters(send_parameters_)); FakeVideoSendStream* send_stream = - AddSendStream(cricket::StreamParams::CreateLegacy(123)); + AddSendStream(StreamParams::CreateLegacy(123)); // Only timestamp offset extension is set to send stream, // unsupported rtp extension is ignored. @@ -3216,7 +3326,7 @@ TEST_F(WebRtcVideoChannelTest, recv_parameters_.extensions.push_back( RtpExtension(RtpExtension::kTimestampOffsetUri, kTOffsetId)); EXPECT_TRUE(receive_channel_->SetReceiverParameters(recv_parameters_)); - AddRecvStream(cricket::StreamParams::CreateLegacy(123)); + AddRecvStream(StreamParams::CreateLegacy(123)); // Only timestamp offset extension is set to receive stream, // unsupported rtp extension is ignored. 
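// The corruption-detection tests above pin down a gate: the
// enable_frame_instrumentation_generator flag in the send stream's encoder
// settings is only turned on when the corruption-detection header extension
// is negotiated in its encrypted form. A minimal sketch of the two variants,
// reusing the RtpExtension constructor from those tests (the variable names
// are illustrative only):
const RtpExtension kEncryptedCorruptionExtSketch(
    RtpExtension::kCorruptionDetectionUri, /*id=*/1, /*encrypt=*/true);
// -> enable_frame_instrumentation_generator == true
const RtpExtension kPlaintextCorruptionExtSketch(
    RtpExtension::kCorruptionDetectionUri, /*id=*/1, /*encrypt=*/false);
// -> enable_frame_instrumentation_generator == false (same as the default)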
@@ -3281,14 +3391,14 @@ TEST_F(WebRtcVideoChannelTest, SetRecvRtpHeaderExtensionsRejectsDuplicateIds) { } TEST_F(WebRtcVideoChannelTest, OnPacketReceivedIdentifiesExtensions) { - cricket::VideoReceiverParameters parameters = recv_parameters_; + VideoReceiverParameters parameters = recv_parameters_; parameters.extensions.push_back( RtpExtension(RtpExtension::kVideoRotationUri, /*id=*/1)); ASSERT_TRUE(receive_channel_->SetReceiverParameters(parameters)); - webrtc::RtpHeaderExtensionMap extension_map(parameters.extensions); + RtpHeaderExtensionMap extension_map(parameters.extensions); RtpPacketReceived reference_packet(&extension_map); - reference_packet.SetExtension( - webrtc::VideoRotation::kVideoRotation_270); + reference_packet.SetExtension( + VideoRotation::kVideoRotation_270); // Create a packet without the extension map but with the same content. RtpPacketReceived received_packet; ASSERT_TRUE(received_packet.Parse(reference_packet.Buffer())); @@ -3296,20 +3406,19 @@ TEST_F(WebRtcVideoChannelTest, OnPacketReceivedIdentifiesExtensions) { receive_channel_->OnPacketReceived(received_packet); time_controller_.AdvanceTime(TimeDelta::Zero()); - EXPECT_EQ(fake_call_->last_received_rtp_packet() - .GetExtension(), - webrtc::VideoRotation::kVideoRotation_270); + EXPECT_EQ( + fake_call_->last_received_rtp_packet().GetExtension(), + VideoRotation::kVideoRotation_270); } TEST_F(WebRtcVideoChannelTest, AddRecvStreamOnlyUsesOneReceiveStream) { - EXPECT_TRUE( - receive_channel_->AddRecvStream(cricket::StreamParams::CreateLegacy(1))); + EXPECT_TRUE(receive_channel_->AddRecvStream(StreamParams::CreateLegacy(1))); EXPECT_EQ(1u, fake_call_->GetVideoReceiveStreams().size()); } TEST_F(WebRtcVideoChannelTest, RtcpIsCompoundByDefault) { FakeVideoReceiveStream* stream = AddRecvStream(); - EXPECT_EQ(webrtc::RtcpMode::kCompound, stream->GetConfig().rtp.rtcp_mode); + EXPECT_EQ(RtcpMode::kCompound, stream->GetConfig().rtp.rtcp_mode); } TEST_F(WebRtcVideoChannelTest, LossNotificationIsDisabledByDefault) { @@ -3317,14 +3426,14 @@ TEST_F(WebRtcVideoChannelTest, LossNotificationIsDisabledByDefault) { } TEST_F(WebRtcVideoChannelTest, LossNotificationIsEnabledByFieldTrial) { - webrtc::test::ScopedKeyValueConfig override_field_trials( + ScopedKeyValueConfig override_field_trials( field_trials_, "WebRTC-RtcpLossNotification/Enabled/"); ResetTest(); TestLossNotificationState(true); } TEST_F(WebRtcVideoChannelTest, LossNotificationCanBeEnabledAndDisabled) { - webrtc::test::ScopedKeyValueConfig override_field_trials( + ScopedKeyValueConfig override_field_trials( field_trials_, "WebRTC-RtcpLossNotification/Enabled/"); ResetTest(); @@ -3332,22 +3441,22 @@ TEST_F(WebRtcVideoChannelTest, LossNotificationCanBeEnabledAndDisabled) { VerifyCodecHasDefaultFeedbackParams(*default_codec_, true); { - cricket::VideoSenderParameters parameters; - parameters.codecs = engine_.send_codecs(); + VideoSenderParameters parameters; + parameters.codecs = engine_.LegacySendCodecs(); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); EXPECT_TRUE(send_channel_->SetSend(true)); } // Start with LNTF enabled. FakeVideoSendStream* send_stream = - AddSendStream(cricket::StreamParams::CreateLegacy(1)); + AddSendStream(StreamParams::CreateLegacy(1)); ASSERT_TRUE(send_stream->GetConfig().rtp.lntf.enabled); FakeVideoReceiveStream* recv_stream = - AddRecvStream(cricket::StreamParams::CreateLegacy(1)); + AddRecvStream(StreamParams::CreateLegacy(1)); ASSERT_TRUE(recv_stream->GetConfig().rtp.lntf.enabled); // Verify that LNTF is turned off when send(!) 
codecs without LNTF are set. - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(RemoveFeedbackParams(GetEngineCodec("VP8"))); EXPECT_TRUE(parameters.codecs[0].feedback_params.params().empty()); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); @@ -3357,7 +3466,7 @@ TEST_F(WebRtcVideoChannelTest, LossNotificationCanBeEnabledAndDisabled) { EXPECT_FALSE(send_stream->GetConfig().rtp.lntf.enabled); // Setting the default codecs again, including VP8, turns LNTF back on. - parameters.codecs = engine_.send_codecs(); + parameters.codecs = engine_.LegacySendCodecs(); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); recv_stream = fake_call_->GetVideoReceiveStreams()[0]; EXPECT_TRUE(recv_stream->GetConfig().rtp.lntf.enabled); @@ -3369,19 +3478,19 @@ TEST_F(WebRtcVideoChannelTest, NackIsEnabledByDefault) { AssignDefaultCodec(); VerifyCodecHasDefaultFeedbackParams(*default_codec_, false); - cricket::VideoSenderParameters parameters; - parameters.codecs = engine_.send_codecs(); + VideoSenderParameters parameters; + parameters.codecs = engine_.LegacySendCodecs(); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); EXPECT_TRUE(send_channel_->SetSend(true)); // Send side. FakeVideoSendStream* send_stream = - AddSendStream(cricket::StreamParams::CreateLegacy(1)); + AddSendStream(StreamParams::CreateLegacy(1)); EXPECT_GT(send_stream->GetConfig().rtp.nack.rtp_history_ms, 0); // Receiver side. FakeVideoReceiveStream* recv_stream = - AddRecvStream(cricket::StreamParams::CreateLegacy(1)); + AddRecvStream(StreamParams::CreateLegacy(1)); EXPECT_GT(recv_stream->GetConfig().rtp.nack.rtp_history_ms, 0); // Nack history size should match between sender and receiver. @@ -3397,7 +3506,7 @@ TEST_F(WebRtcVideoChannelTest, NackCanBeEnabledAndDisabled) { EXPECT_GT(send_stream->GetConfig().rtp.nack.rtp_history_ms, 0); // Verify that NACK is turned off when send(!) codecs without NACK are set. - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(RemoveFeedbackParams(GetEngineCodec("VP8"))); EXPECT_TRUE(parameters.codecs[0].feedback_params.params().empty()); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); @@ -3408,7 +3517,7 @@ TEST_F(WebRtcVideoChannelTest, NackCanBeEnabledAndDisabled) { // Verify that NACK is turned on when setting default codecs since the // default codecs have NACK enabled. - parameters.codecs = engine_.send_codecs(); + parameters.codecs = engine_.LegacySendCodecs(); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); recv_stream = fake_call_->GetVideoReceiveStreams()[0]; EXPECT_GT(recv_stream->GetConfig().rtp.nack.rtp_history_ms, 0); @@ -3422,7 +3531,7 @@ TEST_F(WebRtcVideoChannelTest, NackCanBeEnabledAndDisabled) { // same source that will be sent later, which just means that we're ready // earlier. TEST_F(WebRtcVideoChannelTest, ReconfiguresEncodersWhenNotSending) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); send_channel_->SetSend(false); @@ -3430,13 +3539,12 @@ TEST_F(WebRtcVideoChannelTest, ReconfiguresEncodersWhenNotSending) { FakeVideoSendStream* stream = AddSendStream(); // No frames entered. 
- std::vector streams = stream->GetVideoStreams(); + std::vector streams = stream->GetVideoStreams(); EXPECT_EQ(0u, streams[0].width); EXPECT_EQ(0u, streams[0].height); - webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 30); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(1280, 720, webrtc::kNumMicrosecsPerSec / 30); EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); @@ -3444,23 +3552,22 @@ TEST_F(WebRtcVideoChannelTest, ReconfiguresEncodersWhenNotSending) { // Frame entered, should be reconfigured to new dimensions. streams = stream->GetVideoStreams(); - EXPECT_EQ(rtc::checked_cast(1280), streams[0].width); - EXPECT_EQ(rtc::checked_cast(720), streams[0].height); + EXPECT_EQ(checked_cast(1280), streams[0].width); + EXPECT_EQ(checked_cast(720), streams[0].height); EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } TEST_F(WebRtcVideoChannelTest, UsesCorrectSettingsForScreencast) { static const int kScreenshareMinBitrateKbps = 800; - cricket::VideoCodec codec = GetEngineCodec("VP8"); - cricket::VideoSenderParameters parameters; + Codec codec = GetEngineCodec("VP8"); + VideoSenderParameters parameters; parameters.codecs.push_back(codec); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); AddSendStream(); - webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 30); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(1280, 720, webrtc::kNumMicrosecsPerSec / 30); VideoOptions min_bitrate_options; min_bitrate_options.screencast_min_bitrate_kbps = kScreenshareMinBitrateKbps; EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, &min_bitrate_options, @@ -3475,13 +3582,12 @@ TEST_F(WebRtcVideoChannelTest, UsesCorrectSettingsForScreencast) { EXPECT_EQ(1, send_stream->GetNumberOfSwappedFrames()); // Verify non-screencast settings. - webrtc::VideoEncoderConfig encoder_config = - send_stream->GetEncoderConfig().Copy(); - EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo, + VideoEncoderConfig encoder_config = send_stream->GetEncoderConfig().Copy(); + EXPECT_EQ(VideoEncoderConfig::ContentType::kRealtimeVideo, encoder_config.content_type); - std::vector streams = send_stream->GetVideoStreams(); - EXPECT_EQ(rtc::checked_cast(1280), streams.front().width); - EXPECT_EQ(rtc::checked_cast(720), streams.front().height); + std::vector streams = send_stream->GetVideoStreams(); + EXPECT_EQ(checked_cast(1280), streams.front().width); + EXPECT_EQ(checked_cast(720), streams.front().height); EXPECT_EQ(0, encoder_config.min_transmit_bitrate_bps) << "Non-screenshare shouldn't use min-transmit bitrate."; @@ -3499,14 +3605,14 @@ TEST_F(WebRtcVideoChannelTest, UsesCorrectSettingsForScreencast) { // Verify screencast settings. 
encoder_config = send_stream->GetEncoderConfig().Copy(); - EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kScreen, + EXPECT_EQ(VideoEncoderConfig::ContentType::kScreen, encoder_config.content_type); EXPECT_EQ(kScreenshareMinBitrateKbps * 1000, encoder_config.min_transmit_bitrate_bps); streams = send_stream->GetVideoStreams(); - EXPECT_EQ(rtc::checked_cast(1280), streams.front().width); - EXPECT_EQ(rtc::checked_cast(720), streams.front().height); + EXPECT_EQ(checked_cast(1280), streams.front().width); + EXPECT_EQ(checked_cast(720), streams.front().height); EXPECT_FALSE(streams[0].num_temporal_layers.has_value()); EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } @@ -3520,9 +3626,8 @@ TEST_F(WebRtcVideoChannelTest, AddSendStream(); VideoOptions options; options.is_screencast = true; - webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 30); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(1280, 720, webrtc::kNumMicrosecsPerSec / 30); EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); EXPECT_TRUE(send_channel_->SetSend(true)); @@ -3531,15 +3636,14 @@ TEST_F(WebRtcVideoChannelTest, ASSERT_EQ(1u, fake_call_->GetVideoSendStreams().size()); FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front(); - webrtc::VideoEncoderConfig encoder_config = - send_stream->GetEncoderConfig().Copy(); + VideoEncoderConfig encoder_config = send_stream->GetEncoderConfig().Copy(); // Verify screencast settings. encoder_config = send_stream->GetEncoderConfig().Copy(); - EXPECT_EQ(webrtc::VideoEncoderConfig::ContentType::kScreen, + EXPECT_EQ(VideoEncoderConfig::ContentType::kScreen, encoder_config.content_type); - std::vector streams = send_stream->GetVideoStreams(); + std::vector streams = send_stream->GetVideoStreams(); ASSERT_EQ(1u, streams.size()); ASSERT_EQ(2u, streams[0].num_temporal_layers); EXPECT_EQ(kConferenceScreencastTemporalBitrateBps, @@ -3558,10 +3662,10 @@ TEST_F(WebRtcVideoChannelTest, SetMediaConfigSuspendBelowMinBitrate) { media_config.video.suspend_below_min_bitrate = true; send_channel_ = engine_.CreateSendChannel( - fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), + fake_call_.get(), media_config, VideoOptions(), CryptoOptions(), video_bitrate_allocator_factory_.get()); receive_channel_ = engine_.CreateReceiveChannel( - fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions()); + fake_call_.get(), media_config, VideoOptions(), CryptoOptions()); send_channel_->OnReadyToSend(true); send_channel_->SetSenderParameters(send_parameters_); @@ -3571,10 +3675,10 @@ TEST_F(WebRtcVideoChannelTest, SetMediaConfigSuspendBelowMinBitrate) { media_config.video.suspend_below_min_bitrate = false; send_channel_ = engine_.CreateSendChannel( - fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), + fake_call_.get(), media_config, VideoOptions(), CryptoOptions(), video_bitrate_allocator_factory_.get()); receive_channel_ = engine_.CreateReceiveChannel( - fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions()); + fake_call_.get(), media_config, VideoOptions(), CryptoOptions()); send_channel_->OnReadyToSend(true); send_channel_->SetSenderParameters(send_parameters_); @@ -3585,13 +3689,13 @@ TEST_F(WebRtcVideoChannelTest, SetMediaConfigSuspendBelowMinBitrate) { TEST_F(WebRtcVideoChannelTest, Vp8DenoisingEnabledByDefault) { FakeVideoSendStream* stream = AddSendStream(); - 
webrtc::VideoCodecVP8 vp8_settings; + VideoCodecVP8 vp8_settings; ASSERT_TRUE(stream->GetVp8Settings(&vp8_settings)) << "No VP8 config set."; EXPECT_TRUE(vp8_settings.denoisingOn); } TEST_F(WebRtcVideoChannelTest, VerifyVp8SpecificSettings) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); @@ -3600,14 +3704,14 @@ TEST_F(WebRtcVideoChannelTest, VerifyVp8SpecificSettings) { // both RTX and regular SSRCs). FakeVideoSendStream* stream = SetUpSimulcast(false, /*with_rtx=*/true); - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); - webrtc::VideoCodecVP8 vp8_settings; + VideoCodecVP8 vp8_settings; ASSERT_TRUE(stream->GetVp8Settings(&vp8_settings)) << "No VP8 config set."; EXPECT_TRUE(vp8_settings.denoisingOn) << "VP8 denoising should be on by default."; @@ -3664,14 +3768,49 @@ TEST_F(WebRtcVideoChannelTest, VerifyVp8SpecificSettings) { EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } +TEST_F(WebRtcVideoChannelTest, VerifyAv1SpecificSettings) { + VideoSenderParameters parameters; + parameters.codecs.push_back(GetEngineCodec("AV1")); + ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); + FrameForwarder frame_forwarder; + VideoCodecAV1 settings; + + // Single-stream settings should apply with RTX as well (verifies that we + // check number of regular SSRCs and not StreamParams::ssrcs which contains + // both RTX and regular SSRCs). + FakeVideoSendStream* stream = SetUpSimulcast(false, /*with_rtx=*/true); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); + send_channel_->SetSend(true); + frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); + + ASSERT_TRUE(stream->GetAv1Settings(&settings)) << "No AV1 config set."; + EXPECT_TRUE(settings.automatic_resize_on); + + RtpParameters rtp_parameters = + send_channel_->GetRtpSendParameters(last_ssrc_); + EXPECT_THAT(rtp_parameters.encodings, + ElementsAre(Field(&RtpEncodingParameters::scalability_mode, + std::nullopt))); + rtp_parameters.encodings[0].scalability_mode = "L2T3"; + EXPECT_TRUE( + send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters).ok()); + frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); + + ASSERT_TRUE(stream->GetAv1Settings(&settings)) << "No AV1 config set."; + EXPECT_FALSE(settings.automatic_resize_on); + + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); +} + // Test that setting the same options doesn't result in the encoder being // reconfigured. TEST_F(WebRtcVideoChannelTest, SetIdenticalOptionsDoesntReconfigureEncoder) { VideoOptions options; - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; AddSendStream(); - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front(); @@ -3684,27 +3823,60 @@ TEST_F(WebRtcVideoChannelTest, SetIdenticalOptionsDoesntReconfigureEncoder) { // Expect 1 reconfigurations at this point from the initial configuration. 
EXPECT_EQ(1, send_stream->num_encoder_reconfigurations()); - // Set the options one more time and expect no additional reconfigurations. - EXPECT_TRUE( - send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - EXPECT_EQ(1, send_stream->num_encoder_reconfigurations()); + FrameForwarder new_frame_forwarder; - // Change `options` and expect 2 reconfigurations. - options.video_noise_reduction = true; + // Set the options one more time but with a new source instance, expect + // one additional reconfiguration. EXPECT_TRUE( - send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetVideoSend(last_ssrc_, &options, &new_frame_forwarder)); + new_frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); EXPECT_EQ(2, send_stream->num_encoder_reconfigurations()); EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } -class Vp9SettingsTest : public WebRtcVideoChannelTest { - public: - Vp9SettingsTest() : Vp9SettingsTest("") {} - explicit Vp9SettingsTest(const char* field_trials) - : WebRtcVideoChannelTest(field_trials) { - encoder_factory_->AddSupportedVideoCodecType("VP9"); - } +// Test that if a new source is set, we reconfigure the encoder even if the +// same options are used. +TEST_F(WebRtcVideoChannelTest, + SetNewSourceWithIdenticalOptionsReconfiguresEncoder) { + VideoOptions options; + FrameForwarder frame_forwarder; + + AddSendStream(); + VideoSenderParameters parameters; + parameters.codecs.push_back(GetEngineCodec("VP8")); + ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); + FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front(); + + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); + // Expect 1 reconfigurations at this point from the initial configuration. + EXPECT_EQ(1, send_stream->num_encoder_reconfigurations()); + + // Set the options one more time and expect no additional reconfigurations. + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + EXPECT_EQ(1, send_stream->num_encoder_reconfigurations()); + + // Change `options` and expect 2 reconfigurations. 
+ options.video_noise_reduction = true; + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + EXPECT_EQ(2, send_stream->num_encoder_reconfigurations()); + + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); +} + +class Vp9SettingsTest : public WebRtcVideoChannelTest { + public: + Vp9SettingsTest() : Vp9SettingsTest("") {} + explicit Vp9SettingsTest(const char* field_trials) + : WebRtcVideoChannelTest(field_trials) { + encoder_factory_->AddSupportedVideoCodecType("VP9"); + } virtual ~Vp9SettingsTest() {} protected: @@ -3717,23 +3889,23 @@ class Vp9SettingsTest : public WebRtcVideoChannelTest { TEST_F(Vp9SettingsTest, VerifyVp9SpecificSettings) { encoder_factory_->AddSupportedVideoCodec( - webrtc::SdpVideoFormat("VP9", webrtc::SdpVideoFormat::Parameters(), - {ScalabilityMode::kL1T1, ScalabilityMode::kL2T1})); + SdpVideoFormat("VP9", CodecParameterMap(), + {ScalabilityMode::kL1T1, ScalabilityMode::kL2T1})); - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); FakeVideoSendStream* stream = SetUpSimulcast(false, /*with_rtx=*/false); - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); - webrtc::VideoCodecVP9 vp9_settings; + VideoCodecVP9 vp9_settings; ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set."; EXPECT_TRUE(vp9_settings.denoisingOn) << "VP9 denoising should be on by default."; @@ -3755,12 +3927,11 @@ TEST_F(Vp9SettingsTest, VerifyVp9SpecificSettings) { EXPECT_TRUE(stream->GetEncoderConfig().frame_drop_enabled); EXPECT_TRUE(vp9_settings.automaticResizeOn); - webrtc::RtpParameters rtp_parameters = + RtpParameters rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); - EXPECT_THAT( - rtp_parameters.encodings, - ElementsAre(Field(&webrtc::RtpEncodingParameters::scalability_mode, - absl::nullopt))); + EXPECT_THAT(rtp_parameters.encodings, + ElementsAre(Field(&RtpEncodingParameters::scalability_mode, + std::nullopt))); rtp_parameters.encodings[0].scalability_mode = "L2T1"; EXPECT_TRUE( send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters).ok()); @@ -3772,16 +3943,16 @@ TEST_F(Vp9SettingsTest, VerifyVp9SpecificSettings) { << "Automatic resize off for multiple spatial layers."; rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); - EXPECT_THAT(rtp_parameters.encodings, - ElementsAre(Field( - &webrtc::RtpEncodingParameters::scalability_mode, "L2T1"))); + EXPECT_THAT( + rtp_parameters.encodings, + ElementsAre(Field(&RtpEncodingParameters::scalability_mode, "L2T1"))); rtp_parameters.encodings[0].scalability_mode = "L1T1"; EXPECT_TRUE( send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters).ok()); rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); - EXPECT_THAT(rtp_parameters.encodings, - ElementsAre(Field( - &webrtc::RtpEncodingParameters::scalability_mode, "L1T1"))); + EXPECT_THAT( + rtp_parameters.encodings, + ElementsAre(Field(&RtpEncodingParameters::scalability_mode, "L1T1"))); ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set."; EXPECT_TRUE(vp9_settings.denoisingOn); @@ -3815,24 +3986,24 @@ TEST_F(Vp9SettingsTest, VerifyVp9SpecificSettings) { } TEST_F(Vp9SettingsTest, MultipleSsrcsEnablesSvc) { 
- cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); std::vector ssrcs = MAKE_VECTOR(kSsrcs3); FakeVideoSendStream* stream = - AddSendStream(CreateSimStreamParams("cname", ssrcs)); + AddSendStream(webrtc::CreateSimStreamParams("cname", ssrcs)); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + VideoSendStream::Config config = stream->GetConfig().Copy(); - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; EXPECT_TRUE(send_channel_->SetVideoSend(ssrcs[0], nullptr, &frame_forwarder)); send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); - webrtc::VideoCodecVP9 vp9_settings; + VideoCodecVP9 vp9_settings; ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set."; const size_t kNumSpatialLayers = ssrcs.size(); @@ -3844,27 +4015,27 @@ TEST_F(Vp9SettingsTest, MultipleSsrcsEnablesSvc) { } TEST_F(Vp9SettingsTest, SvcModeCreatesSingleRtpStream) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); std::vector ssrcs = MAKE_VECTOR(kSsrcs3); FakeVideoSendStream* stream = - AddSendStream(CreateSimStreamParams("cname", ssrcs)); + AddSendStream(webrtc::CreateSimStreamParams("cname", ssrcs)); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + VideoSendStream::Config config = stream->GetConfig().Copy(); // Despite 3 ssrcs provided, single layer is used. EXPECT_EQ(1u, config.rtp.ssrcs.size()); - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; EXPECT_TRUE(send_channel_->SetVideoSend(ssrcs[0], nullptr, &frame_forwarder)); send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); - webrtc::VideoCodecVP9 vp9_settings; + VideoCodecVP9 vp9_settings; ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set."; const size_t kNumSpatialLayers = ssrcs.size(); @@ -3874,7 +4045,7 @@ TEST_F(Vp9SettingsTest, SvcModeCreatesSingleRtpStream) { } TEST_F(Vp9SettingsTest, AllEncodingParametersCopied) { - cricket::VideoSenderParameters send_parameters; + VideoSenderParameters send_parameters; send_parameters.codecs.push_back(GetEngineCodec("VP9")); ASSERT_TRUE(send_channel_->SetSenderParameters(send_parameters)); @@ -3882,10 +4053,9 @@ TEST_F(Vp9SettingsTest, AllEncodingParametersCopied) { std::vector ssrcs = MAKE_VECTOR(kSsrcs3); FakeVideoSendStream* stream = - AddSendStream(CreateSimStreamParams("cname", ssrcs)); + AddSendStream(webrtc::CreateSimStreamParams("cname", ssrcs)); - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(ssrcs[0]); + RtpParameters parameters = send_channel_->GetRtpSendParameters(ssrcs[0]); ASSERT_EQ(kNumSpatialLayers, parameters.encodings.size()); ASSERT_TRUE(parameters.encodings[0].active); ASSERT_TRUE(parameters.encodings[1].active); @@ -3894,7 +4064,7 @@ TEST_F(Vp9SettingsTest, AllEncodingParametersCopied) { parameters.encodings[1].active = false; EXPECT_TRUE(send_channel_->SetRtpSendParameters(ssrcs[0], parameters).ok()); - webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); + VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); // number_of_streams should be 1 since all spatial layers are sent on the // same SSRC. 
But encoding parameters of all layers is supposed to be copied @@ -3907,27 +4077,27 @@ TEST_F(Vp9SettingsTest, AllEncodingParametersCopied) { } TEST_F(Vp9SettingsTest, MaxBitrateDeterminedBySvcResolutions) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); std::vector ssrcs = MAKE_VECTOR(kSsrcs3); FakeVideoSendStream* stream = - AddSendStream(CreateSimStreamParams("cname", ssrcs)); + AddSendStream(webrtc::CreateSimStreamParams("cname", ssrcs)); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + VideoSendStream::Config config = stream->GetConfig().Copy(); - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; EXPECT_TRUE(send_channel_->SetVideoSend(ssrcs[0], nullptr, &frame_forwarder)); send_channel_->SetSend(true); // Send frame at 1080p@30fps. - frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame( - 1920, 1080, webrtc::VideoRotation::kVideoRotation_0, - /*duration_us=*/33000)); + frame_forwarder.IncomingCapturedFrame( + frame_source_.GetFrame(1920, 1080, VideoRotation::kVideoRotation_0, + /*duration_us=*/33000)); - webrtc::VideoCodecVP9 vp9_settings; + VideoCodecVP9 vp9_settings; ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set."; const size_t kNumSpatialLayers = ssrcs.size(); @@ -3939,42 +4109,40 @@ TEST_F(Vp9SettingsTest, MaxBitrateDeterminedBySvcResolutions) { // VideoStream max bitrate should be more than legacy 2.5Mbps default stream // cap. - EXPECT_THAT( - stream->GetVideoStreams(), - ElementsAre(Field(&webrtc::VideoStream::max_bitrate_bps, Gt(2500000)))); + EXPECT_THAT(stream->GetVideoStreams(), + ElementsAre(Field(&VideoStream::max_bitrate_bps, Gt(2500000)))); // Update send parameters to 2Mbps, this should cap the max bitrate of the // stream. parameters.max_bandwidth_bps = 2000000; send_channel_->SetSenderParameters(parameters); - EXPECT_THAT( - stream->GetVideoStreams(), - ElementsAre(Field(&webrtc::VideoStream::max_bitrate_bps, Eq(2000000)))); + EXPECT_THAT(stream->GetVideoStreams(), + ElementsAre(Field(&VideoStream::max_bitrate_bps, Eq(2000000)))); } TEST_F(Vp9SettingsTest, Vp9SvcTargetBitrateCappedByMax) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); std::vector ssrcs = MAKE_VECTOR(kSsrcs3); FakeVideoSendStream* stream = - AddSendStream(CreateSimStreamParams("cname", ssrcs)); + AddSendStream(webrtc::CreateSimStreamParams("cname", ssrcs)); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + VideoSendStream::Config config = stream->GetConfig().Copy(); - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; EXPECT_TRUE(send_channel_->SetVideoSend(ssrcs[0], nullptr, &frame_forwarder)); send_channel_->SetSend(true); // Set up 3 spatial layers with 720p, which should result in a max bitrate of // 2084 kbps. 
frame_forwarder.IncomingCapturedFrame( - frame_source_.GetFrame(1280, 720, webrtc::VideoRotation::kVideoRotation_0, + frame_source_.GetFrame(1280, 720, VideoRotation::kVideoRotation_0, /*duration_us=*/33000)); - webrtc::VideoCodecVP9 vp9_settings; + VideoCodecVP9 vp9_settings; ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set."; const size_t kNumSpatialLayers = ssrcs.size(); @@ -3986,16 +4154,15 @@ TEST_F(Vp9SettingsTest, Vp9SvcTargetBitrateCappedByMax) { // VideoStream both min and max bitrate should be lower than legacy 2.5Mbps // default stream cap. - EXPECT_THAT( - stream->GetVideoStreams()[0], - AllOf(Field(&webrtc::VideoStream::max_bitrate_bps, Lt(2500000)), - Field(&webrtc::VideoStream::target_bitrate_bps, Lt(2500000)))); + EXPECT_THAT(stream->GetVideoStreams()[0], + AllOf(Field(&VideoStream::max_bitrate_bps, Lt(2500000)), + Field(&VideoStream::target_bitrate_bps, Lt(2500000)))); } class Vp9SettingsTestWithFieldTrial : public Vp9SettingsTest, public ::testing::WithParamInterface< - ::testing::tuple> { + ::testing::tuple> { protected: Vp9SettingsTestWithFieldTrial() : Vp9SettingsTest(::testing::get<0>(GetParam())), @@ -4005,21 +4172,21 @@ class Vp9SettingsTestWithFieldTrial void VerifySettings(int num_spatial_layers, int num_temporal_layers, - webrtc::InterLayerPredMode interLayerPred) { - cricket::VideoSenderParameters parameters; + InterLayerPredMode /* interLayerPred */) { + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP9")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); FakeVideoSendStream* stream = SetUpSimulcast(false, /*with_rtx=*/false); - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); send_channel_->SetSend(true); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); - webrtc::VideoCodecVP9 vp9_settings; + VideoCodecVP9 vp9_settings; ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)) << "No VP9 config set."; EXPECT_EQ(num_spatial_layers, vp9_settings.numberOfSpatialLayers); EXPECT_EQ(num_temporal_layers, vp9_settings.numberOfTemporalLayers); @@ -4030,7 +4197,7 @@ class Vp9SettingsTestWithFieldTrial const uint8_t num_spatial_layers_; const uint8_t num_temporal_layers_; - const webrtc::InterLayerPredMode inter_layer_pred_mode_; + const InterLayerPredMode inter_layer_pred_mode_; }; TEST_P(Vp9SettingsTestWithFieldTrial, VerifyCodecSettings) { @@ -4042,49 +4209,49 @@ INSTANTIATE_TEST_SUITE_P( All, Vp9SettingsTestWithFieldTrial, Values( - std::make_tuple("", 1, 1, webrtc::InterLayerPredMode::kOnKeyPic), + std::make_tuple("", 1, 1, InterLayerPredMode::kOnKeyPic), std::make_tuple("WebRTC-Vp9InterLayerPred/Default/", 1, 1, - webrtc::InterLayerPredMode::kOnKeyPic), + InterLayerPredMode::kOnKeyPic), std::make_tuple("WebRTC-Vp9InterLayerPred/Disabled/", 1, 1, - webrtc::InterLayerPredMode::kOnKeyPic), + InterLayerPredMode::kOnKeyPic), std::make_tuple( "WebRTC-Vp9InterLayerPred/Enabled,inter_layer_pred_mode:off/", 1, 1, - webrtc::InterLayerPredMode::kOff), + InterLayerPredMode::kOff), std::make_tuple( "WebRTC-Vp9InterLayerPred/Enabled,inter_layer_pred_mode:on/", 1, 1, - webrtc::InterLayerPredMode::kOn), + InterLayerPredMode::kOn), std::make_tuple( "WebRTC-Vp9InterLayerPred/Enabled,inter_layer_pred_mode:onkeypic/", 1, 1, - webrtc::InterLayerPredMode::kOnKeyPic))); + InterLayerPredMode::kOnKeyPic))); TEST_F(WebRtcVideoChannelTest, VerifyMinBitrate) { - std::vector streams = 
AddSendStream()->GetVideoStreams(); + std::vector streams = AddSendStream()->GetVideoStreams(); ASSERT_EQ(1u, streams.size()); EXPECT_EQ(webrtc::kDefaultMinVideoBitrateBps, streams[0].min_bitrate_bps); } TEST_F(WebRtcVideoChannelTest, VerifyMinBitrateWithForcedFallbackFieldTrial) { - webrtc::test::ScopedKeyValueConfig override_field_trials( + ScopedKeyValueConfig override_field_trials( field_trials_, "WebRTC-VP8-Forced-Fallback-Encoder-v2/Enabled-1,2,34567/"); - std::vector streams = AddSendStream()->GetVideoStreams(); + std::vector streams = AddSendStream()->GetVideoStreams(); ASSERT_EQ(1u, streams.size()); EXPECT_EQ(34567, streams[0].min_bitrate_bps); } TEST_F(WebRtcVideoChannelTest, BalancedDegradationPreferenceNotSupportedWithoutFieldtrial) { - webrtc::test::ScopedKeyValueConfig override_field_trials( + ScopedKeyValueConfig override_field_trials( field_trials_, "WebRTC-Video-BalancedDegradation/Disabled/"); const bool kResolutionScalingEnabled = true; const bool kFpsScalingEnabled = false; @@ -4093,7 +4260,7 @@ TEST_F(WebRtcVideoChannelTest, TEST_F(WebRtcVideoChannelTest, BalancedDegradationPreferenceSupportedBehindFieldtrial) { - webrtc::test::ScopedKeyValueConfig override_field_trials( + ScopedKeyValueConfig override_field_trials( field_trials_, "WebRTC-Video-BalancedDegradation/Enabled/"); const bool kResolutionScalingEnabled = true; const bool kFpsScalingEnabled = true; @@ -4117,26 +4284,26 @@ TEST_F(WebRtcVideoChannelTest, DoesNotAdaptOnOveruseWhenScreensharing) { } TEST_F(WebRtcVideoChannelTest, PreviousAdaptationDoesNotApplyToScreenshare) { - cricket::VideoCodec codec = GetEngineCodec("VP8"); - cricket::VideoSenderParameters parameters; + Codec codec = GetEngineCodec("VP8"); + VideoSenderParameters parameters; parameters.codecs.push_back(codec); MediaConfig media_config = GetMediaConfig(); media_config.video.enable_cpu_adaptation = true; send_channel_ = engine_.CreateSendChannel( - fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), + fake_call_.get(), media_config, VideoOptions(), CryptoOptions(), video_bitrate_allocator_factory_.get()); receive_channel_ = engine_.CreateReceiveChannel( - fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions()); + fake_call_.get(), media_config, VideoOptions(), CryptoOptions()); send_channel_->OnReadyToSend(true); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); AddSendStream(); - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; ASSERT_TRUE(send_channel_->SetSend(true)); - cricket::VideoOptions camera_options; + VideoOptions camera_options; camera_options.is_screencast = false; send_channel_->SetVideoSend(last_ssrc_, &camera_options, &frame_forwarder); @@ -4148,7 +4315,7 @@ TEST_F(WebRtcVideoChannelTest, PreviousAdaptationDoesNotApplyToScreenshare) { // transitioning from MAINTAIN_FRAMERATE to BALANCED. // Switch to screen share. Expect no resolution scaling. 
- cricket::VideoOptions screenshare_options; + VideoOptions screenshare_options; screenshare_options.is_screencast = true; send_channel_->SetVideoSend(last_ssrc_, &screenshare_options, &frame_forwarder); @@ -4171,24 +4338,24 @@ TEST_F(WebRtcVideoChannelTest, PreviousAdaptationDoesNotApplyToScreenshare) { void WebRtcVideoChannelTest::TestDegradationPreference( bool resolution_scaling_enabled, bool fps_scaling_enabled) { - cricket::VideoCodec codec = GetEngineCodec("VP8"); - cricket::VideoSenderParameters parameters; + Codec codec = GetEngineCodec("VP8"); + VideoSenderParameters parameters; parameters.codecs.push_back(codec); MediaConfig media_config = GetMediaConfig(); media_config.video.enable_cpu_adaptation = true; send_channel_ = engine_.CreateSendChannel( - fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), + fake_call_.get(), media_config, VideoOptions(), CryptoOptions(), video_bitrate_allocator_factory_.get()); receive_channel_ = engine_.CreateReceiveChannel( - fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions()); + fake_call_.get(), media_config, VideoOptions(), CryptoOptions()); send_channel_->OnReadyToSend(true); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); AddSendStream(); - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; VideoOptions options; EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); @@ -4205,8 +4372,8 @@ void WebRtcVideoChannelTest::TestDegradationPreference( void WebRtcVideoChannelTest::TestCpuAdaptation(bool enable_overuse, bool is_screenshare) { - cricket::VideoCodec codec = GetEngineCodec("VP8"); - cricket::VideoSenderParameters parameters; + Codec codec = GetEngineCodec("VP8"); + VideoSenderParameters parameters; parameters.codecs.push_back(codec); MediaConfig media_config = GetMediaConfig(); @@ -4214,17 +4381,17 @@ void WebRtcVideoChannelTest::TestCpuAdaptation(bool enable_overuse, media_config.video.enable_cpu_adaptation = true; } send_channel_ = engine_.CreateSendChannel( - fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions(), + fake_call_.get(), media_config, VideoOptions(), CryptoOptions(), video_bitrate_allocator_factory_.get()); receive_channel_ = engine_.CreateReceiveChannel( - fake_call_.get(), media_config, VideoOptions(), webrtc::CryptoOptions()); + fake_call_.get(), media_config, VideoOptions(), CryptoOptions()); send_channel_->OnReadyToSend(true); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); AddSendStream(); - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; VideoOptions options; options.is_screencast = is_screenshare; EXPECT_TRUE( @@ -4255,13 +4422,13 @@ TEST_F(WebRtcVideoChannelTest, EstimatesNtpStartTimeCorrectly) { EXPECT_TRUE(receive_channel_->SetReceiverParameters(recv_parameters_)); FakeVideoReceiveStream* stream = AddRecvStream(); - cricket::FakeVideoRenderer renderer; + FakeVideoRenderer renderer; EXPECT_TRUE(receive_channel_->SetSink(last_ssrc_, &renderer)); - webrtc::VideoFrame video_frame = - webrtc::VideoFrame::Builder() + VideoFrame video_frame = + VideoFrame::Builder() .set_video_frame_buffer(CreateBlackFrameBuffer(4, 4)) - .set_timestamp_rtp(kInitialTimestamp) + .set_rtp_timestamp(kInitialTimestamp) .set_timestamp_us(0) .set_rotation(webrtc::kVideoRotation_0) .build(); @@ -4274,16 +4441,16 @@ TEST_F(WebRtcVideoChannelTest, EstimatesNtpStartTimeCorrectly) { // This timestamp is kInitialTimestamp (-1) + kFrameOffsetMs * 90, which // triggers a 
constant-overflow warning, hence we're calculating it explicitly // here. - time_controller_.AdvanceTime(webrtc::TimeDelta::Millis(kFrameOffsetMs)); - video_frame.set_timestamp(kFrameOffsetMs * 90 - 1); + time_controller_.AdvanceTime(TimeDelta::Millis(kFrameOffsetMs)); + video_frame.set_rtp_timestamp(kFrameOffsetMs * 90 - 1); video_frame.set_ntp_time_ms(kInitialNtpTimeMs + kFrameOffsetMs); stream->InjectFrame(video_frame); EXPECT_EQ(2, renderer.num_rendered_frames()); // Verify that NTP time has been correctly deduced. - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -4296,16 +4463,16 @@ TEST_F(WebRtcVideoChannelTest, SetDefaultSendCodecs) { AssignDefaultAptRtxTypes(); ASSERT_TRUE(send_channel_->SetSenderParameters(send_parameters_)); - absl::optional codec = send_channel_->GetSendCodec(); + std::optional codec = send_channel_->GetSendCodec(); ASSERT_TRUE(codec); - EXPECT_TRUE(codec->Matches(engine_.send_codecs()[0], &field_trials_)); + EXPECT_TRUE(codec->Matches(engine_.LegacySendCodecs()[0])); // Using a RTX setup to verify that the default RTX payload type is good. const std::vector ssrcs = MAKE_VECTOR(kSsrcs1); const std::vector rtx_ssrcs = MAKE_VECTOR(kRtxSsrcs1); FakeVideoSendStream* stream = AddSendStream( - cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs)); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + webrtc::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs)); + VideoSendStream::Config config = stream->GetConfig().Copy(); // Make sure NACK and FEC are enabled on the correct payload types. EXPECT_EQ(1000, config.rtp.nack.rtp_history_ms); @@ -4319,23 +4486,23 @@ TEST_F(WebRtcVideoChannelTest, SetDefaultSendCodecs) { } TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithoutPacketization) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); - const webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + const VideoSendStream::Config config = stream->GetConfig().Copy(); EXPECT_FALSE(config.rtp.raw_payload); } TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithPacketization) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.back().packetization = kPacketizationParamRaw; EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); - const webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + const VideoSendStream::Config config = stream->GetConfig().Copy(); EXPECT_TRUE(config.rtp.raw_payload); } @@ -4345,7 +4512,7 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithPacketization) { // default. 
TEST_F(WebRtcVideoChannelTest, FlexfecSendCodecWithoutSsrcNotExposedByDefault) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + VideoSendStream::Config config = stream->GetConfig().Copy(); EXPECT_EQ(-1, config.rtp.flexfec.payload_type); EXPECT_EQ(0U, config.rtp.flexfec.ssrc); @@ -4353,9 +4520,10 @@ TEST_F(WebRtcVideoChannelTest, FlexfecSendCodecWithoutSsrcNotExposedByDefault) { } TEST_F(WebRtcVideoChannelTest, FlexfecSendCodecWithSsrcNotExposedByDefault) { - FakeVideoSendStream* stream = AddSendStream( - CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc)); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + FakeVideoSendStream* stream = + AddSendStream(webrtc::CreatePrimaryWithFecFrStreamParams( + "cname", kSsrcs1[0], kFlexfecSsrc)); + VideoSendStream::Config config = stream->GetConfig().Copy(); EXPECT_EQ(-1, config.rtp.flexfec.payload_type); EXPECT_EQ(0U, config.rtp.flexfec.ssrc); @@ -4371,8 +4539,8 @@ TEST_F(WebRtcVideoChannelTest, FlexfecRecvCodecWithoutSsrcNotExposedByDefault) { } TEST_F(WebRtcVideoChannelTest, FlexfecRecvCodecWithSsrcExposedByDefault) { - AddRecvStream( - CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc)); + AddRecvStream(webrtc::CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], + kFlexfecSsrc)); const std::vector& streams = fake_call_->GetFlexfecReceiveStreams(); @@ -4389,9 +4557,8 @@ class WebRtcVideoChannelFlexfecRecvTest : public WebRtcVideoChannelTest { }; TEST_F(WebRtcVideoChannelFlexfecRecvTest, - DefaultFlexfecCodecHasTransportCcAndRembFeedbackParam) { - EXPECT_TRUE(cricket::HasTransportCc(GetEngineCodec("flexfec-03"))); - EXPECT_TRUE(cricket::HasRemb(GetEngineCodec("flexfec-03"))); + DefaultFlexfecCodecHasRembFeedbackParam) { + EXPECT_TRUE(webrtc::HasRemb(GetEngineCodec("flexfec-03"))); } TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetDefaultRecvCodecsWithoutSsrc) { @@ -4405,21 +4572,21 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetDefaultRecvCodecsWithoutSsrc) { fake_call_->GetVideoReceiveStreams(); ASSERT_EQ(1U, video_streams.size()); const FakeVideoReceiveStream& video_stream = *video_streams.front(); - const webrtc::VideoReceiveStreamInterface::Config& video_config = + const VideoReceiveStreamInterface::Config& video_config = video_stream.GetConfig(); EXPECT_FALSE(video_config.rtp.protected_by_flexfec); EXPECT_EQ(video_config.rtp.packet_sink_, nullptr); } TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetDefaultRecvCodecsWithSsrc) { - AddRecvStream( - CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc)); + AddRecvStream(webrtc::CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], + kFlexfecSsrc)); const std::vector& streams = fake_call_->GetFlexfecReceiveStreams(); ASSERT_EQ(1U, streams.size()); const auto* stream = streams.front(); - const webrtc::FlexfecReceiveStream::Config& config = stream->GetConfig(); + const FlexfecReceiveStream::Config& config = stream->GetConfig(); EXPECT_EQ(GetEngineCodec("flexfec-03").id, config.payload_type); EXPECT_EQ(kFlexfecSsrc, config.rtp.remote_ssrc); ASSERT_EQ(1U, config.protected_media_ssrcs.size()); @@ -4429,7 +4596,7 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetDefaultRecvCodecsWithSsrc) { fake_call_->GetVideoReceiveStreams(); ASSERT_EQ(1U, video_streams.size()); const FakeVideoReceiveStream& video_stream = *video_streams.front(); - const webrtc::VideoReceiveStreamInterface::Config& video_config = + const VideoReceiveStreamInterface::Config& video_config = 
video_stream.GetConfig(); EXPECT_TRUE(video_config.rtp.protected_by_flexfec); EXPECT_NE(video_config.rtp.packet_sink_, nullptr); @@ -4441,18 +4608,18 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetDefaultRecvCodecsWithSsrc) { // existing video stream instance. TEST_F(WebRtcVideoChannelFlexfecRecvTest, EnablingFlexfecDoesNotRecreateVideoReceiveStream) { - cricket::VideoReceiverParameters recv_parameters; + VideoReceiverParameters recv_parameters; recv_parameters.codecs.push_back(GetEngineCodec("VP8")); ASSERT_TRUE(receive_channel_->SetReceiverParameters(recv_parameters)); - AddRecvStream( - CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc)); + AddRecvStream(webrtc::CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], + kFlexfecSsrc)); EXPECT_EQ(1, fake_call_->GetNumCreatedReceiveStreams()); const std::vector& video_streams = fake_call_->GetVideoReceiveStreams(); ASSERT_EQ(1U, video_streams.size()); const FakeVideoReceiveStream* video_stream = video_streams.front(); - const webrtc::VideoReceiveStreamInterface::Config* video_config = + const VideoReceiveStreamInterface::Config* video_config = &video_stream->GetConfig(); EXPECT_FALSE(video_config->rtp.protected_by_flexfec); EXPECT_EQ(video_config->rtp.packet_sink_, nullptr); @@ -4482,20 +4649,20 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, // stream will be set/cleared as dictated by the configuration change. TEST_F(WebRtcVideoChannelFlexfecRecvTest, DisablingFlexfecDoesNotRecreateVideoReceiveStream) { - cricket::VideoReceiverParameters recv_parameters; + VideoReceiverParameters recv_parameters; recv_parameters.codecs.push_back(GetEngineCodec("VP8")); recv_parameters.codecs.push_back(GetEngineCodec("flexfec-03")); ASSERT_TRUE(receive_channel_->SetReceiverParameters(recv_parameters)); - AddRecvStream( - CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc)); + AddRecvStream(webrtc::CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], + kFlexfecSsrc)); EXPECT_EQ(2, fake_call_->GetNumCreatedReceiveStreams()); EXPECT_EQ(1U, fake_call_->GetFlexfecReceiveStreams().size()); const std::vector& video_streams = fake_call_->GetVideoReceiveStreams(); ASSERT_EQ(1U, video_streams.size()); const FakeVideoReceiveStream* video_stream = video_streams.front(); - const webrtc::VideoReceiveStreamInterface::Config* video_config = + const VideoReceiveStreamInterface::Config* video_config = &video_stream->GetConfig(); EXPECT_TRUE(video_config->rtp.protected_by_flexfec); EXPECT_NE(video_config->rtp.packet_sink_, nullptr); @@ -4522,22 +4689,22 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, TEST_F(WebRtcVideoChannelFlexfecRecvTest, DuplicateFlexfecCodecIsDropped) { constexpr int kUnusedPayloadType1 = 127; - cricket::VideoReceiverParameters recv_parameters; + VideoReceiverParameters recv_parameters; recv_parameters.codecs.push_back(GetEngineCodec("VP8")); recv_parameters.codecs.push_back(GetEngineCodec("flexfec-03")); - cricket::VideoCodec duplicate = GetEngineCodec("flexfec-03"); + Codec duplicate = GetEngineCodec("flexfec-03"); duplicate.id = kUnusedPayloadType1; recv_parameters.codecs.push_back(duplicate); ASSERT_TRUE(receive_channel_->SetReceiverParameters(recv_parameters)); - AddRecvStream( - CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc)); + AddRecvStream(webrtc::CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], + kFlexfecSsrc)); const std::vector& streams = fake_call_->GetFlexfecReceiveStreams(); ASSERT_EQ(1U, streams.size()); const auto* stream = streams.front(); - const 
webrtc::FlexfecReceiveStream::Config& config = stream->GetConfig(); + const FlexfecReceiveStream::Config& config = stream->GetConfig(); EXPECT_EQ(GetEngineCodec("flexfec-03").id, config.payload_type); } @@ -4554,7 +4721,7 @@ class WebRtcVideoChannelFlexfecSendRecvTest : public WebRtcVideoChannelTest { TEST_F(WebRtcVideoChannelFlexfecSendRecvTest, SetDefaultSendCodecsWithoutSsrc) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + VideoSendStream::Config config = stream->GetConfig().Copy(); EXPECT_EQ(GetEngineCodec("flexfec-03").id, config.rtp.flexfec.payload_type); EXPECT_EQ(0U, config.rtp.flexfec.ssrc); @@ -4562,9 +4729,10 @@ TEST_F(WebRtcVideoChannelFlexfecSendRecvTest, SetDefaultSendCodecsWithoutSsrc) { } TEST_F(WebRtcVideoChannelFlexfecSendRecvTest, SetDefaultSendCodecsWithSsrc) { - FakeVideoSendStream* stream = AddSendStream( - CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc)); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + FakeVideoSendStream* stream = + AddSendStream(webrtc::CreatePrimaryWithFecFrStreamParams( + "cname", kSsrcs1[0], kFlexfecSsrc)); + VideoSendStream::Config config = stream->GetConfig().Copy(); EXPECT_EQ(GetEngineCodec("flexfec-03").id, config.rtp.flexfec.payload_type); EXPECT_EQ(kFlexfecSsrc, config.rtp.flexfec.ssrc); @@ -4573,33 +4741,33 @@ TEST_F(WebRtcVideoChannelFlexfecSendRecvTest, SetDefaultSendCodecsWithSsrc) { } TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithoutFec) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + VideoSendStream::Config config = stream->GetConfig().Copy(); EXPECT_EQ(-1, config.rtp.ulpfec.ulpfec_payload_type); EXPECT_EQ(-1, config.rtp.ulpfec.red_payload_type); } TEST_F(WebRtcVideoChannelFlexfecSendRecvTest, SetSendCodecsWithoutFec) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + VideoSendStream::Config config = stream->GetConfig().Copy(); EXPECT_EQ(-1, config.rtp.flexfec.payload_type); } TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetRecvCodecsWithFec) { - AddRecvStream( - CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc)); + AddRecvStream(webrtc::CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], + kFlexfecSsrc)); - cricket::VideoReceiverParameters recv_parameters; + VideoReceiverParameters recv_parameters; recv_parameters.codecs.push_back(GetEngineCodec("VP8")); recv_parameters.codecs.push_back(GetEngineCodec("flexfec-03")); ASSERT_TRUE(receive_channel_->SetReceiverParameters(recv_parameters)); @@ -4608,7 +4776,7 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetRecvCodecsWithFec) { fake_call_->GetFlexfecReceiveStreams(); ASSERT_EQ(1U, flexfec_streams.size()); const FakeFlexfecReceiveStream* flexfec_stream = flexfec_streams.front(); - const webrtc::FlexfecReceiveStream::Config& flexfec_stream_config = + const FlexfecReceiveStream::Config& flexfec_stream_config = flexfec_stream->GetConfig(); EXPECT_EQ(GetEngineCodec("flexfec-03").id, flexfec_stream_config.payload_type); @@ 
-4618,7 +4786,7 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetRecvCodecsWithFec) { const std::vector& video_streams = fake_call_->GetVideoReceiveStreams(); const FakeVideoReceiveStream* video_stream = video_streams.front(); - const webrtc::VideoReceiveStreamInterface::Config& video_stream_config = + const VideoReceiveStreamInterface::Config& video_stream_config = video_stream->GetConfig(); EXPECT_EQ(video_stream_config.rtp.local_ssrc, flexfec_stream_config.rtp.local_ssrc); @@ -4633,13 +4801,13 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetRecvCodecsWithFec) { // TODO(brandtr): Remove when FlexFEC is enabled by default. TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetSendCodecsWithoutSsrcWithFecDoesNotEnableFec) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("flexfec-03")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + VideoSendStream::Config config = stream->GetConfig().Copy(); EXPECT_EQ(-1, config.rtp.flexfec.payload_type); EXPECT_EQ(0u, config.rtp.flexfec.ssrc); @@ -4648,14 +4816,15 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetSendCodecsWithSsrcWithFecDoesNotEnableFec) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("flexfec-03")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); - FakeVideoSendStream* stream = AddSendStream( - CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc)); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + FakeVideoSendStream* stream = + AddSendStream(webrtc::CreatePrimaryWithFecFrStreamParams( + "cname", kSsrcs1[0], kFlexfecSsrc)); + VideoSendStream::Config config = stream->GetConfig().Copy(); EXPECT_EQ(-1, config.rtp.flexfec.payload_type); EXPECT_EQ(0u, config.rtp.flexfec.ssrc); @@ -4665,11 +4834,11 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, TEST_F(WebRtcVideoChannelTest, SetSendCodecRejectsRtxWithoutAssociatedPayloadType) { const int kUnusedPayloadType = 127; - EXPECT_FALSE(FindCodecById(engine_.send_codecs(), kUnusedPayloadType)); + EXPECT_FALSE( + webrtc::FindCodecById(engine_.LegacySendCodecs(), kUnusedPayloadType)); - cricket::VideoSenderParameters parameters; - cricket::VideoCodec rtx_codec = - cricket::CreateVideoCodec(kUnusedPayloadType, "rtx"); + VideoSenderParameters parameters; + Codec rtx_codec = webrtc::CreateVideoCodec(kUnusedPayloadType, "rtx"); parameters.codecs.push_back(rtx_codec); EXPECT_FALSE(send_channel_->SetSenderParameters(parameters)) << "RTX codec without associated payload type should be rejected."; @@ -4679,20 +4848,22 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecRejectsRtxWithoutMatchingVideoCodec) { const int kUnusedPayloadType1 = 126; const int kUnusedPayloadType2 = 127; - EXPECT_FALSE(FindCodecById(engine_.send_codecs(), kUnusedPayloadType1)); - EXPECT_FALSE(FindCodecById(engine_.send_codecs(), kUnusedPayloadType2)); + EXPECT_FALSE( + webrtc::FindCodecById(engine_.LegacySendCodecs(), kUnusedPayloadType1)); + EXPECT_FALSE( + webrtc::FindCodecById(engine_.LegacySendCodecs(), kUnusedPayloadType2)); { - cricket::VideoCodec rtx_codec = cricket::CreateVideoRtxCodec( - kUnusedPayloadType1, GetEngineCodec("VP8").id); - cricket::VideoSenderParameters 
parameters; + Codec rtx_codec = webrtc::CreateVideoRtxCodec(kUnusedPayloadType1, + GetEngineCodec("VP8").id); + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(rtx_codec); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); } { - cricket::VideoCodec rtx_codec = - cricket::CreateVideoRtxCodec(kUnusedPayloadType1, kUnusedPayloadType2); - cricket::VideoSenderParameters parameters; + Codec rtx_codec = + webrtc::CreateVideoRtxCodec(kUnusedPayloadType1, kUnusedPayloadType2); + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(rtx_codec); EXPECT_FALSE(send_channel_->SetSenderParameters(parameters)) @@ -4703,25 +4874,25 @@ TEST_F(WebRtcVideoChannelTest, TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithChangedRtxPayloadType) { const int kUnusedPayloadType1 = 126; const int kUnusedPayloadType2 = 127; - EXPECT_FALSE(FindCodecById(engine_.send_codecs(), kUnusedPayloadType1)); - EXPECT_FALSE(FindCodecById(engine_.send_codecs(), kUnusedPayloadType2)); + EXPECT_FALSE( + webrtc::FindCodecById(engine_.LegacySendCodecs(), kUnusedPayloadType1)); + EXPECT_FALSE( + webrtc::FindCodecById(engine_.LegacySendCodecs(), kUnusedPayloadType2)); // SSRCs for RTX. - cricket::StreamParams params = - cricket::StreamParams::CreateLegacy(kSsrcs1[0]); + StreamParams params = StreamParams::CreateLegacy(kSsrcs1[0]); params.AddFidSsrc(kSsrcs1[0], kRtxSsrcs1[0]); AddSendStream(params); // Original payload type for RTX. - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - cricket::VideoCodec rtx_codec = - cricket::CreateVideoCodec(kUnusedPayloadType1, "rtx"); + Codec rtx_codec = webrtc::CreateVideoCodec(kUnusedPayloadType1, "rtx"); rtx_codec.SetParam("apt", GetEngineCodec("VP8").id); parameters.codecs.push_back(rtx_codec); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); ASSERT_EQ(1U, fake_call_->GetVideoSendStreams().size()); - const webrtc::VideoSendStream::Config& config_before = + const VideoSendStream::Config& config_before = fake_call_->GetVideoSendStreams()[0]->GetConfig(); EXPECT_EQ(kUnusedPayloadType1, config_before.rtp.rtx.payload_type); ASSERT_EQ(1U, config_before.rtp.rtx.ssrcs.size()); @@ -4731,7 +4902,7 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithChangedRtxPayloadType) { parameters.codecs[1].id = kUnusedPayloadType2; EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); ASSERT_EQ(1U, fake_call_->GetVideoSendStreams().size()); - const webrtc::VideoSendStream::Config& config_after = + const VideoSendStream::Config& config_after = fake_call_->GetVideoSendStreams()[0]->GetConfig(); EXPECT_EQ(kUnusedPayloadType2, config_after.rtp.rtx.payload_type); ASSERT_EQ(1U, config_after.rtp.rtx.ssrcs.size()); @@ -4739,13 +4910,13 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithChangedRtxPayloadType) { } TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithoutFecDisablesFec) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("ulpfec")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Config config = stream->GetConfig().Copy(); + VideoSendStream::Config config = stream->GetConfig().Copy(); EXPECT_EQ(GetEngineCodec("ulpfec").id, config.rtp.ulpfec.ulpfec_payload_type); @@ -4760,14 
+4931,15 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithoutFecDisablesFec) {
 
 TEST_F(WebRtcVideoChannelFlexfecSendRecvTest,
        SetSendCodecsWithoutFecDisablesFec) {
-  cricket::VideoSenderParameters parameters;
+  VideoSenderParameters parameters;
   parameters.codecs.push_back(GetEngineCodec("VP8"));
   parameters.codecs.push_back(GetEngineCodec("flexfec-03"));
   ASSERT_TRUE(send_channel_->SetSenderParameters(parameters));
 
-  FakeVideoSendStream* stream = AddSendStream(
-      CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc));
-  webrtc::VideoSendStream::Config config = stream->GetConfig().Copy();
+  FakeVideoSendStream* stream =
+      AddSendStream(webrtc::CreatePrimaryWithFecFrStreamParams(
+          "cname", kSsrcs1[0], kFlexfecSsrc));
+  VideoSendStream::Config config = stream->GetConfig().Copy();
 
   EXPECT_EQ(GetEngineCodec("flexfec-03").id, config.rtp.flexfec.payload_type);
   EXPECT_EQ(kFlexfecSsrc, config.rtp.flexfec.ssrc);
@@ -4784,28 +4956,28 @@ TEST_F(WebRtcVideoChannelFlexfecSendRecvTest,
 }
 
 TEST_F(WebRtcVideoChannelTest, SetSendCodecsChangesExistingStreams) {
-  cricket::VideoSenderParameters parameters;
-  cricket::VideoCodec codec = cricket::CreateVideoCodec(100, "VP8");
-  codec.SetParam(kCodecParamMaxQuantization, kDefaultQpMax);
+  VideoSenderParameters parameters;
+  Codec codec = webrtc::CreateVideoCodec(100, "VP8");
+  codec.SetParam(kCodecParamMaxQuantization, kDefaultVideoMaxQpVpx);
   parameters.codecs.push_back(codec);
   ASSERT_TRUE(send_channel_->SetSenderParameters(parameters));
   send_channel_->SetSend(true);
 
   FakeVideoSendStream* stream = AddSendStream();
-  webrtc::test::FrameForwarder frame_forwarder;
+  FrameForwarder frame_forwarder;
   EXPECT_TRUE(
       send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder));
 
-  std::vector<webrtc::VideoStream> streams = stream->GetVideoStreams();
-  EXPECT_EQ(kDefaultQpMax, streams[0].max_qp);
+  std::vector<VideoStream> streams = stream->GetVideoStreams();
+  EXPECT_EQ(kDefaultVideoMaxQpVpx, streams[0].max_qp);
 
   parameters.codecs.clear();
-  codec.SetParam(kCodecParamMaxQuantization, kDefaultQpMax + 1);
+  codec.SetParam(kCodecParamMaxQuantization, kDefaultVideoMaxQpVpx + 1);
   parameters.codecs.push_back(codec);
   ASSERT_TRUE(send_channel_->SetSenderParameters(parameters));
   streams = fake_call_->GetVideoSendStreams()[0]->GetVideoStreams();
-  EXPECT_EQ(kDefaultQpMax + 1, streams[0].max_qp);
+  EXPECT_EQ(kDefaultVideoMaxQpVpx + 1, streams[0].max_qp);
   EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr));
 }
 
@@ -4816,7 +4988,7 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithBitrates) {
 
 TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithHighMaxBitrate) {
   SetSendCodecsShouldWorkForBitrates("", 0, "", -1, "10000", 10000000);
-  std::vector<webrtc::VideoStream> streams = AddSendStream()->GetVideoStreams();
+  std::vector<VideoStream> streams = AddSendStream()->GetVideoStreams();
   ASSERT_EQ(1u, streams.size());
   EXPECT_EQ(10000000, streams[0].max_bitrate_bps);
 }
@@ -4839,17 +5011,17 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsRejectsMaxLessThanMinBitrate) {
 TEST_F(WebRtcVideoChannelTest,
        SetSenderParametersRemovesSelectedCodecFromRtpParameters) {
   EXPECT_TRUE(AddSendStream());
-  cricket::VideoSenderParameters parameters;
-  parameters.codecs.push_back(cricket::CreateVideoCodec(100, "VP8"));
-  parameters.codecs.push_back(cricket::CreateVideoCodec(100, "VP9"));
+  VideoSenderParameters parameters;
+  parameters.codecs.push_back(webrtc::CreateVideoCodec(100, "VP8"));
+  parameters.codecs.push_back(webrtc::CreateVideoCodec(100, "VP9"));
   send_channel_->SetSenderParameters(parameters);
 
-  webrtc::RtpParameters initial_params =
+  RtpParameters initial_params =
       send_channel_->GetRtpSendParameters(last_ssrc_);
 
-  webrtc::RtpCodec vp9_rtp_codec;
+  RtpCodec vp9_rtp_codec;
   vp9_rtp_codec.name = "VP9";
-  vp9_rtp_codec.kind = cricket::MEDIA_TYPE_VIDEO;
+  vp9_rtp_codec.kind = MediaType::VIDEO;
   vp9_rtp_codec.clock_rate = 90000;
   initial_params.encodings[0].codec = vp9_rtp_codec;
 
@@ -4859,14 +5031,13 @@ TEST_F(WebRtcVideoChannelTest,
       send_channel_->SetRtpSendParameters(last_ssrc_, initial_params).ok());
 
   parameters.codecs.clear();
-  parameters.codecs.push_back(cricket::CreateVideoCodec(100, "VP8"));
+  parameters.codecs.push_back(webrtc::CreateVideoCodec(100, "VP8"));
   send_channel_->SetSenderParameters(parameters);
 
   // Since VP9 is no longer negotiated, the RTP parameters should not have a
   // forced codec anymore.
-  webrtc::RtpParameters new_params =
-      send_channel_->GetRtpSendParameters(last_ssrc_);
-  EXPECT_EQ(new_params.encodings[0].codec, absl::nullopt);
+  RtpParameters new_params = send_channel_->GetRtpSendParameters(last_ssrc_);
+  EXPECT_EQ(new_params.encodings[0].codec, std::nullopt);
 }
 
 // Test that when both the codec-specific bitrate params and max_bandwidth_bps
@@ -4984,8 +5155,7 @@ TEST_F(WebRtcVideoChannelTest,
   EXPECT_EQ(300000, video_send_stream->GetVideoStreams()[0].max_bitrate_bps);
 
   // The RtpParameter max bitrate overrides the codec's.
-  webrtc::RtpParameters parameters =
-      send_channel_->GetRtpSendParameters(last_ssrc_);
+  RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_);
   ASSERT_EQ(1u, parameters.encodings.size());
   parameters.encodings[0].max_bitrate_bps = 500000;
   EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok());
@@ -5005,8 +5175,7 @@ TEST_F(WebRtcVideoChannelTest,
             stream->GetVideoStreams()[0].max_bitrate_bps);
 
   // Get and set the rtp encoding parameters.
-  webrtc::RtpParameters parameters =
-      send_channel_->GetRtpSendParameters(last_ssrc_);
+  RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_);
   EXPECT_EQ(1u, parameters.encodings.size());
 
   parameters.encodings[0].max_bitrate_bps = 99999 - 1;
@@ -5021,18 +5190,18 @@ TEST_F(WebRtcVideoChannelTest,
 }
 
 TEST_F(WebRtcVideoChannelTest, SetMaxSendBitrateCanIncreaseSenderBitrate) {
-  cricket::VideoSenderParameters parameters;
+  VideoSenderParameters parameters;
   parameters.codecs.push_back(GetEngineCodec("VP8"));
   ASSERT_TRUE(send_channel_->SetSenderParameters(parameters));
   send_channel_->SetSend(true);
 
   FakeVideoSendStream* stream = AddSendStream();
-  webrtc::test::FrameForwarder frame_forwarder;
+  FrameForwarder frame_forwarder;
   EXPECT_TRUE(
       send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder));
 
-  std::vector<webrtc::VideoStream> streams = stream->GetVideoStreams();
+  std::vector<VideoStream> streams = stream->GetVideoStreams();
   int initial_max_bitrate_bps = streams[0].max_bitrate_bps;
   EXPECT_GT(initial_max_bitrate_bps, 0);
 
@@ -5047,24 +5216,24 @@ TEST_F(WebRtcVideoChannelTest, SetMaxSendBitrateCanIncreaseSenderBitrate) {
 
 TEST_F(WebRtcVideoChannelTest,
        SetMaxSendBitrateCanIncreaseSimulcastSenderBitrate) {
-  cricket::VideoSenderParameters parameters;
+  VideoSenderParameters parameters;
   parameters.codecs.push_back(GetEngineCodec("VP8"));
   ASSERT_TRUE(send_channel_->SetSenderParameters(parameters));
   send_channel_->SetSend(true);
 
   FakeVideoSendStream* stream = AddSendStream(
-      cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs3)));
+      webrtc::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs3)));
 
   // Send a frame to make sure this scales up to >1 stream (simulcast).
-  webrtc::test::FrameForwarder frame_forwarder;
+  FrameForwarder frame_forwarder;
   EXPECT_TRUE(
       send_channel_->SetVideoSend(kSsrcs3[0], nullptr, &frame_forwarder));
   frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame());
 
-  std::vector<webrtc::VideoStream> streams = stream->GetVideoStreams();
+  std::vector<VideoStream> streams = stream->GetVideoStreams();
   ASSERT_GT(streams.size(), 1u)
       << "Without simulcast this test doesn't make sense.";
-  int initial_max_bitrate_bps = GetTotalMaxBitrate(streams).bps();
+  int initial_max_bitrate_bps = webrtc::GetTotalMaxBitrate(streams).bps();
   EXPECT_GT(initial_max_bitrate_bps, 0);
 
   parameters.max_bandwidth_bps = initial_max_bitrate_bps * 2;
@@ -5072,7 +5241,7 @@ TEST_F(WebRtcVideoChannelTest,
   // Insert a frame to update the encoder config.
   frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame());
   streams = stream->GetVideoStreams();
-  int increased_max_bitrate_bps = GetTotalMaxBitrate(streams).bps();
+  int increased_max_bitrate_bps = webrtc::GetTotalMaxBitrate(streams).bps();
   EXPECT_EQ(initial_max_bitrate_bps * 2, increased_max_bitrate_bps);
 
   EXPECT_TRUE(send_channel_->SetVideoSend(kSsrcs3[0], nullptr, nullptr));
@@ -5080,14 +5249,14 @@ TEST_F(WebRtcVideoChannelTest,
 
 TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithMaxQuantization) {
   static const char* kMaxQuantization = "21";
-  cricket::VideoSenderParameters parameters;
+  VideoSenderParameters parameters;
   parameters.codecs.push_back(GetEngineCodec("VP8"));
   parameters.codecs[0].params[kCodecParamMaxQuantization] = kMaxQuantization;
   EXPECT_TRUE(send_channel_->SetSenderParameters(parameters));
   EXPECT_EQ(atoi(kMaxQuantization),
             AddSendStream()->GetVideoStreams().back().max_qp);
 
-  absl::optional<Codec> codec = send_channel_->GetSendCodec();
+  std::optional<Codec> codec = send_channel_->GetSendCodec();
   ASSERT_TRUE(codec);
   EXPECT_EQ(kMaxQuantization, codec->params[kCodecParamMaxQuantization]);
 }
@@ -5095,7 +5264,7 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsWithMaxQuantization) {
 TEST_F(WebRtcVideoChannelTest, SetSendCodecsRejectBadPayloadTypes) {
   // TODO(pbos): Should we only allow the dynamic range?
   static const int kIncorrectPayloads[] = {-2, -1, 128, 129};
-  cricket::VideoSenderParameters parameters;
+  VideoSenderParameters parameters;
   parameters.codecs.push_back(GetEngineCodec("VP8"));
   for (size_t i = 0; i < arraysize(kIncorrectPayloads); ++i) {
     parameters.codecs[0].id = kIncorrectPayloads[i];
@@ -5105,7 +5274,7 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsRejectBadPayloadTypes) {
 }
 
 TEST_F(WebRtcVideoChannelTest, SetSendCodecsAcceptAllValidPayloadTypes) {
-  cricket::VideoSenderParameters parameters;
+  VideoSenderParameters parameters;
   parameters.codecs.push_back(GetEngineCodec("VP8"));
   for (int payload_type = 96; payload_type <= 127; ++payload_type) {
     parameters.codecs[0].id = payload_type;
@@ -5120,7 +5289,7 @@ TEST_F(WebRtcVideoChannelTest, SetSendCodecsAcceptAllValidPayloadTypes) {
 // result of one of the codecs being rejected.
 TEST_F(WebRtcVideoChannelTest,
        SetSendCodecsIdenticalFirstCodecDoesntRecreateStream) {
-  cricket::VideoSenderParameters parameters1;
+  VideoSenderParameters parameters1;
   parameters1.codecs.push_back(GetEngineCodec("VP8"));
   parameters1.codecs.push_back(GetEngineCodec("VP9"));
   EXPECT_TRUE(send_channel_->SetSenderParameters(parameters1));
@@ -5128,14 +5297,14 @@ TEST_F(WebRtcVideoChannelTest,
   AddSendStream();
   EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams());
 
-  cricket::VideoSenderParameters parameters2;
+  VideoSenderParameters parameters2;
   parameters2.codecs.push_back(GetEngineCodec("VP8"));
   EXPECT_TRUE(send_channel_->SetSenderParameters(parameters2));
   EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams());
 }
 
 TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithOnlyVp8) {
-  cricket::VideoReceiverParameters parameters;
+  VideoReceiverParameters parameters;
   parameters.codecs.push_back(GetEngineCodec("VP8"));
   EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters));
 }
@@ -5144,13 +5313,14 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithOnlyVp8) {
 TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithRtx) {
   const int kUnusedPayloadType1 = 126;
   const int kUnusedPayloadType2 = 127;
-  EXPECT_FALSE(FindCodecById(engine_.recv_codecs(), kUnusedPayloadType1));
-  EXPECT_FALSE(FindCodecById(engine_.recv_codecs(), kUnusedPayloadType2));
+  EXPECT_FALSE(
+      webrtc::FindCodecById(engine_.LegacyRecvCodecs(), kUnusedPayloadType1));
+  EXPECT_FALSE(
+      webrtc::FindCodecById(engine_.LegacyRecvCodecs(), kUnusedPayloadType2));
 
-  cricket::VideoReceiverParameters parameters;
+  VideoReceiverParameters parameters;
   parameters.codecs.push_back(GetEngineCodec("VP8"));
-  cricket::VideoCodec rtx_codec =
-      cricket::CreateVideoCodec(kUnusedPayloadType1, "rtx");
+  Codec rtx_codec = webrtc::CreateVideoCodec(kUnusedPayloadType1, "rtx");
   parameters.codecs.push_back(rtx_codec);
   EXPECT_FALSE(receive_channel_->SetReceiverParameters(parameters))
       << "RTX codec without associated payload should be rejected.";
@@ -5162,8 +5332,7 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithRtx) {
   parameters.codecs[1].SetParam("apt", GetEngineCodec("VP8").id);
   EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters));
 
-  cricket::VideoCodec rtx_codec2 =
-      cricket::CreateVideoCodec(kUnusedPayloadType2, "rtx");
+  Codec rtx_codec2 = webrtc::CreateVideoCodec(kUnusedPayloadType2, "rtx");
   rtx_codec2.SetParam("apt", rtx_codec.id);
   parameters.codecs.push_back(rtx_codec2);
 
@@ -5173,32 +5342,30 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithRtx) {
 }
 
 TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithPacketization) {
-  cricket::VideoCodec vp8_codec = GetEngineCodec("VP8");
+  Codec vp8_codec = GetEngineCodec("VP8");
   vp8_codec.packetization = kPacketizationParamRaw;
 
-  cricket::VideoReceiverParameters parameters;
+  VideoReceiverParameters parameters;
   parameters.codecs = {vp8_codec, GetEngineCodec("VP9")};
   EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters));
 
-  const cricket::StreamParams params =
-      cricket::StreamParams::CreateLegacy(kSsrcs1[0]);
+  const StreamParams params = StreamParams::CreateLegacy(kSsrcs1[0]);
   AddRecvStream(params);
   ASSERT_THAT(fake_call_->GetVideoReceiveStreams(), testing::SizeIs(1));
 
-  const webrtc::VideoReceiveStreamInterface::Config& config =
+  const VideoReceiveStreamInterface::Config& config =
      fake_call_->GetVideoReceiveStreams()[0]->GetConfig();
   ASSERT_THAT(config.rtp.raw_payload_types, testing::SizeIs(1));
   EXPECT_EQ(config.rtp.raw_payload_types.count(vp8_codec.id), 1U);
 }
 
 TEST_F(WebRtcVideoChannelTest,
SetRecvCodecsWithPacketizationRecreatesStream) { - cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs = {GetEngineCodec("VP8"), GetEngineCodec("VP9")}; parameters.codecs.back().packetization = kPacketizationParamRaw; EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); - const cricket::StreamParams params = - cricket::StreamParams::CreateLegacy(kSsrcs1[0]); + const StreamParams params = StreamParams::CreateLegacy(kSsrcs1[0]); AddRecvStream(params); ASSERT_THAT(fake_call_->GetVideoReceiveStreams(), testing::SizeIs(1)); EXPECT_EQ(fake_call_->GetNumCreatedReceiveStreams(), 1); @@ -5212,12 +5379,12 @@ TEST_F(WebRtcVideoChannelTest, DuplicateUlpfecCodecIsDropped) { constexpr int kFirstUlpfecPayloadType = 126; constexpr int kSecondUlpfecPayloadType = 127; - cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - parameters.codecs.push_back(cricket::CreateVideoCodec( - kFirstUlpfecPayloadType, cricket::kUlpfecCodecName)); - parameters.codecs.push_back(cricket::CreateVideoCodec( - kSecondUlpfecPayloadType, cricket::kUlpfecCodecName)); + parameters.codecs.push_back( + webrtc::CreateVideoCodec(kFirstUlpfecPayloadType, kUlpfecCodecName)); + parameters.codecs.push_back( + webrtc::CreateVideoCodec(kSecondUlpfecPayloadType, kUlpfecCodecName)); ASSERT_TRUE(receive_channel_->SetReceiverParameters(parameters)); FakeVideoReceiveStream* recv_stream = AddRecvStream(); @@ -5229,12 +5396,12 @@ TEST_F(WebRtcVideoChannelTest, DuplicateRedCodecIsDropped) { constexpr int kFirstRedPayloadType = 126; constexpr int kSecondRedPayloadType = 127; - cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back( - cricket::CreateVideoCodec(kFirstRedPayloadType, cricket::kRedCodecName)); + webrtc::CreateVideoCodec(kFirstRedPayloadType, kRedCodecName)); parameters.codecs.push_back( - cricket::CreateVideoCodec(kSecondRedPayloadType, cricket::kRedCodecName)); + webrtc::CreateVideoCodec(kSecondRedPayloadType, kRedCodecName)); ASSERT_TRUE(receive_channel_->SetReceiverParameters(parameters)); FakeVideoReceiveStream* recv_stream = AddRecvStream(); @@ -5245,25 +5412,25 @@ TEST_F(WebRtcVideoChannelTest, DuplicateRedCodecIsDropped) { TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithChangedRtxPayloadType) { const int kUnusedPayloadType1 = 126; const int kUnusedPayloadType2 = 127; - EXPECT_FALSE(FindCodecById(engine_.recv_codecs(), kUnusedPayloadType1)); - EXPECT_FALSE(FindCodecById(engine_.recv_codecs(), kUnusedPayloadType2)); + EXPECT_FALSE( + webrtc::FindCodecById(engine_.LegacyRecvCodecs(), kUnusedPayloadType1)); + EXPECT_FALSE( + webrtc::FindCodecById(engine_.LegacyRecvCodecs(), kUnusedPayloadType2)); // SSRCs for RTX. - cricket::StreamParams params = - cricket::StreamParams::CreateLegacy(kSsrcs1[0]); + StreamParams params = StreamParams::CreateLegacy(kSsrcs1[0]); params.AddFidSsrc(kSsrcs1[0], kRtxSsrcs1[0]); AddRecvStream(params); // Original payload type for RTX. 
- cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - cricket::VideoCodec rtx_codec = - cricket::CreateVideoCodec(kUnusedPayloadType1, "rtx"); + Codec rtx_codec = webrtc::CreateVideoCodec(kUnusedPayloadType1, "rtx"); rtx_codec.SetParam("apt", GetEngineCodec("VP8").id); parameters.codecs.push_back(rtx_codec); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); ASSERT_EQ(1U, fake_call_->GetVideoReceiveStreams().size()); - const webrtc::VideoReceiveStreamInterface::Config& config_before = + const VideoReceiveStreamInterface::Config& config_before = fake_call_->GetVideoReceiveStreams()[0]->GetConfig(); EXPECT_EQ(1U, config_before.rtp.rtx_associated_payload_types.size()); const int* payload_type_before = FindKeyByValue( @@ -5276,7 +5443,7 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithChangedRtxPayloadType) { parameters.codecs[1].id = kUnusedPayloadType2; EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); ASSERT_EQ(1U, fake_call_->GetVideoReceiveStreams().size()); - const webrtc::VideoReceiveStreamInterface::Config& config_after = + const VideoReceiveStreamInterface::Config& config_after = fake_call_->GetVideoReceiveStreams()[0]->GetConfig(); EXPECT_EQ(1U, config_after.rtp.rtx_associated_payload_types.size()); const int* payload_type_after = FindKeyByValue( @@ -5289,25 +5456,25 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithChangedRtxPayloadType) { TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRtxWithRtxTime) { const int kUnusedPayloadType1 = 126; const int kUnusedPayloadType2 = 127; - EXPECT_FALSE(FindCodecById(engine_.recv_codecs(), kUnusedPayloadType1)); - EXPECT_FALSE(FindCodecById(engine_.recv_codecs(), kUnusedPayloadType2)); + EXPECT_FALSE( + webrtc::FindCodecById(engine_.LegacyRecvCodecs(), kUnusedPayloadType1)); + EXPECT_FALSE( + webrtc::FindCodecById(engine_.LegacyRecvCodecs(), kUnusedPayloadType2)); // SSRCs for RTX. - cricket::StreamParams params = - cricket::StreamParams::CreateLegacy(kSsrcs1[0]); + StreamParams params = StreamParams::CreateLegacy(kSsrcs1[0]); params.AddFidSsrc(kSsrcs1[0], kRtxSsrcs1[0]); AddRecvStream(params); // Payload type for RTX. 
- cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - cricket::VideoCodec rtx_codec = - cricket::CreateVideoCodec(kUnusedPayloadType1, "rtx"); + Codec rtx_codec = webrtc::CreateVideoCodec(kUnusedPayloadType1, "rtx"); rtx_codec.SetParam("apt", GetEngineCodec("VP8").id); parameters.codecs.push_back(rtx_codec); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); ASSERT_EQ(1U, fake_call_->GetVideoReceiveStreams().size()); - const webrtc::VideoReceiveStreamInterface::Config& config = + const VideoReceiveStreamInterface::Config& config = fake_call_->GetVideoReceiveStreams()[0]->GetConfig(); const int kRtxTime = 343; @@ -5358,41 +5525,40 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRtxWithRtxTime) { } TEST_F(WebRtcVideoChannelTest, SetRecvCodecsDifferentPayloadType) { - cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs[0].id = 99; EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); } TEST_F(WebRtcVideoChannelTest, SetRecvCodecsAcceptDefaultCodecs) { - cricket::VideoReceiverParameters parameters; - parameters.codecs = engine_.recv_codecs(); + VideoReceiverParameters parameters; + parameters.codecs = engine_.LegacyRecvCodecs(); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); FakeVideoReceiveStream* stream = AddRecvStream(); - const webrtc::VideoReceiveStreamInterface::Config& config = - stream->GetConfig(); - EXPECT_EQ(engine_.recv_codecs()[0].name, + const VideoReceiveStreamInterface::Config& config = stream->GetConfig(); + EXPECT_EQ(engine_.LegacyRecvCodecs()[0].name, config.decoders[0].video_format.name); - EXPECT_EQ(engine_.recv_codecs()[0].id, config.decoders[0].payload_type); + EXPECT_EQ(engine_.LegacyRecvCodecs()[0].id, config.decoders[0].payload_type); } TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRejectUnsupportedCodec) { - cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); - parameters.codecs.push_back(cricket::CreateVideoCodec(101, "WTF3")); + parameters.codecs.push_back(webrtc::CreateVideoCodec(101, "WTF3")); EXPECT_FALSE(receive_channel_->SetReceiverParameters(parameters)); } TEST_F(WebRtcVideoChannelTest, SetRecvCodecsAcceptsMultipleVideoCodecs) { - cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); } TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithoutFecDisablesFec) { - cricket::VideoSenderParameters send_parameters; + VideoSenderParameters send_parameters; send_parameters.codecs.push_back(GetEngineCodec("VP8")); send_parameters.codecs.push_back(GetEngineCodec("red")); send_parameters.codecs.push_back(GetEngineCodec("ulpfec")); @@ -5403,7 +5569,7 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithoutFecDisablesFec) { EXPECT_EQ(GetEngineCodec("ulpfec").id, stream->GetConfig().rtp.ulpfec_payload_type); - cricket::VideoReceiverParameters recv_parameters; + VideoReceiverParameters recv_parameters; recv_parameters.codecs.push_back(GetEngineCodec("VP8")); ASSERT_TRUE(receive_channel_->SetReceiverParameters(recv_parameters)); stream = fake_call_->GetVideoReceiveStreams()[0]; @@ -5413,8 +5579,8 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsWithoutFecDisablesFec) { } 
 TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetRecvParamsWithoutFecDisablesFec) {
-  AddRecvStream(
-      CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc));
+  AddRecvStream(webrtc::CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0],
+                                                           kFlexfecSsrc));
   const std::vector<FakeFlexfecReceiveStream*>& streams =
       fake_call_->GetFlexfecReceiveStreams();
 
@@ -5425,7 +5591,7 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, SetRecvParamsWithoutFecDisablesFec) {
   ASSERT_EQ(1U, stream->GetConfig().protected_media_ssrcs.size());
   EXPECT_EQ(kSsrcs1[0], stream->GetConfig().protected_media_ssrcs[0]);
 
-  cricket::VideoReceiverParameters recv_parameters;
+  VideoReceiverParameters recv_parameters;
   recv_parameters.codecs.push_back(GetEngineCodec("VP8"));
   ASSERT_TRUE(receive_channel_->SetReceiverParameters(recv_parameters));
   EXPECT_TRUE(streams.empty())
@@ -5437,7 +5603,7 @@ TEST_F(WebRtcVideoChannelTest, SetSendParamsWithFecEnablesFec) {
   EXPECT_EQ(GetEngineCodec("ulpfec").id,
             stream->GetConfig().rtp.ulpfec_payload_type);
 
-  cricket::VideoReceiverParameters recv_parameters;
+  VideoReceiverParameters recv_parameters;
   recv_parameters.codecs.push_back(GetEngineCodec("VP8"));
   recv_parameters.codecs.push_back(GetEngineCodec("red"));
   recv_parameters.codecs.push_back(GetEngineCodec("ulpfec"));
@@ -5448,7 +5614,7 @@ TEST_F(WebRtcVideoChannelTest, SetSendParamsWithFecEnablesFec) {
             stream->GetConfig().rtp.ulpfec_payload_type)
       << "ULPFEC should be enabled on the receive stream.";
 
-  cricket::VideoSenderParameters send_parameters;
+  VideoSenderParameters send_parameters;
   send_parameters.codecs.push_back(GetEngineCodec("VP8"));
   send_parameters.codecs.push_back(GetEngineCodec("red"));
   send_parameters.codecs.push_back(GetEngineCodec("ulpfec"));
@@ -5461,12 +5627,12 @@ TEST_F(WebRtcVideoChannelTest, SetSendParamsWithFecEnablesFec) {
 
 TEST_F(WebRtcVideoChannelFlexfecSendRecvTest,
        SetSendRecvParamsWithFecEnablesFec) {
-  AddRecvStream(
-      CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0], kFlexfecSsrc));
+  AddRecvStream(webrtc::CreatePrimaryWithFecFrStreamParams("cname", kSsrcs1[0],
+                                                           kFlexfecSsrc));
   const std::vector<FakeFlexfecReceiveStream*>& streams =
       fake_call_->GetFlexfecReceiveStreams();
 
-  cricket::VideoReceiverParameters recv_parameters;
+  VideoReceiverParameters recv_parameters;
   recv_parameters.codecs.push_back(GetEngineCodec("VP8"));
   recv_parameters.codecs.push_back(GetEngineCodec("flexfec-03"));
   ASSERT_TRUE(receive_channel_->SetReceiverParameters(recv_parameters));
@@ -5480,7 +5646,7 @@ TEST_F(WebRtcVideoChannelFlexfecSendRecvTest,
   EXPECT_EQ(kSsrcs1[0],
             stream_with_recv_params->GetConfig().protected_media_ssrcs[0]);
 
-  cricket::VideoSenderParameters send_parameters;
+  VideoSenderParameters send_parameters;
   send_parameters.codecs.push_back(GetEngineCodec("VP8"));
   send_parameters.codecs.push_back(GetEngineCodec("flexfec-03"));
   ASSERT_TRUE(send_channel_->SetSenderParameters(send_parameters));
@@ -5496,7 +5662,7 @@ TEST_F(WebRtcVideoChannelFlexfecSendRecvTest,
 }
 
 TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRejectDuplicateFecPayloads) {
-  cricket::VideoReceiverParameters parameters;
+  VideoReceiverParameters parameters;
   parameters.codecs.push_back(GetEngineCodec("VP8"));
   parameters.codecs.push_back(GetEngineCodec("red"));
   parameters.codecs[1].id = parameters.codecs[0].id;
@@ -5505,7 +5671,7 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRejectDuplicateFecPayloads) {
 
 TEST_F(WebRtcVideoChannelFlexfecRecvTest,
        SetRecvCodecsRejectDuplicateFecPayloads) {
-  cricket::VideoReceiverParameters parameters;
+  VideoReceiverParameters parameters;
parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("flexfec-03")); parameters.codecs[1].id = parameters.codecs[0].id; @@ -5513,7 +5679,7 @@ TEST_F(WebRtcVideoChannelFlexfecRecvTest, } TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRejectDuplicateCodecPayloads) { - cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); parameters.codecs[1].id = parameters.codecs[0].id; @@ -5522,7 +5688,7 @@ TEST_F(WebRtcVideoChannelTest, SetRecvCodecsRejectDuplicateCodecPayloads) { TEST_F(WebRtcVideoChannelTest, SetRecvCodecsAcceptSameCodecOnMultiplePayloadTypes) { - cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs[1].id += 1; @@ -5533,15 +5699,15 @@ TEST_F(WebRtcVideoChannelTest, // doesn't result in the stream being recreated. TEST_F(WebRtcVideoChannelTest, SetRecvCodecsDifferentOrderDoesntRecreateStream) { - cricket::VideoReceiverParameters parameters1; + VideoReceiverParameters parameters1; parameters1.codecs.push_back(GetEngineCodec("VP8")); parameters1.codecs.push_back(GetEngineCodec("red")); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters1)); - AddRecvStream(cricket::StreamParams::CreateLegacy(123)); + AddRecvStream(StreamParams::CreateLegacy(123)); EXPECT_EQ(1, fake_call_->GetNumCreatedReceiveStreams()); - cricket::VideoReceiverParameters parameters2; + VideoReceiverParameters parameters2; parameters2.codecs.push_back(GetEngineCodec("red")); parameters2.codecs.push_back(GetEngineCodec("VP8")); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters2)); @@ -5581,29 +5747,29 @@ TEST_F(WebRtcVideoChannelTest, SetSend) { // This test verifies DSCP settings are properly applied on video media channel. TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) { - std::unique_ptr network_interface( - new cricket::FakeNetworkInterface); + std::unique_ptr network_interface( + new FakeNetworkInterface); MediaConfig config; - std::unique_ptr send_channel; - webrtc::RtpParameters parameters; + std::unique_ptr send_channel; + RtpParameters parameters; send_channel = engine_.CreateSendChannel( - call_.get(), config, VideoOptions(), webrtc::CryptoOptions(), + call_.get(), config, VideoOptions(), CryptoOptions(), video_bitrate_allocator_factory_.get()); send_channel->SetInterface(network_interface.get()); // Default value when DSCP is disabled should be DSCP_DEFAULT. - EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp()); + EXPECT_EQ(DSCP_DEFAULT, network_interface->dscp()); send_channel->SetInterface(nullptr); // Default value when DSCP is enabled is also DSCP_DEFAULT, until it is set // through rtp parameters. 
config.enable_dscp = true; send_channel = engine_.CreateSendChannel( - call_.get(), config, VideoOptions(), webrtc::CryptoOptions(), + call_.get(), config, VideoOptions(), CryptoOptions(), video_bitrate_allocator_factory_.get()); send_channel->SetInterface(network_interface.get()); - EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp()); + EXPECT_EQ(DSCP_DEFAULT, network_interface->dscp()); // Create a send stream to configure EXPECT_TRUE(send_channel->AddSendStream(StreamParams::CreateLegacy(kSsrc))); @@ -5611,29 +5777,29 @@ TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) { ASSERT_FALSE(parameters.encodings.empty()); // Various priorities map to various dscp values. - parameters.encodings[0].network_priority = webrtc::Priority::kHigh; + parameters.encodings[0].network_priority = Priority::kHigh; ASSERT_TRUE( send_channel->SetRtpSendParameters(kSsrc, parameters, nullptr).ok()); - EXPECT_EQ(rtc::DSCP_AF41, network_interface->dscp()); - parameters.encodings[0].network_priority = webrtc::Priority::kVeryLow; + EXPECT_EQ(DSCP_AF41, network_interface->dscp()); + parameters.encodings[0].network_priority = Priority::kVeryLow; ASSERT_TRUE( send_channel->SetRtpSendParameters(kSsrc, parameters, nullptr).ok()); - EXPECT_EQ(rtc::DSCP_CS1, network_interface->dscp()); + EXPECT_EQ(DSCP_CS1, network_interface->dscp()); // Packets should also self-identify their dscp in PacketOptions. const uint8_t kData[10] = {0}; EXPECT_TRUE(ChannelImplAsTransport(send_channel.get())->SendRtcp(kData)); - EXPECT_EQ(rtc::DSCP_CS1, network_interface->options().dscp); + EXPECT_EQ(DSCP_CS1, network_interface->options().dscp); send_channel->SetInterface(nullptr); // Verify that setting the option to false resets the // DiffServCodePoint. config.enable_dscp = false; send_channel = engine_.CreateSendChannel( - call_.get(), config, VideoOptions(), webrtc::CryptoOptions(), + call_.get(), config, VideoOptions(), CryptoOptions(), video_bitrate_allocator_factory_.get()); send_channel->SetInterface(network_interface.get()); - EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface->dscp()); + EXPECT_EQ(DSCP_DEFAULT, network_interface->dscp()); send_channel->SetInterface(nullptr); } @@ -5642,8 +5808,8 @@ TEST_F(WebRtcVideoChannelTest, TestSetDscpOptions) { TEST_F(WebRtcVideoChannelTest, TestSetSendRtcpReducedSize) { // Create stream, expecting that default mode is "compound". FakeVideoSendStream* stream1 = AddSendStream(); - EXPECT_EQ(webrtc::RtcpMode::kCompound, stream1->GetConfig().rtp.rtcp_mode); - webrtc::RtpParameters rtp_parameters = + EXPECT_EQ(RtcpMode::kCompound, stream1->GetConfig().rtp.rtcp_mode); + RtpParameters rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_FALSE(rtp_parameters.rtcp.reduced_size); @@ -5651,13 +5817,13 @@ TEST_F(WebRtcVideoChannelTest, TestSetSendRtcpReducedSize) { send_parameters_.rtcp.reduced_size = true; EXPECT_TRUE(send_channel_->SetSenderParameters(send_parameters_)); stream1 = fake_call_->GetVideoSendStreams()[0]; - EXPECT_EQ(webrtc::RtcpMode::kReducedSize, stream1->GetConfig().rtp.rtcp_mode); + EXPECT_EQ(RtcpMode::kReducedSize, stream1->GetConfig().rtp.rtcp_mode); rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_TRUE(rtp_parameters.rtcp.reduced_size); // Create a new stream and ensure it picks up the reduced size mode. 
FakeVideoSendStream* stream2 = AddSendStream(); - EXPECT_EQ(webrtc::RtcpMode::kReducedSize, stream2->GetConfig().rtp.rtcp_mode); + EXPECT_EQ(RtcpMode::kReducedSize, stream2->GetConfig().rtp.rtcp_mode); } // This test verifies that the RTCP reduced size mode is properly applied to @@ -5665,7 +5831,7 @@ TEST_F(WebRtcVideoChannelTest, TestSetSendRtcpReducedSize) { TEST_F(WebRtcVideoChannelTest, TestSetRecvRtcpReducedSize) { // Create stream, expecting that default mode is "compound". FakeVideoReceiveStream* stream1 = AddRecvStream(); - EXPECT_EQ(webrtc::RtcpMode::kCompound, stream1->GetConfig().rtp.rtcp_mode); + EXPECT_EQ(RtcpMode::kCompound, stream1->GetConfig().rtp.rtcp_mode); // Now enable reduced size mode. // TODO(deadbeef): Once "recv_parameters" becomes "receiver_parameters", @@ -5673,41 +5839,36 @@ TEST_F(WebRtcVideoChannelTest, TestSetRecvRtcpReducedSize) { send_parameters_.rtcp.reduced_size = true; EXPECT_TRUE(send_channel_->SetSenderParameters(send_parameters_)); stream1 = fake_call_->GetVideoReceiveStreams()[0]; - EXPECT_EQ(webrtc::RtcpMode::kReducedSize, stream1->GetConfig().rtp.rtcp_mode); + EXPECT_EQ(RtcpMode::kReducedSize, stream1->GetConfig().rtp.rtcp_mode); // Create a new stream and ensure it picks up the reduced size mode. FakeVideoReceiveStream* stream2 = AddRecvStream(); - EXPECT_EQ(webrtc::RtcpMode::kReducedSize, stream2->GetConfig().rtp.rtcp_mode); + EXPECT_EQ(RtcpMode::kReducedSize, stream2->GetConfig().rtp.rtcp_mode); } TEST_F(WebRtcVideoChannelTest, OnReadyToSendSignalsNetworkState) { - EXPECT_EQ(webrtc::kNetworkUp, - fake_call_->GetNetworkState(webrtc::MediaType::VIDEO)); - EXPECT_EQ(webrtc::kNetworkUp, - fake_call_->GetNetworkState(webrtc::MediaType::AUDIO)); + EXPECT_EQ(webrtc::kNetworkUp, fake_call_->GetNetworkState(MediaType::VIDEO)); + EXPECT_EQ(webrtc::kNetworkUp, fake_call_->GetNetworkState(MediaType::AUDIO)); send_channel_->OnReadyToSend(false); EXPECT_EQ(webrtc::kNetworkDown, - fake_call_->GetNetworkState(webrtc::MediaType::VIDEO)); - EXPECT_EQ(webrtc::kNetworkUp, - fake_call_->GetNetworkState(webrtc::MediaType::AUDIO)); + fake_call_->GetNetworkState(MediaType::VIDEO)); + EXPECT_EQ(webrtc::kNetworkUp, fake_call_->GetNetworkState(MediaType::AUDIO)); send_channel_->OnReadyToSend(true); - EXPECT_EQ(webrtc::kNetworkUp, - fake_call_->GetNetworkState(webrtc::MediaType::VIDEO)); - EXPECT_EQ(webrtc::kNetworkUp, - fake_call_->GetNetworkState(webrtc::MediaType::AUDIO)); + EXPECT_EQ(webrtc::kNetworkUp, fake_call_->GetNetworkState(MediaType::VIDEO)); + EXPECT_EQ(webrtc::kNetworkUp, fake_call_->GetNetworkState(MediaType::AUDIO)); } TEST_F(WebRtcVideoChannelTest, GetStatsReportsSentCodecName) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); AddSendStream(); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -5716,12 +5877,12 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsSentCodecName) { TEST_F(WebRtcVideoChannelTest, GetStatsReportsEncoderImplementationName) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Stats stats; + VideoSendStream::Stats stats; stats.encoder_implementation_name = "encoder_implementation_name"; stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - 
cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -5731,12 +5892,12 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsEncoderImplementationName) { TEST_F(WebRtcVideoChannelTest, GetStatsReportsPowerEfficientEncoder) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Stats stats; + VideoSendStream::Stats stats; stats.power_efficient_encoder = true; stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -5745,13 +5906,13 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsPowerEfficientEncoder) { TEST_F(WebRtcVideoChannelTest, GetStatsReportsCpuOveruseMetrics) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Stats stats; + VideoSendStream::Stats stats; stats.avg_encode_time_ms = 13; stats.encode_usage_percent = 42; stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -5762,12 +5923,12 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsCpuOveruseMetrics) { TEST_F(WebRtcVideoChannelTest, GetStatsReportsFramesEncoded) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Stats stats; + VideoSendStream::Stats stats; stats.frames_encoded = 13; stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -5776,13 +5937,13 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsFramesEncoded) { TEST_F(WebRtcVideoChannelTest, GetStatsReportsKeyFramesEncoded) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Stats stats; + VideoSendStream::Stats stats; stats.substreams[123].frame_counts.key_frames = 10; stats.substreams[456].frame_counts.key_frames = 87; stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -5794,13 +5955,13 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsKeyFramesEncoded) { TEST_F(WebRtcVideoChannelTest, GetStatsReportsPerLayerQpSum) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Stats stats; + VideoSendStream::Stats stats; stats.substreams[123].qp_sum = 15; stats.substreams[456].qp_sum = 11; stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -5810,8 +5971,8 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsPerLayerQpSum) { EXPECT_EQ(*send_info.aggregated_senders[0].qp_sum, 26u); } -webrtc::VideoSendStream::Stats GetInitialisedStats() 
{ - webrtc::VideoSendStream::Stats stats; +VideoSendStream::Stats GetInitialisedStats() { + VideoSendStream::Stats stats; stats.encoder_implementation_name = "vp"; stats.input_frame_rate = 1.0; stats.encode_frame_rate = 2; @@ -5833,14 +5994,13 @@ webrtc::VideoSendStream::Stats GetInitialisedStats() { stats.bw_limited_framerate = true; // Not wired. stats.cpu_limited_framerate = true; - stats.quality_limitation_reason = webrtc::QualityLimitationReason::kCpu; - stats.quality_limitation_durations_ms[webrtc::QualityLimitationReason::kCpu] = - 15; + stats.quality_limitation_reason = QualityLimitationReason::kCpu; + stats.quality_limitation_durations_ms[QualityLimitationReason::kCpu] = 15; stats.quality_limitation_resolution_changes = 16; stats.number_of_cpu_adapt_changes = 17; stats.number_of_quality_adapt_changes = 18; stats.has_entered_low_resolution = true; - stats.content_type = webrtc::VideoContentType::SCREENSHARE; + stats.content_type = VideoContentType::SCREENSHARE; stats.frames_sent = 19; stats.huge_frames_sent = 20; @@ -5851,8 +6011,8 @@ TEST_F(WebRtcVideoChannelTest, GetAggregatedStatsReportWithoutSubStreams) { FakeVideoSendStream* stream = AddSendStream(); auto stats = GetInitialisedStats(); stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -5910,15 +6070,15 @@ TEST_F(WebRtcVideoChannelTest, GetAggregatedStatsReportWithoutSubStreams) { EXPECT_EQ(sender.total_encoded_bytes_target, stats.total_encoded_bytes_target); // Comes from substream only. - EXPECT_EQ(sender.total_packet_send_delay, webrtc::TimeDelta::Zero()); - EXPECT_EQ(sender.qp_sum, absl::nullopt); + EXPECT_EQ(sender.total_packet_send_delay, TimeDelta::Zero()); + EXPECT_EQ(sender.qp_sum, std::nullopt); EXPECT_EQ(sender.has_entered_low_resolution, stats.has_entered_low_resolution); - EXPECT_EQ(sender.content_type, webrtc::VideoContentType::SCREENSHARE); + EXPECT_EQ(sender.content_type, VideoContentType::SCREENSHARE); EXPECT_EQ(sender.frames_sent, stats.frames_encoded); EXPECT_EQ(sender.huge_frames_sent, stats.huge_frames_sent); - EXPECT_EQ(sender.rid, absl::nullopt); + EXPECT_EQ(sender.rid, std::nullopt); } TEST_F(WebRtcVideoChannelTest, GetAggregatedStatsReportForSubStreams) { @@ -5937,8 +6097,7 @@ TEST_F(WebRtcVideoChannelTest, GetAggregatedStatsReportForSubStreams) { substream.retransmit_bitrate_bps = 6; substream.avg_delay_ms = 7; substream.max_delay_ms = 8; - substream.rtp_stats.transmitted.total_packet_delay = - webrtc::TimeDelta::Millis(9); + substream.rtp_stats.transmitted.total_packet_delay = TimeDelta::Millis(9); substream.rtp_stats.transmitted.header_bytes = 10; substream.rtp_stats.transmitted.padding_bytes = 11; substream.rtp_stats.retransmitted.payload_bytes = 12; @@ -5946,12 +6105,13 @@ TEST_F(WebRtcVideoChannelTest, GetAggregatedStatsReportForSubStreams) { substream.rtcp_packet_type_counts.fir_packets = 14; substream.rtcp_packet_type_counts.nack_packets = 15; substream.rtcp_packet_type_counts.pli_packets = 16; - webrtc::rtcp::ReportBlock report_block; + rtcp::ReportBlock report_block; report_block.SetCumulativeLost(17); report_block.SetFractionLost(18); - webrtc::ReportBlockData report_block_data; - report_block_data.SetReportBlock(0, report_block, webrtc::Timestamp::Zero()); - report_block_data.AddRoundTripTimeSample(webrtc::TimeDelta::Millis(19)); + ReportBlockData 
report_block_data; + report_block_data.SetReportBlock(0, report_block, Timestamp::Zero(), + Timestamp::Zero()); + report_block_data.AddRoundTripTimeSample(TimeDelta::Millis(19)); substream.report_block_data = report_block_data; substream.encode_frame_rate = 20.0; substream.frames_encoded = 21; @@ -5964,8 +6124,8 @@ TEST_F(WebRtcVideoChannelTest, GetAggregatedStatsReportForSubStreams) { stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6039,10 +6199,10 @@ TEST_F(WebRtcVideoChannelTest, GetAggregatedStatsReportForSubStreams) { EXPECT_EQ(sender.has_entered_low_resolution, stats.has_entered_low_resolution); EXPECT_EQ(sender.qp_sum, 2u * *substream.qp_sum); - EXPECT_EQ(sender.content_type, webrtc::VideoContentType::SCREENSHARE); + EXPECT_EQ(sender.content_type, VideoContentType::SCREENSHARE); EXPECT_EQ(sender.frames_sent, 2u * substream.frames_encoded); EXPECT_EQ(sender.huge_frames_sent, stats.huge_frames_sent); - EXPECT_EQ(sender.rid, absl::nullopt); + EXPECT_EQ(sender.rid, std::nullopt); } TEST_F(WebRtcVideoChannelTest, GetPerLayerStatsReportForSubStreams) { @@ -6061,8 +6221,7 @@ TEST_F(WebRtcVideoChannelTest, GetPerLayerStatsReportForSubStreams) { substream.retransmit_bitrate_bps = 6; substream.avg_delay_ms = 7; substream.max_delay_ms = 8; - substream.rtp_stats.transmitted.total_packet_delay = - webrtc::TimeDelta::Millis(9); + substream.rtp_stats.transmitted.total_packet_delay = TimeDelta::Millis(9); substream.rtp_stats.transmitted.header_bytes = 10; substream.rtp_stats.transmitted.padding_bytes = 11; substream.rtp_stats.retransmitted.payload_bytes = 12; @@ -6070,12 +6229,13 @@ TEST_F(WebRtcVideoChannelTest, GetPerLayerStatsReportForSubStreams) { substream.rtcp_packet_type_counts.fir_packets = 14; substream.rtcp_packet_type_counts.nack_packets = 15; substream.rtcp_packet_type_counts.pli_packets = 16; - webrtc::rtcp::ReportBlock report_block; + rtcp::ReportBlock report_block; report_block.SetCumulativeLost(17); report_block.SetFractionLost(18); - webrtc::ReportBlockData report_block_data; - report_block_data.SetReportBlock(0, report_block, webrtc::Timestamp::Zero()); - report_block_data.AddRoundTripTimeSample(webrtc::TimeDelta::Millis(19)); + ReportBlockData report_block_data; + report_block_data.SetReportBlock(0, report_block, Timestamp::Zero(), + Timestamp::Zero()); + report_block_data.AddRoundTripTimeSample(TimeDelta::Millis(19)); substream.report_block_data = report_block_data; substream.encode_frame_rate = 20.0; substream.frames_encoded = 21; @@ -6088,8 +6248,8 @@ TEST_F(WebRtcVideoChannelTest, GetPerLayerStatsReportForSubStreams) { stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6163,11 +6323,11 @@ TEST_F(WebRtcVideoChannelTest, GetPerLayerStatsReportForSubStreams) { EXPECT_EQ(sender.has_entered_low_resolution, stats.has_entered_low_resolution); EXPECT_EQ(sender.qp_sum, *substream.qp_sum); - EXPECT_EQ(sender.content_type, webrtc::VideoContentType::SCREENSHARE); + EXPECT_EQ(sender.content_type, VideoContentType::SCREENSHARE); EXPECT_EQ(sender.frames_sent, static_cast(substream.frames_encoded)); 
EXPECT_EQ(sender.huge_frames_sent, substream.huge_frames_sent); - EXPECT_EQ(sender.rid, absl::nullopt); + EXPECT_EQ(sender.rid, std::nullopt); } TEST_F(WebRtcVideoChannelTest, @@ -6178,10 +6338,9 @@ TEST_F(WebRtcVideoChannelTest, // Create simulcast stream from both SSRCs. // `kSsrc1` is the "main" ssrc used for getting parameters. FakeVideoSendStream* stream = - AddSendStream(cricket::CreateSimStreamParams("cname", {kSsrc1, kSsrc2})); + AddSendStream(webrtc::CreateSimStreamParams("cname", {kSsrc1, kSsrc2})); - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(kSsrc1); + RtpParameters parameters = send_channel_->GetRtpSendParameters(kSsrc1); ASSERT_EQ(2u, parameters.encodings.size()); parameters.encodings[0].active = false; parameters.encodings[1].active = true; @@ -6194,8 +6353,8 @@ TEST_F(WebRtcVideoChannelTest, stream->SetStats(stats); // GetStats() and ensure `active` matches `encodings` for each SSRC. - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6207,7 +6366,7 @@ TEST_F(WebRtcVideoChannelTest, } TEST_F(WebRtcVideoChannelTest, OutboundRtpIsActiveComesFromAnyEncodingInSvc) { - cricket::VideoSenderParameters send_parameters; + VideoSenderParameters send_parameters; send_parameters.codecs.push_back(GetEngineCodec("VP9")); ASSERT_TRUE(send_channel_->SetSenderParameters(send_parameters)); @@ -6219,15 +6378,14 @@ TEST_F(WebRtcVideoChannelTest, OutboundRtpIsActiveComesFromAnyEncodingInSvc) { // difference is that the VP9 codec is used. This triggers special hacks that // we depend on because we don't have a proper SVC API yet. FakeVideoSendStream* stream = AddSendStream( - cricket::CreateSimStreamParams("cname", {kSsrc1, kSsrc2, kSsrc3})); + webrtc::CreateSimStreamParams("cname", {kSsrc1, kSsrc2, kSsrc3})); // Expect that we got SVC. EXPECT_EQ(stream->GetEncoderConfig().number_of_streams, 1u); - webrtc::VideoCodecVP9 vp9_settings; + VideoCodecVP9 vp9_settings; ASSERT_TRUE(stream->GetVp9Settings(&vp9_settings)); EXPECT_EQ(vp9_settings.numberOfSpatialLayers, 3u); - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(kSsrc1); + RtpParameters parameters = send_channel_->GetRtpSendParameters(kSsrc1); ASSERT_EQ(3u, parameters.encodings.size()); parameters.encodings[0].active = false; parameters.encodings[1].active = true; @@ -6240,8 +6398,8 @@ TEST_F(WebRtcVideoChannelTest, OutboundRtpIsActiveComesFromAnyEncodingInSvc) { stream->SetStats(stats); // GetStats() and ensure `active` is true if ANY encoding is active. - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6277,12 +6435,12 @@ TEST_F(WebRtcVideoChannelTest, MediaSubstreamMissingProducesEmpyStats) { // Covers https://crbug.com/1090712. 
auto stats = GetInitialisedStats(); auto& substream = stats.substreams[kRtxSsrc]; - substream.type = webrtc::VideoSendStream::StreamStats::StreamType::kRtx; + substream.type = VideoSendStream::StreamStats::StreamType::kRtx; substream.referenced_media_ssrc = kMissingMediaSsrc; stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6291,7 +6449,7 @@ TEST_F(WebRtcVideoChannelTest, MediaSubstreamMissingProducesEmpyStats) { TEST_F(WebRtcVideoChannelTest, GetStatsReportsUpperResolution) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Stats stats; + VideoSendStream::Stats stats; stats.substreams[17].width = 123; stats.substreams[17].height = 40; stats.substreams[42].width = 80; @@ -6300,8 +6458,8 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsUpperResolution) { stats.substreams[11].height = 90; stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6319,13 +6477,13 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsUpperResolution) { TEST_F(WebRtcVideoChannelTest, GetStatsReportsCpuAdaptationStats) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Stats stats; + VideoSendStream::Stats stats; stats.number_of_cpu_adapt_changes = 2; stats.cpu_limited_resolution = true; stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6338,14 +6496,14 @@ TEST_F(WebRtcVideoChannelTest, GetStatsReportsCpuAdaptationStats) { TEST_F(WebRtcVideoChannelTest, GetStatsReportsAdaptationAndBandwidthStats) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Stats stats; + VideoSendStream::Stats stats; stats.number_of_cpu_adapt_changes = 2; stats.cpu_limited_resolution = true; stats.bw_limited_resolution = true; stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6361,11 +6519,10 @@ TEST(WebRtcVideoChannelHelperTest, MergeInfoAboutOutboundRtpSubstreams) { const uint32_t kFirstMediaStreamSsrc = 10; const uint32_t kSecondMediaStreamSsrc = 20; const uint32_t kRtxSsrc = 30; - const uint32_t kFlexfecSsrc = 40; - std::map substreams; + std::map substreams; // First kMedia stream. 
substreams[kFirstMediaStreamSsrc].type = - webrtc::VideoSendStream::StreamStats::StreamType::kMedia; + VideoSendStream::StreamStats::StreamType::kMedia; substreams[kFirstMediaStreamSsrc].rtp_stats.transmitted.header_bytes = 1; substreams[kFirstMediaStreamSsrc].rtp_stats.transmitted.padding_bytes = 2; substreams[kFirstMediaStreamSsrc].rtp_stats.transmitted.payload_bytes = 3; @@ -6374,12 +6531,12 @@ TEST(WebRtcVideoChannelHelperTest, MergeInfoAboutOutboundRtpSubstreams) { substreams[kFirstMediaStreamSsrc].rtp_stats.retransmitted.padding_bytes = 6; substreams[kFirstMediaStreamSsrc].rtp_stats.retransmitted.payload_bytes = 7; substreams[kFirstMediaStreamSsrc].rtp_stats.retransmitted.packets = 8; - substreams[kFirstMediaStreamSsrc].referenced_media_ssrc = absl::nullopt; + substreams[kFirstMediaStreamSsrc].referenced_media_ssrc = std::nullopt; substreams[kFirstMediaStreamSsrc].width = 1280; substreams[kFirstMediaStreamSsrc].height = 720; // Second kMedia stream. substreams[kSecondMediaStreamSsrc].type = - webrtc::VideoSendStream::StreamStats::StreamType::kMedia; + VideoSendStream::StreamStats::StreamType::kMedia; substreams[kSecondMediaStreamSsrc].rtp_stats.transmitted.header_bytes = 10; substreams[kSecondMediaStreamSsrc].rtp_stats.transmitted.padding_bytes = 11; substreams[kSecondMediaStreamSsrc].rtp_stats.transmitted.payload_bytes = 12; @@ -6388,12 +6545,11 @@ TEST(WebRtcVideoChannelHelperTest, MergeInfoAboutOutboundRtpSubstreams) { substreams[kSecondMediaStreamSsrc].rtp_stats.retransmitted.padding_bytes = 15; substreams[kSecondMediaStreamSsrc].rtp_stats.retransmitted.payload_bytes = 16; substreams[kSecondMediaStreamSsrc].rtp_stats.retransmitted.packets = 17; - substreams[kSecondMediaStreamSsrc].referenced_media_ssrc = absl::nullopt; + substreams[kSecondMediaStreamSsrc].referenced_media_ssrc = std::nullopt; substreams[kSecondMediaStreamSsrc].width = 640; substreams[kSecondMediaStreamSsrc].height = 480; // kRtx stream referencing the first kMedia stream. - substreams[kRtxSsrc].type = - webrtc::VideoSendStream::StreamStats::StreamType::kRtx; + substreams[kRtxSsrc].type = VideoSendStream::StreamStats::StreamType::kRtx; substreams[kRtxSsrc].rtp_stats.transmitted.header_bytes = 19; substreams[kRtxSsrc].rtp_stats.transmitted.padding_bytes = 20; substreams[kRtxSsrc].rtp_stats.transmitted.payload_bytes = 21; @@ -6405,7 +6561,7 @@ TEST(WebRtcVideoChannelHelperTest, MergeInfoAboutOutboundRtpSubstreams) { substreams[kRtxSsrc].referenced_media_ssrc = kFirstMediaStreamSsrc; // kFlexfec stream referencing the second kMedia stream. 
substreams[kFlexfecSsrc].type = - webrtc::VideoSendStream::StreamStats::StreamType::kFlexfec; + VideoSendStream::StreamStats::StreamType::kFlexfec; substreams[kFlexfecSsrc].rtp_stats.transmitted.header_bytes = 19; substreams[kFlexfecSsrc].rtp_stats.transmitted.padding_bytes = 20; substreams[kFlexfecSsrc].rtp_stats.transmitted.payload_bytes = 21; @@ -6422,15 +6578,15 @@ TEST(WebRtcVideoChannelHelperTest, MergeInfoAboutOutboundRtpSubstreams) { EXPECT_TRUE(merged_substreams.find(kFirstMediaStreamSsrc) != merged_substreams.end()); EXPECT_EQ(merged_substreams[kFirstMediaStreamSsrc].type, - webrtc::VideoSendStream::StreamStats::StreamType::kMedia); + VideoSendStream::StreamStats::StreamType::kMedia); EXPECT_TRUE(merged_substreams.find(kSecondMediaStreamSsrc) != merged_substreams.end()); EXPECT_EQ(merged_substreams[kSecondMediaStreamSsrc].type, - webrtc::VideoSendStream::StreamStats::StreamType::kMedia); + VideoSendStream::StreamStats::StreamType::kMedia); EXPECT_FALSE(merged_substreams.find(kRtxSsrc) != merged_substreams.end()); EXPECT_FALSE(merged_substreams.find(kFlexfecSsrc) != merged_substreams.end()); // Expect kFirstMediaStreamSsrc's rtp_stats to be merged with kRtxSsrc. - webrtc::StreamDataCounters first_media_expected_rtp_stats = + StreamDataCounters first_media_expected_rtp_stats = substreams[kFirstMediaStreamSsrc].rtp_stats; first_media_expected_rtp_stats.Add(substreams[kRtxSsrc].rtp_stats); EXPECT_EQ(merged_substreams[kFirstMediaStreamSsrc].rtp_stats.transmitted, @@ -6438,7 +6594,7 @@ TEST(WebRtcVideoChannelHelperTest, MergeInfoAboutOutboundRtpSubstreams) { EXPECT_EQ(merged_substreams[kFirstMediaStreamSsrc].rtp_stats.retransmitted, first_media_expected_rtp_stats.retransmitted); // Expect kSecondMediaStreamSsrc' rtp_stats to be merged with kFlexfecSsrc. - webrtc::StreamDataCounters second_media_expected_rtp_stats = + StreamDataCounters second_media_expected_rtp_stats = substreams[kSecondMediaStreamSsrc].rtp_stats; second_media_expected_rtp_stats.Add(substreams[kFlexfecSsrc].rtp_stats); EXPECT_EQ(merged_substreams[kSecondMediaStreamSsrc].rtp_stats.transmitted, @@ -6459,10 +6615,9 @@ TEST(WebRtcVideoChannelHelperTest, MergeInfoAboutOutboundRtpSubstreams) { TEST_F(WebRtcVideoChannelTest, GetStatsReportsTransmittedAndRetransmittedBytesAndPacketsCorrectly) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Stats stats; + VideoSendStream::Stats stats; // Simulcast layer 1, RTP stream. header+padding=10, payload=20, packets=3. - stats.substreams[101].type = - webrtc::VideoSendStream::StreamStats::StreamType::kMedia; + stats.substreams[101].type = VideoSendStream::StreamStats::StreamType::kMedia; stats.substreams[101].rtp_stats.transmitted.header_bytes = 5; stats.substreams[101].rtp_stats.transmitted.padding_bytes = 5; stats.substreams[101].rtp_stats.transmitted.payload_bytes = 20; @@ -6471,10 +6626,9 @@ TEST_F(WebRtcVideoChannelTest, stats.substreams[101].rtp_stats.retransmitted.padding_bytes = 0; stats.substreams[101].rtp_stats.retransmitted.payload_bytes = 0; stats.substreams[101].rtp_stats.retransmitted.packets = 0; - stats.substreams[101].referenced_media_ssrc = absl::nullopt; + stats.substreams[101].referenced_media_ssrc = std::nullopt; // Simulcast layer 1, RTX stream. header+padding=5, payload=10, packets=1. 
- stats.substreams[102].type = - webrtc::VideoSendStream::StreamStats::StreamType::kRtx; + stats.substreams[102].type = VideoSendStream::StreamStats::StreamType::kRtx; stats.substreams[102].rtp_stats.retransmitted.header_bytes = 3; stats.substreams[102].rtp_stats.retransmitted.padding_bytes = 2; stats.substreams[102].rtp_stats.retransmitted.payload_bytes = 10; @@ -6483,8 +6637,7 @@ TEST_F(WebRtcVideoChannelTest, stats.substreams[102].rtp_stats.retransmitted; stats.substreams[102].referenced_media_ssrc = 101; // Simulcast layer 2, RTP stream. header+padding=20, payload=40, packets=7. - stats.substreams[201].type = - webrtc::VideoSendStream::StreamStats::StreamType::kMedia; + stats.substreams[201].type = VideoSendStream::StreamStats::StreamType::kMedia; stats.substreams[201].rtp_stats.transmitted.header_bytes = 10; stats.substreams[201].rtp_stats.transmitted.padding_bytes = 10; stats.substreams[201].rtp_stats.transmitted.payload_bytes = 40; @@ -6493,10 +6646,9 @@ TEST_F(WebRtcVideoChannelTest, stats.substreams[201].rtp_stats.retransmitted.padding_bytes = 0; stats.substreams[201].rtp_stats.retransmitted.payload_bytes = 0; stats.substreams[201].rtp_stats.retransmitted.packets = 0; - stats.substreams[201].referenced_media_ssrc = absl::nullopt; + stats.substreams[201].referenced_media_ssrc = std::nullopt; // Simulcast layer 2, RTX stream. header+padding=10, payload=20, packets=4. - stats.substreams[202].type = - webrtc::VideoSendStream::StreamStats::StreamType::kRtx; + stats.substreams[202].type = VideoSendStream::StreamStats::StreamType::kRtx; stats.substreams[202].rtp_stats.retransmitted.header_bytes = 6; stats.substreams[202].rtp_stats.retransmitted.padding_bytes = 4; stats.substreams[202].rtp_stats.retransmitted.payload_bytes = 20; @@ -6507,7 +6659,7 @@ TEST_F(WebRtcVideoChannelTest, // FlexFEC stream associated with the Simulcast layer 2. // header+padding=15, payload=17, packets=5. 
stats.substreams[301].type = - webrtc::VideoSendStream::StreamStats::StreamType::kFlexfec; + VideoSendStream::StreamStats::StreamType::kFlexfec; stats.substreams[301].rtp_stats.transmitted.header_bytes = 13; stats.substreams[301].rtp_stats.transmitted.padding_bytes = 2; stats.substreams[301].rtp_stats.transmitted.payload_bytes = 17; @@ -6519,8 +6671,8 @@ TEST_F(WebRtcVideoChannelTest, stats.substreams[301].referenced_media_ssrc = 201; stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6541,12 +6693,12 @@ TEST_F(WebRtcVideoChannelTest, TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesBandwidthLimitedResolutionCorrectly) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Stats stats; + VideoSendStream::Stats stats; stats.bw_limited_resolution = true; stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6557,7 +6709,7 @@ TEST_F(WebRtcVideoChannelTest, TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesSendRtcpPacketTypesCorrectly) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Stats stats; + VideoSendStream::Stats stats; stats.substreams[17].rtcp_packet_type_counts.fir_packets = 2; stats.substreams[17].rtcp_packet_type_counts.nack_packets = 3; stats.substreams[17].rtcp_packet_type_counts.pli_packets = 4; @@ -6568,8 +6720,8 @@ TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesSendRtcpPacketTypesCorrectly) { stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6589,30 +6741,28 @@ TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesSendRtcpPacketTypesCorrectly) { TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesReceiveRtcpPacketTypesCorrectly) { FakeVideoReceiveStream* stream = AddRecvStream(); - webrtc::VideoReceiveStreamInterface::Stats stats; + VideoReceiveStreamInterface::Stats stats; stats.rtcp_packet_type_counts.fir_packets = 2; stats.rtcp_packet_type_counts.nack_packets = 3; stats.rtcp_packet_type_counts.pli_packets = 4; stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); - EXPECT_EQ( - stats.rtcp_packet_type_counts.fir_packets, - rtc::checked_cast(receive_info.receivers[0].firs_sent)); + EXPECT_EQ(stats.rtcp_packet_type_counts.fir_packets, + checked_cast(receive_info.receivers[0].firs_sent)); EXPECT_EQ(stats.rtcp_packet_type_counts.nack_packets, receive_info.receivers[0].nacks_sent); - EXPECT_EQ( - stats.rtcp_packet_type_counts.pli_packets, - rtc::checked_cast(receive_info.receivers[0].plis_sent)); + EXPECT_EQ(stats.rtcp_packet_type_counts.pli_packets, + checked_cast(receive_info.receivers[0].plis_sent)); } TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesDecodeStatsCorrectly) { 
FakeVideoReceiveStream* stream = AddRecvStream(); - webrtc::VideoReceiveStreamInterface::Stats stats; + VideoReceiveStreamInterface::Stats stats; stats.decoder_implementation_name = "decoder_implementation_name"; stats.decode_ms = 2; stats.max_decode_ms = 3; @@ -6632,18 +6782,21 @@ TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesDecodeStatsCorrectly) { stats.frames_rendered = 13; stats.frames_decoded = 14; stats.qp_sum = 15; - stats.total_decode_time = webrtc::TimeDelta::Millis(16); - stats.total_assembly_time = webrtc::TimeDelta::Millis(4); + stats.corruption_score_sum = 0.3; + stats.corruption_score_squared_sum = 0.05; + stats.corruption_score_count = 2; + stats.total_decode_time = TimeDelta::Millis(16); + stats.total_assembly_time = TimeDelta::Millis(4); stats.frames_assembled_from_multiple_packets = 2; stats.power_efficient_decoder = true; - webrtc::RtpReceiveStats rtx_stats; + RtpReceiveStats rtx_stats; rtx_stats.packet_counter.packets = 5; rtx_stats.packet_counter.payload_bytes = 23; stats.rtx_rtp_stats = rtx_stats; stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6667,14 +6820,20 @@ TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesDecodeStatsCorrectly) { EXPECT_EQ(stats.render_delay_ms, receive_info.receivers[0].render_delay_ms); EXPECT_EQ(stats.width, receive_info.receivers[0].frame_width); EXPECT_EQ(stats.height, receive_info.receivers[0].frame_height); - EXPECT_EQ(rtc::checked_cast(stats.frame_counts.key_frames + - stats.frame_counts.delta_frames), + EXPECT_EQ(checked_cast(stats.frame_counts.key_frames + + stats.frame_counts.delta_frames), receive_info.receivers[0].frames_received); EXPECT_EQ(stats.frames_rendered, receive_info.receivers[0].frames_rendered); EXPECT_EQ(stats.frames_decoded, receive_info.receivers[0].frames_decoded); - EXPECT_EQ(rtc::checked_cast(stats.frame_counts.key_frames), + EXPECT_EQ(checked_cast(stats.frame_counts.key_frames), receive_info.receivers[0].key_frames_decoded); EXPECT_EQ(stats.qp_sum, receive_info.receivers[0].qp_sum); + EXPECT_EQ(stats.corruption_score_sum, + receive_info.receivers[0].corruption_score_sum); + EXPECT_EQ(stats.corruption_score_squared_sum, + receive_info.receivers[0].corruption_score_squared_sum); + EXPECT_EQ(stats.corruption_score_count, + receive_info.receivers[0].corruption_score_count); EXPECT_EQ(stats.total_decode_time, receive_info.receivers[0].total_decode_time); EXPECT_EQ(stats.total_assembly_time, @@ -6691,13 +6850,13 @@ TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesDecodeStatsCorrectly) { TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesInterFrameDelayStatsCorrectly) { FakeVideoReceiveStream* stream = AddRecvStream(); - webrtc::VideoReceiveStreamInterface::Stats stats; + VideoReceiveStreamInterface::Stats stats; stats.total_inter_frame_delay = 0.123; stats.total_squared_inter_frame_delay = 0.00456; stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6709,7 +6868,7 @@ TEST_F(WebRtcVideoChannelTest, TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesReceivePacketStatsCorrectly) { FakeVideoReceiveStream* stream = AddRecvStream(); - 
webrtc::VideoReceiveStreamInterface::Stats stats; + VideoReceiveStreamInterface::Stats stats; stats.rtp_stats.packet_counter.payload_bytes = 2; stats.rtp_stats.packet_counter.header_bytes = 3; stats.rtp_stats.packet_counter.padding_bytes = 4; @@ -6717,17 +6876,17 @@ TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesReceivePacketStatsCorrectly) { stats.rtp_stats.packets_lost = 6; stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); - EXPECT_EQ(stats.rtp_stats.packet_counter.payload_bytes, - rtc::checked_cast( - receive_info.receivers[0].payload_bytes_received)); - EXPECT_EQ(stats.rtp_stats.packet_counter.packets, - rtc::checked_cast( - receive_info.receivers[0].packets_received)); + EXPECT_EQ( + stats.rtp_stats.packet_counter.payload_bytes, + checked_cast(receive_info.receivers[0].payload_bytes_received)); + EXPECT_EQ( + stats.rtp_stats.packet_counter.packets, + checked_cast(receive_info.receivers[0].packets_received)); EXPECT_EQ(stats.rtp_stats.packets_lost, receive_info.receivers[0].packets_lost); } @@ -6735,12 +6894,12 @@ TEST_F(WebRtcVideoChannelTest, GetStatsTranslatesReceivePacketStatsCorrectly) { TEST_F(WebRtcVideoChannelTest, TranslatesCallStatsCorrectly) { AddSendStream(); AddSendStream(); - webrtc::Call::Stats stats; + Call::Stats stats; stats.rtt_ms = 123; fake_call_->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6751,7 +6910,7 @@ TEST_F(WebRtcVideoChannelTest, TranslatesCallStatsCorrectly) { TEST_F(WebRtcVideoChannelTest, TranslatesSenderBitrateStatsCorrectly) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::VideoSendStream::Stats stats; + VideoSendStream::Stats stats; stats.target_media_bitrate_bps = 156; stats.media_bitrate_bps = 123; stats.substreams[17].total_bitrate_bps = 1; @@ -6761,7 +6920,7 @@ TEST_F(WebRtcVideoChannelTest, TranslatesSenderBitrateStatsCorrectly) { stream->SetStats(stats); FakeVideoSendStream* stream2 = AddSendStream(); - webrtc::VideoSendStream::Stats stats2; + VideoSendStream::Stats stats2; stats2.target_media_bitrate_bps = 200; stats2.media_bitrate_bps = 321; stats2.substreams[13].total_bitrate_bps = 5; @@ -6770,8 +6929,8 @@ TEST_F(WebRtcVideoChannelTest, TranslatesSenderBitrateStatsCorrectly) { stats2.substreams[21].retransmit_bitrate_bps = 8; stream2->SetStats(stats2); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6813,7 +6972,7 @@ TEST_F(WebRtcVideoChannelTest, DefaultReceiveStreamReconfiguresToUseRtx) { << "Default receive stream should not have configured RTX"; EXPECT_TRUE(receive_channel_->AddRecvStream( - cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs))); + webrtc::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs))); ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size()) << "AddRecvStream should have reconfigured, not added a new receiver."; recv_stream = fake_call_->GetVideoReceiveStreams()[0]; @@ -6834,7 +6993,7 @@ 
TEST_F(WebRtcVideoChannelTest, RejectsAddingStreamsWithMissingSsrcsForRtx) { const std::vector rtx_ssrcs = MAKE_VECTOR(kRtxSsrcs1); StreamParams sp = - cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs); + webrtc::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs); sp.ssrcs = ssrcs; // Without RTXs, this is the important part. EXPECT_FALSE(send_channel_->AddSendStream(sp)); @@ -6848,13 +7007,13 @@ TEST_F(WebRtcVideoChannelTest, RejectsAddingStreamsWithOverlappingRtxSsrcs) { const std::vector rtx_ssrcs = MAKE_VECTOR(kRtxSsrcs1); StreamParams sp = - cricket::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs); + webrtc::CreateSimWithRtxStreamParams("cname", ssrcs, rtx_ssrcs); EXPECT_TRUE(send_channel_->AddSendStream(sp)); EXPECT_TRUE(receive_channel_->AddRecvStream(sp)); // The RTX SSRC is already used in previous streams, using it should fail. - sp = cricket::StreamParams::CreateLegacy(rtx_ssrcs[0]); + sp = StreamParams::CreateLegacy(rtx_ssrcs[0]); EXPECT_FALSE(send_channel_->AddSendStream(sp)); EXPECT_FALSE(receive_channel_->AddRecvStream(sp)); @@ -6873,14 +7032,14 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_TRUE(send_channel_->SetSenderParameters(send_parameters_)); StreamParams sp = - cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kFirstStreamSsrcs)); + webrtc::CreateSimStreamParams("cname", MAKE_VECTOR(kFirstStreamSsrcs)); EXPECT_TRUE(send_channel_->AddSendStream(sp)); EXPECT_TRUE(receive_channel_->AddRecvStream(sp)); // One of the SSRCs is already used in previous streams, using it should fail. - sp = cricket::CreateSimStreamParams("cname", - MAKE_VECTOR(kOverlappingStreamSsrcs)); + sp = webrtc::CreateSimStreamParams("cname", + MAKE_VECTOR(kOverlappingStreamSsrcs)); EXPECT_FALSE(send_channel_->AddSendStream(sp)); EXPECT_FALSE(receive_channel_->AddRecvStream(sp)); @@ -6898,7 +7057,7 @@ TEST_F(WebRtcVideoChannelTest, ReportsSsrcGroupsInStats) { static const uint32_t kSenderSsrcs[] = {4, 7, 10}; static const uint32_t kSenderRtxSsrcs[] = {5, 8, 11}; - StreamParams sender_sp = cricket::CreateSimWithRtxStreamParams( + StreamParams sender_sp = webrtc::CreateSimWithRtxStreamParams( "cname", MAKE_VECTOR(kSenderSsrcs), MAKE_VECTOR(kSenderRtxSsrcs)); EXPECT_TRUE(send_channel_->AddSendStream(sender_sp)); @@ -6906,12 +7065,12 @@ TEST_F(WebRtcVideoChannelTest, ReportsSsrcGroupsInStats) { static const uint32_t kReceiverSsrcs[] = {3}; static const uint32_t kReceiverRtxSsrcs[] = {2}; - StreamParams receiver_sp = cricket::CreateSimWithRtxStreamParams( + StreamParams receiver_sp = webrtc::CreateSimWithRtxStreamParams( "cname", MAKE_VECTOR(kReceiverSsrcs), MAKE_VECTOR(kReceiverRtxSsrcs)); EXPECT_TRUE(receive_channel_->AddRecvStream(receiver_sp)); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6925,12 +7084,12 @@ TEST_F(WebRtcVideoChannelTest, ReportsSsrcGroupsInStats) { TEST_F(WebRtcVideoChannelTest, MapsReceivedPayloadTypeToCodecName) { FakeVideoReceiveStream* stream = AddRecvStream(); - webrtc::VideoReceiveStreamInterface::Stats stats; + VideoReceiveStreamInterface::Stats stats; // Report no codec name before receiving. 
stream->SetStats(stats); - cricket::VideoMediaSendInfo send_info; - cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; EXPECT_TRUE(send_channel_->GetStats(&send_info)); EXPECT_TRUE(receive_channel_->GetStats(&receive_info)); @@ -6958,7 +7117,7 @@ TEST_F(WebRtcVideoChannelTest, MapsReceivedPayloadTypeToCodecName) { // arrives to properly create a receive stream with a sync label. TEST_F(WebRtcVideoChannelTest, RecvUnsignaledSsrcWithSignaledStreamId) { const char kSyncLabel[] = "sync_label"; - cricket::StreamParams unsignaled_stream; + StreamParams unsignaled_stream; unsignaled_stream.set_stream_ids({kSyncLabel}); ASSERT_TRUE(receive_channel_->AddRecvStream(unsignaled_stream)); receive_channel_->OnDemuxerCriteriaUpdatePending(); @@ -7019,7 +7178,7 @@ TEST_F(WebRtcVideoChannelTest, receive_channel_->ResetUnsignaledRecvStream(); constexpr uint32_t kIncomingSignalledSsrc = kIncomingUnsignalledSsrc + 1; ASSERT_TRUE(receive_channel_->AddRecvStream( - cricket::StreamParams::CreateLegacy(kIncomingSignalledSsrc))); + StreamParams::CreateLegacy(kIncomingSignalledSsrc))); // New receiver is for the signaled stream. const auto& receivers2 = fake_call_->GetVideoReceiveStreams(); @@ -7170,8 +7329,6 @@ TEST_F(WebRtcVideoChannelTest, } TEST_F(WebRtcVideoChannelTest, MultiplePendingDemuxerCriteriaUpdates) { - const uint32_t kSsrc = 1; - // Starting point: receiving kSsrc. EXPECT_TRUE( receive_channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrc))); @@ -7270,7 +7427,7 @@ TEST_F(WebRtcVideoChannelTest, UnsignalledSsrcHasACooldown) { } time_controller_.AdvanceTime( - webrtc::TimeDelta::Millis(kUnsignalledReceiveStreamCooldownMs - 1)); + TimeDelta::Millis(kUnsignalledReceiveStreamCooldownMs - 1)); // We now have an unsignalled receive stream for kSsrc1. EXPECT_EQ(fake_call_->GetVideoReceiveStreams().size(), 1u); @@ -7293,7 +7450,7 @@ TEST_F(WebRtcVideoChannelTest, UnsignalledSsrcHasACooldown) { // After 500 ms, kSsrc2 should trigger a new unsignalled receive stream that // replaces the old one. - time_controller_.AdvanceTime(webrtc::TimeDelta::Millis(1)); + time_controller_.AdvanceTime(TimeDelta::Millis(1)); { // Receive a packet for kSsrc2. RtpPacketReceived packet; @@ -7330,7 +7487,7 @@ TEST_F(WebRtcVideoChannelTest, BaseMinimumPlayoutDelayMs) { // Test BaseMinimumPlayoutDelayMs on unsignaled receive streams. TEST_F(WebRtcVideoChannelTest, BaseMinimumPlayoutDelayMsUnsignaledRecvStream) { - absl::optional delay_ms; + std::optional delay_ms; const FakeVideoReceiveStream* recv_stream; // Set default stream with SSRC 0 @@ -7364,11 +7521,12 @@ void WebRtcVideoChannelTest::TestReceiveUnsignaledSsrcPacket( uint8_t payload_type, bool expect_created_receive_stream) { // kRedRtxPayloadType must currently be unused. - EXPECT_FALSE(FindCodecById(engine_.recv_codecs(), kRedRtxPayloadType)); + EXPECT_FALSE( + webrtc::FindCodecById(engine_.LegacyRecvCodecs(), kRedRtxPayloadType)); // Add a RED RTX codec. 
- VideoCodec red_rtx_codec = cricket::CreateVideoRtxCodec( - kRedRtxPayloadType, GetEngineCodec("red").id); + Codec red_rtx_codec = + webrtc::CreateVideoRtxCodec(kRedRtxPayloadType, GetEngineCodec("red").id); recv_parameters_.codecs.push_back(red_rtx_codec); EXPECT_TRUE(receive_channel_->SetReceiverParameters(recv_parameters_)); @@ -7411,12 +7569,12 @@ TEST_F(WebRtcVideoChannelTest, Vp9PacketCreatesUnsignalledStream) { true /* expect_created_receive_stream */); } -TEST_F(WebRtcVideoChannelTest, RtxPacketCreateUnsignalledStream) { +TEST_F(WebRtcVideoChannelTest, RtxPacketDoesntCreateUnsignalledStream) { AssignDefaultAptRtxTypes(); - const cricket::VideoCodec vp8 = GetEngineCodec("VP8"); + const Codec vp8 = GetEngineCodec("VP8"); const int rtx_vp8_payload_type = default_apt_rtx_types_[vp8.id]; TestReceiveUnsignaledSsrcPacket(rtx_vp8_payload_type, - true /* expect_created_receive_stream */); + false /* expect_created_receive_stream */); } TEST_F(WebRtcVideoChannelTest, UlpfecPacketDoesntCreateUnsignalledStream) { @@ -7437,7 +7595,7 @@ TEST_F(WebRtcVideoChannelTest, RedRtxPacketDoesntCreateUnsignalledStream) { TEST_F(WebRtcVideoChannelTest, RtxAfterMediaPacketUpdatesUnsignalledRtxSsrc) { AssignDefaultAptRtxTypes(); - const cricket::VideoCodec vp8 = GetEngineCodec("VP8"); + const Codec vp8 = GetEngineCodec("VP8"); const int payload_type = vp8.id; const int rtx_vp8_payload_type = default_apt_rtx_types_[vp8.id]; const uint32_t ssrc = kIncomingUnsignalledSsrc; @@ -7470,24 +7628,23 @@ TEST_F(WebRtcVideoChannelTest, RtxAfterMediaPacketUpdatesUnsignalledRtxSsrc) { EXPECT_EQ(fake_call_->GetDeliveredPacketsForSsrc(rtx_ssrc), 1u); } -TEST_F(WebRtcVideoChannelTest, - MediaPacketAfterRtxImmediatelyRecreatesUnsignalledStream) { +TEST_F(WebRtcVideoChannelTest, UnsignaledStreamCreatedAfterMediaPacket) { AssignDefaultAptRtxTypes(); - const cricket::VideoCodec vp8 = GetEngineCodec("VP8"); + const Codec vp8 = GetEngineCodec("VP8"); const int payload_type = vp8.id; const int rtx_vp8_payload_type = default_apt_rtx_types_[vp8.id]; const uint32_t ssrc = kIncomingUnsignalledSsrc; const uint32_t rtx_ssrc = ssrc + 1; - // Send rtx packet. + // Receive rtx packet. RtpPacketReceived rtx_packet; rtx_packet.SetPayloadType(rtx_vp8_payload_type); rtx_packet.SetSsrc(rtx_ssrc); receive_channel_->OnPacketReceived(rtx_packet); time_controller_.AdvanceTime(TimeDelta::Zero()); - EXPECT_EQ(1u, fake_call_->GetVideoReceiveStreams().size()); + EXPECT_EQ(0u, fake_call_->GetVideoReceiveStreams().size()); - // Send media packet. + // Receive media packet. RtpPacketReceived packet; packet.SetPayloadType(payload_type); packet.SetSsrc(ssrc); @@ -7506,19 +7663,19 @@ TEST_F(WebRtcVideoChannelTest, // Any different unsignalled SSRC received will replace the default. TEST_F(WebRtcVideoChannelTest, ReceiveDifferentUnsignaledSsrc) { // Allow receiving VP8, VP9, H264 (if enabled). - cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); #if defined(WEBRTC_USE_H264) - cricket::VideoCodec H264codec = cricket::CreateVideoCodec(126, "H264"); + webrtc::Codec H264codec = webrtc::CreateVideoCodec(126, "H264"); parameters.codecs.push_back(H264codec); #endif EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); // No receive streams yet. 
ASSERT_EQ(0u, fake_call_->GetVideoReceiveStreams().size()); - cricket::FakeVideoRenderer renderer; + FakeVideoRenderer renderer; receive_channel_->SetDefaultSink(&renderer); // Receive VP8 packet on first SSRC. @@ -7531,10 +7688,10 @@ TEST_F(WebRtcVideoChannelTest, ReceiveDifferentUnsignaledSsrc) { FakeVideoReceiveStream* recv_stream = fake_call_->GetVideoReceiveStreams()[0]; EXPECT_EQ(rtp_packet.Ssrc(), recv_stream->GetConfig().rtp.remote_ssrc); // Verify that the receive stream sinks to a renderer. - webrtc::VideoFrame video_frame = - webrtc::VideoFrame::Builder() + VideoFrame video_frame = + VideoFrame::Builder() .set_video_frame_buffer(CreateBlackFrameBuffer(4, 4)) - .set_timestamp_rtp(100) + .set_rtp_timestamp(100) .set_timestamp_us(0) .set_rotation(webrtc::kVideoRotation_0) .build(); @@ -7550,10 +7707,10 @@ TEST_F(WebRtcVideoChannelTest, ReceiveDifferentUnsignaledSsrc) { recv_stream = fake_call_->GetVideoReceiveStreams()[0]; EXPECT_EQ(rtp_packet.Ssrc(), recv_stream->GetConfig().rtp.remote_ssrc); // Verify that the receive stream sinks to a renderer. - webrtc::VideoFrame video_frame2 = - webrtc::VideoFrame::Builder() + VideoFrame video_frame2 = + VideoFrame::Builder() .set_video_frame_buffer(CreateBlackFrameBuffer(4, 4)) - .set_timestamp_rtp(200) + .set_rtp_timestamp(200) .set_timestamp_us(0) .set_rotation(webrtc::kVideoRotation_0) .build(); @@ -7573,7 +7730,7 @@ TEST_F(WebRtcVideoChannelTest, ReceiveDifferentUnsignaledSsrc) { webrtc::VideoFrame video_frame3 = webrtc::VideoFrame::Builder() .set_video_frame_buffer(CreateBlackFrameBuffer(4, 4)) - .set_timestamp_rtp(300) + .set_rtp_timestamp(300) .set_timestamp_us(0) .set_rotation(webrtc::kVideoRotation_0) .build(); @@ -7587,7 +7744,7 @@ TEST_F(WebRtcVideoChannelTest, ReceiveDifferentUnsignaledSsrc) { // been the default receive stream before being properly signaled. TEST_F(WebRtcVideoChannelTest, NewUnsignaledStreamDoesNotDestroyPreviouslyUnsignaledStream) { - cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); ASSERT_TRUE(receive_channel_->SetReceiverParameters(parameters)); @@ -7607,8 +7764,8 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_EQ(kSsrcs3[0], recv_stream0->GetConfig().rtp.remote_ssrc); // Signal the SSRC. 
- EXPECT_TRUE(receive_channel_->AddRecvStream( - cricket::StreamParams::CreateLegacy(kSsrcs3[0]))); + EXPECT_TRUE( + receive_channel_->AddRecvStream(StreamParams::CreateLegacy(kSsrcs3[0]))); ASSERT_EQ(1u, fake_call_->GetVideoReceiveStreams().size()); recv_stream0 = fake_call_->GetVideoReceiveStreams()[0]; EXPECT_EQ(kSsrcs3[0], recv_stream0->GetConfig().rtp.remote_ssrc); @@ -7627,7 +7784,7 @@ TEST_F(WebRtcVideoChannelTest, TEST_F(WebRtcVideoChannelTest, CanSetMaxBitrateForExistingStream) { AddSendStream(); - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); EXPECT_TRUE(send_channel_->SetSend(true)); @@ -7653,11 +7810,11 @@ TEST_F(WebRtcVideoChannelTest, CanSetMaxBitrateForExistingStream) { } TEST_F(WebRtcVideoChannelTest, CannotSetMaxBitrateForNonexistentStream) { - webrtc::RtpParameters nonexistent_parameters = + RtpParameters nonexistent_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(0u, nonexistent_parameters.encodings.size()); - nonexistent_parameters.encodings.push_back(webrtc::RtpEncodingParameters()); + nonexistent_parameters.encodings.push_back(RtpEncodingParameters()); EXPECT_FALSE( send_channel_->SetRtpSendParameters(last_ssrc_, nonexistent_parameters) .ok()); @@ -7667,8 +7824,7 @@ TEST_F(WebRtcVideoChannelTest, SetLowMaxBitrateOverwritesVideoStreamMinBitrate) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_FALSE(parameters.encodings[0].max_bitrate_bps.has_value()); EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); @@ -7705,8 +7861,7 @@ TEST_F(WebRtcVideoChannelTest, int high_min_bitrate_bps = stream->GetVideoStreams()[0].max_bitrate_bps + 1; // Set a high min bitrate and check that max_bitrate_bps is adjusted up. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); parameters.encodings[0].min_bitrate_bps = high_min_bitrate_bps; EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); @@ -7730,8 +7885,7 @@ TEST_F(WebRtcVideoChannelTest, // Set min bitrate above global max bitrate and check that min_bitrate_bps is // adjusted down. 
- webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); parameters.encodings[0].min_bitrate_bps = 99999 + 1; EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); @@ -7745,8 +7899,7 @@ TEST_F(WebRtcVideoChannelTest, TEST_F(WebRtcVideoChannelTest, SetMaxFramerateOneStream) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_FALSE(parameters.encodings[0].max_framerate.has_value()); EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); @@ -7772,8 +7925,7 @@ TEST_F(WebRtcVideoChannelTest, SetMaxFramerateOneStream) { TEST_F(WebRtcVideoChannelTest, SetNumTemporalLayersForSingleStream) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_FALSE(parameters.encodings[0].num_temporal_layers.has_value()); EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); @@ -7797,10 +7949,9 @@ TEST_F(WebRtcVideoChannelTest, SetNumTemporalLayersForSingleStream) { TEST_F(WebRtcVideoChannelTest, CannotSetRtpSendParametersWithIncorrectNumberOfEncodings) { AddSendStream(); - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); // Two or more encodings should result in failure. - parameters.encodings.push_back(webrtc::RtpEncodingParameters()); + parameters.encodings.push_back(RtpEncodingParameters()); EXPECT_FALSE( send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); // Zero encodings should also fail. @@ -7812,14 +7963,13 @@ TEST_F(WebRtcVideoChannelTest, TEST_F(WebRtcVideoChannelTest, CannotSetSimulcastRtpSendParametersWithIncorrectNumberOfEncodings) { std::vector ssrcs = MAKE_VECTOR(kSsrcs3); - StreamParams sp = CreateSimStreamParams("cname", ssrcs); + StreamParams sp = webrtc::CreateSimStreamParams("cname", ssrcs); AddSendStream(sp); - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); // Additional encodings should result in failure. - parameters.encodings.push_back(webrtc::RtpEncodingParameters()); + parameters.encodings.push_back(RtpEncodingParameters()); EXPECT_FALSE( send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); // Zero encodings should also fail. @@ -7831,8 +7981,7 @@ TEST_F(WebRtcVideoChannelTest, // Changing the SSRC through RtpParameters is not allowed. TEST_F(WebRtcVideoChannelTest, CannotSetSsrcInRtpSendParameters) { AddSendStream(); - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); parameters.encodings[0].ssrc = 0xdeadbeef; EXPECT_FALSE( send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); @@ -7842,8 +7991,7 @@ TEST_F(WebRtcVideoChannelTest, CannotSetSsrcInRtpSendParameters) { // a value <= 0, setting the parameters returns false. 
TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersInvalidBitratePriority) { AddSendStream(); - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_EQ(webrtc::kDefaultBitratePriority, parameters.encodings[0].bitrate_priority); @@ -7860,8 +8008,7 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersInvalidBitratePriority) { // properly on the VideoChannel and propagates down to the video encoder. TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersPriorityOneStream) { AddSendStream(); - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_EQ(webrtc::kDefaultBitratePriority, parameters.encodings[0].bitrate_priority); @@ -7889,7 +8036,7 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersPriorityOneStream) { // Check that the vector of VideoStreams also was propagated correctly. Note // that this is testing the behavior of the FakeVideoSendStream, which mimics // the calls to CreateEncoderStreams to get the VideoStreams. - EXPECT_EQ(absl::optional<double>(new_bitrate_priority), + EXPECT_EQ(std::optional<double>(new_bitrate_priority), video_send_stream->GetVideoStreams()[0].bitrate_priority); } @@ -7898,27 +8045,25 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersPriorityOneStream) { // streams. TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersPrioritySimulcastStreams) { // Create the stream params with multiple ssrcs for simulcast. - const size_t kNumSimulcastStreams = 3; std::vector<uint32_t> ssrcs = MAKE_VECTOR(kSsrcs3); - StreamParams stream_params = CreateSimStreamParams("cname", ssrcs); + StreamParams stream_params = webrtc::CreateSimStreamParams("cname", ssrcs); AddSendStream(stream_params); uint32_t primary_ssrc = stream_params.first_ssrc(); // Using the FrameForwarder, we manually send a full size // frame. This creates multiple VideoStreams for all simulcast layers when // reconfiguring, and allows us to test this behavior. - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; VideoOptions options; EXPECT_TRUE( send_channel_->SetVideoSend(primary_ssrc, &options, &frame_forwarder)); send_channel_->SetSend(true); - frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame( - 1920, 1080, webrtc::VideoRotation::kVideoRotation_0, - rtc::kNumMicrosecsPerSec / 30)); + frame_forwarder.IncomingCapturedFrame( + frame_source_.GetFrame(1920, 1080, VideoRotation::kVideoRotation_0, + webrtc::kNumMicrosecsPerSec / 30)); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(primary_ssrc); + RtpParameters parameters = send_channel_->GetRtpSendParameters(primary_ssrc); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_EQ(webrtc::kDefaultBitratePriority, parameters.encodings[0].bitrate_priority); @@ -7948,142 +8093,44 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersPrioritySimulcastStreams) { // FakeVideoSendStream calls CreateEncoderStreams, and we are testing that // these are created appropriately for the simulcast case.
EXPECT_EQ(kNumSimulcastStreams, video_send_stream->GetVideoStreams().size()); - EXPECT_EQ(absl::optional(new_bitrate_priority), + EXPECT_EQ(std::optional(new_bitrate_priority), video_send_stream->GetVideoStreams()[0].bitrate_priority); // Since we are only setting bitrate priority per-sender, the other // VideoStreams should have a bitrate priority of 0. - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, video_send_stream->GetVideoStreams()[1].bitrate_priority); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, video_send_stream->GetVideoStreams()[2].bitrate_priority); EXPECT_TRUE(send_channel_->SetVideoSend(primary_ssrc, nullptr, nullptr)); } -TEST_F(WebRtcVideoChannelTest, - GetAndSetRtpSendParametersScaleResolutionDownByVP8) { - VideoSenderParameters parameters; - parameters.codecs.push_back(cricket::CreateVideoCodec(kVp8CodecName)); - ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); - FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); - - webrtc::test::FrameForwarder frame_forwarder; - FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 30); - - VideoOptions options; - EXPECT_TRUE( - send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - send_channel_->SetSend(true); - - // Try layers in natural order (smallest to largest). - { - auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); - ASSERT_EQ(3u, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].scale_resolution_down_by = 4.0; - rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; - rtp_parameters.encodings[2].scale_resolution_down_by = 1.0; - auto result = - send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); - ASSERT_TRUE(result.ok()); - - frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - - std::vector video_streams = stream->GetVideoStreams(); - ASSERT_EQ(3u, video_streams.size()); - EXPECT_EQ(320u, video_streams[0].width); - EXPECT_EQ(180u, video_streams[0].height); - EXPECT_EQ(640u, video_streams[1].width); - EXPECT_EQ(360u, video_streams[1].height); - EXPECT_EQ(1280u, video_streams[2].width); - EXPECT_EQ(720u, video_streams[2].height); - } - - // Try layers in reverse natural order (largest to smallest). - { - auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); - ASSERT_EQ(3u, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; - rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; - rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = - send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); - ASSERT_TRUE(result.ok()); - - frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - - std::vector video_streams = stream->GetVideoStreams(); - ASSERT_EQ(3u, video_streams.size()); - EXPECT_EQ(1280u, video_streams[0].width); - EXPECT_EQ(720u, video_streams[0].height); - EXPECT_EQ(640u, video_streams[1].width); - EXPECT_EQ(360u, video_streams[1].height); - EXPECT_EQ(320u, video_streams[2].width); - EXPECT_EQ(180u, video_streams[2].height); - } - - // Try layers in mixed order. 
- { - auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); - ASSERT_EQ(3u, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].scale_resolution_down_by = 10.0; - rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; - rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = - send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); - ASSERT_TRUE(result.ok()); - - frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - - std::vector video_streams = stream->GetVideoStreams(); - ASSERT_EQ(3u, video_streams.size()); - EXPECT_EQ(128u, video_streams[0].width); - EXPECT_EQ(72u, video_streams[0].height); - EXPECT_EQ(640u, video_streams[1].width); - EXPECT_EQ(360u, video_streams[1].height); - EXPECT_EQ(320u, video_streams[2].width); - EXPECT_EQ(180u, video_streams[2].height); - } - - // Try with a missing scale setting, defaults to 1.0 if any other is set. - { - auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); - ASSERT_EQ(3u, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; - rtp_parameters.encodings[1].scale_resolution_down_by.reset(); - rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = - send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); - ASSERT_TRUE(result.ok()); - - frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - - std::vector video_streams = stream->GetVideoStreams(); - ASSERT_EQ(3u, video_streams.size()); - EXPECT_EQ(1280u, video_streams[0].width); - EXPECT_EQ(720u, video_streams[0].height); - EXPECT_EQ(1280u, video_streams[1].width); - EXPECT_EQ(720u, video_streams[1].height); - EXPECT_EQ(320u, video_streams[2].width); - EXPECT_EQ(180u, video_streams[2].height); - } - - EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); -} +struct ScaleResolutionDownByTestParameters { + std::string field_trials; + Resolution resolution; + std::vector> scale_resolution_down_by; + std::vector expected_resolutions; +}; -TEST_F(WebRtcVideoChannelTest, - GetAndSetRtpSendParametersScaleResolutionDownByVP8WithOddResolution) { - // Ensure that the top layer has width and height divisible by 2^3, - // so that the bottom layer has width and height divisible by 2. - // TODO(bugs.webrtc.org/8785): Remove this field trial when we fully trust - // the number of simulcast layers set by the app. - webrtc::test::ScopedKeyValueConfig field_trial( - field_trials_, "WebRTC-NormalizeSimulcastResolution/Enabled-3/"); - - // Set up WebRtcVideoChannel for 3-layer VP8 simulcast. +class WebRtcVideoChannelScaleResolutionDownByTest + : public WebRtcVideoChannelTest, + public ::testing::WithParamInterface< + std::tuple> {}; + +TEST_P(WebRtcVideoChannelScaleResolutionDownByTest, ScaleResolutionDownBy) { + ScaleResolutionDownByTestParameters test_params = std::get<0>(GetParam()); + std::string codec_name = std::get<1>(GetParam()); + ScopedKeyValueConfig field_trial(field_trials_, test_params.field_trials); + // Set up WebRtcVideoChannel for 3-layer simulcast. + encoder_factory_->AddSupportedVideoCodecType(codec_name); VideoSenderParameters parameters; - parameters.codecs.push_back(cricket::CreateVideoCodec(kVp8CodecName)); + Codec codec = webrtc::CreateVideoCodec(codec_name); + // Codec ID does not matter, but must be valid. 
+ codec.id = 123; + parameters.codecs.push_back(codec); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, /*options=*/nullptr, &frame_forwarder)); send_channel_->SetSend(true); @@ -8091,205 +8138,97 @@ TEST_F(WebRtcVideoChannelTest, // Set `scale_resolution_down_by`'s. auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(rtp_parameters.encodings.size(), 3u); - rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; - rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; - rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; + rtp_parameters.encodings[0].scale_resolution_down_by = + test_params.scale_resolution_down_by[0]; + rtp_parameters.encodings[1].scale_resolution_down_by = + test_params.scale_resolution_down_by[1]; + rtp_parameters.encodings[2].scale_resolution_down_by = + test_params.scale_resolution_down_by[2]; const auto result = send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); ASSERT_TRUE(result.ok()); // Use a capture resolution whose width and height are not divisible by 2^3. // (See field trial set at the top of the test.) - FakeFrameSource frame_source(2007, 1207, rtc::kNumMicrosecsPerSec / 30); + FakeFrameSource frame_source(test_params.resolution.width, + test_params.resolution.height, + webrtc::kNumMicrosecsPerSec / 30); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); // Ensure the scaling is correct. - const auto video_streams = stream->GetVideoStreams(); - ASSERT_EQ(video_streams.size(), 3u); - // Ensure that we round the capture resolution down for the top layer... - EXPECT_EQ(video_streams[0].width, 2000u); - EXPECT_EQ(video_streams[0].height, 1200u); - EXPECT_EQ(video_streams[1].width, 1000u); - EXPECT_EQ(video_streams[1].height, 600u); - // ...and that the bottom layer has a width/height divisible by 2. - EXPECT_EQ(video_streams[2].width, 500u); - EXPECT_EQ(video_streams[2].height, 300u); + const auto streams = stream->GetVideoStreams(); + ASSERT_EQ(streams.size(), 3u); + EXPECT_EQ(static_cast(streams[0].width), + test_params.expected_resolutions[0].width); + EXPECT_EQ(static_cast(streams[0].height), + test_params.expected_resolutions[0].height); + EXPECT_EQ(static_cast(streams[1].width), + test_params.expected_resolutions[1].width); + EXPECT_EQ(static_cast(streams[1].height), + test_params.expected_resolutions[1].height); + EXPECT_EQ(static_cast(streams[2].width), + test_params.expected_resolutions[2].width); + EXPECT_EQ(static_cast(streams[2].height), + test_params.expected_resolutions[2].height); // Tear down. EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } -TEST_F(WebRtcVideoChannelTest, - GetAndSetRtpSendParametersScaleResolutionDownByH264) { - encoder_factory_->AddSupportedVideoCodecType(kH264CodecName); - VideoSenderParameters parameters; - parameters.codecs.push_back(cricket::CreateVideoCodec(kH264CodecName)); - ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); - FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); - - webrtc::test::FrameForwarder frame_forwarder; - FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 30); +INSTANTIATE_TEST_SUITE_P( + All, + WebRtcVideoChannelScaleResolutionDownByTest, + Combine(Values( + // Try layers in natural order (smallest to largest). 
+ ScaleResolutionDownByTestParameters{ + .resolution = {.width = 1280, .height = 720}, + .scale_resolution_down_by = {4, 2, 1}, + .expected_resolutions = {{.width = 320, .height = 180}, + {.width = 640, .height = 360}, + {.width = 1280, .height = 720}}}, + // Try layers in reverse natural order (largest to smallest). + ScaleResolutionDownByTestParameters{ + .resolution = {.width = 1280, .height = 720}, + .scale_resolution_down_by = {1, 2, 4}, + .expected_resolutions = {{.width = 1280, .height = 720}, + {.width = 640, .height = 360}, + {.width = 320, .height = 180}}}, + // Try layers in mixed order. + ScaleResolutionDownByTestParameters{ + .resolution = {.width = 1280, .height = 720}, + .scale_resolution_down_by = {10, 2, 4}, + .expected_resolutions = {{.width = 128, .height = 72}, + {.width = 640, .height = 360}, + {.width = 320, .height = 180}}}, + // Try with a missing scale setting, defaults to 1.0 if any + // other is set. + ScaleResolutionDownByTestParameters{ + .resolution = {.width = 1280, .height = 720}, + .scale_resolution_down_by = {1, std::nullopt, 4}, + .expected_resolutions = {{.width = 1280, .height = 720}, + {.width = 1280, .height = 720}, + {.width = 320, .height = 180}}}, + // Odd resolution. Request alignment by 8 to get the resolution + // of the smallest layer multiple by 2. + ScaleResolutionDownByTestParameters{ + .field_trials = + "WebRTC-NormalizeSimulcastResolution/Enabled-3/", + .resolution = {.width = 2007, .height = 1207}, + .scale_resolution_down_by = {1, 2, 4}, + .expected_resolutions = {{.width = 2000, .height = 1200}, + {.width = 1000, .height = 600}, + {.width = 500, .height = 300}}}), + Values(kVp8CodecName, kH264CodecName))); - VideoOptions options; - EXPECT_TRUE( - send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); - send_channel_->SetSend(true); +TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMaxFramerate) { + SetUpSimulcast(true, /*with_rtx=*/false); - // Try layers in natural order (smallest to largest). - { - auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); - ASSERT_EQ(3u, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].scale_resolution_down_by = 4.0; - rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; - rtp_parameters.encodings[2].scale_resolution_down_by = 1.0; - auto result = - send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); - ASSERT_TRUE(result.ok()); - - frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - - std::vector video_streams = stream->GetVideoStreams(); - ASSERT_EQ(3u, video_streams.size()); - EXPECT_EQ(320u, video_streams[0].width); - EXPECT_EQ(180u, video_streams[0].height); - EXPECT_EQ(640u, video_streams[1].width); - EXPECT_EQ(360u, video_streams[1].height); - EXPECT_EQ(1280u, video_streams[2].width); - EXPECT_EQ(720u, video_streams[2].height); - } - - // Try layers in reverse natural order (largest to smallest). 
- { - auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); - ASSERT_EQ(3u, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; - rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; - rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = - send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); - ASSERT_TRUE(result.ok()); - - frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - - std::vector video_streams = stream->GetVideoStreams(); - ASSERT_EQ(3u, video_streams.size()); - EXPECT_EQ(1280u, video_streams[0].width); - EXPECT_EQ(720u, video_streams[0].height); - EXPECT_EQ(640u, video_streams[1].width); - EXPECT_EQ(360u, video_streams[1].height); - EXPECT_EQ(320u, video_streams[2].width); - EXPECT_EQ(180u, video_streams[2].height); - } - - // Try layers in mixed order. - { - auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); - ASSERT_EQ(3u, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].scale_resolution_down_by = 10.0; - rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; - rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = - send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); - ASSERT_TRUE(result.ok()); - - frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - - std::vector video_streams = stream->GetVideoStreams(); - ASSERT_EQ(3u, video_streams.size()); - EXPECT_EQ(128u, video_streams[0].width); - EXPECT_EQ(72u, video_streams[0].height); - EXPECT_EQ(640u, video_streams[1].width); - EXPECT_EQ(360u, video_streams[1].height); - EXPECT_EQ(320u, video_streams[2].width); - EXPECT_EQ(180u, video_streams[2].height); - } - - // Try with a missing scale setting, defaults to 1.0 if any other is set. - { - auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); - ASSERT_EQ(3u, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; - rtp_parameters.encodings[1].scale_resolution_down_by.reset(); - rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - auto result = - send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); - ASSERT_TRUE(result.ok()); - - frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - - std::vector video_streams = stream->GetVideoStreams(); - ASSERT_EQ(3u, video_streams.size()); - EXPECT_EQ(1280u, video_streams[0].width); - EXPECT_EQ(720u, video_streams[0].height); - EXPECT_EQ(1280u, video_streams[1].width); - EXPECT_EQ(720u, video_streams[1].height); - EXPECT_EQ(320u, video_streams[2].width); - EXPECT_EQ(180u, video_streams[2].height); - } - EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); -} - -TEST_F(WebRtcVideoChannelTest, - GetAndSetRtpSendParametersScaleResolutionDownByH264WithOddResolution) { - // Ensure that the top layer has width and height divisible by 2^3, - // so that the bottom layer has width and height divisible by 2. - // TODO(bugs.webrtc.org/8785): Remove this field trial when we fully trust - // the number of simulcast layers set by the app. - webrtc::test::ScopedKeyValueConfig field_trial( - field_trials_, "WebRTC-NormalizeSimulcastResolution/Enabled-3/"); - - // Set up WebRtcVideoChannel for 3-layer H264 simulcast. 
- encoder_factory_->AddSupportedVideoCodecType(kH264CodecName); - VideoSenderParameters parameters; - parameters.codecs.push_back(cricket::CreateVideoCodec(kH264CodecName)); - ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); - FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); - webrtc::test::FrameForwarder frame_forwarder; - EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, /*options=*/nullptr, - &frame_forwarder)); - send_channel_->SetSend(true); - - // Set `scale_resolution_down_by`'s. - auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); - ASSERT_EQ(rtp_parameters.encodings.size(), 3u); - rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; - rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; - rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; - const auto result = - send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); - ASSERT_TRUE(result.ok()); - - // Use a capture resolution whose width and height are not divisible by 2^3. - // (See field trial set at the top of the test.) - FakeFrameSource frame_source(2007, 1207, rtc::kNumMicrosecsPerSec / 30); - frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); - - // Ensure the scaling is correct. - const auto video_streams = stream->GetVideoStreams(); - ASSERT_EQ(video_streams.size(), 3u); - // Ensure that we round the capture resolution down for the top layer... - EXPECT_EQ(video_streams[0].width, 2000u); - EXPECT_EQ(video_streams[0].height, 1200u); - EXPECT_EQ(video_streams[1].width, 1000u); - EXPECT_EQ(video_streams[1].height, 600u); - // ...and that the bottom layer has a width/height divisible by 2. - EXPECT_EQ(video_streams[2].width, 500u); - EXPECT_EQ(video_streams[2].height, 300u); - - // Tear down. - EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); -} - -TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMaxFramerate) { - SetUpSimulcast(true, /*with_rtx=*/false); - - // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); - EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); - for (const auto& encoding : parameters.encodings) { - EXPECT_FALSE(encoding.max_framerate); - } + // Get and set the rtp encoding parameters. + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); + EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); + for (const auto& encoding : parameters.encodings) { + EXPECT_FALSE(encoding.max_framerate); + } // Change the value and set it on the VideoChannel. parameters.encodings[0].max_framerate = 10; @@ -8310,16 +8249,15 @@ TEST_F(WebRtcVideoChannelTest, SetUpSimulcast(true, /*with_rtx=*/false); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); // Num temporal layers should be in the range [1, kMaxTemporalStreams]. 
parameters.encodings[0].num_temporal_layers = 0; - EXPECT_EQ(webrtc::RTCErrorType::INVALID_RANGE, + EXPECT_EQ(RTCErrorType::INVALID_RANGE, send_channel_->SetRtpSendParameters(last_ssrc_, parameters).type()); parameters.encodings[0].num_temporal_layers = webrtc::kMaxTemporalStreams + 1; - EXPECT_EQ(webrtc::RTCErrorType::INVALID_RANGE, + EXPECT_EQ(RTCErrorType::INVALID_RANGE, send_channel_->SetRtpSendParameters(last_ssrc_, parameters).type()); } @@ -8327,8 +8265,7 @@ TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersNumTemporalLayers) { SetUpSimulcast(true, /*with_rtx=*/false); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); for (const auto& encoding : parameters.encodings) EXPECT_FALSE(encoding.num_temporal_layers); @@ -8351,7 +8288,7 @@ TEST_F(WebRtcVideoChannelTest, NumTemporalLayersPropagatedToEncoder) { FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); // Send a full size frame so all simulcast layers are used when reconfiguring. - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; VideoOptions options; EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); @@ -8360,8 +8297,7 @@ TEST_F(WebRtcVideoChannelTest, NumTemporalLayersPropagatedToEncoder) { // Get and set the rtp encoding parameters. // Change the value and set it on the VideoChannel. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[0].num_temporal_layers = 3; parameters.encodings[1].num_temporal_layers = 2; @@ -8371,7 +8307,7 @@ TEST_F(WebRtcVideoChannelTest, NumTemporalLayersPropagatedToEncoder) { // Verify that the new value is propagated down to the encoder. // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. EXPECT_EQ(2, stream->num_encoder_reconfigurations()); - webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); + VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); EXPECT_EQ(kNumSimulcastStreams, encoder_config.number_of_streams); EXPECT_EQ(kNumSimulcastStreams, encoder_config.simulcast_layers.size()); EXPECT_EQ(3UL, encoder_config.simulcast_layers[0].num_temporal_layers); @@ -8398,7 +8334,7 @@ TEST_F(WebRtcVideoChannelTest, FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); // Send a full size frame so all simulcast layers are used when reconfiguring. - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; VideoOptions options; EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); @@ -8406,15 +8342,14 @@ TEST_F(WebRtcVideoChannelTest, frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Change rtp encoding parameters. 
- webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[0].num_temporal_layers = 2; parameters.encodings[2].num_temporal_layers = 1; EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); // Verify that no value is propagated down to the encoder. - webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); + VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); EXPECT_EQ(kNumSimulcastStreams, encoder_config.number_of_streams); EXPECT_EQ(kNumSimulcastStreams, encoder_config.simulcast_layers.size()); EXPECT_EQ(2UL, encoder_config.simulcast_layers[0].num_temporal_layers); @@ -8434,11 +8369,11 @@ TEST_F(WebRtcVideoChannelTest, TEST_F(WebRtcVideoChannelTest, DefaultValuePropagatedToEncoderForUnsetFramerate) { - const std::vector kDefault = GetSimulcastBitrates720p(); + const std::vector kDefault = GetSimulcastBitrates720p(); FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); // Send a full size frame so all simulcast layers are used when reconfiguring. - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; VideoOptions options; EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); @@ -8447,8 +8382,7 @@ TEST_F(WebRtcVideoChannelTest, // Get and set the rtp encoding parameters. // Change the value and set it on the VideoChannel. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[0].max_framerate = 15; parameters.encodings[2].max_framerate = 20; @@ -8456,7 +8390,7 @@ TEST_F(WebRtcVideoChannelTest, // Verify that the new value propagated down to the encoder. // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. - webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); + VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); EXPECT_EQ(kNumSimulcastStreams, encoder_config.number_of_streams); EXPECT_EQ(kNumSimulcastStreams, encoder_config.simulcast_layers.size()); EXPECT_EQ(15, encoder_config.simulcast_layers[0].max_framerate); @@ -8480,13 +8414,13 @@ TEST_F(WebRtcVideoChannelTest, FallbackForUnsetOrUnsupportedScalabilityMode) { kSupportedModes = {ScalabilityMode::kL1T1, ScalabilityMode::kL1T2, ScalabilityMode::kL1T3}; - encoder_factory_->AddSupportedVideoCodec(webrtc::SdpVideoFormat( - "VP8", webrtc::SdpVideoFormat::Parameters(), kSupportedModes)); + encoder_factory_->AddSupportedVideoCodec( + SdpVideoFormat("VP8", CodecParameterMap(), kSupportedModes)); FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); // Send a full size frame so all simulcast layers are used when reconfiguring. - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; VideoOptions options; EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); @@ -8494,49 +8428,116 @@ TEST_F(WebRtcVideoChannelTest, FallbackForUnsetOrUnsupportedScalabilityMode) { frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Set scalability mode. 
- webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); - parameters.encodings[0].scalability_mode = absl::nullopt; + parameters.encodings[0].scalability_mode = std::nullopt; parameters.encodings[1].scalability_mode = "L1T3"; // Supported. parameters.encodings[2].scalability_mode = "L3T3"; // Unsupported. EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); // Verify that the new value is propagated down to the encoder. // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. - const absl::optional kDefaultScalabilityMode = + const std::optional kDefaultScalabilityMode = webrtc::ScalabilityModeFromString(kDefaultScalabilityModeStr); EXPECT_EQ(2, stream->num_encoder_reconfigurations()); - webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); + VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); EXPECT_EQ(kNumSimulcastStreams, encoder_config.number_of_streams); - EXPECT_THAT(encoder_config.simulcast_layers, - ElementsAre(Field(&webrtc::VideoStream::scalability_mode, - kDefaultScalabilityMode), - Field(&webrtc::VideoStream::scalability_mode, - ScalabilityMode::kL1T3), - Field(&webrtc::VideoStream::scalability_mode, - kDefaultScalabilityMode))); + EXPECT_THAT( + encoder_config.simulcast_layers, + ElementsAre( + Field(&VideoStream::scalability_mode, kDefaultScalabilityMode), + Field(&VideoStream::scalability_mode, ScalabilityMode::kL1T3), + Field(&VideoStream::scalability_mode, kDefaultScalabilityMode))); // FakeVideoSendStream calls CreateEncoderStreams, test that the vector of // VideoStreams are created appropriately for the simulcast case. - EXPECT_THAT(stream->GetVideoStreams(), - ElementsAre(Field(&webrtc::VideoStream::scalability_mode, - kDefaultScalabilityMode), - Field(&webrtc::VideoStream::scalability_mode, - ScalabilityMode::kL1T3), - Field(&webrtc::VideoStream::scalability_mode, - kDefaultScalabilityMode))); + EXPECT_THAT( + stream->GetVideoStreams(), + ElementsAre( + Field(&VideoStream::scalability_mode, kDefaultScalabilityMode), + Field(&VideoStream::scalability_mode, ScalabilityMode::kL1T3), + Field(&VideoStream::scalability_mode, kDefaultScalabilityMode))); // GetParameters. parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_THAT( parameters.encodings, + ElementsAre(Field(&RtpEncodingParameters::scalability_mode, + kDefaultScalabilityModeStr), + Field(&RtpEncodingParameters::scalability_mode, "L1T3"), + Field(&RtpEncodingParameters::scalability_mode, + kDefaultScalabilityModeStr))); + + // No parameters changed, encoder should not be reconfigured. 
+ EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + EXPECT_EQ(2, stream->num_encoder_reconfigurations()); + + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); +} + +#ifdef RTC_ENABLE_H265 +TEST_F( + WebRtcVideoChannelTest, + NoLayeringValueUsedIfModeIsUnsetOrUnsupportedByH265AndDefaultUnsupported) { + const absl::InlinedVector + kSupportedModes = {ScalabilityMode::kL1T1, ScalabilityMode::kL1T3}; + + encoder_factory_->AddSupportedVideoCodec( + SdpVideoFormat("H265", CodecParameterMap(), kSupportedModes)); + VideoSenderParameters send_parameters; + send_parameters.codecs.push_back(GetEngineCodec("H265")); + EXPECT_TRUE(send_channel_->SetSenderParameters(send_parameters)); + + FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); + + // Send a full size frame so all simulcast layers are used when reconfiguring. + FrameForwarder frame_forwarder; + VideoOptions options; + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetSend(true); + frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); + + // Set scalability mode. + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); + EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); + parameters.encodings[0].scalability_mode = std::nullopt; + parameters.encodings[1].scalability_mode = "L1T3"; // Supported. + parameters.encodings[2].scalability_mode = "L3T3"; // Unsupported. + EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + + // Verify that the new value is propagated down to the encoder. + // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. + const std::optional kDefaultScalabilityMode = + webrtc::ScalabilityModeFromString(webrtc::kNoLayeringScalabilityModeStr); + EXPECT_EQ(2, stream->num_encoder_reconfigurations()); + VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); + EXPECT_EQ(kNumSimulcastStreams, encoder_config.number_of_streams); + EXPECT_THAT( + encoder_config.simulcast_layers, + ElementsAre( + Field(&VideoStream::scalability_mode, kDefaultScalabilityMode), + Field(&VideoStream::scalability_mode, ScalabilityMode::kL1T3), + Field(&VideoStream::scalability_mode, kDefaultScalabilityMode))); + + // FakeVideoSendStream calls CreateEncoderStreams, test that the vector of + // VideoStreams are created appropriately for the simulcast case. + EXPECT_THAT( + stream->GetVideoStreams(), ElementsAre( - Field(&webrtc::RtpEncodingParameters::scalability_mode, - kDefaultScalabilityModeStr), - Field(&webrtc::RtpEncodingParameters::scalability_mode, "L1T3"), - Field(&webrtc::RtpEncodingParameters::scalability_mode, - kDefaultScalabilityModeStr))); + Field(&VideoStream::scalability_mode, kDefaultScalabilityMode), + Field(&VideoStream::scalability_mode, ScalabilityMode::kL1T3), + Field(&VideoStream::scalability_mode, kDefaultScalabilityMode))); + + // GetParameters. + parameters = send_channel_->GetRtpSendParameters(last_ssrc_); + EXPECT_THAT( + parameters.encodings, + ElementsAre(Field(&RtpEncodingParameters::scalability_mode, + webrtc::kNoLayeringScalabilityModeStr), + Field(&RtpEncodingParameters::scalability_mode, "L1T3"), + Field(&RtpEncodingParameters::scalability_mode, + webrtc::kNoLayeringScalabilityModeStr))); // No parameters changed, encoder should not be reconfigured. 
EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); @@ -8546,23 +8547,220 @@ TEST_F(WebRtcVideoChannelTest, FallbackForUnsetOrUnsupportedScalabilityMode) { } TEST_F(WebRtcVideoChannelTest, - DefaultValueUsedIfScalabilityModeIsUnsupportedByCodec) { - const absl::InlinedVector - kVp9SupportedModes = {ScalabilityMode::kL3T3}; + SetRtpParametersForH265ShouldSucceedIgnoreLowerLevelId) { + encoder_factory_->AddSupportedVideoCodec( + SdpVideoFormat("H265", + {{"profile-id", "1"}, + {"tier-flag", "0"}, + {"level-id", "156"}, + {"tx-mode", "SRST"}}, + {ScalabilityMode::kL1T1})); + VideoSenderParameters send_parameters; + send_parameters.codecs.push_back(GetEngineCodec("H265")); + for (auto& codec : send_parameters.codecs) { + if (absl::EqualsIgnoreCase(codec.name, "H265")) { + codec.params["level-id"] = "156"; + } + } + + EXPECT_TRUE(send_channel_->SetSenderParameters(send_parameters)); + FakeVideoSendStream* stream = AddSendStream(); + ASSERT_TRUE(stream); + + FrameForwarder frame_forwarder; + VideoOptions options; + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetSend(true); + frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); + + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); + + RtpCodecParameters matched_codec; + for (const auto& codec : parameters.codecs) { + if (absl::EqualsIgnoreCase(codec.name, "H265")) { + EXPECT_EQ(codec.parameters.at("level-id"), "156"); + matched_codec = codec; + } + } + + FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front(); + ASSERT_TRUE(send_stream); + VideoEncoderConfig encoder_config = send_stream->GetEncoderConfig().Copy(); + EXPECT_EQ(encoder_config.video_format.parameters.at("level-id"), "156"); - encoder_factory_->AddSupportedVideoCodec(webrtc::SdpVideoFormat( - "VP8", webrtc::SdpVideoFormat::Parameters(), {ScalabilityMode::kL1T1})); - encoder_factory_->AddSupportedVideoCodec(webrtc::SdpVideoFormat( - "VP9", webrtc::SdpVideoFormat::Parameters(), {ScalabilityMode::kL3T3})); + // Set the level-id parameter to lower than the negotiated codec level-id. 
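
// ----------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): the tests above exercise
// per-layer scalability modes through the send channel. At the application
// level the same knob is reached through RtpSenderInterface::GetParameters()
// and SetParameters(); layers whose mode is unset or unsupported fall back to
// the codec's default mode (or the "no layering" mode for H.265), as verified
// above. The helper name and its argument are invented for illustration only.
#include <optional>
#include <string>

#include "api/rtc_error.h"
#include "api/rtp_parameters.h"
#include "api/rtp_sender_interface.h"

webrtc::RTCError RequestPerLayerScalability(
    webrtc::RtpSenderInterface& sender) {
  webrtc::RtpParameters parameters = sender.GetParameters();
  if (parameters.encodings.size() < 2) {
    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_STATE);
  }
  // Unset: this layer keeps the codec's default scalability mode.
  parameters.encodings[0].scalability_mode = std::nullopt;
  // Explicitly request three temporal layers on the second simulcast layer.
  parameters.encodings[1].scalability_mode = "L1T3";
  return sender.SetParameters(parameters);
}
// ------------------------------ end of sketch -------------------------------
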
+ EXPECT_EQ(1u, parameters.encodings.size()); + matched_codec.parameters["level-id"] = "120"; + parameters.encodings[0].codec = matched_codec; + + EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + RtpParameters parameters2 = send_channel_->GetRtpSendParameters(last_ssrc_); + + for (const auto& codec : parameters2.codecs) { + if (absl::EqualsIgnoreCase(codec.name, "H265")) { + EXPECT_EQ(codec.parameters.at("level-id"), "156"); + } + } + + FakeVideoSendStream* send_stream2 = fake_call_->GetVideoSendStreams().front(); + ASSERT_TRUE(send_stream2); + VideoEncoderConfig encoder_config2 = send_stream2->GetEncoderConfig().Copy(); + EXPECT_EQ(encoder_config2.video_format.parameters.at("level-id"), "156"); + + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); +} + +TEST_F(WebRtcVideoChannelTest, + SetRtpParametersForH265WithSameLevelIdShouldSucceed) { + encoder_factory_->AddSupportedVideoCodec( + SdpVideoFormat("H265", + {{"profile-id", "1"}, + {"tier-flag", "0"}, + {"level-id", "156"}, + {"tx-mode", "SRST"}}, + {ScalabilityMode::kL1T1})); + VideoSenderParameters send_parameters; + send_parameters.codecs.push_back(GetEngineCodec("H265")); + for (auto& codec : send_parameters.codecs) { + if (absl::EqualsIgnoreCase(codec.name, "H265")) { + codec.params["level-id"] = "156"; + } + } + + EXPECT_TRUE(send_channel_->SetSenderParameters(send_parameters)); + FakeVideoSendStream* stream = AddSendStream(); + ASSERT_TRUE(stream); + + FrameForwarder frame_forwarder; + VideoOptions options; + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetSend(true); + frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); + + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); + + RtpCodecParameters matched_codec; + for (const auto& codec : parameters.codecs) { + if (absl::EqualsIgnoreCase(codec.name, "H265")) { + EXPECT_EQ(codec.parameters.at("level-id"), "156"); + matched_codec = codec; + } + } + + FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front(); + ASSERT_TRUE(send_stream); + VideoEncoderConfig encoder_config = send_stream->GetEncoderConfig().Copy(); + EXPECT_EQ(encoder_config.video_format.parameters.at("level-id"), "156"); + + // Set the level-id parameter to the same as the negotiated codec level-id. 
+ EXPECT_EQ(1u, parameters.encodings.size()); + matched_codec.parameters["level-id"] = "156"; + parameters.encodings[0].codec = matched_codec; + + EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + + RtpParameters parameters2 = send_channel_->GetRtpSendParameters(last_ssrc_); + + for (const auto& codec : parameters2.codecs) { + if (absl::EqualsIgnoreCase(codec.name, "H265")) { + EXPECT_EQ(codec.parameters.at("level-id"), "156"); + matched_codec = codec; + } + } + + FakeVideoSendStream* send_stream2 = fake_call_->GetVideoSendStreams().front(); + ASSERT_TRUE(send_stream2); + VideoEncoderConfig encoder_config2 = send_stream2->GetEncoderConfig().Copy(); + EXPECT_EQ(encoder_config2.video_format.parameters.at("level-id"), "156"); + + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); +} + +TEST_F(WebRtcVideoChannelTest, + SetRtpParametersForH265ShouldSucceedIgnoreHigherLevelId) { + encoder_factory_->AddSupportedVideoCodec( + SdpVideoFormat("H265", + {{"profile-id", "1"}, + {"tier-flag", "0"}, + {"level-id", "156"}, + {"tx-mode", "SRST"}}, + {ScalabilityMode::kL1T1})); + VideoSenderParameters send_parameters; + send_parameters.codecs.push_back(GetEngineCodec("H265")); + for (auto& codec : send_parameters.codecs) { + if (absl::EqualsIgnoreCase(codec.name, "H265")) { + codec.params["level-id"] = "156"; + } + } - cricket::VideoSenderParameters send_parameters; + EXPECT_TRUE(send_channel_->SetSenderParameters(send_parameters)); + FakeVideoSendStream* stream = AddSendStream(); + ASSERT_TRUE(stream); + + FrameForwarder frame_forwarder; + VideoOptions options; + EXPECT_TRUE( + send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); + send_channel_->SetSend(true); + frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); + + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); + + RtpCodecParameters matched_codec; + for (const auto& codec : parameters.codecs) { + if (absl::EqualsIgnoreCase(codec.name, "H265")) { + EXPECT_EQ(codec.parameters.at("level-id"), "156"); + matched_codec = codec; + } + } + + FakeVideoSendStream* send_stream = fake_call_->GetVideoSendStreams().front(); + ASSERT_TRUE(send_stream); + VideoEncoderConfig encoder_config = send_stream->GetEncoderConfig().Copy(); + EXPECT_EQ(encoder_config.video_format.parameters.at("level-id"), "156"); + + // Set the level-id parameter to higher than the negotiated codec level-id. 
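
// ----------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): the three H.265 tests in this
// hunk pin the negotiated H.265 codec on an encoding via the per-encoding
// `codec` field and then verify that a lower, equal, or higher "level-id" in
// that override is ignored: the negotiated level-id (here "156") is kept in
// both GetRtpSendParameters() and the encoder config. The helper below mirrors
// that flow with the public RtpSenderInterface API; its name and argument are
// invented, and it assumes the per-encoding `codec` field accepts the
// negotiated RtpCodecParameters, as the tests above do.
#include "absl/strings/match.h"
#include "api/rtc_error.h"
#include "api/rtp_parameters.h"
#include "api/rtp_sender_interface.h"

webrtc::RTCError PinNegotiatedH265(webrtc::RtpSenderInterface& sender) {
  webrtc::RtpParameters parameters = sender.GetParameters();
  if (parameters.encodings.empty()) {
    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_STATE);
  }
  for (const webrtc::RtpCodecParameters& codec : parameters.codecs) {
    if (absl::EqualsIgnoreCase(codec.name, "H265")) {
      // Changing "level-id" in this override would be ignored; the
      // negotiated value wins.
      parameters.encodings[0].codec = codec;
      break;
    }
  }
  return sender.SetParameters(parameters);
}
// ------------------------------ end of sketch -------------------------------
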
+ EXPECT_EQ(1u, parameters.encodings.size()); + matched_codec.parameters["level-id"] = "180"; + parameters.encodings[0].codec = matched_codec; + + EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); + + RtpParameters parameters2 = send_channel_->GetRtpSendParameters(last_ssrc_); + + for (const auto& codec : parameters2.codecs) { + if (absl::EqualsIgnoreCase(codec.name, "H265")) { + EXPECT_EQ(codec.parameters.at("level-id"), "156"); + } + } + FakeVideoSendStream* send_stream2 = fake_call_->GetVideoSendStreams().front(); + ASSERT_TRUE(send_stream2); + VideoEncoderConfig encoder_config2 = send_stream2->GetEncoderConfig().Copy(); + EXPECT_EQ(encoder_config2.video_format.parameters.at("level-id"), "156"); + + EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); +} +#endif + +TEST_F(WebRtcVideoChannelTest, + DefaultValueUsedIfScalabilityModeIsUnsupportedByCodec) { + encoder_factory_->AddSupportedVideoCodec( + SdpVideoFormat("VP8", CodecParameterMap(), + {ScalabilityMode::kL1T1, ScalabilityMode::kL1T2})); + encoder_factory_->AddSupportedVideoCodec( + SdpVideoFormat("VP9", CodecParameterMap(), + {ScalabilityMode::kL1T2, ScalabilityMode::kL3T3})); + + VideoSenderParameters send_parameters; send_parameters.codecs.push_back(GetEngineCodec("VP9")); EXPECT_TRUE(send_channel_->SetSenderParameters(send_parameters)); FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); // Send a full size frame so all simulcast layers are used when reconfiguring. - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; VideoOptions options; EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); @@ -8570,46 +8768,43 @@ TEST_F(WebRtcVideoChannelTest, frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Set scalability mode. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[0].scalability_mode = "L3T3"; EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); // Verify that the new value is propagated down to the encoder. // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. - const absl::optional kDefaultScalabilityMode = + const std::optional kDefaultScalabilityMode = webrtc::ScalabilityModeFromString(kDefaultScalabilityModeStr); EXPECT_EQ(2, stream->num_encoder_reconfigurations()); - webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); + VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); EXPECT_EQ(1u, encoder_config.number_of_streams); - EXPECT_THAT(encoder_config.simulcast_layers, - ElementsAre(Field(&webrtc::VideoStream::scalability_mode, - ScalabilityMode::kL3T3), - Field(&webrtc::VideoStream::scalability_mode, - kDefaultScalabilityMode), - Field(&webrtc::VideoStream::scalability_mode, - kDefaultScalabilityMode))); + EXPECT_THAT( + encoder_config.simulcast_layers, + ElementsAre( + Field(&VideoStream::scalability_mode, ScalabilityMode::kL3T3), + Field(&VideoStream::scalability_mode, kDefaultScalabilityMode), + Field(&VideoStream::scalability_mode, kDefaultScalabilityMode))); // FakeVideoSendStream calls CreateEncoderStreams, test that the vector of // VideoStreams are created appropriately for the simulcast case. 
EXPECT_THAT(stream->GetVideoStreams(), - ElementsAre(Field(&webrtc::VideoStream::scalability_mode, + ElementsAre(Field(&VideoStream::scalability_mode, ScalabilityMode::kL3T3))); // GetParameters. parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_THAT( parameters.encodings, - ElementsAre( - Field(&webrtc::RtpEncodingParameters::scalability_mode, "L3T3"), - Field(&webrtc::RtpEncodingParameters::scalability_mode, - kDefaultScalabilityModeStr), - Field(&webrtc::RtpEncodingParameters::scalability_mode, - kDefaultScalabilityModeStr))); + ElementsAre(Field(&RtpEncodingParameters::scalability_mode, "L3T3"), + Field(&RtpEncodingParameters::scalability_mode, + kDefaultScalabilityModeStr), + Field(&RtpEncodingParameters::scalability_mode, + kDefaultScalabilityModeStr))); // Change codec to VP8. - cricket::VideoSenderParameters vp8_parameters; + VideoSenderParameters vp8_parameters; vp8_parameters.codecs.push_back(GetEngineCodec("VP8")); EXPECT_TRUE(send_channel_->SetSenderParameters(vp8_parameters)); frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); @@ -8620,23 +8815,21 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_EQ(2, fake_call_->GetNumCreatedSendStreams()); // Verify fallback to default value triggered (L3T3 is not supported). - EXPECT_THAT(new_streams[0]->GetVideoStreams(), - ElementsAre(Field(&webrtc::VideoStream::scalability_mode, - kDefaultScalabilityMode), - Field(&webrtc::VideoStream::scalability_mode, - kDefaultScalabilityMode), - Field(&webrtc::VideoStream::scalability_mode, - kDefaultScalabilityMode))); + EXPECT_THAT( + new_streams[0]->GetVideoStreams(), + ElementsAre( + Field(&VideoStream::scalability_mode, kDefaultScalabilityMode), + Field(&VideoStream::scalability_mode, kDefaultScalabilityMode), + Field(&VideoStream::scalability_mode, kDefaultScalabilityMode))); parameters = send_channel_->GetRtpSendParameters(last_ssrc_); - EXPECT_THAT( - parameters.encodings, - ElementsAre(Field(&webrtc::RtpEncodingParameters::scalability_mode, - kDefaultScalabilityModeStr), - Field(&webrtc::RtpEncodingParameters::scalability_mode, - kDefaultScalabilityModeStr), - Field(&webrtc::RtpEncodingParameters::scalability_mode, - kDefaultScalabilityModeStr))); + EXPECT_THAT(parameters.encodings, + ElementsAre(Field(&RtpEncodingParameters::scalability_mode, + kDefaultScalabilityModeStr), + Field(&RtpEncodingParameters::scalability_mode, + kDefaultScalabilityModeStr), + Field(&RtpEncodingParameters::scalability_mode, + kDefaultScalabilityModeStr))); EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } @@ -8645,8 +8838,7 @@ TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMinAndMaxBitrate) { SetUpSimulcast(true, /*with_rtx=*/false); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); for (const auto& encoding : parameters.encodings) { EXPECT_FALSE(encoding.min_bitrate_bps); @@ -8677,14 +8869,13 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersFailsWithIncorrectBitrate) { SetUpSimulcast(true, /*with_rtx=*/false); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); // Max bitrate lower than min bitrate should fail. 
parameters.encodings[2].min_bitrate_bps = 100000; parameters.encodings[2].max_bitrate_bps = 100000 - 1; - EXPECT_EQ(webrtc::RTCErrorType::INVALID_RANGE, + EXPECT_EQ(RTCErrorType::INVALID_RANGE, send_channel_->SetRtpSendParameters(last_ssrc_, parameters).type()); } @@ -8696,7 +8887,7 @@ TEST_F(WebRtcVideoChannelTest, MinAndMaxSimulcastBitratePropagatedToEncoder) { FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); // Send a full size frame so all simulcast layers are used when reconfiguring. - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; VideoOptions options; EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); @@ -8705,8 +8896,7 @@ TEST_F(WebRtcVideoChannelTest, MinAndMaxSimulcastBitratePropagatedToEncoder) { // Get and set the rtp encoding parameters. // Change the value and set it on the VideoChannel. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[0].min_bitrate_bps = 100000; parameters.encodings[0].max_bitrate_bps = 200000; @@ -8719,7 +8909,7 @@ TEST_F(WebRtcVideoChannelTest, MinAndMaxSimulcastBitratePropagatedToEncoder) { // Verify that the new value propagated down to the encoder. // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. EXPECT_EQ(2, stream->num_encoder_reconfigurations()); - webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); + VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); EXPECT_EQ(kNumSimulcastStreams, encoder_config.number_of_streams); EXPECT_EQ(kNumSimulcastStreams, encoder_config.simulcast_layers.size()); EXPECT_EQ(100000, encoder_config.simulcast_layers[0].min_bitrate_bps); @@ -8756,11 +8946,11 @@ TEST_F(WebRtcVideoChannelTest, MinAndMaxSimulcastBitratePropagatedToEncoder) { // RtpParameters. The unspecified min/max and target value should be set to the // simulcast default that is used if no min/max are specified. TEST_F(WebRtcVideoChannelTest, MinOrMaxSimulcastBitratePropagatedToEncoder) { - const std::vector kDefault = GetSimulcastBitrates720p(); + const std::vector kDefault = GetSimulcastBitrates720p(); FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); // Send a full size frame so all simulcast layers are used when reconfiguring. - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; VideoOptions options; EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); @@ -8768,8 +8958,7 @@ TEST_F(WebRtcVideoChannelTest, MinOrMaxSimulcastBitratePropagatedToEncoder) { frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); // Change the value and set it on the VideoChannel. @@ -8783,7 +8972,7 @@ TEST_F(WebRtcVideoChannelTest, MinOrMaxSimulcastBitratePropagatedToEncoder) { // Verify that the new value propagated down to the encoder. // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. 
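
// ----------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): the surrounding tests check
// that per-encoding min/max bitrates reach the encoder's VideoEncoderConfig
// and that a max below the min is rejected with RTCErrorType::INVALID_RANGE.
// The application-side call looks roughly like this; the helper name, the
// sender argument, and the bitrate values are invented for illustration.
#include "api/rtc_error.h"
#include "api/rtp_parameters.h"
#include "api/rtp_sender_interface.h"

webrtc::RTCError CapLowestLayerBitrate(webrtc::RtpSenderInterface& sender) {
  webrtc::RtpParameters parameters = sender.GetParameters();
  if (parameters.encodings.empty()) {
    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_STATE);
  }
  parameters.encodings[0].min_bitrate_bps = 100000;
  parameters.encodings[0].max_bitrate_bps = 200000;  // Must be >= the min.
  return sender.SetParameters(parameters);
}
// ------------------------------ end of sketch -------------------------------
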
- webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); + VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); EXPECT_EQ(kNumSimulcastStreams, encoder_config.number_of_streams); EXPECT_EQ(kNumSimulcastStreams, encoder_config.simulcast_layers.size()); EXPECT_EQ(kMinBpsLayer0, encoder_config.simulcast_layers[0].min_bitrate_bps); @@ -8826,11 +9015,11 @@ TEST_F(WebRtcVideoChannelTest, MinOrMaxSimulcastBitratePropagatedToEncoder) { // RtpParameters above (or below) the simulcast default max (or min) adjusts the // unspecified values accordingly. TEST_F(WebRtcVideoChannelTest, SetMinAndMaxSimulcastBitrateAboveBelowDefault) { - const std::vector kDefault = GetSimulcastBitrates720p(); + const std::vector kDefault = GetSimulcastBitrates720p(); FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); // Send a full size frame so all simulcast layers are used when reconfiguring. - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; VideoOptions options; EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); @@ -8838,8 +9027,7 @@ TEST_F(WebRtcVideoChannelTest, SetMinAndMaxSimulcastBitrateAboveBelowDefault) { frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Get and set the rtp encoding parameters. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); // Change the value and set it on the VideoChannel. @@ -8875,11 +9063,11 @@ TEST_F(WebRtcVideoChannelTest, SetMinAndMaxSimulcastBitrateAboveBelowDefault) { } TEST_F(WebRtcVideoChannelTest, BandwidthAboveTotalMaxBitrateGivenToMaxLayer) { - const std::vector kDefault = GetSimulcastBitrates720p(); + const std::vector kDefault = GetSimulcastBitrates720p(); FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); // Send a full size frame so all simulcast layers are used when reconfiguring. - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; VideoOptions options; EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); @@ -8887,8 +9075,7 @@ TEST_F(WebRtcVideoChannelTest, BandwidthAboveTotalMaxBitrateGivenToMaxLayer) { frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Set max bitrate for all but the highest layer. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[0].max_bitrate_bps = kDefault[0].max_bitrate_bps; parameters.encodings[1].max_bitrate_bps = kDefault[1].max_bitrate_bps; @@ -8896,7 +9083,7 @@ TEST_F(WebRtcVideoChannelTest, BandwidthAboveTotalMaxBitrateGivenToMaxLayer) { // Set max bandwidth equal to total max bitrate. send_parameters_.max_bandwidth_bps = - GetTotalMaxBitrate(stream->GetVideoStreams()).bps(); + webrtc::GetTotalMaxBitrate(stream->GetVideoStreams()).bps(); ExpectSetMaxBitrate(send_parameters_.max_bandwidth_bps); ASSERT_TRUE(send_channel_->SetSenderParameters(send_parameters_)); @@ -8907,7 +9094,7 @@ TEST_F(WebRtcVideoChannelTest, BandwidthAboveTotalMaxBitrateGivenToMaxLayer) { // Set max bandwidth above the total max bitrate. 
send_parameters_.max_bandwidth_bps = - GetTotalMaxBitrate(stream->GetVideoStreams()).bps() + 1; + webrtc::GetTotalMaxBitrate(stream->GetVideoStreams()).bps() + 1; ExpectSetMaxBitrate(send_parameters_.max_bandwidth_bps); ASSERT_TRUE(send_channel_->SetSenderParameters(send_parameters_)); @@ -8915,7 +9102,7 @@ TEST_F(WebRtcVideoChannelTest, BandwidthAboveTotalMaxBitrateGivenToMaxLayer) { // max should be given to the highest layer. EXPECT_EQ(kNumSimulcastStreams, stream->GetVideoStreams().size()); EXPECT_EQ(send_parameters_.max_bandwidth_bps, - GetTotalMaxBitrate(stream->GetVideoStreams()).bps()); + webrtc::GetTotalMaxBitrate(stream->GetVideoStreams()).bps()); EXPECT_EQ(kDefault[2].max_bitrate_bps + 1, stream->GetVideoStreams()[2].max_bitrate_bps); @@ -8924,12 +9111,12 @@ TEST_F(WebRtcVideoChannelTest, BandwidthAboveTotalMaxBitrateGivenToMaxLayer) { TEST_F(WebRtcVideoChannelTest, BandwidthAboveTotalMaxBitrateNotGivenToMaxLayerIfMaxBitrateSet) { - const std::vector kDefault = GetSimulcastBitrates720p(); + const std::vector kDefault = GetSimulcastBitrates720p(); EXPECT_EQ(kNumSimulcastStreams, kDefault.size()); FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); // Send a full size frame so all simulcast layers are used when reconfiguring. - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; VideoOptions options; EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); @@ -8937,15 +9124,14 @@ TEST_F(WebRtcVideoChannelTest, frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame()); // Set max bitrate for the highest layer. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); parameters.encodings[2].max_bitrate_bps = kDefault[2].max_bitrate_bps; EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); // Set max bandwidth above the total max bitrate. 
send_parameters_.max_bandwidth_bps = - GetTotalMaxBitrate(stream->GetVideoStreams()).bps() + 1; + webrtc::GetTotalMaxBitrate(stream->GetVideoStreams()).bps() + 1; ExpectSetMaxBitrate(send_parameters_.max_bandwidth_bps); ASSERT_TRUE(send_channel_->SetSenderParameters(send_parameters_)); @@ -8958,6 +9144,81 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } +TEST_F(WebRtcVideoChannelTest, SetMixedCodecSimulcastStreamConfig) { + ScopedKeyValueConfig field_trials(field_trials_, + "WebRTC-MixedCodecSimulcast/Enabled/"); + + StreamParams sp = webrtc::CreateSimStreamParams("cname", {123, 456, 789}); + + std::vector rid_descriptions; + rid_descriptions.emplace_back("f", RidDirection::kSend); + rid_descriptions.emplace_back("h", RidDirection::kSend); + rid_descriptions.emplace_back("q", RidDirection::kSend); + sp.set_rids(rid_descriptions); + + ASSERT_TRUE(send_channel_->AddSendStream(sp)); + + RtpParameters rtp_parameters = + send_channel_->GetRtpSendParameters(last_ssrc_); + EXPECT_EQ(3UL, rtp_parameters.encodings.size()); + Codec vp8 = GetEngineCodec("VP8"); + Codec vp9 = GetEngineCodec("VP9"); + rtp_parameters.encodings[0].codec = vp8.ToCodecParameters(); + rtp_parameters.encodings[1].codec = vp8.ToCodecParameters(); + rtp_parameters.encodings[2].codec = vp9.ToCodecParameters(); + EXPECT_TRUE( + send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters).ok()); + + VideoSenderParameters parameters; + parameters.codecs.push_back(vp8); + parameters.codecs.push_back(vp9); + EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); + + const auto& streams = fake_call_->GetVideoSendStreams(); + ASSERT_EQ(1u, streams.size()); + auto stream = streams[0]; + ASSERT_NE(stream, nullptr); + const auto& config = stream->GetConfig(); + // RtpStreamConfig should have the correct codec name and payload type. + ASSERT_THAT(config.rtp.stream_configs, SizeIs(3)); + EXPECT_EQ(config.rtp.stream_configs[0].rid, "f"); + EXPECT_EQ(config.rtp.stream_configs[1].rid, "h"); + EXPECT_EQ(config.rtp.stream_configs[2].rid, "q"); + EXPECT_EQ(config.rtp.stream_configs[0].payload_name, vp8.name); + EXPECT_EQ(config.rtp.stream_configs[1].payload_name, vp8.name); + EXPECT_EQ(config.rtp.stream_configs[2].payload_name, vp9.name); + EXPECT_EQ(config.rtp.stream_configs[0].payload_type, vp8.id); + EXPECT_EQ(config.rtp.stream_configs[1].payload_type, vp8.id); + EXPECT_EQ(config.rtp.stream_configs[2].payload_type, vp9.id); +} + +#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) +TEST_F(WebRtcVideoChannelTest, + SetMixedCodecSimulcastWithDifferentConfigSettingsSizes) { + webrtc::test::ScopedKeyValueConfig field_trials( + field_trials_, "WebRTC-MixedCodecSimulcast/Enabled/"); + AddSendStream(); + + webrtc::VideoSenderParameters parameters; + webrtc::Codec vp8 = GetEngineCodec("VP8"); + parameters.codecs.push_back(vp8); + + // `codec_settings_list.size()` is 1 after this in the + EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); + + // It sets 2 sizes of config ssrc. + webrtc::StreamParams sp = CreateSimStreamParams("cname", {123, 456}); + std::vector rid_descriptions2; + rid_descriptions2.emplace_back("f", webrtc::RidDirection::kSend); + rid_descriptions2.emplace_back("h", webrtc::RidDirection::kSend); + sp.set_rids(rid_descriptions2); + + // `WebRtcVideoSendStream::SetCodec` test for different sizes + // between parameters_.config.rtp.ssrcs.size() and codec_settings_list.size(). 
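
// ----------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): SetMixedCodecSimulcastStream-
// Config above assigns different codecs to the simulcast encodings (VP8, VP8,
// VP9) under the WebRTC-MixedCodecSimulcast field trial and expects each
// per-stream RTP config to carry the matching payload name and type. A rough
// application-side equivalent via RtpSenderInterface is sketched below; the
// helper name and error handling are invented, and availability of the
// feature behind the field trial is assumed.
#include <optional>

#include "absl/strings/match.h"
#include "api/rtc_error.h"
#include "api/rtp_parameters.h"
#include "api/rtp_sender_interface.h"

webrtc::RTCError RequestMixedCodecSimulcast(
    webrtc::RtpSenderInterface& sender) {
  webrtc::RtpParameters parameters = sender.GetParameters();
  if (parameters.encodings.size() != 3) {
    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_STATE);
  }
  std::optional<webrtc::RtpCodecParameters> vp8;
  std::optional<webrtc::RtpCodecParameters> vp9;
  for (const webrtc::RtpCodecParameters& codec : parameters.codecs) {
    if (absl::EqualsIgnoreCase(codec.name, "VP8")) vp8 = codec;
    if (absl::EqualsIgnoreCase(codec.name, "VP9")) vp9 = codec;
  }
  if (!vp8 || !vp9) {
    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER);
  }
  parameters.encodings[0].codec = *vp8;  // "f"
  parameters.encodings[1].codec = *vp8;  // "h"
  parameters.encodings[2].codec = *vp9;  // "q"
  return sender.SetParameters(parameters);
}
// ------------------------------ end of sketch -------------------------------
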
+ EXPECT_DEATH(send_channel_->AddSendStream(sp), ""); +} +#endif + // Test that min and max bitrate values set via RtpParameters are correctly // propagated to the underlying encoder for a single stream. TEST_F(WebRtcVideoChannelTest, MinAndMaxBitratePropagatedToEncoder) { @@ -8966,15 +9227,14 @@ TEST_F(WebRtcVideoChannelTest, MinAndMaxBitratePropagatedToEncoder) { EXPECT_TRUE(stream->IsSending()); // Set min and max bitrate. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); + RtpParameters parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1u, parameters.encodings.size()); parameters.encodings[0].min_bitrate_bps = 80000; parameters.encodings[0].max_bitrate_bps = 150000; EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. - webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); + VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); EXPECT_EQ(1u, encoder_config.number_of_streams); EXPECT_EQ(1u, encoder_config.simulcast_layers.size()); EXPECT_EQ(80000, encoder_config.simulcast_layers[0].min_bitrate_bps); @@ -8997,7 +9257,7 @@ TEST_F(WebRtcVideoChannelTest, DefaultMinAndMaxBitratePropagatedToEncoder) { EXPECT_TRUE(stream->IsSending()); // Check that WebRtcVideoSendStream updates VideoEncoderConfig correctly. - webrtc::VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); + VideoEncoderConfig encoder_config = stream->GetEncoderConfig().Copy(); EXPECT_EQ(1u, encoder_config.number_of_streams); EXPECT_EQ(1u, encoder_config.simulcast_layers.size()); EXPECT_EQ(-1, encoder_config.simulcast_layers[0].min_bitrate_bps); @@ -9014,131 +9274,30 @@ TEST_F(WebRtcVideoChannelTest, DefaultMinAndMaxBitratePropagatedToEncoder) { stream->GetVideoStreams()[0].target_bitrate_bps); } -// Test that a stream will not be sending if its encoding is made inactive -// through SetRtpSendParameters. -TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersOneEncodingActive) { - FakeVideoSendStream* stream = AddSendStream(); - EXPECT_TRUE(send_channel_->SetSend(true)); - EXPECT_TRUE(stream->IsSending()); - - // Get current parameters and change "active" to false. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); - ASSERT_EQ(1u, parameters.encodings.size()); - ASSERT_TRUE(parameters.encodings[0].active); - parameters.encodings[0].active = false; - EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); - EXPECT_FALSE(stream->IsSending()); - - // Now change it back to active and verify we resume sending. - parameters.encodings[0].active = true; - EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); - EXPECT_TRUE(stream->IsSending()); -} - -// Tests that when active is updated for any simulcast layer then the send -// stream's sending state will be updated and it will be reconfigured with the -// new appropriate active simulcast streams. -TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersMultipleEncodingsActive) { - // Create the stream params with multiple ssrcs for simulcast. 
- const size_t kNumSimulcastStreams = 3; - std::vector ssrcs = MAKE_VECTOR(kSsrcs3); - StreamParams stream_params = CreateSimStreamParams("cname", ssrcs); - FakeVideoSendStream* fake_video_send_stream = AddSendStream(stream_params); - uint32_t primary_ssrc = stream_params.first_ssrc(); - - // Using the FrameForwarder, we manually send a full size - // frame. This allows us to test that ReconfigureEncoder is called - // appropriately. - webrtc::test::FrameForwarder frame_forwarder; - VideoOptions options; - EXPECT_TRUE( - send_channel_->SetVideoSend(primary_ssrc, &options, &frame_forwarder)); - send_channel_->SetSend(true); - frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame( - 1920, 1080, webrtc::VideoRotation::kVideoRotation_0, - rtc::kNumMicrosecsPerSec / 30)); - - // Check that all encodings are initially active. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(primary_ssrc); - EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); - EXPECT_TRUE(parameters.encodings[0].active); - EXPECT_TRUE(parameters.encodings[1].active); - EXPECT_TRUE(parameters.encodings[2].active); - EXPECT_TRUE(fake_video_send_stream->IsSending()); - - // Only turn on only the middle stream. - parameters.encodings[0].active = false; - parameters.encodings[1].active = true; - parameters.encodings[2].active = false; - EXPECT_TRUE( - send_channel_->SetRtpSendParameters(primary_ssrc, parameters).ok()); - // Verify that the active fields are set on the VideoChannel. - parameters = send_channel_->GetRtpSendParameters(primary_ssrc); - EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); - EXPECT_FALSE(parameters.encodings[0].active); - EXPECT_TRUE(parameters.encodings[1].active); - EXPECT_FALSE(parameters.encodings[2].active); - // Check that the VideoSendStream is updated appropriately. This means its - // send state was updated and it was reconfigured. - EXPECT_TRUE(fake_video_send_stream->IsSending()); - std::vector simulcast_streams = - fake_video_send_stream->GetVideoStreams(); - EXPECT_EQ(kNumSimulcastStreams, simulcast_streams.size()); - EXPECT_FALSE(simulcast_streams[0].active); - EXPECT_TRUE(simulcast_streams[1].active); - EXPECT_FALSE(simulcast_streams[2].active); - - // Turn off all streams. - parameters.encodings[0].active = false; - parameters.encodings[1].active = false; - parameters.encodings[2].active = false; - EXPECT_TRUE( - send_channel_->SetRtpSendParameters(primary_ssrc, parameters).ok()); - // Verify that the active fields are set on the VideoChannel. - parameters = send_channel_->GetRtpSendParameters(primary_ssrc); - EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); - EXPECT_FALSE(parameters.encodings[0].active); - EXPECT_FALSE(parameters.encodings[1].active); - EXPECT_FALSE(parameters.encodings[2].active); - // Check that the VideoSendStream is off. - EXPECT_FALSE(fake_video_send_stream->IsSending()); - simulcast_streams = fake_video_send_stream->GetVideoStreams(); - EXPECT_EQ(kNumSimulcastStreams, simulcast_streams.size()); - EXPECT_FALSE(simulcast_streams[0].active); - EXPECT_FALSE(simulcast_streams[1].active); - EXPECT_FALSE(simulcast_streams[2].active); - - EXPECT_TRUE(send_channel_->SetVideoSend(primary_ssrc, nullptr, nullptr)); -} - // Tests that when some streams are disactivated then the lowest // stream min_bitrate would be reused for the first active stream. TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersSetsMinBitrateForFirstActiveStream) { // Create the stream params with multiple ssrcs for simulcast. 
- const size_t kNumSimulcastStreams = 3; std::vector ssrcs = MAKE_VECTOR(kSsrcs3); - StreamParams stream_params = CreateSimStreamParams("cname", ssrcs); + StreamParams stream_params = webrtc::CreateSimStreamParams("cname", ssrcs); FakeVideoSendStream* fake_video_send_stream = AddSendStream(stream_params); uint32_t primary_ssrc = stream_params.first_ssrc(); // Using the FrameForwarder, we manually send a full size // frame. This allows us to test that ReconfigureEncoder is called // appropriately. - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; VideoOptions options; EXPECT_TRUE( send_channel_->SetVideoSend(primary_ssrc, &options, &frame_forwarder)); send_channel_->SetSend(true); - frame_forwarder.IncomingCapturedFrame(frame_source_.GetFrame( - 1920, 1080, webrtc::VideoRotation::kVideoRotation_0, - rtc::kNumMicrosecsPerSec / 30)); + frame_forwarder.IncomingCapturedFrame( + frame_source_.GetFrame(1920, 1080, VideoRotation::kVideoRotation_0, + webrtc::kNumMicrosecsPerSec / 30)); // Check that all encodings are initially active. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(primary_ssrc); + RtpParameters parameters = send_channel_->GetRtpSendParameters(primary_ssrc); EXPECT_EQ(kNumSimulcastStreams, parameters.encodings.size()); EXPECT_TRUE(parameters.encodings[0].active); EXPECT_TRUE(parameters.encodings[1].active); @@ -9155,7 +9314,7 @@ TEST_F(WebRtcVideoChannelTest, // Check that the VideoSendStream is updated appropriately. This means its // send state was updated and it was reconfigured. EXPECT_TRUE(fake_video_send_stream->IsSending()); - std::vector simulcast_streams = + std::vector simulcast_streams = fake_video_send_stream->GetVideoStreams(); EXPECT_EQ(kNumSimulcastStreams, simulcast_streams.size()); EXPECT_FALSE(simulcast_streams[0].active); @@ -9168,56 +9327,15 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_TRUE(send_channel_->SetVideoSend(primary_ssrc, nullptr, nullptr)); } -// Test that if a stream is reconfigured (due to a codec change or other -// change) while its encoding is still inactive, it doesn't start sending. -TEST_F(WebRtcVideoChannelTest, - InactiveStreamDoesntStartSendingWhenReconfigured) { - // Set an initial codec list, which will be modified later. - cricket::VideoSenderParameters parameters1; - parameters1.codecs.push_back(GetEngineCodec("VP8")); - parameters1.codecs.push_back(GetEngineCodec("VP9")); - EXPECT_TRUE(send_channel_->SetSenderParameters(parameters1)); - - FakeVideoSendStream* stream = AddSendStream(); - EXPECT_TRUE(send_channel_->SetSend(true)); - EXPECT_TRUE(stream->IsSending()); - - // Get current parameters and change "active" to false. - webrtc::RtpParameters parameters = - send_channel_->GetRtpSendParameters(last_ssrc_); - ASSERT_EQ(1u, parameters.encodings.size()); - ASSERT_TRUE(parameters.encodings[0].active); - parameters.encodings[0].active = false; - EXPECT_EQ(1u, GetFakeSendStreams().size()); - EXPECT_EQ(1, fake_call_->GetNumCreatedSendStreams()); - EXPECT_TRUE(send_channel_->SetRtpSendParameters(last_ssrc_, parameters).ok()); - EXPECT_FALSE(stream->IsSending()); - - // Reorder the codec list, causing the stream to be reconfigured. - cricket::VideoSenderParameters parameters2; - parameters2.codecs.push_back(GetEngineCodec("VP9")); - parameters2.codecs.push_back(GetEngineCodec("VP8")); - EXPECT_TRUE(send_channel_->SetSenderParameters(parameters2)); - auto new_streams = GetFakeSendStreams(); - // Assert that a new underlying stream was created due to the codec change. 
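
// ----------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): the surrounding tests toggle
// simulcast encodings on and off through the `active` flag; the retained
// SetRtpSendParametersSetsMinBitrateForFirstActiveStream test expects the
// lowest layer's min bitrate to be reused for the first layer that is still
// active. The helper name and argument below are invented for illustration.
#include "api/rtc_error.h"
#include "api/rtp_parameters.h"
#include "api/rtp_sender_interface.h"

webrtc::RTCError SendOnlyTopLayer(webrtc::RtpSenderInterface& sender) {
  webrtc::RtpParameters parameters = sender.GetParameters();
  if (parameters.encodings.size() < 2) {
    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_STATE);
  }
  for (size_t i = 0; i + 1 < parameters.encodings.size(); ++i) {
    parameters.encodings[i].active = false;
  }
  parameters.encodings.back().active = true;
  return sender.SetParameters(parameters);
}
// ------------------------------ end of sketch -------------------------------
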
- // Otherwise, this test isn't testing what it set out to test. - EXPECT_EQ(1u, GetFakeSendStreams().size()); - EXPECT_EQ(2, fake_call_->GetNumCreatedSendStreams()); - - // Verify that we still are not sending anything, due to the inactive - // encoding. - EXPECT_FALSE(new_streams[0]->IsSending()); -} - // Test that GetRtpSendParameters returns the currently configured codecs. TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersCodecs) { AddSendStream(); - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); - webrtc::RtpParameters rtp_parameters = + RtpParameters rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(2u, rtp_parameters.codecs.size()); EXPECT_EQ(GetEngineCodec("VP8").ToCodecParameters(), @@ -9232,8 +9350,7 @@ TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersRtcpCname) { params.cname = "rtcpcname"; AddSendStream(params); - webrtc::RtpParameters rtp_parameters = - send_channel_->GetRtpSendParameters(kSsrc); + RtpParameters rtp_parameters = send_channel_->GetRtpSendParameters(kSsrc); EXPECT_STREQ("rtcpcname", rtp_parameters.rtcp.cname.c_str()); } @@ -9242,7 +9359,7 @@ TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersRtcpCname) { TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersSsrc) { AddSendStream(); - webrtc::RtpParameters rtp_parameters = + RtpParameters rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(1u, rtp_parameters.encodings.size()); EXPECT_EQ(last_ssrc_, rtp_parameters.encodings[0].ssrc); @@ -9251,37 +9368,37 @@ TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersSsrc) { TEST_F(WebRtcVideoChannelTest, DetectRtpSendParameterHeaderExtensionsChange) { AddSendStream(); - webrtc::RtpParameters rtp_parameters = + RtpParameters rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); rtp_parameters.header_extensions.emplace_back(); EXPECT_NE(0u, rtp_parameters.header_extensions.size()); - webrtc::RTCError result = + RTCError result = send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); - EXPECT_EQ(webrtc::RTCErrorType::INVALID_MODIFICATION, result.type()); + EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, result.type()); } TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersDegradationPreference) { AddSendStream(); - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); - webrtc::RtpParameters rtp_parameters = + RtpParameters rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_FALSE(rtp_parameters.degradation_preference.has_value()); rtp_parameters.degradation_preference = - webrtc::DegradationPreference::MAINTAIN_FRAMERATE; + DegradationPreference::MAINTAIN_FRAMERATE; EXPECT_TRUE( send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters).ok()); - webrtc::RtpParameters updated_rtp_parameters = + RtpParameters updated_rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(updated_rtp_parameters.degradation_preference, - webrtc::DegradationPreference::MAINTAIN_FRAMERATE); + DegradationPreference::MAINTAIN_FRAMERATE); // Remove the source since it will be destroyed before the channel EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); @@ -9290,12 +9407,12 @@ TEST_F(WebRtcVideoChannelTest, GetRtpSendParametersDegradationPreference) { // Test that 
if we set/get parameters multiple times, we get the same results. TEST_F(WebRtcVideoChannelTest, SetAndGetRtpSendParameters) { AddSendStream(); - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); - webrtc::RtpParameters initial_params = + RtpParameters initial_params = send_channel_->GetRtpSendParameters(last_ssrc_); // We should be able to set the params we just got. @@ -9309,12 +9426,12 @@ TEST_F(WebRtcVideoChannelTest, SetAndGetRtpSendParameters) { // Test that GetRtpReceiverParameters returns the currently configured codecs. TEST_F(WebRtcVideoChannelTest, GetRtpReceiveParametersCodecs) { AddRecvStream(); - cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); - webrtc::RtpParameters rtp_parameters = + RtpParameters rtp_parameters = receive_channel_->GetRtpReceiverParameters(last_ssrc_); ASSERT_EQ(2u, rtp_parameters.codecs.size()); EXPECT_EQ(GetEngineCodec("VP8").ToCodecParameters(), @@ -9328,19 +9445,18 @@ TEST_F(WebRtcVideoChannelTest, GetRtpReceiveFmtpSprop) { #else TEST_F(WebRtcVideoChannelTest, DISABLED_GetRtpReceiveFmtpSprop) { #endif - cricket::VideoReceiverParameters parameters; - cricket::VideoCodec kH264sprop1 = cricket::CreateVideoCodec(101, "H264"); + VideoReceiverParameters parameters; + Codec kH264sprop1 = webrtc::CreateVideoCodec(101, "H264"); kH264sprop1.SetParam(kH264FmtpSpropParameterSets, "uvw"); parameters.codecs.push_back(kH264sprop1); - cricket::VideoCodec kH264sprop2 = cricket::CreateVideoCodec(102, "H264"); + Codec kH264sprop2 = webrtc::CreateVideoCodec(102, "H264"); kH264sprop2.SetParam(kH264FmtpSpropParameterSets, "xyz"); parameters.codecs.push_back(kH264sprop2); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); FakeVideoReceiveStream* recv_stream = AddRecvStream(); - const webrtc::VideoReceiveStreamInterface::Config& cfg = - recv_stream->GetConfig(); - webrtc::RtpParameters rtp_parameters = + const VideoReceiveStreamInterface::Config& cfg = recv_stream->GetConfig(); + RtpParameters rtp_parameters = receive_channel_->GetRtpReceiverParameters(last_ssrc_); ASSERT_EQ(2u, rtp_parameters.codecs.size()); EXPECT_EQ(kH264sprop1.ToCodecParameters(), rtp_parameters.codecs[0]); @@ -9365,7 +9481,7 @@ TEST_F(WebRtcVideoChannelTest, DISABLED_GetRtpReceiveFmtpSprop) { TEST_F(WebRtcVideoChannelTest, GetRtpReceiveParametersSsrc) { AddRecvStream(); - webrtc::RtpParameters rtp_parameters = + RtpParameters rtp_parameters = receive_channel_->GetRtpReceiverParameters(last_ssrc_); ASSERT_EQ(1u, rtp_parameters.encodings.size()); EXPECT_EQ(last_ssrc_, rtp_parameters.encodings[0].ssrc); @@ -9374,12 +9490,12 @@ TEST_F(WebRtcVideoChannelTest, GetRtpReceiveParametersSsrc) { // Test that if we set/get parameters multiple times, we get the same results. 
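
// ----------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): SetAndGetRtpSendParameters and
// SetAndGetRtpReceiveParameters check that getting the parameters, setting
// them back unmodified, and getting them again yields the same result. A
// caller-side round trip looks like this; the helper name is invented and
// RtpParameters' equality operator is assumed, as in the tests.
#include "api/rtp_parameters.h"
#include "api/rtp_sender_interface.h"

bool SendParametersRoundTripIsStable(webrtc::RtpSenderInterface& sender) {
  webrtc::RtpParameters before = sender.GetParameters();
  if (!sender.SetParameters(before).ok()) {
    return false;
  }
  return sender.GetParameters() == before;
}
// ------------------------------ end of sketch -------------------------------
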
TEST_F(WebRtcVideoChannelTest, SetAndGetRtpReceiveParameters) { AddRecvStream(); - cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); - webrtc::RtpParameters initial_params = + RtpParameters initial_params = receive_channel_->GetRtpReceiverParameters(last_ssrc_); // ... And this shouldn't change the params returned by @@ -9396,22 +9512,22 @@ TEST_F(WebRtcVideoChannelTest, GetDefaultRtpReceiveParametersWithUnsignaledSsrc) { // Call necessary methods to configure receiving a default stream as // soon as it arrives. - cricket::VideoReceiverParameters parameters; + VideoReceiverParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(GetEngineCodec("VP9")); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); // Call GetRtpReceiverParameters before configured to receive an unsignaled // stream. Should return nothing. - EXPECT_EQ(webrtc::RtpParameters(), + EXPECT_EQ(RtpParameters(), receive_channel_->GetDefaultRtpReceiveParameters()); // Set a sink for an unsignaled stream. - cricket::FakeVideoRenderer renderer; + FakeVideoRenderer renderer; receive_channel_->SetDefaultSink(&renderer); // Call GetDefaultRtpReceiveParameters before the SSRC is known. - webrtc::RtpParameters rtp_parameters = + RtpParameters rtp_parameters = receive_channel_->GetDefaultRtpReceiveParameters(); ASSERT_EQ(1u, rtp_parameters.encodings.size()); EXPECT_FALSE(rtp_parameters.encodings[0].ssrc); @@ -9435,13 +9551,13 @@ TEST_F(WebRtcVideoChannelTest, AddReceiveStreamAfterReceivingNonPrimaryUnsignaledSsrc) { // Receive VP8 RTX packet. 
RtpPacketReceived rtp_packet; - const cricket::VideoCodec vp8 = GetEngineCodec("VP8"); + const Codec vp8 = GetEngineCodec("VP8"); rtp_packet.SetPayloadType(default_apt_rtx_types_[vp8.id]); rtp_packet.SetSsrc(2); ReceivePacketAndAdvanceTime(rtp_packet); EXPECT_EQ(1u, fake_call_->GetVideoReceiveStreams().size()); - cricket::StreamParams params = cricket::StreamParams::CreateLegacy(1); + StreamParams params = StreamParams::CreateLegacy(1); params.AddFidSsrc(1, 2); EXPECT_TRUE(receive_channel_->AddRecvStream(params)); } @@ -9510,7 +9626,7 @@ TEST_F(WebRtcVideoChannelTest, SinglecastScreenSharing_QualityScalingNotAllowed) { SetUpSimulcast(false, /*with_rtx=*/true); - webrtc::test::FrameForwarder frame_forwarder; + FrameForwarder frame_forwarder; VideoOptions options; options.is_screencast = true; EXPECT_TRUE( @@ -9527,7 +9643,7 @@ TEST_F(WebRtcVideoChannelTest, SimulcastSingleActiveStream_QualityScalingAllowed) { FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); - webrtc::RtpParameters rtp_parameters = + RtpParameters rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); ASSERT_TRUE(rtp_parameters.encodings[0].active); @@ -9543,7 +9659,7 @@ TEST_F(WebRtcVideoChannelTest, TEST_F(WebRtcVideoChannelTest, GenerateKeyFrameSinglecast) { FakeVideoSendStream* stream = AddSendStream(); - webrtc::RtpParameters rtp_parameters = + RtpParameters rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(1u, rtp_parameters.encodings.size()); EXPECT_EQ(rtp_parameters.encodings[0].rid, ""); @@ -9562,17 +9678,18 @@ TEST_F(WebRtcVideoChannelTest, GenerateKeyFrameSinglecast) { } TEST_F(WebRtcVideoChannelTest, GenerateKeyFrameSimulcast) { - StreamParams stream_params = CreateSimStreamParams("cname", {123, 456, 789}); + StreamParams stream_params = + webrtc::CreateSimStreamParams("cname", {123, 456, 789}); std::vector rids = {"f", "h", "q"}; - std::vector rid_descriptions; + std::vector rid_descriptions; for (const auto& rid : rids) { - rid_descriptions.emplace_back(rid, cricket::RidDirection::kSend); + rid_descriptions.emplace_back(rid, RidDirection::kSend); } stream_params.set_rids(rid_descriptions); FakeVideoSendStream* stream = AddSendStream(stream_params); - webrtc::RtpParameters rtp_parameters = + RtpParameters rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); ASSERT_EQ(3u, rtp_parameters.encodings.size()); EXPECT_EQ(rtp_parameters.encodings[0].rid, "f"); @@ -9621,40 +9738,42 @@ TEST_F(WebRtcVideoChannelTest, GenerateKeyFrameSimulcast) { class WebRtcVideoChannelSimulcastTest : public ::testing::Test { public: WebRtcVideoChannelSimulcastTest() - : fake_call_(), - encoder_factory_(new cricket::FakeWebRtcVideoEncoderFactory), - decoder_factory_(new cricket::FakeWebRtcVideoDecoderFactory), + : fake_call_(CreateEnvironment(&field_trials_)), + encoder_factory_(new FakeWebRtcVideoEncoderFactory), + decoder_factory_(new FakeWebRtcVideoDecoderFactory), mock_rate_allocator_factory_( - std::make_unique()), - engine_(std::unique_ptr( - encoder_factory_), - std::unique_ptr( - decoder_factory_), - field_trials_), + std::make_unique()), + engine_( + std::unique_ptr(encoder_factory_), + std::unique_ptr(decoder_factory_), + field_trials_), last_ssrc_(0) {} void SetUp() override { encoder_factory_->AddSupportedVideoCodecType("VP8"); decoder_factory_->AddSupportedVideoCodecType("VP8"); send_channel_ = engine_.CreateSendChannel( - &fake_call_, GetMediaConfig(), VideoOptions(), webrtc::CryptoOptions(), + &fake_call_, 
GetMediaConfig(), VideoOptions(), CryptoOptions(), mock_rate_allocator_factory_.get()); receive_channel_ = engine_.CreateReceiveChannel( - &fake_call_, GetMediaConfig(), VideoOptions(), webrtc::CryptoOptions()); + &fake_call_, GetMediaConfig(), VideoOptions(), CryptoOptions()); send_channel_->OnReadyToSend(true); receive_channel_->SetReceive(true); last_ssrc_ = 123; } protected: - void VerifySimulcastSettings(const VideoCodec& codec, + void VerifySimulcastSettings(const Codec& codec_in, int capture_width, int capture_height, size_t num_configured_streams, size_t expected_num_streams, bool screenshare, bool conference_mode) { - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; + // The codec ID does not matter, but must be valid. + Codec codec = codec_in; + codec.id = 123; parameters.codecs.push_back(codec); parameters.conference_mode = conference_mode; ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); @@ -9663,12 +9782,12 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { RTC_DCHECK(num_configured_streams <= ssrcs.size()); ssrcs.resize(num_configured_streams); - AddSendStream(CreateSimStreamParams("cname", ssrcs)); + AddSendStream(webrtc::CreateSimStreamParams("cname", ssrcs)); // Send a full-size frame to trigger a stream reconfiguration to use all // expected simulcast layers. - webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(capture_width, capture_height, - rtc::kNumMicrosecsPerSec / 30); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(capture_width, capture_height, + webrtc::kNumMicrosecsPerSec / 30); VideoOptions options; if (screenshare) @@ -9684,33 +9803,35 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { auto rtp_parameters = send_channel_->GetRtpSendParameters(kSsrcs3[0]); EXPECT_EQ(num_configured_streams, rtp_parameters.encodings.size()); - std::vector video_streams = stream->GetVideoStreams(); + std::vector video_streams = stream->GetVideoStreams(); ASSERT_EQ(expected_num_streams, video_streams.size()); EXPECT_LE(expected_num_streams, stream->GetConfig().rtp.ssrcs.size()); - std::vector expected_streams; + std::vector expected_streams; if (num_configured_streams > 1 || conference_mode) { - expected_streams = GetSimulcastConfig( - /*min_layers=*/1, num_configured_streams, capture_width, - capture_height, webrtc::kDefaultBitratePriority, kDefaultQpMax, - screenshare && conference_mode, true, field_trials_); + const VideoEncoderConfig& encoder_config = stream->GetEncoderConfig(); + VideoEncoder::EncoderInfo encoder_info; + auto factory = make_ref_counted(encoder_info); + expected_streams = factory->CreateEncoderStreams( + field_trials_, capture_width, capture_height, encoder_config); if (screenshare && conference_mode) { - for (const webrtc::VideoStream& stream : expected_streams) { + for (const webrtc::VideoStream& expected_stream : expected_streams) { // Never scale screen content. 
- EXPECT_EQ(stream.width, rtc::checked_cast(capture_width)); - EXPECT_EQ(stream.height, rtc::checked_cast(capture_height)); + EXPECT_EQ(expected_stream.width, checked_cast(capture_width)); + EXPECT_EQ(expected_stream.height, + checked_cast(capture_height)); } } } else { - webrtc::VideoStream stream; - stream.width = capture_width; - stream.height = capture_height; - stream.max_framerate = kDefaultVideoMaxFramerate; - stream.min_bitrate_bps = webrtc::kDefaultMinVideoBitrateBps; - stream.target_bitrate_bps = stream.max_bitrate_bps = + VideoStream expected_stream; + expected_stream.width = capture_width; + expected_stream.height = capture_height; + expected_stream.max_framerate = kDefaultVideoMaxFramerate; + expected_stream.min_bitrate_bps = webrtc::kDefaultMinVideoBitrateBps; + expected_stream.target_bitrate_bps = expected_stream.max_bitrate_bps = GetMaxDefaultBitrateBps(capture_width, capture_height); - stream.max_qp = kDefaultQpMax; - expected_streams.push_back(stream); + expected_stream.max_qp = kDefaultVideoMaxQpVpx; + expected_streams.push_back(expected_stream); } ASSERT_EQ(expected_streams.size(), video_streams.size()); @@ -9737,7 +9858,7 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { video_streams[i].max_bitrate_bps); EXPECT_GT(video_streams[i].max_qp, 0); - EXPECT_EQ(expected_streams[i].max_qp, video_streams[i].max_qp); + EXPECT_EQ(video_streams[i].max_qp, kDefaultVideoMaxQpVpx); EXPECT_EQ(num_configured_streams > 1 || conference_mode, expected_streams[i].num_temporal_layers.has_value()); @@ -9781,12 +9902,11 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { return streams[streams.size() - 1]; } - webrtc::test::ScopedKeyValueConfig field_trials_; - webrtc::RtcEventLogNull event_log_; + ScopedKeyValueConfig field_trials_; FakeCall fake_call_; - cricket::FakeWebRtcVideoEncoderFactory* encoder_factory_; - cricket::FakeWebRtcVideoDecoderFactory* decoder_factory_; - std::unique_ptr + FakeWebRtcVideoEncoderFactory* encoder_factory_; + FakeWebRtcVideoDecoderFactory* decoder_factory_; + std::unique_ptr mock_rate_allocator_factory_; WebRtcVideoEngine engine_; std::unique_ptr send_channel_; @@ -9795,33 +9915,33 @@ class WebRtcVideoChannelSimulcastTest : public ::testing::Test { }; TEST_F(WebRtcVideoChannelSimulcastTest, SetSendCodecsWith2SimulcastStreams) { - VerifySimulcastSettings(cricket::CreateVideoCodec("VP8"), 640, 360, 2, 2, + VerifySimulcastSettings(webrtc::CreateVideoCodec("VP8"), 640, 360, 2, 2, false, true); } TEST_F(WebRtcVideoChannelSimulcastTest, SetSendCodecsWith3SimulcastStreams) { - VerifySimulcastSettings(cricket::CreateVideoCodec("VP8"), 1280, 720, 3, 3, + VerifySimulcastSettings(webrtc::CreateVideoCodec("VP8"), 1280, 720, 3, 3, false, true); } // Test that we normalize send codec format size in simulcast. 
TEST_F(WebRtcVideoChannelSimulcastTest, SetSendCodecsWithOddSizeInSimulcast) { - VerifySimulcastSettings(cricket::CreateVideoCodec("VP8"), 541, 271, 2, 2, + VerifySimulcastSettings(webrtc::CreateVideoCodec("VP8"), 541, 271, 2, 2, false, true); } TEST_F(WebRtcVideoChannelSimulcastTest, SetSendCodecsForScreenshare) { - VerifySimulcastSettings(cricket::CreateVideoCodec("VP8"), 1280, 720, 3, 3, + VerifySimulcastSettings(webrtc::CreateVideoCodec("VP8"), 1280, 720, 3, 3, true, false); } TEST_F(WebRtcVideoChannelSimulcastTest, SetSendCodecsForSimulcastScreenshare) { - VerifySimulcastSettings(cricket::CreateVideoCodec("VP8"), 1280, 720, 3, 2, + VerifySimulcastSettings(webrtc::CreateVideoCodec("VP8"), 1280, 720, 3, 2, true, true); } TEST_F(WebRtcVideoChannelSimulcastTest, SimulcastScreenshareWithoutConference) { - VerifySimulcastSettings(cricket::CreateVideoCodec("VP8"), 1280, 720, 3, 3, + VerifySimulcastSettings(webrtc::CreateVideoCodec("VP8"), 1280, 720, 3, 3, true, false); } @@ -9841,9 +9961,9 @@ TEST_F(WebRtcVideoChannelBaseTest, GetSources) { EXPECT_THAT(receive_channel_->GetSources(kSsrc), SizeIs(1)); EXPECT_THAT(receive_channel_->GetSources(kSsrc + 1), IsEmpty()); - webrtc::RtpSource source = receive_channel_->GetSources(kSsrc)[0]; + RtpSource source = receive_channel_->GetSources(kSsrc)[0]; EXPECT_EQ(source.source_id(), kSsrc); - EXPECT_EQ(source.source_type(), webrtc::RtpSourceType::SSRC); + EXPECT_EQ(source.source_type(), RtpSourceType::SSRC); int64_t rtp_timestamp_1 = source.rtp_timestamp(); Timestamp timestamp_1 = source.timestamp(); @@ -9857,7 +9977,7 @@ TEST_F(WebRtcVideoChannelBaseTest, GetSources) { source = receive_channel_->GetSources(kSsrc)[0]; EXPECT_EQ(source.source_id(), kSsrc); - EXPECT_EQ(source.source_type(), webrtc::RtpSourceType::SSRC); + EXPECT_EQ(source.source_type(), RtpSourceType::SSRC); int64_t rtp_timestamp_2 = source.rtp_timestamp(); Timestamp timestamp_2 = source.timestamp(); @@ -9866,12 +9986,12 @@ TEST_F(WebRtcVideoChannelBaseTest, GetSources) { } TEST_F(WebRtcVideoChannelTest, SetsRidsOnSendStream) { - StreamParams sp = CreateSimStreamParams("cname", {123, 456, 789}); + StreamParams sp = webrtc::CreateSimStreamParams("cname", {123, 456, 789}); std::vector rids = {"f", "h", "q"}; - std::vector rid_descriptions; + std::vector rid_descriptions; for (const auto& rid : rids) { - rid_descriptions.emplace_back(rid, cricket::RidDirection::kSend); + rid_descriptions.emplace_back(rid, RidDirection::kSend); } sp.set_rids(rid_descriptions); @@ -9885,21 +10005,21 @@ TEST_F(WebRtcVideoChannelTest, SetsRidsOnSendStream) { } TEST_F(WebRtcVideoChannelBaseTest, EncoderSelectorSwitchCodec) { - VideoCodec vp9 = GetEngineCodec("VP9"); + Codec vp9 = GetEngineCodec("VP9"); - cricket::VideoSenderParameters parameters; + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); parameters.codecs.push_back(vp9); EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); send_channel_->SetSend(true); - absl::optional codec = send_channel_->GetSendCodec(); + std::optional codec = send_channel_->GetSendCodec(); ASSERT_TRUE(codec); EXPECT_EQ("VP8", codec->name); - webrtc::MockEncoderSelector encoder_selector; + MockEncoderSelector encoder_selector; EXPECT_CALL(encoder_selector, OnAvailableBitrate) - .WillRepeatedly(Return(webrtc::SdpVideoFormat("VP9"))); + .WillRepeatedly(Return(SdpVideoFormat::VP9Profile0())); send_channel_->SetEncoderSelector(kSsrc, &encoder_selector); time_controller_.AdvanceTime(kFrameDuration); @@ -9912,151 +10032,167 @@ 
TEST_F(WebRtcVideoChannelBaseTest, EncoderSelectorSwitchCodec) { send_channel_->SetEncoderSelector(kSsrc, nullptr); } -TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecast) { - cricket::VideoSenderParameters parameters; +TEST_F(WebRtcVideoChannelTest, ScaleResolutionDownToSinglecast) { + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); - webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 30); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(1280, 720, webrtc::kNumMicrosecsPerSec / 30); EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); - { // TEST requested_resolution < frame size - webrtc::RtpParameters rtp_parameters = + { // TEST scale_resolution_down_to < frame size + RtpParameters rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].requested_resolution = {.width = 640, - .height = 360}; + rtp_parameters.encodings[0].scale_resolution_down_to = {.width = 640, + .height = 360}; send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); auto streams = stream->GetVideoStreams(); ASSERT_EQ(streams.size(), 1u); - EXPECT_EQ(rtc::checked_cast(640), streams[0].width); - EXPECT_EQ(rtc::checked_cast(360), streams[0].height); + EXPECT_EQ(checked_cast(640), streams[0].width); + EXPECT_EQ(checked_cast(360), streams[0].height); } - { // TEST requested_resolution == frame size + { // TEST scale_resolution_down_to == frame size auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].requested_resolution = {.width = 1280, - .height = 720}; + rtp_parameters.encodings[0].scale_resolution_down_to = {.width = 1280, + .height = 720}; send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); auto streams = stream->GetVideoStreams(); ASSERT_EQ(streams.size(), 1u); - EXPECT_EQ(rtc::checked_cast(1280), streams[0].width); - EXPECT_EQ(rtc::checked_cast(720), streams[0].height); + EXPECT_EQ(checked_cast(1280), streams[0].width); + EXPECT_EQ(checked_cast(720), streams[0].height); } - { // TEST requested_resolution > frame size + { // TEST scale_resolution_down_to > frame size auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].requested_resolution = {.width = 2 * 1280, - .height = 2 * 720}; + rtp_parameters.encodings[0].scale_resolution_down_to = {.width = 2 * 1280, + .height = 2 * 720}; send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); auto streams = stream->GetVideoStreams(); ASSERT_EQ(streams.size(), 1u); - EXPECT_EQ(rtc::checked_cast(1280), streams[0].width); - EXPECT_EQ(rtc::checked_cast(720), streams[0].height); + EXPECT_EQ(checked_cast(1280), streams[0].width); + EXPECT_EQ(checked_cast(720), streams[0].height); } EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } -TEST_F(WebRtcVideoChannelTest, RequestedResolutionSinglecastCropping) { - cricket::VideoSenderParameters parameters; +TEST_F(WebRtcVideoChannelTest, 
ScaleResolutionDownToSinglecastScaling) { + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); FakeVideoSendStream* stream = AddSendStream(); - webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 30); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(1280, 720, webrtc::kNumMicrosecsPerSec / 30); EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); { auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].requested_resolution = {.width = 720, - .height = 720}; + rtp_parameters.encodings[0].scale_resolution_down_to = {.width = 720, + .height = 720}; send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); auto streams = stream->GetVideoStreams(); ASSERT_EQ(streams.size(), 1u); - EXPECT_EQ(rtc::checked_cast(720), streams[0].width); - EXPECT_EQ(rtc::checked_cast(720), streams[0].height); + // The scaling factor is 720/1280 because of orientation, + // scaling the height (720) by this value gets you 405p. + EXPECT_EQ(checked_cast(720), streams[0].width); + EXPECT_EQ(checked_cast(405), streams[0].height); } { auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].requested_resolution = {.width = 1280, - .height = 1280}; + rtp_parameters.encodings[0].scale_resolution_down_to = {.width = 1280, + .height = 1280}; send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); auto streams = stream->GetVideoStreams(); ASSERT_EQ(streams.size(), 1u); - EXPECT_EQ(rtc::checked_cast(720), streams[0].width); - EXPECT_EQ(rtc::checked_cast(720), streams[0].height); + // No downscale needed to fit 1280x1280. + EXPECT_EQ(checked_cast(1280), streams[0].width); + EXPECT_EQ(checked_cast(720), streams[0].height); } { auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(1UL, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].requested_resolution = {.width = 650, - .height = 650}; + rtp_parameters.encodings[0].scale_resolution_down_to = {.width = 650, + .height = 650}; send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); auto streams = stream->GetVideoStreams(); ASSERT_EQ(streams.size(), 1u); - EXPECT_EQ(rtc::checked_cast(480), streams[0].width); - EXPECT_EQ(rtc::checked_cast(480), streams[0].height); + // The scaling factor is 650/1280 because of orientation, + // scaling the height (720) by this value gets you 365.625 which is rounded. + EXPECT_EQ(checked_cast(650), streams[0].width); + EXPECT_EQ(checked_cast(366), streams[0].height); + } + + { + auto rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); + EXPECT_EQ(1UL, rtp_parameters.encodings.size()); + rtp_parameters.encodings[0].scale_resolution_down_to = {.width = 2560, + .height = 1440}; + send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + + auto streams = stream->GetVideoStreams(); + ASSERT_EQ(streams.size(), 1u); + // We don't upscale. 
+ EXPECT_EQ(checked_cast(1280), streams[0].width); + EXPECT_EQ(checked_cast(720), streams[0].height); } EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } -TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { - cricket::VideoSenderParameters parameters; +TEST_F(WebRtcVideoChannelTest, ScaleResolutionDownToSimulcast) { + VideoSenderParameters parameters; parameters.codecs.push_back(GetEngineCodec("VP8")); ASSERT_TRUE(send_channel_->SetSenderParameters(parameters)); FakeVideoSendStream* stream = SetUpSimulcast(true, /*with_rtx=*/false); - webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 30); + FrameForwarder frame_forwarder; + FakeFrameSource frame_source(1280, 720, webrtc::kNumMicrosecsPerSec / 30); EXPECT_TRUE( send_channel_->SetVideoSend(last_ssrc_, nullptr, &frame_forwarder)); { - webrtc::RtpParameters rtp_parameters = + RtpParameters rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(3UL, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].requested_resolution = {.width = 320, - .height = 180}; - rtp_parameters.encodings[1].requested_resolution = {.width = 640, - .height = 360}; - rtp_parameters.encodings[2].requested_resolution = {.width = 1280, - .height = 720}; + rtp_parameters.encodings[0].scale_resolution_down_to = {.width = 320, + .height = 180}; + rtp_parameters.encodings[1].scale_resolution_down_to = {.width = 640, + .height = 360}; + rtp_parameters.encodings[2].scale_resolution_down_to = {.width = 1280, + .height = 720}; send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); EXPECT_EQ(GetStreamResolutions(stream->GetVideoStreams()), - (std::vector{ + (std::vector{ {.width = 320, .height = 180}, {.width = 640, .height = 360}, {.width = 1280, .height = 720}, @@ -10064,43 +10200,43 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { } { - webrtc::RtpParameters rtp_parameters = + RtpParameters rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(3UL, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].requested_resolution = {.width = 320, - .height = 180}; + rtp_parameters.encodings[0].scale_resolution_down_to = {.width = 320, + .height = 180}; rtp_parameters.encodings[1].active = false; - rtp_parameters.encodings[2].requested_resolution = {.width = 1280, - .height = 720}; + rtp_parameters.encodings[2].scale_resolution_down_to = {.width = 1280, + .height = 720}; send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); EXPECT_EQ(GetStreamResolutions(stream->GetVideoStreams()), - (std::vector{ + (std::vector{ {.width = 320, .height = 180}, {.width = 1280, .height = 720}, })); } { - webrtc::RtpParameters rtp_parameters = + RtpParameters rtp_parameters = send_channel_->GetRtpSendParameters(last_ssrc_); EXPECT_EQ(3UL, rtp_parameters.encodings.size()); - rtp_parameters.encodings[0].requested_resolution = {.width = 320, - .height = 180}; + rtp_parameters.encodings[0].scale_resolution_down_to = {.width = 320, + .height = 180}; rtp_parameters.encodings[1].active = true; - rtp_parameters.encodings[1].requested_resolution = {.width = 640, - .height = 360}; - rtp_parameters.encodings[2].requested_resolution = {.width = 960, - .height = 540}; + rtp_parameters.encodings[1].scale_resolution_down_to = {.width = 640, + .height = 360}; + 
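// --- Illustrative sketch (not part of the patch): scale_resolution_down_to --
// The expectations in the scale_resolution_down_to tests around this point
// follow a simple rule: match the requested size to the frame's orientation,
// compute one uniform scale factor that fits the frame inside the requested
// size, and never scale up. The stand-alone model below reproduces the
// numbers asserted in these tests (the helper name, struct, and rounding mode
// are assumptions for illustration; this is not the WebRTC implementation).
#include <algorithm>
#include <cassert>
#include <cmath>
#include <utility>

namespace {

struct Size {
  int width;
  int height;
};

Size ApplyScaleResolutionDownTo(Size frame, Size requested) {
  // Match orientation: a portrait request applied to a landscape frame is
  // interpreted as if it were landscape (and vice versa).
  if ((frame.width < frame.height) != (requested.width < requested.height)) {
    std::swap(requested.width, requested.height);
  }
  // One uniform scale factor, capped at 1.0 so the frame is never upscaled.
  double scale =
      std::min({1.0, static_cast<double>(requested.width) / frame.width,
                static_cast<double>(requested.height) / frame.height});
  return {static_cast<int>(std::lround(frame.width * scale)),
          static_cast<int>(std::lround(frame.height * scale))};
}

}  // namespace

int main() {
  const Size frame = {1280, 720};
  // 720x720 on a 1280x720 frame: scale = 720/1280, giving 720x405.
  Size r = ApplyScaleResolutionDownTo(frame, {720, 720});
  assert(r.width == 720 && r.height == 405);
  // 1280x1280 already fits the frame: no downscale needed, 1280x720.
  r = ApplyScaleResolutionDownTo(frame, {1280, 1280});
  assert(r.width == 1280 && r.height == 720);
  // 650x650: scale = 650/1280, so 720 * 650/1280 = 365.625, rounded to 366.
  r = ApplyScaleResolutionDownTo(frame, {650, 650});
  assert(r.width == 650 && r.height == 366);
  // 2560x1440 is larger than the frame: no upscaling, stays at 1280x720.
  r = ApplyScaleResolutionDownTo(frame, {2560, 1440});
  assert(r.width == 1280 && r.height == 720);
  return 0;
}
// ---------------------------------------------------------------------------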
rtp_parameters.encodings[2].scale_resolution_down_to = {.width = 960, + .height = 540}; send_channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); EXPECT_EQ(GetStreamResolutions(stream->GetVideoStreams()), - (std::vector{ + (std::vector{ {.width = 320, .height = 180}, {.width = 640, .height = 360}, {.width = 960, .height = 540}, @@ -10110,4 +10246,5 @@ TEST_F(WebRtcVideoChannelTest, RequestedResolutionSimulcast) { EXPECT_TRUE(send_channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } -} // namespace cricket +} // namespace +} // namespace webrtc diff --git a/media/engine/webrtc_voice_engine.cc b/media/engine/webrtc_voice_engine.cc index f9b6adb864..cb48226931 100644 --- a/media/engine/webrtc_voice_engine.cc +++ b/media/engine/webrtc_voice_engine.cc @@ -12,87 +12,109 @@ #include #include +#include #include #include #include #include +#include #include +#include +#include #include -#include #include #include #include "absl/algorithm/algorithm.h" #include "absl/algorithm/container.h" +#include "absl/functional/any_invocable.h" #include "absl/functional/bind_front.h" #include "absl/strings/match.h" -#include "absl/types/optional.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/string_view.h" #include "api/audio/audio_frame.h" #include "api/audio/audio_frame_processor.h" +#include "api/audio/audio_mixer.h" +#include "api/audio/audio_processing.h" +#include "api/audio/audio_processing_statistics.h" #include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/audio_codecs/audio_format.h" +#include "api/audio_options.h" #include "api/call/audio_sink.h" +#include "api/crypto/crypto_options.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/environment/environment.h" #include "api/field_trials_view.h" +#include "api/frame_transformer_interface.h" #include "api/make_ref_counted.h" #include "api/media_types.h" #include "api/priority.h" +#include "api/rtc_error.h" #include "api/rtp_headers.h" #include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_direction.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/transport/bitrate_settings.h" +#include "api/transport/rtp/rtp_source.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "call/audio_receive_stream.h" +#include "call/audio_send_stream.h" +#include "call/audio_state.h" +#include "call/call.h" #include "call/packet_receiver.h" +#include "call/payload_type_picker.h" #include "call/rtp_config.h" #include "call/rtp_transport_controller_send_interface.h" #include "media/base/audio_source.h" #include "media/base/codec.h" +#include "media/base/media_channel.h" +#include "media/base/media_channel_impl.h" +#include "media/base/media_config.h" #include "media/base/media_constants.h" +#include "media/base/media_engine.h" #include "media/base/stream_params.h" #include "media/engine/adm_helpers.h" -#include "media/engine/payload_type_mapper.h" #include "media/engine/webrtc_media_engine.h" #include "modules/async_audio_processing/async_audio_processing.h" #include "modules/audio_mixer/audio_mixer_impl.h" -#include "modules/audio_processing/include/audio_processing.h" -#include 
"modules/audio_processing/include/audio_processing_statistics.h" -#include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "modules/rtp_rtcp/source/rtp_util.h" #include "rtc_base/checks.h" #include "rtc_base/dscp.h" #include "rtc_base/experiments/struct_parameters_parser.h" -#include "rtc_base/ignore_wundef.h" #include "rtc_base/logging.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_route.h" #include "rtc_base/race_checker.h" #include "rtc_base/string_encode.h" -#include "rtc_base/strings/audio_format_to_string.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/strings/string_format.h" +#include "rtc_base/system/file_wrapper.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/metrics.h" #if WEBRTC_ENABLE_PROTOBUF -RTC_PUSH_IGNORING_WUNDEF() #ifdef WEBRTC_ANDROID_PLATFORM_BUILD #include "external/webrtc/webrtc/modules/audio_coding/audio_network_adaptor/config.pb.h" #else #include "modules/audio_coding/audio_network_adaptor/config.pb.h" #endif -RTC_POP_IGNORING_WUNDEF() + #endif -namespace cricket { +namespace webrtc { namespace { -using ::webrtc::ParseRtpSsrc; - constexpr size_t kMaxUnsignaledRecvStreams = 4; constexpr int kNackRtpHistoryMs = 5000; @@ -103,7 +125,7 @@ const int kMaxTelephoneEventCode = 255; const int kMinPayloadType = 0; const int kMaxPayloadType = 127; -class ProxySink : public webrtc::AudioSinkInterface { +class ProxySink : public AudioSinkInterface { public: explicit ProxySink(AudioSinkInterface* sink) : sink_(sink) { RTC_DCHECK(sink); @@ -112,7 +134,7 @@ class ProxySink : public webrtc::AudioSinkInterface { void OnData(const Data& audio) override { sink_->OnData(audio); } private: - webrtc::AudioSinkInterface* sink_; + AudioSinkInterface* sink_; }; bool ValidateStreamParams(const StreamParams& sp) { @@ -129,8 +151,8 @@ bool ValidateStreamParams(const StreamParams& sp) { } // Dumps an AudioCodec in RFC 2327-ish format. 
-std::string ToString(const AudioCodec& codec) { - rtc::StringBuilder ss; +std::string ToString(const Codec& codec) { + StringBuilder ss; ss << codec.name << "/" << codec.clockrate << "/" << codec.channels; if (!codec.params.empty()) { ss << " {"; @@ -143,34 +165,32 @@ std::string ToString(const AudioCodec& codec) { return ss.Release(); } -bool IsCodec(const AudioCodec& codec, const char* ref_name) { +bool IsCodec(const Codec& codec, const char* ref_name) { return absl::EqualsIgnoreCase(codec.name, ref_name); } -absl::optional FindCodec( - const std::vector& codecs, - const AudioCodec& codec, - const webrtc::FieldTrialsView* field_trials) { - for (const AudioCodec& c : codecs) { - if (c.Matches(codec, field_trials)) { +std::optional FindCodec(const std::vector& codecs, + const Codec& codec) { + for (const webrtc::Codec& c : codecs) { + if (c.Matches(codec)) { return c; } } - return absl::nullopt; + return std::nullopt; } -bool VerifyUniquePayloadTypes(const std::vector& codecs) { +bool VerifyUniquePayloadTypes(const std::vector& codecs) { if (codecs.empty()) { return true; } std::vector payload_types; absl::c_transform(codecs, std::back_inserter(payload_types), - [](const AudioCodec& codec) { return codec.id; }); + [](const webrtc::Codec& codec) { return codec.id; }); absl::c_sort(payload_types); return absl::c_adjacent_find(payload_types) == payload_types.end(); } -absl::optional GetAudioNetworkAdaptorConfig( +std::optional GetAudioNetworkAdaptorConfig( const AudioOptions& options) { if (options.audio_network_adaptor && *options.audio_network_adaptor && options.audio_network_adaptor_config) { @@ -178,7 +198,7 @@ absl::optional GetAudioNetworkAdaptorConfig( // equals true and `options_.audio_network_adaptor_config` has a value. return options.audio_network_adaptor_config; } - return absl::nullopt; + return std::nullopt; } // Returns its smallest positive argument. If neither argument is positive, @@ -195,9 +215,9 @@ int MinPositive(int a, int b) { // `max_send_bitrate_bps` is the bitrate from "b=" in SDP. // `rtp_max_bitrate_bps` is the bitrate from RtpSender::SetParameters. -absl::optional ComputeSendBitrate(int max_send_bitrate_bps, - absl::optional rtp_max_bitrate_bps, - const webrtc::AudioCodecSpec& spec) { +std::optional ComputeSendBitrate(int max_send_bitrate_bps, + std::optional rtp_max_bitrate_bps, + const AudioCodecSpec& spec) { // If application-configured bitrate is set, take minimum of that and SDP // bitrate. const int bps = rtp_max_bitrate_bps @@ -216,7 +236,7 @@ absl::optional ComputeSendBitrate(int max_send_bitrate_bps, << " bps" ", requires at least " << spec.info.min_bitrate_bps << " bps."; - return absl::nullopt; + return std::nullopt; } if (spec.info.HasFixedBitrate()) { @@ -227,32 +247,28 @@ absl::optional ComputeSendBitrate(int max_send_bitrate_bps, } } -bool IsEnabled(const webrtc::FieldTrialsView& config, absl::string_view trial) { - return absl::StartsWith(config.Lookup(trial), "Enabled"); -} - struct AdaptivePtimeConfig { bool enabled = false; - webrtc::DataRate min_payload_bitrate = webrtc::DataRate::KilobitsPerSec(16); + DataRate min_payload_bitrate = DataRate::KilobitsPerSec(16); // Value is chosen to ensure FEC can be encoded, see LBRR_WB_MIN_RATE_BPS in // libopus. 
- webrtc::DataRate min_encoder_bitrate = webrtc::DataRate::KilobitsPerSec(16); + DataRate min_encoder_bitrate = DataRate::KilobitsPerSec(16); bool use_slow_adaptation = true; - absl::optional audio_network_adaptor_config; + std::optional audio_network_adaptor_config; - std::unique_ptr Parser() { - return webrtc::StructParametersParser::Create( // + std::unique_ptr Parser() { + return StructParametersParser::Create( // "enabled", &enabled, // "min_payload_bitrate", &min_payload_bitrate, // "min_encoder_bitrate", &min_encoder_bitrate, // "use_slow_adaptation", &use_slow_adaptation); } - explicit AdaptivePtimeConfig(const webrtc::FieldTrialsView& trials) { + explicit AdaptivePtimeConfig(const FieldTrialsView& trials) { Parser()->Parse(trials.Lookup("WebRTC-Audio-AdaptivePtime")); #if WEBRTC_ENABLE_PROTOBUF - webrtc::audio_network_adaptor::config::ControllerManager config; + audio_network_adaptor::config::ControllerManager config; auto* frame_length_controller = config.add_controllers()->mutable_frame_length_controller_v2(); frame_length_controller->set_min_payload_bitrate_bps( @@ -266,27 +282,29 @@ struct AdaptivePtimeConfig { // TODO(tommi): Constructing a receive stream could be made simpler. // Move some of this boiler plate code into the config structs themselves. -webrtc::AudioReceiveStreamInterface::Config BuildReceiveStreamConfig( +AudioReceiveStreamInterface::Config BuildReceiveStreamConfig( uint32_t remote_ssrc, uint32_t local_ssrc, bool use_nack, bool enable_non_sender_rtt, + RtcpMode rtcp_mode, const std::vector& stream_ids, - const std::vector& extensions, - webrtc::Transport* rtcp_send_transport, - const rtc::scoped_refptr& decoder_factory, - const std::map& decoder_map, - absl::optional codec_pair_id, + const std::vector& /* extensions */, + Transport* rtcp_send_transport, + const scoped_refptr& decoder_factory, + const std::map& decoder_map, + std::optional codec_pair_id, size_t jitter_buffer_max_packets, bool jitter_buffer_fast_accelerate, int jitter_buffer_min_delay_ms, - rtc::scoped_refptr frame_decryptor, - const webrtc::CryptoOptions& crypto_options, - rtc::scoped_refptr frame_transformer) { - webrtc::AudioReceiveStreamInterface::Config config; + scoped_refptr frame_decryptor, + const CryptoOptions& crypto_options, + scoped_refptr frame_transformer) { + AudioReceiveStreamInterface::Config config; config.rtp.remote_ssrc = remote_ssrc; config.rtp.local_ssrc = local_ssrc; config.rtp.nack.rtp_history_ms = use_nack ? kNackRtpHistoryMs : 0; + config.rtp.rtcp_mode = rtcp_mode; if (!stream_ids.empty()) { config.sync_group = stream_ids[0]; } @@ -306,8 +324,8 @@ webrtc::AudioReceiveStreamInterface::Config BuildReceiveStreamConfig( // Utility function to check if RED codec and its parameters match a codec spec. bool CheckRedParameters( - const AudioCodec& red_codec, - const webrtc::AudioSendStream::Config::SendCodecSpec& send_codec_spec) { + const Codec& red_codec, + const AudioSendStream::Config::SendCodecSpec& send_codec_spec) { if (red_codec.clockrate != send_codec_spec.format.clockrate_hz || red_codec.channels != send_codec_spec.format.num_channels) { return false; @@ -315,53 +333,147 @@ bool CheckRedParameters( // Check the FMTP line for the empty parameter which should match // /[/...] 
- auto red_parameters = red_codec.params.find(""); + auto red_parameters = red_codec.params.find(kCodecParamNotInNameValueFormat); if (red_parameters == red_codec.params.end()) { RTC_LOG(LS_WARNING) << "audio/RED missing fmtp parameters."; return false; } std::vector redundant_payloads = - rtc::split(red_parameters->second, '/'); + webrtc::split(red_parameters->second, '/'); // 32 is chosen as a maximum upper bound for consistency with the // red payload splitter. if (redundant_payloads.size() < 2 || redundant_payloads.size() > 32) { return false; } for (auto pt : redundant_payloads) { - if (pt != rtc::ToString(send_codec_spec.payload_type)) { + if (pt != absl::StrCat(send_codec_spec.payload_type)) { return false; } } return true; } +SdpAudioFormat AudioCodecToSdpAudioFormat(const Codec& ac) { + return SdpAudioFormat(ac.name, ac.clockrate, ac.channels, ac.params); +} + +// Assign the payload types for the codecs of this voice engine. +// This is a "preliminary" pass, done to prime the +// payload type picker with a normal set of PTs. +// TODO: https://issues.webrtc.org/360058654 - remove. +std::vector LegacyCollectCodecs(const std::vector& specs, + bool allocate_pt) { + // Only used for the legacy "allocate_pt = true" case. + PayloadTypePicker pt_mapper; + std::vector out; + + // Only generate CN payload types for these clockrates: + std::map> generate_cn = {{8000, false}}; + // Only generate telephone-event payload types for these clockrates: + std::map> generate_dtmf = {{8000, false}, + {48000, false}}; + + for (const auto& spec : specs) { + Codec codec = webrtc::CreateAudioCodec(spec.format); + if (allocate_pt) { + auto pt_or_error = pt_mapper.SuggestMapping(codec, nullptr); + // We need to do some extra stuff before adding the main codecs to out. + if (!pt_or_error.ok()) { + continue; + } + codec.id = pt_or_error.value(); + } + if (spec.info.supports_network_adaption) { + codec.AddFeedbackParam( + FeedbackParam(kRtcpFbParamTransportCc, kParamValueEmpty)); + } + + if (spec.info.allow_comfort_noise) { + // Generate a CN entry if the decoder allows it and we support the + // clockrate. + auto cn = generate_cn.find(spec.format.clockrate_hz); + if (cn != generate_cn.end()) { + cn->second = true; + } + } + + // Generate a telephone-event entry if we support the clockrate. + auto dtmf = generate_dtmf.find(spec.format.clockrate_hz); + if (dtmf != generate_dtmf.end()) { + dtmf->second = true; + } + + out.push_back(codec); + + // TODO(hta): Don't assign RED codecs until we know that the PT for Opus + // is final + if (codec.name == kOpusCodecName) { + if (allocate_pt) { + std::string red_fmtp = + absl::StrCat(codec.id) + "/" + absl::StrCat(codec.id); + Codec red_codec = webrtc::CreateAudioCodec( + {kRedCodecName, codec.clockrate, codec.channels, {{"", red_fmtp}}}); + red_codec.id = pt_mapper.SuggestMapping(red_codec, nullptr).value(); + out.push_back(red_codec); + } else { + // We don't know the PT to put into the RED fmtp parameter yet. + // Leave it out. + Codec red_codec = webrtc::CreateAudioCodec({kRedCodecName, 48000, 2}); + out.push_back(red_codec); + } + } + } + + // Add CN codecs after "proper" audio codecs. + for (const auto& cn : generate_cn) { + if (cn.second) { + Codec cn_codec = webrtc::CreateAudioCodec({kCnCodecName, cn.first, 1}); + if (allocate_pt) { + cn_codec.id = pt_mapper.SuggestMapping(cn_codec, nullptr).value(); + } + out.push_back(cn_codec); + } + } + + // Add telephone-event codecs last. 
+ for (const auto& dtmf : generate_dtmf) { + if (dtmf.second) { + Codec dtmf_codec = + webrtc::CreateAudioCodec({kDtmfCodecName, dtmf.first, 1}); + if (allocate_pt) { + dtmf_codec.id = pt_mapper.SuggestMapping(dtmf_codec, nullptr).value(); + } + out.push_back(dtmf_codec); + } + } + return out; +} + } // namespace WebRtcVoiceEngine::WebRtcVoiceEngine( - webrtc::TaskQueueFactory* task_queue_factory, - webrtc::AudioDeviceModule* adm, - const rtc::scoped_refptr& encoder_factory, - const rtc::scoped_refptr& decoder_factory, - rtc::scoped_refptr audio_mixer, - rtc::scoped_refptr audio_processing, - // TODO(bugs.webrtc.org/15111): - // Remove the raw AudioFrameProcessor pointer in the follow-up. - webrtc::AudioFrameProcessor* audio_frame_processor, - std::unique_ptr owned_audio_frame_processor, - const webrtc::FieldTrialsView& trials) - : task_queue_factory_(task_queue_factory), - adm_(adm), - encoder_factory_(encoder_factory), - decoder_factory_(decoder_factory), - audio_mixer_(audio_mixer), - apm_(audio_processing), - audio_frame_processor_(audio_frame_processor), - owned_audio_frame_processor_(std::move(owned_audio_frame_processor)), + const Environment& env, + scoped_refptr adm, + scoped_refptr encoder_factory, + scoped_refptr decoder_factory, + scoped_refptr audio_mixer, + scoped_refptr audio_processing, + std::unique_ptr audio_frame_processor) + : env_(env), + adm_(std::move(adm)), + encoder_factory_(std::move(encoder_factory)), + decoder_factory_(std::move(decoder_factory)), + audio_mixer_(std::move(audio_mixer)), + apm_(std::move(audio_processing)), + audio_frame_processor_(std::move(audio_frame_processor)), minimized_remsampling_on_mobile_trial_enabled_( - IsEnabled(trials, "WebRTC-Audio-MinimizeResamplingOnMobile")) { + env_.field_trials().IsEnabled( + "WebRTC-Audio-MinimizeResamplingOnMobile")), + payload_types_in_transport_trial_enabled_( + env_.field_trials().IsEnabled("WebRTC-PayloadTypesInTransport")) { RTC_LOG(LS_INFO) << "WebRtcVoiceEngine::WebRtcVoiceEngine"; - RTC_DCHECK(decoder_factory); - RTC_DCHECK(encoder_factory); + RTC_DCHECK(decoder_factory_); + RTC_DCHECK(encoder_factory_); // The rest of our initialization will happen in Init. } @@ -385,28 +497,31 @@ void WebRtcVoiceEngine::Init() { // TaskQueue expects to be created/destroyed on the same thread. RTC_DCHECK(!low_priority_worker_queue_); - low_priority_worker_queue_.reset( - new rtc::TaskQueue(task_queue_factory_->CreateTaskQueue( - "rtc-low-prio", webrtc::TaskQueueFactory::Priority::LOW))); + low_priority_worker_queue_ = env_.task_queue_factory().CreateTaskQueue( + "rtc-low-prio", TaskQueueFactory::Priority::LOW); // Load our audio codec lists. 
RTC_LOG(LS_VERBOSE) << "Supported send codecs in order of preference:"; - send_codecs_ = CollectCodecs(encoder_factory_->GetSupportedEncoders()); - for (const AudioCodec& codec : send_codecs_) { + send_codecs_ = + LegacyCollectCodecs(encoder_factory_->GetSupportedEncoders(), + !payload_types_in_transport_trial_enabled_); + for (const webrtc::Codec& codec : send_codecs_) { RTC_LOG(LS_VERBOSE) << ToString(codec); } RTC_LOG(LS_VERBOSE) << "Supported recv codecs in order of preference:"; - recv_codecs_ = CollectCodecs(decoder_factory_->GetSupportedDecoders()); - for (const AudioCodec& codec : recv_codecs_) { + recv_codecs_ = + LegacyCollectCodecs(decoder_factory_->GetSupportedDecoders(), + !payload_types_in_transport_trial_enabled_); + for (const webrtc::Codec& codec : recv_codecs_) { RTC_LOG(LS_VERBOSE) << ToString(codec); } #if defined(WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE) // No ADM supplied? Create a default one. if (!adm_) { - adm_ = webrtc::AudioDeviceModule::Create( - webrtc::AudioDeviceModule::kPlatformDefaultAudio, task_queue_factory_); + adm_ = AudioDeviceModule::Create(AudioDeviceModule::kPlatformDefaultAudio, + &env_.task_queue_factory()); } #endif // WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE RTC_CHECK(adm()); @@ -414,24 +529,20 @@ void WebRtcVoiceEngine::Init() { // Set up AudioState. { - webrtc::AudioState::Config config; + AudioState::Config config; if (audio_mixer_) { config.audio_mixer = audio_mixer_; } else { - config.audio_mixer = webrtc::AudioMixerImpl::Create(); + config.audio_mixer = AudioMixerImpl::Create(); } config.audio_processing = apm_; config.audio_device_module = adm_; if (audio_frame_processor_) { config.async_audio_processing_factory = - rtc::make_ref_counted( - *audio_frame_processor_, *task_queue_factory_); - } else if (owned_audio_frame_processor_) { - config.async_audio_processing_factory = - rtc::make_ref_counted( - std::move(owned_audio_frame_processor_), *task_queue_factory_); + make_ref_counted( + std::move(audio_frame_processor_), env_.task_queue_factory()); } - audio_state_ = webrtc::AudioState::Create(config); + audio_state_ = AudioState::Create(config); } // Connect the ADM to our audio path. 
@@ -458,30 +569,27 @@ void WebRtcVoiceEngine::Init() { initialized_ = true; } -rtc::scoped_refptr WebRtcVoiceEngine::GetAudioState() - const { +scoped_refptr WebRtcVoiceEngine::GetAudioState() const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); return audio_state_; } std::unique_ptr -WebRtcVoiceEngine::CreateSendChannel( - webrtc::Call* call, - const MediaConfig& config, - const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::AudioCodecPairId codec_pair_id) { +WebRtcVoiceEngine::CreateSendChannel(Call* call, + const MediaConfig& config, + const AudioOptions& options, + const CryptoOptions& crypto_options, + AudioCodecPairId codec_pair_id) { return std::make_unique( this, config, options, crypto_options, call, codec_pair_id); } std::unique_ptr -WebRtcVoiceEngine::CreateReceiveChannel( - webrtc::Call* call, - const MediaConfig& config, - const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::AudioCodecPairId codec_pair_id) { +WebRtcVoiceEngine::CreateReceiveChannel(Call* call, + const MediaConfig& config, + const AudioOptions& options, + const CryptoOptions& crypto_options, + AudioCodecPairId codec_pair_id) { return std::make_unique( this, config, options, crypto_options, call, codec_pair_id); } @@ -604,12 +712,12 @@ void WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { *options.audio_jitter_buffer_min_delay_ms; } - webrtc::AudioProcessing* ap = apm(); + AudioProcessing* ap = apm(); if (!ap) { return; } - webrtc::AudioProcessing::Config apm_config = ap->GetConfig(); + AudioProcessing::Config apm_config = ap->GetConfig(); if (options.echo_cancellation) { apm_config.echo_canceller.enabled = *options.echo_cancellation; @@ -636,44 +744,45 @@ void WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { const bool enabled = *options.noise_suppression; apm_config.noise_suppression.enabled = enabled; apm_config.noise_suppression.level = - webrtc::AudioProcessing::Config::NoiseSuppression::Level::kHigh; + AudioProcessing::Config::NoiseSuppression::Level::kHigh; } ap->ApplyConfig(apm_config); } -const std::vector& WebRtcVoiceEngine::send_codecs() const { +const std::vector& WebRtcVoiceEngine::LegacySendCodecs() const { RTC_DCHECK(signal_thread_checker_.IsCurrent()); return send_codecs_; } -const std::vector& WebRtcVoiceEngine::recv_codecs() const { +const std::vector& WebRtcVoiceEngine::LegacyRecvCodecs() const { RTC_DCHECK(signal_thread_checker_.IsCurrent()); return recv_codecs_; } -std::vector +std::vector WebRtcVoiceEngine::GetRtpHeaderExtensions() const { RTC_DCHECK(signal_thread_checker_.IsCurrent()); - std::vector result; + std::vector result; + // id is *not* incremented for non-default extensions, UsedIds needs to + // resolve conflicts. 
int id = 1; for (const auto& uri : {webrtc::RtpExtension::kAudioLevelUri, webrtc::RtpExtension::kAbsSendTimeUri, webrtc::RtpExtension::kTransportSequenceNumberUri, webrtc::RtpExtension::kMidUri}) { - result.emplace_back(uri, id++, webrtc::RtpTransceiverDirection::kSendRecv); + result.emplace_back(uri, id++, RtpTransceiverDirection::kSendRecv); } for (const auto& uri : {webrtc::RtpExtension::kAbsoluteCaptureTimeUri}) { - result.emplace_back(uri, id++, webrtc::RtpTransceiverDirection::kStopped); + result.emplace_back(uri, id, RtpTransceiverDirection::kStopped); } return result; } -bool WebRtcVoiceEngine::StartAecDump(webrtc::FileWrapper file, - int64_t max_size_bytes) { +bool WebRtcVoiceEngine::StartAecDump(FileWrapper file, int64_t max_size_bytes) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - webrtc::AudioProcessing* ap = apm(); + AudioProcessing* ap = apm(); if (!ap) { RTC_LOG(LS_WARNING) << "Attempting to start aecdump when no audio processing module is " @@ -687,7 +796,7 @@ bool WebRtcVoiceEngine::StartAecDump(webrtc::FileWrapper file, void WebRtcVoiceEngine::StopAecDump() { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - webrtc::AudioProcessing* ap = apm(); + AudioProcessing* ap = apm(); if (ap) { ap->DetachAecDump(); } else { @@ -696,107 +805,28 @@ void WebRtcVoiceEngine::StopAecDump() { } } -absl::optional +std::optional WebRtcVoiceEngine::GetAudioDeviceStats() { return adm()->GetStats(); } -webrtc::AudioDeviceModule* WebRtcVoiceEngine::adm() { +AudioDeviceModule* WebRtcVoiceEngine::adm() { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(adm_); return adm_.get(); } -webrtc::AudioProcessing* WebRtcVoiceEngine::apm() const { +AudioProcessing* WebRtcVoiceEngine::apm() const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); return apm_.get(); } -webrtc::AudioState* WebRtcVoiceEngine::audio_state() { +AudioState* WebRtcVoiceEngine::audio_state() { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(audio_state_); return audio_state_.get(); } -std::vector WebRtcVoiceEngine::CollectCodecs( - const std::vector& specs) const { - PayloadTypeMapper mapper; - std::vector out; - - // Only generate CN payload types for these clockrates: - std::map> generate_cn = { - {8000, false}, {16000, false}, {32000, false}}; - // Only generate telephone-event payload types for these clockrates: - std::map> generate_dtmf = { - {8000, false}, {16000, false}, {32000, false}, {48000, false}}; - - auto map_format = [&mapper](const webrtc::SdpAudioFormat& format, - std::vector* out) { - absl::optional opt_codec = mapper.ToAudioCodec(format); - if (opt_codec) { - if (out) { - out->push_back(*opt_codec); - } - } else { - RTC_LOG(LS_ERROR) << "Unable to assign payload type to format: " - << rtc::ToString(format); - } - - return opt_codec; - }; - - for (const auto& spec : specs) { - // We need to do some extra stuff before adding the main codecs to out. - absl::optional opt_codec = map_format(spec.format, nullptr); - if (opt_codec) { - AudioCodec& codec = *opt_codec; - if (spec.info.supports_network_adaption) { - codec.AddFeedbackParam( - FeedbackParam(kRtcpFbParamTransportCc, kParamValueEmpty)); - } - - if (spec.info.allow_comfort_noise) { - // Generate a CN entry if the decoder allows it and we support the - // clockrate. - auto cn = generate_cn.find(spec.format.clockrate_hz); - if (cn != generate_cn.end()) { - cn->second = true; - } - } - - // Generate a telephone-event entry if we support the clockrate. 
- auto dtmf = generate_dtmf.find(spec.format.clockrate_hz); - if (dtmf != generate_dtmf.end()) { - dtmf->second = true; - } - - out.push_back(codec); - - if (codec.name == kOpusCodecName) { - std::string redFmtp = - rtc::ToString(codec.id) + "/" + rtc::ToString(codec.id); - map_format({kRedCodecName, 48000, 2, {{"", redFmtp}}}, &out); - } - } - } - - // Add CN codecs after "proper" audio codecs. - for (const auto& cn : generate_cn) { - if (cn.second) { - map_format({kCnCodecName, cn.first, 1}, &out); - } - } - - // Add telephone-event codecs last. - for (const auto& dtmf : generate_dtmf) { - if (dtmf.second) { - map_format({kDtmfCodecName, dtmf.first, 1}, &out); - } - } - - return out; -} - // --------------------------------- WebRtcVoiceSendChannel ------------------ class WebRtcVoiceSendChannel::WebRtcAudioSendStream : public AudioSource::Sink { @@ -806,19 +836,19 @@ class WebRtcVoiceSendChannel::WebRtcAudioSendStream : public AudioSource::Sink { const std::string& mid, const std::string& c_name, const std::string track_id, - const absl::optional& + const std::optional& send_codec_spec, bool extmap_allow_mixed, - const std::vector& extensions, + const std::vector& extensions, int max_send_bitrate_bps, int rtcp_report_interval_ms, - const absl::optional& audio_network_adaptor_config, - webrtc::Call* call, - webrtc::Transport* send_transport, - const rtc::scoped_refptr& encoder_factory, - const absl::optional codec_pair_id, - rtc::scoped_refptr frame_encryptor, - const webrtc::CryptoOptions& crypto_options) + const std::optional& audio_network_adaptor_config, + Call* call, + Transport* send_transport, + const scoped_refptr& encoder_factory, + const std::optional codec_pair_id, + scoped_refptr frame_encryptor, + const CryptoOptions& crypto_options) : adaptive_ptime_config_(call->trials()), call_(call), config_(send_transport), @@ -832,7 +862,7 @@ class WebRtcVoiceSendChannel::WebRtcAudioSendStream : public AudioSource::Sink { config_.rtp.extmap_allow_mixed = extmap_allow_mixed; config_.rtp.extensions = extensions; config_.has_dscp = - rtp_parameters_.encodings[0].network_priority != webrtc::Priority::kLow; + rtp_parameters_.encodings[0].network_priority != Priority::kLow; config_.encoder_factory = encoder_factory; config_.codec_pair_id = codec_pair_id; config_.track_id = track_id; @@ -864,12 +894,12 @@ class WebRtcVoiceSendChannel::WebRtcAudioSendStream : public AudioSource::Sink { } void SetSendCodecSpec( - const webrtc::AudioSendStream::Config::SendCodecSpec& send_codec_spec) { + const AudioSendStream::Config::SendCodecSpec& send_codec_spec) { UpdateSendCodecSpec(send_codec_spec); ReconfigureAudioSendStream(nullptr); } - void SetRtpExtensions(const std::vector& extensions) { + void SetRtpExtensions(const std::vector& extensions) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.rtp.extensions = extensions; rtp_parameters_.header_extensions = extensions; @@ -890,15 +920,26 @@ class WebRtcVoiceSendChannel::WebRtcAudioSendStream : public AudioSource::Sink { ReconfigureAudioSendStream(nullptr); } + void SetRtcpMode(RtcpMode mode) { + bool reduced_size = mode == RtcpMode::kReducedSize; + if (rtp_parameters_.rtcp.reduced_size == reduced_size) { + return; + } + rtp_parameters_.rtcp.reduced_size = reduced_size; + // Note: this is not wired up beyond this point. For all audio + // RTCP packets sent by a sender there is no difference. 
+ ReconfigureAudioSendStream(nullptr); + } + void SetFrameEncryptor( - rtc::scoped_refptr frame_encryptor) { + scoped_refptr frame_encryptor) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.frame_encryptor = frame_encryptor; ReconfigureAudioSendStream(nullptr); } void SetAudioNetworkAdaptorConfig( - const absl::optional& audio_network_adaptor_config) { + const std::optional& audio_network_adaptor_config) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); if (audio_network_adaptor_config_from_options_ == audio_network_adaptor_config) { @@ -958,7 +999,7 @@ class WebRtcVoiceSendChannel::WebRtcAudioSendStream : public AudioSource::Sink { return muted_; } - webrtc::AudioSendStream::Stats GetStats(bool has_remote_tracks) const { + AudioSendStream::Stats GetStats(bool has_remote_tracks) const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(stream_); return stream_->GetStats(has_remote_tracks); @@ -999,13 +1040,13 @@ class WebRtcVoiceSendChannel::WebRtcAudioSendStream : public AudioSource::Sink { int sample_rate, size_t number_of_channels, size_t number_of_frames, - absl::optional absolute_capture_timestamp_ms) override { + std::optional absolute_capture_timestamp_ms) override { TRACE_EVENT_BEGIN2("webrtc", "WebRtcAudioSendStream::OnData", "sample_rate", sample_rate, "number_of_frames", number_of_frames); RTC_DCHECK_EQ(16, bits_per_sample); RTC_CHECK_RUNS_SERIALIZED(&audio_capture_race_checker_); RTC_DCHECK(stream_); - std::unique_ptr audio_frame(new webrtc::AudioFrame()); + std::unique_ptr audio_frame(new AudioFrame()); audio_frame->UpdateFrame( audio_frame->timestamp_, static_cast(audio_data), number_of_frames, sample_rate, audio_frame->speech_type_, @@ -1031,38 +1072,36 @@ class WebRtcVoiceSendChannel::WebRtcAudioSendStream : public AudioSource::Sink { UpdateSendState(); } - const webrtc::RtpParameters& rtp_parameters() const { - return rtp_parameters_; - } + const RtpParameters& rtp_parameters() const { return rtp_parameters_; } - webrtc::RTCError SetRtpParameters(const webrtc::RtpParameters& parameters, - webrtc::SetParametersCallback callback) { - webrtc::RTCError error = CheckRtpParametersInvalidModificationAndValues( - rtp_parameters_, parameters); + RTCError SetRtpParameters(const RtpParameters& parameters, + SetParametersCallback callback) { + RTCError error = CheckRtpParametersInvalidModificationAndValues( + rtp_parameters_, parameters, call_->trials()); if (!error.ok()) { return webrtc::InvokeSetParametersCallback(callback, error); } - absl::optional send_rate; + std::optional send_rate; if (audio_codec_spec_) { send_rate = ComputeSendBitrate(max_send_bitrate_bps_, parameters.encodings[0].max_bitrate_bps, *audio_codec_spec_); if (!send_rate) { return webrtc::InvokeSetParametersCallback( - callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); + callback, RTCError(RTCErrorType::INTERNAL_ERROR)); } } - const absl::optional old_rtp_max_bitrate = + const std::optional old_rtp_max_bitrate = rtp_parameters_.encodings[0].max_bitrate_bps; double old_priority = rtp_parameters_.encodings[0].bitrate_priority; - webrtc::Priority old_dscp = rtp_parameters_.encodings[0].network_priority; + Priority old_dscp = rtp_parameters_.encodings[0].network_priority; bool old_adaptive_ptime = rtp_parameters_.encodings[0].adaptive_ptime; rtp_parameters_ = parameters; config_.bitrate_priority = rtp_parameters_.encodings[0].bitrate_priority; - config_.has_dscp = (rtp_parameters_.encodings[0].network_priority != - webrtc::Priority::kLow); + config_.has_dscp = + 
(rtp_parameters_.encodings[0].network_priority != Priority::kLow); bool reconfigure_send_stream = (rtp_parameters_.encodings[0].max_bitrate_bps != old_rtp_max_bitrate) || @@ -1082,19 +1121,20 @@ class WebRtcVoiceSendChannel::WebRtcAudioSendStream : public AudioSource::Sink { UpdateAllowedBitrateRange(); ReconfigureAudioSendStream(std::move(callback)); } else { - webrtc::InvokeSetParametersCallback(callback, webrtc::RTCError::OK()); + webrtc::InvokeSetParametersCallback(callback, RTCError::OK()); } rtp_parameters_.rtcp.cname = config_.rtp.c_name; - rtp_parameters_.rtcp.reduced_size = false; + rtp_parameters_.rtcp.reduced_size = + config_.rtp.rtcp_mode == RtcpMode::kReducedSize; // parameters.encodings[0].active could have changed. UpdateSendState(); - return webrtc::RTCError::OK(); + return RTCError::OK(); } void SetEncoderToPacketizerFrameTransformer( - rtc::scoped_refptr frame_transformer) { + scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.frame_transformer = std::move(frame_transformer); ReconfigureAudioSendStream(nullptr); @@ -1105,9 +1145,10 @@ class WebRtcVoiceSendChannel::WebRtcAudioSendStream : public AudioSource::Sink { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(stream_); RTC_DCHECK_EQ(1UL, rtp_parameters_.encodings.size()); - if (send_ && source_ != nullptr && rtp_parameters_.encodings[0].active) { + // Stream can be started without |source_| being set. + if (send_ && rtp_parameters_.encodings[0].active) { stream_->Start(); - } else { // !send || source_ = nullptr + } else { stream_->Stop(); } } @@ -1136,7 +1177,7 @@ class WebRtcVoiceSendChannel::WebRtcAudioSendStream : public AudioSource::Sink { } void UpdateSendCodecSpec( - const webrtc::AudioSendStream::Config::SendCodecSpec& send_codec_spec) { + const AudioSendStream::Config::SendCodecSpec& send_codec_spec) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); config_.send_codec_spec = send_codec_spec; auto info = @@ -1150,8 +1191,7 @@ class WebRtcVoiceSendChannel::WebRtcAudioSendStream : public AudioSource::Sink { std::min(info->max_bitrate_bps, *send_codec_spec.target_bitrate_bps)); } - audio_codec_spec_.emplace( - webrtc::AudioCodecSpec{send_codec_spec.format, *info}); + audio_codec_spec_.emplace(AudioCodecSpec{send_codec_spec.format, *info}); config_.send_codec_spec->target_bitrate_bps = ComputeSendBitrate( max_send_bitrate_bps_, rtp_parameters_.encodings[0].max_bitrate_bps, @@ -1179,7 +1219,7 @@ class WebRtcVoiceSendChannel::WebRtcAudioSendStream : public AudioSource::Sink { audio_network_adaptor_config_from_options_; } - void ReconfigureAudioSendStream(webrtc::SetParametersCallback callback) { + void ReconfigureAudioSendStream(SetParametersCallback callback) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(stream_); stream_->Reconfigure(config_, std::move(callback)); @@ -1188,13 +1228,13 @@ class WebRtcVoiceSendChannel::WebRtcAudioSendStream : public AudioSource::Sink { int NumPreferredChannels() const override { return num_encoded_channels_; } const AdaptivePtimeConfig adaptive_ptime_config_; - webrtc::SequenceChecker worker_thread_checker_; - rtc::RaceChecker audio_capture_race_checker_; - webrtc::Call* call_ = nullptr; - webrtc::AudioSendStream::Config config_; + SequenceChecker worker_thread_checker_; + RaceChecker audio_capture_race_checker_; + Call* call_ = nullptr; + AudioSendStream::Config config_; // The stream is owned by WebRtcAudioSendStream and may be reallocated if // configuration changes. 
- webrtc::AudioSendStream* stream_ = nullptr; + AudioSendStream* stream_ = nullptr; // Raw pointer to AudioSource owned by LocalAudioTrackHandler. // PeerConnection will make sure invalidating the pointer before the object @@ -1203,11 +1243,11 @@ class WebRtcVoiceSendChannel::WebRtcAudioSendStream : public AudioSource::Sink { bool send_ = false; bool muted_ = false; int max_send_bitrate_bps_; - webrtc::RtpParameters rtp_parameters_; - absl::optional audio_codec_spec_; + RtpParameters rtp_parameters_; + std::optional audio_codec_spec_; // TODO(webrtc:11717): Remove this once audio_network_adaptor in AudioOptions // has been removed. - absl::optional audio_network_adaptor_config_from_options_; + std::optional audio_network_adaptor_config_from_options_; std::atomic num_encoded_channels_{-1}; }; @@ -1215,9 +1255,9 @@ WebRtcVoiceSendChannel::WebRtcVoiceSendChannel( WebRtcVoiceEngine* engine, const MediaConfig& config, const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::Call* call, - webrtc::AudioCodecPairId codec_pair_id) + const CryptoOptions& crypto_options, + Call* call, + AudioCodecPairId codec_pair_id) : MediaChannelUtil(call->network_thread(), config.enable_dscp), worker_thread_(call->worker_thread()), engine_(engine), @@ -1251,7 +1291,7 @@ bool WebRtcVoiceSendChannel::SetOptions(const AudioOptions& options) { options_.SetAll(options); engine()->ApplyOptions(options_); - absl::optional audio_network_adaptor_config = + std::optional audio_network_adaptor_config = GetAudioNetworkAdaptorConfig(options_); for (auto& it : send_streams_) { it.second->SetAudioNetworkAdaptorConfig(audio_network_adaptor_config); @@ -1272,7 +1312,7 @@ bool WebRtcVoiceSendChannel::SetSenderParameters( // all the information at once. // Finding if the RtpParameters force a specific codec - absl::optional force_codec; + std::optional force_codec; if (send_streams_.size() == 1) { // Since audio simulcast is not supported, currently, only PlanB // has multiple tracks and we don't care about getting the @@ -1313,9 +1353,9 @@ bool WebRtcVoiceSendChannel::SetSenderParameters( } } - std::vector filtered_extensions = FilterRtpExtensions( - params.extensions, webrtc::RtpExtension::IsSupportedForAudio, true, - call_->trials()); + std::vector filtered_extensions = + FilterRtpExtensions(params.extensions, RtpExtension::IsSupportedForAudio, + true, call_->trials()); if (send_rtp_extensions_ != filtered_extensions) { send_rtp_extensions_.swap(filtered_extensions); for (auto& it : send_streams_) { @@ -1329,17 +1369,22 @@ bool WebRtcVoiceSendChannel::SetSenderParameters( } } - if (!SetMaxSendBitrate(params.max_bandwidth_bps)) { + if (send_codec_spec_ && !SetMaxSendBitrate(params.max_bandwidth_bps)) { return false; } + rtcp_mode_ = + params.rtcp.reduced_size ? RtcpMode::kReducedSize : RtcpMode::kCompound; + for (auto& it : send_streams_) { + it.second->SetRtcpMode(rtcp_mode_); + } return SetOptions(params.options); } -absl::optional WebRtcVoiceSendChannel::GetSendCodec() const { +std::optional WebRtcVoiceSendChannel::GetSendCodec() const { if (send_codec_spec_) { - return CreateAudioCodec(send_codec_spec_->format); + return webrtc::CreateAudioCodec(send_codec_spec_->format); } - return absl::nullopt; + return std::nullopt; } // Utility function called from SetSenderParameters() to extract current send @@ -1347,13 +1392,13 @@ absl::optional WebRtcVoiceSendChannel::GetSendCodec() const { // and receive streams may be reconfigured based on the new settings. 
bool WebRtcVoiceSendChannel::SetSendCodecs( const std::vector& codecs, - absl::optional preferred_codec) { + std::optional preferred_codec) { RTC_DCHECK_RUN_ON(worker_thread_); - dtmf_payload_type_ = absl::nullopt; + dtmf_payload_type_ = std::nullopt; dtmf_payload_freq_ = -1; // Validate supplied codecs list. - for (const Codec& codec : codecs) { + for (const webrtc::Codec& codec : codecs) { // TODO(solenberg): Validate more aspects of input - that payload types // don't overlap, remove redundant/unsupported codecs etc - // the same way it is done for RtpHeaderExtensions. @@ -1368,7 +1413,7 @@ bool WebRtcVoiceSendChannel::SetSendCodecs( // case we don't have a DTMF codec with a rate matching the send codec's, or // if this function returns early. std::vector dtmf_codecs; - for (const Codec& codec : codecs) { + for (const webrtc::Codec& codec : codecs) { if (IsCodec(codec, kDtmfCodecName)) { dtmf_codecs.push_back(codec); if (!dtmf_payload_type_ || codec.clockrate < dtmf_payload_freq_) { @@ -1379,33 +1424,32 @@ bool WebRtcVoiceSendChannel::SetSendCodecs( } // Scan through the list to figure out the codec to use for sending. - absl::optional - send_codec_spec; - webrtc::BitrateConstraints bitrate_config; - absl::optional voice_codec_info; + std::optional send_codec_spec; + BitrateConstraints bitrate_config; + std::optional voice_codec_info; size_t send_codec_position = 0; - for (const Codec& voice_codec : codecs) { + for (const webrtc::Codec& voice_codec : codecs) { if (!(IsCodec(voice_codec, kCnCodecName) || IsCodec(voice_codec, kDtmfCodecName) || IsCodec(voice_codec, kRedCodecName)) && (!preferred_codec || preferred_codec->Matches(voice_codec))) { - webrtc::SdpAudioFormat format(voice_codec.name, voice_codec.clockrate, - voice_codec.channels, voice_codec.params); + SdpAudioFormat format(voice_codec.name, voice_codec.clockrate, + voice_codec.channels, voice_codec.params); voice_codec_info = engine()->encoder_factory_->QueryAudioEncoder(format); if (!voice_codec_info) { RTC_LOG(LS_WARNING) << "Unknown codec " << ToString(voice_codec); + send_codec_position++; continue; } - send_codec_spec = webrtc::AudioSendStream::Config::SendCodecSpec( - voice_codec.id, format); + send_codec_spec = + AudioSendStream::Config::SendCodecSpec(voice_codec.id, format); if (voice_codec.bitrate > 0) { send_codec_spec->target_bitrate_bps = voice_codec.bitrate; } - send_codec_spec->transport_cc_enabled = HasTransportCc(voice_codec); - send_codec_spec->nack_enabled = HasNack(voice_codec); - send_codec_spec->enable_non_sender_rtt = HasRrtr(voice_codec); + send_codec_spec->nack_enabled = webrtc::HasNack(voice_codec); + send_codec_spec->enable_non_sender_rtt = webrtc::HasRrtr(voice_codec); bitrate_config = GetBitrateConfigForCodec(voice_codec); break; } @@ -1413,22 +1457,22 @@ bool WebRtcVoiceSendChannel::SetSendCodecs( } if (!send_codec_spec) { - return false; + // No codecs in common, bail out early. + return true; } RTC_DCHECK(voice_codec_info); if (voice_codec_info->allow_comfort_noise) { // Loop through the codecs list again to find the CN codec. // TODO(solenberg): Break out into a separate function? 
- for (const Codec& cn_codec : codecs) { + for (const webrtc::Codec& cn_codec : codecs) { if (IsCodec(cn_codec, kCnCodecName) && cn_codec.clockrate == send_codec_spec->format.clockrate_hz && cn_codec.channels == voice_codec_info->num_channels) { if (cn_codec.channels != 1) { RTC_LOG(LS_WARNING) << "CN #channels " << cn_codec.channels << " not supported."; - } else if (cn_codec.clockrate != 8000 && cn_codec.clockrate != 16000 && - cn_codec.clockrate != 32000) { + } else if (cn_codec.clockrate != 8000) { RTC_LOG(LS_WARNING) << "CN frequency " << cn_codec.clockrate << " not supported."; } else { @@ -1439,7 +1483,7 @@ bool WebRtcVoiceSendChannel::SetSendCodecs( } // Find the telephone-event PT exactly matching the preferred send codec. - for (const Codec& dtmf_codec : dtmf_codecs) { + for (const webrtc::Codec& dtmf_codec : dtmf_codecs) { if (dtmf_codec.clockrate == send_codec_spec->format.clockrate_hz) { dtmf_payload_type_ = dtmf_codec.id; dtmf_payload_freq_ = dtmf_codec.clockrate; @@ -1545,7 +1589,7 @@ bool WebRtcVoiceSendChannel::AddSendStream(const StreamParams& sp) { return false; } - absl::optional audio_network_adaptor_config = + std::optional audio_network_adaptor_config = GetAudioNetworkAdaptorConfig(options_); WebRtcAudioSendStream* stream = new WebRtcAudioSendStream( ssrc, mid_, sp.cname, sp.id, send_codec_spec_, ExtmapAllowMixed(), @@ -1626,7 +1670,7 @@ bool WebRtcVoiceSendChannel::CanInsertDtmf() { void WebRtcVoiceSendChannel::SetFrameEncryptor( uint32_t ssrc, - rtc::scoped_refptr frame_encryptor) { + scoped_refptr frame_encryptor) { RTC_DCHECK_RUN_ON(worker_thread_); auto matching_stream = send_streams_.find(ssrc); if (matching_stream != send_streams_.end()) { @@ -1658,7 +1702,7 @@ bool WebRtcVoiceSendChannel::InsertDtmf(uint32_t ssrc, event, duration); } -void WebRtcVoiceSendChannel::OnPacketSent(const rtc::SentPacket& sent_packet) { +void WebRtcVoiceSendChannel::OnPacketSent(const SentPacketInfo& sent_packet) { RTC_DCHECK_RUN_ON(&network_thread_checker_); // TODO(tommi): We shouldn't need to go through call_ to deliver this // notification. We should already have direct access to @@ -1671,7 +1715,7 @@ void WebRtcVoiceSendChannel::OnPacketSent(const rtc::SentPacket& sent_packet) { void WebRtcVoiceSendChannel::OnNetworkRouteChanged( absl::string_view transport_name, - const rtc::NetworkRoute& network_route) { + const NetworkRoute& network_route) { RTC_DCHECK_RUN_ON(&network_thread_checker_); call_->OnAudioTransportOverheadChanged(network_route.packet_overhead); @@ -1702,7 +1746,7 @@ bool WebRtcVoiceSendChannel::MuteStream(uint32_t ssrc, bool muted) { for (const auto& kv : send_streams_) { all_muted = all_muted && kv.second->muted(); } - webrtc::AudioProcessing* ap = engine()->apm(); + AudioProcessing* ap = engine()->apm(); if (ap) { ap->set_output_will_be_muted(all_muted); } @@ -1726,8 +1770,7 @@ void WebRtcVoiceSendChannel::OnReadyToSend(bool ready) { RTC_DCHECK_RUN_ON(&network_thread_checker_); RTC_LOG(LS_VERBOSE) << "OnReadyToSend: " << (ready ? "Ready." : "Not ready."); call_->SignalChannelNetworkState( - webrtc::MediaType::AUDIO, - ready ? webrtc::kNetworkUp : webrtc::kNetworkDown); + MediaType::AUDIO, ready ? webrtc::kNetworkUp : webrtc::kNetworkDown); } bool WebRtcVoiceSendChannel::GetStats(VoiceMediaSendInfo* info) { @@ -1741,7 +1784,7 @@ bool WebRtcVoiceSendChannel::GetStats(VoiceMediaSendInfo* info) { // senders. 
RTC_DCHECK(info->senders.size() == 0U || send_streams_.size() == 0); for (const auto& stream : send_streams_) { - webrtc::AudioSendStream::Stats stats = stream.second->GetStats(false); + AudioSendStream::Stats stats = stream.second->GetStats(false); VoiceSenderInfo sinfo; sinfo.add_ssrc(stats.local_ssrc); sinfo.payload_bytes_sent = stats.payload_bytes_sent; @@ -1753,7 +1796,10 @@ bool WebRtcVoiceSendChannel::GetStats(VoiceMediaSendInfo* info) { sinfo.packets_lost = stats.packets_lost; sinfo.fraction_lost = stats.fraction_lost; sinfo.nacks_received = stats.nacks_received; - sinfo.target_bitrate = stats.target_bitrate_bps; + sinfo.target_bitrate = + stats.target_bitrate_bps > 0 + ? std::optional(DataRate::BitsPerSec(stats.target_bitrate_bps)) + : std::nullopt; sinfo.codec_name = stats.codec_name; sinfo.codec_payload_type = stats.codec_payload_type; sinfo.jitter_ms = stats.jitter_ms; @@ -1781,9 +1827,11 @@ bool WebRtcVoiceSendChannel::GetStats(VoiceMediaSendInfo* info) { void WebRtcVoiceSendChannel::FillSendCodecStats( VoiceMediaSendInfo* voice_media_info) { for (const auto& sender : voice_media_info->senders) { - auto codec = absl::c_find_if(send_codecs_, [&sender](const AudioCodec& c) { - return sender.codec_payload_type && *sender.codec_payload_type == c.id; - }); + auto codec = + absl::c_find_if(send_codecs_, [&sender](const webrtc::Codec& c) { + return sender.codec_payload_type && + *sender.codec_payload_type == c.id; + }); if (codec != send_codecs_.end()) { voice_media_info->send_codecs.insert( std::make_pair(codec->id, codec->ToCodecParameters())); @@ -1793,7 +1841,7 @@ void WebRtcVoiceSendChannel::FillSendCodecStats( void WebRtcVoiceSendChannel::SetEncoderToPacketizerFrameTransformer( uint32_t ssrc, - rtc::scoped_refptr frame_transformer) { + scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(worker_thread_); auto matching_stream = send_streams_.find(ssrc); if (matching_stream == send_streams_.end()) { @@ -1805,7 +1853,7 @@ void WebRtcVoiceSendChannel::SetEncoderToPacketizerFrameTransformer( std::move(frame_transformer)); } -webrtc::RtpParameters WebRtcVoiceSendChannel::GetRtpSendParameters( +RtpParameters WebRtcVoiceSendChannel::GetRtpSendParameters( uint32_t ssrc) const { RTC_DCHECK_RUN_ON(worker_thread_); auto it = send_streams_.find(ssrc); @@ -1813,22 +1861,22 @@ webrtc::RtpParameters WebRtcVoiceSendChannel::GetRtpSendParameters( RTC_LOG(LS_WARNING) << "Attempting to get RTP send parameters for stream " "with ssrc " << ssrc << " which doesn't exist."; - return webrtc::RtpParameters(); + return RtpParameters(); } - webrtc::RtpParameters rtp_params = it->second->rtp_parameters(); + RtpParameters rtp_params = it->second->rtp_parameters(); // Need to add the common list of codecs to the send stream-specific // RTP parameters. 
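In the GetStats hunk above, the sender's target_bitrate is now surfaced as an optional rate rather than a raw bits-per-second integer, with non-positive values mapped to "unknown". A minimal sketch of that conversion, using a hypothetical BitsPerSecond wrapper in place of webrtc::DataRate:

#include <cstdint>
#include <optional>

// Hypothetical stand-in for a strongly typed rate such as webrtc::DataRate.
struct BitsPerSecond {
  int64_t value = 0;
};

// Matches the `stats.target_bitrate_bps > 0 ? ... : std::nullopt` pattern
// in the hunk above.
std::optional<BitsPerSecond> ToOptionalTargetBitrate(int64_t target_bitrate_bps) {
  if (target_bitrate_bps > 0) {
    return BitsPerSecond{target_bitrate_bps};
  }
  return std::nullopt;
}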
- for (const AudioCodec& codec : send_codecs_) { + for (const webrtc::Codec& codec : send_codecs_) { rtp_params.codecs.push_back(codec.ToCodecParameters()); } return rtp_params; } -webrtc::RTCError WebRtcVoiceSendChannel::SetRtpSendParameters( +RTCError WebRtcVoiceSendChannel::SetRtpSendParameters( uint32_t ssrc, - const webrtc::RtpParameters& parameters, - webrtc::SetParametersCallback callback) { + const RtpParameters& parameters, + SetParametersCallback callback) { RTC_DCHECK_RUN_ON(worker_thread_); auto it = send_streams_.find(ssrc); if (it == send_streams_.end()) { @@ -1836,51 +1884,59 @@ webrtc::RTCError WebRtcVoiceSendChannel::SetRtpSendParameters( "with ssrc " << ssrc << " which doesn't exist."; return webrtc::InvokeSetParametersCallback( - callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); + callback, RTCError(RTCErrorType::INTERNAL_ERROR)); } // TODO(deadbeef): Handle setting parameters with a list of codecs in a // different order (which should change the send codec). - webrtc::RtpParameters current_parameters = GetRtpSendParameters(ssrc); + RtpParameters current_parameters = GetRtpSendParameters(ssrc); if (current_parameters.codecs != parameters.codecs) { RTC_DLOG(LS_ERROR) << "Using SetParameters to change the set of codecs " "is not currently supported."; return webrtc::InvokeSetParametersCallback( - callback, webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); + callback, RTCError(RTCErrorType::INTERNAL_ERROR)); } if (!parameters.encodings.empty()) { // Note that these values come from: // https://tools.ietf.org/html/draft-ietf-tsvwg-rtcweb-qos-16#section-5 - rtc::DiffServCodePoint new_dscp = rtc::DSCP_DEFAULT; + DiffServCodePoint new_dscp = webrtc::DSCP_DEFAULT; switch (parameters.encodings[0].network_priority) { - case webrtc::Priority::kVeryLow: - new_dscp = rtc::DSCP_CS1; + case Priority::kVeryLow: + new_dscp = webrtc::DSCP_CS1; break; - case webrtc::Priority::kLow: - new_dscp = rtc::DSCP_DEFAULT; + case Priority::kLow: + new_dscp = webrtc::DSCP_DEFAULT; break; - case webrtc::Priority::kMedium: - new_dscp = rtc::DSCP_EF; + case Priority::kMedium: + new_dscp = webrtc::DSCP_EF; break; - case webrtc::Priority::kHigh: - new_dscp = rtc::DSCP_EF; + case Priority::kHigh: + new_dscp = webrtc::DSCP_EF; break; } SetPreferredDscp(new_dscp); - absl::optional send_codec = GetSendCodec(); - // TODO(orphis): Support mixed-codec simulcast + std::optional send_codec = GetSendCodec(); + // Since we validate that all layers have the same value, we can just check + // the first layer. + // TODO: https://issues.webrtc.org/362277533 - Support mixed-codec simulcast if (parameters.encodings[0].codec && send_codec && !send_codec->MatchesRtpCodec(*parameters.encodings[0].codec)) { - RTC_LOG(LS_ERROR) << "Trying to change codec to " - << parameters.encodings[0].codec->name; + RTC_LOG(LS_VERBOSE) << "Trying to change codec to " + << parameters.encodings[0].codec->name; auto matched_codec = absl::c_find_if(send_codecs_, [&](auto negotiated_codec) { return negotiated_codec.MatchesRtpCodec( *parameters.encodings[0].codec); }); - RTC_DCHECK(matched_codec != send_codecs_.end()); + + if (matched_codec == send_codecs_.end()) { + return webrtc::InvokeSetParametersCallback( + callback, + RTCError(RTCErrorType::INVALID_MODIFICATION, + "Attempted to use an unsupported codec for layer 0")); + } SetSendCodecs(send_codecs_, *matched_codec); } @@ -1895,7 +1951,7 @@ webrtc::RTCError WebRtcVoiceSendChannel::SetRtpSendParameters( // 2. AudioSendStream can be recreated. 
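The switch in the SetRtpSendParameters hunk above maps the first encoding's network priority onto a DSCP value, following draft-ietf-tsvwg-rtcweb-qos. The same audio mapping restated compactly, with local enums standing in for webrtc::Priority and the DSCP constants:

// Local stand-ins; the real code uses webrtc::Priority and the DSCP_* values.
enum class MiniPriority { kVeryLow, kLow, kMedium, kHigh };
enum class MiniDscp { kDefault, kCs1, kEf };

// very-low -> CS1, low -> default, medium and high -> EF, as in the hunk above.
MiniDscp DscpForAudioPriority(MiniPriority priority) {
  switch (priority) {
    case MiniPriority::kVeryLow:
      return MiniDscp::kCs1;
    case MiniPriority::kLow:
      return MiniDscp::kDefault;
    case MiniPriority::kMedium:
    case MiniPriority::kHigh:
      return MiniDscp::kEf;
  }
  return MiniDscp::kDefault;  // Not reached; silences -Wreturn-type.
}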
// Codecs are handled at the WebRtcVoiceMediaChannel level. - webrtc::RtpParameters reduced_params = parameters; + RtpParameters reduced_params = parameters; reduced_params.codecs.clear(); return it->second->SetRtpParameters(reduced_params, std::move(callback)); } @@ -1904,8 +1960,8 @@ webrtc::RTCError WebRtcVoiceSendChannel::SetRtpSendParameters( class WebRtcVoiceReceiveChannel::WebRtcAudioReceiveStream { public: - WebRtcAudioReceiveStream(webrtc::AudioReceiveStreamInterface::Config config, - webrtc::Call* call) + WebRtcAudioReceiveStream(AudioReceiveStreamInterface::Config config, + Call* call) : call_(call), stream_(call_->CreateAudioReceiveStream(config)) { RTC_DCHECK(call); RTC_DCHECK(stream_); @@ -1920,13 +1976,13 @@ class WebRtcVoiceReceiveChannel::WebRtcAudioReceiveStream { call_->DestroyAudioReceiveStream(stream_); } - webrtc::AudioReceiveStreamInterface& stream() { + AudioReceiveStreamInterface& stream() { RTC_DCHECK(stream_); return *stream_; } void SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) { + scoped_refptr frame_decryptor) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); stream_->SetFrameDecryptor(std::move(frame_decryptor)); } @@ -1936,24 +1992,29 @@ class WebRtcVoiceReceiveChannel::WebRtcAudioReceiveStream { stream_->SetNackHistory(use_nack ? kNackRtpHistoryMs : 0); } + void SetRtcpMode(::webrtc::RtcpMode mode) { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + stream_->SetRtcpMode(mode); + } + void SetNonSenderRttMeasurement(bool enabled) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); stream_->SetNonSenderRttMeasurement(enabled); } // Set a new payload type -> decoder map. - void SetDecoderMap(const std::map& decoder_map) { + void SetDecoderMap(const std::map& decoder_map) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); stream_->SetDecoderMap(decoder_map); } - webrtc::AudioReceiveStreamInterface::Stats GetStats( + AudioReceiveStreamInterface::Stats GetStats( bool get_and_clear_legacy_stats) const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); return stream_->GetStats(get_and_clear_legacy_stats); } - void SetRawAudioSink(std::unique_ptr sink) { + void SetRawAudioSink(std::unique_ptr sink) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); // Need to update the stream's sink first; once raw_audio_sink_ is // reassigned, whatever was in there before is destroyed. 
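The SetRtpSendParameters hunk above also replaces the old RTC_DCHECK with a proper error path: if encodings[0].codec does not match any negotiated send codec, the callback is resolved with INVALID_MODIFICATION instead of crashing debug builds. A minimal sketch of that lookup-or-error shape, with hypothetical MiniCodec and MiniError types rather than webrtc::Codec and webrtc::RTCError:

#include <string>
#include <vector>

// Hypothetical stand-ins; not WebRTC types.
struct MiniCodec {
  int id = -1;
  std::string name;
  bool Matches(const std::string& requested_name) const {
    return name == requested_name;
  }
};

struct MiniError {
  bool ok = true;
  std::string message;
};

// Returns an error instead of asserting when the requested codec was never
// negotiated, mirroring the INVALID_MODIFICATION path above.
MiniError SelectNegotiatedCodec(const std::vector<MiniCodec>& negotiated,
                                const std::string& requested_name,
                                MiniCodec* out) {
  for (const MiniCodec& codec : negotiated) {
    if (codec.Matches(requested_name)) {
      *out = codec;
      return {true, ""};
    }
  }
  return {false, "Attempted to use an unsupported codec for layer 0"};
}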
@@ -1992,22 +2053,22 @@ class WebRtcVoiceReceiveChannel::WebRtcAudioReceiveStream { return stream_->GetBaseMinimumPlayoutDelayMs(); } - std::vector GetSources() { + std::vector GetSources() { RTC_DCHECK_RUN_ON(&worker_thread_checker_); return stream_->GetSources(); } void SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) { + scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); stream_->SetDepacketizerToDecoderFrameTransformer(frame_transformer); } private: - webrtc::SequenceChecker worker_thread_checker_; - webrtc::Call* call_ = nullptr; - webrtc::AudioReceiveStreamInterface* const stream_ = nullptr; - std::unique_ptr raw_audio_sink_ + SequenceChecker worker_thread_checker_; + Call* call_ = nullptr; + AudioReceiveStreamInterface* const stream_ = nullptr; + std::unique_ptr raw_audio_sink_ RTC_GUARDED_BY(worker_thread_checker_); }; @@ -2015,9 +2076,9 @@ WebRtcVoiceReceiveChannel::WebRtcVoiceReceiveChannel( WebRtcVoiceEngine* engine, const MediaConfig& config, const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::Call* call, - webrtc::AudioCodecPairId codec_pair_id) + const CryptoOptions& crypto_options, + Call* call, + AudioCodecPairId codec_pair_id) : MediaChannelUtil(call->network_thread(), config.enable_dscp), worker_thread_(call->worker_thread()), engine_(engine), @@ -2050,6 +2111,7 @@ bool WebRtcVoiceReceiveChannel::SetReceiverParameters( << params.ToString(); // TODO(pthatcher): Refactor this to be more clean now that we have // all the information at once. + mid_ = params.mid; if (!SetRecvCodecs(params.codecs)) { return false; @@ -2058,43 +2120,47 @@ bool WebRtcVoiceReceiveChannel::SetReceiverParameters( if (!ValidateRtpExtensions(params.extensions, recv_rtp_extensions_)) { return false; } - std::vector filtered_extensions = FilterRtpExtensions( - params.extensions, webrtc::RtpExtension::IsSupportedForAudio, false, - call_->trials()); + std::vector filtered_extensions = + FilterRtpExtensions(params.extensions, RtpExtension::IsSupportedForAudio, + false, call_->trials()); if (recv_rtp_extensions_ != filtered_extensions) { recv_rtp_extensions_.swap(filtered_extensions); - recv_rtp_extension_map_ = - webrtc::RtpHeaderExtensionMap(recv_rtp_extensions_); + recv_rtp_extension_map_ = RtpHeaderExtensionMap(recv_rtp_extensions_); } + // RTCP mode, NACK, and receive-side RTT are not configured here because they + // enable send functionality in the receive channels. This functionality is + // instead configured using the SetReceiveRtcpMode, SetReceiveNackEnabled, and + // SetReceiveNonSenderRttEnabled methods. 
return true; } -webrtc::RtpParameters WebRtcVoiceReceiveChannel::GetRtpReceiverParameters( +RtpParameters WebRtcVoiceReceiveChannel::GetRtpReceiverParameters( uint32_t ssrc) const { RTC_DCHECK_RUN_ON(worker_thread_); - webrtc::RtpParameters rtp_params; + RtpParameters rtp_params; auto it = recv_streams_.find(ssrc); if (it == recv_streams_.end()) { RTC_LOG(LS_WARNING) << "Attempting to get RTP receive parameters for stream " "with ssrc " << ssrc << " which doesn't exist."; - return webrtc::RtpParameters(); + return RtpParameters(); } rtp_params.encodings.emplace_back(); rtp_params.encodings.back().ssrc = it->second->stream().remote_ssrc(); rtp_params.header_extensions = recv_rtp_extensions_; - for (const AudioCodec& codec : recv_codecs_) { + for (const webrtc::Codec& codec : recv_codecs_) { rtp_params.codecs.push_back(codec.ToCodecParameters()); } + rtp_params.rtcp.reduced_size = recv_rtcp_mode_ == RtcpMode::kReducedSize; return rtp_params; } -webrtc::RtpParameters -WebRtcVoiceReceiveChannel::GetDefaultRtpReceiveParameters() const { +RtpParameters WebRtcVoiceReceiveChannel::GetDefaultRtpReceiveParameters() + const { RTC_DCHECK_RUN_ON(worker_thread_); - webrtc::RtpParameters rtp_params; + RtpParameters rtp_params; if (!default_sink_) { // Getting parameters on a default, unsignaled audio receive stream but // because we've not configured to receive such a stream, `encodings` is @@ -2103,7 +2169,7 @@ WebRtcVoiceReceiveChannel::GetDefaultRtpReceiveParameters() const { } rtp_params.encodings.emplace_back(); - for (const AudioCodec& codec : recv_codecs_) { + for (const webrtc::Codec& codec : recv_codecs_) { rtp_params.codecs.push_back(codec.ToCodecParameters()); } return rtp_params; @@ -2125,11 +2191,20 @@ bool WebRtcVoiceReceiveChannel::SetOptions(const AudioOptions& options) { } bool WebRtcVoiceReceiveChannel::SetRecvCodecs( - const std::vector& codecs) { + const std::vector& codecs_in) { RTC_DCHECK_RUN_ON(worker_thread_); - // Set the payload types to be used for incoming media. - RTC_LOG(LS_INFO) << "Setting receive voice codecs."; + auto codecs = codecs_in; + // Record the payload types used in the payload type suggester. + RTC_LOG(LS_INFO) << "Setting receive voice codecs. Mid is " << mid_; + for (auto& codec : codecs) { + auto error = call_->GetPayloadTypeSuggester()->AddLocalMapping( + mid_, codec.id, codec); + if (!error.ok()) { + RTC_LOG(LS_ERROR) << "Failed to register PT for " << codec.ToString(); + return false; + } + } if (!VerifyUniquePayloadTypes(codecs)) { RTC_LOG(LS_ERROR) << "Codec payload types overlap."; @@ -2138,12 +2213,11 @@ bool WebRtcVoiceReceiveChannel::SetRecvCodecs( // Create a payload type -> SdpAudioFormat map with all the decoders. Fail // unless the factory claims to support all decoders. - std::map decoder_map; - for (const AudioCodec& codec : codecs) { + std::map decoder_map; + for (const webrtc::Codec& codec : codecs) { // Log a warning if a codec's payload type is changing. This used to be // treated as an error. It's abnormal, but not really illegal. 
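The SetRecvCodecs hunk above now registers every receive codec's payload type with the call's payload type suggester, keyed by the channel's MID, and fails fast if any registration is rejected. A self-contained sketch of that register-or-bail loop, with a hypothetical MiniPtRegistry standing in for the AddLocalMapping call shown above:

#include <map>
#include <string>
#include <vector>

// Hypothetical payload type registry; the real code goes through
// call_->GetPayloadTypeSuggester()->AddLocalMapping(mid, id, codec).
class MiniPtRegistry {
 public:
  // A payload type may map to only one codec name per MID.
  bool AddLocalMapping(const std::string& mid, int payload_type,
                       const std::string& codec_name) {
    auto [it, inserted] = mappings_[mid].emplace(payload_type, codec_name);
    return inserted || it->second == codec_name;
  }

 private:
  std::map<std::string, std::map<int, std::string>> mappings_;
};

struct MiniCodec {
  int id = -1;
  std::string name;
};

// Mirrors the loop above: register each codec and fail fast on conflict.
bool RegisterRecvCodecs(MiniPtRegistry& registry, const std::string& mid,
                        const std::vector<MiniCodec>& codecs) {
  for (const MiniCodec& codec : codecs) {
    if (!registry.AddLocalMapping(mid, codec.id, codec.name)) {
      return false;
    }
  }
  return true;
}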
- absl::optional old_codec = - FindCodec(recv_codecs_, codec, &call_->trials()); + std::optional old_codec = FindCodec(recv_codecs_, codec); if (old_codec && old_codec->id != codec.id) { RTC_LOG(LS_WARNING) << codec.name << " mapped to a second payload type (" << codec.id << ", was already mapped to " @@ -2153,7 +2227,7 @@ bool WebRtcVoiceReceiveChannel::SetRecvCodecs( if (!IsCodec(codec, kCnCodecName) && !IsCodec(codec, kDtmfCodecName) && !IsCodec(codec, kRedCodecName) && !engine()->decoder_factory_->IsSupportedDecoder(format)) { - RTC_LOG(LS_ERROR) << "Unsupported codec: " << rtc::ToString(format); + RTC_LOG(LS_ERROR) << "Unsupported codec: " << absl::StrCat(format); return false; } // We allow adding new codecs but don't allow changing the payload type of @@ -2202,6 +2276,18 @@ bool WebRtcVoiceReceiveChannel::SetRecvCodecs( return true; } +void WebRtcVoiceReceiveChannel::SetRtcpMode(::webrtc::RtcpMode mode) { + // Check if the reduced size RTCP status changed on the + // preferred send codec, and in that case reconfigure all receive streams. + if (recv_rtcp_mode_ != mode) { + RTC_LOG(LS_INFO) << "Changing RTCP mode on receive streams."; + recv_rtcp_mode_ = mode; + for (auto& kv : recv_streams_) { + kv.second->SetRtcpMode(recv_rtcp_mode_); + } + } +} + void WebRtcVoiceReceiveChannel::SetReceiveNackEnabled(bool enabled) { // Check if the NACK status has changed on the // preferred send codec, and in that case reconfigure all receive streams. @@ -2275,7 +2361,7 @@ bool WebRtcVoiceReceiveChannel::AddRecvStream(const StreamParams& sp) { // Create a new channel for receiving audio data. auto config = BuildReceiveStreamConfig( ssrc, receiver_reports_ssrc_, recv_nack_enabled_, enable_non_sender_rtt_, - sp.stream_ids(), recv_rtp_extensions_, transport(), + recv_rtcp_mode_, sp.stream_ids(), recv_rtp_extensions_, transport(), engine()->decoder_factory_, decoder_map_, codec_pair_id_, engine()->audio_jitter_buffer_max_packets_, engine()->audio_jitter_buffer_fast_accelerate_, @@ -2320,9 +2406,9 @@ void WebRtcVoiceReceiveChannel::ResetUnsignaledRecvStream() { } } -absl::optional WebRtcVoiceReceiveChannel::GetUnsignaledSsrc() const { +std::optional WebRtcVoiceReceiveChannel::GetUnsignaledSsrc() const { if (unsignaled_recv_ssrcs_.empty()) { - return absl::nullopt; + return std::nullopt; } // In the event of multiple unsignaled ssrcs, the last in the vector will be // the most recent one (the one forwarded to the MediaStreamTrack). 
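The new WebRtcVoiceReceiveChannel::SetRtcpMode above only acts when the mode actually changes and then fans the new value out to every existing receive stream; newly added streams pick it up through their build-time config. A minimal sketch of that change-only fan-out with hypothetical MiniStream and MiniReceiveChannel types, not WebRTC classes:

#include <cstdint>
#include <map>
#include <memory>

enum class MiniRtcpMode { kCompound, kReducedSize };

// Hypothetical per-SSRC stream wrapper.
struct MiniStream {
  MiniRtcpMode mode = MiniRtcpMode::kCompound;
  void SetRtcpMode(MiniRtcpMode m) { mode = m; }
};

class MiniReceiveChannel {
 public:
  // No-op when the mode is unchanged; otherwise propagate to all streams,
  // matching the loop in the hunk above.
  void SetRtcpMode(MiniRtcpMode mode) {
    if (mode_ == mode) {
      return;
    }
    mode_ = mode;
    for (auto& [ssrc, stream] : streams_) {
      stream->SetRtcpMode(mode_);
    }
  }

  std::map<uint32_t, std::unique_ptr<MiniStream>> streams_;

 private:
  MiniRtcpMode mode_ = MiniRtcpMode::kCompound;
};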
@@ -2355,17 +2441,17 @@ void WebRtcVoiceReceiveChannel::OnDemuxerCriteriaUpdateComplete() {} bool WebRtcVoiceReceiveChannel::SetOutputVolume(uint32_t ssrc, double volume) { RTC_DCHECK_RUN_ON(worker_thread_); - RTC_LOG(LS_INFO) << rtc::StringFormat("WRVMC::%s({ssrc=%u}, {volume=%.2f})", - __func__, ssrc, volume); + RTC_LOG(LS_INFO) << webrtc::StringFormat( + "WRVMC::%s({ssrc=%u}, {volume=%.2f})", __func__, ssrc, volume); const auto it = recv_streams_.find(ssrc); if (it == recv_streams_.end()) { - RTC_LOG(LS_WARNING) << rtc::StringFormat( + RTC_LOG(LS_WARNING) << webrtc::StringFormat( "WRVMC::%s => (WARNING: no receive stream for SSRC %u)", __func__, ssrc); return false; } it->second->SetOutputVolume(volume); - RTC_LOG(LS_INFO) << rtc::StringFormat( + RTC_LOG(LS_INFO) << webrtc::StringFormat( "WRVMC::%s => (stream with SSRC %u now uses volume %.2f)", __func__, ssrc, volume); return true; @@ -2396,21 +2482,21 @@ bool WebRtcVoiceReceiveChannel::SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, default_recv_base_minimum_delay_ms_ = delay_ms; ssrcs = unsignaled_recv_ssrcs_; } - for (uint32_t ssrc : ssrcs) { - const auto it = recv_streams_.find(ssrc); + for (uint32_t recv_ssrc : ssrcs) { + const auto it = recv_streams_.find(recv_ssrc); if (it == recv_streams_.end()) { RTC_LOG(LS_WARNING) << "SetBaseMinimumPlayoutDelayMs: no recv stream " - << ssrc; + << recv_ssrc; return false; } it->second->SetBaseMinimumPlayoutDelayMs(delay_ms); RTC_LOG(LS_INFO) << "SetBaseMinimumPlayoutDelayMs() to " << delay_ms - << " for recv stream with ssrc " << ssrc; + << " for recv stream with ssrc " << recv_ssrc; } return true; } -absl::optional WebRtcVoiceReceiveChannel::GetBaseMinimumPlayoutDelayMs( +std::optional WebRtcVoiceReceiveChannel::GetBaseMinimumPlayoutDelayMs( uint32_t ssrc) const { // SSRC of 0 represents the default receive stream. if (ssrc == 0) { @@ -2422,12 +2508,12 @@ absl::optional WebRtcVoiceReceiveChannel::GetBaseMinimumPlayoutDelayMs( if (it != recv_streams_.end()) { return it->second->GetBaseMinimumPlayoutDelayMs(); } - return absl::nullopt; + return std::nullopt; } void WebRtcVoiceReceiveChannel::SetFrameDecryptor( uint32_t ssrc, - rtc::scoped_refptr frame_decryptor) { + scoped_refptr frame_decryptor) { RTC_DCHECK_RUN_ON(worker_thread_); auto matching_stream = recv_streams_.find(ssrc); if (matching_stream != recv_streams_.end()) { @@ -2440,7 +2526,7 @@ void WebRtcVoiceReceiveChannel::SetFrameDecryptor( } void WebRtcVoiceReceiveChannel::OnPacketReceived( - const webrtc::RtpPacketReceived& packet) { + const RtpPacketReceived& packet) { RTC_DCHECK_RUN_ON(&network_thread_checker_); // TODO(bugs.webrtc.org/11993): This code is very similar to what @@ -2454,18 +2540,18 @@ void WebRtcVoiceReceiveChannel::OnPacketReceived( RTC_DCHECK_RUN_ON(worker_thread_); // TODO(bugs.webrtc.org/7135): extensions in `packet` is currently set - // in RtpTransport and does not neccessarily include extensions specific + // in RtpTransport and does not necessarily include extensions specific // to this channel/MID. Also see comment in // BaseChannel::MaybeUpdateDemuxerAndRtpExtensions_w. 
// It would likely be good if extensions where merged per BUNDLE and // applied directly in RtpTransport::DemuxPacket; packet.IdentifyExtensions(recv_rtp_extension_map_); if (!packet.arrival_time().IsFinite()) { - packet.set_arrival_time(webrtc::Timestamp::Micros(rtc::TimeMicros())); + packet.set_arrival_time(Timestamp::Micros(webrtc::TimeMicros())); } call_->Receiver()->DeliverRtpPacket( - webrtc::MediaType::AUDIO, std::move(packet), + MediaType::AUDIO, std::move(packet), absl::bind_front( &WebRtcVoiceReceiveChannel::MaybeCreateDefaultReceiveStream, this)); @@ -2473,7 +2559,7 @@ void WebRtcVoiceReceiveChannel::OnPacketReceived( } bool WebRtcVoiceReceiveChannel::MaybeCreateDefaultReceiveStream( - const webrtc::RtpPacketReceived& packet) { + const RtpPacketReceived& packet) { // Create an unsignaled receive stream for this previously not received // ssrc. If there already is N unsignaled receive streams, delete the // oldest. See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5208 @@ -2512,7 +2598,7 @@ bool WebRtcVoiceReceiveChannel::MaybeCreateDefaultReceiveStream( auto it = recv_streams_.find(drop_ssrc); it->second->SetRawAudioSink(nullptr); } - std::unique_ptr proxy_sink( + std::unique_ptr proxy_sink( new ProxySink(default_sink_.get())); SetRawAudioSink(ssrc, std::move(proxy_sink)); } @@ -2545,7 +2631,7 @@ bool WebRtcVoiceReceiveChannel::GetStats(VoiceMediaReceiveInfo* info, continue; } } - webrtc::AudioReceiveStreamInterface::Stats stats = + AudioReceiveStreamInterface::Stats stats = stream.second->GetStats(get_and_clear_legacy_stats); VoiceReceiverInfo rinfo; rinfo.add_ssrc(stats.remote_ssrc); @@ -2605,17 +2691,18 @@ bool WebRtcVoiceReceiveChannel::GetStats(VoiceMediaReceiveInfo* info, stats.relative_packet_arrival_delay_seconds; rinfo.interruption_count = stats.interruption_count; rinfo.total_interruption_duration_ms = stats.total_interruption_duration_ms; - rinfo.last_sender_report_timestamp_ms = - stats.last_sender_report_timestamp_ms; - rinfo.last_sender_report_remote_timestamp_ms = - stats.last_sender_report_remote_timestamp_ms; + rinfo.last_sender_report_timestamp = stats.last_sender_report_timestamp; + rinfo.last_sender_report_utc_timestamp = + stats.last_sender_report_utc_timestamp; + rinfo.last_sender_report_remote_utc_timestamp = + stats.last_sender_report_remote_utc_timestamp; rinfo.sender_reports_packets_sent = stats.sender_reports_packets_sent; rinfo.sender_reports_bytes_sent = stats.sender_reports_bytes_sent; rinfo.sender_reports_reports_count = stats.sender_reports_reports_count; rinfo.round_trip_time = stats.round_trip_time; rinfo.round_trip_time_measurements = stats.round_trip_time_measurements; rinfo.total_round_trip_time = stats.total_round_trip_time; - + rinfo.total_processing_delay_seconds = stats.total_processing_delay_seconds; if (recv_nack_enabled_) { rinfo.nacks_sent = stats.nacks_sent; } @@ -2634,7 +2721,7 @@ void WebRtcVoiceReceiveChannel::FillReceiveCodecStats( VoiceMediaReceiveInfo* voice_media_info) { for (const auto& receiver : voice_media_info->receivers) { auto codec = - absl::c_find_if(recv_codecs_, [&receiver](const AudioCodec& c) { + absl::c_find_if(recv_codecs_, [&receiver](const webrtc::Codec& c) { return receiver.codec_payload_type && *receiver.codec_payload_type == c.id; }); @@ -2647,7 +2734,7 @@ void WebRtcVoiceReceiveChannel::FillReceiveCodecStats( void WebRtcVoiceReceiveChannel::SetRawAudioSink( uint32_t ssrc, - std::unique_ptr sink) { + std::unique_ptr sink) { RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_VERBOSE) << 
"WebRtcVoiceMediaChannel::SetRawAudioSink: ssrc:" << ssrc << " " << (sink ? "(ptr)" : "NULL"); @@ -2660,31 +2747,31 @@ void WebRtcVoiceReceiveChannel::SetRawAudioSink( } void WebRtcVoiceReceiveChannel::SetDefaultRawAudioSink( - std::unique_ptr sink) { + std::unique_ptr sink) { RTC_DCHECK_RUN_ON(worker_thread_); RTC_LOG(LS_VERBOSE) << "WebRtcVoiceMediaChannel::SetDefaultRawAudioSink:"; if (!unsignaled_recv_ssrcs_.empty()) { - std::unique_ptr proxy_sink( + std::unique_ptr proxy_sink( sink ? new ProxySink(sink.get()) : nullptr); SetRawAudioSink(unsignaled_recv_ssrcs_.back(), std::move(proxy_sink)); } default_sink_ = std::move(sink); } -std::vector WebRtcVoiceReceiveChannel::GetSources( +std::vector WebRtcVoiceReceiveChannel::GetSources( uint32_t ssrc) const { auto it = recv_streams_.find(ssrc); if (it == recv_streams_.end()) { RTC_LOG(LS_ERROR) << "Attempting to get contributing sources for SSRC:" << ssrc << " which doesn't exist."; - return std::vector(); + return std::vector(); } return it->second->GetSources(); } void WebRtcVoiceReceiveChannel::SetDepacketizerToDecoderFrameTransformer( uint32_t ssrc, - rtc::scoped_refptr frame_transformer) { + scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(worker_thread_); if (ssrc == 0) { // If the receiver is unsignaled, save the frame transformer and set it when @@ -2713,4 +2800,4 @@ bool WebRtcVoiceReceiveChannel::MaybeDeregisterUnsignaledRecvStream( } return false; } -} // namespace cricket +} // namespace webrtc diff --git a/media/engine/webrtc_voice_engine.h b/media/engine/webrtc_voice_engine.h index a3e6d3acab..fc4a0409d5 100644 --- a/media/engine/webrtc_voice_engine.h +++ b/media/engine/webrtc_voice_engine.h @@ -16,6 +16,7 @@ #include #include +#include #include #include #include @@ -23,58 +24,51 @@ #include "absl/functional/any_invocable.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/audio/audio_device.h" #include "api/audio/audio_frame_processor.h" #include "api/audio/audio_mixer.h" +#include "api/audio/audio_processing.h" #include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_encoder_factory.h" #include "api/audio_codecs/audio_format.h" #include "api/audio_options.h" #include "api/call/audio_sink.h" -#include "api/call/transport.h" #include "api/crypto/crypto_options.h" #include "api/crypto/frame_decryptor_interface.h" #include "api/crypto/frame_encryptor_interface.h" -#include "api/field_trials_view.h" +#include "api/environment/environment.h" #include "api/frame_transformer_interface.h" +#include "api/media_types.h" #include "api/rtc_error.h" +#include "api/rtp_headers.h" #include "api/rtp_parameters.h" #include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" -#include "api/task_queue/task_queue_factory.h" #include "api/transport/rtp/rtp_source.h" #include "call/audio_send_stream.h" #include "call/audio_state.h" #include "call/call.h" +#include "media/base/audio_source.h" #include "media/base/codec.h" #include "media/base/media_channel.h" #include "media/base/media_channel_impl.h" #include "media/base/media_config.h" #include "media/base/media_engine.h" -#include "media/base/rtp_utils.h" #include "media/base/stream_params.h" -#include "modules/async_audio_processing/async_audio_processing.h" -#include "modules/audio_device/include/audio_device.h" -#include 
"modules/audio_processing/include/audio_processing.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" #include "rtc_base/system/file_wrapper.h" -#include "rtc_base/task_queue.h" namespace webrtc { -class AudioFrameProcessor; -} - -namespace cricket { -class AudioSource; +class AudioFrameProcessor; // WebRtcVoiceEngine is a class to be used with CompositeMediaEngine. // It uses the WebRtc VoiceEngine library for audio handling. @@ -83,18 +77,13 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { friend class WebRtcVoiceReceiveChannel; public: - WebRtcVoiceEngine( - webrtc::TaskQueueFactory* task_queue_factory, - webrtc::AudioDeviceModule* adm, - const rtc::scoped_refptr& encoder_factory, - const rtc::scoped_refptr& decoder_factory, - rtc::scoped_refptr audio_mixer, - rtc::scoped_refptr audio_processing, - // TODO(bugs.webrtc.org/15111): - // Remove the raw AudioFrameProcessor pointer in the follow-up. - webrtc::AudioFrameProcessor* audio_frame_processor, - std::unique_ptr owned_audio_frame_processor, - const webrtc::FieldTrialsView& trials); + WebRtcVoiceEngine(const Environment& env, + scoped_refptr adm, + scoped_refptr encoder_factory, + scoped_refptr decoder_factory, + scoped_refptr audio_mixer, + scoped_refptr audio_processing, + std::unique_ptr audio_frame_processor); WebRtcVoiceEngine() = delete; WebRtcVoiceEngine(const WebRtcVoiceEngine&) = delete; @@ -104,38 +93,44 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { // Does initialization that needs to occur on the worker thread. void Init() override; - rtc::scoped_refptr GetAudioState() const override; + scoped_refptr GetAudioState() const override; std::unique_ptr CreateSendChannel( - webrtc::Call* call, + Call* call, const MediaConfig& config, const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::AudioCodecPairId codec_pair_id) override; + const CryptoOptions& crypto_options, + AudioCodecPairId codec_pair_id) override; std::unique_ptr CreateReceiveChannel( - webrtc::Call* call, + Call* call, const MediaConfig& config, const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::AudioCodecPairId codec_pair_id) override; + const CryptoOptions& crypto_options, + AudioCodecPairId codec_pair_id) override; - const std::vector& send_codecs() const override; - const std::vector& recv_codecs() const override; - std::vector GetRtpHeaderExtensions() + const std::vector& LegacySendCodecs() const override; + const std::vector& LegacyRecvCodecs() const override; + + AudioEncoderFactory* encoder_factory() const override { + return encoder_factory_.get(); + } + AudioDecoderFactory* decoder_factory() const override { + return decoder_factory_.get(); + } + std::vector GetRtpHeaderExtensions() const override; // Starts AEC dump using an existing file. A maximum file size in bytes can be // specified. When the maximum file size is reached, logging is stopped and // the file is closed. If max_size_bytes is set to <= 0, no limit will be // used. - bool StartAecDump(webrtc::FileWrapper file, int64_t max_size_bytes) override; + bool StartAecDump(FileWrapper file, int64_t max_size_bytes) override; // Stops AEC dump. 
void StopAecDump() override; - absl::optional GetAudioDeviceStats() - override; + std::optional GetAudioDeviceStats() override; private: // Every option that is "set" will be applied. Every option not "set" will be @@ -143,37 +138,29 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { // easily at any time. void ApplyOptions(const AudioOptions& options); - webrtc::TaskQueueFactory* const task_queue_factory_; - std::unique_ptr low_priority_worker_queue_; + const Environment env_; + std::unique_ptr low_priority_worker_queue_; - webrtc::AudioDeviceModule* adm(); - webrtc::AudioProcessing* apm() const; - webrtc::AudioState* audio_state(); + AudioDeviceModule* adm(); + AudioProcessing* apm() const; + AudioState* audio_state(); - std::vector CollectCodecs( - const std::vector& specs) const; - - webrtc::SequenceChecker signal_thread_checker_{ - webrtc::SequenceChecker::kDetached}; - webrtc::SequenceChecker worker_thread_checker_{ - webrtc::SequenceChecker::kDetached}; + SequenceChecker signal_thread_checker_{SequenceChecker::kDetached}; + SequenceChecker worker_thread_checker_{SequenceChecker::kDetached}; // The audio device module. - rtc::scoped_refptr adm_; - rtc::scoped_refptr encoder_factory_; - rtc::scoped_refptr decoder_factory_; - rtc::scoped_refptr audio_mixer_; + scoped_refptr adm_; + scoped_refptr encoder_factory_; + scoped_refptr decoder_factory_; + scoped_refptr audio_mixer_; // The audio processing module. - rtc::scoped_refptr apm_; + scoped_refptr apm_; // Asynchronous audio processing. - // TODO(bugs.webrtc.org/15111): - // Remove the raw AudioFrameProcessor pointer in the follow-up. - webrtc::AudioFrameProcessor* const audio_frame_processor_; - std::unique_ptr owned_audio_frame_processor_; + std::unique_ptr audio_frame_processor_; // The primary instance of WebRtc VoiceEngine. 
- rtc::scoped_refptr audio_state_; - std::vector send_codecs_; - std::vector recv_codecs_; + scoped_refptr audio_state_; + std::vector send_codecs_; + std::vector recv_codecs_; bool is_dumping_aec_ = false; bool initialized_ = false; @@ -183,6 +170,7 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { int audio_jitter_buffer_min_delay_ms_ = 0; const bool minimized_remsampling_on_mobile_trial_enabled_; + const bool payload_types_in_transport_trial_enabled_; }; class WebRtcVoiceSendChannel final : public MediaChannelUtil, @@ -191,9 +179,9 @@ class WebRtcVoiceSendChannel final : public MediaChannelUtil, WebRtcVoiceSendChannel(WebRtcVoiceEngine* engine, const MediaConfig& config, const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::Call* call, - webrtc::AudioCodecPairId codec_pair_id); + const CryptoOptions& crypto_options, + Call* call, + AudioCodecPairId codec_pair_id); WebRtcVoiceSendChannel() = delete; WebRtcVoiceSendChannel(const WebRtcVoiceSendChannel&) = delete; @@ -201,14 +189,14 @@ class WebRtcVoiceSendChannel final : public MediaChannelUtil, ~WebRtcVoiceSendChannel() override; - MediaType media_type() const override { return MEDIA_TYPE_AUDIO; } + MediaType media_type() const override { return MediaType::AUDIO; } VideoMediaSendChannelInterface* AsVideoSendChannel() override { RTC_CHECK_NOTREACHED(); return nullptr; } VoiceMediaSendChannelInterface* AsVoiceSendChannel() override { return this; } - absl::optional GetSendCodec() const override; + std::optional GetSendCodec() const override; // Functions imported from MediaChannelUtil void SetInterface(MediaChannelNetworkInterface* iface) override { @@ -228,11 +216,10 @@ class WebRtcVoiceSendChannel final : public MediaChannelUtil, const AudioOptions& options() const { return options_; } bool SetSenderParameters(const AudioSenderParameter& params) override; - webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const override; - webrtc::RTCError SetRtpSendParameters( - uint32_t ssrc, - const webrtc::RtpParameters& parameters, - webrtc::SetParametersCallback callback) override; + RtpParameters GetRtpSendParameters(uint32_t ssrc) const override; + RTCError SetRtpSendParameters(uint32_t ssrc, + const RtpParameters& parameters, + SetParametersCallback callback) override; void SetSend(bool send) override; bool SetAudioSend(uint32_t ssrc, @@ -249,16 +236,16 @@ class WebRtcVoiceSendChannel final : public MediaChannelUtil, // Set a frame encryptor to a particular ssrc that will intercept all // outgoing audio payloads frames and attempt to encrypt them and forward the // result to the packetizer. - void SetFrameEncryptor(uint32_t ssrc, - rtc::scoped_refptr - frame_encryptor) override; + void SetFrameEncryptor( + uint32_t ssrc, + scoped_refptr frame_encryptor) override; bool CanInsertDtmf() override; bool InsertDtmf(uint32_t ssrc, int event, int duration) override; - void OnPacketSent(const rtc::SentPacket& sent_packet) override; + void OnPacketSent(const SentPacketInfo& sent_packet) override; void OnNetworkRouteChanged(absl::string_view transport_name, - const rtc::NetworkRoute& network_route) override; + const NetworkRoute& network_route) override; void OnReadyToSend(bool ready) override; bool GetStats(VoiceMediaSendInfo* info) override; @@ -266,8 +253,7 @@ class WebRtcVoiceSendChannel final : public MediaChannelUtil, // encoded frames before sending them out the network. 
void SetEncoderToPacketizerFrameTransformer( uint32_t ssrc, - rtc::scoped_refptr frame_transformer) - override; + scoped_refptr frame_transformer) override; bool SenderNackEnabled() const override { if (!send_codec_spec_) { @@ -291,7 +277,7 @@ class WebRtcVoiceSendChannel final : public MediaChannelUtil, private: bool SetOptions(const AudioOptions& options); bool SetSendCodecs(const std::vector& codecs, - absl::optional preferred_codec); + std::optional preferred_codec); bool SetLocalSource(uint32_t ssrc, AudioSource* source); bool MuteStream(uint32_t ssrc, bool mute); @@ -299,41 +285,39 @@ class WebRtcVoiceSendChannel final : public MediaChannelUtil, bool SetMaxSendBitrate(int bps); void SetupRecording(); - webrtc::TaskQueueBase* const worker_thread_; - webrtc::ScopedTaskSafety task_safety_; - webrtc::SequenceChecker network_thread_checker_{ - webrtc::SequenceChecker::kDetached}; + TaskQueueBase* const worker_thread_; + ScopedTaskSafety task_safety_; + SequenceChecker network_thread_checker_{SequenceChecker::kDetached}; WebRtcVoiceEngine* const engine_ = nullptr; - std::vector send_codecs_; + std::vector send_codecs_; int max_send_bitrate_bps_ = 0; AudioOptions options_; - absl::optional dtmf_payload_type_; + std::optional dtmf_payload_type_; int dtmf_payload_freq_ = -1; bool enable_non_sender_rtt_ = false; bool send_ = false; - webrtc::Call* const call_ = nullptr; + Call* const call_ = nullptr; const MediaConfig::Audio audio_config_; class WebRtcAudioSendStream; std::map send_streams_; - std::vector send_rtp_extensions_; + std::vector send_rtp_extensions_; std::string mid_; + RtcpMode rtcp_mode_; - absl::optional - send_codec_spec_; + std::optional send_codec_spec_; // TODO(kwiberg): Per-SSRC codec pair IDs? - const webrtc::AudioCodecPairId codec_pair_id_; + const AudioCodecPairId codec_pair_id_; // Per peer connection crypto options that last for the lifetime of the peer // connection. 
- const webrtc::CryptoOptions crypto_options_; - rtc::scoped_refptr - unsignaled_frame_transformer_; + const CryptoOptions crypto_options_; + scoped_refptr unsignaled_frame_transformer_; void FillSendCodecStats(VoiceMediaSendInfo* voice_media_info); @@ -352,9 +336,9 @@ class WebRtcVoiceReceiveChannel final WebRtcVoiceReceiveChannel(WebRtcVoiceEngine* engine, const MediaConfig& config, const AudioOptions& options, - const webrtc::CryptoOptions& crypto_options, - webrtc::Call* call, - webrtc::AudioCodecPairId codec_pair_id); + const CryptoOptions& crypto_options, + Call* call, + AudioCodecPairId codec_pair_id); WebRtcVoiceReceiveChannel() = delete; WebRtcVoiceReceiveChannel(const WebRtcVoiceReceiveChannel&) = delete; @@ -363,7 +347,7 @@ class WebRtcVoiceReceiveChannel final ~WebRtcVoiceReceiveChannel() override; - MediaType media_type() const override { return MEDIA_TYPE_AUDIO; } + MediaType media_type() const override { return MediaType::AUDIO; } VideoMediaReceiveChannelInterface* AsVideoReceiveChannel() override { RTC_CHECK_NOTREACHED(); @@ -379,14 +363,14 @@ class WebRtcVoiceReceiveChannel final MediaChannelUtil::SetInterface(iface); } bool SetReceiverParameters(const AudioReceiverParameters& params) override; - webrtc::RtpParameters GetRtpReceiverParameters(uint32_t ssrc) const override; - webrtc::RtpParameters GetDefaultRtpReceiveParameters() const override; + RtpParameters GetRtpReceiverParameters(uint32_t ssrc) const override; + RtpParameters GetDefaultRtpReceiveParameters() const override; void SetPlayout(bool playout) override; bool AddRecvStream(const StreamParams& sp) override; bool RemoveRecvStream(uint32_t ssrc) override; void ResetUnsignaledRecvStream() override; - absl::optional GetUnsignaledSsrc() const override; + std::optional GetUnsignaledSsrc() const override; void ChooseReceiverReportSsrc(const std::set& choices) override; @@ -397,44 +381,43 @@ class WebRtcVoiceReceiveChannel final // Set a frame decryptor to a particular ssrc that will intercept all // incoming audio payloads and attempt to decrypt them before forwarding the // result. - void SetFrameDecryptor(uint32_t ssrc, - rtc::scoped_refptr - frame_decryptor) override; + void SetFrameDecryptor( + uint32_t ssrc, + scoped_refptr frame_decryptor) override; bool SetOutputVolume(uint32_t ssrc, double volume) override; // Applies the new volume to current and future unsignaled streams. bool SetDefaultOutputVolume(double volume) override; bool SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) override; - absl::optional GetBaseMinimumPlayoutDelayMs( - uint32_t ssrc) const override; + std::optional GetBaseMinimumPlayoutDelayMs(uint32_t ssrc) const override; - void OnPacketReceived(const webrtc::RtpPacketReceived& packet) override; + void OnPacketReceived(const RtpPacketReceived& packet) override; bool GetStats(VoiceMediaReceiveInfo* info, bool get_and_clear_legacy_stats) override; // Set the audio sink for an existing stream. - void SetRawAudioSink( - uint32_t ssrc, - std::unique_ptr sink) override; + void SetRawAudioSink(uint32_t ssrc, + std::unique_ptr sink) override; // Will set the audio sink on the latest unsignaled stream, future or // current. Only one stream at a time will use the sink. 
void SetDefaultRawAudioSink( - std::unique_ptr sink) override; + std::unique_ptr sink) override; - std::vector GetSources(uint32_t ssrc) const override; + std::vector GetSources(uint32_t ssrc) const override; void SetDepacketizerToDecoderFrameTransformer( uint32_t ssrc, - rtc::scoped_refptr frame_transformer) - override; + scoped_refptr frame_transformer) override; + ::webrtc::RtcpMode RtcpMode() const override { return recv_rtcp_mode_; } + void SetRtcpMode(::webrtc::RtcpMode mode) override; void SetReceiveNackEnabled(bool enabled) override; void SetReceiveNonSenderRttEnabled(bool enabled) override; private: bool SetOptions(const AudioOptions& options); - bool SetRecvCodecs(const std::vector& codecs); + bool SetRecvCodecs(const std::vector& codecs); bool SetLocalSource(uint32_t ssrc, AudioSource* source); bool MuteStream(uint32_t ssrc, bool mute); @@ -444,28 +427,28 @@ class WebRtcVoiceReceiveChannel final // Expected to be invoked once per packet that belongs to this channel that // can not be demuxed. Returns true if a default receive stream has been // created. - bool MaybeCreateDefaultReceiveStream(const webrtc::RtpPacketReceived& packet); + bool MaybeCreateDefaultReceiveStream(const RtpPacketReceived& packet); // Check if 'ssrc' is an unsignaled stream, and if so mark it as not being // unsignaled anymore (i.e. it is now removed, or signaled), and return true. bool MaybeDeregisterUnsignaledRecvStream(uint32_t ssrc); - webrtc::TaskQueueBase* const worker_thread_; - webrtc::ScopedTaskSafety task_safety_; - webrtc::SequenceChecker network_thread_checker_{ - webrtc::SequenceChecker::kDetached}; + TaskQueueBase* const worker_thread_; + ScopedTaskSafety task_safety_; + SequenceChecker network_thread_checker_{SequenceChecker::kDetached}; WebRtcVoiceEngine* const engine_ = nullptr; // TODO(kwiberg): decoder_map_ and recv_codecs_ store the exact same // information, in slightly different formats. Eliminate recv_codecs_. - std::map decoder_map_; - std::vector recv_codecs_; + std::map decoder_map_; + std::vector recv_codecs_; AudioOptions options_; bool recv_nack_enabled_ = false; + ::webrtc::RtcpMode recv_rtcp_mode_ = RtcpMode::kCompound; bool enable_non_sender_rtt_ = false; bool playout_ = false; - webrtc::Call* const call_ = nullptr; + Call* const call_ = nullptr; const MediaConfig::Audio audio_config_; @@ -485,7 +468,7 @@ class WebRtcVoiceReceiveChannel final int default_recv_base_minimum_delay_ms_ = 0; // Sink for latest unsignaled stream - may be set before the stream exists. - std::unique_ptr default_sink_; + std::unique_ptr default_sink_; // Default SSRC to use for RTCP receiver reports in case of no signaled // send streams. See: https://code.google.com/p/webrtc/issues/detail?id=4740 // and https://code.google.com/p/chromium/issues/detail?id=547661 @@ -496,27 +479,34 @@ class WebRtcVoiceReceiveChannel final class WebRtcAudioReceiveStream; std::map recv_streams_; - std::vector recv_rtp_extensions_; - webrtc::RtpHeaderExtensionMap recv_rtp_extension_map_; + std::vector recv_rtp_extensions_; + RtpHeaderExtensionMap recv_rtp_extension_map_; - absl::optional - send_codec_spec_; + std::optional send_codec_spec_; // TODO(kwiberg): Per-SSRC codec pair IDs? - const webrtc::AudioCodecPairId codec_pair_id_; + const AudioCodecPairId codec_pair_id_; // Per peer connection crypto options that last for the lifetime of the peer // connection. - const webrtc::CryptoOptions crypto_options_; + const CryptoOptions crypto_options_; // Unsignaled streams have an option to have a frame decryptor set on them. 
- rtc::scoped_refptr - unsignaled_frame_decryptor_; - rtc::scoped_refptr - unsignaled_frame_transformer_; + scoped_refptr unsignaled_frame_decryptor_; + scoped_refptr unsignaled_frame_transformer_; void FillReceiveCodecStats(VoiceMediaReceiveInfo* voice_media_info); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::WebRtcVoiceEngine; +using ::webrtc::WebRtcVoiceReceiveChannel; +using ::webrtc::WebRtcVoiceSendChannel; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_ENGINE_WEBRTC_VOICE_ENGINE_H_ diff --git a/media/engine/webrtc_voice_engine_unittest.cc b/media/engine/webrtc_voice_engine_unittest.cc index e1d7805180..4223f8878b 100644 --- a/media/engine/webrtc_voice_engine_unittest.cc +++ b/media/engine/webrtc_voice_engine_unittest.cc @@ -10,41 +10,75 @@ #include "media/engine/webrtc_voice_engine.h" +#include +#include +#include +#include #include +#include +#include +#include #include +#include -#include "absl/memory/memory.h" #include "absl/strings/match.h" -#include "absl/types/optional.h" +#include "api/audio/audio_processing.h" +#include "api/audio/builtin_audio_processing_builder.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/audio_options.h" +#include "api/call/audio_sink.h" +#include "api/crypto/crypto_options.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/make_ref_counted.h" #include "api/media_types.h" -#include "api/rtc_event_log/rtc_event_log.h" +#include "api/priority.h" +#include "api/ref_count.h" +#include "api/rtc_error.h" +#include "api/rtp_headers.h" #include "api/rtp_parameters.h" #include "api/scoped_refptr.h" -#include "api/task_queue/default_task_queue_factory.h" -#include "api/transport/field_trial_based_config.h" +#include "api/transport/bitrate_settings.h" +#include "api/transport/rtp/rtp_source.h" +#include "call/audio_receive_stream.h" +#include "call/audio_send_stream.h" +#include "call/audio_state.h" #include "call/call.h" +#include "call/call_config.h" +#include "call/payload_type_picker.h" +#include "media/base/audio_source.h" #include "media/base/codec.h" -#include "media/base/fake_media_engine.h" #include "media/base/fake_network_interface.h" #include "media/base/fake_rtp.h" #include "media/base/media_channel.h" +#include "media/base/media_config.h" #include "media/base/media_constants.h" +#include "media/base/media_engine.h" +#include "media/base/stream_params.h" #include "media/engine/fake_webrtc_call.h" #include "modules/audio_device/include/mock_audio_device.h" #include "modules/audio_mixer/audio_mixer_impl.h" #include "modules/audio_processing/include/mock_audio_processing.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/arraysize.h" #include "rtc_base/byte_order.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/dscp.h" #include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/thread.h" +#include "test/gmock.h" #include "test/gtest.h" #include 
"test/mock_audio_decoder_factory.h" #include "test/mock_audio_encoder_factory.h" #include "test/scoped_key_value_config.h" +namespace { using ::testing::_; using ::testing::ContainerEq; using ::testing::Contains; @@ -55,30 +89,35 @@ using ::testing::ReturnPointee; using ::testing::SaveArg; using ::testing::StrictMock; using ::testing::UnorderedElementsAreArray; - -namespace { -using webrtc::BitrateConstraints; +using ::webrtc::AudioProcessing; +using ::webrtc::BitrateConstraints; +using ::webrtc::BuiltinAudioProcessingBuilder; +using ::webrtc::Call; +using ::webrtc::CallConfig; +using ::webrtc::CreateEnvironment; +using ::webrtc::Environment; +using ::webrtc::scoped_refptr; constexpr uint32_t kMaxUnsignaledRecvStreams = 4; -const cricket::AudioCodec kPcmuCodec = - cricket::CreateAudioCodec(0, "PCMU", 8000, 1); -const cricket::AudioCodec kOpusCodec = - cricket::CreateAudioCodec(111, "opus", 48000, 2); -const cricket::AudioCodec kG722CodecVoE = - cricket::CreateAudioCodec(9, "G722", 16000, 1); -const cricket::AudioCodec kG722CodecSdp = - cricket::CreateAudioCodec(9, "G722", 8000, 1); -const cricket::AudioCodec kCn8000Codec = - cricket::CreateAudioCodec(13, "CN", 8000, 1); -const cricket::AudioCodec kCn16000Codec = - cricket::CreateAudioCodec(105, "CN", 16000, 1); -const cricket::AudioCodec kRed48000Codec = - cricket::CreateAudioCodec(112, "RED", 48000, 2); -const cricket::AudioCodec kTelephoneEventCodec1 = - cricket::CreateAudioCodec(106, "telephone-event", 8000, 1); -const cricket::AudioCodec kTelephoneEventCodec2 = - cricket::CreateAudioCodec(107, "telephone-event", 32000, 1); +const webrtc::Codec kPcmuCodec = webrtc::CreateAudioCodec(0, "PCMU", 8000, 1); +const webrtc::Codec kOpusCodec = + webrtc::CreateAudioCodec(111, "opus", 48000, 2); +const webrtc::Codec kG722CodecVoE = + webrtc::CreateAudioCodec(9, "G722", 16000, 1); +const webrtc::Codec kG722CodecSdp = + webrtc::CreateAudioCodec(9, "G722", 8000, 1); +const webrtc::Codec kCn8000Codec = webrtc::CreateAudioCodec(13, "CN", 8000, 1); +const webrtc::Codec kCn16000Codec = + webrtc::CreateAudioCodec(105, "CN", 16000, 1); +const webrtc::Codec kRed48000Codec = + webrtc::CreateAudioCodec(112, "RED", 48000, 2); +const webrtc::Codec kTelephoneEventCodec1 = + webrtc::CreateAudioCodec(106, "telephone-event", 8000, 1); +const webrtc::Codec kTelephoneEventCodec2 = + webrtc::CreateAudioCodec(107, "telephone-event", 32000, 1); +const webrtc::Codec kUnknownCodec = + webrtc::CreateAudioCodec(127, "XYZ", 32000, 1); const uint32_t kSsrc0 = 0; const uint32_t kSsrc1 = 1; @@ -145,19 +184,42 @@ void AdmSetupExpectations(webrtc::test::MockAudioDeviceModule* adm) { EXPECT_CALL(*adm, RegisterAudioCallback(nullptr)).WillOnce(Return(0)); EXPECT_CALL(*adm, Terminate()).WillOnce(Return(0)); } + +std::vector AddIdToCodecs( + webrtc::PayloadTypePicker& pt_mapper, + std::vector&& codecs_in) { + std::vector codecs = std::move(codecs_in); + for (webrtc::Codec& codec : codecs) { + if (codec.id == webrtc::Codec::kIdNotSet) { + auto id_or_error = pt_mapper.SuggestMapping(codec, nullptr); + EXPECT_TRUE(id_or_error.ok()); + if (id_or_error.ok()) { + codec.id = id_or_error.value(); + } + } + } + return codecs; +} + +std::vector ReceiveCodecsWithId( + webrtc::WebRtcVoiceEngine& engine) { + webrtc::PayloadTypePicker pt_mapper; + std::vector codecs = engine.LegacyRecvCodecs(); + return AddIdToCodecs(pt_mapper, std::move(codecs)); +} + } // namespace // Tests that our stub library "works". 
TEST(WebRtcVoiceEngineTestStubLibrary, StartupShutdown) { + Environment env = CreateEnvironment(); for (bool use_null_apm : {false, true}) { - std::unique_ptr task_queue_factory = - webrtc::CreateDefaultTaskQueueFactory(); - rtc::scoped_refptr adm = + webrtc::scoped_refptr adm = webrtc::test::MockAudioDeviceModule::CreateStrict(); AdmSetupExpectations(adm.get()); - rtc::scoped_refptr> apm = + webrtc::scoped_refptr> apm = use_null_apm ? nullptr - : rtc::make_ref_counted< + : webrtc::make_ref_counted< StrictMock>(); webrtc::AudioProcessing::Config apm_config; @@ -167,12 +229,10 @@ TEST(WebRtcVoiceEngineTestStubLibrary, StartupShutdown) { EXPECT_CALL(*apm, DetachAecDump()); } { - webrtc::FieldTrialBasedConfig trials; - cricket::WebRtcVoiceEngine engine( - task_queue_factory.get(), adm.get(), - webrtc::MockAudioEncoderFactory::CreateUnusedFactory(), + webrtc::WebRtcVoiceEngine engine( + env, adm, webrtc::MockAudioEncoderFactory::CreateUnusedFactory(), webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm, - nullptr, nullptr, trials); + nullptr); engine.Init(); } } @@ -180,24 +240,24 @@ TEST(WebRtcVoiceEngineTestStubLibrary, StartupShutdown) { class FakeAudioSink : public webrtc::AudioSinkInterface { public: - void OnData(const Data& audio) override {} + void OnData(const Data& /* audio */) override {} }; -class FakeAudioSource : public cricket::AudioSource { - void SetSink(Sink* sink) override {} +class FakeAudioSource : public webrtc::AudioSource { + void SetSink(Sink* /* sink */) override {} }; class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { public: WebRtcVoiceEngineTestFake() : use_null_apm_(GetParam()), - task_queue_factory_(webrtc::CreateDefaultTaskQueueFactory()), + env_(CreateEnvironment(&field_trials_)), adm_(webrtc::test::MockAudioDeviceModule::CreateStrict()), apm_(use_null_apm_ ? nullptr - : rtc::make_ref_counted< + : webrtc::make_ref_counted< StrictMock>()), - call_(&field_trials_) { + call_(env_) { // AudioDeviceModule. AdmSetupExpectations(adm_.get()); @@ -216,9 +276,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { // factories. Those tests should probably be moved elsewhere. 
auto encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory(); auto decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory(); - engine_.reset(new cricket::WebRtcVoiceEngine( - task_queue_factory_.get(), adm_.get(), encoder_factory, decoder_factory, - nullptr, apm_, nullptr, nullptr, field_trials_)); + engine_ = std::make_unique( + env_, adm_, encoder_factory, decoder_factory, nullptr, apm_, nullptr); engine_->Init(); send_parameters_.codecs.push_back(kPcmuCodec); recv_parameters_.codecs.push_back(kPcmuCodec); @@ -236,24 +295,16 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { bool SetupChannel() { send_channel_ = engine_->CreateSendChannel( - &call_, cricket::MediaConfig(), cricket::AudioOptions(), + &call_, webrtc::MediaConfig(), webrtc::AudioOptions(), webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); receive_channel_ = engine_->CreateReceiveChannel( - &call_, cricket::MediaConfig(), cricket::AudioOptions(), + &call_, webrtc::MediaConfig(), webrtc::AudioOptions(), webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); send_channel_->SetSsrcListChangedCallback( [receive_channel = receive_channel_.get()](const std::set& choices) { receive_channel->ChooseReceiverReportSsrc(choices); }); - send_channel_->SetSendCodecChangedCallback( - [receive_channel = receive_channel_.get(), - send_channel = send_channel_.get()]() { - receive_channel->SetReceiveNackEnabled( - send_channel->SendCodecHasNack()); - receive_channel->SetReceiveNonSenderRttEnabled( - send_channel->SenderNonSenderRttEnabled()); - }); return true; } @@ -265,10 +316,10 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { } bool SetupSendStream() { - return SetupSendStream(cricket::StreamParams::CreateLegacy(kSsrcX)); + return SetupSendStream(webrtc::StreamParams::CreateLegacy(kSsrcX)); } - bool SetupSendStream(const cricket::StreamParams& sp) { + bool SetupSendStream(const webrtc::StreamParams& sp) { if (!SetupChannel()) { return false; } @@ -284,7 +335,7 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { bool AddRecvStream(uint32_t ssrc) { EXPECT_TRUE(receive_channel_); return receive_channel_->AddRecvStream( - cricket::StreamParams::CreateLegacy(ssrc)); + webrtc::StreamParams::CreateLegacy(ssrc)); } void SetupForMultiSendStream() { @@ -300,16 +351,16 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { webrtc::RtpPacketReceived packet; packet.Parse(reinterpret_cast(data), len); receive_channel_->OnPacketReceived(packet); - rtc::Thread::Current()->ProcessMessages(0); + webrtc::Thread::Current()->ProcessMessages(0); } - const cricket::FakeAudioSendStream& GetSendStream(uint32_t ssrc) { + const webrtc::FakeAudioSendStream& GetSendStream(uint32_t ssrc) { const auto* send_stream = call_.GetAudioSendStream(ssrc); EXPECT_TRUE(send_stream); return *send_stream; } - const cricket::FakeAudioReceiveStream& GetRecvStream(uint32_t ssrc) { + const webrtc::FakeAudioReceiveStream& GetRecvStream(uint32_t ssrc) { const auto* recv_stream = call_.GetAudioReceiveStream(ssrc); EXPECT_TRUE(recv_stream); return *recv_stream; @@ -340,15 +391,24 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { send_channel_->SetSend(enable); } - void SetSenderParameters(const cricket::AudioSenderParameter& params) { + void SetSenderParameters(const webrtc::AudioSenderParameter& params) { ASSERT_TRUE(send_channel_); EXPECT_TRUE(send_channel_->SetSenderParameters(params)); + if (receive_channel_) { + 
receive_channel_->SetRtcpMode(params.rtcp.reduced_size + ? webrtc::RtcpMode::kReducedSize + : webrtc::RtcpMode::kCompound); + receive_channel_->SetReceiveNackEnabled( + send_channel_->SendCodecHasNack()); + receive_channel_->SetReceiveNonSenderRttEnabled( + send_channel_->SenderNonSenderRttEnabled()); + } } void SetAudioSend(uint32_t ssrc, bool enable, - cricket::AudioSource* source, - const cricket::AudioOptions* options = nullptr) { + webrtc::AudioSource* source, + const webrtc::AudioOptions* options = nullptr) { ASSERT_TRUE(send_channel_); if (!use_null_apm_) { EXPECT_CALL(*apm_, set_output_will_be_muted(!enable)); @@ -356,15 +416,13 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { EXPECT_TRUE(send_channel_->SetAudioSend(ssrc, enable, options, source)); } - void TestInsertDtmf(uint32_t ssrc, - bool caller, - const cricket::AudioCodec& codec) { + void TestInsertDtmf(uint32_t ssrc, bool caller, const webrtc::Codec& codec) { EXPECT_TRUE(SetupChannel()); if (caller) { // If this is a caller, local description will be applied and add the // send stream. EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrcX))); + webrtc::StreamParams::CreateLegacy(kSsrcX))); } // Test we can only InsertDtmf when the other side supports telephone-event. @@ -380,14 +438,14 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { // If this is callee, there's no active send channel yet. EXPECT_FALSE(send_channel_->InsertDtmf(ssrc, 2, 123)); EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrcX))); + webrtc::StreamParams::CreateLegacy(kSsrcX))); } // Check we fail if the ssrc is invalid. EXPECT_FALSE(send_channel_->InsertDtmf(-1, 1, 111)); // Test send. - cricket::FakeAudioSendStream::TelephoneEvent telephone_event = + webrtc::FakeAudioSendStream::TelephoneEvent telephone_event = GetSendStream(kSsrcX).GetLatestTelephoneEvent(); EXPECT_EQ(-1, telephone_event.payload_type); EXPECT_TRUE(send_channel_->InsertDtmf(ssrc, 2, 123)); @@ -403,7 +461,7 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { // where SetSenderParameters() is called. EXPECT_TRUE(SetupChannel()); EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrcX))); + webrtc::StreamParams::CreateLegacy(kSsrcX))); send_parameters_.extmap_allow_mixed = extmap_allow_mixed; SetSenderParameters(send_parameters_); const webrtc::AudioSendStream::Config& config = GetSendStreamConfig(kSsrcX); @@ -416,7 +474,7 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { EXPECT_TRUE(SetupChannel()); send_channel_->SetExtmapAllowMixed(extmap_allow_mixed); EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrcX))); + webrtc::StreamParams::CreateLegacy(kSsrcX))); const webrtc::AudioSendStream::Config& config = GetSendStreamConfig(kSsrcX); EXPECT_EQ(extmap_allow_mixed, config.rtp.extmap_allow_mixed); @@ -427,11 +485,11 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam { // `max_bitrate` is a parameter to set to SetMaxSendBandwidth(). // `expected_result` is the expected result from SetMaxSendBandwidth(). // `expected_bitrate` is the expected audio bitrate afterward. 
- void TestMaxSendBandwidth(const cricket::AudioCodec& codec, + void TestMaxSendBandwidth(const webrtc::Codec& codec, int max_bitrate, bool expected_result, int expected_bitrate) { - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(codec); parameters.max_bandwidth_bps = max_bitrate; if (expected_result) { @@ -452,8 +510,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> { return send_channel_->SetRtpSendParameters(ssrc, parameters).ok(); } - void SetGlobalMaxBitrate(const cricket::AudioCodec& codec, int bitrate) { - cricket::AudioSenderParameter send_parameters; + void SetGlobalMaxBitrate(const webrtc::Codec& codec, int bitrate) { + webrtc::AudioSenderParameter send_parameters; send_parameters.codecs.push_back(codec); send_parameters.max_bandwidth_bps = bitrate; SetSenderParameters(send_parameters); @@ -467,20 +525,23 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> { EXPECT_EQ(expected_bitrate, spec->target_bitrate_bps); } - absl::optional<int> GetCodecBitrate(int32_t ssrc) { - return GetSendStreamConfig(ssrc).send_codec_spec->target_bitrate_bps; + std::optional<int> GetCodecBitrate(int32_t ssrc) { + auto spec = GetSendStreamConfig(ssrc).send_codec_spec; + if (!spec.has_value()) { + return std::nullopt; + } + return spec->target_bitrate_bps; } int GetMaxBitrate(int32_t ssrc) { return GetSendStreamConfig(ssrc).max_bitrate_bps; } - const absl::optional<std::string>& GetAudioNetworkAdaptorConfig( - int32_t ssrc) { + const std::optional<std::string>& GetAudioNetworkAdaptorConfig(int32_t ssrc) { return GetSendStreamConfig(ssrc).audio_network_adaptor_config; } - void SetAndExpectMaxBitrate(const cricket::AudioCodec& codec, + void SetAndExpectMaxBitrate(const webrtc::Codec& codec, int global_max, int stream_max, bool expected_result, @@ -515,9 +576,9 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> { auto& codecs = send_parameters_.codecs; codecs.clear(); codecs.push_back(kOpusCodec); - codecs[0].params[cricket::kCodecParamMinBitrate] = min_bitrate_kbps; - codecs[0].params[cricket::kCodecParamStartBitrate] = start_bitrate_kbps; - codecs[0].params[cricket::kCodecParamMaxBitrate] = max_bitrate_kbps; + codecs[0].params[webrtc::kCodecParamMinBitrate] = min_bitrate_kbps; + codecs[0].params[webrtc::kCodecParamStartBitrate] = start_bitrate_kbps; + codecs[0].params[webrtc::kCodecParamMaxBitrate] = max_bitrate_kbps; EXPECT_CALL(*call_.GetMockTransportControllerSend(), SetSdpBitrateParameters( AllOf(Field(&BitrateConstraints::min_bitrate_bps, @@ -557,7 +618,7 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> { // Ensure extension is set properly on new stream.
EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrcY))); + webrtc::StreamParams::CreateLegacy(kSsrcY))); EXPECT_NE(call_.GetAudioSendStream(kSsrcX), call_.GetAudioSendStream(kSsrcY)); EXPECT_EQ(1u, GetSendStreamConfig(kSsrcY).rtp.extensions.size()); @@ -653,8 +714,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> { s->SetStats(GetAudioSendStreamStats()); } } - void VerifyVoiceSenderInfo(const cricket::VoiceSenderInfo& info, - bool is_sending) { + void VerifyVoiceSenderInfo(const webrtc::VoiceSenderInfo& info, + bool /* is_sending */) { const auto stats = GetAudioSendStreamStats(); EXPECT_EQ(info.ssrc(), stats.local_ssrc); EXPECT_EQ(info.payload_bytes_sent, stats.payload_bytes_sent); @@ -716,6 +777,7 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> { stats.concealment_events = 12; stats.jitter_buffer_delay_seconds = 34; stats.jitter_buffer_emitted_count = 77; + stats.total_processing_delay_seconds = 0.123; stats.expand_rate = 5.67f; stats.speech_expand_rate = 8.90f; stats.secondary_decoded_rate = 1.23f; @@ -738,23 +800,25 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> { s->SetStats(GetAudioReceiveStreamStats()); } } - void VerifyVoiceReceiverInfo(const cricket::VoiceReceiverInfo& info) { + void VerifyVoiceReceiverInfo(const webrtc::VoiceReceiverInfo& info) { const auto stats = GetAudioReceiveStreamStats(); EXPECT_EQ(info.ssrc(), stats.remote_ssrc); EXPECT_EQ(info.payload_bytes_received, stats.payload_bytes_received); EXPECT_EQ(info.header_and_padding_bytes_received, stats.header_and_padding_bytes_received); - EXPECT_EQ(rtc::checked_cast<unsigned int>(info.packets_received), + EXPECT_EQ(webrtc::checked_cast<unsigned int>(info.packets_received), stats.packets_received); EXPECT_EQ(info.packets_lost, stats.packets_lost); EXPECT_EQ(info.codec_name, stats.codec_name); EXPECT_EQ(info.codec_payload_type, stats.codec_payload_type); - EXPECT_EQ(rtc::checked_cast<unsigned int>(info.jitter_ms), stats.jitter_ms); - EXPECT_EQ(rtc::checked_cast<unsigned int>(info.jitter_buffer_ms), + EXPECT_EQ(webrtc::checked_cast<unsigned int>(info.jitter_ms), + stats.jitter_ms); + EXPECT_EQ(webrtc::checked_cast<unsigned int>(info.jitter_buffer_ms), stats.jitter_buffer_ms); - EXPECT_EQ(rtc::checked_cast<unsigned int>(info.jitter_buffer_preferred_ms), - stats.jitter_buffer_preferred_ms); - EXPECT_EQ(rtc::checked_cast<unsigned int>(info.delay_estimate_ms), + EXPECT_EQ( + webrtc::checked_cast<unsigned int>(info.jitter_buffer_preferred_ms), + stats.jitter_buffer_preferred_ms); + EXPECT_EQ(webrtc::checked_cast<unsigned int>(info.delay_estimate_ms), stats.delay_estimate_ms); EXPECT_EQ(info.audio_level, stats.audio_level); EXPECT_EQ(info.total_samples_received, stats.total_samples_received); @@ -764,6 +828,8 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> { stats.jitter_buffer_delay_seconds); EXPECT_EQ(info.jitter_buffer_emitted_count, stats.jitter_buffer_emitted_count); + EXPECT_EQ(info.total_processing_delay_seconds, + stats.total_processing_delay_seconds); EXPECT_EQ(info.expand_rate, stats.expand_rate); EXPECT_EQ(info.speech_expand_rate, stats.speech_expand_rate); EXPECT_EQ(info.secondary_decoded_rate, stats.secondary_decoded_rate); @@ -782,17 +848,17 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> { EXPECT_EQ(info.capture_start_ntp_time_ms, stats.capture_start_ntp_time_ms); } void VerifyVoiceSendRecvCodecs( - const cricket::VoiceMediaSendInfo& send_info, - const cricket::VoiceMediaReceiveInfo& receive_info) const { 
EXPECT_EQ(send_parameters_.codecs.size(), send_info.send_codecs.size()); - for (const cricket::AudioCodec& codec : send_parameters_.codecs) { + for (const webrtc::Codec& codec : send_parameters_.codecs) { ASSERT_EQ(send_info.send_codecs.count(codec.id), 1U); EXPECT_EQ(send_info.send_codecs.find(codec.id)->second, codec.ToCodecParameters()); } EXPECT_EQ(recv_parameters_.codecs.size(), receive_info.receive_codecs.size()); - for (const cricket::AudioCodec& codec : recv_parameters_.codecs) { + for (const webrtc::Codec& codec : recv_parameters_.codecs) { ASSERT_EQ(receive_info.receive_codecs.count(codec.id), 1U); EXPECT_EQ(receive_info.receive_codecs.find(codec.id)->second, codec.ToCodecParameters()); @@ -825,34 +891,39 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> { return apm_config_.high_pass_filter.enabled; } - cricket::WebRtcVoiceSendChannel* SendImplFromPointer( - cricket::VoiceMediaSendChannelInterface* channel) { - return static_cast<cricket::WebRtcVoiceSendChannel*>(channel); + webrtc::WebRtcVoiceSendChannel* SendImplFromPointer( + webrtc::VoiceMediaSendChannelInterface* channel) { + return static_cast<webrtc::WebRtcVoiceSendChannel*>(channel); } - cricket::WebRtcVoiceSendChannel* SendImpl() { + webrtc::WebRtcVoiceSendChannel* SendImpl() { return SendImplFromPointer(send_channel_.get()); } - cricket::WebRtcVoiceReceiveChannel* ReceiveImpl() { - return static_cast<cricket::WebRtcVoiceReceiveChannel*>( + webrtc::WebRtcVoiceReceiveChannel* ReceiveImpl() { + return static_cast<webrtc::WebRtcVoiceReceiveChannel*>( receive_channel_.get()); } + std::vector<webrtc::Codec> SendCodecsWithId() { + std::vector<webrtc::Codec> codecs = engine_->LegacySendCodecs(); + return AddIdToCodecs(pt_mapper_, std::move(codecs)); + } protected: - rtc::AutoThread main_thread_; + webrtc::AutoThread main_thread_; const bool use_null_apm_; webrtc::test::ScopedKeyValueConfig field_trials_; - std::unique_ptr<webrtc::TaskQueueFactory> task_queue_factory_; - rtc::scoped_refptr<webrtc::test::MockAudioDeviceModule> adm_; - rtc::scoped_refptr<StrictMock<webrtc::test::MockAudioProcessing>> apm_; - cricket::FakeCall call_; - std::unique_ptr<cricket::WebRtcVoiceEngine> engine_; - std::unique_ptr<cricket::VoiceMediaSendChannelInterface> send_channel_; - std::unique_ptr<cricket::VoiceMediaReceiveChannelInterface> receive_channel_; - cricket::AudioSenderParameter send_parameters_; - cricket::AudioReceiverParameters recv_parameters_; + const Environment env_; + webrtc::scoped_refptr<webrtc::test::MockAudioDeviceModule> adm_; + webrtc::scoped_refptr<StrictMock<webrtc::test::MockAudioProcessing>> apm_; + webrtc::FakeCall call_; FakeAudioSource fake_source_; + std::unique_ptr<webrtc::WebRtcVoiceEngine> engine_; + std::unique_ptr<webrtc::VoiceMediaSendChannelInterface> send_channel_; + std::unique_ptr<webrtc::VoiceMediaReceiveChannelInterface> receive_channel_; + webrtc::AudioSenderParameter send_parameters_; + webrtc::AudioReceiverParameters recv_parameters_; webrtc::AudioProcessing::Config apm_config_; + webrtc::PayloadTypePicker pt_mapper_; }; INSTANTIATE_TEST_SUITE_P(TestBothWithAndWithoutNullApm, @@ -867,8 +938,8 @@ TEST_P(WebRtcVoiceEngineTestFake, CreateMediaChannel) { // Test that we can add a send stream and that it has the correct defaults. 
TEST_P(WebRtcVoiceEngineTestFake, CreateSendStream) { EXPECT_TRUE(SetupChannel()); - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE( + send_channel_->AddSendStream(webrtc::StreamParams::CreateLegacy(kSsrcX))); const webrtc::AudioSendStream::Config& config = GetSendStreamConfig(kSsrcX); EXPECT_EQ(kSsrcX, config.rtp.ssrc); EXPECT_EQ("", config.rtp.c_name); @@ -888,22 +959,10 @@ TEST_P(WebRtcVoiceEngineTestFake, CreateRecvStream) { EXPECT_EQ("", config.sync_group); } -TEST_P(WebRtcVoiceEngineTestFake, OpusSupportsTransportCc) { - const std::vector<cricket::AudioCodec>& codecs = engine_->send_codecs(); - bool opus_found = false; - for (const cricket::AudioCodec& codec : codecs) { - if (codec.name == "opus") { - EXPECT_TRUE(HasTransportCc(codec)); - opus_found = true; - } - } - EXPECT_TRUE(opus_found); -} - // Test that we set our inbound codecs properly, including changing PT. TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecs) { EXPECT_TRUE(SetupChannel()); - cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kTelephoneEventCodec1); @@ -923,16 +982,16 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecs) { // Test that we fail to set an unknown inbound codec. TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsUnsupportedCodec) { EXPECT_TRUE(SetupChannel()); - cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); - parameters.codecs.push_back(cricket::CreateAudioCodec(127, "XYZ", 32000, 1)); + parameters.codecs.push_back(kUnknownCodec); EXPECT_FALSE(receive_channel_->SetReceiverParameters(parameters)); } // Test that we fail if we have duplicate types in the inbound list. TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsDuplicatePayloadType) { EXPECT_TRUE(SetupChannel()); - cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kCn16000Codec); parameters.codecs[1].id = kOpusCodec.id; @@ -942,7 +1001,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsDuplicatePayloadType) { // Test that we can decode OPUS without stereo parameters. TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpusNoStereo) { EXPECT_TRUE(SetupChannel()); - cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kOpusCodec); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); @@ -955,7 +1014,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpusNoStereo) { // Test that we can decode OPUS with stereo = 0. TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus0Stereo) { EXPECT_TRUE(SetupChannel()); - cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kOpusCodec); parameters.codecs[1].params["stereo"] = "0"; @@ -970,7 +1029,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus0Stereo) { // Test that we can decode OPUS with stereo = 1. 
TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus1Stereo) { EXPECT_TRUE(SetupChannel()); - cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kOpusCodec); parameters.codecs[1].params["stereo"] = "1"; @@ -985,7 +1044,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithOpus1Stereo) { // Test that changes to recv codecs are applied to all streams. TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithMultipleStreams) { EXPECT_TRUE(SetupChannel()); - cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kTelephoneEventCodec1); @@ -1006,7 +1065,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWithMultipleStreams) { TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsAfterAddingStreams) { EXPECT_TRUE(SetupRecvStream()); - cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].id = 106; // collide with existing CN 32k EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); @@ -1019,7 +1078,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsAfterAddingStreams) { // Test that we can apply the same set of codecs again while playing. TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWhilePlaying) { EXPECT_TRUE(SetupRecvStream()); - cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); @@ -1036,7 +1095,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRecvCodecsWhilePlaying) { // Test that we can add a codec while playing. TEST_P(WebRtcVoiceEngineTestFake, AddRecvCodecsWhilePlaying) { EXPECT_TRUE(SetupRecvStream()); - cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); @@ -1051,7 +1110,7 @@ TEST_P(WebRtcVoiceEngineTestFake, AddRecvCodecsWhilePlaying) { // See: https://bugs.chromium.org/p/webrtc/issues/detail?id=5847 TEST_P(WebRtcVoiceEngineTestFake, ChangeRecvCodecPayloadType) { EXPECT_TRUE(SetupRecvStream()); - cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); @@ -1062,7 +1121,7 @@ TEST_P(WebRtcVoiceEngineTestFake, ChangeRecvCodecPayloadType) { // Test that we do allow setting Opus/Red by default. 
TEST_P(WebRtcVoiceEngineTestFake, RecvRedDefault) { EXPECT_TRUE(SetupRecvStream()); - cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kRed48000Codec); parameters.codecs[1].params[""] = "111/111"; @@ -1116,13 +1175,13 @@ TEST_P(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthFixedRateAsCaller) { TEST_P(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthMultiRateAsCallee) { EXPECT_TRUE(SetupChannel()); const int kDesiredBitrate = 128000; - cricket::AudioSenderParameter parameters; - parameters.codecs = engine_->send_codecs(); + webrtc::AudioSenderParameter parameters; + parameters.codecs = SendCodecsWithId(); parameters.max_bandwidth_bps = kDesiredBitrate; SetSenderParameters(parameters); - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE( + send_channel_->AddSendStream(webrtc::StreamParams::CreateLegacy(kSsrcX))); EXPECT_EQ(kDesiredBitrate, GetCodecBitrate(kSsrcX)); } @@ -1226,7 +1285,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRtpParametersEncodingsActive) { // Now change it back to active and verify we resume sending. // This should occur even when other parameters are updated. parameters.encodings[0].active = true; - parameters.encodings[0].max_bitrate_bps = absl::optional<int>(6000); + parameters.encodings[0].max_bitrate_bps = std::optional<int>(6000); EXPECT_TRUE(send_channel_->SetRtpSendParameters(kSsrcX, parameters).ok()); EXPECT_TRUE(GetSendStream(kSsrcX).IsSending()); } @@ -1279,8 +1338,8 @@ TEST_P(WebRtcVoiceEngineTestFake, RtpParametersArePerStream) { SetupForMultiSendStream(); // Create send streams. for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE( + send_channel_->AddSendStream(webrtc::StreamParams::CreateLegacy(ssrc))); } // Configure one stream to be limited by the stream config, another to be // limited by the global max, and the third one with no per-stream limit @@ -1305,7 +1364,7 @@ TEST_P(WebRtcVoiceEngineTestFake, RtpParametersArePerStream) { // Test that GetRtpSendParameters returns the currently configured codecs. TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersCodecs) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); SetSenderParameters(parameters); @@ -1319,7 +1378,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersCodecs) { // Test that GetRtpSendParameters returns the currently configured RTCP CNAME. TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersRtcpCname) { - cricket::StreamParams params = cricket::StreamParams::CreateLegacy(kSsrcX); + webrtc::StreamParams params = webrtc::StreamParams::CreateLegacy(kSsrcX); params.cname = "rtcpcname"; EXPECT_TRUE(SetupSendStream(params)); @@ -1355,7 +1414,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpSendParametersSsrc) { // Test that if we set/get parameters multiple times, we get the same results. 
TEST_P(WebRtcVoiceEngineTestFake, SetAndGetRtpSendParameters) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); SetSenderParameters(parameters); @@ -1377,7 +1436,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetAndGetRtpSendParameters) { TEST_P(WebRtcVoiceEngineTestFake, SetSendParametersRemovesSelectedCodecFromRtpParameters) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); SetSenderParameters(parameters); @@ -1387,7 +1446,7 @@ TEST_P(WebRtcVoiceEngineTestFake, webrtc::RtpCodec opus_rtp_codec; opus_rtp_codec.name = "opus"; - opus_rtp_codec.kind = cricket::MEDIA_TYPE_AUDIO; + opus_rtp_codec.kind = webrtc::MediaType::AUDIO; opus_rtp_codec.num_channels = 2; opus_rtp_codec.clock_rate = 48000; initial_params.encodings[0].codec = opus_rtp_codec; @@ -1404,14 +1463,14 @@ TEST_P(WebRtcVoiceEngineTestFake, // forced codec anymore. webrtc::RtpParameters new_params = send_channel_->GetRtpSendParameters(kSsrcX); - EXPECT_EQ(new_params.encodings[0].codec, absl::nullopt); + EXPECT_EQ(new_params.encodings[0].codec, std::nullopt); } // Test that max_bitrate_bps in send stream config gets updated correctly when // SetRtpSendParameters is called. TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesMaxBitrate) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter send_parameters; + webrtc::AudioSenderParameter send_parameters; send_parameters.codecs.push_back(kOpusCodec); SetSenderParameters(send_parameters); @@ -1471,7 +1530,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRtpSendParameterUpdatesBitratePriority) { // Test that GetRtpReceiverParameters returns the currently configured codecs. TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersCodecs) { EXPECT_TRUE(SetupRecvStream()); - cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); @@ -1495,7 +1554,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersSsrc) { // Test that if we set/get parameters multiple times, we get the same results. TEST_P(WebRtcVoiceEngineTestFake, SetAndGetRtpReceiveParameters) { EXPECT_TRUE(SetupRecvStream()); - cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); @@ -1518,7 +1577,7 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersWithUnsignaledSsrc) { ASSERT_TRUE(SetupChannel()); // Call necessary methods to configure receiving a default stream as // soon as it arrives. 
- cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); @@ -1549,33 +1608,32 @@ TEST_P(WebRtcVoiceEngineTestFake, GetRtpReceiveParametersWithUnsignaledSsrc) { TEST_P(WebRtcVoiceEngineTestFake, OnPacketReceivedIdentifiesExtensions) { ASSERT_TRUE(SetupChannel()); - cricket::AudioReceiverParameters parameters = recv_parameters_; + webrtc::AudioReceiverParameters parameters = recv_parameters_; parameters.extensions.push_back( - RtpExtension(RtpExtension::kAudioLevelUri, /*id=*/1)); + webrtc::RtpExtension(webrtc::RtpExtension::kAudioLevelUri, /*id=*/1)); ASSERT_TRUE(receive_channel_->SetReceiverParameters(parameters)); webrtc::RtpHeaderExtensionMap extension_map(parameters.extensions); webrtc::RtpPacketReceived reference_packet(&extension_map); constexpr uint8_t kAudioLevel = 123; - reference_packet.SetExtension<webrtc::AudioLevel>(/*voice_activity=*/true, - kAudioLevel); + reference_packet.SetExtension<webrtc::AudioLevelExtension>( + webrtc::AudioLevel(/*voice_activity=*/true, kAudioLevel)); // Create a packet without the extension map but with the same content. webrtc::RtpPacketReceived received_packet; ASSERT_TRUE(received_packet.Parse(reference_packet.Buffer())); receive_channel_->OnPacketReceived(received_packet); - rtc::Thread::Current()->ProcessMessages(0); + webrtc::Thread::Current()->ProcessMessages(0); - bool voice_activity; - uint8_t audio_level; - EXPECT_TRUE(call_.last_received_rtp_packet().GetExtension<webrtc::AudioLevel>( - &voice_activity, &audio_level)); - EXPECT_EQ(audio_level, kAudioLevel); + webrtc::AudioLevel audio_level; + EXPECT_TRUE(call_.last_received_rtp_packet() .GetExtension<webrtc::AudioLevelExtension>(&audio_level)); + EXPECT_EQ(audio_level.level(), kAudioLevel); } // Test that we apply codecs properly. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecs) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn8000Codec); @@ -1587,7 +1645,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecs) { EXPECT_EQ(22000, send_codec_spec.target_bitrate_bps); EXPECT_STRCASEEQ("OPUS", send_codec_spec.format.name.c_str()); EXPECT_NE(send_codec_spec.format.clockrate_hz, 8000); - EXPECT_EQ(absl::nullopt, send_codec_spec.cng_payload_type); + EXPECT_EQ(std::nullopt, send_codec_spec.cng_payload_type); EXPECT_FALSE(send_channel_->CanInsertDtmf()); } @@ -1595,7 +1653,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecs) { // listed as the first codec and there is an fmtp line. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRed) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kRed48000Codec); parameters.codecs[0].params[""] = "111/111"; parameters.codecs.push_back(kOpusCodec); @@ -1610,20 +1668,20 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRed) { // listed as the first codec but there is no fmtp line. 
TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedNoFmtp) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kRed48000Codec); parameters.codecs.push_back(kOpusCodec); SetSenderParameters(parameters); const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_EQ(111, send_codec_spec.payload_type); EXPECT_STRCASEEQ("opus", send_codec_spec.format.name.c_str()); - EXPECT_EQ(absl::nullopt, send_codec_spec.red_payload_type); + EXPECT_EQ(std::nullopt, send_codec_spec.red_payload_type); } // Test that we do not use Opus/Red by default. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedDefault) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kRed48000Codec); parameters.codecs[1].params[""] = "111/111"; @@ -1631,13 +1689,13 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedDefault) { const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_EQ(111, send_codec_spec.payload_type); EXPECT_STRCASEEQ("opus", send_codec_spec.format.name.c_str()); - EXPECT_EQ(absl::nullopt, send_codec_spec.red_payload_type); + EXPECT_EQ(std::nullopt, send_codec_spec.red_payload_type); } // Test that the RED fmtp line must match the payload type. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedFmtpMismatch) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kRed48000Codec); parameters.codecs[0].params[""] = "8/8"; parameters.codecs.push_back(kOpusCodec); @@ -1645,13 +1703,13 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedFmtpMismatch) { const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_EQ(111, send_codec_spec.payload_type); EXPECT_STRCASEEQ("opus", send_codec_spec.format.name.c_str()); - EXPECT_EQ(absl::nullopt, send_codec_spec.red_payload_type); + EXPECT_EQ(std::nullopt, send_codec_spec.red_payload_type); } // Test that the RED fmtp line must show 2..32 payloads. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedFmtpAmountOfRedundancy) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kRed48000Codec); parameters.codecs[0].params[""] = "111"; parameters.codecs.push_back(kOpusCodec); @@ -1659,7 +1717,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedFmtpAmountOfRedundancy) { const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_EQ(111, send_codec_spec.payload_type); EXPECT_STRCASEEQ("opus", send_codec_spec.format.name.c_str()); - EXPECT_EQ(absl::nullopt, send_codec_spec.red_payload_type); + EXPECT_EQ(std::nullopt, send_codec_spec.red_payload_type); for (int i = 1; i < 32; i++) { parameters.codecs[0].params[""] += "/111"; SetSenderParameters(parameters); @@ -1673,14 +1731,30 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsRedFmtpAmountOfRedundancy) { const auto& send_codec_spec3 = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_EQ(111, send_codec_spec3.payload_type); EXPECT_STRCASEEQ("opus", send_codec_spec3.format.name.c_str()); - EXPECT_EQ(absl::nullopt, send_codec_spec3.red_payload_type); + EXPECT_EQ(std::nullopt, send_codec_spec3.red_payload_type); +} + +// Test that we use Opus/Red by default if an unknown codec +// is before RED and Opus. 
+TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecRedWithUnknownCodec) { + EXPECT_TRUE(SetupSendStream()); + webrtc::AudioSenderParameter parameters; + parameters.codecs.push_back(kUnknownCodec); + parameters.codecs.push_back(kRed48000Codec); + parameters.codecs.back().params[""] = "111/111"; + parameters.codecs.push_back(kOpusCodec); + SetSenderParameters(parameters); + const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; + EXPECT_EQ(111, send_codec_spec.payload_type); + EXPECT_STRCASEEQ("opus", send_codec_spec.format.name.c_str()); + EXPECT_EQ(112, send_codec_spec.red_payload_type); } // Test that WebRtcVoiceEngine reconfigures, rather than recreates its // AudioSendStream. TEST_P(WebRtcVoiceEngineTestFake, DontRecreateSendStream) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn8000Codec); @@ -1698,73 +1772,80 @@ TEST_P(WebRtcVoiceEngineTestFake, DontRecreateSendStream) { // TODO(ossu): Revisit if these tests need to be here, now that these kinds of // tests should be available in AudioEncoderOpusTest. -// Test that if clockrate is not 48000 for opus, we fail. +// Test that if clockrate is not 48000 for opus, we do not have a send codec. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusBadClockrate) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; parameters.codecs[0].clockrate = 50000; - EXPECT_FALSE(send_channel_->SetSenderParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); + EXPECT_EQ(send_channel_->GetSendCodec(), std::nullopt); } -// Test that if channels=0 for opus, we fail. +// Test that if channels=0 for opus, we do not have a send codec. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusBad0ChannelsNoStereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; parameters.codecs[0].channels = 0; - EXPECT_FALSE(send_channel_->SetSenderParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); + EXPECT_EQ(send_channel_->GetSendCodec(), std::nullopt); } -// Test that if channels=0 for opus, we fail. +// Test that if channels=0 for opus, we do not have a send codec. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusBad0Channels1Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; parameters.codecs[0].channels = 0; parameters.codecs[0].params["stereo"] = "1"; - EXPECT_FALSE(send_channel_->SetSenderParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); + EXPECT_EQ(send_channel_->GetSendCodec(), std::nullopt); } -// Test that if channel is 1 for opus and there's no stereo, we fail. +// Test that if channel is 1 for opus and there's no stereo, we do not have a +// send codec. 
TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpus1ChannelNoStereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; parameters.codecs[0].channels = 1; - EXPECT_FALSE(send_channel_->SetSenderParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); + EXPECT_EQ(send_channel_->GetSendCodec(), std::nullopt); } -// Test that if channel is 1 for opus and stereo=0, we fail. +// Test that if channel is 1 for opus and stereo=0, we do not have a send codec. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusBad1Channel0Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; parameters.codecs[0].channels = 1; parameters.codecs[0].params["stereo"] = "0"; - EXPECT_FALSE(send_channel_->SetSenderParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); + EXPECT_EQ(send_channel_->GetSendCodec(), std::nullopt); } -// Test that if channel is 1 for opus and stereo=1, we fail. +// Test that if channel is 1 for opus and stereo=1, we do not have a send codec. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusBad1Channel1Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; parameters.codecs[0].channels = 1; parameters.codecs[0].params["stereo"] = "1"; - EXPECT_FALSE(send_channel_->SetSenderParameters(parameters)); + EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); + EXPECT_EQ(send_channel_->GetSendCodec(), std::nullopt); } // Test that with bitrate=0 and no stereo, bitrate is 32000. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGood0BitrateNoStereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; SetSenderParameters(parameters); @@ -1774,7 +1855,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGood0BitrateNoStereo) { // Test that with bitrate=0 and stereo=0, bitrate is 32000. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGood0Bitrate0Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; parameters.codecs[0].params["stereo"] = "0"; @@ -1785,7 +1866,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGood0Bitrate0Stereo) { // Test that with bitrate=invalid and stereo=0, bitrate is 32000. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodXBitrate0Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].params["stereo"] = "0"; // bitrate that's out of the range between 6000 and 510000 will be clamped. @@ -1801,7 +1882,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodXBitrate0Stereo) { // Test that with bitrate=0 and stereo=1, bitrate is 64000. 
TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGood0Bitrate1Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 0; parameters.codecs[0].params["stereo"] = "1"; @@ -1812,7 +1893,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGood0Bitrate1Stereo) { // Test that with bitrate=invalid and stereo=1, bitrate is 64000. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodXBitrate1Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].params["stereo"] = "1"; // bitrate that's out of the range between 6000 and 510000 will be clamped. @@ -1828,7 +1909,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodXBitrate1Stereo) { // Test that with bitrate=N and stereo unset, bitrate is N. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrateNoStereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 96000; SetSenderParameters(parameters); @@ -1843,7 +1924,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrateNoStereo) { // Test that with bitrate=N and stereo=0, bitrate is N. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrate0Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 30000; parameters.codecs[0].params["stereo"] = "0"; @@ -1854,7 +1935,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrate0Stereo) { // Test that with bitrate=N and without any parameters, bitrate is N. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrateNoParameters) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 30000; SetSenderParameters(parameters); @@ -1864,7 +1945,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrateNoParameters) { // Test that with bitrate=N and stereo=1, bitrate is N. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecOpusGoodNBitrate1Stereo) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].bitrate = 30000; parameters.codecs[0].params["stereo"] = "1"; @@ -1907,27 +1988,27 @@ TEST_P(WebRtcVoiceEngineTestFake, SetMaxSendBandwidthForAudioDoesntAffectBwe) { // Test that we can enable NACK with opus as callee. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackAsCallee) { EXPECT_TRUE(SetupRecvStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); - parameters.codecs[0].AddFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamNack, cricket::kParamValueEmpty)); + parameters.codecs[0].AddFeedbackParam(webrtc::FeedbackParam( + webrtc::kRtcpFbParamNack, webrtc::kParamValueEmpty)); EXPECT_EQ(0, GetRecvStreamConfig(kSsrcX).rtp.nack.rtp_history_ms); SetSenderParameters(parameters); // NACK should be enabled even with no send stream. 
EXPECT_EQ(kRtpHistoryMs, GetRecvStreamConfig(kSsrcX).rtp.nack.rtp_history_ms); - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE( + send_channel_->AddSendStream(webrtc::StreamParams::CreateLegacy(kSsrcX))); } // Test that we can enable NACK on receive streams. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackRecvStreams) { EXPECT_TRUE(SetupSendStream()); EXPECT_TRUE(AddRecvStream(kSsrcY)); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); - parameters.codecs[0].AddFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamNack, cricket::kParamValueEmpty)); + parameters.codecs[0].AddFeedbackParam(webrtc::FeedbackParam( + webrtc::kRtcpFbParamNack, webrtc::kParamValueEmpty)); EXPECT_EQ(0, GetRecvStreamConfig(kSsrcY).rtp.nack.rtp_history_ms); SetSenderParameters(parameters); EXPECT_EQ(kRtpHistoryMs, GetRecvStreamConfig(kSsrcY).rtp.nack.rtp_history_ms); @@ -1937,10 +2018,10 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecEnableNackRecvStreams) { TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecDisableNackRecvStreams) { EXPECT_TRUE(SetupSendStream()); EXPECT_TRUE(AddRecvStream(kSsrcY)); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); - parameters.codecs[0].AddFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamNack, cricket::kParamValueEmpty)); + parameters.codecs[0].AddFeedbackParam(webrtc::FeedbackParam( + webrtc::kRtcpFbParamNack, webrtc::kParamValueEmpty)); SetSenderParameters(parameters); EXPECT_EQ(kRtpHistoryMs, GetRecvStreamConfig(kSsrcY).rtp.nack.rtp_history_ms); @@ -1953,11 +2034,11 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecDisableNackRecvStreams) { // Test that NACK is enabled on a new receive stream. TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamEnableNack) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kCn16000Codec); - parameters.codecs[0].AddFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamNack, cricket::kParamValueEmpty)); + parameters.codecs[0].AddFeedbackParam(webrtc::FeedbackParam( + webrtc::kRtcpFbParamNack, webrtc::kParamValueEmpty)); SetSenderParameters(parameters); EXPECT_TRUE(AddRecvStream(kSsrcY)); @@ -1966,11 +2047,78 @@ TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamEnableNack) { EXPECT_EQ(kRtpHistoryMs, GetRecvStreamConfig(kSsrcZ).rtp.nack.rtp_history_ms); } +// Test that we can enable RTCP reduced size mode with opus as callee. +TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecEnableRtcpReducedSizeAsCallee) { + EXPECT_TRUE(SetupRecvStream()); + webrtc::AudioSenderParameter parameters; + parameters.codecs.push_back(kOpusCodec); + parameters.rtcp.reduced_size = true; + EXPECT_EQ(webrtc::RtcpMode::kCompound, + GetRecvStreamConfig(kSsrcX).rtp.rtcp_mode); + SetSenderParameters(parameters); + // Reduced size mode should be enabled even with no send stream. + EXPECT_EQ(webrtc::RtcpMode::kReducedSize, + GetRecvStreamConfig(kSsrcX).rtp.rtcp_mode); + + EXPECT_TRUE( + send_channel_->AddSendStream(webrtc::StreamParams::CreateLegacy(kSsrcX))); +} + +// Test that we can enable RTCP reduced size mode on receive streams. 
+TEST_P(WebRtcVoiceEngineTestFake, + SetSendCodecEnableRtcpReducedSizeRecvStreams) { + EXPECT_TRUE(SetupSendStream()); + EXPECT_TRUE(AddRecvStream(kSsrcY)); + webrtc::AudioSenderParameter parameters; + parameters.codecs.push_back(kOpusCodec); + parameters.rtcp.reduced_size = true; + EXPECT_EQ(webrtc::RtcpMode::kCompound, + GetRecvStreamConfig(kSsrcY).rtp.rtcp_mode); + SetSenderParameters(parameters); + EXPECT_EQ(webrtc::RtcpMode::kReducedSize, + GetRecvStreamConfig(kSsrcY).rtp.rtcp_mode); +} + +// Test that we can disable RTCP reduced size mode on receive streams. +TEST_P(WebRtcVoiceEngineTestFake, + SetSendCodecDisableRtcpReducedSizeRecvStreams) { + EXPECT_TRUE(SetupSendStream()); + EXPECT_TRUE(AddRecvStream(kSsrcY)); + webrtc::AudioSenderParameter parameters; + parameters.codecs.push_back(kOpusCodec); + parameters.rtcp.reduced_size = true; + SetSenderParameters(parameters); + EXPECT_EQ(webrtc::RtcpMode::kReducedSize, + GetRecvStreamConfig(kSsrcY).rtp.rtcp_mode); + + parameters.rtcp.reduced_size = false; + SetSenderParameters(parameters); + EXPECT_EQ(webrtc::RtcpMode::kCompound, + GetRecvStreamConfig(kSsrcY).rtp.rtcp_mode); +} + +// Test that RTCP reduced size mode is enabled on a new receive stream. +TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamEnableRtcpReducedSize) { + EXPECT_TRUE(SetupSendStream()); + webrtc::AudioSenderParameter parameters; + parameters.codecs.push_back(kOpusCodec); + parameters.codecs.push_back(kCn16000Codec); + parameters.rtcp.reduced_size = true; + SetSenderParameters(parameters); + + EXPECT_TRUE(AddRecvStream(kSsrcY)); + EXPECT_EQ(webrtc::RtcpMode::kReducedSize, + GetRecvStreamConfig(kSsrcY).rtp.rtcp_mode); + EXPECT_TRUE(AddRecvStream(kSsrcZ)); + EXPECT_EQ(webrtc::RtcpMode::kReducedSize, + GetRecvStreamConfig(kSsrcZ).rtp.rtcp_mode); +} + // Test that we can switch back and forth between Opus and PCMU with CN. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsOpusPcmuSwitching) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter opus_parameters; + webrtc::AudioSenderParameter opus_parameters; opus_parameters.codecs.push_back(kOpusCodec); SetSenderParameters(opus_parameters); { @@ -1979,7 +2127,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsOpusPcmuSwitching) { EXPECT_STRCASEEQ("opus", spec.format.name.c_str()); } - cricket::AudioSenderParameter pcmu_parameters; + webrtc::AudioSenderParameter pcmu_parameters; pcmu_parameters.codecs.push_back(kPcmuCodec); pcmu_parameters.codecs.push_back(kCn16000Codec); pcmu_parameters.codecs.push_back(kOpusCodec); @@ -2001,7 +2149,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsOpusPcmuSwitching) { // Test that we handle various ways of specifying bitrate. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsBitrate) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kPcmuCodec); SetSenderParameters(parameters); { @@ -2031,18 +2179,19 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsBitrate) { } } -// Test that we fail if no codecs are specified. +// Test that we do not fail if no codecs are specified. 
TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsNoCodecs) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; - EXPECT_FALSE(send_channel_->SetSenderParameters(parameters)); + webrtc::AudioSenderParameter parameters; + EXPECT_TRUE(send_channel_->SetSenderParameters(parameters)); + EXPECT_EQ(send_channel_->GetSendCodec(), std::nullopt); } // Test that we can set send codecs even with telephone-event codec as the first // one on the list. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsDTMFOnTop) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kTelephoneEventCodec1); parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); @@ -2059,7 +2208,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsDTMFOnTop) { // Test that CanInsertDtmf() is governed by the send flag TEST_P(WebRtcVoiceEngineTestFake, DTMFControlledBySendFlag) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kTelephoneEventCodec1); parameters.codecs.push_back(kPcmuCodec); parameters.codecs[0].id = 98; // DTMF @@ -2075,7 +2224,7 @@ TEST_P(WebRtcVoiceEngineTestFake, DTMFControlledBySendFlag) { // Test that payload type range is limited for telephone-event codec. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsDTMFPayloadTypeOutOfRange) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kTelephoneEventCodec2); parameters.codecs.push_back(kOpusCodec); parameters.codecs[0].id = 0; // DTMF @@ -2098,7 +2247,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsDTMFPayloadTypeOutOfRange) { // one on the list. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNOnTop) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kCn8000Codec); parameters.codecs.push_back(kPcmuCodec); parameters.codecs[0].id = 98; // narrowband CN @@ -2112,7 +2261,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNOnTop) { // Test that we set VAD and DTMF types correctly as caller. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCaller) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); parameters.codecs.push_back(kCn8000Codec); @@ -2133,7 +2282,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCaller) { // Test that we set VAD and DTMF types correctly as callee. 
TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCallee) { EXPECT_TRUE(SetupChannel()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); parameters.codecs.push_back(kCn8000Codec); @@ -2142,8 +2291,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCallee) { parameters.codecs[2].id = 97; // narrowband CN parameters.codecs[3].id = 98; // DTMF SetSenderParameters(parameters); - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE( + send_channel_->AddSendStream(webrtc::StreamParams::CreateLegacy(kSsrcX))); const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_EQ(96, send_codec_spec.payload_type); @@ -2158,7 +2307,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNandDTMFAsCallee) { // send codec clockrate. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNNoMatch) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; // Set PCMU(8K) and CN(16K). VAD should not be activated. parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); @@ -2167,7 +2316,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNNoMatch) { { const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str()); - EXPECT_EQ(absl::nullopt, send_codec_spec.cng_payload_type); + EXPECT_EQ(std::nullopt, send_codec_spec.cng_payload_type); } // Set PCMU(8K) and CN(8K). VAD should be activated. parameters.codecs[1] = kCn8000Codec; @@ -2184,14 +2333,14 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCNNoMatch) { { const auto& send_codec_spec = *GetSendStreamConfig(kSsrcX).send_codec_spec; EXPECT_STRCASEEQ("OPUS", send_codec_spec.format.name.c_str()); - EXPECT_EQ(absl::nullopt, send_codec_spec.cng_payload_type); + EXPECT_EQ(std::nullopt, send_codec_spec.cng_payload_type); } } // Test that we perform case-insensitive matching of codec names. TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCaseInsensitive) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn16000Codec); parameters.codecs.push_back(kCn8000Codec); @@ -2213,10 +2362,10 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsCaseInsensitive) { TEST_P(WebRtcVoiceEngineTestFake, SupportsTransportSequenceNumberHeaderExtension) { const std::vector<webrtc::RtpExtension> header_extensions = - GetDefaultEnabledRtpHeaderExtensions(*engine_); + webrtc::GetDefaultEnabledRtpHeaderExtensions(*engine_); EXPECT_THAT(header_extensions, Contains(::testing::Field( - "uri", &RtpExtension::uri, + "uri", &webrtc::RtpExtension::uri, webrtc::RtpExtension::kTransportSequenceNumberUri))); } @@ -2248,20 +2397,6 @@ TEST_P(WebRtcVoiceEngineTestFake, Send) { EXPECT_FALSE(GetSendStream(kSsrcX).IsSending()); } -// Test that a channel will send if and only if it has a source and is enabled -// for sending. 
-TEST_P(WebRtcVoiceEngineTestFake, SendStateWithAndWithoutSource) { - EXPECT_TRUE(SetupSendStream()); - SetSenderParameters(send_parameters_); - SetAudioSend(kSsrcX, true, nullptr); - SetSend(true); - EXPECT_FALSE(GetSendStream(kSsrcX).IsSending()); - SetAudioSend(kSsrcX, true, &fake_source_); - EXPECT_TRUE(GetSendStream(kSsrcX).IsSending()); - SetAudioSend(kSsrcX, true, nullptr); - EXPECT_FALSE(GetSendStream(kSsrcX).IsSending()); -} - // Test that a channel is muted/unmuted. TEST_P(WebRtcVoiceEngineTestFake, SendStateMuteUnmute) { EXPECT_TRUE(SetupSendStream()); @@ -2316,8 +2451,8 @@ TEST_P(WebRtcVoiceEngineTestFake, CreateAndDeleteMultipleSendStreams) { SetSend(true); for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE( + send_channel_->AddSendStream(webrtc::StreamParams::CreateLegacy(ssrc))); SetAudioSend(ssrc, true, &fake_source_); // Verify that we are in a sending state for all the created streams. EXPECT_TRUE(GetSendStream(ssrc).IsSending()); @@ -2339,11 +2474,11 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsWithMultipleSendStreams) { // Create send streams. for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE( + send_channel_->AddSendStream(webrtc::StreamParams::CreateLegacy(ssrc))); } - cricket::AudioSenderParameter parameters; + webrtc::AudioSenderParameter parameters; // Set PCMU and CN(8K). VAD should be activated. parameters.codecs.push_back(kPcmuCodec); parameters.codecs.push_back(kCn8000Codec); @@ -2369,7 +2504,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendCodecsWithMultipleSendStreams) { const auto& send_codec_spec = *call_.GetAudioSendStream(ssrc)->GetConfig().send_codec_spec; EXPECT_STRCASEEQ("PCMU", send_codec_spec.format.name.c_str()); - EXPECT_EQ(absl::nullopt, send_codec_spec.cng_payload_type); + EXPECT_EQ(std::nullopt, send_codec_spec.cng_payload_type); } } @@ -2379,8 +2514,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendWithMultipleSendStreams) { // Create the send channels and they should be a "not sending" date. for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE( + send_channel_->AddSendStream(webrtc::StreamParams::CreateLegacy(ssrc))); SetAudioSend(ssrc, true, &fake_source_); EXPECT_FALSE(GetSendStream(ssrc).IsSending()); } @@ -2406,8 +2541,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) { // Create send streams. for (uint32_t ssrc : kSsrcs4) { - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(ssrc))); + EXPECT_TRUE( + send_channel_->AddSendStream(webrtc::StreamParams::CreateLegacy(ssrc))); } // Create a receive stream to check that none of the send streams end up in @@ -2423,8 +2558,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) { // Check stats for the added streams. { EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); - cricket::VoiceMediaSendInfo send_info; - cricket::VoiceMediaReceiveInfo receive_info; + webrtc::VoiceMediaSendInfo send_info; + webrtc::VoiceMediaReceiveInfo receive_info; EXPECT_EQ(true, send_channel_->GetStats(&send_info)); EXPECT_EQ(true, receive_channel_->GetStats( &receive_info, /*get_and_clear_legacy_stats=*/true)); @@ -2444,8 +2579,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) { // Remove the kSsrcY stream. No receiver stats. 
{ - cricket::VoiceMediaReceiveInfo receive_info; - cricket::VoiceMediaSendInfo send_info; + webrtc::VoiceMediaReceiveInfo receive_info; + webrtc::VoiceMediaSendInfo send_info; EXPECT_TRUE(receive_channel_->RemoveRecvStream(kSsrcY)); EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); EXPECT_EQ(true, send_channel_->GetStats(&send_info)); @@ -2459,8 +2594,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStatsWithMultipleSendStreams) { // Deliver a new packet - a default receive stream should be created and we // should see stats again. { - cricket::VoiceMediaSendInfo send_info; - cricket::VoiceMediaReceiveInfo receive_info; + webrtc::VoiceMediaSendInfo send_info; + webrtc::VoiceMediaReceiveInfo receive_info; DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame)); SetAudioReceiveStreamStats(); EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); @@ -2532,10 +2667,10 @@ TEST_P(WebRtcVoiceEngineTestFake, AudioSendResetAudioNetworkAdaptor) { SetSenderParameters(send_parameters_); EXPECT_EQ(send_parameters_.options.audio_network_adaptor_config, GetAudioNetworkAdaptorConfig(kSsrcX)); - cricket::AudioOptions options; + webrtc::AudioOptions options; options.audio_network_adaptor = false; SetAudioSend(kSsrcX, true, nullptr, &options); - EXPECT_EQ(absl::nullopt, GetAudioNetworkAdaptorConfig(kSsrcX)); + EXPECT_EQ(std::nullopt, GetAudioNetworkAdaptorConfig(kSsrcX)); } TEST_P(WebRtcVoiceEngineTestFake, AudioNetworkAdaptorNotGetOverridden) { @@ -2546,8 +2681,8 @@ TEST_P(WebRtcVoiceEngineTestFake, AudioNetworkAdaptorNotGetOverridden) { EXPECT_EQ(send_parameters_.options.audio_network_adaptor_config, GetAudioNetworkAdaptorConfig(kSsrcX)); const int initial_num = call_.GetNumCreatedSendStreams(); - cricket::AudioOptions options; - options.audio_network_adaptor = absl::nullopt; + webrtc::AudioOptions options; + options.audio_network_adaptor = std::nullopt; // Unvalued `options.audio_network_adaptor` should not reset audio network // adaptor. SetAudioSend(kSsrcX, true, nullptr, &options); @@ -2577,8 +2712,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStats) { // Check stats for the added streams. 
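The GetStats blocks around this hunk all repeat one polling pattern; condensed here into a single sketch for reference. The channel pointers mirror the fixture's send_channel_/receive_channel_, and the senders/receivers member names are an assumption about the split send/receive info structs, not something this patch introduces; headers are assumed to be the ones this unittest already includes.

// Sketch only: poll send- and receive-side stats with the renamed structs.
// Assumes `send_channel` and `receive_channel` were obtained from
// WebRtcVoiceEngine::CreateSendChannel()/CreateReceiveChannel().
webrtc::VoiceMediaSendInfo send_info;
webrtc::VoiceMediaReceiveInfo receive_info;
bool ok = send_channel->GetStats(&send_info) &&
          receive_channel->GetStats(&receive_info,
                                    /*get_and_clear_legacy_stats=*/true);
if (ok) {
  // Per-stream results are expected in send_info.senders and
  // receive_info.receivers once streams exist and packets have flowed.
}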
{ EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); - cricket::VoiceMediaSendInfo send_info; - cricket::VoiceMediaReceiveInfo receive_info; + webrtc::VoiceMediaSendInfo send_info; + webrtc::VoiceMediaReceiveInfo receive_info; EXPECT_EQ(true, send_channel_->GetStats(&send_info)); EXPECT_EQ(true, receive_channel_->GetStats( &receive_info, /*get_and_clear_legacy_stats=*/true)); @@ -2595,8 +2730,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStats) { { SetSend(true); EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); - cricket::VoiceMediaSendInfo send_info; - cricket::VoiceMediaReceiveInfo receive_info; + webrtc::VoiceMediaSendInfo send_info; + webrtc::VoiceMediaReceiveInfo receive_info; SetAudioReceiveStreamStats(); EXPECT_EQ(true, send_channel_->GetStats(&send_info)); EXPECT_EQ(true, receive_channel_->GetStats( @@ -2609,8 +2744,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStats) { { EXPECT_TRUE(receive_channel_->RemoveRecvStream(kSsrcY)); EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); - cricket::VoiceMediaSendInfo send_info; - cricket::VoiceMediaReceiveInfo receive_info; + webrtc::VoiceMediaSendInfo send_info; + webrtc::VoiceMediaReceiveInfo receive_info; EXPECT_EQ(true, send_channel_->GetStats(&send_info)); EXPECT_EQ(true, receive_channel_->GetStats( &receive_info, /*get_and_clear_legacy_stats=*/true)); @@ -2624,8 +2759,8 @@ TEST_P(WebRtcVoiceEngineTestFake, GetStats) { DeliverPacket(kPcmuFrame, sizeof(kPcmuFrame)); SetAudioReceiveStreamStats(); EXPECT_CALL(*adm_, GetPlayoutUnderrunCount()).WillOnce(Return(0)); - cricket::VoiceMediaSendInfo send_info; - cricket::VoiceMediaReceiveInfo receive_info; + webrtc::VoiceMediaSendInfo send_info; + webrtc::VoiceMediaReceiveInfo receive_info; EXPECT_EQ(true, send_channel_->GetStats(&send_info)); EXPECT_EQ(true, receive_channel_->GetStats( &receive_info, /*get_and_clear_legacy_stats=*/true)); @@ -2650,8 +2785,8 @@ TEST_P(WebRtcVoiceEngineTestFake, SetSendSsrcWithMultipleStreams) { TEST_P(WebRtcVoiceEngineTestFake, SetSendSsrcAfterCreatingReceiveChannel) { EXPECT_TRUE(SetupChannel()); EXPECT_TRUE(AddRecvStream(kSsrcY)); - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrcX))); + EXPECT_TRUE( + send_channel_->AddSendStream(webrtc::StreamParams::CreateLegacy(kSsrcX))); EXPECT_TRUE(call_.GetAudioSendStream(kSsrcX)); EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc); } @@ -2679,12 +2814,12 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvWithMultipleStreams) { unsigned char packets[4][sizeof(kPcmuFrame)]; for (size_t i = 0; i < arraysize(packets); ++i) { memcpy(packets[i], kPcmuFrame, sizeof(kPcmuFrame)); - rtc::SetBE32(packets[i] + 8, static_cast(i)); + webrtc::SetBE32(packets[i] + 8, static_cast(i)); } - const cricket::FakeAudioReceiveStream& s1 = GetRecvStream(ssrc1); - const cricket::FakeAudioReceiveStream& s2 = GetRecvStream(ssrc2); - const cricket::FakeAudioReceiveStream& s3 = GetRecvStream(ssrc3); + const webrtc::FakeAudioReceiveStream& s1 = GetRecvStream(ssrc1); + const webrtc::FakeAudioReceiveStream& s2 = GetRecvStream(ssrc2); + const webrtc::FakeAudioReceiveStream& s3 = GetRecvStream(ssrc3); EXPECT_EQ(s1.received_packets(), 0); EXPECT_EQ(s2.received_packets(), 0); @@ -2736,7 +2871,7 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvUnsignaled) { TEST_P(WebRtcVoiceEngineTestFake, RecvUnsignaledSsrcWithSignaledStreamId) { const char kSyncLabel[] = "sync_label"; EXPECT_TRUE(SetupChannel()); - cricket::StreamParams unsignaled_stream; + webrtc::StreamParams unsignaled_stream; 
unsignaled_stream.set_stream_ids({kSyncLabel}); ASSERT_TRUE(receive_channel_->AddRecvStream(unsignaled_stream)); // The stream shouldn't have been created at this point because it doesn't @@ -2772,9 +2907,9 @@ TEST_P(WebRtcVoiceEngineTestFake, // Deliver a couple packets with unsignaled SSRCs. unsigned char packet[sizeof(kPcmuFrame)]; memcpy(packet, kPcmuFrame, sizeof(kPcmuFrame)); - rtc::SetBE32(&packet[8], 0x1234); + webrtc::SetBE32(&packet[8], 0x1234); DeliverPacket(packet, sizeof(packet)); - rtc::SetBE32(&packet[8], 0x5678); + webrtc::SetBE32(&packet[8], 0x5678); DeliverPacket(packet, sizeof(packet)); // Verify that the receive streams were created. @@ -2796,7 +2931,7 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvMultipleUnsignaled) { // Note that SSRC = 0 is not supported. for (uint32_t ssrc = 1; ssrc < (1 + kMaxUnsignaledRecvStreams); ++ssrc) { - rtc::SetBE32(&packet[8], ssrc); + webrtc::SetBE32(&packet[8], ssrc); DeliverPacket(packet, sizeof(packet)); // Verify we have one new stream for each loop iteration. @@ -2807,7 +2942,7 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvMultipleUnsignaled) { // Sending on the same SSRCs again should not create new streams. for (uint32_t ssrc = 1; ssrc < (1 + kMaxUnsignaledRecvStreams); ++ssrc) { - rtc::SetBE32(&packet[8], ssrc); + webrtc::SetBE32(&packet[8], ssrc); DeliverPacket(packet, sizeof(packet)); EXPECT_EQ(kMaxUnsignaledRecvStreams, call_.GetAudioReceiveStreams().size()); @@ -2817,7 +2952,7 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvMultipleUnsignaled) { // Send on another SSRC, the oldest unsignaled stream (SSRC=1) is replaced. constexpr uint32_t kAnotherSsrc = 667; - rtc::SetBE32(&packet[8], kAnotherSsrc); + webrtc::SetBE32(&packet[8], kAnotherSsrc); DeliverPacket(packet, sizeof(packet)); const auto& streams = call_.GetAudioReceiveStreams(); @@ -2842,7 +2977,7 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvUnsignaledAfterSignaled) { // Add a known stream, send packet and verify we got it. const uint32_t signaled_ssrc = 1; - rtc::SetBE32(&packet[8], signaled_ssrc); + webrtc::SetBE32(&packet[8], signaled_ssrc); EXPECT_TRUE(AddRecvStream(signaled_ssrc)); DeliverPacket(packet, sizeof(packet)); EXPECT_TRUE( @@ -2852,7 +2987,7 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvUnsignaledAfterSignaled) { // Note that the first unknown SSRC cannot be 0, because we only support // creating receive streams for SSRC!=0. const uint32_t unsignaled_ssrc = 7011; - rtc::SetBE32(&packet[8], unsignaled_ssrc); + webrtc::SetBE32(&packet[8], unsignaled_ssrc); DeliverPacket(packet, sizeof(packet)); EXPECT_TRUE( GetRecvStream(unsignaled_ssrc).VerifyLastPacket(packet, sizeof(packet))); @@ -2861,7 +2996,7 @@ TEST_P(WebRtcVoiceEngineTestFake, RecvUnsignaledAfterSignaled) { DeliverPacket(packet, sizeof(packet)); EXPECT_EQ(2, GetRecvStream(unsignaled_ssrc).received_packets()); - rtc::SetBE32(&packet[8], signaled_ssrc); + webrtc::SetBE32(&packet[8], signaled_ssrc); DeliverPacket(packet, sizeof(packet)); EXPECT_EQ(2, GetRecvStream(signaled_ssrc).received_packets()); EXPECT_EQ(2u, call_.GetAudioReceiveStreams().size()); @@ -2907,7 +3042,7 @@ TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamAfterUnsignaled_Updates) { const std::string new_stream_id("stream_id"); int audio_receive_stream_id = streams.front()->id(); - cricket::StreamParams stream_params; + webrtc::StreamParams stream_params; stream_params.ssrcs.push_back(1); stream_params.set_stream_ids({new_stream_id}); @@ -2930,7 +3065,7 @@ TEST_P(WebRtcVoiceEngineTestFake, AddRecvStream) { // those previously passed into SetRecvCodecs. 
TEST_P(WebRtcVoiceEngineTestFake, AddRecvStreamUnsupportedCodec) { EXPECT_TRUE(SetupSendStream()); - cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.codecs.push_back(kOpusCodec); parameters.codecs.push_back(kPcmuCodec); EXPECT_TRUE(receive_channel_->SetReceiverParameters(parameters)); @@ -3018,7 +3153,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetAudioOptions) { EXPECT_FALSE(GetRecvStreamConfig(kSsrcY).jitter_buffer_fast_accelerate); // Nothing set in AudioOptions, so everything should be as default. - send_parameters_.options = cricket::AudioOptions(); + send_parameters_.options = webrtc::AudioOptions(); SetSenderParameters(send_parameters_); if (!use_null_apm_) { VerifyEchoCancellationSettings(/*enabled=*/true); @@ -3097,9 +3232,9 @@ TEST_P(WebRtcVoiceEngineTestFake, InitRecordingOnSend) { EXPECT_CALL(*adm_, Recording()).WillOnce(Return(false)); EXPECT_CALL(*adm_, InitRecording()).Times(1); - std::unique_ptr send_channel( + std::unique_ptr send_channel( engine_->CreateSendChannel( - &call_, cricket::MediaConfig(), cricket::AudioOptions(), + &call_, webrtc::MediaConfig(), webrtc::AudioOptions(), webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create())); send_channel->SetSend(true); @@ -3110,11 +3245,11 @@ TEST_P(WebRtcVoiceEngineTestFake, SkipInitRecordingOnSend) { EXPECT_CALL(*adm_, Recording()).Times(0); EXPECT_CALL(*adm_, InitRecording()).Times(0); - cricket::AudioOptions options; + webrtc::AudioOptions options; options.init_recording_on_send = false; - std::unique_ptr send_channel( - engine_->CreateSendChannel(&call_, cricket::MediaConfig(), options, + std::unique_ptr send_channel( + engine_->CreateSendChannel(&call_, webrtc::MediaConfig(), options, webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create())); @@ -3139,25 +3274,25 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) { EXPECT_CALL(*adm_, Recording()).Times(2).WillRepeatedly(Return(false)); EXPECT_CALL(*adm_, InitRecording()).Times(2).WillRepeatedly(Return(0)); - std::unique_ptr send_channel1( + std::unique_ptr send_channel1( engine_->CreateSendChannel( - &call_, cricket::MediaConfig(), cricket::AudioOptions(), + &call_, webrtc::MediaConfig(), webrtc::AudioOptions(), webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create())); - std::unique_ptr send_channel2( + std::unique_ptr send_channel2( engine_->CreateSendChannel( - &call_, cricket::MediaConfig(), cricket::AudioOptions(), + &call_, webrtc::MediaConfig(), webrtc::AudioOptions(), webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create())); // Have to add a stream to make SetSend work. 
- cricket::StreamParams stream1; + webrtc::StreamParams stream1; stream1.ssrcs.push_back(1); send_channel1->AddSendStream(stream1); - cricket::StreamParams stream2; + webrtc::StreamParams stream2; stream2.ssrcs.push_back(2); send_channel2->AddSendStream(stream2); // AEC and AGC and NS - cricket::AudioSenderParameter parameters_options_all = send_parameters_; + webrtc::AudioSenderParameter parameters_options_all = send_parameters_; parameters_options_all.options.echo_cancellation = true; parameters_options_all.options.auto_gain_control = true; parameters_options_all.options.noise_suppression = true; @@ -3177,10 +3312,10 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) { } // unset NS - cricket::AudioSenderParameter parameters_options_no_ns = send_parameters_; + webrtc::AudioSenderParameter parameters_options_no_ns = send_parameters_; parameters_options_no_ns.options.noise_suppression = false; EXPECT_TRUE(send_channel1->SetSenderParameters(parameters_options_no_ns)); - cricket::AudioOptions expected_options = parameters_options_all.options; + webrtc::AudioOptions expected_options = parameters_options_all.options; if (!use_null_apm_) { VerifyEchoCancellationSettings(/*enabled=*/true); EXPECT_FALSE(apm_config_.noise_suppression.enabled); @@ -3194,7 +3329,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) { } // unset AGC - cricket::AudioSenderParameter parameters_options_no_agc = send_parameters_; + webrtc::AudioSenderParameter parameters_options_no_agc = send_parameters_; parameters_options_no_agc.options.auto_gain_control = false; EXPECT_TRUE(send_channel2->SetSenderParameters(parameters_options_no_agc)); if (!use_null_apm_) { @@ -3234,7 +3369,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) { } // Make sure settings take effect while we are sending. - cricket::AudioSenderParameter parameters_options_no_agc_nor_ns = + webrtc::AudioSenderParameter parameters_options_no_agc_nor_ns = send_parameters_; parameters_options_no_agc_nor_ns.options.auto_gain_control = false; parameters_options_no_agc_nor_ns.options.noise_suppression = false; @@ -3256,55 +3391,55 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOptionOverridesViaChannels) { // This test verifies DSCP settings are properly applied on voice media channel. TEST_P(WebRtcVoiceEngineTestFake, TestSetDscpOptions) { EXPECT_TRUE(SetupSendStream()); - cricket::FakeNetworkInterface network_interface; - cricket::MediaConfig config; - std::unique_ptr channel; + webrtc::FakeNetworkInterface network_interface; + webrtc::MediaConfig config; + std::unique_ptr channel; webrtc::RtpParameters parameters; - channel = engine_->CreateSendChannel(&call_, config, cricket::AudioOptions(), + channel = engine_->CreateSendChannel(&call_, config, webrtc::AudioOptions(), webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); channel->SetInterface(&network_interface); // Default value when DSCP is disabled should be DSCP_DEFAULT. 
- EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface.dscp()); + EXPECT_EQ(webrtc::DSCP_DEFAULT, network_interface.dscp()); channel->SetInterface(nullptr); config.enable_dscp = true; - channel = engine_->CreateSendChannel(&call_, config, cricket::AudioOptions(), + channel = engine_->CreateSendChannel(&call_, config, webrtc::AudioOptions(), webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); channel->SetInterface(&network_interface); - EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface.dscp()); + EXPECT_EQ(webrtc::DSCP_DEFAULT, network_interface.dscp()); // Create a send stream to configure EXPECT_TRUE( - channel->AddSendStream(cricket::StreamParams::CreateLegacy(kSsrcZ))); + channel->AddSendStream(webrtc::StreamParams::CreateLegacy(kSsrcZ))); parameters = channel->GetRtpSendParameters(kSsrcZ); ASSERT_FALSE(parameters.encodings.empty()); // Various priorities map to various dscp values. parameters.encodings[0].network_priority = webrtc::Priority::kHigh; ASSERT_TRUE(channel->SetRtpSendParameters(kSsrcZ, parameters, nullptr).ok()); - EXPECT_EQ(rtc::DSCP_EF, network_interface.dscp()); + EXPECT_EQ(webrtc::DSCP_EF, network_interface.dscp()); parameters.encodings[0].network_priority = webrtc::Priority::kVeryLow; ASSERT_TRUE(channel->SetRtpSendParameters(kSsrcZ, parameters, nullptr).ok()); - EXPECT_EQ(rtc::DSCP_CS1, network_interface.dscp()); + EXPECT_EQ(webrtc::DSCP_CS1, network_interface.dscp()); // Packets should also self-identify their dscp in PacketOptions. const uint8_t kData[10] = {0}; EXPECT_TRUE(SendImplFromPointer(channel.get())->transport()->SendRtcp(kData)); - EXPECT_EQ(rtc::DSCP_CS1, network_interface.options().dscp); + EXPECT_EQ(webrtc::DSCP_CS1, network_interface.options().dscp); channel->SetInterface(nullptr); // Verify that setting the option to false resets the // DiffServCodePoint. config.enable_dscp = false; - channel = engine_->CreateSendChannel(&call_, config, cricket::AudioOptions(), + channel = engine_->CreateSendChannel(&call_, config, webrtc::AudioOptions(), webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); channel->SetInterface(&network_interface); // Default value when DSCP is disabled should be DSCP_DEFAULT. - EXPECT_EQ(rtc::DSCP_DEFAULT, network_interface.dscp()); + EXPECT_EQ(webrtc::DSCP_DEFAULT, network_interface.dscp()); channel->SetInterface(nullptr); } @@ -3312,7 +3447,7 @@ TEST_P(WebRtcVoiceEngineTestFake, TestSetDscpOptions) { TEST_P(WebRtcVoiceEngineTestFake, SetOutputVolume) { EXPECT_TRUE(SetupChannel()); EXPECT_FALSE(receive_channel_->SetOutputVolume(kSsrcY, 0.5)); - cricket::StreamParams stream; + webrtc::StreamParams stream; stream.ssrcs.push_back(kSsrcY); EXPECT_TRUE(receive_channel_->AddRecvStream(stream)); EXPECT_DOUBLE_EQ(1, GetRecvStream(kSsrcY).gain()); @@ -3335,7 +3470,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetOutputVolumeUnsignaledRecvStream) { // Spawn an unsignaled stream by sending a packet - gain should be 2. 
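The DSCP expectations in the hunk above boil down to one calling pattern on the send channel. A minimal sketch follows, assuming the post-migration webrtc::VoiceMediaSendChannelInterface type, that SetRtpSendParameters still reports success via .ok() as the test relies on, and the same headers as this unittest; the helper name is hypothetical.

// Hypothetical helper, not part of the patch: raise one send stream's
// network priority. With MediaConfig::enable_dscp set when the channel was
// created, the test above expects RTP to be marked DSCP_EF for
// Priority::kHigh and DSCP_CS1 for Priority::kVeryLow.
void SetHighNetworkPriority(webrtc::VoiceMediaSendChannelInterface* channel,
                            uint32_t ssrc) {
  webrtc::RtpParameters parameters = channel->GetRtpSendParameters(ssrc);
  if (parameters.encodings.empty()) {
    return;  // Nothing to update for this SSRC.
  }
  parameters.encodings[0].network_priority = webrtc::Priority::kHigh;
  if (!channel->SetRtpSendParameters(ssrc, parameters, /*callback=*/nullptr)
           .ok()) {
    RTC_LOG(LS_WARNING) << "Failed to update RTP send parameters.";
  }
}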
unsigned char pcmuFrame2[sizeof(kPcmuFrame)]; memcpy(pcmuFrame2, kPcmuFrame, sizeof(kPcmuFrame)); - rtc::SetBE32(&pcmuFrame2[8], kSsrcX); + webrtc::SetBE32(&pcmuFrame2[8], kSsrcX); DeliverPacket(pcmuFrame2, sizeof(pcmuFrame2)); EXPECT_DOUBLE_EQ(2, GetRecvStream(kSsrcX).gain()); @@ -3360,7 +3495,7 @@ TEST_P(WebRtcVoiceEngineTestFake, BaseMinimumPlayoutDelayMs) { EXPECT_FALSE( receive_channel_->GetBaseMinimumPlayoutDelayMs(kSsrcY).has_value()); - cricket::StreamParams stream; + webrtc::StreamParams stream; stream.ssrcs.push_back(kSsrcY); EXPECT_TRUE(receive_channel_->AddRecvStream(stream)); EXPECT_EQ(0, GetRecvStream(kSsrcY).base_mininum_playout_delay_ms()); @@ -3397,7 +3532,7 @@ TEST_P(WebRtcVoiceEngineTestFake, // Spawn an unsignaled stream by sending a packet - delay should be 100. unsigned char pcmuFrame2[sizeof(kPcmuFrame)]; memcpy(pcmuFrame2, kPcmuFrame, sizeof(kPcmuFrame)); - rtc::SetBE32(&pcmuFrame2[8], kSsrcX); + webrtc::SetBE32(&pcmuFrame2[8], kSsrcX); DeliverPacket(pcmuFrame2, sizeof(pcmuFrame2)); EXPECT_EQ( 100, receive_channel_->GetBaseMinimumPlayoutDelayMs(kSsrcX).value_or(-1)); @@ -3433,7 +3568,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetsSyncGroupFromStreamId) { const std::string kStreamId = "AvSyncLabel"; EXPECT_TRUE(SetupSendStream()); - cricket::StreamParams sp = cricket::StreamParams::CreateLegacy(kAudioSsrc); + webrtc::StreamParams sp = webrtc::StreamParams::CreateLegacy(kAudioSsrc); sp.set_stream_ids({kStreamId}); // Creating two channels to make sure that sync label is set properly for both // the default voice channel and following ones. @@ -3463,7 +3598,7 @@ TEST_P(WebRtcVoiceEngineTestFake, ConfiguresAudioReceiveStreamRtpExtensions) { SetSenderParameters(send_parameters_); for (uint32_t ssrc : ssrcs) { EXPECT_TRUE(receive_channel_->AddRecvStream( - cricket::StreamParams::CreateLegacy(ssrc))); + webrtc::StreamParams::CreateLegacy(ssrc))); } EXPECT_EQ(2u, call_.GetAudioReceiveStreams().size()); @@ -3475,8 +3610,8 @@ TEST_P(WebRtcVoiceEngineTestFake, ConfiguresAudioReceiveStreamRtpExtensions) { // Set up receive extensions. const std::vector header_extensions = - GetDefaultEnabledRtpHeaderExtensions(*engine_); - cricket::AudioReceiverParameters recv_parameters; + webrtc::GetDefaultEnabledRtpHeaderExtensions(*engine_); + webrtc::AudioReceiverParameters recv_parameters; recv_parameters.extensions = header_extensions; receive_channel_->SetReceiverParameters(recv_parameters); EXPECT_EQ(2u, call_.GetAudioReceiveStreams().size()); @@ -3487,7 +3622,7 @@ TEST_P(WebRtcVoiceEngineTestFake, ConfiguresAudioReceiveStreamRtpExtensions) { } // Disable receive extensions. - receive_channel_->SetReceiverParameters(cricket::AudioReceiverParameters()); + receive_channel_->SetReceiverParameters(webrtc::AudioReceiverParameters()); for (uint32_t ssrc : ssrcs) { EXPECT_THAT( receive_channel_->GetRtpReceiverParameters(ssrc).header_extensions, @@ -3498,27 +3633,27 @@ TEST_P(WebRtcVoiceEngineTestFake, ConfiguresAudioReceiveStreamRtpExtensions) { TEST_P(WebRtcVoiceEngineTestFake, DeliverAudioPacket_Call) { // Test that packets are forwarded to the Call when configured accordingly. 
const uint32_t kAudioSsrc = 1; - rtc::CopyOnWriteBuffer kPcmuPacket(kPcmuFrame, sizeof(kPcmuFrame)); + webrtc::CopyOnWriteBuffer kPcmuPacket(kPcmuFrame, sizeof(kPcmuFrame)); static const unsigned char kRtcp[] = { 0x80, 0xc9, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}; - rtc::CopyOnWriteBuffer kRtcpPacket(kRtcp, sizeof(kRtcp)); + webrtc::CopyOnWriteBuffer kRtcpPacket(kRtcp, sizeof(kRtcp)); EXPECT_TRUE(SetupSendStream()); - cricket::VoiceMediaReceiveChannelInterface* media_channel = ReceiveImpl(); + webrtc::VoiceMediaReceiveChannelInterface* media_channel = ReceiveImpl(); SetSenderParameters(send_parameters_); EXPECT_TRUE(media_channel->AddRecvStream( - cricket::StreamParams::CreateLegacy(kAudioSsrc))); + webrtc::StreamParams::CreateLegacy(kAudioSsrc))); EXPECT_EQ(1u, call_.GetAudioReceiveStreams().size()); - const cricket::FakeAudioReceiveStream* s = + const webrtc::FakeAudioReceiveStream* s = call_.GetAudioReceiveStream(kAudioSsrc); EXPECT_EQ(0, s->received_packets()); webrtc::RtpPacketReceived parsed_packet; RTC_CHECK(parsed_packet.Parse(kPcmuPacket)); receive_channel_->OnPacketReceived(parsed_packet); - rtc::Thread::Current()->ProcessMessages(0); + webrtc::Thread::Current()->ProcessMessages(0); EXPECT_EQ(1, s->received_packets()); } @@ -3529,8 +3664,8 @@ TEST_P(WebRtcVoiceEngineTestFake, AssociateFirstSendChannel_SendCreatedFirst) { EXPECT_TRUE(SetupSendStream()); EXPECT_TRUE(AddRecvStream(kSsrcY)); EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc); - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrcZ))); + EXPECT_TRUE( + send_channel_->AddSendStream(webrtc::StreamParams::CreateLegacy(kSsrcZ))); EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcY).rtp.local_ssrc); EXPECT_TRUE(AddRecvStream(kSsrcW)); EXPECT_EQ(kSsrcX, GetRecvStreamConfig(kSsrcW).rtp.local_ssrc); @@ -3539,13 +3674,13 @@ TEST_P(WebRtcVoiceEngineTestFake, AssociateFirstSendChannel_SendCreatedFirst) { TEST_P(WebRtcVoiceEngineTestFake, AssociateFirstSendChannel_RecvCreatedFirst) { EXPECT_TRUE(SetupRecvStream()); EXPECT_EQ(0xFA17FA17u, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc); - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrcY))); + EXPECT_TRUE( + send_channel_->AddSendStream(webrtc::StreamParams::CreateLegacy(kSsrcY))); EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc); EXPECT_TRUE(AddRecvStream(kSsrcZ)); EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcZ).rtp.local_ssrc); - EXPECT_TRUE(send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kSsrcW))); + EXPECT_TRUE( + send_channel_->AddSendStream(webrtc::StreamParams::CreateLegacy(kSsrcW))); EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcX).rtp.local_ssrc); EXPECT_EQ(kSsrcY, GetRecvStreamConfig(kSsrcZ).rtp.local_ssrc); @@ -3602,7 +3737,7 @@ TEST_P(WebRtcVoiceEngineTestFake, SetRawAudioSinkUnsignaledRecvStream) { // and the previous unsignaled stream should lose it. 
unsigned char pcmuFrame2[sizeof(kPcmuFrame)]; memcpy(pcmuFrame2, kPcmuFrame, sizeof(kPcmuFrame)); - rtc::SetBE32(&pcmuFrame2[8], kSsrcX); + webrtc::SetBE32(&pcmuFrame2[8], kSsrcX); DeliverPacket(pcmuFrame2, sizeof(pcmuFrame2)); if (kMaxUnsignaledRecvStreams > 1) { EXPECT_EQ(nullptr, GetRecvStream(kSsrc1).sink()); @@ -3665,7 +3800,7 @@ TEST_P(WebRtcVoiceEngineTestFake, PreservePlayoutWhenRecreateRecvStream) { // Changing RTP header extensions will recreate the // AudioReceiveStreamInterface. - cricket::AudioReceiverParameters parameters; + webrtc::AudioReceiverParameters parameters; parameters.extensions.push_back( webrtc::RtpExtension(webrtc::RtpExtension::kAudioLevelUri, 12)); receive_channel_->SetReceiverParameters(parameters); @@ -3678,7 +3813,7 @@ TEST_P(WebRtcVoiceEngineTestFake, PreservePlayoutWhenRecreateRecvStream) { TEST_P(WebRtcVoiceEngineTestFake, GetSourcesWithNonExistingSsrc) { // Setup an recv stream with `kSsrcX`. SetupRecvStream(); - cricket::WebRtcVoiceReceiveChannel* media_channel = ReceiveImpl(); + webrtc::WebRtcVoiceReceiveChannel* media_channel = ReceiveImpl(); // Call GetSources with `kSsrcY` which doesn't exist. std::vector sources = media_channel->GetSources(kSsrcY); EXPECT_EQ(0u, sources.size()); @@ -3686,36 +3821,29 @@ TEST_P(WebRtcVoiceEngineTestFake, GetSourcesWithNonExistingSsrc) { // Tests that the library initializes and shuts down properly. TEST(WebRtcVoiceEngineTest, StartupShutdown) { - rtc::AutoThread main_thread; + webrtc::AutoThread main_thread; for (bool use_null_apm : {false, true}) { // If the VoiceEngine wants to gather available codecs early, that's fine // but we never want it to create a decoder at this stage. - std::unique_ptr task_queue_factory = - webrtc::CreateDefaultTaskQueueFactory(); - rtc::scoped_refptr adm = + Environment env = CreateEnvironment(); + webrtc::scoped_refptr adm = webrtc::test::MockAudioDeviceModule::CreateNice(); - rtc::scoped_refptr apm = - use_null_apm ? nullptr : webrtc::AudioProcessingBuilder().Create(); - webrtc::FieldTrialBasedConfig field_trials; - cricket::WebRtcVoiceEngine engine( - task_queue_factory.get(), adm.get(), - webrtc::MockAudioEncoderFactory::CreateUnusedFactory(), + scoped_refptr apm = + use_null_apm ? 
nullptr : BuiltinAudioProcessingBuilder().Build(env); + webrtc::WebRtcVoiceEngine engine( + env, adm, webrtc::MockAudioEncoderFactory::CreateUnusedFactory(), webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm, - nullptr, nullptr, field_trials); + nullptr); engine.Init(); - webrtc::RtcEventLogNull event_log; - webrtc::Call::Config call_config(&event_log); - call_config.trials = &field_trials; - call_config.task_queue_factory = task_queue_factory.get(); - auto call = absl::WrapUnique(webrtc::Call::Create(call_config)); - std::unique_ptr send_channel = + std::unique_ptr call = Call::Create(CallConfig(env)); + std::unique_ptr send_channel = engine.CreateSendChannel( - call.get(), cricket::MediaConfig(), cricket::AudioOptions(), + call.get(), webrtc::MediaConfig(), webrtc::AudioOptions(), webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); EXPECT_TRUE(send_channel); - std::unique_ptr - receive_channel = engine.CreateReceiveChannel( - call.get(), cricket::MediaConfig(), cricket::AudioOptions(), + std::unique_ptr receive_channel = + engine.CreateReceiveChannel( + call.get(), webrtc::MediaConfig(), webrtc::AudioOptions(), webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); EXPECT_TRUE(receive_channel); } @@ -3723,63 +3851,53 @@ TEST(WebRtcVoiceEngineTest, StartupShutdown) { // Tests that reference counting on the external ADM is correct. TEST(WebRtcVoiceEngineTest, StartupShutdownWithExternalADM) { - rtc::AutoThread main_thread; + webrtc::AutoThread main_thread; for (bool use_null_apm : {false, true}) { - std::unique_ptr task_queue_factory = - webrtc::CreateDefaultTaskQueueFactory(); - auto adm = rtc::make_ref_counted< + Environment env = CreateEnvironment(); + auto adm = webrtc::make_ref_counted< ::testing::NiceMock>(); { - rtc::scoped_refptr apm = - use_null_apm ? nullptr : webrtc::AudioProcessingBuilder().Create(); - webrtc::FieldTrialBasedConfig field_trials; - cricket::WebRtcVoiceEngine engine( - task_queue_factory.get(), adm.get(), - webrtc::MockAudioEncoderFactory::CreateUnusedFactory(), + scoped_refptr apm = + use_null_apm ? nullptr : BuiltinAudioProcessingBuilder().Build(env); + webrtc::WebRtcVoiceEngine engine( + env, adm, webrtc::MockAudioEncoderFactory::CreateUnusedFactory(), webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm, - nullptr, nullptr, field_trials); + nullptr); engine.Init(); - webrtc::RtcEventLogNull event_log; - webrtc::Call::Config call_config(&event_log); - call_config.trials = &field_trials; - call_config.task_queue_factory = task_queue_factory.get(); - auto call = absl::WrapUnique(webrtc::Call::Create(call_config)); - std::unique_ptr send_channel = + std::unique_ptr call = Call::Create(CallConfig(env)); + std::unique_ptr send_channel = engine.CreateSendChannel( - call.get(), cricket::MediaConfig(), cricket::AudioOptions(), + call.get(), webrtc::MediaConfig(), webrtc::AudioOptions(), webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); EXPECT_TRUE(send_channel); - std::unique_ptr + std::unique_ptr receive_channel = engine.CreateReceiveChannel( - call.get(), cricket::MediaConfig(), cricket::AudioOptions(), + call.get(), webrtc::MediaConfig(), webrtc::AudioOptions(), webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); EXPECT_TRUE(receive_channel); } // The engine/channel should have dropped their references. 
EXPECT_EQ(adm.release()->Release(), - rtc::RefCountReleaseStatus::kDroppedLastRef); + webrtc::RefCountReleaseStatus::kDroppedLastRef); } } // Verify the payload id of common audio codecs, including CN and G722. TEST(WebRtcVoiceEngineTest, HasCorrectPayloadTypeMapping) { + Environment env = CreateEnvironment(); for (bool use_null_apm : {false, true}) { - std::unique_ptr task_queue_factory = - webrtc::CreateDefaultTaskQueueFactory(); // TODO(ossu): Why are the payload types of codecs with non-static payload // type assignments checked here? It shouldn't really matter. - rtc::scoped_refptr adm = + webrtc::scoped_refptr adm = webrtc::test::MockAudioDeviceModule::CreateNice(); - rtc::scoped_refptr apm = - use_null_apm ? nullptr : webrtc::AudioProcessingBuilder().Create(); - webrtc::FieldTrialBasedConfig field_trials; - cricket::WebRtcVoiceEngine engine( - task_queue_factory.get(), adm.get(), - webrtc::MockAudioEncoderFactory::CreateUnusedFactory(), + scoped_refptr apm = + use_null_apm ? nullptr : BuiltinAudioProcessingBuilder().Build(env); + webrtc::WebRtcVoiceEngine engine( + env, adm, webrtc::MockAudioEncoderFactory::CreateUnusedFactory(), webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm, - nullptr, nullptr, field_trials); + nullptr); engine.Init(); - for (const cricket::AudioCodec& codec : engine.send_codecs()) { + for (const webrtc::Codec& codec : engine.LegacySendCodecs()) { auto is_codec = [&codec](const char* name, int clockrate = 0) { return absl::EqualsIgnoreCase(codec.name, name) && (clockrate == 0 || codec.clockrate == clockrate); @@ -3814,33 +3932,26 @@ TEST(WebRtcVoiceEngineTest, HasCorrectPayloadTypeMapping) { // Tests that VoE supports at least 32 channels TEST(WebRtcVoiceEngineTest, Has32Channels) { - rtc::AutoThread main_thread; + webrtc::AutoThread main_thread; for (bool use_null_apm : {false, true}) { - std::unique_ptr task_queue_factory = - webrtc::CreateDefaultTaskQueueFactory(); - rtc::scoped_refptr adm = + Environment env = CreateEnvironment(); + webrtc::scoped_refptr adm = webrtc::test::MockAudioDeviceModule::CreateNice(); - rtc::scoped_refptr apm = - use_null_apm ? nullptr : webrtc::AudioProcessingBuilder().Create(); - webrtc::FieldTrialBasedConfig field_trials; - cricket::WebRtcVoiceEngine engine( - task_queue_factory.get(), adm.get(), - webrtc::MockAudioEncoderFactory::CreateUnusedFactory(), + scoped_refptr apm = + use_null_apm ? nullptr : BuiltinAudioProcessingBuilder().Build(env); + webrtc::WebRtcVoiceEngine engine( + env, adm, webrtc::MockAudioEncoderFactory::CreateUnusedFactory(), webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm, - nullptr, nullptr, field_trials); + nullptr); engine.Init(); - webrtc::RtcEventLogNull event_log; - webrtc::Call::Config call_config(&event_log); - call_config.trials = &field_trials; - call_config.task_queue_factory = task_queue_factory.get(); - auto call = absl::WrapUnique(webrtc::Call::Create(call_config)); + std::unique_ptr call = Call::Create(CallConfig(env)); - std::vector> + std::vector> channels; while (channels.size() < 32) { - std::unique_ptr channel = + std::unique_ptr channel = engine.CreateSendChannel( - call.get(), cricket::MediaConfig(), cricket::AudioOptions(), + call.get(), webrtc::MediaConfig(), webrtc::AudioOptions(), webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); if (!channel) break; @@ -3853,10 +3964,9 @@ TEST(WebRtcVoiceEngineTest, Has32Channels) { // Test that we set our preferred codecs properly. 
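The engine tests in this hunk now share a single Environment-driven setup. Collected into one sketch below: the factory choices mirror the tests, the bare nullptr arguments are simply left as the tests pass them, and the headers are assumed to be the ones this unittest already includes.

// Sketch only: the post-migration construction path for the voice engine,
// its Call, and a send channel, as the updated tests exercise it.
void SketchVoiceEngineSetup() {
  webrtc::AutoThread main_thread;
  webrtc::Environment env = webrtc::CreateEnvironment();
  auto adm = webrtc::test::MockAudioDeviceModule::CreateNice();
  auto apm = webrtc::BuiltinAudioProcessingBuilder().Build(env);

  webrtc::WebRtcVoiceEngine engine(
      env, adm, webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
      webrtc::CreateBuiltinAudioDecoderFactory(), nullptr, apm, nullptr);
  engine.Init();

  // CallConfig is built from the same Environment; no explicit event log,
  // field trials, or task queue factory wiring is needed here.
  std::unique_ptr<webrtc::Call> call =
      webrtc::Call::Create(webrtc::CallConfig(env));

  auto send_channel = engine.CreateSendChannel(
      call.get(), webrtc::MediaConfig(), webrtc::AudioOptions(),
      webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create());
  RTC_DCHECK(send_channel);
}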
TEST(WebRtcVoiceEngineTest, SetRecvCodecs) { - rtc::AutoThread main_thread; + webrtc::AutoThread main_thread; for (bool use_null_apm : {false, true}) { - std::unique_ptr task_queue_factory = - webrtc::CreateDefaultTaskQueueFactory(); + Environment env = CreateEnvironment(); // TODO(ossu): I'm not sure of the intent of this test. It's either: // - Check that our builtin codecs are usable by Channel. // - The codecs provided by the engine is usable by Channel. @@ -3864,50 +3974,36 @@ TEST(WebRtcVoiceEngineTest, SetRecvCodecs) { // what we sent in - though it's probably reasonable to expect so, if // SetReceiverParameters returns true. // I think it will become clear once audio decoder injection is completed. - rtc::scoped_refptr adm = + webrtc::scoped_refptr adm = webrtc::test::MockAudioDeviceModule::CreateNice(); - rtc::scoped_refptr apm = - use_null_apm ? nullptr : webrtc::AudioProcessingBuilder().Create(); - webrtc::FieldTrialBasedConfig field_trials; - cricket::WebRtcVoiceEngine engine( - task_queue_factory.get(), adm.get(), - webrtc::MockAudioEncoderFactory::CreateUnusedFactory(), - webrtc::CreateBuiltinAudioDecoderFactory(), nullptr, apm, nullptr, - nullptr, field_trials); + scoped_refptr apm = + use_null_apm ? nullptr : BuiltinAudioProcessingBuilder().Build(env); + webrtc::WebRtcVoiceEngine engine( + env, adm, webrtc::MockAudioEncoderFactory::CreateUnusedFactory(), + webrtc::CreateBuiltinAudioDecoderFactory(), nullptr, apm, nullptr); engine.Init(); - webrtc::RtcEventLogNull event_log; - webrtc::Call::Config call_config(&event_log); - call_config.trials = &field_trials; - call_config.task_queue_factory = task_queue_factory.get(); - auto call = absl::WrapUnique(webrtc::Call::Create(call_config)); - cricket::WebRtcVoiceReceiveChannel channel( - &engine, cricket::MediaConfig(), cricket::AudioOptions(), + std::unique_ptr call = Call::Create(CallConfig(env)); + webrtc::WebRtcVoiceReceiveChannel channel( + &engine, webrtc::MediaConfig(), webrtc::AudioOptions(), webrtc::CryptoOptions(), call.get(), webrtc::AudioCodecPairId::Create()); - cricket::AudioReceiverParameters parameters; - parameters.codecs = engine.recv_codecs(); + webrtc::AudioReceiverParameters parameters; + parameters.codecs = ReceiveCodecsWithId(engine); EXPECT_TRUE(channel.SetReceiverParameters(parameters)); } } TEST(WebRtcVoiceEngineTest, SetRtpSendParametersMaxBitrate) { - rtc::AutoThread main_thread; - std::unique_ptr task_queue_factory = - webrtc::CreateDefaultTaskQueueFactory(); - rtc::scoped_refptr adm = + webrtc::AutoThread main_thread; + Environment env = CreateEnvironment(); + webrtc::scoped_refptr adm = webrtc::test::MockAudioDeviceModule::CreateNice(); - webrtc::FieldTrialBasedConfig field_trials; FakeAudioSource source; - cricket::WebRtcVoiceEngine engine(task_queue_factory.get(), adm.get(), - webrtc::CreateBuiltinAudioEncoderFactory(), - webrtc::CreateBuiltinAudioDecoderFactory(), - nullptr, nullptr, nullptr, nullptr, - field_trials); + webrtc::WebRtcVoiceEngine engine( + env, adm, webrtc::CreateBuiltinAudioEncoderFactory(), + webrtc::CreateBuiltinAudioDecoderFactory(), nullptr, nullptr, nullptr); engine.Init(); - webrtc::RtcEventLogNull event_log; - webrtc::Call::Config call_config(&event_log); - call_config.trials = &field_trials; - call_config.task_queue_factory = task_queue_factory.get(); + CallConfig call_config(env); { webrtc::AudioState::Config config; config.audio_mixer = webrtc::AudioMixerImpl::Create(); @@ -3915,20 +4011,20 @@ TEST(WebRtcVoiceEngineTest, SetRtpSendParametersMaxBitrate) { 
webrtc::test::MockAudioDeviceModule::CreateNice(); call_config.audio_state = webrtc::AudioState::Create(config); } - auto call = absl::WrapUnique(webrtc::Call::Create(call_config)); - cricket::WebRtcVoiceSendChannel channel( - &engine, cricket::MediaConfig(), cricket::AudioOptions(), + std::unique_ptr call = Call::Create(std::move(call_config)); + webrtc::WebRtcVoiceSendChannel channel( + &engine, webrtc::MediaConfig(), webrtc::AudioOptions(), webrtc::CryptoOptions(), call.get(), webrtc::AudioCodecPairId::Create()); { - cricket::AudioSenderParameter params; - params.codecs.push_back(cricket::CreateAudioCodec(1, "opus", 48000, 2)); + webrtc::AudioSenderParameter params; + params.codecs.push_back(webrtc::CreateAudioCodec(1, "opus", 48000, 2)); params.extensions.push_back(webrtc::RtpExtension( webrtc::RtpExtension::kTransportSequenceNumberUri, 1)); EXPECT_TRUE(channel.SetSenderParameters(params)); } constexpr int kSsrc = 1234; { - cricket::StreamParams params; + webrtc::StreamParams params; params.add_ssrc(kSsrc); channel.AddSendStream(params); } @@ -3943,6 +4039,96 @@ TEST(WebRtcVoiceEngineTest, SetRtpSendParametersMaxBitrate) { } TEST(WebRtcVoiceEngineTest, CollectRecvCodecs) { + Environment env = CreateEnvironment(); + for (bool use_null_apm : {false, true}) { + std::vector specs; + webrtc::AudioCodecSpec spec1{{"codec1", 48000, 2, {{"param1", "value1"}}}, + {48000, 2, 16000, 10000, 20000}}; + spec1.info.allow_comfort_noise = false; + spec1.info.supports_network_adaption = true; + specs.push_back(spec1); + webrtc::AudioCodecSpec spec2{{"codec2", 48000, 2, {{"param1", "value1"}}}, + {48000, 2, 16000, 10000, 20000}}; + // We do not support 48khz CN. + spec2.info.allow_comfort_noise = true; + specs.push_back(spec2); + specs.push_back( + webrtc::AudioCodecSpec{{"codec3", 8000, 1}, {8000, 1, 64000}}); + specs.push_back( + webrtc::AudioCodecSpec{{"codec4", 8000, 2}, {8000, 1, 64000}}); + + webrtc::scoped_refptr + unused_encoder_factory = + webrtc::MockAudioEncoderFactory::CreateUnusedFactory(); + webrtc::scoped_refptr + mock_decoder_factory = + webrtc::make_ref_counted(); + EXPECT_CALL(*mock_decoder_factory.get(), GetSupportedDecoders()) + .WillOnce(Return(specs)); + webrtc::scoped_refptr adm = + webrtc::test::MockAudioDeviceModule::CreateNice(); + + scoped_refptr apm = + use_null_apm ? nullptr : BuiltinAudioProcessingBuilder().Build(env); + webrtc::WebRtcVoiceEngine engine(env, adm, unused_encoder_factory, + mock_decoder_factory, nullptr, apm, + nullptr); + engine.Init(); + auto codecs = engine.LegacyRecvCodecs(); + EXPECT_EQ(7u, codecs.size()); + + // Rather than just ASSERTing that there are enough codecs, ensure that we + // can check the actual values safely, to provide better test results. + auto get_codec = [&codecs](size_t index) -> const webrtc::Codec& { + static const webrtc::Codec missing_codec = + webrtc::CreateAudioCodec(0, "", 0, 0); + if (codecs.size() > index) + return codecs[index]; + return missing_codec; + }; + + // Ensure the general codecs are generated first and in order. + for (size_t i = 0; i != specs.size(); ++i) { + EXPECT_EQ(specs[i].format.name, get_codec(i).name); + EXPECT_EQ(specs[i].format.clockrate_hz, get_codec(i).clockrate); + EXPECT_EQ(specs[i].format.num_channels, get_codec(i).channels); + EXPECT_EQ(specs[i].format.parameters, get_codec(i).params); + } + + // Find the index of a codec, or -1 if not found, so that we can easily + // check supplementary codecs are ordered after the general codecs. 
+ auto find_codec = [&codecs](const webrtc::SdpAudioFormat& format) -> int { + for (size_t i = 0; i != codecs.size(); ++i) { + const webrtc::Codec& codec = codecs[i]; + if (absl::EqualsIgnoreCase(codec.name, format.name) && + codec.clockrate == format.clockrate_hz && + codec.channels == format.num_channels) { + return webrtc::checked_cast(i); + } + } + return -1; + }; + + // Ensure all supplementary codecs are generated last. Their internal + // ordering is not important. Without this cast, the comparison turned + // unsigned and, thus, failed for -1. + const int num_specs = static_cast(specs.size()); + EXPECT_GE(find_codec({"cn", 8000, 1}), num_specs); + EXPECT_EQ(find_codec({"cn", 16000, 1}), -1); + EXPECT_EQ(find_codec({"cn", 32000, 1}), -1); + EXPECT_EQ(find_codec({"cn", 48000, 1}), -1); + EXPECT_GE(find_codec({"telephone-event", 8000, 1}), num_specs); + EXPECT_EQ(find_codec({"telephone-event", 16000, 1}), -1); + EXPECT_EQ(find_codec({"telephone-event", 32000, 1}), -1); + EXPECT_GE(find_codec({"telephone-event", 48000, 1}), num_specs); + } +} + +TEST(WebRtcVoiceEngineTest, CollectRecvCodecsWithLatePtAssignment) { + webrtc::test::ScopedKeyValueConfig field_trials( + "WebRTC-PayloadTypesInTransport/Enabled/"); + Environment env = CreateEnvironment(&field_trials); + for (bool use_null_apm : {false, true}) { std::vector specs; webrtc::AudioCodecSpec spec1{{"codec1", 48000, 2, {{"param1", "value1"}}}, @@ -3950,43 +4136,41 @@ TEST(WebRtcVoiceEngineTest, CollectRecvCodecs) { spec1.info.allow_comfort_noise = false; spec1.info.supports_network_adaption = true; specs.push_back(spec1); - webrtc::AudioCodecSpec spec2{{"codec2", 32000, 1}, {32000, 1, 32000}}; - spec2.info.allow_comfort_noise = false; + webrtc::AudioCodecSpec spec2{{"codec2", 48000, 2, {{"param1", "value1"}}}, + {48000, 2, 16000, 10000, 20000}}; + // We do not support 48khz CN. + spec2.info.allow_comfort_noise = true; specs.push_back(spec2); - specs.push_back(webrtc::AudioCodecSpec{ - {"codec3", 16000, 1, {{"param1", "value1b"}, {"param2", "value2"}}}, - {16000, 1, 13300}}); specs.push_back( - webrtc::AudioCodecSpec{{"codec4", 8000, 1}, {8000, 1, 64000}}); + webrtc::AudioCodecSpec{{"codec3", 8000, 1}, {8000, 1, 64000}}); specs.push_back( - webrtc::AudioCodecSpec{{"codec5", 8000, 2}, {8000, 1, 64000}}); - - std::unique_ptr task_queue_factory = - webrtc::CreateDefaultTaskQueueFactory(); - rtc::scoped_refptr unused_encoder_factory = - webrtc::MockAudioEncoderFactory::CreateUnusedFactory(); - rtc::scoped_refptr mock_decoder_factory = - rtc::make_ref_counted(); + webrtc::AudioCodecSpec{{"codec4", 8000, 2}, {8000, 1, 64000}}); + + webrtc::scoped_refptr + unused_encoder_factory = + webrtc::MockAudioEncoderFactory::CreateUnusedFactory(); + webrtc::scoped_refptr + mock_decoder_factory = + webrtc::make_ref_counted(); EXPECT_CALL(*mock_decoder_factory.get(), GetSupportedDecoders()) .WillOnce(Return(specs)); - rtc::scoped_refptr adm = + webrtc::scoped_refptr adm = webrtc::test::MockAudioDeviceModule::CreateNice(); - rtc::scoped_refptr apm = - use_null_apm ? nullptr : webrtc::AudioProcessingBuilder().Create(); - webrtc::FieldTrialBasedConfig field_trials; - cricket::WebRtcVoiceEngine engine( - task_queue_factory.get(), adm.get(), unused_encoder_factory, - mock_decoder_factory, nullptr, apm, nullptr, nullptr, field_trials); + scoped_refptr apm = + use_null_apm ? 
nullptr : BuiltinAudioProcessingBuilder().Build(env); + webrtc::WebRtcVoiceEngine engine(env, adm, unused_encoder_factory, + mock_decoder_factory, nullptr, apm, + nullptr); engine.Init(); - auto codecs = engine.recv_codecs(); - EXPECT_EQ(11u, codecs.size()); + auto codecs = engine.LegacyRecvCodecs(); + EXPECT_EQ(7u, codecs.size()); // Rather than just ASSERTing that there are enough codecs, ensure that we // can check the actual values safely, to provide better test results. - auto get_codec = [&codecs](size_t index) -> const cricket::AudioCodec& { - static const cricket::AudioCodec missing_codec = - cricket::CreateAudioCodec(0, "", 0, 0); + auto get_codec = [&codecs](size_t index) -> const webrtc::Codec& { + static const webrtc::Codec missing_codec = + webrtc::CreateAudioCodec(0, "", 0, 0); if (codecs.size() > index) return codecs[index]; return missing_codec; @@ -4004,11 +4188,11 @@ TEST(WebRtcVoiceEngineTest, CollectRecvCodecs) { // check supplementary codecs are ordered after the general codecs. auto find_codec = [&codecs](const webrtc::SdpAudioFormat& format) -> int { for (size_t i = 0; i != codecs.size(); ++i) { - const cricket::AudioCodec& codec = codecs[i]; + const webrtc::Codec& codec = codecs[i]; if (absl::EqualsIgnoreCase(codec.name, format.name) && codec.clockrate == format.clockrate_hz && codec.channels == format.num_channels) { - return rtc::checked_cast(i); + return webrtc::checked_cast(i); } } return -1; @@ -4019,11 +4203,12 @@ TEST(WebRtcVoiceEngineTest, CollectRecvCodecs) { // unsigned and, thus, failed for -1. const int num_specs = static_cast(specs.size()); EXPECT_GE(find_codec({"cn", 8000, 1}), num_specs); - EXPECT_GE(find_codec({"cn", 16000, 1}), num_specs); + EXPECT_EQ(find_codec({"cn", 16000, 1}), -1); EXPECT_EQ(find_codec({"cn", 32000, 1}), -1); + EXPECT_EQ(find_codec({"cn", 48000, 1}), -1); EXPECT_GE(find_codec({"telephone-event", 8000, 1}), num_specs); - EXPECT_GE(find_codec({"telephone-event", 16000, 1}), num_specs); - EXPECT_GE(find_codec({"telephone-event", 32000, 1}), num_specs); + EXPECT_EQ(find_codec({"telephone-event", 16000, 1}), -1); + EXPECT_EQ(find_codec({"telephone-event", 32000, 1}), -1); EXPECT_GE(find_codec({"telephone-event", 48000, 1}), num_specs); } } diff --git a/media/sctp/OWNERS b/media/sctp/OWNERS index da2f0178a8..489385182e 100644 --- a/media/sctp/OWNERS +++ b/media/sctp/OWNERS @@ -1,3 +1,2 @@ boivie@webrtc.org deadbeef@webrtc.org -orphis@webrtc.org diff --git a/media/sctp/dcsctp_transport.cc b/media/sctp/dcsctp_transport.cc index 525075468c..af2e6eaed3 100644 --- a/media/sctp/dcsctp_transport.cc +++ b/media/sctp/dcsctp_transport.cc @@ -11,22 +11,42 @@ #include "media/sctp/dcsctp_transport.h" #include +#include #include +#include #include +#include +#include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" -#include "media/base/media_channel.h" +#include "api/data_channel_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/environment/environment.h" +#include "api/priority.h" +#include "api/rtc_error.h" +#include "api/sctp_transport_interface.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/transport/data_channel_transport_interface.h" +#include "media/sctp/sctp_transport_internal.h" +#include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/public/dcsctp_options.h" +#include "net/dcsctp/public/dcsctp_socket.h" #include "net/dcsctp/public/dcsctp_socket_factory.h" #include 
"net/dcsctp/public/packet_observer.h" #include "net/dcsctp/public/text_pcap_packet_observer.h" +#include "net/dcsctp/public/timeout.h" #include "net/dcsctp/public/types.h" #include "p2p/base/packet_transport_internal.h" +#include "p2p/dtls/dtls_transport_internal.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/socket.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/thread.h" @@ -68,7 +88,7 @@ WebrtcPPID ToPPID(DataMessageType message_type, size_t size) { } } -absl::optional ToDataMessageType(dcsctp::PPID ppid) { +std::optional ToDataMessageType(dcsctp::PPID ppid) { switch (static_cast(ppid.value())) { case WebrtcPPID::kDCEP: return DataMessageType::kControl; @@ -81,21 +101,20 @@ absl::optional ToDataMessageType(dcsctp::PPID ppid) { case WebrtcPPID::kBinaryEmpty: return DataMessageType::kBinary; } - return absl::nullopt; + return std::nullopt; } -absl::optional ToErrorCauseCode( - dcsctp::ErrorKind error) { +std::optional ToErrorCauseCode(dcsctp::ErrorKind error) { switch (error) { case dcsctp::ErrorKind::kParseFailed: - return cricket::SctpErrorCauseCode::kUnrecognizedParameters; + return SctpErrorCauseCode::kUnrecognizedParameters; case dcsctp::ErrorKind::kPeerReported: - return cricket::SctpErrorCauseCode::kUserInitiatedAbort; + return SctpErrorCauseCode::kUserInitiatedAbort; case dcsctp::ErrorKind::kWrongSequence: case dcsctp::ErrorKind::kProtocolViolation: - return cricket::SctpErrorCauseCode::kProtocolViolation; + return SctpErrorCauseCode::kProtocolViolation; case dcsctp::ErrorKind::kResourceExhaustion: - return cricket::SctpErrorCauseCode::kOutOfResource; + return SctpErrorCauseCode::kOutOfResource; case dcsctp::ErrorKind::kTooManyRetries: case dcsctp::ErrorKind::kUnsupportedOperation: case dcsctp::ErrorKind::kNoError: @@ -103,7 +122,7 @@ absl::optional ToErrorCauseCode( // No SCTP error cause code matches those break; } - return absl::nullopt; + return std::nullopt; } bool IsEmptyPPID(dcsctp::PPID ppid) { @@ -113,23 +132,23 @@ bool IsEmptyPPID(dcsctp::PPID ppid) { } } // namespace -DcSctpTransport::DcSctpTransport(rtc::Thread* network_thread, - rtc::PacketTransportInternal* transport, - Clock* clock) - : DcSctpTransport(network_thread, +DcSctpTransport::DcSctpTransport(const Environment& env, + Thread* network_thread, + DtlsTransportInternal* transport) + : DcSctpTransport(env, + network_thread, transport, - clock, std::make_unique()) {} DcSctpTransport::DcSctpTransport( - rtc::Thread* network_thread, - rtc::PacketTransportInternal* transport, - Clock* clock, + const Environment& env, + Thread* network_thread, + DtlsTransportInternal* transport, std::unique_ptr socket_factory) : network_thread_(network_thread), transport_(transport), - clock_(clock), - random_(clock_->TimeInMicroseconds()), + env_(env), + random_(env_.clock().TimeInMicroseconds()), socket_factory_(std::move(socket_factory)), task_queue_timeout_factory_( *network_thread, @@ -139,7 +158,7 @@ DcSctpTransport::DcSctpTransport( }) { RTC_DCHECK_RUN_ON(network_thread_); static std::atomic instance_count = 0; - rtc::StringBuilder sb; + StringBuilder sb; sb << debug_name_ << instance_count++; debug_name_ = sb.Release(); ConnectTransportSignals(); @@ -164,8 +183,7 @@ void DcSctpTransport::SetDataChannelSink(DataChannelSink* sink) { } } -void DcSctpTransport::SetDtlsTransport( - rtc::PacketTransportInternal* transport) { +void DcSctpTransport::SetDtlsTransport(DtlsTransportInternal* 
transport) { RTC_DCHECK_RUN_ON(network_thread_); DisconnectTransportSignals(); transport_ = transport; @@ -173,24 +191,28 @@ void DcSctpTransport::SetDtlsTransport( MaybeConnectSocket(); } -bool DcSctpTransport::Start(int local_sctp_port, - int remote_sctp_port, - int max_message_size) { +bool DcSctpTransport::Start(const SctpOptions& options) { RTC_DCHECK_RUN_ON(network_thread_); - RTC_DCHECK(max_message_size > 0); - RTC_DLOG(LS_INFO) << debug_name_ << "->Start(local=" << local_sctp_port - << ", remote=" << remote_sctp_port - << ", max_message_size=" << max_message_size << ")"; + RTC_DCHECK(options.max_message_size > 0); + RTC_DLOG(LS_INFO) << debug_name_ << "->Start(local=" << options.local_port + << ", remote=" << options.remote_port + << ", max_message_size=" << options.max_message_size << ")"; if (!socket_) { - dcsctp::DcSctpOptions options; - options.local_port = local_sctp_port; - options.remote_port = remote_sctp_port; - options.max_message_size = max_message_size; - options.max_timer_backoff_duration = kMaxTimerBackoffDuration; + dcsctp::DcSctpOptions dcsctp_options; + dcsctp_options.local_port = options.local_port; + dcsctp_options.remote_port = options.remote_port; + dcsctp_options.max_message_size = options.max_message_size; + dcsctp_options.max_timer_backoff_duration = kMaxTimerBackoffDuration; // Don't close the connection automatically on too many retransmissions. - options.max_retransmissions = absl::nullopt; - options.max_init_retransmits = absl::nullopt; + dcsctp_options.max_retransmissions = std::nullopt; + dcsctp_options.max_init_retransmits = std::nullopt; + dcsctp_options.per_stream_send_queue_limit = + DataChannelInterface::MaxSendQueueSize(); + // This is just set to avoid denial-of-service. Practically unlimited. + dcsctp_options.max_send_buffer_size = std::numeric_limits::max(); + dcsctp_options.enable_message_interleaving = + env_.field_trials().IsEnabled("WebRTC-DataChannelMessageInterleaving"); std::unique_ptr packet_observer; if (RTC_LOG_CHECK_LEVEL(LS_VERBOSE)) { @@ -198,32 +220,43 @@ bool DcSctpTransport::Start(int local_sctp_port, std::make_unique(debug_name_); } - socket_ = socket_factory_->Create(debug_name_, *this, - std::move(packet_observer), options); + socket_ = socket_factory_->Create( + debug_name_, *this, std::move(packet_observer), dcsctp_options); } else { - if (local_sctp_port != socket_->options().local_port || - remote_sctp_port != socket_->options().remote_port) { + if (options.local_port != socket_->options().local_port || + options.remote_port != socket_->options().remote_port) { RTC_LOG(LS_ERROR) - << debug_name_ << "->Start(local=" << local_sctp_port - << ", remote=" << remote_sctp_port + << debug_name_ << "->Start(local=" << options.local_port + << ", remote=" << options.remote_port << "): Can't change ports on already started transport."; return false; } - socket_->SetMaxMessageSize(max_message_size); + socket_->SetMaxMessageSize(options.max_message_size); } MaybeConnectSocket(); + for (const auto& [sid, stream_state] : stream_states_) { + socket_->SetStreamPriority(sid, stream_state.priority); + } + return true; } -bool DcSctpTransport::OpenStream(int sid) { +bool DcSctpTransport::OpenStream(int sid, PriorityValue priority) { RTC_DCHECK_RUN_ON(network_thread_); - RTC_DLOG(LS_INFO) << debug_name_ << "->OpenStream(" << sid << ")."; + RTC_DLOG(LS_INFO) << debug_name_ << "->OpenStream(" << sid << ", " + << priority.value() << ")."; StreamState stream_state; + stream_state.priority = dcsctp::StreamPriority(priority.value()); 
stream_states_.insert_or_assign(dcsctp::StreamID(static_cast(sid)), stream_state); + if (socket_) { + socket_->SetStreamPriority(dcsctp::StreamID(sid), + dcsctp::StreamPriority(priority.value())); + } + return true; } @@ -259,7 +292,7 @@ bool DcSctpTransport::ResetStream(int sid) { RTCError DcSctpTransport::SendData(int sid, const SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload) { + const CopyOnWriteBuffer& payload) { RTC_DCHECK_RUN_ON(network_thread_); RTC_DLOG(LS_VERBOSE) << debug_name_ << "->SendData(sid=" << sid << ", type=" << static_cast(params.type) @@ -340,11 +373,11 @@ RTCError DcSctpTransport::SendData(int sid, ready_to_send_data_ = false; return RTCError(RTCErrorType::RESOURCE_EXHAUSTED); default: - absl::string_view message = dcsctp::ToString(error); + absl::string_view error_message = dcsctp::ToString(error); RTC_LOG(LS_ERROR) << debug_name_ << "->SendData(...): send() failed with error " - << message << "."; - return RTCError(RTCErrorType::NETWORK_ERROR, message); + << error_message << "."; + return RTCError(RTCErrorType::NETWORK_ERROR, error_message); } } @@ -362,24 +395,42 @@ int DcSctpTransport::max_message_size() const { return socket_->options().max_message_size; } -absl::optional DcSctpTransport::max_outbound_streams() const { +std::optional DcSctpTransport::max_outbound_streams() const { if (!socket_) - return absl::nullopt; + return std::nullopt; return socket_->options().announced_maximum_outgoing_streams; } -absl::optional DcSctpTransport::max_inbound_streams() const { +std::optional DcSctpTransport::max_inbound_streams() const { if (!socket_) - return absl::nullopt; + return std::nullopt; return socket_->options().announced_maximum_incoming_streams; } +size_t DcSctpTransport::buffered_amount(int sid) const { + if (!socket_) + return 0; + return socket_->buffered_amount(dcsctp::StreamID(sid)); +} + +size_t DcSctpTransport::buffered_amount_low_threshold(int sid) const { + if (!socket_) + return 0; + return socket_->buffered_amount_low_threshold(dcsctp::StreamID(sid)); +} + +void DcSctpTransport::SetBufferedAmountLowThreshold(int sid, size_t bytes) { + if (!socket_) + return; + socket_->SetBufferedAmountLowThreshold(dcsctp::StreamID(sid), bytes); +} + void DcSctpTransport::set_debug_name_for_testing(const char* debug_name) { debug_name_ = debug_name; } SendPacketStatus DcSctpTransport::SendPacketWithStatus( - rtc::ArrayView data) { + ArrayView data) { RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(socket_); @@ -401,14 +452,14 @@ SendPacketStatus DcSctpTransport::SendPacketWithStatus( auto result = transport_->SendPacket(reinterpret_cast(data.data()), - data.size(), rtc::PacketOptions(), 0); + data.size(), AsyncSocketPacketOptions(), 0); if (result < 0) { RTC_LOG(LS_WARNING) << debug_name_ << "->SendPacket(length=" << data.size() << ") failed with error: " << transport_->GetError() << "."; - if (rtc::IsBlockingError(transport_->GetError())) { + if (IsBlockingError(transport_->GetError())) { return SendPacketStatus::kTemporaryFailure; } return SendPacketStatus::kError; @@ -422,7 +473,7 @@ std::unique_ptr DcSctpTransport::CreateTimeout( } dcsctp::TimeMs DcSctpTransport::TimeMillis() { - return dcsctp::TimeMs(clock_->TimeInMilliseconds()); + return dcsctp::TimeMs(env_.clock().TimeInMilliseconds()); } uint32_t DcSctpTransport::GetRandomInt(uint32_t low, uint32_t high) { @@ -439,6 +490,13 @@ void DcSctpTransport::OnTotalBufferedAmountLow() { } } +void DcSctpTransport::OnBufferedAmountLow(dcsctp::StreamID stream_id) { + RTC_DCHECK_RUN_ON(network_thread_); + 
if (data_channel_sink_) { + data_channel_sink_->OnBufferedAmountLow(*stream_id); + } +} + void DcSctpTransport::OnMessageReceived(dcsctp::DcSctpMessage message) { RTC_DCHECK_RUN_ON(network_thread_); RTC_DLOG(LS_VERBOSE) << debug_name_ << "->OnMessageReceived(sid=" @@ -522,7 +580,7 @@ void DcSctpTransport::OnConnectionRestarted() { } void DcSctpTransport::OnStreamsResetFailed( - rtc::ArrayView outgoing_streams, + ArrayView outgoing_streams, absl::string_view reason) { // TODO(orphis): Need a test to check for correct behavior for (auto& stream_id : outgoing_streams) { @@ -534,7 +592,7 @@ void DcSctpTransport::OnStreamsResetFailed( } void DcSctpTransport::OnStreamsResetPerformed( - rtc::ArrayView outgoing_streams) { + ArrayView outgoing_streams) { RTC_DCHECK_RUN_ON(network_thread_); for (auto& stream_id : outgoing_streams) { RTC_LOG(LS_INFO) << debug_name_ @@ -562,7 +620,7 @@ void DcSctpTransport::OnStreamsResetPerformed( } void DcSctpTransport::OnIncomingStreamsReset( - rtc::ArrayView incoming_streams) { + ArrayView incoming_streams) { RTC_DCHECK_RUN_ON(network_thread_); for (auto& stream_id : incoming_streams) { RTC_LOG(LS_INFO) << debug_name_ @@ -605,9 +663,22 @@ void DcSctpTransport::ConnectTransportSignals() { } transport_->SignalWritableState.connect( this, &DcSctpTransport::OnTransportWritableState); - transport_->SignalReadPacket.connect(this, - &DcSctpTransport::OnTransportReadPacket); - transport_->SignalClosed.connect(this, &DcSctpTransport::OnTransportClosed); + transport_->RegisterReceivedPacketCallback( + this, + [&](PacketTransportInternal* transport, const ReceivedIpPacket& packet) { + OnTransportReadPacket(transport, packet); + }); + transport_->SetOnCloseCallback([this]() { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DLOG(LS_VERBOSE) << debug_name_ << "->OnTransportClosed()."; + if (data_channel_sink_) { + data_channel_sink_->OnTransportClosed({}); + } + }); + transport_->SubscribeDtlsTransportState( + this, [this](DtlsTransportInternal* transport, DtlsTransportState state) { + OnDtlsTransportState(transport, state); + }); } void DcSctpTransport::DisconnectTransportSignals() { @@ -616,46 +687,56 @@ void DcSctpTransport::DisconnectTransportSignals() { return; } transport_->SignalWritableState.disconnect(this); - transport_->SignalReadPacket.disconnect(this); - transport_->SignalClosed.disconnect(this); + transport_->DeregisterReceivedPacketCallback(this); + transport_->SetOnCloseCallback(nullptr); + transport_->UnsubscribeDtlsTransportState(this); } void DcSctpTransport::OnTransportWritableState( - rtc::PacketTransportInternal* transport) { + PacketTransportInternal* transport) { RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK_EQ(transport_, transport); RTC_DLOG(LS_VERBOSE) << debug_name_ << "->OnTransportWritableState(), writable=" - << transport->writable(); + << transport->writable() << " socket: " + << (socket_ ? std::to_string( + static_cast(socket_->state())) + : "UNSET"); MaybeConnectSocket(); } +void DcSctpTransport::OnDtlsTransportState(DtlsTransportInternal* transport, + webrtc::DtlsTransportState state) { + if (state == DtlsTransportState::kNew && socket_) { + // IF DTLS restart (DtlsTransportState::kNew) + // THEN + // restart socket so that we send an SCPT init + // before any outgoing messages. This is needed + // after DTLS fingerprint changed since peer will discard + // messages with crypto derived from old fingerprint. 
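// [Editor's aside, not part of the patch] The per-stream buffered-amount
// plumbing added in this file pairs transport-level accessors with a new
// dcsctp callback: a data-channel implementation arms a threshold and is
// notified once the stream's send queue drains below it. A hedged usage
// sketch (names match this diff; the 64 KiB value is arbitrary):
//
//   transport->SetBufferedAmountLowThreshold(/*sid=*/1, /*bytes=*/64 * 1024);
//   size_t queued = transport->buffered_amount(/*sid=*/1);
//   // When the dcsctp socket reports OnBufferedAmountLow(StreamID(1)),
//   // DcSctpTransport forwards it as DataChannelSink::OnBufferedAmountLow(1).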
+ RTC_DLOG(LS_INFO) << debug_name_ << " DTLS restart"; + dcsctp::DcSctpOptions options = socket_->options(); + socket_.reset(); + RTC_DCHECK_LE(options.max_message_size, kSctpSendBufferSize); + Start({.local_port = options.local_port, + .remote_port = options.remote_port, + .max_message_size = static_cast(options.max_message_size)}); + } +} + void DcSctpTransport::OnTransportReadPacket( - rtc::PacketTransportInternal* transport, - const char* data, - size_t length, - const int64_t& /* packet_time_us */, - int flags) { + PacketTransportInternal* /* transport */, + const ReceivedIpPacket& packet) { RTC_DCHECK_RUN_ON(network_thread_); - if (flags) { + if (packet.decryption_info() != ReceivedIpPacket::kDtlsDecrypted) { // We are only interested in SCTP packets. return; } - RTC_DLOG(LS_VERBOSE) << debug_name_ - << "->OnTransportReadPacket(), length=" << length; + RTC_DLOG(LS_VERBOSE) << debug_name_ << "->OnTransportReadPacket(), length=" + << packet.payload().size(); if (socket_) { - socket_->ReceivePacket(rtc::ArrayView( - reinterpret_cast(data), length)); - } -} - -void DcSctpTransport::OnTransportClosed( - rtc::PacketTransportInternal* transport) { - RTC_DCHECK_RUN_ON(network_thread_); - RTC_DLOG(LS_VERBOSE) << debug_name_ << "->OnTransportClosed()."; - if (data_channel_sink_) { - data_channel_sink_->OnTransportClosed({}); + socket_->ReceivePacket(packet.payload()); } } diff --git a/media/sctp/dcsctp_transport.h b/media/sctp/dcsctp_transport.h index 7ae0d64134..582e273d48 100644 --- a/media/sctp/dcsctp_transport.h +++ b/media/sctp/dcsctp_transport.h @@ -11,107 +11,116 @@ #ifndef MEDIA_SCTP_DCSCTP_TRANSPORT_H_ #define MEDIA_SCTP_DCSCTP_TRANSPORT_H_ +#include +#include +#include #include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/dtls_transport_interface.h" +#include "api/environment/environment.h" +#include "api/priority.h" +#include "api/rtc_error.h" +#include "api/sctp_transport_interface.h" #include "api/task_queue/task_queue_base.h" +#include "api/transport/data_channel_transport_interface.h" #include "media/sctp/sctp_transport_internal.h" -#include "net/dcsctp/public/dcsctp_options.h" +#include "net/dcsctp/public/dcsctp_message.h" #include "net/dcsctp/public/dcsctp_socket.h" #include "net/dcsctp/public/dcsctp_socket_factory.h" +#include "net/dcsctp/public/timeout.h" #include "net/dcsctp/public/types.h" #include "net/dcsctp/timer/task_queue_timeout.h" #include "p2p/base/packet_transport_internal.h" +#include "p2p/dtls/dtls_transport_internal.h" #include "rtc_base/containers/flat_map.h" #include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/random.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" -#include "system_wrappers/include/clock.h" namespace webrtc { -class DcSctpTransport : public cricket::SctpTransportInternal, +class DcSctpTransport : public SctpTransportInternal, public dcsctp::DcSctpSocketCallbacks, public sigslot::has_slots<> { public: - DcSctpTransport(rtc::Thread* network_thread, - rtc::PacketTransportInternal* transport, - Clock* clock); - DcSctpTransport(rtc::Thread* network_thread, - rtc::PacketTransportInternal* transport, - Clock* clock, + DcSctpTransport(const Environment& env, + Thread* network_thread, + DtlsTransportInternal* transport); + DcSctpTransport(const Environment& env, + Thread* network_thread, + DtlsTransportInternal* transport, 
std::unique_ptr socket_factory); ~DcSctpTransport() override; - // cricket::SctpTransportInternal + // webrtc::SctpTransportInternal void SetOnConnectedCallback(std::function callback) override; void SetDataChannelSink(DataChannelSink* sink) override; - void SetDtlsTransport(rtc::PacketTransportInternal* transport) override; - bool Start(int local_sctp_port, - int remote_sctp_port, - int max_message_size) override; - bool OpenStream(int sid) override; + void SetDtlsTransport(DtlsTransportInternal* transport) override; + bool Start(const SctpOptions& options) override; + bool OpenStream(int sid, PriorityValue priority) override; bool ResetStream(int sid) override; RTCError SendData(int sid, const SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload) override; + const CopyOnWriteBuffer& payload) override; bool ReadyToSendData() override; int max_message_size() const override; - absl::optional max_outbound_streams() const override; - absl::optional max_inbound_streams() const override; + std::optional max_outbound_streams() const override; + std::optional max_inbound_streams() const override; + size_t buffered_amount(int sid) const override; + size_t buffered_amount_low_threshold(int sid) const override; + void SetBufferedAmountLowThreshold(int sid, size_t bytes) override; void set_debug_name_for_testing(const char* debug_name) override; private: // dcsctp::DcSctpSocketCallbacks dcsctp::SendPacketStatus SendPacketWithStatus( - rtc::ArrayView data) override; + ArrayView data) override; std::unique_ptr CreateTimeout( TaskQueueBase::DelayPrecision precision) override; dcsctp::TimeMs TimeMillis() override; uint32_t GetRandomInt(uint32_t low, uint32_t high) override; void OnTotalBufferedAmountLow() override; + void OnBufferedAmountLow(dcsctp::StreamID stream_id) override; void OnMessageReceived(dcsctp::DcSctpMessage message) override; void OnError(dcsctp::ErrorKind error, absl::string_view message) override; void OnAborted(dcsctp::ErrorKind error, absl::string_view message) override; void OnConnected() override; void OnClosed() override; void OnConnectionRestarted() override; - void OnStreamsResetFailed( - rtc::ArrayView outgoing_streams, - absl::string_view reason) override; + void OnStreamsResetFailed(ArrayView outgoing_streams, + absl::string_view reason) override; void OnStreamsResetPerformed( - rtc::ArrayView outgoing_streams) override; + ArrayView outgoing_streams) override; void OnIncomingStreamsReset( - rtc::ArrayView incoming_streams) override; + ArrayView incoming_streams) override; // Transport callbacks void ConnectTransportSignals(); void DisconnectTransportSignals(); - void OnTransportWritableState(rtc::PacketTransportInternal* transport); - void OnTransportReadPacket(rtc::PacketTransportInternal* transport, - const char* data, - size_t length, - const int64_t& /* packet_time_us */, - int flags); - void OnTransportClosed(rtc::PacketTransportInternal* transport); - + void OnTransportWritableState(PacketTransportInternal* transport); + void OnTransportReadPacket(PacketTransportInternal* transport, + const ReceivedIpPacket& packet); + void OnDtlsTransportState(DtlsTransportInternal* transport, + webrtc::DtlsTransportState); void MaybeConnectSocket(); - rtc::Thread* network_thread_; - rtc::PacketTransportInternal* transport_; - Clock* clock_; + Thread* network_thread_; + DtlsTransportInternal* transport_; + Environment env_; Random random_; std::unique_ptr socket_factory_; dcsctp::TaskQueueTimeoutFactory task_queue_timeout_factory_; std::unique_ptr socket_; std::string 
debug_name_ = "DcSctpTransport"; - rtc::CopyOnWriteBuffer receive_buffer_; + CopyOnWriteBuffer receive_buffer_; // Used to keep track of the state of data channels. // Reset needs to happen both ways before signaling the transport @@ -126,6 +135,9 @@ class DcSctpTransport : public cricket::SctpTransportInternal, bool incoming_reset_done = false; // True when the local connection received OnStreamsResetPerformed bool outgoing_reset_done = false; + // Priority of the stream according to RFC 8831, section 6.4 + dcsctp::StreamPriority priority = + dcsctp::StreamPriority(PriorityValue(webrtc::Priority::kLow).value()); }; // Map of all currently open or closing data channels diff --git a/media/sctp/dcsctp_transport_unittest.cc b/media/sctp/dcsctp_transport_unittest.cc index 65fc3a1690..d629722078 100644 --- a/media/sctp/dcsctp_transport_unittest.cc +++ b/media/sctp/dcsctp_transport_unittest.cc @@ -11,16 +11,29 @@ #include "media/sctp/dcsctp_transport.h" #include +#include #include +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/priority.h" +#include "api/rtc_error.h" +#include "api/transport/data_channel_transport_interface.h" +#include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/public/dcsctp_options.h" +#include "net/dcsctp/public/dcsctp_socket.h" #include "net/dcsctp/public/mock_dcsctp_socket.h" #include "net/dcsctp/public/mock_dcsctp_socket_factory.h" -#include "p2p/base/fake_packet_transport.h" +#include "net/dcsctp/public/types.h" +#include "p2p/dtls/fake_dtls_transport.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/thread.h" +#include "system_wrappers/include/clock.h" +#include "test/gmock.h" #include "test/gtest.h" using ::testing::_; using ::testing::ByMove; -using ::testing::DoAll; using ::testing::ElementsAre; using ::testing::InSequence; using ::testing::Invoke; @@ -31,6 +44,12 @@ using ::testing::ReturnPointee; namespace webrtc { namespace { + +constexpr char kTransportName[] = "transport"; +constexpr int kComponent = 77; + +const PriorityValue kDefaultPriority = PriorityValue(Priority::kLow); + class MockDataChannelSink : public DataChannelSink { public: MOCK_METHOD(void, OnConnected, ()); @@ -38,18 +57,22 @@ class MockDataChannelSink : public DataChannelSink { // DataChannelSink MOCK_METHOD(void, OnDataReceived, - (int, DataMessageType, const rtc::CopyOnWriteBuffer&)); + (int, DataMessageType, const webrtc::CopyOnWriteBuffer&)); MOCK_METHOD(void, OnChannelClosing, (int)); MOCK_METHOD(void, OnChannelClosed, (int)); MOCK_METHOD(void, OnReadyToSend, ()); MOCK_METHOD(void, OnTransportClosed, (RTCError)); + MOCK_METHOD(void, OnBufferedAmountLow, (int channel_id), (override)); }; static_assert(!std::is_abstract_v); class Peer { public: - Peer() : fake_packet_transport_("transport"), simulated_clock_(1000) { + Peer() + : fake_dtls_transport_(kTransportName, kComponent), + simulated_clock_(1000), + env_(CreateEnvironment(&simulated_clock_)) { auto socket_ptr = std::make_unique(); socket_ = socket_ptr.get(); @@ -60,14 +83,15 @@ class Peer { .WillOnce(Return(ByMove(std::move(socket_ptr)))); sctp_transport_ = std::make_unique( - rtc::Thread::Current(), &fake_packet_transport_, &simulated_clock_, + env_, Thread::Current(), &fake_dtls_transport_, std::move(mock_dcsctp_socket_factory)); sctp_transport_->SetDataChannelSink(&sink_); sctp_transport_->SetOnConnectedCallback([this]() { sink_.OnConnected(); }); } - rtc::FakePacketTransport fake_packet_transport_; + FakeDtlsTransport fake_dtls_transport_; 
webrtc::SimulatedClock simulated_clock_; + Environment env_; dcsctp::MockDcSctpSocket* socket_; std::unique_ptr sctp_transport_; NiceMock sink_; @@ -75,9 +99,9 @@ class Peer { } // namespace TEST(DcSctpTransportTest, OpenSequence) { - rtc::AutoThread main_thread; + AutoThread main_thread; Peer peer_a; - peer_a.fake_packet_transport_.SetWritable(true); + peer_a.fake_dtls_transport_.SetWritable(true); EXPECT_CALL(*peer_a.socket_, Connect) .Times(1) @@ -85,21 +109,26 @@ TEST(DcSctpTransportTest, OpenSequence) { &dcsctp::DcSctpSocketCallbacks::OnConnected)); EXPECT_CALL(peer_a.sink_, OnReadyToSend); EXPECT_CALL(peer_a.sink_, OnConnected); - - peer_a.sctp_transport_->Start(5000, 5000, 256 * 1024); + peer_a.sctp_transport_->Start({.local_port = 5000, + .remote_port = 5000, + .max_message_size = 256 * 1024}); } // Tests that the close sequence invoked from one end results in the stream to // be reset from both ends and all the proper signals are sent. TEST(DcSctpTransportTest, CloseSequence) { - rtc::AutoThread main_thread; + AutoThread main_thread; Peer peer_a; Peer peer_b; - peer_a.fake_packet_transport_.SetDestination(&peer_b.fake_packet_transport_, - false); + peer_a.fake_dtls_transport_.SetDestination(&peer_b.fake_dtls_transport_, + false); { InSequence sequence; + EXPECT_CALL( + *peer_a.socket_, + SetStreamPriority(dcsctp::StreamID(1), + dcsctp::StreamPriority(kDefaultPriority.value()))); EXPECT_CALL(*peer_a.socket_, ResetStreams(ElementsAre(dcsctp::StreamID(1)))) .WillOnce(Return(dcsctp::ResetStreamsStatus::kPerformed)); @@ -112,10 +141,14 @@ TEST(DcSctpTransportTest, CloseSequence) { EXPECT_CALL(peer_b.sink_, OnChannelClosed(1)); } - peer_a.sctp_transport_->Start(5000, 5000, 256 * 1024); - peer_b.sctp_transport_->Start(5000, 5000, 256 * 1024); - peer_a.sctp_transport_->OpenStream(1); - peer_b.sctp_transport_->OpenStream(1); + peer_a.sctp_transport_->Start({.local_port = 5000, + .remote_port = 5000, + .max_message_size = 256 * 1024}); + peer_b.sctp_transport_->Start({.local_port = 5000, + .remote_port = 5000, + .max_message_size = 256 * 1024}); + peer_a.sctp_transport_->OpenStream(1, kDefaultPriority); + peer_b.sctp_transport_->OpenStream(1, kDefaultPriority); peer_a.sctp_transport_->ResetStream(1); // Simulate the callbacks from the stream resets @@ -134,11 +167,11 @@ TEST(DcSctpTransportTest, CloseSequence) { // terminates properly. Both peers will think they initiated it, so no // OnClosingProcedureStartedRemotely should be called. 
TEST(DcSctpTransportTest, CloseSequenceSimultaneous) { - rtc::AutoThread main_thread; + AutoThread main_thread; Peer peer_a; Peer peer_b; - peer_a.fake_packet_transport_.SetDestination(&peer_b.fake_packet_transport_, - false); + peer_a.fake_dtls_transport_.SetDestination(&peer_b.fake_dtls_transport_, + false); { InSequence sequence; @@ -154,10 +187,14 @@ TEST(DcSctpTransportTest, CloseSequenceSimultaneous) { EXPECT_CALL(peer_b.sink_, OnChannelClosed(1)); } - peer_a.sctp_transport_->Start(5000, 5000, 256 * 1024); - peer_b.sctp_transport_->Start(5000, 5000, 256 * 1024); - peer_a.sctp_transport_->OpenStream(1); - peer_b.sctp_transport_->OpenStream(1); + peer_a.sctp_transport_->Start({.local_port = 5000, + .remote_port = 5000, + .max_message_size = 256 * 1024}); + peer_b.sctp_transport_->Start({.local_port = 5000, + .remote_port = 5000, + .max_message_size = 256 * 1024}); + peer_a.sctp_transport_->OpenStream(1, kDefaultPriority); + peer_b.sctp_transport_->OpenStream(1, kDefaultPriority); peer_a.sctp_transport_->ResetStream(1); peer_b.sctp_transport_->ResetStream(1); @@ -173,62 +210,94 @@ TEST(DcSctpTransportTest, CloseSequenceSimultaneous) { ->OnIncomingStreamsReset(streams); } +TEST(DcSctpTransportTest, SetStreamPriority) { + AutoThread main_thread; + Peer peer_a; + + { + InSequence sequence; + + EXPECT_CALL( + *peer_a.socket_, + SetStreamPriority(dcsctp::StreamID(1), dcsctp::StreamPriority(1337))); + EXPECT_CALL( + *peer_a.socket_, + SetStreamPriority(dcsctp::StreamID(2), dcsctp::StreamPriority(3141))); + } + + EXPECT_CALL(*peer_a.socket_, Send(_, _)).Times(0); + + peer_a.sctp_transport_->OpenStream(1, PriorityValue(1337)); + peer_a.sctp_transport_->Start({.local_port = 5000, + .remote_port = 5000, + .max_message_size = 256 * 1024}); + peer_a.sctp_transport_->OpenStream(2, PriorityValue(3141)); +} + TEST(DcSctpTransportTest, DiscardMessageClosedChannel) { - rtc::AutoThread main_thread; + AutoThread main_thread; Peer peer_a; EXPECT_CALL(*peer_a.socket_, Send(_, _)).Times(0); - peer_a.sctp_transport_->Start(5000, 5000, 256 * 1024); + peer_a.sctp_transport_->Start({.local_port = 5000, + .remote_port = 5000, + .max_message_size = 256 * 1024}); SendDataParams params; - rtc::CopyOnWriteBuffer payload; + CopyOnWriteBuffer payload; EXPECT_EQ(peer_a.sctp_transport_->SendData(1, params, payload).type(), RTCErrorType::INVALID_STATE); } TEST(DcSctpTransportTest, DiscardMessageClosingChannel) { - rtc::AutoThread main_thread; + AutoThread main_thread; Peer peer_a; EXPECT_CALL(*peer_a.socket_, Send(_, _)).Times(0); - peer_a.sctp_transport_->OpenStream(1); - peer_a.sctp_transport_->Start(5000, 5000, 256 * 1024); + peer_a.sctp_transport_->OpenStream(1, kDefaultPriority); + peer_a.sctp_transport_->Start({.local_port = 5000, + .remote_port = 5000, + .max_message_size = 256 * 1024}); peer_a.sctp_transport_->ResetStream(1); SendDataParams params; - rtc::CopyOnWriteBuffer payload; + CopyOnWriteBuffer payload; EXPECT_EQ(peer_a.sctp_transport_->SendData(1, params, payload).type(), RTCErrorType::INVALID_STATE); } TEST(DcSctpTransportTest, SendDataOpenChannel) { - rtc::AutoThread main_thread; + AutoThread main_thread; Peer peer_a; dcsctp::DcSctpOptions options; EXPECT_CALL(*peer_a.socket_, Send(_, _)).Times(1); EXPECT_CALL(*peer_a.socket_, options()).WillOnce(ReturnPointee(&options)); - peer_a.sctp_transport_->OpenStream(1); - peer_a.sctp_transport_->Start(5000, 5000, 256 * 1024); + peer_a.sctp_transport_->OpenStream(1, kDefaultPriority); + peer_a.sctp_transport_->Start({.local_port = 5000, + .remote_port = 5000, + 
.max_message_size = 256 * 1024}); SendDataParams params; - rtc::CopyOnWriteBuffer payload; + CopyOnWriteBuffer payload; EXPECT_TRUE(peer_a.sctp_transport_->SendData(1, params, payload).ok()); } TEST(DcSctpTransportTest, DeliversMessage) { - rtc::AutoThread main_thread; + AutoThread main_thread; Peer peer_a; EXPECT_CALL(peer_a.sink_, OnDataReceived(1, webrtc::DataMessageType::kBinary, _)) .Times(1); - peer_a.sctp_transport_->OpenStream(1); - peer_a.sctp_transport_->Start(5000, 5000, 256 * 1024); + peer_a.sctp_transport_->OpenStream(1, kDefaultPriority); + peer_a.sctp_transport_->Start({.local_port = 5000, + .remote_port = 5000, + .max_message_size = 256 * 1024}); static_cast(peer_a.sctp_transport_.get()) ->OnMessageReceived( @@ -236,13 +305,15 @@ TEST(DcSctpTransportTest, DeliversMessage) { } TEST(DcSctpTransportTest, DropMessageWithUnknownPpid) { - rtc::AutoThread main_thread; + AutoThread main_thread; Peer peer_a; EXPECT_CALL(peer_a.sink_, OnDataReceived(_, _, _)).Times(0); - peer_a.sctp_transport_->OpenStream(1); - peer_a.sctp_transport_->Start(5000, 5000, 256 * 1024); + peer_a.sctp_transport_->OpenStream(1, kDefaultPriority); + peer_a.sctp_transport_->Start({.local_port = 5000, + .remote_port = 5000, + .max_message_size = 256 * 1024}); static_cast(peer_a.sctp_transport_.get()) ->OnMessageReceived( diff --git a/media/sctp/sctp_transport_factory.cc b/media/sctp/sctp_transport_factory.cc index 457bc5f889..3ccaaaf197 100644 --- a/media/sctp/sctp_transport_factory.cc +++ b/media/sctp/sctp_transport_factory.cc @@ -10,29 +10,34 @@ #include "media/sctp/sctp_transport_factory.h" +#include + +#include "api/environment/environment.h" +#include "media/sctp/sctp_transport_internal.h" +#include "p2p/dtls/dtls_transport_internal.h" #include "rtc_base/system/unused.h" +#include "rtc_base/thread.h" #ifdef WEBRTC_HAVE_DCSCTP -#include "media/sctp/dcsctp_transport.h" // nogncheck -#include "system_wrappers/include/clock.h" // nogncheck +#include "media/sctp/dcsctp_transport.h" // nogncheck #endif -namespace cricket { +namespace webrtc { -SctpTransportFactory::SctpTransportFactory(rtc::Thread* network_thread) +SctpTransportFactory::SctpTransportFactory(Thread* network_thread) : network_thread_(network_thread) { RTC_UNUSED(network_thread_); } std::unique_ptr -SctpTransportFactory::CreateSctpTransport( - rtc::PacketTransportInternal* transport) { +SctpTransportFactory::CreateSctpTransport(const Environment& env, + DtlsTransportInternal* transport) { std::unique_ptr result; #ifdef WEBRTC_HAVE_DCSCTP - result = std::unique_ptr(new webrtc::DcSctpTransport( - network_thread_, transport, webrtc::Clock::GetRealTimeClock())); + result = std::unique_ptr( + new DcSctpTransport(env, network_thread_, transport)); #endif return result; } -} // namespace cricket +} // namespace webrtc diff --git a/media/sctp/sctp_transport_factory.h b/media/sctp/sctp_transport_factory.h index 4fff214129..77ef015727 100644 --- a/media/sctp/sctp_transport_factory.h +++ b/media/sctp/sctp_transport_factory.h @@ -13,23 +13,33 @@ #include +#include "api/environment/environment.h" #include "api/transport/sctp_transport_factory_interface.h" #include "media/sctp/sctp_transport_internal.h" #include "rtc_base/thread.h" -namespace cricket { +namespace webrtc { -class SctpTransportFactory : public webrtc::SctpTransportFactoryInterface { +class SctpTransportFactory : public SctpTransportFactoryInterface { public: - explicit SctpTransportFactory(rtc::Thread* network_thread); + explicit SctpTransportFactory(Thread* network_thread); std::unique_ptr 
CreateSctpTransport( - rtc::PacketTransportInternal* transport) override; + const Environment& env, + DtlsTransportInternal* transport) override; private: - rtc::Thread* network_thread_; + Thread* network_thread_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::SctpTransportFactory; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_SCTP_SCTP_TRANSPORT_FACTORY_H__ diff --git a/media/sctp/sctp_transport_internal.h b/media/sctp/sctp_transport_internal.h index 8a7450f405..934fbde0a7 100644 --- a/media/sctp/sctp_transport_internal.h +++ b/media/sctp/sctp_transport_internal.h @@ -14,22 +14,21 @@ // TODO(deadbeef): Move SCTP code out of media/, and make it not depend on // anything in media/. -#include -#include -#include +#include +#include +#include +#include +#include "api/priority.h" #include "api/rtc_error.h" +#include "api/sctp_transport_interface.h" #include "api/transport/data_channel_transport_interface.h" -#include "media/base/media_channel.h" -#include "p2p/base/packet_transport_internal.h" +#include "p2p/dtls/dtls_transport_internal.h" #include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/thread.h" -namespace cricket { +namespace webrtc { // Constants that are important to API users -// The size of the SCTP association send buffer. 256kB, the usrsctp default. -constexpr int kSctpSendBufferSize = 256 * 1024; // The number of outgoing streams that we'll negotiate. Since stream IDs (SIDs) // are 0-based, the highest usable SID is 1023. @@ -78,31 +77,31 @@ class SctpTransportInternal { virtual ~SctpTransportInternal() {} virtual void SetOnConnectedCallback(std::function callback) = 0; - virtual void SetDataChannelSink(webrtc::DataChannelSink* sink) = 0; + virtual void SetDataChannelSink(DataChannelSink* sink) = 0; // Changes what underlying DTLS transport is uses. Used when switching which // bundled transport the SctpTransport uses. - virtual void SetDtlsTransport(rtc::PacketTransportInternal* transport) = 0; + virtual void SetDtlsTransport(DtlsTransportInternal* transport) = 0; // When Start is called, connects as soon as possible; this can be called // before DTLS completes, in which case the connection will begin when DTLS // completes. This method can be called multiple times, though not if either // of the ports are changed. // - // `local_sctp_port` and `remote_sctp_port` are passed along the wire and the - // listener and connector must be using the same port. They are not related - // to the ports at the IP level. If set to -1, we default to - // kSctpDefaultPort. - // `max_message_size_` sets the max message size on the connection. - // It must be smaller than or equal to kSctpSendBufferSize. - // It can be changed by a secons Start() call. - // + virtual bool Start(const SctpOptions& options) = 0; // TODO(deadbeef): Support calling Start with different local/remote ports // and create a new association? Not clear if this is something we need to // support though. 
See: https://github.com/w3c/webrtc-pc/issues/979 + [[deprecated("Call with SctpOptions")]] virtual bool Start(int local_sctp_port, int remote_sctp_port, - int max_message_size) = 0; + int max_message_size) { + return Start({ + .local_port = local_sctp_port, + .remote_port = remote_sctp_port, + .max_message_size = max_message_size, + }); + } // NOTE: Initially there was a "Stop" method here, but it was never used, so // it was removed. @@ -113,7 +112,7 @@ class SctpTransportInternal { // TODO(deadbeef): Actually implement the "returns false if `sid` can't be // used" part. See: // https://bugs.chromium.org/p/chromium/issues/detail?id=619849 - virtual bool OpenStream(int sid) = 0; + virtual bool OpenStream(int sid, PriorityValue priority) = 0; // The inverse of OpenStream. Begins the closing procedure, which will // eventually result in SignalClosingProcedureComplete on the side that // initiates it, and both SignalClosingProcedureStartedRemotely and @@ -122,9 +121,9 @@ class SctpTransportInternal { // Send data down this channel. // Returns RTCError::OK() if successful an error otherwise. Notably // RTCErrorType::RESOURCE_EXHAUSTED for blocked operations. - virtual webrtc::RTCError SendData(int sid, - const webrtc::SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload) = 0; + virtual RTCError SendData(int sid, + const SendDataParams& params, + const CopyOnWriteBuffer& payload) = 0; // Indicates when the SCTP socket is created and not blocked by congestion // control. This changes to false when SDR_BLOCK is returned from SendData, @@ -136,15 +135,33 @@ class SctpTransportInternal { // Returns the current max message size, set with Start(). virtual int max_message_size() const = 0; // Returns the current negotiated max # of outbound streams. - // Will return absl::nullopt if negotiation is incomplete. - virtual absl::optional max_outbound_streams() const = 0; + // Will return std::nullopt if negotiation is incomplete. + virtual std::optional max_outbound_streams() const = 0; // Returns the current negotiated max # of inbound streams. - virtual absl::optional max_inbound_streams() const = 0; + virtual std::optional max_inbound_streams() const = 0; + // Returns the amount of buffered data in the send queue for a stream. + virtual size_t buffered_amount(int sid) const = 0; + virtual size_t buffered_amount_low_threshold(int sid) const = 0; + virtual void SetBufferedAmountLowThreshold(int sid, size_t bytes) = 0; // Helper for debugging. virtual void set_debug_name_for_testing(const char* debug_name) = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::kMaxSctpSid; +using ::webrtc::kMaxSctpStreams; +using ::webrtc::kMinSctpSid; +using ::webrtc::kSctpDefaultPort; +using ::webrtc::kSpecMaxSctpSid; +using ::webrtc::SctpErrorCauseCode; +using ::webrtc::SctpTransportInternal; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // MEDIA_SCTP_SCTP_TRANSPORT_INTERNAL_H_ diff --git a/modules/BUILD.gn b/modules/BUILD.gn index 4870cb3499..cefed31d2b 100644 --- a/modules/BUILD.gn +++ b/modules/BUILD.gn @@ -30,7 +30,6 @@ group("modules") { rtc_source_set("module_api_public") { sources = [ "include/module_common_types_public.h" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("module_api") { @@ -44,21 +43,6 @@ rtc_source_set("module_fec_api") { } if (rtc_include_tests && !build_with_chromium) { - modules_tests_resources = [ - "../resources/audio_coding/testfile16kHz.pcm", - "../resources/audio_coding/testfile32kHz.pcm", - "../resources/audio_coding/teststereo32kHz.pcm", - "../resources/foreman_cif.yuv", - ] - - if (is_ios) { - bundle_data("modules_tests_bundle_data") { - testonly = true - sources = modules_tests_resources - outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] - } - } - rtc_test("modules_tests") { testonly = true @@ -75,7 +59,7 @@ if (rtc_include_tests && !build_with_chromium) { deps += [ "desktop_capture:desktop_capture_modules_tests" ] } - data = modules_tests_resources + data_deps = [ "../resources:modules_tests_data" ] if (is_android) { use_default_launcher = false @@ -84,129 +68,14 @@ if (rtc_include_tests && !build_with_chromium) { # rtc_test targets. Therefore we include this target here, instead of # in video_coding_modules_tests, where it is actually used. 
"../sdk/android:libjingle_peerconnection_java", - "//sdk/android:native_test_jni_onload", - "//testing/android/native_test:native_test_support", ] shard_timeout = 900 } if (is_ios) { - deps += [ ":modules_tests_bundle_data" ] + deps += [ "../resources:modules_tests_bundle_data" ] } } - - modules_unittests_resources = [ - "../resources/audio_coding/neteq_opus.rtp", - "../resources/audio_coding/neteq_opus_dtx.rtp", - "../resources/audio_coding/neteq_universal_new.rtp", - "../resources/audio_coding/speech_4_channels_48k_one_second.wav", - "../resources/audio_coding/speech_mono_16kHz.pcm", - "../resources/audio_coding/speech_mono_32_48kHz.pcm", - "../resources/audio_coding/testfile16kHz.pcm", - "../resources/audio_coding/testfile32kHz.pcm", - "../resources/audio_coding/testfile_fake_stereo_32kHz.pcm", - "../resources/audio_coding/teststereo32kHz.pcm", - "../resources/audio_device/audio_short16.pcm", - "../resources/audio_device/audio_short44.pcm", - "../resources/audio_device/audio_short48.pcm", - "../resources/audio_processing/agc/agc_audio.pcm", - "../resources/audio_processing/agc/agc_no_circular_buffer.dat", - "../resources/audio_processing/agc/agc_pitch_gain.dat", - "../resources/audio_processing/agc/agc_pitch_lag.dat", - "../resources/audio_processing/agc/agc_spectral_peak.dat", - "../resources/audio_processing/agc/agc_vad.dat", - "../resources/audio_processing/agc/agc_voicing_prob.dat", - "../resources/audio_processing/agc/agc_with_circular_buffer.dat", - "../resources/audio_processing/output_data_fixed.pb", - "../resources/audio_processing/output_data_float.pb", - "../resources/audio_processing/output_data_float_avx2.pb", - "../resources/audio_processing/output_data_mac.pb", - "../resources/audio_processing/transient/ajm-macbook-1-spke16m.pcm", - "../resources/audio_processing/transient/audio16kHz.pcm", - "../resources/audio_processing/transient/audio32kHz.pcm", - "../resources/audio_processing/transient/audio48kHz.pcm", - "../resources/audio_processing/transient/audio8kHz.pcm", - "../resources/audio_processing/transient/detect16kHz.dat", - "../resources/audio_processing/transient/detect32kHz.dat", - "../resources/audio_processing/transient/detect48kHz.dat", - "../resources/audio_processing/transient/detect8kHz.dat", - "../resources/audio_processing/transient/double-utils.dat", - "../resources/audio_processing/transient/float-utils.dat", - "../resources/audio_processing/transient/suppressed16kHz.pcm", - "../resources/audio_processing/transient/suppressed32kHz.pcm", - "../resources/audio_processing/transient/suppressed8kHz.pcm", - "../resources/audio_processing/transient/wpd0.dat", - "../resources/audio_processing/transient/wpd1.dat", - "../resources/audio_processing/transient/wpd2.dat", - "../resources/audio_processing/transient/wpd3.dat", - "../resources/audio_processing/transient/wpd4.dat", - "../resources/audio_processing/transient/wpd5.dat", - "../resources/audio_processing/transient/wpd6.dat", - "../resources/audio_processing/transient/wpd7.dat", - "../resources/deflicker_before_cif_short.yuv", - "../resources/far16_stereo.pcm", - "../resources/far176_stereo.pcm", - "../resources/far192_stereo.pcm", - "../resources/far22_stereo.pcm", - "../resources/far32_stereo.pcm", - "../resources/far44_stereo.pcm", - "../resources/far48_stereo.pcm", - "../resources/far88_stereo.pcm", - "../resources/far8_stereo.pcm", - "../resources/far96_stereo.pcm", - "../resources/foremanColorEnhanced_cif_short.yuv", - "../resources/foreman_cif.yuv", - "../resources/foreman_cif_short.yuv", - 
"../resources/near16_stereo.pcm", - "../resources/near176_stereo.pcm", - "../resources/near192_stereo.pcm", - "../resources/near22_stereo.pcm", - "../resources/near32_stereo.pcm", - "../resources/near44_stereo.pcm", - "../resources/near48_stereo.pcm", - "../resources/near88_stereo.pcm", - "../resources/near8_stereo.pcm", - "../resources/near96_stereo.pcm", - "../resources/ref03.aecdump", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_0_AST.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_0_TOF.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_1_AST.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_1_TOF.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_0_AST.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_0_TOF.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_1_AST.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_1_TOF.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingDelay1_0_AST.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingDelay1_0_TOF.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingLoss1_0_AST.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingLoss1_0_TOF.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_Multi1_1_AST.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_Multi1_1_TOF.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_0_AST.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_0_TOF.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_1_AST.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_1_TOF.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyDelay_0_AST.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyDelay_0_TOF.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyLoss_0_AST.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyLoss_0_TOF.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_UnlimitedSpeed_0_AST.bin", - "../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_UnlimitedSpeed_0_TOF.bin", - "../resources/short_mixed_mono_48.dat", - "../resources/short_mixed_mono_48.pcm", - "../resources/short_mixed_mono_48_arm.dat", - "../resources/short_mixed_stereo_48.dat", - "../resources/short_mixed_stereo_48.pcm", - "../resources/voice_engine/audio_tiny48.wav", - ] - if (is_ios) { - bundle_data("modules_unittests_bundle_data") { - testonly = true - sources = modules_unittests_resources - outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] - } - } - rtc_test("modules_unittests") { testonly = true defines = [] @@ -236,19 +105,16 @@ if (rtc_include_tests && !build_with_chromium) { deps += [ "desktop_capture:desktop_capture_unittests" ] } - data = modules_unittests_resources + data_deps = [ "../resources:modules_unittests_data" ] if (is_android) { use_default_launcher = false - deps += [ - "../sdk/android:libjingle_peerconnection_java", - "//testing/android/native_test:native_test_support", - ] + deps += [ 
"../sdk/android:libjingle_peerconnection_java" ] shard_timeout = 900 } if (is_ios) { info_plist = "../test/ios/Info.plist" - deps += [ ":modules_unittests_bundle_data" ] + deps += [ "../resources:modules_unittests_bundle_data" ] configs += [ "..:common_objc" ] ldflags = [ "-ObjC" ] } diff --git a/modules/async_audio_processing/BUILD.gn b/modules/async_audio_processing/BUILD.gn index 7a7ca20df1..0a8fc58520 100644 --- a/modules/async_audio_processing/BUILD.gn +++ b/modules/async_audio_processing/BUILD.gn @@ -24,7 +24,6 @@ rtc_library("async_audio_processing") { "../../api/task_queue:task_queue", "../../rtc_base:checks", "../../rtc_base:refcount", - "../../rtc_base:rtc_task_queue", ] } diff --git a/modules/async_audio_processing/async_audio_processing.cc b/modules/async_audio_processing/async_audio_processing.cc index 19c08dc3e5..d61b1264fc 100644 --- a/modules/async_audio_processing/async_audio_processing.cc +++ b/modules/async_audio_processing/async_audio_processing.cc @@ -63,7 +63,7 @@ AsyncAudioProcessing::AsyncAudioProcessing( "AsyncAudioProcessing", TaskQueueFactory::Priority::NORMAL)) { frame_processor_.SetSink([this](std::unique_ptr frame) { - task_queue_.PostTask([this, frame = std::move(frame)]() mutable { + task_queue_->PostTask([this, frame = std::move(frame)]() mutable { on_frame_processed_callback_(std::move(frame)); }); }); @@ -80,7 +80,7 @@ AsyncAudioProcessing::AsyncAudioProcessing( "AsyncAudioProcessing", TaskQueueFactory::Priority::NORMAL)) { owned_frame_processor_->SetSink([this](std::unique_ptr frame) { - task_queue_.PostTask([this, frame = std::move(frame)]() mutable { + task_queue_->PostTask([this, frame = std::move(frame)]() mutable { on_frame_processed_callback_(std::move(frame)); }); }); @@ -88,11 +88,11 @@ AsyncAudioProcessing::AsyncAudioProcessing( void AsyncAudioProcessing::Process(std::unique_ptr frame) { if (owned_frame_processor_) { - task_queue_.PostTask([this, frame = std::move(frame)]() mutable { + task_queue_->PostTask([this, frame = std::move(frame)]() mutable { owned_frame_processor_->Process(std::move(frame)); }); } else { - task_queue_.PostTask([this, frame = std::move(frame)]() mutable { + task_queue_->PostTask([this, frame = std::move(frame)]() mutable { frame_processor_.Process(std::move(frame)); }); } diff --git a/modules/async_audio_processing/async_audio_processing.h b/modules/async_audio_processing/async_audio_processing.h index f3ed96959b..c5a28cf54a 100644 --- a/modules/async_audio_processing/async_audio_processing.h +++ b/modules/async_audio_processing/async_audio_processing.h @@ -14,8 +14,8 @@ #include #include "api/audio/audio_frame_processor.h" +#include "api/task_queue/task_queue_base.h" #include "rtc_base/ref_count.h" -#include "rtc_base/task_queue.h" namespace webrtc { @@ -30,7 +30,7 @@ class AsyncAudioProcessing final { public: // Helper class passing AudioFrameProcessor and TaskQueueFactory into // AsyncAudioProcessing constructor. - class Factory : public rtc::RefCountInterface { + class Factory : public RefCountInterface { public: Factory(const Factory&) = delete; Factory& operator=(const Factory&) = delete; @@ -101,7 +101,7 @@ class AsyncAudioProcessing final { // called. 
AudioFrameProcessor& frame_processor_; std::unique_ptr owned_frame_processor_; - rtc::TaskQueue task_queue_; + std::unique_ptr task_queue_; }; } // namespace webrtc diff --git a/modules/audio_coding/BUILD.gn b/modules/audio_coding/BUILD.gn index 8f5019b4c2..6a13b89c4e 100644 --- a/modules/audio_coding/BUILD.gn +++ b/modules/audio_coding/BUILD.gn @@ -22,8 +22,6 @@ rtc_source_set("audio_coding_module_typedefs") { rtc_library("audio_coding") { visibility += [ "*" ] sources = [ - "acm2/acm_receiver.cc", - "acm2/acm_receiver.h", "acm2/acm_remixing.cc", "acm2/acm_remixing.h", "acm2/acm_resampler.cc", @@ -38,7 +36,6 @@ rtc_library("audio_coding") { deps = [ ":audio_coding_module_typedefs", - ":default_neteq_factory", ":neteq", "..:module_api", "..:module_api_public", @@ -46,10 +43,12 @@ rtc_library("audio_coding") { "../../api:function_view", "../../api/audio:audio_frame_api", "../../api/audio_codecs:audio_codecs_api", + "../../api/environment", + "../../api/neteq:default_neteq_factory", "../../api/neteq:neteq_api", + "../../api/units:timestamp", "../../common_audio", "../../common_audio:common_audio_c", - "../../rtc_base:audio_format_to_string", "../../rtc_base:buffer", "../../rtc_base:checks", "../../rtc_base:logging", @@ -58,10 +57,8 @@ rtc_library("audio_coding") { "../../rtc_base/synchronization:mutex", "../../system_wrappers", "../../system_wrappers:metrics", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -76,7 +73,6 @@ rtc_library("legacy_encoded_audio_frame") { "../../rtc_base:buffer", "../../rtc_base:checks", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("webrtc_cng") { @@ -109,7 +105,6 @@ rtc_library("audio_encoder_cng") { "../../common_audio", "../../rtc_base:checks", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("red") { @@ -129,10 +124,7 @@ rtc_library("red") { "../../rtc_base:byte_order", "../../rtc_base:checks", "../../rtc_base:logging", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -154,7 +146,6 @@ rtc_library("g711") { "../../rtc_base:buffer", "../../rtc_base:checks", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] public_deps += [ ":g711_c" ] # no-presubmit-check TODO(webrtc:8603) } @@ -187,7 +178,6 @@ rtc_library("g722") { "../../rtc_base:checks", "../../rtc_base:safe_conversions", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] public_deps += [ ":g722_c" ] # no-presubmit-check TODO(webrtc:8603) } @@ -200,187 +190,6 @@ rtc_library("g722_c") { deps = [ "../third_party/g722:g722_3p" ] } -rtc_library("ilbc") { - visibility += webrtc_default_visibility - poisonous = [ "audio_codecs" ] - sources = [ - "codecs/ilbc/audio_decoder_ilbc.cc", - "codecs/ilbc/audio_decoder_ilbc.h", - "codecs/ilbc/audio_encoder_ilbc.cc", - "codecs/ilbc/audio_encoder_ilbc.h", - ] - - deps = [ - ":legacy_encoded_audio_frame", - "../../api:array_view", - "../../api/audio_codecs:audio_codecs_api", - "../../api/audio_codecs/ilbc:audio_encoder_ilbc_config", - "../../api/units:time_delta", - "../../common_audio", - "../../rtc_base:buffer", - "../../rtc_base:checks", - "../../rtc_base:logging", - "../../rtc_base:safe_conversions", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - public_deps += [ ":ilbc_c" ] # no-presubmit-check 
TODO(webrtc:8603) -} - -rtc_library("ilbc_c") { - poisonous = [ "audio_codecs" ] - sources = [ - "codecs/ilbc/abs_quant.c", - "codecs/ilbc/abs_quant.h", - "codecs/ilbc/abs_quant_loop.c", - "codecs/ilbc/abs_quant_loop.h", - "codecs/ilbc/augmented_cb_corr.c", - "codecs/ilbc/augmented_cb_corr.h", - "codecs/ilbc/bw_expand.c", - "codecs/ilbc/bw_expand.h", - "codecs/ilbc/cb_construct.c", - "codecs/ilbc/cb_construct.h", - "codecs/ilbc/cb_mem_energy.c", - "codecs/ilbc/cb_mem_energy.h", - "codecs/ilbc/cb_mem_energy_augmentation.c", - "codecs/ilbc/cb_mem_energy_augmentation.h", - "codecs/ilbc/cb_mem_energy_calc.c", - "codecs/ilbc/cb_mem_energy_calc.h", - "codecs/ilbc/cb_search.c", - "codecs/ilbc/cb_search.h", - "codecs/ilbc/cb_search_core.c", - "codecs/ilbc/cb_search_core.h", - "codecs/ilbc/cb_update_best_index.c", - "codecs/ilbc/cb_update_best_index.h", - "codecs/ilbc/chebyshev.c", - "codecs/ilbc/chebyshev.h", - "codecs/ilbc/comp_corr.c", - "codecs/ilbc/comp_corr.h", - "codecs/ilbc/constants.c", - "codecs/ilbc/constants.h", - "codecs/ilbc/create_augmented_vec.c", - "codecs/ilbc/create_augmented_vec.h", - "codecs/ilbc/decode.c", - "codecs/ilbc/decode.h", - "codecs/ilbc/decode_residual.c", - "codecs/ilbc/decode_residual.h", - "codecs/ilbc/decoder_interpolate_lsf.c", - "codecs/ilbc/decoder_interpolate_lsf.h", - "codecs/ilbc/defines.h", - "codecs/ilbc/do_plc.c", - "codecs/ilbc/do_plc.h", - "codecs/ilbc/encode.c", - "codecs/ilbc/encode.h", - "codecs/ilbc/energy_inverse.c", - "codecs/ilbc/energy_inverse.h", - "codecs/ilbc/enh_upsample.c", - "codecs/ilbc/enh_upsample.h", - "codecs/ilbc/enhancer.c", - "codecs/ilbc/enhancer.h", - "codecs/ilbc/enhancer_interface.c", - "codecs/ilbc/enhancer_interface.h", - "codecs/ilbc/filtered_cb_vecs.c", - "codecs/ilbc/filtered_cb_vecs.h", - "codecs/ilbc/frame_classify.c", - "codecs/ilbc/frame_classify.h", - "codecs/ilbc/gain_dequant.c", - "codecs/ilbc/gain_dequant.h", - "codecs/ilbc/gain_quant.c", - "codecs/ilbc/gain_quant.h", - "codecs/ilbc/get_cd_vec.c", - "codecs/ilbc/get_cd_vec.h", - "codecs/ilbc/get_lsp_poly.c", - "codecs/ilbc/get_lsp_poly.h", - "codecs/ilbc/get_sync_seq.c", - "codecs/ilbc/get_sync_seq.h", - "codecs/ilbc/hp_input.c", - "codecs/ilbc/hp_input.h", - "codecs/ilbc/hp_output.c", - "codecs/ilbc/hp_output.h", - "codecs/ilbc/ilbc.c", - "codecs/ilbc/ilbc.h", - "codecs/ilbc/index_conv_dec.c", - "codecs/ilbc/index_conv_dec.h", - "codecs/ilbc/index_conv_enc.c", - "codecs/ilbc/index_conv_enc.h", - "codecs/ilbc/init_decode.c", - "codecs/ilbc/init_decode.h", - "codecs/ilbc/init_encode.c", - "codecs/ilbc/init_encode.h", - "codecs/ilbc/interpolate.c", - "codecs/ilbc/interpolate.h", - "codecs/ilbc/interpolate_samples.c", - "codecs/ilbc/interpolate_samples.h", - "codecs/ilbc/lpc_encode.c", - "codecs/ilbc/lpc_encode.h", - "codecs/ilbc/lsf_check.c", - "codecs/ilbc/lsf_check.h", - "codecs/ilbc/lsf_interpolate_to_poly_dec.c", - "codecs/ilbc/lsf_interpolate_to_poly_dec.h", - "codecs/ilbc/lsf_interpolate_to_poly_enc.c", - "codecs/ilbc/lsf_interpolate_to_poly_enc.h", - "codecs/ilbc/lsf_to_lsp.c", - "codecs/ilbc/lsf_to_lsp.h", - "codecs/ilbc/lsf_to_poly.c", - "codecs/ilbc/lsf_to_poly.h", - "codecs/ilbc/lsp_to_lsf.c", - "codecs/ilbc/lsp_to_lsf.h", - "codecs/ilbc/my_corr.c", - "codecs/ilbc/my_corr.h", - "codecs/ilbc/nearest_neighbor.c", - "codecs/ilbc/nearest_neighbor.h", - "codecs/ilbc/pack_bits.c", - "codecs/ilbc/pack_bits.h", - "codecs/ilbc/poly_to_lsf.c", - "codecs/ilbc/poly_to_lsf.h", - "codecs/ilbc/poly_to_lsp.c", - "codecs/ilbc/poly_to_lsp.h", - "codecs/ilbc/refiner.c", - 
"codecs/ilbc/refiner.h", - "codecs/ilbc/simple_interpolate_lsf.c", - "codecs/ilbc/simple_interpolate_lsf.h", - "codecs/ilbc/simple_lpc_analysis.c", - "codecs/ilbc/simple_lpc_analysis.h", - "codecs/ilbc/simple_lsf_dequant.c", - "codecs/ilbc/simple_lsf_dequant.h", - "codecs/ilbc/simple_lsf_quant.c", - "codecs/ilbc/simple_lsf_quant.h", - "codecs/ilbc/smooth.c", - "codecs/ilbc/smooth.h", - "codecs/ilbc/smooth_out_data.c", - "codecs/ilbc/smooth_out_data.h", - "codecs/ilbc/sort_sq.c", - "codecs/ilbc/sort_sq.h", - "codecs/ilbc/split_vq.c", - "codecs/ilbc/split_vq.h", - "codecs/ilbc/state_construct.c", - "codecs/ilbc/state_construct.h", - "codecs/ilbc/state_search.c", - "codecs/ilbc/state_search.h", - "codecs/ilbc/swap_bytes.c", - "codecs/ilbc/swap_bytes.h", - "codecs/ilbc/unpack_bits.c", - "codecs/ilbc/unpack_bits.h", - "codecs/ilbc/vq3.c", - "codecs/ilbc/vq3.h", - "codecs/ilbc/vq4.c", - "codecs/ilbc/vq4.h", - "codecs/ilbc/window32_w32.c", - "codecs/ilbc/window32_w32.h", - "codecs/ilbc/xcorr_coef.c", - "codecs/ilbc/xcorr_coef.h", - ] - - deps = [ - "../../api/audio_codecs:audio_codecs_api", - "../../common_audio", - "../../common_audio:common_audio_c", - "../../rtc_base:checks", - "../../rtc_base:sanitizer", - "../../rtc_base/system:arch", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ] -} - rtc_library("isac_vad") { visibility += [ "../audio_processing/vad:*" ] sources = [ @@ -452,10 +261,7 @@ rtc_library("audio_coding_opus_common") { "../../api/audio_codecs:audio_codecs_api", "../../rtc_base:checks", "../../rtc_base:stringutils", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -473,8 +279,10 @@ rtc_library("webrtc_opus") { ":audio_coding_opus_common", ":audio_network_adaptor", "../../api:array_view", + "../../api:field_trials_view", "../../api/audio_codecs:audio_codecs_api", "../../api/audio_codecs/opus:audio_encoder_opus_config", + "../../api/environment", "../../common_audio", "../../rtc_base:buffer", "../../rtc_base:checks", @@ -486,11 +294,9 @@ rtc_library("webrtc_opus") { "../../rtc_base:safe_minmax", "../../rtc_base:stringutils", "../../rtc_base:timeutils", - "../../system_wrappers:field_trial", - ] - absl_deps = [ + "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] public_deps += # no-presubmit-check TODO(webrtc:8603) [ ":webrtc_opus_wrapper" ] @@ -510,6 +316,7 @@ rtc_library("webrtc_multiopus") { deps = [ ":audio_coding_opus_common", + "../../api:array_view", "../../api/audio_codecs:audio_codecs_api", "../../api/audio_codecs/opus:audio_decoder_opus_config", "../../api/audio_codecs/opus:audio_encoder_opus_config", @@ -518,13 +325,11 @@ rtc_library("webrtc_multiopus") { "../../rtc_base:checks", "../../rtc_base:logging", "../../rtc_base:macromagic", + "../../rtc_base:safe_conversions", "../../rtc_base:safe_minmax", "../../rtc_base:stringutils", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] public_deps += # no-presubmit-check TODO(webrtc:8603) [ ":webrtc_opus_wrapper" ] @@ -546,7 +351,6 @@ rtc_library("webrtc_opus_wrapper") { "../../api:array_view", "../../rtc_base:checks", "../../rtc_base:ignore_wundef", - "../../system_wrappers:field_trial", ] if (rtc_build_opus) { @@ -561,7 +365,7 @@ if 
(rtc_enable_protobuf) { proto_library("ana_debug_dump_proto") { visibility += webrtc_default_visibility sources = [ "audio_network_adaptor/debug_dump.proto" ] - link_deps = [ ":ana_config_proto" ] + deps = [ ":ana_config_proto" ] proto_out_dir = "modules/audio_coding/audio_network_adaptor" } proto_library("ana_config_proto") { @@ -577,7 +381,6 @@ rtc_library("audio_network_adaptor_config") { "audio_network_adaptor/audio_network_adaptor_config.cc", "audio_network_adaptor/include/audio_network_adaptor_config.h", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("audio_network_adaptor") { @@ -618,7 +421,6 @@ rtc_library("audio_network_adaptor") { "../../common_audio", "../../logging:rtc_event_audio", "../../rtc_base:checks", - "../../rtc_base:ignore_wundef", "../../rtc_base:logging", "../../rtc_base:protobuf_utils", "../../rtc_base:safe_conversions", @@ -626,11 +428,8 @@ rtc_library("audio_network_adaptor") { "../../rtc_base/system:file_wrapper", "../../system_wrappers", "../../system_wrappers:field_trial", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (rtc_enable_protobuf) { @@ -662,6 +461,8 @@ rtc_library("neteq") { "neteq/decision_logic.h", "neteq/decoder_database.cc", "neteq/decoder_database.h", + "neteq/delay_constraints.cc", + "neteq/delay_constraints.h", "neteq/delay_manager.cc", "neteq/delay_manager.h", "neteq/dsp_helper.cc", @@ -690,8 +491,6 @@ rtc_library("neteq") { "neteq/packet_arrival_history.h", "neteq/packet_buffer.cc", "neteq/packet_buffer.h", - "neteq/post_decode_vad.cc", - "neteq/post_decode_vad.h", "neteq/preemptive_expand.cc", "neteq/preemptive_expand.h", "neteq/random_vector.cc", @@ -717,17 +516,21 @@ rtc_library("neteq") { ":webrtc_cng", "..:module_api_public", "../../api:array_view", + "../../api:field_trials_view", "../../api:rtp_headers", "../../api:rtp_packet_info", "../../api:scoped_refptr", "../../api/audio:audio_frame_api", "../../api/audio_codecs:audio_codecs_api", + "../../api/environment", "../../api/neteq:neteq_api", "../../api/neteq:neteq_controller_api", "../../api/neteq:tick_timer", + "../../api/units:time_delta", + "../../api/units:timestamp", "../../common_audio", "../../common_audio:common_audio_c", - "../../rtc_base:audio_format_to_string", + "../../common_audio:common_audio_cc", "../../rtc_base:buffer", "../../rtc_base:checks", "../../rtc_base:event_tracer", @@ -741,28 +544,9 @@ rtc_library("neteq") { "../../rtc_base/experiments:field_trial_parser", "../../rtc_base/synchronization:mutex", "../../system_wrappers", - "../../system_wrappers:field_trial", "../../system_wrappers:metrics", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - -rtc_source_set("default_neteq_factory") { - visibility += webrtc_default_visibility - sources = [ - "neteq/default_neteq_factory.cc", - "neteq/default_neteq_factory.h", - ] - deps = [ - ":neteq", - "../../api:scoped_refptr", - "../../api/audio_codecs:audio_codecs_api", - "../../api/neteq:default_neteq_controller_factory", - "../../api/neteq:neteq_api", - "../../system_wrappers:system_wrappers", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -788,16 +572,20 @@ rtc_library("neteq_tools_minimal") { ] deps = [ - ":default_neteq_factory", ":neteq", "../../api:array_view", + "../../api:field_trials", "../../api:neteq_simulator_api", 
"../../api:rtp_headers", "../../api/audio:audio_frame_api", "../../api/audio_codecs:audio_codecs_api", + "../../api/environment", + "../../api/environment:environment_factory", "../../api/neteq:custom_neteq_factory", "../../api/neteq:default_neteq_controller_factory", + "../../api/neteq:default_neteq_factory", "../../api/neteq:neteq_api", + "../../api/units:timestamp", "../../rtc_base:buffer", "../../rtc_base:checks", "../../rtc_base:copy_on_write_buffer", @@ -806,7 +594,6 @@ rtc_library("neteq_tools_minimal") { "../../system_wrappers", "../rtp_rtcp:rtp_rtcp_format", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] defines = audio_codec_defines } @@ -840,15 +627,13 @@ rtc_library("neteq_test_tools") { "../../common_audio", "../../rtc_base:buffer", "../../rtc_base:checks", - "../../rtc_base:ssl", + "../../rtc_base:copy_on_write_buffer", + "../../rtc_base:digest", "../../rtc_base:stringutils", "../../rtc_base/system:arch", "../../test:rtp_test_utils", "../rtp_rtcp:rtp_rtcp_format", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -880,10 +665,7 @@ rtc_library("neteq_tools") { "../../rtc_base:timeutils", "../rtp_rtcp", "../rtp_rtcp:rtp_rtcp_format", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (rtc_enable_protobuf) { @@ -909,8 +691,8 @@ rtc_library("neteq_input_audio_tools") { deps = [ "../../common_audio", "../../rtc_base:checks", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } if (rtc_enable_protobuf) { @@ -946,7 +728,6 @@ rtc_library("audio_coding_modules_tests_shared") { deps = [ ":audio_coding", ":audio_coding_module_typedefs", - ":default_neteq_factory", ":neteq_test_tools", ":neteq_tools_minimal", ":webrtc_opus_wrapper", @@ -955,19 +736,20 @@ rtc_library("audio_coding_modules_tests_shared") { "../../api/audio:audio_frame_api", "../../api/audio_codecs:builtin_audio_decoder_factory", "../../api/audio_codecs:builtin_audio_encoder_factory", + "../../api/environment", + "../../api/environment:environment_factory", + "../../api/neteq:default_neteq_factory", "../../api/neteq:neteq_api", + "../../api/units:timestamp", "../../rtc_base:checks", - "../../rtc_base:ignore_wundef", - "../../rtc_base:ssl", + "../../rtc_base:digest", "../../rtc_base:stringutils", "../../system_wrappers", "../../test:fileutils", "../../test:test_support", "//testing/gtest", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] defines = audio_coding_defines @@ -986,9 +768,6 @@ if (rtc_include_tests) { "../../common_audio", "../../system_wrappers", ] - if (rtc_include_ilbc) { - audio_coding_deps += [ ":ilbc" ] - } if (rtc_include_opus) { audio_coding_deps += [ ":webrtc_opus" ] } @@ -1022,8 +801,6 @@ if (rtc_include_tests) { ":audio_decoder_unittests", ":g711_test", ":g722_test", - ":ilbc_test", - ":neteq_ilbc_quality_test", ":neteq_opus_quality_test", ":neteq_pcm16b_quality_test", ":neteq_pcmu_quality_test", @@ -1083,10 +860,13 @@ if (rtc_include_tests) { "../../api/audio_codecs/g711:audio_encoder_g711", "../../api/audio_codecs/g722:audio_decoder_g722", "../../api/audio_codecs/g722:audio_encoder_g722", - "../../api/audio_codecs/ilbc:audio_decoder_ilbc", - 
"../../api/audio_codecs/ilbc:audio_encoder_ilbc", "../../api/audio_codecs/opus:audio_decoder_opus", "../../api/audio_codecs/opus:audio_encoder_opus", + "../../api/environment", + "../../api/environment:environment_factory", + "../../api/neteq:default_neteq_factory", + "../../api/neteq:neteq_api", + "../../api/units:timestamp", "../../common_audio", "../../rtc_base:checks", "../../rtc_base:logging", @@ -1097,10 +877,8 @@ if (rtc_include_tests) { "../../test:fileutils", "../../test:scoped_key_value_config", "../../test:test_support", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] defines = audio_coding_defines } @@ -1117,14 +895,16 @@ if (rtc_include_tests) { ":neteq_test_support", ":neteq_test_tools", "../../api/audio_codecs/opus:audio_encoder_opus", + "../../api/environment:environment_factory", "../../api/test/metrics:global_metrics_logger_and_exporter", "../../api/test/metrics:metric", "../../rtc_base:macromagic", "../../rtc_base:timeutils", "../../system_wrappers", - "../../system_wrappers:field_trial", "../../test:fileutils", + "../../test:test_flags", "../../test:test_support", + "//third_party/abseil-cpp/absl/flags:flag", ] } @@ -1144,12 +924,14 @@ if (rtc_include_tests) { "../../api:scoped_refptr", "../../api/audio_codecs:audio_codecs_api", "../../api/audio_codecs:builtin_audio_decoder_factory", + "../../api/environment:environment_factory", + "../../api/neteq:default_neteq_factory", + "../../api/neteq:neteq_api", "../../test:test_support", "//testing/gtest", ] deps += audio_coding_deps - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("acm_send_test") { @@ -1170,13 +952,16 @@ if (rtc_include_tests) { "../../api/audio_codecs:audio_codecs_api", "../../api/audio_codecs:builtin_audio_decoder_factory", "../../api/audio_codecs:builtin_audio_encoder_factory", + "../../api/environment", + "../../api/environment:environment_factory", "../../rtc_base:checks", "../../rtc_base:stringutils", "../../test:test_support", "//testing/gtest", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] deps += audio_coding_deps - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } if (!build_with_chromium) { @@ -1198,14 +983,15 @@ if (rtc_include_tests) { defines = neteq_defines deps = [ - ":ilbc", ":neteq", ":neteq_input_audio_tools", ":neteq_tools", "../../api/audio_codecs:audio_codecs_api", "../../api/audio_codecs/opus:audio_encoder_opus", + "../../api/environment:environment_factory", "../../common_audio", "../../rtc_base/system:arch", + "../../test:explicit_key_value_config", "../../test:fileutils", "../../test:test_main", "../../test:test_support", @@ -1216,11 +1002,7 @@ if (rtc_include_tests) { if (is_android) { use_default_launcher = false - deps += [ - "//build/android/gtest_apk:native_test_instrumentation_test_runner_java", - "//testing/android/native_test:native_test_java", - "//testing/android/native_test:native_test_support", - ] + deps += [ "//build/android/gtest_apk:native_test_instrumentation_test_runner_java" ] shard_timeout = 900 } if (is_ios) { @@ -1235,31 +1017,32 @@ if (rtc_include_tests) { visibility += webrtc_default_visibility defines = audio_codec_defines deps = [ + ":neteq", ":neteq_input_audio_tools", + ":neteq_test_tools", ":neteq_tools", ":neteq_tools_minimal", + "../../api:make_ref_counted", + "../../api:rtp_headers", + "../../api:scoped_refptr", + 
"../../api/audio_codecs:audio_codecs_api", + "../../api/audio_codecs:builtin_audio_decoder_factory", + "../../api/environment:environment", + "../../api/neteq:neteq_api", + "../../logging:rtc_event_log_parser", "../../rtc_base:checks", "../../rtc_base:refcount", + "../../test:audio_test_common", + "../../test:field_trial", "../../test:fileutils", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "../../test:test_support", + "../rtp_rtcp:rtp_rtcp_format", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "neteq/tools/neteq_test_factory.cc", "neteq/tools/neteq_test_factory.h", ] - - deps += [ - ":neteq", - ":neteq_test_tools", - "../../api/audio_codecs:builtin_audio_decoder_factory", - "../../api/neteq:neteq_api", - "../../test:audio_test_common", - "../../test:field_trial", - "../../test:test_support", - ] } } @@ -1272,13 +1055,13 @@ if (rtc_include_tests) { ":neteq_test_factory", ":neteq_test_tools", ":neteq_tools_minimal", + "../../rtc_base:checks", "../../rtc_base:stringutils", "../../system_wrappers:field_trial", "../../test:field_trial", "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] sources = [ "neteq/tools/neteq_rtpplay.cc" ] } @@ -1316,11 +1099,7 @@ if (rtc_include_tests) { if (is_android) { use_default_launcher = false - deps += [ - "//build/android/gtest_apk:native_test_instrumentation_test_runner_java", - "//testing/android/native_test:native_test_java", - "//testing/android/native_test:native_test_support", - ] + deps += [ "//build/android/gtest_apk:native_test_instrumentation_test_runner_java" ] shard_timeout = 900 } @@ -1347,13 +1126,15 @@ if (rtc_include_tests) { ] deps = [ - ":default_neteq_factory", ":neteq", ":neteq_test_tools", ":pcm16b", "../../api/audio:audio_frame_api", "../../api/audio_codecs:audio_codecs_api", "../../api/audio_codecs:builtin_audio_decoder_factory", + "../../api/environment", + "../../api/environment:environment_factory", + "../../api/neteq:default_neteq_factory", "../../api/neteq:neteq_api", "../../rtc_base:checks", "../../system_wrappers", @@ -1372,23 +1153,23 @@ if (rtc_include_tests) { ] deps = [ - ":default_neteq_factory", ":neteq", ":neteq_input_audio_tools", ":neteq_test_tools", ":neteq_tools_minimal", "../../api/audio_codecs:builtin_audio_decoder_factory", + "../../api/environment:environment_factory", + "../../api/neteq:default_neteq_factory", "../../api/neteq:neteq_api", + "../../api/units:timestamp", "../../rtc_base:checks", "../../rtc_base:stringutils", "../../system_wrappers", "../../test:fileutils", "../../test:test_support", "//testing/gtest", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -1397,18 +1178,20 @@ if (rtc_include_tests) { deps = [ ":audio_coding", + ":audio_coding_module_typedefs", ":audio_encoder_cng", ":neteq_input_audio_tools", "../../api/audio:audio_frame_api", "../../api/audio_codecs/L16:audio_encoder_L16", "../../api/audio_codecs/g711:audio_encoder_g711", "../../api/audio_codecs/g722:audio_encoder_g722", - "../../api/audio_codecs/ilbc:audio_encoder_ilbc", "../../api/audio_codecs/opus:audio_encoder_opus", + "../../api/environment:environment_factory", + "../../rtc_base:checks", + "../../rtc_base:ip_address", "../../rtc_base:safe_conversions", 
"//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", - "//third_party/abseil-cpp/absl/memory", ] deps += audio_coding_deps @@ -1493,25 +1276,6 @@ if (rtc_include_tests) { ] } - rtc_executable("neteq_ilbc_quality_test") { - testonly = true - - sources = [ "neteq/test/neteq_ilbc_quality_test.cc" ] - - deps = [ - ":ilbc", - ":neteq", - ":neteq_quality_test_support", - ":neteq_tools", - "../../rtc_base:checks", - "../../rtc_base:safe_conversions", - "../../test:fileutils", - "../../test:test_main", - "//testing/gtest", - "//third_party/abseil-cpp/absl/flags:flag", - ] - } - rtc_executable("neteq_pcmu_quality_test") { testonly = true @@ -1566,14 +1330,6 @@ if (rtc_include_tests) { } if (!build_with_chromium) { - rtc_executable("ilbc_test") { - testonly = true - - sources = [ "codecs/ilbc/test/iLBC_test.c" ] - - deps = [ ":ilbc" ] - } - rtc_executable("webrtc_opus_fec_test") { testonly = true @@ -1595,7 +1351,6 @@ if (rtc_include_tests) { visibility += webrtc_default_visibility sources = [ - "acm2/acm_receiver_unittest.cc", "acm2/acm_remixing_unittest.cc", "acm2/audio_coding_module_unittest.cc", "acm2/call_statistics_unittest.cc", @@ -1613,9 +1368,9 @@ if (rtc_include_tests) { "codecs/builtin_audio_encoder_factory_unittest.cc", "codecs/cng/audio_encoder_cng_unittest.cc", "codecs/cng/cng_unittest.cc", - "codecs/ilbc/ilbc_unittest.cc", "codecs/legacy_encoded_audio_frame_unittest.cc", "codecs/opus/audio_decoder_multi_channel_opus_unittest.cc", + "codecs/opus/audio_decoder_opus_unittest.cc", "codecs/opus/audio_encoder_multi_channel_opus_unittest.cc", "codecs/opus/audio_encoder_opus_unittest.cc", "codecs/opus/opus_bandwidth_unittest.cc", @@ -1628,6 +1383,7 @@ if (rtc_include_tests) { "neteq/comfort_noise_unittest.cc", "neteq/decision_logic_unittest.cc", "neteq/decoder_database_unittest.cc", + "neteq/delay_constraints_unittest.cc", "neteq/delay_manager_unittest.cc", "neteq/dsp_helper_unittest.cc", "neteq/dtmf_buffer_unittest.cc", @@ -1643,6 +1399,7 @@ if (rtc_include_tests) { "neteq/mock/mock_expand.h", "neteq/mock/mock_histogram.h", "neteq/mock/mock_neteq_controller.h", + "neteq/mock/mock_packet_arrival_history.h", "neteq/mock/mock_packet_buffer.h", "neteq/mock/mock_red_payload_splitter.h", "neteq/mock/mock_statistics_calculator.h", @@ -1655,7 +1412,6 @@ if (rtc_include_tests) { "neteq/normal_unittest.cc", "neteq/packet_arrival_history_unittest.cc", "neteq/packet_buffer_unittest.cc", - "neteq/post_decode_vad_unittest.cc", "neteq/random_vector_unittest.cc", "neteq/red_payload_splitter_unittest.cc", "neteq/reorder_optimizer_unittest.cc", @@ -1677,9 +1433,7 @@ if (rtc_include_tests) { ":audio_coding_opus_common", ":audio_encoder_cng", ":audio_network_adaptor", - ":default_neteq_factory", ":g711", - ":ilbc", ":legacy_encoded_audio_frame", ":mocks", ":neteq", @@ -1695,6 +1449,8 @@ if (rtc_include_tests) { "..:module_api", "..:module_api_public", "../../api:array_view", + "../../api:make_ref_counted", + "../../api:scoped_refptr", "../../api/audio:audio_frame_api", "../../api/audio_codecs:audio_codecs_api", "../../api/audio_codecs:builtin_audio_decoder_factory", @@ -1703,28 +1459,35 @@ if (rtc_include_tests) { "../../api/audio_codecs/opus:audio_decoder_opus", "../../api/audio_codecs/opus:audio_encoder_multiopus", "../../api/audio_codecs/opus:audio_encoder_opus", + "../../api/audio_codecs/opus:audio_encoder_opus_config", + "../../api/environment", + "../../api/environment:environment_factory", "../../api/neteq:default_neteq_controller_factory", + 
"../../api/neteq:default_neteq_factory", "../../api/neteq:neteq_api", "../../api/neteq:neteq_controller_api", "../../api/neteq:tick_timer", "../../api/neteq:tick_timer_unittest", "../../api/rtc_event_log", + "../../api/units:time_delta", + "../../api/units:timestamp", "../../common_audio", "../../common_audio:common_audio_c", "../../common_audio:mock_common_audio", "../../logging:mocks", "../../logging:rtc_event_audio", "../../modules/rtp_rtcp:rtp_rtcp_format", + "../../rtc_base:buffer", "../../rtc_base:checks", - "../../rtc_base:ignore_wundef", + "../../rtc_base:digest", "../../rtc_base:macromagic", "../../rtc_base:platform_thread", + "../../rtc_base:random", "../../rtc_base:refcount", "../../rtc_base:rtc_base_tests_utils", "../../rtc_base:rtc_event", "../../rtc_base:safe_conversions", "../../rtc_base:sanitizer", - "../../rtc_base:ssl", "../../rtc_base:stringutils", "../../rtc_base:timeutils", "../../rtc_base/synchronization:mutex", @@ -1732,6 +1495,7 @@ if (rtc_include_tests) { "../../system_wrappers", "../../test:audio_codec_mocks", "../../test:audio_test_common", + "../../test:explicit_key_value_config", "../../test:field_trial", "../../test:fileutils", "../../test:rtc_expect_death", @@ -1741,13 +1505,11 @@ if (rtc_include_tests) { "../../test:test_support", "codecs/opus/test", "codecs/opus/test:test_unittest", + "//testing/gmock", "//testing/gtest", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] defines = audio_coding_defines diff --git a/modules/audio_coding/acm2/acm_receive_test.cc b/modules/audio_coding/acm2/acm_receive_test.cc index 66f6255b01..bf1fa7f1fe 100644 --- a/modules/audio_coding/acm2/acm_receive_test.cc +++ b/modules/audio_coding/acm2/acm_receive_test.cc @@ -15,6 +15,8 @@ #include #include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/environment/environment_factory.h" +#include "api/neteq/default_neteq_factory.h" #include "modules/audio_coding/include/audio_coding_module.h" #include "modules/audio_coding/neteq/tools/audio_sink.h" #include "modules/audio_coding/neteq/tools/packet.h" @@ -24,26 +26,16 @@ namespace webrtc { namespace test { -namespace { -acm2::AcmReceiver::Config MakeAcmConfig( - Clock& clock, - rtc::scoped_refptr decoder_factory) { - acm2::AcmReceiver::Config config; - config.clock = clock; - config.decoder_factory = std::move(decoder_factory); - return config; -} -} // namespace - AcmReceiveTestOldApi::AcmReceiveTestOldApi( PacketSource* packet_source, AudioSink* audio_sink, int output_freq_hz, NumOutputChannels exptected_output_channels, - rtc::scoped_refptr decoder_factory) + scoped_refptr decoder_factory) : clock_(0), - acm_receiver_(std::make_unique( - MakeAcmConfig(clock_, std::move(decoder_factory)))), + neteq_(DefaultNetEqFactory().Create(CreateEnvironment(&clock_), + NetEq::Config(), + std::move(decoder_factory))), packet_source_(packet_source), audio_sink_(audio_sink), output_freq_hz_(output_freq_hz), @@ -52,43 +44,42 @@ AcmReceiveTestOldApi::AcmReceiveTestOldApi( AcmReceiveTestOldApi::~AcmReceiveTestOldApi() = default; void AcmReceiveTestOldApi::RegisterDefaultCodecs() { - acm_receiver_->SetCodecs({{103, {"ISAC", 16000, 1}}, - {104, {"ISAC", 32000, 1}}, - {107, {"L16", 8000, 1}}, - {108, {"L16", 16000, 1}}, - {109, {"L16", 32000, 1}}, - {111, {"L16", 8000, 2}}, - {112, {"L16", 16000, 2}}, - {113, {"L16", 32000, 2}}, - {0, 
{"PCMU", 8000, 1}}, - {110, {"PCMU", 8000, 2}}, - {8, {"PCMA", 8000, 1}}, - {118, {"PCMA", 8000, 2}}, - {102, {"ILBC", 8000, 1}}, - {9, {"G722", 8000, 1}}, - {119, {"G722", 8000, 2}}, - {120, {"OPUS", 48000, 2, {{"stereo", "1"}}}}, - {13, {"CN", 8000, 1}}, - {98, {"CN", 16000, 1}}, - {99, {"CN", 32000, 1}}}); + neteq_->SetCodecs({{103, {"ISAC", 16000, 1}}, + {104, {"ISAC", 32000, 1}}, + {107, {"L16", 8000, 1}}, + {108, {"L16", 16000, 1}}, + {109, {"L16", 32000, 1}}, + {111, {"L16", 8000, 2}}, + {112, {"L16", 16000, 2}}, + {113, {"L16", 32000, 2}}, + {0, {"PCMU", 8000, 1}}, + {110, {"PCMU", 8000, 2}}, + {8, {"PCMA", 8000, 1}}, + {118, {"PCMA", 8000, 2}}, + {9, {"G722", 8000, 1}}, + {119, {"G722", 8000, 2}}, + {120, {"OPUS", 48000, 2, {{"stereo", "1"}}}}, + {13, {"CN", 8000, 1}}, + {98, {"CN", 16000, 1}}, + {99, {"CN", 32000, 1}}}); } // Remaps payload types from ACM's default to those used in the resource file // neteq_universal_new.rtp. void AcmReceiveTestOldApi::RegisterNetEqTestCodecs() { - acm_receiver_->SetCodecs({{103, {"ISAC", 16000, 1}}, - {104, {"ISAC", 32000, 1}}, - {93, {"L16", 8000, 1}}, - {94, {"L16", 16000, 1}}, - {95, {"L16", 32000, 1}}, - {0, {"PCMU", 8000, 1}}, - {8, {"PCMA", 8000, 1}}, - {102, {"ILBC", 8000, 1}}, - {9, {"G722", 8000, 1}}, - {120, {"OPUS", 48000, 2}}, - {13, {"CN", 8000, 1}}, - {98, {"CN", 16000, 1}}, - {99, {"CN", 32000, 1}}}); + neteq_->SetCodecs({{103, {"ISAC", 16000, 1}}, + {104, {"ISAC", 32000, 1}}, + {93, {"L16", 8000, 1}}, + {94, {"L16", 16000, 1}}, + {95, {"L16", 32000, 1}}, + {0, {"PCMU", 8000, 1}}, + {8, {"PCMA", 8000, 1}}, + {102, {"ILBC", 8000, 1}}, + {9, {"G722", 8000, 1}}, + {120, {"OPUS", 48000, 2}}, + {13, {"CN", 8000, 1}}, + {98, {"CN", 16000, 1}}, + {99, {"CN", 32000, 1}}}); } void AcmReceiveTestOldApi::Run() { @@ -98,8 +89,9 @@ void AcmReceiveTestOldApi::Run() { while (clock_.TimeInMilliseconds() < packet->time_ms()) { AudioFrame output_frame; bool muted; - EXPECT_EQ( - 0, acm_receiver_->GetAudio(output_freq_hz_, &output_frame, &muted)); + EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output_frame, &muted)); + EXPECT_TRUE( + resampler_helper_.MaybeResample(output_freq_hz_, &output_frame)); ASSERT_EQ(output_freq_hz_, output_frame.sample_rate_hz_); ASSERT_FALSE(muted); const size_t samples_per_block = @@ -119,10 +111,11 @@ void AcmReceiveTestOldApi::Run() { AfterGetAudio(); } - EXPECT_EQ(0, acm_receiver_->InsertPacket( + EXPECT_EQ(0, neteq_->InsertPacket( packet->header(), - rtc::ArrayView( - packet->payload(), packet->payload_length_bytes()))) + ArrayView(packet->payload(), + packet->payload_length_bytes()), + clock_.CurrentTime())) << "Failure when inserting packet:" << std::endl << " PT = " << static_cast(packet->header().payloadType) << std::endl diff --git a/modules/audio_coding/acm2/acm_receive_test.h b/modules/audio_coding/acm2/acm_receive_test.h index d0195dddc6..9cc9019591 100644 --- a/modules/audio_coding/acm2/acm_receive_test.h +++ b/modules/audio_coding/acm2/acm_receive_test.h @@ -17,8 +17,9 @@ #include #include "api/audio_codecs/audio_decoder_factory.h" +#include "api/neteq/neteq.h" #include "api/scoped_refptr.h" -#include "modules/audio_coding/acm2/acm_receiver.h" +#include "modules/audio_coding/acm2/acm_resampler.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -42,7 +43,7 @@ class AcmReceiveTestOldApi { AudioSink* audio_sink, int output_freq_hz, NumOutputChannels exptected_output_channels, - rtc::scoped_refptr decoder_factory); + scoped_refptr decoder_factory); virtual ~AcmReceiveTestOldApi(); 
AcmReceiveTestOldApi(const AcmReceiveTestOldApi&) = delete; @@ -63,7 +64,8 @@ class AcmReceiveTestOldApi { virtual void AfterGetAudio() {} SimulatedClock clock_; - std::unique_ptr acm_receiver_; + std::unique_ptr neteq_; + acm2::ResamplerHelper resampler_helper_; PacketSource* packet_source_; AudioSink* audio_sink_; int output_freq_hz_; diff --git a/modules/audio_coding/acm2/acm_receiver.cc b/modules/audio_coding/acm2/acm_receiver.cc deleted file mode 100644 index a77e472ec1..0000000000 --- a/modules/audio_coding/acm2/acm_receiver.cc +++ /dev/null @@ -1,362 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/acm2/acm_receiver.h" - -#include -#include - -#include -#include - -#include "absl/strings/match.h" -#include "api/audio/audio_frame.h" -#include "api/audio_codecs/audio_decoder.h" -#include "api/neteq/neteq.h" -#include "modules/audio_coding/acm2/acm_resampler.h" -#include "modules/audio_coding/acm2/call_statistics.h" -#include "modules/audio_coding/neteq/default_neteq_factory.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/strings/audio_format_to_string.h" -#include "system_wrappers/include/clock.h" - -namespace webrtc { - -namespace acm2 { - -namespace { - -std::unique_ptr CreateNetEq( - NetEqFactory* neteq_factory, - const NetEq::Config& config, - Clock* clock, - const rtc::scoped_refptr& decoder_factory) { - if (neteq_factory) { - return neteq_factory->CreateNetEq(config, decoder_factory, clock); - } - return DefaultNetEqFactory().CreateNetEq(config, decoder_factory, clock); -} - -} // namespace - -AcmReceiver::Config::Config( - rtc::scoped_refptr decoder_factory) - : clock(*Clock::GetRealTimeClock()), decoder_factory(decoder_factory) { - // Post-decode VAD is disabled by default in NetEq, however, Audio - // Conference Mixer relies on VAD decisions and fails without them. 
- neteq_config.enable_post_decode_vad = true; -} - -AcmReceiver::Config::Config(const Config&) = default; -AcmReceiver::Config::~Config() = default; - -AcmReceiver::AcmReceiver(const Config& config) - : last_audio_buffer_(new int16_t[AudioFrame::kMaxDataSizeSamples]), - neteq_(CreateNetEq(config.neteq_factory, - config.neteq_config, - &config.clock, - config.decoder_factory)), - clock_(config.clock), - resampled_last_output_frame_(true) { - memset(last_audio_buffer_.get(), 0, - sizeof(int16_t) * AudioFrame::kMaxDataSizeSamples); -} - -AcmReceiver::~AcmReceiver() = default; - -int AcmReceiver::SetMinimumDelay(int delay_ms) { - if (neteq_->SetMinimumDelay(delay_ms)) - return 0; - RTC_LOG(LS_ERROR) << "AcmReceiver::SetExtraDelay " << delay_ms; - return -1; -} - -int AcmReceiver::SetMaximumDelay(int delay_ms) { - if (neteq_->SetMaximumDelay(delay_ms)) - return 0; - RTC_LOG(LS_ERROR) << "AcmReceiver::SetExtraDelay " << delay_ms; - return -1; -} - -bool AcmReceiver::SetBaseMinimumDelayMs(int delay_ms) { - return neteq_->SetBaseMinimumDelayMs(delay_ms); -} - -int AcmReceiver::GetBaseMinimumDelayMs() const { - return neteq_->GetBaseMinimumDelayMs(); -} - -absl::optional AcmReceiver::last_packet_sample_rate_hz() const { - MutexLock lock(&mutex_); - if (!last_decoder_) { - return absl::nullopt; - } - return last_decoder_->sample_rate_hz; -} - -int AcmReceiver::last_output_sample_rate_hz() const { - return neteq_->last_output_sample_rate_hz(); -} - -int AcmReceiver::InsertPacket(const RTPHeader& rtp_header, - rtc::ArrayView incoming_payload) { - if (incoming_payload.empty()) { - neteq_->InsertEmptyPacket(rtp_header); - return 0; - } - - int payload_type = rtp_header.payloadType; - auto format = neteq_->GetDecoderFormat(payload_type); - if (format && absl::EqualsIgnoreCase(format->sdp_format.name, "red")) { - // This is a RED packet. Get the format of the audio codec. - payload_type = incoming_payload[0] & 0x7f; - format = neteq_->GetDecoderFormat(payload_type); - } - if (!format) { - RTC_LOG_F(LS_ERROR) << "Payload-type " << payload_type - << " is not registered."; - return -1; - } - - { - MutexLock lock(&mutex_); - if (absl::EqualsIgnoreCase(format->sdp_format.name, "cn")) { - if (last_decoder_ && last_decoder_->num_channels > 1) { - // This is a CNG and the audio codec is not mono, so skip pushing in - // packets into NetEq. - return 0; - } - } else { - last_decoder_ = DecoderInfo{/*payload_type=*/payload_type, - /*sample_rate_hz=*/format->sample_rate_hz, - /*num_channels=*/format->num_channels, - /*sdp_format=*/std::move(format->sdp_format)}; - } - } // `mutex_` is released. - - if (neteq_->InsertPacket(rtp_header, incoming_payload) < 0) { - RTC_LOG(LS_ERROR) << "AcmReceiver::InsertPacket " - << static_cast(rtp_header.payloadType) - << " Failed to insert packet"; - return -1; - } - return 0; -} - -int AcmReceiver::GetAudio(int desired_freq_hz, - AudioFrame* audio_frame, - bool* muted) { - RTC_DCHECK(muted); - - int current_sample_rate_hz = 0; - if (neteq_->GetAudio(audio_frame, muted, ¤t_sample_rate_hz) != - NetEq::kOK) { - RTC_LOG(LS_ERROR) << "AcmReceiver::GetAudio - NetEq Failed."; - return -1; - } - - RTC_DCHECK_NE(current_sample_rate_hz, 0); - - // Update if resampling is required. - const bool need_resampling = - (desired_freq_hz != -1) && (current_sample_rate_hz != desired_freq_hz); - - // Accessing members, take the lock. - MutexLock lock(&mutex_); - if (need_resampling && !resampled_last_output_frame_) { - // Prime the resampler with the last frame. 
- int16_t temp_output[AudioFrame::kMaxDataSizeSamples]; - int samples_per_channel_int = resampler_.Resample10Msec( - last_audio_buffer_.get(), current_sample_rate_hz, desired_freq_hz, - audio_frame->num_channels_, AudioFrame::kMaxDataSizeSamples, - temp_output); - if (samples_per_channel_int < 0) { - RTC_LOG(LS_ERROR) << "AcmReceiver::GetAudio - " - "Resampling last_audio_buffer_ failed."; - return -1; - } - } - - // TODO(bugs.webrtc.org/3923) Glitches in the output may appear if the output - // rate from NetEq changes. - if (need_resampling) { - // TODO(yujo): handle this more efficiently for muted frames. - int samples_per_channel_int = resampler_.Resample10Msec( - audio_frame->data(), current_sample_rate_hz, desired_freq_hz, - audio_frame->num_channels_, AudioFrame::kMaxDataSizeSamples, - audio_frame->mutable_data()); - if (samples_per_channel_int < 0) { - RTC_LOG(LS_ERROR) - << "AcmReceiver::GetAudio - Resampling audio_buffer_ failed."; - return -1; - } - audio_frame->samples_per_channel_ = - static_cast(samples_per_channel_int); - audio_frame->sample_rate_hz_ = desired_freq_hz; - RTC_DCHECK_EQ( - audio_frame->sample_rate_hz_, - rtc::dchecked_cast(audio_frame->samples_per_channel_ * 100)); - resampled_last_output_frame_ = true; - } else { - resampled_last_output_frame_ = false; - // We might end up here ONLY if codec is changed. - } - - // Store current audio in `last_audio_buffer_` for next time. - memcpy(last_audio_buffer_.get(), audio_frame->data(), - sizeof(int16_t) * audio_frame->samples_per_channel_ * - audio_frame->num_channels_); - - call_stats_.DecodedByNetEq(audio_frame->speech_type_, *muted); - return 0; -} - -void AcmReceiver::SetCodecs(const std::map& codecs) { - neteq_->SetCodecs(codecs); -} - -void AcmReceiver::FlushBuffers() { - neteq_->FlushBuffers(); -} - -void AcmReceiver::RemoveAllCodecs() { - MutexLock lock(&mutex_); - neteq_->RemoveAllPayloadTypes(); - last_decoder_ = absl::nullopt; -} - -absl::optional AcmReceiver::GetPlayoutTimestamp() { - return neteq_->GetPlayoutTimestamp(); -} - -int AcmReceiver::FilteredCurrentDelayMs() const { - return neteq_->FilteredCurrentDelayMs(); -} - -int AcmReceiver::TargetDelayMs() const { - return neteq_->TargetDelayMs(); -} - -absl::optional> AcmReceiver::LastDecoder() - const { - MutexLock lock(&mutex_); - if (!last_decoder_) { - return absl::nullopt; - } - RTC_DCHECK_NE(-1, last_decoder_->payload_type); - return std::make_pair(last_decoder_->payload_type, last_decoder_->sdp_format); -} - -void AcmReceiver::GetNetworkStatistics( - NetworkStatistics* acm_stat, - bool get_and_clear_legacy_stats /* = true */) const { - NetEqNetworkStatistics neteq_stat; - if (get_and_clear_legacy_stats) { - // NetEq function always returns zero, so we don't check the return value. 
- neteq_->NetworkStatistics(&neteq_stat); - - acm_stat->currentExpandRate = neteq_stat.expand_rate; - acm_stat->currentSpeechExpandRate = neteq_stat.speech_expand_rate; - acm_stat->currentPreemptiveRate = neteq_stat.preemptive_rate; - acm_stat->currentAccelerateRate = neteq_stat.accelerate_rate; - acm_stat->currentSecondaryDecodedRate = neteq_stat.secondary_decoded_rate; - acm_stat->currentSecondaryDiscardedRate = - neteq_stat.secondary_discarded_rate; - acm_stat->meanWaitingTimeMs = neteq_stat.mean_waiting_time_ms; - acm_stat->maxWaitingTimeMs = neteq_stat.max_waiting_time_ms; - } else { - neteq_stat = neteq_->CurrentNetworkStatistics(); - acm_stat->currentExpandRate = 0; - acm_stat->currentSpeechExpandRate = 0; - acm_stat->currentPreemptiveRate = 0; - acm_stat->currentAccelerateRate = 0; - acm_stat->currentSecondaryDecodedRate = 0; - acm_stat->currentSecondaryDiscardedRate = 0; - acm_stat->meanWaitingTimeMs = -1; - acm_stat->maxWaitingTimeMs = 1; - } - acm_stat->currentBufferSize = neteq_stat.current_buffer_size_ms; - acm_stat->preferredBufferSize = neteq_stat.preferred_buffer_size_ms; - acm_stat->jitterPeaksFound = neteq_stat.jitter_peaks_found ? true : false; - - NetEqLifetimeStatistics neteq_lifetime_stat = neteq_->GetLifetimeStatistics(); - acm_stat->totalSamplesReceived = neteq_lifetime_stat.total_samples_received; - acm_stat->concealedSamples = neteq_lifetime_stat.concealed_samples; - acm_stat->silentConcealedSamples = - neteq_lifetime_stat.silent_concealed_samples; - acm_stat->concealmentEvents = neteq_lifetime_stat.concealment_events; - acm_stat->jitterBufferDelayMs = neteq_lifetime_stat.jitter_buffer_delay_ms; - acm_stat->jitterBufferTargetDelayMs = - neteq_lifetime_stat.jitter_buffer_target_delay_ms; - acm_stat->jitterBufferMinimumDelayMs = - neteq_lifetime_stat.jitter_buffer_minimum_delay_ms; - acm_stat->jitterBufferEmittedCount = - neteq_lifetime_stat.jitter_buffer_emitted_count; - acm_stat->delayedPacketOutageSamples = - neteq_lifetime_stat.delayed_packet_outage_samples; - acm_stat->relativePacketArrivalDelayMs = - neteq_lifetime_stat.relative_packet_arrival_delay_ms; - acm_stat->interruptionCount = neteq_lifetime_stat.interruption_count; - acm_stat->totalInterruptionDurationMs = - neteq_lifetime_stat.total_interruption_duration_ms; - acm_stat->insertedSamplesForDeceleration = - neteq_lifetime_stat.inserted_samples_for_deceleration; - acm_stat->removedSamplesForAcceleration = - neteq_lifetime_stat.removed_samples_for_acceleration; - acm_stat->fecPacketsReceived = neteq_lifetime_stat.fec_packets_received; - acm_stat->fecPacketsDiscarded = neteq_lifetime_stat.fec_packets_discarded; - acm_stat->packetsDiscarded = neteq_lifetime_stat.packets_discarded; - - NetEqOperationsAndState neteq_operations_and_state = - neteq_->GetOperationsAndState(); - acm_stat->packetBufferFlushes = - neteq_operations_and_state.packet_buffer_flushes; -} - -int AcmReceiver::EnableNack(size_t max_nack_list_size) { - neteq_->EnableNack(max_nack_list_size); - return 0; -} - -void AcmReceiver::DisableNack() { - neteq_->DisableNack(); -} - -std::vector AcmReceiver::GetNackList( - int64_t round_trip_time_ms) const { - return neteq_->GetNackList(round_trip_time_ms); -} - -void AcmReceiver::ResetInitialDelay() { - neteq_->SetMinimumDelay(0); - // TODO(turajs): Should NetEq Buffer be flushed? -} - -uint32_t AcmReceiver::NowInTimestamp(int decoder_sampling_rate) const { - // Down-cast the time to (32-6)-bit since we only care about - // the least significant bits. (32-6) bits cover 2^(32-6) = 67108864 ms. 
- // We masked 6 most significant bits of 32-bit so there is no overflow in - // the conversion from milliseconds to timestamp. - const uint32_t now_in_ms = - static_cast(clock_.TimeInMilliseconds() & 0x03ffffff); - return static_cast((decoder_sampling_rate / 1000) * now_in_ms); -} - -void AcmReceiver::GetDecodingCallStatistics( - AudioDecodingCallStats* stats) const { - MutexLock lock(&mutex_); - *stats = call_stats_.GetDecodingStatistics(); -} - -} // namespace acm2 - -} // namespace webrtc diff --git a/modules/audio_coding/acm2/acm_receiver.h b/modules/audio_coding/acm2/acm_receiver.h deleted file mode 100644 index 820150aede..0000000000 --- a/modules/audio_coding/acm2/acm_receiver.h +++ /dev/null @@ -1,245 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_ACM2_ACM_RECEIVER_H_ -#define MODULES_AUDIO_CODING_ACM2_ACM_RECEIVER_H_ - -#include - -#include -#include -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/array_view.h" -#include "api/audio_codecs/audio_decoder.h" -#include "api/audio_codecs/audio_decoder_factory.h" -#include "api/audio_codecs/audio_format.h" -#include "api/neteq/neteq.h" -#include "api/neteq/neteq_factory.h" -#include "modules/audio_coding/acm2/acm_resampler.h" -#include "modules/audio_coding/acm2/call_statistics.h" -#include "modules/audio_coding/include/audio_coding_module_typedefs.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_annotations.h" - -namespace webrtc { - -class Clock; -class NetEq; -struct RTPHeader; - -namespace acm2 { - -class AcmReceiver { - public: - struct Config { - explicit Config( - rtc::scoped_refptr decoder_factory = nullptr); - Config(const Config&); - ~Config(); - - NetEq::Config neteq_config; - Clock& clock; - rtc::scoped_refptr decoder_factory; - NetEqFactory* neteq_factory = nullptr; - }; - - // Constructor of the class - explicit AcmReceiver(const Config& config); - - // Destructor of the class. - ~AcmReceiver(); - - // - // Inserts a payload with its associated RTP-header into NetEq. - // - // Input: - // - rtp_header : RTP header for the incoming payload containing - // information about payload type, sequence number, - // timestamp, SSRC and marker bit. - // - incoming_payload : Incoming audio payload. - // - length_payload : Length of incoming audio payload in bytes. - // - // Return value : 0 if OK. - // <0 if NetEq returned an error. - // - int InsertPacket(const RTPHeader& rtp_header, - rtc::ArrayView incoming_payload); - - // - // Asks NetEq for 10 milliseconds of decoded audio. - // - // Input: - // -desired_freq_hz : specifies the sampling rate [Hz] of the output - // audio. If set -1 indicates to resampling is - // is required and the audio returned at the - // sampling rate of the decoder. - // - // Output: - // -audio_frame : an audio frame were output data and - // associated parameters are written to. - // -muted : if true, the sample data in audio_frame is not - // populated, and must be interpreted as all zero. - // - // Return value : 0 if OK. - // -1 if NetEq returned an error. 
- // - int GetAudio(int desired_freq_hz, AudioFrame* audio_frame, bool* muted); - - // Replace the current set of decoders with the specified set. - void SetCodecs(const std::map& codecs); - - // - // Sets a minimum delay for packet buffer. The given delay is maintained, - // unless channel condition dictates a higher delay. - // - // Input: - // - delay_ms : minimum delay in milliseconds. - // - // Return value : 0 if OK. - // <0 if NetEq returned an error. - // - int SetMinimumDelay(int delay_ms); - - // - // Sets a maximum delay [ms] for the packet buffer. The target delay does not - // exceed the given value, even if channel condition requires so. - // - // Input: - // - delay_ms : maximum delay in milliseconds. - // - // Return value : 0 if OK. - // <0 if NetEq returned an error. - // - int SetMaximumDelay(int delay_ms); - - // Sets a base minimum delay in milliseconds for the packet buffer. - // Base minimum delay sets lower bound minimum delay value which - // is set via SetMinimumDelay. - // - // Returns true if value was successfully set, false overwise. - bool SetBaseMinimumDelayMs(int delay_ms); - - // Returns current value of base minimum delay in milliseconds. - int GetBaseMinimumDelayMs() const; - - // - // Resets the initial delay to zero. - // - void ResetInitialDelay(); - - // Returns the sample rate of the decoder associated with the last incoming - // packet. If no packet of a registered non-CNG codec has been received, the - // return value is empty. Also, if the decoder was unregistered since the last - // packet was inserted, the return value is empty. - absl::optional last_packet_sample_rate_hz() const; - - // Returns last_output_sample_rate_hz from the NetEq instance. - int last_output_sample_rate_hz() const; - - // - // Get the current network statistics from NetEq. - // - // Output: - // - statistics : The current network statistics. - // - void GetNetworkStatistics(NetworkStatistics* statistics, - bool get_and_clear_legacy_stats = true) const; - - // - // Flushes the NetEq packet and speech buffers. - // - void FlushBuffers(); - - // - // Remove all registered codecs. - // - void RemoveAllCodecs(); - - // Returns the RTP timestamp for the last sample delivered by GetAudio(). - // The return value will be empty if no valid timestamp is available. - absl::optional GetPlayoutTimestamp(); - - // Returns the current total delay from NetEq (packet buffer and sync buffer) - // in ms, with smoothing applied to even out short-time fluctuations due to - // jitter. The packet buffer part of the delay is not updated during DTX/CNG - // periods. - // - int FilteredCurrentDelayMs() const; - - // Returns the current target delay for NetEq in ms. - // - int TargetDelayMs() const; - - // - // Get payload type and format of the last non-CNG/non-DTMF received payload. - // If no non-CNG/non-DTMF packet is received absl::nullopt is returned. - // - absl::optional> LastDecoder() const; - - // - // Enable NACK and set the maximum size of the NACK list. If NACK is already - // enabled then the maximum NACK list size is modified accordingly. - // - // If the sequence number of last received packet is N, the sequence numbers - // of NACK list are in the range of [N - `max_nack_list_size`, N). - // - // `max_nack_list_size` should be positive (none zero) and less than or - // equal to `Nack::kNackListSizeLimit`. Otherwise, No change is applied and -1 - // is returned. 0 is returned at success. - // - int EnableNack(size_t max_nack_list_size); - - // Disable NACK. 
- void DisableNack(); - - // - // Get a list of packets to be retransmitted. `round_trip_time_ms` is an - // estimate of the round-trip-time (in milliseconds). Missing packets which - // will be playout in a shorter time than the round-trip-time (with respect - // to the time this API is called) will not be included in the list. - // - // Negative `round_trip_time_ms` results is an error message and empty list - // is returned. - // - std::vector GetNackList(int64_t round_trip_time_ms) const; - - // - // Get statistics of calls to GetAudio(). - void GetDecodingCallStatistics(AudioDecodingCallStats* stats) const; - - private: - struct DecoderInfo { - int payload_type; - int sample_rate_hz; - int num_channels; - SdpAudioFormat sdp_format; - }; - - uint32_t NowInTimestamp(int decoder_sampling_rate) const; - - mutable Mutex mutex_; - absl::optional last_decoder_ RTC_GUARDED_BY(mutex_); - ACMResampler resampler_ RTC_GUARDED_BY(mutex_); - std::unique_ptr last_audio_buffer_ RTC_GUARDED_BY(mutex_); - CallStatistics call_stats_ RTC_GUARDED_BY(mutex_); - const std::unique_ptr neteq_; // NetEq is thread-safe; no lock needed. - Clock& clock_; - bool resampled_last_output_frame_ RTC_GUARDED_BY(mutex_); -}; - -} // namespace acm2 - -} // namespace webrtc - -#endif // MODULES_AUDIO_CODING_ACM2_ACM_RECEIVER_H_ diff --git a/modules/audio_coding/acm2/acm_receiver_unittest.cc b/modules/audio_coding/acm2/acm_receiver_unittest.cc deleted file mode 100644 index cda6688157..0000000000 --- a/modules/audio_coding/acm2/acm_receiver_unittest.cc +++ /dev/null @@ -1,471 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/acm2/acm_receiver.h" - -#include // std::min -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/builtin_audio_decoder_factory.h" -#include "api/audio_codecs/builtin_audio_encoder_factory.h" -#include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" -#include "modules/audio_coding/include/audio_coding_module.h" -#include "modules/audio_coding/neteq/tools/rtp_generator.h" -#include "modules/include/module_common_types.h" -#include "rtc_base/checks.h" -#include "rtc_base/numerics/safe_conversions.h" -#include "system_wrappers/include/clock.h" -#include "test/gtest.h" -#include "test/testsupport/file_utils.h" - -namespace webrtc { - -namespace acm2 { - -class AcmReceiverTestOldApi : public AudioPacketizationCallback, - public ::testing::Test { - protected: - AcmReceiverTestOldApi() - : timestamp_(0), - packet_sent_(false), - last_packet_send_timestamp_(timestamp_), - last_frame_type_(AudioFrameType::kEmptyFrame) { - config_.decoder_factory = decoder_factory_; - } - - ~AcmReceiverTestOldApi() {} - - void SetUp() override { - acm_ = AudioCodingModule::Create(); - receiver_.reset(new AcmReceiver(config_)); - ASSERT_TRUE(receiver_.get() != NULL); - ASSERT_TRUE(acm_.get() != NULL); - acm_->RegisterTransportCallback(this); - - rtp_header_.sequenceNumber = 0; - rtp_header_.timestamp = 0; - rtp_header_.markerBit = false; - rtp_header_.ssrc = 0x12345678; // Arbitrary. 
- rtp_header_.numCSRCs = 0; - rtp_header_.payloadType = 0; - } - - void TearDown() override {} - - AudioCodecInfo SetEncoder(int payload_type, - const SdpAudioFormat& format, - const std::map cng_payload_types = {}) { - // Create the speech encoder. - absl::optional info = - encoder_factory_->QueryAudioEncoder(format); - RTC_CHECK(info.has_value()); - std::unique_ptr enc = - encoder_factory_->MakeAudioEncoder(payload_type, format, absl::nullopt); - - // If we have a compatible CN specification, stack a CNG on top. - auto it = cng_payload_types.find(info->sample_rate_hz); - if (it != cng_payload_types.end()) { - AudioEncoderCngConfig config; - config.speech_encoder = std::move(enc); - config.num_channels = 1; - config.payload_type = it->second; - config.vad_mode = Vad::kVadNormal; - enc = CreateComfortNoiseEncoder(std::move(config)); - } - - // Actually start using the new encoder. - acm_->SetEncoder(std::move(enc)); - return *info; - } - - int InsertOnePacketOfSilence(const AudioCodecInfo& info) { - // Frame setup according to the codec. - AudioFrame frame; - frame.sample_rate_hz_ = info.sample_rate_hz; - frame.samples_per_channel_ = info.sample_rate_hz / 100; // 10 ms. - frame.num_channels_ = info.num_channels; - frame.Mute(); - packet_sent_ = false; - last_packet_send_timestamp_ = timestamp_; - int num_10ms_frames = 0; - while (!packet_sent_) { - frame.timestamp_ = timestamp_; - timestamp_ += rtc::checked_cast(frame.samples_per_channel_); - EXPECT_GE(acm_->Add10MsData(frame), 0); - ++num_10ms_frames; - } - return num_10ms_frames; - } - - int SendData(AudioFrameType frame_type, - uint8_t payload_type, - uint32_t timestamp, - const uint8_t* payload_data, - size_t payload_len_bytes, - int64_t absolute_capture_timestamp_ms) override { - if (frame_type == AudioFrameType::kEmptyFrame) - return 0; - - rtp_header_.payloadType = payload_type; - rtp_header_.timestamp = timestamp; - - int ret_val = receiver_->InsertPacket( - rtp_header_, - rtc::ArrayView(payload_data, payload_len_bytes)); - if (ret_val < 0) { - RTC_DCHECK_NOTREACHED(); - return -1; - } - rtp_header_.sequenceNumber++; - packet_sent_ = true; - last_frame_type_ = frame_type; - return 0; - } - - const rtc::scoped_refptr encoder_factory_ = - CreateBuiltinAudioEncoderFactory(); - const rtc::scoped_refptr decoder_factory_ = - CreateBuiltinAudioDecoderFactory(); - acm2::AcmReceiver::Config config_; - std::unique_ptr receiver_; - std::unique_ptr acm_; - RTPHeader rtp_header_; - uint32_t timestamp_; - bool packet_sent_; // Set when SendData is called reset when inserting audio. - uint32_t last_packet_send_timestamp_; - AudioFrameType last_frame_type_; -}; - -#if defined(WEBRTC_ANDROID) -#define MAYBE_SampleRate DISABLED_SampleRate -#else -#define MAYBE_SampleRate SampleRate -#endif -TEST_F(AcmReceiverTestOldApi, MAYBE_SampleRate) { - const std::map codecs = {{0, {"OPUS", 48000, 2}}}; - receiver_->SetCodecs(codecs); - - constexpr int kOutSampleRateHz = 8000; // Different than codec sample rate. 
- for (size_t i = 0; i < codecs.size(); ++i) { - const int payload_type = rtc::checked_cast(i); - const int num_10ms_frames = - InsertOnePacketOfSilence(SetEncoder(payload_type, codecs.at(i))); - for (int k = 0; k < num_10ms_frames; ++k) { - AudioFrame frame; - bool muted; - EXPECT_EQ(0, receiver_->GetAudio(kOutSampleRateHz, &frame, &muted)); - } - EXPECT_EQ(encoder_factory_->QueryAudioEncoder(codecs.at(i))->sample_rate_hz, - receiver_->last_output_sample_rate_hz()); - } -} - -class AcmReceiverTestFaxModeOldApi : public AcmReceiverTestOldApi { - protected: - AcmReceiverTestFaxModeOldApi() { - config_.neteq_config.for_test_no_time_stretching = true; - } - - void RunVerifyAudioFrame(const SdpAudioFormat& codec) { - // Make sure "fax mode" is enabled. This will avoid delay changes unless the - // packet-loss concealment is made. We do this in order to make the - // timestamp increments predictable; in normal mode, NetEq may decide to do - // accelerate or pre-emptive expand operations after some time, offsetting - // the timestamp. - EXPECT_TRUE(config_.neteq_config.for_test_no_time_stretching); - - constexpr int payload_type = 17; - receiver_->SetCodecs({{payload_type, codec}}); - - const AudioCodecInfo info = SetEncoder(payload_type, codec); - const int output_sample_rate_hz = info.sample_rate_hz; - const size_t output_channels = info.num_channels; - const size_t samples_per_ms = rtc::checked_cast( - rtc::CheckedDivExact(output_sample_rate_hz, 1000)); - const AudioFrame::VADActivity expected_vad_activity = - output_sample_rate_hz > 16000 ? AudioFrame::kVadActive - : AudioFrame::kVadPassive; - - // Expect the first output timestamp to be 5*fs/8000 samples before the - // first inserted timestamp (because of NetEq's look-ahead). (This value is - // defined in Expand::overlap_length_.) - uint32_t expected_output_ts = - last_packet_send_timestamp_ - - rtc::CheckedDivExact(5 * output_sample_rate_hz, 8000); - - AudioFrame frame; - bool muted; - EXPECT_EQ(0, receiver_->GetAudio(output_sample_rate_hz, &frame, &muted)); - // Expect timestamp = 0 before first packet is inserted. 
- EXPECT_EQ(0u, frame.timestamp_); - for (int i = 0; i < 5; ++i) { - const int num_10ms_frames = InsertOnePacketOfSilence(info); - for (int k = 0; k < num_10ms_frames; ++k) { - EXPECT_EQ(0, - receiver_->GetAudio(output_sample_rate_hz, &frame, &muted)); - EXPECT_EQ(expected_output_ts, frame.timestamp_); - expected_output_ts += rtc::checked_cast(10 * samples_per_ms); - EXPECT_EQ(10 * samples_per_ms, frame.samples_per_channel_); - EXPECT_EQ(output_sample_rate_hz, frame.sample_rate_hz_); - EXPECT_EQ(output_channels, frame.num_channels_); - EXPECT_EQ(AudioFrame::kNormalSpeech, frame.speech_type_); - EXPECT_EQ(expected_vad_activity, frame.vad_activity_); - EXPECT_FALSE(muted); - } - } - } -}; - -#if defined(WEBRTC_ANDROID) -#define MAYBE_VerifyAudioFramePCMU DISABLED_VerifyAudioFramePCMU -#else -#define MAYBE_VerifyAudioFramePCMU VerifyAudioFramePCMU -#endif -TEST_F(AcmReceiverTestFaxModeOldApi, MAYBE_VerifyAudioFramePCMU) { - RunVerifyAudioFrame({"PCMU", 8000, 1}); -} - -#if defined(WEBRTC_ANDROID) -#define MAYBE_VerifyAudioFrameOpus DISABLED_VerifyAudioFrameOpus -#else -#define MAYBE_VerifyAudioFrameOpus VerifyAudioFrameOpus -#endif -TEST_F(AcmReceiverTestFaxModeOldApi, MAYBE_VerifyAudioFrameOpus) { - RunVerifyAudioFrame({"opus", 48000, 2}); -} - -#if defined(WEBRTC_ANDROID) -#define MAYBE_PostdecodingVad DISABLED_PostdecodingVad -#else -#define MAYBE_PostdecodingVad PostdecodingVad -#endif -TEST_F(AcmReceiverTestOldApi, MAYBE_PostdecodingVad) { - EXPECT_TRUE(config_.neteq_config.enable_post_decode_vad); - constexpr int payload_type = 34; - const SdpAudioFormat codec = {"L16", 16000, 1}; - const AudioCodecInfo info = SetEncoder(payload_type, codec); - receiver_->SetCodecs({{payload_type, codec}}); - constexpr int kNumPackets = 5; - AudioFrame frame; - for (int n = 0; n < kNumPackets; ++n) { - const int num_10ms_frames = InsertOnePacketOfSilence(info); - for (int k = 0; k < num_10ms_frames; ++k) { - bool muted; - ASSERT_EQ(0, receiver_->GetAudio(info.sample_rate_hz, &frame, &muted)); - } - } - EXPECT_EQ(AudioFrame::kVadPassive, frame.vad_activity_); -} - -class AcmReceiverTestPostDecodeVadPassiveOldApi : public AcmReceiverTestOldApi { - protected: - AcmReceiverTestPostDecodeVadPassiveOldApi() { - config_.neteq_config.enable_post_decode_vad = false; - } -}; - -#if defined(WEBRTC_ANDROID) -#define MAYBE_PostdecodingVad DISABLED_PostdecodingVad -#else -#define MAYBE_PostdecodingVad PostdecodingVad -#endif -TEST_F(AcmReceiverTestPostDecodeVadPassiveOldApi, MAYBE_PostdecodingVad) { - EXPECT_FALSE(config_.neteq_config.enable_post_decode_vad); - constexpr int payload_type = 34; - const SdpAudioFormat codec = {"L16", 16000, 1}; - const AudioCodecInfo info = SetEncoder(payload_type, codec); - auto const value = encoder_factory_->QueryAudioEncoder(codec); - ASSERT_TRUE(value.has_value()); - receiver_->SetCodecs({{payload_type, codec}}); - const int kNumPackets = 5; - AudioFrame frame; - for (int n = 0; n < kNumPackets; ++n) { - const int num_10ms_frames = InsertOnePacketOfSilence(info); - for (int k = 0; k < num_10ms_frames; ++k) { - bool muted; - ASSERT_EQ(0, receiver_->GetAudio(info.sample_rate_hz, &frame, &muted)); - } - } - EXPECT_EQ(AudioFrame::kVadUnknown, frame.vad_activity_); -} - -#if defined(WEBRTC_ANDROID) -#define MAYBE_LastAudioCodec DISABLED_LastAudioCodec -#else -#define MAYBE_LastAudioCodec LastAudioCodec -#endif -#if defined(WEBRTC_CODEC_OPUS) -TEST_F(AcmReceiverTestOldApi, MAYBE_LastAudioCodec) { - const std::map codecs = { - {0, {"PCMU", 8000, 1}}, {1, {"PCMA", 8000, 1}}, {2, {"L16", 
32000, 1}}}; - const std::map cng_payload_types = { - {8000, 100}, {16000, 101}, {32000, 102}}; - { - std::map receive_codecs = codecs; - for (const auto& cng_type : cng_payload_types) { - receive_codecs.emplace(std::make_pair( - cng_type.second, SdpAudioFormat("CN", cng_type.first, 1))); - } - receiver_->SetCodecs(receive_codecs); - } - - // No audio payload is received. - EXPECT_EQ(absl::nullopt, receiver_->LastDecoder()); - - // Start with sending DTX. - packet_sent_ = false; - InsertOnePacketOfSilence( - SetEncoder(0, codecs.at(0), cng_payload_types)); // Enough to test - // with one codec. - ASSERT_TRUE(packet_sent_); - EXPECT_EQ(AudioFrameType::kAudioFrameCN, last_frame_type_); - - // Has received, only, DTX. Last Audio codec is undefined. - EXPECT_EQ(absl::nullopt, receiver_->LastDecoder()); - EXPECT_EQ(absl::nullopt, receiver_->last_packet_sample_rate_hz()); - - for (size_t i = 0; i < codecs.size(); ++i) { - // Set DTX off to send audio payload. - packet_sent_ = false; - const int payload_type = rtc::checked_cast(i); - const AudioCodecInfo info_without_cng = - SetEncoder(payload_type, codecs.at(i)); - InsertOnePacketOfSilence(info_without_cng); - - // Sanity check if Actually an audio payload received, and it should be - // of type "speech." - ASSERT_TRUE(packet_sent_); - ASSERT_EQ(AudioFrameType::kAudioFrameSpeech, last_frame_type_); - EXPECT_EQ(info_without_cng.sample_rate_hz, - receiver_->last_packet_sample_rate_hz()); - - // Set VAD on to send DTX. Then check if the "Last Audio codec" returns - // the expected codec. Encode repeatedly until a DTX is sent. - const AudioCodecInfo info_with_cng = - SetEncoder(payload_type, codecs.at(i), cng_payload_types); - while (last_frame_type_ != AudioFrameType::kAudioFrameCN) { - packet_sent_ = false; - InsertOnePacketOfSilence(info_with_cng); - ASSERT_TRUE(packet_sent_); - } - EXPECT_EQ(info_with_cng.sample_rate_hz, - receiver_->last_packet_sample_rate_hz()); - EXPECT_EQ(codecs.at(i), receiver_->LastDecoder()->second); - } -} -#endif - -// Check if the statistics are initialized correctly. Before any call to ACM -// all fields have to be zero. -#if defined(WEBRTC_ANDROID) -#define MAYBE_InitializedToZero DISABLED_InitializedToZero -#else -#define MAYBE_InitializedToZero InitializedToZero -#endif -TEST_F(AcmReceiverTestOldApi, MAYBE_InitializedToZero) { - AudioDecodingCallStats stats; - receiver_->GetDecodingCallStatistics(&stats); - EXPECT_EQ(0, stats.calls_to_neteq); - EXPECT_EQ(0, stats.calls_to_silence_generator); - EXPECT_EQ(0, stats.decoded_normal); - EXPECT_EQ(0, stats.decoded_cng); - EXPECT_EQ(0, stats.decoded_neteq_plc); - EXPECT_EQ(0, stats.decoded_plc_cng); - EXPECT_EQ(0, stats.decoded_muted_output); -} - -#if defined(WEBRTC_ANDROID) -#define MAYBE_VerifyOutputFrame DISABLED_VerifyOutputFrame -#else -#define MAYBE_VerifyOutputFrame VerifyOutputFrame -#endif -TEST_F(AcmReceiverTestOldApi, MAYBE_VerifyOutputFrame) { - AudioFrame audio_frame; - const int kSampleRateHz = 32000; - bool muted; - EXPECT_EQ(0, receiver_->GetAudio(kSampleRateHz, &audio_frame, &muted)); - ASSERT_FALSE(muted); - EXPECT_EQ(0u, audio_frame.timestamp_); - EXPECT_GT(audio_frame.num_channels_, 0u); - EXPECT_EQ(static_cast(kSampleRateHz / 100), - audio_frame.samples_per_channel_); - EXPECT_EQ(kSampleRateHz, audio_frame.sample_rate_hz_); -} - -// Insert some packets and pull audio. Check statistics are valid. Then, -// simulate packet loss and check if PLC and PLC-to-CNG statistics are -// correctly updated. 
-#if defined(WEBRTC_ANDROID) -#define MAYBE_NetEqCalls DISABLED_NetEqCalls -#else -#define MAYBE_NetEqCalls NetEqCalls -#endif -TEST_F(AcmReceiverTestOldApi, MAYBE_NetEqCalls) { - AudioDecodingCallStats stats; - const int kNumNormalCalls = 10; - const int kSampleRateHz = 16000; - const int kNumSamples10ms = kSampleRateHz / 100; - const int kFrameSizeMs = 10; // Multiple of 10. - const int kFrameSizeSamples = kFrameSizeMs / 10 * kNumSamples10ms; - const int kPayloadSizeBytes = kFrameSizeSamples * sizeof(int16_t); - const uint8_t kPayloadType = 111; - RTPHeader rtp_header; - AudioFrame audio_frame; - bool muted; - - receiver_->SetCodecs( - {{kPayloadType, SdpAudioFormat("L16", kSampleRateHz, 1)}}); - rtp_header.sequenceNumber = 0xABCD; - rtp_header.timestamp = 0xABCDEF01; - rtp_header.payloadType = kPayloadType; - rtp_header.markerBit = false; - rtp_header.ssrc = 0x1234; - rtp_header.numCSRCs = 0; - - for (int num_calls = 0; num_calls < kNumNormalCalls; ++num_calls) { - const uint8_t kPayload[kPayloadSizeBytes] = {0}; - ASSERT_EQ(0, receiver_->InsertPacket(rtp_header, kPayload)); - ++rtp_header.sequenceNumber; - rtp_header.timestamp += kFrameSizeSamples; - ASSERT_EQ(0, receiver_->GetAudio(-1, &audio_frame, &muted)); - EXPECT_FALSE(muted); - } - receiver_->GetDecodingCallStatistics(&stats); - EXPECT_EQ(kNumNormalCalls, stats.calls_to_neteq); - EXPECT_EQ(0, stats.calls_to_silence_generator); - EXPECT_EQ(kNumNormalCalls, stats.decoded_normal); - EXPECT_EQ(0, stats.decoded_cng); - EXPECT_EQ(0, stats.decoded_neteq_plc); - EXPECT_EQ(0, stats.decoded_plc_cng); - EXPECT_EQ(0, stats.decoded_muted_output); - - const int kNumPlc = 3; - const int kNumPlcCng = 5; - - // Simulate packet-loss. NetEq first performs PLC then PLC fades to CNG. - for (int n = 0; n < kNumPlc + kNumPlcCng; ++n) { - ASSERT_EQ(0, receiver_->GetAudio(-1, &audio_frame, &muted)); - EXPECT_FALSE(muted); - } - receiver_->GetDecodingCallStatistics(&stats); - EXPECT_EQ(kNumNormalCalls + kNumPlc + kNumPlcCng, stats.calls_to_neteq); - EXPECT_EQ(0, stats.calls_to_silence_generator); - EXPECT_EQ(kNumNormalCalls, stats.decoded_normal); - EXPECT_EQ(0, stats.decoded_cng); - EXPECT_EQ(kNumPlc, stats.decoded_neteq_plc); - EXPECT_EQ(kNumPlcCng, stats.decoded_plc_cng); - EXPECT_EQ(0, stats.decoded_muted_output); - // TODO(henrik.lundin) Add a test with muted state enabled. -} - -} // namespace acm2 - -} // namespace webrtc diff --git a/modules/audio_coding/acm2/acm_remixing.cc b/modules/audio_coding/acm2/acm_remixing.cc index 13709dbbee..45eb3c1247 100644 --- a/modules/audio_coding/acm2/acm_remixing.cc +++ b/modules/audio_coding/acm2/acm_remixing.cc @@ -14,7 +14,7 @@ namespace webrtc { -void DownMixFrame(const AudioFrame& input, rtc::ArrayView output) { +void DownMixFrame(const AudioFrame& input, ArrayView output) { RTC_DCHECK_EQ(input.num_channels_, 2); RTC_DCHECK_EQ(output.size(), input.samples_per_channel_); @@ -23,7 +23,7 @@ void DownMixFrame(const AudioFrame& input, rtc::ArrayView output) { } else { const int16_t* const input_data = input.data(); for (size_t n = 0; n < input.samples_per_channel_; ++n) { - output[n] = rtc::dchecked_cast( + output[n] = dchecked_cast( (int32_t{input_data[2 * n]} + int32_t{input_data[2 * n + 1]}) >> 1); } } @@ -94,7 +94,7 @@ void ReMixFrame(const AudioFrame& input, // When downmixing is needed, and the input is stereo, average the channels. 
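As an aside on the remixing code in this hunk: DownMixFrame and ReMixFrame average the two channels as (left + right) >> 1 in 32-bit arithmetic. A tiny standalone check of that arithmetic, using the same input values the unittest hunk below sets up (2 and 0 per sample pair), is sketched here; it is not part of the patch.

#include <cassert>
#include <cstdint>

int main() {
  const int16_t left = 2;
  const int16_t right = 0;
  // Widen to int32_t before adding so the sum cannot overflow int16_t.
  const int16_t mono =
      static_cast<int16_t>((int32_t{left} + int32_t{right}) >> 1);
  assert(mono == 1);  // (2 + 0) >> 1 == 1.
  return 0;
}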
if (input.num_channels_ == 2) { for (size_t n = 0; n < input.samples_per_channel_; ++n) { - (*output)[n] = rtc::dchecked_cast( + (*output)[n] = dchecked_cast( (int32_t{input_data[2 * n]} + int32_t{input_data[2 * n + 1]}) >> 1); } return; diff --git a/modules/audio_coding/acm2/acm_remixing.h b/modules/audio_coding/acm2/acm_remixing.h index 661569b033..c6ae2c3844 100644 --- a/modules/audio_coding/acm2/acm_remixing.h +++ b/modules/audio_coding/acm2/acm_remixing.h @@ -13,13 +13,14 @@ #include +#include "api/array_view.h" #include "api/audio/audio_frame.h" namespace webrtc { // Stereo-to-mono downmixing. The length of the output must equal to the number // of samples per channel in the input. -void DownMixFrame(const AudioFrame& input, rtc::ArrayView output); +void DownMixFrame(const AudioFrame& input, ArrayView output); // Remixes the interleaved input frame to an interleaved output data vector. The // remixed data replaces the data in the output vector which is resized if diff --git a/modules/audio_coding/acm2/acm_remixing_unittest.cc b/modules/audio_coding/acm2/acm_remixing_unittest.cc index a1a816f727..7c0c0ef665 100644 --- a/modules/audio_coding/acm2/acm_remixing_unittest.cc +++ b/modules/audio_coding/acm2/acm_remixing_unittest.cc @@ -28,11 +28,8 @@ namespace webrtc { TEST(AcmRemixing, DownMixFrame) { std::vector out(480, 0); AudioFrame in; - in.num_channels_ = 2; - in.samples_per_channel_ = 480; - - int16_t* const in_data = in.mutable_data(); - for (size_t k = 0; k < in.samples_per_channel_; ++k) { + InterleavedView const in_data = in.mutable_data(480, 2); + for (size_t k = 0; k < in_data.samples_per_channel(); ++k) { in_data[2 * k] = 2; in_data[2 * k + 1] = 0; } diff --git a/modules/audio_coding/acm2/acm_resampler.cc b/modules/audio_coding/acm2/acm_resampler.cc index e307c6ca57..85f22ac6ef 100644 --- a/modules/audio_coding/acm2/acm_resampler.cc +++ b/modules/audio_coding/acm2/acm_resampler.cc @@ -12,6 +12,7 @@ #include +#include "api/audio/audio_frame.h" #include "rtc_base/logging.h" namespace webrtc { @@ -27,34 +28,93 @@ int ACMResampler::Resample10Msec(const int16_t* in_audio, size_t num_audio_channels, size_t out_capacity_samples, int16_t* out_audio) { - size_t in_length = in_freq_hz * num_audio_channels / 100; + InterleavedView src( + in_audio, SampleRateToDefaultChannelSize(in_freq_hz), num_audio_channels); + InterleavedView dst(out_audio, + SampleRateToDefaultChannelSize(out_freq_hz), + num_audio_channels); + RTC_DCHECK_GE(out_capacity_samples, dst.size()); if (in_freq_hz == out_freq_hz) { - if (out_capacity_samples < in_length) { + if (out_capacity_samples < src.data().size()) { RTC_DCHECK_NOTREACHED(); return -1; } - memcpy(out_audio, in_audio, in_length * sizeof(int16_t)); - return static_cast(in_length / num_audio_channels); + CopySamples(dst, src); + RTC_DCHECK_EQ(dst.samples_per_channel(), src.samples_per_channel()); + return static_cast(dst.samples_per_channel()); } - if (resampler_.InitializeIfNeeded(in_freq_hz, out_freq_hz, - num_audio_channels) != 0) { - RTC_LOG(LS_ERROR) << "InitializeIfNeeded(" << in_freq_hz << ", " - << out_freq_hz << ", " << num_audio_channels + int out_length = resampler_.Resample(src, dst); + if (out_length == -1) { + RTC_LOG(LS_ERROR) << "Resample(" << in_audio << ", " << src.data().size() + << ", " << out_audio << ", " << out_capacity_samples << ") failed."; return -1; } + RTC_DCHECK_EQ(out_length, dst.size()); + RTC_DCHECK_EQ(out_length / num_audio_channels, dst.samples_per_channel()); + return static_cast(dst.samples_per_channel()); +} - int 
out_length = - resampler_.Resample(in_audio, in_length, out_audio, out_capacity_samples); - if (out_length == -1) { - RTC_LOG(LS_ERROR) << "Resample(" << in_audio << ", " << in_length << ", " - << out_audio << ", " << out_capacity_samples - << ") failed."; - return -1; +ResamplerHelper::ResamplerHelper() { + ClearSamples(last_audio_buffer_); +} + +bool ResamplerHelper::MaybeResample(int desired_sample_rate_hz, + AudioFrame* audio_frame) { + const int current_sample_rate_hz = audio_frame->sample_rate_hz_; + RTC_DCHECK_NE(current_sample_rate_hz, 0); + + // Update if resampling is required. + const bool need_resampling = + (desired_sample_rate_hz != -1) && + (current_sample_rate_hz != desired_sample_rate_hz); + + if (need_resampling && !resampled_last_output_frame_) { + // Prime the resampler with the last frame. + int16_t temp_output[AudioFrame::kMaxDataSizeSamples]; + int samples_per_channel_int = resampler_.Resample10Msec( + last_audio_buffer_.data(), current_sample_rate_hz, + desired_sample_rate_hz, audio_frame->num_channels_, + AudioFrame::kMaxDataSizeSamples, temp_output); + if (samples_per_channel_int < 0) { + RTC_LOG(LS_ERROR) << "AcmReceiver::GetAudio - " + "Resampling last_audio_buffer_ failed."; + return false; + } } - return static_cast(out_length / num_audio_channels); + // TODO(bugs.webrtc.org/3923) Glitches in the output may appear if the output + // rate from NetEq changes. + if (need_resampling) { + // TODO(yujo): handle this more efficiently for muted frames. + int samples_per_channel_int = resampler_.Resample10Msec( + audio_frame->data(), current_sample_rate_hz, desired_sample_rate_hz, + audio_frame->num_channels_, AudioFrame::kMaxDataSizeSamples, + audio_frame->mutable_data()); + if (samples_per_channel_int < 0) { + RTC_LOG(LS_ERROR) + << "AcmReceiver::GetAudio - Resampling audio_buffer_ failed."; + return false; + } + audio_frame->samples_per_channel_ = + static_cast(samples_per_channel_int); + audio_frame->sample_rate_hz_ = desired_sample_rate_hz; + RTC_DCHECK_EQ(audio_frame->sample_rate_hz_, + dchecked_cast(audio_frame->samples_per_channel_ * 100)); + resampled_last_output_frame_ = true; + } else { + resampled_last_output_frame_ = false; + // We might end up here ONLY if codec is changed. + } + + // Store current audio in `last_audio_buffer_` for next time. + // TODO: b/335805780 - Use CopySamples(). + memcpy(last_audio_buffer_.data(), audio_frame->data(), + sizeof(int16_t) * audio_frame->samples_per_channel_ * + audio_frame->num_channels_); + + return true; } } // namespace acm2 diff --git a/modules/audio_coding/acm2/acm_resampler.h b/modules/audio_coding/acm2/acm_resampler.h index 96ba93a762..cf7133c38d 100644 --- a/modules/audio_coding/acm2/acm_resampler.h +++ b/modules/audio_coding/acm2/acm_resampler.h @@ -14,6 +14,7 @@ #include #include +#include "api/audio/audio_frame.h" #include "common_audio/resampler/include/push_resampler.h" namespace webrtc { @@ -24,6 +25,7 @@ class ACMResampler { ACMResampler(); ~ACMResampler(); + // TODO: b/335805780 - Change to accept InterleavedView<>. int Resample10Msec(const int16_t* in_audio, int in_freq_hz, int out_freq_hz, @@ -35,6 +37,22 @@ class ACMResampler { PushResampler resampler_; }; +// Helper class to perform resampling if needed, meant to be used after +// receiving the audio_frame from NetEq. Provides reasonably glitch free +// transitions between different output sample rates from NetEq. +class ResamplerHelper { + public: + ResamplerHelper(); + + // Resamples audio_frame if it is not already in desired_sample_rate_hz. 
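The ResamplerHelper introduced above keeps the previous 10 ms output frame so that, when the requested output rate starts to differ from the NetEq rate, the resampler can first be primed with that stored frame before the current frame is converted. A self-contained sketch of the same state machine (the class name and the toy Resample10ms() stand-in are illustrative, not WebRTC APIs):

#include <cstddef>
#include <cstdint>
#include <vector>

// Toy stand-in for a real resampler call, only so the sketch compiles:
// nearest-sample conversion of one 10 ms frame to out_hz (the input rate is
// implied by in.size()).
static std::vector<int16_t> Resample10ms(const std::vector<int16_t>& in,
                                         int /*in_hz*/,
                                         int out_hz) {
  std::vector<int16_t> out(out_hz / 100);
  for (std::size_t i = 0; i < out.size(); ++i)
    out[i] = in.empty() ? 0 : in[i * in.size() / out.size()];
  return out;
}

// Mirrors MaybeResample() above: when resampling starts to be needed and the
// previous frame went out unresampled, run the stored previous frame through
// the resampler first (result ignored) to warm up its state, then convert the
// current frame and remember what was handed out.
class RateSwitchSmoother {
 public:
  std::vector<int16_t> Process(std::vector<int16_t> frame,
                               int current_hz,
                               int desired_hz) {
    const bool resample = desired_hz != -1 && desired_hz != current_hz;
    if (resample && !resampled_last_frame_)
      Resample10ms(last_frame_, current_hz, desired_hz);  // Prime only.
    if (resample)
      frame = Resample10ms(frame, current_hz, desired_hz);
    resampled_last_frame_ = resample;
    last_frame_ = frame;
    return frame;
  }

 private:
  std::vector<int16_t> last_frame_;
  bool resampled_last_frame_ = true;
};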
+ bool MaybeResample(int desired_sample_rate_hz, AudioFrame* audio_frame); + + private: + ACMResampler resampler_; + bool resampled_last_output_frame_ = true; + std::array last_audio_buffer_; +}; + } // namespace acm2 } // namespace webrtc diff --git a/modules/audio_coding/acm2/acm_send_test.cc b/modules/audio_coding/acm2/acm_send_test.cc index fddaa87701..c79bded007 100644 --- a/modules/audio_coding/acm2/acm_send_test.cc +++ b/modules/audio_coding/acm2/acm_send_test.cc @@ -18,6 +18,7 @@ #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/environment/environment_factory.h" #include "modules/audio_coding/include/audio_coding_module.h" #include "modules/audio_coding/neteq/tools/input_audio_file.h" #include "modules/audio_coding/neteq/tools/packet.h" @@ -32,6 +33,7 @@ AcmSendTestOldApi::AcmSendTestOldApi(InputAudioFile* audio_source, int source_rate_hz, int test_duration_ms) : clock_(0), + env_(CreateEnvironment(&clock_)), acm_(webrtc::AudioCodingModule::Create()), audio_source_(audio_source), source_rate_hz_(source_rate_hz), @@ -69,11 +71,11 @@ bool AcmSendTestOldApi::RegisterCodec(absl::string_view payload_name, } format.num_channels = 2; } - format.parameters["ptime"] = rtc::ToString(rtc::CheckedDivExact( - frame_size_samples, rtc::CheckedDivExact(clockrate_hz, 1000))); + format.parameters["ptime"] = absl::StrCat( + CheckedDivExact(frame_size_samples, CheckedDivExact(clockrate_hz, 1000))); auto factory = CreateBuiltinAudioEncoderFactory(); acm_->SetEncoder( - factory->MakeAudioEncoder(payload_type, format, absl::nullopt)); + factory->Create(env_, format, {.payload_type = payload_type})); codec_registered_ = true; input_frame_.num_channels_ = num_channels; RTC_DCHECK_LE(input_block_size_samples_ * input_frame_.num_channels_, @@ -117,12 +119,13 @@ std::unique_ptr AcmSendTestOldApi::NextPacket() { } // This method receives the callback from ACM when a new packet is produced. -int32_t AcmSendTestOldApi::SendData(AudioFrameType frame_type, - uint8_t payload_type, - uint32_t timestamp, - const uint8_t* payload_data, - size_t payload_len_bytes, - int64_t absolute_capture_timestamp_ms) { +int32_t AcmSendTestOldApi::SendData( + AudioFrameType frame_type, + uint8_t payload_type, + uint32_t timestamp, + const uint8_t* payload_data, + size_t payload_len_bytes, + int64_t /* absolute_capture_timestamp_ms */) { // Store the packet locally. frame_type_ = frame_type; payload_type_ = payload_type; @@ -135,14 +138,13 @@ int32_t AcmSendTestOldApi::SendData(AudioFrameType frame_type, std::unique_ptr AcmSendTestOldApi::CreatePacket() { const size_t kRtpHeaderSize = 12; - rtc::CopyOnWriteBuffer packet_buffer(last_payload_vec_.size() + - kRtpHeaderSize); + CopyOnWriteBuffer packet_buffer(last_payload_vec_.size() + kRtpHeaderSize); uint8_t* packet_memory = packet_buffer.MutableData(); // Populate the header bytes. 
packet_memory[0] = 0x80; packet_memory[1] = static_cast(payload_type_); packet_memory[2] = (sequence_number_ >> 8) & 0xFF; - packet_memory[3] = (sequence_number_)&0xFF; + packet_memory[3] = (sequence_number_) & 0xFF; packet_memory[4] = (timestamp_ >> 24) & 0xFF; packet_memory[5] = (timestamp_ >> 16) & 0xFF; packet_memory[6] = (timestamp_ >> 8) & 0xFF; diff --git a/modules/audio_coding/acm2/acm_send_test.h b/modules/audio_coding/acm2/acm_send_test.h index 0bd24705fd..1bba67618b 100644 --- a/modules/audio_coding/acm2/acm_send_test.h +++ b/modules/audio_coding/acm2/acm_send_test.h @@ -16,6 +16,7 @@ #include "absl/strings/string_view.h" #include "api/audio/audio_frame.h" +#include "api/environment/environment.h" #include "modules/audio_coding/include/audio_coding_module.h" #include "modules/audio_coding/neteq/tools/packet_source.h" #include "system_wrappers/include/clock.h" @@ -70,6 +71,7 @@ class AcmSendTestOldApi : public AudioPacketizationCallback, std::unique_ptr CreatePacket(); SimulatedClock clock_; + const Environment env_; std::unique_ptr acm_; InputAudioFile* audio_source_; int source_rate_hz_; diff --git a/modules/audio_coding/acm2/audio_coding_module.cc b/modules/audio_coding/acm2/audio_coding_module.cc index 97a204ac4f..f2b4237301 100644 --- a/modules/audio_coding/acm2/audio_coding_module.cc +++ b/modules/audio_coding/acm2/audio_coding_module.cc @@ -10,15 +10,21 @@ #include "modules/audio_coding/include/audio_coding_module.h" -#include +#include +#include #include +#include +#include +#include +#include -#include "absl/strings/match.h" #include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/function_view.h" #include "modules/audio_coding/acm2/acm_remixing.h" #include "modules/audio_coding/acm2/acm_resampler.h" -#include "modules/include/module_common_types.h" +#include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/include/module_common_types_public.h" #include "rtc_base/buffer.h" #include "rtc_base/checks.h" @@ -43,12 +49,14 @@ class AudioCodingModuleImpl final : public AudioCodingModule { explicit AudioCodingModuleImpl(); ~AudioCodingModuleImpl() override; + void Reset() override; + ///////////////////////////////////////// // Sender // - void ModifyEncoder(rtc::FunctionView*)> - modifier) override; + void ModifyEncoder( + FunctionView*)> modifier) override; // Register a transport callback which will be // called to deliver the encoded buffers. @@ -108,7 +116,7 @@ class AudioCodingModuleImpl final : public AudioCodingModule { // TODO(bugs.webrtc.org/10739): change `absolute_capture_timestamp_ms` to // int64_t when it always receives a valid value. 
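CreatePacket() above writes the RTP header fields byte by byte in network (big-endian) order; the reformatted "(sequence_number_) & 0xFF" line is purely cosmetic. The same packing expressed as small helpers (illustrative; the caller must supply a buffer with enough room):

#include <cstddef>
#include <cstdint>

// Write a 16-bit value in network byte order, as CreatePacket() does for the
// sequence number (header bytes 2-3).
void WriteBigEndian16(uint8_t* dst, uint16_t value) {
  dst[0] = static_cast<uint8_t>((value >> 8) & 0xFF);
  dst[1] = static_cast<uint8_t>(value & 0xFF);
}

// Write a 32-bit value in network byte order, as CreatePacket() does for the
// timestamp (bytes 4-7) and SSRC (bytes 8-11).
void WriteBigEndian32(uint8_t* dst, uint32_t value) {
  for (std::size_t i = 0; i < 4; ++i) {
    dst[i] = static_cast<uint8_t>((value >> (8 * (3 - i))) & 0xFF);
  }
}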
int Encode(const InputData& input_data, - absl::optional absolute_capture_timestamp_ms) + std::optional absolute_capture_timestamp_ms) RTC_EXCLUSIVE_LOCKS_REQUIRED(acm_mutex_); bool HaveValidEncoder(absl::string_view caller_name) const @@ -134,7 +142,7 @@ class AudioCodingModuleImpl final : public AudioCodingModule { int UpdateUponReceivingCodec(int index); mutable Mutex acm_mutex_; - rtc::Buffer encode_buffer_ RTC_GUARDED_BY(acm_mutex_); + Buffer encode_buffer_ RTC_GUARDED_BY(acm_mutex_); uint32_t expected_codec_ts_ RTC_GUARDED_BY(acm_mutex_); uint32_t expected_in_ts_ RTC_GUARDED_BY(acm_mutex_); acm2::ACMResampler resampler_ RTC_GUARDED_BY(acm_mutex_); @@ -152,6 +160,8 @@ class AudioCodingModuleImpl final : public AudioCodingModule { bool first_frame_ RTC_GUARDED_BY(acm_mutex_); uint32_t last_timestamp_ RTC_GUARDED_BY(acm_mutex_); uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(acm_mutex_); + std::optional absolute_capture_timestamp_ms_ + RTC_GUARDED_BY(acm_mutex_); Mutex callback_mutex_; AudioPacketizationCallback* packetization_callback_ @@ -160,6 +170,10 @@ class AudioCodingModuleImpl final : public AudioCodingModule { int codec_histogram_bins_log_[static_cast( AudioEncoder::CodecType::kMaxLoggedAudioCodecTypes)]; int number_of_consecutive_empty_packets_; + + mutable Mutex stats_mutex_; + ANAStats ana_stats_ RTC_GUARDED_BY(stats_mutex_); + int target_bitrate_ RTC_GUARDED_BY(stats_mutex_) = -1; }; // Adds a codec usage sample to the histogram. @@ -196,7 +210,7 @@ AudioCodingModuleImpl::~AudioCodingModuleImpl() = default; int32_t AudioCodingModuleImpl::Encode( const InputData& input_data, - absl::optional absolute_capture_timestamp_ms) { + std::optional absolute_capture_timestamp_ms) { // TODO(bugs.webrtc.org/10739): add dcheck that // `audio_frame.absolute_capture_timestamp_ms()` always has a value. AudioEncoder::EncodedInfo encoded_info; @@ -216,7 +230,7 @@ int32_t AudioCodingModuleImpl::Encode( first_frame_ ? input_data.input_timestamp : last_rtp_timestamp_ + - rtc::dchecked_cast(rtc::CheckedDivExact( + dchecked_cast(CheckedDivExact( int64_t{input_data.input_timestamp - last_timestamp_} * encoder_stack_->RtpTimestampRateHz(), int64_t{encoder_stack_->SampleRateHz()})); @@ -225,11 +239,15 @@ int32_t AudioCodingModuleImpl::Encode( last_rtp_timestamp_ = rtp_timestamp; first_frame_ = false; + if (!absolute_capture_timestamp_ms_.has_value()) { + absolute_capture_timestamp_ms_ = absolute_capture_timestamp_ms; + } + // Clear the buffer before reuse - encoded data will get appended. 
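Encode() above derives the next RTP timestamp by rescaling the input-timestamp delta from the encoder sample rate to the RTP timestamp rate, with CheckedDivExact/dchecked_cast guarding the conversion. The underlying arithmetic without the checked helpers (illustrative sketch):

#include <cstdint>

// Advance an RTP timestamp by an input delta given at sample_rate_hz,
// rescaled to rtp_rate_hz. For example, a delta of 480 samples at 48000 Hz
// with a 48000 Hz RTP clock advances the RTP timestamp by 480.
uint32_t NextRtpTimestamp(uint32_t last_rtp_timestamp,
                          uint32_t input_delta_samples,
                          int sample_rate_hz,
                          int rtp_rate_hz) {
  const int64_t rescaled =
      int64_t{input_delta_samples} * rtp_rate_hz / sample_rate_hz;
  // Unsigned wraparound is intended for RTP timestamps.
  return last_rtp_timestamp + static_cast<uint32_t>(rescaled);
}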
encode_buffer_.Clear(); encoded_info = encoder_stack_->Encode( rtp_timestamp, - rtc::ArrayView( + ArrayView( input_data.audio, input_data.audio_channel * input_data.length_per_channel), &encode_buffer_); @@ -271,10 +289,16 @@ int32_t AudioCodingModuleImpl::Encode( packetization_callback_->SendData( frame_type, encoded_info.payload_type, encoded_info.encoded_timestamp, encode_buffer_.data(), encode_buffer_.size(), - absolute_capture_timestamp_ms.value_or(-1)); + absolute_capture_timestamp_ms_.value_or(-1)); } } + absolute_capture_timestamp_ms_.reset(); previous_pltype_ = encoded_info.payload_type; + { + MutexLock lock(&stats_mutex_); + ana_stats_ = encoder_stack_->GetANAStats(); + target_bitrate_ = encoder_stack_->GetTargetBitrate(); + } return static_cast(encode_buffer_.size()); } @@ -282,8 +306,16 @@ int32_t AudioCodingModuleImpl::Encode( // Sender // +void AudioCodingModuleImpl::Reset() { + MutexLock lock(&acm_mutex_); + absolute_capture_timestamp_ms_.reset(); + if (HaveValidEncoder("Reset")) { + encoder_stack_->Reset(); + } +} + void AudioCodingModuleImpl::ModifyEncoder( - rtc::FunctionView*)> modifier) { + FunctionView*)> modifier) { MutexLock lock(&acm_mutex_); modifier(&encoder_stack_); } @@ -413,7 +445,7 @@ int AudioCodingModuleImpl::PreprocessToAddData(const AudioFrame& in_frame, } if (!down_mix && !resample) { - // No pre-processing is required. + // No preprocessing is required. if (expected_in_ts_ == expected_codec_ts_) { // If we've never resampled, we can use the input frame as-is *ptr_out = &in_frame; @@ -443,8 +475,8 @@ int AudioCodingModuleImpl::PreprocessToAddData(const AudioFrame& in_frame, RTC_DCHECK_GE(audio.size(), preprocess_frame_.samples_per_channel_); RTC_DCHECK_GE(audio.size(), in_frame.samples_per_channel_); DownMixFrame(in_frame, - rtc::ArrayView( - dest_ptr_audio, preprocess_frame_.samples_per_channel_)); + ArrayView(dest_ptr_audio, + preprocess_frame_.samples_per_channel_)); preprocess_frame_.num_channels_ = 1; // Set the input of the resampler to the down-mixed signal. @@ -508,19 +540,13 @@ bool AudioCodingModuleImpl::HaveValidEncoder( } ANAStats AudioCodingModuleImpl::GetANAStats() const { - MutexLock lock(&acm_mutex_); - if (encoder_stack_) - return encoder_stack_->GetANAStats(); - // If no encoder is set, return default stats. 
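The Encode() changes above latch the capture timestamp of the first 10 ms chunk contributing to a packet in a std::optional member, hand it to SendData(), and clear the latch once the packet has gone out; value_or(-1) keeps the old "unknown" sentinel. The latch on its own (illustrative names, standard library only):

#include <cstdint>
#include <optional>

// Remembers the capture time of the first chunk fed in since the last packet
// was emitted.
class CaptureTimeLatch {
 public:
  void OnChunk(int64_t capture_time_ms) {
    if (!first_capture_time_ms_.has_value()) {
      first_capture_time_ms_ = capture_time_ms;
    }
  }
  // Returns the latched value (or -1 if none) and resets for the next packet.
  int64_t TakeForPacket() {
    const int64_t value = first_capture_time_ms_.value_or(-1);
    first_capture_time_ms_.reset();
    return value;
  }

 private:
  std::optional<int64_t> first_capture_time_ms_;
};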
- return ANAStats(); + MutexLock lock(&stats_mutex_); + return ana_stats_; } int AudioCodingModuleImpl::GetTargetBitrate() const { - MutexLock lock(&acm_mutex_); - if (!encoder_stack_) { - return -1; - } - return encoder_stack_->GetTargetBitrate(); + MutexLock lock(&stats_mutex_); + return target_bitrate_; } } // namespace diff --git a/modules/audio_coding/acm2/audio_coding_module_unittest.cc b/modules/audio_coding/acm2/audio_coding_module_unittest.cc index 210244154a..51f49b1112 100644 --- a/modules/audio_coding/acm2/audio_coding_module_unittest.cc +++ b/modules/audio_coding/acm2/audio_coding_module_unittest.cc @@ -13,48 +13,64 @@ #include #include +#include #include +#include #include +#include +#include +#include #include #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/audio_codecs/opus/audio_decoder_multi_channel_opus.h" -#include "api/audio_codecs/opus/audio_decoder_opus.h" #include "api/audio_codecs/opus/audio_encoder_multi_channel_opus.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" +#include "api/audio_codecs/opus/audio_encoder_opus_config.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/make_ref_counted.h" +#include "api/neteq/default_neteq_factory.h" +#include "api/neteq/neteq.h" +#include "api/scoped_refptr.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "common_audio/vad/include/vad.h" #include "modules/audio_coding/acm2/acm_receive_test.h" #include "modules/audio_coding/acm2/acm_send_test.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" -#include "modules/audio_coding/codecs/g711/audio_decoder_pcm.h" #include "modules/audio_coding/codecs/g711/audio_encoder_pcm.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/audio_coding/neteq/tools/audio_checksum.h" -#include "modules/audio_coding/neteq/tools/audio_loop.h" +#include "modules/audio_coding/neteq/tools/audio_sink.h" #include "modules/audio_coding/neteq/tools/constant_pcm_packet_source.h" #include "modules/audio_coding/neteq/tools/input_audio_file.h" #include "modules/audio_coding/neteq/tools/output_audio_file.h" #include "modules/audio_coding/neteq/tools/output_wav_file.h" #include "modules/audio_coding/neteq/tools/packet.h" -#include "modules/audio_coding/neteq/tools/rtp_file_source.h" +#include "rtc_base/buffer.h" #include "rtc_base/event.h" #include "rtc_base/message_digest.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/platform_thread.h" +#include "rtc_base/string_encode.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/arch.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" -#include "system_wrappers/include/cpu_features_wrapper.h" #include "system_wrappers/include/sleep.h" #include "test/audio_decoder_proxy_factory.h" +#include "test/gmock.h" #include "test/gtest.h" -#include "test/mock_audio_decoder.h" #include "test/mock_audio_encoder.h" #include "test/testsupport/file_utils.h" -#include "test/testsupport/rtc_expect_death.h" using ::testing::_; using ::testing::AtLeast; @@ -62,15 +78,6 @@ using ::testing::Invoke; namespace webrtc { 
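GetANAStats() and GetTargetBitrate() above no longer take acm_mutex_ and query the encoder; they return values that Encode() caches under a dedicated stats_mutex_ after every packet, trading a little freshness for less contention with the encode path. The pattern in isolation (illustrative, standard library only):

#include <mutex>

// The hot path updates the cache under its own mutex; getters read it without
// touching the main encoder lock, mirroring the stats_mutex_/target_bitrate_
// members added above.
class StatsCache {
 public:
  void Update(int target_bitrate_bps) {
    std::lock_guard<std::mutex> lock(mutex_);
    target_bitrate_bps_ = target_bitrate_bps;
  }
  int target_bitrate_bps() const {
    std::lock_guard<std::mutex> lock(mutex_);
    return target_bitrate_bps_;
  }

 private:
  mutable std::mutex mutex_;
  int target_bitrate_bps_ = -1;  // -1 means "nothing encoded yet".
};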
-namespace { -const int kSampleRateHz = 16000; -const int kNumSamples10ms = kSampleRateHz / 100; -const int kFrameSizeMs = 10; // Multiple of 10. -const int kFrameSizeSamples = kFrameSizeMs / 10 * kNumSamples10ms; -const int kPayloadSizeBytes = kFrameSizeSamples * sizeof(int16_t); -const uint8_t kPayloadType = 111; -} // namespace - class RtpData { public: RtpData(int samples_per_packet, uint8_t payload_type) @@ -110,7 +117,7 @@ class PacketizationCallbackStubOldApi : public AudioPacketizationCallback { uint32_t timestamp, const uint8_t* payload_data, size_t payload_len_bytes, - int64_t absolute_capture_timestamp_ms) override { + int64_t /* absolute_capture_timestamp_ms */) override { MutexLock lock(&mutex_); ++num_calls_; last_frame_type_ = frame_type; @@ -127,7 +134,7 @@ class PacketizationCallbackStubOldApi : public AudioPacketizationCallback { int last_payload_len_bytes() const { MutexLock lock(&mutex_); - return rtc::checked_cast(last_payload_vec_.size()); + return checked_cast(last_payload_vec_.size()); } AudioFrameType last_frame_type() const { @@ -161,9 +168,16 @@ class PacketizationCallbackStubOldApi : public AudioPacketizationCallback { class AudioCodingModuleTestOldApi : public ::testing::Test { protected: + static constexpr int kSampleRateHz = 16000; + static constexpr int kNumSamples10ms = kSampleRateHz / 100; + static constexpr int kFrameSizeMs = 10; // Multiple of 10. + static constexpr int kFrameSizeSamples = kFrameSizeMs / 10 * kNumSamples10ms; + static constexpr int kPayloadSizeBytes = kFrameSizeSamples * sizeof(int16_t); + static constexpr uint8_t kPayloadType = 111; + AudioCodingModuleTestOldApi() - : rtp_utility_(new RtpData(kFrameSizeSamples, kPayloadType)), - clock_(Clock::GetRealTimeClock()) {} + : env_(CreateEnvironment()), + rtp_utility_(new RtpData(kFrameSizeSamples, kPayloadType)) {} ~AudioCodingModuleTestOldApi() {} @@ -171,10 +185,8 @@ class AudioCodingModuleTestOldApi : public ::testing::Test { void SetUp() { acm_ = AudioCodingModule::Create(); - acm2::AcmReceiver::Config config; - config.clock = *clock_; - config.decoder_factory = CreateBuiltinAudioDecoderFactory(); - acm_receiver_ = std::make_unique(config); + neteq_ = DefaultNetEqFactory().Create(env_, NetEq::Config(), + CreateBuiltinAudioDecoderFactory()); rtp_utility_->Populate(&rtp_header_); @@ -197,9 +209,9 @@ class AudioCodingModuleTestOldApi : public ::testing::Test { } virtual void RegisterCodec() { - acm_receiver_->SetCodecs({{kPayloadType, *audio_format_}}); - acm_->SetEncoder(CreateBuiltinAudioEncoderFactory()->MakeAudioEncoder( - kPayloadType, *audio_format_, absl::nullopt)); + neteq_->SetCodecs({{kPayloadType, *audio_format_}}); + acm_->SetEncoder(CreateBuiltinAudioEncoderFactory()->Create( + env_, *audio_format_, {.payload_type = kPayloadType})); } virtual void InsertPacketAndPullAudio() { @@ -209,16 +221,17 @@ class AudioCodingModuleTestOldApi : public ::testing::Test { virtual void InsertPacket() { const uint8_t kPayload[kPayloadSizeBytes] = {0}; - ASSERT_EQ(0, acm_receiver_->InsertPacket(rtp_header_, - rtc::ArrayView( - kPayload, kPayloadSizeBytes))); + ASSERT_EQ(0, neteq_->InsertPacket( + rtp_header_, + ArrayView(kPayload, kPayloadSizeBytes), + /*receive_time=*/Timestamp::MinusInfinity())); rtp_utility_->Forward(&rtp_header_); } virtual void PullAudio() { AudioFrame audio_frame; bool muted; - ASSERT_EQ(0, acm_receiver_->GetAudio(-1, &audio_frame, &muted)); + ASSERT_EQ(0, neteq_->GetAudio(&audio_frame, &muted)); ASSERT_FALSE(muted); } @@ -238,36 +251,21 @@ class AudioCodingModuleTestOldApi 
: public ::testing::Test { VerifyEncoding(); } + Environment env_; std::unique_ptr rtp_utility_; std::unique_ptr acm_; - std::unique_ptr acm_receiver_; + std::unique_ptr neteq_; PacketizationCallbackStubOldApi packet_cb_; RTPHeader rtp_header_; AudioFrame input_frame_; - absl::optional audio_format_; + std::optional audio_format_; int pac_size_ = -1; - - Clock* clock_; }; class AudioCodingModuleTestOldApiDeathTest : public AudioCodingModuleTestOldApi {}; -// The below test is temporarily disabled on Windows due to problems -// with clang debug builds. -// TODO(tommi): Re-enable when we've figured out what the problem is. -// http://crbug.com/615050 -#if !defined(WEBRTC_WIN) && defined(__clang__) && RTC_DCHECK_IS_ON && \ - GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST_F(AudioCodingModuleTestOldApiDeathTest, FailOnZeroDesiredFrequency) { - AudioFrame audio_frame; - bool muted; - RTC_EXPECT_DEATH(acm_receiver_->GetAudio(0, &audio_frame, &muted), - "dst_sample_rate_hz"); -} -#endif - // Checks that the transport callback is invoked once for each speech packet. // Also checks that the frame type is kAudioFrameSpeech. TEST_F(AudioCodingModuleTestOldApi, TransportCallbackIsInvokedForEachPacket) { @@ -296,8 +294,8 @@ class AudioCodingModuleTestWithComfortNoiseOldApi : public AudioCodingModuleTestOldApi { protected: void RegisterCngCodec(int rtp_payload_type) { - acm_receiver_->SetCodecs({{kPayloadType, *audio_format_}, - {rtp_payload_type, {"cn", kSampleRateHz, 1}}}); + neteq_->SetCodecs({{kPayloadType, *audio_format_}, + {rtp_payload_type, {"cn", kSampleRateHz, 1}}}); acm_->ModifyEncoder([&](std::unique_ptr* enc) { AudioEncoderCngConfig config; config.speech_encoder = std::move(*enc); @@ -380,7 +378,9 @@ class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi { pull_audio_count_(0), next_insert_packet_time_ms_(0), fake_clock_(new SimulatedClock(0)) { - clock_ = fake_clock_.get(); + EnvironmentFactory override_clock(env_); + override_clock.Set(fake_clock_.get()); + env_ = override_clock.Create(); } void SetUp() { @@ -393,22 +393,22 @@ class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi { quit_.store(false); const auto attributes = - rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime); - send_thread_ = rtc::PlatformThread::SpawnJoinable( + ThreadAttributes().SetPriority(ThreadPriority::kRealtime); + send_thread_ = PlatformThread::SpawnJoinable( [this] { while (!quit_.load()) { CbSendImpl(); } }, "send", attributes); - insert_packet_thread_ = rtc::PlatformThread::SpawnJoinable( + insert_packet_thread_ = PlatformThread::SpawnJoinable( [this] { while (!quit_.load()) { CbInsertPacketImpl(); } }, "insert_packet", attributes); - pull_audio_thread_ = rtc::PlatformThread::SpawnJoinable( + pull_audio_thread_ = PlatformThread::SpawnJoinable( [this] { while (!quit_.load()) { CbPullAudioImpl(); @@ -457,7 +457,7 @@ class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi { SleepMs(1); { MutexLock lock(&mutex_); - if (clock_->TimeInMilliseconds() < next_insert_packet_time_ms_) { + if (env_.clock().TimeInMilliseconds() < next_insert_packet_time_ms_) { return; } next_insert_packet_time_ms_ += 10; @@ -472,7 +472,7 @@ class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi { { MutexLock lock(&mutex_); // Don't let the insert thread fall behind. 
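The multithreaded test above keeps its structure after the rtc:: prefix cleanup: joinable send/insert/pull workers spin on an atomic quit flag while a simulated clock advances. The skeleton with the standard library instead of PlatformThread (illustrative sketch):

#include <atomic>
#include <thread>

// One worker spinning on a quit flag, then joined. PlatformThread::
// SpawnJoinable above adds thread names and realtime priority on top of this.
void RunWorkerUntilQuit() {
  std::atomic<bool> quit{false};
  std::atomic<int> iterations{0};
  std::thread worker([&] {
    while (!quit.load()) {
      ++iterations;  // Stand-in for CbSendImpl()/CbInsertPacketImpl()/etc.
    }
  });
  // ... let the test run until its completion event fires ...
  quit.store(true);
  worker.join();
}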
- if (next_insert_packet_time_ms_ < clock_->TimeInMilliseconds()) { + if (next_insert_packet_time_ms_ < env_.clock().TimeInMilliseconds()) { return; } ++pull_audio_count_; @@ -482,13 +482,13 @@ class AudioCodingModuleMtTestOldApi : public AudioCodingModuleTestOldApi { fake_clock_->AdvanceTimeMilliseconds(10); } - rtc::PlatformThread send_thread_; - rtc::PlatformThread insert_packet_thread_; - rtc::PlatformThread pull_audio_thread_; + PlatformThread send_thread_; + PlatformThread insert_packet_thread_; + PlatformThread pull_audio_thread_; // Used to force worker threads to stop looping. std::atomic quit_; - rtc::Event test_complete_; + Event test_complete_; int send_count_; int insert_packet_count_; int pull_audio_count_ RTC_GUARDED_BY(mutex_); @@ -506,6 +506,129 @@ TEST_F(AudioCodingModuleMtTestOldApi, MAYBE_DoTest) { EXPECT_TRUE(RunTest()); } +class AudioPacketizationCallbackMock : public AudioPacketizationCallback { + public: + MOCK_METHOD(int32_t, + SendData, + (AudioFrameType frame_type, + uint8_t payload_type, + uint32_t timestamp, + const uint8_t* payload_data, + size_t payload_len_bytes, + int64_t absolute_capture_timestamp_ms), + (override)); +}; + +TEST(AudioCodingModule, DoesResetEncoder) { + std::unique_ptr acm = AudioCodingModule::Create(); + auto encoder = std::make_unique(); + MockAudioEncoder* encoder_mock = encoder.get(); + + acm->SetEncoder(std::move(encoder)); + + EXPECT_CALL(*encoder_mock, Reset()).Times(1); + acm->Reset(); +} + +class AcmAbsoluteCaptureTimestamp : public ::testing::Test { + public: + AcmAbsoluteCaptureTimestamp() : audio_frame_(kSampleRateHz, kNumChannels) {} + + protected: + static constexpr int kPTimeMs = 20; + static constexpr int kSampleRateHz = 48000; + static constexpr int kFrameSize = kSampleRateHz / 100; + static constexpr int kNumChannels = 2; + + void SetUp() { + scoped_refptr codec_factory = + CreateBuiltinAudioEncoderFactory(); + acm_ = AudioCodingModule::Create(); + std::unique_ptr encoder = codec_factory->Create( + CreateEnvironment(), + SdpAudioFormat("OPUS", kSampleRateHz, kNumChannels), + {.payload_type = 111}); + encoder->SetDtx(true); + encoder->SetReceiverFrameLengthRange(kPTimeMs, kPTimeMs); + acm_->SetEncoder(std::move(encoder)); + acm_->RegisterTransportCallback(&transport_); + for (size_t k = 0; k < audio_.size(); ++k) { + audio_[k] = 10 * k; + } + } + + const AudioFrame& GetAudioWithAbsoluteCaptureTimestamp( + int64_t absolute_capture_timestamp_ms) { + audio_frame_.ResetWithoutMuting(); + audio_frame_.UpdateFrame(timestamp_, audio_.data(), kFrameSize, + kSampleRateHz, + AudioFrame::SpeechType::kNormalSpeech, + AudioFrame::VADActivity::kVadActive, kNumChannels); + audio_frame_.set_absolute_capture_timestamp_ms( + absolute_capture_timestamp_ms); + timestamp_ += kFrameSize; + return audio_frame_; + } + + std::unique_ptr acm_; + AudioPacketizationCallbackMock transport_; + AudioFrame audio_frame_; + std::array audio_; + uint32_t timestamp_ = 9873546; +}; + +TEST_F(AcmAbsoluteCaptureTimestamp, HaveBeginningOfFrameCaptureTime) { + constexpr int64_t first_absolute_capture_timestamp_ms = 123456789; + + int64_t absolute_capture_timestamp_ms = first_absolute_capture_timestamp_ms; + EXPECT_CALL(transport_, + SendData(_, _, _, _, _, first_absolute_capture_timestamp_ms)) + .Times(1); + EXPECT_CALL( + transport_, + SendData(_, _, _, _, _, first_absolute_capture_timestamp_ms + kPTimeMs)) + .Times(1); + for (int k = 0; k < 5; ++k) { + acm_->Add10MsData( + GetAudioWithAbsoluteCaptureTimestamp(absolute_capture_timestamp_ms)); + 
absolute_capture_timestamp_ms += 10; + } +} + +TEST_F(AcmAbsoluteCaptureTimestamp, DoesResetWhenAudioCodingModuleDo) { + constexpr int64_t first_absolute_capture_timestamp_ms = 123456789; + + int64_t absolute_capture_timestamp_ms = first_absolute_capture_timestamp_ms; + EXPECT_CALL(transport_, + SendData(_, _, _, _, _, first_absolute_capture_timestamp_ms)) + .Times(1); + EXPECT_CALL( + transport_, + SendData(_, _, _, _, _, first_absolute_capture_timestamp_ms + kPTimeMs)) + .Times(1); + for (int k = 0; k < 5; ++k) { + acm_->Add10MsData( + GetAudioWithAbsoluteCaptureTimestamp(absolute_capture_timestamp_ms)); + absolute_capture_timestamp_ms += 10; + } + + acm_->Reset(); + constexpr int64_t after_reset_absolute_capture_timestamp_ms = 523456789; + EXPECT_CALL(transport_, SendData(_, _, _, _, _, + after_reset_absolute_capture_timestamp_ms)) + .Times(1); + EXPECT_CALL(transport_, + SendData(_, _, _, _, _, + after_reset_absolute_capture_timestamp_ms + kPTimeMs)) + .Times(1); + absolute_capture_timestamp_ms = after_reset_absolute_capture_timestamp_ms; + for (int k = 0; k < 5; ++k) { + acm_->Add10MsData( + GetAudioWithAbsoluteCaptureTimestamp(absolute_capture_timestamp_ms)); + absolute_capture_timestamp_ms += 10; + } +} + // Disabling all of these tests on iOS until file support has been added. // See https://code.google.com/p/webrtc/issues/detail?id=4752 for details. #if !defined(WEBRTC_IOS) @@ -533,7 +656,7 @@ class AcmSenderBitExactnessOldApi : public ::testing::Test, payload_type_(0), last_sequence_number_(0), last_timestamp_(0), - payload_checksum_(rtc::MessageDigestFactory::Create(rtc::DIGEST_MD5)) {} + payload_checksum_(MessageDigestFactory::Create(DIGEST_MD5)) {} // Sets up the test::AcmSendTest object. Returns true on success, otherwise // false. @@ -564,7 +687,7 @@ class AcmSenderBitExactnessOldApi : public ::testing::Test, std::unique_ptr external_speech_encoder, int payload_type) { payload_type_ = payload_type; - frame_size_rtp_timestamps_ = rtc::checked_cast( + frame_size_rtp_timestamps_ = checked_cast( external_speech_encoder->Num10MsFramesInNextPacket() * external_speech_encoder->RtpTimestampRateHz() / 100); send_test_->RegisterExternalCodec(std::move(external_speech_encoder)); @@ -576,7 +699,7 @@ class AcmSenderBitExactnessOldApi : public ::testing::Test, absl::string_view payload_checksum_ref, int expected_packets, test::AcmReceiveTestOldApi::NumOutputChannels expected_channels, - rtc::scoped_refptr decoder_factory = nullptr) { + scoped_refptr decoder_factory = nullptr) { if (!decoder_factory) { decoder_factory = CreateBuiltinAudioDecoderFactory(); } @@ -607,9 +730,9 @@ class AcmSenderBitExactnessOldApi : public ::testing::Test, ExpectChecksumEq(audio_checksum_ref, checksum_string); // Extract and verify the payload checksum. - rtc::Buffer checksum_result(payload_checksum_->Size()); + Buffer checksum_result(payload_checksum_->Size()); payload_checksum_->Finish(checksum_result.data(), checksum_result.size()); - checksum_string = rtc::hex_encode(checksum_result); + checksum_string = hex_encode(checksum_result); ExpectChecksumEq(payload_checksum_ref, checksum_string); // Verify number of packets produced. 
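The bit-exactness Run() above finalizes an MD5 digest over the concatenated payloads and compares its hex encoding against the payload_checksum_ref strings seen throughout these tests. Hex-encoding a digest buffer is a simple nibble-to-character mapping; a standalone sketch (standard library only, not the rtc::hex_encode implementation):

#include <cstdint>
#include <string>
#include <vector>

// Lower-case hex encoding of a byte buffer, e.g. a finished MD5 digest.
std::string HexEncode(const std::vector<uint8_t>& bytes) {
  static const char kHexDigits[] = "0123456789abcdef";
  std::string out;
  out.reserve(bytes.size() * 2);
  for (uint8_t b : bytes) {
    out.push_back(kHexDigits[b >> 4]);
    out.push_back(kHexDigits[b & 0x0F]);
  }
  return out;
}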
@@ -692,7 +815,7 @@ class AcmSenderBitExactnessOldApi : public ::testing::Test, uint8_t payload_type_; uint16_t last_sequence_number_; uint32_t last_timestamp_; - std::unique_ptr payload_checksum_; + std::unique_ptr payload_checksum_; const std::string kTestFileMono32kHz = webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"); const std::string kTestFileFakeStereo32kHz = @@ -707,7 +830,7 @@ class AcmSenderBitExactnessNewApi : public AcmSenderBitExactnessOldApi {}; TEST_F(AcmSenderBitExactnessOldApi, Pcm16_8000khz_10ms) { ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 1, 107, 80, 80)); - Run(/*audio_checksum_ref=*/"69118ed438ac76252d023e0463819471", + Run(/*audio_checksum_ref=*/"3e43fd5d3c73a59e8118e68fbfafe2c7", /*payload_checksum_ref=*/"c1edd36339ce0326cc4550041ad719a0", /*expected_packets=*/100, /*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput); @@ -715,7 +838,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Pcm16_8000khz_10ms) { TEST_F(AcmSenderBitExactnessOldApi, Pcm16_16000khz_10ms) { ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 16000, 1, 108, 160, 160)); - Run(/*audio_checksum_ref=*/"f95c87bdd33f631bcf80f4b19445bbd2", + Run(/*audio_checksum_ref=*/"608750138315cbab33d76d38e8367807", /*payload_checksum_ref=*/"ad786526383178b08d80d6eee06e9bad", /*expected_packets=*/100, /*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput); @@ -723,7 +846,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Pcm16_16000khz_10ms) { TEST_F(AcmSenderBitExactnessOldApi, Pcm16_32000khz_10ms) { ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 32000, 1, 109, 320, 320)); - Run(/*audio_checksum_ref=*/"c50244419c5c3a2f04cc69a022c266a2", + Run(/*audio_checksum_ref=*/"02e9927ef5e4d2cd792a5df0bdee5e19", /*payload_checksum_ref=*/"5ef82ea885e922263606c6fdbc49f651", /*expected_packets=*/100, /*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput); @@ -731,7 +854,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Pcm16_32000khz_10ms) { TEST_F(AcmSenderBitExactnessOldApi, Pcm16_stereo_8000khz_10ms) { ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 8000, 2, 111, 80, 80)); - Run(/*audio_checksum_ref=*/"4fccf4cc96f1e8e8de4b9fadf62ded9e", + Run(/*audio_checksum_ref=*/"4ff38de045b19f64de9c7e229ba36317", /*payload_checksum_ref=*/"62ce5adb0d4965d0a52ec98ae7f98974", /*expected_packets=*/100, /*expected_channels=*/test::AcmReceiveTestOldApi::kStereoOutput); @@ -739,7 +862,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Pcm16_stereo_8000khz_10ms) { TEST_F(AcmSenderBitExactnessOldApi, Pcm16_stereo_16000khz_10ms) { ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 16000, 2, 112, 160, 160)); - Run(/*audio_checksum_ref=*/"e15e388d9d4af8c02a59fe1552fedee3", + Run(/*audio_checksum_ref=*/"1ee35394cfca78ad6d55468441af36fa", /*payload_checksum_ref=*/"41ca8edac4b8c71cd54fd9f25ec14870", /*expected_packets=*/100, /*expected_channels=*/test::AcmReceiveTestOldApi::kStereoOutput); @@ -747,7 +870,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Pcm16_stereo_16000khz_10ms) { TEST_F(AcmSenderBitExactnessOldApi, Pcm16_stereo_32000khz_10ms) { ASSERT_NO_FATAL_FAILURE(SetUpTest("L16", 32000, 2, 113, 320, 320)); - Run(/*audio_checksum_ref=*/"b240520c0d05003fde7a174ae5957286", + Run(/*audio_checksum_ref=*/"19cae34730a0f6a17cf4e76bf21b69d6", /*payload_checksum_ref=*/"50e58502fb04421bf5b857dda4c96879", /*expected_packets=*/100, /*expected_channels=*/test::AcmReceiveTestOldApi::kStereoOutput); @@ -763,7 +886,7 @@ TEST_F(AcmSenderBitExactnessOldApi, Pcmu_20ms) { TEST_F(AcmSenderBitExactnessOldApi, Pcma_20ms) { ASSERT_NO_FATAL_FAILURE(SetUpTest("PCMA", 8000, 1, 8, 160, 160)); - 
Run(/*audio_checksum_ref=*/"47eb60e855eb12d1b0e6da9c975754a4", + Run(/*audio_checksum_ref=*/"ae259cab624095270b7369e53a7b53a3", /*payload_checksum_ref=*/"6ad745e55aa48981bfc790d0eeef2dd1", /*expected_packets=*/50, /*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput); @@ -779,27 +902,22 @@ TEST_F(AcmSenderBitExactnessOldApi, Pcmu_stereo_20ms) { TEST_F(AcmSenderBitExactnessOldApi, Pcma_stereo_20ms) { ASSERT_NO_FATAL_FAILURE(SetUpTest("PCMA", 8000, 2, 118, 160, 160)); - Run(/*audio_checksum_ref=*/"a84d75e098d87ab6b260687eb4b612a2", + Run(/*audio_checksum_ref=*/"f2e81d2531a805c40e61da5106b50006", /*payload_checksum_ref=*/"92b282c83efd20e7eeef52ba40842cf7", /*expected_packets=*/50, /*expected_channels=*/test::AcmReceiveTestOldApi::kStereoOutput); } -#if defined(WEBRTC_CODEC_ILBC) && defined(WEBRTC_LINUX) && \ - defined(WEBRTC_ARCH_X86_64) -TEST_F(AcmSenderBitExactnessOldApi, Ilbc_30ms) { - ASSERT_NO_FATAL_FAILURE(SetUpTest("ILBC", 8000, 1, 102, 240, 240)); - Run(/*audio_checksum_ref=*/"b14dba0de36efa5ec88a32c0b320b70f", - /*payload_checksum_ref=*/"cfae2e9f6aba96e145f2bcdd5050ce78", - /*expected_packets=*/33, - /*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput); -} -#endif - #if defined(WEBRTC_LINUX) && defined(WEBRTC_ARCH_X86_64) + +// TODO(bugs.webrtc.org/345525069): Either fix/enable or remove G722. +#if defined(__has_feature) && __has_feature(undefined_behavior_sanitizer) +TEST_F(AcmSenderBitExactnessOldApi, DISABLED_G722_20ms) { +#else TEST_F(AcmSenderBitExactnessOldApi, G722_20ms) { +#endif ASSERT_NO_FATAL_FAILURE(SetUpTest("G722", 16000, 1, 9, 320, 160)); - Run(/*audio_checksum_ref=*/"f5264affff25cf2cbd2e1e8a5217f9a3", + Run(/*audio_checksum_ref=*/"b875d9a3e41f5470857bdff02e3b368f", /*payload_checksum_ref=*/"fc68a87e1380614e658087cb35d5ca10", /*expected_packets=*/50, /*expected_channels=*/test::AcmReceiveTestOldApi::kMonoOutput); @@ -807,9 +925,15 @@ TEST_F(AcmSenderBitExactnessOldApi, G722_20ms) { #endif #if defined(WEBRTC_LINUX) && defined(WEBRTC_ARCH_X86_64) + +// TODO(bugs.webrtc.org/345525069): Either fix/enable or remove G722. 
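The G722 hunks above and below pick a DISABLED_ test name when building under UBSan via Clang's __has_feature extension. On compilers that do not provide __has_feature, the usual trick is to wrap it in a fallback macro; a common portability sketch (not taken from the patch):

// Fall back to 0 where __has_feature does not exist, so the check compiles on
// non-Clang toolchains as well.
#if defined(__has_feature)
#define HAS_FEATURE(x) __has_feature(x)
#else
#define HAS_FEATURE(x) 0
#endif

#if HAS_FEATURE(undefined_behavior_sanitizer)
#define MAYBE_G722_20ms DISABLED_G722_20ms
#else
#define MAYBE_G722_20ms G722_20ms
#endif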
+#if defined(__has_feature) && __has_feature(undefined_behavior_sanitizer) +TEST_F(AcmSenderBitExactnessOldApi, DISABLED_G722_stereo_20ms) { +#else TEST_F(AcmSenderBitExactnessOldApi, G722_stereo_20ms) { +#endif ASSERT_NO_FATAL_FAILURE(SetUpTest("G722", 16000, 2, 119, 320, 160)); - Run(/*audio_checksum_ref=*/"be0b8528ff9db3a2219f55ddd36faf7f", + Run(/*audio_checksum_ref=*/"02c427d73363b2f37853a0dd17fe1aba", /*payload_checksum_ref=*/"66516152eeaa1e650ad94ff85f668dac", /*expected_packets=*/50, /*expected_channels=*/test::AcmReceiveTestOldApi::kStereoOutput); @@ -840,7 +964,9 @@ TEST_F(AcmSenderBitExactnessNewApi, OpusFromFormat_stereo_20ms) { SdpAudioFormat("opus", 48000, 2, {{"stereo", "1"}})); ASSERT_TRUE(SetUpSender(kTestFileFakeStereo32kHz, 32000)); ASSERT_NO_FATAL_FAILURE(SetUpTestExternalEncoder( - AudioEncoderOpus::MakeAudioEncoder(*config, 120), 120)); + AudioEncoderOpus::MakeAudioEncoder(CreateEnvironment(), *config, + {.payload_type = 120}), + 120)); Run(audio_checksum, payload_checksum, /*expected_packets=*/50, /*expected_channels=*/test::AcmReceiveTestOldApi::kStereoOutput); } @@ -875,8 +1001,8 @@ TEST_F(AcmSenderBitExactnessNewApi, DISABLED_OpusManyChannels) { const auto opus_decoder = AudioDecoderMultiChannelOpus::MakeAudioDecoder(*decoder_config); - rtc::scoped_refptr decoder_factory = - rtc::make_ref_counted(opus_decoder.get()); + scoped_refptr decoder_factory = + make_ref_counted(opus_decoder.get()); // Set up an EXTERNAL DECODER to parse 4 channels. Run("audio checksum check downstream|8051617907766bec5f4e4a4f7c6d5291", @@ -895,10 +1021,12 @@ TEST_F(AcmSenderBitExactnessNewApi, OpusFromFormat_stereo_20ms_voip) { config->application = AudioEncoderOpusConfig::ApplicationMode::kVoip; ASSERT_TRUE(SetUpSender(kTestFileFakeStereo32kHz, 32000)); ASSERT_NO_FATAL_FAILURE(SetUpTestExternalEncoder( - AudioEncoderOpus::MakeAudioEncoder(*config, 120), 120)); + AudioEncoderOpus::MakeAudioEncoder(CreateEnvironment(), *config, + {.payload_type = 120}), + 120)); const std::string audio_maybe_sse = - "1010e60ad34cee73c939edaf563d0593" - "|c05b4523d4c3fad2bab96d2a56baa2d0"; + "cb644fc17d9666a0f5986eef24818159" + "|4a74024473c7c729543c2790829b1e42"; const std::string payload_maybe_sse = "ea48d94e43217793af9b7e15ece94e54" @@ -936,14 +1064,14 @@ class AcmSetBitRateTest : public ::testing::Test { int channels, int payload_type, int frame_size_samples, - int frame_size_rtp_timestamps) { + int /* frame_size_rtp_timestamps */) { return send_test_->RegisterCodec(payload_name, sampling_freq_hz, channels, payload_type, frame_size_samples); } void RegisterExternalSendCodec( std::unique_ptr external_speech_encoder, - int payload_type) { + int /* payload_type */) { send_test_->RegisterExternalCodec(std::move(external_speech_encoder)); } @@ -951,7 +1079,7 @@ class AcmSetBitRateTest : public ::testing::Test { int nr_bytes = 0; while (std::unique_ptr next_packet = send_test_->NextPacket()) { - nr_bytes += rtc::checked_cast(next_packet->payload_length_bytes()); + nr_bytes += checked_cast(next_packet->payload_length_bytes()); } EXPECT_LE(min_expected_total_bits, nr_bytes * 8); EXPECT_GE(max_expected_total_bits, nr_bytes * 8); @@ -986,8 +1114,10 @@ TEST_F(AcmSetBitRateNewApi, OpusFromFormat_48khz_20ms_10kbps) { const auto config = AudioEncoderOpus::SdpToConfig( SdpAudioFormat("opus", 48000, 2, {{"maxaveragebitrate", "10000"}})); ASSERT_TRUE(SetUpSender()); - RegisterExternalSendCodec(AudioEncoderOpus::MakeAudioEncoder(*config, 107), - 107); + RegisterExternalSendCodec( + 
AudioEncoderOpus::MakeAudioEncoder(CreateEnvironment(), *config, + {.payload_type = 107}), + 107); RunInner(7000, 12000); } @@ -995,8 +1125,10 @@ TEST_F(AcmSetBitRateNewApi, OpusFromFormat_48khz_20ms_50kbps) { const auto config = AudioEncoderOpus::SdpToConfig( SdpAudioFormat("opus", 48000, 2, {{"maxaveragebitrate", "50000"}})); ASSERT_TRUE(SetUpSender()); - RegisterExternalSendCodec(AudioEncoderOpus::MakeAudioEncoder(*config, 107), - 107); + RegisterExternalSendCodec( + AudioEncoderOpus::MakeAudioEncoder(CreateEnvironment(), *config, + {.payload_type = 107}), + 107); RunInner(40000, 60000); } @@ -1103,8 +1235,10 @@ TEST_F(AcmSetBitRateNewApi, MAYBE_OpusFromFormat_48khz_20ms_100kbps) { const auto config = AudioEncoderOpus::SdpToConfig( SdpAudioFormat("opus", 48000, 2, {{"maxaveragebitrate", "100000"}})); ASSERT_TRUE(SetUpSender()); - RegisterExternalSendCodec(AudioEncoderOpus::MakeAudioEncoder(*config, 107), - 107); + RegisterExternalSendCodec( + AudioEncoderOpus::MakeAudioEncoder(CreateEnvironment(), *config, + {.payload_type = 107}), + 107); RunInner(80000, 120000); } @@ -1136,9 +1270,10 @@ TEST_F(AcmSenderBitExactnessOldApi, External_Pcmu_20ms) { EXPECT_CALL(*mock_encoder, EncodeImpl(_, _, _)) .Times(AtLeast(1)) .WillRepeatedly(Invoke( - &encoder, static_cast, rtc::Buffer*)>( - &AudioEncoderPcmU::Encode))); + &encoder, + static_cast, webrtc::Buffer*)>( + &AudioEncoderPcmU::Encode))); ASSERT_TRUE(SetUpSender(kTestFileMono32kHz, 32000)); ASSERT_NO_FATAL_FAILURE( SetUpTestExternalEncoder(std::move(mock_encoder), config.payload_type)); diff --git a/modules/audio_coding/audio_coding.gni b/modules/audio_coding/audio_coding.gni index 3b147091de..b5d75a7234 100644 --- a/modules/audio_coding/audio_coding.gni +++ b/modules/audio_coding/audio_coding.gni @@ -9,9 +9,6 @@ import("../../webrtc.gni") audio_codec_defines = [] -if (rtc_include_ilbc) { - audio_codec_defines += [ "WEBRTC_CODEC_ILBC" ] -} if (rtc_include_opus) { audio_codec_defines += [ "WEBRTC_CODEC_OPUS" ] } diff --git a/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.cc b/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.cc index 64163f9118..ddcf6c8060 100644 --- a/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.cc +++ b/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.cc @@ -107,7 +107,7 @@ AudioEncoderRuntimeConfig AudioNetworkAdaptorImpl::GetEncoderRuntimeConfig() { controller->MakeDecision(&config); // Update ANA stats. 
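The audio_network_adaptor_impl.cc hunk just below switches the ANA stats counters from absl::optional to std::optional, bumping them with an increment-or-initialize lambda whenever the runtime config changes. The same pattern in isolation (illustrative helper name):

#include <cstdint>
#include <optional>

// An unset counter becomes 1; an existing counter is incremented. This is
// what the increment_opt lambda below does for the ANA stats fields.
void IncrementOptionalCounter(std::optional<uint32_t>& counter) {
  counter = counter.value_or(0) + 1;
}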
- auto increment_opt = [](absl::optional& a) { + auto increment_opt = [](std::optional& a) { a = a.value_or(0) + 1; }; if (prev_config_) { @@ -137,7 +137,7 @@ AudioEncoderRuntimeConfig AudioNetworkAdaptorImpl::GetEncoderRuntimeConfig() { prev_config_ = config; if (debug_dump_writer_) - debug_dump_writer_->DumpEncoderRuntimeConfig(config, rtc::TimeMillis()); + debug_dump_writer_->DumpEncoderRuntimeConfig(config, TimeMillis()); if (event_log_writer_) event_log_writer_->MaybeLogEncoderConfig(config); @@ -159,7 +159,7 @@ ANAStats AudioNetworkAdaptorImpl::GetStats() const { void AudioNetworkAdaptorImpl::DumpNetworkMetrics() { if (debug_dump_writer_) - debug_dump_writer_->DumpNetworkMetrics(last_metrics_, rtc::TimeMillis()); + debug_dump_writer_->DumpNetworkMetrics(last_metrics_, TimeMillis()); } void AudioNetworkAdaptorImpl::UpdateNetworkMetrics( diff --git a/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.h b/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.h index 664e76bda5..b4ec355908 100644 --- a/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.h +++ b/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.h @@ -14,8 +14,8 @@ #include #include +#include -#include "absl/types/optional.h" #include "api/audio_codecs/audio_encoder.h" #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/debug_dump_writer.h" @@ -79,7 +79,7 @@ class AudioNetworkAdaptorImpl final : public AudioNetworkAdaptor { Controller::NetworkMetrics last_metrics_; - absl::optional prev_config_; + std::optional prev_config_; ANAStats stats_; }; diff --git a/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl_unittest.cc b/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl_unittest.cc index 60b4db13cb..59c5eb8714 100644 --- a/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl_unittest.cc +++ b/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl_unittest.cc @@ -176,7 +176,7 @@ TEST(AudioNetworkAdaptorImplTest, DumpEncoderRuntimeConfigIsCalledOnGetEncoderRuntimeConfig) { test::ScopedFieldTrials override_field_trials( "WebRTC-Audio-FecAdaptation/Enabled/"); - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; fake_clock.AdvanceTime(TimeDelta::Millis(kClockInitialTimeMs)); auto states = CreateAudioNetworkAdaptor(); AudioEncoderRuntimeConfig config; @@ -194,7 +194,7 @@ TEST(AudioNetworkAdaptorImplTest, TEST(AudioNetworkAdaptorImplTest, DumpNetworkMetricsIsCalledOnSetNetworkMetrics) { - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; fake_clock.AdvanceTime(TimeDelta::Millis(kClockInitialTimeMs)); auto states = CreateAudioNetworkAdaptor(); diff --git a/modules/audio_coding/audio_network_adaptor/bitrate_controller.h b/modules/audio_coding/audio_network_adaptor/bitrate_controller.h index c1032146cc..6e91ec2047 100644 --- a/modules/audio_coding/audio_network_adaptor/bitrate_controller.h +++ b/modules/audio_coding/audio_network_adaptor/bitrate_controller.h @@ -13,7 +13,8 @@ #include -#include "absl/types/optional.h" +#include + #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" @@ -49,8 +50,8 @@ class BitrateController final : public Controller { const Config config_; int bitrate_bps_; int frame_length_ms_; - absl::optional target_audio_bitrate_bps_; - absl::optional 
overhead_bytes_per_packet_; + std::optional target_audio_bitrate_bps_; + std::optional overhead_bytes_per_packet_; }; } // namespace audio_network_adaptor diff --git a/modules/audio_coding/audio_network_adaptor/bitrate_controller_unittest.cc b/modules/audio_coding/audio_network_adaptor/bitrate_controller_unittest.cc index 9c593b818b..b142915c24 100644 --- a/modules/audio_coding/audio_network_adaptor/bitrate_controller_unittest.cc +++ b/modules/audio_coding/audio_network_adaptor/bitrate_controller_unittest.cc @@ -21,8 +21,8 @@ namespace { void UpdateNetworkMetrics( BitrateController* controller, - const absl::optional& target_audio_bitrate_bps, - const absl::optional& overhead_bytes_per_packet) { + const std::optional& target_audio_bitrate_bps, + const std::optional& overhead_bytes_per_packet) { // UpdateNetworkMetrics can accept multiple network metric updates at once. // However, currently, the most used case is to update one metric at a time. // To reflect this fact, we separate the calls. @@ -39,7 +39,7 @@ void UpdateNetworkMetrics( } void CheckDecision(BitrateController* controller, - const absl::optional& frame_length_ms, + const std::optional& frame_length_ms, int expected_bitrate_bps) { AudioEncoderRuntimeConfig config; config.frame_length_ms = frame_length_ms; @@ -59,7 +59,7 @@ TEST(AnaBitrateControllerTest, OutputInitValueWhenTargetBitrateUnknown) { constexpr size_t kOverheadBytesPerPacket = 64; BitrateController controller(BitrateController::Config( kInitialBitrateBps, kInitialFrameLengthMs, 0, 0)); - UpdateNetworkMetrics(&controller, absl::nullopt, kOverheadBytesPerPacket); + UpdateNetworkMetrics(&controller, std::nullopt, kOverheadBytesPerPacket); CheckDecision(&controller, kInitialFrameLengthMs * 2, kInitialBitrateBps); } @@ -69,7 +69,7 @@ TEST(AnaBitrateControllerTest, OutputInitValueWhenOverheadUnknown) { constexpr int kTargetBitrateBps = 48000; BitrateController controller(BitrateController::Config( kInitialBitrateBps, kInitialFrameLengthMs, 0, 0)); - UpdateNetworkMetrics(&controller, kTargetBitrateBps, absl::nullopt); + UpdateNetworkMetrics(&controller, kTargetBitrateBps, std::nullopt); CheckDecision(&controller, kInitialFrameLengthMs * 2, kInitialBitrateBps); } @@ -121,7 +121,7 @@ TEST(AnaBitrateControllerTest, TreatUnknownFrameLengthAsFrameLengthUnchanged) { 1000 / kInitialFrameLengthMs; UpdateNetworkMetrics(&controller, kTargetBitrateBps, kOverheadBytesPerPacket); - CheckDecision(&controller, absl::nullopt, kBitrateBps); + CheckDecision(&controller, std::nullopt, kBitrateBps); } TEST(AnaBitrateControllerTest, IncreaseBitrateOnFrameLengthIncreased) { @@ -135,7 +135,7 @@ TEST(AnaBitrateControllerTest, IncreaseBitrateOnFrameLengthIncreased) { 1000 / kInitialFrameLengthMs; UpdateNetworkMetrics(&controller, kTargetBitrateBps, kOverheadBytesPerPacket); - CheckDecision(&controller, absl::nullopt, kBitrateBps); + CheckDecision(&controller, std::nullopt, kBitrateBps); constexpr int kFrameLengthMs = 60; constexpr size_t kPacketOverheadRateDiff = @@ -157,7 +157,7 @@ TEST(AnaBitrateControllerTest, DecreaseBitrateOnFrameLengthDecreased) { 1000 / kInitialFrameLengthMs; UpdateNetworkMetrics(&controller, kTargetBitrateBps, kOverheadBytesPerPacket); - CheckDecision(&controller, absl::nullopt, kBitrateBps); + CheckDecision(&controller, std::nullopt, kBitrateBps); constexpr int kFrameLengthMs = 20; constexpr size_t kPacketOverheadRateDiff = @@ -186,7 +186,7 @@ TEST(AnaBitrateControllerTest, CheckBehaviorOnChangingCondition) { int overall_bitrate = 34567; size_t overhead_bytes_per_packet = 
64; int frame_length_ms = 20; - int current_bitrate = rtc::checked_cast( + int current_bitrate = checked_cast( overall_bitrate - overhead_bytes_per_packet * 8 * 1000 / frame_length_ms); UpdateNetworkMetrics(&controller, overall_bitrate, overhead_bytes_per_packet); @@ -201,8 +201,8 @@ TEST(AnaBitrateControllerTest, CheckBehaviorOnChangingCondition) { // Next: change frame length. frame_length_ms = 60; current_bitrate += - rtc::checked_cast(overhead_bytes_per_packet * 8 * 1000 / 20 - - overhead_bytes_per_packet * 8 * 1000 / 60); + checked_cast(overhead_bytes_per_packet * 8 * 1000 / 20 - + overhead_bytes_per_packet * 8 * 1000 / 60); UpdateNetworkMetrics(&controller, overall_bitrate, overhead_bytes_per_packet); CheckDecision(&controller, frame_length_ms, current_bitrate); @@ -215,8 +215,8 @@ TEST(AnaBitrateControllerTest, CheckBehaviorOnChangingCondition) { // Next: change frame length. frame_length_ms = 20; current_bitrate -= - rtc::checked_cast(overhead_bytes_per_packet * 8 * 1000 / 20 - - overhead_bytes_per_packet * 8 * 1000 / 60); + checked_cast(overhead_bytes_per_packet * 8 * 1000 / 20 - + overhead_bytes_per_packet * 8 * 1000 / 60); UpdateNetworkMetrics(&controller, overall_bitrate, overhead_bytes_per_packet); CheckDecision(&controller, frame_length_ms, current_bitrate); @@ -225,8 +225,8 @@ TEST(AnaBitrateControllerTest, CheckBehaviorOnChangingCondition) { current_bitrate -= 100; frame_length_ms = 60; current_bitrate += - rtc::checked_cast(overhead_bytes_per_packet * 8 * 1000 / 20 - - overhead_bytes_per_packet * 8 * 1000 / 60); + checked_cast(overhead_bytes_per_packet * 8 * 1000 / 20 - + overhead_bytes_per_packet * 8 * 1000 / 60); UpdateNetworkMetrics(&controller, overall_bitrate, overhead_bytes_per_packet); CheckDecision(&controller, frame_length_ms, current_bitrate); diff --git a/modules/audio_coding/audio_network_adaptor/channel_controller.h b/modules/audio_coding/audio_network_adaptor/channel_controller.h index 3cd4bb7dec..a877d775f3 100644 --- a/modules/audio_coding/audio_network_adaptor/channel_controller.h +++ b/modules/audio_coding/audio_network_adaptor/channel_controller.h @@ -13,7 +13,8 @@ #include -#include "absl/types/optional.h" +#include + #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" @@ -50,7 +51,7 @@ class ChannelController final : public Controller { private: const Config config_; size_t channels_to_encode_; - absl::optional uplink_bandwidth_bps_; + std::optional uplink_bandwidth_bps_; }; } // namespace webrtc diff --git a/modules/audio_coding/audio_network_adaptor/channel_controller_unittest.cc b/modules/audio_coding/audio_network_adaptor/channel_controller_unittest.cc index 21504bcec0..1803d449ed 100644 --- a/modules/audio_coding/audio_network_adaptor/channel_controller_unittest.cc +++ b/modules/audio_coding/audio_network_adaptor/channel_controller_unittest.cc @@ -33,7 +33,7 @@ std::unique_ptr CreateChannelController(int init_channels) { } void CheckDecision(ChannelController* controller, - const absl::optional& uplink_bandwidth_bps, + const std::optional& uplink_bandwidth_bps, size_t expected_num_channels) { if (uplink_bandwidth_bps) { Controller::NetworkMetrics network_metrics; @@ -50,7 +50,7 @@ void CheckDecision(ChannelController* controller, TEST(ChannelControllerTest, OutputInitValueWhenUplinkBandwidthUnknown) { constexpr int kInitChannels = 2; auto controller = CreateChannelController(kInitChannels); - CheckDecision(controller.get(), absl::nullopt, 
kInitChannels); + CheckDecision(controller.get(), std::nullopt, kInitChannels); } TEST(ChannelControllerTest, SwitchTo2ChannelsOnHighUplinkBandwidth) { diff --git a/modules/audio_coding/audio_network_adaptor/controller.h b/modules/audio_coding/audio_network_adaptor/controller.h index b70ada01a4..fc2cba78ed 100644 --- a/modules/audio_coding/audio_network_adaptor/controller.h +++ b/modules/audio_coding/audio_network_adaptor/controller.h @@ -11,7 +11,8 @@ #ifndef MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_CONTROLLER_H_ #define MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_CONTROLLER_H_ -#include "absl/types/optional.h" +#include + #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h" namespace webrtc { @@ -21,11 +22,11 @@ class Controller { struct NetworkMetrics { NetworkMetrics(); ~NetworkMetrics(); - absl::optional uplink_bandwidth_bps; - absl::optional uplink_packet_loss_fraction; - absl::optional target_audio_bitrate_bps; - absl::optional rtt_ms; - absl::optional overhead_bytes_per_packet; + std::optional uplink_bandwidth_bps; + std::optional uplink_packet_loss_fraction; + std::optional target_audio_bitrate_bps; + std::optional rtt_ms; + std::optional overhead_bytes_per_packet; }; virtual ~Controller() = default; diff --git a/modules/audio_coding/audio_network_adaptor/controller_manager.cc b/modules/audio_coding/audio_network_adaptor/controller_manager.cc index 42dd8a8786..599fcc3653 100644 --- a/modules/audio_coding/audio_network_adaptor/controller_manager.cc +++ b/modules/audio_coding/audio_network_adaptor/controller_manager.cc @@ -24,18 +24,16 @@ #include "modules/audio_coding/audio_network_adaptor/frame_length_controller.h" #include "modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h" #include "modules/audio_coding/audio_network_adaptor/util/threshold_curve.h" -#include "rtc_base/ignore_wundef.h" #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" #if WEBRTC_ENABLE_PROTOBUF -RTC_PUSH_IGNORING_WUNDEF() #ifdef WEBRTC_ANDROID_PLATFORM_BUILD #include "external/webrtc/webrtc/modules/audio_coding/audio_network_adaptor/config.pb.h" #else #include "modules/audio_coding/audio_network_adaptor/config.pb.h" #endif -RTC_POP_IGNORING_WUNDEF() + #endif namespace webrtc { @@ -79,7 +77,7 @@ std::unique_ptr CreateFecControllerPlrBased( std::unique_ptr CreateFrameLengthController( const audio_network_adaptor::config::FrameLengthController& config, - rtc::ArrayView encoder_frame_lengths_ms, + ArrayView encoder_frame_lengths_ms, int initial_frame_length_ms, int min_encoder_bitrate_bps) { RTC_CHECK(config.has_fl_increasing_packet_loss_fraction()); @@ -203,7 +201,7 @@ std::unique_ptr CreateBitrateController( std::unique_ptr CreateFrameLengthControllerV2( const audio_network_adaptor::config::FrameLengthControllerV2& config, - rtc::ArrayView encoder_frame_lengths_ms) { + ArrayView encoder_frame_lengths_ms) { return std::make_unique( encoder_frame_lengths_ms, config.min_payload_bitrate_bps(), config.use_slow_adaptation()); @@ -222,7 +220,7 @@ ControllerManagerImpl::Config::~Config() = default; std::unique_ptr ControllerManagerImpl::Create( absl::string_view config_string, size_t num_encoder_channels, - rtc::ArrayView encoder_frame_lengths_ms, + ArrayView encoder_frame_lengths_ms, int min_encoder_bitrate_bps, size_t intial_channels_to_encode, int initial_frame_length_ms, @@ -238,7 +236,7 @@ std::unique_ptr ControllerManagerImpl::Create( std::unique_ptr ControllerManagerImpl::Create( absl::string_view config_string, size_t num_encoder_channels, - 
rtc::ArrayView encoder_frame_lengths_ms, + ArrayView encoder_frame_lengths_ms, int min_encoder_bitrate_bps, size_t intial_channels_to_encode, int initial_frame_length_ms, @@ -252,7 +250,7 @@ std::unique_ptr ControllerManagerImpl::Create( controller_manager_config.ParseFromString(std::string(config_string))); if (debug_dump_writer) debug_dump_writer->DumpControllerManagerConfig(controller_manager_config, - rtc::TimeMillis()); + TimeMillis()); std::vector> controllers; std::map> scoring_points; @@ -340,7 +338,7 @@ ControllerManagerImpl::ControllerManagerImpl( const std::map>& scoring_points) : config_(config), controllers_(std::move(controllers)), - last_reordering_time_ms_(absl::nullopt), + last_reordering_time_ms_(std::nullopt), last_scoring_point_(0, 0.0) { for (auto& controller : controllers_) default_sorted_controllers_.push_back(controller.get()); @@ -362,7 +360,7 @@ std::vector ControllerManagerImpl::GetSortedControllers( if (!metrics.uplink_bandwidth_bps || !metrics.uplink_packet_loss_fraction) return sorted_controllers_; - const int64_t now_ms = rtc::TimeMillis(); + const int64_t now_ms = TimeMillis(); if (last_reordering_time_ms_ && now_ms - *last_reordering_time_ms_ < config_.min_reordering_time_ms) return sorted_controllers_; diff --git a/modules/audio_coding/audio_network_adaptor/controller_manager.h b/modules/audio_coding/audio_network_adaptor/controller_manager.h index 47e8e0f5a0..632cc616a3 100644 --- a/modules/audio_coding/audio_network_adaptor/controller_manager.h +++ b/modules/audio_coding/audio_network_adaptor/controller_manager.h @@ -49,7 +49,7 @@ class ControllerManagerImpl final : public ControllerManager { static std::unique_ptr Create( absl::string_view config_string, size_t num_encoder_channels, - rtc::ArrayView encoder_frame_lengths_ms, + ArrayView encoder_frame_lengths_ms, int min_encoder_bitrate_bps, size_t intial_channels_to_encode, int initial_frame_length_ms, @@ -60,7 +60,7 @@ class ControllerManagerImpl final : public ControllerManager { static std::unique_ptr Create( absl::string_view config_string, size_t num_encoder_channels, - rtc::ArrayView encoder_frame_lengths_ms, + ArrayView encoder_frame_lengths_ms, int min_encoder_bitrate_bps, size_t intial_channels_to_encode, int initial_frame_length_ms, @@ -107,7 +107,7 @@ class ControllerManagerImpl final : public ControllerManager { std::vector> controllers_; - absl::optional last_reordering_time_ms_; + std::optional last_reordering_time_ms_; ScoringPoint last_scoring_point_; std::vector default_sorted_controllers_; diff --git a/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc b/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc index 3e6ecf6def..5042274698 100644 --- a/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc +++ b/modules/audio_coding/audio_network_adaptor/controller_manager_unittest.cc @@ -10,24 +10,33 @@ #include "modules/audio_coding/audio_network_adaptor/controller_manager.h" +#include +#include +#include +#include +#include +#include #include #include +#include #include "absl/strings/string_view.h" +#include "api/units/time_delta.h" +#include "modules/audio_coding/audio_network_adaptor/controller.h" +#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" #include "modules/audio_coding/audio_network_adaptor/mock/mock_controller.h" #include "modules/audio_coding/audio_network_adaptor/mock/mock_debug_dump_writer.h" +#include "rtc_base/checks.h" #include "rtc_base/fake_clock.h" -#include 
"rtc_base/ignore_wundef.h" +#include "test/gmock.h" #include "test/gtest.h" #if WEBRTC_ENABLE_PROTOBUF -RTC_PUSH_IGNORING_WUNDEF() #ifdef WEBRTC_ANDROID_PLATFORM_BUILD #include "external/webrtc/webrtc/modules/audio_coding/audio_network_adaptor/config.pb.h" #else #include "modules/audio_coding/audio_network_adaptor/config.pb.h" #endif -RTC_POP_IGNORING_WUNDEF() #endif namespace webrtc { @@ -89,8 +98,8 @@ ControllerManagerStates CreateControllerManager() { // exists in the vector. void CheckControllersOrder( ControllerManagerStates* states, - const absl::optional& uplink_bandwidth_bps, - const absl::optional& uplink_packet_loss_fraction, + const std::optional& uplink_bandwidth_bps, + const std::optional& uplink_packet_loss_fraction, const std::vector& expected_order) { RTC_DCHECK_EQ(kNumControllers, expected_order.size()); Controller::NetworkMetrics metrics; @@ -126,7 +135,7 @@ TEST(ControllerManagerTest, ControllersInDefaultOrderOnEmptyNetworkMetrics) { auto states = CreateControllerManager(); // `network_metrics` are empty, and the controllers are supposed to follow the // default order. - CheckControllersOrder(&states, absl::nullopt, absl::nullopt, {0, 1, 2, 3}); + CheckControllersOrder(&states, std::nullopt, std::nullopt, {0, 1, 2, 3}); } TEST(ControllerManagerTest, ControllersWithoutCharPointAtEndAndInDefaultOrder) { @@ -143,7 +152,7 @@ TEST(ControllerManagerTest, ControllersWithCharPointDependOnNetworkMetrics) { } TEST(ControllerManagerTest, DoNotReorderBeforeMinReordingTime) { - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; auto states = CreateControllerManager(); CheckControllersOrder(&states, kChracteristicBandwithBps[0], kChracteristicPacketLossFraction[0], @@ -158,7 +167,7 @@ TEST(ControllerManagerTest, DoNotReorderBeforeMinReordingTime) { } TEST(ControllerManagerTest, ReorderBeyondMinReordingTimeAndMinDistance) { - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; auto states = CreateControllerManager(); constexpr int kBandwidthBps = (kChracteristicBandwithBps[0] + kChracteristicBandwithBps[1]) / 2; @@ -177,7 +186,7 @@ TEST(ControllerManagerTest, ReorderBeyondMinReordingTimeAndMinDistance) { } TEST(ControllerManagerTest, DoNotReorderIfNetworkMetricsChangeTooSmall) { - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; auto states = CreateControllerManager(); constexpr int kBandwidthBps = (kChracteristicBandwithBps[0] + kChracteristicBandwithBps[1]) / 2; @@ -301,8 +310,6 @@ void CheckControllersOrder(const std::vector& controllers, ASSERT_EQ(expected_types.size(), controllers.size()); // We also check that the controllers follow the initial settings. - AudioEncoderRuntimeConfig encoder_config; - for (size_t i = 0; i < controllers.size(); ++i) { AudioEncoderRuntimeConfig encoder_config; // We check the order of `controllers` by judging their decisions. 
@@ -357,7 +364,7 @@ TEST(ControllerManagerTest, DebugDumpLoggedWhenCreateFromConfigString) { const std::vector encoder_frame_lengths_ms = {20, 60}; constexpr int64_t kClockInitialTimeMs = 12345678; - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; fake_clock.AdvanceTime(TimeDelta::Millis(kClockInitialTimeMs)); auto debug_dump_writer = std::unique_ptr(new NiceMock()); @@ -420,7 +427,7 @@ TEST(ControllerManagerTest, CreateCharPointFreeConfigAndCheckDefaultOrder) { } TEST(ControllerManagerTest, CreateFromConfigStringAndCheckReordering) { - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; audio_network_adaptor::config::ControllerManager config; config.set_min_reordering_time_ms(kMinReorderingTimeMs); config.set_min_reordering_squared_distance(kMinReorderingSquareDistance); diff --git a/modules/audio_coding/audio_network_adaptor/debug_dump_writer.cc b/modules/audio_coding/audio_network_adaptor/debug_dump_writer.cc index 2616706ee5..4504e04088 100644 --- a/modules/audio_coding/audio_network_adaptor/debug_dump_writer.cc +++ b/modules/audio_coding/audio_network_adaptor/debug_dump_writer.cc @@ -10,22 +10,19 @@ #include "modules/audio_coding/audio_network_adaptor/debug_dump_writer.h" +#include #include -#include "absl/types/optional.h" #include "rtc_base/checks.h" -#include "rtc_base/ignore_wundef.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/system/file_wrapper.h" #if WEBRTC_ENABLE_PROTOBUF -RTC_PUSH_IGNORING_WUNDEF() #ifdef WEBRTC_ANDROID_PLATFORM_BUILD #include "external/webrtc/webrtc/modules/audio_coding/audio_network_adaptor/debug_dump.pb.h" #else #include "modules/audio_coding/audio_network_adaptor/debug_dump.pb.h" #endif -RTC_POP_IGNORING_WUNDEF() #endif namespace webrtc { @@ -41,7 +38,7 @@ void DumpEventToFile(const Event& event, FileWrapper* dump_file) { RTC_CHECK(dump_file->is_open()); std::string dump_data; event.SerializeToString(&dump_data); - int32_t size = rtc::checked_cast(event.ByteSizeLong()); + int32_t size = checked_cast(event.ByteSizeLong()); dump_file->Write(&size, sizeof(size)); dump_file->Write(dump_data.data(), dump_data.length()); } diff --git a/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h b/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h index 8fdf2f7728..fd3a64dbb1 100644 --- a/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h +++ b/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h @@ -15,16 +15,14 @@ #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h" -#include "rtc_base/ignore_wundef.h" #include "rtc_base/system/file_wrapper.h" + #if WEBRTC_ENABLE_PROTOBUF -RTC_PUSH_IGNORING_WUNDEF() #ifdef WEBRTC_ANDROID_PLATFORM_BUILD #include "external/webrtc/webrtc/modules/audio_coding/audio_network_adaptor/config.pb.h" #else #include "modules/audio_coding/audio_network_adaptor/config.pb.h" #endif -RTC_POP_IGNORING_WUNDEF() #endif namespace webrtc { diff --git a/modules/audio_coding/audio_network_adaptor/dtx_controller.h b/modules/audio_coding/audio_network_adaptor/dtx_controller.h index b8a8e476e4..f43fd02b71 100644 --- a/modules/audio_coding/audio_network_adaptor/dtx_controller.h +++ b/modules/audio_coding/audio_network_adaptor/dtx_controller.h @@ -11,7 +11,8 @@ #ifndef MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_DTX_CONTROLLER_H_ #define MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_DTX_CONTROLLER_H_ -#include "absl/types/optional.h" +#include + #include 
"modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" @@ -44,7 +45,7 @@ class DtxController final : public Controller { private: const Config config_; bool dtx_enabled_; - absl::optional uplink_bandwidth_bps_; + std::optional uplink_bandwidth_bps_; }; } // namespace webrtc diff --git a/modules/audio_coding/audio_network_adaptor/dtx_controller_unittest.cc b/modules/audio_coding/audio_network_adaptor/dtx_controller_unittest.cc index 567df6f76e..46c8f8b844 100644 --- a/modules/audio_coding/audio_network_adaptor/dtx_controller_unittest.cc +++ b/modules/audio_coding/audio_network_adaptor/dtx_controller_unittest.cc @@ -31,7 +31,7 @@ std::unique_ptr CreateController(int initial_dtx_enabled) { } void CheckDecision(DtxController* controller, - const absl::optional& uplink_bandwidth_bps, + const std::optional& uplink_bandwidth_bps, bool expected_dtx_enabled) { if (uplink_bandwidth_bps) { Controller::NetworkMetrics network_metrics; @@ -48,7 +48,7 @@ void CheckDecision(DtxController* controller, TEST(DtxControllerTest, OutputInitValueWhenUplinkBandwidthUnknown) { constexpr bool kInitialDtxEnabled = true; auto controller = CreateController(kInitialDtxEnabled); - CheckDecision(controller.get(), absl::nullopt, kInitialDtxEnabled); + CheckDecision(controller.get(), std::nullopt, kInitialDtxEnabled); } TEST(DtxControllerTest, TurnOnDtxForLowUplinkBandwidth) { diff --git a/modules/audio_coding/audio_network_adaptor/event_log_writer.cc b/modules/audio_coding/audio_network_adaptor/event_log_writer.cc index 0a79484a16..1bef64eaa6 100644 --- a/modules/audio_coding/audio_network_adaptor/event_log_writer.cc +++ b/modules/audio_coding/audio_network_adaptor/event_log_writer.cc @@ -15,9 +15,9 @@ #include #include #include +#include #include -#include "absl/types/optional.h" #include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log/rtc_event_log.h" #include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h" diff --git a/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.cc b/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.cc index c5e5fa76e3..bcfbd75c61 100644 --- a/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.cc +++ b/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.cc @@ -14,27 +14,9 @@ #include #include "rtc_base/checks.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { -namespace { -class NullSmoothingFilter final : public SmoothingFilter { - public: - void AddSample(float sample) override { last_sample_ = sample; } - - absl::optional GetAverage() override { return last_sample_; } - - bool SetTimeConstantMs(int time_constant_ms) override { - RTC_DCHECK_NOTREACHED(); - return false; - } - - private: - absl::optional last_sample_; -}; -} // namespace - FecControllerPlrBased::Config::Config( bool initial_fec_enabled, const ThresholdCurve& fec_enabling_threshold, @@ -57,10 +39,7 @@ FecControllerPlrBased::FecControllerPlrBased( FecControllerPlrBased::FecControllerPlrBased(const Config& config) : FecControllerPlrBased( config, - webrtc::field_trial::FindFullName("UseTwccPlrForAna") == "Enabled" - ? 
std::unique_ptr(new NullSmoothingFilter()) - : std::unique_ptr( - new SmoothingFilterImpl(config.time_constant_ms))) {} + std::make_unique(config.time_constant_ms)) {} FecControllerPlrBased::~FecControllerPlrBased() = default; @@ -89,7 +68,7 @@ void FecControllerPlrBased::MakeDecision(AudioEncoderRuntimeConfig* config) { } bool FecControllerPlrBased::FecEnablingDecision( - const absl::optional& packet_loss) const { + const std::optional& packet_loss) const { if (!uplink_bandwidth_bps_ || !packet_loss) { return false; } else { @@ -100,7 +79,7 @@ bool FecControllerPlrBased::FecEnablingDecision( } bool FecControllerPlrBased::FecDisablingDecision( - const absl::optional& packet_loss) const { + const std::optional& packet_loss) const { if (!uplink_bandwidth_bps_ || !packet_loss) { return false; } else { diff --git a/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h b/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h index 0c57ad1d1e..71b2d69aab 100644 --- a/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h +++ b/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h @@ -12,8 +12,8 @@ #define MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_FEC_CONTROLLER_PLR_BASED_H_ #include +#include -#include "absl/types/optional.h" #include "common_audio/smoothing_filter.h" #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" @@ -60,12 +60,12 @@ class FecControllerPlrBased final : public Controller { void MakeDecision(AudioEncoderRuntimeConfig* config) override; private: - bool FecEnablingDecision(const absl::optional& packet_loss) const; - bool FecDisablingDecision(const absl::optional& packet_loss) const; + bool FecEnablingDecision(const std::optional& packet_loss) const; + bool FecDisablingDecision(const std::optional& packet_loss) const; const Config config_; bool fec_enabled_; - absl::optional uplink_bandwidth_bps_; + std::optional uplink_bandwidth_bps_; const std::unique_ptr packet_loss_smoother_; }; diff --git a/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based_unittest.cc b/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based_unittest.cc index 743b087163..0227252b90 100644 --- a/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based_unittest.cc +++ b/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based_unittest.cc @@ -10,9 +10,17 @@ #include "modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h" +#include +#include #include +#include #include "common_audio/mocks/mock_smoothing_filter.h" +#include "modules/audio_coding/audio_network_adaptor/controller.h" +#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" +#include "modules/audio_coding/audio_network_adaptor/util/threshold_curve.h" +#include "rtc_base/checks.h" +#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { @@ -79,8 +87,8 @@ FecControllerPlrBasedTestStates CreateFecControllerPlrBased( } void UpdateNetworkMetrics(FecControllerPlrBasedTestStates* states, - const absl::optional& uplink_bandwidth_bps, - const absl::optional& uplink_packet_loss) { + const std::optional& uplink_bandwidth_bps, + const std::optional& uplink_packet_loss) { // UpdateNetworkMetrics can accept multiple network metric updates at once. // However, currently, the most used case is to update one metric at a time. // To reflect this fact, we separate the calls. 
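A minimal sketch of the "one metric at a time" update pattern described in the comment above, written against the migrated `std::optional` fields. The `int`/`float` element types are assumptions (the template arguments are not visible in this extract), and the helper itself is illustrative rather than part of the change:

```cpp
#include <optional>

#include "modules/audio_coding/audio_network_adaptor/controller.h"
#include "modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h"

namespace webrtc {

// Hand each known metric to the controller in its own call, mirroring the
// UpdateNetworkMetrics() test helper above.
void UpdateOneMetricAtATime(FecControllerPlrBased& controller,
                            std::optional<int> uplink_bandwidth_bps,
                            std::optional<float> uplink_packet_loss) {
  if (uplink_bandwidth_bps) {
    Controller::NetworkMetrics metrics;
    metrics.uplink_bandwidth_bps = uplink_bandwidth_bps;
    controller.UpdateNetworkMetrics(metrics);
  }
  if (uplink_packet_loss) {
    Controller::NetworkMetrics metrics;
    metrics.uplink_packet_loss_fraction = uplink_packet_loss;
    controller.UpdateNetworkMetrics(metrics);
  }
}

}  // namespace webrtc
```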
@@ -132,7 +140,7 @@ TEST(FecControllerPlrBasedTest, OutputInitValueWhenUplinkBandwidthUnknown) { kEnablingPacketLossAtLowBw - kEpsilon, kEnablingPacketLossAtLowBw, kEnablingPacketLossAtLowBw + kEpsilon}) { auto states = CreateFecControllerPlrBased(initial_fec_enabled); - UpdateNetworkMetrics(&states, absl::nullopt, packet_loss); + UpdateNetworkMetrics(&states, std::nullopt, packet_loss); CheckDecision(&states, initial_fec_enabled, packet_loss); } } @@ -147,7 +155,7 @@ TEST(FecControllerPlrBasedTest, kDisablingBandwidthLow + 1, kEnablingBandwidthLow - 1, kEnablingBandwidthLow, kEnablingBandwidthLow + 1}) { auto states = CreateFecControllerPlrBased(initial_fec_enabled); - UpdateNetworkMetrics(&states, bandwidth, absl::nullopt); + UpdateNetworkMetrics(&states, bandwidth, std::nullopt); CheckDecision(&states, initial_fec_enabled, 0.0); } } @@ -319,8 +327,8 @@ TEST(FecControllerPlrBasedTest, CheckBehaviorOnSpecialCurves) { // | A|___B______ // |-----------------> bandwidth - constexpr int kEnablingBandwidthHigh = kEnablingBandwidthLow; - constexpr float kDisablingPacketLossAtLowBw = kDisablingPacketLossAtHighBw; + constexpr int kEnablingBandwidth = kEnablingBandwidthLow; + constexpr float kDisablingPacketLoss = kDisablingPacketLossAtHighBw; FecControllerPlrBasedTestStates states; std::unique_ptr mock_smoothing_filter( new NiceMock()); @@ -329,8 +337,8 @@ TEST(FecControllerPlrBasedTest, CheckBehaviorOnSpecialCurves) { FecControllerPlrBased::Config( true, ThresholdCurve(kEnablingBandwidthLow, kEnablingPacketLossAtLowBw, - kEnablingBandwidthHigh, kEnablingPacketLossAtHighBw), - ThresholdCurve(kDisablingBandwidthLow, kDisablingPacketLossAtLowBw, + kEnablingBandwidth, kEnablingPacketLossAtHighBw), + ThresholdCurve(kDisablingBandwidthLow, kDisablingPacketLoss, kDisablingBandwidthHigh, kDisablingPacketLossAtHighBw), 0), std::move(mock_smoothing_filter))); @@ -342,7 +350,7 @@ TEST(FecControllerPlrBasedTest, CheckBehaviorOnSpecialCurves) { kEnablingPacketLossAtHighBw * 0.99f); CheckDecision(&states, false, kEnablingPacketLossAtHighBw * 0.99f); - UpdateNetworkMetrics(&states, kEnablingBandwidthHigh, + UpdateNetworkMetrics(&states, kEnablingBandwidth, kEnablingPacketLossAtHighBw); CheckDecision(&states, true, kEnablingPacketLossAtHighBw); diff --git a/modules/audio_coding/audio_network_adaptor/frame_length_controller.cc b/modules/audio_coding/audio_network_adaptor/frame_length_controller.cc index c47434f9aa..c3ec2d7072 100644 --- a/modules/audio_coding/audio_network_adaptor/frame_length_controller.cc +++ b/modules/audio_coding/audio_network_adaptor/frame_length_controller.cc @@ -97,7 +97,7 @@ bool FrameLengthController::Config::FrameLengthChange::operator<( } bool FrameLengthController::FrameLengthIncreasingDecision( - const AudioEncoderRuntimeConfig& config) { + const AudioEncoderRuntimeConfig& /* config */) { // Increase frame length if // 1. `uplink_bandwidth_bps` is known to be smaller or equal than // `min_encoder_bitrate_bps` plus `prevent_overuse_margin_bps` plus the @@ -153,7 +153,7 @@ bool FrameLengthController::FrameLengthIncreasingDecision( } bool FrameLengthController::FrameLengthDecreasingDecision( - const AudioEncoderRuntimeConfig& config) { + const AudioEncoderRuntimeConfig& /* config */) { // Decrease frame length if // 1. shorter frame length is available AND // 2. 
`uplink_bandwidth_bps` is known to be bigger than diff --git a/modules/audio_coding/audio_network_adaptor/frame_length_controller.h b/modules/audio_coding/audio_network_adaptor/frame_length_controller.h index 04693f8db7..5abfb83839 100644 --- a/modules/audio_coding/audio_network_adaptor/frame_length_controller.h +++ b/modules/audio_coding/audio_network_adaptor/frame_length_controller.h @@ -14,9 +14,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" @@ -77,11 +77,11 @@ class FrameLengthController final : public Controller { std::set::const_iterator frame_length_ms_; - absl::optional uplink_bandwidth_bps_; + std::optional uplink_bandwidth_bps_; - absl::optional uplink_packet_loss_fraction_; + std::optional uplink_packet_loss_fraction_; - absl::optional overhead_bytes_per_packet_; + std::optional overhead_bytes_per_packet_; // True if the previous frame length decision was an increase, otherwise // false. diff --git a/modules/audio_coding/audio_network_adaptor/frame_length_controller_unittest.cc b/modules/audio_coding/audio_network_adaptor/frame_length_controller_unittest.cc index 23123934dc..2bf52b21aa 100644 --- a/modules/audio_coding/audio_network_adaptor/frame_length_controller_unittest.cc +++ b/modules/audio_coding/audio_network_adaptor/frame_length_controller_unittest.cc @@ -124,9 +124,9 @@ CreateChangeCriteriaFor40msAnd60ms() { void UpdateNetworkMetrics( FrameLengthController* controller, - const absl::optional& uplink_bandwidth_bps, - const absl::optional& uplink_packet_loss_fraction, - const absl::optional& overhead_bytes_per_packet) { + const std::optional& uplink_bandwidth_bps, + const std::optional& uplink_packet_loss_fraction, + const std::optional& overhead_bytes_per_packet) { // UpdateNetworkMetrics can accept multiple network metric updates at once. // However, currently, the most used case is to update one metric at a time. // To reflect this fact, we separate the calls. @@ -160,14 +160,14 @@ TEST(FrameLengthControllerTest, DecreaseTo20MsOnHighUplinkBandwidth) { auto controller = CreateController(CreateChangeCriteriaFor20msAnd60ms(), kDefaultEncoderFrameLengthsMs, 60); UpdateNetworkMetrics(controller.get(), kFl60msTo20msBandwidthBps, - absl::nullopt, kOverheadBytesPerPacket); + std::nullopt, kOverheadBytesPerPacket); CheckDecision(controller.get(), 20); } TEST(FrameLengthControllerTest, DecreaseTo20MsOnHighUplinkPacketLossFraction) { auto controller = CreateController(CreateChangeCriteriaFor20msAnd60ms(), kDefaultEncoderFrameLengthsMs, 60); - UpdateNetworkMetrics(controller.get(), absl::nullopt, + UpdateNetworkMetrics(controller.get(), std::nullopt, kFlDecreasingPacketLossFraction, kOverheadBytesPerPacket); CheckDecision(controller.get(), 20); @@ -200,7 +200,7 @@ TEST(FrameLengthControllerTest, DecreaseTo40MsOnHighUplinkBandwidth) { auto controller = CreateController(CreateChangeCriteriaFor40msAnd60ms(), kDefaultEncoderFrameLengthsMs, 40); UpdateNetworkMetrics(controller.get(), kFl60msTo40msBandwidthBps, - absl::nullopt, kOverheadBytesPerPacket); + std::nullopt, kOverheadBytesPerPacket); CheckDecision(controller.get(), 40); } @@ -316,11 +316,11 @@ TEST(FrameLengthControllerTest, From120MsTo20MsOnHighUplinkBandwidth) { kDefaultEncoderFrameLengthsMs, 120); // It takes two steps for frame length to go from 120ms to 20ms. 
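The "two steps" comment above reflects that a frame-length decision only moves to a neighboring supported length, so 120 ms reaches 20 ms via 60 ms. A hedged sketch of that two-round loop against the generic `Controller` interface used throughout these files (the helper and its names are illustrative only):

```cpp
#include "modules/audio_coding/audio_network_adaptor/controller.h"
#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h"

namespace webrtc {

// `metrics` stands for a NetworkMetrics value that favors shorter frames
// (high uplink bandwidth or high packet loss), as in the tests above.
void StepFrameLengthDownTwice(Controller& controller,
                              const Controller::NetworkMetrics& metrics) {
  AudioEncoderRuntimeConfig config;
  controller.UpdateNetworkMetrics(metrics);
  controller.MakeDecision(&config);  // First step, e.g. 120 ms -> 60 ms.
  controller.UpdateNetworkMetrics(metrics);
  controller.MakeDecision(&config);  // Second step, e.g. 60 ms -> 20 ms.
}

}  // namespace webrtc
```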
UpdateNetworkMetrics(controller.get(), kFl60msTo20msBandwidthBps, - absl::nullopt, kOverheadBytesPerPacket); + std::nullopt, kOverheadBytesPerPacket); CheckDecision(controller.get(), 60); UpdateNetworkMetrics(controller.get(), kFl60msTo20msBandwidthBps, - absl::nullopt, kOverheadBytesPerPacket); + std::nullopt, kOverheadBytesPerPacket); CheckDecision(controller.get(), 20); } @@ -328,12 +328,12 @@ TEST(FrameLengthControllerTest, From120MsTo20MsOnHighUplinkPacketLossFraction) { auto controller = CreateController(CreateChangeCriteriaFor20ms60msAnd120ms(), kDefaultEncoderFrameLengthsMs, 120); // It takes two steps for frame length to go from 120ms to 20ms. - UpdateNetworkMetrics(controller.get(), absl::nullopt, + UpdateNetworkMetrics(controller.get(), std::nullopt, kFlDecreasingPacketLossFraction, kOverheadBytesPerPacket); CheckDecision(controller.get(), 60); - UpdateNetworkMetrics(controller.get(), absl::nullopt, + UpdateNetworkMetrics(controller.get(), std::nullopt, kFlDecreasingPacketLossFraction, kOverheadBytesPerPacket); CheckDecision(controller.get(), 20); diff --git a/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.cc b/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.cc index 36fc10ba82..f6f4759ba1 100644 --- a/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.cc +++ b/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.cc @@ -25,7 +25,7 @@ int OverheadBps(int overhead_bytes_per_packet, int frame_length_ms) { } // namespace FrameLengthControllerV2::FrameLengthControllerV2( - rtc::ArrayView encoder_frame_lengths_ms, + ArrayView encoder_frame_lengths_ms, int min_payload_bitrate_bps, bool use_slow_adaptation) : encoder_frame_lengths_ms_(encoder_frame_lengths_ms.begin(), diff --git a/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h b/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h index d7102b0b44..c64c75f051 100644 --- a/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h +++ b/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.h @@ -11,9 +11,9 @@ #ifndef MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_FRAME_LENGTH_CONTROLLER_V2_H_ #define MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_FRAME_LENGTH_CONTROLLER_V2_H_ +#include #include -#include "absl/types/optional.h" #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h" @@ -21,7 +21,7 @@ namespace webrtc { class FrameLengthControllerV2 final : public Controller { public: - FrameLengthControllerV2(rtc::ArrayView encoder_frame_lengths_ms, + FrameLengthControllerV2(ArrayView encoder_frame_lengths_ms, int min_payload_bitrate_bps, bool use_slow_adaptation); @@ -34,9 +34,9 @@ class FrameLengthControllerV2 final : public Controller { const int min_payload_bitrate_bps_; const bool use_slow_adaptation_; - absl::optional uplink_bandwidth_bps_; - absl::optional target_bitrate_bps_; - absl::optional overhead_bytes_per_packet_; + std::optional uplink_bandwidth_bps_; + std::optional target_bitrate_bps_; + std::optional overhead_bytes_per_packet_; }; } // namespace webrtc diff --git a/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2_unittest.cc b/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2_unittest.cc index 1c88f47c58..1bec1acfc9 100644 --- a/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2_unittest.cc +++ 
b/modules/audio_coding/audio_network_adaptor/frame_length_controller_v2_unittest.cc @@ -62,8 +62,8 @@ class FrameLengthControllerV2Test : public testing::Test { std::make_unique(kANASupportedFrameLengths, kMinPayloadBitrateBps, /*use_slow_adaptation=*/false); - absl::optional target_audio_bitrate_bps_; - absl::optional overhead_bytes_per_packet_; + std::optional target_audio_bitrate_bps_; + std::optional overhead_bytes_per_packet_; }; // Don't return any decision if we haven't received all required network diff --git a/modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h b/modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h index 346ed5db1a..ed07298e8e 100644 --- a/modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h +++ b/modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h @@ -11,7 +11,8 @@ #ifndef MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_INCLUDE_AUDIO_NETWORK_ADAPTOR_H_ #define MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_INCLUDE_AUDIO_NETWORK_ADAPTOR_H_ -#include "absl/types/optional.h" +#include + #include "api/audio_codecs/audio_encoder.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" diff --git a/modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h b/modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h index bd16292f7e..5934f6e1f1 100644 --- a/modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h +++ b/modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h @@ -13,7 +13,7 @@ #include -#include "absl/types/optional.h" +#include namespace webrtc { @@ -23,18 +23,18 @@ struct AudioEncoderRuntimeConfig { ~AudioEncoderRuntimeConfig(); AudioEncoderRuntimeConfig& operator=(const AudioEncoderRuntimeConfig& other); bool operator==(const AudioEncoderRuntimeConfig& other) const; - absl::optional bitrate_bps; - absl::optional frame_length_ms; + std::optional bitrate_bps; + std::optional frame_length_ms; // Note: This is what we tell the encoder. It doesn't have to reflect // the actual NetworkMetrics; it's subject to our decision. - absl::optional uplink_packet_loss_fraction; - absl::optional enable_fec; - absl::optional enable_dtx; + std::optional uplink_packet_loss_fraction; + std::optional enable_fec; + std::optional enable_dtx; // Some encoders can encode fewer channels than the actual input to make // better use of the bandwidth. `num_channels` sets the number of channels // to encode. - absl::optional num_channels; + std::optional num_channels; // This is true if the last frame length change was an increase, and otherwise // false. 
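The `AudioEncoderRuntimeConfig` hunk above only swaps `absl::optional` for `std::optional`; every field still defaults to an unset state. A small sketch of populating such a config after the migration (the element types behind the optionals are assumptions, since the template arguments are not visible in this extract):

```cpp
#include <optional>

#include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h"

namespace webrtc {

AudioEncoderRuntimeConfig MakeExampleRuntimeConfig() {
  AudioEncoderRuntimeConfig config;  // All optional fields start as nullopt.
  config.bitrate_bps = 32000;        // Bits per second.
  config.frame_length_ms = 60;
  config.enable_fec = true;
  config.enable_dtx = false;
  config.num_channels = 1;
  // uplink_packet_loss_fraction is deliberately left as std::nullopt here.
  return config;
}

}  // namespace webrtc
```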
diff --git a/modules/audio_coding/codecs/builtin_audio_decoder_factory_unittest.cc b/modules/audio_coding/codecs/builtin_audio_decoder_factory_unittest.cc index bd8d1cc341..55ca237966 100644 --- a/modules/audio_coding/codecs/builtin_audio_decoder_factory_unittest.cc +++ b/modules/audio_coding/codecs/builtin_audio_decoder_factory_unittest.cc @@ -12,150 +12,121 @@ #include +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "test/gtest.h" namespace webrtc { TEST(AudioDecoderFactoryTest, CreateUnknownDecoder) { - rtc::scoped_refptr adf = - CreateBuiltinAudioDecoderFactory(); + const Environment env = CreateEnvironment(); + scoped_refptr adf = CreateBuiltinAudioDecoderFactory(); ASSERT_TRUE(adf); - EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("rey", 8000, 1), absl::nullopt)); + EXPECT_FALSE(adf->Create(env, SdpAudioFormat("rey", 8000, 1), std::nullopt)); } TEST(AudioDecoderFactoryTest, CreatePcmu) { - rtc::scoped_refptr adf = - CreateBuiltinAudioDecoderFactory(); + const Environment env = CreateEnvironment(); + scoped_refptr adf = CreateBuiltinAudioDecoderFactory(); ASSERT_TRUE(adf); // PCMu supports 8 kHz, and any number of channels. + EXPECT_FALSE(adf->Create(env, SdpAudioFormat("pcmu", 8000, 0), std::nullopt)); + EXPECT_TRUE(adf->Create(env, SdpAudioFormat("pcmu", 8000, 1), std::nullopt)); + EXPECT_TRUE(adf->Create(env, SdpAudioFormat("pcmu", 8000, 2), std::nullopt)); + EXPECT_TRUE(adf->Create(env, SdpAudioFormat("pcmu", 8000, 3), std::nullopt)); EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("pcmu", 8000, 0), absl::nullopt)); - EXPECT_TRUE( - adf->MakeAudioDecoder(SdpAudioFormat("pcmu", 8000, 1), absl::nullopt)); - EXPECT_TRUE( - adf->MakeAudioDecoder(SdpAudioFormat("pcmu", 8000, 2), absl::nullopt)); - EXPECT_TRUE( - adf->MakeAudioDecoder(SdpAudioFormat("pcmu", 8000, 3), absl::nullopt)); - EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("pcmu", 16000, 1), absl::nullopt)); + adf->Create(env, SdpAudioFormat("pcmu", 16000, 1), std::nullopt)); } TEST(AudioDecoderFactoryTest, CreatePcma) { - rtc::scoped_refptr adf = - CreateBuiltinAudioDecoderFactory(); + const Environment env = CreateEnvironment(); + scoped_refptr adf = CreateBuiltinAudioDecoderFactory(); ASSERT_TRUE(adf); // PCMa supports 8 kHz, and any number of channels. + EXPECT_FALSE(adf->Create(env, SdpAudioFormat("pcma", 8000, 0), std::nullopt)); + EXPECT_TRUE(adf->Create(env, SdpAudioFormat("pcma", 8000, 1), std::nullopt)); + EXPECT_TRUE(adf->Create(env, SdpAudioFormat("pcma", 8000, 2), std::nullopt)); + EXPECT_TRUE(adf->Create(env, SdpAudioFormat("pcma", 8000, 3), std::nullopt)); EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("pcma", 8000, 0), absl::nullopt)); - EXPECT_TRUE( - adf->MakeAudioDecoder(SdpAudioFormat("pcma", 8000, 1), absl::nullopt)); - EXPECT_TRUE( - adf->MakeAudioDecoder(SdpAudioFormat("pcma", 8000, 2), absl::nullopt)); - EXPECT_TRUE( - adf->MakeAudioDecoder(SdpAudioFormat("pcma", 8000, 3), absl::nullopt)); - EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("pcma", 16000, 1), absl::nullopt)); -} - -TEST(AudioDecoderFactoryTest, CreateIlbc) { - rtc::scoped_refptr adf = - CreateBuiltinAudioDecoderFactory(); - ASSERT_TRUE(adf); - // iLBC supports 8 kHz, 1 channel. 
- EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("ilbc", 8000, 0), absl::nullopt)); -#ifdef WEBRTC_CODEC_ILBC - EXPECT_TRUE( - adf->MakeAudioDecoder(SdpAudioFormat("ilbc", 8000, 1), absl::nullopt)); -#endif - EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("ilbc", 8000, 2), absl::nullopt)); - EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("ilbc", 16000, 1), absl::nullopt)); + adf->Create(env, SdpAudioFormat("pcma", 16000, 1), std::nullopt)); } TEST(AudioDecoderFactoryTest, CreateL16) { - rtc::scoped_refptr adf = - CreateBuiltinAudioDecoderFactory(); + const Environment env = CreateEnvironment(); + scoped_refptr adf = CreateBuiltinAudioDecoderFactory(); ASSERT_TRUE(adf); // L16 supports any clock rate and any number of channels up to 24. const int clockrates[] = {8000, 16000, 32000, 48000}; const int num_channels[] = {1, 2, 3, 24}; for (int clockrate : clockrates) { - EXPECT_FALSE(adf->MakeAudioDecoder(SdpAudioFormat("l16", clockrate, 0), - absl::nullopt)); + EXPECT_FALSE( + adf->Create(env, SdpAudioFormat("l16", clockrate, 0), std::nullopt)); for (int channels : num_channels) { - EXPECT_TRUE(adf->MakeAudioDecoder( - SdpAudioFormat("l16", clockrate, channels), absl::nullopt)); + EXPECT_TRUE(adf->Create(env, SdpAudioFormat("l16", clockrate, channels), + std::nullopt)); } } } // Tests that using more channels than the maximum does not work TEST(AudioDecoderFactoryTest, MaxNrOfChannels) { - rtc::scoped_refptr adf = - CreateBuiltinAudioDecoderFactory(); + const Environment env = CreateEnvironment(); + scoped_refptr adf = CreateBuiltinAudioDecoderFactory(); std::vector codecs = { #ifdef WEBRTC_CODEC_OPUS "opus", -#endif -#ifdef WEBRTC_CODEC_ILBC - "ilbc", #endif "pcmu", "pcma", "l16", "G722", "G711", }; for (auto codec : codecs) { - EXPECT_FALSE(adf->MakeAudioDecoder( + EXPECT_FALSE(adf->Create( + env, SdpAudioFormat(codec, 32000, AudioDecoder::kMaxNumberOfChannels + 1), - absl::nullopt)); + std::nullopt)); } } TEST(AudioDecoderFactoryTest, CreateG722) { - rtc::scoped_refptr adf = - CreateBuiltinAudioDecoderFactory(); + const Environment env = CreateEnvironment(); + scoped_refptr adf = CreateBuiltinAudioDecoderFactory(); ASSERT_TRUE(adf); // g722 supports 8 kHz, 1-2 channels. + EXPECT_FALSE(adf->Create(env, SdpAudioFormat("g722", 8000, 0), std::nullopt)); + EXPECT_TRUE(adf->Create(env, SdpAudioFormat("g722", 8000, 1), std::nullopt)); + EXPECT_TRUE(adf->Create(env, SdpAudioFormat("g722", 8000, 2), std::nullopt)); + EXPECT_FALSE(adf->Create(env, SdpAudioFormat("g722", 8000, 3), std::nullopt)); EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("g722", 8000, 0), absl::nullopt)); - EXPECT_TRUE( - adf->MakeAudioDecoder(SdpAudioFormat("g722", 8000, 1), absl::nullopt)); - EXPECT_TRUE( - adf->MakeAudioDecoder(SdpAudioFormat("g722", 8000, 2), absl::nullopt)); - EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("g722", 8000, 3), absl::nullopt)); - EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("g722", 16000, 1), absl::nullopt)); + adf->Create(env, SdpAudioFormat("g722", 16000, 1), std::nullopt)); EXPECT_FALSE( - adf->MakeAudioDecoder(SdpAudioFormat("g722", 32000, 1), absl::nullopt)); + adf->Create(env, SdpAudioFormat("g722", 32000, 1), std::nullopt)); // g722 actually uses a 16 kHz sample rate instead of the nominal 8 kHz. 
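Throughout these decoder-factory hunks, `MakeAudioDecoder(format, absl::nullopt)` becomes `Create(env, format, std::nullopt)`, which now also takes an `Environment`. A hedged usage sketch of the new call shape; the call names mirror the hunks, while the header paths are assumptions:

```cpp
#include <memory>
#include <optional>

#include "api/audio_codecs/audio_format.h"
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"
#include "api/scoped_refptr.h"

namespace webrtc {

std::unique_ptr<AudioDecoder> CreatePcmuDecoder() {
  const Environment env = CreateEnvironment();
  scoped_refptr<AudioDecoderFactory> factory =
      CreateBuiltinAudioDecoderFactory();
  // Create() returns nullptr for unsupported formats (see the "rey" case in
  // the test above); the last argument is the optional codec pair id.
  return factory->Create(env, SdpAudioFormat("pcmu", 8000, 1), std::nullopt);
}

}  // namespace webrtc
```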
std::unique_ptr dec = - adf->MakeAudioDecoder(SdpAudioFormat("g722", 8000, 1), absl::nullopt); + adf->Create(env, SdpAudioFormat("g722", 8000, 1), std::nullopt); EXPECT_EQ(16000, dec->SampleRateHz()); } TEST(AudioDecoderFactoryTest, CreateOpus) { - rtc::scoped_refptr adf = - CreateBuiltinAudioDecoderFactory(); + const Environment env = CreateEnvironment(); + scoped_refptr adf = CreateBuiltinAudioDecoderFactory(); ASSERT_TRUE(adf); - // Opus supports 48 kHz, 2 channels, and wants a "stereo" parameter whose - // value is either "0" or "1". + // Opus supports 48 kHz and 2 channels. It is possible to specify a "stereo" + // parameter whose value is either "0" or "1". for (int hz : {8000, 16000, 32000, 48000}) { for (int channels : {0, 1, 2, 3}) { for (std::string stereo : {"XX", "0", "1", "2"}) { - SdpAudioFormat::Parameters params; + CodecParameterMap params; if (stereo != "XX") { params["stereo"] = stereo; } const bool good = (hz == 48000 && channels == 2 && (stereo == "XX" || stereo == "0" || stereo == "1")); - EXPECT_EQ(good, - static_cast(adf->MakeAudioDecoder( - SdpAudioFormat("opus", hz, channels, std::move(params)), - absl::nullopt))); + EXPECT_EQ( + good, + static_cast(adf->Create( + env, SdpAudioFormat("opus", hz, channels, std::move(params)), + std::nullopt))); } } } diff --git a/modules/audio_coding/codecs/builtin_audio_encoder_factory_unittest.cc b/modules/audio_coding/codecs/builtin_audio_encoder_factory_unittest.cc index 26ae1eda8a..78ba36fe85 100644 --- a/modules/audio_coding/codecs/builtin_audio_encoder_factory_unittest.cc +++ b/modules/audio_coding/codecs/builtin_audio_encoder_factory_unittest.cc @@ -14,6 +14,8 @@ #include #include +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "rtc_base/numerics/safe_conversions.h" #include "test/gmock.h" #include "test/gtest.h" @@ -21,8 +23,7 @@ namespace webrtc { class AudioEncoderFactoryTest - : public ::testing::TestWithParam> { -}; + : public ::testing::TestWithParam> {}; TEST_P(AudioEncoderFactoryTest, SupportsAtLeastOneFormat) { auto factory = GetParam(); @@ -40,11 +41,12 @@ TEST_P(AudioEncoderFactoryTest, CanQueryAllSupportedFormats) { } TEST_P(AudioEncoderFactoryTest, CanConstructAllSupportedEncoders) { + const Environment env = CreateEnvironment(); auto factory = GetParam(); auto supported_encoders = factory->GetSupportedEncoders(); for (const auto& spec : supported_encoders) { auto info = factory->QueryAudioEncoder(spec.format); - auto encoder = factory->MakeAudioEncoder(127, spec.format, absl::nullopt); + auto encoder = factory->Create(env, spec.format, {.payload_type = 127}); EXPECT_TRUE(encoder); EXPECT_EQ(encoder->SampleRateHz(), info->sample_rate_hz); EXPECT_EQ(encoder->NumChannels(), info->num_channels); @@ -54,18 +56,25 @@ TEST_P(AudioEncoderFactoryTest, CanConstructAllSupportedEncoders) { TEST_P(AudioEncoderFactoryTest, CanRunAllSupportedEncoders) { constexpr int kTestPayloadType = 127; + const Environment env = CreateEnvironment(); auto factory = GetParam(); auto supported_encoders = factory->GetSupportedEncoders(); for (const auto& spec : supported_encoders) { +// TODO(bugs.webrtc.org/345525069): Either fix/enable or remove G722. 
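The encoder-factory hunks make the matching change: `MakeAudioEncoder(payload_type, format, codec_pair_id)` becomes `Create(env, format, {.payload_type = ...})`, with the payload type moving into an options struct. A minimal sketch of the new shape (header paths are assumptions; the calls themselves are taken from the hunks):

```cpp
#include <memory>

#include "api/audio_codecs/audio_format.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"
#include "api/scoped_refptr.h"

namespace webrtc {

std::unique_ptr<AudioEncoder> CreateOpusEncoder() {
  const Environment env = CreateEnvironment();
  scoped_refptr<AudioEncoderFactory> factory =
      CreateBuiltinAudioEncoderFactory();
  // The payload type now travels in the trailing options struct rather than
  // as a separate leading argument.
  return factory->Create(env, SdpAudioFormat("opus", 48000, 2),
                         {.payload_type = 111});
}

}  // namespace webrtc
```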
+#if defined(__has_feature) && __has_feature(undefined_behavior_sanitizer) + if (spec.format.name == "G722") { + GTEST_SKIP() << "Skipping G722, see webrtc:345525069."; + } +#endif auto encoder = - factory->MakeAudioEncoder(kTestPayloadType, spec.format, absl::nullopt); + factory->Create(env, spec.format, {.payload_type = kTestPayloadType}); EXPECT_TRUE(encoder); encoder->Reset(); - const int num_samples = rtc::checked_cast( - encoder->SampleRateHz() * encoder->NumChannels() / 100); - rtc::Buffer out; - rtc::BufferT audio; - audio.SetData(num_samples, [](rtc::ArrayView audio) { + const int num_samples = checked_cast(encoder->SampleRateHz() * + encoder->NumChannels() / 100); + Buffer out; + BufferT audio; + audio.SetData(num_samples, [](ArrayView audio) { for (size_t i = 0; i != audio.size(); ++i) { // Just put some numbers in there, ensure they're within range. audio[i] = @@ -126,52 +135,40 @@ TEST(BuiltinAudioEncoderFactoryTest, SupportsTheExpectedFormats) { const std::vector expected_formats = { #ifdef WEBRTC_CODEC_OPUS - {"opus", 48000, 2, {{"minptime", "10"}, {"useinbandfec", "1"}}}, + {"opus", 48000, 2, {{"minptime", "10"}, {"useinbandfec", "1"}}}, #endif #if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX) - {"isac", 16000, 1}, + {"isac", 16000, 1}, #endif #ifdef WEBRTC_CODEC_ISAC - {"isac", 32000, 1}, -#endif - {"G722", 8000, 1}, -#ifdef WEBRTC_CODEC_ILBC - {"ilbc", 8000, 1}, + {"isac", 32000, 1}, #endif - {"pcmu", 8000, 1}, - {"pcma", 8000, 1} - }; + {"G722", 8000, 1}, + {"pcmu", 8000, 1}, + {"pcma", 8000, 1}}; ASSERT_THAT(supported_formats, ElementsAreArray(expected_formats)); } // Tests that using more channels than the maximum does not work. TEST(BuiltinAudioEncoderFactoryTest, MaxNrOfChannels) { - rtc::scoped_refptr aef = - CreateBuiltinAudioEncoderFactory(); + const Environment env = CreateEnvironment(); + scoped_refptr aef = CreateBuiltinAudioEncoderFactory(); std::vector codecs = { #ifdef WEBRTC_CODEC_OPUS - "opus", + "opus", #endif #if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX) - "isac", -#endif -#ifdef WEBRTC_CODEC_ILBC - "ilbc", + "isac", #endif - "pcmu", - "pcma", - "l16", - "G722", - "G711", + "pcmu", "pcma", "l16", "G722", "G711", }; for (auto codec : codecs) { - EXPECT_FALSE(aef->MakeAudioEncoder( - /*payload_type=*/111, - /*format=*/ + EXPECT_FALSE(aef->Create( + env, /*format=*/ SdpAudioFormat(codec, 32000, AudioEncoder::kMaxNumberOfChannels + 1), - /*codec_pair_id=*/absl::nullopt)); + {.payload_type = 111})); } } diff --git a/modules/audio_coding/codecs/cng/audio_encoder_cng.cc b/modules/audio_coding/codecs/cng/audio_encoder_cng.cc index 7546ac178f..a4d0b9abd4 100644 --- a/modules/audio_coding/codecs/cng/audio_encoder_cng.cc +++ b/modules/audio_coding/codecs/cng/audio_encoder_cng.cc @@ -12,9 +12,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/units/time_delta.h" #include "modules/audio_coding/codecs/cng/webrtc_cng.h" #include "rtc_base/checks.h" @@ -43,26 +43,24 @@ class AudioEncoderCng final : public AudioEncoder { size_t Max10MsFramesInAPacket() const override; int GetTargetBitrate() const override; EncodedInfo EncodeImpl(uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) override; + ArrayView audio, + Buffer* encoded) override; void Reset() override; bool SetFec(bool enable) override; bool SetDtx(bool enable) override; bool SetApplication(Application application) override; void SetMaxPlaybackRate(int frequency_hz) override; - rtc::ArrayView> ReclaimContainedEncoders() - 
override; + ArrayView> ReclaimContainedEncoders() override; void OnReceivedUplinkPacketLossFraction( float uplink_packet_loss_fraction) override; - void OnReceivedUplinkBandwidth( - int target_audio_bitrate_bps, - absl::optional bwe_period_ms) override; - absl::optional> GetFrameLengthRange() + void OnReceivedUplinkBandwidth(int target_audio_bitrate_bps, + std::optional bwe_period_ms) override; + std::optional> GetFrameLengthRange() const override; private: - EncodedInfo EncodePassive(size_t frames_to_encode, rtc::Buffer* encoded); - EncodedInfo EncodeActive(size_t frames_to_encode, rtc::Buffer* encoded); + EncodedInfo EncodePassive(size_t frames_to_encode, Buffer* encoded); + EncodedInfo EncodeActive(size_t frames_to_encode, Buffer* encoded); size_t SamplesPer10msFrame() const; std::unique_ptr speech_encoder_; @@ -89,7 +87,9 @@ AudioEncoderCng::AudioEncoderCng(AudioEncoderCngConfig&& config) : CreateVad(config.vad_mode)), cng_encoder_(new ComfortNoiseEncoder(SampleRateHz(), sid_frame_interval_ms_, - num_cng_coefficients_)) {} + num_cng_coefficients_)) { + speech_encoder_->Reset(); +} AudioEncoderCng::~AudioEncoderCng() = default; @@ -119,8 +119,8 @@ int AudioEncoderCng::GetTargetBitrate() const { AudioEncoder::EncodedInfo AudioEncoderCng::EncodeImpl( uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) { + ArrayView audio, + Buffer* encoded) { const size_t samples_per_10ms_frame = SamplesPer10msFrame(); RTC_CHECK_EQ(speech_buffer_.size(), rtp_timestamps_.size() * samples_per_10ms_frame); @@ -210,9 +210,9 @@ void AudioEncoderCng::SetMaxPlaybackRate(int frequency_hz) { speech_encoder_->SetMaxPlaybackRate(frequency_hz); } -rtc::ArrayView> +ArrayView> AudioEncoderCng::ReclaimContainedEncoders() { - return rtc::ArrayView>(&speech_encoder_, 1); + return ArrayView>(&speech_encoder_, 1); } void AudioEncoderCng::OnReceivedUplinkPacketLossFraction( @@ -223,19 +223,19 @@ void AudioEncoderCng::OnReceivedUplinkPacketLossFraction( void AudioEncoderCng::OnReceivedUplinkBandwidth( int target_audio_bitrate_bps, - absl::optional bwe_period_ms) { + std::optional bwe_period_ms) { speech_encoder_->OnReceivedUplinkBandwidth(target_audio_bitrate_bps, bwe_period_ms); } -absl::optional> +std::optional> AudioEncoderCng::GetFrameLengthRange() const { return speech_encoder_->GetFrameLengthRange(); } AudioEncoder::EncodedInfo AudioEncoderCng::EncodePassive( size_t frames_to_encode, - rtc::Buffer* encoded) { + Buffer* encoded) { bool force_sid = last_frame_active_; bool output_produced = false; const size_t samples_per_10ms_frame = SamplesPer10msFrame(); @@ -246,11 +246,10 @@ AudioEncoder::EncodedInfo AudioEncoderCng::EncodePassive( // WebRtcCng_Encode(), since later loop iterations may return zero in // that value, in which case we don't want to overwrite any value from // an earlier iteration. 
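The comment above explains why `EncodePassive()` is careful about which loop iteration's byte count it keeps: the CNG encoder only appends data when it emits a SID frame, so most 10 ms slices return zero. A simplified sketch of that slicing, using only calls that appear in these hunks (the wrapper itself is illustrative, and the header paths are assumptions):

```cpp
#include <cstdint>

#include "api/array_view.h"
#include "modules/audio_coding/codecs/cng/webrtc_cng.h"
#include "rtc_base/buffer.h"
#include "rtc_base/checks.h"

namespace webrtc {

// Feed a buffered passive segment to the CNG encoder in 10 ms slices; only a
// slice that produces a SID frame contributes bytes to `encoded`.
size_t EncodePassiveSketch(ComfortNoiseEncoder& cng_encoder,
                           ArrayView<const int16_t> speech,
                           int sample_rate_hz,
                           bool force_sid,
                           Buffer* encoded) {
  const size_t samples_per_10ms = CheckedDivExact(10 * sample_rate_hz, 1000);
  size_t encoded_bytes = 0;
  for (size_t i = 0; i + samples_per_10ms <= speech.size();
       i += samples_per_10ms) {
    const size_t bytes = cng_encoder.Encode(
        speech.subview(i, samples_per_10ms), force_sid, encoded);
    if (bytes > 0) {
      encoded_bytes = bytes;  // Keep the size from the SID-producing slice.
      force_sid = false;      // At most one forced SID per call.
    }
  }
  return encoded_bytes;
}

}  // namespace webrtc
```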
- size_t encoded_bytes_tmp = - cng_encoder_->Encode(rtc::ArrayView( - &speech_buffer_[i * samples_per_10ms_frame], + size_t encoded_bytes_tmp = cng_encoder_->Encode( + ArrayView(&speech_buffer_[i * samples_per_10ms_frame], samples_per_10ms_frame), - force_sid, encoded); + force_sid, encoded); if (encoded_bytes_tmp > 0) { RTC_CHECK(!output_produced); @@ -268,16 +267,15 @@ AudioEncoder::EncodedInfo AudioEncoderCng::EncodePassive( } AudioEncoder::EncodedInfo AudioEncoderCng::EncodeActive(size_t frames_to_encode, - rtc::Buffer* encoded) { + Buffer* encoded) { const size_t samples_per_10ms_frame = SamplesPer10msFrame(); AudioEncoder::EncodedInfo info; for (size_t i = 0; i < frames_to_encode; ++i) { - info = - speech_encoder_->Encode(rtp_timestamps_.front(), - rtc::ArrayView( - &speech_buffer_[i * samples_per_10ms_frame], - samples_per_10ms_frame), - encoded); + info = speech_encoder_->Encode( + rtp_timestamps_.front(), + ArrayView(&speech_buffer_[i * samples_per_10ms_frame], + samples_per_10ms_frame), + encoded); if (i + 1 == frames_to_encode) { RTC_CHECK_GT(info.encoded_bytes, 0) << "Encoder didn't deliver data."; } else { @@ -289,7 +287,7 @@ AudioEncoder::EncodedInfo AudioEncoderCng::EncodeActive(size_t frames_to_encode, } size_t AudioEncoderCng::SamplesPer10msFrame() const { - return rtc::CheckedDivExact(10 * SampleRateHz(), 1000); + return CheckedDivExact(10 * SampleRateHz(), 1000); } } // namespace diff --git a/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc b/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc index c688004363..c6c2df7f0c 100644 --- a/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc +++ b/modules/audio_coding/codecs/cng/audio_encoder_cng_unittest.cc @@ -88,8 +88,7 @@ class AudioEncoderCngTest : public ::testing::Test { void Encode() { ASSERT_TRUE(cng_) << "Must call CreateCng() first."; encoded_info_ = cng_->Encode( - timestamp_, - rtc::ArrayView(audio_, num_audio_samples_10ms_), + timestamp_, ArrayView(audio_, num_audio_samples_10ms_), &encoded_); timestamp_ += static_cast(num_audio_samples_10ms_); } @@ -207,7 +206,7 @@ class AudioEncoderCngTest : public ::testing::Test { uint32_t timestamp_; int16_t audio_[kMaxNumSamples]; size_t num_audio_samples_10ms_; - rtc::Buffer encoded_; + Buffer encoded_; AudioEncoder::EncodedInfo encoded_info_; int sample_rate_hz_; }; @@ -226,8 +225,8 @@ TEST_F(AudioEncoderCngTest, CheckFrameSizePropagation) { TEST_F(AudioEncoderCngTest, CheckTargetAudioBitratePropagation) { CreateCng(MakeCngConfig()); EXPECT_CALL(*mock_encoder_, - OnReceivedUplinkBandwidth(4711, absl::optional())); - cng_->OnReceivedUplinkBandwidth(4711, absl::nullopt); + OnReceivedUplinkBandwidth(4711, std::optional())); + cng_->OnReceivedUplinkBandwidth(4711, std::nullopt); } TEST_F(AudioEncoderCngTest, CheckPacketLossFractionPropagation) { @@ -241,7 +240,7 @@ TEST_F(AudioEncoderCngTest, CheckGetFrameLengthRangePropagation) { auto expected_range = std::make_pair(TimeDelta::Millis(20), TimeDelta::Millis(20)); EXPECT_CALL(*mock_encoder_, GetFrameLengthRange()) - .WillRepeatedly(Return(absl::make_optional(expected_range))); + .WillRepeatedly(Return(std::make_optional(expected_range))); EXPECT_THAT(cng_->GetFrameLengthRange(), Optional(Eq(expected_range))); } @@ -305,8 +304,8 @@ TEST_F(AudioEncoderCngTest, EncodePassive) { encoded_info_.encoded_bytes); EXPECT_EQ(expected_timestamp, encoded_info_.encoded_timestamp); } - expected_timestamp += rtc::checked_cast( - kBlocksPerFrame * num_audio_samples_10ms_); + expected_timestamp += + 
checked_cast(kBlocksPerFrame * num_audio_samples_10ms_); } else { // Otherwise, expect no output. EXPECT_EQ(0u, encoded_info_.encoded_bytes); diff --git a/modules/audio_coding/codecs/cng/cng_unittest.cc b/modules/audio_coding/codecs/cng/cng_unittest.cc index 0e6ab79394..76d11d413c 100644 --- a/modules/audio_coding/codecs/cng/cng_unittest.cc +++ b/modules/audio_coding/codecs/cng/cng_unittest.cc @@ -55,17 +55,17 @@ void CngTest::SetUp() { } void CngTest::TestCngEncode(int sample_rate_hz, int quality) { - const size_t num_samples_10ms = rtc::CheckedDivExact(sample_rate_hz, 100); - rtc::Buffer sid_data; + const size_t num_samples_10ms = CheckedDivExact(sample_rate_hz, 100); + Buffer sid_data; ComfortNoiseEncoder cng_encoder(sample_rate_hz, kSidNormalIntervalUpdate, quality); - EXPECT_EQ(0U, cng_encoder.Encode(rtc::ArrayView( - speech_data_, num_samples_10ms), - kNoSid, &sid_data)); + EXPECT_EQ(0U, cng_encoder.Encode( + ArrayView(speech_data_, num_samples_10ms), + kNoSid, &sid_data)); EXPECT_EQ(static_cast(quality + 1), cng_encoder.Encode( - rtc::ArrayView(speech_data_, num_samples_10ms), + ArrayView(speech_data_, num_samples_10ms), kForceSid, &sid_data)); } @@ -89,16 +89,15 @@ TEST_F(CngDeathTest, CngInitFail) { // Encode Cng with too long input vector. TEST_F(CngDeathTest, CngEncodeTooLong) { - rtc::Buffer sid_data; + Buffer sid_data; // Create encoder. ComfortNoiseEncoder cng_encoder(8000, kSidNormalIntervalUpdate, kCNGNumParamsNormal); // Run encoder with too much data. - EXPECT_DEATH( - cng_encoder.Encode(rtc::ArrayView(speech_data_, 641), - kNoSid, &sid_data), - ""); + EXPECT_DEATH(cng_encoder.Encode(ArrayView(speech_data_, 641), + kNoSid, &sid_data), + ""); } #endif // GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) @@ -124,7 +123,7 @@ TEST_F(CngTest, CngEncode64000) { // Update SID parameters, for both 9 and 16 parameters. TEST_F(CngTest, CngUpdateSid) { - rtc::Buffer sid_data; + Buffer sid_data; // Create and initialize encoder and decoder. ComfortNoiseEncoder cng_encoder(16000, kSidNormalIntervalUpdate, @@ -133,7 +132,7 @@ TEST_F(CngTest, CngUpdateSid) { // Run normal Encode and UpdateSid. EXPECT_EQ(kCNGNumParamsNormal + 1, - cng_encoder.Encode(rtc::ArrayView(speech_data_, 160), + cng_encoder.Encode(ArrayView(speech_data_, 160), kForceSid, &sid_data)); cng_decoder.UpdateSid(sid_data); @@ -142,27 +141,26 @@ TEST_F(CngTest, CngUpdateSid) { cng_decoder.Reset(); // Expect 0 because of unstable parameters after switching length. - EXPECT_EQ(0U, - cng_encoder.Encode(rtc::ArrayView(speech_data_, 160), - kForceSid, &sid_data)); + EXPECT_EQ(0U, cng_encoder.Encode(ArrayView(speech_data_, 160), + kForceSid, &sid_data)); EXPECT_EQ( kCNGNumParamsHigh + 1, - cng_encoder.Encode(rtc::ArrayView(speech_data_ + 160, 160), + cng_encoder.Encode(ArrayView(speech_data_ + 160, 160), kForceSid, &sid_data)); cng_decoder.UpdateSid( - rtc::ArrayView(sid_data.data(), kCNGNumParamsNormal + 1)); + ArrayView(sid_data.data(), kCNGNumParamsNormal + 1)); } // Update SID parameters, with wrong parameters or without calling decode. TEST_F(CngTest, CngUpdateSidErroneous) { - rtc::Buffer sid_data; + Buffer sid_data; // Encode. ComfortNoiseEncoder cng_encoder(16000, kSidNormalIntervalUpdate, kCNGNumParamsNormal); ComfortNoiseDecoder cng_decoder; EXPECT_EQ(kCNGNumParamsNormal + 1, - cng_encoder.Encode(rtc::ArrayView(speech_data_, 160), + cng_encoder.Encode(ArrayView(speech_data_, 160), kForceSid, &sid_data)); // First run with valid parameters, then with too many CNG parameters. 
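The tests around here exercise the full encoder-to-decoder path: force a SID frame, feed it to `UpdateSid()`, then `Generate()` comfort noise. A condensed sketch of that round trip (the 16 kHz rate, 100 ms SID interval and quality value mirror the constants used in the tests and are otherwise illustrative):

```cpp
#include <cstdint>

#include "api/array_view.h"
#include "modules/audio_coding/codecs/cng/webrtc_cng.h"
#include "rtc_base/buffer.h"

namespace webrtc {

// Encode one 10 ms frame (160 samples at 16 kHz) with a forced SID, update
// the decoder with the SID payload, and generate 10 ms of comfort noise.
bool CngRoundTrip(ArrayView<const int16_t> ten_ms_of_speech) {
  ComfortNoiseEncoder cng_encoder(/*fs=*/16000, /*interval=*/100,
                                  /*quality=*/8);
  ComfortNoiseDecoder cng_decoder;

  Buffer sid_data;
  if (cng_encoder.Encode(ten_ms_of_speech, /*force_sid=*/true, &sid_data) ==
      0) {
    return false;  // No SID frame was produced.
  }
  cng_decoder.UpdateSid(sid_data);

  int16_t out_data[160];
  return cng_decoder.Generate(ArrayView<int16_t>(out_data, 160),
                              /*new_period=*/true);
}

}  // namespace webrtc
```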
@@ -180,7 +178,7 @@ TEST_F(CngTest, CngUpdateSidErroneous) { // Test to generate cng data, by forcing SID. Both normal and faulty condition. TEST_F(CngTest, CngGenerate) { - rtc::Buffer sid_data; + Buffer sid_data; int16_t out_data[640]; // Create and initialize encoder and decoder. @@ -190,23 +188,23 @@ TEST_F(CngTest, CngGenerate) { // Normal Encode. EXPECT_EQ(kCNGNumParamsNormal + 1, - cng_encoder.Encode(rtc::ArrayView(speech_data_, 160), + cng_encoder.Encode(ArrayView(speech_data_, 160), kForceSid, &sid_data)); // Normal UpdateSid. cng_decoder.UpdateSid(sid_data); // Two normal Generate, one with new_period. - EXPECT_TRUE(cng_decoder.Generate(rtc::ArrayView(out_data, 640), 1)); - EXPECT_TRUE(cng_decoder.Generate(rtc::ArrayView(out_data, 640), 0)); + EXPECT_TRUE(cng_decoder.Generate(ArrayView(out_data, 640), 1)); + EXPECT_TRUE(cng_decoder.Generate(ArrayView(out_data, 640), 0)); // Call Genereate with too much data. - EXPECT_FALSE(cng_decoder.Generate(rtc::ArrayView(out_data, 641), 0)); + EXPECT_FALSE(cng_decoder.Generate(ArrayView(out_data, 641), 0)); } // Test automatic SID. TEST_F(CngTest, CngAutoSid) { - rtc::Buffer sid_data; + Buffer sid_data; // Create and initialize encoder and decoder. ComfortNoiseEncoder cng_encoder(16000, kSidNormalIntervalUpdate, @@ -215,20 +213,20 @@ TEST_F(CngTest, CngAutoSid) { // Normal Encode, 100 msec, where no SID data should be generated. for (int i = 0; i < 10; i++) { - EXPECT_EQ( - 0U, cng_encoder.Encode(rtc::ArrayView(speech_data_, 160), - kNoSid, &sid_data)); + EXPECT_EQ(0U, + cng_encoder.Encode(ArrayView(speech_data_, 160), + kNoSid, &sid_data)); } // We have reached 100 msec, and SID data should be generated. EXPECT_EQ(kCNGNumParamsNormal + 1, - cng_encoder.Encode(rtc::ArrayView(speech_data_, 160), + cng_encoder.Encode(ArrayView(speech_data_, 160), kNoSid, &sid_data)); } // Test automatic SID, with very short interval. TEST_F(CngTest, CngAutoSidShort) { - rtc::Buffer sid_data; + Buffer sid_data; // Create and initialize encoder and decoder. ComfortNoiseEncoder cng_encoder(16000, kSidShortIntervalUpdate, @@ -236,16 +234,14 @@ TEST_F(CngTest, CngAutoSidShort) { ComfortNoiseDecoder cng_decoder; // First call will never generate SID, unless forced to. - EXPECT_EQ(0U, - cng_encoder.Encode(rtc::ArrayView(speech_data_, 160), - kNoSid, &sid_data)); + EXPECT_EQ(0U, cng_encoder.Encode(ArrayView(speech_data_, 160), + kNoSid, &sid_data)); // Normal Encode, 100 msec, SID data should be generated all the time. 
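The two auto-SID tests above pin down the cadence: with 10 ms frames, `interval_ms / 10` passive frames go by before an automatic SID update, so the normal 100 ms interval yields no SID during the first ten `Encode()` calls and a SID frame on the eleventh. A back-of-the-envelope check of that arithmetic (the helper is illustrative only):

```cpp
// Number of 10 ms passive frames between automatic SID updates.
constexpr int kFrameDurationMs = 10;

constexpr int FramesPerSidUpdate(int sid_interval_ms) {
  return sid_interval_ms / kFrameDurationMs;
}

static_assert(FramesPerSidUpdate(100) == 10,
              "normal interval: ten frames pass before the automatic SID");
```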
for (int i = 0; i < 10; i++) { - EXPECT_EQ( - kCNGNumParamsNormal + 1, - cng_encoder.Encode(rtc::ArrayView(speech_data_, 160), - kNoSid, &sid_data)); + EXPECT_EQ(kCNGNumParamsNormal + 1, + cng_encoder.Encode(ArrayView(speech_data_, 160), + kNoSid, &sid_data)); } } diff --git a/modules/audio_coding/codecs/cng/webrtc_cng.cc b/modules/audio_coding/codecs/cng/webrtc_cng.cc index 48f1b8c296..4649f16d9e 100644 --- a/modules/audio_coding/codecs/cng/webrtc_cng.cc +++ b/modules/audio_coding/codecs/cng/webrtc_cng.cc @@ -71,7 +71,7 @@ void ComfortNoiseDecoder::Reset() { dec_used_scale_factor_ = 0; } -void ComfortNoiseDecoder::UpdateSid(rtc::ArrayView sid) { +void ComfortNoiseDecoder::UpdateSid(ArrayView sid) { int16_t refCs[WEBRTC_CNG_MAX_LPC_ORDER]; int32_t targetEnergy; size_t length = sid.size(); @@ -108,7 +108,7 @@ void ComfortNoiseDecoder::UpdateSid(rtc::ArrayView sid) { } } -bool ComfortNoiseDecoder::Generate(rtc::ArrayView out_data, +bool ComfortNoiseDecoder::Generate(ArrayView out_data, bool new_period) { int16_t excitation[kCngMaxOutsizeOrder]; int16_t low[kCngMaxOutsizeOrder]; @@ -137,7 +137,7 @@ bool ComfortNoiseDecoder::Generate(rtc::ArrayView out_data, } /* Calculate new scale factor in Q13 */ - dec_used_scale_factor_ = rtc::checked_cast( + dec_used_scale_factor_ = checked_cast( WEBRTC_SPL_MUL_16_16_RSFT(dec_used_scale_factor_, Beta >> 2, 13) + WEBRTC_SPL_MUL_16_16_RSFT(dec_target_scale_factor_, BetaC >> 2, 13)); @@ -199,8 +199,7 @@ bool ComfortNoiseDecoder::Generate(rtc::ArrayView out_data, * `out_data` - Filtered speech samples. */ WebRtcSpl_FilterAR(lpPoly, WEBRTC_CNG_MAX_LPC_ORDER + 1, excitation, num_samples, dec_filtstate_, WEBRTC_CNG_MAX_LPC_ORDER, - dec_filtstateLow_, WEBRTC_CNG_MAX_LPC_ORDER, - out_data.data(), low, num_samples); + dec_filtstateLow_, out_data.data(), low); return true; } @@ -233,9 +232,9 @@ void ComfortNoiseEncoder::Reset(int fs, int interval, int quality) { enc_seed_ = 7777; /* For debugging only. */ } -size_t ComfortNoiseEncoder::Encode(rtc::ArrayView speech, +size_t ComfortNoiseEncoder::Encode(ArrayView speech, bool force_sid, - rtc::Buffer* output) { + Buffer* output) { int16_t arCoefs[WEBRTC_CNG_MAX_LPC_ORDER + 1]; int32_t corrVector[WEBRTC_CNG_MAX_LPC_ORDER + 1]; int16_t refCs[WEBRTC_CNG_MAX_LPC_ORDER + 1]; @@ -364,7 +363,7 @@ size_t ComfortNoiseEncoder::Encode(rtc::ArrayView speech, index = 94; const size_t output_coefs = enc_nrOfCoefs_ + 1; - output->AppendData(output_coefs, [&](rtc::ArrayView output) { + output->AppendData(output_coefs, [&](ArrayView output) { output[0] = (uint8_t)index; /* Quantize coefficients with tweak for WebRtc implementation of diff --git a/modules/audio_coding/codecs/cng/webrtc_cng.h b/modules/audio_coding/codecs/cng/webrtc_cng.h index 7afd243f81..738f60aa1f 100644 --- a/modules/audio_coding/codecs/cng/webrtc_cng.h +++ b/modules/audio_coding/codecs/cng/webrtc_cng.h @@ -34,7 +34,7 @@ class ComfortNoiseDecoder { // Updates the CN state when a new SID packet arrives. // `sid` is a view of the SID packet without the headers. - void UpdateSid(rtc::ArrayView sid); + void UpdateSid(ArrayView sid); // Generates comfort noise. // `out_data` will be filled with samples - its size determines the number of @@ -43,7 +43,7 @@ class ComfortNoiseDecoder { // currently 640 bytes (equalling 10ms at 64kHz). // TODO(ossu): Specify better limits for the size of out_data. Either let it // be unbounded or limit to 10ms in the current sample rate. 
- bool Generate(rtc::ArrayView<int16_t> out_data, bool new_period); + bool Generate(ArrayView<int16_t> out_data, bool new_period); private: uint32_t dec_seed_; @@ -79,9 +79,9 @@ class ComfortNoiseEncoder { // true, a SID frame is forced and the internal sid interval counter is reset. // Will fail if the input size is too large (> 640 samples, see // ComfortNoiseDecoder::Generate). - size_t Encode(rtc::ArrayView<const int16_t> speech, + size_t Encode(ArrayView<const int16_t> speech, bool force_sid, - rtc::Buffer* output); + Buffer* output); private: size_t enc_nrOfCoefs_; diff --git a/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc b/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc index 46ac671b30..4b97e11c02 100644 --- a/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc +++ b/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc @@ -20,7 +20,7 @@ namespace webrtc { void AudioDecoderPcmU::Reset() {} std::vector<AudioDecoder::ParseResult> AudioDecoderPcmU::ParsePayload( - rtc::Buffer&& payload, + Buffer&& payload, uint32_t timestamp) { return LegacyEncodedAudioFrame::SplitBySamples( this, std::move(payload), timestamp, 8 * num_channels_, 8); @@ -52,16 +52,21 @@ int AudioDecoderPcmU::DecodeInternal(const uint8_t* encoded, return static_cast<int>(ret); } -int AudioDecoderPcmU::PacketDuration(const uint8_t* encoded, +int AudioDecoderPcmU::PacketDuration(const uint8_t* /* encoded */, size_t encoded_len) const { // One encoded byte per sample per channel. return static_cast<int>(encoded_len / Channels()); } +int AudioDecoderPcmU::PacketDurationRedundant(const uint8_t* encoded, + size_t encoded_len) const { + return PacketDuration(encoded, encoded_len); +} + void AudioDecoderPcmA::Reset() {} std::vector<AudioDecoder::ParseResult> AudioDecoderPcmA::ParsePayload( - rtc::Buffer&& payload, + Buffer&& payload, uint32_t timestamp) { return LegacyEncodedAudioFrame::SplitBySamples( this, std::move(payload), timestamp, 8 * num_channels_, 8); @@ -93,10 +98,15 @@ int AudioDecoderPcmA::DecodeInternal(const uint8_t* encoded, return static_cast<int>(ret); } -int AudioDecoderPcmA::PacketDuration(const uint8_t* encoded, +int AudioDecoderPcmA::PacketDuration(const uint8_t* /* encoded */, size_t encoded_len) const { // One encoded byte per sample per channel.
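The new PacketDurationRedundant() overrides simply forward to PacketDuration(): G.711 is a fixed-rate codec with exactly one encoded byte per sample per channel, so a redundant payload (for example a RED secondary encoding) has the same duration as a primary one. A 160-byte mono payload is 160 samples, i.e. 20 ms at 8 kHz. A small sketch of the same arithmetic; the helper names are made up:

#include <cstddef>

// Duration math mirroring the G.711 decoders above (sketch, not project code).
constexpr int kG711SampleRateHz = 8000;

constexpr int G711DurationSamples(size_t encoded_len, size_t channels) {
  return static_cast<int>(encoded_len / channels);  // 1 byte/sample/channel.
}

constexpr int G711DurationMs(size_t encoded_len, size_t channels) {
  return G711DurationSamples(encoded_len, channels) * 1000 / kG711SampleRateHz;
}

static_assert(G711DurationMs(160, 1) == 20, "160 bytes mono == 20 ms");
static_assert(G711DurationMs(320, 2) == 20, "320 bytes stereo == 20 ms");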
return static_cast<int>(encoded_len / Channels()); } +int AudioDecoderPcmA::PacketDurationRedundant(const uint8_t* encoded, + size_t encoded_len) const { + return PacketDuration(encoded, encoded_len); +} + } // namespace webrtc diff --git a/modules/audio_coding/codecs/g711/audio_decoder_pcm.h b/modules/audio_coding/codecs/g711/audio_decoder_pcm.h index 3fa42cba30..7eb2835be4 100644 --- a/modules/audio_coding/codecs/g711/audio_decoder_pcm.h +++ b/modules/audio_coding/codecs/g711/audio_decoder_pcm.h @@ -32,9 +32,11 @@ class AudioDecoderPcmU final : public AudioDecoder { AudioDecoderPcmU& operator=(const AudioDecoderPcmU&) = delete; void Reset() override; - std::vector<ParseResult> ParsePayload(rtc::Buffer&& payload, + std::vector<ParseResult> ParsePayload(Buffer&& payload, uint32_t timestamp) override; int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override; + int PacketDurationRedundant(const uint8_t* encoded, + size_t encoded_len) const override; int SampleRateHz() const override; size_t Channels() const override; @@ -59,9 +61,11 @@ class AudioDecoderPcmA final : public AudioDecoder { AudioDecoderPcmA& operator=(const AudioDecoderPcmA&) = delete; void Reset() override; - std::vector<ParseResult> ParsePayload(rtc::Buffer&& payload, + std::vector<ParseResult> ParsePayload(Buffer&& payload, uint32_t timestamp) override; int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override; + int PacketDurationRedundant(const uint8_t* encoded, + size_t encoded_len) const override; int SampleRateHz() const override; size_t Channels() const override; diff --git a/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc b/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc index 65e2da479d..38232e1c7b 100644 --- a/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc +++ b/modules/audio_coding/codecs/g711/audio_encoder_pcm.cc @@ -61,8 +61,8 @@ int AudioEncoderPcm::GetTargetBitrate() const { AudioEncoder::EncodedInfo AudioEncoderPcm::EncodeImpl( uint32_t rtp_timestamp, - rtc::ArrayView<const int16_t> audio, - rtc::Buffer* encoded) { + ArrayView<const int16_t> audio, + Buffer* encoded) { if (speech_buffer_.empty()) { first_timestamp_in_buffer_ = rtp_timestamp; } @@ -75,8 +75,7 @@ AudioEncoder::EncodedInfo AudioEncoderPcm::EncodeImpl( info.encoded_timestamp = first_timestamp_in_buffer_; info.payload_type = payload_type_; info.encoded_bytes = encoded->AppendData( - full_frame_samples_ * BytesPerSample(), - [&](rtc::ArrayView<uint8_t> encoded) { + full_frame_samples_ * BytesPerSample(), [&](ArrayView<uint8_t> encoded) { return EncodeCall(&speech_buffer_[0], full_frame_samples_, encoded.data()); }); @@ -89,7 +88,7 @@ void AudioEncoderPcm::Reset() { speech_buffer_.clear(); } -absl::optional<std::pair<TimeDelta, TimeDelta>> +std::optional<std::pair<TimeDelta, TimeDelta>> AudioEncoderPcm::GetFrameLengthRange() const { return {{TimeDelta::Millis(num_10ms_frames_per_packet_ * 10), TimeDelta::Millis(num_10ms_frames_per_packet_ * 10)}}; diff --git a/modules/audio_coding/codecs/g711/audio_encoder_pcm.h b/modules/audio_coding/codecs/g711/audio_encoder_pcm.h index d50be4b457..18a4770ec4 100644 --- a/modules/audio_coding/codecs/g711/audio_encoder_pcm.h +++ b/modules/audio_coding/codecs/g711/audio_encoder_pcm.h @@ -11,10 +11,10 @@ #ifndef MODULES_AUDIO_CODING_CODECS_G711_AUDIO_ENCODER_PCM_H_ #define MODULES_AUDIO_CODING_CODECS_G711_AUDIO_ENCODER_PCM_H_ +#include <optional> #include <utility> #include <vector> -#include "absl/types/optional.h" #include "api/audio_codecs/audio_encoder.h" #include "api/units/time_delta.h" @@ -43,15 +43,15 @@ class AudioEncoderPcm : public AudioEncoder { size_t Max10MsFramesInAPacket() const override; int GetTargetBitrate() const override; void 
Reset() override; - absl::optional> GetFrameLengthRange() + std::optional> GetFrameLengthRange() const override; protected: AudioEncoderPcm(const Config& config, int sample_rate_hz); EncodedInfo EncodeImpl(uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) override; + ArrayView audio, + Buffer* encoded) override; virtual size_t EncodeCall(const int16_t* audio, size_t input_len, diff --git a/modules/audio_coding/codecs/g711/g711_interface.c b/modules/audio_coding/codecs/g711/g711_interface.c index 5fe1692ccb..84b08cb6b2 100644 --- a/modules/audio_coding/codecs/g711/g711_interface.c +++ b/modules/audio_coding/codecs/g711/g711_interface.c @@ -8,10 +8,11 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include "modules/audio_coding/codecs/g711/g711_interface.h" + #include #include "modules/third_party/g711/g711.h" -#include "modules/audio_coding/codecs/g711/g711_interface.h" size_t WebRtcG711_EncodeA(const int16_t* speechIn, size_t len, diff --git a/modules/audio_coding/codecs/g722/audio_decoder_g722.cc b/modules/audio_coding/codecs/g722/audio_decoder_g722.cc index e969ed1189..cbffc488ec 100644 --- a/modules/audio_coding/codecs/g722/audio_decoder_g722.cc +++ b/modules/audio_coding/codecs/g722/audio_decoder_g722.cc @@ -51,18 +51,23 @@ void AudioDecoderG722Impl::Reset() { } std::vector AudioDecoderG722Impl::ParsePayload( - rtc::Buffer&& payload, + Buffer&& payload, uint32_t timestamp) { return LegacyEncodedAudioFrame::SplitBySamples(this, std::move(payload), timestamp, 8, 16); } -int AudioDecoderG722Impl::PacketDuration(const uint8_t* encoded, +int AudioDecoderG722Impl::PacketDuration(const uint8_t* /* encoded */, size_t encoded_len) const { // 1/2 encoded byte per sample per channel. return static_cast(2 * encoded_len / Channels()); } +int AudioDecoderG722Impl::PacketDurationRedundant(const uint8_t* encoded, + size_t encoded_len) const { + return PacketDuration(encoded, encoded_len); +} + int AudioDecoderG722Impl::SampleRateHz() const { return 16000; } @@ -120,7 +125,7 @@ int AudioDecoderG722StereoImpl::DecodeInternal(const uint8_t* encoded, return static_cast(ret); } -int AudioDecoderG722StereoImpl::PacketDuration(const uint8_t* encoded, +int AudioDecoderG722StereoImpl::PacketDuration(const uint8_t* /* encoded */, size_t encoded_len) const { // 1/2 encoded byte per sample per channel. Make sure the length represents // an equal number of bytes per channel. 
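G.722 packs two samples into each encoded byte per channel, which is why PacketDuration() above returns 2 * encoded_len / Channels() and why the stereo variant additionally needs an even byte count per channel before de-interleaving. An 80-byte mono payload is therefore 160 samples, i.e. 10 ms at the 16 kHz rate the decoder reports. A sketch of that arithmetic; the helper name is an assumption:

#include <cstddef>

// G.722: two samples per encoded byte per channel (sketch, not project code).
constexpr int G722DurationSamples(size_t encoded_len, size_t channels) {
  return static_cast<int>(2 * encoded_len / channels);
}

static_assert(G722DurationSamples(/*encoded_len=*/80, /*channels=*/1) == 160,
              "80 bytes of mono G.722 == 160 samples == 10 ms at 16 kHz");
static_assert(G722DurationSamples(/*encoded_len=*/160, /*channels=*/2) == 160,
              "stereo payloads carry an even number of bytes per channel");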
Otherwise, we cannot de-interleave @@ -142,7 +147,7 @@ void AudioDecoderG722StereoImpl::Reset() { } std::vector AudioDecoderG722StereoImpl::ParsePayload( - rtc::Buffer&& payload, + Buffer&& payload, uint32_t timestamp) { return LegacyEncodedAudioFrame::SplitBySamples(this, std::move(payload), timestamp, 2 * 8, 16); diff --git a/modules/audio_coding/codecs/g722/audio_decoder_g722.h b/modules/audio_coding/codecs/g722/audio_decoder_g722.h index 5872fad5de..431d896b18 100644 --- a/modules/audio_coding/codecs/g722/audio_decoder_g722.h +++ b/modules/audio_coding/codecs/g722/audio_decoder_g722.h @@ -27,9 +27,11 @@ class AudioDecoderG722Impl final : public AudioDecoder { bool HasDecodePlc() const override; void Reset() override; - std::vector ParsePayload(rtc::Buffer&& payload, + std::vector ParsePayload(Buffer&& payload, uint32_t timestamp) override; int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override; + int PacketDurationRedundant(const uint8_t* encoded, + size_t encoded_len) const override; int SampleRateHz() const override; size_t Channels() const override; @@ -54,7 +56,7 @@ class AudioDecoderG722StereoImpl final : public AudioDecoder { delete; void Reset() override; - std::vector ParsePayload(rtc::Buffer&& payload, + std::vector ParsePayload(Buffer&& payload, uint32_t timestamp) override; int SampleRateHz() const override; int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override; diff --git a/modules/audio_coding/codecs/g722/audio_encoder_g722.cc b/modules/audio_coding/codecs/g722/audio_encoder_g722.cc index b7d34ba581..699aeb47d1 100644 --- a/modules/audio_coding/codecs/g722/audio_encoder_g722.cc +++ b/modules/audio_coding/codecs/g722/audio_encoder_g722.cc @@ -79,7 +79,7 @@ void AudioEncoderG722Impl::Reset() { RTC_CHECK_EQ(0, WebRtcG722_EncoderInit(encoders_[i].encoder)); } -absl::optional> +std::optional> AudioEncoderG722Impl::GetFrameLengthRange() const { return {{TimeDelta::Millis(num_10ms_frames_per_packet_ * 10), TimeDelta::Millis(num_10ms_frames_per_packet_ * 10)}}; @@ -87,8 +87,8 @@ AudioEncoderG722Impl::GetFrameLengthRange() const { AudioEncoder::EncodedInfo AudioEncoderG722Impl::EncodeImpl( uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) { + ArrayView audio, + Buffer* encoded) { if (num_10ms_frames_buffered_ == 0) first_timestamp_in_buffer_ = rtp_timestamp; @@ -116,8 +116,8 @@ AudioEncoder::EncodedInfo AudioEncoderG722Impl::EncodeImpl( const size_t bytes_to_encode = samples_per_channel / 2 * num_channels_; EncodedInfo info; - info.encoded_bytes = encoded->AppendData( - bytes_to_encode, [&](rtc::ArrayView encoded) { + info.encoded_bytes = + encoded->AppendData(bytes_to_encode, [&](ArrayView encoded) { // Interleave the encoded bytes of the different channels. Each separate // channel and the interleaved stream encodes two samples per byte, most // significant half first. 
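Both the G.711 and G.722 encoder hunks route their output through Buffer::AppendData(size, setter): the buffer reserves size extra bytes, hands the setter a writable ArrayView over exactly that region, and keeps as many bytes as the setter reports having written. A hedged sketch of the pattern; WriteFrame is a hypothetical encoder call, not a real API:

#include <cstdint>
#include "api/array_view.h"
#include "rtc_base/buffer.h"

// Hypothetical encoder call that fills `out` and returns the number of bytes
// it produced (<= out.size()). Assumption for illustration only.
size_t WriteFrame(webrtc::ArrayView<uint8_t> out);

void AppendEncodedFrame(size_t max_bytes, webrtc::Buffer* encoded) {
  encoded->AppendData(max_bytes, [&](webrtc::ArrayView<uint8_t> out) {
    // Whatever the lambda returns becomes the number of bytes actually kept.
    return WriteFrame(out);
  });
}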
diff --git a/modules/audio_coding/codecs/g722/audio_encoder_g722.h b/modules/audio_coding/codecs/g722/audio_encoder_g722.h index a932aa8b7d..8a584e26e8 100644 --- a/modules/audio_coding/codecs/g722/audio_encoder_g722.h +++ b/modules/audio_coding/codecs/g722/audio_encoder_g722.h @@ -12,9 +12,9 @@ #define MODULES_AUDIO_CODING_CODECS_G722_AUDIO_ENCODER_G722_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/g722/audio_encoder_g722_config.h" #include "api/units/time_delta.h" @@ -38,20 +38,20 @@ class AudioEncoderG722Impl final : public AudioEncoder { size_t Max10MsFramesInAPacket() const override; int GetTargetBitrate() const override; void Reset() override; - absl::optional> GetFrameLengthRange() + std::optional> GetFrameLengthRange() const override; protected: EncodedInfo EncodeImpl(uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) override; + ArrayView audio, + Buffer* encoded) override; private: // The encoder state for one channel. struct EncoderState { G722EncInst* encoder; std::unique_ptr speech_buffer; // Queued up for encoding. - rtc::Buffer encoded_buffer; // Already encoded. + Buffer encoded_buffer; // Already encoded. EncoderState(); ~EncoderState(); }; @@ -64,7 +64,7 @@ class AudioEncoderG722Impl final : public AudioEncoder { size_t num_10ms_frames_buffered_; uint32_t first_timestamp_in_buffer_; const std::unique_ptr encoders_; - rtc::Buffer interleave_buffer_; + Buffer interleave_buffer_; }; } // namespace webrtc diff --git a/modules/audio_coding/codecs/g722/g722_interface.c b/modules/audio_coding/codecs/g722/g722_interface.c index 36ee6d92be..0744e99cf5 100644 --- a/modules/audio_coding/codecs/g722/g722_interface.c +++ b/modules/audio_coding/codecs/g722/g722_interface.c @@ -8,60 +8,56 @@ * be found in the AUTHORS file in the root of the source tree. 
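These encoder changes also migrate GetFrameLengthRange() from absl::optional to std::optional. For the fixed-frame G.711 and G.722 encoders the returned std::optional<std::pair<TimeDelta, TimeDelta>> is a degenerate range whose endpoints are the same multiple of 10 ms. A sketch of how a caller might read it:

#include <optional>
#include <utility>
#include "api/audio_codecs/audio_encoder.h"
#include "api/units/time_delta.h"

// Sketch: read the frame-length range advertised by an encoder.
webrtc::TimeDelta MaxFrameLengthOrZero(const webrtc::AudioEncoder& encoder) {
  std::optional<std::pair<webrtc::TimeDelta, webrtc::TimeDelta>> range =
      encoder.GetFrameLengthRange();
  // For AudioEncoderPcm and AudioEncoderG722Impl both endpoints are equal,
  // e.g. 20 ms when two 10 ms frames are packed into one packet.
  return range.has_value() ? range->second : webrtc::TimeDelta::Zero();
}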
*/ +#include "modules/audio_coding/codecs/g722/g722_interface.h" + #include #include -#include "modules/audio_coding/codecs/g722/g722_interface.h" #include "modules/third_party/g722/g722_enc_dec.h" -int16_t WebRtcG722_CreateEncoder(G722EncInst **G722enc_inst) -{ - *G722enc_inst=(G722EncInst*)malloc(sizeof(G722EncoderState)); - if (*G722enc_inst!=NULL) { - return(0); - } else { - return(-1); - } +int16_t WebRtcG722_CreateEncoder(G722EncInst** G722enc_inst) { + *G722enc_inst = (G722EncInst*)malloc(sizeof(G722EncoderState)); + if (*G722enc_inst != NULL) { + return (0); + } else { + return (-1); + } } -int16_t WebRtcG722_EncoderInit(G722EncInst *G722enc_inst) -{ - // Create and/or reset the G.722 encoder - // Bitrate 64 kbps and wideband mode (2) - G722enc_inst = (G722EncInst *) WebRtc_g722_encode_init( - (G722EncoderState*) G722enc_inst, 64000, 2); - if (G722enc_inst == NULL) { - return -1; - } else { - return 0; - } +int16_t WebRtcG722_EncoderInit(G722EncInst* G722enc_inst) { + // Create and/or reset the G.722 encoder + // Bitrate 64 kbps and wideband mode (2) + G722enc_inst = (G722EncInst*)WebRtc_g722_encode_init( + (G722EncoderState*)G722enc_inst, 64000, 2); + if (G722enc_inst == NULL) { + return -1; + } else { + return 0; + } } -int WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst) -{ - // Free encoder memory - return WebRtc_g722_encode_release((G722EncoderState*) G722enc_inst); +int WebRtcG722_FreeEncoder(G722EncInst* G722enc_inst) { + // Free encoder memory + return WebRtc_g722_encode_release((G722EncoderState*)G722enc_inst); } -size_t WebRtcG722_Encode(G722EncInst *G722enc_inst, +size_t WebRtcG722_Encode(G722EncInst* G722enc_inst, const int16_t* speechIn, size_t len, - uint8_t* encoded) -{ - unsigned char *codechar = (unsigned char*) encoded; - // Encode the input speech vector - return WebRtc_g722_encode((G722EncoderState*) G722enc_inst, codechar, - speechIn, len); + uint8_t* encoded) { + unsigned char* codechar = (unsigned char*)encoded; + // Encode the input speech vector + return WebRtc_g722_encode((G722EncoderState*)G722enc_inst, codechar, speechIn, + len); } -int16_t WebRtcG722_CreateDecoder(G722DecInst **G722dec_inst) -{ - *G722dec_inst=(G722DecInst*)malloc(sizeof(G722DecoderState)); - if (*G722dec_inst!=NULL) { - return(0); - } else { - return(-1); - } +int16_t WebRtcG722_CreateDecoder(G722DecInst** G722dec_inst) { + *G722dec_inst = (G722DecInst*)malloc(sizeof(G722DecoderState)); + if (*G722dec_inst != NULL) { + return (0); + } else { + return (-1); + } } void WebRtcG722_DecoderInit(G722DecInst* inst) { @@ -70,35 +66,29 @@ void WebRtcG722_DecoderInit(G722DecInst* inst) { WebRtc_g722_decode_init((G722DecoderState*)inst, 64000, 2); } -int WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst) -{ - // Free encoder memory - return WebRtc_g722_decode_release((G722DecoderState*) G722dec_inst); +int WebRtcG722_FreeDecoder(G722DecInst* G722dec_inst) { + // Free encoder memory + return WebRtc_g722_decode_release((G722DecoderState*)G722dec_inst); } -size_t WebRtcG722_Decode(G722DecInst *G722dec_inst, - const uint8_t *encoded, +size_t WebRtcG722_Decode(G722DecInst* G722dec_inst, + const uint8_t* encoded, size_t len, - int16_t *decoded, - int16_t *speechType) -{ - // Decode the G.722 encoder stream - *speechType=G722_WEBRTC_SPEECH; - return WebRtc_g722_decode((G722DecoderState*) G722dec_inst, decoded, - encoded, len); + int16_t* decoded, + int16_t* speechType) { + // Decode the G.722 encoder stream + *speechType = G722_WEBRTC_SPEECH; + return WebRtc_g722_decode((G722DecoderState*)G722dec_inst, 
decoded, encoded, + len); } -int16_t WebRtcG722_Version(char *versionStr, short len) -{ - // Get version string - char version[30] = "2.0.0\n"; - if (strlen(version) < (unsigned int)len) - { - strcpy(versionStr, version); - return 0; - } - else - { - return -1; - } +int16_t WebRtcG722_Version(char* versionStr, short len) { + // Get version string + char version[30] = "2.0.0\n"; + if (strlen(version) < (unsigned int)len) { + strcpy(versionStr, version); + return 0; + } else { + return -1; + } } diff --git a/modules/audio_coding/codecs/ilbc/abs_quant.c b/modules/audio_coding/codecs/ilbc/abs_quant.c deleted file mode 100644 index 77da78ba7f..0000000000 --- a/modules/audio_coding/codecs/ilbc/abs_quant.c +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_AbsQuant.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/abs_quant.h" - -#include "modules/audio_coding/codecs/ilbc/abs_quant_loop.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - - -/*----------------------------------------------------------------* - * predictive noise shaping encoding of scaled start state - * (subrutine for WebRtcIlbcfix_StateSearch) - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_AbsQuant( - IlbcEncoder *iLBCenc_inst, - /* (i) Encoder instance */ - iLBC_bits *iLBC_encbits, /* (i/o) Encoded bits (outputs idxForMax - and idxVec, uses state_first as - input) */ - int16_t *in, /* (i) vector to encode */ - int16_t *weightDenum /* (i) denominator of synthesis filter */ - ) { - int16_t *syntOut; - size_t quantLen[2]; - - /* Stack based */ - int16_t syntOutBuf[LPC_FILTERORDER+STATE_SHORT_LEN_30MS]; - int16_t in_weightedVec[STATE_SHORT_LEN_30MS+LPC_FILTERORDER]; - int16_t *in_weighted = &in_weightedVec[LPC_FILTERORDER]; - - /* Initialize the buffers */ - WebRtcSpl_MemSetW16(syntOutBuf, 0, LPC_FILTERORDER+STATE_SHORT_LEN_30MS); - syntOut = &syntOutBuf[LPC_FILTERORDER]; - /* Start with zero state */ - WebRtcSpl_MemSetW16(in_weightedVec, 0, LPC_FILTERORDER); - - /* Perform the quantization loop in two sections of length quantLen[i], - where the perceptual weighting filter is updated at the subframe - border */ - - if (iLBC_encbits->state_first) { - quantLen[0]=SUBL; - quantLen[1]=iLBCenc_inst->state_short_len-SUBL; - } else { - quantLen[0]=iLBCenc_inst->state_short_len-SUBL; - quantLen[1]=SUBL; - } - - /* Calculate the weighted residual, switch perceptual weighting - filter at the subframe border */ - WebRtcSpl_FilterARFastQ12( - in, in_weighted, - weightDenum, LPC_FILTERORDER+1, quantLen[0]); - WebRtcSpl_FilterARFastQ12( - &in[quantLen[0]], &in_weighted[quantLen[0]], - &weightDenum[LPC_FILTERORDER+1], LPC_FILTERORDER+1, quantLen[1]); - - WebRtcIlbcfix_AbsQuantLoop( - syntOut, - in_weighted, - weightDenum, - quantLen, - iLBC_encbits->idxVec); - -} diff --git a/modules/audio_coding/codecs/ilbc/abs_quant.h 
b/modules/audio_coding/codecs/ilbc/abs_quant.h deleted file mode 100644 index 4a3f004ed3..0000000000 --- a/modules/audio_coding/codecs/ilbc/abs_quant.h +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_AbsQuant.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_H_ - -#include -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * predictive noise shaping encoding of scaled start state - * (subrutine for WebRtcIlbcfix_StateSearch) - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_AbsQuant( - IlbcEncoder* iLBCenc_inst, - /* (i) Encoder instance */ - iLBC_bits* iLBC_encbits, /* (i/o) Encoded bits (outputs idxForMax - and idxVec, uses state_first as - input) */ - int16_t* in, /* (i) vector to encode */ - int16_t* weightDenum /* (i) denominator of synthesis filter */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/abs_quant_loop.c b/modules/audio_coding/codecs/ilbc/abs_quant_loop.c deleted file mode 100644 index cf9266299d..0000000000 --- a/modules/audio_coding/codecs/ilbc/abs_quant_loop.c +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_AbsQuantLoop.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/abs_quant_loop.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/sort_sq.h" - -void WebRtcIlbcfix_AbsQuantLoop(int16_t *syntOutIN, int16_t *in_weightedIN, - int16_t *weightDenumIN, size_t *quantLenIN, - int16_t *idxVecIN ) { - size_t k1, k2; - int16_t index; - int32_t toQW32; - int32_t toQ32; - int16_t tmp16a; - int16_t xq; - - int16_t *syntOut = syntOutIN; - int16_t *in_weighted = in_weightedIN; - int16_t *weightDenum = weightDenumIN; - size_t *quantLen = quantLenIN; - int16_t *idxVec = idxVecIN; - - for(k1=0;k1<2;k1++) { - for(k2=0;k2 32767) { - toQ32 = (int32_t) 32767; - } else if (toQ32 < -32768) { - toQ32 = (int32_t) -32768; - } - - /* Quantize the state */ - if (toQW32<(-7577)) { - /* To prevent negative overflow */ - index=0; - } else if (toQW32>8151) { - /* To prevent positive overflow */ - index=7; - } else { - /* Find the best quantization index - (state_sq3Tbl is in Q13 and toQ is in Q11) - */ - WebRtcIlbcfix_SortSq(&xq, &index, - (int16_t)toQ32, - WebRtcIlbcfix_kStateSq3, 8); - } - - /* Store selected index */ - (*idxVec++) = index; - - /* Compute decoded sample and update of the prediction filter */ - tmp16a = ((WebRtcIlbcfix_kStateSq3[index] + 2 ) >> 2); - - *syntOut = (int16_t) (tmp16a + (int32_t)(*in_weighted) - toQW32); - - syntOut++; in_weighted++; - } - /* Update perceptual weighting filter at subframe border */ - weightDenum += 11; - } -} diff --git a/modules/audio_coding/codecs/ilbc/abs_quant_loop.h b/modules/audio_coding/codecs/ilbc/abs_quant_loop.h deleted file mode 100644 index 841d73b9fb..0000000000 --- a/modules/audio_coding/codecs/ilbc/abs_quant_loop.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_AbsQuantLoop.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_LOOP_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_LOOP_H_ - -#include -#include - -/*----------------------------------------------------------------* - * predictive noise shaping encoding of scaled start state - * (subrutine for WebRtcIlbcfix_StateSearch) - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_AbsQuantLoop(int16_t* syntOutIN, - int16_t* in_weightedIN, - int16_t* weightDenumIN, - size_t* quantLenIN, - int16_t* idxVecIN); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.cc b/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.cc deleted file mode 100644 index 57b5abbe23..0000000000 --- a/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.cc +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. 
All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h" - -#include -#include - -#include "modules/audio_coding/codecs/ilbc/ilbc.h" -#include "modules/audio_coding/codecs/legacy_encoded_audio_frame.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -AudioDecoderIlbcImpl::AudioDecoderIlbcImpl() { - WebRtcIlbcfix_DecoderCreate(&dec_state_); - WebRtcIlbcfix_Decoderinit30Ms(dec_state_); -} - -AudioDecoderIlbcImpl::~AudioDecoderIlbcImpl() { - WebRtcIlbcfix_DecoderFree(dec_state_); -} - -bool AudioDecoderIlbcImpl::HasDecodePlc() const { - return true; -} - -int AudioDecoderIlbcImpl::DecodeInternal(const uint8_t* encoded, - size_t encoded_len, - int sample_rate_hz, - int16_t* decoded, - SpeechType* speech_type) { - RTC_DCHECK_EQ(sample_rate_hz, 8000); - int16_t temp_type = 1; // Default is speech. - int ret = WebRtcIlbcfix_Decode(dec_state_, encoded, encoded_len, decoded, - &temp_type); - *speech_type = ConvertSpeechType(temp_type); - return ret; -} - -size_t AudioDecoderIlbcImpl::DecodePlc(size_t num_frames, int16_t* decoded) { - return WebRtcIlbcfix_NetEqPlc(dec_state_, decoded, num_frames); -} - -void AudioDecoderIlbcImpl::Reset() { - WebRtcIlbcfix_Decoderinit30Ms(dec_state_); -} - -std::vector AudioDecoderIlbcImpl::ParsePayload( - rtc::Buffer&& payload, - uint32_t timestamp) { - std::vector results; - size_t bytes_per_frame; - int timestamps_per_frame; - if (payload.size() >= 950) { - RTC_LOG(LS_WARNING) - << "AudioDecoderIlbcImpl::ParsePayload: Payload too large"; - return results; - } - if (payload.size() % 38 == 0) { - // 20 ms frames. - bytes_per_frame = 38; - timestamps_per_frame = 160; - } else if (payload.size() % 50 == 0) { - // 30 ms frames. - bytes_per_frame = 50; - timestamps_per_frame = 240; - } else { - RTC_LOG(LS_WARNING) - << "AudioDecoderIlbcImpl::ParsePayload: Invalid payload"; - return results; - } - - RTC_DCHECK_EQ(0, payload.size() % bytes_per_frame); - if (payload.size() == bytes_per_frame) { - std::unique_ptr frame( - new LegacyEncodedAudioFrame(this, std::move(payload))); - results.emplace_back(timestamp, 0, std::move(frame)); - } else { - size_t byte_offset; - uint32_t timestamp_offset; - for (byte_offset = 0, timestamp_offset = 0; byte_offset < payload.size(); - byte_offset += bytes_per_frame, - timestamp_offset += timestamps_per_frame) { - std::unique_ptr frame(new LegacyEncodedAudioFrame( - this, rtc::Buffer(payload.data() + byte_offset, bytes_per_frame))); - results.emplace_back(timestamp + timestamp_offset, 0, std::move(frame)); - } - } - - return results; -} - -int AudioDecoderIlbcImpl::SampleRateHz() const { - return 8000; -} - -size_t AudioDecoderIlbcImpl::Channels() const { - return 1; -} - -} // namespace webrtc diff --git a/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h b/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h deleted file mode 100644 index 46ba755148..0000000000 --- a/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. 
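The deleted AudioDecoderIlbcImpl::ParsePayload() infers the frame size from the payload length: multiples of 38 bytes become 20 ms (160-timestamp) frames, multiples of 50 bytes become 30 ms (240-timestamp) frames, and anything else, or anything of 950 bytes or more, is rejected. A 114-byte payload, for example, splits into three 20 ms frames at timestamp offsets 0, 160 and 320. A sketch of that classification with hypothetical names:

#include <cstddef>
#include <optional>

struct IlbcFrameInfo {
  size_t bytes_per_frame;
  int timestamps_per_frame;
};

// Sketch of the deleted splitting rule, not the project's code.
std::optional<IlbcFrameInfo> ClassifyIlbcPayload(size_t payload_size) {
  if (payload_size >= 950) return std::nullopt;               // Too large.
  if (payload_size % 38 == 0) return IlbcFrameInfo{38, 160};  // 20 ms frames.
  if (payload_size % 50 == 0) return IlbcFrameInfo{50, 240};  // 30 ms frames.
  return std::nullopt;                                        // Invalid payload.
}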
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_DECODER_ILBC_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_DECODER_ILBC_H_ - -#include -#include - -#include - -#include "api/audio_codecs/audio_decoder.h" -#include "rtc_base/buffer.h" - -typedef struct iLBC_decinst_t_ IlbcDecoderInstance; - -namespace webrtc { - -class AudioDecoderIlbcImpl final : public AudioDecoder { - public: - AudioDecoderIlbcImpl(); - ~AudioDecoderIlbcImpl() override; - - AudioDecoderIlbcImpl(const AudioDecoderIlbcImpl&) = delete; - AudioDecoderIlbcImpl& operator=(const AudioDecoderIlbcImpl&) = delete; - - bool HasDecodePlc() const override; - size_t DecodePlc(size_t num_frames, int16_t* decoded) override; - void Reset() override; - std::vector ParsePayload(rtc::Buffer&& payload, - uint32_t timestamp) override; - int SampleRateHz() const override; - size_t Channels() const override; - - protected: - int DecodeInternal(const uint8_t* encoded, - size_t encoded_len, - int sample_rate_hz, - int16_t* decoded, - SpeechType* speech_type) override; - - private: - IlbcDecoderInstance* dec_state_; -}; - -} // namespace webrtc -#endif // MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_DECODER_ILBC_H_ diff --git a/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc b/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc deleted file mode 100644 index 9fbf42ceeb..0000000000 --- a/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h" - -#include -#include - -#include "modules/audio_coding/codecs/ilbc/ilbc.h" -#include "rtc_base/checks.h" -#include "rtc_base/numerics/safe_conversions.h" - -namespace webrtc { - -namespace { - -const int kSampleRateHz = 8000; - -int GetIlbcBitrate(int ptime) { - switch (ptime) { - case 20: - case 40: - // 38 bytes per frame of 20 ms => 15200 bits/s. - return 15200; - case 30: - case 60: - // 50 bytes per frame of 30 ms => (approx) 13333 bits/s. 
- return 13333; - default: - RTC_CHECK_NOTREACHED(); - } -} - -} // namespace - -AudioEncoderIlbcImpl::AudioEncoderIlbcImpl(const AudioEncoderIlbcConfig& config, - int payload_type) - : frame_size_ms_(config.frame_size_ms), - payload_type_(payload_type), - num_10ms_frames_per_packet_( - static_cast(config.frame_size_ms / 10)), - encoder_(nullptr) { - RTC_CHECK(config.IsOk()); - Reset(); -} - -AudioEncoderIlbcImpl::~AudioEncoderIlbcImpl() { - RTC_CHECK_EQ(0, WebRtcIlbcfix_EncoderFree(encoder_)); -} - -int AudioEncoderIlbcImpl::SampleRateHz() const { - return kSampleRateHz; -} - -size_t AudioEncoderIlbcImpl::NumChannels() const { - return 1; -} - -size_t AudioEncoderIlbcImpl::Num10MsFramesInNextPacket() const { - return num_10ms_frames_per_packet_; -} - -size_t AudioEncoderIlbcImpl::Max10MsFramesInAPacket() const { - return num_10ms_frames_per_packet_; -} - -int AudioEncoderIlbcImpl::GetTargetBitrate() const { - return GetIlbcBitrate(rtc::dchecked_cast(num_10ms_frames_per_packet_) * - 10); -} - -AudioEncoder::EncodedInfo AudioEncoderIlbcImpl::EncodeImpl( - uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) { - // Save timestamp if starting a new packet. - if (num_10ms_frames_buffered_ == 0) - first_timestamp_in_buffer_ = rtp_timestamp; - - // Buffer input. - std::copy(audio.cbegin(), audio.cend(), - input_buffer_ + kSampleRateHz / 100 * num_10ms_frames_buffered_); - - // If we don't yet have enough buffered input for a whole packet, we're done - // for now. - if (++num_10ms_frames_buffered_ < num_10ms_frames_per_packet_) { - return EncodedInfo(); - } - - // Encode buffered input. - RTC_DCHECK_EQ(num_10ms_frames_buffered_, num_10ms_frames_per_packet_); - num_10ms_frames_buffered_ = 0; - size_t encoded_bytes = encoded->AppendData( - RequiredOutputSizeBytes(), [&](rtc::ArrayView encoded) { - const int r = WebRtcIlbcfix_Encode( - encoder_, input_buffer_, - kSampleRateHz / 100 * num_10ms_frames_per_packet_, encoded.data()); - RTC_CHECK_GE(r, 0); - - return static_cast(r); - }); - - RTC_DCHECK_EQ(encoded_bytes, RequiredOutputSizeBytes()); - - EncodedInfo info; - info.encoded_bytes = encoded_bytes; - info.encoded_timestamp = first_timestamp_in_buffer_; - info.payload_type = payload_type_; - info.encoder_type = CodecType::kIlbc; - return info; -} - -void AudioEncoderIlbcImpl::Reset() { - if (encoder_) - RTC_CHECK_EQ(0, WebRtcIlbcfix_EncoderFree(encoder_)); - RTC_CHECK_EQ(0, WebRtcIlbcfix_EncoderCreate(&encoder_)); - const int encoder_frame_size_ms = - frame_size_ms_ > 30 ? frame_size_ms_ / 2 : frame_size_ms_; - RTC_CHECK_EQ(0, WebRtcIlbcfix_EncoderInit(encoder_, encoder_frame_size_ms)); - num_10ms_frames_buffered_ = 0; -} - -absl::optional> -AudioEncoderIlbcImpl::GetFrameLengthRange() const { - return {{TimeDelta::Millis(num_10ms_frames_per_packet_ * 10), - TimeDelta::Millis(num_10ms_frames_per_packet_ * 10)}}; -} - -size_t AudioEncoderIlbcImpl::RequiredOutputSizeBytes() const { - switch (num_10ms_frames_per_packet_) { - case 2: - return 38; - case 3: - return 50; - case 4: - return 2 * 38; - case 6: - return 2 * 50; - default: - RTC_CHECK_NOTREACHED(); - } -} - -} // namespace webrtc diff --git a/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h b/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h deleted file mode 100644 index c8dfa2ca6d..0000000000 --- a/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. 
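The deleted encoder's constants are internally consistent: 38 bytes per 20 ms frame and 50 bytes per 30 ms frame give the bitrates in GetIlbcBitrate(), and the 40 ms and 60 ms ptimes in RequiredOutputSizeBytes() simply pack two such frames per packet at the same rate. A quick arithmetic check (sketch):

// 38 bytes / 20 ms and 50 bytes / 30 ms, expressed in bits per second.
static_assert(38 * 8 * 1000 / 20 == 15200, "20/40 ms ptime -> 15200 bps");
static_assert(50 * 8 * 1000 / 30 == 13333, "30/60 ms ptime -> ~13333 bps");
// Packet sizes for 2, 3, 4 and 6 buffered 10 ms frames, as in
// RequiredOutputSizeBytes() above.
static_assert(2 * 38 == 76 && 2 * 50 == 100, "two frames per packet");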
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_ - -#include -#include - -#include - -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_encoder.h" -#include "api/audio_codecs/ilbc/audio_encoder_ilbc_config.h" -#include "api/units/time_delta.h" -#include "modules/audio_coding/codecs/ilbc/ilbc.h" - -namespace webrtc { - -class AudioEncoderIlbcImpl final : public AudioEncoder { - public: - AudioEncoderIlbcImpl(const AudioEncoderIlbcConfig& config, int payload_type); - ~AudioEncoderIlbcImpl() override; - - AudioEncoderIlbcImpl(const AudioEncoderIlbcImpl&) = delete; - AudioEncoderIlbcImpl& operator=(const AudioEncoderIlbcImpl&) = delete; - - int SampleRateHz() const override; - size_t NumChannels() const override; - size_t Num10MsFramesInNextPacket() const override; - size_t Max10MsFramesInAPacket() const override; - int GetTargetBitrate() const override; - EncodedInfo EncodeImpl(uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) override; - void Reset() override; - absl::optional> GetFrameLengthRange() - const override; - - private: - size_t RequiredOutputSizeBytes() const; - - static constexpr size_t kMaxSamplesPerPacket = 480; - const int frame_size_ms_; - const int payload_type_; - const size_t num_10ms_frames_per_packet_; - size_t num_10ms_frames_buffered_; - uint32_t first_timestamp_in_buffer_; - int16_t input_buffer_[kMaxSamplesPerPacket]; - IlbcEncoderInstance* encoder_; -}; - -} // namespace webrtc -#endif // MODULES_AUDIO_CODING_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_ diff --git a/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c b/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c deleted file mode 100644 index c915a2f9f0..0000000000 --- a/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_AugmentedCbCorr.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/augmented_cb_corr.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -void WebRtcIlbcfix_AugmentedCbCorr( - int16_t *target, /* (i) Target vector */ - int16_t *buffer, /* (i) Memory buffer */ - int16_t *interpSamples, /* (i) buffer with - interpolated samples */ - int32_t *crossDot, /* (o) The cross correlation between - the target and the Augmented - vector */ - size_t low, /* (i) Lag to start from (typically - 20) */ - size_t high, /* (i) Lag to end at (typically 39) */ - int scale) /* (i) Scale factor to use for - the crossDot */ -{ - size_t lagcount; - size_t ilow; - int16_t *targetPtr; - int32_t *crossDotPtr; - int16_t *iSPtr=interpSamples; - - /* Calculate the correlation between the target and the - interpolated codebook. The correlation is calculated in - 3 sections with the interpolated part in the middle */ - crossDotPtr=crossDot; - for (lagcount=low; lagcount<=high; lagcount++) { - - ilow = lagcount - 4; - - /* Compute dot product for the first (lagcount-4) samples */ - (*crossDotPtr) = WebRtcSpl_DotProductWithScale(target, buffer-lagcount, ilow, scale); - - /* Compute dot product on the interpolated samples */ - (*crossDotPtr) += WebRtcSpl_DotProductWithScale(target+ilow, iSPtr, 4, scale); - targetPtr = target + lagcount; - iSPtr += lagcount-ilow; - - /* Compute dot product for the remaining samples */ - (*crossDotPtr) += WebRtcSpl_DotProductWithScale(targetPtr, buffer-lagcount, SUBL-lagcount, scale); - crossDotPtr++; - } -} diff --git a/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h b/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h deleted file mode 100644 index 2e9612e51a..0000000000 --- a/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_AugmentedCbCorr.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_AUGMENTED_CB_CORR_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_AUGMENTED_CB_CORR_H_ - -#include -#include - -/*----------------------------------------------------------------* - * Calculate correlation between target and Augmented codebooks - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_AugmentedCbCorr( - int16_t* target, /* (i) Target vector */ - int16_t* buffer, /* (i) Memory buffer */ - int16_t* interpSamples, /* (i) buffer with - interpolated samples */ - int32_t* crossDot, /* (o) The cross correlation between - the target and the Augmented - vector */ - size_t low, /* (i) Lag to start from (typically - 20) */ - size_t high, /* (i) Lag to end at (typically 39 */ - int scale); /* (i) Scale factor to use for the crossDot */ - -#endif diff --git a/modules/audio_coding/codecs/ilbc/bw_expand.c b/modules/audio_coding/codecs/ilbc/bw_expand.c deleted file mode 100644 index 1a9b882adf..0000000000 --- a/modules/audio_coding/codecs/ilbc/bw_expand.c +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_BwExpand.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/bw_expand.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * lpc bandwidth expansion - *---------------------------------------------------------------*/ - -/* The output is in the same domain as the input */ -void WebRtcIlbcfix_BwExpand( - int16_t *out, /* (o) the bandwidth expanded lpc coefficients */ - int16_t *in, /* (i) the lpc coefficients before bandwidth - expansion */ - int16_t *coef, /* (i) the bandwidth expansion factor Q15 */ - int16_t length /* (i) the length of lpc coefficient vectors */ - ) { - int i; - - out[0] = in[0]; - for (i = 1; i < length; i++) { - /* out[i] = coef[i] * in[i] with rounding. - in[] and out[] are in Q12 and coef[] is in Q15 - */ - out[i] = (int16_t)((coef[i] * in[i] + 16384) >> 15); - } -} diff --git a/modules/audio_coding/codecs/ilbc/bw_expand.h b/modules/audio_coding/codecs/ilbc/bw_expand.h deleted file mode 100644 index 022c113dda..0000000000 --- a/modules/audio_coding/codecs/ilbc/bw_expand.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
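The deleted WebRtcIlbcfix_BwExpand() is a plain fixed-point bandwidth expansion: LPC coefficients in Q12 are multiplied by chirp factors in Q15, and each product is rounded back to Q12 by adding 16384 (half of 2^15) before shifting. A sketch of that single rounding step with illustrative values:

#include <cstdint>

// Q15 * Q12 -> Q12 with rounding, as in the deleted BwExpand loop (sketch).
constexpr int16_t MulQ15Q12RoundToQ12(int16_t coef_q15, int16_t in_q12) {
  return static_cast<int16_t>((coef_q15 * in_q12 + (1 << 14)) >> 15);
}

// Example: a chirp factor of 0.9 (29491 in Q15) applied to a coefficient of
// 1.0 (4096 in Q12) yields roughly 0.9 in Q12 (3686).
static_assert(MulQ15Q12RoundToQ12(29491, 4096) == 3686,
              "0.9 * 1.0 == ~0.9 in Q12");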
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_BwExpand.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_BW_EXPAND_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_BW_EXPAND_H_ - -#include -#include - -/*----------------------------------------------------------------* - * lpc bandwidth expansion - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_BwExpand( - int16_t* out, /* (o) the bandwidth expanded lpc coefficients */ - int16_t* in, /* (i) the lpc coefficients before bandwidth - expansion */ - int16_t* coef, /* (i) the bandwidth expansion factor Q15 */ - int16_t length /* (i) the length of lpc coefficient vectors */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/cb_construct.c b/modules/audio_coding/codecs/ilbc/cb_construct.c deleted file mode 100644 index 1e9a7040c7..0000000000 --- a/modules/audio_coding/codecs/ilbc/cb_construct.c +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CbConstruct.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/cb_construct.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/gain_dequant.h" -#include "modules/audio_coding/codecs/ilbc/get_cd_vec.h" -#include "rtc_base/sanitizer.h" - -// An arithmetic operation that is allowed to overflow. (It's still undefined -// behavior, so not a good idea; this just makes UBSan ignore the violation, so -// that our old code can continue to do what it's always been doing.) -static inline int32_t RTC_NO_SANITIZE("signed-integer-overflow") - OverflowingAddS32S32ToS32(int32_t a, int32_t b) { - return a + b; -} - -/*----------------------------------------------------------------* - * Construct decoded vector from codebook and gains. - *---------------------------------------------------------------*/ - -bool WebRtcIlbcfix_CbConstruct( - int16_t* decvector, /* (o) Decoded vector */ - const int16_t* index, /* (i) Codebook indices */ - const int16_t* gain_index, /* (i) Gain quantization indices */ - int16_t* mem, /* (i) Buffer for codevector construction */ - size_t lMem, /* (i) Length of buffer */ - size_t veclen) { /* (i) Length of vector */ - size_t j; - int16_t gain[CB_NSTAGES]; - /* Stack based */ - int16_t cbvec0[SUBL]; - int16_t cbvec1[SUBL]; - int16_t cbvec2[SUBL]; - int32_t a32; - int16_t *gainPtr; - - /* gain de-quantization */ - - gain[0] = WebRtcIlbcfix_GainDequant(gain_index[0], 16384, 0); - gain[1] = WebRtcIlbcfix_GainDequant(gain_index[1], gain[0], 1); - gain[2] = WebRtcIlbcfix_GainDequant(gain_index[2], gain[1], 2); - - /* codebook vector construction and construction of total vector */ - - /* Stack based */ - if (!WebRtcIlbcfix_GetCbVec(cbvec0, mem, (size_t)index[0], lMem, veclen)) - return false; // Failure. 
- if (!WebRtcIlbcfix_GetCbVec(cbvec1, mem, (size_t)index[1], lMem, veclen)) - return false; // Failure. - if (!WebRtcIlbcfix_GetCbVec(cbvec2, mem, (size_t)index[2], lMem, veclen)) - return false; // Failure. - - gainPtr = &gain[0]; - for (j=0;j> 14); - } - - return true; // Success. -} diff --git a/modules/audio_coding/codecs/ilbc/cb_construct.h b/modules/audio_coding/codecs/ilbc/cb_construct.h deleted file mode 100644 index 8f7c663164..0000000000 --- a/modules/audio_coding/codecs/ilbc/cb_construct.h +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CbConstruct.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_CONSTRUCT_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_CONSTRUCT_H_ - -#include -#include -#include - -#include "absl/base/attributes.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * Construct decoded vector from codebook and gains. - *---------------------------------------------------------------*/ - -// Returns true on success, false on failure. -ABSL_MUST_USE_RESULT -bool WebRtcIlbcfix_CbConstruct( - int16_t* decvector, /* (o) Decoded vector */ - const int16_t* index, /* (i) Codebook indices */ - const int16_t* gain_index, /* (i) Gain quantization indices */ - int16_t* mem, /* (i) Buffer for codevector construction */ - size_t lMem, /* (i) Length of buffer */ - size_t veclen /* (i) Length of vector */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/cb_mem_energy.c b/modules/audio_coding/codecs/ilbc/cb_mem_energy.c deleted file mode 100644 index 21e4197607..0000000000 --- a/modules/audio_coding/codecs/ilbc/cb_mem_energy.c +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CbMemEnergy.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/cb_mem_energy.h" - -#include "modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * Function WebRtcIlbcfix_CbMemEnergy computes the energy of all - * the vectors in the codebook memory that will be used in the - * following search for the best match. 
- *----------------------------------------------------------------*/ - -void WebRtcIlbcfix_CbMemEnergy( - size_t range, - int16_t *CB, /* (i) The CB memory (1:st section) */ - int16_t *filteredCB, /* (i) The filtered CB memory (2:nd section) */ - size_t lMem, /* (i) Length of the CB memory */ - size_t lTarget, /* (i) Length of the target vector */ - int16_t *energyW16, /* (o) Energy in the CB vectors */ - int16_t *energyShifts, /* (o) Shift value of the energy */ - int scale, /* (i) The scaling of all energy values */ - size_t base_size /* (i) Index to where energy values should be stored */ - ) { - int16_t *ppi, *ppo, *pp; - int32_t energy, tmp32; - - /* Compute the energy and store it in a vector. Also the - * corresponding shift values are stored. The energy values - * are reused in all three stages. */ - - /* Calculate the energy in the first block of 'lTarget' sampels. */ - ppi = CB+lMem-lTarget-1; - ppo = CB+lMem-1; - - pp=CB+lMem-lTarget; - energy = WebRtcSpl_DotProductWithScale( pp, pp, lTarget, scale); - - /* Normalize the energy and store the number of shifts */ - energyShifts[0] = (int16_t)WebRtcSpl_NormW32(energy); - tmp32 = energy << energyShifts[0]; - energyW16[0] = (int16_t)(tmp32 >> 16); - - /* Compute the energy of the rest of the cb memory - * by step wise adding and subtracting the next - * sample and the last sample respectively. */ - WebRtcIlbcfix_CbMemEnergyCalc(energy, range, ppi, ppo, energyW16, energyShifts, scale, 0); - - /* Next, precompute the energy values for the filtered cb section */ - energy=0; - pp=filteredCB+lMem-lTarget; - - energy = WebRtcSpl_DotProductWithScale( pp, pp, lTarget, scale); - - /* Normalize the energy and store the number of shifts */ - energyShifts[base_size] = (int16_t)WebRtcSpl_NormW32(energy); - tmp32 = energy << energyShifts[base_size]; - energyW16[base_size] = (int16_t)(tmp32 >> 16); - - ppi = filteredCB + lMem - 1 - lTarget; - ppo = filteredCB + lMem - 1; - - WebRtcIlbcfix_CbMemEnergyCalc(energy, range, ppi, ppo, energyW16, energyShifts, scale, base_size); -} diff --git a/modules/audio_coding/codecs/ilbc/cb_mem_energy.h b/modules/audio_coding/codecs/ilbc/cb_mem_energy.h deleted file mode 100644 index 15dc884f2a..0000000000 --- a/modules/audio_coding/codecs/ilbc/cb_mem_energy.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
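The deleted codebook-energy routines store each 32-bit energy as a 16-bit mantissa plus a shift count: WebRtcSpl_NormW32() reports how far the value can be shifted left without losing the sign bit, the energy is shifted up by that amount, and the top 16 bits are kept. A sketch of the same normalization using a portable loop instead of the SPL helper:

#include <cstdint>

// Normalize a non-negative 32-bit energy into (mantissa, shift), mirroring the
// energyW16/energyShifts pairs in the deleted code (sketch, not project code).
struct NormalizedEnergy {
  int16_t mantissa;  // Top 16 bits of the shifted energy.
  int16_t shift;     // How far the energy was shifted left.
};

inline NormalizedEnergy NormalizeEnergy(int32_t energy) {
  int16_t shift = 0;
  if (energy > 0) {
    // Shift up until bit 30 is set; WebRtcSpl_NormW32 computes this count
    // directly on most targets.
    while ((energy << shift) < 0x40000000) ++shift;
  }
  const int32_t shifted = energy << shift;
  return {static_cast<int16_t>(shifted >> 16), shift};
}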
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CbMemEnergy.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_H_ - -#include -#include - -void WebRtcIlbcfix_CbMemEnergy( - size_t range, - int16_t* CB, /* (i) The CB memory (1:st section) */ - int16_t* filteredCB, /* (i) The filtered CB memory (2:nd section) */ - size_t lMem, /* (i) Length of the CB memory */ - size_t lTarget, /* (i) Length of the target vector */ - int16_t* energyW16, /* (o) Energy in the CB vectors */ - int16_t* energyShifts, /* (o) Shift value of the energy */ - int scale, /* (i) The scaling of all energy values */ - size_t base_size /* (i) Index to where energy values should be stored */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c b/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c deleted file mode 100644 index 0619bbe422..0000000000 --- a/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CbMemEnergyAugmentation.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -void WebRtcIlbcfix_CbMemEnergyAugmentation( - int16_t *interpSamples, /* (i) The interpolated samples */ - int16_t *CBmem, /* (i) The CB memory */ - int scale, /* (i) The scaling of all energy values */ - size_t base_size, /* (i) Index to where energy values should be stored */ - int16_t *energyW16, /* (o) Energy in the CB vectors */ - int16_t *energyShifts /* (o) Shift value of the energy */ - ){ - int32_t energy, tmp32; - int16_t *ppe, *pp, *interpSamplesPtr; - int16_t *CBmemPtr; - size_t lagcount; - int16_t *enPtr=&energyW16[base_size-20]; - int16_t *enShPtr=&energyShifts[base_size-20]; - int32_t nrjRecursive; - - CBmemPtr = CBmem+147; - interpSamplesPtr = interpSamples; - - /* Compute the energy for the first (low-5) noninterpolated samples */ - nrjRecursive = WebRtcSpl_DotProductWithScale( CBmemPtr-19, CBmemPtr-19, 15, scale); - ppe = CBmemPtr - 20; - - for (lagcount=20; lagcount<=39; lagcount++) { - - /* Update the energy recursively to save complexity */ - nrjRecursive += (*ppe * *ppe) >> scale; - ppe--; - energy = nrjRecursive; - - /* interpolation */ - energy += WebRtcSpl_DotProductWithScale(interpSamplesPtr, interpSamplesPtr, 4, scale); - interpSamplesPtr += 4; - - /* Compute energy for the remaining samples */ - pp = CBmemPtr - lagcount; - energy += WebRtcSpl_DotProductWithScale(pp, pp, SUBL-lagcount, scale); - - /* Normalize the energy and store the number of shifts */ - (*enShPtr) = (int16_t)WebRtcSpl_NormW32(energy); - tmp32 = energy << *enShPtr; - *enPtr = 
(int16_t)(tmp32 >> 16); - enShPtr++; - enPtr++; - } -} diff --git a/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h b/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h deleted file mode 100644 index c489ab54f9..0000000000 --- a/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CbMemEnergyAugmentation.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_AUGMENTATION_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_AUGMENTATION_H_ - -#include -#include - -void WebRtcIlbcfix_CbMemEnergyAugmentation( - int16_t* interpSamples, /* (i) The interpolated samples */ - int16_t* CBmem, /* (i) The CB memory */ - int scale, /* (i) The scaling of all energy values */ - size_t base_size, /* (i) Index to where energy values should be stored */ - int16_t* energyW16, /* (o) Energy in the CB vectors */ - int16_t* energyShifts /* (o) Shift value of the energy */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c b/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c deleted file mode 100644 index 58c0c5fe6d..0000000000 --- a/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CbMemEnergyCalc.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/* Compute the energy of the rest of the cb memory - * by step wise adding and subtracting the next - * sample and the last sample respectively */ -void WebRtcIlbcfix_CbMemEnergyCalc( - int32_t energy, /* (i) input start energy */ - size_t range, /* (i) number of iterations */ - int16_t *ppi, /* (i) input pointer 1 */ - int16_t *ppo, /* (i) input pointer 2 */ - int16_t *energyW16, /* (o) Energy in the CB vectors */ - int16_t *energyShifts, /* (o) Shift value of the energy */ - int scale, /* (i) The scaling of all energy values */ - size_t base_size /* (i) Index to where energy values should be stored */ - ) -{ - size_t j; - int16_t shft; - int32_t tmp; - int16_t *eSh_ptr; - int16_t *eW16_ptr; - - - eSh_ptr = &energyShifts[1+base_size]; - eW16_ptr = &energyW16[1+base_size]; - - for (j = 0; j + 1 < range; j++) { - - /* Calculate next energy by a +/- - operation on the edge samples */ - tmp = (*ppi) * (*ppi) - (*ppo) * (*ppo); - energy += tmp >> scale; - energy = WEBRTC_SPL_MAX(energy, 0); - - ppi--; - ppo--; - - /* Normalize the energy into a int16_t and store - the number of shifts */ - - shft = (int16_t)WebRtcSpl_NormW32(energy); - *eSh_ptr++ = shft; - - tmp = energy << shft; - *eW16_ptr++ = (int16_t)(tmp >> 16); - } -} diff --git a/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h b/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h deleted file mode 100644 index 4b3703182e..0000000000 --- a/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CbMemEnergyCalc.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_CALC_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_CALC_H_ - -#include -#include - -void WebRtcIlbcfix_CbMemEnergyCalc( - int32_t energy, /* (i) input start energy */ - size_t range, /* (i) number of iterations */ - int16_t* ppi, /* (i) input pointer 1 */ - int16_t* ppo, /* (i) input pointer 2 */ - int16_t* energyW16, /* (o) Energy in the CB vectors */ - int16_t* energyShifts, /* (o) Shift value of the energy */ - int scale, /* (i) The scaling of all energy values */ - size_t base_size /* (i) Index to where energy values should be stored */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/cb_search.c b/modules/audio_coding/codecs/ilbc/cb_search.c deleted file mode 100644 index 24b5292354..0000000000 --- a/modules/audio_coding/codecs/ilbc/cb_search.c +++ /dev/null @@ -1,405 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
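The add/subtract recursion in WebRtcIlbcfix_CbMemEnergyCalc above is the classic sliding-window energy update: rather than recomputing a full dot product for every lag, the energy of the previous window is adjusted by the square of the sample entering the window minus the square of the sample leaving it. Below is a plain-C sketch of that idea; WIN_LEN and BUF_LEN are arbitrary example values standing in for lTarget and lMem, and the >> scale right shift of the fixed-point code is omitted.

#include <stdint.h>
#include <stdio.h>

#define WIN_LEN 40   /* window length, standing in for lTarget */
#define BUF_LEN 147  /* buffer length, standing in for lMem    */

int main(void) {
  int16_t buf[BUF_LEN];
  int i;
  for (i = 0; i < BUF_LEN; i++) buf[i] = (int16_t)((i * 7) % 100 - 50);

  /* Energy of the last WIN_LEN samples, computed once with a full sum. */
  int32_t energy = 0;
  for (i = BUF_LEN - WIN_LEN; i < BUF_LEN; i++) {
    energy += (int32_t)buf[i] * buf[i];
  }

  /* Slide the window one step at a time towards the start of the buffer:
     add the square of the sample entering it (ppi in the original code)
     and subtract the square of the sample leaving it (ppo). */
  const int16_t* in = &buf[BUF_LEN - WIN_LEN - 1];
  const int16_t* out = &buf[BUF_LEN - 1];
  int step;
  for (step = 0; step < 5; step++) {
    energy += (int32_t)(*in) * (*in) - (int32_t)(*out) * (*out);
    if (energy < 0) energy = 0;  /* same guard as the fixed-point code */
    in--;
    out--;
    printf("step %d: energy=%d\n", step + 1, (int)energy);
  }
  return 0;
}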
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CbSearch.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/cb_search.h" - -#include "modules/audio_coding/codecs/ilbc/augmented_cb_corr.h" -#include "modules/audio_coding/codecs/ilbc/cb_mem_energy.h" -#include "modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h" -#include "modules/audio_coding/codecs/ilbc/cb_search_core.h" -#include "modules/audio_coding/codecs/ilbc/cb_update_best_index.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/create_augmented_vec.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/energy_inverse.h" -#include "modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h" -#include "modules/audio_coding/codecs/ilbc/gain_quant.h" -#include "modules/audio_coding/codecs/ilbc/interpolate_samples.h" - -/*----------------------------------------------------------------* - * Search routine for codebook encoding and gain quantization. - *----------------------------------------------------------------*/ - -void WebRtcIlbcfix_CbSearch( - IlbcEncoder *iLBCenc_inst, - /* (i) the encoder state structure */ - int16_t *index, /* (o) Codebook indices */ - int16_t *gain_index, /* (o) Gain quantization indices */ - int16_t *intarget, /* (i) Target vector for encoding */ - int16_t *decResidual,/* (i) Decoded residual for codebook construction */ - size_t lMem, /* (i) Length of buffer */ - size_t lTarget, /* (i) Length of vector */ - int16_t *weightDenum,/* (i) weighting filter coefficients in Q12 */ - size_t block /* (i) the subblock number */ - ) { - size_t i, range; - int16_t ii, j, stage; - int16_t *pp; - int16_t tmp; - int scale; - int16_t bits, temp1, temp2; - size_t base_size; - int32_t codedEner, targetEner; - int16_t gains[CB_NSTAGES+1]; - int16_t *cb_vecPtr; - size_t indexOffset, sInd, eInd; - int32_t CritMax=0; - int16_t shTotMax=WEBRTC_SPL_WORD16_MIN; - size_t bestIndex=0; - int16_t bestGain=0; - size_t indexNew; - int16_t CritNewSh; - int32_t CritNew; - int32_t *cDotPtr; - size_t noOfZeros; - int16_t *gainPtr; - int32_t t32, tmpW32; - int16_t *WebRtcIlbcfix_kGainSq5_ptr; - /* Stack based */ - int16_t CBbuf[CB_MEML+LPC_FILTERORDER+CB_HALFFILTERLEN]; - int32_t cDot[128]; - int32_t Crit[128]; - int16_t targetVec[SUBL+LPC_FILTERORDER]; - int16_t cbvectors[CB_MEML + 1]; /* Adding one extra position for - Coverity warnings. 
*/ - int16_t codedVec[SUBL]; - int16_t interpSamples[20*4]; - int16_t interpSamplesFilt[20*4]; - int16_t energyW16[CB_EXPAND*128]; - int16_t energyShifts[CB_EXPAND*128]; - int16_t *inverseEnergy=energyW16; /* Reuse memory */ - int16_t *inverseEnergyShifts=energyShifts; /* Reuse memory */ - int16_t *buf = &CBbuf[LPC_FILTERORDER]; - int16_t *target = &targetVec[LPC_FILTERORDER]; - int16_t *aug_vec = (int16_t*)cDot; /* length [SUBL], reuse memory */ - - /* Determine size of codebook sections */ - - base_size=lMem-lTarget+1; - if (lTarget==SUBL) { - base_size=lMem-19; - } - - /* weighting of the CB memory */ - noOfZeros=lMem-WebRtcIlbcfix_kFilterRange[block]; - WebRtcSpl_MemSetW16(&buf[-LPC_FILTERORDER], 0, noOfZeros+LPC_FILTERORDER); - WebRtcSpl_FilterARFastQ12( - decResidual+noOfZeros, buf+noOfZeros, - weightDenum, LPC_FILTERORDER+1, WebRtcIlbcfix_kFilterRange[block]); - - /* weighting of the target vector */ - WEBRTC_SPL_MEMCPY_W16(&target[-LPC_FILTERORDER], buf+noOfZeros+WebRtcIlbcfix_kFilterRange[block]-LPC_FILTERORDER, LPC_FILTERORDER); - WebRtcSpl_FilterARFastQ12( - intarget, target, - weightDenum, LPC_FILTERORDER+1, lTarget); - - /* Store target, towards the end codedVec is calculated as - the initial target minus the remaining target */ - WEBRTC_SPL_MEMCPY_W16(codedVec, target, lTarget); - - /* Find the highest absolute value to calculate proper - vector scale factor (so that it uses 12 bits) */ - temp1 = WebRtcSpl_MaxAbsValueW16(buf, lMem); - temp2 = WebRtcSpl_MaxAbsValueW16(target, lTarget); - - if ((temp1>0)&&(temp2>0)) { - temp1 = WEBRTC_SPL_MAX(temp1, temp2); - scale = WebRtcSpl_GetSizeInBits((uint32_t)(temp1 * temp1)); - } else { - /* temp1 or temp2 is negative (maximum was -32768) */ - scale = 30; - } - - /* Scale to so that a mul-add 40 times does not overflow */ - scale = scale - 25; - scale = WEBRTC_SPL_MAX(0, scale); - - /* Compute energy of the original target */ - targetEner = WebRtcSpl_DotProductWithScale(target, target, lTarget, scale); - - /* Prepare search over one more codebook section. This section - is created by filtering the original buffer with a filter. */ - WebRtcIlbcfix_FilteredCbVecs(cbvectors, buf, lMem, WebRtcIlbcfix_kFilterRange[block]); - - range = WebRtcIlbcfix_kSearchRange[block][0]; - - if(lTarget == SUBL) { - /* Create the interpolated samples and store them for use in all stages */ - - /* First section, non-filtered half of the cb */ - WebRtcIlbcfix_InterpolateSamples(interpSamples, buf, lMem); - - /* Second section, filtered half of the cb */ - WebRtcIlbcfix_InterpolateSamples(interpSamplesFilt, cbvectors, lMem); - - /* Compute the CB vectors' energies for the first cb section (non-filtered) */ - WebRtcIlbcfix_CbMemEnergyAugmentation(interpSamples, buf, - scale, 20, energyW16, energyShifts); - - /* Compute the CB vectors' energies for the second cb section (filtered cb) */ - WebRtcIlbcfix_CbMemEnergyAugmentation(interpSamplesFilt, cbvectors, scale, - base_size + 20, energyW16, - energyShifts); - - /* Compute the CB vectors' energies and store them in the vector - * energyW16. Also the corresponding shift values are stored. The - * energy values are used in all three stages. */ - WebRtcIlbcfix_CbMemEnergy(range, buf, cbvectors, lMem, - lTarget, energyW16+20, energyShifts+20, scale, base_size); - - } else { - /* Compute the CB vectors' energies and store them in the vector - * energyW16. Also the corresponding shift values are stored. The - * energy values are used in all three stages. 
*/ - WebRtcIlbcfix_CbMemEnergy(range, buf, cbvectors, lMem, - lTarget, energyW16, energyShifts, scale, base_size); - - /* Set the energy positions 58-63 and 122-127 to zero - (otherwise they are uninitialized) */ - WebRtcSpl_MemSetW16(energyW16+range, 0, (base_size-range)); - WebRtcSpl_MemSetW16(energyW16+range+base_size, 0, (base_size-range)); - } - - /* Calculate Inverse Energy (energyW16 is already normalized - and will contain the inverse energy in Q29 after this call */ - WebRtcIlbcfix_EnergyInverse(energyW16, base_size*CB_EXPAND); - - /* The gain value computed in the previous stage is used - * as an upper limit to what the next stage gain value - * is allowed to be. In stage 0, 16384 (1.0 in Q14) is used as - * the upper limit. */ - gains[0] = 16384; - - for (stage=0; stage> 1) > bestIndex) ? - 0 : (bestIndex - (CB_RESRANGE >> 1)); - eInd=sInd+CB_RESRANGE; - if (eInd>=range) { - eInd=range-1; - sInd=eInd-CB_RESRANGE; - } - - range = WebRtcIlbcfix_kSearchRange[block][stage]; - - if (lTarget==SUBL) { - i=sInd; - if (sInd<20) { - WebRtcIlbcfix_AugmentedCbCorr(target, cbvectors + lMem, - interpSamplesFilt, cDot, sInd + 20, - WEBRTC_SPL_MIN(39, (eInd + 20)), scale); - i=20; - cDotPtr = &cDot[20 - sInd]; - } else { - cDotPtr = cDot; - } - - cb_vecPtr = cbvectors+lMem-20-i; - - /* Calculate the cross correlations (main part of the filtered CB) */ - WebRtcSpl_CrossCorrelation(cDotPtr, target, cb_vecPtr, lTarget, - eInd - i + 1, scale, -1); - - } else { - cDotPtr = cDot; - cb_vecPtr = cbvectors+lMem-lTarget-sInd; - - /* Calculate the cross correlations (main part of the filtered CB) */ - WebRtcSpl_CrossCorrelation(cDotPtr, target, cb_vecPtr, lTarget, - eInd - sInd + 1, scale, -1); - - } - - /* Adjust the search range for the augmented vectors */ - indexOffset=base_size+sInd; - - /* Search for best index in this part of the vector */ - WebRtcIlbcfix_CbSearchCore( - cDot, eInd-sInd+1, stage, inverseEnergy+indexOffset, - inverseEnergyShifts+indexOffset, Crit, - &indexNew, &CritNew, &CritNewSh); - - /* Update the global best index and the corresponding gain */ - WebRtcIlbcfix_CbUpdateBestIndex( - CritNew, CritNewSh, indexNew+indexOffset, cDot[indexNew], - inverseEnergy[indexNew+indexOffset], inverseEnergyShifts[indexNew+indexOffset], - &CritMax, &shTotMax, &bestIndex, &bestGain); - - index[stage] = (int16_t)bestIndex; - - - bestGain = WebRtcIlbcfix_GainQuant(bestGain, - (int16_t)WEBRTC_SPL_ABS_W16(gains[stage]), stage, &gain_index[stage]); - - /* Extract the best (according to measure) codebook vector - Also adjust the index, so that the augmented vectors are last. - Above these vectors were first... 
- */ - - if(lTarget==(STATE_LEN-iLBCenc_inst->state_short_len)) { - - if((size_t)index[stage]=20) { - /* Adjust index and extract vector */ - index[stage]-=20; - pp=buf+lMem-lTarget-index[stage]; - } else { - /* Adjust index and extract vector */ - index[stage]+=(int16_t)(base_size-20); - - WebRtcIlbcfix_CreateAugmentedVec(index[stage]-base_size+40, - buf+lMem, aug_vec); - pp = aug_vec; - - } - } else { - - if ((index[stage] - base_size) >= 20) { - /* Adjust index and extract vector */ - index[stage]-=20; - pp=cbvectors+lMem-lTarget- - index[stage]+base_size; - } else { - /* Adjust index and extract vector */ - index[stage]+=(int16_t)(base_size-20); - WebRtcIlbcfix_CreateAugmentedVec(index[stage]-2*base_size+40, - cbvectors+lMem, aug_vec); - pp = aug_vec; - } - } - } - - /* Subtract the best codebook vector, according - to measure, from the target vector */ - - WebRtcSpl_AddAffineVectorToVector(target, pp, (int16_t)(-bestGain), - (int32_t)8192, (int16_t)14, lTarget); - - /* record quantized gain */ - gains[stage+1] = bestGain; - - } /* end of Main Loop. for (stage=0;... */ - - /* Calculte the coded vector (original target - what's left) */ - for (i=0;i> 14); - - targetEner = (int16_t)WEBRTC_SPL_SHIFT_W32(targetEner, -bits) * tmp; - - tmpW32 = ((int32_t)(gains[1]-1))<<1; - - /* Pointer to the table that contains - gain_sq5TblFIX * gain_sq5TblFIX in Q14 */ - gainPtr=(int16_t*)WebRtcIlbcfix_kGainSq5Sq+gain_index[0]; - temp1 = (int16_t)WEBRTC_SPL_SHIFT_W32(codedEner, -bits); - - WebRtcIlbcfix_kGainSq5_ptr = (int16_t*)&WebRtcIlbcfix_kGainSq5[j]; - - /* targetEner and codedEner are in Q(-2*scale) */ - for (ii=gain_index[0];ii<32;ii++) { - - /* Change the index if - (codedEnergy*gainTbl[i]*gainTbl[i])<(targetEn*gain[0]*gain[0]) AND - gainTbl[i] < 2*gain[0] - */ - - t32 = temp1 * *gainPtr; - t32 = t32 - targetEner; - if (t32 < 0) { - if ((*WebRtcIlbcfix_kGainSq5_ptr) < tmpW32) { - j=ii; - WebRtcIlbcfix_kGainSq5_ptr = (int16_t*)&WebRtcIlbcfix_kGainSq5[ii]; - } - } - gainPtr++; - } - gain_index[0]=j; - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/cb_search.h b/modules/audio_coding/codecs/ilbc/cb_search.h deleted file mode 100644 index 11856649e7..0000000000 --- a/modules/audio_coding/codecs/ilbc/cb_search.h +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CbSearch.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_H_ - -#include -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -void WebRtcIlbcfix_CbSearch( - IlbcEncoder* iLBCenc_inst, - /* (i) the encoder state structure */ - int16_t* index, /* (o) Codebook indices */ - int16_t* gain_index, /* (o) Gain quantization indices */ - int16_t* intarget, /* (i) Target vector for encoding */ - int16_t* decResidual, /* (i) Decoded residual for codebook construction */ - size_t lMem, /* (i) Length of buffer */ - size_t lTarget, /* (i) Length of vector */ - int16_t* weightDenum, /* (i) weighting filter coefficients in Q12 */ - size_t block /* (i) the subblock number */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/cb_search_core.c b/modules/audio_coding/codecs/ilbc/cb_search_core.c deleted file mode 100644 index a75e5b0ab8..0000000000 --- a/modules/audio_coding/codecs/ilbc/cb_search_core.c +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CbSearchCore.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/cb_search_core.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -void WebRtcIlbcfix_CbSearchCore( - int32_t *cDot, /* (i) Cross Correlation */ - size_t range, /* (i) Search range */ - int16_t stage, /* (i) Stage of this search */ - int16_t *inverseEnergy, /* (i) Inversed energy */ - int16_t *inverseEnergyShift, /* (i) Shifts of inversed energy - with the offset 2*16-29 */ - int32_t *Crit, /* (o) The criteria */ - size_t *bestIndex, /* (o) Index that corresponds to - maximum criteria (in this - vector) */ - int32_t *bestCrit, /* (o) Value of critera for the - chosen index */ - int16_t *bestCritSh) /* (o) The domain of the chosen - criteria */ -{ - int32_t maxW32, tmp32; - int16_t max, sh, tmp16; - size_t i; - int32_t *cDotPtr; - int16_t cDotSqW16; - int16_t *inverseEnergyPtr; - int32_t *critPtr; - int16_t *inverseEnergyShiftPtr; - - /* Don't allow negative values for stage 0 */ - if (stage==0) { - cDotPtr=cDot; - for (i=0;i> 16); - cDotSqW16 = (int16_t)(((int32_t)(tmp16)*(tmp16))>>16); - - /* Calculate the criteria (cDot*cDot/energy) */ - *critPtr = cDotSqW16 * *inverseEnergyPtr; - - /* Extract the maximum shift value under the constraint - that the criteria is not zero */ - if ((*critPtr)!=0) { - max = WEBRTC_SPL_MAX((*inverseEnergyShiftPtr), max); - } - - inverseEnergyPtr++; - inverseEnergyShiftPtr++; - critPtr++; - cDotPtr++; - } - - /* If no max shifts still at initialization value, set shift to zero */ - if (max==WEBRTC_SPL_WORD16_MIN) { - max = 0; - } - - /* Modify the criterias, so that all of them use the same Q domain */ - 
critPtr=Crit; - inverseEnergyShiftPtr=inverseEnergyShift; - for (i=0;i31) */ - tmp16 = WEBRTC_SPL_MIN(16, max-(*inverseEnergyShiftPtr)); - - (*critPtr)=WEBRTC_SPL_SHIFT_W32((*critPtr),-tmp16); - critPtr++; - inverseEnergyShiftPtr++; - } - - /* Find the index of the best value */ - *bestIndex = WebRtcSpl_MaxIndexW32(Crit, range); - *bestCrit = Crit[*bestIndex]; - - /* Calculate total shifts of this criteria */ - *bestCritSh = 32 - 2*sh + max; - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/cb_search_core.h b/modules/audio_coding/codecs/ilbc/cb_search_core.h deleted file mode 100644 index 5a3b13e446..0000000000 --- a/modules/audio_coding/codecs/ilbc/cb_search_core.h +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CbSearchCore.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_CORE_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_CORE_H_ - -#include -#include - -void WebRtcIlbcfix_CbSearchCore( - int32_t* cDot, /* (i) Cross Correlation */ - size_t range, /* (i) Search range */ - int16_t stage, /* (i) Stage of this search */ - int16_t* inverseEnergy, /* (i) Inversed energy */ - int16_t* inverseEnergyShift, /* (i) Shifts of inversed energy - with the offset 2*16-29 */ - int32_t* Crit, /* (o) The criteria */ - size_t* bestIndex, /* (o) Index that corresponds to - maximum criteria (in this - vector) */ - int32_t* bestCrit, /* (o) Value of critera for the - chosen index */ - int16_t* bestCritSh); /* (o) The domain of the chosen - criteria */ - -#endif diff --git a/modules/audio_coding/codecs/ilbc/cb_update_best_index.c b/modules/audio_coding/codecs/ilbc/cb_update_best_index.c deleted file mode 100644 index d6fa4d93d4..0000000000 --- a/modules/audio_coding/codecs/ilbc/cb_update_best_index.c +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
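WebRtcIlbcfix_CbSearchCore above scores each candidate with the matched-filter criterion (d.c)^2 / ||c||^2 and picks the index with the largest score; the shift bookkeeping (sh, max, the 16-bit squares) exists only to keep that ratio representable in 16/32-bit words. The following floating-point sketch shows the same decision with made-up correlation and energy values.

#include <stdio.h>

int main(void) {
  /* Made-up cross correlations d.c and energies ||c||^2 for five lags. */
  const double cross[5] = {120.0, -340.0, 510.0, 90.0, -20.0};
  const double energy[5] = {400.0, 800.0, 900.0, 50.0, 10.0};
  int best = 0;
  double best_crit = -1.0;
  int i;
  for (i = 0; i < 5; i++) {
    /* Stage 0 of the fixed-point search first clamps negative cross
       correlations to zero; that step is skipped here for brevity. */
    double crit = cross[i] * cross[i] / energy[i];
    if (crit > best_crit) {
      best_crit = crit;
      best = i;
    }
  }
  /* The gain that goes with the winning index is d.c / ||c||^2. */
  printf("best index=%d  criterion=%.1f  gain=%.3f\n",
         best, best_crit, cross[best] / energy[best]);
  return 0;
}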
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CbUpdateBestIndex.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/cb_update_best_index.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -void WebRtcIlbcfix_CbUpdateBestIndex( - int32_t CritNew, /* (i) New Potentially best Criteria */ - int16_t CritNewSh, /* (i) Shift value of above Criteria */ - size_t IndexNew, /* (i) Index of new Criteria */ - int32_t cDotNew, /* (i) Cross dot of new index */ - int16_t invEnergyNew, /* (i) Inversed energy new index */ - int16_t energyShiftNew, /* (i) Energy shifts of new index */ - int32_t *CritMax, /* (i/o) Maximum Criteria (so far) */ - int16_t *shTotMax, /* (i/o) Shifts of maximum criteria */ - size_t *bestIndex, /* (i/o) Index that corresponds to - maximum criteria */ - int16_t *bestGain) /* (i/o) Gain in Q14 that corresponds - to maximum criteria */ -{ - int16_t shOld, shNew, tmp16; - int16_t scaleTmp; - int32_t gainW32; - - /* Normalize the new and old Criteria to the same domain */ - if (CritNewSh>(*shTotMax)) { - shOld=WEBRTC_SPL_MIN(31,CritNewSh-(*shTotMax)); - shNew=0; - } else { - shOld=0; - shNew=WEBRTC_SPL_MIN(31,(*shTotMax)-CritNewSh); - } - - /* Compare the two criterias. If the new one is better, - calculate the gain and store this index as the new best one - */ - - if ((CritNew >> shNew) > (*CritMax >> shOld)) { - - tmp16 = (int16_t)WebRtcSpl_NormW32(cDotNew); - tmp16 = 16 - tmp16; - - /* Calculate the gain in Q14 - Compensate for inverseEnergyshift in Q29 and that the energy - value was stored in a int16_t (shifted down 16 steps) - => 29-14+16 = 31 */ - - scaleTmp = -energyShiftNew-tmp16+31; - scaleTmp = WEBRTC_SPL_MIN(31, scaleTmp); - - gainW32 = ((int16_t)WEBRTC_SPL_SHIFT_W32(cDotNew, -tmp16) * invEnergyNew) >> - scaleTmp; - - /* Check if criteria satisfies Gain criteria (max 1.3) - if it is larger set the gain to 1.3 - (slightly different from FLP version) - */ - if (gainW32>21299) { - *bestGain=21299; - } else if (gainW32<-21299) { - *bestGain=-21299; - } else { - *bestGain=(int16_t)gainW32; - } - - *CritMax=CritNew; - *shTotMax=CritNewSh; - *bestIndex = IndexNew; - } - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/cb_update_best_index.h b/modules/audio_coding/codecs/ilbc/cb_update_best_index.h deleted file mode 100644 index 1a95d531e9..0000000000 --- a/modules/audio_coding/codecs/ilbc/cb_update_best_index.h +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
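WebRtcIlbcfix_CbUpdateBestIndex above compares the new criterion against the running maximum after aligning their shift domains and, when the new one wins, recomputes the gain and clamps it to +/-1.3, which is what the Q14 constant 21299 (about 1.3 * 16384) encodes. A small floating-point sketch with invented candidate values:

#include <stdio.h>

int main(void) {
  /* Invented candidates: criterion, matching cross correlation and energy. */
  const double crit[4]  = {36.0, 144.5, 289.0, 800.0};
  const double cross[4] = {120.0, -340.0, 510.0, 400.0};
  const double ener[4]  = {400.0, 800.0, 900.0, 200.0};

  double crit_max = 0.0;
  double best_gain = 0.0;
  int best_index = 0;
  int i;
  for (i = 0; i < 4; i++) {
    if (crit[i] > crit_max) {
      /* Recompute the gain only for a new winner, then clamp it to +/-1.3
         (21299 in Q14 in the fixed-point code). */
      double gain = cross[i] / ener[i];
      if (gain > 1.3) gain = 1.3;
      if (gain < -1.3) gain = -1.3;
      crit_max = crit[i];
      best_gain = gain;
      best_index = i;
    }
  }
  printf("best index=%d  crit=%.1f  gain=%.3f\n", best_index, crit_max, best_gain);
  return 0;
}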
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CbUpdateBestIndex.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_UPDATE_BEST_INDEX_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_UPDATE_BEST_INDEX_H_ - -#include -#include - -void WebRtcIlbcfix_CbUpdateBestIndex( - int32_t CritNew, /* (i) New Potentially best Criteria */ - int16_t CritNewSh, /* (i) Shift value of above Criteria */ - size_t IndexNew, /* (i) Index of new Criteria */ - int32_t cDotNew, /* (i) Cross dot of new index */ - int16_t invEnergyNew, /* (i) Inversed energy new index */ - int16_t energyShiftNew, /* (i) Energy shifts of new index */ - int32_t* CritMax, /* (i/o) Maximum Criteria (so far) */ - int16_t* shTotMax, /* (i/o) Shifts of maximum criteria */ - size_t* bestIndex, /* (i/o) Index that corresponds to - maximum criteria */ - int16_t* bestGain); /* (i/o) Gain in Q14 that corresponds - to maximum criteria */ - -#endif diff --git a/modules/audio_coding/codecs/ilbc/chebyshev.c b/modules/audio_coding/codecs/ilbc/chebyshev.c deleted file mode 100644 index b4eee66219..0000000000 --- a/modules/audio_coding/codecs/ilbc/chebyshev.c +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Chebyshev.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/chebyshev.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*------------------------------------------------------------------* - * Calculate the Chevyshev polynomial series - * F(w) = 2*exp(-j5w)*C(x) - * C(x) = (T_0(x) + f(1)T_1(x) + ... 
+ f(4)T_1(x) + f(5)/2) - * T_i(x) is the i:th order Chebyshev polynomial - *------------------------------------------------------------------*/ - -int16_t WebRtcIlbcfix_Chebyshev( - /* (o) Result of C(x) */ - int16_t x, /* (i) Value to the Chevyshev polynomial */ - int16_t *f /* (i) The coefficients in the polynomial */ - ) { - int16_t b1_high, b1_low; /* Use the high, low format to increase the accuracy */ - int32_t b2; - int32_t tmp1W32; - int32_t tmp2W32; - int i; - - b2 = (int32_t)0x1000000; /* b2 = 1.0 (Q23) */ - /* Calculate b1 = 2*x + f[1] */ - tmp1W32 = (x << 10) + (f[1] << 14); - - for (i = 2; i < 5; i++) { - tmp2W32 = tmp1W32; - - /* Split b1 (in tmp1W32) into a high and low part */ - b1_high = (int16_t)(tmp1W32 >> 16); - b1_low = (int16_t)((tmp1W32 - ((int32_t)b1_high << 16)) >> 1); - - /* Calculate 2*x*b1-b2+f[i] */ - tmp1W32 = ((b1_high * x + ((b1_low * x) >> 15)) << 2) - b2 + (f[i] << 14); - - /* Update b2 for next round */ - b2 = tmp2W32; - } - - /* Split b1 (in tmp1W32) into a high and low part */ - b1_high = (int16_t)(tmp1W32 >> 16); - b1_low = (int16_t)((tmp1W32 - ((int32_t)b1_high << 16)) >> 1); - - /* tmp1W32 = x*b1 - b2 + f[i]/2 */ - tmp1W32 = ((b1_high * x) << 1) + (((b1_low * x) >> 15) << 1) - - b2 + (f[i] << 13); - - /* Handle overflows and set to maximum or minimum int16_t instead */ - if (tmp1W32>((int32_t)33553408)) { - return(WEBRTC_SPL_WORD16_MAX); - } else if (tmp1W32<((int32_t)-33554432)) { - return(WEBRTC_SPL_WORD16_MIN); - } else { - return (int16_t)(tmp1W32 >> 10); - } -} diff --git a/modules/audio_coding/codecs/ilbc/chebyshev.h b/modules/audio_coding/codecs/ilbc/chebyshev.h deleted file mode 100644 index 8ba82927b8..0000000000 --- a/modules/audio_coding/codecs/ilbc/chebyshev.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Chebyshev.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CHEBYSHEV_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CHEBYSHEV_H_ - -#include -#include - -/*------------------------------------------------------------------* - * Calculate the Chevyshev polynomial series - * F(w) = 2*exp(-j5w)*C(x) - * C(x) = (T_0(x) + f(1)T_1(x) + ... + f(4)T_1(x) + f(5)/2) - * T_i(x) is the i:th order Chebyshev polynomial - *------------------------------------------------------------------*/ - -int16_t WebRtcIlbcfix_Chebyshev( - /* (o) Result of C(x) */ - int16_t x, /* (i) Value to the Chevyshev polynomial */ - int16_t* f /* (i) The coefficients in the polynomial */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/comp_corr.c b/modules/audio_coding/codecs/ilbc/comp_corr.c deleted file mode 100644 index 452bc78e3b..0000000000 --- a/modules/audio_coding/codecs/ilbc/comp_corr.c +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
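The loop in WebRtcIlbcfix_Chebyshev above is a Clenshaw-style recurrence: b2 starts at 1, b1 at 2*x + f[1], each pass computes 2*x*b1 - b2 + f[i], and the result is x*b1 - b2 + f[5]/2; the hi/lo split of b1 only adds precision in the Q15 arithmetic. Below is a floating-point version of the same recurrence with arbitrary example coefficients.

#include <stdio.h>

/* Floating-point counterpart of the fixed-point recurrence; f[0] is unused,
   as in the original routine. */
static double chebyshev(double x, const double f[6]) {
  double b2 = 1.0;
  double b1 = 2.0 * x + f[1];
  int i;
  for (i = 2; i < 5; i++) {
    double prev_b1 = b1;
    b1 = 2.0 * x * b1 - b2 + f[i];
    b2 = prev_b1;
  }
  return x * b1 - b2 + 0.5 * f[5];
}

int main(void) {
  /* Arbitrary example coefficients. */
  const double f[6] = {0.0, -0.3, 0.8, -0.1, 0.4, 0.2};
  double x;
  for (x = -1.0; x <= 1.0; x += 0.5) {
    printf("C(%4.1f) = %f\n", x, chebyshev(x, f));
  }
  return 0;
}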
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CompCorr.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/comp_corr.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * Compute cross correlation and pitch gain for pitch prediction - * of last subframe at given lag. - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_CompCorr( - int32_t *corr, /* (o) cross correlation */ - int32_t *ener, /* (o) energy */ - int16_t *buffer, /* (i) signal buffer */ - size_t lag, /* (i) pitch lag */ - size_t bLen, /* (i) length of buffer */ - size_t sRange, /* (i) correlation search length */ - int16_t scale /* (i) number of rightshifts to use */ - ){ - int16_t *w16ptr; - - w16ptr=&buffer[bLen-sRange-lag]; - - /* Calculate correlation and energy */ - (*corr)=WebRtcSpl_DotProductWithScale(&buffer[bLen-sRange], w16ptr, sRange, scale); - (*ener)=WebRtcSpl_DotProductWithScale(w16ptr, w16ptr, sRange, scale); - - /* For zero energy set the energy to 0 in order to avoid potential - problems for coming divisions */ - if (*ener == 0) { - *corr = 0; - *ener = 1; - } -} diff --git a/modules/audio_coding/codecs/ilbc/comp_corr.h b/modules/audio_coding/codecs/ilbc/comp_corr.h deleted file mode 100644 index d9df9a78f8..0000000000 --- a/modules/audio_coding/codecs/ilbc/comp_corr.h +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CompCorr.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_COMP_CORR_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_COMP_CORR_H_ - -#include -#include - -/*----------------------------------------------------------------* - * Compute cross correlation and pitch gain for pitch prediction - * of last subframe at given lag. 
- *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_CompCorr(int32_t* corr, /* (o) cross correlation */ - int32_t* ener, /* (o) energy */ - int16_t* buffer, /* (i) signal buffer */ - size_t lag, /* (i) pitch lag */ - size_t bLen, /* (i) length of buffer */ - size_t sRange, /* (i) correlation search length */ - int16_t scale /* (i) number of rightshifts to use */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/complexityMeasures.m b/modules/audio_coding/codecs/ilbc/complexityMeasures.m deleted file mode 100644 index 4bda83622f..0000000000 --- a/modules/audio_coding/codecs/ilbc/complexityMeasures.m +++ /dev/null @@ -1,57 +0,0 @@ -% % Copyright(c) 2011 The WebRTC project authors.All Rights Reserved.% - % Use of this source code is governed by a BSD - - - style license % that can be found in the LICENSE file in the root of the source - % tree.An additional intellectual property rights grant can be found - % in the file PATENTS.All contributing project authors may - % be found in the AUTHORS file in the root of the source tree.% - - clear; -pack; -% -% Enter the path to YOUR executable and remember to define the perprocessor -% variable PRINT_MIPS te get the instructions printed to the screen. -% -command = '!iLBCtest.exe 30 speechAndBGnoise.pcm out1.bit out1.pcm tlm10_30ms.dat'; -cout=' > st.txt'; %saves to matlab variable 'st' -eval(strcat(command,cout)); -if(length(cout)>3) - load st.txt -else - disp('No cout file to load') -end - -% initialize vector to zero -index = find(st(1:end,1)==-1); -indexnonzero = find(st(1:end,1)>0); -frames = length(index)-indexnonzero(1)+1; -start = indexnonzero(1) - 1; -functionOrder=max(st(:,2)); -new=zeros(frames,functionOrder); - -for i = 1:frames, - for j = index(start-1+i)+1:(index(start+i)-1), - new(i,st(j,2)) = new(i,st(j,2)) + st(j,1); - end -end - -result=zeros(functionOrder,3); -for i=1:functionOrder - nonzeroelements = find(new(1:end,i)>0); - result(i,1)=i; - - % Compute each function's mean complexity - % result(i,2)=(sum(new(nonzeroelements,i))/(length(nonzeroelements)*0.03))/1000000; - - % Compute each function's maximum complexity in encoding - % and decoding respectively and then add it together: - % result(i,3)=(max(new(1:end,i))/0.03)/1000000; - result(i,3)=(max(new(1:size(new,1)/2,i))/0.03)/1000000 + (max(new(size(new,1)/2+1:end,i))/0.03)/1000000; -end - -result - -% Compute maximum complexity for a single frame (enc/dec separately and together) -maxEncComplexityInAFrame = (max(sum(new(1:size(new,1)/2,:),2))/0.03)/1000000 -maxDecComplexityInAFrame = (max(sum(new(size(new,1)/2+1:end,:),2))/0.03)/1000000 -totalComplexity = maxEncComplexityInAFrame + maxDecComplexityInAFrame diff --git a/modules/audio_coding/codecs/ilbc/constants.c b/modules/audio_coding/codecs/ilbc/constants.c deleted file mode 100644 index 22f2acb330..0000000000 --- a/modules/audio_coding/codecs/ilbc/constants.c +++ /dev/null @@ -1,667 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
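WebRtcIlbcfix_CompCorr above returns the cross correlation between the newest sRange samples and the segment lag samples earlier, together with that segment's energy, so the caller can form the pitch gain corr/ener; the ener = 1 fallback merely avoids a later division by zero. The sketch below is a floating-point analogue over a synthetic periodic buffer; SRANGE and the buffer contents are arbitrary example values and the right-shift scaling of the fixed-point version is omitted.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define SRANGE 40  /* correlation search length, standing in for sRange */

static void comp_corr(const int16_t* buffer, size_t bLen, size_t lag,
                      double* corr, double* ener) {
  const int16_t* cur = &buffer[bLen - SRANGE];        /* newest samples     */
  const int16_t* past = &buffer[bLen - SRANGE - lag]; /* lag samples before */
  double c = 0.0;
  double e = 0.0;
  size_t i;
  for (i = 0; i < SRANGE; i++) {
    c += (double)cur[i] * past[i];
    e += (double)past[i] * past[i];
  }
  if (e == 0.0) {  /* same zero-energy guard as the fixed-point code */
    c = 0.0;
    e = 1.0;
  }
  *corr = c;
  *ener = e;
}

int main(void) {
  int16_t buf[200];
  int i;
  for (i = 0; i < 200; i++) buf[i] = (int16_t)(100 * ((i % 57) - 28));
  double corr, ener;
  comp_corr(buf, 200, 57, &corr, &ener);  /* lag equal to the signal period */
  printf("lag 57: corr=%.0f  ener=%.0f  pitch gain=%.3f\n",
         corr, ener, corr / ener);
  return 0;
}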
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - constants.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/constants.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/* HP Filters {b[0] b[1] b[2] -a[1] -a[2]} */ - -const int16_t WebRtcIlbcfix_kHpInCoefs[5] = {3798, -7596, 3798, 7807, -3733}; -const int16_t WebRtcIlbcfix_kHpOutCoefs[5] = {3849, -7699, 3849, 7918, -3833}; - -/* Window in Q11 to window the energies of the 5 choises (3 for 20ms) in the choise for - the 80 sample start state -*/ -const int16_t WebRtcIlbcfix_kStartSequenceEnrgWin[NSUB_MAX-1]= { - 1638, 1843, 2048, 1843, 1638 -}; - -/* LP Filter coeffs used for downsampling */ -const int16_t WebRtcIlbcfix_kLpFiltCoefs[FILTERORDER_DS_PLUS1]= { - -273, 512, 1297, 1696, 1297, 512, -273 -}; - -/* Constants used in the LPC calculations */ - -/* Hanning LPC window (in Q15) */ -const int16_t WebRtcIlbcfix_kLpcWin[BLOCKL_MAX] = { - 6, 22, 50, 89, 139, 200, 272, 355, 449, 554, 669, 795, - 932, 1079, 1237, 1405, 1583, 1771, 1969, 2177, 2395, 2622, 2858, 3104, - 3359, 3622, 3894, 4175, 4464, 4761, 5066, 5379, 5699, 6026, 6361, 6702, - 7050, 7404, 7764, 8130, 8502, 8879, 9262, 9649, 10040, 10436, 10836, 11240, - 11647, 12058, 12471, 12887, 13306, 13726, 14148, 14572, 14997, 15423, 15850, 16277, - 16704, 17131, 17558, 17983, 18408, 18831, 19252, 19672, 20089, 20504, 20916, 21325, - 21730, 22132, 22530, 22924, 23314, 23698, 24078, 24452, 24821, 25185, 25542, 25893, - 26238, 26575, 26906, 27230, 27547, 27855, 28156, 28450, 28734, 29011, 29279, 29538, - 29788, 30029, 30261, 30483, 30696, 30899, 31092, 31275, 31448, 31611, 31764, 31906, - 32037, 32158, 32268, 32367, 32456, 32533, 32600, 32655, 32700, 32733, 32755, 32767, - 32767, 32755, 32733, 32700, 32655, 32600, 32533, 32456, 32367, 32268, 32158, 32037, - 31906, 31764, 31611, 31448, 31275, 31092, 30899, 30696, 30483, 30261, 30029, 29788, - 29538, 29279, 29011, 28734, 28450, 28156, 27855, 27547, 27230, 26906, 26575, 26238, - 25893, 25542, 25185, 24821, 24452, 24078, 23698, 23314, 22924, 22530, 22132, 21730, - 21325, 20916, 20504, 20089, 19672, 19252, 18831, 18408, 17983, 17558, 17131, 16704, - 16277, 15850, 15423, 14997, 14572, 14148, 13726, 13306, 12887, 12471, 12058, 11647, - 11240, 10836, 10436, 10040, 9649, 9262, 8879, 8502, 8130, 7764, 7404, 7050, - 6702, 6361, 6026, 5699, 5379, 5066, 4761, 4464, 4175, 3894, 3622, 3359, - 3104, 2858, 2622, 2395, 2177, 1969, 1771, 1583, 1405, 1237, 1079, 932, - 795, 669, 554, 449, 355, 272, 200, 139, 89, 50, 22, 6 -}; - -/* Asymmetric LPC window (in Q15)*/ -const int16_t WebRtcIlbcfix_kLpcAsymWin[BLOCKL_MAX] = { - 2, 7, 15, 27, 42, 60, 81, 106, 135, 166, 201, 239, - 280, 325, 373, 424, 478, 536, 597, 661, 728, 798, 872, 949, - 1028, 1111, 1197, 1287, 1379, 1474, 1572, 1674, 1778, 1885, 1995, 2108, - 2224, 2343, 2465, 2589, 2717, 2847, 2980, 3115, 3254, 3395, 3538, 3684, - 3833, 3984, 4138, 4295, 4453, 4615, 4778, 4944, 5112, 5283, 5456, 5631, - 5808, 5987, 6169, 6352, 6538, 6725, 6915, 7106, 7300, 7495, 7692, 7891, - 8091, 8293, 8497, 8702, 8909, 9118, 9328, 9539, 9752, 9966, 10182, 10398, - 10616, 10835, 11055, 11277, 11499, 11722, 11947, 12172, 12398, 12625, 12852, 13080, - 13309, 13539, 13769, 14000, 14231, 14463, 14695, 14927, 15160, 15393, 15626, 15859, - 16092, 16326, 16559, 16792, 17026, 17259, 17492, 17725, 17957, 18189, 18421, 18653, - 18884, 19114, 19344, 19573, 19802, 20030, 20257, 20483, 
20709, 20934, 21157, 21380, - 21602, 21823, 22042, 22261, 22478, 22694, 22909, 23123, 23335, 23545, 23755, 23962, - 24168, 24373, 24576, 24777, 24977, 25175, 25371, 25565, 25758, 25948, 26137, 26323, - 26508, 26690, 26871, 27049, 27225, 27399, 27571, 27740, 27907, 28072, 28234, 28394, - 28552, 28707, 28860, 29010, 29157, 29302, 29444, 29584, 29721, 29855, 29987, 30115, - 30241, 30364, 30485, 30602, 30717, 30828, 30937, 31043, 31145, 31245, 31342, 31436, - 31526, 31614, 31699, 31780, 31858, 31933, 32005, 32074, 32140, 32202, 32261, 32317, - 32370, 32420, 32466, 32509, 32549, 32585, 32618, 32648, 32675, 32698, 32718, 32734, - 32748, 32758, 32764, 32767, 32767, 32667, 32365, 31863, 31164, 30274, 29197, 27939, - 26510, 24917, 23170, 21281, 19261, 17121, 14876, 12540, 10126, 7650, 5126, 2571 -}; - -/* Lag window for LPC (Q31) */ -const int32_t WebRtcIlbcfix_kLpcLagWin[LPC_FILTERORDER + 1]={ - 2147483647, 2144885453, 2137754373, 2125918626, 2109459810, - 2088483140, 2063130336, 2033564590, 1999977009, 1962580174, - 1921610283}; - -/* WebRtcIlbcfix_kLpcChirpSyntDenum vector in Q15 corresponding - * floating point vector {1 0.9025 0.9025^2 0.9025^3 ...} - */ -const int16_t WebRtcIlbcfix_kLpcChirpSyntDenum[LPC_FILTERORDER + 1] = { - 32767, 29573, 26690, 24087, - 21739, 19619, 17707, 15980, - 14422, 13016, 11747}; - -/* WebRtcIlbcfix_kLpcChirpWeightDenum in Q15 corresponding to - * floating point vector {1 0.4222 0.4222^2... } - */ -const int16_t WebRtcIlbcfix_kLpcChirpWeightDenum[LPC_FILTERORDER + 1] = { - 32767, 13835, 5841, 2466, 1041, 440, - 186, 78, 33, 14, 6}; - -/* LSF quantization Q13 domain */ -const int16_t WebRtcIlbcfix_kLsfCb[64 * 3 + 128 * 3 + 128 * 4] = { - 1273, 2238, 3696, - 3199, 5309, 8209, - 3606, 5671, 7829, - 2815, 5262, 8778, - 2608, 4027, 5493, - 1582, 3076, 5945, - 2983, 4181, 5396, - 2437, 4322, 6902, - 1861, 2998, 4613, - 2007, 3250, 5214, - 1388, 2459, 4262, - 2563, 3805, 5269, - 2036, 3522, 5129, - 1935, 4025, 6694, - 2744, 5121, 7338, - 2810, 4248, 5723, - 3054, 5405, 7745, - 1449, 2593, 4763, - 3411, 5128, 6596, - 2484, 4659, 7496, - 1668, 2879, 4818, - 1812, 3072, 5036, - 1638, 2649, 3900, - 2464, 3550, 4644, - 1853, 2900, 4158, - 2458, 4163, 5830, - 2556, 4036, 6254, - 2703, 4432, 6519, - 3062, 4953, 7609, - 1725, 3703, 6187, - 2221, 3877, 5427, - 2339, 3579, 5197, - 2021, 4633, 7037, - 2216, 3328, 4535, - 2961, 4739, 6667, - 2807, 3955, 5099, - 2788, 4501, 6088, - 1642, 2755, 4431, - 3341, 5282, 7333, - 2414, 3726, 5727, - 1582, 2822, 5269, - 2259, 3447, 4905, - 3117, 4986, 7054, - 1825, 3491, 5542, - 3338, 5736, 8627, - 1789, 3090, 5488, - 2566, 3720, 4923, - 2846, 4682, 7161, - 1950, 3321, 5976, - 1834, 3383, 6734, - 3238, 4769, 6094, - 2031, 3978, 5903, - 1877, 4068, 7436, - 2131, 4644, 8296, - 2764, 5010, 8013, - 2194, 3667, 6302, - 2053, 3127, 4342, - 3523, 6595, 10010, - 3134, 4457, 5748, - 3142, 5819, 9414, - 2223, 4334, 6353, - 2022, 3224, 4822, - 2186, 3458, 5544, - 2552, 4757, 6870, - 10905, 12917, 14578, - 9503, 11485, 14485, - 9518, 12494, 14052, - 6222, 7487, 9174, - 7759, 9186, 10506, - 8315, 12755, 14786, - 9609, 11486, 13866, - 8909, 12077, 13643, - 7369, 9054, 11520, - 9408, 12163, 14715, - 6436, 9911, 12843, - 7109, 9556, 11884, - 7557, 10075, 11640, - 6482, 9202, 11547, - 6463, 7914, 10980, - 8611, 10427, 12752, - 7101, 9676, 12606, - 7428, 11252, 13172, - 10197, 12955, 15842, - 7487, 10955, 12613, - 5575, 7858, 13621, - 7268, 11719, 14752, - 7476, 11744, 13795, - 7049, 8686, 11922, - 8234, 11314, 13983, - 6560, 11173, 14984, - 6405, 9211, 12337, - 
8222, 12054, 13801, - 8039, 10728, 13255, - 10066, 12733, 14389, - 6016, 7338, 10040, - 6896, 8648, 10234, - 7538, 9170, 12175, - 7327, 12608, 14983, - 10516, 12643, 15223, - 5538, 7644, 12213, - 6728, 12221, 14253, - 7563, 9377, 12948, - 8661, 11023, 13401, - 7280, 8806, 11085, - 7723, 9793, 12333, - 12225, 14648, 16709, - 8768, 13389, 15245, - 10267, 12197, 13812, - 5301, 7078, 11484, - 7100, 10280, 11906, - 8716, 12555, 14183, - 9567, 12464, 15434, - 7832, 12305, 14300, - 7608, 10556, 12121, - 8913, 11311, 12868, - 7414, 9722, 11239, - 8666, 11641, 13250, - 9079, 10752, 12300, - 8024, 11608, 13306, - 10453, 13607, 16449, - 8135, 9573, 10909, - 6375, 7741, 10125, - 10025, 12217, 14874, - 6985, 11063, 14109, - 9296, 13051, 14642, - 8613, 10975, 12542, - 6583, 10414, 13534, - 6191, 9368, 13430, - 5742, 6859, 9260, - 7723, 9813, 13679, - 8137, 11291, 12833, - 6562, 8973, 10641, - 6062, 8462, 11335, - 6928, 8784, 12647, - 7501, 8784, 10031, - 8372, 10045, 12135, - 8191, 9864, 12746, - 5917, 7487, 10979, - 5516, 6848, 10318, - 6819, 9899, 11421, - 7882, 12912, 15670, - 9558, 11230, 12753, - 7752, 9327, 11472, - 8479, 9980, 11358, - 11418, 14072, 16386, - 7968, 10330, 14423, - 8423, 10555, 12162, - 6337, 10306, 14391, - 8850, 10879, 14276, - 6750, 11885, 15710, - 7037, 8328, 9764, - 6914, 9266, 13476, - 9746, 13949, 15519, - 11032, 14444, 16925, - 8032, 10271, 11810, - 10962, 13451, 15833, - 10021, 11667, 13324, - 6273, 8226, 12936, - 8543, 10397, 13496, - 7936, 10302, 12745, - 6769, 8138, 10446, - 6081, 7786, 11719, - 8637, 11795, 14975, - 8790, 10336, 11812, - 7040, 8490, 10771, - 7338, 10381, 13153, - 6598, 7888, 9358, - 6518, 8237, 12030, - 9055, 10763, 12983, - 6490, 10009, 12007, - 9589, 12023, 13632, - 6867, 9447, 10995, - 7930, 9816, 11397, - 10241, 13300, 14939, - 5830, 8670, 12387, - 9870, 11915, 14247, - 9318, 11647, 13272, - 6721, 10836, 12929, - 6543, 8233, 9944, - 8034, 10854, 12394, - 9112, 11787, 14218, - 9302, 11114, 13400, - 9022, 11366, 13816, - 6962, 10461, 12480, - 11288, 13333, 15222, - 7249, 8974, 10547, - 10566, 12336, 14390, - 6697, 11339, 13521, - 11851, 13944, 15826, - 6847, 8381, 11349, - 7509, 9331, 10939, - 8029, 9618, 11909, - 13973, 17644, 19647, 22474, - 14722, 16522, 20035, 22134, - 16305, 18179, 21106, 23048, - 15150, 17948, 21394, 23225, - 13582, 15191, 17687, 22333, - 11778, 15546, 18458, 21753, - 16619, 18410, 20827, 23559, - 14229, 15746, 17907, 22474, - 12465, 15327, 20700, 22831, - 15085, 16799, 20182, 23410, - 13026, 16935, 19890, 22892, - 14310, 16854, 19007, 22944, - 14210, 15897, 18891, 23154, - 14633, 18059, 20132, 22899, - 15246, 17781, 19780, 22640, - 16396, 18904, 20912, 23035, - 14618, 17401, 19510, 21672, - 15473, 17497, 19813, 23439, - 18851, 20736, 22323, 23864, - 15055, 16804, 18530, 20916, - 16490, 18196, 19990, 21939, - 11711, 15223, 21154, 23312, - 13294, 15546, 19393, 21472, - 12956, 16060, 20610, 22417, - 11628, 15843, 19617, 22501, - 14106, 16872, 19839, 22689, - 15655, 18192, 20161, 22452, - 12953, 15244, 20619, 23549, - 15322, 17193, 19926, 21762, - 16873, 18676, 20444, 22359, - 14874, 17871, 20083, 21959, - 11534, 14486, 19194, 21857, - 17766, 19617, 21338, 23178, - 13404, 15284, 19080, 23136, - 15392, 17527, 19470, 21953, - 14462, 16153, 17985, 21192, - 17734, 19750, 21903, 23783, - 16973, 19096, 21675, 23815, - 16597, 18936, 21257, 23461, - 15966, 17865, 20602, 22920, - 15416, 17456, 20301, 22972, - 18335, 20093, 21732, 23497, - 15548, 17217, 20679, 23594, - 15208, 16995, 20816, 22870, - 13890, 18015, 20531, 22468, - 13211, 
15377, 19951, 22388, - 12852, 14635, 17978, 22680, - 16002, 17732, 20373, 23544, - 11373, 14134, 19534, 22707, - 17329, 19151, 21241, 23462, - 15612, 17296, 19362, 22850, - 15422, 19104, 21285, 23164, - 13792, 17111, 19349, 21370, - 15352, 17876, 20776, 22667, - 15253, 16961, 18921, 22123, - 14108, 17264, 20294, 23246, - 15785, 17897, 20010, 21822, - 17399, 19147, 20915, 22753, - 13010, 15659, 18127, 20840, - 16826, 19422, 22218, 24084, - 18108, 20641, 22695, 24237, - 18018, 20273, 22268, 23920, - 16057, 17821, 21365, 23665, - 16005, 17901, 19892, 23016, - 13232, 16683, 21107, 23221, - 13280, 16615, 19915, 21829, - 14950, 18575, 20599, 22511, - 16337, 18261, 20277, 23216, - 14306, 16477, 21203, 23158, - 12803, 17498, 20248, 22014, - 14327, 17068, 20160, 22006, - 14402, 17461, 21599, 23688, - 16968, 18834, 20896, 23055, - 15070, 17157, 20451, 22315, - 15419, 17107, 21601, 23946, - 16039, 17639, 19533, 21424, - 16326, 19261, 21745, 23673, - 16489, 18534, 21658, 23782, - 16594, 18471, 20549, 22807, - 18973, 21212, 22890, 24278, - 14264, 18674, 21123, 23071, - 15117, 16841, 19239, 23118, - 13762, 15782, 20478, 23230, - 14111, 15949, 20058, 22354, - 14990, 16738, 21139, 23492, - 13735, 16971, 19026, 22158, - 14676, 17314, 20232, 22807, - 16196, 18146, 20459, 22339, - 14747, 17258, 19315, 22437, - 14973, 17778, 20692, 23367, - 15715, 17472, 20385, 22349, - 15702, 18228, 20829, 23410, - 14428, 16188, 20541, 23630, - 16824, 19394, 21365, 23246, - 13069, 16392, 18900, 21121, - 12047, 16640, 19463, 21689, - 14757, 17433, 19659, 23125, - 15185, 16930, 19900, 22540, - 16026, 17725, 19618, 22399, - 16086, 18643, 21179, 23472, - 15462, 17248, 19102, 21196, - 17368, 20016, 22396, 24096, - 12340, 14475, 19665, 23362, - 13636, 16229, 19462, 22728, - 14096, 16211, 19591, 21635, - 12152, 14867, 19943, 22301, - 14492, 17503, 21002, 22728, - 14834, 16788, 19447, 21411, - 14650, 16433, 19326, 22308, - 14624, 16328, 19659, 23204, - 13888, 16572, 20665, 22488, - 12977, 16102, 18841, 22246, - 15523, 18431, 21757, 23738, - 14095, 16349, 18837, 20947, - 13266, 17809, 21088, 22839, - 15427, 18190, 20270, 23143, - 11859, 16753, 20935, 22486, - 12310, 17667, 21736, 23319, - 14021, 15926, 18702, 22002, - 12286, 15299, 19178, 21126, - 15703, 17491, 21039, 23151, - 12272, 14018, 18213, 22570, - 14817, 16364, 18485, 22598, - 17109, 19683, 21851, 23677, - 12657, 14903, 19039, 22061, - 14713, 16487, 20527, 22814, - 14635, 16726, 18763, 21715, - 15878, 18550, 20718, 22906 -}; - -const int16_t WebRtcIlbcfix_kLsfDimCb[LSF_NSPLIT] = {3, 3, 4}; -const int16_t WebRtcIlbcfix_kLsfSizeCb[LSF_NSPLIT] = {64,128,128}; - -const int16_t WebRtcIlbcfix_kLsfMean[LPC_FILTERORDER] = { - 2308, 3652, 5434, 7885, - 10255, 12559, 15160, 17513, - 20328, 22752}; - -const int16_t WebRtcIlbcfix_kLspMean[LPC_FILTERORDER] = { - 31476, 29565, 25819, 18725, 10276, - 1236, -9049, -17600, -25884, -30618 -}; - -/* Q14 */ -const int16_t WebRtcIlbcfix_kLsfWeight20ms[4] = {12288, 8192, 4096, 0}; -const int16_t WebRtcIlbcfix_kLsfWeight30ms[6] = {8192, 16384, 10923, 5461, 0, 0}; - -/* - cos(x) in Q15 - WebRtcIlbcfix_kCos[i] = cos(pi*i/64.0) - used in WebRtcIlbcfix_Lsp2Lsf() -*/ - -const int16_t WebRtcIlbcfix_kCos[64] = { - 32767, 32729, 32610, 32413, 32138, 31786, 31357, 30853, - 30274, 29622, 28899, 28106, 27246, 26320, 25330, 24279, - 23170, 22006, 20788, 19520, 18205, 16846, 15447, 14010, - 12540, 11039, 9512, 7962, 6393, 4808, 3212, 1608, - 0, -1608, -3212, -4808, -6393, -7962, -9512, -11039, - -12540, -14010, -15447, -16846, -18205, -19520, -20788, -22006, - 
-23170, -24279, -25330, -26320, -27246, -28106, -28899, -29622, - -30274, -30853, -31357, -31786, -32138, -32413, -32610, -32729 -}; - -/* - Derivative in Q19, used to interpolate between the - WebRtcIlbcfix_kCos[] values to get a more exact y = cos(x) -*/ -const int16_t WebRtcIlbcfix_kCosDerivative[64] = { - -632, -1893, -3150, -4399, -5638, -6863, -8072, -9261, - -10428, -11570, -12684, -13767, -14817, -15832, -16808, -17744, - -18637, -19486, -20287, -21039, -21741, -22390, -22986, -23526, - -24009, -24435, -24801, -25108, -25354, -25540, -25664, -25726, - -25726, -25664, -25540, -25354, -25108, -24801, -24435, -24009, - -23526, -22986, -22390, -21741, -21039, -20287, -19486, -18637, - -17744, -16808, -15832, -14817, -13767, -12684, -11570, -10428, - -9261, -8072, -6863, -5638, -4399, -3150, -1893, -632}; - -/* - Table in Q15, used for a2lsf conversion - WebRtcIlbcfix_kCosGrid[i] = cos((2*pi*i)/(float)(2*COS_GRID_POINTS)); -*/ - -const int16_t WebRtcIlbcfix_kCosGrid[COS_GRID_POINTS + 1] = { - 32760, 32723, 32588, 32364, 32051, 31651, 31164, 30591, - 29935, 29196, 28377, 27481, 26509, 25465, 24351, 23170, - 21926, 20621, 19260, 17846, 16384, 14876, 13327, 11743, - 10125, 8480, 6812, 5126, 3425, 1714, 0, -1714, -3425, - -5126, -6812, -8480, -10125, -11743, -13327, -14876, - -16384, -17846, -19260, -20621, -21926, -23170, -24351, - -25465, -26509, -27481, -28377, -29196, -29935, -30591, - -31164, -31651, -32051, -32364, -32588, -32723, -32760 -}; - -/* - Derivative of y = acos(x) in Q12 - used in WebRtcIlbcfix_Lsp2Lsf() -*/ - -const int16_t WebRtcIlbcfix_kAcosDerivative[64] = { - -26887, -8812, -5323, -3813, -2979, -2444, -2081, -1811, - -1608, -1450, -1322, -1219, -1132, -1059, -998, -946, - -901, -861, -827, -797, -772, -750, -730, -713, - -699, -687, -677, -668, -662, -657, -654, -652, - -652, -654, -657, -662, -668, -677, -687, -699, - -713, -730, -750, -772, -797, -827, -861, -901, - -946, -998, -1059, -1132, -1219, -1322, -1450, -1608, - -1811, -2081, -2444, -2979, -3813, -5323, -8812, -26887 -}; - - -/* Tables for quantization of start state */ - -/* State quantization tables */ -const int16_t WebRtcIlbcfix_kStateSq3[8] = { /* Values in Q13 */ - -30473, -17838, -9257, -2537, - 3639, 10893, 19958, 32636 -}; - -/* This table defines the limits for the selection of the freqg - less or equal than value 0 => index = 0 - less or equal than value k => index = k -*/ -const int32_t WebRtcIlbcfix_kChooseFrgQuant[64] = { - 118, 163, 222, 305, 425, 604, - 851, 1174, 1617, 2222, 3080, 4191, - 5525, 7215, 9193, 11540, 14397, 17604, - 21204, 25209, 29863, 35720, 42531, 50375, - 59162, 68845, 80108, 93754, 110326, 129488, - 150654, 174328, 201962, 233195, 267843, 308239, - 354503, 405988, 464251, 531550, 608652, 697516, - 802526, 928793, 1080145, 1258120, 1481106, 1760881, - 2111111, 2546619, 3078825, 3748642, 4563142, 5573115, - 6887601, 8582108, 10797296, 14014513, 18625760, 25529599, - 37302935, 58819185, 109782723, WEBRTC_SPL_WORD32_MAX -}; - -const int16_t WebRtcIlbcfix_kScale[64] = { - /* Values in Q16 */ - 29485, 25003, 21345, 18316, 15578, 13128, 10973, 9310, 7955, - 6762, 5789, 4877, 4255, 3699, 3258, 2904, 2595, 2328, - 2123, 1932, 1785, 1631, 1493, 1370, 1260, 1167, 1083, - /* Values in Q21 */ - 32081, 29611, 27262, 25229, 23432, 21803, 20226, 18883, 17609, - 16408, 15311, 14327, 13390, 12513, 11693, 10919, 10163, 9435, - 8739, 8100, 7424, 6813, 6192, 5648, 5122, 4639, 4207, 3798, - 3404, 3048, 2706, 2348, 2036, 1713, 1393, 1087, 747 -}; - -/*frgq in fixpoint, but already computed like 
this: - for(i=0; i<64; i++){ - a = (pow(10,frgq[i])/4.5); - WebRtcIlbcfix_kFrgQuantMod[i] = round(a); - } - - Value 0 :36 in Q8 - 37:58 in Q5 - 59:63 in Q3 -*/ -const int16_t WebRtcIlbcfix_kFrgQuantMod[64] = { - /* First 37 values in Q8 */ - 569, 671, 786, 916, 1077, 1278, - 1529, 1802, 2109, 2481, 2898, 3440, - 3943, 4535, 5149, 5778, 6464, 7208, - 7904, 8682, 9397, 10285, 11240, 12246, - 13313, 14382, 15492, 16735, 18131, 19693, - 21280, 22912, 24624, 26544, 28432, 30488, - 32720, - /* 22 values in Q5 */ - 4383, 4684, 5012, 5363, 5739, 6146, - 6603, 7113, 7679, 8285, 9040, 9850, - 10838, 11882, 13103, 14467, 15950, 17669, - 19712, 22016, 24800, 28576, - /* 5 values in Q3 */ - 8240, 9792, 12040, 15440, 22472 -}; - -/* Constants for codebook search and creation */ - -/* Expansion filter to get additional cb section. - * Q12 and reversed compared to flp - */ -const int16_t WebRtcIlbcfix_kCbFiltersRev[CB_FILTERLEN]={ - -140, 446, -755, 3302, 2922, -590, 343, -138}; - -/* Weighting coefficients for short lags. - * [0.2 0.4 0.6 0.8] in Q15 */ -const int16_t WebRtcIlbcfix_kAlpha[4]={ - 6554, 13107, 19661, 26214}; - -/* Ranges for search and filters at different subframes */ - -const size_t WebRtcIlbcfix_kSearchRange[5][CB_NSTAGES]={ - {58,58,58}, {108,44,44}, {108,108,108}, {108,108,108}, {108,108,108}}; - -const size_t WebRtcIlbcfix_kFilterRange[5]={63, 85, 125, 147, 147}; - -/* Gain Quantization for the codebook gains of the 3 stages */ - -/* Q14 (one extra value (max int16_t) to simplify for the search) */ -const int16_t WebRtcIlbcfix_kGainSq3[9]={ - -16384, -10813, -5407, 0, 4096, 8192, - 12288, 16384, 32767}; - -/* Q14 (one extra value (max int16_t) to simplify for the search) */ -const int16_t WebRtcIlbcfix_kGainSq4[17]={ - -17203, -14746, -12288, -9830, -7373, -4915, - -2458, 0, 2458, 4915, 7373, 9830, - 12288, 14746, 17203, 19661, 32767}; - -/* Q14 (one extra value (max int16_t) to simplify for the search) */ -const int16_t WebRtcIlbcfix_kGainSq5[33]={ - 614, 1229, 1843, 2458, 3072, 3686, - 4301, 4915, 5530, 6144, 6758, 7373, - 7987, 8602, 9216, 9830, 10445, 11059, - 11674, 12288, 12902, 13517, 14131, 14746, - 15360, 15974, 16589, 17203, 17818, 18432, - 19046, 19661, 32767}; - -/* Q14 gain_sq5Tbl squared in Q14 */ -const int16_t WebRtcIlbcfix_kGainSq5Sq[32] = { - 23, 92, 207, 368, 576, 829, - 1129, 1474, 1866, 2304, 2787, 3317, - 3893, 4516, 5184, 5897, 6658, 7464, - 8318, 9216, 10160, 11151, 12187, 13271, - 14400, 15574, 16796, 18062, 19377, 20736, - 22140, 23593 -}; - -const int16_t* const WebRtcIlbcfix_kGain[3] = -{WebRtcIlbcfix_kGainSq5, WebRtcIlbcfix_kGainSq4, WebRtcIlbcfix_kGainSq3}; - - -/* Tables for the Enhancer, using upsamling factor 4 (ENH_UPS0 = 4) */ - -const int16_t WebRtcIlbcfix_kEnhPolyPhaser[ENH_UPS0][ENH_FLO_MULT2_PLUS1]={ - {0, 0, 0, 4096, 0, 0, 0}, - {64, -315, 1181, 3531, -436, 77, -64}, - {97, -509, 2464, 2464, -509, 97, -97}, - {77, -436, 3531, 1181, -315, 64, -77} -}; - -const int16_t WebRtcIlbcfix_kEnhWt[3] = { - 4800, 16384, 27968 /* Q16 */ -}; - -const size_t WebRtcIlbcfix_kEnhPlocs[ENH_NBLOCKS_TOT] = { - 160, 480, 800, 1120, 1440, 1760, 2080, 2400 /* Q(-2) */ -}; - -/* PLC table */ - -const int16_t WebRtcIlbcfix_kPlcPerSqr[6] = { /* Grid points for square of periodiciy in Q15 */ - 839, 1343, 2048, 2998, 4247, 5849 -}; - -const int16_t WebRtcIlbcfix_kPlcPitchFact[6] = { /* Value of y=(x^4-0.4)/(0.7-0.4) in grid points in Q15 */ - 0, 5462, 10922, 16384, 21846, 27306 -}; - -const int16_t WebRtcIlbcfix_kPlcPfSlope[6] = { /* Slope of y=(x^4-0.4)/(0.7-0.4) in Q11 
*/ - 26667, 18729, 13653, 10258, 7901, 6214 -}; diff --git a/modules/audio_coding/codecs/ilbc/constants.h b/modules/audio_coding/codecs/ilbc/constants.h deleted file mode 100644 index a8645c00db..0000000000 --- a/modules/audio_coding/codecs/ilbc/constants.h +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - constants.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CONSTANTS_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CONSTANTS_H_ - -#include -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/* high pass filters */ - -extern const int16_t WebRtcIlbcfix_kHpInCoefs[]; -extern const int16_t WebRtcIlbcfix_kHpOutCoefs[]; - -/* Window for start state decision */ -extern const int16_t WebRtcIlbcfix_kStartSequenceEnrgWin[]; - -/* low pass filter used for downsampling */ -extern const int16_t WebRtcIlbcfix_kLpFiltCoefs[]; - -/* LPC analysis and quantization */ - -extern const int16_t WebRtcIlbcfix_kLpcWin[]; -extern const int16_t WebRtcIlbcfix_kLpcAsymWin[]; -extern const int32_t WebRtcIlbcfix_kLpcLagWin[]; -extern const int16_t WebRtcIlbcfix_kLpcChirpSyntDenum[]; -extern const int16_t WebRtcIlbcfix_kLpcChirpWeightDenum[]; -extern const int16_t WebRtcIlbcfix_kLsfDimCb[]; -extern const int16_t WebRtcIlbcfix_kLsfSizeCb[]; -extern const int16_t WebRtcIlbcfix_kLsfCb[]; -extern const int16_t WebRtcIlbcfix_kLsfWeight20ms[]; -extern const int16_t WebRtcIlbcfix_kLsfWeight30ms[]; -extern const int16_t WebRtcIlbcfix_kLsfMean[]; -extern const int16_t WebRtcIlbcfix_kLspMean[]; -extern const int16_t WebRtcIlbcfix_kCos[]; -extern const int16_t WebRtcIlbcfix_kCosDerivative[]; -extern const int16_t WebRtcIlbcfix_kCosGrid[]; -extern const int16_t WebRtcIlbcfix_kAcosDerivative[]; - -/* state quantization tables */ - -extern const int16_t WebRtcIlbcfix_kStateSq3[]; -extern const int32_t WebRtcIlbcfix_kChooseFrgQuant[]; -extern const int16_t WebRtcIlbcfix_kScale[]; -extern const int16_t WebRtcIlbcfix_kFrgQuantMod[]; - -/* Ranges for search and filters at different subframes */ - -extern const size_t WebRtcIlbcfix_kSearchRange[5][CB_NSTAGES]; -extern const size_t WebRtcIlbcfix_kFilterRange[]; - -/* gain quantization tables */ - -extern const int16_t WebRtcIlbcfix_kGainSq3[]; -extern const int16_t WebRtcIlbcfix_kGainSq4[]; -extern const int16_t WebRtcIlbcfix_kGainSq5[]; -extern const int16_t WebRtcIlbcfix_kGainSq5Sq[]; -extern const int16_t* const WebRtcIlbcfix_kGain[]; - -/* adaptive codebook definitions */ - -extern const int16_t WebRtcIlbcfix_kCbFiltersRev[]; -extern const int16_t WebRtcIlbcfix_kAlpha[]; - -/* enhancer definitions */ - -extern const int16_t WebRtcIlbcfix_kEnhPolyPhaser[ENH_UPS0] - [ENH_FLO_MULT2_PLUS1]; -extern const int16_t WebRtcIlbcfix_kEnhWt[]; -extern const size_t WebRtcIlbcfix_kEnhPlocs[]; - -/* PLC tables */ - -extern const int16_t WebRtcIlbcfix_kPlcPerSqr[]; -extern const int16_t WebRtcIlbcfix_kPlcPitchFact[]; -extern const int16_t WebRtcIlbcfix_kPlcPfSlope[]; - -#endif diff --git 
a/modules/audio_coding/codecs/ilbc/create_augmented_vec.c b/modules/audio_coding/codecs/ilbc/create_augmented_vec.c deleted file mode 100644 index 7e21faee6c..0000000000 --- a/modules/audio_coding/codecs/ilbc/create_augmented_vec.c +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CreateAugmentedVec.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/create_augmented_vec.h" - -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "rtc_base/sanitizer.h" - -/*----------------------------------------------------------------* - * Recreate a specific codebook vector from the augmented part. - * - *----------------------------------------------------------------*/ - -void WebRtcIlbcfix_CreateAugmentedVec( - size_t index, /* (i) Index for the augmented vector to be - created */ - const int16_t* buffer, /* (i) Pointer to the end of the codebook memory - that is used for creation of the augmented - codebook */ - int16_t* cbVec) { /* (o) The constructed codebook vector */ - size_t ilow; - const int16_t *ppo, *ppi; - int16_t cbVecTmp[4]; - /* Interpolation starts 4 elements before cbVec+index, but must not start - outside `cbVec`; clamping interp_len to stay within `cbVec`. - */ - size_t interp_len = WEBRTC_SPL_MIN(index, 4); - - rtc_MsanCheckInitialized(buffer - index - interp_len, sizeof(buffer[0]), - index + interp_len); - - ilow = index - interp_len; - - /* copy the first noninterpolated part */ - ppo = buffer-index; - WEBRTC_SPL_MEMCPY_W16(cbVec, ppo, index); - - /* interpolation */ - ppo = buffer - interp_len; - ppi = buffer - index - interp_len; - - /* perform cbVec[ilow+k] = ((ppi[k]*alphaTbl[k])>>15) + - ((ppo[k]*alphaTbl[interp_len-1-k])>>15); - for k = 0..interp_len-1 - */ - WebRtcSpl_ElementwiseVectorMult(&cbVec[ilow], ppi, WebRtcIlbcfix_kAlpha, - interp_len, 15); - WebRtcSpl_ReverseOrderMultArrayElements( - cbVecTmp, ppo, &WebRtcIlbcfix_kAlpha[interp_len - 1], interp_len, 15); - WebRtcSpl_AddVectorsAndShift(&cbVec[ilow], &cbVec[ilow], cbVecTmp, interp_len, - 0); - - /* copy the second noninterpolated part */ - ppo = buffer - index; - /* `tempbuff2` is declared in WebRtcIlbcfix_GetCbVec and is SUBL+5 elements - long. `buffer` points one element past the end of that vector, i.e., at - tempbuff2+SUBL+5. Since ppo=buffer-index, we cannot read any more than - `index` elements from `ppo`. - - `cbVec` is declared to be SUBL elements long in WebRtcIlbcfix_CbConstruct. - Therefore, we can only write SUBL-index elements to cbVec+index. - - These two conditions limit the number of elements to copy. 
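The comment above spells out the Q15 cross-fade that the WebRtcSpl_ElementwiseVectorMult, WebRtcSpl_ReverseOrderMultArrayElements and WebRtcSpl_AddVectorsAndShift calls implement. As a minimal standalone sketch (the helper name and argument names are illustrative, not part of the deleted source), the same per-sample operation is:

#include <stddef.h>
#include <stdint.h>

/* Q15 cross-fade of two sample streams using weights such as the
   WebRtcIlbcfix_kAlpha table ([0.2 0.4 0.6 0.8] in Q15):
   out[k] = ((in_new[k]*alpha[k])>>15) + ((in_old[k]*alpha[len-1-k])>>15) */
static void CrossfadeQ15(int16_t* out, const int16_t* in_new,
                         const int16_t* in_old, const int16_t* alpha,
                         size_t len) {
  for (size_t k = 0; k < len; k++) {
    out[k] = (int16_t)((((int32_t)in_new[k] * alpha[k]) >> 15) +
                       (((int32_t)in_old[k] * alpha[len - 1 - k]) >> 15));
  }
}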
- */ - WEBRTC_SPL_MEMCPY_W16(cbVec+index, ppo, WEBRTC_SPL_MIN(SUBL-index, index)); -} diff --git a/modules/audio_coding/codecs/ilbc/create_augmented_vec.h b/modules/audio_coding/codecs/ilbc/create_augmented_vec.h deleted file mode 100644 index 5bed469a12..0000000000 --- a/modules/audio_coding/codecs/ilbc/create_augmented_vec.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_CreateAugmentedVec.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CREATE_AUGMENTED_VEC_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CREATE_AUGMENTED_VEC_H_ - -#include -#include - -/*----------------------------------------------------------------* - * Recreate a specific codebook vector from the augmented part. - * - *----------------------------------------------------------------*/ - -void WebRtcIlbcfix_CreateAugmentedVec( - size_t index, /* (i) Index for the augmented vector to be - created */ - const int16_t* buffer, /* (i) Pointer to the end of the codebook memory - that is used for creation of the augmented - codebook */ - int16_t* cbVec); /* (o) The construced codebook vector */ - -#endif diff --git a/modules/audio_coding/codecs/ilbc/decode.c b/modules/audio_coding/codecs/ilbc/decode.c deleted file mode 100644 index d7621d5b65..0000000000 --- a/modules/audio_coding/codecs/ilbc/decode.c +++ /dev/null @@ -1,261 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Decode.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/decode.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/decode_residual.h" -#include "modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/do_plc.h" -#include "modules/audio_coding/codecs/ilbc/enhancer_interface.h" -#include "modules/audio_coding/codecs/ilbc/hp_output.h" -#include "modules/audio_coding/codecs/ilbc/index_conv_dec.h" -#include "modules/audio_coding/codecs/ilbc/init_decode.h" -#include "modules/audio_coding/codecs/ilbc/lsf_check.h" -#include "modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h" -#include "modules/audio_coding/codecs/ilbc/unpack_bits.h" -#include "modules/audio_coding/codecs/ilbc/xcorr_coef.h" -#include "rtc_base/system/arch.h" - -#ifndef WEBRTC_ARCH_BIG_ENDIAN -#include "modules/audio_coding/codecs/ilbc/swap_bytes.h" -#endif - -/*----------------------------------------------------------------* - * main decoder function - *---------------------------------------------------------------*/ - -int WebRtcIlbcfix_DecodeImpl( - int16_t *decblock, /* (o) decoded signal block */ - const uint16_t *bytes, /* (i) encoded signal bits */ - IlbcDecoder *iLBCdec_inst, /* (i/o) the decoder state - structure */ - int16_t mode /* (i) 0: bad packet, PLC, - 1: normal */ - ) { - const int old_mode = iLBCdec_inst->mode; - const int old_use_enhancer = iLBCdec_inst->use_enhancer; - - size_t i; - int16_t order_plus_one; - - int16_t last_bit; - int16_t *data; - /* Stack based */ - int16_t decresidual[BLOCKL_MAX]; - int16_t PLCresidual[BLOCKL_MAX + LPC_FILTERORDER]; - int16_t syntdenum[NSUB_MAX*(LPC_FILTERORDER+1)]; - int16_t PLClpc[LPC_FILTERORDER + 1]; -#ifndef WEBRTC_ARCH_BIG_ENDIAN - uint16_t swapped[NO_OF_WORDS_30MS]; -#endif - iLBC_bits *iLBCbits_inst = (iLBC_bits*)PLCresidual; - - /* Reuse some buffers that are non overlapping in order to save stack memory */ - data = &PLCresidual[LPC_FILTERORDER]; - - if (mode) { /* the data are good */ - - /* decode data */ - - /* Unpacketize bits into parameters */ - -#ifndef WEBRTC_ARCH_BIG_ENDIAN - WebRtcIlbcfix_SwapBytes(bytes, iLBCdec_inst->no_of_words, swapped); - last_bit = WebRtcIlbcfix_UnpackBits(swapped, iLBCbits_inst, iLBCdec_inst->mode); -#else - last_bit = WebRtcIlbcfix_UnpackBits(bytes, iLBCbits_inst, iLBCdec_inst->mode); -#endif - - /* Check for bit errors */ - if (iLBCbits_inst->startIdx<1) - mode = 0; - if ((iLBCdec_inst->mode==20) && (iLBCbits_inst->startIdx>3)) - mode = 0; - if ((iLBCdec_inst->mode==30) && (iLBCbits_inst->startIdx>5)) - mode = 0; - if (last_bit==1) - mode = 0; - - if (mode) { /* No bit errors was detected, continue decoding */ - /* Stack based */ - int16_t lsfdeq[LPC_FILTERORDER*LPC_N_MAX]; - int16_t weightdenum[(LPC_FILTERORDER + 1)*NSUB_MAX]; - - /* adjust index */ - WebRtcIlbcfix_IndexConvDec(iLBCbits_inst->cb_index); - - /* decode the lsf */ - WebRtcIlbcfix_SimpleLsfDeQ(lsfdeq, (int16_t*)(iLBCbits_inst->lsf), iLBCdec_inst->lpc_n); - WebRtcIlbcfix_LsfCheck(lsfdeq, LPC_FILTERORDER, iLBCdec_inst->lpc_n); - WebRtcIlbcfix_DecoderInterpolateLsp(syntdenum, weightdenum, - lsfdeq, LPC_FILTERORDER, iLBCdec_inst); - - /* Decode the residual using the cb and gain indexes */ - if 
(!WebRtcIlbcfix_DecodeResidual(iLBCdec_inst, iLBCbits_inst, - decresidual, syntdenum)) - goto error; - - /* preparing the plc for a future loss! */ - WebRtcIlbcfix_DoThePlc( - PLCresidual, PLClpc, 0, decresidual, - syntdenum + (LPC_FILTERORDER + 1) * (iLBCdec_inst->nsub - 1), - iLBCdec_inst->last_lag, iLBCdec_inst); - - /* Use the output from doThePLC */ - WEBRTC_SPL_MEMCPY_W16(decresidual, PLCresidual, iLBCdec_inst->blockl); - } - - } - - if (mode == 0) { - /* the data is bad (either a PLC call - * was made or a bit error was detected) - */ - - /* packet loss conceal */ - - WebRtcIlbcfix_DoThePlc(PLCresidual, PLClpc, 1, decresidual, syntdenum, - iLBCdec_inst->last_lag, iLBCdec_inst); - - WEBRTC_SPL_MEMCPY_W16(decresidual, PLCresidual, iLBCdec_inst->blockl); - - order_plus_one = LPC_FILTERORDER + 1; - - for (i = 0; i < iLBCdec_inst->nsub; i++) { - WEBRTC_SPL_MEMCPY_W16(syntdenum+(i*order_plus_one), - PLClpc, order_plus_one); - } - } - - if ((*iLBCdec_inst).use_enhancer == 1) { /* Enhancer activated */ - - /* Update the filter and filter coefficients if there was a packet loss */ - if (iLBCdec_inst->prev_enh_pl==2) { - for (i=0;insub;i++) { - WEBRTC_SPL_MEMCPY_W16(&(iLBCdec_inst->old_syntdenum[i*(LPC_FILTERORDER+1)]), - syntdenum, (LPC_FILTERORDER+1)); - } - } - - /* post filtering */ - (*iLBCdec_inst).last_lag = - WebRtcIlbcfix_EnhancerInterface(data, decresidual, iLBCdec_inst); - - /* synthesis filtering */ - - /* Set up the filter state */ - WEBRTC_SPL_MEMCPY_W16(&data[-LPC_FILTERORDER], iLBCdec_inst->syntMem, LPC_FILTERORDER); - - if (iLBCdec_inst->mode==20) { - /* Enhancer has 40 samples delay */ - i=0; - WebRtcSpl_FilterARFastQ12( - data, data, - iLBCdec_inst->old_syntdenum + (i+iLBCdec_inst->nsub-1)*(LPC_FILTERORDER+1), - LPC_FILTERORDER+1, SUBL); - - for (i=1; i < iLBCdec_inst->nsub; i++) { - WebRtcSpl_FilterARFastQ12( - data+i*SUBL, data+i*SUBL, - syntdenum+(i-1)*(LPC_FILTERORDER+1), - LPC_FILTERORDER+1, SUBL); - } - - } else if (iLBCdec_inst->mode==30) { - /* Enhancer has 80 samples delay */ - for (i=0; i < 2; i++) { - WebRtcSpl_FilterARFastQ12( - data+i*SUBL, data+i*SUBL, - iLBCdec_inst->old_syntdenum + (i+4)*(LPC_FILTERORDER+1), - LPC_FILTERORDER+1, SUBL); - } - for (i=2; i < iLBCdec_inst->nsub; i++) { - WebRtcSpl_FilterARFastQ12( - data+i*SUBL, data+i*SUBL, - syntdenum+(i-2)*(LPC_FILTERORDER+1), - LPC_FILTERORDER+1, SUBL); - } - } - - /* Save the filter state */ - WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->syntMem, &data[iLBCdec_inst->blockl-LPC_FILTERORDER], LPC_FILTERORDER); - - } else { /* Enhancer not activated */ - size_t lag; - - /* Find last lag (since the enhancer is not called to give this info) */ - lag = 20; - if (iLBCdec_inst->mode==20) { - lag = WebRtcIlbcfix_XcorrCoef( - &decresidual[iLBCdec_inst->blockl-60], - &decresidual[iLBCdec_inst->blockl-60-lag], - 60, - 80, lag, -1); - } else { - lag = WebRtcIlbcfix_XcorrCoef( - &decresidual[iLBCdec_inst->blockl-ENH_BLOCKL], - &decresidual[iLBCdec_inst->blockl-ENH_BLOCKL-lag], - ENH_BLOCKL, - 100, lag, -1); - } - - /* Store lag (it is needed if next packet is lost) */ - (*iLBCdec_inst).last_lag = lag; - - /* copy data and run synthesis filter */ - WEBRTC_SPL_MEMCPY_W16(data, decresidual, iLBCdec_inst->blockl); - - /* Set up the filter state */ - WEBRTC_SPL_MEMCPY_W16(&data[-LPC_FILTERORDER], iLBCdec_inst->syntMem, LPC_FILTERORDER); - - for (i=0; i < iLBCdec_inst->nsub; i++) { - WebRtcSpl_FilterARFastQ12( - data+i*SUBL, data+i*SUBL, - syntdenum + i*(LPC_FILTERORDER+1), - LPC_FILTERORDER+1, SUBL); - } - - /* Save the filter state 
*/ - WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->syntMem, &data[iLBCdec_inst->blockl-LPC_FILTERORDER], LPC_FILTERORDER); - } - - WEBRTC_SPL_MEMCPY_W16(decblock,data,iLBCdec_inst->blockl); - - /* High pass filter the signal (with upscaling a factor 2 and saturation) */ - WebRtcIlbcfix_HpOutput(decblock, (int16_t*)WebRtcIlbcfix_kHpOutCoefs, - iLBCdec_inst->hpimemy, iLBCdec_inst->hpimemx, - iLBCdec_inst->blockl); - - WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->old_syntdenum, - syntdenum, iLBCdec_inst->nsub*(LPC_FILTERORDER+1)); - - iLBCdec_inst->prev_enh_pl=0; - - if (mode==0) { /* PLC was used */ - iLBCdec_inst->prev_enh_pl=1; - } - - return 0; // Success. - -error: - // The decoder got sick from eating that data. Reset it and return. - WebRtcIlbcfix_InitDecode(iLBCdec_inst, old_mode, old_use_enhancer); - return -1; // Error -} diff --git a/modules/audio_coding/codecs/ilbc/decode.h b/modules/audio_coding/codecs/ilbc/decode.h deleted file mode 100644 index a7d2910115..0000000000 --- a/modules/audio_coding/codecs/ilbc/decode.h +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Decode.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODE_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODE_H_ - -#include - -#include "absl/base/attributes.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * main decoder function - *---------------------------------------------------------------*/ - -// Returns 0 on success, -1 on error. -ABSL_MUST_USE_RESULT -int WebRtcIlbcfix_DecodeImpl( - int16_t* decblock, /* (o) decoded signal block */ - const uint16_t* bytes, /* (i) encoded signal bits */ - IlbcDecoder* iLBCdec_inst, /* (i/o) the decoder state - structure */ - int16_t mode /* (i) 0: bad packet, PLC, - 1: normal */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/decode_residual.c b/modules/audio_coding/codecs/ilbc/decode_residual.c deleted file mode 100644 index a9668e2889..0000000000 --- a/modules/audio_coding/codecs/ilbc/decode_residual.c +++ /dev/null @@ -1,185 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_DecodeResidual.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/decode_residual.h" - -#include - -#include "modules/audio_coding/codecs/ilbc/cb_construct.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/do_plc.h" -#include "modules/audio_coding/codecs/ilbc/enhancer_interface.h" -#include "modules/audio_coding/codecs/ilbc/index_conv_dec.h" -#include "modules/audio_coding/codecs/ilbc/lsf_check.h" -#include "modules/audio_coding/codecs/ilbc/state_construct.h" -#include "modules/audio_coding/codecs/ilbc/xcorr_coef.h" - -/*----------------------------------------------------------------* - * frame residual decoder function (subrutine to iLBC_decode) - *---------------------------------------------------------------*/ - -bool WebRtcIlbcfix_DecodeResidual( - IlbcDecoder *iLBCdec_inst, - /* (i/o) the decoder state structure */ - iLBC_bits *iLBC_encbits, /* (i/o) Encoded bits, which are used - for the decoding */ - int16_t *decresidual, /* (o) decoded residual frame */ - int16_t *syntdenum /* (i) the decoded synthesis filter - coefficients */ - ) { - size_t meml_gotten, diff, start_pos; - size_t subcount, subframe; - int16_t *reverseDecresidual = iLBCdec_inst->enh_buf; /* Reversed decoded data, used for decoding backwards in time (reuse memory in state) */ - int16_t *memVec = iLBCdec_inst->prevResidual; /* Memory for codebook and filter state (reuse memory in state) */ - int16_t *mem = &memVec[CB_HALFFILTERLEN]; /* Memory for codebook */ - - diff = STATE_LEN - iLBCdec_inst->state_short_len; - - if (iLBC_encbits->state_first == 1) { - start_pos = (iLBC_encbits->startIdx-1)*SUBL; - } else { - start_pos = (iLBC_encbits->startIdx-1)*SUBL + diff; - } - - /* decode scalar part of start state */ - - WebRtcIlbcfix_StateConstruct(iLBC_encbits->idxForMax, - iLBC_encbits->idxVec, &syntdenum[(iLBC_encbits->startIdx-1)*(LPC_FILTERORDER+1)], - &decresidual[start_pos], iLBCdec_inst->state_short_len - ); - - if (iLBC_encbits->state_first) { /* put adaptive part in the end */ - - /* setup memory */ - - WebRtcSpl_MemSetW16(mem, 0, CB_MEML - iLBCdec_inst->state_short_len); - WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-iLBCdec_inst->state_short_len, decresidual+start_pos, - iLBCdec_inst->state_short_len); - - /* construct decoded vector */ - - if (!WebRtcIlbcfix_CbConstruct( - &decresidual[start_pos + iLBCdec_inst->state_short_len], - iLBC_encbits->cb_index, iLBC_encbits->gain_index, - mem + CB_MEML - ST_MEM_L_TBL, ST_MEM_L_TBL, diff)) - return false; // Error. - - } - else {/* put adaptive part in the beginning */ - - /* setup memory */ - - meml_gotten = iLBCdec_inst->state_short_len; - WebRtcSpl_MemCpyReversedOrder(mem+CB_MEML-1, - decresidual+start_pos, meml_gotten); - WebRtcSpl_MemSetW16(mem, 0, CB_MEML - meml_gotten); - - /* construct decoded vector */ - - if (!WebRtcIlbcfix_CbConstruct(reverseDecresidual, iLBC_encbits->cb_index, - iLBC_encbits->gain_index, - mem + CB_MEML - ST_MEM_L_TBL, ST_MEM_L_TBL, - diff)) - return false; // Error. 
- - /* get decoded residual from reversed vector */ - - WebRtcSpl_MemCpyReversedOrder(&decresidual[start_pos-1], - reverseDecresidual, diff); - } - - /* counter for predicted subframes */ - - subcount=1; - - /* forward prediction of subframes */ - - if (iLBCdec_inst->nsub > iLBC_encbits->startIdx + 1) { - - /* setup memory */ - WebRtcSpl_MemSetW16(mem, 0, CB_MEML-STATE_LEN); - WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-STATE_LEN, - decresidual+(iLBC_encbits->startIdx-1)*SUBL, STATE_LEN); - - /* loop over subframes to encode */ - - size_t Nfor = iLBCdec_inst->nsub - iLBC_encbits->startIdx - 1; - for (subframe=0; subframe<Nfor; subframe++) { - - /* construct decoded vector */ - if (!WebRtcIlbcfix_CbConstruct( - &decresidual[(iLBC_encbits->startIdx + 1 + subframe) * SUBL], - iLBC_encbits->cb_index + subcount * CB_NSTAGES, - iLBC_encbits->gain_index + subcount * CB_NSTAGES, mem, MEM_LF_TBL, - SUBL)) - return false; // Error; - - /* update memory */ - memmove(mem, mem + SUBL, (CB_MEML - SUBL) * sizeof(*mem)); - WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-SUBL, - &decresidual[(iLBC_encbits->startIdx+1+subframe)*SUBL], SUBL); - - subcount++; - } - - } - - /* backward prediction of subframes */ - - if (iLBC_encbits->startIdx > 1) { - - /* setup memory */ - - meml_gotten = SUBL*(iLBCdec_inst->nsub+1-iLBC_encbits->startIdx); - if( meml_gotten > CB_MEML ) { - meml_gotten=CB_MEML; - } - - WebRtcSpl_MemCpyReversedOrder(mem+CB_MEML-1, - decresidual+(iLBC_encbits->startIdx-1)*SUBL, meml_gotten); - WebRtcSpl_MemSetW16(mem, 0, CB_MEML - meml_gotten); - - /* loop over subframes to decode */ - - size_t Nback = iLBC_encbits->startIdx - 1; - for (subframe=0; subframe<Nback; subframe++) { - - /* construct decoded vector */ - if (!WebRtcIlbcfix_CbConstruct( - &reverseDecresidual[subframe * SUBL], - iLBC_encbits->cb_index + subcount * CB_NSTAGES, - iLBC_encbits->gain_index + subcount * CB_NSTAGES, mem, MEM_LF_TBL, - SUBL)) - return false; // Error. - - /* update memory */ - memmove(mem, mem + SUBL, (CB_MEML - SUBL) * sizeof(*mem)); - WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-SUBL, - &reverseDecresidual[subframe*SUBL], SUBL); - - subcount++; - } - - /* get decoded residual from reversed vector */ - WebRtcSpl_MemCpyReversedOrder(decresidual+SUBL*Nback-1, - reverseDecresidual, SUBL*Nback); - } - - return true; // Success. -} diff --git a/modules/audio_coding/codecs/ilbc/decode_residual.h b/modules/audio_coding/codecs/ilbc/decode_residual.h deleted file mode 100644 index d079577661..0000000000 --- a/modules/audio_coding/codecs/ilbc/decode_residual.h +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_DecodeResidual.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODE_RESIDUAL_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODE_RESIDUAL_H_ - -#include <stdbool.h> -#include <stddef.h> -#include <stdint.h> - -#include "absl/base/attributes.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * frame residual decoder function (subrutine to iLBC_decode) - *---------------------------------------------------------------*/ - -// Returns true on success, false on failure. In case of failure, the decoder -// state may be corrupted and needs resetting.
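Since a false return can leave the decoder state corrupted, a caller is expected to re-initialize the decoder before reporting an error, which is exactly what WebRtcIlbcfix_DecodeImpl above does on its error path. A minimal sketch of that calling pattern follows; the wrapper name is hypothetical, while the reset call mirrors the decode.c error handling:

#include "modules/audio_coding/codecs/ilbc/decode_residual.h"
#include "modules/audio_coding/codecs/ilbc/defines.h"
#include "modules/audio_coding/codecs/ilbc/init_decode.h"

/* Decode a residual frame; on failure, restore a clean decoder state. */
static int DecodeResidualOrReset(IlbcDecoder* dec, iLBC_bits* bits,
                                 int16_t* decresidual, int16_t* syntdenum) {
  /* Remember the settings needed to re-create a clean decoder. */
  const int old_mode = dec->mode;
  const int old_use_enhancer = dec->use_enhancer;
  if (!WebRtcIlbcfix_DecodeResidual(dec, bits, decresidual, syntdenum)) {
    /* The state may be corrupted: reset before reporting the error. */
    WebRtcIlbcfix_InitDecode(dec, old_mode, old_use_enhancer);
    return -1;
  }
  return 0;
}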
-ABSL_MUST_USE_RESULT -bool WebRtcIlbcfix_DecodeResidual( - IlbcDecoder* iLBCdec_inst, /* (i/o) the decoder state structure */ - iLBC_bits* iLBC_encbits, /* (i/o) Encoded bits, which are used - for the decoding */ - int16_t* decresidual, /* (o) decoded residual frame */ - int16_t* syntdenum /* (i) the decoded synthesis filter - coefficients */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c b/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c deleted file mode 100644 index d96bb9b2e9..0000000000 --- a/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_DecoderInterpolateLsp.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h" - -#include "modules/audio_coding/codecs/ilbc/bw_expand.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h" - -/*----------------------------------------------------------------* - * obtain synthesis and weighting filters form lsf coefficients - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_DecoderInterpolateLsp( - int16_t *syntdenum, /* (o) synthesis filter coefficients */ - int16_t *weightdenum, /* (o) weighting denumerator - coefficients */ - int16_t *lsfdeq, /* (i) dequantized lsf coefficients */ - int16_t length, /* (i) length of lsf coefficient vector */ - IlbcDecoder *iLBCdec_inst - /* (i) the decoder state structure */ - ){ - size_t i; - int pos, lp_length; - int16_t lp[LPC_FILTERORDER + 1], *lsfdeq2; - - lsfdeq2 = lsfdeq + length; - lp_length = length + 1; - - if (iLBCdec_inst->mode==30) { - /* subframe 1: Interpolation between old and first LSF */ - - WebRtcIlbcfix_LspInterpolate2PolyDec(lp, (*iLBCdec_inst).lsfdeqold, lsfdeq, - WebRtcIlbcfix_kLsfWeight30ms[0], length); - WEBRTC_SPL_MEMCPY_W16(syntdenum,lp,lp_length); - WebRtcIlbcfix_BwExpand(weightdenum, lp, (int16_t*)WebRtcIlbcfix_kLpcChirpSyntDenum, (int16_t)lp_length); - - /* subframes 2 to 6: interpolation between first and last LSF */ - - pos = lp_length; - for (i = 1; i < 6; i++) { - WebRtcIlbcfix_LspInterpolate2PolyDec(lp, lsfdeq, lsfdeq2, - WebRtcIlbcfix_kLsfWeight30ms[i], length); - WEBRTC_SPL_MEMCPY_W16(syntdenum + pos,lp,lp_length); - WebRtcIlbcfix_BwExpand(weightdenum + pos, lp, - (int16_t*)WebRtcIlbcfix_kLpcChirpSyntDenum, (int16_t)lp_length); - pos += lp_length; - } - } else { /* iLBCdec_inst->mode=20 */ - /* subframes 1 to 4: interpolation between old and new LSF */ - pos = 0; - for (i = 0; i < iLBCdec_inst->nsub; i++) { - WebRtcIlbcfix_LspInterpolate2PolyDec(lp, iLBCdec_inst->lsfdeqold, lsfdeq, - WebRtcIlbcfix_kLsfWeight20ms[i], length); - WEBRTC_SPL_MEMCPY_W16(syntdenum+pos,lp,lp_length); - WebRtcIlbcfix_BwExpand(weightdenum+pos, lp, - (int16_t*)WebRtcIlbcfix_kLpcChirpSyntDenum, (int16_t)lp_length); - pos 
+= lp_length; - } - } - - /* update memory */ - - if (iLBCdec_inst->mode==30) { - WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->lsfdeqold, lsfdeq2, length); - } else { - WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->lsfdeqold, lsfdeq, length); - } -} diff --git a/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h b/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h deleted file mode 100644 index 40510007a9..0000000000 --- a/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_DecoderInterpolateLsp.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODER_INTERPOLATE_LSF_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODER_INTERPOLATE_LSF_H_ - -#include -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * obtain synthesis and weighting filters form lsf coefficients - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_DecoderInterpolateLsp( - int16_t* syntdenum, /* (o) synthesis filter coefficients */ - int16_t* weightdenum, /* (o) weighting denumerator - coefficients */ - int16_t* lsfdeq, /* (i) dequantized lsf coefficients */ - int16_t length, /* (i) length of lsf coefficient vector */ - IlbcDecoder* iLBCdec_inst - /* (i) the decoder state structure */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/defines.h b/modules/audio_coding/codecs/ilbc/defines.h deleted file mode 100644 index 64135c4887..0000000000 --- a/modules/audio_coding/codecs/ilbc/defines.h +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - define.h - -******************************************************************/ -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DEFINES_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DEFINES_H_ - -#include -#include - -#include "common_audio/signal_processing/include/signal_processing_library.h" - -/* general codec settings */ - -#define FS 8000 -#define BLOCKL_20MS 160 -#define BLOCKL_30MS 240 -#define BLOCKL_MAX 240 -#define NSUB_20MS 4 -#define NSUB_30MS 6 -#define NSUB_MAX 6 -#define NASUB_20MS 2 -#define NASUB_30MS 4 -#define NASUB_MAX 4 -#define SUBL 40 -#define STATE_LEN 80 -#define STATE_SHORT_LEN_30MS 58 -#define STATE_SHORT_LEN_20MS 57 - -/* LPC settings */ - -#define LPC_FILTERORDER 10 -#define LPC_LOOKBACK 60 -#define LPC_N_20MS 1 -#define LPC_N_30MS 2 -#define LPC_N_MAX 2 -#define LPC_ASYMDIFF 20 -#define LSF_NSPLIT 3 -#define LSF_NUMBER_OF_STEPS 4 -#define LPC_HALFORDER 5 -#define COS_GRID_POINTS 60 - -/* cb settings */ - -#define CB_NSTAGES 3 -#define CB_EXPAND 2 -#define CB_MEML 147 -#define CB_FILTERLEN (2 * 4) -#define CB_HALFFILTERLEN 4 -#define CB_RESRANGE 34 -#define CB_MAXGAIN_FIXQ6 83 /* error = -0.24% */ -#define CB_MAXGAIN_FIXQ14 21299 - -/* enhancer */ - -#define ENH_BLOCKL 80 /* block length */ -#define ENH_BLOCKL_HALF (ENH_BLOCKL / 2) -#define ENH_HL \ - 3 /* 2*ENH_HL+1 is number blocks \ - in said second \ - sequence */ -#define ENH_SLOP \ - 2 /* max difference estimated and \ - correct pitch period */ -#define ENH_PLOCSL \ - 8 /* pitch-estimates and \ - pitch-locations buffer \ - length */ -#define ENH_OVERHANG 2 -#define ENH_UPS0 4 /* upsampling rate */ -#define ENH_FL0 3 /* 2*FLO+1 is the length of each filter */ -#define ENH_FLO_MULT2_PLUS1 7 -#define ENH_VECTL (ENH_BLOCKL + 2 * ENH_FL0) -#define ENH_CORRDIM (2 * ENH_SLOP + 1) -#define ENH_NBLOCKS (BLOCKL / ENH_BLOCKL) -#define ENH_NBLOCKS_EXTRA 5 -#define ENH_NBLOCKS_TOT 8 /* ENH_NBLOCKS+ENH_NBLOCKS_EXTRA */ -#define ENH_BUFL (ENH_NBLOCKS_TOT) * ENH_BLOCKL -#define ENH_BUFL_FILTEROVERHEAD 3 -#define ENH_A0 819 /* Q14 */ -#define ENH_A0_MINUS_A0A0DIV4 848256041 /* Q34 */ -#define ENH_A0DIV2 26843546 /* Q30 */ - -/* PLC */ - -/* Down sampling */ - -#define FILTERORDER_DS_PLUS1 7 -#define DELAY_DS 3 -#define FACTOR_DS 2 - -/* bit stream defs */ - -#define NO_OF_BYTES_20MS 38 -#define NO_OF_BYTES_30MS 50 -#define NO_OF_WORDS_20MS 19 -#define NO_OF_WORDS_30MS 25 -#define STATE_BITS 3 -#define BYTE_LEN 8 -#define ULP_CLASSES 3 - -/* help parameters */ - -#define TWO_PI_FIX 25736 /* Q12 */ - -/* Constants for codebook search and creation */ - -#define ST_MEM_L_TBL 85 -#define MEM_LF_TBL 147 - -/* Struct for the bits */ -typedef struct iLBC_bits_t_ { - int16_t lsf[LSF_NSPLIT * LPC_N_MAX]; - int16_t cb_index[CB_NSTAGES * (NASUB_MAX + 1)]; /* First CB_NSTAGES values - contains extra CB index */ - int16_t gain_index[CB_NSTAGES * (NASUB_MAX + 1)]; /* First CB_NSTAGES values - contains extra CB gain */ - size_t idxForMax; - int16_t state_first; - int16_t idxVec[STATE_SHORT_LEN_30MS]; - int16_t firstbits; - size_t startIdx; -} iLBC_bits; - -/* type definition encoder instance */ -typedef struct IlbcEncoder_ { - /* flag for frame size mode */ - int16_t mode; - - /* basic parameters for different frame sizes */ - size_t blockl; - size_t nsub; - int16_t nasub; - size_t no_of_bytes, no_of_words; - int16_t lpc_n; - size_t state_short_len; - - /* analysis filter state */ - int16_t 
anaMem[LPC_FILTERORDER]; - - /* Fix-point old lsf parameters for interpolation */ - int16_t lsfold[LPC_FILTERORDER]; - int16_t lsfdeqold[LPC_FILTERORDER]; - - /* signal buffer for LP analysis */ - int16_t lpc_buffer[LPC_LOOKBACK + BLOCKL_MAX]; - - /* state of input HP filter */ - int16_t hpimemx[2]; - int16_t hpimemy[4]; - -#ifdef SPLIT_10MS - int16_t weightdenumbuf[66]; - int16_t past_samples[160]; - uint16_t bytes[25]; - int16_t section; - int16_t Nfor_flag; - int16_t Nback_flag; - int16_t start_pos; - size_t diff; -#endif - -} IlbcEncoder; - -/* type definition decoder instance */ -typedef struct IlbcDecoder_ { - /* flag for frame size mode */ - int16_t mode; - - /* basic parameters for different frame sizes */ - size_t blockl; - size_t nsub; - int16_t nasub; - size_t no_of_bytes, no_of_words; - int16_t lpc_n; - size_t state_short_len; - - /* synthesis filter state */ - int16_t syntMem[LPC_FILTERORDER]; - - /* old LSF for interpolation */ - int16_t lsfdeqold[LPC_FILTERORDER]; - - /* pitch lag estimated in enhancer and used in PLC */ - size_t last_lag; - - /* PLC state information */ - int consPLICount, prev_enh_pl; - int16_t perSquare; - - int16_t prevScale, prevPLI; - size_t prevLag; - int16_t prevLpc[LPC_FILTERORDER + 1]; - int16_t prevResidual[NSUB_MAX * SUBL]; - int16_t seed; - - /* previous synthesis filter parameters */ - - int16_t old_syntdenum[(LPC_FILTERORDER + 1) * NSUB_MAX]; - - /* state of output HP filter */ - int16_t hpimemx[2]; - int16_t hpimemy[4]; - - /* enhancer state information */ - int use_enhancer; - int16_t enh_buf[ENH_BUFL + ENH_BUFL_FILTEROVERHEAD]; - size_t enh_period[ENH_NBLOCKS_TOT]; - -} IlbcDecoder; - -#endif diff --git a/modules/audio_coding/codecs/ilbc/do_plc.c b/modules/audio_coding/codecs/ilbc/do_plc.c deleted file mode 100644 index 9ca6ca48e9..0000000000 --- a/modules/audio_coding/codecs/ilbc/do_plc.c +++ /dev/null @@ -1,309 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_DoThePlc.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/do_plc.h" - -#include "modules/audio_coding/codecs/ilbc/bw_expand.h" -#include "modules/audio_coding/codecs/ilbc/comp_corr.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * Packet loss concealment routine. Conceals a residual signal - * and LP parameters. If no packet loss, update state. 
- *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_DoThePlc( - int16_t *PLCresidual, /* (o) concealed residual */ - int16_t *PLClpc, /* (o) concealed LP parameters */ - int16_t PLI, /* (i) packet loss indicator - 0 - no PL, 1 = PL */ - int16_t *decresidual, /* (i) decoded residual */ - int16_t *lpc, /* (i) decoded LPC (only used for no PL) */ - size_t inlag, /* (i) pitch lag */ - IlbcDecoder *iLBCdec_inst - /* (i/o) decoder instance */ - ){ - size_t i; - int32_t cross, ener, cross_comp, ener_comp = 0; - int32_t measure, maxMeasure, energy; - int32_t noise_energy_threshold_30dB; - int16_t max, crossSquareMax, crossSquare; - size_t j, lag, randlag; - int16_t tmp1, tmp2; - int16_t shift1, shift2, shift3, shiftMax; - int16_t scale3; - size_t corrLen; - int32_t tmpW32, tmp2W32; - int16_t use_gain; - int16_t tot_gain; - int16_t max_perSquare; - int16_t scale1, scale2; - int16_t totscale; - int32_t nom; - int16_t denom; - int16_t pitchfact; - size_t use_lag; - int ind; - int16_t randvec[BLOCKL_MAX]; - - /* Packet Loss */ - if (PLI == 1) { - - (*iLBCdec_inst).consPLICount += 1; - - /* if previous frame not lost, - determine pitch pred. gain */ - - if (iLBCdec_inst->prevPLI != 1) { - - /* Maximum 60 samples are correlated, preserve as high accuracy - as possible without getting overflow */ - max = WebRtcSpl_MaxAbsValueW16((*iLBCdec_inst).prevResidual, - iLBCdec_inst->blockl); - scale3 = (WebRtcSpl_GetSizeInBits(max)<<1) - 25; - if (scale3 < 0) { - scale3 = 0; - } - - /* Store scale for use when interpolating between the - * concealment and the received packet */ - iLBCdec_inst->prevScale = scale3; - - /* Search around the previous lag +/-3 to find the - best pitch period */ - lag = inlag - 3; - - /* Guard against getting outside the frame */ - corrLen = (size_t)WEBRTC_SPL_MIN(60, iLBCdec_inst->blockl-(inlag+3)); - - WebRtcIlbcfix_CompCorr( &cross, &ener, - iLBCdec_inst->prevResidual, lag, iLBCdec_inst->blockl, corrLen, scale3); - - /* Normalize and store cross^2 and the number of shifts */ - shiftMax = WebRtcSpl_GetSizeInBits(WEBRTC_SPL_ABS_W32(cross))-15; - crossSquareMax = (int16_t)(( - (int16_t)WEBRTC_SPL_SHIFT_W32(cross, -shiftMax) * - (int16_t)WEBRTC_SPL_SHIFT_W32(cross, -shiftMax)) >> 15); - - for (j=inlag-2;j<=inlag+3;j++) { - WebRtcIlbcfix_CompCorr( &cross_comp, &ener_comp, - iLBCdec_inst->prevResidual, j, iLBCdec_inst->blockl, corrLen, scale3); - - /* Use the criteria (corr*corr)/energy to compare if - this lag is better or not. 
To avoid the division, - do a cross multiplication */ - shift1 = WebRtcSpl_GetSizeInBits(WEBRTC_SPL_ABS_W32(cross_comp))-15; - crossSquare = (int16_t)(( - (int16_t)WEBRTC_SPL_SHIFT_W32(cross_comp, -shift1) * - (int16_t)WEBRTC_SPL_SHIFT_W32(cross_comp, -shift1)) >> 15); - - shift2 = WebRtcSpl_GetSizeInBits(ener)-15; - measure = (int16_t)WEBRTC_SPL_SHIFT_W32(ener, -shift2) * crossSquare; - - shift3 = WebRtcSpl_GetSizeInBits(ener_comp)-15; - maxMeasure = (int16_t)WEBRTC_SPL_SHIFT_W32(ener_comp, -shift3) * - crossSquareMax; - - /* Calculate shift value, so that the two measures can - be put in the same Q domain */ - if(2 * shiftMax + shift3 > 2 * shift1 + shift2) { - tmp1 = - WEBRTC_SPL_MIN(31, 2 * shiftMax + shift3 - 2 * shift1 - shift2); - tmp2 = 0; - } else { - tmp1 = 0; - tmp2 = - WEBRTC_SPL_MIN(31, 2 * shift1 + shift2 - 2 * shiftMax - shift3); - } - - if ((measure>>tmp1) > (maxMeasure>>tmp2)) { - /* New lag is better => record lag, measure and domain */ - lag = j; - crossSquareMax = crossSquare; - cross = cross_comp; - shiftMax = shift1; - ener = ener_comp; - } - } - - /* Calculate the periodicity for the lag with the maximum correlation. - - Definition of the periodicity: - abs(corr(vec1, vec2))/(sqrt(energy(vec1))*sqrt(energy(vec2))) - - Work in the Square domain to simplify the calculations - max_perSquare is less than 1 (in Q15) - */ - tmp2W32=WebRtcSpl_DotProductWithScale(&iLBCdec_inst->prevResidual[iLBCdec_inst->blockl-corrLen], - &iLBCdec_inst->prevResidual[iLBCdec_inst->blockl-corrLen], - corrLen, scale3); - - if ((tmp2W32>0)&&(ener_comp>0)) { - /* norm energies to int16_t, compute the product of the energies and - use the upper int16_t as the denominator */ - - scale1=(int16_t)WebRtcSpl_NormW32(tmp2W32)-16; - tmp1=(int16_t)WEBRTC_SPL_SHIFT_W32(tmp2W32, scale1); - - scale2=(int16_t)WebRtcSpl_NormW32(ener)-16; - tmp2=(int16_t)WEBRTC_SPL_SHIFT_W32(ener, scale2); - denom = (int16_t)((tmp1 * tmp2) >> 16); /* in Q(scale1+scale2-16) */ - - /* Square the cross correlation and norm it such that max_perSquare - will be in Q15 after the division */ - - totscale = scale1+scale2-1; - tmp1 = (int16_t)WEBRTC_SPL_SHIFT_W32(cross, (totscale>>1)); - tmp2 = (int16_t)WEBRTC_SPL_SHIFT_W32(cross, totscale-(totscale>>1)); - - nom = tmp1 * tmp2; - max_perSquare = (int16_t)WebRtcSpl_DivW32W16(nom, denom); - - } else { - max_perSquare = 0; - } - } - - /* previous frame lost, use recorded lag and gain */ - - else { - lag = iLBCdec_inst->prevLag; - max_perSquare = iLBCdec_inst->perSquare; - } - - /* Attenuate signal and scale down pitch pred gain if - several frames lost consecutively */ - - use_gain = 32767; /* 1.0 in Q15 */ - - if (iLBCdec_inst->consPLICount*iLBCdec_inst->blockl>320) { - use_gain = 29491; /* 0.9 in Q15 */ - } else if (iLBCdec_inst->consPLICount*iLBCdec_inst->blockl>640) { - use_gain = 22938; /* 0.7 in Q15 */ - } else if (iLBCdec_inst->consPLICount*iLBCdec_inst->blockl>960) { - use_gain = 16384; /* 0.5 in Q15 */ - } else if (iLBCdec_inst->consPLICount*iLBCdec_inst->blockl>1280) { - use_gain = 0; /* 0.0 in Q15 */ - } - - /* Compute mixing factor of picth repeatition and noise: - for max_per>0.7 set periodicity to 1.0 - 0.4<max_per<0.7 set periodicity to (max_per-0.4)/(0.7-0.4) - max_per<0.4 set periodicity to 0.0 - */ - - if (max_perSquare>7868) { /* periodicity > 0.7 (0.7^4=0.2401 in Q15) */ - pitchfact = 32767; - } else if (max_perSquare>839) { /* 0.4 < periodicity < 0.7 (0.4^4=0.0256 in Q15) */ - /* find best index and interpolate from that */ - ind = 5; - while ((max_perSquare<WebRtcIlbcfix_kPlcPerSqr[ind])&&(ind>0)) { - ind--; - } - /* pitch fact is approximated by first order */ - tmpW32 = (int32_t)WebRtcIlbcfix_kPlcPitchFact[ind] + -
((WebRtcIlbcfix_kPlcPfSlope[ind] * - (max_perSquare - WebRtcIlbcfix_kPlcPerSqr[ind])) >> 11); - - pitchfact = (int16_t)WEBRTC_SPL_MIN(tmpW32, 32767); /* guard against overflow */ - - } else { /* periodicity < 0.4 */ - pitchfact = 0; - } - - /* avoid repetition of same pitch cycle (buzzyness) */ - use_lag = lag; - if (lag<80) { - use_lag = 2*lag; - } - - /* compute concealed residual */ - noise_energy_threshold_30dB = (int32_t)iLBCdec_inst->blockl * 900; - energy = 0; - for (i=0; iblockl; i++) { - - /* noise component - 52 < randlagFIX < 117 */ - iLBCdec_inst->seed = (int16_t)(iLBCdec_inst->seed * 31821 + 13849); - randlag = 53 + (iLBCdec_inst->seed & 63); - if (randlag > i) { - randvec[i] = - iLBCdec_inst->prevResidual[iLBCdec_inst->blockl + i - randlag]; - } else { - randvec[i] = iLBCdec_inst->prevResidual[i - randlag]; - } - - /* pitch repeatition component */ - if (use_lag > i) { - PLCresidual[i] = - iLBCdec_inst->prevResidual[iLBCdec_inst->blockl + i - use_lag]; - } else { - PLCresidual[i] = PLCresidual[i - use_lag]; - } - - /* Attinuate total gain for each 10 ms */ - if (i<80) { - tot_gain=use_gain; - } else if (i<160) { - tot_gain = (int16_t)((31130 * use_gain) >> 15); /* 0.95*use_gain */ - } else { - tot_gain = (int16_t)((29491 * use_gain) >> 15); /* 0.9*use_gain */ - } - - - /* mix noise and pitch repeatition */ - PLCresidual[i] = (int16_t)((tot_gain * - ((pitchfact * PLCresidual[i] + (32767 - pitchfact) * randvec[i] + - 16384) >> 15)) >> 15); - - /* Compute energy until threshold for noise energy is reached */ - if (energy < noise_energy_threshold_30dB) { - energy += PLCresidual[i] * PLCresidual[i]; - } - } - - /* less than 30 dB, use only noise */ - if (energy < noise_energy_threshold_30dB) { - for (i=0; iblockl; i++) { - PLCresidual[i] = randvec[i]; - } - } - - /* use the old LPC */ - WEBRTC_SPL_MEMCPY_W16(PLClpc, (*iLBCdec_inst).prevLpc, LPC_FILTERORDER+1); - - /* Update state in case there are multiple frame losses */ - iLBCdec_inst->prevLag = lag; - iLBCdec_inst->perSquare = max_perSquare; - } - - /* no packet loss, copy input */ - - else { - WEBRTC_SPL_MEMCPY_W16(PLCresidual, decresidual, iLBCdec_inst->blockl); - WEBRTC_SPL_MEMCPY_W16(PLClpc, lpc, (LPC_FILTERORDER+1)); - iLBCdec_inst->consPLICount = 0; - } - - /* update state */ - iLBCdec_inst->prevPLI = PLI; - WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->prevLpc, PLClpc, (LPC_FILTERORDER+1)); - WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->prevResidual, PLCresidual, iLBCdec_inst->blockl); - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/do_plc.h b/modules/audio_coding/codecs/ilbc/do_plc.h deleted file mode 100644 index 5e3bcc6d3c..0000000000 --- a/modules/audio_coding/codecs/ilbc/do_plc.h +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
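The concealment loop above blends the pitch-repetition sample and the noise sample with a Q15 weight (pitchfact) and then applies the Q15 attenuation gain. A minimal standalone sketch of that per-sample mix (the helper name is illustrative, not part of the deleted source):

#include <stdint.h>

/* Q15 mix used in DoThePlc above: pitchfact weights the pitch-repetition
   sample against the random (noise) sample; gain then attenuates the
   result. The +16384 rounds before the >>15. */
static int16_t MixPlcSampleQ15(int16_t pitch_sample, int16_t noise_sample,
                               int16_t pitchfact, int16_t gain) {
  int32_t mixed = ((int32_t)pitchfact * pitch_sample +
                   (int32_t)(32767 - pitchfact) * noise_sample + 16384) >> 15;
  return (int16_t)(((int32_t)gain * mixed) >> 15);
}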
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_DoThePlc.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DO_PLC_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DO_PLC_H_ - -#include -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * Packet loss concealment routine. Conceals a residual signal - * and LP parameters. If no packet loss, update state. - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_DoThePlc( - int16_t* PLCresidual, /* (o) concealed residual */ - int16_t* PLClpc, /* (o) concealed LP parameters */ - int16_t PLI, /* (i) packet loss indicator - 0 - no PL, 1 = PL */ - int16_t* decresidual, /* (i) decoded residual */ - int16_t* lpc, /* (i) decoded LPC (only used for no PL) */ - size_t inlag, /* (i) pitch lag */ - IlbcDecoder* iLBCdec_inst - /* (i/o) decoder instance */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/encode.c b/modules/audio_coding/codecs/ilbc/encode.c deleted file mode 100644 index 8e536221cd..0000000000 --- a/modules/audio_coding/codecs/ilbc/encode.c +++ /dev/null @@ -1,517 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Encode.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/encode.h" - -#include - -#include "modules/audio_coding/codecs/ilbc/cb_construct.h" -#include "modules/audio_coding/codecs/ilbc/cb_search.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/frame_classify.h" -#include "modules/audio_coding/codecs/ilbc/hp_input.h" -#include "modules/audio_coding/codecs/ilbc/index_conv_enc.h" -#include "modules/audio_coding/codecs/ilbc/lpc_encode.h" -#include "modules/audio_coding/codecs/ilbc/pack_bits.h" -#include "modules/audio_coding/codecs/ilbc/state_construct.h" -#include "modules/audio_coding/codecs/ilbc/state_search.h" -#include "rtc_base/checks.h" -#include "rtc_base/system/arch.h" - -#ifdef SPLIT_10MS -#include "modules/audio_coding/codecs/ilbc/unpack_bits.h" -#include "modules/audio_coding/codecs/ilbc/index_conv_dec.h" -#endif - -#ifndef WEBRTC_ARCH_BIG_ENDIAN -#include "modules/audio_coding/codecs/ilbc/swap_bytes.h" -#endif - -/*----------------------------------------------------------------* - * main encoder function - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_EncodeImpl( - uint16_t *bytes, /* (o) encoded data bits iLBC */ - const int16_t *block, /* (i) speech vector to encode */ - IlbcEncoder *iLBCenc_inst /* (i/o) the general encoder - state */ - ){ - size_t n, meml_gotten, Nfor; - size_t diff, start_pos; - size_t index; - size_t subcount, subframe; - size_t start_count, end_count; - int16_t *residual; - int32_t en1, 
en2; - int16_t scale, max; - int16_t *syntdenum; - int16_t *decresidual; - int16_t *reverseResidual; - int16_t *reverseDecresidual; - /* Stack based */ - int16_t weightdenum[(LPC_FILTERORDER + 1)*NSUB_MAX]; - int16_t dataVec[BLOCKL_MAX + LPC_FILTERORDER]; - int16_t memVec[CB_MEML+CB_FILTERLEN]; - int16_t bitsMemory[sizeof(iLBC_bits)/sizeof(int16_t)]; - iLBC_bits *iLBCbits_inst = (iLBC_bits*)bitsMemory; - - -#ifdef SPLIT_10MS - int16_t *weightdenumbuf = iLBCenc_inst->weightdenumbuf; - int16_t last_bit; -#endif - - int16_t *data = &dataVec[LPC_FILTERORDER]; - int16_t *mem = &memVec[CB_HALFFILTERLEN]; - - /* Reuse som buffers to save stack memory */ - residual = &iLBCenc_inst->lpc_buffer[LPC_LOOKBACK+BLOCKL_MAX-iLBCenc_inst->blockl]; - syntdenum = mem; /* syntdenum[(LPC_FILTERORDER + 1)*NSUB_MAX] and mem are used non overlapping in the code */ - decresidual = residual; /* Already encoded residual is overwritten by the decoded version */ - reverseResidual = data; /* data and reverseResidual are used non overlapping in the code */ - reverseDecresidual = reverseResidual; /* Already encoded residual is overwritten by the decoded version */ - -#ifdef SPLIT_10MS - - WebRtcSpl_MemSetW16 ( (int16_t *) iLBCbits_inst, 0, - sizeof(iLBC_bits) / sizeof(int16_t) ); - - start_pos = iLBCenc_inst->start_pos; - diff = iLBCenc_inst->diff; - - if (iLBCenc_inst->section != 0){ - WEBRTC_SPL_MEMCPY_W16 (weightdenum, weightdenumbuf, - SCRATCH_ENCODE_DATAVEC - SCRATCH_ENCODE_WEIGHTDENUM); - /* Un-Packetize the frame into parameters */ - last_bit = WebRtcIlbcfix_UnpackBits (iLBCenc_inst->bytes, iLBCbits_inst, iLBCenc_inst->mode); - if (last_bit) - return; - /* adjust index */ - WebRtcIlbcfix_IndexConvDec (iLBCbits_inst->cb_index); - - if (iLBCenc_inst->section == 1){ - /* Save first 80 samples of a 160/240 sample frame for 20/30msec */ - WEBRTC_SPL_MEMCPY_W16 (iLBCenc_inst->past_samples, block, 80); - } - else{ // iLBCenc_inst->section == 2 AND mode = 30ms - /* Save second 80 samples of a 240 sample frame for 30msec */ - WEBRTC_SPL_MEMCPY_W16 (iLBCenc_inst->past_samples + 80, block, 80); - } - } - else{ // iLBCenc_inst->section == 0 - /* form a complete frame of 160/240 for 20msec/30msec mode */ - WEBRTC_SPL_MEMCPY_W16 (data + (iLBCenc_inst->mode * 8) - 80, block, 80); - WEBRTC_SPL_MEMCPY_W16 (data, iLBCenc_inst->past_samples, - (iLBCenc_inst->mode * 8) - 80); - iLBCenc_inst->Nfor_flag = 0; - iLBCenc_inst->Nback_flag = 0; -#else - /* copy input block to data*/ - WEBRTC_SPL_MEMCPY_W16(data,block,iLBCenc_inst->blockl); -#endif - - /* high pass filtering of input signal and scale down the residual (*0.5) */ - WebRtcIlbcfix_HpInput(data, (int16_t*)WebRtcIlbcfix_kHpInCoefs, - iLBCenc_inst->hpimemy, iLBCenc_inst->hpimemx, - iLBCenc_inst->blockl); - - /* LPC of hp filtered input data */ - WebRtcIlbcfix_LpcEncode(syntdenum, weightdenum, iLBCbits_inst->lsf, data, - iLBCenc_inst); - - /* Set up state */ - WEBRTC_SPL_MEMCPY_W16(dataVec, iLBCenc_inst->anaMem, LPC_FILTERORDER); - - /* inverse filter to get residual */ - for (n=0; nnsub; n++ ) { - WebRtcSpl_FilterMAFastQ12( - &data[n*SUBL], &residual[n*SUBL], - &syntdenum[n*(LPC_FILTERORDER+1)], - LPC_FILTERORDER+1, SUBL); - } - - /* Copy the state for next frame */ - WEBRTC_SPL_MEMCPY_W16(iLBCenc_inst->anaMem, &data[iLBCenc_inst->blockl-LPC_FILTERORDER], LPC_FILTERORDER); - - /* find state location */ - - iLBCbits_inst->startIdx = WebRtcIlbcfix_FrameClassify(iLBCenc_inst,residual); - - /* check if state should be in first or last part of the - two subframes */ - - index = 
(iLBCbits_inst->startIdx-1)*SUBL; - max=WebRtcSpl_MaxAbsValueW16(&residual[index], 2*SUBL); - scale = WebRtcSpl_GetSizeInBits((uint32_t)(max * max)); - - /* Scale to maximum 25 bits so that the MAC won't cause overflow */ - scale = scale - 25; - if(scale < 0) { - scale = 0; - } - - diff = STATE_LEN - iLBCenc_inst->state_short_len; - en1=WebRtcSpl_DotProductWithScale(&residual[index], &residual[index], - iLBCenc_inst->state_short_len, scale); - index += diff; - en2=WebRtcSpl_DotProductWithScale(&residual[index], &residual[index], - iLBCenc_inst->state_short_len, scale); - if (en1 > en2) { - iLBCbits_inst->state_first = 1; - start_pos = (iLBCbits_inst->startIdx-1)*SUBL; - } else { - iLBCbits_inst->state_first = 0; - start_pos = (iLBCbits_inst->startIdx-1)*SUBL + diff; - } - - /* scalar quantization of state */ - - WebRtcIlbcfix_StateSearch(iLBCenc_inst, iLBCbits_inst, &residual[start_pos], - &syntdenum[(iLBCbits_inst->startIdx-1)*(LPC_FILTERORDER+1)], - &weightdenum[(iLBCbits_inst->startIdx-1)*(LPC_FILTERORDER+1)]); - - WebRtcIlbcfix_StateConstruct(iLBCbits_inst->idxForMax, iLBCbits_inst->idxVec, - &syntdenum[(iLBCbits_inst->startIdx-1)*(LPC_FILTERORDER+1)], - &decresidual[start_pos], iLBCenc_inst->state_short_len - ); - - /* predictive quantization in state */ - - if (iLBCbits_inst->state_first) { /* put adaptive part in the end */ - - /* setup memory */ - - WebRtcSpl_MemSetW16(mem, 0, CB_MEML - iLBCenc_inst->state_short_len); - WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-iLBCenc_inst->state_short_len, - decresidual+start_pos, iLBCenc_inst->state_short_len); - - /* encode subframes */ - - WebRtcIlbcfix_CbSearch(iLBCenc_inst, iLBCbits_inst->cb_index, iLBCbits_inst->gain_index, - &residual[start_pos+iLBCenc_inst->state_short_len], - mem+CB_MEML-ST_MEM_L_TBL, ST_MEM_L_TBL, diff, - &weightdenum[iLBCbits_inst->startIdx*(LPC_FILTERORDER+1)], 0); - - /* construct decoded vector */ - - RTC_CHECK(WebRtcIlbcfix_CbConstruct( - &decresidual[start_pos + iLBCenc_inst->state_short_len], - iLBCbits_inst->cb_index, iLBCbits_inst->gain_index, - mem + CB_MEML - ST_MEM_L_TBL, ST_MEM_L_TBL, diff)); - - } - else { /* put adaptive part in the beginning */ - - /* create reversed vectors for prediction */ - - WebRtcSpl_MemCpyReversedOrder(&reverseResidual[diff-1], - &residual[(iLBCbits_inst->startIdx+1)*SUBL-STATE_LEN], diff); - - /* setup memory */ - - meml_gotten = iLBCenc_inst->state_short_len; - WebRtcSpl_MemCpyReversedOrder(&mem[CB_MEML-1], &decresidual[start_pos], meml_gotten); - WebRtcSpl_MemSetW16(mem, 0, CB_MEML - iLBCenc_inst->state_short_len); - - /* encode subframes */ - WebRtcIlbcfix_CbSearch(iLBCenc_inst, iLBCbits_inst->cb_index, iLBCbits_inst->gain_index, - reverseResidual, mem+CB_MEML-ST_MEM_L_TBL, ST_MEM_L_TBL, diff, - &weightdenum[(iLBCbits_inst->startIdx-1)*(LPC_FILTERORDER+1)], - 0); - - /* construct decoded vector */ - RTC_CHECK(WebRtcIlbcfix_CbConstruct( - reverseDecresidual, iLBCbits_inst->cb_index, - iLBCbits_inst->gain_index, mem + CB_MEML - ST_MEM_L_TBL, - ST_MEM_L_TBL, diff)); - - /* get decoded residual from reversed vector */ - - WebRtcSpl_MemCpyReversedOrder(&decresidual[start_pos-1], reverseDecresidual, diff); - } - -#ifdef SPLIT_10MS - iLBCenc_inst->start_pos = start_pos; - iLBCenc_inst->diff = diff; - iLBCenc_inst->section++; - /* adjust index */ - WebRtcIlbcfix_IndexConvEnc (iLBCbits_inst->cb_index); - /* Packetize the parameters into the frame */ - WebRtcIlbcfix_PackBits (iLBCenc_inst->bytes, iLBCbits_inst, iLBCenc_inst->mode); - WEBRTC_SPL_MEMCPY_W16 (weightdenumbuf, weightdenum, - 
SCRATCH_ENCODE_DATAVEC - SCRATCH_ENCODE_WEIGHTDENUM); - return; - } -#endif - - /* forward prediction of subframes */ - - Nfor = iLBCenc_inst->nsub-iLBCbits_inst->startIdx-1; - - /* counter for predicted subframes */ -#ifdef SPLIT_10MS - if (iLBCenc_inst->mode == 20) - { - subcount = 1; - } - if (iLBCenc_inst->mode == 30) - { - if (iLBCenc_inst->section == 1) - { - subcount = 1; - } - if (iLBCenc_inst->section == 2) - { - subcount = 3; - } - } -#else - subcount=1; -#endif - - if( Nfor > 0 ){ - - /* setup memory */ - - WebRtcSpl_MemSetW16(mem, 0, CB_MEML-STATE_LEN); - WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-STATE_LEN, - decresidual+(iLBCbits_inst->startIdx-1)*SUBL, STATE_LEN); - -#ifdef SPLIT_10MS - if (iLBCenc_inst->Nfor_flag > 0) - { - for (subframe = 0; subframe < WEBRTC_SPL_MIN (Nfor, 2); subframe++) - { - /* update memory */ - WEBRTC_SPL_MEMCPY_W16 (mem, mem + SUBL, (CB_MEML - SUBL)); - WEBRTC_SPL_MEMCPY_W16 (mem + CB_MEML - SUBL, - &decresidual[(iLBCbits_inst->startIdx + 1 + - subframe) * SUBL], SUBL); - } - } - - iLBCenc_inst->Nfor_flag++; - - if (iLBCenc_inst->mode == 20) - { - start_count = 0; - end_count = Nfor; - } - if (iLBCenc_inst->mode == 30) - { - if (iLBCenc_inst->section == 1) - { - start_count = 0; - end_count = WEBRTC_SPL_MIN (Nfor, (size_t)2); - } - if (iLBCenc_inst->section == 2) - { - start_count = WEBRTC_SPL_MIN (Nfor, (size_t)2); - end_count = Nfor; - } - } -#else - start_count = 0; - end_count = Nfor; -#endif - - /* loop over subframes to encode */ - - for (subframe = start_count; subframe < end_count; subframe++){ - - /* encode subframe */ - - WebRtcIlbcfix_CbSearch(iLBCenc_inst, iLBCbits_inst->cb_index+subcount*CB_NSTAGES, - iLBCbits_inst->gain_index+subcount*CB_NSTAGES, - &residual[(iLBCbits_inst->startIdx+1+subframe)*SUBL], - mem, MEM_LF_TBL, SUBL, - &weightdenum[(iLBCbits_inst->startIdx+1+subframe)*(LPC_FILTERORDER+1)], - subcount); - - /* construct decoded vector */ - RTC_CHECK(WebRtcIlbcfix_CbConstruct( - &decresidual[(iLBCbits_inst->startIdx + 1 + subframe) * SUBL], - iLBCbits_inst->cb_index + subcount * CB_NSTAGES, - iLBCbits_inst->gain_index + subcount * CB_NSTAGES, mem, MEM_LF_TBL, - SUBL)); - - /* update memory */ - - memmove(mem, mem + SUBL, (CB_MEML - SUBL) * sizeof(*mem)); - WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-SUBL, - &decresidual[(iLBCbits_inst->startIdx+1+subframe)*SUBL], SUBL); - - subcount++; - } - } - -#ifdef SPLIT_10MS - if ((iLBCenc_inst->section == 1) && - (iLBCenc_inst->mode == 30) && (Nfor > 0) && (end_count == 2)) - { - iLBCenc_inst->section++; - /* adjust index */ - WebRtcIlbcfix_IndexConvEnc (iLBCbits_inst->cb_index); - /* Packetize the parameters into the frame */ - WebRtcIlbcfix_PackBits (iLBCenc_inst->bytes, iLBCbits_inst, iLBCenc_inst->mode); - WEBRTC_SPL_MEMCPY_W16 (weightdenumbuf, weightdenum, - SCRATCH_ENCODE_DATAVEC - SCRATCH_ENCODE_WEIGHTDENUM); - return; - } -#endif - - /* backward prediction of subframes */ - - if (iLBCbits_inst->startIdx > 1) { - - /* create reverse order vectors - (The decresidual does not need to be copied since it is - contained in the same vector as the residual) - */ - - size_t Nback = iLBCbits_inst->startIdx - 1; - WebRtcSpl_MemCpyReversedOrder(&reverseResidual[Nback*SUBL-1], residual, Nback*SUBL); - - /* setup memory */ - - meml_gotten = SUBL*(iLBCenc_inst->nsub+1-iLBCbits_inst->startIdx); - if( meml_gotten > CB_MEML ) { - meml_gotten=CB_MEML; - } - - WebRtcSpl_MemCpyReversedOrder(&mem[CB_MEML-1], &decresidual[Nback*SUBL], meml_gotten); - WebRtcSpl_MemSetW16(mem, 0, CB_MEML - meml_gotten); - -#ifdef SPLIT_10MS 
- if (iLBCenc_inst->Nback_flag > 0) - { - for (subframe = 0; subframe < WEBRTC_SPL_MAX (2 - Nfor, 0); subframe++) - { - /* update memory */ - WEBRTC_SPL_MEMCPY_W16 (mem, mem + SUBL, (CB_MEML - SUBL)); - WEBRTC_SPL_MEMCPY_W16 (mem + CB_MEML - SUBL, - &reverseDecresidual[subframe * SUBL], SUBL); - } - } - - iLBCenc_inst->Nback_flag++; - - - if (iLBCenc_inst->mode == 20) - { - start_count = 0; - end_count = Nback; - } - if (iLBCenc_inst->mode == 30) - { - if (iLBCenc_inst->section == 1) - { - start_count = 0; - end_count = (Nfor >= 2) ? 0 : (2 - NFor); - } - if (iLBCenc_inst->section == 2) - { - start_count = (Nfor >= 2) ? 0 : (2 - NFor); - end_count = Nback; - } - } -#else - start_count = 0; - end_count = Nback; -#endif - - /* loop over subframes to encode */ - - for (subframe = start_count; subframe < end_count; subframe++){ - - /* encode subframe */ - - WebRtcIlbcfix_CbSearch(iLBCenc_inst, iLBCbits_inst->cb_index+subcount*CB_NSTAGES, - iLBCbits_inst->gain_index+subcount*CB_NSTAGES, &reverseResidual[subframe*SUBL], - mem, MEM_LF_TBL, SUBL, - &weightdenum[(iLBCbits_inst->startIdx-2-subframe)*(LPC_FILTERORDER+1)], - subcount); - - /* construct decoded vector */ - RTC_CHECK(WebRtcIlbcfix_CbConstruct( - &reverseDecresidual[subframe * SUBL], - iLBCbits_inst->cb_index + subcount * CB_NSTAGES, - iLBCbits_inst->gain_index + subcount * CB_NSTAGES, mem, MEM_LF_TBL, - SUBL)); - - /* update memory */ - memmove(mem, mem + SUBL, (CB_MEML - SUBL) * sizeof(*mem)); - WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-SUBL, - &reverseDecresidual[subframe*SUBL], SUBL); - - subcount++; - - } - - /* get decoded residual from reversed vector */ - - WebRtcSpl_MemCpyReversedOrder(&decresidual[SUBL*Nback-1], reverseDecresidual, SUBL*Nback); - } - /* end encoding part */ - - /* adjust index */ - - WebRtcIlbcfix_IndexConvEnc(iLBCbits_inst->cb_index); - - /* Packetize the parameters into the frame */ - -#ifdef SPLIT_10MS - if( (iLBCenc_inst->mode==30) && (iLBCenc_inst->section==1) ){ - WebRtcIlbcfix_PackBits(iLBCenc_inst->bytes, iLBCbits_inst, iLBCenc_inst->mode); - } - else{ - WebRtcIlbcfix_PackBits(bytes, iLBCbits_inst, iLBCenc_inst->mode); - } -#else - WebRtcIlbcfix_PackBits(bytes, iLBCbits_inst, iLBCenc_inst->mode); -#endif - -#ifndef WEBRTC_ARCH_BIG_ENDIAN - /* Swap bytes for LITTLE ENDIAN since the packbits() - function assumes BIG_ENDIAN machine */ -#ifdef SPLIT_10MS - if (( (iLBCenc_inst->section == 1) && (iLBCenc_inst->mode == 20) ) || - ( (iLBCenc_inst->section == 2) && (iLBCenc_inst->mode == 30) )){ - WebRtcIlbcfix_SwapBytes(bytes, iLBCenc_inst->no_of_words, bytes); - } -#else - WebRtcIlbcfix_SwapBytes(bytes, iLBCenc_inst->no_of_words, bytes); -#endif -#endif - -#ifdef SPLIT_10MS - if (subcount == (iLBCenc_inst->nsub - 1)) - { - iLBCenc_inst->section = 0; - } - else - { - iLBCenc_inst->section++; - WEBRTC_SPL_MEMCPY_W16 (weightdenumbuf, weightdenum, - SCRATCH_ENCODE_DATAVEC - SCRATCH_ENCODE_WEIGHTDENUM); - } -#endif - -} diff --git a/modules/audio_coding/codecs/ilbc/encode.h b/modules/audio_coding/codecs/ilbc/encode.h deleted file mode 100644 index 5290420bbf..0000000000 --- a/modules/audio_coding/codecs/ilbc/encode.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Encode.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENCODE_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENCODE_H_ - -#include -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * main encoder function - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_EncodeImpl( - uint16_t* bytes, /* (o) encoded data bits iLBC */ - const int16_t* block, /* (i) speech vector to encode */ - IlbcEncoder* iLBCenc_inst /* (i/o) the general encoder - state */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/energy_inverse.c b/modules/audio_coding/codecs/ilbc/energy_inverse.c deleted file mode 100644 index 7f00254aea..0000000000 --- a/modules/audio_coding/codecs/ilbc/energy_inverse.c +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_EnergyInverse.c - -******************************************************************/ - -/* Inverses the in vector in into Q29 domain */ - -#include "modules/audio_coding/codecs/ilbc/energy_inverse.h" - -void WebRtcIlbcfix_EnergyInverse( - int16_t *energy, /* (i/o) Energy and inverse - energy (in Q29) */ - size_t noOfEnergies) /* (i) The length of the energy - vector */ -{ - int32_t Nom=(int32_t)0x1FFFFFFF; - int16_t *energyPtr; - size_t i; - - /* Set the minimum energy value to 16384 to avoid overflow */ - energyPtr=energy; - for (i=0; i -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/* Inverses the in vector in into Q29 domain */ - -void WebRtcIlbcfix_EnergyInverse( - int16_t* - energy, /* (i/o) Energy and inverse - energy (in Q29) */ - size_t noOfEnergies); /* (i) The length of the energy - vector */ - -#endif diff --git a/modules/audio_coding/codecs/ilbc/enh_upsample.c b/modules/audio_coding/codecs/ilbc/enh_upsample.c deleted file mode 100644 index cd3d0a4db1..0000000000 --- a/modules/audio_coding/codecs/ilbc/enh_upsample.c +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
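/* [Editor's sketch, not part of the deleted sources] The loop body of
   WebRtcIlbcfix_EnergyInverse above is hard to read in fixed point; it
   appears to amount to the following: clamp each energy to a minimum of
   16384 (to keep the division in range, as the comment in the deleted code
   says) and replace it by Nom / energy with Nom = 2^29 - 1.  The helper
   name below is illustrative only. */
#include <stddef.h>
#include <stdint.h>

static void energy_inverse_sketch(int16_t* energy, size_t n) {
  const int32_t nom = 0x1FFFFFFF; /* 2^29 - 1, as in the deleted code */
  for (size_t i = 0; i < n; i++) {
    int32_t e = energy[i] < 16384 ? 16384 : energy[i]; /* minimum 16384 */
    energy[i] = (int16_t)(nom / e);
  }
}
/* With e >= 16384 the quotient is at most 32767, so it always fits int16_t. */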
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_EnhUpsample.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/enh_upsample.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * upsample finite array assuming zeros outside bounds - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_EnhUpsample( - int32_t *useq1, /* (o) upsampled output sequence */ - int16_t *seq1 /* (i) unupsampled sequence */ - ){ - int j; - int32_t *pu1, *pu11; - int16_t *ps, *w16tmp; - const int16_t *pp; - - /* filtering: filter overhangs left side of sequence */ - pu1=useq1; - for (j=0;j -#include - -/*----------------------------------------------------------------* - * perform enhancement on idata+centerStartPos through - * idata+centerStartPos+ENH_BLOCKL-1 - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Enhancer( - int16_t* odata, /* (o) smoothed block, dimension blockl */ - int16_t* idata, /* (i) data buffer used for enhancing */ - size_t idatal, /* (i) dimension idata */ - size_t centerStartPos, /* (i) first sample current block within idata */ - size_t* period, /* (i) pitch period array (pitch bward-in time) */ - const size_t* plocs, /* (i) locations where period array values valid */ - size_t periodl /* (i) dimension of period and plocs */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/enhancer_interface.c b/modules/audio_coding/codecs/ilbc/enhancer_interface.c deleted file mode 100644 index ca23e19ae3..0000000000 --- a/modules/audio_coding/codecs/ilbc/enhancer_interface.c +++ /dev/null @@ -1,382 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_EnhancerInterface.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/enhancer_interface.h" - -#include -#include - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/enhancer.h" -#include "modules/audio_coding/codecs/ilbc/hp_output.h" -#include "modules/audio_coding/codecs/ilbc/xcorr_coef.h" - - - -/*----------------------------------------------------------------* - * interface for enhancer - *---------------------------------------------------------------*/ - -size_t // (o) Estimated lag in end of in[] - WebRtcIlbcfix_EnhancerInterface( - int16_t* out, // (o) enhanced signal - const int16_t* in, // (i) unenhanced signal - IlbcDecoder* iLBCdec_inst) { // (i) buffers etc - size_t iblock; - size_t lag=20, tlag=20; - size_t inLen=iLBCdec_inst->blockl+120; - int16_t scale, scale1; - size_t plc_blockl; - int16_t *enh_buf; - size_t *enh_period; - int32_t tmp1, tmp2, max; - size_t new_blocks; - int16_t *enh_bufPtr1; - size_t i; - size_t k; - int16_t EnChange; - int16_t SqrtEnChange; - int16_t inc; - int16_t win; - int16_t *tmpW16ptr; - size_t startPos; - int16_t *plc_pred; - const int16_t *target, *regressor; - int16_t max16; - int shifts; - int32_t ener; - int16_t enerSh; - int16_t corrSh; - size_t ind; - int16_t sh; - size_t start, stop; - /* Stack based */ - int16_t totsh[3]; - int16_t downsampled[(BLOCKL_MAX+120)>>1]; /* length 180 */ - int32_t corr32[50]; - int32_t corrmax[3]; - int16_t corr16[3]; - int16_t en16[3]; - size_t lagmax[3]; - - plc_pred = downsampled; /* Reuse memory since plc_pred[ENH_BLOCKL] and - downsampled are non overlapping */ - enh_buf=iLBCdec_inst->enh_buf; - enh_period=iLBCdec_inst->enh_period; - - /* Copy in the new data into the enhancer buffer */ - memmove(enh_buf, &enh_buf[iLBCdec_inst->blockl], - (ENH_BUFL - iLBCdec_inst->blockl) * sizeof(*enh_buf)); - - WEBRTC_SPL_MEMCPY_W16(&enh_buf[ENH_BUFL-iLBCdec_inst->blockl], in, - iLBCdec_inst->blockl); - - /* Set variables that are dependent on frame size */ - if (iLBCdec_inst->mode==30) { - plc_blockl=ENH_BLOCKL; - new_blocks=3; - startPos=320; /* Start position for enhancement - (640-new_blocks*ENH_BLOCKL-80) */ - } else { - plc_blockl=40; - new_blocks=2; - startPos=440; /* Start position for enhancement - (640-new_blocks*ENH_BLOCKL-40) */ - } - - /* Update the pitch prediction for each enhancer block, move the old ones */ - memmove(enh_period, &enh_period[new_blocks], - (ENH_NBLOCKS_TOT - new_blocks) * sizeof(*enh_period)); - - WebRtcSpl_DownsampleFast( - enh_buf+ENH_BUFL-inLen, /* Input samples */ - inLen + ENH_BUFL_FILTEROVERHEAD, - downsampled, - inLen / 2, - (int16_t*)WebRtcIlbcfix_kLpFiltCoefs, /* Coefficients in Q12 */ - FILTERORDER_DS_PLUS1, /* Length of filter (order-1) */ - FACTOR_DS, - DELAY_DS); - - /* Estimate the pitch in the down sampled domain. 
*/ - for(iblock = 0; iblock> 16); - en16[i] = (int16_t)WEBRTC_SPL_SHIFT_W32(ener, enerSh); - totsh[i] = enerSh - 2 * corrSh; - } - - /* Compare lagmax[0..3] for the (corr^2)/ener criteria */ - ind = 0; - for (i=1; i<3; i++) { - if (totsh[ind] > totsh[i]) { - sh = WEBRTC_SPL_MIN(31, totsh[ind]-totsh[i]); - if (corr16[ind] * en16[i] < (corr16[i] * en16[ind]) >> sh) { - ind = i; - } - } else { - sh = WEBRTC_SPL_MIN(31, totsh[i]-totsh[ind]); - if ((corr16[ind] * en16[i]) >> sh < corr16[i] * en16[ind]) { - ind = i; - } - } - } - - lag = lagmax[ind] + 10; - - /* Store the estimated lag in the non-downsampled domain */ - enh_period[ENH_NBLOCKS_TOT - new_blocks + iblock] = lag * 8; - - /* Store the estimated lag for backward PLC */ - if (iLBCdec_inst->prev_enh_pl==1) { - if (!iblock) { - tlag = lag * 2; - } - } else { - if (iblock==1) { - tlag = lag * 2; - } - } - - lag *= 2; - } - - if ((iLBCdec_inst->prev_enh_pl==1)||(iLBCdec_inst->prev_enh_pl==2)) { - - /* Calculate the best lag of the new frame - This is used to interpolate backwards and mix with the PLC'd data - */ - - /* references */ - target=in; - regressor=in+tlag-1; - - /* scaling */ - // Note that this is not abs-max, so we will take the absolute value below. - max16 = WebRtcSpl_MaxAbsElementW16(regressor, plc_blockl + 3 - 1); - const int16_t max_target = - WebRtcSpl_MaxAbsElementW16(target, plc_blockl + 3 - 1); - const int64_t max_val = plc_blockl * abs(max16 * max_target); - const int32_t factor = max_val >> 31; - shifts = factor == 0 ? 0 : 31 - WebRtcSpl_NormW32(factor); - - /* compute cross correlation */ - WebRtcSpl_CrossCorrelation(corr32, target, regressor, plc_blockl, 3, shifts, - 1); - - /* find lag */ - lag=WebRtcSpl_MaxIndexW32(corr32, 3); - lag+=tlag-1; - - /* Copy the backward PLC to plc_pred */ - - if (iLBCdec_inst->prev_enh_pl==1) { - if (lag>plc_blockl) { - WEBRTC_SPL_MEMCPY_W16(plc_pred, &in[lag-plc_blockl], plc_blockl); - } else { - WEBRTC_SPL_MEMCPY_W16(&plc_pred[plc_blockl-lag], in, lag); - WEBRTC_SPL_MEMCPY_W16( - plc_pred, &enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl+lag], - (plc_blockl-lag)); - } - } else { - size_t pos; - - pos = plc_blockl; - - while (lagprev_enh_pl==1) { - /* limit energy change - if energy in backward PLC is more than 4 times higher than the forward - PLC, then reduce the energy in the backward PLC vector: - sample 1...len-16 set energy of the to 4 times forward PLC - sample len-15..len interpolate between 4 times fw PLC and bw PLC energy - - Note: Compared to floating point code there is a slight change, - the window is 16 samples long instead of 10 samples to simplify the - calculations - */ - - max=WebRtcSpl_MaxAbsValueW16( - &enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl], plc_blockl); - max16=WebRtcSpl_MaxAbsValueW16(plc_pred, plc_blockl); - max = WEBRTC_SPL_MAX(max, max16); - scale=22-(int16_t)WebRtcSpl_NormW32(max); - scale=WEBRTC_SPL_MAX(scale,0); - - tmp2 = WebRtcSpl_DotProductWithScale( - &enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl], - &enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl], - plc_blockl, scale); - tmp1 = WebRtcSpl_DotProductWithScale(plc_pred, plc_pred, - plc_blockl, scale); - - /* Check the energy difference */ - if ((tmp1>0)&&((tmp1>>2)>tmp2)) { - /* EnChange is now guaranteed to be <0.5 - Calculate EnChange=tmp2/tmp1 in Q16 - */ - - scale1=(int16_t)WebRtcSpl_NormW32(tmp1); - tmp1=WEBRTC_SPL_SHIFT_W32(tmp1, (scale1-16)); /* using 15 bits */ - - tmp2=WEBRTC_SPL_SHIFT_W32(tmp2, (scale1)); - EnChange = (int16_t)WebRtcSpl_DivW32W16(tmp2, - (int16_t)tmp1); - - 
/* Calculate the Sqrt of the energy in Q15 ((14+16)/2) */ - SqrtEnChange = (int16_t)WebRtcSpl_SqrtFloor(EnChange << 14); - - - /* Multiply first part of vector with 2*SqrtEnChange */ - WebRtcSpl_ScaleVector(plc_pred, plc_pred, SqrtEnChange, plc_blockl-16, - 14); - - /* Calculate increase parameter for window part (16 last samples) */ - /* (1-2*SqrtEnChange)/16 in Q15 */ - inc = 2048 - (SqrtEnChange >> 3); - - win=0; - tmpW16ptr=&plc_pred[plc_blockl-16]; - - for (i=16;i>0;i--) { - *tmpW16ptr = (int16_t)( - (*tmpW16ptr * (SqrtEnChange + (win >> 1))) >> 14); - /* multiply by (2.0*SqrtEnChange+win) */ - - win += inc; - tmpW16ptr++; - } - } - - /* Make the linear interpolation between the forward PLC'd data - and the backward PLC'd data (from the new frame) - */ - - if (plc_blockl==40) { - inc=400; /* 1/41 in Q14 */ - } else { /* plc_blockl==80 */ - inc=202; /* 1/81 in Q14 */ - } - win=0; - enh_bufPtr1=&enh_buf[ENH_BUFL-1-iLBCdec_inst->blockl]; - for (i=0; i> 14); - *enh_bufPtr1 += (int16_t)( - ((16384 - win) * plc_pred[plc_blockl - 1 - i]) >> 14); - enh_bufPtr1--; - } - } else { - int16_t *synt = &downsampled[LPC_FILTERORDER]; - - enh_bufPtr1=&enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl]; - WEBRTC_SPL_MEMCPY_W16(enh_bufPtr1, plc_pred, plc_blockl); - - /* Clear fileter memory */ - WebRtcSpl_MemSetW16(iLBCdec_inst->syntMem, 0, LPC_FILTERORDER); - WebRtcSpl_MemSetW16(iLBCdec_inst->hpimemy, 0, 4); - WebRtcSpl_MemSetW16(iLBCdec_inst->hpimemx, 0, 2); - - /* Initialize filter memory by filtering through 2 lags */ - WEBRTC_SPL_MEMCPY_W16(&synt[-LPC_FILTERORDER], iLBCdec_inst->syntMem, - LPC_FILTERORDER); - WebRtcSpl_FilterARFastQ12( - enh_bufPtr1, - synt, - &iLBCdec_inst->old_syntdenum[ - (iLBCdec_inst->nsub-1)*(LPC_FILTERORDER+1)], - LPC_FILTERORDER+1, lag); - - WEBRTC_SPL_MEMCPY_W16(&synt[-LPC_FILTERORDER], &synt[lag-LPC_FILTERORDER], - LPC_FILTERORDER); - WebRtcIlbcfix_HpOutput(synt, (int16_t*)WebRtcIlbcfix_kHpOutCoefs, - iLBCdec_inst->hpimemy, iLBCdec_inst->hpimemx, - lag); - WebRtcSpl_FilterARFastQ12( - enh_bufPtr1, synt, - &iLBCdec_inst->old_syntdenum[ - (iLBCdec_inst->nsub-1)*(LPC_FILTERORDER+1)], - LPC_FILTERORDER+1, lag); - - WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->syntMem, &synt[lag-LPC_FILTERORDER], - LPC_FILTERORDER); - WebRtcIlbcfix_HpOutput(synt, (int16_t*)WebRtcIlbcfix_kHpOutCoefs, - iLBCdec_inst->hpimemy, iLBCdec_inst->hpimemx, - lag); - } - } - - - /* Perform enhancement block by block */ - - for (iblock = 0; iblock -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * interface for enhancer - *---------------------------------------------------------------*/ - -size_t // (o) Estimated lag in end of in[] -WebRtcIlbcfix_EnhancerInterface(int16_t* out, // (o) enhanced signal - const int16_t* in, // (i) unenhanced signal - IlbcDecoder* iLBCdec_inst); // (i) buffers etc - -#endif diff --git a/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c b/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c deleted file mode 100644 index 6b4f30c96b..0000000000 --- a/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_FilteredCbVecs.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * Construct an additional codebook vector by filtering the - * initial codebook buffer. This vector is then used to expand - * the codebook with an additional section. - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_FilteredCbVecs( - int16_t *cbvectors, /* (o) Codebook vector for the higher section */ - int16_t *CBmem, /* (i) Codebook memory that is filtered to create a - second CB section */ - size_t lMem, /* (i) Length of codebook memory */ - size_t samples /* (i) Number of samples to filter */ - ) { - - /* Set up the memory, start with zero state */ - WebRtcSpl_MemSetW16(CBmem+lMem, 0, CB_HALFFILTERLEN); - WebRtcSpl_MemSetW16(CBmem-CB_HALFFILTERLEN, 0, CB_HALFFILTERLEN); - WebRtcSpl_MemSetW16(cbvectors, 0, lMem-samples); - - /* Filter to obtain the filtered CB memory */ - - WebRtcSpl_FilterMAFastQ12( - CBmem+CB_HALFFILTERLEN+lMem-samples, cbvectors+lMem-samples, - (int16_t*)WebRtcIlbcfix_kCbFiltersRev, CB_FILTERLEN, samples); - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h b/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h deleted file mode 100644 index d0f5f1a4ed..0000000000 --- a/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_FilteredCbVecs.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FILTERED_CB_VECS_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FILTERED_CB_VECS_H_ - -#include -#include - -/*----------------------------------------------------------------* - * Construct an additional codebook vector by filtering the - * initial codebook buffer. This vector is then used to expand - * the codebook with an additional section. 
- *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_FilteredCbVecs( - int16_t* cbvectors, /* (o) Codebook vector for the higher section */ - int16_t* CBmem, /* (i) Codebook memory that is filtered to create a - second CB section */ - size_t lMem, /* (i) Length of codebook memory */ - size_t samples /* (i) Number of samples to filter */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/frame_classify.c b/modules/audio_coding/codecs/ilbc/frame_classify.c deleted file mode 100644 index c1084b1645..0000000000 --- a/modules/audio_coding/codecs/ilbc/frame_classify.c +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_FrameClassify.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/frame_classify.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * Classification of subframes to localize start state - *---------------------------------------------------------------*/ - -size_t WebRtcIlbcfix_FrameClassify( - /* (o) Index to the max-energy sub frame */ - IlbcEncoder *iLBCenc_inst, - /* (i/o) the encoder state structure */ - int16_t *residualFIX /* (i) lpc residual signal */ - ){ - int16_t max, scale; - int32_t ssqEn[NSUB_MAX-1]; - int16_t *ssqPtr; - int32_t *seqEnPtr; - int32_t maxW32; - int16_t scale1; - size_t pos; - size_t n; - - /* - Calculate the energy of each of the 80 sample blocks - in the draft the 4 first and last samples are windowed with 1/5...4/5 - and 4/5...1/5 respectively. 
To simplify for the fixpoint we have changed - this to 0 0 1 1 and 1 1 0 0 - */ - - max = WebRtcSpl_MaxAbsValueW16(residualFIX, iLBCenc_inst->blockl); - scale = WebRtcSpl_GetSizeInBits((uint32_t)(max * max)); - - /* Scale to maximum 24 bits so that it won't overflow for 76 samples */ - scale = scale-24; - scale1 = WEBRTC_SPL_MAX(0, scale); - - /* Calculate energies */ - ssqPtr=residualFIX + 2; - seqEnPtr=ssqEn; - for (n=(iLBCenc_inst->nsub-1); n>0; n--) { - (*seqEnPtr) = WebRtcSpl_DotProductWithScale(ssqPtr, ssqPtr, 76, scale1); - ssqPtr += 40; - seqEnPtr++; - } - - /* Scale to maximum 20 bits in order to allow for the 11 bit window */ - maxW32 = WebRtcSpl_MaxValueW32(ssqEn, iLBCenc_inst->nsub - 1); - scale = WebRtcSpl_GetSizeInBits(maxW32) - 20; - scale1 = WEBRTC_SPL_MAX(0, scale); - - /* Window each 80 block with the ssqEn_winTbl window to give higher probability for - the blocks in the middle - */ - seqEnPtr=ssqEn; - if (iLBCenc_inst->mode==20) { - ssqPtr=(int16_t*)WebRtcIlbcfix_kStartSequenceEnrgWin+1; - } else { - ssqPtr=(int16_t*)WebRtcIlbcfix_kStartSequenceEnrgWin; - } - for (n=(iLBCenc_inst->nsub-1); n>0; n--) { - (*seqEnPtr)=WEBRTC_SPL_MUL(((*seqEnPtr)>>scale1), (*ssqPtr)); - seqEnPtr++; - ssqPtr++; - } - - /* Extract the best choise of start state */ - pos = WebRtcSpl_MaxIndexW32(ssqEn, iLBCenc_inst->nsub - 1) + 1; - - return(pos); -} diff --git a/modules/audio_coding/codecs/ilbc/frame_classify.h b/modules/audio_coding/codecs/ilbc/frame_classify.h deleted file mode 100644 index dee67cc5f9..0000000000 --- a/modules/audio_coding/codecs/ilbc/frame_classify.h +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_FrameClassify.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FRAME_CLASSIFY_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FRAME_CLASSIFY_H_ - -#include -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -size_t WebRtcIlbcfix_FrameClassify( - /* (o) Index to the max-energy sub frame */ - IlbcEncoder* iLBCenc_inst, - /* (i/o) the encoder state structure */ - int16_t* residualFIX /* (i) lpc residual signal */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/gain_dequant.c b/modules/audio_coding/codecs/ilbc/gain_dequant.c deleted file mode 100644 index 1357dece33..0000000000 --- a/modules/audio_coding/codecs/ilbc/gain_dequant.c +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
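/* [Editor's sketch, not part of the deleted sources] What
   WebRtcIlbcfix_FrameClassify above computes, restated in floating point
   without the bit-exact scaling: the energy of each overlapping 80-sample
   block (76 samples actually summed, since the outermost two samples on
   each side are zero-weighted), multiplied by a window favouring the middle
   of the frame, and the index of the winner.  Names and the `win` parameter
   are illustrative; the real table is WebRtcIlbcfix_kStartSequenceEnrgWin,
   offset by one entry in 20 ms mode. */
#include <stddef.h>

static size_t frame_classify_sketch(const float* residual, size_t n_sub,
                                    const float* win /* n_sub - 1 weights */) {
  size_t best = 1;
  float best_e = -1.0f;
  for (size_t n = 0; n + 1 < n_sub; n++) {
    float e = 0.0f;
    for (size_t k = 0; k < 76; k++) {       /* 76 samples, 2 in from each edge */
      float s = residual[n * 40 + 2 + k];   /* sub-blocks are 40 samples apart */
      e += s * s;
    }
    e *= win[n];
    if (e > best_e) {
      best_e = e;
      best = n + 1;                         /* 1-based, as in the deleted code */
    }
  }
  return best; /* index of the max-energy sub frame */
}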
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_GainDequant.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/gain_dequant.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * decoder for quantized gains in the gain-shape coding of - * residual - *---------------------------------------------------------------*/ - -int16_t WebRtcIlbcfix_GainDequant( - /* (o) quantized gain value (Q14) */ - int16_t index, /* (i) quantization index */ - int16_t maxIn, /* (i) maximum of unquantized gain (Q14) */ - int16_t stage /* (i) The stage of the search */ - ){ - int16_t scale; - const int16_t *gain; - - /* obtain correct scale factor */ - - scale=WEBRTC_SPL_ABS_W16(maxIn); - scale = WEBRTC_SPL_MAX(1638, scale); /* if lower than 0.1, set it to 0.1 */ - - /* select the quantization table and return the decoded value */ - gain = WebRtcIlbcfix_kGain[stage]; - - return (int16_t)((scale * gain[index] + 8192) >> 14); -} diff --git a/modules/audio_coding/codecs/ilbc/gain_dequant.h b/modules/audio_coding/codecs/ilbc/gain_dequant.h deleted file mode 100644 index b5e6cef97b..0000000000 --- a/modules/audio_coding/codecs/ilbc/gain_dequant.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_GainDequant.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GAIN_DEQUANT_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GAIN_DEQUANT_H_ - -#include - -/*----------------------------------------------------------------* - * decoder for quantized gains in the gain-shape coding of - * residual - *---------------------------------------------------------------*/ - -int16_t WebRtcIlbcfix_GainDequant( - /* (o) quantized gain value (Q14) */ - int16_t index, /* (i) quantization index */ - int16_t maxIn, /* (i) maximum of unquantized gain (Q14) */ - int16_t stage /* (i) The stage of the search */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/gain_quant.c b/modules/audio_coding/codecs/ilbc/gain_quant.c deleted file mode 100644 index 9a6d49d51a..0000000000 --- a/modules/audio_coding/codecs/ilbc/gain_quant.c +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
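/* [Editor's note, not part of the deleted sources] In
   WebRtcIlbcfix_GainDequant above, both `scale` and the table entry are Q14
   quantities (16384 represents 1.0, 1638 roughly 0.1).  The expression
   (scale * gain[index] + 8192) >> 14 is the usual Q14 multiply with
   round-to-nearest: the product is Q28, adding 8192 (= 2^13) rounds, and
   the 14-bit shift brings it back to Q14.  A generic helper would be: */
#include <stdint.h>

static int16_t q14_mul_round_sketch(int16_t a_q14, int16_t b_q14) {
  return (int16_t)(((int32_t)a_q14 * b_q14 + 8192) >> 14);
}
/* Example: 0.5 * 0.5 -> q14_mul_round_sketch(8192, 8192) == 4096, i.e. 0.25
   in Q14. */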
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_GainQuant.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/gain_quant.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * quantizer for the gain in the gain-shape coding of residual - *---------------------------------------------------------------*/ - -int16_t WebRtcIlbcfix_GainQuant( /* (o) quantized gain value */ - int16_t gain, /* (i) gain value Q14 */ - int16_t maxIn, /* (i) maximum of gain value Q14 */ - int16_t stage, /* (i) The stage of the search */ - int16_t *index /* (o) quantization index */ - ) { - - int16_t scale, cblen; - int32_t gainW32, measure1, measure2; - const int16_t *cbPtr, *cb; - int loc, noMoves, noChecks, i; - - /* ensure a lower bound (0.1) on the scaling factor */ - - scale = WEBRTC_SPL_MAX(1638, maxIn); - - /* select the quantization table and calculate - the length of the table and the number of - steps in the binary search that are needed */ - cb = WebRtcIlbcfix_kGain[stage]; - cblen = 32>>stage; - noChecks = 4-stage; - - /* Multiply the gain with 2^14 to make the comparison - easier and with higher precision */ - gainW32 = gain << 14; - - /* Do a binary search, starting in the middle of the CB - loc - defines the current position in the table - noMoves - defines the number of steps to move in the CB in order - to get next CB location - */ - - loc = cblen>>1; - noMoves = loc; - cbPtr = cb + loc; /* Centre of CB */ - - for (i=noChecks;i>0;i--) { - noMoves>>=1; - measure1 = scale * *cbPtr; - - /* Move up if gain is larger, otherwise move down in table */ - measure1 = measure1 - gainW32; - - if (0>measure1) { - cbPtr+=noMoves; - loc+=noMoves; - } else { - cbPtr-=noMoves; - loc-=noMoves; - } - } - - /* Check which value is the closest one: loc-1, loc or loc+1 */ - - measure1 = scale * *cbPtr; - if (gainW32>measure1) { - /* Check against value above loc */ - measure2 = scale * cbPtr[1]; - if ((measure2-gainW32)<(gainW32-measure1)) { - loc+=1; - } - } else { - /* Check against value below loc */ - measure2 = scale * cbPtr[-1]; - if ((gainW32-measure2)<=(measure1-gainW32)) { - loc-=1; - } - } - - /* Guard against getting outside the table. The calculation above can give a location - which is one above the maximum value (in very rare cases) */ - loc=WEBRTC_SPL_MIN(loc, (cblen-1)); - *index=loc; - - /* Calculate and return the quantized gain value (in Q14) */ - return (int16_t)((scale * cb[loc] + 8192) >> 14); -} diff --git a/modules/audio_coding/codecs/ilbc/gain_quant.h b/modules/audio_coding/codecs/ilbc/gain_quant.h deleted file mode 100644 index fab9718a75..0000000000 --- a/modules/audio_coding/codecs/ilbc/gain_quant.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
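/* [Editor's sketch, not part of the deleted sources] Conceptually,
   WebRtcIlbcfix_GainQuant above picks the entry of a sorted gain codebook
   whose dequantized value scale*cb[i] is closest to the target gain, and
   returns that dequantized value together with its index.  The deleted
   fixed-point code reaches the same entry with a short binary search
   (4, 3 or 2 steps, since the tables hold 32, 16 and 8 entries per stage)
   plus a neighbour check; the sketch below uses a plain linear scan. */
#include <stddef.h>

static float gain_quant_sketch(float gain, float scale, const float* cb,
                               size_t cblen, size_t* index) {
  size_t best = 0;
  float best_err = -1.0f;
  for (size_t i = 0; i < cblen; i++) {
    float err = gain - scale * cb[i];
    if (err < 0.0f) err = -err;
    if (best_err < 0.0f || err < best_err) {
      best_err = err;
      best = i;
    }
  }
  *index = best;
  return scale * cb[best];
}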
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_GainQuant.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GAIN_QUANT_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GAIN_QUANT_H_ - -#include - -/*----------------------------------------------------------------* - * quantizer for the gain in the gain-shape coding of residual - *---------------------------------------------------------------*/ - -int16_t -WebRtcIlbcfix_GainQuant( /* (o) quantized gain value */ - int16_t gain, /* (i) gain value Q14 */ - int16_t maxIn, /* (i) maximum of gain value Q14 */ - int16_t stage, /* (i) The stage of the search */ - int16_t* index /* (o) quantization index */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/get_cd_vec.c b/modules/audio_coding/codecs/ilbc/get_cd_vec.c deleted file mode 100644 index e9cd2008e0..0000000000 --- a/modules/audio_coding/codecs/ilbc/get_cd_vec.c +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_GetCbVec.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/get_cd_vec.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/create_augmented_vec.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * Construct codebook vector for given index. - *---------------------------------------------------------------*/ - -bool WebRtcIlbcfix_GetCbVec( - int16_t *cbvec, /* (o) Constructed codebook vector */ - int16_t *mem, /* (i) Codebook buffer */ - size_t index, /* (i) Codebook index */ - size_t lMem, /* (i) Length of codebook buffer */ - size_t cbveclen /* (i) Codebook vector length */ - ){ - size_t k, base_size; - size_t lag; - /* Stack based */ - int16_t tempbuff2[SUBL+5]; - - /* Determine size of codebook sections */ - - base_size=lMem-cbveclen+1; - - if (cbveclen==SUBL) { - base_size += cbveclen / 2; - } - - /* No filter -> First codebook section */ - - if (index -#include -#include - -#include "absl/base/attributes.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -// Returns true on success, false on failure. In case of failure, the decoder -// state may be corrupted and needs resetting. 
-ABSL_MUST_USE_RESULT -bool WebRtcIlbcfix_GetCbVec( - int16_t* cbvec, /* (o) Constructed codebook vector */ - int16_t* mem, /* (i) Codebook buffer */ - size_t index, /* (i) Codebook index */ - size_t lMem, /* (i) Length of codebook buffer */ - size_t cbveclen /* (i) Codebook vector length */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/get_lsp_poly.c b/modules/audio_coding/codecs/ilbc/get_lsp_poly.c deleted file mode 100644 index e0fb21caf0..0000000000 --- a/modules/audio_coding/codecs/ilbc/get_lsp_poly.c +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_GetLspPoly.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/get_lsp_poly.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * Construct the polynomials F1(z) and F2(z) from the LSP - * (Computations are done in Q24) - * - * The expansion is performed using the following recursion: - * - * f[0] = 1; - * tmp = -2.0 * lsp[0]; - * f[1] = tmp; - * for (i=2; i<=5; i++) { - * b = -2.0 * lsp[2*i-2]; - * f[i] = tmp*f[i-1] + 2.0*f[i-2]; - * for (j=i; j>=2; j--) { - * f[j] = f[j] + tmp*f[j-1] + f[j-2]; - * } - * f[i] = f[i] + tmp; - * } - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_GetLspPoly( - int16_t *lsp, /* (i) LSP in Q15 */ - int32_t *f) /* (o) polonymial in Q24 */ -{ - int32_t tmpW32; - int i, j; - int16_t high, low; - int16_t *lspPtr; - int32_t *fPtr; - - lspPtr = lsp; - fPtr = f; - /* f[0] = 1.0 (Q24) */ - (*fPtr) = (int32_t)16777216; - fPtr++; - - (*fPtr) = WEBRTC_SPL_MUL((*lspPtr), -1024); - fPtr++; - lspPtr+=2; - - for(i=2; i<=5; i++) - { - (*fPtr) = fPtr[-2]; - - for(j=i; j>1; j--) - { - /* Compute f[j] = f[j] + tmp*f[j-1] + f[j-2]; */ - high = (int16_t)(fPtr[-1] >> 16); - low = (int16_t)((fPtr[-1] & 0xffff) >> 1); - - tmpW32 = 4 * high * *lspPtr + 4 * ((low * *lspPtr) >> 15); - - (*fPtr) += fPtr[-2]; - (*fPtr) -= tmpW32; - fPtr--; - } - *fPtr -= *lspPtr * (1 << 10); - - fPtr+=i; - lspPtr+=2; - } - return; -} diff --git a/modules/audio_coding/codecs/ilbc/get_lsp_poly.h b/modules/audio_coding/codecs/ilbc/get_lsp_poly.h deleted file mode 100644 index 70c9c4d4b4..0000000000 --- a/modules/audio_coding/codecs/ilbc/get_lsp_poly.h +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
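/* [Editor's sketch, not part of the deleted sources] The recursion that
   WebRtcIlbcfix_GetLspPoly above carries out in Q24/Q15, written in
   floating point.  lsp[] holds LSP cosines and the routine consumes every
   second entry (lspPtr += 2); as the header comment says, it is used to
   build the polynomials F1(z) and F2(z).  Function and variable names are
   illustrative. */
static void get_lsp_poly_sketch(const double* lsp, double* f /* 6 coeffs */) {
  f[0] = 1.0;
  f[1] = -2.0 * lsp[0];
  for (int i = 2; i <= 5; i++) {
    double b = -2.0 * lsp[2 * i - 2];
    f[i] = b * f[i - 1] + 2.0 * f[i - 2];
    for (int j = i - 1; j >= 2; j--) {
      f[j] += b * f[j - 1] + f[j - 2];
    }
    f[1] += b;
  }
}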
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_GetLspPoly.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_LSP_POLY_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_LSP_POLY_H_ - -#include - -/*----------------------------------------------------------------* - * Construct the polynomials F1(z) and F2(z) from the LSP - * (Computations are done in Q24) - * - * The expansion is performed using the following recursion: - * - * f[0] = 1; - * tmp = -2.0 * lsp[0]; - * f[1] = tmp; - * for (i=2; i<=5; i++) { - * b = -2.0 * lsp[2*i-2]; - * f[i] = tmp*f[i-1] + 2.0*f[i-2]; - * for (j=i; j>=2; j--) { - * f[j] = f[j] + tmp*f[j-1] + f[j-2]; - * } - * f[i] = f[i] + tmp; - * } - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_GetLspPoly(int16_t* lsp, /* (i) LSP in Q15 */ - int32_t* f); /* (o) polonymial in Q24 */ - -#endif diff --git a/modules/audio_coding/codecs/ilbc/get_sync_seq.c b/modules/audio_coding/codecs/ilbc/get_sync_seq.c deleted file mode 100644 index 68a569a40a..0000000000 --- a/modules/audio_coding/codecs/ilbc/get_sync_seq.c +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_GetSyncSeq.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/get_sync_seq.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/nearest_neighbor.h" -#include "modules/audio_coding/codecs/ilbc/refiner.h" - -/*----------------------------------------------------------------* - * get the pitch-synchronous sample sequence - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_GetSyncSeq( - int16_t *idata, /* (i) original data */ - size_t idatal, /* (i) dimension of data */ - size_t centerStartPos, /* (i) where current block starts */ - size_t *period, /* (i) rough-pitch-period array (Q-2) */ - const size_t *plocs, /* (i) where periods of period array are taken (Q-2) */ - size_t periodl, /* (i) dimension period array */ - size_t hl, /* (i) 2*hl+1 is the number of sequences */ - int16_t *surround /* (i/o) The contribution from this sequence - summed with earlier contributions */ - ){ - size_t i, centerEndPos, q; - /* Stack based */ - size_t lagBlock[2 * ENH_HL + 1]; - size_t blockStartPos[2 * ENH_HL + 1]; /* The position to search around (Q2) */ - size_t plocs2[ENH_PLOCSL]; - - centerEndPos = centerStartPos + ENH_BLOCKL - 1; - - /* present (find predicted lag from this position) */ - - WebRtcIlbcfix_NearestNeighbor(lagBlock + hl, - plocs, - 2 * (centerStartPos + centerEndPos), - periodl); - - blockStartPos[hl] = 4 * centerStartPos; - - /* past (find predicted position and perform a refined - search to find the best sequence) */ - - for (q = hl; q > 0; q--) { - size_t qq = q - 1; - 
size_t period_q = period[lagBlock[q]]; - /* Stop if this sequence would be outside the buffer; that means all - further-past sequences would also be outside the buffer. */ - if (blockStartPos[q] < period_q + (4 * ENH_OVERHANG)) - break; - blockStartPos[qq] = blockStartPos[q] - period_q; - - size_t value = blockStartPos[qq] + 4 * ENH_BLOCKL_HALF; - value = (value > period_q) ? (value - period_q) : 0; - WebRtcIlbcfix_NearestNeighbor(lagBlock + qq, plocs, value, periodl); - - /* Find the best possible sequence in the 4 times upsampled - domain around blockStartPos+q */ - WebRtcIlbcfix_Refiner(blockStartPos + qq, idata, idatal, centerStartPos, - blockStartPos[qq], surround, - WebRtcIlbcfix_kEnhWt[qq]); - } - - /* future (find predicted position and perform a refined - search to find the best sequence) */ - - for (i = 0; i < periodl; i++) { - plocs2[i] = plocs[i] - period[i]; - } - - for (q = hl + 1; q <= (2 * hl); q++) { - - WebRtcIlbcfix_NearestNeighbor( - lagBlock + q, - plocs2, - blockStartPos[q - 1] + 4 * ENH_BLOCKL_HALF, - periodl); - - blockStartPos[q]=blockStartPos[q-1]+period[lagBlock[q]]; - - if (blockStartPos[q] + 4 * (ENH_BLOCKL + ENH_OVERHANG) < 4 * idatal) { - - /* Find the best possible sequence in the 4 times upsampled - domain around blockStartPos+q */ - WebRtcIlbcfix_Refiner(blockStartPos + q, idata, idatal, centerStartPos, - blockStartPos[q], surround, - WebRtcIlbcfix_kEnhWt[2 * hl - q]); - - } else { - /* Don't add anything since this sequence would - be outside the buffer */ - } - } -} diff --git a/modules/audio_coding/codecs/ilbc/get_sync_seq.h b/modules/audio_coding/codecs/ilbc/get_sync_seq.h deleted file mode 100644 index 87030e568f..0000000000 --- a/modules/audio_coding/codecs/ilbc/get_sync_seq.h +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
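/* [Editor's sketch, not part of the deleted sources] The position
   bookkeeping in WebRtcIlbcfix_GetSyncSeq above, stripped of the Q2
   offsets, the per-position period lookup and the refinement search:
   starting at the current block, earlier and later pitch-synchronous block
   positions are found by stepping one pitch period backwards or forwards,
   and any block that would fall outside the data buffer is skipped.  A
   single constant period is used here for brevity; the real code looks up
   the local period for every step. */
#include <stddef.h>

static size_t sync_positions_sketch(size_t center, size_t period, size_t hl,
                                    size_t data_len, size_t block_len,
                                    size_t* pos /* room for 2*hl + 1 */) {
  size_t n = 0;
  pos[n++] = center;
  size_t p = center;
  for (size_t k = 0; k < hl && p >= period; k++) {  /* past */
    p -= period;
    pos[n++] = p;
  }
  p = center;
  for (size_t k = 0; k < hl && p + period + block_len <= data_len; k++) {
    p += period;                                    /* future */
    pos[n++] = p;
  }
  return n; /* number of usable positions */
}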
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_GetSyncSeq.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_SYNC_SEQ_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_SYNC_SEQ_H_ - -#include -#include - -/*----------------------------------------------------------------* - * get the pitch-synchronous sample sequence - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_GetSyncSeq( - int16_t* idata, /* (i) original data */ - size_t idatal, /* (i) dimension of data */ - size_t centerStartPos, /* (i) where current block starts */ - size_t* period, /* (i) rough-pitch-period array (Q-2) */ - const size_t* plocs, /* (i) where periods of period array are taken (Q-2) */ - size_t periodl, /* (i) dimension period array */ - size_t hl, /* (i) 2*hl+1 is the number of sequences */ - int16_t* surround /* (i/o) The contribution from this sequence - summed with earlier contributions */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/hp_input.c b/modules/audio_coding/codecs/ilbc/hp_input.c deleted file mode 100644 index be582f2e23..0000000000 --- a/modules/audio_coding/codecs/ilbc/hp_input.c +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_HpInput.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/hp_input.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * high-pass filter of input with *0.5 and saturation - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_HpInput( - int16_t *signal, /* (i/o) signal vector */ - int16_t *ba, /* (i) B- and A-coefficients (2:nd order) - {b[0] b[1] b[2] -a[1] -a[2]} a[0] - is assumed to be 1.0 */ - int16_t *y, /* (i/o) Filter state yhi[n-1] ylow[n-1] - yhi[n-2] ylow[n-2] */ - int16_t *x, /* (i/o) Filter state x[n-1] x[n-2] */ - size_t len) /* (i) Number of samples to filter */ -{ - size_t i; - int32_t tmpW32; - int32_t tmpW32b; - - for (i=0; i>15); - tmpW32 += y[0] * ba[3]; /* (-a[1])*y[i-1] (high part) */ - tmpW32 += y[2] * ba[4]; /* (-a[2])*y[i-2] (high part) */ - tmpW32 = (tmpW32<<1); - - tmpW32 += signal[i] * ba[0]; /* b[0]*x[0] */ - tmpW32 += x[0] * ba[1]; /* b[1]*x[i-1] */ - tmpW32 += x[1] * ba[2]; /* b[2]*x[i-2] */ - - /* Update state (input part) */ - x[1] = x[0]; - x[0] = signal[i]; - - /* Rounding in Q(12+1), i.e. 
add 2^12 */ - tmpW32b = tmpW32 + 4096; - - /* Saturate (to 2^28) so that the HP filtered signal does not overflow */ - tmpW32b = WEBRTC_SPL_SAT((int32_t)268435455, tmpW32b, (int32_t)-268435456); - - /* Convert back to Q0 and multiply with 0.5 */ - signal[i] = (int16_t)(tmpW32b >> 13); - - /* Update state (filtered part) */ - y[2] = y[0]; - y[3] = y[1]; - - /* upshift tmpW32 by 3 with saturation */ - if (tmpW32>268435455) { - tmpW32 = WEBRTC_SPL_WORD32_MAX; - } else if (tmpW32<-268435456) { - tmpW32 = WEBRTC_SPL_WORD32_MIN; - } else { - tmpW32 <<= 3; - } - - y[0] = (int16_t)(tmpW32 >> 16); - y[1] = (int16_t)((tmpW32 - (y[0] << 16)) >> 1); - } - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/hp_input.h b/modules/audio_coding/codecs/ilbc/hp_input.h deleted file mode 100644 index 9143d8efed..0000000000 --- a/modules/audio_coding/codecs/ilbc/hp_input.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_HpInput.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_HP_INPUT_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_HP_INPUT_H_ - -#include -#include - -// clang-format off -// Bad job here. https://bugs.llvm.org/show_bug.cgi?id=34274 -void WebRtcIlbcfix_HpInput( - int16_t* signal, /* (i/o) signal vector */ - int16_t* ba, /* (i) B- and A-coefficients (2:nd order) - {b[0] b[1] b[2] -a[1] -a[2]} - a[0] is assumed to be 1.0 */ - int16_t* y, /* (i/o) Filter state yhi[n-1] ylow[n-1] - yhi[n-2] ylow[n-2] */ - int16_t* x, /* (i/o) Filter state x[n-1] x[n-2] */ - size_t len); /* (i) Number of samples to filter */ -// clang-format on - -#endif diff --git a/modules/audio_coding/codecs/ilbc/hp_output.c b/modules/audio_coding/codecs/ilbc/hp_output.c deleted file mode 100644 index cc5f6dcd37..0000000000 --- a/modules/audio_coding/codecs/ilbc/hp_output.c +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
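/* [Editor's sketch, not part of the deleted sources] Ignoring the fixed
   point details, WebRtcIlbcfix_HpInput above and WebRtcIlbcfix_HpOutput
   below are the same second-order direct-form high-pass filter,
     y[n] = b0*x[n] + b1*x[n-1] + b2*x[n-2] - a1*y[n-1] - a2*y[n-2],
   with ba[] holding {b0, b1, b2, -a1, -a2}; HpInput additionally halves the
   output and HpOutput doubles it, and the split yhi/ylow state plus the
   saturations only exist to keep the arithmetic within 16/32-bit ranges.
   A floating-point restatement (names are illustrative): */
#include <stddef.h>

static void hp_filter_sketch(float* x_io, size_t len, const float ba[5],
                             float y_state[2] /* y[n-1], y[n-2] */,
                             float x_state[2] /* x[n-1], x[n-2] */,
                             float gain /* 0.5 for input, 2.0 for output */) {
  for (size_t i = 0; i < len; i++) {
    float x0 = x_io[i];
    float y0 = ba[0] * x0 + ba[1] * x_state[0] + ba[2] * x_state[1] +
               ba[3] * y_state[0] + ba[4] * y_state[1]; /* ba[3], ba[4] are -a1, -a2 */
    x_state[1] = x_state[0];
    x_state[0] = x0;
    y_state[1] = y_state[0];
    y_state[0] = y0;
    x_io[i] = gain * y0;
  }
}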
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_HpOutput.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/hp_output.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * high-pass filter of output and *2 with saturation - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_HpOutput( - int16_t *signal, /* (i/o) signal vector */ - int16_t *ba, /* (i) B- and A-coefficients (2:nd order) - {b[0] b[1] b[2] -a[1] -a[2]} a[0] - is assumed to be 1.0 */ - int16_t *y, /* (i/o) Filter state yhi[n-1] ylow[n-1] - yhi[n-2] ylow[n-2] */ - int16_t *x, /* (i/o) Filter state x[n-1] x[n-2] */ - size_t len) /* (i) Number of samples to filter */ -{ - size_t i; - int32_t tmpW32; - int32_t tmpW32b; - - for (i=0; i>15); - tmpW32 += y[0] * ba[3]; /* (-a[1])*y[i-1] (high part) */ - tmpW32 += y[2] * ba[4]; /* (-a[2])*y[i-2] (high part) */ - tmpW32 *= 2; - - tmpW32 += signal[i] * ba[0]; /* b[0]*x[0] */ - tmpW32 += x[0] * ba[1]; /* b[1]*x[i-1] */ - tmpW32 += x[1] * ba[2]; /* b[2]*x[i-2] */ - - /* Update state (input part) */ - x[1] = x[0]; - x[0] = signal[i]; - - /* Rounding in Q(12-1), i.e. add 2^10 */ - tmpW32b = tmpW32 + 1024; - - /* Saturate (to 2^26) so that the HP filtered signal does not overflow */ - tmpW32b = WEBRTC_SPL_SAT((int32_t)67108863, tmpW32b, (int32_t)-67108864); - - /* Convert back to Q0 and multiply with 2 */ - signal[i] = (int16_t)(tmpW32b >> 11); - - /* Update state (filtered part) */ - y[2] = y[0]; - y[3] = y[1]; - - /* upshift tmpW32 by 3 with saturation */ - if (tmpW32>268435455) { - tmpW32 = WEBRTC_SPL_WORD32_MAX; - } else if (tmpW32<-268435456) { - tmpW32 = WEBRTC_SPL_WORD32_MIN; - } else { - tmpW32 *= 8; - } - - y[0] = (int16_t)(tmpW32 >> 16); - y[1] = (int16_t)((tmpW32 & 0xffff) >> 1); - - } - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/hp_output.h b/modules/audio_coding/codecs/ilbc/hp_output.h deleted file mode 100644 index 6d1bd3cd88..0000000000 --- a/modules/audio_coding/codecs/ilbc/hp_output.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_HpOutput.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_HP_OUTPUT_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_HP_OUTPUT_H_ - -#include -#include - -// clang-format off -// Bad job here. 
https://bugs.llvm.org/show_bug.cgi?id=34274 -void WebRtcIlbcfix_HpOutput( - int16_t* signal, /* (i/o) signal vector */ - int16_t* ba, /* (i) B- and A-coefficients (2:nd order) - {b[0] b[1] b[2] -a[1] -a[2]} a[0] - is assumed to be 1.0 */ - int16_t* y, /* (i/o) Filter state yhi[n-1] ylow[n-1] - yhi[n-2] ylow[n-2] */ - int16_t* x, /* (i/o) Filter state x[n-1] x[n-2] */ - size_t len); /* (i) Number of samples to filter */ -// clang-format on - -#endif diff --git a/modules/audio_coding/codecs/ilbc/ilbc.c b/modules/audio_coding/codecs/ilbc/ilbc.c deleted file mode 100644 index ba6c3e46c3..0000000000 --- a/modules/audio_coding/codecs/ilbc/ilbc.c +++ /dev/null @@ -1,288 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - iLBCInterface.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/ilbc.h" - -#include - -#include "modules/audio_coding/codecs/ilbc/decode.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/encode.h" -#include "modules/audio_coding/codecs/ilbc/init_decode.h" -#include "modules/audio_coding/codecs/ilbc/init_encode.h" -#include "rtc_base/checks.h" - -int16_t WebRtcIlbcfix_EncoderAssign(IlbcEncoderInstance** iLBC_encinst, - int16_t* ILBCENC_inst_Addr, - int16_t* size) { - *iLBC_encinst=(IlbcEncoderInstance*)ILBCENC_inst_Addr; - *size=sizeof(IlbcEncoder)/sizeof(int16_t); - if (*iLBC_encinst!=NULL) { - return(0); - } else { - return(-1); - } -} - -int16_t WebRtcIlbcfix_DecoderAssign(IlbcDecoderInstance** iLBC_decinst, - int16_t* ILBCDEC_inst_Addr, - int16_t* size) { - *iLBC_decinst=(IlbcDecoderInstance*)ILBCDEC_inst_Addr; - *size=sizeof(IlbcDecoder)/sizeof(int16_t); - if (*iLBC_decinst!=NULL) { - return(0); - } else { - return(-1); - } -} - -int16_t WebRtcIlbcfix_EncoderCreate(IlbcEncoderInstance **iLBC_encinst) { - *iLBC_encinst=(IlbcEncoderInstance*)malloc(sizeof(IlbcEncoder)); - if (*iLBC_encinst!=NULL) { - return(0); - } else { - return(-1); - } -} - -int16_t WebRtcIlbcfix_DecoderCreate(IlbcDecoderInstance **iLBC_decinst) { - *iLBC_decinst=(IlbcDecoderInstance*)malloc(sizeof(IlbcDecoder)); - if (*iLBC_decinst!=NULL) { - return(0); - } else { - return(-1); - } -} - -int16_t WebRtcIlbcfix_EncoderFree(IlbcEncoderInstance *iLBC_encinst) { - free(iLBC_encinst); - return(0); -} - -int16_t WebRtcIlbcfix_DecoderFree(IlbcDecoderInstance *iLBC_decinst) { - free(iLBC_decinst); - return(0); -} - -int16_t WebRtcIlbcfix_EncoderInit(IlbcEncoderInstance* iLBCenc_inst, - int16_t mode) { - if ((mode==20)||(mode==30)) { - WebRtcIlbcfix_InitEncode((IlbcEncoder*) iLBCenc_inst, mode); - return(0); - } else { - return(-1); - } -} - -int WebRtcIlbcfix_Encode(IlbcEncoderInstance* iLBCenc_inst, - const int16_t* speechIn, - size_t len, - uint8_t* encoded) { - size_t pos = 0; - size_t encpos = 0; - - if ((len != ((IlbcEncoder*)iLBCenc_inst)->blockl) && -#ifdef SPLIT_10MS - (len != 80) && -#endif - (len != 2*((IlbcEncoder*)iLBCenc_inst)->blockl) && - (len != 3*((IlbcEncoder*)iLBCenc_inst)->blockl)) - { - /* A 
maximum of 3 frames/packet is allowed */ - return(-1); - } else { - - /* call encoder */ - while (possection == 0) -#else - pos += ((IlbcEncoder*)iLBCenc_inst)->blockl; -#endif - encpos += ((IlbcEncoder*)iLBCenc_inst)->no_of_words; - } - return (int)(encpos*2); - } -} - -int16_t WebRtcIlbcfix_DecoderInit(IlbcDecoderInstance* iLBCdec_inst, - int16_t mode) { - if ((mode==20)||(mode==30)) { - WebRtcIlbcfix_InitDecode((IlbcDecoder*) iLBCdec_inst, mode, 1); - return(0); - } else { - return(-1); - } -} -void WebRtcIlbcfix_DecoderInit20Ms(IlbcDecoderInstance* iLBCdec_inst) { - WebRtcIlbcfix_InitDecode((IlbcDecoder*) iLBCdec_inst, 20, 1); -} -void WebRtcIlbcfix_Decoderinit30Ms(IlbcDecoderInstance* iLBCdec_inst) { - WebRtcIlbcfix_InitDecode((IlbcDecoder*) iLBCdec_inst, 30, 1); -} - - -int WebRtcIlbcfix_Decode(IlbcDecoderInstance* iLBCdec_inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speechType) -{ - size_t i=0; - /* Allow for automatic switching between the frame sizes - (although you do get some discontinuity) */ - if ((len==((IlbcDecoder*)iLBCdec_inst)->no_of_bytes)|| - (len==2*((IlbcDecoder*)iLBCdec_inst)->no_of_bytes)|| - (len==3*((IlbcDecoder*)iLBCdec_inst)->no_of_bytes)) { - /* ok, do nothing */ - } else { - /* Test if the mode has changed */ - if (((IlbcDecoder*)iLBCdec_inst)->mode==20) { - if ((len==NO_OF_BYTES_30MS)|| - (len==2*NO_OF_BYTES_30MS)|| - (len==3*NO_OF_BYTES_30MS)) { - WebRtcIlbcfix_InitDecode( - ((IlbcDecoder*)iLBCdec_inst), 30, - ((IlbcDecoder*)iLBCdec_inst)->use_enhancer); - } else { - /* Unsupported frame length */ - return(-1); - } - } else { - if ((len==NO_OF_BYTES_20MS)|| - (len==2*NO_OF_BYTES_20MS)|| - (len==3*NO_OF_BYTES_20MS)) { - WebRtcIlbcfix_InitDecode( - ((IlbcDecoder*)iLBCdec_inst), 20, - ((IlbcDecoder*)iLBCdec_inst)->use_enhancer); - } else { - /* Unsupported frame length */ - return(-1); - } - } - } - - while ((i*((IlbcDecoder*)iLBCdec_inst)->no_of_bytes)blockl], - (const uint16_t*)&encoded - [2 * i * ((IlbcDecoder*)iLBCdec_inst)->no_of_words], - (IlbcDecoder*)iLBCdec_inst, 1) == -1) - return -1; - i++; - } - /* iLBC does not support VAD/CNG yet */ - *speechType=1; - return (int)(i*((IlbcDecoder*)iLBCdec_inst)->blockl); -} - -int WebRtcIlbcfix_Decode20Ms(IlbcDecoderInstance* iLBCdec_inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speechType) -{ - size_t i=0; - if ((len==((IlbcDecoder*)iLBCdec_inst)->no_of_bytes)|| - (len==2*((IlbcDecoder*)iLBCdec_inst)->no_of_bytes)|| - (len==3*((IlbcDecoder*)iLBCdec_inst)->no_of_bytes)) { - /* ok, do nothing */ - } else { - return(-1); - } - - while ((i*((IlbcDecoder*)iLBCdec_inst)->no_of_bytes)blockl], - (const uint16_t*)&encoded - [2 * i * ((IlbcDecoder*)iLBCdec_inst)->no_of_words], - (IlbcDecoder*)iLBCdec_inst, 1)) - return -1; - i++; - } - /* iLBC does not support VAD/CNG yet */ - *speechType=1; - return (int)(i*((IlbcDecoder*)iLBCdec_inst)->blockl); -} - -int WebRtcIlbcfix_Decode30Ms(IlbcDecoderInstance* iLBCdec_inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speechType) -{ - size_t i=0; - if ((len==((IlbcDecoder*)iLBCdec_inst)->no_of_bytes)|| - (len==2*((IlbcDecoder*)iLBCdec_inst)->no_of_bytes)|| - (len==3*((IlbcDecoder*)iLBCdec_inst)->no_of_bytes)) { - /* ok, do nothing */ - } else { - return(-1); - } - - while ((i*((IlbcDecoder*)iLBCdec_inst)->no_of_bytes)blockl], - (const uint16_t*)&encoded - [2 * i * ((IlbcDecoder*)iLBCdec_inst)->no_of_words], - (IlbcDecoder*)iLBCdec_inst, 1)) - return -1; - i++; - } - /* iLBC does not support 
VAD/CNG yet */ - *speechType=1; - return (int)(i*((IlbcDecoder*)iLBCdec_inst)->blockl); -} - -size_t WebRtcIlbcfix_DecodePlc(IlbcDecoderInstance* iLBCdec_inst, - int16_t* decoded, - size_t noOfLostFrames) { - size_t i; - uint16_t dummy; - - for (i=0;iblockl], &dummy, - (IlbcDecoder*)iLBCdec_inst, 0); - RTC_CHECK_EQ(result, 0); - } - return (noOfLostFrames*((IlbcDecoder*)iLBCdec_inst)->blockl); -} - -size_t WebRtcIlbcfix_NetEqPlc(IlbcDecoderInstance* iLBCdec_inst, - int16_t* decoded, - size_t noOfLostFrames) { - /* Two input parameters not used, but needed for function pointers in NetEQ */ - (void)(decoded = NULL); - (void)(noOfLostFrames = 0); - - WebRtcSpl_MemSetW16(((IlbcDecoder*)iLBCdec_inst)->enh_buf, 0, ENH_BUFL); - ((IlbcDecoder*)iLBCdec_inst)->prev_enh_pl = 2; - - return (0); -} - -void WebRtcIlbcfix_version(char *version) -{ - strcpy((char*)version, "1.1.1"); -} diff --git a/modules/audio_coding/codecs/ilbc/ilbc.h b/modules/audio_coding/codecs/ilbc/ilbc.h deleted file mode 100644 index de8cfde111..0000000000 --- a/modules/audio_coding/codecs/ilbc/ilbc.h +++ /dev/null @@ -1,251 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * ilbc.h - * - * This header file contains all of the API's for iLBC. - * - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_ILBC_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_ILBC_H_ - -#include -#include - -/* - * Solution to support multiple instances - * Customer has to cast instance to proper type - */ - -typedef struct iLBC_encinst_t_ IlbcEncoderInstance; - -typedef struct iLBC_decinst_t_ IlbcDecoderInstance; - -/* - * Comfort noise constants - */ - -#define ILBC_SPEECH 1 -#define ILBC_CNG 2 - -#ifdef __cplusplus -extern "C" { -#endif - -/**************************************************************************** - * WebRtcIlbcfix_XxxAssign(...) - * - * These functions assigns the encoder/decoder instance to the specified - * memory location - * - * Input: - * - XXX_xxxinst : Pointer to created instance that should be - * assigned - * - ILBCXXX_inst_Addr : Pointer to the desired memory space - * - size : The size that this structure occupies (in Word16) - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIlbcfix_EncoderAssign(IlbcEncoderInstance** iLBC_encinst, - int16_t* ILBCENC_inst_Addr, - int16_t* size); -int16_t WebRtcIlbcfix_DecoderAssign(IlbcDecoderInstance** iLBC_decinst, - int16_t* ILBCDEC_inst_Addr, - int16_t* size); - -/**************************************************************************** - * WebRtcIlbcfix_XxxAssign(...) - * - * These functions create a instance to the specified structure - * - * Input: - * - XXX_inst : Pointer to created instance that should be created - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIlbcfix_EncoderCreate(IlbcEncoderInstance** iLBC_encinst); -int16_t WebRtcIlbcfix_DecoderCreate(IlbcDecoderInstance** iLBC_decinst); - -/**************************************************************************** - * WebRtcIlbcfix_XxxFree(...) 
- * - * These functions frees the dynamic memory of a specified instance - * - * Input: - * - XXX_inst : Pointer to created instance that should be freed - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIlbcfix_EncoderFree(IlbcEncoderInstance* iLBC_encinst); -int16_t WebRtcIlbcfix_DecoderFree(IlbcDecoderInstance* iLBC_decinst); - -/**************************************************************************** - * WebRtcIlbcfix_EncoderInit(...) - * - * This function initializes a iLBC instance - * - * Input: - * - iLBCenc_inst : iLBC instance, i.e. the user that should receive - * be initialized - * - frameLen : The frame length of the codec 20/30 (ms) - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIlbcfix_EncoderInit(IlbcEncoderInstance* iLBCenc_inst, - int16_t frameLen); - -/**************************************************************************** - * WebRtcIlbcfix_Encode(...) - * - * This function encodes one iLBC frame. Input speech length has be a - * multiple of the frame length. - * - * Input: - * - iLBCenc_inst : iLBC instance, i.e. the user that should encode - * a package - * - speechIn : Input speech vector - * - len : Samples in speechIn (160, 240, 320 or 480) - * - * Output: - * - encoded : The encoded data vector - * - * Return value : >0 - Length (in bytes) of coded data - * -1 - Error - */ - -int WebRtcIlbcfix_Encode(IlbcEncoderInstance* iLBCenc_inst, - const int16_t* speechIn, - size_t len, - uint8_t* encoded); - -/**************************************************************************** - * WebRtcIlbcfix_DecoderInit(...) - * - * This function initializes a iLBC instance with either 20 or 30 ms frames - * Alternatively the WebRtcIlbcfix_DecoderInit_XXms can be used. Then it's - * not needed to specify the frame length with a variable. - * - * Input: - * - IlbcDecoderInstance : iLBC decoder instance - * - frameLen : The frame length of the codec 20/30 (ms) - * - * Return value : 0 - Ok - * -1 - Error - */ - -int16_t WebRtcIlbcfix_DecoderInit(IlbcDecoderInstance* iLBCdec_inst, - int16_t frameLen); -void WebRtcIlbcfix_DecoderInit20Ms(IlbcDecoderInstance* iLBCdec_inst); -void WebRtcIlbcfix_Decoderinit30Ms(IlbcDecoderInstance* iLBCdec_inst); - -/**************************************************************************** - * WebRtcIlbcfix_Decode(...) - * - * This function decodes a packet with iLBC frame(s). Output speech length - * will be a multiple of 160 or 240 samples ((160 or 240)*frames/packet). - * - * Input: - * - iLBCdec_inst : iLBC instance, i.e. the user that should decode - * a packet - * - encoded : Encoded iLBC frame(s) - * - len : Bytes in encoded vector - * - * Output: - * - decoded : The decoded vector - * - speechType : 1 normal, 2 CNG - * - * Return value : >0 - Samples in decoded vector - * -1 - Error - */ - -int WebRtcIlbcfix_Decode(IlbcDecoderInstance* iLBCdec_inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speechType); -int WebRtcIlbcfix_Decode20Ms(IlbcDecoderInstance* iLBCdec_inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speechType); -int WebRtcIlbcfix_Decode30Ms(IlbcDecoderInstance* iLBCdec_inst, - const uint8_t* encoded, - size_t len, - int16_t* decoded, - int16_t* speechType); - -/**************************************************************************** - * WebRtcIlbcfix_DecodePlc(...) - * - * This function conducts PLC for iLBC frame(s). Output speech length - * will be a multiple of 160 or 240 samples. 
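The declarations above make up the whole public iLBC C API: assign or create an instance, initialize it with a 20 or 30 ms frame length, then feed whole frames to WebRtcIlbcfix_Encode and WebRtcIlbcfix_Decode. A minimal round trip for the 20 ms mode (160 samples per frame at 8 kHz) could look like the sketch below; the buffer sizes and the silent input frame are illustrative, and the encoded byte count is taken from the return value of WebRtcIlbcfix_Encode rather than assumed.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#include "modules/audio_coding/codecs/ilbc/ilbc.h"

int main(void) {
  IlbcEncoderInstance* enc = NULL;
  IlbcDecoderInstance* dec = NULL;
  int16_t speech[160] = {0};  /* one 20 ms frame at 8 kHz */
  uint8_t payload[100];       /* large enough for one encoded frame */
  int16_t decoded[160];
  int16_t speech_type;

  if (WebRtcIlbcfix_EncoderCreate(&enc) != 0 ||
      WebRtcIlbcfix_DecoderCreate(&dec) != 0) {
    return 1;
  }
  WebRtcIlbcfix_EncoderInit(enc, 20);  /* 20 ms frames */
  WebRtcIlbcfix_DecoderInit(dec, 20);

  int bytes = WebRtcIlbcfix_Encode(enc, speech, 160, payload);
  if (bytes > 0) {
    int samples = WebRtcIlbcfix_Decode(dec, payload, (size_t)bytes,
                                       decoded, &speech_type);
    printf("encoded %d bytes, decoded %d samples, type %d\n",
           bytes, samples, speech_type);
  }

  WebRtcIlbcfix_EncoderFree(enc);
  WebRtcIlbcfix_DecoderFree(dec);
  return 0;
}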
- * - * Input: - * - iLBCdec_inst : iLBC instance, i.e. the user that should perform - * a PLC - * - noOfLostFrames : Number of PLC frames to produce - * - * Output: - * - decoded : The "decoded" vector - * - * Return value : Samples in decoded PLC vector - */ - -size_t WebRtcIlbcfix_DecodePlc(IlbcDecoderInstance* iLBCdec_inst, - int16_t* decoded, - size_t noOfLostFrames); - -/**************************************************************************** - * WebRtcIlbcfix_NetEqPlc(...) - * - * This function updates the decoder when a packet loss has occured, but it - * does not produce any PLC data. Function can be used if another PLC method - * is used (i.e NetEq). - * - * Input: - * - iLBCdec_inst : iLBC instance that should be updated - * - noOfLostFrames : Number of lost frames - * - * Output: - * - decoded : The "decoded" vector (nothing in this case) - * - * Return value : Samples in decoded PLC vector - */ - -size_t WebRtcIlbcfix_NetEqPlc(IlbcDecoderInstance* iLBCdec_inst, - int16_t* decoded, - size_t noOfLostFrames); - -/**************************************************************************** - * WebRtcIlbcfix_version(...) - * - * This function returns the version number of iLBC - * - * Output: - * - version : Version number of iLBC (maximum 20 char) - */ - -void WebRtcIlbcfix_version(char* version); - -#ifdef __cplusplus -} -#endif - -#endif // MODULES_AUDIO_CODING_CODECS_ILBC_ILBC_H_ diff --git a/modules/audio_coding/codecs/ilbc/ilbc_unittest.cc b/modules/audio_coding/codecs/ilbc/ilbc_unittest.cc deleted file mode 100644 index 689292f131..0000000000 --- a/modules/audio_coding/codecs/ilbc/ilbc_unittest.cc +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h" -#include "modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h" -#include "modules/audio_coding/codecs/legacy_encoded_audio_frame.h" -#include "test/gtest.h" - -namespace webrtc { - -TEST(IlbcTest, BadPacket) { - // Get a good packet. - AudioEncoderIlbcConfig config; - config.frame_size_ms = 20; // We need 20 ms rather than the default 30 ms; - // otherwise, all possible values of cb_index[2] - // are valid. - AudioEncoderIlbcImpl encoder(config, 102); - std::vector samples(encoder.SampleRateHz() / 100, 4711); - rtc::Buffer packet; - int num_10ms_chunks = 0; - while (packet.size() == 0) { - encoder.Encode(0, samples, &packet); - num_10ms_chunks += 1; - } - - // Break the packet by setting all bits of the unsigned 7-bit number - // cb_index[2] to 1, giving it a value of 127. For a 20 ms packet, this is - // too large. - EXPECT_EQ(38u, packet.size()); - rtc::Buffer bad_packet(packet.data(), packet.size()); - bad_packet[29] |= 0x3f; // Bits 1-6. - bad_packet[30] |= 0x80; // Bit 0. - - // Decode the bad packet. We expect the decoder to respond by returning -1. 
- AudioDecoderIlbcImpl decoder; - std::vector decoded_samples(num_10ms_chunks * samples.size()); - AudioDecoder::SpeechType speech_type; - EXPECT_EQ(-1, decoder.Decode(bad_packet.data(), bad_packet.size(), - encoder.SampleRateHz(), - sizeof(int16_t) * decoded_samples.size(), - decoded_samples.data(), &speech_type)); - - // Decode the good packet. This should work, because the failed decoding - // should not have left the decoder in a broken state. - EXPECT_EQ(static_cast(decoded_samples.size()), - decoder.Decode(packet.data(), packet.size(), encoder.SampleRateHz(), - sizeof(int16_t) * decoded_samples.size(), - decoded_samples.data(), &speech_type)); -} - -class SplitIlbcTest : public ::testing::TestWithParam > { - protected: - virtual void SetUp() { - const std::pair parameters = GetParam(); - num_frames_ = parameters.first; - frame_length_ms_ = parameters.second; - frame_length_bytes_ = (frame_length_ms_ == 20) ? 38 : 50; - } - size_t num_frames_; - int frame_length_ms_; - size_t frame_length_bytes_; -}; - -TEST_P(SplitIlbcTest, NumFrames) { - AudioDecoderIlbcImpl decoder; - const size_t frame_length_samples = frame_length_ms_ * 8; - const auto generate_payload = [](size_t payload_length_bytes) { - rtc::Buffer payload(payload_length_bytes); - // Fill payload with increasing integers {0, 1, 2, ...}. - for (size_t i = 0; i < payload.size(); ++i) { - payload[i] = static_cast(i); - } - return payload; - }; - - const auto results = decoder.ParsePayload( - generate_payload(frame_length_bytes_ * num_frames_), 0); - EXPECT_EQ(num_frames_, results.size()); - - size_t frame_num = 0; - uint8_t payload_value = 0; - for (const auto& result : results) { - EXPECT_EQ(frame_length_samples * frame_num, result.timestamp); - const LegacyEncodedAudioFrame* frame = - static_cast(result.frame.get()); - const rtc::Buffer& payload = frame->payload(); - EXPECT_EQ(frame_length_bytes_, payload.size()); - for (size_t i = 0; i < payload.size(); ++i, ++payload_value) { - EXPECT_EQ(payload_value, payload[i]); - } - ++frame_num; - } -} - -// Test 1 through 5 frames of 20 and 30 ms size. -// Also test the maximum number of frames in one packet for 20 and 30 ms. -// The maximum is defined by the largest payload length that can be uniquely -// resolved to a frame size of either 38 bytes (20 ms) or 50 bytes (30 ms). -INSTANTIATE_TEST_SUITE_P( - IlbcTest, - SplitIlbcTest, - ::testing::Values(std::pair(1, 20), // 1 frame, 20 ms. - std::pair(2, 20), // 2 frames, 20 ms. - std::pair(3, 20), // And so on. - std::pair(4, 20), - std::pair(5, 20), - std::pair(24, 20), - std::pair(1, 30), - std::pair(2, 30), - std::pair(3, 30), - std::pair(4, 30), - std::pair(5, 30), - std::pair(18, 30))); - -// Test too large payload size. -TEST(IlbcTest, SplitTooLargePayload) { - AudioDecoderIlbcImpl decoder; - constexpr size_t kPayloadLengthBytes = 950; - const auto results = - decoder.ParsePayload(rtc::Buffer(kPayloadLengthBytes), 0); - EXPECT_TRUE(results.empty()); -} - -// Payload not an integer number of frames. -TEST(IlbcTest, SplitUnevenPayload) { - AudioDecoderIlbcImpl decoder; - constexpr size_t kPayloadLengthBytes = 39; // Not an even number of frames. 
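The parameterized tests above pin down the payload-splitting rule: a packet must contain a whole number of 38-byte (20 ms) or 50-byte (30 ms) frames, and it is rejected once its length can no longer be resolved to exactly one of those frame sizes (950 bytes, for instance, is both 25*38 and 19*50). A small standalone helper expressing that rule, written only to mirror the test expectations and not taken from the deleted ParsePayload code, is:

#include <stddef.h>

/* Resolve an iLBC payload length to a frame size in bytes:
 * 38-byte frames are 20 ms, 50-byte frames are 30 ms, and lengths that
 * fit neither (or both) frame sizes are rejected. Returns 0 on error. */
static size_t IlbcFrameSizeBytes(size_t payload_len) {
  int fits_20ms = (payload_len > 0) && (payload_len % 38 == 0);
  int fits_30ms = (payload_len > 0) && (payload_len % 50 == 0);
  if (fits_20ms == fits_30ms) {
    return 0;  /* not a whole number of frames, or ambiguous (e.g. 950) */
  }
  return fits_20ms ? 38 : 50;
}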
- const auto results = - decoder.ParsePayload(rtc::Buffer(kPayloadLengthBytes), 0); - EXPECT_TRUE(results.empty()); -} - -} // namespace webrtc diff --git a/modules/audio_coding/codecs/ilbc/index_conv_dec.c b/modules/audio_coding/codecs/ilbc/index_conv_dec.c deleted file mode 100644 index d78f81a897..0000000000 --- a/modules/audio_coding/codecs/ilbc/index_conv_dec.c +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_IndexConvDec.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/index_conv_dec.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -void WebRtcIlbcfix_IndexConvDec( - int16_t *index /* (i/o) Codebook indexes */ - ){ - int k; - - for (k=4;k<6;k++) { - /* Readjust the second and third codebook index for the first 40 sample - so that they look the same as the first (in terms of lag) - */ - if ((index[k]>=44)&&(index[k]<108)) { - index[k]+=64; - } else if ((index[k]>=108)&&(index[k]<128)) { - index[k]+=128; - } else { - /* ERROR */ - } - } -} diff --git a/modules/audio_coding/codecs/ilbc/index_conv_dec.h b/modules/audio_coding/codecs/ilbc/index_conv_dec.h deleted file mode 100644 index 4d3f733355..0000000000 --- a/modules/audio_coding/codecs/ilbc/index_conv_dec.h +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_IndexConvDec.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INDEX_CONV_DEC_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INDEX_CONV_DEC_H_ - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -void WebRtcIlbcfix_IndexConvDec(int16_t* index /* (i/o) Codebook indexes */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/index_conv_enc.c b/modules/audio_coding/codecs/ilbc/index_conv_enc.c deleted file mode 100644 index 83144150b4..0000000000 --- a/modules/audio_coding/codecs/ilbc/index_conv_enc.c +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - IiLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_IndexConvEnc.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/index_conv_enc.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * Convert the codebook indexes to make the search easier - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_IndexConvEnc( - int16_t *index /* (i/o) Codebook indexes */ - ){ - int k; - - for (k=4;k<6;k++) { - /* Readjust the second and third codebook index so that it is - packetized into 7 bits (before it was put in lag-wise the same - way as for the first codebook which uses 8 bits) - */ - if ((index[k]>=108)&&(index[k]<172)) { - index[k]-=64; - } else if (index[k]>=236) { - index[k]-=128; - } else { - /* ERROR */ - } - } -} diff --git a/modules/audio_coding/codecs/ilbc/index_conv_enc.h b/modules/audio_coding/codecs/ilbc/index_conv_enc.h deleted file mode 100644 index 0172ac416b..0000000000 --- a/modules/audio_coding/codecs/ilbc/index_conv_enc.h +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_IndexConvEnc.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INDEX_CONV_ENC_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INDEX_CONV_ENC_H_ - -#include - -/*----------------------------------------------------------------* - * Convert the codebook indexes to make the search easier - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_IndexConvEnc(int16_t* index /* (i/o) Codebook indexes */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/init_decode.c b/modules/audio_coding/codecs/ilbc/init_decode.c deleted file mode 100644 index 3eb41e33b0..0000000000 --- a/modules/audio_coding/codecs/ilbc/init_decode.c +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_InitDecode.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/init_decode.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * Initiation of decoder instance. 
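Taken together, WebRtcIlbcfix_IndexConvEnc above and WebRtcIlbcfix_IndexConvDec earlier in this change are inverses of each other on the ranges they touch: the encoder maps [108, 172) down to [44, 108) and [236, 256) down to [108, 128), and the decoder maps them back. A standalone round-trip check is shown below; the mappings are copied inline so the example compiles without the rest of the codec.

#include <assert.h>
#include <stdint.h>

static int16_t ConvEnc(int16_t v) {  /* lag-wise index -> packetized index */
  if (v >= 108 && v < 172) return v - 64;
  if (v >= 236) return v - 128;
  return v;  /* out of range: left unchanged, as in the original */
}

static int16_t ConvDec(int16_t v) {  /* packetized index -> lag-wise index */
  if (v >= 44 && v < 108) return v + 64;
  if (v >= 108 && v < 128) return v + 128;
  return v;
}

int main(void) {
  for (int16_t v = 108; v < 172; v++) assert(ConvDec(ConvEnc(v)) == v);
  for (int16_t v = 236; v < 256; v++) assert(ConvDec(ConvEnc(v)) == v);
  return 0;
}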
- *---------------------------------------------------------------*/ - -int WebRtcIlbcfix_InitDecode( /* (o) Number of decoded samples */ - IlbcDecoder *iLBCdec_inst, /* (i/o) Decoder instance */ - int16_t mode, /* (i) frame size mode */ - int use_enhancer) { /* (i) 1: use enhancer, 0: no enhancer */ - int i; - - iLBCdec_inst->mode = mode; - - /* Set all the variables that are dependent on the frame size mode */ - if (mode==30) { - iLBCdec_inst->blockl = BLOCKL_30MS; - iLBCdec_inst->nsub = NSUB_30MS; - iLBCdec_inst->nasub = NASUB_30MS; - iLBCdec_inst->lpc_n = LPC_N_30MS; - iLBCdec_inst->no_of_bytes = NO_OF_BYTES_30MS; - iLBCdec_inst->no_of_words = NO_OF_WORDS_30MS; - iLBCdec_inst->state_short_len=STATE_SHORT_LEN_30MS; - } - else if (mode==20) { - iLBCdec_inst->blockl = BLOCKL_20MS; - iLBCdec_inst->nsub = NSUB_20MS; - iLBCdec_inst->nasub = NASUB_20MS; - iLBCdec_inst->lpc_n = LPC_N_20MS; - iLBCdec_inst->no_of_bytes = NO_OF_BYTES_20MS; - iLBCdec_inst->no_of_words = NO_OF_WORDS_20MS; - iLBCdec_inst->state_short_len=STATE_SHORT_LEN_20MS; - } - else { - return(-1); - } - - /* Reset all the previous LSF to mean LSF */ - WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->lsfdeqold, WebRtcIlbcfix_kLsfMean, LPC_FILTERORDER); - - /* Clear the synthesis filter memory */ - WebRtcSpl_MemSetW16(iLBCdec_inst->syntMem, 0, LPC_FILTERORDER); - - /* Set the old synthesis filter to {1.0 0.0 ... 0.0} */ - WebRtcSpl_MemSetW16(iLBCdec_inst->old_syntdenum, 0, ((LPC_FILTERORDER + 1)*NSUB_MAX)); - for (i=0; iold_syntdenum[i*(LPC_FILTERORDER+1)] = 4096; - } - - /* Clear the variables that are used for the PLC */ - iLBCdec_inst->last_lag = 20; - iLBCdec_inst->consPLICount = 0; - iLBCdec_inst->prevPLI = 0; - iLBCdec_inst->perSquare = 0; - iLBCdec_inst->prevLag = 120; - iLBCdec_inst->prevLpc[0] = 4096; - WebRtcSpl_MemSetW16(iLBCdec_inst->prevLpc+1, 0, LPC_FILTERORDER); - WebRtcSpl_MemSetW16(iLBCdec_inst->prevResidual, 0, BLOCKL_MAX); - - /* Initialize the seed for the random number generator */ - iLBCdec_inst->seed = 777; - - /* Set the filter state of the HP filter to 0 */ - WebRtcSpl_MemSetW16(iLBCdec_inst->hpimemx, 0, 2); - WebRtcSpl_MemSetW16(iLBCdec_inst->hpimemy, 0, 4); - - /* Set the variables that are used in the ehnahcer */ - iLBCdec_inst->use_enhancer = use_enhancer; - WebRtcSpl_MemSetW16(iLBCdec_inst->enh_buf, 0, (ENH_BUFL+ENH_BUFL_FILTEROVERHEAD)); - for (i=0;ienh_period[i]=160; /* Q(-4) */ - } - - iLBCdec_inst->prev_enh_pl = 0; - - return (int)(iLBCdec_inst->blockl); -} diff --git a/modules/audio_coding/codecs/ilbc/init_decode.h b/modules/audio_coding/codecs/ilbc/init_decode.h deleted file mode 100644 index 92f9ad68e7..0000000000 --- a/modules/audio_coding/codecs/ilbc/init_decode.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
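Every size-dependent field set in WebRtcIlbcfix_InitDecode above comes from the frame-size mode. For reference, the numeric values behind the two configurations, consistent with the 160/240-sample and 38/50-byte figures used in ilbc.h and the unit tests in this change, can be summarized as:

#include <stddef.h>

/* Values behind the mode-dependent constants chosen above (8 kHz iLBC). */
struct IlbcModeParams {
  int frame_ms;
  size_t samples_per_frame;  /* BLOCKL_20MS / BLOCKL_30MS */
  size_t bytes_per_frame;    /* NO_OF_BYTES_20MS / NO_OF_BYTES_30MS */
};
static const struct IlbcModeParams kIlbcModes[2] = {
  {20, 160, 38},
  {30, 240, 50},
};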
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_InitDecode.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_DECODE_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_DECODE_H_ - -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * Initiation of decoder instance. - *---------------------------------------------------------------*/ - -int WebRtcIlbcfix_InitDecode(/* (o) Number of decoded samples */ - IlbcDecoder* - iLBCdec_inst, /* (i/o) Decoder instance */ - int16_t mode, /* (i) frame size mode */ - int use_enhancer /* (i) 1 to use enhancer - 0 to run without enhancer */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/init_encode.c b/modules/audio_coding/codecs/ilbc/init_encode.c deleted file mode 100644 index aa858e94bb..0000000000 --- a/modules/audio_coding/codecs/ilbc/init_encode.c +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_InitEncode.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/init_encode.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * Initiation of encoder instance. 
- *---------------------------------------------------------------*/ - -int WebRtcIlbcfix_InitEncode( /* (o) Number of bytes encoded */ - IlbcEncoder *iLBCenc_inst, /* (i/o) Encoder instance */ - int16_t mode) { /* (i) frame size mode */ - iLBCenc_inst->mode = mode; - - /* Set all the variables that are dependent on the frame size mode */ - if (mode==30) { - iLBCenc_inst->blockl = BLOCKL_30MS; - iLBCenc_inst->nsub = NSUB_30MS; - iLBCenc_inst->nasub = NASUB_30MS; - iLBCenc_inst->lpc_n = LPC_N_30MS; - iLBCenc_inst->no_of_bytes = NO_OF_BYTES_30MS; - iLBCenc_inst->no_of_words = NO_OF_WORDS_30MS; - iLBCenc_inst->state_short_len=STATE_SHORT_LEN_30MS; - } - else if (mode==20) { - iLBCenc_inst->blockl = BLOCKL_20MS; - iLBCenc_inst->nsub = NSUB_20MS; - iLBCenc_inst->nasub = NASUB_20MS; - iLBCenc_inst->lpc_n = LPC_N_20MS; - iLBCenc_inst->no_of_bytes = NO_OF_BYTES_20MS; - iLBCenc_inst->no_of_words = NO_OF_WORDS_20MS; - iLBCenc_inst->state_short_len=STATE_SHORT_LEN_20MS; - } - else { - return(-1); - } - - /* Clear the buffers and set the previous LSF and LSP to the mean value */ - WebRtcSpl_MemSetW16(iLBCenc_inst->anaMem, 0, LPC_FILTERORDER); - WEBRTC_SPL_MEMCPY_W16(iLBCenc_inst->lsfold, WebRtcIlbcfix_kLsfMean, LPC_FILTERORDER); - WEBRTC_SPL_MEMCPY_W16(iLBCenc_inst->lsfdeqold, WebRtcIlbcfix_kLsfMean, LPC_FILTERORDER); - WebRtcSpl_MemSetW16(iLBCenc_inst->lpc_buffer, 0, LPC_LOOKBACK + BLOCKL_MAX); - - /* Set the filter state of the HP filter to 0 */ - WebRtcSpl_MemSetW16(iLBCenc_inst->hpimemx, 0, 2); - WebRtcSpl_MemSetW16(iLBCenc_inst->hpimemy, 0, 4); - -#ifdef SPLIT_10MS - /*Zeroing the past samples for 10msec Split*/ - WebRtcSpl_MemSetW16(iLBCenc_inst->past_samples,0,160); - iLBCenc_inst->section = 0; -#endif - - return (int)(iLBCenc_inst->no_of_bytes); -} diff --git a/modules/audio_coding/codecs/ilbc/init_encode.h b/modules/audio_coding/codecs/ilbc/init_encode.h deleted file mode 100644 index 4a233fb946..0000000000 --- a/modules/audio_coding/codecs/ilbc/init_encode.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_InitEncode.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_ENCODE_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_ENCODE_H_ - -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * Initiation of encoder instance. - *---------------------------------------------------------------*/ - -int WebRtcIlbcfix_InitEncode(/* (o) Number of bytes encoded */ - IlbcEncoder* - iLBCenc_inst, /* (i/o) Encoder instance */ - int16_t mode /* (i) frame size mode */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/interpolate.c b/modules/audio_coding/codecs/ilbc/interpolate.c deleted file mode 100644 index 17ed244bd4..0000000000 --- a/modules/audio_coding/codecs/ilbc/interpolate.c +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Interpolate.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/interpolate.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * interpolation between vectors - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Interpolate( - int16_t *out, /* (o) output vector */ - int16_t *in1, /* (i) first input vector */ - int16_t *in2, /* (i) second input vector */ - int16_t coef, /* (i) weight coefficient in Q14 */ - int16_t length) /* (i) number of sample is vectors */ -{ - int i; - int16_t invcoef; - - /* - Performs the operation out[i] = in[i]*coef + (1-coef)*in2[i] (with rounding) - */ - - invcoef = 16384 - coef; /* 16384 = 1.0 (Q14)*/ - for (i = 0; i < length; i++) { - out[i] = (int16_t)((coef * in1[i] + invcoef * in2[i] + 8192) >> 14); - } - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/interpolate.h b/modules/audio_coding/codecs/ilbc/interpolate.h deleted file mode 100644 index 892082b75c..0000000000 --- a/modules/audio_coding/codecs/ilbc/interpolate.h +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Interpolate.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INTERPOLATE_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INTERPOLATE_H_ - -#include - -/*----------------------------------------------------------------* - * interpolation between vectors - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Interpolate( - int16_t* out, /* (o) output vector */ - int16_t* in1, /* (i) first input vector */ - int16_t* in2, /* (i) second input vector */ - int16_t coef, /* (i) weight coefficient in Q14 */ - int16_t length); /* (i) number of sample is vectors */ - -#endif diff --git a/modules/audio_coding/codecs/ilbc/interpolate_samples.c b/modules/audio_coding/codecs/ilbc/interpolate_samples.c deleted file mode 100644 index 6dddd6fb86..0000000000 --- a/modules/audio_coding/codecs/ilbc/interpolate_samples.c +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
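WebRtcIlbcfix_Interpolate above is a plain weighted average in Q14: out[i] = coef*in1[i] + (1 - coef)*in2[i], where 16384 represents 1.0 and the +8192 rounds before the shift back out of Q14. A tiny standalone demonstration with coef = 0.25 (4096 in Q14); the input vectors are made up for the example.

#include <stdint.h>
#include <stdio.h>

int main(void) {
  int16_t in1[4] = {1000, 2000, -3000, 4000};
  int16_t in2[4] = {0, 0, 0, 0};
  int16_t out[4];
  const int16_t coef = 4096;             /* 0.25 in Q14 */
  const int16_t invcoef = 16384 - coef;  /* 0.75 in Q14 */

  for (int i = 0; i < 4; i++) {
    /* Same arithmetic as WebRtcIlbcfix_Interpolate: round, then drop Q14. */
    out[i] = (int16_t)((coef * in1[i] + invcoef * in2[i] + 8192) >> 14);
  }
  /* Expected: roughly 0.25 * in1, i.e. {250, 500, -750, 1000}. */
  for (int i = 0; i < 4; i++) printf("%d ", out[i]);
  printf("\n");
  return 0;
}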
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_InterpolateSamples.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/interpolate_samples.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -void WebRtcIlbcfix_InterpolateSamples( - int16_t *interpSamples, /* (o) The interpolated samples */ - int16_t *CBmem, /* (i) The CB memory */ - size_t lMem /* (i) Length of the CB memory */ - ) { - int16_t *ppi, *ppo, i, j, temp1, temp2; - int16_t *tmpPtr; - - /* Calculate the 20 vectors of interpolated samples (4 samples each) - that are used in the codebooks for lag 20 to 39 */ - tmpPtr = interpSamples; - for (j=0; j<20; j++) { - temp1 = 0; - temp2 = 3; - ppo = CBmem+lMem-4; - ppi = CBmem+lMem-j-24; - for (i=0; i<4; i++) { - - *tmpPtr++ = (int16_t)((WebRtcIlbcfix_kAlpha[temp2] * *ppo) >> 15) + - (int16_t)((WebRtcIlbcfix_kAlpha[temp1] * *ppi) >> 15); - - ppo++; - ppi++; - temp1++; - temp2--; - } - } - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/interpolate_samples.h b/modules/audio_coding/codecs/ilbc/interpolate_samples.h deleted file mode 100644 index f4fa97d477..0000000000 --- a/modules/audio_coding/codecs/ilbc/interpolate_samples.h +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_InterpolateSamples.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INTERPOLATE_SAMPLES_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INTERPOLATE_SAMPLES_H_ - -#include -#include - -/*----------------------------------------------------------------* - * Construct the interpolated samples for the Augmented CB - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_InterpolateSamples( - int16_t* interpSamples, /* (o) The interpolated samples */ - int16_t* CBmem, /* (i) The CB memory */ - size_t lMem /* (i) Length of the CB memory */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/lpc_encode.c b/modules/audio_coding/codecs/ilbc/lpc_encode.c deleted file mode 100644 index 89f6d29724..0000000000 --- a/modules/audio_coding/codecs/ilbc/lpc_encode.c +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
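The loop above builds the interpolated samples for the augmented codebook by crossfading the last four samples of the codebook memory with four samples taken further back, using complementary Q15 weights from WebRtcIlbcfix_kAlpha. The sketch below shows just that crossfade pattern; the weight values are assumed for illustration, since the actual kAlpha table is defined elsewhere in the codec's constants and is not visible here.

#include <stdint.h>

/* Illustrative 4-sample crossfade in the spirit of the loop above: the
 * weight on the "recent" samples falls while the weight on the "older"
 * samples rises. The Q15 weight values here are hypothetical. */
static void Crossfade4(int16_t out[4], const int16_t recent[4],
                       const int16_t older[4]) {
  static const int16_t kWeights[4] = {6554, 13107, 19661, 26214};  /* ~0.2..0.8 */
  for (int i = 0; i < 4; i++) {
    out[i] = (int16_t)((kWeights[3 - i] * recent[i]) >> 15) +
             (int16_t)((kWeights[i] * older[i]) >> 15);
  }
}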
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_LpcEncode.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/lpc_encode.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/lsf_check.h" -#include "modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h" -#include "modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h" -#include "modules/audio_coding/codecs/ilbc/simple_lsf_quant.h" - -/*----------------------------------------------------------------* - * lpc encoder - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_LpcEncode( - int16_t *syntdenum, /* (i/o) synthesis filter coefficients - before/after encoding */ - int16_t *weightdenum, /* (i/o) weighting denumerator coefficients - before/after encoding */ - int16_t *lsf_index, /* (o) lsf quantization index */ - int16_t *data, /* (i) Speech to do LPC analysis on */ - IlbcEncoder *iLBCenc_inst - /* (i/o) the encoder state structure */ - ) { - /* Stack based */ - int16_t lsf[LPC_FILTERORDER * LPC_N_MAX]; - int16_t lsfdeq[LPC_FILTERORDER * LPC_N_MAX]; - - /* Calculate LSF's from the input speech */ - WebRtcIlbcfix_SimpleLpcAnalysis(lsf, data, iLBCenc_inst); - - /* Quantize the LSF's */ - WebRtcIlbcfix_SimpleLsfQ(lsfdeq, lsf_index, lsf, iLBCenc_inst->lpc_n); - - /* Stableize the LSF's if needed */ - WebRtcIlbcfix_LsfCheck(lsfdeq, LPC_FILTERORDER, iLBCenc_inst->lpc_n); - - /* Calculate the synthesis and weighting filter coefficients from - the optimal LSF and the dequantized LSF */ - WebRtcIlbcfix_SimpleInterpolateLsf(syntdenum, weightdenum, - lsf, lsfdeq, iLBCenc_inst->lsfold, - iLBCenc_inst->lsfdeqold, LPC_FILTERORDER, iLBCenc_inst); - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/lpc_encode.h b/modules/audio_coding/codecs/ilbc/lpc_encode.h deleted file mode 100644 index ca050b02cc..0000000000 --- a/modules/audio_coding/codecs/ilbc/lpc_encode.h +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_LpcEncode.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LPC_ENCODE_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LPC_ENCODE_H_ - -#include -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * lpc encoder - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_LpcEncode( - int16_t* syntdenum, /* (i/o) synthesis filter coefficients - before/after encoding */ - int16_t* weightdenum, /* (i/o) weighting denumerator coefficients - before/after encoding */ - int16_t* lsf_index, /* (o) lsf quantization index */ - int16_t* data, /* (i) Speech to do LPC analysis on */ - IlbcEncoder* iLBCenc_inst - /* (i/o) the encoder state structure */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/lsf_check.c b/modules/audio_coding/codecs/ilbc/lsf_check.c deleted file mode 100644 index 9f0e19a2d9..0000000000 --- a/modules/audio_coding/codecs/ilbc/lsf_check.c +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_LsfCheck.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/lsf_check.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * check for stability of lsf coefficients - *---------------------------------------------------------------*/ - -int WebRtcIlbcfix_LsfCheck( - int16_t *lsf, /* LSF parameters */ - int dim, /* dimension of LSF */ - int NoAn) /* No of analysis per frame */ -{ - int k,n,m, Nit=2, change=0,pos; - const int16_t eps=319; /* 0.039 in Q13 (50 Hz)*/ - const int16_t eps2=160; /* eps/2.0 in Q13;*/ - const int16_t maxlsf=25723; /* 3.14; (4000 Hz)*/ - const int16_t minlsf=82; /* 0.01; (0 Hz)*/ - - /* LSF separation check*/ - for (n=0;nmaxlsf) { - lsf[pos]=maxlsf; - change=1; - } - } - } - } - - return change; -} diff --git a/modules/audio_coding/codecs/ilbc/lsf_check.h b/modules/audio_coding/codecs/ilbc/lsf_check.h deleted file mode 100644 index 9ba90a31e6..0000000000 --- a/modules/audio_coding/codecs/ilbc/lsf_check.h +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
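WebRtcIlbcfix_LsfCheck above is documented mainly through its constants: neighbouring LSFs closer than eps (319 in Q13, about 50 Hz) are pushed apart by eps/2, the check is run twice (Nit = 2), and every LSF is clamped to roughly (0, pi) via minlsf and maxlsf. The sketch below enforces that kind of separation; it is written from those constants and comments and deliberately simplified, not copied from the deleted implementation.

#include <stdint.h>

/* Illustrative LSF stability check: enforce a minimum separation between
 * neighbouring LSFs and clamp them to (0, pi), all in Q13.
 * Returns 1 if any coefficient was changed, else 0. */
static int LsfEnforceStability(int16_t* lsf, int dim) {
  const int16_t eps = 319;      /* 0.039 rad in Q13 (~50 Hz) */
  const int16_t eps2 = 160;     /* eps / 2                   */
  const int16_t minlsf = 82;    /* ~0.01 rad                 */
  const int16_t maxlsf = 25723; /* ~3.14 rad                 */
  int changed = 0;

  for (int pass = 0; pass < 2; pass++) {          /* two passes, as in Nit = 2 */
    for (int k = 0; k < dim - 1; k++) {
      if ((int32_t)lsf[k + 1] - lsf[k] < eps) {   /* too close: push apart */
        int16_t mid = (int16_t)(((int32_t)lsf[k + 1] + lsf[k]) >> 1);
        lsf[k] = mid - eps2;
        lsf[k + 1] = mid + eps2;
        changed = 1;
      }
    }
  }
  for (int k = 0; k < dim; k++) {                 /* clamp to the valid range */
    if (lsf[k] < minlsf) { lsf[k] = minlsf; changed = 1; }
    if (lsf[k] > maxlsf) { lsf[k] = maxlsf; changed = 1; }
  }
  return changed;
}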
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_LsfCheck.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_CHECK_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_CHECK_H_ - -#include - -/*----------------------------------------------------------------* - * check for stability of lsf coefficients - *---------------------------------------------------------------*/ - -int WebRtcIlbcfix_LsfCheck(int16_t* lsf, /* LSF parameters */ - int dim, /* dimension of LSF */ - int NoAn); /* No of analysis per frame */ - -#endif diff --git a/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c b/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c deleted file mode 100644 index 04de5e7e6c..0000000000 --- a/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_LspInterpolate2PolyDec.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/interpolate.h" -#include "modules/audio_coding/codecs/ilbc/lsf_to_poly.h" - -/*----------------------------------------------------------------* - * interpolation of lsf coefficients for the decoder - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_LspInterpolate2PolyDec( - int16_t *a, /* (o) lpc coefficients Q12 */ - int16_t *lsf1, /* (i) first set of lsf coefficients Q13 */ - int16_t *lsf2, /* (i) second set of lsf coefficients Q13 */ - int16_t coef, /* (i) weighting coefficient to use between - lsf1 and lsf2 Q14 */ - int16_t length /* (i) length of coefficient vectors */ - ){ - int16_t lsftmp[LPC_FILTERORDER]; - - /* interpolate LSF */ - WebRtcIlbcfix_Interpolate(lsftmp, lsf1, lsf2, coef, length); - - /* Compute the filter coefficients from the LSF */ - WebRtcIlbcfix_Lsf2Poly(a, lsftmp); -} diff --git a/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h b/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h deleted file mode 100644 index a0ccfa96ac..0000000000 --- a/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_LspInterpolate2PolyDec.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_INTERPOLATE_TO_POLY_DEC_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_INTERPOLATE_TO_POLY_DEC_H_ - -#include - -/*----------------------------------------------------------------* - * interpolation of lsf coefficients for the decoder - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_LspInterpolate2PolyDec( - int16_t* a, /* (o) lpc coefficients Q12 */ - int16_t* lsf1, /* (i) first set of lsf coefficients Q13 */ - int16_t* lsf2, /* (i) second set of lsf coefficients Q13 */ - int16_t coef, /* (i) weighting coefficient to use between - lsf1 and lsf2 Q14 */ - int16_t length /* (i) length of coefficient vectors */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c b/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c deleted file mode 100644 index 618821216c..0000000000 --- a/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_LsfInterpolate2PloyEnc.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/interpolate.h" -#include "modules/audio_coding/codecs/ilbc/lsf_to_poly.h" - -/*----------------------------------------------------------------* - * lsf interpolator and conversion from lsf to a coefficients - * (subrutine to SimpleInterpolateLSF) - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_LsfInterpolate2PloyEnc( - int16_t *a, /* (o) lpc coefficients Q12 */ - int16_t *lsf1, /* (i) first set of lsf coefficients Q13 */ - int16_t *lsf2, /* (i) second set of lsf coefficients Q13 */ - int16_t coef, /* (i) weighting coefficient to use between - lsf1 and lsf2 Q14 */ - int16_t length /* (i) length of coefficient vectors */ - ) { - /* Stack based */ - int16_t lsftmp[LPC_FILTERORDER]; - - /* interpolate LSF */ - WebRtcIlbcfix_Interpolate(lsftmp, lsf1, lsf2, coef, length); - - /* Compute the filter coefficients from the LSF */ - WebRtcIlbcfix_Lsf2Poly(a, lsftmp); - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h b/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h deleted file mode 100644 index 08d1e8325a..0000000000 --- a/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_LsfInterpolate2PloyEnc.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_INTERPOLATE_TO_POLY_ENC_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_INTERPOLATE_TO_POLY_ENC_H_ - -#include - -/*----------------------------------------------------------------* - * lsf interpolator and conversion from lsf to a coefficients - * (subrutine to SimpleInterpolateLSF) - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_LsfInterpolate2PloyEnc( - int16_t* a, /* (o) lpc coefficients Q12 */ - int16_t* lsf1, /* (i) first set of lsf coefficients Q13 */ - int16_t* lsf2, /* (i) second set of lsf coefficients Q13 */ - int16_t coef, /* (i) weighting coefficient to use between - lsf1 and lsf2 Q14 */ - int16_t length /* (i) length of coefficient vectors */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c b/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c deleted file mode 100644 index ee8292f394..0000000000 --- a/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Lsf2Lsp.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/lsf_to_lsp.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * conversion from lsf to lsp coefficients - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Lsf2Lsp( - int16_t *lsf, /* (i) lsf in Q13 values between 0 and pi */ - int16_t *lsp, /* (o) lsp in Q15 values between -1 and 1 */ - int16_t m /* (i) number of coefficients */ - ) { - int16_t i, k; - int16_t diff; /* difference, which is used for the - linear approximation (Q8) */ - int16_t freq; /* normalized frequency in Q15 (0..1) */ - int32_t tmpW32; - - for(i=0; i<m; i++) - { - freq = (int16_t)((lsf[i] * 20861) >> 15); - /* 20861: 1.0/(2.0*PI) in Q17 */ - /* - Upper 8 bits give the index k and - Lower 8 bits give the difference, which needs - to be approximated linearly - */ - k = freq >> 8; - diff = (freq&0x00ff); - - /* Guard against getting outside table */ - - if (k>63) { - k = 63; - } - - /* Calculate linear approximation */ - tmpW32 = WebRtcIlbcfix_kCosDerivative[k] * diff; - lsp[i] = WebRtcIlbcfix_kCos[k] + (int16_t)(tmpW32 >> 12); - } - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h b/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h deleted file mode 100644 index fccc3c2b1c..0000000000 --- a/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Lsf2Lsp.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_TO_LSP_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_TO_LSP_H_ - -#include <stdint.h> - -/*----------------------------------------------------------------* - * conversion from lsf to lsp coefficients - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Lsf2Lsp( - int16_t* lsf, /* (i) lsf in Q13 values between 0 and pi */ - int16_t* lsp, /* (o) lsp in Q15 values between -1 and 1 */ - int16_t m /* (i) number of coefficients */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/lsf_to_poly.c b/modules/audio_coding/codecs/ilbc/lsf_to_poly.c deleted file mode 100644 index 8ca91d82f8..0000000000 --- a/modules/audio_coding/codecs/ilbc/lsf_to_poly.c +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree.
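For context on the WebRtcIlbcfix_Lsf2Lsp() routine deleted above: an LSP value is simply the cosine of the corresponding LSF angle, and the fixed-point code approximates cos() with a small table plus linear interpolation. The sketch below shows the same idea in plain floating-point C; it is illustrative only, and the table size and helper names are inventions, not taken from the deleted sources.

#include <math.h>
#include <stdio.h>

#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif

#define TABLE_SIZE 64 /* comparable in spirit to the deleted kCos table */

/* cos() over [0, pi] approximated by table lookup plus linear
   interpolation, the floating-point analogue of the Q13/Q15 code above. */
static double lsf_to_lsp_approx(double lsf_rad) {
  double pos = lsf_rad / M_PI * (TABLE_SIZE - 1); /* table position */
  int k = (int)pos;
  if (k > TABLE_SIZE - 2) k = TABLE_SIZE - 2;     /* guard the table end */
  double frac = pos - k;                          /* fractional part */
  double c0 = cos(M_PI * k / (TABLE_SIZE - 1));
  double c1 = cos(M_PI * (k + 1) / (TABLE_SIZE - 1));
  return c0 + frac * (c1 - c0);
}

int main(void) {
  for (double lsf = 0.25; lsf < 3.0; lsf += 0.75) {
    printf("lsf=%.2f  cos=%.4f  table approx=%.4f\n",
           lsf, cos(lsf), lsf_to_lsp_approx(lsf));
  }
  return 0;
}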
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Lsf2Poly.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/lsf_to_poly.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/get_lsp_poly.h" -#include "modules/audio_coding/codecs/ilbc/lsf_to_lsp.h" - -void WebRtcIlbcfix_Lsf2Poly( - int16_t *a, /* (o) predictor coefficients (order = 10) in Q12 */ - int16_t *lsf /* (i) line spectral frequencies in Q13 */ - ) { - int32_t f[2][6]; /* f[0][] and f[1][] corresponds to - F1(z) and F2(z) respectivly */ - int32_t *f1ptr, *f2ptr; - int16_t *a1ptr, *a2ptr; - int32_t tmpW32; - int16_t lsp[10]; - int i; - - /* Convert lsf to lsp */ - WebRtcIlbcfix_Lsf2Lsp(lsf, lsp, LPC_FILTERORDER); - - /* Get F1(z) and F2(z) from the lsp */ - f1ptr=f[0]; - f2ptr=f[1]; - WebRtcIlbcfix_GetLspPoly(&lsp[0],f1ptr); - WebRtcIlbcfix_GetLspPoly(&lsp[1],f2ptr); - - /* for i = 5 down to 1 - Compute f1[i] += f1[i-1]; - and f2[i] += f2[i-1]; - */ - f1ptr=&f[0][5]; - f2ptr=&f[1][5]; - for (i=5; i>0; i--) - { - (*f1ptr) += (*(f1ptr-1)); - (*f2ptr) -= (*(f2ptr-1)); - f1ptr--; - f2ptr--; - } - - /* Get the A(z) coefficients - a[0] = 1.0 - for i = 1 to 5 - a[i] = (f1[i] + f2[i] + round)>>13; - for i = 1 to 5 - a[11-i] = (f1[i] - f2[i] + round)>>13; - */ - a[0]=4096; - a1ptr=&a[1]; - a2ptr=&a[10]; - f1ptr=&f[0][1]; - f2ptr=&f[1][1]; - for (i=5; i>0; i--) - { - tmpW32 = (*f1ptr) + (*f2ptr); - *a1ptr = (int16_t)((tmpW32 + 4096) >> 13); - - tmpW32 = (*f1ptr) - (*f2ptr); - *a2ptr = (int16_t)((tmpW32 + 4096) >> 13); - - a1ptr++; - a2ptr--; - f1ptr++; - f2ptr++; - } - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/lsf_to_poly.h b/modules/audio_coding/codecs/ilbc/lsf_to_poly.h deleted file mode 100644 index 06f292f038..0000000000 --- a/modules/audio_coding/codecs/ilbc/lsf_to_poly.h +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Lsf2Poly.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_TO_POLY_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_TO_POLY_H_ - -#include - -/*----------------------------------------------------------------* - * Convert from LSF coefficients to A coefficients - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Lsf2Poly( - int16_t* a, /* (o) predictor coefficients (order = 10) in Q12 */ - int16_t* lsf /* (i) line spectral frequencies in Q13 */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c b/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c deleted file mode 100644 index 227f4d45b4..0000000000 --- a/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
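The WebRtcIlbcfix_Lsf2Poly() routine above rebuilds the LPC polynomial A(z) from the line spectral frequencies: the LSFs become LSPs, the LSPs are split into the symmetric and antisymmetric polynomials F1(z) and F2(z), and the two are averaged. A rough floating-point sketch of that reconstruction follows; the function names are invented and the code is not a drop-in for the deleted fixed-point version.

#include <math.h>
#include <stdio.h>

#define ORDER 10 /* matches LPC_FILTERORDER above */

/* Multiply out prod_k (1 - 2*q[k]*z^-1 + z^-2); the result has
   2*nroots + 1 coefficients, stored in f[0..2*nroots]. */
static void lsp_poly(const double *q, int nroots, double *f) {
  int len = 1;
  f[0] = 1.0;
  for (int k = 0; k < nroots; k++) {
    for (int j = len + 1; j >= 0; j--) {
      double a = (j < len) ? f[j] : 0.0;                   /* old f[j]   */
      double b = (j >= 1 && j - 1 < len) ? f[j - 1] : 0.0; /* old f[j-1] */
      double c = (j >= 2 && j - 2 < len) ? f[j - 2] : 0.0; /* old f[j-2] */
      f[j] = a - 2.0 * q[k] * b + c;
    }
    len += 2;
  }
}

/* LSFs in radians -> LPC coefficients a[0..ORDER], with a[0] = 1. */
static void lsf_to_lpc(const double *lsf, double *a) {
  double q1[ORDER / 2], q2[ORDER / 2];
  double f1[ORDER + 2], f2[ORDER + 2];

  /* LSP = cos(LSF); even-indexed LSPs feed F1, odd-indexed feed F2,
     mirroring GetLspPoly(&lsp[0], f1) / GetLspPoly(&lsp[1], f2) above. */
  for (int i = 0; i < ORDER / 2; i++) {
    q1[i] = cos(lsf[2 * i]);
    q2[i] = cos(lsf[2 * i + 1]);
  }
  lsp_poly(q1, ORDER / 2, f1);
  lsp_poly(q2, ORDER / 2, f2);

  /* F1(z) *= (1 + z^-1), F2(z) *= (1 - z^-1), done in place just like the
     "f1[i] += f1[i-1]; f2[i] -= f2[i-1]" loop in the deleted code. */
  for (int i = ORDER; i >= 1; i--) {
    f1[i] += f1[i - 1];
    f2[i] -= f2[i - 1];
  }

  a[0] = 1.0;
  for (int i = 1; i <= ORDER; i++) a[i] = 0.5 * (f1[i] + f2[i]);
}

int main(void) {
  double lsf[ORDER], a[ORDER + 1];
  for (int i = 0; i < ORDER; i++) { /* evenly spread LSFs: nearly flat A(z) */
    lsf[i] = (i + 1) * 3.141592653589793 / (ORDER + 1);
  }
  lsf_to_lpc(lsf, a);
  for (int i = 0; i <= ORDER; i++) printf("a[%d] = %+.4f\n", i, a[i]);
  return 0;
}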
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Lsp2Lsf.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/lsp_to_lsf.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * conversion from LSP coefficients to LSF coefficients - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Lsp2Lsf( - int16_t *lsp, /* (i) lsp vector -1...+1 in Q15 */ - int16_t *lsf, /* (o) Lsf vector 0...Pi in Q13 - (ordered, so that lsf[i]=0; i--) - { - /* - locate value in the table, which is just above lsp[i], - basically an approximation to acos(x) - */ - while( (((int32_t)(*cosTblPtr)-(*lspPtr)) < 0)&&(k>0) ) - { - k-=1; - cosTblPtr--; - } - - /* Calculate diff, which is used in the linear approximation of acos(x) */ - diff = (*lspPtr)-(*cosTblPtr); - - /* - The linear approximation of acos(lsp[i]) : - acos(lsp[i])= k*512 + (WebRtcIlbcfix_kAcosDerivative[ind]*offset >> 11) - */ - - /* tmp (linear offset) in Q16 */ - tmp = (int16_t)((WebRtcIlbcfix_kAcosDerivative[k] * diff) >> 11); - - /* freq in Q16 */ - freq = (k << 9) + tmp; - - /* lsf = freq*2*pi */ - (*lsfPtr) = (int16_t)(((int32_t)freq*25736)>>15); - - lsfPtr--; - lspPtr--; - } - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h b/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h deleted file mode 100644 index a0dfb8e8eb..0000000000 --- a/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Lsp2Lsf.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSP_TO_LSF_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSP_TO_LSF_H_ - -#include - -/*----------------------------------------------------------------* - * conversion from LSP coefficients to LSF coefficients - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Lsp2Lsf( - int16_t* lsp, /* (i) lsp vector -1...+1 in Q15 */ - int16_t* lsf, /* (o) Lsf vector 0...Pi in Q13 - (ordered, so that lsf[i] -#include - -/*----------------------------------------------------------------* - * compute cross correlation between sequences - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_MyCorr(int32_t* corr, /* (o) correlation of seq1 and seq2 */ - const int16_t* seq1, /* (i) first sequence */ - size_t dim1, /* (i) dimension first seq1 */ - const int16_t* seq2, /* (i) second sequence */ - size_t dim2 /* (i) dimension seq2 */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/nearest_neighbor.c b/modules/audio_coding/codecs/ilbc/nearest_neighbor.c deleted file mode 100644 index 1ecdd96d5a..0000000000 --- a/modules/audio_coding/codecs/ilbc/nearest_neighbor.c +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_NearestNeighbor.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/nearest_neighbor.h" - -void WebRtcIlbcfix_NearestNeighbor(size_t* index, - const size_t* array, - size_t value, - size_t arlength) { - size_t i; - size_t min_diff = (size_t)-1; - for (i = 0; i < arlength; i++) { - const size_t diff = - (array[i] < value) ? (value - array[i]) : (array[i] - value); - if (diff < min_diff) { - *index = i; - min_diff = diff; - } - } -} diff --git a/modules/audio_coding/codecs/ilbc/nearest_neighbor.h b/modules/audio_coding/codecs/ilbc/nearest_neighbor.h deleted file mode 100644 index 6db30b3e15..0000000000 --- a/modules/audio_coding/codecs/ilbc/nearest_neighbor.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
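The WebRtcIlbcfix_MyCorr() declaration above computes the cross-correlation of a short sequence against every position of a longer one; the fixed-point implementation also rescales the products so the sums fit in 32 bits. Below is a plain-C sketch of the underlying operation using 64-bit accumulation instead of rescaling; it is an illustration, not the deleted implementation.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* corr[lag] = sum_n seq1[lag + n] * seq2[n], for every lag at which the
   short sequence seq2 fits inside seq1 (assumes dim1 >= dim2). */
static void cross_corr(int64_t *corr,
                       const int16_t *seq1, size_t dim1,
                       const int16_t *seq2, size_t dim2) {
  size_t nlags = dim1 - dim2 + 1;
  for (size_t lag = 0; lag < nlags; lag++) {
    int64_t sum = 0;
    for (size_t n = 0; n < dim2; n++) {
      sum += (int64_t)seq1[lag + n] * seq2[n];
    }
    corr[lag] = sum;
  }
}

int main(void) {
  const int16_t long_seq[8] = {1, 2, 3, 4, 3, 2, 1, 0};
  const int16_t short_seq[3] = {1, 2, 1};
  int64_t corr[6];
  cross_corr(corr, long_seq, 8, short_seq, 3);
  for (int lag = 0; lag < 6; lag++) {
    printf("lag %d: %lld\n", lag, (long long)corr[lag]);
  }
  return 0;
}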
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_NearestNeighbor.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_NEAREST_NEIGHBOR_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_NEAREST_NEIGHBOR_H_ - -#include -#include - -/*----------------------------------------------------------------* - * Find index in array such that the array element with said - * index is the element of said array closest to "value" - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_NearestNeighbor( - size_t* index, /* (o) index of array element closest to value */ - const size_t* array, /* (i) data array (Q2) */ - size_t value, /* (i) value (Q2) */ - size_t arlength /* (i) dimension of data array (==ENH_NBLOCKS_TOT) */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/pack_bits.c b/modules/audio_coding/codecs/ilbc/pack_bits.c deleted file mode 100644 index dd44eb8fb6..0000000000 --- a/modules/audio_coding/codecs/ilbc/pack_bits.c +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_PackBits.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/pack_bits.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * unpacking of bits from bitstream, i.e., vector of bytes - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_PackBits( - uint16_t *bitstream, /* (o) The packetized bitstream */ - iLBC_bits *enc_bits, /* (i) Encoded bits */ - int16_t mode /* (i) Codec mode (20 or 30) */ - ){ - uint16_t *bitstreamPtr; - int i, k; - int16_t *tmpPtr; - - bitstreamPtr=bitstream; - - /* Class 1 bits of ULP */ - /* First int16_t */ - (*bitstreamPtr) = ((uint16_t)enc_bits->lsf[0])<<10; /* Bit 0..5 */ - (*bitstreamPtr) |= (enc_bits->lsf[1])<<3; /* Bit 6..12 */ - (*bitstreamPtr) |= (enc_bits->lsf[2]&0x70)>>4; /* Bit 13..15 */ - bitstreamPtr++; - /* Second int16_t */ - (*bitstreamPtr) = ((uint16_t)enc_bits->lsf[2]&0xF)<<12; /* Bit 0..3 */ - - if (mode==20) { - (*bitstreamPtr) |= (enc_bits->startIdx)<<10; /* Bit 4..5 */ - (*bitstreamPtr) |= (enc_bits->state_first)<<9; /* Bit 6 */ - (*bitstreamPtr) |= (enc_bits->idxForMax)<<3; /* Bit 7..12 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[0])&0x70)>>4; /* Bit 13..15 */ - bitstreamPtr++; - /* Third int16_t */ - (*bitstreamPtr) = ((enc_bits->cb_index[0])&0xE)<<12; /* Bit 0..2 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[0])&0x18)<<8; /* Bit 3..4 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[1])&0x8)<<7; /* Bit 5 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[3])&0xFE)<<2; /* Bit 6..12 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[3])&0x10)>>2; /* Bit 13 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[4])&0x8)>>2; /* Bit 14 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[6])&0x10)>>4; 
/* Bit 15 */ - } else { /* mode==30 */ - (*bitstreamPtr) |= (enc_bits->lsf[3])<<6; /* Bit 4..9 */ - (*bitstreamPtr) |= (enc_bits->lsf[4]&0x7E)>>1; /* Bit 10..15 */ - bitstreamPtr++; - /* Third int16_t */ - (*bitstreamPtr) = ((uint16_t)enc_bits->lsf[4]&0x1)<<15; /* Bit 0 */ - (*bitstreamPtr) |= (enc_bits->lsf[5])<<8; /* Bit 1..7 */ - (*bitstreamPtr) |= (enc_bits->startIdx)<<5; /* Bit 8..10 */ - (*bitstreamPtr) |= (enc_bits->state_first)<<4; /* Bit 11 */ - (*bitstreamPtr) |= ((enc_bits->idxForMax)&0x3C)>>2; /* Bit 12..15 */ - bitstreamPtr++; - /* 4:th int16_t */ - (*bitstreamPtr) = ((uint16_t)enc_bits->idxForMax&0x3)<<14; /* Bit 0..1 */ - (*bitstreamPtr) |= (enc_bits->cb_index[0]&0x78)<<7; /* Bit 2..5 */ - (*bitstreamPtr) |= (enc_bits->gain_index[0]&0x10)<<5; /* Bit 6 */ - (*bitstreamPtr) |= (enc_bits->gain_index[1]&0x8)<<5; /* Bit 7 */ - (*bitstreamPtr) |= (enc_bits->cb_index[3]&0xFC); /* Bit 8..13 */ - (*bitstreamPtr) |= (enc_bits->gain_index[3]&0x10)>>3; /* Bit 14 */ - (*bitstreamPtr) |= (enc_bits->gain_index[4]&0x8)>>3; /* Bit 15 */ - } - /* Class 2 bits of ULP */ - /* 4:th to 6:th int16_t for 20 ms case - 5:th to 7:th int16_t for 30 ms case */ - bitstreamPtr++; - tmpPtr=enc_bits->idxVec; - for (k=0; k<3; k++) { - (*bitstreamPtr) = 0; - for (i=15; i>=0; i--) { - (*bitstreamPtr) |= ((uint16_t)((*tmpPtr)&0x4)>>2)<6; i--) { - (*bitstreamPtr) |= ((uint16_t)((*tmpPtr)&0x4)>>2)<gain_index[1]&0x4)<<4; /* Bit 9 */ - (*bitstreamPtr) |= (enc_bits->gain_index[3]&0xC)<<2; /* Bit 10..11 */ - (*bitstreamPtr) |= (enc_bits->gain_index[4]&0x4)<<1; /* Bit 12 */ - (*bitstreamPtr) |= (enc_bits->gain_index[6]&0x8)>>1; /* Bit 13 */ - (*bitstreamPtr) |= (enc_bits->gain_index[7]&0xC)>>2; /* Bit 14..15 */ - - } else { /* mode==30 */ - /* 8:th int16_t */ - (*bitstreamPtr) = 0; - for (i=15; i>5; i--) { - (*bitstreamPtr) |= ((uint16_t)((*tmpPtr)&0x4)>>2)<cb_index[0]&0x6)<<3; /* Bit 10..11 */ - (*bitstreamPtr) |= (enc_bits->gain_index[0]&0x8); /* Bit 12 */ - (*bitstreamPtr) |= (enc_bits->gain_index[1]&0x4); /* Bit 13 */ - (*bitstreamPtr) |= (enc_bits->cb_index[3]&0x2); /* Bit 14 */ - (*bitstreamPtr) |= (enc_bits->cb_index[6]&0x80)>>7; /* Bit 15 */ - bitstreamPtr++; - /* 9:th int16_t */ - (*bitstreamPtr) = ((uint16_t)enc_bits->cb_index[6]&0x7E)<<9;/* Bit 0..5 */ - (*bitstreamPtr) |= (enc_bits->cb_index[9]&0xFE)<<2; /* Bit 6..12 */ - (*bitstreamPtr) |= (enc_bits->cb_index[12]&0xE0)>>5; /* Bit 13..15 */ - bitstreamPtr++; - /* 10:th int16_t */ - (*bitstreamPtr) = ((uint16_t)enc_bits->cb_index[12]&0x1E)<<11;/* Bit 0..3 */ - (*bitstreamPtr) |= (enc_bits->gain_index[3]&0xC)<<8; /* Bit 4..5 */ - (*bitstreamPtr) |= (enc_bits->gain_index[4]&0x6)<<7; /* Bit 6..7 */ - (*bitstreamPtr) |= (enc_bits->gain_index[6]&0x18)<<3; /* Bit 8..9 */ - (*bitstreamPtr) |= (enc_bits->gain_index[7]&0xC)<<2; /* Bit 10..11 */ - (*bitstreamPtr) |= (enc_bits->gain_index[9]&0x10)>>1; /* Bit 12 */ - (*bitstreamPtr) |= (enc_bits->gain_index[10]&0x8)>>1; /* Bit 13 */ - (*bitstreamPtr) |= (enc_bits->gain_index[12]&0x10)>>3; /* Bit 14 */ - (*bitstreamPtr) |= (enc_bits->gain_index[13]&0x8)>>3; /* Bit 15 */ - } - bitstreamPtr++; - /* Class 3 bits of ULP */ - /* 8:th to 14:th int16_t for 20 ms case - 11:th to 17:th int16_t for 30 ms case */ - tmpPtr=enc_bits->idxVec; - for (k=0; k<7; k++) { - (*bitstreamPtr) = 0; - for (i=14; i>=0; i-=2) { - (*bitstreamPtr) |= ((uint16_t)((*tmpPtr)&0x3))<idxVec[56])&0x3))<<14;/* Bit 0..1 */ - (*bitstreamPtr) |= (((enc_bits->cb_index[0])&1))<<13; /* Bit 2 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[1]))<<6; /* Bit 3..9 */ - 
(*bitstreamPtr) |= ((enc_bits->cb_index[2])&0x7E)>>1; /* Bit 10..15 */ - bitstreamPtr++; - /* 16:th int16_t */ - (*bitstreamPtr) = ((uint16_t)((enc_bits->cb_index[2])&0x1))<<15; - /* Bit 0 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[0])&0x7)<<12; /* Bit 1..3 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[1])&0x3)<<10; /* Bit 4..5 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[2]))<<7; /* Bit 6..8 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[3])&0x1)<<6; /* Bit 9 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[4])&0x7E)>>1; /* Bit 10..15 */ - bitstreamPtr++; - /* 17:th int16_t */ - (*bitstreamPtr) = ((uint16_t)((enc_bits->cb_index[4])&0x1))<<15; - /* Bit 0 */ - (*bitstreamPtr) |= (enc_bits->cb_index[5])<<8; /* Bit 1..7 */ - (*bitstreamPtr) |= (enc_bits->cb_index[6]); /* Bit 8..15 */ - bitstreamPtr++; - /* 18:th int16_t */ - (*bitstreamPtr) = ((uint16_t)(enc_bits->cb_index[7]))<<8; /* Bit 0..7 */ - (*bitstreamPtr) |= (enc_bits->cb_index[8]); /* Bit 8..15 */ - bitstreamPtr++; - /* 19:th int16_t */ - (*bitstreamPtr) = ((uint16_t)((enc_bits->gain_index[3])&0x3))<<14; - /* Bit 0..1 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[4])&0x3)<<12; /* Bit 2..3 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[5]))<<9; /* Bit 4..6 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[6])&0x7)<<6; /* Bit 7..9 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[7])&0x3)<<4; /* Bit 10..11 */ - (*bitstreamPtr) |= (enc_bits->gain_index[8])<<1; /* Bit 12..14 */ - } else { /* mode==30 */ - /* 18:th int16_t */ - (*bitstreamPtr) = ((uint16_t)((enc_bits->idxVec[56])&0x3))<<14;/* Bit 0..1 */ - (*bitstreamPtr) |= (((enc_bits->idxVec[57])&0x3))<<12; /* Bit 2..3 */ - (*bitstreamPtr) |= (((enc_bits->cb_index[0])&1))<<11; /* Bit 4 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[1]))<<4; /* Bit 5..11 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[2])&0x78)>>3; /* Bit 12..15 */ - bitstreamPtr++; - /* 19:th int16_t */ - (*bitstreamPtr) = ((uint16_t)(enc_bits->cb_index[2])&0x7)<<13; - /* Bit 0..2 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[0])&0x7)<<10; /* Bit 3..5 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[1])&0x3)<<8; /* Bit 6..7 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[2])&0x7)<<5; /* Bit 8..10 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[3])&0x1)<<4; /* Bit 11 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[4])&0x78)>>3; /* Bit 12..15 */ - bitstreamPtr++; - /* 20:th int16_t */ - (*bitstreamPtr) = ((uint16_t)(enc_bits->cb_index[4])&0x7)<<13; - /* Bit 0..2 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[5]))<<6; /* Bit 3..9 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[6])&0x1)<<5; /* Bit 10 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[7])&0xF8)>>3; /* Bit 11..15 */ - bitstreamPtr++; - /* 21:st int16_t */ - (*bitstreamPtr) = ((uint16_t)(enc_bits->cb_index[7])&0x7)<<13; - /* Bit 0..2 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[8]))<<5; /* Bit 3..10 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[9])&0x1)<<4; /* Bit 11 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[10])&0xF0)>>4; /* Bit 12..15 */ - bitstreamPtr++; - /* 22:nd int16_t */ - (*bitstreamPtr) = ((uint16_t)(enc_bits->cb_index[10])&0xF)<<12; - /* Bit 0..3 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[11]))<<4; /* Bit 4..11 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[12])&0x1)<<3; /* Bit 12 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[13])&0xE0)>>5; /* Bit 13..15 */ - bitstreamPtr++; - /* 23:rd int16_t */ - (*bitstreamPtr) = ((uint16_t)(enc_bits->cb_index[13])&0x1F)<<11; - /* Bit 0..4 */ - (*bitstreamPtr) |= ((enc_bits->cb_index[14]))<<3; /* Bit 5..12 */ - (*bitstreamPtr) |= 
((enc_bits->gain_index[3])&0x3)<<1; /* Bit 13..14 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[4])&0x1); /* Bit 15 */ - bitstreamPtr++; - /* 24:rd int16_t */ - (*bitstreamPtr) = ((uint16_t)(enc_bits->gain_index[5]))<<13; - /* Bit 0..2 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[6])&0x7)<<10; /* Bit 3..5 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[7])&0x3)<<8; /* Bit 6..7 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[8]))<<5; /* Bit 8..10 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[9])&0xF)<<1; /* Bit 11..14 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[10])&0x4)>>2; /* Bit 15 */ - bitstreamPtr++; - /* 25:rd int16_t */ - (*bitstreamPtr) = ((uint16_t)(enc_bits->gain_index[10])&0x3)<<14; - /* Bit 0..1 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[11]))<<11; /* Bit 2..4 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[12])&0xF)<<7; /* Bit 5..8 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[13])&0x7)<<4; /* Bit 9..11 */ - (*bitstreamPtr) |= ((enc_bits->gain_index[14]))<<1; /* Bit 12..14 */ - } - /* Last bit is automatically zero */ - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/pack_bits.h b/modules/audio_coding/codecs/ilbc/pack_bits.h deleted file mode 100644 index d2ebeeeda9..0000000000 --- a/modules/audio_coding/codecs/ilbc/pack_bits.h +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_PackBits.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_PACK_BITS_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_PACK_BITS_H_ - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * unpacking of bits from bitstream, i.e., vector of bytes - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_PackBits( - uint16_t* bitstream, /* (o) The packetized bitstream */ - iLBC_bits* enc_bits, /* (i) Encoded bits */ - int16_t mode /* (i) Codec mode (20 or 30) */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/poly_to_lsf.c b/modules/audio_coding/codecs/ilbc/poly_to_lsf.c deleted file mode 100644 index 7192eaab49..0000000000 --- a/modules/audio_coding/codecs/ilbc/poly_to_lsf.c +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
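WebRtcIlbcfix_PackBits() above writes each quantizer index into the bitstream with hand-crafted shifts and masks, most significant bit first, 16 bits per word. A small generic bit writer that produces the same MSB-first layout is sketched below; it illustrates the packing discipline only and is not a replacement for the table-driven layout in the deleted file.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef struct {
  uint16_t *words;  /* output buffer, zero-initialized by the caller */
  size_t bitpos;    /* number of bits already written */
} BitWriter;

/* Append the `nbits` least significant bits of `value`, MSB first. */
static void put_bits(BitWriter *bw, uint32_t value, unsigned nbits) {
  while (nbits > 0) {
    size_t word = bw->bitpos >> 4;
    unsigned used = (unsigned)(bw->bitpos & 15);
    unsigned room = 16 - used;
    unsigned take = nbits < room ? nbits : room;
    uint32_t chunk = (value >> (nbits - take)) & (((uint32_t)1 << take) - 1);
    bw->words[word] |= (uint16_t)(chunk << (room - take));
    bw->bitpos += take;
    nbits -= take;
  }
}

int main(void) {
  uint16_t words[2] = {0, 0};
  BitWriter bw = {words, 0};
  put_bits(&bw, 0x2A, 6);  /* 101010 */
  put_bits(&bw, 0x3, 2);   /* 11 */
  put_bits(&bw, 0x1FF, 9); /* 111111111, spills into the second word */
  printf("%04x %04x\n", (unsigned)words[0], (unsigned)words[1]); /* abff 8000 */
  return 0;
}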
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Poly2Lsf.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/poly_to_lsf.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/lsp_to_lsf.h" -#include "modules/audio_coding/codecs/ilbc/poly_to_lsp.h" - -void WebRtcIlbcfix_Poly2Lsf( - int16_t *lsf, /* (o) lsf coefficients (Q13) */ - int16_t *a /* (i) A coefficients (Q12) */ - ) { - int16_t lsp[10]; - WebRtcIlbcfix_Poly2Lsp(a, lsp, (int16_t*)WebRtcIlbcfix_kLspMean); - WebRtcIlbcfix_Lsp2Lsf(lsp, lsf, 10); -} diff --git a/modules/audio_coding/codecs/ilbc/poly_to_lsf.h b/modules/audio_coding/codecs/ilbc/poly_to_lsf.h deleted file mode 100644 index d10f84126e..0000000000 --- a/modules/audio_coding/codecs/ilbc/poly_to_lsf.h +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Poly2Lsf.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_POLY_TO_LSF_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_POLY_TO_LSF_H_ - -#include - -/*----------------------------------------------------------------* - * conversion from lpc coefficients to lsf coefficients - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Poly2Lsf(int16_t* lsf, /* (o) lsf coefficients (Q13) */ - int16_t* a /* (i) A coefficients (Q12) */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/poly_to_lsp.c b/modules/audio_coding/codecs/ilbc/poly_to_lsp.c deleted file mode 100644 index ad0ecd70ab..0000000000 --- a/modules/audio_coding/codecs/ilbc/poly_to_lsp.c +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Poly2Lsp.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/poly_to_lsp.h" - -#include "modules/audio_coding/codecs/ilbc/chebyshev.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" - -/*----------------------------------------------------------------* - * conversion from lpc coefficients to lsp coefficients - * function is only for 10:th order LPC - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Poly2Lsp( - int16_t *a, /* (o) A coefficients in Q12 */ - int16_t *lsp, /* (i) LSP coefficients in Q15 */ - int16_t *old_lsp /* (i) old LSP coefficients that are used if the new - coefficients turn out to be unstable */ - ) { - int16_t f[2][6]; /* f[0][] represents f1 and f[1][] represents f2 */ - int16_t *a_i_ptr, *a_10mi_ptr; - int16_t *f1ptr, *f2ptr; - int32_t tmpW32; - int16_t x, y, xlow, ylow, xmid, ymid, xhigh, yhigh, xint; - int16_t shifts, sign; - int i, j; - int foundFreqs; - int fi_select; - - /* - Calculate the two polynomials f1(z) and f2(z) - (the sum and the diff polynomial) - f1[0] = f2[0] = 1.0; - f1[i+1] = a[i+1] + a[10-i] - f1[i]; - f2[i+1] = a[i+1] - a[10-i] - f1[i]; - */ - - a_i_ptr = a + 1; - a_10mi_ptr = a + 10; - f1ptr = f[0]; - f2ptr = f[1]; - (*f1ptr) = 1024; /* 1.0 in Q10 */ - (*f2ptr) = 1024; /* 1.0 in Q10 */ - for (i = 0; i < 5; i++) { - *(f1ptr + 1) = - (int16_t)((((int32_t)(*a_i_ptr) + *a_10mi_ptr) >> 2) - *f1ptr); - *(f2ptr + 1) = - (int16_t)((((int32_t)(*a_i_ptr) - *a_10mi_ptr) >> 2) + *f2ptr); - a_i_ptr++; - a_10mi_ptr--; - f1ptr++; - f2ptr++; - } - - /* - find the LSPs using the Chebychev pol. evaluation - */ - - fi_select = 0; /* selector between f1 and f2, start with f1 */ - - foundFreqs = 0; - - xlow = WebRtcIlbcfix_kCosGrid[0]; - ylow = WebRtcIlbcfix_Chebyshev(xlow, f[fi_select]); - - /* - Iterate until all the 10 LSP's have been found or - all the grid points have been tried. 
If the 10 LSP's can - not be found, set the LSP vector to previous LSP - */ - - for (j = 1; j < COS_GRID_POINTS && foundFreqs < 10; j++) { - xhigh = xlow; - yhigh = ylow; - xlow = WebRtcIlbcfix_kCosGrid[j]; - ylow = WebRtcIlbcfix_Chebyshev(xlow, f[fi_select]); - - if (ylow * yhigh <= 0) { - /* Run 4 times to reduce the interval */ - for (i = 0; i < 4; i++) { - /* xmid =(xlow + xhigh)/2 */ - xmid = (xlow >> 1) + (xhigh >> 1); - ymid = WebRtcIlbcfix_Chebyshev(xmid, f[fi_select]); - - if (ylow * ymid <= 0) { - yhigh = ymid; - xhigh = xmid; - } else { - ylow = ymid; - xlow = xmid; - } - } - - /* - Calculater xint by linear interpolation: - xint = xlow - ylow*(xhigh-xlow)/(yhigh-ylow); - */ - - x = xhigh - xlow; - y = yhigh - ylow; - - if (y == 0) { - xint = xlow; - } else { - sign = y; - y = WEBRTC_SPL_ABS_W16(y); - shifts = (int16_t)WebRtcSpl_NormW32(y)-16; - y <<= shifts; - y = (int16_t)WebRtcSpl_DivW32W16(536838144, y); /* 1/(yhigh-ylow) */ - - tmpW32 = (x * y) >> (19 - shifts); - - /* y=(xhigh-xlow)/(yhigh-ylow) */ - y = (int16_t)(tmpW32&0xFFFF); - - if (sign < 0) { - y = -y; - } - /* tmpW32 = ylow*(xhigh-xlow)/(yhigh-ylow) */ - tmpW32 = (ylow * y) >> 10; - xint = xlow-(int16_t)(tmpW32&0xFFFF); - } - - /* Store the calculated lsp */ - lsp[foundFreqs] = (int16_t)xint; - foundFreqs++; - - /* if needed, set xlow and ylow for next recursion */ - if (foundFreqs<10) { - xlow = xint; - /* Swap between f1 and f2 (f[0][] and f[1][]) */ - fi_select = ((fi_select+1)&0x1); - - ylow = WebRtcIlbcfix_Chebyshev(xlow, f[fi_select]); - } - } - } - - /* Check if M roots found, if not then use the old LSP */ - if (foundFreqs < 10) { - WEBRTC_SPL_MEMCPY_W16(lsp, old_lsp, 10); - } - return; -} diff --git a/modules/audio_coding/codecs/ilbc/poly_to_lsp.h b/modules/audio_coding/codecs/ilbc/poly_to_lsp.h deleted file mode 100644 index d95173689a..0000000000 --- a/modules/audio_coding/codecs/ilbc/poly_to_lsp.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Poly2Lsp.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_POLY_TO_LSP_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_POLY_TO_LSP_H_ - -#include - -/*----------------------------------------------------------------* - * conversion from lpc coefficients to lsp coefficients - * function is only for 10:th order LPC - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Poly2Lsp( - int16_t* a, /* (o) A coefficients in Q12 */ - int16_t* lsp, /* (i) LSP coefficients in Q15 */ - int16_t* old_lsp /* (i) old LSP coefficients that are used if the new - coefficients turn out to be unstable */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/refiner.c b/modules/audio_coding/codecs/ilbc/refiner.c deleted file mode 100644 index 5bdab7a4b0..0000000000 --- a/modules/audio_coding/codecs/ilbc/refiner.c +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
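The WebRtcIlbcfix_Poly2Lsp() routine above locates the LSPs as roots of two polynomials: it walks a cosine grid looking for sign changes of the Chebyshev-evaluated polynomial, bisects each bracketing interval four times, and finishes with one linear interpolation. The sketch below reproduces that search pattern on an arbitrary function; the polynomial evaluation itself is left abstract and all names are invented.

#include <stdio.h>

/* Any continuous function with roots in (-1, 1) stands in for the
   Chebyshev-evaluated LSP polynomial here. */
static double example_poly(double x) {
  return (x - 0.7) * (x + 0.2) * (x + 0.9);
}

/* Four bisection steps followed by one linear interpolation, the same
   refinement the deleted fixed-point code applies to each bracket. */
static double refine_root(double (*f)(double), double xlow, double xhigh) {
  double ylow = f(xlow), yhigh = f(xhigh);
  for (int i = 0; i < 4; i++) {
    double xmid = 0.5 * (xlow + xhigh);
    double ymid = f(xmid);
    if (ylow * ymid <= 0.0) { xhigh = xmid; yhigh = ymid; }
    else                    { xlow = xmid;  ylow = ymid;  }
  }
  if (yhigh == ylow) return xlow;
  return xlow - ylow * (xhigh - xlow) / (yhigh - ylow);
}

int main(void) {
  const int grid = 50;
  double xprev = 1.0, yprev = example_poly(xprev);
  for (int j = 1; j <= grid; j++) {
    double x = 1.0 - 2.0 * j / grid;  /* walk the grid from +1 down to -1 */
    double y = example_poly(x);
    if (y * yprev <= 0.0) {           /* sign change: a root is bracketed */
      printf("root near %.4f\n", refine_root(example_poly, x, xprev));
    }
    xprev = x;
    yprev = y;
  }
  return 0;
}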
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Refiner.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/refiner.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/enh_upsample.h" -#include "modules/audio_coding/codecs/ilbc/my_corr.h" - -/*----------------------------------------------------------------* - * find segment starting near idata+estSegPos that has highest - * correlation with idata+centerStartPos through - * idata+centerStartPos+ENH_BLOCKL-1 segment is found at a - * resolution of ENH_UPSO times the original of the original - * sampling rate - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Refiner( - size_t *updStartPos, /* (o) updated start point (Q-2) */ - int16_t *idata, /* (i) original data buffer */ - size_t idatal, /* (i) dimension of idata */ - size_t centerStartPos, /* (i) beginning center segment */ - size_t estSegPos, /* (i) estimated beginning other segment (Q-2) */ - int16_t *surround, /* (i/o) The contribution from this sequence - summed with earlier contributions */ - int16_t gain /* (i) Gain to use for this sequence */ - ){ - size_t estSegPosRounded, searchSegStartPos, searchSegEndPos, corrdim; - size_t tloc, tloc2, i; - - int32_t maxtemp, scalefact; - int16_t *filtStatePtr, *polyPtr; - /* Stack based */ - int16_t filt[7]; - int32_t corrVecUps[ENH_CORRDIM*ENH_UPS0]; - int32_t corrVecTemp[ENH_CORRDIM]; - int16_t vect[ENH_VECTL]; - int16_t corrVec[ENH_CORRDIM]; - - /* defining array bounds */ - - estSegPosRounded = (estSegPos - 2) >> 2; - - searchSegStartPos = - (estSegPosRounded < ENH_SLOP) ? 
0 : (estSegPosRounded - ENH_SLOP); - - searchSegEndPos = estSegPosRounded + ENH_SLOP; - if ((searchSegEndPos + ENH_BLOCKL) >= idatal) { - searchSegEndPos = idatal - ENH_BLOCKL - 1; - } - - corrdim = searchSegEndPos + 1 - searchSegStartPos; - - /* compute upsampled correlation and find - location of max */ - - WebRtcIlbcfix_MyCorr(corrVecTemp, idata + searchSegStartPos, - corrdim + ENH_BLOCKL - 1, idata + centerStartPos, - ENH_BLOCKL); - - /* Calculate the rescaling factor for the correlation in order to - put the correlation in a int16_t vector instead */ - maxtemp = WebRtcSpl_MaxAbsValueW32(corrVecTemp, corrdim); - - scalefact = WebRtcSpl_GetSizeInBits(maxtemp) - 15; - - if (scalefact > 0) { - for (i = 0; i < corrdim; i++) { - corrVec[i] = (int16_t)(corrVecTemp[i] >> scalefact); - } - } else { - for (i = 0; i < corrdim; i++) { - corrVec[i] = (int16_t)corrVecTemp[i]; - } - } - /* In order to guarantee that all values are initialized */ - for (i = corrdim; i < ENH_CORRDIM; i++) { - corrVec[i] = 0; - } - - /* Upsample the correlation */ - WebRtcIlbcfix_EnhUpsample(corrVecUps, corrVec); - - /* Find maximum */ - tloc = WebRtcSpl_MaxIndexW32(corrVecUps, ENH_UPS0 * corrdim); - - /* make vector can be upsampled without ever running outside - bounds */ - *updStartPos = searchSegStartPos * 4 + tloc + 4; - - tloc2 = (tloc + 3) >> 2; - - /* initialize the vector to be filtered, stuff with zeros - when data is outside idata buffer */ - if (ENH_FL0 > (searchSegStartPos + tloc2)) { - const size_t st = ENH_FL0 - searchSegStartPos - tloc2; - WebRtcSpl_MemSetW16(vect, 0, st); - WEBRTC_SPL_MEMCPY_W16(&vect[st], idata, ENH_VECTL - st); - } else { - const size_t st = searchSegStartPos + tloc2 - ENH_FL0; - if ((st + ENH_VECTL) > idatal) { - const size_t en = st + ENH_VECTL - idatal; - WEBRTC_SPL_MEMCPY_W16(vect, &idata[st], ENH_VECTL - en); - WebRtcSpl_MemSetW16(&vect[ENH_VECTL - en], 0, en); - } else { - WEBRTC_SPL_MEMCPY_W16(vect, &idata[st], ENH_VECTL); - } - } - - /* compute the segment (this is actually a convolution) */ - filtStatePtr = filt + 6; - polyPtr = (int16_t*)WebRtcIlbcfix_kEnhPolyPhaser[tloc2 * ENH_UPS0 - tloc]; - for (i = 0; i < 7; i++) { - *filtStatePtr-- = *polyPtr++; - } - - WebRtcSpl_FilterMAFastQ12(&vect[6], vect, filt, ENH_FLO_MULT2_PLUS1, - ENH_BLOCKL); - - /* Add the contribution from this vector (scaled with gain) to the total - surround vector */ - WebRtcSpl_AddAffineVectorToVector(surround, vect, gain, 32768, 16, - ENH_BLOCKL); - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/refiner.h b/modules/audio_coding/codecs/ilbc/refiner.h deleted file mode 100644 index 29be89e35a..0000000000 --- a/modules/audio_coding/codecs/ilbc/refiner.h +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Refiner.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_REFINER_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_REFINER_H_ - -#include -#include - -/*----------------------------------------------------------------* - * find segment starting near idata+estSegPos that has highest - * correlation with idata+centerStartPos through - * idata+centerStartPos+ENH_BLOCKL-1 segment is found at a - * resolution of ENH_UPSO times the original of the original - * sampling rate - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Refiner( - size_t* updStartPos, /* (o) updated start point (Q-2) */ - int16_t* idata, /* (i) original data buffer */ - size_t idatal, /* (i) dimension of idata */ - size_t centerStartPos, /* (i) beginning center segment */ - size_t estSegPos, /* (i) estimated beginning other segment (Q-2) */ - int16_t* surround, /* (i/o) The contribution from this sequence - summed with earlier contributions */ - int16_t gain /* (i) Gain to use for this sequence */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c b/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c deleted file mode 100644 index 7343530a5e..0000000000 --- a/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_SimpleInterpolateLsf.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h" - -#include "modules/audio_coding/codecs/ilbc/bw_expand.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h" - -/*----------------------------------------------------------------* - * lsf interpolator (subrutine to LPCencode) - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_SimpleInterpolateLsf( - int16_t *syntdenum, /* (o) the synthesis filter denominator - resulting from the quantized - interpolated lsf Q12 */ - int16_t *weightdenum, /* (o) the weighting filter denominator - resulting from the unquantized - interpolated lsf Q12 */ - int16_t *lsf, /* (i) the unquantized lsf coefficients Q13 */ - int16_t *lsfdeq, /* (i) the dequantized lsf coefficients Q13 */ - int16_t *lsfold, /* (i) the unquantized lsf coefficients of - the previous signal frame Q13 */ - int16_t *lsfdeqold, /* (i) the dequantized lsf coefficients of the - previous signal frame Q13 */ - int16_t length, /* (i) should equate FILTERORDER */ - IlbcEncoder *iLBCenc_inst - /* (i/o) the encoder state structure */ - ) { - size_t i; - int pos, lp_length; - - int16_t *lsf2, *lsfdeq2; - /* Stack based */ - int16_t lp[LPC_FILTERORDER + 1]; - - lsf2 = lsf + length; - lsfdeq2 = lsfdeq + length; - lp_length = length + 1; - - if (iLBCenc_inst->mode==30) { - /* subframe 1: Interpolation between old and first set of - lsf coefficients */ - - /* Calculate Analysis/Syntehsis filter from quantized LSF */ - WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsfdeqold, lsfdeq, - WebRtcIlbcfix_kLsfWeight30ms[0], - length); - WEBRTC_SPL_MEMCPY_W16(syntdenum, lp, lp_length); - - /* Calculate Weighting filter from quantized LSF */ - WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsfold, lsf, - WebRtcIlbcfix_kLsfWeight30ms[0], - length); - WebRtcIlbcfix_BwExpand(weightdenum, lp, - (int16_t*)WebRtcIlbcfix_kLpcChirpWeightDenum, - (int16_t)lp_length); - - /* subframe 2 to 6: Interpolation between first and second - set of lsf coefficients */ - - pos = lp_length; - for (i = 1; i < iLBCenc_inst->nsub; i++) { - - /* Calculate Analysis/Syntehsis filter from quantized LSF */ - WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsfdeq, lsfdeq2, - WebRtcIlbcfix_kLsfWeight30ms[i], - length); - WEBRTC_SPL_MEMCPY_W16(syntdenum + pos, lp, lp_length); - - /* Calculate Weighting filter from quantized LSF */ - WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsf, lsf2, - WebRtcIlbcfix_kLsfWeight30ms[i], - length); - WebRtcIlbcfix_BwExpand(weightdenum + pos, lp, - (int16_t*)WebRtcIlbcfix_kLpcChirpWeightDenum, - (int16_t)lp_length); - - pos += lp_length; - } - - /* update memory */ - - WEBRTC_SPL_MEMCPY_W16(lsfold, lsf2, length); - WEBRTC_SPL_MEMCPY_W16(lsfdeqold, lsfdeq2, length); - - } else { /* iLBCenc_inst->mode==20 */ - pos = 0; - for (i = 0; i < iLBCenc_inst->nsub; i++) { - - /* Calculate Analysis/Syntehsis filter from quantized LSF */ - WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsfdeqold, lsfdeq, - WebRtcIlbcfix_kLsfWeight20ms[i], - length); - WEBRTC_SPL_MEMCPY_W16(syntdenum + pos, lp, lp_length); - - /* Calculate Weighting filter from quantized LSF */ - WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsfold, lsf, - 
WebRtcIlbcfix_kLsfWeight20ms[i], - length); - WebRtcIlbcfix_BwExpand(weightdenum+pos, lp, - (int16_t*)WebRtcIlbcfix_kLpcChirpWeightDenum, - (int16_t)lp_length); - - pos += lp_length; - } - - /* update memory */ - - WEBRTC_SPL_MEMCPY_W16(lsfold, lsf, length); - WEBRTC_SPL_MEMCPY_W16(lsfdeqold, lsfdeq, length); - - } - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h b/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h deleted file mode 100644 index 7e7e10e62a..0000000000 --- a/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_SimpleInterpolateLsf.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_INTERPOLATE_LSF_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_INTERPOLATE_LSF_H_ - -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * lsf interpolator (subrutine to LPCencode) - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_SimpleInterpolateLsf( - int16_t* syntdenum, /* (o) the synthesis filter denominator - resulting from the quantized - interpolated lsf Q12 */ - int16_t* weightdenum, /* (o) the weighting filter denominator - resulting from the unquantized - interpolated lsf Q12 */ - int16_t* lsf, /* (i) the unquantized lsf coefficients Q13 */ - int16_t* lsfdeq, /* (i) the dequantized lsf coefficients Q13 */ - int16_t* lsfold, /* (i) the unquantized lsf coefficients of - the previous signal frame Q13 */ - int16_t* lsfdeqold, /* (i) the dequantized lsf coefficients of the - previous signal frame Q13 */ - int16_t length, /* (i) should equate FILTERORDER */ - IlbcEncoder* iLBCenc_inst - /* (i/o) the encoder state structure */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c b/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c deleted file mode 100644 index fdc4553d95..0000000000 --- a/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
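WebRtcIlbcfix_SimpleInterpolateLsf() above derives one filter per sub-frame by taking a weighted average of two LSF sets before converting them to LPC coefficients. The weighted average itself is simple; a floating-point sketch follows, with a plain scalar weight standing in for the Q14 kLsfWeight tables.

#include <stdio.h>

#define ORDER 10

/* out = coef * lsf1 + (1 - coef) * lsf2, the floating-point counterpart of
   the Q14 interpolation used above. */
static void interpolate_lsf(double *out, const double *lsf1,
                            const double *lsf2, double coef) {
  for (int i = 0; i < ORDER; i++) {
    out[i] = coef * lsf1[i] + (1.0 - coef) * lsf2[i];
  }
}

int main(void) {
  double lsf_a[ORDER], lsf_b[ORDER], sub[ORDER];
  for (int i = 0; i < ORDER; i++) {
    lsf_a[i] = 0.25 * (i + 1);
    lsf_b[i] = 0.28 * (i + 1);
  }
  interpolate_lsf(sub, lsf_a, lsf_b, 0.75); /* 75% weight on the first set */
  printf("first interpolated LSF: %.3f\n", sub[0]);
  return 0;
}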
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_SimpleLpcAnalysis.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h" - -#include "modules/audio_coding/codecs/ilbc/bw_expand.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/poly_to_lsf.h" -#include "modules/audio_coding/codecs/ilbc/window32_w32.h" - -/*----------------------------------------------------------------* - * lpc analysis (subrutine to LPCencode) - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_SimpleLpcAnalysis( - int16_t *lsf, /* (o) lsf coefficients */ - int16_t *data, /* (i) new block of speech */ - IlbcEncoder *iLBCenc_inst - /* (i/o) the encoder state structure */ - ) { - int k; - int scale; - size_t is; - int16_t stability; - /* Stack based */ - int16_t A[LPC_FILTERORDER + 1]; - int32_t R[LPC_FILTERORDER + 1]; - int16_t windowedData[BLOCKL_MAX]; - int16_t rc[LPC_FILTERORDER]; - - is=LPC_LOOKBACK+BLOCKL_MAX-iLBCenc_inst->blockl; - WEBRTC_SPL_MEMCPY_W16(iLBCenc_inst->lpc_buffer+is,data,iLBCenc_inst->blockl); - - /* No lookahead, last window is asymmetric */ - - for (k = 0; k < iLBCenc_inst->lpc_n; k++) { - - is = LPC_LOOKBACK; - - if (k < (iLBCenc_inst->lpc_n - 1)) { - - /* Hanning table WebRtcIlbcfix_kLpcWin[] is in Q15-domain so the output is right-shifted 15 */ - WebRtcSpl_ElementwiseVectorMult(windowedData, iLBCenc_inst->lpc_buffer, WebRtcIlbcfix_kLpcWin, BLOCKL_MAX, 15); - } else { - - /* Hanning table WebRtcIlbcfix_kLpcAsymWin[] is in Q15-domain so the output is right-shifted 15 */ - WebRtcSpl_ElementwiseVectorMult(windowedData, iLBCenc_inst->lpc_buffer+is, WebRtcIlbcfix_kLpcAsymWin, BLOCKL_MAX, 15); - } - - /* Compute autocorrelation */ - WebRtcSpl_AutoCorrelation(windowedData, BLOCKL_MAX, LPC_FILTERORDER, R, &scale); - - /* Window autocorrelation vector */ - WebRtcIlbcfix_Window32W32(R, R, WebRtcIlbcfix_kLpcLagWin, LPC_FILTERORDER + 1 ); - - /* Calculate the A coefficients from the Autocorrelation using Levinson Durbin algorithm */ - stability=WebRtcSpl_LevinsonDurbin(R, A, rc, LPC_FILTERORDER); - - /* - Set the filter to {1.0, 0.0, 0.0,...} if filter from Levinson Durbin algorithm is unstable - This should basically never happen... - */ - if (stability!=1) { - A[0]=4096; - WebRtcSpl_MemSetW16(&A[1], 0, LPC_FILTERORDER); - } - - /* Bandwidth expand the filter coefficients */ - WebRtcIlbcfix_BwExpand(A, A, (int16_t*)WebRtcIlbcfix_kLpcChirpSyntDenum, LPC_FILTERORDER+1); - - /* Convert from A to LSF representation */ - WebRtcIlbcfix_Poly2Lsf(lsf + k*LPC_FILTERORDER, A); - } - - is=LPC_LOOKBACK+BLOCKL_MAX-iLBCenc_inst->blockl; - WEBRTC_SPL_MEMCPY_W16(iLBCenc_inst->lpc_buffer, - iLBCenc_inst->lpc_buffer+LPC_LOOKBACK+BLOCKL_MAX-is, is); - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h b/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h deleted file mode 100644 index 90e0c4a3ba..0000000000 --- a/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
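WebRtcIlbcfix_SimpleLpcAnalysis() above windows the speech, computes an autocorrelation, lag-windows it, and hands it to WebRtcSpl_LevinsonDurbin(). For reference, a floating-point Levinson-Durbin recursion is sketched below; it mirrors the role of that call but is not the WebRTC fixed-point routine, and the toy autocorrelation values are made up.

#include <math.h>
#include <stdio.h>

#define MAX_ORDER 32

/* Solve the normal equations for the prediction-error filter
   A(z) = 1 + a[1]z^-1 + ... + a[order]z^-order from the autocorrelation
   r[0..order]. Returns 1 on success, 0 if the recursion turns unstable
   (the deleted code resets A(z) to {1, 0, ...} in that case). */
static int levinson_durbin(const double *r, double *a, int order) {
  double tmp[MAX_ORDER];
  double err = r[0];
  if (err <= 0.0 || order >= MAX_ORDER) return 0;
  a[0] = 1.0;
  for (int i = 1; i <= order; i++) a[i] = 0.0;
  for (int i = 1; i <= order; i++) {
    double acc = r[i];
    for (int j = 1; j < i; j++) acc += a[j] * r[i - j];
    double k = -acc / err;            /* reflection coefficient */
    if (fabs(k) >= 1.0) return 0;     /* unstable */
    for (int j = 1; j < i; j++) tmp[j] = a[j] + k * a[i - j];
    for (int j = 1; j < i; j++) a[j] = tmp[j];
    a[i] = k;
    err *= 1.0 - k * k;
  }
  return 1;
}

int main(void) {
  const double r[3] = {1.0, 0.9, 0.7}; /* toy autocorrelation, strongly low-pass */
  double a[3];
  if (levinson_durbin(r, a, 2)) {
    printf("A(z) = 1 %+.4f z^-1 %+.4f z^-2\n", a[1], a[2]);
  }
  return 0;
}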
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_SimpleLpcAnalysis.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LPC_ANALYSIS_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LPC_ANALYSIS_H_ - -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * lpc analysis (subrutine to LPCencode) - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_SimpleLpcAnalysis( - int16_t* lsf, /* (o) lsf coefficients */ - int16_t* data, /* (i) new block of speech */ - IlbcEncoder* iLBCenc_inst - /* (i/o) the encoder state structure */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c b/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c deleted file mode 100644 index e7494ceb59..0000000000 --- a/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_SimpleLsfDeQ.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * obtain dequantized lsf coefficients from quantization index - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_SimpleLsfDeQ( - int16_t *lsfdeq, /* (o) dequantized lsf coefficients */ - int16_t *index, /* (i) quantization index */ - int16_t lpc_n /* (i) number of LPCs */ - ){ - int i, j, pos, cb_pos; - - /* decode first LSF */ - - pos = 0; - cb_pos = 0; - for (i = 0; i < LSF_NSPLIT; i++) { - for (j = 0; j < WebRtcIlbcfix_kLsfDimCb[i]; j++) { - lsfdeq[pos + j] = WebRtcIlbcfix_kLsfCb[cb_pos + j + index[i] * - WebRtcIlbcfix_kLsfDimCb[i]]; - } - pos += WebRtcIlbcfix_kLsfDimCb[i]; - cb_pos += WebRtcIlbcfix_kLsfSizeCb[i] * WebRtcIlbcfix_kLsfDimCb[i]; - } - - if (lpc_n>1) { - /* decode last LSF */ - pos = 0; - cb_pos = 0; - for (i = 0; i < LSF_NSPLIT; i++) { - for (j = 0; j < WebRtcIlbcfix_kLsfDimCb[i]; j++) { - lsfdeq[LPC_FILTERORDER + pos + j] = WebRtcIlbcfix_kLsfCb[ - cb_pos + index[LSF_NSPLIT + i] * WebRtcIlbcfix_kLsfDimCb[i] + j]; - } - pos += WebRtcIlbcfix_kLsfDimCb[i]; - cb_pos += WebRtcIlbcfix_kLsfSizeCb[i] * WebRtcIlbcfix_kLsfDimCb[i]; - } - } - return; -} diff --git a/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h b/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h deleted file mode 100644 index 00b126af7e..0000000000 --- a/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h +++ 
/dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_SimpleLsfDeQ.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LSF_DEQUANT_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LSF_DEQUANT_H_ - -#include - -/*----------------------------------------------------------------* - * obtain dequantized lsf coefficients from quantization index - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_SimpleLsfDeQ( - int16_t* lsfdeq, /* (o) dequantized lsf coefficients */ - int16_t* index, /* (i) quantization index */ - int16_t lpc_n /* (i) number of LPCs */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c b/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c deleted file mode 100644 index 1291d1442e..0000000000 --- a/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_SimpleLsfQ.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/simple_lsf_quant.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/split_vq.h" - -/*----------------------------------------------------------------* - * lsf quantizer (subrutine to LPCencode) - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_SimpleLsfQ( - int16_t *lsfdeq, /* (o) dequantized lsf coefficients - (dimension FILTERORDER) Q13 */ - int16_t *index, /* (o) quantization index */ - int16_t *lsf, /* (i) the lsf coefficient vector to be - quantized (dimension FILTERORDER) Q13 */ - int16_t lpc_n /* (i) number of lsf sets to quantize */ - ){ - - /* Quantize first LSF with memoryless split VQ */ - WebRtcIlbcfix_SplitVq( lsfdeq, index, lsf, - (int16_t*)WebRtcIlbcfix_kLsfCb, (int16_t*)WebRtcIlbcfix_kLsfDimCb, (int16_t*)WebRtcIlbcfix_kLsfSizeCb); - - if (lpc_n==2) { - /* Quantize second LSF with memoryless split VQ */ - WebRtcIlbcfix_SplitVq( lsfdeq + LPC_FILTERORDER, index + LSF_NSPLIT, - lsf + LPC_FILTERORDER, (int16_t*)WebRtcIlbcfix_kLsfCb, - (int16_t*)WebRtcIlbcfix_kLsfDimCb, (int16_t*)WebRtcIlbcfix_kLsfSizeCb); - } - return; -} diff --git a/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h b/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h deleted file mode 100644 index 38dcdfa59d..0000000000 --- a/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_SimpleLsfQ.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LSF_QUANT_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LSF_QUANT_H_ - -#include - -/*----------------------------------------------------------------* - * lsf quantizer (subrutine to LPCencode) - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_SimpleLsfQ( - int16_t* lsfdeq, /* (o) dequantized lsf coefficients - (dimension FILTERORDER) Q13 */ - int16_t* index, /* (o) quantization index */ - int16_t* lsf, /* (i) the lsf coefficient vector to be - quantized (dimension FILTERORDER) Q13 */ - int16_t lpc_n /* (i) number of lsf sets to quantize */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/smooth.c b/modules/audio_coding/codecs/ilbc/smooth.c deleted file mode 100644 index 631b2f432a..0000000000 --- a/modules/audio_coding/codecs/ilbc/smooth.c +++ /dev/null @@ -1,212 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Smooth.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/smooth.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/smooth_out_data.h" - -/*----------------------------------------------------------------* - * find the smoothed output data - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Smooth( - int16_t *odata, /* (o) smoothed output */ - int16_t *current, /* (i) the un enhanced residual for - this block */ - int16_t *surround /* (i) The approximation from the - surrounding sequences */ - ) { - int16_t scale, scale1, scale2; - int16_t A, B, C, denomW16; - int32_t B_W32, denom, num; - int32_t errs; - int32_t w00,w10,w11, endiff, crit; - int32_t w00prim, w10prim, w11_div_w00; - int16_t w11prim; - int16_t bitsw00, bitsw10, bitsw11; - int32_t w11w00, w10w10, w00w00; - uint32_t max1, max2, max12; - - /* compute some inner products (ensure no overflow by first calculating proper scale factor) */ - - w00 = w10 = w11 = 0; - - // Calculate a right shift that will let us sum ENH_BLOCKL pairwise products - // of values from the two sequences without overflowing an int32_t. (The +1 - // in max1 and max2 are because WebRtcSpl_MaxAbsValueW16 will return 2**15 - - // 1 if the input array contains -2**15.) - max1 = WebRtcSpl_MaxAbsValueW16(current, ENH_BLOCKL) + 1; - max2 = WebRtcSpl_MaxAbsValueW16(surround, ENH_BLOCKL) + 1; - max12 = WEBRTC_SPL_MAX(max1, max2); - scale = (64 - 31) - - WebRtcSpl_CountLeadingZeros64((max12 * max12) * (uint64_t)ENH_BLOCKL); - scale=WEBRTC_SPL_MAX(0, scale); - - w00=WebRtcSpl_DotProductWithScale(current,current,ENH_BLOCKL,scale); - w11=WebRtcSpl_DotProductWithScale(surround,surround,ENH_BLOCKL,scale); - w10=WebRtcSpl_DotProductWithScale(surround,current,ENH_BLOCKL,scale); - - if (w00<0) w00 = WEBRTC_SPL_WORD32_MAX; - if (w11<0) w11 = WEBRTC_SPL_WORD32_MAX; - - /* Rescale w00 and w11 to w00prim and w11prim, so that w00prim/w11prim - is in Q16 */ - - bitsw00 = WebRtcSpl_GetSizeInBits(w00); - bitsw11 = WebRtcSpl_GetSizeInBits(w11); - bitsw10 = WebRtcSpl_GetSizeInBits(WEBRTC_SPL_ABS_W32(w10)); - scale1 = 31 - bitsw00; - scale2 = 15 - bitsw11; - - if (scale2>(scale1-16)) { - scale2 = scale1 - 16; - } else { - scale1 = scale2 + 16; - } - - w00prim = w00 << scale1; - w11prim = (int16_t) WEBRTC_SPL_SHIFT_W32(w11, scale2); - - /* Perform C = sqrt(w11/w00) (C is in Q11 since (16+6)/2=11) */ - if (w11prim>64) { - endiff = WebRtcSpl_DivW32W16(w00prim, w11prim) << 6; - C = (int16_t)WebRtcSpl_SqrtFloor(endiff); /* C is in Q11 */ - } else { - C = 1; - } - - /* first try enhancement without power-constraint */ - - errs = WebRtcIlbcfix_Smooth_odata(odata, current, surround, C); - - - - /* if constraint violated by first try, add constraint */ - - if ( (6-scale+scale1) > 31) { - crit=0; - } else { - /* crit = 0.05 * w00 (Result in Q-6) */ - crit = WEBRTC_SPL_SHIFT_W32( - WEBRTC_SPL_MUL(ENH_A0, w00prim >> 14), - -(6-scale+scale1)); - } - - if (errs > crit) { - - if( w00 < 1) { - w00=1; - } - - /* Calculate w11*w00, w10*w10 and w00*w00 in the same Q domain */ 
- - scale1 = bitsw00-15; - scale2 = bitsw11-15; - - if (scale2>scale1) { - scale = scale2; - } else { - scale = scale1; - } - - w11w00 = (int16_t)WEBRTC_SPL_SHIFT_W32(w11, -scale) * - (int16_t)WEBRTC_SPL_SHIFT_W32(w00, -scale); - - w10w10 = (int16_t)WEBRTC_SPL_SHIFT_W32(w10, -scale) * - (int16_t)WEBRTC_SPL_SHIFT_W32(w10, -scale); - - w00w00 = (int16_t)WEBRTC_SPL_SHIFT_W32(w00, -scale) * - (int16_t)WEBRTC_SPL_SHIFT_W32(w00, -scale); - - /* Calculate (w11*w00-w10*w10)/(w00*w00) in Q16 */ - if (w00w00>65536) { - endiff = (w11w00-w10w10); - endiff = WEBRTC_SPL_MAX(0, endiff); - /* denom is in Q16 */ - denom = WebRtcSpl_DivW32W16(endiff, (int16_t)(w00w00 >> 16)); - } else { - denom = 65536; - } - - if( denom > 7){ /* eliminates numerical problems - for if smooth */ - - scale=WebRtcSpl_GetSizeInBits(denom)-15; - - if (scale>0) { - /* denomW16 is in Q(16+scale) */ - denomW16 = (int16_t)(denom >> scale); - - /* num in Q(34-scale) */ - num = ENH_A0_MINUS_A0A0DIV4 >> scale; - } else { - /* denomW16 is in Q16 */ - denomW16=(int16_t)denom; - - /* num in Q34 */ - num=ENH_A0_MINUS_A0A0DIV4; - } - - /* A sqrt( (ENH_A0-(ENH_A0^2)/4)*(w00*w00)/(w11*w00 + w10*w10) ) in Q9 */ - A = (int16_t)WebRtcSpl_SqrtFloor(WebRtcSpl_DivW32W16(num, denomW16)); - - /* B_W32 is in Q30 ( B = 1 - ENH_A0/2 - A * w10/w00 ) */ - scale1 = 31-bitsw10; - scale2 = 21-scale1; - w10prim = w10 == 0 ? 0 : w10 * (1 << scale1); - w00prim = WEBRTC_SPL_SHIFT_W32(w00, -scale2); - scale = bitsw00-scale2-15; - - if (scale>0) { - w10prim >>= scale; - w00prim >>= scale; - } - - if ((w00prim>0)&&(w10prim>0)) { - w11_div_w00=WebRtcSpl_DivW32W16(w10prim, (int16_t)w00prim); - - if (WebRtcSpl_GetSizeInBits(w11_div_w00)+WebRtcSpl_GetSizeInBits(A)>31) { - B_W32 = 0; - } else { - B_W32 = (int32_t)1073741824 - (int32_t)ENH_A0DIV2 - - WEBRTC_SPL_MUL(A, w11_div_w00); - } - B = (int16_t)(B_W32 >> 16); /* B in Q14. */ - } else { - /* No smoothing */ - A = 0; - B = 16384; /* 1 in Q14 */ - } - } - else{ /* essentially no difference between cycles; - smoothing not needed */ - - A = 0; - B = 16384; /* 1 in Q14 */ - } - - /* create smoothed sequence */ - - WebRtcSpl_ScaleAndAddVectors(surround, A, 9, - current, B, 14, - odata, ENH_BLOCKL); - } - return; -} diff --git a/modules/audio_coding/codecs/ilbc/smooth.h b/modules/audio_coding/codecs/ilbc/smooth.h deleted file mode 100644 index 12da5cdea5..0000000000 --- a/modules/audio_coding/codecs/ilbc/smooth.h +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Smooth.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SMOOTH_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SMOOTH_H_ - -#include - -/*----------------------------------------------------------------* - * find the smoothed output data - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Smooth(int16_t* odata, /* (o) smoothed output */ - int16_t* current, /* (i) the un enhanced residual for - this block */ - int16_t* surround /* (i) The approximation from the - surrounding sequences */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/smooth_out_data.c b/modules/audio_coding/codecs/ilbc/smooth_out_data.c deleted file mode 100644 index 9f952bfb93..0000000000 --- a/modules/audio_coding/codecs/ilbc/smooth_out_data.c +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Smooth_odata.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/smooth_out_data.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "rtc_base/sanitizer.h" - -// An s32 + s32 -> s32 addition that's allowed to overflow. (It's still -// undefined behavior, so not a good idea; this just makes UBSan ignore the -// violation, so that our old code can continue to do what it's always been -// doing.) -static inline int32_t RTC_NO_SANITIZE("signed-integer-overflow") - OverflowingAdd_S32_S32_To_S32(int32_t a, int32_t b) { - return a + b; -} - -int32_t WebRtcIlbcfix_Smooth_odata( - int16_t *odata, - int16_t *psseq, - int16_t *surround, - int16_t C) -{ - int i; - - int16_t err; - int32_t errs; - - for(i=0;i<80;i++) { - odata[i]= (int16_t)((C * surround[i] + 1024) >> 11); - } - - errs=0; - for(i=0;i<80;i++) { - err = (psseq[i] - odata[i]) >> 3; - errs = OverflowingAdd_S32_S32_To_S32(errs, err * err); // errs in Q-6 - } - - return errs; -} diff --git a/modules/audio_coding/codecs/ilbc/smooth_out_data.h b/modules/audio_coding/codecs/ilbc/smooth_out_data.h deleted file mode 100644 index 318e7b04a2..0000000000 --- a/modules/audio_coding/codecs/ilbc/smooth_out_data.h +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Smooth_odata.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SMOOTH_OUT_DATA_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SMOOTH_OUT_DATA_H_ - -#include - -/*----------------------------------------------------------------* - * help function to WebRtcIlbcfix_Smooth() - *---------------------------------------------------------------*/ - -int32_t WebRtcIlbcfix_Smooth_odata(int16_t* odata, - int16_t* psseq, - int16_t* surround, - int16_t C); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/sort_sq.c b/modules/audio_coding/codecs/ilbc/sort_sq.c deleted file mode 100644 index c3a24750f0..0000000000 --- a/modules/audio_coding/codecs/ilbc/sort_sq.c +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_SortSq.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/sort_sq.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * scalar quantization - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_SortSq( - int16_t *xq, /* (o) the quantized value */ - int16_t *index, /* (o) the quantization index */ - int16_t x, /* (i) the value to quantize */ - const int16_t *cb, /* (i) the quantization codebook */ - int16_t cb_size /* (i) the size of the quantization codebook */ - ){ - int i; - - if (x <= cb[0]) { - *index = 0; - *xq = cb[0]; - } else { - i = 0; - while ((x > cb[i]) && (i < (cb_size-1))) { - i++; - } - - if (x > (((int32_t)cb[i] + cb[i - 1] + 1) >> 1)) { - *index = i; - *xq = cb[i]; - } else { - *index = i - 1; - *xq = cb[i - 1]; - } - } -} diff --git a/modules/audio_coding/codecs/ilbc/sort_sq.h b/modules/audio_coding/codecs/ilbc/sort_sq.h deleted file mode 100644 index a40661fb80..0000000000 --- a/modules/audio_coding/codecs/ilbc/sort_sq.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_SortSq.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SORT_SQ_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SORT_SQ_H_ - -#include - -/*----------------------------------------------------------------* - * scalar quantization - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_SortSq( - int16_t* xq, /* (o) the quantized value */ - int16_t* index, /* (o) the quantization index */ - int16_t x, /* (i) the value to quantize */ - const int16_t* cb, /* (i) the quantization codebook */ - int16_t cb_size /* (i) the size of the quantization codebook */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/split_vq.c b/modules/audio_coding/codecs/ilbc/split_vq.c deleted file mode 100644 index c1f04d2287..0000000000 --- a/modules/audio_coding/codecs/ilbc/split_vq.c +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_SplitVq.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/split_vq.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/vq3.h" -#include "modules/audio_coding/codecs/ilbc/vq4.h" - -/*----------------------------------------------------------------* - * split vector quantization - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_SplitVq( - int16_t *qX, /* (o) the quantized vector in Q13 */ - int16_t *index, /* (o) a vector of indexes for all vector - codebooks in the split */ - int16_t *X, /* (i) the vector to quantize */ - int16_t *CB, /* (i) the quantizer codebook in Q13 */ - int16_t *dim, /* (i) the dimension of X and qX */ - int16_t *cbsize /* (i) the number of vectors in the codebook */ - ) { - - int16_t *qXPtr, *indexPtr, *CBPtr, *XPtr; - - /* Quantize X with the 3 vectror quantization tables */ - - qXPtr=qX; - indexPtr=index; - CBPtr=CB; - XPtr=X; - WebRtcIlbcfix_Vq3(qXPtr, indexPtr, CBPtr, XPtr, cbsize[0]); - - qXPtr+=3; - indexPtr+=1; - CBPtr+=(dim[0]*cbsize[0]); - XPtr+=3; - WebRtcIlbcfix_Vq3(qXPtr, indexPtr, CBPtr, XPtr, cbsize[1]); - - qXPtr+=3; - indexPtr+=1; - CBPtr+=(dim[1]*cbsize[1]); - XPtr+=3; - WebRtcIlbcfix_Vq4(qXPtr, indexPtr, CBPtr, XPtr, cbsize[2]); - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/split_vq.h b/modules/audio_coding/codecs/ilbc/split_vq.h deleted file mode 100644 index 79d3cd12ee..0000000000 --- a/modules/audio_coding/codecs/ilbc/split_vq.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_SplitVq.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SPLIT_VQ_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SPLIT_VQ_H_ - -#include - -/*----------------------------------------------------------------* - * split vector quantization - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_SplitVq( - int16_t* qX, /* (o) the quantized vector in Q13 */ - int16_t* index, /* (o) a vector of indexes for all vector - codebooks in the split */ - int16_t* X, /* (i) the vector to quantize */ - int16_t* CB, /* (i) the quantizer codebook in Q13 */ - int16_t* dim, /* (i) the dimension of X and qX */ - int16_t* cbsize /* (i) the number of vectors in the codebook */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/state_construct.c b/modules/audio_coding/codecs/ilbc/state_construct.c deleted file mode 100644 index c58086c03b..0000000000 --- a/modules/audio_coding/codecs/ilbc/state_construct.c +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_StateConstruct.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/state_construct.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * decoding of the start state - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_StateConstruct( - size_t idxForMax, /* (i) 6-bit index for the quantization of - max amplitude */ - int16_t *idxVec, /* (i) vector of quantization indexes */ - int16_t *syntDenum, /* (i) synthesis filter denumerator */ - int16_t *Out_fix, /* (o) the decoded state vector */ - size_t len /* (i) length of a state vector */ - ) { - size_t k; - int16_t maxVal; - int16_t *tmp1, *tmp2, *tmp3; - /* Stack based */ - int16_t numerator[1+LPC_FILTERORDER]; - int16_t sampleValVec[2*STATE_SHORT_LEN_30MS+LPC_FILTERORDER]; - int16_t sampleMaVec[2*STATE_SHORT_LEN_30MS+LPC_FILTERORDER]; - int16_t *sampleVal = &sampleValVec[LPC_FILTERORDER]; - int16_t *sampleMa = &sampleMaVec[LPC_FILTERORDER]; - int16_t *sampleAr = &sampleValVec[LPC_FILTERORDER]; - - /* initialization of coefficients */ - - for (k=0; k> - 22); - tmp1++; - tmp2--; - } - } else if (idxForMax<59) { - for(k=0; k> - 19); - tmp1++; - tmp2--; - } - } else { - for(k=0; k> - 17); - tmp1++; - tmp2--; - } - } - - /* Set the rest of the data to zero */ - WebRtcSpl_MemSetW16(&sampleVal[len], 0, len); - - /* circular convolution with all-pass filter */ - - /* Set the state to zero */ - WebRtcSpl_MemSetW16(sampleValVec, 0, (LPC_FILTERORDER)); - - /* Run MA filter + AR filter */ - WebRtcSpl_FilterMAFastQ12( - sampleVal, sampleMa, - numerator, LPC_FILTERORDER+1, len + LPC_FILTERORDER); - WebRtcSpl_MemSetW16(&sampleMa[len + LPC_FILTERORDER], 0, (len - LPC_FILTERORDER)); - WebRtcSpl_FilterARFastQ12( - sampleMa, sampleAr, - syntDenum, LPC_FILTERORDER+1, 2 * len); - - tmp1 = &sampleAr[len-1]; - tmp2 = &sampleAr[2*len-1]; - tmp3 = Out_fix; - for(k=0;k -#include - -/*----------------------------------------------------------------* - * Generate the start state from the quantized indexes - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_StateConstruct( - size_t idxForMax, /* (i) 6-bit index for the quantization of - max amplitude */ - int16_t* idxVec, /* (i) vector of quantization indexes */ - int16_t* syntDenum, /* (i) synthesis filter denumerator */ - int16_t* Out_fix, /* (o) the decoded state vector */ - size_t len /* (i) length of a state vector */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/state_search.c b/modules/audio_coding/codecs/ilbc/state_search.c deleted file mode 100644 index 7227ac9d45..0000000000 --- a/modules/audio_coding/codecs/ilbc/state_search.c +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_StateSearch.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/state_search.h" - -#include "modules/audio_coding/codecs/ilbc/abs_quant.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * encoding of start state - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_StateSearch( - IlbcEncoder *iLBCenc_inst, - /* (i) Encoder instance */ - iLBC_bits *iLBC_encbits,/* (i/o) Encoded bits (output idxForMax - and idxVec, input state_first) */ - int16_t *residual, /* (i) target residual vector */ - int16_t *syntDenum, /* (i) lpc synthesis filter */ - int16_t *weightDenum /* (i) weighting filter denuminator */ - ) { - size_t k, index; - int16_t maxVal; - int16_t scale, shift; - int32_t maxValsq; - int16_t scaleRes; - int16_t max; - int i; - /* Stack based */ - int16_t numerator[1+LPC_FILTERORDER]; - int16_t residualLongVec[2*STATE_SHORT_LEN_30MS+LPC_FILTERORDER]; - int16_t sampleMa[2*STATE_SHORT_LEN_30MS]; - int16_t *residualLong = &residualLongVec[LPC_FILTERORDER]; - int16_t *sampleAr = residualLong; - - /* Scale to maximum 12 bits to avoid saturation in circular convolution filter */ - max = WebRtcSpl_MaxAbsValueW16(residual, iLBCenc_inst->state_short_len); - scaleRes = WebRtcSpl_GetSizeInBits(max)-12; - scaleRes = WEBRTC_SPL_MAX(0, scaleRes); - /* Set up the filter coefficients for the circular convolution */ - for (i=0; i>scaleRes); - } - - /* Copy the residual to a temporary buffer that we can filter - * and set the remaining samples to zero. 
- */ - WEBRTC_SPL_MEMCPY_W16(residualLong, residual, iLBCenc_inst->state_short_len); - WebRtcSpl_MemSetW16(residualLong + iLBCenc_inst->state_short_len, 0, iLBCenc_inst->state_short_len); - - /* Run the Zero-Pole filter (Ciurcular convolution) */ - WebRtcSpl_MemSetW16(residualLongVec, 0, LPC_FILTERORDER); - WebRtcSpl_FilterMAFastQ12(residualLong, sampleMa, numerator, - LPC_FILTERORDER + 1, - iLBCenc_inst->state_short_len + LPC_FILTERORDER); - WebRtcSpl_MemSetW16(&sampleMa[iLBCenc_inst->state_short_len + LPC_FILTERORDER], 0, iLBCenc_inst->state_short_len - LPC_FILTERORDER); - - WebRtcSpl_FilterARFastQ12( - sampleMa, sampleAr, - syntDenum, LPC_FILTERORDER+1, 2 * iLBCenc_inst->state_short_len); - - for(k=0;kstate_short_len;k++){ - sampleAr[k] += sampleAr[k+iLBCenc_inst->state_short_len]; - } - - /* Find maximum absolute value in the vector */ - maxVal=WebRtcSpl_MaxAbsValueW16(sampleAr, iLBCenc_inst->state_short_len); - - /* Find the best index */ - - if ((((int32_t)maxVal)<=WebRtcIlbcfix_kChooseFrgQuant[i]) { - index=i+1; - } else { - i=63; - } - } - iLBC_encbits->idxForMax=index; - - /* Rescale the vector before quantization */ - scale=WebRtcIlbcfix_kScale[index]; - - if (index<27) { /* scale table is in Q16, fout[] is in Q(-1) and we want the result to be in Q11 */ - shift=4; - } else { /* scale table is in Q21, fout[] is in Q(-1) and we want the result to be in Q11 */ - shift=9; - } - - /* Set up vectors for AbsQuant and rescale it with the scale factor */ - WebRtcSpl_ScaleVectorWithSat(sampleAr, sampleAr, scale, - iLBCenc_inst->state_short_len, (int16_t)(shift-scaleRes)); - - /* Quantize the values in fout[] */ - WebRtcIlbcfix_AbsQuant(iLBCenc_inst, iLBC_encbits, sampleAr, weightDenum); - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/state_search.h b/modules/audio_coding/codecs/ilbc/state_search.h deleted file mode 100644 index 7a215e43d3..0000000000 --- a/modules/audio_coding/codecs/ilbc/state_search.h +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_StateSearch.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_STATE_SEARCH_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_STATE_SEARCH_H_ - -#include -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * encoding of start state - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_StateSearch( - IlbcEncoder* iLBCenc_inst, - /* (i) Encoder instance */ - iLBC_bits* iLBC_encbits, /* (i/o) Encoded bits (output idxForMax - and idxVec, input state_first) */ - int16_t* residual, /* (i) target residual vector */ - int16_t* syntDenum, /* (i) lpc synthesis filter */ - int16_t* weightDenum /* (i) weighting filter denuminator */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/swap_bytes.c b/modules/audio_coding/codecs/ilbc/swap_bytes.c deleted file mode 100644 index bbafc1a2ed..0000000000 --- a/modules/audio_coding/codecs/ilbc/swap_bytes.c +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_SwapBytes.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/swap_bytes.h" - -/*----------------------------------------------------------------* - * Swap bytes (to simplify operations on Little Endian machines) - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_SwapBytes( - const uint16_t* input, /* (i) the sequence to swap */ - size_t wordLength, /* (i) number or uint16_t to swap */ - uint16_t* output /* (o) the swapped sequence */ - ) { - size_t k; - for (k = wordLength; k > 0; k--) { - *output++ = (*input >> 8)|(*input << 8); - input++; - } -} diff --git a/modules/audio_coding/codecs/ilbc/swap_bytes.h b/modules/audio_coding/codecs/ilbc/swap_bytes.h deleted file mode 100644 index 2e517743ce..0000000000 --- a/modules/audio_coding/codecs/ilbc/swap_bytes.h +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_SwapBytes.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SWAP_BYTES_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SWAP_BYTES_H_ - -#include -#include - -/*----------------------------------------------------------------* - * Swap bytes (to simplify operations on Little Endian machines) - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_SwapBytes( - const uint16_t* input, /* (i) the sequence to swap */ - size_t wordLength, /* (i) number or uint16_t to swap */ - uint16_t* output /* (o) the swapped sequence */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/test/empty.cc b/modules/audio_coding/codecs/ilbc/test/empty.cc deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/modules/audio_coding/codecs/ilbc/test/iLBC_test.c b/modules/audio_coding/codecs/ilbc/test/iLBC_test.c deleted file mode 100644 index e0ca075eda..0000000000 --- a/modules/audio_coding/codecs/ilbc/test/iLBC_test.c +++ /dev/null @@ -1,238 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - iLBC_test.c - -******************************************************************/ - -#include -#include -#include -#include "modules/audio_coding/codecs/ilbc/ilbc.h" - -/*---------------------------------------------------------------* - * Main program to test iLBC encoding and decoding - * - * Usage: - * exefile_name.exe - * - * : Input file, speech for encoder (16-bit pcm file) - * : Bit stream output from the encoder - * : Output file, decoded speech (16-bit pcm file) - * : Bit error file, optional (16-bit) - * 1 - Packet received correctly - * 0 - Packet Lost - * - *--------------------------------------------------------------*/ - -#define BLOCKL_MAX 240 -#define ILBCNOOFWORDS_MAX 25 - - -int main(int argc, char* argv[]) -{ - - FILE *ifileid,*efileid,*ofileid, *cfileid; - int16_t data[BLOCKL_MAX]; - uint8_t encoded_data[2 * ILBCNOOFWORDS_MAX]; - int16_t decoded_data[BLOCKL_MAX]; - int len_int, mode; - short pli; - int blockcount = 0; - size_t frameLen, len, len_i16s; - int16_t speechType; - IlbcEncoderInstance *Enc_Inst; - IlbcDecoderInstance *Dec_Inst; - -#ifdef __ILBC_WITH_40BITACC - /* Doublecheck that long long exists */ - if (sizeof(long)>=sizeof(long long)) { - fprintf(stderr, "40-bit simulation is not be supported on this platform\n"); - exit(0); - } -#endif - - /* get arguments and open files */ - - if ((argc!=5) && (argc!=6)) { - fprintf(stderr, - "\n*-----------------------------------------------*\n"); - fprintf(stderr, - " %s <20,30> input encoded decoded (channel)\n\n", - argv[0]); - fprintf(stderr, - " mode : Frame size for the encoding/decoding\n"); - fprintf(stderr, - " 20 - 20 ms\n"); - fprintf(stderr, - " 30 - 30 ms\n"); - fprintf(stderr, - " input : Speech for encoder (16-bit pcm file)\n"); - fprintf(stderr, - " encoded : Encoded bit stream\n"); - 
fprintf(stderr, - " decoded : Decoded speech (16-bit pcm file)\n"); - fprintf(stderr, - " channel : Packet loss pattern, optional (16-bit)\n"); - fprintf(stderr, - " 1 - Packet received correctly\n"); - fprintf(stderr, - " 0 - Packet Lost\n"); - fprintf(stderr, - "*-----------------------------------------------*\n\n"); - exit(1); - } - mode=atoi(argv[1]); - if (mode != 20 && mode != 30) { - fprintf(stderr,"Wrong mode %s, must be 20, or 30\n", - argv[1]); - exit(2); - } - if ( (ifileid=fopen(argv[2],"rb")) == NULL) { - fprintf(stderr,"Cannot open input file %s\n", argv[2]); - exit(2);} - if ( (efileid=fopen(argv[3],"wb")) == NULL) { - fprintf(stderr, "Cannot open encoded file file %s\n", - argv[3]); exit(1);} - if ( (ofileid=fopen(argv[4],"wb")) == NULL) { - fprintf(stderr, "Cannot open decoded file %s\n", - argv[4]); exit(1);} - if (argc==6) { - if( (cfileid=fopen(argv[5],"rb")) == NULL) { - fprintf(stderr, "Cannot open channel file %s\n", - argv[5]); - exit(1); - } - } else { - cfileid=NULL; - } - - /* print info */ - - fprintf(stderr, "\n"); - fprintf(stderr, - "*---------------------------------------------------*\n"); - fprintf(stderr, - "* *\n"); - fprintf(stderr, - "* iLBC test program *\n"); - fprintf(stderr, - "* *\n"); - fprintf(stderr, - "* *\n"); - fprintf(stderr, - "*---------------------------------------------------*\n"); - fprintf(stderr,"\nMode : %2d ms\n", mode); - fprintf(stderr,"Input file : %s\n", argv[2]); - fprintf(stderr,"Encoded file : %s\n", argv[3]); - fprintf(stderr,"Output file : %s\n", argv[4]); - if (argc==6) { - fprintf(stderr,"Channel file : %s\n", argv[5]); - } - fprintf(stderr,"\n"); - - /* Create structs */ - WebRtcIlbcfix_EncoderCreate(&Enc_Inst); - WebRtcIlbcfix_DecoderCreate(&Dec_Inst); - - - /* Initialization */ - - WebRtcIlbcfix_EncoderInit(Enc_Inst, mode); - WebRtcIlbcfix_DecoderInit(Dec_Inst, mode); - frameLen = (size_t)(mode*8); - - /* loop over input blocks */ - - while (fread(data,sizeof(int16_t),frameLen,ifileid) == frameLen) { - - blockcount++; - - /* encoding */ - - fprintf(stderr, "--- Encoding block %i --- ",blockcount); - len_int = WebRtcIlbcfix_Encode(Enc_Inst, data, frameLen, encoded_data); - if (len_int < 0) { - fprintf(stderr, "Error encoding\n"); - exit(0); - } - len = (size_t)len_int; - fprintf(stderr, "\r"); - - /* write byte file */ - - len_i16s = (len + 1) / sizeof(int16_t); - if (fwrite(encoded_data, sizeof(int16_t), len_i16s, efileid) != len_i16s) { - return -1; - } - - /* get channel data if provided */ - if (argc==6) { - if (fread(&pli, sizeof(int16_t), 1, cfileid)) { - if ((pli!=0)&&(pli!=1)) { - fprintf(stderr, "Error in channel file\n"); - exit(0); - } - if (pli==0) { - /* Packet loss -> remove info from frame */ - memset(encoded_data, 0, - sizeof(int16_t)*ILBCNOOFWORDS_MAX); - } - } else { - fprintf(stderr, "Error. 
Channel file too short\n"); - exit(0); - } - } else { - pli=1; - } - - /* decoding */ - - fprintf(stderr, "--- Decoding block %i --- ",blockcount); - if (pli==1) { - len_int=WebRtcIlbcfix_Decode(Dec_Inst, encoded_data, - len, decoded_data,&speechType); - if (len_int < 0) { - fprintf(stderr, "Error decoding\n"); - exit(0); - } - len = (size_t)len_int; - } else { - len=WebRtcIlbcfix_DecodePlc(Dec_Inst, decoded_data, 1); - } - fprintf(stderr, "\r"); - - /* write output file */ - - if (fwrite(decoded_data, sizeof(int16_t), len, ofileid) != len) { - return -1; - } - } - - /* close files */ - - fclose(ifileid); fclose(efileid); fclose(ofileid); - if (argc==6) { - fclose(cfileid); - } - - /* Free structs */ - WebRtcIlbcfix_EncoderFree(Enc_Inst); - WebRtcIlbcfix_DecoderFree(Dec_Inst); - - - printf("\nDone with simulation\n\n"); - - return(0); -} diff --git a/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c b/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c deleted file mode 100644 index 132f3bdb37..0000000000 --- a/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c +++ /dev/null @@ -1,215 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - -iLBC Speech Coder ANSI-C Source Code - -iLBC_test.c - -******************************************************************/ - -#include -#include -#include -#include -#include -#include "modules/audio_coding/codecs/ilbc/ilbc.h" - -//#define JUNK_DATA -#ifdef JUNK_DATA -#define SEED_FILE "randseed.txt" -#endif - - -/*----------------------------------------------------------------* -* Main program to test iLBC encoding and decoding -* -* Usage: -* exefile_name.exe -* -*---------------------------------------------------------------*/ - -int main(int argc, char* argv[]) -{ - FILE *ifileid,*efileid,*ofileid, *chfileid; - short encoded_data[55], data[240], speechType; - int len_int, mode; - short pli; - size_t len, readlen; - int blockcount = 0; - - IlbcEncoderInstance *Enc_Inst; - IlbcDecoderInstance *Dec_Inst; -#ifdef JUNK_DATA - size_t i; - FILE *seedfile; - unsigned int random_seed = (unsigned int) time(NULL);//1196764538 -#endif - - /* Create structs */ - WebRtcIlbcfix_EncoderCreate(&Enc_Inst); - WebRtcIlbcfix_DecoderCreate(&Dec_Inst); - - /* get arguments and open files */ - - if (argc != 6 ) { - fprintf(stderr, "%s mode inputfile bytefile outputfile channelfile\n", - argv[0]); - fprintf(stderr, "Example:\n"); - fprintf(stderr, "%s <30,20> in.pcm byte.dat out.pcm T30.0.dat\n", argv[0]); - exit(1); - } - mode=atoi(argv[1]); - if (mode != 20 && mode != 30) { - fprintf(stderr,"Wrong mode %s, must be 20, or 30\n", argv[1]); - exit(2); - } - if ( (ifileid=fopen(argv[2],"rb")) == NULL) { - fprintf(stderr,"Cannot open input file %s\n", argv[2]); - exit(2);} - if ( (efileid=fopen(argv[3],"wb")) == NULL) { - fprintf(stderr, "Cannot open channelfile file %s\n", - argv[3]); exit(3);} - if( (ofileid=fopen(argv[4],"wb")) == NULL) { - fprintf(stderr, "Cannot open output file %s\n", - argv[4]); exit(3);} - if ( (chfileid=fopen(argv[5],"rb")) == NULL) { - fprintf(stderr,"Cannot open channel file file %s\n", argv[5]); - exit(2); - 
} - /* print info */ - fprintf(stderr, "\n"); - fprintf(stderr, - "*---------------------------------------------------*\n"); - fprintf(stderr, - "* *\n"); - fprintf(stderr, - "* iLBCtest *\n"); - fprintf(stderr, - "* *\n"); - fprintf(stderr, - "* *\n"); - fprintf(stderr, - "*---------------------------------------------------*\n"); -#ifdef SPLIT_10MS - fprintf(stderr,"\n10ms split with raw mode: %2d ms\n", mode); -#else - fprintf(stderr,"\nMode : %2d ms\n", mode); -#endif - fprintf(stderr,"\nInput file : %s\n", argv[2]); - fprintf(stderr,"Coded file : %s\n", argv[3]); - fprintf(stderr,"Output file : %s\n\n", argv[4]); - fprintf(stderr,"Channel file : %s\n\n", argv[5]); - -#ifdef JUNK_DATA - srand(random_seed); - - if ( (seedfile = fopen(SEED_FILE, "a+t") ) == NULL ) { - fprintf(stderr, "Error: Could not open file %s\n", SEED_FILE); - } - else { - fprintf(seedfile, "%u\n", random_seed); - fclose(seedfile); - } -#endif - - /* Initialization */ - WebRtcIlbcfix_EncoderInit(Enc_Inst, mode); - WebRtcIlbcfix_DecoderInit(Dec_Inst, mode); - - /* loop over input blocks */ -#ifdef SPLIT_10MS - readlen = 80; -#else - readlen = (size_t)(mode << 3); -#endif - while(fread(data, sizeof(short), readlen, ifileid) == readlen) { - blockcount++; - - /* encoding */ - fprintf(stderr, "--- Encoding block %i --- ",blockcount); - len_int=WebRtcIlbcfix_Encode(Enc_Inst, data, readlen, encoded_data); - if (len_int < 0) { - fprintf(stderr, "Error encoding\n"); - exit(0); - } - len = (size_t)len_int; - fprintf(stderr, "\r"); - -#ifdef JUNK_DATA - for ( i = 0; i < len; i++) { - encoded_data[i] = (short) (encoded_data[i] + (short) rand()); - } -#endif - /* write byte file */ - if(len != 0){ //len may be 0 in 10ms split case - fwrite(encoded_data,1,len,efileid); - - /* get channel data if provided */ - if (argc==6) { - if (fread(&pli, sizeof(int16_t), 1, chfileid)) { - if ((pli!=0)&&(pli!=1)) { - fprintf(stderr, "Error in channel file\n"); - exit(0); - } - if (pli==0) { - /* Packet loss -> remove info from frame */ - memset(encoded_data, 0, sizeof(int16_t)*25); - } - } else { - fprintf(stderr, "Error. Channel file too short\n"); - exit(0); - } - } else { - pli=1; - } - - /* decoding */ - fprintf(stderr, "--- Decoding block %i --- ",blockcount); - if (pli==1) { - len_int = WebRtcIlbcfix_Decode(Dec_Inst, encoded_data, len, data, - &speechType); - if (len_int < 0) { - fprintf(stderr, "Error decoding\n"); - exit(0); - } - len = (size_t)len_int; - } else { - len=WebRtcIlbcfix_DecodePlc(Dec_Inst, data, 1); - } - fprintf(stderr, "\r"); - - /* write output file */ - fwrite(data,sizeof(short),len,ofileid); - } - } - -#ifdef JUNK_DATA - if ( (seedfile = fopen(SEED_FILE, "a+t") ) == NULL ) { - fprintf(stderr, "Error: Could not open file %s\n", SEED_FILE); - } - else { - fprintf(seedfile, "ok\n\n"); - fclose(seedfile); - } -#endif - - /* free structs */ - WebRtcIlbcfix_EncoderFree(Enc_Inst); - WebRtcIlbcfix_DecoderFree(Dec_Inst); - - /* close files */ - fclose(ifileid); - fclose(efileid); - fclose(ofileid); - - return 0; -} diff --git a/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c b/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c deleted file mode 100644 index a62a42edf6..0000000000 --- a/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c +++ /dev/null @@ -1,343 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - iLBC_test.c - -******************************************************************/ - -#include -#include -#include -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" -#include "modules/audio_coding/codecs/ilbc/nit_encode.h" -#include "modules/audio_coding/codecs/ilbc/encode.h" -#include "modules/audio_coding/codecs/ilbc/init_decode.h" -#include "modules/audio_coding/codecs/ilbc/decode.h" -#include "modules/audio_coding/codecs/ilbc/constants.h" -#include "modules/audio_coding/codecs/ilbc/ilbc.h" - -#define ILBCNOOFWORDS_MAX (NO_OF_BYTES_30MS)/2 - -/* Runtime statistics */ -#include -/* #define CLOCKS_PER_SEC 1000 */ - -/*----------------------------------------------------------------* - * Encoder interface function - *---------------------------------------------------------------*/ - -short encode( /* (o) Number of bytes encoded */ - IlbcEncoder *iLBCenc_inst, /* (i/o) Encoder instance */ - int16_t *encoded_data, /* (o) The encoded bytes */ - int16_t *data /* (i) The signal block to encode */ - ){ - - /* do the actual encoding */ - WebRtcIlbcfix_Encode((uint16_t *)encoded_data, data, iLBCenc_inst); - - return (iLBCenc_inst->no_of_bytes); -} - -/*----------------------------------------------------------------* - * Decoder interface function - *---------------------------------------------------------------*/ - -short decode( /* (o) Number of decoded samples */ - IlbcDecoder *iLBCdec_inst, /* (i/o) Decoder instance */ - short *decoded_data, /* (o) Decoded signal block */ - short *encoded_data, /* (i) Encoded bytes */ - short mode /* (i) 0=PL, 1=Normal */ - ){ - - /* check if mode is valid */ - - if (mode<0 || mode>1) { - printf("\nERROR - Wrong mode - 0, 1 allowed\n"); exit(3);} - - /* do actual decoding of block */ - - WebRtcIlbcfix_Decode(decoded_data, (uint16_t *)encoded_data, - iLBCdec_inst, mode); - - return (iLBCdec_inst->blockl); -} - -/*----------------------------------------------------------------* - * Main program to test iLBC encoding and decoding - * - * Usage: - * exefile_name.exe - * - *---------------------------------------------------------------*/ - -#define MAXFRAMES 10000 -#define MAXFILELEN (BLOCKL_MAX*MAXFRAMES) - -int main(int argc, char* argv[]) -{ - - /* Runtime statistics */ - - float starttime1, starttime2; - float runtime1, runtime2; - float outtime; - - FILE *ifileid,*efileid,*ofileid, *chfileid; - short *inputdata, *encodeddata, *decodeddata; - short *channeldata; - int blockcount = 0, noOfBlocks=0, i, noOfLostBlocks=0; - short mode; - IlbcEncoder Enc_Inst; - IlbcDecoder Dec_Inst; - - short frameLen; - short count; -#ifdef SPLIT_10MS - short size; -#endif - - inputdata=(short*) malloc(MAXFILELEN*sizeof(short)); - if (inputdata==NULL) { - fprintf(stderr,"Could not allocate memory for vector\n"); - exit(0); - } - encodeddata=(short*) malloc(ILBCNOOFWORDS_MAX*MAXFRAMES*sizeof(short)); - if (encodeddata==NULL) { - fprintf(stderr,"Could not allocate memory for vector\n"); - free(inputdata); - exit(0); - } - decodeddata=(short*) malloc(MAXFILELEN*sizeof(short)); - if (decodeddata==NULL) { - fprintf(stderr,"Could not allocate memory for vector\n"); - free(inputdata); - free(encodeddata); - exit(0); - } - channeldata=(short*) 
malloc(MAXFRAMES*sizeof(short)); - if (channeldata==NULL) { - fprintf(stderr,"Could not allocate memory for vector\n"); - free(inputdata); - free(encodeddata); - free(decodeddata); - exit(0); - } - - /* get arguments and open files */ - - if (argc != 6 ) { - fprintf(stderr, "%s mode inputfile bytefile outputfile channelfile\n", - argv[0]); - fprintf(stderr, "Example:\n"); - fprintf(stderr, "%s <30,20> in.pcm byte.dat out.pcm T30.0.dat\n", argv[0]); - exit(1); - } - mode=atoi(argv[1]); - if (mode != 20 && mode != 30) { - fprintf(stderr,"Wrong mode %s, must be 20, or 30\n", argv[1]); - exit(2); - } - if ( (ifileid=fopen(argv[2],"rb")) == NULL) { - fprintf(stderr,"Cannot open input file %s\n", argv[2]); - exit(2);} - if ( (efileid=fopen(argv[3],"wb")) == NULL) { - fprintf(stderr, "Cannot open channelfile file %s\n", - argv[3]); exit(3);} - if( (ofileid=fopen(argv[4],"wb")) == NULL) { - fprintf(stderr, "Cannot open output file %s\n", - argv[4]); exit(3);} - if ( (chfileid=fopen(argv[5],"rb")) == NULL) { - fprintf(stderr,"Cannot open channel file file %s\n", argv[5]); - exit(2);} - - - /* print info */ -#ifndef PRINT_MIPS - fprintf(stderr, "\n"); - fprintf(stderr, - "*---------------------------------------------------*\n"); - fprintf(stderr, - "* *\n"); - fprintf(stderr, - "* iLBCtest *\n"); - fprintf(stderr, - "* *\n"); - fprintf(stderr, - "* *\n"); - fprintf(stderr, - "*---------------------------------------------------*\n"); -#ifdef SPLIT_10MS - fprintf(stderr,"\n10ms split with raw mode: %2d ms\n", mode); -#else - fprintf(stderr,"\nMode : %2d ms\n", mode); -#endif - fprintf(stderr,"\nInput file : %s\n", argv[2]); - fprintf(stderr,"Coded file : %s\n", argv[3]); - fprintf(stderr,"Output file : %s\n\n", argv[4]); - fprintf(stderr,"Channel file : %s\n\n", argv[5]); -#endif - - /* Initialization */ - - WebRtcIlbcfix_EncoderInit(&Enc_Inst, mode); - WebRtcIlbcfix_DecoderInit(&Dec_Inst, mode, 1); - - /* extract the input file and channel file */ - -#ifdef SPLIT_10MS - frameLen = (mode==20)? 80:160; - fread(Enc_Inst.past_samples, sizeof(short), frameLen, ifileid); - Enc_Inst.section = 0; - - while( fread(&inputdata[noOfBlocks*80], sizeof(short), - 80, ifileid) == 80 ) { - noOfBlocks++; - } - - noOfBlocks += frameLen/80; - frameLen = 80; -#else - frameLen = Enc_Inst.blockl; - - while( fread(&inputdata[noOfBlocks*Enc_Inst.blockl],sizeof(short), - Enc_Inst.blockl,ifileid)==(uint16_t)Enc_Inst.blockl){ - noOfBlocks++; - } -#endif - - - while ((fread(&channeldata[blockcount],sizeof(short), 1,chfileid)==1) - && ( blockcount < noOfBlocks/(Enc_Inst.blockl/frameLen) )) { - blockcount++; - } - - if ( blockcount < noOfBlocks/(Enc_Inst.blockl/frameLen) ) { - fprintf(stderr,"Channel file %s is too short\n", argv[4]); - free(inputdata); - free(encodeddata); - free(decodeddata); - free(channeldata); - exit(0); - } - - count=0; - - /* Runtime statistics */ - - starttime1 = clock()/(float)CLOCKS_PER_SEC; - - /* Encoding loop */ -#ifdef PRINT_MIPS - printf("-1 -1\n"); -#endif - -#ifdef SPLIT_10MS - /* "Enc_Inst.section != 0" is to make sure we run through full - lengths of all vectors for 10ms split mode. 
- */ - // while( (count < noOfBlocks) || (Enc_Inst.section != 0) ) { - while( count < blockcount * (Enc_Inst.blockl/frameLen) ) { - - encode(&Enc_Inst, &encodeddata[Enc_Inst.no_of_words * - (count/(Enc_Inst.nsub/2))], - &inputdata[frameLen * count] ); -#else - while (count < noOfBlocks) { - encode( &Enc_Inst, &encodeddata[Enc_Inst.no_of_words * count], - &inputdata[frameLen * count] ); -#endif - -#ifdef PRINT_MIPS - printf("-1 -1\n"); -#endif - - count++; - } - - count=0; - - /* Runtime statistics */ - - starttime2=clock()/(float)CLOCKS_PER_SEC; - runtime1 = (float)(starttime2-starttime1); - - /* Decoding loop */ - - while (count < blockcount) { - if (channeldata[count]==1) { - /* Normal decoding */ - decode(&Dec_Inst, &decodeddata[count * Dec_Inst.blockl], - &encodeddata[Dec_Inst.no_of_words * count], 1); - } else if (channeldata[count]==0) { - /* PLC */ - short emptydata[ILBCNOOFWORDS_MAX]; - memset(emptydata, 0, Dec_Inst.no_of_words*sizeof(short)); - decode(&Dec_Inst, &decodeddata[count*Dec_Inst.blockl], - emptydata, 0); - noOfLostBlocks++; - } else { - printf("Error in channel file (values have to be either 1 or 0)\n"); - exit(0); - } -#ifdef PRINT_MIPS - printf("-1 -1\n"); -#endif - - count++; - } - - /* Runtime statistics */ - - runtime2 = (float)(clock()/(float)CLOCKS_PER_SEC-starttime2); - - outtime = (float)((float)blockcount* - (float)mode/1000.0); - -#ifndef PRINT_MIPS - printf("\nLength of speech file: %.1f s\n", outtime); - printf("Lost frames : %.1f%%\n\n", 100*(float)noOfLostBlocks/(float)blockcount); - - printf("Time to run iLBC_encode+iLBC_decode:"); - printf(" %.1f s (%.1f%% of realtime)\n", runtime1+runtime2, - (100*(runtime1+runtime2)/outtime)); - - printf("Time in iLBC_encode :"); - printf(" %.1f s (%.1f%% of total runtime)\n", - runtime1, 100.0*runtime1/(runtime1+runtime2)); - - printf("Time in iLBC_decode :"); - printf(" %.1f s (%.1f%% of total runtime)\n\n", - runtime2, 100.0*runtime2/(runtime1+runtime2)); -#endif - - /* Write data to files */ - for (i=0; ilsf[0] = (*bitstreamPtr)>>10; /* Bit 0..5 */ - enc_bits->lsf[1] = ((*bitstreamPtr)>>3)&0x7F; /* Bit 6..12 */ - enc_bits->lsf[2] = ((*bitstreamPtr)&0x7)<<4; /* Bit 13..15 */ - bitstreamPtr++; - /* Second int16_t */ - enc_bits->lsf[2] |= ((*bitstreamPtr)>>12)&0xF; /* Bit 0..3 */ - - if (mode==20) { - enc_bits->startIdx = ((*bitstreamPtr)>>10)&0x3; /* Bit 4..5 */ - enc_bits->state_first = ((*bitstreamPtr)>>9)&0x1; /* Bit 6 */ - enc_bits->idxForMax = ((*bitstreamPtr)>>3)&0x3F; /* Bit 7..12 */ - enc_bits->cb_index[0] = ((*bitstreamPtr)&0x7)<<4; /* Bit 13..15 */ - bitstreamPtr++; - /* Third int16_t */ - enc_bits->cb_index[0] |= ((*bitstreamPtr)>>12)&0xE; /* Bit 0..2 */ - enc_bits->gain_index[0] = ((*bitstreamPtr)>>8)&0x18; /* Bit 3..4 */ - enc_bits->gain_index[1] = ((*bitstreamPtr)>>7)&0x8; /* Bit 5 */ - enc_bits->cb_index[3] = ((*bitstreamPtr)>>2)&0xFE; /* Bit 6..12 */ - enc_bits->gain_index[3] = ((*bitstreamPtr)<<2)&0x10; /* Bit 13 */ - enc_bits->gain_index[4] = ((*bitstreamPtr)<<2)&0x8; /* Bit 14 */ - enc_bits->gain_index[6] = ((*bitstreamPtr)<<4)&0x10; /* Bit 15 */ - } else { /* mode==30 */ - enc_bits->lsf[3] = ((*bitstreamPtr)>>6)&0x3F; /* Bit 4..9 */ - enc_bits->lsf[4] = ((*bitstreamPtr)<<1)&0x7E; /* Bit 10..15 */ - bitstreamPtr++; - /* Third int16_t */ - enc_bits->lsf[4] |= ((*bitstreamPtr)>>15)&0x1; /* Bit 0 */ - enc_bits->lsf[5] = ((*bitstreamPtr)>>8)&0x7F; /* Bit 1..7 */ - enc_bits->startIdx = ((*bitstreamPtr)>>5)&0x7; /* Bit 8..10 */ - enc_bits->state_first = ((*bitstreamPtr)>>4)&0x1; /* Bit 11 */ - 
enc_bits->idxForMax = ((*bitstreamPtr)<<2)&0x3C; /* Bit 12..15 */ - bitstreamPtr++; - /* 4:th int16_t */ - enc_bits->idxForMax |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1 */ - enc_bits->cb_index[0] = ((*bitstreamPtr)>>7)&0x78; /* Bit 2..5 */ - enc_bits->gain_index[0] = ((*bitstreamPtr)>>5)&0x10; /* Bit 6 */ - enc_bits->gain_index[1] = ((*bitstreamPtr)>>5)&0x8; /* Bit 7 */ - enc_bits->cb_index[3] = ((*bitstreamPtr))&0xFC; /* Bit 8..13 */ - enc_bits->gain_index[3] = ((*bitstreamPtr)<<3)&0x10; /* Bit 14 */ - enc_bits->gain_index[4] = ((*bitstreamPtr)<<3)&0x8; /* Bit 15 */ - } - /* Class 2 bits of ULP */ - /* 4:th to 6:th int16_t for 20 ms case - 5:th to 7:th int16_t for 30 ms case */ - bitstreamPtr++; - tmpPtr=enc_bits->idxVec; - for (k=0; k<3; k++) { - for (i=15; i>=0; i--) { - (*tmpPtr) = (((*bitstreamPtr)>>i)<<2)&0x4; - /* Bit 15-i */ - tmpPtr++; - } - bitstreamPtr++; - } - - if (mode==20) { - /* 7:th int16_t */ - for (i=15; i>6; i--) { - (*tmpPtr) = (((*bitstreamPtr)>>i)<<2)&0x4; - /* Bit 15-i */ - tmpPtr++; - } - enc_bits->gain_index[1] |= ((*bitstreamPtr)>>4)&0x4; /* Bit 9 */ - enc_bits->gain_index[3] |= ((*bitstreamPtr)>>2)&0xC; /* Bit 10..11 */ - enc_bits->gain_index[4] |= ((*bitstreamPtr)>>1)&0x4; /* Bit 12 */ - enc_bits->gain_index[6] |= ((*bitstreamPtr)<<1)&0x8; /* Bit 13 */ - enc_bits->gain_index[7] = ((*bitstreamPtr)<<2)&0xC; /* Bit 14..15 */ - - } else { /* mode==30 */ - /* 8:th int16_t */ - for (i=15; i>5; i--) { - (*tmpPtr) = (((*bitstreamPtr)>>i)<<2)&0x4; - /* Bit 15-i */ - tmpPtr++; - } - enc_bits->cb_index[0] |= ((*bitstreamPtr)>>3)&0x6; /* Bit 10..11 */ - enc_bits->gain_index[0] |= ((*bitstreamPtr))&0x8; /* Bit 12 */ - enc_bits->gain_index[1] |= ((*bitstreamPtr))&0x4; /* Bit 13 */ - enc_bits->cb_index[3] |= ((*bitstreamPtr))&0x2; /* Bit 14 */ - enc_bits->cb_index[6] = ((*bitstreamPtr)<<7)&0x80; /* Bit 15 */ - bitstreamPtr++; - /* 9:th int16_t */ - enc_bits->cb_index[6] |= ((*bitstreamPtr)>>9)&0x7E; /* Bit 0..5 */ - enc_bits->cb_index[9] = ((*bitstreamPtr)>>2)&0xFE; /* Bit 6..12 */ - enc_bits->cb_index[12] = ((*bitstreamPtr)<<5)&0xE0; /* Bit 13..15 */ - bitstreamPtr++; - /* 10:th int16_t */ - enc_bits->cb_index[12] |= ((*bitstreamPtr)>>11)&0x1E;/* Bit 0..3 */ - enc_bits->gain_index[3] |= ((*bitstreamPtr)>>8)&0xC; /* Bit 4..5 */ - enc_bits->gain_index[4] |= ((*bitstreamPtr)>>7)&0x6; /* Bit 6..7 */ - enc_bits->gain_index[6] = ((*bitstreamPtr)>>3)&0x18; /* Bit 8..9 */ - enc_bits->gain_index[7] = ((*bitstreamPtr)>>2)&0xC; /* Bit 10..11 */ - enc_bits->gain_index[9] = ((*bitstreamPtr)<<1)&0x10; /* Bit 12 */ - enc_bits->gain_index[10] = ((*bitstreamPtr)<<1)&0x8; /* Bit 13 */ - enc_bits->gain_index[12] = ((*bitstreamPtr)<<3)&0x10; /* Bit 14 */ - enc_bits->gain_index[13] = ((*bitstreamPtr)<<3)&0x8; /* Bit 15 */ - } - bitstreamPtr++; - /* Class 3 bits of ULP */ - /* 8:th to 14:th int16_t for 20 ms case - 11:th to 17:th int16_t for 30 ms case */ - tmpPtr=enc_bits->idxVec; - for (k=0; k<7; k++) { - for (i=14; i>=0; i-=2) { - (*tmpPtr) |= ((*bitstreamPtr)>>i)&0x3; /* Bit 15-i..14-i*/ - tmpPtr++; - } - bitstreamPtr++; - } - - if (mode==20) { - /* 15:th int16_t */ - enc_bits->idxVec[56] |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1 */ - enc_bits->cb_index[0] |= ((*bitstreamPtr)>>13)&0x1; /* Bit 2 */ - enc_bits->cb_index[1] = ((*bitstreamPtr)>>6)&0x7F; /* Bit 3..9 */ - enc_bits->cb_index[2] = ((*bitstreamPtr)<<1)&0x7E; /* Bit 10..15 */ - bitstreamPtr++; - /* 16:th int16_t */ - enc_bits->cb_index[2] |= ((*bitstreamPtr)>>15)&0x1; /* Bit 0 */ - enc_bits->gain_index[0] |= ((*bitstreamPtr)>>12)&0x7; 
/* Bit 1..3 */ - enc_bits->gain_index[1] |= ((*bitstreamPtr)>>10)&0x3; /* Bit 4..5 */ - enc_bits->gain_index[2] = ((*bitstreamPtr)>>7)&0x7; /* Bit 6..8 */ - enc_bits->cb_index[3] |= ((*bitstreamPtr)>>6)&0x1; /* Bit 9 */ - enc_bits->cb_index[4] = ((*bitstreamPtr)<<1)&0x7E; /* Bit 10..15 */ - bitstreamPtr++; - /* 17:th int16_t */ - enc_bits->cb_index[4] |= ((*bitstreamPtr)>>15)&0x1; /* Bit 0 */ - enc_bits->cb_index[5] = ((*bitstreamPtr)>>8)&0x7F; /* Bit 1..7 */ - enc_bits->cb_index[6] = ((*bitstreamPtr))&0xFF; /* Bit 8..15 */ - bitstreamPtr++; - /* 18:th int16_t */ - enc_bits->cb_index[7] = (*bitstreamPtr)>>8; /* Bit 0..7 */ - enc_bits->cb_index[8] = (*bitstreamPtr)&0xFF; /* Bit 8..15 */ - bitstreamPtr++; - /* 19:th int16_t */ - enc_bits->gain_index[3] |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1 */ - enc_bits->gain_index[4] |= ((*bitstreamPtr)>>12)&0x3; /* Bit 2..3 */ - enc_bits->gain_index[5] = ((*bitstreamPtr)>>9)&0x7; /* Bit 4..6 */ - enc_bits->gain_index[6] |= ((*bitstreamPtr)>>6)&0x7; /* Bit 7..9 */ - enc_bits->gain_index[7] |= ((*bitstreamPtr)>>4)&0x3; /* Bit 10..11 */ - enc_bits->gain_index[8] = ((*bitstreamPtr)>>1)&0x7; /* Bit 12..14 */ - } else { /* mode==30 */ - /* 18:th int16_t */ - enc_bits->idxVec[56] |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1 */ - enc_bits->idxVec[57] |= ((*bitstreamPtr)>>12)&0x3; /* Bit 2..3 */ - enc_bits->cb_index[0] |= ((*bitstreamPtr)>>11)&1; /* Bit 4 */ - enc_bits->cb_index[1] = ((*bitstreamPtr)>>4)&0x7F; /* Bit 5..11 */ - enc_bits->cb_index[2] = ((*bitstreamPtr)<<3)&0x78; /* Bit 12..15 */ - bitstreamPtr++; - /* 19:th int16_t */ - enc_bits->cb_index[2] |= ((*bitstreamPtr)>>13)&0x7; /* Bit 0..2 */ - enc_bits->gain_index[0] |= ((*bitstreamPtr)>>10)&0x7; /* Bit 3..5 */ - enc_bits->gain_index[1] |= ((*bitstreamPtr)>>8)&0x3; /* Bit 6..7 */ - enc_bits->gain_index[2] = ((*bitstreamPtr)>>5)&0x7; /* Bit 8..10 */ - enc_bits->cb_index[3] |= ((*bitstreamPtr)>>4)&0x1; /* Bit 11 */ - enc_bits->cb_index[4] = ((*bitstreamPtr)<<3)&0x78; /* Bit 12..15 */ - bitstreamPtr++; - /* 20:th int16_t */ - enc_bits->cb_index[4] |= ((*bitstreamPtr)>>13)&0x7; /* Bit 0..2 */ - enc_bits->cb_index[5] = ((*bitstreamPtr)>>6)&0x7F; /* Bit 3..9 */ - enc_bits->cb_index[6] |= ((*bitstreamPtr)>>5)&0x1; /* Bit 10 */ - enc_bits->cb_index[7] = ((*bitstreamPtr)<<3)&0xF8; /* Bit 11..15 */ - bitstreamPtr++; - /* 21:st int16_t */ - enc_bits->cb_index[7] |= ((*bitstreamPtr)>>13)&0x7; /* Bit 0..2 */ - enc_bits->cb_index[8] = ((*bitstreamPtr)>>5)&0xFF; /* Bit 3..10 */ - enc_bits->cb_index[9] |= ((*bitstreamPtr)>>4)&0x1; /* Bit 11 */ - enc_bits->cb_index[10] = ((*bitstreamPtr)<<4)&0xF0; /* Bit 12..15 */ - bitstreamPtr++; - /* 22:nd int16_t */ - enc_bits->cb_index[10] |= ((*bitstreamPtr)>>12)&0xF; /* Bit 0..3 */ - enc_bits->cb_index[11] = ((*bitstreamPtr)>>4)&0xFF; /* Bit 4..11 */ - enc_bits->cb_index[12] |= ((*bitstreamPtr)>>3)&0x1; /* Bit 12 */ - enc_bits->cb_index[13] = ((*bitstreamPtr)<<5)&0xE0; /* Bit 13..15 */ - bitstreamPtr++; - /* 23:rd int16_t */ - enc_bits->cb_index[13] |= ((*bitstreamPtr)>>11)&0x1F;/* Bit 0..4 */ - enc_bits->cb_index[14] = ((*bitstreamPtr)>>3)&0xFF; /* Bit 5..12 */ - enc_bits->gain_index[3] |= ((*bitstreamPtr)>>1)&0x3; /* Bit 13..14 */ - enc_bits->gain_index[4] |= ((*bitstreamPtr)&0x1); /* Bit 15 */ - bitstreamPtr++; - /* 24:rd int16_t */ - enc_bits->gain_index[5] = ((*bitstreamPtr)>>13)&0x7; /* Bit 0..2 */ - enc_bits->gain_index[6] |= ((*bitstreamPtr)>>10)&0x7; /* Bit 3..5 */ - enc_bits->gain_index[7] |= ((*bitstreamPtr)>>8)&0x3; /* Bit 6..7 */ - enc_bits->gain_index[8] = 
((*bitstreamPtr)>>5)&0x7; /* Bit 8..10 */ - enc_bits->gain_index[9] |= ((*bitstreamPtr)>>1)&0xF; /* Bit 11..14 */ - enc_bits->gain_index[10] |= ((*bitstreamPtr)<<2)&0x4; /* Bit 15 */ - bitstreamPtr++; - /* 25:rd int16_t */ - enc_bits->gain_index[10] |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1 */ - enc_bits->gain_index[11] = ((*bitstreamPtr)>>11)&0x7; /* Bit 2..4 */ - enc_bits->gain_index[12] |= ((*bitstreamPtr)>>7)&0xF; /* Bit 5..8 */ - enc_bits->gain_index[13] |= ((*bitstreamPtr)>>4)&0x7; /* Bit 9..11 */ - enc_bits->gain_index[14] = ((*bitstreamPtr)>>1)&0x7; /* Bit 12..14 */ - } - /* Last bit should be zero, otherwise it's an "empty" frame */ - if (((*bitstreamPtr)&0x1) == 1) { - return(1); - } else { - return(0); - } -} diff --git a/modules/audio_coding/codecs/ilbc/unpack_bits.h b/modules/audio_coding/codecs/ilbc/unpack_bits.h deleted file mode 100644 index 1ef5e1a7db..0000000000 --- a/modules/audio_coding/codecs/ilbc/unpack_bits.h +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_UnpackBits.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_UNPACK_BITS_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_UNPACK_BITS_H_ - -#include - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * unpacking of bits from bitstream, i.e., vector of bytes - *---------------------------------------------------------------*/ - -int16_t -WebRtcIlbcfix_UnpackBits(/* (o) "Empty" frame indicator */ - const uint16_t* - bitstream, /* (i) The packatized bitstream */ - iLBC_bits* - enc_bits, /* (o) Paramerers from bitstream */ - int16_t mode /* (i) Codec mode (20 or 30) */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/vq3.c b/modules/audio_coding/codecs/ilbc/vq3.c deleted file mode 100644 index d9375fb995..0000000000 --- a/modules/audio_coding/codecs/ilbc/vq3.c +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
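For reference: the WebRtcIlbcfix_UnpackBits hunks above distribute the three ULP bit classes by shifting and masking successive 16-bit words, with "Bit 0" meaning the most significant bit of each word. The access pattern can be summed up in a small sketch; read_bits below is a hypothetical helper used only for illustration (nothing the codec itself defines) and assumes <stdint.h>.

#include <stdint.h>

/* Read `nbits` bits starting at absolute bit position `pos` from a stream of
 * 16-bit words, MSB-first within each word (so bit 0 is the top bit of the
 * first word, matching the comments in the deleted code). */
static unsigned read_bits(const uint16_t* stream, unsigned pos, unsigned nbits) {
  unsigned value = 0;
  unsigned i;
  for (i = 0; i < nbits; ++i, ++pos) {
    unsigned word = pos >> 4;          /* which 16-bit word holds this bit */
    unsigned bit = 15u - (pos & 15u);  /* MSB-first bit index inside the word */
    value = (value << 1) | ((stream[word] >> bit) & 1u);
  }
  return value;
}

Each enc_bits field above is assembled from one or more such reads, since the ULP classes scatter a field's bits across different words.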
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Vq3.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/vq3.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" - -/*----------------------------------------------------------------* - * vector quantization - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Vq3( - int16_t *Xq, /* quantized vector (Q13) */ - int16_t *index, - int16_t *CB, /* codebook in Q13 */ - int16_t *X, /* vector to quantize (Q13) */ - int16_t n_cb - ){ - int16_t i, j; - int16_t pos, minindex=0; - int16_t tmp; - int32_t dist, mindist; - - pos = 0; - mindist = WEBRTC_SPL_WORD32_MAX; /* start value */ - - /* Find the codebook with the lowest square distance */ - for (j = 0; j < n_cb; j++) { - tmp = X[0] - CB[pos]; - dist = tmp * tmp; - for (i = 1; i < 3; i++) { - tmp = X[i] - CB[pos + i]; - dist += tmp * tmp; - } - - if (dist < mindist) { - mindist = dist; - minindex = j; - } - pos += 3; - } - - /* Store the quantized codebook and the index */ - for (i = 0; i < 3; i++) { - Xq[i] = CB[minindex*3 + i]; - } - *index = minindex; - -} diff --git a/modules/audio_coding/codecs/ilbc/vq3.h b/modules/audio_coding/codecs/ilbc/vq3.h deleted file mode 100644 index 33d06b8ad0..0000000000 --- a/modules/audio_coding/codecs/ilbc/vq3.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Vq3.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_VQ3_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_VQ3_H_ - -#include - -/*----------------------------------------------------------------* - * Vector quantization of order 3 (based on MSE) - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Vq3( - int16_t* Xq, /* (o) the quantized vector (Q13) */ - int16_t* index, /* (o) the quantization index */ - int16_t* CB, /* (i) the vector quantization codebook (Q13) */ - int16_t* X, /* (i) the vector to quantize (Q13) */ - int16_t n_cb /* (i) the number of vectors in the codebook */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/vq4.c b/modules/audio_coding/codecs/ilbc/vq4.c deleted file mode 100644 index c9a65aec2a..0000000000 --- a/modules/audio_coding/codecs/ilbc/vq4.c +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Vq4.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/vq4.h" - -#include "modules/audio_coding/codecs/ilbc/constants.h" - -/*----------------------------------------------------------------* - * vector quantization - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Vq4( - int16_t *Xq, /* quantized vector (Q13) */ - int16_t *index, - int16_t *CB, /* codebook in Q13 */ - int16_t *X, /* vector to quantize (Q13) */ - int16_t n_cb - ){ - int16_t i, j; - int16_t pos, minindex=0; - int16_t tmp; - int32_t dist, mindist; - - pos = 0; - mindist = WEBRTC_SPL_WORD32_MAX; /* start value */ - - /* Find the codebook with the lowest square distance */ - for (j = 0; j < n_cb; j++) { - tmp = X[0] - CB[pos]; - dist = tmp * tmp; - for (i = 1; i < 4; i++) { - tmp = X[i] - CB[pos + i]; - dist += tmp * tmp; - } - - if (dist < mindist) { - mindist = dist; - minindex = j; - } - pos += 4; - } - - /* Store the quantized codebook and the index */ - for (i = 0; i < 4; i++) { - Xq[i] = CB[minindex*4 + i]; - } - *index = minindex; -} diff --git a/modules/audio_coding/codecs/ilbc/vq4.h b/modules/audio_coding/codecs/ilbc/vq4.h deleted file mode 100644 index 0337368bcb..0000000000 --- a/modules/audio_coding/codecs/ilbc/vq4.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Vq4.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_VQ4_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_VQ4_H_ - -#include - -/*----------------------------------------------------------------* - * Vector quantization of order 4 (based on MSE) - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Vq4( - int16_t* Xq, /* (o) the quantized vector (Q13) */ - int16_t* index, /* (o) the quantization index */ - int16_t* CB, /* (i) the vector quantization codebook (Q13) */ - int16_t* X, /* (i) the vector to quantize (Q13) */ - int16_t n_cb /* (i) the number of vectors in the codebook */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/window32_w32.c b/modules/audio_coding/codecs/ilbc/window32_w32.c deleted file mode 100644 index e82d167220..0000000000 --- a/modules/audio_coding/codecs/ilbc/window32_w32.c +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
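For reference: WebRtcIlbcfix_Vq3 and WebRtcIlbcfix_Vq4, deleted above, are the same exhaustive minimum-squared-error codebook search, specialized for vector dimensions 3 and 4. A dimension-generic sketch of that search follows; vq_search_mse is a hypothetical name, and the sketch assumes (as the originals do) that the Q13 inputs are small enough for the accumulated distance to fit in 32 bits.

#include <stdint.h>

/* Pick the codeword in `cb` (n_cb entries of `dim` int16_t each, Q13) that is
 * closest to `x` in squared error; copy it to `xq` and report its index. */
static void vq_search_mse(int16_t* xq, int16_t* index, const int16_t* cb,
                          const int16_t* x, int dim, int n_cb) {
  int best = 0;
  int32_t best_dist = INT32_MAX;
  int i, j;
  for (j = 0; j < n_cb; ++j) {
    int32_t dist = 0;
    for (i = 0; i < dim; ++i) {
      int32_t diff = (int32_t)x[i] - cb[j * dim + i];
      dist += diff * diff;             /* squared error, accumulated in 32 bits */
    }
    if (dist < best_dist) {
      best_dist = dist;
      best = j;
    }
  }
  for (i = 0; i < dim; ++i)
    xq[i] = cb[best * dim + i];        /* store the winning codeword */
  *index = (int16_t)best;
}

The deleted files are essentially this loop with dim fixed to 3 and 4 and WEBRTC_SPL_WORD32_MAX as the starting distance.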
- */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Window32W32.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/window32_w32.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * window multiplication - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Window32W32( - int32_t *z, /* Output */ - int32_t *x, /* Input (same domain as Output)*/ - const int32_t *y, /* Q31 Window */ - size_t N /* length to process */ - ) { - size_t i; - int16_t x_low, x_hi, y_low, y_hi; - int16_t left_shifts; - int32_t temp; - - left_shifts = (int16_t)WebRtcSpl_NormW32(x[0]); - WebRtcSpl_VectorBitShiftW32(x, N, x, (int16_t)(-left_shifts)); - - - /* The double precision numbers use a special representation: - * w32 = hi<<16 + lo<<1 - */ - for (i = 0; i < N; i++) { - /* Extract higher bytes */ - x_hi = (int16_t)(x[i] >> 16); - y_hi = (int16_t)(y[i] >> 16); - - /* Extract lower bytes, defined as (w32 - hi<<16)>>1 */ - x_low = (int16_t)((x[i] - (x_hi << 16)) >> 1); - - y_low = (int16_t)((y[i] - (y_hi << 16)) >> 1); - - /* Calculate z by a 32 bit multiplication using both low and high from x and y */ - temp = ((x_hi * y_hi) << 1) + ((x_hi * y_low) >> 14); - - z[i] = temp + ((x_low * y_hi) >> 14); - } - - WebRtcSpl_VectorBitShiftW32(z, N, z, left_shifts); - - return; -} diff --git a/modules/audio_coding/codecs/ilbc/window32_w32.h b/modules/audio_coding/codecs/ilbc/window32_w32.h deleted file mode 100644 index 93bb72e998..0000000000 --- a/modules/audio_coding/codecs/ilbc/window32_w32.h +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_Window32W32.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_WINDOW32_W32_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_WINDOW32_W32_H_ - -#include -#include - -/*----------------------------------------------------------------* - * window multiplication - *---------------------------------------------------------------*/ - -void WebRtcIlbcfix_Window32W32(int32_t* z, /* Output */ - int32_t* x, /* Input (same domain as Output)*/ - const int32_t* y, /* Q31 Window */ - size_t N /* length to process */ -); - -#endif diff --git a/modules/audio_coding/codecs/ilbc/xcorr_coef.c b/modules/audio_coding/codecs/ilbc/xcorr_coef.c deleted file mode 100644 index 9dc880b37e..0000000000 --- a/modules/audio_coding/codecs/ilbc/xcorr_coef.c +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
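For reference: the deleted Window32W32 relies on the split representation spelled out in its comment, w32 = (hi << 16) + (lo << 1), so that a Q31-by-Q31 product can be assembled from 16x16-bit multiplies. Lifted out of the loop, one such multiply looks roughly like the sketch below; mul_q31_approx is a hypothetical name, and the lo*lo cross term is dropped exactly as in the original.

#include <stdint.h>

/* Approximate (x * y) >> 31 for two Q31 values using only 16x16-bit products.
 * The ignored lo*lo term contributes at most a couple of LSBs. */
static int32_t mul_q31_approx(int32_t x, int32_t y) {
  int16_t x_hi = (int16_t)(x >> 16);
  int16_t y_hi = (int16_t)(y >> 16);
  int16_t x_lo = (int16_t)((x - ((int32_t)x_hi << 16)) >> 1);
  int16_t y_lo = (int16_t)((y - ((int32_t)y_hi << 16)) >> 1);
  return ((x_hi * y_hi) << 1) + ((x_hi * y_lo) >> 14) + ((x_lo * y_hi) >> 14);
}

The deleted window multiplication applies this element-wise to x[i] and the Q31 window y[i], bracketed by the normalization shifts done with WebRtcSpl_VectorBitShiftW32.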
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_XcorrCoef.c - -******************************************************************/ - -#include "modules/audio_coding/codecs/ilbc/xcorr_coef.h" - -#include "modules/audio_coding/codecs/ilbc/defines.h" - -/*----------------------------------------------------------------* - * cross correlation which finds the optimal lag for the - * crossCorr*crossCorr/(energy) criteria - *---------------------------------------------------------------*/ - -size_t WebRtcIlbcfix_XcorrCoef( - int16_t *target, /* (i) first array */ - int16_t *regressor, /* (i) second array */ - size_t subl, /* (i) dimension arrays */ - size_t searchLen, /* (i) the search lenght */ - size_t offset, /* (i) samples offset between arrays */ - int16_t step /* (i) +1 or -1 */ - ){ - size_t k; - size_t maxlag; - int16_t pos; - int16_t max; - int16_t crossCorrScale, Energyscale; - int16_t crossCorrSqMod, crossCorrSqMod_Max; - int32_t crossCorr, Energy; - int16_t crossCorrmod, EnergyMod, EnergyMod_Max; - int16_t *tp, *rp; - int16_t *rp_beg, *rp_end; - int16_t totscale, totscale_max; - int16_t scalediff; - int32_t newCrit, maxCrit; - int shifts; - - /* Initializations, to make sure that the first one is selected */ - crossCorrSqMod_Max=0; - EnergyMod_Max=WEBRTC_SPL_WORD16_MAX; - totscale_max=-500; - maxlag=0; - pos=0; - - /* Find scale value and start position */ - if (step==1) { - max=WebRtcSpl_MaxAbsValueW16(regressor, subl + searchLen - 1); - rp_beg = regressor; - rp_end = regressor + subl; - } else { /* step==-1 */ - max = WebRtcSpl_MaxAbsValueW16(regressor - searchLen, subl + searchLen - 1); - rp_beg = regressor - 1; - rp_end = regressor + subl - 1; - } - - /* Introduce a scale factor on the Energy in int32_t in - order to make sure that the calculation does not - overflow */ - - if (max>5000) { - shifts=2; - } else { - shifts=0; - } - - /* Calculate the first energy, then do a +/- to get the other energies */ - Energy=WebRtcSpl_DotProductWithScale(regressor, regressor, subl, shifts); - - for (k=0;k0)&&(crossCorr>0)) { - - /* Put cross correlation and energy on 16 bit word */ - crossCorrScale=(int16_t)WebRtcSpl_NormW32(crossCorr)-16; - crossCorrmod=(int16_t)WEBRTC_SPL_SHIFT_W32(crossCorr, crossCorrScale); - Energyscale=(int16_t)WebRtcSpl_NormW32(Energy)-16; - EnergyMod=(int16_t)WEBRTC_SPL_SHIFT_W32(Energy, Energyscale); - - /* Square cross correlation and store upper int16_t */ - crossCorrSqMod = (int16_t)((crossCorrmod * crossCorrmod) >> 16); - - /* Calculate the total number of (dynamic) right shifts that have - been performed on (crossCorr*crossCorr)/energy - */ - totscale=Energyscale-(crossCorrScale<<1); - - /* Calculate the shift difference in order to be able to compare the two - (crossCorr*crossCorr)/energy in the same domain - */ - scalediff=totscale-totscale_max; - scalediff=WEBRTC_SPL_MIN(scalediff,31); - scalediff=WEBRTC_SPL_MAX(scalediff,-31); - - /* Compute the cross multiplication between the old best criteria - and the new one to be able to compare them without using a - division */ - - if (scalediff<0) { - newCrit = ((int32_t)crossCorrSqMod*EnergyMod_Max)>>(-scalediff); - maxCrit = ((int32_t)crossCorrSqMod_Max*EnergyMod); - } else { - newCrit = ((int32_t)crossCorrSqMod*EnergyMod_Max); - maxCrit = ((int32_t)crossCorrSqMod_Max*EnergyMod)>>scalediff; - } - - /* Store the new lag value if 
the new criteria is larger - than previous largest criteria */ - - if (newCrit > maxCrit) { - crossCorrSqMod_Max = crossCorrSqMod; - EnergyMod_Max = EnergyMod; - totscale_max = totscale; - maxlag = k; - } - } - pos+=step; - - /* Do a +/- to get the next energy */ - Energy += step * ((*rp_end * *rp_end - *rp_beg * *rp_beg) >> shifts); - rp_beg+=step; - rp_end+=step; - } - - return(maxlag+offset); -} diff --git a/modules/audio_coding/codecs/ilbc/xcorr_coef.h b/modules/audio_coding/codecs/ilbc/xcorr_coef.h deleted file mode 100644 index 3fcce25147..0000000000 --- a/modules/audio_coding/codecs/ilbc/xcorr_coef.h +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/****************************************************************** - - iLBC Speech Coder ANSI-C Source Code - - WebRtcIlbcfix_XcorrCoef.h - -******************************************************************/ - -#ifndef MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_XCORR_COEF_H_ -#define MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_XCORR_COEF_H_ - -#include -#include - -/*----------------------------------------------------------------* - * cross correlation which finds the optimal lag for the - * crossCorr*crossCorr/(energy) criteria - *---------------------------------------------------------------*/ - -size_t WebRtcIlbcfix_XcorrCoef( - int16_t* target, /* (i) first array */ - int16_t* regressor, /* (i) second array */ - size_t subl, /* (i) dimension arrays */ - size_t searchLen, /* (i) the search lenght */ - size_t offset, /* (i) samples offset between arrays */ - int16_t step /* (i) +1 or -1 */ -); - -#endif diff --git a/modules/audio_coding/codecs/isac/main/source/filter_functions.c b/modules/audio_coding/codecs/isac/main/source/filter_functions.c index a4f297c5a1..d359e8f725 100644 --- a/modules/audio_coding/codecs/isac/main/source/filter_functions.c +++ b/modules/audio_coding/codecs/isac/main/source/filter_functions.c @@ -14,8 +14,8 @@ #include #endif -#include "modules/audio_coding/codecs/isac/main/source/pitch_estimator.h" #include "modules/audio_coding/codecs/isac/main/source/isac_vad.h" +#include "modules/audio_coding/codecs/isac/main/source/pitch_estimator.h" static void WebRtcIsac_AllPoleFilter(double* InOut, double* Coef, @@ -27,26 +27,21 @@ static void WebRtcIsac_AllPoleFilter(double* InOut, size_t n; int k; - //if (fabs(Coef[0]-1.0)<0.001) { - if ( (Coef[0] > 0.9999) && (Coef[0] < 1.0001) ) - { - for(n = 0; n < lengthInOut; n++) - { + // if (fabs(Coef[0]-1.0)<0.001) { + if ((Coef[0] > 0.9999) && (Coef[0] < 1.0001)) { + for (n = 0; n < lengthInOut; n++) { sum = Coef[1] * InOut[-1]; - for(k = 2; k <= orderCoef; k++){ + for (k = 2; k <= orderCoef; k++) { sum += Coef[k] * InOut[-k]; } *InOut++ -= sum; } - } - else - { + } else { scal = 1.0 / Coef[0]; - for(n=0;nbuffer, sizeof(double) * PITCH_WLPCBUFLEN); - memcpy(tmpbuffer+PITCH_WLPCBUFLEN, in, sizeof(double) * PITCH_FRAME_LEN); - memcpy(wfdata->buffer, tmpbuffer+PITCH_FRAME_LEN, sizeof(double) * PITCH_WLPCBUFLEN); + memcpy(tmpbuffer + PITCH_WLPCBUFLEN, in, sizeof(double) * PITCH_FRAME_LEN); + memcpy(wfdata->buffer, tmpbuffer + PITCH_FRAME_LEN, + sizeof(double) * PITCH_WLPCBUFLEN); - 
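For reference: the lag search in the deleted XcorrCoef never divides crossCorr*crossCorr by the energy. Each candidate is kept as a scaled pair of 16-bit mantissas, and two candidates are compared by cross-multiplication after their scale factors are aligned. Stripped of the fixed-point details (16-bit mantissas, the +/-31 clamp on the shift), the decision reduces to the sketch below; ratio_is_larger is a hypothetical helper written in wider 64-bit arithmetic for clarity.

#include <stdint.h>

/* Return nonzero when (num1 / den1) * 2^exp1 > (num2 / den2) * 2^exp2.
 * Assumes den1, den2 > 0 and exponent differences small enough that the
 * shifted 64-bit products do not overflow. */
static int ratio_is_larger(int32_t num1, int32_t den1, int exp1,
                           int32_t num2, int32_t den2, int exp2) {
  int64_t lhs = (int64_t)num1 * den2;  /* cross-multiply instead of dividing */
  int64_t rhs = (int64_t)num2 * den1;
  if (exp1 > exp2)
    lhs <<= (exp1 - exp2);             /* align both sides to the same scale */
  else
    rhs <<= (exp2 - exp1);
  return lhs > rhs;
}

In the deleted code the same comparison is made with totscale tracking the net scaling of crossCorr*crossCorr/energy and scalediff clamped to the +/-31 range a 32-bit shift allows.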
dp=weoutbuf; - dp2=whoutbuf; - for (k=0;kweostate[k]; *dp2++ = wfdata->whostate[k]; - opol[k]=0.0; + opol[k] = 0.0; } - opol[0]=1.0; - opol[PITCH_WLPCORDER]=0.0; - weo=dp; - who=dp2; + opol[0] = 1.0; + opol[PITCH_WLPCORDER] = 0.0; + weo = dp; + who = dp2; - endpos=PITCH_WLPCBUFLEN + PITCH_SUBFRAME_LEN; - inp=tmpbuffer + PITCH_WLPCBUFLEN; + endpos = PITCH_WLPCBUFLEN + PITCH_SUBFRAME_LEN; + inp = tmpbuffer + PITCH_WLPCBUFLEN; - for (n=0; nwindow[k]*tmpbuffer[start+k]; + start = endpos - PITCH_WLPCWINLEN; + for (k = 0; k < PITCH_WLPCWINLEN; k++) { + ext[k] = wfdata->window[k] * tmpbuffer[start + k]; } /* Get LPC polynomial */ WebRtcIsac_AutoCorr(corr, ext, PITCH_WLPCWINLEN, PITCH_WLPCORDER); - corr[0]=1.01*corr[0]+1.0; /* White noise correction */ + corr[0] = 1.01 * corr[0] + 1.0; /* White noise correction */ WebRtcIsac_LevDurb(apol, rc, corr, PITCH_WLPCORDER); - WebRtcIsac_BwExpand(apolr, apol, rho, PITCH_WLPCORDER+1); + WebRtcIsac_BwExpand(apolr, apol, rho, PITCH_WLPCORDER + 1); /* Filtering */ - WebRtcIsac_ZeroPoleFilter(inp, apol, apolr, PITCH_SUBFRAME_LEN, PITCH_WLPCORDER, weo); - WebRtcIsac_ZeroPoleFilter(inp, apolr, opol, PITCH_SUBFRAME_LEN, PITCH_WLPCORDER, who); - - inp+=PITCH_SUBFRAME_LEN; - endpos+=PITCH_SUBFRAME_LEN; - weo+=PITCH_SUBFRAME_LEN; - who+=PITCH_SUBFRAME_LEN; + WebRtcIsac_ZeroPoleFilter(inp, apol, apolr, PITCH_SUBFRAME_LEN, + PITCH_WLPCORDER, weo); + WebRtcIsac_ZeroPoleFilter(inp, apolr, opol, PITCH_SUBFRAME_LEN, + PITCH_WLPCORDER, who); + + inp += PITCH_SUBFRAME_LEN; + endpos += PITCH_SUBFRAME_LEN; + weo += PITCH_SUBFRAME_LEN; + who += PITCH_SUBFRAME_LEN; } /* Export filter states */ - for (k=0;kweostate[k]=weoutbuf[PITCH_FRAME_LEN+k]; - wfdata->whostate[k]=whoutbuf[PITCH_FRAME_LEN+k]; + for (k = 0; k < PITCH_WLPCORDER; k++) { + wfdata->weostate[k] = weoutbuf[PITCH_FRAME_LEN + k]; + wfdata->whostate[k] = whoutbuf[PITCH_FRAME_LEN + k]; } /* Export output data */ - memcpy(weiout, weoutbuf+PITCH_WLPCORDER, sizeof(double) * PITCH_FRAME_LEN); - memcpy(whiout, whoutbuf+PITCH_WLPCORDER, sizeof(double) * PITCH_FRAME_LEN); + memcpy(weiout, weoutbuf + PITCH_WLPCORDER, sizeof(double) * PITCH_FRAME_LEN); + memcpy(whiout, whoutbuf + PITCH_WLPCORDER, sizeof(double) * PITCH_FRAME_LEN); } diff --git a/modules/audio_coding/codecs/isac/main/source/pitch_estimator.c b/modules/audio_coding/codecs/isac/main/source/pitch_estimator.c index 8a19ac1710..157eb195e0 100644 --- a/modules/audio_coding/codecs/isac/main/source/pitch_estimator.c +++ b/modules/audio_coding/codecs/isac/main/source/pitch_estimator.c @@ -21,12 +21,12 @@ #include "modules/audio_coding/codecs/isac/main/source/pitch_filter.h" #include "rtc_base/system/ignore_warnings.h" -static const double kInterpolWin[8] = {-0.00067556028640, 0.02184247643159, -0.12203175715679, 0.60086484101160, - 0.60086484101160, -0.12203175715679, 0.02184247643159, -0.00067556028640}; +static const double kInterpolWin[8] = { + -0.00067556028640, 0.02184247643159, -0.12203175715679, 0.60086484101160, + 0.60086484101160, -0.12203175715679, 0.02184247643159, -0.00067556028640}; /* interpolation filter */ -__inline static void IntrepolFilter(double *data_ptr, double *intrp) -{ +__inline static void IntrepolFilter(double* data_ptr, double* intrp) { *intrp = kInterpolWin[0] * data_ptr[-3]; *intrp += kInterpolWin[1] * data_ptr[-2]; *intrp += kInterpolWin[2] * data_ptr[-1]; @@ -37,16 +37,17 @@ __inline static void IntrepolFilter(double *data_ptr, double *intrp) *intrp += kInterpolWin[7] * data_ptr[4]; } - /* 2D parabolic interpolation */ -/* probably some 0.5 
factors can be eliminated, and the square-roots can be removed from the Cholesky fact. */ -__inline static void Intrpol2D(double T[3][3], double *x, double *y, double *peak_val) -{ +/* probably some 0.5 factors can be eliminated, and the square-roots can be + * removed from the Cholesky fact. */ +__inline static void Intrpol2D(double T[3][3], + double* x, + double* y, + double* peak_val) { double c, b[2], A[2][2]; double t1, t2, d; double delta1, delta2; - // double T[3][3] = {{-1.25, -.25,-.25}, {-.25, .75, .75}, {-.25, .75, .75}}; // should result in: delta1 = 0.5; delta2 = 0.0; peak_val = 1.0 @@ -61,7 +62,7 @@ __inline static void Intrpol2D(double T[3][3], double *x, double *y, double *pea A[1][1] = -t2 - 0.5 * d; /* deal with singularities or ill-conditioned cases */ - if ( (A[0][0] < 1e-7) || ((A[0][0] * A[1][1] - A[0][1] * A[0][1]) < 1e-7) ) { + if ((A[0][0] < 1e-7) || ((A[0][0] * A[1][1] - A[0][1] * A[0][1]) < 1e-7)) { *peak_val = T[1][1]; return; } @@ -91,27 +92,25 @@ __inline static void Intrpol2D(double T[3][3], double *x, double *y, double *pea *y += delta2; } - -static void PCorr(const double *in, double *outcorr) -{ +static void PCorr(const double* in, double* outcorr) { double sum, ysum, prod; const double *x, *inptr; int k, n; - //ysum = 1e-6; /* use this with float (i.s.o. double)! */ + // ysum = 1e-6; /* use this with float (i.s.o. double)! */ ysum = 1e-13; sum = 0.0; - x = in + PITCH_MAX_LAG/2 + 2; + x = in + PITCH_MAX_LAG / 2 + 2; for (n = 0; n < PITCH_CORR_LEN2; n++) { ysum += in[n] * in[n]; sum += x[n] * in[n]; } - outcorr += PITCH_LAG_SPAN2 - 1; /* index of last element in array */ + outcorr += PITCH_LAG_SPAN2 - 1; /* index of last element in array */ *outcorr = sum / sqrt(ysum); for (k = 1; k < PITCH_LAG_SPAN2; k++) { - ysum -= in[k-1] * in[k-1]; + ysum -= in[k - 1] * in[k - 1]; ysum += in[PITCH_CORR_LEN2 + k - 1] * in[PITCH_CORR_LEN2 + k - 1]; sum = 0.0; inptr = &in[k]; @@ -176,15 +175,15 @@ static void WebRtcIsac_InitializePitch(const double* in, const double old_gain, PitchAnalysisStruct* State, double* lags) { - double buf_dec[PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2+2]; + double buf_dec[PITCH_CORR_LEN2 + PITCH_CORR_STEP2 + PITCH_MAX_LAG / 2 + 2]; double ratio, log_lag, gain_bias; double bias; double corrvec1[PITCH_LAG_SPAN2]; double corrvec2[PITCH_LAG_SPAN2]; int m, k; // Allocating 10 extra entries at the begining of the CorrSurf - double corrSurfBuff[10 + (2*PITCH_BW+3)*(PITCH_LAG_SPAN2+4)]; - double* CorrSurf[2*PITCH_BW+3]; + double corrSurfBuff[10 + (2 * PITCH_BW + 3) * (PITCH_LAG_SPAN2 + 4)]; + double* CorrSurf[2 * PITCH_BW + 3]; double *CorrSurfPtr1, *CorrSurfPtr2; double LagWin[3] = {0.2, 0.5, 0.98}; int ind1, ind2, peaks_ind, peak, max_ind; @@ -198,30 +197,38 @@ static void WebRtcIsac_InitializePitch(const double* in, double T[3][3]; int row; - for(k = 0; k < 2*PITCH_BW+3; k++) - { - CorrSurf[k] = &corrSurfBuff[10 + k * (PITCH_LAG_SPAN2+4)]; + for (k = 0; k < 2 * PITCH_BW + 3; k++) { + CorrSurf[k] = &corrSurfBuff[10 + k * (PITCH_LAG_SPAN2 + 4)]; } /* reset CorrSurf matrix */ - memset(corrSurfBuff, 0, sizeof(double) * (10 + (2*PITCH_BW+3) * (PITCH_LAG_SPAN2+4))); + memset(corrSurfBuff, 0, + sizeof(double) * (10 + (2 * PITCH_BW + 3) * (PITCH_LAG_SPAN2 + 4))); - //warnings -DH + // warnings -DH max_ind = 0; peak = 0; /* copy old values from state buffer */ - memcpy(buf_dec, State->dec_buffer, sizeof(double) * (PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2)); + memcpy(buf_dec, State->dec_buffer, + sizeof(double) * 
(PITCH_CORR_LEN2 + PITCH_CORR_STEP2 + + PITCH_MAX_LAG / 2 - PITCH_FRAME_LEN / 2 + 2)); /* decimation; put result after the old values */ - WebRtcIsac_DecimateAllpass(in, State->decimator_state, PITCH_FRAME_LEN, - &buf_dec[PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2]); + WebRtcIsac_DecimateAllpass( + in, State->decimator_state, PITCH_FRAME_LEN, + &buf_dec[PITCH_CORR_LEN2 + PITCH_CORR_STEP2 + PITCH_MAX_LAG / 2 - + PITCH_FRAME_LEN / 2 + 2]); /* low-pass filtering */ - for (k = PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2; k < PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2+2; k++) - buf_dec[k] += 0.75 * buf_dec[k-1] - 0.25 * buf_dec[k-2]; + for (k = PITCH_CORR_LEN2 + PITCH_CORR_STEP2 + PITCH_MAX_LAG / 2 - + PITCH_FRAME_LEN / 2 + 2; + k < PITCH_CORR_LEN2 + PITCH_CORR_STEP2 + PITCH_MAX_LAG / 2 + 2; k++) + buf_dec[k] += 0.75 * buf_dec[k - 1] - 0.25 * buf_dec[k - 2]; /* copy end part back into state buffer */ - memcpy(State->dec_buffer, buf_dec+PITCH_FRAME_LEN/2, sizeof(double) * (PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2)); + memcpy(State->dec_buffer, buf_dec + PITCH_FRAME_LEN / 2, + sizeof(double) * (PITCH_CORR_LEN2 + PITCH_CORR_STEP2 + + PITCH_MAX_LAG / 2 - PITCH_FRAME_LEN / 2 + 2)); /* compute correlation for first and second half of the frame */ PCorr(buf_dec, corrvec1); @@ -230,10 +237,10 @@ static void WebRtcIsac_InitializePitch(const double* in, /* bias towards pitch lag of previous frame */ log_lag = log(0.5 * old_lag); gain_bias = 4.0 * old_gain * old_gain; - if (gain_bias > 0.8) gain_bias = 0.8; - for (k = 0; k < PITCH_LAG_SPAN2; k++) - { - ratio = log((double) (k + (PITCH_MIN_LAG/2-2))) - log_lag; + if (gain_bias > 0.8) + gain_bias = 0.8; + for (k = 0; k < PITCH_LAG_SPAN2; k++) { + ratio = log((double)(k + (PITCH_MIN_LAG / 2 - 2))) - log_lag; bias = 1.0 + gain_bias * exp(-5.0 * ratio * ratio); corrvec1[k] *= bias; } @@ -243,8 +250,8 @@ static void WebRtcIsac_InitializePitch(const double* in, gain_tmp = LagWin[k]; corrvec1[k] *= gain_tmp; corrvec2[k] *= gain_tmp; - corrvec1[PITCH_LAG_SPAN2-1-k] *= gain_tmp; - corrvec2[PITCH_LAG_SPAN2-1-k] *= gain_tmp; + corrvec1[PITCH_LAG_SPAN2 - 1 - k] *= gain_tmp; + corrvec2[PITCH_LAG_SPAN2 - 1 - k] *= gain_tmp; } corr_max = 0.0; @@ -256,7 +263,7 @@ static void WebRtcIsac_InitializePitch(const double* in, corr = corrvec1[ind1++] + corrvec2[ind2++]; CorrSurfPtr1[k] = corr; if (corr > corr_max) { - corr_max = corr; /* update maximum */ + corr_max = corr; /* update maximum */ max_ind = (int)(&CorrSurfPtr1[k] - &CorrSurf[0][0]); } } @@ -264,63 +271,66 @@ static void WebRtcIsac_InitializePitch(const double* in, ind1 = 0; ind2 = PITCH_BW; CorrSurfPtr1 = &CorrSurf[0][2]; - CorrSurfPtr2 = &CorrSurf[2*PITCH_BW][PITCH_BW+2]; - for (k = 0; k < PITCH_LAG_SPAN2-PITCH_BW; k++) { - ratio = ((double) (ind1 + 12)) / ((double) (ind2 + 12)); - adj = 0.2 * ratio * (2.0 - ratio); /* adjustment factor; inverse parabola as a function of ratio */ + CorrSurfPtr2 = &CorrSurf[2 * PITCH_BW][PITCH_BW + 2]; + for (k = 0; k < PITCH_LAG_SPAN2 - PITCH_BW; k++) { + ratio = ((double)(ind1 + 12)) / ((double)(ind2 + 12)); + adj = 0.2 * ratio * (2.0 - ratio); /* adjustment factor; inverse parabola as + a function of ratio */ corr = adj * (corrvec1[ind1] + corrvec2[ind2]); CorrSurfPtr1[k] = corr; if (corr > corr_max) { - corr_max = corr; /* update maximum */ + corr_max = corr; /* update maximum */ max_ind = (int)(&CorrSurfPtr1[k] - &CorrSurf[0][0]); } corr = adj * (corrvec1[ind2++] + corrvec2[ind1++]); 
CorrSurfPtr2[k] = corr; if (corr > corr_max) { - corr_max = corr; /* update maximum */ + corr_max = corr; /* update maximum */ max_ind = (int)(&CorrSurfPtr2[k] - &CorrSurf[0][0]); } } /* fill second and next to last rows of correlation surface */ ind1 = 0; - ind2 = PITCH_BW-1; + ind2 = PITCH_BW - 1; CorrSurfPtr1 = &CorrSurf[1][2]; - CorrSurfPtr2 = &CorrSurf[2*PITCH_BW-1][PITCH_BW+1]; - for (k = 0; k < PITCH_LAG_SPAN2-PITCH_BW+1; k++) { - ratio = ((double) (ind1 + 12)) / ((double) (ind2 + 12)); - adj = 0.9 * ratio * (2.0 - ratio); /* adjustment factor; inverse parabola as a function of ratio */ + CorrSurfPtr2 = &CorrSurf[2 * PITCH_BW - 1][PITCH_BW + 1]; + for (k = 0; k < PITCH_LAG_SPAN2 - PITCH_BW + 1; k++) { + ratio = ((double)(ind1 + 12)) / ((double)(ind2 + 12)); + adj = 0.9 * ratio * (2.0 - ratio); /* adjustment factor; inverse parabola as + a function of ratio */ corr = adj * (corrvec1[ind1] + corrvec2[ind2]); CorrSurfPtr1[k] = corr; if (corr > corr_max) { - corr_max = corr; /* update maximum */ + corr_max = corr; /* update maximum */ max_ind = (int)(&CorrSurfPtr1[k] - &CorrSurf[0][0]); } corr = adj * (corrvec1[ind2++] + corrvec2[ind1++]); CorrSurfPtr2[k] = corr; if (corr > corr_max) { - corr_max = corr; /* update maximum */ + corr_max = corr; /* update maximum */ max_ind = (int)(&CorrSurfPtr2[k] - &CorrSurf[0][0]); } } /* fill remainder of correlation surface */ for (m = 2; m < PITCH_BW; m++) { ind1 = 0; - ind2 = PITCH_BW - m; /* always larger than ind1 */ + ind2 = PITCH_BW - m; /* always larger than ind1 */ CorrSurfPtr1 = &CorrSurf[m][2]; - CorrSurfPtr2 = &CorrSurf[2*PITCH_BW-m][PITCH_BW+2-m]; - for (k = 0; k < PITCH_LAG_SPAN2-PITCH_BW+m; k++) { - ratio = ((double) (ind1 + 12)) / ((double) (ind2 + 12)); - adj = ratio * (2.0 - ratio); /* adjustment factor; inverse parabola as a function of ratio */ + CorrSurfPtr2 = &CorrSurf[2 * PITCH_BW - m][PITCH_BW + 2 - m]; + for (k = 0; k < PITCH_LAG_SPAN2 - PITCH_BW + m; k++) { + ratio = ((double)(ind1 + 12)) / ((double)(ind2 + 12)); + adj = ratio * (2.0 - ratio); /* adjustment factor; inverse parabola as a + function of ratio */ corr = adj * (corrvec1[ind1] + corrvec2[ind2]); CorrSurfPtr1[k] = corr; if (corr > corr_max) { - corr_max = corr; /* update maximum */ + corr_max = corr; /* update maximum */ max_ind = (int)(&CorrSurfPtr1[k] - &CorrSurf[0][0]); } corr = adj * (corrvec1[ind2++] + corrvec2[ind1++]); CorrSurfPtr2[k] = corr; if (corr > corr_max) { - corr_max = corr; /* update maximum */ + corr_max = corr; /* update maximum */ max_ind = (int)(&CorrSurfPtr2[k] - &CorrSurf[0][0]); } } @@ -331,33 +341,41 @@ static void WebRtcIsac_InitializePitch(const double* in, peaks_ind = 0; /* find peaks */ - for (m = 1; m < PITCH_BW+1; m++) { - if (peaks_ind == PITCH_MAX_NUM_PEAKS) break; + for (m = 1; m < PITCH_BW + 1; m++) { + if (peaks_ind == PITCH_MAX_NUM_PEAKS) + break; CorrSurfPtr1 = &CorrSurf[m][2]; - for (k = 2; k < PITCH_LAG_SPAN2-PITCH_BW-2+m; k++) { + for (k = 2; k < PITCH_LAG_SPAN2 - PITCH_BW - 2 + m; k++) { corr = CorrSurfPtr1[k]; if (corr > corr_max) { - if ( (corr > CorrSurfPtr1[k - (PITCH_LAG_SPAN2+5)]) && (corr > CorrSurfPtr1[k - (PITCH_LAG_SPAN2+4)]) ) { - if ( (corr > CorrSurfPtr1[k + (PITCH_LAG_SPAN2+4)]) && (corr > CorrSurfPtr1[k + (PITCH_LAG_SPAN2+5)]) ) { + if ((corr > CorrSurfPtr1[k - (PITCH_LAG_SPAN2 + 5)]) && + (corr > CorrSurfPtr1[k - (PITCH_LAG_SPAN2 + 4)])) { + if ((corr > CorrSurfPtr1[k + (PITCH_LAG_SPAN2 + 4)]) && + (corr > CorrSurfPtr1[k + (PITCH_LAG_SPAN2 + 5)])) { /* found a peak; store index into matrix */ 
peaks[peaks_ind++] = (int)(&CorrSurfPtr1[k] - &CorrSurf[0][0]); - if (peaks_ind == PITCH_MAX_NUM_PEAKS) break; + if (peaks_ind == PITCH_MAX_NUM_PEAKS) + break; } } } } } - for (m = PITCH_BW+1; m < 2*PITCH_BW; m++) { - if (peaks_ind == PITCH_MAX_NUM_PEAKS) break; + for (m = PITCH_BW + 1; m < 2 * PITCH_BW; m++) { + if (peaks_ind == PITCH_MAX_NUM_PEAKS) + break; CorrSurfPtr1 = &CorrSurf[m][2]; - for (k = 2+m-PITCH_BW; k < PITCH_LAG_SPAN2-2; k++) { + for (k = 2 + m - PITCH_BW; k < PITCH_LAG_SPAN2 - 2; k++) { corr = CorrSurfPtr1[k]; if (corr > corr_max) { - if ( (corr > CorrSurfPtr1[k - (PITCH_LAG_SPAN2+5)]) && (corr > CorrSurfPtr1[k - (PITCH_LAG_SPAN2+4)]) ) { - if ( (corr > CorrSurfPtr1[k + (PITCH_LAG_SPAN2+4)]) && (corr > CorrSurfPtr1[k + (PITCH_LAG_SPAN2+5)]) ) { + if ((corr > CorrSurfPtr1[k - (PITCH_LAG_SPAN2 + 5)]) && + (corr > CorrSurfPtr1[k - (PITCH_LAG_SPAN2 + 4)])) { + if ((corr > CorrSurfPtr1[k + (PITCH_LAG_SPAN2 + 4)]) && + (corr > CorrSurfPtr1[k + (PITCH_LAG_SPAN2 + 5)])) { /* found a peak; store index into matrix */ peaks[peaks_ind++] = (int)(&CorrSurfPtr1[k] - &CorrSurf[0][0]); - if (peaks_ind == PITCH_MAX_NUM_PEAKS) break; + if (peaks_ind == PITCH_MAX_NUM_PEAKS) + break; } } } @@ -371,28 +389,32 @@ static void WebRtcIsac_InitializePitch(const double* in, peak = peaks[k]; /* compute four interpolated values around current peak */ - IntrepolFilter(&CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+5)], &intrp_a); - IntrepolFilter(&CorrSurfPtr1[peak - 1 ], &intrp_b); - IntrepolFilter(&CorrSurfPtr1[peak ], &intrp_c); - IntrepolFilter(&CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+4)], &intrp_d); + IntrepolFilter(&CorrSurfPtr1[peak - (PITCH_LAG_SPAN2 + 5)], &intrp_a); + IntrepolFilter(&CorrSurfPtr1[peak - 1], &intrp_b); + IntrepolFilter(&CorrSurfPtr1[peak], &intrp_c); + IntrepolFilter(&CorrSurfPtr1[peak + (PITCH_LAG_SPAN2 + 4)], &intrp_d); /* determine maximum of the interpolated values */ corr = CorrSurfPtr1[peak]; corr_max = intrp_a; - if (intrp_b > corr_max) corr_max = intrp_b; - if (intrp_c > corr_max) corr_max = intrp_c; - if (intrp_d > corr_max) corr_max = intrp_d; + if (intrp_b > corr_max) + corr_max = intrp_b; + if (intrp_c > corr_max) + corr_max = intrp_c; + if (intrp_d > corr_max) + corr_max = intrp_d; /* determine where the peak sits and fill a 3x3 matrix around it */ - row = peak / (PITCH_LAG_SPAN2+4); - lags1[k] = (double) ((peak - row * (PITCH_LAG_SPAN2+4)) + PITCH_MIN_LAG/2 - 4); - lags2[k] = (double) (lags1[k] + PITCH_BW - row); - if ( corr > corr_max ) { - T[0][0] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+5)]; - T[2][0] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+4)]; + row = peak / (PITCH_LAG_SPAN2 + 4); + lags1[k] = (double)((peak - row * (PITCH_LAG_SPAN2 + 4)) + + PITCH_MIN_LAG / 2 - 4); + lags2[k] = (double)(lags1[k] + PITCH_BW - row); + if (corr > corr_max) { + T[0][0] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2 + 5)]; + T[2][0] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2 + 4)]; T[1][1] = corr; - T[0][2] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+4)]; - T[2][2] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+5)]; + T[0][2] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2 + 4)]; + T[2][2] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2 + 5)]; T[1][0] = intrp_a; T[0][1] = intrp_b; T[2][1] = intrp_c; @@ -401,51 +423,55 @@ static void WebRtcIsac_InitializePitch(const double* in, if (intrp_a == corr_max) { lags1[k] -= 0.5; lags2[k] += 0.5; - IntrepolFilter(&CorrSurfPtr1[peak - 2*(PITCH_LAG_SPAN2+5)], &T[0][0]); - IntrepolFilter(&CorrSurfPtr1[peak - (2*PITCH_LAG_SPAN2+9)], &T[2][0]); + IntrepolFilter(&CorrSurfPtr1[peak - 2 * 
(PITCH_LAG_SPAN2 + 5)], + &T[0][0]); + IntrepolFilter(&CorrSurfPtr1[peak - (2 * PITCH_LAG_SPAN2 + 9)], + &T[2][0]); T[1][1] = intrp_a; T[0][2] = intrp_b; T[2][2] = intrp_c; - T[1][0] = CorrSurfPtr1[peak - (2*PITCH_LAG_SPAN2+9)]; - T[0][1] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+5)]; - T[2][1] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+4)]; + T[1][0] = CorrSurfPtr1[peak - (2 * PITCH_LAG_SPAN2 + 9)]; + T[0][1] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2 + 5)]; + T[2][1] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2 + 4)]; T[1][2] = corr; } else if (intrp_b == corr_max) { lags1[k] -= 0.5; lags2[k] -= 0.5; - IntrepolFilter(&CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+6)], &T[0][0]); + IntrepolFilter(&CorrSurfPtr1[peak - (PITCH_LAG_SPAN2 + 6)], &T[0][0]); T[2][0] = intrp_a; T[1][1] = intrp_b; - IntrepolFilter(&CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+3)], &T[0][2]); + IntrepolFilter(&CorrSurfPtr1[peak + (PITCH_LAG_SPAN2 + 3)], &T[0][2]); T[2][2] = intrp_d; - T[1][0] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+5)]; + T[1][0] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2 + 5)]; T[0][1] = CorrSurfPtr1[peak - 1]; T[2][1] = corr; - T[1][2] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+4)]; + T[1][2] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2 + 4)]; } else if (intrp_c == corr_max) { lags1[k] += 0.5; lags2[k] += 0.5; T[0][0] = intrp_a; - IntrepolFilter(&CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+4)], &T[2][0]); + IntrepolFilter(&CorrSurfPtr1[peak - (PITCH_LAG_SPAN2 + 4)], &T[2][0]); T[1][1] = intrp_c; T[0][2] = intrp_d; - IntrepolFilter(&CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+5)], &T[2][2]); - T[1][0] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2+4)]; + IntrepolFilter(&CorrSurfPtr1[peak + (PITCH_LAG_SPAN2 + 5)], &T[2][2]); + T[1][0] = CorrSurfPtr1[peak - (PITCH_LAG_SPAN2 + 4)]; T[0][1] = corr; T[2][1] = CorrSurfPtr1[peak + 1]; - T[1][2] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+5)]; + T[1][2] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2 + 5)]; } else { lags1[k] += 0.5; lags2[k] -= 0.5; T[0][0] = intrp_b; T[2][0] = intrp_c; T[1][1] = intrp_d; - IntrepolFilter(&CorrSurfPtr1[peak + 2*(PITCH_LAG_SPAN2+4)], &T[0][2]); - IntrepolFilter(&CorrSurfPtr1[peak + (2*PITCH_LAG_SPAN2+9)], &T[2][2]); + IntrepolFilter(&CorrSurfPtr1[peak + 2 * (PITCH_LAG_SPAN2 + 4)], + &T[0][2]); + IntrepolFilter(&CorrSurfPtr1[peak + (2 * PITCH_LAG_SPAN2 + 9)], + &T[2][2]); T[1][0] = corr; - T[0][1] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+4)]; - T[2][1] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2+5)]; - T[1][2] = CorrSurfPtr1[peak + (2*PITCH_LAG_SPAN2+9)]; + T[0][1] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2 + 4)]; + T[2][1] = CorrSurfPtr1[peak + (PITCH_LAG_SPAN2 + 5)]; + T[1][2] = CorrSurfPtr1[peak + (2 * PITCH_LAG_SPAN2 + 9)]; } } @@ -466,27 +492,34 @@ static void WebRtcIsac_InitializePitch(const double* in, lags1[peak] *= 2.0; lags2[peak] *= 2.0; - if (lags1[peak] < (double) PITCH_MIN_LAG) lags1[peak] = (double) PITCH_MIN_LAG; - if (lags2[peak] < (double) PITCH_MIN_LAG) lags2[peak] = (double) PITCH_MIN_LAG; - if (lags1[peak] > (double) PITCH_MAX_LAG) lags1[peak] = (double) PITCH_MAX_LAG; - if (lags2[peak] > (double) PITCH_MAX_LAG) lags2[peak] = (double) PITCH_MAX_LAG; + if (lags1[peak] < (double)PITCH_MIN_LAG) + lags1[peak] = (double)PITCH_MIN_LAG; + if (lags2[peak] < (double)PITCH_MIN_LAG) + lags2[peak] = (double)PITCH_MIN_LAG; + if (lags1[peak] > (double)PITCH_MAX_LAG) + lags1[peak] = (double)PITCH_MAX_LAG; + if (lags2[peak] > (double)PITCH_MAX_LAG) + lags2[peak] = (double)PITCH_MAX_LAG; /* store lags of highest peak in output array */ lags[0] = lags1[peak]; lags[1] = lags1[peak]; lags[2] = lags2[peak]; lags[3] = 
lags2[peak]; - } - else - { - row = max_ind / (PITCH_LAG_SPAN2+4); - lags1[0] = (double) ((max_ind - row * (PITCH_LAG_SPAN2+4)) + PITCH_MIN_LAG/2 - 4); - lags2[0] = (double) (lags1[0] + PITCH_BW - row); - - if (lags1[0] < (double) PITCH_MIN_LAG) lags1[0] = (double) PITCH_MIN_LAG; - if (lags2[0] < (double) PITCH_MIN_LAG) lags2[0] = (double) PITCH_MIN_LAG; - if (lags1[0] > (double) PITCH_MAX_LAG) lags1[0] = (double) PITCH_MAX_LAG; - if (lags2[0] > (double) PITCH_MAX_LAG) lags2[0] = (double) PITCH_MAX_LAG; + } else { + row = max_ind / (PITCH_LAG_SPAN2 + 4); + lags1[0] = (double)((max_ind - row * (PITCH_LAG_SPAN2 + 4)) + + PITCH_MIN_LAG / 2 - 4); + lags2[0] = (double)(lags1[0] + PITCH_BW - row); + + if (lags1[0] < (double)PITCH_MIN_LAG) + lags1[0] = (double)PITCH_MIN_LAG; + if (lags2[0] < (double)PITCH_MIN_LAG) + lags2[0] = (double)PITCH_MIN_LAG; + if (lags1[0] > (double)PITCH_MAX_LAG) + lags1[0] = (double)PITCH_MAX_LAG; + if (lags2[0] > (double)PITCH_MAX_LAG) + lags2[0] = (double)PITCH_MAX_LAG; /* store lags of highest peak in output array */ lags[0] = lags1[0]; @@ -498,35 +531,37 @@ static void WebRtcIsac_InitializePitch(const double* in, RTC_POP_IGNORING_WFRAME_LARGER_THAN() -/* create weighting matrix by orthogonalizing a basis of polynomials of increasing order - * t = (0:4)'; - * A = [t.^0, t.^1, t.^2, t.^3, t.^4]; - * [Q, dummy] = qr(A); - * P.Weight = Q * diag([0, .1, .5, 1, 1]) * Q'; */ +/* create weighting matrix by orthogonalizing a basis of polynomials of + * increasing order t = (0:4)'; A = [t.^0, t.^1, t.^2, t.^3, t.^4]; [Q, dummy] = + * qr(A); P.Weight = Q * diag([0, .1, .5, 1, 1]) * Q'; */ static const double kWeight[5][5] = { - { 0.29714285714286, -0.30857142857143, -0.05714285714286, 0.05142857142857, 0.01714285714286}, - {-0.30857142857143, 0.67428571428571, -0.27142857142857, -0.14571428571429, 0.05142857142857}, - {-0.05714285714286, -0.27142857142857, 0.65714285714286, -0.27142857142857, -0.05714285714286}, - { 0.05142857142857, -0.14571428571429, -0.27142857142857, 0.67428571428571, -0.30857142857143}, - { 0.01714285714286, 0.05142857142857, -0.05714285714286, -0.30857142857143, 0.29714285714286} -}; + {0.29714285714286, -0.30857142857143, -0.05714285714286, 0.05142857142857, + 0.01714285714286}, + {-0.30857142857143, 0.67428571428571, -0.27142857142857, -0.14571428571429, + 0.05142857142857}, + {-0.05714285714286, -0.27142857142857, 0.65714285714286, -0.27142857142857, + -0.05714285714286}, + {0.05142857142857, -0.14571428571429, -0.27142857142857, 0.67428571428571, + -0.30857142857143}, + {0.01714285714286, 0.05142857142857, -0.05714285714286, -0.30857142857143, + 0.29714285714286}}; /* second order high-pass filter */ static void WebRtcIsac_Highpass(const double* in, - double* out, - double* state, - size_t N) { + double* out, + double* state, + size_t N) { /* create high-pass filter ocefficients * z = 0.998 * exp(j*2*pi*35/8000); * p = 0.94 * exp(j*2*pi*140/8000); * HP_b = [1, -2*real(z), abs(z)^2]; * HP_a = [1, -2*real(p), abs(p)^2]; */ - static const double a_coef[2] = { 1.86864659625574, -0.88360000000000}; - static const double b_coef[2] = {-1.99524591718270, 0.99600400000000}; + static const double a_coef[2] = {1.86864659625574, -0.88360000000000}; + static const double b_coef[2] = {-1.99524591718270, 0.99600400000000}; size_t k; - for (k=0; kwhitened_buf, sizeof(double) * QLOOKAHEAD); /* compute weighted and whitened signals */ - WebRtcIsac_WeightingFilter(HPin, &Weighted[0], &Whitened[QLOOKAHEAD], &(State->Wghtstr)); + WebRtcIsac_WeightingFilter(HPin, 
&Weighted[0], &Whitened[QLOOKAHEAD], + &(State->Wghtstr)); /* copy from buffer into state */ - memcpy(State->whitened_buf, Whitened+PITCH_FRAME_LEN, sizeof(double) * QLOOKAHEAD); + memcpy(State->whitened_buf, Whitened + PITCH_FRAME_LEN, + sizeof(double) * QLOOKAHEAD); old_lag = State->PFstr_wght.oldlagp[0]; old_gain = State->PFstr_wght.oldgainp[0]; @@ -573,7 +611,6 @@ void WebRtcIsac_PitchAnalysis(const double *in, /* PITCH_FRAME_LEN /* inital pitch estimate */ WebRtcIsac_InitializePitch(Weighted, old_lag, old_gain, State, lags); - /* Iterative optimization of lags - to be done */ /* compute energy of whitened signal */ @@ -581,10 +618,10 @@ void WebRtcIsac_PitchAnalysis(const double *in, /* PITCH_FRAME_LEN for (k = 0; k < PITCH_FRAME_LEN + QLOOKAHEAD; k++) nrg_wht += Whitened[k] * Whitened[k]; - /* Iterative optimization of gains */ - /* set weights for energy, gain fluctiation, and spectral gain penalty functions */ + /* set weights for energy, gain fluctiation, and spectral gain penalty + * functions */ Wnrg = 1.0 / nrg_wht; Wgain = 0.005; Wfluct = 3.0; @@ -596,9 +633,11 @@ void WebRtcIsac_PitchAnalysis(const double *in, /* PITCH_FRAME_LEN /* two iterations should be enough */ for (iter = 0; iter < 2; iter++) { /* compute Jacobian of pre-filter output towards gains */ - WebRtcIsac_PitchfilterPre_gains(Whitened, out_G, out_dG, &(State->PFstr_wght), lags, gains); + WebRtcIsac_PitchfilterPre_gains(Whitened, out_G, out_dG, + &(State->PFstr_wght), lags, gains); - /* gradient and approximate Hessian (lower triangle) for minimizing the filter's output power */ + /* gradient and approximate Hessian (lower triangle) for minimizing the + * filter's output power */ for (k = 0; k < 4; k++) { tmp = 0.0; for (n = 0; n < PITCH_FRAME_LEN + QLOOKAHEAD; n++) @@ -614,16 +653,17 @@ void WebRtcIsac_PitchAnalysis(const double *in, /* PITCH_FRAME_LEN } } - /* add gradient and Hessian (lower triangle) for dampening fast gain changes */ + /* add gradient and Hessian (lower triangle) for dampening fast gain changes + */ for (k = 0; k < 4; k++) { - tmp = kWeight[k+1][0] * old_gain; + tmp = kWeight[k + 1][0] * old_gain; for (m = 0; m < 4; m++) - tmp += kWeight[k+1][m+1] * gains[m]; + tmp += kWeight[k + 1][m + 1] * gains[m]; grad[k] += tmp * Wfluct; } for (k = 0; k < 4; k++) { for (m = 0; m <= k; m++) { - H[k][m] += kWeight[k+1][m+1] * Wfluct; + H[k][m] += kWeight[k + 1][m + 1] * Wfluct; } } @@ -637,10 +677,10 @@ void WebRtcIsac_PitchAnalysis(const double *in, /* PITCH_FRAME_LEN grad[3] += 1.33 * (tmp * tmp * Wgain); H[3][3] += 2.66 * tmp * (tmp * tmp * Wgain); - /* compute Cholesky factorization of Hessian * by overwritting the upper triangle; scale factors on diagonal - * (for non pc-platforms store the inverse of the diagonals seperately to minimize divisions) */ + * (for non pc-platforms store the inverse of the diagonals seperately to + * minimize divisions) */ H[0][1] = H[1][0] / H[0][0]; H[0][2] = H[2][0] / H[0][0]; H[0][3] = H[3][0] / H[0][0]; @@ -648,8 +688,10 @@ void WebRtcIsac_PitchAnalysis(const double *in, /* PITCH_FRAME_LEN H[1][2] = (H[2][1] - H[0][1] * H[2][0]) / H[1][1]; H[1][3] = (H[3][1] - H[0][1] * H[3][0]) / H[1][1]; H[2][2] -= H[0][0] * H[0][2] * H[0][2] + H[1][1] * H[1][2] * H[1][2]; - H[2][3] = (H[3][2] - H[0][2] * H[3][0] - H[1][2] * H[1][1] * H[1][3]) / H[2][2]; - H[3][3] -= H[0][0] * H[0][3] * H[0][3] + H[1][1] * H[1][3] * H[1][3] + H[2][2] * H[2][3] * H[2][3]; + H[2][3] = + (H[3][2] - H[0][2] * H[3][0] - H[1][2] * H[1][1] * H[1][3]) / H[2][2]; + H[3][3] -= H[0][0] * H[0][3] * H[0][3] + 
H[1][1] * H[1][3] * H[1][3] + + H[2][2] * H[2][3] * H[2][3]; /* Compute update as delta_gains = -inv(H) * grad */ /* copy and negate */ @@ -682,7 +724,7 @@ void WebRtcIsac_PitchAnalysis(const double *in, /* PITCH_FRAME_LEN /* concatenate previous input's end and current input */ memcpy(inbuf, State->inbuf, sizeof(double) * QLOOKAHEAD); - memcpy(inbuf+QLOOKAHEAD, in, sizeof(double) * PITCH_FRAME_LEN); + memcpy(inbuf + QLOOKAHEAD, in, sizeof(double) * PITCH_FRAME_LEN); /* lookahead pitch filtering for masking analysis */ WebRtcIsac_PitchfilterPre_la(inbuf, out, &(State->PFstr), lags, gains); diff --git a/modules/audio_coding/codecs/isac/main/source/pitch_filter.c b/modules/audio_coding/codecs/isac/main/source/pitch_filter.c index bf03dfff2e..494b5b7b35 100644 --- a/modules/audio_coding/codecs/isac/main/source/pitch_filter.c +++ b/modules/audio_coding/codecs/isac/main/source/pitch_filter.c @@ -12,8 +12,8 @@ #include #include -#include "modules/audio_coding/codecs/isac/main/source/pitch_estimator.h" #include "modules/audio_coding/codecs/isac/main/source/os_specific_inline.h" +#include "modules/audio_coding/codecs/isac/main/source/pitch_estimator.h" #include "rtc_base/compile_assert_c.h" /* @@ -31,35 +31,34 @@ */ static const double kDampFilter[PITCH_DAMPORDER] = {-0.07, 0.25, 0.64, 0.25, - -0.07}; + -0.07}; /* interpolation coefficients; generated by design_pitch_filter.m */ static const double kIntrpCoef[PITCH_FRACS][PITCH_FRACORDER] = { - {-0.02239172458614, 0.06653315052934, -0.16515880017569, 0.60701333734125, - 0.64671399919202, -0.20249000396417, 0.09926548334755, -0.04765933793109, + {-0.02239172458614, 0.06653315052934, -0.16515880017569, 0.60701333734125, + 0.64671399919202, -0.20249000396417, 0.09926548334755, -0.04765933793109, 0.01754159521746}, - {-0.01985640750434, 0.05816126837866, -0.13991265473714, 0.44560418147643, - 0.79117042386876, -0.20266133815188, 0.09585268418555, -0.04533310458084, + {-0.01985640750434, 0.05816126837866, -0.13991265473714, 0.44560418147643, + 0.79117042386876, -0.20266133815188, 0.09585268418555, -0.04533310458084, 0.01654127246314}, - {-0.01463300534216, 0.04229888475060, -0.09897034715253, 0.28284326017787, - 0.90385267956632, -0.16976950138649, 0.07704272393639, -0.03584218578311, + {-0.01463300534216, 0.04229888475060, -0.09897034715253, 0.28284326017787, + 0.90385267956632, -0.16976950138649, 0.07704272393639, -0.03584218578311, 0.01295781500709}, - {-0.00764851320885, 0.02184035544377, -0.04985561057281, 0.13083306574393, - 0.97545011664662, -0.10177807997561, 0.04400901776474, -0.02010737175166, + {-0.00764851320885, 0.02184035544377, -0.04985561057281, 0.13083306574393, + 0.97545011664662, -0.10177807997561, 0.04400901776474, -0.02010737175166, 0.00719783432422}, - {-0.00000000000000, 0.00000000000000, -0.00000000000001, 0.00000000000001, - 0.99999999999999, 0.00000000000001, -0.00000000000001, 0.00000000000000, + {-0.00000000000000, 0.00000000000000, -0.00000000000001, 0.00000000000001, + 0.99999999999999, 0.00000000000001, -0.00000000000001, 0.00000000000000, -0.00000000000000}, - {0.00719783432422, -0.02010737175166, 0.04400901776474, -0.10177807997562, - 0.97545011664663, 0.13083306574393, -0.04985561057280, 0.02184035544377, + {0.00719783432422, -0.02010737175166, 0.04400901776474, -0.10177807997562, + 0.97545011664663, 0.13083306574393, -0.04985561057280, 0.02184035544377, -0.00764851320885}, - {0.01295781500710, -0.03584218578312, 0.07704272393640, -0.16976950138650, - 0.90385267956634, 0.28284326017785, -0.09897034715252, 
0.04229888475059, + {0.01295781500710, -0.03584218578312, 0.07704272393640, -0.16976950138650, + 0.90385267956634, 0.28284326017785, -0.09897034715252, 0.04229888475059, -0.01463300534216}, - {0.01654127246315, -0.04533310458085, 0.09585268418557, -0.20266133815190, - 0.79117042386878, 0.44560418147640, -0.13991265473712, 0.05816126837865, - -0.01985640750433} -}; + {0.01654127246315, -0.04533310458085, 0.09585268418557, -0.20266133815190, + 0.79117042386878, 0.44560418147640, -0.13991265473712, 0.05816126837865, + -0.01985640750433}}; /* * Enumerating the operation of the filter. @@ -78,7 +77,10 @@ static const double kIntrpCoef[PITCH_FRACS][PITCH_FRACORDER] = { * used to find the optimal gain. */ typedef enum { - kPitchFilterPre, kPitchFilterPost, kPitchFilterPreLa, kPitchFilterPreGain + kPitchFilterPre, + kPitchFilterPost, + kPitchFilterPreLa, + kPitchFilterPreGain } PitchFilterOperation; /* @@ -104,7 +106,7 @@ typedef enum { typedef struct { double buffer[PITCH_INTBUFFSIZE + QLOOKAHEAD]; double damper_state[PITCH_DAMPORDER]; - const double *interpol_coeff; + const double* interpol_coeff; double gain; double lag; int lag_offset; @@ -132,7 +134,8 @@ typedef struct { * where the output of different gain values (differential * change to gain) is written. */ -static void FilterSegment(const double* in_data, PitchFilterParam* parameters, +static void FilterSegment(const double* in_data, + PitchFilterParam* parameters, double* out_data, double out_dg[][PITCH_FRAME_LEN + QLOOKAHEAD]) { int n; @@ -173,15 +176,15 @@ static void FilterSegment(const double* in_data, PitchFilterParam* parameters, for (j = 0; j < parameters->sub_frame + 1; ++j) { /* Filter for fractional pitch. */ sum2 = 0.0; - for (m = PITCH_FRACORDER-1; m >= m_tmp; --m) { + for (m = PITCH_FRACORDER - 1; m >= m_tmp; --m) { /* `lag_index + m` is always larger than or equal to zero, see how * m_tmp is computed. This is equivalent to assume samples outside * `out_dg[j]` are zero. */ sum2 += out_dg[j][lag_index + m] * parameters->interpol_coeff[m]; } /* Add the contribution of differential gain change. */ - parameters->damper_state_dg[j][0] = parameters->gain_mult[j] * sum + - parameters->gain * sum2; + parameters->damper_state_dg[j][0] = + parameters->gain_mult[j] * sum + parameters->gain * sum2; } /* Filter with damping filter, and store the results. */ @@ -201,8 +204,8 @@ static void FilterSegment(const double* in_data, PitchFilterParam* parameters, /* Subtract from input and update buffer. */ out_data[parameters->index] = in_data[parameters->index] - sum; - parameters->buffer[pos] = in_data[parameters->index] + - out_data[parameters->index]; + parameters->buffer[pos] = + in_data[parameters->index] + out_data[parameters->index]; ++parameters->index; ++pos; @@ -216,8 +219,8 @@ static void Update(PitchFilterParam* parameters) { double fraction; int fraction_index; /* Compute integer lag-offset. */ - parameters->lag_offset = WebRtcIsac_lrint(parameters->lag + PITCH_FILTDELAY + - 0.5); + parameters->lag_offset = + WebRtcIsac_lrint(parameters->lag + PITCH_FILTDELAY + 0.5); /* Find correct set of coefficients for computing fractional pitch. */ fraction = parameters->lag_offset - (parameters->lag + PITCH_FILTDELAY); fraction_index = WebRtcIsac_lrint(PITCH_FRACS * fraction - 0.5); @@ -257,8 +260,11 @@ static void Update(PitchFilterParam* parameters) { * where the output of different gain values (differential * change to gain) is written. 
*/ -static void FilterFrame(const double* in_data, PitchFiltstr* filter_state, - double* lags, double* gains, PitchFilterOperation mode, +static void FilterFrame(const double* in_data, + PitchFiltstr* filter_state, + double* lags, + double* gains, + PitchFilterOperation mode, double* out_data, double out_dg[][PITCH_FRAME_LEN + QLOOKAHEAD]) { PitchFilterParam filter_parameters; @@ -276,7 +282,7 @@ static void FilterFrame(const double* in_data, PitchFiltstr* filter_state, memcpy(filter_parameters.buffer, filter_state->ubuf, sizeof(filter_state->ubuf)); RTC_COMPILE_ASSERT(sizeof(filter_parameters.buffer) >= - sizeof(filter_state->ubuf)); + sizeof(filter_state->ubuf)); memset(filter_parameters.buffer + sizeof(filter_state->ubuf) / sizeof(filter_state->ubuf[0]), 0, sizeof(filter_parameters.buffer) - sizeof(filter_state->ubuf)); @@ -289,7 +295,7 @@ static void FilterFrame(const double* in_data, PitchFiltstr* filter_state, memset(filter_parameters.damper_state_dg, 0, sizeof(filter_parameters.damper_state_dg)); for (n = 0; n < PITCH_SUBFRAMES; ++n) { - //memset(out_dg[n], 0, sizeof(double) * (PITCH_FRAME_LEN + QLOOKAHEAD)); + // memset(out_dg[n], 0, sizeof(double) * (PITCH_FRAME_LEN + QLOOKAHEAD)); memset(out_dg[n], 0, sizeof(out_dg[n])); } } else if (mode == kPitchFilterPost) { @@ -360,29 +366,38 @@ static void FilterFrame(const double* in_data, PitchFiltstr* filter_state, } } -void WebRtcIsac_PitchfilterPre(double* in_data, double* out_data, - PitchFiltstr* pf_state, double* lags, +void WebRtcIsac_PitchfilterPre(double* in_data, + double* out_data, + PitchFiltstr* pf_state, + double* lags, double* gains) { FilterFrame(in_data, pf_state, lags, gains, kPitchFilterPre, out_data, NULL); } -void WebRtcIsac_PitchfilterPre_la(double* in_data, double* out_data, - PitchFiltstr* pf_state, double* lags, +void WebRtcIsac_PitchfilterPre_la(double* in_data, + double* out_data, + PitchFiltstr* pf_state, + double* lags, double* gains) { FilterFrame(in_data, pf_state, lags, gains, kPitchFilterPreLa, out_data, NULL); } void WebRtcIsac_PitchfilterPre_gains( - double* in_data, double* out_data, - double out_dg[][PITCH_FRAME_LEN + QLOOKAHEAD], PitchFiltstr *pf_state, - double* lags, double* gains) { + double* in_data, + double* out_data, + double out_dg[][PITCH_FRAME_LEN + QLOOKAHEAD], + PitchFiltstr* pf_state, + double* lags, + double* gains) { FilterFrame(in_data, pf_state, lags, gains, kPitchFilterPreGain, out_data, out_dg); } -void WebRtcIsac_PitchfilterPost(double* in_data, double* out_data, - PitchFiltstr* pf_state, double* lags, +void WebRtcIsac_PitchfilterPost(double* in_data, + double* out_data, + PitchFiltstr* pf_state, + double* lags, double* gains) { FilterFrame(in_data, pf_state, lags, gains, kPitchFilterPost, out_data, NULL); } diff --git a/modules/audio_coding/codecs/legacy_encoded_audio_frame.cc b/modules/audio_coding/codecs/legacy_encoded_audio_frame.cc index dacf325082..adbda10e2e 100644 --- a/modules/audio_coding/codecs/legacy_encoded_audio_frame.cc +++ b/modules/audio_coding/codecs/legacy_encoded_audio_frame.cc @@ -19,7 +19,7 @@ namespace webrtc { LegacyEncodedAudioFrame::LegacyEncodedAudioFrame(AudioDecoder* decoder, - rtc::Buffer&& payload) + Buffer&& payload) : decoder_(decoder), payload_(std::move(payload)) {} LegacyEncodedAudioFrame::~LegacyEncodedAudioFrame() = default; @@ -29,22 +29,22 @@ size_t LegacyEncodedAudioFrame::Duration() const { return (ret < 0) ? 
0 : static_cast(ret); } -absl::optional -LegacyEncodedAudioFrame::Decode(rtc::ArrayView decoded) const { +std::optional +LegacyEncodedAudioFrame::Decode(ArrayView decoded) const { AudioDecoder::SpeechType speech_type = AudioDecoder::kSpeech; const int ret = decoder_->Decode( payload_.data(), payload_.size(), decoder_->SampleRateHz(), decoded.size() * sizeof(int16_t), decoded.data(), &speech_type); if (ret < 0) - return absl::nullopt; + return std::nullopt; return DecodeResult{static_cast(ret), speech_type}; } std::vector LegacyEncodedAudioFrame::SplitBySamples( AudioDecoder* decoder, - rtc::Buffer&& payload, + Buffer&& payload, uint32_t timestamp, size_t bytes_per_ms, uint32_t timestamps_per_ms) { @@ -75,7 +75,7 @@ std::vector LegacyEncodedAudioFrame::SplitBySamples( timestamp_offset += timestamps_per_chunk) { split_size_bytes = std::min(split_size_bytes, payload.size() - byte_offset); - rtc::Buffer new_payload(payload.data() + byte_offset, split_size_bytes); + Buffer new_payload(payload.data() + byte_offset, split_size_bytes); std::unique_ptr frame( new LegacyEncodedAudioFrame(decoder, std::move(new_payload))); results.emplace_back(timestamp + timestamp_offset, 0, std::move(frame)); diff --git a/modules/audio_coding/codecs/legacy_encoded_audio_frame.h b/modules/audio_coding/codecs/legacy_encoded_audio_frame.h index 21da1367ed..50349e0aed 100644 --- a/modules/audio_coding/codecs/legacy_encoded_audio_frame.h +++ b/modules/audio_coding/codecs/legacy_encoded_audio_frame.h @@ -14,9 +14,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio_codecs/audio_decoder.h" #include "rtc_base/buffer.h" @@ -25,27 +25,26 @@ namespace webrtc { class LegacyEncodedAudioFrame final : public AudioDecoder::EncodedAudioFrame { public: - LegacyEncodedAudioFrame(AudioDecoder* decoder, rtc::Buffer&& payload); + LegacyEncodedAudioFrame(AudioDecoder* decoder, Buffer&& payload); ~LegacyEncodedAudioFrame() override; static std::vector SplitBySamples( AudioDecoder* decoder, - rtc::Buffer&& payload, + Buffer&& payload, uint32_t timestamp, size_t bytes_per_ms, uint32_t timestamps_per_ms); size_t Duration() const override; - absl::optional Decode( - rtc::ArrayView decoded) const override; + std::optional Decode(ArrayView decoded) const override; // For testing: - const rtc::Buffer& payload() const { return payload_; } + const Buffer& payload() const { return payload_; } private: AudioDecoder* const decoder_; - const rtc::Buffer payload_; + const Buffer payload_; }; } // namespace webrtc diff --git a/modules/audio_coding/codecs/legacy_encoded_audio_frame_unittest.cc b/modules/audio_coding/codecs/legacy_encoded_audio_frame_unittest.cc index f81aeeea80..5e03dce2bd 100644 --- a/modules/audio_coding/codecs/legacy_encoded_audio_frame_unittest.cc +++ b/modules/audio_coding/codecs/legacy_encoded_audio_frame_unittest.cc @@ -10,6 +10,11 @@ #include "modules/audio_coding/codecs/legacy_encoded_audio_frame.h" +#include +#include + +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" #include "test/gtest.h" @@ -122,7 +127,7 @@ TEST_P(SplitBySamplesTest, PayloadSizes) { // resulting frames can be checked and we can be reasonably certain no // sample was missed or repeated. const auto generate_payload = [](size_t num_bytes) { - rtc::Buffer payload(num_bytes); + Buffer payload(num_bytes); uint8_t value = 0; // Allow wrap-around of value in counter below. 
for (size_t i = 0; i != payload.size(); ++i, ++value) { @@ -146,13 +151,13 @@ TEST_P(SplitBySamplesTest, PayloadSizes) { const size_t length_bytes = expected_split.frame_sizes[i] * bytes_per_ms_; EXPECT_EQ(length_bytes, frame->payload().size()); EXPECT_EQ(expected_timestamp, result.timestamp); - const rtc::Buffer& payload = frame->payload(); + const Buffer& payload = frame->payload(); // Allow wrap-around of value in counter below. - for (size_t i = 0; i != payload.size(); ++i, ++value) { - ASSERT_EQ(value, payload[i]); + for (size_t j = 0; j != payload.size(); ++j, ++value) { + ASSERT_EQ(value, payload[j]); } - expected_timestamp += rtc::checked_cast( + expected_timestamp += checked_cast( expected_split.frame_sizes[i] * samples_per_ms_); } } diff --git a/modules/audio_coding/codecs/opus/audio_coder_opus_common.cc b/modules/audio_coding/codecs/opus/audio_coder_opus_common.cc index 03c02186d0..486f3a9f56 100644 --- a/modules/audio_coding/codecs/opus/audio_coder_opus_common.cc +++ b/modules/audio_coding/codecs/opus/audio_coder_opus_common.cc @@ -14,18 +14,18 @@ namespace webrtc { -absl::optional GetFormatParameter(const SdpAudioFormat& format, - absl::string_view param) { +std::optional GetFormatParameter(const SdpAudioFormat& format, + absl::string_view param) { auto it = format.parameters.find(std::string(param)); if (it == format.parameters.end()) - return absl::nullopt; + return std::nullopt; return it->second; } // Parses a comma-separated string "1,2,0,6" into a std::vector. template <> -absl::optional> GetFormatParameter( +std::optional> GetFormatParameter( const SdpAudioFormat& format, absl::string_view param) { std::vector result; @@ -39,9 +39,9 @@ absl::optional> GetFormatParameter( : (next_comma - pos); auto substring_with_number = comma_separated_list.substr(pos, distance_to_next_comma); - auto conv = rtc::StringToNumber(substring_with_number); + auto conv = StringToNumber(substring_with_number); if (!conv.has_value()) { - return absl::nullopt; + return std::nullopt; } result.push_back(*conv); pos += substring_with_number.size() + 1; diff --git a/modules/audio_coding/codecs/opus/audio_coder_opus_common.h b/modules/audio_coding/codecs/opus/audio_coder_opus_common.h index 5ebb51b577..adbfbc6eea 100644 --- a/modules/audio_coding/codecs/opus/audio_coder_opus_common.h +++ b/modules/audio_coding/codecs/opus/audio_coder_opus_common.h @@ -11,37 +11,35 @@ #ifndef MODULES_AUDIO_CODING_CODECS_OPUS_AUDIO_CODER_OPUS_COMMON_H_ #define MODULES_AUDIO_CODING_CODECS_OPUS_AUDIO_CODER_OPUS_COMMON_H_ +#include #include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_format.h" #include "rtc_base/string_to_number.h" namespace webrtc { -absl::optional GetFormatParameter(const SdpAudioFormat& format, - absl::string_view param); +std::optional GetFormatParameter(const SdpAudioFormat& format, + absl::string_view param); template -absl::optional GetFormatParameter(const SdpAudioFormat& format, - absl::string_view param) { - return rtc::StringToNumber(GetFormatParameter(format, param).value_or("")); +std::optional GetFormatParameter(const SdpAudioFormat& format, + absl::string_view param) { + return StringToNumber(GetFormatParameter(format, param).value_or("")); } template <> -absl::optional> GetFormatParameter( +std::optional> GetFormatParameter( const SdpAudioFormat& format, absl::string_view param); class OpusFrame : public AudioDecoder::EncodedAudioFrame { public: - 
OpusFrame(AudioDecoder* decoder, - rtc::Buffer&& payload, - bool is_primary_payload) + OpusFrame(AudioDecoder* decoder, Buffer&& payload, bool is_primary_payload) : decoder_(decoder), payload_(std::move(payload)), is_primary_payload_(is_primary_payload) {} @@ -58,8 +56,8 @@ class OpusFrame : public AudioDecoder::EncodedAudioFrame { bool IsDtxPacket() const override { return payload_.size() <= 2; } - absl::optional Decode( - rtc::ArrayView decoded) const override { + std::optional Decode( + ArrayView decoded) const override { AudioDecoder::SpeechType speech_type = AudioDecoder::kSpeech; int ret; if (is_primary_payload_) { @@ -73,14 +71,14 @@ class OpusFrame : public AudioDecoder::EncodedAudioFrame { } if (ret < 0) - return absl::nullopt; + return std::nullopt; return DecodeResult{static_cast(ret), speech_type}; } private: AudioDecoder* const decoder_; - const rtc::Buffer payload_; + const Buffer payload_; const bool is_primary_payload_; }; diff --git a/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.cc b/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.cc index 285ea89959..8acb6d2262 100644 --- a/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.cc +++ b/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.cc @@ -57,36 +57,36 @@ AudioDecoderMultiChannelOpusImpl::~AudioDecoderMultiChannelOpusImpl() { WebRtcOpus_DecoderFree(dec_state_); } -absl::optional +std::optional AudioDecoderMultiChannelOpusImpl::SdpToConfig(const SdpAudioFormat& format) { AudioDecoderMultiChannelOpusConfig config; config.num_channels = format.num_channels; auto num_streams = GetFormatParameter(format, "num_streams"); if (!num_streams.has_value()) { - return absl::nullopt; + return std::nullopt; } config.num_streams = *num_streams; auto coupled_streams = GetFormatParameter(format, "coupled_streams"); if (!coupled_streams.has_value()) { - return absl::nullopt; + return std::nullopt; } config.coupled_streams = *coupled_streams; auto channel_mapping = GetFormatParameter>(format, "channel_mapping"); if (!channel_mapping.has_value()) { - return absl::nullopt; + return std::nullopt; } config.channel_mapping = *channel_mapping; if (!config.IsOk()) { - return absl::nullopt; + return std::nullopt; } return config; } std::vector -AudioDecoderMultiChannelOpusImpl::ParsePayload(rtc::Buffer&& payload, +AudioDecoderMultiChannelOpusImpl::ParsePayload(Buffer&& payload, uint32_t timestamp) { std::vector results; @@ -94,7 +94,7 @@ AudioDecoderMultiChannelOpusImpl::ParsePayload(rtc::Buffer&& payload, const int duration = PacketDurationRedundant(payload.data(), payload.size()); RTC_DCHECK_GE(duration, 0); - rtc::Buffer payload_copy(payload.data(), payload.size()); + Buffer payload_copy(payload.data(), payload.size()); std::unique_ptr fec_frame( new OpusFrame(this, std::move(payload_copy), false)); results.emplace_back(timestamp - duration, 1, std::move(fec_frame)); diff --git a/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.h b/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.h index 2ff47a8a53..7d9d6aadf1 100644 --- a/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.h +++ b/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.h @@ -36,7 +36,7 @@ class AudioDecoderMultiChannelOpusImpl final : public AudioDecoder { AudioDecoderMultiChannelOpusImpl& operator=( const AudioDecoderMultiChannelOpusImpl&) = delete; - std::vector ParsePayload(rtc::Buffer&& payload, + std::vector 
ParsePayload(Buffer&& payload, uint32_t timestamp) override; void Reset() override; int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override; @@ -46,7 +46,7 @@ class AudioDecoderMultiChannelOpusImpl final : public AudioDecoder { int SampleRateHz() const override; size_t Channels() const override; - static absl::optional SdpToConfig( + static std::optional SdpToConfig( const SdpAudioFormat& format); protected: diff --git a/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_unittest.cc b/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_unittest.cc index 57e2107f3c..7b30a392f9 100644 --- a/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_unittest.cc +++ b/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_unittest.cc @@ -25,15 +25,15 @@ TEST(AudioDecoderMultiOpusTest, GetFormatParameter) { {"num_streams", "2"}}); EXPECT_EQ(GetFormatParameter(sdp_format, "channel_mapping"), - absl::optional("0,1,2,3")); + std::optional("0,1,2,3")); EXPECT_EQ(GetFormatParameter(sdp_format, "coupled_streams"), - absl::optional(2)); + std::optional(2)); - EXPECT_EQ(GetFormatParameter(sdp_format, "missing"), absl::nullopt); + EXPECT_EQ(GetFormatParameter(sdp_format, "missing"), std::nullopt); EXPECT_EQ(GetFormatParameter(sdp_format, "channel_mapping"), - absl::nullopt); + std::nullopt); } TEST(AudioDecoderMultiOpusTest, InvalidChannelMappings) { @@ -43,7 +43,7 @@ TEST(AudioDecoderMultiOpusTest, InvalidChannelMappings) { {{"channel_mapping", "3,0"}, {"coupled_streams", "1"}, {"num_streams", "2"}}); - const absl::optional decoder_config = + const std::optional decoder_config = AudioDecoderMultiChannelOpus::SdpToConfig(sdp_format); EXPECT_FALSE(decoder_config.has_value()); } @@ -54,7 +54,7 @@ TEST(AudioDecoderMultiOpusTest, InvalidChannelMappings) { {{"channel_mapping", "0,1,2,3,4,5"}, {"coupled_streams", "0"}, {"num_streams", "2"}}); - const absl::optional decoder_config = + const std::optional decoder_config = AudioDecoderMultiChannelOpus::SdpToConfig(sdp_format); EXPECT_FALSE(decoder_config.has_value()); } @@ -63,7 +63,7 @@ TEST(AudioDecoderMultiOpusTest, InvalidChannelMappings) { const SdpAudioFormat sdp_format( "multiopus", 48000, 5, {{"channel_mapping", "0,1,two,3,4"}, {"coupled_streams", "0"}}); - const absl::optional decoder_config = + const std::optional decoder_config = AudioDecoderMultiChannelOpus::SdpToConfig(sdp_format); EXPECT_FALSE(decoder_config.has_value()); } @@ -75,7 +75,7 @@ TEST(AudioDecoderMultiOpusTest, ValidSdpToConfigProducesCorrectConfig) { {"coupled_streams", "2"}, {"num_streams", "2"}}); - const absl::optional decoder_config = + const std::optional decoder_config = AudioDecoderMultiChannelOpus::SdpToConfig(sdp_format); ASSERT_TRUE(decoder_config.has_value()); @@ -92,7 +92,7 @@ TEST(AudioDecoderMultiOpusTest, InvalidSdpToConfigDoesNotProduceConfig) { {"coupled_stream", "2"}, {"num_streams", "2"}}); - const absl::optional decoder_config = + const std::optional decoder_config = AudioDecoderMultiChannelOpus::SdpToConfig(sdp_format); EXPECT_FALSE(decoder_config.has_value()); @@ -104,7 +104,7 @@ TEST(AudioDecoderMultiOpusTest, InvalidSdpToConfigDoesNotProduceConfig) { {"coupled_streams", "2"}, {"num_streams", "2"}}); - const absl::optional decoder_config = + const std::optional decoder_config = AudioDecoderMultiChannelOpus::SdpToConfig(sdp_format); EXPECT_FALSE(decoder_config.has_value()); @@ -117,7 +117,7 @@ TEST(AudioDecoderMultiOpusTest, CodecsCanBeCreated) { {"coupled_streams", "2"}, {"num_streams", "2"}}); 
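The audio_coder_opus_common.cc hunk above ports the comma-separated "channel_mapping" parser from absl::optional/rtc::StringToNumber to std::optional/StringToNumber, and the tests here expect std::nullopt for malformed lists such as "0,1,two,3,4". A standalone sketch of that parsing contract, using only the standard library (ParseCommaSeparatedBytes is an illustrative name, not a WebRTC API):

#include <charconv>
#include <cstddef>
#include <optional>
#include <string_view>
#include <vector>

// Parse "0,1,2,3" into bytes; return std::nullopt on any malformed token,
// mirroring the behaviour exercised by the GetFormatParameter tests above.
std::optional<std::vector<unsigned char>> ParseCommaSeparatedBytes(
    std::string_view list) {
  std::vector<unsigned char> result;
  size_t pos = 0;
  while (pos <= list.size()) {
    const size_t comma = list.find(',', pos);
    const std::string_view token = list.substr(
        pos, comma == std::string_view::npos ? list.size() - pos : comma - pos);
    int value = 0;
    const auto [ptr, ec] =
        std::from_chars(token.data(), token.data() + token.size(), value);
    if (ec != std::errc() || ptr != token.data() + token.size() ||
        value < 0 || value > 255) {
      return std::nullopt;  // Malformed token, e.g. "two".
    }
    result.push_back(static_cast<unsigned char>(value));
    if (comma == std::string_view::npos) {
      break;
    }
    pos = comma + 1;
  }
  return result;
}

The real implementation goes through the templated GetFormatParameter<T>() shown earlier in this patch; the sketch only illustrates the std::optional-based error path.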
- const absl::optional decoder_config = + const std::optional decoder_config = AudioDecoderMultiChannelOpus::SdpToConfig(sdp_format); ASSERT_TRUE(decoder_config.has_value()); @@ -135,7 +135,7 @@ TEST(AudioDecoderMultiOpusTest, AdvertisedCodecsCanBeCreated) { EXPECT_FALSE(specs.empty()); for (const AudioCodecSpec& spec : specs) { - const absl::optional decoder_config = + const std::optional decoder_config = AudioDecoderMultiChannelOpus::SdpToConfig(spec.format); ASSERT_TRUE(decoder_config.has_value()); diff --git a/modules/audio_coding/codecs/opus/audio_decoder_opus.cc b/modules/audio_coding/codecs/opus/audio_decoder_opus.cc index cff9685548..fae0bd47de 100644 --- a/modules/audio_coding/codecs/opus/audio_decoder_opus.cc +++ b/modules/audio_coding/codecs/opus/audio_decoder_opus.cc @@ -11,18 +11,22 @@ #include "modules/audio_coding/codecs/opus/audio_decoder_opus.h" #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/field_trials_view.h" #include "modules/audio_coding/codecs/opus/audio_coder_opus_common.h" #include "rtc_base/checks.h" namespace webrtc { -AudioDecoderOpusImpl::AudioDecoderOpusImpl(size_t num_channels, +AudioDecoderOpusImpl::AudioDecoderOpusImpl(const FieldTrialsView& field_trials, + size_t num_channels, int sample_rate_hz) - : channels_{num_channels}, sample_rate_hz_{sample_rate_hz} { + : channels_(num_channels), + sample_rate_hz_(sample_rate_hz), + generate_plc_(field_trials.IsEnabled("WebRTC-Audio-OpusGeneratePlc")) { RTC_DCHECK(num_channels == 1 || num_channels == 2); RTC_DCHECK(sample_rate_hz == 16000 || sample_rate_hz == 48000); const int error = @@ -36,7 +40,7 @@ AudioDecoderOpusImpl::~AudioDecoderOpusImpl() { } std::vector AudioDecoderOpusImpl::ParsePayload( - rtc::Buffer&& payload, + Buffer&& payload, uint32_t timestamp) { std::vector results; @@ -44,7 +48,7 @@ std::vector AudioDecoderOpusImpl::ParsePayload( const int duration = PacketDurationRedundant(payload.data(), payload.size()); RTC_DCHECK_GE(duration, 0); - rtc::Buffer payload_copy(payload.data(), payload.size()); + Buffer payload_copy(payload.data(), payload.size()); std::unique_ptr fec_frame( new OpusFrame(this, std::move(payload_copy), false)); results.emplace_back(timestamp - duration, 1, std::move(fec_frame)); @@ -125,4 +129,22 @@ size_t AudioDecoderOpusImpl::Channels() const { return channels_; } +void AudioDecoderOpusImpl::GeneratePlc( + size_t /* requested_samples_per_channel */, + BufferT* concealment_audio) { + if (!generate_plc_) { + return; + } + int plc_size = WebRtcOpus_PlcDuration(dec_state_) * channels_; + concealment_audio->AppendData(plc_size, [&](ArrayView decoded) { + int16_t temp_type = 1; + int ret = + WebRtcOpus_Decode(dec_state_, nullptr, 0, decoded.data(), &temp_type); + if (ret < 0) { + return 0; + } + return ret; + }); +} + } // namespace webrtc diff --git a/modules/audio_coding/codecs/opus/audio_decoder_opus.h b/modules/audio_coding/codecs/opus/audio_decoder_opus.h index e8fd0440bc..c36f4b1d11 100644 --- a/modules/audio_coding/codecs/opus/audio_decoder_opus.h +++ b/modules/audio_coding/codecs/opus/audio_decoder_opus.h @@ -17,6 +17,7 @@ #include #include "api/audio_codecs/audio_decoder.h" +#include "api/field_trials_view.h" #include "modules/audio_coding/codecs/opus/opus_interface.h" #include "rtc_base/buffer.h" @@ -24,14 +25,16 @@ namespace webrtc { class AudioDecoderOpusImpl final : public AudioDecoder { public: - explicit AudioDecoderOpusImpl(size_t num_channels, - int sample_rate_hz = 48000); + explicit AudioDecoderOpusImpl(const 
FieldTrialsView& field_trails, + size_t num_channels, + int sample_rate_hz); + ~AudioDecoderOpusImpl() override; AudioDecoderOpusImpl(const AudioDecoderOpusImpl&) = delete; AudioDecoderOpusImpl& operator=(const AudioDecoderOpusImpl&) = delete; - std::vector ParsePayload(rtc::Buffer&& payload, + std::vector ParsePayload(Buffer&& payload, uint32_t timestamp) override; void Reset() override; int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override; @@ -40,6 +43,8 @@ class AudioDecoderOpusImpl final : public AudioDecoder { bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const override; int SampleRateHz() const override; size_t Channels() const override; + void GeneratePlc(size_t requested_samples_per_channel, + BufferT* concealment_audio) override; protected: int DecodeInternal(const uint8_t* encoded, @@ -57,6 +62,7 @@ class AudioDecoderOpusImpl final : public AudioDecoder { OpusDecInst* dec_state_; const size_t channels_; const int sample_rate_hz_; + const bool generate_plc_; }; } // namespace webrtc diff --git a/modules/audio_coding/codecs/opus/audio_decoder_opus_unittest.cc b/modules/audio_coding/codecs/opus/audio_decoder_opus_unittest.cc new file mode 100644 index 0000000000..cba690406b --- /dev/null +++ b/modules/audio_coding/codecs/opus/audio_decoder_opus_unittest.cc @@ -0,0 +1,438 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_coding/codecs/opus/audio_decoder_opus.h" + +#include +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/audio/audio_frame.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/opus/audio_encoder_opus_config.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "modules/audio_coding/codecs/opus/audio_encoder_opus.h" +#include "modules/audio_coding/test/PCMFile.h" +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/random.h" +#include "test/explicit_key_value_config.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { +namespace { + +using test::ExplicitKeyValueConfig; +using testing::SizeIs; + +using DecodeResult = ::webrtc::AudioDecoder::EncodedAudioFrame::DecodeResult; +using ParseResult = ::webrtc::AudioDecoder::ParseResult; + +constexpr int kSampleRateHz = 48000; + +constexpr int kInputFrameDurationMs = 10; +constexpr int kInputFrameLength = kInputFrameDurationMs * kSampleRateHz / 1000; + +constexpr int kEncoderFrameDurationMs = 20; +constexpr int kEncoderFrameLength = + kEncoderFrameDurationMs * kSampleRateHz / 1000; + +constexpr int kPayloadType = 123; + +AudioEncoderOpusConfig GetEncoderConfig(int num_channels, bool dtx_enabled) { + AudioEncoderOpusConfig config; + + config.frame_size_ms = kEncoderFrameDurationMs; + config.sample_rate_hz = kSampleRateHz; + config.num_channels = num_channels; + config.application = AudioEncoderOpusConfig::ApplicationMode::kVoip; + config.bitrate_bps = 32000; + config.fec_enabled = false; + config.cbr_enabled = false; + config.max_playback_rate_hz = kSampleRateHz; + config.complexity = 10; + 
config.dtx_enabled = dtx_enabled; + + return config; +} + +class WhiteNoiseGenerator { + public: + explicit WhiteNoiseGenerator(double amplitude_dbfs) + : amplitude_( + saturated_cast(std::pow(10, amplitude_dbfs / 20) * + std::numeric_limits::max())), + random_generator_(42) {} + + void GenerateNextFrame(ArrayView frame) { + for (size_t i = 0; i < frame.size(); ++i) { + frame[i] = saturated_cast( + random_generator_.Rand(-amplitude_, amplitude_)); + } + } + + private: + const int32_t amplitude_; + Random random_generator_; +}; + +bool IsZeroedFrame(ArrayView audio) { + for (const int16_t& v : audio) { + if (v != 0) + return false; + } + return true; +} + +bool IsTrivialStereo(ArrayView audio) { + const int num_samples = CheckedDivExact(audio.size(), static_cast(2)); + for (int i = 0, j = 0; i < num_samples; ++i, j += 2) { + if (audio[j] != audio[j + 1]) { + return false; + } + } + return true; +} + +void EncodeDecodeSpeech(AudioEncoderOpusImpl& encoder, + AudioDecoderOpusImpl& decoder, + uint32_t& rtp_timestamp, + uint32_t& timestamp, + int max_frames) { + RTC_CHECK(encoder.NumChannels() == 1 || encoder.NumChannels() == 2); + const bool stereo_encoding = encoder.NumChannels() == 2; + const size_t decoder_num_channels = decoder.Channels(); + std::vector decoded_frame(kEncoderFrameLength * + decoder_num_channels); + + PCMFile pcm_file; + pcm_file.Open(test::ResourcePath( + stereo_encoding ? "near48_stereo" : "near48_mono", "pcm"), + kSampleRateHz, "rb"); + pcm_file.ReadStereo(stereo_encoding); + + AudioFrame audio_frame; + for (int i = 0; i < max_frames; ++i) { + if (pcm_file.EndOfFile()) { + break; + } + pcm_file.Read10MsData(audio_frame); + Buffer payload; + encoder.Encode(rtp_timestamp++, audio_frame.data_view().data(), &payload); + + // Ignore empty payloads: the encoder needs more audio to produce a packet. + if (payload.size() == 0) { + continue; + } + + // Decode. + std::vector parse_results = + decoder.ParsePayload(std::move(payload), timestamp++); + RTC_CHECK_EQ(parse_results.size(), 1); + std::optional decode_results = + parse_results[0].frame->Decode(decoded_frame); + RTC_CHECK(decode_results); + RTC_CHECK_EQ(decode_results->num_decoded_samples, decoded_frame.size()); + } +} + +void EncodeDecodeNoiseUntilDecoderInDtxMode(AudioEncoderOpusImpl& encoder, + AudioDecoderOpusImpl& decoder, + uint32_t& rtp_timestamp, + uint32_t& timestamp) { + WhiteNoiseGenerator generator(/*amplitude_dbfs=*/-70.0); + std::vector input_frame(kInputFrameLength * encoder.NumChannels()); + const size_t decoder_num_channels = decoder.Channels(); + std::vector decoded_frame(kEncoderFrameLength * + decoder_num_channels); + + for (int i = 0; i < 50; ++i) { + generator.GenerateNextFrame(input_frame); + Buffer payload; + const AudioEncoder::EncodedInfo info = + encoder.Encode(rtp_timestamp++, input_frame, &payload); + + // Ignore empty payloads: the encoder needs more audio to produce a packet. + if (payload.size() == 0) { + continue; + } + + // Decode `payload`. If it encodes a DTX packet (i.e., 1 byte payload), the + // decoder will switch to DTX mode. Otherwise, it may update the internal + // decoder parameters for comfort noise generation. 
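On the noise level used by these DTX tests: WhiteNoiseGenerator above maps a dBFS value to a peak int16_t amplitude as 10^(dBFS/20) * 32767, so the -70 dBFS passed in corresponds to a peak of roughly 10, quiet enough for Opus DTX to engage once the encoder has seen speech. A minimal sketch of that conversion (plain cast in place of saturated_cast):

#include <cmath>
#include <cstdint>
#include <limits>

// 10^(-70/20) is about 3.16e-4; times 32767 this is ~10 on the int16_t scale.
int16_t PeakAmplitudeFromDbfs(double amplitude_dbfs) {
  const double linear = std::pow(10.0, amplitude_dbfs / 20.0);
  return static_cast<int16_t>(linear * std::numeric_limits<int16_t>::max());
}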
+ std::vector parse_results = + decoder.ParsePayload(std::move(payload), timestamp++); + RTC_CHECK_EQ(parse_results.size(), 1); + std::optional decode_results = + parse_results[0].frame->Decode(decoded_frame); + RTC_CHECK(decode_results); + RTC_CHECK_EQ(decode_results->num_decoded_samples, decoded_frame.size()); + if (parse_results[0].frame->IsDtxPacket()) { + return; + } + } + RTC_CHECK_NOTREACHED(); +} + +// Generates packets by encoding speech frames and decodes them until a non-DTX +// packet is generated and, when that condition is met, returns the decoded +// audio samples. +std::vector EncodeDecodeSpeechUntilOneFrameIsDecoded( + AudioEncoderOpusImpl& encoder, + AudioDecoderOpusImpl& decoder, + uint32_t& rtp_timestamp, + uint32_t& timestamp) { + RTC_CHECK(encoder.NumChannels() == 1 || encoder.NumChannels() == 2); + const bool stereo_encoding = encoder.NumChannels() == 2; + const size_t decoder_num_channels = decoder.Channels(); + std::vector decoded_frame(kEncoderFrameLength * + decoder_num_channels); + + PCMFile pcm_file; + pcm_file.Open(test::ResourcePath( + stereo_encoding ? "near48_stereo" : "near48_mono", "pcm"), + kSampleRateHz, "rb"); + pcm_file.ReadStereo(stereo_encoding); + + AudioFrame audio_frame; + while (true) { + if (pcm_file.EndOfFile()) { + break; + } + pcm_file.Read10MsData(audio_frame); + Buffer payload; + encoder.Encode(rtp_timestamp++, audio_frame.data_view().data(), &payload); + + // Ignore empty payloads: the encoder needs more audio to produce a packet. + if (payload.size() == 0) { + continue; + } + + // Decode `payload`. + std::vector parse_results = + decoder.ParsePayload(std::move(payload), timestamp++); + RTC_CHECK_EQ(parse_results.size(), 1); + std::optional decode_results = + parse_results[0].frame->Decode(decoded_frame); + RTC_CHECK(decode_results); + + if (parse_results[0].frame->IsDtxPacket()) { + continue; + } + RTC_CHECK_EQ(decode_results->num_decoded_samples, decoded_frame.size()); + return decoded_frame; + } + RTC_CHECK_NOTREACHED(); +} + +} // namespace + +TEST(AudioDecoderOpusTest, MonoEncoderStereoDecoderOutputsTrivialStereo) { + const Environment env = EnvironmentFactory().Create(); + WhiteNoiseGenerator generator(/*amplitude_dbfs=*/-70.0); + std::array input_frame; + // Create a mono encoder. + const AudioEncoderOpusConfig encoder_config = + GetEncoderConfig(/*num_channels=*/1, /*dtx_enabled=*/false); + AudioEncoderOpusImpl encoder(env, encoder_config, kPayloadType); + // Create a stereo decoder. + constexpr size_t kDecoderNumChannels = 2; + AudioDecoderOpusImpl decoder(env.field_trials(), kDecoderNumChannels, + kSampleRateHz); + std::array decoded_frame; + + uint32_t rtp_timestamp = 0xFFFu; + uint32_t timestamp = 0; + for (int i = 0; i < 30; ++i) { + generator.GenerateNextFrame(input_frame); + Buffer payload; + encoder.Encode(rtp_timestamp++, input_frame, &payload); + if (payload.size() == 0) { + continue; + } + + // Decode. + std::vector parse_results = + decoder.ParsePayload(std::move(payload), timestamp++); + RTC_CHECK_EQ(parse_results.size(), 1); + std::optional decode_results = + parse_results[0].frame->Decode(decoded_frame); + RTC_CHECK(decode_results); + RTC_CHECK_EQ(decode_results->num_decoded_samples, decoded_frame.size()); + + EXPECT_TRUE(IsTrivialStereo(decoded_frame)); + } +} + +TEST(AudioDecoderOpusTest, + MonoEncoderStereoDecoderOutputsTrivialStereoComfortNoise) { + const Environment env = EnvironmentFactory().Create(); + // Create a mono encoder. 
+ const AudioEncoderOpusConfig encoder_config = + GetEncoderConfig(/*num_channels=*/1, /*dtx_enabled=*/true); + AudioEncoderOpusImpl encoder(env, encoder_config, kPayloadType); + // Create a stereo decoder. + constexpr size_t kDecoderNumChannels = 2; + AudioDecoderOpusImpl decoder(env.field_trials(), kDecoderNumChannels, + kSampleRateHz); + std::vector decoded_frame; + + uint32_t rtp_timestamp = 0xFFFu; + uint32_t timestamp = 0; + // Feed the encoder with speech, otherwise DTX will never kick in. + EncodeDecodeSpeech(encoder, decoder, rtp_timestamp, timestamp, + /*max_frames=*/100); + // Feed the encoder with noise until the decoder is in DTX mode. + EncodeDecodeNoiseUntilDecoderInDtxMode(encoder, decoder, rtp_timestamp, + timestamp); + + // Decode an empty packet so that Opus generates comfort noise. + decoded_frame.resize(kEncoderFrameLength * kDecoderNumChannels); + AudioDecoder::SpeechType speech_type; + const int num_decoded_samples = + decoder.Decode(/*encoded=*/nullptr, /*encoded_len=*/0, kSampleRateHz, + decoded_frame.size(), decoded_frame.data(), &speech_type); + ASSERT_EQ(speech_type, AudioDecoder::SpeechType::kComfortNoise); + RTC_CHECK_GT(num_decoded_samples, 0); + RTC_CHECK_LE(num_decoded_samples, decoded_frame.size()); + ArrayView decoded_view(decoded_frame.data(), + num_decoded_samples); + // Make sure that comfort noise is not a muted frame. + ASSERT_FALSE(IsZeroedFrame(decoded_view)); + EXPECT_TRUE(IsTrivialStereo(decoded_view)); + + // Also check the first decoded audio frame after comfort noise. + decoded_frame = EncodeDecodeSpeechUntilOneFrameIsDecoded( + encoder, decoder, rtp_timestamp, timestamp); + ASSERT_THAT(decoded_frame, SizeIs(kDecoderNumChannels * kEncoderFrameLength)); + ASSERT_FALSE(IsZeroedFrame(decoded_frame)); + EXPECT_TRUE(IsTrivialStereo(decoded_frame)); +} + +TEST(AudioDecoderOpusTest, MonoEncoderStereoDecoderOutputsTrivialStereoPlc) { + const ExplicitKeyValueConfig trials("WebRTC-Audio-OpusGeneratePlc/Enabled/"); + EnvironmentFactory env_factory; + env_factory.Set(&trials); + const Environment env = env_factory.Create(); + // Create a mono encoder. + const AudioEncoderOpusConfig encoder_config = + GetEncoderConfig(/*num_channels=*/1, /*dtx_enabled=*/false); + AudioEncoderOpusImpl encoder(env, encoder_config, kPayloadType); + // Create a stereo decoder. + constexpr size_t kDecoderNumChannels = 2; + AudioDecoderOpusImpl decoder(env.field_trials(), kDecoderNumChannels, + kSampleRateHz); + + uint32_t rtp_timestamp = 0xFFFu; + uint32_t timestamp = 0; + // Feed the encoder with speech. + EncodeDecodeSpeech(encoder, decoder, rtp_timestamp, timestamp, + /*max_frames=*/100); + + // Generate packet loss concealment. + BufferT concealment_audio; + constexpr int kIgnored = 123; + decoder.GeneratePlc(/*requested_samples_per_channel=*/kIgnored, + &concealment_audio); + RTC_CHECK_GT(concealment_audio.size(), 0); + ArrayView decoded_view(concealment_audio.data(), + concealment_audio.size()); + // Make sure that packet loss concealment is not a muted frame. + ASSERT_FALSE(IsZeroedFrame(decoded_view)); + EXPECT_TRUE(IsTrivialStereo(decoded_view)); + + // Also check the first decoded audio frame after packet loss concealment. 
+ std::vector decoded_frame = EncodeDecodeSpeechUntilOneFrameIsDecoded( + encoder, decoder, rtp_timestamp, timestamp); + ASSERT_THAT(decoded_frame, SizeIs(kDecoderNumChannels * kEncoderFrameLength)); + ASSERT_FALSE(IsZeroedFrame(decoded_frame)); + EXPECT_TRUE(IsTrivialStereo(decoded_frame)); +} + +TEST(AudioDecoderOpusTest, + StereoEncoderStereoDecoderOutputsNonTrivialStereoComfortNoise) { + const Environment env = EnvironmentFactory().Create(); + // Create a stereo encoder. + const AudioEncoderOpusConfig encoder_config = + GetEncoderConfig(/*num_channels=*/2, /*dtx_enabled=*/true); + AudioEncoderOpusImpl encoder(env, encoder_config, kPayloadType); + // Create a stereo decoder. + constexpr size_t kDecoderNumChannels = 2; + AudioDecoderOpusImpl decoder(env.field_trials(), kDecoderNumChannels, + kSampleRateHz); + + uint32_t rtp_timestamp = 0xFFFu; + uint32_t timestamp = 0; + // Feed the encoder with speech, otherwise DTX will never kick in. + EncodeDecodeSpeech(encoder, decoder, rtp_timestamp, timestamp, + /*max_frames=*/100); + // Feed the encoder with noise and decode until the decoder is in DTX mode. + EncodeDecodeNoiseUntilDecoderInDtxMode(encoder, decoder, rtp_timestamp, + timestamp); + + // Decode an empty packet so that Opus generates comfort noise. + std::array decoded_frame; + AudioDecoder::SpeechType speech_type; + const int num_decoded_samples = + decoder.Decode(/*encoded=*/nullptr, /*encoded_len=*/0, kSampleRateHz, + decoded_frame.size(), decoded_frame.data(), &speech_type); + ASSERT_EQ(speech_type, AudioDecoder::SpeechType::kComfortNoise); + RTC_CHECK_GT(num_decoded_samples, 0); + RTC_CHECK_LE(num_decoded_samples, decoded_frame.size()); + ArrayView decoded_view(decoded_frame.data(), + num_decoded_samples); + // Make sure that comfort noise is not a muted frame. + ASSERT_FALSE(IsZeroedFrame(decoded_view)); + + EXPECT_FALSE(IsTrivialStereo(decoded_view)); +} + +TEST(AudioDecoderOpusTest, + StereoEncoderStereoDecoderOutputsNonTrivialStereoPlc) { + const ExplicitKeyValueConfig trials("WebRTC-Audio-OpusGeneratePlc/Enabled/"); + EnvironmentFactory env_factory; + env_factory.Set(&trials); + const Environment env = env_factory.Create(); + // Create a stereo encoder. + const AudioEncoderOpusConfig encoder_config = + GetEncoderConfig(/*num_channels=*/2, /*dtx_enabled=*/false); + AudioEncoderOpusImpl encoder(env, encoder_config, kPayloadType); + // Create a stereo decoder. + constexpr size_t kDecoderNumChannels = 2; + AudioDecoderOpusImpl decoder(env.field_trials(), kDecoderNumChannels, + kSampleRateHz); + + uint32_t rtp_timestamp = 0xFFFu; + uint32_t timestamp = 0; + // Feed the encoder with speech. + EncodeDecodeSpeech(encoder, decoder, rtp_timestamp, timestamp, + /*max_frames=*/100); + + // Generate packet loss concealment. + BufferT concealment_audio; + constexpr int kIgnored = 123; + decoder.GeneratePlc(/*requested_samples_per_channel=*/kIgnored, + &concealment_audio); + RTC_CHECK_GT(concealment_audio.size(), 0); + ArrayView decoded_view(concealment_audio.data(), + concealment_audio.size()); + // Make sure that packet loss concealment is not a muted frame. 
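The concealment audio checked here comes from the GeneratePlc() override added to AudioDecoderOpusImpl earlier in this patch: with the "WebRTC-Audio-OpusGeneratePlc" trial enabled, the decoder asks Opus for a concealment frame by decoding a null payload of length zero. A trimmed-down sketch of that call pattern, using the WebRtcOpus_* wrappers that appear in this patch (the free function and simplified buffer handling are illustrative, not the actual member function):

#include <cstddef>
#include <cstdint>
#include <vector>

#include "modules/audio_coding/codecs/opus/opus_interface.h"

std::vector<int16_t> GenerateOpusConcealment(OpusDecInst* dec_state,
                                             size_t num_channels) {
  // Concealment duration reported by Opus, scaled by the channel count as in
  // the GeneratePlc() hunk above.
  const size_t plc_size = WebRtcOpus_PlcDuration(dec_state) * num_channels;
  std::vector<int16_t> concealment(plc_size);
  int16_t speech_type = 1;
  // A null payload of length zero makes Opus extrapolate from the last
  // decoded frame, i.e. produce packet loss concealment.
  const int ret = WebRtcOpus_Decode(dec_state, nullptr, 0, concealment.data(),
                                    &speech_type);
  return ret < 0 ? std::vector<int16_t>() : concealment;
}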
+ ASSERT_FALSE(IsZeroedFrame(decoded_view)); + + EXPECT_FALSE(IsTrivialStereo(decoded_view)); +} + +} // namespace webrtc diff --git a/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.cc b/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.cc index 38a11c123d..13d4cd7826 100644 --- a/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.cc +++ b/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.cc @@ -20,15 +20,17 @@ #include "modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.h" #include +#include #include #include #include #include "absl/strings/match.h" +#include "api/audio_codecs/opus/audio_encoder_opus_config.h" #include "modules/audio_coding/codecs/opus/audio_coder_opus_common.h" -#include "rtc_base/arraysize.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/string_to_number.h" namespace webrtc { @@ -86,11 +88,11 @@ int GetFrameSizeMs(const SdpAudioFormat& format) { int CalculateDefaultBitrate(int max_playback_rate, size_t num_channels) { const int bitrate = [&] { if (max_playback_rate <= 8000) { - return kOpusBitrateNbBps * rtc::dchecked_cast(num_channels); + return kOpusBitrateNbBps * dchecked_cast(num_channels); } else if (max_playback_rate <= 16000) { - return kOpusBitrateWbBps * rtc::dchecked_cast(num_channels); + return kOpusBitrateWbBps * dchecked_cast(num_channels); } else { - return kOpusBitrateFbBps * rtc::dchecked_cast(num_channels); + return kOpusBitrateFbBps * dchecked_cast(num_channels); } }(); RTC_DCHECK_GE(bitrate, AudioEncoderMultiChannelOpusConfig::kMinBitrateBps); @@ -101,12 +103,12 @@ int CalculateDefaultBitrate(int max_playback_rate, size_t num_channels) { // out how invalid it is and accurately log invalid values. 
int CalculateBitrate(int max_playback_rate_hz, size_t num_channels, - absl::optional bitrate_param) { + std::optional bitrate_param) { const int default_bitrate = CalculateDefaultBitrate(max_playback_rate_hz, num_channels); if (bitrate_param) { - const auto bitrate = rtc::StringToNumber(*bitrate_param); + const auto bitrate = StringToNumber(*bitrate_param); if (bitrate) { const int chosen_bitrate = std::max(AudioEncoderOpusConfig::kMinBitrateBps, @@ -165,7 +167,7 @@ void AudioEncoderMultiChannelOpusImpl::Reset() { RTC_CHECK(RecreateEncoderInstance(config_)); } -absl::optional> +std::optional> AudioEncoderMultiChannelOpusImpl::GetFrameLengthRange() const { return {{TimeDelta::Millis(config_.frame_size_ms), TimeDelta::Millis(config_.frame_size_ms)}}; @@ -235,11 +237,11 @@ bool AudioEncoderMultiChannelOpusImpl::RecreateEncoderInstance( return true; } -absl::optional +std::optional AudioEncoderMultiChannelOpusImpl::SdpToConfig(const SdpAudioFormat& format) { if (!absl::EqualsIgnoreCase(format.name, "multiopus") || format.clockrate_hz != 48000) { - return absl::nullopt; + return std::nullopt; } AudioEncoderMultiChannelOpusConfig config; @@ -264,25 +266,25 @@ AudioEncoderMultiChannelOpusImpl::SdpToConfig(const SdpAudioFormat& format) { auto num_streams = GetFormatParameter(format, "num_streams"); if (!num_streams.has_value()) { - return absl::nullopt; + return std::nullopt; } config.num_streams = *num_streams; auto coupled_streams = GetFormatParameter(format, "coupled_streams"); if (!coupled_streams.has_value()) { - return absl::nullopt; + return std::nullopt; } config.coupled_streams = *coupled_streams; auto channel_mapping = GetFormatParameter>(format, "channel_mapping"); if (!channel_mapping.has_value()) { - return absl::nullopt; + return std::nullopt; } config.channel_mapping = *channel_mapping; if (!config.IsOk()) { - return absl::nullopt; + return std::nullopt; } return config; } @@ -299,10 +301,10 @@ AudioCodecInfo AudioEncoderMultiChannelOpusImpl::QueryAudioEncoder( } size_t AudioEncoderMultiChannelOpusImpl::Num10msFramesPerPacket() const { - return static_cast(rtc::CheckedDivExact(config_.frame_size_ms, 10)); + return static_cast(CheckedDivExact(config_.frame_size_ms, 10)); } size_t AudioEncoderMultiChannelOpusImpl::SamplesPer10msFrame() const { - return rtc::CheckedDivExact(48000, 100) * config_.num_channels; + return CheckedDivExact(48000, 100) * config_.num_channels; } int AudioEncoderMultiChannelOpusImpl::SampleRateHz() const { return 48000; @@ -322,8 +324,8 @@ int AudioEncoderMultiChannelOpusImpl::GetTargetBitrate() const { AudioEncoder::EncodedInfo AudioEncoderMultiChannelOpusImpl::EncodeImpl( uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) { + ArrayView audio, + Buffer* encoded) { if (input_buffer_.empty()) first_timestamp_in_buffer_ = rtp_timestamp; @@ -337,12 +339,12 @@ AudioEncoder::EncodedInfo AudioEncoderMultiChannelOpusImpl::EncodeImpl( const size_t max_encoded_bytes = SufficientOutputBufferSize(); EncodedInfo info; - info.encoded_bytes = encoded->AppendData( - max_encoded_bytes, [&](rtc::ArrayView encoded) { + info.encoded_bytes = + encoded->AppendData(max_encoded_bytes, [&](ArrayView encoded) { int status = WebRtcOpus_Encode( inst_, &input_buffer_[0], - rtc::CheckedDivExact(input_buffer_.size(), config_.num_channels), - rtc::saturated_cast(max_encoded_bytes), encoded.data()); + CheckedDivExact(input_buffer_.size(), config_.num_channels), + saturated_cast(max_encoded_bytes), encoded.data()); RTC_CHECK_GE(status, 0); // Fails only if fed invalid 
data. diff --git a/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.h b/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.h index 8a7210515c..9e6b2a582b 100644 --- a/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.h +++ b/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.h @@ -11,16 +11,21 @@ #ifndef MODULES_AUDIO_CODING_CODECS_OPUS_AUDIO_ENCODER_MULTI_CHANNEL_OPUS_IMPL_H_ #define MODULES_AUDIO_CODING_CODECS_OPUS_AUDIO_ENCODER_MULTI_CHANNEL_OPUS_IMPL_H_ +#include +#include + #include +#include #include #include -#include "absl/types/optional.h" +#include "api/array_view.h" #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.h" #include "api/units/time_delta.h" #include "modules/audio_coding/codecs/opus/opus_interface.h" +#include "rtc_base/buffer.h" namespace webrtc { @@ -40,7 +45,7 @@ class AudioEncoderMultiChannelOpusImpl final : public AudioEncoder { // Static interface for use by BuiltinAudioEncoderFactory. static constexpr const char* GetPayloadName() { return "multiopus"; } - static absl::optional QueryAudioEncoder( + static std::optional QueryAudioEncoder( const SdpAudioFormat& format); int SampleRateHz() const override; @@ -50,16 +55,16 @@ class AudioEncoderMultiChannelOpusImpl final : public AudioEncoder { int GetTargetBitrate() const override; void Reset() override; - absl::optional> GetFrameLengthRange() + std::optional> GetFrameLengthRange() const override; protected: EncodedInfo EncodeImpl(uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) override; + ArrayView audio, + Buffer* encoded) override; private: - static absl::optional SdpToConfig( + static std::optional SdpToConfig( const SdpAudioFormat& format); static AudioCodecInfo QueryAudioEncoder( const AudioEncoderMultiChannelOpusConfig& config); diff --git a/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_unittest.cc b/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_unittest.cc index 92f6f2c169..7601589557 100644 --- a/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_unittest.cc +++ b/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_unittest.cc @@ -26,7 +26,7 @@ TEST(AudioEncoderMultiOpusTest, CheckConfigValidity) { {{"channel_mapping", "3,0"}, {"coupled_streams", "1"}, {"num_streams", "2"}}); - const absl::optional encoder_config = + const std::optional encoder_config = AudioEncoderMultiChannelOpus::SdpToConfig(sdp_format); // Maps input channel 0 to coded channel 3, which doesn't exist. @@ -38,7 +38,7 @@ TEST(AudioEncoderMultiOpusTest, CheckConfigValidity) { {{"channel_mapping", "0"}, {"coupled_streams", "1"}, {"num_streams", "2"}}); - const absl::optional encoder_config = + const std::optional encoder_config = AudioEncoderMultiChannelOpus::SdpToConfig(sdp_format); // The mapping is too short. @@ -49,7 +49,7 @@ TEST(AudioEncoderMultiOpusTest, CheckConfigValidity) { {{"channel_mapping", "0,0,0"}, {"coupled_streams", "0"}, {"num_streams", "1"}}); - const absl::optional encoder_config = + const std::optional encoder_config = AudioEncoderMultiChannelOpus::SdpToConfig(sdp_format); // Coded channel 0 comes from both input channels 0, 1 and 2. 
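The CheckConfigValidity cases above and just below spell out the constraints a multiopus channel_mapping has to satisfy. A compact sketch of those rules as they can be inferred from the tests (an approximation, not the actual AudioEncoderMultiChannelOpusConfig::IsOk(); the case below where coded channel 1 is never referenced shows the real check is stricter still):

#include <cstddef>
#include <set>
#include <vector>

// Rules inferred from the tests: one mapping entry per input channel; each
// entry is either 255 ("unused") or a coded channel index smaller than
// num_streams + coupled_streams; no coded channel is fed by two inputs.
bool IsPlausibleChannelMapping(const std::vector<unsigned char>& mapping,
                               size_t num_channels,
                               int num_streams,
                               int coupled_streams) {
  if (mapping.size() != num_channels) {
    return false;  // e.g. "0" for a 2-channel format: mapping too short.
  }
  const int num_coded_channels = num_streams + coupled_streams;
  std::set<unsigned char> used;
  for (unsigned char coded : mapping) {
    if (coded == 255) {
      continue;  // 255 marks an input channel that is not coded.
    }
    if (coded >= num_coded_channels) {
      return false;  // e.g. "3,0" when only coded channels 0..2 exist.
    }
    if (!used.insert(coded).second) {
      return false;  // e.g. "0,0,0": coded channel 0 fed by three inputs.
    }
  }
  return true;
}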
@@ -60,7 +60,7 @@ TEST(AudioEncoderMultiOpusTest, CheckConfigValidity) { {{"channel_mapping", "0,255,255"}, {"coupled_streams", "0"}, {"num_streams", "1"}}); - const absl::optional encoder_config = + const std::optional encoder_config = AudioEncoderMultiChannelOpus::SdpToConfig(sdp_format); ASSERT_TRUE(encoder_config.has_value()); @@ -72,7 +72,7 @@ TEST(AudioEncoderMultiOpusTest, CheckConfigValidity) { {{"channel_mapping", "0,255,255"}, {"coupled_streams", "0"}, {"num_streams", "2"}}); - const absl::optional encoder_config = + const std::optional encoder_config = AudioEncoderMultiChannelOpus::SdpToConfig(sdp_format); // This is NOT fine, because channels nothing says how coded channel 1 @@ -90,7 +90,7 @@ TEST(AudioEncoderMultiOpusTest, ConfigValuesAreParsedCorrectly) { {"channel_mapping", "0,4,1,2,3,5"}, {"num_streams", "4"}, {"coupled_streams", "2"}}}); - const absl::optional encoder_config = + const std::optional encoder_config = AudioEncoderMultiChannelOpus::SdpToConfig(sdp_format); ASSERT_TRUE(encoder_config.has_value()); @@ -107,7 +107,7 @@ TEST(AudioEncoderMultiOpusTest, CreateFromValidConfig) { {{"channel_mapping", "0,255,255"}, {"coupled_streams", "0"}, {"num_streams", "2"}}); - const absl::optional encoder_config = + const std::optional encoder_config = AudioEncoderMultiChannelOpus::SdpToConfig(sdp_format); ASSERT_FALSE(encoder_config.has_value()); } @@ -116,7 +116,7 @@ TEST(AudioEncoderMultiOpusTest, CreateFromValidConfig) { {{"channel_mapping", "1,255,0"}, {"coupled_streams", "1"}, {"num_streams", "1"}}); - const absl::optional encoder_config = + const std::optional encoder_config = AudioEncoderMultiChannelOpus::SdpToConfig(sdp_format); ASSERT_TRUE(encoder_config.has_value()); @@ -141,7 +141,7 @@ TEST(AudioEncoderMultiOpusTest, AdvertisedCodecsCanBeCreated) { EXPECT_FALSE(specs.empty()); for (const AudioCodecSpec& spec : specs) { - const absl::optional encoder_config = + const std::optional encoder_config = AudioEncoderMultiChannelOpus::SdpToConfig(spec.format); ASSERT_TRUE(encoder_config.has_value()); diff --git a/modules/audio_coding/codecs/opus/audio_encoder_opus.cc b/modules/audio_coding/codecs/opus/audio_encoder_opus.cc index 51b0fcd492..dbab35d4a8 100644 --- a/modules/audio_coding/codecs/opus/audio_encoder_opus.cc +++ b/modules/audio_coding/codecs/opus/audio_encoder_opus.cc @@ -16,8 +16,10 @@ #include #include +#include "absl/memory/memory.h" #include "absl/strings/match.h" #include "absl/strings/string_view.h" +#include "api/field_trials_view.h" #include "modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.h" #include "modules/audio_coding/audio_network_adaptor/controller_manager.h" #include "modules/audio_coding/codecs/opus/audio_coder_opus_common.h" @@ -31,7 +33,6 @@ #include "rtc_base/string_encode.h" #include "rtc_base/string_to_number.h" #include "rtc_base/time_utils.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { @@ -72,11 +73,11 @@ constexpr float kMaxPacketLossFraction = 0.2f; int CalculateDefaultBitrate(int max_playback_rate, size_t num_channels) { const int bitrate = [&] { if (max_playback_rate <= 8000) { - return kOpusBitrateNbBps * rtc::dchecked_cast(num_channels); + return kOpusBitrateNbBps * dchecked_cast(num_channels); } else if (max_playback_rate <= 16000) { - return kOpusBitrateWbBps * rtc::dchecked_cast(num_channels); + return kOpusBitrateWbBps * dchecked_cast(num_channels); } else { - return kOpusBitrateFbBps * rtc::dchecked_cast(num_channels); + return kOpusBitrateFbBps * dchecked_cast(num_channels); } }(); 
RTC_DCHECK_GE(bitrate, AudioEncoderOpusConfig::kMinBitrateBps); @@ -88,12 +89,12 @@ int CalculateDefaultBitrate(int max_playback_rate, size_t num_channels) { // out how invalid it is and accurately log invalid values. int CalculateBitrate(int max_playback_rate_hz, size_t num_channels, - absl::optional bitrate_param) { + std::optional bitrate_param) { const int default_bitrate = CalculateDefaultBitrate(max_playback_rate_hz, num_channels); if (bitrate_param) { - const auto bitrate = rtc::StringToNumber(*bitrate_param); + const auto bitrate = StringToNumber(*bitrate_param); if (bitrate) { const int chosen_bitrate = std::max(AudioEncoderOpusConfig::kMinBitrateBps, @@ -163,16 +164,16 @@ int GetBitrateBps(const AudioEncoderOpusConfig& config) { return *config.bitrate_bps; } -std::vector GetBitrateMultipliers() { +std::vector GetBitrateMultipliers(const FieldTrialsView& field_trials) { constexpr char kBitrateMultipliersName[] = "WebRTC-Audio-OpusBitrateMultipliers"; const bool use_bitrate_multipliers = - webrtc::field_trial::IsEnabled(kBitrateMultipliersName); + field_trials.IsEnabled(kBitrateMultipliersName); if (use_bitrate_multipliers) { const std::string field_trial_string = - webrtc::field_trial::FindFullName(kBitrateMultipliersName); + field_trials.Lookup(kBitrateMultipliersName); std::vector pieces; - rtc::tokenize(field_trial_string, '-', &pieces); + tokenize(field_trial_string, '-', &pieces); if (pieces.size() < 2 || pieces[0] != "Enabled") { RTC_LOG(LS_WARNING) << "Invalid parameters for " << kBitrateMultipliersName @@ -181,7 +182,7 @@ std::vector GetBitrateMultipliers() { } std::vector multipliers(pieces.size() - 1); for (size_t i = 1; i < pieces.size(); i++) { - if (!rtc::FromString(pieces[i], &multipliers[i - 1])) { + if (!FromString(pieces[i], &multipliers[i - 1])) { RTC_LOG(LS_WARNING) << "Invalid parameters for " << kBitrateMultipliersName << ", not using custom values."; @@ -227,21 +228,11 @@ AudioCodecInfo AudioEncoderOpusImpl::QueryAudioEncoder( return info; } -std::unique_ptr AudioEncoderOpusImpl::MakeAudioEncoder( - const AudioEncoderOpusConfig& config, - int payload_type) { - if (!config.IsOk()) { - RTC_DCHECK_NOTREACHED(); - return nullptr; - } - return std::make_unique(config, payload_type); -} - -absl::optional AudioEncoderOpusImpl::SdpToConfig( +std::optional AudioEncoderOpusImpl::SdpToConfig( const SdpAudioFormat& format) { if (!absl::EqualsIgnoreCase(format.name, "opus") || format.clockrate_hz != kRtpTimestampRateHz || format.num_channels != 2) { - return absl::nullopt; + return std::nullopt; } AudioEncoderOpusConfig config; @@ -254,6 +245,7 @@ absl::optional AudioEncoderOpusImpl::SdpToConfig( config.bitrate_bps = CalculateBitrate(config.max_playback_rate_hz, config.num_channels, GetFormatParameter(format, "maxaveragebitrate")); + // TODO: https://issues.webrtc.org/376071290 - Use `contentHint` if available. config.application = config.num_channels == 1 ? 
AudioEncoderOpusConfig::ApplicationMode::kVoip : AudioEncoderOpusConfig::ApplicationMode::kAudio; @@ -274,12 +266,12 @@ absl::optional AudioEncoderOpusImpl::SdpToConfig( &config.supported_frame_lengths_ms); if (!config.IsOk()) { RTC_DCHECK_NOTREACHED(); - return absl::nullopt; + return std::nullopt; } return config; } -absl::optional AudioEncoderOpusImpl::GetNewComplexity( +std::optional AudioEncoderOpusImpl::GetNewComplexity( const AudioEncoderOpusConfig& config) { RTC_DCHECK(config.IsOk()); const int bitrate_bps = GetBitrateBps(config); @@ -288,7 +280,7 @@ absl::optional AudioEncoderOpusImpl::GetNewComplexity( bitrate_bps <= config.complexity_threshold_bps + config.complexity_threshold_window_bps) { // Within the hysteresis window; make no change. - return absl::nullopt; + return std::nullopt; } else { return bitrate_bps <= config.complexity_threshold_bps ? config.low_rate_complexity @@ -296,7 +288,7 @@ absl::optional AudioEncoderOpusImpl::GetNewComplexity( } } -absl::optional AudioEncoderOpusImpl::GetNewBandwidth( +std::optional AudioEncoderOpusImpl::GetNewBandwidth( const AudioEncoderOpusConfig& config, OpusEncInst* inst) { constexpr int kMinWidebandBitrate = 8000; @@ -305,34 +297,34 @@ absl::optional AudioEncoderOpusImpl::GetNewBandwidth( RTC_DCHECK(config.IsOk()); const int bitrate = GetBitrateBps(config); if (bitrate > kAutomaticThreshold) { - return absl::optional(OPUS_AUTO); + return std::optional(OPUS_AUTO); } const int bandwidth = WebRtcOpus_GetBandwidth(inst); RTC_DCHECK_GE(bandwidth, 0); if (bitrate > kMaxNarrowbandBitrate && bandwidth < OPUS_BANDWIDTH_WIDEBAND) { - return absl::optional(OPUS_BANDWIDTH_WIDEBAND); + return std::optional(OPUS_BANDWIDTH_WIDEBAND); } else if (bitrate < kMinWidebandBitrate && bandwidth > OPUS_BANDWIDTH_NARROWBAND) { - return absl::optional(OPUS_BANDWIDTH_NARROWBAND); + return std::optional(OPUS_BANDWIDTH_NARROWBAND); } - return absl::optional(); + return std::optional(); } class AudioEncoderOpusImpl::PacketLossFractionSmoother { public: explicit PacketLossFractionSmoother() - : last_sample_time_ms_(rtc::TimeMillis()), + : last_sample_time_ms_(TimeMillis()), smoother_(kAlphaForPacketLossFractionSmoother) {} // Gets the smoothed packet loss fraction. float GetAverage() const { float value = smoother_.filtered(); - return (value == rtc::ExpFilter::kValueUndefined) ? 0.0f : value; + return (value == ExpFilter::kValueUndefined) ? 0.0f : value; } // Add new observation to the packet loss fraction smoother. void AddSample(float packet_loss_fraction) { - int64_t now_ms = rtc::TimeMillis(); + int64_t now_ms = TimeMillis(); smoother_.Apply(static_cast(now_ms - last_sample_time_ms_), packet_loss_fraction); last_sample_time_ms_ = now_ms; @@ -342,38 +334,52 @@ class AudioEncoderOpusImpl::PacketLossFractionSmoother { int64_t last_sample_time_ms_; // An exponential filter is used to smooth the packet loss fraction. - rtc::ExpFilter smoother_; + ExpFilter smoother_; }; -AudioEncoderOpusImpl::AudioEncoderOpusImpl(const AudioEncoderOpusConfig& config, +std::unique_ptr AudioEncoderOpusImpl::CreateForTesting( + const Environment& env, + const AudioEncoderOpusConfig& config, + int payload_type, + const AudioNetworkAdaptorCreator& audio_network_adaptor_creator, + std::unique_ptr bitrate_smoother) { + // Using `new` to access a non-public constructor. 
+ return absl::WrapUnique(new AudioEncoderOpusImpl( + env, config, payload_type, audio_network_adaptor_creator, + std::move(bitrate_smoother))); +} + +AudioEncoderOpusImpl::AudioEncoderOpusImpl(const Environment& env, + const AudioEncoderOpusConfig& config, int payload_type) : AudioEncoderOpusImpl( + env, config, payload_type, [this](absl::string_view config_string, RtcEventLog* event_log) { return DefaultAudioNetworkAdaptorCreator(config_string, event_log); }, // We choose 5sec as initial time constant due to empirical data. - std::make_unique(5000)) {} + std::make_unique(5'000)) {} AudioEncoderOpusImpl::AudioEncoderOpusImpl( + const Environment& env, const AudioEncoderOpusConfig& config, int payload_type, const AudioNetworkAdaptorCreator& audio_network_adaptor_creator, std::unique_ptr bitrate_smoother) : payload_type_(payload_type), - use_stable_target_for_adaptation_(!webrtc::field_trial::IsDisabled( + use_stable_target_for_adaptation_(!env.field_trials().IsDisabled( "WebRTC-Audio-StableTargetAdaptation")), adjust_bandwidth_( - webrtc::field_trial::IsEnabled("WebRTC-AdjustOpusBandwidth")), + env.field_trials().IsEnabled("WebRTC-AdjustOpusBandwidth")), bitrate_changed_(true), - bitrate_multipliers_(GetBitrateMultipliers()), + bitrate_multipliers_(GetBitrateMultipliers(env.field_trials())), packet_loss_rate_(0.0), inst_(nullptr), packet_loss_fraction_smoother_(new PacketLossFractionSmoother()), audio_network_adaptor_creator_(audio_network_adaptor_creator), - bitrate_smoother_(std::move(bitrate_smoother)), - consecutive_dtx_frames_(0) { + bitrate_smoother_(std::move(bitrate_smoother)) { RTC_DCHECK(0 <= payload_type && payload_type <= 127); // Sanity check of the redundant payload type field that we want to get rid @@ -381,13 +387,10 @@ AudioEncoderOpusImpl::AudioEncoderOpusImpl( RTC_CHECK(config.payload_type == -1 || config.payload_type == payload_type); RTC_CHECK(RecreateEncoderInstance(config)); + SetProjectedPacketLossRate(packet_loss_rate_); } -AudioEncoderOpusImpl::AudioEncoderOpusImpl(int payload_type, - const SdpAudioFormat& format) - : AudioEncoderOpusImpl(*SdpToConfig(format), payload_type) {} - AudioEncoderOpusImpl::~AudioEncoderOpusImpl() { RTC_CHECK_EQ(0, WebRtcOpus_EncoderFree(inst_)); } @@ -494,8 +497,8 @@ void AudioEncoderOpusImpl::OnReceivedTargetAudioBitrate( void AudioEncoderOpusImpl::OnReceivedUplinkBandwidth( int target_audio_bitrate_bps, - absl::optional bwe_period_ms, - absl::optional stable_target_bitrate_bps) { + std::optional bwe_period_ms, + std::optional stable_target_bitrate_bps) { if (audio_network_adaptor_) { audio_network_adaptor_->SetTargetAudioBitrate(target_audio_bitrate_bps); if (use_stable_target_for_adaptation_) { @@ -536,9 +539,9 @@ void AudioEncoderOpusImpl::OnReceivedUplinkBandwidth( } void AudioEncoderOpusImpl::OnReceivedUplinkBandwidth( int target_audio_bitrate_bps, - absl::optional bwe_period_ms) { + std::optional bwe_period_ms) { OnReceivedUplinkBandwidth(target_audio_bitrate_bps, bwe_period_ms, - absl::nullopt); + std::nullopt); } void AudioEncoderOpusImpl::OnReceivedUplinkAllocation( @@ -577,8 +580,8 @@ void AudioEncoderOpusImpl::SetReceiverFrameLengthRange( AudioEncoder::EncodedInfo AudioEncoderOpusImpl::EncodeImpl( uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) { + ArrayView audio, + Buffer* encoded) { MaybeUpdateUplinkBandwidth(); if (input_buffer_.empty()) @@ -594,12 +597,12 @@ AudioEncoder::EncodedInfo AudioEncoderOpusImpl::EncodeImpl( const size_t max_encoded_bytes = SufficientOutputBufferSize(); EncodedInfo info; 
- info.encoded_bytes = encoded->AppendData( - max_encoded_bytes, [&](rtc::ArrayView encoded) { + info.encoded_bytes = + encoded->AppendData(max_encoded_bytes, [&](ArrayView encoded) { int status = WebRtcOpus_Encode( inst_, &input_buffer_[0], - rtc::CheckedDivExact(input_buffer_.size(), config_.num_channels), - rtc::saturated_cast(max_encoded_bytes), encoded.data()); + CheckedDivExact(input_buffer_.size(), config_.num_channels), + saturated_cast(max_encoded_bytes), encoded.data()); RTC_CHECK_GE(status, 0); // Fails only if fed invalid data. @@ -607,8 +610,6 @@ AudioEncoder::EncodedInfo AudioEncoderOpusImpl::EncodeImpl( }); input_buffer_.clear(); - bool dtx_frame = (info.encoded_bytes <= 2); - // Will use new packet size for next encoding. config_.frame_size_ms = next_frame_length_ms_; @@ -623,25 +624,18 @@ AudioEncoder::EncodedInfo AudioEncoderOpusImpl::EncodeImpl( info.encoded_timestamp = first_timestamp_in_buffer_; info.payload_type = payload_type_; info.send_even_if_empty = true; // Allows Opus to send empty packets. - // After 20 DTX frames (MAX_CONSECUTIVE_DTX) Opus will send a frame - // coding the background noise. Avoid flagging this frame as speech - // (even though there is a probability of the frame being speech). - info.speech = !dtx_frame && (consecutive_dtx_frames_ != 20); + info.speech = WebRtcOpus_GetInDtx(inst_) == 0; info.encoder_type = CodecType::kOpus; - // Increase or reset DTX counter. - consecutive_dtx_frames_ = (dtx_frame) ? (consecutive_dtx_frames_ + 1) : (0); - return info; } size_t AudioEncoderOpusImpl::Num10msFramesPerPacket() const { - return static_cast(rtc::CheckedDivExact(config_.frame_size_ms, 10)); + return static_cast(CheckedDivExact(config_.frame_size_ms, 10)); } size_t AudioEncoderOpusImpl::SamplesPer10msFrame() const { - return rtc::CheckedDivExact(config_.sample_rate_hz, 100) * - config_.num_channels; + return CheckedDivExact(config_.sample_rate_hz, 100) * config_.num_channels; } size_t AudioEncoderOpusImpl::SufficientOutputBufferSize() const { @@ -708,9 +702,9 @@ bool AudioEncoderOpusImpl::RecreateEncoderInstance( void AudioEncoderOpusImpl::SetFrameLength(int frame_length_ms) { if (next_frame_length_ms_ != frame_length_ms) { - RTC_LOG(LS_VERBOSE) << "Update Opus frame length " - << "from " << next_frame_length_ms_ << " ms " - << "to " << frame_length_ms << " ms."; + RTC_LOG(LS_VERBOSE) << "Update Opus frame length " << "from " + << next_frame_length_ms_ << " ms " << "to " + << frame_length_ms << " ms."; } next_frame_length_ms_ = frame_length_ms; } @@ -738,9 +732,9 @@ void AudioEncoderOpusImpl::SetProjectedPacketLossRate(float fraction) { } void AudioEncoderOpusImpl::SetTargetBitrate(int bits_per_second) { - const int new_bitrate = rtc::SafeClamp( - bits_per_second, AudioEncoderOpusConfig::kMinBitrateBps, - AudioEncoderOpusConfig::kMaxBitrateBps); + const int new_bitrate = + SafeClamp(bits_per_second, AudioEncoderOpusConfig::kMinBitrateBps, + AudioEncoderOpusConfig::kMaxBitrateBps); if (config_.bitrate_bps && *config_.bitrate_bps != new_bitrate) { config_.bitrate_bps = new_bitrate; RTC_DCHECK(config_.IsOk()); @@ -788,11 +782,11 @@ AudioEncoderOpusImpl::DefaultAudioNetworkAdaptorCreator( void AudioEncoderOpusImpl::MaybeUpdateUplinkBandwidth() { if (audio_network_adaptor_ && !use_stable_target_for_adaptation_) { - int64_t now_ms = rtc::TimeMillis(); + int64_t now_ms = TimeMillis(); if (!bitrate_smoother_last_update_time_ || now_ms - *bitrate_smoother_last_update_time_ >= config_.uplink_bandwidth_update_interval_ms) { - absl::optional smoothed_bitrate = 
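The speech flag now comes straight from the encoder's DTX state (WebRtcOpus_GetInDtx, which wraps OPUS_GET_IN_DTX) instead of counting consecutive tiny packets. A hedged sketch against raw libopus, assuming a libopus version that defines OPUS_GET_IN_DTX:

#include <opus.h>

// Returns true if the frame that was just encoded should be flagged as speech,
// i.e. the encoder is not currently in DTX.
bool EncodedFrameIsSpeech(OpusEncoder* encoder) {
  opus_int32 in_dtx = 0;
  if (opus_encoder_ctl(encoder, OPUS_GET_IN_DTX(&in_dtx)) != OPUS_OK) {
    return true;  // On error, conservatively treat the frame as speech.
  }
  return in_dtx == 0;
}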
bitrate_smoother_->GetAverage(); + std::optional smoothed_bitrate = bitrate_smoother_->GetAverage(); if (smoothed_bitrate) audio_network_adaptor_->SetUplinkBandwidth(*smoothed_bitrate); bitrate_smoother_last_update_time_ = now_ms; @@ -807,11 +801,11 @@ ANAStats AudioEncoderOpusImpl::GetANAStats() const { return ANAStats(); } -absl::optional > +std::optional > AudioEncoderOpusImpl::GetFrameLengthRange() const { if (audio_network_adaptor_) { if (config_.supported_frame_lengths_ms.empty()) { - return absl::nullopt; + return std::nullopt; } return {{TimeDelta::Millis(config_.supported_frame_lengths_ms.front()), TimeDelta::Millis(config_.supported_frame_lengths_ms.back())}}; diff --git a/modules/audio_coding/codecs/opus/audio_encoder_opus.h b/modules/audio_coding/codecs/opus/audio_encoder_opus.h index 8c5c235016..706280f2e3 100644 --- a/modules/audio_coding/codecs/opus/audio_encoder_opus.h +++ b/modules/audio_coding/codecs/opus/audio_encoder_opus.h @@ -13,14 +13,15 @@ #include #include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/audio_codecs/audio_encoder.h" #include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/opus/audio_encoder_opus_config.h" +#include "api/environment/environment.h" #include "common_audio/smoothing_filter.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h" #include "modules/audio_coding/codecs/opus/opus_interface.h" @@ -35,13 +36,13 @@ class AudioEncoderOpusImpl final : public AudioEncoder { // defined by complexity_threshold_bps +/- complexity_threshold_window_bps. // Otherwise, returns the current complexity depending on whether the // current bitrate is above or below complexity_threshold_bps. - static absl::optional GetNewComplexity( + static std::optional GetNewComplexity( const AudioEncoderOpusConfig& config); // Returns OPUS_AUTO if the the current bitrate is above wideband threshold. // Returns empty if it is below, but bandwidth coincides with the desired one. // Otherwise returns the desired bandwidth. - static absl::optional GetNewBandwidth( + static std::optional GetNewBandwidth( const AudioEncoderOpusConfig& config, OpusEncInst* inst); @@ -49,16 +50,17 @@ class AudioEncoderOpusImpl final : public AudioEncoder { std::function(absl::string_view, RtcEventLog*)>; - AudioEncoderOpusImpl(const AudioEncoderOpusConfig& config, int payload_type); - - // Dependency injection for testing. 
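MaybeUpdateUplinkBandwidth() above forwards the smoothed bitrate to the audio network adaptor at most once per uplink_bandwidth_update_interval_ms. The throttling itself reduces to a small helper like this (names are illustrative, not WebRTC API):

#include <cstdint>
#include <optional>

class ThrottledReporter {
 public:
  explicit ThrottledReporter(int64_t interval_ms) : interval_ms_(interval_ms) {}

  // Returns true when enough time has passed since the last report; callers
  // then fetch the smoothed estimate and forward it downstream.
  bool ShouldReport(int64_t now_ms) {
    if (last_report_ms_ && now_ms - *last_report_ms_ < interval_ms_) {
      return false;
    }
    last_report_ms_ = now_ms;
    return true;
  }

 private:
  const int64_t interval_ms_;
  std::optional<int64_t> last_report_ms_;
};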
- AudioEncoderOpusImpl( + static std::unique_ptr CreateForTesting( + const Environment& env, const AudioEncoderOpusConfig& config, int payload_type, const AudioNetworkAdaptorCreator& audio_network_adaptor_creator, std::unique_ptr bitrate_smoother); - AudioEncoderOpusImpl(int payload_type, const SdpAudioFormat& format); + AudioEncoderOpusImpl(const Environment& env, + const AudioEncoderOpusConfig& config, + int payload_type); + ~AudioEncoderOpusImpl() override; AudioEncoderOpusImpl(const AudioEncoderOpusImpl&) = delete; @@ -88,18 +90,17 @@ class AudioEncoderOpusImpl final : public AudioEncoder { void OnReceivedUplinkPacketLossFraction( float uplink_packet_loss_fraction) override; void OnReceivedTargetAudioBitrate(int target_audio_bitrate_bps) override; - void OnReceivedUplinkBandwidth( - int target_audio_bitrate_bps, - absl::optional bwe_period_ms) override; + void OnReceivedUplinkBandwidth(int target_audio_bitrate_bps, + std::optional bwe_period_ms) override; void OnReceivedUplinkAllocation(BitrateAllocationUpdate update) override; void OnReceivedRtt(int rtt_ms) override; void OnReceivedOverhead(size_t overhead_bytes_per_packet) override; void SetReceiverFrameLengthRange(int min_frame_length_ms, int max_frame_length_ms) override; ANAStats GetANAStats() const override; - absl::optional > GetFrameLengthRange() + std::optional > GetFrameLengthRange() const override; - rtc::ArrayView supported_frame_lengths_ms() const { + ArrayView supported_frame_lengths_ms() const { return config_.supported_frame_lengths_ms; } @@ -114,19 +115,23 @@ class AudioEncoderOpusImpl final : public AudioEncoder { protected: EncodedInfo EncodeImpl(uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) override; + ArrayView audio, + Buffer* encoded) override; private: class PacketLossFractionSmoother; - static absl::optional SdpToConfig( + AudioEncoderOpusImpl( + const Environment& env, + const AudioEncoderOpusConfig& config, + int payload_type, + const AudioNetworkAdaptorCreator& audio_network_adaptor_creator, + std::unique_ptr bitrate_smoother); + + static std::optional SdpToConfig( const SdpAudioFormat& format); static void AppendSupportedEncoders(std::vector* specs); static AudioCodecInfo QueryAudioEncoder(const AudioEncoderOpusConfig& config); - static std::unique_ptr MakeAudioEncoder( - const AudioEncoderOpusConfig&, - int payload_type); size_t Num10msFramesPerPacket() const; size_t SamplesPer10msFrame() const; @@ -138,8 +143,8 @@ class AudioEncoderOpusImpl final : public AudioEncoder { void OnReceivedUplinkBandwidth( int target_audio_bitrate_bps, - absl::optional bwe_period_ms, - absl::optional link_capacity_allocation); + std::optional bwe_period_ms, + std::optional link_capacity_allocation); // TODO(minyue): remove "override" when we can deprecate // `AudioEncoder::SetTargetBitrate`. 
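The header change above replaces the public test-only constructor with a static CreateForTesting() factory and makes the injecting constructor private. A generic sketch of that pattern, with illustrative types rather than the actual WebRTC classes:

#include <memory>
#include <utility>

class Smoother {};  // Stand-in for the injected dependency.

class EncoderLike {
 public:
  explicit EncoderLike(int payload_type)
      : EncoderLike(payload_type, std::make_unique<Smoother>()) {}

  // Tests inject mocks through one clearly named entry point instead of an
  // extra public constructor overload.
  static std::unique_ptr<EncoderLike> CreateForTesting(
      int payload_type, std::unique_ptr<Smoother> smoother) {
    return std::unique_ptr<EncoderLike>(
        new EncoderLike(payload_type, std::move(smoother)));
  }

  int payload_type() const { return payload_type_; }

 private:
  EncoderLike(int payload_type, std::unique_ptr<Smoother> smoother)
      : payload_type_(payload_type), smoother_(std::move(smoother)) {}

  const int payload_type_;
  const std::unique_ptr<Smoother> smoother_;
};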
@@ -171,10 +176,9 @@ class AudioEncoderOpusImpl final : public AudioEncoder { std::unique_ptr packet_loss_fraction_smoother_; const AudioNetworkAdaptorCreator audio_network_adaptor_creator_; std::unique_ptr audio_network_adaptor_; - absl::optional overhead_bytes_per_packet_; + std::optional overhead_bytes_per_packet_; const std::unique_ptr bitrate_smoother_; - absl::optional bitrate_smoother_last_update_time_; - int consecutive_dtx_frames_; + std::optional bitrate_smoother_last_update_time_; friend struct AudioEncoderOpus; }; diff --git a/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc b/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc index a2ebe43bbe..d4fe4683e1 100644 --- a/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc +++ b/modules/audio_coding/codecs/opus/audio_encoder_opus_unittest.cc @@ -15,6 +15,7 @@ #include #include "absl/strings/string_view.h" +#include "api/environment/environment_factory.h" #include "common_audio/mocks/mock_smoothing_filter.h" #include "modules/audio_coding/audio_network_adaptor/mock/mock_audio_network_adaptor.h" #include "modules/audio_coding/codecs/opus/audio_encoder_opus.h" @@ -22,24 +23,24 @@ #include "modules/audio_coding/neteq/tools/audio_loop.h" #include "rtc_base/checks.h" #include "rtc_base/fake_clock.h" -#include "test/field_trial.h" +#include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" namespace webrtc { +namespace { +using test::ExplicitKeyValueConfig; using ::testing::NiceMock; using ::testing::Return; -namespace { - constexpr int kDefaultOpusPayloadType = 105; constexpr int kDefaultOpusRate = 32000; constexpr int kDefaultOpusPacSize = 960; constexpr int64_t kInitialTimeUs = 12345678; AudioEncoderOpusConfig CreateConfigWithParameters( - const SdpAudioFormat::Parameters& params) { + const CodecParameterMap& params) { const SdpAudioFormat format("opus", 48000, 2, params); return *AudioEncoderOpus::SdpToConfig(format); } @@ -48,21 +49,23 @@ struct AudioEncoderOpusStates { MockAudioNetworkAdaptor* mock_audio_network_adaptor; MockSmoothingFilter* mock_bitrate_smoother; std::unique_ptr encoder; - std::unique_ptr fake_clock; + std::unique_ptr fake_clock; AudioEncoderOpusConfig config; }; -std::unique_ptr CreateCodec(int sample_rate_hz, - size_t num_channels) { +std::unique_ptr CreateCodec( + int sample_rate_hz, + size_t num_channels, + const FieldTrialsView* field_trials = nullptr) { std::unique_ptr states = std::make_unique(); states->mock_audio_network_adaptor = nullptr; - states->fake_clock.reset(new rtc::ScopedFakeClock()); + states->fake_clock.reset(new ScopedFakeClock()); states->fake_clock->SetTime(Timestamp::Micros(kInitialTimeUs)); MockAudioNetworkAdaptor** mock_ptr = &states->mock_audio_network_adaptor; AudioEncoderOpusImpl::AudioNetworkAdaptorCreator creator = - [mock_ptr](absl::string_view, RtcEventLog* event_log) { + [mock_ptr](absl::string_view, RtcEventLog* /* event_log */) { std::unique_ptr adaptor( new NiceMock()); EXPECT_CALL(*adaptor, Die()); @@ -71,7 +74,7 @@ std::unique_ptr CreateCodec(int sample_rate_hz, }; AudioEncoderOpusConfig config; - config.frame_size_ms = rtc::CheckedDivExact(kDefaultOpusPacSize, 48); + config.frame_size_ms = CheckedDivExact(kDefaultOpusPacSize, 48); config.sample_rate_hz = sample_rate_hz; config.num_channels = num_channels; config.bitrate_bps = kDefaultOpusRate; @@ -85,9 +88,9 @@ std::unique_ptr CreateCodec(int sample_rate_hz, new MockSmoothingFilter()); 
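The updated tests drop the global test::ScopedFieldTrials override and instead construct field trials explicitly and hand them to the Environment. A usage sketch mirroring the updated call sites:

#include <memory>

#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"
#include "test/explicit_key_value_config.h"

// Builds a per-test Environment; no global field-trial state is involved.
webrtc::Environment BuildTestEnvironment() {
  return webrtc::CreateEnvironment(
      std::make_unique<webrtc::test::ExplicitKeyValueConfig>(
          "WebRTC-Audio-StableTargetAdaptation/Disabled/"));
}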
states->mock_bitrate_smoother = bitrate_smoother.get(); - states->encoder.reset( - new AudioEncoderOpusImpl(states->config, kDefaultOpusPayloadType, creator, - std::move(bitrate_smoother))); + states->encoder = AudioEncoderOpusImpl::CreateForTesting( + CreateEnvironment(field_trials), states->config, kDefaultOpusPayloadType, + creator, std::move(bitrate_smoother)); return states; } @@ -120,8 +123,7 @@ std::unique_ptr Create10msAudioBlocks( test::ResourcePath("audio_coding/testfile32kHz", "pcm"); std::unique_ptr speech_data(new test::AudioLoop()); - int audio_samples_per_ms = - rtc::CheckedDivExact(encoder->SampleRateHz(), 1000); + int audio_samples_per_ms = CheckedDivExact(encoder->SampleRateHz(), 1000); if (!speech_data->Init( file_name, packet_size_ms * audio_samples_per_ms * @@ -201,28 +203,28 @@ TEST_P(AudioEncoderOpusTest, const int kMaxBitrateBps = 510000; const int kOverheadBytesPerPacket = 64; states->encoder->OnReceivedOverhead(kOverheadBytesPerPacket); - const int kOverheadBps = 8 * kOverheadBytesPerPacket * - rtc::CheckedDivExact(48000, kDefaultOpusPacSize); + const int kOverheadBps = + 8 * kOverheadBytesPerPacket * CheckedDivExact(48000, kDefaultOpusPacSize); // Set a too low bitrate. states->encoder->OnReceivedUplinkBandwidth(kMinBitrateBps + kOverheadBps - 1, - absl::nullopt); + std::nullopt); EXPECT_EQ(kMinBitrateBps, states->encoder->GetTargetBitrate()); // Set a too high bitrate. states->encoder->OnReceivedUplinkBandwidth(kMaxBitrateBps + kOverheadBps + 1, - absl::nullopt); + std::nullopt); EXPECT_EQ(kMaxBitrateBps, states->encoder->GetTargetBitrate()); // Set the minimum rate. states->encoder->OnReceivedUplinkBandwidth(kMinBitrateBps + kOverheadBps, - absl::nullopt); + std::nullopt); EXPECT_EQ(kMinBitrateBps, states->encoder->GetTargetBitrate()); // Set the maximum rate. states->encoder->OnReceivedUplinkBandwidth(kMaxBitrateBps + kOverheadBps, - absl::nullopt); + std::nullopt); EXPECT_EQ(kMaxBitrateBps, states->encoder->GetTargetBitrate()); // Set rates from kMaxBitrateBps up to 32000 bps. for (int rate = kMinBitrateBps + kOverheadBps; rate <= 32000 + kOverheadBps; rate += 1000) { - states->encoder->OnReceivedUplinkBandwidth(rate, absl::nullopt); + states->encoder->OnReceivedUplinkBandwidth(rate, std::nullopt); EXPECT_EQ(rate - kOverheadBps, states->encoder->GetTargetBitrate()); } } @@ -264,9 +266,9 @@ TEST_P(AudioEncoderOpusTest, TEST_P(AudioEncoderOpusTest, InvokeAudioNetworkAdaptorOnReceivedUplinkBandwidth) { - test::ScopedFieldTrials override_field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Audio-StableTargetAdaptation/Disabled/"); - auto states = CreateCodec(sample_rate_hz_, 2); + auto states = CreateCodec(sample_rate_hz_, 2, &field_trials); states->encoder->EnableAudioNetworkAdaptor("", nullptr); auto config = CreateEncoderRuntimeConfig(); @@ -376,7 +378,7 @@ TEST_P(AudioEncoderOpusTest, DoNotInvokeSetTargetBitrateIfOverheadUnknown) { auto states = CreateCodec(sample_rate_hz_, 2); states->encoder->OnReceivedUplinkBandwidth(kDefaultOpusRate * 2, - absl::nullopt); + std::nullopt); // Since `OnReceivedOverhead` has not been called, the codec bitrate should // not change. @@ -391,7 +393,7 @@ TEST(AudioEncoderOpusTest, ConfigComplexityAdaptation) { // Bitrate within hysteresis window. Expect empty output. config.bitrate_bps = 12500; - EXPECT_EQ(absl::nullopt, AudioEncoderOpusImpl::GetNewComplexity(config)); + EXPECT_EQ(std::nullopt, AudioEncoderOpusImpl::GetNewComplexity(config)); // Bitrate below hysteresis window. Expect higher complexity. 
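The overhead test above converts the per-packet overhead into bits per second (8 * overhead bytes * packets per second) and expects the target bitrate to be the uplink estimate minus that overhead, clamped to the supported Opus range. The arithmetic in sketch form, with std::clamp standing in for the SafeClamp used by SetTargetBitrate():

#include <algorithm>

int TargetPayloadBitrateBps(int uplink_bps,
                            int overhead_bytes_per_packet,
                            int packets_per_second,  // 50 for 20 ms packets.
                            int min_bps,
                            int max_bps) {
  const int overhead_bps = 8 * overhead_bytes_per_packet * packets_per_second;
  return std::clamp(uplink_bps - overhead_bps, min_bps, max_bps);
}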
config.bitrate_bps = 10999; @@ -399,7 +401,7 @@ TEST(AudioEncoderOpusTest, ConfigComplexityAdaptation) { // Bitrate within hysteresis window. Expect empty output. config.bitrate_bps = 12500; - EXPECT_EQ(absl::nullopt, AudioEncoderOpusImpl::GetNewComplexity(config)); + EXPECT_EQ(std::nullopt, AudioEncoderOpusImpl::GetNewComplexity(config)); // Bitrate above hysteresis window. Expect lower complexity. config.bitrate_bps = 14001; @@ -409,7 +411,7 @@ TEST(AudioEncoderOpusTest, ConfigComplexityAdaptation) { // Verifies that the bandwidth adaptation in the config works as intended. TEST_P(AudioEncoderOpusTest, ConfigBandwidthAdaptation) { AudioEncoderOpusConfig config; - const size_t opus_rate_khz = rtc::CheckedDivExact(sample_rate_hz_, 1000); + const size_t opus_rate_khz = CheckedDivExact(sample_rate_hz_, 1000); const std::vector silence( opus_rate_khz * config.frame_size_ms * config.num_channels, 0); constexpr size_t kMaxBytes = 1000; @@ -425,41 +427,41 @@ TEST_P(AudioEncoderOpusTest, ConfigBandwidthAdaptation) { sample_rate_hz_)); // Bitrate below minmum wideband. Expect narrowband. - config.bitrate_bps = absl::optional(7999); + config.bitrate_bps = std::optional(7999); auto bandwidth = AudioEncoderOpusImpl::GetNewBandwidth(config, inst); - EXPECT_EQ(absl::optional(OPUS_BANDWIDTH_NARROWBAND), bandwidth); + EXPECT_EQ(std::optional(OPUS_BANDWIDTH_NARROWBAND), bandwidth); WebRtcOpus_SetBandwidth(inst, *bandwidth); // It is necessary to encode here because Opus has some logic in the encoder // that goes from the user-set bandwidth to the used and returned one. WebRtcOpus_Encode(inst, silence.data(), - rtc::CheckedDivExact(silence.size(), config.num_channels), + CheckedDivExact(silence.size(), config.num_channels), kMaxBytes, bitstream); // Bitrate not yet above maximum narrowband. Expect empty. - config.bitrate_bps = absl::optional(9000); + config.bitrate_bps = std::optional(9000); bandwidth = AudioEncoderOpusImpl::GetNewBandwidth(config, inst); - EXPECT_EQ(absl::optional(), bandwidth); + EXPECT_EQ(std::optional(), bandwidth); // Bitrate above maximum narrowband. Expect wideband. - config.bitrate_bps = absl::optional(9001); + config.bitrate_bps = std::optional(9001); bandwidth = AudioEncoderOpusImpl::GetNewBandwidth(config, inst); - EXPECT_EQ(absl::optional(OPUS_BANDWIDTH_WIDEBAND), bandwidth); + EXPECT_EQ(std::optional(OPUS_BANDWIDTH_WIDEBAND), bandwidth); WebRtcOpus_SetBandwidth(inst, *bandwidth); // It is necessary to encode here because Opus has some logic in the encoder // that goes from the user-set bandwidth to the used and returned one. WebRtcOpus_Encode(inst, silence.data(), - rtc::CheckedDivExact(silence.size(), config.num_channels), + CheckedDivExact(silence.size(), config.num_channels), kMaxBytes, bitstream); // Bitrate not yet below minimum wideband. Expect empty. - config.bitrate_bps = absl::optional(8000); + config.bitrate_bps = std::optional(8000); bandwidth = AudioEncoderOpusImpl::GetNewBandwidth(config, inst); - EXPECT_EQ(absl::optional(), bandwidth); + EXPECT_EQ(std::optional(), bandwidth); // Bitrate above automatic threshold. Expect automatic. 
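The complexity test above exercises a hysteresis window around a bitrate threshold: inside the window GetNewComplexity() returns an empty value (keep the current setting), below it the low-rate complexity is used, above it the normal one. A sketch with illustrative threshold and window values inferred from the test expectations:

#include <optional>

// Illustrative values: the test implies a window ending near 11000 bps on the
// low side and 14000 bps on the high side.
std::optional<int> NewComplexity(int bitrate_bps,
                                 int threshold_bps,  // e.g. 12500
                                 int window_bps,     // e.g. 1500
                                 int low_rate_complexity,
                                 int normal_complexity) {
  if (bitrate_bps < threshold_bps - window_bps) {
    return low_rate_complexity;  // Low bitrate: spend more CPU per bit.
  }
  if (bitrate_bps > threshold_bps + window_bps) {
    return normal_complexity;
  }
  return std::nullopt;  // Inside the window: leave the complexity unchanged.
}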
- config.bitrate_bps = absl::optional(12001); + config.bitrate_bps = std::optional(12001); bandwidth = AudioEncoderOpusImpl::GetNewBandwidth(config, inst); - EXPECT_EQ(absl::optional(OPUS_AUTO), bandwidth); + EXPECT_EQ(std::optional(OPUS_AUTO), bandwidth); EXPECT_EQ(0, WebRtcOpus_EncoderFree(inst)); } @@ -485,18 +487,18 @@ TEST_P(AudioEncoderOpusTest, EmptyConfigDoesNotAffectEncoderSettings) { } TEST_P(AudioEncoderOpusTest, UpdateUplinkBandwidthInAudioNetworkAdaptor) { - test::ScopedFieldTrials override_field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Audio-StableTargetAdaptation/Disabled/"); - auto states = CreateCodec(sample_rate_hz_, 2); + auto states = CreateCodec(sample_rate_hz_, 2, &field_trials); states->encoder->EnableAudioNetworkAdaptor("", nullptr); - const size_t opus_rate_khz = rtc::CheckedDivExact(sample_rate_hz_, 1000); + const size_t opus_rate_khz = CheckedDivExact(sample_rate_hz_, 1000); const std::vector audio(opus_rate_khz * 10 * 2, 0); - rtc::Buffer encoded; + Buffer encoded; EXPECT_CALL(*states->mock_bitrate_smoother, GetAverage()) .WillOnce(Return(50000)); EXPECT_CALL(*states->mock_audio_network_adaptor, SetUplinkBandwidth(50000)); states->encoder->Encode( - 0, rtc::ArrayView(audio.data(), audio.size()), &encoded); + 0, ArrayView(audio.data(), audio.size()), &encoded); // Repeat update uplink bandwidth tests. for (int i = 0; i < 5; i++) { @@ -504,7 +506,7 @@ TEST_P(AudioEncoderOpusTest, UpdateUplinkBandwidthInAudioNetworkAdaptor) { states->fake_clock->AdvanceTime(TimeDelta::Millis( states->config.uplink_bandwidth_update_interval_ms - 1)); states->encoder->Encode( - 0, rtc::ArrayView(audio.data(), audio.size()), &encoded); + 0, ArrayView(audio.data(), audio.size()), &encoded); // Update when it is time to update. EXPECT_CALL(*states->mock_bitrate_smoother, GetAverage()) @@ -512,7 +514,7 @@ TEST_P(AudioEncoderOpusTest, UpdateUplinkBandwidthInAudioNetworkAdaptor) { EXPECT_CALL(*states->mock_audio_network_adaptor, SetUplinkBandwidth(40000)); states->fake_clock->AdvanceTime(TimeDelta::Millis(1)); states->encoder->Encode( - 0, rtc::ArrayView(audio.data(), audio.size()), &encoded); + 0, ArrayView(audio.data(), audio.size()), &encoded); } } @@ -522,10 +524,10 @@ TEST_P(AudioEncoderOpusTest, EncodeAtMinBitrate) { auto audio_frames = Create10msAudioBlocks(states->encoder, kNumPacketsToEncode * 20); ASSERT_TRUE(audio_frames) << "Create10msAudioBlocks failed"; - rtc::Buffer encoded; + Buffer encoded; uint32_t rtp_timestamp = 12345; // Just a number not important to this test. 
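ConfigBandwidthAdaptation above checks a hysteresis on the audio bandwidth choice: switch points near 8000 and 9000 bps, and OPUS_AUTO above roughly 12000 bps. A sketch consistent with those expectations (thresholds inferred from the test values, not taken from the implementation):

#include <optional>

#include <opus.h>  // For OPUS_AUTO and OPUS_BANDWIDTH_*.

std::optional<int> NewBandwidth(int bitrate_bps, int current_bandwidth) {
  constexpr int kAutoThresholdBps = 12000;  // Above this: let Opus decide.
  constexpr int kWidebandEnterBps = 9000;   // Go wideband above this...
  constexpr int kWidebandLeaveBps = 8000;   // ...and narrowband below this.
  if (bitrate_bps > kAutoThresholdBps) {
    return OPUS_AUTO;
  }
  if (current_bandwidth == OPUS_BANDWIDTH_NARROWBAND &&
      bitrate_bps > kWidebandEnterBps) {
    return OPUS_BANDWIDTH_WIDEBAND;
  }
  if (current_bandwidth == OPUS_BANDWIDTH_WIDEBAND &&
      bitrate_bps < kWidebandLeaveBps) {
    return OPUS_BANDWIDTH_NARROWBAND;
  }
  return std::nullopt;  // Within the hysteresis window: no change.
}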
- states->encoder->OnReceivedUplinkBandwidth(0, absl::nullopt); + states->encoder->OnReceivedUplinkBandwidth(0, std::nullopt); for (int packet_index = 0; packet_index < kNumPacketsToEncode; packet_index++) { // Make sure we are not encoding before we have enough data for @@ -670,10 +672,10 @@ TEST(AudioEncoderOpusTest, TestConfigFromInvalidParams) { TEST(AudioEncoderOpusTest, GetFrameLenghtRange) { AudioEncoderOpusConfig config = CreateConfigWithParameters({{"maxptime", "10"}, {"ptime", "10"}}); - std::unique_ptr encoder = - AudioEncoderOpus::MakeAudioEncoder(config, kDefaultOpusPayloadType); + std::unique_ptr encoder = AudioEncoderOpus::MakeAudioEncoder( + CreateEnvironment(), config, {.payload_type = kDefaultOpusPayloadType}); auto ptime = webrtc::TimeDelta::Millis(10); - absl::optional> range = { + std::optional> range = { {ptime, ptime}}; EXPECT_EQ(encoder->GetFrameLengthRange(), range); } @@ -762,8 +764,8 @@ TEST_P(AudioEncoderOpusTest, OpusFlagDtxAsNonSpeech) { AudioEncoderOpusConfig config; config.dtx_enabled = true; config.sample_rate_hz = sample_rate_hz_; - constexpr int payload_type = 17; - const auto encoder = AudioEncoderOpus::MakeAudioEncoder(config, payload_type); + const auto encoder = AudioEncoderOpus::MakeAudioEncoder( + CreateEnvironment(), config, {.payload_type = 17}); // Open file containing speech and silence. const std::string kInputFileName = @@ -779,7 +781,7 @@ TEST_P(AudioEncoderOpusTest, OpusFlagDtxAsNonSpeech) { // Encode. AudioEncoder::EncodedInfo info; - rtc::Buffer encoded(500); + Buffer encoded(500); int nonspeech_frames = 0; int max_nonspeech_frames = 0; int dtx_frames = 0; @@ -818,97 +820,4 @@ TEST_P(AudioEncoderOpusTest, OpusFlagDtxAsNonSpeech) { EXPECT_GT(max_nonspeech_frames, 15); } -TEST(AudioEncoderOpusTest, OpusDtxFilteringHighEnergyRefreshPackets) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-Audio-OpusAvoidNoisePumpingDuringDtx/Enabled/"); - const std::string kInputFileName = - webrtc::test::ResourcePath("audio_coding/testfile16kHz", "pcm"); - constexpr int kSampleRateHz = 16000; - AudioEncoderOpusConfig config; - config.dtx_enabled = true; - config.sample_rate_hz = kSampleRateHz; - constexpr int payload_type = 17; - const auto encoder = AudioEncoderOpus::MakeAudioEncoder(config, payload_type); - test::AudioLoop audio_loop; - constexpr size_t kMaxLoopLengthSaples = kSampleRateHz * 11.6f; - constexpr size_t kInputBlockSizeSamples = kSampleRateHz / 100; - EXPECT_TRUE(audio_loop.Init(kInputFileName, kMaxLoopLengthSaples, - kInputBlockSizeSamples)); - AudioEncoder::EncodedInfo info; - rtc::Buffer encoded(500); - // Encode the audio file and store the last part that corresponds to silence. - constexpr size_t kSilenceDurationSamples = kSampleRateHz * 0.2f; - std::array silence; - uint32_t rtp_timestamp = 0; - bool last_packet_dtx_frame = false; - bool opus_entered_dtx = false; - bool silence_filled = false; - size_t timestamp_start_silence = 0; - while (!silence_filled && rtp_timestamp < kMaxLoopLengthSaples) { - encoded.Clear(); - // Every second call to the encoder will generate an Opus packet. 
- for (int j = 0; j < 2; j++) { - auto next_frame = audio_loop.GetNextBlock(); - info = encoder->Encode(rtp_timestamp, next_frame, &encoded); - if (opus_entered_dtx) { - size_t silence_frame_start = rtp_timestamp - timestamp_start_silence; - silence_filled = silence_frame_start >= kSilenceDurationSamples; - if (!silence_filled) { - std::copy(next_frame.begin(), next_frame.end(), - silence.begin() + silence_frame_start); - } - } - rtp_timestamp += kInputBlockSizeSamples; - } - EXPECT_TRUE(info.encoded_bytes > 0 || last_packet_dtx_frame); - last_packet_dtx_frame = info.encoded_bytes > 0 ? info.encoded_bytes <= 2 - : last_packet_dtx_frame; - if (info.encoded_bytes <= 2 && !opus_entered_dtx) { - timestamp_start_silence = rtp_timestamp; - } - opus_entered_dtx = info.encoded_bytes <= 2; - } - - EXPECT_TRUE(silence_filled); - // The copied 200 ms of silence is used for creating 6 bursts that are fed to - // the encoder, the first three ones with a larger energy and the last three - // with a lower energy. This test verifies that the encoder just sends refresh - // DTX packets during the last bursts. - int number_non_empty_packets_during_increase = 0; - int number_non_empty_packets_during_decrease = 0; - for (size_t burst = 0; burst < 6; ++burst) { - uint32_t rtp_timestamp_start = rtp_timestamp; - const bool increase_noise = burst < 3; - const float gain = increase_noise ? 1.4f : 0.0f; - while (rtp_timestamp < rtp_timestamp_start + kSilenceDurationSamples) { - encoded.Clear(); - // Every second call to the encoder will generate an Opus packet. - for (int j = 0; j < 2; j++) { - std::array silence_frame; - size_t silence_frame_start = rtp_timestamp - rtp_timestamp_start; - std::transform( - silence.begin() + silence_frame_start, - silence.begin() + silence_frame_start + kInputBlockSizeSamples, - silence_frame.begin(), [gain](float s) { return gain * s; }); - info = encoder->Encode(rtp_timestamp, silence_frame, &encoded); - rtp_timestamp += kInputBlockSizeSamples; - } - EXPECT_TRUE(info.encoded_bytes > 0 || last_packet_dtx_frame); - last_packet_dtx_frame = info.encoded_bytes > 0 ? info.encoded_bytes <= 2 - : last_packet_dtx_frame; - // Tracking the number of non empty packets. - if (increase_noise && info.encoded_bytes > 2) { - number_non_empty_packets_during_increase++; - } - if (!increase_noise && info.encoded_bytes > 2) { - number_non_empty_packets_during_decrease++; - } - } - } - // Check that the refresh DTX packets are just sent during the decrease energy - // region. 
- EXPECT_EQ(number_non_empty_packets_during_increase, 0); - EXPECT_GT(number_non_empty_packets_during_decrease, 0); -} - } // namespace webrtc diff --git a/modules/audio_coding/codecs/opus/opus_bandwidth_unittest.cc b/modules/audio_coding/codecs/opus/opus_bandwidth_unittest.cc index 38b60c6187..2d5aa445ee 100644 --- a/modules/audio_coding/codecs/opus/opus_bandwidth_unittest.cc +++ b/modules/audio_coding/codecs/opus/opus_bandwidth_unittest.cc @@ -10,11 +10,13 @@ #include "api/audio_codecs/opus/audio_decoder_opus.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "common_audio/include/audio_util.h" #include "common_audio/window_generator.h" #include "modules/audio_coding/codecs/opus/test/lapped_transform.h" #include "modules/audio_coding/neteq/tools/audio_loop.h" -#include "test/field_trial.h" +#include "test/explicit_key_value_config.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" @@ -46,9 +48,9 @@ class PowerRatioEstimator : public LappedTransform::Callback { protected: void ProcessAudioBlock(const std::complex* const* input, size_t num_input_channels, - size_t num_freq_bins, - size_t num_output_channels, - std::complex* const* output) override { + size_t /* num_freq_bins */, + size_t /* num_output_channels */, + std::complex* const* /* output */) override { float low_pow = 0.f; float high_pow = 0.f; for (size_t i = 0u; i < num_input_channels; ++i) { @@ -76,7 +78,7 @@ float EncodedPowerRatio(AudioEncoder* encoder, // Encode and decode. uint32_t rtp_timestamp = 0u; constexpr size_t kBufferSize = 500; - rtc::Buffer encoded(kBufferSize); + Buffer encoded(kBufferSize); std::vector decoded(kOutputBlockSizeSamples); std::vector decoded_float(kOutputBlockSizeSamples); AudioDecoder::SpeechType speech_type = AudioDecoder::kSpeech; @@ -103,23 +105,24 @@ float EncodedPowerRatio(AudioEncoder* encoder, // TODO(ivoc): Remove this test, WebRTC-AdjustOpusBandwidth is obsolete. TEST(BandwidthAdaptationTest, BandwidthAdaptationTest) { - test::ScopedFieldTrials override_field_trials( - "WebRTC-AdjustOpusBandwidth/Enabled/"); + const Environment env = + CreateEnvironment(std::make_unique( + "WebRTC-AdjustOpusBandwidth/Enabled/")); constexpr float kMaxNarrowbandRatio = 0.0035f; constexpr float kMinWidebandRatio = 0.01f; // Create encoder. AudioEncoderOpusConfig enc_config; - enc_config.bitrate_bps = absl::optional(7999); + enc_config.bitrate_bps = std::optional(7999); enc_config.num_channels = kNumChannels; - constexpr int payload_type = 17; - auto encoder = AudioEncoderOpus::MakeAudioEncoder(enc_config, payload_type); + auto encoder = + AudioEncoderOpus::MakeAudioEncoder(env, enc_config, {.payload_type = 17}); // Create decoder. AudioDecoderOpus::Config dec_config; dec_config.num_channels = kNumChannels; - auto decoder = AudioDecoderOpus::MakeAudioDecoder(dec_config); + auto decoder = AudioDecoderOpus::MakeAudioDecoder(env, dec_config); // Open speech file. 
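As exercised by the updated tests, the Opus codec factories now take an Environment, and the encoder's payload type moves into an options struct filled with a designated initializer. A short usage sketch following those call sites:

#include "api/audio_codecs/opus/audio_decoder_opus.h"
#include "api/audio_codecs/opus/audio_encoder_opus.h"
#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"

void MakeOpusCodecPair() {
  const webrtc::Environment env = webrtc::CreateEnvironment();

  webrtc::AudioEncoderOpusConfig enc_config;
  auto encoder = webrtc::AudioEncoderOpus::MakeAudioEncoder(
      env, enc_config, {.payload_type = 17});

  webrtc::AudioDecoderOpus::Config dec_config;
  auto decoder = webrtc::AudioDecoderOpus::MakeAudioDecoder(env, dec_config);
}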
const std::string kInputFileName = diff --git a/modules/audio_coding/codecs/opus/opus_complexity_unittest.cc b/modules/audio_coding/codecs/opus/opus_complexity_unittest.cc index e8c131092c..b280ec0b17 100644 --- a/modules/audio_coding/codecs/opus/opus_complexity_unittest.cc +++ b/modules/audio_coding/codecs/opus/opus_complexity_unittest.cc @@ -9,6 +9,7 @@ */ #include "api/audio_codecs/opus/audio_encoder_opus.h" +#include "api/environment/environment_factory.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/metrics/metric.h" #include "modules/audio_coding/neteq/tools/audio_loop.h" @@ -23,10 +24,11 @@ using ::webrtc::test::GetGlobalMetricsLogger; using ::webrtc::test::ImprovementDirection; using ::webrtc::test::Unit; -int64_t RunComplexityTest(const AudioEncoderOpusConfig& config) { +int64_t RunComplexityTest(const Environment& env, + const AudioEncoderOpusConfig& config) { // Create encoder. - constexpr int payload_type = 17; - const auto encoder = AudioEncoderOpus::MakeAudioEncoder(config, payload_type); + const auto encoder = + AudioEncoderOpus::MakeAudioEncoder(env, config, {.payload_type = 17}); // Open speech file. const std::string kInputFileName = webrtc::test::ResourcePath("audio_coding/speech_mono_32_48kHz", "pcm"); @@ -40,16 +42,16 @@ int64_t RunComplexityTest(const AudioEncoderOpusConfig& config) { EXPECT_TRUE(audio_loop.Init(kInputFileName, kMaxLoopLengthSamples, kInputBlockSizeSamples)); // Encode. - const int64_t start_time_ms = rtc::TimeMillis(); + const int64_t start_time_ms = TimeMillis(); AudioEncoder::EncodedInfo info; - rtc::Buffer encoded(500); + Buffer encoded(500); uint32_t rtp_timestamp = 0u; for (size_t i = 0; i < 10000; ++i) { encoded.Clear(); info = encoder->Encode(rtp_timestamp, audio_loop.GetNextBlock(), &encoded); rtp_timestamp += kInputBlockSizeSamples; } - return rtc::TimeMillis() - start_time_ms; + return TimeMillis() - start_time_ms; } // This test encodes an audio file using Opus twice with different bitrates @@ -63,16 +65,17 @@ int64_t RunComplexityTest(const AudioEncoderOpusConfig& config) { // be higher, since we have explicitly asked for a higher complexity setting at // the lower rate. TEST(AudioEncoderOpusComplexityAdaptationTest, Adaptation_On) { + const Environment env = CreateEnvironment(); // Create config. AudioEncoderOpusConfig config; // The limit -- including the hysteresis window -- at which the complexity // shuold be increased. config.bitrate_bps = 11000 - 1; config.low_rate_complexity = 9; - int64_t runtime_10999bps = RunComplexityTest(config); + int64_t runtime_10999bps = RunComplexityTest(env, config); config.bitrate_bps = 15500; - int64_t runtime_15500bps = RunComplexityTest(config); + int64_t runtime_15500bps = RunComplexityTest(env, config); GetGlobalMetricsLogger()->LogSingleValueMetric( "opus_encoding_complexity_ratio", "adaptation_on", @@ -84,16 +87,17 @@ TEST(AudioEncoderOpusComplexityAdaptationTest, Adaptation_On) { // adaptation enabled (neither on desktop, nor on mobile). The expectation is // that the resulting ratio is less than 100% at all times. TEST(AudioEncoderOpusComplexityAdaptationTest, Adaptation_Off) { + const Environment env = CreateEnvironment(); // Create config. AudioEncoderOpusConfig config; // The limit -- including the hysteresis window -- at which the complexity // shuold be increased (but not in this test since complexity adaptation is // disabled). 
config.bitrate_bps = 11000 - 1; - int64_t runtime_10999bps = RunComplexityTest(config); + int64_t runtime_10999bps = RunComplexityTest(env, config); config.bitrate_bps = 15500; - int64_t runtime_15500bps = RunComplexityTest(config); + int64_t runtime_15500bps = RunComplexityTest(env, config); GetGlobalMetricsLogger()->LogSingleValueMetric( "opus_encoding_complexity_ratio", "adaptation_off", diff --git a/modules/audio_coding/codecs/opus/opus_inst.h b/modules/audio_coding/codecs/opus/opus_inst.h index 92c5c354a7..f7b45ea69b 100644 --- a/modules/audio_coding/codecs/opus/opus_inst.h +++ b/modules/audio_coding/codecs/opus/opus_inst.h @@ -25,19 +25,17 @@ struct WebRtcOpusEncInst { OpusMSEncoder* multistream_encoder; size_t channels; int in_dtx_mode; - bool avoid_noise_pumping_during_dtx; int sample_rate_hz; - float smooth_energy_non_active_frames; }; struct WebRtcOpusDecInst { OpusDecoder* decoder; OpusMSDecoder* multistream_decoder; - int prev_decoded_samples; - bool plc_use_prev_decoded_samples; size_t channels; int in_dtx_mode; int sample_rate_hz; + // TODO: https://issues.webrtc.org/376493209 - Remove when libopus gets fixed. + int last_packet_num_channels; }; #endif // MODULES_AUDIO_CODING_CODECS_OPUS_OPUS_INST_H_ diff --git a/modules/audio_coding/codecs/opus/opus_interface.cc b/modules/audio_coding/codecs/opus/opus_interface.cc index 64a1f59237..d18b517fe4 100644 --- a/modules/audio_coding/codecs/opus/opus_interface.cc +++ b/modules/audio_coding/codecs/opus/opus_interface.cc @@ -15,7 +15,6 @@ #include "api/array_view.h" #include "rtc_base/checks.h" -#include "system_wrappers/include/field_trial.h" enum { #if WEBRTC_OPUS_SUPPORT_120MS_PTIME @@ -35,15 +34,6 @@ enum { kWebRtcOpusPlcFrameSizeMs = 10, }; -constexpr char kPlcUsePrevDecodedSamplesFieldTrial[] = - "WebRTC-Audio-OpusPlcUsePrevDecodedSamples"; - -constexpr char kAvoidNoisePumpingDuringDtxFieldTrial[] = - "WebRTC-Audio-OpusAvoidNoisePumpingDuringDtx"; - -constexpr char kSetSignalVoiceWithDtxFieldTrial[] = - "WebRTC-Audio-OpusSetSignalVoiceWithDtx"; - static int FrameSizePerChannel(int frame_size_ms, int sample_rate_hz) { RTC_DCHECK_GT(frame_size_ms, 0); RTC_DCHECK_EQ(frame_size_ms % 10, 0); @@ -57,51 +47,6 @@ static int MaxFrameSizePerChannel(int sample_rate_hz) { return FrameSizePerChannel(kWebRtcOpusMaxDecodeFrameSizeMs, sample_rate_hz); } -// Default sample count per channel. -static int DefaultFrameSizePerChannel(int sample_rate_hz) { - return FrameSizePerChannel(20, sample_rate_hz); -} - -// Returns true if the `encoded` payload corresponds to a refresh DTX packet -// whose energy is larger than the expected for non activity packets. -static bool WebRtcOpus_IsHighEnergyRefreshDtxPacket( - OpusEncInst* inst, - rtc::ArrayView frame, - rtc::ArrayView encoded) { - if (encoded.size() <= 2) { - return false; - } - int number_frames = - frame.size() / DefaultFrameSizePerChannel(inst->sample_rate_hz); - if (number_frames > 0 && - WebRtcOpus_PacketHasVoiceActivity(encoded.data(), encoded.size()) == 0) { - const float average_frame_energy = - std::accumulate(frame.begin(), frame.end(), 0.0f, - [](float a, int32_t b) { return a + b * b; }) / - number_frames; - if (WebRtcOpus_GetInDtx(inst) == 1 && - average_frame_energy >= inst->smooth_energy_non_active_frames * 0.5f) { - // This is a refresh DTX packet as the encoder is in DTX and has - // produced a payload > 2 bytes. 
This refresh packet has a higher energy - // than the smooth energy of non activity frames (with a 3 dB negative - // margin) and, therefore, it is flagged as a high energy refresh DTX - // packet. - return true; - } - // The average energy is tracked in a similar way as the modeling of the - // comfort noise in the Silk decoder in Opus - // (third_party/opus/src/silk/CNG.c). - if (average_frame_energy < inst->smooth_energy_non_active_frames * 0.5f) { - inst->smooth_energy_non_active_frames = average_frame_energy; - } else { - inst->smooth_energy_non_active_frames += - (average_frame_energy - inst->smooth_energy_non_active_frames) * - 0.25f; - } - } - return false; -} - int16_t WebRtcOpus_EncoderCreate(OpusEncInst** inst, size_t channels, int32_t application, @@ -137,9 +82,6 @@ int16_t WebRtcOpus_EncoderCreate(OpusEncInst** inst, state->in_dtx_mode = 0; state->channels = channels; state->sample_rate_hz = sample_rate_hz; - state->smooth_energy_non_active_frames = 0.0f; - state->avoid_noise_pumping_during_dtx = - webrtc::field_trial::IsEnabled(kAvoidNoisePumpingDuringDtxFieldTrial); *inst = state; return 0; @@ -185,8 +127,6 @@ int16_t WebRtcOpus_MultistreamEncoderCreate( state->in_dtx_mode = 0; state->channels = channels; state->sample_rate_hz = sample_rate_hz; - state->smooth_energy_non_active_frames = 0.0f; - state->avoid_noise_pumping_during_dtx = false; *inst = state; return 0; @@ -244,21 +184,6 @@ int WebRtcOpus_Encode(OpusEncInst* inst, } } - if (inst->avoid_noise_pumping_during_dtx && WebRtcOpus_GetUseDtx(inst) == 1 && - WebRtcOpus_IsHighEnergyRefreshDtxPacket( - inst, rtc::MakeArrayView(audio_in, samples), - rtc::MakeArrayView(encoded, res))) { - // This packet is a high energy refresh DTX packet. For avoiding an increase - // of the energy in the DTX region at the decoder, this packet is - // substituted by a TOC byte with one empty frame. - // The number of frames described in the TOC byte - // (https://tools.ietf.org/html/rfc6716#section-3.1) are overwritten to - // always indicate one frame (last two bits equal to 0). - encoded[0] = encoded[0] & 0b11111100; - inst->in_dtx_mode = 1; - // The payload is just the TOC byte and has 1 byte as length. 
- return 1; - } inst->in_dtx_mode = 0; return res; } @@ -361,12 +286,6 @@ int16_t WebRtcOpus_DisableFec(OpusEncInst* inst) { int16_t WebRtcOpus_EnableDtx(OpusEncInst* inst) { if (inst) { - if (webrtc::field_trial::IsEnabled(kSetSignalVoiceWithDtxFieldTrial)) { - int ret = ENCODER_CTL(inst, OPUS_SET_SIGNAL(OPUS_SIGNAL_VOICE)); - if (ret != OPUS_OK) { - return ret; - } - } return ENCODER_CTL(inst, OPUS_SET_DTX(1)); } else { return -1; @@ -375,12 +294,6 @@ int16_t WebRtcOpus_EnableDtx(OpusEncInst* inst) { int16_t WebRtcOpus_DisableDtx(OpusEncInst* inst) { if (inst) { - if (webrtc::field_trial::IsEnabled(kSetSignalVoiceWithDtxFieldTrial)) { - int ret = ENCODER_CTL(inst, OPUS_SET_SIGNAL(OPUS_AUTO)); - if (ret != OPUS_OK) { - return ret; - } - } return ENCODER_CTL(inst, OPUS_SET_DTX(0)); } else { return -1; @@ -397,6 +310,16 @@ int16_t WebRtcOpus_GetUseDtx(OpusEncInst* inst) { return -1; } +int16_t WebRtcOpus_GetInDtx(OpusEncInst* inst) { + if (inst) { + opus_int32 in_dtx; + if (ENCODER_CTL(inst, OPUS_GET_IN_DTX(&in_dtx)) == 0) { + return in_dtx; + } + } + return -1; +} + int16_t WebRtcOpus_EnableCbr(OpusEncInst* inst) { if (inst) { return ENCODER_CTL(inst, OPUS_SET_VBR(0)); @@ -453,19 +376,6 @@ int16_t WebRtcOpus_SetForceChannels(OpusEncInst* inst, size_t num_channels) { } } -int32_t WebRtcOpus_GetInDtx(OpusEncInst* inst) { - if (!inst) { - return -1; - } -#ifdef OPUS_GET_IN_DTX - int32_t in_dtx; - if (ENCODER_CTL(inst, OPUS_GET_IN_DTX(&in_dtx)) == 0) { - return in_dtx; - } -#endif - return -1; -} - int16_t WebRtcOpus_DecoderCreate(OpusDecInst** inst, size_t channels, int sample_rate_hz) { @@ -485,13 +395,8 @@ int16_t WebRtcOpus_DecoderCreate(OpusDecInst** inst, // Creation of memory all ok. state->channels = channels; state->sample_rate_hz = sample_rate_hz; - state->plc_use_prev_decoded_samples = - webrtc::field_trial::IsEnabled(kPlcUsePrevDecodedSamplesFieldTrial); - if (state->plc_use_prev_decoded_samples) { - state->prev_decoded_samples = - DefaultFrameSizePerChannel(state->sample_rate_hz); - } state->in_dtx_mode = 0; + state->last_packet_num_channels = channels; *inst = state; return 0; } @@ -529,12 +434,6 @@ int16_t WebRtcOpus_MultistreamDecoderCreate( // Creation of memory all ok. state->channels = channels; state->sample_rate_hz = 48000; - state->plc_use_prev_decoded_samples = - webrtc::field_trial::IsEnabled(kPlcUsePrevDecodedSamplesFieldTrial); - if (state->plc_use_prev_decoded_samples) { - state->prev_decoded_samples = - DefaultFrameSizePerChannel(state->sample_rate_hz); - } state->in_dtx_mode = 0; *inst = state; return 0; @@ -633,17 +532,6 @@ static int DecodePlc(OpusDecInst* inst, int16_t* decoded) { int plc_samples = FrameSizePerChannel(kWebRtcOpusPlcFrameSizeMs, inst->sample_rate_hz); - if (inst->plc_use_prev_decoded_samples) { - /* The number of samples we ask for is `number_of_lost_frames` times - * `prev_decoded_samples_`. Limit the number of samples to maximum - * `MaxFrameSizePerChannel()`. */ - plc_samples = inst->prev_decoded_samples; - const int max_samples_per_channel = - MaxFrameSizePerChannel(inst->sample_rate_hz); - plc_samples = plc_samples <= max_samples_per_channel - ? 
plc_samples - : max_samples_per_channel; - } decoded_samples = DecodeNative(inst, NULL, 0, plc_samples, decoded, &audio_type, 0); if (decoded_samples < 0) { @@ -658,26 +546,37 @@ int WebRtcOpus_Decode(OpusDecInst* inst, size_t encoded_bytes, int16_t* decoded, int16_t* audio_type) { - int decoded_samples; - + int decoded_samples_per_channel; if (encoded_bytes == 0) { *audio_type = DetermineAudioType(inst, encoded_bytes); - decoded_samples = DecodePlc(inst, decoded); + decoded_samples_per_channel = DecodePlc(inst, decoded); } else { - decoded_samples = DecodeNative(inst, encoded, encoded_bytes, - MaxFrameSizePerChannel(inst->sample_rate_hz), - decoded, audio_type, 0); + decoded_samples_per_channel = DecodeNative( + inst, encoded, encoded_bytes, + MaxFrameSizePerChannel(inst->sample_rate_hz), decoded, audio_type, 0); + + // TODO: https://issues.webrtc.org/376493209 - When fixed, remove block + // below. + inst->last_packet_num_channels = opus_packet_get_nb_channels(encoded); + RTC_DCHECK(inst->last_packet_num_channels == 1 || + inst->last_packet_num_channels == 2); } - if (decoded_samples < 0) { + if (decoded_samples_per_channel < 0) { return -1; } - if (inst->plc_use_prev_decoded_samples) { - /* Update decoded sample memory, to be used by the PLC in case of losses. */ - inst->prev_decoded_samples = decoded_samples; + // TODO: https://issues.webrtc.org/376493209 - When fixed, remove block below. + // When stereo decoding is enabled and the last observed non-empty packet + // encoded mono audio, the Opus decoder may generate non-trivial stereo audio. + // As that is undesired, in that case make sure that `decoded` contains + // trivial stereo audio by copying the left channel into the right one. + if (inst->channels == 2 && inst->last_packet_num_channels == 1) { + for (int i = 0; i < decoded_samples_per_channel << 1; i += 2) { + decoded[i + 1] = decoded[i]; + } } - return decoded_samples; + return decoded_samples_per_channel; } int WebRtcOpus_DecodeFec(OpusDecInst* inst, @@ -730,16 +629,6 @@ int WebRtcOpus_DurationEst(OpusDecInst* inst, } int WebRtcOpus_PlcDuration(OpusDecInst* inst) { - if (inst->plc_use_prev_decoded_samples) { - /* The number of samples we ask for is `number_of_lost_frames` times - * `prev_decoded_samples_`. Limit the number of samples to maximum - * `MaxFrameSizePerChannel()`. */ - const int plc_samples = inst->prev_decoded_samples; - const int max_samples_per_channel = - MaxFrameSizePerChannel(inst->sample_rate_hz); - return plc_samples <= max_samples_per_channel ? plc_samples - : max_samples_per_channel; - } return FrameSizePerChannel(kWebRtcOpusPlcFrameSizeMs, inst->sample_rate_hz); } diff --git a/modules/audio_coding/codecs/opus/opus_interface.h b/modules/audio_coding/codecs/opus/opus_interface.h index 89159ce1c0..dd64e82476 100644 --- a/modules/audio_coding/codecs/opus/opus_interface.h +++ b/modules/audio_coding/codecs/opus/opus_interface.h @@ -245,6 +245,20 @@ int16_t WebRtcOpus_DisableDtx(OpusEncInst* inst); */ int16_t WebRtcOpus_GetUseDtx(OpusEncInst* inst); +/**************************************************************************** + * WebRtcOpus_GetInDtx() + * + * This function reports whether the encoder is currently in DTX. + * + * Input: + * - inst : Encoder context + * + * Return value : 0 - Encoder is not in DTX. + * 1 - Encoder is in DTX. + * -1 - Error.
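The workaround added above (tracked by issues.webrtc.org/376493209) inspects the packet's channel count and, when a stereo decoder receives a mono packet, forces trivially identical left and right channels. The same logic expressed against raw libopus:

#include <opus.h>

// `interleaved` holds the decoder output (L/R interleaved);
// `samples_per_channel` is the value returned by the decode call.
void ForceTrivialStereoForMonoPacket(const unsigned char* packet,
                                     opus_int16* interleaved,
                                     int samples_per_channel,
                                     int decoder_channels) {
  const int packet_channels = opus_packet_get_nb_channels(packet);
  if (decoder_channels == 2 && packet_channels == 1) {
    for (int i = 0; i < samples_per_channel * 2; i += 2) {
      interleaved[i + 1] = interleaved[i];  // Copy left into right.
    }
  }
}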
+ */ +int16_t WebRtcOpus_GetInDtx(OpusEncInst* inst); + /**************************************************************************** * WebRtcOpus_EnableCbr() * @@ -320,20 +334,6 @@ int32_t WebRtcOpus_GetBandwidth(OpusEncInst* inst); */ int16_t WebRtcOpus_SetBandwidth(OpusEncInst* inst, int32_t bandwidth); -/* - * WebRtcOpus_GetInDtx(...) - * - * Gets the DTX state of the encoder. - * - * Input: - * - inst : Encoder context - * - * Return value : -1 - Error. - * 1 - Last encoded frame was comfort noise update during DTX. - * 0 - Last encoded frame was encoded with encoder not in DTX. - */ -int32_t WebRtcOpus_GetInDtx(OpusEncInst* inst); - /* * WebRtcOpus_SetForceChannels(...) * @@ -421,9 +421,7 @@ void WebRtcOpus_DecoderInit(OpusDecInst* inst); * * Output: * - decoded : The decoded vector - * - audio_type : 1 normal, 2 CNG (for Opus it should - * always return 1 since we're not using Opus's - * built-in DTX/CNG scheme) + * - audio_type : 1 normal, 2 CNG * * Return value : >0 - Samples per channel in decoded vector * -1 - Error diff --git a/modules/audio_coding/codecs/opus/opus_unittest.cc b/modules/audio_coding/codecs/opus/opus_unittest.cc index 4a9156ad58..0d04f1dc3b 100644 --- a/modules/audio_coding/codecs/opus/opus_unittest.cc +++ b/modules/audio_coding/codecs/opus/opus_unittest.cc @@ -94,7 +94,7 @@ void CreateSingleOrMultiStreamDecoder(WebRtcOpusDecInst** opus_decoder, } int SamplesPerChannel(int sample_rate_hz, int duration_ms) { - const int samples_per_ms = rtc::CheckedDivExact(sample_rate_hz, 1000); + const int samples_per_ms = CheckedDivExact(sample_rate_hz, 1000); return samples_per_ms * duration_ms; } @@ -122,7 +122,7 @@ class OpusTest void PrepareSpeechData(int block_length_ms, int loop_length_ms); int EncodeDecode(WebRtcOpusEncInst* encoder, - rtc::ArrayView input_audio, + ArrayView input_audio, WebRtcOpusDecInst* decoder, int16_t* output_audio, int16_t* audio_type); @@ -180,14 +180,13 @@ void OpusTest::PrepareSpeechData(int block_length_ms, int loop_length_ms) { if (loop_length_ms < block_length_ms) { loop_length_ms = block_length_ms; } - const int sample_rate_khz = - rtc::CheckedDivExact(encoder_sample_rate_hz_, 1000); + const int sample_rate_khz = CheckedDivExact(encoder_sample_rate_hz_, 1000); EXPECT_TRUE(speech_data_.Init(file_name, loop_length_ms * sample_rate_khz * channels_, block_length_ms * sample_rate_khz * channels_)); } -void OpusTest::SetMaxPlaybackRate(WebRtcOpusEncInst* encoder, +void OpusTest::SetMaxPlaybackRate(WebRtcOpusEncInst* /* encoder */, opus_int32 expect, int32_t set) { opus_int32 bandwidth; @@ -209,12 +208,12 @@ void OpusTest::CheckAudioBounded(const int16_t* audio, } int OpusTest::EncodeDecode(WebRtcOpusEncInst* encoder, - rtc::ArrayView input_audio, + ArrayView input_audio, WebRtcOpusDecInst* decoder, int16_t* output_audio, int16_t* audio_type) { const int input_samples_per_channel = - rtc::CheckedDivExact(input_audio.size(), channels_); + CheckedDivExact(input_audio.size(), channels_); int encoded_bytes_int = WebRtcOpus_Encode(encoder, input_audio.data(), input_samples_per_channel, kMaxBytes, bitstream_); @@ -248,9 +247,9 @@ int OpusTest::EncodeDecode(WebRtcOpusEncInst* encoder, void OpusTest::TestDtxEffect(bool dtx, int block_length_ms) { PrepareSpeechData(block_length_ms, 2000); const size_t input_samples = - rtc::CheckedDivExact(encoder_sample_rate_hz_, 1000) * block_length_ms; + CheckedDivExact(encoder_sample_rate_hz_, 1000) * block_length_ms; const size_t output_samples = - rtc::CheckedDivExact(decoder_sample_rate_hz_, 1000) * block_length_ms; 
+ CheckedDivExact(decoder_sample_rate_hz_, 1000) * block_length_ms; // Create encoder memory. CreateSingleOrMultiStreamEncoder(&opus_encoder_, channels_, application_, @@ -417,7 +416,7 @@ void OpusTest::TestDtxEffect(bool dtx, int block_length_ms) { void OpusTest::TestCbrEffect(bool cbr, int block_length_ms) { PrepareSpeechData(block_length_ms, 2000); const size_t output_samples = - rtc::CheckedDivExact(decoder_sample_rate_hz_, 1000) * block_length_ms; + CheckedDivExact(decoder_sample_rate_hz_, 1000) * block_length_ms; int32_t max_pkt_size_diff = 0; int32_t prev_pkt_size = 0; @@ -448,7 +447,7 @@ void OpusTest::TestCbrEffect(bool cbr, int block_length_ms) { int32_t diff = std::abs((int32_t)encoded_bytes_ - prev_pkt_size); max_pkt_size_diff = std::max(max_pkt_size_diff, diff); } - prev_pkt_size = rtc::checked_cast(encoded_bytes_); + prev_pkt_size = checked_cast(encoded_bytes_); } if (cbr) { @@ -747,7 +746,8 @@ TEST_P(OpusTest, OpusDtxOn) { TestDtxEffect(true, 40); } -TEST_P(OpusTest, OpusCbrOff) { +// TODO: https://issues.webrtc.org/411157363 - reenable test after update. +TEST_P(OpusTest, DISABLED_OpusCbrOff) { TestCbrEffect(false, 10); TestCbrEffect(false, 20); TestCbrEffect(false, 40); @@ -851,10 +851,10 @@ TEST_P(OpusTest, OpusDurationEstimation) { // 10 ms. We use only first 10 ms of a 20 ms block. auto speech_block = speech_data_.GetNextBlock(); - int encoded_bytes_int = WebRtcOpus_Encode( - opus_encoder_, speech_block.data(), - rtc::CheckedDivExact(speech_block.size(), 2 * channels_), kMaxBytes, - bitstream_); + int encoded_bytes_int = + WebRtcOpus_Encode(opus_encoder_, speech_block.data(), + CheckedDivExact(speech_block.size(), 2 * channels_), + kMaxBytes, bitstream_); EXPECT_GE(encoded_bytes_int, 0); EXPECT_EQ(SamplesPerChannel(decoder_sample_rate_hz_, /*ms=*/10), WebRtcOpus_DurationEst(opus_decoder_, bitstream_, @@ -862,10 +862,9 @@ TEST_P(OpusTest, OpusDurationEstimation) { // 20 ms speech_block = speech_data_.GetNextBlock(); - encoded_bytes_int = - WebRtcOpus_Encode(opus_encoder_, speech_block.data(), - rtc::CheckedDivExact(speech_block.size(), channels_), - kMaxBytes, bitstream_); + encoded_bytes_int = WebRtcOpus_Encode( + opus_encoder_, speech_block.data(), + CheckedDivExact(speech_block.size(), channels_), kMaxBytes, bitstream_); EXPECT_GE(encoded_bytes_int, 0); EXPECT_EQ(SamplesPerChannel(decoder_sample_rate_hz_, /*ms=*/20), WebRtcOpus_DurationEst(opus_decoder_, bitstream_, @@ -914,12 +913,11 @@ TEST_P(OpusTest, OpusDecodeRepacketized) { constexpr size_t kMaxCycles = 100; for (size_t idx = 0; idx < kMaxCycles; ++idx) { auto speech_block = speech_data_.GetNextBlock(); - encoded_bytes_ = - WebRtcOpus_Encode(opus_encoder_, speech_block.data(), - rtc::CheckedDivExact(speech_block.size(), channels_), - kMaxBytes, bitstream_); + encoded_bytes_ = WebRtcOpus_Encode( + opus_encoder_, speech_block.data(), + CheckedDivExact(speech_block.size(), channels_), kMaxBytes, bitstream_); if (opus_repacketizer_cat(rp, bitstream_, - rtc::checked_cast(encoded_bytes_)) == + checked_cast(encoded_bytes_)) == OPUS_OK) { ++num_packets; if (num_packets == kPackets) { diff --git a/modules/audio_coding/codecs/opus/test/blocker_unittest.cc b/modules/audio_coding/codecs/opus/test/blocker_unittest.cc index 9c8e789ba9..edca73980e 100644 --- a/modules/audio_coding/codecs/opus/test/blocker_unittest.cc +++ b/modules/audio_coding/codecs/opus/test/blocker_unittest.cc @@ -22,7 +22,7 @@ class PlusThreeBlockerCallback : public webrtc::BlockerCallback { public: void ProcessBlock(const float* const* input, size_t 
num_frames, - size_t num_input_channels, + size_t /* num_input_channels */, size_t num_output_channels, float* const* output) override { for (size_t i = 0; i < num_output_channels; ++i) { @@ -38,7 +38,7 @@ class CopyBlockerCallback : public webrtc::BlockerCallback { public: void ProcessBlock(const float* const* input, size_t num_frames, - size_t num_input_channels, + size_t /* num_input_channels */, size_t num_output_channels, float* const* output) override { for (size_t i = 0; i < num_output_channels; ++i) { diff --git a/modules/audio_coding/codecs/opus/test/lapped_transform.h b/modules/audio_coding/codecs/opus/test/lapped_transform.h index bb25c34a9e..5a67d31801 100644 --- a/modules/audio_coding/codecs/opus/test/lapped_transform.h +++ b/modules/audio_coding/codecs/opus/test/lapped_transform.h @@ -162,7 +162,6 @@ class LappedTransform { Callback* const block_processor_; Blocker blocker_; - // TODO(alessiob): Replace RealFourier with a different FFT library. std::unique_ptr fft_; const size_t cplx_length_; AlignedArray real_buf_; diff --git a/modules/audio_coding/codecs/opus/test/lapped_transform_unittest.cc b/modules/audio_coding/codecs/opus/test/lapped_transform_unittest.cc index 1003ed52e5..8273f8d7f8 100644 --- a/modules/audio_coding/codecs/opus/test/lapped_transform_unittest.cc +++ b/modules/audio_coding/codecs/opus/test/lapped_transform_unittest.cc @@ -50,7 +50,7 @@ class FftCheckerCallback : public webrtc::LappedTransform::Callback { size_t in_channels, size_t frames, size_t out_channels, - complex* const* out_block) override { + complex* const* /* out_block */) override { RTC_CHECK_EQ(in_channels, out_channels); size_t full_length = (frames - 1) * 2; diff --git a/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc b/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc index 7761efe8b3..79f3562ed9 100644 --- a/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc +++ b/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc @@ -53,18 +53,23 @@ int AudioDecoderPcm16B::DecodeInternal(const uint8_t* encoded, } std::vector AudioDecoderPcm16B::ParsePayload( - rtc::Buffer&& payload, + Buffer&& payload, uint32_t timestamp) { - const int samples_per_ms = rtc::CheckedDivExact(sample_rate_hz_, 1000); + const int samples_per_ms = CheckedDivExact(sample_rate_hz_, 1000); return LegacyEncodedAudioFrame::SplitBySamples( this, std::move(payload), timestamp, samples_per_ms * 2 * num_channels_, samples_per_ms); } -int AudioDecoderPcm16B::PacketDuration(const uint8_t* encoded, +int AudioDecoderPcm16B::PacketDuration(const uint8_t* /* encoded */, size_t encoded_len) const { // Two encoded byte per sample per channel. 
return static_cast(encoded_len / (2 * Channels())); } +int AudioDecoderPcm16B::PacketDurationRedundant(const uint8_t* encoded, + size_t encoded_len) const { + return PacketDuration(encoded, encoded_len); +} + } // namespace webrtc diff --git a/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h b/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h index 6f50161d3f..1945887cf9 100644 --- a/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h +++ b/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h @@ -29,9 +29,11 @@ class AudioDecoderPcm16B final : public AudioDecoder { AudioDecoderPcm16B& operator=(const AudioDecoderPcm16B&) = delete; void Reset() override; - std::vector ParsePayload(rtc::Buffer&& payload, + std::vector ParsePayload(Buffer&& payload, uint32_t timestamp) override; int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override; + int PacketDurationRedundant(const uint8_t* encoded, + size_t encoded_len) const override; int SampleRateHz() const override; size_t Channels() const override; diff --git a/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc b/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc index 634f14d370..660ab91fa7 100644 --- a/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc +++ b/modules/audio_coding/codecs/red/audio_encoder_copy_red.cc @@ -62,7 +62,7 @@ AudioEncoderCopyRed::AudioEncoderCopyRed(Config&& config, auto number_of_redundant_encodings = GetMaxRedundancyFromFieldTrial(field_trials); for (size_t i = 0; i < number_of_redundant_encodings; i++) { - std::pair redundant; + std::pair redundant; redundant.second.EnsureCapacity(kAudioMaxRtpPacketLen); redundant_encodings_.push_front(std::move(redundant)); } @@ -96,8 +96,8 @@ int AudioEncoderCopyRed::GetTargetBitrate() const { AudioEncoder::EncodedInfo AudioEncoderCopyRed::EncodeImpl( uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) { + ArrayView audio, + Buffer* encoded) { primary_encoded_.Clear(); EncodedInfo info = speech_encoder_->Encode(rtp_timestamp, audio, &primary_encoded_); @@ -149,8 +149,8 @@ AudioEncoder::EncodedInfo AudioEncoderCopyRed::EncodeImpl( const uint32_t timestamp_delta = info.encoded_timestamp - it->first.encoded_timestamp; encoded->data()[header_offset] = it->first.payload_type | 0x80; - rtc::SetBE16(static_cast(encoded->data()) + header_offset + 1, - (timestamp_delta << 2) | (it->first.encoded_bytes >> 8)); + SetBE16(static_cast(encoded->data()) + header_offset + 1, + (timestamp_delta << 2) | (it->first.encoded_bytes >> 8)); encoded->data()[header_offset + 3] = it->first.encoded_bytes & 0xff; header_offset += kRedHeaderLength; info.redundant.push_back(it->first); @@ -193,7 +193,7 @@ void AudioEncoderCopyRed::Reset() { auto number_of_redundant_encodings = redundant_encodings_.size(); redundant_encodings_.clear(); for (size_t i = 0; i < number_of_redundant_encodings; i++) { - std::pair redundant; + std::pair redundant; redundant.second.EnsureCapacity(kAudioMaxRtpPacketLen); redundant_encodings_.push_front(std::move(redundant)); } @@ -237,7 +237,7 @@ void AudioEncoderCopyRed::OnReceivedUplinkPacketLossFraction( void AudioEncoderCopyRed::OnReceivedUplinkBandwidth( int target_audio_bitrate_bps, - absl::optional bwe_period_ms) { + std::optional bwe_period_ms) { speech_encoder_->OnReceivedUplinkBandwidth(target_audio_bitrate_bps, bwe_period_ms); } @@ -247,7 +247,7 @@ void AudioEncoderCopyRed::OnReceivedUplinkAllocation( speech_encoder_->OnReceivedUplinkAllocation(update); } -absl::optional> +std::optional> 
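The EncodeImpl() change above keeps the RFC 2198 RED header packing and only drops the rtc:: prefix on SetBE16. For reference, the 4-byte header of a non-final redundant block packs as follows (a sketch that writes the bytes by hand instead of going through SetBE16):

#include <cstdint>

// F bit (1) | block payload type (7) | timestamp offset (14) | length (10).
// `timestamp_delta` must fit in 14 bits and `block_length` in 10 bits.
void WriteRedBlockHeader(uint8_t* header,
                         uint8_t payload_type,
                         uint32_t timestamp_delta,
                         uint16_t block_length) {
  header[0] = payload_type | 0x80;  // F bit set: another block follows.
  const uint16_t middle =
      static_cast<uint16_t>((timestamp_delta << 2) | (block_length >> 8));
  header[1] = middle >> 8;  // Big-endian, matching SetBE16().
  header[2] = middle & 0xff;
  header[3] = block_length & 0xff;
}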
AudioEncoderCopyRed::GetFrameLengthRange() const { return speech_encoder_->GetFrameLengthRange(); } @@ -271,9 +271,9 @@ ANAStats AudioEncoderCopyRed::GetANAStats() const { return speech_encoder_->GetANAStats(); } -rtc::ArrayView> +ArrayView> AudioEncoderCopyRed::ReclaimContainedEncoders() { - return rtc::ArrayView>(&speech_encoder_, 1); + return ArrayView>(&speech_encoder_, 1); } } // namespace webrtc diff --git a/modules/audio_coding/codecs/red/audio_encoder_copy_red.h b/modules/audio_coding/codecs/red/audio_encoder_copy_red.h index 359b5eaa17..09d21325e2 100644 --- a/modules/audio_coding/codecs/red/audio_encoder_copy_red.h +++ b/modules/audio_coding/codecs/red/audio_encoder_copy_red.h @@ -16,9 +16,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio_codecs/audio_encoder.h" #include "api/field_trials_view.h" @@ -70,31 +70,29 @@ class AudioEncoderCopyRed final : public AudioEncoder { void DisableAudioNetworkAdaptor() override; void OnReceivedUplinkPacketLossFraction( float uplink_packet_loss_fraction) override; - void OnReceivedUplinkBandwidth( - int target_audio_bitrate_bps, - absl::optional bwe_period_ms) override; + void OnReceivedUplinkBandwidth(int target_audio_bitrate_bps, + std::optional bwe_period_ms) override; void OnReceivedUplinkAllocation(BitrateAllocationUpdate update) override; void OnReceivedRtt(int rtt_ms) override; void OnReceivedOverhead(size_t overhead_bytes_per_packet) override; void SetReceiverFrameLengthRange(int min_frame_length_ms, int max_frame_length_ms) override; ANAStats GetANAStats() const override; - absl::optional> GetFrameLengthRange() + std::optional> GetFrameLengthRange() const override; - rtc::ArrayView> ReclaimContainedEncoders() - override; + ArrayView> ReclaimContainedEncoders() override; protected: EncodedInfo EncodeImpl(uint32_t rtp_timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) override; + ArrayView audio, + Buffer* encoded) override; private: std::unique_ptr speech_encoder_; - rtc::Buffer primary_encoded_; + Buffer primary_encoded_; size_t max_packet_length_; int red_payload_type_; - std::list> redundant_encodings_; + std::list> redundant_encodings_; }; } // namespace webrtc diff --git a/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc b/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc index e9b1b079ca..92dec2c835 100644 --- a/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc +++ b/modules/audio_coding/codecs/red/audio_encoder_copy_red_unittest.cc @@ -63,10 +63,9 @@ class AudioEncoderCopyRedTest : public ::testing::Test { ASSERT_TRUE(red_.get() != NULL); encoded_.Clear(); encoded_info_ = red_->Encode( - timestamp_, - rtc::ArrayView(audio_, num_audio_samples_10ms), + timestamp_, ArrayView(audio_, num_audio_samples_10ms), &encoded_); - timestamp_ += rtc::checked_cast(num_audio_samples_10ms); + timestamp_ += checked_cast(num_audio_samples_10ms); } test::ScopedKeyValueConfig field_trials_; @@ -76,7 +75,7 @@ class AudioEncoderCopyRedTest : public ::testing::Test { int16_t audio_[kMaxNumSamples]; const int sample_rate_hz_; size_t num_audio_samples_10ms; - rtc::Buffer encoded_; + Buffer encoded_; AudioEncoder::EncodedInfo encoded_info_; const int red_payload_type_; }; @@ -106,8 +105,8 @@ TEST_F(AudioEncoderCopyRedTest, CheckMaxFrameSizePropagation) { TEST_F(AudioEncoderCopyRedTest, CheckTargetAudioBitratePropagation) { EXPECT_CALL(*mock_encoder_, - OnReceivedUplinkBandwidth(4711, absl::optional())); - 
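The header packing in AudioEncoderCopyRed::EncodeImpl earlier in this diff writes the four-byte RFC 2198 block header: an F bit, a 7-bit payload type, a 14-bit timestamp offset and a 10-bit block length, with the middle 16 bits written big-endian via SetBE16. A hedged sketch of the same packing with plain shifts; the function name is illustrative.

#include <array>
#include <cstdint>

// Packs a non-final RED block header (F bit set), as described in RFC 2198.
std::array<uint8_t, 4> PackRedBlockHeader(uint8_t payload_type,
                                          uint32_t timestamp_offset,  // 14 bit
                                          uint16_t block_length) {    // 10 bit
  std::array<uint8_t, 4> header;
  header[0] = 0x80 | (payload_type & 0x7f);  // F = 1, then block payload type.
  const uint16_t middle = static_cast<uint16_t>((timestamp_offset << 2) |
                                                (block_length >> 8));
  header[1] = static_cast<uint8_t>(middle >> 8);   // big-endian, like SetBE16.
  header[2] = static_cast<uint8_t>(middle & 0xff);
  header[3] = static_cast<uint8_t>(block_length & 0xff);  // low 8 length bits.
  return header;
}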
red_->OnReceivedUplinkBandwidth(4711, absl::nullopt); + OnReceivedUplinkBandwidth(4711, std::optional())); + red_->OnReceivedUplinkBandwidth(4711, std::nullopt); } TEST_F(AudioEncoderCopyRedTest, CheckPacketLossFractionPropagation) { @@ -119,7 +118,7 @@ TEST_F(AudioEncoderCopyRedTest, CheckGetFrameLengthRangePropagation) { auto expected_range = std::make_pair(TimeDelta::Millis(20), TimeDelta::Millis(20)); EXPECT_CALL(*mock_encoder_, GetFrameLengthRange()) - .WillRepeatedly(Return(absl::make_optional(expected_range))); + .WillRepeatedly(Return(std::make_optional(expected_range))); EXPECT_THAT(red_->GetFrameLengthRange(), Optional(Eq(expected_range))); } diff --git a/modules/audio_coding/include/audio_coding_module.h b/modules/audio_coding/include/audio_coding_module.h index 5c5bd167c0..d4a6a39b66 100644 --- a/modules/audio_coding/include/audio_coding_module.h +++ b/modules/audio_coding/include/audio_coding_module.h @@ -12,11 +12,11 @@ #define MODULES_AUDIO_CODING_INCLUDE_AUDIO_CODING_MODULE_H_ #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/audio_codecs/audio_encoder.h" #include "api/function_view.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" @@ -39,17 +39,17 @@ class AudioPacketizationCallback { uint32_t timestamp, const uint8_t* payload_data, size_t payload_len_bytes, - int64_t absolute_capture_timestamp_ms) { + int64_t /* absolute_capture_timestamp_ms */) { // TODO(bugs.webrtc.org/10739): Deprecate the old SendData and make this one // pure virtual. return SendData(frame_type, payload_type, timestamp, payload_data, payload_len_bytes); } - virtual int32_t SendData(AudioFrameType frame_type, - uint8_t payload_type, - uint32_t timestamp, - const uint8_t* payload_data, - size_t payload_len_bytes) { + virtual int32_t SendData(AudioFrameType /* frame_type */, + uint8_t /* payload_type */, + uint32_t /* timestamp */, + const uint8_t* /* payload_data */, + size_t /* payload_len_bytes */) { RTC_DCHECK_NOTREACHED() << "This method must be overridden, or not used."; return -1; } @@ -69,7 +69,7 @@ class AudioCodingModule { // access to the unique_ptr; it may call the encoder, steal the encoder and // replace it with another encoder or with nullptr, etc. virtual void ModifyEncoder( - rtc::FunctionView*)> modifier) = 0; + FunctionView*)> modifier) = 0; // Utility method for simply replacing the existing encoder with a new one. void SetEncoder(std::unique_ptr new_encoder) { @@ -78,6 +78,10 @@ class AudioCodingModule { }); } + // Reset encoder and audio coding module. This throws away any audio passed + // and starts fresh. + virtual void Reset() = 0; + // int32_t RegisterTransportCallback() // Register a transport callback which will be called to deliver // the encoded buffers whenever Process() is called and a diff --git a/modules/audio_coding/include/audio_coding_module_typedefs.h b/modules/audio_coding/include/audio_coding_module_typedefs.h index 4b880fb633..f375108195 100644 --- a/modules/audio_coding/include/audio_coding_module_typedefs.h +++ b/modules/audio_coding/include/audio_coding_module_typedefs.h @@ -94,6 +94,7 @@ struct NetworkStatistics { uint64_t removedSamplesForAcceleration; uint64_t fecPacketsReceived; uint64_t fecPacketsDiscarded; + uint64_t totalProcessingDelayUs; // Stats below correspond to similarly-named fields in the WebRTC stats spec. 
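AudioPacketizationCallback above is the hook the audio coding module invokes for every encoded frame; the six-argument SendData simply forwards to the legacy five-argument overload. A minimal sketch of an override, assuming the header above is on the include path; the logging body is illustrative, and a real callback would hand the payload to an RTP sender.

#include <cstddef>
#include <cstdint>
#include <cstdio>

#include "modules/audio_coding/include/audio_coding_module.h"

class LoggingPacketizationCallback
    : public webrtc::AudioPacketizationCallback {
 public:
  int32_t SendData(webrtc::AudioFrameType /* frame_type */,
                   uint8_t payload_type,
                   uint32_t timestamp,
                   const uint8_t* /* payload_data */,
                   size_t payload_len_bytes) override {
    // Illustrative only: print what would be packetized.
    std::printf("payload_type=%u timestamp=%u bytes=%zu\n", payload_type,
                timestamp, payload_len_bytes);
    return 0;
  }
};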
// https://w3c.github.io/webrtc-stats/#dom-rtcreceivedrtpstreamstats uint64_t packetsDiscarded; diff --git a/modules/audio_coding/neteq/accelerate.cc b/modules/audio_coding/neteq/accelerate.cc index 06a38cc534..c15ab73d67 100644 --- a/modules/audio_coding/neteq/accelerate.cc +++ b/modules/audio_coding/neteq/accelerate.cc @@ -26,8 +26,7 @@ Accelerate::ReturnCodes Accelerate::Process(const int16_t* input, input_length / num_channels_ < (2 * k15ms - 1) * fs_mult_) { // Length of input data too short to do accelerate. Simply move all data // from input to output. - output->PushBackInterleaved( - rtc::ArrayView(input, input_length)); + output->PushBackInterleaved(ArrayView(input, input_length)); return kError; } return TimeStretch::Process(input, input_length, fast_accelerate, output, @@ -70,15 +69,15 @@ Accelerate::ReturnCodes Accelerate::CheckCriteriaAndStretch( RTC_DCHECK_GE(fs_mult_120, peak_index); // Should be handled in Process(). // Copy first part; 0 to 15 ms. output->PushBackInterleaved( - rtc::ArrayView(input, fs_mult_120 * num_channels_)); + ArrayView(input, fs_mult_120 * num_channels_)); // Copy the `peak_index` starting at 15 ms to `temp_vector`. AudioMultiVector temp_vector(num_channels_); - temp_vector.PushBackInterleaved(rtc::ArrayView( + temp_vector.PushBackInterleaved(ArrayView( &input[fs_mult_120 * num_channels_], peak_index * num_channels_)); // Cross-fade `temp_vector` onto the end of `output`. output->CrossFade(temp_vector, peak_index); // Copy the last unmodified part, 15 ms + pitch period until the end. - output->PushBackInterleaved(rtc::ArrayView( + output->PushBackInterleaved(ArrayView( &input[(fs_mult_120 + peak_index) * num_channels_], input_length - (fs_mult_120 + peak_index) * num_channels_)); @@ -89,8 +88,7 @@ Accelerate::ReturnCodes Accelerate::CheckCriteriaAndStretch( } } else { // Accelerate not allowed. Simply move all data from decoded to outData. 
- output->PushBackInterleaved( - rtc::ArrayView(input, input_length)); + output->PushBackInterleaved(ArrayView(input, input_length)); return kNoStretch; } } diff --git a/modules/audio_coding/neteq/audio_decoder_unittest.cc b/modules/audio_coding/neteq/audio_decoder_unittest.cc index fef3c3c1e4..06a088299c 100644 --- a/modules/audio_coding/neteq/audio_decoder_unittest.cc +++ b/modules/audio_coding/neteq/audio_decoder_unittest.cc @@ -16,17 +16,17 @@ #include #include "api/audio_codecs/opus/audio_encoder_opus.h" +#include "api/environment/environment_factory.h" #include "modules/audio_coding/codecs/g711/audio_decoder_pcm.h" #include "modules/audio_coding/codecs/g711/audio_encoder_pcm.h" #include "modules/audio_coding/codecs/g722/audio_decoder_g722.h" #include "modules/audio_coding/codecs/g722/audio_encoder_g722.h" -#include "modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h" -#include "modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h" #include "modules/audio_coding/codecs/opus/audio_decoder_opus.h" #include "modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h" #include "modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h" #include "modules/audio_coding/neteq/tools/resample_input_audio_file.h" #include "rtc_base/system/arch.h" +#include "test/explicit_key_value_config.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" @@ -34,6 +34,8 @@ namespace webrtc { namespace { +using test::ExplicitKeyValueConfig; + constexpr int kOverheadBytesPerPacket = 50; // The absolute difference between the input and output (the first channel) is @@ -120,7 +122,7 @@ class AudioDecoderTest : public ::testing::Test { // implementations are gone. virtual int EncodeFrame(const int16_t* input, size_t input_len_samples, - rtc::Buffer* output) { + Buffer* output) { AudioEncoder::EncodedInfo encoded_info; const size_t samples_per_10ms = audio_encoder_->SampleRateHz() / 100; RTC_CHECK_EQ(samples_per_10ms * audio_encoder_->Num10MsFramesInNextPacket(), @@ -136,13 +138,12 @@ class AudioDecoderTest : public ::testing::Test { samples_per_10ms, channels_, interleaved_input.get()); - encoded_info = - audio_encoder_->Encode(0, - rtc::ArrayView( - interleaved_input.get(), - audio_encoder_->NumChannels() * - audio_encoder_->SampleRateHz() / 100), - output); + encoded_info = audio_encoder_->Encode( + 0, + ArrayView(interleaved_input.get(), + audio_encoder_->NumChannels() * + audio_encoder_->SampleRateHz() / 100), + output); } EXPECT_EQ(payload_type_, encoded_info.payload_type); return static_cast(encoded_info.encoded_bytes); @@ -173,7 +174,7 @@ class AudioDecoderTest : public ::testing::Test { ASSERT_GE(input.size() - processed_samples, frame_size_); ASSERT_TRUE(input_audio_.Read(frame_size_, codec_input_rate_hz_, &input[processed_samples])); - rtc::Buffer encoded; + Buffer encoded; size_t enc_len = EncodeFrame(&input[processed_samples], frame_size_, &encoded); // Make sure that frame_size_ * channels_ samples are allocated and free. 
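Accelerate::CheckCriteriaAndStretch above shortens the signal by one pitch period: the first 15 ms is copied through, the next peak_index samples are cross-faded onto the tail of the output, and the remainder is appended untouched. A single-channel sketch of a linear cross-fade step follows; it is a simplification, since AudioMultiVector::CrossFade itself is not part of this diff and its exact weighting is assumed.

#include <cstddef>
#include <cstdint>
#include <vector>

// Fades `incoming` in while fading the last `length` samples of `output` out.
// Assumes output.size() >= length.
void CrossFadeTail(std::vector<int16_t>& output,
                   const int16_t* incoming,
                   size_t length) {
  const size_t start = output.size() - length;
  for (size_t i = 0; i < length; ++i) {
    const float w = static_cast<float>(i + 1) / static_cast<float>(length + 1);
    output[start + i] = static_cast<int16_t>((1.0f - w) * output[start + i] +
                                             w * incoming[i]);
  }
}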
@@ -183,8 +184,8 @@ class AudioDecoderTest : public ::testing::Test { decoder_->ParsePayload(std::move(encoded), /*timestamp=*/0); RTC_CHECK_EQ(parse_result.size(), size_t{1}); auto decode_result = parse_result[0].frame->Decode( - rtc::ArrayView(&decoded[processed_samples * channels_], - frame_size_ * channels_ * sizeof(int16_t))); + ArrayView(&decoded[processed_samples * channels_], + frame_size_ * channels_ * sizeof(int16_t))); RTC_CHECK(decode_result.has_value()); EXPECT_EQ(frame_size_ * channels_, decode_result->num_decoded_samples); encoded_bytes += enc_len; @@ -213,7 +214,7 @@ class AudioDecoderTest : public ::testing::Test { std::unique_ptr input(new int16_t[frame_size_]); ASSERT_TRUE( input_audio_.Read(frame_size_, codec_input_rate_hz_, input.get())); - std::array encoded; + std::array encoded; EncodeFrame(input.get(), frame_size_, &encoded[0]); // Make a copy. encoded[1].SetData(encoded[0].data(), encoded[0].size()); @@ -238,7 +239,7 @@ class AudioDecoderTest : public ::testing::Test { std::unique_ptr input(new int16_t[frame_size_]); ASSERT_TRUE( input_audio_.Read(frame_size_, codec_input_rate_hz_, input.get())); - rtc::Buffer encoded; + Buffer encoded; EncodeFrame(input.get(), frame_size_, &encoded); decoder_->Reset(); std::vector output(frame_size_ * channels_); @@ -309,40 +310,6 @@ class AudioDecoderPcm16BTest : public AudioDecoderTest { } }; -class AudioDecoderIlbcTest : public AudioDecoderTest { - protected: - AudioDecoderIlbcTest() : AudioDecoderTest() { - codec_input_rate_hz_ = 8000; - frame_size_ = 240; - data_length_ = 10 * frame_size_; - decoder_ = new AudioDecoderIlbcImpl; - RTC_DCHECK(decoder_); - AudioEncoderIlbcConfig config; - config.frame_size_ms = 30; - audio_encoder_.reset(new AudioEncoderIlbcImpl(config, payload_type_)); - } - - // Overload the default test since iLBC's function WebRtcIlbcfix_NetEqPlc does - // not return any data. It simply resets a few states and returns 0. - void DecodePlcTest() { - InitEncoder(); - std::unique_ptr input(new int16_t[frame_size_]); - ASSERT_TRUE( - input_audio_.Read(frame_size_, codec_input_rate_hz_, input.get())); - rtc::Buffer encoded; - size_t enc_len = EncodeFrame(input.get(), frame_size_, &encoded); - AudioDecoder::SpeechType speech_type; - decoder_->Reset(); - std::unique_ptr output(new int16_t[frame_size_ * channels_]); - size_t dec_len = decoder_->Decode( - encoded.data(), enc_len, codec_input_rate_hz_, - frame_size_ * channels_ * sizeof(int16_t), output.get(), &speech_type); - EXPECT_EQ(frame_size_, dec_len); - // Simply call DecodePlc and verify that we get 0 as return value. - EXPECT_EQ(0U, decoder_->DecodePlc(1, output.get())); - } -}; - class AudioDecoderG722Test : public AudioDecoderTest { protected: AudioDecoderG722Test() : AudioDecoderTest() { @@ -381,10 +348,10 @@ class AudioDecoderOpusTest AudioDecoderOpusTest() : AudioDecoderTest() { channels_ = opus_num_channels_; codec_input_rate_hz_ = opus_sample_rate_hz_; - frame_size_ = rtc::CheckedDivExact(opus_sample_rate_hz_, 100); + frame_size_ = CheckedDivExact(opus_sample_rate_hz_, 100); data_length_ = 10 * frame_size_; - decoder_ = - new AudioDecoderOpusImpl(opus_num_channels_, opus_sample_rate_hz_); + decoder_ = new AudioDecoderOpusImpl( + ExplicitKeyValueConfig(""), opus_num_channels_, opus_sample_rate_hz_); AudioEncoderOpusConfig config; config.frame_size_ms = 10; config.sample_rate_hz = opus_sample_rate_hz_; @@ -392,7 +359,8 @@ class AudioDecoderOpusTest config.application = opus_num_channels_ == 1 ? 
AudioEncoderOpusConfig::ApplicationMode::kVoip : AudioEncoderOpusConfig::ApplicationMode::kAudio; - audio_encoder_ = AudioEncoderOpus::MakeAudioEncoder(config, payload_type_); + audio_encoder_ = AudioEncoderOpus::MakeAudioEncoder( + CreateEnvironment(), config, {.payload_type = payload_type_}); audio_encoder_->OnReceivedOverhead(kOverheadBytesPerPacket); } const int opus_sample_rate_hz_{std::get<0>(GetParam())}; @@ -414,7 +382,7 @@ TEST_F(AudioDecoderPcmUTest, EncodeDecode) { namespace { int SetAndGetTargetBitrate(AudioEncoder* audio_encoder, int rate) { - audio_encoder->OnReceivedUplinkBandwidth(rate, absl::nullopt); + audio_encoder->OnReceivedUplinkBandwidth(rate, std::nullopt); return audio_encoder->GetTargetBitrate(); } void TestSetAndGetTargetBitratesWithFixedCodec(AudioEncoder* audio_encoder, @@ -455,21 +423,12 @@ TEST_F(AudioDecoderPcm16BTest, SetTargetBitrate) { codec_input_rate_hz_ * 16); } -TEST_F(AudioDecoderIlbcTest, EncodeDecode) { - int tolerance = 6808; - double mse = 2.13e6; - int delay = 80; // Delay from input to output. - EncodeDecodeTest(500, tolerance, mse, delay); - ReInitTest(); - EXPECT_TRUE(decoder_->HasDecodePlc()); - DecodePlcTest(); -} - -TEST_F(AudioDecoderIlbcTest, SetTargetBitrate) { - TestSetAndGetTargetBitratesWithFixedCodec(audio_encoder_.get(), 13333); -} - +// TODO(bugs.webrtc.org/345525069): Either fix/enable or remove G722. +#if defined(__has_feature) && __has_feature(undefined_behavior_sanitizer) +TEST_F(AudioDecoderG722Test, DISABLED_EncodeDecode) { +#else TEST_F(AudioDecoderG722Test, EncodeDecode) { +#endif int tolerance = 6176; double mse = 238630.0; int delay = 22; // Delay from input to output. @@ -482,7 +441,12 @@ TEST_F(AudioDecoderG722Test, SetTargetBitrate) { TestSetAndGetTargetBitratesWithFixedCodec(audio_encoder_.get(), 64000); } +// TODO(bugs.webrtc.org/345525069): Either fix/enable or remove G722. +#if defined(__has_feature) && __has_feature(undefined_behavior_sanitizer) +TEST_F(AudioDecoderG722StereoTest, DISABLED_EncodeDecode) { +#else TEST_F(AudioDecoderG722StereoTest, EncodeDecode) { +#endif int tolerance = 6176; int channel_diff_tolerance = 0; double mse = 238630.0; diff --git a/modules/audio_coding/neteq/audio_multi_vector.cc b/modules/audio_coding/neteq/audio_multi_vector.cc index 3aa49e5b9a..e86d9356ad 100644 --- a/modules/audio_coding/neteq/audio_multi_vector.cc +++ b/modules/audio_coding/neteq/audio_multi_vector.cc @@ -66,7 +66,7 @@ void AudioMultiVector::CopyTo(AudioMultiVector* copy_to) const { } void AudioMultiVector::PushBackInterleaved( - rtc::ArrayView append_this) { + ArrayView append_this) { RTC_DCHECK_EQ(append_this.size() % num_channels_, 0); if (append_this.empty()) { return; diff --git a/modules/audio_coding/neteq/audio_multi_vector.h b/modules/audio_coding/neteq/audio_multi_vector.h index 715ec6dfc7..6cb4def994 100644 --- a/modules/audio_coding/neteq/audio_multi_vector.h +++ b/modules/audio_coding/neteq/audio_multi_vector.h @@ -52,7 +52,7 @@ class AudioMultiVector { // is assumed to be channel-interleaved. The length must be an even multiple // of this object's number of channels. The length of this object is increased // with the length of the array divided by the number of channels. - void PushBackInterleaved(rtc::ArrayView append_this); + void PushBackInterleaved(ArrayView append_this); // Appends the contents of AudioMultiVector `append_this` to this object. The // length of this object is increased with the length of `append_this`. 
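PushBackInterleaved above expects channel-interleaved samples (for stereo: L, R, L, R, ...), which is why the view length must be an even multiple of the channel count. A small sketch of de-interleaving such a buffer into per-channel vectors; the helper is illustrative and not part of AudioMultiVector.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

std::vector<std::vector<int16_t>> Deinterleave(
    const std::vector<int16_t>& interleaved, size_t num_channels) {
  assert(num_channels > 0 && interleaved.size() % num_channels == 0);
  std::vector<std::vector<int16_t>> channels(num_channels);
  for (size_t i = 0; i < interleaved.size(); ++i) {
    // Sample i belongs to channel i % num_channels.
    channels[i % num_channels].push_back(interleaved[i]);
  }
  return channels;
}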
diff --git a/modules/audio_coding/neteq/audio_multi_vector_unittest.cc b/modules/audio_coding/neteq/audio_multi_vector_unittest.cc index 386c3d48a3..eaf31476b3 100644 --- a/modules/audio_coding/neteq/audio_multi_vector_unittest.cc +++ b/modules/audio_coding/neteq/audio_multi_vector_unittest.cc @@ -47,7 +47,7 @@ class AudioMultiVectorTest : public ::testing::TestWithParam { // And so on. for (size_t i = 0; i < array_length(); ++i) { for (size_t j = 1; j <= num_channels_; ++j) { - *ptr = rtc::checked_cast(j * 100 + i); + *ptr = checked_cast(j * 100 + i); ++ptr; } } diff --git a/modules/audio_coding/neteq/audio_vector_unittest.cc b/modules/audio_coding/neteq/audio_vector_unittest.cc index ae9dd88606..8221a5f6c7 100644 --- a/modules/audio_coding/neteq/audio_vector_unittest.cc +++ b/modules/audio_coding/neteq/audio_vector_unittest.cc @@ -24,7 +24,7 @@ class AudioVectorTest : public ::testing::Test { virtual void SetUp() { // Populate test array. for (size_t i = 0; i < array_length(); ++i) { - array_[i] = rtc::checked_cast(i); + array_[i] = checked_cast(i); } } @@ -250,7 +250,7 @@ TEST_F(AudioVectorTest, InsertAtEnd) { for (int i = 0; i < kNewLength; ++i) { new_array[i] = 100 + i; } - int insert_position = rtc::checked_cast(array_length()); + int insert_position = checked_cast(array_length()); vec.InsertAt(new_array, kNewLength, insert_position); // Verify that the vector looks as follows: // {0, 1, ..., kLength - 1, 100, 101, ..., 100 + kNewLength - 1 }. @@ -279,8 +279,7 @@ TEST_F(AudioVectorTest, InsertBeyondEnd) { for (int i = 0; i < kNewLength; ++i) { new_array[i] = 100 + i; } - int insert_position = - rtc::checked_cast(array_length() + 10); // Too large. + int insert_position = checked_cast(array_length() + 10); // Too large. vec.InsertAt(new_array, kNewLength, insert_position); // Verify that the vector looks as follows: // {0, 1, ..., kLength - 1, 100, 101, ..., 100 + kNewLength - 1 }. @@ -336,7 +335,7 @@ TEST_F(AudioVectorTest, OverwriteBeyondEnd) { for (int i = 0; i < kNewLength; ++i) { new_array[i] = 100 + i; } - int insert_position = rtc::checked_cast(array_length() - 2); + int insert_position = checked_cast(array_length() - 2); vec.OverwriteAt(new_array, kNewLength, insert_position); ASSERT_EQ(array_length() - 2u + kNewLength, vec.Size()); // Verify that the vector looks as follows: diff --git a/modules/audio_coding/neteq/background_noise.cc b/modules/audio_coding/neteq/background_noise.cc index 2c95d3b390..812d656c96 100644 --- a/modules/audio_coding/neteq/background_noise.cc +++ b/modules/audio_coding/neteq/background_noise.cc @@ -17,7 +17,6 @@ #include "common_audio/signal_processing/include/signal_processing_library.h" #include "modules/audio_coding/neteq/audio_multi_vector.h" #include "modules/audio_coding/neteq/cross_correlation.h" -#include "modules/audio_coding/neteq/post_decode_vad.h" namespace webrtc { namespace { @@ -44,17 +43,11 @@ void BackgroundNoise::Reset() { } } -bool BackgroundNoise::Update(const AudioMultiVector& input, - const PostDecodeVad& vad) { +bool BackgroundNoise::Update(const AudioMultiVector& sync_buffer) { bool filter_params_saved = false; - if (vad.running() && vad.active_speech()) { - // Do not update the background noise parameters if we know that the signal - // is active speech. 
- return filter_params_saved; - } int32_t auto_correlation[kMaxLpcOrder + 1]; - int16_t fiter_output[kMaxLpcOrder + kResidualLength]; + int16_t filter_output[kMaxLpcOrder + kResidualLength]; int16_t reflection_coefficients[kMaxLpcOrder]; int16_t lpc_coefficients[kMaxLpcOrder + 1]; @@ -62,14 +55,13 @@ bool BackgroundNoise::Update(const AudioMultiVector& input, ChannelParameters& parameters = channel_parameters_[channel_ix]; int16_t temp_signal_array[kVecLen + kMaxLpcOrder] = {0}; int16_t* temp_signal = &temp_signal_array[kMaxLpcOrder]; - RTC_DCHECK_GE(input.Size(), kVecLen); - input[channel_ix].CopyTo(kVecLen, input.Size() - kVecLen, temp_signal); + RTC_DCHECK_GE(sync_buffer.Size(), kVecLen); + sync_buffer[channel_ix].CopyTo(kVecLen, sync_buffer.Size() - kVecLen, + temp_signal); int32_t sample_energy = CalculateAutoCorrelation(temp_signal, kVecLen, auto_correlation); - if ((!vad.running() && - sample_energy < parameters.energy_update_threshold) || - (vad.running() && !vad.active_speech())) { + if (sample_energy < parameters.energy_update_threshold) { // Generate LPC coefficients. if (auto_correlation[0] <= 0) { // Center value in auto-correlation is not positive. Do not update. @@ -95,10 +87,10 @@ bool BackgroundNoise::Update(const AudioMultiVector& input, // Generate the CNG gain factor by looking at the energy of the residual. WebRtcSpl_FilterMAFastQ12(temp_signal + kVecLen - kResidualLength, - fiter_output, lpc_coefficients, + filter_output, lpc_coefficients, kMaxLpcOrder + 1, kResidualLength); int32_t residual_energy = WebRtcSpl_DotProductWithScale( - fiter_output, fiter_output, kResidualLength, 0); + filter_output, filter_output, kResidualLength, 0); // Check spectral flatness. // Comparing the residual variance with the input signal variance tells @@ -117,9 +109,8 @@ bool BackgroundNoise::Update(const AudioMultiVector& input, filter_params_saved = true; } } else { - // Will only happen if post-decode VAD is disabled and `sample_energy` is - // not low enough. Increase the threshold for update so that it increases - // by a factor 4 in 4 seconds. + // Will only happen if `sample_energy` is not low enough. Increase the + // threshold for update so that it increases by a factor 4 in 4 seconds. IncrementEnergyThreshold(channel_ix, sample_energy); } } @@ -127,10 +118,10 @@ bool BackgroundNoise::Update(const AudioMultiVector& input, } void BackgroundNoise::GenerateBackgroundNoise( - rtc::ArrayView random_vector, + ArrayView random_vector, size_t channel, - int mute_slope, - bool too_many_expands, + int /* mute_slope */, + bool /* too_many_expands */, size_t num_noise_samples, int16_t* buffer) { constexpr size_t kNoiseLpcOrder = kMaxLpcOrder; @@ -202,7 +193,7 @@ const int16_t* BackgroundNoise::FilterState(size_t channel) const { } void BackgroundNoise::SetFilterState(size_t channel, - rtc::ArrayView input) { + ArrayView input) { RTC_DCHECK_LT(channel, num_channels_); size_t length = std::min(input.size(), kMaxLpcOrder); memcpy(channel_parameters_[channel].filter_state, input.data(), diff --git a/modules/audio_coding/neteq/background_noise.h b/modules/audio_coding/neteq/background_noise.h index 8e6d5890a0..ef2f9cb966 100644 --- a/modules/audio_coding/neteq/background_noise.h +++ b/modules/audio_coding/neteq/background_noise.h @@ -39,13 +39,13 @@ class BackgroundNoise { void Reset(); // Updates the parameter estimates based on the signal currently in the - // `sync_buffer`, and on the latest decision in `vad` if it is running. + // `sync_buffer`. 
// Returns true if the filter parameters are updated. - bool Update(const AudioMultiVector& sync_buffer, const PostDecodeVad& vad); + bool Update(const AudioMultiVector& sync_buffer); // Generates background noise given a random vector and writes the output to // `buffer`. - void GenerateBackgroundNoise(rtc::ArrayView random_vector, + void GenerateBackgroundNoise(ArrayView random_vector, size_t channel, int mute_slope, bool too_many_expands, @@ -69,7 +69,7 @@ class BackgroundNoise { // Copies `input` to the filter state. Will not copy more than `kMaxLpcOrder` // elements. - void SetFilterState(size_t channel, rtc::ArrayView input); + void SetFilterState(size_t channel, ArrayView input); // Returns `scale_` for `channel`. int16_t Scale(size_t channel) const; diff --git a/modules/audio_coding/neteq/buffer_level_filter.cc b/modules/audio_coding/neteq/buffer_level_filter.cc index 948545d948..79789feffc 100644 --- a/modules/audio_coding/neteq/buffer_level_filter.cc +++ b/modules/audio_coding/neteq/buffer_level_filter.cc @@ -35,18 +35,18 @@ void BufferLevelFilter::Update(size_t buffer_size_samples, // `level_factor_` and `filtered_current_level_` are in Q8. // `buffer_size_samples` is in Q0. const int64_t filtered_current_level = - (level_factor_* int64_t{filtered_current_level_} >> 8) + - (256 - level_factor_) * rtc::dchecked_cast(buffer_size_samples); + (level_factor_ * int64_t{filtered_current_level_} >> 8) + + (256 - level_factor_) * dchecked_cast(buffer_size_samples); // Account for time-scale operations (accelerate and pre-emptive expand) and // make sure that the filtered value remains non-negative. - filtered_current_level_ = rtc::saturated_cast(std::max( + filtered_current_level_ = saturated_cast(std::max( 0, filtered_current_level - int64_t{time_stretched_samples} * (1 << 8))); } void BufferLevelFilter::SetFilteredBufferLevel(int buffer_size_samples) { filtered_current_level_ = - rtc::saturated_cast(int64_t{buffer_size_samples} * 256); + saturated_cast(int64_t{buffer_size_samples} * 256); } void BufferLevelFilter::SetTargetBufferLevel(int target_buffer_level_ms) { diff --git a/modules/audio_coding/neteq/buffer_level_filter_unittest.cc b/modules/audio_coding/neteq/buffer_level_filter_unittest.cc index 6773e96f58..28452d6b62 100644 --- a/modules/audio_coding/neteq/buffer_level_filter_unittest.cc +++ b/modules/audio_coding/neteq/buffer_level_filter_unittest.cc @@ -31,7 +31,7 @@ TEST(BufferLevelFilter, ConvergenceTest) { for (int value = 100; value <= 200; value += 10) { filter.Reset(); filter.SetTargetBufferLevel(20); // Makes filter coefficient 251/256. - rtc::StringBuilder ss; + StringBuilder ss; ss << "times = " << times << ", value = " << value; SCOPED_TRACE(ss.str()); // Print out the parameter values on failure. for (int i = 0; i < times; ++i) { diff --git a/modules/audio_coding/neteq/comfort_noise.cc b/modules/audio_coding/neteq/comfort_noise.cc index 8a906593d7..49ba21e6de 100644 --- a/modules/audio_coding/neteq/comfort_noise.cc +++ b/modules/audio_coding/neteq/comfort_noise.cc @@ -67,8 +67,8 @@ int ComfortNoise::Generate(size_t requested_length, AudioMultiVector* output) { } std::unique_ptr temp(new int16_t[number_of_samples]); - if (!cng_decoder->Generate( - rtc::ArrayView(temp.get(), number_of_samples), new_period)) { + if (!cng_decoder->Generate(ArrayView(temp.get(), number_of_samples), + new_period)) { // Error returned. 
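BufferLevelFilter::Update above is a first-order exponential smoother held in Q8 fixed point; with level_factor_ = 251 (the coefficient the unit test mentions for a 20 ms target) the effective smoothing factor is 251/256, roughly 0.98. A floating-point sketch of the same recursion, ignoring the time-stretch correction term.

#include <algorithm>
#include <cstdio>

// filtered <- alpha * filtered + (1 - alpha) * current, with alpha = q8 / 256.
double SmoothedBufferLevel(double filtered, double current_samples,
                           int level_factor_q8) {
  const double alpha = level_factor_q8 / 256.0;
  return std::max(0.0, alpha * filtered + (1.0 - alpha) * current_samples);
}

int main() {
  double level = 0.0;
  // Feeding a constant 160-sample buffer level converges towards 160.
  for (int i = 0; i < 200; ++i) {
    level = SmoothedBufferLevel(level, 160.0, 251);
  }
  std::printf("filtered level ~ %.1f samples\n", level);
  return 0;
}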
output->Zeros(requested_length); RTC_LOG(LS_ERROR) diff --git a/modules/audio_coding/neteq/decision_logic.cc b/modules/audio_coding/neteq/decision_logic.cc index fd4f2f5a20..accc34e7c8 100644 --- a/modules/audio_coding/neteq/decision_logic.cc +++ b/modules/audio_coding/neteq/decision_logic.cc @@ -14,19 +14,18 @@ #include #include -#include +#include +#include -#include "absl/types/optional.h" +#include "api/environment/environment.h" #include "api/neteq/neteq.h" #include "api/neteq/neteq_controller.h" +#include "modules/audio_coding/neteq/buffer_level_filter.h" +#include "modules/audio_coding/neteq/delay_manager.h" #include "modules/audio_coding/neteq/packet_arrival_history.h" #include "modules/audio_coding/neteq/packet_buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/experiments/field_trial_parser.h" -#include "rtc_base/experiments/struct_parameters_parser.h" -#include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { @@ -34,17 +33,16 @@ namespace { constexpr int kPostponeDecodingLevel = 50; constexpr int kTargetLevelWindowMs = 100; -constexpr int kMaxWaitForPacketMs = 100; // The granularity of delay adjustments (accelerate/preemptive expand) is 15ms, // but round up since the clock has a granularity of 10ms. constexpr int kDelayAdjustmentGranularityMs = 20; -constexpr int kReinitAfterExpandsMs = 1000; +constexpr int kPacketHistorySizeMs = 2000; +constexpr size_t kCngTimeoutMs = 1000; std::unique_ptr CreateDelayManager( + const Environment& env, const NetEqController::Config& neteq_config) { - DelayManager::Config config; - config.max_packets_in_buffer = neteq_config.max_packets_in_buffer; - config.base_minimum_delay_ms = neteq_config.base_min_delay_ms; + DelayManager::Config config(env.field_trials()); config.Log(); return std::make_unique(config, neteq_config.tick_timer); } @@ -67,38 +65,26 @@ bool IsExpand(NetEq::Mode mode) { } // namespace -DecisionLogic::Config::Config() { - StructParametersParser::Create( - "enable_stable_delay_mode", &enable_stable_delay_mode, // - "combine_concealment_decision", &combine_concealment_decision, // - "packet_history_size_ms", &packet_history_size_ms, // - "cng_timeout_ms", &cng_timeout_ms, // - "deceleration_target_level_offset_ms", - &deceleration_target_level_offset_ms) - ->Parse(webrtc::field_trial::FindFullName( - "WebRTC-Audio-NetEqDecisionLogicConfig")); - RTC_LOG(LS_INFO) << "NetEq decision logic config:" - << " enable_stable_delay_mode=" << enable_stable_delay_mode - << " combine_concealment_decision=" - << combine_concealment_decision - << " packet_history_size_ms=" << packet_history_size_ms - << " cng_timeout_ms=" << cng_timeout_ms.value_or(-1) - << " deceleration_target_level_offset_ms=" - << deceleration_target_level_offset_ms; -} - -DecisionLogic::DecisionLogic(NetEqController::Config config) +DecisionLogic::DecisionLogic(const Environment& env, + NetEqController::Config config) : DecisionLogic(config, - CreateDelayManager(config), + CreateDelayManager(env, config), std::make_unique()) {} DecisionLogic::DecisionLogic( NetEqController::Config config, std::unique_ptr delay_manager, - std::unique_ptr buffer_level_filter) + std::unique_ptr buffer_level_filter, + std::unique_ptr packet_arrival_history) : delay_manager_(std::move(delay_manager)), + delay_constraints_(config.max_packets_in_buffer, + config.base_min_delay_ms), buffer_level_filter_(std::move(buffer_level_filter)), - packet_arrival_history_(config_.packet_history_size_ms), + 
packet_arrival_history_( + packet_arrival_history + ? std::move(packet_arrival_history) + : std::make_unique(config.tick_timer, + kPacketHistorySizeMs)), tick_timer_(config.tick_timer), disallow_time_stretching_(!config.allow_time_stretching), timescale_countdown_( @@ -115,7 +101,7 @@ void DecisionLogic::SoftReset() { time_stretched_cn_samples_ = 0; delay_manager_->Reset(); buffer_level_filter_->Reset(); - packet_arrival_history_.Reset(); + packet_arrival_history_->Reset(); } void DecisionLogic::SetSampleRate(int fs_hz, size_t output_size_samples) { @@ -124,17 +110,16 @@ void DecisionLogic::SetSampleRate(int fs_hz, size_t output_size_samples) { fs_hz == 48000); sample_rate_khz_ = fs_hz / 1000; output_size_samples_ = output_size_samples; - packet_arrival_history_.set_sample_rate(fs_hz); + packet_arrival_history_->set_sample_rate(fs_hz); } NetEq::Operation DecisionLogic::GetDecision(const NetEqStatus& status, - bool* reset_decoder) { + bool* /* reset_decoder */) { prev_time_scale_ = prev_time_scale_ && IsTimestretch(status.last_mode); if (prev_time_scale_) { timescale_countdown_ = tick_timer_->GetNewCountdown(kMinTimescaleInterval); } - if (!IsCng(status.last_mode) && - !(config_.combine_concealment_decision && IsExpand(status.last_mode))) { + if (!IsCng(status.last_mode) && !IsExpand(status.last_mode)) { FilterBufferLevel(status.packet_buffer_info.span_samples); } @@ -157,15 +142,6 @@ NetEq::Operation DecisionLogic::GetDecision(const NetEqStatus& status, return NoPacket(status); } - // If the expand period was very long, reset NetEQ since it is likely that the - // sender was restarted. - if (!config_.combine_concealment_decision && IsExpand(status.last_mode) && - status.generated_noise_samples > - static_cast(kReinitAfterExpandsMs * sample_rate_khz_)) { - *reset_decoder = true; - return NetEq::Operation::kNormal; - } - if (PostponeDecode(status)) { return NoPacket(status); } @@ -187,46 +163,40 @@ NetEq::Operation DecisionLogic::GetDecision(const NetEqStatus& status, } int DecisionLogic::TargetLevelMs() const { - int target_delay_ms = delay_manager_->TargetDelayMs(); - if (!config_.enable_stable_delay_mode) { - target_delay_ms = - std::max(target_delay_ms, - static_cast(packet_length_samples_ / sample_rate_khz_)); - } - return target_delay_ms; + return delay_constraints_.Clamp(UnlimitedTargetLevelMs()); } int DecisionLogic::UnlimitedTargetLevelMs() const { - return delay_manager_->UnlimitedTargetLevelMs(); + return delay_manager_->TargetDelayMs(); } int DecisionLogic::GetFilteredBufferLevel() const { return buffer_level_filter_->filtered_current_level(); } -absl::optional DecisionLogic::PacketArrived( - int fs_hz, - bool should_update_stats, - const PacketArrivedInfo& info) { +std::optional DecisionLogic::PacketArrived(int fs_hz, + bool should_update_stats, + const PacketArrivedInfo& info) { buffer_flush_ = buffer_flush_ || info.buffer_flush; if (!should_update_stats || info.is_cng_or_dtmf) { - return absl::nullopt; + return std::nullopt; } if (info.packet_length_samples > 0 && fs_hz > 0 && info.packet_length_samples != packet_length_samples_) { packet_length_samples_ = info.packet_length_samples; - delay_manager_->SetPacketAudioLength(packet_length_samples_ * 1000 / fs_hz); + delay_constraints_.SetPacketAudioLength(packet_length_samples_ * 1000 / + fs_hz); } - int64_t time_now_ms = tick_timer_->ticks() * tick_timer_->ms_per_tick(); - packet_arrival_history_.Insert(info.main_timestamp, time_now_ms); - if (packet_arrival_history_.size() < 2) { + bool inserted = 
packet_arrival_history_->Insert(info.main_timestamp, + info.packet_length_samples); + if (!inserted || packet_arrival_history_->size() < 2) { // No meaningful delay estimate unless at least 2 packets have arrived. - return absl::nullopt; + return std::nullopt; } int arrival_delay_ms = - packet_arrival_history_.GetDelayMs(info.main_timestamp, time_now_ms); + packet_arrival_history_->GetDelayMs(info.main_timestamp); bool reordered = - !packet_arrival_history_.IsNewestRtpTimestamp(info.main_timestamp); + !packet_arrival_history_->IsNewestRtpTimestamp(info.main_timestamp); delay_manager_->Update(arrival_delay_ms, reordered); return arrival_delay_ms; } @@ -264,10 +234,10 @@ NetEq::Operation DecisionLogic::CngOperation( // The waiting time for this packet will be longer than 1.5 // times the wanted buffer delay. Apply fast-forward to cut the // waiting time down to the optimal. - noise_fast_forward_ = rtc::saturated_cast(noise_fast_forward_ + - excess_waiting_time_samp); + noise_fast_forward_ = + saturated_cast(noise_fast_forward_ + excess_waiting_time_samp); timestamp_diff = - rtc::saturated_cast(timestamp_diff + excess_waiting_time_samp); + saturated_cast(timestamp_diff + excess_waiting_time_samp); } if (timestamp_diff < 0 && status.last_mode == NetEq::Mode::kRfc3389Cng) { @@ -287,9 +257,7 @@ NetEq::Operation DecisionLogic::NoPacket(NetEqController::NetEqStatus status) { return NetEq::Operation::kRfc3389CngNoPacket; case NetEq::Mode::kCodecInternalCng: { // Stop CNG after a timeout. - if (config_.cng_timeout_ms && - status.generated_noise_samples > - static_cast(*config_.cng_timeout_ms * sample_rate_khz_)) { + if (status.generated_noise_samples > kCngTimeoutMs * sample_rate_khz_) { return NetEq::Operation::kExpand; } return NetEq::Operation::kCodecInternalCng; @@ -304,42 +272,20 @@ NetEq::Operation DecisionLogic::ExpectedPacketAvailable( NetEqController::NetEqStatus status) { if (!disallow_time_stretching_ && status.last_mode != NetEq::Mode::kExpand && !status.play_dtmf) { - if (config_.enable_stable_delay_mode) { - const int playout_delay_ms = GetPlayoutDelayMs(status); - const int low_limit = TargetLevelMs(); - const int high_limit = low_limit + - packet_arrival_history_.GetMaxDelayMs() + - kDelayAdjustmentGranularityMs; - if (playout_delay_ms >= high_limit * 4) { - return NetEq::Operation::kFastAccelerate; + const int playout_delay_ms = GetPlayoutDelayMs(status); + const int64_t low_limit = TargetLevelMs(); + const int64_t high_limit = low_limit + + packet_arrival_history_->GetMaxDelayMs() + + kDelayAdjustmentGranularityMs; + if (playout_delay_ms >= high_limit * 4) { + return NetEq::Operation::kFastAccelerate; + } + if (TimescaleAllowed()) { + if (playout_delay_ms >= high_limit) { + return NetEq::Operation::kAccelerate; } - if (TimescaleAllowed()) { - if (playout_delay_ms >= high_limit) { - return NetEq::Operation::kAccelerate; - } - if (playout_delay_ms < low_limit) { - return NetEq::Operation::kPreemptiveExpand; - } - } - } else { - const int target_level_samples = TargetLevelMs() * sample_rate_khz_; - const int low_limit = std::max( - target_level_samples * 3 / 4, - target_level_samples - - config_.deceleration_target_level_offset_ms * sample_rate_khz_); - const int high_limit = std::max( - target_level_samples, - low_limit + kDelayAdjustmentGranularityMs * sample_rate_khz_); - - const int buffer_level_samples = - buffer_level_filter_->filtered_current_level(); - if (buffer_level_samples >= high_limit * 4) - return NetEq::Operation::kFastAccelerate; - if (TimescaleAllowed()) { - if 
(buffer_level_samples >= high_limit) - return NetEq::Operation::kAccelerate; - if (buffer_level_samples < low_limit) - return NetEq::Operation::kPreemptiveExpand; + if (playout_delay_ms < low_limit) { + return NetEq::Operation::kPreemptiveExpand; } } } @@ -351,35 +297,20 @@ NetEq::Operation DecisionLogic::FuturePacketAvailable( // Required packet is not available, but a future packet is. // Check if we should continue with an ongoing concealment because the new // packet is too far into the future. - if (config_.combine_concealment_decision || IsCng(status.last_mode)) { - const int buffer_delay_samples = - config_.combine_concealment_decision - ? status.packet_buffer_info.span_samples_wait_time - : status.packet_buffer_info.span_samples; - const int buffer_delay_ms = buffer_delay_samples / sample_rate_khz_; - const int high_limit = TargetLevelMs() + kTargetLevelWindowMs / 2; - const int low_limit = - std::max(0, TargetLevelMs() - kTargetLevelWindowMs / 2); - const bool above_target_delay = buffer_delay_ms > high_limit; - const bool below_target_delay = buffer_delay_ms < low_limit; - if ((PacketTooEarly(status) && !above_target_delay) || - (below_target_delay && !config_.combine_concealment_decision)) { - return NoPacket(status); - } - uint32_t timestamp_leap = - status.next_packet->timestamp - status.target_timestamp; - if (config_.combine_concealment_decision) { - if (timestamp_leap != status.generated_noise_samples) { - // The delay was adjusted, reinitialize the buffer level filter. - buffer_level_filter_->SetFilteredBufferLevel(buffer_delay_samples); - } - } else { - time_stretched_cn_samples_ = - timestamp_leap - status.generated_noise_samples; - } - } else if (IsExpand(status.last_mode) && ShouldContinueExpand(status)) { + const int buffer_delay_samples = + status.packet_buffer_info.span_samples_wait_time; + const int buffer_delay_ms = buffer_delay_samples / sample_rate_khz_; + const int high_limit = TargetLevelMs() + kTargetLevelWindowMs / 2; + const bool above_target_delay = buffer_delay_ms > high_limit; + if ((PacketTooEarly(status) && !above_target_delay)) { return NoPacket(status); } + uint32_t timestamp_leap = + status.next_packet->timestamp - status.target_timestamp; + if (timestamp_leap != status.generated_noise_samples) { + // The delay was adjusted, reinitialize the buffer level filter. + buffer_level_filter_->SetFilteredBufferLevel(buffer_delay_samples); + } // Time to play the next packet. switch (status.last_mode) { @@ -406,9 +337,7 @@ bool DecisionLogic::PostponeDecode(NetEqController::NetEqStatus status) const { const size_t min_buffer_level_samples = TargetLevelMs() * sample_rate_khz_ * kPostponeDecodingLevel / 100; const size_t buffer_level_samples = - config_.combine_concealment_decision - ? status.packet_buffer_info.span_samples_wait_time - : status.packet_buffer_info.span_samples; + status.packet_buffer_info.span_samples_wait_time; if (buffer_level_samples >= min_buffer_level_samples) { return false; } @@ -418,7 +347,7 @@ bool DecisionLogic::PostponeDecode(NetEqController::NetEqStatus status) const { return false; } // Continue CNG until the buffer is at least at the minimum level. 
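The reworked ExpectedPacketAvailable above compares the estimated playout delay against a window around the target delay: fast accelerate once the delay exceeds four times the high limit, accelerate above the high limit, preemptive expand below the target. A self-contained sketch of that comparison; the 20 ms slack mirrors kDelayAdjustmentGranularityMs, while the enum and function names are illustrative.

enum class TimeStretch { kNone, kAccelerate, kFastAccelerate, kPreemptiveExpand };

TimeStretch ChooseTimeStretch(int playout_delay_ms,
                              int target_delay_ms,
                              int max_arrival_delay_ms,
                              bool timescale_allowed) {
  const int low_limit = target_delay_ms;
  const int high_limit = low_limit + max_arrival_delay_ms + 20;  // + slack
  if (playout_delay_ms >= high_limit * 4) {
    return TimeStretch::kFastAccelerate;
  }
  if (timescale_allowed) {
    if (playout_delay_ms >= high_limit) {
      return TimeStretch::kAccelerate;
    }
    if (playout_delay_ms < low_limit) {
      return TimeStretch::kPreemptiveExpand;
    }
  }
  return TimeStretch::kNone;
}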
- if (config_.combine_concealment_decision && IsCng(status.last_mode)) { + if (IsCng(status.last_mode)) { return true; } // Only continue expand if the mute factor is low enough (otherwise the @@ -430,38 +359,17 @@ bool DecisionLogic::PostponeDecode(NetEqController::NetEqStatus status) const { return false; } -bool DecisionLogic::ReinitAfterExpands( - NetEqController::NetEqStatus status) const { - const uint32_t timestamp_leap = - status.next_packet->timestamp - status.target_timestamp; - return timestamp_leap >= - static_cast(kReinitAfterExpandsMs * sample_rate_khz_); -} - bool DecisionLogic::PacketTooEarly(NetEqController::NetEqStatus status) const { const uint32_t timestamp_leap = status.next_packet->timestamp - status.target_timestamp; return timestamp_leap > status.generated_noise_samples; } -bool DecisionLogic::MaxWaitForPacket( - NetEqController::NetEqStatus status) const { - return status.generated_noise_samples >= - static_cast(kMaxWaitForPacketMs * sample_rate_khz_); -} - -bool DecisionLogic::ShouldContinueExpand( - NetEqController::NetEqStatus status) const { - return !ReinitAfterExpands(status) && !MaxWaitForPacket(status) && - PacketTooEarly(status) && UnderTargetLevel(); -} - int DecisionLogic::GetPlayoutDelayMs( NetEqController::NetEqStatus status) const { uint32_t playout_timestamp = status.target_timestamp - status.sync_buffer_samples; - return packet_arrival_history_.GetDelayMs( - playout_timestamp, tick_timer_->ticks() * tick_timer_->ms_per_tick()); + return packet_arrival_history_->GetDelayMs(playout_timestamp); } } // namespace webrtc diff --git a/modules/audio_coding/neteq/decision_logic.h b/modules/audio_coding/neteq/decision_logic.h index d96fbecd6a..26dd8a46ae 100644 --- a/modules/audio_coding/neteq/decision_logic.h +++ b/modules/audio_coding/neteq/decision_logic.h @@ -11,25 +11,31 @@ #ifndef MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_H_ #define MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_H_ +#include +#include #include +#include +#include "api/environment/environment.h" #include "api/neteq/neteq.h" #include "api/neteq/neteq_controller.h" #include "api/neteq/tick_timer.h" #include "modules/audio_coding/neteq/buffer_level_filter.h" +#include "modules/audio_coding/neteq/delay_constraints.h" #include "modules/audio_coding/neteq/delay_manager.h" #include "modules/audio_coding/neteq/packet_arrival_history.h" -#include "rtc_base/experiments/field_trial_parser.h" namespace webrtc { // This is the class for the decision tree implementation. class DecisionLogic : public NetEqController { public: - DecisionLogic(NetEqController::Config config); - DecisionLogic(NetEqController::Config config, - std::unique_ptr delay_manager, - std::unique_ptr buffer_level_filter); + DecisionLogic(const Environment& env, NetEqController::Config config); + DecisionLogic( + NetEqController::Config config, + std::unique_ptr delay_manager, + std::unique_ptr buffer_level_filter, + std::unique_ptr packet_arrival_history = nullptr); ~DecisionLogic() override; @@ -58,7 +64,7 @@ class DecisionLogic : public NetEqController { NetEq::Operation GetDecision(const NetEqController::NetEqStatus& status, bool* reset_decoder) override; - void ExpandDecision(NetEq::Operation operation) override {} + void ExpandDecision(NetEq::Operation /* operation */) override {} // Adds `value` to `sample_memory_`. 
void AddSampleMemory(int32_t value) override { sample_memory_ += value; } @@ -67,23 +73,23 @@ class DecisionLogic : public NetEqController { int UnlimitedTargetLevelMs() const override; - absl::optional PacketArrived(int fs_hz, - bool should_update_stats, - const PacketArrivedInfo& info) override; + std::optional PacketArrived(int fs_hz, + bool should_update_stats, + const PacketArrivedInfo& info) override; void RegisterEmptyPacket() override {} bool SetMaximumDelay(int delay_ms) override { - return delay_manager_->SetMaximumDelay(delay_ms); + return delay_constraints_.SetMaximumDelay(delay_ms); } bool SetMinimumDelay(int delay_ms) override { - return delay_manager_->SetMinimumDelay(delay_ms); + return delay_constraints_.SetMinimumDelay(delay_ms); } bool SetBaseMinimumDelay(int delay_ms) override { - return delay_manager_->SetBaseMinimumDelay(delay_ms); + return delay_constraints_.SetBaseMinimumDelay(delay_ms); } int GetBaseMinimumDelay() const override { - return delay_manager_->GetBaseMinimumDelay(); + return delay_constraints_.GetBaseMinimumDelay(); } bool PeakFound() const override { return false; } @@ -138,32 +144,16 @@ class DecisionLogic : public NetEqController { // level, even though the next packet is available. bool PostponeDecode(NetEqController::NetEqStatus status) const; - // Checks if the timestamp leap is so long into the future that a reset due - // to exceeding the expand limit will be done. - bool ReinitAfterExpands(NetEqController::NetEqStatus status) const; - // Checks if we still have not done enough expands to cover the distance from // the last decoded packet to the next available packet. bool PacketTooEarly(NetEqController::NetEqStatus status) const; - bool MaxWaitForPacket(NetEqController::NetEqStatus status) const; - bool ShouldContinueExpand(NetEqController::NetEqStatus status) const; + int GetPlayoutDelayMs(NetEqController::NetEqStatus status) const; - // Runtime configurable options through field trial - // WebRTC-Audio-NetEqDecisionLogicConfig. 
- struct Config { - Config(); - - bool enable_stable_delay_mode = false; - bool combine_concealment_decision = false; - int deceleration_target_level_offset_ms = 85; - int packet_history_size_ms = 2000; - absl::optional cng_timeout_ms; - }; - Config config_; std::unique_ptr delay_manager_; + DelayConstraints delay_constraints_; std::unique_ptr buffer_level_filter_; - PacketArrivalHistory packet_arrival_history_; + std::unique_ptr packet_arrival_history_; const TickTimer* tick_timer_; int sample_rate_khz_; size_t output_size_samples_; diff --git a/modules/audio_coding/neteq/decision_logic_unittest.cc b/modules/audio_coding/neteq/decision_logic_unittest.cc index 97e20dd883..2d3170b9ec 100644 --- a/modules/audio_coding/neteq/decision_logic_unittest.cc +++ b/modules/audio_coding/neteq/decision_logic_unittest.cc @@ -14,17 +14,19 @@ #include "api/neteq/neteq_controller.h" #include "api/neteq/tick_timer.h" -#include "modules/audio_coding/neteq/buffer_level_filter.h" #include "modules/audio_coding/neteq/delay_manager.h" #include "modules/audio_coding/neteq/mock/mock_buffer_level_filter.h" #include "modules/audio_coding/neteq/mock/mock_delay_manager.h" -#include "test/field_trial.h" +#include "modules/audio_coding/neteq/mock/mock_packet_arrival_history.h" +#include "test/explicit_key_value_config.h" #include "test/gtest.h" namespace webrtc { namespace { +using test::ExplicitKeyValueConfig; + constexpr int kSampleRate = 8000; constexpr int kSamplesPerMs = kSampleRate / 1000; constexpr int kOutputSizeSamples = kSamplesPerMs * 10; @@ -44,9 +46,11 @@ NetEqController::NetEqStatus CreateNetEqStatus(NetEq::Mode last_mode, current_delay_ms * kSamplesPerMs; status.packet_buffer_info.dtx_or_cng = false; status.next_packet = {status.target_timestamp, false, false}; + status.sync_buffer_samples = 0; return status; } +using ::testing::_; using ::testing::Return; } // namespace @@ -54,18 +58,22 @@ using ::testing::Return; class DecisionLogicTest : public ::testing::Test { protected: DecisionLogicTest() { - test::ScopedFieldTrials trials( - "WebRTC-Audio-NetEqDecisionLogicConfig/cng_timeout_ms:1000/"); NetEqController::Config config; config.tick_timer = &tick_timer_; config.allow_time_stretching = true; + config.max_packets_in_buffer = 200; + config.base_min_delay_ms = 0; auto delay_manager = std::make_unique( - DelayManager::Config(), config.tick_timer); + DelayManager::Config(ExplicitKeyValueConfig("")), config.tick_timer); mock_delay_manager_ = delay_manager.get(); auto buffer_level_filter = std::make_unique(); mock_buffer_level_filter_ = buffer_level_filter.get(); + auto packet_arrival_history = + std::make_unique(&tick_timer_); + mock_packet_arrival_history_ = packet_arrival_history.get(); decision_logic_ = std::make_unique( - config, std::move(delay_manager), std::move(buffer_level_filter)); + config, std::move(delay_manager), std::move(buffer_level_filter), + std::move(packet_arrival_history)); decision_logic_->SetSampleRate(kSampleRate, kOutputSizeSamples); } @@ -73,13 +81,16 @@ class DecisionLogicTest : public ::testing::Test { std::unique_ptr decision_logic_; MockDelayManager* mock_delay_manager_; MockBufferLevelFilter* mock_buffer_level_filter_; + MockPacketArrivalHistory* mock_packet_arrival_history_; }; TEST_F(DecisionLogicTest, NormalOperation) { EXPECT_CALL(*mock_delay_manager_, TargetDelayMs()) .WillRepeatedly(Return(100)); - EXPECT_CALL(*mock_buffer_level_filter_, filtered_current_level()) - .WillRepeatedly(Return(90 * kSamplesPerMs)); + EXPECT_CALL(*mock_packet_arrival_history_, GetDelayMs(_)) + 
.WillRepeatedly(Return(100)); + EXPECT_CALL(*mock_packet_arrival_history_, GetMaxDelayMs()) + .WillRepeatedly(Return(0)); bool reset_decoder = false; tick_timer_.Increment(kMinTimescaleInterval + 1); @@ -92,8 +103,10 @@ TEST_F(DecisionLogicTest, NormalOperation) { TEST_F(DecisionLogicTest, Accelerate) { EXPECT_CALL(*mock_delay_manager_, TargetDelayMs()) .WillRepeatedly(Return(100)); - EXPECT_CALL(*mock_buffer_level_filter_, filtered_current_level()) - .WillRepeatedly(Return(110 * kSamplesPerMs)); + EXPECT_CALL(*mock_packet_arrival_history_, GetDelayMs(_)) + .WillRepeatedly(Return(150)); + EXPECT_CALL(*mock_packet_arrival_history_, GetMaxDelayMs()) + .WillRepeatedly(Return(0)); bool reset_decoder = false; tick_timer_.Increment(kMinTimescaleInterval + 1); @@ -106,8 +119,10 @@ TEST_F(DecisionLogicTest, Accelerate) { TEST_F(DecisionLogicTest, FastAccelerate) { EXPECT_CALL(*mock_delay_manager_, TargetDelayMs()) .WillRepeatedly(Return(100)); - EXPECT_CALL(*mock_buffer_level_filter_, filtered_current_level()) - .WillRepeatedly(Return(400 * kSamplesPerMs)); + EXPECT_CALL(*mock_packet_arrival_history_, GetDelayMs(_)) + .WillRepeatedly(Return(500)); + EXPECT_CALL(*mock_packet_arrival_history_, GetMaxDelayMs()) + .WillRepeatedly(Return(0)); bool reset_decoder = false; tick_timer_.Increment(kMinTimescaleInterval + 1); @@ -120,8 +135,10 @@ TEST_F(DecisionLogicTest, FastAccelerate) { TEST_F(DecisionLogicTest, PreemptiveExpand) { EXPECT_CALL(*mock_delay_manager_, TargetDelayMs()) .WillRepeatedly(Return(100)); - EXPECT_CALL(*mock_buffer_level_filter_, filtered_current_level()) - .WillRepeatedly(Return(50 * kSamplesPerMs)); + EXPECT_CALL(*mock_packet_arrival_history_, GetDelayMs(_)) + .WillRepeatedly(Return(50)); + EXPECT_CALL(*mock_packet_arrival_history_, GetMaxDelayMs()) + .WillRepeatedly(Return(0)); bool reset_decoder = false; tick_timer_.Increment(kMinTimescaleInterval + 1); @@ -131,20 +148,6 @@ TEST_F(DecisionLogicTest, PreemptiveExpand) { EXPECT_FALSE(reset_decoder); } -TEST_F(DecisionLogicTest, DecelerationTargetLevelOffset) { - EXPECT_CALL(*mock_delay_manager_, TargetDelayMs()) - .WillRepeatedly(Return(500)); - EXPECT_CALL(*mock_buffer_level_filter_, filtered_current_level()) - .WillRepeatedly(Return(400 * kSamplesPerMs)); - - bool reset_decoder = false; - tick_timer_.Increment(kMinTimescaleInterval + 1); - EXPECT_EQ(decision_logic_->GetDecision( - CreateNetEqStatus(NetEq::Mode::kNormal, 400), &reset_decoder), - NetEq::Operation::kPreemptiveExpand); - EXPECT_FALSE(reset_decoder); -} - TEST_F(DecisionLogicTest, PostponeDecodeAfterExpand) { EXPECT_CALL(*mock_delay_manager_, TargetDelayMs()) .WillRepeatedly(Return(500)); @@ -170,7 +173,7 @@ TEST_F(DecisionLogicTest, TimeStrechComfortNoise) { { bool reset_decoder = false; // Below target window. - auto status = CreateNetEqStatus(NetEq::Mode::kCodecInternalCng, 400); + auto status = CreateNetEqStatus(NetEq::Mode::kCodecInternalCng, 200); status.generated_noise_samples = 400 * kSamplesPerMs; status.next_packet->timestamp = status.target_timestamp + 400 * kSamplesPerMs; @@ -189,24 +192,12 @@ TEST_F(DecisionLogicTest, TimeStrechComfortNoise) { EXPECT_EQ(decision_logic_->GetDecision(status, &reset_decoder), NetEq::Operation::kNormal); EXPECT_FALSE(reset_decoder); - - // The buffer level filter should be adjusted with the number of samples - // that was skipped. 
- int timestamp_leap = status.next_packet->timestamp - - status.target_timestamp - - status.generated_noise_samples; - EXPECT_CALL(*mock_buffer_level_filter_, - Update(400 * kSamplesPerMs, timestamp_leap)); - EXPECT_EQ(decision_logic_->GetDecision( - CreateNetEqStatus(NetEq::Mode::kNormal, 400), &reset_decoder), - NetEq::Operation::kNormal); - EXPECT_FALSE(reset_decoder); } } TEST_F(DecisionLogicTest, CngTimeout) { auto status = CreateNetEqStatus(NetEq::Mode::kCodecInternalCng, 0); - status.next_packet = absl::nullopt; + status.next_packet = std::nullopt; status.generated_noise_samples = kSamplesPerMs * 500; bool reset_decoder = false; EXPECT_EQ(decision_logic_->GetDecision(status, &reset_decoder), diff --git a/modules/audio_coding/neteq/decoder_database.cc b/modules/audio_coding/neteq/decoder_database.cc index 3447ced1da..821efec811 100644 --- a/modules/audio_coding/neteq/decoder_database.cc +++ b/modules/audio_coding/neteq/decoder_database.cc @@ -20,40 +20,36 @@ #include "absl/strings/match.h" #include "absl/strings/string_view.h" #include "api/audio_codecs/audio_decoder.h" +#include "api/environment/environment.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/strings/audio_format_to_string.h" namespace webrtc { DecoderDatabase::DecoderDatabase( - const rtc::scoped_refptr& decoder_factory, - absl::optional codec_pair_id) - : active_decoder_type_(-1), + const Environment& env, + scoped_refptr decoder_factory, + std::optional codec_pair_id) + : env_(env), + active_decoder_type_(-1), active_cng_decoder_type_(-1), - decoder_factory_(decoder_factory), + decoder_factory_(std::move(decoder_factory)), codec_pair_id_(codec_pair_id) {} DecoderDatabase::~DecoderDatabase() = default; DecoderDatabase::DecoderInfo::DecoderInfo( + const Environment& env, const SdpAudioFormat& audio_format, - absl::optional codec_pair_id, - AudioDecoderFactory* factory, - absl::string_view codec_name) - : name_(codec_name), + std::optional codec_pair_id, + AudioDecoderFactory* factory) + : env_(env), audio_format_(audio_format), codec_pair_id_(codec_pair_id), factory_(factory), cng_decoder_(CngDecoder::Create(audio_format)), subtype_(SubtypeFromFormat(audio_format)) {} -DecoderDatabase::DecoderInfo::DecoderInfo( - const SdpAudioFormat& audio_format, - absl::optional codec_pair_id, - AudioDecoderFactory* factory) - : DecoderInfo(audio_format, codec_pair_id, factory, audio_format.name) {} - DecoderDatabase::DecoderInfo::DecoderInfo(DecoderInfo&&) = default; DecoderDatabase::DecoderInfo::~DecoderInfo() = default; @@ -66,9 +62,9 @@ AudioDecoder* DecoderDatabase::DecoderInfo::GetDecoder() const { // TODO(ossu): Keep a check here for now, since a number of tests create // DecoderInfos without factories. RTC_DCHECK(factory_); - decoder_ = factory_->MakeAudioDecoder(audio_format_, codec_pair_id_); + decoder_ = factory_->Create(env_, audio_format_, codec_pair_id_); } - RTC_DCHECK(decoder_) << "Failed to create: " << rtc::ToString(audio_format_); + RTC_DCHECK(decoder_) << "Failed to create: " << absl::StrCat(audio_format_); return decoder_.get(); } @@ -76,7 +72,7 @@ bool DecoderDatabase::DecoderInfo::IsType(absl::string_view name) const { return absl::EqualsIgnoreCase(audio_format_.name, name); } -absl::optional +std::optional DecoderDatabase::DecoderInfo::CngDecoder::Create(const SdpAudioFormat& format) { if (absl::EqualsIgnoreCase(format.name, "CN")) { // CN has a 1:1 RTP clock rate to sample rate ratio. 
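DecoderInfo::GetDecoder above builds the decoder lazily on first use through AudioDecoderFactory::Create with the injected Environment, and caches the result. A generic sketch of that create-on-first-use pattern; the template is an illustrative stand-in, not a WebRTC class.

#include <functional>
#include <memory>
#include <utility>

// Builds the wrapped object on first access and caches it afterwards.
template <typename T>
class LazilyCreated {
 public:
  explicit LazilyCreated(std::function<std::unique_ptr<T>()> create)
      : create_(std::move(create)) {}

  T* get() {
    if (!value_) {
      value_ = create_();  // First use: delegate to the factory.
    }
    return value_.get();
  }

 private:
  std::function<std::unique_ptr<T>()> create_;
  std::unique_ptr<T> value_;
};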
@@ -85,7 +81,7 @@ DecoderDatabase::DecoderInfo::CngDecoder::Create(const SdpAudioFormat& format) { sample_rate_hz == 32000 || sample_rate_hz == 48000); return DecoderDatabase::DecoderInfo::CngDecoder{sample_rate_hz}; } else { - return absl::nullopt; + return std::nullopt; } } @@ -133,8 +129,8 @@ std::vector DecoderDatabase::SetCodecs( RTC_DCHECK_LE(rtp_payload_type, 0x7f); if (decoders_.count(rtp_payload_type) == 0) { decoders_.insert(std::make_pair( - rtp_payload_type, - DecoderInfo(audio_format, codec_pair_id_, decoder_factory_.get()))); + rtp_payload_type, DecoderInfo(env_, audio_format, codec_pair_id_, + decoder_factory_.get()))); } else { // The mapping for this payload type hasn't changed. } @@ -150,7 +146,7 @@ int DecoderDatabase::RegisterPayload(int rtp_payload_type, } const auto ret = decoders_.insert(std::make_pair( rtp_payload_type, - DecoderInfo(audio_format, codec_pair_id_, decoder_factory_.get()))); + DecoderInfo(env_, audio_format, codec_pair_id_, decoder_factory_.get()))); if (ret.second == false) { // Database already contains a decoder with type `rtp_payload_type`. return kDecoderExists; diff --git a/modules/audio_coding/neteq/decoder_database.h b/modules/audio_coding/neteq/decoder_database.h index 8cf2019135..9c1a1ec1ef 100644 --- a/modules/audio_coding/neteq/decoder_database.h +++ b/modules/audio_coding/neteq/decoder_database.h @@ -18,6 +18,7 @@ #include "absl/strings/string_view.h" #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_format.h" +#include "api/environment/environment.h" #include "api/scoped_refptr.h" #include "modules/audio_coding/codecs/cng/webrtc_cng.h" #include "modules/audio_coding/neteq/packet.h" @@ -39,13 +40,10 @@ class DecoderDatabase { // Class that stores decoder info in the database. class DecoderInfo { public: - DecoderInfo(const SdpAudioFormat& audio_format, - absl::optional codec_pair_id, - AudioDecoderFactory* factory, - absl::string_view codec_name); - explicit DecoderInfo(const SdpAudioFormat& audio_format, - absl::optional codec_pair_id, - AudioDecoderFactory* factory = nullptr); + DecoderInfo(const Environment& env, + const SdpAudioFormat& audio_format, + std::optional codec_pair_id, + AudioDecoderFactory* factory); DecoderInfo(DecoderInfo&&); ~DecoderInfo(); @@ -83,25 +81,21 @@ class DecoderDatabase { // Returns true if the decoder's format is named `name`. bool IsType(absl::string_view name) const; - const std::string& get_name() const { return name_; } + const std::string& get_name() const { return audio_format_.name; } private: - // TODO(ossu): `name_` is kept here while we retain the old external - // decoder interface. Remove this once using an - // AudioDecoderFactory has supplanted the old functionality. - const std::string name_; - + const Environment env_; const SdpAudioFormat audio_format_; - const absl::optional codec_pair_id_; + const std::optional codec_pair_id_; AudioDecoderFactory* const factory_; mutable std::unique_ptr decoder_; // Set iff this is a comfort noise decoder. struct CngDecoder { - static absl::optional Create(const SdpAudioFormat& format); + static std::optional Create(const SdpAudioFormat& format); int sample_rate_hz; }; - const absl::optional cng_decoder_; + const std::optional cng_decoder_; enum class Subtype : int8_t { kNormal, kComfortNoise, kDtmf, kRed }; @@ -114,9 +108,9 @@ class DecoderDatabase { // only 7 bits). 
static const uint8_t kRtpPayloadTypeError = 0xFF; - DecoderDatabase( - const rtc::scoped_refptr& decoder_factory, - absl::optional codec_pair_id); + DecoderDatabase(const Environment& env, + scoped_refptr decoder_factory, + std::optional codec_pair_id); virtual ~DecoderDatabase(); @@ -192,12 +186,13 @@ class DecoderDatabase { private: typedef std::map DecoderMap; + const Environment env_; DecoderMap decoders_; int active_decoder_type_; int active_cng_decoder_type_; mutable std::unique_ptr active_cng_decoder_; - rtc::scoped_refptr decoder_factory_; - const absl::optional codec_pair_id_; + scoped_refptr decoder_factory_; + const std::optional codec_pair_id_; }; } // namespace webrtc diff --git a/modules/audio_coding/neteq/decoder_database_unittest.cc b/modules/audio_coding/neteq/decoder_database_unittest.cc index 445c21924b..e11b38070e 100644 --- a/modules/audio_coding/neteq/decoder_database_unittest.cc +++ b/modules/audio_coding/neteq/decoder_database_unittest.cc @@ -14,27 +14,28 @@ #include +#include "absl/memory/memory.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/environment/environment_factory.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/mock_audio_decoder.h" #include "test/mock_audio_decoder_factory.h" -using ::testing::_; -using ::testing::Invoke; - namespace webrtc { +using ::testing::WithArg; + TEST(DecoderDatabase, CreateAndDestroy) { - DecoderDatabase db(rtc::make_ref_counted(), - absl::nullopt); + DecoderDatabase db(CreateEnvironment(), + make_ref_counted(), std::nullopt); EXPECT_EQ(0, db.Size()); EXPECT_TRUE(db.Empty()); } TEST(DecoderDatabase, InsertAndRemove) { - auto factory = rtc::make_ref_counted(); - DecoderDatabase db(factory, absl::nullopt); + DecoderDatabase db(CreateEnvironment(), + make_ref_counted(), std::nullopt); const uint8_t kPayloadType = 0; const std::string kCodecName = "Robert\'); DROP TABLE Students;"; EXPECT_EQ( @@ -48,8 +49,8 @@ TEST(DecoderDatabase, InsertAndRemove) { } TEST(DecoderDatabase, InsertAndRemoveAll) { - auto factory = rtc::make_ref_counted(); - DecoderDatabase db(factory, absl::nullopt); + DecoderDatabase db(CreateEnvironment(), + make_ref_counted(), std::nullopt); const std::string kCodecName1 = "Robert\'); DROP TABLE Students;"; const std::string kCodecName2 = "https://xkcd.com/327/"; EXPECT_EQ(DecoderDatabase::kOK, @@ -64,16 +65,14 @@ TEST(DecoderDatabase, InsertAndRemoveAll) { } TEST(DecoderDatabase, GetDecoderInfo) { - auto factory = rtc::make_ref_counted(); + auto factory = make_ref_counted(); auto* decoder = new MockAudioDecoder; - EXPECT_CALL(*factory, MakeAudioDecoderMock(_, _, _)) - .WillOnce(Invoke([decoder](const SdpAudioFormat& format, - absl::optional codec_pair_id, - std::unique_ptr* dec) { + EXPECT_CALL(*factory, Create) + .WillOnce(WithArg<1>([decoder](const SdpAudioFormat& format) { EXPECT_EQ("pcmu", format.name); - dec->reset(decoder); + return absl::WrapUnique(decoder); })); - DecoderDatabase db(factory, absl::nullopt); + DecoderDatabase db(CreateEnvironment(), std::move(factory), std::nullopt); const uint8_t kPayloadType = 0; const std::string kCodecName = "pcmu"; EXPECT_EQ( @@ -90,7 +89,8 @@ TEST(DecoderDatabase, GetDecoderInfo) { } TEST(DecoderDatabase, GetDecoder) { - DecoderDatabase db(CreateBuiltinAudioDecoderFactory(), absl::nullopt); + DecoderDatabase db(CreateEnvironment(), CreateBuiltinAudioDecoderFactory(), + std::nullopt); const uint8_t kPayloadType = 0; EXPECT_EQ(DecoderDatabase::kOK, db.RegisterPayload(kPayloadType, SdpAudioFormat("l16", 8000, 1))); @@ -99,8 
+99,8 @@ TEST(DecoderDatabase, GetDecoder) { } TEST(DecoderDatabase, TypeTests) { - auto factory = rtc::make_ref_counted(); - DecoderDatabase db(factory, absl::nullopt); + DecoderDatabase db(CreateEnvironment(), + make_ref_counted(), std::nullopt); const uint8_t kPayloadTypePcmU = 0; const uint8_t kPayloadTypeCng = 13; const uint8_t kPayloadTypeDtmf = 100; @@ -133,8 +133,8 @@ TEST(DecoderDatabase, TypeTests) { TEST(DecoderDatabase, CheckPayloadTypes) { constexpr int kNumPayloads = 10; - auto factory = rtc::make_ref_counted(); - DecoderDatabase db(factory, absl::nullopt); + DecoderDatabase db(CreateEnvironment(), + make_ref_counted(), std::nullopt); // Load a number of payloads into the database. Payload types are 0, 1, ..., // while the decoder type is the same for all payload types (this does not // matter for the test). @@ -175,7 +175,8 @@ TEST(DecoderDatabase, CheckPayloadTypes) { // Test the methods for setting and getting active speech and CNG decoders. TEST(DecoderDatabase, IF_ISAC(ActiveDecoders)) { - DecoderDatabase db(CreateBuiltinAudioDecoderFactory(), absl::nullopt); + DecoderDatabase db(CreateEnvironment(), CreateBuiltinAudioDecoderFactory(), + std::nullopt); // Load payload types. ASSERT_EQ(DecoderDatabase::kOK, db.RegisterPayload(0, SdpAudioFormat("pcmu", 8000, 1))); diff --git a/modules/audio_coding/neteq/delay_constraints.cc b/modules/audio_coding/neteq/delay_constraints.cc new file mode 100644 index 0000000000..16402189f2 --- /dev/null +++ b/modules/audio_coding/neteq/delay_constraints.cc @@ -0,0 +1,118 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_coding/neteq/delay_constraints.h" + +#include + +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_minmax.h" + +namespace webrtc { + +constexpr int kMinBaseMinimumDelayMs = 0; +constexpr int kMaxBaseMinimumDelayMs = 10000; + +DelayConstraints::DelayConstraints(int max_packets_in_buffer, + int base_minimum_delay_ms) + : max_packets_in_buffer_(max_packets_in_buffer), + base_minimum_delay_ms_(base_minimum_delay_ms), + effective_minimum_delay_ms_(base_minimum_delay_ms), + minimum_delay_ms_(0), + maximum_delay_ms_(0) {} + +int DelayConstraints::Clamp(int delay_ms) const { + delay_ms = std::max(delay_ms, effective_minimum_delay_ms_); + if (maximum_delay_ms_ > 0) { + delay_ms = std::min(delay_ms, maximum_delay_ms_); + } + if (packet_len_ms_ > 0) { + // Limit to 75% of maximum buffer size. 
+ delay_ms = + std::min(delay_ms, 3 * max_packets_in_buffer_ * packet_len_ms_ / 4); + } + return delay_ms; +} + +bool DelayConstraints::SetPacketAudioLength(int length_ms) { + if (length_ms <= 0) { + RTC_LOG_F(LS_ERROR) << "length_ms = " << length_ms; + return false; + } + packet_len_ms_ = length_ms; + return true; +} + +bool DelayConstraints::IsValidMinimumDelay(int delay_ms) const { + return 0 <= delay_ms && delay_ms <= MinimumDelayUpperBound(); +} + +bool DelayConstraints::IsValidBaseMinimumDelay(int delay_ms) const { + return kMinBaseMinimumDelayMs <= delay_ms && + delay_ms <= kMaxBaseMinimumDelayMs; +} + +bool DelayConstraints::SetMinimumDelay(int delay_ms) { + if (!IsValidMinimumDelay(delay_ms)) { + return false; + } + + minimum_delay_ms_ = delay_ms; + UpdateEffectiveMinimumDelay(); + return true; +} + +bool DelayConstraints::SetMaximumDelay(int delay_ms) { + // If `delay_ms` is zero then it unsets the maximum delay and target level is + // unconstrained by maximum delay. + if (delay_ms != 0 && delay_ms < minimum_delay_ms_) { + // Maximum delay shouldn't be less than minimum delay or less than a packet. + return false; + } + + maximum_delay_ms_ = delay_ms; + UpdateEffectiveMinimumDelay(); + return true; +} + +bool DelayConstraints::SetBaseMinimumDelay(int delay_ms) { + if (!IsValidBaseMinimumDelay(delay_ms)) { + return false; + } + + base_minimum_delay_ms_ = delay_ms; + UpdateEffectiveMinimumDelay(); + return true; +} + +int DelayConstraints::GetBaseMinimumDelay() const { + return base_minimum_delay_ms_; +} + +void DelayConstraints::UpdateEffectiveMinimumDelay() { + // Clamp `base_minimum_delay_ms_` into the range which can be effectively + // used. + const int base_minimum_delay_ms = + SafeClamp(base_minimum_delay_ms_, 0, MinimumDelayUpperBound()); + effective_minimum_delay_ms_ = + std::max(minimum_delay_ms_, base_minimum_delay_ms); +} + +int DelayConstraints::MinimumDelayUpperBound() const { + // Choose the lowest possible bound discarding 0 cases which mean the value + // is not set and unconstrained. + int q75 = max_packets_in_buffer_ * packet_len_ms_ * 3 / 4; + q75 = q75 > 0 ? q75 : kMaxBaseMinimumDelayMs; + const int maximum_delay_ms = + maximum_delay_ms_ > 0 ? maximum_delay_ms_ : kMaxBaseMinimumDelayMs; + return std::min(maximum_delay_ms, q75); +} + +} // namespace webrtc diff --git a/modules/audio_coding/neteq/delay_constraints.h b/modules/audio_coding/neteq/delay_constraints.h new file mode 100644 index 0000000000..5437d21fdf --- /dev/null +++ b/modules/audio_coding/neteq/delay_constraints.h @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_CODING_NETEQ_DELAY_CONSTRAINTS_H_ +#define MODULES_AUDIO_CODING_NETEQ_DELAY_CONSTRAINTS_H_ + +namespace webrtc { + +class DelayConstraints { + public: + DelayConstraints(int max_packets_in_buffer, int base_minimum_delay_ms); + + // Returns the delay (in ms) clamped to the range of valid delays. + int Clamp(int delay_ms) const; + + // Notifies the DelayManager of how much audio data is carried in each packet. + bool SetPacketAudioLength(int length_ms); + + // Accessors and mutators. + // Assuming `delay` is in valid range. 
+ bool SetMinimumDelay(int delay_ms); + bool SetMaximumDelay(int delay_ms); + bool SetBaseMinimumDelay(int delay_ms); + int GetBaseMinimumDelay() const; + + // These accessors are only intended for testing purposes. + int effective_minimum_delay_ms_for_test() const { + return effective_minimum_delay_ms_; + } + + private: + // Provides value which minimum delay can't exceed based on current buffer + // size and given `maximum_delay_ms_`. Lower bound is a constant 0. + int MinimumDelayUpperBound() const; + + // Updates `effective_minimum_delay_ms_` delay based on current + // `minimum_delay_ms_`, `base_minimum_delay_ms_`, `maximum_delay_ms_` and + // buffer size. + void UpdateEffectiveMinimumDelay(); + + // Makes sure that `delay_ms` is less than maximum delay, if any maximum + // is set. Also, if possible check `delay_ms` to be less than 75% of + // `max_packets_in_buffer_`. + bool IsValidMinimumDelay(int delay_ms) const; + + // Checks that `delay_ms` is in the range of valid base minimum delays. + bool IsValidBaseMinimumDelay(int delay_ms) const; + + // TODO(jakobi): set maximum buffer delay instead of number of packets. + const int max_packets_in_buffer_; + + int base_minimum_delay_ms_; + int effective_minimum_delay_ms_; // Used as lower bound for target delay. + int minimum_delay_ms_; // Externally set minimum delay. + int maximum_delay_ms_; // Externally set maximum delay. No maximum + // delay is enforced if <= 0. + + int packet_len_ms_ = 0; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_CODING_NETEQ_DELAY_CONSTRAINTS_H_ diff --git a/modules/audio_coding/neteq/delay_constraints_unittest.cc b/modules/audio_coding/neteq/delay_constraints_unittest.cc new file mode 100644 index 0000000000..3cefb2172e --- /dev/null +++ b/modules/audio_coding/neteq/delay_constraints_unittest.cc @@ -0,0 +1,198 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_coding/neteq/delay_constraints.h" + +#include "rtc_base/checks.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +constexpr int kMaxNumberOfPackets = 200; +constexpr int kFrameSizeMs = 20; +constexpr int kMaxBufferSizeMs = kMaxNumberOfPackets * kFrameSizeMs; + +TEST(DelayConstraintsTest, NoConstraints) { + DelayConstraints constraints(kMaxNumberOfPackets, 0); + EXPECT_EQ(constraints.Clamp(100), 100); + EXPECT_EQ(constraints.Clamp(0), 0); +} + +TEST(DelayConstraintsTest, MaxDelay) { + DelayConstraints constraints(kMaxNumberOfPackets, 0); + constexpr int kMaxDelayMs = 60; + EXPECT_TRUE(constraints.SetMaximumDelay(kMaxDelayMs)); + EXPECT_EQ(constraints.Clamp(100), kMaxDelayMs); +} + +TEST(DelayConstraintsTest, MinDelay) { + DelayConstraints constraints(kMaxNumberOfPackets, 0); + constexpr int kMinDelayMs = 7 * kFrameSizeMs; + constraints.SetMinimumDelay(kMinDelayMs); + EXPECT_EQ(constraints.Clamp(20), kMinDelayMs); +} + +TEST(DelayConstraintsTest, BaseMinimumDelayCheckValidRange) { + DelayConstraints constraints(kMaxNumberOfPackets, 0); + // Base minimum delay should be between [0, 10000] milliseconds. 
+ EXPECT_FALSE(constraints.SetBaseMinimumDelay(-1)); + EXPECT_FALSE(constraints.SetBaseMinimumDelay(10001)); + EXPECT_EQ(constraints.GetBaseMinimumDelay(), 0); + + EXPECT_TRUE(constraints.SetBaseMinimumDelay(7999)); + EXPECT_EQ(constraints.GetBaseMinimumDelay(), 7999); +} + +TEST(DelayConstraintsTest, BaseMinimumDelayLowerThanMinimumDelay) { + DelayConstraints constraints(kMaxNumberOfPackets, 0); + constexpr int kBaseMinimumDelayMs = 100; + constexpr int kMinimumDelayMs = 200; + + // Base minimum delay sets lower bound on minimum. That is why when base + // minimum delay is lower than minimum delay we use minimum delay. + RTC_DCHECK_LT(kBaseMinimumDelayMs, kMinimumDelayMs); + + EXPECT_TRUE(constraints.SetBaseMinimumDelay(kBaseMinimumDelayMs)); + EXPECT_TRUE(constraints.SetMinimumDelay(kMinimumDelayMs)); + EXPECT_EQ(constraints.effective_minimum_delay_ms_for_test(), kMinimumDelayMs); +} + +TEST(DelayConstraintsTest, BaseMinimumDelayGreaterThanMinimumDelay) { + DelayConstraints constraints(kMaxNumberOfPackets, 0); + constexpr int kBaseMinimumDelayMs = 70; + constexpr int kMinimumDelayMs = 30; + + // Base minimum delay sets lower bound on minimum. That is why when base + // minimum delay is greater than minimum delay we use base minimum delay. + RTC_DCHECK_GT(kBaseMinimumDelayMs, kMinimumDelayMs); + + EXPECT_TRUE(constraints.SetBaseMinimumDelay(kBaseMinimumDelayMs)); + EXPECT_TRUE(constraints.SetMinimumDelay(kMinimumDelayMs)); + EXPECT_EQ(constraints.effective_minimum_delay_ms_for_test(), + kBaseMinimumDelayMs); +} + +TEST(DelayConstraintsTest, BaseMinimumDelayGreaterThanBufferSize) { + DelayConstraints constraints(kMaxNumberOfPackets, 0); + constexpr int kBaseMinimumDelayMs = kMaxBufferSizeMs + 1; + constexpr int kMinimumDelayMs = 12; + constexpr int kMaximumDelayMs = 20; + constexpr int kMaxBufferSizeMsQ75 = 3 * kMaxBufferSizeMs / 4; + EXPECT_TRUE(constraints.SetPacketAudioLength(kFrameSizeMs)); + + EXPECT_TRUE(constraints.SetMaximumDelay(kMaximumDelayMs)); + + // Base minimum delay is greater than minimum delay, that is why we clamp + // it to current the highest possible value which is maximum delay. + RTC_DCHECK_GT(kBaseMinimumDelayMs, kMinimumDelayMs); + RTC_DCHECK_GT(kBaseMinimumDelayMs, kMaxBufferSizeMs); + RTC_DCHECK_GT(kBaseMinimumDelayMs, kMaximumDelayMs); + RTC_DCHECK_LT(kMaximumDelayMs, kMaxBufferSizeMsQ75); + + EXPECT_TRUE(constraints.SetMinimumDelay(kMinimumDelayMs)); + EXPECT_TRUE(constraints.SetBaseMinimumDelay(kBaseMinimumDelayMs)); + + // Unset maximum value. + EXPECT_TRUE(constraints.SetMaximumDelay(0)); + + // With maximum value unset, the highest possible value now is 75% of + // currently possible maximum buffer size. + EXPECT_EQ(constraints.effective_minimum_delay_ms_for_test(), + kMaxBufferSizeMsQ75); +} + +TEST(DelayConstraintsTest, BaseMinimumDelayGreaterThanMaximumDelay) { + DelayConstraints constraints(kMaxNumberOfPackets, 0); + constexpr int kMaximumDelayMs = 400; + constexpr int kBaseMinimumDelayMs = kMaximumDelayMs + 1; + constexpr int kMinimumDelayMs = 20; + + // Base minimum delay is greater than minimum delay, that is why we clamp + // it to current the highest possible value which is kMaximumDelayMs. 
+ RTC_DCHECK_GT(kBaseMinimumDelayMs, kMinimumDelayMs); + RTC_DCHECK_GT(kBaseMinimumDelayMs, kMaximumDelayMs); + RTC_DCHECK_LT(kMaximumDelayMs, kMaxBufferSizeMs); + + EXPECT_TRUE(constraints.SetMaximumDelay(kMaximumDelayMs)); + EXPECT_TRUE(constraints.SetMinimumDelay(kMinimumDelayMs)); + EXPECT_TRUE(constraints.SetBaseMinimumDelay(kBaseMinimumDelayMs)); + EXPECT_EQ(constraints.effective_minimum_delay_ms_for_test(), kMaximumDelayMs); +} + +TEST(DelayConstraintsTest, BaseMinimumDelayLowerThanMaxSize) { + DelayConstraints constraints(kMaxNumberOfPackets, 0); + constexpr int kMaximumDelayMs = 400; + constexpr int kBaseMinimumDelayMs = kMaximumDelayMs - 1; + constexpr int kMinimumDelayMs = 20; + + // Base minimum delay is greater than minimum delay, and lower than maximum + // delays that is why it is used. + RTC_DCHECK_GT(kBaseMinimumDelayMs, kMinimumDelayMs); + RTC_DCHECK_LT(kBaseMinimumDelayMs, kMaximumDelayMs); + + EXPECT_TRUE(constraints.SetMaximumDelay(kMaximumDelayMs)); + EXPECT_TRUE(constraints.SetMinimumDelay(kMinimumDelayMs)); + EXPECT_TRUE(constraints.SetBaseMinimumDelay(kBaseMinimumDelayMs)); + EXPECT_EQ(constraints.effective_minimum_delay_ms_for_test(), + kBaseMinimumDelayMs); +} + +TEST(DelayConstraintsTest, MinimumDelayMemorization) { + DelayConstraints constraints(kMaxNumberOfPackets, 0); + // Check that when we increase base minimum delay to value higher than + // minimum delay then minimum delay is still memorized. This allows to + // restore effective minimum delay to memorized minimum delay value when we + // decrease base minimum delay. + constexpr int kBaseMinimumDelayMsLow = 10; + constexpr int kMinimumDelayMs = 20; + constexpr int kBaseMinimumDelayMsHigh = 30; + + EXPECT_TRUE(constraints.SetBaseMinimumDelay(kBaseMinimumDelayMsLow)); + EXPECT_TRUE(constraints.SetMinimumDelay(kMinimumDelayMs)); + // Minimum delay is used as it is higher than base minimum delay. + EXPECT_EQ(constraints.effective_minimum_delay_ms_for_test(), kMinimumDelayMs); + + EXPECT_TRUE(constraints.SetBaseMinimumDelay(kBaseMinimumDelayMsHigh)); + // Base minimum delay is used as it is now higher than minimum delay. + EXPECT_EQ(constraints.effective_minimum_delay_ms_for_test(), + kBaseMinimumDelayMsHigh); + + EXPECT_TRUE(constraints.SetBaseMinimumDelay(kBaseMinimumDelayMsLow)); + // Check that minimum delay is memorized and is used again. + EXPECT_EQ(constraints.effective_minimum_delay_ms_for_test(), kMinimumDelayMs); +} + +TEST(DelayConstraintsTest, BaseMinimumDelay) { + DelayConstraints constraints(kMaxNumberOfPackets, 0); + constexpr int kBaseMinimumDelayMs = 7 * kFrameSizeMs; + EXPECT_TRUE(constraints.SetBaseMinimumDelay(kBaseMinimumDelayMs)); + EXPECT_EQ(constraints.GetBaseMinimumDelay(), kBaseMinimumDelayMs); + EXPECT_EQ(constraints.Clamp(20), kBaseMinimumDelayMs); +} + +TEST(DelayConstraintsTest, Failures) { + DelayConstraints constraints(kMaxNumberOfPackets, 0); + // Wrong packet size. + EXPECT_FALSE(constraints.SetPacketAudioLength(0)); + EXPECT_FALSE(constraints.SetPacketAudioLength(-1)); + + // Minimum delay higher than a maximum delay is not accepted. + EXPECT_TRUE(constraints.SetMaximumDelay(20)); + EXPECT_FALSE(constraints.SetMinimumDelay(40)); + + // Maximum delay less than minimum delay is not accepted. 
+ EXPECT_TRUE(constraints.SetMaximumDelay(100)); + EXPECT_TRUE(constraints.SetMinimumDelay(80)); + EXPECT_FALSE(constraints.SetMaximumDelay(60)); +} + +} // namespace +} // namespace webrtc diff --git a/modules/audio_coding/neteq/delay_manager.cc b/modules/audio_coding/neteq/delay_manager.cc index bf3a0f18a1..e9a99cfc76 100644 --- a/modules/audio_coding/neteq/delay_manager.cc +++ b/modules/audio_coding/neteq/delay_manager.cc @@ -15,22 +15,16 @@ #include #include -#include -#include -#include "modules/include/module_common_types_public.h" -#include "rtc_base/checks.h" +#include "api/field_trials_view.h" +#include "api/neteq/tick_timer.h" +#include "modules/audio_coding/neteq/reorder_optimizer.h" #include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/numerics/safe_minmax.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { -constexpr int kMinBaseMinimumDelayMs = 0; -constexpr int kMaxBaseMinimumDelayMs = 10000; constexpr int kStartDelayMs = 80; std::unique_ptr MaybeCreateReorderOptimizer( @@ -45,7 +39,7 @@ std::unique_ptr MaybeCreateReorderOptimizer( } // namespace -DelayManager::Config::Config() { +DelayManager::Config::Config(const FieldTrialsView& field_trials) { StructParametersParser::Create( // "quantile", &quantile, // "forget_factor", &forget_factor, // @@ -54,8 +48,7 @@ DelayManager::Config::Config() { "use_reorder_optimizer", &use_reorder_optimizer, // "reorder_forget_factor", &reorder_forget_factor, // "ms_per_loss_percent", &ms_per_loss_percent) - ->Parse(webrtc::field_trial::FindFullName( - "WebRTC-Audio-NetEqDelayManagerConfig")); + ->Parse(field_trials.Lookup("WebRTC-Audio-NetEqDelayManagerConfig")); } void DelayManager::Config::Log() { @@ -71,20 +64,13 @@ void DelayManager::Config::Log() { } DelayManager::DelayManager(const Config& config, const TickTimer* tick_timer) - : max_packets_in_buffer_(config.max_packets_in_buffer), - underrun_optimizer_(tick_timer, + : underrun_optimizer_(tick_timer, (1 << 30) * config.quantile, (1 << 15) * config.forget_factor, config.start_forget_weight, config.resample_interval_ms), reorder_optimizer_(MaybeCreateReorderOptimizer(config)), - base_minimum_delay_ms_(config.base_minimum_delay_ms), - effective_minimum_delay_ms_(config.base_minimum_delay_ms), - minimum_delay_ms_(0), - maximum_delay_ms_(0), target_level_ms_(kStartDelayMs) { - RTC_DCHECK_GE(base_minimum_delay_ms_, 0); - Reset(); } @@ -101,29 +87,9 @@ void DelayManager::Update(int arrival_delay_ms, bool reordered) { target_level_ms_ = std::max( target_level_ms_, reorder_optimizer_->GetOptimalDelayMs().value_or(0)); } - unlimited_target_level_ms_ = target_level_ms_; - target_level_ms_ = std::max(target_level_ms_, effective_minimum_delay_ms_); - if (maximum_delay_ms_ > 0) { - target_level_ms_ = std::min(target_level_ms_, maximum_delay_ms_); - } - if (packet_len_ms_ > 0) { - // Limit to 75% of maximum buffer size. 
- target_level_ms_ = std::min( - target_level_ms_, 3 * max_packets_in_buffer_ * packet_len_ms_ / 4); - } -} - -int DelayManager::SetPacketAudioLength(int length_ms) { - if (length_ms <= 0) { - RTC_LOG_F(LS_ERROR) << "length_ms = " << length_ms; - return -1; - } - packet_len_ms_ = length_ms; - return 0; } void DelayManager::Reset() { - packet_len_ms_ = 0; underrun_optimizer_.Reset(); target_level_ms_ = kStartDelayMs; if (reorder_optimizer_) { @@ -135,73 +101,4 @@ int DelayManager::TargetDelayMs() const { return target_level_ms_; } -int DelayManager::UnlimitedTargetLevelMs() const { - return unlimited_target_level_ms_; -} - -bool DelayManager::IsValidMinimumDelay(int delay_ms) const { - return 0 <= delay_ms && delay_ms <= MinimumDelayUpperBound(); -} - -bool DelayManager::IsValidBaseMinimumDelay(int delay_ms) const { - return kMinBaseMinimumDelayMs <= delay_ms && - delay_ms <= kMaxBaseMinimumDelayMs; -} - -bool DelayManager::SetMinimumDelay(int delay_ms) { - if (!IsValidMinimumDelay(delay_ms)) { - return false; - } - - minimum_delay_ms_ = delay_ms; - UpdateEffectiveMinimumDelay(); - return true; -} - -bool DelayManager::SetMaximumDelay(int delay_ms) { - // If `delay_ms` is zero then it unsets the maximum delay and target level is - // unconstrained by maximum delay. - if (delay_ms != 0 && delay_ms < minimum_delay_ms_) { - // Maximum delay shouldn't be less than minimum delay or less than a packet. - return false; - } - - maximum_delay_ms_ = delay_ms; - UpdateEffectiveMinimumDelay(); - return true; -} - -bool DelayManager::SetBaseMinimumDelay(int delay_ms) { - if (!IsValidBaseMinimumDelay(delay_ms)) { - return false; - } - - base_minimum_delay_ms_ = delay_ms; - UpdateEffectiveMinimumDelay(); - return true; -} - -int DelayManager::GetBaseMinimumDelay() const { - return base_minimum_delay_ms_; -} - -void DelayManager::UpdateEffectiveMinimumDelay() { - // Clamp `base_minimum_delay_ms_` into the range which can be effectively - // used. - const int base_minimum_delay_ms = - rtc::SafeClamp(base_minimum_delay_ms_, 0, MinimumDelayUpperBound()); - effective_minimum_delay_ms_ = - std::max(minimum_delay_ms_, base_minimum_delay_ms); -} - -int DelayManager::MinimumDelayUpperBound() const { - // Choose the lowest possible bound discarding 0 cases which mean the value - // is not set and unconstrained. - int q75 = max_packets_in_buffer_ * packet_len_ms_ * 3 / 4; - q75 = q75 > 0 ? q75 : kMaxBaseMinimumDelayMs; - const int maximum_delay_ms = - maximum_delay_ms_ > 0 ? maximum_delay_ms_ : kMaxBaseMinimumDelayMs; - return std::min(maximum_delay_ms, q75); -} - } // namespace webrtc diff --git a/modules/audio_coding/neteq/delay_manager.h b/modules/audio_coding/neteq/delay_manager.h index a333681535..2e00328084 100644 --- a/modules/audio_coding/neteq/delay_manager.h +++ b/modules/audio_coding/neteq/delay_manager.h @@ -11,14 +11,11 @@ #ifndef MODULES_AUDIO_CODING_NETEQ_DELAY_MANAGER_H_ #define MODULES_AUDIO_CODING_NETEQ_DELAY_MANAGER_H_ -#include // Provide access to size_t. - -#include #include +#include -#include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/neteq/tick_timer.h" -#include "modules/audio_coding/neteq/histogram.h" #include "modules/audio_coding/neteq/reorder_optimizer.h" #include "modules/audio_coding/neteq/underrun_optimizer.h" @@ -27,22 +24,18 @@ namespace webrtc { class DelayManager { public: struct Config { - Config(); + explicit Config(const FieldTrialsView& field_trials); void Log(); // Options that can be configured via field trial. 
double quantile = 0.95; double forget_factor = 0.983; - absl::optional start_forget_weight = 2; - absl::optional resample_interval_ms = 500; + std::optional start_forget_weight = 2; + std::optional resample_interval_ms = 500; bool use_reorder_optimizer = true; double reorder_forget_factor = 0.9993; int ms_per_loss_percent = 20; - - // Options that are externally populated. - int max_packets_in_buffer = 200; - int base_minimum_delay_ms = 0; }; DelayManager(const Config& config, const TickTimer* tick_timer); @@ -66,55 +59,10 @@ class DelayManager { // min/max delay. virtual int TargetDelayMs() const; - // Reports the target delay that would be used if no minimum/maximum delay - // would be set. - virtual int UnlimitedTargetLevelMs() const; - - // Notifies the DelayManager of how much audio data is carried in each packet. - virtual int SetPacketAudioLength(int length_ms); - - // Accessors and mutators. - // Assuming `delay` is in valid range. - virtual bool SetMinimumDelay(int delay_ms); - virtual bool SetMaximumDelay(int delay_ms); - virtual bool SetBaseMinimumDelay(int delay_ms); - virtual int GetBaseMinimumDelay() const; - - // These accessors are only intended for testing purposes. - int effective_minimum_delay_ms_for_test() const { - return effective_minimum_delay_ms_; - } - private: - // Provides value which minimum delay can't exceed based on current buffer - // size and given `maximum_delay_ms_`. Lower bound is a constant 0. - int MinimumDelayUpperBound() const; - - // Updates `effective_minimum_delay_ms_` delay based on current - // `minimum_delay_ms_`, `base_minimum_delay_ms_` and `maximum_delay_ms_` - // and buffer size. - void UpdateEffectiveMinimumDelay(); - - // Makes sure that `delay_ms` is less than maximum delay, if any maximum - // is set. Also, if possible check `delay_ms` to be less than 75% of - // `max_packets_in_buffer_`. - bool IsValidMinimumDelay(int delay_ms) const; - - bool IsValidBaseMinimumDelay(int delay_ms) const; - - // TODO(jakobi): set maximum buffer delay instead of number of packets. - const int max_packets_in_buffer_; UnderrunOptimizer underrun_optimizer_; std::unique_ptr reorder_optimizer_; - - int base_minimum_delay_ms_; - int effective_minimum_delay_ms_; // Used as lower bound for target delay. - int minimum_delay_ms_; // Externally set minimum delay. - int maximum_delay_ms_; // Externally set maximum allowed delay. - - int packet_len_ms_ = 0; int target_level_ms_ = 0; // Currently preferred buffer level. 
- int unlimited_target_level_ms_ = 0; }; } // namespace webrtc diff --git a/modules/audio_coding/neteq/delay_manager_unittest.cc b/modules/audio_coding/neteq/delay_manager_unittest.cc index da5f53188c..169e2ab950 100644 --- a/modules/audio_coding/neteq/delay_manager_unittest.cc +++ b/modules/audio_coding/neteq/delay_manager_unittest.cc @@ -12,235 +12,25 @@ #include "modules/audio_coding/neteq/delay_manager.h" -#include - -#include - -#include "absl/types/optional.h" -#include "modules/audio_coding/neteq/histogram.h" -#include "modules/audio_coding/neteq/mock/mock_histogram.h" -#include "modules/audio_coding/neteq/mock/mock_statistics_calculator.h" -#include "rtc_base/checks.h" -#include "test/field_trial.h" -#include "test/gmock.h" +#include "api/neteq/tick_timer.h" +#include "test/explicit_key_value_config.h" #include "test/gtest.h" namespace webrtc { - namespace { -constexpr int kMaxNumberOfPackets = 200; -constexpr int kTimeStepMs = 10; -constexpr int kFrameSizeMs = 20; -constexpr int kMaxBufferSizeMs = kMaxNumberOfPackets * kFrameSizeMs; - -} // namespace - -class DelayManagerTest : public ::testing::Test { - protected: - DelayManagerTest(); - virtual void SetUp(); - void Update(int delay); - void IncreaseTime(int inc_ms); - - TickTimer tick_timer_; - DelayManager dm_; -}; - -DelayManagerTest::DelayManagerTest() - : dm_(DelayManager::Config(), &tick_timer_) {} - -void DelayManagerTest::SetUp() { - dm_.SetPacketAudioLength(kFrameSizeMs); -} - -void DelayManagerTest::Update(int delay) { - dm_.Update(delay, false); -} - -void DelayManagerTest::IncreaseTime(int inc_ms) { - for (int t = 0; t < inc_ms; t += kTimeStepMs) { - tick_timer_.Increment(); - } -} -TEST_F(DelayManagerTest, CreateAndDestroy) { - // Nothing to do here. The test fixture creates and destroys the DelayManager - // object. -} +using test::ExplicitKeyValueConfig; -TEST_F(DelayManagerTest, UpdateNormal) { +TEST(DelayManagerTest, UpdateNormal) { + TickTimer tick_timer; + DelayManager dm(DelayManager::Config(ExplicitKeyValueConfig("")), + &tick_timer); for (int i = 0; i < 50; ++i) { - Update(0); - IncreaseTime(kFrameSizeMs); + dm.Update(0, false); + tick_timer.Increment(2); } - EXPECT_EQ(20, dm_.TargetDelayMs()); -} - -TEST_F(DelayManagerTest, MaxDelay) { - Update(0); - const int kMaxDelayMs = 60; - EXPECT_GT(dm_.TargetDelayMs(), kMaxDelayMs); - EXPECT_TRUE(dm_.SetMaximumDelay(kMaxDelayMs)); - Update(0); - EXPECT_EQ(kMaxDelayMs, dm_.TargetDelayMs()); -} - -TEST_F(DelayManagerTest, MinDelay) { - Update(0); - int kMinDelayMs = 7 * kFrameSizeMs; - EXPECT_LT(dm_.TargetDelayMs(), kMinDelayMs); - dm_.SetMinimumDelay(kMinDelayMs); - IncreaseTime(kFrameSizeMs); - Update(0); - EXPECT_EQ(kMinDelayMs, dm_.TargetDelayMs()); -} - -TEST_F(DelayManagerTest, BaseMinimumDelayCheckValidRange) { - // Base minimum delay should be between [0, 10000] milliseconds. - EXPECT_FALSE(dm_.SetBaseMinimumDelay(-1)); - EXPECT_FALSE(dm_.SetBaseMinimumDelay(10001)); - EXPECT_EQ(dm_.GetBaseMinimumDelay(), 0); - - EXPECT_TRUE(dm_.SetBaseMinimumDelay(7999)); - EXPECT_EQ(dm_.GetBaseMinimumDelay(), 7999); -} - -TEST_F(DelayManagerTest, BaseMinimumDelayLowerThanMinimumDelay) { - constexpr int kBaseMinimumDelayMs = 100; - constexpr int kMinimumDelayMs = 200; - - // Base minimum delay sets lower bound on minimum. That is why when base - // minimum delay is lower than minimum delay we use minimum delay. 
- RTC_DCHECK_LT(kBaseMinimumDelayMs, kMinimumDelayMs); - - EXPECT_TRUE(dm_.SetBaseMinimumDelay(kBaseMinimumDelayMs)); - EXPECT_TRUE(dm_.SetMinimumDelay(kMinimumDelayMs)); - EXPECT_EQ(dm_.effective_minimum_delay_ms_for_test(), kMinimumDelayMs); -} - -TEST_F(DelayManagerTest, BaseMinimumDelayGreaterThanMinimumDelay) { - constexpr int kBaseMinimumDelayMs = 70; - constexpr int kMinimumDelayMs = 30; - - // Base minimum delay sets lower bound on minimum. That is why when base - // minimum delay is greater than minimum delay we use base minimum delay. - RTC_DCHECK_GT(kBaseMinimumDelayMs, kMinimumDelayMs); - - EXPECT_TRUE(dm_.SetBaseMinimumDelay(kBaseMinimumDelayMs)); - EXPECT_TRUE(dm_.SetMinimumDelay(kMinimumDelayMs)); - EXPECT_EQ(dm_.effective_minimum_delay_ms_for_test(), kBaseMinimumDelayMs); -} - -TEST_F(DelayManagerTest, BaseMinimumDelayGreaterThanBufferSize) { - constexpr int kBaseMinimumDelayMs = kMaxBufferSizeMs + 1; - constexpr int kMinimumDelayMs = 12; - constexpr int kMaximumDelayMs = 20; - constexpr int kMaxBufferSizeMsQ75 = 3 * kMaxBufferSizeMs / 4; - - EXPECT_TRUE(dm_.SetMaximumDelay(kMaximumDelayMs)); - - // Base minimum delay is greater than minimum delay, that is why we clamp - // it to current the highest possible value which is maximum delay. - RTC_DCHECK_GT(kBaseMinimumDelayMs, kMinimumDelayMs); - RTC_DCHECK_GT(kBaseMinimumDelayMs, kMaxBufferSizeMs); - RTC_DCHECK_GT(kBaseMinimumDelayMs, kMaximumDelayMs); - RTC_DCHECK_LT(kMaximumDelayMs, kMaxBufferSizeMsQ75); - - EXPECT_TRUE(dm_.SetMinimumDelay(kMinimumDelayMs)); - EXPECT_TRUE(dm_.SetBaseMinimumDelay(kBaseMinimumDelayMs)); - - // Unset maximum value. - EXPECT_TRUE(dm_.SetMaximumDelay(0)); - - // With maximum value unset, the highest possible value now is 75% of - // currently possible maximum buffer size. - EXPECT_EQ(dm_.effective_minimum_delay_ms_for_test(), kMaxBufferSizeMsQ75); -} - -TEST_F(DelayManagerTest, BaseMinimumDelayGreaterThanMaximumDelay) { - constexpr int kMaximumDelayMs = 400; - constexpr int kBaseMinimumDelayMs = kMaximumDelayMs + 1; - constexpr int kMinimumDelayMs = 20; - - // Base minimum delay is greater than minimum delay, that is why we clamp - // it to current the highest possible value which is kMaximumDelayMs. - RTC_DCHECK_GT(kBaseMinimumDelayMs, kMinimumDelayMs); - RTC_DCHECK_GT(kBaseMinimumDelayMs, kMaximumDelayMs); - RTC_DCHECK_LT(kMaximumDelayMs, kMaxBufferSizeMs); - - EXPECT_TRUE(dm_.SetMaximumDelay(kMaximumDelayMs)); - EXPECT_TRUE(dm_.SetMinimumDelay(kMinimumDelayMs)); - EXPECT_TRUE(dm_.SetBaseMinimumDelay(kBaseMinimumDelayMs)); - EXPECT_EQ(dm_.effective_minimum_delay_ms_for_test(), kMaximumDelayMs); -} - -TEST_F(DelayManagerTest, BaseMinimumDelayLowerThanMaxSize) { - constexpr int kMaximumDelayMs = 400; - constexpr int kBaseMinimumDelayMs = kMaximumDelayMs - 1; - constexpr int kMinimumDelayMs = 20; - - // Base minimum delay is greater than minimum delay, and lower than maximum - // delays that is why it is used. - RTC_DCHECK_GT(kBaseMinimumDelayMs, kMinimumDelayMs); - RTC_DCHECK_LT(kBaseMinimumDelayMs, kMaximumDelayMs); - - EXPECT_TRUE(dm_.SetMaximumDelay(kMaximumDelayMs)); - EXPECT_TRUE(dm_.SetMinimumDelay(kMinimumDelayMs)); - EXPECT_TRUE(dm_.SetBaseMinimumDelay(kBaseMinimumDelayMs)); - EXPECT_EQ(dm_.effective_minimum_delay_ms_for_test(), kBaseMinimumDelayMs); -} - -TEST_F(DelayManagerTest, MinimumDelayMemorization) { - // Check that when we increase base minimum delay to value higher than - // minimum delay then minimum delay is still memorized. 
This allows to - // restore effective minimum delay to memorized minimum delay value when we - // decrease base minimum delay. - constexpr int kBaseMinimumDelayMsLow = 10; - constexpr int kMinimumDelayMs = 20; - constexpr int kBaseMinimumDelayMsHigh = 30; - - EXPECT_TRUE(dm_.SetBaseMinimumDelay(kBaseMinimumDelayMsLow)); - EXPECT_TRUE(dm_.SetMinimumDelay(kMinimumDelayMs)); - // Minimum delay is used as it is higher than base minimum delay. - EXPECT_EQ(dm_.effective_minimum_delay_ms_for_test(), kMinimumDelayMs); - - EXPECT_TRUE(dm_.SetBaseMinimumDelay(kBaseMinimumDelayMsHigh)); - // Base minimum delay is used as it is now higher than minimum delay. - EXPECT_EQ(dm_.effective_minimum_delay_ms_for_test(), kBaseMinimumDelayMsHigh); - - EXPECT_TRUE(dm_.SetBaseMinimumDelay(kBaseMinimumDelayMsLow)); - // Check that minimum delay is memorized and is used again. - EXPECT_EQ(dm_.effective_minimum_delay_ms_for_test(), kMinimumDelayMs); -} - -TEST_F(DelayManagerTest, BaseMinimumDelay) { - // First packet arrival. - Update(0); - - constexpr int kBaseMinimumDelayMs = 7 * kFrameSizeMs; - EXPECT_LT(dm_.TargetDelayMs(), kBaseMinimumDelayMs); - EXPECT_TRUE(dm_.SetBaseMinimumDelay(kBaseMinimumDelayMs)); - EXPECT_EQ(dm_.GetBaseMinimumDelay(), kBaseMinimumDelayMs); - - IncreaseTime(kFrameSizeMs); - Update(0); - EXPECT_EQ(dm_.GetBaseMinimumDelay(), kBaseMinimumDelayMs); - EXPECT_EQ(kBaseMinimumDelayMs, dm_.TargetDelayMs()); -} - -TEST_F(DelayManagerTest, Failures) { - // Wrong packet size. - EXPECT_EQ(-1, dm_.SetPacketAudioLength(0)); - EXPECT_EQ(-1, dm_.SetPacketAudioLength(-1)); - - // Minimum delay higher than a maximum delay is not accepted. - EXPECT_TRUE(dm_.SetMaximumDelay(20)); - EXPECT_FALSE(dm_.SetMinimumDelay(40)); - - // Maximum delay less than minimum delay is not accepted. - EXPECT_TRUE(dm_.SetMaximumDelay(100)); - EXPECT_TRUE(dm_.SetMinimumDelay(80)); - EXPECT_FALSE(dm_.SetMaximumDelay(60)); + EXPECT_EQ(20, dm.TargetDelayMs()); } +} // namespace } // namespace webrtc diff --git a/modules/audio_coding/neteq/dtmf_tone_generator_unittest.cc b/modules/audio_coding/neteq/dtmf_tone_generator_unittest.cc index e843706dd3..989fe05a11 100644 --- a/modules/audio_coding/neteq/dtmf_tone_generator_unittest.cc +++ b/modules/audio_coding/neteq/dtmf_tone_generator_unittest.cc @@ -34,7 +34,7 @@ class DtmfToneGeneratorTest : public ::testing::Test { AudioMultiVector signal(channels); for (int event = 0; event <= 15; ++event) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Checking event " << event << " at sample rate " << fs_hz; SCOPED_TRACE(ss.str()); const int kAttenuation = 0; @@ -73,7 +73,7 @@ class DtmfToneGeneratorTest : public ::testing::Test { EXPECT_EQ(kNumSamples, tone_gen_.Generate(kNumSamples, &ref_signal)); // Test every 5 steps (to save time). 
for (int attenuation = 1; attenuation <= 63; attenuation += 5) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Checking event " << event << " at sample rate " << fs_hz; ss << "; attenuation " << attenuation; SCOPED_TRACE(ss.str()); diff --git a/modules/audio_coding/neteq/expand.cc b/modules/audio_coding/neteq/expand.cc index 9c3274609f..88a68382ab 100644 --- a/modules/audio_coding/neteq/expand.cc +++ b/modules/audio_coding/neteq/expand.cc @@ -13,9 +13,13 @@ #include // memset #include // min, max +#include #include // numeric_limits +#include +#include "common_audio/signal_processing/dot_product_with_scale.h" #include "common_audio/signal_processing/include/signal_processing_library.h" +#include "common_audio/signal_processing/include/spl_inl.h" #include "modules/audio_coding/neteq/audio_multi_vector.h" #include "modules/audio_coding/neteq/background_noise.h" #include "modules/audio_coding/neteq/cross_correlation.h" @@ -23,6 +27,7 @@ #include "modules/audio_coding/neteq/random_vector.h" #include "modules/audio_coding/neteq/statistics_calculator.h" #include "modules/audio_coding/neteq/sync_buffer.h" +#include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" namespace webrtc { @@ -100,45 +105,48 @@ int Expand::Process(AudioMultiVector* output) { // Voiced part. // Generate a weighted vector with the current lag. - size_t expansion_vector_length = max_lag_ + overlap_length_; - size_t current_lag = expand_lags_[current_lag_index_]; + const size_t expansion_vector_length = max_lag_ + overlap_length_; + const size_t current_lag = expand_lags_[current_lag_index_]; // Copy lag+overlap data. - size_t expansion_vector_position = + const size_t expansion_vector_position = expansion_vector_length - current_lag - overlap_length_; - size_t temp_length = current_lag + overlap_length_; + const size_t expansion_temp_length = current_lag + overlap_length_; for (size_t channel_ix = 0; channel_ix < num_channels_; ++channel_ix) { ChannelParameters& parameters = channel_parameters_[channel_ix]; if (current_lag_index_ == 0) { // Use only expand_vector0. - RTC_DCHECK_LE(expansion_vector_position + temp_length, + RTC_DCHECK_LE(expansion_vector_position + expansion_temp_length, parameters.expand_vector0.Size()); - parameters.expand_vector0.CopyTo(temp_length, expansion_vector_position, + parameters.expand_vector0.CopyTo(expansion_temp_length, + expansion_vector_position, voiced_vector_storage); } else if (current_lag_index_ == 1) { - std::unique_ptr temp_0(new int16_t[temp_length]); - parameters.expand_vector0.CopyTo(temp_length, expansion_vector_position, - temp_0.get()); - std::unique_ptr temp_1(new int16_t[temp_length]); - parameters.expand_vector1.CopyTo(temp_length, expansion_vector_position, - temp_1.get()); + std::unique_ptr temp_0(new int16_t[expansion_temp_length]); + parameters.expand_vector0.CopyTo(expansion_temp_length, + expansion_vector_position, temp_0.get()); + std::unique_ptr temp_1(new int16_t[expansion_temp_length]); + parameters.expand_vector1.CopyTo(expansion_temp_length, + expansion_vector_position, temp_1.get()); // Mix 3/4 of expand_vector0 with 1/4 of expand_vector1. WebRtcSpl_ScaleAndAddVectorsWithRound(temp_0.get(), 3, temp_1.get(), 1, 2, - voiced_vector_storage, temp_length); + voiced_vector_storage, + expansion_temp_length); } else if (current_lag_index_ == 2) { // Mix 1/2 of expand_vector0 with 1/2 of expand_vector1. 
- RTC_DCHECK_LE(expansion_vector_position + temp_length, + RTC_DCHECK_LE(expansion_vector_position + expansion_temp_length, parameters.expand_vector0.Size()); - RTC_DCHECK_LE(expansion_vector_position + temp_length, + RTC_DCHECK_LE(expansion_vector_position + expansion_temp_length, parameters.expand_vector1.Size()); - std::unique_ptr temp_0(new int16_t[temp_length]); - parameters.expand_vector0.CopyTo(temp_length, expansion_vector_position, - temp_0.get()); - std::unique_ptr temp_1(new int16_t[temp_length]); - parameters.expand_vector1.CopyTo(temp_length, expansion_vector_position, - temp_1.get()); + std::unique_ptr temp_0(new int16_t[expansion_temp_length]); + parameters.expand_vector0.CopyTo(expansion_temp_length, + expansion_vector_position, temp_0.get()); + std::unique_ptr temp_1(new int16_t[expansion_temp_length]); + parameters.expand_vector1.CopyTo(expansion_temp_length, + expansion_vector_position, temp_1.get()); WebRtcSpl_ScaleAndAddVectorsWithRound(temp_0.get(), 1, temp_1.get(), 1, 1, - voiced_vector_storage, temp_length); + voiced_vector_storage, + expansion_temp_length); } // Get tapering window parameters. Values are in Q15. @@ -223,7 +231,7 @@ int Expand::Process(AudioMultiVector* output) { // >= 64 * fs_mult => go from 1 to 0 in about 32 ms. // temp_shift = getbits(max_lag_) - 5. int temp_shift = - (31 - WebRtcSpl_NormW32(rtc::dchecked_cast(max_lag_))) - 5; + (31 - WebRtcSpl_NormW32(dchecked_cast(max_lag_))) - 5; int16_t mix_factor_increment = 256 >> temp_shift; if (stop_muting_) { mix_factor_increment = 0; @@ -314,8 +322,8 @@ int Expand::Process(AudioMultiVector* output) { : consecutive_expands_ + 1; expand_duration_samples_ += output->Size(); // Clamp the duration counter at 2 seconds. - expand_duration_samples_ = std::min(expand_duration_samples_, - rtc::dchecked_cast(fs_hz_ * 2)); + expand_duration_samples_ = + std::min(expand_duration_samples_, dchecked_cast(fs_hz_ * 2)); return 0; } @@ -743,8 +751,7 @@ void Expand::AnalyzeSignal(int16_t* random_vector) { // the division. // Shift the denominator from Q13 to Q5 before the division. The result of // the division will then be in Q20. - int16_t denom = - rtc::saturated_cast((distortion_lag * slope) >> 8); + int16_t denom = saturated_cast((distortion_lag * slope) >> 8); int temp_ratio = WebRtcSpl_DivW32W16((slope - 8192) << 12, denom); if (slope > 14746) { // slope > 1.8. diff --git a/modules/audio_coding/neteq/expand_uma_logger.cc b/modules/audio_coding/neteq/expand_uma_logger.cc index a91358b489..e735a45636 100644 --- a/modules/audio_coding/neteq/expand_uma_logger.cc +++ b/modules/audio_coding/neteq/expand_uma_logger.cc @@ -47,7 +47,7 @@ void ExpandUmaLogger::UpdateSampleCounter(uint64_t samples, last_value_ = samples; sample_rate_hz_ = sample_rate_hz; if (!last_logged_value_) { - last_logged_value_ = absl::optional(samples); + last_logged_value_ = std::optional(samples); } if (!timer_->Finished()) { @@ -58,7 +58,7 @@ void ExpandUmaLogger::UpdateSampleCounter(uint64_t samples, RTC_DCHECK(last_logged_value_); RTC_DCHECK_GE(last_value_, *last_logged_value_); const uint64_t diff = last_value_ - *last_logged_value_; - last_logged_value_ = absl::optional(last_value_); + last_logged_value_ = std::optional(last_value_); // Calculate rate in percent. 
RTC_DCHECK_GT(sample_rate_hz, 0); const int rate = (100 * diff) / (sample_rate_hz * logging_period_s_); diff --git a/modules/audio_coding/neteq/expand_uma_logger.h b/modules/audio_coding/neteq/expand_uma_logger.h index cc5c20a886..d6f83719cb 100644 --- a/modules/audio_coding/neteq/expand_uma_logger.h +++ b/modules/audio_coding/neteq/expand_uma_logger.h @@ -13,10 +13,10 @@ #include #include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/neteq/tick_timer.h" namespace webrtc { @@ -48,7 +48,7 @@ class ExpandUmaLogger { const int logging_period_s_; const TickTimer& tick_timer_; std::unique_ptr timer_; - absl::optional last_logged_value_; + std::optional last_logged_value_; uint64_t last_value_ = 0; int sample_rate_hz_ = 0; }; diff --git a/modules/audio_coding/neteq/expand_unittest.cc b/modules/audio_coding/neteq/expand_unittest.cc index 9355fce5e1..4a63a020f2 100644 --- a/modules/audio_coding/neteq/expand_unittest.cc +++ b/modules/audio_coding/neteq/expand_unittest.cc @@ -30,7 +30,8 @@ TEST(Expand, CreateAndDestroy) { BackgroundNoise bgn(channels); SyncBuffer sync_buffer(1, 1000); RandomVector random_vector; - StatisticsCalculator statistics; + TickTimer timer; + StatisticsCalculator statistics(&timer); Expand expand(&bgn, &sync_buffer, &random_vector, &statistics, fs, channels); } @@ -40,7 +41,8 @@ TEST(Expand, CreateUsingFactory) { BackgroundNoise bgn(channels); SyncBuffer sync_buffer(1, 1000); RandomVector random_vector; - StatisticsCalculator statistics; + TickTimer timer; + StatisticsCalculator statistics(&timer); ExpandFactory expand_factory; Expand* expand = expand_factory.Create(&bgn, &sync_buffer, &random_vector, &statistics, fs, channels); @@ -51,7 +53,10 @@ TEST(Expand, CreateUsingFactory) { namespace { class FakeStatisticsCalculator : public StatisticsCalculator { public: - void LogDelayedPacketOutageEvent(int num_samples, int fs_hz) override { + FakeStatisticsCalculator(TickTimer* tick_timer) + : StatisticsCalculator(tick_timer) {} + + void LogDelayedPacketOutageEvent(int num_samples, int /* fs_hz */) override { last_outage_duration_samples_ = num_samples; } @@ -77,6 +82,7 @@ class ExpandTest : public ::testing::Test { background_noise_(num_channels_), sync_buffer_(num_channels_, kNetEqSyncBufferLengthMs * test_sample_rate_hz_ / 1000), + statistics_(&tick_timer_), expand_(&background_noise_, &sync_buffer_, &random_vector_, @@ -106,6 +112,7 @@ class ExpandTest : public ::testing::Test { BackgroundNoise background_noise_; SyncBuffer sync_buffer_; RandomVector random_vector_; + TickTimer tick_timer_; FakeStatisticsCalculator statistics_; Expand expand_; }; @@ -125,7 +132,7 @@ TEST_F(ExpandTest, DelayedPacketOutage) { } expand_.SetParametersForNormalAfterExpand(); // Convert `sum_output_len_samples` to milliseconds. - EXPECT_EQ(rtc::checked_cast(sum_output_len_samples), + EXPECT_EQ(checked_cast(sum_output_len_samples), statistics_.last_outage_duration_samples()); } @@ -163,7 +170,7 @@ TEST_F(ExpandTest, CheckOutageStatsAfterReset) { } expand_.SetParametersForNormalAfterExpand(); // Convert `sum_output_len_samples` to milliseconds. 
- EXPECT_EQ(rtc::checked_cast(sum_output_len_samples), + EXPECT_EQ(checked_cast(sum_output_len_samples), statistics_.last_outage_duration_samples()); } diff --git a/modules/audio_coding/neteq/histogram.cc b/modules/audio_coding/neteq/histogram.cc index 4360d1a904..825c8ac349 100644 --- a/modules/audio_coding/neteq/histogram.cc +++ b/modules/audio_coding/neteq/histogram.cc @@ -13,8 +13,8 @@ #include #include #include +#include -#include "absl/types/optional.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" @@ -22,7 +22,7 @@ namespace webrtc { Histogram::Histogram(size_t num_buckets, int forget_factor, - absl::optional start_forget_weight) + std::optional start_forget_weight) : buckets_(num_buckets, 0), forget_factor_(0), base_forget_factor_(forget_factor), diff --git a/modules/audio_coding/neteq/histogram.h b/modules/audio_coding/neteq/histogram.h index 265a10e00a..169eb33e3d 100644 --- a/modules/audio_coding/neteq/histogram.h +++ b/modules/audio_coding/neteq/histogram.h @@ -13,10 +13,9 @@ #include // Provide access to size_t. +#include #include -#include "absl/types/optional.h" - namespace webrtc { class Histogram { @@ -24,7 +23,7 @@ class Histogram { // Creates histogram with capacity `num_buckets` and `forget_factor` in Q15. Histogram(size_t num_buckets, int forget_factor, - absl::optional start_forget_weight = absl::nullopt); + std::optional start_forget_weight = std::nullopt); virtual ~Histogram(); @@ -47,7 +46,7 @@ class Histogram { // Accessors only intended for testing purposes. int base_forget_factor_for_testing() const { return base_forget_factor_; } int forget_factor_for_testing() const { return forget_factor_; } - absl::optional start_forget_weight_for_testing() const { + std::optional start_forget_weight_for_testing() const { return start_forget_weight_; } @@ -56,7 +55,7 @@ class Histogram { int forget_factor_; // Q15 const int base_forget_factor_; int add_count_; - const absl::optional start_forget_weight_; + const std::optional start_forget_weight_; }; } // namespace webrtc diff --git a/modules/audio_coding/neteq/merge.cc b/modules/audio_coding/neteq/merge.cc index 22cf6a7754..17767975d7 100644 --- a/modules/audio_coding/neteq/merge.cc +++ b/modules/audio_coding/neteq/merge.cc @@ -61,7 +61,7 @@ size_t Merge::Process(int16_t* input, // Transfer input signal to an AudioMultiVector. AudioMultiVector input_vector(num_channels_); input_vector.PushBackInterleaved( - rtc::ArrayView(input, input_length)); + ArrayView(input, input_length)); size_t input_length_per_channel = input_vector.Size(); RTC_DCHECK_EQ(input_length_per_channel, input_length / num_channels_); @@ -211,8 +211,8 @@ int16_t Merge::SignalScaling(const int16_t* input, size_t input_length, const int16_t* expanded_signal) const { // Adjust muting factor if new vector is more or less of the BGN energy. 
- const auto mod_input_length = rtc::SafeMin( - 64 * rtc::dchecked_cast(fs_mult_), input_length); + const auto mod_input_length = + SafeMin(64 * dchecked_cast(fs_mult_), input_length); const int16_t expanded_max = WebRtcSpl_MaxAbsValueW16(expanded_signal, mod_input_length); int32_t factor = diff --git a/modules/audio_coding/neteq/merge_unittest.cc b/modules/audio_coding/neteq/merge_unittest.cc index d5a55eb056..87740af508 100644 --- a/modules/audio_coding/neteq/merge_unittest.cc +++ b/modules/audio_coding/neteq/merge_unittest.cc @@ -32,7 +32,8 @@ TEST(Merge, CreateAndDestroy) { BackgroundNoise bgn(channels); SyncBuffer sync_buffer(1, 1000); RandomVector random_vector; - StatisticsCalculator statistics; + TickTimer timer; + StatisticsCalculator statistics(&timer); Expand expand(&bgn, &sync_buffer, &random_vector, &statistics, fs, channels); Merge merge(fs, channels, &expand, &sync_buffer); } @@ -52,6 +53,7 @@ class MergeTest : public testing::TestWithParam { background_noise_(num_channels_), sync_buffer_(num_channels_, kNetEqSyncBufferLengthMs * test_sample_rate_hz_ / 1000), + statistics_(&timer_), expand_(&background_noise_, &sync_buffer_, &random_vector_, @@ -86,6 +88,7 @@ class MergeTest : public testing::TestWithParam { BackgroundNoise background_noise_; SyncBuffer sync_buffer_; RandomVector random_vector_; + TickTimer timer_; StatisticsCalculator statistics_; Expand expand_; Merge merge_; diff --git a/modules/audio_coding/neteq/mock/mock_decoder_database.h b/modules/audio_coding/neteq/mock/mock_decoder_database.h index 2394120e99..08e12156df 100644 --- a/modules/audio_coding/neteq/mock/mock_decoder_database.h +++ b/modules/audio_coding/neteq/mock/mock_decoder_database.h @@ -13,6 +13,7 @@ #include +#include "api/environment/environment_factory.h" #include "modules/audio_coding/neteq/decoder_database.h" #include "test/gmock.h" @@ -20,9 +21,10 @@ namespace webrtc { class MockDecoderDatabase : public DecoderDatabase { public: - explicit MockDecoderDatabase( - rtc::scoped_refptr factory = nullptr) - : DecoderDatabase(factory, absl::nullopt) {} + MockDecoderDatabase() + : DecoderDatabase(CreateEnvironment(), + /*decoder_factory=*/nullptr, + /*codec_pair_id=*/std::nullopt) {} ~MockDecoderDatabase() override { Die(); } MOCK_METHOD(void, Die, ()); MOCK_METHOD(bool, Empty, (), (const, override)); diff --git a/modules/audio_coding/neteq/mock/mock_neteq_controller.h b/modules/audio_coding/neteq/mock/mock_neteq_controller.h index dc5cab4b16..ea7809a1b8 100644 --- a/modules/audio_coding/neteq/mock/mock_neteq_controller.h +++ b/modules/audio_coding/neteq/mock/mock_neteq_controller.h @@ -39,7 +39,7 @@ class MockNetEqController : public NetEqController { MOCK_METHOD(void, ExpandDecision, (NetEq::Operation operation), (override)); MOCK_METHOD(void, AddSampleMemory, (int32_t value), (override)); MOCK_METHOD(int, TargetLevelMs, (), (const, override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, PacketArrived, (int fs_hz, bool should_update_stats, diff --git a/modules/audio_coding/neteq/mock/mock_packet_arrival_history.h b/modules/audio_coding/neteq/mock/mock_packet_arrival_history.h new file mode 100644 index 0000000000..23731d093b --- /dev/null +++ b/modules/audio_coding/neteq/mock/mock_packet_arrival_history.h @@ -0,0 +1,31 @@ +/* + * Copyright 2023 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PACKET_ARRIVAL_HISTORY_H_ +#define MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PACKET_ARRIVAL_HISTORY_H_ + +#include "api/neteq/tick_timer.h" +#include "modules/audio_coding/neteq/packet_arrival_history.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockPacketArrivalHistory : public PacketArrivalHistory { + public: + MockPacketArrivalHistory(const TickTimer* tick_timer) + : PacketArrivalHistory(tick_timer, 0) {} + + MOCK_METHOD(int, GetDelayMs, (uint32_t rtp_timestamp), (const, override)); + MOCK_METHOD(int, GetMaxDelayMs, (), (const, override)); +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PACKET_ARRIVAL_HISTORY_H_ diff --git a/modules/audio_coding/neteq/mock/mock_packet_buffer.h b/modules/audio_coding/neteq/mock/mock_packet_buffer.h index 48357ea466..85bc82bec3 100644 --- a/modules/audio_coding/neteq/mock/mock_packet_buffer.h +++ b/modules/audio_coding/neteq/mock/mock_packet_buffer.h @@ -18,39 +18,15 @@ namespace webrtc { class MockPacketBuffer : public PacketBuffer { public: - MockPacketBuffer(size_t max_number_of_packets, const TickTimer* tick_timer) - : PacketBuffer(max_number_of_packets, tick_timer) {} + MockPacketBuffer(size_t max_number_of_packets, + const TickTimer* tick_timer, + StatisticsCalculator* stats) + : PacketBuffer(max_number_of_packets, tick_timer, stats) {} ~MockPacketBuffer() override { Die(); } MOCK_METHOD(void, Die, ()); - MOCK_METHOD(void, Flush, (StatisticsCalculator * stats), (override)); - MOCK_METHOD(void, - PartialFlush, - (int target_level_ms, - size_t sample_rate, - size_t last_decoded_length, - StatisticsCalculator* stats), - (override)); + MOCK_METHOD(void, Flush, (), (override)); MOCK_METHOD(bool, Empty, (), (const, override)); - MOCK_METHOD(int, - InsertPacket, - (Packet && packet, - StatisticsCalculator* stats, - size_t last_decoded_length, - size_t sample_rate, - int target_level_ms, - const DecoderDatabase& decoder_database), - (override)); - MOCK_METHOD(int, - InsertPacketList, - (PacketList * packet_list, - const DecoderDatabase& decoder_database, - absl::optional* current_rtp_payload_type, - absl::optional* current_cng_rtp_payload_type, - StatisticsCalculator* stats, - size_t last_decoded_length, - size_t sample_rate, - int target_level_ms), - (override)); + MOCK_METHOD(int, InsertPacket, (Packet && packet), (override)); MOCK_METHOD(int, NextTimestamp, (uint32_t * next_timestamp), @@ -60,20 +36,15 @@ class MockPacketBuffer : public PacketBuffer { (uint32_t timestamp, uint32_t* next_timestamp), (const, override)); MOCK_METHOD(const Packet*, PeekNextPacket, (), (const, override)); - MOCK_METHOD(absl::optional, GetNextPacket, (), (override)); - MOCK_METHOD(int, - DiscardNextPacket, - (StatisticsCalculator * stats), - (override)); + MOCK_METHOD(std::optional, GetNextPacket, (), (override)); + MOCK_METHOD(int, DiscardNextPacket, (), (override)); MOCK_METHOD(void, DiscardOldPackets, - (uint32_t timestamp_limit, - uint32_t horizon_samples, - StatisticsCalculator* stats), + (uint32_t timestamp_limit, uint32_t horizon_samples), (override)); MOCK_METHOD(void, DiscardAllOldPackets, - (uint32_t timestamp_limit, StatisticsCalculator* stats), + (uint32_t timestamp_limit), (override)); MOCK_METHOD(size_t, NumPacketsInBuffer, (), (const, override)); }; diff --git 
a/modules/audio_coding/neteq/mock/mock_statistics_calculator.h b/modules/audio_coding/neteq/mock/mock_statistics_calculator.h index f8812478d6..85c70beb6a 100644 --- a/modules/audio_coding/neteq/mock/mock_statistics_calculator.h +++ b/modules/audio_coding/neteq/mock/mock_statistics_calculator.h @@ -18,6 +18,9 @@ namespace webrtc { class MockStatisticsCalculator : public StatisticsCalculator { public: + MockStatisticsCalculator(TickTimer* tick_timer) + : StatisticsCalculator(tick_timer) {} + MOCK_METHOD(void, PacketsDiscarded, (size_t num_packets), (override)); MOCK_METHOD(void, SecondaryPacketsDiscarded, diff --git a/modules/audio_coding/neteq/nack_tracker.cc b/modules/audio_coding/neteq/nack_tracker.cc index 71c6fc3048..f6f6b80dbc 100644 --- a/modules/audio_coding/neteq/nack_tracker.cc +++ b/modules/audio_coding/neteq/nack_tracker.cc @@ -13,10 +13,10 @@ #include #include +#include "api/field_trials_view.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { @@ -28,14 +28,13 @@ constexpr char kNackTrackerConfigFieldTrial[] = } // namespace -NackTracker::Config::Config() { +NackTracker::Config::Config(const FieldTrialsView& field_trials) { auto parser = StructParametersParser::Create( "packet_loss_forget_factor", &packet_loss_forget_factor, "ms_per_loss_percent", &ms_per_loss_percent, "never_nack_multiple_times", &never_nack_multiple_times, "require_valid_rtt", &require_valid_rtt, "max_loss_rate", &max_loss_rate); - parser->Parse( - webrtc::field_trial::FindFullName(kNackTrackerConfigFieldTrial)); + parser->Parse(field_trials.Lookup(kNackTrackerConfigFieldTrial)); RTC_LOG(LS_INFO) << "Nack tracker config:" " packet_loss_forget_factor=" << packet_loss_forget_factor @@ -45,8 +44,9 @@ NackTracker::Config::Config() { << " max_loss_rate=" << max_loss_rate; } -NackTracker::NackTracker() - : sequence_num_last_received_rtp_(0), +NackTracker::NackTracker(const FieldTrialsView& field_trials) + : config_(field_trials), + sequence_num_last_received_rtp_(0), timestamp_last_received_rtp_(0), any_rtp_received_(false), sequence_num_last_decoded_rtp_(0), @@ -98,7 +98,7 @@ void NackTracker::UpdateLastReceivedPacket(uint16_t sequence_number, LimitNackListSize(); } -absl::optional NackTracker::GetSamplesPerPacket( +std::optional NackTracker::GetSamplesPerPacket( uint16_t sequence_number_current_received_rtp, uint32_t timestamp_current_received_rtp) const { uint32_t timestamp_increase = @@ -110,7 +110,7 @@ absl::optional NackTracker::GetSamplesPerPacket( if (samples_per_packet == 0 || samples_per_packet > kMaxPacketSizeMs * sample_rate_khz_) { // Not a valid samples per packet. 
- return absl::nullopt; + return std::nullopt; } return samples_per_packet; } @@ -125,7 +125,7 @@ void NackTracker::UpdateList(uint16_t sequence_number_current_received_rtp, IsNewerSequenceNumber(sequence_number_current_received_rtp, sequence_num_last_decoded_rtp_)); - absl::optional samples_per_packet = GetSamplesPerPacket( + std::optional samples_per_packet = GetSamplesPerPacket( sequence_number_current_received_rtp, timestamp_current_received_rtp); if (!samples_per_packet) { return; diff --git a/modules/audio_coding/neteq/nack_tracker.h b/modules/audio_coding/neteq/nack_tracker.h index d9005da085..a11f483277 100644 --- a/modules/audio_coding/neteq/nack_tracker.h +++ b/modules/audio_coding/neteq/nack_tracker.h @@ -15,9 +15,10 @@ #include #include +#include #include -#include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "modules/include/module_common_types_public.h" #include "rtc_base/gtest_prod_util.h" @@ -54,7 +55,7 @@ class NackTracker { // A limit for the size of the NACK list. static const size_t kNackListSizeLimit = 500; // 10 seconds for 20 ms frame // packets. - NackTracker(); + explicit NackTracker(const FieldTrialsView& field_trials); ~NackTracker(); // Set a maximum for the size of the NACK list. If the last received packet @@ -99,7 +100,7 @@ class NackTracker { // Options that can be configured via field trial. struct Config { - Config(); + explicit Config(const FieldTrialsView& field_trials); // The exponential decay factor used to estimate the packet loss rate. double packet_loss_forget_factor = 0.996; @@ -150,7 +151,7 @@ class NackTracker { // Returns a valid number of samples per packet given the current received // sequence number and timestamp or nullopt of none could be computed. - absl::optional GetSamplesPerPacket( + std::optional GetSamplesPerPacket( uint16_t sequence_number_current_received_rtp, uint32_t timestamp_current_received_rtp) const; diff --git a/modules/audio_coding/neteq/nack_tracker_unittest.cc b/modules/audio_coding/neteq/nack_tracker_unittest.cc index a843425b78..89d3555b29 100644 --- a/modules/audio_coding/neteq/nack_tracker_unittest.cc +++ b/modules/audio_coding/neteq/nack_tracker_unittest.cc @@ -16,12 +16,14 @@ #include #include "modules/audio_coding/include/audio_coding_module_typedefs.h" -#include "test/field_trial.h" +#include "test/explicit_key_value_config.h" #include "test/gtest.h" namespace webrtc { namespace { +using test::ExplicitKeyValueConfig; + const int kSampleRateHz = 16000; const int kPacketSizeMs = 30; const uint32_t kTimestampIncrement = 480; // 30 ms. @@ -54,7 +56,8 @@ bool IsNackListCorrect(const std::vector& nack_list, } // namespace TEST(NackTrackerTest, EmptyListWhenNoPacketLoss) { - NackTracker nack; + ExplicitKeyValueConfig field_trials(""); + NackTracker nack(field_trials); nack.UpdateSampleRate(kSampleRateHz); int seq_num = 1; @@ -72,12 +75,13 @@ TEST(NackTrackerTest, EmptyListWhenNoPacketLoss) { } TEST(NackTrackerTest, LatePacketsMovedToNackThenNackListDoesNotChange) { + ExplicitKeyValueConfig field_trials(""); const uint16_t kSequenceNumberLostPackets[] = {2, 3, 4, 5, 6, 7, 8, 9}; static const int kNumAllLostPackets = sizeof(kSequenceNumberLostPackets) / sizeof(kSequenceNumberLostPackets[0]); for (int k = 0; k < 2; k++) { // Two iteration with/without wrap around. 
- NackTracker nack; + NackTracker nack(field_trials); nack.UpdateSampleRate(kSampleRateHz); uint16_t sequence_num_lost_packets[kNumAllLostPackets]; @@ -119,12 +123,13 @@ TEST(NackTrackerTest, LatePacketsMovedToNackThenNackListDoesNotChange) { } TEST(NackTrackerTest, ArrivedPacketsAreRemovedFromNackList) { + ExplicitKeyValueConfig field_trials(""); const uint16_t kSequenceNumberLostPackets[] = {2, 3, 4, 5, 6, 7, 8, 9}; static const int kNumAllLostPackets = sizeof(kSequenceNumberLostPackets) / sizeof(kSequenceNumberLostPackets[0]); for (int k = 0; k < 2; ++k) { // Two iteration with/without wrap around. - NackTracker nack; + NackTracker nack(field_trials); nack.UpdateSampleRate(kSampleRateHz); uint16_t sequence_num_lost_packets[kNumAllLostPackets]; @@ -180,13 +185,14 @@ TEST(NackTrackerTest, ArrivedPacketsAreRemovedFromNackList) { // Assess if estimation of timestamps and time-to-play is correct. Introduce all // combinations that timestamps and sequence numbers might have wrap around. TEST(NackTrackerTest, EstimateTimestampAndTimeToPlay) { + ExplicitKeyValueConfig field_trials(""); const uint16_t kLostPackets[] = {2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}; static const int kNumAllLostPackets = sizeof(kLostPackets) / sizeof(kLostPackets[0]); for (int k = 0; k < 4; ++k) { - NackTracker nack; + NackTracker nack(field_trials); nack.UpdateSampleRate(kSampleRateHz); // Sequence number wrap around if `k` is 2 or 3; @@ -243,9 +249,10 @@ TEST(NackTrackerTest, EstimateTimestampAndTimeToPlay) { TEST(NackTrackerTest, MissingPacketsPriorToLastDecodedRtpShouldNotBeInNackList) { + ExplicitKeyValueConfig field_trials(""); for (int m = 0; m < 2; ++m) { uint16_t seq_num_offset = (m == 0) ? 0 : 65531; // Wrap around if `m` is 1. - NackTracker nack; + NackTracker nack(field_trials); nack.UpdateSampleRate(kSampleRateHz); // Two consecutive packets to have a correct estimate of timestamp increase. @@ -296,7 +303,8 @@ TEST(NackTrackerTest, } TEST(NackTrackerTest, Reset) { - NackTracker nack; + ExplicitKeyValueConfig field_trials(""); + NackTracker nack(field_trials); nack.UpdateSampleRate(kSampleRateHz); // Two consecutive packets to have a correct estimate of timestamp increase. @@ -320,10 +328,11 @@ TEST(NackTrackerTest, Reset) { } TEST(NackTrackerTest, ListSizeAppliedFromBeginning) { + ExplicitKeyValueConfig field_trials(""); const size_t kNackListSize = 10; for (int m = 0; m < 2; ++m) { uint16_t seq_num_offset = (m == 0) ? 0 : 65525; // Wrap around if `m` is 1. - NackTracker nack; + NackTracker nack(field_trials); nack.UpdateSampleRate(kSampleRateHz); nack.SetMaxNackListSize(kNackListSize); @@ -344,10 +353,11 @@ TEST(NackTrackerTest, ListSizeAppliedFromBeginning) { } TEST(NackTrackerTest, ChangeOfListSizeAppliedAndOldElementsRemoved) { + ExplicitKeyValueConfig field_trials(""); const size_t kNackListSize = 10; for (int m = 0; m < 2; ++m) { uint16_t seq_num_offset = (m == 0) ? 0 : 65525; // Wrap around if `m` is 1. 
- NackTracker nack; + NackTracker nack(field_trials); nack.UpdateSampleRate(kSampleRateHz); uint16_t seq_num = seq_num_offset; @@ -399,8 +409,9 @@ TEST(NackTrackerTest, ChangeOfListSizeAppliedAndOldElementsRemoved) { } TEST(NackTrackerTest, RoudTripTimeIsApplied) { + ExplicitKeyValueConfig field_trials(""); const int kNackListSize = 200; - NackTracker nack; + NackTracker nack(field_trials); nack.UpdateSampleRate(kSampleRateHz); nack.SetMaxNackListSize(kNackListSize); @@ -430,12 +441,12 @@ TEST(NackTrackerTest, RoudTripTimeIsApplied) { // Set never_nack_multiple_times to true with a field trial and verify that // packets are not nacked multiple times. TEST(NackTrackerTest, DoNotNackMultipleTimes) { - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Audio-NetEqNackTrackerConfig/" "packet_loss_forget_factor:0.996,ms_per_loss_percent:20," "never_nack_multiple_times:true/"); const int kNackListSize = 200; - NackTracker nack; + NackTracker nack(field_trials); nack.UpdateSampleRate(kSampleRateHz); nack.SetMaxNackListSize(kNackListSize); @@ -461,8 +472,9 @@ TEST(NackTrackerTest, DoNotNackMultipleTimes) { // Test if estimated packet loss rate is correct. TEST(NackTrackerTest, PacketLossRateCorrect) { + ExplicitKeyValueConfig field_trials(""); const int kNackListSize = 200; - NackTracker nack; + NackTracker nack(field_trials); nack.UpdateSampleRate(kSampleRateHz); nack.SetMaxNackListSize(kNackListSize); uint16_t seq_num = 0; @@ -487,8 +499,9 @@ TEST(NackTrackerTest, PacketLossRateCorrect) { } TEST(NackTrackerTest, DoNotNackAfterDtx) { + ExplicitKeyValueConfig field_trials(""); const int kNackListSize = 200; - NackTracker nack; + NackTracker nack(field_trials); nack.UpdateSampleRate(kSampleRateHz); nack.SetMaxNackListSize(kNackListSize); uint16_t seq_num = 0; @@ -502,10 +515,10 @@ TEST(NackTrackerTest, DoNotNackAfterDtx) { } TEST(NackTrackerTest, DoNotNackIfLossRateIsTooHigh) { - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Audio-NetEqNackTrackerConfig/max_loss_rate:0.4/"); const int kNackListSize = 200; - NackTracker nack; + NackTracker nack(field_trials); nack.UpdateSampleRate(kSampleRateHz); nack.SetMaxNackListSize(kNackListSize); uint16_t seq_num = 0; @@ -527,10 +540,10 @@ TEST(NackTrackerTest, DoNotNackIfLossRateIsTooHigh) { } TEST(NackTrackerTest, OnlyNackIfRttIsValid) { - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Audio-NetEqNackTrackerConfig/require_valid_rtt:true/"); const int kNackListSize = 200; - NackTracker nack; + NackTracker nack(field_trials); nack.UpdateSampleRate(kSampleRateHz); nack.SetMaxNackListSize(kNackListSize); uint16_t seq_num = 0; diff --git a/modules/audio_coding/neteq/neteq_decoder_plc_unittest.cc b/modules/audio_coding/neteq/neteq_decoder_plc_unittest.cc index ec6ade9c11..df22fbdd71 100644 --- a/modules/audio_coding/neteq/neteq_decoder_plc_unittest.cc +++ b/modules/audio_coding/neteq/neteq_decoder_plc_unittest.cc @@ -11,10 +11,10 @@ // Test to verify correct operation when using the decoder-internal PLC. 
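For context on the NackTracker hunks above: the tracker no longer reads the global field-trial registry, so its configuration is injected through the constructor. A hedged sketch of the new construction path, mirroring the trial strings used in the tests (the wrapper function is illustrative):

#include "modules/audio_coding/neteq/nack_tracker.h"
#include "test/explicit_key_value_config.h"

// An empty trial string yields the default NackTracker::Config values.
void ConfigureNackTrackerExample() {
  webrtc::test::ExplicitKeyValueConfig field_trials(
      "WebRTC-Audio-NetEqNackTrackerConfig/max_loss_rate:0.4/");
  webrtc::NackTracker nack(field_trials);
  nack.UpdateSampleRate(16000);
  nack.SetMaxNackListSize(200);
}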
#include +#include #include #include -#include "absl/types/optional.h" #include "modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h" #include "modules/audio_coding/neteq/tools/audio_checksum.h" #include "modules/audio_coding/neteq/tools/audio_sink.h" @@ -61,7 +61,7 @@ class AudioDecoderPlc : public AudioDecoder { } void GeneratePlc(size_t requested_samples_per_channel, - rtc::BufferT* concealment_audio) override { + BufferT* concealment_audio) override { // Instead of generating random data for GeneratePlc we use the same data as // the input, so we can check that we produce the same result independently // of the losses. @@ -75,7 +75,7 @@ class AudioDecoderPlc : public AudioDecoder { int dec_len = DecodeInternal(nullptr, 2 * 10 * sample_rate_hz_ / 1000, sample_rate_hz_, decoded.data(), &speech_type); concealment_audio->AppendData(decoded.data(), dec_len); - concealed_samples_ += rtc::checked_cast(dec_len); + concealed_samples_ += checked_cast(dec_len); if (!last_was_plc) { ++concealment_events_; @@ -97,9 +97,9 @@ class AudioDecoderPlc : public AudioDecoder { // An input sample generator which generates only zero-samples. class ZeroSampleGenerator : public EncodeNetEqInput::Generator { public: - rtc::ArrayView Generate(size_t num_samples) override { + ArrayView Generate(size_t num_samples) override { vec.resize(num_samples, 0); - rtc::ArrayView view(vec); + ArrayView view(vec); RTC_DCHECK_EQ(view.size(), num_samples); return view; } @@ -119,15 +119,15 @@ class LossyInput : public NetEqInput { burst_length_(burst_length), input_(std::move(input)) {} - absl::optional NextPacketTime() const override { + std::optional NextPacketTime() const override { return input_->NextPacketTime(); } - absl::optional NextOutputEventTime() const override { + std::optional NextOutputEventTime() const override { return input_->NextOutputEventTime(); } - absl::optional NextSetMinimumDelayInfo() const override { + std::optional NextSetMinimumDelayInfo() const override { return input_->NextSetMinimumDelayInfo(); } @@ -151,7 +151,7 @@ class LossyInput : public NetEqInput { bool ended() const override { return input_->ended(); } - absl::optional NextHeader() const override { + std::optional NextHeader() const override { return input_->NextHeader(); } @@ -215,7 +215,7 @@ TestStatistics RunTest(int loss_cadence, NetEqTest neteq_test( config, /*decoder_factory=*/ - rtc::make_ref_counted(&dec), + make_ref_counted(&dec), /*codecs=*/decoders, /*text_log=*/nullptr, /*neteq_factory=*/nullptr, /*input=*/std::move(lossy_input), std::move(output), callbacks); EXPECT_LE(kRunTimeMs, neteq_test.Run()); diff --git a/modules/audio_coding/neteq/neteq_impl.cc b/modules/audio_coding/neteq/neteq_impl.cc index 52e8cbad3a..204cde0c12 100644 --- a/modules/audio_coding/neteq/neteq_impl.cc +++ b/modules/audio_coding/neteq/neteq_impl.cc @@ -16,17 +16,29 @@ #include #include #include +#include #include #include +#include "absl/strings/str_cat.h" +#include "api/array_view.h" #include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_format.h" +#include "api/environment/environment.h" +#include "api/neteq/neteq.h" +#include "api/neteq/neteq_controller.h" +#include "api/neteq/neteq_controller_factory.h" #include "api/neteq/tick_timer.h" -#include "common_audio/signal_processing/include/signal_processing_library.h" +#include "api/rtp_headers.h" +#include "api/rtp_packet_info.h" +#include "api/rtp_packet_infos.h" +#include "api/scoped_refptr.h" +#include 
"api/units/time_delta.h" #include "modules/audio_coding/codecs/cng/webrtc_cng.h" #include "modules/audio_coding/neteq/accelerate.h" #include "modules/audio_coding/neteq/background_noise.h" #include "modules/audio_coding/neteq/comfort_noise.h" -#include "modules/audio_coding/neteq/decision_logic.h" #include "modules/audio_coding/neteq/decoder_database.h" #include "modules/audio_coding/neteq/dtmf_buffer.h" #include "modules/audio_coding/neteq/dtmf_tone_generator.h" @@ -36,7 +48,6 @@ #include "modules/audio_coding/neteq/normal.h" #include "modules/audio_coding/neteq/packet.h" #include "modules/audio_coding/neteq/packet_buffer.h" -#include "modules/audio_coding/neteq/post_decode_vad.h" #include "modules/audio_coding/neteq/preemptive_expand.h" #include "modules/audio_coding/neteq/red_payload_splitter.h" #include "modules/audio_coding/neteq/statistics_calculator.h" @@ -47,52 +58,72 @@ #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/sanitizer.h" -#include "rtc_base/strings/audio_format_to_string.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" namespace webrtc { namespace { -std::unique_ptr CreateNetEqController( - const NetEqControllerFactory& controller_factory, - int base_min_delay, - int max_packets_in_buffer, - bool allow_time_stretching, - TickTimer* tick_timer, - webrtc::Clock* clock) { - NetEqController::Config config; - config.base_min_delay_ms = base_min_delay; - config.max_packets_in_buffer = max_packets_in_buffer; - config.allow_time_stretching = allow_time_stretching; - config.tick_timer = tick_timer; - config.clock = clock; - return controller_factory.CreateNetEqController(config); +AudioFrame::SpeechType ToSpeechType(NetEqImpl::OutputType type) { + switch (type) { + case NetEqImpl::OutputType::kNormalSpeech: { + return AudioFrame::kNormalSpeech; + } + case NetEqImpl::OutputType::kCNG: { + return AudioFrame::kCNG; + } + case NetEqImpl::OutputType::kPLC: { + return AudioFrame::kPLC; + } + case NetEqImpl::OutputType::kPLCCNG: { + return AudioFrame::kPLCCNG; + } + case NetEqImpl::OutputType::kCodecPLC: { + return AudioFrame::kCodecPLC; + } + default: + RTC_DCHECK_NOTREACHED(); + return AudioFrame::kUndefined; + } +} + +// Returns true if both payload types are known to the decoder database, and +// have the same sample rate. 
+bool EqualSampleRates(uint8_t pt1, + uint8_t pt2, + const DecoderDatabase& decoder_database) { + auto* di1 = decoder_database.GetDecoderInfo(pt1); + auto* di2 = decoder_database.GetDecoderInfo(pt2); + return di1 && di2 && di1->SampleRateHz() == di2->SampleRateHz(); } } // namespace NetEqImpl::Dependencies::Dependencies( + const Environment& env, const NetEq::Config& config, - Clock* clock, - const rtc::scoped_refptr& decoder_factory, + scoped_refptr decoder_factory, const NetEqControllerFactory& controller_factory) - : clock(clock), + : env(env), tick_timer(new TickTimer), - stats(new StatisticsCalculator), + stats(std::make_unique(tick_timer.get())), decoder_database( - new DecoderDatabase(decoder_factory, config.codec_pair_id)), + std::make_unique(env, + std::move(decoder_factory), + config.codec_pair_id)), dtmf_buffer(new DtmfBuffer(config.sample_rate_hz)), dtmf_tone_generator(new DtmfToneGenerator), - packet_buffer( - new PacketBuffer(config.max_packets_in_buffer, tick_timer.get())), - neteq_controller( - CreateNetEqController(controller_factory, - config.min_delay_ms, - config.max_packets_in_buffer, - !config.for_test_no_time_stretching, - tick_timer.get(), - clock)), + packet_buffer(new PacketBuffer(config.max_packets_in_buffer, + tick_timer.get(), + stats.get())), + neteq_controller(controller_factory.Create( + env, + {.allow_time_stretching = !config.for_test_no_time_stretching, + .max_packets_in_buffer = + static_cast(config.max_packets_in_buffer), + .base_min_delay_ms = config.min_delay_ms, + .tick_timer = tick_timer.get()})), red_payload_splitter(new RedPayloadSplitter), timestamp_scaler(new TimestampScaler(*decoder_database)), accelerate_factory(new AccelerateFactory), @@ -104,7 +135,7 @@ NetEqImpl::Dependencies::~Dependencies() = default; NetEqImpl::NetEqImpl(const NetEq::Config& config, Dependencies&& deps, bool create_components) - : clock_(deps.clock), + : env_(deps.env), tick_timer_(std::move(deps.tick_timer)), decoder_database_(std::move(deps.decoder_database)), dtmf_buffer_(std::move(deps.dtmf_buffer)), @@ -112,7 +143,6 @@ NetEqImpl::NetEqImpl(const NetEq::Config& config, packet_buffer_(std::move(deps.packet_buffer)), red_payload_splitter_(std::move(deps.red_payload_splitter)), timestamp_scaler_(std::move(deps.timestamp_scaler)), - vad_(new PostDecodeVad()), expand_factory_(std::move(deps.expand_factory)), accelerate_factory_(std::move(deps.accelerate_factory)), preemptive_expand_factory_(std::move(deps.preemptive_expand_factory)), @@ -129,12 +159,6 @@ NetEqImpl::NetEqImpl(const NetEq::Config& config, enable_fast_accelerate_(config.enable_fast_accelerate), nack_enabled_(false), enable_muted_state_(config.enable_muted_state), - expand_uma_logger_("WebRTC.Audio.ExpandRatePercent", - 10, // Report once every 10 s. - tick_timer_.get()), - speech_expand_uma_logger_("WebRTC.Audio.SpeechExpandRatePercent", - 10, // Report once every 10 s. - tick_timer_.get()), no_time_stretching_(config.for_test_no_time_stretching) { RTC_LOG(LS_INFO) << "NetEq config: " << config.ToString(); int fs = config.sample_rate_hz; @@ -154,20 +178,17 @@ NetEqImpl::NetEqImpl(const NetEq::Config& config, if (create_components) { SetSampleRateAndChannels(fs, 1); // Default is 1 channel. 
} - RTC_DCHECK(!vad_->enabled()); - if (config.enable_post_decode_vad) { - vad_->Enable(); - } } NetEqImpl::~NetEqImpl() = default; int NetEqImpl::InsertPacket(const RTPHeader& rtp_header, - rtc::ArrayView payload) { - rtc::MsanCheckInitialized(payload); + ArrayView payload, + const RtpPacketInfo& packet_info) { + MsanCheckInitialized(payload); TRACE_EVENT0("webrtc", "NetEqImpl::InsertPacket"); MutexLock lock(&mutex_); - if (InsertPacketInternal(rtp_header, payload) != 0) { + if (InsertPacketInternal(rtp_header, payload, packet_info) != 0) { return kFail; } return kOK; @@ -182,70 +203,22 @@ void NetEqImpl::InsertEmptyPacket(const RTPHeader& rtp_header) { controller_->RegisterEmptyPacket(); } -namespace { -void SetAudioFrameActivityAndType(bool vad_enabled, - NetEqImpl::OutputType type, - AudioFrame::VADActivity last_vad_activity, - AudioFrame* audio_frame) { - switch (type) { - case NetEqImpl::OutputType::kNormalSpeech: { - audio_frame->speech_type_ = AudioFrame::kNormalSpeech; - audio_frame->vad_activity_ = AudioFrame::kVadActive; - break; - } - case NetEqImpl::OutputType::kVadPassive: { - // This should only be reached if the VAD is enabled. - RTC_DCHECK(vad_enabled); - audio_frame->speech_type_ = AudioFrame::kNormalSpeech; - audio_frame->vad_activity_ = AudioFrame::kVadPassive; - break; - } - case NetEqImpl::OutputType::kCNG: { - audio_frame->speech_type_ = AudioFrame::kCNG; - audio_frame->vad_activity_ = AudioFrame::kVadPassive; - break; - } - case NetEqImpl::OutputType::kPLC: { - audio_frame->speech_type_ = AudioFrame::kPLC; - audio_frame->vad_activity_ = last_vad_activity; - break; - } - case NetEqImpl::OutputType::kPLCCNG: { - audio_frame->speech_type_ = AudioFrame::kPLCCNG; - audio_frame->vad_activity_ = AudioFrame::kVadPassive; - break; - } - case NetEqImpl::OutputType::kCodecPLC: { - audio_frame->speech_type_ = AudioFrame::kCodecPLC; - audio_frame->vad_activity_ = last_vad_activity; - break; - } - default: - RTC_DCHECK_NOTREACHED(); - } - if (!vad_enabled) { - // Always set kVadUnknown when receive VAD is inactive. 
- audio_frame->vad_activity_ = AudioFrame::kVadUnknown; - } -} -} // namespace - int NetEqImpl::GetAudio(AudioFrame* audio_frame, bool* muted, int* current_sample_rate_hz, - absl::optional action_override) { + std::optional action_override) { TRACE_EVENT0("webrtc", "NetEqImpl::GetAudio"); MutexLock lock(&mutex_); - if (GetAudioInternal(audio_frame, muted, action_override) != 0) { + if (GetAudioInternal(audio_frame, action_override) != 0) { return kFail; } - RTC_DCHECK_EQ( - audio_frame->sample_rate_hz_, - rtc::dchecked_cast(audio_frame->samples_per_channel_ * 100)); - RTC_DCHECK_EQ(*muted, audio_frame->muted()); - SetAudioFrameActivityAndType(vad_->enabled(), LastOutputType(), - last_vad_activity_, audio_frame); - last_vad_activity_ = audio_frame->vad_activity_; + stats_->IncreaseCounter(output_size_samples_, fs_hz_); + RTC_DCHECK_EQ(audio_frame->sample_rate_hz_, + dchecked_cast(audio_frame->samples_per_channel_ * 100)); + if (muted != nullptr) { + *muted = audio_frame->muted(); + } + audio_frame->speech_type_ = ToSpeechType(LastOutputType()); last_output_sample_rate_hz_ = audio_frame->sample_rate_hz_; RTC_DCHECK(last_output_sample_rate_hz_ == 8000 || last_output_sample_rate_hz_ == 16000 || @@ -265,7 +238,7 @@ void NetEqImpl::SetCodecs(const std::map& codecs) { const std::vector changed_payload_types = decoder_database_->SetCodecs(codecs); for (const int pt : changed_payload_types) { - packet_buffer_->DiscardPacketsWithPayloadType(pt, stats_.get()); + packet_buffer_->DiscardPacketsWithPayloadType(pt); } } @@ -273,7 +246,7 @@ bool NetEqImpl::RegisterPayloadType(int rtp_payload_type, const SdpAudioFormat& audio_format) { RTC_LOG(LS_VERBOSE) << "NetEqImpl::RegisterPayloadType: payload type " << rtp_payload_type << ", codec " - << rtc::ToString(audio_format); + << absl::StrCat(audio_format); MutexLock lock(&mutex_); return decoder_database_->RegisterPayload(rtp_payload_type, audio_format) == DecoderDatabase::kOK; @@ -283,8 +256,7 @@ int NetEqImpl::RemovePayloadType(uint8_t rtp_payload_type) { MutexLock lock(&mutex_); int ret = decoder_database_->Remove(rtp_payload_type); if (ret == DecoderDatabase::kOK || ret == DecoderDatabase::kDecoderNotFound) { - packet_buffer_->DiscardPacketsWithPayloadType(rtp_payload_type, - stats_.get()); + packet_buffer_->DiscardPacketsWithPayloadType(rtp_payload_type); return kOK; } return kFail; @@ -339,7 +311,7 @@ int NetEqImpl::FilteredCurrentDelayMs() const { const int delay_samples = controller_->GetFilteredBufferLevel() + sync_buffer_->FutureLength(); // The division below will truncate. The return value is in ms. - return delay_samples / rtc::CheckedDivExact(fs_hz_, 1000); + return delay_samples / CheckedDivExact(fs_hz_, 1000); } int NetEqImpl::NetworkStatistics(NetEqNetworkStatistics* stats) { @@ -390,26 +362,14 @@ NetEqOperationsAndState NetEqImpl::GetOperationsAndState() const { return result; } -void NetEqImpl::EnableVad() { - MutexLock lock(&mutex_); - RTC_DCHECK(vad_.get()); - vad_->Enable(); -} - -void NetEqImpl::DisableVad() { - MutexLock lock(&mutex_); - RTC_DCHECK(vad_.get()); - vad_->Disable(); -} - -absl::optional NetEqImpl::GetPlayoutTimestamp() const { +std::optional NetEqImpl::GetPlayoutTimestamp() const { MutexLock lock(&mutex_); if (first_packet_ || last_mode_ == Mode::kRfc3389Cng || last_mode_ == Mode::kCodecInternalCng) { // We don't have a valid RTP timestamp until we have decoded our first // RTP packet. Also, the RTP timestamp is not accurate while playing CNG, // which is indicated by returning an empty value. 
- return absl::nullopt; + return std::nullopt; } return timestamp_scaler_->ToExternal(playout_timestamp_); } @@ -419,29 +379,27 @@ int NetEqImpl::last_output_sample_rate_hz() const { return last_output_sample_rate_hz_; } -absl::optional NetEqImpl::GetDecoderFormat( - int payload_type) const { +std::optional NetEqImpl::GetCurrentDecoderFormat() const { MutexLock lock(&mutex_); - const DecoderDatabase::DecoderInfo* const di = - decoder_database_->GetDecoderInfo(payload_type); - if (di) { - const AudioDecoder* const decoder = di->GetDecoder(); - // TODO(kwiberg): Why the special case for RED? - return DecoderFormat{ - /*sample_rate_hz=*/di->IsRed() ? 8000 : di->SampleRateHz(), - /*num_channels=*/ - decoder ? rtc::dchecked_cast(decoder->Channels()) : 1, - /*sdp_format=*/di->GetFormat()}; - } else { - // Payload type not registered. - return absl::nullopt; - } + if (!current_rtp_payload_type_.has_value()) { + return std::nullopt; + } + const DecoderDatabase::DecoderInfo* di = + decoder_database_->GetDecoderInfo(*current_rtp_payload_type_); + if (di == nullptr) { + return std::nullopt; + } + return DecoderFormat{ + /*payload_type=*/*current_rtp_payload_type_, + /*sample_rate_hz=*/di->SampleRateHz(), + /*num_channels=*/dchecked_cast(di->GetDecoder()->Channels()), + /*sdp_format=*/di->GetFormat()}; } void NetEqImpl::FlushBuffers() { MutexLock lock(&mutex_); RTC_LOG(LS_VERBOSE) << "FlushBuffers"; - packet_buffer_->Flush(stats_.get()); + packet_buffer_->Flush(); RTC_DCHECK(sync_buffer_.get()); RTC_DCHECK(expand_.get()); sync_buffer_->Flush(); @@ -454,7 +412,7 @@ void NetEqImpl::FlushBuffers() { void NetEqImpl::EnableNack(size_t max_nack_list_size) { MutexLock lock(&mutex_); if (!nack_enabled_) { - nack_ = std::make_unique(); + nack_ = std::make_unique(env_.field_trials()); nack_enabled_ = true; nack_->UpdateSampleRate(fs_hz_); } @@ -478,8 +436,8 @@ std::vector NetEqImpl::GetNackList(int64_t round_trip_time_ms) const { int NetEqImpl::SyncBufferSizeMs() const { MutexLock lock(&mutex_); - return rtc::dchecked_cast(sync_buffer_->FutureLength() / - rtc::CheckedDivExact(fs_hz_, 1000)); + return dchecked_cast(sync_buffer_->FutureLength() / + CheckedDivExact(fs_hz_, 1000)); } const SyncBuffer* NetEqImpl::sync_buffer_for_test() const { @@ -495,13 +453,12 @@ NetEq::Operation NetEqImpl::last_operation_for_test() const { // Methods below this line are private. int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header, - rtc::ArrayView payload) { + ArrayView payload, + const RtpPacketInfo& packet_info) { if (payload.empty()) { RTC_LOG_F(LS_ERROR) << "payload is empty"; return kInvalidPointer; } - - Timestamp receive_time = clock_->CurrentTime(); stats_->ReceivedPacket(); PacketList packet_list; @@ -533,7 +490,6 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header, // Store these for later use, since the first packet may very well disappear // before we need these values. uint32_t main_timestamp = packet_list.front().timestamp; - uint8_t main_payload_type = packet_list.front().payload_type; uint16_t main_sequence_number = packet_list.front().sequence_number; // Reinitialize NetEq if it's needed (changed SSRC or first call). @@ -542,7 +498,7 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header, // the packet has been successfully inserted into the packet buffer. // Flush the packet buffer and DTMF buffer. - packet_buffer_->Flush(stats_.get()); + packet_buffer_->Flush(); dtmf_buffer_->Flush(); // Update audio buffer timestamp. 
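The InsertPacketInternal hunk above drops the internal clock_->CurrentTime() call: the receive time now arrives with the packet through RtpPacketInfo, and the two-argument InsertPacket overload falls back to Timestamp::MinusInfinity(). A hedged caller-side sketch of the three-argument overload (the helper function and its parameters are illustrative):

#include "api/array_view.h"
#include "api/neteq/neteq.h"
#include "api/rtp_headers.h"
#include "api/rtp_packet_info.h"
#include "system_wrappers/include/clock.h"

namespace webrtc {

// Stamping the packet with its real receive time lets NetEq include packet
// processing time in the jitter-buffer delay statistics; a MinusInfinity
// receive time skips that accounting.
int InsertWithReceiveTime(NetEq& neteq,
                          Clock& clock,
                          const RTPHeader& header,
                          ArrayView<const uint8_t> payload) {
  RtpPacketInfo packet_info(header, /*receive_time=*/clock.CurrentTime());
  return neteq.InsertPacket(header, payload, packet_info);
}

}  // namespace webrtc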
@@ -586,7 +542,6 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header, if (decoder_database_->IsRed(rtp_header.payloadType)) { timestamp_scaler_->ToInternal(&packet_list); main_timestamp = packet_list.front().timestamp; - main_payload_type = packet_list.front().payload_type; main_sequence_number = packet_list.front().sequence_number; } @@ -640,8 +595,8 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header, new_packet.priority.codec_level = result.priority; new_packet.priority.red_level = original_priority.red_level; // Only associate the header information with the primary packet. - if (new_packet.timestamp == rtp_header.timestamp) { - new_packet.packet_info = RtpPacketInfo(rtp_header, receive_time); + if (new_packet.timestamp == packet_info.rtp_timestamp()) { + new_packet.packet_info = packet_info; } new_packet.frame = std::move(result.frame); return new_packet; @@ -681,26 +636,34 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header, number_of_primary_packets); } - // Insert packets in buffer. - const int target_level_ms = controller_->TargetLevelMs(); - const int ret = packet_buffer_->InsertPacketList( - &parsed_packet_list, *decoder_database_, ¤t_rtp_payload_type_, - ¤t_cng_rtp_payload_type_, stats_.get(), decoder_frame_length_, - last_output_sample_rate_hz_, target_level_ms); bool buffer_flush_occured = false; - if (ret == PacketBuffer::kFlushed) { + for (Packet& packet : parsed_packet_list) { + if (MaybeChangePayloadType(packet.payload_type)) { + packet_buffer_->Flush(); + buffer_flush_occured = true; + } + NetEqController::PacketArrivedInfo info = ToPacketArrivedInfo(packet); + int return_val = packet_buffer_->InsertPacket(std::move(packet)); + if (return_val == PacketBuffer::kFlushed) { + buffer_flush_occured = true; + } else if (return_val != PacketBuffer::kOK) { + // An error occurred. + return kOtherError; + } + + info.buffer_flush = buffer_flush_occured; + const bool should_update_stats = !new_codec_ && !buffer_flush_occured; + auto relative_delay = + controller_->PacketArrived(fs_hz_, should_update_stats, info); + if (relative_delay) { + stats_->RelativePacketArrivalDelay(relative_delay.value()); + } + } + + if (buffer_flush_occured) { // Reset DSP timestamp etc. if packet buffer flushed. new_codec_ = true; update_sample_rate_and_channels = true; - buffer_flush_occured = true; - } else if (ret == PacketBuffer::kPartialFlush) { - // Forward sync buffer timestamp - timestamp_ = packet_buffer_->PeekNextPacket()->timestamp; - sync_buffer_->IncreaseEndTimestamp(timestamp_ - - sync_buffer_->end_timestamp()); - buffer_flush_occured = true; - } else if (ret != PacketBuffer::kOK) { - return kOtherError; } if (first_packet_) { @@ -745,46 +708,42 @@ int NetEqImpl::InsertPacketInternal(const RTPHeader& rtp_header, } } - const DecoderDatabase::DecoderInfo* dec_info = - decoder_database_->GetDecoderInfo(main_payload_type); - RTC_DCHECK(dec_info); // Already checked that the payload type is known. 
+ return 0; +} - NetEqController::PacketArrivedInfo info; - info.is_cng_or_dtmf = dec_info->IsComfortNoise() || dec_info->IsDtmf(); - info.packet_length_samples = - number_of_primary_packets * decoder_frame_length_; - info.main_timestamp = main_timestamp; - info.main_sequence_number = main_sequence_number; - info.is_dtx = is_dtx; - info.buffer_flush = buffer_flush_occured; - - const bool should_update_stats = !new_codec_; - auto relative_delay = - controller_->PacketArrived(fs_hz_, should_update_stats, info); - if (relative_delay) { - stats_->RelativePacketArrivalDelay(relative_delay.value()); +bool NetEqImpl::MaybeChangePayloadType(uint8_t payload_type) { + bool changed = false; + if (decoder_database_->IsComfortNoise(payload_type)) { + if (current_cng_rtp_payload_type_ && + *current_cng_rtp_payload_type_ != payload_type) { + // New CNG payload type implies new codec type. + current_rtp_payload_type_ = std::nullopt; + changed = true; + } + current_cng_rtp_payload_type_ = payload_type; + } else if (!decoder_database_->IsDtmf(payload_type)) { + // This must be speech. + if ((current_rtp_payload_type_ && + *current_rtp_payload_type_ != payload_type) || + (current_cng_rtp_payload_type_ && + !EqualSampleRates(payload_type, *current_cng_rtp_payload_type_, + *decoder_database_))) { + current_cng_rtp_payload_type_ = std::nullopt; + changed = true; + } + current_rtp_payload_type_ = payload_type; } - return 0; + return changed; } int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame, - bool* muted, - absl::optional action_override) { + std::optional action_override) { PacketList packet_list; DtmfEvent dtmf_event; Operation operation; bool play_dtmf; - *muted = false; last_decoded_packet_infos_.clear(); tick_timer_->Increment(); - stats_->IncreaseCounter(output_size_samples_, fs_hz_); - const auto lifetime_stats = stats_->GetLifetimeStatistics(); - expand_uma_logger_.UpdateSampleCounter(lifetime_stats.concealed_samples, - fs_hz_); - speech_expand_uma_logger_.UpdateSampleCounter( - lifetime_stats.concealed_samples - - lifetime_stats.silent_concealed_samples, - fs_hz_); // Check for muted state. if (enable_muted_state_ && expand_->Muted() && packet_buffer_->Empty()) { @@ -807,7 +766,6 @@ int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame, audio_frame->num_channels_ = sync_buffer_->Channels(); stats_->ExpandedNoiseSamples(output_size_samples_, false); controller_->NotifyMutedState(); - *muted = true; return 0; } int return_value = GetDecision(&operation, &packet_list, &dtmf_event, @@ -826,11 +784,8 @@ int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame, last_decoded_type_ = speech_type; } - RTC_DCHECK(vad_.get()); bool sid_frame_available = (operation == Operation::kRfc3389Cng && !packet_list.empty()); - vad_->Update(decoded_buffer_.get(), static_cast(length), speech_type, - sid_frame_available, fs_hz_); // This is the criterion that we did decode some data through the speech // decoder, and the operation resulted in comfort noise. 
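With the post-decode VAD removed and the muted flag reported through the frame (see the GetAudio/GetAudioInternal hunks above), a caller can skip the out-parameter and read everything from the AudioFrame. A minimal sketch assuming an existing NetEq instance; nullptr is passed explicitly so the call stays valid regardless of default arguments:

#include "api/audio/audio_frame.h"
#include "api/neteq/neteq.h"

namespace webrtc {

// Pulls 10 ms of audio. NetEq no longer sets vad_activity_; only the speech
// type (normal, CNG, PLC, codec PLC) is written to the frame.
bool PullTenMs(NetEq& neteq, AudioFrame& frame) {
  if (neteq.GetAudio(&frame, /*muted=*/nullptr) != NetEq::kOK) {
    return false;
  }
  return frame.speech_type_ == AudioFrame::kNormalSpeech && !frame.muted();
}

}  // namespace webrtc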
@@ -980,7 +935,7 @@ int NetEqImpl::GetAudioInternal(AudioFrame* audio_frame, (last_mode_ == Mode::kPreemptiveExpandFail) || (last_mode_ == Mode::kRfc3389Cng) || (last_mode_ == Mode::kCodecInternalCng)) { - background_noise_->Update(*sync_buffer_, *vad_.get()); + background_noise_->Update(*sync_buffer_); } if (operation == Operation::kDtmf) { @@ -1028,7 +983,7 @@ int NetEqImpl::GetDecision(Operation* operation, PacketList* packet_list, DtmfEvent* dtmf_event, bool* play_dtmf, - absl::optional action_override) { + std::optional action_override) { // Initialize output variables. *play_dtmf = false; *operation = Operation::kUndefined; @@ -1037,8 +992,7 @@ int NetEqImpl::GetDecision(Operation* operation, uint32_t end_timestamp = sync_buffer_->end_timestamp(); if (!new_codec_) { const uint32_t five_seconds_samples = 5 * fs_hz_; - packet_buffer_->DiscardOldPackets(end_timestamp, five_seconds_samples, - stats_.get()); + packet_buffer_->DiscardOldPackets(end_timestamp, five_seconds_samples); } const Packet* packet = packet_buffer_->PeekNextPacket(); @@ -1058,14 +1012,12 @@ int NetEqImpl::GetDecision(Operation* operation, (end_timestamp >= packet->timestamp || end_timestamp + generated_noise_samples > packet->timestamp)) { // Don't use this packet, discard it. - if (packet_buffer_->DiscardNextPacket(stats_.get()) != - PacketBuffer::kOK) { + if (packet_buffer_->DiscardNextPacket() != PacketBuffer::kOK) { RTC_DCHECK_NOTREACHED(); // Must be ok by design. } // Check buffer again. if (!new_codec_) { - packet_buffer_->DiscardOldPackets(end_timestamp, 5 * fs_hz_, - stats_.get()); + packet_buffer_->DiscardOldPackets(end_timestamp, 5 * fs_hz_); } packet = packet_buffer_->PeekNextPacket(); } @@ -1080,7 +1032,7 @@ int NetEqImpl::GetDecision(Operation* operation, last_mode_ == Mode::kPreemptiveExpandLowEnergy) { // Subtract (samples_left + output_size_samples_) from sampleMemory. controller_->AddSampleMemory( - -(samples_left + rtc::dchecked_cast(output_size_samples_))); + -(samples_left + dchecked_cast(output_size_samples_))); } // Check if it is time to play a DTMF event. @@ -1142,7 +1094,7 @@ int NetEqImpl::GetDecision(Operation* operation, // Check if we already have enough samples in the `sync_buffer_`. If so, // change decision to normal, unless the decision was merge, accelerate, or // preemptive expand. - if (samples_left >= rtc::dchecked_cast(output_size_samples_) && + if (samples_left >= dchecked_cast(output_size_samples_) && *operation != Operation::kMerge && *operation != Operation::kAccelerate && *operation != Operation::kFastAccelerate && *operation != Operation::kPreemptiveExpand) { @@ -1205,7 +1157,7 @@ int NetEqImpl::GetDecision(Operation* operation, // TODO(hlundin): Write test for this. // Update timestamp. timestamp_ = end_timestamp; - const uint64_t generated_noise_samples = + generated_noise_samples = generated_noise_stopwatch_ ? 
generated_noise_stopwatch_->ElapsedTicks() * output_size_samples_ + @@ -1428,7 +1380,7 @@ int NetEqImpl::DecodeCng(AudioDecoder* decoder, return 0; } - while (*decoded_length < rtc::dchecked_cast(output_size_samples_)) { + while (*decoded_length < dchecked_cast(output_size_samples_)) { const int length = decoder->Decode( nullptr, 0, fs_hz_, (decoded_buffer_length_ - *decoded_length) * sizeof(int16_t), @@ -1473,8 +1425,8 @@ int NetEqImpl::DecodeLoop(PacketList* packet_list, operation == Operation::kPreemptiveExpand); auto opt_result = packet_list->front().frame->Decode( - rtc::ArrayView(&decoded_buffer_[*decoded_length], - decoded_buffer_length_ - *decoded_length)); + ArrayView(&decoded_buffer_[*decoded_length], + decoded_buffer_length_ - *decoded_length)); if (packet_list->front().packet_info) { last_decoded_packet_infos_.push_back(*packet_list->front().packet_info); } @@ -1483,7 +1435,7 @@ int NetEqImpl::DecodeLoop(PacketList* packet_list, const auto& result = *opt_result; *speech_type = result.speech_type; if (result.num_decoded_samples > 0) { - *decoded_length += rtc::dchecked_cast(result.num_decoded_samples); + *decoded_length += dchecked_cast(result.num_decoded_samples); // Update `decoder_frame_length_` with number of samples per channel. decoder_frame_length_ = result.num_decoded_samples / decoder->Channels(); @@ -1497,7 +1449,7 @@ int NetEqImpl::DecodeLoop(PacketList* packet_list, packet_list->clear(); break; } - if (*decoded_length > rtc::dchecked_cast(decoded_buffer_length_)) { + if (*decoded_length > dchecked_cast(decoded_buffer_length_)) { // Guard against overflow. RTC_LOG(LS_WARNING) << "Decoded too much."; packet_list->clear(); @@ -1546,8 +1498,8 @@ void NetEqImpl::DoMerge(int16_t* decoded_buffer, merge_->Process(decoded_buffer, decoded_length, algorithm_buffer_.get()); // Correction can be negative. int expand_length_correction = - rtc::dchecked_cast(new_length) - - rtc::dchecked_cast(decoded_length / algorithm_buffer_->Channels()); + dchecked_cast(new_length) - + dchecked_cast(decoded_length / algorithm_buffer_->Channels()); // Update in-call and post-call statistics. if (expand_->Muted() || last_decoded_type_ == AudioDecoder::kComfortNoise) { @@ -1956,7 +1908,7 @@ int NetEqImpl::ExtractPackets(size_t required_samples, // Packet extraction loop. do { timestamp_ = next_packet->timestamp; - absl::optional packet = packet_buffer_->GetNextPacket(); + std::optional packet = packet_buffer_->GetNextPacket(); // `next_packet` may be invalid after the `packet_buffer_` operation. next_packet = nullptr; if (!packet) { @@ -1987,8 +1939,7 @@ int NetEqImpl::ExtractPackets(size_t required_samples, packet_duration = packet->frame->Duration(); // TODO(ossu): Is this the correct way to track Opus FEC packets? 
if (packet->priority.codec_level > 0) { - stats_->SecondaryDecodedSamples( - rtc::dchecked_cast(packet_duration)); + stats_->SecondaryDecodedSamples(dchecked_cast(packet_duration)); } } else if (!has_cng_packet) { RTC_LOG(LS_WARNING) << "Unknown payload type " @@ -2004,9 +1955,17 @@ int NetEqImpl::ExtractPackets(size_t required_samples, extracted_samples = packet->timestamp - first_timestamp + packet_duration; RTC_DCHECK(controller_); - stats_->JitterBufferDelay(packet_duration, waiting_time_ms, - controller_->TargetLevelMs(), - controller_->UnlimitedTargetLevelMs()); + TimeDelta processing_time = TimeDelta::Zero(); + + if (packet->packet_info.has_value() && + !packet->packet_info->receive_time().IsMinusInfinity()) { + processing_time = + env_.clock().CurrentTime() - packet->packet_info->receive_time(); + } + + stats_->JitterBufferDelay( + packet_duration, waiting_time_ms, controller_->TargetLevelMs(), + controller_->UnlimitedTargetLevelMs(), processing_time.us()); // Check what packet is available next. next_packet = packet_buffer_->PeekNextPacket(); @@ -2016,7 +1975,7 @@ int NetEqImpl::ExtractPackets(size_t required_samples, !has_cng_packet; packet_list->push_back(std::move(*packet)); // Store packet in list. - packet = absl::nullopt; // Ensure it's never used after the move. + packet = std::nullopt; // Ensure it's never used after the move. } while (extracted_samples < required_samples && next_packet_available); if (extracted_samples > 0) { @@ -2024,10 +1983,10 @@ int NetEqImpl::ExtractPackets(size_t required_samples, // we could end up in the situation where we never decode anything, since // all incoming packets are considered too old but the buffer will also // never be flooded and flushed. - packet_buffer_->DiscardAllOldPackets(timestamp_, stats_.get()); + packet_buffer_->DiscardAllOldPackets(timestamp_); } - return rtc::dchecked_cast(extracted_samples); + return dchecked_cast(extracted_samples); } void NetEqImpl::UpdatePlcComponents(int fs_hz, size_t channels) { @@ -2059,10 +2018,6 @@ void NetEqImpl::SetSampleRateAndChannels(int fs_hz, size_t channels) { if (cng_decoder) cng_decoder->Reset(); - // Reinit post-decode VAD with new sample rate. - RTC_DCHECK(vad_.get()); // Cannot be NULL here. - vad_->Init(); - // Delete algorithm buffer and create a new one. algorithm_buffer_.reset(new AudioMultiVector(channels)); @@ -2103,7 +2058,6 @@ void NetEqImpl::SetSampleRateAndChannels(int fs_hz, size_t channels) { } NetEqImpl::OutputType NetEqImpl::LastOutputType() { - RTC_DCHECK(vad_.get()); RTC_DCHECK(expand_.get()); if (last_mode_ == Mode::kCodecInternalCng || last_mode_ == Mode::kRfc3389Cng) { @@ -2113,12 +2067,27 @@ NetEqImpl::OutputType NetEqImpl::LastOutputType() { return OutputType::kPLCCNG; } else if (last_mode_ == Mode::kExpand) { return OutputType::kPLC; - } else if (vad_->running() && !vad_->active_speech()) { - return OutputType::kVadPassive; } else if (last_mode_ == Mode::kCodecPlc) { return OutputType::kCodecPLC; } else { return OutputType::kNormalSpeech; } } + +NetEqController::PacketArrivedInfo NetEqImpl::ToPacketArrivedInfo( + const Packet& packet) const { + const DecoderDatabase::DecoderInfo* dec_info = + decoder_database_->GetDecoderInfo(packet.payload_type); + + NetEqController::PacketArrivedInfo info; + info.is_cng_or_dtmf = + dec_info && (dec_info->IsComfortNoise() || dec_info->IsDtmf()); + info.packet_length_samples = + packet.frame ? 
packet.frame->Duration() : decoder_frame_length_; + info.main_timestamp = packet.timestamp; + info.main_sequence_number = packet.sequence_number; + info.is_dtx = packet.frame && packet.frame->IsDtxPacket(); + return info; +} + } // namespace webrtc diff --git a/modules/audio_coding/neteq/neteq_impl.h b/modules/audio_coding/neteq/neteq_impl.h index f27738bcbf..9fbf21abcc 100644 --- a/modules/audio_coding/neteq/neteq_impl.h +++ b/modules/audio_coding/neteq/neteq_impl.h @@ -11,24 +11,32 @@ #ifndef MODULES_AUDIO_CODING_NETEQ_NETEQ_IMPL_H_ #define MODULES_AUDIO_CODING_NETEQ_NETEQ_IMPL_H_ +#include +#include #include #include -#include -#include +#include #include -#include "absl/types/optional.h" +#include "api/array_view.h" #include "api/audio/audio_frame.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_format.h" +#include "api/environment/environment.h" #include "api/neteq/neteq.h" #include "api/neteq/neteq_controller.h" #include "api/neteq/neteq_controller_factory.h" #include "api/neteq/tick_timer.h" +#include "api/rtp_headers.h" #include "api/rtp_packet_info.h" +#include "api/scoped_refptr.h" #include "modules/audio_coding/neteq/audio_multi_vector.h" -#include "modules/audio_coding/neteq/expand_uma_logger.h" #include "modules/audio_coding/neteq/packet.h" +#include "modules/audio_coding/neteq/packet_buffer.h" #include "modules/audio_coding/neteq/random_vector.h" #include "modules/audio_coding/neteq/statistics_calculator.h" +#include "rtc_base/buffer.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -37,7 +45,6 @@ namespace webrtc { // Forward declarations. class Accelerate; class BackgroundNoise; -class Clock; class ComfortNoise; class DecoderDatabase; class DtmfBuffer; @@ -46,9 +53,7 @@ class Expand; class Merge; class NackTracker; class Normal; -class PacketBuffer; class RedPayloadSplitter; -class PostDecodeVad; class PreemptiveExpand; class RandomVector; class SyncBuffer; @@ -95,13 +100,13 @@ class NetEqImpl : public webrtc::NetEq { // before sending the struct to the NetEqImpl constructor. However, there // are dependencies between some of the classes inside the struct, so // swapping out one may make it necessary to re-create another one. - Dependencies(const NetEq::Config& config, - Clock* clock, - const rtc::scoped_refptr& decoder_factory, + Dependencies(const Environment& env, + const NetEq::Config& config, + scoped_refptr decoder_factory, const NetEqControllerFactory& controller_factory); ~Dependencies(); - Clock* const clock; + const Environment env; std::unique_ptr tick_timer; std::unique_ptr stats; std::unique_ptr decoder_database; @@ -126,17 +131,25 @@ class NetEqImpl : public webrtc::NetEq { NetEqImpl(const NetEqImpl&) = delete; NetEqImpl& operator=(const NetEqImpl&) = delete; + int InsertPacket(const RTPHeader& rtp_header, + ArrayView payload) override { + return InsertPacket( + rtp_header, payload, + RtpPacketInfo(rtp_header, /*receive_time=*/Timestamp::MinusInfinity())); + } + // Inserts a new packet into NetEq. Returns 0 on success, -1 on failure. 
int InsertPacket(const RTPHeader& rtp_header, - rtc::ArrayView payload) override; + ArrayView payload, + const RtpPacketInfo& packet_info) override; void InsertEmptyPacket(const RTPHeader& rtp_header) override; int GetAudio( AudioFrame* audio_frame, - bool* muted, + bool* muted = nullptr, int* current_sample_rate_hz = nullptr, - absl::optional action_override = absl::nullopt) override; + std::optional action_override = std::nullopt) override; void SetCodecs(const std::map& codecs) override; @@ -171,19 +184,11 @@ class NetEqImpl : public webrtc::NetEq { NetEqOperationsAndState GetOperationsAndState() const override; - // Enables post-decode VAD. When enabled, GetAudio() will return - // kOutputVADPassive when the signal contains no speech. - void EnableVad() override; - - // Disables post-decode VAD. - void DisableVad() override; - - absl::optional GetPlayoutTimestamp() const override; + std::optional GetPlayoutTimestamp() const override; int last_output_sample_rate_hz() const override; - absl::optional GetDecoderFormat( - int payload_type) const override; + std::optional GetCurrentDecoderFormat() const override; // Flushes both the packet buffer and the sync buffer. void FlushBuffers() override; @@ -212,14 +217,20 @@ class NetEqImpl : public webrtc::NetEq { // above. Returns 0 on success, otherwise an error code. // TODO(hlundin): Merge this with InsertPacket above? int InsertPacketInternal(const RTPHeader& rtp_header, - rtc::ArrayView payload) + ArrayView payload, + const RtpPacketInfo& packet_info) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + + // Returns true if the payload type changed (this should be followed by + // resetting various state). Returns false if the current payload type is + // unknown or equal to `payload_type`. + bool MaybeChangePayloadType(uint8_t payload_type) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Delivers 10 ms of audio data. The data is written to `audio_frame`. // Returns 0 on success, otherwise an error code. int GetAudioInternal(AudioFrame* audio_frame, - bool* muted, - absl::optional action_override) + std::optional action_override) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Provides a decision to the GetAudioInternal method. The decision what to @@ -231,7 +242,7 @@ class NetEqImpl : public webrtc::NetEq { PacketList* packet_list, DtmfEvent* dtmf_event, bool* play_dtmf, - absl::optional action_override) + std::optional action_override) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Decodes the speech packets in `packet_list`, and writes the results to @@ -286,7 +297,7 @@ class NetEqImpl : public webrtc::NetEq { bool fast_accelerate) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Sub-method which calls the PreemptiveExpand class to perform the - // preemtive expand operation. + // preemptive expand operation. 
int DoPreemptiveExpand(int16_t* decoded_buffer, size_t decoded_length, AudioDecoder::SpeechType speech_type, @@ -336,7 +347,10 @@ class NetEqImpl : public webrtc::NetEq { NetEqNetworkStatistics CurrentNetworkStatisticsInternal() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - Clock* const clock_; + NetEqController::PacketArrivedInfo ToPacketArrivedInfo( + const Packet& packet) const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + + const Environment env_; mutable Mutex mutex_; const std::unique_ptr tick_timer_ RTC_GUARDED_BY(mutex_); @@ -350,7 +364,6 @@ class NetEqImpl : public webrtc::NetEq { RTC_GUARDED_BY(mutex_); const std::unique_ptr timestamp_scaler_ RTC_GUARDED_BY(mutex_); - const std::unique_ptr vad_ RTC_GUARDED_BY(mutex_); const std::unique_ptr expand_factory_ RTC_GUARDED_BY(mutex_); const std::unique_ptr accelerate_factory_ RTC_GUARDED_BY(mutex_); @@ -376,7 +389,7 @@ class NetEqImpl : public webrtc::NetEq { size_t decoder_frame_length_ RTC_GUARDED_BY(mutex_); Mode last_mode_ RTC_GUARDED_BY(mutex_); Operation last_operation_ RTC_GUARDED_BY(mutex_); - absl::optional last_decoded_type_ + std::optional last_decoded_type_ RTC_GUARDED_BY(mutex_); size_t decoded_buffer_length_ RTC_GUARDED_BY(mutex_); std::unique_ptr decoded_buffer_ RTC_GUARDED_BY(mutex_); @@ -384,22 +397,18 @@ class NetEqImpl : public webrtc::NetEq { bool new_codec_ RTC_GUARDED_BY(mutex_); uint32_t timestamp_ RTC_GUARDED_BY(mutex_); bool reset_decoder_ RTC_GUARDED_BY(mutex_); - absl::optional current_rtp_payload_type_ RTC_GUARDED_BY(mutex_); - absl::optional current_cng_rtp_payload_type_ RTC_GUARDED_BY(mutex_); + std::optional current_rtp_payload_type_ RTC_GUARDED_BY(mutex_); + std::optional current_cng_rtp_payload_type_ RTC_GUARDED_BY(mutex_); bool first_packet_ RTC_GUARDED_BY(mutex_); bool enable_fast_accelerate_ RTC_GUARDED_BY(mutex_); std::unique_ptr nack_ RTC_GUARDED_BY(mutex_); bool nack_enabled_ RTC_GUARDED_BY(mutex_); const bool enable_muted_state_ RTC_GUARDED_BY(mutex_); - AudioFrame::VADActivity last_vad_activity_ RTC_GUARDED_BY(mutex_) = - AudioFrame::kVadPassive; std::unique_ptr generated_noise_stopwatch_ RTC_GUARDED_BY(mutex_); std::vector last_decoded_packet_infos_ RTC_GUARDED_BY(mutex_); - ExpandUmaLogger expand_uma_logger_ RTC_GUARDED_BY(mutex_); - ExpandUmaLogger speech_expand_uma_logger_ RTC_GUARDED_BY(mutex_); bool no_time_stretching_ RTC_GUARDED_BY(mutex_); // Only used for test. 
- rtc::BufferT concealment_audio_ RTC_GUARDED_BY(mutex_); + BufferT concealment_audio_ RTC_GUARDED_BY(mutex_); }; } // namespace webrtc diff --git a/modules/audio_coding/neteq/neteq_impl_unittest.cc b/modules/audio_coding/neteq/neteq_impl_unittest.cc index e61cd52502..c625dcd898 100644 --- a/modules/audio_coding/neteq/neteq_impl_unittest.cc +++ b/modules/audio_coding/neteq/neteq_impl_unittest.cc @@ -15,13 +15,14 @@ #include #include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/environment/environment_factory.h" #include "api/neteq/default_neteq_controller_factory.h" +#include "api/neteq/default_neteq_factory.h" #include "api/neteq/neteq.h" #include "api/neteq/neteq_controller.h" #include "modules/audio_coding/codecs/g711/audio_decoder_pcm.h" #include "modules/audio_coding/neteq/accelerate.h" #include "modules/audio_coding/neteq/decision_logic.h" -#include "modules/audio_coding/neteq/default_neteq_factory.h" #include "modules/audio_coding/neteq/expand.h" #include "modules/audio_coding/neteq/histogram.h" #include "modules/audio_coding/neteq/mock/mock_decoder_database.h" @@ -71,13 +72,14 @@ int DeletePacketsAndReturnOk(PacketList* packet_list) { class NetEqImplTest : public ::testing::Test { protected: - NetEqImplTest() : clock_(0) { config_.sample_rate_hz = 8000; } + NetEqImplTest() : clock_(0), env_(CreateEnvironment(&clock_)) { + config_.sample_rate_hz = 8000; + } - void CreateInstance( - const rtc::scoped_refptr& decoder_factory) { + void CreateInstance(scoped_refptr decoder_factory) { ASSERT_TRUE(decoder_factory); config_.enable_muted_state = enable_muted_state_; - NetEqImpl::Dependencies deps(config_, &clock_, decoder_factory, + NetEqImpl::Dependencies deps(env_, config_, std::move(decoder_factory), DefaultNetEqControllerFactory()); // Get a local pointer to NetEq's TickTimer object. @@ -108,8 +110,8 @@ class NetEqImplTest : public ::testing::Test { dtmf_tone_generator_ = deps.dtmf_tone_generator.get(); if (use_mock_packet_buffer_) { - std::unique_ptr mock( - new MockPacketBuffer(config_.max_packets_in_buffer, tick_timer_)); + std::unique_ptr mock(new MockPacketBuffer( + config_.max_packets_in_buffer, tick_timer_, deps.stats.get())); mock_packet_buffer_ = mock.get(); deps.packet_buffer = std::move(mock); } @@ -120,15 +122,13 @@ class NetEqImplTest : public ::testing::Test { mock_neteq_controller_ = mock.get(); deps.neteq_controller = std::move(mock); } else { - deps.stats = std::make_unique(); NetEqController::Config controller_config; controller_config.tick_timer = tick_timer_; controller_config.base_min_delay_ms = config_.min_delay_ms; controller_config.allow_time_stretching = true; controller_config.max_packets_in_buffer = config_.max_packets_in_buffer; - controller_config.clock = &clock_; deps.neteq_controller = - std::make_unique(std::move(controller_config)); + std::make_unique(env_, std::move(controller_config)); } neteq_controller_ = deps.neteq_controller.get(); @@ -228,6 +228,7 @@ class NetEqImplTest : public ::testing::Test { std::unique_ptr neteq_; NetEq::Config config_; SimulatedClock clock_; + const Environment env_; TickTimer* tick_timer_ = nullptr; MockDecoderDatabase* mock_decoder_database_ = nullptr; DecoderDatabase* decoder_database_ = nullptr; @@ -254,10 +255,8 @@ class NetEqImplTest : public ::testing::Test { // TODO(hlundin): Move to separate file? 
TEST(NetEq, CreateAndDestroy) { NetEq::Config config; - SimulatedClock clock(0); - auto decoder_factory = CreateBuiltinAudioDecoderFactory(); - std::unique_ptr neteq = - DefaultNetEqFactory().CreateNetEq(config, decoder_factory, &clock); + std::unique_ptr neteq = DefaultNetEqFactory().Create( + CreateEnvironment(), config, CreateBuiltinAudioDecoderFactory()); } TEST_F(NetEqImplTest, RegisterPayloadType) { @@ -305,11 +304,10 @@ TEST_F(NetEqImplTest, InsertPacket) { fake_packet.sequence_number = kFirstSequenceNumber; fake_packet.timestamp = kFirstTimestamp; - auto mock_decoder_factory = rtc::make_ref_counted(); - EXPECT_CALL(*mock_decoder_factory, MakeAudioDecoderMock(_, _, _)) - .WillOnce(Invoke([&](const SdpAudioFormat& format, - absl::optional codec_pair_id, - std::unique_ptr* dec) { + const Environment env = CreateEnvironment(); + auto mock_decoder_factory = make_ref_counted(); + EXPECT_CALL(*mock_decoder_factory, Create) + .WillOnce(WithArg<1>([&](const SdpAudioFormat& format) { EXPECT_EQ("pcmu", format.name); std::unique_ptr mock_decoder(new MockAudioDecoder); @@ -317,10 +315,10 @@ TEST_F(NetEqImplTest, InsertPacket) { EXPECT_CALL(*mock_decoder, SampleRateHz()).WillRepeatedly(Return(8000)); EXPECT_CALL(*mock_decoder, Die()).Times(1); // Called when deleted. - *dec = std::move(mock_decoder); + return mock_decoder; })); - DecoderDatabase::DecoderInfo info(SdpAudioFormat("pcmu", 8000, 1), - absl::nullopt, mock_decoder_factory.get()); + DecoderDatabase::DecoderInfo info(env, SdpAudioFormat("pcmu", 8000, 1), + std::nullopt, mock_decoder_factory.get()); // Expectations for decoder database. EXPECT_CALL(*mock_decoder_database_, GetDecoderInfo(kPayloadType)) @@ -329,15 +327,10 @@ TEST_F(NetEqImplTest, InsertPacket) { // Expectations for packet buffer. EXPECT_CALL(*mock_packet_buffer_, Empty()) .WillOnce(Return(false)); // Called once after first packet is inserted. - EXPECT_CALL(*mock_packet_buffer_, Flush(_)).Times(1); - EXPECT_CALL(*mock_packet_buffer_, InsertPacketList(_, _, _, _, _, _, _, _)) + EXPECT_CALL(*mock_packet_buffer_, Flush()).Times(1); + EXPECT_CALL(*mock_packet_buffer_, InsertPacket(_)) .Times(2) - .WillRepeatedly(DoAll(SetArgPointee<2>(kPayloadType), - WithArg<0>(Invoke(DeletePacketsAndReturnOk)))); - // SetArgPointee<2>(kPayloadType) means that the third argument (zero-based - // index) is a pointer, and the variable pointed to is set to kPayloadType. - // Also invoke the function DeletePacketsAndReturnOk to properly delete all - // packets in the list (to avoid memory leaks in the test). + .WillRepeatedly(Return(PacketBuffer::kOK)); EXPECT_CALL(*mock_packet_buffer_, PeekNextPacket()) .Times(1) .WillOnce(Return(&fake_packet)); @@ -387,6 +380,35 @@ TEST_F(NetEqImplTest, InsertPacket) { neteq_->InsertPacket(rtp_header, payload); } +TEST_F(NetEqImplTest, CountStatsAfterFirstDecodedPacket) { + UseNoMocks(); + CreateInstance(); + const uint8_t kPayloadType = 17; // Just an arbitrary number. + EXPECT_TRUE(neteq_->RegisterPayloadType(kPayloadType, + SdpAudioFormat("l16", 8000, 1))); + const size_t kPayloadLengthSamples = 80; + const size_t kPayloadLengthBytes = 2 * kPayloadLengthSamples; // PCM 16-bit. + uint8_t payload[kPayloadLengthBytes] = {0}; + RTPHeader rtp_header; + rtp_header.payloadType = kPayloadType; + rtp_header.sequenceNumber = 0x1234; + rtp_header.timestamp = 0x12345678; + rtp_header.ssrc = 0x87654321; + AudioFrame frame; + // Get audio a couple of times to make sure that samples received remains + // zero. 
+ for (int i = 0; i < 3; ++i) { + neteq_->GetAudio(&frame); + EXPECT_EQ(neteq_->GetLifetimeStatistics().concealed_samples, 0u); + EXPECT_EQ(neteq_->GetLifetimeStatistics().total_samples_received, 0u); + } + neteq_->InsertPacket(rtp_header, payload); + neteq_->GetAudio(&frame); + EXPECT_EQ(neteq_->GetLifetimeStatistics().concealed_samples, 0u); + EXPECT_EQ(neteq_->GetLifetimeStatistics().total_samples_received, + kPayloadLengthSamples); +} + TEST_F(NetEqImplTest, InsertPacketsUntilBufferIsFull) { UseNoMocks(); CreateInstance(); @@ -464,7 +486,7 @@ TEST_F(NetEqImplTest, VerifyTimestampPropagation) { CountingSamplesDecoder() : next_value_(1) {} // Produce as many samples as input bytes (`encoded_len`). - int DecodeInternal(const uint8_t* encoded, + int DecodeInternal(const uint8_t* /* encoded */, size_t encoded_len, int /* sample_rate_hz */, int16_t* decoded, @@ -473,7 +495,7 @@ TEST_F(NetEqImplTest, VerifyTimestampPropagation) { decoded[i] = next_value_++; } *speech_type = kSpeech; - return rtc::checked_cast(encoded_len); + return checked_cast(encoded_len); } void Reset() override { next_value_ = 1; } @@ -489,7 +511,7 @@ TEST_F(NetEqImplTest, VerifyTimestampPropagation) { } decoder_; auto decoder_factory = - rtc::make_ref_counted(&decoder_); + make_ref_counted(&decoder_); UseNoMocks(); CreateInstance(decoder_factory); @@ -499,8 +521,9 @@ TEST_F(NetEqImplTest, VerifyTimestampPropagation) { // Insert one packet. clock_.AdvanceTimeMilliseconds(123456); - Timestamp expected_receive_time = clock_.CurrentTime(); - EXPECT_EQ(NetEq::kOK, neteq_->InsertPacket(rtp_header, payload)); + RtpPacketInfo expected_packet_info(rtp_header, clock_.CurrentTime()); + EXPECT_EQ(NetEq::kOK, + neteq_->InsertPacket(rtp_header, payload, expected_packet_info)); // Pull audio once. const size_t kMaxOutputSize = static_cast(10 * kSampleRateHz / 1000); @@ -514,14 +537,7 @@ TEST_F(NetEqImplTest, VerifyTimestampPropagation) { // Verify `output.packet_infos_`. ASSERT_THAT(output.packet_infos_, SizeIs(1)); - { - const auto& packet_info = output.packet_infos_[0]; - EXPECT_EQ(packet_info.ssrc(), rtp_header.ssrc); - EXPECT_THAT(packet_info.csrcs(), ElementsAre(43, 65, 17)); - EXPECT_EQ(packet_info.rtp_timestamp(), rtp_header.timestamp); - EXPECT_FALSE(packet_info.audio_level().has_value()); - EXPECT_EQ(packet_info.receive_time(), expected_receive_time); - } + EXPECT_EQ(output.packet_infos_[0], expected_packet_info); // Start with a simple check that the fake decoder is behaving as expected. EXPECT_EQ(kPayloadLengthSamples, @@ -530,10 +546,10 @@ TEST_F(NetEqImplTest, VerifyTimestampPropagation) { // The value of the last of the output samples is the same as the number of // samples played from the decoded packet. Thus, this number + the RTP // timestamp should match the playout timestamp. - // Wrap the expected value in an absl::optional to compare them as such. + // Wrap the expected value in an std::optional to compare them as such. EXPECT_EQ( - absl::optional(rtp_header.timestamp + - output.data()[output.samples_per_channel_ - 1]), + std::optional(rtp_header.timestamp + + output.data()[output.samples_per_channel_ - 1]), neteq_->GetPlayoutTimestamp()); // Check the timestamp for the last value in the sync buffer. This should @@ -557,7 +573,7 @@ TEST_F(NetEqImplTest, ReorderedPacket) { MockAudioDecoder mock_decoder; CreateInstance( - rtc::make_ref_counted(&mock_decoder)); + make_ref_counted(&mock_decoder)); const uint8_t kPayloadType = 17; // Just an arbitrary number. 
const int kSampleRateHz = 8000; @@ -570,15 +586,15 @@ TEST_F(NetEqImplTest, ReorderedPacket) { rtp_header.sequenceNumber = 0x1234; rtp_header.timestamp = 0x12345678; rtp_header.ssrc = 0x87654321; - rtp_header.extension.hasAudioLevel = true; - rtp_header.extension.audioLevel = 42; + rtp_header.extension.set_audio_level( + AudioLevel(/*voice_activity=*/false, 42)); EXPECT_CALL(mock_decoder, Reset()).WillRepeatedly(Return()); EXPECT_CALL(mock_decoder, SampleRateHz()) .WillRepeatedly(Return(kSampleRateHz)); EXPECT_CALL(mock_decoder, Channels()).WillRepeatedly(Return(1)); EXPECT_CALL(mock_decoder, PacketDuration(_, kPayloadLengthBytes)) - .WillRepeatedly(Return(rtc::checked_cast(kPayloadLengthSamples))); + .WillRepeatedly(Return(checked_cast(kPayloadLengthSamples))); int16_t dummy_output[kPayloadLengthSamples] = {0}; // The below expectation will make the mock decoder write // `kPayloadLengthSamples` zeros to the output array, and mark it as speech. @@ -587,14 +603,16 @@ TEST_F(NetEqImplTest, ReorderedPacket) { .WillOnce(DoAll(SetArrayArgument<3>(dummy_output, dummy_output + kPayloadLengthSamples), SetArgPointee<4>(AudioDecoder::kSpeech), - Return(rtc::checked_cast(kPayloadLengthSamples)))); + Return(checked_cast(kPayloadLengthSamples)))); EXPECT_TRUE(neteq_->RegisterPayloadType(kPayloadType, SdpAudioFormat("L16", 8000, 1))); // Insert one packet. clock_.AdvanceTimeMilliseconds(123456); - Timestamp expected_receive_time = clock_.CurrentTime(); - EXPECT_EQ(NetEq::kOK, neteq_->InsertPacket(rtp_header, payload)); + RtpPacketInfo expected_packet_info = + RtpPacketInfo(rtp_header, /*receive_time=*/clock_.CurrentTime()); + EXPECT_EQ(NetEq::kOK, + neteq_->InsertPacket(rtp_header, payload, expected_packet_info)); // Pull audio once. const size_t kMaxOutputSize = static_cast(10 * kSampleRateHz / 1000); @@ -607,30 +625,25 @@ TEST_F(NetEqImplTest, ReorderedPacket) { // Verify `output.packet_infos_`. ASSERT_THAT(output.packet_infos_, SizeIs(1)); - { - const auto& packet_info = output.packet_infos_[0]; - EXPECT_EQ(packet_info.ssrc(), rtp_header.ssrc); - EXPECT_THAT(packet_info.csrcs(), IsEmpty()); - EXPECT_EQ(packet_info.rtp_timestamp(), rtp_header.timestamp); - EXPECT_EQ(packet_info.audio_level(), rtp_header.extension.audioLevel); - EXPECT_EQ(packet_info.receive_time(), expected_receive_time); - } + EXPECT_EQ(output.packet_infos_[0], expected_packet_info); // Insert two more packets. The first one is out of order, and is already too // old, the second one is the expected next packet. rtp_header.sequenceNumber -= 1; rtp_header.timestamp -= kPayloadLengthSamples; - rtp_header.extension.audioLevel = 1; + rtp_header.extension.set_audio_level(AudioLevel(/*voice_activity=*/false, 1)); payload[0] = 1; clock_.AdvanceTimeMilliseconds(1000); EXPECT_EQ(NetEq::kOK, neteq_->InsertPacket(rtp_header, payload)); rtp_header.sequenceNumber += 2; rtp_header.timestamp += 2 * kPayloadLengthSamples; - rtp_header.extension.audioLevel = 2; + rtp_header.extension.set_audio_level(AudioLevel(/*voice_activity=*/false, 2)); payload[0] = 2; clock_.AdvanceTimeMilliseconds(2000); - expected_receive_time = clock_.CurrentTime(); - EXPECT_EQ(NetEq::kOK, neteq_->InsertPacket(rtp_header, payload)); + expected_packet_info = + RtpPacketInfo(rtp_header, /*receive_time=*/clock_.CurrentTime()); + EXPECT_EQ(NetEq::kOK, + neteq_->InsertPacket(rtp_header, payload, expected_packet_info)); // Expect only the second packet to be decoded (the one with "2" as the first // payload byte). 
@@ -639,7 +652,7 @@ TEST_F(NetEqImplTest, ReorderedPacket) { .WillOnce(DoAll(SetArrayArgument<3>(dummy_output, dummy_output + kPayloadLengthSamples), SetArgPointee<4>(AudioDecoder::kSpeech), - Return(rtc::checked_cast(kPayloadLengthSamples)))); + Return(checked_cast(kPayloadLengthSamples)))); // Pull audio once. EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted)); @@ -656,14 +669,7 @@ TEST_F(NetEqImplTest, ReorderedPacket) { // Verify `output.packet_infos_`. Expect to only see the second packet. ASSERT_THAT(output.packet_infos_, SizeIs(1)); - { - const auto& packet_info = output.packet_infos_[0]; - EXPECT_EQ(packet_info.ssrc(), rtp_header.ssrc); - EXPECT_THAT(packet_info.csrcs(), IsEmpty()); - EXPECT_EQ(packet_info.rtp_timestamp(), rtp_header.timestamp); - EXPECT_EQ(packet_info.audio_level(), rtp_header.extension.audioLevel); - EXPECT_EQ(packet_info.receive_time(), expected_receive_time); - } + EXPECT_EQ(output.packet_infos_[0], expected_packet_info); EXPECT_CALL(mock_decoder, Die()); } @@ -729,7 +735,6 @@ std::vector CreateRedPayload(size_t num_payloads, int payload_type, int payload_size, int timestamp_offset) { - constexpr int kRedHeaderLength = 4; const size_t size = payload_size + 1 + (num_payloads - 1) * (payload_size + kRedHeaderLength); std::vector payload(size, 0); @@ -747,7 +752,7 @@ std::vector CreateRedPayload(size_t num_payloads, *payload_ptr |= 0x80; ++payload_ptr; const int this_offset = - rtc::checked_cast((num_payloads - i - 1) * timestamp_offset); + checked_cast((num_payloads - i - 1) * timestamp_offset); *payload_ptr = this_offset >> 6; ++payload_ptr; RTC_DCHECK_LE(payload_size, 1023); // Max length described by 10 bits. @@ -778,8 +783,7 @@ TEST_F(NetEqImplTest, InsertRedPayload) { AbsoluteCaptureTime capture_time; capture_time.absolute_capture_timestamp = 1234; header.extension.absolute_capture_time = capture_time; - header.extension.audioLevel = 12; - header.extension.hasAudioLevel = true; + header.extension.set_audio_level(AudioLevel(/*voice_activity=*/false, 12)); header.numCSRCs = 1; header.arrOfCSRCs[0] = 123; neteq_->InsertPacket(header, payload); @@ -801,7 +805,7 @@ TEST_F(NetEqImplTest, InsertRedPayload) { EXPECT_EQ(frame.packet_infos_.size(), 1u); EXPECT_EQ(frame.packet_infos_.front().absolute_capture_time(), capture_time); EXPECT_EQ(frame.packet_infos_.front().audio_level(), - header.extension.audioLevel); + header.extension.audio_level()->level()); EXPECT_EQ(frame.packet_infos_.front().csrcs()[0], header.arrOfCSRCs[0]); } @@ -1082,7 +1086,7 @@ TEST_F(NetEqImplTest, CodecInternalCng) { // Create a mock decoder object. MockAudioDecoder mock_decoder; CreateInstance( - rtc::make_ref_counted(&mock_decoder)); + make_ref_counted(&mock_decoder)); const uint8_t kPayloadType = 17; // Just an arbitrary number. const int kSampleRateKhz = 48; @@ -1103,11 +1107,11 @@ TEST_F(NetEqImplTest, CodecInternalCng) { .WillRepeatedly(Return(kSampleRateKhz * 1000)); EXPECT_CALL(mock_decoder, Channels()).WillRepeatedly(Return(1)); EXPECT_CALL(mock_decoder, PacketDuration(_, kPayloadLengthBytes)) - .WillRepeatedly(Return(rtc::checked_cast(kPayloadLengthSamples))); + .WillRepeatedly(Return(checked_cast(kPayloadLengthSamples))); // Packed duration when asking the decoder for more CNG data (without a new // packet). 
   EXPECT_CALL(mock_decoder, PacketDuration(nullptr, 0))
-      .WillRepeatedly(Return(rtc::checked_cast(kPayloadLengthSamples)));
+      .WillRepeatedly(Return(checked_cast(kPayloadLengthSamples)));

   EXPECT_TRUE(neteq_->RegisterPayloadType(kPayloadType,
                                           SdpAudioFormat("opus", 48000, 2)));
@@ -1136,7 +1140,7 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
         .WillOnce(DoAll(SetArrayArgument<3>(
                             dummy_output, dummy_output + kPayloadLengthSamples),
                         SetArgPointee<4>(packets[i].decoder_output_type),
-                        Return(rtc::checked_cast(kPayloadLengthSamples))));
+                        Return(checked_cast(kPayloadLengthSamples))));
   }

   // Expect comfort noise to be returned by the decoder.
@@ -1145,7 +1149,7 @@ TEST_F(NetEqImplTest, CodecInternalCng) {
       .WillOnce(DoAll(SetArrayArgument<3>(dummy_output,
                                           dummy_output + kPayloadLengthSamples),
                       SetArgPointee<4>(AudioDecoder::kComfortNoise),
-                      Return(rtc::checked_cast(kPayloadLengthSamples))));
+                      Return(checked_cast(kPayloadLengthSamples))));

   std::vector expected_output = {
       AudioFrame::kNormalSpeech, AudioFrame::kCNG, AudioFrame::kNormalSpeech};
@@ -1172,8 +1176,7 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
   UseNoMocks();
   ::testing::NiceMock decoder;
-  CreateInstance(
-      rtc::make_ref_counted(&decoder));
+  CreateInstance(make_ref_counted(&decoder));

   static const size_t kNetEqMaxFrameSize = 5760;  // 120 ms @ 48 kHz.
   static const size_t kChannels = 2;
@@ -1197,7 +1200,7 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
   EXPECT_CALL(decoder, PacketDuration(Pointee(kFirstPayloadValue),
                                       kPayloadLengthBytes))
       .Times(AtLeast(1))
-      .WillRepeatedly(Return(rtc::checked_cast(kNetEqMaxFrameSize + 1)));
+      .WillRepeatedly(Return(checked_cast(kNetEqMaxFrameSize + 1)));

   EXPECT_CALL(decoder, DecodeInternal(Pointee(kFirstPayloadValue), _, _, _, _))
       .Times(0);
@@ -1214,7 +1217,7 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
   EXPECT_CALL(decoder, PacketDuration(Pointee(kSecondPayloadValue),
                                       kPayloadLengthBytes))
       .Times(AtLeast(1))
-      .WillRepeatedly(Return(rtc::checked_cast(kNetEqMaxFrameSize)));
+      .WillRepeatedly(Return(checked_cast(kNetEqMaxFrameSize)));

   EXPECT_CALL(decoder, SampleRateHz()).WillRepeatedly(Return(kSampleRateHz));

@@ -1246,12 +1249,16 @@ TEST_F(NetEqImplTest, UnsupportedDecoder) {
   EXPECT_EQ(kChannels, output.num_channels_);
   EXPECT_THAT(output.packet_infos_, IsEmpty());

-  // Second call to GetAudio will decode the packet that is ok. No errors are
-  // expected.
-  EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
-  EXPECT_EQ(kExpectedOutputSize, output.samples_per_channel_ * kChannels);
-  EXPECT_EQ(kChannels, output.num_channels_);
-  EXPECT_THAT(output.packet_infos_, SizeIs(1));
+  // Call GetAudio until the next packet is decoded.
+  int calls = 0;
+  int kTimeout = 10;
+  while (output.packet_infos_.empty() && calls < kTimeout) {
+    EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&output, &muted));
+    EXPECT_EQ(kExpectedOutputSize, output.samples_per_channel_ * kChannels);
+    EXPECT_EQ(kChannels, output.num_channels_);
+    ++calls;
+  }
+  EXPECT_LT(calls, kTimeout);

   // Die isn't called through NiceMock (since it's called by the
   // MockAudioDecoder constructor), so it needs to be mocked explicitly.
@@ -1284,7 +1290,7 @@ TEST_F(NetEqImplTest, FloodBufferAndGetNetworkStats) { for (size_t i = 0; i <= config_.max_packets_in_buffer; ++i) { EXPECT_EQ(i, packet_buffer_->NumPacketsInBuffer()); EXPECT_EQ(NetEq::kOK, neteq_->InsertPacket(rtp_header, payload)); - rtp_header.timestamp += rtc::checked_cast(kPayloadLengthSamples); + rtp_header.timestamp += checked_cast(kPayloadLengthSamples); ++rtp_header.sequenceNumber; } EXPECT_EQ(1u, packet_buffer_->NumPacketsInBuffer()); @@ -1300,7 +1306,7 @@ TEST_F(NetEqImplTest, DecodedPayloadTooShort) { MockAudioDecoder mock_decoder; CreateInstance( - rtc::make_ref_counted(&mock_decoder)); + make_ref_counted(&mock_decoder)); const uint8_t kPayloadType = 17; // Just an arbitrary number. const int kSampleRateHz = 8000; @@ -1319,7 +1325,7 @@ TEST_F(NetEqImplTest, DecodedPayloadTooShort) { .WillRepeatedly(Return(kSampleRateHz)); EXPECT_CALL(mock_decoder, Channels()).WillRepeatedly(Return(1)); EXPECT_CALL(mock_decoder, PacketDuration(_, _)) - .WillRepeatedly(Return(rtc::checked_cast(kPayloadLengthSamples))); + .WillRepeatedly(Return(checked_cast(kPayloadLengthSamples))); int16_t dummy_output[kPayloadLengthSamples] = {0}; // The below expectation will make the mock decoder write // `kPayloadLengthSamples` - 5 zeros to the output array, and mark it as @@ -1330,7 +1336,7 @@ TEST_F(NetEqImplTest, DecodedPayloadTooShort) { DoAll(SetArrayArgument<3>(dummy_output, dummy_output + kPayloadLengthSamples - 5), SetArgPointee<4>(AudioDecoder::kSpeech), - Return(rtc::checked_cast(kPayloadLengthSamples - 5)))); + Return(checked_cast(kPayloadLengthSamples - 5)))); EXPECT_TRUE(neteq_->RegisterPayloadType(kPayloadType, SdpAudioFormat("L16", 8000, 1))); @@ -1359,7 +1365,7 @@ TEST_F(NetEqImplTest, DecodingError) { MockAudioDecoder mock_decoder; CreateInstance( - rtc::make_ref_counted(&mock_decoder)); + make_ref_counted(&mock_decoder)); const uint8_t kPayloadType = 17; // Just an arbitrary number. const int kSampleRateHz = 8000; @@ -1384,7 +1390,7 @@ TEST_F(NetEqImplTest, DecodingError) { .WillRepeatedly(Return(kSampleRateHz)); EXPECT_CALL(mock_decoder, Channels()).WillRepeatedly(Return(1)); EXPECT_CALL(mock_decoder, PacketDuration(_, _)) - .WillRepeatedly(Return(rtc::checked_cast(kFrameLengthSamples))); + .WillRepeatedly(Return(checked_cast(kFrameLengthSamples))); EXPECT_CALL(mock_decoder, ErrorCode()).WillOnce(Return(kDecoderErrorCode)); EXPECT_CALL(mock_decoder, HasDecodePlc()).WillOnce(Return(false)); int16_t dummy_output[kFrameLengthSamples] = {0}; @@ -1399,7 +1405,7 @@ TEST_F(NetEqImplTest, DecodingError) { DoAll(SetArrayArgument<3>(dummy_output, dummy_output + kFrameLengthSamples), SetArgPointee<4>(AudioDecoder::kSpeech), - Return(rtc::checked_cast(kFrameLengthSamples)))) + Return(checked_cast(kFrameLengthSamples)))) .RetiresOnSaturation(); // Then mock decoder fails. 
A common reason for failure can be buffer being @@ -1417,7 +1423,7 @@ TEST_F(NetEqImplTest, DecodingError) { DoAll(SetArrayArgument<3>(dummy_output, dummy_output + kFrameLengthSamples), SetArgPointee<4>(AudioDecoder::kSpeech), - Return(rtc::checked_cast(kFrameLengthSamples)))); + Return(checked_cast(kFrameLengthSamples)))); } EXPECT_TRUE(neteq_->RegisterPayloadType(kPayloadType, @@ -1603,17 +1609,17 @@ TEST_F(NetEqImplTest, NoCrashWith1000Channels) { AudioDecoder* decoder = nullptr; - auto mock_decoder_factory = rtc::make_ref_counted(); - EXPECT_CALL(*mock_decoder_factory, MakeAudioDecoderMock(_, _, _)) - .WillOnce(Invoke([&](const SdpAudioFormat& format, - absl::optional codec_pair_id, - std::unique_ptr* dec) { + const Environment env = CreateEnvironment(); + auto mock_decoder_factory = make_ref_counted(); + EXPECT_CALL(*mock_decoder_factory, Create) + .WillOnce(WithArg<1>([&](const SdpAudioFormat& format) { EXPECT_EQ("pcmu", format.name); - *dec = std::make_unique(1000); - decoder = dec->get(); + auto dec = std::make_unique(1000); + decoder = dec.get(); + return dec; })); - DecoderDatabase::DecoderInfo info(SdpAudioFormat("pcmu", 8000, 1), - absl::nullopt, mock_decoder_factory.get()); + DecoderDatabase::DecoderInfo info(env, SdpAudioFormat("pcmu", 8000, 1), + std::nullopt, mock_decoder_factory.get()); // Expectations for decoder database. EXPECT_CALL(*mock_decoder_database_, GetDecoderInfo(kPayloadType)) .WillRepeatedly(Return(&info)); @@ -1622,7 +1628,7 @@ TEST_F(NetEqImplTest, NoCrashWith1000Channels) { EXPECT_CALL(*mock_decoder_database_, GetActiveDecoder()) .WillRepeatedly(Return(decoder)); EXPECT_CALL(*mock_decoder_database_, SetActiveDecoder(_, _)) - .WillOnce(Invoke([](uint8_t rtp_payload_type, bool* new_decoder) { + .WillOnce(Invoke([](uint8_t /* rtp_payload_type */, bool* new_decoder) { *new_decoder = true; return 0; })); @@ -1640,6 +1646,85 @@ TEST_F(NetEqImplTest, NoCrashWith1000Channels) { } } +// The test first inserts a packet with narrow-band CNG, then a packet with +// wide-band speech. The expected behavior is to detect a change in sample rate, +// even though no speech packet has been inserted before, and flush out the CNG +// packet. +TEST_F(NetEqImplTest, CngFirstThenSpeechWithNewSampleRate) { + UseNoMocks(); + CreateInstance(); + constexpr int kCnPayloadType = 7; + neteq_->RegisterPayloadType(kCnPayloadType, SdpAudioFormat("cn", 8000, 1)); + constexpr int kSpeechPayloadType = 8; + neteq_->RegisterPayloadType(kSpeechPayloadType, + SdpAudioFormat("l16", 16000, 1)); + + RTPHeader header; + header.payloadType = kCnPayloadType; + uint8_t payload[320] = {0}; + + EXPECT_EQ(neteq_->InsertPacket(header, payload), NetEq::kOK); + EXPECT_EQ(neteq_->GetLifetimeStatistics().packets_discarded, 0u); + + header.payloadType = kSpeechPayloadType; + header.timestamp += 160; + EXPECT_EQ(neteq_->InsertPacket(header, payload), NetEq::kOK); + // CN packet should be discarded, since it does not match the + // new speech sample rate. + EXPECT_EQ(neteq_->GetLifetimeStatistics().packets_discarded, 1u); + + // Next decoded packet should be speech. 
+ AudioFrame audio_frame; + bool muted; + EXPECT_EQ(neteq_->GetAudio(&audio_frame, &muted), NetEq::kOK); + EXPECT_EQ(audio_frame.sample_rate_hz(), 16000); + EXPECT_EQ(audio_frame.speech_type_, AudioFrame::SpeechType::kNormalSpeech); +} + +TEST_F(NetEqImplTest, InsertPacketChangePayloadType) { + UseNoMocks(); + CreateInstance(); + constexpr int kPcmuPayloadType = 7; + neteq_->RegisterPayloadType(kPcmuPayloadType, + SdpAudioFormat("pcmu", 8000, 1)); + constexpr int kPcmaPayloadType = 8; + neteq_->RegisterPayloadType(kPcmaPayloadType, + SdpAudioFormat("pcma", 8000, 1)); + + RTPHeader header; + header.payloadType = kPcmuPayloadType; + header.timestamp = 1234; + uint8_t payload[160] = {0}; + + std::optional decoder = + neteq_->GetCurrentDecoderFormat(); + EXPECT_FALSE(decoder.has_value()); + + EXPECT_EQ(neteq_->InsertPacket(header, payload), NetEq::kOK); + EXPECT_EQ(neteq_->GetLifetimeStatistics().packets_discarded, 0u); + decoder = neteq_->GetCurrentDecoderFormat(); + ASSERT_TRUE(decoder.has_value()); + EXPECT_EQ(decoder->payload_type, kPcmuPayloadType); + EXPECT_EQ(decoder->sdp_format.name, "pcmu"); + + header.payloadType = kPcmaPayloadType; + header.timestamp += 80; + EXPECT_EQ(neteq_->InsertPacket(header, payload), NetEq::kOK); + decoder = neteq_->GetCurrentDecoderFormat(); + ASSERT_TRUE(decoder.has_value()); + EXPECT_EQ(decoder->payload_type, kPcmaPayloadType); + EXPECT_EQ(decoder->sdp_format.name, "pcma"); + // The previous packet should be discarded since the codec changed. + EXPECT_EQ(neteq_->GetLifetimeStatistics().packets_discarded, 1u); + + // Next decoded packet should be speech. + AudioFrame audio_frame; + bool muted; + EXPECT_EQ(neteq_->GetAudio(&audio_frame, &muted), NetEq::kOK); + EXPECT_EQ(audio_frame.sample_rate_hz(), 8000); + EXPECT_EQ(audio_frame.speech_type_, AudioFrame::SpeechType::kNormalSpeech); +} + class Decoder120ms : public AudioDecoder { public: Decoder120ms(int sample_rate_hz, SpeechType speech_type) @@ -1647,19 +1732,19 @@ class Decoder120ms : public AudioDecoder { next_value_(1), speech_type_(speech_type) {} - int DecodeInternal(const uint8_t* encoded, - size_t encoded_len, + int DecodeInternal(const uint8_t* /* encoded */, + size_t /* encoded_len */, int sample_rate_hz, int16_t* decoded, SpeechType* speech_type) override { EXPECT_EQ(sample_rate_hz_, sample_rate_hz); size_t decoded_len = - rtc::CheckedDivExact(sample_rate_hz, 1000) * 120 * Channels(); + CheckedDivExact(sample_rate_hz, 1000) * 120 * Channels(); for (size_t i = 0; i < decoded_len; ++i) { decoded[i] = next_value_++; } *speech_type = speech_type_; - return rtc::checked_cast(decoded_len); + return checked_cast(decoded_len); } void Reset() override { next_value_ = 1; } @@ -1693,7 +1778,7 @@ class NetEqImplTest120ms : public NetEqImplTest { } uint32_t timestamp_diff_between_packets() const { - return rtc::CheckedDivExact(kSamplingFreq_, 1000u) * 120; + return CheckedDivExact(kSamplingFreq_, 1000u) * 120; } uint32_t first_timestamp() const { return 10u; } @@ -1720,7 +1805,7 @@ class NetEqImplTest120ms : public NetEqImplTest { void Register120msCodec(AudioDecoder::SpeechType speech_type) { const uint32_t sampling_freq = kSamplingFreq_; - decoder_factory_ = rtc::make_ref_counted( + decoder_factory_ = make_ref_counted( [sampling_freq, speech_type]() { std::unique_ptr decoder = std::make_unique(sampling_freq, speech_type); @@ -1729,7 +1814,7 @@ class NetEqImplTest120ms : public NetEqImplTest { }); } - rtc::scoped_refptr decoder_factory_; + scoped_refptr decoder_factory_; AudioFrame output_; const uint32_t 
kPayloadType = 17; const uint32_t kSamplingFreq_ = 48000; diff --git a/modules/audio_coding/neteq/neteq_network_stats_unittest.cc b/modules/audio_coding/neteq/neteq_network_stats_unittest.cc index a669ad727e..3fd6541c39 100644 --- a/modules/audio_coding/neteq/neteq_network_stats_unittest.cc +++ b/modules/audio_coding/neteq/neteq_network_stats_unittest.cc @@ -9,15 +9,15 @@ */ #include +#include -#include "absl/memory/memory.h" #include "api/audio/audio_frame.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/environment/environment_factory.h" +#include "api/neteq/default_neteq_factory.h" #include "api/neteq/neteq.h" -#include "modules/audio_coding/neteq/default_neteq_factory.h" #include "modules/audio_coding/neteq/tools/rtp_generator.h" -#include "system_wrappers/include/clock.h" #include "test/audio_decoder_proxy_factory.h" #include "test/gmock.h" @@ -28,9 +28,9 @@ namespace { std::unique_ptr CreateNetEq( const NetEq::Config& config, - Clock* clock, - const rtc::scoped_refptr& decoder_factory) { - return DefaultNetEqFactory().CreateNetEq(config, decoder_factory, clock); + scoped_refptr decoder_factory) { + return DefaultNetEqFactory().Create(CreateEnvironment(), config, + std::move(decoder_factory)); } } // namespace @@ -58,8 +58,8 @@ class MockAudioDecoder final : public AudioDecoder { size_t Duration() const override { return kPacketDuration; } - absl::optional Decode( - rtc::ArrayView decoded) const override { + std::optional Decode( + ArrayView decoded) const override { const size_t output_size = sizeof(int16_t) * kPacketDuration * num_channels_; if (decoded.size() >= output_size) { @@ -69,7 +69,7 @@ class MockAudioDecoder final : public AudioDecoder { } else { ADD_FAILURE() << "Expected decoded.size() to be >= output_size (" << decoded.size() << " vs. 
" << output_size << ")"; - return absl::nullopt; + return std::nullopt; } } @@ -77,7 +77,7 @@ class MockAudioDecoder final : public AudioDecoder { const size_t num_channels_; }; - std::vector ParsePayload(rtc::Buffer&& payload, + std::vector ParsePayload(Buffer&& /* payload */, uint32_t timestamp) override { std::vector results; if (fec_enabled_) { @@ -91,14 +91,15 @@ class MockAudioDecoder final : public AudioDecoder { return results; } - int PacketDuration(const uint8_t* encoded, - size_t encoded_len) const override { + int PacketDuration(const uint8_t* /* encoded */, + size_t /* encoded_len */) const override { ADD_FAILURE() << "Since going through ParsePayload, PacketDuration should " "never get called."; return kPacketDuration; } - bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const override { + bool PacketHasFec(const uint8_t* /* encoded */, + size_t /* encoded_len */) const override { ADD_FAILURE() << "Since going through ParsePayload, PacketHasFec should " "never get called."; return fec_enabled_; @@ -113,11 +114,11 @@ class MockAudioDecoder final : public AudioDecoder { bool fec_enabled() const { return fec_enabled_; } protected: - int DecodeInternal(const uint8_t* encoded, - size_t encoded_len, - int sample_rate_hz, - int16_t* decoded, - SpeechType* speech_type) override { + int DecodeInternal(const uint8_t* /* encoded */, + size_t /* encoded_len */, + int /* sample_rate_hz */, + int16_t* /* decoded */, + SpeechType* /* speech_type */) override { ADD_FAILURE() << "Since going through ParsePayload, DecodeInternal should " "never get called."; return -1; @@ -160,8 +161,7 @@ class NetEqNetworkStatsTest { NetEqNetworkStatsTest(const SdpAudioFormat& format, MockAudioDecoder* decoder) : decoder_(decoder), - decoder_factory_( - rtc::make_ref_counted(decoder)), + decoder_factory_(make_ref_counted(decoder)), samples_per_ms_(format.clockrate_hz / 1000), frame_size_samples_(kFrameSizeMs * samples_per_ms_), rtp_generator_(new RtpGenerator(samples_per_ms_)), @@ -169,7 +169,7 @@ class NetEqNetworkStatsTest { packet_loss_interval_(0xffffffff) { NetEq::Config config; config.sample_rate_hz = format.clockrate_hz; - neteq_ = CreateNetEq(config, Clock::GetRealTimeClock(), decoder_factory_); + neteq_ = CreateNetEq(config, decoder_factory_); neteq_->RegisterPayloadType(kPayloadType, format); } @@ -238,7 +238,9 @@ class NetEqNetworkStatsTest { kPayloadType, frame_size_samples_, &rtp_header_); if (!Lost(next_send_time)) { static const uint8_t payload[kPayloadSizeByte] = {0}; - ASSERT_EQ(NetEq::kOK, neteq_->InsertPacket(rtp_header_, payload)); + ASSERT_EQ(NetEq::kOK, + neteq_->InsertPacket(rtp_header_, payload, + Timestamp::Millis(next_send_time))); } } bool muted = true; @@ -273,15 +275,16 @@ class NetEqNetworkStatsTest { // Next we introduce packet losses. SetPacketLossRate(0.1); - expects.stats_ref.expand_rate = expects.stats_ref.speech_expand_rate = 898; + expects.expand_rate = expects.speech_expand_rate = kLargerThan; RunTest(50, expects); // Next we enable FEC. decoder_->set_fec_enabled(true); // If FEC fills in the lost packets, no packet loss will be counted. 
+ expects.expand_rate = expects.speech_expand_rate = kEqual; expects.stats_ref.expand_rate = expects.stats_ref.speech_expand_rate = 0; - expects.stats_ref.secondary_decoded_rate = 2006; - expects.stats_ref.secondary_discarded_rate = 14336; + expects.secondary_decoded_rate = kLargerThan; + expects.secondary_discarded_rate = kLargerThan; RunTest(50, expects); } @@ -308,7 +311,7 @@ class NetEqNetworkStatsTest { private: MockAudioDecoder* decoder_; - rtc::scoped_refptr decoder_factory_; + scoped_refptr decoder_factory_; std::unique_ptr neteq_; const int samples_per_ms_; diff --git a/modules/audio_coding/neteq/neteq_stereo_unittest.cc b/modules/audio_coding/neteq/neteq_stereo_unittest.cc index 6fa56fd1c1..1eb810835a 100644 --- a/modules/audio_coding/neteq/neteq_stereo_unittest.cc +++ b/modules/audio_coding/neteq/neteq_stereo_unittest.cc @@ -17,9 +17,12 @@ #include "api/audio/audio_frame.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/neteq/default_neteq_factory.h" #include "api/neteq/neteq.h" +#include "api/units/timestamp.h" #include "modules/audio_coding/codecs/pcm16b/pcm16b.h" -#include "modules/audio_coding/neteq/default_neteq_factory.h" #include "modules/audio_coding/neteq/tools/input_audio_file.h" #include "modules/audio_coding/neteq/tools/rtp_generator.h" #include "rtc_base/strings/string_builder.h" @@ -60,6 +63,7 @@ class NetEqStereoTest : public ::testing::TestWithParam { static_cast(frame_size_ms_ * samples_per_ms_)), output_size_samples_(10 * samples_per_ms_), clock_(0), + env_(CreateEnvironment(&clock_)), rtp_generator_mono_(samples_per_ms_), rtp_generator_(samples_per_ms_), payload_size_bytes_(0), @@ -70,8 +74,8 @@ class NetEqStereoTest : public ::testing::TestWithParam { config.sample_rate_hz = sample_rate_hz_; DefaultNetEqFactory neteq_factory; auto decoder_factory = CreateBuiltinAudioDecoderFactory(); - neteq_mono_ = neteq_factory.CreateNetEq(config, decoder_factory, &clock_); - neteq_ = neteq_factory.CreateNetEq(config, decoder_factory, &clock_); + neteq_mono_ = neteq_factory.Create(env_, config, decoder_factory); + neteq_ = neteq_factory.Create(env_, config, decoder_factory); input_ = new int16_t[frame_size_samples_]; encoded_ = new uint8_t[2 * frame_size_samples_]; input_multi_channel_ = new int16_t[frame_size_samples_ * num_channels_]; @@ -109,7 +113,7 @@ class NetEqStereoTest : public ::testing::TestWithParam { if (frame_size_samples_ * 2 != payload_size_bytes_) { return -1; } - int next_send_time = rtp_generator_mono_.GetRtpHeader( + int next_send_time_ms = rtp_generator_mono_.GetRtpHeader( kPayloadTypeMono, frame_size_samples_, &rtp_header_mono_); MakeMultiChannelInput(); multi_payload_size_bytes_ = WebRtcPcm16b_Encode( @@ -120,7 +124,7 @@ class NetEqStereoTest : public ::testing::TestWithParam { } rtp_generator_.GetRtpHeader(kPayloadTypeMulti, frame_size_samples_, &rtp_header_); - return next_send_time; + return next_send_time_ms; } virtual void MakeMultiChannelInput() { @@ -151,32 +155,35 @@ class NetEqStereoTest : public ::testing::TestWithParam { void RunTest(int num_loops) { // Get next input packets (mono and multi-channel). 
- int next_send_time; - int next_arrival_time; + int next_send_time_ms; + int next_arrival_time_ms; do { - next_send_time = GetNewPackets(); - ASSERT_NE(-1, next_send_time); - next_arrival_time = GetArrivalTime(next_send_time); + next_send_time_ms = GetNewPackets(); + ASSERT_NE(-1, next_send_time_ms); + next_arrival_time_ms = GetArrivalTime(next_send_time_ms); } while (Lost()); // If lost, immediately read the next packet. - int time_now = 0; + int time_now_ms = 0; for (int k = 0; k < num_loops; ++k) { - while (time_now >= next_arrival_time) { + while (time_now_ms >= next_arrival_time_ms) { // Insert packet in mono instance. ASSERT_EQ(NetEq::kOK, neteq_mono_->InsertPacket( - rtp_header_mono_, rtc::ArrayView( - encoded_, payload_size_bytes_))); + rtp_header_mono_, + ArrayView(encoded_, payload_size_bytes_), + Timestamp::Millis(time_now_ms))); // Insert packet in multi-channel instance. - ASSERT_EQ(NetEq::kOK, neteq_->InsertPacket( - rtp_header_, rtc::ArrayView( - encoded_multi_channel_, - multi_payload_size_bytes_))); + ASSERT_EQ(NetEq::kOK, + neteq_->InsertPacket( + rtp_header_, + ArrayView(encoded_multi_channel_, + multi_payload_size_bytes_), + Timestamp::Millis(time_now_ms))); // Get next input packets (mono and multi-channel). do { - next_send_time = GetNewPackets(); - ASSERT_NE(-1, next_send_time); - next_arrival_time = GetArrivalTime(next_send_time); + next_send_time_ms = GetNewPackets(); + ASSERT_NE(-1, next_send_time_ms); + next_arrival_time_ms = GetArrivalTime(next_send_time_ms); } while (Lost()); // If lost, immediately read the next packet. } // Get audio from mono instance. @@ -191,13 +198,13 @@ class NetEqStereoTest : public ::testing::TestWithParam { EXPECT_EQ(num_channels_, output_multi_channel_.num_channels_); EXPECT_EQ(output_size_samples_, output_multi_channel_.samples_per_channel_); - rtc::StringBuilder ss; + StringBuilder ss; ss << "Lap number " << k << "."; SCOPED_TRACE(ss.str()); // Print out the parameter values on failure. // Compare mono and multi-channel. 
ASSERT_NO_FATAL_FAILURE(VerifyOutput(output_size_samples_)); - time_now += kTimeStepMs; + time_now_ms += kTimeStepMs; clock_.AdvanceTimeMilliseconds(kTimeStepMs); } } @@ -209,6 +216,7 @@ class NetEqStereoTest : public ::testing::TestWithParam { const size_t frame_size_samples_; const size_t output_size_samples_; SimulatedClock clock_; + const Environment env_; std::unique_ptr neteq_mono_; std::unique_ptr neteq_; test::RtpGenerator rtp_generator_mono_; diff --git a/modules/audio_coding/neteq/neteq_unittest.cc b/modules/audio_coding/neteq/neteq_unittest.cc index 77bd5b5035..b0e0e77c54 100644 --- a/modules/audio_coding/neteq/neteq_unittest.cc +++ b/modules/audio_coding/neteq/neteq_unittest.cc @@ -31,7 +31,6 @@ #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "rtc_base/ignore_wundef.h" #include "rtc_base/message_digest.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/strings/string_builder.h" @@ -44,15 +43,8 @@ ABSL_FLAG(bool, gen_ref, false, "Generate reference files."); namespace webrtc { -#if defined(WEBRTC_LINUX) && defined(WEBRTC_ARCH_X86_64) && \ - defined(WEBRTC_NETEQ_UNITTEST_BITEXACT) && \ - (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)) && \ - defined(WEBRTC_CODEC_ILBC) -#define MAYBE_TestBitExactness TestBitExactness -#else -#define MAYBE_TestBitExactness DISABLED_TestBitExactness -#endif -TEST_F(NetEqDecodingTest, MAYBE_TestBitExactness) { +// TODO(bugs.webrtc.org/345525069): Either fix/enable or remove. +TEST_F(NetEqDecodingTest, DISABLED_TestBitExactness) { const std::string input_rtp_file = webrtc::test::ResourcePath("audio_coding/neteq_universal_new", "rtp"); @@ -66,9 +58,10 @@ TEST_F(NetEqDecodingTest, MAYBE_TestBitExactness) { absl::GetFlag(FLAGS_gen_ref)); } +// TODO: https://issues.webrtc.org/411157363 - reenable test after update. #if defined(WEBRTC_LINUX) && defined(WEBRTC_ARCH_X86_64) && \ defined(WEBRTC_NETEQ_UNITTEST_BITEXACT) && defined(WEBRTC_CODEC_OPUS) -#define MAYBE_TestOpusBitExactness TestOpusBitExactness +#define MAYBE_TestOpusBitExactness DISABLED_TestOpusBitExactness #else #define MAYBE_TestOpusBitExactness DISABLED_TestOpusBitExactness #endif @@ -77,20 +70,22 @@ TEST_F(NetEqDecodingTest, MAYBE_TestOpusBitExactness) { webrtc::test::ResourcePath("audio_coding/neteq_opus", "rtp"); const std::string output_checksum = - "fec6827bb9ee0b21770bbbb4a3a6f8823bf537dc|" - "3610cc7be4b3407b9c273b1299ab7f8f47cca96b"; + "434bdc4ec08546510ee903d001c8be1a01c44e24|" + "4336be0091e2faad7a194c16ee0a05e727325727|" + "cefd2de4adfa8f6a9b66a3639ad63c2f6779d0cd"; const std::string network_stats_checksum = - "3d043e47e5f4bb81d37e7bce8c44bf802965c853|" - "076662525572dba753b11578330bd491923f7f5e"; + "06f6b9a86aeae6317fd25a36edf9ed16f35e798f|" + "80ab17c17da030d4f2dfbf314ac44aacdadd7f0c"; DecodeAndCompare(input_rtp_file, output_checksum, network_stats_checksum, absl::GetFlag(FLAGS_gen_ref)); } +// TODO: https://issues.webrtc.org/411157363 - reenable test after update. 
#if defined(WEBRTC_LINUX) && defined(WEBRTC_ARCH_X86_64) && \ defined(WEBRTC_NETEQ_UNITTEST_BITEXACT) && defined(WEBRTC_CODEC_OPUS) -#define MAYBE_TestOpusDtxBitExactness TestOpusDtxBitExactness +#define MAYBE_TestOpusDtxBitExactness DISABLED_TestOpusDtxBitExactness #else #define MAYBE_TestOpusDtxBitExactness DISABLED_TestOpusDtxBitExactness #endif @@ -99,11 +94,11 @@ TEST_F(NetEqDecodingTest, MAYBE_TestOpusDtxBitExactness) { webrtc::test::ResourcePath("audio_coding/neteq_opus_dtx", "rtp"); const std::string output_checksum = - "b3c4899eab5378ef5e54f2302948872149f6ad5e|" - "589e975ec31ea13f302457fea1425be9380ffb96"; + "7eddce841cbfa500964c91cdae78b01b9f448948|" + "5d13affec87bf4cc8c7667f0cd0d25e1ad09c7c3"; const std::string network_stats_checksum = - "dc8447b9fee1a21fd5d1f4045d62b982a3fb0215"; + "6af74a713749cc4343464718b6af54f1e5b06ad9"; DecodeAndCompare(input_rtp_file, output_checksum, network_stats_checksum, absl::GetFlag(FLAGS_gen_ref)); @@ -127,12 +122,12 @@ TEST_F(NetEqDecodingTestFaxMode, TestFrameWaitingTimeStatistics) { for (size_t i = 0; i < num_frames; ++i) { const uint8_t payload[kPayloadBytes] = {0}; RTPHeader rtp_info; - rtp_info.sequenceNumber = rtc::checked_cast(i); - rtp_info.timestamp = rtc::checked_cast(i * kSamples); + rtp_info.sequenceNumber = checked_cast(i); + rtp_info.timestamp = checked_cast(i * kSamples); rtp_info.ssrc = 0x1234; // Just an arbitrary SSRC. rtp_info.payloadType = 94; // PCM16b WB codec. rtp_info.markerBit = 0; - ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload)); + ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, clock_.CurrentTime())); } // Pull out all data. for (size_t i = 0; i < num_frames; ++i) { @@ -165,7 +160,7 @@ TEST_F(NetEqDecodingTest, LongCngWithNegativeClockDrift) { const double kDriftFactor = 1000.0 / (1000.0 + 25.0); const double kNetworkFreezeTimeMs = 0.0; const bool kGetAudioDuringFreezeRecovery = false; - const int kDelayToleranceMs = 20; + const int kDelayToleranceMs = 60; const int kMaxTimeToSpeechMs = 100; LongCngWithClockDrift(kDriftFactor, kNetworkFreezeTimeMs, kGetAudioDuringFreezeRecovery, kDelayToleranceMs, @@ -237,7 +232,8 @@ TEST_F(NetEqDecodingTest, UnknownPayloadType) { RTPHeader rtp_info; PopulateRtpInfo(0, 0, &rtp_info); rtp_info.payloadType = 1; // Not registered as a decoder. - EXPECT_EQ(NetEq::kFail, neteq_->InsertPacket(rtp_info, payload)); + EXPECT_EQ(NetEq::kFail, + neteq_->InsertPacket(rtp_info, payload, clock_.CurrentTime())); } #if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX) @@ -252,7 +248,7 @@ TEST_F(NetEqDecodingTest, MAYBE_DecoderError) { RTPHeader rtp_info; PopulateRtpInfo(0, 0, &rtp_info); rtp_info.payloadType = 103; // iSAC, but the payload is invalid. - EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload)); + EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload, clock_.CurrentTime())); // Set all of `out_data_` to 1, and verify that it was set to 0 by the call // to GetAudio. int16_t* out_frame_data = out_frame_.mutable_data(); @@ -267,7 +263,7 @@ TEST_F(NetEqDecodingTest, MAYBE_DecoderError) { static const int kExpectedOutputLength = 160; // 10 ms at 16 kHz sample rate. const int16_t* const_out_frame_data = out_frame_.data(); for (int i = 0; i < kExpectedOutputLength; ++i) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "i = " << i; SCOPED_TRACE(ss.str()); // Print out the parameter values on failure. EXPECT_EQ(0, const_out_frame_data[i]); @@ -289,7 +285,7 @@ TEST_F(NetEqDecodingTest, GetAudioBeforeInsertPacket) { kInitSampleRateHz / 100; // 10 ms at initial sample rate. 
const int16_t* const_out_frame_data = out_frame_.data(); for (int i = 0; i < kExpectedOutputLength; ++i) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "i = " << i; SCOPED_TRACE(ss.str()); // Print out the parameter values on failure. EXPECT_EQ(0, const_out_frame_data[i]); @@ -340,8 +336,10 @@ class NetEqBgnTest : public NetEqDecodingTest { WebRtcPcm16b_Encode(block.data(), block.size(), payload); ASSERT_EQ(enc_len_bytes, expected_samples_per_channel * 2); - ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, rtc::ArrayView( - payload, enc_len_bytes))); + ASSERT_EQ(0, + neteq_->InsertPacket( + rtp_info, ArrayView(payload, enc_len_bytes), + clock_.CurrentTime())); output.Reset(); ASSERT_EQ(0, neteq_->GetAudio(&output, &muted)); ASSERT_EQ(1u, output.num_channels_); @@ -350,7 +348,7 @@ class NetEqBgnTest : public NetEqDecodingTest { // Next packet. rtp_info.timestamp += - rtc::checked_cast(expected_samples_per_channel); + checked_cast(expected_samples_per_channel); rtp_info.sequenceNumber++; } @@ -445,7 +443,7 @@ TEST_F(NetEqDecodingTest, DiscardDuplicateCng) { bool muted; for (int i = 0; i < 3; ++i) { PopulateRtpInfo(seq_no, timestamp, &rtp_info); - ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload)); + ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, clock_.CurrentTime())); ++seq_no; timestamp += kSamples; @@ -462,8 +460,9 @@ TEST_F(NetEqDecodingTest, DiscardDuplicateCng) { size_t payload_len; PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len); // This is the first time this CNG packet is inserted. - ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, rtc::ArrayView( - payload, payload_len))); + ASSERT_EQ(0, neteq_->InsertPacket( + rtp_info, ArrayView(payload, payload_len), + clock_.CurrentTime())); // Pull audio once and make sure CNG is played. ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted)); @@ -476,8 +475,9 @@ TEST_F(NetEqDecodingTest, DiscardDuplicateCng) { // Insert the same CNG packet again. Note that at this point it is old, since // we have already decoded the first copy of it. - ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, rtc::ArrayView( - payload, payload_len))); + ASSERT_EQ(0, neteq_->InsertPacket( + rtp_info, ArrayView(payload, payload_len), + clock_.CurrentTime())); // Pull audio until we have played `kCngPeriodMs` of CNG. Start at 10 ms since // we have already pulled out CNG once. @@ -495,9 +495,9 @@ TEST_F(NetEqDecodingTest, DiscardDuplicateCng) { timestamp += kCngPeriodSamples; uint32_t first_speech_timestamp = timestamp; // Insert speech again. 
- for (int i = 0; i < 3; ++i) { + for (int i = 0; i < 4; ++i) { PopulateRtpInfo(seq_no, timestamp, &rtp_info); - ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload)); + ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, clock_.CurrentTime())); ++seq_no; timestamp += kSamples; } @@ -506,7 +506,7 @@ TEST_F(NetEqDecodingTest, DiscardDuplicateCng) { ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted)); ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_); EXPECT_EQ(AudioFrame::kNormalSpeech, out_frame_.speech_type_); - absl::optional playout_timestamp = neteq_->GetPlayoutTimestamp(); + std::optional playout_timestamp = neteq_->GetPlayoutTimestamp(); ASSERT_TRUE(playout_timestamp); EXPECT_EQ(first_speech_timestamp + kSamples - algorithmic_delay_samples, *playout_timestamp); @@ -528,8 +528,9 @@ TEST_F(NetEqDecodingTest, CngFirst) { PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len); ASSERT_EQ(NetEq::kOK, - neteq_->InsertPacket( - rtp_info, rtc::ArrayView(payload, payload_len))); + neteq_->InsertPacket(rtp_info, + ArrayView(payload, payload_len), + clock_.CurrentTime())); ++seq_no; timestamp += kCngPeriodSamples; @@ -545,7 +546,7 @@ TEST_F(NetEqDecodingTest, CngFirst) { do { ASSERT_LT(timeout_counter++, 20) << "Test timed out"; PopulateRtpInfo(seq_no, timestamp, &rtp_info); - ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload)); + ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, clock_.CurrentTime())); ++seq_no; timestamp += kSamples; @@ -571,7 +572,7 @@ class NetEqDecodingTestWithMutedState : public NetEqDecodingTest { uint8_t payload[kPayloadBytes] = {0}; RTPHeader rtp_info; PopulateRtpInfo(0, rtp_timestamp, &rtp_info); - EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload)); + EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload, clock_.CurrentTime())); } void InsertCngPacket(uint32_t rtp_timestamp) { @@ -580,8 +581,9 @@ class NetEqDecodingTestWithMutedState : public NetEqDecodingTest { size_t payload_len; PopulateCng(0, rtp_timestamp, &rtp_info, payload, &payload_len); EXPECT_EQ(NetEq::kOK, - neteq_->InsertPacket(rtp_info, rtc::ArrayView( - payload, payload_len))); + neteq_->InsertPacket( + rtp_info, ArrayView(payload, payload_len), + clock_.CurrentTime())); } bool GetAudioReturnMuted() { @@ -651,7 +653,7 @@ TEST_F(NetEqDecodingTestWithMutedState, MutedState) { // NetEqNetworkStatistics::expand_rate tells the fraction of samples that were // concealment samples, in Q14 (16384 = 100%) .The vast majority should be // concealment samples in this test. - EXPECT_GT(stats.expand_rate, 14000); + EXPECT_GT(stats.expand_rate, 13000); // And, it should be greater than the speech_expand_rate. 
EXPECT_GT(stats.expand_rate, stats.speech_expand_rate); } @@ -700,8 +702,7 @@ TEST_F(NetEqDecodingTestWithMutedState, MutedStateOldPacket) { for (int i = 0; i < 5; ++i) { InsertPacket(kSamples * (i - 1000)); } - EXPECT_FALSE(GetAudioReturnMuted()); - EXPECT_EQ(AudioFrame::kNormalSpeech, out_frame_.speech_type_); + GetAudioUntilNormal(); } // Verifies that NetEq doesn't enter muted state when CNG mode is active and the @@ -792,13 +793,13 @@ TEST_F(NetEqDecodingTestTwoInstances, CompareMutedStateOnOff) { uint8_t payload[kPayloadBytes] = {0}; RTPHeader rtp_info; PopulateRtpInfo(0, 0, &rtp_info); - EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload)); - EXPECT_EQ(0, neteq2_->InsertPacket(rtp_info, payload)); + EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload, clock_.CurrentTime())); + EXPECT_EQ(0, neteq2_->InsertPacket(rtp_info, payload, clock_.CurrentTime())); AudioFrame out_frame1, out_frame2; bool muted; for (int i = 0; i < 1000; ++i) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "i = " << i; SCOPED_TRACE(ss.str()); // Print out the loop iterator on failure. EXPECT_EQ(0, neteq_->GetAudio(&out_frame1, &muted)); @@ -816,14 +817,15 @@ TEST_F(NetEqDecodingTestTwoInstances, CompareMutedStateOnOff) { // packet. for (int i = 0; i < 5; ++i) { PopulateRtpInfo(0, kSamples * 1000 + kSamples * i, &rtp_info); - EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload)); - EXPECT_EQ(0, neteq2_->InsertPacket(rtp_info, payload)); + EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload, clock_.CurrentTime())); + EXPECT_EQ(0, + neteq2_->InsertPacket(rtp_info, payload, clock_.CurrentTime())); } int counter = 0; while (out_frame1.speech_type_ != AudioFrame::kNormalSpeech) { ASSERT_LT(counter++, 1000) << "Test timed out"; - rtc::StringBuilder ss; + StringBuilder ss; ss << "counter = " << counter; SCOPED_TRACE(ss.str()); // Print out the loop iterator on failure. EXPECT_EQ(0, neteq_->GetAudio(&out_frame1, &muted)); @@ -855,7 +857,7 @@ TEST_F(NetEqDecodingTest, TestConcealmentEvents) { for (int j = 0; j < 10; j++) { rtp_info.sequenceNumber = seq_no++; rtp_info.timestamp = rtp_info.sequenceNumber * kSamples; - neteq_->InsertPacket(rtp_info, payload); + neteq_->InsertPacket(rtp_info, payload, clock_.CurrentTime()); neteq_->GetAudio(&out_frame_, &muted); } @@ -895,11 +897,12 @@ void NetEqDecodingTestFaxMode::TestJitterBufferDelay(bool apply_packet_loss) { if (packets_sent < kNumPackets) { rtp_info.sequenceNumber = packets_sent++; rtp_info.timestamp = rtp_info.sequenceNumber * kSamples; - neteq_->InsertPacket(rtp_info, payload); + neteq_->InsertPacket(rtp_info, payload, clock_.CurrentTime()); } // Get packet. if (packets_sent > kDelayInNumPackets) { + clock_.AdvanceTime(TimeDelta::Millis(kPacketLenMs)); neteq_->GetAudio(&out_frame_, &muted); packets_received++; @@ -919,17 +922,22 @@ void NetEqDecodingTestFaxMode::TestJitterBufferDelay(bool apply_packet_loss) { } if (apply_packet_loss) { + clock_.AdvanceTime(TimeDelta::Millis(kPacketLenMs)); // Extra call to GetAudio to cause concealment. neteq_->GetAudio(&out_frame_, &muted); } // Check jitter buffer delay. 
NetEqLifetimeStatistics stats = neteq_->GetLifetimeStatistics(); - EXPECT_EQ(expected_delay, - rtc::checked_cast(stats.jitter_buffer_delay_ms)); + EXPECT_EQ(expected_delay, checked_cast(stats.jitter_buffer_delay_ms)); EXPECT_EQ(expected_emitted_count, stats.jitter_buffer_emitted_count); EXPECT_EQ(expected_target_delay, - rtc::checked_cast(stats.jitter_buffer_target_delay_ms)); + checked_cast(stats.jitter_buffer_target_delay_ms)); + // In this test, since the packets are inserted with a receive time equal to + // the current clock time, the jitter buffer delay should match the total + // processing delay. + EXPECT_EQ(stats.jitter_buffer_delay_ms * 1000, + stats.total_processing_delay_us); } TEST_F(NetEqDecodingTestFaxMode, TestJitterBufferDelayWithoutLoss) { @@ -951,17 +959,17 @@ TEST_F(NetEqDecodingTestFaxMode, TestJitterBufferDelayWithAcceleration) { const uint8_t payload[kPayloadBytes] = {0}; int expected_target_delay = neteq_->TargetDelayMs() * kSamples; - neteq_->InsertPacket(rtp_info, payload); + neteq_->InsertPacket(rtp_info, payload, clock_.CurrentTime()); bool muted; neteq_->GetAudio(&out_frame_, &muted); rtp_info.sequenceNumber += 1; rtp_info.timestamp += kSamples; - neteq_->InsertPacket(rtp_info, payload); + neteq_->InsertPacket(rtp_info, payload, clock_.CurrentTime()); rtp_info.sequenceNumber += 1; rtp_info.timestamp += kSamples; - neteq_->InsertPacket(rtp_info, payload); + neteq_->InsertPacket(rtp_info, payload, clock_.CurrentTime()); expected_target_delay += neteq_->TargetDelayMs() * 2 * kSamples; // We have two packets in the buffer and kAccelerate operation will @@ -973,7 +981,7 @@ TEST_F(NetEqDecodingTestFaxMode, TestJitterBufferDelayWithAcceleration) { EXPECT_EQ(10 * kSamples * 3, stats.jitter_buffer_delay_ms); EXPECT_EQ(kSamples * 3, stats.jitter_buffer_emitted_count); EXPECT_EQ(expected_target_delay, - rtc::checked_cast(stats.jitter_buffer_target_delay_ms)); + checked_cast(stats.jitter_buffer_target_delay_ms)); } namespace test { @@ -989,7 +997,7 @@ TEST(NetEqNoTimeStretchingMode, RunTest) { {8, kRtpExtensionVideoTiming}}; std::unique_ptr input = CreateNetEqRtpDumpInput( webrtc::test::ResourcePath("audio_coding/neteq_universal_new", "rtp"), - rtp_ext_map, absl::nullopt /*No SSRC filter*/); + rtp_ext_map, std::nullopt /*No SSRC filter*/); std::unique_ptr input_time_limit( new TimeLimitedNetEqInput(std::move(input), 20000)); std::unique_ptr output(new VoidAudioSink); diff --git a/modules/audio_coding/neteq/normal.cc b/modules/audio_coding/neteq/normal.cc index 461ee7fa4a..489aeee339 100644 --- a/modules/audio_coding/neteq/normal.cc +++ b/modules/audio_coding/neteq/normal.cc @@ -40,7 +40,7 @@ int Normal::Process(const int16_t* input, output->Clear(); return 0; } - output->PushBackInterleaved(rtc::ArrayView(input, length)); + output->PushBackInterleaved(ArrayView(input, length)); const int fs_mult = fs_hz_ / 8000; RTC_DCHECK_GT(fs_mult, 0); diff --git a/modules/audio_coding/neteq/normal.h b/modules/audio_coding/neteq/normal.h index 772293b605..e6c918764f 100644 --- a/modules/audio_coding/neteq/normal.h +++ b/modules/audio_coding/neteq/normal.h @@ -41,9 +41,9 @@ class Normal { decoder_database_(decoder_database), background_noise_(background_noise), expand_(expand), - samples_per_ms_(rtc::CheckedDivExact(fs_hz_, 1000)), + samples_per_ms_(CheckedDivExact(fs_hz_, 1000)), default_win_slope_Q14_( - rtc::dchecked_cast((1 << 14) / samples_per_ms_)), + dchecked_cast((1 << 14) / samples_per_ms_)), statistics_(statistics) {} virtual ~Normal() {} diff --git 
a/modules/audio_coding/neteq/normal_unittest.cc b/modules/audio_coding/neteq/normal_unittest.cc index 4554d79576..f774733b11 100644 --- a/modules/audio_coding/neteq/normal_unittest.cc +++ b/modules/audio_coding/neteq/normal_unittest.cc @@ -48,7 +48,8 @@ TEST(Normal, CreateAndDestroy) { BackgroundNoise bgn(channels); SyncBuffer sync_buffer(1, 1000); RandomVector random_vector; - StatisticsCalculator statistics; + TickTimer timer; + StatisticsCalculator statistics(&timer); Expand expand(&bgn, &sync_buffer, &random_vector, &statistics, fs, channels); Normal normal(fs, &db, bgn, &expand, &statistics); EXPECT_CALL(db, Die()); // Called when `db` goes out of scope. @@ -61,7 +62,8 @@ TEST(Normal, AvoidDivideByZero) { BackgroundNoise bgn(channels); SyncBuffer sync_buffer(1, 1000); RandomVector random_vector; - StatisticsCalculator statistics; + TickTimer timer; + StatisticsCalculator statistics(&timer); MockExpand expand(&bgn, &sync_buffer, &random_vector, &statistics, fs, channels); Normal normal(fs, &db, bgn, &expand, &statistics); @@ -96,7 +98,8 @@ TEST(Normal, InputLengthAndChannelsDoNotMatch) { BackgroundNoise bgn(channels); SyncBuffer sync_buffer(channels, 1000); RandomVector random_vector; - StatisticsCalculator statistics; + TickTimer timer; + StatisticsCalculator statistics(&timer); MockExpand expand(&bgn, &sync_buffer, &random_vector, &statistics, fs, channels); Normal normal(fs, &db, bgn, &expand, &statistics); @@ -121,7 +124,8 @@ TEST(Normal, LastModeExpand120msPacket) { BackgroundNoise bgn(kChannels); SyncBuffer sync_buffer(kChannels, 1000); RandomVector random_vector; - StatisticsCalculator statistics; + TickTimer timer; + StatisticsCalculator statistics(&timer); MockExpand expand(&bgn, &sync_buffer, &random_vector, &statistics, kFs, kChannels); Normal normal(kFs, &db, bgn, &expand, &statistics); diff --git a/modules/audio_coding/neteq/packet.h b/modules/audio_coding/neteq/packet.h index 795c36dc12..f6ee755b77 100644 --- a/modules/audio_coding/neteq/packet.h +++ b/modules/audio_coding/neteq/packet.h @@ -72,9 +72,9 @@ struct Packet { uint16_t sequence_number; uint8_t payload_type; // Datagram excluding RTP header and header extension. 
- rtc::Buffer payload; + Buffer payload; Priority priority; - absl::optional packet_info; + std::optional packet_info; std::unique_ptr waiting_time; std::unique_ptr frame; diff --git a/modules/audio_coding/neteq/packet_arrival_history.cc b/modules/audio_coding/neteq/packet_arrival_history.cc index 2077383f76..a36c8a2b06 100644 --- a/modules/audio_coding/neteq/packet_arrival_history.cc +++ b/modules/audio_coding/neteq/packet_arrival_history.cc @@ -11,95 +11,122 @@ #include "modules/audio_coding/neteq/packet_arrival_history.h" #include +#include #include "api/neteq/tick_timer.h" +#include "rtc_base/checks.h" namespace webrtc { -PacketArrivalHistory::PacketArrivalHistory(int window_size_ms) - : window_size_ms_(window_size_ms) {} +PacketArrivalHistory::PacketArrivalHistory(const TickTimer* tick_timer, + int window_size_ms) + : tick_timer_(tick_timer), window_size_ms_(window_size_ms) {} -void PacketArrivalHistory::Insert(uint32_t rtp_timestamp, - int64_t arrival_time_ms) { - RTC_DCHECK(sample_rate_khz_ > 0); - int64_t unwrapped_rtp_timestamp = timestamp_unwrapper_.Unwrap(rtp_timestamp); - if (!newest_rtp_timestamp_ || - unwrapped_rtp_timestamp > *newest_rtp_timestamp_) { - newest_rtp_timestamp_ = unwrapped_rtp_timestamp; +bool PacketArrivalHistory::Insert(uint32_t rtp_timestamp, + int packet_length_samples) { + int64_t arrival_timestamp = + tick_timer_->ticks() * tick_timer_->ms_per_tick() * sample_rate_khz_; + PacketArrival packet(timestamp_unwrapper_.Unwrap(rtp_timestamp), + arrival_timestamp, packet_length_samples); + if (IsObsolete(packet)) { + return false; } - history_.emplace_back(unwrapped_rtp_timestamp / sample_rate_khz_, - arrival_time_ms); - MaybeUpdateCachedArrivals(history_.back()); - while (history_.front().rtp_timestamp_ms + window_size_ms_ < - unwrapped_rtp_timestamp / sample_rate_khz_) { - if (&history_.front() == min_packet_arrival_) { - min_packet_arrival_ = nullptr; - } - if (&history_.front() == max_packet_arrival_) { - max_packet_arrival_ = nullptr; - } - history_.pop_front(); + if (Contains(packet)) { + return false; + } + history_.emplace(packet.rtp_timestamp, packet); + if (packet != history_.rbegin()->second) { + // Packet was reordered. + return true; } - if (!min_packet_arrival_ || !max_packet_arrival_) { - for (const PacketArrival& packet : history_) { - MaybeUpdateCachedArrivals(packet); + // Remove old packets. + while (IsObsolete(history_.begin()->second)) { + if (history_.begin()->second == min_packet_arrivals_.front()) { + min_packet_arrivals_.pop_front(); } + if (history_.begin()->second == max_packet_arrivals_.front()) { + max_packet_arrivals_.pop_front(); + } + history_.erase(history_.begin()); } -} - -void PacketArrivalHistory::MaybeUpdateCachedArrivals( - const PacketArrival& packet_arrival) { - if (!min_packet_arrival_ || packet_arrival <= *min_packet_arrival_) { - min_packet_arrival_ = &packet_arrival; + // Ensure ordering constraints. 
+ while (!min_packet_arrivals_.empty() && + packet <= min_packet_arrivals_.back()) { + min_packet_arrivals_.pop_back(); } - if (!max_packet_arrival_ || packet_arrival >= *max_packet_arrival_) { - max_packet_arrival_ = &packet_arrival; + while (!max_packet_arrivals_.empty() && + packet >= max_packet_arrivals_.back()) { + max_packet_arrivals_.pop_back(); } + min_packet_arrivals_.push_back(packet); + max_packet_arrivals_.push_back(packet); + return true; } void PacketArrivalHistory::Reset() { history_.clear(); - min_packet_arrival_ = nullptr; - max_packet_arrival_ = nullptr; + min_packet_arrivals_.clear(); + max_packet_arrivals_.clear(); timestamp_unwrapper_.Reset(); - newest_rtp_timestamp_ = absl::nullopt; } -int PacketArrivalHistory::GetDelayMs(uint32_t rtp_timestamp, - int64_t time_ms) const { - RTC_DCHECK(sample_rate_khz_ > 0); - int64_t unwrapped_rtp_timestamp_ms = - timestamp_unwrapper_.PeekUnwrap(rtp_timestamp) / sample_rate_khz_; - PacketArrival packet(unwrapped_rtp_timestamp_ms, time_ms); +int PacketArrivalHistory::GetDelayMs(uint32_t rtp_timestamp) const { + int64_t unwrapped_rtp_timestamp = + timestamp_unwrapper_.PeekUnwrap(rtp_timestamp); + int64_t current_timestamp = + tick_timer_->ticks() * tick_timer_->ms_per_tick() * sample_rate_khz_; + PacketArrival packet(unwrapped_rtp_timestamp, current_timestamp, + /*duration_ms=*/0); return GetPacketArrivalDelayMs(packet); } int PacketArrivalHistory::GetMaxDelayMs() const { - if (!max_packet_arrival_) { + if (max_packet_arrivals_.empty()) { return 0; } - return GetPacketArrivalDelayMs(*max_packet_arrival_); + return GetPacketArrivalDelayMs(max_packet_arrivals_.front()); } bool PacketArrivalHistory::IsNewestRtpTimestamp(uint32_t rtp_timestamp) const { - if (!newest_rtp_timestamp_) { - return false; + if (history_.empty()) { + return true; } int64_t unwrapped_rtp_timestamp = timestamp_unwrapper_.PeekUnwrap(rtp_timestamp); - return unwrapped_rtp_timestamp == *newest_rtp_timestamp_; + return unwrapped_rtp_timestamp == history_.rbegin()->second.rtp_timestamp; } int PacketArrivalHistory::GetPacketArrivalDelayMs( const PacketArrival& packet_arrival) const { - if (!min_packet_arrival_) { + if (min_packet_arrivals_.empty()) { return 0; } - return std::max(static_cast(packet_arrival.arrival_time_ms - - min_packet_arrival_->arrival_time_ms - - (packet_arrival.rtp_timestamp_ms - - min_packet_arrival_->rtp_timestamp_ms)), - 0); + RTC_DCHECK_NE(sample_rate_khz_, 0); + // TODO(jakobi): Timestamps are first converted to millis for bit-exactness. 
+ return std::max( + packet_arrival.arrival_timestamp / sample_rate_khz_ - + min_packet_arrivals_.front().arrival_timestamp / sample_rate_khz_ - + (packet_arrival.rtp_timestamp / sample_rate_khz_ - + min_packet_arrivals_.front().rtp_timestamp / sample_rate_khz_), + 0); +} + +bool PacketArrivalHistory::IsObsolete( + const PacketArrival& packet_arrival) const { + if (history_.empty()) { + return false; + } + return packet_arrival.rtp_timestamp + window_size_ms_ * sample_rate_khz_ < + history_.rbegin()->second.rtp_timestamp; +} + +bool PacketArrivalHistory::Contains(const PacketArrival& packet_arrival) const { + auto it = history_.upper_bound(packet_arrival.rtp_timestamp); + if (it == history_.begin()) { + return false; + } + --it; + return it->second.contains(packet_arrival); } } // namespace webrtc diff --git a/modules/audio_coding/neteq/packet_arrival_history.h b/modules/audio_coding/neteq/packet_arrival_history.h index cad362b469..3fa1ea1fa9 100644 --- a/modules/audio_coding/neteq/packet_arrival_history.h +++ b/modules/audio_coding/neteq/packet_arrival_history.h @@ -11,10 +11,11 @@ #ifndef MODULES_AUDIO_CODING_NETEQ_PACKET_ARRIVAL_HISTORY_H_ #define MODULES_AUDIO_CODING_NETEQ_PACKET_ARRIVAL_HISTORY_H_ +#include #include #include +#include -#include "absl/types/optional.h" #include "api/neteq/tick_timer.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" @@ -25,19 +26,23 @@ namespace webrtc { // pruned. class PacketArrivalHistory { public: - explicit PacketArrivalHistory(int window_size_ms); + explicit PacketArrivalHistory(const TickTimer* tick_timer, + int window_size_ms); + virtual ~PacketArrivalHistory() = default; - // Insert packet with `rtp_timestamp` and `arrival_time_ms` into the history. - void Insert(uint32_t rtp_timestamp, int64_t arrival_time_ms); + // Insert packet with `rtp_timestamp` into the history. Returns true if the + // packet was inserted, false if the timestamp is too old or if the timestamp + // already exists. + bool Insert(uint32_t rtp_timestamp, int packet_length_samples); - // The delay for `rtp_timestamp` at `time_ms` is calculated as - // `(time_ms - p.arrival_time_ms) - (rtp_timestamp - p.rtp_timestamp)` - // where `p` is chosen as the packet arrival in the history that maximizes the - // delay. - int GetDelayMs(uint32_t rtp_timestamp, int64_t time_ms) const; + // The delay for `rtp_timestamp` at time `now` is calculated as + // `(now - p.arrival_timestamp) - (rtp_timestamp - p.rtp_timestamp)` where `p` + // is chosen as the packet arrival in the history that maximizes the delay. + virtual int GetDelayMs(uint32_t rtp_timestamp) const; - // Get the maximum packet arrival delay observed in the history. - int GetMaxDelayMs() const; + // Get the maximum packet arrival delay observed in the history, excluding + // reordered packets. 
+ virtual int GetMaxDelayMs() const; bool IsNewestRtpTimestamp(uint32_t rtp_timestamp) const; @@ -51,30 +56,53 @@ class PacketArrivalHistory { private: struct PacketArrival { - PacketArrival(int64_t rtp_timestamp_ms, int64_t arrival_time_ms) - : rtp_timestamp_ms(rtp_timestamp_ms), - arrival_time_ms(arrival_time_ms) {} - int64_t rtp_timestamp_ms; - int64_t arrival_time_ms; + PacketArrival(int64_t rtp_timestamp, + int64_t arrival_timestamp, + int length_samples) + : rtp_timestamp(rtp_timestamp), + arrival_timestamp(arrival_timestamp), + length_samples(length_samples) {} + PacketArrival() = default; + int64_t rtp_timestamp; + int64_t arrival_timestamp; + int length_samples; + bool operator==(const PacketArrival& other) const { + return rtp_timestamp == other.rtp_timestamp && + arrival_timestamp == other.arrival_timestamp && + length_samples == other.length_samples; + } + bool operator!=(const PacketArrival& other) const { + return !(*this == other); + } bool operator<=(const PacketArrival& other) const { - return arrival_time_ms - rtp_timestamp_ms <= - other.arrival_time_ms - other.rtp_timestamp_ms; + return arrival_timestamp - rtp_timestamp <= + other.arrival_timestamp - other.rtp_timestamp; } bool operator>=(const PacketArrival& other) const { - return arrival_time_ms - rtp_timestamp_ms >= - other.arrival_time_ms - other.rtp_timestamp_ms; + return arrival_timestamp - rtp_timestamp >= + other.arrival_timestamp - other.rtp_timestamp; + } + bool contains(const PacketArrival& other) const { + return rtp_timestamp <= other.rtp_timestamp && + rtp_timestamp + length_samples >= + other.rtp_timestamp + other.length_samples; } }; - std::deque history_; int GetPacketArrivalDelayMs(const PacketArrival& packet_arrival) const; - // Updates `min_packet_arrival_` and `max_packet_arrival_`. - void MaybeUpdateCachedArrivals(const PacketArrival& packet); - const PacketArrival* min_packet_arrival_ = nullptr; - const PacketArrival* max_packet_arrival_ = nullptr; + // Checks if the packet is older than the window size. + bool IsObsolete(const PacketArrival& packet_arrival) const; + // Check if the packet exists or fully overlaps with a packet in the history. + bool Contains(const PacketArrival& packet_arrival) const; + const TickTimer* tick_timer_; const int window_size_ms_; - RtpTimestampUnwrapper timestamp_unwrapper_; - absl::optional newest_rtp_timestamp_; int sample_rate_khz_ = 0; + RtpTimestampUnwrapper timestamp_unwrapper_; + // Packet history ordered by rtp timestamp. + std::map history_; + // Tracks min/max packet arrivals in `history_` in ascending/descending order. + // Reordered packets are excluded. 
+ std::deque min_packet_arrivals_; + std::deque max_packet_arrivals_; }; } // namespace webrtc diff --git a/modules/audio_coding/neteq/packet_arrival_history_unittest.cc b/modules/audio_coding/neteq/packet_arrival_history_unittest.cc index 539a318fe1..dd95fec0f7 100644 --- a/modules/audio_coding/neteq/packet_arrival_history_unittest.cc +++ b/modules/audio_coding/neteq/packet_arrival_history_unittest.cc @@ -21,32 +21,36 @@ namespace { constexpr int kFs = 8000; constexpr int kFsKhz = kFs / 1000; constexpr int kFrameSizeMs = 20; +constexpr int kFrameSizeSamples = kFrameSizeMs * kFsKhz; constexpr int kWindowSizeMs = 1000; class PacketArrivalHistoryTest : public testing::Test { public: - PacketArrivalHistoryTest() : history_(kWindowSizeMs) { + PacketArrivalHistoryTest() : history_(&tick_timer_, kWindowSizeMs) { history_.set_sample_rate(kFs); } - void IncrementTime(int delta_ms) { time_ms_ += delta_ms; } + void IncrementTime(int delta_ms) { + tick_timer_.Increment(delta_ms / tick_timer_.ms_per_tick()); + } int InsertPacketAndGetDelay(int timestamp_delta_ms) { uint32_t timestamp = timestamp_ + timestamp_delta_ms * kFsKhz; if (timestamp_delta_ms > 0) { timestamp_ = timestamp; } - history_.Insert(timestamp, time_ms_); + EXPECT_TRUE(history_.Insert(timestamp, kFrameSizeSamples)); EXPECT_EQ(history_.IsNewestRtpTimestamp(timestamp), timestamp_delta_ms >= 0); - return history_.GetDelayMs(timestamp, time_ms_); + return history_.GetDelayMs(timestamp); } protected: - int64_t time_ms_ = 0; + TickTimer tick_timer_; PacketArrivalHistory history_; uint32_t timestamp_ = 0x12345678; }; TEST_F(PacketArrivalHistoryTest, RelativeArrivalDelay) { + // Insert first packet. EXPECT_EQ(InsertPacketAndGetDelay(0), 0); IncrementTime(kFrameSizeMs); @@ -56,7 +60,7 @@ TEST_F(PacketArrivalHistoryTest, RelativeArrivalDelay) { EXPECT_EQ(InsertPacketAndGetDelay(kFrameSizeMs), 20); // Reordered packet. - EXPECT_EQ(InsertPacketAndGetDelay(-2 * kFrameSizeMs), 60); + EXPECT_EQ(InsertPacketAndGetDelay(-3 * kFrameSizeMs), 80); IncrementTime(2 * kFrameSizeMs); EXPECT_EQ(InsertPacketAndGetDelay(kFrameSizeMs), 40); @@ -68,7 +72,7 @@ TEST_F(PacketArrivalHistoryTest, RelativeArrivalDelay) { EXPECT_EQ(InsertPacketAndGetDelay(kFrameSizeMs), 20); // Earlier packet is now more delayed due to the new reference packet. - EXPECT_EQ(history_.GetMaxDelayMs(), 100); + EXPECT_EQ(history_.GetMaxDelayMs(), 80); } TEST_F(PacketArrivalHistoryTest, ReorderedPackets) { @@ -86,7 +90,7 @@ TEST_F(PacketArrivalHistoryTest, ReorderedPackets) { IncrementTime(4 * kFrameSizeMs); EXPECT_EQ(InsertPacketAndGetDelay(kFrameSizeMs), 60); - EXPECT_EQ(history_.GetMaxDelayMs(), 80); + EXPECT_EQ(history_.GetMaxDelayMs(), 60); } TEST_F(PacketArrivalHistoryTest, MaxHistorySize) { @@ -117,7 +121,7 @@ TEST_F(PacketArrivalHistoryTest, TimestampWraparound) { // Insert another in-order packet after the wraparound. EXPECT_EQ(InsertPacketAndGetDelay(kFrameSizeMs), 0); - EXPECT_EQ(history_.GetMaxDelayMs(), 3 * kFrameSizeMs); + EXPECT_EQ(history_.GetMaxDelayMs(), kFrameSizeMs); } TEST_F(PacketArrivalHistoryTest, TimestampWraparoundBackwards) { @@ -134,7 +138,33 @@ TEST_F(PacketArrivalHistoryTest, TimestampWraparoundBackwards) { // Insert another in-order packet after the wraparound. EXPECT_EQ(InsertPacketAndGetDelay(kFrameSizeMs), 0); - EXPECT_EQ(history_.GetMaxDelayMs(), 3 * kFrameSizeMs); + EXPECT_EQ(history_.GetMaxDelayMs(), kFrameSizeMs); +} + +TEST_F(PacketArrivalHistoryTest, OldPacketShouldNotBeInserted) { + // Insert first packet as reference. 
+ EXPECT_EQ(InsertPacketAndGetDelay(0), 0); + // Insert packet with timestamp older than the window size compared to the + // first packet. + EXPECT_FALSE(history_.Insert(timestamp_ - kWindowSizeMs * kFsKhz - 1, + kFrameSizeSamples)); +} + +TEST_F(PacketArrivalHistoryTest, DuplicatePacketShouldNotBeInserted) { + // Insert first packet as reference. + uint32_t first_timestamp = timestamp_; + EXPECT_EQ(InsertPacketAndGetDelay(0), 0); + EXPECT_EQ(InsertPacketAndGetDelay(kFrameSizeMs), 0); + // Same timestamp as the first packet. + EXPECT_FALSE(history_.Insert(first_timestamp, kFrameSizeSamples)); +} + +TEST_F(PacketArrivalHistoryTest, OverlappingPacketShouldNotBeInserted) { + // Insert first packet as reference. + EXPECT_EQ(InsertPacketAndGetDelay(0), 0); + // 10 ms overlap with the previous packet. + EXPECT_FALSE(history_.Insert(timestamp_ + kFrameSizeSamples / 2, + kFrameSizeSamples / 2)); } } // namespace diff --git a/modules/audio_coding/neteq/packet_buffer.cc b/modules/audio_coding/neteq/packet_buffer.cc index 9bfa908ab9..894fc5bef6 100644 --- a/modules/audio_coding/neteq/packet_buffer.cc +++ b/modules/audio_coding/neteq/packet_buffer.cc @@ -28,7 +28,6 @@ #include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { @@ -44,53 +43,14 @@ class NewTimestampIsLarger { const Packet& new_packet_; }; -// Returns true if both payload types are known to the decoder database, and -// have the same sample rate. -bool EqualSampleRates(uint8_t pt1, - uint8_t pt2, - const DecoderDatabase& decoder_database) { - auto* di1 = decoder_database.GetDecoderInfo(pt1); - auto* di2 = decoder_database.GetDecoderInfo(pt2); - return di1 && di2 && di1->SampleRateHz() == di2->SampleRateHz(); -} - -void LogPacketDiscarded(int codec_level, StatisticsCalculator* stats) { - RTC_CHECK(stats); - if (codec_level > 0) { - stats->SecondaryPacketsDiscarded(1); - } else { - stats->PacketsDiscarded(1); - } -} - -absl::optional GetSmartflushingConfig() { - absl::optional result; - std::string field_trial_string = - field_trial::FindFullName("WebRTC-Audio-NetEqSmartFlushing"); - result = SmartFlushingConfig(); - bool enabled = false; - auto parser = StructParametersParser::Create( - "enabled", &enabled, "target_level_threshold_ms", - &result->target_level_threshold_ms, "target_level_multiplier", - &result->target_level_multiplier); - parser->Parse(field_trial_string); - if (!enabled) { - return absl::nullopt; - } - RTC_LOG(LS_INFO) << "Using smart flushing, target_level_threshold_ms: " - << result->target_level_threshold_ms - << ", target_level_multiplier: " - << result->target_level_multiplier; - return result; -} - } // namespace PacketBuffer::PacketBuffer(size_t max_number_of_packets, - const TickTimer* tick_timer) - : smart_flushing_config_(GetSmartflushingConfig()), - max_number_of_packets_(max_number_of_packets), - tick_timer_(tick_timer) {} + const TickTimer* tick_timer, + StatisticsCalculator* stats) + : max_number_of_packets_(max_number_of_packets), + tick_timer_(tick_timer), + stats_(stats) {} // Destructor. All packets in the buffer will be destroyed. PacketBuffer::~PacketBuffer() { @@ -98,45 +58,19 @@ PacketBuffer::~PacketBuffer() { } // Flush the buffer. All packets in the buffer will be destroyed. 
-void PacketBuffer::Flush(StatisticsCalculator* stats) { +void PacketBuffer::Flush() { for (auto& p : buffer_) { - LogPacketDiscarded(p.priority.codec_level, stats); + LogPacketDiscarded(p.priority.codec_level); } buffer_.clear(); - stats->FlushedPacketBuffer(); -} - -void PacketBuffer::PartialFlush(int target_level_ms, - size_t sample_rate, - size_t last_decoded_length, - StatisticsCalculator* stats) { - // Make sure that at least half the packet buffer capacity will be available - // after the flush. This is done to avoid getting stuck if the target level is - // very high. - int target_level_samples = - std::min(target_level_ms * sample_rate / 1000, - max_number_of_packets_ * last_decoded_length / 2); - // We should avoid flushing to very low levels. - target_level_samples = std::max( - target_level_samples, smart_flushing_config_->target_level_threshold_ms); - while (GetSpanSamples(last_decoded_length, sample_rate, false) > - static_cast(target_level_samples) || - buffer_.size() > max_number_of_packets_ / 2) { - LogPacketDiscarded(PeekNextPacket()->priority.codec_level, stats); - buffer_.pop_front(); - } + stats_->FlushedPacketBuffer(); } bool PacketBuffer::Empty() const { return buffer_.empty(); } -int PacketBuffer::InsertPacket(Packet&& packet, - StatisticsCalculator* stats, - size_t last_decoded_length, - size_t sample_rate, - int target_level_ms, - const DecoderDatabase& decoder_database) { +int PacketBuffer::InsertPacket(Packet&& packet) { if (packet.empty()) { RTC_LOG(LS_WARNING) << "InsertPacket invalid packet"; return kInvalidPacket; @@ -149,32 +83,11 @@ int PacketBuffer::InsertPacket(Packet&& packet, packet.waiting_time = tick_timer_->GetNewStopwatch(); - // Perform a smart flush if the buffer size exceeds a multiple of the target - // level. - const size_t span_threshold = - smart_flushing_config_ - ? smart_flushing_config_->target_level_multiplier * - std::max(smart_flushing_config_->target_level_threshold_ms, - target_level_ms) * - sample_rate / 1000 - : 0; - const bool smart_flush = - smart_flushing_config_.has_value() && - GetSpanSamples(last_decoded_length, sample_rate, false) >= span_threshold; - if (buffer_.size() >= max_number_of_packets_ || smart_flush) { - size_t buffer_size_before_flush = buffer_.size(); - if (smart_flushing_config_.has_value()) { - // Flush down to the target level. - PartialFlush(target_level_ms, sample_rate, last_decoded_length, stats); - return_val = kPartialFlush; - } else { - // Buffer is full. - Flush(stats); - return_val = kFlushed; - } - RTC_LOG(LS_WARNING) << "Packet buffer flushed, " - << (buffer_size_before_flush - buffer_.size()) - << " packets discarded."; + if (buffer_.size() >= max_number_of_packets_) { + // Buffer is full. + Flush(); + return_val = kFlushed; + RTC_LOG(LS_WARNING) << "Packet buffer flushed."; } // Get an iterator pointing to the place in the buffer where the new packet @@ -187,7 +100,7 @@ int PacketBuffer::InsertPacket(Packet&& packet, // timestamp as `rit`, which has a higher priority, do not insert the new // packet to list. if (rit != buffer_.rend() && packet.timestamp == rit->timestamp) { - LogPacketDiscarded(packet.priority.codec_level, stats); + LogPacketDiscarded(packet.priority.codec_level); return return_val; } @@ -196,7 +109,7 @@ int PacketBuffer::InsertPacket(Packet&& packet, // packet. 
PacketList::iterator it = rit.base(); if (it != buffer_.end() && packet.timestamp == it->timestamp) { - LogPacketDiscarded(it->priority.codec_level, stats); + LogPacketDiscarded(it->priority.codec_level); it = buffer_.erase(it); } buffer_.insert(it, std::move(packet)); // Insert the packet at that position. @@ -204,57 +117,6 @@ int PacketBuffer::InsertPacket(Packet&& packet, return return_val; } -int PacketBuffer::InsertPacketList( - PacketList* packet_list, - const DecoderDatabase& decoder_database, - absl::optional* current_rtp_payload_type, - absl::optional* current_cng_rtp_payload_type, - StatisticsCalculator* stats, - size_t last_decoded_length, - size_t sample_rate, - int target_level_ms) { - RTC_DCHECK(stats); - bool flushed = false; - for (auto& packet : *packet_list) { - if (decoder_database.IsComfortNoise(packet.payload_type)) { - if (*current_cng_rtp_payload_type && - **current_cng_rtp_payload_type != packet.payload_type) { - // New CNG payload type implies new codec type. - *current_rtp_payload_type = absl::nullopt; - Flush(stats); - flushed = true; - } - *current_cng_rtp_payload_type = packet.payload_type; - } else if (!decoder_database.IsDtmf(packet.payload_type)) { - // This must be speech. - if ((*current_rtp_payload_type && - **current_rtp_payload_type != packet.payload_type) || - (*current_cng_rtp_payload_type && - !EqualSampleRates(packet.payload_type, - **current_cng_rtp_payload_type, - decoder_database))) { - *current_cng_rtp_payload_type = absl::nullopt; - Flush(stats); - flushed = true; - } - *current_rtp_payload_type = packet.payload_type; - } - int return_val = - InsertPacket(std::move(packet), stats, last_decoded_length, sample_rate, - target_level_ms, decoder_database); - if (return_val == kFlushed) { - // The buffer flushed, but this is not an error. We can still continue. - flushed = true; - } else if (return_val != kOK) { - // An error occurred. Delete remaining packets in list and return. - packet_list->clear(); - return return_val; - } - } - packet_list->clear(); - return flushed ? kFlushed : kOK; -} - int PacketBuffer::NextTimestamp(uint32_t* next_timestamp) const { if (Empty()) { return kBufferEmpty; @@ -289,13 +151,13 @@ const Packet* PacketBuffer::PeekNextPacket() const { return buffer_.empty() ? nullptr : &buffer_.front(); } -absl::optional PacketBuffer::GetNextPacket() { +std::optional PacketBuffer::GetNextPacket() { if (Empty()) { // Buffer is empty. - return absl::nullopt; + return std::nullopt; } - absl::optional packet(std::move(buffer_.front())); + std::optional packet(std::move(buffer_.front())); // Assert that the packet sanity checks in InsertPacket method works. RTC_DCHECK(!packet->empty()); buffer_.pop_front(); @@ -303,43 +165,40 @@ absl::optional PacketBuffer::GetNextPacket() { return packet; } -int PacketBuffer::DiscardNextPacket(StatisticsCalculator* stats) { +int PacketBuffer::DiscardNextPacket() { if (Empty()) { return kBufferEmpty; } // Assert that the packet sanity checks in InsertPacket method works. 
const Packet& packet = buffer_.front(); RTC_DCHECK(!packet.empty()); - LogPacketDiscarded(packet.priority.codec_level, stats); + LogPacketDiscarded(packet.priority.codec_level); buffer_.pop_front(); return kOK; } void PacketBuffer::DiscardOldPackets(uint32_t timestamp_limit, - uint32_t horizon_samples, - StatisticsCalculator* stats) { - buffer_.remove_if([timestamp_limit, horizon_samples, stats](const Packet& p) { + uint32_t horizon_samples) { + buffer_.remove_if([this, timestamp_limit, horizon_samples](const Packet& p) { if (timestamp_limit == p.timestamp || !IsObsoleteTimestamp(p.timestamp, timestamp_limit, horizon_samples)) { return false; } - LogPacketDiscarded(p.priority.codec_level, stats); + LogPacketDiscarded(p.priority.codec_level); return true; }); } -void PacketBuffer::DiscardAllOldPackets(uint32_t timestamp_limit, - StatisticsCalculator* stats) { - DiscardOldPackets(timestamp_limit, 0, stats); +void PacketBuffer::DiscardAllOldPackets(uint32_t timestamp_limit) { + DiscardOldPackets(timestamp_limit, 0); } -void PacketBuffer::DiscardPacketsWithPayloadType(uint8_t payload_type, - StatisticsCalculator* stats) { - buffer_.remove_if([payload_type, stats](const Packet& p) { +void PacketBuffer::DiscardPacketsWithPayloadType(uint8_t payload_type) { + buffer_.remove_if([this, payload_type](const Packet& p) { if (p.payload_type != payload_type) { return false; } - LogPacketDiscarded(p.priority.codec_level, stats); + LogPacketDiscarded(p.priority.codec_level); return true; }); } @@ -376,7 +235,7 @@ size_t PacketBuffer::GetSpanSamples(size_t last_decoded_length, } size_t span = buffer_.back().timestamp - buffer_.front().timestamp; - size_t waiting_time_samples = rtc::dchecked_cast( + size_t waiting_time_samples = dchecked_cast( buffer_.back().waiting_time->ElapsedMs() * (sample_rate / 1000)); if (count_waiting_time) { span += waiting_time_samples; @@ -404,4 +263,12 @@ bool PacketBuffer::ContainsDtxOrCngPacket( return false; } +void PacketBuffer::LogPacketDiscarded(int codec_level) { + if (codec_level > 0) { + stats_->SecondaryPacketsDiscarded(1); + } else { + stats_->PacketsDiscarded(1); + } +} + } // namespace webrtc diff --git a/modules/audio_coding/neteq/packet_buffer.h b/modules/audio_coding/neteq/packet_buffer.h index 1eef64a02c..547e993a4d 100644 --- a/modules/audio_coding/neteq/packet_buffer.h +++ b/modules/audio_coding/neteq/packet_buffer.h @@ -11,7 +11,8 @@ #ifndef MODULES_AUDIO_CODING_NETEQ_PACKET_BUFFER_H_ #define MODULES_AUDIO_CODING_NETEQ_PACKET_BUFFER_H_ -#include "absl/types/optional.h" +#include + #include "modules/audio_coding/neteq/decoder_database.h" #include "modules/audio_coding/neteq/packet.h" #include "modules/include/module_common_types_public.h" // IsNewerTimestamp @@ -21,14 +22,6 @@ namespace webrtc { class DecoderDatabase; class StatisticsCalculator; class TickTimer; -struct SmartFlushingConfig { - // When calculating the flushing threshold, the maximum between the target - // level and this value is used. - int target_level_threshold_ms = 500; - // A smart flush is triggered when the packet buffer contains a multiple of - // the target level. - int target_level_multiplier = 3; -}; // This is the actual buffer holding the packets before decoding. class PacketBuffer { @@ -36,7 +29,6 @@ class PacketBuffer { enum BufferReturnCodes { kOK = 0, kFlushed, - kPartialFlush, kNotFound, kBufferEmpty, kInvalidPacket, @@ -45,7 +37,9 @@ class PacketBuffer { // Constructor creates a buffer which can hold a maximum of // `max_number_of_packets` packets. 
- PacketBuffer(size_t max_number_of_packets, const TickTimer* tick_timer); + PacketBuffer(size_t max_number_of_packets, + const TickTimer* tick_timer, + StatisticsCalculator* stats); // Deletes all packets in the buffer before destroying the buffer. virtual ~PacketBuffer(); @@ -54,13 +48,7 @@ class PacketBuffer { PacketBuffer& operator=(const PacketBuffer&) = delete; // Flushes the buffer and deletes all packets in it. - virtual void Flush(StatisticsCalculator* stats); - - // Partial flush. Flush packets but leave some packets behind. - virtual void PartialFlush(int target_level_ms, - size_t sample_rate, - size_t last_decoded_length, - StatisticsCalculator* stats); + virtual void Flush(); // Returns true for an empty buffer. virtual bool Empty() const; @@ -69,30 +57,7 @@ class PacketBuffer { // the packet object. // Returns PacketBuffer::kOK on success, PacketBuffer::kFlushed if the buffer // was flushed due to overfilling. - virtual int InsertPacket(Packet&& packet, - StatisticsCalculator* stats, - size_t last_decoded_length, - size_t sample_rate, - int target_level_ms, - const DecoderDatabase& decoder_database); - - // Inserts a list of packets into the buffer. The buffer will take over - // ownership of the packet objects. - // Returns PacketBuffer::kOK if all packets were inserted successfully. - // If the buffer was flushed due to overfilling, only a subset of the list is - // inserted, and PacketBuffer::kFlushed is returned. - // The last three parameters are included for legacy compatibility. - // TODO(hlundin): Redesign to not use current_*_payload_type and - // decoder_database. - virtual int InsertPacketList( - PacketList* packet_list, - const DecoderDatabase& decoder_database, - absl::optional* current_rtp_payload_type, - absl::optional* current_cng_rtp_payload_type, - StatisticsCalculator* stats, - size_t last_decoded_length, - size_t sample_rate, - int target_level_ms); + virtual int InsertPacket(Packet&& packet); // Gets the timestamp for the first packet in the buffer and writes it to the // output variable `next_timestamp`. @@ -114,12 +79,12 @@ class PacketBuffer { // Extracts the first packet in the buffer and returns it. // Returns an empty optional if the buffer is empty. - virtual absl::optional GetNextPacket(); + virtual std::optional GetNextPacket(); // Discards the first packet in the buffer. The packet is deleted. // Returns PacketBuffer::kBufferEmpty if the buffer is empty, // PacketBuffer::kOK otherwise. - virtual int DiscardNextPacket(StatisticsCalculator* stats); + virtual int DiscardNextPacket(); // Discards all packets that are (strictly) older than timestamp_limit, // but newer than timestamp_limit - horizon_samples. Setting horizon_samples @@ -127,16 +92,13 @@ class PacketBuffer { // is, if a packet is more than 2^31 timestamps into the future compared with // timestamp_limit (including wrap-around), it is considered old. virtual void DiscardOldPackets(uint32_t timestamp_limit, - uint32_t horizon_samples, - StatisticsCalculator* stats); + uint32_t horizon_samples); // Discards all packets that are (strictly) older than timestamp_limit. - virtual void DiscardAllOldPackets(uint32_t timestamp_limit, - StatisticsCalculator* stats); + virtual void DiscardAllOldPackets(uint32_t timestamp_limit); // Removes all packets with a specific payload type from the buffer. 
- virtual void DiscardPacketsWithPayloadType(uint8_t payload_type, - StatisticsCalculator* stats); + virtual void DiscardPacketsWithPayloadType(uint8_t payload_type); // Returns the number of packets in the buffer, including duplicates and // redundant packets. @@ -171,10 +133,12 @@ class PacketBuffer { } private: - absl::optional smart_flushing_config_; + void LogPacketDiscarded(int codec_level); + size_t max_number_of_packets_; PacketList buffer_; const TickTimer* tick_timer_; + StatisticsCalculator* stats_; }; } // namespace webrtc diff --git a/modules/audio_coding/neteq/packet_buffer_unittest.cc b/modules/audio_coding/neteq/packet_buffer_unittest.cc index b0079645ff..6e134a17ae 100644 --- a/modules/audio_coding/neteq/packet_buffer_unittest.cc +++ b/modules/audio_coding/neteq/packet_buffer_unittest.cc @@ -36,9 +36,9 @@ class MockEncodedAudioFrame : public webrtc::AudioDecoder::EncodedAudioFrame { MOCK_METHOD(bool, IsDtxPacket, (), (const, override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, Decode, - (rtc::ArrayView decoded), + (webrtc::ArrayView decoded), (const, override)); }; @@ -108,26 +108,23 @@ namespace webrtc { TEST(PacketBuffer, CreateAndDestroy) { TickTimer tick_timer; - PacketBuffer* buffer = new PacketBuffer(10, &tick_timer); // 10 packets. + StrictMock mock_stats(&tick_timer); + PacketBuffer* buffer = + new PacketBuffer(10, &tick_timer, &mock_stats); // 10 packets. EXPECT_TRUE(buffer->Empty()); delete buffer; } TEST(PacketBuffer, InsertPacket) { TickTimer tick_timer; - PacketBuffer buffer(10, &tick_timer); // 10 packets. + StrictMock mock_stats(&tick_timer); + PacketBuffer buffer(10, &tick_timer, &mock_stats); // 10 packets. PacketGenerator gen(17u, 4711u, 0, 10); - StrictMock mock_stats; MockDecoderDatabase decoder_database; const int payload_len = 100; const Packet packet = gen.NextPacket(payload_len, nullptr); - EXPECT_EQ(0, buffer.InsertPacket(/*packet=*/packet.Clone(), - /*stats=*/&mock_stats, - /*last_decoded_length=*/payload_len, - /*sample_rate=*/10000, - /*target_level_ms=*/60, - /*decoder_database=*/decoder_database)); + EXPECT_EQ(0, buffer.InsertPacket(/*packet=*/packet.Clone())); uint32_t next_ts; EXPECT_EQ(PacketBuffer::kOK, buffer.NextTimestamp(&next_ts)); EXPECT_EQ(4711u, next_ts); @@ -144,28 +141,22 @@ TEST(PacketBuffer, InsertPacket) { // Test to flush buffer. TEST(PacketBuffer, FlushBuffer) { TickTimer tick_timer; - PacketBuffer buffer(10, &tick_timer); // 10 packets. + StrictMock mock_stats(&tick_timer); + PacketBuffer buffer(10, &tick_timer, &mock_stats); // 10 packets. PacketGenerator gen(0, 0, 0, 10); const int payload_len = 10; - StrictMock mock_stats; MockDecoderDatabase decoder_database; // Insert 10 small packets; should be ok. for (int i = 0; i < 10; ++i) { - EXPECT_EQ( - PacketBuffer::kOK, - buffer.InsertPacket(/*packet=*/gen.NextPacket(payload_len, nullptr), - /*stats=*/&mock_stats, - /*last_decoded_length=*/payload_len, - /*sample_rate=*/1000, - /*target_level_ms=*/60, - /*decoder_database=*/decoder_database)); + EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(/*packet=*/gen.NextPacket( + payload_len, nullptr))); } EXPECT_EQ(10u, buffer.NumPacketsInBuffer()); EXPECT_FALSE(buffer.Empty()); EXPECT_CALL(mock_stats, PacketsDiscarded(1)).Times(10); - buffer.Flush(&mock_stats); + buffer.Flush(); // Buffer should delete the payloads itself. 
EXPECT_EQ(0u, buffer.NumPacketsInBuffer()); EXPECT_TRUE(buffer.Empty()); @@ -175,23 +166,17 @@ TEST(PacketBuffer, FlushBuffer) { // Test to fill the buffer over the limits, and verify that it flushes. TEST(PacketBuffer, OverfillBuffer) { TickTimer tick_timer; - PacketBuffer buffer(10, &tick_timer); // 10 packets. + StrictMock mock_stats(&tick_timer); + PacketBuffer buffer(10, &tick_timer, &mock_stats); // 10 packets. PacketGenerator gen(0, 0, 0, 10); - StrictMock mock_stats; MockDecoderDatabase decoder_database; // Insert 10 small packets; should be ok. const int payload_len = 10; int i; for (i = 0; i < 10; ++i) { - EXPECT_EQ( - PacketBuffer::kOK, - buffer.InsertPacket(/*packet=*/gen.NextPacket(payload_len, nullptr), - /*stats=*/&mock_stats, - /*last_decoded_length=*/payload_len, - /*sample_rate=*/1000, - /*target_level_ms=*/60, - /*decoder_database=*/decoder_database)); + EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(/*packet=*/gen.NextPacket( + payload_len, nullptr))); } EXPECT_EQ(10u, buffer.NumPacketsInBuffer()); uint32_t next_ts; @@ -202,12 +187,7 @@ TEST(PacketBuffer, OverfillBuffer) { const Packet packet = gen.NextPacket(payload_len, nullptr); // Insert 11th packet; should flush the buffer and insert it after flushing. EXPECT_EQ(PacketBuffer::kFlushed, - buffer.InsertPacket(/*packet=*/packet.Clone(), - /*stats=*/&mock_stats, - /*last_decoded_length=*/payload_len, - /*sample_rate=*/1000, - /*target_level_ms=*/60, - /*decoder_database=*/decoder_database)); + buffer.InsertPacket(/*packet=*/packet.Clone())); EXPECT_EQ(1u, buffer.NumPacketsInBuffer()); EXPECT_EQ(PacketBuffer::kOK, buffer.NextTimestamp(&next_ts)); // Expect last inserted packet to be first in line. @@ -216,190 +196,10 @@ TEST(PacketBuffer, OverfillBuffer) { EXPECT_CALL(decoder_database, Die()); // Called when object is deleted. } -// Test a partial buffer flush. -TEST(PacketBuffer, PartialFlush) { - // Use a field trial to configure smart flushing. - test::ScopedFieldTrials field_trials( - "WebRTC-Audio-NetEqSmartFlushing/enabled:true," - "target_level_threshold_ms:0,target_level_multiplier:2/"); - TickTimer tick_timer; - PacketBuffer buffer(10, &tick_timer); // 10 packets. - PacketGenerator gen(0, 0, 0, 10); - const int payload_len = 10; - StrictMock mock_stats; - MockDecoderDatabase decoder_database; - - // Insert 10 small packets; should be ok. - for (int i = 0; i < 10; ++i) { - EXPECT_EQ( - PacketBuffer::kOK, - buffer.InsertPacket(/*packet=*/gen.NextPacket(payload_len, nullptr), - /*stats=*/&mock_stats, - /*last_decoded_length=*/payload_len, - /*sample_rate=*/1000, - /*target_level_ms=*/100, - /*decoder_database=*/decoder_database)); - } - EXPECT_EQ(10u, buffer.NumPacketsInBuffer()); - EXPECT_FALSE(buffer.Empty()); - - EXPECT_CALL(mock_stats, PacketsDiscarded(1)).Times(7); - buffer.PartialFlush(/*target_level_ms=*/30, - /*sample_rate=*/1000, - /*last_decoded_length=*/payload_len, - /*stats=*/&mock_stats); - // There should still be some packets left in the buffer. - EXPECT_EQ(3u, buffer.NumPacketsInBuffer()); - EXPECT_FALSE(buffer.Empty()); - EXPECT_CALL(decoder_database, Die()); // Called when object is deleted. -} - -// Test to fill the buffer over the limits, and verify that the smart flush -// functionality works as expected. -TEST(PacketBuffer, SmartFlushOverfillBuffer) { - // Use a field trial to configure smart flushing. 
- test::ScopedFieldTrials field_trials( - "WebRTC-Audio-NetEqSmartFlushing/enabled:true," - "target_level_threshold_ms:0,target_level_multiplier:2/"); - TickTimer tick_timer; - PacketBuffer buffer(10, &tick_timer); // 10 packets. - PacketGenerator gen(0, 0, 0, 10); - StrictMock mock_stats; - MockDecoderDatabase decoder_database; - - // Insert 10 small packets; should be ok. - const int payload_len = 10; - int i; - for (i = 0; i < 10; ++i) { - EXPECT_EQ( - PacketBuffer::kOK, - buffer.InsertPacket(/*packet=*/gen.NextPacket(payload_len, nullptr), - /*stats=*/&mock_stats, - /*last_decoded_length=*/payload_len, - /*sample_rate=*/1000, - /*target_level_ms=*/100, - /*decoder_database=*/decoder_database)); - } - EXPECT_EQ(10u, buffer.NumPacketsInBuffer()); - uint32_t next_ts; - EXPECT_EQ(PacketBuffer::kOK, buffer.NextTimestamp(&next_ts)); - EXPECT_EQ(0u, next_ts); // Expect first inserted packet to be first in line. - - const Packet packet = gen.NextPacket(payload_len, nullptr); - EXPECT_CALL(mock_stats, PacketsDiscarded(1)).Times(6); - // Insert 11th packet; should cause a partial flush and insert the packet - // after flushing. - EXPECT_EQ(PacketBuffer::kPartialFlush, - buffer.InsertPacket(/*packet=*/packet.Clone(), - /*stats=*/&mock_stats, - /*last_decoded_length=*/payload_len, - /*sample_rate=*/1000, - /*target_level_ms=*/40, - /*decoder_database=*/decoder_database)); - EXPECT_EQ(5u, buffer.NumPacketsInBuffer()); - EXPECT_CALL(decoder_database, Die()); // Called when object is deleted. -} - -// Test inserting a list of packets. -TEST(PacketBuffer, InsertPacketList) { - TickTimer tick_timer; - PacketBuffer buffer(10, &tick_timer); // 10 packets. - PacketGenerator gen(0, 0, 0, 10); - PacketList list; - const int payload_len = 10; - - // Insert 10 small packets. - for (int i = 0; i < 10; ++i) { - list.push_back(gen.NextPacket(payload_len, nullptr)); - } - - MockDecoderDatabase decoder_database; - auto factory = CreateBuiltinAudioDecoderFactory(); - const DecoderDatabase::DecoderInfo info(SdpAudioFormat("pcmu", 8000, 1), - absl::nullopt, factory.get()); - EXPECT_CALL(decoder_database, GetDecoderInfo(0)) - .WillRepeatedly(Return(&info)); - - StrictMock mock_stats; - - absl::optional current_pt; - absl::optional current_cng_pt; - EXPECT_EQ( - PacketBuffer::kOK, - buffer.InsertPacketList(/*packet_list=*/&list, - /*decoder_database=*/decoder_database, - /*current_rtp_payload_type=*/¤t_pt, - /*current_cng_rtp_payload_type=*/¤t_cng_pt, - /*stats=*/&mock_stats, - /*last_decoded_length=*/payload_len, - /*sample_rate=*/1000, - /*target_level_ms=*/30)); - EXPECT_TRUE(list.empty()); // The PacketBuffer should have depleted the list. - EXPECT_EQ(10u, buffer.NumPacketsInBuffer()); - EXPECT_EQ(0, current_pt); // Current payload type changed to 0. - EXPECT_EQ(absl::nullopt, current_cng_pt); // CNG payload type not changed. - - EXPECT_CALL(decoder_database, Die()); // Called when object is deleted. -} - -// Test inserting a list of packets. Last packet is of a different payload type. -// Expecting the buffer to flush. -// TODO(hlundin): Remove this test when legacy operation is no longer needed. -TEST(PacketBuffer, InsertPacketListChangePayloadType) { - TickTimer tick_timer; - PacketBuffer buffer(10, &tick_timer); // 10 packets. - PacketGenerator gen(0, 0, 0, 10); - PacketList list; - const int payload_len = 10; - - // Insert 10 small packets. - for (int i = 0; i < 10; ++i) { - list.push_back(gen.NextPacket(payload_len, nullptr)); - } - // Insert 11th packet of another payload type (not CNG). 
- { - Packet packet = gen.NextPacket(payload_len, nullptr); - packet.payload_type = 1; - list.push_back(std::move(packet)); - } - - MockDecoderDatabase decoder_database; - auto factory = CreateBuiltinAudioDecoderFactory(); - const DecoderDatabase::DecoderInfo info0(SdpAudioFormat("pcmu", 8000, 1), - absl::nullopt, factory.get()); - EXPECT_CALL(decoder_database, GetDecoderInfo(0)) - .WillRepeatedly(Return(&info0)); - const DecoderDatabase::DecoderInfo info1(SdpAudioFormat("pcma", 8000, 1), - absl::nullopt, factory.get()); - EXPECT_CALL(decoder_database, GetDecoderInfo(1)) - .WillRepeatedly(Return(&info1)); - - StrictMock mock_stats; - - absl::optional current_pt; - absl::optional current_cng_pt; - EXPECT_CALL(mock_stats, PacketsDiscarded(1)).Times(10); - EXPECT_EQ( - PacketBuffer::kFlushed, - buffer.InsertPacketList(/*packet_list=*/&list, - /*decoder_database=*/decoder_database, - /*current_rtp_payload_type=*/¤t_pt, - /*current_cng_rtp_payload_type=*/¤t_cng_pt, - /*stats=*/&mock_stats, - /*last_decoded_length=*/payload_len, - /*sample_rate=*/1000, - /*target_level_ms=*/30)); - EXPECT_TRUE(list.empty()); // The PacketBuffer should have depleted the list. - EXPECT_EQ(1u, buffer.NumPacketsInBuffer()); // Only the last packet. - EXPECT_EQ(1, current_pt); // Current payload type changed to 1. - EXPECT_EQ(absl::nullopt, current_cng_pt); // CNG payload type not changed. - - EXPECT_CALL(decoder_database, Die()); // Called when object is deleted. -} - TEST(PacketBuffer, ExtractOrderRedundancy) { TickTimer tick_timer; - PacketBuffer buffer(100, &tick_timer); // 100 packets. + StrictMock mock_stats(&tick_timer); + PacketBuffer buffer(100, &tick_timer, &mock_stats); // 100 packets. const int kPackets = 18; const int kFrameSize = 10; const int kPayloadLength = 10; @@ -423,8 +223,6 @@ TEST(PacketBuffer, ExtractOrderRedundancy) { PacketGenerator gen(0, 0, 0, kFrameSize); - StrictMock mock_stats; - // Interleaving the EXPECT_CALL sequence with expectations on the MockFunction // check ensures that exactly one call to PacketsDiscarded happens in each // DiscardNextPacket call. @@ -444,12 +242,7 @@ TEST(PacketBuffer, ExtractOrderRedundancy) { } EXPECT_CALL(check, Call(i)); EXPECT_EQ(PacketBuffer::kOK, - buffer.InsertPacket(/*packet=*/packet.Clone(), - /*stats=*/&mock_stats, - /*last_decoded_length=*/kPayloadLength, - /*sample_rate=*/1000, - /*target_level_ms=*/60, - /*decoder_database=*/decoder_database)); + buffer.InsertPacket(/*packet=*/packet.Clone())); if (packet_facts[i].extract_order >= 0) { expect_order[packet_facts[i].extract_order] = std::move(packet); } @@ -459,7 +252,7 @@ TEST(PacketBuffer, ExtractOrderRedundancy) { EXPECT_EQ(kExpectPacketsInBuffer, buffer.NumPacketsInBuffer()); for (size_t i = 0; i < kExpectPacketsInBuffer; ++i) { - const absl::optional packet = buffer.GetNextPacket(); + const std::optional packet = buffer.GetNextPacket(); EXPECT_EQ(packet, expect_order[i]); // Compare contents. } EXPECT_TRUE(buffer.Empty()); @@ -468,25 +261,20 @@ TEST(PacketBuffer, ExtractOrderRedundancy) { TEST(PacketBuffer, DiscardPackets) { TickTimer tick_timer; - PacketBuffer buffer(100, &tick_timer); // 100 packets. + StrictMock mock_stats(&tick_timer); + PacketBuffer buffer(100, &tick_timer, &mock_stats); // 100 packets. 
const uint16_t start_seq_no = 17; const uint32_t start_ts = 4711; const uint32_t ts_increment = 10; PacketGenerator gen(start_seq_no, start_ts, 0, ts_increment); PacketList list; const int payload_len = 10; - StrictMock mock_stats; MockDecoderDatabase decoder_database; constexpr int kTotalPackets = 10; // Insert 10 small packets. for (int i = 0; i < kTotalPackets; ++i) { - buffer.InsertPacket(/*packet=*/gen.NextPacket(payload_len, nullptr), - /*stats=*/&mock_stats, - /*last_decoded_length=*/payload_len, - /*sample_rate=*/1000, - /*target_level_ms=*/60, - /*decoder_database=*/decoder_database); + buffer.InsertPacket(/*packet=*/gen.NextPacket(payload_len, nullptr)); } EXPECT_EQ(10u, buffer.NumPacketsInBuffer()); @@ -507,7 +295,7 @@ TEST(PacketBuffer, DiscardPackets) { EXPECT_EQ(current_ts, ts); EXPECT_CALL(mock_stats, PacketsDiscarded(1)); EXPECT_CALL(check, Call(i)); - EXPECT_EQ(PacketBuffer::kOK, buffer.DiscardNextPacket(&mock_stats)); + EXPECT_EQ(PacketBuffer::kOK, buffer.DiscardNextPacket()); current_ts += ts_increment; check.Call(i); } @@ -520,7 +308,7 @@ TEST(PacketBuffer, DiscardPackets) { .Times(kRemainingPackets - kSkipPackets); EXPECT_CALL(check, Call(17)); // Arbitrary id number. buffer.DiscardOldPackets(start_ts + kTotalPackets * ts_increment, - kRemainingPackets * ts_increment, &mock_stats); + kRemainingPackets * ts_increment); check.Call(17); // Same arbitrary id number. EXPECT_EQ(kSkipPackets, buffer.NumPacketsInBuffer()); @@ -530,8 +318,7 @@ TEST(PacketBuffer, DiscardPackets) { // Discard all remaining packets. EXPECT_CALL(mock_stats, PacketsDiscarded(kSkipPackets)); - buffer.DiscardAllOldPackets(start_ts + kTotalPackets * ts_increment, - &mock_stats); + buffer.DiscardAllOldPackets(start_ts + kTotalPackets * ts_increment); EXPECT_TRUE(buffer.Empty()); EXPECT_CALL(decoder_database, Die()); // Called when object is deleted. @@ -539,7 +326,8 @@ TEST(PacketBuffer, DiscardPackets) { TEST(PacketBuffer, Reordering) { TickTimer tick_timer; - PacketBuffer buffer(100, &tick_timer); // 100 packets. + StrictMock mock_stats(&tick_timer); + PacketBuffer buffer(100, &tick_timer, &mock_stats); // 100 packets. const uint16_t start_seq_no = 17; const uint32_t start_ts = 4711; const uint32_t ts_increment = 10; @@ -559,118 +347,20 @@ TEST(PacketBuffer, Reordering) { } } - MockDecoderDatabase decoder_database; - auto factory = CreateBuiltinAudioDecoderFactory(); - const DecoderDatabase::DecoderInfo info(SdpAudioFormat("pcmu", 8000, 1), - absl::nullopt, factory.get()); - EXPECT_CALL(decoder_database, GetDecoderInfo(0)) - .WillRepeatedly(Return(&info)); - absl::optional current_pt; - absl::optional current_cng_pt; - - StrictMock mock_stats; - - EXPECT_EQ( - PacketBuffer::kOK, - buffer.InsertPacketList(/*packet_list=*/&list, - /*decoder_database=*/decoder_database, - /*current_rtp_payload_type=*/¤t_pt, - /*current_cng_rtp_payload_type=*/¤t_cng_pt, - /*stats=*/&mock_stats, - /*last_decoded_length=*/payload_len, - /*sample_rate=*/1000, - /*target_level_ms=*/30)); + for (Packet& packet : list) { + EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(std::move(packet))); + } EXPECT_EQ(10u, buffer.NumPacketsInBuffer()); // Extract them and make sure that come out in the right order. 
uint32_t current_ts = start_ts; for (int i = 0; i < 10; ++i) { - const absl::optional packet = buffer.GetNextPacket(); + const std::optional packet = buffer.GetNextPacket(); ASSERT_TRUE(packet); EXPECT_EQ(current_ts, packet->timestamp); current_ts += ts_increment; } EXPECT_TRUE(buffer.Empty()); - - EXPECT_CALL(decoder_database, Die()); // Called when object is deleted. -} - -// The test first inserts a packet with narrow-band CNG, then a packet with -// wide-band speech. The expected behavior of the packet buffer is to detect a -// change in sample rate, even though no speech packet has been inserted before, -// and flush out the CNG packet. -TEST(PacketBuffer, CngFirstThenSpeechWithNewSampleRate) { - TickTimer tick_timer; - PacketBuffer buffer(10, &tick_timer); // 10 packets. - const uint8_t kCngPt = 13; - const int kPayloadLen = 10; - const uint8_t kSpeechPt = 100; - - MockDecoderDatabase decoder_database; - auto factory = CreateBuiltinAudioDecoderFactory(); - const DecoderDatabase::DecoderInfo info_cng(SdpAudioFormat("cn", 8000, 1), - absl::nullopt, factory.get()); - EXPECT_CALL(decoder_database, GetDecoderInfo(kCngPt)) - .WillRepeatedly(Return(&info_cng)); - const DecoderDatabase::DecoderInfo info_speech( - SdpAudioFormat("l16", 16000, 1), absl::nullopt, factory.get()); - EXPECT_CALL(decoder_database, GetDecoderInfo(kSpeechPt)) - .WillRepeatedly(Return(&info_speech)); - - // Insert first packet, which is narrow-band CNG. - PacketGenerator gen(0, 0, kCngPt, 10); - PacketList list; - list.push_back(gen.NextPacket(kPayloadLen, nullptr)); - absl::optional current_pt; - absl::optional current_cng_pt; - - StrictMock mock_stats; - - EXPECT_EQ( - PacketBuffer::kOK, - buffer.InsertPacketList(/*packet_list=*/&list, - /*decoder_database=*/decoder_database, - /*current_rtp_payload_type=*/¤t_pt, - /*current_cng_rtp_payload_type=*/¤t_cng_pt, - /*stats=*/&mock_stats, - /*last_decoded_length=*/kPayloadLen, - /*sample_rate=*/1000, - /*target_level_ms=*/30)); - EXPECT_TRUE(list.empty()); - EXPECT_EQ(1u, buffer.NumPacketsInBuffer()); - ASSERT_TRUE(buffer.PeekNextPacket()); - EXPECT_EQ(kCngPt, buffer.PeekNextPacket()->payload_type); - EXPECT_EQ(current_pt, absl::nullopt); // Current payload type not set. - EXPECT_EQ(kCngPt, current_cng_pt); // CNG payload type set. - - // Insert second packet, which is wide-band speech. - { - Packet packet = gen.NextPacket(kPayloadLen, nullptr); - packet.payload_type = kSpeechPt; - list.push_back(std::move(packet)); - } - // Expect the buffer to flush out the CNG packet, since it does not match the - // new speech sample rate. - EXPECT_CALL(mock_stats, PacketsDiscarded(1)); - EXPECT_EQ( - PacketBuffer::kFlushed, - buffer.InsertPacketList(/*packet_list=*/&list, - /*decoder_database=*/decoder_database, - /*current_rtp_payload_type=*/¤t_pt, - /*current_cng_rtp_payload_type=*/¤t_cng_pt, - /*stats=*/&mock_stats, - /*last_decoded_length=*/kPayloadLen, - /*sample_rate=*/1000, - /*target_level_ms=*/30)); - EXPECT_TRUE(list.empty()); - EXPECT_EQ(1u, buffer.NumPacketsInBuffer()); - ASSERT_TRUE(buffer.PeekNextPacket()); - EXPECT_EQ(kSpeechPt, buffer.PeekNextPacket()->payload_type); - - EXPECT_EQ(kSpeechPt, current_pt); // Current payload type set. - EXPECT_EQ(absl::nullopt, current_cng_pt); // CNG payload type reset. - - EXPECT_CALL(decoder_database, Die()); // Called when object is deleted. 
} TEST(PacketBuffer, Failures) { @@ -680,81 +370,27 @@ TEST(PacketBuffer, Failures) { int payload_len = 100; PacketGenerator gen(start_seq_no, start_ts, 0, ts_increment); TickTimer tick_timer; - StrictMock mock_stats; - MockDecoderDatabase decoder_database; + StrictMock mock_stats(&tick_timer); - PacketBuffer* buffer = new PacketBuffer(100, &tick_timer); // 100 packets. + PacketBuffer buffer(100, &tick_timer, &mock_stats); // 100 packets. { Packet packet = gen.NextPacket(payload_len, nullptr); packet.payload.Clear(); EXPECT_EQ(PacketBuffer::kInvalidPacket, - buffer->InsertPacket(/*packet=*/std::move(packet), - /*stats=*/&mock_stats, - /*last_decoded_length=*/payload_len, - /*sample_rate=*/1000, - /*target_level_ms=*/60, - /*decoder_database=*/decoder_database)); + buffer.InsertPacket(/*packet=*/std::move(packet))); } // Buffer should still be empty. Test all empty-checks. uint32_t temp_ts; - EXPECT_EQ(PacketBuffer::kBufferEmpty, buffer->NextTimestamp(&temp_ts)); + EXPECT_EQ(PacketBuffer::kBufferEmpty, buffer.NextTimestamp(&temp_ts)); EXPECT_EQ(PacketBuffer::kBufferEmpty, - buffer->NextHigherTimestamp(0, &temp_ts)); - EXPECT_EQ(NULL, buffer->PeekNextPacket()); - EXPECT_FALSE(buffer->GetNextPacket()); + buffer.NextHigherTimestamp(0, &temp_ts)); + EXPECT_EQ(NULL, buffer.PeekNextPacket()); + EXPECT_FALSE(buffer.GetNextPacket()); // Discarding packets will not invoke mock_stats.PacketDiscarded() because the // packet buffer is empty. - EXPECT_EQ(PacketBuffer::kBufferEmpty, buffer->DiscardNextPacket(&mock_stats)); - buffer->DiscardAllOldPackets(0, &mock_stats); - - // Insert one packet to make the buffer non-empty. - EXPECT_EQ( - PacketBuffer::kOK, - buffer->InsertPacket(/*packet=*/gen.NextPacket(payload_len, nullptr), - /*stats=*/&mock_stats, - /*last_decoded_length=*/payload_len, - /*sample_rate=*/1000, - /*target_level_ms=*/60, - /*decoder_database=*/decoder_database)); - EXPECT_EQ(PacketBuffer::kInvalidPointer, buffer->NextTimestamp(NULL)); - EXPECT_EQ(PacketBuffer::kInvalidPointer, - buffer->NextHigherTimestamp(0, NULL)); - delete buffer; - - // Insert packet list of three packets, where the second packet has an invalid - // payload. Expect first packet to be inserted, and the remaining two to be - // discarded. - buffer = new PacketBuffer(100, &tick_timer); // 100 packets. - PacketList list; - list.push_back(gen.NextPacket(payload_len, nullptr)); // Valid packet. - { - Packet packet = gen.NextPacket(payload_len, nullptr); - packet.payload.Clear(); // Invalid. - list.push_back(std::move(packet)); - } - list.push_back(gen.NextPacket(payload_len, nullptr)); // Valid packet. - auto factory = CreateBuiltinAudioDecoderFactory(); - const DecoderDatabase::DecoderInfo info(SdpAudioFormat("pcmu", 8000, 1), - absl::nullopt, factory.get()); - EXPECT_CALL(decoder_database, GetDecoderInfo(0)) - .WillRepeatedly(Return(&info)); - absl::optional current_pt; - absl::optional current_cng_pt; - EXPECT_EQ( - PacketBuffer::kInvalidPacket, - buffer->InsertPacketList(/*packet_list=*/&list, - /*decoder_database=*/decoder_database, - /*current_rtp_payload_type=*/¤t_pt, - /*current_cng_rtp_payload_type=*/¤t_cng_pt, - /*stats=*/&mock_stats, - /*last_decoded_length=*/payload_len, - /*sample_rate=*/1000, - /*target_level_ms=*/30)); - EXPECT_TRUE(list.empty()); // The PacketBuffer should have depleted the list. - EXPECT_EQ(1u, buffer->NumPacketsInBuffer()); - delete buffer; - EXPECT_CALL(decoder_database, Die()); // Called when object is deleted. 
+ EXPECT_EQ(PacketBuffer::kBufferEmpty, buffer.DiscardNextPacket()); + buffer.DiscardAllOldPackets(0); } // Test packet comparison function. @@ -873,9 +509,9 @@ TEST(PacketBuffer, GetSpanSamples) { constexpr int kSampleRateHz = 48000; constexpr bool kCountWaitingTime = false; TickTimer tick_timer; - PacketBuffer buffer(3, &tick_timer); + StrictMock mock_stats(&tick_timer); + PacketBuffer buffer(3, &tick_timer, &mock_stats); PacketGenerator gen(0, kStartTimeStamp, 0, kFrameSizeSamples); - StrictMock mock_stats; MockDecoderDatabase decoder_database; Packet packet_1 = gen.NextPacket(kPayloadSizeBytes, nullptr); @@ -891,12 +527,7 @@ TEST(PacketBuffer, GetSpanSamples) { packet_2.timestamp); // Tmestamp wrapped around. EXPECT_EQ(PacketBuffer::kOK, - buffer.InsertPacket(/*packet=*/std::move(packet_1), - /*stats=*/&mock_stats, - /*last_decoded_length=*/kFrameSizeSamples, - /*sample_rate=*/1000, - /*target_level_ms=*/60, - /*decoder_database=*/decoder_database)); + buffer.InsertPacket(/*packet=*/std::move(packet_1))); constexpr size_t kLastDecodedSizeSamples = 2; // packet_1 has no access to duration, and relies last decoded duration as @@ -906,12 +537,7 @@ TEST(PacketBuffer, GetSpanSamples) { kCountWaitingTime)); EXPECT_EQ(PacketBuffer::kOK, - buffer.InsertPacket(/*packet=*/std::move(packet_2), - /*stats=*/&mock_stats, - /*last_decoded_length=*/kFrameSizeSamples, - /*sample_rate=*/1000, - /*target_level_ms=*/60, - /*decoder_database=*/decoder_database)); + buffer.InsertPacket(/*packet=*/std::move(packet_2))); EXPECT_EQ(kFrameSizeSamples * 2, buffer.GetSpanSamples(0, kSampleRateHz, kCountWaitingTime)); @@ -931,20 +557,15 @@ TEST(PacketBuffer, GetSpanSamplesCountWaitingTime) { constexpr bool kCountWaitingTime = true; constexpr size_t kLastDecodedSizeSamples = 0; TickTimer tick_timer; - PacketBuffer buffer(3, &tick_timer); + StrictMock mock_stats(&tick_timer); + PacketBuffer buffer(3, &tick_timer, &mock_stats); PacketGenerator gen(0, kStartTimeStamp, 0, kFrameSizeSamples); - StrictMock mock_stats; MockDecoderDatabase decoder_database; Packet packet = gen.NextPacket(kPayloadSizeBytes, nullptr); EXPECT_EQ(PacketBuffer::kOK, - buffer.InsertPacket(/*packet=*/std::move(packet), - /*stats=*/&mock_stats, - /*last_decoded_length=*/kFrameSizeSamples, - /*sample_rate=*/kSampleRateHz, - /*target_level_ms=*/60, - /*decoder_database=*/decoder_database)); + buffer.InsertPacket(/*packet=*/std::move(packet))); EXPECT_EQ(0u, buffer.GetSpanSamples(kLastDecodedSizeSamples, kSampleRateHz, kCountWaitingTime)); diff --git a/modules/audio_coding/neteq/post_decode_vad.cc b/modules/audio_coding/neteq/post_decode_vad.cc deleted file mode 100644 index 9999d6764b..0000000000 --- a/modules/audio_coding/neteq/post_decode_vad.cc +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/neteq/post_decode_vad.h" - -namespace webrtc { - -PostDecodeVad::~PostDecodeVad() { - if (vad_instance_) - WebRtcVad_Free(vad_instance_); -} - -void PostDecodeVad::Enable() { - if (!vad_instance_) { - // Create the instance. - vad_instance_ = WebRtcVad_Create(); - if (vad_instance_ == nullptr) { - // Failed to create instance. 
- Disable(); - return; - } - } - Init(); - enabled_ = true; -} - -void PostDecodeVad::Disable() { - enabled_ = false; - running_ = false; -} - -void PostDecodeVad::Init() { - running_ = false; - if (vad_instance_) { - WebRtcVad_Init(vad_instance_); - WebRtcVad_set_mode(vad_instance_, kVadMode); - running_ = true; - } -} - -void PostDecodeVad::Update(int16_t* signal, - size_t length, - AudioDecoder::SpeechType speech_type, - bool sid_frame, - int fs_hz) { - if (!vad_instance_ || !enabled_) { - return; - } - - if (speech_type == AudioDecoder::kComfortNoise || sid_frame || - fs_hz > 16000) { - // TODO(hlundin): Remove restriction on fs_hz. - running_ = false; - active_speech_ = true; - sid_interval_counter_ = 0; - } else if (!running_) { - ++sid_interval_counter_; - } - - if (sid_interval_counter_ >= kVadAutoEnable) { - Init(); - } - - if (length > 0 && running_) { - size_t vad_sample_index = 0; - active_speech_ = false; - // Loop through frame sizes 30, 20, and 10 ms. - for (int vad_frame_size_ms = 30; vad_frame_size_ms >= 10; - vad_frame_size_ms -= 10) { - size_t vad_frame_size_samples = - static_cast(vad_frame_size_ms * fs_hz / 1000); - while (length - vad_sample_index >= vad_frame_size_samples) { - int vad_return = - WebRtcVad_Process(vad_instance_, fs_hz, &signal[vad_sample_index], - vad_frame_size_samples); - active_speech_ |= (vad_return == 1); - vad_sample_index += vad_frame_size_samples; - } - } - } -} - -} // namespace webrtc diff --git a/modules/audio_coding/neteq/post_decode_vad.h b/modules/audio_coding/neteq/post_decode_vad.h deleted file mode 100644 index 3bd91b9edb..0000000000 --- a/modules/audio_coding/neteq/post_decode_vad.h +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_NETEQ_POST_DECODE_VAD_H_ -#define MODULES_AUDIO_CODING_NETEQ_POST_DECODE_VAD_H_ - -#include -#include - -#include "api/audio_codecs/audio_decoder.h" -#include "common_audio/vad/include/webrtc_vad.h" - -namespace webrtc { - -class PostDecodeVad { - public: - PostDecodeVad() - : enabled_(false), - running_(false), - active_speech_(true), - sid_interval_counter_(0), - vad_instance_(NULL) {} - - virtual ~PostDecodeVad(); - - PostDecodeVad(const PostDecodeVad&) = delete; - PostDecodeVad& operator=(const PostDecodeVad&) = delete; - - // Enables post-decode VAD. - void Enable(); - - // Disables post-decode VAD. - void Disable(); - - // Initializes post-decode VAD. - void Init(); - - // Updates post-decode VAD with the audio data in `signal` having `length` - // samples. The data is of type `speech_type`, at the sample rate `fs_hz`. - void Update(int16_t* signal, - size_t length, - AudioDecoder::SpeechType speech_type, - bool sid_frame, - int fs_hz); - - // Accessors. - bool enabled() const { return enabled_; } - bool running() const { return running_; } - bool active_speech() const { return active_speech_; } - - private: - static const int kVadMode = 0; // Sets aggressiveness to "Normal". - // Number of Update() calls without CNG/SID before re-enabling VAD. 
- static const int kVadAutoEnable = 3000; - - bool enabled_; - bool running_; - bool active_speech_; - int sid_interval_counter_; - ::VadInst* vad_instance_; -}; - -} // namespace webrtc -#endif // MODULES_AUDIO_CODING_NETEQ_POST_DECODE_VAD_H_ diff --git a/modules/audio_coding/neteq/preemptive_expand.cc b/modules/audio_coding/neteq/preemptive_expand.cc index 232170b177..f5d540bf06 100644 --- a/modules/audio_coding/neteq/preemptive_expand.cc +++ b/modules/audio_coding/neteq/preemptive_expand.cc @@ -33,8 +33,7 @@ PreemptiveExpand::ReturnCodes PreemptiveExpand::Process( old_data_length >= input_length / num_channels_ - overlap_samples_) { // Length of input data too short to do preemptive expand. Simply move all // data from input to output. - output->PushBackInterleaved( - rtc::ArrayView(input, input_length)); + output->PushBackInterleaved(ArrayView(input, input_length)); return kError; } const bool kFastMode = false; // Fast mode is not available for PE Expand. @@ -78,17 +77,17 @@ PreemptiveExpand::ReturnCodes PreemptiveExpand::CheckCriteriaAndStretch( size_t unmodified_length = std::max(old_data_length_per_channel_, fs_mult_120); // Copy first part, including cross-fade region. - output->PushBackInterleaved(rtc::ArrayView( + output->PushBackInterleaved(ArrayView( input, (unmodified_length + peak_index) * num_channels_)); // Copy the last `peak_index` samples up to 15 ms to `temp_vector`. AudioMultiVector temp_vector(num_channels_); - temp_vector.PushBackInterleaved(rtc::ArrayView( + temp_vector.PushBackInterleaved(ArrayView( &input[(unmodified_length - peak_index) * num_channels_], peak_index * num_channels_)); // Cross-fade `temp_vector` onto the end of `output`. output->CrossFade(temp_vector, peak_index); // Copy the last unmodified part, 15 ms + pitch period until the end. - output->PushBackInterleaved(rtc::ArrayView( + output->PushBackInterleaved(ArrayView( &input[unmodified_length * num_channels_], input_length - unmodified_length * num_channels_)); @@ -99,8 +98,7 @@ PreemptiveExpand::ReturnCodes PreemptiveExpand::CheckCriteriaAndStretch( } } else { // Accelerate not allowed. Simply move all data from decoded to outData. - output->PushBackInterleaved( - rtc::ArrayView(input, input_length)); + output->PushBackInterleaved(ArrayView(input, input_length)); return kNoStretch; } } diff --git a/modules/audio_coding/neteq/red_payload_splitter.cc b/modules/audio_coding/neteq/red_payload_splitter.cc index 992cd28e62..4c753f553e 100644 --- a/modules/audio_coding/neteq/red_payload_splitter.cc +++ b/modules/audio_coding/neteq/red_payload_splitter.cc @@ -114,8 +114,8 @@ bool RedPayloadSplitter::SplitRed(PacketList* packet_list) { PacketList new_packets; // An empty list to store the split packets in. for (size_t i = 0; i != new_headers.size(); ++i) { const auto& new_header = new_headers[i]; - size_t payload_length = new_header.payload_length; - if (payload_ptr + payload_length > + size_t block_length = new_header.payload_length; + if (payload_ptr + block_length > red_packet.payload.data() + red_packet.payload.size()) { // The block lengths in the RED headers do not match the overall // packet length. Something is corrupt. 
Discard this and the remaining @@ -130,10 +130,10 @@ bool RedPayloadSplitter::SplitRed(PacketList* packet_list) { new_packet.payload_type = new_header.payload_type; new_packet.sequence_number = red_packet.sequence_number; new_packet.priority.red_level = - rtc::dchecked_cast((new_headers.size() - 1) - i); - new_packet.payload.SetData(payload_ptr, payload_length); + dchecked_cast((new_headers.size() - 1) - i); + new_packet.payload.SetData(payload_ptr, block_length); new_packets.push_front(std::move(new_packet)); - payload_ptr += payload_length; + payload_ptr += block_length; } // Insert new packets into original list, before the element pointed to by // iterator `it`. diff --git a/modules/audio_coding/neteq/red_payload_splitter_unittest.cc b/modules/audio_coding/neteq/red_payload_splitter_unittest.cc index 55f9bee272..7a5dde0056 100644 --- a/modules/audio_coding/neteq/red_payload_splitter_unittest.cc +++ b/modules/audio_coding/neteq/red_payload_splitter_unittest.cc @@ -16,6 +16,8 @@ #include // pair #include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "modules/audio_coding/neteq/decoder_database.h" #include "modules/audio_coding/neteq/packet.h" #include "rtc_base/numerics/safe_conversions.h" @@ -98,7 +100,7 @@ Packet CreateRedPayload(size_t num_payloads, *payload_ptr |= 0x80; ++payload_ptr; int this_offset = - rtc::checked_cast((num_payloads - i - 1) * timestamp_offset); + checked_cast((num_payloads - i - 1) * timestamp_offset); *payload_ptr = this_offset >> 6; ++payload_ptr; RTC_DCHECK_LE(kPayloadLength, 1023); // Max length described by 10 bits. @@ -281,11 +283,12 @@ TEST(RedPayloadSplitter, TwoPacketsThreePayloads) { // 0 = CNGnb // 1 = PCMu // 2 = DTMF (AVT) -// 3 = iLBC -// We expect the method CheckRedPayloads to discard the iLBC packet, since it +// 3 = PCMa +// We expect the method CheckRedPayloads to discard the PCMa packet, since it // is a non-CNG, non-DTMF payload of another type than the first speech payload // found in the list (which is PCMu). TEST(RedPayloadSplitter, CheckRedPayloads) { + const Environment env = CreateEnvironment(); PacketList packet_list; for (uint8_t i = 0; i <= 3; ++i) { // Create packet with payload type `i`, payload length 10 bytes, all 0. @@ -296,12 +299,12 @@ TEST(RedPayloadSplitter, CheckRedPayloads) { // easier to just register the payload types and let the actual implementation // do its job. DecoderDatabase decoder_database( - rtc::make_ref_counted(), absl::nullopt); + env, make_ref_counted(), std::nullopt); decoder_database.RegisterPayload(0, SdpAudioFormat("cn", 8000, 1)); decoder_database.RegisterPayload(1, SdpAudioFormat("pcmu", 8000, 1)); decoder_database.RegisterPayload(2, SdpAudioFormat("telephone-event", 8000, 1)); - decoder_database.RegisterPayload(3, SdpAudioFormat("ilbc", 8000, 1)); + decoder_database.RegisterPayload(1, SdpAudioFormat("pcma", 8000, 1)); RedPayloadSplitter splitter; splitter.CheckRedPayloads(&packet_list, decoder_database); @@ -321,6 +324,7 @@ TEST(RedPayloadSplitter, CheckRedPayloads) { // for RED. That is, some kind of weird nested RED packet. This is not supported // and the splitter should discard all packets. TEST(RedPayloadSplitter, CheckRedPayloadsRecursiveRed) { + const Environment env = CreateEnvironment(); PacketList packet_list; for (uint8_t i = 0; i <= 3; ++i) { // Create packet with RED payload type, payload length 10 bytes, all 0. 
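SplitRed above walks the RFC 2198 RED header chain and bails out when a block length runs past the end of the packet. A minimal, self-contained sketch of parsing that header chain is shown below; the names are hypothetical, the consistency check of each block's payload against the overall packet size (done by the real splitter) is omitted, and the real code also copies every block into its own Packet.

```cpp
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

// Hypothetical RFC 2198 RED block header. Every header but the last is
// 4 bytes: F(1) + payload type(7), timestamp offset(14), block length(10).
// The last header is a single byte with F = 0.
struct RedBlockHeader {
  uint8_t payload_type = 0;
  uint16_t timestamp_offset = 0;  // 14 bits.
  size_t payload_length = 0;      // 10 bits; 0 here means "rest of packet".
  bool last = false;
};

// Parses the chain of RED headers; returns false if the header list is
// truncated or never terminated.
bool ParseRedHeaders(const std::vector<uint8_t>& payload,
                     std::vector<RedBlockHeader>* headers) {
  size_t i = 0;
  while (i < payload.size()) {
    const bool more_blocks = (payload[i] & 0x80) != 0;
    RedBlockHeader header;
    header.payload_type = payload[i] & 0x7F;
    header.last = !more_blocks;
    if (!more_blocks) {  // 1-byte final header; block data follows.
      headers->push_back(header);
      return true;
    }
    if (i + 4 > payload.size()) return false;  // Truncated header: corrupt.
    header.timestamp_offset =
        static_cast<uint16_t>((payload[i + 1] << 6) | (payload[i + 2] >> 2));
    header.payload_length =
        (static_cast<size_t>(payload[i + 2] & 0x03) << 8) | payload[i + 3];
    headers->push_back(header);
    i += 4;
  }
  return false;  // Ran out of bytes without seeing a final header.
}

int main() {
  // One redundant block (PT 0, offset 160, length 10) + a final block (PT 0).
  std::vector<uint8_t> red = {0x80, 0x02, 0x80, 0x0A, 0x00};
  std::vector<RedBlockHeader> headers;
  std::cout << (ParseRedHeaders(red, &headers) ? "ok" : "corrupt") << ", "
            << headers.size() << " headers\n";  // ok, 2 headers
}
```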
@@ -331,7 +335,7 @@ TEST(RedPayloadSplitter, CheckRedPayloadsRecursiveRed) { // easier to just register the payload types and let the actual implementation // do its job. DecoderDatabase decoder_database( - rtc::make_ref_counted(), absl::nullopt); + env, make_ref_counted(), std::nullopt); decoder_database.RegisterPayload(kRedPayloadType, SdpAudioFormat("red", 8000, 1)); diff --git a/modules/audio_coding/neteq/reorder_optimizer.cc b/modules/audio_coding/neteq/reorder_optimizer.cc index f6e073fc88..b3d9154a9a 100644 --- a/modules/audio_coding/neteq/reorder_optimizer.cc +++ b/modules/audio_coding/neteq/reorder_optimizer.cc @@ -10,6 +10,8 @@ #include "modules/audio_coding/neteq/reorder_optimizer.h" +#include + #include #include #include @@ -25,7 +27,7 @@ constexpr int kBucketSizeMs = 20; ReorderOptimizer::ReorderOptimizer(int forget_factor, int ms_per_loss_percent, - absl::optional start_forget_weight) + std::optional start_forget_weight) : histogram_(kDelayBuckets, forget_factor, start_forget_weight), ms_per_loss_percent_(ms_per_loss_percent) {} diff --git a/modules/audio_coding/neteq/reorder_optimizer.h b/modules/audio_coding/neteq/reorder_optimizer.h index 06f6bc7e50..3c5e8fa086 100644 --- a/modules/audio_coding/neteq/reorder_optimizer.h +++ b/modules/audio_coding/neteq/reorder_optimizer.h @@ -11,7 +11,8 @@ #ifndef MODULES_AUDIO_CODING_NETEQ_REORDER_OPTIMIZER_H_ #define MODULES_AUDIO_CODING_NETEQ_REORDER_OPTIMIZER_H_ -#include "absl/types/optional.h" +#include + #include "modules/audio_coding/neteq/histogram.h" namespace webrtc { @@ -23,11 +24,11 @@ class ReorderOptimizer { public: ReorderOptimizer(int forget_factor, int ms_per_loss_percent, - absl::optional start_forget_weight); + std::optional start_forget_weight); void Update(int relative_delay_ms, bool reordered, int base_delay_ms); - absl::optional GetOptimalDelayMs() const { return optimal_delay_ms_; } + std::optional GetOptimalDelayMs() const { return optimal_delay_ms_; } void Reset(); @@ -36,7 +37,7 @@ class ReorderOptimizer { Histogram histogram_; const int ms_per_loss_percent_; - absl::optional optimal_delay_ms_; + std::optional optimal_delay_ms_; }; } // namespace webrtc diff --git a/modules/audio_coding/neteq/statistics_calculator.cc b/modules/audio_coding/neteq/statistics_calculator.cc index 70cfc2b3a8..69e14608a0 100644 --- a/modules/audio_coding/neteq/statistics_calculator.cc +++ b/modules/audio_coding/neteq/statistics_calculator.cc @@ -112,7 +112,7 @@ void StatisticsCalculator::PeriodicUmaAverage::Reset() { counter_ = 0; } -StatisticsCalculator::StatisticsCalculator() +StatisticsCalculator::StatisticsCalculator(TickTimer* tick_timer) : preemptive_samples_(0), accelerate_samples_(0), expanded_speech_samples_(0), @@ -129,7 +129,13 @@ StatisticsCalculator::StatisticsCalculator() 1000), buffer_full_counter_("WebRTC.Audio.JitterBufferFullPerMinute", 60000, // 60 seconds report interval. - 100) {} + 100), + expand_uma_logger_("WebRTC.Audio.ExpandRatePercent", + 10, // Report once every 10 s. + tick_timer), + speech_expand_uma_logger_("WebRTC.Audio.SpeechExpandRatePercent", + 10, // Report once every 10 s. 
+ tick_timer) {} StatisticsCalculator::~StatisticsCalculator() = default; @@ -149,25 +155,37 @@ void StatisticsCalculator::ResetMcu() { void StatisticsCalculator::ExpandedVoiceSamples(size_t num_samples, bool is_new_concealment_event) { + if (!decoded_output_played_) { + return; + } expanded_speech_samples_ += num_samples; - ConcealedSamplesCorrection(rtc::dchecked_cast(num_samples), true); + ConcealedSamplesCorrection(dchecked_cast(num_samples), true); lifetime_stats_.concealment_events += is_new_concealment_event; } void StatisticsCalculator::ExpandedNoiseSamples(size_t num_samples, bool is_new_concealment_event) { + if (!decoded_output_played_) { + return; + } expanded_noise_samples_ += num_samples; - ConcealedSamplesCorrection(rtc::dchecked_cast(num_samples), false); + ConcealedSamplesCorrection(dchecked_cast(num_samples), false); lifetime_stats_.concealment_events += is_new_concealment_event; } void StatisticsCalculator::ExpandedVoiceSamplesCorrection(int num_samples) { + if (!decoded_output_played_) { + return; + } expanded_speech_samples_ = AddIntToSizeTWithLowerCap(num_samples, expanded_speech_samples_); ConcealedSamplesCorrection(num_samples, true); } void StatisticsCalculator::ExpandedNoiseSamplesCorrection(int num_samples) { + if (!decoded_output_played_) { + return; + } expanded_noise_samples_ = AddIntToSizeTWithLowerCap(num_samples, expanded_noise_samples_); ConcealedSamplesCorrection(num_samples, false); @@ -178,6 +196,9 @@ void StatisticsCalculator::DecodedOutputPlayed() { } void StatisticsCalculator::EndExpandEvent(int fs_hz) { + if (!decoded_output_played_) { + return; + } RTC_DCHECK_GE(lifetime_stats_.concealed_samples, concealed_samples_at_event_end_); const int event_duration_ms = @@ -195,6 +216,9 @@ void StatisticsCalculator::EndExpandEvent(int fs_hz) { void StatisticsCalculator::ConcealedSamplesCorrection(int num_samples, bool is_voice) { + if (!decoded_output_played_) { + return; + } if (num_samples < 0) { // Store negative correction to subtract from future positive additions. // See also the function comment in the header file. 
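The new ExpandUmaLogger members report WebRTC.Audio.ExpandRatePercent and WebRTC.Audio.SpeechExpandRatePercent once every 10 s from the cumulative concealment counters updated in IncreaseCounter. As an illustration only — not the real ExpandUmaLogger — the sketch below shows how a per-interval percentage can be derived from two monotonically increasing counters:

```cpp
#include <cstdint>
#include <iostream>

// Hypothetical sketch of an interval-based "expand rate" metric: the share of
// concealed samples among all samples emitted during one report interval,
// computed from cumulative counters.
class IntervalRateLogger {
 public:
  // Call once per report interval with the *cumulative* counters so far.
  void Report(uint64_t concealed_samples_total, uint64_t output_samples_total) {
    const uint64_t concealed_delta = concealed_samples_total - last_concealed_;
    const uint64_t output_delta = output_samples_total - last_output_;
    last_concealed_ = concealed_samples_total;
    last_output_ = output_samples_total;
    if (output_delta == 0) return;  // Nothing played out; skip this interval.
    const int rate_percent =
        static_cast<int>(100 * concealed_delta / output_delta);
    std::cout << "ExpandRatePercent (this interval): " << rate_percent << "\n";
  }

 private:
  uint64_t last_concealed_ = 0;
  uint64_t last_output_ = 0;
};

int main() {
  IntervalRateLogger logger;
  logger.Report(/*concealed=*/480, /*output=*/4800);    // 10% in interval 1.
  logger.Report(/*concealed=*/480, /*output=*/9600);    // 0% in interval 2.
  logger.Report(/*concealed=*/1440, /*output=*/14400);  // 20% in interval 3.
}
```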
@@ -220,18 +244,27 @@ void StatisticsCalculator::ConcealedSamplesCorrection(int num_samples, } void StatisticsCalculator::PreemptiveExpandedSamples(size_t num_samples) { + if (!decoded_output_played_) { + return; + } preemptive_samples_ += num_samples; operations_and_state_.preemptive_samples += num_samples; lifetime_stats_.inserted_samples_for_deceleration += num_samples; } void StatisticsCalculator::AcceleratedSamples(size_t num_samples) { + if (!decoded_output_played_) { + return; + } accelerate_samples_ += num_samples; operations_and_state_.accelerate_samples += num_samples; lifetime_stats_.removed_samples_for_acceleration += num_samples; } void StatisticsCalculator::GeneratedNoiseSamples(size_t num_samples) { + if (!decoded_output_played_) { + return; + } lifetime_stats_.generated_noise_samples += num_samples; } @@ -249,8 +282,11 @@ void StatisticsCalculator::SecondaryPacketsReceived(size_t num_packets) { } void StatisticsCalculator::IncreaseCounter(size_t num_samples, int fs_hz) { + if (!decoded_output_played_) { + return; + } const int time_step_ms = - rtc::CheckedDivExact(static_cast(1000 * num_samples), fs_hz); + CheckedDivExact(static_cast(1000 * num_samples), fs_hz); delayed_packet_outage_counter_.AdvanceClock(time_step_ms); excess_buffer_delay_.AdvanceClock(time_step_ms); buffer_full_counter_.AdvanceClock(time_step_ms); @@ -260,19 +296,31 @@ void StatisticsCalculator::IncreaseCounter(size_t num_samples, int fs_hz) { timestamps_since_last_report_ = 0; } lifetime_stats_.total_samples_received += num_samples; + expand_uma_logger_.UpdateSampleCounter(lifetime_stats_.concealed_samples, + fs_hz); + uint64_t speech_concealed_samples = 0; + if (lifetime_stats_.concealed_samples > + lifetime_stats_.silent_concealed_samples) { + speech_concealed_samples = lifetime_stats_.concealed_samples - + lifetime_stats_.silent_concealed_samples; + } + speech_expand_uma_logger_.UpdateSampleCounter(speech_concealed_samples, + fs_hz); } -void StatisticsCalculator::JitterBufferDelay( - size_t num_samples, - uint64_t waiting_time_ms, - uint64_t target_delay_ms, - uint64_t unlimited_target_delay_ms) { +void StatisticsCalculator::JitterBufferDelay(size_t num_samples, + uint64_t waiting_time_ms, + uint64_t target_delay_ms, + uint64_t unlimited_target_delay_ms, + uint64_t processing_delay_us) { lifetime_stats_.jitter_buffer_delay_ms += waiting_time_ms * num_samples; lifetime_stats_.jitter_buffer_target_delay_ms += target_delay_ms * num_samples; lifetime_stats_.jitter_buffer_minimum_delay_ms += unlimited_target_delay_ms * num_samples; lifetime_stats_.jitter_buffer_emitted_count += num_samples; + lifetime_stats_.total_processing_delay_us += + num_samples * processing_delay_us; } void StatisticsCalculator::SecondaryDecodedSamples(int num_samples) { diff --git a/modules/audio_coding/neteq/statistics_calculator.h b/modules/audio_coding/neteq/statistics_calculator.h index 33a22d02dd..28ac613bc7 100644 --- a/modules/audio_coding/neteq/statistics_calculator.h +++ b/modules/audio_coding/neteq/statistics_calculator.h @@ -16,6 +16,7 @@ #include "absl/strings/string_view.h" #include "api/neteq/neteq.h" +#include "modules/audio_coding/neteq/expand_uma_logger.h" namespace webrtc { @@ -24,7 +25,7 @@ class DelayManager; // This class handles various network statistics in NetEq. 
class StatisticsCalculator { public: - StatisticsCalculator(); + StatisticsCalculator(TickTimer* tick_timer); virtual ~StatisticsCalculator(); @@ -86,7 +87,8 @@ class StatisticsCalculator { void JitterBufferDelay(size_t num_samples, uint64_t waiting_time_ms, uint64_t target_delay_ms, - uint64_t unlimited_target_delay_ms); + uint64_t unlimited_target_delay_ms, + uint64_t processing_delay_us); // Stores new packet waiting time in waiting time statistics. void StoreWaitingTime(int waiting_time_ms); @@ -204,6 +206,8 @@ class StatisticsCalculator { PeriodicUmaAverage excess_buffer_delay_; PeriodicUmaCount buffer_full_counter_; bool decoded_output_played_ = false; + ExpandUmaLogger expand_uma_logger_; + ExpandUmaLogger speech_expand_uma_logger_; }; } // namespace webrtc diff --git a/modules/audio_coding/neteq/statistics_calculator_unittest.cc b/modules/audio_coding/neteq/statistics_calculator_unittest.cc index 491cd83dc4..a8ad359ab0 100644 --- a/modules/audio_coding/neteq/statistics_calculator_unittest.cc +++ b/modules/audio_coding/neteq/statistics_calculator_unittest.cc @@ -15,7 +15,9 @@ namespace webrtc { TEST(LifetimeStatistics, TotalSamplesReceived) { - StatisticsCalculator stats; + TickTimer timer; + StatisticsCalculator stats(&timer); + stats.DecodedOutputPlayed(); for (int i = 0; i < 10; ++i) { stats.IncreaseCounter(480, 48000); // 10 ms at 48 kHz. } @@ -23,7 +25,9 @@ TEST(LifetimeStatistics, TotalSamplesReceived) { } TEST(LifetimeStatistics, SamplesConcealed) { - StatisticsCalculator stats; + TickTimer timer; + StatisticsCalculator stats(&timer); + stats.DecodedOutputPlayed(); stats.ExpandedVoiceSamples(100, false); stats.ExpandedNoiseSamples(17, false); EXPECT_EQ(100u + 17u, stats.GetLifetimeStatistics().concealed_samples); @@ -34,7 +38,9 @@ TEST(LifetimeStatistics, SamplesConcealed) { // would not expect the value to decrease). Instead, the correction should be // made to future increments to the stat. TEST(LifetimeStatistics, SamplesConcealedCorrection) { - StatisticsCalculator stats; + TickTimer timer; + StatisticsCalculator stats(&timer); + stats.DecodedOutputPlayed(); stats.ExpandedVoiceSamples(100, false); EXPECT_EQ(100u, stats.GetLifetimeStatistics().concealed_samples); stats.ExpandedVoiceSamplesCorrection(-10); @@ -55,7 +61,9 @@ TEST(LifetimeStatistics, SamplesConcealedCorrection) { // in a modification to concealed_samples stats. Only PLC operations (i.e., // "expand" and "merge") should affect the stat. 
TEST(LifetimeStatistics, NoUpdateOnTimeStretch) { - StatisticsCalculator stats; + TickTimer timer; + StatisticsCalculator stats(&timer); + stats.DecodedOutputPlayed(); stats.ExpandedVoiceSamples(100, false); stats.AcceleratedSamples(4711); stats.PreemptiveExpandedSamples(17); @@ -64,7 +72,9 @@ TEST(LifetimeStatistics, NoUpdateOnTimeStretch) { } TEST(StatisticsCalculator, ExpandedSamplesCorrection) { - StatisticsCalculator stats; + TickTimer timer; + StatisticsCalculator stats(&timer); + stats.DecodedOutputPlayed(); NetEqNetworkStatistics stats_output; constexpr int kSampleRateHz = 48000; constexpr int k10MsSamples = kSampleRateHz / 100; @@ -100,7 +110,8 @@ TEST(StatisticsCalculator, ExpandedSamplesCorrection) { } TEST(StatisticsCalculator, RelativePacketArrivalDelay) { - StatisticsCalculator stats; + TickTimer timer; + StatisticsCalculator stats(&timer); stats.RelativePacketArrivalDelay(50); NetEqLifetimeStatistics stats_output = stats.GetLifetimeStatistics(); @@ -112,7 +123,8 @@ TEST(StatisticsCalculator, RelativePacketArrivalDelay) { } TEST(StatisticsCalculator, ReceivedPacket) { - StatisticsCalculator stats; + TickTimer timer; + StatisticsCalculator stats(&timer); stats.ReceivedPacket(); NetEqLifetimeStatistics stats_output = stats.GetLifetimeStatistics(); @@ -126,7 +138,8 @@ TEST(StatisticsCalculator, ReceivedPacket) { TEST(StatisticsCalculator, InterruptionCounter) { constexpr int fs_khz = 48; constexpr int fs_hz = fs_khz * 1000; - StatisticsCalculator stats; + TickTimer timer; + StatisticsCalculator stats(&timer); stats.DecodedOutputPlayed(); stats.EndExpandEvent(fs_hz); auto lts = stats.GetLifetimeStatistics(); @@ -160,7 +173,8 @@ TEST(StatisticsCalculator, InterruptionCounter) { TEST(StatisticsCalculator, InterruptionCounterDoNotLogBeforeDecoding) { constexpr int fs_khz = 48; constexpr int fs_hz = fs_khz * 1000; - StatisticsCalculator stats; + TickTimer timer; + StatisticsCalculator stats(&timer); // Add an event that is longer than 150 ms. Should normally be logged, but we // have not called DecodedOutputPlayed() yet, so it shouldn't this time. 
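The early returns added in statistics_calculator.cc make most counters inert until DecodedOutputPlayed() has been called, which the new CountStatsAfterFirstDecodedPacket test further down checks directly. A minimal sketch of that gating pattern, with hypothetical names rather than the real StatisticsCalculator:

```cpp
#include <cstddef>
#include <cstdint>
#include <iostream>

// Hypothetical sketch of a "don't count until playout has started" gate.
class GatedStats {
 public:
  void DecodedOutputPlayed() { decoded_output_played_ = true; }

  void IncreaseCounter(size_t num_samples) {
    if (!decoded_output_played_) return;  // Ignore pre-playout activity.
    total_samples_received_ += num_samples;
  }

  uint64_t total_samples_received() const { return total_samples_received_; }

 private:
  bool decoded_output_played_ = false;
  uint64_t total_samples_received_ = 0;
};

int main() {
  GatedStats stats;
  stats.IncreaseCounter(480);  // Dropped: playout has not started yet.
  std::cout << stats.total_samples_received() << "\n";  // 0
  stats.DecodedOutputPlayed();
  stats.IncreaseCounter(480);  // Counted.
  std::cout << stats.total_samples_received() << "\n";  // 480
}
```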
@@ -180,7 +194,8 @@ TEST(StatisticsCalculator, InterruptionCounterDoNotLogBeforeDecoding) { } TEST(StatisticsCalculator, DiscardedPackets) { - StatisticsCalculator statistics_calculator; + TickTimer timer; + StatisticsCalculator statistics_calculator(&timer); EXPECT_EQ(0u, statistics_calculator.GetLifetimeStatistics().packets_discarded); @@ -203,4 +218,49 @@ TEST(StatisticsCalculator, DiscardedPackets) { statistics_calculator.GetLifetimeStatistics().packets_discarded); } +TEST(StatisticsCalculator, JitterBufferDelay) { + TickTimer timer; + StatisticsCalculator stats(&timer); + NetEqLifetimeStatistics lts; + lts = stats.GetLifetimeStatistics(); + EXPECT_EQ(lts.total_processing_delay_us, 0ul); + stats.JitterBufferDelay(/*num_samples=*/480, + /*waiting_time_ms=*/90ul, + /*target_delay_ms=*/80ul, + /*unlimited_target_delay_ms=*/70, + /*processing_delay_us=*/100 * 1000ul); + lts = stats.GetLifetimeStatistics(); + EXPECT_EQ(lts.jitter_buffer_delay_ms / 480, 90ul); + EXPECT_EQ(lts.jitter_buffer_target_delay_ms / 480, 80ul); + EXPECT_EQ(lts.jitter_buffer_minimum_delay_ms / 480, 70ul); + EXPECT_EQ(lts.total_processing_delay_us / 480, 100 * 1000ul); + EXPECT_EQ(lts.jitter_buffer_emitted_count, 480ul); + stats.JitterBufferDelay(/*num_samples=*/480, + /*waiting_time_ms=*/90ul, + /*target_delay_ms=*/80ul, + /*unlimited_target_delay_ms=*/70, + /*processing_delay_us=*/100 * 1000ul); + lts = stats.GetLifetimeStatistics(); + EXPECT_EQ(lts.jitter_buffer_delay_ms / 960, 90ul); + EXPECT_EQ(lts.jitter_buffer_target_delay_ms / 960, 80ul); + EXPECT_EQ(lts.jitter_buffer_minimum_delay_ms / 960, 70ul); + EXPECT_EQ(lts.total_processing_delay_us / 960, 100 * 1000ul); + EXPECT_EQ(lts.jitter_buffer_emitted_count, 960ul); +} + +TEST(StatisticsCalculator, CountStatsAfterFirstDecodedPacket) { + TickTimer timer; + StatisticsCalculator stats(&timer); + stats.IncreaseCounter(/*num_samples=*/480, /*fs_hz=*/48000); + stats.ExpandedVoiceSamples(/*num_samples=*/480, + /*is_new_concealment_event=*/true); + NetEqLifetimeStatistics lts = stats.GetLifetimeStatistics(); + EXPECT_EQ(lts.total_samples_received, 0u); + EXPECT_EQ(lts.concealed_samples, 0u); + stats.DecodedOutputPlayed(); + stats.IncreaseCounter(/*num_samples=*/480, /*fs_hz=*/48000); + lts = stats.GetLifetimeStatistics(); + EXPECT_EQ(lts.total_samples_received, 480u); +} + } // namespace webrtc diff --git a/modules/audio_coding/neteq/sync_buffer.cc b/modules/audio_coding/neteq/sync_buffer.cc index 7d7cac7157..5144462cae 100644 --- a/modules/audio_coding/neteq/sync_buffer.cc +++ b/modules/audio_coding/neteq/sync_buffer.cc @@ -37,7 +37,7 @@ void SyncBuffer::PushBack(const AudioMultiVector& append_this) { dtmf_index_ -= std::min(dtmf_index_, samples_added); } -void SyncBuffer::PushBackInterleaved(const rtc::BufferT& append_this) { +void SyncBuffer::PushBackInterleaved(const BufferT& append_this) { const size_t size_before_adding = Size(); AudioMultiVector::PushBackInterleaved(append_this); const size_t samples_added_per_channel = Size() - size_before_adding; diff --git a/modules/audio_coding/neteq/sync_buffer.h b/modules/audio_coding/neteq/sync_buffer.h index cf56c432e3..ea81babca6 100644 --- a/modules/audio_coding/neteq/sync_buffer.h +++ b/modules/audio_coding/neteq/sync_buffer.h @@ -44,7 +44,7 @@ class SyncBuffer : public AudioMultiVector { void PushBack(const AudioMultiVector& append_this) override; // Like PushBack, but reads the samples channel-interleaved from the input. 
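In JitterBufferDelay, every delay term (including the new processing_delay_us) is accumulated weighted by num_samples, so dividing by jitter_buffer_emitted_count later recovers the average per emitted sample — exactly what the JitterBufferDelay test above verifies with its /480 and /960 divisions. A small standalone sketch of that bookkeeping, using a hypothetical DelayTotals struct:

```cpp
#include <cstdint>
#include <iostream>

// Hypothetical sketch of sample-weighted delay accumulation: each call adds
// delay * num_samples, so total / emitted_count is the per-sample average.
struct DelayTotals {
  uint64_t jitter_buffer_delay_ms = 0;
  uint64_t total_processing_delay_us = 0;
  uint64_t emitted_count = 0;

  void Add(uint64_t num_samples, uint64_t waiting_time_ms,
           uint64_t processing_delay_us) {
    jitter_buffer_delay_ms += waiting_time_ms * num_samples;
    total_processing_delay_us += processing_delay_us * num_samples;
    emitted_count += num_samples;
  }
};

int main() {
  DelayTotals totals;
  totals.Add(480, /*waiting_time_ms=*/90, /*processing_delay_us=*/100000);
  totals.Add(480, /*waiting_time_ms=*/90, /*processing_delay_us=*/100000);
  std::cout << "avg waiting ms:    "
            << totals.jitter_buffer_delay_ms / totals.emitted_count
            << "\n";  // 90
  std::cout << "avg processing us: "
            << totals.total_processing_delay_us / totals.emitted_count
            << "\n";  // 100000
}
```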
- void PushBackInterleaved(const rtc::BufferT& append_this); + void PushBackInterleaved(const BufferT& append_this); // Adds `length` zeros to the beginning of each channel. Removes // the same number of samples from the end of the SyncBuffer, to diff --git a/modules/audio_coding/neteq/sync_buffer_unittest.cc b/modules/audio_coding/neteq/sync_buffer_unittest.cc index bdcd92446b..f4057cf3bc 100644 --- a/modules/audio_coding/neteq/sync_buffer_unittest.cc +++ b/modules/audio_coding/neteq/sync_buffer_unittest.cc @@ -58,7 +58,7 @@ TEST(SyncBuffer, PushBackAndFlush) { // Populate `new_data`. for (size_t channel = 0; channel < kChannels; ++channel) { for (size_t i = 0; i < kNewLen; ++i) { - new_data[channel][i] = rtc::checked_cast(i); + new_data[channel][i] = checked_cast(i); } } // Push back `new_data` into `sync_buffer`. This operation should pop out @@ -98,7 +98,7 @@ TEST(SyncBuffer, PushFrontZeros) { // Populate `new_data`. for (size_t channel = 0; channel < kChannels; ++channel) { for (size_t i = 0; i < kNewLen; ++i) { - new_data[channel][i] = rtc::checked_cast(1000 + i); + new_data[channel][i] = checked_cast(1000 + i); } } sync_buffer.PushBack(new_data); @@ -131,7 +131,7 @@ TEST(SyncBuffer, GetNextAudioInterleaved) { // Populate `new_data`. for (size_t channel = 0; channel < kChannels; ++channel) { for (size_t i = 0; i < kNewLen; ++i) { - new_data[channel][i] = rtc::checked_cast(i); + new_data[channel][i] = checked_cast(i); } } // Push back `new_data` into `sync_buffer`. This operation should pop out diff --git a/modules/audio_coding/neteq/test/delay_tool/parse_delay_file.m b/modules/audio_coding/neteq/test/delay_tool/parse_delay_file.m index 031d8a39ee..26391243f2 100644 --- a/modules/audio_coding/neteq/test/delay_tool/parse_delay_file.m +++ b/modules/audio_coding/neteq/test/delay_tool/parse_delay_file.m @@ -1,3 +1,4 @@ +% // clang-format off % % Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. % diff --git a/modules/audio_coding/neteq/test/delay_tool/plot_neteq_delay.m b/modules/audio_coding/neteq/test/delay_tool/plot_neteq_delay.m index 86d533fbeb..ff0bca8b8d 100644 --- a/modules/audio_coding/neteq/test/delay_tool/plot_neteq_delay.m +++ b/modules/audio_coding/neteq/test/delay_tool/plot_neteq_delay.m @@ -1,3 +1,4 @@ +% // clang-format off % % Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
% diff --git a/modules/audio_coding/neteq/test/neteq_decoding_test.cc b/modules/audio_coding/neteq/test/neteq_decoding_test.cc index e6c1809fb6..aca3efc95f 100644 --- a/modules/audio_coding/neteq/test/neteq_decoding_test.cc +++ b/modules/audio_coding/neteq/test/neteq_decoding_test.cc @@ -12,20 +12,22 @@ #include "absl/strings/string_view.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/environment/environment_factory.h" +#include "api/neteq/default_neteq_factory.h" #include "api/rtp_headers.h" -#include "modules/audio_coding/neteq/default_neteq_factory.h" +#include "api/units/timestamp.h" #include "modules/audio_coding/neteq/test/result_sink.h" #include "rtc_base/strings/string_builder.h" #include "test/testsupport/file_utils.h" #ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT -RTC_PUSH_IGNORING_WUNDEF() + #ifdef WEBRTC_ANDROID_PLATFORM_BUILD #include "external/webrtc/webrtc/modules/audio_coding/neteq/neteq_unittest.pb.h" #else #include "modules/audio_coding/neteq/neteq_unittest.pb.h" #endif -RTC_POP_IGNORING_WUNDEF() + #endif namespace webrtc { @@ -37,10 +39,6 @@ void LoadDecoders(webrtc::NetEq* neteq) { neteq->RegisterPayloadType(0, SdpAudioFormat("pcmu", 8000, 1))); ASSERT_EQ(true, neteq->RegisterPayloadType(8, SdpAudioFormat("pcma", 8000, 1))); -#ifdef WEBRTC_CODEC_ILBC - ASSERT_EQ(true, - neteq->RegisterPayloadType(102, SdpAudioFormat("ilbc", 8000, 1))); -#endif #if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX) ASSERT_EQ(true, neteq->RegisterPayloadType(103, SdpAudioFormat("isac", 16000, 1))); @@ -76,6 +74,7 @@ const int NetEqDecodingTest::kInitSampleRateHz; NetEqDecodingTest::NetEqDecodingTest() : clock_(0), + env_(CreateEnvironment(&clock_)), config_(), output_sample_rate_(kInitSampleRateHz), algorithmic_delay_ms_(0) { @@ -83,8 +82,8 @@ NetEqDecodingTest::NetEqDecodingTest() } void NetEqDecodingTest::SetUp() { - auto decoder_factory = CreateBuiltinAudioDecoderFactory(); - neteq_ = DefaultNetEqFactory().CreateNetEq(config_, decoder_factory, &clock_); + neteq_ = DefaultNetEqFactory().Create(env_, config_, + CreateBuiltinAudioDecoderFactory()); NetEqNetworkStatistics stat; ASSERT_EQ(0, neteq_->NetworkStatistics(&stat)); algorithmic_delay_ms_ = stat.current_buffer_size_ms; @@ -106,11 +105,11 @@ void NetEqDecodingTest::Process() { // Ignore payload type 104 (iSAC-swb) if ISAC is not supported. if (packet_->header().payloadType != 104) #endif - ASSERT_EQ( - 0, neteq_->InsertPacket( - packet_->header(), - rtc::ArrayView( - packet_->payload(), packet_->payload_length_bytes()))); + ASSERT_EQ(0, neteq_->InsertPacket(packet_->header(), + ArrayView( + packet_->payload(), + packet_->payload_length_bytes()), + clock_.CurrentTime())); } // Get next packet. packet_ = rtp_source_->NextPacket(); @@ -151,7 +150,7 @@ void NetEqDecodingTest::DecodeAndCompare( uint64_t last_concealed_samples = 0; uint64_t last_total_samples_received = 0; while (packet_) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Lap number " << i++ << " in DecodeAndCompare while loop"; SCOPED_TRACE(ss.str()); // Print out the parameter values on failure. ASSERT_NO_FATAL_FAILURE(Process()); @@ -239,7 +238,8 @@ void NetEqDecodingTest::WrapTest(uint16_t start_seq_no, PopulateRtpInfo(seq_no, timestamp, &rtp_info); if (drop_seq_numbers.find(seq_no) == drop_seq_numbers.end()) { // This sequence number was not in the set to drop. Insert it. 
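WrapTest above (like the earlier GetSpanSamples wraparound case) relies on RTP timestamps being unsigned 32-bit values, where subtraction is modulo 2^32 and therefore wrap-safe. A short standalone illustration follows; the NewerTimestamp helper is hypothetical and assumes the two values are less than half the 32-bit range apart.

```cpp
#include <cstdint>
#include <iostream>

// "newer - older" gives the forward distance even across a wrap, because
// unsigned arithmetic is defined modulo 2^32.
uint32_t ForwardDistance(uint32_t older, uint32_t newer) {
  return newer - older;
}

// Cyclic "is newer" check; valid when the timestamps are less than 2^31 apart.
bool NewerTimestamp(uint32_t a, uint32_t b) {
  return static_cast<int32_t>(a - b) > 0;
}

int main() {
  const uint32_t before_wrap = 0xFFFFFF00u;
  const uint32_t after_wrap = before_wrap + 960;  // Wraps past zero.
  std::cout << "after_wrap = " << after_wrap << "\n";  // 704
  std::cout << "distance   = " << ForwardDistance(before_wrap, after_wrap)
            << "\n";  // 960
  std::cout << "newer?     = " << NewerTimestamp(after_wrap, before_wrap)
            << "\n";  // 1
}
```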
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload)); + ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, + Timestamp::Millis(t_ms))); } NetEqNetworkStatistics network_stats; ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats)); @@ -265,7 +265,7 @@ void NetEqDecodingTest::WrapTest(uint16_t start_seq_no, ASSERT_EQ(1u, output.num_channels_); // Expect delay (in samples) to be less than 2 packets. - absl::optional playout_timestamp = neteq_->GetPlayoutTimestamp(); + std::optional playout_timestamp = neteq_->GetPlayoutTimestamp(); ASSERT_TRUE(playout_timestamp); EXPECT_LE(timestamp - *playout_timestamp, static_cast(kSamples * 2)); @@ -298,7 +298,8 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor, uint8_t payload[kPayloadBytes] = {0}; RTPHeader rtp_info; PopulateRtpInfo(seq_no, timestamp, &rtp_info); - ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload)); + ASSERT_EQ( + 0, neteq_->InsertPacket(rtp_info, payload, Timestamp::Millis(t_ms))); ++seq_no; timestamp += kSamples; next_input_time_ms += static_cast(kFrameSizeMs) * drift_factor; @@ -309,7 +310,7 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor, } EXPECT_EQ(AudioFrame::kNormalSpeech, out_frame_.speech_type_); - absl::optional playout_timestamp = neteq_->GetPlayoutTimestamp(); + std::optional playout_timestamp = neteq_->GetPlayoutTimestamp(); ASSERT_TRUE(playout_timestamp); int32_t delay_before = timestamp - *playout_timestamp; @@ -325,8 +326,9 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor, size_t payload_len; RTPHeader rtp_info; PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len); - ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, rtc::ArrayView( - payload, payload_len))); + ASSERT_EQ(0, neteq_->InsertPacket( + rtp_info, ArrayView(payload, payload_len), + Timestamp::Millis(t_ms))); ++seq_no; timestamp += kCngPeriodSamples; next_input_time_ms += static_cast(kCngPeriodMs) * drift_factor; @@ -367,8 +369,9 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor, size_t payload_len; RTPHeader rtp_info; PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len); - ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, rtc::ArrayView( - payload, payload_len))); + ASSERT_EQ(0, neteq_->InsertPacket( + rtp_info, ArrayView(payload, payload_len), + Timestamp::Millis(t_ms))); ++seq_no; timestamp += kCngPeriodSamples; next_input_time_ms += kCngPeriodMs * drift_factor; @@ -384,7 +387,8 @@ void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor, uint8_t payload[kPayloadBytes] = {0}; RTPHeader rtp_info; PopulateRtpInfo(seq_no, timestamp, &rtp_info); - ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload)); + ASSERT_EQ( + 0, neteq_->InsertPacket(rtp_info, payload, Timestamp::Millis(t_ms))); ++seq_no; timestamp += kSamples; next_input_time_ms += kFrameSizeMs * drift_factor; @@ -413,9 +417,8 @@ void NetEqDecodingTestTwoInstances::SetUp() { } void NetEqDecodingTestTwoInstances::CreateSecondInstance() { - auto decoder_factory = CreateBuiltinAudioDecoderFactory(); - neteq2_ = - DefaultNetEqFactory().CreateNetEq(config2_, decoder_factory, &clock_); + neteq2_ = DefaultNetEqFactory().Create(env_, config2_, + CreateBuiltinAudioDecoderFactory()); ASSERT_TRUE(neteq2_); LoadDecoders(neteq2_.get()); } diff --git a/modules/audio_coding/neteq/test/neteq_decoding_test.h b/modules/audio_coding/neteq/test/neteq_decoding_test.h index 456c397fdd..fa348dbc48 100644 --- a/modules/audio_coding/neteq/test/neteq_decoding_test.h +++ b/modules/audio_coding/neteq/test/neteq_decoding_test.h 
@@ -17,6 +17,7 @@ #include "absl/strings/string_view.h" #include "api/audio/audio_frame.h" +#include "api/environment/environment.h" #include "api/neteq/neteq.h" #include "api/rtp_headers.h" #include "modules/audio_coding/neteq/tools/packet.h" @@ -70,6 +71,7 @@ class NetEqDecodingTest : public ::testing::Test { int max_time_to_speech_ms); SimulatedClock clock_; + const Environment env_; std::unique_ptr neteq_; NetEq::Config config_; std::unique_ptr rtp_source_; diff --git a/modules/audio_coding/neteq/test/neteq_ilbc_quality_test.cc b/modules/audio_coding/neteq/test/neteq_ilbc_quality_test.cc deleted file mode 100644 index 1004141f16..0000000000 --- a/modules/audio_coding/neteq/test/neteq_ilbc_quality_test.cc +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include "absl/flags/flag.h" -#include "modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h" -#include "modules/audio_coding/neteq/tools/neteq_quality_test.h" -#include "rtc_base/checks.h" -#include "rtc_base/numerics/safe_conversions.h" -#include "test/testsupport/file_utils.h" - -ABSL_FLAG(int, frame_size_ms, 20, "Codec frame size (milliseconds)."); - -using ::testing::InitGoogleTest; - -namespace webrtc { -namespace test { -namespace { -static const int kInputSampleRateKhz = 8; -static const int kOutputSampleRateKhz = 8; -} // namespace - -class NetEqIlbcQualityTest : public NetEqQualityTest { - protected: - NetEqIlbcQualityTest() - : NetEqQualityTest(absl::GetFlag(FLAGS_frame_size_ms), - kInputSampleRateKhz, - kOutputSampleRateKhz, - SdpAudioFormat("ilbc", 8000, 1)) { - // Flag validation - RTC_CHECK(absl::GetFlag(FLAGS_frame_size_ms) == 20 || - absl::GetFlag(FLAGS_frame_size_ms) == 30 || - absl::GetFlag(FLAGS_frame_size_ms) == 40 || - absl::GetFlag(FLAGS_frame_size_ms) == 60) - << "Invalid frame size, should be 20, 30, 40, or 60 ms."; - } - - void SetUp() override { - ASSERT_EQ(1u, channels_) << "iLBC supports only mono audio."; - AudioEncoderIlbcConfig config; - config.frame_size_ms = absl::GetFlag(FLAGS_frame_size_ms); - encoder_.reset(new AudioEncoderIlbcImpl(config, 102)); - NetEqQualityTest::SetUp(); - } - - int EncodeBlock(int16_t* in_data, - size_t block_size_samples, - rtc::Buffer* payload, - size_t max_bytes) override { - const size_t kFrameSizeSamples = 80; // Samples per 10 ms. 
- size_t encoded_samples = 0; - uint32_t dummy_timestamp = 0; - AudioEncoder::EncodedInfo info; - do { - info = encoder_->Encode(dummy_timestamp, - rtc::ArrayView( - in_data + encoded_samples, kFrameSizeSamples), - payload); - encoded_samples += kFrameSizeSamples; - } while (info.encoded_bytes == 0); - return rtc::checked_cast(info.encoded_bytes); - } - - private: - std::unique_ptr encoder_; -}; - -TEST_F(NetEqIlbcQualityTest, Test) { - Simulate(); -} - -} // namespace test -} // namespace webrtc diff --git a/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc b/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc index 5a2df24ef6..59be7cdbf9 100644 --- a/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc +++ b/modules/audio_coding/neteq/test/neteq_opus_quality_test.cc @@ -50,7 +50,7 @@ class NetEqOpusQualityTest : public NetEqQualityTest { void TearDown() override; int EncodeBlock(int16_t* in_data, size_t block_size_samples, - rtc::Buffer* payload, + Buffer* payload, size_t max_bytes) override; private: @@ -106,8 +106,8 @@ NetEqOpusQualityTest::NetEqOpusQualityTest() // Redefine decoder type if input is stereo. if (channels_ > 1) { - audio_format_ = SdpAudioFormat("opus", 48000, 2, - SdpAudioFormat::Parameters{{"stereo", "1"}}); + audio_format_ = + SdpAudioFormat("opus", 48000, 2, CodecParameterMap{{"stereo", "1"}}); } application_ = absl::GetFlag(FLAGS_application); } @@ -144,14 +144,14 @@ void NetEqOpusQualityTest::TearDown() { int NetEqOpusQualityTest::EncodeBlock(int16_t* in_data, size_t block_size_samples, - rtc::Buffer* payload, + Buffer* payload, size_t max_bytes) { EXPECT_EQ(block_size_samples, sub_block_size_samples_ * sub_packets_); int16_t* pointer = in_data; int value; opus_repacketizer_init(repacketizer_); for (int idx = 0; idx < sub_packets_; idx++) { - payload->AppendData(max_bytes, [&](rtc::ArrayView payload) { + payload->AppendData(max_bytes, [&](ArrayView payload) { value = WebRtcOpus_Encode(opus_encoder_, pointer, sub_block_size_samples_, max_bytes, payload.data()); diff --git a/modules/audio_coding/neteq/test/neteq_pcm16b_quality_test.cc b/modules/audio_coding/neteq/test/neteq_pcm16b_quality_test.cc index c3e160cb66..53af21f8ff 100644 --- a/modules/audio_coding/neteq/test/neteq_pcm16b_quality_test.cc +++ b/modules/audio_coding/neteq/test/neteq_pcm16b_quality_test.cc @@ -52,21 +52,21 @@ class NetEqPcm16bQualityTest : public NetEqQualityTest { } int EncodeBlock(int16_t* in_data, - size_t block_size_samples, - rtc::Buffer* payload, - size_t max_bytes) override { + size_t /* block_size_samples */, + Buffer* payload, + size_t /* max_bytes */) override { const size_t kFrameSizeSamples = 480; // Samples per 10 ms. 
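The EncodeBlock overrides in these quality tests keep feeding 10 ms frames until the encoder reports a non-zero encoded_bytes, since an encoder may buffer several frames before emitting a packet (20, 30 or 60 ms packets). A standalone sketch of that loop with a stub encoder; StubEncoder is hypothetical, whereas the real tests drive actual AudioEncoder implementations.

```cpp
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

// Hypothetical encoder that only emits a packet every `frames_per_packet_`
// calls, returning 0 encoded bytes while it is still buffering.
class StubEncoder {
 public:
  explicit StubEncoder(int frames_per_packet)
      : frames_per_packet_(frames_per_packet) {}

  size_t Encode(const std::vector<int16_t>& frame,
                std::vector<uint8_t>* payload) {
    if (++buffered_frames_ < frames_per_packet_) return 0;
    buffered_frames_ = 0;
    payload->assign(frame.size(), 0);  // Dummy payload, one byte per sample.
    return payload->size();
  }

 private:
  const int frames_per_packet_;
  int buffered_frames_ = 0;
};

int main() {
  constexpr size_t kFrameSizeSamples = 80;  // 10 ms at 8 kHz.
  StubEncoder encoder(/*frames_per_packet=*/2);  // 20 ms packets.
  std::vector<int16_t> frame(kFrameSizeSamples, 0);
  std::vector<uint8_t> payload;

  size_t encoded_bytes = 0;
  int frames_fed = 0;
  do {  // Same shape as the do/while loop in EncodeBlock.
    encoded_bytes = encoder.Encode(frame, &payload);
    ++frames_fed;
  } while (encoded_bytes == 0);
  std::cout << "fed " << frames_fed << " frames, got " << encoded_bytes
            << " bytes\n";  // fed 2 frames, got 80 bytes
}
```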
size_t encoded_samples = 0; uint32_t dummy_timestamp = 0; AudioEncoder::EncodedInfo info; do { info = encoder_->Encode(dummy_timestamp, - rtc::ArrayView( + ArrayView( in_data + encoded_samples, kFrameSizeSamples), payload); encoded_samples += kFrameSizeSamples; } while (info.encoded_bytes == 0); - return rtc::checked_cast(info.encoded_bytes); + return checked_cast(info.encoded_bytes); } private: diff --git a/modules/audio_coding/neteq/test/neteq_pcmu_quality_test.cc b/modules/audio_coding/neteq/test/neteq_pcmu_quality_test.cc index d22170c623..62ea361d0a 100644 --- a/modules/audio_coding/neteq/test/neteq_pcmu_quality_test.cc +++ b/modules/audio_coding/neteq/test/neteq_pcmu_quality_test.cc @@ -51,21 +51,21 @@ class NetEqPcmuQualityTest : public NetEqQualityTest { } int EncodeBlock(int16_t* in_data, - size_t block_size_samples, - rtc::Buffer* payload, - size_t max_bytes) override { + size_t /* block_size_samples */, + Buffer* payload, + size_t /* max_bytes */) override { const size_t kFrameSizeSamples = 80; // Samples per 10 ms. size_t encoded_samples = 0; uint32_t dummy_timestamp = 0; AudioEncoder::EncodedInfo info; do { info = encoder_->Encode(dummy_timestamp, - rtc::ArrayView( + ArrayView( in_data + encoded_samples, kFrameSizeSamples), payload); encoded_samples += kFrameSizeSamples; } while (info.encoded_bytes == 0); - return rtc::checked_cast(info.encoded_bytes); + return checked_cast(info.encoded_bytes); } private: diff --git a/modules/audio_coding/neteq/test/neteq_performance_unittest.cc b/modules/audio_coding/neteq/test/neteq_performance_unittest.cc index 961f74ab66..1b453cf7bf 100644 --- a/modules/audio_coding/neteq/test/neteq_performance_unittest.cc +++ b/modules/audio_coding/neteq/test/neteq_performance_unittest.cc @@ -8,11 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include "absl/flags/flag.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/metrics/metric.h" #include "modules/audio_coding/neteq/tools/neteq_performance_test.h" -#include "system_wrappers/include/field_trial.h" #include "test/gtest.h" +#include "test/test_flags.h" namespace webrtc { namespace { @@ -29,8 +30,8 @@ TEST(NetEqPerformanceTest, 10_Pl_10_Drift) { const int kLossPeriod = 10; // Drop every 10th packet. const double kDriftFactor = 0.1; int64_t runtime = test::NetEqPerformanceTest::Run( - field_trial::IsEnabled("WebRTC-QuickPerfTest") ? kQuickSimulationTimeMs - : kSimulationTimeMs, + absl::GetFlag(FLAGS_webrtc_quick_perf_test) ? kQuickSimulationTimeMs + : kSimulationTimeMs, kLossPeriod, kDriftFactor); ASSERT_GT(runtime, 0); GetGlobalMetricsLogger()->LogSingleValueMetric( @@ -47,8 +48,8 @@ TEST(NetEqPerformanceTest, 0_Pl_0_Drift) { const int kLossPeriod = 0; // No losses. const double kDriftFactor = 0.0; // No clock drift. int64_t runtime = test::NetEqPerformanceTest::Run( - field_trial::IsEnabled("WebRTC-QuickPerfTest") ? kQuickSimulationTimeMs - : kSimulationTimeMs, + absl::GetFlag(FLAGS_webrtc_quick_perf_test) ? 
kQuickSimulationTimeMs + : kSimulationTimeMs, kLossPeriod, kDriftFactor); ASSERT_GT(runtime, 0); GetGlobalMetricsLogger()->LogSingleValueMetric( diff --git a/modules/audio_coding/neteq/test/result_sink.cc b/modules/audio_coding/neteq/test/result_sink.cc index f5d50dc859..98683b1d8d 100644 --- a/modules/audio_coding/neteq/test/result_sink.cc +++ b/modules/audio_coding/neteq/test/result_sink.cc @@ -10,22 +10,25 @@ #include "modules/audio_coding/neteq/test/result_sink.h" +#include +#include +#include #include #include "absl/strings/string_view.h" -#include "rtc_base/ignore_wundef.h" +#include "api/neteq/neteq.h" #include "rtc_base/message_digest.h" #include "rtc_base/string_encode.h" #include "test/gtest.h" #ifdef WEBRTC_NETEQ_UNITTEST_BITEXACT -RTC_PUSH_IGNORING_WUNDEF() + #ifdef WEBRTC_ANDROID_PLATFORM_BUILD #include "external/webrtc/webrtc/modules/audio_coding/neteq/neteq_unittest.pb.h" #else #include "modules/audio_coding/neteq/neteq_unittest.pb.h" #endif -RTC_POP_IGNORING_WUNDEF() + #endif namespace webrtc { @@ -48,9 +51,7 @@ void Convert(const webrtc::NetEqNetworkStatistics& stats_raw, stats->set_max_waiting_time_ms(stats_raw.max_waiting_time_ms); } -void AddMessage(FILE* file, - rtc::MessageDigest* digest, - absl::string_view message) { +void AddMessage(FILE* file, MessageDigest* digest, absl::string_view message) { int32_t size = message.length(); if (file) ASSERT_EQ(1u, fwrite(&size, sizeof(size), 1, file)); @@ -65,8 +66,7 @@ void AddMessage(FILE* file, #endif // WEBRTC_NETEQ_UNITTEST_BITEXACT ResultSink::ResultSink(absl::string_view output_file) - : output_fp_(nullptr), - digest_(rtc::MessageDigestFactory::Create(rtc::DIGEST_SHA_1)) { + : output_fp_(nullptr), digest_(MessageDigestFactory::Create(DIGEST_SHA_1)) { if (!output_file.empty()) { output_fp_ = fopen(std::string(output_file).c_str(), "wb"); EXPECT_TRUE(output_fp_ != NULL); @@ -95,7 +95,7 @@ void ResultSink::VerifyChecksum(absl::string_view checksum) { std::string buffer; buffer.resize(digest_->Size()); digest_->Finish(buffer.data(), buffer.size()); - const std::string result = rtc::hex_encode(buffer); + const std::string result = hex_encode(buffer); if (checksum.size() == result.size()) { EXPECT_EQ(checksum, result); } else { diff --git a/modules/audio_coding/neteq/test/result_sink.h b/modules/audio_coding/neteq/test/result_sink.h index c6923d7a7f..dc3d383568 100644 --- a/modules/audio_coding/neteq/test/result_sink.h +++ b/modules/audio_coding/neteq/test/result_sink.h @@ -13,7 +13,6 @@ #include #include -#include #include "absl/strings/string_view.h" #include "api/neteq/neteq.h" @@ -35,7 +34,7 @@ class ResultSink { private: FILE* output_fp_; - std::unique_ptr digest_; + std::unique_ptr digest_; }; template diff --git a/modules/audio_coding/neteq/time_stretch.cc b/modules/audio_coding/neteq/time_stretch.cc index b89be0608d..d6dddd5317 100644 --- a/modules/audio_coding/neteq/time_stretch.cc +++ b/modules/audio_coding/neteq/time_stretch.cc @@ -184,7 +184,7 @@ bool TimeStretch::SpeechDetection(int32_t vec1_energy, // (vec1_energy + vec2_energy) / 16 <= peak_index * background_noise_energy. // The two sides of the inequality will be denoted `left_side` and // `right_side`. 
- int32_t left_side = rtc::saturated_cast( + int32_t left_side = saturated_cast( (static_cast(vec1_energy) + vec2_energy) / 16); int32_t right_side; if (background_noise_.initialized()) { @@ -196,8 +196,7 @@ bool TimeStretch::SpeechDetection(int32_t vec1_energy, int right_scale = 16 - WebRtcSpl_NormW32(right_side); right_scale = std::max(0, right_scale); left_side = left_side >> right_scale; - right_side = - rtc::dchecked_cast(peak_index) * (right_side >> right_scale); + right_side = dchecked_cast(peak_index) * (right_side >> right_scale); // Scale `left_side` properly before comparing with `right_side`. // (`scaling` is the scale factor before energy calculation, thus the scale diff --git a/modules/audio_coding/neteq/timestamp_scaler_unittest.cc b/modules/audio_coding/neteq/timestamp_scaler_unittest.cc index c2bb4dd95f..e880004a5b 100644 --- a/modules/audio_coding/neteq/timestamp_scaler_unittest.cc +++ b/modules/audio_coding/neteq/timestamp_scaler_unittest.cc @@ -11,6 +11,8 @@ #include "modules/audio_coding/neteq/timestamp_scaler.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "modules/audio_coding/neteq/mock/mock_decoder_database.h" #include "modules/audio_coding/neteq/packet.h" #include "test/gmock.h" @@ -23,11 +25,12 @@ using ::testing::ReturnNull; namespace webrtc { TEST(TimestampScaler, TestNoScaling) { + const Environment env = CreateEnvironment(); MockDecoderDatabase db; auto factory = CreateBuiltinAudioDecoderFactory(); // Use PCMu, because it doesn't use scaled timestamps. - const DecoderDatabase::DecoderInfo info(SdpAudioFormat("pcmu", 8000, 1), - absl::nullopt, factory.get()); + const DecoderDatabase::DecoderInfo info(env, SdpAudioFormat("pcmu", 8000, 1), + std::nullopt, factory.get()); static const uint8_t kRtpPayloadType = 0; EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType)) .WillRepeatedly(Return(&info)); @@ -45,11 +48,12 @@ TEST(TimestampScaler, TestNoScaling) { } TEST(TimestampScaler, TestNoScalingLargeStep) { + const Environment env = CreateEnvironment(); MockDecoderDatabase db; auto factory = CreateBuiltinAudioDecoderFactory(); // Use PCMu, because it doesn't use scaled timestamps. - const DecoderDatabase::DecoderInfo info(SdpAudioFormat("pcmu", 8000, 1), - absl::nullopt, factory.get()); + const DecoderDatabase::DecoderInfo info(env, SdpAudioFormat("pcmu", 8000, 1), + std::nullopt, factory.get()); static const uint8_t kRtpPayloadType = 0; EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType)) .WillRepeatedly(Return(&info)); @@ -72,11 +76,12 @@ TEST(TimestampScaler, TestNoScalingLargeStep) { } TEST(TimestampScaler, TestG722) { + const Environment env = CreateEnvironment(); MockDecoderDatabase db; auto factory = CreateBuiltinAudioDecoderFactory(); // Use G722, which has a factor 2 scaling. - const DecoderDatabase::DecoderInfo info(SdpAudioFormat("g722", 8000, 1), - absl::nullopt, factory.get()); + const DecoderDatabase::DecoderInfo info(env, SdpAudioFormat("g722", 8000, 1), + std::nullopt, factory.get()); static const uint8_t kRtpPayloadType = 17; EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType)) .WillRepeatedly(Return(&info)); @@ -98,11 +103,12 @@ TEST(TimestampScaler, TestG722) { } TEST(TimestampScaler, TestG722LargeStep) { + const Environment env = CreateEnvironment(); MockDecoderDatabase db; auto factory = CreateBuiltinAudioDecoderFactory(); // Use G722, which has a factor 2 scaling. 
- const DecoderDatabase::DecoderInfo info(SdpAudioFormat("g722", 8000, 1), - absl::nullopt, factory.get()); + const DecoderDatabase::DecoderInfo info(env, SdpAudioFormat("g722", 8000, 1), + std::nullopt, factory.get()); static const uint8_t kRtpPayloadType = 17; EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType)) .WillRepeatedly(Return(&info)); @@ -128,13 +134,14 @@ TEST(TimestampScaler, TestG722LargeStep) { } TEST(TimestampScaler, TestG722WithCng) { + const Environment env = CreateEnvironment(); MockDecoderDatabase db; auto factory = CreateBuiltinAudioDecoderFactory(); // Use G722, which has a factor 2 scaling. - const DecoderDatabase::DecoderInfo info_g722(SdpAudioFormat("g722", 8000, 1), - absl::nullopt, factory.get()); - const DecoderDatabase::DecoderInfo info_cng(SdpAudioFormat("cn", 16000, 1), - absl::nullopt, factory.get()); + const DecoderDatabase::DecoderInfo info_g722( + env, SdpAudioFormat("g722", 8000, 1), std::nullopt, factory.get()); + const DecoderDatabase::DecoderInfo info_cng( + env, SdpAudioFormat("cn", 16000, 1), std::nullopt, factory.get()); static const uint8_t kRtpPayloadTypeG722 = 17; static const uint8_t kRtpPayloadTypeCng = 13; EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadTypeG722)) @@ -172,11 +179,12 @@ TEST(TimestampScaler, TestG722WithCng) { // Since it is simply calling the other ToInternal method, we are not doing // as many tests here. TEST(TimestampScaler, TestG722Packet) { + const Environment env = CreateEnvironment(); MockDecoderDatabase db; auto factory = CreateBuiltinAudioDecoderFactory(); // Use G722, which has a factor 2 scaling. - const DecoderDatabase::DecoderInfo info(SdpAudioFormat("g722", 8000, 1), - absl::nullopt, factory.get()); + const DecoderDatabase::DecoderInfo info(env, SdpAudioFormat("g722", 8000, 1), + std::nullopt, factory.get()); static const uint8_t kRtpPayloadType = 17; EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType)) .WillRepeatedly(Return(&info)); @@ -202,11 +210,12 @@ TEST(TimestampScaler, TestG722Packet) { // correctly. Since it is simply calling the ToInternal(Packet* packet) method, // we are not doing as many tests here. TEST(TimestampScaler, TestG722PacketList) { + const Environment env = CreateEnvironment(); MockDecoderDatabase db; auto factory = CreateBuiltinAudioDecoderFactory(); // Use G722, which has a factor 2 scaling. - const DecoderDatabase::DecoderInfo info(SdpAudioFormat("g722", 8000, 1), - absl::nullopt, factory.get()); + const DecoderDatabase::DecoderInfo info(env, SdpAudioFormat("g722", 8000, 1), + std::nullopt, factory.get()); static const uint8_t kRtpPayloadType = 17; EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType)) .WillRepeatedly(Return(&info)); @@ -236,11 +245,12 @@ TEST(TimestampScaler, TestG722PacketList) { } TEST(TimestampScaler, TestG722Reset) { + const Environment env = CreateEnvironment(); MockDecoderDatabase db; auto factory = CreateBuiltinAudioDecoderFactory(); // Use G722, which has a factor 2 scaling. - const DecoderDatabase::DecoderInfo info(SdpAudioFormat("g722", 8000, 1), - absl::nullopt, factory.get()); + const DecoderDatabase::DecoderInfo info(env, SdpAudioFormat("g722", 8000, 1), + std::nullopt, factory.get()); static const uint8_t kRtpPayloadType = 17; EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType)) .WillRepeatedly(Return(&info)); @@ -278,10 +288,11 @@ TEST(TimestampScaler, TestG722Reset) { // keep it, since it can be taken as a test case for the situation of a trivial // timestamp scaler. 
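The G722 cases above exercise the factor-2 relation between external (RTP, 8 kHz clock) and internal (16 kHz) timestamps. A rough standalone sketch of scaling relative to a reference timestamp is shown below; MiniTimestampScaler is hypothetical, not the real TimestampScaler, and it assumes the scale factor stays constant.

```cpp
#include <cstdint>
#include <iostream>

// Hypothetical sketch: convert external RTP timestamps to internal ones by
// scaling the distance from the first packet's timestamp.
class MiniTimestampScaler {
 public:
  uint32_t ToInternal(uint32_t external, int numerator, int denominator) {
    if (first_packet_) {
      external_ref_ = external;
      internal_ref_ = external;  // Start the internal clock at the same value.
      first_packet_ = false;
    }
    const uint32_t external_diff = external - external_ref_;  // Wrap-safe.
    return internal_ref_ + external_diff * numerator / denominator;
  }

 private:
  bool first_packet_ = true;
  uint32_t external_ref_ = 0;
  uint32_t internal_ref_ = 0;
};

int main() {
  MiniTimestampScaler scaler;
  // G722-like scaling: numerator 2, denominator 1.
  std::cout << scaler.ToInternal(1000, 2, 1) << "\n";  // 1000 (reference).
  std::cout << scaler.ToInternal(1160, 2, 1) << "\n";  // 1000 + 160 * 2 = 1320.
  std::cout << scaler.ToInternal(1320, 2, 1) << "\n";  // 1000 + 320 * 2 = 1640.
}
```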
TEST(TimestampScaler, TestOpusLargeStep) { + const Environment env = CreateEnvironment(); MockDecoderDatabase db; auto factory = CreateBuiltinAudioDecoderFactory(); - const DecoderDatabase::DecoderInfo info(SdpAudioFormat("opus", 48000, 2), - absl::nullopt, factory.get()); + const DecoderDatabase::DecoderInfo info(env, SdpAudioFormat("opus", 48000, 2), + std::nullopt, factory.get()); static const uint8_t kRtpPayloadType = 17; EXPECT_CALL(db, GetDecoderInfo(kRtpPayloadType)) .WillRepeatedly(Return(&info)); diff --git a/modules/audio_coding/neteq/tools/audio_checksum.h b/modules/audio_coding/neteq/tools/audio_checksum.h index 42e3a3a3a0..7038a14001 100644 --- a/modules/audio_coding/neteq/tools/audio_checksum.h +++ b/modules/audio_coding/neteq/tools/audio_checksum.h @@ -11,6 +11,8 @@ #ifndef MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_CHECKSUM_H_ #define MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_CHECKSUM_H_ +#include +#include #include #include @@ -26,7 +28,7 @@ namespace test { class AudioChecksum : public AudioSink { public: AudioChecksum() - : checksum_(rtc::MessageDigestFactory::Create(rtc::DIGEST_MD5)), + : checksum_(MessageDigestFactory::Create(DIGEST_MD5)), checksum_result_(checksum_->Size()), finished_(false) {} @@ -50,12 +52,12 @@ class AudioChecksum : public AudioSink { finished_ = true; checksum_->Finish(checksum_result_.data(), checksum_result_.size()); } - return rtc::hex_encode(checksum_result_); + return hex_encode(checksum_result_); } private: - std::unique_ptr checksum_; - rtc::Buffer checksum_result_; + std::unique_ptr checksum_; + Buffer checksum_result_; bool finished_; }; diff --git a/modules/audio_coding/neteq/tools/audio_loop.cc b/modules/audio_coding/neteq/tools/audio_loop.cc index 514e6eb2ee..cfac0f3300 100644 --- a/modules/audio_coding/neteq/tools/audio_loop.cc +++ b/modules/audio_coding/neteq/tools/audio_loop.cc @@ -47,14 +47,14 @@ bool AudioLoop::Init(absl::string_view file_name, return true; } -rtc::ArrayView AudioLoop::GetNextBlock() { +ArrayView AudioLoop::GetNextBlock() { // Check that the AudioLoop is initialized. if (block_length_samples_ == 0) - return rtc::ArrayView(); + return ArrayView(); const int16_t* output_ptr = &audio_array_[next_index_]; next_index_ = (next_index_ + block_length_samples_) % loop_length_samples_; - return rtc::ArrayView(output_ptr, block_length_samples_); + return ArrayView(output_ptr, block_length_samples_); } } // namespace test diff --git a/modules/audio_coding/neteq/tools/audio_loop.h b/modules/audio_coding/neteq/tools/audio_loop.h index f5f0b59011..d722e01666 100644 --- a/modules/audio_coding/neteq/tools/audio_loop.h +++ b/modules/audio_coding/neteq/tools/audio_loop.h @@ -43,7 +43,7 @@ class AudioLoop { // Returns a (pointer,size) pair for the next block of audio. The size is // equal to the `block_length_samples` Init() argument. 
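AudioLoop::GetNextBlock, shown above, hands out fixed-size blocks and wraps the read index modulo the loop length. A standalone sketch of the same behavior; MiniAudioLoop is hypothetical, returns a raw pointer instead of an ArrayView to stay self-contained, and assumes the buffer length is a multiple of the block length.

```cpp
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <utility>
#include <vector>

// Hypothetical looping block reader: advances by one block per call and wraps
// back to the start of the buffer when the end is reached.
class MiniAudioLoop {
 public:
  MiniAudioLoop(std::vector<int16_t> samples, size_t block_length_samples)
      : samples_(std::move(samples)), block_length_(block_length_samples) {}

  const int16_t* GetNextBlock() {
    const int16_t* block = &samples_[next_index_];
    next_index_ = (next_index_ + block_length_) % samples_.size();
    return block;
  }

 private:
  std::vector<int16_t> samples_;
  size_t block_length_;
  size_t next_index_ = 0;
};

int main() {
  // 6 samples looped in blocks of 2: (0,1), (2,3), (4,5), then back to (0,1).
  MiniAudioLoop loop({0, 1, 2, 3, 4, 5}, /*block_length_samples=*/2);
  for (int i = 0; i < 4; ++i) {
    const int16_t* block = loop.GetNextBlock();
    std::cout << block[0] << "," << block[1] << "\n";
  }
}
```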
- rtc::ArrayView GetNextBlock(); + ArrayView GetNextBlock(); private: size_t next_index_; diff --git a/modules/audio_coding/neteq/tools/audio_sink.cc b/modules/audio_coding/neteq/tools/audio_sink.cc index 7d7af7ef9f..656dda43a1 100644 --- a/modules/audio_coding/neteq/tools/audio_sink.cc +++ b/modules/audio_coding/neteq/tools/audio_sink.cc @@ -18,7 +18,8 @@ bool AudioSinkFork::WriteArray(const int16_t* audio, size_t num_samples) { right_sink_->WriteArray(audio, num_samples); } -bool VoidAudioSink::WriteArray(const int16_t* audio, size_t num_samples) { +bool VoidAudioSink::WriteArray(const int16_t* /* audio */, + size_t /* num_samples */) { return true; } diff --git a/modules/audio_coding/neteq/tools/constant_pcm_packet_source.cc b/modules/audio_coding/neteq/tools/constant_pcm_packet_source.cc index 18a910365f..d63e1b05e1 100644 --- a/modules/audio_coding/neteq/tools/constant_pcm_packet_source.cc +++ b/modules/audio_coding/neteq/tools/constant_pcm_packet_source.cc @@ -37,7 +37,7 @@ ConstantPcmPacketSource::ConstantPcmPacketSource(size_t payload_len_samples, std::unique_ptr ConstantPcmPacketSource::NextPacket() { RTC_CHECK_GT(packet_len_bytes_, kHeaderLenBytes); - rtc::CopyOnWriteBuffer packet_buffer(packet_len_bytes_); + CopyOnWriteBuffer packet_buffer(packet_len_bytes_); uint8_t* packet_memory = packet_buffer.MutableData(); // Fill the payload part of the packet memory with the pre-encoded value. for (unsigned i = 0; i < 2 * payload_len_samples_; ++i) diff --git a/modules/audio_coding/neteq/tools/encode_neteq_input.cc b/modules/audio_coding/neteq/tools/encode_neteq_input.cc index 87b987ddb6..b4a940bdaf 100644 --- a/modules/audio_coding/neteq/tools/encode_neteq_input.cc +++ b/modules/audio_coding/neteq/tools/encode_neteq_input.cc @@ -29,12 +29,12 @@ EncodeNetEqInput::EncodeNetEqInput(std::unique_ptr generator, EncodeNetEqInput::~EncodeNetEqInput() = default; -absl::optional EncodeNetEqInput::NextPacketTime() const { +std::optional EncodeNetEqInput::NextPacketTime() const { RTC_DCHECK(packet_data_); return static_cast(packet_data_->time_ms); } -absl::optional EncodeNetEqInput::NextOutputEventTime() const { +std::optional EncodeNetEqInput::NextOutputEventTime() const { return next_output_event_ms_; } @@ -56,7 +56,7 @@ bool EncodeNetEqInput::ended() const { return next_output_event_ms_ > input_duration_ms_; } -absl::optional EncodeNetEqInput::NextHeader() const { +std::optional EncodeNetEqInput::NextHeader() const { RTC_DCHECK(packet_data_); return packet_data_->header; } @@ -72,15 +72,15 @@ void EncodeNetEqInput::CreatePacket() { RTC_DCHECK(!info.send_even_if_empty); int num_blocks = 0; while (packet_data_->payload.size() == 0 && !info.send_even_if_empty) { - const size_t num_samples = rtc::CheckedDivExact( + const size_t num_samples = CheckedDivExact( static_cast(encoder_->SampleRateHz() * kOutputPeriodMs), 1000); info = encoder_->Encode(rtp_timestamp_, generator_->Generate(num_samples), &packet_data_->payload); - rtp_timestamp_ += rtc::dchecked_cast( - num_samples * encoder_->RtpTimestampRateHz() / - encoder_->SampleRateHz()); + rtp_timestamp_ += + dchecked_cast(num_samples * encoder_->RtpTimestampRateHz() / + encoder_->SampleRateHz()); ++num_blocks; } packet_data_->header.timestamp = info.encoded_timestamp; diff --git a/modules/audio_coding/neteq/tools/encode_neteq_input.h b/modules/audio_coding/neteq/tools/encode_neteq_input.h index f2ed4b1cf5..63666e86d8 100644 --- a/modules/audio_coding/neteq/tools/encode_neteq_input.h +++ b/modules/audio_coding/neteq/tools/encode_neteq_input.h @@ -28,7 
+28,7 @@ class EncodeNetEqInput : public NetEqInput { public: virtual ~Generator() = default; // Returns the next num_samples values from the signal generator. - virtual rtc::ArrayView Generate(size_t num_samples) = 0; + virtual ArrayView Generate(size_t num_samples) = 0; }; // The source will end after the given input duration. @@ -37,12 +37,12 @@ class EncodeNetEqInput : public NetEqInput { int64_t input_duration_ms); ~EncodeNetEqInput() override; - absl::optional NextPacketTime() const override; + std::optional NextPacketTime() const override; - absl::optional NextOutputEventTime() const override; + std::optional NextOutputEventTime() const override; - absl::optional NextSetMinimumDelayInfo() const override { - return absl::nullopt; + std::optional NextSetMinimumDelayInfo() const override { + return std::nullopt; } std::unique_ptr PopPacket() override; @@ -53,7 +53,7 @@ class EncodeNetEqInput : public NetEqInput { bool ended() const override; - absl::optional NextHeader() const override; + std::optional NextHeader() const override; private: static constexpr int64_t kOutputPeriodMs = 10; diff --git a/modules/audio_coding/neteq/tools/fake_decode_from_file.cc b/modules/audio_coding/neteq/tools/fake_decode_from_file.cc index ad52239ae3..95ce8e9948 100644 --- a/modules/audio_coding/neteq/tools/fake_decode_from_file.cc +++ b/modules/audio_coding/neteq/tools/fake_decode_from_file.cc @@ -32,8 +32,8 @@ class FakeEncodedFrame : public AudioDecoder::EncodedAudioFrame { size_t Duration() const override { return duration_; } - absl::optional Decode( - rtc::ArrayView decoded) const override { + std::optional Decode( + ArrayView decoded) const override { if (is_dtx_) { std::fill_n(decoded.data(), duration_, 0); return DecodeResult{duration_, AudioDecoder::kComfortNoise}; @@ -82,27 +82,27 @@ int FakeDecodeFromFile::DecodeInternal(const uint8_t* encoded, RTC_DCHECK_EQ(encoded_len, 0); RTC_DCHECK(!encoded); // NetEq always sends nullptr in this case. - const int samples_to_decode = rtc::CheckedDivExact(SampleRateHz(), 100); + const int samples_to_decode = CheckedDivExact(SampleRateHz(), 100); const int total_samples_to_decode = samples_to_decode * (stereo_ ? 2 : 1); std::fill_n(decoded, total_samples_to_decode, 0); *speech_type = kComfortNoise; - return rtc::dchecked_cast(total_samples_to_decode); + return dchecked_cast(total_samples_to_decode); } void FakeDecodeFromFile::PrepareEncoded(uint32_t timestamp, size_t samples, size_t original_payload_size_bytes, - rtc::ArrayView encoded) { + ArrayView encoded) { RTC_CHECK_GE(encoded.size(), 12); ByteWriter::WriteLittleEndian(&encoded[0], timestamp); ByteWriter::WriteLittleEndian(&encoded[4], - rtc::checked_cast(samples)); + checked_cast(samples)); ByteWriter::WriteLittleEndian( - &encoded[8], rtc::checked_cast(original_payload_size_bytes)); + &encoded[8], checked_cast(original_payload_size_bytes)); } std::vector FakeDecodeFromFile::ParsePayload( - rtc::Buffer&& payload, + Buffer&& payload, uint32_t timestamp) { RTC_CHECK_GE(payload.size(), 12); // Parse payload encoded in PrepareEncoded. 
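For reference, FakeDecodeFromFile::PrepareEncoded() in the hunk above packs three little-endian 32-bit values (RTP timestamp, sample count, original payload size) into a 12-byte buffer, which ParsePayload() later checks with RTC_CHECK_GE(payload.size(), 12). A self-contained sketch of that layout using plain C++ shifts instead of WebRTC's ByteWriter helpers (function names here are illustrative, not part of the API):

#include <array>
#include <cstdint>

// Writes |value| as four little-endian bytes starting at |dst|.
void WriteLe32(uint8_t* dst, uint32_t value) {
  dst[0] = value & 0xFF;
  dst[1] = (value >> 8) & 0xFF;
  dst[2] = (value >> 16) & 0xFF;
  dst[3] = (value >> 24) & 0xFF;
}

// Mirrors the three WriteLittleEndian calls in PrepareEncoded(): timestamp at
// offset 0, sample count at offset 4, original payload size at offset 8.
std::array<uint8_t, 12> PackFakePayload(uint32_t timestamp,
                                        uint32_t samples,
                                        uint32_t original_payload_size_bytes) {
  std::array<uint8_t, 12> encoded{};
  WriteLe32(&encoded[0], timestamp);
  WriteLe32(&encoded[4], samples);
  WriteLe32(&encoded[8], original_payload_size_bytes);
  return encoded;
}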
diff --git a/modules/audio_coding/neteq/tools/fake_decode_from_file.h b/modules/audio_coding/neteq/tools/fake_decode_from_file.h index 050a29dc65..ea1610e3ec 100644 --- a/modules/audio_coding/neteq/tools/fake_decode_from_file.h +++ b/modules/audio_coding/neteq/tools/fake_decode_from_file.h @@ -12,8 +12,8 @@ #define MODULES_AUDIO_CODING_NETEQ_TOOLS_FAKE_DECODE_FROM_FILE_H_ #include +#include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio_codecs/audio_decoder.h" #include "modules/audio_coding/neteq/tools/input_audio_file.h" @@ -37,7 +37,7 @@ class FakeDecodeFromFile : public AudioDecoder { ~FakeDecodeFromFile() = default; - std::vector ParsePayload(rtc::Buffer&& payload, + std::vector ParsePayload(Buffer&& payload, uint32_t timestamp) override; void Reset() override {} @@ -63,11 +63,11 @@ class FakeDecodeFromFile : public AudioDecoder { static void PrepareEncoded(uint32_t timestamp, size_t samples, size_t original_payload_size_bytes, - rtc::ArrayView encoded); + ArrayView encoded); private: std::unique_ptr input_; - absl::optional next_timestamp_from_input_; + std::optional next_timestamp_from_input_; const int sample_rate_hz_; const bool stereo_; }; diff --git a/modules/audio_coding/neteq/tools/initial_packet_inserter_neteq_input.cc b/modules/audio_coding/neteq/tools/initial_packet_inserter_neteq_input.cc index 763078eed2..c41ef590bd 100644 --- a/modules/audio_coding/neteq/tools/initial_packet_inserter_neteq_input.cc +++ b/modules/audio_coding/neteq/tools/initial_packet_inserter_neteq_input.cc @@ -27,17 +27,16 @@ InitialPacketInserterNetEqInput::InitialPacketInserterNetEqInput( packets_to_insert_(number_of_initial_packets), sample_rate_hz_(sample_rate_hz) {} -absl::optional InitialPacketInserterNetEqInput::NextPacketTime() - const { +std::optional InitialPacketInserterNetEqInput::NextPacketTime() const { return source_->NextPacketTime(); } -absl::optional InitialPacketInserterNetEqInput::NextOutputEventTime() +std::optional InitialPacketInserterNetEqInput::NextOutputEventTime() const { return source_->NextOutputEventTime(); } -absl::optional +std::optional InitialPacketInserterNetEqInput::NextSetMinimumDelayInfo() const { return source_->NextSetMinimumDelayInfo(); } @@ -55,8 +54,8 @@ InitialPacketInserterNetEqInput::PopPacket() { RTC_CHECK(first_packet_); auto dummy_packet = std::unique_ptr(new PacketData()); dummy_packet->header = first_packet_->header; - dummy_packet->payload = rtc::Buffer(first_packet_->payload.data(), - first_packet_->payload.size()); + dummy_packet->payload = + Buffer(first_packet_->payload.data(), first_packet_->payload.size()); dummy_packet->time_ms = first_packet_->time_ms; dummy_packet->header.sequenceNumber -= packets_to_insert_; // This assumes 20ms per packet. 
@@ -80,7 +79,7 @@ bool InitialPacketInserterNetEqInput::ended() const { return source_->ended(); } -absl::optional InitialPacketInserterNetEqInput::NextHeader() const { +std::optional InitialPacketInserterNetEqInput::NextHeader() const { return source_->NextHeader(); } diff --git a/modules/audio_coding/neteq/tools/initial_packet_inserter_neteq_input.h b/modules/audio_coding/neteq/tools/initial_packet_inserter_neteq_input.h index 7246949956..f72c21c70f 100644 --- a/modules/audio_coding/neteq/tools/initial_packet_inserter_neteq_input.h +++ b/modules/audio_coding/neteq/tools/initial_packet_inserter_neteq_input.h @@ -27,14 +27,14 @@ class InitialPacketInserterNetEqInput final : public NetEqInput { InitialPacketInserterNetEqInput(std::unique_ptr source, int number_of_initial_packets, int sample_rate_hz); - absl::optional NextPacketTime() const override; - absl::optional NextOutputEventTime() const override; - absl::optional NextSetMinimumDelayInfo() const override; + std::optional NextPacketTime() const override; + std::optional NextOutputEventTime() const override; + std::optional NextSetMinimumDelayInfo() const override; std::unique_ptr PopPacket() override; void AdvanceOutputEvent() override; void AdvanceSetMinimumDelay() override; bool ended() const override; - absl::optional NextHeader() const override; + std::optional NextHeader() const override; private: const std::unique_ptr source_; diff --git a/modules/audio_coding/neteq/tools/input_audio_file.h b/modules/audio_coding/neteq/tools/input_audio_file.h index f538b295a3..e885bb39eb 100644 --- a/modules/audio_coding/neteq/tools/input_audio_file.h +++ b/modules/audio_coding/neteq/tools/input_audio_file.h @@ -11,6 +11,7 @@ #ifndef MODULES_AUDIO_CODING_NETEQ_TOOLS_INPUT_AUDIO_FILE_H_ #define MODULES_AUDIO_CODING_NETEQ_TOOLS_INPUT_AUDIO_FILE_H_ +#include #include #include diff --git a/modules/audio_coding/neteq/tools/input_audio_file_unittest.cc b/modules/audio_coding/neteq/tools/input_audio_file_unittest.cc index 52f7ea82a0..54d5b3fbfa 100644 --- a/modules/audio_coding/neteq/tools/input_audio_file_unittest.cc +++ b/modules/audio_coding/neteq/tools/input_audio_file_unittest.cc @@ -23,7 +23,7 @@ TEST(TestInputAudioFile, DuplicateInterleaveSeparateSrcDst) { static const size_t kChannels = 2; int16_t input[kSamples]; for (size_t i = 0; i < kSamples; ++i) { - input[i] = rtc::checked_cast(i); + input[i] = checked_cast(i); } int16_t output[kSamples * kChannels]; InputAudioFile::DuplicateInterleaved(input, kSamples, kChannels, output); @@ -42,7 +42,7 @@ TEST(TestInputAudioFile, DuplicateInterleaveSameSrcDst) { static const size_t kChannels = 5; int16_t input[kSamples * kChannels]; for (size_t i = 0; i < kSamples; ++i) { - input[i] = rtc::checked_cast(i); + input[i] = checked_cast(i); } InputAudioFile::DuplicateInterleaved(input, kSamples, kChannels, input); diff --git a/modules/audio_coding/neteq/tools/neteq_delay_analyzer.cc b/modules/audio_coding/neteq/tools/neteq_delay_analyzer.cc index 9e77457775..71e388066f 100644 --- a/modules/audio_coding/neteq/tools/neteq_delay_analyzer.cc +++ b/modules/audio_coding/neteq/tools/neteq_delay_analyzer.cc @@ -86,7 +86,7 @@ void PrintDelays(const NetEqDelayAnalyzer::Delays& delays, void NetEqDelayAnalyzer::AfterInsertPacket( const test::NetEqInput::PacketData& packet, - NetEq* neteq) { + NetEq* /* neteq */) { data_.insert( std::make_pair(packet.header.timestamp, TimingData(packet.time_ms))); ssrcs_.insert(packet.header.ssrc); @@ -144,9 +144,8 @@ void NetEqDelayAnalyzer::CreateGraphs(Delays* arrival_delay_ms, // 
This loop traverses data_ and populates rtp_timestamps_ms as well as // calculates the base offset. for (auto& d : data_) { - rtp_timestamps_ms.push_back( - static_cast(unwrapper.Unwrap(d.first)) / - rtc::CheckedDivExact(last_sample_rate_hz_, 1000)); + rtp_timestamps_ms.push_back(static_cast(unwrapper.Unwrap(d.first)) / + CheckedDivExact(last_sample_rate_hz_, 1000)); offset = std::min(offset, d.second.arrival_time_ms - rtp_timestamps_ms.back()); } diff --git a/modules/audio_coding/neteq/tools/neteq_delay_analyzer.h b/modules/audio_coding/neteq/tools/neteq_delay_analyzer.h index ffcba5843f..cdf178b463 100644 --- a/modules/audio_coding/neteq/tools/neteq_delay_analyzer.h +++ b/modules/audio_coding/neteq/tools/neteq_delay_analyzer.h @@ -12,12 +12,12 @@ #define MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_DELAY_ANALYZER_H_ #include +#include #include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "modules/audio_coding/neteq/tools/neteq_input.h" #include "modules/audio_coding/neteq/tools/neteq_test.h" @@ -57,10 +57,10 @@ class NetEqDelayAnalyzer : public test::NetEqPostInsertPacket, struct TimingData { explicit TimingData(int64_t at) : arrival_time_ms(at) {} int64_t arrival_time_ms; - absl::optional decode_get_audio_count; - absl::optional sync_delay_ms; - absl::optional target_delay_ms; - absl::optional current_delay_ms; + std::optional decode_get_audio_count; + std::optional sync_delay_ms; + std::optional target_delay_ms; + std::optional current_delay_ms; }; std::map data_; std::vector get_audio_time_ms_; diff --git a/modules/audio_coding/neteq/tools/neteq_event_log_input.cc b/modules/audio_coding/neteq/tools/neteq_event_log_input.cc index 0ca855b626..d42a31b7d9 100644 --- a/modules/audio_coding/neteq/tools/neteq_event_log_input.cc +++ b/modules/audio_coding/neteq/tools/neteq_event_log_input.cc @@ -26,7 +26,7 @@ class NetEqEventLogInput : public NetEqInput { const std::vector& output_events, const std::vector& neteq_set_minimum_delay_events, - absl::optional end_time_ms) + std::optional end_time_ms) : packet_stream_(packet_stream), packet_stream_it_(packet_stream_.begin()), output_events_(output_events), @@ -43,34 +43,34 @@ class NetEqEventLogInput : public NetEqInput { } } - absl::optional NextPacketTime() const override { + std::optional NextPacketTime() const override { if (packet_stream_it_ == packet_stream_.end()) { - return absl::nullopt; + return std::nullopt; } if (end_time_ms_ && packet_stream_it_->rtp.log_time_ms() > *end_time_ms_) { - return absl::nullopt; + return std::nullopt; } return packet_stream_it_->rtp.log_time_ms(); } - absl::optional NextOutputEventTime() const override { + std::optional NextOutputEventTime() const override { if (output_events_it_ == output_events_.end()) { - return absl::nullopt; + return std::nullopt; } if (end_time_ms_ && output_events_it_->log_time_ms() > *end_time_ms_) { - return absl::nullopt; + return std::nullopt; } return output_events_it_->log_time_ms(); } - absl::optional NextSetMinimumDelayInfo() const override { + std::optional NextSetMinimumDelayInfo() const override { if (neteq_set_minimum_delay_events_it_ == neteq_set_minimum_delay_events_.end()) { - return absl::nullopt; + return std::nullopt; } if (end_time_ms_ && neteq_set_minimum_delay_events_it_->log_time_ms() > *end_time_ms_) { - return absl::nullopt; + return std::nullopt; } return SetMinimumDelayInfo( neteq_set_minimum_delay_events_it_->log_time_ms(), @@ -110,9 +110,9 @@ class NetEqEventLogInput : public NetEqInput { bool ended() const 
override { return !NextEventTime(); } - absl::optional NextHeader() const override { + std::optional NextHeader() const override { if (packet_stream_it_ == packet_stream_.end()) { - return absl::nullopt; + return std::nullopt; } return packet_stream_it_->rtp.header; } @@ -126,14 +126,14 @@ class NetEqEventLogInput : public NetEqInput { neteq_set_minimum_delay_events_; std::vector::const_iterator neteq_set_minimum_delay_events_it_; - const absl::optional end_time_ms_; + const std::optional end_time_ms_; }; } // namespace std::unique_ptr CreateNetEqEventLogInput( const ParsedRtcEventLog& parsed_log, - absl::optional ssrc) { + std::optional ssrc) { if (parsed_log.incoming_audio_ssrcs().empty()) { return nullptr; } diff --git a/modules/audio_coding/neteq/tools/neteq_event_log_input.h b/modules/audio_coding/neteq/tools/neteq_event_log_input.h index a84de8469c..cdfd62d80f 100644 --- a/modules/audio_coding/neteq/tools/neteq_event_log_input.h +++ b/modules/audio_coding/neteq/tools/neteq_event_log_input.h @@ -11,8 +11,9 @@ #ifndef MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_EVENT_LOG_INPUT_H_ #define MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_EVENT_LOG_INPUT_H_ +#include + #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "logging/rtc_event_log/rtc_event_log_parser.h" #include "modules/audio_coding/neteq/tools/neteq_input.h" @@ -21,7 +22,7 @@ namespace test { std::unique_ptr CreateNetEqEventLogInput( const ParsedRtcEventLog& parsed_log, - absl::optional ssrc); + std::optional ssrc); } // namespace test } // namespace webrtc diff --git a/modules/audio_coding/neteq/tools/neteq_input.cc b/modules/audio_coding/neteq/tools/neteq_input.cc index 57160b9969..707ce1b4c8 100644 --- a/modules/audio_coding/neteq/tools/neteq_input.cc +++ b/modules/audio_coding/neteq/tools/neteq_input.cc @@ -19,7 +19,7 @@ NetEqInput::PacketData::PacketData() = default; NetEqInput::PacketData::~PacketData() = default; std::string NetEqInput::PacketData::ToString() const { - rtc::StringBuilder ss; + StringBuilder ss; ss << "{" "time_ms: " << static_cast(time_ms) @@ -50,17 +50,17 @@ TimeLimitedNetEqInput::TimeLimitedNetEqInput(std::unique_ptr input, TimeLimitedNetEqInput::~TimeLimitedNetEqInput() = default; -absl::optional TimeLimitedNetEqInput::NextPacketTime() const { - return ended_ ? absl::nullopt : input_->NextPacketTime(); +std::optional TimeLimitedNetEqInput::NextPacketTime() const { + return ended_ ? std::nullopt : input_->NextPacketTime(); } -absl::optional TimeLimitedNetEqInput::NextOutputEventTime() const { - return ended_ ? absl::nullopt : input_->NextOutputEventTime(); +std::optional TimeLimitedNetEqInput::NextOutputEventTime() const { + return ended_ ? std::nullopt : input_->NextOutputEventTime(); } -absl::optional +std::optional TimeLimitedNetEqInput::NextSetMinimumDelayInfo() const { - return ended_ ? absl::nullopt : input_->NextSetMinimumDelayInfo(); + return ended_ ? std::nullopt : input_->NextSetMinimumDelayInfo(); } std::unique_ptr TimeLimitedNetEqInput::PopPacket() { @@ -90,8 +90,8 @@ bool TimeLimitedNetEqInput::ended() const { return ended_ || input_->ended(); } -absl::optional TimeLimitedNetEqInput::NextHeader() const { - return ended_ ? absl::nullopt : input_->NextHeader(); +std::optional TimeLimitedNetEqInput::NextHeader() const { + return ended_ ? 
std::nullopt : input_->NextHeader(); } void TimeLimitedNetEqInput::MaybeSetEnded() { diff --git a/modules/audio_coding/neteq/tools/neteq_input.h b/modules/audio_coding/neteq/tools/neteq_input.h index 56b0212add..0f1b090e94 100644 --- a/modules/audio_coding/neteq/tools/neteq_input.h +++ b/modules/audio_coding/neteq/tools/neteq_input.h @@ -13,9 +13,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "modules/audio_coding/neteq/tools/packet.h" #include "modules/audio_coding/neteq/tools/packet_source.h" #include "rtc_base/buffer.h" @@ -32,7 +32,7 @@ class NetEqInput { std::string ToString() const; RTPHeader header; - rtc::Buffer payload; + Buffer payload; int64_t time_ms; }; @@ -47,21 +47,21 @@ class NetEqInput { // Returns at what time (in ms) NetEq::InsertPacket should be called next, or // empty if the source is out of packets. - virtual absl::optional NextPacketTime() const = 0; + virtual std::optional NextPacketTime() const = 0; // Returns at what time (in ms) NetEq::GetAudio should be called next, or // empty if no more output events are available. - virtual absl::optional NextOutputEventTime() const = 0; + virtual std::optional NextOutputEventTime() const = 0; // Returns the information related to the next NetEq set minimum delay event // if available. - virtual absl::optional NextSetMinimumDelayInfo() + virtual std::optional NextSetMinimumDelayInfo() const = 0; // Returns the time (in ms) for the next event (packet, output or set minimum // delay event) or empty if there are no more events. - absl::optional NextEventTime() const { - absl::optional next_event_time = NextPacketTime(); + std::optional NextEventTime() const { + std::optional next_event_time = NextPacketTime(); const auto next_output_time = NextOutputEventTime(); // Return the minimum of non-empty `a` and `b`, or empty if both are empty. if (next_output_time) { @@ -102,7 +102,7 @@ class NetEqInput { // Returns the RTP header for the next packet, i.e., the packet that will be // delivered next by PopPacket(). 
- virtual absl::optional NextHeader() const = 0; + virtual std::optional NextHeader() const = 0; }; // Wrapper class to impose a time limit on a NetEqInput object, typically @@ -112,20 +112,20 @@ class TimeLimitedNetEqInput : public NetEqInput { public: TimeLimitedNetEqInput(std::unique_ptr input, int64_t duration_ms); ~TimeLimitedNetEqInput() override; - absl::optional NextPacketTime() const override; - absl::optional NextOutputEventTime() const override; - absl::optional NextSetMinimumDelayInfo() const override; + std::optional NextPacketTime() const override; + std::optional NextOutputEventTime() const override; + std::optional NextSetMinimumDelayInfo() const override; std::unique_ptr PopPacket() override; void AdvanceOutputEvent() override; void AdvanceSetMinimumDelay() override; bool ended() const override; - absl::optional NextHeader() const override; + std::optional NextHeader() const override; private: void MaybeSetEnded(); std::unique_ptr input_; - const absl::optional start_time_ms_; + const std::optional start_time_ms_; const int64_t duration_ms_; bool ended_ = false; }; diff --git a/modules/audio_coding/neteq/tools/neteq_performance_test.cc b/modules/audio_coding/neteq/tools/neteq_performance_test.cc index ccaa87b5e8..c3205ded1c 100644 --- a/modules/audio_coding/neteq/tools/neteq_performance_test.cc +++ b/modules/audio_coding/neteq/tools/neteq_performance_test.cc @@ -12,9 +12,11 @@ #include "api/audio/audio_frame.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/neteq/default_neteq_factory.h" #include "api/neteq/neteq.h" #include "modules/audio_coding/codecs/pcm16b/pcm16b.h" -#include "modules/audio_coding/neteq/default_neteq_factory.h" #include "modules/audio_coding/neteq/tools/audio_loop.h" #include "modules/audio_coding/neteq/tools/rtp_generator.h" #include "rtc_base/checks.h" @@ -40,10 +42,9 @@ int64_t NetEqPerformanceTest::Run(int runtime_ms, // Initialize NetEq instance. NetEq::Config config; config.sample_rate_hz = kSampRateHz; - webrtc::Clock* clock = webrtc::Clock::GetRealTimeClock(); - auto audio_decoder_factory = CreateBuiltinAudioDecoderFactory(); - auto neteq = - DefaultNetEqFactory().CreateNetEq(config, audio_decoder_factory, clock); + Environment env = CreateEnvironment(); + auto neteq = DefaultNetEqFactory().Create(env, config, + CreateBuiltinAudioDecoderFactory()); // Register decoder in `neteq`. if (!neteq->RegisterPayloadType(kPayloadType, SdpAudioFormat("l16", kSampRateHz, 1))) @@ -76,7 +77,7 @@ int64_t NetEqPerformanceTest::Run(int runtime_ms, RTC_CHECK_EQ(sizeof(input_payload), payload_len); // Main loop. - int64_t start_time_ms = clock->TimeInMilliseconds(); + int64_t start_time_ms = env.clock().TimeInMilliseconds(); AudioFrame out_frame; while (time_now_ms < runtime_ms) { while (packet_input_time_ms <= time_now_ms) { @@ -87,7 +88,8 @@ int64_t NetEqPerformanceTest::Run(int runtime_ms, } if (!lost) { // Insert packet. 
- int error = neteq->InsertPacket(rtp_header, input_payload); + int error = neteq->InsertPacket(rtp_header, input_payload, + Timestamp::Millis(time_now_ms)); if (error != NetEq::kOK) return -1; } @@ -120,7 +122,7 @@ int64_t NetEqPerformanceTest::Run(int runtime_ms, drift_flipped = true; } } - int64_t end_time_ms = clock->TimeInMilliseconds(); + int64_t end_time_ms = env.clock().TimeInMilliseconds(); return end_time_ms - start_time_ms; } diff --git a/modules/audio_coding/neteq/tools/neteq_quality_test.cc b/modules/audio_coding/neteq/tools/neteq_quality_test.cc index 18e6091f93..4ddef25bab 100644 --- a/modules/audio_coding/neteq/tools/neteq_quality_test.cc +++ b/modules/audio_coding/neteq/tools/neteq_quality_test.cc @@ -16,7 +16,9 @@ #include "absl/flags/flag.h" #include "absl/strings/string_view.h" -#include "modules/audio_coding/neteq/default_neteq_factory.h" +#include "api/environment/environment_factory.h" +#include "api/neteq/default_neteq_factory.h" +#include "api/units/timestamp.h" #include "modules/audio_coding/neteq/tools/neteq_quality_test.h" #include "modules/audio_coding/neteq/tools/output_audio_file.h" #include "modules/audio_coding/neteq/tools/output_wav_file.h" @@ -83,13 +85,13 @@ namespace { std::unique_ptr CreateNetEq( const NetEq::Config& config, - Clock* clock, - const rtc::scoped_refptr& decoder_factory) { - return DefaultNetEqFactory().CreateNetEq(config, decoder_factory, clock); + scoped_refptr decoder_factory) { + return DefaultNetEqFactory().Create(CreateEnvironment(), config, + std::move(decoder_factory)); } const std::string& GetInFilenamePath(absl::string_view file_name) { - std::vector name_parts = rtc::split(file_name, '.'); + std::vector name_parts = split(file_name, '.'); RTC_CHECK_EQ(name_parts.size(), 2); static const std::string path = ::webrtc::test::ResourcePath(name_parts[0], name_parts[1]); @@ -171,7 +173,7 @@ NetEqQualityTest::NetEqQualityTest( int in_sampling_khz, int out_sampling_khz, const SdpAudioFormat& format, - const rtc::scoped_refptr& decoder_factory) + const scoped_refptr& decoder_factory) : audio_format_(format), channels_(absl::GetFlag(FLAGS_channels)), decoded_time_ms_(0), @@ -248,7 +250,7 @@ NetEqQualityTest::NetEqQualityTest( NetEq::Config config; config.sample_rate_hz = out_sampling_khz_ * 1000; - neteq_ = CreateNetEq(config, Clock::GetRealTimeClock(), decoder_factory); + neteq_ = CreateNetEq(config, decoder_factory); max_payload_bytes_ = in_size_samples_ * channels_ * sizeof(int16_t); in_data_.reset(new int16_t[in_size_samples_ * channels_]); } @@ -257,13 +259,13 @@ NetEqQualityTest::~NetEqQualityTest() { log_file_.close(); } -bool NoLoss::Lost(int now_ms) { +bool NoLoss::Lost(int /* now_ms */) { return false; } UniformLoss::UniformLoss(double loss_rate) : loss_rate_(loss_rate) {} -bool UniformLoss::Lost(int now_ms) { +bool UniformLoss::Lost(int /* now_ms */) { int drop_this = rand(); return (drop_this < loss_rate_ * RAND_MAX); } @@ -415,7 +417,8 @@ int NetEqQualityTest::Transmit() { if (!PacketLost()) { int ret = neteq_->InsertPacket( rtp_header_, - rtc::ArrayView(payload_.data(), payload_size_bytes_)); + ArrayView(payload_.data(), payload_size_bytes_), + Timestamp::Millis(packet_input_time_ms)); if (ret != NetEq::kOK) return -1; Log() << "was sent."; diff --git a/modules/audio_coding/neteq/tools/neteq_quality_test.h b/modules/audio_coding/neteq/tools/neteq_quality_test.h index edcb117748..d995095d99 100644 --- a/modules/audio_coding/neteq/tools/neteq_quality_test.h +++ b/modules/audio_coding/neteq/tools/neteq_quality_test.h @@ -96,13 
+96,12 @@ class FixedLossModel : public LossModel { class NetEqQualityTest : public ::testing::Test { protected: - NetEqQualityTest( - int block_duration_ms, - int in_sampling_khz, - int out_sampling_khz, - const SdpAudioFormat& format, - const rtc::scoped_refptr& decoder_factory = - webrtc::CreateBuiltinAudioDecoderFactory()); + NetEqQualityTest(int block_duration_ms, + int in_sampling_khz, + int out_sampling_khz, + const SdpAudioFormat& format, + const scoped_refptr& decoder_factory = + webrtc::CreateBuiltinAudioDecoderFactory()); ~NetEqQualityTest() override; void SetUp() override; @@ -114,7 +113,7 @@ class NetEqQualityTest : public ::testing::Test { // 3. returns the length of the payload (in bytes), virtual int EncodeBlock(int16_t* in_data, size_t block_size_samples, - rtc::Buffer* payload, + Buffer* payload, size_t max_bytes) = 0; // PacketLost(...) determines weather a packet sent at an indicated time gets @@ -163,7 +162,7 @@ class NetEqQualityTest : public ::testing::Test { std::unique_ptr loss_model_; std::unique_ptr in_data_; - rtc::Buffer payload_; + Buffer payload_; AudioFrame out_frame_; RTPHeader rtp_header_; diff --git a/modules/audio_coding/neteq/tools/neteq_replacement_input.cc b/modules/audio_coding/neteq/tools/neteq_replacement_input.cc index 081bd9631f..2fb53d8b3f 100644 --- a/modules/audio_coding/neteq/tools/neteq_replacement_input.cc +++ b/modules/audio_coding/neteq/tools/neteq_replacement_input.cc @@ -30,17 +30,17 @@ NetEqReplacementInput::NetEqReplacementInput( ReplacePacket(); } -absl::optional NetEqReplacementInput::NextPacketTime() const { +std::optional NetEqReplacementInput::NextPacketTime() const { return packet_ - ? absl::optional(static_cast(packet_->time_ms)) - : absl::nullopt; + ? std::optional(static_cast(packet_->time_ms)) + : std::nullopt; } -absl::optional NetEqReplacementInput::NextOutputEventTime() const { +std::optional NetEqReplacementInput::NextOutputEventTime() const { return source_->NextOutputEventTime(); } -absl::optional +std::optional NetEqReplacementInput::NextSetMinimumDelayInfo() const { return source_->NextSetMinimumDelayInfo(); } @@ -72,7 +72,7 @@ bool NetEqReplacementInput::ended() const { return source_->ended(); } -absl::optional NetEqReplacementInput::NextHeader() const { +std::optional NetEqReplacementInput::NextHeader() const { return source_->NextHeader(); } @@ -98,24 +98,26 @@ void NetEqReplacementInput::ReplacePacket() { return; } - absl::optional next_hdr = source_->NextHeader(); + std::optional next_hdr = source_->NextHeader(); RTC_DCHECK(next_hdr); uint8_t payload[12]; - RTC_DCHECK_LE(last_frame_size_timestamps_, 120 * 48); - uint32_t input_frame_size_timestamps = last_frame_size_timestamps_; + constexpr uint32_t kMaxFrameSize = 120 * 48; const uint32_t timestamp_diff = next_hdr->timestamp - packet_->header.timestamp; + uint32_t frame_size = last_frame_size_timestamps_; + if (timestamp_diff > 0) { + frame_size = std::min(frame_size, timestamp_diff); + } const bool opus_dtx = packet_->payload.size() <= 2; if (next_hdr->sequenceNumber == packet_->header.sequenceNumber + 1 && - timestamp_diff <= 120 * 48 && !opus_dtx) { - // Packets are in order and the timestamp diff is less than 5760 samples. - // Accept the timestamp diff as a valid frame size. - input_frame_size_timestamps = timestamp_diff; - last_frame_size_timestamps_ = input_frame_size_timestamps; + timestamp_diff <= kMaxFrameSize && timestamp_diff > 0 && !opus_dtx) { + // Packets are in order and the timestamp diff is valid. 
+ frame_size = timestamp_diff; + last_frame_size_timestamps_ = frame_size; } - RTC_DCHECK_LE(input_frame_size_timestamps, 120 * 48); - FakeDecodeFromFile::PrepareEncoded(packet_->header.timestamp, - input_frame_size_timestamps, + RTC_DCHECK_LE(frame_size, kMaxFrameSize); + RTC_DCHECK_GT(frame_size, 0); + FakeDecodeFromFile::PrepareEncoded(packet_->header.timestamp, frame_size, packet_->payload.size(), payload); packet_->payload.SetData(payload); packet_->header.payloadType = replacement_payload_type_; diff --git a/modules/audio_coding/neteq/tools/neteq_replacement_input.h b/modules/audio_coding/neteq/tools/neteq_replacement_input.h index 23e4beae84..33d8138812 100644 --- a/modules/audio_coding/neteq/tools/neteq_replacement_input.h +++ b/modules/audio_coding/neteq/tools/neteq_replacement_input.h @@ -28,14 +28,14 @@ class NetEqReplacementInput : public NetEqInput { const std::set& comfort_noise_types, const std::set& forbidden_types); - absl::optional NextPacketTime() const override; - absl::optional NextOutputEventTime() const override; - absl::optional NextSetMinimumDelayInfo() const override; + std::optional NextPacketTime() const override; + std::optional NextOutputEventTime() const override; + std::optional NextSetMinimumDelayInfo() const override; std::unique_ptr PopPacket() override; void AdvanceOutputEvent() override; void AdvanceSetMinimumDelay() override; bool ended() const override; - absl::optional NextHeader() const override; + std::optional NextHeader() const override; private: void ReplacePacket(); diff --git a/modules/audio_coding/neteq/tools/neteq_rtp_dump_input.cc b/modules/audio_coding/neteq/tools/neteq_rtp_dump_input.cc index 20e092b079..b3c29c1b8e 100644 --- a/modules/audio_coding/neteq/tools/neteq_rtp_dump_input.cc +++ b/modules/audio_coding/neteq/tools/neteq_rtp_dump_input.cc @@ -22,7 +22,7 @@ class NetEqRtpDumpInput : public NetEqInput { public: NetEqRtpDumpInput(absl::string_view file_name, const std::map& hdr_ext_map, - absl::optional ssrc_filter) + std::optional ssrc_filter) : source_(RtpFileSource::Create(file_name, ssrc_filter)) { for (const auto& ext_pair : hdr_ext_map) { source_->RegisterRtpHeaderExtension(ext_pair.second, ext_pair.first); @@ -30,12 +30,12 @@ class NetEqRtpDumpInput : public NetEqInput { LoadNextPacket(); } - absl::optional NextOutputEventTime() const override { + std::optional NextOutputEventTime() const override { return next_output_event_ms_; } - absl::optional NextSetMinimumDelayInfo() const override { - return absl::nullopt; + std::optional NextSetMinimumDelayInfo() const override { + return std::nullopt; } void AdvanceOutputEvent() override { @@ -43,16 +43,16 @@ class NetEqRtpDumpInput : public NetEqInput { *next_output_event_ms_ += kOutputPeriodMs; } if (!NextPacketTime()) { - next_output_event_ms_ = absl::nullopt; + next_output_event_ms_ = std::nullopt; } } void AdvanceSetMinimumDelay() override {} - absl::optional NextPacketTime() const override { - return packet_ ? absl::optional( + std::optional NextPacketTime() const override { + return packet_ ? std::optional( static_cast(packet_->time_ms())) - : absl::nullopt; + : std::nullopt; } std::unique_ptr PopPacket() override { @@ -78,9 +78,8 @@ class NetEqRtpDumpInput : public NetEqInput { return packet_data; } - absl::optional NextHeader() const override { - return packet_ ? absl::optional(packet_->header()) - : absl::nullopt; + std::optional NextHeader() const override { + return packet_ ? 
std::optional(packet_->header()) : std::nullopt; } bool ended() const override { return !next_output_event_ms_; } @@ -88,7 +87,7 @@ class NetEqRtpDumpInput : public NetEqInput { private: void LoadNextPacket() { packet_ = source_->NextPacket(); } - absl::optional next_output_event_ms_ = 0; + std::optional next_output_event_ms_ = 0; static constexpr int64_t kOutputPeriodMs = 10; std::unique_ptr source_; @@ -100,7 +99,7 @@ class NetEqRtpDumpInput : public NetEqInput { std::unique_ptr CreateNetEqRtpDumpInput( absl::string_view file_name, const std::map& hdr_ext_map, - absl::optional ssrc_filter) { + std::optional ssrc_filter) { return std::make_unique(file_name, hdr_ext_map, ssrc_filter); } diff --git a/modules/audio_coding/neteq/tools/neteq_rtp_dump_input.h b/modules/audio_coding/neteq/tools/neteq_rtp_dump_input.h index e68ebb2c2c..da03732a2a 100644 --- a/modules/audio_coding/neteq/tools/neteq_rtp_dump_input.h +++ b/modules/audio_coding/neteq/tools/neteq_rtp_dump_input.h @@ -13,9 +13,9 @@ #include #include +#include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "modules/audio_coding/neteq/tools/neteq_input.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -25,7 +25,7 @@ namespace test { std::unique_ptr CreateNetEqRtpDumpInput( absl::string_view file_name, const std::map& hdr_ext_map, - absl::optional ssrc_filter); + std::optional ssrc_filter); } // namespace test } // namespace webrtc diff --git a/modules/audio_coding/neteq/tools/neteq_rtpplay.cc b/modules/audio_coding/neteq/tools/neteq_rtpplay.cc index b274069bd4..a05e1b360e 100644 --- a/modules/audio_coding/neteq/tools/neteq_rtpplay.cc +++ b/modules/audio_coding/neteq/tools/neteq_rtpplay.cc @@ -8,18 +8,26 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include +#include +#include +#include +#include +#include #include +#include +#include #include +#include #include "absl/flags/flag.h" #include "absl/flags/parse.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "modules/audio_coding/neteq/tools/neteq_test.h" #include "modules/audio_coding/neteq/tools/neteq_test_factory.h" +#include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" #include "system_wrappers/include/field_trial.h" -#include "test/field_trial.h" using TestConfig = webrtc::test::NetEqTestFactory::Config; @@ -36,7 +44,6 @@ ABSL_FLAG(std::string, " will assign the group Enable to field trial WebRTC-FooFeature."); ABSL_FLAG(int, pcmu, TestConfig::default_pcmu(), "RTP payload type for PCM-u"); ABSL_FLAG(int, pcma, TestConfig::default_pcma(), "RTP payload type for PCM-a"); -ABSL_FLAG(int, ilbc, TestConfig::default_ilbc(), "RTP payload type for iLBC"); ABSL_FLAG(int, isac, TestConfig::default_isac(), "RTP payload type for iSAC"); ABSL_FLAG(int, isac_swb, @@ -79,7 +86,11 @@ ABSL_FLAG(int, ABSL_FLAG(int, red, TestConfig::default_red(), - "RTP payload type for redundant audio (RED)"); + "RTP payload type for redundant audio (RED, 8kHz)"); +ABSL_FLAG(int, + opus_red, + TestConfig::default_opus_red(), + "RTP payload type for redundant audio (RED, 48kHz)"); ABSL_FLAG(int, cn_nb, TestConfig::default_cn_nb(), @@ -212,7 +223,6 @@ void PrintCodecMappingEntry(absl::string_view codec, int flag) { void PrintCodecMapping() { PrintCodecMappingEntry("PCM-u", absl::GetFlag(FLAGS_pcmu)); PrintCodecMappingEntry("PCM-a", absl::GetFlag(FLAGS_pcma)); - PrintCodecMappingEntry("iLBC", absl::GetFlag(FLAGS_ilbc)); PrintCodecMappingEntry("iSAC", absl::GetFlag(FLAGS_isac)); PrintCodecMappingEntry("iSAC-swb (32 kHz)", absl::GetFlag(FLAGS_isac_swb)); PrintCodecMappingEntry("Opus", absl::GetFlag(FLAGS_opus)); @@ -227,7 +237,10 @@ void PrintCodecMapping() { PrintCodecMappingEntry("AVT/DTMF (16 kHz)", absl::GetFlag(FLAGS_avt_16)); PrintCodecMappingEntry("AVT/DTMF (32 kHz)", absl::GetFlag(FLAGS_avt_32)); PrintCodecMappingEntry("AVT/DTMF (48 kHz)", absl::GetFlag(FLAGS_avt_48)); - PrintCodecMappingEntry("redundant audio (RED)", absl::GetFlag(FLAGS_red)); + PrintCodecMappingEntry("redundant audio (RED 8khz)", + absl::GetFlag(FLAGS_red)); + PrintCodecMappingEntry("redundant audio (RED 48khz)", + absl::GetFlag(FLAGS_opus_red)); PrintCodecMappingEntry("comfort noise (8 kHz)", absl::GetFlag(FLAGS_cn_nb)); PrintCodecMappingEntry("comfort noise (16 kHz)", absl::GetFlag(FLAGS_cn_wb)); PrintCodecMappingEntry("comfort noise (32 kHz)", @@ -262,28 +275,28 @@ bool ValidateOutputFilesOptions(bool textlog, return true; } -absl::optional CreateOptionalOutputFileName( +std::optional CreateOptionalOutputFileName( bool output_requested, absl::string_view basename, absl::string_view output_audio_filename, absl::string_view suffix) { if (!output_requested) { - return absl::nullopt; + return std::nullopt; } if (!basename.empty()) { // Override the automatic assignment. - rtc::StringBuilder sb(basename); + webrtc::StringBuilder sb(basename); sb << suffix; return sb.str(); } if (!output_audio_filename.empty()) { // Automatically assign name. 
- rtc::StringBuilder sb(output_audio_filename); + webrtc::StringBuilder sb(output_audio_filename); sb << suffix; return sb.str(); } std::cout << "Error: invalid text log file parameters."; - return absl::nullopt; + return std::nullopt; } } // namespace @@ -314,7 +327,6 @@ int main(int argc, char* argv[]) { output_files_base_name, output_audio_filename)); RTC_CHECK(ValidatePayloadType(absl::GetFlag(FLAGS_pcmu))); RTC_CHECK(ValidatePayloadType(absl::GetFlag(FLAGS_pcma))); - RTC_CHECK(ValidatePayloadType(absl::GetFlag(FLAGS_ilbc))); RTC_CHECK(ValidatePayloadType(absl::GetFlag(FLAGS_isac))); RTC_CHECK(ValidatePayloadType(absl::GetFlag(FLAGS_isac_swb))); RTC_CHECK(ValidatePayloadType(absl::GetFlag(FLAGS_opus))); @@ -328,6 +340,7 @@ int main(int argc, char* argv[]) { RTC_CHECK(ValidatePayloadType(absl::GetFlag(FLAGS_avt_32))); RTC_CHECK(ValidatePayloadType(absl::GetFlag(FLAGS_avt_48))); RTC_CHECK(ValidatePayloadType(absl::GetFlag(FLAGS_red))); + RTC_CHECK(ValidatePayloadType(absl::GetFlag(FLAGS_opus_red))); RTC_CHECK(ValidatePayloadType(absl::GetFlag(FLAGS_cn_nb))); RTC_CHECK(ValidatePayloadType(absl::GetFlag(FLAGS_cn_wb))); RTC_CHECK(ValidatePayloadType(absl::GetFlag(FLAGS_cn_swb32))); @@ -348,7 +361,6 @@ int main(int argc, char* argv[]) { webrtc::test::NetEqTestFactory::Config config; config.pcmu = absl::GetFlag(FLAGS_pcmu); config.pcma = absl::GetFlag(FLAGS_pcma); - config.ilbc = absl::GetFlag(FLAGS_ilbc); config.isac = absl::GetFlag(FLAGS_isac); config.isac_swb = absl::GetFlag(FLAGS_isac_swb); config.opus = absl::GetFlag(FLAGS_opus); @@ -362,6 +374,7 @@ int main(int argc, char* argv[]) { config.avt_32 = absl::GetFlag(FLAGS_avt_32); config.avt_48 = absl::GetFlag(FLAGS_avt_48); config.red = absl::GetFlag(FLAGS_red); + config.opus_red = absl::GetFlag(FLAGS_opus_red); config.cn_nb = absl::GetFlag(FLAGS_cn_nb); config.cn_wb = absl::GetFlag(FLAGS_cn_wb); config.cn_swb32 = absl::GetFlag(FLAGS_cn_swb32); @@ -394,7 +407,7 @@ int main(int argc, char* argv[]) { uint32_t ssrc; RTC_CHECK(ParseSsrc(absl::GetFlag(FLAGS_ssrc), &ssrc)) << "Flag verification has failed."; - config.ssrc_filter = absl::make_optional(ssrc); + config.ssrc_filter = std::make_optional(ssrc); } std::unique_ptr test = diff --git a/modules/audio_coding/neteq/tools/neteq_stats_getter.cc b/modules/audio_coding/neteq/tools/neteq_stats_getter.cc index 6738e494f6..2c4e7bc6a4 100644 --- a/modules/audio_coding/neteq/tools/neteq_stats_getter.cc +++ b/modules/audio_coding/neteq/tools/neteq_stats_getter.cc @@ -23,7 +23,7 @@ namespace test { std::string NetEqStatsGetter::ConcealmentEvent::ToString() const { char ss_buf[256]; - rtc::SimpleStringBuilder ss(ss_buf); + SimpleStringBuilder ss(ss_buf); ss << "ConcealmentEvent duration_ms:" << duration_ms << " event_number:" << concealment_event_number << " time_from_previous_event_end_ms:" << time_from_previous_event_end_ms; @@ -48,7 +48,7 @@ void NetEqStatsGetter::AfterGetAudio(int64_t time_now_ms, // get audio. It is called independently from get audio in practice. 
const auto lifetime_stat = neteq->GetLifetimeStatistics(); if (last_stats_query_time_ms_ == 0 || - rtc::TimeDiff(time_now_ms, last_stats_query_time_ms_) >= + TimeDiff(time_now_ms, last_stats_query_time_ms_) >= stats_query_interval_ms_) { NetEqNetworkStatistics stats; RTC_CHECK_EQ(neteq->NetworkStatistics(&stats), 0); diff --git a/modules/audio_coding/neteq/tools/neteq_test.cc b/modules/audio_coding/neteq/tools/neteq_test.cc index ab9a3e2d9a..67cd892f85 100644 --- a/modules/audio_coding/neteq/tools/neteq_test.cc +++ b/modules/audio_coding/neteq/tools/neteq_test.cc @@ -13,7 +13,11 @@ #include #include -#include "modules/audio_coding/neteq/default_neteq_factory.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/field_trials.h" +#include "api/neteq/default_neteq_factory.h" +#include "api/units/timestamp.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "system_wrappers/include/clock.h" @@ -21,28 +25,28 @@ namespace webrtc { namespace test { namespace { -absl::optional ActionToOperations( - absl::optional a) { +std::optional ActionToOperations( + std::optional a) { if (!a) { - return absl::nullopt; + return std::nullopt; } switch (*a) { case NetEqSimulator::Action::kAccelerate: - return absl::make_optional(NetEq::Operation::kAccelerate); + return std::make_optional(NetEq::Operation::kAccelerate); case NetEqSimulator::Action::kExpand: - return absl::make_optional(NetEq::Operation::kExpand); + return std::make_optional(NetEq::Operation::kExpand); case NetEqSimulator::Action::kNormal: - return absl::make_optional(NetEq::Operation::kNormal); + return std::make_optional(NetEq::Operation::kNormal); case NetEqSimulator::Action::kPreemptiveExpand: - return absl::make_optional(NetEq::Operation::kPreemptiveExpand); + return std::make_optional(NetEq::Operation::kPreemptiveExpand); } } std::unique_ptr CreateNetEq( + const Environment& env, const NetEq::Config& config, - Clock* clock, - const rtc::scoped_refptr& decoder_factory) { - return DefaultNetEqFactory().CreateNetEq(config, decoder_factory, clock); + scoped_refptr decoder_factory) { + return DefaultNetEqFactory().Create(env, config, std::move(decoder_factory)); } } // namespace @@ -60,24 +64,27 @@ void DefaultNetEqTestErrorCallback::OnGetAudioError() { } NetEqTest::NetEqTest(const NetEq::Config& config, - rtc::scoped_refptr decoder_factory, + scoped_refptr decoder_factory, const DecoderMap& codecs, std::unique_ptr text_log, NetEqFactory* neteq_factory, std::unique_ptr input, std::unique_ptr output, - Callbacks callbacks) + Callbacks callbacks, + absl::string_view field_trials) : input_(std::move(input)), clock_(Timestamp::Millis(input_->NextEventTime().value_or(0))), - neteq_(neteq_factory - ? neteq_factory->CreateNetEq(config, decoder_factory, &clock_) - : CreateNetEq(config, &clock_, decoder_factory)), + env_(CreateEnvironment( + &clock_, + FieldTrials::CreateNoGlobal(std::string(field_trials)))), + neteq_( + neteq_factory + ? 
neteq_factory->Create(env_, config, std::move(decoder_factory)) + : CreateNetEq(env_, config, std::move(decoder_factory))), output_(std::move(output)), callbacks_(callbacks), sample_rate_hz_(config.sample_rate_hz), text_log_(std::move(text_log)) { - RTC_CHECK(!config.enable_muted_state) - << "The code does not handle enable_muted_state"; RegisterDecoders(codecs); } @@ -115,8 +122,8 @@ NetEqTest::SimulationStepResult NetEqTest::RunToNextGetAudio() { packet_data->payload.size() - packet_data->header.paddingLength; if (payload_data_length != 0) { int error = neteq_->InsertPacket( - packet_data->header, - rtc::ArrayView(packet_data->payload)); + packet_data->header, ArrayView(packet_data->payload), + Timestamp::Millis(time_now_ms)); if (error != NetEq::kOK && callbacks_.error_callback) { callbacks_.error_callback->OnInsertPacketError(*packet_data); } @@ -158,9 +165,9 @@ NetEqTest::SimulationStepResult NetEqTest::RunToNextGetAudio() { << ", buffer size: " << std::setw(4) << ops_state.current_buffer_size_ms << std::endl; } - last_packet_time_ms_ = absl::make_optional(time_now_ms); + last_packet_time_ms_ = std::make_optional(time_now_ms); last_packet_timestamp_ = - absl::make_optional(packet_data->header.timestamp); + std::make_optional(packet_data->header.timestamp); } if (input_->NextSetMinimumDelayInfo().has_value() && @@ -177,11 +184,9 @@ NetEqTest::SimulationStepResult NetEqTest::RunToNextGetAudio() { callbacks_.get_audio_callback->BeforeGetAudio(neteq_.get()); } AudioFrame out_frame; - bool muted; - int error = neteq_->GetAudio(&out_frame, &muted, nullptr, + int error = neteq_->GetAudio(&out_frame, nullptr, nullptr, ActionToOperations(next_action_)); - next_action_ = absl::nullopt; - RTC_CHECK(!muted) << "The code does not handle enable_muted_state"; + next_action_ = std::nullopt; if (error != NetEq::kOK) { if (callbacks_.error_callback) { callbacks_.error_callback->OnGetAudioError(); @@ -190,8 +195,8 @@ NetEqTest::SimulationStepResult NetEqTest::RunToNextGetAudio() { sample_rate_hz_ = out_frame.sample_rate_hz_; } if (callbacks_.get_audio_callback) { - callbacks_.get_audio_callback->AfterGetAudio(time_now_ms, out_frame, - muted, neteq_.get()); + callbacks_.get_audio_callback->AfterGetAudio( + time_now_ms, out_frame, out_frame.muted(), neteq_.get()); } if (output_) { @@ -286,7 +291,7 @@ NetEqTest::SimulationStepResult NetEqTest::RunToNextGetAudio() { } void NetEqTest::SetNextAction(NetEqTest::Action next_operation) { - next_action_ = absl::optional(next_operation); + next_action_ = std::optional(next_operation); } NetEqTest::NetEqState NetEqTest::GetNetEqState() { @@ -306,11 +311,9 @@ NetEqLifetimeStatistics NetEqTest::LifetimeStats() const { NetEqTest::DecoderMap NetEqTest::StandardDecoderMap() { DecoderMap codecs = {{0, SdpAudioFormat("pcmu", 8000, 1)}, {8, SdpAudioFormat("pcma", 8000, 1)}, -#ifdef WEBRTC_CODEC_ILBC - {102, SdpAudioFormat("ilbc", 8000, 1)}, -#endif #ifdef WEBRTC_CODEC_OPUS {111, SdpAudioFormat("opus", 48000, 2)}, + {63, SdpAudioFormat("red", 48000, 2)}, #endif {93, SdpAudioFormat("l16", 8000, 1)}, {94, SdpAudioFormat("l16", 16000, 1)}, diff --git a/modules/audio_coding/neteq/tools/neteq_test.h b/modules/audio_coding/neteq/tools/neteq_test.h index 1d3eeda453..b576b3fea8 100644 --- a/modules/audio_coding/neteq/tools/neteq_test.h +++ b/modules/audio_coding/neteq/tools/neteq_test.h @@ -14,11 +14,12 @@ #include #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/audio_codecs/audio_decoder_factory.h" +#include 
"api/environment/environment.h" #include "api/neteq/neteq.h" #include "api/neteq/neteq_factory.h" #include "api/test/neteq_simulator.h" @@ -32,7 +33,8 @@ namespace test { class NetEqTestErrorCallback { public: virtual ~NetEqTestErrorCallback() = default; - virtual void OnInsertPacketError(const NetEqInput::PacketData& packet) {} + virtual void OnInsertPacketError(const NetEqInput::PacketData& /* packet */) { + } virtual void OnGetAudioError() {} }; @@ -81,13 +83,14 @@ class NetEqTest : public NetEqSimulator { // Sets up the test with given configuration, codec mappings, input, ouput, // and callback objects for error reporting. NetEqTest(const NetEq::Config& config, - rtc::scoped_refptr decoder_factory, + scoped_refptr decoder_factory, const DecoderMap& codecs, std::unique_ptr text_log, NetEqFactory* neteq_factory, std::unique_ptr input, std::unique_ptr output, - Callbacks callbacks); + Callbacks callbacks, + absl::string_view field_trials = ""); ~NetEqTest() override; @@ -100,6 +103,7 @@ class NetEqTest : public NetEqSimulator { void SetNextAction(Action next_operation) override; NetEqState GetNetEqState() override; + NetEq* GetNetEq() override { return neteq_.get(); } // Returns the statistics from NetEq. NetEqNetworkStatistics SimulationStats(); @@ -111,8 +115,9 @@ class NetEqTest : public NetEqSimulator { void RegisterDecoders(const DecoderMap& codecs); std::unique_ptr input_; SimulatedClock clock_; - absl::optional next_action_; - absl::optional last_packet_time_ms_; + const Environment env_; + std::optional next_action_; + std::optional last_packet_time_ms_; std::unique_ptr neteq_; std::unique_ptr output_; Callbacks callbacks_; @@ -120,7 +125,7 @@ class NetEqTest : public NetEqSimulator { NetEqState current_state_; NetEqOperationsAndState prev_ops_state_; NetEqLifetimeStatistics prev_lifetime_stats_; - absl::optional last_packet_timestamp_; + std::optional last_packet_timestamp_; std::unique_ptr text_log_; }; diff --git a/modules/audio_coding/neteq/tools/neteq_test_factory.cc b/modules/audio_coding/neteq/tools/neteq_test_factory.cc index 981504b239..f74d211089 100644 --- a/modules/audio_coding/neteq/tools/neteq_test_factory.cc +++ b/modules/audio_coding/neteq/tools/neteq_test_factory.cc @@ -10,27 +10,39 @@ #include "modules/audio_coding/neteq/tools/neteq_test_factory.h" -#include #include // For ULONG_MAX returned by strtoul. #include #include // For strtoul. 
+#include #include #include +#include #include +#include #include #include #include #include "absl/strings/string_view.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/environment/environment.h" +#include "api/make_ref_counted.h" #include "api/neteq/neteq.h" +#include "api/neteq/neteq_factory.h" +#include "api/rtp_headers.h" +#include "api/scoped_refptr.h" +#include "logging/rtc_event_log/rtc_event_log_parser.h" #include "modules/audio_coding/neteq/tools/audio_sink.h" #include "modules/audio_coding/neteq/tools/fake_decode_from_file.h" #include "modules/audio_coding/neteq/tools/initial_packet_inserter_neteq_input.h" #include "modules/audio_coding/neteq/tools/input_audio_file.h" -#include "modules/audio_coding/neteq/tools/neteq_delay_analyzer.h" #include "modules/audio_coding/neteq/tools/neteq_event_log_input.h" +#include "modules/audio_coding/neteq/tools/neteq_input.h" #include "modules/audio_coding/neteq/tools/neteq_replacement_input.h" #include "modules/audio_coding/neteq/tools/neteq_rtp_dump_input.h" #include "modules/audio_coding/neteq/tools/neteq_stats_getter.h" @@ -39,6 +51,7 @@ #include "modules/audio_coding/neteq/tools/output_audio_file.h" #include "modules/audio_coding/neteq/tools/output_wav_file.h" #include "modules/audio_coding/neteq/tools/rtp_file_source.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/checks.h" #include "test/function_audio_decoder_factory.h" #include "test/testsupport/file_utils.h" @@ -47,12 +60,12 @@ namespace webrtc { namespace test { namespace { -absl::optional CodecSampleRate( +std::optional CodecSampleRate( uint8_t payload_type, webrtc::test::NetEqTestFactory::Config config) { if (payload_type == config.pcmu || payload_type == config.pcma || - payload_type == config.ilbc || payload_type == config.pcm16b || - payload_type == config.cn_nb || payload_type == config.avt) + payload_type == config.pcm16b || payload_type == config.cn_nb || + payload_type == config.avt) return 8000; if (payload_type == config.isac || payload_type == config.pcm16b_wb || payload_type == config.g722 || payload_type == config.cn_wb || @@ -62,11 +75,12 @@ absl::optional CodecSampleRate( payload_type == config.cn_swb32 || payload_type == config.avt_32) return 32000; if (payload_type == config.opus || payload_type == config.pcm16b_swb48 || - payload_type == config.cn_swb48 || payload_type == config.avt_48) + payload_type == config.cn_swb48 || payload_type == config.avt_48 || + payload_type == config.opus_red) return 48000; if (payload_type == config.red) return 0; - return absl::nullopt; + return std::nullopt; } } // namespace @@ -98,7 +112,7 @@ class SsrcSwitchDetector : public NetEqPostInsertPacket { private: NetEqPostInsertPacket* other_callback_; - absl::optional last_ssrc_; + std::optional last_ssrc_; }; NetEqTestFactory::NetEqTestFactory() = default; @@ -172,11 +186,6 @@ std::unique_ptr NetEqTestFactory::InitializeTest( return nullptr; } - if (!config.field_trial_string.empty()) { - field_trials_ = - std::make_unique(config.field_trial_string); - } - // Skip some initial events/packets if requested. if (config.skip_get_audio_events > 0) { std::cout << "Skipping " << config.skip_get_audio_events @@ -202,9 +211,9 @@ std::unique_ptr NetEqTestFactory::InitializeTest( } // Check the sample rate. 
- absl::optional sample_rate_hz; + std::optional sample_rate_hz; std::set> discarded_pt_and_ssrc; - while (absl::optional first_rtp_header = input->NextHeader()) { + while (std::optional first_rtp_header = input->NextHeader()) { RTC_DCHECK(first_rtp_header); sample_rate_hz = CodecSampleRate(first_rtp_header->payloadType, config); if (sample_rate_hz) { @@ -263,7 +272,7 @@ std::unique_ptr NetEqTestFactory::InitializeTest( NetEqTest::DecoderMap codecs = NetEqTest::StandardDecoderMap(); - rtc::scoped_refptr decoder_factory = + scoped_refptr decoder_factory = CreateBuiltinAudioDecoderFactory(); // Check if a replacement audio file was provided. @@ -289,20 +298,20 @@ std::unique_ptr NetEqTestFactory::InitializeTest( std::set cn_types = std_set_int32_to_uint8( {config.cn_nb, config.cn_wb, config.cn_swb32, config.cn_swb48}); - std::set forbidden_types = - std_set_int32_to_uint8({config.g722, config.red, config.avt, - config.avt_16, config.avt_32, config.avt_48}); + std::set forbidden_types = std_set_int32_to_uint8( + {config.g722, config.red, config.opus_red, config.avt, config.avt_16, + config.avt_32, config.avt_48}); input.reset(new NetEqReplacementInput(std::move(input), replacement_pt, cn_types, forbidden_types)); // Note that capture-by-copy implies that the lambda captures the value of // decoder_factory before it's reassigned on the left-hand side. - decoder_factory = rtc::make_ref_counted( + decoder_factory = make_ref_counted( [decoder_factory, config]( - const SdpAudioFormat& format, - absl::optional codec_pair_id) { + const Environment& env, const SdpAudioFormat& format, + std::optional codec_pair_id) { std::unique_ptr decoder = - decoder_factory->MakeAudioDecoder(format, codec_pair_id); + decoder_factory->Create(env, format, codec_pair_id); if (!decoder && format.name == "replacement") { decoder = std::make_unique( std::make_unique(config.replacement_audio_file), @@ -345,9 +354,10 @@ std::unique_ptr NetEqTestFactory::InitializeTest( neteq_config.sample_rate_hz = *sample_rate_hz; neteq_config.max_packets_in_buffer = config.max_nr_packets_in_buffer; neteq_config.enable_fast_accelerate = config.enable_fast_accelerate; - return std::make_unique( - neteq_config, decoder_factory, codecs, std::move(text_log), factory, - std::move(input), std::move(output), callbacks); + return std::make_unique(neteq_config, decoder_factory, codecs, + std::move(text_log), factory, + std::move(input), std::move(output), + callbacks, config.field_trial_string); } } // namespace test diff --git a/modules/audio_coding/neteq/tools/neteq_test_factory.h b/modules/audio_coding/neteq/tools/neteq_test_factory.h index 96ce0b4334..c7564fa718 100644 --- a/modules/audio_coding/neteq/tools/neteq_test_factory.h +++ b/modules/audio_coding/neteq/tools/neteq_test_factory.h @@ -11,13 +11,15 @@ #ifndef MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_TEST_FACTORY_H_ #define MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_TEST_FACTORY_H_ +#include #include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/neteq/neteq_factory.h" +#include "modules/audio_coding/neteq/tools/neteq_input.h" #include "modules/audio_coding/neteq/tools/neteq_test.h" -#include "test/field_trial.h" namespace webrtc { namespace test { @@ -42,9 +44,6 @@ class NetEqTestFactory { // RTP payload type for PCM-a. static constexpr int default_pcma() { return 8; } int pcma = default_pcma(); - // RTP payload type for iLBC. 
- static constexpr int default_ilbc() { return 102; } - int ilbc = default_ilbc(); // RTP payload type for iSAC. static constexpr int default_isac() { return 103; } int isac = default_isac(); @@ -84,6 +83,9 @@ class NetEqTestFactory { // RTP payload type for redundant audio (RED). static constexpr int default_red() { return 117; } int red = default_red(); + + static constexpr int default_opus_red() { return 63; } + int opus_red = default_opus_red(); // RTP payload type for comfort noise (8 kHz). static constexpr int default_cn_nb() { return 13; } int cn_nb = default_cn_nb(); @@ -99,7 +101,7 @@ class NetEqTestFactory { // A PCM file that will be used to populate dummy RTP packets. std::string replacement_audio_file; // Only use packets with this SSRC. - absl::optional ssrc_filter; + std::optional ssrc_filter; // Extension ID for audio level (RFC 6464). static constexpr int default_audio_level() { return 1; } int audio_level = default_audio_level(); @@ -137,11 +139,11 @@ class NetEqTestFactory { bool textlog = false; // If specified and `textlog` is true, the output of `textlog` is written to // the specified file name. - absl::optional textlog_filename; + std::optional textlog_filename; // Base name for the output script files for plotting the delay profile. - absl::optional plot_scripts_basename; + std::optional plot_scripts_basename; // Path to the output audio file. - absl::optional output_audio_filename; + std::optional output_audio_filename; // Field trials to use during the simulation. std::string field_trial_string; }; @@ -161,9 +163,6 @@ class NetEqTestFactory { const Config& config); std::unique_ptr ssrc_switch_detector_; std::unique_ptr stats_plotter_; - // The field trials are stored in the test factory, because neteq_test is not - // in a testonly target, and therefore cannot use ScopedFieldTrials. - std::unique_ptr field_trials_; }; } // namespace test diff --git a/modules/audio_coding/neteq/tools/packet.cc b/modules/audio_coding/neteq/tools/packet.cc index e540173f43..42480e0115 100644 --- a/modules/audio_coding/neteq/tools/packet.cc +++ b/modules/audio_coding/neteq/tools/packet.cc @@ -18,7 +18,7 @@ namespace webrtc { namespace test { -Packet::Packet(rtc::CopyOnWriteBuffer packet, +Packet::Packet(CopyOnWriteBuffer packet, size_t virtual_packet_length_bytes, double time_ms, const RtpHeaderExtensionMap* extension_map) @@ -98,7 +98,7 @@ bool Packet::ParseHeader(const RtpHeaderExtensionMap* extension_map) { size_t padding_size = 0; if (padding) { // Clear the padding bit to prevent failure when rtp payload is omited. 
-    rtc::CopyOnWriteBuffer packet(packet_);
+    CopyOnWriteBuffer packet(packet_);
     packet.MutableData()[0] &= ~0b0010'0000;
     if (!rtp_packet.Parse(std::move(packet))) {
       return false;
     }
@@ -114,8 +114,8 @@ bool Packet::ParseHeader(const RtpHeaderExtensionMap* extension_map) {
       return false;
     }
   }
-  rtp_payload_ = rtc::MakeArrayView(packet_.data() + rtp_packet.headers_size(),
-                                    rtp_packet.payload_size() - padding_size);
+  rtp_payload_ = MakeArrayView(packet_.data() + rtp_packet.headers_size(),
+                               rtp_packet.payload_size() - padding_size);
   rtp_packet.GetHeader(&header_);
   RTC_CHECK_GE(virtual_packet_length_bytes_, rtp_packet.size());
diff --git a/modules/audio_coding/neteq/tools/packet.h b/modules/audio_coding/neteq/tools/packet.h
index a4b3da9a4b..a94a0fdc5e 100644
--- a/modules/audio_coding/neteq/tools/packet.h
+++ b/modules/audio_coding/neteq/tools/packet.h
@@ -31,12 +31,12 @@ class Packet {
  // that only contain the RTP headers, and no payload (a.k.a RTP dummy files or
  // RTP light). The `virtual_packet_length_bytes` tells what size the packet
  // had on wire, including the now discarded payload.
-  Packet(rtc::CopyOnWriteBuffer packet,
+  Packet(CopyOnWriteBuffer packet,
         size_t virtual_packet_length_bytes,
         double time_ms,
         const RtpHeaderExtensionMap* extension_map = nullptr);
-  Packet(rtc::CopyOnWriteBuffer packet,
+  Packet(CopyOnWriteBuffer packet,
         double time_ms,
         const RtpHeaderExtensionMap* extension_map = nullptr)
      : Packet(packet, packet.size(), time_ms, extension_map) {}
@@ -90,8 +90,8 @@ class Packet {
   void CopyToHeader(RTPHeader* destination) const;
   RTPHeader header_;
-  const rtc::CopyOnWriteBuffer packet_;
-  rtc::ArrayView rtp_payload_;  // Empty for dummy RTP packets.
+  const CopyOnWriteBuffer packet_;
+  ArrayView rtp_payload_;  // Empty for dummy RTP packets.
   // Virtual lengths are used when parsing RTP header files (dummy RTP files).
   const size_t virtual_packet_length_bytes_;
   size_t virtual_payload_length_bytes_ = 0;
diff --git a/modules/audio_coding/neteq/tools/packet_unittest.cc b/modules/audio_coding/neteq/tools/packet_unittest.cc
index 69cf56b529..a935ff6477 100644
--- a/modules/audio_coding/neteq/tools/packet_unittest.cc
+++ b/modules/audio_coding/neteq/tools/packet_unittest.cc
@@ -28,7 +28,7 @@ void MakeRtpHeader(int payload_type,
   rtp_data[0] = 0x80;
   rtp_data[1] = static_cast(payload_type);
   rtp_data[2] = (seq_number >> 8) & 0xFF;
-  rtp_data[3] = (seq_number)&0xFF;
+  rtp_data[3] = (seq_number) & 0xFF;
   rtp_data[4] = timestamp >> 24;
   rtp_data[5] = (timestamp >> 16) & 0xFF;
   rtp_data[6] = (timestamp >> 8) & 0xFF;
@@ -42,7 +42,7 @@ TEST(TestPacket, RegularPacket) {
   const size_t kPacketLengthBytes = 100;
-  rtc::CopyOnWriteBuffer packet_memory(kPacketLengthBytes);
+  CopyOnWriteBuffer packet_memory(kPacketLengthBytes);
   const uint8_t kPayloadType = 17;
   const uint16_t kSequenceNumber = 4711;
   const uint32_t kTimestamp = 47114711;
@@ -69,7 +69,7 @@ TEST(TestPacket, RegularPacket) {
 TEST(TestPacket, DummyPacket) {
   const size_t kPacketLengthBytes = kHeaderLengthBytes;  // Only RTP header.
   const size_t kVirtualPacketLengthBytes = 100;
-  rtc::CopyOnWriteBuffer packet_memory(kPacketLengthBytes);
+  CopyOnWriteBuffer packet_memory(kPacketLengthBytes);
   const uint8_t kPayloadType = 17;
   const uint16_t kSequenceNumber = 4711;
   const uint32_t kTimestamp = 47114711;
@@ -97,7 +97,7 @@ TEST(TestPacket, DummyPacket) {
 TEST(TestPacket, DummyPaddingPacket) {
   const size_t kPacketLengthBytes = kHeaderLengthBytes;  // Only RTP header.
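// Illustrative sketch, not part of this patch: the `&= ~0b0010'0000` in the
// packet.cc hunk above clears the RTP padding (P) bit, which RFC 3550 places at
// mask 0x20 of the first header byte (version:2, padding:1, extension:1, CC:4).
// Standalone demonstration of setting and clearing that bit:
#include <cstdint>
#include <cstdio>

constexpr uint8_t kRtpPaddingBit = 0b0010'0000;

int main() {
  uint8_t first_byte = 0x80;     // Version 2, padding bit not set.
  first_byte |= kRtpPaddingBit;  // Mark the packet as padded.
  std::printf("padded:   0x%02x\n", first_byte);  // 0xa0
  first_byte &= ~kRtpPaddingBit;  // Clear it again, as ParseHeader() does
                                  // before re-parsing a payload-less packet.
  std::printf("unpadded: 0x%02x\n", first_byte);  // 0x80
}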
const size_t kVirtualPacketLengthBytes = 100; - rtc::CopyOnWriteBuffer packet_memory(kPacketLengthBytes); + CopyOnWriteBuffer packet_memory(kPacketLengthBytes); const uint8_t kPayloadType = 17; const uint16_t kSequenceNumber = 4711; const uint32_t kTimestamp = 47114711; @@ -160,7 +160,7 @@ int MakeRedHeader(int payload_type, TEST(TestPacket, RED) { const size_t kPacketLengthBytes = 100; - rtc::CopyOnWriteBuffer packet_memory(kPacketLengthBytes); + CopyOnWriteBuffer packet_memory(kPacketLengthBytes); const uint8_t kRedPayloadType = 17; const uint16_t kSequenceNumber = 4711; const uint32_t kTimestamp = 47114711; diff --git a/modules/audio_coding/neteq/tools/rtp_analyze.cc b/modules/audio_coding/neteq/tools/rtp_analyze.cc index 7ecf925ebb..8e94cbdc05 100644 --- a/modules/audio_coding/neteq/tools/rtp_analyze.cc +++ b/modules/audio_coding/neteq/tools/rtp_analyze.cc @@ -110,9 +110,10 @@ int main(int argc, char* argv[]) { static_cast(packet->virtual_packet_length_bytes()), packet->header().payloadType, packet->header().markerBit, packet->header().ssrc); - if (print_audio_level && packet->header().extension.hasAudioLevel) { - fprintf(out_file, " %5u (%1i)", packet->header().extension.audioLevel, - packet->header().extension.voiceActivity); + if (print_audio_level && packet->header().extension.audio_level()) { + fprintf(out_file, " %5d (%1i)", + packet->header().extension.audio_level()->level(), + packet->header().extension.audio_level()->voice_activity()); } if (print_abs_send_time && packet->header().extension.hasAbsoluteSendTime) { if (cycles == -1) { diff --git a/modules/audio_coding/neteq/tools/rtp_encode.cc b/modules/audio_coding/neteq/tools/rtp_encode.cc index 0dfa8764b2..93207bc15b 100644 --- a/modules/audio_coding/neteq/tools/rtp_encode.cc +++ b/modules/audio_coding/neteq/tools/rtp_encode.cc @@ -10,30 +10,32 @@ #include +#include +#include +#include + #ifdef WIN32 #include #endif -#if defined(WEBRTC_LINUX) || defined(WEBRTC_FUCHSIA) -#include -#endif -#include #include #include #include #include "absl/flags/flag.h" #include "absl/flags/parse.h" -#include "absl/memory/memory.h" #include "api/audio/audio_frame.h" #include "api/audio_codecs/L16/audio_encoder_L16.h" #include "api/audio_codecs/g711/audio_encoder_g711.h" #include "api/audio_codecs/g722/audio_encoder_g722.h" -#include "api/audio_codecs/ilbc/audio_encoder_ilbc.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" +#include "api/environment/environment_factory.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" #include "modules/audio_coding/include/audio_coding_module.h" +#include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/audio_coding/neteq/tools/input_audio_file.h" +#include "rtc_base/checks.h" +#include "rtc_base/ip_address.h" #include "rtc_base/numerics/safe_conversions.h" ABSL_FLAG(bool, list_codecs, false, "Enumerate all codecs"); @@ -71,7 +73,6 @@ enum class CodecType { kPcm16b16, kPcm16b32, kPcm16b48, - kIlbc, }; struct CodecTypeAndInfo { @@ -93,8 +94,7 @@ const std::map& CodecList() { {"pcm16b_8", {CodecType::kPcm16b8, 93, false}}, {"pcm16b_16", {CodecType::kPcm16b16, 94, false}}, {"pcm16b_32", {CodecType::kPcm16b32, 95, false}}, - {"pcm16b_48", {CodecType::kPcm16b48, 96, false}}, - {"ilbc", {CodecType::kIlbc, 102, false}}}; + {"pcm16b_48", {CodecType::kPcm16b48, 96, false}}}; return *codec_list; } @@ -107,22 +107,22 @@ class Packetizer : public AudioPacketizationCallback { ssrc_(ssrc), timestamp_rate_hz_(timestamp_rate_hz) {} - int32_t SendData(AudioFrameType 
frame_type, + int32_t SendData(AudioFrameType /* frame_type */, uint8_t payload_type, uint32_t timestamp, const uint8_t* payload_data, size_t payload_len_bytes, - int64_t absolute_capture_timestamp_ms) override { + int64_t /* absolute_capture_timestamp_ms */) override { if (payload_len_bytes == 0) { return 0; } constexpr size_t kRtpHeaderLength = 12; constexpr size_t kRtpDumpHeaderLength = 8; - const uint16_t length = htons(rtc::checked_cast( + const uint16_t length = htons(checked_cast( kRtpHeaderLength + kRtpDumpHeaderLength + payload_len_bytes)); - const uint16_t plen = htons( - rtc::checked_cast(kRtpHeaderLength + payload_len_bytes)); + const uint16_t plen = + htons(checked_cast(kRtpHeaderLength + payload_len_bytes)); const uint32_t offset = htonl(timestamp / (timestamp_rate_hz_ / 1000)); RTC_CHECK_EQ(fwrite(&length, sizeof(uint16_t), 1, out_file_), 1); RTC_CHECK_EQ(fwrite(&plen, sizeof(uint16_t), 1, out_file_), 1); @@ -206,7 +206,8 @@ std::unique_ptr CreateEncoder(CodecType codec_type, config.dtx_enabled = absl::GetFlag(FLAGS_dtx); config.fec_enabled = absl::GetFlag(FLAGS_fec); RTC_CHECK(config.IsOk()); - return AudioEncoderOpus::MakeAudioEncoder(config, payload_type); + return AudioEncoderOpus::MakeAudioEncoder(CreateEnvironment(), config, + {.payload_type = payload_type}); } case CodecType::kPcmU: @@ -231,11 +232,6 @@ std::unique_ptr CreateEncoder(CodecType codec_type, return AudioEncoderL16::MakeAudioEncoder(Pcm16bConfig(codec_type), payload_type); } - - case CodecType::kIlbc: { - return AudioEncoderIlbc::MakeAudioEncoder( - GetCodecConfig(), payload_type); - } } RTC_DCHECK_NOTREACHED(); return nullptr; diff --git a/modules/audio_coding/neteq/tools/rtp_file_source.cc b/modules/audio_coding/neteq/tools/rtp_file_source.cc index 7a8daef945..437b560543 100644 --- a/modules/audio_coding/neteq/tools/rtp_file_source.cc +++ b/modules/audio_coding/neteq/tools/rtp_file_source.cc @@ -12,22 +12,23 @@ #include -#include "absl/strings/string_view.h" -#ifndef WIN32 -#include -#endif - +#include #include +#include +#include "absl/strings/string_view.h" #include "modules/audio_coding/neteq/tools/packet.h" +#include "modules/audio_coding/neteq/tools/packet_source.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "test/rtp_file_reader.h" namespace webrtc { namespace test { RtpFileSource* RtpFileSource::Create(absl::string_view file_name, - absl::optional ssrc_filter) { + std::optional ssrc_filter) { RtpFileSource* source = new RtpFileSource(ssrc_filter); RTC_CHECK(source->OpenFile(file_name)); return source; @@ -64,7 +65,7 @@ std::unique_ptr RtpFileSource::NextPacket() { continue; } auto packet = std::make_unique( - rtc::CopyOnWriteBuffer(temp_packet.data, temp_packet.length), + CopyOnWriteBuffer(temp_packet.data, temp_packet.length), temp_packet.original_length, temp_packet.time_ms, &rtp_header_extension_map_); if (!packet->valid_header()) { @@ -79,7 +80,7 @@ std::unique_ptr RtpFileSource::NextPacket() { } } -RtpFileSource::RtpFileSource(absl::optional ssrc_filter) +RtpFileSource::RtpFileSource(std::optional ssrc_filter) : PacketSource(), ssrc_filter_(ssrc_filter) {} bool RtpFileSource::OpenFile(absl::string_view file_name) { diff --git a/modules/audio_coding/neteq/tools/rtp_file_source.h b/modules/audio_coding/neteq/tools/rtp_file_source.h index 55505be630..300981a025 100644 --- a/modules/audio_coding/neteq/tools/rtp_file_source.h +++ b/modules/audio_coding/neteq/tools/rtp_file_source.h @@ -14,10 +14,10 @@ 
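// Illustrative sketch, not part of this patch: Packetizer::SendData() above
// writes one rtpdump record per packet: a 16-bit total record length, a 16-bit
// RTP packet length, and a 32-bit millisecond offset, all in network byte
// order, followed by the packet bytes. The sketch below builds the same 8-byte
// record header with explicit shifts instead of htons()/htonl() so it needs no
// platform socket headers; the payload size and clock rate are example values.
#include <cstdint>
#include <cstdio>
#include <vector>

// Appends `bytes` octets of `value` to `out` in big-endian (network) order.
void AppendBigEndian(std::vector<uint8_t>& out, uint32_t value, int bytes) {
  for (int shift = 8 * (bytes - 1); shift >= 0; shift -= 8)
    out.push_back(static_cast<uint8_t>(value >> shift));
}

int main() {
  constexpr uint32_t kRtpHeaderLength = 12;
  constexpr uint32_t kRtpDumpHeaderLength = 8;
  const uint32_t payload_len_bytes = 160;  // E.g. 20 ms of PCMU.
  const uint32_t timestamp = 16000;        // In RTP timestamp units.
  const uint32_t timestamp_rate_hz = 8000;

  std::vector<uint8_t> record;
  AppendBigEndian(record, kRtpHeaderLength + kRtpDumpHeaderLength + payload_len_bytes, 2);
  AppendBigEndian(record, kRtpHeaderLength + payload_len_bytes, 2);
  AppendBigEndian(record, timestamp / (timestamp_rate_hz / 1000), 4);  // Offset in ms.
  std::printf("rtpdump record header: %zu bytes\n", record.size());    // 8
}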
#include #include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "modules/audio_coding/neteq/tools/packet_source.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -33,7 +33,7 @@ class RtpFileSource : public PacketSource { // opened, or has the wrong format, NULL will be returned. static RtpFileSource* Create( absl::string_view file_name, - absl::optional ssrc_filter = absl::nullopt); + std::optional ssrc_filter = std::nullopt); // Checks whether a files is a valid RTP dump or PCAP (Wireshark) file. static bool ValidRtpDump(absl::string_view file_name); @@ -54,12 +54,12 @@ class RtpFileSource : public PacketSource { static const int kRtpFileHeaderSize = 4 + 4 + 4 + 2 + 2; static const size_t kPacketHeaderSize = 8; - explicit RtpFileSource(absl::optional ssrc_filter); + explicit RtpFileSource(std::optional ssrc_filter); bool OpenFile(absl::string_view file_name); std::unique_ptr rtp_reader_; - const absl::optional ssrc_filter_; + const std::optional ssrc_filter_; RtpHeaderExtensionMap rtp_header_extension_map_; }; diff --git a/modules/audio_coding/neteq/tools/rtp_jitter.cc b/modules/audio_coding/neteq/tools/rtp_jitter.cc index cccaa9a3bb..23fc35f657 100644 --- a/modules/audio_coding/neteq/tools/rtp_jitter.cc +++ b/modules/audio_coding/neteq/tools/rtp_jitter.cc @@ -26,14 +26,14 @@ namespace { constexpr size_t kRtpDumpHeaderLength = 8; // Returns the next packet or an empty buffer if end of file was encountered. -rtc::Buffer ReadNextPacket(FILE* file) { +Buffer ReadNextPacket(FILE* file) { // Read the rtpdump header for the next packet. - rtc::Buffer buffer; - buffer.SetData(kRtpDumpHeaderLength, [&](rtc::ArrayView x) { + Buffer buffer; + buffer.SetData(kRtpDumpHeaderLength, [&](ArrayView x) { return fread(x.data(), 1, x.size(), file); }); if (buffer.size() != kRtpDumpHeaderLength) { - return rtc::Buffer(); + return Buffer(); } // Get length field. This is the total length for this packet written to file, @@ -42,7 +42,7 @@ rtc::Buffer ReadNextPacket(FILE* file) { RTC_CHECK_GE(len, kRtpDumpHeaderLength); // Read remaining data from file directly into buffer. 
- buffer.AppendData(len - kRtpDumpHeaderLength, [&](rtc::ArrayView x) { + buffer.AppendData(len - kRtpDumpHeaderLength, [&](ArrayView x) { return fread(x.data(), 1, x.size(), file); }); if (buffer.size() != len) { @@ -52,7 +52,7 @@ rtc::Buffer ReadNextPacket(FILE* file) { } struct PacketAndTime { - rtc::Buffer packet; + Buffer packet; int time; }; diff --git a/modules/audio_coding/neteq/underrun_optimizer.cc b/modules/audio_coding/neteq/underrun_optimizer.cc index baed812327..d1f12e9bfd 100644 --- a/modules/audio_coding/neteq/underrun_optimizer.cc +++ b/modules/audio_coding/neteq/underrun_optimizer.cc @@ -24,15 +24,15 @@ constexpr int kBucketSizeMs = 20; UnderrunOptimizer::UnderrunOptimizer(const TickTimer* tick_timer, int histogram_quantile, int forget_factor, - absl::optional start_forget_weight, - absl::optional resample_interval_ms) + std::optional start_forget_weight, + std::optional resample_interval_ms) : tick_timer_(tick_timer), histogram_(kDelayBuckets, forget_factor, start_forget_weight), histogram_quantile_(histogram_quantile), resample_interval_ms_(resample_interval_ms) {} void UnderrunOptimizer::Update(int relative_delay_ms) { - absl::optional histogram_update; + std::optional histogram_update; if (resample_interval_ms_) { if (!resample_stopwatch_) { resample_stopwatch_ = tick_timer_->GetNewStopwatch(); diff --git a/modules/audio_coding/neteq/underrun_optimizer.h b/modules/audio_coding/neteq/underrun_optimizer.h index b37ce18795..20a46ff907 100644 --- a/modules/audio_coding/neteq/underrun_optimizer.h +++ b/modules/audio_coding/neteq/underrun_optimizer.h @@ -12,8 +12,8 @@ #define MODULES_AUDIO_CODING_NETEQ_UNDERRUN_OPTIMIZER_H_ #include +#include -#include "absl/types/optional.h" #include "api/neteq/tick_timer.h" #include "modules/audio_coding/neteq/histogram.h" @@ -27,12 +27,12 @@ class UnderrunOptimizer { UnderrunOptimizer(const TickTimer* tick_timer, int histogram_quantile, int forget_factor, - absl::optional start_forget_weight, - absl::optional resample_interval_ms); + std::optional start_forget_weight, + std::optional resample_interval_ms); void Update(int relative_delay_ms); - absl::optional GetOptimalDelayMs() const { return optimal_delay_ms_; } + std::optional GetOptimalDelayMs() const { return optimal_delay_ms_; } void Reset(); @@ -40,10 +40,10 @@ class UnderrunOptimizer { const TickTimer* tick_timer_; Histogram histogram_; const int histogram_quantile_; // In Q30. 
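// Illustrative sketch, not part of this patch: the "// In Q30" comment above
// means the quantile is stored as a fixed-point integer with 30 fractional
// bits, i.e. the real value multiplied by 2^30 and stored as an integer. The
// 0.95 below is only an example value, not NetEq's default.
#include <cstdint>
#include <cstdio>

constexpr int kQ30FractionalBits = 30;

int32_t ToQ30(double value) {
  return static_cast<int32_t>(value * (1 << kQ30FractionalBits));
}

double FromQ30(int32_t q30) {
  return static_cast<double>(q30) / (1 << kQ30FractionalBits);
}

int main() {
  const int32_t quantile_q30 = ToQ30(0.95);
  std::printf("0.95 in Q30: %d\n", quantile_q30);             // 1020054732
  std::printf("back again:  %.6f\n", FromQ30(quantile_q30));  // ~0.950000
}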
- const absl::optional resample_interval_ms_; + const std::optional resample_interval_ms_; std::unique_ptr resample_stopwatch_; int max_delay_in_interval_ms_ = 0; - absl::optional optimal_delay_ms_; + std::optional optimal_delay_ms_; }; } // namespace webrtc diff --git a/modules/audio_coding/neteq/underrun_optimizer_unittest.cc b/modules/audio_coding/neteq/underrun_optimizer_unittest.cc index a86e9cf107..0988e03d73 100644 --- a/modules/audio_coding/neteq/underrun_optimizer_unittest.cc +++ b/modules/audio_coding/neteq/underrun_optimizer_unittest.cc @@ -25,7 +25,7 @@ TEST(UnderrunOptimizerTest, ResamplePacketDelays) { TickTimer tick_timer; constexpr int kResampleIntervalMs = 500; UnderrunOptimizer underrun_optimizer(&tick_timer, kDefaultHistogramQuantile, - kForgetFactor, absl::nullopt, + kForgetFactor, std::nullopt, kResampleIntervalMs); // The histogram should be updated once with the maximum delay observed for diff --git a/modules/audio_coding/test/Channel.cc b/modules/audio_coding/test/Channel.cc index 8f634db4f6..4c91f5948a 100644 --- a/modules/audio_coding/test/Channel.cc +++ b/modules/audio_coding/test/Channel.cc @@ -22,7 +22,7 @@ int32_t Channel::SendData(AudioFrameType frameType, uint32_t timeStamp, const uint8_t* payloadData, size_t payloadSize, - int64_t absolute_capture_timestamp_ms) { + int64_t /* absolute_capture_timestamp_ms */) { RTPHeader rtp_header; int32_t status; size_t payloadDataSize = payloadSize; @@ -82,8 +82,9 @@ int32_t Channel::SendData(AudioFrameType frameType, return 0; } - status = _receiverACM->InsertPacket( - rtp_header, rtc::ArrayView(_payloadData, payloadDataSize)); + status = _neteq->InsertPacket( + rtp_header, ArrayView(_payloadData, payloadDataSize), + /*receive_time=*/Timestamp::MinusInfinity()); return status; } @@ -186,7 +187,7 @@ void Channel::CalcStatistics(const RTPHeader& rtp_header, size_t payloadSize) { } Channel::Channel(int16_t chID) - : _receiverACM(NULL), + : _neteq(NULL), _seqNo(0), _bitStreamFile(NULL), _saveBitStream(false), @@ -198,7 +199,7 @@ Channel::Channel(int16_t chID) _lastFrameSizeSample(0), _packetLoss(0), _useFECTestWithPacketLoss(false), - _beginTime(rtc::TimeMillis()), + _beginTime(TimeMillis()), _totalBytes(0), external_send_timestamp_(-1), external_sequence_number_(-1), @@ -218,7 +219,7 @@ Channel::Channel(int16_t chID) } if (chID >= 0) { _saveBitStream = true; - rtc::StringBuilder ss; + StringBuilder ss; ss.AppendFormat("bitStream_%d.dat", chID); _bitStreamFile = fopen(ss.str().c_str(), "wb"); } else { @@ -228,8 +229,8 @@ Channel::Channel(int16_t chID) Channel::~Channel() {} -void Channel::RegisterReceiverACM(acm2::AcmReceiver* acm_receiver) { - _receiverACM = acm_receiver; +void Channel::RegisterReceiverNetEq(NetEq* neteq) { + _neteq = neteq; return; } @@ -249,7 +250,7 @@ void Channel::ResetStats() { _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0; } } - _beginTime = rtc::TimeMillis(); + _beginTime = TimeMillis(); _totalBytes = 0; _channelCritSect.Unlock(); } @@ -264,7 +265,7 @@ uint32_t Channel::LastInTimestamp() { double Channel::BitRate() { double rate; - uint64_t currTime = rtc::TimeMillis(); + uint64_t currTime = TimeMillis(); _channelCritSect.Lock(); rate = ((double)_totalBytes * 8.0) / (double)(currTime - _beginTime); _channelCritSect.Unlock(); diff --git a/modules/audio_coding/test/Channel.h b/modules/audio_coding/test/Channel.h index ebf4461d16..616e283647 100644 --- a/modules/audio_coding/test/Channel.h +++ b/modules/audio_coding/test/Channel.h @@ -13,7 +13,7 @@ #include -#include 
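// Illustrative sketch, not part of this patch: Channel::BitRate() above divides
// total bits by elapsed milliseconds, so the result is in kilobits per second
// (bits per millisecond == kbit/s). A quick check of the units with made-up
// numbers:
#include <cstdint>
#include <cstdio>

int main() {
  const uint64_t total_bytes = 80000;  // 80 kB sent...
  const uint64_t elapsed_ms = 10000;   // ...over 10 seconds.
  const double rate = (static_cast<double>(total_bytes) * 8.0) /
                      static_cast<double>(elapsed_ms);
  // 640000 bits / 10000 ms = 64 bits per millisecond = 64 kbit/s.
  std::printf("rate = %.1f kbit/s\n", rate);
}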
"modules/audio_coding/acm2/acm_receiver.h" +#include "api/neteq/neteq.h" #include "modules/audio_coding/include/audio_coding_module.h" #include "modules/include/module_common_types.h" #include "rtc_base/synchronization/mutex.h" @@ -55,7 +55,7 @@ class Channel : public AudioPacketizationCallback { size_t payloadSize, int64_t absolute_capture_timestamp_ms) override; - void RegisterReceiverACM(acm2::AcmReceiver* acm_receiver); + void RegisterReceiverNetEq(NetEq* neteq); void ResetStats(); @@ -84,7 +84,7 @@ class Channel : public AudioPacketizationCallback { private: void CalcStatistics(const RTPHeader& rtp_header, size_t payloadSize); - acm2::AcmReceiver* _receiverACM; + NetEq* _neteq; uint16_t _seqNo; // 60msec * 32 sample(max)/msec * 2 description (maybe) * 2 bytes/sample uint8_t _payloadData[60 * 32 * 2 * 2]; diff --git a/modules/audio_coding/test/EncodeDecodeTest.cc b/modules/audio_coding/test/EncodeDecodeTest.cc index cab36458e0..f50fb15e48 100644 --- a/modules/audio_coding/test/EncodeDecodeTest.cc +++ b/modules/audio_coding/test/EncodeDecodeTest.cc @@ -18,6 +18,9 @@ #include "absl/strings/string_view.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/neteq/default_neteq_factory.h" #include "modules/audio_coding/include/audio_coding_module.h" #include "rtc_base/strings/string_builder.h" #include "test/gtest.h" @@ -36,12 +39,13 @@ TestPacketization::TestPacketization(RTPStream* rtpStream, uint16_t frequency) TestPacketization::~TestPacketization() {} -int32_t TestPacketization::SendData(const AudioFrameType /* frameType */, - const uint8_t payloadType, - const uint32_t timeStamp, - const uint8_t* payloadData, - const size_t payloadSize, - int64_t absolute_capture_timestamp_ms) { +int32_t TestPacketization::SendData( + const AudioFrameType /* frameType */, + const uint8_t payloadType, + const uint32_t timeStamp, + const uint8_t* payloadData, + const size_t payloadSize, + int64_t /* absolute_capture_timestamp_ms */) { _rtpStream->Write(payloadType, timeStamp, _seqNo++, payloadData, payloadSize, _frequency); return 1; @@ -50,7 +54,8 @@ int32_t TestPacketization::SendData(const AudioFrameType /* frameType */, Sender::Sender() : _acm(NULL), _pcmFile(), _audioFrame(), _packetization(NULL) {} -void Sender::Setup(AudioCodingModule* acm, +void Sender::Setup(const Environment& env, + AudioCodingModule* acm, RTPStream* rtpStream, absl::string_view in_file_name, int in_sample_rate, @@ -67,8 +72,8 @@ void Sender::Setup(AudioCodingModule* acm, // Fast-forward 1 second (100 blocks) since the file starts with silence. 
_pcmFile.FastForward(100); - acm->SetEncoder(CreateBuiltinAudioEncoderFactory()->MakeAudioEncoder( - payload_type, format, absl::nullopt)); + acm->SetEncoder(CreateBuiltinAudioEncoderFactory()->Create( + env, format, {.payload_type = payload_type})); _packetization = new TestPacketization(rtpStream, format.clockrate_hz); EXPECT_EQ(0, acm->RegisterTransportCallback(_packetization)); @@ -102,37 +107,36 @@ Receiver::Receiver() : _playoutLengthSmpls(kWebRtc10MsPcmAudio), _payloadSizeBytes(MAX_INCOMING_PAYLOAD) {} -void Receiver::Setup(acm2::AcmReceiver* acm_receiver, +void Receiver::Setup(NetEq* neteq, RTPStream* rtpStream, absl::string_view out_file_name, size_t channels, int file_num) { if (channels == 1) { - acm_receiver->SetCodecs({{107, {"L16", 8000, 1}}, - {108, {"L16", 16000, 1}}, - {109, {"L16", 32000, 1}}, - {0, {"PCMU", 8000, 1}}, - {8, {"PCMA", 8000, 1}}, - {102, {"ILBC", 8000, 1}}, - {9, {"G722", 8000, 1}}, - {120, {"OPUS", 48000, 2}}, - {13, {"CN", 8000, 1}}, - {98, {"CN", 16000, 1}}, - {99, {"CN", 32000, 1}}}); + neteq->SetCodecs({{107, {"L16", 8000, 1}}, + {108, {"L16", 16000, 1}}, + {109, {"L16", 32000, 1}}, + {0, {"PCMU", 8000, 1}}, + {8, {"PCMA", 8000, 1}}, + {9, {"G722", 8000, 1}}, + {120, {"OPUS", 48000, 2}}, + {13, {"CN", 8000, 1}}, + {98, {"CN", 16000, 1}}, + {99, {"CN", 32000, 1}}}); } else { ASSERT_EQ(channels, 2u); - acm_receiver->SetCodecs({{111, {"L16", 8000, 2}}, - {112, {"L16", 16000, 2}}, - {113, {"L16", 32000, 2}}, - {110, {"PCMU", 8000, 2}}, - {118, {"PCMA", 8000, 2}}, - {119, {"G722", 8000, 2}}, - {120, {"OPUS", 48000, 2, {{"stereo", "1"}}}}}); + neteq->SetCodecs({{111, {"L16", 8000, 2}}, + {112, {"L16", 16000, 2}}, + {113, {"L16", 32000, 2}}, + {110, {"PCMU", 8000, 2}}, + {118, {"PCMA", 8000, 2}}, + {119, {"G722", 8000, 2}}, + {120, {"OPUS", 48000, 2, {{"stereo", "1"}}}}}); } int playSampFreq; std::string file_name; - rtc::StringBuilder file_stream; + StringBuilder file_stream; file_stream << webrtc::test::OutputPath() << out_file_name << file_num << ".pcm"; file_name = file_stream.str(); @@ -144,7 +148,7 @@ void Receiver::Setup(acm2::AcmReceiver* acm_receiver, _realPayloadSizeBytes = 0; _playoutBuffer = new int16_t[kWebRtc10MsPcmAudio]; _frequency = playSampFreq; - _acm_receiver = acm_receiver; + _neteq = neteq; _firstTime = true; } @@ -169,9 +173,11 @@ bool Receiver::IncomingPacket() { } } - EXPECT_EQ(0, _acm_receiver->InsertPacket( - _rtpHeader, rtc::ArrayView( - _incomingPayload, _realPayloadSizeBytes))); + EXPECT_GE( + 0, _neteq->InsertPacket(_rtpHeader, + ArrayView(_incomingPayload, + _realPayloadSizeBytes), + /*receive_time=*/Timestamp::Millis(_nextTime))); _realPayloadSizeBytes = _rtpStream->Read(&_rtpHeader, _incomingPayload, _payloadSizeBytes, &_nextTime); if (_realPayloadSizeBytes == 0 && _rtpStream->EndOfFile()) { @@ -184,18 +190,19 @@ bool Receiver::IncomingPacket() { bool Receiver::PlayoutData() { AudioFrame audioFrame; bool muted; - int32_t ok = _acm_receiver->GetAudio(_frequency, &audioFrame, &muted); + int ok = _neteq->GetAudio(&audioFrame, &muted); if (muted) { ADD_FAILURE(); return false; } - EXPECT_EQ(0, ok); + EXPECT_EQ(NetEq::kOK, ok); if (ok < 0) { return false; } if (_playoutLengthSmpls == 0) { return false; } + EXPECT_TRUE(_resampler_helper.MaybeResample(_frequency, &audioFrame)); _pcmFile.Write10MsData(audioFrame.data(), audioFrame.samples_per_channel_ * audioFrame.num_channels_); return true; @@ -232,10 +239,12 @@ void EncodeDecodeTest::Perform() { {107, {"L16", 8000, 1}}, {108, {"L16", 16000, 1}}, {109, {"L16", 32000, 1}}, {0, 
{"PCMU", 8000, 1}}, {8, {"PCMA", 8000, 1}}, -#ifdef WEBRTC_CODEC_ILBC - {102, {"ILBC", 8000, 1}}, +// TODO(bugs.webrtc.org/345525069): Either fix/enable or remove G722. +#if defined(__has_feature) && !__has_feature(undefined_behavior_sanitizer) + {9, {"G722", 8000, 1}}, #endif - {9, {"G722", 8000, 1}}}; + }; + const Environment env = CreateEnvironment(); int file_num = 0; for (const auto& send_codec : send_codecs) { RTPFile rtpFile; @@ -246,7 +255,7 @@ void EncodeDecodeTest::Perform() { rtpFile.Open(fileName.c_str(), "wb+"); rtpFile.WriteHeader(); Sender sender; - sender.Setup(acm.get(), &rtpFile, "audio_coding/testfile32kHz", 32000, + sender.Setup(env, acm.get(), &rtpFile, "audio_coding/testfile32kHz", 32000, send_codec.first, send_codec.second); sender.Run(); sender.Teardown(); @@ -254,12 +263,10 @@ void EncodeDecodeTest::Perform() { rtpFile.Open(fileName.c_str(), "rb"); rtpFile.ReadHeader(); - std::unique_ptr acm_receiver( - std::make_unique( - acm2::AcmReceiver::Config(CreateBuiltinAudioDecoderFactory()))); + std::unique_ptr neteq = DefaultNetEqFactory().Create( + env, NetEq::Config(), CreateBuiltinAudioDecoderFactory()); Receiver receiver; - receiver.Setup(acm_receiver.get(), &rtpFile, "encodeDecode_out", 1, - file_num); + receiver.Setup(neteq.get(), &rtpFile, "encodeDecode_out", 1, file_num); receiver.Run(); receiver.Teardown(); rtpFile.Close(); diff --git a/modules/audio_coding/test/EncodeDecodeTest.h b/modules/audio_coding/test/EncodeDecodeTest.h index 9cd2c23c18..8a20998cd7 100644 --- a/modules/audio_coding/test/EncodeDecodeTest.h +++ b/modules/audio_coding/test/EncodeDecodeTest.h @@ -15,7 +15,9 @@ #include #include "absl/strings/string_view.h" -#include "modules/audio_coding/acm2/acm_receiver.h" +#include "api/environment/environment.h" +#include "api/neteq/neteq.h" +#include "modules/audio_coding/acm2/acm_resampler.h" #include "modules/audio_coding/include/audio_coding_module.h" #include "modules/audio_coding/test/PCMFile.h" #include "modules/audio_coding/test/RTPFile.h" @@ -51,7 +53,8 @@ class TestPacketization : public AudioPacketizationCallback { class Sender { public: Sender(); - void Setup(AudioCodingModule* acm, + void Setup(const Environment& env, + AudioCodingModule* acm, RTPStream* rtpStream, absl::string_view in_file_name, int in_sample_rate, @@ -74,7 +77,7 @@ class Receiver { public: Receiver(); virtual ~Receiver() {} - void Setup(acm2::AcmReceiver* acm_receiver, + void Setup(NetEq* neteq, RTPStream* rtpStream, absl::string_view out_file_name, size_t channels, @@ -92,7 +95,8 @@ class Receiver { bool _firstTime; protected: - acm2::AcmReceiver* _acm_receiver; + NetEq* _neteq; + acm2::ResamplerHelper _resampler_helper; uint8_t _incomingPayload[MAX_INCOMING_PAYLOAD]; RTPStream* _rtpStream; RTPHeader _rtpHeader; diff --git a/modules/audio_coding/test/PCMFile.h b/modules/audio_coding/test/PCMFile.h index 5320aa63d0..64af5df007 100644 --- a/modules/audio_coding/test/PCMFile.h +++ b/modules/audio_coding/test/PCMFile.h @@ -14,10 +14,10 @@ #include #include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/audio/audio_frame.h" namespace webrtc { @@ -68,7 +68,7 @@ class PCMFile { uint32_t timestamp_; bool read_stereo_; bool save_stereo_; - absl::optional num_10ms_blocks_to_read_; + std::optional num_10ms_blocks_to_read_; int blocks_read_ = 0; }; diff --git a/modules/audio_coding/test/PacketLossTest.cc b/modules/audio_coding/test/PacketLossTest.cc index c4f665677f..41737521b7 100644 --- 
a/modules/audio_coding/test/PacketLossTest.cc +++ b/modules/audio_coding/test/PacketLossTest.cc @@ -14,6 +14,10 @@ #include "absl/strings/string_view.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/neteq/default_neteq_factory.h" +#include "api/units/timestamp.h" #include "rtc_base/strings/string_builder.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" @@ -27,7 +31,7 @@ ReceiverWithPacketLoss::ReceiverWithPacketLoss() lost_packet_counter_(0), burst_lost_counter_(burst_length_) {} -void ReceiverWithPacketLoss::Setup(acm2::AcmReceiver* acm_receiver, +void ReceiverWithPacketLoss::Setup(NetEq* neteq, RTPStream* rtpStream, absl::string_view out_file_name, int channels, @@ -37,9 +41,9 @@ void ReceiverWithPacketLoss::Setup(acm2::AcmReceiver* acm_receiver, loss_rate_ = loss_rate; burst_length_ = burst_length; burst_lost_counter_ = burst_length_; // To prevent first packet gets lost. - rtc::StringBuilder ss; + StringBuilder ss; ss << out_file_name << "_" << loss_rate_ << "_" << burst_length_ << "_"; - Receiver::Setup(acm_receiver, rtpStream, ss.str(), channels, file_num); + Receiver::Setup(neteq, rtpStream, ss.str(), channels, file_num); } bool ReceiverWithPacketLoss::IncomingPacket() { @@ -58,9 +62,10 @@ bool ReceiverWithPacketLoss::IncomingPacket() { } if (!PacketLost()) { - _acm_receiver->InsertPacket( - _rtpHeader, rtc::ArrayView(_incomingPayload, - _realPayloadSizeBytes)); + _neteq->InsertPacket( + _rtpHeader, + ArrayView(_incomingPayload, _realPayloadSizeBytes), + Timestamp::Millis(_nextTime)); } packet_counter_++; _realPayloadSizeBytes = _rtpStream->Read(&_rtpHeader, _incomingPayload, @@ -90,14 +95,15 @@ bool ReceiverWithPacketLoss::PacketLost() { SenderWithFEC::SenderWithFEC() : expected_loss_rate_(0) {} -void SenderWithFEC::Setup(AudioCodingModule* acm, +void SenderWithFEC::Setup(const Environment& env, + AudioCodingModule* acm, RTPStream* rtpStream, absl::string_view in_file_name, int payload_type, SdpAudioFormat format, int expected_loss_rate) { - Sender::Setup(acm, rtpStream, in_file_name, format.clockrate_hz, payload_type, - format); + Sender::Setup(env, acm, rtpStream, in_file_name, format.clockrate_hz, + payload_type, format); EXPECT_TRUE(SetFEC(true)); EXPECT_TRUE(SetPacketLossRate(expected_loss_rate)); } @@ -136,6 +142,7 @@ void PacketLossTest::Perform() { #ifndef WEBRTC_CODEC_OPUS return; #else + const Environment env = CreateEnvironment(); RTPFile rtpFile; std::unique_ptr acm(AudioCodingModule::Create()); SdpAudioFormat send_format = SdpAudioFormat("opus", 48000, 2); @@ -148,7 +155,7 @@ void PacketLossTest::Perform() { rtpFile.Open(fileName.c_str(), "wb+"); rtpFile.WriteHeader(); SenderWithFEC sender; - sender.Setup(acm.get(), &rtpFile, in_file_name_, 120, send_format, + sender.Setup(env, acm.get(), &rtpFile, in_file_name_, 120, send_format, expected_loss_rate_); sender.Run(); sender.Teardown(); @@ -156,11 +163,10 @@ void PacketLossTest::Perform() { rtpFile.Open(fileName.c_str(), "rb"); rtpFile.ReadHeader(); - std::unique_ptr acm_receiver( - std::make_unique( - acm2::AcmReceiver::Config(CreateBuiltinAudioDecoderFactory()))); + std::unique_ptr neteq = DefaultNetEqFactory().Create( + env, NetEq::Config(), CreateBuiltinAudioDecoderFactory()); ReceiverWithPacketLoss receiver; - receiver.Setup(acm_receiver.get(), &rtpFile, "packetLoss_out", channels_, 15, + receiver.Setup(neteq.get(), &rtpFile, "packetLoss_out", channels_, 15, actual_loss_rate_, 
burst_length_); receiver.Run(); receiver.Teardown(); diff --git a/modules/audio_coding/test/PacketLossTest.h b/modules/audio_coding/test/PacketLossTest.h index 7569e23efa..41a62d519a 100644 --- a/modules/audio_coding/test/PacketLossTest.h +++ b/modules/audio_coding/test/PacketLossTest.h @@ -21,7 +21,7 @@ namespace webrtc { class ReceiverWithPacketLoss : public Receiver { public: ReceiverWithPacketLoss(); - void Setup(acm2::AcmReceiver* acm_receiver, + void Setup(NetEq* neteq, RTPStream* rtpStream, absl::string_view out_file_name, int channels, @@ -42,7 +42,8 @@ class ReceiverWithPacketLoss : public Receiver { class SenderWithFEC : public Sender { public: SenderWithFEC(); - void Setup(AudioCodingModule* acm, + void Setup(const Environment& env, + AudioCodingModule* acm, RTPStream* rtpStream, absl::string_view in_file_name, int payload_type, diff --git a/modules/audio_coding/test/TestAllCodecs.cc b/modules/audio_coding/test/TestAllCodecs.cc index dd51760fd1..e0ef536b3a 100644 --- a/modules/audio_coding/test/TestAllCodecs.cc +++ b/modules/audio_coding/test/TestAllCodecs.cc @@ -17,6 +17,10 @@ #include "absl/strings/match.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/environment/environment_factory.h" +#include "api/neteq/default_neteq_factory.h" +#include "api/neteq/neteq.h" +#include "modules/audio_coding/acm2/acm_resampler.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/include/module_common_types.h" #include "rtc_base/logging.h" @@ -46,7 +50,7 @@ namespace webrtc { // Class for simulating packet handling. TestPack::TestPack() - : receiver_acm_(NULL), + : neteq_(NULL), sequence_number_(0), timestamp_diff_(0), last_in_timestamp_(0), @@ -55,9 +59,8 @@ TestPack::TestPack() TestPack::~TestPack() {} -void TestPack::RegisterReceiverACM(acm2::AcmReceiver* acm_receiver) { - receiver_acm_ = acm_receiver; - return; +void TestPack::RegisterReceiverNetEq(NetEq* neteq) { + neteq_ = neteq; } int32_t TestPack::SendData(AudioFrameType frame_type, @@ -65,7 +68,7 @@ int32_t TestPack::SendData(AudioFrameType frame_type, uint32_t timestamp, const uint8_t* payload_data, size_t payload_size, - int64_t absolute_capture_timestamp_ms) { + int64_t /* absolute_capture_timestamp_ms */) { RTPHeader rtp_header; int32_t status; @@ -83,8 +86,9 @@ int32_t TestPack::SendData(AudioFrameType frame_type, // Only run mono for all test cases. 
memcpy(payload_data_, payload_data, payload_size); - status = receiver_acm_->InsertPacket( - rtp_header, rtc::ArrayView(payload_data_, payload_size)); + status = neteq_->InsertPacket( + rtp_header, ArrayView(payload_data_, payload_size), + /*receive_time=*/Timestamp::MinusInfinity()); payload_size_ = payload_size; timestamp_diff_ = timestamp - last_in_timestamp_; @@ -106,9 +110,11 @@ void TestPack::reset_payload_size() { } TestAllCodecs::TestAllCodecs() - : acm_a_(AudioCodingModule::Create()), - acm_b_(std::make_unique( - acm2::AcmReceiver::Config(CreateBuiltinAudioDecoderFactory()))), + : env_(CreateEnvironment()), + acm_a_(AudioCodingModule::Create()), + neteq_(DefaultNetEqFactory().Create(env_, + NetEq::Config(), + CreateBuiltinAudioDecoderFactory())), channel_a_to_b_(NULL), test_count_(0), packet_size_samples_(0), @@ -126,7 +132,7 @@ void TestAllCodecs::Perform() { webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"); infile_a_.Open(file_name, 32000, "rb"); - acm_b_->SetCodecs({{107, {"L16", 8000, 1}}, + neteq_->SetCodecs({{107, {"L16", 8000, 1}}, {108, {"L16", 16000, 1}}, {109, {"L16", 32000, 1}}, {111, {"L16", 8000, 2}}, @@ -136,7 +142,6 @@ void TestAllCodecs::Perform() { {110, {"PCMU", 8000, 2}}, {8, {"PCMA", 8000, 1}}, {118, {"PCMA", 8000, 2}}, - {102, {"ILBC", 8000, 1}}, {9, {"G722", 8000, 1}}, {119, {"G722", 8000, 2}}, {120, {"OPUS", 48000, 2, {{"stereo", "1"}}}}, @@ -147,10 +152,13 @@ void TestAllCodecs::Perform() { // Create and connect the channel channel_a_to_b_ = new TestPack; acm_a_->RegisterTransportCallback(channel_a_to_b_); - channel_a_to_b_->RegisterReceiverACM(acm_b_.get()); + channel_a_to_b_->RegisterReceiverNetEq(neteq_.get()); // All codecs are tested for all allowed sampling frequencies, rates and // packet sizes. + +// TODO(bugs.webrtc.org/345525069): Either fix/enable or remove G722. 
+#if defined(__has_feature) && !__has_feature(undefined_behavior_sanitizer) test_count_++; OpenOutFile(test_count_); char codec_g722[] = "G722"; @@ -167,19 +175,6 @@ void TestAllCodecs::Perform() { RegisterSendCodec(codec_g722, 16000, 64000, 960, 0); Run(channel_a_to_b_); outfile_b_.Close(); -#ifdef WEBRTC_CODEC_ILBC - test_count_++; - OpenOutFile(test_count_); - char codec_ilbc[] = "ILBC"; - RegisterSendCodec(codec_ilbc, 8000, 13300, 240, 0); - Run(channel_a_to_b_); - RegisterSendCodec(codec_ilbc, 8000, 13300, 480, 0); - Run(channel_a_to_b_); - RegisterSendCodec(codec_ilbc, 8000, 15200, 160, 0); - Run(channel_a_to_b_); - RegisterSendCodec(codec_ilbc, 8000, 15200, 320, 0); - Run(channel_a_to_b_); - outfile_b_.Close(); #endif test_count_++; OpenOutFile(test_count_); @@ -311,16 +306,15 @@ void TestAllCodecs::RegisterSendCodec(char* codec_name, } auto factory = CreateBuiltinAudioEncoderFactory(); - constexpr int payload_type = 17; SdpAudioFormat format = {codec_name, clockrate_hz, num_channels}; - format.parameters["ptime"] = rtc::ToString(rtc::CheckedDivExact( - packet_size, rtc::CheckedDivExact(sampling_freq_hz, 1000))); - acm_a_->SetEncoder( - factory->MakeAudioEncoder(payload_type, format, absl::nullopt)); + format.parameters["ptime"] = absl::StrCat( + CheckedDivExact(packet_size, CheckedDivExact(sampling_freq_hz, 1000))); + acm_a_->SetEncoder(factory->Create(env_, format, {.payload_type = 17})); } void TestAllCodecs::Run(TestPack* channel) { AudioFrame audio_frame; + acm2::ResamplerHelper resampler_helper; int32_t out_freq_hz = outfile_b_.SamplingFrequency(); size_t receive_size; @@ -358,8 +352,9 @@ void TestAllCodecs::Run(TestPack* channel) { // Run received side of ACM. bool muted; - CHECK_ERROR(acm_b_->GetAudio(out_freq_hz, &audio_frame, &muted)); + CHECK_ERROR(neteq_->GetAudio(&audio_frame, &muted)); ASSERT_FALSE(muted); + EXPECT_TRUE(resampler_helper.MaybeResample(out_freq_hz, &audio_frame)); // Write output speech to file. 
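// Illustrative sketch, not part of this patch: the "ptime" parameter built in
// RegisterSendCodec() above is the packet duration in milliseconds, i.e. packet
// size in samples divided by samples per millisecond, which is what the nested
// CheckedDivExact() calls compute. Plain-C++ version of the same arithmetic:
#include <cassert>
#include <cstdio>

int PtimeMs(int packet_size_samples, int sampling_freq_hz) {
  assert(sampling_freq_hz % 1000 == 0);
  const int samples_per_ms = sampling_freq_hz / 1000;
  // The division must be exact, mirroring what CheckedDivExact enforces above.
  assert(packet_size_samples % samples_per_ms == 0);
  return packet_size_samples / samples_per_ms;
}

int main() {
  std::printf("960 samples @ 48 kHz -> ptime %d ms\n", PtimeMs(960, 48000));  // 20
  std::printf("160 samples @ 8 kHz  -> ptime %d ms\n", PtimeMs(160, 8000));   // 20
}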
outfile_b_.Write10MsData(audio_frame.data(), @@ -378,7 +373,7 @@ void TestAllCodecs::Run(TestPack* channel) { void TestAllCodecs::OpenOutFile(int test_number) { std::string filename = webrtc::test::OutputPath(); - rtc::StringBuilder test_number_str; + StringBuilder test_number_str; test_number_str << test_number; filename += "testallcodecs_out_"; filename += test_number_str.str(); diff --git a/modules/audio_coding/test/TestAllCodecs.h b/modules/audio_coding/test/TestAllCodecs.h index a17038ad84..0712332a5e 100644 --- a/modules/audio_coding/test/TestAllCodecs.h +++ b/modules/audio_coding/test/TestAllCodecs.h @@ -13,18 +13,20 @@ #include -#include "modules/audio_coding/acm2/acm_receiver.h" +#include "api/environment/environment.h" #include "modules/audio_coding/include/audio_coding_module.h" #include "modules/audio_coding/test/PCMFile.h" namespace webrtc { +class NetEq; + class TestPack : public AudioPacketizationCallback { public: TestPack(); ~TestPack(); - void RegisterReceiverACM(acm2::AcmReceiver* acm_receiver); + void RegisterReceiverNetEq(NetEq* neteq); int32_t SendData(AudioFrameType frame_type, uint8_t payload_type, @@ -38,7 +40,7 @@ class TestPack : public AudioPacketizationCallback { void reset_payload_size(); private: - acm2::AcmReceiver* receiver_acm_; + NetEq* neteq_; uint16_t sequence_number_; uint8_t payload_data_[60 * 32 * 2 * 2]; uint32_t timestamp_diff_; @@ -68,8 +70,9 @@ class TestAllCodecs { void Run(TestPack* channel); void OpenOutFile(int test_number); + const Environment env_; std::unique_ptr acm_a_; - std::unique_ptr acm_b_; + std::unique_ptr neteq_; TestPack* channel_a_to_b_; PCMFile infile_a_; PCMFile outfile_b_; diff --git a/modules/audio_coding/test/TestRedFec.cc b/modules/audio_coding/test/TestRedFec.cc index f8acf48508..1900c905fc 100644 --- a/modules/audio_coding/test/TestRedFec.cc +++ b/modules/audio_coding/test/TestRedFec.cc @@ -24,6 +24,8 @@ #include "api/audio_codecs/g722/audio_encoder_g722.h" #include "api/audio_codecs/opus/audio_decoder_opus.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" +#include "api/environment/environment_factory.h" +#include "api/neteq/default_neteq_factory.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" #include "modules/audio_coding/codecs/red/audio_encoder_copy_red.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" @@ -34,7 +36,8 @@ namespace webrtc { TestRedFec::TestRedFec() - : encoder_factory_(CreateAudioEncoderFactory()), @@ -43,8 +46,9 @@ TestRedFec::TestRedFec() AudioDecoderL16, AudioDecoderOpus>()), _acmA(AudioCodingModule::Create()), - _acm_receiver(std::make_unique( - acm2::AcmReceiver::Config(decoder_factory_))), + _neteq(DefaultNetEqFactory().Create(env_, + NetEq::Config(), + decoder_factory_)), _channelA2B(NULL), _testCntr(0) {} @@ -63,7 +67,7 @@ void TestRedFec::Perform() { // Create and connect the channel _channelA2B = new Channel; _acmA->RegisterTransportCallback(_channelA2B); - _channelA2B->RegisterReceiverACM(_acm_receiver.get()); + _channelA2B->RegisterReceiverNetEq(_neteq.get()); RegisterSendCodec(_acmA, {"L16", 8000, 1}, Vad::kVadAggressive, true); @@ -77,6 +81,8 @@ void TestRedFec::Perform() { Run(); _outFileB.Close(); +// TODO(bugs.webrtc.org/345525069): Either fix/enable or remove G722. +#if defined(__has_feature) && !__has_feature(undefined_behavior_sanitizer) // Switch to a 16 kHz codec; RED should be switched off. 
RegisterSendCodec(_acmA, {"G722", 8000, 1}, Vad::kVadAggressive, false); @@ -96,8 +102,9 @@ void TestRedFec::Perform() { OpenOutFile(_testCntr); Run(); _outFileB.Close(); +#endif - RegisterSendCodec(_acmA, {"opus", 48000, 2}, absl::nullopt, false); + RegisterSendCodec(_acmA, {"opus", 48000, 2}, std::nullopt, false); // _channelA2B imposes 25% packet loss rate. EXPECT_EQ(0, _acmA->SetPacketLossRate(25)); @@ -110,11 +117,11 @@ void TestRedFec::Perform() { Run(); // Switch to L16 with RED. - RegisterSendCodec(_acmA, {"L16", 8000, 1}, absl::nullopt, true); + RegisterSendCodec(_acmA, {"L16", 8000, 1}, std::nullopt, true); Run(); // Switch to Opus again. - RegisterSendCodec(_acmA, {"opus", 48000, 2}, absl::nullopt, false); + RegisterSendCodec(_acmA, {"opus", 48000, 2}, std::nullopt, false); _acmA->ModifyEncoder([&](std::unique_ptr* enc) { EXPECT_EQ(true, (*enc)->SetFec(false)); }); @@ -129,12 +136,12 @@ void TestRedFec::Perform() { void TestRedFec::RegisterSendCodec( const std::unique_ptr& acm, const SdpAudioFormat& codec_format, - absl::optional vad_mode, + std::optional vad_mode, bool use_red) { constexpr int payload_type = 17, cn_payload_type = 27, red_payload_type = 37; - auto encoder = encoder_factory_->MakeAudioEncoder(payload_type, codec_format, - absl::nullopt); + auto encoder = encoder_factory_->Create(env_, codec_format, + {.payload_type = payload_type}); EXPECT_NE(encoder, nullptr); std::map receive_codecs = {{payload_type, codec_format}}; if (!absl::EqualsIgnoreCase(codec_format.name, "opus")) { @@ -160,7 +167,7 @@ void TestRedFec::RegisterSendCodec( } } acm->SetEncoder(std::move(encoder)); - _acm_receiver->SetCodecs(receive_codecs); + _neteq->SetCodecs(receive_codecs); } void TestRedFec::Run() { @@ -175,7 +182,8 @@ void TestRedFec::Run() { EXPECT_GT(_inFileA.Read10MsData(audioFrame), 0); EXPECT_GE(_acmA->Add10MsData(audioFrame), 0); bool muted; - EXPECT_EQ(0, _acm_receiver->GetAudio(outFreqHzB, &audioFrame, &muted)); + EXPECT_EQ(NetEq::kOK, _neteq->GetAudio(&audioFrame, &muted)); + EXPECT_TRUE(_resampler_helper.MaybeResample(outFreqHzB, &audioFrame)); ASSERT_FALSE(muted); _outFileB.Write10MsData(audioFrame.data(), audioFrame.samples_per_channel_); } @@ -184,7 +192,7 @@ void TestRedFec::Run() { void TestRedFec::OpenOutFile(int16_t test_number) { std::string file_name; - rtc::StringBuilder file_stream; + StringBuilder file_stream; file_stream << webrtc::test::OutputPath(); file_stream << "TestRedFec_outFile_"; file_stream << test_number << ".pcm"; diff --git a/modules/audio_coding/test/TestRedFec.h b/modules/audio_coding/test/TestRedFec.h index 173b03f4fc..3016b3889a 100644 --- a/modules/audio_coding/test/TestRedFec.h +++ b/modules/audio_coding/test/TestRedFec.h @@ -16,8 +16,10 @@ #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_encoder_factory.h" +#include "api/environment/environment.h" +#include "api/neteq/neteq.h" #include "common_audio/vad/include/vad.h" -#include "modules/audio_coding/acm2/acm_receiver.h" +#include "modules/audio_coding/acm2/acm_resampler.h" #include "modules/audio_coding/test/Channel.h" #include "modules/audio_coding/test/PCMFile.h" #include "test/scoped_key_value_config.h" @@ -34,16 +36,18 @@ class TestRedFec final { private: void RegisterSendCodec(const std::unique_ptr& acm, const SdpAudioFormat& codec_format, - absl::optional vad_mode, + std::optional vad_mode, bool use_red); void Run(); void OpenOutFile(int16_t testNumber); test::ScopedKeyValueConfig field_trials_; - const rtc::scoped_refptr encoder_factory_; - const 
rtc::scoped_refptr decoder_factory_; + const Environment env_; + const scoped_refptr encoder_factory_; + const scoped_refptr decoder_factory_; std::unique_ptr _acmA; - std::unique_ptr _acm_receiver; + std::unique_ptr _neteq; + acm2::ResamplerHelper _resampler_helper; Channel* _channelA2B; diff --git a/modules/audio_coding/test/TestStereo.cc b/modules/audio_coding/test/TestStereo.cc index 94a1576026..8a9a973d21 100644 --- a/modules/audio_coding/test/TestStereo.cc +++ b/modules/audio_coding/test/TestStereo.cc @@ -15,6 +15,8 @@ #include "absl/strings/match.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/environment/environment_factory.h" +#include "api/neteq/default_neteq_factory.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/include/module_common_types.h" #include "rtc_base/strings/string_builder.h" @@ -25,7 +27,7 @@ namespace webrtc { // Class for simulating packet handling TestPackStereo::TestPackStereo() - : receiver_acm_(NULL), + : neteq_(NULL), seq_no_(0), timestamp_diff_(0), last_in_timestamp_(0), @@ -35,8 +37,8 @@ TestPackStereo::TestPackStereo() TestPackStereo::~TestPackStereo() {} -void TestPackStereo::RegisterReceiverACM(acm2::AcmReceiver* acm_receiver) { - receiver_acm_ = acm_receiver; +void TestPackStereo::RegisterReceiverNetEq(NetEq* neteq) { + neteq_ = neteq; return; } @@ -45,7 +47,7 @@ int32_t TestPackStereo::SendData(const AudioFrameType frame_type, const uint32_t timestamp, const uint8_t* payload_data, const size_t payload_size, - int64_t absolute_capture_timestamp_ms) { + int64_t /* absolute_capture_timestamp_ms */) { RTPHeader rtp_header; int32_t status = 0; @@ -60,8 +62,9 @@ int32_t TestPackStereo::SendData(const AudioFrameType frame_type, } if (lost_packet_ == false) { - status = receiver_acm_->InsertPacket( - rtp_header, rtc::ArrayView(payload_data, payload_size)); + status = neteq_->InsertPacket( + rtp_header, ArrayView(payload_data, payload_size), + /*receive_time=*/Timestamp::MinusInfinity()); if (frame_type != AudioFrameType::kAudioFrameCN) { payload_size_ = static_cast(payload_size); @@ -97,9 +100,11 @@ void TestPackStereo::set_lost_packet(bool lost) { } TestStereo::TestStereo() - : acm_a_(AudioCodingModule::Create()), - acm_b_(std::make_unique( - acm2::AcmReceiver::Config(CreateBuiltinAudioDecoderFactory()))), + : env_(CreateEnvironment()), + acm_a_(AudioCodingModule::Create()), + neteq_(DefaultNetEqFactory().Create(env_, + NetEq::Config(), + CreateBuiltinAudioDecoderFactory())), channel_a2b_(NULL), test_cntr_(0), pack_size_samp_(0), @@ -132,10 +137,10 @@ void TestStereo::Perform() { in_file_mono_->ReadStereo(false); // Create and initialize two ACMs, one for each side of a one-to-one call. - ASSERT_TRUE((acm_a_.get() != NULL) && (acm_b_.get() != NULL)); - acm_b_->FlushBuffers(); + ASSERT_TRUE((acm_a_.get() != NULL) && (neteq_.get() != NULL)); + neteq_->FlushBuffers(); - acm_b_->SetCodecs({{103, {"ISAC", 16000, 1}}, + neteq_->SetCodecs({{103, {"ISAC", 16000, 1}}, {104, {"ISAC", 32000, 1}}, {107, {"L16", 8000, 1}}, {108, {"L16", 16000, 1}}, @@ -147,7 +152,6 @@ void TestStereo::Perform() { {110, {"PCMU", 8000, 2}}, {8, {"PCMA", 8000, 1}}, {118, {"PCMA", 8000, 2}}, - {102, {"ILBC", 8000, 1}}, {9, {"G722", 8000, 1}}, {119, {"G722", 8000, 2}}, {120, {"OPUS", 48000, 2, {{"stereo", "1"}}}}, @@ -158,7 +162,7 @@ void TestStereo::Perform() { // Create and connect the channel. 
channel_a2b_ = new TestPackStereo; EXPECT_EQ(0, acm_a_->RegisterTransportCallback(channel_a2b_)); - channel_a2b_->RegisterReceiverACM(acm_b_.get()); + channel_a2b_->RegisterReceiverNetEq(neteq_.get()); char codec_pcma_temp[] = "PCMA"; RegisterSendCodec('A', codec_pcma_temp, 8000, 64000, 80, 2); @@ -169,6 +173,8 @@ void TestStereo::Perform() { audio_channels = 2; codec_channels = 2; +// TODO(bugs.webrtc.org/345525069): Either fix/enable or remove G722. +#if defined(__has_feature) && !__has_feature(undefined_behavior_sanitizer) // All codecs are tested for all allowed sampling frequencies, rates and // packet sizes. channel_a2b_->set_codec_mode(kStereo); @@ -188,6 +194,7 @@ void TestStereo::Perform() { RegisterSendCodec('A', codec_g722, 16000, 64000, 960, codec_channels); Run(channel_a2b_, audio_channels, codec_channels); out_file_.Close(); +#endif channel_a2b_->set_codec_mode(kStereo); test_cntr_++; @@ -293,12 +300,15 @@ void TestStereo::Perform() { audio_channels = 1; codec_channels = 2; +// TODO(bugs.webrtc.org/345525069): Either fix/enable or remove G722. +#if defined(__has_feature) && !__has_feature(undefined_behavior_sanitizer) test_cntr_++; channel_a2b_->set_codec_mode(kStereo); OpenOutFile(test_cntr_); RegisterSendCodec('A', codec_g722, 16000, 64000, 160, codec_channels); Run(channel_a2b_, audio_channels, codec_channels); out_file_.Close(); +#endif test_cntr_++; channel_a2b_->set_codec_mode(kStereo); @@ -349,12 +359,15 @@ void TestStereo::Perform() { codec_channels = 1; channel_a2b_->set_codec_mode(kMono); +// TODO(bugs.webrtc.org/345525069): Either fix/enable or remove G722. +#if defined(__has_feature) && !__has_feature(undefined_behavior_sanitizer) // Run stereo audio and mono codec. test_cntr_++; OpenOutFile(test_cntr_); RegisterSendCodec('A', codec_g722, 16000, 64000, 160, codec_channels); Run(channel_a2b_, audio_channels, codec_channels); out_file_.Close(); +#endif test_cntr_++; OpenOutFile(test_cntr_); @@ -387,7 +400,7 @@ void TestStereo::Perform() { OpenOutFile(test_cntr_); // Encode and decode in mono. RegisterSendCodec('A', codec_opus, 48000, 32000, 960, codec_channels); - acm_b_->SetCodecs({{120, {"OPUS", 48000, 2}}}); + neteq_->SetCodecs({{120, {"OPUS", 48000, 2}}}); Run(channel_a2b_, audio_channels, codec_channels); // Encode in stereo, decode in mono. @@ -406,13 +419,13 @@ void TestStereo::Perform() { // Decode in stereo. test_cntr_++; OpenOutFile(test_cntr_); - acm_b_->SetCodecs({{120, {"OPUS", 48000, 2, {{"stereo", "1"}}}}}); + neteq_->SetCodecs({{120, {"OPUS", 48000, 2, {{"stereo", "1"}}}}}); Run(channel_a2b_, audio_channels, 2); out_file_.Close(); // Decode in mono. test_cntr_++; OpenOutFile(test_cntr_); - acm_b_->SetCodecs({{120, {"OPUS", 48000, 2}}}); + neteq_->SetCodecs({{120, {"OPUS", 48000, 2}}}); Run(channel_a2b_, audio_channels, codec_channels); out_file_.Close(); #endif @@ -455,7 +468,7 @@ void TestStereo::RegisterSendCodec(char side, case 'B': { // We no longer use this case. Refactor code to avoid the switch. ASSERT_TRUE(false); - // my_acm = acm_b_.get(); + // my_acm = neteq_.get(); break; } default: @@ -467,21 +480,20 @@ void TestStereo::RegisterSendCodec(char side, const int clockrate_hz = absl::EqualsIgnoreCase(codec_name, "g722") ? 
sampling_freq_hz / 2 : sampling_freq_hz; - const std::string ptime = rtc::ToString(rtc::CheckedDivExact( - pack_size, rtc::CheckedDivExact(sampling_freq_hz, 1000))); - SdpAudioFormat::Parameters params = {{"ptime", ptime}}; + const std::string ptime = absl::StrCat( + CheckedDivExact(pack_size, CheckedDivExact(sampling_freq_hz, 1000))); + CodecParameterMap params = {{"ptime", ptime}}; RTC_CHECK(channels == 1 || channels == 2); if (absl::EqualsIgnoreCase(codec_name, "opus")) { if (channels == 2) { params["stereo"] = "1"; } channels = 2; - params["maxaveragebitrate"] = rtc::ToString(rate); + params["maxaveragebitrate"] = absl::StrCat(rate); } - constexpr int payload_type = 17; - auto encoder = encoder_factory->MakeAudioEncoder( - payload_type, SdpAudioFormat(codec_name, clockrate_hz, channels, params), - absl::nullopt); + auto encoder = encoder_factory->Create( + env_, SdpAudioFormat(codec_name, clockrate_hz, channels, params), + {.payload_type = 17}); EXPECT_NE(nullptr, encoder); my_acm->SetEncoder(std::move(encoder)); @@ -559,7 +571,8 @@ void TestStereo::Run(TestPackStereo* channel, // Run receive side of ACM bool muted; - EXPECT_EQ(0, acm_b_->GetAudio(out_freq_hz_b, &audio_frame, &muted)); + EXPECT_EQ(NetEq::kOK, neteq_->GetAudio(&audio_frame, &muted)); + EXPECT_TRUE(resampler_helper_.MaybeResample(out_freq_hz_b, &audio_frame)); ASSERT_FALSE(muted); // Write output speech to file @@ -589,7 +602,7 @@ void TestStereo::Run(TestPackStereo* channel, void TestStereo::OpenOutFile(int16_t test_number) { std::string file_name; - rtc::StringBuilder file_stream; + StringBuilder file_stream; file_stream << webrtc::test::OutputPath() << "teststereo_out_" << test_number << ".pcm"; file_name = file_stream.str(); diff --git a/modules/audio_coding/test/TestStereo.h b/modules/audio_coding/test/TestStereo.h index a215c90ec1..7f315695d0 100644 --- a/modules/audio_coding/test/TestStereo.h +++ b/modules/audio_coding/test/TestStereo.h @@ -15,7 +15,9 @@ #include -#include "modules/audio_coding/acm2/acm_receiver.h" +#include "api/environment/environment.h" +#include "api/neteq/neteq.h" +#include "modules/audio_coding/acm2/acm_resampler.h" #include "modules/audio_coding/include/audio_coding_module.h" #include "modules/audio_coding/test/PCMFile.h" @@ -30,7 +32,7 @@ class TestPackStereo : public AudioPacketizationCallback { TestPackStereo(); ~TestPackStereo(); - void RegisterReceiverACM(acm2::AcmReceiver* acm_receiver); + void RegisterReceiverNetEq(NetEq* neteq); int32_t SendData(AudioFrameType frame_type, uint8_t payload_type, @@ -46,7 +48,7 @@ class TestPackStereo : public AudioPacketizationCallback { void set_lost_packet(bool lost); private: - acm2::AcmReceiver* receiver_acm_; + NetEq* neteq_; int16_t seq_no_; uint32_t timestamp_diff_; uint32_t last_in_timestamp_; @@ -81,8 +83,10 @@ class TestStereo { int percent_loss = 0); void OpenOutFile(int16_t test_number); + const Environment env_; std::unique_ptr acm_a_; - std::unique_ptr acm_b_; + std::unique_ptr neteq_; + acm2::ResamplerHelper resampler_helper_; TestPackStereo* channel_a2b_; diff --git a/modules/audio_coding/test/TestVADDTX.cc b/modules/audio_coding/test/TestVADDTX.cc index 1789efd714..d03f1aed49 100644 --- a/modules/audio_coding/test/TestVADDTX.cc +++ b/modules/audio_coding/test/TestVADDTX.cc @@ -16,10 +16,10 @@ #include "absl/strings/string_view.h" #include "api/audio_codecs/audio_decoder_factory_template.h" #include "api/audio_codecs/audio_encoder_factory_template.h" -#include "api/audio_codecs/ilbc/audio_decoder_ilbc.h" -#include 
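// Illustrative sketch, not part of this patch: the ternary above halves the
// sampling rate for G722 because of an RFC 3551 quirk: G.722 audio is sampled
// at 16 kHz, but its RTP clock rate (and hence its SDP clock rate) is defined
// as 8000. Standalone version of that mapping, with made-up function names:
#include <cstdio>
#include <string>

int RtpClockRateHz(const std::string& codec_name, int sampling_freq_hz) {
  // G.722: RTP timestamps advance at 8 kHz even though samples arrive at 16 kHz.
  return codec_name == "g722" ? sampling_freq_hz / 2 : sampling_freq_hz;
}

int main() {
  std::printf("g722 @ 16 kHz -> clock rate %d\n", RtpClockRateHz("g722", 16000));  // 8000
  std::printf("opus @ 48 kHz -> clock rate %d\n", RtpClockRateHz("opus", 48000));  // 48000
}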
"api/audio_codecs/ilbc/audio_encoder_ilbc.h" #include "api/audio_codecs/opus/audio_decoder_opus.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" +#include "api/environment/environment_factory.h" +#include "api/neteq/default_neteq_factory.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" #include "modules/audio_coding/test/PCMFile.h" #include "rtc_base/strings/string_builder.h" @@ -66,29 +66,29 @@ void MonitoringAudioPacketizationCallback::GetStatistics(uint32_t* counter) { } TestVadDtx::TestVadDtx() - : encoder_factory_( - CreateAudioEncoderFactory()), - decoder_factory_( - CreateAudioDecoderFactory()), + : env_(CreateEnvironment()), + encoder_factory_(CreateAudioEncoderFactory()), + decoder_factory_(CreateAudioDecoderFactory()), acm_send_(AudioCodingModule::Create()), - acm_receive_(std::make_unique( - acm2::AcmReceiver::Config(decoder_factory_))), + neteq_(DefaultNetEqFactory().Create(env_, + NetEq::Config(), + decoder_factory_)), channel_(std::make_unique()), packetization_callback_( std::make_unique( channel_.get())) { EXPECT_EQ( 0, acm_send_->RegisterTransportCallback(packetization_callback_.get())); - channel_->RegisterReceiverACM(acm_receive_.get()); + channel_->RegisterReceiverNetEq(neteq_.get()); } bool TestVadDtx::RegisterCodec(const SdpAudioFormat& codec_format, - absl::optional vad_mode) { + std::optional vad_mode) { constexpr int payload_type = 17, cn_payload_type = 117; bool added_comfort_noise = false; - auto encoder = encoder_factory_->MakeAudioEncoder(payload_type, codec_format, - absl::nullopt); + auto encoder = encoder_factory_->Create(env_, codec_format, + {.payload_type = payload_type}); if (vad_mode.has_value() && !absl::EqualsIgnoreCase(codec_format.name, "opus")) { AudioEncoderCngConfig config; @@ -103,7 +103,7 @@ bool TestVadDtx::RegisterCodec(const SdpAudioFormat& codec_format, acm_send_->SetEncoder(std::move(encoder)); std::map receive_codecs = {{payload_type, codec_format}}; - acm_receive_->SetCodecs(receive_codecs); + neteq_->SetCodecs(receive_codecs); return added_comfort_noise; } @@ -142,7 +142,8 @@ void TestVadDtx::Run(absl::string_view in_filename, time_stamp_ += frame_size_samples; EXPECT_GE(acm_send_->Add10MsData(audio_frame), 0); bool muted; - acm_receive_->GetAudio(kOutputFreqHz, &audio_frame, &muted); + neteq_->GetAudio(&audio_frame, &muted); + resampler_helper_.MaybeResample(kOutputFreqHz, &audio_frame); ASSERT_FALSE(muted); out_file.Write10MsData(audio_frame); } @@ -177,14 +178,13 @@ void TestVadDtx::Run(absl::string_view in_filename, TestWebRtcVadDtx::TestWebRtcVadDtx() : output_file_num_(0) {} void TestWebRtcVadDtx::Perform() { - RunTestCases({"ILBC", 8000, 1}); RunTestCases({"opus", 48000, 2}); } // Test various configurations on VAD/DTX. 
void TestWebRtcVadDtx::RunTestCases(const SdpAudioFormat& codec_format) { Test(/*new_outfile=*/true, - /*expect_dtx_enabled=*/RegisterCodec(codec_format, absl::nullopt)); + /*expect_dtx_enabled=*/RegisterCodec(codec_format, std::nullopt)); Test(/*new_outfile=*/false, /*expect_dtx_enabled=*/RegisterCodec(codec_format, Vad::kVadAggressive)); @@ -205,7 +205,7 @@ void TestWebRtcVadDtx::Test(bool new_outfile, bool expect_dtx_enabled) { if (new_outfile) { output_file_num_++; } - rtc::StringBuilder out_filename; + StringBuilder out_filename; out_filename << webrtc::test::OutputPath() << "testWebRtcVadDtx_outFile_" << output_file_num_ << ".pcm"; Run(webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"), 32000, 1, @@ -219,7 +219,7 @@ void TestOpusDtx::Perform() { // Register Opus as send codec std::string out_filename = webrtc::test::OutputPath() + "testOpusDtx_outFile_mono.pcm"; - RegisterCodec({"opus", 48000, 2}, absl::nullopt); + RegisterCodec({"opus", 48000, 2}, std::nullopt); acm_send_->ModifyEncoder([](std::unique_ptr* encoder_ptr) { (*encoder_ptr)->SetDtx(false); }); diff --git a/modules/audio_coding/test/TestVADDTX.h b/modules/audio_coding/test/TestVADDTX.h index 17b3f4185d..e37ad03fae 100644 --- a/modules/audio_coding/test/TestVADDTX.h +++ b/modules/audio_coding/test/TestVADDTX.h @@ -16,8 +16,10 @@ #include "absl/strings/string_view.h" #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_encoder_factory.h" +#include "api/environment/environment.h" +#include "api/neteq/neteq.h" #include "common_audio/vad/include/vad.h" -#include "modules/audio_coding/acm2/acm_receiver.h" +#include "modules/audio_coding/acm2/acm_resampler.h" #include "modules/audio_coding/include/audio_coding_module.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/audio_coding/test/Channel.h" @@ -63,7 +65,7 @@ class TestVadDtx { protected: // Returns true iff CN was added. bool RegisterCodec(const SdpAudioFormat& codec_format, - absl::optional vad_mode); + std::optional vad_mode); // Encoding a file and see if the numbers that various packets occur follow // the expectation. Saves result to a file. 
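The TestStereo and TestVADDTX hunks above replace the acm2::AcmReceiver receive side with a NetEq instance plus an explicit resampling step. A minimal sketch of that receive path, using only the calls visible in these hunks (helper names such as MakeTestNetEq and PullTenMs are illustrative):

```cpp
// Receive-side sketch: NetEq created from a decoder factory, codecs
// registered directly on it, audio pulled and then resampled by the caller.
#include <map>
#include <memory>

#include "api/audio/audio_frame.h"
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/environment/environment.h"
#include "api/neteq/default_neteq_factory.h"
#include "api/neteq/neteq.h"
#include "modules/audio_coding/acm2/acm_resampler.h"
#include "rtc_base/checks.h"

namespace webrtc {

std::unique_ptr<NetEq> MakeTestNetEq(const Environment& env) {
  auto neteq = DefaultNetEqFactory().Create(env, NetEq::Config(),
                                            CreateBuiltinAudioDecoderFactory());
  // Receive codecs are registered on NetEq itself; no AcmReceiver::Config.
  neteq->SetCodecs({{120, {"opus", 48000, 2}}});
  return neteq;
}

void PullTenMs(NetEq& neteq,
               acm2::ResamplerHelper& resampler_helper,
               int out_freq_hz,
               AudioFrame& frame) {
  bool muted = false;
  // GetAudio() no longer takes an output rate; resampling is now explicit.
  RTC_CHECK_EQ(NetEq::kOK, neteq.GetAudio(&frame, &muted));
  RTC_CHECK(resampler_helper.MaybeResample(out_freq_hz, &frame));
}

}  // namespace webrtc
```

In the tests themselves the Environment comes from CreateEnvironment() and is stored as a const member alongside the NetEq and the ResamplerHelper.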
@@ -82,10 +84,12 @@ class TestVadDtx { bool append, const int* expects); - const rtc::scoped_refptr encoder_factory_; - const rtc::scoped_refptr decoder_factory_; + const Environment env_; + const scoped_refptr encoder_factory_; + const scoped_refptr decoder_factory_; std::unique_ptr acm_send_; - std::unique_ptr acm_receive_; + std::unique_ptr neteq_; + acm2::ResamplerHelper resampler_helper_; std::unique_ptr channel_; std::unique_ptr packetization_callback_; uint32_t time_stamp_ = 0x12345678; diff --git a/modules/audio_coding/test/opus_test.cc b/modules/audio_coding/test/opus_test.cc index dfebb5f6d7..0438514e7d 100644 --- a/modules/audio_coding/test/opus_test.cc +++ b/modules/audio_coding/test/opus_test.cc @@ -13,6 +13,8 @@ #include #include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/environment/environment_factory.h" +#include "api/neteq/default_neteq_factory.h" #include "modules/audio_coding/codecs/opus/opus_interface.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/audio_coding/test/TestStereo.h" @@ -22,8 +24,9 @@ namespace webrtc { OpusTest::OpusTest() - : acm_receiver_(std::make_unique( - acm2::AcmReceiver::Config(CreateBuiltinAudioDecoderFactory()))), + : neteq_(DefaultNetEqFactory().Create(CreateEnvironment(), + NetEq::Config(), + CreateBuiltinAudioDecoderFactory())), channel_a2b_(NULL), counter_(0), payload_type_(255), @@ -82,18 +85,18 @@ void OpusTest::Perform() { WebRtcOpus_DecoderInit(opus_mono_decoder_); WebRtcOpus_DecoderInit(opus_stereo_decoder_); - ASSERT_TRUE(acm_receiver_.get() != NULL); - acm_receiver_->FlushBuffers(); + ASSERT_TRUE(neteq_.get() != NULL); + neteq_->FlushBuffers(); // Register Opus stereo as receiving codec. constexpr int kOpusPayloadType = 120; const SdpAudioFormat kOpusFormatStereo("opus", 48000, 2, {{"stereo", "1"}}); payload_type_ = kOpusPayloadType; - acm_receiver_->SetCodecs({{kOpusPayloadType, kOpusFormatStereo}}); + neteq_->SetCodecs({{kOpusPayloadType, kOpusFormatStereo}}); // Create and connect the channel. channel_a2b_ = new TestPackStereo; - channel_a2b_->RegisterReceiverACM(acm_receiver_.get()); + channel_a2b_->RegisterReceiverNetEq(neteq_.get()); // // Test Stereo. @@ -154,7 +157,7 @@ void OpusTest::Perform() { // Register Opus mono as receiving codec. const SdpAudioFormat kOpusFormatMono("opus", 48000, 2); - acm_receiver_->SetCodecs({{kOpusPayloadType, kOpusFormatMono}}); + neteq_->SetCodecs({{kOpusPayloadType, kOpusFormatMono}}); // Run Opus with 2.5 ms frame size. Run(channel_a2b_, audio_channels, 32000, 120); @@ -353,8 +356,8 @@ void OpusTest::Run(TestPackStereo* channel, // Run received side of ACM. bool muted; - ASSERT_EQ(0, acm_receiver_->GetAudio(out_freq_hz_b, &audio_frame, &muted)); - ASSERT_FALSE(muted); + ASSERT_EQ(NetEq::kOK, neteq_->GetAudio(&audio_frame, &muted)); + ASSERT_TRUE(resampler_helper_.MaybeResample(out_freq_hz_b, &audio_frame)); // Write output speech to file. 
out_file_.Write10MsData( diff --git a/modules/audio_coding/test/opus_test.h b/modules/audio_coding/test/opus_test.h index cf5581ad2d..a2ae758e55 100644 --- a/modules/audio_coding/test/opus_test.h +++ b/modules/audio_coding/test/opus_test.h @@ -15,7 +15,7 @@ #include -#include "modules/audio_coding/acm2/acm_receiver.h" +#include "api/neteq/neteq.h" #include "modules/audio_coding/acm2/acm_resampler.h" #include "modules/audio_coding/codecs/opus/opus_interface.h" #include "modules/audio_coding/test/PCMFile.h" @@ -39,7 +39,8 @@ class OpusTest { void OpenOutFile(int test_number); - std::unique_ptr acm_receiver_; + std::unique_ptr neteq_; + acm2::ResamplerHelper resampler_helper_; TestPackStereo* channel_a2b_; PCMFile in_file_stereo_; PCMFile in_file_mono_; diff --git a/modules/audio_coding/test/target_delay_unittest.cc b/modules/audio_coding/test/target_delay_unittest.cc index 2a7162794c..8da125700a 100644 --- a/modules/audio_coding/test/target_delay_unittest.cc +++ b/modules/audio_coding/test/target_delay_unittest.cc @@ -12,8 +12,10 @@ #include "api/audio/audio_frame.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/environment/environment_factory.h" +#include "api/neteq/default_neteq_factory.h" #include "api/rtp_headers.h" -#include "modules/audio_coding/acm2/acm_receiver.h" +#include "api/units/timestamp.h" #include "modules/audio_coding/codecs/pcm16b/pcm16b.h" #include "modules/audio_coding/include/audio_coding_module.h" #include "test/gtest.h" @@ -24,8 +26,10 @@ namespace webrtc { class TargetDelayTest : public ::testing::Test { protected: TargetDelayTest() - : receiver_( - acm2::AcmReceiver::Config(CreateBuiltinAudioDecoderFactory())) {} + : neteq_( + DefaultNetEqFactory().Create(CreateEnvironment(), + NetEq::Config(), + CreateBuiltinAudioDecoderFactory())) {} ~TargetDelayTest() {} @@ -33,7 +37,7 @@ class TargetDelayTest : public ::testing::Test { constexpr int pltype = 108; std::map receive_codecs = { {pltype, {"L16", kSampleRateHz, 1}}}; - receiver_.SetCodecs(receive_codecs); + neteq_->SetCodecs(receive_codecs); rtp_header_.payloadType = pltype; rtp_header_.timestamp = 0; @@ -49,20 +53,20 @@ class TargetDelayTest : public ::testing::Test { } void OutOfRangeInput() { - EXPECT_EQ(-1, SetMinimumDelay(-1)); - EXPECT_EQ(-1, SetMinimumDelay(10001)); + EXPECT_FALSE(SetMinimumDelay(-1)); + EXPECT_FALSE(SetMinimumDelay(10001)); } void TargetDelayBufferMinMax() { const int kTargetMinDelayMs = kNum10msPerFrame * 10; - ASSERT_EQ(0, SetMinimumDelay(kTargetMinDelayMs)); + ASSERT_TRUE(SetMinimumDelay(kTargetMinDelayMs)); for (int m = 0; m < 30; ++m) // Run enough iterations to fill the buffer. Run(true); int clean_optimal_delay = GetCurrentOptimalDelayMs(); EXPECT_EQ(kTargetMinDelayMs, clean_optimal_delay); const int kTargetMaxDelayMs = 2 * (kNum10msPerFrame * 10); - ASSERT_EQ(0, SetMaximumDelay(kTargetMaxDelayMs)); + ASSERT_TRUE(SetMaximumDelay(kTargetMaxDelayMs)); for (int n = 0; n < 30; ++n) // Run enough iterations to fill the buffer. Run(false); @@ -83,9 +87,10 @@ class TargetDelayTest : public ::testing::Test { void Push() { rtp_header_.timestamp += kFrameSizeSamples; rtp_header_.sequenceNumber++; - ASSERT_EQ(0, receiver_.InsertPacket(rtp_header_, - rtc::ArrayView( - payload_, kFrameSizeSamples * 2))); + ASSERT_EQ(0, neteq_->InsertPacket( + rtp_header_, + ArrayView(payload_, kFrameSizeSamples * 2), + Timestamp::MinusInfinity())); } // Pull audio equivalent to the amount of audio in one RTP packet. 
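The TargetDelayTest changes above move the test onto NetEq's own API: SetMinimumDelay()/SetMaximumDelay() now report success as a bool rather than 0/-1, and InsertPacket() takes the payload as an ArrayView together with an explicit receive Timestamp. A minimal sketch of that call pattern (function name, delay values and header/payload handling are illustrative only):

```cpp
// Delay-API sketch: bool-returning delay setters and ArrayView + Timestamp
// based packet insertion, as used by the rewritten TargetDelayTest.
#include <cstdint>

#include "api/array_view.h"
#include "api/neteq/neteq.h"
#include "api/rtp_headers.h"
#include "api/units/timestamp.h"
#include "rtc_base/checks.h"

namespace webrtc {

void ConfigureDelayAndPush(NetEq& neteq,
                           RTPHeader& rtp_header,
                           ArrayView<const uint8_t> payload,
                           uint32_t samples_per_packet) {
  // Out-of-range delay requests now surface as `false` instead of -1.
  RTC_CHECK(neteq.SetMinimumDelay(/*delay_ms=*/200));
  RTC_CHECK(neteq.SetMaximumDelay(/*delay_ms=*/400));

  rtp_header.timestamp += samples_per_packet;
  rtp_header.sequenceNumber++;
  // The payload is passed as a view plus an explicit receive time;
  // Timestamp::MinusInfinity() stands in when no wall-clock time is tracked.
  RTC_CHECK_EQ(0, neteq.InsertPacket(rtp_header, payload,
                                     Timestamp::MinusInfinity()));
}

}  // namespace webrtc
```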
@@ -93,7 +98,7 @@ class TargetDelayTest : public ::testing::Test { AudioFrame frame; bool muted; for (int k = 0; k < kNum10msPerFrame; ++k) { // Pull one frame. - ASSERT_EQ(0, receiver_.GetAudio(-1, &frame, &muted)); + ASSERT_EQ(NetEq::kOK, neteq_->GetAudio(&frame, &muted)); ASSERT_FALSE(muted); // Had to use ASSERT_TRUE, ASSERT_EQ generated error. ASSERT_TRUE(kSampleRateHz == frame.sample_rate_hz_); @@ -110,9 +115,9 @@ class TargetDelayTest : public ::testing::Test { } if (!clean) { - for (int m = 0; m < 10; ++m) { // Long enough to trigger delay change. + for (int o = 0; o < 10; ++o) { // Long enough to trigger delay change. Push(); - for (int n = 0; n < kInterarrivalJitterPacket; ++n) + for (int p = 0; p < kInterarrivalJitterPacket; ++p) Pull(); } } @@ -120,20 +125,20 @@ class TargetDelayTest : public ::testing::Test { } int SetMinimumDelay(int delay_ms) { - return receiver_.SetMinimumDelay(delay_ms); + return neteq_->SetMinimumDelay(delay_ms); } int SetMaximumDelay(int delay_ms) { - return receiver_.SetMaximumDelay(delay_ms); + return neteq_->SetMaximumDelay(delay_ms); } int GetCurrentOptimalDelayMs() { - NetworkStatistics stats; - receiver_.GetNetworkStatistics(&stats); - return stats.preferredBufferSize; + NetEqNetworkStatistics neteq_stats; + neteq_->NetworkStatistics(&neteq_stats); + return neteq_stats.preferred_buffer_size_ms; } - acm2::AcmReceiver receiver_; + std::unique_ptr neteq_; RTPHeader rtp_header_; uint8_t payload_[kPayloadLenBytes]; }; diff --git a/modules/audio_device/BUILD.gn b/modules/audio_device/BUILD.gn index f1ab09d154..aa4d216b28 100644 --- a/modules/audio_device/BUILD.gn +++ b/modules/audio_device/BUILD.gn @@ -26,7 +26,7 @@ config("audio_device_warnings_config") { rtc_source_set("audio_device_default") { visibility = [ "*" ] sources = [ "include/audio_device_default.h" ] - deps = [ ":audio_device_api" ] + deps = [ "../../api/audio:audio_device" ] } rtc_source_set("audio_device") { @@ -47,14 +47,7 @@ rtc_source_set("audio_device_api") { "include/audio_device.h", "include/audio_device_defines.h", ] - deps = [ - "../../api:scoped_refptr", - "../../api/task_queue", - "../../rtc_base:checks", - "../../rtc_base:refcount", - "../../rtc_base:stringutils", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + deps = [ "../../api/audio:audio_device" ] } rtc_library("audio_device_config") { @@ -69,9 +62,9 @@ rtc_library("audio_device_buffer") { "fine_audio_buffer.h", ] deps = [ - ":audio_device_api", "../../api:array_view", "../../api:sequence_checker", + "../../api/audio:audio_device", "../../api/task_queue", "../../common_audio:common_audio_c", "../../rtc_base:buffer", @@ -79,7 +72,6 @@ rtc_library("audio_device_buffer") { "../../rtc_base:event_tracer", "../../rtc_base:logging", "../../rtc_base:macromagic", - "../../rtc_base:rtc_task_queue", "../../rtc_base:safe_conversions", "../../rtc_base:timestamp_aligner", "../../rtc_base:timeutils", @@ -95,8 +87,8 @@ rtc_library("audio_device_generic") { "audio_device_generic.h", ] deps = [ - ":audio_device_api", ":audio_device_buffer", + "../../api/audio:audio_device", "../../rtc_base:logging", ] } @@ -106,7 +98,7 @@ rtc_library("audio_device_name") { "audio_device_name.cc", "audio_device_name.h", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + deps = [ "//third_party/abseil-cpp/absl/strings:string_view" ] } rtc_source_set("windows_core_audio_utility") { @@ -117,8 +109,8 @@ rtc_source_set("windows_core_audio_utility") { ] deps = [ - ":audio_device_api", ":audio_device_name", + 
"../../api/audio:audio_device", "../../api/units:time_delta", "../../rtc_base:checks", "../../rtc_base:logging", @@ -126,8 +118,8 @@ rtc_source_set("windows_core_audio_utility") { "../../rtc_base:platform_thread_types", "../../rtc_base:stringutils", "../../rtc_base/win:windows_version", + "//third_party/abseil-cpp/absl/strings:strings", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings" ] libs = [ "oleaut32.lib" ] } @@ -156,12 +148,12 @@ rtc_source_set("audio_device_module_from_input_and_output") { ] deps = [ - ":audio_device_api", ":audio_device_buffer", ":windows_core_audio_utility", "../../api:make_ref_counted", "../../api:scoped_refptr", "../../api:sequence_checker", + "../../api/audio:audio_device", "../../api/task_queue", "../../rtc_base:checks", "../../rtc_base:logging", @@ -172,10 +164,7 @@ rtc_source_set("audio_device_module_from_input_and_output") { "../../rtc_base:timeutils", "../../rtc_base/win:scoped_com_initializer", "../../rtc_base/win:windows_version", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", ] } } @@ -190,7 +179,6 @@ if (!build_with_chromium) { "test_audio_device_impl.h", ] deps = [ - ":audio_device_api", ":audio_device_buffer", ":audio_device_default", ":audio_device_generic", @@ -198,6 +186,7 @@ if (!build_with_chromium) { "../../api:array_view", "../../api:make_ref_counted", "../../api:scoped_refptr", + "../../api/audio:audio_device", "../../api/task_queue", "../../api/units:time_delta", "../../common_audio", @@ -208,15 +197,11 @@ if (!build_with_chromium) { "../../rtc_base:platform_thread", "../../rtc_base:random", "../../rtc_base:rtc_event", - "../../rtc_base:rtc_task_queue", "../../rtc_base:safe_conversions", "../../rtc_base:timeutils", "../../rtc_base/synchronization:mutex", "../../rtc_base/task_utils:repeating_task", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } } @@ -228,9 +213,9 @@ rtc_library("audio_device_dummy") { "dummy/audio_device_dummy.h", ] deps = [ - ":audio_device_api", ":audio_device_buffer", ":audio_device_generic", + "../../api/audio:audio_device", ] } @@ -257,8 +242,8 @@ if (!build_with_chromium) { "../../rtc_base/synchronization:mutex", "../../rtc_base/system:file_wrapper", "../../system_wrappers", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } } @@ -267,7 +252,6 @@ if (!build_with_chromium) { rtc_library("audio_device_impl") { visibility = [ "*" ] deps = [ - ":audio_device_api", ":audio_device_buffer", ":audio_device_config", ":audio_device_default", @@ -278,6 +262,7 @@ rtc_library("audio_device_impl") { "../../api:refcountedbase", "../../api:scoped_refptr", "../../api:sequence_checker", + "../../api/audio:audio_device", "../../api/task_queue", "../../api/units:time_delta", "../../common_audio", @@ -289,7 +274,6 @@ rtc_library("audio_device_impl") { "../../rtc_base:platform_thread", "../../rtc_base:random", "../../rtc_base:rtc_event", - "../../rtc_base:rtc_task_queue", "../../rtc_base:safe_conversions", "../../rtc_base:stringutils", "../../rtc_base:timeutils", @@ -301,11 +285,8 @@ rtc_library("audio_device_impl") { "../../system_wrappers:field_trial", "../../system_wrappers:metrics", "../utility", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/strings:strings", - 
"//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (rtc_include_internal_audio_device && is_ios) { deps += [ "../../sdk:audio_device" ] @@ -451,6 +432,7 @@ rtc_source_set("mock_audio_device") { ":audio_device_buffer", ":audio_device_impl", "../../api:make_ref_counted", + "../../api/audio:audio_device", "../../test:test_support", ] } @@ -474,6 +456,7 @@ if (rtc_include_tests && !build_with_chromium) { "../../api:array_view", "../../api:scoped_refptr", "../../api:sequence_checker", + "../../api/audio:audio_device", "../../api/task_queue", "../../api/task_queue:default_task_queue_factory", "../../api/units:time_delta", @@ -481,7 +464,6 @@ if (rtc_include_tests && !build_with_chromium) { "../../common_audio", "../../rtc_base:buffer", "../../rtc_base:checks", - "../../rtc_base:ignore_wundef", "../../rtc_base:logging", "../../rtc_base:macromagic", "../../rtc_base:race_checker", @@ -494,10 +476,6 @@ if (rtc_include_tests && !build_with_chromium) { "../../test:test_support", "../../test/time_controller", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] if (is_linux || is_chromeos || is_mac || is_win) { sources += [ "audio_device_unittest.cc" ] } @@ -515,7 +493,6 @@ if (rtc_include_tests && !build_with_chromium) { "../../sdk/android:internal_jni", "../../sdk/android:libjingle_peerconnection_java", "../../sdk/android:native_api_jni", - "../../sdk/android:native_test_jni_onload", "../utility", ] } diff --git a/modules/audio_device/audio_device_buffer.cc b/modules/audio_device/audio_device_buffer.cc index f1bd8e823b..75406b1ee9 100644 --- a/modules/audio_device/audio_device_buffer.cc +++ b/modules/audio_device/audio_device_buffer.cc @@ -30,13 +30,13 @@ static const char kTimerQueueName[] = "AudioDeviceBufferTimer"; // Time between two sucessive calls to LogStats(). static const size_t kTimerIntervalInSeconds = 10; static const size_t kTimerIntervalInMilliseconds = - kTimerIntervalInSeconds * rtc::kNumMillisecsPerSec; + kTimerIntervalInSeconds * kNumMillisecsPerSec; // Min time required to qualify an audio session as a "call". If playout or // recording has been active for less than this time we will not store any // logs or UMA stats but instead consider the call as too short. static const size_t kMinValidCallTimeTimeInSeconds = 10; static const size_t kMinValidCallTimeTimeInMilliseconds = - kMinValidCallTimeTimeInSeconds * rtc::kNumMillisecsPerSec; + kMinValidCallTimeTimeInSeconds * kNumMillisecsPerSec; #ifdef AUDIO_DEVICE_PLAYS_SINUS_TONE static const double k2Pi = 6.28318530717959; #endif @@ -78,6 +78,17 @@ AudioDeviceBuffer::~AudioDeviceBuffer() { RTC_DCHECK(!playing_); RTC_DCHECK(!recording_); RTC_LOG(LS_INFO) << "AudioDeviceBuffer::~dtor"; + + // Delete and and thus stop task queue before deleting other members to avoid + // race with running tasks. Even though !playing_ and !recording_ called + // StopPeriodicLogging, such stop is asynchronous and may race with the + // AudioDeviceBuffer destructor. In particular there might be regular LogStats + // that attempts to repost task to the task_queue_. + // Thus task_queue_ should be deleted before pointer to it is invalidated. + // std::unique_ptr destructor does the same two operations in reverse order as + // it doesn't expect member would be used after its destruction has started. 
+ task_queue_.get_deleter()(task_queue_.get()); + task_queue_.release(); } int32_t AudioDeviceBuffer::RegisterAudioCallback( @@ -102,13 +113,13 @@ void AudioDeviceBuffer::StartPlayout() { } RTC_DLOG(LS_INFO) << __FUNCTION__; // Clear members tracking playout stats and do it on the task queue. - task_queue_.PostTask([this] { ResetPlayStats(); }); + task_queue_->PostTask([this] { ResetPlayStats(); }); // Start a periodic timer based on task queue if not already done by the // recording side. if (!recording_) { StartPeriodicLogging(); } - const int64_t now_time = rtc::TimeMillis(); + const int64_t now_time = TimeMillis(); // Clear members that are only touched on the main (creating) thread. play_start_time_ = now_time; playing_ = true; @@ -121,14 +132,14 @@ void AudioDeviceBuffer::StartRecording() { } RTC_DLOG(LS_INFO) << __FUNCTION__; // Clear members tracking recording stats and do it on the task queue. - task_queue_.PostTask([this] { ResetRecStats(); }); + task_queue_->PostTask([this] { ResetRecStats(); }); // Start a periodic timer based on task queue if not already done by the // playout side. if (!playing_) { StartPeriodicLogging(); } // Clear members that will be touched on the main (creating) thread. - rec_start_time_ = rtc::TimeMillis(); + rec_start_time_ = TimeMillis(); recording_ = true; // And finally a member which can be modified on the native audio thread. // It is safe to do so since we know by design that the owning ADM has not @@ -147,8 +158,7 @@ void AudioDeviceBuffer::StopPlayout() { if (!recording_) { StopPeriodicLogging(); } - RTC_LOG(LS_INFO) << "total playout time: " - << rtc::TimeSince(play_start_time_); + RTC_LOG(LS_INFO) << "total playout time: " << TimeSince(play_start_time_); } void AudioDeviceBuffer::StopRecording() { @@ -172,7 +182,7 @@ void AudioDeviceBuffer::StopRecording() { // the fact that `only_silence_recorded_` can be affected during the complete // call makes chances of conflicts with potentially one last callback very // small. - const size_t time_since_start = rtc::TimeSince(rec_start_time_); + const size_t time_since_start = TimeSince(rec_start_time_); if (time_since_start > kMinValidCallTimeTimeInMilliseconds) { const int only_zeros = static_cast(only_silence_recorded_); RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.RecordedOnlyZeros", only_zeros); @@ -234,13 +244,13 @@ void AudioDeviceBuffer::SetVQEData(int play_delay_ms, int rec_delay_ms) { int32_t AudioDeviceBuffer::SetRecordedBuffer(const void* audio_buffer, size_t samples_per_channel) { - return SetRecordedBuffer(audio_buffer, samples_per_channel, absl::nullopt); + return SetRecordedBuffer(audio_buffer, samples_per_channel, std::nullopt); } int32_t AudioDeviceBuffer::SetRecordedBuffer( const void* audio_buffer, size_t samples_per_channel, - absl::optional capture_timestamp_ns) { + std::optional capture_timestamp_ns) { // Copy the complete input buffer to the local buffer. 
const size_t old_size = rec_buffer_.size(); rec_buffer_.SetData(static_cast(audio_buffer), @@ -252,15 +262,14 @@ int32_t AudioDeviceBuffer::SetRecordedBuffer( } if (capture_timestamp_ns) { - int64_t align_offsync_estimation_time = rtc::TimeMicros(); - if (align_offsync_estimation_time - - rtc::TimestampAligner::kMinFrameIntervalUs > + int64_t align_offsync_estimation_time = TimeMicros(); + if (align_offsync_estimation_time - TimestampAligner::kMinFrameIntervalUs > align_offsync_estimation_time_) { align_offsync_estimation_time_ = align_offsync_estimation_time; capture_timestamp_ns_ = - rtc::kNumNanosecsPerMicrosec * + kNumNanosecsPerMicrosec * timestamp_aligner_.TranslateTimestamp( - *capture_timestamp_ns / rtc::kNumNanosecsPerMicrosec, + *capture_timestamp_ns / kNumNanosecsPerMicrosec, align_offsync_estimation_time); } else { // The Timestamp aligner is designed to prevent timestamps that are too @@ -270,9 +279,9 @@ int32_t AudioDeviceBuffer::SetRecordedBuffer( // the clock offset estimation. This get us timestamps without generating // warnings, but could generate two timestamps within a millisecond. capture_timestamp_ns_ = - rtc::kNumNanosecsPerMicrosec * + kNumNanosecsPerMicrosec * timestamp_aligner_.TranslateTimestamp(*capture_timestamp_ns / - rtc::kNumNanosecsPerMicrosec); + kNumNanosecsPerMicrosec); } } // Derive a new level value twice per second and check if it is non-zero. @@ -388,16 +397,16 @@ int32_t AudioDeviceBuffer::GetPlayoutData(void* audio_buffer) { } void AudioDeviceBuffer::StartPeriodicLogging() { - task_queue_.PostTask([this] { LogStats(AudioDeviceBuffer::LOG_START); }); + task_queue_->PostTask([this] { LogStats(AudioDeviceBuffer::LOG_START); }); } void AudioDeviceBuffer::StopPeriodicLogging() { - task_queue_.PostTask([this] { LogStats(AudioDeviceBuffer::LOG_STOP); }); + task_queue_->PostTask([this] { LogStats(AudioDeviceBuffer::LOG_STOP); }); } void AudioDeviceBuffer::LogStats(LogState state) { - RTC_DCHECK_RUN_ON(&task_queue_); - int64_t now_time = rtc::TimeMillis(); + RTC_DCHECK_RUN_ON(task_queue_.get()); + int64_t now_time = TimeMillis(); if (state == AudioDeviceBuffer::LOG_START) { // Reset counters at start. We will not add any logging in this state but @@ -418,7 +427,7 @@ void AudioDeviceBuffer::LogStats(LogState state) { } int64_t next_callback_time = now_time + kTimerIntervalInMilliseconds; - int64_t time_since_last = rtc::TimeDiff(now_time, last_timer_task_time_); + int64_t time_since_last = TimeDiff(now_time, last_timer_task_time_); last_timer_task_time_ = now_time; Stats stats; @@ -493,24 +502,24 @@ void AudioDeviceBuffer::LogStats(LogState state) { } last_stats_ = stats; - int64_t time_to_wait_ms = next_callback_time - rtc::TimeMillis(); + int64_t time_to_wait_ms = next_callback_time - TimeMillis(); RTC_DCHECK_GT(time_to_wait_ms, 0) << "Invalid timer interval"; // Keep posting new (delayed) tasks until state is changed to kLogStop. 
- task_queue_.PostDelayedTask( + task_queue_->PostDelayedTask( [this] { AudioDeviceBuffer::LogStats(AudioDeviceBuffer::LOG_ACTIVE); }, TimeDelta::Millis(time_to_wait_ms)); } void AudioDeviceBuffer::ResetRecStats() { - RTC_DCHECK_RUN_ON(&task_queue_); + RTC_DCHECK_RUN_ON(task_queue_.get()); last_stats_.ResetRecStats(); MutexLock lock(&lock_); stats_.ResetRecStats(); } void AudioDeviceBuffer::ResetPlayStats() { - RTC_DCHECK_RUN_ON(&task_queue_); + RTC_DCHECK_RUN_ON(task_queue_.get()); last_stats_.ResetPlayStats(); MutexLock lock(&lock_); stats_.ResetPlayStats(); diff --git a/modules/audio_device/audio_device_buffer.h b/modules/audio_device/audio_device_buffer.h index 1260a24c61..4f2ae64211 100644 --- a/modules/audio_device/audio_device_buffer.h +++ b/modules/audio_device/audio_device_buffer.h @@ -15,13 +15,14 @@ #include #include +#include +#include "api/audio/audio_device_defines.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" -#include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/buffer.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/timestamp_aligner.h" @@ -109,7 +110,7 @@ class AudioDeviceBuffer { virtual int32_t SetRecordedBuffer( const void* audio_buffer, size_t samples_per_channel, - absl::optional capture_timestamp_ns); + std::optional capture_timestamp_ns); virtual void SetVQEData(int play_delay_ms, int rec_delay_ms); virtual int32_t DeliverRecordedData(); uint32_t NewMicLevel() const; @@ -158,7 +159,7 @@ class AudioDeviceBuffer { // Task queue used to invoke LogStats() periodically. Tasks are executed on a // worker thread but it does not necessarily have to be the same thread for // each task. - rtc::TaskQueue task_queue_; + std::unique_ptr task_queue_; // Raw pointer to AudioTransport instance. Supplied to RegisterAudioCallback() // and it must outlive this object. It is not possible to change this member @@ -185,11 +186,11 @@ class AudioDeviceBuffer { // Buffer used for audio samples to be played out. Size can be changed // dynamically. The 16-bit samples are interleaved, hence the size is // proportional to the number of channels. - rtc::BufferT play_buffer_; + BufferT play_buffer_; // Byte buffer used for recorded audio samples. Size can be changed // dynamically. - rtc::BufferT rec_buffer_; + BufferT rec_buffer_; // Contains true of a key-press has been detected. bool typing_status_; @@ -199,11 +200,11 @@ class AudioDeviceBuffer { int rec_delay_ms_; // Capture timestamp. - absl::optional capture_timestamp_ns_; + std::optional capture_timestamp_ns_; // The last time the Timestamp Aligner was used to estimate clock offset // between system clock and capture time from audio. // This is used to prevent estimating the clock offset too often. - absl::optional align_offsync_estimation_time_; + std::optional align_offsync_estimation_time_; // Counts number of times LogStats() has been called. size_t num_stat_reports_ RTC_GUARDED_BY(task_queue_); @@ -239,7 +240,7 @@ class AudioDeviceBuffer { // Used for converting capture timestaps (received from AudioRecordThread // via AudioRecordJni::DataIsRecorded) to RTC clock. - rtc::TimestampAligner timestamp_aligner_; + TimestampAligner timestamp_aligner_; // Should *never* be defined in production builds. Only used for testing. // When defined, the output signal will be replaced by a sinus tone at 440Hz. 
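The audio_device_buffer hunks above replace rtc::TaskQueue with a std::unique_ptr<TaskQueueBase, TaskQueueDeleter> and add an explicit early deletion of the queue in the destructor. A minimal sketch of that ownership pattern, assuming only that pending tasks may touch other members of the same object (class and member names are illustrative):

```cpp
// Ownership sketch: a TaskQueueBase held through unique_ptr with
// TaskQueueDeleter, torn down before the members its tasks touch.
#include <memory>

#include "api/task_queue/task_queue_base.h"
#include "api/task_queue/task_queue_factory.h"

namespace webrtc {

class PeriodicLogger {
 public:
  explicit PeriodicLogger(TaskQueueFactory& factory)
      : task_queue_(factory.CreateTaskQueue(
            "PeriodicLogger", TaskQueueFactory::Priority::NORMAL)) {}

  ~PeriodicLogger() {
    // Deleting the queue first blocks until any currently running task has
    // finished and drops pending ones, so no task can touch `counter_`
    // afterwards; release() keeps ~unique_ptr from deleting the queue twice.
    task_queue_.get_deleter()(task_queue_.get());
    task_queue_.release();
  }

  void Tick() {
    task_queue_->PostTask([this] { ++counter_; });
  }

 private:
  std::unique_ptr<TaskQueueBase, TaskQueueDeleter> task_queue_;
  int counter_ = 0;  // Stand-in for the stats state the queued tasks update.
};

}  // namespace webrtc
```

Calling the deleter by hand and then release() performs the same two steps as ~unique_ptr, but in the order that keeps the member valid while the queue shuts down.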
diff --git a/modules/audio_device/audio_device_data_observer.cc b/modules/audio_device/audio_device_data_observer.cc index 0524830327..0548f6cffc 100644 --- a/modules/audio_device/audio_device_data_observer.cc +++ b/modules/audio_device/audio_device_data_observer.cc @@ -10,8 +10,16 @@ #include "modules/audio_device/include/audio_device_data_observer.h" +#include +#include +#include +#include +#include + +#include "api/audio/audio_device.h" +#include "api/audio/audio_device_defines.h" #include "api/make_ref_counted.h" -#include "modules/audio_device/include/audio_device_defines.h" +#include "api/scoped_refptr.h" #include "rtc_base/checks.h" namespace webrtc { @@ -22,28 +30,11 @@ namespace { // callback and redirects the PCM data to AudioDeviceDataObserver callback. class ADMWrapper : public AudioDeviceModule, public AudioTransport { public: - ADMWrapper(rtc::scoped_refptr impl, - AudioDeviceDataObserver* legacy_observer, - std::unique_ptr observer) - : impl_(impl), - legacy_observer_(legacy_observer), - observer_(std::move(observer)) { - is_valid_ = impl_.get() != nullptr; - } - ADMWrapper(AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory, - AudioDeviceDataObserver* legacy_observer, + ADMWrapper(scoped_refptr impl, std::unique_ptr observer) - : ADMWrapper(AudioDeviceModule::Create(audio_layer, task_queue_factory), - legacy_observer, - std::move(observer)) {} - ~ADMWrapper() override { - audio_transport_ = nullptr; - observer_ = nullptr; - } + : impl_(std::move(impl)), observer_(std::move(observer)) {} - // Make sure we have a valid ADM before returning it to user. - bool IsValid() { return is_valid_; } + ~ADMWrapper() override = default; int32_t RecordedDataIsAvailable(const void* audioSamples, size_t nSamples, @@ -58,7 +49,7 @@ class ADMWrapper : public AudioDeviceModule, public AudioTransport { return RecordedDataIsAvailable( audioSamples, nSamples, nBytesPerSample, nChannels, samples_per_sec, total_delay_ms, clockDrift, currentMicLevel, keyPressed, newMicLevel, - /*capture_timestamp_ns=*/absl::nullopt); + /*capture_timestamp_ns=*/std::nullopt); } // AudioTransport methods overrides. @@ -73,7 +64,7 @@ class ADMWrapper : public AudioDeviceModule, public AudioTransport { uint32_t currentMicLevel, bool keyPressed, uint32_t& newMicLevel, - absl::optional capture_timestamp_ns) override { + std::optional capture_timestamp_ns) override { int32_t res = 0; // Capture PCM data of locally captured audio. 
if (observer_) { @@ -122,13 +113,13 @@ class ADMWrapper : public AudioDeviceModule, public AudioTransport { return res; } - void PullRenderData(int bits_per_sample, - int sample_rate, - size_t number_of_channels, - size_t number_of_frames, - void* audio_data, - int64_t* elapsed_time_ms, - int64_t* ntp_time_ms) override { + void PullRenderData(int /* bits_per_sample */, + int /* sample_rate */, + size_t /* number_of_channels */, + size_t /* number_of_frames */, + void* /* audio_data */, + int64_t* /* elapsed_time_ms */, + int64_t* /* ntp_time_ms */) override { RTC_DCHECK_NOTREACHED(); } @@ -308,66 +299,21 @@ class ADMWrapper : public AudioDeviceModule, public AudioTransport { #endif // WEBRTC_IOS protected: - rtc::scoped_refptr impl_; - AudioDeviceDataObserver* legacy_observer_ = nullptr; + scoped_refptr impl_; std::unique_ptr observer_; AudioTransport* audio_transport_ = nullptr; - bool is_valid_ = false; }; } // namespace -rtc::scoped_refptr CreateAudioDeviceWithDataObserver( - rtc::scoped_refptr impl, - std::unique_ptr observer) { - auto audio_device = rtc::make_ref_counted(impl, observer.get(), - std::move(observer)); - - if (!audio_device->IsValid()) { - return nullptr; - } - - return audio_device; -} - -rtc::scoped_refptr CreateAudioDeviceWithDataObserver( - rtc::scoped_refptr impl, - AudioDeviceDataObserver* legacy_observer) { - auto audio_device = - rtc::make_ref_counted(impl, legacy_observer, nullptr); - - if (!audio_device->IsValid()) { - return nullptr; - } - - return audio_device; -} - -rtc::scoped_refptr CreateAudioDeviceWithDataObserver( - AudioDeviceModule::AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory, +scoped_refptr CreateAudioDeviceWithDataObserver( + scoped_refptr impl, std::unique_ptr observer) { - auto audio_device = rtc::make_ref_counted( - audio_layer, task_queue_factory, observer.get(), std::move(observer)); - - if (!audio_device->IsValid()) { + if (impl == nullptr) { return nullptr; } - - return audio_device; + return make_ref_counted(std::move(impl), std::move(observer)); } -rtc::scoped_refptr CreateAudioDeviceWithDataObserver( - AudioDeviceModule::AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory, - AudioDeviceDataObserver* legacy_observer) { - auto audio_device = rtc::make_ref_counted( - audio_layer, task_queue_factory, legacy_observer, nullptr); - if (!audio_device->IsValid()) { - return nullptr; - } - - return audio_device; -} } // namespace webrtc diff --git a/modules/audio_device/audio_device_generic.cc b/modules/audio_device/audio_device_generic.cc index 7b8cfd1734..a99e015814 100644 --- a/modules/audio_device/audio_device_generic.cc +++ b/modules/audio_device/audio_device_generic.cc @@ -15,50 +15,41 @@ namespace webrtc { bool AudioDeviceGeneric::BuiltInAECIsAvailable() const { - RTC_LOG_F(LS_ERROR) << "Not supported on this platform"; return false; } -int32_t AudioDeviceGeneric::EnableBuiltInAEC(bool enable) { - RTC_LOG_F(LS_ERROR) << "Not supported on this platform"; +int32_t AudioDeviceGeneric::EnableBuiltInAEC(bool /* enable */) { return -1; } bool AudioDeviceGeneric::BuiltInAGCIsAvailable() const { - RTC_LOG_F(LS_ERROR) << "Not supported on this platform"; return false; } -int32_t AudioDeviceGeneric::EnableBuiltInAGC(bool enable) { - RTC_LOG_F(LS_ERROR) << "Not supported on this platform"; +int32_t AudioDeviceGeneric::EnableBuiltInAGC(bool /* enable */) { return -1; } bool AudioDeviceGeneric::BuiltInNSIsAvailable() const { - RTC_LOG_F(LS_ERROR) << "Not supported on this platform"; return false; } -int32_t 
AudioDeviceGeneric::EnableBuiltInNS(bool enable) { - RTC_LOG_F(LS_ERROR) << "Not supported on this platform"; +int32_t AudioDeviceGeneric::EnableBuiltInNS(bool /* enable */) { return -1; } int32_t AudioDeviceGeneric::GetPlayoutUnderrunCount() const { - RTC_LOG_F(LS_ERROR) << "Not supported on this platform"; return -1; } #if defined(WEBRTC_IOS) int AudioDeviceGeneric::GetPlayoutAudioParameters( AudioParameters* params) const { - RTC_LOG_F(LS_ERROR) << "Not supported on this platform"; return -1; } int AudioDeviceGeneric::GetRecordAudioParameters( AudioParameters* params) const { - RTC_LOG_F(LS_ERROR) << "Not supported on this platform"; return -1; } #endif // WEBRTC_IOS diff --git a/modules/audio_device/audio_device_generic.h b/modules/audio_device/audio_device_generic.h index 41e24eb3b0..3fa3ed9d8a 100644 --- a/modules/audio_device/audio_device_generic.h +++ b/modules/audio_device/audio_device_generic.h @@ -13,9 +13,9 @@ #include +#include "api/audio/audio_device.h" +#include "api/audio/audio_device_defines.h" #include "modules/audio_device/audio_device_buffer.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_device/include/audio_device_defines.h" namespace webrtc { diff --git a/modules/audio_device/audio_device_impl.cc b/modules/audio_device/audio_device_impl.cc index 80ed928933..44cfabeddd 100644 --- a/modules/audio_device/audio_device_impl.cc +++ b/modules/audio_device/audio_device_impl.cc @@ -26,6 +26,7 @@ #endif #elif defined(WEBRTC_ANDROID) #include + #include "sdk/android/native_api/audio_device_module/audio_device_android.h" #elif defined(WEBRTC_LINUX) #if defined(WEBRTC_ENABLE_LINUX_ALSA) @@ -61,7 +62,7 @@ namespace webrtc { -rtc::scoped_refptr AudioDeviceModule::Create( +scoped_refptr AudioDeviceModule::Create( AudioLayer audio_layer, TaskQueueFactory* task_queue_factory) { RTC_DLOG(LS_INFO) << __FUNCTION__; @@ -69,7 +70,7 @@ rtc::scoped_refptr AudioDeviceModule::Create( } // static -rtc::scoped_refptr AudioDeviceModule::CreateForTest( +scoped_refptr AudioDeviceModule::CreateForTest( AudioLayer audio_layer, TaskQueueFactory* task_queue_factory) { RTC_DLOG(LS_INFO) << __FUNCTION__; @@ -82,7 +83,8 @@ rtc::scoped_refptr AudioDeviceModule::CreateForTest( return nullptr; } else if (audio_layer == AudioDeviceModule::kAndroidJavaAudio || audio_layer == AudioDeviceModule::kAndroidOpenSLESAudio || - audio_layer == AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio || + audio_layer == + AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio || audio_layer == kAndroidAAudioAudio || audio_layer == kAndroidJavaInputAndAAudioOutputAudio) { RTC_LOG(LS_ERROR) << "Use the CreateAndroidAudioDeviceModule() " @@ -91,8 +93,8 @@ rtc::scoped_refptr AudioDeviceModule::CreateForTest( } // Create the generic reference counted (platform independent) implementation. - auto audio_device = rtc::make_ref_counted( - audio_layer, task_queue_factory); + auto audio_device = + make_ref_counted(audio_layer, task_queue_factory); // Ensure that the current platform is supported. if (audio_device->CheckPlatform() == -1) { @@ -239,8 +241,10 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects() { // iOS ADM implementation. 
#if defined(WEBRTC_IOS) if (audio_layer == kPlatformDefaultAudio) { - audio_device_.reset( - new ios_adm::AudioDeviceIOS(/*bypass_voice_processing=*/false)); + audio_device_.reset(new ios_adm::AudioDeviceIOS( + /*bypass_voice_processing=*/false, + /*muted_speech_event_handler=*/nullptr, + /*render_error_handler=*/nullptr)); RTC_LOG(LS_INFO) << "iPhone Audio APIs will be utilized."; } // END #if defined(WEBRTC_IOS) diff --git a/modules/audio_device/audio_device_impl.h b/modules/audio_device/audio_device_impl.h index 46d91a46c8..41d68209e8 100644 --- a/modules/audio_device/audio_device_impl.h +++ b/modules/audio_device/audio_device_impl.h @@ -17,9 +17,9 @@ #include +#include "api/audio/audio_device.h" #include "api/task_queue/task_queue_factory.h" #include "modules/audio_device/audio_device_buffer.h" -#include "modules/audio_device/include/audio_device.h" namespace webrtc { @@ -159,8 +159,8 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { int RestartPlayoutInternally() override { return -1; } int RestartRecordingInternally() override { return -1; } - int SetPlayoutSampleRate(uint32_t sample_rate) override { return -1; } - int SetRecordingSampleRate(uint32_t sample_rate) override { return -1; } + int SetPlayoutSampleRate(uint32_t /* sample_rate */) override { return -1; } + int SetRecordingSampleRate(uint32_t /* sample_rate */) override { return -1; } private: PlatformType Platform() const; diff --git a/modules/audio_device/audio_device_unittest.cc b/modules/audio_device/audio_device_unittest.cc index e03c11655b..8c97b5f93f 100644 --- a/modules/audio_device/audio_device_unittest.cc +++ b/modules/audio_device/audio_device_unittest.cc @@ -8,15 +8,15 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/audio_device/include/audio_device.h" +#include "api/audio/audio_device.h" #include #include #include #include #include +#include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" @@ -106,8 +106,8 @@ enum class TransportType { // measurements. class AudioStream { public: - virtual void Write(rtc::ArrayView source) = 0; - virtual void Read(rtc::ArrayView destination) = 0; + virtual void Write(ArrayView source) = 0; + virtual void Read(ArrayView destination) = 0; virtual ~AudioStream() = default; }; @@ -116,7 +116,7 @@ class AudioStream { // delay value in milliseconds. // Example: index=240, frames_per_10ms_buffer=480 => 5ms as output. int IndexToMilliseconds(size_t index, size_t frames_per_10ms_buffer) { - return rtc::checked_cast( + return checked_cast( 10.0 * (static_cast(index) / frames_per_10ms_buffer) + 0.5); } @@ -134,7 +134,7 @@ int IndexToMilliseconds(size_t index, size_t frames_per_10ms_buffer) { // change over time and that both sides will in most cases use the same size. 
class FifoAudioStream : public AudioStream { public: - void Write(rtc::ArrayView source) override { + void Write(ArrayView source) override { RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); const size_t size = [&] { MutexLock lock(&lock_); @@ -151,7 +151,7 @@ class FifoAudioStream : public AudioStream { written_elements_ += size; } - void Read(rtc::ArrayView destination) override { + void Read(ArrayView destination) override { MutexLock lock(&lock_); if (fifo_.empty()) { std::fill(destination.begin(), destination.end(), 0); @@ -197,10 +197,10 @@ class FifoAudioStream : public AudioStream { return 0.5 + static_cast(written_elements_ / write_count_); } - using Buffer16 = rtc::BufferT; + using Buffer16 = BufferT; mutable Mutex lock_; - rtc::RaceChecker race_checker_; + RaceChecker race_checker_; std::list fifo_ RTC_GUARDED_BY(lock_); size_t write_count_ RTC_GUARDED_BY(race_checker_) = 0; @@ -220,7 +220,7 @@ class LatencyAudioStream : public AudioStream { } // Insert periodic impulses in first two samples of `destination`. - void Read(rtc::ArrayView destination) override { + void Read(ArrayView destination) override { RTC_DCHECK_RUN_ON(&read_thread_checker_); if (read_count_ == 0) { PRINT("["); @@ -232,7 +232,7 @@ class LatencyAudioStream : public AudioStream { { MutexLock lock(&lock_); if (!pulse_time_) { - pulse_time_ = rtc::TimeMillis(); + pulse_time_ = TimeMillis(); } } constexpr int16_t impulse = std::numeric_limits::max(); @@ -242,7 +242,7 @@ class LatencyAudioStream : public AudioStream { // Detect received impulses in `source`, derive time between transmission and // detection and add the calculated delay to list of latencies. - void Write(rtc::ArrayView source) override { + void Write(ArrayView source) override { RTC_DCHECK_RUN_ON(&write_thread_checker_); RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); MutexLock lock(&lock_); @@ -260,9 +260,9 @@ class LatencyAudioStream : public AudioStream { const size_t max = source[index_of_max]; if (max > kImpulseThreshold) { PRINTD("(%zu, %zu)", max, index_of_max); - int64_t now_time = rtc::TimeMillis(); + int64_t now_time = TimeMillis(); int extra_delay = IndexToMilliseconds(index_of_max, source.size()); - PRINTD("[%d]", rtc::checked_cast(now_time - pulse_time_)); + PRINTD("[%d]", webrtc::checked_cast(now_time - pulse_time_)); PRINTD("[%d]", extra_delay); // Total latency is the difference between transmit time and detection // tome plus the extra delay within the buffer in which we detected the @@ -316,11 +316,11 @@ class LatencyAudioStream : public AudioStream { } Mutex lock_; - rtc::RaceChecker race_checker_; + RaceChecker race_checker_; SequenceChecker read_thread_checker_; SequenceChecker write_thread_checker_; - absl::optional pulse_time_ RTC_GUARDED_BY(lock_); + std::optional pulse_time_ RTC_GUARDED_BY(lock_); std::vector latencies_ RTC_GUARDED_BY(race_checker_); size_t read_count_ RTC_GUARDED_BY(read_thread_checker_) = 0; size_t write_count_ RTC_GUARDED_BY(write_thread_checker_) = 0; @@ -338,7 +338,7 @@ class MockAudioTransport : public test::MockAudioTransport { // implementation where the number of callbacks is counted and an event // is set after a certain number of callbacks. Audio parameters are also // checked. 
- void HandleCallbacks(rtc::Event* event, + void HandleCallbacks(Event* event, AudioStream* audio_stream, int num_callbacks) { event_ = event; @@ -369,11 +369,11 @@ class MockAudioTransport : public test::MockAudioTransport { const size_t bytes_per_frame, const size_t channels, const uint32_t sample_rate, - const uint32_t total_delay_ms, - const int32_t clock_drift, - const uint32_t current_mic_level, - const bool typing_status, - uint32_t& new_mic_level) { + const uint32_t /* total_delay_ms */, + const int32_t /* clock_drift */, + const uint32_t /* current_mic_level */, + const bool /* typing_status */, + uint32_t& /* new_mic_level */) { EXPECT_TRUE(rec_mode()) << "No test is expecting these callbacks."; // Store audio parameters once in the first callback. For all other // callbacks, verify that the provided audio parameters are maintained and @@ -396,8 +396,8 @@ class MockAudioTransport : public test::MockAudioTransport { // Write audio data to audio stream object if one has been injected. if (audio_stream_) { audio_stream_->Write( - rtc::MakeArrayView(static_cast(audio_buffer), - samples_per_channel * channels)); + MakeArrayView(static_cast(audio_buffer), + samples_per_channel * channels)); } // Signal the event after given amount of callbacks. if (event_ && ReceivedEnoughCallbacks()) { @@ -412,8 +412,8 @@ class MockAudioTransport : public test::MockAudioTransport { const uint32_t sample_rate, void* audio_buffer, size_t& samples_out, - int64_t* elapsed_time_ms, - int64_t* ntp_time_ms) { + int64_t* /* elapsed_time_ms */, + int64_t* /* ntp_time_ms */) { EXPECT_TRUE(play_mode()) << "No test is expecting these callbacks."; // Store audio parameters once in the first callback. For all other // callbacks, verify that the provided audio parameters are maintained and @@ -436,8 +436,8 @@ class MockAudioTransport : public test::MockAudioTransport { samples_out = samples_per_channel * channels; // Read audio data from audio stream object if one has been injected. if (audio_stream_) { - audio_stream_->Read(rtc::MakeArrayView( - static_cast(audio_buffer), samples_per_channel * channels)); + audio_stream_->Read(MakeArrayView(static_cast(audio_buffer), + samples_per_channel * channels)); } else { // Fill the audio buffer with zeros to avoid disturbing audio. const size_t num_bytes = samples_per_channel * bytes_per_frame; @@ -491,7 +491,7 @@ class MockAudioTransport : public test::MockAudioTransport { private: Mutex lock_; TransportType type_ = TransportType::kInvalid; - rtc::Event* event_ = nullptr; + Event* event_ = nullptr; AudioStream* audio_stream_ = nullptr; size_t num_callbacks_ = 0; size_t play_count_ RTC_GUARDED_BY(lock_) = 0; @@ -520,10 +520,10 @@ class MAYBE_AudioDeviceTest MAYBE_AudioDeviceTest() : audio_layer_(GetParam()), task_queue_factory_(CreateDefaultTaskQueueFactory()) { - rtc::LogMessage::LogToDebug(rtc::LS_INFO); + LogMessage::LogToDebug(LS_INFO); // Add extra logging fields here if needed for debugging. 
- rtc::LogMessage::LogTimestamps(); - rtc::LogMessage::LogThreads(); + LogMessage::LogTimestamps(); + LogMessage::LogThreads(); audio_device_ = CreateAudioDevice(); EXPECT_NE(audio_device_.get(), nullptr); AudioDeviceModule::AudioLayer audio_layer; @@ -573,17 +573,17 @@ class MAYBE_AudioDeviceTest } bool requirements_satisfied() const { return requirements_satisfied_; } - rtc::Event* event() { return &event_; } + Event* event() { return &event_; } AudioDeviceModule::AudioLayer audio_layer() const { return audio_layer_; } // AudioDeviceModuleForTest extends the default ADM interface with some extra // test methods. Intended for usage in tests only and requires a unique // factory method. See CreateAudioDevice() for details. - const rtc::scoped_refptr& audio_device() const { + const scoped_refptr& audio_device() const { return audio_device_; } - rtc::scoped_refptr CreateAudioDevice() { + scoped_refptr CreateAudioDevice() { // Use the default factory for kPlatformDefaultAudio and a special factory // CreateWindowsCoreAudioAudioDeviceModuleForTest() for kWindowsCoreAudio2. // The value of `audio_layer_` is set at construction by GetParam() and two @@ -661,8 +661,8 @@ class MAYBE_AudioDeviceTest AudioDeviceModule::AudioLayer audio_layer_; std::unique_ptr task_queue_factory_; bool requirements_satisfied_ = true; - rtc::Event event_; - rtc::scoped_refptr audio_device_; + Event event_; + scoped_refptr audio_device_; bool stereo_playout_ = false; }; @@ -671,7 +671,7 @@ class MAYBE_AudioDeviceTest TEST(MAYBE_AudioDeviceTestWin, ConstructDestructWithFactory) { std::unique_ptr task_queue_factory = CreateDefaultTaskQueueFactory(); - rtc::scoped_refptr audio_device; + scoped_refptr audio_device; // The default factory should work for all platforms when a default ADM is // requested. 
audio_device = AudioDeviceModule::Create( diff --git a/modules/audio_device/dummy/audio_device_dummy.cc b/modules/audio_device/dummy/audio_device_dummy.cc index b8fd837038..bc7b95d881 100644 --- a/modules/audio_device/dummy/audio_device_dummy.cc +++ b/modules/audio_device/dummy/audio_device_dummy.cc @@ -13,7 +13,7 @@ namespace webrtc { int32_t AudioDeviceDummy::ActiveAudioLayer( - AudioDeviceModule::AudioLayer& audioLayer) const { + AudioDeviceModule::AudioLayer& /* audioLayer */) const { return -1; } @@ -37,37 +37,39 @@ int16_t AudioDeviceDummy::RecordingDevices() { return -1; } -int32_t AudioDeviceDummy::PlayoutDeviceName(uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) { +int32_t AudioDeviceDummy::PlayoutDeviceName( + uint16_t /* index */, + char /* name */[kAdmMaxDeviceNameSize], + char /* guid */[kAdmMaxGuidSize]) { return -1; } -int32_t AudioDeviceDummy::RecordingDeviceName(uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) { +int32_t AudioDeviceDummy::RecordingDeviceName( + uint16_t /* index */, + char /* name */[kAdmMaxDeviceNameSize], + char /* guid */[kAdmMaxGuidSize]) { return -1; } -int32_t AudioDeviceDummy::SetPlayoutDevice(uint16_t index) { +int32_t AudioDeviceDummy::SetPlayoutDevice(uint16_t /* index */) { return -1; } int32_t AudioDeviceDummy::SetPlayoutDevice( - AudioDeviceModule::WindowsDeviceType device) { + AudioDeviceModule::WindowsDeviceType /* device */) { return -1; } -int32_t AudioDeviceDummy::SetRecordingDevice(uint16_t index) { +int32_t AudioDeviceDummy::SetRecordingDevice(uint16_t /* index */) { return -1; } int32_t AudioDeviceDummy::SetRecordingDevice( - AudioDeviceModule::WindowsDeviceType device) { + AudioDeviceModule::WindowsDeviceType /* device */) { return -1; } -int32_t AudioDeviceDummy::PlayoutIsAvailable(bool& available) { +int32_t AudioDeviceDummy::PlayoutIsAvailable(bool& /* available */) { return -1; } @@ -79,7 +81,7 @@ bool AudioDeviceDummy::PlayoutIsInitialized() const { return false; } -int32_t AudioDeviceDummy::RecordingIsAvailable(bool& available) { +int32_t AudioDeviceDummy::RecordingIsAvailable(bool& /* available */) { return -1; } @@ -131,96 +133,97 @@ bool AudioDeviceDummy::MicrophoneIsInitialized() const { return false; } -int32_t AudioDeviceDummy::SpeakerVolumeIsAvailable(bool& available) { +int32_t AudioDeviceDummy::SpeakerVolumeIsAvailable(bool& /* available */) { return -1; } -int32_t AudioDeviceDummy::SetSpeakerVolume(uint32_t volume) { +int32_t AudioDeviceDummy::SetSpeakerVolume(uint32_t /* volume */) { return -1; } -int32_t AudioDeviceDummy::SpeakerVolume(uint32_t& volume) const { +int32_t AudioDeviceDummy::SpeakerVolume(uint32_t& /* volume */) const { return -1; } -int32_t AudioDeviceDummy::MaxSpeakerVolume(uint32_t& maxVolume) const { +int32_t AudioDeviceDummy::MaxSpeakerVolume(uint32_t& /* maxVolume */) const { return -1; } -int32_t AudioDeviceDummy::MinSpeakerVolume(uint32_t& minVolume) const { +int32_t AudioDeviceDummy::MinSpeakerVolume(uint32_t& /* minVolume */) const { return -1; } -int32_t AudioDeviceDummy::MicrophoneVolumeIsAvailable(bool& available) { +int32_t AudioDeviceDummy::MicrophoneVolumeIsAvailable(bool& /* available */) { return -1; } -int32_t AudioDeviceDummy::SetMicrophoneVolume(uint32_t volume) { +int32_t AudioDeviceDummy::SetMicrophoneVolume(uint32_t /* volume */) { return -1; } -int32_t AudioDeviceDummy::MicrophoneVolume(uint32_t& volume) const { +int32_t AudioDeviceDummy::MicrophoneVolume(uint32_t& /* volume */) const { return -1; } -int32_t 
AudioDeviceDummy::MaxMicrophoneVolume(uint32_t& maxVolume) const { +int32_t AudioDeviceDummy::MaxMicrophoneVolume(uint32_t& /* maxVolume */) const { return -1; } -int32_t AudioDeviceDummy::MinMicrophoneVolume(uint32_t& minVolume) const { +int32_t AudioDeviceDummy::MinMicrophoneVolume(uint32_t& /* minVolume */) const { return -1; } -int32_t AudioDeviceDummy::SpeakerMuteIsAvailable(bool& available) { +int32_t AudioDeviceDummy::SpeakerMuteIsAvailable(bool& /* available */) { return -1; } -int32_t AudioDeviceDummy::SetSpeakerMute(bool enable) { +int32_t AudioDeviceDummy::SetSpeakerMute(bool /* enable */) { return -1; } -int32_t AudioDeviceDummy::SpeakerMute(bool& enabled) const { +int32_t AudioDeviceDummy::SpeakerMute(bool& /* enabled */) const { return -1; } -int32_t AudioDeviceDummy::MicrophoneMuteIsAvailable(bool& available) { +int32_t AudioDeviceDummy::MicrophoneMuteIsAvailable(bool& /* available */) { return -1; } -int32_t AudioDeviceDummy::SetMicrophoneMute(bool enable) { +int32_t AudioDeviceDummy::SetMicrophoneMute(bool /* enable */) { return -1; } -int32_t AudioDeviceDummy::MicrophoneMute(bool& enabled) const { +int32_t AudioDeviceDummy::MicrophoneMute(bool& /* enabled */) const { return -1; } -int32_t AudioDeviceDummy::StereoPlayoutIsAvailable(bool& available) { +int32_t AudioDeviceDummy::StereoPlayoutIsAvailable(bool& /* available */) { return -1; } -int32_t AudioDeviceDummy::SetStereoPlayout(bool enable) { +int32_t AudioDeviceDummy::SetStereoPlayout(bool /* enable */) { return -1; } -int32_t AudioDeviceDummy::StereoPlayout(bool& enabled) const { +int32_t AudioDeviceDummy::StereoPlayout(bool& /* enabled */) const { return -1; } -int32_t AudioDeviceDummy::StereoRecordingIsAvailable(bool& available) { +int32_t AudioDeviceDummy::StereoRecordingIsAvailable(bool& /* available */) { return -1; } -int32_t AudioDeviceDummy::SetStereoRecording(bool enable) { +int32_t AudioDeviceDummy::SetStereoRecording(bool /* enable */) { return -1; } -int32_t AudioDeviceDummy::StereoRecording(bool& enabled) const { +int32_t AudioDeviceDummy::StereoRecording(bool& /* enabled */) const { return -1; } -int32_t AudioDeviceDummy::PlayoutDelay(uint16_t& delayMS) const { +int32_t AudioDeviceDummy::PlayoutDelay(uint16_t& /* delayMS */) const { return -1; } -void AudioDeviceDummy::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {} +void AudioDeviceDummy::AttachAudioBuffer(AudioDeviceBuffer* /* audioBuffer */) { +} } // namespace webrtc diff --git a/modules/audio_device/dummy/audio_device_dummy.h b/modules/audio_device/dummy/audio_device_dummy.h index 2a2541098e..6550915974 100644 --- a/modules/audio_device/dummy/audio_device_dummy.h +++ b/modules/audio_device/dummy/audio_device_dummy.h @@ -13,10 +13,10 @@ #include +#include "api/audio/audio_device.h" +#include "api/audio/audio_device_defines.h" #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/audio_device_generic.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_device/include/audio_device_defines.h" namespace webrtc { diff --git a/modules/audio_device/dummy/file_audio_device.cc b/modules/audio_device/dummy/file_audio_device.cc index 8c10ae4186..9a365047ca 100644 --- a/modules/audio_device/dummy/file_audio_device.cc +++ b/modules/audio_device/dummy/file_audio_device.cc @@ -50,7 +50,7 @@ FileAudioDevice::FileAudioDevice(absl::string_view inputFilename, FileAudioDevice::~FileAudioDevice() {} int32_t FileAudioDevice::ActiveAudioLayer( - AudioDeviceModule::AudioLayer& audioLayer) const { + 
AudioDeviceModule::AudioLayer& /* audioLayer */) const { return -1; } @@ -113,7 +113,7 @@ int32_t FileAudioDevice::SetPlayoutDevice(uint16_t index) { } int32_t FileAudioDevice::SetPlayoutDevice( - AudioDeviceModule::WindowsDeviceType device) { + AudioDeviceModule::WindowsDeviceType /* device */) { return -1; } @@ -126,7 +126,7 @@ int32_t FileAudioDevice::SetRecordingDevice(uint16_t index) { } int32_t FileAudioDevice::SetRecordingDevice( - AudioDeviceModule::WindowsDeviceType device) { + AudioDeviceModule::WindowsDeviceType /* device */) { return -1; } @@ -217,13 +217,13 @@ int32_t FileAudioDevice::StartPlayout() { } } - _ptrThreadPlay = rtc::PlatformThread::SpawnJoinable( + _ptrThreadPlay = PlatformThread::SpawnJoinable( [this] { while (PlayThreadProcess()) { } }, "webrtc_audio_module_play_thread", - rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); + ThreadAttributes().SetPriority(ThreadPriority::kRealtime)); RTC_LOG(LS_INFO) << "Started playout capture to output file: " << _outputFilename; @@ -278,13 +278,13 @@ int32_t FileAudioDevice::StartRecording() { } } - _ptrThreadRec = rtc::PlatformThread::SpawnJoinable( + _ptrThreadRec = PlatformThread::SpawnJoinable( [this] { while (RecThreadProcess()) { } }, "webrtc_audio_module_capture_thread", - rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); + ThreadAttributes().SetPriority(ThreadPriority::kRealtime)); RTC_LOG(LS_INFO) << "Started recording from input file: " << _inputFilename; @@ -332,67 +332,67 @@ bool FileAudioDevice::MicrophoneIsInitialized() const { return true; } -int32_t FileAudioDevice::SpeakerVolumeIsAvailable(bool& available) { +int32_t FileAudioDevice::SpeakerVolumeIsAvailable(bool& /* available */) { return -1; } -int32_t FileAudioDevice::SetSpeakerVolume(uint32_t volume) { +int32_t FileAudioDevice::SetSpeakerVolume(uint32_t /* volume */) { return -1; } -int32_t FileAudioDevice::SpeakerVolume(uint32_t& volume) const { +int32_t FileAudioDevice::SpeakerVolume(uint32_t& /* volume */) const { return -1; } -int32_t FileAudioDevice::MaxSpeakerVolume(uint32_t& maxVolume) const { +int32_t FileAudioDevice::MaxSpeakerVolume(uint32_t& /* maxVolume */) const { return -1; } -int32_t FileAudioDevice::MinSpeakerVolume(uint32_t& minVolume) const { +int32_t FileAudioDevice::MinSpeakerVolume(uint32_t& /* minVolume */) const { return -1; } -int32_t FileAudioDevice::MicrophoneVolumeIsAvailable(bool& available) { +int32_t FileAudioDevice::MicrophoneVolumeIsAvailable(bool& /* available */) { return -1; } -int32_t FileAudioDevice::SetMicrophoneVolume(uint32_t volume) { +int32_t FileAudioDevice::SetMicrophoneVolume(uint32_t /* volume */) { return -1; } -int32_t FileAudioDevice::MicrophoneVolume(uint32_t& volume) const { +int32_t FileAudioDevice::MicrophoneVolume(uint32_t& /* volume */) const { return -1; } -int32_t FileAudioDevice::MaxMicrophoneVolume(uint32_t& maxVolume) const { +int32_t FileAudioDevice::MaxMicrophoneVolume(uint32_t& /* maxVolume */) const { return -1; } -int32_t FileAudioDevice::MinMicrophoneVolume(uint32_t& minVolume) const { +int32_t FileAudioDevice::MinMicrophoneVolume(uint32_t& /* minVolume */) const { return -1; } -int32_t FileAudioDevice::SpeakerMuteIsAvailable(bool& available) { +int32_t FileAudioDevice::SpeakerMuteIsAvailable(bool& /* available */) { return -1; } -int32_t FileAudioDevice::SetSpeakerMute(bool enable) { +int32_t FileAudioDevice::SetSpeakerMute(bool /* enable */) { return -1; } -int32_t FileAudioDevice::SpeakerMute(bool& enabled) const { +int32_t 
FileAudioDevice::SpeakerMute(bool& /* enabled */) const { return -1; } -int32_t FileAudioDevice::MicrophoneMuteIsAvailable(bool& available) { +int32_t FileAudioDevice::MicrophoneMuteIsAvailable(bool& /* available */) { return -1; } -int32_t FileAudioDevice::SetMicrophoneMute(bool enable) { +int32_t FileAudioDevice::SetMicrophoneMute(bool /* enable */) { return -1; } -int32_t FileAudioDevice::MicrophoneMute(bool& enabled) const { +int32_t FileAudioDevice::MicrophoneMute(bool& /* enabled */) const { return -1; } @@ -400,7 +400,7 @@ int32_t FileAudioDevice::StereoPlayoutIsAvailable(bool& available) { available = true; return 0; } -int32_t FileAudioDevice::SetStereoPlayout(bool enable) { +int32_t FileAudioDevice::SetStereoPlayout(bool /* enable */) { return 0; } @@ -414,7 +414,7 @@ int32_t FileAudioDevice::StereoRecordingIsAvailable(bool& available) { return 0; } -int32_t FileAudioDevice::SetStereoRecording(bool enable) { +int32_t FileAudioDevice::SetStereoRecording(bool /* enable */) { return 0; } @@ -423,7 +423,7 @@ int32_t FileAudioDevice::StereoRecording(bool& enabled) const { return 0; } -int32_t FileAudioDevice::PlayoutDelay(uint16_t& delayMS) const { +int32_t FileAudioDevice::PlayoutDelay(uint16_t& /* delayMS */) const { return 0; } @@ -445,7 +445,7 @@ bool FileAudioDevice::PlayThreadProcess() { if (!_playing) { return false; } - int64_t currentTime = rtc::TimeMillis(); + int64_t currentTime = TimeMillis(); mutex_.Lock(); if (_lastCallPlayoutMillis == 0 || @@ -464,7 +464,7 @@ bool FileAudioDevice::PlayThreadProcess() { _playoutFramesLeft = 0; mutex_.Unlock(); - int64_t deltaTimeMillis = rtc::TimeMillis() - currentTime; + int64_t deltaTimeMillis = TimeMillis() - currentTime; if (deltaTimeMillis < 10) { SleepMs(10 - deltaTimeMillis); } @@ -477,7 +477,7 @@ bool FileAudioDevice::RecThreadProcess() { return false; } - int64_t currentTime = rtc::TimeMillis(); + int64_t currentTime = TimeMillis(); mutex_.Lock(); if (_lastCallRecordMillis == 0 || currentTime - _lastCallRecordMillis >= 10) { @@ -497,7 +497,7 @@ bool FileAudioDevice::RecThreadProcess() { mutex_.Unlock(); - int64_t deltaTimeMillis = rtc::TimeMillis() - currentTime; + int64_t deltaTimeMillis = TimeMillis() - currentTime; if (deltaTimeMillis < 10) { SleepMs(10 - deltaTimeMillis); } diff --git a/modules/audio_device/dummy/file_audio_device.h b/modules/audio_device/dummy/file_audio_device.h index 27979933f2..ab7dee0a98 100644 --- a/modules/audio_device/dummy/file_audio_device.h +++ b/modules/audio_device/dummy/file_audio_device.h @@ -144,8 +144,8 @@ class FileAudioDevice : public AudioDeviceGeneric { size_t _recordingFramesIn10MS; size_t _playoutFramesIn10MS; - rtc::PlatformThread _ptrThreadRec; - rtc::PlatformThread _ptrThreadPlay; + PlatformThread _ptrThreadRec; + PlatformThread _ptrThreadPlay; bool _playing; bool _recording; diff --git a/modules/audio_device/dummy/file_audio_device_factory.cc b/modules/audio_device/dummy/file_audio_device_factory.cc index 8c41111478..600304a6ef 100644 --- a/modules/audio_device/dummy/file_audio_device_factory.cc +++ b/modules/audio_device/dummy/file_audio_device_factory.cc @@ -17,7 +17,7 @@ #include "absl/strings/string_view.h" #include "modules/audio_device/dummy/file_audio_device.h" #include "rtc_base/logging.h" -#include "rtc_base/string_utils.h" +#include "rtc_base/string_utils.h" // IWYU pragma: keep namespace webrtc { @@ -40,15 +40,15 @@ FileAudioDevice* FileAudioDeviceFactory::CreateFileAudioDevice() { } void FileAudioDeviceFactory::SetFilenamesToUse( - absl::string_view inputAudioFilename, 
- absl::string_view outputAudioFilename) { + [[maybe_unused]] absl::string_view inputAudioFilename, + [[maybe_unused]] absl::string_view outputAudioFilename) { #ifdef WEBRTC_DUMMY_FILE_DEVICES RTC_DCHECK_LT(inputAudioFilename.size(), MAX_FILENAME_LEN); RTC_DCHECK_LT(outputAudioFilename.size(), MAX_FILENAME_LEN); // Copy the strings since we don't know the lifetime of the input pointers. - rtc::strcpyn(_inputAudioFilename, MAX_FILENAME_LEN, inputAudioFilename); - rtc::strcpyn(_outputAudioFilename, MAX_FILENAME_LEN, outputAudioFilename); + strcpyn(_inputAudioFilename, MAX_FILENAME_LEN, inputAudioFilename); + strcpyn(_outputAudioFilename, MAX_FILENAME_LEN, outputAudioFilename); _isConfigured = true; #else // Sanity: must be compiled with the right define to run this. diff --git a/modules/audio_device/fine_audio_buffer.cc b/modules/audio_device/fine_audio_buffer.cc index 86240da196..2ec25413a5 100644 --- a/modules/audio_device/fine_audio_buffer.cc +++ b/modules/audio_device/fine_audio_buffer.cc @@ -13,6 +13,7 @@ #include #include +#include "api/array_view.h" #include "modules/audio_device/audio_device_buffer.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -22,9 +23,9 @@ namespace webrtc { FineAudioBuffer::FineAudioBuffer(AudioDeviceBuffer* audio_device_buffer) : audio_device_buffer_(audio_device_buffer), - playout_samples_per_channel_10ms_(rtc::dchecked_cast( + playout_samples_per_channel_10ms_(dchecked_cast( audio_device_buffer->PlayoutSampleRate() * 10 / 1000)), - record_samples_per_channel_10ms_(rtc::dchecked_cast( + record_samples_per_channel_10ms_(dchecked_cast( audio_device_buffer->RecordingSampleRate() * 10 / 1000)), playout_channels_(audio_device_buffer->PlayoutChannels()), record_channels_(audio_device_buffer->RecordingChannels()) { @@ -62,7 +63,7 @@ bool FineAudioBuffer::IsReadyForRecord() const { return record_samples_per_channel_10ms_ > 0 && record_channels_ > 0; } -void FineAudioBuffer::GetPlayoutData(rtc::ArrayView audio_buffer, +void FineAudioBuffer::GetPlayoutData(ArrayView audio_buffer, int playout_delay_ms) { RTC_DCHECK(IsReadyForPlayout()); // Ask WebRTC for new data in chunks of 10ms until we have enough to @@ -79,7 +80,7 @@ void FineAudioBuffer::GetPlayoutData(rtc::ArrayView audio_buffer, const size_t num_elements_10ms = playout_channels_ * playout_samples_per_channel_10ms_; const size_t written_elements = playout_buffer_.AppendData( - num_elements_10ms, [&](rtc::ArrayView buf) { + num_elements_10ms, [&](ArrayView buf) { const size_t samples_per_channel_10ms = audio_device_buffer_->GetPlayoutData(buf.data()); return playout_channels_ * samples_per_channel_10ms; @@ -106,8 +107,9 @@ void FineAudioBuffer::GetPlayoutData(rtc::ArrayView audio_buffer, } void FineAudioBuffer::DeliverRecordedData( - rtc::ArrayView audio_buffer, - int record_delay_ms) { + ArrayView audio_buffer, + int record_delay_ms, + std::optional capture_time_ns) { RTC_DCHECK(IsReadyForRecord()); // Always append new data and grow the buffer when needed. 
record_buffer_.AppendData(audio_buffer.data(), audio_buffer.size()); @@ -118,7 +120,8 @@ void FineAudioBuffer::DeliverRecordedData( record_channels_ * record_samples_per_channel_10ms_; while (record_buffer_.size() >= num_elements_10ms) { audio_device_buffer_->SetRecordedBuffer(record_buffer_.data(), - record_samples_per_channel_10ms_); + record_samples_per_channel_10ms_, + capture_time_ns); audio_device_buffer_->SetVQEData(playout_delay_ms_, record_delay_ms); audio_device_buffer_->DeliverRecordedData(); memmove(record_buffer_.data(), record_buffer_.data() + num_elements_10ms, diff --git a/modules/audio_device/fine_audio_buffer.h b/modules/audio_device/fine_audio_buffer.h index a6c3042bb2..dd4d456a68 100644 --- a/modules/audio_device/fine_audio_buffer.h +++ b/modules/audio_device/fine_audio_buffer.h @@ -11,6 +11,10 @@ #ifndef MODULES_AUDIO_DEVICE_FINE_AUDIO_BUFFER_H_ #define MODULES_AUDIO_DEVICE_FINE_AUDIO_BUFFER_H_ +#include +#include +#include + #include "api/array_view.h" #include "rtc_base/buffer.h" @@ -48,8 +52,7 @@ class FineAudioBuffer { // silence instead. The provided delay estimate in `playout_delay_ms` should // contain an estimate of the latency between when an audio frame is read from // WebRTC and when it is played out on the speaker. - void GetPlayoutData(rtc::ArrayView audio_buffer, - int playout_delay_ms); + void GetPlayoutData(ArrayView audio_buffer, int playout_delay_ms); // Consumes the audio data in `audio_buffer` and sends it to the WebRTC layer // in chunks of 10ms. The sum of the provided delay estimate in @@ -60,8 +63,13 @@ class FineAudioBuffer { // Example: buffer size is 5ms => call #1 stores 5ms of data, call #2 stores // 5ms of data and sends a total of 10ms to WebRTC and clears the internal // cache. Call #3 restarts the scheme above. - void DeliverRecordedData(rtc::ArrayView audio_buffer, - int record_delay_ms); + void DeliverRecordedData(ArrayView audio_buffer, + int record_delay_ms) { + DeliverRecordedData(audio_buffer, record_delay_ms, std::nullopt); + } + void DeliverRecordedData(ArrayView audio_buffer, + int record_delay_ms, + std::optional capture_time_ns); private: // Device buffer that works with 10ms chunks of data both for playout and @@ -81,10 +89,10 @@ class FineAudioBuffer { const size_t record_channels_; // Storage for output samples from which a consumer can read audio buffers // in any size using GetPlayoutData(). - rtc::BufferT playout_buffer_; + BufferT playout_buffer_; // Storage for input samples that are about to be delivered to the WebRTC // ADB or remains from the last successful delivery of a 10ms audio buffer. - rtc::BufferT record_buffer_; + BufferT record_buffer_; // Contains latest delay estimate given to GetPlayoutData(). 
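// The fine_audio_buffer hunks above add an optional capture timestamp to
// DeliverRecordedData() and keep the old two-argument overload as a thin
// wrapper that forwards std::nullopt, so existing call sites keep compiling.
// A minimal sketch of the intended call shape follows; OnRecordedData() and
// its arguments are hypothetical and only illustrate how a platform recording
// callback might forward the timestamp.

#include <cstdint>
#include <optional>

#include "api/array_view.h"
#include "modules/audio_device/fine_audio_buffer.h"

void OnRecordedData(webrtc::FineAudioBuffer& fine_buffer,
                    webrtc::ArrayView<const int16_t> pcm_10ms,
                    int record_delay_ms,
                    std::optional<int64_t> capture_time_ns) {
  // New overload: forward the capture time when the platform reports one.
  fine_buffer.DeliverRecordedData(pcm_10ms, record_delay_ms, capture_time_ns);
  // Legacy callers stay valid and implicitly pass std::nullopt:
  //   fine_buffer.DeliverRecordedData(pcm_10ms, record_delay_ms);
}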
int playout_delay_ms_ = 0; }; diff --git a/modules/audio_device/fine_audio_buffer_unittest.cc b/modules/audio_device/fine_audio_buffer_unittest.cc index 36ea85f7dd..c86af8d7df 100644 --- a/modules/audio_device/fine_audio_buffer_unittest.cc +++ b/modules/audio_device/fine_audio_buffer_unittest.cc @@ -113,7 +113,7 @@ void RunFineBufferTest(int frame_size_in_samples) { { InSequence s; for (int j = 0; j < kNumberOfUpdateBufferCalls - 1; ++j) { - EXPECT_CALL(audio_device_buffer, SetRecordedBuffer(_, kSamplesPer10Ms)) + EXPECT_CALL(audio_device_buffer, SetRecordedBuffer(_, kSamplesPer10Ms, _)) .WillOnce(VerifyInputBuffer(j, kChannels * kSamplesPer10Ms)) .RetiresOnSaturation(); } @@ -132,15 +132,13 @@ void RunFineBufferTest(int frame_size_in_samples) { for (int i = 0; i < kNumberOfFrames; ++i) { fine_buffer.GetPlayoutData( - rtc::ArrayView(out_buffer.get(), - kChannels * kFrameSizeSamples), - 0); + ArrayView(out_buffer.get(), kChannels * kFrameSizeSamples), 0); EXPECT_TRUE( VerifyBuffer(out_buffer.get(), i, kChannels * kFrameSizeSamples)); UpdateInputBuffer(in_buffer.get(), i, kChannels * kFrameSizeSamples); fine_buffer.DeliverRecordedData( - rtc::ArrayView(in_buffer.get(), - kChannels * kFrameSizeSamples), + ArrayView(in_buffer.get(), + kChannels * kFrameSizeSamples), 0); } } diff --git a/modules/audio_device/g3doc/audio_device_module.md b/modules/audio_device/g3doc/audio_device_module.md index 93e9aca741..3b0d6b74dd 100644 --- a/modules/audio_device/g3doc/audio_device_module.md +++ b/modules/audio_device/g3doc/audio_device_module.md @@ -1,5 +1,5 @@ - + # Audio Device Module (ADM) @@ -32,7 +32,7 @@ as possible. By default, the ADM in WebRTC is created in [`WebRtcVoiceEngine::Init`][1] but an external implementation can also be injected using -[`rtc::CreatePeerConnectionFactory`][25]. An example of where an external ADM is +[`webrtc::CreatePeerConnectionFactory`][25]. An example of where an external ADM is injected can be found in [PeerConnectionInterfaceTest][26] where a so-called [fake ADM][29] is utilized to avoid hardware dependency in a gtest. Clients can also inject their own ADMs in situations where functionality is needed that is diff --git a/modules/audio_device/include/audio_device.h b/modules/audio_device/include/audio_device.h index 936ee6cb04..3549e00835 100644 --- a/modules/audio_device/include/audio_device.h +++ b/modules/audio_device/include/audio_device.h @@ -11,184 +11,8 @@ #ifndef MODULES_AUDIO_DEVICE_INCLUDE_AUDIO_DEVICE_H_ #define MODULES_AUDIO_DEVICE_INCLUDE_AUDIO_DEVICE_H_ -#include "absl/types/optional.h" -#include "api/scoped_refptr.h" -#include "api/task_queue/task_queue_factory.h" -#include "modules/audio_device/include/audio_device_defines.h" -#include "rtc_base/ref_count.h" - -namespace webrtc { - -class AudioDeviceModuleForTest; - -class AudioDeviceModule : public rtc::RefCountInterface { - public: - enum AudioLayer { - kPlatformDefaultAudio = 0, - kWindowsCoreAudio, - kWindowsCoreAudio2, - kLinuxAlsaAudio, - kLinuxPulseAudio, - kAndroidJavaAudio, - kAndroidOpenSLESAudio, - kAndroidJavaInputAndOpenSLESOutputAudio, - kAndroidAAudioAudio, - kAndroidJavaInputAndAAudioOutputAudio, - kDummyAudio, - }; - - enum WindowsDeviceType { - kDefaultCommunicationDevice = -1, - kDefaultDevice = -2 - }; - - struct Stats { - // The fields below correspond to similarly-named fields in the WebRTC stats - // spec. 
https://w3c.github.io/webrtc-stats/#playoutstats-dict* - double synthesized_samples_duration_s = 0; - uint64_t synthesized_samples_events = 0; - double total_samples_duration_s = 0; - double total_playout_delay_s = 0; - uint64_t total_samples_count = 0; - }; - - public: - // Creates a default ADM for usage in production code. - static rtc::scoped_refptr Create( - AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory); - // Creates an ADM with support for extra test methods. Don't use this factory - // in production code. - static rtc::scoped_refptr CreateForTest( - AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory); - - // Retrieve the currently utilized audio layer - virtual int32_t ActiveAudioLayer(AudioLayer* audioLayer) const = 0; - - // Full-duplex transportation of PCM audio - virtual int32_t RegisterAudioCallback(AudioTransport* audioCallback) = 0; - - // Main initialization and termination - virtual int32_t Init() = 0; - virtual int32_t Terminate() = 0; - virtual bool Initialized() const = 0; - - // Device enumeration - virtual int16_t PlayoutDevices() = 0; - virtual int16_t RecordingDevices() = 0; - virtual int32_t PlayoutDeviceName(uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) = 0; - virtual int32_t RecordingDeviceName(uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) = 0; - - // Device selection - virtual int32_t SetPlayoutDevice(uint16_t index) = 0; - virtual int32_t SetPlayoutDevice(WindowsDeviceType device) = 0; - virtual int32_t SetRecordingDevice(uint16_t index) = 0; - virtual int32_t SetRecordingDevice(WindowsDeviceType device) = 0; - - // Audio transport initialization - virtual int32_t PlayoutIsAvailable(bool* available) = 0; - virtual int32_t InitPlayout() = 0; - virtual bool PlayoutIsInitialized() const = 0; - virtual int32_t RecordingIsAvailable(bool* available) = 0; - virtual int32_t InitRecording() = 0; - virtual bool RecordingIsInitialized() const = 0; - - // Audio transport control - virtual int32_t StartPlayout() = 0; - virtual int32_t StopPlayout() = 0; - virtual bool Playing() const = 0; - virtual int32_t StartRecording() = 0; - virtual int32_t StopRecording() = 0; - virtual bool Recording() const = 0; - - // Audio mixer initialization - virtual int32_t InitSpeaker() = 0; - virtual bool SpeakerIsInitialized() const = 0; - virtual int32_t InitMicrophone() = 0; - virtual bool MicrophoneIsInitialized() const = 0; - - // Speaker volume controls - virtual int32_t SpeakerVolumeIsAvailable(bool* available) = 0; - virtual int32_t SetSpeakerVolume(uint32_t volume) = 0; - virtual int32_t SpeakerVolume(uint32_t* volume) const = 0; - virtual int32_t MaxSpeakerVolume(uint32_t* maxVolume) const = 0; - virtual int32_t MinSpeakerVolume(uint32_t* minVolume) const = 0; - - // Microphone volume controls - virtual int32_t MicrophoneVolumeIsAvailable(bool* available) = 0; - virtual int32_t SetMicrophoneVolume(uint32_t volume) = 0; - virtual int32_t MicrophoneVolume(uint32_t* volume) const = 0; - virtual int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const = 0; - virtual int32_t MinMicrophoneVolume(uint32_t* minVolume) const = 0; - - // Speaker mute control - virtual int32_t SpeakerMuteIsAvailable(bool* available) = 0; - virtual int32_t SetSpeakerMute(bool enable) = 0; - virtual int32_t SpeakerMute(bool* enabled) const = 0; - - // Microphone mute control - virtual int32_t MicrophoneMuteIsAvailable(bool* available) = 0; - virtual int32_t SetMicrophoneMute(bool enable) = 0; - virtual int32_t 
MicrophoneMute(bool* enabled) const = 0; - - // Stereo support - virtual int32_t StereoPlayoutIsAvailable(bool* available) const = 0; - virtual int32_t SetStereoPlayout(bool enable) = 0; - virtual int32_t StereoPlayout(bool* enabled) const = 0; - virtual int32_t StereoRecordingIsAvailable(bool* available) const = 0; - virtual int32_t SetStereoRecording(bool enable) = 0; - virtual int32_t StereoRecording(bool* enabled) const = 0; - - // Playout delay - virtual int32_t PlayoutDelay(uint16_t* delayMS) const = 0; - - // Only supported on Android. - virtual bool BuiltInAECIsAvailable() const = 0; - virtual bool BuiltInAGCIsAvailable() const = 0; - virtual bool BuiltInNSIsAvailable() const = 0; - - // Enables the built-in audio effects. Only supported on Android. - virtual int32_t EnableBuiltInAEC(bool enable) = 0; - virtual int32_t EnableBuiltInAGC(bool enable) = 0; - virtual int32_t EnableBuiltInNS(bool enable) = 0; - - // Play underrun count. Only supported on Android. - // TODO(alexnarest): Make it abstract after upstream projects support it. - virtual int32_t GetPlayoutUnderrunCount() const { return -1; } - - // Used to generate RTC stats. If not implemented, RTCAudioPlayoutStats will - // not be present in the stats. - virtual absl::optional GetStats() const { return absl::nullopt; } - -// Only supported on iOS. -#if defined(WEBRTC_IOS) - virtual int GetPlayoutAudioParameters(AudioParameters* params) const = 0; - virtual int GetRecordAudioParameters(AudioParameters* params) const = 0; -#endif // WEBRTC_IOS - - protected: - ~AudioDeviceModule() override {} -}; - -// Extends the default ADM interface with some extra test methods. -// Intended for usage in tests only and requires a unique factory method. -class AudioDeviceModuleForTest : public AudioDeviceModule { - public: - // Triggers internal restart sequences of audio streaming. Can be used by - // tests to emulate events corresponding to e.g. removal of an active audio - // device or other actions which causes the stream to be disconnected. - virtual int RestartPlayoutInternally() = 0; - virtual int RestartRecordingInternally() = 0; - - virtual int SetPlayoutSampleRate(uint32_t sample_rate) = 0; - virtual int SetRecordingSampleRate(uint32_t sample_rate) = 0; -}; - -} // namespace webrtc +// This is a transitional header forwarding to the new version in the api/ +// folder. +#include "api/audio/audio_device.h" #endif // MODULES_AUDIO_DEVICE_INCLUDE_AUDIO_DEVICE_H_ diff --git a/modules/audio_device/include/audio_device_data_observer.h b/modules/audio_device/include/audio_device_data_observer.h index 36dc45f19e..4627caa10e 100644 --- a/modules/audio_device/include/audio_device_data_observer.h +++ b/modules/audio_device/include/audio_device_data_observer.h @@ -14,10 +14,10 @@ #include #include -#include "absl/base/attributes.h" +#include + +#include "api/audio/audio_device.h" #include "api/scoped_refptr.h" -#include "api/task_queue/task_queue_factory.h" -#include "modules/audio_device/include/audio_device.h" namespace webrtc { @@ -43,30 +43,10 @@ class AudioDeviceDataObserver { // Creates an ADMWrapper around an ADM instance that registers // the provided AudioDeviceDataObserver. -rtc::scoped_refptr CreateAudioDeviceWithDataObserver( - rtc::scoped_refptr impl, +scoped_refptr CreateAudioDeviceWithDataObserver( + scoped_refptr impl, std::unique_ptr observer); -// Creates an ADMWrapper around an ADM instance that registers -// the provided AudioDeviceDataObserver. 
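// After this cleanup only the std::unique_ptr-based wrapper factory is left;
// the deprecated raw-pointer and "create the ADM internally" overloads are
// removed just below. A hedged sketch of the surviving call shape, where
// `adm` is an already-created module and `observer` is some
// AudioDeviceDataObserver implementation (both supplied by the caller, not
// part of this change):

#include <memory>
#include <utility>

#include "api/audio/audio_device.h"
#include "api/scoped_refptr.h"
#include "modules/audio_device/include/audio_device_data_observer.h"

webrtc::scoped_refptr<webrtc::AudioDeviceModule> WrapWithObserver(
    webrtc::scoped_refptr<webrtc::AudioDeviceModule> adm,
    std::unique_ptr<webrtc::AudioDeviceDataObserver> observer) {
  // The returned wrapper forwards every ADM call to `adm` and reports the
  // captured/rendered audio data to `observer`.
  return webrtc::CreateAudioDeviceWithDataObserver(std::move(adm),
                                                   std::move(observer));
}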
-ABSL_DEPRECATED("") -rtc::scoped_refptr CreateAudioDeviceWithDataObserver( - rtc::scoped_refptr impl, - AudioDeviceDataObserver* observer); - -// Creates an ADM instance with AudioDeviceDataObserver registered. -rtc::scoped_refptr CreateAudioDeviceWithDataObserver( - AudioDeviceModule::AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory, - std::unique_ptr observer); - -// Creates an ADM instance with AudioDeviceDataObserver registered. -ABSL_DEPRECATED("") -rtc::scoped_refptr CreateAudioDeviceWithDataObserver( - AudioDeviceModule::AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory, - AudioDeviceDataObserver* observer); - } // namespace webrtc #endif // MODULES_AUDIO_DEVICE_INCLUDE_AUDIO_DEVICE_DATA_OBSERVER_H_ diff --git a/modules/audio_device/include/audio_device_default.h b/modules/audio_device/include/audio_device_default.h index 3779d6fb3b..d38c5e81a1 100644 --- a/modules/audio_device/include/audio_device_default.h +++ b/modules/audio_device/include/audio_device_default.h @@ -11,7 +11,7 @@ #ifndef MODULES_AUDIO_DEVICE_INCLUDE_AUDIO_DEVICE_DEFAULT_H_ #define MODULES_AUDIO_DEVICE_INCLUDE_AUDIO_DEVICE_DEFAULT_H_ -#include "modules/audio_device/include/audio_device.h" +#include "api/audio/audio_device.h" namespace webrtc { namespace webrtc_impl { @@ -25,50 +25,50 @@ class AudioDeviceModuleDefault : public T { AudioDeviceModuleDefault() {} virtual ~AudioDeviceModuleDefault() {} - int32_t RegisterAudioCallback(AudioTransport* audioCallback) override { + int32_t RegisterAudioCallback(AudioTransport* /* audioCallback */) override { return 0; } int32_t Init() override { return 0; } int32_t InitSpeaker() override { return 0; } - int32_t SetPlayoutDevice(uint16_t index) override { return 0; } + int32_t SetPlayoutDevice(uint16_t /* index */) override { return 0; } int32_t SetPlayoutDevice( - AudioDeviceModule::WindowsDeviceType device) override { + AudioDeviceModule::WindowsDeviceType /* device */) override { return 0; } - int32_t SetStereoPlayout(bool enable) override { return 0; } + int32_t SetStereoPlayout(bool /* enable */) override { return 0; } int32_t StopPlayout() override { return 0; } int32_t InitMicrophone() override { return 0; } - int32_t SetRecordingDevice(uint16_t index) override { return 0; } + int32_t SetRecordingDevice(uint16_t /* index */) override { return 0; } int32_t SetRecordingDevice( - AudioDeviceModule::WindowsDeviceType device) override { + AudioDeviceModule::WindowsDeviceType /* device */) override { return 0; } - int32_t SetStereoRecording(bool enable) override { return 0; } + int32_t SetStereoRecording(bool /* enable */) override { return 0; } int32_t StopRecording() override { return 0; } int32_t Terminate() override { return 0; } int32_t ActiveAudioLayer( - AudioDeviceModule::AudioLayer* audioLayer) const override { + AudioDeviceModule::AudioLayer* /* audioLayer */) const override { return 0; } bool Initialized() const override { return true; } int16_t PlayoutDevices() override { return 0; } int16_t RecordingDevices() override { return 0; } - int32_t PlayoutDeviceName(uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) override { + int32_t PlayoutDeviceName(uint16_t /* index */, + char /* name */[kAdmMaxDeviceNameSize], + char /* guid */[kAdmMaxGuidSize]) override { return 0; } - int32_t RecordingDeviceName(uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) override { + int32_t RecordingDeviceName(uint16_t /* index */, + char /* name */[kAdmMaxDeviceNameSize], + char /* guid 
*/[kAdmMaxGuidSize]) override { return 0; } - int32_t PlayoutIsAvailable(bool* available) override { return 0; } + int32_t PlayoutIsAvailable(bool* /* available */) override { return 0; } int32_t InitPlayout() override { return 0; } bool PlayoutIsInitialized() const override { return true; } - int32_t RecordingIsAvailable(bool* available) override { return 0; } + int32_t RecordingIsAvailable(bool* /* available */) override { return 0; } int32_t InitRecording() override { return 0; } bool RecordingIsInitialized() const override { return true; } int32_t StartPlayout() override { return 0; } @@ -77,42 +77,54 @@ class AudioDeviceModuleDefault : public T { bool Recording() const override { return false; } bool SpeakerIsInitialized() const override { return true; } bool MicrophoneIsInitialized() const override { return true; } - int32_t SpeakerVolumeIsAvailable(bool* available) override { return 0; } - int32_t SetSpeakerVolume(uint32_t volume) override { return 0; } - int32_t SpeakerVolume(uint32_t* volume) const override { return 0; } - int32_t MaxSpeakerVolume(uint32_t* maxVolume) const override { return 0; } - int32_t MinSpeakerVolume(uint32_t* minVolume) const override { return 0; } - int32_t MicrophoneVolumeIsAvailable(bool* available) override { return 0; } - int32_t SetMicrophoneVolume(uint32_t volume) override { return 0; } - int32_t MicrophoneVolume(uint32_t* volume) const override { return 0; } - int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const override { return 0; } - int32_t MinMicrophoneVolume(uint32_t* minVolume) const override { return 0; } - int32_t SpeakerMuteIsAvailable(bool* available) override { return 0; } - int32_t SetSpeakerMute(bool enable) override { return 0; } - int32_t SpeakerMute(bool* enabled) const override { return 0; } - int32_t MicrophoneMuteIsAvailable(bool* available) override { return 0; } - int32_t SetMicrophoneMute(bool enable) override { return 0; } - int32_t MicrophoneMute(bool* enabled) const override { return 0; } + int32_t SpeakerVolumeIsAvailable(bool* /* available */) override { return 0; } + int32_t SetSpeakerVolume(uint32_t /* volume */) override { return 0; } + int32_t SpeakerVolume(uint32_t* /* volume */) const override { return 0; } + int32_t MaxSpeakerVolume(uint32_t* /* maxVolume */) const override { + return 0; + } + int32_t MinSpeakerVolume(uint32_t* /* minVolume */) const override { + return 0; + } + int32_t MicrophoneVolumeIsAvailable(bool* /* available */) override { + return 0; + } + int32_t SetMicrophoneVolume(uint32_t /* volume */) override { return 0; } + int32_t MicrophoneVolume(uint32_t* /* volume */) const override { return 0; } + int32_t MaxMicrophoneVolume(uint32_t* /* maxVolume */) const override { + return 0; + } + int32_t MinMicrophoneVolume(uint32_t* /* minVolume */) const override { + return 0; + } + int32_t SpeakerMuteIsAvailable(bool* /* available */) override { return 0; } + int32_t SetSpeakerMute(bool /* enable */) override { return 0; } + int32_t SpeakerMute(bool* /* enabled */) const override { return 0; } + int32_t MicrophoneMuteIsAvailable(bool* /* available */) override { + return 0; + } + int32_t SetMicrophoneMute(bool /* enable */) override { return 0; } + int32_t MicrophoneMute(bool* /* enabled */) const override { return 0; } int32_t StereoPlayoutIsAvailable(bool* available) const override { *available = false; return 0; } - int32_t StereoPlayout(bool* enabled) const override { return 0; } + int32_t StereoPlayout(bool* /* enabled */) const override { return 0; } int32_t StereoRecordingIsAvailable(bool* 
available) const override { *available = false; return 0; } - int32_t StereoRecording(bool* enabled) const override { return 0; } + int32_t StereoRecording(bool* /* enabled */) const override { return 0; } int32_t PlayoutDelay(uint16_t* delayMS) const override { *delayMS = 0; return 0; } bool BuiltInAECIsAvailable() const override { return false; } - int32_t EnableBuiltInAEC(bool enable) override { return -1; } + int32_t EnableBuiltInAEC(bool /* enable */) override { return -1; } bool BuiltInAGCIsAvailable() const override { return false; } - int32_t EnableBuiltInAGC(bool enable) override { return -1; } + int32_t EnableBuiltInAGC(bool /* enable */) override { return -1; } bool BuiltInNSIsAvailable() const override { return false; } - int32_t EnableBuiltInNS(bool enable) override { return -1; } + int32_t EnableBuiltInNS(bool /* enable */) override { return -1; } int32_t GetPlayoutUnderrunCount() const override { return -1; } diff --git a/modules/audio_device/include/audio_device_defines.h b/modules/audio_device/include/audio_device_defines.h index d677d41f69..d126f78f9d 100644 --- a/modules/audio_device/include/audio_device_defines.h +++ b/modules/audio_device/include/audio_device_defines.h @@ -11,167 +11,8 @@ #ifndef MODULES_AUDIO_DEVICE_INCLUDE_AUDIO_DEVICE_DEFINES_H_ #define MODULES_AUDIO_DEVICE_INCLUDE_AUDIO_DEVICE_DEFINES_H_ -#include - -#include - -#include "rtc_base/checks.h" -#include "rtc_base/strings/string_builder.h" - -namespace webrtc { - -static const int kAdmMaxDeviceNameSize = 128; -static const int kAdmMaxFileNameSize = 512; -static const int kAdmMaxGuidSize = 128; - -static const int kAdmMinPlayoutBufferSizeMs = 10; -static const int kAdmMaxPlayoutBufferSizeMs = 250; - -// ---------------------------------------------------------------------------- -// AudioTransport -// ---------------------------------------------------------------------------- - -class AudioTransport { - public: - // TODO(bugs.webrtc.org/13620) Deprecate this function - virtual int32_t RecordedDataIsAvailable(const void* audioSamples, - size_t nSamples, - size_t nBytesPerSample, - size_t nChannels, - uint32_t samplesPerSec, - uint32_t totalDelayMS, - int32_t clockDrift, - uint32_t currentMicLevel, - bool keyPressed, - uint32_t& newMicLevel) = 0; // NOLINT - - virtual int32_t RecordedDataIsAvailable( - const void* audioSamples, - size_t nSamples, - size_t nBytesPerSample, - size_t nChannels, - uint32_t samplesPerSec, - uint32_t totalDelayMS, - int32_t clockDrift, - uint32_t currentMicLevel, - bool keyPressed, - uint32_t& newMicLevel, - absl::optional estimatedCaptureTimeNS) { // NOLINT - // TODO(webrtc:13620) Make the default behaver of the new API to behave as - // the old API. This can be pure virtual if all uses of the old API is - // removed. - return RecordedDataIsAvailable( - audioSamples, nSamples, nBytesPerSample, nChannels, samplesPerSec, - totalDelayMS, clockDrift, currentMicLevel, keyPressed, newMicLevel); - } - - // Implementation has to setup safe values for all specified out parameters. - virtual int32_t NeedMorePlayData(size_t nSamples, - size_t nBytesPerSample, - size_t nChannels, - uint32_t samplesPerSec, - void* audioSamples, - size_t& nSamplesOut, // NOLINT - int64_t* elapsed_time_ms, - int64_t* ntp_time_ms) = 0; // NOLINT - - // Method to pull mixed render audio data from all active VoE channels. - // The data will not be passed as reference for audio processing internally. 
- virtual void PullRenderData(int bits_per_sample, - int sample_rate, - size_t number_of_channels, - size_t number_of_frames, - void* audio_data, - int64_t* elapsed_time_ms, - int64_t* ntp_time_ms) = 0; - - protected: - virtual ~AudioTransport() {} -}; - -// Helper class for storage of fundamental audio parameters such as sample rate, -// number of channels, native buffer size etc. -// Note that one audio frame can contain more than one channel sample and each -// sample is assumed to be a 16-bit PCM sample. Hence, one audio frame in -// stereo contains 2 * (16/8) = 4 bytes of data. -class AudioParameters { - public: - // This implementation does only support 16-bit PCM samples. - static const size_t kBitsPerSample = 16; - AudioParameters() - : sample_rate_(0), - channels_(0), - frames_per_buffer_(0), - frames_per_10ms_buffer_(0) {} - AudioParameters(int sample_rate, size_t channels, size_t frames_per_buffer) - : sample_rate_(sample_rate), - channels_(channels), - frames_per_buffer_(frames_per_buffer), - frames_per_10ms_buffer_(static_cast(sample_rate / 100)) {} - void reset(int sample_rate, size_t channels, size_t frames_per_buffer) { - sample_rate_ = sample_rate; - channels_ = channels; - frames_per_buffer_ = frames_per_buffer; - frames_per_10ms_buffer_ = static_cast(sample_rate / 100); - } - size_t bits_per_sample() const { return kBitsPerSample; } - void reset(int sample_rate, size_t channels, double buffer_duration) { - reset(sample_rate, channels, - static_cast(sample_rate * buffer_duration + 0.5)); - } - void reset(int sample_rate, size_t channels) { - reset(sample_rate, channels, static_cast(0)); - } - int sample_rate() const { return sample_rate_; } - size_t channels() const { return channels_; } - size_t frames_per_buffer() const { return frames_per_buffer_; } - size_t frames_per_10ms_buffer() const { return frames_per_10ms_buffer_; } - size_t GetBytesPerFrame() const { return channels_ * kBitsPerSample / 8; } - size_t GetBytesPerBuffer() const { - return frames_per_buffer_ * GetBytesPerFrame(); - } - // The WebRTC audio device buffer (ADB) only requires that the sample rate - // and number of channels are configured. Hence, to be "valid", only these - // two attributes must be set. - bool is_valid() const { return ((sample_rate_ > 0) && (channels_ > 0)); } - // Most platforms also require that a native buffer size is defined. - // An audio parameter instance is considered to be "complete" if it is both - // "valid" (can be used by the ADB) and also has a native frame size. 
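// AudioParameters is only being relocated to api/audio/ (the old header
// becomes a transitional forwarder, as the replacement lines below show), so
// the arithmetic documented in the comments above is unchanged. A small
// worked example of the size helpers, assuming 48 kHz stereo 16-bit PCM with
// a native buffer of 480 frames (10 ms):

#include "api/audio/audio_device_defines.h"
#include "rtc_base/checks.h"

void AudioParametersExample() {
  webrtc::AudioParameters params(/*sample_rate=*/48000, /*channels=*/2,
                                 /*frames_per_buffer=*/480);
  RTC_CHECK_EQ(params.GetBytesPerFrame(), 4u);          // 2 ch * 16 bit / 8.
  RTC_CHECK_EQ(params.frames_per_10ms_buffer(), 480u);  // 48000 / 100.
  RTC_CHECK_EQ(params.GetBytesPer10msBuffer(), 1920u);  // 480 frames * 4 B.
  RTC_CHECK_EQ(params.GetBufferSizeInMilliseconds(), 10.0);  // 480 / 48.0.
}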
- bool is_complete() const { return (is_valid() && (frames_per_buffer_ > 0)); } - size_t GetBytesPer10msBuffer() const { - return frames_per_10ms_buffer_ * GetBytesPerFrame(); - } - double GetBufferSizeInMilliseconds() const { - if (sample_rate_ == 0) - return 0.0; - return frames_per_buffer_ / (sample_rate_ / 1000.0); - } - double GetBufferSizeInSeconds() const { - if (sample_rate_ == 0) - return 0.0; - return static_cast(frames_per_buffer_) / (sample_rate_); - } - std::string ToString() const { - char ss_buf[1024]; - rtc::SimpleStringBuilder ss(ss_buf); - ss << "AudioParameters: "; - ss << "sample_rate=" << sample_rate() << ", channels=" << channels(); - ss << ", frames_per_buffer=" << frames_per_buffer(); - ss << ", frames_per_10ms_buffer=" << frames_per_10ms_buffer(); - ss << ", bytes_per_frame=" << GetBytesPerFrame(); - ss << ", bytes_per_buffer=" << GetBytesPerBuffer(); - ss << ", bytes_per_10ms_buffer=" << GetBytesPer10msBuffer(); - ss << ", size_in_ms=" << GetBufferSizeInMilliseconds(); - return ss.str(); - } - - private: - int sample_rate_; - size_t channels_; - size_t frames_per_buffer_; - size_t frames_per_10ms_buffer_; -}; - -} // namespace webrtc +// This is a transitional header forwarding to the new version in the api/ +// folder. +#include "api/audio/audio_device_defines.h" #endif // MODULES_AUDIO_DEVICE_INCLUDE_AUDIO_DEVICE_DEFINES_H_ diff --git a/modules/audio_device/include/audio_device_factory.cc b/modules/audio_device/include/audio_device_factory.cc index 130e096e6d..7cec58d46e 100644 --- a/modules/audio_device/include/audio_device_factory.cc +++ b/modules/audio_device/include/audio_device_factory.cc @@ -24,15 +24,15 @@ namespace webrtc { -rtc::scoped_refptr CreateWindowsCoreAudioAudioDeviceModule( - TaskQueueFactory* task_queue_factory, - bool automatic_restart) { +webrtc::scoped_refptr +CreateWindowsCoreAudioAudioDeviceModule(TaskQueueFactory* task_queue_factory, + bool automatic_restart) { RTC_DLOG(LS_INFO) << __FUNCTION__; return CreateWindowsCoreAudioAudioDeviceModuleForTest(task_queue_factory, automatic_restart); } -rtc::scoped_refptr +webrtc::scoped_refptr CreateWindowsCoreAudioAudioDeviceModuleForTest( TaskQueueFactory* task_queue_factory, bool automatic_restart) { diff --git a/modules/audio_device/include/audio_device_factory.h b/modules/audio_device/include/audio_device_factory.h index edd7686b8e..535b89fc72 100644 --- a/modules/audio_device/include/audio_device_factory.h +++ b/modules/audio_device/include/audio_device_factory.h @@ -13,8 +13,8 @@ #include +#include "api/audio/audio_device.h" #include "api/task_queue/task_queue_factory.h" -#include "modules/audio_device/include/audio_device.h" namespace webrtc { @@ -27,7 +27,7 @@ namespace webrtc { // Example (assuming webrtc namespace): // // public: -// rtc::scoped_refptr CreateAudioDevice() { +// webrtc::scoped_refptr CreateAudioDevice() { // task_queue_factory_ = CreateDefaultTaskQueueFactory(); // // Tell COM that this thread shall live in the MTA. 
// com_initializer_ = std::make_unique( @@ -45,11 +45,11 @@ namespace webrtc { // std::unique_ptr com_initializer_; // std::unique_ptr task_queue_factory_; // -rtc::scoped_refptr CreateWindowsCoreAudioAudioDeviceModule( - TaskQueueFactory* task_queue_factory, - bool automatic_restart = true); +webrtc::scoped_refptr +CreateWindowsCoreAudioAudioDeviceModule(TaskQueueFactory* task_queue_factory, + bool automatic_restart = true); -rtc::scoped_refptr +webrtc::scoped_refptr CreateWindowsCoreAudioAudioDeviceModuleForTest( TaskQueueFactory* task_queue_factory, bool automatic_restart = true); diff --git a/modules/audio_device/include/fake_audio_device.h b/modules/audio_device/include/fake_audio_device.h index 2322ce0263..b348483fb1 100644 --- a/modules/audio_device/include/fake_audio_device.h +++ b/modules/audio_device/include/fake_audio_device.h @@ -11,7 +11,7 @@ #ifndef MODULES_AUDIO_DEVICE_INCLUDE_FAKE_AUDIO_DEVICE_H_ #define MODULES_AUDIO_DEVICE_INCLUDE_FAKE_AUDIO_DEVICE_H_ -#include "modules/audio_device/include/audio_device.h" +#include "api/audio/audio_device.h" #include "modules/audio_device/include/audio_device_default.h" namespace webrtc { @@ -23,8 +23,8 @@ class FakeAudioDeviceModule // references using scoped_refptr. Current code doesn't always use refcounting // for this class. void AddRef() const override {} - rtc::RefCountReleaseStatus Release() const override { - return rtc::RefCountReleaseStatus::kDroppedLastRef; + webrtc::RefCountReleaseStatus Release() const override { + return webrtc::RefCountReleaseStatus::kDroppedLastRef; } }; diff --git a/modules/audio_device/include/mock_audio_device.h b/modules/audio_device/include/mock_audio_device.h index 73fbdd547d..191caf604f 100644 --- a/modules/audio_device/include/mock_audio_device.h +++ b/modules/audio_device/include/mock_audio_device.h @@ -13,8 +13,8 @@ #include +#include "api/audio/audio_device.h" #include "api/make_ref_counted.h" -#include "modules/audio_device/include/audio_device.h" #include "test/gmock.h" namespace webrtc { @@ -22,12 +22,11 @@ namespace test { class MockAudioDeviceModule : public AudioDeviceModule { public: - static rtc::scoped_refptr CreateNice() { - return rtc::make_ref_counted<::testing::NiceMock>(); + static scoped_refptr CreateNice() { + return make_ref_counted<::testing::NiceMock>(); } - static rtc::scoped_refptr CreateStrict() { - return rtc::make_ref_counted< - ::testing::StrictMock>(); + static scoped_refptr CreateStrict() { + return make_ref_counted<::testing::StrictMock>(); } // AudioDeviceModule. 
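// mock_audio_device.h above now uses webrtc::scoped_refptr and pulls
// AudioDeviceModule from api/audio/audio_device.h. A hedged usage sketch for
// the nice mock; the expectation on Playing() is only illustrative (it
// assumes Playing() is among the MOCK_METHODs, which are not shown in this
// hunk):

#include "modules/audio_device/include/mock_audio_device.h"
#include "test/gmock.h"
#include "test/gtest.h"

namespace {

using ::testing::Return;

TEST(MockAudioDeviceModuleExample, NiceMockReturnsConfiguredValue) {
  webrtc::scoped_refptr<webrtc::test::MockAudioDeviceModule> adm =
      webrtc::test::MockAudioDeviceModule::CreateNice();
  // NiceMock silently ignores uninteresting calls; explicit expectations
  // still apply.
  EXPECT_CALL(*adm, Playing()).WillOnce(Return(false));
  EXPECT_FALSE(adm->Playing());
}

}  // namespace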
diff --git a/modules/audio_device/include/mock_audio_transport.h b/modules/audio_device/include/mock_audio_transport.h index b886967319..56f6905e9a 100644 --- a/modules/audio_device/include/mock_audio_transport.h +++ b/modules/audio_device/include/mock_audio_transport.h @@ -11,7 +11,7 @@ #ifndef MODULES_AUDIO_DEVICE_INCLUDE_MOCK_AUDIO_TRANSPORT_H_ #define MODULES_AUDIO_DEVICE_INCLUDE_MOCK_AUDIO_TRANSPORT_H_ -#include "modules/audio_device/include/audio_device_defines.h" +#include "api/audio/audio_device_defines.h" #include "test/gmock.h" namespace webrtc { @@ -48,7 +48,7 @@ class MockAudioTransport : public AudioTransport { uint32_t currentMicLevel, bool keyPressed, uint32_t& newMicLevel, - absl::optional estimated_capture_time_ns), + std::optional estimated_capture_time_ns), (override)); MOCK_METHOD(int32_t, diff --git a/modules/audio_device/include/test_audio_device.cc b/modules/audio_device/include/test_audio_device.cc index 4c29c98f2c..c945b016ce 100644 --- a/modules/audio_device/include/test_audio_device.cc +++ b/modules/audio_device/include/test_audio_device.cc @@ -21,6 +21,7 @@ #include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/make_ref_counted.h" +#include "api/task_queue/task_queue_factory.h" #include "common_audio/wav_file.h" #include "modules/audio_device/audio_device_impl.h" #include "modules/audio_device/include/audio_device_default.h" @@ -33,7 +34,6 @@ #include "rtc_base/platform_thread.h" #include "rtc_base/random.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_queue.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" @@ -43,7 +43,7 @@ namespace webrtc { namespace { constexpr int kFrameLengthUs = 10000; -constexpr int kFramesPerSecond = rtc::kNumMicrosecsPerSec / kFrameLengthUs; +constexpr int kFramesPerSecond = kNumMicrosecsPerSec / kFrameLengthUs; class TestAudioDeviceModuleImpl : public AudioDeviceModuleImpl { public: @@ -85,7 +85,7 @@ class PulsedNoiseCapturerImpl final int NumChannels() const override { return num_channels_; } - bool Capture(rtc::BufferT* buffer) override { + bool Capture(BufferT* buffer) override { fill_with_zero_ = !fill_with_zero_; int16_t max_amplitude; { @@ -95,7 +95,7 @@ class PulsedNoiseCapturerImpl final buffer->SetData( TestAudioDeviceModule::SamplesPerFrame(sampling_frequency_in_hz_) * num_channels_, - [&](rtc::ArrayView data) { + [&](ArrayView data) { if (fill_with_zero_) { std::fill(data.begin(), data.end(), 0); } else { @@ -137,11 +137,11 @@ class WavFileReader final : public TestAudioDeviceModule::Capturer { int NumChannels() const override { return num_channels_; } - bool Capture(rtc::BufferT* buffer) override { + bool Capture(BufferT* buffer) override { buffer->SetData( TestAudioDeviceModule::SamplesPerFrame(sampling_frequency_in_hz_) * num_channels_, - [&](rtc::ArrayView data) { + [&](ArrayView data) { size_t read = wav_reader_->ReadSamples(data.size(), data.data()); if (read < data.size() && repeat_) { do { @@ -191,7 +191,7 @@ class WavFileWriter final : public TestAudioDeviceModule::Renderer { int NumChannels() const override { return num_channels_; } - bool Render(rtc::ArrayView data) override { + bool Render(ArrayView data) override { wav_writer_->WriteSamples(data.data(), data.size()); return true; } @@ -228,7 +228,7 @@ class BoundedWavFileWriter : public TestAudioDeviceModule::Renderer { int NumChannels() const override { return num_channels_; } - bool Render(rtc::ArrayView data) override { + bool 
Render(ArrayView data) override { const int16_t kAmplitudeThreshold = 5; const int16_t* begin = data.begin(); @@ -287,7 +287,7 @@ class DiscardRenderer final : public TestAudioDeviceModule::Renderer { int NumChannels() const override { return num_channels_; } - bool Render(rtc::ArrayView data) override { return true; } + bool Render(ArrayView /* data */) override { return true; } private: int sampling_frequency_in_hz_; @@ -319,12 +319,12 @@ class RawFileReader final : public TestAudioDeviceModule::Capturer { int NumChannels() const override { return num_channels_; } - bool Capture(rtc::BufferT* buffer) override { + bool Capture(BufferT* buffer) override { buffer->SetData( TestAudioDeviceModule::SamplesPerFrame(SamplingFrequency()) * NumChannels(), - [&](rtc::ArrayView data) { - rtc::ArrayView read_buffer_view = ReadBufferView(); + [&](ArrayView data) { + ArrayView read_buffer_view = ReadBufferView(); size_t size = data.size() * 2; size_t read = input_file_.Read(read_buffer_view.data(), size); if (read < size && repeat_) { @@ -343,7 +343,7 @@ class RawFileReader final : public TestAudioDeviceModule::Capturer { } private: - rtc::ArrayView ReadBufferView() { return read_buffer_; } + ArrayView ReadBufferView() { return read_buffer_; } const std::string input_file_name_; const int sampling_frequency_in_hz_; @@ -381,7 +381,7 @@ class RawFileWriter : public TestAudioDeviceModule::Renderer { int NumChannels() const override { return num_channels_; } - bool Render(rtc::ArrayView data) override { + bool Render(ArrayView data) override { const int16_t kAmplitudeThreshold = 5; const int16_t* begin = data.begin(); @@ -441,15 +441,15 @@ class RawFileWriter : public TestAudioDeviceModule::Renderer { } // namespace size_t TestAudioDeviceModule::SamplesPerFrame(int sampling_frequency_in_hz) { - return rtc::CheckedDivExact(sampling_frequency_in_hz, kFramesPerSecond); + return CheckedDivExact(sampling_frequency_in_hz, kFramesPerSecond); } -rtc::scoped_refptr TestAudioDeviceModule::Create( +scoped_refptr TestAudioDeviceModule::Create( TaskQueueFactory* task_queue_factory, std::unique_ptr capturer, std::unique_ptr renderer, float speed) { - auto audio_device = rtc::make_ref_counted( + auto audio_device = make_ref_counted( task_queue_factory, std::move(capturer), std::move(renderer), speed); // Ensure that the current platform is supported. 
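// TestAudioDeviceModule::Create() above wires a Capturer and a Renderer into
// a fake ADM. Besides the pulsed-noise capturer exercised in the unit tests
// further down, this diff also touches the file-based helpers, which can feed
// capture from a WAV file and drop playout entirely. A hedged sketch; the
// file path (and its 48 kHz stereo format) is a placeholder assumption:

#include <memory>
#include <utility>

#include "api/task_queue/default_task_queue_factory.h"
#include "modules/audio_device/include/test_audio_device.h"

void CreateWavBackedAdm() {
  std::unique_ptr<webrtc::TaskQueueFactory> task_queue_factory =
      webrtc::CreateDefaultTaskQueueFactory();
  // Capture side: loop the WAV file; playout side: discard everything.
  auto capturer = webrtc::TestAudioDeviceModule::CreateWavFileReader(
      "/tmp/input.wav", /*repeat=*/true);
  auto renderer = webrtc::TestAudioDeviceModule::CreateDiscardRenderer(
      /*sampling_frequency_in_hz=*/48000, /*num_channels=*/2);
  auto adm = webrtc::TestAudioDeviceModule::Create(
      task_queue_factory.get(), std::move(capturer), std::move(renderer));
  // From here the module behaves like any other AudioDeviceModule:
  // Init(), RegisterAudioCallback(), StartRecording(), StartPlayout(), ...
  adm->Init();
}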
@@ -499,7 +499,7 @@ TestAudioDeviceModule::CreateWavFileReader(absl::string_view filename, bool repeat) { WavReader reader(filename); int sampling_frequency_in_hz = reader.sample_rate(); - int num_channels = rtc::checked_cast(reader.num_channels()); + int num_channels = checked_cast(reader.num_channels()); return std::make_unique(filename, sampling_frequency_in_hz, num_channels, repeat); } diff --git a/modules/audio_device/include/test_audio_device.h b/modules/audio_device/include/test_audio_device.h index 4b2d755ae1..6a9834235d 100644 --- a/modules/audio_device/include/test_audio_device.h +++ b/modules/audio_device/include/test_audio_device.h @@ -18,10 +18,10 @@ #include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/audio/audio_device.h" +#include "api/audio/audio_device_defines.h" #include "api/scoped_refptr.h" #include "api/task_queue/task_queue_factory.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/buffer.h" namespace webrtc { @@ -49,7 +49,7 @@ class TestAudioDeviceModule { // Replaces the contents of `buffer` with 10ms of captured audio data // (see TestAudioDeviceModule::SamplesPerFrame). Returns true if the // capturer can keep producing data, or false when the capture finishes. - virtual bool Capture(rtc::BufferT* buffer) = 0; + virtual bool Capture(BufferT* buffer) = 0; }; class Renderer { @@ -62,7 +62,7 @@ class TestAudioDeviceModule { virtual int NumChannels() const = 0; // Renders the passed audio data and returns true if the renderer wants // to keep receiving data, or false otherwise. - virtual bool Render(rtc::ArrayView data) = 0; + virtual bool Render(ArrayView data) = 0; }; // A fake capturer that generates pulses with random samples between @@ -81,7 +81,7 @@ class TestAudioDeviceModule { // `renderer` is an object that receives audio data that would have been // played out. Can be nullptr if this device is never used for playing. // Use one of the Create... functions to get these instances. 
- static rtc::scoped_refptr Create( + static scoped_refptr Create( TaskQueueFactory* task_queue_factory, std::unique_ptr capturer, std::unique_ptr renderer, diff --git a/modules/audio_device/include/test_audio_device_unittest.cc b/modules/audio_device/include/test_audio_device_unittest.cc index 7a122ca84b..18abf00f84 100644 --- a/modules/audio_device/include/test_audio_device_unittest.cc +++ b/modules/audio_device/include/test_audio_device_unittest.cc @@ -13,16 +13,16 @@ #include #include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/audio/audio_device_defines.h" #include "api/task_queue/task_queue_factory.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "common_audio/wav_file.h" #include "common_audio/wav_header.h" -#include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/synchronization/mutex.h" @@ -39,9 +39,9 @@ void RunWavTest(const std::vector& input_samples, const ::testing::TestInfo* const test_info = ::testing::UnitTest::GetInstance()->current_test_info(); - const std::string output_filename = - test::OutputPath() + "BoundedWavFileWriterTest_" + test_info->name() + - "_" + std::to_string(std::rand()) + ".wav"; + const std::string output_filename = test::OutputPathWithRandomDirectory() + + "BoundedWavFileWriterTest_" + + test_info->name() + ".wav"; static const size_t kSamplesPerFrame = 8; static const int kSampleRate = kSamplesPerFrame * 100; @@ -54,7 +54,7 @@ void RunWavTest(const std::vector& input_samples, TestAudioDeviceModule::CreateBoundedWavFileWriter(output_filename, 800); for (size_t i = 0; i < input_samples.size(); i += kSamplesPerFrame) { - EXPECT_TRUE(writer->Render(rtc::ArrayView( + EXPECT_TRUE(writer->Render(ArrayView( &input_samples[i], std::min(kSamplesPerFrame, input_samples.size() - i)))); } @@ -133,12 +133,12 @@ TEST(BoundedWavFileWriterTest, EndSilenceCutoff) { TEST(WavFileReaderTest, RepeatedTrueWithSingleFrameFileReadTwice) { static const std::vector kInputSamples = {75, 1234, 243, -1231, -22222, 0, 3, 88}; - static const rtc::BufferT kExpectedSamples(kInputSamples.data(), - kInputSamples.size()); + static const BufferT kExpectedSamples(kInputSamples.data(), + kInputSamples.size()); - const std::string output_filename = test::OutputPath() + + const std::string output_filename = test::OutputPathWithRandomDirectory() + "WavFileReaderTest_RepeatedTrue_" + - std::to_string(std::rand()) + ".wav"; + ".wav"; static const size_t kSamplesPerFrame = 8; static const int kSampleRate = kSamplesPerFrame * 100; @@ -151,7 +151,7 @@ TEST(WavFileReaderTest, RepeatedTrueWithSingleFrameFileReadTwice) { TestAudioDeviceModule::CreateWavFileWriter(output_filename, 800); for (size_t i = 0; i < kInputSamples.size(); i += kSamplesPerFrame) { - EXPECT_TRUE(writer->Render(rtc::ArrayView( + EXPECT_TRUE(writer->Render(ArrayView( &kInputSamples[i], std::min(kSamplesPerFrame, kInputSamples.size() - i)))); } @@ -160,7 +160,7 @@ TEST(WavFileReaderTest, RepeatedTrueWithSingleFrameFileReadTwice) { { std::unique_ptr reader = TestAudioDeviceModule::CreateWavFileReader(output_filename, true); - rtc::BufferT buffer(kExpectedSamples.size()); + BufferT buffer(kExpectedSamples.size()); EXPECT_TRUE(reader->Capture(&buffer)); EXPECT_EQ(kExpectedSamples, buffer); EXPECT_TRUE(reader->Capture(&buffer)); @@ -175,9 +175,9 @@ void RunRawTestNoRepeat(const std::vector& input_samples, const ::testing::TestInfo* const test_info = 
::testing::UnitTest::GetInstance()->current_test_info(); - const std::string output_filename = test::OutputPath() + "RawFileTest_" + - test_info->name() + "_" + - std::to_string(std::rand()) + ".raw"; + const std::string output_filename = test::OutputPathWithRandomDirectory() + + "RawFileTest_" + test_info->name() + + ".raw"; static const size_t kSamplesPerFrame = 8; static const int kSampleRate = kSamplesPerFrame * 100; @@ -191,7 +191,7 @@ void RunRawTestNoRepeat(const std::vector& input_samples, output_filename, /*sampling_frequency_in_hz=*/800); for (size_t i = 0; i < input_samples.size(); i += kSamplesPerFrame) { - EXPECT_TRUE(writer->Render(rtc::ArrayView( + EXPECT_TRUE(writer->Render(ArrayView( &input_samples[i], std::min(kSamplesPerFrame, input_samples.size() - i)))); } @@ -202,8 +202,8 @@ void RunRawTestNoRepeat(const std::vector& input_samples, TestAudioDeviceModule::CreateRawFileReader( output_filename, /*sampling_frequency_in_hz=*/800, /*num_channels=*/2, /*repeat=*/false); - rtc::BufferT buffer(expected_samples.size()); - rtc::BufferT expected_buffer(expected_samples.size()); + BufferT buffer(expected_samples.size()); + BufferT expected_buffer(expected_samples.size()); expected_buffer.SetData(expected_samples); EXPECT_TRUE(reader->Capture(&buffer)); EXPECT_EQ(expected_buffer, buffer); @@ -275,14 +275,14 @@ TEST(RawFileWriterTest, Repeat) { static const std::vector kInputSamples = { 75, 1234, 243, -1231, -22222, 0, 3, 88, 1222, -1213, -13222, -7, -3525, 5787, -25247, 8}; - static const rtc::BufferT kExpectedSamples(kInputSamples.data(), - kInputSamples.size()); + static const BufferT kExpectedSamples(kInputSamples.data(), + kInputSamples.size()); const ::testing::TestInfo* const test_info = ::testing::UnitTest::GetInstance()->current_test_info(); - const std::string output_filename = test::OutputPath() + "RawFileTest_" + - test_info->name() + "_" + + const std::string output_filename = test::OutputPathWithRandomDirectory() + + "RawFileTest_" + test_info->name() + "_" + std::to_string(std::rand()) + ".raw"; static const size_t kSamplesPerFrame = 8; @@ -297,7 +297,7 @@ TEST(RawFileWriterTest, Repeat) { output_filename, /*sampling_frequency_in_hz=*/800); for (size_t i = 0; i < kInputSamples.size(); i += kSamplesPerFrame) { - EXPECT_TRUE(writer->Render(rtc::ArrayView( + EXPECT_TRUE(writer->Render(ArrayView( &kInputSamples[i], std::min(kSamplesPerFrame, kInputSamples.size() - i)))); } @@ -308,7 +308,7 @@ TEST(RawFileWriterTest, Repeat) { TestAudioDeviceModule::CreateRawFileReader( output_filename, /*sampling_frequency_in_hz=*/800, /*num_channels=*/2, /*repeat=*/true); - rtc::BufferT buffer(kExpectedSamples.size()); + BufferT buffer(kExpectedSamples.size()); EXPECT_TRUE(reader->Capture(&buffer)); EXPECT_EQ(kExpectedSamples, buffer); EXPECT_TRUE(reader->Capture(&buffer)); @@ -323,7 +323,7 @@ TEST(PulsedNoiseCapturerTest, SetMaxAmplitude) { std::unique_ptr capturer = TestAudioDeviceModule::CreatePulsedNoiseCapturer( kAmplitude, /*sampling_frequency_in_hz=*/8000); - rtc::BufferT recording_buffer; + BufferT recording_buffer; // Verify that the capturer doesn't create entries louder than than // kAmplitude. 
Since the pulse generator alternates between writing @@ -356,17 +356,17 @@ class TestAudioTransport : public AudioTransport { ~TestAudioTransport() override = default; int32_t RecordedDataIsAvailable( - const void* audioSamples, + const void* /* audioSamples */, size_t samples_per_channel, size_t bytes_per_sample, size_t number_of_channels, uint32_t samples_per_second, - uint32_t total_delay_ms, - int32_t clock_drift, - uint32_t current_mic_level, - bool key_pressed, + uint32_t /* total_delay_ms */, + int32_t /* clock_drift */, + uint32_t /* current_mic_level */, + bool /* key_pressed */, uint32_t& new_mic_level, - absl::optional estimated_capture_time_ns) override { + std::optional /* estimated_capture_time_ns */) override { new_mic_level = 1; if (mode_ != Mode::kRecording) { @@ -411,26 +411,26 @@ class TestAudioTransport : public AudioTransport { return 0; } - int32_t RecordedDataIsAvailable(const void* audio_samples, - size_t samples_per_channel, - size_t bytes_per_sample, - size_t number_of_channels, - uint32_t samples_per_second, - uint32_t total_delay_ms, - int32_t clockDrift, - uint32_t current_mic_level, - bool key_pressed, - uint32_t& new_mic_level) override { + int32_t RecordedDataIsAvailable(const void* /* audio_samples */, + size_t /* samples_per_channel */, + size_t /* bytes_per_sample */, + size_t /* number_of_channels */, + uint32_t /* samples_per_second */, + uint32_t /* total_delay_ms */, + int32_t /* clockDrift */, + uint32_t /* current_mic_level */, + bool /* key_pressed */, + uint32_t& /* new_mic_level */) override { RTC_CHECK(false) << "This methods should be never executed"; } - void PullRenderData(int bits_per_sample, - int sample_rate, - size_t number_of_channels, - size_t number_of_frames, - void* audio_data, - int64_t* elapsed_time_ms, - int64_t* ntp_time_ms) override { + void PullRenderData(int /* bits_per_sample */, + int /* sample_rate */, + size_t /* number_of_channels */, + size_t /* number_of_frames */, + void* /* audio_data */, + int64_t* /* elapsed_time_ms */, + int64_t* /* ntp_time_ms */) override { RTC_CHECK(false) << "This methods should be never executed"; } @@ -469,7 +469,7 @@ TEST(TestAudioDeviceModuleTest, CreatedADMCanRecord) { /*max_amplitude=*/1000, /*sampling_frequency_in_hz=*/48000, /*num_channels=*/2); - rtc::scoped_refptr adm = TestAudioDeviceModule::Create( + scoped_refptr adm = TestAudioDeviceModule::Create( time_controller.GetTaskQueueFactory(), std::move(capturer), /*renderer=*/nullptr); @@ -500,7 +500,7 @@ TEST(TestAudioDeviceModuleTest, CreatedADMCanPlay) { TestAudioDeviceModule::CreateDiscardRenderer( /*sampling_frequency_in_hz=*/48000, /*num_channels=*/2); - rtc::scoped_refptr adm = + scoped_refptr adm = TestAudioDeviceModule::Create(time_controller.GetTaskQueueFactory(), /*capturer=*/nullptr, std::move(renderer)); diff --git a/modules/audio_device/linux/audio_device_alsa_linux.cc b/modules/audio_device/linux/audio_device_alsa_linux.cc index eab73737c5..f5a3189371 100644 --- a/modules/audio_device/linux/audio_device_alsa_linux.cc +++ b/modules/audio_device/linux/audio_device_alsa_linux.cc @@ -588,7 +588,7 @@ int32_t AudioDeviceLinuxALSA::SetPlayoutDevice(uint16_t index) { return -1; } - uint32_t nDevices = GetDevicesInfo(0, true); + int32_t nDevices = GetDevicesInfo(0, true); RTC_LOG(LS_VERBOSE) << "number of available audio output devices is " << nDevices; @@ -657,7 +657,7 @@ int32_t AudioDeviceLinuxALSA::SetRecordingDevice(uint16_t index) { return -1; } - uint32_t nDevices = GetDevicesInfo(0, false); + int32_t nDevices = 
GetDevicesInfo(0, false); RTC_LOG(LS_VERBOSE) << "number of availiable audio input devices is " << nDevices; @@ -803,7 +803,7 @@ int32_t AudioDeviceLinuxALSA::InitPlayoutLocked() { #if defined(WEBRTC_ARCH_BIG_ENDIAN) SND_PCM_FORMAT_S16_BE, #else - SND_PCM_FORMAT_S16_LE, // format + SND_PCM_FORMAT_S16_LE, // format #endif SND_PCM_ACCESS_RW_INTERLEAVED, // access _playChannels, // channels @@ -882,7 +882,7 @@ int32_t AudioDeviceLinuxALSA::InitRecordingLocked() { // Start by closing any existing pcm-input devices // if (_handleRecord != NULL) { - int errVal = LATE(snd_pcm_close)(_handleRecord); + errVal = LATE(snd_pcm_close)(_handleRecord); _handleRecord = NULL; _recIsInitialized = false; if (errVal < 0) { @@ -927,7 +927,7 @@ int32_t AudioDeviceLinuxALSA::InitRecordingLocked() { #if defined(WEBRTC_ARCH_BIG_ENDIAN) SND_PCM_FORMAT_S16_BE, // format #else - SND_PCM_FORMAT_S16_LE, // format + SND_PCM_FORMAT_S16_LE, // format #endif SND_PCM_ACCESS_RW_INTERLEAVED, // access _recChannels, // channels @@ -1019,13 +1019,14 @@ int32_t AudioDeviceLinuxALSA::StartRecording() { return -1; } // RECORDING - _ptrThreadRec = rtc::PlatformThread::SpawnJoinable( + _ptrThreadRec = webrtc::PlatformThread::SpawnJoinable( [this] { while (RecThreadProcess()) { } }, "webrtc_audio_module_capture_thread", - rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); + webrtc::ThreadAttributes().SetPriority( + webrtc::ThreadPriority::kRealtime)); errVal = LATE(snd_pcm_prepare)(_handleRecord); if (errVal < 0) { @@ -1136,13 +1137,14 @@ int32_t AudioDeviceLinuxALSA::StartPlayout() { } // PLAYOUT - _ptrThreadPlay = rtc::PlatformThread::SpawnJoinable( + _ptrThreadPlay = webrtc::PlatformThread::SpawnJoinable( [this] { while (PlayThreadProcess()) { } }, "webrtc_audio_module_play_thread", - rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); + webrtc::ThreadAttributes().SetPriority( + webrtc::ThreadPriority::kRealtime)); int errVal = LATE(snd_pcm_prepare)(_handlePlayout); if (errVal < 0) { @@ -1516,7 +1518,7 @@ bool AudioDeviceLinuxALSA::RecThreadProcess() { int err; snd_pcm_sframes_t frames; snd_pcm_sframes_t avail_frames; - int8_t buffer[_recordingBufferSizeIn10MS]; + std::vector buffer(_recordingBufferSizeIn10MS); Lock(); @@ -1542,7 +1544,7 @@ bool AudioDeviceLinuxALSA::RecThreadProcess() { if (static_cast(avail_frames) > _recordingFramesLeft) avail_frames = _recordingFramesLeft; - frames = LATE(snd_pcm_readi)(_handleRecord, buffer, + frames = LATE(snd_pcm_readi)(_handleRecord, buffer.data(), avail_frames); // frames to be written if (frames < 0) { RTC_LOG(LS_ERROR) << "capture snd_pcm_readi error: " @@ -1557,8 +1559,8 @@ bool AudioDeviceLinuxALSA::RecThreadProcess() { LATE(snd_pcm_frames_to_bytes)(_handleRecord, _recordingFramesLeft); int size = LATE(snd_pcm_frames_to_bytes)(_handleRecord, frames); - memcpy(&_recordingBuffer[_recordingBufferSizeIn10MS - left_size], buffer, - size); + memcpy(&_recordingBuffer[_recordingBufferSizeIn10MS - left_size], + buffer.data(), size); _recordingFramesLeft -= frames; if (!_recordingFramesLeft) { // buf is full diff --git a/modules/audio_device/linux/audio_device_alsa_linux.h b/modules/audio_device/linux/audio_device_alsa_linux.h index 23e21d3ce9..dc7f544201 100644 --- a/modules/audio_device/linux/audio_device_alsa_linux.h +++ b/modules/audio_device/linux/audio_device_alsa_linux.h @@ -155,8 +155,8 @@ class AudioDeviceLinuxALSA : public AudioDeviceGeneric { Mutex mutex_; - rtc::PlatformThread _ptrThreadRec; - rtc::PlatformThread _ptrThreadPlay; + 
webrtc::PlatformThread _ptrThreadRec; + webrtc::PlatformThread _ptrThreadPlay; AudioMixerManagerLinuxALSA _mixerManager; diff --git a/modules/audio_device/linux/audio_device_pulse_linux.cc b/modules/audio_device/linux/audio_device_pulse_linux.cc index 90cd58c497..8843fe727e 100644 --- a/modules/audio_device/linux/audio_device_pulse_linux.cc +++ b/modules/audio_device/linux/audio_device_pulse_linux.cc @@ -160,8 +160,8 @@ AudioDeviceGeneric::InitStatus AudioDeviceLinuxPulse::Init() { // RECORDING const auto attributes = - rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime); - _ptrThreadRec = rtc::PlatformThread::SpawnJoinable( + webrtc::ThreadAttributes().SetPriority(webrtc::ThreadPriority::kRealtime); + _ptrThreadRec = webrtc::PlatformThread::SpawnJoinable( [this] { while (RecThreadProcess()) { } @@ -169,7 +169,7 @@ AudioDeviceGeneric::InitStatus AudioDeviceLinuxPulse::Init() { "webrtc_audio_module_rec_thread", attributes); // PLAYOUT - _ptrThreadPlay = rtc::PlatformThread::SpawnJoinable( + _ptrThreadPlay = webrtc::PlatformThread::SpawnJoinable( [this] { while (PlayThreadProcess()) { } diff --git a/modules/audio_device/linux/audio_device_pulse_linux.h b/modules/audio_device/linux/audio_device_pulse_linux.h index 0cf89ef011..2ea6ebf90d 100644 --- a/modules/audio_device/linux/audio_device_pulse_linux.h +++ b/modules/audio_device/linux/audio_device_pulse_linux.h @@ -13,11 +13,11 @@ #include +#include "api/audio/audio_device.h" +#include "api/audio/audio_device_defines.h" #include "api/sequence_checker.h" #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/audio_device_generic.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_device/include/audio_device_defines.h" #include "modules/audio_device/linux/audio_mixer_manager_pulse_linux.h" #include "modules/audio_device/linux/pulseaudiosymboltable_linux.h" #include "rtc_base/event.h" @@ -263,13 +263,13 @@ class AudioDeviceLinuxPulse : public AudioDeviceGeneric { AudioDeviceBuffer* _ptrAudioBuffer; mutable Mutex mutex_; - rtc::Event _timeEventRec; - rtc::Event _timeEventPlay; - rtc::Event _recStartEvent; - rtc::Event _playStartEvent; + webrtc::Event _timeEventRec; + webrtc::Event _timeEventPlay; + webrtc::Event _recStartEvent; + webrtc::Event _playStartEvent; - rtc::PlatformThread _ptrThreadPlay; - rtc::PlatformThread _ptrThreadRec; + webrtc::PlatformThread _ptrThreadPlay; + webrtc::PlatformThread _ptrThreadRec; AudioMixerManagerLinuxPulse _mixerManager; diff --git a/modules/audio_device/linux/audio_mixer_manager_alsa_linux.h b/modules/audio_device/linux/audio_mixer_manager_alsa_linux.h index d98287822d..ec6c781412 100644 --- a/modules/audio_device/linux/audio_mixer_manager_alsa_linux.h +++ b/modules/audio_device/linux/audio_mixer_manager_alsa_linux.h @@ -13,7 +13,7 @@ #include -#include "modules/audio_device/include/audio_device.h" +#include "api/audio/audio_device.h" #include "modules/audio_device/linux/alsasymboltable_linux.h" #include "rtc_base/synchronization/mutex.h" diff --git a/modules/audio_device/mac/audio_device_mac.cc b/modules/audio_device/mac/audio_device_mac.cc index ed7b0e4669..0d21a986f0 100644 --- a/modules/audio_device/mac/audio_device_mac.cc +++ b/modules/audio_device/mac/audio_device_mac.cc @@ -15,6 +15,7 @@ #include // sysctlbyname() #include +#include #include "modules/audio_device/audio_device_config.h" #include "modules/third_party/portaudio/pa_ringbuffer.h" @@ -25,35 +26,35 @@ namespace webrtc { -#define WEBRTC_CA_RETURN_ON_ERR(expr) \ - do { 
\ - err = expr; \ - if (err != noErr) { \ - logCAMsg(rtc::LS_ERROR, "Error in " #expr, (const char*)&err); \ - return -1; \ - } \ +#define WEBRTC_CA_RETURN_ON_ERR(expr) \ + do { \ + err = expr; \ + if (err != noErr) { \ + logCAMsg(webrtc::LS_ERROR, "Error in " #expr, (const char*)&err); \ + return -1; \ + } \ } while (0) -#define WEBRTC_CA_LOG_ERR(expr) \ - do { \ - err = expr; \ - if (err != noErr) { \ - logCAMsg(rtc::LS_ERROR, "Error in " #expr, (const char*)&err); \ - } \ +#define WEBRTC_CA_LOG_ERR(expr) \ + do { \ + err = expr; \ + if (err != noErr) { \ + logCAMsg(webrtc::LS_ERROR, "Error in " #expr, (const char*)&err); \ + } \ } while (0) -#define WEBRTC_CA_LOG_WARN(expr) \ - do { \ - err = expr; \ - if (err != noErr) { \ - logCAMsg(rtc::LS_WARNING, "Error in " #expr, (const char*)&err); \ - } \ +#define WEBRTC_CA_LOG_WARN(expr) \ + do { \ + err = expr; \ + if (err != noErr) { \ + logCAMsg(webrtc::LS_WARNING, "Error in " #expr, (const char*)&err); \ + } \ } while (0) enum { MaxNumberDevices = 64 }; // CoreAudio errors are best interpreted as four character strings. -void AudioDeviceMac::logCAMsg(const rtc::LoggingSeverity sev, +void AudioDeviceMac::logCAMsg(const webrtc::LoggingSeverity sev, const char* msg, const char* err) { RTC_DCHECK(msg != NULL); @@ -61,14 +62,14 @@ void AudioDeviceMac::logCAMsg(const rtc::LoggingSeverity sev, #ifdef WEBRTC_ARCH_BIG_ENDIAN switch (sev) { - case rtc::LS_ERROR: + case webrtc::LS_ERROR: RTC_LOG(LS_ERROR) << msg << ": " << err[0] << err[1] << err[2] << err[3]; break; - case rtc::LS_WARNING: + case webrtc::LS_WARNING: RTC_LOG(LS_WARNING) << msg << ": " << err[0] << err[1] << err[2] << err[3]; break; - case rtc::LS_VERBOSE: + case webrtc::LS_VERBOSE: RTC_LOG(LS_VERBOSE) << msg << ": " << err[0] << err[1] << err[2] << err[3]; break; @@ -78,14 +79,14 @@ void AudioDeviceMac::logCAMsg(const rtc::LoggingSeverity sev, #else // We need to flip the characters in this case. 
switch (sev) { - case rtc::LS_ERROR: + case webrtc::LS_ERROR: RTC_LOG(LS_ERROR) << msg << ": " << err[3] << err[2] << err[1] << err[0]; break; - case rtc::LS_WARNING: + case webrtc::LS_WARNING: RTC_LOG(LS_WARNING) << msg << ": " << err[3] << err[2] << err[1] << err[0]; break; - case rtc::LS_VERBOSE: + case webrtc::LS_VERBOSE: RTC_LOG(LS_VERBOSE) << msg << ": " << err[3] << err[2] << err[1] << err[0]; break; @@ -345,7 +346,7 @@ int32_t AudioDeviceMac::Terminate() { err = AudioHardwareUnload(); if (err != noErr) { - logCAMsg(rtc::LS_ERROR, "Error in AudioHardwareUnload()", + logCAMsg(webrtc::LS_ERROR, "Error in AudioHardwareUnload()", (const char*)&err); retVal = -1; } @@ -836,7 +837,7 @@ int32_t AudioDeviceMac::PlayoutDeviceName(uint16_t index, } return GetDeviceName(kAudioDevicePropertyScopeOutput, index, - rtc::ArrayView(name, kAdmMaxDeviceNameSize)); + webrtc::ArrayView(name, kAdmMaxDeviceNameSize)); } int32_t AudioDeviceMac::RecordingDeviceName(uint16_t index, @@ -855,7 +856,7 @@ int32_t AudioDeviceMac::RecordingDeviceName(uint16_t index, } return GetDeviceName(kAudioDevicePropertyScopeInput, index, - rtc::ArrayView(name, kAdmMaxDeviceNameSize)); + webrtc::ArrayView(name, kAdmMaxDeviceNameSize)); } int16_t AudioDeviceMac::RecordingDevices() { @@ -1016,7 +1017,7 @@ int32_t AudioDeviceMac::InitPlayout() { _outputDeviceID, &propertyAddress, 0, NULL, &size, &_outStreamFormat)); if (_outStreamFormat.mFormatID != kAudioFormatLinearPCM) { - logCAMsg(rtc::LS_ERROR, "Unacceptable output stream format -> mFormatID", + logCAMsg(webrtc::LS_ERROR, "Unacceptable output stream format -> mFormatID", (const char*)&_outStreamFormat.mFormatID); return -1; } @@ -1046,7 +1047,7 @@ int32_t AudioDeviceMac::InitPlayout() { << ", mBitsPerChannel = " << _outStreamFormat.mBitsPerChannel; RTC_LOG(LS_VERBOSE) << "mFormatFlags = " << _outStreamFormat.mFormatFlags; - logCAMsg(rtc::LS_VERBOSE, "mFormatID", + logCAMsg(webrtc::LS_VERBOSE, "mFormatID", (const char*)&_outStreamFormat.mFormatID); // Our preferred format to work with. 
@@ -1126,7 +1127,7 @@ int32_t AudioDeviceMac::InitRecording() { _inputDeviceID, &propertyAddress, 0, NULL, &size, &_inStreamFormat)); if (_inStreamFormat.mFormatID != kAudioFormatLinearPCM) { - logCAMsg(rtc::LS_ERROR, "Unacceptable input stream format -> mFormatID", + logCAMsg(webrtc::LS_ERROR, "Unacceptable input stream format -> mFormatID", (const char*)&_inStreamFormat.mFormatID); return -1; } @@ -1159,7 +1160,7 @@ int32_t AudioDeviceMac::InitRecording() { << ", mBitsPerChannel = " << _inStreamFormat.mBitsPerChannel; RTC_LOG(LS_VERBOSE) << "mFormatFlags = " << _inStreamFormat.mFormatFlags; - logCAMsg(rtc::LS_VERBOSE, "mFormatID", + logCAMsg(webrtc::LS_VERBOSE, "mFormatID", (const char*)&_inStreamFormat.mFormatID); // Our preferred format to work with @@ -1292,13 +1293,14 @@ int32_t AudioDeviceMac::StartRecording() { } RTC_DCHECK(capture_worker_thread_.empty()); - capture_worker_thread_ = rtc::PlatformThread::SpawnJoinable( + capture_worker_thread_ = webrtc::PlatformThread::SpawnJoinable( [this] { while (CaptureWorkerThread()) { } }, "CaptureWorkerThread", - rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); + webrtc::ThreadAttributes().SetPriority( + webrtc::ThreadPriority::kRealtime)); OSStatus err = noErr; if (_twoDevices) { @@ -1429,13 +1431,14 @@ int32_t AudioDeviceMac::StartPlayout() { } RTC_DCHECK(render_worker_thread_.empty()); - render_worker_thread_ = rtc::PlatformThread::SpawnJoinable( + render_worker_thread_ = webrtc::PlatformThread::SpawnJoinable( [this] { while (RenderWorkerThread()) { } }, "RenderWorkerThread", - rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); + webrtc::ThreadAttributes().SetPriority( + webrtc::ThreadPriority::kRealtime)); if (_twoDevices || !_recording) { OSStatus err = noErr; @@ -1648,7 +1651,7 @@ int32_t AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope, int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, const uint16_t index, - rtc::ArrayView name) { + webrtc::ArrayView name) { OSStatus err = noErr; AudioDeviceID deviceIds[MaxNumberDevices]; @@ -1696,7 +1699,7 @@ int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData( usedID, &propertyAddress, 0, NULL, &len, devName.data())); - rtc::SimpleStringBuilder ss(name); + webrtc::SimpleStringBuilder ss(name); ss.AppendFormat("default (%s)", devName.data()); } else { if (index < numberDevices) { @@ -1942,7 +1945,7 @@ int32_t AudioDeviceMac::HandleDeviceChange() { _captureDeviceIsAlive = 0; _mixerManager.CloseMicrophone(); } else if (err != noErr) { - logCAMsg(rtc::LS_ERROR, "Error in AudioDeviceGetProperty()", + logCAMsg(webrtc::LS_ERROR, "Error in AudioDeviceGetProperty()", (const char*)&err); return -1; } @@ -1961,7 +1964,7 @@ int32_t AudioDeviceMac::HandleDeviceChange() { _renderDeviceIsAlive = 0; _mixerManager.CloseSpeaker(); } else if (err != noErr) { - logCAMsg(rtc::LS_ERROR, "Error in AudioDeviceGetProperty()", + logCAMsg(webrtc::LS_ERROR, "Error in AudioDeviceGetProperty()", (const char*)&err); return -1; } @@ -1988,7 +1991,7 @@ int32_t AudioDeviceMac::HandleStreamFormatChange( objectId, &propertyAddress, 0, NULL, &size, &streamFormat)); if (streamFormat.mFormatID != kAudioFormatLinearPCM) { - logCAMsg(rtc::LS_ERROR, "Unacceptable input stream format -> mFormatID", + logCAMsg(webrtc::LS_ERROR, "Unacceptable input stream format -> mFormatID", (const char*)&streamFormat.mFormatID); return -1; } @@ -2015,7 +2018,8 @@ int32_t 
AudioDeviceMac::HandleStreamFormatChange( RTC_LOG(LS_VERBOSE) << "mBytesPerFrame = " << streamFormat.mBytesPerFrame << ", mBitsPerChannel = " << streamFormat.mBitsPerChannel; RTC_LOG(LS_VERBOSE) << "mFormatFlags = " << streamFormat.mFormatFlags; - logCAMsg(rtc::LS_VERBOSE, "mFormatID", (const char*)&streamFormat.mFormatID); + logCAMsg(webrtc::LS_VERBOSE, "mFormatID", + (const char*)&streamFormat.mFormatID); if (propertyAddress.mScope == kAudioDevicePropertyScopeInput) { const int io_block_size_samples = streamFormat.mChannelsPerFrame * @@ -2217,7 +2221,7 @@ OSStatus AudioDeviceMac::implDeviceIOProc(const AudioBufferList* inputData, RTC_LOG(LS_ERROR) << "Error in AudioConverterFillComplexBuffer()"; return 1; } else { - logCAMsg(rtc::LS_ERROR, "Error in AudioConverterFillComplexBuffer()", + logCAMsg(webrtc::LS_ERROR, "Error in AudioConverterFillComplexBuffer()", (const char*)&err); return 1; } @@ -2428,7 +2432,7 @@ bool AudioDeviceMac::CaptureWorkerThread() { OSStatus err = noErr; UInt32 noRecSamples = ENGINE_REC_BUF_SIZE_IN_SAMPLES * _inDesiredFormat.mChannelsPerFrame; - SInt16 recordBuffer[noRecSamples]; + std::vector recordBuffer(noRecSamples); UInt32 size = ENGINE_REC_BUF_SIZE_IN_SAMPLES; AudioBufferList engineBuffer; @@ -2436,7 +2440,7 @@ bool AudioDeviceMac::CaptureWorkerThread() { engineBuffer.mBuffers->mNumberChannels = _inDesiredFormat.mChannelsPerFrame; engineBuffer.mBuffers->mDataByteSize = _inDesiredFormat.mBytesPerPacket * noRecSamples; - engineBuffer.mBuffers->mData = recordBuffer; + engineBuffer.mBuffers->mData = recordBuffer.data(); err = AudioConverterFillComplexBuffer(_captureConverter, inConverterProc, this, &size, &engineBuffer, NULL); @@ -2445,7 +2449,7 @@ bool AudioDeviceMac::CaptureWorkerThread() { // This is our own error. return false; } else { - logCAMsg(rtc::LS_ERROR, "Error in AudioConverterFillComplexBuffer()", + logCAMsg(webrtc::LS_ERROR, "Error in AudioConverterFillComplexBuffer()", (const char*)&err); return false; } @@ -2471,7 +2475,8 @@ bool AudioDeviceMac::CaptureWorkerThread() { // store the recorded buffer (no action will be taken if the // #recorded samples is not a full buffer) - _ptrAudioBuffer->SetRecordedBuffer((int8_t*)&recordBuffer, (uint32_t)size); + _ptrAudioBuffer->SetRecordedBuffer((int8_t*)recordBuffer.data(), + (uint32_t)size); _ptrAudioBuffer->SetVQEData(msecOnPlaySide, msecOnRecordSide); _ptrAudioBuffer->SetTypingStatus(KeyPressed()); diff --git a/modules/audio_device/mac/audio_device_mac.h b/modules/audio_device/mac/audio_device_mac.h index bb06395d03..6c07d62f01 100644 --- a/modules/audio_device/mac/audio_device_mac.h +++ b/modules/audio_device/mac/audio_device_mac.h @@ -170,7 +170,7 @@ class AudioDeviceMac : public AudioDeviceGeneric { static void AtomicSet32(int32_t* theValue, int32_t newValue); static int32_t AtomicGet32(int32_t* theValue); - static void logCAMsg(rtc::LoggingSeverity sev, + static void logCAMsg(webrtc::LoggingSeverity sev, const char* msg, const char* err); @@ -180,7 +180,7 @@ class AudioDeviceMac : public AudioDeviceGeneric { int32_t GetDeviceName(AudioObjectPropertyScope scope, uint16_t index, - rtc::ArrayView name); + webrtc::ArrayView name); int32_t InitDevice(uint16_t userDeviceIndex, AudioDeviceID& deviceId, @@ -267,14 +267,14 @@ class AudioDeviceMac : public AudioDeviceGeneric { Mutex mutex_; - rtc::Event _stopEventRec; - rtc::Event _stopEvent; + webrtc::Event _stopEventRec; + webrtc::Event _stopEvent; // Only valid/running between calls to StartRecording and StopRecording. 
- rtc::PlatformThread capture_worker_thread_; + webrtc::PlatformThread capture_worker_thread_; // Only valid/running between calls to StartPlayout and StopPlayout. - rtc::PlatformThread render_worker_thread_; + webrtc::PlatformThread render_worker_thread_; AudioMixerManagerMac _mixerManager; @@ -282,10 +282,8 @@ class AudioDeviceMac : public AudioDeviceGeneric { uint16_t _outputDeviceIndex; AudioDeviceID _inputDeviceID; AudioDeviceID _outputDeviceID; -#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 1050 AudioDeviceIOProcID _inDeviceIOProcID; AudioDeviceIOProcID _deviceIOProcID; -#endif bool _inputDeviceIsSpecified; bool _outputDeviceIsSpecified; diff --git a/modules/audio_device/mac/audio_mixer_manager_mac.cc b/modules/audio_device/mac/audio_mixer_manager_mac.cc index 942e7db3b3..6644c5acc4 100644 --- a/modules/audio_device/mac/audio_mixer_manager_mac.cc +++ b/modules/audio_device/mac/audio_mixer_manager_mac.cc @@ -16,29 +16,29 @@ namespace webrtc { -#define WEBRTC_CA_RETURN_ON_ERR(expr) \ - do { \ - err = expr; \ - if (err != noErr) { \ - logCAMsg(rtc::LS_ERROR, "Error in " #expr, (const char*)&err); \ - return -1; \ - } \ +#define WEBRTC_CA_RETURN_ON_ERR(expr) \ + do { \ + err = expr; \ + if (err != noErr) { \ + logCAMsg(webrtc::LS_ERROR, "Error in " #expr, (const char*)&err); \ + return -1; \ + } \ } while (0) -#define WEBRTC_CA_LOG_ERR(expr) \ - do { \ - err = expr; \ - if (err != noErr) { \ - logCAMsg(rtc::LS_ERROR, "Error in " #expr, (const char*)&err); \ - } \ +#define WEBRTC_CA_LOG_ERR(expr) \ + do { \ + err = expr; \ + if (err != noErr) { \ + logCAMsg(webrtc::LS_ERROR, "Error in " #expr, (const char*)&err); \ + } \ } while (0) -#define WEBRTC_CA_LOG_WARN(expr) \ - do { \ - err = expr; \ - if (err != noErr) { \ - logCAMsg(rtc::LS_WARNING, "Error in " #expr, (const char*)&err); \ - } \ +#define WEBRTC_CA_LOG_WARN(expr) \ + do { \ + err = expr; \ + if (err != noErr) { \ + logCAMsg(webrtc::LS_WARNING, "Error in " #expr, (const char*)&err); \ + } \ } while (0) AudioMixerManagerMac::AudioMixerManagerMac() @@ -885,19 +885,19 @@ int32_t AudioMixerManagerMac::MinMicrophoneVolume(uint32_t& minVolume) const { // ============================================================================ // CoreAudio errors are best interpreted as four character strings. -void AudioMixerManagerMac::logCAMsg(const rtc::LoggingSeverity sev, +void AudioMixerManagerMac::logCAMsg(const webrtc::LoggingSeverity sev, const char* msg, const char* err) { RTC_DCHECK(msg != NULL); RTC_DCHECK(err != NULL); - RTC_DCHECK(sev == rtc::LS_ERROR || sev == rtc::LS_WARNING); + RTC_DCHECK(sev == webrtc::LS_ERROR || sev == webrtc::LS_WARNING); #ifdef WEBRTC_ARCH_BIG_ENDIAN switch (sev) { - case rtc::LS_ERROR: + case webrtc::LS_ERROR: RTC_LOG(LS_ERROR) << msg << ": " << err[0] << err[1] << err[2] << err[3]; break; - case rtc::LS_WARNING: + case webrtc::LS_WARNING: RTC_LOG(LS_WARNING) << msg << ": " << err[0] << err[1] << err[2] << err[3]; break; @@ -907,10 +907,10 @@ void AudioMixerManagerMac::logCAMsg(const rtc::LoggingSeverity sev, #else // We need to flip the characters in this case. 
switch (sev) { - case rtc::LS_ERROR: + case webrtc::LS_ERROR: RTC_LOG(LS_ERROR) << msg << ": " << err[3] << err[2] << err[1] << err[0]; break; - case rtc::LS_WARNING: + case webrtc::LS_WARNING: RTC_LOG(LS_WARNING) << msg << ": " << err[3] << err[2] << err[1] << err[0]; break; diff --git a/modules/audio_device/mac/audio_mixer_manager_mac.h b/modules/audio_device/mac/audio_mixer_manager_mac.h index 0ccab4879b..78ce8319e9 100644 --- a/modules/audio_device/mac/audio_mixer_manager_mac.h +++ b/modules/audio_device/mac/audio_mixer_manager_mac.h @@ -13,7 +13,7 @@ #include -#include "modules/audio_device/include/audio_device.h" +#include "api/audio/audio_device.h" #include "rtc_base/logging.h" #include "rtc_base/synchronization/mutex.h" @@ -54,7 +54,7 @@ class AudioMixerManagerMac { private: int32_t CloseSpeakerLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); int32_t CloseMicrophoneLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - static void logCAMsg(rtc::LoggingSeverity sev, + static void logCAMsg(webrtc::LoggingSeverity sev, const char* msg, const char* err); diff --git a/modules/audio_device/mock_audio_device_buffer.h b/modules/audio_device/mock_audio_device_buffer.h index b0f54c20ff..d9c9a48b53 100644 --- a/modules/audio_device/mock_audio_device_buffer.h +++ b/modules/audio_device/mock_audio_device_buffer.h @@ -11,6 +11,8 @@ #ifndef MODULES_AUDIO_DEVICE_MOCK_AUDIO_DEVICE_BUFFER_H_ #define MODULES_AUDIO_DEVICE_MOCK_AUDIO_DEVICE_BUFFER_H_ +#include + #include "modules/audio_device/audio_device_buffer.h" #include "test/gmock.h" @@ -24,7 +26,9 @@ class MockAudioDeviceBuffer : public AudioDeviceBuffer { MOCK_METHOD(int32_t, GetPlayoutData, (void* audioBuffer), (override)); MOCK_METHOD(int32_t, SetRecordedBuffer, - (const void* audioBuffer, size_t nSamples), + (const void* audioBuffer, + size_t nSamples, + std::optional capture_time_ns), (override)); MOCK_METHOD(void, SetVQEData, (int playDelayMS, int recDelayMS), (override)); MOCK_METHOD(int32_t, DeliverRecordedData, (), (override)); diff --git a/modules/audio_device/test_audio_device_impl.cc b/modules/audio_device/test_audio_device_impl.cc index 627e68b36f..2e12be175b 100644 --- a/modules/audio_device/test_audio_device_impl.cc +++ b/modules/audio_device/test_audio_device_impl.cc @@ -10,16 +10,15 @@ #include "modules/audio_device/test_audio_device_impl.h" #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/task_queue/task_queue_factory.h" #include "api/units/time_delta.h" #include "modules/audio_device/include/test_audio_device.h" #include "rtc_base/checks.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_queue.h" #include "rtc_base/task_utils/repeating_task.h" namespace webrtc { @@ -59,11 +58,10 @@ TestAudioDevice::TestAudioDevice( } AudioDeviceGeneric::InitStatus TestAudioDevice::Init() { - task_queue_ = - std::make_unique(task_queue_factory_->CreateTaskQueue( - "TestAudioDeviceModuleImpl", TaskQueueFactory::Priority::NORMAL)); + task_queue_ = task_queue_factory_->CreateTaskQueue( + "TestAudioDeviceModuleImpl", TaskQueueFactory::Priority::NORMAL); - RepeatingTaskHandle::Start(task_queue_->Get(), [this]() { + RepeatingTaskHandle::Start(task_queue_.get(), [this]() { ProcessAudio(); return TimeDelta::Micros(process_interval_us_); }); @@ -171,7 +169,7 @@ void TestAudioDevice::ProcessAudio() { audio_buffer_->SetRecordedBuffer( recording_buffer_.data(), recording_buffer_.size() / capturer_->NumChannels(), - absl::make_optional(rtc::TimeNanos())); + std::make_optional(TimeNanos())); 
audio_buffer_->DeliverRecordedData(); } if (!keep_capturing) { @@ -186,7 +184,7 @@ void TestAudioDevice::ProcessAudio() { size_t samples_out = samples_per_channel * renderer_->NumChannels(); RTC_CHECK_LE(samples_out, playout_buffer_.size()); const bool keep_rendering = renderer_->Render( - rtc::ArrayView(playout_buffer_.data(), samples_out)); + ArrayView(playout_buffer_.data(), samples_out)); if (!keep_rendering) { rendering_ = false; } diff --git a/modules/audio_device/test_audio_device_impl.h b/modules/audio_device/test_audio_device_impl.h index 36192b7f7f..4eda151da3 100644 --- a/modules/audio_device/test_audio_device_impl.h +++ b/modules/audio_device/test_audio_device_impl.h @@ -14,15 +14,15 @@ #include #include +#include "api/audio/audio_device.h" +#include "api/audio/audio_device_defines.h" +#include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/audio_device_generic.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_device/include/audio_device_defines.h" #include "modules/audio_device/include/test_audio_device.h" #include "rtc_base/buffer.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_queue.h" namespace webrtc { @@ -44,7 +44,7 @@ class TestAudioDevice : public AudioDeviceGeneric { // Retrieve the currently utilized audio layer int32_t ActiveAudioLayer( - AudioDeviceModule::AudioLayer& audioLayer) const override { + AudioDeviceModule::AudioLayer& /* audioLayer */) const override { return 0; } @@ -56,26 +56,26 @@ class TestAudioDevice : public AudioDeviceGeneric { // Device enumeration int16_t PlayoutDevices() override { return 0; } int16_t RecordingDevices() override { return 0; } - int32_t PlayoutDeviceName(uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) override { + int32_t PlayoutDeviceName(uint16_t /* index */, + char /* name */[kAdmMaxDeviceNameSize], + char /* guid */[kAdmMaxGuidSize]) override { return 0; } - int32_t RecordingDeviceName(uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) override { + int32_t RecordingDeviceName(uint16_t /* index */, + char /* name */[kAdmMaxDeviceNameSize], + char /* guid */[kAdmMaxGuidSize]) override { return 0; } // Device selection - int32_t SetPlayoutDevice(uint16_t index) override { return 0; } + int32_t SetPlayoutDevice(uint16_t /* index */) override { return 0; } int32_t SetPlayoutDevice( - AudioDeviceModule::WindowsDeviceType device) override { + AudioDeviceModule::WindowsDeviceType /* device */) override { return 0; } - int32_t SetRecordingDevice(uint16_t index) override { return 0; } + int32_t SetRecordingDevice(uint16_t /* index */) override { return 0; } int32_t SetRecordingDevice( - AudioDeviceModule::WindowsDeviceType device) override { + AudioDeviceModule::WindowsDeviceType /* device */) override { return 0; } @@ -102,42 +102,54 @@ class TestAudioDevice : public AudioDeviceGeneric { bool MicrophoneIsInitialized() const override { return true; } // Speaker volume controls - int32_t SpeakerVolumeIsAvailable(bool& available) override { return 0; } - int32_t SetSpeakerVolume(uint32_t volume) override { return 0; } - int32_t SpeakerVolume(uint32_t& volume) const override { return 0; } - int32_t MaxSpeakerVolume(uint32_t& maxVolume) const override { return 0; } - int32_t MinSpeakerVolume(uint32_t& minVolume) const override { return 0; } + int32_t SpeakerVolumeIsAvailable(bool& /* available */) 
override { return 0; } + int32_t SetSpeakerVolume(uint32_t /* volume */) override { return 0; } + int32_t SpeakerVolume(uint32_t& /* volume */) const override { return 0; } + int32_t MaxSpeakerVolume(uint32_t& /* maxVolume */) const override { + return 0; + } + int32_t MinSpeakerVolume(uint32_t& /* minVolume */) const override { + return 0; + } // Microphone volume controls - int32_t MicrophoneVolumeIsAvailable(bool& available) override { return 0; } - int32_t SetMicrophoneVolume(uint32_t volume) override { return 0; } - int32_t MicrophoneVolume(uint32_t& volume) const override { return 0; } - int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const override { return 0; } - int32_t MinMicrophoneVolume(uint32_t& minVolume) const override { return 0; } + int32_t MicrophoneVolumeIsAvailable(bool& /* available */) override { + return 0; + } + int32_t SetMicrophoneVolume(uint32_t /* volume */) override { return 0; } + int32_t MicrophoneVolume(uint32_t& /* volume */) const override { return 0; } + int32_t MaxMicrophoneVolume(uint32_t& /* maxVolume */) const override { + return 0; + } + int32_t MinMicrophoneVolume(uint32_t& /* minVolume */) const override { + return 0; + } // Speaker mute control - int32_t SpeakerMuteIsAvailable(bool& available) override { return 0; } - int32_t SetSpeakerMute(bool enable) override { return 0; } - int32_t SpeakerMute(bool& enabled) const override { return 0; } + int32_t SpeakerMuteIsAvailable(bool& /* available */) override { return 0; } + int32_t SetSpeakerMute(bool /* enable */) override { return 0; } + int32_t SpeakerMute(bool& /* enabled */) const override { return 0; } // Microphone mute control - int32_t MicrophoneMuteIsAvailable(bool& available) override { return 0; } - int32_t SetMicrophoneMute(bool enable) override { return 0; } - int32_t MicrophoneMute(bool& enabled) const override { return 0; } + int32_t MicrophoneMuteIsAvailable(bool& /* available */) override { + return 0; + } + int32_t SetMicrophoneMute(bool /* enable */) override { return 0; } + int32_t MicrophoneMute(bool& /* enabled */) const override { return 0; } // Stereo support int32_t StereoPlayoutIsAvailable(bool& available) override { available = false; return 0; } - int32_t SetStereoPlayout(bool enable) override { return 0; } - int32_t StereoPlayout(bool& enabled) const override { return 0; } + int32_t SetStereoPlayout(bool /* enable */) override { return 0; } + int32_t StereoPlayout(bool& /* enabled */) const override { return 0; } int32_t StereoRecordingIsAvailable(bool& available) override { available = false; return 0; } - int32_t SetStereoRecording(bool enable) override { return 0; } - int32_t StereoRecording(bool& enabled) const override { return 0; } + int32_t SetStereoRecording(bool /* enable */) override { return 0; } + int32_t StereoRecording(bool& /* enabled */) const override { return 0; } // Delay information and control int32_t PlayoutDelay(uint16_t& delayMS) const override { @@ -151,9 +163,9 @@ class TestAudioDevice : public AudioDeviceGeneric { bool BuiltInNSIsAvailable() const override { return false; } // Windows Core Audio and Android only. - int32_t EnableBuiltInAEC(bool enable) override { return -1; } - int32_t EnableBuiltInAGC(bool enable) override { return -1; } - int32_t EnableBuiltInNS(bool enable) override { return -1; } + int32_t EnableBuiltInAEC(bool /* enable */) override { return -1; } + int32_t EnableBuiltInAGC(bool /* enable */) override { return -1; } + int32_t EnableBuiltInNS(bool /* enable */) override { return -1; } // Play underrun count. 
int32_t GetPlayoutUnderrunCount() const override { return -1; } @@ -189,8 +201,8 @@ class TestAudioDevice : public AudioDeviceGeneric { bool capturing_initialized_ RTC_GUARDED_BY(lock_) = false; std::vector playout_buffer_ RTC_GUARDED_BY(lock_); - rtc::BufferT recording_buffer_ RTC_GUARDED_BY(lock_); - std::unique_ptr task_queue_; + BufferT recording_buffer_ RTC_GUARDED_BY(lock_); + std::unique_ptr task_queue_; }; } // namespace webrtc diff --git a/modules/audio_device/test_audio_device_impl_test.cc b/modules/audio_device/test_audio_device_impl_test.cc index e81bb2f807..f8e9137458 100644 --- a/modules/audio_device/test_audio_device_impl_test.cc +++ b/modules/audio_device/test_audio_device_impl_test.cc @@ -10,16 +10,16 @@ #include "modules/audio_device/test_audio_device_impl.h" #include +#include #include -#include "absl/types/optional.h" +#include "api/audio/audio_device.h" +#include "api/audio/audio_device_defines.h" #include "api/task_queue/task_queue_factory.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/audio_device_generic.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_device/include/audio_device_defines.h" #include "modules/audio_device/include/test_audio_device.h" #include "rtc_base/checks.h" #include "rtc_base/synchronization/mutex.h" @@ -42,17 +42,17 @@ class TestAudioTransport : public AudioTransport { ~TestAudioTransport() override = default; int32_t RecordedDataIsAvailable( - const void* audioSamples, + const void* /* audioSamples */, size_t samples_per_channel, size_t bytes_per_sample, size_t number_of_channels, uint32_t samples_per_second, - uint32_t total_delay_ms, - int32_t clock_drift, - uint32_t current_mic_level, - bool key_pressed, + uint32_t /* total_delay_ms */, + int32_t /* clock_drift */, + uint32_t /* current_mic_level */, + bool /* key_pressed */, uint32_t& new_mic_level, - absl::optional estimated_capture_time_ns) override { + std::optional /* estimated_capture_time_ns */) override { new_mic_level = 1; if (mode_ != Mode::kRecording) { @@ -97,26 +97,26 @@ class TestAudioTransport : public AudioTransport { return 0; } - int32_t RecordedDataIsAvailable(const void* audio_samples, - size_t samples_per_channel, - size_t bytes_per_sample, - size_t number_of_channels, - uint32_t samples_per_second, - uint32_t total_delay_ms, - int32_t clockDrift, - uint32_t current_mic_level, - bool key_pressed, - uint32_t& new_mic_level) override { + int32_t RecordedDataIsAvailable(const void* /* audio_samples */, + size_t /* samples_per_channel */, + size_t /* bytes_per_sample */, + size_t /* number_of_channels */, + uint32_t /* samples_per_second */, + uint32_t /* total_delay_ms */, + int32_t /* clockDrift */, + uint32_t /* current_mic_level */, + bool /* key_pressed */, + uint32_t& /* new_mic_level */) override { RTC_CHECK(false) << "This methods should be never executed"; } - void PullRenderData(int bits_per_sample, - int sample_rate, - size_t number_of_channels, - size_t number_of_frames, - void* audio_data, - int64_t* elapsed_time_ms, - int64_t* ntp_time_ms) override { + void PullRenderData(int /* bits_per_sample */, + int /* sample_rate */, + size_t /* number_of_channels */, + size_t /* number_of_frames */, + void* /* audio_data */, + int64_t* /* elapsed_time_ms */, + int64_t* /* ntp_time_ms */) override { RTC_CHECK(false) << "This methods should be never executed"; } diff --git a/modules/audio_device/win/audio_device_core_win.cc 
b/modules/audio_device/win/audio_device_core_win.cc index aa8b6a9ebe..015a0a394f 100644 --- a/modules/audio_device/win/audio_device_core_win.cc +++ b/modules/audio_device/win/audio_device_core_win.cc @@ -19,7 +19,7 @@ #pragma message(">> INFO: Windows Core Audio is not supported in VS 2003") #endif -#include "modules/audio_device/audio_device_config.h" +#include "modules/audio_device/audio_device_config.h" // IWYU pragma: keep #ifdef WEBRTC_WINDOWS_CORE_AUDIO_BUILD @@ -1850,7 +1850,7 @@ int32_t AudioDeviceWindowsCore::InitPlayout() { RTC_LOG(LS_VERBOSE) << "Audio Engine's current rendering mix format:"; // format type RTC_LOG(LS_VERBOSE) << "wFormatTag : 0x" - << rtc::ToHex(pWfxOut->wFormatTag) << " (" + << webrtc::ToHex(pWfxOut->wFormatTag) << " (" << pWfxOut->wFormatTag << ")"; // number of channels (i.e. mono, stereo...) RTC_LOG(LS_VERBOSE) << "nChannels : " << pWfxOut->nChannels; @@ -1925,8 +1925,8 @@ int32_t AudioDeviceWindowsCore::InitPlayout() { RTC_LOG(LS_VERBOSE) << "VoE selected this rendering format:"; RTC_LOG(LS_VERBOSE) << "wFormatTag : 0x" - << rtc::ToHex(Wfx.wFormatTag) << " (" << Wfx.wFormatTag - << ")"; + << webrtc::ToHex(Wfx.wFormatTag) << " (" + << Wfx.wFormatTag << ")"; RTC_LOG(LS_VERBOSE) << "nChannels : " << Wfx.nChannels; RTC_LOG(LS_VERBOSE) << "nSamplesPerSec : " << Wfx.nSamplesPerSec; RTC_LOG(LS_VERBOSE) << "nAvgBytesPerSec : " << Wfx.nAvgBytesPerSec; @@ -2094,8 +2094,8 @@ int32_t AudioDeviceWindowsCore::InitRecordingDMO() { << "AudioDeviceBuffer must be attached before streaming can start"; } - _mediaBuffer = rtc::make_ref_counted(_recBlockSize * - _recAudioFrameSize); + _mediaBuffer = webrtc::make_ref_counted(_recBlockSize * + _recAudioFrameSize); // Optional, but if called, must be after media types are set. hr = _dmo->AllocateStreamingResources(); @@ -2167,7 +2167,7 @@ int32_t AudioDeviceWindowsCore::InitRecording() { RTC_LOG(LS_VERBOSE) << "Audio Engine's current capturing mix format:"; // format type RTC_LOG(LS_VERBOSE) << "wFormatTag : 0x" - << rtc::ToHex(pWfxIn->wFormatTag) << " (" + << webrtc::ToHex(pWfxIn->wFormatTag) << " (" << pWfxIn->wFormatTag << ")"; // number of channels (i.e. mono, stereo...) RTC_LOG(LS_VERBOSE) << "nChannels : " << pWfxIn->nChannels; @@ -2240,7 +2240,7 @@ int32_t AudioDeviceWindowsCore::InitRecording() { RTC_LOG(LS_VERBOSE) << "VoE selected this capturing format:"; RTC_LOG(LS_VERBOSE) << "wFormatTag : 0x" - << rtc::ToHex(Wfx.Format.wFormatTag) << " (" + << webrtc::ToHex(Wfx.Format.wFormatTag) << " (" << Wfx.Format.wFormatTag << ")"; RTC_LOG(LS_VERBOSE) << "nChannels : " << Wfx.Format.nChannels; RTC_LOG(LS_VERBOSE) << "nSamplesPerSec : " << Wfx.Format.nSamplesPerSec; @@ -2661,7 +2661,7 @@ DWORD AudioDeviceWindowsCore::DoRenderThread() { return 1; } - rtc::SetCurrentThreadName("webrtc_core_audio_render_thread"); + webrtc::SetCurrentThreadName("webrtc_core_audio_render_thread"); // Use Multimedia Class Scheduler Service (MMCSS) to boost the thread // priority. @@ -2923,7 +2923,7 @@ DWORD AudioDeviceWindowsCore::DoRenderThread() { DWORD AudioDeviceWindowsCore::InitCaptureThreadPriority() { _hMmTask = NULL; - rtc::SetCurrentThreadName("webrtc_core_audio_capture_thread"); + webrtc::SetCurrentThreadName("webrtc_core_audio_capture_thread"); // Use Multimedia Class Scheduler Service (MMCSS) to boost the thread // priority. @@ -3300,7 +3300,7 @@ DWORD AudioDeviceWindowsCore::DoCaptureThread() { // client. 
RTC_LOG(LS_ERROR) << "IAudioCaptureClient::GetBuffer returned" " AUDCLNT_E_BUFFER_ERROR, hr = 0x" - << rtc::ToHex(hr); + << webrtc::ToHex(hr); goto Exit; } @@ -3384,7 +3384,7 @@ int AudioDeviceWindowsCore::SetDMOProperties() { HRESULT hr = S_OK; RTC_DCHECK(_dmo); - rtc::scoped_refptr ps; + webrtc::scoped_refptr ps; { IPropertyStore* ptrPS = NULL; hr = _dmo->QueryInterface(IID_IPropertyStore, @@ -3763,7 +3763,7 @@ int32_t AudioDeviceWindowsCore::_GetDefaultDeviceIndex(EDataFlow dir, *index = -1; for (UINT i = 0; i < count; i++) { memset(szDeviceID, 0, sizeof(szDeviceID)); - rtc::scoped_refptr device; + webrtc::scoped_refptr device; { IMMDevice* ptrDevice = NULL; hr = collection->Item(i, &ptrDevice); @@ -3816,7 +3816,7 @@ int32_t AudioDeviceWindowsCore::_GetDeviceName(IMMDevice* pDevice, hr = pDevice->OpenPropertyStore(STGM_READ, &pProps); if (FAILED(hr)) { RTC_LOG(LS_ERROR) << "IMMDevice::OpenPropertyStore failed, hr = 0x" - << rtc::ToHex(hr); + << webrtc::ToHex(hr); } } @@ -3828,7 +3828,7 @@ int32_t AudioDeviceWindowsCore::_GetDeviceName(IMMDevice* pDevice, hr = pProps->GetValue(PKEY_Device_FriendlyName, &varName); if (FAILED(hr)) { RTC_LOG(LS_ERROR) << "IPropertyStore::GetValue failed, hr = 0x" - << rtc::ToHex(hr); + << webrtc::ToHex(hr); } } @@ -3836,7 +3836,7 @@ int32_t AudioDeviceWindowsCore::_GetDeviceName(IMMDevice* pDevice, hr = E_FAIL; RTC_LOG(LS_ERROR) << "IPropertyStore::GetValue returned no value," " hr = 0x" - << rtc::ToHex(hr); + << webrtc::ToHex(hr); } if ((SUCCEEDED(hr)) && (VT_LPWSTR != varName.vt)) { @@ -3844,7 +3844,7 @@ int32_t AudioDeviceWindowsCore::_GetDeviceName(IMMDevice* pDevice, hr = E_UNEXPECTED; RTC_LOG(LS_ERROR) << "IPropertyStore::GetValue returned unexpected" " type, hr = 0x" - << rtc::ToHex(hr); + << webrtc::ToHex(hr); } if (SUCCEEDED(hr) && (varName.pwszVal != NULL)) { @@ -4035,16 +4035,16 @@ int32_t AudioDeviceWindowsCore::_EnumerateEndpointDevicesAll( hr = pEndpoint->GetState(&dwState); CONTINUE_ON_ERROR(hr); if (dwState & DEVICE_STATE_ACTIVE) - RTC_LOG(LS_VERBOSE) << "state (0x" << rtc::ToHex(dwState) + RTC_LOG(LS_VERBOSE) << "state (0x" << webrtc::ToHex(dwState) << ") : *ACTIVE*"; if (dwState & DEVICE_STATE_DISABLED) - RTC_LOG(LS_VERBOSE) << "state (0x" << rtc::ToHex(dwState) + RTC_LOG(LS_VERBOSE) << "state (0x" << webrtc::ToHex(dwState) << ") : DISABLED"; if (dwState & DEVICE_STATE_NOTPRESENT) - RTC_LOG(LS_VERBOSE) << "state (0x" << rtc::ToHex(dwState) + RTC_LOG(LS_VERBOSE) << "state (0x" << webrtc::ToHex(dwState) << ") : NOTPRESENT"; if (dwState & DEVICE_STATE_UNPLUGGED) - RTC_LOG(LS_VERBOSE) << "state (0x" << rtc::ToHex(dwState) + RTC_LOG(LS_VERBOSE) << "state (0x" << webrtc::ToHex(dwState) << ") : UNPLUGGED"; // Check the hardware volume capabilities. 
@@ -4056,15 +4056,15 @@ int32_t AudioDeviceWindowsCore::_EnumerateEndpointDevicesAll( CONTINUE_ON_ERROR(hr); if (dwHwSupportMask & ENDPOINT_HARDWARE_SUPPORT_VOLUME) // The audio endpoint device supports a hardware volume control - RTC_LOG(LS_VERBOSE) << "hwmask (0x" << rtc::ToHex(dwHwSupportMask) + RTC_LOG(LS_VERBOSE) << "hwmask (0x" << webrtc::ToHex(dwHwSupportMask) << ") : HARDWARE_SUPPORT_VOLUME"; if (dwHwSupportMask & ENDPOINT_HARDWARE_SUPPORT_MUTE) // The audio endpoint device supports a hardware mute control - RTC_LOG(LS_VERBOSE) << "hwmask (0x" << rtc::ToHex(dwHwSupportMask) + RTC_LOG(LS_VERBOSE) << "hwmask (0x" << webrtc::ToHex(dwHwSupportMask) << ") : HARDWARE_SUPPORT_MUTE"; if (dwHwSupportMask & ENDPOINT_HARDWARE_SUPPORT_METER) // The audio endpoint device supports a hardware peak meter - RTC_LOG(LS_VERBOSE) << "hwmask (0x" << rtc::ToHex(dwHwSupportMask) + RTC_LOG(LS_VERBOSE) << "hwmask (0x" << webrtc::ToHex(dwHwSupportMask) << ") : HARDWARE_SUPPORT_METER"; // Check the channel count (#channels in the audio stream that enters or @@ -4162,7 +4162,7 @@ void AudioDeviceWindowsCore::_TraceCOMError(HRESULT hr) const { RTC_LOG(LS_ERROR) << "Core Audio method failed (hr=" << hr << ")"; StringCchPrintfW(buf, MAXERRORLENGTH, L"Error details: "); StringCchCatW(buf, MAXERRORLENGTH, errorText); - RTC_LOG(LS_ERROR) << rtc::ToUtf8(buf); + RTC_LOG(LS_ERROR) << ToUtf8(buf); } bool AudioDeviceWindowsCore::KeyPressed() const { diff --git a/modules/audio_device/win/audio_device_core_win.h b/modules/audio_device/win/audio_device_core_win.h index 380effb449..d09bed9939 100644 --- a/modules/audio_device/win/audio_device_core_win.h +++ b/modules/audio_device/win/audio_device_core_win.h @@ -238,8 +238,8 @@ class AudioDeviceWindowsCore : public AudioDeviceGeneric { ISimpleAudioVolume* _ptrRenderSimpleVolume; // DirectX Media Object (DMO) for the built-in AEC. 
- rtc::scoped_refptr _dmo; - rtc::scoped_refptr _mediaBuffer; + webrtc::scoped_refptr _dmo; + webrtc::scoped_refptr _mediaBuffer; bool _builtInAecEnabled; HANDLE _hRenderSamplesReadyEvent; diff --git a/modules/audio_device/win/audio_device_module_win.cc b/modules/audio_device/win/audio_device_module_win.cc index a36c40735e..d97b0309fa 100644 --- a/modules/audio_device/win/audio_device_module_win.cc +++ b/modules/audio_device/win/audio_device_module_win.cc @@ -13,10 +13,10 @@ #include #include +#include "api/audio/audio_device.h" #include "api/make_ref_counted.h" #include "api/sequence_checker.h" #include "modules/audio_device/audio_device_buffer.h" -#include "modules/audio_device/include/audio_device.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/string_utils.h" @@ -195,11 +195,11 @@ class WindowsAudioDeviceModule : public AudioDeviceModuleForTest { int ret = -1; if (guid != nullptr) { ret = output_->DeviceName(index, &name_str, &guid_str); - rtc::strcpyn(guid, kAdmMaxGuidSize, guid_str.c_str()); + webrtc::strcpyn(guid, kAdmMaxGuidSize, guid_str.c_str()); } else { ret = output_->DeviceName(index, &name_str, nullptr); } - rtc::strcpyn(name, kAdmMaxDeviceNameSize, name_str.c_str()); + webrtc::strcpyn(name, kAdmMaxDeviceNameSize, name_str.c_str()); return ret; } int32_t RecordingDeviceName(uint16_t index, @@ -212,11 +212,11 @@ class WindowsAudioDeviceModule : public AudioDeviceModuleForTest { int ret = -1; if (guid != nullptr) { ret = input_->DeviceName(index, &name_str, &guid_str); - rtc::strcpyn(guid, kAdmMaxGuidSize, guid_str.c_str()); + webrtc::strcpyn(guid, kAdmMaxGuidSize, guid_str.c_str()); } else { ret = input_->DeviceName(index, &name_str, nullptr); } - rtc::strcpyn(name, kAdmMaxDeviceNameSize, name_str.c_str()); + webrtc::strcpyn(name, kAdmMaxDeviceNameSize, name_str.c_str()); return ret; } @@ -508,13 +508,13 @@ class WindowsAudioDeviceModule : public AudioDeviceModuleForTest { } // namespace -rtc::scoped_refptr +webrtc::scoped_refptr CreateWindowsCoreAudioAudioDeviceModuleFromInputAndOutput( std::unique_ptr audio_input, std::unique_ptr audio_output, TaskQueueFactory* task_queue_factory) { RTC_DLOG(LS_INFO) << __FUNCTION__; - return rtc::make_ref_counted( + return webrtc::make_ref_counted( std::move(audio_input), std::move(audio_output), task_queue_factory); } diff --git a/modules/audio_device/win/audio_device_module_win.h b/modules/audio_device/win/audio_device_module_win.h index 1ed0b25620..73dd3aa83f 100644 --- a/modules/audio_device/win/audio_device_module_win.h +++ b/modules/audio_device/win/audio_device_module_win.h @@ -14,9 +14,9 @@ #include #include +#include "api/audio/audio_device.h" #include "api/scoped_refptr.h" #include "api/task_queue/task_queue_factory.h" -#include "modules/audio_device/include/audio_device.h" namespace webrtc { @@ -74,7 +74,7 @@ class AudioOutput { // Combines an AudioInput and an AudioOutput implementation to build an // AudioDeviceModule. Hides most parts of the full ADM interface. 
-rtc::scoped_refptr +webrtc::scoped_refptr CreateWindowsCoreAudioAudioDeviceModuleFromInputAndOutput( std::unique_ptr audio_input, std::unique_ptr audio_output, diff --git a/modules/audio_device/win/core_audio_base_win.cc b/modules/audio_device/win/core_audio_base_win.cc index dc8526b625..4d73ea59d4 100644 --- a/modules/audio_device/win/core_audio_base_win.cc +++ b/modules/audio_device/win/core_audio_base_win.cc @@ -165,7 +165,7 @@ CoreAudioBase::CoreAudioBase(Direction direction, RTC_DLOG(LS_INFO) << __FUNCTION__ << "[" << DirectionToString(direction) << "]"; RTC_DLOG(LS_INFO) << "Automatic restart: " << automatic_restart; - RTC_DLOG(LS_INFO) << "Windows version: " << rtc::rtc_win::GetVersion(); + RTC_DLOG(LS_INFO) << "Windows version: " << webrtc::rtc_win::GetVersion(); // Create the event which the audio engine will signal each time a buffer // becomes ready to be processed by the client. @@ -196,7 +196,7 @@ bool CoreAudioBase::IsRestarting() const { } int64_t CoreAudioBase::TimeSinceStart() const { - return rtc::TimeSince(start_time_); + return webrtc::TimeSince(start_time_); } int CoreAudioBase::NumberOfActiveDevices() const { @@ -410,7 +410,7 @@ bool CoreAudioBase::Init() { // preferred number of channels is larger than two; i.e., initialize the // stream in stereo even if the preferred configuration is multi-channel. if (params.channels() <= 2) { - format->nChannels = rtc::dchecked_cast(params.channels()); + format->nChannels = webrtc::dchecked_cast(params.channels()); } else { // TODO(henrika): ensure that this approach works on different multi-channel // devices. Verified on: @@ -421,13 +421,14 @@ bool CoreAudioBase::Init() { format->nChannels = 2; } format->nSamplesPerSec = params.sample_rate(); - format->wBitsPerSample = rtc::dchecked_cast(params.bits_per_sample()); + format->wBitsPerSample = + webrtc::dchecked_cast(params.bits_per_sample()); format->nBlockAlign = (format->wBitsPerSample / 8) * format->nChannels; format->nAvgBytesPerSec = format->nSamplesPerSec * format->nBlockAlign; format->cbSize = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX); // Add the parts which are unique for the WAVE_FORMAT_EXTENSIBLE structure. format_.Samples.wValidBitsPerSample = - rtc::dchecked_cast(params.bits_per_sample()); + webrtc::dchecked_cast(params.bits_per_sample()); format_.dwChannelMask = format->nChannels == 1 ? KSAUDIO_SPEAKER_MONO : KSAUDIO_SPEAKER_STEREO; format_.SubFormat = KSDATAFORMAT_SUBTYPE_PCM; @@ -558,9 +559,10 @@ bool CoreAudioBase::Start() { if (audio_thread_.empty()) { const absl::string_view name = IsInput() ? 
"wasapi_capture_thread" : "wasapi_render_thread"; - audio_thread_ = rtc::PlatformThread::SpawnJoinable( + audio_thread_ = webrtc::PlatformThread::SpawnJoinable( [this] { ThreadRun(); }, name, - rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); + webrtc::ThreadAttributes().SetPriority( + webrtc::ThreadPriority::kRealtime)); RTC_DLOG(LS_INFO) << "Started thread with name: " << name << " and handle: " << *audio_thread_.GetHandle(); } @@ -574,7 +576,7 @@ bool CoreAudioBase::Start() { return false; } - start_time_ = rtc::TimeMillis(); + start_time_ = webrtc::TimeMillis(); num_data_callbacks_ = 0; return true; diff --git a/modules/audio_device/win/core_audio_base_win.h b/modules/audio_device/win/core_audio_base_win.h index 6c1357e059..918c675abc 100644 --- a/modules/audio_device/win/core_audio_base_win.h +++ b/modules/audio_device/win/core_audio_base_win.h @@ -14,10 +14,10 @@ #include #include #include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/sequence_checker.h" #include "modules/audio_device/win/core_audio_utility_win.h" #include "rtc_base/platform_thread.h" @@ -140,7 +140,7 @@ class CoreAudioBase : public IAudioSessionEvents { bool is_active_ = false; int64_t num_data_callbacks_ = 0; int latency_ms_ = 0; - absl::optional sample_rate_; + std::optional sample_rate_; private: const Direction direction_; @@ -159,7 +159,7 @@ class CoreAudioBase : public IAudioSessionEvents { // Set when restart process starts and cleared when restart stops // successfully. Accessed atomically. std::atomic is_restarting_; - rtc::PlatformThread audio_thread_; + webrtc::PlatformThread audio_thread_; Microsoft::WRL::ComPtr audio_session_control_; void StopThread(); diff --git a/modules/audio_device/win/core_audio_input_win.cc b/modules/audio_device/win/core_audio_input_win.cc index 17790dafc4..8c52dd9f01 100644 --- a/modules/audio_device/win/core_audio_input_win.cc +++ b/modules/audio_device/win/core_audio_input_win.cc @@ -143,7 +143,7 @@ int CoreAudioInput::InitRecording() { qpc_to_100ns_.reset(); if (::QueryPerformanceFrequency(&ticks_per_sec)) { double qpc_ticks_per_second = - rtc::dchecked_cast(ticks_per_sec.QuadPart); + webrtc::dchecked_cast(ticks_per_sec.QuadPart); qpc_to_100ns_ = 10000000.0 / qpc_ticks_per_second; } @@ -317,7 +317,7 @@ bool CoreAudioInput::OnDataCallback(uint64_t device_frequency) { // Update input delay estimate but only about once per second to save // resources. The estimate is usually stable. if (num_data_callbacks_ % 100 == 0) { - absl::optional opt_record_delay_ms; + std::optional opt_record_delay_ms; // TODO(henrika): note that FineAudioBuffer adds latency as well. opt_record_delay_ms = EstimateLatencyMillis(capture_time_100ns); if (opt_record_delay_ms) { @@ -349,15 +349,15 @@ bool CoreAudioInput::OnDataCallback(uint64_t device_frequency) { // Treat all of the data in the packet as silence and ignore the actual // data values when AUDCLNT_BUFFERFLAGS_SILENT is set. if (flags & AUDCLNT_BUFFERFLAGS_SILENT) { - rtc::ExplicitZeroMemory(audio_data, - format_.Format.nBlockAlign * num_frames_to_read); + webrtc::ExplicitZeroMemory( + audio_data, format_.Format.nBlockAlign * num_frames_to_read); RTC_DLOG(LS_WARNING) << "Captured audio is replaced by silence"; } else { // Copy recorded audio in `audio_data` to the WebRTC sink using the // FineAudioBuffer object. 
fine_audio_buffer_->DeliverRecordedData( - rtc::MakeArrayView(reinterpret_cast(audio_data), - format_.Format.nChannels * num_frames_to_read), + webrtc::MakeArrayView(reinterpret_cast(audio_data), + format_.Format.nChannels * num_frames_to_read), latency_ms_); } @@ -392,10 +392,10 @@ bool CoreAudioInput::OnErrorCallback(ErrorType error) { return true; } -absl::optional CoreAudioInput::EstimateLatencyMillis( +std::optional CoreAudioInput::EstimateLatencyMillis( uint64_t capture_time_100ns) { if (!qpc_to_100ns_) { - return absl::nullopt; + return std::nullopt; } // Input parameter `capture_time_100ns` contains the performance counter at // the time that the audio endpoint device recorded the device position of @@ -406,7 +406,7 @@ absl::optional CoreAudioInput::EstimateLatencyMillis( // - subtracting `capture_time_100ns` from now_time_100ns. LARGE_INTEGER perf_counter_now = {}; if (!::QueryPerformanceCounter(&perf_counter_now)) { - return absl::nullopt; + return std::nullopt; } uint64_t qpc_now_raw = perf_counter_now.QuadPart; uint64_t now_time_100ns = qpc_now_raw * (*qpc_to_100ns_); diff --git a/modules/audio_device/win/core_audio_input_win.h b/modules/audio_device/win/core_audio_input_win.h index be290f9f4e..ecf536a9fd 100644 --- a/modules/audio_device/win/core_audio_input_win.h +++ b/modules/audio_device/win/core_audio_input_win.h @@ -12,9 +12,9 @@ #define MODULES_AUDIO_DEVICE_WIN_CORE_AUDIO_INPUT_WIN_H_ #include +#include #include -#include "absl/types/optional.h" #include "modules/audio_device/win/audio_device_module_win.h" #include "modules/audio_device/win/core_audio_base_win.h" @@ -58,12 +58,12 @@ class CoreAudioInput final : public CoreAudioBase, public AudioInput { void ReleaseCOMObjects(); bool OnDataCallback(uint64_t device_frequency); bool OnErrorCallback(ErrorType error); - absl::optional EstimateLatencyMillis(uint64_t capture_time_100ns); + std::optional EstimateLatencyMillis(uint64_t capture_time_100ns); bool HandleStreamDisconnected(); std::unique_ptr fine_audio_buffer_; Microsoft::WRL::ComPtr audio_capture_client_; - absl::optional qpc_to_100ns_; + std::optional qpc_to_100ns_; }; } // namespace webrtc_win diff --git a/modules/audio_device/win/core_audio_output_win.cc b/modules/audio_device/win/core_audio_output_win.cc index c92fedf0e9..6db9870b37 100644 --- a/modules/audio_device/win/core_audio_output_win.cc +++ b/modules/audio_device/win/core_audio_output_win.cc @@ -336,8 +336,8 @@ bool CoreAudioOutput::OnDataCallback(uint64_t device_frequency) { // Get audio data from WebRTC and write it to the allocated buffer in // `audio_data`. The playout latency is not updated for each callback. fine_audio_buffer_->GetPlayoutData( - rtc::MakeArrayView(reinterpret_cast(audio_data), - num_requested_frames * format_.Format.nChannels), + webrtc::MakeArrayView(reinterpret_cast(audio_data), + num_requested_frames * format_.Format.nChannels), latency_ms_); // Release the buffer space acquired in IAudioRenderClient::GetBuffer. @@ -376,7 +376,7 @@ int CoreAudioOutput::EstimateOutputLatencyMillis(uint64_t device_frequency) { // Convert latency in number of frames into milliseconds. 
webrtc::TimeDelta delay = - webrtc::TimeDelta::Micros(delay_frames * rtc::kNumMicrosecsPerSec / + webrtc::TimeDelta::Micros(delay_frames * webrtc::kNumMicrosecsPerSec / format_.Format.nSamplesPerSec); delay_ms = delay.ms(); } diff --git a/modules/audio_device/win/core_audio_utility_win.cc b/modules/audio_device/win/core_audio_utility_win.cc index e4e2864db5..b574c287db 100644 --- a/modules/audio_device/win/core_audio_utility_win.cc +++ b/modules/audio_device/win/core_audio_utility_win.cc @@ -206,7 +206,7 @@ bool LoadAudiosesDll() { L"%WINDIR%\\system32\\audioses.dll"; wchar_t path[MAX_PATH] = {0}; ExpandEnvironmentStringsW(kAudiosesDLL, path, arraysize(path)); - RTC_DLOG(LS_INFO) << rtc::ToUtf8(path); + RTC_DLOG(LS_INFO) << webrtc::ToUtf8(path); return (LoadLibraryExW(path, nullptr, LOAD_WITH_ALTERED_SEARCH_PATH) != nullptr); } @@ -215,7 +215,7 @@ bool LoadAvrtDll() { static const wchar_t* const kAvrtDLL = L"%WINDIR%\\system32\\Avrt.dll"; wchar_t path[MAX_PATH] = {0}; ExpandEnvironmentStringsW(kAvrtDLL, path, arraysize(path)); - RTC_DLOG(LS_INFO) << rtc::ToUtf8(path); + RTC_DLOG(LS_INFO) << webrtc::ToUtf8(path); return (LoadLibraryExW(path, nullptr, LOAD_WITH_ALTERED_SEARCH_PATH) != nullptr); } @@ -267,7 +267,7 @@ bool IsSupportedInternal() { if (!device_enumerator) { RTC_LOG(LS_ERROR) << "Failed to create Core Audio device enumerator on thread with ID " - << rtc::CurrentThreadId(); + << webrtc::CurrentThreadId(); return false; } @@ -314,7 +314,7 @@ ComPtr CreateDeviceInternal(absl::string_view device_id, } else { // Ask for an audio endpoint device that is identified by an endpoint ID // string. - error = device_enum->GetDevice(rtc::ToUtf16(device_id).c_str(), + error = device_enum->GetDevice(webrtc::ToUtf16(device_id).c_str(), audio_endpoint_device.GetAddressOf()); if (FAILED(error.Error())) { RTC_LOG(LS_ERROR) << "IMMDeviceEnumerator::GetDevice failed: " @@ -338,7 +338,7 @@ std::string GetDeviceIdInternal(IMMDevice* device) { // Example: "{0.0.1.00000000}.{8db6020f-18e3-4f25-b6f5-7726c9122574}". 
LPWSTR device_id; if (SUCCEEDED(device->GetId(&device_id))) { - std::string device_id_utf8 = rtc::ToUtf8(device_id, wcslen(device_id)); + std::string device_id_utf8 = webrtc::ToUtf8(device_id, wcslen(device_id)); CoTaskMemFree(device_id); return device_id_utf8; } else { @@ -362,8 +362,8 @@ std::string GetDeviceFriendlyNameInternal(IMMDevice* device) { if (friendly_name_pv.get().vt == VT_LPWSTR && friendly_name_pv.get().pwszVal) { - return rtc::ToUtf8(friendly_name_pv.get().pwszVal, - wcslen(friendly_name_pv.get().pwszVal)); + return webrtc::ToUtf8(friendly_name_pv.get().pwszVal, + wcslen(friendly_name_pv.get().pwszVal)); } else { return std::string(); } @@ -705,9 +705,9 @@ int NumberOfActiveDevices(EDataFlow data_flow) { uint32_t GetAudioClientVersion() { uint32_t version = 1; - if (rtc::rtc_win::GetVersion() >= rtc::rtc_win::VERSION_WIN10) { + if (webrtc::rtc_win::GetVersion() >= webrtc::rtc_win::VERSION_WIN10) { version = 3; - } else if (rtc::rtc_win::GetVersion() >= rtc::rtc_win::VERSION_WIN8) { + } else if (webrtc::rtc_win::GetVersion() >= webrtc::rtc_win::VERSION_WIN8) { version = 2; } return version; @@ -851,7 +851,7 @@ int NumberOfActiveSessions(IMMDevice* device) { LPWSTR display_name; if (SUCCEEDED(session_control->GetDisplayName(&display_name))) { RTC_DLOG(LS_INFO) << "display name: " - << rtc::ToUtf8(display_name, wcslen(display_name)); + << webrtc::ToUtf8(display_name, wcslen(display_name)); CoTaskMemFree(display_name); } @@ -940,7 +940,7 @@ HRESULT SetClientProperties(IAudioClient2* client) { // an appropriate interface to use for communications scenarios. // This interface is mainly meant for pro audio scenarios. // props.Options |= AUDCLNT_STREAMOPTIONS_MATCH_FORMAT; - RTC_DLOG(LS_INFO) << "options: 0x" << rtc::ToHex(props.Options); + RTC_DLOG(LS_INFO) << "options: 0x" << webrtc::ToHex(props.Options); #endif error = client->SetClientProperties(&props); if (FAILED(error.Error())) { @@ -1205,7 +1205,7 @@ HRESULT SharedModeInitialize(IAudioClient* client, stream_flags |= AUDCLNT_STREAMFLAGS_AUTOCONVERTPCM; stream_flags |= AUDCLNT_STREAMFLAGS_SRC_DEFAULT_QUALITY; } - RTC_DLOG(LS_INFO) << "stream_flags: 0x" << rtc::ToHex(stream_flags); + RTC_DLOG(LS_INFO) << "stream_flags: 0x" << webrtc::ToHex(stream_flags); // Initialize the shared mode client for minimal delay if `buffer_duration` // is 0 or possibly a higher delay (more robust) if `buffer_duration` is @@ -1294,7 +1294,7 @@ HRESULT SharedModeInitializeLowLatency(IAudioClient3* client, stream_flags |= AUDCLNT_STREAMFLAGS_AUTOCONVERTPCM; stream_flags |= AUDCLNT_STREAMFLAGS_SRC_DEFAULT_QUALITY; } - RTC_DLOG(LS_INFO) << "stream_flags: 0x" << rtc::ToHex(stream_flags); + RTC_DLOG(LS_INFO) << "stream_flags: 0x" << webrtc::ToHex(stream_flags); // Initialize the shared mode client for lowest possible latency. // It is assumed that GetSharedModeEnginePeriod() has been used to query the @@ -1478,7 +1478,7 @@ bool FillRenderEndpointBufferWithSilence(IAudioClient* client, std::string WaveFormatToString(const WaveFormatWrapper format) { char ss_buf[1024]; - rtc::SimpleStringBuilder ss(ss_buf); + webrtc::SimpleStringBuilder ss(ss_buf); // Start with the WAVEFORMATEX part (which always exists). 
ss.AppendFormat("wFormatTag: %s (0x%X)", WaveFormatTagToString(format->wFormatTag), @@ -1519,7 +1519,7 @@ double FramesToMilliseconds(uint32_t num_frames, uint16_t sample_rate) { std::string ErrorToString(const _com_error& error) { char ss_buf[1024]; - rtc::SimpleStringBuilder ss(ss_buf); + webrtc::SimpleStringBuilder ss(ss_buf); ss.AppendFormat("(HRESULT: 0x%08X)", error.Error()); return ss.str(); } diff --git a/modules/audio_device/win/core_audio_utility_win.h b/modules/audio_device/win/core_audio_utility_win.h index 454e60bf31..bbf5710c9c 100644 --- a/modules/audio_device/win/core_audio_utility_win.h +++ b/modules/audio_device/win/core_audio_utility_win.h @@ -23,9 +23,9 @@ #include #include "absl/strings/string_view.h" +#include "api/audio/audio_device_defines.h" #include "api/units/time_delta.h" #include "modules/audio_device/audio_device_name.h" -#include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/logging.h" #include "rtc_base/string_utils.h" @@ -84,7 +84,8 @@ class ScopedMMCSSRegistration { } explicit ScopedMMCSSRegistration(const wchar_t* task_name) { - RTC_DLOG(LS_INFO) << "ScopedMMCSSRegistration: " << rtc::ToUtf8(task_name); + RTC_DLOG(LS_INFO) << "ScopedMMCSSRegistration: " + << webrtc::ToUtf8(task_name); // Register the calling thread with MMCSS for the supplied `task_name`. DWORD mmcss_task_index = 0; mmcss_handle_ = AvSetMmThreadCharacteristicsW(task_name, &mmcss_task_index); diff --git a/modules/audio_device/win/core_audio_utility_win_unittest.cc b/modules/audio_device/win/core_audio_utility_win_unittest.cc index fc4a610eef..c035cc4d56 100644 --- a/modules/audio_device/win/core_audio_utility_win_unittest.cc +++ b/modules/audio_device/win/core_audio_utility_win_unittest.cc @@ -63,9 +63,9 @@ class CoreAudioUtilityWinTest : public ::testing::Test { EXPECT_TRUE(com_init_.Succeeded()); // Configure logging. 
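// Editor's note: SimpleStringBuilder and the UTF conversion helpers move from
// the rtc:: to the webrtc:: namespace in the hunks above. A minimal sketch of
// the fixed-buffer builder, assuming it is declared in
// rtc_base/strings/string_builder.h:
#include <string>

#include "rtc_base/strings/string_builder.h"

std::string DescribeFormat(int sample_rate_hz, int num_channels) {
  char buf[128];
  webrtc::SimpleStringBuilder sb(buf);  // Writes into the stack buffer.
  sb.AppendFormat("rate: %d Hz, channels: %d", sample_rate_hz, num_channels);
  return sb.str();
}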
- rtc::LogMessage::LogToDebug(rtc::LS_INFO); - rtc::LogMessage::LogTimestamps(); - rtc::LogMessage::LogThreads(); + webrtc::LogMessage::LogToDebug(webrtc::LS_INFO); + webrtc::LogMessage::LogTimestamps(); + webrtc::LogMessage::LogThreads(); } virtual ~CoreAudioUtilityWinTest() {} @@ -314,8 +314,8 @@ TEST_F(CoreAudioUtilityWinTest, GetOutputDeviceNames) { } TEST_F(CoreAudioUtilityWinTest, CreateSessionManager2) { - ABORT_TEST_IF_NOT(DevicesAvailable() && - rtc::rtc_win::GetVersion() >= rtc::rtc_win::VERSION_WIN7); + ABORT_TEST_IF_NOT(DevicesAvailable() && webrtc::rtc_win::GetVersion() >= + webrtc::rtc_win::VERSION_WIN7); EDataFlow data_flow[] = {eRender, eCapture}; @@ -333,8 +333,8 @@ TEST_F(CoreAudioUtilityWinTest, CreateSessionManager2) { } TEST_F(CoreAudioUtilityWinTest, CreateSessionEnumerator) { - ABORT_TEST_IF_NOT(DevicesAvailable() && - rtc::rtc_win::GetVersion() >= rtc::rtc_win::VERSION_WIN7); + ABORT_TEST_IF_NOT(DevicesAvailable() && webrtc::rtc_win::GetVersion() >= + webrtc::rtc_win::VERSION_WIN7); EDataFlow data_flow[] = {eRender, eCapture}; @@ -359,8 +359,8 @@ TEST_F(CoreAudioUtilityWinTest, CreateSessionEnumerator) { } TEST_F(CoreAudioUtilityWinTest, NumberOfActiveSessions) { - ABORT_TEST_IF_NOT(DevicesAvailable() && - rtc::rtc_win::GetVersion() >= rtc::rtc_win::VERSION_WIN7); + ABORT_TEST_IF_NOT(DevicesAvailable() && webrtc::rtc_win::GetVersion() >= + webrtc::rtc_win::VERSION_WIN7); EDataFlow data_flow[] = {eRender, eCapture}; diff --git a/modules/audio_mixer/BUILD.gn b/modules/audio_mixer/BUILD.gn index fb038bf677..38c4e95abb 100644 --- a/modules/audio_mixer/BUILD.gn +++ b/modules/audio_mixer/BUILD.gn @@ -43,6 +43,8 @@ rtc_library("audio_mixer_impl") { "../../api:scoped_refptr", "../../api/audio:audio_frame_api", "../../api/audio:audio_mixer_api", + "../../api/audio:audio_processing", + "../../api/audio:audio_processing", "../../audio/utility:audio_frame_operations", "../../common_audio", "../../rtc_base:checks", @@ -55,7 +57,6 @@ rtc_library("audio_mixer_impl") { "../../rtc_base/synchronization:mutex", "../../system_wrappers", "../../system_wrappers:metrics", - "../audio_processing:api", "../audio_processing:apm_logging", "../audio_processing:audio_frame_view", "../audio_processing/agc2:fixed_digital", @@ -109,7 +110,6 @@ if (rtc_include_tests) { "audio_mixer_impl_unittest.cc", "frame_combiner_unittest.cc", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] deps = [ ":audio_frame_manipulator", ":audio_mixer_impl", diff --git a/modules/audio_mixer/audio_frame_manipulator.cc b/modules/audio_mixer/audio_frame_manipulator.cc index 3100271cfb..94f03b2037 100644 --- a/modules/audio_mixer/audio_frame_manipulator.cc +++ b/modules/audio_mixer/audio_frame_manipulator.cc @@ -79,8 +79,9 @@ void RemixFrame(size_t target_number_of_channels, AudioFrame* frame) { // instead of guessing based on number of channels. 
const ChannelLayout output_layout( GuessChannelLayout(target_number_of_channels)); - ChannelMixer mixer(GuessChannelLayout(frame->num_channels()), - output_layout); + const ChannelLayout input_layout(GuessChannelLayout(frame->num_channels())); + ChannelMixer mixer(input_layout, frame->num_channels(), output_layout, + target_number_of_channels); mixer.Transform(frame); RTC_DCHECK_EQ(frame->channel_layout(), output_layout); } diff --git a/modules/audio_mixer/audio_mixer_impl.cc b/modules/audio_mixer/audio_mixer_impl.cc index faa2b1e1ee..7a877a4024 100644 --- a/modules/audio_mixer/audio_mixer_impl.cc +++ b/modules/audio_mixer/audio_mixer_impl.cc @@ -69,17 +69,17 @@ AudioMixerImpl::AudioMixerImpl( AudioMixerImpl::~AudioMixerImpl() {} -rtc::scoped_refptr AudioMixerImpl::Create() { +scoped_refptr AudioMixerImpl::Create() { return Create(std::unique_ptr( new DefaultOutputRateCalculator()), /*use_limiter=*/true); } -rtc::scoped_refptr AudioMixerImpl::Create( +scoped_refptr AudioMixerImpl::Create( std::unique_ptr output_rate_calculator, bool use_limiter) { - return rtc::make_ref_counted( - std::move(output_rate_calculator), use_limiter); + return make_ref_counted(std::move(output_rate_calculator), + use_limiter); } void AudioMixerImpl::Mix(size_t number_of_channels, @@ -97,8 +97,8 @@ void AudioMixerImpl::Mix(size_t number_of_channels, }); int output_frequency = output_rate_calculator_->CalculateOutputRateFromRange( - rtc::ArrayView(helper_containers_->preferred_rates.data(), - number_of_streams)); + ArrayView(helper_containers_->preferred_rates.data(), + number_of_streams)); frame_combiner_.Combine(GetAudioFromSources(output_frequency), number_of_channels, output_frequency, @@ -125,7 +125,7 @@ void AudioMixerImpl::RemoveSource(Source* audio_source) { audio_source_list_.erase(iter); } -rtc::ArrayView AudioMixerImpl::GetAudioFromSources( +ArrayView AudioMixerImpl::GetAudioFromSources( int output_frequency) { int audio_to_mix_count = 0; for (auto& source_and_status : audio_source_list_) { @@ -144,8 +144,8 @@ rtc::ArrayView AudioMixerImpl::GetAudioFromSources( &source_and_status->audio_frame; } } - return rtc::ArrayView( - helper_containers_->audio_to_mix.data(), audio_to_mix_count); + return ArrayView(helper_containers_->audio_to_mix.data(), + audio_to_mix_count); } void AudioMixerImpl::UpdateSourceCountStats() { diff --git a/modules/audio_mixer/audio_mixer_impl.h b/modules/audio_mixer/audio_mixer_impl.h index 8319487018..fa90929385 100644 --- a/modules/audio_mixer/audio_mixer_impl.h +++ b/modules/audio_mixer/audio_mixer_impl.h @@ -35,9 +35,9 @@ class AudioMixerImpl : public AudioMixer { // AudioProcessing only accepts 10 ms frames. static const int kFrameDurationInMs = 10; - static rtc::scoped_refptr Create(); + static scoped_refptr Create(); - static rtc::scoped_refptr Create( + static scoped_refptr Create( std::unique_ptr output_rate_calculator, bool use_limiter); @@ -64,7 +64,7 @@ class AudioMixerImpl : public AudioMixer { void UpdateSourceCountStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Fetches audio frames to mix from sources. 
- rtc::ArrayView GetAudioFromSources(int output_frequency) + ArrayView GetAudioFromSources(int output_frequency) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // The critical section lock guards audio source insertion and diff --git a/modules/audio_mixer/audio_mixer_impl_unittest.cc b/modules/audio_mixer/audio_mixer_impl_unittest.cc index 641c966570..19b304f03a 100644 --- a/modules/audio_mixer/audio_mixer_impl_unittest.cc +++ b/modules/audio_mixer/audio_mixer_impl_unittest.cc @@ -15,11 +15,11 @@ #include #include #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/audio/audio_mixer.h" #include "api/rtp_packet_info.h" #include "api/rtp_packet_infos.h" @@ -61,7 +61,7 @@ void ResetFrame(AudioFrame* frame) { std::string ProduceDebugText(int sample_rate_hz, int number_of_channels, int number_of_sources) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Sample rate: " << sample_rate_hz << " "; ss << "Number of channels: " << number_of_channels << " "; ss << "Number of sources: " << number_of_sources; @@ -106,8 +106,7 @@ class MockMixerAudioSource : public ::testing::NiceMock { AudioFrame* audio_frame) { audio_frame->CopyFrom(fake_frame_); audio_frame->sample_rate_hz_ = sample_rate_hz; - audio_frame->samples_per_channel_ = - rtc::CheckedDivExact(sample_rate_hz, 100); + audio_frame->samples_per_channel_ = CheckedDivExact(sample_rate_hz, 100); audio_frame->packet_infos_ = packet_infos_; return fake_info(); } @@ -121,7 +120,7 @@ class CustomRateCalculator : public OutputRateCalculator { public: explicit CustomRateCalculator(int rate) : rate_(rate) {} int CalculateOutputRateFromRange( - rtc::ArrayView preferred_rates) override { + ArrayView /* preferred_rates */) override { return rate_; } @@ -131,7 +130,7 @@ class CustomRateCalculator : public OutputRateCalculator { void MixMonoAtGivenNativeRate(int native_sample_rate, AudioFrame* mix_frame, - rtc::scoped_refptr mixer, + scoped_refptr mixer, MockMixerAudioSource* audio_source) { ON_CALL(*audio_source, PreferredSampleRate()) .WillByDefault(Return(native_sample_rate)); @@ -142,6 +141,7 @@ void MixMonoAtGivenNativeRate(int native_sample_rate, } TEST(AudioMixer, UpdatesSourceCountHistogram) { + metrics::Reset(); constexpr int kAudioSourcesGroup1 = 5; constexpr int kAudioSourcesGroup2 = 3; @@ -297,7 +297,7 @@ TEST(AudioMixer, ParticipantNumberOfChannels) { // can be done on a different thread. 
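// Editor's note: with the rtc:: prefix dropped above, mixer clients use the
// webrtc::-qualified scoped_refptr and factory directly. A minimal usage
// sketch based on the interfaces shown in this diff (audio_mixer.h,
// audio_mixer_impl.h); error handling is omitted:
#include "api/audio/audio_frame.h"
#include "api/audio/audio_mixer.h"
#include "api/scoped_refptr.h"
#include "modules/audio_mixer/audio_mixer_impl.h"

void MixOnce(webrtc::AudioMixer::Source* source) {
  webrtc::scoped_refptr<webrtc::AudioMixer> mixer =
      webrtc::AudioMixerImpl::Create();
  mixer->AddSource(source);  // Register a source to be mixed.
  webrtc::AudioFrame mixed;
  mixer->Mix(/*number_of_channels=*/1, &mixed);  // Produce one 10 ms frame.
  mixer->RemoveSource(source);
}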
TEST(AudioMixer, ConstructFromOtherThread) { TaskQueueForTest init_queue("init"); - rtc::scoped_refptr mixer; + scoped_refptr mixer; init_queue.SendTask([&mixer]() { mixer = AudioMixerImpl::Create(); }); MockMixerAudioSource participant; @@ -447,7 +447,7 @@ TEST(AudioMixer, ShouldIncludeRtpPacketInfoFromAllMixedSources) { const uint32_t kCsrc3 = 23; const int kAudioLevel0 = 10; const int kAudioLevel1 = 40; - const absl::optional kAudioLevel2 = absl::nullopt; + const std::optional kAudioLevel2 = std::nullopt; const uint32_t kRtpTimestamp0 = 300; const uint32_t kRtpTimestamp1 = 400; const Timestamp kReceiveTime0 = Timestamp::Millis(10); @@ -482,7 +482,7 @@ class HighOutputRateCalculator : public OutputRateCalculator { public: static const int kDefaultFrequency = 76000; int CalculateOutputRateFromRange( - rtc::ArrayView preferred_sample_rates) override { + ArrayView /* preferred_sample_rates */) override { return kDefaultFrequency; } ~HighOutputRateCalculator() override {} @@ -516,13 +516,8 @@ TEST(AudioMixerDeathTest, MultipleChannelsAndHighRate) { other_frame->samples_per_channel_ = kSamplesPerChannel; mixer->AddSource(&other_source); -#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) +#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) EXPECT_DEATH(mixer->Mix(kNumberOfChannels, &frame_for_mixing), ""); -#elif !RTC_DCHECK_IS_ON - mixer->Mix(kNumberOfChannels, &frame_for_mixing); - EXPECT_EQ(frame_for_mixing.num_channels_, kNumberOfChannels); - EXPECT_EQ(frame_for_mixing.sample_rate_hz_, - HighOutputRateCalculator::kDefaultFrequency); #endif } diff --git a/modules/audio_mixer/audio_mixer_test.cc b/modules/audio_mixer/audio_mixer_test.cc index 3ee28a7937..b93547e3c3 100644 --- a/modules/audio_mixer/audio_mixer_test.cc +++ b/modules/audio_mixer/audio_mixer_test.cc @@ -78,7 +78,7 @@ class FilePlayingSource : public AudioMixer::Source { bool FileHasEnded() const { return file_has_ended_; } std::string ToString() const { - rtc::StringBuilder ss; + StringBuilder ss; ss << "{rate: " << sample_rate_hz_ << ", channels: " << number_of_channels_ << ", samples_tot: " << wav_reader_->num_samples() << "}"; return ss.Release(); @@ -112,7 +112,7 @@ const std::vector parse_input_files() { int main(int argc, char* argv[]) { absl::ParseCommandLine(argc, argv); - rtc::scoped_refptr mixer( + webrtc::scoped_refptr mixer( webrtc::AudioMixerImpl::Create( std::unique_ptr( new webrtc::DefaultOutputRateCalculator()), diff --git a/modules/audio_mixer/default_output_rate_calculator.cc b/modules/audio_mixer/default_output_rate_calculator.cc index 5f24b653a3..576e83836d 100644 --- a/modules/audio_mixer/default_output_rate_calculator.cc +++ b/modules/audio_mixer/default_output_rate_calculator.cc @@ -13,13 +13,13 @@ #include #include -#include "modules/audio_processing/include/audio_processing.h" +#include "api/audio/audio_processing.h" #include "rtc_base/checks.h" namespace webrtc { int DefaultOutputRateCalculator::CalculateOutputRateFromRange( - rtc::ArrayView preferred_sample_rates) { + ArrayView preferred_sample_rates) { if (preferred_sample_rates.empty()) { return DefaultOutputRateCalculator::kDefaultFrequency; } diff --git a/modules/audio_mixer/default_output_rate_calculator.h b/modules/audio_mixer/default_output_rate_calculator.h index 02a3b5c37b..88e4435e42 100644 --- a/modules/audio_mixer/default_output_rate_calculator.h +++ b/modules/audio_mixer/default_output_rate_calculator.h @@ -27,7 +27,7 @@ class DefaultOutputRateCalculator : public OutputRateCalculator { // 
AudioProcessing::NativeRate. If `preferred_sample_rates` is // empty, returns `kDefaultFrequency`. int CalculateOutputRateFromRange( - rtc::ArrayView preferred_sample_rates) override; + ArrayView preferred_sample_rates) override; ~DefaultOutputRateCalculator() override {} }; diff --git a/modules/audio_mixer/frame_combiner.cc b/modules/audio_mixer/frame_combiner.cc index 96c62f6b0d..b801ca7f8e 100644 --- a/modules/audio_mixer/frame_combiner.cc +++ b/modules/audio_mixer/frame_combiner.cc @@ -20,13 +20,13 @@ #include #include "api/array_view.h" +#include "api/audio/audio_processing.h" #include "api/rtp_packet_info.h" #include "api/rtp_packet_infos.h" #include "common_audio/include/audio_util.h" #include "modules/audio_mixer/audio_frame_manipulator.h" #include "modules/audio_mixer/audio_mixer_impl.h" #include "modules/audio_processing/include/audio_frame_view.h" -#include "modules/audio_processing/include/audio_processing.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" @@ -36,17 +36,13 @@ namespace webrtc { namespace { -using MixingBuffer = - std::array, - FrameCombiner::kMaximumNumberOfChannels>; - -void SetAudioFrameFields(rtc::ArrayView mix_list, +void SetAudioFrameFields(ArrayView mix_list, size_t number_of_channels, int sample_rate, - size_t number_of_streams, + size_t /* number_of_streams */, AudioFrame* audio_frame_for_mixing) { - const size_t samples_per_channel = static_cast( - (sample_rate * webrtc::AudioMixerImpl::kFrameDurationInMs) / 1000); + const size_t samples_per_channel = + SampleRateToDefaultChannelSize(sample_rate); // TODO(minyue): Issue bugs.webrtc.org/3390. // Audio frame timestamp. The 'timestamp_' field is set to dummy @@ -78,63 +74,54 @@ void SetAudioFrameFields(rtc::ArrayView mix_list, } } -void MixFewFramesWithNoLimiter(rtc::ArrayView mix_list, +void MixFewFramesWithNoLimiter(ArrayView mix_list, AudioFrame* audio_frame_for_mixing) { if (mix_list.empty()) { audio_frame_for_mixing->Mute(); return; } RTC_DCHECK_LE(mix_list.size(), 1); - std::copy(mix_list[0]->data(), - mix_list[0]->data() + - mix_list[0]->num_channels_ * mix_list[0]->samples_per_channel_, - audio_frame_for_mixing->mutable_data()); + InterleavedView dst = audio_frame_for_mixing->mutable_data( + mix_list[0]->samples_per_channel_, mix_list[0]->num_channels_); + CopySamples(dst, mix_list[0]->data_view()); } -void MixToFloatFrame(rtc::ArrayView mix_list, - size_t samples_per_channel, - size_t number_of_channels, - MixingBuffer* mixing_buffer) { - RTC_DCHECK_LE(samples_per_channel, FrameCombiner::kMaximumChannelSize); - RTC_DCHECK_LE(number_of_channels, FrameCombiner::kMaximumNumberOfChannels); +void MixToFloatFrame(ArrayView mix_list, + DeinterleavedView& mixing_buffer) { + const size_t number_of_channels = NumChannels(mixing_buffer); // Clear the mixing buffer. - *mixing_buffer = {}; + ArrayView raw_data = mixing_buffer.data(); + ClearSamples(raw_data); // Convert to FloatS16 and mix. 
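// Editor's note: SampleRateToDefaultChannelSize() above replaces the explicit
// (sample_rate * kFrameDurationInMs) / 1000 arithmetic; both compute the
// per-channel sample count of a 10 ms frame. A hypothetical constexpr
// equivalent, shown only to make the arithmetic concrete:
#include <cstddef>

constexpr size_t SamplesPer10MsFrame(int sample_rate_hz) {
  return static_cast<size_t>(sample_rate_hz / 100);  // 48000 Hz -> 480.
}
static_assert(SamplesPer10MsFrame(48000) == 480, "10 ms at 48 kHz");
static_assert(SamplesPer10MsFrame(16000) == 160, "10 ms at 16 kHz");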
for (size_t i = 0; i < mix_list.size(); ++i) { - const AudioFrame* const frame = mix_list[i]; - const int16_t* const frame_data = frame->data(); - for (size_t j = 0; j < std::min(number_of_channels, - FrameCombiner::kMaximumNumberOfChannels); - ++j) { - for (size_t k = 0; k < std::min(samples_per_channel, - FrameCombiner::kMaximumChannelSize); - ++k) { - (*mixing_buffer)[j][k] += frame_data[number_of_channels * k + j]; + InterleavedView frame_data = mix_list[i]->data_view(); + RTC_CHECK(!frame_data.empty()); + for (size_t j = 0; j < number_of_channels; ++j) { + MonoView channel = mixing_buffer[j]; + for (size_t k = 0; k < SamplesPerChannel(channel); ++k) { + channel[k] += frame_data[number_of_channels * k + j]; } } } } -void RunLimiter(AudioFrameView mixing_buffer_view, Limiter* limiter) { - const size_t sample_rate = mixing_buffer_view.samples_per_channel() * 1000 / - AudioMixerImpl::kFrameDurationInMs; - // TODO(alessiob): Avoid calling SetSampleRate every time. - limiter->SetSampleRate(sample_rate); - limiter->Process(mixing_buffer_view); +void RunLimiter(DeinterleavedView deinterleaved, Limiter* limiter) { + limiter->SetSamplesPerChannel(deinterleaved.samples_per_channel()); + limiter->Process(deinterleaved); } // Both interleaves and rounds. -void InterleaveToAudioFrame(AudioFrameView mixing_buffer_view, +void InterleaveToAudioFrame(DeinterleavedView deinterleaved, AudioFrame* audio_frame_for_mixing) { - const size_t number_of_channels = mixing_buffer_view.num_channels(); - const size_t samples_per_channel = mixing_buffer_view.samples_per_channel(); - int16_t* const mixing_data = audio_frame_for_mixing->mutable_data(); + InterleavedView mixing_data = audio_frame_for_mixing->mutable_data( + deinterleaved.samples_per_channel(), deinterleaved.num_channels()); // Put data in the result frame. - for (size_t i = 0; i < number_of_channels; ++i) { - for (size_t j = 0; j < samples_per_channel; ++j) { - mixing_data[number_of_channels * j + i] = - FloatS16ToS16(mixing_buffer_view.channel(i)[j]); + for (size_t i = 0; i < mixing_data.num_channels(); ++i) { + auto channel = deinterleaved[i]; + for (size_t j = 0; j < mixing_data.samples_per_channel(); ++j) { + mixing_data[mixing_data.num_channels() * j + i] = + FloatS16ToS16(channel[j]); } } } @@ -145,10 +132,7 @@ constexpr size_t FrameCombiner::kMaximumChannelSize; FrameCombiner::FrameCombiner(bool use_limiter) : data_dumper_(new ApmDataDumper(0)), - mixing_buffer_( - std::make_unique, - kMaximumNumberOfChannels>>()), - limiter_(static_cast(48000), data_dumper_.get(), "AudioMixer"), + limiter_(data_dumper_.get(), kMaximumChannelSize, "AudioMixer"), use_limiter_(use_limiter) { static_assert(kMaximumChannelSize * kMaximumNumberOfChannels <= AudioFrame::kMaxDataSizeSamples, @@ -157,23 +141,32 @@ FrameCombiner::FrameCombiner(bool use_limiter) FrameCombiner::~FrameCombiner() = default; -void FrameCombiner::Combine(rtc::ArrayView mix_list, +void FrameCombiner::Combine(ArrayView mix_list, size_t number_of_channels, int sample_rate, size_t number_of_streams, AudioFrame* audio_frame_for_mixing) { RTC_DCHECK(audio_frame_for_mixing); + RTC_DCHECK_GT(sample_rate, 0); + + // Note: `mix_list` is allowed to be empty. + // See FrameCombiner.CombiningZeroFramesShouldProduceSilence. + + // Make sure to cap `number_of_channels` to the kMaximumNumberOfChannels + // limits since processing from hereon out will be bound by them. 
+ number_of_channels = std::min(number_of_channels, kMaximumNumberOfChannels); SetAudioFrameFields(mix_list, number_of_channels, sample_rate, number_of_streams, audio_frame_for_mixing); - const size_t samples_per_channel = static_cast( - (sample_rate * webrtc::AudioMixerImpl::kFrameDurationInMs) / 1000); + size_t samples_per_channel = SampleRateToDefaultChannelSize(sample_rate); +#if RTC_DCHECK_IS_ON for (const auto* frame : mix_list) { RTC_DCHECK_EQ(samples_per_channel, frame->samples_per_channel_); RTC_DCHECK_EQ(sample_rate, frame->sample_rate_hz_); } +#endif // The 'num_channels_' field of frames in 'mix_list' could be // different from 'number_of_channels'. @@ -186,28 +179,23 @@ void FrameCombiner::Combine(rtc::ArrayView mix_list, return; } - MixToFloatFrame(mix_list, samples_per_channel, number_of_channels, - mixing_buffer_.get()); - - const size_t output_number_of_channels = - std::min(number_of_channels, kMaximumNumberOfChannels); - const size_t output_samples_per_channel = - std::min(samples_per_channel, kMaximumChannelSize); - - // Put float data in an AudioFrameView. - std::array channel_pointers{}; - for (size_t i = 0; i < output_number_of_channels; ++i) { - channel_pointers[i] = &(*mixing_buffer_.get())[i][0]; - } - AudioFrameView mixing_buffer_view(&channel_pointers[0], - output_number_of_channels, - output_samples_per_channel); + // Make sure that the size of the view based on the desired + // `samples_per_channel` and `number_of_channels` doesn't exceed the size of + // the `mixing_buffer_` buffer. + RTC_DCHECK_LE(samples_per_channel, kMaximumChannelSize); + // Since the above check is a DCHECK only, clamp down on `samples_per_channel` + // to make sure we don't exceed the buffer size in non-dcheck builds. + // See also FrameCombinerDeathTest.DebugBuildCrashesWithHighRate. + samples_per_channel = std::min(samples_per_channel, kMaximumChannelSize); + DeinterleavedView deinterleaved( + mixing_buffer_.data(), samples_per_channel, number_of_channels); + MixToFloatFrame(mix_list, deinterleaved); if (use_limiter_) { - RunLimiter(mixing_buffer_view, &limiter_); + RunLimiter(deinterleaved, &limiter_); } - InterleaveToAudioFrame(mixing_buffer_view, audio_frame_for_mixing); + InterleaveToAudioFrame(deinterleaved, audio_frame_for_mixing); } } // namespace webrtc diff --git a/modules/audio_mixer/frame_combiner.h b/modules/audio_mixer/frame_combiner.h index 6185b29f8a..fd7d40ec13 100644 --- a/modules/audio_mixer/frame_combiner.h +++ b/modules/audio_mixer/frame_combiner.h @@ -32,7 +32,7 @@ class FrameCombiner { // because 'mix_list' can be empty. The parameter // 'number_of_streams' is used for determining whether to pass the // data through a limiter. 
- void Combine(rtc::ArrayView mix_list, + void Combine(ArrayView mix_list, size_t number_of_channels, int sample_rate, size_t number_of_streams, @@ -42,14 +42,12 @@ class FrameCombiner { static constexpr size_t kMaximumNumberOfChannels = 8; static constexpr size_t kMaximumChannelSize = 48 * 10; - using MixingBuffer = std::array, - kMaximumNumberOfChannels>; - private: std::unique_ptr data_dumper_; - std::unique_ptr mixing_buffer_; Limiter limiter_; const bool use_limiter_; + std::array + mixing_buffer_ = {}; }; } // namespace webrtc diff --git a/modules/audio_mixer/frame_combiner_unittest.cc b/modules/audio_mixer/frame_combiner_unittest.cc index 6c64d0852a..b481035b02 100644 --- a/modules/audio_mixer/frame_combiner_unittest.cc +++ b/modules/audio_mixer/frame_combiner_unittest.cc @@ -13,11 +13,11 @@ #include #include #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/rtp_packet_info.h" #include "api/rtp_packet_infos.h" @@ -48,7 +48,7 @@ struct FrameCombinerConfig { std::string ProduceDebugText(int sample_rate_hz, int number_of_channels, int number_of_sources) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Sample rate: " << sample_rate_hz << " ,"; ss << "number of channels: " << number_of_channels << " ,"; ss << "number of sources: " << number_of_sources; @@ -56,7 +56,7 @@ std::string ProduceDebugText(int sample_rate_hz, } std::string ProduceDebugText(const FrameCombinerConfig& config) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Sample rate: " << config.sample_rate_hz << " ,"; ss << "number of channels: " << config.number_of_channels << " ,"; ss << "limiter active: " << (config.use_limiter ? "on" : "off") << " ,"; @@ -82,7 +82,7 @@ void SetUpFrames(int sample_rate_hz, int number_of_channels) { frame2.packet_infos_ = RtpPacketInfos({packet_info2, packet_info3}); for (auto* frame : {&frame1, &frame2}) { - frame->UpdateFrame(0, nullptr, rtc::CheckedDivExact(sample_rate_hz, 100), + frame->UpdateFrame(0, nullptr, CheckedDivExact(sample_rate_hz, 100), sample_rate_hz, AudioFrame::kNormalSpeech, AudioFrame::kVadActive, number_of_channels); } @@ -139,8 +139,9 @@ TEST(FrameCombiner, ContainsAllRtpPacketInfos) { } } -// There are DCHECKs in place to check for invalid parameters. -TEST(FrameCombinerDeathTest, DebugBuildCrashesWithManyChannels) { +#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) +// There are CHECKs in place to check for invalid parameters. +TEST(FrameCombinerDeathTest, BuildCrashesWithManyChannels) { FrameCombiner combiner(true); for (const int rate : {8000, 18000, 34000, 48000}) { for (const int number_of_channels : {10, 20, 21}) { @@ -149,7 +150,9 @@ TEST(FrameCombinerDeathTest, DebugBuildCrashesWithManyChannels) { continue; } const std::vector all_frames = {&frame1, &frame2}; - SetUpFrames(rate, number_of_channels); + // With an unsupported channel count, this will crash in + // `AudioFrame::UpdateFrame`. 
+ EXPECT_DEATH(SetUpFrames(rate, number_of_channels), ""); const int number_of_frames = 2; SCOPED_TRACE( @@ -157,18 +160,14 @@ TEST(FrameCombinerDeathTest, DebugBuildCrashesWithManyChannels) { const std::vector frames_to_combine( all_frames.begin(), all_frames.begin() + number_of_frames); AudioFrame audio_frame_for_mixing; -#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) EXPECT_DEATH( combiner.Combine(frames_to_combine, number_of_channels, rate, frames_to_combine.size(), &audio_frame_for_mixing), ""); -#elif !RTC_DCHECK_IS_ON - combiner.Combine(frames_to_combine, number_of_channels, rate, - frames_to_combine.size(), &audio_frame_for_mixing); -#endif } } } +#endif // GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) TEST(FrameCombinerDeathTest, DebugBuildCrashesWithHighRate) { FrameCombiner combiner(true); @@ -187,14 +186,13 @@ TEST(FrameCombinerDeathTest, DebugBuildCrashesWithHighRate) { const std::vector frames_to_combine( all_frames.begin(), all_frames.begin() + number_of_frames); AudioFrame audio_frame_for_mixing; -#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) +#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) EXPECT_DEATH( combiner.Combine(frames_to_combine, number_of_channels, rate, frames_to_combine.size(), &audio_frame_for_mixing), - ""); -#elif !RTC_DCHECK_IS_ON - combiner.Combine(frames_to_combine, number_of_channels, rate, - frames_to_combine.size(), &audio_frame_for_mixing); + "") + << "number_of_channels=" << number_of_channels << ", rate=" << rate + << ", frames to combine=" << frames_to_combine.size(); #endif } } @@ -249,7 +247,8 @@ TEST(FrameCombiner, CombiningZeroFramesShouldProduceSilence) { TEST(FrameCombiner, CombiningOneFrameShouldNotChangeFrame) { FrameCombiner combiner(false); for (const int rate : {8000, 10000, 11000, 32000, 44100}) { - for (const int number_of_channels : {1, 2, 4, 8, 10}) { + // kMaxConcurrentChannels is 8. + for (const int number_of_channels : {1, 2, 4, kMaxConcurrentChannels}) { SCOPED_TRACE(ProduceDebugText(rate, number_of_channels, 1)); AudioFrame audio_frame_for_mixing; @@ -320,9 +319,9 @@ TEST(FrameCombiner, GainCurveIsSmoothForAlternatingNumberOfStreams) { config.sample_rate_hz, number_of_streams, &audio_frame_for_mixing); cumulative_change += change_calculator.CalculateGainChange( - rtc::ArrayView(frame1.data(), number_of_samples), - rtc::ArrayView(audio_frame_for_mixing.data(), - number_of_samples)); + ArrayView(frame1.data(), number_of_samples), + ArrayView(audio_frame_for_mixing.data(), + number_of_samples)); } // Check that the gain doesn't vary too much. 
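Editor's note: the FrameCombiner changes above replace the heap-allocated array-of-arrays mixing buffer with view types over a single flat buffer member (mixing_buffer_). The sketch below illustrates the DeinterleavedView usage pattern seen in those hunks; the buffer sizes and the api/audio/audio_view.h header are assumptions inferred from the calls shown in the diff, not a definitive reference.

#include <array>
#include <cstddef>

#include "api/audio/audio_view.h"

void ZeroAllChannels() {
  constexpr size_t kSamplesPerChannel = 480;  // 10 ms at 48 kHz.
  constexpr size_t kNumChannels = 2;
  std::array<float, kSamplesPerChannel * kNumChannels> buffer = {};
  // One flat buffer viewed as kNumChannels contiguous channels.
  webrtc::DeinterleavedView<float> view(buffer.data(), kSamplesPerChannel,
                                        kNumChannels);
  for (size_t ch = 0; ch < view.num_channels(); ++ch) {
    webrtc::MonoView<float> channel = view[ch];
    for (float& sample : channel) {
      sample = 0.0f;
    }
  }
}

Compared with the previous fixed two-dimensional std::array, the flat buffer keeps the combiner's storage in a single member while the view carries the channel and per-channel sample geometry.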
diff --git a/modules/audio_mixer/gain_change_calculator.cc b/modules/audio_mixer/gain_change_calculator.cc index dbd0945239..729ab4b7d4 100644 --- a/modules/audio_mixer/gain_change_calculator.cc +++ b/modules/audio_mixer/gain_change_calculator.cc @@ -23,9 +23,8 @@ namespace { constexpr int16_t kReliabilityThreshold = 100; } // namespace -float GainChangeCalculator::CalculateGainChange( - rtc::ArrayView in, - rtc::ArrayView out) { +float GainChangeCalculator::CalculateGainChange(ArrayView in, + ArrayView out) { RTC_DCHECK_EQ(in.size(), out.size()); std::vector gain(in.size()); @@ -37,9 +36,9 @@ float GainChangeCalculator::LatestGain() const { return last_reliable_gain_; } -void GainChangeCalculator::CalculateGain(rtc::ArrayView in, - rtc::ArrayView out, - rtc::ArrayView gain) { +void GainChangeCalculator::CalculateGain(ArrayView in, + ArrayView out, + ArrayView gain) { RTC_DCHECK_EQ(in.size(), out.size()); RTC_DCHECK_EQ(in.size(), gain.size()); @@ -52,7 +51,7 @@ void GainChangeCalculator::CalculateGain(rtc::ArrayView in, } float GainChangeCalculator::CalculateDifferences( - rtc::ArrayView values) { + ArrayView values) { float res = 0; for (float f : values) { res += fabs(f - last_value_); diff --git a/modules/audio_mixer/gain_change_calculator.h b/modules/audio_mixer/gain_change_calculator.h index 3dde9be61e..b17db3b197 100644 --- a/modules/audio_mixer/gain_change_calculator.h +++ b/modules/audio_mixer/gain_change_calculator.h @@ -22,17 +22,17 @@ class GainChangeCalculator { // The 'out' signal is assumed to be produced from 'in' by applying // a smoothly varying gain. This method computes variations of the // gain and handles special cases when the samples are small. - float CalculateGainChange(rtc::ArrayView in, - rtc::ArrayView out); + float CalculateGainChange(ArrayView in, + ArrayView out); float LatestGain() const; private: - void CalculateGain(rtc::ArrayView in, - rtc::ArrayView out, - rtc::ArrayView gain); + void CalculateGain(ArrayView in, + ArrayView out, + ArrayView gain); - float CalculateDifferences(rtc::ArrayView values); + float CalculateDifferences(ArrayView values); float last_value_ = 0.f; float last_reliable_gain_ = 1.0f; }; diff --git a/modules/audio_mixer/output_rate_calculator.h b/modules/audio_mixer/output_rate_calculator.h index 46b65a8b57..04ab82c12f 100644 --- a/modules/audio_mixer/output_rate_calculator.h +++ b/modules/audio_mixer/output_rate_calculator.h @@ -22,7 +22,7 @@ namespace webrtc { class OutputRateCalculator { public: virtual int CalculateOutputRateFromRange( - rtc::ArrayView preferred_sample_rates) = 0; + ArrayView preferred_sample_rates) = 0; virtual ~OutputRateCalculator() {} }; diff --git a/modules/audio_mixer/sine_wave_generator.cc b/modules/audio_mixer/sine_wave_generator.cc index 591fe14e8c..deb88468d6 100644 --- a/modules/audio_mixer/sine_wave_generator.cc +++ b/modules/audio_mixer/sine_wave_generator.cc @@ -27,7 +27,7 @@ void SineWaveGenerator::GenerateNextFrame(AudioFrame* frame) { for (size_t i = 0; i < frame->samples_per_channel_; ++i) { for (size_t ch = 0; ch < frame->num_channels_; ++ch) { frame_data[frame->num_channels_ * i + ch] = - rtc::saturated_cast(amplitude_ * sinf(phase_)); + saturated_cast(amplitude_ * sinf(phase_)); } phase_ += wave_frequency_hz_ * 2 * kPi / frame->sample_rate_hz_; } diff --git a/modules/audio_processing/BUILD.gn b/modules/audio_processing/BUILD.gn index 64e83a006b..2454689de1 100644 --- a/modules/audio_processing/BUILD.gn +++ b/modules/audio_processing/BUILD.gn @@ -21,30 +21,8 @@ config("apm_debug_dump") { 
rtc_library("api") { visibility = [ "*" ] - sources = [ - "include/audio_processing.cc", - "include/audio_processing.h", - ] - deps = [ - ":audio_frame_view", - ":audio_processing_statistics", - "../../api:array_view", - "../../api:scoped_refptr", - "../../api/audio:aec3_config", - "../../api/audio:audio_frame_api", - "../../api/audio:echo_control", - "../../rtc_base:macromagic", - "../../rtc_base:refcount", - "../../rtc_base:stringutils", - "../../rtc_base/system:arch", - "../../rtc_base/system:file_wrapper", - "../../rtc_base/system:rtc_export", - "agc:gain_control_interface", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] + sources = [ "include/audio_processing.h" ] + deps = [ "../../api/audio:audio_processing" ] } rtc_library("audio_frame_proxies") { @@ -54,9 +32,9 @@ rtc_library("audio_frame_proxies") { "include/audio_frame_proxies.h", ] deps = [ - ":api", ":audio_frame_view", "../../api/audio:audio_frame_api", + "../../api/audio:audio_processing", ] } @@ -77,8 +55,9 @@ rtc_library("audio_buffer") { defines = [] deps = [ - ":api", "../../api:array_view", + "../../api/audio:audio_frame_api", + "../../api/audio:audio_processing", "../../common_audio", "../../common_audio:common_audio_c", "../../rtc_base:checks", @@ -111,12 +90,9 @@ rtc_source_set("aec_dump_interface") { ] deps = [ - ":api", ":audio_frame_view", - ] - absl_deps = [ + "../../api/audio:audio_processing", "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -129,15 +105,17 @@ rtc_library("gain_controller2") { defines = [] deps = [ ":aec_dump_interface", - ":api", ":apm_logging", ":audio_buffer", ":audio_frame_view", + "../../api:field_trials_view", + "../../api/audio:audio_frame_api", + "../../api/audio:audio_processing", + "../../api/environment", "../../common_audio", "../../rtc_base:checks", "../../rtc_base:logging", "../../rtc_base:stringutils", - "../../system_wrappers:field_trial", "agc2:adaptive_digital_gain_controller", "agc2:common", "agc2:cpu_features", @@ -155,7 +133,6 @@ rtc_library("audio_processing") { visibility = [ "*" ] configs += [ ":apm_debug_dump" ] sources = [ - "audio_processing_builder_impl.cc", "audio_processing_impl.cc", "audio_processing_impl.h", "echo_control_mobile_impl.cc", @@ -168,44 +145,46 @@ rtc_library("audio_processing") { defines = [] deps = [ ":aec_dump_interface", - ":api", ":apm_logging", ":audio_buffer", ":audio_frame_proxies", ":audio_frame_view", - ":audio_processing_statistics", ":gain_controller2", ":high_pass_filter", - ":optionally_built_submodule_creators", ":rms_level", "../../api:array_view", + "../../api:field_trials_view", "../../api:function_view", "../../api:make_ref_counted", "../../api/audio:aec3_config", "../../api/audio:audio_frame_api", + "../../api/audio:audio_processing", + "../../api/audio:audio_processing_statistics", "../../api/audio:echo_control", + "../../api/environment", + "../../api/task_queue", "../../audio/utility:audio_frame_operations", + "../../common_audio", "../../common_audio:common_audio_c", + "../../common_audio:fir_filter", + "../../common_audio:fir_filter_factory", "../../common_audio/third_party/ooura:fft_size_256", "../../rtc_base:checks", "../../rtc_base:event_tracer", "../../rtc_base:gtest_prod", - "../../rtc_base:ignore_wundef", "../../rtc_base:logging", "../../rtc_base:macromagic", "../../rtc_base:safe_minmax", "../../rtc_base:sanitizer", "../../rtc_base:swap_queue", "../../rtc_base:timeutils", - 
"../../rtc_base/experiments:field_trial_parser", "../../rtc_base/synchronization:mutex", "../../rtc_base/system:rtc_export", "../../system_wrappers", "../../system_wrappers:denormal_disabler", - "../../system_wrappers:field_trial", "../../system_wrappers:metrics", "aec3", - "aec_dump:aec_dump", + "aec_dump", "aecm:aecm_core", "agc", "agc:gain_control_interface", @@ -213,19 +192,9 @@ rtc_library("audio_processing") { "agc2:input_volume_stats_reporter", "capture_levels_adjuster", "ns", - "transient:transient_suppressor_api", "vad", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] - - deps += [ - "../../common_audio", - "../../common_audio:fir_filter", - "../../common_audio:fir_filter_factory", - "../../system_wrappers", + "//third_party/abseil-cpp/absl/base:nullability", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (rtc_enable_protobuf) { @@ -251,25 +220,13 @@ rtc_library("residual_echo_detector") { "residual_echo_detector.h", ] deps = [ - ":api", ":apm_logging", "../../api:array_view", + "../../api/audio:audio_processing", "../../rtc_base:checks", "../../rtc_base:logging", "../../system_wrappers:metrics", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] -} - -rtc_library("optionally_built_submodule_creators") { - sources = [ - "optionally_built_submodule_creators.cc", - "optionally_built_submodule_creators.h", - ] - deps = [ - "transient:transient_suppressor_api", - "transient:transient_suppressor_impl", - ] } rtc_source_set("rms_level") { @@ -282,22 +239,17 @@ rtc_source_set("rms_level") { "../../api:array_view", "../../rtc_base:checks", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("audio_processing_statistics") { visibility = [ "*" ] - sources = [ - "include/audio_processing_statistics.cc", - "include/audio_processing_statistics.h", - ] - deps = [ "../../rtc_base/system:rtc_export" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + sources = [ "include/audio_processing_statistics.h" ] + deps = [ "../../api/audio:audio_processing_statistics" ] } rtc_source_set("audio_frame_view") { sources = [ "include/audio_frame_view.h" ] - deps = [ "../../api:array_view" ] + deps = [ "../../api/audio:audio_frame_api" ] } if (rtc_enable_protobuf) { @@ -319,10 +271,7 @@ rtc_library("apm_logging") { "../../common_audio", "../../rtc_base:checks", "../../rtc_base:stringutils", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] defines = [] } @@ -333,30 +282,29 @@ if (rtc_include_tests) { sources = [ "include/mock_audio_processing.h" ] deps = [ ":aec_dump_interface", - ":api", ":audio_buffer", ":audio_processing", - ":audio_processing_statistics", + "../../api:scoped_refptr", + "../../api/audio:audio_processing", + "../../api/audio:audio_processing_statistics", + "../../api/environment", + "../../api/task_queue", "../../test:test_support", + "//third_party/abseil-cpp/absl/base:nullability", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } if (!build_with_chromium) { group("audio_processing_tests") { testonly = true - deps = [ - ":audioproc_test_utils", - "transient:click_annotate", - "transient:transient_suppression_test", - ] + deps = [ ":audioproc_test_utils" ] if (rtc_enable_protobuf) { deps += [ ":audioproc_unittest_proto", "aec_dump:aec_dump_unittests", 
"test/conversational_speech", - "test/py_quality_assessment", ] } } @@ -371,12 +319,13 @@ if (rtc_include_tests) { "echo_control_mobile_unittest.cc", "gain_controller2_unittest.cc", "splitting_filter_unittest.cc", + "test/echo_canceller3_config_json_unittest.cc", "test/fake_recording_device_unittest.cc", ] deps = [ + ":aec3_config_json", ":analog_mic_simulation", - ":api", ":apm_logging", ":audio_buffer", ":audio_frame_view", @@ -390,12 +339,16 @@ if (rtc_include_tests) { "../../api:scoped_refptr", "../../api/audio:aec3_config", "../../api/audio:aec3_factory", + "../../api/audio:audio_frame_api", + "../../api/audio:audio_processing", + "../../api/audio:builtin_audio_processing_builder", "../../api/audio:echo_detector_creator", + "../../api/environment", + "../../api/environment:environment_factory", "../../common_audio", "../../common_audio:common_audio_c", "../../rtc_base:checks", "../../rtc_base:gtest_prod", - "../../rtc_base:ignore_wundef", "../../rtc_base:macromagic", "../../rtc_base:platform_thread", "../../rtc_base:protobuf_utils", @@ -435,15 +388,11 @@ if (rtc_include_tests) { "capture_levels_adjuster", "capture_levels_adjuster:capture_levels_adjuster_unittests", "test/conversational_speech:unittest", - "transient:transient_suppression_unittests", "utility:legacy_delay_estimator_unittest", "utility:pffft_wrapper_unittest", "vad:vad_unittests", "//testing/gtest", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] defines = [] @@ -461,18 +410,16 @@ if (rtc_include_tests) { ":audioproc_protobuf_utils", ":audioproc_test_utils", ":audioproc_unittest_proto", - ":optionally_built_submodule_creators", ":residual_echo_detector", ":rms_level", ":runtime_settings_protobuf_utils", "../../api/audio:audio_frame_api", "../../api/audio:echo_control", "../../rtc_base:rtc_base_tests_utils", - "../../rtc_base:rtc_task_queue", "aec_dump", "aec_dump:aec_dump_unittests", + "//third_party/abseil-cpp/absl/flags:flag", ] - absl_deps += [ "//third_party/abseil-cpp/absl/flags:flag" ] sources += [ "audio_processing_impl_locking_unittest.cc", "audio_processing_impl_unittest.cc", @@ -493,7 +440,6 @@ if (rtc_include_tests) { "test/echo_canceller_test_tools.h", "test/echo_canceller_test_tools_unittest.cc", "test/echo_control_mock.h", - "test/test_utils.h", ] } } @@ -508,6 +454,8 @@ if (rtc_include_tests) { ":audio_processing", ":audioproc_test_utils", "../../api:array_view", + "../../api/audio:builtin_audio_processing_builder", + "../../api/environment:environment_factory", "../../api/numerics", "../../api/test/metrics:global_metrics_logger_and_exporter", "../../api/test/metrics:metric", @@ -518,8 +466,8 @@ if (rtc_include_tests) { "../../rtc_base:safe_conversions", "../../system_wrappers", "../../test:test_support", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("analog_mic_simulation") { @@ -537,7 +485,6 @@ if (rtc_include_tests) { "../../rtc_base:safe_minmax", "agc2:gain_map", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } if (rtc_enable_protobuf && !build_with_chromium) { @@ -558,40 +505,35 @@ if (rtc_include_tests) { ] deps = [ + ":aec3_config_json", ":analog_mic_simulation", - ":api", ":apm_logging", ":audio_processing", ":audioproc_debug_proto", ":audioproc_protobuf_utils", ":audioproc_test_utils", - ":runtime_settings_protobuf_utils", - "../../api/audio:aec3_config_json", + 
"../../api:field_trials", + "../../api:scoped_refptr", + "../../api/audio:aec3_config", "../../api/audio:aec3_factory", + "../../api/audio:audio_processing", + "../../api/audio:builtin_audio_processing_builder", "../../api/audio:echo_detector_creator", + "../../api/environment", + "../../api/environment:environment_factory", "../../common_audio", "../../rtc_base:checks", - "../../rtc_base:ignore_wundef", "../../rtc_base:logging", - "../../rtc_base:protobuf_utils", - "../../rtc_base:rtc_json", "../../rtc_base:safe_conversions", "../../rtc_base:stringutils", "../../rtc_base:task_queue_for_test", "../../rtc_base:timeutils", "../../rtc_base/system:file_wrapper", - "../../system_wrappers", - "../../system_wrappers:field_trial", - "../../test:test_support", "aec_dump", - "aec_dump:aec_dump_impl", - "//testing/gtest", - ] - absl_deps = [ + "//third_party/abseil-cpp/absl/base:nullability", "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } # audioproc_f_impl } @@ -611,7 +553,6 @@ if (rtc_include_tests) { deps = [ ":audioproc_debug_proto", "../../rtc_base:checks", - "../../rtc_base:ignore_wundef", "../../rtc_base:protobuf_utils", "../../rtc_base/system:arch", ] @@ -625,9 +566,9 @@ if (rtc_include_tests) { ] deps = [ - ":api", ":audioproc_debug_proto", ":audioproc_protobuf_utils", + "../../api/audio:audio_processing", "../../rtc_base:checks", ] } @@ -640,8 +581,6 @@ rtc_library("audioproc_test_utils") { sources = [ "test/audio_buffer_tools.cc", "test/audio_buffer_tools.h", - "test/audio_processing_builder_for_testing.cc", - "test/audio_processing_builder_for_testing.h", "test/bitexactness_tools.cc", "test/bitexactness_tools.h", "test/performance_timer.cc", @@ -655,11 +594,11 @@ rtc_library("audioproc_test_utils") { configs += [ ":apm_debug_dump" ] deps = [ - ":api", ":audio_buffer", ":audio_processing", "../../api:array_view", "../../api/audio:audio_frame_api", + "../../api/audio:audio_processing", "../../common_audio", "../../rtc_base:checks", "../../rtc_base:random", @@ -669,9 +608,24 @@ rtc_library("audioproc_test_utils") { "../../test:test_support", "../audio_coding:neteq_input_audio_tools", "//testing/gtest", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", +} + +rtc_library("aec3_config_json") { + visibility = [ "*" ] + testonly = true + sources = [ + "test/echo_canceller3_config_json.cc", + "test/echo_canceller3_config_json.h", + ] + deps = [ + "../../api/audio:aec3_config", + "../../rtc_base:checks", + "../../rtc_base:logging", + "../../rtc_base:rtc_json", + "../../rtc_base:stringutils", + "../../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/strings:string_view", ] } diff --git a/modules/audio_processing/aec3/BUILD.gn b/modules/audio_processing/aec3/BUILD.gn index f5eb5d5951..a0bda5f494 100644 --- a/modules/audio_processing/aec3/BUILD.gn +++ b/modules/audio_processing/aec3/BUILD.gn @@ -141,8 +141,10 @@ rtc_library("aec3") { "..:audio_buffer", "..:high_pass_filter", "../../../api:array_view", + "../../../api:field_trials_view", "../../../api/audio:aec3_config", "../../../api/audio:echo_control", + "../../../api/environment", "../../../common_audio:common_audio_c", "../../../rtc_base:checks", "../../../rtc_base:logging", @@ -153,13 +155,9 @@ rtc_library("aec3") { 
"../../../rtc_base/experiments:field_trial_parser", "../../../rtc_base/system:arch", "../../../system_wrappers", - "../../../system_wrappers:field_trial", "../../../system_wrappers:metrics", "../utility:cascaded_biquad_filter", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (current_cpu == "x86" || current_cpu == "x64") { @@ -210,8 +208,8 @@ rtc_source_set("adaptive_fir_filter") { "..:apm_logging", "../../../api:array_view", "../../../rtc_base/system:arch", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_source_set("adaptive_fir_filter_erl") { @@ -231,7 +229,6 @@ rtc_source_set("matched_filter") { "../../../rtc_base:gtest_prod", "../../../rtc_base/system:arch", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("vector_math") { @@ -317,6 +314,8 @@ if (rtc_include_tests) { "..:high_pass_filter", "../../../api:array_view", "../../../api/audio:aec3_config", + "../../../api/environment", + "../../../api/environment:environment_factory", "../../../rtc_base:checks", "../../../rtc_base:macromagic", "../../../rtc_base:random", @@ -325,11 +324,11 @@ if (rtc_include_tests) { "../../../rtc_base/system:arch", "../../../system_wrappers", "../../../system_wrappers:metrics", + "../../../test:explicit_key_value_config", "../../../test:field_trial", "../../../test:test_support", "../utility:cascaded_biquad_filter", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] defines = [] diff --git a/modules/audio_processing/aec3/adaptive_fir_filter.cc b/modules/audio_processing/aec3/adaptive_fir_filter.cc index 917aa951ee..22c3be2b80 100644 --- a/modules/audio_processing/aec3/adaptive_fir_filter.cc +++ b/modules/audio_processing/aec3/adaptive_fir_filter.cc @@ -129,7 +129,7 @@ void AdaptPartitions(const RenderBuffer& render_buffer, const FftData& G, size_t num_partitions, std::vector>* H) { - rtc::ArrayView> render_buffer_data = + ArrayView> render_buffer_data = render_buffer.GetFftBuffer(); size_t index = render_buffer.Position(); const size_t num_render_channels = render_buffer_data[index].size(); @@ -152,7 +152,7 @@ void AdaptPartitions_Neon(const RenderBuffer& render_buffer, const FftData& G, size_t num_partitions, std::vector>* H) { - rtc::ArrayView> render_buffer_data = + webrtc::ArrayView> render_buffer_data = render_buffer.GetFftBuffer(); const size_t num_render_channels = render_buffer_data[0].size(); const size_t lim1 = std::min( @@ -218,7 +218,7 @@ void AdaptPartitions_Sse2(const RenderBuffer& render_buffer, const FftData& G, size_t num_partitions, std::vector>* H) { - rtc::ArrayView> render_buffer_data = + ArrayView> render_buffer_data = render_buffer.GetFftBuffer(); const size_t num_render_channels = render_buffer_data[0].size(); const size_t lim1 = std::min( @@ -289,7 +289,7 @@ void ApplyFilter(const RenderBuffer& render_buffer, S->re.fill(0.f); S->im.fill(0.f); - rtc::ArrayView> render_buffer_data = + ArrayView> render_buffer_data = render_buffer.GetFftBuffer(); size_t index = render_buffer.Position(); const size_t num_render_channels = render_buffer_data[index].size(); @@ -314,12 +314,12 @@ void ApplyFilter_Neon(const RenderBuffer& render_buffer, const std::vector>& H, FftData* S) { // const RenderBuffer& render_buffer, - // rtc::ArrayView H, + // webrtc::ArrayView H, // FftData* S) { RTC_DCHECK_GE(H.size(), H.size() - 1); S->Clear(); - rtc::ArrayView> 
render_buffer_data = + webrtc::ArrayView> render_buffer_data = render_buffer.GetFftBuffer(); const size_t num_render_channels = render_buffer_data[0].size(); const size_t lim1 = std::min( @@ -384,13 +384,13 @@ void ApplyFilter_Sse2(const RenderBuffer& render_buffer, const std::vector>& H, FftData* S) { // const RenderBuffer& render_buffer, - // rtc::ArrayView H, + // webrtc::ArrayView H, // FftData* S) { RTC_DCHECK_GE(H.size(), H.size() - 1); S->re.fill(0.f); S->im.fill(0.f); - rtc::ArrayView> render_buffer_data = + ArrayView> render_buffer_data = render_buffer.GetFftBuffer(); const size_t num_render_channels = render_buffer_data[0].size(); const size_t lim1 = std::min( diff --git a/modules/audio_processing/aec3/adaptive_fir_filter_avx2.cc b/modules/audio_processing/aec3/adaptive_fir_filter_avx2.cc index b6eda9f117..9f720a5abf 100644 --- a/modules/audio_processing/aec3/adaptive_fir_filter_avx2.cc +++ b/modules/audio_processing/aec3/adaptive_fir_filter_avx2.cc @@ -54,7 +54,7 @@ void AdaptPartitions_Avx2(const RenderBuffer& render_buffer, const FftData& G, size_t num_partitions, std::vector>* H) { - rtc::ArrayView> render_buffer_data = + ArrayView> render_buffer_data = render_buffer.GetFftBuffer(); const size_t num_render_channels = render_buffer_data[0].size(); const size_t lim1 = std::min( @@ -125,7 +125,7 @@ void ApplyFilter_Avx2(const RenderBuffer& render_buffer, S->re.fill(0.f); S->im.fill(0.f); - rtc::ArrayView> render_buffer_data = + ArrayView> render_buffer_data = render_buffer.GetFftBuffer(); const size_t num_render_channels = render_buffer_data[0].size(); const size_t lim1 = std::min( diff --git a/modules/audio_processing/aec3/adaptive_fir_filter_erl.cc b/modules/audio_processing/aec3/adaptive_fir_filter_erl.cc index 45b8813979..9bf7bc7de1 100644 --- a/modules/audio_processing/aec3/adaptive_fir_filter_erl.cc +++ b/modules/audio_processing/aec3/adaptive_fir_filter_erl.cc @@ -27,7 +27,7 @@ namespace aec3 { // Computes and stores the echo return loss estimate of the filter, which is the // sum of the partition frequency responses. void ErlComputer(const std::vector>& H2, - rtc::ArrayView erl) { + ArrayView erl) { std::fill(erl.begin(), erl.end(), 0.f); for (auto& H2_j : H2) { std::transform(H2_j.begin(), H2_j.end(), erl.begin(), erl.begin(), @@ -40,7 +40,7 @@ void ErlComputer(const std::vector>& H2, // sum of the partition frequency responses. void ErlComputer_NEON( const std::vector>& H2, - rtc::ArrayView erl) { + webrtc::ArrayView erl) { std::fill(erl.begin(), erl.end(), 0.f); for (auto& H2_j : H2) { for (size_t k = 0; k < kFftLengthBy2; k += 4) { @@ -59,7 +59,7 @@ void ErlComputer_NEON( // sum of the partition frequency responses. void ErlComputer_SSE2( const std::vector>& H2, - rtc::ArrayView erl) { + ArrayView erl) { std::fill(erl.begin(), erl.end(), 0.f); for (auto& H2_j : H2) { for (size_t k = 0; k < kFftLengthBy2; k += 4) { @@ -77,7 +77,7 @@ void ErlComputer_SSE2( void ComputeErl(const Aec3Optimization& optimization, const std::vector>& H2, - rtc::ArrayView erl) { + ArrayView erl) { RTC_DCHECK_EQ(kFftLengthBy2Plus1, erl.size()); // Update the frequency response and echo return loss for the filter. 
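// Editor's note: the aec3 hunks above drop the rtc:: prefix in favor of
// (webrtc::)ArrayView from api/array_view.h. A minimal sketch of passing a
// fixed-size spectrum buffer through an ArrayView parameter; the helper
// itself is hypothetical:
#include <array>
#include <numeric>

#include "api/array_view.h"

float SumErl(webrtc::ArrayView<const float> erl) {
  return std::accumulate(erl.begin(), erl.end(), 0.0f);
}

float SumOfOnes() {
  std::array<float, 65> erl;  // 65 = kFftLengthBy2Plus1 for the 128-point FFT.
  erl.fill(1.0f);
  return SumErl(erl);  // std::array converts implicitly to an ArrayView.
}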
switch (optimization) { diff --git a/modules/audio_processing/aec3/adaptive_fir_filter_erl.h b/modules/audio_processing/aec3/adaptive_fir_filter_erl.h index 4ac13b1bc3..68da2d2d66 100644 --- a/modules/audio_processing/aec3/adaptive_fir_filter_erl.h +++ b/modules/audio_processing/aec3/adaptive_fir_filter_erl.h @@ -26,20 +26,20 @@ namespace aec3 { // Computes and stores the echo return loss estimate of the filter, which is the // sum of the partition frequency responses. void ErlComputer(const std::vector>& H2, - rtc::ArrayView erl); + ArrayView erl); #if defined(WEBRTC_HAS_NEON) void ErlComputer_NEON( const std::vector>& H2, - rtc::ArrayView erl); + webrtc::ArrayView erl); #endif #if defined(WEBRTC_ARCH_X86_FAMILY) void ErlComputer_SSE2( const std::vector>& H2, - rtc::ArrayView erl); + ArrayView erl); void ErlComputer_AVX2( const std::vector>& H2, - rtc::ArrayView erl); + ArrayView erl); #endif } // namespace aec3 @@ -47,7 +47,7 @@ void ErlComputer_AVX2( // Computes the echo return loss based on a frequency response. void ComputeErl(const Aec3Optimization& optimization, const std::vector>& H2, - rtc::ArrayView erl); + ArrayView erl); } // namespace webrtc diff --git a/modules/audio_processing/aec3/adaptive_fir_filter_erl_avx2.cc b/modules/audio_processing/aec3/adaptive_fir_filter_erl_avx2.cc index 1e63cf8fe7..dbcaa9acb7 100644 --- a/modules/audio_processing/aec3/adaptive_fir_filter_erl_avx2.cc +++ b/modules/audio_processing/aec3/adaptive_fir_filter_erl_avx2.cc @@ -20,7 +20,7 @@ namespace aec3 { // sum of the partition frequency responses. void ErlComputer_AVX2( const std::vector>& H2, - rtc::ArrayView erl) { + ArrayView erl) { std::fill(erl.begin(), erl.end(), 0.f); for (auto& H2_j : H2) { for (size_t k = 0; k < kFftLengthBy2; k += 8) { diff --git a/modules/audio_processing/aec3/adaptive_fir_filter_unittest.cc b/modules/audio_processing/aec3/adaptive_fir_filter_unittest.cc index a13764c109..ac973bed8f 100644 --- a/modules/audio_processing/aec3/adaptive_fir_filter_unittest.cc +++ b/modules/audio_processing/aec3/adaptive_fir_filter_unittest.cc @@ -22,6 +22,7 @@ #include #endif +#include "api/environment/environment_factory.h" #include "modules/audio_processing/aec3/adaptive_fir_filter_erl.h" #include "modules/audio_processing/aec3/aec3_fft.h" #include "modules/audio_processing/aec3/aec_state.h" @@ -43,7 +44,7 @@ namespace aec3 { namespace { std::string ProduceDebugText(size_t num_render_channels, size_t delay) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "delay: " << delay << ", "; ss << "num_render_channels:" << num_render_channels; return ss.Release(); @@ -482,9 +483,10 @@ TEST_P(AdaptiveFirFilterMultiChannel, FilterAndAdapt) { Block x(kNumBands, num_render_channels); std::vector n(kBlockSize, 0.f); std::vector y(kBlockSize, 0.f); - AecState aec_state(EchoCanceller3Config{}, num_capture_channels); + AecState aec_state(CreateEnvironment(), EchoCanceller3Config{}, + num_capture_channels); RenderSignalAnalyzer render_signal_analyzer(config); - absl::optional delay_estimate; + std::optional delay_estimate; std::vector e(kBlockSize, 0.f); std::array s_scratch; std::vector output(num_capture_channels); @@ -564,7 +566,7 @@ TEST_P(AdaptiveFirFilterMultiChannel, FilterAndAdapt) { e.begin(), [&](float a, float b) { return a - b * kScale; }); std::for_each(e.begin(), e.end(), - [](float& a) { a = rtc::SafeClamp(a, -32768.f, 32767.f); }); + [](float& a) { a = SafeClamp(a, -32768.f, 32767.f); }); fft.ZeroPaddedFft(e, Aec3Fft::Window::kRectangular, &E); for (auto& o : output) { for (size_t k = 0; 
k < kBlockSize; ++k) { diff --git a/modules/audio_processing/aec3/aec3_fft.cc b/modules/audio_processing/aec3/aec3_fft.cc index 9cc8016f0b..1c4cf33c3b 100644 --- a/modules/audio_processing/aec3/aec3_fft.cc +++ b/modules/audio_processing/aec3/aec3_fft.cc @@ -84,7 +84,7 @@ bool IsSse2Available() { Aec3Fft::Aec3Fft() : ooura_fft_(IsSse2Available()) {} // TODO(peah): Change x to be std::array once the rest of the code allows this. -void Aec3Fft::ZeroPaddedFft(rtc::ArrayView x, +void Aec3Fft::ZeroPaddedFft(ArrayView x, Window window, FftData* X) const { RTC_DCHECK(X); @@ -110,8 +110,8 @@ void Aec3Fft::ZeroPaddedFft(rtc::ArrayView x, Fft(&fft, X); } -void Aec3Fft::PaddedFft(rtc::ArrayView x, - rtc::ArrayView x_old, +void Aec3Fft::PaddedFft(ArrayView x, + ArrayView x_old, Window window, FftData* X) const { RTC_DCHECK(X); diff --git a/modules/audio_processing/aec3/aec3_fft.h b/modules/audio_processing/aec3/aec3_fft.h index c68de53963..83d2a2e919 100644 --- a/modules/audio_processing/aec3/aec3_fft.h +++ b/modules/audio_processing/aec3/aec3_fft.h @@ -48,21 +48,19 @@ class Aec3Fft { // Windows the input using a Hanning window, and then adds padding of // kFftLengthBy2 initial zeros before computing the Fft. - void ZeroPaddedFft(rtc::ArrayView x, - Window window, - FftData* X) const; + void ZeroPaddedFft(ArrayView x, Window window, FftData* X) const; // Concatenates the kFftLengthBy2 values long x and x_old before computing the // Fft. After that, x is copied to x_old. - void PaddedFft(rtc::ArrayView x, - rtc::ArrayView x_old, + void PaddedFft(ArrayView x, + ArrayView x_old, FftData* X) const { PaddedFft(x, x_old, Window::kRectangular, X); } // Padded Fft using a time-domain window. - void PaddedFft(rtc::ArrayView x, - rtc::ArrayView x_old, + void PaddedFft(ArrayView x, + ArrayView x_old, Window window, FftData* X) const; diff --git a/modules/audio_processing/aec3/aec_state.cc b/modules/audio_processing/aec3/aec_state.cc index 81fd91fab9..cc8642dc32 100644 --- a/modules/audio_processing/aec3/aec_state.cc +++ b/modules/audio_processing/aec3/aec_state.cc @@ -14,29 +14,32 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { -bool DeactivateInitialStateResetAtEchoPathChange() { - return field_trial::IsEnabled( +bool DeactivateInitialStateResetAtEchoPathChange( + const FieldTrialsView& field_trials) { + return field_trials.IsEnabled( "WebRTC-Aec3DeactivateInitialStateResetKillSwitch"); } -bool FullResetAtEchoPathChange() { - return !field_trial::IsEnabled("WebRTC-Aec3AecStateFullResetKillSwitch"); +bool FullResetAtEchoPathChange(const FieldTrialsView& field_trials) { + return !field_trials.IsEnabled("WebRTC-Aec3AecStateFullResetKillSwitch"); } -bool SubtractorAnalyzerResetAtEchoPathChange() { - return !field_trial::IsEnabled( +bool SubtractorAnalyzerResetAtEchoPathChange( + const FieldTrialsView& field_trials) { + return !field_trials.IsEnabled( "WebRTC-Aec3AecStateSubtractorAnalyzerResetKillSwitch"); } @@ -45,7 +48,7 @@ void ComputeAvgRenderReverb( int delay_blocks, float reverb_decay, ReverbModel* reverb_model, - rtc::ArrayView reverb_power_spectrum) { + ArrayView reverb_power_spectrum) { RTC_DCHECK(reverb_model); const size_t num_render_channels = 
spectrum_buffer.buffer[0].size(); int idx_at_delay = @@ -53,13 +56,13 @@ void ComputeAvgRenderReverb( int idx_past = spectrum_buffer.IncIndex(idx_at_delay); std::array X2_data; - rtc::ArrayView X2; + ArrayView X2; if (num_render_channels > 1) { auto average_channels = [](size_t num_render_channels, - rtc::ArrayView> + ArrayView> spectrum_band_0, - rtc::ArrayView render_power) { + ArrayView render_power) { std::fill(render_power.begin(), render_power.end(), 0.f); for (size_t ch = 0; ch < num_render_channels; ++ch) { for (size_t k = 0; k < kFftLengthBy2Plus1; ++k) { @@ -87,7 +90,7 @@ void ComputeAvgRenderReverb( X2 = spectrum_buffer.buffer[idx_at_delay][/*channel=*/0]; } - rtc::ArrayView reverb_power = + ArrayView reverb_power = reverb_model->reverb(); for (size_t k = 0; k < X2.size(); ++k) { reverb_power_spectrum[k] = X2[k] + reverb_power[k]; @@ -98,8 +101,7 @@ void ComputeAvgRenderReverb( std::atomic AecState::instance_count_(0); -void AecState::GetResidualEchoScaling( - rtc::ArrayView residual_scaling) const { +void AecState::GetResidualEchoScaling(ArrayView residual_scaling) const { bool filter_has_had_time_to_converge; if (config_.filter.conservative_initial_phase) { filter_has_had_time_to_converge = @@ -112,22 +114,27 @@ void AecState::GetResidualEchoScaling( residual_scaling); } -AecState::AecState(const EchoCanceller3Config& config, +AecState::AecState(const Environment& env, + const EchoCanceller3Config& config, size_t num_capture_channels) : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), config_(config), num_capture_channels_(num_capture_channels), deactivate_initial_state_reset_at_echo_path_change_( - DeactivateInitialStateResetAtEchoPathChange()), - full_reset_at_echo_path_change_(FullResetAtEchoPathChange()), + DeactivateInitialStateResetAtEchoPathChange(env.field_trials())), + full_reset_at_echo_path_change_( + FullResetAtEchoPathChange(env.field_trials())), subtractor_analyzer_reset_at_echo_path_change_( - SubtractorAnalyzerResetAtEchoPathChange()), + SubtractorAnalyzerResetAtEchoPathChange(env.field_trials())), initial_state_(config_), delay_state_(config_, num_capture_channels_), - transparent_state_(TransparentMode::Create(config_)), + transparent_state_(TransparentMode::Create(env, config_)), filter_quality_state_(config_, num_capture_channels_), erl_estimator_(2 * kNumBlocksPerSecond), - erle_estimator_(2 * kNumBlocksPerSecond, config_, num_capture_channels_), + erle_estimator_(env, + 2 * kNumBlocksPerSecond, + config_, + num_capture_channels_), filter_analyzer_(config_, num_capture_channels_), echo_audibility_( config_.echo_audibility.use_stationarity_properties_at_init), @@ -170,14 +177,14 @@ void AecState::HandleEchoPathChange( } void AecState::Update( - const absl::optional& external_delay, - rtc::ArrayView>> + const std::optional& external_delay, + ArrayView>> adaptive_filter_frequency_responses, - rtc::ArrayView> adaptive_filter_impulse_responses, + ArrayView> adaptive_filter_impulse_responses, const RenderBuffer& render_buffer, - rtc::ArrayView> E2_refined, - rtc::ArrayView> Y2, - rtc::ArrayView subtractor_output) { + ArrayView> E2_refined, + ArrayView> Y2, + ArrayView subtractor_output) { RTC_DCHECK_EQ(num_capture_channels_, Y2.size()); RTC_DCHECK_EQ(num_capture_channels_, subtractor_output.size()); RTC_DCHECK_EQ(num_capture_channels_, @@ -358,8 +365,8 @@ AecState::FilterDelay::FilterDelay(const EchoCanceller3Config& config, min_filter_delay_(delay_headroom_blocks_) {} void AecState::FilterDelay::Update( - rtc::ArrayView 
analyzer_filter_delay_estimates_blocks, - const absl::optional& external_delay, + ArrayView analyzer_filter_delay_estimates_blocks, + const std::optional& external_delay, size_t blocks_with_proper_filter_adaptation) { // Update the delay based on the external delay. if (external_delay && @@ -405,7 +412,7 @@ void AecState::FilteringQualityAnalyzer::Update( bool active_render, bool transparent_mode, bool saturated_capture, - const absl::optional& external_delay, + const std::optional& external_delay, bool any_filter_converged) { // Update blocks counter. const bool filter_update = active_render && !saturated_capture; @@ -448,7 +455,7 @@ void AecState::SaturationDetector::Update( const Block& x, bool saturated_capture, bool usable_linear_estimate, - rtc::ArrayView subtractor_output, + ArrayView subtractor_output, float echo_path_gain) { saturated_echo_ = false; if (!saturated_capture) { @@ -466,7 +473,7 @@ void AecState::SaturationDetector::Update( } else { float max_sample = 0.f; for (int ch = 0; ch < x.NumChannels(); ++ch) { - rtc::ArrayView x_ch = x.View(/*band=*/0, ch); + ArrayView x_ch = x.View(/*band=*/0, ch); for (float sample : x_ch) { max_sample = std::max(max_sample, fabsf(sample)); } diff --git a/modules/audio_processing/aec3/aec_state.h b/modules/audio_processing/aec3/aec_state.h index a39325c8b8..bd7050c3fd 100644 --- a/modules/audio_processing/aec3/aec_state.h +++ b/modules/audio_processing/aec3/aec_state.h @@ -16,11 +16,12 @@ #include #include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" +#include "api/environment/environment.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/delay_estimate.h" #include "modules/audio_processing/aec3/echo_audibility.h" @@ -41,7 +42,9 @@ class ApmDataDumper; // Handles the state and the conditions for the echo removal functionality. class AecState { public: - AecState(const EchoCanceller3Config& config, size_t num_capture_channels); + AecState(const Environment& env, + const EchoCanceller3Config& config, + size_t num_capture_channels); ~AecState(); // Returns whether the echo subtractor can be used to determine the residual @@ -62,7 +65,7 @@ class AecState { // Returns the appropriate scaling of the residual echo to match the // audibility. - void GetResidualEchoScaling(rtc::ArrayView residual_scaling) const; + void GetResidualEchoScaling(ArrayView residual_scaling) const; // Returns whether the stationary properties of the signals are used in the // aec. @@ -71,14 +74,13 @@ class AecState { } // Returns the ERLE. - rtc::ArrayView> Erle( + ArrayView> Erle( bool onset_compensated) const { return erle_estimator_.Erle(onset_compensated); } // Returns the non-capped ERLE. - rtc::ArrayView> ErleUnbounded() - const { + ArrayView> ErleUnbounded() const { return erle_estimator_.ErleUnbounded(); } @@ -125,7 +127,7 @@ class AecState { } // Return the frequency response of the reverberant echo. - rtc::ArrayView GetReverbFrequencyResponse() const { + ArrayView GetReverbFrequencyResponse() const { return reverb_model_estimator_.GetReverbFrequencyResponse(); } @@ -138,15 +140,14 @@ class AecState { // Updates the aec state. // TODO(bugs.webrtc.org/10913): Compute multi-channel ERL. 
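For reference, the aec_state hunks above change AecState to take the Environment at construction time and to read its kill switches from env.field_trials() rather than the removed system_wrappers field_trial globals. A minimal construction sketch mirroring the updated unit tests; CreateEnvironment() is just a convenient default, production code forwards the Environment it already owns:

#include "api/audio/echo_canceller3_config.h"
#include "api/environment/environment_factory.h"
#include "modules/audio_processing/aec3/aec_state.h"

namespace webrtc {

void AecStateConstructionSketch() {
  // CreateEnvironment() supplies default utilities, including field trials;
  // the constructor forwards env.field_trials() to its kill-switch helpers.
  AecState aec_state(CreateEnvironment(), EchoCanceller3Config{},
                     /*num_capture_channels=*/1);
}

}  // namespace webrtc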
void Update( - const absl::optional& external_delay, - rtc::ArrayView>> + const std::optional& external_delay, + ArrayView>> adaptive_filter_frequency_responses, - rtc::ArrayView> - adaptive_filter_impulse_responses, + ArrayView> adaptive_filter_impulse_responses, const RenderBuffer& render_buffer, - rtc::ArrayView> E2_refined, - rtc::ArrayView> Y2, - rtc::ArrayView subtractor_output); + ArrayView> E2_refined, + ArrayView> Y2, + ArrayView subtractor_output); // Returns filter length in blocks. int FilterLengthBlocks() const { @@ -202,7 +203,7 @@ class AecState { // Returns the delay in blocks relative to the beginning of the filter that // corresponds to the direct path of the echo. - rtc::ArrayView DirectPathFilterDelays() const { + ArrayView DirectPathFilterDelays() const { return filter_delays_blocks_; } @@ -211,17 +212,16 @@ class AecState { int MinDirectPathFilterDelay() const { return min_filter_delay_; } // Updates the delay estimates based on new data. - void Update( - rtc::ArrayView analyzer_filter_delay_estimates_blocks, - const absl::optional& external_delay, - size_t blocks_with_proper_filter_adaptation); + void Update(ArrayView analyzer_filter_delay_estimates_blocks, + const std::optional& external_delay, + size_t blocks_with_proper_filter_adaptation); private: const int delay_headroom_blocks_; bool external_delay_reported_ = false; std::vector filter_delays_blocks_; int min_filter_delay_; - absl::optional external_delay_; + std::optional external_delay_; } delay_state_; // Classifier for toggling transparent mode when there is no echo. @@ -253,7 +253,7 @@ class AecState { void Update(bool active_render, bool transparent_mode, bool saturated_capture, - const absl::optional& external_delay, + const std::optional& external_delay, bool any_filter_converged); private: @@ -276,7 +276,7 @@ class AecState { void Update(const Block& x, bool saturated_capture, bool usable_linear_estimate, - rtc::ArrayView subtractor_output, + ArrayView subtractor_output, float echo_path_gain); private: diff --git a/modules/audio_processing/aec3/aec_state_unittest.cc b/modules/audio_processing/aec3/aec_state_unittest.cc index 6662c8fb1a..f96828c6d9 100644 --- a/modules/audio_processing/aec3/aec_state_unittest.cc +++ b/modules/audio_processing/aec3/aec_state_unittest.cc @@ -10,6 +10,8 @@ #include "modules/audio_processing/aec3/aec_state.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "modules/audio_processing/aec3/aec3_fft.h" #include "modules/audio_processing/aec3/render_delay_buffer.h" #include "modules/audio_processing/logging/apm_data_dumper.h" @@ -27,8 +29,8 @@ void RunNormalUsageTest(size_t num_render_channels, constexpr size_t kNumBands = NumBandsForRate(kSampleRateHz); ApmDataDumper data_dumper(42); EchoCanceller3Config config; - AecState state(config, num_capture_channels); - absl::optional delay_estimate = + AecState state(CreateEnvironment(), config, num_capture_channels); + std::optional delay_estimate = DelayEstimate(DelayEstimate::Quality::kRefined, 10); std::unique_ptr render_delay_buffer( RenderDelayBuffer::Create(config, kSampleRateHz, num_render_channels)); @@ -244,10 +246,10 @@ TEST(AecState, ConvergedFilterDelay) { constexpr int kFilterLengthBlocks = 10; constexpr size_t kNumCaptureChannels = 1; EchoCanceller3Config config; - AecState state(config, kNumCaptureChannels); + AecState state(CreateEnvironment(), config, kNumCaptureChannels); std::unique_ptr render_delay_buffer( RenderDelayBuffer::Create(config, 48000, 1)); - 
absl::optional delay_estimate; + std::optional delay_estimate; std::vector> E2_refined( kNumCaptureChannels); std::vector> Y2(kNumCaptureChannels); diff --git a/modules/audio_processing/aec3/alignment_mixer.cc b/modules/audio_processing/aec3/alignment_mixer.cc index 7f076dea8e..728e0f7bb8 100644 --- a/modules/audio_processing/aec3/alignment_mixer.cc +++ b/modules/audio_processing/aec3/alignment_mixer.cc @@ -64,7 +64,7 @@ AlignmentMixer::AlignmentMixer(size_t num_channels, } void AlignmentMixer::ProduceOutput(const Block& x, - rtc::ArrayView y) { + ArrayView y) { RTC_DCHECK_EQ(x.NumChannels(), num_channels_); if (selection_variant_ == MixingVariant::kDownmix) { @@ -79,7 +79,7 @@ void AlignmentMixer::ProduceOutput(const Block& x, } void AlignmentMixer::Downmix(const Block& x, - rtc::ArrayView y) const { + ArrayView y) const { RTC_DCHECK_EQ(x.NumChannels(), num_channels_); RTC_DCHECK_GE(num_channels_, 2); std::memcpy(&y[0], x.View(/*band=*/0, /*channel=*/0).data(), @@ -116,7 +116,7 @@ int AlignmentMixer::SelectChannel(const Block& x) { for (int ch = 0; ch < num_ch_to_analyze; ++ch) { float x2_sum = 0.f; - rtc::ArrayView x_ch = x.View(/*band=*/0, ch); + ArrayView x_ch = x.View(/*band=*/0, ch); for (size_t i = 0; i < kBlockSize; ++i) { x2_sum += x_ch[i] * x_ch[i]; } diff --git a/modules/audio_processing/aec3/alignment_mixer.h b/modules/audio_processing/aec3/alignment_mixer.h index b3ed04755c..238f9a0ecf 100644 --- a/modules/audio_processing/aec3/alignment_mixer.h +++ b/modules/audio_processing/aec3/alignment_mixer.h @@ -34,7 +34,7 @@ class AlignmentMixer { float excitation_limit, bool prefer_first_two_channels); - void ProduceOutput(const Block& x, rtc::ArrayView y); + void ProduceOutput(const Block& x, ArrayView y); enum class MixingVariant { kDownmix, kAdaptive, kFixed }; @@ -49,7 +49,7 @@ class AlignmentMixer { int selected_channel_ = 0; size_t block_counter_ = 0; - void Downmix(const Block& x, rtc::ArrayView y) const; + void Downmix(const Block& x, ArrayView y) const; int SelectChannel(const Block& x); }; } // namespace webrtc diff --git a/modules/audio_processing/aec3/alignment_mixer_unittest.cc b/modules/audio_processing/aec3/alignment_mixer_unittest.cc index eaf6dcb235..ef2abee9ff 100644 --- a/modules/audio_processing/aec3/alignment_mixer_unittest.cc +++ b/modules/audio_processing/aec3/alignment_mixer_unittest.cc @@ -28,7 +28,7 @@ std::string ProduceDebugText(bool initial_silence, bool prefer_first_two_channels, int num_channels, int strongest_ch) { - rtc::StringBuilder ss; + StringBuilder ss; ss << ", Initial silence: " << initial_silence; ss << ", Huge activity threshold: " << huge_activity_threshold; ss << ", Prefer first two channels: " << prefer_first_two_channels; diff --git a/modules/audio_processing/aec3/block.h b/modules/audio_processing/aec3/block.h index c1fc70722d..3f0f4f6be6 100644 --- a/modules/audio_processing/aec3/block.h +++ b/modules/audio_processing/aec3/block.h @@ -58,14 +58,14 @@ class Block { } // Access data via ArrayView. - rtc::ArrayView View(int band, int channel) { - return rtc::ArrayView(&data_[GetIndex(band, channel)], - kBlockSize); + ArrayView View(int band, int channel) { + return ArrayView(&data_[GetIndex(band, channel)], + kBlockSize); } - rtc::ArrayView View(int band, int channel) const { - return rtc::ArrayView( - &data_[GetIndex(band, channel)], kBlockSize); + ArrayView View(int band, int channel) const { + return ArrayView(&data_[GetIndex(band, channel)], + kBlockSize); } // Lets two Blocks swap audio data. 
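Note that the recurring ArrayView edits in these hunks are a pure spelling change: the type now resolves inside namespace webrtc, so the legacy rtc:: qualifier is dropped and behavior is unchanged. The template arguments are not visible in this rendering of the patch, so the element type in the sketch below is an assumption. A minimal example against Block::View:

#include "api/array_view.h"
#include "modules/audio_processing/aec3/block.h"

namespace webrtc {

// Element type <const float> is assumed; only the namespace spelling of
// ArrayView changes in the patch, not what the view refers to.
float FirstSampleOfBandZero(const Block& block) {
  ArrayView<const float> samples = block.View(/*band=*/0, /*channel=*/0);
  return samples[0];  // kBlockSize is 64, so the view is never empty.
}

}  // namespace webrtc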
diff --git a/modules/audio_processing/aec3/block_delay_buffer.cc b/modules/audio_processing/aec3/block_delay_buffer.cc index 059bbafcdb..c599b3924d 100644 --- a/modules/audio_processing/aec3/block_delay_buffer.cc +++ b/modules/audio_processing/aec3/block_delay_buffer.cc @@ -40,7 +40,7 @@ void BlockDelayBuffer::DelaySignal(AudioBuffer* frame) { for (size_t ch = 0; ch < num_channels; ++ch) { RTC_DCHECK_EQ(buf_[ch].size(), frame->num_bands()); RTC_DCHECK_EQ(buf_[ch].size(), num_bands); - rtc::ArrayView frame_ch(frame->split_bands(ch), num_bands); + ArrayView frame_ch(frame->split_bands(ch), num_bands); const size_t delay = delay_; for (size_t band = 0; band < num_bands; ++band) { diff --git a/modules/audio_processing/aec3/block_delay_buffer_unittest.cc b/modules/audio_processing/aec3/block_delay_buffer_unittest.cc index 011ab49651..f12508485d 100644 --- a/modules/audio_processing/aec3/block_delay_buffer_unittest.cc +++ b/modules/audio_processing/aec3/block_delay_buffer_unittest.cc @@ -39,7 +39,7 @@ void PopulateInputFrame(size_t frame_length, std::string ProduceDebugText(int sample_rate_hz, size_t delay) { char log_stream_buffer[8 * 1024]; - rtc::SimpleStringBuilder ss(log_stream_buffer); + SimpleStringBuilder ss(log_stream_buffer); ss << "Sample rate: " << sample_rate_hz; ss << ", Delay: " << delay; return ss.str(); diff --git a/modules/audio_processing/aec3/block_framer.cc b/modules/audio_processing/aec3/block_framer.cc index 4243ddeba0..6ce39c1a9e 100644 --- a/modules/audio_processing/aec3/block_framer.cc +++ b/modules/audio_processing/aec3/block_framer.cc @@ -50,7 +50,7 @@ void BlockFramer::InsertBlock(const Block& block) { void BlockFramer::InsertBlockAndExtractSubFrame( const Block& block, - std::vector>>* sub_frame) { + std::vector>>* sub_frame) { RTC_DCHECK(sub_frame); RTC_DCHECK_EQ(num_bands_, block.NumBands()); RTC_DCHECK_EQ(num_channels_, block.NumChannels()); diff --git a/modules/audio_processing/aec3/block_framer.h b/modules/audio_processing/aec3/block_framer.h index e2cdd5a17c..e45a83841e 100644 --- a/modules/audio_processing/aec3/block_framer.h +++ b/modules/audio_processing/aec3/block_framer.h @@ -37,7 +37,7 @@ class BlockFramer { // Adds a 64 sample block and extracts an 80 sample subframe. 
void InsertBlockAndExtractSubFrame( const Block& block, - std::vector>>* sub_frame); + std::vector>>* sub_frame); private: const size_t num_bands_; diff --git a/modules/audio_processing/aec3/block_framer_unittest.cc b/modules/audio_processing/aec3/block_framer_unittest.cc index 9439623f72..c9b60588d1 100644 --- a/modules/audio_processing/aec3/block_framer_unittest.cc +++ b/modules/audio_processing/aec3/block_framer_unittest.cc @@ -22,13 +22,13 @@ namespace { void SetupSubFrameView( std::vector>>* sub_frame, - std::vector>>* sub_frame_view) { + std::vector>>* sub_frame_view) { for (size_t band = 0; band < sub_frame_view->size(); ++band) { for (size_t channel = 0; channel < (*sub_frame_view)[band].size(); ++channel) { (*sub_frame_view)[band][channel] = - rtc::ArrayView((*sub_frame)[band][channel].data(), - (*sub_frame)[band][channel].size()); + ArrayView((*sub_frame)[band][channel].data(), + (*sub_frame)[band][channel].size()); } } } @@ -48,7 +48,7 @@ float ComputeSampleValue(size_t chunk_counter, bool VerifySubFrame( size_t sub_frame_counter, int offset, - const std::vector>>& sub_frame_view) { + const std::vector>>& sub_frame_view) { for (size_t band = 0; band < sub_frame_view.size(); ++band) { for (size_t channel = 0; channel < sub_frame_view[band].size(); ++channel) { for (size_t sample = 0; sample < sub_frame_view[band][channel].size(); @@ -85,8 +85,8 @@ void RunFramerTest(int sample_rate_hz, size_t num_channels) { std::vector>> output_sub_frame( num_bands, std::vector>( num_channels, std::vector(kSubFrameLength, 0.f))); - std::vector>> output_sub_frame_view( - num_bands, std::vector>(num_channels)); + std::vector>> output_sub_frame_view( + num_bands, std::vector>(num_channels)); SetupSubFrameView(&output_sub_frame, &output_sub_frame_view); BlockFramer framer(num_bands, num_channels); @@ -124,9 +124,9 @@ void RunWronglySizedInsertAndExtractParametersTest( num_sub_frame_bands, std::vector>( num_sub_frame_channels, std::vector(sub_frame_length, 0.f))); - std::vector>> output_sub_frame_view( + std::vector>> output_sub_frame_view( output_sub_frame.size(), - std::vector>(num_sub_frame_channels)); + std::vector>(num_sub_frame_channels)); SetupSubFrameView(&output_sub_frame, &output_sub_frame_view); BlockFramer framer(correct_num_bands, correct_num_channels); EXPECT_DEATH( @@ -147,9 +147,9 @@ void RunWronglySizedInsertParameterTest(int sample_rate_hz, correct_num_bands, std::vector>( correct_num_channels, std::vector(kSubFrameLength, 0.f))); - std::vector>> output_sub_frame_view( + std::vector>> output_sub_frame_view( output_sub_frame.size(), - std::vector>(correct_num_channels)); + std::vector>(correct_num_channels)); SetupSubFrameView(&output_sub_frame, &output_sub_frame_view); BlockFramer framer(correct_num_bands, correct_num_channels); framer.InsertBlockAndExtractSubFrame(correct_block, &output_sub_frame_view); @@ -174,9 +174,9 @@ void RunWronglyInsertOrderTest(int sample_rate_hz, correct_num_bands, std::vector>( num_channels, std::vector(kSubFrameLength, 0.f))); - std::vector>> output_sub_frame_view( + std::vector>> output_sub_frame_view( output_sub_frame.size(), - std::vector>(num_channels)); + std::vector>(num_channels)); SetupSubFrameView(&output_sub_frame, &output_sub_frame_view); BlockFramer framer(correct_num_bands, num_channels); for (size_t k = 0; k < num_preceeding_api_calls; ++k) { @@ -188,7 +188,7 @@ void RunWronglyInsertOrderTest(int sample_rate_hz, #endif std::string ProduceDebugText(int sample_rate_hz, size_t num_channels) { - rtc::StringBuilder ss; + StringBuilder ss; ss << 
"Sample rate: " << sample_rate_hz; ss << ", number of channels: " << num_channels; return ss.Release(); @@ -293,7 +293,7 @@ TEST(BlockFramerDeathTest, WrongNumberOfPreceedingApiCallsForInsertBlock) { for (size_t num_channels : {1, 2, 8}) { for (auto rate : {16000, 32000, 48000}) { for (size_t num_calls = 0; num_calls < 4; ++num_calls) { - rtc::StringBuilder ss; + webrtc::StringBuilder ss; ss << "Sample rate: " << rate; ss << ", Num channels: " << num_channels; ss << ", Num preceeding InsertBlockAndExtractSubFrame calls: " diff --git a/modules/audio_processing/aec3/block_processor.cc b/modules/audio_processing/aec3/block_processor.cc index 63e3d9cc7c..c7f198e7a6 100644 --- a/modules/audio_processing/aec3/block_processor.cc +++ b/modules/audio_processing/aec3/block_processor.cc @@ -13,10 +13,10 @@ #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/audio/echo_canceller3_config.h" #include "api/audio/echo_control.h" #include "modules/audio_processing/aec3/aec3_common.h" @@ -76,7 +76,7 @@ class BlockProcessorImpl final : public BlockProcessor { BlockProcessorMetrics metrics_; RenderDelayBuffer::BufferingEvent render_event_; size_t capture_call_counter_ = 0; - absl::optional estimated_delay_; + std::optional estimated_delay_; }; std::atomic BlockProcessorImpl::instance_count_(0); @@ -84,8 +84,8 @@ std::atomic BlockProcessorImpl::instance_count_(0); BlockProcessorImpl::BlockProcessorImpl( const EchoCanceller3Config& config, int sample_rate_hz, - size_t num_render_channels, - size_t num_capture_channels, + size_t /* num_render_channels */, + size_t /* num_capture_channels */, std::unique_ptr render_buffer, std::unique_ptr delay_controller, std::unique_ptr echo_remover) @@ -172,9 +172,8 @@ void BlockProcessorImpl::ProcessCapture(bool echo_path_gain_change, bool delay_change = render_buffer_->AlignFromDelay(estimated_delay_->delay); if (delay_change) { - rtc::LoggingSeverity log_level = - config_.delay.log_warning_on_delay_changes ? rtc::LS_WARNING - : rtc::LS_INFO; + LoggingSeverity log_level = + config_.delay.log_warning_on_delay_changes ? LS_WARNING : LS_INFO; RTC_LOG_V(log_level) << "Delay changed to " << estimated_delay_->delay << " at block " << capture_call_counter_; echo_path_variability.delay_change = @@ -223,7 +222,7 @@ void BlockProcessorImpl::UpdateEchoLeakageStatus(bool leakage_detected) { void BlockProcessorImpl::GetMetrics(EchoControl::Metrics* metrics) const { echo_remover_->GetMetrics(metrics); constexpr int block_size_ms = 4; - absl::optional delay = render_buffer_->Delay(); + std::optional delay = render_buffer_->Delay(); metrics->delay_ms = delay ? 
static_cast(*delay) * block_size_ms : 0; } @@ -237,10 +236,12 @@ void BlockProcessorImpl::SetCaptureOutputUsage(bool capture_output_used) { } // namespace -BlockProcessor* BlockProcessor::Create(const EchoCanceller3Config& config, - int sample_rate_hz, - size_t num_render_channels, - size_t num_capture_channels) { +std::unique_ptr BlockProcessor::Create( + const Environment& env, + const EchoCanceller3Config& config, + int sample_rate_hz, + size_t num_render_channels, + size_t num_capture_channels) { std::unique_ptr render_buffer( RenderDelayBuffer::Create(config, sample_rate_hz, num_render_channels)); std::unique_ptr delay_controller; @@ -248,14 +249,15 @@ BlockProcessor* BlockProcessor::Create(const EchoCanceller3Config& config, delay_controller.reset(RenderDelayController::Create(config, sample_rate_hz, num_capture_channels)); } - std::unique_ptr echo_remover(EchoRemover::Create( - config, sample_rate_hz, num_render_channels, num_capture_channels)); + std::unique_ptr echo_remover = EchoRemover::Create( + env, config, sample_rate_hz, num_render_channels, num_capture_channels); return Create(config, sample_rate_hz, num_render_channels, num_capture_channels, std::move(render_buffer), std::move(delay_controller), std::move(echo_remover)); } -BlockProcessor* BlockProcessor::Create( +std::unique_ptr BlockProcessor::Create( + const Environment& env, const EchoCanceller3Config& config, int sample_rate_hz, size_t num_render_channels, @@ -266,14 +268,14 @@ BlockProcessor* BlockProcessor::Create( delay_controller.reset(RenderDelayController::Create(config, sample_rate_hz, num_capture_channels)); } - std::unique_ptr echo_remover(EchoRemover::Create( - config, sample_rate_hz, num_render_channels, num_capture_channels)); + std::unique_ptr echo_remover = EchoRemover::Create( + env, config, sample_rate_hz, num_render_channels, num_capture_channels); return Create(config, sample_rate_hz, num_render_channels, num_capture_channels, std::move(render_buffer), std::move(delay_controller), std::move(echo_remover)); } -BlockProcessor* BlockProcessor::Create( +std::unique_ptr BlockProcessor::Create( const EchoCanceller3Config& config, int sample_rate_hz, size_t num_render_channels, @@ -281,10 +283,10 @@ BlockProcessor* BlockProcessor::Create( std::unique_ptr render_buffer, std::unique_ptr delay_controller, std::unique_ptr echo_remover) { - return new BlockProcessorImpl(config, sample_rate_hz, num_render_channels, - num_capture_channels, std::move(render_buffer), - std::move(delay_controller), - std::move(echo_remover)); + return std::make_unique( + config, sample_rate_hz, num_render_channels, num_capture_channels, + std::move(render_buffer), std::move(delay_controller), + std::move(echo_remover)); } } // namespace webrtc diff --git a/modules/audio_processing/aec3/block_processor.h b/modules/audio_processing/aec3/block_processor.h index 01a83ae5f7..06669ab977 100644 --- a/modules/audio_processing/aec3/block_processor.h +++ b/modules/audio_processing/aec3/block_processor.h @@ -18,6 +18,7 @@ #include "api/audio/echo_canceller3_config.h" #include "api/audio/echo_control.h" +#include "api/environment/environment.h" #include "modules/audio_processing/aec3/block.h" #include "modules/audio_processing/aec3/echo_remover.h" #include "modules/audio_processing/aec3/render_delay_buffer.h" @@ -28,18 +29,21 @@ namespace webrtc { // Class for performing echo cancellation on 64 sample blocks of audio data. 
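The BlockProcessor factory above now returns std::unique_ptr<BlockProcessor> (the template argument is assumed, as angle brackets are stripped in this rendering) and takes an Environment as its first argument. A minimal usage sketch, mirroring RunBasicSetupAndApiCallTest in the unit-test hunk below:

#include <memory>

#include "api/audio/echo_canceller3_config.h"
#include "api/environment/environment_factory.h"
#include "modules/audio_processing/aec3/aec3_common.h"
#include "modules/audio_processing/aec3/block.h"
#include "modules/audio_processing/aec3/block_processor.h"

namespace webrtc {

void BlockProcessorUsageSketch() {
  // Create() now hands back owned memory directly instead of a raw pointer
  // that every caller had to wrap in a unique_ptr itself.
  std::unique_ptr<BlockProcessor> processor = BlockProcessor::Create(
      CreateEnvironment(), EchoCanceller3Config(), /*sample_rate_hz=*/16000,
      /*num_render_channels=*/1, /*num_capture_channels=*/1);
  Block block(NumBandsForRate(16000), /*num_channels=*/1);
  processor->BufferRender(block);
  processor->ProcessCapture(false, false, nullptr, &block);
}

}  // namespace webrtc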
class BlockProcessor { public: - static BlockProcessor* Create(const EchoCanceller3Config& config, - int sample_rate_hz, - size_t num_render_channels, - size_t num_capture_channels); + static std::unique_ptr Create( + const Environment& env, + const EchoCanceller3Config& config, + int sample_rate_hz, + size_t num_render_channels, + size_t num_capture_channels); // Only used for testing purposes. - static BlockProcessor* Create( + static std::unique_ptr Create( + const Environment& env, const EchoCanceller3Config& config, int sample_rate_hz, size_t num_render_channels, size_t num_capture_channels, std::unique_ptr render_buffer); - static BlockProcessor* Create( + static std::unique_ptr Create( const EchoCanceller3Config& config, int sample_rate_hz, size_t num_render_channels, diff --git a/modules/audio_processing/aec3/block_processor_unittest.cc b/modules/audio_processing/aec3/block_processor_unittest.cc index aba5c4186d..a56eaac9fa 100644 --- a/modules/audio_processing/aec3/block_processor_unittest.cc +++ b/modules/audio_processing/aec3/block_processor_unittest.cc @@ -14,6 +14,8 @@ #include #include +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/mock/mock_echo_remover.h" #include "modules/audio_processing/aec3/mock/mock_render_delay_buffer.h" @@ -36,13 +38,15 @@ using ::testing::StrictMock; // Verifies that the basic BlockProcessor functionality works and that the API // methods are callable. -void RunBasicSetupAndApiCallTest(int sample_rate_hz, int num_iterations) { +void RunBasicSetupAndApiCallTest(const Environment& env, + int sample_rate_hz, + int num_iterations) { constexpr size_t kNumRenderChannels = 1; constexpr size_t kNumCaptureChannels = 1; - std::unique_ptr block_processor( - BlockProcessor::Create(EchoCanceller3Config(), sample_rate_hz, - kNumRenderChannels, kNumCaptureChannels)); + std::unique_ptr block_processor = + BlockProcessor::Create(env, EchoCanceller3Config(), sample_rate_hz, + kNumRenderChannels, kNumCaptureChannels); Block block(NumBandsForRate(sample_rate_hz), kNumRenderChannels, 1000.f); for (int k = 0; k < num_iterations; ++k) { block_processor->BufferRender(block); @@ -52,43 +56,46 @@ void RunBasicSetupAndApiCallTest(int sample_rate_hz, int num_iterations) { } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -void RunRenderBlockSizeVerificationTest(int sample_rate_hz) { +void RunRenderBlockSizeVerificationTest(const Environment& env, + int sample_rate_hz) { constexpr size_t kNumRenderChannels = 1; constexpr size_t kNumCaptureChannels = 1; - std::unique_ptr block_processor( - BlockProcessor::Create(EchoCanceller3Config(), sample_rate_hz, - kNumRenderChannels, kNumCaptureChannels)); + std::unique_ptr block_processor = + BlockProcessor::Create(env, EchoCanceller3Config(), sample_rate_hz, + kNumRenderChannels, kNumCaptureChannels); Block block(NumBandsForRate(sample_rate_hz), kNumRenderChannels); EXPECT_DEATH(block_processor->BufferRender(block), ""); } -void RunRenderNumBandsVerificationTest(int sample_rate_hz) { +void RunRenderNumBandsVerificationTest(const Environment& env, + int sample_rate_hz) { constexpr size_t kNumRenderChannels = 1; constexpr size_t kNumCaptureChannels = 1; const size_t wrong_num_bands = NumBandsForRate(sample_rate_hz) < 3 ? 
NumBandsForRate(sample_rate_hz) + 1 : 1; - std::unique_ptr block_processor( - BlockProcessor::Create(EchoCanceller3Config(), sample_rate_hz, - kNumRenderChannels, kNumCaptureChannels)); + std::unique_ptr block_processor = + BlockProcessor::Create(env, EchoCanceller3Config(), sample_rate_hz, + kNumRenderChannels, kNumCaptureChannels); Block block(wrong_num_bands, kNumRenderChannels); EXPECT_DEATH(block_processor->BufferRender(block), ""); } -void RunCaptureNumBandsVerificationTest(int sample_rate_hz) { +void RunCaptureNumBandsVerificationTest(const Environment& env, + int sample_rate_hz) { constexpr size_t kNumRenderChannels = 1; constexpr size_t kNumCaptureChannels = 1; const size_t wrong_num_bands = NumBandsForRate(sample_rate_hz) < 3 ? NumBandsForRate(sample_rate_hz) + 1 : 1; - std::unique_ptr block_processor( - BlockProcessor::Create(EchoCanceller3Config(), sample_rate_hz, - kNumRenderChannels, kNumCaptureChannels)); + std::unique_ptr block_processor = + BlockProcessor::Create(env, EchoCanceller3Config(), sample_rate_hz, + kNumRenderChannels, kNumCaptureChannels); Block block(wrong_num_bands, kNumRenderChannels); EXPECT_DEATH(block_processor->ProcessCapture(false, false, nullptr, &block), @@ -97,14 +104,12 @@ void RunCaptureNumBandsVerificationTest(int sample_rate_hz) { #endif std::string ProduceDebugText(int sample_rate_hz) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Sample rate: " << sample_rate_hz; return ss.Release(); } -void FillSampleVector(int call_counter, - int delay, - rtc::ArrayView samples) { +void FillSampleVector(int call_counter, int delay, ArrayView samples) { for (size_t i = 0; i < samples.size(); ++i) { samples[i] = (call_counter - delay) * 10000.0f + i; } @@ -123,6 +128,7 @@ TEST(BlockProcessor, DISABLED_DelayControllerIntegration) { constexpr size_t kDelayHeadroom = 1; constexpr size_t kDelayInBlocks = kDelayInSamples / kBlockSize - kDelayHeadroom; + const Environment env = CreateEnvironment(); Random random_generator(42U); for (auto rate : {16000, 32000, 48000}) { SCOPED_TRACE(ProduceDebugText(rate)); @@ -138,9 +144,9 @@ TEST(BlockProcessor, DISABLED_DelayControllerIntegration) { EXPECT_CALL(*render_delay_buffer_mock, Delay()) .Times(kNumBlocks + 1) .WillRepeatedly(Return(0)); - std::unique_ptr block_processor(BlockProcessor::Create( - EchoCanceller3Config(), rate, kNumRenderChannels, kNumCaptureChannels, - std::move(render_delay_buffer_mock))); + std::unique_ptr block_processor = BlockProcessor::Create( + env, EchoCanceller3Config(), rate, kNumRenderChannels, + kNumCaptureChannels, std::move(render_delay_buffer_mock)); Block render_block(NumBandsForRate(rate), kNumRenderChannels); Block capture_block(NumBandsForRate(rate), kNumCaptureChannels); @@ -212,46 +218,51 @@ TEST(BlockProcessor, DISABLED_SubmoduleIntegration) { } TEST(BlockProcessor, BasicSetupAndApiCalls) { + const Environment env = CreateEnvironment(); for (auto rate : {16000, 32000, 48000}) { SCOPED_TRACE(ProduceDebugText(rate)); - RunBasicSetupAndApiCallTest(rate, 1); + RunBasicSetupAndApiCallTest(env, rate, 1); } } TEST(BlockProcessor, TestLongerCall) { - RunBasicSetupAndApiCallTest(16000, 20 * kNumBlocksPerSecond); + RunBasicSetupAndApiCallTest(CreateEnvironment(), 16000, + 20 * kNumBlocksPerSecond); } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) // TODO(gustaf): Re-enable the test once the issue with memory leaks during // DEATH tests on test bots has been fixed. 
TEST(BlockProcessorDeathTest, DISABLED_VerifyRenderBlockSizeCheck) { + const Environment env = CreateEnvironment(); for (auto rate : {16000, 32000, 48000}) { SCOPED_TRACE(ProduceDebugText(rate)); - RunRenderBlockSizeVerificationTest(rate); + RunRenderBlockSizeVerificationTest(env, rate); } } TEST(BlockProcessorDeathTest, VerifyRenderNumBandsCheck) { + const Environment env = CreateEnvironment(); for (auto rate : {16000, 32000, 48000}) { SCOPED_TRACE(ProduceDebugText(rate)); - RunRenderNumBandsVerificationTest(rate); + RunRenderNumBandsVerificationTest(env, rate); } } // TODO(peah): Verify the check for correct number of bands in the capture // signal. TEST(BlockProcessorDeathTest, VerifyCaptureNumBandsCheck) { + const Environment env = CreateEnvironment(); for (auto rate : {16000, 32000, 48000}) { SCOPED_TRACE(ProduceDebugText(rate)); - RunCaptureNumBandsVerificationTest(rate); + RunCaptureNumBandsVerificationTest(env, rate); } } // Verifiers that the verification for null ProcessCapture input works. TEST(BlockProcessorDeathTest, NullProcessCaptureParameter) { - EXPECT_DEATH(std::unique_ptr( - BlockProcessor::Create(EchoCanceller3Config(), 16000, 1, 1)) + EXPECT_DEATH(BlockProcessor::Create(CreateEnvironment(), + EchoCanceller3Config(), 16000, 1, 1) ->ProcessCapture(false, false, nullptr, nullptr), ""); } @@ -260,8 +271,8 @@ TEST(BlockProcessorDeathTest, NullProcessCaptureParameter) { // TODO(peah): Re-enable the test once the issue with memory leaks during DEATH // tests on test bots has been fixed. TEST(BlockProcessor, DISABLED_WrongSampleRate) { - EXPECT_DEATH(std::unique_ptr( - BlockProcessor::Create(EchoCanceller3Config(), 8001, 1, 1)), + EXPECT_DEATH(BlockProcessor::Create(CreateEnvironment(), + EchoCanceller3Config(), 8001, 1, 1), ""); } @@ -315,19 +326,18 @@ TEST(BlockProcessor, ExternalDelayAppliedCorrectlyWithInitialCaptureCalls) { } EXPECT_CALL(*echo_remover_mock_pointer, ProcessCapture) - .WillRepeatedly( - [](EchoPathVariability /*echo_path_variability*/, - bool /*capture_signal_saturation*/, - const absl::optional& /*external_delay*/, - RenderBuffer* render_buffer, Block* /*linear_output*/, - Block* capture) { - const auto& render = render_buffer->GetBlock(0); - const auto render_view = render.View(/*band=*/0, /*channel=*/0); - const auto capture_view = capture->View(/*band=*/0, /*channel=*/0); - for (size_t i = 0; i < kBlockSize; ++i) { - EXPECT_FLOAT_EQ(render_view[i], capture_view[i]); - } - }); + .WillRepeatedly([](EchoPathVariability /*echo_path_variability*/, + bool /*capture_signal_saturation*/, + const std::optional& /*external_delay*/, + RenderBuffer* render_buffer, Block* /*linear_output*/, + Block* capture) { + const auto& render = render_buffer->GetBlock(0); + const auto render_view = render.View(/*band=*/0, /*channel=*/0); + const auto capture_view = capture->View(/*band=*/0, /*channel=*/0); + for (size_t i = 0; i < kBlockSize; ++i) { + EXPECT_FLOAT_EQ(render_view[i], capture_view[i]); + } + }); FillSampleVector(++capture_call_counter, kDelayInBlocks, capture_block.View(/*band=*/0, /*capture=*/0)); diff --git a/modules/audio_processing/aec3/coarse_filter_update_gain_unittest.cc b/modules/audio_processing/aec3/coarse_filter_update_gain_unittest.cc index 55b79bb812..0f532b6e44 100644 --- a/modules/audio_processing/aec3/coarse_filter_update_gain_unittest.cc +++ b/modules/audio_processing/aec3/coarse_filter_update_gain_unittest.cc @@ -100,7 +100,7 @@ void RunFilterUpdateTest(int num_blocks_to_process, e_coarse.begin(), [&](float a, float b) { return a - b * kScale; 
}); std::for_each(e_coarse.begin(), e_coarse.end(), - [](float& a) { a = rtc::SafeClamp(a, -32768.f, 32767.f); }); + [](float& a) { a = SafeClamp(a, -32768.f, 32767.f); }); fft.ZeroPaddedFft(e_coarse, Aec3Fft::Window::kRectangular, &E_coarse); std::array render_power; @@ -118,13 +118,13 @@ void RunFilterUpdateTest(int num_blocks_to_process, } std::string ProduceDebugText(int filter_length_blocks) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Length: " << filter_length_blocks; return ss.Release(); } std::string ProduceDebugText(size_t delay, int filter_length_blocks) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Delay: " << delay << ", "; ss << ProduceDebugText(filter_length_blocks); return ss.Release(); @@ -226,7 +226,7 @@ INSTANTIATE_TEST_SUITE_P( ::testing::Values(1, 2, 4), [](const ::testing::TestParamInfo< CoarseFilterUpdateGainOneTwoFourRenderChannels::ParamType>& info) { - return (rtc::StringBuilder() << "Render" << info.param).str(); + return (StringBuilder() << "Render" << info.param).str(); }); // Verifies that the magnitude of the gain on average decreases for a diff --git a/modules/audio_processing/aec3/comfort_noise_generator.cc b/modules/audio_processing/aec3/comfort_noise_generator.cc index de5227c089..ed643f4400 100644 --- a/modules/audio_processing/aec3/comfort_noise_generator.cc +++ b/modules/audio_processing/aec3/comfort_noise_generator.cc @@ -122,10 +122,9 @@ ComfortNoiseGenerator::~ComfortNoiseGenerator() = default; void ComfortNoiseGenerator::Compute( bool saturated_capture, - rtc::ArrayView> - capture_spectrum, - rtc::ArrayView lower_band_noise, - rtc::ArrayView upper_band_noise) { + ArrayView> capture_spectrum, + ArrayView lower_band_noise, + ArrayView upper_band_noise) { const auto& Y2 = capture_spectrum; if (!saturated_capture) { diff --git a/modules/audio_processing/aec3/comfort_noise_generator.h b/modules/audio_processing/aec3/comfort_noise_generator.h index 2785b765c5..b5212fb68f 100644 --- a/modules/audio_processing/aec3/comfort_noise_generator.h +++ b/modules/audio_processing/aec3/comfort_noise_generator.h @@ -48,15 +48,14 @@ class ComfortNoiseGenerator { ComfortNoiseGenerator(const ComfortNoiseGenerator&) = delete; // Computes the comfort noise. - void Compute(bool saturated_capture, - rtc::ArrayView> - capture_spectrum, - rtc::ArrayView lower_band_noise, - rtc::ArrayView upper_band_noise); + void Compute( + bool saturated_capture, + ArrayView> capture_spectrum, + ArrayView lower_band_noise, + ArrayView upper_band_noise); // Returns the estimate of the background noise spectrum. 
- rtc::ArrayView> NoiseSpectrum() - const { + ArrayView> NoiseSpectrum() const { return N2_; } diff --git a/modules/audio_processing/aec3/comfort_noise_generator_unittest.cc b/modules/audio_processing/aec3/comfort_noise_generator_unittest.cc index a9da17559a..b74e2afdff 100644 --- a/modules/audio_processing/aec3/comfort_noise_generator_unittest.cc +++ b/modules/audio_processing/aec3/comfort_noise_generator_unittest.cc @@ -14,6 +14,7 @@ #include #include "api/audio/echo_canceller3_config.h" +#include "api/environment/environment_factory.h" #include "modules/audio_processing/aec3/aec_state.h" #include "rtc_base/random.h" #include "rtc_base/system/arch.h" @@ -36,7 +37,7 @@ TEST(ComfortNoiseGenerator, CorrectLevel) { constexpr size_t kNumChannels = 5; EchoCanceller3Config config; ComfortNoiseGenerator cng(config, DetectOptimization(), kNumChannels); - AecState aec_state(config, kNumChannels); + AecState aec_state(CreateEnvironment(), config, kNumChannels); std::vector> N2(kNumChannels); std::vector n_lower(kNumChannels); diff --git a/modules/audio_processing/aec3/config_selector.cc b/modules/audio_processing/aec3/config_selector.cc index c55344da79..e6bd8297ba 100644 --- a/modules/audio_processing/aec3/config_selector.cc +++ b/modules/audio_processing/aec3/config_selector.cc @@ -47,7 +47,7 @@ bool CompatibleConfigs(const EchoCanceller3Config& mono_config, ConfigSelector::ConfigSelector( const EchoCanceller3Config& config, - const absl::optional& multichannel_config, + const std::optional& multichannel_config, int num_render_input_channels) : config_(config), multichannel_config_(multichannel_config) { if (multichannel_config_.has_value()) { diff --git a/modules/audio_processing/aec3/config_selector.h b/modules/audio_processing/aec3/config_selector.h index 3b3f94e5ac..f56f7bacf8 100644 --- a/modules/audio_processing/aec3/config_selector.h +++ b/modules/audio_processing/aec3/config_selector.h @@ -11,7 +11,8 @@ #ifndef MODULES_AUDIO_PROCESSING_AEC3_CONFIG_SELECTOR_H_ #define MODULES_AUDIO_PROCESSING_AEC3_CONFIG_SELECTOR_H_ -#include "absl/types/optional.h" +#include + #include "api/audio/echo_canceller3_config.h" namespace webrtc { @@ -19,10 +20,9 @@ namespace webrtc { // Selects the config to use. class ConfigSelector { public: - ConfigSelector( - const EchoCanceller3Config& config, - const absl::optional& multichannel_config, - int num_render_input_channels); + ConfigSelector(const EchoCanceller3Config& config, + const std::optional& multichannel_config, + int num_render_input_channels); // Updates the config selection based on the detection of multichannel // content. 
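Throughout these files absl::optional is replaced by std::optional and the "absl/types/optional.h" include by <optional>; the semantics are identical. A minimal sketch of the new ConfigSelector construction, with the <EchoCanceller3Config> template argument assumed since angle brackets are stripped in this rendering:

#include <optional>

#include "api/audio/echo_canceller3_config.h"
#include "modules/audio_processing/aec3/config_selector.h"

namespace webrtc {

void ConfigSelectorSketch(const EchoCanceller3Config& mono_config) {
  // Previously spelled absl::optional; an empty optional still means that no
  // dedicated multichannel config was provided.
  std::optional<EchoCanceller3Config> multichannel_config;
  ConfigSelector selector(mono_config, multichannel_config,
                          /*num_render_input_channels=*/2);
}

}  // namespace webrtc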
@@ -32,7 +32,7 @@ class ConfigSelector { private: const EchoCanceller3Config config_; - const absl::optional multichannel_config_; + const std::optional multichannel_config_; const EchoCanceller3Config* active_config_ = nullptr; }; diff --git a/modules/audio_processing/aec3/config_selector_unittest.cc b/modules/audio_processing/aec3/config_selector_unittest.cc index 1826bfcace..91a60d87ca 100644 --- a/modules/audio_processing/aec3/config_selector_unittest.cc +++ b/modules/audio_processing/aec3/config_selector_unittest.cc @@ -10,9 +10,9 @@ #include "modules/audio_processing/aec3/config_selector.h" +#include #include -#include "absl/types/optional.h" #include "api/audio/echo_canceller3_config.h" #include "test/gtest.h" @@ -39,7 +39,7 @@ TEST_P(ConfigSelectorChannelsAndContentDetection, const auto [num_channels, detect_stereo_content] = GetParam(); EchoCanceller3Config config; config.multi_channel.detect_stereo_content = detect_stereo_content; - absl::optional multichannel_config; + std::optional multichannel_config; config.delay.default_delay = config.delay.default_delay + 1; const size_t custom_delay_value_in_config = config.delay.default_delay; @@ -63,7 +63,7 @@ TEST_P(ConfigSelectorChannelsAndContentDetection, const auto [num_channels, detect_stereo_content] = GetParam(); EchoCanceller3Config config; config.multi_channel.detect_stereo_content = detect_stereo_content; - absl::optional multichannel_config = config; + std::optional multichannel_config = config; config.delay.default_delay += 1; const size_t custom_delay_value_in_config = config.delay.default_delay; @@ -87,7 +87,7 @@ TEST_P(ConfigSelectorChannels, CorrectConfigUpdateBehavior) { const int num_channels = GetParam(); EchoCanceller3Config config; config.multi_channel.detect_stereo_content = true; - absl::optional multichannel_config = config; + std::optional multichannel_config = config; config.delay.default_delay += 1; const size_t custom_delay_value_in_config = config.delay.default_delay; diff --git a/modules/audio_processing/aec3/decimator.cc b/modules/audio_processing/aec3/decimator.cc index bd03237ca0..beac73e22e 100644 --- a/modules/audio_processing/aec3/decimator.cc +++ b/modules/audio_processing/aec3/decimator.cc @@ -69,8 +69,7 @@ Decimator::Decimator(size_t down_sampling_factor) down_sampling_factor_ == 8); } -void Decimator::Decimate(rtc::ArrayView in, - rtc::ArrayView out) { +void Decimator::Decimate(ArrayView in, ArrayView out) { RTC_DCHECK_EQ(kBlockSize, in.size()); RTC_DCHECK_EQ(kBlockSize / down_sampling_factor_, out.size()); std::array x; diff --git a/modules/audio_processing/aec3/decimator.h b/modules/audio_processing/aec3/decimator.h index dbff3d9fff..69a022cee9 100644 --- a/modules/audio_processing/aec3/decimator.h +++ b/modules/audio_processing/aec3/decimator.h @@ -29,7 +29,7 @@ class Decimator { Decimator& operator=(const Decimator&) = delete; // Downsamples the signal. 
- void Decimate(rtc::ArrayView in, rtc::ArrayView out); + void Decimate(ArrayView in, ArrayView out); private: const size_t down_sampling_factor_; diff --git a/modules/audio_processing/aec3/decimator_unittest.cc b/modules/audio_processing/aec3/decimator_unittest.cc index e6f5ea0403..3ff4294fbb 100644 --- a/modules/audio_processing/aec3/decimator_unittest.cc +++ b/modules/audio_processing/aec3/decimator_unittest.cc @@ -29,7 +29,7 @@ namespace webrtc { namespace { std::string ProduceDebugText(int sample_rate_hz) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Sample rate: " << sample_rate_hz; return ss.Release(); } @@ -59,18 +59,17 @@ void ProduceDecimatedSinusoidalOutputPower(int sample_rate_hz, for (size_t k = 0; k < kNumBlocks; ++k) { std::vector sub_block(sub_block_size); decimator.Decimate( - rtc::ArrayView(&input[k * kBlockSize], kBlockSize), - sub_block); + ArrayView(&input[k * kBlockSize], kBlockSize), sub_block); std::copy(sub_block.begin(), sub_block.end(), output.begin() + k * sub_block_size); } ASSERT_GT(kNumBlocks, kNumStartupBlocks); - rtc::ArrayView input_to_evaluate( + ArrayView input_to_evaluate( &input[kNumStartupBlocks * kBlockSize], (kNumBlocks - kNumStartupBlocks) * kBlockSize); - rtc::ArrayView output_to_evaluate( + ArrayView output_to_evaluate( &output[kNumStartupBlocks * sub_block_size], (kNumBlocks - kNumStartupBlocks) * sub_block_size); *input_power = diff --git a/modules/audio_processing/aec3/dominant_nearend_detector.cc b/modules/audio_processing/aec3/dominant_nearend_detector.cc index 40073cf615..815e1901f8 100644 --- a/modules/audio_processing/aec3/dominant_nearend_detector.cc +++ b/modules/audio_processing/aec3/dominant_nearend_detector.cc @@ -27,16 +27,15 @@ DominantNearendDetector::DominantNearendDetector( hold_counters_(num_capture_channels_) {} void DominantNearendDetector::Update( - rtc::ArrayView> - nearend_spectrum, - rtc::ArrayView> + ArrayView> nearend_spectrum, + ArrayView> residual_echo_spectrum, - rtc::ArrayView> + ArrayView> comfort_noise_spectrum, bool initial_state) { nearend_state_ = false; - auto low_frequency_energy = [](rtc::ArrayView spectrum) { + auto low_frequency_energy = [](ArrayView spectrum) { RTC_DCHECK_LE(16, spectrum.size()); return std::accumulate(spectrum.begin() + 1, spectrum.begin() + 16, 0.f); }; diff --git a/modules/audio_processing/aec3/dominant_nearend_detector.h b/modules/audio_processing/aec3/dominant_nearend_detector.h index 046d1488d6..625f3d0af3 100644 --- a/modules/audio_processing/aec3/dominant_nearend_detector.h +++ b/modules/audio_processing/aec3/dominant_nearend_detector.h @@ -29,13 +29,13 @@ class DominantNearendDetector : public NearendDetector { bool IsNearendState() const override { return nearend_state_; } // Updates the state selection based on latest spectral estimates. 
- void Update(rtc::ArrayView> - nearend_spectrum, - rtc::ArrayView> - residual_echo_spectrum, - rtc::ArrayView> - comfort_noise_spectrum, - bool initial_state) override; + void Update( + ArrayView> nearend_spectrum, + ArrayView> + residual_echo_spectrum, + ArrayView> + comfort_noise_spectrum, + bool initial_state) override; private: const float enr_threshold_; diff --git a/modules/audio_processing/aec3/echo_audibility.cc b/modules/audio_processing/aec3/echo_audibility.cc index 142a33d5e0..b0c26ba39c 100644 --- a/modules/audio_processing/aec3/echo_audibility.cc +++ b/modules/audio_processing/aec3/echo_audibility.cc @@ -30,7 +30,7 @@ EchoAudibility::EchoAudibility(bool use_render_stationarity_at_init) EchoAudibility::~EchoAudibility() = default; void EchoAudibility::Update(const RenderBuffer& render_buffer, - rtc::ArrayView average_reverb, + ArrayView average_reverb, int delay_blocks, bool external_delay_seen) { UpdateRenderNoiseEstimator(render_buffer.GetSpectrumBuffer(), @@ -45,12 +45,12 @@ void EchoAudibility::Update(const RenderBuffer& render_buffer, void EchoAudibility::Reset() { render_stationarity_.Reset(); non_zero_render_seen_ = false; - render_spectrum_write_prev_ = absl::nullopt; + render_spectrum_write_prev_ = std::nullopt; } void EchoAudibility::UpdateRenderStationarityFlags( const RenderBuffer& render_buffer, - rtc::ArrayView average_reverb, + ArrayView average_reverb, int min_channel_delay_blocks) { const SpectrumBuffer& spectrum_buffer = render_buffer.GetSpectrumBuffer(); int idx_at_delay = spectrum_buffer.OffsetIndex(spectrum_buffer.read, @@ -98,7 +98,7 @@ bool EchoAudibility::IsRenderTooLow(const BlockBuffer& block_buffer) { idx = block_buffer.IncIndex(idx)) { float max_abs_over_channels = 0.f; for (int ch = 0; ch < num_render_channels; ++ch) { - rtc::ArrayView block = + ArrayView block = block_buffer.buffer[idx].View(/*band=*/0, /*channel=*/ch); auto r = std::minmax_element(block.cbegin(), block.cend()); float max_abs_channel = diff --git a/modules/audio_processing/aec3/echo_audibility.h b/modules/audio_processing/aec3/echo_audibility.h index b9d6f87d2a..44df9266c1 100644 --- a/modules/audio_processing/aec3/echo_audibility.h +++ b/modules/audio_processing/aec3/echo_audibility.h @@ -13,7 +13,8 @@ #include -#include "absl/types/optional.h" +#include + #include "api/array_view.h" #include "modules/audio_processing/aec3/block_buffer.h" #include "modules/audio_processing/aec3/render_buffer.h" @@ -32,12 +33,12 @@ class EchoAudibility { // Feed new render data to the echo audibility estimator. void Update(const RenderBuffer& render_buffer, - rtc::ArrayView average_reverb, + ArrayView average_reverb, int min_channel_delay_blocks, bool external_delay_seen); // Get the residual echo scaling. void GetResidualEchoScaling(bool filter_has_had_time_to_converge, - rtc::ArrayView residual_scaling) const { + ArrayView residual_scaling) const { for (size_t band = 0; band < residual_scaling.size(); ++band) { if (render_stationarity_.IsBandStationary(band) && (filter_has_had_time_to_converge || @@ -60,7 +61,7 @@ class EchoAudibility { // Updates the render stationarity flags for the current frame. void UpdateRenderStationarityFlags(const RenderBuffer& render_buffer, - rtc::ArrayView average_reverb, + ArrayView average_reverb, int delay_blocks); // Updates the noise estimator with the new render data since the previous @@ -73,7 +74,7 @@ class EchoAudibility { // values. 
bool IsRenderTooLow(const BlockBuffer& block_buffer); - absl::optional render_spectrum_write_prev_; + std::optional render_spectrum_write_prev_; int render_block_write_prev_; bool non_zero_render_seen_; const bool use_render_stationarity_at_init_; diff --git a/modules/audio_processing/aec3/echo_canceller3.cc b/modules/audio_processing/aec3/echo_canceller3.cc index e8e2175994..b6752efe73 100644 --- a/modules/audio_processing/aec3/echo_canceller3.cc +++ b/modules/audio_processing/aec3/echo_canceller3.cc @@ -10,15 +10,33 @@ #include "modules/audio_processing/aec3/echo_canceller3.h" #include +#include +#include +#include +#include +#include #include +#include #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/audio/echo_canceller3_config.h" +#include "api/audio/echo_control.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" #include "modules/audio_processing/aec3/aec3_common.h" +#include "modules/audio_processing/aec3/block.h" +#include "modules/audio_processing/aec3/block_delay_buffer.h" +#include "modules/audio_processing/aec3/block_framer.h" +#include "modules/audio_processing/aec3/block_processor.h" +#include "modules/audio_processing/aec3/frame_blocker.h" #include "modules/audio_processing/high_pass_filter.h" #include "modules/audio_processing/logging/apm_data_dumper.h" +#include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" +#include "rtc_base/race_checker.h" +#include "rtc_base/swap_queue.h" namespace webrtc { @@ -26,7 +44,7 @@ namespace { enum class EchoCanceller3ApiCall { kCapture, kRender }; -bool DetectSaturation(rtc::ArrayView y) { +bool DetectSaturation(ArrayView y) { for (size_t k = 0; k < y.size(); ++k) { if (y[k] >= 32700.0f || y[k] <= -32700.0f) { return true; @@ -38,11 +56,12 @@ bool DetectSaturation(rtc::ArrayView y) { // Retrieves a value from a field trial if it is available. If no value is // present, the default value is returned. If the retrieved value is beyond the // specified limits, the default value is returned instead. 
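The echo_canceller3.cc hunks below replace the process-global field_trial::IsEnabled()/FindFullName() helpers with an injected FieldTrialsView, reached through Environment::field_trials(). A minimal sketch of the new lookup pattern, using only calls that appear in the hunks; the kill switch shown is one of those handled in AdjustConfig:

#include <string>

#include "api/audio/echo_canceller3_config.h"
#include "api/field_trials_view.h"

namespace webrtc {

EchoCanceller3Config AdjustConfigSketch(const EchoCanceller3Config& config,
                                        const FieldTrialsView& field_trials) {
  EchoCanceller3Config adjusted = config;
  // Boolean kill switches: FieldTrialsView::IsEnabled() replaces the old
  // field_trial::IsEnabled() global.
  if (field_trials.IsEnabled("WebRTC-Aec3StereoContentDetectionKillSwitch")) {
    adjusted.multi_channel.detect_stereo_content = false;
  }
  // String-valued trials: Lookup() replaces field_trial::FindFullName(); the
  // result is then parsed with FieldTrialParameter as in the hunks below.
  const std::string reverb_trial =
      field_trials.Lookup("WebRTC-Aec3UseNearendReverbLen");
  (void)reverb_trial;
  return adjusted;
}

}  // namespace webrtc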
-void RetrieveFieldTrialValue(absl::string_view trial_name, +void RetrieveFieldTrialValue(const FieldTrialsView& field_trials, + absl::string_view trial_name, float min, float max, float* value_to_update) { - const std::string field_trial_str = field_trial::FindFullName(trial_name); + const std::string field_trial_str = field_trials.Lookup(trial_name); FieldTrialParameter field_trial_param(/*key=*/"", *value_to_update); @@ -58,11 +77,12 @@ void RetrieveFieldTrialValue(absl::string_view trial_name, } } -void RetrieveFieldTrialValue(absl::string_view trial_name, +void RetrieveFieldTrialValue(const FieldTrialsView& field_trials, + absl::string_view trial_name, int min, int max, int* value_to_update) { - const std::string field_trial_str = field_trial::FindFullName(trial_name); + const std::string field_trial_str = field_trials.Lookup(trial_name); FieldTrialParameter field_trial_param(/*key=*/"", *value_to_update); @@ -81,14 +101,14 @@ void RetrieveFieldTrialValue(absl::string_view trial_name, void FillSubFrameView( AudioBuffer* frame, size_t sub_frame_index, - std::vector>>* sub_frame_view) { + std::vector>>* sub_frame_view) { RTC_DCHECK_GE(1, sub_frame_index); RTC_DCHECK_LE(0, sub_frame_index); RTC_DCHECK_EQ(frame->num_bands(), sub_frame_view->size()); RTC_DCHECK_EQ(frame->num_channels(), (*sub_frame_view)[0].size()); for (size_t band = 0; band < sub_frame_view->size(); ++band) { for (size_t channel = 0; channel < (*sub_frame_view)[0].size(); ++channel) { - (*sub_frame_view)[band][channel] = rtc::ArrayView( + (*sub_frame_view)[band][channel] = ArrayView( &frame->split_bands(channel)[band][sub_frame_index * kSubFrameLength], kSubFrameLength); } @@ -99,7 +119,7 @@ void FillSubFrameView( bool proper_downmix_needed, std::vector>>* frame, size_t sub_frame_index, - std::vector>>* sub_frame_view) { + std::vector>>* sub_frame_view) { RTC_DCHECK_GE(1, sub_frame_index); RTC_DCHECK_EQ(frame->size(), sub_frame_view->size()); const size_t frame_num_channels = (*frame)[0].size(); @@ -127,7 +147,7 @@ void FillSubFrameView( } } for (size_t band = 0; band < frame->size(); ++band) { - (*sub_frame_view)[band][/*channel=*/0] = rtc::ArrayView( + (*sub_frame_view)[band][/*channel=*/0] = ArrayView( &(*frame)[band][/*channel=*/0][sub_frame_index * kSubFrameLength], kSubFrameLength); } @@ -135,7 +155,7 @@ void FillSubFrameView( RTC_DCHECK_EQ(frame_num_channels, sub_frame_num_channels); for (size_t band = 0; band < frame->size(); ++band) { for (size_t channel = 0; channel < (*frame)[band].size(); ++channel) { - (*sub_frame_view)[band][channel] = rtc::ArrayView( + (*sub_frame_view)[band][channel] = ArrayView( &(*frame)[band][channel][sub_frame_index * kSubFrameLength], kSubFrameLength); } @@ -155,10 +175,9 @@ void ProcessCaptureFrameContent( BlockFramer* output_framer, BlockProcessor* block_processor, Block* linear_output_block, - std::vector>>* - linear_output_sub_frame_view, + std::vector>>* linear_output_sub_frame_view, Block* capture_block, - std::vector>>* capture_sub_frame_view) { + std::vector>>* capture_sub_frame_view) { FillSubFrameView(capture, sub_frame_index, capture_sub_frame_view); if (linear_output) { @@ -218,7 +237,7 @@ void BufferRenderFrameContent( FrameBlocker* render_blocker, BlockProcessor* block_processor, Block* block, - std::vector>>* sub_frame_view) { + std::vector>>* sub_frame_view) { FillSubFrameView(proper_downmix_needed, render_frame, sub_frame_index, sub_frame_view); render_blocker->InsertSubFrameAndExtractBlock(*sub_frame_view, block); @@ -244,7 +263,7 @@ void CopyBufferIntoFrame(const 
AudioBuffer& buffer, RTC_DCHECK_EQ(AudioBuffer::kSplitBandSize, (*frame)[0][0].size()); for (size_t band = 0; band < num_bands; ++band) { for (size_t channel = 0; channel < num_channels; ++channel) { - rtc::ArrayView buffer_view( + ArrayView buffer_view( &buffer.split_bands_const(channel)[band][0], AudioBuffer::kSplitBandSize); std::copy(buffer_view.begin(), buffer_view.end(), @@ -256,61 +275,62 @@ void CopyBufferIntoFrame(const AudioBuffer& buffer, } // namespace // TODO(webrtc:5298): Move this to a separate file. -EchoCanceller3Config AdjustConfig(const EchoCanceller3Config& config) { +EchoCanceller3Config AdjustConfig(const EchoCanceller3Config& config, + const FieldTrialsView& field_trials) { EchoCanceller3Config adjusted_cfg = config; - if (field_trial::IsEnabled("WebRTC-Aec3StereoContentDetectionKillSwitch")) { + if (field_trials.IsEnabled("WebRTC-Aec3StereoContentDetectionKillSwitch")) { adjusted_cfg.multi_channel.detect_stereo_content = false; } - if (field_trial::IsEnabled("WebRTC-Aec3AntiHowlingMinimizationKillSwitch")) { + if (field_trials.IsEnabled("WebRTC-Aec3AntiHowlingMinimizationKillSwitch")) { adjusted_cfg.suppressor.high_bands_suppression .anti_howling_activation_threshold = 25.f; adjusted_cfg.suppressor.high_bands_suppression.anti_howling_gain = 0.01f; } - if (field_trial::IsEnabled("WebRTC-Aec3UseShortConfigChangeDuration")) { + if (field_trials.IsEnabled("WebRTC-Aec3UseShortConfigChangeDuration")) { adjusted_cfg.filter.config_change_duration_blocks = 10; } - if (field_trial::IsEnabled("WebRTC-Aec3UseZeroInitialStateDuration")) { + if (field_trials.IsEnabled("WebRTC-Aec3UseZeroInitialStateDuration")) { adjusted_cfg.filter.initial_state_seconds = 0.f; - } else if (field_trial::IsEnabled( + } else if (field_trials.IsEnabled( "WebRTC-Aec3UseDot1SecondsInitialStateDuration")) { adjusted_cfg.filter.initial_state_seconds = .1f; - } else if (field_trial::IsEnabled( + } else if (field_trials.IsEnabled( "WebRTC-Aec3UseDot2SecondsInitialStateDuration")) { adjusted_cfg.filter.initial_state_seconds = .2f; - } else if (field_trial::IsEnabled( + } else if (field_trials.IsEnabled( "WebRTC-Aec3UseDot3SecondsInitialStateDuration")) { adjusted_cfg.filter.initial_state_seconds = .3f; - } else if (field_trial::IsEnabled( + } else if (field_trials.IsEnabled( "WebRTC-Aec3UseDot6SecondsInitialStateDuration")) { adjusted_cfg.filter.initial_state_seconds = .6f; - } else if (field_trial::IsEnabled( + } else if (field_trials.IsEnabled( "WebRTC-Aec3UseDot9SecondsInitialStateDuration")) { adjusted_cfg.filter.initial_state_seconds = .9f; - } else if (field_trial::IsEnabled( + } else if (field_trials.IsEnabled( "WebRTC-Aec3Use1Dot2SecondsInitialStateDuration")) { adjusted_cfg.filter.initial_state_seconds = 1.2f; - } else if (field_trial::IsEnabled( + } else if (field_trials.IsEnabled( "WebRTC-Aec3Use1Dot6SecondsInitialStateDuration")) { adjusted_cfg.filter.initial_state_seconds = 1.6f; - } else if (field_trial::IsEnabled( + } else if (field_trials.IsEnabled( "WebRTC-Aec3Use2Dot0SecondsInitialStateDuration")) { adjusted_cfg.filter.initial_state_seconds = 2.0f; } - if (field_trial::IsEnabled("WebRTC-Aec3HighPassFilterEchoReference")) { + if (field_trials.IsEnabled("WebRTC-Aec3HighPassFilterEchoReference")) { adjusted_cfg.filter.high_pass_filter_echo_reference = true; } - if (field_trial::IsEnabled("WebRTC-Aec3EchoSaturationDetectionKillSwitch")) { + if (field_trials.IsEnabled("WebRTC-Aec3EchoSaturationDetectionKillSwitch")) { adjusted_cfg.ep_strength.echo_can_saturate = false; } const std::string 
use_nearend_reverb_len_tunings = - field_trial::FindFullName("WebRTC-Aec3UseNearendReverbLen"); + field_trials.Lookup("WebRTC-Aec3UseNearendReverbLen"); FieldTrialParameter nearend_reverb_default_len( "default_len", adjusted_cfg.ep_strength.default_len); FieldTrialParameter nearend_reverb_nearend_len( @@ -328,146 +348,138 @@ EchoCanceller3Config AdjustConfig(const EchoCanceller3Config& config) { static_cast(nearend_reverb_nearend_len.Get()); } - if (field_trial::IsEnabled("WebRTC-Aec3ConservativeTailFreqResponse")) { + if (field_trials.IsEnabled("WebRTC-Aec3ConservativeTailFreqResponse")) { adjusted_cfg.ep_strength.use_conservative_tail_frequency_response = true; } - if (field_trial::IsDisabled("WebRTC-Aec3ConservativeTailFreqResponse")) { + if (field_trials.IsDisabled("WebRTC-Aec3ConservativeTailFreqResponse")) { adjusted_cfg.ep_strength.use_conservative_tail_frequency_response = false; } - if (field_trial::IsEnabled("WebRTC-Aec3ShortHeadroomKillSwitch")) { + if (field_trials.IsEnabled("WebRTC-Aec3ShortHeadroomKillSwitch")) { // Two blocks headroom. adjusted_cfg.delay.delay_headroom_samples = kBlockSize * 2; } - if (field_trial::IsEnabled("WebRTC-Aec3ClampInstQualityToZeroKillSwitch")) { + if (field_trials.IsEnabled("WebRTC-Aec3ClampInstQualityToZeroKillSwitch")) { adjusted_cfg.erle.clamp_quality_estimate_to_zero = false; } - if (field_trial::IsEnabled("WebRTC-Aec3ClampInstQualityToOneKillSwitch")) { + if (field_trials.IsEnabled("WebRTC-Aec3ClampInstQualityToOneKillSwitch")) { adjusted_cfg.erle.clamp_quality_estimate_to_one = false; } - if (field_trial::IsEnabled("WebRTC-Aec3OnsetDetectionKillSwitch")) { + if (field_trials.IsEnabled("WebRTC-Aec3OnsetDetectionKillSwitch")) { adjusted_cfg.erle.onset_detection = false; } - if (field_trial::IsEnabled( + if (field_trials.IsEnabled( "WebRTC-Aec3EnforceRenderDelayEstimationDownmixing")) { adjusted_cfg.delay.render_alignment_mixing.downmix = true; adjusted_cfg.delay.render_alignment_mixing.adaptive_selection = false; } - if (field_trial::IsEnabled( + if (field_trials.IsEnabled( "WebRTC-Aec3EnforceCaptureDelayEstimationDownmixing")) { adjusted_cfg.delay.capture_alignment_mixing.downmix = true; adjusted_cfg.delay.capture_alignment_mixing.adaptive_selection = false; } - if (field_trial::IsEnabled( + if (field_trials.IsEnabled( "WebRTC-Aec3EnforceCaptureDelayEstimationLeftRightPrioritization")) { adjusted_cfg.delay.capture_alignment_mixing.prefer_first_two_channels = true; } - if (field_trial::IsEnabled( + if (field_trials.IsEnabled( "WebRTC-" "Aec3RenderDelayEstimationLeftRightPrioritizationKillSwitch")) { adjusted_cfg.delay.capture_alignment_mixing.prefer_first_two_channels = false; } - if (field_trial::IsEnabled("WebRTC-Aec3DelayEstimatorDetectPreEcho")) { - adjusted_cfg.delay.detect_pre_echo = true; - } - - if (field_trial::IsDisabled("WebRTC-Aec3DelayEstimatorDetectPreEcho")) { - adjusted_cfg.delay.detect_pre_echo = false; - } - - if (field_trial::IsEnabled("WebRTC-Aec3SensitiveDominantNearendActivation")) { + if (field_trials.IsEnabled("WebRTC-Aec3SensitiveDominantNearendActivation")) { adjusted_cfg.suppressor.dominant_nearend_detection.enr_threshold = 0.5f; - } else if (field_trial::IsEnabled( + } else if (field_trials.IsEnabled( "WebRTC-Aec3VerySensitiveDominantNearendActivation")) { adjusted_cfg.suppressor.dominant_nearend_detection.enr_threshold = 0.75f; } - if (field_trial::IsEnabled("WebRTC-Aec3TransparentAntiHowlingGain")) { + if (field_trials.IsEnabled("WebRTC-Aec3TransparentAntiHowlingGain")) { 
adjusted_cfg.suppressor.high_bands_suppression.anti_howling_gain = 1.f; } - if (field_trial::IsEnabled( + if (field_trials.IsEnabled( "WebRTC-Aec3EnforceMoreTransparentNormalSuppressorTuning")) { adjusted_cfg.suppressor.normal_tuning.mask_lf.enr_transparent = 0.4f; adjusted_cfg.suppressor.normal_tuning.mask_lf.enr_suppress = 0.5f; } - if (field_trial::IsEnabled( + if (field_trials.IsEnabled( "WebRTC-Aec3EnforceMoreTransparentNearendSuppressorTuning")) { adjusted_cfg.suppressor.nearend_tuning.mask_lf.enr_transparent = 1.29f; adjusted_cfg.suppressor.nearend_tuning.mask_lf.enr_suppress = 1.3f; } - if (field_trial::IsEnabled( + if (field_trials.IsEnabled( "WebRTC-Aec3EnforceMoreTransparentNormalSuppressorHfTuning")) { adjusted_cfg.suppressor.normal_tuning.mask_hf.enr_transparent = 0.3f; adjusted_cfg.suppressor.normal_tuning.mask_hf.enr_suppress = 0.4f; } - if (field_trial::IsEnabled( + if (field_trials.IsEnabled( "WebRTC-Aec3EnforceMoreTransparentNearendSuppressorHfTuning")) { adjusted_cfg.suppressor.nearend_tuning.mask_hf.enr_transparent = 1.09f; adjusted_cfg.suppressor.nearend_tuning.mask_hf.enr_suppress = 1.1f; } - if (field_trial::IsEnabled( + if (field_trials.IsEnabled( "WebRTC-Aec3EnforceRapidlyAdjustingNormalSuppressorTunings")) { adjusted_cfg.suppressor.normal_tuning.max_inc_factor = 2.5f; } - if (field_trial::IsEnabled( + if (field_trials.IsEnabled( "WebRTC-Aec3EnforceRapidlyAdjustingNearendSuppressorTunings")) { adjusted_cfg.suppressor.nearend_tuning.max_inc_factor = 2.5f; } - if (field_trial::IsEnabled( + if (field_trials.IsEnabled( "WebRTC-Aec3EnforceSlowlyAdjustingNormalSuppressorTunings")) { adjusted_cfg.suppressor.normal_tuning.max_dec_factor_lf = .2f; } - if (field_trial::IsEnabled( + if (field_trials.IsEnabled( "WebRTC-Aec3EnforceSlowlyAdjustingNearendSuppressorTunings")) { adjusted_cfg.suppressor.nearend_tuning.max_dec_factor_lf = .2f; } - if (field_trial::IsEnabled("WebRTC-Aec3EnforceConservativeHfSuppression")) { + if (field_trials.IsEnabled("WebRTC-Aec3EnforceConservativeHfSuppression")) { adjusted_cfg.suppressor.conservative_hf_suppression = true; } - if (field_trial::IsEnabled("WebRTC-Aec3EnforceStationarityProperties")) { + if (field_trials.IsEnabled("WebRTC-Aec3EnforceStationarityProperties")) { adjusted_cfg.echo_audibility.use_stationarity_properties = true; } - if (field_trial::IsEnabled( + if (field_trials.IsEnabled( "WebRTC-Aec3EnforceStationarityPropertiesAtInit")) { adjusted_cfg.echo_audibility.use_stationarity_properties_at_init = true; } - if (field_trial::IsEnabled("WebRTC-Aec3EnforceLowActiveRenderLimit")) { + if (field_trials.IsEnabled("WebRTC-Aec3EnforceLowActiveRenderLimit")) { adjusted_cfg.render_levels.active_render_limit = 50.f; - } else if (field_trial::IsEnabled( + } else if (field_trials.IsEnabled( "WebRTC-Aec3EnforceVeryLowActiveRenderLimit")) { adjusted_cfg.render_levels.active_render_limit = 30.f; } - if (field_trial::IsEnabled("WebRTC-Aec3NonlinearModeReverbKillSwitch")) { + if (field_trials.IsEnabled("WebRTC-Aec3NonlinearModeReverbKillSwitch")) { adjusted_cfg.echo_model.model_reverb_in_nonlinear_mode = false; } // Field-trial based override for the whole suppressor tuning. 
const std::string suppressor_tuning_override_trial_name = - field_trial::FindFullName("WebRTC-Aec3SuppressorTuningOverride"); + field_trials.Lookup("WebRTC-Aec3SuppressorTuningOverride"); FieldTrialParameter nearend_tuning_mask_lf_enr_transparent( "nearend_tuning_mask_lf_enr_transparent", @@ -576,71 +588,86 @@ EchoCanceller3Config AdjustConfig(const EchoCanceller3Config& config) { // Field trial-based overrides of individual suppressor parameters. RetrieveFieldTrialValue( - "WebRTC-Aec3SuppressorNearendLfMaskTransparentOverride", 0.f, 10.f, + field_trials, "WebRTC-Aec3SuppressorNearendLfMaskTransparentOverride", + 0.f, 10.f, &adjusted_cfg.suppressor.nearend_tuning.mask_lf.enr_transparent); RetrieveFieldTrialValue( - "WebRTC-Aec3SuppressorNearendLfMaskSuppressOverride", 0.f, 10.f, - &adjusted_cfg.suppressor.nearend_tuning.mask_lf.enr_suppress); + field_trials, "WebRTC-Aec3SuppressorNearendLfMaskSuppressOverride", 0.f, + 10.f, &adjusted_cfg.suppressor.nearend_tuning.mask_lf.enr_suppress); RetrieveFieldTrialValue( - "WebRTC-Aec3SuppressorNearendHfMaskTransparentOverride", 0.f, 10.f, + field_trials, "WebRTC-Aec3SuppressorNearendHfMaskTransparentOverride", + 0.f, 10.f, &adjusted_cfg.suppressor.nearend_tuning.mask_hf.enr_transparent); RetrieveFieldTrialValue( - "WebRTC-Aec3SuppressorNearendHfMaskSuppressOverride", 0.f, 10.f, - &adjusted_cfg.suppressor.nearend_tuning.mask_hf.enr_suppress); + field_trials, "WebRTC-Aec3SuppressorNearendHfMaskSuppressOverride", 0.f, + 10.f, &adjusted_cfg.suppressor.nearend_tuning.mask_hf.enr_suppress); RetrieveFieldTrialValue( - "WebRTC-Aec3SuppressorNearendMaxIncFactorOverride", 0.f, 10.f, - &adjusted_cfg.suppressor.nearend_tuning.max_inc_factor); + field_trials, "WebRTC-Aec3SuppressorNearendMaxIncFactorOverride", 0.f, + 10.f, &adjusted_cfg.suppressor.nearend_tuning.max_inc_factor); RetrieveFieldTrialValue( - "WebRTC-Aec3SuppressorNearendMaxDecFactorLfOverride", 0.f, 10.f, - &adjusted_cfg.suppressor.nearend_tuning.max_dec_factor_lf); + field_trials, "WebRTC-Aec3SuppressorNearendMaxDecFactorLfOverride", 0.f, + 10.f, &adjusted_cfg.suppressor.nearend_tuning.max_dec_factor_lf); RetrieveFieldTrialValue( - "WebRTC-Aec3SuppressorNormalLfMaskTransparentOverride", 0.f, 10.f, - &adjusted_cfg.suppressor.normal_tuning.mask_lf.enr_transparent); + field_trials, "WebRTC-Aec3SuppressorNormalLfMaskTransparentOverride", 0.f, + 10.f, &adjusted_cfg.suppressor.normal_tuning.mask_lf.enr_transparent); RetrieveFieldTrialValue( - "WebRTC-Aec3SuppressorNormalLfMaskSuppressOverride", 0.f, 10.f, - &adjusted_cfg.suppressor.normal_tuning.mask_lf.enr_suppress); + field_trials, "WebRTC-Aec3SuppressorNormalLfMaskSuppressOverride", 0.f, + 10.f, &adjusted_cfg.suppressor.normal_tuning.mask_lf.enr_suppress); RetrieveFieldTrialValue( - "WebRTC-Aec3SuppressorNormalHfMaskTransparentOverride", 0.f, 10.f, - &adjusted_cfg.suppressor.normal_tuning.mask_hf.enr_transparent); + field_trials, "WebRTC-Aec3SuppressorNormalHfMaskTransparentOverride", 0.f, + 10.f, &adjusted_cfg.suppressor.normal_tuning.mask_hf.enr_transparent); RetrieveFieldTrialValue( - "WebRTC-Aec3SuppressorNormalHfMaskSuppressOverride", 0.f, 10.f, - &adjusted_cfg.suppressor.normal_tuning.mask_hf.enr_suppress); + field_trials, "WebRTC-Aec3SuppressorNormalHfMaskSuppressOverride", 0.f, + 10.f, &adjusted_cfg.suppressor.normal_tuning.mask_hf.enr_suppress); RetrieveFieldTrialValue( - "WebRTC-Aec3SuppressorNormalMaxIncFactorOverride", 0.f, 10.f, - &adjusted_cfg.suppressor.normal_tuning.max_inc_factor); + field_trials, 
"WebRTC-Aec3SuppressorNormalMaxIncFactorOverride", 0.f, + 10.f, &adjusted_cfg.suppressor.normal_tuning.max_inc_factor); RetrieveFieldTrialValue( - "WebRTC-Aec3SuppressorNormalMaxDecFactorLfOverride", 0.f, 10.f, - &adjusted_cfg.suppressor.normal_tuning.max_dec_factor_lf); + field_trials, "WebRTC-Aec3SuppressorNormalMaxDecFactorLfOverride", 0.f, + 10.f, &adjusted_cfg.suppressor.normal_tuning.max_dec_factor_lf); RetrieveFieldTrialValue( - "WebRTC-Aec3SuppressorDominantNearendEnrThresholdOverride", 0.f, 100.f, + field_trials, "WebRTC-Aec3SuppressorDominantNearendEnrThresholdOverride", + 0.f, 100.f, &adjusted_cfg.suppressor.dominant_nearend_detection.enr_threshold); RetrieveFieldTrialValue( + field_trials, "WebRTC-Aec3SuppressorDominantNearendEnrExitThresholdOverride", 0.f, 100.f, &adjusted_cfg.suppressor.dominant_nearend_detection.enr_exit_threshold); RetrieveFieldTrialValue( - "WebRTC-Aec3SuppressorDominantNearendSnrThresholdOverride", 0.f, 100.f, + field_trials, "WebRTC-Aec3SuppressorDominantNearendSnrThresholdOverride", + 0.f, 100.f, &adjusted_cfg.suppressor.dominant_nearend_detection.snr_threshold); RetrieveFieldTrialValue( - "WebRTC-Aec3SuppressorDominantNearendHoldDurationOverride", 0, 1000, + field_trials, "WebRTC-Aec3SuppressorDominantNearendHoldDurationOverride", + 0, 1000, &adjusted_cfg.suppressor.dominant_nearend_detection.hold_duration); RetrieveFieldTrialValue( + field_trials, "WebRTC-Aec3SuppressorDominantNearendTriggerThresholdOverride", 0, 1000, &adjusted_cfg.suppressor.dominant_nearend_detection.trigger_threshold); RetrieveFieldTrialValue( - "WebRTC-Aec3SuppressorAntiHowlingGainOverride", 0.f, 10.f, + field_trials, "WebRTC-Aec3SuppressorAntiHowlingGainOverride", 0.f, 10.f, &adjusted_cfg.suppressor.high_bands_suppression.anti_howling_gain); // Field trial-based overrides of individual delay estimator parameters. 
- RetrieveFieldTrialValue("WebRTC-Aec3DelayEstimateSmoothingOverride", 0.f, 1.f, + RetrieveFieldTrialValue(field_trials, + "WebRTC-Aec3DelayEstimateSmoothingOverride", 0.f, 1.f, &adjusted_cfg.delay.delay_estimate_smoothing); RetrieveFieldTrialValue( - "WebRTC-Aec3DelayEstimateSmoothingDelayFoundOverride", 0.f, 1.f, - &adjusted_cfg.delay.delay_estimate_smoothing_delay_found); + field_trials, "WebRTC-Aec3DelayEstimateSmoothingDelayFoundOverride", 0.f, + 1.f, &adjusted_cfg.delay.delay_estimate_smoothing_delay_found); + int max_allowed_excess_render_blocks_override = + adjusted_cfg.buffering.max_allowed_excess_render_blocks; + RetrieveFieldTrialValue( + field_trials, "WebRTC-Aec3BufferingMaxAllowedExcessRenderBlocksOverride", + 0, 20, &max_allowed_excess_render_blocks_override); + adjusted_cfg.buffering.max_allowed_excess_render_blocks = + max_allowed_excess_render_blocks_override; return adjusted_cfg; } @@ -718,18 +745,20 @@ void EchoCanceller3::RenderWriter::Insert(const AudioBuffer& input) { std::atomic EchoCanceller3::instance_count_(0); EchoCanceller3::EchoCanceller3( + const Environment& env, const EchoCanceller3Config& config, - const absl::optional& multichannel_config, + const std::optional& multichannel_config, int sample_rate_hz, size_t num_render_channels, size_t num_capture_channels) - : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), - config_(AdjustConfig(config)), + : env_(env), + data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), + config_(AdjustConfig(config, env.field_trials())), sample_rate_hz_(sample_rate_hz), num_bands_(NumBandsForRate(sample_rate_hz_)), num_render_input_channels_(num_render_channels), num_capture_channels_(num_capture_channels), - config_selector_(AdjustConfig(config), + config_selector_(config_, multichannel_config, num_render_input_channels_), multichannel_content_detector_( @@ -762,7 +791,7 @@ EchoCanceller3::EchoCanceller3( capture_block_(num_bands_, num_capture_channels_), capture_sub_frame_view_( num_bands_, - std::vector>(num_capture_channels_)) { + std::vector>(num_capture_channels_)) { RTC_DCHECK(ValidFullBandRate(sample_rate_hz_)); if (config_selector_.active_config().delay.fixed_capture_delay_samples > 0) { @@ -783,9 +812,8 @@ EchoCanceller3::EchoCanceller3( new BlockFramer(/*num_bands=*/1, num_capture_channels_)); linear_output_block_ = std::make_unique(/*num_bands=*/1, num_capture_channels_), - linear_output_sub_frame_view_ = - std::vector>>( - 1, std::vector>(num_capture_channels_)); + linear_output_sub_frame_view_ = std::vector>>( + 1, std::vector>(num_capture_channels_)); } Initialize(); @@ -813,13 +841,12 @@ void EchoCanceller3::Initialize() { render_blocker_.reset( new FrameBlocker(num_bands_, num_render_channels_to_aec_)); - block_processor_.reset(BlockProcessor::Create( - config_selector_.active_config(), sample_rate_hz_, - num_render_channels_to_aec_, num_capture_channels_)); + block_processor_ = BlockProcessor::Create( + env_, config_selector_.active_config(), sample_rate_hz_, + num_render_channels_to_aec_, num_capture_channels_); - render_sub_frame_view_ = std::vector>>( - num_bands_, - std::vector>(num_render_channels_to_aec_)); + render_sub_frame_view_ = std::vector>>( + num_bands_, std::vector>(num_render_channels_to_aec_)); } void EchoCanceller3::AnalyzeRender(const AudioBuffer& render) { @@ -838,9 +865,8 @@ void EchoCanceller3::AnalyzeCapture(const AudioBuffer& capture) { capture.channels_const()[0], sample_rate_hz_, 1); saturated_microphone_signal_ = false; for (size_t channel = 0; channel < 
capture.num_channels(); ++channel) { - saturated_microphone_signal_ |= - DetectSaturation(rtc::ArrayView( - capture.channels_const()[channel], capture.num_frames())); + saturated_microphone_signal_ |= DetectSaturation(ArrayView( + capture.channels_const()[channel], capture.num_frames())); if (saturated_microphone_signal_) { break; } @@ -878,7 +904,7 @@ void EchoCanceller3::ProcessCapture(AudioBuffer* capture, block_delay_buffer_->DelaySignal(capture); } - rtc::ArrayView capture_lower_band = rtc::ArrayView( + ArrayView capture_lower_band = ArrayView( &capture->split_bands(0)[0][0], AudioBuffer::kSplitBandSize); data_dumper_->DumpWav("aec3_capture_input", capture_lower_band, 16000, 1); @@ -933,21 +959,6 @@ bool EchoCanceller3::ActiveProcessing() const { return true; } -EchoCanceller3Config EchoCanceller3::CreateDefaultMultichannelConfig() { - EchoCanceller3Config cfg; - // Use shorter and more rapidly adapting coarse filter to compensate for - // thge increased number of total filter parameters to adapt. - cfg.filter.coarse.length_blocks = 11; - cfg.filter.coarse.rate = 0.95f; - cfg.filter.coarse_initial.length_blocks = 11; - cfg.filter.coarse_initial.rate = 0.95f; - - // Use more concervative suppressor behavior for non-nearend speech. - cfg.suppressor.normal_tuning.max_dec_factor_lf = 0.35f; - cfg.suppressor.normal_tuning.max_inc_factor = 1.5f; - return cfg; -} - void EchoCanceller3::SetBlockProcessorForTesting( std::unique_ptr block_processor) { RTC_DCHECK_RUNS_SERIALIZED(&capture_race_checker_); diff --git a/modules/audio_processing/aec3/echo_canceller3.h b/modules/audio_processing/aec3/echo_canceller3.h index 7bf8e51a4b..a10cb13785 100644 --- a/modules/audio_processing/aec3/echo_canceller3.h +++ b/modules/audio_processing/aec3/echo_canceller3.h @@ -15,12 +15,14 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" #include "api/audio/echo_control.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" #include "modules/audio_processing/aec3/api_call_jitter_metrics.h" #include "modules/audio_processing/aec3/block_delay_buffer.h" #include "modules/audio_processing/aec3/block_framer.h" @@ -40,7 +42,8 @@ namespace webrtc { // Method for adjusting config parameter dependencies. // Only to be used externally to AEC3 for testing purposes. // TODO(webrtc:5298): Move this to a separate file. -EchoCanceller3Config AdjustConfig(const EchoCanceller3Config& config); +EchoCanceller3Config AdjustConfig(const EchoCanceller3Config& config, + const FieldTrialsView& field_trials); // Functor for verifying the invariance of the frames being put into the render // queue. @@ -88,12 +91,12 @@ class Aec3RenderQueueItemVerifier { // AnalyzeRender call which can be called concurrently with the other methods. class EchoCanceller3 : public EchoControl { public: - EchoCanceller3( - const EchoCanceller3Config& config, - const absl::optional& multichannel_config, - int sample_rate_hz, - size_t num_render_channels, - size_t num_capture_channels); + EchoCanceller3(const Environment& env, + const EchoCanceller3Config& config, + const std::optional& multichannel_config, + int sample_rate_hz, + size_t num_render_channels, + size_t num_capture_channels); ~EchoCanceller3() override; @@ -136,9 +139,6 @@ class EchoCanceller3 : public EchoControl { block_processor_->UpdateEchoLeakageStatus(leakage_detected); } - // Produces a default configuration for multichannel. 
- static EchoCanceller3Config CreateDefaultMultichannelConfig(); - private: friend class EchoCanceller3Tester; FRIEND_TEST_ALL_PREFIXES(EchoCanceller3, DetectionOfProperStereo); @@ -178,8 +178,9 @@ class EchoCanceller3 : public EchoControl { // Analyzes the full-band domain capture signal to detect signal saturation. void AnalyzeCapture(const AudioBuffer& capture); - rtc::RaceChecker capture_race_checker_; - rtc::RaceChecker render_race_checker_; + const Environment env_; + RaceChecker capture_race_checker_; + RaceChecker render_race_checker_; // State that is accessed by the AnalyzeRender call. std::unique_ptr render_writer_ @@ -215,11 +216,11 @@ class EchoCanceller3 : public EchoControl { std::unique_ptr linear_output_block_ RTC_GUARDED_BY(capture_race_checker_); Block capture_block_ RTC_GUARDED_BY(capture_race_checker_); - std::vector>> render_sub_frame_view_ + std::vector>> render_sub_frame_view_ RTC_GUARDED_BY(capture_race_checker_); - std::vector>> linear_output_sub_frame_view_ + std::vector>> linear_output_sub_frame_view_ RTC_GUARDED_BY(capture_race_checker_); - std::vector>> capture_sub_frame_view_ + std::vector>> capture_sub_frame_view_ RTC_GUARDED_BY(capture_race_checker_); std::unique_ptr block_delay_buffer_ RTC_GUARDED_BY(capture_race_checker_); diff --git a/modules/audio_processing/aec3/echo_canceller3_unittest.cc b/modules/audio_processing/aec3/echo_canceller3_unittest.cc index ad126af4d3..486c64cd3c 100644 --- a/modules/audio_processing/aec3/echo_canceller3_unittest.cc +++ b/modules/audio_processing/aec3/echo_canceller3_unittest.cc @@ -16,6 +16,8 @@ #include #include +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/block_processor.h" #include "modules/audio_processing/aec3/frame_blocker.h" @@ -24,13 +26,14 @@ #include "modules/audio_processing/high_pass_filter.h" #include "modules/audio_processing/utility/cascaded_biquad_filter.h" #include "rtc_base/strings/string_builder.h" -#include "test/field_trial.h" +#include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" namespace webrtc { namespace { +using test::ExplicitKeyValueConfig; using ::testing::_; using ::testing::StrictMock; @@ -88,8 +91,8 @@ bool VerifyOutputFrameBitexactness(size_t frame_length, return true; } -bool VerifyOutputFrameBitexactness(rtc::ArrayView reference, - rtc::ArrayView frame, +bool VerifyOutputFrameBitexactness(ArrayView reference, + ArrayView frame, int offset) { for (size_t k = 0; k < frame.size(); ++k) { int reference_index = static_cast(k) + offset; @@ -107,7 +110,7 @@ bool VerifyOutputFrameBitexactness(rtc::ArrayView reference, // EchoCanceller3 output. 
class CaptureTransportVerificationProcessor : public BlockProcessor { public: - explicit CaptureTransportVerificationProcessor(size_t num_bands) {} + explicit CaptureTransportVerificationProcessor(size_t /* num_bands */) {} CaptureTransportVerificationProcessor() = delete; CaptureTransportVerificationProcessor( @@ -117,27 +120,27 @@ class CaptureTransportVerificationProcessor : public BlockProcessor { ~CaptureTransportVerificationProcessor() override = default; - void ProcessCapture(bool level_change, - bool saturated_microphone_signal, - Block* linear_output, - Block* capture_block) override {} + void ProcessCapture(bool /* level_change */, + bool /* saturated_microphone_signal */, + Block* /* linear_output */, + Block* /* capture_block */) override {} - void BufferRender(const Block& block) override {} + void BufferRender(const Block& /* block */) override {} - void UpdateEchoLeakageStatus(bool leakage_detected) override {} + void UpdateEchoLeakageStatus(bool /* leakage_detected */) override {} - void GetMetrics(EchoControl::Metrics* metrics) const override {} + void GetMetrics(EchoControl::Metrics* /* metrics */) const override {} - void SetAudioBufferDelay(int delay_ms) override {} + void SetAudioBufferDelay(int /* delay_ms */) override {} - void SetCaptureOutputUsage(bool capture_output_used) {} + void SetCaptureOutputUsage(bool /* capture_output_used */) {} }; // Class for testing that the render data is properly received by the block // processor. class RenderTransportVerificationProcessor : public BlockProcessor { public: - explicit RenderTransportVerificationProcessor(size_t num_bands) {} + explicit RenderTransportVerificationProcessor(size_t /* num_bands */) {} RenderTransportVerificationProcessor() = delete; RenderTransportVerificationProcessor( @@ -147,9 +150,9 @@ class RenderTransportVerificationProcessor : public BlockProcessor { ~RenderTransportVerificationProcessor() override = default; - void ProcessCapture(bool level_change, - bool saturated_microphone_signal, - Block* linear_output, + void ProcessCapture(bool /* level_change */, + bool /* saturated_microphone_signal */, + Block* /* linear_output */, Block* capture_block) override { Block render_block = received_render_blocks_.front(); received_render_blocks_.pop_front(); @@ -160,26 +163,26 @@ class RenderTransportVerificationProcessor : public BlockProcessor { received_render_blocks_.push_back(block); } - void UpdateEchoLeakageStatus(bool leakage_detected) override {} + void UpdateEchoLeakageStatus(bool /* leakage_detected */) override {} - void GetMetrics(EchoControl::Metrics* metrics) const override {} + void GetMetrics(EchoControl::Metrics* /* metrics */) const override {} - void SetAudioBufferDelay(int delay_ms) override {} + void SetAudioBufferDelay(int /* delay_ms */) override {} - void SetCaptureOutputUsage(bool capture_output_used) {} + void SetCaptureOutputUsage(bool /* capture_output_used */) {} private: std::deque received_render_blocks_; }; std::string ProduceDebugText(int sample_rate_hz) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Sample rate: " << sample_rate_hz; return ss.Release(); } std::string ProduceDebugText(int sample_rate_hz, int variant) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Sample rate: " << sample_rate_hz << ", variant: " << variant; return ss.Release(); } @@ -188,11 +191,11 @@ void RunAecInStereo(AudioBuffer& buffer, EchoCanceller3& aec3, float channel_0_value, float channel_1_value) { - rtc::ArrayView data_channel_0(&buffer.channels()[0][0], - buffer.num_frames()); + 
ArrayView data_channel_0(&buffer.channels()[0][0], + buffer.num_frames()); std::fill(data_channel_0.begin(), data_channel_0.end(), channel_0_value); - rtc::ArrayView data_channel_1(&buffer.channels()[1][0], - buffer.num_frames()); + ArrayView data_channel_1(&buffer.channels()[1][0], + buffer.num_frames()); std::fill(data_channel_1.begin(), data_channel_1.end(), channel_1_value); aec3.AnalyzeRender(&buffer); aec3.AnalyzeCapture(&buffer); @@ -202,8 +205,8 @@ void RunAecInStereo(AudioBuffer& buffer, void RunAecInSMono(AudioBuffer& buffer, EchoCanceller3& aec3, float channel_0_value) { - rtc::ArrayView data_channel_0(&buffer.channels()[0][0], - buffer.num_frames()); + ArrayView data_channel_0(&buffer.channels()[0][0], + buffer.num_frames()); std::fill(data_channel_0.begin(), data_channel_0.end(), channel_0_value); aec3.AnalyzeRender(&buffer); aec3.AnalyzeCapture(&buffer); @@ -218,7 +221,7 @@ class EchoCanceller3Tester { : sample_rate_hz_(sample_rate_hz), num_bands_(NumBandsForRate(sample_rate_hz_)), frame_length_(160), - fullband_frame_length_(rtc::CheckedDivExact(sample_rate_hz_, 100)), + fullband_frame_length_(CheckedDivExact(sample_rate_hz_, 100)), capture_buffer_(fullband_frame_length_ * 100, 1, fullband_frame_length_ * 100, @@ -240,8 +243,8 @@ class EchoCanceller3Tester { // and that the processor data is properly passed to the EchoCanceller3 // output. void RunCaptureTransportVerificationTest() { - EchoCanceller3 aec3(EchoCanceller3Config(), - /*multichannel_config=*/absl::nullopt, sample_rate_hz_, + EchoCanceller3 aec3(CreateEnvironment(), EchoCanceller3Config(), + /*multichannel_config=*/std::nullopt, sample_rate_hz_, 1, 1); aec3.SetBlockProcessorForTesting( std::make_unique(num_bands_)); @@ -266,8 +269,8 @@ class EchoCanceller3Tester { // Test method for testing that the render data is properly received by the // block processor. void RunRenderTransportVerificationTest() { - EchoCanceller3 aec3(EchoCanceller3Config(), - /*multichannel_config=*/absl::nullopt, sample_rate_hz_, + EchoCanceller3 aec3(CreateEnvironment(), EchoCanceller3Config(), + /*multichannel_config=*/std::nullopt, sample_rate_hz_, 1, 1); aec3.SetBlockProcessorForTesting( std::make_unique(num_bands_)); @@ -337,8 +340,8 @@ class EchoCanceller3Tester { break; } - EchoCanceller3 aec3(EchoCanceller3Config(), - /*multichannel_config=*/absl::nullopt, sample_rate_hz_, + EchoCanceller3 aec3(CreateEnvironment(), EchoCanceller3Config(), + /*multichannel_config=*/std::nullopt, sample_rate_hz_, 1, 1); aec3.SetBlockProcessorForTesting(std::move(block_processor_mock)); @@ -419,8 +422,8 @@ class EchoCanceller3Tester { } break; } - EchoCanceller3 aec3(EchoCanceller3Config(), - /*multichannel_config=*/absl::nullopt, sample_rate_hz_, + EchoCanceller3 aec3(CreateEnvironment(), EchoCanceller3Config(), + /*multichannel_config=*/std::nullopt, sample_rate_hz_, 1, 1); aec3.SetBlockProcessorForTesting(std::move(block_processor_mock)); @@ -507,8 +510,8 @@ class EchoCanceller3Tester { } break; } - EchoCanceller3 aec3(EchoCanceller3Config(), - /*multichannel_config=*/absl::nullopt, sample_rate_hz_, + EchoCanceller3 aec3(CreateEnvironment(), EchoCanceller3Config(), + /*multichannel_config=*/std::nullopt, sample_rate_hz_, 1, 1); aec3.SetBlockProcessorForTesting(std::move(block_processor_mock)); for (size_t frame_index = 0; frame_index < kNumFramesToProcess; @@ -548,8 +551,9 @@ class EchoCanceller3Tester { // capture and render API calls. 
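For orientation, a rough sketch of the per-frame call pattern these tests drive, using the Environment-based constructor introduced by this patch; the buffer setup is elided and std::nullopt selects the plain mono configuration.

#include <optional>

#include "api/environment/environment_factory.h"
#include "modules/audio_processing/aec3/echo_canceller3.h"
#include "modules/audio_processing/audio_buffer.h"

// Sketch: one render/capture iteration with the new constructor signature.
void ProcessOneFrameSketch(webrtc::AudioBuffer& buffer) {
  webrtc::EchoCanceller3 aec3(webrtc::CreateEnvironment(),
                              webrtc::EchoCanceller3Config(),
                              /*multichannel_config=*/std::nullopt,
                              /*sample_rate_hz=*/16000,
                              /*num_render_channels=*/1,
                              /*num_capture_channels=*/1);
  aec3.AnalyzeRender(&buffer);   // Queue the far-end (render) frame.
  aec3.AnalyzeCapture(&buffer);  // Full-band saturation analysis.
  aec3.ProcessCapture(&buffer, /*level_change=*/false);
}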
void RunRenderSwapQueueVerificationTest() { const EchoCanceller3Config config; - EchoCanceller3 aec3(config, /*multichannel_config=*/absl::nullopt, - sample_rate_hz_, 1, 1); + EchoCanceller3 aec3(CreateEnvironment(), config, + /*multichannel_config=*/std::nullopt, sample_rate_hz_, + 1, 1); aec3.SetBlockProcessorForTesting( std::make_unique(num_bands_)); @@ -597,8 +601,8 @@ class EchoCanceller3Tester { // This test verifies that a buffer overrun in the render swapqueue is // properly reported. void RunRenderPipelineSwapQueueOverrunReturnValueTest() { - EchoCanceller3 aec3(EchoCanceller3Config(), - /*multichannel_config=*/absl::nullopt, sample_rate_hz_, + EchoCanceller3 aec3(CreateEnvironment(), EchoCanceller3Config(), + /*multichannel_config=*/std::nullopt, sample_rate_hz_, 1, 1); constexpr size_t kRenderTransferQueueSize = 30; @@ -624,8 +628,8 @@ class EchoCanceller3Tester { // Set aec3_sample_rate_hz to be different from sample_rate_hz_ in such a // way that the number of bands for the rates are different. const int aec3_sample_rate_hz = sample_rate_hz_ == 48000 ? 32000 : 48000; - EchoCanceller3 aec3(EchoCanceller3Config(), - /*multichannel_config=*/absl::nullopt, + EchoCanceller3 aec3(CreateEnvironment(), EchoCanceller3Config(), + /*multichannel_config=*/std::nullopt, aec3_sample_rate_hz, 1, 1); PopulateInputFrame(frame_length_, 0, &render_buffer_.channels_f()[0][0], 0); @@ -639,8 +643,8 @@ class EchoCanceller3Tester { // Set aec3_sample_rate_hz to be different from sample_rate_hz_ in such a // way that the number of bands for the rates are different. const int aec3_sample_rate_hz = sample_rate_hz_ == 48000 ? 32000 : 48000; - EchoCanceller3 aec3(EchoCanceller3Config(), - /*multichannel_config=*/absl::nullopt, + EchoCanceller3 aec3(CreateEnvironment(), EchoCanceller3Config(), + /*multichannel_config=*/std::nullopt, aec3_sample_rate_hz, 1, 1); PopulateInputFrame(frame_length_, num_bands_, 0, &capture_buffer_.split_bands_f(0)[0], 100); @@ -735,14 +739,15 @@ TEST(EchoCanceller3Messaging, EchoLeakage) { // anti-howling gain. TEST(EchoCanceller3FieldTrials, Aec3SuppressorAntiHowlingGainOverride) { EchoCanceller3Config default_config; - EchoCanceller3Config adjusted_config = AdjustConfig(default_config); + EchoCanceller3Config adjusted_config = + AdjustConfig(default_config, ExplicitKeyValueConfig("")); ASSERT_EQ( default_config.suppressor.high_bands_suppression.anti_howling_gain, adjusted_config.suppressor.high_bands_suppression.anti_howling_gain); - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Aec3SuppressorAntiHowlingGainOverride/0.02/"); - adjusted_config = AdjustConfig(default_config); + adjusted_config = AdjustConfig(default_config, field_trials); ASSERT_NE( default_config.suppressor.high_bands_suppression.anti_howling_gain, @@ -756,13 +761,14 @@ TEST(EchoCanceller3FieldTrials, Aec3SuppressorAntiHowlingGainOverride) { // limit. 
TEST(EchoCanceller3FieldTrials, Aec3EnforceLowActiveRenderLimit) { EchoCanceller3Config default_config; - EchoCanceller3Config adjusted_config = AdjustConfig(default_config); + EchoCanceller3Config adjusted_config = + AdjustConfig(default_config, ExplicitKeyValueConfig("")); ASSERT_EQ(default_config.render_levels.active_render_limit, adjusted_config.render_levels.active_render_limit); - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Aec3EnforceLowActiveRenderLimit/Enabled/"); - adjusted_config = AdjustConfig(default_config); + adjusted_config = AdjustConfig(default_config, field_trials); ASSERT_NE(default_config.render_levels.active_render_limit, adjusted_config.render_levels.active_render_limit); @@ -772,7 +778,7 @@ TEST(EchoCanceller3FieldTrials, Aec3EnforceLowActiveRenderLimit) { // Testing the field trial-based override of the suppressor parameters for a // joint passing of all parameters. TEST(EchoCanceller3FieldTrials, Aec3SuppressorTuningOverrideAllParams) { - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Aec3SuppressorTuningOverride/" "nearend_tuning_mask_lf_enr_transparent:0.1,nearend_tuning_mask_lf_enr_" "suppress:0.2,nearend_tuning_mask_hf_enr_transparent:0.3,nearend_tuning_" @@ -787,7 +793,8 @@ TEST(EchoCanceller3FieldTrials, Aec3SuppressorTuningOverrideAllParams) { "threshold:11/"); EchoCanceller3Config default_config; - EchoCanceller3Config adjusted_config = AdjustConfig(default_config); + EchoCanceller3Config adjusted_config = + AdjustConfig(default_config, field_trials); ASSERT_NE(adjusted_config.suppressor.nearend_tuning.mask_lf.enr_transparent, default_config.suppressor.nearend_tuning.mask_lf.enr_transparent); @@ -866,11 +873,12 @@ TEST(EchoCanceller3FieldTrials, Aec3SuppressorTuningOverrideAllParams) { // Testing the field trial-based override of the suppressor parameters for // passing one parameter. TEST(EchoCanceller3FieldTrials, Aec3SuppressorTuningOverrideOneParam) { - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Aec3SuppressorTuningOverride/nearend_tuning_max_inc_factor:0.5/"); EchoCanceller3Config default_config; - EchoCanceller3Config adjusted_config = AdjustConfig(default_config); + EchoCanceller3Config adjusted_config = + AdjustConfig(default_config, field_trials); ASSERT_EQ(adjusted_config.suppressor.nearend_tuning.mask_lf.enr_transparent, default_config.suppressor.nearend_tuning.mask_lf.enr_transparent); @@ -916,14 +924,26 @@ TEST(EchoCanceller3FieldTrials, Aec3SuppressorTuningOverrideOneParam) { // Testing the field trial-based that override the exponential decay parameters. TEST(EchoCanceller3FieldTrials, Aec3UseNearendReverb) { - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Aec3UseNearendReverbLen/default_len:0.9,nearend_len:0.8/"); EchoCanceller3Config default_config; - EchoCanceller3Config adjusted_config = AdjustConfig(default_config); + EchoCanceller3Config adjusted_config = + AdjustConfig(default_config, field_trials); EXPECT_FLOAT_EQ(adjusted_config.ep_strength.default_len, 0.9); EXPECT_FLOAT_EQ(adjusted_config.ep_strength.nearend_len, 0.8); } +// Testing the field trial-based override of the maximum allowed excess render +// blocks in the render buffering.
+TEST(EchoCanceller3FieldTrials, Aec3BufferingMaxAllowedExcessRenderBlocks) { + ExplicitKeyValueConfig field_trials( + "WebRTC-Aec3BufferingMaxAllowedExcessRenderBlocksOverride/2/"); + EchoCanceller3Config default_config; + EchoCanceller3Config adjusted_config = + AdjustConfig(default_config, field_trials); + EXPECT_EQ(adjusted_config.buffering.max_allowed_excess_render_blocks, 2ul); +} + TEST(EchoCanceller3, DetectionOfProperStereo) { constexpr int kSampleRateHz = 16000; constexpr int kNumChannels = 2; @@ -937,7 +957,7 @@ TEST(EchoCanceller3, DetectionOfProperStereo) { constexpr size_t kNumBlocksForMonoConfig = 1; constexpr size_t kNumBlocksForSurroundConfig = 2; EchoCanceller3Config mono_config; - absl::optional multichannel_config; + std::optional multichannel_config; mono_config.multi_channel.detect_stereo_content = true; mono_config.multi_channel.stereo_detection_threshold = 0.0f; @@ -947,7 +967,7 @@ TEST(EchoCanceller3, DetectionOfProperStereo) { multichannel_config->filter.coarse_initial.length_blocks = kNumBlocksForSurroundConfig; - EchoCanceller3 aec3(mono_config, multichannel_config, + EchoCanceller3 aec3(CreateEnvironment(), mono_config, multichannel_config, /*sample_rate_hz=*/kSampleRateHz, /*num_render_channels=*/kNumChannels, /*num_capture_input_channels=*/kNumChannels); @@ -983,7 +1003,7 @@ TEST(EchoCanceller3, DetectionOfProperStereoUsingThreshold) { constexpr size_t kNumBlocksForMonoConfig = 1; constexpr size_t kNumBlocksForSurroundConfig = 2; EchoCanceller3Config mono_config; - absl::optional multichannel_config; + std::optional multichannel_config; constexpr float kStereoDetectionThreshold = 2.0f; mono_config.multi_channel.detect_stereo_content = true; @@ -995,7 +1015,7 @@ TEST(EchoCanceller3, DetectionOfProperStereoUsingThreshold) { multichannel_config->filter.coarse_initial.length_blocks = kNumBlocksForSurroundConfig; - EchoCanceller3 aec3(mono_config, multichannel_config, + EchoCanceller3 aec3(CreateEnvironment(), mono_config, multichannel_config, /*sample_rate_hz=*/kSampleRateHz, /*num_render_channels=*/kNumChannels, /*num_capture_input_channels=*/kNumChannels); @@ -1033,7 +1053,7 @@ TEST(EchoCanceller3, DetectionOfProperStereoUsingHysteresis) { constexpr size_t kNumBlocksForMonoConfig = 1; constexpr size_t kNumBlocksForSurroundConfig = 2; EchoCanceller3Config mono_config; - absl::optional surround_config; + std::optional surround_config; mono_config.multi_channel.detect_stereo_content = true; mono_config.multi_channel.stereo_detection_hysteresis_seconds = 0.5f; @@ -1042,7 +1062,7 @@ TEST(EchoCanceller3, DetectionOfProperStereoUsingHysteresis) { surround_config->filter.coarse_initial.length_blocks = kNumBlocksForSurroundConfig; - EchoCanceller3 aec3(mono_config, surround_config, + EchoCanceller3 aec3(CreateEnvironment(), mono_config, surround_config, /*sample_rate_hz=*/kSampleRateHz, /*num_render_channels=*/kNumChannels, /*num_capture_input_channels=*/kNumChannels); @@ -1090,8 +1110,9 @@ TEST(EchoCanceller3, StereoContentDetectionForMonoSignals) { constexpr size_t kNumBlocksForMonoConfig = 1; constexpr size_t kNumBlocksForSurroundConfig = 2; + const Environment env = CreateEnvironment(); EchoCanceller3Config mono_config; - absl::optional multichannel_config; + std::optional multichannel_config; for (bool detect_stereo_content : {false, true}) { mono_config.multi_channel.detect_stereo_content = detect_stereo_content; @@ -1107,7 +1128,7 @@ TEST(EchoCanceller3, StereoContentDetectionForMonoSignals) { /*output_rate=*/kSampleRateHz, /*output_num_channels=*/1); - 
EchoCanceller3 aec3(mono_config, multichannel_config, + EchoCanceller3 aec3(env, mono_config, multichannel_config, /*sample_rate_hz=*/kSampleRateHz, /*num_render_channels=*/1, /*num_capture_input_channels=*/1); @@ -1138,8 +1159,8 @@ TEST(EchoCanceller3InputCheckDeathTest, WrongCaptureNumBandsCheckVerification) { // call works. TEST(EchoCanceller3InputCheckDeathTest, NullCaptureProcessingParameter) { EXPECT_DEATH( - EchoCanceller3(EchoCanceller3Config(), - /*multichannel_config_=*/absl::nullopt, 16000, 1, 1) + EchoCanceller3(CreateEnvironment(), EchoCanceller3Config(), + /*multichannel_config_=*/std::nullopt, 16000, 1, 1) .ProcessCapture(nullptr, false), ""); } @@ -1150,8 +1171,8 @@ TEST(EchoCanceller3InputCheckDeathTest, NullCaptureProcessingParameter) { TEST(EchoCanceller3InputCheckDeathTest, DISABLED_WrongSampleRate) { ApmDataDumper data_dumper(0); EXPECT_DEATH( - EchoCanceller3(EchoCanceller3Config(), - /*multichannel_config_=*/absl::nullopt, 8001, 1, 1), + EchoCanceller3(CreateEnvironment(), EchoCanceller3Config(), + /*multichannel_config_=*/std::nullopt, 8001, 1, 1), ""); } diff --git a/modules/audio_processing/aec3/echo_path_delay_estimator.cc b/modules/audio_processing/aec3/echo_path_delay_estimator.cc index 510e4b8a8d..32eb5d8f6a 100644 --- a/modules/audio_processing/aec3/echo_path_delay_estimator.cc +++ b/modules/audio_processing/aec3/echo_path_delay_estimator.cc @@ -58,12 +58,12 @@ void EchoPathDelayEstimator::Reset(bool reset_delay_confidence) { Reset(true, reset_delay_confidence); } -absl::optional EchoPathDelayEstimator::EstimateDelay( +std::optional EchoPathDelayEstimator::EstimateDelay( const DownsampledRenderBuffer& render_buffer, const Block& capture) { std::array downsampled_capture_data; - rtc::ArrayView downsampled_capture(downsampled_capture_data.data(), - sub_block_size_); + ArrayView downsampled_capture(downsampled_capture_data.data(), + sub_block_size_); std::array downmixed_capture; capture_mixer_.ProduceOutput(capture, downmixed_capture); @@ -74,7 +74,7 @@ absl::optional EchoPathDelayEstimator::EstimateDelay( matched_filter_.Update(render_buffer, downsampled_capture, matched_filter_lag_aggregator_.ReliableDelayFound()); - absl::optional aggregated_matched_filter_lag = + std::optional aggregated_matched_filter_lag = matched_filter_lag_aggregator_.Aggregate( matched_filter_.GetBestLagEstimate()); @@ -121,7 +121,7 @@ void EchoPathDelayEstimator::Reset(bool reset_lag_aggregator, matched_filter_lag_aggregator_.Reset(reset_delay_confidence); } matched_filter_.Reset(/*full_reset=*/reset_lag_aggregator); - old_aggregated_lag_ = absl::nullopt; + old_aggregated_lag_ = std::nullopt; consistent_estimate_counter_ = 0; } } // namespace webrtc diff --git a/modules/audio_processing/aec3/echo_path_delay_estimator.h b/modules/audio_processing/aec3/echo_path_delay_estimator.h index b24d0a29ec..bd5c3b0697 100644 --- a/modules/audio_processing/aec3/echo_path_delay_estimator.h +++ b/modules/audio_processing/aec3/echo_path_delay_estimator.h @@ -13,7 +13,8 @@ #include -#include "absl/types/optional.h" +#include + #include "api/array_view.h" #include "modules/audio_processing/aec3/alignment_mixer.h" #include "modules/audio_processing/aec3/block.h" @@ -45,7 +46,7 @@ class EchoPathDelayEstimator { void Reset(bool reset_delay_confidence); // Produce a delay estimate if such is avaliable. 
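A hedged sketch of consuming the estimator result now that it is returned as a std::optional; it assumes DelayEstimate exposes a delay member as declared in delay_estimate.h, since the template arguments are elided in the hunks above.

#include <optional>

#include "modules/audio_processing/aec3/delay_estimate.h"
#include "modules/audio_processing/aec3/echo_path_delay_estimator.h"

// Sketch: the estimate may be absent until the matched filter has converged.
void UseDelayEstimateSketch(webrtc::EchoPathDelayEstimator& estimator,
                            const webrtc::DownsampledRenderBuffer& render,
                            const webrtc::Block& capture) {
  std::optional<webrtc::DelayEstimate> estimate =
      estimator.EstimateDelay(render, capture);
  if (estimate.has_value()) {
    size_t delay_samples = estimate->delay;  // Assumed member; see the header.
    (void)delay_samples;
  }
}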
- absl::optional EstimateDelay( + std::optional EstimateDelay( const DownsampledRenderBuffer& render_buffer, const Block& capture); @@ -68,7 +69,7 @@ class EchoPathDelayEstimator { Decimator capture_decimator_; MatchedFilter matched_filter_; MatchedFilterLagAggregator matched_filter_lag_aggregator_; - absl::optional old_aggregated_lag_; + std::optional old_aggregated_lag_; size_t consistent_estimate_counter_ = 0; ClockdriftDetector clockdrift_detector_; diff --git a/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc b/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc index e2c101fb04..8a105041e0 100644 --- a/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc +++ b/modules/audio_processing/aec3/echo_path_delay_estimator_unittest.cc @@ -26,7 +26,7 @@ namespace webrtc { namespace { std::string ProduceDebugText(size_t delay, size_t down_sampling_factor) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Delay: " << delay; ss << ", Down sampling factor: " << down_sampling_factor; return ss.Release(); @@ -88,7 +88,7 @@ TEST(EchoPathDelayEstimator, DelayEstimation) { EchoPathDelayEstimator estimator(&data_dumper, config, kNumCaptureChannels); - absl::optional estimated_delay_samples; + std::optional estimated_delay_samples; for (size_t k = 0; k < (500 + (delay_samples) / kBlockSize); ++k) { RandomizeSampleVector(&random_generator, render.View(/*band=*/0, /*channel=*/0)); diff --git a/modules/audio_processing/aec3/echo_remover.cc b/modules/audio_processing/aec3/echo_remover.cc index 673d88af03..c19da94dbc 100644 --- a/modules/audio_processing/aec3/echo_remover.cc +++ b/modules/audio_processing/aec3/echo_remover.cc @@ -19,6 +19,7 @@ #include #include "api/array_view.h" +#include "api/environment/environment.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/aec3_fft.h" #include "modules/audio_processing/aec3/aec_state.h" @@ -68,9 +69,9 @@ void LinearEchoPower(const FftData& E, } // Fades between two input signals using a fix-sized transition. -void SignalTransition(rtc::ArrayView from, - rtc::ArrayView to, - rtc::ArrayView out) { +void SignalTransition(ArrayView from, + ArrayView to, + ArrayView out) { if (from == to) { RTC_DCHECK_EQ(to.size(), out.size()); std::copy(to.begin(), to.end(), out.begin()); @@ -95,8 +96,8 @@ void SignalTransition(rtc::ArrayView from, // Computes a windowed (square root Hanning) padded FFT and updates the related // memory. void WindowedPaddedFft(const Aec3Fft& fft, - rtc::ArrayView v, - rtc::ArrayView v_old, + ArrayView v, + ArrayView v_old, FftData* V) { fft.PaddedFft(v, v_old, Aec3Fft::Window::kSqrtHanning, V); std::copy(v.begin(), v.end(), v_old.begin()); @@ -105,7 +106,8 @@ void WindowedPaddedFft(const Aec3Fft& fft, // Class for removing the echo from the capture signal. class EchoRemoverImpl final : public EchoRemover { public: - EchoRemoverImpl(const EchoCanceller3Config& config, + EchoRemoverImpl(const Environment& env, + const EchoCanceller3Config& config, int sample_rate_hz, size_t num_render_channels, size_t num_capture_channels); @@ -120,7 +122,7 @@ class EchoRemoverImpl final : public EchoRemover { // signal. 
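A brief sketch of the updated factory, mirroring the changed unit tests: EchoRemover::Create now takes an Environment and returns a std::unique_ptr instead of a raw pointer.

#include <memory>

#include "api/audio/echo_canceller3_config.h"
#include "api/environment/environment_factory.h"
#include "modules/audio_processing/aec3/echo_remover.h"

// Sketch: ownership is explicit via std::unique_ptr.
std::unique_ptr<webrtc::EchoRemover> MakeEchoRemoverSketch() {
  return webrtc::EchoRemover::Create(webrtc::CreateEnvironment(),
                                     webrtc::EchoCanceller3Config(),
                                     /*sample_rate_hz=*/16000,
                                     /*num_render_channels=*/1,
                                     /*num_capture_channels=*/1);
}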
void ProcessCapture(EchoPathVariability echo_path_variability, bool capture_signal_saturation, - const absl::optional& external_delay, + const std::optional& external_delay, RenderBuffer* render_buffer, Block* linear_output, Block* capture) override; @@ -140,7 +142,7 @@ class EchoRemoverImpl final : public EchoRemover { // appropriate to pass to the suppressor and forms the linear filter output by // smoothly transition between those. void FormLinearFilterOutput(const SubtractorOutput& subtractor_output, - rtc::ArrayView output); + ArrayView output); static std::atomic instance_count_; const EchoCanceller3Config config_; @@ -182,7 +184,8 @@ class EchoRemoverImpl final : public EchoRemover { std::atomic EchoRemoverImpl::instance_count_(0); -EchoRemoverImpl::EchoRemoverImpl(const EchoCanceller3Config& config, +EchoRemoverImpl::EchoRemoverImpl(const Environment& env, + const EchoCanceller3Config& config, int sample_rate_hz, size_t num_render_channels, size_t num_capture_channels) @@ -195,7 +198,8 @@ EchoRemoverImpl::EchoRemoverImpl(const EchoCanceller3Config& config, num_capture_channels_(num_capture_channels), use_coarse_filter_output_( config_.filter.enable_coarse_filter_output_usage), - subtractor_(config, + subtractor_(env, + config, num_render_channels_, num_capture_channels_, data_dumper_.get(), @@ -209,8 +213,8 @@ EchoRemoverImpl::EchoRemoverImpl(const EchoCanceller3Config& config, sample_rate_hz_, num_capture_channels_), render_signal_analyzer_(config_), - residual_echo_estimator_(config_, num_render_channels), - aec_state_(config_, num_capture_channels_), + residual_echo_estimator_(env, config_, num_render_channels), + aec_state_(env, config_, num_capture_channels_), e_old_(num_capture_channels_, {0.f}), y_old_(num_capture_channels_, {0.f}), e_heap_(NumChannelsOnHeap(num_capture_channels_), {0.f}), @@ -239,7 +243,7 @@ void EchoRemoverImpl::GetMetrics(EchoControl::Metrics* metrics) const { void EchoRemoverImpl::ProcessCapture( EchoPathVariability echo_path_variability, bool capture_signal_saturation, - const absl::optional& external_delay, + const std::optional& external_delay, RenderBuffer* render_buffer, Block* linear_output, Block* capture) { @@ -271,48 +275,48 @@ void EchoRemoverImpl::ProcessCapture( std::array high_band_comfort_noise_stack; std::array subtractor_output_stack; - rtc::ArrayView> e(e_stack.data(), - num_capture_channels_); - rtc::ArrayView> Y2( - Y2_stack.data(), num_capture_channels_); - rtc::ArrayView> E2( - E2_stack.data(), num_capture_channels_); - rtc::ArrayView> R2( - R2_stack.data(), num_capture_channels_); - rtc::ArrayView> R2_unbounded( + ArrayView> e(e_stack.data(), + num_capture_channels_); + ArrayView> Y2(Y2_stack.data(), + num_capture_channels_); + ArrayView> E2(E2_stack.data(), + num_capture_channels_); + ArrayView> R2(R2_stack.data(), + num_capture_channels_); + ArrayView> R2_unbounded( R2_unbounded_stack.data(), num_capture_channels_); - rtc::ArrayView> S2_linear( + ArrayView> S2_linear( S2_linear_stack.data(), num_capture_channels_); - rtc::ArrayView Y(Y_stack.data(), num_capture_channels_); - rtc::ArrayView E(E_stack.data(), num_capture_channels_); - rtc::ArrayView comfort_noise(comfort_noise_stack.data(), - num_capture_channels_); - rtc::ArrayView high_band_comfort_noise( + ArrayView Y(Y_stack.data(), num_capture_channels_); + ArrayView E(E_stack.data(), num_capture_channels_); + ArrayView comfort_noise(comfort_noise_stack.data(), + num_capture_channels_); + ArrayView high_band_comfort_noise( high_band_comfort_noise_stack.data(), 
num_capture_channels_); - rtc::ArrayView subtractor_output( - subtractor_output_stack.data(), num_capture_channels_); + ArrayView subtractor_output(subtractor_output_stack.data(), + num_capture_channels_); if (NumChannelsOnHeap(num_capture_channels_) > 0) { // If the stack-allocated space is too small, use the heap for storing the // microphone data. - e = rtc::ArrayView>(e_heap_.data(), - num_capture_channels_); - Y2 = rtc::ArrayView>( + e = ArrayView>(e_heap_.data(), + num_capture_channels_); + Y2 = ArrayView>( Y2_heap_.data(), num_capture_channels_); - E2 = rtc::ArrayView>( + E2 = ArrayView>( E2_heap_.data(), num_capture_channels_); - R2 = rtc::ArrayView>( + R2 = ArrayView>( R2_heap_.data(), num_capture_channels_); - R2_unbounded = rtc::ArrayView>( + R2_unbounded = ArrayView>( R2_unbounded_heap_.data(), num_capture_channels_); - S2_linear = rtc::ArrayView>( + S2_linear = ArrayView>( S2_linear_heap_.data(), num_capture_channels_); - Y = rtc::ArrayView(Y_heap_.data(), num_capture_channels_); - E = rtc::ArrayView(E_heap_.data(), num_capture_channels_); - comfort_noise = rtc::ArrayView(comfort_noise_heap_.data(), - num_capture_channels_); - high_band_comfort_noise = rtc::ArrayView( + Y = ArrayView(Y_heap_.data(), num_capture_channels_); + E = ArrayView(E_heap_.data(), num_capture_channels_); + comfort_noise = + ArrayView(comfort_noise_heap_.data(), num_capture_channels_); + high_band_comfort_noise = ArrayView( high_band_comfort_noise_heap_.data(), num_capture_channels_); - subtractor_output = rtc::ArrayView( + subtractor_output = ArrayView( subtractor_output_heap_.data(), num_capture_channels_); } @@ -333,9 +337,9 @@ void EchoRemoverImpl::ProcessCapture( if (gain_change_hangover_ == 0) { constexpr int kMaxBlocksPerFrame = 3; gain_change_hangover_ = kMaxBlocksPerFrame; - rtc::LoggingSeverity log_level = - config_.delay.log_warning_on_delay_changes ? rtc::LS_WARNING - : rtc::LS_VERBOSE; + LoggingSeverity log_level = config_.delay.log_warning_on_delay_changes + ? 
LS_WARNING + : LS_VERBOSE; RTC_LOG_V(log_level) << "Gain change detected at block " << block_counter_; } else { @@ -476,7 +480,7 @@ void EchoRemoverImpl::ProcessCapture( void EchoRemoverImpl::FormLinearFilterOutput( const SubtractorOutput& subtractor_output, - rtc::ArrayView output) { + ArrayView output) { RTC_DCHECK_EQ(subtractor_output.e_refined.size(), output.size()); RTC_DCHECK_EQ(subtractor_output.e_coarse.size(), output.size()); bool use_refined_output = true; @@ -510,12 +514,14 @@ void EchoRemoverImpl::FormLinearFilterOutput( } // namespace -EchoRemover* EchoRemover::Create(const EchoCanceller3Config& config, - int sample_rate_hz, - size_t num_render_channels, - size_t num_capture_channels) { - return new EchoRemoverImpl(config, sample_rate_hz, num_render_channels, - num_capture_channels); +std::unique_ptr EchoRemover::Create( + const Environment& env, + const EchoCanceller3Config& config, + int sample_rate_hz, + size_t num_render_channels, + size_t num_capture_channels) { + return std::make_unique( + env, config, sample_rate_hz, num_render_channels, num_capture_channels); } } // namespace webrtc diff --git a/modules/audio_processing/aec3/echo_remover.h b/modules/audio_processing/aec3/echo_remover.h index f2f4f5e64d..18a7bc3cfa 100644 --- a/modules/audio_processing/aec3/echo_remover.h +++ b/modules/audio_processing/aec3/echo_remover.h @@ -11,11 +11,13 @@ #ifndef MODULES_AUDIO_PROCESSING_AEC3_ECHO_REMOVER_H_ #define MODULES_AUDIO_PROCESSING_AEC3_ECHO_REMOVER_H_ +#include +#include #include -#include "absl/types/optional.h" #include "api/audio/echo_canceller3_config.h" #include "api/audio/echo_control.h" +#include "api/environment/environment.h" #include "modules/audio_processing/aec3/block.h" #include "modules/audio_processing/aec3/delay_estimate.h" #include "modules/audio_processing/aec3/echo_path_variability.h" @@ -26,10 +28,11 @@ namespace webrtc { // Class for removing the echo from the capture signal. class EchoRemover { public: - static EchoRemover* Create(const EchoCanceller3Config& config, - int sample_rate_hz, - size_t num_render_channels, - size_t num_capture_channels); + static std::unique_ptr Create(const Environment& env, + const EchoCanceller3Config& config, + int sample_rate_hz, + size_t num_render_channels, + size_t num_capture_channels); virtual ~EchoRemover() = default; // Get current metrics. 
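For completeness, a small sketch of pulling metrics through the unchanged GetMetrics hook; the Metrics field names in the comment come from api/audio/echo_control.h and are assumptions as far as this patch is concerned.

#include "api/audio/echo_control.h"
#include "modules/audio_processing/aec3/echo_remover.h"

// Sketch: metrics are filled in place via an out-parameter.
void LogEchoMetricsSketch(const webrtc::EchoRemover& remover) {
  webrtc::EchoControl::Metrics metrics;
  remover.GetMetrics(&metrics);
  // e.g. metrics.echo_return_loss, metrics.echo_return_loss_enhancement and
  // metrics.delay_ms (assumed fields of EchoControl::Metrics).
}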
@@ -41,7 +44,7 @@ class EchoRemover { virtual void ProcessCapture( EchoPathVariability echo_path_variability, bool capture_signal_saturation, - const absl::optional& external_delay, + const std::optional& external_delay, RenderBuffer* render_buffer, Block* linear_output, Block* capture) = 0; diff --git a/modules/audio_processing/aec3/echo_remover_metrics.cc b/modules/audio_processing/aec3/echo_remover_metrics.cc index c3fc80773a..aa13da9abd 100644 --- a/modules/audio_processing/aec3/echo_remover_metrics.cc +++ b/modules/audio_processing/aec3/echo_remover_metrics.cc @@ -53,8 +53,8 @@ void EchoRemoverMetrics::ResetMetrics() { void EchoRemoverMetrics::Update( const AecState& aec_state, - const std::array& comfort_noise_spectrum, - const std::array& suppressor_gain) { + const std::array& /* comfort_noise_spectrum */, + const std::array& /* suppressor_gain */) { metrics_reported_ = false; if (++block_counter_ <= kMetricsCollectionBlocks) { erl_time_domain_.UpdateInstant(aec_state.ErlTimeDomain()); @@ -149,7 +149,7 @@ int TransformDbMetricForReporting(bool negate, if (negate) { new_value = -new_value; } - return static_cast(rtc::SafeClamp(new_value, min_value, max_value)); + return static_cast(SafeClamp(new_value, min_value, max_value)); } } // namespace aec3 diff --git a/modules/audio_processing/aec3/echo_remover_metrics_unittest.cc b/modules/audio_processing/aec3/echo_remover_metrics_unittest.cc index 45b30a9c74..b7a0d58f12 100644 --- a/modules/audio_processing/aec3/echo_remover_metrics_unittest.cc +++ b/modules/audio_processing/aec3/echo_remover_metrics_unittest.cc @@ -14,6 +14,7 @@ #include +#include "api/environment/environment_factory.h" #include "modules/audio_processing/aec3/aec3_fft.h" #include "modules/audio_processing/aec3/aec_state.h" #include "test/gtest.h" @@ -138,7 +139,7 @@ TEST(DbMetric, Constructor) { // Verify the general functionality of EchoRemoverMetrics. 
TEST(EchoRemoverMetrics, NormalUsage) { EchoRemoverMetrics metrics; - AecState aec_state(EchoCanceller3Config{}, 1); + AecState aec_state(CreateEnvironment(), EchoCanceller3Config{}, 1); std::array comfort_noise_spectrum; std::array suppressor_gain; comfort_noise_spectrum.fill(10.f); diff --git a/modules/audio_processing/aec3/echo_remover_unittest.cc b/modules/audio_processing/aec3/echo_remover_unittest.cc index 66168ab08d..fb2eb78d2b 100644 --- a/modules/audio_processing/aec3/echo_remover_unittest.cc +++ b/modules/audio_processing/aec3/echo_remover_unittest.cc @@ -15,6 +15,8 @@ #include #include +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/render_buffer.h" #include "modules/audio_processing/aec3/render_delay_buffer.h" @@ -27,13 +29,13 @@ namespace webrtc { namespace { std::string ProduceDebugText(int sample_rate_hz) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Sample rate: " << sample_rate_hz; return ss.Release(); } std::string ProduceDebugText(int sample_rate_hz, int delay) { - rtc::StringBuilder ss(ProduceDebugText(sample_rate_hz)); + StringBuilder ss(ProduceDebugText(sample_rate_hz)); ss << ", Delay: " << delay; return ss.Release(); } @@ -53,12 +55,13 @@ INSTANTIATE_TEST_SUITE_P(MultiChannel, TEST_P(EchoRemoverMultiChannel, BasicApiCalls) { const size_t num_render_channels = std::get<0>(GetParam()); const size_t num_capture_channels = std::get<1>(GetParam()); - absl::optional delay_estimate; + const Environment env = CreateEnvironment(); + std::optional delay_estimate; for (auto rate : {16000, 32000, 48000}) { SCOPED_TRACE(ProduceDebugText(rate)); - std::unique_ptr remover( - EchoRemover::Create(EchoCanceller3Config(), rate, num_render_channels, - num_capture_channels)); + std::unique_ptr remover = + EchoRemover::Create(env, EchoCanceller3Config(), rate, + num_render_channels, num_capture_channels); std::unique_ptr render_buffer(RenderDelayBuffer::Create( EchoCanceller3Config(), rate, num_render_channels)); @@ -86,8 +89,8 @@ TEST_P(EchoRemoverMultiChannel, BasicApiCalls) { // TODO(peah): Re-enable the test once the issue with memory leaks during DEATH // tests on test bots has been fixed. TEST(EchoRemoverDeathTest, DISABLED_WrongSampleRate) { - EXPECT_DEATH(std::unique_ptr( - EchoRemover::Create(EchoCanceller3Config(), 8001, 1, 1)), + EXPECT_DEATH(EchoRemover::Create(CreateEnvironment(), EchoCanceller3Config(), + 8001, 1, 1), ""); } @@ -95,11 +98,12 @@ TEST(EchoRemoverDeathTest, DISABLED_WrongSampleRate) { // TODO(peah): Re-enable the test once the issue with memory leaks during DEATH // tests on test bots has been fixed.c TEST(EchoRemoverDeathTest, DISABLED_WrongCaptureNumBands) { - absl::optional delay_estimate; + const Environment env = CreateEnvironment(); + std::optional delay_estimate; for (auto rate : {16000, 32000, 48000}) { SCOPED_TRACE(ProduceDebugText(rate)); - std::unique_ptr remover( - EchoRemover::Create(EchoCanceller3Config(), rate, 1, 1)); + std::unique_ptr remover = + EchoRemover::Create(env, EchoCanceller3Config(), rate, 1, 1); std::unique_ptr render_buffer( RenderDelayBuffer::Create(EchoCanceller3Config(), rate, 1)); Block capture(NumBandsForRate(rate == 48000 ? 16000 : rate + 16000), 1); @@ -114,9 +118,9 @@ TEST(EchoRemoverDeathTest, DISABLED_WrongCaptureNumBands) { // Verifies the check for non-null capture block. 
TEST(EchoRemoverDeathTest, NullCapture) { - absl::optional delay_estimate; - std::unique_ptr remover( - EchoRemover::Create(EchoCanceller3Config(), 16000, 1, 1)); + std::optional delay_estimate; + std::unique_ptr remover = EchoRemover::Create( + CreateEnvironment(), EchoCanceller3Config(), 16000, 1, 1); std::unique_ptr render_buffer( RenderDelayBuffer::Create(EchoCanceller3Config(), 16000, 1)); EchoPathVariability echo_path_variability( @@ -133,8 +137,9 @@ TEST(EchoRemoverDeathTest, NullCapture) { // remove echoes. TEST(EchoRemover, BasicEchoRemoval) { constexpr int kNumBlocksToProcess = 500; + const Environment env = CreateEnvironment(); Random random_generator(42U); - absl::optional delay_estimate; + std::optional delay_estimate; for (size_t num_channels : {1, 2, 4}) { for (auto rate : {16000, 32000, 48000}) { Block x(NumBandsForRate(rate), num_channels); @@ -144,8 +149,8 @@ TEST(EchoRemover, BasicEchoRemoval) { for (size_t delay_samples : {0, 64, 150, 200, 301}) { SCOPED_TRACE(ProduceDebugText(rate, delay_samples)); EchoCanceller3Config config; - std::unique_ptr remover( - EchoRemover::Create(config, rate, num_channels, num_channels)); + std::unique_ptr remover = + EchoRemover::Create(env, config, rate, num_channels, num_channels); std::unique_ptr render_buffer( RenderDelayBuffer::Create(config, rate, num_channels)); render_buffer->AlignFromDelay(delay_samples / kBlockSize); diff --git a/modules/audio_processing/aec3/erl_estimator.cc b/modules/audio_processing/aec3/erl_estimator.cc index 01cc33cb80..d3eeafc419 100644 --- a/modules/audio_processing/aec3/erl_estimator.cc +++ b/modules/audio_processing/aec3/erl_estimator.cc @@ -40,9 +40,8 @@ void ErlEstimator::Reset() { void ErlEstimator::Update( const std::vector& converged_filters, - rtc::ArrayView> render_spectra, - rtc::ArrayView> - capture_spectra) { + ArrayView> render_spectra, + ArrayView> capture_spectra) { const size_t num_capture_channels = converged_filters.size(); RTC_DCHECK_EQ(capture_spectra.size(), num_capture_channels); @@ -85,7 +84,7 @@ void ErlEstimator::Update( const size_t num_render_channels = render_spectra.size(); std::array max_render_spectrum_data; - rtc::ArrayView max_render_spectrum = + ArrayView max_render_spectrum = render_spectra[/*channel=*/0]; if (num_render_channels > 1) { std::copy(render_spectra[0].begin(), render_spectra[0].end(), diff --git a/modules/audio_processing/aec3/erl_estimator.h b/modules/audio_processing/aec3/erl_estimator.h index 639a52c561..b793ddec78 100644 --- a/modules/audio_processing/aec3/erl_estimator.h +++ b/modules/audio_processing/aec3/erl_estimator.h @@ -34,11 +34,10 @@ class ErlEstimator { void Reset(); // Updates the ERL estimate. - void Update(const std::vector& converged_filters, - rtc::ArrayView> - render_spectra, - rtc::ArrayView> - capture_spectra); + void Update( + const std::vector& converged_filters, + ArrayView> render_spectra, + ArrayView> capture_spectra); // Returns the most recent ERL estimate. 
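The erl_estimator hunks above are part of the mechanical rename from rtc::ArrayView to the unqualified ArrayView alias inside the webrtc namespace (the element types between the angle brackets are lost in this rendering of the diff). A hedged before/after sketch, with const float chosen only as an illustrative element type:

#include "api/array_view.h"

namespace webrtc {

// Before the rename this parameter would have been spelled
//   rtc::ArrayView<const float> values
// inside webrtc code; afterwards the unqualified alias is used.
float SumOfSquares(ArrayView<const float> values) {
  float sum = 0.f;
  for (float v : values) {
    sum += v * v;
  }
  return sum;
}

}  // namespace webrtc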
const std::array& Erl() const { return erl_; } diff --git a/modules/audio_processing/aec3/erl_estimator_unittest.cc b/modules/audio_processing/aec3/erl_estimator_unittest.cc index 79e5465e3c..4bb07c3ccc 100644 --- a/modules/audio_processing/aec3/erl_estimator_unittest.cc +++ b/modules/audio_processing/aec3/erl_estimator_unittest.cc @@ -18,7 +18,7 @@ namespace webrtc { namespace { std::string ProduceDebugText(size_t num_render_channels, size_t num_capture_channels) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Render channels: " << num_render_channels; ss << ", Capture channels: " << num_capture_channels; return ss.Release(); diff --git a/modules/audio_processing/aec3/erle_estimator.cc b/modules/audio_processing/aec3/erle_estimator.cc index 0e3d715c59..f1edfe601a 100644 --- a/modules/audio_processing/aec3/erle_estimator.cc +++ b/modules/audio_processing/aec3/erle_estimator.cc @@ -15,12 +15,13 @@ namespace webrtc { -ErleEstimator::ErleEstimator(size_t startup_phase_length_blocks, +ErleEstimator::ErleEstimator(const Environment& env, + size_t startup_phase_length_blocks, const EchoCanceller3Config& config, size_t num_capture_channels) : startup_phase_length_blocks_(startup_phase_length_blocks), fullband_erle_estimator_(config.erle, num_capture_channels), - subband_erle_estimator_(config, num_capture_channels) { + subband_erle_estimator_(env, config, num_capture_channels) { if (config.erle.num_sections > 1) { signal_dependent_erle_estimator_ = std::make_unique(config, @@ -44,13 +45,11 @@ void ErleEstimator::Reset(bool delay_change) { void ErleEstimator::Update( const RenderBuffer& render_buffer, - rtc::ArrayView>> + ArrayView>> filter_frequency_responses, - rtc::ArrayView - avg_render_spectrum_with_reverb, - rtc::ArrayView> capture_spectra, - rtc::ArrayView> - subtractor_spectra, + ArrayView avg_render_spectrum_with_reverb, + ArrayView> capture_spectra, + ArrayView> subtractor_spectra, const std::vector& converged_filters) { RTC_DCHECK_EQ(subband_erle_estimator_.Erle(/*onset_compensated=*/true).size(), capture_spectra.size()); diff --git a/modules/audio_processing/aec3/erle_estimator.h b/modules/audio_processing/aec3/erle_estimator.h index 55797592a9..cb014b0e68 100644 --- a/modules/audio_processing/aec3/erle_estimator.h +++ b/modules/audio_processing/aec3/erle_estimator.h @@ -15,9 +15,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" #include "modules/audio_processing/aec3/aec3_common.h" @@ -33,7 +33,8 @@ namespace webrtc { // and another one is done using the aggreation of energy over all the subbands. class ErleEstimator { public: - ErleEstimator(size_t startup_phase_length_blocks, + ErleEstimator(const Environment& env, + size_t startup_phase_length_blocks, const EchoCanceller3Config& config, size_t num_capture_channels); ~ErleEstimator(); @@ -44,18 +45,16 @@ class ErleEstimator { // Updates the ERLE estimates. void Update( const RenderBuffer& render_buffer, - rtc::ArrayView>> + ArrayView>> filter_frequency_responses, - rtc::ArrayView + ArrayView avg_render_spectrum_with_reverb, - rtc::ArrayView> - capture_spectra, - rtc::ArrayView> - subtractor_spectra, + ArrayView> capture_spectra, + ArrayView> subtractor_spectra, const std::vector& converged_filters); // Returns the most recent subband ERLE estimates. - rtc::ArrayView> Erle( + ArrayView> Erle( bool onset_compensated) const { return signal_dependent_erle_estimator_ ? 
signal_dependent_erle_estimator_->Erle(onset_compensated) @@ -63,8 +62,7 @@ class ErleEstimator { } // Returns the non-capped subband ERLE. - rtc::ArrayView> ErleUnbounded() - const { + ArrayView> ErleUnbounded() const { // Unbounded ERLE is only used with the subband erle estimator where the // ERLE is often capped at low values. When the signal dependent ERLE // estimator is used the capped ERLE is returned. @@ -76,7 +74,7 @@ class ErleEstimator { // Returns the subband ERLE that are estimated during onsets (only used for // testing). - rtc::ArrayView> ErleDuringOnsets() + ArrayView> ErleDuringOnsets() const { return subband_erle_estimator_.ErleDuringOnsets(); } @@ -91,8 +89,7 @@ class ErleEstimator { // vector with content between 0 and 1 where 1 indicates that, at this current // time instant, the linear filter is reaching its maximum subtraction // performance. - rtc::ArrayView> GetInstLinearQualityEstimates() - const { + ArrayView> GetInstLinearQualityEstimates() const { return fullband_erle_estimator_.GetInstLinearQualityEstimates(); } diff --git a/modules/audio_processing/aec3/erle_estimator_unittest.cc b/modules/audio_processing/aec3/erle_estimator_unittest.cc index 42be7d9c7d..9f06ffa4ec 100644 --- a/modules/audio_processing/aec3/erle_estimator_unittest.cc +++ b/modules/audio_processing/aec3/erle_estimator_unittest.cc @@ -13,6 +13,7 @@ #include #include "api/array_view.h" +#include "api/environment/environment_factory.h" #include "modules/audio_processing/aec3/render_delay_buffer.h" #include "modules/audio_processing/aec3/spectrum_buffer.h" #include "rtc_base/random.h" @@ -28,7 +29,7 @@ constexpr float kTrueErleOnsets = 1.0f; constexpr float kEchoPathGain = 3.f; void VerifyErleBands( - rtc::ArrayView> erle, + ArrayView> erle, float reference_lf, float reference_hf) { for (size_t ch = 0; ch < erle.size(); ++ch) { @@ -41,18 +42,17 @@ void VerifyErleBands( } } -void VerifyErle( - rtc::ArrayView> erle, - float erle_time_domain, - float reference_lf, - float reference_hf) { +void VerifyErle(ArrayView> erle, + float erle_time_domain, + float reference_lf, + float reference_hf) { VerifyErleBands(erle, reference_lf, reference_hf); EXPECT_NEAR(kTrueErle, erle_time_domain, 0.5); } void VerifyErleGreaterOrEqual( - rtc::ArrayView> erle1, - rtc::ArrayView> erle2) { + ArrayView> erle1, + ArrayView> erle2) { for (size_t ch = 0; ch < erle1.size(); ++ch) { for (size_t i = 0; i < kFftLengthBy2Plus1; ++i) { EXPECT_GE(erle1[ch][i], erle2[ch][i]); @@ -81,8 +81,8 @@ void FormFarendTimeFrame(Block* x) { void FormFarendFrame(const RenderBuffer& render_buffer, float erle, std::array* X2, - rtc::ArrayView> E2, - rtc::ArrayView> Y2) { + ArrayView> E2, + ArrayView> Y2) { const auto& spectrum_buffer = render_buffer.GetSpectrumBuffer(); const int num_render_channels = spectrum_buffer.buffer[0].size(); const int num_capture_channels = Y2.size(); @@ -103,11 +103,10 @@ void FormFarendFrame(const RenderBuffer& render_buffer, } } -void FormNearendFrame( - Block* x, - std::array* X2, - rtc::ArrayView> E2, - rtc::ArrayView> Y2) { +void FormNearendFrame(Block* x, + std::array* X2, + ArrayView> E2, + ArrayView> Y2) { for (int band = 0; band < x->NumBands(); ++band) { for (int ch = 0; ch < x->NumChannels(); ++ch) { std::fill(x->begin(band, ch), x->end(band, ch), 0.f); @@ -121,10 +120,9 @@ void FormNearendFrame( } } -void GetFilterFreq( - size_t delay_headroom_samples, - rtc::ArrayView>> - filter_frequency_response) { +void GetFilterFreq(size_t delay_headroom_samples, + ArrayView>> + filter_frequency_response) { const 
size_t delay_headroom_blocks = delay_headroom_samples / kBlockSize; for (size_t ch = 0; ch < filter_frequency_response[0].size(); ++ch) { for (auto& block_freq_resp : filter_frequency_response) { @@ -173,7 +171,7 @@ TEST_P(ErleEstimatorMultiChannel, VerifyErleIncreaseAndHold) { GetFilterFreq(config.delay.delay_headroom_samples, filter_frequency_response); - ErleEstimator estimator(0, config, num_capture_channels); + ErleEstimator estimator(CreateEnvironment(), 0, config, num_capture_channels); FormFarendTimeFrame(&x); render_delay_buffer->Insert(x); @@ -236,7 +234,8 @@ TEST_P(ErleEstimatorMultiChannel, VerifyErleTrackingOnOnsets) { GetFilterFreq(config.delay.delay_headroom_samples, filter_frequency_response); - ErleEstimator estimator(/*startup_phase_length_blocks=*/0, config, + ErleEstimator estimator(CreateEnvironment(), + /*startup_phase_length_blocks=*/0, config, num_capture_channels); FormFarendTimeFrame(&x); diff --git a/modules/audio_processing/aec3/fft_data.h b/modules/audio_processing/aec3/fft_data.h index 9c25e784aa..d8ac43e918 100644 --- a/modules/audio_processing/aec3/fft_data.h +++ b/modules/audio_processing/aec3/fft_data.h @@ -41,11 +41,11 @@ struct FftData { } // Computes the power spectrum of the data. - void SpectrumAVX2(rtc::ArrayView power_spectrum) const; + void SpectrumAVX2(ArrayView power_spectrum) const; // Computes the power spectrum of the data. void Spectrum(Aec3Optimization optimization, - rtc::ArrayView power_spectrum) const { + ArrayView power_spectrum) const { RTC_DCHECK_EQ(kFftLengthBy2Plus1, power_spectrum.size()); switch (optimization) { #if defined(WEBRTC_ARCH_X86_FAMILY) diff --git a/modules/audio_processing/aec3/fft_data_avx2.cc b/modules/audio_processing/aec3/fft_data_avx2.cc index a4b3056071..c604b56a96 100644 --- a/modules/audio_processing/aec3/fft_data_avx2.cc +++ b/modules/audio_processing/aec3/fft_data_avx2.cc @@ -16,7 +16,7 @@ namespace webrtc { // Computes the power spectrum of the data. -void FftData::SpectrumAVX2(rtc::ArrayView power_spectrum) const { +void FftData::SpectrumAVX2(ArrayView power_spectrum) const { RTC_DCHECK_EQ(kFftLengthBy2Plus1, power_spectrum.size()); for (size_t k = 0; k < kFftLengthBy2; k += 8) { __m256 r = _mm256_loadu_ps(&re[k]); diff --git a/modules/audio_processing/aec3/filter_analyzer.cc b/modules/audio_processing/aec3/filter_analyzer.cc index d8fd3aa275..b5bcad74c4 100644 --- a/modules/audio_processing/aec3/filter_analyzer.cc +++ b/modules/audio_processing/aec3/filter_analyzer.cc @@ -24,7 +24,7 @@ namespace webrtc { namespace { -size_t FindPeakIndex(rtc::ArrayView filter_time_domain, +size_t FindPeakIndex(ArrayView filter_time_domain, size_t peak_index_in, size_t start_sample, size_t end_sample) { @@ -73,7 +73,7 @@ void FilterAnalyzer::Reset() { } void FilterAnalyzer::Update( - rtc::ArrayView> filters_time_domain, + ArrayView> filters_time_domain, const RenderBuffer& render_buffer, bool* any_filter_consistent, float* max_echo_path_gain) { @@ -102,7 +102,7 @@ void FilterAnalyzer::Update( } void FilterAnalyzer::AnalyzeRegion( - rtc::ArrayView> filters_time_domain, + ArrayView> filters_time_domain, const RenderBuffer& render_buffer) { // Preprocess the filter to avoid issues with low-frequency components in the // filter. 
@@ -134,9 +134,8 @@ void FilterAnalyzer::AnalyzeRegion( } } -void FilterAnalyzer::UpdateFilterGain( - rtc::ArrayView filter_time_domain, - FilterAnalysisState* st) { +void FilterAnalyzer::UpdateFilterGain(ArrayView filter_time_domain, + FilterAnalysisState* st) { bool sufficient_time_to_converge = blocks_since_reset_ > 5 * kNumBlocksPerSecond; @@ -155,7 +154,7 @@ void FilterAnalyzer::UpdateFilterGain( } void FilterAnalyzer::PreProcessFilters( - rtc::ArrayView> filters_time_domain) { + ArrayView> filters_time_domain) { for (size_t ch = 0; ch < filters_time_domain.size(); ++ch) { RTC_DCHECK_LT(region_.start_sample_, filters_time_domain[ch].size()); RTC_DCHECK_LT(region_.end_sample_, filters_time_domain[ch].size()); @@ -220,7 +219,7 @@ void FilterAnalyzer::ConsistentFilterDetector::Reset() { } bool FilterAnalyzer::ConsistentFilterDetector::Detect( - rtc::ArrayView filter_to_analyze, + ArrayView filter_to_analyze, const FilterRegion& region, const Block& x_block, size_t peak_index, @@ -264,7 +263,7 @@ bool FilterAnalyzer::ConsistentFilterDetector::Detect( if (significant_peak_) { bool active_render_block = false; for (int ch = 0; ch < x_block.NumChannels(); ++ch) { - rtc::ArrayView x_channel = + ArrayView x_channel = x_block.View(/*band=*/0, ch); const float x_energy = std::inner_product( x_channel.begin(), x_channel.end(), x_channel.begin(), 0.f); diff --git a/modules/audio_processing/aec3/filter_analyzer.h b/modules/audio_processing/aec3/filter_analyzer.h index 9aec8b14d7..9e87fbb975 100644 --- a/modules/audio_processing/aec3/filter_analyzer.h +++ b/modules/audio_processing/aec3/filter_analyzer.h @@ -42,13 +42,13 @@ class FilterAnalyzer { void Reset(); // Updates the estimates with new input data. - void Update(rtc::ArrayView> filters_time_domain, + void Update(ArrayView> filters_time_domain, const RenderBuffer& render_buffer, bool* any_filter_consistent, float* max_echo_path_gain); // Returns the delay in blocks for each filter. - rtc::ArrayView FilterDelaysBlocks() const { + ArrayView FilterDelaysBlocks() const { return filter_delays_blocks_; } @@ -61,7 +61,7 @@ class FilterAnalyzer { } // Returns the preprocessed filter. 
- rtc::ArrayView> GetAdjustedFilters() const { + ArrayView> GetAdjustedFilters() const { return h_highpass_; } @@ -71,14 +71,13 @@ class FilterAnalyzer { private: struct FilterAnalysisState; - void AnalyzeRegion( - rtc::ArrayView> filters_time_domain, - const RenderBuffer& render_buffer); + void AnalyzeRegion(ArrayView> filters_time_domain, + const RenderBuffer& render_buffer); - void UpdateFilterGain(rtc::ArrayView filters_time_domain, + void UpdateFilterGain(ArrayView filters_time_domain, FilterAnalysisState* st); void PreProcessFilters( - rtc::ArrayView> filters_time_domain); + ArrayView> filters_time_domain); void ResetRegion(); @@ -93,7 +92,7 @@ class FilterAnalyzer { public: explicit ConsistentFilterDetector(const EchoCanceller3Config& config); void Reset(); - bool Detect(rtc::ArrayView filter_to_analyze, + bool Detect(ArrayView filter_to_analyze, const FilterRegion& region, const Block& x_block, size_t peak_index, diff --git a/modules/audio_processing/aec3/frame_blocker.cc b/modules/audio_processing/aec3/frame_blocker.cc index 3039dcf7f1..154dc12642 100644 --- a/modules/audio_processing/aec3/frame_blocker.cc +++ b/modules/audio_processing/aec3/frame_blocker.cc @@ -32,7 +32,7 @@ FrameBlocker::FrameBlocker(size_t num_bands, size_t num_channels) FrameBlocker::~FrameBlocker() = default; void FrameBlocker::InsertSubFrameAndExtractBlock( - const std::vector>>& sub_frame, + const std::vector>>& sub_frame, Block* block) { RTC_DCHECK(block); RTC_DCHECK_EQ(num_bands_, block->NumBands()); diff --git a/modules/audio_processing/aec3/frame_blocker.h b/modules/audio_processing/aec3/frame_blocker.h index 623c812157..4cac7566f7 100644 --- a/modules/audio_processing/aec3/frame_blocker.h +++ b/modules/audio_processing/aec3/frame_blocker.h @@ -33,7 +33,7 @@ class FrameBlocker { // Inserts one 80 sample multiband subframe from the multiband frame and // extracts one 64 sample multiband block. void InsertSubFrameAndExtractBlock( - const std::vector>>& sub_frame, + const std::vector>>& sub_frame, Block* block); // Reports whether a multiband block of 64 samples is available for // extraction. 
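FrameBlocker::InsertSubFrameAndExtractBlock() keeps taking a per-band, per-channel vector of views, now spelled with the renamed ArrayView (the element type is again stripped in this rendering; float is assumed, matching the sub-frame buffers in the unit test). A sketch of how a caller could build such a view over existing storage, loosely mirroring FillSubFrameView() in frame_blocker_unittest.cc; MakeSubFrameView is a hypothetical helper:

#include <cstddef>
#include <vector>

#include "api/array_view.h"

// Builds non-owning ArrayViews over already-allocated sub-frame samples,
// one view per (band, channel) pair, in the shape the blocker expects.
std::vector<std::vector<webrtc::ArrayView<float>>> MakeSubFrameView(
    std::vector<std::vector<std::vector<float>>>& sub_frame) {
  std::vector<std::vector<webrtc::ArrayView<float>>> view(sub_frame.size());
  for (size_t band = 0; band < sub_frame.size(); ++band) {
    view[band].reserve(sub_frame[band].size());
    for (std::vector<float>& channel : sub_frame[band]) {
      view[band].emplace_back(channel.data(), channel.size());
    }
  }
  return view;
}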
diff --git a/modules/audio_processing/aec3/frame_blocker_unittest.cc b/modules/audio_processing/aec3/frame_blocker_unittest.cc index 92e393023a..135018412a 100644 --- a/modules/audio_processing/aec3/frame_blocker_unittest.cc +++ b/modules/audio_processing/aec3/frame_blocker_unittest.cc @@ -51,12 +51,12 @@ void FillSubFrameView( size_t sub_frame_counter, int offset, std::vector>>* sub_frame, - std::vector>>* sub_frame_view) { + std::vector>>* sub_frame_view) { FillSubFrame(sub_frame_counter, offset, sub_frame); for (size_t band = 0; band < sub_frame_view->size(); ++band) { for (size_t channel = 0; channel < (*sub_frame_view)[band].size(); ++channel) { - (*sub_frame_view)[band][channel] = rtc::ArrayView( + (*sub_frame_view)[band][channel] = ArrayView( &(*sub_frame)[band][channel][0], (*sub_frame)[band][channel].size()); } } @@ -65,7 +65,7 @@ void FillSubFrameView( bool VerifySubFrame( size_t sub_frame_counter, int offset, - const std::vector>>& sub_frame_view) { + const std::vector>>& sub_frame_view) { std::vector>> reference_sub_frame( sub_frame_view.size(), std::vector>( @@ -111,8 +111,8 @@ void RunBlockerTest(int sample_rate_hz, size_t num_channels) { std::vector>> input_sub_frame( num_bands, std::vector>( num_channels, std::vector(kSubFrameLength, 0.f))); - std::vector>> input_sub_frame_view( - num_bands, std::vector>(num_channels)); + std::vector>> input_sub_frame_view( + num_bands, std::vector>(num_channels)); FrameBlocker blocker(num_bands, num_channels); size_t block_counter = 0; @@ -149,10 +149,10 @@ void RunBlockerAndFramerTest(int sample_rate_hz, size_t num_channels) { std::vector>> output_sub_frame( num_bands, std::vector>( num_channels, std::vector(kSubFrameLength, 0.f))); - std::vector>> output_sub_frame_view( - num_bands, std::vector>(num_channels)); - std::vector>> input_sub_frame_view( - num_bands, std::vector>(num_channels)); + std::vector>> output_sub_frame_view( + num_bands, std::vector>(num_channels)); + std::vector>> input_sub_frame_view( + num_bands, std::vector>(num_channels)); FrameBlocker blocker(num_bands, num_channels); BlockFramer framer(num_bands, num_channels); @@ -199,9 +199,9 @@ void RunWronglySizedInsertAndExtractParametersTest( num_sub_frame_bands, std::vector>( num_sub_frame_channels, std::vector(sub_frame_length, 0.f))); - std::vector>> input_sub_frame_view( + std::vector>> input_sub_frame_view( input_sub_frame.size(), - std::vector>(num_sub_frame_channels)); + std::vector>(num_sub_frame_channels)); FillSubFrameView(0, 0, &input_sub_frame, &input_sub_frame_view); FrameBlocker blocker(correct_num_bands, correct_num_channels); EXPECT_DEATH( @@ -222,9 +222,9 @@ void RunWronglySizedExtractParameterTest(int sample_rate_hz, correct_num_bands, std::vector>( correct_num_channels, std::vector(kSubFrameLength, 0.f))); - std::vector>> input_sub_frame_view( + std::vector>> input_sub_frame_view( input_sub_frame.size(), - std::vector>(correct_num_channels)); + std::vector>(correct_num_channels)); FillSubFrameView(0, 0, &input_sub_frame, &input_sub_frame_view); FrameBlocker blocker(correct_num_bands, correct_num_channels); blocker.InsertSubFrameAndExtractBlock(input_sub_frame_view, &correct_block); @@ -247,8 +247,9 @@ void RunWrongExtractOrderTest(int sample_rate_hz, std::vector>> input_sub_frame( num_bands, std::vector>( num_channels, std::vector(kSubFrameLength, 0.f))); - std::vector>> input_sub_frame_view( - input_sub_frame.size(), std::vector>(num_channels)); + std::vector>> input_sub_frame_view( + input_sub_frame.size(), + std::vector>(num_channels)); 
FillSubFrameView(0, 0, &input_sub_frame, &input_sub_frame_view); FrameBlocker blocker(num_bands, num_channels); for (size_t k = 0; k < num_preceeding_api_calls; ++k) { @@ -260,7 +261,7 @@ void RunWrongExtractOrderTest(int sample_rate_hz, #endif std::string ProduceDebugText(int sample_rate_hz, size_t num_channels) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Sample rate: " << sample_rate_hz; ss << ", number of channels: " << num_channels; return ss.Release(); @@ -366,7 +367,7 @@ TEST(FrameBlockerDeathTest, WrongNumberOfPreceedingApiCallsForExtractBlock) { for (auto rate : {16000, 32000, 48000}) { for (size_t num_channels : {1, 2, 4, 8}) { for (size_t num_calls = 0; num_calls < 4; ++num_calls) { - rtc::StringBuilder ss; + webrtc::StringBuilder ss; ss << "Sample rate: " << rate; ss << "Num channels: " << num_channels; ss << ", Num preceeding InsertSubFrameAndExtractBlock calls: " @@ -394,7 +395,7 @@ TEST(FrameBlockerDeathTest, NullBlockParameter) { std::vector>> sub_frame( 1, std::vector>( 1, std::vector(kSubFrameLength, 0.f))); - std::vector>> sub_frame_view( + std::vector>> sub_frame_view( sub_frame.size()); FillSubFrameView(0, 0, &sub_frame, &sub_frame_view); EXPECT_DEATH( diff --git a/modules/audio_processing/aec3/fullband_erle_estimator.cc b/modules/audio_processing/aec3/fullband_erle_estimator.cc index e56674e4c9..498c21b568 100644 --- a/modules/audio_processing/aec3/fullband_erle_estimator.cc +++ b/modules/audio_processing/aec3/fullband_erle_estimator.cc @@ -13,8 +13,8 @@ #include #include #include +#include -#include "absl/types/optional.h" #include "api/array_view.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/logging/apm_data_dumper.h" @@ -57,9 +57,9 @@ void FullBandErleEstimator::Reset() { } void FullBandErleEstimator::Update( - rtc::ArrayView X2, - rtc::ArrayView> Y2, - rtc::ArrayView> E2, + ArrayView X2, + ArrayView> Y2, + ArrayView> E2, const std::vector& converged_filters) { for (size_t ch = 0; ch < Y2.size(); ++ch) { if (converged_filters[ch]) { @@ -142,7 +142,7 @@ void FullBandErleEstimator::ErleInstantaneous::Reset() { } void FullBandErleEstimator::ErleInstantaneous::ResetAccumulators() { - erle_log2_ = absl::nullopt; + erle_log2_ = std::nullopt; inst_quality_estimate_ = 0.f; num_points_ = 0; E2_acum_ = 0.f; diff --git a/modules/audio_processing/aec3/fullband_erle_estimator.h b/modules/audio_processing/aec3/fullband_erle_estimator.h index 7a082176d6..d012d3477b 100644 --- a/modules/audio_processing/aec3/fullband_erle_estimator.h +++ b/modules/audio_processing/aec3/fullband_erle_estimator.h @@ -12,9 +12,9 @@ #define MODULES_AUDIO_PROCESSING_AEC3_FULLBAND_ERLE_ESTIMATOR_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" #include "modules/audio_processing/aec3/aec3_common.h" @@ -33,9 +33,9 @@ class FullBandErleEstimator { void Reset(); // Updates the ERLE estimator. - void Update(rtc::ArrayView X2, - rtc::ArrayView> Y2, - rtc::ArrayView> E2, + void Update(ArrayView X2, + ArrayView> Y2, + ArrayView> E2, const std::vector& converged_filters); // Returns the fullband ERLE estimates in log2 units. @@ -49,8 +49,7 @@ class FullBandErleEstimator { // Returns an estimation of the current linear filter quality. It returns a // float number between 0 and 1 mapping 1 to the highest possible quality. 
- rtc::ArrayView> GetInstLinearQualityEstimates() - const { + ArrayView> GetInstLinearQualityEstimates() const { return linear_filters_qualities_; } @@ -73,10 +72,10 @@ class FullBandErleEstimator { // Resets the members related with an instantaneous estimate. void ResetAccumulators(); // Returns the instantaneous ERLE in log2 units. - absl::optional GetInstErleLog2() const { return erle_log2_; } + std::optional GetInstErleLog2() const { return erle_log2_; } // Gets an indication between 0 and 1 of the performance of the linear // filter for the current time instant. - absl::optional GetQualityEstimate() const { + std::optional GetQualityEstimate() const { if (erle_log2_) { float value = inst_quality_estimate_; if (clamp_inst_quality_to_zero_) { @@ -85,9 +84,9 @@ class FullBandErleEstimator { if (clamp_inst_quality_to_one_) { value = std::min(1.f, value); } - return absl::optional(value); + return std::optional(value); } - return absl::nullopt; + return std::nullopt; } void Dump(const std::unique_ptr& data_dumper) const; @@ -96,7 +95,7 @@ class FullBandErleEstimator { void UpdateQualityEstimate(); const bool clamp_inst_quality_to_zero_; const bool clamp_inst_quality_to_one_; - absl::optional erle_log2_; + std::optional erle_log2_; float inst_quality_estimate_; float max_erle_log2_; float min_erle_log2_; @@ -110,7 +109,7 @@ class FullBandErleEstimator { std::vector hold_counters_instantaneous_erle_; std::vector erle_time_domain_log2_; std::vector instantaneous_erle_; - std::vector> linear_filters_qualities_; + std::vector> linear_filters_qualities_; }; } // namespace webrtc diff --git a/modules/audio_processing/aec3/matched_filter.cc b/modules/audio_processing/aec3/matched_filter.cc index edaa2a4d14..90d550a255 100644 --- a/modules/audio_processing/aec3/matched_filter.cc +++ b/modules/audio_processing/aec3/matched_filter.cc @@ -9,6 +9,9 @@ */ #include "modules/audio_processing/aec3/matched_filter.h" +#include +#include + // Defines WEBRTC_ARCH_X86_FAMILY, used below. 
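The fullband ERLE estimator hunks above show the full absl::optional to std::optional pattern used throughout the patch: the <optional> header replaces absl/types/optional.h, absl::nullopt becomes std::nullopt, and wrapped return values switch type. A condensed sketch of the before/after shape (the class and its clamping limits are illustrative, not from the patch):

#include <algorithm>
#include <optional>

// Post-migration accessor: returns std::optional and resets via std::nullopt;
// before the change these were absl::optional and absl::nullopt.
class QualityTracker {
 public:
  std::optional<float> GetQualityEstimate() const {
    if (!erle_log2_.has_value()) {
      return std::nullopt;
    }
    return std::clamp(quality_, 0.f, 1.f);
  }
  void ResetAccumulators() { erle_log2_ = std::nullopt; }

 private:
  std::optional<float> erle_log2_;
  float quality_ = 0.f;
};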
#include "rtc_base/system/arch.h" @@ -21,17 +24,14 @@ #include #include #include -#include -#include +#include -#include "absl/types/optional.h" #include "api/array_view.h" +#include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/downsampled_render_buffer.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" -#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace { @@ -41,10 +41,10 @@ namespace { constexpr int kAccumulatedErrorSubSampleRate = 4; void UpdateAccumulatedError( - const rtc::ArrayView instantaneous_accumulated_error, - const rtc::ArrayView accumulated_error, - float one_over_error_sum_anchor, - float smooth_constant_increases) { + const webrtc::ArrayView instantaneous_accumulated_error, + const webrtc::ArrayView accumulated_error, + float one_over_error_sum_anchor) { + static constexpr float kSmoothConstantIncreases = 0.015f; for (size_t k = 0; k < instantaneous_accumulated_error.size(); ++k) { float error_norm = instantaneous_accumulated_error[k] * one_over_error_sum_anchor; @@ -52,97 +52,29 @@ void UpdateAccumulatedError( accumulated_error[k] = error_norm; } else { accumulated_error[k] += - smooth_constant_increases * (error_norm - accumulated_error[k]); + kSmoothConstantIncreases * (error_norm - accumulated_error[k]); } } } -size_t ComputePreEchoLag( - const webrtc::MatchedFilter::PreEchoConfiguration& pre_echo_configuration, - const rtc::ArrayView accumulated_error, - size_t lag, - size_t alignment_shift_winner) { +size_t ComputePreEchoLag(const webrtc::ArrayView accumulated_error, + size_t lag, + size_t alignment_shift_winner) { + static constexpr float kPreEchoThreshold = 0.5f; RTC_DCHECK_GE(lag, alignment_shift_winner); size_t pre_echo_lag_estimate = lag - alignment_shift_winner; size_t maximum_pre_echo_lag = std::min(pre_echo_lag_estimate / kAccumulatedErrorSubSampleRate, accumulated_error.size()); - switch (pre_echo_configuration.mode) { - case 0: - // Mode 0: Pre echo lag is defined as the first coefficient with an error - // lower than a threshold with a certain decrease slope. - for (size_t k = 1; k < maximum_pre_echo_lag; ++k) { - if (accumulated_error[k] < - pre_echo_configuration.threshold * accumulated_error[k - 1] && - accumulated_error[k] < pre_echo_configuration.threshold) { - pre_echo_lag_estimate = (k + 1) * kAccumulatedErrorSubSampleRate - 1; - break; - } - } - break; - case 1: - // Mode 1: Pre echo lag is defined as the first coefficient with an error - // lower than a certain threshold. - for (size_t k = 0; k < maximum_pre_echo_lag; ++k) { - if (accumulated_error[k] < pre_echo_configuration.threshold) { - pre_echo_lag_estimate = (k + 1) * kAccumulatedErrorSubSampleRate - 1; - break; - } - } - break; - case 2: - case 3: - // Mode 2,3: Pre echo lag is defined as the closest coefficient to the lag - // with an error lower than a certain threshold. 
- for (int k = static_cast(maximum_pre_echo_lag) - 1; k >= 0; --k) { - if (accumulated_error[k] > pre_echo_configuration.threshold) { - break; - } - pre_echo_lag_estimate = (k + 1) * kAccumulatedErrorSubSampleRate - 1; - } - break; - default: - RTC_DCHECK_NOTREACHED(); + for (int k = static_cast(maximum_pre_echo_lag) - 1; k >= 0; --k) { + if (accumulated_error[k] > kPreEchoThreshold) { break; + } + pre_echo_lag_estimate = (k + 1) * kAccumulatedErrorSubSampleRate - 1; } return pre_echo_lag_estimate + alignment_shift_winner; } -webrtc::MatchedFilter::PreEchoConfiguration FetchPreEchoConfiguration() { - constexpr float kDefaultThreshold = 0.5f; - constexpr int kDefaultMode = 3; - float threshold = kDefaultThreshold; - int mode = kDefaultMode; - const std::string pre_echo_configuration_field_trial = - webrtc::field_trial::FindFullName("WebRTC-Aec3PreEchoConfiguration"); - webrtc::FieldTrialParameter threshold_field_trial_parameter( - /*key=*/"threshold", /*default_value=*/kDefaultThreshold); - webrtc::FieldTrialParameter mode_field_trial_parameter( - /*key=*/"mode", /*default_value=*/kDefaultMode); - webrtc::ParseFieldTrial( - {&threshold_field_trial_parameter, &mode_field_trial_parameter}, - pre_echo_configuration_field_trial); - float threshold_read = - static_cast(threshold_field_trial_parameter.Get()); - int mode_read = mode_field_trial_parameter.Get(); - if (threshold_read < 1.0f && threshold_read > 0.0f) { - threshold = threshold_read; - } else { - RTC_LOG(LS_ERROR) - << "AEC3: Pre echo configuration: wrong input, threshold = " - << threshold_read << "."; - } - if (mode_read >= 0 && mode_read <= 3) { - mode = mode_read; - } else { - RTC_LOG(LS_ERROR) << "AEC3: Pre echo configuration: wrong input, mode = " - << mode_read << "."; - } - RTC_LOG(LS_INFO) << "AEC3: Pre echo configuration: threshold = " << threshold - << ", mode = " << mode << "."; - return {.threshold = threshold, .mode = mode}; -} - } // namespace namespace webrtc { @@ -160,13 +92,13 @@ void MatchedFilterCoreWithAccumulatedError_NEON( size_t x_start_index, float x2_sum_threshold, float smoothing, - rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView h, + webrtc::ArrayView x, + webrtc::ArrayView y, + webrtc::ArrayView h, bool* filters_updated, float* error_sum, - rtc::ArrayView accumulated_error, - rtc::ArrayView scratch_memory) { + webrtc::ArrayView accumulated_error, + webrtc::ArrayView scratch_memory) { const int h_size = static_cast(h.size()); const int x_size = static_cast(x.size()); RTC_DCHECK_EQ(0, h_size % 4); @@ -241,14 +173,14 @@ void MatchedFilterCoreWithAccumulatedError_NEON( void MatchedFilterCore_NEON(size_t x_start_index, float x2_sum_threshold, float smoothing, - rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView h, + webrtc::ArrayView x, + webrtc::ArrayView y, + webrtc::ArrayView h, bool* filters_updated, float* error_sum, bool compute_accumulated_error, - rtc::ArrayView accumulated_error, - rtc::ArrayView scratch_memory) { + webrtc::ArrayView accumulated_error, + webrtc::ArrayView scratch_memory) { const int h_size = static_cast(h.size()); const int x_size = static_cast(x.size()); RTC_DCHECK_EQ(0, h_size % 4); @@ -355,17 +287,16 @@ void MatchedFilterCore_NEON(size_t x_start_index, #if defined(WEBRTC_ARCH_X86_FAMILY) -void MatchedFilterCore_AccumulatedError_SSE2( - size_t x_start_index, - float x2_sum_threshold, - float smoothing, - rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView h, - bool* filters_updated, - float* error_sum, - rtc::ArrayView accumulated_error, - rtc::ArrayView scratch_memory) 
{ +void MatchedFilterCore_AccumulatedError_SSE2(size_t x_start_index, + float x2_sum_threshold, + float smoothing, + ArrayView x, + ArrayView y, + ArrayView h, + bool* filters_updated, + float* error_sum, + ArrayView accumulated_error, + ArrayView scratch_memory) { const int h_size = static_cast(h.size()); const int x_size = static_cast(x.size()); RTC_DCHECK_EQ(0, h_size % 8); @@ -431,20 +362,20 @@ void MatchedFilterCore_AccumulatedError_SSE2( const float alpha = smoothing * e / x2_sum; const __m128 alpha_128 = _mm_set1_ps(alpha); // filter = filter + smoothing * (y - filter * x) * x / x * x. - float* h_p = &h[0]; - const float* x_p = + float* h_p2 = &h[0]; + const float* x_p2 = chunk1 != h_size ? scratch_memory.data() : &x[x_start_index]; // Perform 128 bit vector operations. const int limit_by_4 = h_size >> 2; - for (int k = limit_by_4; k > 0; --k, h_p += 4, x_p += 4) { + for (int k = limit_by_4; k > 0; --k, h_p2 += 4, x_p2 += 4) { // Load the data into 128 bit vectors. - __m128 h_k = _mm_loadu_ps(h_p); - const __m128 x_k = _mm_loadu_ps(x_p); + __m128 h_k = _mm_loadu_ps(h_p2); + const __m128 x_k = _mm_loadu_ps(x_p2); // Compute h = h + alpha * x. const __m128 alpha_x = _mm_mul_ps(alpha_128, x_k); h_k = _mm_add_ps(h_k, alpha_x); // Store the result. - _mm_storeu_ps(h_p, h_k); + _mm_storeu_ps(h_p2, h_k); } *filters_updated = true; } @@ -455,14 +386,14 @@ void MatchedFilterCore_AccumulatedError_SSE2( void MatchedFilterCore_SSE2(size_t x_start_index, float x2_sum_threshold, float smoothing, - rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView h, + ArrayView x, + ArrayView y, + ArrayView h, bool* filters_updated, float* error_sum, bool compute_accumulated_error, - rtc::ArrayView accumulated_error, - rtc::ArrayView scratch_memory) { + ArrayView accumulated_error, + ArrayView scratch_memory) { if (compute_accumulated_error) { return MatchedFilterCore_AccumulatedError_SSE2( x_start_index, x2_sum_threshold, smoothing, x, y, h, filters_updated, @@ -534,26 +465,26 @@ void MatchedFilterCore_SSE2(size_t x_start_index, const float alpha = smoothing * e / x2_sum; const __m128 alpha_128 = _mm_set1_ps(alpha); // filter = filter + smoothing * (y - filter * x) * x / x * x. - float* h_p = &h[0]; + float* h_p2 = &h[0]; x_p = &x[x_start_index]; // Perform the loop in two chunks. for (int limit : {chunk1, chunk2}) { // Perform 128 bit vector operations. const int limit_by_4 = limit >> 2; - for (int k = limit_by_4; k > 0; --k, h_p += 4, x_p += 4) { + for (int k = limit_by_4; k > 0; --k, h_p2 += 4, x_p += 4) { // Load the data into 128 bit vectors. - __m128 h_k = _mm_loadu_ps(h_p); + __m128 h_k = _mm_loadu_ps(h_p2); const __m128 x_k = _mm_loadu_ps(x_p); // Compute h = h + alpha * x. const __m128 alpha_x = _mm_mul_ps(alpha_128, x_k); h_k = _mm_add_ps(h_k, alpha_x); // Store the result. - _mm_storeu_ps(h_p, h_k); + _mm_storeu_ps(h_p2, h_k); } // Perform non-vector operations for any remaining items. 
- for (int k = limit - limit_by_4 * 4; k > 0; --k, ++h_p, ++x_p) { - *h_p += alpha * *x_p; + for (int k = limit - limit_by_4 * 4; k > 0; --k, ++h_p2, ++x_p) { + *h_p2 += alpha * *x_p; } x_p = &x[0]; } @@ -567,13 +498,13 @@ void MatchedFilterCore_SSE2(size_t x_start_index, void MatchedFilterCore(size_t x_start_index, float x2_sum_threshold, float smoothing, - rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView h, + ArrayView x, + ArrayView y, + ArrayView h, bool* filters_updated, float* error_sum, bool compute_accumulated_error, - rtc::ArrayView accumulated_error) { + ArrayView accumulated_error) { if (compute_accumulated_error) { std::fill(accumulated_error.begin(), accumulated_error.end(), 0.0f); } @@ -613,10 +544,10 @@ void MatchedFilterCore(size_t x_start_index, const float alpha = smoothing * e / x2_sum; // filter = filter + smoothing * (y - filter * x) * x / x * x. - size_t x_index = x_start_index; + size_t x_index2 = x_start_index; for (size_t k = 0; k < h.size(); ++k) { - h[k] += alpha * x[x_index]; - x_index = x_index < (x.size() - 1) ? x_index + 1 : 0; + h[k] += alpha * x[x_index2]; + x_index2 = x_index2 < (x.size() - 1) ? x_index2 + 1 : 0; } *filters_updated = true; } @@ -625,7 +556,7 @@ void MatchedFilterCore(size_t x_start_index, } } -size_t MaxSquarePeakIndex(rtc::ArrayView h) { +size_t MaxSquarePeakIndex(ArrayView h) { if (h.size() < 2) { return 0; } @@ -685,8 +616,7 @@ MatchedFilter::MatchedFilter(ApmDataDumper* data_dumper, smoothing_fast_(smoothing_fast), smoothing_slow_(smoothing_slow), matching_filter_threshold_(matching_filter_threshold), - detect_pre_echo_(detect_pre_echo), - pre_echo_config_(FetchPreEchoConfiguration()) { + detect_pre_echo_(detect_pre_echo) { RTC_DCHECK(data_dumper); RTC_DCHECK_LT(0, window_size_sub_blocks); RTC_DCHECK((kBlockSize % sub_block_size) == 0); @@ -715,9 +645,9 @@ void MatchedFilter::Reset(bool full_reset) { std::fill(f.begin(), f.end(), 0.f); } - winner_lag_ = absl::nullopt; - reported_lag_estimate_ = absl::nullopt; - if (pre_echo_config_.mode != 3 || full_reset) { + winner_lag_ = std::nullopt; + reported_lag_estimate_ = std::nullopt; + if (full_reset) { for (auto& e : accumulated_error_) { std::fill(e.begin(), e.end(), 1.0f); } @@ -726,7 +656,7 @@ void MatchedFilter::Reset(bool full_reset) { } void MatchedFilter::Update(const DownsampledRenderBuffer& render_buffer, - rtc::ArrayView capture, + ArrayView capture, bool use_slow_smoothing) { RTC_DCHECK_EQ(sub_block_size_, capture.size()); auto& y = capture; @@ -745,10 +675,10 @@ void MatchedFilter::Update(const DownsampledRenderBuffer& render_buffer, // Apply all matched filters. float winner_error_sum = error_sum_anchor; - winner_lag_ = absl::nullopt; - reported_lag_estimate_ = absl::nullopt; + winner_lag_ = std::nullopt; + reported_lag_estimate_ = std::nullopt; size_t alignment_shift = 0; - absl::optional previous_lag_estimate; + std::optional previous_lag_estimate; const int num_filters = static_cast(filters_.size()); int winner_index = -1; for (int n = 0; n < num_filters; ++n) { @@ -823,22 +753,16 @@ void MatchedFilter::Update(const DownsampledRenderBuffer& render_buffer, reported_lag_estimate_ = LagEstimate(winner_lag_.value(), /*pre_echo_lag=*/winner_lag_.value()); if (detect_pre_echo_ && last_detected_best_lag_filter_ == winner_index) { - const float energy_threshold = - pre_echo_config_.mode == 3 ? 1.0f : 30.0f * 30.0f * y.size(); - - if (error_sum_anchor > energy_threshold) { - const float smooth_constant_increases = - pre_echo_config_.mode != 3 ? 
0.01f : 0.015f; - - UpdateAccumulatedError( - instantaneous_accumulated_error_, accumulated_error_[winner_index], - 1.0f / error_sum_anchor, smooth_constant_increases); + static constexpr float kEnergyThreshold = 1.0f; + if (error_sum_anchor > kEnergyThreshold) { + UpdateAccumulatedError(instantaneous_accumulated_error_, + accumulated_error_[winner_index], + 1.0f / error_sum_anchor); number_pre_echo_updates_++; } - if (pre_echo_config_.mode != 3 || number_pre_echo_updates_ >= 50) { + if (number_pre_echo_updates_ >= 50) { reported_lag_estimate_->pre_echo_lag = ComputePreEchoLag( - pre_echo_config_, accumulated_error_[winner_index], - winner_lag_.value(), + accumulated_error_[winner_index], winner_lag_.value(), winner_index * filter_intra_lag_shift_ /*alignment_shift_winner*/); } else { reported_lag_estimate_->pre_echo_lag = winner_lag_.value(); @@ -854,7 +778,7 @@ void MatchedFilter::Update(const DownsampledRenderBuffer& render_buffer, } } -void MatchedFilter::LogFilterProperties(int sample_rate_hz, +void MatchedFilter::LogFilterProperties(int /* sample_rate_hz */, size_t shift, size_t downsampling_factor) const { size_t alignment_shift = 0; @@ -885,10 +809,9 @@ void MatchedFilter::Dump() { "aec3_correlator_error_" + std::to_string(n) + "_h"; data_dumper_->DumpRaw(dumper_error.c_str(), accumulated_error_[n]); - size_t pre_echo_lag = - ComputePreEchoLag(pre_echo_config_, accumulated_error_[n], - lag_estimate + n * filter_intra_lag_shift_, - n * filter_intra_lag_shift_); + size_t pre_echo_lag = ComputePreEchoLag( + accumulated_error_[n], lag_estimate + n * filter_intra_lag_shift_, + n * filter_intra_lag_shift_); std::string dumper_pre_lag = "aec3_correlator_pre_echo_lag_" + std::to_string(n); data_dumper_->DumpRaw(dumper_pre_lag.c_str(), pre_echo_lag); diff --git a/modules/audio_processing/aec3/matched_filter.h b/modules/audio_processing/aec3/matched_filter.h index bb54fba2b4..63992bd108 100644 --- a/modules/audio_processing/aec3/matched_filter.h +++ b/modules/audio_processing/aec3/matched_filter.h @@ -13,9 +13,9 @@ #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "rtc_base/gtest_prod_util.h" @@ -34,14 +34,14 @@ namespace aec3 { void MatchedFilterCore_NEON(size_t x_start_index, float x2_sum_threshold, float smoothing, - rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView h, + webrtc::ArrayView x, + webrtc::ArrayView y, + webrtc::ArrayView h, bool* filters_updated, float* error_sum, bool compute_accumulation_error, - rtc::ArrayView accumulated_error, - rtc::ArrayView scratch_memory); + webrtc::ArrayView accumulated_error, + webrtc::ArrayView scratch_memory); #endif @@ -51,27 +51,27 @@ void MatchedFilterCore_NEON(size_t x_start_index, void MatchedFilterCore_SSE2(size_t x_start_index, float x2_sum_threshold, float smoothing, - rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView h, + ArrayView x, + ArrayView y, + ArrayView h, bool* filters_updated, float* error_sum, bool compute_accumulated_error, - rtc::ArrayView accumulated_error, - rtc::ArrayView scratch_memory); + ArrayView accumulated_error, + ArrayView scratch_memory); // Filter core for the matched filter that is optimized for AVX2. 
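With the WebRTC-Aec3PreEchoConfiguration field trial removed, ComputePreEchoLag() always uses the former mode-3 behaviour with a fixed 0.5 threshold: starting just below the winning lag, it keeps moving the pre-echo estimate earlier for as long as the sub-sampled accumulated error stays under the threshold, and stops at the first coefficient that exceeds it. A standalone sketch of that scan (the function name and the std::vector input are illustrative; the real code operates on the matched filter's accumulated-error buffer):

#include <algorithm>
#include <cstddef>
#include <vector>

// Walks backwards from the winner lag; each sub-sampled coefficient below the
// 0.5 threshold pulls the pre-echo estimate earlier, and the scan stops at the
// first coefficient above it.
size_t PreEchoLagSketch(const std::vector<float>& accumulated_error,
                        size_t lag,
                        size_t alignment_shift_winner) {
  constexpr float kPreEchoThreshold = 0.5f;
  constexpr size_t kSubSampleRate = 4;  // kAccumulatedErrorSubSampleRate.
  size_t pre_echo_lag = lag - alignment_shift_winner;
  const size_t max_pre_echo_lag =
      std::min(pre_echo_lag / kSubSampleRate, accumulated_error.size());
  for (int k = static_cast<int>(max_pre_echo_lag) - 1; k >= 0; --k) {
    if (accumulated_error[k] > kPreEchoThreshold) {
      break;
    }
    pre_echo_lag = (k + 1) * kSubSampleRate - 1;
  }
  return pre_echo_lag + alignment_shift_winner;
}

The related tuning likewise collapses to the former mode-3 constants in the hunks above: the accumulated error is only updated when error_sum_anchor exceeds 1.0f, and the smoothing constant is pinned at 0.015f.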
void MatchedFilterCore_AVX2(size_t x_start_index, float x2_sum_threshold, float smoothing, - rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView h, + ArrayView x, + ArrayView y, + ArrayView h, bool* filters_updated, float* error_sum, bool compute_accumulated_error, - rtc::ArrayView accumulated_error, - rtc::ArrayView scratch_memory); + ArrayView accumulated_error, + ArrayView scratch_memory); #endif @@ -79,16 +79,16 @@ void MatchedFilterCore_AVX2(size_t x_start_index, void MatchedFilterCore(size_t x_start_index, float x2_sum_threshold, float smoothing, - rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView h, + ArrayView x, + ArrayView y, + ArrayView h, bool* filters_updated, float* error_sum, bool compute_accumulation_error, - rtc::ArrayView accumulated_error); + ArrayView accumulated_error); // Find largest peak of squared values in array. -size_t MaxSquarePeakIndex(rtc::ArrayView h); +size_t MaxSquarePeakIndex(ArrayView h); } // namespace aec3 @@ -106,11 +106,6 @@ class MatchedFilter { size_t pre_echo_lag = 0; }; - struct PreEchoConfiguration { - const float threshold; - const int mode; - }; - MatchedFilter(ApmDataDumper* data_dumper, Aec3Optimization optimization, size_t sub_block_size, @@ -131,14 +126,14 @@ class MatchedFilter { // Updates the correlation with the values in the capture buffer. void Update(const DownsampledRenderBuffer& render_buffer, - rtc::ArrayView capture, + ArrayView capture, bool use_slow_smoothing); // Resets the matched filter. void Reset(bool full_reset); // Returns the current lag estimates. - absl::optional GetBestLagEstimate() const { + std::optional GetBestLagEstimate() const { return reported_lag_estimate_; } @@ -153,15 +148,6 @@ class MatchedFilter { size_t downsampling_factor) const; private: - FRIEND_TEST_ALL_PREFIXES(MatchedFilterFieldTrialTest, - PreEchoConfigurationTest); - FRIEND_TEST_ALL_PREFIXES(MatchedFilterFieldTrialTest, - WrongPreEchoConfigurationTest); - - // Only for testing. Gets the pre echo detection configuration. 
- const PreEchoConfiguration& GetPreEchoConfiguration() const { - return pre_echo_config_; - } void Dump(); ApmDataDumper* const data_dumper_; @@ -172,8 +158,8 @@ class MatchedFilter { std::vector> accumulated_error_; std::vector instantaneous_accumulated_error_; std::vector scratch_memory_; - absl::optional reported_lag_estimate_; - absl::optional winner_lag_; + std::optional reported_lag_estimate_; + std::optional winner_lag_; int last_detected_best_lag_filter_ = -1; std::vector filters_offsets_; int number_pre_echo_updates_ = 0; @@ -182,7 +168,6 @@ class MatchedFilter { const float smoothing_slow_; const float matching_filter_threshold_; const bool detect_pre_echo_; - const PreEchoConfiguration pre_echo_config_; }; } // namespace webrtc diff --git a/modules/audio_processing/aec3/matched_filter_avx2.cc b/modules/audio_processing/aec3/matched_filter_avx2.cc index 8c2ffcbd1e..830ca96f61 100644 --- a/modules/audio_processing/aec3/matched_filter_avx2.cc +++ b/modules/audio_processing/aec3/matched_filter_avx2.cc @@ -10,6 +10,10 @@ #include +#include +#include + +#include "api/array_view.h" #include "modules/audio_processing/aec3/matched_filter.h" #include "rtc_base/checks.h" @@ -28,17 +32,16 @@ inline __m128 hsum_ab(__m256 a, __m256 b) { return s; } -void MatchedFilterCore_AccumulatedError_AVX2( - size_t x_start_index, - float x2_sum_threshold, - float smoothing, - rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView h, - bool* filters_updated, - float* error_sum, - rtc::ArrayView accumulated_error, - rtc::ArrayView scratch_memory) { +void MatchedFilterCore_AccumulatedError_AVX2(size_t x_start_index, + float x2_sum_threshold, + float smoothing, + ArrayView x, + ArrayView y, + ArrayView h, + bool* filters_updated, + float* error_sum, + ArrayView accumulated_error, + ArrayView scratch_memory) { const int h_size = static_cast(h.size()); const int x_size = static_cast(x.size()); RTC_DCHECK_EQ(0, h_size % 16); @@ -90,9 +93,9 @@ void MatchedFilterCore_AccumulatedError_AVX2( s_acum += s_inst_hadd_256[5]; e_128[3] = s_acum - y[i]; - __m128 accumulated_error = _mm_load_ps(a_p); - accumulated_error = _mm_fmadd_ps(e_128, e_128, accumulated_error); - _mm_storeu_ps(a_p, accumulated_error); + __m128 acum_error = _mm_load_ps(a_p); + acum_error = _mm_fmadd_ps(e_128, e_128, acum_error); + _mm_storeu_ps(a_p, acum_error); } // Sum components together. x2_sum_256 = _mm256_add_ps(x2_sum_256, x2_sum_256_8); @@ -114,20 +117,20 @@ void MatchedFilterCore_AccumulatedError_AVX2( const __m256 alpha_256 = _mm256_set1_ps(alpha); // filter = filter + smoothing * (y - filter * x) * x / x * x. - float* h_p = &h[0]; - const float* x_p = + float* h_p2 = &h[0]; + const float* x_p2 = chunk1 != h_size ? scratch_memory.data() : &x[x_start_index]; // Perform 256 bit vector operations. const int limit_by_8 = h_size >> 3; - for (int k = limit_by_8; k > 0; --k, h_p += 8, x_p += 8) { + for (int k = limit_by_8; k > 0; --k, h_p2 += 8, x_p2 += 8) { // Load the data into 256 bit vectors. - __m256 h_k = _mm256_loadu_ps(h_p); - __m256 x_k = _mm256_loadu_ps(x_p); + __m256 h_k = _mm256_loadu_ps(h_p2); + __m256 x_k = _mm256_loadu_ps(x_p2); // Compute h = h + alpha * x. h_k = _mm256_fmadd_ps(x_k, alpha_256, h_k); // Store the result. 
- _mm256_storeu_ps(h_p, h_k); + _mm256_storeu_ps(h_p2, h_k); } *filters_updated = true; } @@ -139,14 +142,14 @@ void MatchedFilterCore_AccumulatedError_AVX2( void MatchedFilterCore_AVX2(size_t x_start_index, float x2_sum_threshold, float smoothing, - rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView h, + ArrayView x, + ArrayView y, + ArrayView h, bool* filters_updated, float* error_sum, bool compute_accumulated_error, - rtc::ArrayView accumulated_error, - rtc::ArrayView scratch_memory) { + ArrayView accumulated_error, + ArrayView scratch_memory) { if (compute_accumulated_error) { return MatchedFilterCore_AccumulatedError_AVX2( x_start_index, x2_sum_threshold, smoothing, x, y, h, filters_updated, @@ -224,27 +227,27 @@ void MatchedFilterCore_AVX2(size_t x_start_index, const __m256 alpha_256 = _mm256_set1_ps(alpha); // filter = filter + smoothing * (y - filter * x) * x / x * x. - float* h_p = &h[0]; + float* h_p2 = &h[0]; x_p = &x[x_start_index]; // Perform the loop in two chunks. for (int limit : {chunk1, chunk2}) { // Perform 256 bit vector operations. const int limit_by_8 = limit >> 3; - for (int k = limit_by_8; k > 0; --k, h_p += 8, x_p += 8) { + for (int k = limit_by_8; k > 0; --k, h_p2 += 8, x_p += 8) { // Load the data into 256 bit vectors. - __m256 h_k = _mm256_loadu_ps(h_p); + __m256 h_k = _mm256_loadu_ps(h_p2); __m256 x_k = _mm256_loadu_ps(x_p); // Compute h = h + alpha * x. h_k = _mm256_fmadd_ps(x_k, alpha_256, h_k); // Store the result. - _mm256_storeu_ps(h_p, h_k); + _mm256_storeu_ps(h_p2, h_k); } // Perform non-vector operations for any remaining items. - for (int k = limit - limit_by_8 * 8; k > 0; --k, ++h_p, ++x_p) { - *h_p += alpha * *x_p; + for (int k = limit - limit_by_8 * 8; k > 0; --k, ++h_p2, ++x_p) { + *h_p2 += alpha * *x_p; } x_p = &x[0]; diff --git a/modules/audio_processing/aec3/matched_filter_lag_aggregator.cc b/modules/audio_processing/aec3/matched_filter_lag_aggregator.cc index c8ac417a8b..fd6dc5bd3a 100644 --- a/modules/audio_processing/aec3/matched_filter_lag_aggregator.cc +++ b/modules/audio_processing/aec3/matched_filter_lag_aggregator.cc @@ -15,7 +15,6 @@ #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_minmax.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { @@ -63,8 +62,8 @@ void MatchedFilterLagAggregator::Reset(bool hard_reset) { } } -absl::optional MatchedFilterLagAggregator::Aggregate( - const absl::optional& lag_estimate) { +std::optional MatchedFilterLagAggregator::Aggregate( + const std::optional& lag_estimate) { if (lag_estimate && pre_echo_lag_aggregator_) { pre_echo_lag_aggregator_->Dump(data_dumper_); pre_echo_lag_aggregator_->Aggregate( @@ -74,7 +73,7 @@ absl::optional MatchedFilterLagAggregator::Aggregate( if (lag_estimate) { highest_peak_aggregator_.Aggregate( std::max(0, static_cast(lag_estimate->lag) - headroom_)); - rtc::ArrayView histogram = highest_peak_aggregator_.histogram(); + ArrayView histogram = highest_peak_aggregator_.histogram(); int candidate = highest_peak_aggregator_.candidate(); significant_candidate_found_ = significant_candidate_found_ || histogram[candidate] > thresholds_.converged; @@ -91,7 +90,7 @@ absl::optional MatchedFilterLagAggregator::Aggregate( } } - return absl::nullopt; + return std::nullopt; } MatchedFilterLagAggregator::HighestPeakAggregator::HighestPeakAggregator( @@ -124,8 +123,6 @@ MatchedFilterLagAggregator::PreEchoLagAggregator::PreEchoLagAggregator( size_t max_filter_lag, size_t 
down_sampling_factor) : block_size_log2_(GetDownSamplingBlockSizeLog2(down_sampling_factor)), - penalize_high_delays_initial_phase_(!field_trial::IsDisabled( - "WebRTC-Aec3PenalyzeHighDelaysInitialPhase")), histogram_( ((max_filter_lag + 1) * down_sampling_factor) >> kBlockSizeLog2, 0) { @@ -145,7 +142,7 @@ void MatchedFilterLagAggregator::PreEchoLagAggregator::Aggregate( RTC_DCHECK(pre_echo_block_size >= 0 && pre_echo_block_size < static_cast(histogram_.size())); pre_echo_block_size = - rtc::SafeClamp(pre_echo_block_size, 0, histogram_.size() - 1); + SafeClamp(pre_echo_block_size, 0, histogram_.size() - 1); // Remove the oldest point from the `histogram_`, it ignores the initial // points where no updates have been done to the `histogram_data_` array. if (histogram_data_[histogram_data_index_] != @@ -156,8 +153,7 @@ void MatchedFilterLagAggregator::PreEchoLagAggregator::Aggregate( ++histogram_[histogram_data_[histogram_data_index_]]; histogram_data_index_ = (histogram_data_index_ + 1) % histogram_data_.size(); int pre_echo_candidate_block_size = 0; - if (penalize_high_delays_initial_phase_ && - number_updates_ < kNumBlocksPerSecond * 2) { + if (number_updates_ < kNumBlocksPerSecond * 2) { number_updates_++; float penalization_per_delay = 1.0f; float max_histogram_value = -1.0f; diff --git a/modules/audio_processing/aec3/matched_filter_lag_aggregator.h b/modules/audio_processing/aec3/matched_filter_lag_aggregator.h index 1287b38da0..d1b4161d71 100644 --- a/modules/audio_processing/aec3/matched_filter_lag_aggregator.h +++ b/modules/audio_processing/aec3/matched_filter_lag_aggregator.h @@ -11,9 +11,9 @@ #ifndef MODULES_AUDIO_PROCESSING_AEC3_MATCHED_FILTER_LAG_AGGREGATOR_H_ #define MODULES_AUDIO_PROCESSING_AEC3_MATCHED_FILTER_LAG_AGGREGATOR_H_ +#include #include -#include "absl/types/optional.h" #include "api/audio/echo_canceller3_config.h" #include "modules/audio_processing/aec3/delay_estimate.h" #include "modules/audio_processing/aec3/matched_filter.h" @@ -41,8 +41,8 @@ class MatchedFilterLagAggregator { void Reset(bool hard_reset); // Aggregates the provided lag estimates. - absl::optional Aggregate( - const absl::optional& lag_estimate); + std::optional Aggregate( + const std::optional& lag_estimate); // Returns whether a reliable delay estimate has been found. 
bool ReliableDelayFound() const { return significant_candidate_found_; } @@ -64,7 +64,6 @@ class MatchedFilterLagAggregator { private: const int block_size_log2_; - const bool penalize_high_delays_initial_phase_; std::array histogram_data_; std::vector histogram_; int histogram_data_index_ = 0; @@ -78,7 +77,7 @@ class MatchedFilterLagAggregator { void Reset(); void Aggregate(int lag); int candidate() const { return candidate_; } - rtc::ArrayView histogram() const { return histogram_; } + ArrayView histogram() const { return histogram_; } private: std::vector histogram_; diff --git a/modules/audio_processing/aec3/matched_filter_lag_aggregator_unittest.cc b/modules/audio_processing/aec3/matched_filter_lag_aggregator_unittest.cc index 6804102584..25c62fe753 100644 --- a/modules/audio_processing/aec3/matched_filter_lag_aggregator_unittest.cc +++ b/modules/audio_processing/aec3/matched_filter_lag_aggregator_unittest.cc @@ -36,7 +36,7 @@ TEST(MatchedFilterLagAggregator, MatchedFilterLagAggregator aggregator(&data_dumper, /*max_filter_lag=*/100, config.delay); - absl::optional aggregated_lag; + std::optional aggregated_lag; for (size_t k = 0; k < kNumLagsBeforeDetection; ++k) { aggregated_lag = aggregator.Aggregate( MatchedFilter::LagEstimate(/*lag=*/10, /*pre_echo_lag=*/10)); @@ -66,7 +66,7 @@ TEST(MatchedFilterLagAggregator, MatchedFilterLagAggregator aggregator(&data_dumper, /*max_filter_lag=*/kLag, config.delay); for (size_t k = 0; k < kNumLagsBeforeDetection * 10; ++k) { - absl::optional aggregated_lag = aggregator.Aggregate( + std::optional aggregated_lag = aggregator.Aggregate( MatchedFilter::LagEstimate(/*lag=*/kLag, /*pre_echo_lag=*/kLag)); EXPECT_FALSE(aggregated_lag); EXPECT_EQ(kLag, aggregated_lag->delay); @@ -84,7 +84,7 @@ TEST(MatchedFilterLagAggregator, DISABLED_PersistentAggregatedLag) { std::vector lag_estimates(1); MatchedFilterLagAggregator aggregator(&data_dumper, std::max(kLag1, kLag2), config.delay); - absl::optional aggregated_lag; + std::optional aggregated_lag; for (size_t k = 0; k < kNumLagsBeforeDetection; ++k) { aggregated_lag = aggregator.Aggregate( MatchedFilter::LagEstimate(/*lag=*/kLag1, /*pre_echo_lag=*/kLag1)); diff --git a/modules/audio_processing/aec3/matched_filter_unittest.cc b/modules/audio_processing/aec3/matched_filter_unittest.cc index 3f26cc146e..332f877aad 100644 --- a/modules/audio_processing/aec3/matched_filter_unittest.cc +++ b/modules/audio_processing/aec3/matched_filter_unittest.cc @@ -10,7 +10,18 @@ #include "modules/audio_processing/aec3/matched_filter.h" +#include +#include +#include +#include +#include +#include + // Defines WEBRTC_ARCH_X86_FAMILY, used below. 
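
The PreEchoLagAggregator changes above maintain a histogram over a sliding window of quantized lag values: the oldest entry is decremented, the newest incremented, and the write index wraps around. A standalone sketch of that pattern, with a hypothetical class name and without the pre-echo penalization details:

#include <algorithm>
#include <cstddef>
#include <vector>

class SlidingLagHistogram {
 public:
  SlidingLagHistogram(size_t num_bins, size_t window_length)
      : histogram_(num_bins, 0), window_(window_length, -1) {}

  void Insert(int bin) {
    bin = std::clamp(bin, 0, static_cast<int>(histogram_.size()) - 1);
    int& oldest = window_[index_];
    if (oldest >= 0) {
      --histogram_[oldest];  // The entry leaving the window no longer counts.
    }
    oldest = bin;
    ++histogram_[bin];
    index_ = (index_ + 1) % window_.size();
  }

  // The most frequently seen bin over the last window_length insertions.
  int Candidate() const {
    return static_cast<int>(
        std::max_element(histogram_.begin(), histogram_.end()) -
        histogram_.begin());
  }

 private:
  std::vector<int> histogram_;
  std::vector<int> window_;  // Circular buffer of recent bins; -1 means unset.
  size_t index_ = 0;
};
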
+#include "api/array_view.h" +#include "api/audio/echo_canceller3_config.h" +#include "modules/audio_processing/aec3/block.h" +#include "rtc_base/checks.h" #include "rtc_base/system/arch.h" #if defined(WEBRTC_ARCH_X86_FAMILY) @@ -27,7 +38,6 @@ #include "rtc_base/random.h" #include "rtc_base/strings/string_builder.h" #include "system_wrappers/include/cpu_features_wrapper.h" -#include "test/field_trial.h" #include "test/gtest.h" namespace webrtc { @@ -35,7 +45,7 @@ namespace aec3 { namespace { std::string ProduceDebugText(size_t delay, size_t down_sampling_factor) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Delay: " << delay; ss << ", Down sampling factor: " << down_sampling_factor; return ss.Release(); @@ -295,8 +305,8 @@ TEST_P(MatchedFilterTest, LagEstimation) { render_delay_buffer->PrepareCaptureProcessing(); std::array downsampled_capture_data; - rtc::ArrayView downsampled_capture( - downsampled_capture_data.data(), sub_block_size); + ArrayView downsampled_capture(downsampled_capture_data.data(), + sub_block_size); capture_decimator.Decimate(capture[0], downsampled_capture); filter.Update(render_delay_buffer->GetDownsampledRenderBuffer(), downsampled_capture, /*use_slow_smoothing=*/false); @@ -360,9 +370,9 @@ TEST_P(MatchedFilterTest, PreEchoEstimation) { signal_echo_delay_buffer.Delay(render.View(0, 0), capture[0]); signal_pre_echo_delay_buffer.Delay(render.View(0, 0), capture_with_pre_echo); - for (size_t k = 0; k < capture[0].size(); ++k) { + for (size_t l = 0; l < capture[0].size(); ++l) { constexpr float gain_pre_echo = 0.8f; - capture[0][k] += gain_pre_echo * capture_with_pre_echo[k]; + capture[0][l] += gain_pre_echo * capture_with_pre_echo[l]; } render_delay_buffer->Insert(render); if (k == 0) { @@ -370,8 +380,8 @@ TEST_P(MatchedFilterTest, PreEchoEstimation) { } render_delay_buffer->PrepareCaptureProcessing(); std::array downsampled_capture_data; - rtc::ArrayView downsampled_capture(downsampled_capture_data.data(), - sub_block_size); + ArrayView downsampled_capture(downsampled_capture_data.data(), + sub_block_size); capture_decimator.Decimate(capture[0], downsampled_capture); filter.Update(render_delay_buffer->GetDownsampledRenderBuffer(), downsampled_capture, /*use_slow_smoothing=*/false); @@ -411,7 +421,7 @@ TEST_P(MatchedFilterTest, LagNotReliableForUncorrelatedRenderAndCapture) { Block render(kNumBands, kNumChannels); std::array capture_data; - rtc::ArrayView capture(capture_data.data(), sub_block_size); + ArrayView capture(capture_data.data(), sub_block_size); std::fill(capture.begin(), capture.end(), 0.f); ApmDataDumper data_dumper(0); std::unique_ptr render_delay_buffer( @@ -476,8 +486,8 @@ TEST_P(MatchedFilterTest, LagNotUpdatedForLowLevelRender) { } std::copy(render.begin(0, 0), render.end(0, 0), capture[0].begin()); std::array downsampled_capture_data; - rtc::ArrayView downsampled_capture(downsampled_capture_data.data(), - sub_block_size); + ArrayView downsampled_capture(downsampled_capture_data.data(), + sub_block_size); capture_decimator.Decimate(capture[0], downsampled_capture); filter.Update(render_delay_buffer->GetDownsampledRenderBuffer(), downsampled_capture, false); @@ -556,57 +566,4 @@ INSTANTIATE_TEST_SUITE_P(_, #endif } // namespace aec3 - -TEST(MatchedFilterFieldTrialTest, PreEchoConfigurationTest) { - float threshold_in = 0.1f; - int mode_in = 2; - rtc::StringBuilder field_trial_name; - field_trial_name << "WebRTC-Aec3PreEchoConfiguration/threshold:" - << threshold_in << ",mode:" << mode_in << "/"; - webrtc::test::ScopedFieldTrials 
field_trials(field_trial_name.str()); - ApmDataDumper data_dumper(0); - EchoCanceller3Config config; - MatchedFilter matched_filter( - &data_dumper, DetectOptimization(), - kBlockSize / config.delay.down_sampling_factor, - aec3::kWindowSizeSubBlocks, aec3::kNumMatchedFilters, - aec3::kAlignmentShiftSubBlocks, - config.render_levels.poor_excitation_render_limit, - config.delay.delay_estimate_smoothing, - config.delay.delay_estimate_smoothing_delay_found, - config.delay.delay_candidate_detection_threshold, - config.delay.detect_pre_echo); - - auto& pre_echo_config = matched_filter.GetPreEchoConfiguration(); - EXPECT_EQ(pre_echo_config.threshold, threshold_in); - EXPECT_EQ(pre_echo_config.mode, mode_in); -} - -TEST(MatchedFilterFieldTrialTest, WrongPreEchoConfigurationTest) { - constexpr float kDefaultThreshold = 0.5f; - constexpr int kDefaultMode = 3; - float threshold_in = -0.1f; - int mode_in = 5; - rtc::StringBuilder field_trial_name; - field_trial_name << "WebRTC-Aec3PreEchoConfiguration/threshold:" - << threshold_in << ",mode:" << mode_in << "/"; - webrtc::test::ScopedFieldTrials field_trials(field_trial_name.str()); - ApmDataDumper data_dumper(0); - EchoCanceller3Config config; - MatchedFilter matched_filter( - &data_dumper, DetectOptimization(), - kBlockSize / config.delay.down_sampling_factor, - aec3::kWindowSizeSubBlocks, aec3::kNumMatchedFilters, - aec3::kAlignmentShiftSubBlocks, - config.render_levels.poor_excitation_render_limit, - config.delay.delay_estimate_smoothing, - config.delay.delay_estimate_smoothing_delay_found, - config.delay.delay_candidate_detection_threshold, - config.delay.detect_pre_echo); - - auto& pre_echo_config = matched_filter.GetPreEchoConfiguration(); - EXPECT_EQ(pre_echo_config.threshold, kDefaultThreshold); - EXPECT_EQ(pre_echo_config.mode, kDefaultMode); -} - } // namespace webrtc diff --git a/modules/audio_processing/aec3/mock/mock_echo_remover.h b/modules/audio_processing/aec3/mock/mock_echo_remover.h index 31f075ef0a..0be4a650d4 100644 --- a/modules/audio_processing/aec3/mock/mock_echo_remover.h +++ b/modules/audio_processing/aec3/mock/mock_echo_remover.h @@ -11,9 +11,9 @@ #ifndef MODULES_AUDIO_PROCESSING_AEC3_MOCK_MOCK_ECHO_REMOVER_H_ #define MODULES_AUDIO_PROCESSING_AEC3_MOCK_MOCK_ECHO_REMOVER_H_ +#include #include -#include "absl/types/optional.h" #include "modules/audio_processing/aec3/echo_path_variability.h" #include "modules/audio_processing/aec3/echo_remover.h" #include "modules/audio_processing/aec3/render_buffer.h" @@ -31,7 +31,7 @@ class MockEchoRemover : public EchoRemover { ProcessCapture, (EchoPathVariability echo_path_variability, bool capture_signal_saturation, - const absl::optional& delay_estimate, + const std::optional& delay_estimate, RenderBuffer* render_buffer, Block* linear_output, Block* capture), diff --git a/modules/audio_processing/aec3/mock/mock_render_delay_controller.h b/modules/audio_processing/aec3/mock/mock_render_delay_controller.h index 14d499dd28..3395044cf9 100644 --- a/modules/audio_processing/aec3/mock/mock_render_delay_controller.h +++ b/modules/audio_processing/aec3/mock/mock_render_delay_controller.h @@ -11,7 +11,8 @@ #ifndef MODULES_AUDIO_PROCESSING_AEC3_MOCK_MOCK_RENDER_DELAY_CONTROLLER_H_ #define MODULES_AUDIO_PROCESSING_AEC3_MOCK_MOCK_RENDER_DELAY_CONTROLLER_H_ -#include "absl/types/optional.h" +#include + #include "api/array_view.h" #include "modules/audio_processing/aec3/downsampled_render_buffer.h" #include "modules/audio_processing/aec3/render_delay_controller.h" @@ -27,7 +28,7 @@ class 
MockRenderDelayController : public RenderDelayController { MOCK_METHOD(void, Reset, (bool reset_delay_statistics), (override)); MOCK_METHOD(void, LogRenderCall, (), (override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, GetDelay, (const DownsampledRenderBuffer& render_buffer, size_t render_delay_buffer_delay, diff --git a/modules/audio_processing/aec3/moving_average.cc b/modules/audio_processing/aec3/moving_average.cc index 7a81ee89ea..f89789ec77 100644 --- a/modules/audio_processing/aec3/moving_average.cc +++ b/modules/audio_processing/aec3/moving_average.cc @@ -31,8 +31,8 @@ MovingAverage::MovingAverage(size_t num_elem, size_t mem_len) MovingAverage::~MovingAverage() = default; -void MovingAverage::Average(rtc::ArrayView input, - rtc::ArrayView output) { +void MovingAverage::Average(ArrayView input, + ArrayView output) { RTC_DCHECK(input.size() == num_elem_); RTC_DCHECK(output.size() == num_elem_); diff --git a/modules/audio_processing/aec3/moving_average.h b/modules/audio_processing/aec3/moving_average.h index 913d78519c..68f2fd0fcc 100644 --- a/modules/audio_processing/aec3/moving_average.h +++ b/modules/audio_processing/aec3/moving_average.h @@ -29,7 +29,7 @@ class MovingAverage { // Computes the average of input and mem_len-1 previous inputs and stores the // result in output. - void Average(rtc::ArrayView input, rtc::ArrayView output); + void Average(ArrayView input, ArrayView output); private: const size_t num_elem_; diff --git a/modules/audio_processing/aec3/multi_channel_content_detector.cc b/modules/audio_processing/aec3/multi_channel_content_detector.cc index 98068964d9..2b07585c1b 100644 --- a/modules/audio_processing/aec3/multi_channel_content_detector.cc +++ b/modules/audio_processing/aec3/multi_channel_content_detector.cc @@ -94,9 +94,9 @@ MultiChannelContentDetector::MultiChannelContentDetector( detection_threshold_(detection_threshold), detection_timeout_threshold_frames_( stereo_detection_timeout_threshold_seconds > 0 - ? absl::make_optional(stereo_detection_timeout_threshold_seconds * - kNumFramesPerSecond) - : absl::nullopt), + ? 
std::make_optional(stereo_detection_timeout_threshold_seconds * + kNumFramesPerSecond) + : std::nullopt), stereo_detection_hysteresis_frames_(static_cast( stereo_detection_hysteresis_seconds * kNumFramesPerSecond)), metrics_logger_((detect_stereo_content && num_render_input_channels > 1) diff --git a/modules/audio_processing/aec3/multi_channel_content_detector.h b/modules/audio_processing/aec3/multi_channel_content_detector.h index be8717f3af..57f49a4990 100644 --- a/modules/audio_processing/aec3/multi_channel_content_detector.h +++ b/modules/audio_processing/aec3/multi_channel_content_detector.h @@ -12,12 +12,12 @@ #define MODULES_AUDIO_PROCESSING_AEC3_MULTI_CHANNEL_CONTENT_DETECTOR_H_ #include +#include #include +#include #include -#include "absl/types/optional.h" - namespace webrtc { // Analyzes audio content to determine whether the contained audio is proper @@ -76,7 +76,7 @@ class MultiChannelContentDetector { const bool detect_stereo_content_; const float detection_threshold_; - const absl::optional detection_timeout_threshold_frames_; + const std::optional detection_timeout_threshold_frames_; const int stereo_detection_hysteresis_frames_; // Collects and reports metrics on the amount of multichannel content diff --git a/modules/audio_processing/aec3/nearend_detector.h b/modules/audio_processing/aec3/nearend_detector.h index 0d8a06b2cd..72e798a115 100644 --- a/modules/audio_processing/aec3/nearend_detector.h +++ b/modules/audio_processing/aec3/nearend_detector.h @@ -28,11 +28,10 @@ class NearendDetector { // Updates the state selection based on latest spectral estimates. virtual void Update( - rtc::ArrayView> - nearend_spectrum, - rtc::ArrayView> + ArrayView> nearend_spectrum, + ArrayView> residual_echo_spectrum, - rtc::ArrayView> + ArrayView> comfort_noise_spectrum, bool initial_state) = 0; }; diff --git a/modules/audio_processing/aec3/refined_filter_update_gain.cc b/modules/audio_processing/aec3/refined_filter_update_gain.cc index 8e391d6fa6..7d32bae49b 100644 --- a/modules/audio_processing/aec3/refined_filter_update_gain.cc +++ b/modules/audio_processing/aec3/refined_filter_update_gain.cc @@ -68,7 +68,7 @@ void RefinedFilterUpdateGain::Compute( const std::array& render_power, const RenderSignalAnalyzer& render_signal_analyzer, const SubtractorOutput& subtractor_output, - rtc::ArrayView erl, + ArrayView erl, size_t size_partitions, bool saturated_capture_signal, bool disallow_leakage_diverged, diff --git a/modules/audio_processing/aec3/refined_filter_update_gain.h b/modules/audio_processing/aec3/refined_filter_update_gain.h index 1a68ebc296..660a0137b5 100644 --- a/modules/audio_processing/aec3/refined_filter_update_gain.h +++ b/modules/audio_processing/aec3/refined_filter_update_gain.h @@ -49,7 +49,7 @@ class RefinedFilterUpdateGain { void Compute(const std::array& render_power, const RenderSignalAnalyzer& render_signal_analyzer, const SubtractorOutput& subtractor_output, - rtc::ArrayView erl, + ArrayView erl, size_t size_partitions, bool saturated_capture_signal, bool disallow_leakage_diverged, diff --git a/modules/audio_processing/aec3/refined_filter_update_gain_unittest.cc b/modules/audio_processing/aec3/refined_filter_update_gain_unittest.cc index c77c5b53d5..fd53a8477d 100644 --- a/modules/audio_processing/aec3/refined_filter_update_gain_unittest.cc +++ b/modules/audio_processing/aec3/refined_filter_update_gain_unittest.cc @@ -11,19 +11,31 @@ #include "modules/audio_processing/aec3/refined_filter_update_gain.h" #include +#include +#include +#include #include +#include 
#include #include +#include "api/audio/echo_canceller3_config.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "modules/audio_processing/aec3/adaptive_fir_filter.h" #include "modules/audio_processing/aec3/adaptive_fir_filter_erl.h" +#include "modules/audio_processing/aec3/aec3_common.h" +#include "modules/audio_processing/aec3/aec3_fft.h" #include "modules/audio_processing/aec3/aec_state.h" +#include "modules/audio_processing/aec3/block.h" #include "modules/audio_processing/aec3/coarse_filter_update_gain.h" +#include "modules/audio_processing/aec3/delay_estimate.h" #include "modules/audio_processing/aec3/render_delay_buffer.h" #include "modules/audio_processing/aec3/render_signal_analyzer.h" #include "modules/audio_processing/aec3/subtractor_output.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "modules/audio_processing/test/echo_canceller_test_tools.h" +#include "rtc_base/checks.h" #include "rtc_base/numerics/safe_minmax.h" #include "rtc_base/random.h" #include "rtc_base/strings/string_builder.h" @@ -34,7 +46,8 @@ namespace { // Method for performing the simulations needed to test the refined filter // update gain functionality. -void RunFilterUpdateTest(int num_blocks_to_process, +void RunFilterUpdateTest(const Environment& env, + int num_blocks_to_process, size_t delay_samples, int filter_length_blocks, const std::vector& blocks_with_echo_path_changes, @@ -89,9 +102,9 @@ void RunFilterUpdateTest(int num_blocks_to_process, config.delay.default_delay = 1; std::unique_ptr render_delay_buffer( RenderDelayBuffer::Create(config, kSampleRateHz, kNumRenderChannels)); - AecState aec_state(config, kNumCaptureChannels); + AecState aec_state(env, config, kNumCaptureChannels); RenderSignalAnalyzer render_signal_analyzer(config); - absl::optional delay_estimate; + std::optional delay_estimate; std::array s_scratch; std::array s; FftData S; @@ -159,10 +172,10 @@ void RunFilterUpdateTest(int num_blocks_to_process, e_refined.begin(), [&](float a, float b) { return a - b * kScale; }); std::for_each(e_refined.begin(), e_refined.end(), - [](float& a) { a = rtc::SafeClamp(a, -32768.f, 32767.f); }); + [](float& a) { a = SafeClamp(a, -32768.f, 32767.f); }); fft.ZeroPaddedFft(e_refined, Aec3Fft::Window::kRectangular, &E_refined); - for (size_t k = 0; k < kBlockSize; ++k) { - s[k] = kScale * s_scratch[k + kFftLengthBy2]; + for (size_t l = 0; l < kBlockSize; ++l) { + s[l] = kScale * s_scratch[l + kFftLengthBy2]; } // Apply the coarse filter. @@ -172,7 +185,7 @@ void RunFilterUpdateTest(int num_blocks_to_process, e_coarse.begin(), [&](float a, float b) { return a - b * kScale; }); std::for_each(e_coarse.begin(), e_coarse.end(), - [](float& a) { a = rtc::SafeClamp(a, -32768.f, 32767.f); }); + [](float& a) { a = SafeClamp(a, -32768.f, 32767.f); }); fft.ZeroPaddedFft(e_coarse, Aec3Fft::Window::kRectangular, &E_coarse); // Compute spectra for future use. 
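
In RunFilterUpdateTest above, the refined and coarse error signals are formed by subtracting the scaled echo estimate from the capture block and clamping to the 16-bit range with SafeClamp. A compact sketch of that step, with an assumed block size and a hypothetical helper name:

#include <algorithm>
#include <array>
#include <cstddef>

constexpr size_t kBlockSizeSketch = 64;  // Assumed AEC3 block size.

void ComputeClampedError(const std::array<float, kBlockSizeSketch>& y,
                         const std::array<float, kBlockSizeSketch>& s,
                         float scale,
                         std::array<float, kBlockSizeSketch>& e) {
  for (size_t k = 0; k < kBlockSizeSketch; ++k) {
    // e = y - scale * s, limited to what a 16-bit capture signal can hold.
    e[k] = std::clamp(y[k] - scale * s[k], -32768.f, 32767.f);
  }
}
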
@@ -216,13 +229,13 @@ void RunFilterUpdateTest(int num_blocks_to_process, } std::string ProduceDebugText(int filter_length_blocks) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Length: " << filter_length_blocks; return ss.Release(); } std::string ProduceDebugText(size_t delay, int filter_length_blocks) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Delay: " << delay << ", "; ss << ProduceDebugText(filter_length_blocks); return ss.Release(); @@ -254,6 +267,7 @@ TEST(RefinedFilterUpdateGainDeathTest, NullDataOutputGain) { // Verifies that the gain formed causes the filter using it to converge. TEST(RefinedFilterUpdateGain, GainCausesFilterToConverge) { + const Environment env = CreateEnvironment(); std::vector blocks_with_echo_path_changes; std::vector blocks_with_saturation; for (size_t filter_length_blocks : {12, 20, 30}) { @@ -264,7 +278,7 @@ TEST(RefinedFilterUpdateGain, GainCausesFilterToConverge) { std::array y; FftData G; - RunFilterUpdateTest(600, delay_samples, filter_length_blocks, + RunFilterUpdateTest(env, 600, delay_samples, filter_length_blocks, blocks_with_echo_path_changes, blocks_with_saturation, false, &e, &y, &G); @@ -284,6 +298,7 @@ TEST(RefinedFilterUpdateGain, GainCausesFilterToConverge) { // Verifies that the magnitude of the gain on average decreases for a // persistently exciting signal. TEST(RefinedFilterUpdateGain, DecreasingGain) { + const Environment env = CreateEnvironment(); std::vector blocks_with_echo_path_changes; std::vector blocks_with_saturation; @@ -296,11 +311,11 @@ TEST(RefinedFilterUpdateGain, DecreasingGain) { std::array G_b_power; std::array G_c_power; - RunFilterUpdateTest(250, 65, 12, blocks_with_echo_path_changes, + RunFilterUpdateTest(env, 250, 65, 12, blocks_with_echo_path_changes, blocks_with_saturation, false, &e, &y, &G_a); - RunFilterUpdateTest(500, 65, 12, blocks_with_echo_path_changes, + RunFilterUpdateTest(env, 500, 65, 12, blocks_with_echo_path_changes, blocks_with_saturation, false, &e, &y, &G_b); - RunFilterUpdateTest(750, 65, 12, blocks_with_echo_path_changes, + RunFilterUpdateTest(env, 750, 65, 12, blocks_with_echo_path_changes, blocks_with_saturation, false, &e, &y, &G_c); G_a.Spectrum(Aec3Optimization::kNone, G_a_power); @@ -317,6 +332,7 @@ TEST(RefinedFilterUpdateGain, DecreasingGain) { // Verifies that the gain is zero when there is saturation and that the internal // error estimates cause the gain to increase after a period of saturation. TEST(RefinedFilterUpdateGain, SaturationBehavior) { + const Environment env = CreateEnvironment(); std::vector blocks_with_echo_path_changes; std::vector blocks_with_saturation; for (int k = 99; k < 200; ++k) { @@ -336,17 +352,17 @@ TEST(RefinedFilterUpdateGain, SaturationBehavior) { std::array G_a_power; std::array G_b_power; - RunFilterUpdateTest(100, 65, filter_length_blocks, + RunFilterUpdateTest(env, 100, 65, filter_length_blocks, blocks_with_echo_path_changes, blocks_with_saturation, false, &e, &y, &G_a); EXPECT_EQ(G_a_ref.re, G_a.re); EXPECT_EQ(G_a_ref.im, G_a.im); - RunFilterUpdateTest(99, 65, filter_length_blocks, + RunFilterUpdateTest(env, 99, 65, filter_length_blocks, blocks_with_echo_path_changes, blocks_with_saturation, false, &e, &y, &G_a); - RunFilterUpdateTest(201, 65, filter_length_blocks, + RunFilterUpdateTest(env, 201, 65, filter_length_blocks, blocks_with_echo_path_changes, blocks_with_saturation, false, &e, &y, &G_b); @@ -361,6 +377,7 @@ TEST(RefinedFilterUpdateGain, SaturationBehavior) { // Verifies that the gain increases after an echo path change. 
// TODO(peah): Correct and reactivate this test. TEST(RefinedFilterUpdateGain, DISABLED_EchoPathChangeBehavior) { + const Environment env = CreateEnvironment(); for (size_t filter_length_blocks : {12, 20, 30}) { SCOPED_TRACE(ProduceDebugText(filter_length_blocks)); std::vector blocks_with_echo_path_changes; @@ -374,10 +391,10 @@ TEST(RefinedFilterUpdateGain, DISABLED_EchoPathChangeBehavior) { std::array G_a_power; std::array G_b_power; - RunFilterUpdateTest(100, 65, filter_length_blocks, + RunFilterUpdateTest(env, 100, 65, filter_length_blocks, blocks_with_echo_path_changes, blocks_with_saturation, false, &e, &y, &G_a); - RunFilterUpdateTest(101, 65, filter_length_blocks, + RunFilterUpdateTest(env, 101, 65, filter_length_blocks, blocks_with_echo_path_changes, blocks_with_saturation, false, &e, &y, &G_b); diff --git a/modules/audio_processing/aec3/render_buffer.h b/modules/audio_processing/aec3/render_buffer.h index 8adc996087..e70d48211f 100644 --- a/modules/audio_processing/aec3/render_buffer.h +++ b/modules/audio_processing/aec3/render_buffer.h @@ -47,7 +47,7 @@ class RenderBuffer { } // Get the spectrum from one of the FFTs in the buffer. - rtc::ArrayView> Spectrum( + ArrayView> Spectrum( int buffer_offset_ffts) const { int position = spectrum_buffer_->OffsetIndex(spectrum_buffer_->read, buffer_offset_ffts); @@ -55,7 +55,7 @@ class RenderBuffer { } // Returns the circular fft buffer. - rtc::ArrayView> GetFftBuffer() const { + ArrayView> GetFftBuffer() const { return fft_buffer_->buffer; } diff --git a/modules/audio_processing/aec3/render_delay_buffer.cc b/modules/audio_processing/aec3/render_delay_buffer.cc index ca77a582fa..37a7801c3e 100644 --- a/modules/audio_processing/aec3/render_delay_buffer.cc +++ b/modules/audio_processing/aec3/render_delay_buffer.cc @@ -17,9 +17,9 @@ #include #include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" #include "modules/audio_processing/aec3/aec3_common.h" @@ -35,7 +35,6 @@ #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { @@ -74,13 +73,13 @@ class RenderDelayBufferImpl final : public RenderDelayBuffer { const Aec3Optimization optimization_; const EchoCanceller3Config config_; const float render_linear_amplitude_gain_; - const rtc::LoggingSeverity delay_log_level_; + const LoggingSeverity delay_log_level_; size_t down_sampling_factor_; const int sub_block_size_; BlockBuffer blocks_; SpectrumBuffer spectra_; FftBuffer ffts_; - absl::optional delay_; + std::optional delay_; RenderBuffer echo_remover_buffer_; DownsampledRenderBuffer low_rate_; AlignmentMixer render_mixer_; @@ -95,7 +94,7 @@ class RenderDelayBufferImpl final : public RenderDelayBuffer { int64_t render_call_counter_ = 0; bool render_activity_ = false; size_t render_activity_counter_ = 0; - absl::optional external_audio_buffer_delay_; + std::optional external_audio_buffer_delay_; bool external_audio_buffer_delay_verified_after_reset_ = false; size_t min_latency_blocks_ = 0; size_t excess_render_detection_counter_ = 0; @@ -104,7 +103,7 @@ class RenderDelayBufferImpl final : public RenderDelayBuffer { int ComputeDelay() const; void ApplyTotalDelay(int delay); void InsertBlock(const Block& block, int previous_write); - bool DetectActiveRender(rtc::ArrayView x) const; + bool DetectActiveRender(ArrayView x) const; bool DetectExcessRenderBlocks(); void 
IncrementWriteIndices(); void IncrementLowRateReadIndices(); @@ -123,9 +122,8 @@ RenderDelayBufferImpl::RenderDelayBufferImpl(const EchoCanceller3Config& config, config_(config), render_linear_amplitude_gain_( std::pow(10.0f, config_.render_levels.render_power_gain_db / 20.f)), - delay_log_level_(config_.delay.log_warning_on_delay_changes - ? rtc::LS_WARNING - : rtc::LS_VERBOSE), + delay_log_level_(config_.delay.log_warning_on_delay_changes ? LS_WARNING + : LS_VERBOSE), down_sampling_factor_(config.delay.down_sampling_factor), sub_block_size_(static_cast(down_sampling_factor_ > 0 ? kBlockSize / down_sampling_factor_ @@ -193,7 +191,7 @@ void RenderDelayBufferImpl::Reset() { ApplyTotalDelay(config_.delay.default_delay); // Unset the delays which are set by AlignFromDelay. - delay_ = absl::nullopt; + delay_ = std::nullopt; } } @@ -402,8 +400,7 @@ void RenderDelayBufferImpl::InsertBlock(const Block& block, if (render_linear_amplitude_gain_ != 1.f) { for (size_t band = 0; band < num_bands; ++band) { for (size_t ch = 0; ch < num_render_channels; ++ch) { - rtc::ArrayView b_view = - b.buffer[b.write].View(band, ch); + ArrayView b_view = b.buffer[b.write].View(band, ch); for (float& sample : b_view) { sample *= render_linear_amplitude_gain_; } @@ -426,8 +423,7 @@ void RenderDelayBufferImpl::InsertBlock(const Block& block, } } -bool RenderDelayBufferImpl::DetectActiveRender( - rtc::ArrayView x) const { +bool RenderDelayBufferImpl::DetectActiveRender(ArrayView x) const { const float x_energy = std::inner_product(x.begin(), x.end(), x.begin(), 0.f); return x_energy > (config_.render_levels.active_render_limit * config_.render_levels.active_render_limit) * diff --git a/modules/audio_processing/aec3/render_delay_buffer_unittest.cc b/modules/audio_processing/aec3/render_delay_buffer_unittest.cc index d51e06a1ac..e46a69abf0 100644 --- a/modules/audio_processing/aec3/render_delay_buffer_unittest.cc +++ b/modules/audio_processing/aec3/render_delay_buffer_unittest.cc @@ -25,7 +25,7 @@ namespace webrtc { namespace { std::string ProduceDebugText(int sample_rate_hz) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Sample rate: " << sample_rate_hz; return ss.Release(); } diff --git a/modules/audio_processing/aec3/render_delay_controller.cc b/modules/audio_processing/aec3/render_delay_controller.cc index 465e77fb7c..b02e516a04 100644 --- a/modules/audio_processing/aec3/render_delay_controller.cc +++ b/modules/audio_processing/aec3/render_delay_controller.cc @@ -14,8 +14,8 @@ #include #include #include +#include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" #include "modules/audio_processing/aec3/aec3_common.h" @@ -44,7 +44,7 @@ class RenderDelayControllerImpl final : public RenderDelayController { ~RenderDelayControllerImpl() override; void Reset(bool reset_delay_confidence) override; void LogRenderCall() override; - absl::optional GetDelay( + std::optional GetDelay( const DownsampledRenderBuffer& render_buffer, size_t render_delay_buffer_delay, const Block& capture) override; @@ -54,17 +54,17 @@ class RenderDelayControllerImpl final : public RenderDelayController { static std::atomic instance_count_; std::unique_ptr data_dumper_; const int hysteresis_limit_blocks_; - absl::optional delay_; + std::optional delay_; EchoPathDelayEstimator delay_estimator_; RenderDelayControllerMetrics metrics_; - absl::optional delay_samples_; + std::optional delay_samples_; size_t capture_call_counter_ = 0; int delay_change_counter_ = 0; DelayEstimate::Quality 
last_delay_estimate_quality_; }; DelayEstimate ComputeBufferDelay( - const absl::optional& current_delay, + const std::optional& current_delay, int hysteresis_limit_blocks, DelayEstimate estimated_delay) { // Compute the buffer delay increase required to achieve the desired latency. @@ -100,8 +100,8 @@ RenderDelayControllerImpl::RenderDelayControllerImpl( RenderDelayControllerImpl::~RenderDelayControllerImpl() = default; void RenderDelayControllerImpl::Reset(bool reset_delay_confidence) { - delay_ = absl::nullopt; - delay_samples_ = absl::nullopt; + delay_ = std::nullopt; + delay_samples_ = std::nullopt; delay_estimator_.Reset(reset_delay_confidence); delay_change_counter_ = 0; if (reset_delay_confidence) { @@ -111,9 +111,9 @@ void RenderDelayControllerImpl::Reset(bool reset_delay_confidence) { void RenderDelayControllerImpl::LogRenderCall() {} -absl::optional RenderDelayControllerImpl::GetDelay( +std::optional RenderDelayControllerImpl::GetDelay( const DownsampledRenderBuffer& render_buffer, - size_t render_delay_buffer_delay, + size_t /* render_delay_buffer_delay */, const Block& capture) { ++capture_call_counter_; @@ -155,11 +155,10 @@ absl::optional RenderDelayControllerImpl::GetDelay( last_delay_estimate_quality_ = delay_samples_->quality; } - metrics_.Update( - delay_samples_ ? absl::optional(delay_samples_->delay) - : absl::nullopt, - delay_ ? absl::optional(delay_->delay) : absl::nullopt, - delay_estimator_.Clockdrift()); + metrics_.Update(delay_samples_ ? std::optional(delay_samples_->delay) + : std::nullopt, + delay_ ? std::optional(delay_->delay) : std::nullopt, + delay_estimator_.Clockdrift()); data_dumper_->DumpRaw("aec3_render_delay_controller_delay", delay_samples ? delay_samples->delay : 0); diff --git a/modules/audio_processing/aec3/render_delay_controller.h b/modules/audio_processing/aec3/render_delay_controller.h index 4a18a11e36..b74c1619b4 100644 --- a/modules/audio_processing/aec3/render_delay_controller.h +++ b/modules/audio_processing/aec3/render_delay_controller.h @@ -11,7 +11,8 @@ #ifndef MODULES_AUDIO_PROCESSING_AEC3_RENDER_DELAY_CONTROLLER_H_ #define MODULES_AUDIO_PROCESSING_AEC3_RENDER_DELAY_CONTROLLER_H_ -#include "absl/types/optional.h" +#include + #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" #include "modules/audio_processing/aec3/block.h" @@ -38,7 +39,7 @@ class RenderDelayController { virtual void LogRenderCall() = 0; // Aligns the render buffer content with the capture signal. 
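
ComputeBufferDelay above takes the current delay, a hysteresis limit in blocks, and the new estimate. One common shape for such a rule, shown purely as an illustration (the function name and the exact dead-zone condition are assumptions, not the patch's logic):

#include <cstdlib>
#include <optional>

int ApplyDelayHysteresis(std::optional<int> current_delay_blocks,
                         int hysteresis_limit_blocks,
                         int estimated_delay_blocks) {
  if (current_delay_blocks &&
      std::abs(*current_delay_blocks - estimated_delay_blocks) <=
          hysteresis_limit_blocks) {
    return *current_delay_blocks;  // Small change: keep the current alignment.
  }
  return estimated_delay_blocks;  // Large change: adopt the new estimate.
}

The point of the hysteresis is to avoid re-aligning the render buffer on every small fluctuation of the delay estimator.
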
- virtual absl::optional GetDelay( + virtual std::optional GetDelay( const DownsampledRenderBuffer& render_buffer, size_t render_delay_buffer_delay, const Block& capture) = 0; diff --git a/modules/audio_processing/aec3/render_delay_controller_metrics.cc b/modules/audio_processing/aec3/render_delay_controller_metrics.cc index 1e0a0f443e..8d3f41533a 100644 --- a/modules/audio_processing/aec3/render_delay_controller_metrics.cc +++ b/modules/audio_processing/aec3/render_delay_controller_metrics.cc @@ -42,8 +42,8 @@ enum class DelayChangesCategory { RenderDelayControllerMetrics::RenderDelayControllerMetrics() = default; void RenderDelayControllerMetrics::Update( - absl::optional delay_samples, - absl::optional buffer_delay_blocks, + std::optional delay_samples, + std::optional buffer_delay_blocks, ClockdriftDetector::Level clockdrift) { ++call_counter_; diff --git a/modules/audio_processing/aec3/render_delay_controller_metrics.h b/modules/audio_processing/aec3/render_delay_controller_metrics.h index b81833b43f..0a6f30e83e 100644 --- a/modules/audio_processing/aec3/render_delay_controller_metrics.h +++ b/modules/audio_processing/aec3/render_delay_controller_metrics.h @@ -13,7 +13,8 @@ #include -#include "absl/types/optional.h" +#include + #include "modules/audio_processing/aec3/clockdrift_detector.h" namespace webrtc { @@ -28,8 +29,8 @@ class RenderDelayControllerMetrics { delete; // Updates the metric with new data. - void Update(absl::optional delay_samples, - absl::optional buffer_delay_blocks, + void Update(std::optional delay_samples, + std::optional buffer_delay_blocks, ClockdriftDetector::Level clockdrift); private: diff --git a/modules/audio_processing/aec3/render_delay_controller_metrics_unittest.cc b/modules/audio_processing/aec3/render_delay_controller_metrics_unittest.cc index cf9df6b297..22739e7107 100644 --- a/modules/audio_processing/aec3/render_delay_controller_metrics_unittest.cc +++ b/modules/audio_processing/aec3/render_delay_controller_metrics_unittest.cc @@ -10,7 +10,8 @@ #include "modules/audio_processing/aec3/render_delay_controller_metrics.h" -#include "absl/types/optional.h" +#include + #include "modules/audio_processing/aec3/aec3_common.h" #include "system_wrappers/include/metrics.h" #include "test/gtest.h" @@ -27,7 +28,7 @@ TEST(RenderDelayControllerMetrics, NormalUsage) { for (int j = 0; j < 3; ++j) { for (int k = 0; k < kMetricsReportingIntervalBlocks - 1; ++k) { - metrics.Update(absl::nullopt, absl::nullopt, + metrics.Update(std::nullopt, std::nullopt, ClockdriftDetector::Level::kNone); } EXPECT_METRIC_EQ( @@ -49,7 +50,7 @@ TEST(RenderDelayControllerMetrics, NormalUsage) { // We expect metric reports every kMetricsReportingIntervalBlocks blocks. 
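
The change running through all of these files swaps absl::optional and absl::nullopt for the standard <optional> equivalents. A minimal sketch of the pattern with a hypothetical function:

#include <optional>

std::optional<int> FindLag(bool found, int lag) {
  if (!found) {
    return std::nullopt;  // Previously absl::nullopt.
  }
  return lag;  // Implicit construction of the engaged optional is unchanged.
}
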
++expected_num_metric_reports; - metrics.Update(absl::nullopt, absl::nullopt, + metrics.Update(std::nullopt, std::nullopt, ClockdriftDetector::Level::kNone); EXPECT_METRIC_EQ( metrics::NumSamples("WebRTC.Audio.EchoCanceller.EchoPathDelay"), diff --git a/modules/audio_processing/aec3/render_delay_controller_unittest.cc b/modules/audio_processing/aec3/render_delay_controller_unittest.cc index e1a54fca9e..a3c308e39c 100644 --- a/modules/audio_processing/aec3/render_delay_controller_unittest.cc +++ b/modules/audio_processing/aec3/render_delay_controller_unittest.cc @@ -29,7 +29,7 @@ namespace webrtc { namespace { std::string ProduceDebugText(int sample_rate_hz) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Sample rate: " << sample_rate_hz; return ss.Release(); } @@ -38,7 +38,7 @@ std::string ProduceDebugText(int sample_rate_hz, size_t delay, size_t num_render_channels, size_t num_capture_channels) { - rtc::StringBuilder ss; + StringBuilder ss; ss << ProduceDebugText(sample_rate_hz) << ", Delay: " << delay << ", Num render channels: " << num_render_channels << ", Num capture channels: " << num_capture_channels; @@ -85,7 +85,7 @@ TEST(RenderDelayController, DISABLED_BasicApiCalls) { for (size_t num_capture_channels : {1, 2, 4}) { for (size_t num_render_channels : {1, 2, 8}) { Block capture_block(/*num_bands=*/1, num_capture_channels); - absl::optional delay_blocks; + std::optional delay_blocks; for (size_t num_matched_filters = 4; num_matched_filters <= 10; num_matched_filters++) { for (auto down_sampling_factor : kDownSamplingFactors) { @@ -140,7 +140,7 @@ TEST(RenderDelayController, DISABLED_Alignment) { Block render_block(NumBandsForRate(rate), num_render_channels); for (size_t delay_samples : {15, 50, 150, 200, 800, 4000}) { - absl::optional delay_blocks; + std::optional delay_blocks; SCOPED_TRACE(ProduceDebugText(rate, delay_samples, num_render_channels, num_capture_channels)); @@ -203,7 +203,7 @@ TEST(RenderDelayController, DISABLED_NonCausalAlignment) { Block capture_block(NumBandsForRate(rate), num_capture_channels); for (int delay_samples : {-15, -50, -150, -200}) { - absl::optional delay_blocks; + std::optional delay_blocks; SCOPED_TRACE(ProduceDebugText(rate, -delay_samples, num_render_channels, num_capture_channels)); @@ -259,7 +259,7 @@ TEST(RenderDelayController, DISABLED_AlignmentWithJitter) { for (auto rate : {16000, 32000, 48000}) { Block render_block(NumBandsForRate(rate), num_render_channels); for (size_t delay_samples : {15, 50, 300, 800}) { - absl::optional delay_blocks; + std::optional delay_blocks; SCOPED_TRACE(ProduceDebugText(rate, delay_samples, num_render_channels, num_capture_channels)); diff --git a/modules/audio_processing/aec3/render_signal_analyzer.cc b/modules/audio_processing/aec3/render_signal_analyzer.cc index bfbeb0ec2e..7970caf622 100644 --- a/modules/audio_processing/aec3/render_signal_analyzer.cc +++ b/modules/audio_processing/aec3/render_signal_analyzer.cc @@ -27,7 +27,7 @@ constexpr size_t kCounterThreshold = 5; // Identifies local bands with narrow characteristics. 
void IdentifySmallNarrowBandRegions( const RenderBuffer& render_buffer, - const absl::optional& delay_partitions, + const std::optional& delay_partitions, std::array* narrow_band_counters) { RTC_DCHECK(narrow_band_counters); @@ -38,7 +38,7 @@ void IdentifySmallNarrowBandRegions( std::array channel_counters; channel_counters.fill(0); - rtc::ArrayView> X2 = + ArrayView> X2 = render_buffer.Spectrum(*delay_partitions); for (size_t ch = 0; ch < X2.size(); ++ch) { for (size_t k = 1; k < kFftLengthBy2; ++k) { @@ -56,20 +56,20 @@ void IdentifySmallNarrowBandRegions( // Identifies whether the signal has a single strong narrow-band component. void IdentifyStrongNarrowBandComponent(const RenderBuffer& render_buffer, int strong_peak_freeze_duration, - absl::optional* narrow_peak_band, + std::optional* narrow_peak_band, size_t* narrow_peak_counter) { RTC_DCHECK(narrow_peak_band); RTC_DCHECK(narrow_peak_counter); if (*narrow_peak_band && ++(*narrow_peak_counter) > static_cast(strong_peak_freeze_duration)) { - *narrow_peak_band = absl::nullopt; + *narrow_peak_band = std::nullopt; } const Block& x_latest = render_buffer.GetBlock(0); float max_peak_level = 0.f; for (int channel = 0; channel < x_latest.NumChannels(); ++channel) { - rtc::ArrayView X2_latest = + ArrayView X2_latest = render_buffer.Spectrum(0)[channel]; // Identify the spectral peak. @@ -125,7 +125,7 @@ RenderSignalAnalyzer::~RenderSignalAnalyzer() = default; void RenderSignalAnalyzer::Update( const RenderBuffer& render_buffer, - const absl::optional& delay_partitions) { + const std::optional& delay_partitions) { // Identify bands of narrow nature. IdentifySmallNarrowBandRegions(render_buffer, delay_partitions, &narrow_band_counters_); diff --git a/modules/audio_processing/aec3/render_signal_analyzer.h b/modules/audio_processing/aec3/render_signal_analyzer.h index 2e4aaa4ba7..058b2922e4 100644 --- a/modules/audio_processing/aec3/render_signal_analyzer.h +++ b/modules/audio_processing/aec3/render_signal_analyzer.h @@ -14,8 +14,8 @@ #include #include #include +#include -#include "absl/types/optional.h" #include "api/audio/echo_canceller3_config.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/render_buffer.h" @@ -34,7 +34,7 @@ class RenderSignalAnalyzer { // Updates the render signal analysis with the most recent render signal. void Update(const RenderBuffer& render_buffer, - const absl::optional& delay_partitions); + const std::optional& delay_partitions); // Returns true if the render signal is poorly exciting. bool PoorSignalExcitation() const { @@ -48,12 +48,12 @@ class RenderSignalAnalyzer { void MaskRegionsAroundNarrowBands( std::array* v) const; - absl::optional NarrowPeakBand() const { return narrow_peak_band_; } + std::optional NarrowPeakBand() const { return narrow_peak_band_; } private: const int strong_peak_freeze_duration_; std::array narrow_band_counters_; - absl::optional narrow_peak_band_; + std::optional narrow_peak_band_; size_t narrow_peak_counter_; }; diff --git a/modules/audio_processing/aec3/render_signal_analyzer_unittest.cc b/modules/audio_processing/aec3/render_signal_analyzer_unittest.cc index 16f6280cb6..e51be2faac 100644 --- a/modules/audio_processing/aec3/render_signal_analyzer_unittest.cc +++ b/modules/audio_processing/aec3/render_signal_analyzer_unittest.cc @@ -84,7 +84,7 @@ void RunNarrowBandDetectionTest(size_t num_channels) { render_delay_buffer->PrepareCaptureProcessing(); analyzer.Update(*render_delay_buffer->GetRenderBuffer(), - known_delay ? 
absl::optional(0) : absl::nullopt); + known_delay ? std::optional(0) : std::nullopt); } }; @@ -107,7 +107,7 @@ void RunNarrowBandDetectionTest(size_t num_channels) { } std::string ProduceDebugText(size_t num_channels) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "number of channels: " << num_channels; return ss.Release(); } @@ -149,7 +149,7 @@ TEST(RenderSignalAnalyzer, NoFalseDetectionOfNarrowBands) { render_delay_buffer->PrepareCaptureProcessing(); analyzer.Update(*render_delay_buffer->GetRenderBuffer(), - absl::optional(0)); + std::optional(0)); } mask.fill(1.f); diff --git a/modules/audio_processing/aec3/residual_echo_estimator.cc b/modules/audio_processing/aec3/residual_echo_estimator.cc index 640a3e3cb9..384abfff49 100644 --- a/modules/audio_processing/aec3/residual_echo_estimator.cc +++ b/modules/audio_processing/aec3/residual_echo_estimator.cc @@ -16,9 +16,10 @@ #include #include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" #include "modules/audio_processing/aec3/reverb_model.h" #include "rtc_base/checks.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { @@ -30,25 +31,28 @@ float GetTransparentModeGain() { } float GetEarlyReflectionsDefaultModeGain( + const FieldTrialsView& field_trials, const EchoCanceller3Config::EpStrength& config) { - if (field_trial::IsEnabled("WebRTC-Aec3UseLowEarlyReflectionsDefaultGain")) { + if (field_trials.IsEnabled("WebRTC-Aec3UseLowEarlyReflectionsDefaultGain")) { return 0.1f; } return config.default_gain; } float GetLateReflectionsDefaultModeGain( + const FieldTrialsView& field_trials, const EchoCanceller3Config::EpStrength& config) { - if (field_trial::IsEnabled("WebRTC-Aec3UseLowLateReflectionsDefaultGain")) { + if (field_trials.IsEnabled("WebRTC-Aec3UseLowLateReflectionsDefaultGain")) { return 0.1f; } return config.default_gain; } bool UseErleOnsetCompensationInDominantNearend( + const FieldTrialsView& field_trials, const EchoCanceller3Config::EpStrength& config) { return config.erle_onset_compensation_in_dominant_nearend || - field_trial::IsEnabled( + field_trials.IsEnabled( "WebRTC-Aec3UseErleOnsetCompensationInDominantNearend"); } @@ -76,9 +80,9 @@ void GetRenderIndexesToAnalyze( // Estimates the residual echo power based on the echo return loss enhancement // (ERLE) and the linear power estimate. void LinearEstimate( - rtc::ArrayView> S2_linear, - rtc::ArrayView> erle, - rtc::ArrayView> R2) { + ArrayView> S2_linear, + ArrayView> erle, + ArrayView> R2) { RTC_DCHECK_EQ(S2_linear.size(), erle.size()); RTC_DCHECK_EQ(S2_linear.size(), R2.size()); @@ -93,10 +97,9 @@ void LinearEstimate( // Estimates the residual echo power based on the estimate of the echo path // gain. -void NonLinearEstimate( - float echo_path_gain, - const std::array& X2, - rtc::ArrayView> R2) { +void NonLinearEstimate(float echo_path_gain, + const std::array& X2, + ArrayView> R2) { const size_t num_capture_channels = R2.size(); for (size_t ch = 0; ch < num_capture_channels; ++ch) { for (size_t k = 0; k < kFftLengthBy2Plus1; ++k) { @@ -107,7 +110,7 @@ void NonLinearEstimate( // Applies a soft noise gate to the echo generating power. 
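
The residual_echo_estimator.cc hunks above replace the global field_trial::IsEnabled() lookups with the FieldTrialsView obtained from the injected Environment. A sketch of the resulting pattern, using a hypothetical free function but the trial name and fallback value shown in the hunk:

#include "api/field_trials_view.h"

namespace webrtc {

float SelectDefaultGain(const FieldTrialsView& field_trials,
                        float configured_default_gain) {
  // Kill-switch style trial: when enabled, override the configured gain with
  // a conservative value, as GetEarlyReflectionsDefaultModeGain() does above.
  return field_trials.IsEnabled("WebRTC-Aec3UseLowEarlyReflectionsDefaultGain")
             ? 0.1f
             : configured_default_gain;
}

}  // namespace webrtc

Passing the trials in through the constructor keeps the estimator testable without relying on process-global state.
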
void ApplyNoiseGate(const EchoCanceller3Config::EchoModel& config, - rtc::ArrayView X2) { + ArrayView X2) { for (size_t k = 0; k < kFftLengthBy2Plus1; ++k) { if (config.noise_gate_power > X2[k]) { X2[k] = std::max(0.f, X2[k] - config.noise_gate_slope * @@ -122,7 +125,7 @@ void EchoGeneratingPower(size_t num_render_channels, const SpectrumBuffer& spectrum_buffer, const EchoCanceller3Config::EchoModel& echo_model, int filter_delay_blocks, - rtc::ArrayView X2) { + ArrayView X2) { int idx_stop; int idx_start; GetRenderIndexesToAnalyze(spectrum_buffer, echo_model, filter_delay_blocks, @@ -154,18 +157,22 @@ void EchoGeneratingPower(size_t num_render_channels, } // namespace -ResidualEchoEstimator::ResidualEchoEstimator(const EchoCanceller3Config& config, +ResidualEchoEstimator::ResidualEchoEstimator(const Environment& env, + const EchoCanceller3Config& config, size_t num_render_channels) : config_(config), num_render_channels_(num_render_channels), early_reflections_transparent_mode_gain_(GetTransparentModeGain()), late_reflections_transparent_mode_gain_(GetTransparentModeGain()), early_reflections_general_gain_( - GetEarlyReflectionsDefaultModeGain(config_.ep_strength)), + GetEarlyReflectionsDefaultModeGain(env.field_trials(), + config_.ep_strength)), late_reflections_general_gain_( - GetLateReflectionsDefaultModeGain(config_.ep_strength)), + GetLateReflectionsDefaultModeGain(env.field_trials(), + config_.ep_strength)), erle_onset_compensation_in_dominant_nearend_( - UseErleOnsetCompensationInDominantNearend(config_.ep_strength)) { + UseErleOnsetCompensationInDominantNearend(env.field_trials(), + config_.ep_strength)) { Reset(); } @@ -174,11 +181,11 @@ ResidualEchoEstimator::~ResidualEchoEstimator() = default; void ResidualEchoEstimator::Estimate( const AecState& aec_state, const RenderBuffer& render_buffer, - rtc::ArrayView> S2_linear, - rtc::ArrayView> Y2, + ArrayView> S2_linear, + ArrayView> Y2, bool dominant_nearend, - rtc::ArrayView> R2, - rtc::ArrayView> R2_unbounded) { + ArrayView> R2, + ArrayView> R2_unbounded) { RTC_DCHECK_EQ(R2.size(), Y2.size()); RTC_DCHECK_EQ(R2.size(), S2_linear.size()); @@ -270,10 +277,9 @@ void ResidualEchoEstimator::Reset() { void ResidualEchoEstimator::UpdateRenderNoisePower( const RenderBuffer& render_buffer) { std::array render_power_data; - rtc::ArrayView> X2 = + ArrayView> X2 = render_buffer.Spectrum(0); - rtc::ArrayView render_power = - X2[/*channel=*/0]; + ArrayView render_power = X2[/*channel=*/0]; if (num_render_channels_ > 1) { render_power_data.fill(0.f); for (size_t ch = 0; ch < num_render_channels_; ++ch) { @@ -317,10 +323,9 @@ void ResidualEchoEstimator::UpdateReverb(ReverbType reverb_type, // Compute render power for the reverb. std::array render_power_data; - rtc::ArrayView> X2 = + ArrayView> X2 = render_buffer.Spectrum(first_reverb_partition); - rtc::ArrayView render_power = - X2[/*channel=*/0]; + ArrayView render_power = X2[/*channel=*/0]; if (num_render_channels_ > 1) { render_power_data.fill(0.f); for (size_t ch = 0; ch < num_render_channels_; ++ch) { @@ -346,11 +351,11 @@ void ResidualEchoEstimator::UpdateReverb(ReverbType reverb_type, } // Adds the estimated power of the reverb to the residual echo power. void ResidualEchoEstimator::AddReverb( - rtc::ArrayView> R2) const { + ArrayView> R2) const { const size_t num_capture_channels = R2.size(); // Add the reverb power. 
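
ApplyNoiseGate above attenuates spectrum bins that fall below the configured gate power instead of zeroing them; the hunk cuts off after "noise_gate_slope *", so the exact subtraction term is not visible here. A sketch under the assumption that the attenuation grows with how far the bin falls below the gate:

#include <algorithm>
#include <cstddef>

void SoftNoiseGate(float gate_power, float gate_slope, float* x2, size_t num_bins) {
  for (size_t k = 0; k < num_bins; ++k) {
    if (gate_power > x2[k]) {
      x2[k] = std::max(0.f, x2[k] - gate_slope * (gate_power - x2[k]));
    }
  }
}
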
- rtc::ArrayView reverb_power = + ArrayView reverb_power = echo_reverb_.reverb(); for (size_t ch = 0; ch < num_capture_channels; ++ch) { for (size_t k = 0; k < kFftLengthBy2Plus1; ++k) { diff --git a/modules/audio_processing/aec3/residual_echo_estimator.h b/modules/audio_processing/aec3/residual_echo_estimator.h index c468764002..549d3defc9 100644 --- a/modules/audio_processing/aec3/residual_echo_estimator.h +++ b/modules/audio_processing/aec3/residual_echo_estimator.h @@ -13,9 +13,10 @@ #include #include +#include -#include "absl/types/optional.h" #include "api/audio/echo_canceller3_config.h" +#include "api/environment/environment.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/aec_state.h" #include "modules/audio_processing/aec3/render_buffer.h" @@ -27,7 +28,8 @@ namespace webrtc { class ResidualEchoEstimator { public: - ResidualEchoEstimator(const EchoCanceller3Config& config, + ResidualEchoEstimator(const Environment& env, + const EchoCanceller3Config& config, size_t num_render_channels); ~ResidualEchoEstimator(); @@ -37,11 +39,11 @@ class ResidualEchoEstimator { void Estimate( const AecState& aec_state, const RenderBuffer& render_buffer, - rtc::ArrayView> S2_linear, - rtc::ArrayView> Y2, + ArrayView> S2_linear, + ArrayView> Y2, bool dominant_nearend, - rtc::ArrayView> R2, - rtc::ArrayView> R2_unbounded); + ArrayView> R2, + ArrayView> R2_unbounded); private: enum class ReverbType { kLinear, kNonLinear }; @@ -61,8 +63,7 @@ class ResidualEchoEstimator { // Adds the estimated unmodelled echo power to the residual echo power // estimate. - void AddReverb( - rtc::ArrayView> R2) const; + void AddReverb(ArrayView> R2) const; // Gets the echo path gain to apply. float GetEchoPathGain(const AecState& aec_state, diff --git a/modules/audio_processing/aec3/residual_echo_estimator_unittest.cc b/modules/audio_processing/aec3/residual_echo_estimator_unittest.cc index 9a7bf0a89c..d083f8b439 100644 --- a/modules/audio_processing/aec3/residual_echo_estimator_unittest.cc +++ b/modules/audio_processing/aec3/residual_echo_estimator_unittest.cc @@ -13,6 +13,8 @@ #include #include "api/audio/echo_canceller3_config.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "modules/audio_processing/aec3/aec3_fft.h" #include "modules/audio_processing/aec3/aec_state.h" #include "modules/audio_processing/aec3/render_delay_buffer.h" @@ -37,8 +39,8 @@ class ResidualEchoEstimatorTest { : num_render_channels_(num_render_channels), num_capture_channels_(num_capture_channels), config_(config), - estimator_(config_, num_render_channels_), - aec_state_(config_, num_capture_channels_), + estimator_(env_, config_, num_render_channels_), + aec_state_(env_, config_, num_capture_channels_), render_delay_buffer_(RenderDelayBuffer::Create(config_, kSampleRateHz, num_render_channels_)), @@ -98,13 +100,14 @@ class ResidualEchoEstimatorTest { S2_linear_, Y2_, dominant_nearend, R2_, R2_unbounded_); } - rtc::ArrayView> R2() const { + ArrayView> R2() const { return R2_; } private: const size_t num_render_channels_; const size_t num_capture_channels_; + const Environment env_ = CreateEnvironment(); const EchoCanceller3Config& config_; ResidualEchoEstimator estimator_; AecState aec_state_; @@ -120,7 +123,7 @@ class ResidualEchoEstimatorTest { Random random_generator_; std::vector output_; std::array y_; - absl::optional delay_estimate_; + std::optional delay_estimate_; bool first_frame_ = true; }; diff --git 
a/modules/audio_processing/aec3/reverb_decay_estimator.cc b/modules/audio_processing/aec3/reverb_decay_estimator.cc index 2daf376911..f973f86ec4 100644 --- a/modules/audio_processing/aec3/reverb_decay_estimator.cc +++ b/modules/audio_processing/aec3/reverb_decay_estimator.cc @@ -32,7 +32,7 @@ constexpr float kEarlyReverbFirstPointAtLinearRegressors = -0.5f * kBlocksPerSection * kFftLengthBy2 + 0.5f; // Averages the values in a block of size kFftLengthBy2; -float BlockAverage(rtc::ArrayView v, size_t block_index) { +float BlockAverage(ArrayView v, size_t block_index) { constexpr float kOneByFftLengthBy2 = 1.f / kFftLengthBy2; const int i = block_index * kFftLengthBy2; RTC_DCHECK_GE(v.size(), i + kFftLengthBy2); @@ -60,7 +60,7 @@ constexpr float SymmetricArithmetricSum(int N) { } // Returns the peak energy of an impulse response. -float BlockEnergyPeak(rtc::ArrayView h, int peak_block) { +float BlockEnergyPeak(ArrayView h, int peak_block) { RTC_DCHECK_LE((peak_block + 1) * kFftLengthBy2, h.size()); RTC_DCHECK_GE(peak_block, 0); float peak_value = @@ -71,7 +71,7 @@ float BlockEnergyPeak(rtc::ArrayView h, int peak_block) { } // Returns the average energy of an impulse response block. -float BlockEnergyAverage(rtc::ArrayView h, int block_index) { +float BlockEnergyAverage(ArrayView h, int block_index) { RTC_DCHECK_LE((block_index + 1) * kFftLengthBy2, h.size()); RTC_DCHECK_GE(block_index, 0); constexpr float kOneByFftLengthBy2 = 1.f / kFftLengthBy2; @@ -101,8 +101,8 @@ ReverbDecayEstimator::ReverbDecayEstimator(const EchoCanceller3Config& config) ReverbDecayEstimator::~ReverbDecayEstimator() = default; -void ReverbDecayEstimator::Update(rtc::ArrayView filter, - const absl::optional& filter_quality, +void ReverbDecayEstimator::Update(ArrayView filter, + const std::optional& filter_quality, int filter_delay_blocks, bool usable_linear_filter, bool stationary_signal) { @@ -157,7 +157,7 @@ void ReverbDecayEstimator::ResetDecayEstimation() { late_reverb_end_ = 0; } -void ReverbDecayEstimator::EstimateDecay(rtc::ArrayView filter, +void ReverbDecayEstimator::EstimateDecay(ArrayView filter, int peak_block) { auto& h = filter; RTC_DCHECK_EQ(0, h.size() % kFftLengthBy2); @@ -221,8 +221,8 @@ void ReverbDecayEstimator::EstimateDecay(rtc::ArrayView filter, early_reverb_estimator_.Reset(); } -void ReverbDecayEstimator::AnalyzeFilter(rtc::ArrayView filter) { - auto h = rtc::ArrayView( +void ReverbDecayEstimator::AnalyzeFilter(ArrayView filter) { + auto h = ArrayView( filter.begin() + block_to_analyze_ * kFftLengthBy2, kFftLengthBy2); // Compute squared filter coeffiecients for the block to analyze_; diff --git a/modules/audio_processing/aec3/reverb_decay_estimator.h b/modules/audio_processing/aec3/reverb_decay_estimator.h index fee54210e6..4fcc8897d3 100644 --- a/modules/audio_processing/aec3/reverb_decay_estimator.h +++ b/modules/audio_processing/aec3/reverb_decay_estimator.h @@ -12,9 +12,9 @@ #define MODULES_AUDIO_PROCESSING_AEC3_REVERB_DECAY_ESTIMATOR_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "modules/audio_processing/aec3/aec3_common.h" // kMaxAdaptiveFilter... @@ -29,8 +29,8 @@ class ReverbDecayEstimator { explicit ReverbDecayEstimator(const EchoCanceller3Config& config); ~ReverbDecayEstimator(); // Updates the decay estimate. 
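
The helpers in reverb_decay_estimator.cc above (BlockAverage, BlockEnergyPeak, BlockEnergyAverage) all summarize the impulse response in blocks of kFftLengthBy2 taps. A sketch of the per-block energy average, under the assumptions that kFftLengthBy2 is 64 and that the average is taken over squared tap values:

#include <cstddef>
#include <numeric>
#include <vector>

constexpr size_t kFftLengthBy2Sketch = 64;  // Assumed AEC3 half FFT length.

float BlockEnergyAverageSketch(const std::vector<float>& h, size_t block_index) {
  const auto begin = h.begin() + block_index * kFftLengthBy2Sketch;
  const float energy =
      std::inner_product(begin, begin + kFftLengthBy2Sketch, begin, 0.f);
  return energy / kFftLengthBy2Sketch;
}
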
- void Update(rtc::ArrayView filter, - const absl::optional& filter_quality, + void Update(ArrayView filter, + const std::optional& filter_quality, int filter_delay_blocks, bool usable_linear_filter, bool stationary_signal); @@ -47,8 +47,8 @@ class ReverbDecayEstimator { void Dump(ApmDataDumper* data_dumper) const; private: - void EstimateDecay(rtc::ArrayView filter, int peak_block); - void AnalyzeFilter(rtc::ArrayView filter); + void EstimateDecay(ArrayView filter, int peak_block); + void AnalyzeFilter(ArrayView filter); void ResetDecayEstimation(); diff --git a/modules/audio_processing/aec3/reverb_frequency_response.cc b/modules/audio_processing/aec3/reverb_frequency_response.cc index 6e7282a1fc..508b0b825c 100644 --- a/modules/audio_processing/aec3/reverb_frequency_response.cc +++ b/modules/audio_processing/aec3/reverb_frequency_response.cc @@ -27,9 +27,8 @@ namespace { // Computes the ratio of the energies between the direct path and the tail. The // energy is computed in the power spectrum domain discarding the DC // contributions. -float AverageDecayWithinFilter( - rtc::ArrayView freq_resp_direct_path, - rtc::ArrayView freq_resp_tail) { +float AverageDecayWithinFilter(ArrayView freq_resp_direct_path, + ArrayView freq_resp_tail) { // Skipping the DC for the ratio computation constexpr size_t kSkipBins = 1; RTC_CHECK_EQ(freq_resp_direct_path.size(), freq_resp_tail.size()); @@ -62,7 +61,7 @@ void ReverbFrequencyResponse::Update( const std::vector>& frequency_response, int filter_delay_blocks, - const absl::optional& linear_filter_quality, + const std::optional& linear_filter_quality, bool stationary_block) { if (stationary_block || !linear_filter_quality) { return; @@ -76,10 +75,10 @@ void ReverbFrequencyResponse::Update( frequency_response, int filter_delay_blocks, float linear_filter_quality) { - rtc::ArrayView freq_resp_tail( + ArrayView freq_resp_tail( frequency_response[frequency_response.size() - 1]); - rtc::ArrayView freq_resp_direct_path( + ArrayView freq_resp_direct_path( frequency_response[filter_delay_blocks]); float average_decay = diff --git a/modules/audio_processing/aec3/reverb_frequency_response.h b/modules/audio_processing/aec3/reverb_frequency_response.h index 69b16b54d0..3fecba7396 100644 --- a/modules/audio_processing/aec3/reverb_frequency_response.h +++ b/modules/audio_processing/aec3/reverb_frequency_response.h @@ -12,9 +12,9 @@ #define MODULES_AUDIO_PROCESSING_AEC3_REVERB_FREQUENCY_RESPONSE_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "modules/audio_processing/aec3/aec3_common.h" @@ -31,13 +31,11 @@ class ReverbFrequencyResponse { void Update(const std::vector>& frequency_response, int filter_delay_blocks, - const absl::optional& linear_filter_quality, + const std::optional& linear_filter_quality, bool stationary_block); // Returns the estimated frequency response for the reverb. 
- rtc::ArrayView FrequencyResponse() const { - return tail_response_; - } + ArrayView FrequencyResponse() const { return tail_response_; } private: void Update(const std::vector>& diff --git a/modules/audio_processing/aec3/reverb_model.cc b/modules/audio_processing/aec3/reverb_model.cc index e4f3507d31..3581696163 100644 --- a/modules/audio_processing/aec3/reverb_model.cc +++ b/modules/audio_processing/aec3/reverb_model.cc @@ -30,7 +30,7 @@ void ReverbModel::Reset() { } void ReverbModel::UpdateReverbNoFreqShaping( - rtc::ArrayView power_spectrum, + ArrayView power_spectrum, float power_spectrum_scaling, float reverb_decay) { if (reverb_decay > 0) { @@ -42,10 +42,9 @@ void ReverbModel::UpdateReverbNoFreqShaping( } } -void ReverbModel::UpdateReverb( - rtc::ArrayView power_spectrum, - rtc::ArrayView power_spectrum_scaling, - float reverb_decay) { +void ReverbModel::UpdateReverb(ArrayView power_spectrum, + ArrayView power_spectrum_scaling, + float reverb_decay) { if (reverb_decay > 0) { // Update the estimate of the reverberant power. for (size_t k = 0; k < power_spectrum.size(); ++k) { diff --git a/modules/audio_processing/aec3/reverb_model.h b/modules/audio_processing/aec3/reverb_model.h index 47ed2f78f3..15e24895e6 100644 --- a/modules/audio_processing/aec3/reverb_model.h +++ b/modules/audio_processing/aec3/reverb_model.h @@ -29,9 +29,7 @@ class ReverbModel { void Reset(); // Returns the reverb. - rtc::ArrayView reverb() const { - return reverb_; - } + ArrayView reverb() const { return reverb_; } // The methods UpdateReverbNoFreqShaping and UpdateReverb update the // estimate of the reverberation contribution to an input/output power @@ -39,13 +37,13 @@ class ReverbModel { // power spectrum is pre-scaled. Use the method UpdateReverb when a different // scaling should be applied per frequency and UpdateReverb_no_freq_shape if // the same scaling should be used for all the frequencies. - void UpdateReverbNoFreqShaping(rtc::ArrayView power_spectrum, + void UpdateReverbNoFreqShaping(ArrayView power_spectrum, float power_spectrum_scaling, float reverb_decay); // Update the reverb based on new data. 
- void UpdateReverb(rtc::ArrayView power_spectrum, - rtc::ArrayView power_spectrum_scaling, + void UpdateReverb(ArrayView power_spectrum, + ArrayView power_spectrum_scaling, float reverb_decay); private: diff --git a/modules/audio_processing/aec3/reverb_model_estimator.cc b/modules/audio_processing/aec3/reverb_model_estimator.cc index 5cd7a7870d..398b906e32 100644 --- a/modules/audio_processing/aec3/reverb_model_estimator.cc +++ b/modules/audio_processing/aec3/reverb_model_estimator.cc @@ -28,11 +28,11 @@ ReverbModelEstimator::ReverbModelEstimator(const EchoCanceller3Config& config, ReverbModelEstimator::~ReverbModelEstimator() = default; void ReverbModelEstimator::Update( - rtc::ArrayView> impulse_responses, - rtc::ArrayView>> + ArrayView> impulse_responses, + ArrayView>> frequency_responses, - rtc::ArrayView> linear_filter_qualities, - rtc::ArrayView filter_delays_blocks, + ArrayView> linear_filter_qualities, + ArrayView filter_delays_blocks, const std::vector& usable_linear_estimates, bool stationary_block) { const size_t num_capture_channels = reverb_decay_estimators_.size(); diff --git a/modules/audio_processing/aec3/reverb_model_estimator.h b/modules/audio_processing/aec3/reverb_model_estimator.h index 63bade977f..8994a4a55e 100644 --- a/modules/audio_processing/aec3/reverb_model_estimator.h +++ b/modules/audio_processing/aec3/reverb_model_estimator.h @@ -13,9 +13,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" #include "modules/audio_processing/aec3/aec3_common.h" // kFftLengthBy2Plus1 @@ -35,11 +35,11 @@ class ReverbModelEstimator { // Updates the estimates based on new data. void Update( - rtc::ArrayView> impulse_responses, - rtc::ArrayView>> + ArrayView> impulse_responses, + ArrayView>> frequency_responses, - rtc::ArrayView> linear_filter_qualities, - rtc::ArrayView filter_delays_blocks, + ArrayView> linear_filter_qualities, + ArrayView filter_delays_blocks, const std::vector& usable_linear_estimates, bool stationary_block); @@ -53,7 +53,7 @@ class ReverbModelEstimator { // Return the frequency response of the reverberant echo. // TODO(peah): Correct to properly support multiple channels. 
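The recurring rtc::ArrayView to ArrayView rename in these hunks drops the rtc:: qualifier, presumably because the view type from "api/array_view.h" is now usable unqualified inside namespace webrtc. A hedged, self-contained sketch of typical usage with an explicit element type; the helper itself is illustrative and not part of the patch:

#include <cstddef>

#include "api/array_view.h"

namespace webrtc {

// Illustrative helper: averages a block of samples through a non-owning view.
inline float BlockAverageSketch(ArrayView<const float> samples) {
  if (samples.empty()) return 0.f;
  float sum = 0.f;
  for (float sample : samples) sum += sample;
  return sum / static_cast<float>(samples.size());
}

}  // namespace webrtc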
- rtc::ArrayView GetReverbFrequencyResponse() const { + ArrayView GetReverbFrequencyResponse() const { return reverb_frequency_responses_[0].FrequencyResponse(); } diff --git a/modules/audio_processing/aec3/reverb_model_estimator_unittest.cc b/modules/audio_processing/aec3/reverb_model_estimator_unittest.cc index fb7dcef37f..2e99a08360 100644 --- a/modules/audio_processing/aec3/reverb_model_estimator_unittest.cc +++ b/modules/audio_processing/aec3/reverb_model_estimator_unittest.cc @@ -14,9 +14,9 @@ #include #include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" #include "modules/audio_processing/aec3/aec3_common.h" @@ -74,7 +74,7 @@ class ReverbModelEstimatorTest { float true_power_tail_ = 0.f; std::vector> h_; std::vector>> H2_; - std::vector> quality_linear_; + std::vector> quality_linear_; }; void ReverbModelEstimatorTest::CreateImpulseResponseWithDecay() { @@ -109,7 +109,7 @@ void ReverbModelEstimatorTest::CreateImpulseResponseWithDecay() { H_j.Spectrum(Aec3Optimization::kNone, H2_[ch][j]); } } - rtc::ArrayView H2_tail(H2_[0][H2_[0].size() - 1]); + ArrayView H2_tail(H2_[0][H2_[0].size() - 1]); true_power_tail_ = std::accumulate(H2_tail.begin(), H2_tail.end(), 0.f); } void ReverbModelEstimatorTest::RunEstimator() { diff --git a/modules/audio_processing/aec3/signal_dependent_erle_estimator.cc b/modules/audio_processing/aec3/signal_dependent_erle_estimator.cc index a5e77092a6..b2bdb27cb7 100644 --- a/modules/audio_processing/aec3/signal_dependent_erle_estimator.cc +++ b/modules/audio_processing/aec3/signal_dependent_erle_estimator.cc @@ -177,13 +177,13 @@ void SignalDependentErleEstimator::Reset() { // correction factor to the erle that is given as an input to this method. void SignalDependentErleEstimator::Update( const RenderBuffer& render_buffer, - rtc::ArrayView>> + ArrayView>> filter_frequency_responses, - rtc::ArrayView X2, - rtc::ArrayView> Y2, - rtc::ArrayView> E2, - rtc::ArrayView> average_erle, - rtc::ArrayView> + ArrayView X2, + ArrayView> Y2, + ArrayView> E2, + ArrayView> average_erle, + ArrayView> average_erle_onset_compensated, const std::vector& converged_filters) { RTC_DCHECK_GT(num_sections_, 1); @@ -205,12 +205,12 @@ void SignalDependentErleEstimator::Update( float correction_factor = correction_factors_[ch][n_active_sections_[ch][k]] [band_to_subband_[k]]; - erle_[ch][k] = rtc::SafeClamp(average_erle[ch][k] * correction_factor, - min_erle_, max_erle_[band_to_subband_[k]]); + erle_[ch][k] = SafeClamp(average_erle[ch][k] * correction_factor, + min_erle_, max_erle_[band_to_subband_[k]]); if (use_onset_detection_) { - erle_onset_compensated_[ch][k] = rtc::SafeClamp( - average_erle_onset_compensated[ch][k] * correction_factor, - min_erle_, max_erle_[band_to_subband_[k]]); + erle_onset_compensated_[ch][k] = + SafeClamp(average_erle_onset_compensated[ch][k] * correction_factor, + min_erle_, max_erle_[band_to_subband_[k]]); } } } @@ -231,7 +231,7 @@ void SignalDependentErleEstimator::Dump( // together constitute 90% of the estimated echo energy. 
void SignalDependentErleEstimator::ComputeNumberOfActiveFilterSections( const RenderBuffer& render_buffer, - rtc::ArrayView>> + ArrayView>> filter_frequency_responses) { RTC_DCHECK_GT(num_sections_, 1); // Computes an approximation of the power spectrum if the filter would have @@ -244,17 +244,17 @@ void SignalDependentErleEstimator::ComputeNumberOfActiveFilterSections( } void SignalDependentErleEstimator::UpdateCorrectionFactors( - rtc::ArrayView X2, - rtc::ArrayView> Y2, - rtc::ArrayView> E2, + ArrayView X2, + ArrayView> Y2, + ArrayView> E2, const std::vector& converged_filters) { for (size_t ch = 0; ch < converged_filters.size(); ++ch) { if (converged_filters[ch]) { constexpr float kX2BandEnergyThreshold = 44015068.0f; constexpr float kSmthConstantDecreases = 0.1f; constexpr float kSmthConstantIncreases = kSmthConstantDecreases / 2.f; - auto subband_powers = [](rtc::ArrayView power_spectrum, - rtc::ArrayView power_spectrum_subbands) { + auto subband_powers = [](ArrayView power_spectrum, + ArrayView power_spectrum_subbands) { for (size_t subband = 0; subband < kSubbands; ++subband) { RTC_DCHECK_LE(kBandBoundaries[subband + 1], power_spectrum.size()); power_spectrum_subbands[subband] = std::accumulate( @@ -306,7 +306,7 @@ void SignalDependentErleEstimator::UpdateCorrectionFactors( alpha = static_cast(is_erle_updated[subband]) * alpha; erle_estimators_[ch][idx][subband] += alpha * (new_erle[subband] - erle_estimators_[ch][idx][subband]); - erle_estimators_[ch][idx][subband] = rtc::SafeClamp( + erle_estimators_[ch][idx][subband] = SafeClamp( erle_estimators_[ch][idx][subband], min_erle_, max_erle_[subband]); } @@ -317,8 +317,8 @@ void SignalDependentErleEstimator::UpdateCorrectionFactors( alpha = static_cast(is_erle_updated[subband]) * alpha; erle_ref_[ch][subband] += alpha * (new_erle[subband] - erle_ref_[ch][subband]); - erle_ref_[ch][subband] = rtc::SafeClamp(erle_ref_[ch][subband], - min_erle_, max_erle_[subband]); + erle_ref_[ch][subband] = + SafeClamp(erle_ref_[ch][subband], min_erle_, max_erle_[subband]); } for (size_t subband = 0; subband < kSubbands; ++subband) { @@ -344,7 +344,7 @@ void SignalDependentErleEstimator::UpdateCorrectionFactors( void SignalDependentErleEstimator::ComputeEchoEstimatePerFilterSection( const RenderBuffer& render_buffer, - rtc::ArrayView>> + ArrayView>> filter_frequency_responses) { const SpectrumBuffer& spectrum_render_buffer = render_buffer.GetSpectrumBuffer(); diff --git a/modules/audio_processing/aec3/signal_dependent_erle_estimator.h b/modules/audio_processing/aec3/signal_dependent_erle_estimator.h index 6847c1ab13..d46fd19e10 100644 --- a/modules/audio_processing/aec3/signal_dependent_erle_estimator.h +++ b/modules/audio_processing/aec3/signal_dependent_erle_estimator.h @@ -37,7 +37,7 @@ class SignalDependentErleEstimator { void Reset(); // Returns the Erle per frequency subband. - rtc::ArrayView> Erle( + ArrayView> Erle( bool onset_compensated) const { return onset_compensated && use_onset_detection_ ? erle_onset_compensated_ : erle_; @@ -47,13 +47,13 @@ class SignalDependentErleEstimator { // to be an estimation of the average Erle achieved by the linear filter. 
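The comment above describes selecting the number of filter sections that together make up 90% of the estimated echo energy. A standalone sketch of that cumulative-energy rule, assuming per-section energies are already available (the helper and its name are illustrative):

#include <cstddef>
#include <vector>

// Returns the smallest count of leading sections whose summed energy reaches
// 90% of the total energy.
inline size_t SectionsForNinetyPercentEnergy(
    const std::vector<float>& section_energies) {
  float total = 0.f;
  for (float energy : section_energies) total += energy;
  float cumulative = 0.f;
  for (size_t n = 0; n < section_energies.size(); ++n) {
    cumulative += section_energies[n];
    if (cumulative >= 0.9f * total) return n + 1;
  }
  return section_energies.size();
}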
void Update( const RenderBuffer& render_buffer, - rtc::ArrayView>> + ArrayView>> filter_frequency_response, - rtc::ArrayView X2, - rtc::ArrayView> Y2, - rtc::ArrayView> E2, - rtc::ArrayView> average_erle, - rtc::ArrayView> + ArrayView X2, + ArrayView> Y2, + ArrayView> E2, + ArrayView> average_erle, + ArrayView> average_erle_onset_compensated, const std::vector& converged_filters); @@ -64,18 +64,18 @@ class SignalDependentErleEstimator { private: void ComputeNumberOfActiveFilterSections( const RenderBuffer& render_buffer, - rtc::ArrayView>> + ArrayView>> filter_frequency_responses); void UpdateCorrectionFactors( - rtc::ArrayView X2, - rtc::ArrayView> Y2, - rtc::ArrayView> E2, + ArrayView X2, + ArrayView> Y2, + ArrayView> E2, const std::vector& converged_filters); void ComputeEchoEstimatePerFilterSection( const RenderBuffer& render_buffer, - rtc::ArrayView>> + ArrayView>> filter_frequency_responses); void ComputeActiveFilterSections(); diff --git a/modules/audio_processing/aec3/signal_dependent_erle_estimator_unittest.cc b/modules/audio_processing/aec3/signal_dependent_erle_estimator_unittest.cc index 67927a6c68..2653c9e474 100644 --- a/modules/audio_processing/aec3/signal_dependent_erle_estimator_unittest.cc +++ b/modules/audio_processing/aec3/signal_dependent_erle_estimator_unittest.cc @@ -49,15 +49,15 @@ class TestInputs { size_t num_capture_channels); ~TestInputs(); const RenderBuffer& GetRenderBuffer() { return *render_buffer_; } - rtc::ArrayView GetX2() { return X2_; } - rtc::ArrayView> GetY2() const { + ArrayView GetX2() { return X2_; } + ArrayView> GetY2() const { return Y2_; } - rtc::ArrayView> GetE2() const { + ArrayView> GetE2() const { return E2_; } - rtc::ArrayView>> - GetH2() const { + ArrayView>> GetH2() + const { return H2_; } const std::vector& GetConvergedFilters() const { diff --git a/modules/audio_processing/aec3/stationarity_estimator.cc b/modules/audio_processing/aec3/stationarity_estimator.cc index 4d364041b3..b6da56a3fd 100644 --- a/modules/audio_processing/aec3/stationarity_estimator.cc +++ b/modules/audio_processing/aec3/stationarity_estimator.cc @@ -42,7 +42,7 @@ void StationarityEstimator::Reset() { // Update just the noise estimator. 
Usefull until the delay is known void StationarityEstimator::UpdateNoiseEstimator( - rtc::ArrayView> spectrum) { + ArrayView> spectrum) { noise_.Update(spectrum); data_dumper_->DumpRaw("aec3_stationarity_noise_spectrum", noise_.Spectrum()); data_dumper_->DumpRaw("aec3_stationarity_is_block_stationary", @@ -51,7 +51,7 @@ void StationarityEstimator::UpdateNoiseEstimator( void StationarityEstimator::UpdateStationarityFlags( const SpectrumBuffer& spectrum_buffer, - rtc::ArrayView render_reverb_contribution_spectrum, + ArrayView render_reverb_contribution_spectrum, int idx_current, int num_lookahead) { std::array indexes; @@ -96,7 +96,7 @@ bool StationarityEstimator::IsBlockStationary() const { bool StationarityEstimator::EstimateBandStationarity( const SpectrumBuffer& spectrum_buffer, - rtc::ArrayView average_reverb, + ArrayView average_reverb, const std::array& indexes, size_t band) const { constexpr float kThrStationarity = 10.f; @@ -165,12 +165,12 @@ void StationarityEstimator::NoiseSpectrum::Reset() { } void StationarityEstimator::NoiseSpectrum::Update( - rtc::ArrayView> spectrum) { + ArrayView> spectrum) { RTC_DCHECK_LE(1, spectrum[0].size()); const int num_render_channels = static_cast(spectrum.size()); std::array avg_spectrum_data; - rtc::ArrayView avg_spectrum; + ArrayView avg_spectrum; if (num_render_channels == 1) { avg_spectrum = spectrum[0]; } else { diff --git a/modules/audio_processing/aec3/stationarity_estimator.h b/modules/audio_processing/aec3/stationarity_estimator.h index 8bcd3b789e..6edc195b66 100644 --- a/modules/audio_processing/aec3/stationarity_estimator.h +++ b/modules/audio_processing/aec3/stationarity_estimator.h @@ -37,13 +37,13 @@ class StationarityEstimator { // Update just the noise estimator. Usefull until the delay is known void UpdateNoiseEstimator( - rtc::ArrayView> spectrum); + ArrayView> spectrum); // Update the flag indicating whether this current frame is stationary. For // getting a more robust estimation, it looks at future and/or past frames. void UpdateStationarityFlags( const SpectrumBuffer& spectrum_buffer, - rtc::ArrayView render_reverb_contribution_spectrum, + ArrayView render_reverb_contribution_spectrum, int idx_current, int num_lookahead); @@ -63,7 +63,7 @@ class StationarityEstimator { // Get an estimation of the stationarity for the current band by looking // at the past/present/future available data. bool EstimateBandStationarity(const SpectrumBuffer& spectrum_buffer, - rtc::ArrayView average_reverb, + ArrayView average_reverb, const std::array& indexes, size_t band) const; @@ -88,10 +88,10 @@ class StationarityEstimator { // Update the noise power spectrum with a new frame. void Update( - rtc::ArrayView> spectrum); + ArrayView> spectrum); // Get the noise estimation power spectrum. - rtc::ArrayView Spectrum() const { return noise_spectrum_; } + ArrayView Spectrum() const { return noise_spectrum_; } // Get the noise power spectrum at a certain band. 
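NoiseSpectrum::Update above averages the per-channel render spectra before updating the noise estimate, short-circuiting the single-channel case. A simplified sketch of that averaging step; the bin count of 65 is an assumption standing in for kFftLengthBy2Plus1:

#include <array>
#include <cstddef>
#include <vector>

constexpr size_t kNumBinsSketch = 65;  // Assumed stand-in for kFftLengthBy2Plus1.

inline std::array<float, kNumBinsSketch> AverageSpectraSketch(
    const std::vector<std::array<float, kNumBinsSketch>>& spectra) {
  std::array<float, kNumBinsSketch> avg{};
  if (spectra.empty()) return avg;
  const float one_by_channels = 1.f / static_cast<float>(spectra.size());
  for (const auto& channel_spectrum : spectra) {
    for (size_t k = 0; k < kNumBinsSketch; ++k) {
      avg[k] += one_by_channels * channel_spectrum[k];
    }
  }
  return avg;
}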
float Power(size_t band) const { diff --git a/modules/audio_processing/aec3/subband_erle_estimator.cc b/modules/audio_processing/aec3/subband_erle_estimator.cc index dc7f92fd99..6208d26388 100644 --- a/modules/audio_processing/aec3/subband_erle_estimator.cc +++ b/modules/audio_processing/aec3/subband_erle_estimator.cc @@ -13,9 +13,10 @@ #include #include +#include "api/environment/environment.h" +#include "api/field_trials_view.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_minmax.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { @@ -34,18 +35,20 @@ std::array SetMaxErleBands(float max_erle_l, return max_erle; } -bool EnableMinErleDuringOnsets() { - return !field_trial::IsEnabled("WebRTC-Aec3MinErleDuringOnsetsKillSwitch"); +bool EnableMinErleDuringOnsets(const FieldTrialsView& field_trials) { + return !field_trials.IsEnabled("WebRTC-Aec3MinErleDuringOnsetsKillSwitch"); } } // namespace -SubbandErleEstimator::SubbandErleEstimator(const EchoCanceller3Config& config, +SubbandErleEstimator::SubbandErleEstimator(const Environment& env, + const EchoCanceller3Config& config, size_t num_capture_channels) : use_onset_detection_(config.erle.onset_detection), min_erle_(config.erle.min), max_erle_(SetMaxErleBands(config.erle.max_l, config.erle.max_h)), - use_min_erle_during_onsets_(EnableMinErleDuringOnsets()), + use_min_erle_during_onsets_( + EnableMinErleDuringOnsets(env.field_trials())), accum_spectra_(num_capture_channels), erle_(num_capture_channels), erle_onset_compensated_(num_capture_channels), @@ -72,9 +75,9 @@ void SubbandErleEstimator::Reset() { } void SubbandErleEstimator::Update( - rtc::ArrayView X2, - rtc::ArrayView> Y2, - rtc::ArrayView> E2, + ArrayView X2, + ArrayView> Y2, + ArrayView> E2, const std::vector& converged_filters) { UpdateAccumulatedSpectra(X2, Y2, E2, converged_filters); UpdateBands(converged_filters); @@ -138,7 +141,7 @@ void SubbandErleEstimator::UpdateBands( if (!use_min_erle_during_onsets_) { float alpha = new_erle[k] < erle_during_onsets_[ch][k] ? 0.3f : 0.15f; - erle_during_onsets_[ch][k] = rtc::SafeClamp( + erle_during_onsets_[ch][k] = SafeClamp( erle_during_onsets_[ch][k] + alpha * (new_erle[k] - erle_during_onsets_[ch][k]), min_erle_, max_erle_[k]); @@ -156,8 +159,7 @@ void SubbandErleEstimator::UpdateBands( if (new_erle < erle) { alpha = low_render_energy ? 
0.f : 0.1f; } - erle = - rtc::SafeClamp(erle + alpha * (new_erle - erle), min_erle, max_erle); + erle = SafeClamp(erle + alpha * (new_erle - erle), min_erle, max_erle); }; for (size_t k = 1; k < kFftLengthBy2; ++k) { @@ -211,9 +213,9 @@ void SubbandErleEstimator::ResetAccumulatedSpectra() { } void SubbandErleEstimator::UpdateAccumulatedSpectra( - rtc::ArrayView X2, - rtc::ArrayView> Y2, - rtc::ArrayView> E2, + ArrayView X2, + ArrayView> Y2, + ArrayView> E2, const std::vector& converged_filters) { auto& st = accum_spectra_; RTC_DCHECK_EQ(st.E2.size(), E2.size()); diff --git a/modules/audio_processing/aec3/subband_erle_estimator.h b/modules/audio_processing/aec3/subband_erle_estimator.h index 8bf9c4d645..fdfaad8872 100644 --- a/modules/audio_processing/aec3/subband_erle_estimator.h +++ b/modules/audio_processing/aec3/subband_erle_estimator.h @@ -19,6 +19,7 @@ #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" +#include "api/environment/environment.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/logging/apm_data_dumper.h" @@ -27,7 +28,8 @@ namespace webrtc { // Estimates the echo return loss enhancement for each frequency subband. class SubbandErleEstimator { public: - SubbandErleEstimator(const EchoCanceller3Config& config, + SubbandErleEstimator(const Environment& env, + const EchoCanceller3Config& config, size_t num_capture_channels); ~SubbandErleEstimator(); @@ -35,26 +37,25 @@ class SubbandErleEstimator { void Reset(); // Updates the ERLE estimate. - void Update(rtc::ArrayView X2, - rtc::ArrayView> Y2, - rtc::ArrayView> E2, + void Update(ArrayView X2, + ArrayView> Y2, + ArrayView> E2, const std::vector& converged_filters); // Returns the ERLE estimate. - rtc::ArrayView> Erle( + ArrayView> Erle( bool onset_compensated) const { return onset_compensated && use_onset_detection_ ? erle_onset_compensated_ : erle_; } // Returns the non-capped ERLE estimate. - rtc::ArrayView> ErleUnbounded() - const { + ArrayView> ErleUnbounded() const { return erle_unbounded_; } // Returns the ERLE estimate at onsets (only used for testing). 
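Several hunks in this patch replace the process-global field_trial::IsEnabled() lookup with a FieldTrialsView obtained from an injected Environment. A hedged sketch of the new pattern, reusing the kill-switch string visible in the hunk above; the wrapper class itself is illustrative:

#include "api/environment/environment.h"
#include "api/field_trials_view.h"

namespace webrtc {

// Illustrative wrapper: reads the onset-ERLE kill switch once at construction
// instead of querying a global field-trial registry.
class OnsetErlePolicySketch {
 public:
  explicit OnsetErlePolicySketch(const Environment& env)
      : use_min_erle_during_onsets_(!env.field_trials().IsEnabled(
            "WebRTC-Aec3MinErleDuringOnsetsKillSwitch")) {}

  bool use_min_erle_during_onsets() const {
    return use_min_erle_during_onsets_;
  }

 private:
  const bool use_min_erle_during_onsets_;
};

}  // namespace webrtc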
- rtc::ArrayView> ErleDuringOnsets() + ArrayView> ErleDuringOnsets() const { return erle_during_onsets_; } @@ -75,9 +76,9 @@ class SubbandErleEstimator { }; void UpdateAccumulatedSpectra( - rtc::ArrayView X2, - rtc::ArrayView> Y2, - rtc::ArrayView> E2, + ArrayView X2, + ArrayView> Y2, + ArrayView> E2, const std::vector& converged_filters); void ResetAccumulatedSpectra(); diff --git a/modules/audio_processing/aec3/subband_nearend_detector.cc b/modules/audio_processing/aec3/subband_nearend_detector.cc index 2aa400c3af..73c7c3c0e0 100644 --- a/modules/audio_processing/aec3/subband_nearend_detector.cc +++ b/modules/audio_processing/aec3/subband_nearend_detector.cc @@ -27,13 +27,12 @@ SubbandNearendDetector::SubbandNearendDetector( 1.f / (config_.subband2.high - config_.subband2.low + 1)) {} void SubbandNearendDetector::Update( - rtc::ArrayView> - nearend_spectrum, - rtc::ArrayView> - residual_echo_spectrum, - rtc::ArrayView> + ArrayView> nearend_spectrum, + ArrayView> + /* residual_echo_spectrum */, + ArrayView> comfort_noise_spectrum, - bool initial_state) { + bool /* initial_state */) { nearend_state_ = false; for (size_t ch = 0; ch < num_capture_channels_; ++ch) { const std::array& noise = diff --git a/modules/audio_processing/aec3/subband_nearend_detector.h b/modules/audio_processing/aec3/subband_nearend_detector.h index 8357edb65f..846b8345cf 100644 --- a/modules/audio_processing/aec3/subband_nearend_detector.h +++ b/modules/audio_processing/aec3/subband_nearend_detector.h @@ -30,13 +30,13 @@ class SubbandNearendDetector : public NearendDetector { bool IsNearendState() const override { return nearend_state_; } // Updates the state selection based on latest spectral estimates. - void Update(rtc::ArrayView> - nearend_spectrum, - rtc::ArrayView> - residual_echo_spectrum, - rtc::ArrayView> - comfort_noise_spectrum, - bool initial_state) override; + void Update( + ArrayView> nearend_spectrum, + ArrayView> + residual_echo_spectrum, + ArrayView> + comfort_noise_spectrum, + bool initial_state) override; private: const EchoCanceller3Config::Suppressor::SubbandNearendDetection config_; diff --git a/modules/audio_processing/aec3/subtractor.cc b/modules/audio_processing/aec3/subtractor.cc index aa36bb272a..4d81a6cc18 100644 --- a/modules/audio_processing/aec3/subtractor.cc +++ b/modules/audio_processing/aec3/subtractor.cc @@ -14,25 +14,26 @@ #include #include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" #include "modules/audio_processing/aec3/adaptive_fir_filter_erl.h" #include "modules/audio_processing/aec3/fft_data.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_minmax.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { -bool UseCoarseFilterResetHangover() { - return !field_trial::IsEnabled( +bool UseCoarseFilterResetHangover(const FieldTrialsView& field_trials) { + return !field_trials.IsEnabled( "WebRTC-Aec3CoarseFilterResetHangoverKillSwitch"); } void PredictionError(const Aec3Fft& fft, const FftData& S, - rtc::ArrayView y, + ArrayView y, std::array* e, std::array* s) { std::array tmp; @@ -48,10 +49,10 @@ void PredictionError(const Aec3Fft& fft, } } -void ScaleFilterOutput(rtc::ArrayView y, +void ScaleFilterOutput(ArrayView y, float factor, - rtc::ArrayView e, - rtc::ArrayView s) { + ArrayView e, + ArrayView s) { RTC_DCHECK_EQ(y.size(), e.size()); RTC_DCHECK_EQ(y.size(), s.size()); for (size_t k = 0; k < y.size(); ++k) { @@ -62,7 +63,8 
@@ void ScaleFilterOutput(rtc::ArrayView y, } // namespace -Subtractor::Subtractor(const EchoCanceller3Config& config, +Subtractor::Subtractor(const Environment& env, + const EchoCanceller3Config& config, size_t num_render_channels, size_t num_capture_channels, ApmDataDumper* data_dumper, @@ -72,7 +74,8 @@ Subtractor::Subtractor(const EchoCanceller3Config& config, optimization_(optimization), config_(config), num_capture_channels_(num_capture_channels), - use_coarse_filter_reset_hangover_(UseCoarseFilterResetHangover()), + use_coarse_filter_reset_hangover_( + UseCoarseFilterResetHangover(env.field_trials())), refined_filters_(num_capture_channels_), coarse_filter_(num_capture_channels_), refined_gains_(num_capture_channels_), @@ -179,7 +182,7 @@ void Subtractor::Process(const RenderBuffer& render_buffer, const Block& capture, const RenderSignalAnalyzer& render_signal_analyzer, const AecState& aec_state, - rtc::ArrayView outputs) { + ArrayView outputs) { RTC_DCHECK_EQ(num_capture_channels_, capture.NumChannels()); // Compute the render powers. @@ -205,7 +208,7 @@ void Subtractor::Process(const RenderBuffer& render_buffer, // Process all capture channels for (size_t ch = 0; ch < num_capture_channels_; ++ch) { SubtractorOutput& output = outputs[ch]; - rtc::ArrayView y = capture.View(/*band=*/0, ch); + ArrayView y = capture.View(/*band=*/0, ch); FftData& E_refined = output.E_refined; FftData E_coarse; std::array& e_refined = output.e_refined; @@ -313,7 +316,7 @@ void Subtractor::Process(const RenderBuffer& render_buffer, } std::for_each(e_refined.begin(), e_refined.end(), - [](float& a) { a = rtc::SafeClamp(a, -32768.f, 32767.f); }); + [](float& a) { a = SafeClamp(a, -32768.f, 32767.f); }); if (ch == 0) { data_dumper_->DumpWav("aec3_refined_filters_output", kBlockSize, diff --git a/modules/audio_processing/aec3/subtractor.h b/modules/audio_processing/aec3/subtractor.h index 86159a3442..0b8d04719a 100644 --- a/modules/audio_processing/aec3/subtractor.h +++ b/modules/audio_processing/aec3/subtractor.h @@ -19,6 +19,7 @@ #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" +#include "api/environment/environment.h" #include "modules/audio_processing/aec3/adaptive_fir_filter.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/aec3_fft.h" @@ -38,7 +39,8 @@ namespace webrtc { // Proves linear echo cancellation functionality class Subtractor { public: - Subtractor(const EchoCanceller3Config& config, + Subtractor(const Environment& env, + const EchoCanceller3Config& config, size_t num_render_channels, size_t num_capture_channels, ApmDataDumper* data_dumper, @@ -52,7 +54,7 @@ class Subtractor { const Block& capture, const RenderSignalAnalyzer& render_signal_analyzer, const AecState& aec_state, - rtc::ArrayView outputs); + ArrayView outputs); void HandleEchoPathChange(const EchoPathVariability& echo_path_variability); @@ -75,7 +77,7 @@ class Subtractor { void DumpFilters() { data_dumper_->DumpRaw( "aec3_subtractor_h_refined", - rtc::ArrayView( + ArrayView( refined_impulse_responses_[0].data(), GetTimeDomainLength( refined_filters_[0]->max_filter_size_partitions()))); @@ -83,7 +85,7 @@ class Subtractor { RTC_DCHECK_GT(coarse_impulse_responses_.size(), 0); data_dumper_->DumpRaw( "aec3_subtractor_h_coarse", - rtc::ArrayView( + ArrayView( coarse_impulse_responses_[0].data(), GetTimeDomainLength( coarse_filter_[0]->max_filter_size_partitions()))); diff --git a/modules/audio_processing/aec3/subtractor_output.cc 
b/modules/audio_processing/aec3/subtractor_output.cc index ed80101f06..0aef6b9ad5 100644 --- a/modules/audio_processing/aec3/subtractor_output.cc +++ b/modules/audio_processing/aec3/subtractor_output.cc @@ -33,7 +33,7 @@ void SubtractorOutput::Reset() { y2 = 0.f; } -void SubtractorOutput::ComputeMetrics(rtc::ArrayView y) { +void SubtractorOutput::ComputeMetrics(ArrayView y) { const auto sum_of_squares = [](float a, float b) { return a + b * b; }; y2 = std::accumulate(y.begin(), y.end(), 0.f, sum_of_squares); e2_refined = diff --git a/modules/audio_processing/aec3/subtractor_output.h b/modules/audio_processing/aec3/subtractor_output.h index d2d12082c6..10349f5937 100644 --- a/modules/audio_processing/aec3/subtractor_output.h +++ b/modules/audio_processing/aec3/subtractor_output.h @@ -44,7 +44,7 @@ struct SubtractorOutput { void Reset(); // Updates the powers of the signals. - void ComputeMetrics(rtc::ArrayView y); + void ComputeMetrics(ArrayView y); }; } // namespace webrtc diff --git a/modules/audio_processing/aec3/subtractor_output_analyzer.cc b/modules/audio_processing/aec3/subtractor_output_analyzer.cc index baf0600161..b2b5ce0b60 100644 --- a/modules/audio_processing/aec3/subtractor_output_analyzer.cc +++ b/modules/audio_processing/aec3/subtractor_output_analyzer.cc @@ -20,7 +20,7 @@ SubtractorOutputAnalyzer::SubtractorOutputAnalyzer(size_t num_capture_channels) : filters_converged_(num_capture_channels, false) {} void SubtractorOutputAnalyzer::Update( - rtc::ArrayView subtractor_output, + ArrayView subtractor_output, bool* any_filter_converged, bool* any_coarse_filter_converged, bool* all_filters_diverged) { @@ -44,7 +44,7 @@ void SubtractorOutputAnalyzer::Update( bool coarse_filter_converged_strict = e2_coarse < 0.05f * y2 && y2 > kConvergenceThreshold; bool coarse_filter_converged_relaxed = - e2_coarse < 0.2f * y2 && y2 > kConvergenceThresholdLowLevel; + e2_coarse < 0.3f * y2 && y2 > kConvergenceThresholdLowLevel; float min_e2 = std::min(e2_refined, e2_coarse); bool filter_diverged = min_e2 > 1.5f * y2 && y2 > 30.f * 30.f * kBlockSize; filters_converged_[ch] = diff --git a/modules/audio_processing/aec3/subtractor_output_analyzer.h b/modules/audio_processing/aec3/subtractor_output_analyzer.h index 32707dbb19..c9ccafacc3 100644 --- a/modules/audio_processing/aec3/subtractor_output_analyzer.h +++ b/modules/audio_processing/aec3/subtractor_output_analyzer.h @@ -24,7 +24,7 @@ class SubtractorOutputAnalyzer { ~SubtractorOutputAnalyzer() = default; // Analyses the subtractor output. 
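The subtractor_output_analyzer.cc hunk above relaxes the coarse-filter convergence test from 20% to 30% of the capture block energy, while the strict test stays at 5%. A minimal restatement of the relaxed check as a free function; the parameter names are paraphrased from the surrounding code:

// e2_coarse: energy of the coarse-filter error, y2: energy of the capture
// block for the same channel.
inline bool CoarseFilterConvergedRelaxedSketch(float e2_coarse,
                                               float y2,
                                               float low_level_threshold) {
  // Residual energy below 30% of the capture energy (20% before this patch),
  // provided the block carries enough energy for the ratio to be meaningful.
  return e2_coarse < 0.3f * y2 && y2 > low_level_threshold;
}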
- void Update(rtc::ArrayView subtractor_output, + void Update(ArrayView subtractor_output, bool* any_filter_converged, bool* any_coarse_filter_converged, bool* all_filters_diverged); diff --git a/modules/audio_processing/aec3/subtractor_unittest.cc b/modules/audio_processing/aec3/subtractor_unittest.cc index 56b9cec9f1..1b53273304 100644 --- a/modules/audio_processing/aec3/subtractor_unittest.cc +++ b/modules/audio_processing/aec3/subtractor_unittest.cc @@ -15,6 +15,8 @@ #include #include +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "modules/audio_processing/aec3/aec_state.h" #include "modules/audio_processing/aec3/render_delay_buffer.h" #include "modules/audio_processing/test/echo_canceller_test_tools.h" @@ -27,6 +29,7 @@ namespace webrtc { namespace { std::vector RunSubtractorTest( + const Environment& env, size_t num_render_channels, size_t num_capture_channels, int num_blocks_to_process, @@ -42,9 +45,9 @@ std::vector RunSubtractorTest( config.filter.refined.length_blocks = refined_filter_length_blocks; config.filter.coarse.length_blocks = coarse_filter_length_blocks; - Subtractor subtractor(config, num_render_channels, num_capture_channels, + Subtractor subtractor(env, config, num_render_channels, num_capture_channels, &data_dumper, DetectOptimization()); - absl::optional delay_estimate; + std::optional delay_estimate; Block x(kNumBands, num_render_channels); Block y(/*num_bands=*/1, num_capture_channels); std::array x_old; @@ -59,7 +62,7 @@ std::vector RunSubtractorTest( std::vector> E2_refined( num_capture_channels); std::array E2_coarse; - AecState aec_state(config, num_capture_channels); + AecState aec_state(env, config, num_capture_channels); x_old.fill(0.f); for (auto& Y2_ch : Y2) { Y2_ch.fill(0.f); @@ -96,7 +99,7 @@ std::vector RunSubtractorTest( std::make_unique(kHighPassFilterCoefficients, 1); } - for (int k = 0; k < num_blocks_to_process; ++k) { + for (int block_num = 0; block_num < num_blocks_to_process; ++block_num) { for (size_t render_ch = 0; render_ch < num_render_channels; ++render_ch) { RandomizeSampleVector(&random_generator, x.View(/*band=*/0, render_ch)); } @@ -109,7 +112,7 @@ std::vector RunSubtractorTest( } else { for (size_t capture_ch = 0; capture_ch < num_capture_channels; ++capture_ch) { - rtc::ArrayView y_view = y.View(/*band=*/0, capture_ch); + ArrayView y_view = y.View(/*band=*/0, capture_ch); for (size_t render_ch = 0; render_ch < num_render_channels; ++render_ch) { std::array y_channel; @@ -129,7 +132,7 @@ std::vector RunSubtractorTest( } render_delay_buffer->Insert(x); - if (k == 0) { + if (block_num == 0) { render_delay_buffer->Reset(); } render_delay_buffer->PrepareCaptureProcessing(); @@ -139,7 +142,7 @@ std::vector RunSubtractorTest( // Handle echo path changes. 
if (std::find(blocks_with_echo_path_changes.begin(), blocks_with_echo_path_changes.end(), - k) != blocks_with_echo_path_changes.end()) { + block_num) != blocks_with_echo_path_changes.end()) { subtractor.HandleEchoPathChange(EchoPathVariability( true, EchoPathVariability::DelayAdjustment::kNewDetectedDelay, false)); @@ -176,7 +179,7 @@ std::string ProduceDebugText(size_t num_render_channels, size_t num_capture_channels, size_t delay, int filter_length_blocks) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "delay: " << delay << ", "; ss << "filter_length_blocks:" << filter_length_blocks << ", "; ss << "num_render_channels:" << num_render_channels << ", "; @@ -190,22 +193,23 @@ std::string ProduceDebugText(size_t num_render_channels, // Verifies that the check for non data dumper works. TEST(SubtractorDeathTest, NullDataDumper) { - EXPECT_DEATH( - Subtractor(EchoCanceller3Config(), 1, 1, nullptr, DetectOptimization()), - ""); + EXPECT_DEATH(Subtractor(CreateEnvironment(), EchoCanceller3Config(), 1, 1, + nullptr, DetectOptimization()), + ""); } #endif // Verifies that the subtractor is able to converge on correlated data. TEST(Subtractor, Convergence) { + const Environment env = CreateEnvironment(); std::vector blocks_with_echo_path_changes; for (size_t filter_length_blocks : {12, 20, 30}) { for (size_t delay_samples : {0, 64, 150, 200, 301}) { SCOPED_TRACE(ProduceDebugText(1, 1, delay_samples, filter_length_blocks)); std::vector echo_to_nearend_powers = RunSubtractorTest( - 1, 1, 2500, delay_samples, filter_length_blocks, filter_length_blocks, - false, blocks_with_echo_path_changes); + env, 1, 1, 2500, delay_samples, filter_length_blocks, + filter_length_blocks, false, blocks_with_echo_path_changes); for (float echo_to_nearend_power : echo_to_nearend_powers) { EXPECT_GT(0.1f, echo_to_nearend_power); @@ -217,9 +221,10 @@ TEST(Subtractor, Convergence) { // Verifies that the subtractor is able to handle the case when the refined // filter is longer than the coarse filter. TEST(Subtractor, RefinedFilterLongerThanCoarseFilter) { + const Environment env = CreateEnvironment(); std::vector blocks_with_echo_path_changes; std::vector echo_to_nearend_powers = RunSubtractorTest( - 1, 1, 400, 64, 20, 15, false, blocks_with_echo_path_changes); + env, 1, 1, 400, 64, 20, 15, false, blocks_with_echo_path_changes); for (float echo_to_nearend_power : echo_to_nearend_powers) { EXPECT_GT(0.5f, echo_to_nearend_power); } @@ -228,9 +233,10 @@ TEST(Subtractor, RefinedFilterLongerThanCoarseFilter) { // Verifies that the subtractor is able to handle the case when the coarse // filter is longer than the refined filter. TEST(Subtractor, CoarseFilterLongerThanRefinedFilter) { + const Environment env = CreateEnvironment(); std::vector blocks_with_echo_path_changes; std::vector echo_to_nearend_powers = RunSubtractorTest( - 1, 1, 400, 64, 15, 20, false, blocks_with_echo_path_changes); + env, 1, 1, 400, 64, 15, 20, false, blocks_with_echo_path_changes); for (float echo_to_nearend_power : echo_to_nearend_powers) { EXPECT_GT(0.5f, echo_to_nearend_power); } @@ -238,14 +244,15 @@ TEST(Subtractor, CoarseFilterLongerThanRefinedFilter) { // Verifies that the subtractor does not converge on uncorrelated signals. 
TEST(Subtractor, NonConvergenceOnUncorrelatedSignals) { + const Environment env = CreateEnvironment(); std::vector blocks_with_echo_path_changes; for (size_t filter_length_blocks : {12, 20, 30}) { for (size_t delay_samples : {0, 64, 150, 200, 301}) { SCOPED_TRACE(ProduceDebugText(1, 1, delay_samples, filter_length_blocks)); std::vector echo_to_nearend_powers = RunSubtractorTest( - 1, 1, 3000, delay_samples, filter_length_blocks, filter_length_blocks, - true, blocks_with_echo_path_changes); + env, 1, 1, 3000, delay_samples, filter_length_blocks, + filter_length_blocks, true, blocks_with_echo_path_changes); for (float echo_to_nearend_power : echo_to_nearend_powers) { EXPECT_NEAR(1.f, echo_to_nearend_power, 0.1); } @@ -273,12 +280,13 @@ INSTANTIATE_TEST_SUITE_P(DebugMultiChannel, TEST_P(SubtractorMultiChannelUpToEightRender, Convergence) { const size_t num_render_channels = std::get<0>(GetParam()); const size_t num_capture_channels = std::get<1>(GetParam()); + const Environment env = CreateEnvironment(); std::vector blocks_with_echo_path_changes; size_t num_blocks_to_process = 2500 * num_render_channels; std::vector echo_to_nearend_powers = RunSubtractorTest( - num_render_channels, num_capture_channels, num_blocks_to_process, 64, 20, - 20, false, blocks_with_echo_path_changes); + env, num_render_channels, num_capture_channels, num_blocks_to_process, 64, + 20, 20, false, blocks_with_echo_path_changes); for (float echo_to_nearend_power : echo_to_nearend_powers) { EXPECT_GT(0.1f, echo_to_nearend_power); @@ -306,12 +314,13 @@ TEST_P(SubtractorMultiChannelUpToFourRender, NonConvergenceOnUncorrelatedSignals) { const size_t num_render_channels = std::get<0>(GetParam()); const size_t num_capture_channels = std::get<1>(GetParam()); + const Environment env = CreateEnvironment(); std::vector blocks_with_echo_path_changes; size_t num_blocks_to_process = 5000 * num_render_channels; std::vector echo_to_nearend_powers = RunSubtractorTest( - num_render_channels, num_capture_channels, num_blocks_to_process, 64, 20, - 20, true, blocks_with_echo_path_changes); + env, num_render_channels, num_capture_channels, num_blocks_to_process, 64, + 20, 20, true, blocks_with_echo_path_changes); for (float echo_to_nearend_power : echo_to_nearend_powers) { EXPECT_LT(.8f, echo_to_nearend_power); EXPECT_NEAR(1.f, echo_to_nearend_power, 0.25f); diff --git a/modules/audio_processing/aec3/suppression_filter.cc b/modules/audio_processing/aec3/suppression_filter.cc index 83ded425d5..ec081ab6d1 100644 --- a/modules/audio_processing/aec3/suppression_filter.cc +++ b/modules/audio_processing/aec3/suppression_filter.cc @@ -81,11 +81,11 @@ SuppressionFilter::SuppressionFilter(Aec3Optimization optimization, SuppressionFilter::~SuppressionFilter() = default; void SuppressionFilter::ApplyGain( - rtc::ArrayView comfort_noise, - rtc::ArrayView comfort_noise_high_band, + ArrayView comfort_noise, + ArrayView comfort_noise_high_band, const std::array& suppression_gain, float high_bands_gain, - rtc::ArrayView E_lowest_band, + ArrayView E_lowest_band, Block* e) { RTC_DCHECK(e); RTC_DCHECK_EQ(e->NumBands(), NumBandsForRate(sample_rate_hz_)); @@ -171,7 +171,7 @@ void SuppressionFilter::ApplyGain( for (int b = 0; b < e->NumBands(); ++b) { auto e_band = e->View(b, ch); for (size_t i = 0; i < kFftLengthBy2; ++i) { - e_band[i] = rtc::SafeClamp(e_band[i], -32768.f, 32767.f); + e_band[i] = SafeClamp(e_band[i], -32768.f, 32767.f); } } } diff --git a/modules/audio_processing/aec3/suppression_filter.h b/modules/audio_processing/aec3/suppression_filter.h 
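suppression_filter.cc above clamps every output sample into the 16-bit PCM range via SafeClamp (now unqualified). An equivalent standalone sketch using only the standard library:

#include <algorithm>
#include <cstddef>

// Clamp float samples to [-32768, 32767] so a later int16_t conversion cannot
// wrap around.
inline void ClampToPcm16Sketch(float* samples, size_t size) {
  for (size_t i = 0; i < size; ++i) {
    samples[i] = std::clamp(samples[i], -32768.f, 32767.f);
  }
}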
index c18b2334bf..e8082845f1 100644 --- a/modules/audio_processing/aec3/suppression_filter.h +++ b/modules/audio_processing/aec3/suppression_filter.h @@ -31,11 +31,11 @@ class SuppressionFilter { SuppressionFilter(const SuppressionFilter&) = delete; SuppressionFilter& operator=(const SuppressionFilter&) = delete; - void ApplyGain(rtc::ArrayView comfort_noise, - rtc::ArrayView comfort_noise_high_bands, + void ApplyGain(ArrayView comfort_noise, + ArrayView comfort_noise_high_bands, const std::array& suppression_gain, float high_bands_gain, - rtc::ArrayView E_lowest_band, + ArrayView E_lowest_band, Block* e); private: diff --git a/modules/audio_processing/aec3/suppression_gain.cc b/modules/audio_processing/aec3/suppression_gain.cc index 037dabaabe..6ae9bfc1a1 100644 --- a/modules/audio_processing/aec3/suppression_gain.cc +++ b/modules/audio_processing/aec3/suppression_gain.cc @@ -22,7 +22,6 @@ #include "modules/audio_processing/aec3/vector_math.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { @@ -65,14 +64,13 @@ void LimitHighFrequencyGains(bool conservative_hf_suppression, // Scales the echo according to assessed audibility at the other end. void WeightEchoForAudibility(const EchoCanceller3Config& config, - rtc::ArrayView echo, - rtc::ArrayView weighted_echo) { + ArrayView echo, + ArrayView weighted_echo) { RTC_DCHECK_EQ(kFftLengthBy2Plus1, echo.size()); RTC_DCHECK_EQ(kFftLengthBy2Plus1, weighted_echo.size()); auto weigh = [](float threshold, float normalizer, size_t begin, size_t end, - rtc::ArrayView echo, - rtc::ArrayView weighted_echo) { + ArrayView echo, ArrayView weighted_echo) { for (size_t k = begin; k < end; ++k) { if (echo[k] < threshold) { float tmp = (threshold - echo[k]) * normalizer; @@ -104,10 +102,10 @@ void WeightEchoForAudibility(const EchoCanceller3Config& config, std::atomic SuppressionGain::instance_count_(0); float SuppressionGain::UpperBandsGain( - rtc::ArrayView> echo_spectrum, - rtc::ArrayView> + ArrayView> echo_spectrum, + ArrayView> comfort_noise_spectrum, - const absl::optional& narrow_peak_band, + const std::optional& narrow_peak_band, bool saturated_echo, const Block& render, const std::array& low_band_gain) const { @@ -171,7 +169,7 @@ float SuppressionGain::UpperBandsGain( if (!dominant_nearend_detector_->IsNearendState()) { // Bound the upper gain during significant echo activity. const auto& cfg = config_.suppressor.high_bands_suppression; - auto low_frequency_energy = [](rtc::ArrayView spectrum) { + auto low_frequency_energy = [](ArrayView spectrum) { RTC_DCHECK_LE(16, spectrum.size()); return std::accumulate(spectrum.begin() + 1, spectrum.begin() + 16, 0.f); }; @@ -212,13 +210,12 @@ void SuppressionGain::GainToNoAudibleEcho( // Compute the minimum gain as the attenuating gain to put the signal just // above the zero sample values. -void SuppressionGain::GetMinGain( - rtc::ArrayView weighted_residual_echo, - rtc::ArrayView last_nearend, - rtc::ArrayView last_echo, - bool low_noise_render, - bool saturated_echo, - rtc::ArrayView min_gain) const { +void SuppressionGain::GetMinGain(ArrayView weighted_residual_echo, + ArrayView last_nearend, + ArrayView last_echo, + bool low_noise_render, + bool saturated_echo, + ArrayView min_gain) const { if (!saturated_echo) { const float min_echo_power = low_noise_render ? 
config_.echo_audibility.low_render_limit @@ -254,7 +251,7 @@ void SuppressionGain::GetMinGain( // Compute the maximum gain by limiting the gain increase from the previous // gain. -void SuppressionGain::GetMaxGain(rtc::ArrayView max_gain) const { +void SuppressionGain::GetMaxGain(ArrayView max_gain) const { const auto& inc = dominant_nearend_detector_->IsNearendState() ? nearend_params_.max_inc_factor : normal_params_.max_inc_factor; @@ -267,10 +264,9 @@ void SuppressionGain::GetMaxGain(rtc::ArrayView max_gain) const { void SuppressionGain::LowerBandGain( bool low_noise_render, const AecState& aec_state, - rtc::ArrayView> - suppressor_input, - rtc::ArrayView> residual_echo, - rtc::ArrayView> comfort_noise, + ArrayView> suppressor_input, + ArrayView> residual_echo, + ArrayView> comfort_noise, bool clock_drift, std::array* gain) { gain->fill(1.f); @@ -323,7 +319,7 @@ void SuppressionGain::LowerBandGain( SuppressionGain::SuppressionGain(const EchoCanceller3Config& config, Aec3Optimization optimization, - int sample_rate_hz, + int /* sample_rate_hz */, size_t num_capture_channels) : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), optimization_(optimization), @@ -360,14 +356,13 @@ SuppressionGain::SuppressionGain(const EchoCanceller3Config& config, SuppressionGain::~SuppressionGain() = default; void SuppressionGain::GetGain( - rtc::ArrayView> - nearend_spectrum, - rtc::ArrayView> echo_spectrum, - rtc::ArrayView> + ArrayView> nearend_spectrum, + ArrayView> echo_spectrum, + ArrayView> residual_echo_spectrum, - rtc::ArrayView> + ArrayView> residual_echo_spectrum_unbounded, - rtc::ArrayView> + ArrayView> comfort_noise_spectrum, const RenderSignalAnalyzer& render_signal_analyzer, const AecState& aec_state, @@ -394,7 +389,7 @@ void SuppressionGain::GetGain( low_band_gain); // Compute the gain for the upper bands. - const absl::optional narrow_peak_band = + const std::optional narrow_peak_band = render_signal_analyzer.NarrowPeakBand(); *high_bands_gain = diff --git a/modules/audio_processing/aec3/suppression_gain.h b/modules/audio_processing/aec3/suppression_gain.h index c19ddd7e30..65d7985fb5 100644 --- a/modules/audio_processing/aec3/suppression_gain.h +++ b/modules/audio_processing/aec3/suppression_gain.h @@ -14,9 +14,9 @@ #include #include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" #include "modules/audio_processing/aec3/aec3_common.h" @@ -41,14 +41,13 @@ class SuppressionGain { SuppressionGain& operator=(const SuppressionGain&) = delete; void GetGain( - rtc::ArrayView> - nearend_spectrum, - rtc::ArrayView> echo_spectrum, - rtc::ArrayView> + ArrayView> nearend_spectrum, + ArrayView> echo_spectrum, + ArrayView> residual_echo_spectrum, - rtc::ArrayView> + ArrayView> residual_echo_spectrum_unbounded, - rtc::ArrayView> + ArrayView> comfort_noise_spectrum, const RenderSignalAnalyzer& render_signal_analyzer, const AecState& aec_state, @@ -67,10 +66,10 @@ class SuppressionGain { private: // Computes the gain to apply for the bands beyond the first band. 
float UpperBandsGain( - rtc::ArrayView> echo_spectrum, - rtc::ArrayView> + ArrayView> echo_spectrum, + ArrayView> comfort_noise_spectrum, - const absl::optional& narrow_peak_band, + const std::optional& narrow_peak_band, bool saturated_echo, const Block& render, const std::array& low_band_gain) const; @@ -83,21 +82,20 @@ class SuppressionGain { void LowerBandGain( bool stationary_with_low_power, const AecState& aec_state, - rtc::ArrayView> - suppressor_input, - rtc::ArrayView> residual_echo, - rtc::ArrayView> comfort_noise, + ArrayView> suppressor_input, + ArrayView> residual_echo, + ArrayView> comfort_noise, bool clock_drift, std::array* gain); - void GetMinGain(rtc::ArrayView weighted_residual_echo, - rtc::ArrayView last_nearend, - rtc::ArrayView last_echo, + void GetMinGain(ArrayView weighted_residual_echo, + ArrayView last_nearend, + ArrayView last_echo, bool low_noise_render, bool saturated_echo, - rtc::ArrayView min_gain) const; + ArrayView min_gain) const; - void GetMaxGain(rtc::ArrayView max_gain) const; + void GetMaxGain(ArrayView max_gain) const; class LowNoiseRenderDetector { public: diff --git a/modules/audio_processing/aec3/suppression_gain_unittest.cc b/modules/audio_processing/aec3/suppression_gain_unittest.cc index 02de706c77..b079186e92 100644 --- a/modules/audio_processing/aec3/suppression_gain_unittest.cc +++ b/modules/audio_processing/aec3/suppression_gain_unittest.cc @@ -10,6 +10,8 @@ #include "modules/audio_processing/aec3/suppression_gain.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "modules/audio_processing/aec3/aec_state.h" #include "modules/audio_processing/aec3/render_delay_buffer.h" #include "modules/audio_processing/aec3/subtractor.h" @@ -42,7 +44,7 @@ TEST(SuppressionGainDeathTest, NullOutputGains) { Y.im.fill(0.0f); float high_bands_gain; - AecState aec_state(EchoCanceller3Config{}, 1); + AecState aec_state(CreateEnvironment(), EchoCanceller3Config{}, 1); EXPECT_DEATH( SuppressionGain(EchoCanceller3Config{}, DetectOptimization(), 16000, 1) .GetGain(E2, S2, R2, R2_unbounded, N2, @@ -74,14 +76,15 @@ TEST(SuppressionGain, BasicGainComputation) { std::array g; std::vector output(kNumCaptureChannels); Block x(kNumBands, kNumRenderChannels); + const Environment env = CreateEnvironment(); EchoCanceller3Config config; - AecState aec_state(config, kNumCaptureChannels); + AecState aec_state(env, config, kNumCaptureChannels); ApmDataDumper data_dumper(42); - Subtractor subtractor(config, kNumRenderChannels, kNumCaptureChannels, + Subtractor subtractor(env, config, kNumRenderChannels, kNumCaptureChannels, &data_dumper, DetectOptimization()); std::unique_ptr render_delay_buffer( RenderDelayBuffer::Create(config, kSampleRateHz, kNumRenderChannels)); - absl::optional delay_estimate; + std::optional delay_estimate; // Ensure that a strong noise is detected to mask any echoes. 
for (size_t ch = 0; ch < kNumCaptureChannels; ++ch) { diff --git a/modules/audio_processing/aec3/transparent_mode.cc b/modules/audio_processing/aec3/transparent_mode.cc index 489f53f4f1..1349d0d342 100644 --- a/modules/audio_processing/aec3/transparent_mode.cc +++ b/modules/audio_processing/aec3/transparent_mode.cc @@ -10,22 +10,24 @@ #include "modules/audio_processing/aec3/transparent_mode.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { constexpr size_t kBlocksSinceConvergencedFilterInit = 10000; constexpr size_t kBlocksSinceConsistentEstimateInit = 10000; +constexpr float kInitialTransparentStateProbability = 0.2f; -bool DeactivateTransparentMode() { - return field_trial::IsEnabled("WebRTC-Aec3TransparentModeKillSwitch"); +bool DeactivateTransparentMode(const FieldTrialsView& field_trials) { + return field_trials.IsEnabled("WebRTC-Aec3TransparentModeKillSwitch"); } -bool ActivateTransparentModeHmm() { - return field_trial::IsEnabled("WebRTC-Aec3TransparentModeHmm"); +bool ActivateTransparentModeHmm(const FieldTrialsView& field_trials) { + return field_trials.IsEnabled("WebRTC-Aec3TransparentModeHmm"); } } // namespace @@ -41,16 +43,16 @@ class TransparentModeImpl : public TransparentMode { transparency_activated_ = false; // The estimated probability of being transparent mode. - prob_transparent_state_ = 0.f; + prob_transparent_state_ = kInitialTransparentStateProbability; } - void Update(int filter_delay_blocks, - bool any_filter_consistent, - bool any_filter_converged, + void Update(int /* filter_delay_blocks */, + bool /* any_filter_consistent */, + bool /* any_filter_converged */, bool any_coarse_filter_converged, - bool all_filters_diverged, + bool /* all_filters_diverged */, bool active_render, - bool saturated_capture) override { + bool /* saturated_capture */) override { // The classifier is implemented as a Hidden Markov Model (HMM) with two // hidden states: "normal" and "transparent". The estimated probabilities of // the two states are updated by observing filter convergence during active @@ -118,7 +120,7 @@ class TransparentModeImpl : public TransparentMode { private: bool transparency_activated_ = false; - float prob_transparent_state_ = 0.f; + float prob_transparent_state_ = kInitialTransparentStateProbability; }; // Legacy classifier for toggling transparent mode. 
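The TransparentModeImpl hunk above documents a two-state HMM ("normal" vs. "transparent") and now seeds the transparent-state probability with kInitialTransparentStateProbability = 0.2f instead of 0. The sketch below only illustrates the general shape of a two-state Bayesian update; the leak factor and likelihood values are invented for the example and are not taken from the patch:

// p_transparent: current probability of the "transparent" state.
// coarse_filter_converged: observation made during active render.
inline float UpdateTransparentProbabilitySketch(float p_transparent,
                                                bool coarse_filter_converged) {
  // Crude stand-in for the HMM transition step: leak slightly toward the
  // initial prior of 0.2 (leak factor is invented).
  constexpr float kLeak = 0.001f;
  p_transparent = (1.f - kLeak) * p_transparent + kLeak * 0.2f;

  // Filter convergence is evidence of echo, i.e. against transparency
  // (likelihood values are invented).
  const float likelihood_transparent = coarse_filter_converged ? 0.01f : 0.5f;
  const float likelihood_normal = coarse_filter_converged ? 0.99f : 0.5f;

  const float numerator = likelihood_transparent * p_transparent;
  const float denominator =
      numerator + likelihood_normal * (1.f - p_transparent);
  return denominator > 0.f ? numerator / denominator : p_transparent;
}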
@@ -144,7 +146,7 @@ class LegacyTransparentModeImpl : public TransparentMode { void Update(int filter_delay_blocks, bool any_filter_consistent, bool any_filter_converged, - bool any_coarse_filter_converged, + bool /* any_coarse_filter_converged */, bool all_filters_diverged, bool active_render, bool saturated_capture) override { @@ -227,12 +229,14 @@ class LegacyTransparentModeImpl : public TransparentMode { }; std::unique_ptr TransparentMode::Create( + const Environment& env, const EchoCanceller3Config& config) { - if (config.ep_strength.bounded_erl || DeactivateTransparentMode()) { + if (config.ep_strength.bounded_erl || + DeactivateTransparentMode(env.field_trials())) { RTC_LOG(LS_INFO) << "AEC3 Transparent Mode: Disabled"; return nullptr; } - if (ActivateTransparentModeHmm()) { + if (ActivateTransparentModeHmm(env.field_trials())) { RTC_LOG(LS_INFO) << "AEC3 Transparent Mode: HMM"; return std::make_unique(); } diff --git a/modules/audio_processing/aec3/transparent_mode.h b/modules/audio_processing/aec3/transparent_mode.h index bc5dd0391b..7b6148f998 100644 --- a/modules/audio_processing/aec3/transparent_mode.h +++ b/modules/audio_processing/aec3/transparent_mode.h @@ -14,6 +14,7 @@ #include #include "api/audio/echo_canceller3_config.h" +#include "api/environment/environment.h" #include "modules/audio_processing/aec3/aec3_common.h" namespace webrtc { @@ -23,6 +24,7 @@ namespace webrtc { class TransparentMode { public: static std::unique_ptr Create( + const Environment& env, const EchoCanceller3Config& config); virtual ~TransparentMode() {} diff --git a/modules/audio_processing/aec3/vector_math.h b/modules/audio_processing/aec3/vector_math.h index e4d1381ae1..e6ccf6e803 100644 --- a/modules/audio_processing/aec3/vector_math.h +++ b/modules/audio_processing/aec3/vector_math.h @@ -40,8 +40,8 @@ class VectorMath { : optimization_(optimization) {} // Elementwise square root. - void SqrtAVX2(rtc::ArrayView x); - void Sqrt(rtc::ArrayView x) { + void SqrtAVX2(ArrayView x); + void Sqrt(ArrayView x) { switch (optimization_) { #if defined(WEBRTC_ARCH_X86_FAMILY) case Aec3Optimization::kSse2: { @@ -114,12 +114,12 @@ class VectorMath { } // Elementwise vector multiplication z = x * y. - void MultiplyAVX2(rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView z); - void Multiply(rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView z) { + void MultiplyAVX2(ArrayView x, + ArrayView y, + ArrayView z); + void Multiply(ArrayView x, + ArrayView y, + ArrayView z) { RTC_DCHECK_EQ(z.size(), x.size()); RTC_DCHECK_EQ(z.size(), y.size()); switch (optimization_) { @@ -169,8 +169,8 @@ class VectorMath { } // Elementwise vector accumulation z += x. - void AccumulateAVX2(rtc::ArrayView x, rtc::ArrayView z); - void Accumulate(rtc::ArrayView x, rtc::ArrayView z) { + void AccumulateAVX2(ArrayView x, ArrayView z); + void Accumulate(ArrayView x, ArrayView z) { RTC_DCHECK_EQ(z.size(), x.size()); switch (optimization_) { #if defined(WEBRTC_ARCH_X86_FAMILY) diff --git a/modules/audio_processing/aec3/vector_math_avx2.cc b/modules/audio_processing/aec3/vector_math_avx2.cc index a9805daf88..63b9c1e455 100644 --- a/modules/audio_processing/aec3/vector_math_avx2.cc +++ b/modules/audio_processing/aec3/vector_math_avx2.cc @@ -19,7 +19,7 @@ namespace webrtc { namespace aec3 { // Elementwise square root. 
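VectorMath::SqrtAVX2 below processes eight floats per 256-bit register (vector_limit = x_size >> 3) and handles the remainder with scalar code. A simplified, self-contained version of that split-loop pattern:

#include <immintrin.h>

#include <cmath>
#include <cstddef>

inline void SqrtAvx2Sketch(float* x, size_t size) {
  const size_t vector_limit = size >> 3;  // Eight floats per AVX2 register.
  size_t i = 0;
  for (size_t v = 0; v < vector_limit; ++v, i += 8) {
    const __m256 values = _mm256_loadu_ps(x + i);
    _mm256_storeu_ps(x + i, _mm256_sqrt_ps(values));
  }
  for (; i < size; ++i) {
    x[i] = std::sqrt(x[i]);  // Scalar tail.
  }
}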
-void VectorMath::SqrtAVX2(rtc::ArrayView x) { +void VectorMath::SqrtAVX2(ArrayView x) { const int x_size = static_cast(x.size()); const int vector_limit = x_size >> 3; @@ -36,9 +36,9 @@ void VectorMath::SqrtAVX2(rtc::ArrayView x) { } // Elementwise vector multiplication z = x * y. -void VectorMath::MultiplyAVX2(rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView z) { +void VectorMath::MultiplyAVX2(ArrayView x, + ArrayView y, + ArrayView z) { RTC_DCHECK_EQ(z.size(), x.size()); RTC_DCHECK_EQ(z.size(), y.size()); const int x_size = static_cast(x.size()); @@ -58,8 +58,7 @@ void VectorMath::MultiplyAVX2(rtc::ArrayView x, } // Elementwise vector accumulation z += x. -void VectorMath::AccumulateAVX2(rtc::ArrayView x, - rtc::ArrayView z) { +void VectorMath::AccumulateAVX2(ArrayView x, ArrayView z) { RTC_DCHECK_EQ(z.size(), x.size()); const int x_size = static_cast(x.size()); const int vector_limit = x_size >> 3; diff --git a/modules/audio_processing/aec_dump/BUILD.gn b/modules/audio_processing/aec_dump/BUILD.gn index 38d8776258..5fb29422d8 100644 --- a/modules/audio_processing/aec_dump/BUILD.gn +++ b/modules/audio_processing/aec_dump/BUILD.gn @@ -14,10 +14,12 @@ rtc_source_set("aec_dump") { deps = [ "..:aec_dump_interface", + "../../../api/task_queue", "../../../rtc_base/system:file_wrapper", "../../../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/base:nullability", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } if (rtc_include_tests) { @@ -43,9 +45,11 @@ if (rtc_include_tests) { deps = [ ":mock_aec_dump", - "..:api", "..:audioproc_test_utils", "../", + "../../../api/audio:audio_processing", + "../../../api/audio:builtin_audio_processing_builder", + "../../../api/environment:environment_factory", "//testing/gtest", ] } @@ -66,17 +70,16 @@ if (rtc_enable_protobuf) { "../../../api/audio:audio_frame_api", "../../../api/task_queue", "../../../rtc_base:checks", - "../../../rtc_base:ignore_wundef", "../../../rtc_base:logging", "../../../rtc_base:macromagic", "../../../rtc_base:protobuf_utils", "../../../rtc_base:race_checker", "../../../rtc_base:rtc_event", - "../../../rtc_base:rtc_task_queue", "../../../rtc_base/system:file_wrapper", "../../../system_wrappers", + "//third_party/abseil-cpp/absl/base:nullability", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] deps += [ "../:audioproc_debug_proto" ] } @@ -107,6 +110,8 @@ rtc_library("null_aec_dump_factory") { deps = [ ":aec_dump", "..:aec_dump_interface", + "../../../api/task_queue", + "//third_party/abseil-cpp/absl/base:nullability", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } diff --git a/modules/audio_processing/aec_dump/aec_dump_factory.h b/modules/audio_processing/aec_dump/aec_dump_factory.h index 20718c3d7f..9a68691f56 100644 --- a/modules/audio_processing/aec_dump/aec_dump_factory.h +++ b/modules/audio_processing/aec_dump/aec_dump_factory.h @@ -13,34 +13,34 @@ #include +#include "absl/base/nullability.h" #include "absl/strings/string_view.h" +#include "api/task_queue/task_queue_base.h" #include "modules/audio_processing/include/aec_dump.h" #include "rtc_base/system/file_wrapper.h" #include "rtc_base/system/rtc_export.h" -namespace rtc { -class TaskQueue; -} // namespace rtc - namespace webrtc { class RTC_EXPORT AecDumpFactory { public: - // The `worker_queue` may not be null and must outlive the created - // AecDump 
instance. `max_log_size_bytes == -1` means the log size - // will be unlimited. `handle` may not be null. The AecDump takes - // responsibility for `handle` and closes it in the destructor. A - // non-null return value indicates that the file has been + // The `worker_queue` must outlive the created AecDump instance. + // `max_log_size_bytes == -1` means the log size will be unlimited. + // The AecDump takes responsibility for `handle` and closes it in the + // destructor. A non-null return value indicates that the file has been // sucessfully opened. - static std::unique_ptr Create(webrtc::FileWrapper file, - int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue); - static std::unique_ptr Create(absl::string_view file_name, - int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue); - static std::unique_ptr Create(FILE* handle, - int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue); + static absl_nullable std::unique_ptr Create( + FileWrapper file, + int64_t max_log_size_bytes, + TaskQueueBase* absl_nonnull worker_queue); + static absl_nullable std::unique_ptr Create( + absl::string_view file_name, + int64_t max_log_size_bytes, + TaskQueueBase* absl_nonnull worker_queue); + static absl_nullable std::unique_ptr Create( + FILE* absl_nonnull handle, + int64_t max_log_size_bytes, + TaskQueueBase* absl_nonnull worker_queue); }; } // namespace webrtc diff --git a/modules/audio_processing/aec_dump/aec_dump_impl.cc b/modules/audio_processing/aec_dump/aec_dump_impl.cc index 94c24048e0..0c196506fd 100644 --- a/modules/audio_processing/aec_dump/aec_dump_impl.cc +++ b/modules/audio_processing/aec_dump/aec_dump_impl.cc @@ -13,11 +13,12 @@ #include #include +#include "absl/base/nullability.h" #include "absl/strings/string_view.h" +#include "api/task_queue/task_queue_base.h" #include "modules/audio_processing/aec_dump/aec_dump_factory.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" -#include "rtc_base/task_queue.h" namespace webrtc { @@ -59,18 +60,18 @@ void CopyFromConfigToEvent(const webrtc::InternalAPMConfig& config, AecDumpImpl::AecDumpImpl(FileWrapper debug_file, int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue) + TaskQueueBase* absl_nonnull worker_queue) : debug_file_(std::move(debug_file)), num_bytes_left_for_log_(max_log_size_bytes), worker_queue_(worker_queue) {} AecDumpImpl::~AecDumpImpl() { // Block until all tasks have finished running. - rtc::Event thread_sync_event; + Event thread_sync_event; worker_queue_->PostTask([&thread_sync_event] { thread_sync_event.Set(); }); // Wait until the event has been signaled with .Set(). By then all // pending tasks will have finished. 
- thread_sync_event.Wait(rtc::Event::kForever); + thread_sync_event.Wait(Event::kForever); } void AecDumpImpl::WriteInitMessage(const ProcessingConfig& api_format, @@ -254,9 +255,10 @@ void AecDumpImpl::PostWriteToFileTask(std::unique_ptr event) { }); } -std::unique_ptr AecDumpFactory::Create(webrtc::FileWrapper file, - int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue) { +absl_nullable std::unique_ptr AecDumpFactory::Create( + FileWrapper file, + int64_t max_log_size_bytes, + TaskQueueBase* absl_nonnull worker_queue) { RTC_DCHECK(worker_queue); if (!file.is_open()) return nullptr; @@ -265,16 +267,18 @@ std::unique_ptr AecDumpFactory::Create(webrtc::FileWrapper file, worker_queue); } -std::unique_ptr AecDumpFactory::Create(absl::string_view file_name, - int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue) { +absl_nullable std::unique_ptr AecDumpFactory::Create( + absl::string_view file_name, + int64_t max_log_size_bytes, + TaskQueueBase* absl_nonnull worker_queue) { return Create(FileWrapper::OpenWriteOnly(file_name), max_log_size_bytes, worker_queue); } -std::unique_ptr AecDumpFactory::Create(FILE* handle, - int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue) { +absl_nullable std::unique_ptr AecDumpFactory::Create( + FILE* absl_nonnull handle, + int64_t max_log_size_bytes, + TaskQueueBase* absl_nonnull worker_queue) { return Create(FileWrapper(handle), max_log_size_bytes, worker_queue); } diff --git a/modules/audio_processing/aec_dump/aec_dump_impl.h b/modules/audio_processing/aec_dump/aec_dump_impl.h index fac3712b7a..7638758dee 100644 --- a/modules/audio_processing/aec_dump/aec_dump_impl.h +++ b/modules/audio_processing/aec_dump/aec_dump_impl.h @@ -15,22 +15,19 @@ #include #include +#include "api/task_queue/task_queue_base.h" #include "modules/audio_processing/aec_dump/capture_stream_info.h" #include "modules/audio_processing/include/aec_dump.h" -#include "rtc_base/ignore_wundef.h" #include "rtc_base/race_checker.h" #include "rtc_base/system/file_wrapper.h" -#include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" // Files generated at build-time by the protobuf compiler. -RTC_PUSH_IGNORING_WUNDEF() #ifdef WEBRTC_ANDROID_PLATFORM_BUILD #include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h" #else #include "modules/audio_processing/debug.pb.h" #endif -RTC_POP_IGNORING_WUNDEF() namespace webrtc { @@ -42,7 +39,7 @@ class AecDumpImpl : public AecDump { // `max_log_size_bytes == -1` means the log size will be unlimited. 
AecDumpImpl(FileWrapper debug_file, int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue); + TaskQueueBase* absl_nonnull worker_queue); AecDumpImpl(const AecDumpImpl&) = delete; AecDumpImpl& operator=(const AecDumpImpl&) = delete; ~AecDumpImpl() override; @@ -76,8 +73,8 @@ class AecDumpImpl : public AecDump { FileWrapper debug_file_; int64_t num_bytes_left_for_log_ = 0; - rtc::RaceChecker race_checker_; - rtc::TaskQueue* worker_queue_; + RaceChecker race_checker_; + TaskQueueBase* absl_nonnull worker_queue_; CaptureStreamInfo capture_stream_info_; }; } // namespace webrtc diff --git a/modules/audio_processing/aec_dump/aec_dump_integration_test.cc b/modules/audio_processing/aec_dump/aec_dump_integration_test.cc index 503135d87f..5c23c9678f 100644 --- a/modules/audio_processing/aec_dump/aec_dump_integration_test.cc +++ b/modules/audio_processing/aec_dump/aec_dump_integration_test.cc @@ -12,10 +12,11 @@ #include #include +#include "api/audio/audio_processing.h" +#include "api/audio/builtin_audio_processing_builder.h" +#include "api/environment/environment_factory.h" #include "modules/audio_processing/aec_dump/mock_aec_dump.h" #include "modules/audio_processing/audio_processing_impl.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "modules/audio_processing/test/audio_processing_builder_for_testing.h" using ::testing::_; using ::testing::AtLeast; @@ -23,9 +24,10 @@ using ::testing::Exactly; using ::testing::StrictMock; namespace { -rtc::scoped_refptr CreateAudioProcessing() { - rtc::scoped_refptr apm( - webrtc::AudioProcessingBuilderForTesting().Create()); +webrtc::scoped_refptr CreateAudioProcessing() { + webrtc::scoped_refptr apm = + webrtc::BuiltinAudioProcessingBuilder().Build( + webrtc::CreateEnvironment()); RTC_DCHECK(apm); return apm; } diff --git a/modules/audio_processing/aec_dump/aec_dump_unittest.cc b/modules/audio_processing/aec_dump/aec_dump_unittest.cc index 62f896fe14..2a8110c4fc 100644 --- a/modules/audio_processing/aec_dump/aec_dump_unittest.cc +++ b/modules/audio_processing/aec_dump/aec_dump_unittest.cc @@ -28,7 +28,7 @@ TEST(AecDumper, APICallsDoNotCrash) { { std::unique_ptr aec_dump = - webrtc::AecDumpFactory::Create(filename, -1, &file_writer_queue); + webrtc::AecDumpFactory::Create(filename, -1, file_writer_queue.Get()); constexpr int kNumChannels = 1; constexpr int kNumSamplesPerChannel = 160; @@ -63,7 +63,7 @@ TEST(AecDumper, WriteToFile) { { std::unique_ptr aec_dump = - webrtc::AecDumpFactory::Create(filename, -1, &file_writer_queue); + webrtc::AecDumpFactory::Create(filename, -1, file_writer_queue.Get()); constexpr int kNumChannels = 1; constexpr int kNumSamplesPerChannel = 160; diff --git a/modules/audio_processing/aec_dump/capture_stream_info.h b/modules/audio_processing/aec_dump/capture_stream_info.h index 0819bbcb23..572990c150 100644 --- a/modules/audio_processing/aec_dump/capture_stream_info.h +++ b/modules/audio_processing/aec_dump/capture_stream_info.h @@ -15,16 +15,13 @@ #include #include "modules/audio_processing/include/aec_dump.h" -#include "rtc_base/ignore_wundef.h" // Files generated at build-time by the protobuf compiler. 
-RTC_PUSH_IGNORING_WUNDEF() #ifdef WEBRTC_ANDROID_PLATFORM_BUILD #include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h" #else #include "modules/audio_processing/debug.pb.h" #endif -RTC_POP_IGNORING_WUNDEF() namespace webrtc { diff --git a/modules/audio_processing/aec_dump/null_aec_dump_factory.cc b/modules/audio_processing/aec_dump/null_aec_dump_factory.cc index 9bd9745069..9ab5d140e2 100644 --- a/modules/audio_processing/aec_dump/null_aec_dump_factory.cc +++ b/modules/audio_processing/aec_dump/null_aec_dump_factory.cc @@ -8,27 +8,32 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include "absl/base/nullability.h" #include "absl/strings/string_view.h" +#include "api/task_queue/task_queue_base.h" #include "modules/audio_processing/aec_dump/aec_dump_factory.h" #include "modules/audio_processing/include/aec_dump.h" namespace webrtc { -std::unique_ptr AecDumpFactory::Create(webrtc::FileWrapper file, - int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue) { +absl_nullable std::unique_ptr AecDumpFactory::Create( + FileWrapper file, + int64_t max_log_size_bytes, + TaskQueueBase* absl_nonnull worker_queue) { return nullptr; } -std::unique_ptr AecDumpFactory::Create(absl::string_view file_name, - int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue) { +absl_nullable std::unique_ptr AecDumpFactory::Create( + absl::string_view file_name, + int64_t max_log_size_bytes, + TaskQueueBase* absl_nonnull worker_queue) { return nullptr; } -std::unique_ptr AecDumpFactory::Create(FILE* handle, - int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue) { +absl_nullable std::unique_ptr AecDumpFactory::Create( + FILE* absl_nonnull handle, + int64_t max_log_size_bytes, + TaskQueueBase* absl_nonnull worker_queue) { return nullptr; } } // namespace webrtc diff --git a/modules/audio_processing/aecm/aecm_core.cc b/modules/audio_processing/aecm/aecm_core.cc index b4631460ca..7d3cd9df55 100644 --- a/modules/audio_processing/aecm/aecm_core.cc +++ b/modules/audio_processing/aecm/aecm_core.cc @@ -854,7 +854,7 @@ void WebRtcAecm_UpdateChannel(AecmCore* aecm, // right shift of 32 is undefined. To avoid that, we // do this check. tmpU32no1 = - rtc::dchecked_cast( + dchecked_cast( shiftChFar >= 32 ? 
0 : aecm->channelAdapt32[i] >> shiftChFar) * far_spectrum[i]; } diff --git a/modules/audio_processing/aecm/aecm_core_c.cc b/modules/audio_processing/aecm/aecm_core_c.cc index 59e0296bbf..1d750da3fe 100644 --- a/modules/audio_processing/aecm/aecm_core_c.cc +++ b/modules/audio_processing/aecm/aecm_core_c.cc @@ -513,8 +513,7 @@ int RTC_NO_SANITIZE("signed-integer-overflow") // bugs.webrtc.org/8200 // Far end signal through channel estimate in Q8 // How much can we shift right to preserve resolution tmp32no1 = echoEst32[i] - aecm->echoFilt[i]; - aecm->echoFilt[i] += - rtc::dchecked_cast((int64_t{tmp32no1} * 50) >> 8); + aecm->echoFilt[i] += dchecked_cast((int64_t{tmp32no1} * 50) >> 8); zeros32 = WebRtcSpl_NormW32(aecm->echoFilt[i]) + 1; zeros16 = WebRtcSpl_NormW16(supGain) + 1; diff --git a/modules/audio_processing/aecm/aecm_core_mips.cc b/modules/audio_processing/aecm/aecm_core_mips.cc index 16b03cfe51..819737d861 100644 --- a/modules/audio_processing/aecm/aecm_core_mips.cc +++ b/modules/audio_processing/aecm/aecm_core_mips.cc @@ -1079,7 +1079,7 @@ int WebRtcAecm_ProcessBlock(AecmCore* aecm, // How much can we shift right to preserve resolution tmp32no1 = echoEst32[i] - aecm->echoFilt[i]; aecm->echoFilt[i] += - rtc::dchecked_cast((int64_t{tmp32no1} * 50) >> 8); + webrtc::dchecked_cast((int64_t{tmp32no1} * 50) >> 8); zeros32 = WebRtcSpl_NormW32(aecm->echoFilt[i]) + 1; zeros16 = WebRtcSpl_NormW16(supGain) + 1; diff --git a/modules/audio_processing/agc/BUILD.gn b/modules/audio_processing/agc/BUILD.gn index 508f901b08..754b807879 100644 --- a/modules/audio_processing/agc/BUILD.gn +++ b/modules/audio_processing/agc/BUILD.gn @@ -21,11 +21,13 @@ rtc_library("agc") { deps = [ ":gain_control_interface", ":level_estimation", - "..:api", "..:apm_logging", "..:audio_buffer", "..:audio_frame_view", "../../../api:array_view", + "../../../api:field_trials_view", + "../../../api/audio:audio_processing", + "../../../api/environment", "../../../common_audio", "../../../common_audio:common_audio_c", "../../../rtc_base:checks", @@ -39,7 +41,6 @@ rtc_library("agc") { "../agc2:input_volume_stats_reporter", "../vad", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("level_estimation") { @@ -107,20 +108,19 @@ if (rtc_include_tests) { ":level_estimation", "..:mocks", "../../../api:array_view", + "../../../api:field_trials", + "../../../api/environment", + "../../../api/environment:environment_factory", "../../../rtc_base:checks", "../../../rtc_base:random", "../../../rtc_base:safe_conversions", "../../../rtc_base:safe_minmax", "../../../rtc_base:stringutils", "../../../system_wrappers:metrics", - "../../../test:field_trial", "../../../test:fileutils", "../../../test:test_support", "//testing/gtest", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } } diff --git a/modules/audio_processing/agc/agc.cc b/modules/audio_processing/agc/agc.cc index a018ff9f93..f172b03a5a 100644 --- a/modules/audio_processing/agc/agc.cc +++ b/modules/audio_processing/agc/agc.cc @@ -37,7 +37,7 @@ Agc::Agc() Agc::~Agc() = default; -void Agc::Process(rtc::ArrayView audio) { +void Agc::Process(ArrayView audio) { const int sample_rate_hz = audio.size() * kNum10msFramesInOneSecond; RTC_DCHECK_LE(sample_rate_hz, kMaxSampleRateHz); vad_.ProcessChunk(audio.data(), audio.size(), sample_rate_hz); diff --git a/modules/audio_processing/agc/agc.h b/modules/audio_processing/agc/agc.h index 
da42808225..65afbf07bc 100644 --- a/modules/audio_processing/agc/agc.h +++ b/modules/audio_processing/agc/agc.h @@ -27,7 +27,7 @@ class Agc { // `audio` must be mono; in a multi-channel stream, provide the first (usually // left) channel. - virtual void Process(rtc::ArrayView audio); + virtual void Process(ArrayView audio); // Retrieves the difference between the target RMS level and the current // signal RMS level in dB. Returns true if an update is available and false diff --git a/modules/audio_processing/agc/agc_manager_direct.cc b/modules/audio_processing/agc/agc_manager_direct.cc index b8ad4a8bb9..5fc12c0439 100644 --- a/modules/audio_processing/agc/agc_manager_direct.cc +++ b/modules/audio_processing/agc/agc_manager_direct.cc @@ -14,6 +14,8 @@ #include #include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" #include "common_audio/include/audio_util.h" #include "modules/audio_processing/agc/gain_control.h" #include "modules/audio_processing/agc2/gain_map_internal.h" @@ -22,7 +24,6 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_minmax.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" namespace webrtc { @@ -69,14 +70,13 @@ using AnalogAgcConfig = // string. Returns an unspecified value if the field trial is not specified, if // disabled or if it cannot be parsed. Example: // 'WebRTC-Audio-2ndAgcMinMicLevelExperiment/Enabled-80' => returns 80. -absl::optional GetMinMicLevelOverride() { +std::optional GetMinMicLevelOverride(const FieldTrialsView& field_trials) { constexpr char kMinMicLevelFieldTrial[] = "WebRTC-Audio-2ndAgcMinMicLevelExperiment"; - if (!webrtc::field_trial::IsEnabled(kMinMicLevelFieldTrial)) { - return absl::nullopt; + if (!field_trials.IsEnabled(kMinMicLevelFieldTrial)) { + return std::nullopt; } - const auto field_trial_string = - webrtc::field_trial::FindFullName(kMinMicLevelFieldTrial); + const auto field_trial_string = field_trials.Lookup(kMinMicLevelFieldTrial); int min_mic_level = -1; sscanf(field_trial_string.c_str(), "Enabled-%d", &min_mic_level); if (min_mic_level >= 0 && min_mic_level <= 255) { @@ -84,7 +84,7 @@ absl::optional GetMinMicLevelOverride() { } else { RTC_LOG(LS_WARNING) << "[agc] Invalid parameter for " << kMinMicLevelFieldTrial << ", ignored."; - return absl::nullopt; + return std::nullopt; } } @@ -152,7 +152,7 @@ int GetSpeechLevelErrorDb(float speech_level_dbfs, float speech_probability) { return 0; } - const float speech_level = rtc::SafeClamp( + const float speech_level = SafeClamp( speech_level_dbfs, kMinSpeechLevelDbfs, kMaxSpeechLevelDbfs); return std::round(kOverrideTargetSpeechLevelDbfs - speech_level); @@ -160,7 +160,7 @@ int GetSpeechLevelErrorDb(float speech_level_dbfs, float speech_probability) { } // namespace -MonoAgc::MonoAgc(ApmDataDumper* data_dumper, +MonoAgc::MonoAgc(ApmDataDumper* /* data_dumper */, int clipped_level_min, bool disable_digital_adaptive, int min_mic_level) @@ -188,9 +188,9 @@ void MonoAgc::Initialize() { is_first_frame_ = true; } -void MonoAgc::Process(rtc::ArrayView audio, - absl::optional rms_error_override) { - new_compression_to_set_ = absl::nullopt; +void MonoAgc::Process(ArrayView audio, + std::optional rms_error_override) { + new_compression_to_set_ = std::nullopt; if (check_volume_on_next_process_) { check_volume_on_next_process_ = false; @@ -376,7 +376,7 @@ void MonoAgc::UpdateGain(int rms_error_db) { // Handle as much error as possible with the compressor 
first. int raw_compression = - rtc::SafeClamp(rms_error, kMinCompressionGain, max_compression_gain_); + SafeClamp(rms_error, kMinCompressionGain, max_compression_gain_); // Deemphasize the compression gain error. Move halfway between the current // target and the newly received target. This serves to soften perceptible @@ -397,8 +397,8 @@ void MonoAgc::UpdateGain(int rms_error_db) { // raw rather than deemphasized compression here as we would otherwise // shrink the amount of slack the compressor provides. const int residual_gain = - rtc::SafeClamp(rms_error - raw_compression, -kMaxResidualGainChange, - kMaxResidualGainChange); + SafeClamp(rms_error - raw_compression, -kMaxResidualGainChange, + kMaxResidualGainChange); RTC_DLOG(LS_INFO) << "[agc] rms_error=" << rms_error << ", target_compression=" << target_compression_ << ", residual_gain=" << residual_gain; @@ -447,19 +447,21 @@ void MonoAgc::UpdateCompressor() { std::atomic AgcManagerDirect::instance_counter_(0); AgcManagerDirect::AgcManagerDirect( + const Environment& env, const AudioProcessing::Config::GainController1::AnalogGainController& analog_config, Agc* agc) - : AgcManagerDirect(/*num_capture_channels=*/1, analog_config) { + : AgcManagerDirect(env, /*num_capture_channels=*/1, analog_config) { RTC_DCHECK(channel_agcs_[0]); RTC_DCHECK(agc); channel_agcs_[0]->set_agc(agc); } -AgcManagerDirect::AgcManagerDirect(int num_capture_channels, +AgcManagerDirect::AgcManagerDirect(const Environment& env, + int num_capture_channels, const AnalogAgcConfig& analog_config) : analog_controller_enabled_(analog_config.enabled), - min_mic_level_override_(GetMinMicLevelOverride()), + min_mic_level_override_(GetMinMicLevelOverride(env.field_trials())), data_dumper_(new ApmDataDumper(instance_counter_.fetch_add(1) + 1)), num_capture_channels_(num_capture_channels), disable_digital_adaptive_(!analog_config.enable_digital_adaptive), @@ -617,13 +619,13 @@ void AgcManagerDirect::AnalyzePreProcess(const AudioBuffer& audio_buffer) { } void AgcManagerDirect::Process(const AudioBuffer& audio_buffer) { - Process(audio_buffer, /*speech_probability=*/absl::nullopt, - /*speech_level_dbfs=*/absl::nullopt); + Process(audio_buffer, /*speech_probability=*/std::nullopt, + /*speech_level_dbfs=*/std::nullopt); } void AgcManagerDirect::Process(const AudioBuffer& audio_buffer, - absl::optional speech_probability, - absl::optional speech_level_dbfs) { + std::optional speech_probability, + std::optional speech_level_dbfs) { AggregateChannelLevels(); const int volume_after_clipping_handling = recommended_input_volume_; @@ -632,7 +634,7 @@ void AgcManagerDirect::Process(const AudioBuffer& audio_buffer, } const size_t num_frames_per_band = audio_buffer.num_frames_per_band(); - absl::optional rms_error_override = absl::nullopt; + std::optional rms_error_override = std::nullopt; if (speech_probability.has_value() && speech_level_dbfs.has_value()) { rms_error_override = GetSpeechLevelErrorDb(*speech_level_dbfs, *speech_probability); @@ -656,7 +658,7 @@ void AgcManagerDirect::Process(const AudioBuffer& audio_buffer, } } -absl::optional AgcManagerDirect::GetDigitalComressionGain() { +std::optional AgcManagerDirect::GetDigitalComressionGain() { return new_compressions_to_set_[channel_controlling_gain_]; } diff --git a/modules/audio_processing/agc/agc_manager_direct.h b/modules/audio_processing/agc/agc_manager_direct.h index adb2f5a63f..c3100e5f15 100644 --- a/modules/audio_processing/agc/agc_manager_direct.h +++ b/modules/audio_processing/agc/agc_manager_direct.h @@ -13,13 +13,14 @@ 
#include #include +#include -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/audio/audio_processing.h" +#include "api/environment/environment.h" #include "modules/audio_processing/agc/agc.h" #include "modules/audio_processing/agc2/clipping_predictor.h" #include "modules/audio_processing/audio_buffer.h" -#include "modules/audio_processing/include/audio_processing.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/gtest_prod_util.h" @@ -42,6 +43,7 @@ class AgcManagerDirect final { // passed to `AnalyzePreProcess()` and `Process()`. Clamps // `analog_config.startup_min_level` in the [12, 255] range. AgcManagerDirect( + const Environment& env, int num_capture_channels, const AudioProcessing::Config::GainController1::AnalogGainController& analog_config); @@ -75,8 +77,8 @@ class AgcManagerDirect final { // TODO(webrtc:7494): This signature is needed for testing purposes, unify // the signatures when the clean-up is done. void Process(const AudioBuffer& audio_buffer, - absl::optional speech_probability, - absl::optional speech_level_dbfs); + std::optional speech_probability, + std::optional speech_level_dbfs); // Processes `audio_buffer`. Chooses a digital compression gain and the new // input volume to recommend. Must be called after `AnalyzePreProcess()`. @@ -100,7 +102,7 @@ class AgcManagerDirect final { // If available, returns the latest digital compression gain that has been // chosen. - absl::optional GetDigitalComressionGain(); + std::optional GetDigitalComressionGain(); // Returns true if clipping prediction is enabled. bool clipping_predictor_enabled() const { return !!clipping_predictor_; } @@ -142,6 +144,7 @@ class AgcManagerDirect final { // Ctor that creates a single channel AGC and by injecting `agc`. // `agc` will be owned by this class; hence, do not delete it. AgcManagerDirect( + const Environment& env, const AudioProcessing::Config::GainController1::AnalogGainController& analog_config, Agc* agc); @@ -150,7 +153,7 @@ class AgcManagerDirect final { const bool analog_controller_enabled_; - const absl::optional min_mic_level_override_; + const std::optional min_mic_level_override_; std::unique_ptr data_dumper_; static std::atomic instance_counter_; const int num_capture_channels_; @@ -176,7 +179,7 @@ class AgcManagerDirect final { const int clipped_wait_frames_; std::vector> channel_agcs_; - std::vector> new_compressions_to_set_; + std::vector> new_compressions_to_set_; const std::unique_ptr clipping_predictor_; const bool use_clipping_predictor_step_; @@ -212,17 +215,15 @@ class MonoAgc { // the (digital) compression gain to be applied by `agc_`. Must be called // after `HandleClipping()`. If `rms_error_override` has a value, RMS error // from AGC is overridden by it. - void Process(rtc::ArrayView audio, - absl::optional rms_error_override); + void Process(ArrayView audio, + std::optional rms_error_override); // Returns the recommended input volume. Must be called after `Process()`. int recommended_analog_level() const { return recommended_input_volume_; } float voice_probability() const { return agc_->voice_probability(); } void ActivateLogging() { log_to_histograms_ = true; } - absl::optional new_compression() const { - return new_compression_to_set_; - } + std::optional new_compression() const { return new_compression_to_set_; } // Only used for testing. void set_agc(Agc* agc) { agc_.reset(agc); } @@ -263,7 +264,7 @@ class MonoAgc { // recommended input volume. 
   int recommended_input_volume_ = 0;
 
-  absl::optional<int> new_compression_to_set_;
+  std::optional<int> new_compression_to_set_;
   bool log_to_histograms_ = false;
 
   const int clipped_level_min_;
diff --git a/modules/audio_processing/agc/agc_manager_direct_unittest.cc b/modules/audio_processing/agc/agc_manager_direct_unittest.cc
index 70ac0b5b34..51708df740 100644
--- a/modules/audio_processing/agc/agc_manager_direct_unittest.cc
+++ b/modules/audio_processing/agc/agc_manager_direct_unittest.cc
@@ -15,12 +15,14 @@
 #include
 #include
 
+#include "api/environment/environment.h"
+#include "api/environment/environment_factory.h"
+#include "api/field_trials.h"
 #include "modules/audio_processing/agc/gain_control.h"
 #include "modules/audio_processing/agc/mock_agc.h"
 #include "modules/audio_processing/include/mock_audio_processing.h"
 #include "rtc_base/numerics/safe_minmax.h"
 #include "rtc_base/strings/string_builder.h"
-#include "test/field_trial.h"
 #include "test/gmock.h"
 #include "test/gtest.h"
 #include "test/testsupport/file_utils.h"
@@ -78,24 +80,32 @@ class MockGainControl : public GainControl {
   MOCK_METHOD(bool, stream_is_saturated, (), (const, override));
 };
 
-// TODO(bugs.webrtc.org/12874): Remove and use designated initializers once
-// fixed.
+// Construction parameters that tests may explicitly specify.
+struct AgcManagerDirectTestParams {
+  std::string field_trials;
+  int clipped_level_min = kClippedMin;
+  bool enable_digital_adaptive = false;
+  int clipped_level_step = kClippedLevelStep;
+  float clipped_ratio_threshold = kClippedRatioThreshold;
+  int clipped_wait_frames = kClippedWaitFrames;
+  AnalogAgcConfig::ClippingPredictor clipping_predictor;
+};
+
 std::unique_ptr<AgcManagerDirect> CreateAgcManagerDirect(
-    int startup_min_volume,
-    int clipped_level_step,
-    float clipped_ratio_threshold,
-    int clipped_wait_frames,
-    const ClippingPredictorConfig& clipping_predictor_config =
-        kDefaultAnalogConfig.clipping_predictor) {
-  AnalogAgcConfig config;
-  config.startup_min_volume = startup_min_volume;
-  config.clipped_level_min = kClippedMin;
-  config.enable_digital_adaptive = false;
-  config.clipped_level_step = clipped_level_step;
-  config.clipped_ratio_threshold = clipped_ratio_threshold;
-  config.clipped_wait_frames = clipped_wait_frames;
-  config.clipping_predictor = clipping_predictor_config;
-  return std::make_unique<AgcManagerDirect>(/*num_capture_channels=*/1, config);
+    AgcManagerDirectTestParams p = {}) {
+  auto manager = std::make_unique<AgcManagerDirect>(
+      CreateEnvironment(FieldTrials::CreateNoGlobal(p.field_trials)),
+      kNumChannels,
+      AnalogAgcConfig{.startup_min_volume = kInitialInputVolume,
+                      .clipped_level_min = p.clipped_level_min,
+                      .enable_digital_adaptive = p.enable_digital_adaptive,
+                      .clipped_level_step = p.clipped_level_step,
+                      .clipped_ratio_threshold = p.clipped_ratio_threshold,
+                      .clipped_wait_frames = p.clipped_wait_frames,
+                      .clipping_predictor = p.clipping_predictor});
+  manager->Initialize();
+  manager->set_stream_analog_level(kInitialInputVolume);
+  return manager;
 }
 
 // Deprecated.
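For illustration only (this sketch is not part of the patch), a test that needs non-default clipping parameters can now rely on the designated-initializer helper above instead of the old positional arguments; the values and expectations mirror the ClippingParametersVerified test further down in this file:

    // Only the parameters of interest are overridden; everything else keeps
    // the defaults declared in AgcManagerDirectTestParams.
    std::unique_ptr<AgcManagerDirect> manager_custom =
        CreateAgcManagerDirect({.clipped_level_step = 10,
                                .clipped_ratio_threshold = 0.2f,
                                .clipped_wait_frames = 50});
    EXPECT_EQ(manager_custom->clipped_level_step_, 10);
    EXPECT_EQ(manager_custom->clipped_ratio_threshold_, 0.2f);
    EXPECT_EQ(manager_custom->clipped_wait_frames_, 50);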
@@ -142,7 +152,7 @@ constexpr char kMinMicLevelFieldTrial[] = std::string GetAgcMinMicLevelExperimentFieldTrial(const std::string& value) { char field_trial_buffer[64]; - rtc::SimpleStringBuilder builder(field_trial_buffer); + SimpleStringBuilder builder(field_trial_buffer); builder << kMinMicLevelFieldTrial << "/" << value << "/"; return builder.str(); } @@ -153,14 +163,14 @@ std::string GetAgcMinMicLevelExperimentFieldTrialEnabled( RTC_DCHECK_GE(enabled_value, 0); RTC_DCHECK_LE(enabled_value, 255); char field_trial_buffer[64]; - rtc::SimpleStringBuilder builder(field_trial_buffer); + SimpleStringBuilder builder(field_trial_buffer); builder << kMinMicLevelFieldTrial << "/Enabled-" << enabled_value << suffix << "/"; return builder.str(); } std::string GetAgcMinMicLevelExperimentFieldTrial( - absl::optional min_mic_level) { + std::optional min_mic_level) { if (min_mic_level.has_value()) { return GetAgcMinMicLevelExperimentFieldTrialEnabled(*min_mic_level); } @@ -196,8 +206,8 @@ void WriteAudioBufferSamples(float samples_value, // `AgcManagerDirectTestHelper::CallAgcSequence()` instead. void CallPreProcessAndProcess(int num_calls, const AudioBuffer& audio_buffer, - absl::optional speech_probability_override, - absl::optional speech_level_override, + std::optional speech_probability_override, + std::optional speech_level_override, AgcManagerDirect& manager) { for (int n = 0; n < num_calls; ++n) { manager.AnalyzePreProcess(audio_buffer); @@ -248,8 +258,8 @@ class SpeechSamplesReader { // Apply gain and copy samples into `audio_buffer_`. std::transform(buffer_.begin(), buffer_.end(), audio_buffer_.channels()[0], [gain](int16_t v) -> float { - return rtc::SafeClamp(static_cast(v) * gain, - kMinSample, kMaxSample); + return SafeClamp(static_cast(v) * gain, + kMinSample, kMaxSample); }); agc.AnalyzePreProcess(audio_buffer_); @@ -266,8 +276,8 @@ class SpeechSamplesReader { // have a value. void Feed(int num_frames, int gain_db, - absl::optional speech_probability_override, - absl::optional speech_level_override, + std::optional speech_probability_override, + std::optional speech_level_override, AgcManagerDirect& agc) { float gain = std::pow(10.0f, gain_db / 20.0f); // From dB to linear gain. is_.seekg(0, is_.beg); // Start from the beginning of the PCM file. @@ -282,8 +292,8 @@ class SpeechSamplesReader { // Apply gain and copy samples into `audio_buffer_`. std::transform(buffer_.begin(), buffer_.end(), audio_buffer_.channels()[0], [gain](int16_t v) -> float { - return rtc::SafeClamp(static_cast(v) * gain, - kMinSample, kMaxSample); + return SafeClamp(static_cast(v) * gain, + kMinSample, kMaxSample); }); agc.AnalyzePreProcess(audio_buffer_); @@ -329,7 +339,7 @@ constexpr AnalogAgcConfig GetDisabledAnalogAgcConfig() { class AgcManagerDirectTestHelper { public: // Ctor. Initializes `audio_buffer` with zeros. - AgcManagerDirectTestHelper() + explicit AgcManagerDirectTestHelper(const Environment& env) : audio_buffer(kSampleRateHz, kNumChannels, kSampleRateHz, @@ -337,7 +347,7 @@ class AgcManagerDirectTestHelper { kSampleRateHz, kNumChannels), mock_agc(new ::testing::NiceMock()), - manager(GetAnalogAgcTestConfig(), mock_agc) { + manager(env, GetAnalogAgcTestConfig(), mock_agc) { manager.Initialize(); manager.SetupDigitalGainControl(mock_gain_control); WriteAudioBufferSamples(/*samples_value=*/0.0f, /*clipped_ratio=*/0.0f, @@ -353,13 +363,13 @@ class AgcManagerDirectTestHelper { // AGC is replaced by an override value if `speech_probability_override` // and `speech_level_override` have a value. 
int CallAgcSequence(int applied_input_volume, - absl::optional speech_probability_override, - absl::optional speech_level_override) { + std::optional speech_probability_override, + std::optional speech_level_override) { manager.set_stream_analog_level(applied_input_volume); manager.AnalyzePreProcess(audio_buffer); manager.Process(audio_buffer, speech_probability_override, speech_level_override); - absl::optional digital_gain = manager.GetDigitalComressionGain(); + std::optional digital_gain = manager.GetDigitalComressionGain(); if (digital_gain) { mock_gain_control.set_compression_gain_db(*digital_gain); } @@ -372,13 +382,13 @@ class AgcManagerDirectTestHelper { // value if `speech_probability_override` and `speech_level_override` have // a value. void CallProcess(int num_calls, - absl::optional speech_probability_override, - absl::optional speech_level_override) { + std::optional speech_probability_override, + std::optional speech_level_override) { for (int i = 0; i < num_calls; ++i) { EXPECT_CALL(*mock_agc, Process(_)).WillOnce(Return()); manager.Process(audio_buffer, speech_probability_override, speech_level_override); - absl::optional new_digital_gain = manager.GetDigitalComressionGain(); + std::optional new_digital_gain = manager.GetDigitalComressionGain(); if (new_digital_gain) { mock_gain_control.set_compression_gain_db(*new_digital_gain); } @@ -435,11 +445,11 @@ class AgcManagerDirectTestHelper { }; class AgcManagerDirectParametrizedTest - : public ::testing::TestWithParam, bool>> { + : public ::testing::TestWithParam, bool>> { protected: AgcManagerDirectParametrizedTest() - : field_trials_( - GetAgcMinMicLevelExperimentFieldTrial(std::get<0>(GetParam()))) {} + : env_(CreateEnvironment(FieldTrials::CreateNoGlobal( + GetAgcMinMicLevelExperimentFieldTrial(std::get<0>(GetParam()))))) {} bool IsMinMicLevelOverridden() const { return std::get<0>(GetParam()).has_value(); @@ -449,20 +459,17 @@ class AgcManagerDirectParametrizedTest } bool IsRmsErrorOverridden() const { return std::get<1>(GetParam()); } - absl::optional GetOverrideOrEmpty(float value) const { - return IsRmsErrorOverridden() ? absl::optional(value) - : absl::nullopt; + std::optional GetOverrideOrEmpty(float value) const { + return IsRmsErrorOverridden() ? std::optional(value) : std::nullopt; } - private: - test::ScopedFieldTrials field_trials_; + const Environment env_; }; INSTANTIATE_TEST_SUITE_P( , AgcManagerDirectParametrizedTest, - ::testing::Combine(testing::Values(absl::nullopt, 12, 20), - testing::Bool())); + ::testing::Combine(testing::Values(std::nullopt, 12, 20), testing::Bool())); // Checks that when the analog controller is disabled, no downward adaptation // takes place. @@ -471,10 +478,10 @@ INSTANTIATE_TEST_SUITE_P( // differs. TEST_P(AgcManagerDirectParametrizedTest, DisabledAnalogAgcDoesNotAdaptDownwards) { - AgcManagerDirect manager_no_analog_agc(kNumChannels, + AgcManagerDirect manager_no_analog_agc(env_, kNumChannels, GetDisabledAnalogAgcConfig()); manager_no_analog_agc.Initialize(); - AgcManagerDirect manager_with_analog_agc(kNumChannels, + AgcManagerDirect manager_with_analog_agc(env_, kNumChannels, GetAnalogAgcTestConfig()); manager_with_analog_agc.Initialize(); @@ -525,10 +532,10 @@ TEST_P(AgcManagerDirectParametrizedTest, // frames to APM config. The test passes but internally the gain update timing // differs. 
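As a sketch of the new construction path (illustrative only, not part of the patch; the default-constructed AnalogAgcConfig is a placeholder), field trials are now injected per instance through the Environment instead of the global field-trial string, and GetMinMicLevelOverride() reads them via env.field_trials():

    // "Enabled-80" is parsed by GetMinMicLevelOverride(), which then
    // overrides the minimum mic level to 80 (see the parsing code above).
    const Environment env = CreateEnvironment(FieldTrials::CreateNoGlobal(
        "WebRTC-Audio-2ndAgcMinMicLevelExperiment/Enabled-80/"));
    AgcManagerDirect manager(env, /*num_capture_channels=*/1,
                             AnalogAgcConfig{});
    manager.Initialize();
    manager.set_stream_analog_level(kInitialInputVolume);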
TEST_P(AgcManagerDirectParametrizedTest, DisabledAnalogAgcDoesNotAdaptUpwards) { - AgcManagerDirect manager_no_analog_agc(kNumChannels, + AgcManagerDirect manager_no_analog_agc(env_, kNumChannels, GetDisabledAnalogAgcConfig()); manager_no_analog_agc.Initialize(); - AgcManagerDirect manager_with_analog_agc(kNumChannels, + AgcManagerDirect manager_with_analog_agc(env_, kNumChannels, GetAnalogAgcTestConfig()); manager_with_analog_agc.Initialize(); @@ -554,7 +561,7 @@ TEST_P(AgcManagerDirectParametrizedTest, DisabledAnalogAgcDoesNotAdaptUpwards) { TEST_P(AgcManagerDirectParametrizedTest, StartupMinVolumeConfigurationIsRespected) { - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, GetOverrideOrEmpty(kHighSpeechProbability), GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -565,7 +572,7 @@ TEST_P(AgcManagerDirectParametrizedTest, MicVolumeResponseToRmsError) { const auto speech_probability_override = GetOverrideOrEmpty(kHighSpeechProbability); - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, speech_probability_override, GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -628,7 +635,7 @@ TEST_P(AgcManagerDirectParametrizedTest, MicVolumeIsLimited) { const auto speech_probability_override = GetOverrideOrEmpty(kHighSpeechProbability); - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, speech_probability_override, GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -706,11 +713,11 @@ TEST_P(AgcManagerDirectParametrizedTest, MicVolumeIsLimited) { } TEST_P(AgcManagerDirectParametrizedTest, CompressorStepsTowardsTarget) { - constexpr absl::optional kNoOverride = absl::nullopt; + constexpr std::optional kNoOverride = std::nullopt; const auto speech_probability_override = GetOverrideOrEmpty(kHighSpeechProbability); - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, speech_probability_override, GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -723,7 +730,7 @@ TEST_P(AgcManagerDirectParametrizedTest, CompressorStepsTowardsTarget) { GetOverrideOrEmpty(-23.0f)); EXPECT_CALL(helper.mock_gain_control, set_compression_gain_db(_)).Times(0); // The mock `GetRmsErrorDb()` returns false; mimic this by passing - // absl::nullopt as an override. + // std::nullopt as an override. helper.CallProcess(/*num_calls=*/19, kNoOverride, kNoOverride); // Moves slowly upwards. @@ -778,11 +785,11 @@ TEST_P(AgcManagerDirectParametrizedTest, CompressorStepsTowardsTarget) { } TEST_P(AgcManagerDirectParametrizedTest, CompressorErrorIsDeemphasized) { - constexpr absl::optional kNoOverride = absl::nullopt; + constexpr std::optional kNoOverride = std::nullopt; const auto speech_probability_override = GetOverrideOrEmpty(kHighSpeechProbability); - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, speech_probability_override, GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -792,7 +799,7 @@ TEST_P(AgcManagerDirectParametrizedTest, CompressorErrorIsDeemphasized) { helper.CallProcess(/*num_calls=*/1, speech_probability_override, GetOverrideOrEmpty(-28.0f)); // The mock `GetRmsErrorDb()` returns false; mimic this by passing - // absl::nullopt as an override. + // std::nullopt as an override. 
helper.CallProcess(/*num_calls=*/18, kNoOverride, kNoOverride); EXPECT_CALL(helper.mock_gain_control, set_compression_gain_db(8)) .WillOnce(Return(0)); @@ -823,11 +830,11 @@ TEST_P(AgcManagerDirectParametrizedTest, CompressorErrorIsDeemphasized) { } TEST_P(AgcManagerDirectParametrizedTest, CompressorReachesMaximum) { - constexpr absl::optional kNoOverride = absl::nullopt; + constexpr std::optional kNoOverride = std::nullopt; const auto speech_probability_override = GetOverrideOrEmpty(kHighSpeechProbability); - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, speech_probability_override, GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -840,7 +847,7 @@ TEST_P(AgcManagerDirectParametrizedTest, CompressorReachesMaximum) { helper.CallProcess(/*num_calls=*/4, speech_probability_override, GetOverrideOrEmpty(-28.0f)); // The mock `GetRmsErrorDb()` returns false; mimic this by passing - // absl::nullopt as an override. + // std::nullopt as an override. helper.CallProcess(/*num_calls=*/15, kNoOverride, kNoOverride); EXPECT_CALL(helper.mock_gain_control, set_compression_gain_db(8)) .WillOnce(Return(0)); @@ -860,11 +867,11 @@ TEST_P(AgcManagerDirectParametrizedTest, CompressorReachesMaximum) { } TEST_P(AgcManagerDirectParametrizedTest, CompressorReachesMinimum) { - constexpr absl::optional kNoOverride = absl::nullopt; + constexpr std::optional kNoOverride = std::nullopt; const auto speech_probability_override = GetOverrideOrEmpty(kHighSpeechProbability); - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, speech_probability_override, GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -877,7 +884,7 @@ TEST_P(AgcManagerDirectParametrizedTest, CompressorReachesMinimum) { helper.CallProcess(/*num_calls=*/4, speech_probability_override, GetOverrideOrEmpty(-18.0f)); // The mock `GetRmsErrorDb()` returns false; mimic this by passing - // absl::nullopt as an override. + // std::nullopt as an override. 
helper.CallProcess(/*num_calls=*/15, kNoOverride, kNoOverride); EXPECT_CALL(helper.mock_gain_control, set_compression_gain_db(6)) .WillOnce(Return(0)); @@ -897,7 +904,7 @@ TEST_P(AgcManagerDirectParametrizedTest, CompressorReachesMinimum) { } TEST_P(AgcManagerDirectParametrizedTest, NoActionWhileMuted) { - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, GetOverrideOrEmpty(kHighSpeechProbability), GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -907,7 +914,7 @@ TEST_P(AgcManagerDirectParametrizedTest, NoActionWhileMuted) { GetOverrideOrEmpty(kHighSpeechProbability), GetOverrideOrEmpty(kSpeechLevelDbfs)); - absl::optional new_digital_gain = + std::optional new_digital_gain = helper.manager.GetDigitalComressionGain(); if (new_digital_gain) { helper.mock_gain_control.set_compression_gain_db(*new_digital_gain); @@ -915,7 +922,7 @@ TEST_P(AgcManagerDirectParametrizedTest, NoActionWhileMuted) { } TEST_P(AgcManagerDirectParametrizedTest, UnmutingChecksVolumeWithoutRaising) { - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, GetOverrideOrEmpty(kHighSpeechProbability), GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -936,7 +943,7 @@ TEST_P(AgcManagerDirectParametrizedTest, UnmutingChecksVolumeWithoutRaising) { } TEST_P(AgcManagerDirectParametrizedTest, UnmutingRaisesTooLowVolume) { - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, GetOverrideOrEmpty(kHighSpeechProbability), GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -960,7 +967,7 @@ TEST_P(AgcManagerDirectParametrizedTest, const auto speech_probability_override = GetOverrideOrEmpty(kHighSpeechProbability); - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, speech_probability_override, GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1002,7 +1009,7 @@ TEST_P(AgcManagerDirectParametrizedTest, const auto speech_probability_override = GetOverrideOrEmpty(kHighSpeechProbability); - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, speech_probability_override, GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1050,7 +1057,7 @@ TEST_P(AgcManagerDirectParametrizedTest, const auto speech_probability_override = GetOverrideOrEmpty(kHighSpeechProbability); - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, speech_probability_override, GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1096,7 +1103,7 @@ TEST_P(AgcManagerDirectParametrizedTest, const auto speech_probability_override = GetOverrideOrEmpty(kHighSpeechProbability); - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, speech_probability_override, GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1112,7 +1119,7 @@ TEST_P(AgcManagerDirectParametrizedTest, } TEST_P(AgcManagerDirectParametrizedTest, NoClippingHasNoImpact) { - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, GetOverrideOrEmpty(kHighSpeechProbability), GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1122,7 +1129,7 @@ TEST_P(AgcManagerDirectParametrizedTest, NoClippingHasNoImpact) { } TEST_P(AgcManagerDirectParametrizedTest, ClippingUnderThresholdHasNoImpact) { - AgcManagerDirectTestHelper helper; + 
AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, GetOverrideOrEmpty(kHighSpeechProbability), GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1132,7 +1139,7 @@ TEST_P(AgcManagerDirectParametrizedTest, ClippingUnderThresholdHasNoImpact) { } TEST_P(AgcManagerDirectParametrizedTest, ClippingLowersVolume) { - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(/*applied_input_volume=*/255, GetOverrideOrEmpty(kHighSpeechProbability), GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1143,7 +1150,7 @@ TEST_P(AgcManagerDirectParametrizedTest, ClippingLowersVolume) { } TEST_P(AgcManagerDirectParametrizedTest, WaitingPeriodBetweenClippingChecks) { - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(/*applied_input_volume=*/255, GetOverrideOrEmpty(kHighSpeechProbability), GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1163,7 +1170,7 @@ TEST_P(AgcManagerDirectParametrizedTest, WaitingPeriodBetweenClippingChecks) { } TEST_P(AgcManagerDirectParametrizedTest, ClippingLoweringIsLimited) { - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(/*applied_input_volume=*/180, GetOverrideOrEmpty(kHighSpeechProbability), GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1183,7 +1190,7 @@ TEST_P(AgcManagerDirectParametrizedTest, const auto speech_probability_override = GetOverrideOrEmpty(kHighSpeechProbability); - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(/*applied_input_volume=*/255, speech_probability_override, GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1204,7 +1211,7 @@ TEST_P(AgcManagerDirectParametrizedTest, const auto speech_probability_override = GetOverrideOrEmpty(kHighSpeechProbability); - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(/*applied_input_volume=*/200, speech_probability_override, GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1225,11 +1232,11 @@ TEST_P(AgcManagerDirectParametrizedTest, TEST_P(AgcManagerDirectParametrizedTest, MaxCompressionIsIncreasedAfterClipping) { - constexpr absl::optional kNoOverride = absl::nullopt; + constexpr std::optional kNoOverride = std::nullopt; const auto speech_probability_override = GetOverrideOrEmpty(kHighSpeechProbability); - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(/*applied_input_volume=*/210, speech_probability_override, GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1248,7 +1255,7 @@ TEST_P(AgcManagerDirectParametrizedTest, helper.CallProcess(/*num_calls=*/5, speech_probability_override, GetOverrideOrEmpty(-29.0f)); // The mock `GetRmsErrorDb()` returns false; mimic this by passing - // absl::nullopt as an override. + // std::nullopt as an override. 
helper.CallProcess(/*num_calls=*/14, kNoOverride, kNoOverride); EXPECT_CALL(helper.mock_gain_control, set_compression_gain_db(8)) .WillOnce(Return(0)); @@ -1326,7 +1333,7 @@ TEST_P(AgcManagerDirectParametrizedTest, UserCanRaiseVolumeAfterClipping) { const auto speech_probability_override = GetOverrideOrEmpty(kHighSpeechProbability); - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(/*applied_input_volume=*/225, speech_probability_override, GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1366,7 +1373,7 @@ TEST_P(AgcManagerDirectParametrizedTest, UserCanRaiseVolumeAfterClipping) { } TEST_P(AgcManagerDirectParametrizedTest, ClippingDoesNotPullLowVolumeBackUp) { - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(/*applied_input_volume=*/80, GetOverrideOrEmpty(kHighSpeechProbability), GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1378,7 +1385,7 @@ TEST_P(AgcManagerDirectParametrizedTest, ClippingDoesNotPullLowVolumeBackUp) { } TEST_P(AgcManagerDirectParametrizedTest, TakesNoActionOnZeroMicVolume) { - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(kInitialInputVolume, GetOverrideOrEmpty(kHighSpeechProbability), GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1393,7 +1400,7 @@ TEST_P(AgcManagerDirectParametrizedTest, TakesNoActionOnZeroMicVolume) { } TEST_P(AgcManagerDirectParametrizedTest, ClippingDetectionLowersVolume) { - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(/*applied_input_volume=*/255, GetOverrideOrEmpty(kHighSpeechProbability), GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1407,7 +1414,7 @@ TEST_P(AgcManagerDirectParametrizedTest, ClippingDetectionLowersVolume) { TEST_P(AgcManagerDirectParametrizedTest, DisabledClippingPredictorDoesNotLowerVolume) { - AgcManagerDirectTestHelper helper; + AgcManagerDirectTestHelper helper(env_); helper.CallAgcSequence(/*applied_input_volume=*/255, GetOverrideOrEmpty(kHighSpeechProbability), GetOverrideOrEmpty(kSpeechLevelDbfs)); @@ -1434,25 +1441,21 @@ TEST_P(AgcManagerDirectParametrizedTest, DisableDigitalDisablesDigital) { AnalogAgcConfig config; config.enable_digital_adaptive = false; - auto manager = std::make_unique(kNumChannels, config); + auto manager = std::make_unique(env_, kNumChannels, config); manager->Initialize(); manager->SetupDigitalGainControl(mock_gain_control); } TEST(AgcManagerDirectTest, AgcMinMicLevelExperimentDefault) { - std::unique_ptr manager = - CreateAgcManagerDirect(kInitialInputVolume, kClippedLevelStep, - kClippedRatioThreshold, kClippedWaitFrames); + std::unique_ptr manager = CreateAgcManagerDirect(); EXPECT_EQ(manager->channel_agcs_[0]->min_mic_level(), kMinMicLevel); } TEST(AgcManagerDirectTest, AgcMinMicLevelExperimentDisabled) { for (const std::string& field_trial_suffix : {"", "_20220210"}) { - test::ScopedFieldTrials field_trial( - GetAgcMinMicLevelExperimentFieldTrial("Disabled" + field_trial_suffix)); - std::unique_ptr manager = - CreateAgcManagerDirect(kInitialInputVolume, kClippedLevelStep, - kClippedRatioThreshold, kClippedWaitFrames); + std::unique_ptr manager = CreateAgcManagerDirect( + {.field_trials = GetAgcMinMicLevelExperimentFieldTrial( + "Disabled" + field_trial_suffix)}); EXPECT_EQ(manager->channel_agcs_[0]->min_mic_level(), kMinMicLevel); } } @@ -1460,22 +1463,16 @@ TEST(AgcManagerDirectTest, AgcMinMicLevelExperimentDisabled) { // Checks that a field-trial parameter outside of the valid range [0,255] is 
// ignored. TEST(AgcManagerDirectTest, AgcMinMicLevelExperimentOutOfRangeAbove) { - test::ScopedFieldTrials field_trial( - GetAgcMinMicLevelExperimentFieldTrial("Enabled-256")); - std::unique_ptr manager = - CreateAgcManagerDirect(kInitialInputVolume, kClippedLevelStep, - kClippedRatioThreshold, kClippedWaitFrames); + std::unique_ptr manager = CreateAgcManagerDirect( + {.field_trials = GetAgcMinMicLevelExperimentFieldTrial("Enabled-256")}); EXPECT_EQ(manager->channel_agcs_[0]->min_mic_level(), kMinMicLevel); } // Checks that a field-trial parameter outside of the valid range [0,255] is // ignored. TEST(AgcManagerDirectTest, AgcMinMicLevelExperimentOutOfRangeBelow) { - test::ScopedFieldTrials field_trial( - GetAgcMinMicLevelExperimentFieldTrial("Enabled--1")); - std::unique_ptr manager = - CreateAgcManagerDirect(kInitialInputVolume, kClippedLevelStep, - kClippedRatioThreshold, kClippedWaitFrames); + std::unique_ptr manager = CreateAgcManagerDirect( + {.field_trials = GetAgcMinMicLevelExperimentFieldTrial("Enabled--1")}); EXPECT_EQ(manager->channel_agcs_[0]->min_mic_level(), kMinMicLevel); } @@ -1486,12 +1483,11 @@ TEST(AgcManagerDirectTest, AgcMinMicLevelExperimentEnabled50) { constexpr int kMinMicLevelOverride = 50; for (const std::string& field_trial_suffix : {"", "_20220210"}) { SCOPED_TRACE(field_trial_suffix); - test::ScopedFieldTrials field_trial( - GetAgcMinMicLevelExperimentFieldTrialEnabled(kMinMicLevelOverride, - field_trial_suffix)); - std::unique_ptr manager = - CreateAgcManagerDirect(kInitialInputVolume, kClippedLevelStep, - kClippedRatioThreshold, kClippedWaitFrames); + + std::unique_ptr manager = CreateAgcManagerDirect( + {.field_trials = GetAgcMinMicLevelExperimentFieldTrialEnabled( + kMinMicLevelOverride, field_trial_suffix)}); + EXPECT_EQ(manager->channel_agcs_[0]->min_mic_level(), kMinMicLevelOverride); } } @@ -1504,21 +1500,11 @@ TEST(AgcManagerDirectTest, AgcMinMicLevelExperimentCheckMinLevelWithClipping) { // Create and initialize two AGCs by specifying and leaving unspecified the // relevant field trial. - const auto factory = []() { - std::unique_ptr manager = - CreateAgcManagerDirect(kInitialInputVolume, kClippedLevelStep, - kClippedRatioThreshold, kClippedWaitFrames); - manager->Initialize(); - manager->set_stream_analog_level(kInitialInputVolume); - return manager; - }; - std::unique_ptr manager = factory(); - std::unique_ptr manager_with_override; - { - test::ScopedFieldTrials field_trial( - GetAgcMinMicLevelExperimentFieldTrialEnabled(kMinMicLevelOverride)); - manager_with_override = factory(); - } + std::unique_ptr manager = CreateAgcManagerDirect(); + std::unique_ptr manager_with_override = + CreateAgcManagerDirect( + {.field_trials = GetAgcMinMicLevelExperimentFieldTrialEnabled( + kMinMicLevelOverride)}); // Create a test input signal which containts 80% of clipped samples. AudioBuffer audio_buffer(kSampleRateHz, 1, kSampleRateHz, 1, kSampleRateHz, @@ -1529,11 +1515,11 @@ TEST(AgcManagerDirectTest, AgcMinMicLevelExperimentCheckMinLevelWithClipping) { // Simulate 4 seconds of clipping; it is expected to trigger a downward // adjustment of the analog gain. 
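// (Each CallPreProcessAndProcess() iteration below feeds one frame, so
// num_calls=400 corresponds to the 4 seconds of simulated clipping; the
// 10 ms frame duration is assumed from the APM processing cadence rather
// than stated explicitly in this test.)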
CallPreProcessAndProcess(/*num_calls=*/400, audio_buffer, - /*speech_probability_override=*/absl::nullopt, - /*speech_level_override=*/absl::nullopt, *manager); + /*speech_probability_override=*/std::nullopt, + /*speech_level_override=*/std::nullopt, *manager); CallPreProcessAndProcess(/*num_calls=*/400, audio_buffer, - /*speech_probability_override=*/absl::nullopt, - /*speech_level_override=*/absl::nullopt, + /*speech_probability_override=*/std::nullopt, + /*speech_level_override=*/std::nullopt, *manager_with_override); // Make sure that an adaptation occurred. @@ -1561,21 +1547,11 @@ TEST(AgcManagerDirectTest, // Create and initialize two AGCs by specifying and leaving unspecified the // relevant field trial. - const auto factory = []() { - std::unique_ptr manager = - CreateAgcManagerDirect(kInitialInputVolume, kClippedLevelStep, - kClippedRatioThreshold, kClippedWaitFrames); - manager->Initialize(); - manager->set_stream_analog_level(kInitialInputVolume); - return manager; - }; - std::unique_ptr manager = factory(); - std::unique_ptr manager_with_override; - { - test::ScopedFieldTrials field_trial( - GetAgcMinMicLevelExperimentFieldTrialEnabled(kMinMicLevelOverride)); - manager_with_override = factory(); - } + std::unique_ptr manager = CreateAgcManagerDirect(); + std::unique_ptr manager_with_override = + CreateAgcManagerDirect( + {.field_trials = GetAgcMinMicLevelExperimentFieldTrialEnabled( + kMinMicLevelOverride)}); // Create a test input signal which containts 80% of clipped samples. AudioBuffer audio_buffer(kSampleRateHz, 1, kSampleRateHz, 1, kSampleRateHz, @@ -1591,8 +1567,8 @@ TEST(AgcManagerDirectTest, /*speech_probability_level=*/-18.0f, *manager); CallPreProcessAndProcess( /*num_calls=*/400, audio_buffer, - /*speech_probability_override=*/absl::optional(0.7f), - /*speech_probability_level=*/absl::optional(-18.0f), + /*speech_probability_override=*/std::optional(0.7f), + /*speech_probability_level=*/std::optional(-18.0f), *manager_with_override); // Make sure that an adaptation occurred. @@ -1616,32 +1592,25 @@ TEST(AgcManagerDirectTest, AgcMinMicLevelExperimentCompareMicLevelWithClipping) { // Create and initialize two AGCs by specifying and leaving unspecified the // relevant field trial. - const auto factory = []() { - // Use a large clipped level step to more quickly decrease the analog gain - // with clipping. - AnalogAgcConfig config = kDefaultAnalogConfig; - config.startup_min_volume = kInitialInputVolume; - config.enable_digital_adaptive = false; - config.clipped_level_step = 64; - config.clipped_ratio_threshold = kClippedRatioThreshold; - config.clipped_wait_frames = kClippedWaitFrames; - auto controller = - std::make_unique(/*num_capture_channels=*/1, config); - controller->Initialize(); - controller->set_stream_analog_level(kInitialInputVolume); - return controller; - }; - std::unique_ptr manager = factory(); - std::unique_ptr manager_with_override; - { - constexpr int kMinMicLevelOverride = 20; - static_assert( - kDefaultAnalogConfig.clipped_level_min >= kMinMicLevelOverride, - "Use a lower override value."); - test::ScopedFieldTrials field_trial( - GetAgcMinMicLevelExperimentFieldTrialEnabled(kMinMicLevelOverride)); - manager_with_override = factory(); - } + // Use a large clipped level step to more quickly decrease the analog gain + // with clipping. 
+ std::unique_ptr manager = CreateAgcManagerDirect({ + .clipped_level_min = kDefaultAnalogConfig.clipped_level_min, + .enable_digital_adaptive = false, + .clipped_level_step = 64, + }); + + constexpr int kMinMicLevelOverride = 20; + static_assert(kDefaultAnalogConfig.clipped_level_min >= kMinMicLevelOverride, + "Use a lower override value."); + std::unique_ptr manager_with_override = + CreateAgcManagerDirect({ + .field_trials = GetAgcMinMicLevelExperimentFieldTrialEnabled( + kMinMicLevelOverride), + .clipped_level_min = kDefaultAnalogConfig.clipped_level_min, + .enable_digital_adaptive = false, + .clipped_level_step = 64, + }); // Create a test input signal which containts 80% of clipped samples. AudioBuffer audio_buffer(kSampleRateHz, 1, kSampleRateHz, 1, kSampleRateHz, @@ -1652,11 +1621,11 @@ TEST(AgcManagerDirectTest, // Simulate 4 seconds of clipping; it is expected to trigger a downward // adjustment of the analog gain. CallPreProcessAndProcess(/*num_calls=*/400, audio_buffer, - /*speech_probability_override=*/absl::nullopt, - /*speech_level_override=*/absl::nullopt, *manager); + /*speech_probability_override=*/std::nullopt, + /*speech_level_override=*/std::nullopt, *manager); CallPreProcessAndProcess(/*num_calls=*/400, audio_buffer, - /*speech_probability_override=*/absl::nullopt, - /*speech_level_override=*/absl::nullopt, + /*speech_probability_override=*/std::nullopt, + /*speech_level_override=*/std::nullopt, *manager_with_override); // Make sure that an adaptation occurred. @@ -1683,32 +1652,23 @@ TEST(AgcManagerDirectTest, AgcMinMicLevelExperimentCompareMicLevelWithClippingWithRmsErrorOverride) { // Create and initialize two AGCs by specifying and leaving unspecified the // relevant field trial. - const auto factory = []() { - // Use a large clipped level step to more quickly decrease the analog gain - // with clipping. - AnalogAgcConfig config = kDefaultAnalogConfig; - config.startup_min_volume = kInitialInputVolume; - config.enable_digital_adaptive = false; - config.clipped_level_step = 64; - config.clipped_ratio_threshold = kClippedRatioThreshold; - config.clipped_wait_frames = kClippedWaitFrames; - auto controller = - std::make_unique(/*num_capture_channels=*/1, config); - controller->Initialize(); - controller->set_stream_analog_level(kInitialInputVolume); - return controller; - }; - std::unique_ptr manager = factory(); - std::unique_ptr manager_with_override; - { - constexpr int kMinMicLevelOverride = 20; - static_assert( - kDefaultAnalogConfig.clipped_level_min >= kMinMicLevelOverride, - "Use a lower override value."); - test::ScopedFieldTrials field_trial( - GetAgcMinMicLevelExperimentFieldTrialEnabled(kMinMicLevelOverride)); - manager_with_override = factory(); - } + std::unique_ptr manager = CreateAgcManagerDirect({ + .clipped_level_min = kDefaultAnalogConfig.clipped_level_min, + .enable_digital_adaptive = false, + .clipped_level_step = 64, + }); + + constexpr int kMinMicLevelOverride = 20; + static_assert(kDefaultAnalogConfig.clipped_level_min >= kMinMicLevelOverride, + "Use a lower override value."); + std::unique_ptr manager_with_override = + CreateAgcManagerDirect({ + .field_trials = GetAgcMinMicLevelExperimentFieldTrialEnabled( + kMinMicLevelOverride), + .clipped_level_min = kDefaultAnalogConfig.clipped_level_min, + .enable_digital_adaptive = false, + .clipped_level_step = 64, + }); // Create a test input signal which containts 80% of clipped samples. 
AudioBuffer audio_buffer(kSampleRateHz, 1, kSampleRateHz, 1, kSampleRateHz, @@ -1718,12 +1678,12 @@ TEST(AgcManagerDirectTest, CallPreProcessAndProcess( /*num_calls=*/400, audio_buffer, - /*speech_probability_override=*/absl::optional(0.7f), - /*speech_level_override=*/absl::optional(-18.0f), *manager); + /*speech_probability_override=*/std::optional(0.7f), + /*speech_level_override=*/std::optional(-18.0f), *manager); CallPreProcessAndProcess( /*num_calls=*/400, audio_buffer, - /*speech_probability_override=*/absl::optional(0.7f), - /*speech_level_override=*/absl::optional(-18.0f), + /*speech_probability_override=*/std::optional(0.7f), + /*speech_level_override=*/std::optional(-18.0f), *manager_with_override); // Make sure that an adaptation occurred. @@ -1748,19 +1708,15 @@ TEST_P(AgcManagerDirectParametrizedTest, ClippingParametersVerified) { GTEST_SKIP() << "Skipped. RMS error override does not affect the test."; } - std::unique_ptr manager = - CreateAgcManagerDirect(kInitialInputVolume, kClippedLevelStep, - kClippedRatioThreshold, kClippedWaitFrames); - manager->Initialize(); + std::unique_ptr manager = CreateAgcManagerDirect(); EXPECT_EQ(manager->clipped_level_step_, kClippedLevelStep); EXPECT_EQ(manager->clipped_ratio_threshold_, kClippedRatioThreshold); EXPECT_EQ(manager->clipped_wait_frames_, kClippedWaitFrames); + std::unique_ptr manager_custom = - CreateAgcManagerDirect(kInitialInputVolume, - /*clipped_level_step=*/10, - /*clipped_ratio_threshold=*/0.2f, - /*clipped_wait_frames=*/50); - manager_custom->Initialize(); + CreateAgcManagerDirect({.clipped_level_step = 10, + .clipped_ratio_threshold = 0.2f, + .clipped_wait_frames = 50}); EXPECT_EQ(manager_custom->clipped_level_step_, 10); EXPECT_EQ(manager_custom->clipped_ratio_threshold_, 0.2f); EXPECT_EQ(manager_custom->clipped_wait_frames_, 50); @@ -1772,14 +1728,9 @@ TEST_P(AgcManagerDirectParametrizedTest, GTEST_SKIP() << "Skipped. RMS error override does not affect the test."; } - // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed. - ClippingPredictorConfig config; - config.enabled = false; + std::unique_ptr manager = + CreateAgcManagerDirect({.clipping_predictor = {.enabled = false}}); - std::unique_ptr manager = CreateAgcManagerDirect( - kInitialInputVolume, kClippedLevelStep, kClippedRatioThreshold, - kClippedWaitFrames, config); - manager->Initialize(); EXPECT_FALSE(manager->clipping_predictor_enabled()); EXPECT_FALSE(manager->use_clipping_predictor_step()); } @@ -1799,15 +1750,9 @@ TEST_P(AgcManagerDirectParametrizedTest, GTEST_SKIP() << "Skipped. RMS error override does not affect the test."; } - // TODO(bugs.webrtc.org/12874): Use designated initializers once fixed. 
- ClippingPredictorConfig config; - config.enabled = true; - config.use_predicted_step = true; - std::unique_ptr manager = CreateAgcManagerDirect( - kInitialInputVolume, kClippedLevelStep, kClippedRatioThreshold, - kClippedWaitFrames, config); - manager->Initialize(); + {.clipping_predictor = {.enabled = true, .use_predicted_step = true}}); + EXPECT_TRUE(manager->clipping_predictor_enabled()); EXPECT_TRUE(manager->use_clipping_predictor_step()); } @@ -1819,7 +1764,7 @@ TEST_P(AgcManagerDirectParametrizedTest, AnalogAgcConfig config = GetAnalogAgcTestConfig(); config.clipping_predictor.enabled = false; - AgcManagerDirect manager(config, new ::testing::NiceMock()); + AgcManagerDirect manager(env_, config, new ::testing::NiceMock()); manager.Initialize(); manager.set_stream_analog_level(/*level=*/255); EXPECT_FALSE(manager.clipping_predictor_enabled()); @@ -1845,10 +1790,10 @@ TEST_P(AgcManagerDirectParametrizedTest, config_with_prediction.clipping_predictor.use_predicted_step = true; AnalogAgcConfig config_without_prediction = GetAnalogAgcTestConfig(); config_without_prediction.clipping_predictor.enabled = false; - AgcManagerDirect manager_with_prediction(config_with_prediction, + AgcManagerDirect manager_with_prediction(env_, config_with_prediction, new ::testing::NiceMock()); AgcManagerDirect manager_without_prediction( - config_without_prediction, new ::testing::NiceMock()); + env_, config_without_prediction, new ::testing::NiceMock()); manager_with_prediction.Initialize(); manager_without_prediction.Initialize(); @@ -1953,10 +1898,10 @@ TEST_P(AgcManagerDirectParametrizedTest, config_with_prediction.clipping_predictor.use_predicted_step = false; AnalogAgcConfig config_without_prediction = GetAnalogAgcTestConfig(); config_without_prediction.clipping_predictor.enabled = false; - AgcManagerDirect manager_with_prediction(config_with_prediction, + AgcManagerDirect manager_with_prediction(env_, config_with_prediction, new ::testing::NiceMock()); AgcManagerDirect manager_without_prediction( - config_without_prediction, new ::testing::NiceMock()); + env_, config_without_prediction, new ::testing::NiceMock()); constexpr int kInitialLevel = 255; constexpr float kClippingPeakRatio = 1.0f; @@ -2040,8 +1985,8 @@ TEST_P(AgcManagerDirectParametrizedTest, // Checks that passing an empty speech level and probability overrides to // `Process()` has the same effect as passing no overrides. TEST_P(AgcManagerDirectParametrizedTest, EmptyRmsErrorOverrideHasNoEffect) { - AgcManagerDirect manager_1(kNumChannels, GetAnalogAgcTestConfig()); - AgcManagerDirect manager_2(kNumChannels, GetAnalogAgcTestConfig()); + AgcManagerDirect manager_1(env_, kNumChannels, GetAnalogAgcTestConfig()); + AgcManagerDirect manager_2(env_, kNumChannels, GetAnalogAgcTestConfig()); manager_1.Initialize(); manager_2.Initialize(); @@ -2059,7 +2004,7 @@ TEST_P(AgcManagerDirectParametrizedTest, EmptyRmsErrorOverrideHasNoEffect) { ASSERT_EQ(manager_1.recommended_analog_level(), kAnalogLevel); ASSERT_EQ(manager_2.recommended_analog_level(), kAnalogLevel); - reader.Feed(kNumFrames, kGainDb, absl::nullopt, absl::nullopt, manager_1); + reader.Feed(kNumFrames, kGainDb, std::nullopt, std::nullopt, manager_1); reader.Feed(kNumFrames, kGainDb, manager_2); // Check that the states are the same and adaptation occurs. @@ -2082,8 +2027,8 @@ TEST_P(AgcManagerDirectParametrizedTest, EmptyRmsErrorOverrideHasNoEffect) { // Checks that passing a non-empty speech level and probability overrides to // `Process()` has an effect. 
TEST_P(AgcManagerDirectParametrizedTest, NonEmptyRmsErrorOverrideHasEffect) { - AgcManagerDirect manager_1(kNumChannels, GetAnalogAgcTestConfig()); - AgcManagerDirect manager_2(kNumChannels, GetAnalogAgcTestConfig()); + AgcManagerDirect manager_1(env_, kNumChannels, GetAnalogAgcTestConfig()); + AgcManagerDirect manager_2(env_, kNumChannels, GetAnalogAgcTestConfig()); manager_1.Initialize(); manager_2.Initialize(); @@ -2127,7 +2072,7 @@ TEST_P(AgcManagerDirectChannelSampleRateTest, CheckIsAlive) { constexpr AnalogAgcConfig kConfig{.enabled = true, .clipping_predictor{.enabled = true}}; - AgcManagerDirect manager(num_channels, kConfig); + AgcManagerDirect manager(CreateEnvironment(), num_channels, kConfig); manager.Initialize(); AudioBuffer buffer(sample_rate_hz, num_channels, sample_rate_hz, num_channels, sample_rate_hz, num_channels); diff --git a/modules/audio_processing/agc/legacy/digital_agc.cc b/modules/audio_processing/agc/legacy/digital_agc.cc index 4cd86acba8..5bf7aaff57 100644 --- a/modules/audio_processing/agc/legacy/digital_agc.cc +++ b/modules/audio_processing/agc/legacy/digital_agc.cc @@ -271,7 +271,7 @@ int32_t WebRtcAgc_AddFarendToDigital(DigitalAgc* stt, // Gains is an 11 element long array (one value per ms, incl start & end). int32_t WebRtcAgc_ComputeDigitalGains(DigitalAgc* stt, const int16_t* const* in_near, - size_t num_bands, + size_t /* num_bands */, uint32_t FS, int16_t lowlevelSignal, int32_t gains[11]) { diff --git a/modules/audio_processing/agc/mock_agc.h b/modules/audio_processing/agc/mock_agc.h index 3080e1563c..bc31148153 100644 --- a/modules/audio_processing/agc/mock_agc.h +++ b/modules/audio_processing/agc/mock_agc.h @@ -20,7 +20,10 @@ namespace webrtc { class MockAgc : public Agc { public: virtual ~MockAgc() {} - MOCK_METHOD(void, Process, (rtc::ArrayView audio), (override)); + MOCK_METHOD(void, + Process, + (webrtc::ArrayView audio), + (override)); MOCK_METHOD(bool, GetRmsErrorDb, (int* error), (override)); MOCK_METHOD(void, Reset, (), (override)); MOCK_METHOD(int, set_target_level_dbfs, (int level), (override)); diff --git a/modules/audio_processing/agc2/BUILD.gn b/modules/audio_processing/agc2/BUILD.gn index bd59ad3dae..3808a92ca2 100644 --- a/modules/audio_processing/agc2/BUILD.gn +++ b/modules/audio_processing/agc2/BUILD.gn @@ -23,9 +23,9 @@ rtc_library("speech_level_estimator") { deps = [ ":common", - "..:api", "..:apm_logging", "../../../api:array_view", + "../../../api/audio:audio_processing", "../../../rtc_base:checks", "../../../rtc_base:logging", "../../../rtc_base:safe_minmax", @@ -48,9 +48,9 @@ rtc_library("adaptive_digital_gain_controller") { deps = [ ":common", ":gain_applier", - "..:api", "..:apm_logging", - "..:audio_frame_view", + "../../../api/audio:audio_frame_api", + "../../../api/audio:audio_processing", "../../../common_audio", "../../../rtc_base:checks", "../../../rtc_base:logging", @@ -81,8 +81,6 @@ rtc_library("saturation_protector") { "../../../rtc_base:safe_compare", "../../../rtc_base:safe_minmax", ] - - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("biquad_filter") { @@ -112,15 +110,13 @@ rtc_library("clipping_predictor") { deps = [ ":gain_map", - "..:api", "..:audio_frame_view", + "../../../api/audio:audio_processing", "../../../common_audio", "../../../rtc_base:checks", "../../../rtc_base:logging", "../../../rtc_base:safe_minmax", ] - - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("common") { @@ -150,6 +146,7 @@ rtc_library("fixed_digital") { "..:apm_logging", 
"..:audio_frame_view", "../../../api:array_view", + "../../../api/audio:audio_frame_api", "../../../common_audio", "../../../rtc_base:checks", "../../../rtc_base:gtest_prod", @@ -157,8 +154,8 @@ rtc_library("fixed_digital") { "../../../rtc_base:safe_minmax", "../../../rtc_base:stringutils", "../../../system_wrappers:metrics", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("gain_applier") { @@ -175,7 +172,7 @@ rtc_library("gain_applier") { deps = [ ":common", "..:audio_frame_view", - "../../../api:array_view", + "../../../api/audio:audio_frame_api", "../../../rtc_base:safe_minmax", ] } @@ -209,10 +206,10 @@ rtc_library("input_volume_controller") { ":clipping_predictor", ":gain_map", ":input_volume_stats_reporter", - "..:api", "..:audio_buffer", "..:audio_frame_view", "../../../api:array_view", + "../../../api/audio:audio_processing", "../../../rtc_base:checks", "../../../rtc_base:checks", "../../../rtc_base:gtest_prod", @@ -222,8 +219,6 @@ rtc_library("input_volume_controller") { "../../../system_wrappers:field_trial", "../../../system_wrappers:metrics", ] - - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("noise_level_estimator") { @@ -234,8 +229,7 @@ rtc_library("noise_level_estimator") { deps = [ ":biquad_filter", "..:apm_logging", - "..:audio_frame_view", - "../../../api:array_view", + "../../../api/audio:audio_frame_api", "../../../rtc_base:checks", "../../../system_wrappers", ] @@ -268,8 +262,7 @@ rtc_library("vad_wrapper") { deps = [ ":common", ":cpu_features", - "..:audio_frame_view", - "../../../api:array_view", + "../../../api/audio:audio_frame_api", "../../../common_audio", "../../../rtc_base:checks", "rnn_vad", @@ -303,8 +296,8 @@ rtc_library("speech_level_estimator_unittest") { deps = [ ":common", ":speech_level_estimator", - "..:api", "..:apm_logging", + "../../../api/audio:audio_processing", "../../../rtc_base:gunit_helpers", "../../../test:test_support", ] @@ -320,9 +313,9 @@ rtc_library("adaptive_digital_gain_controller_unittest") { ":adaptive_digital_gain_controller", ":common", ":test_utils", - "..:api", "..:apm_logging", "..:audio_frame_view", + "../../../api/audio:audio_processing", "../../../common_audio", "../../../rtc_base:gunit_helpers", "../../../test:test_support", @@ -337,7 +330,7 @@ rtc_library("gain_applier_unittest") { deps = [ ":gain_applier", ":test_utils", - "..:audio_frame_view", + "../../../api/audio:audio_frame_api", "../../../rtc_base:gunit_helpers", "../../../test:test_support", ] @@ -391,6 +384,7 @@ rtc_library("fixed_digital_unittests") { "..:apm_logging", "..:audio_frame_view", "../../../api:array_view", + "../../../api/audio:audio_frame_api", "../../../common_audio", "../../../rtc_base:checks", "../../../rtc_base:gunit_helpers", @@ -413,8 +407,9 @@ rtc_library("input_volume_controller_unittests") { ":clipping_predictor", ":gain_map", ":input_volume_controller", - "..:api", + "..:audio_buffer", "../../../api:array_view", + "../../../api/audio:audio_processing", "../../../rtc_base:checks", "../../../rtc_base:random", "../../../rtc_base:safe_conversions", @@ -426,8 +421,6 @@ rtc_library("input_volume_controller_unittests") { "../../../test:test_support", "//testing/gtest", ] - - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("noise_estimator_unittests") { @@ -439,9 +432,8 @@ rtc_library("noise_estimator_unittests") { ":noise_level_estimator", ":test_utils", "..:apm_logging", - "..:audio_frame_view", - 
"../../../api:array_view", "../../../api:function_view", + "../../../api/audio:audio_frame_api", "../../../rtc_base:checks", "../../../rtc_base:gunit_helpers", ] @@ -453,7 +445,7 @@ rtc_library("vad_wrapper_unittests") { deps = [ ":common", ":vad_wrapper", - "..:audio_frame_view", + "../../../api/audio:audio_frame_api", "../../../rtc_base:checks", "../../../rtc_base:gunit_helpers", "../../../rtc_base:safe_compare", @@ -475,6 +467,7 @@ rtc_library("test_utils") { ] deps = [ "..:audio_frame_view", + "../../../api/audio:audio_frame_api", "../../../rtc_base:checks", "../../../rtc_base:random", ] @@ -491,10 +484,7 @@ rtc_library("input_volume_stats_reporter") { "../../../rtc_base:safe_minmax", "../../../rtc_base:stringutils", "../../../system_wrappers:metrics", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -506,6 +496,6 @@ rtc_library("input_volume_stats_reporter_unittests") { "../../../rtc_base:stringutils", "../../../system_wrappers:metrics", "../../../test:test_support", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } diff --git a/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc b/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc index e8edab602c..8bbbedced5 100644 --- a/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc +++ b/modules/audio_processing/agc2/adaptive_digital_gain_controller.cc @@ -96,8 +96,8 @@ float ComputeGainChangeThisFrameDb(float target_gain_db, if (!gain_increase_allowed) { target_gain_difference_db = std::min(target_gain_difference_db, 0.0f); } - return rtc::SafeClamp(target_gain_difference_db, -max_gain_decrease_db, - max_gain_increase_db); + return SafeClamp(target_gain_difference_db, -max_gain_decrease_db, + max_gain_increase_db); } } // namespace @@ -124,7 +124,7 @@ AdaptiveDigitalGainController::AdaptiveDigitalGainController( } void AdaptiveDigitalGainController::Process(const FrameInfo& info, - AudioFrameView frame) { + DeinterleavedView frame) { RTC_DCHECK_GE(info.speech_level_dbfs, -150.0f); RTC_DCHECK_GE(frame.num_channels(), 1); RTC_DCHECK( diff --git a/modules/audio_processing/agc2/adaptive_digital_gain_controller.h b/modules/audio_processing/agc2/adaptive_digital_gain_controller.h index 01335e79db..d464dc6b2c 100644 --- a/modules/audio_processing/agc2/adaptive_digital_gain_controller.h +++ b/modules/audio_processing/agc2/adaptive_digital_gain_controller.h @@ -13,9 +13,9 @@ #include +#include "api/audio/audio_processing.h" +#include "api/audio/audio_view.h" #include "modules/audio_processing/agc2/gain_applier.h" -#include "modules/audio_processing/include/audio_frame_view.h" -#include "modules/audio_processing/include/audio_processing.h" namespace webrtc { @@ -46,7 +46,7 @@ class AdaptiveDigitalGainController { // Analyzes `info`, updates the digital gain and applies it to a 10 ms // `frame`. Supports any sample rate supported by APM. 
- void Process(const FrameInfo& info, AudioFrameView frame); + void Process(const FrameInfo& info, DeinterleavedView frame); private: ApmDataDumper* const apm_data_dumper_; diff --git a/modules/audio_processing/agc2/adaptive_digital_gain_controller_unittest.cc b/modules/audio_processing/agc2/adaptive_digital_gain_controller_unittest.cc index e95cbb5067..1647a5b6f9 100644 --- a/modules/audio_processing/agc2/adaptive_digital_gain_controller_unittest.cc +++ b/modules/audio_processing/agc2/adaptive_digital_gain_controller_unittest.cc @@ -11,12 +11,13 @@ #include "modules/audio_processing/agc2/adaptive_digital_gain_controller.h" #include +#include #include +#include "api/audio/audio_processing.h" #include "common_audio/include/audio_util.h" #include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/agc2/vector_float_frame.h" -#include "modules/audio_processing/include/audio_processing.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/gunit.h" @@ -83,7 +84,7 @@ TEST(GainController2AdaptiveDigitalGainControllerTest, // Make one call with reasonable audio level values and settings. VectorFloatFrame fake_audio(kStereo, kFrameLen10ms48kHz, 10000.0f); helper.gain_applier->Process(GetFrameInfoToNotAdapt(kDefaultConfig), - fake_audio.float_frame_view()); + fake_audio.view()); } // Checks that the maximum allowed gain is applied. @@ -93,15 +94,17 @@ TEST(GainController2AdaptiveDigitalGainControllerTest, MaxGainApplied) { GetMaxGainChangePerFrameDb( kDefaultConfig.max_gain_change_db_per_second)) + kNumExtraFrames; - - GainApplierHelper helper(kDefaultConfig, kAdjacentSpeechFramesThreshold); + constexpr AdaptiveDigitalConfig kConfig = AdaptiveDigitalConfig{ + // Increase from the default in order to reach the maximum gain. 
+ .max_output_noise_level_dbfs = -40.0f}; + GainApplierHelper helper(kConfig, kAdjacentSpeechFramesThreshold); AdaptiveDigitalGainController::FrameInfo info = - GetFrameInfoToNotAdapt(kDefaultConfig); + GetFrameInfoToNotAdapt(kConfig); info.speech_level_dbfs = -60.0f; float applied_gain; for (int i = 0; i < kNumFramesToAdapt; ++i) { VectorFloatFrame fake_audio(kMono, kFrameLen10ms8kHz, 1.0f); - helper.gain_applier->Process(info, fake_audio.float_frame_view()); + helper.gain_applier->Process(info, fake_audio.view()); applied_gain = fake_audio.float_frame_view().channel(0)[0]; } const float applied_gain_db = 20.0f * std::log10f(applied_gain); @@ -127,8 +130,8 @@ TEST(GainController2AdaptiveDigitalGainControllerTest, GainDoesNotChangeFast) { AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = initial_level_dbfs; - helper.gain_applier->Process(info, fake_audio.float_frame_view()); - float current_gain_linear = fake_audio.float_frame_view().channel(0)[0]; + helper.gain_applier->Process(info, fake_audio.view()); + float current_gain_linear = fake_audio.view()[0][0]; EXPECT_LE(std::abs(current_gain_linear - last_gain_linear), max_change_per_frame_linear); last_gain_linear = current_gain_linear; @@ -141,8 +144,8 @@ TEST(GainController2AdaptiveDigitalGainControllerTest, GainDoesNotChangeFast) { AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = 0.f; - helper.gain_applier->Process(info, fake_audio.float_frame_view()); - float current_gain_linear = fake_audio.float_frame_view().channel(0)[0]; + helper.gain_applier->Process(info, fake_audio.view()); + float current_gain_linear = fake_audio.view()[0][0]; EXPECT_LE(std::abs(current_gain_linear - last_gain_linear), max_change_per_frame_linear); last_gain_linear = current_gain_linear; @@ -158,10 +161,10 @@ TEST(GainController2AdaptiveDigitalGainControllerTest, GainIsRampedInAFrame) { AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = initial_level_dbfs; - helper.gain_applier->Process(info, fake_audio.float_frame_view()); + helper.gain_applier->Process(info, fake_audio.view()); float maximal_difference = 0.0f; float current_value = 1.0f * DbToRatio(kDefaultConfig.initial_gain_db); - for (const auto& x : fake_audio.float_frame_view().channel(0)) { + for (const auto& x : fake_audio.view()[0]) { const float difference = std::abs(x - current_value); maximal_difference = std::max(maximal_difference, difference); current_value = x; @@ -193,13 +196,13 @@ TEST(GainController2AdaptiveDigitalGainControllerTest, NoiseLimitsGain) { GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = initial_level_dbfs; info.noise_rms_dbfs = kWithNoiseDbfs; - helper.gain_applier->Process(info, fake_audio.float_frame_view()); + auto fake_view = fake_audio.view(); + helper.gain_applier->Process(info, fake_view); // Wait so that the adaptive gain applier has time to lower the gain. 
if (i > num_initial_frames) { const float maximal_ratio = - *std::max_element(fake_audio.float_frame_view().channel(0).begin(), - fake_audio.float_frame_view().channel(0).end()); + *std::max_element(fake_view[0].begin(), fake_view[0].end()); EXPECT_NEAR(maximal_ratio, 1.0f, 0.001f); } @@ -215,7 +218,7 @@ TEST(GainController2AdaptiveDigitalGainControllerTest, AdaptiveDigitalGainController::FrameInfo info = GetFrameInfoToNotAdapt(kDefaultConfig); info.speech_level_dbfs = 5.0f; - helper.gain_applier->Process(info, fake_audio.float_frame_view()); + helper.gain_applier->Process(info, fake_audio.view()); } TEST(GainController2AdaptiveDigitalGainControllerTest, AudioLevelLimitsGain) { @@ -237,13 +240,13 @@ TEST(GainController2AdaptiveDigitalGainControllerTest, AudioLevelLimitsGain) { info.speech_level_dbfs = initial_level_dbfs; info.limiter_envelope_dbfs = 1.0f; info.speech_level_reliable = false; - helper.gain_applier->Process(info, fake_audio.float_frame_view()); + auto fake_view = fake_audio.view(); + helper.gain_applier->Process(info, fake_view); // Wait so that the adaptive gain applier has time to lower the gain. if (i > num_initial_frames) { const float maximal_ratio = - *std::max_element(fake_audio.float_frame_view().channel(0).begin(), - fake_audio.float_frame_view().channel(0).end()); + *std::max_element(fake_view[0].begin(), fake_view[0].end()); EXPECT_NEAR(maximal_ratio, 1.0f, 0.001f); } @@ -269,8 +272,8 @@ TEST_P(AdaptiveDigitalGainControllerParametrizedTest, for (int i = 0; i < adjacent_speech_frames_threshold(); ++i) { SCOPED_TRACE(i); VectorFloatFrame audio(kMono, kFrameLen10ms48kHz, 1.0f); - helper.gain_applier->Process(info, audio.float_frame_view()); - const float gain = audio.float_frame_view().channel(0)[0]; + helper.gain_applier->Process(info, audio.view()); + const float gain = audio.view()[0][0]; if (i > 0) { EXPECT_EQ(prev_gain, gain); // No gain increase applied. } @@ -291,16 +294,16 @@ TEST_P(AdaptiveDigitalGainControllerParametrizedTest, for (int i = 0; i < adjacent_speech_frames_threshold(); ++i) { SCOPED_TRACE(i); VectorFloatFrame audio(kMono, kFrameLen10ms48kHz, 1.0f); - helper.gain_applier->Process(info, audio.float_frame_view()); - prev_gain = audio.float_frame_view().channel(0)[0]; + helper.gain_applier->Process(info, audio.view()); + prev_gain = audio.view()[0][0]; } // Process one more speech frame. VectorFloatFrame audio(kMono, kFrameLen10ms48kHz, 1.0f); - helper.gain_applier->Process(info, audio.float_frame_view()); + helper.gain_applier->Process(info, audio.view()); // An increased gain has been applied. 
- EXPECT_GT(audio.float_frame_view().channel(0)[0], prev_gain); + EXPECT_GT(audio.view()[0][0], prev_gain); } INSTANTIATE_TEST_SUITE_P( diff --git a/modules/audio_processing/agc2/biquad_filter.cc b/modules/audio_processing/agc2/biquad_filter.cc index c1b80d7320..e2e112904c 100644 --- a/modules/audio_processing/agc2/biquad_filter.cc +++ b/modules/audio_processing/agc2/biquad_filter.cc @@ -28,8 +28,7 @@ void BiQuadFilter::Reset() { state_ = {}; } -void BiQuadFilter::Process(rtc::ArrayView x, - rtc::ArrayView y) { +void BiQuadFilter::Process(ArrayView x, ArrayView y) { RTC_DCHECK_EQ(x.size(), y.size()); const float config_a0 = config_.a[0]; const float config_a1 = config_.a[1]; diff --git a/modules/audio_processing/agc2/biquad_filter.h b/modules/audio_processing/agc2/biquad_filter.h index 5273ff9386..766a750ea8 100644 --- a/modules/audio_processing/agc2/biquad_filter.h +++ b/modules/audio_processing/agc2/biquad_filter.h @@ -41,7 +41,7 @@ class BiQuadFilter { // Filters `x` and writes the output in `y`, which must have the same length // of `x`. In-place processing is supported. - void Process(rtc::ArrayView x, rtc::ArrayView y); + void Process(ArrayView x, ArrayView y); private: Config config_; diff --git a/modules/audio_processing/agc2/biquad_filter_unittest.cc b/modules/audio_processing/agc2/biquad_filter_unittest.cc index a53036b08e..8ef85744d0 100644 --- a/modules/audio_processing/agc2/biquad_filter_unittest.cc +++ b/modules/audio_processing/agc2/biquad_filter_unittest.cc @@ -56,11 +56,11 @@ constexpr FloatArraySequence kBiQuadOutputSeq = { {{24.84286614f, -62.18094158f, 57.91488056f, -106.65685933f, 13.38760103f, -36.60367134f, -94.44880104f, -3.59920354f}}}}; -// Fails for every pair from two equally sized rtc::ArrayView views such -// that their relative error is above a given threshold. If the expected value -// of a pair is 0, `tolerance` is used to check the absolute error. -void ExpectNearRelative(rtc::ArrayView expected, - rtc::ArrayView computed, +// Fails for every pair from two equally sized webrtc::ArrayView views +// such that their relative error is above a given threshold. If the expected +// value of a pair is 0, `tolerance` is used to check the absolute error. +void ExpectNearRelative(ArrayView expected, + ArrayView computed, const float tolerance) { // The relative error is undefined when the expected value is 0. // When that happens, check the absolute error instead. `safe_den` is used diff --git a/modules/audio_processing/agc2/clipping_predictor.cc b/modules/audio_processing/agc2/clipping_predictor.cc index fd759c63e8..77f928d2bd 100644 --- a/modules/audio_processing/agc2/clipping_predictor.cc +++ b/modules/audio_processing/agc2/clipping_predictor.cc @@ -131,11 +131,11 @@ class ClippingEventPredictor : public ClippingPredictor { // if at least `GetMinFramesProcessed()` frames have been processed since the // last reset and a clipping event is predicted. `level`, `min_mic_level`, and // `max_mic_level` are limited to [0, 255] and `default_step` to [1, 255]. 
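With the migration above, EstimateClippedLevelStep() (whose signature change follows) reports its recommendation through std::optional<int>, so callers check for a value before touching the analog level. A minimal sketch; the helper function, the step value and the level limits are assumptions, only the call signature comes from this file:

#include <optional>

#include "modules/audio_processing/agc2/clipping_predictor.h"

// Lowers `volume` if clipping is predicted for channel 0; assumes the
// predictor has already analyzed enough recent frames and that `volume`
// is a valid analog mic level in [0, 255].
void MaybeLowerVolume(webrtc::ClippingPredictor& predictor, int& volume) {
  std::optional<int> step = predictor.EstimateClippedLevelStep(
      /*channel=*/0, /*level=*/volume, /*default_step=*/15,
      /*min_mic_level=*/0, /*max_mic_level=*/255);
  if (step.has_value()) {
    volume -= *step;
  }
}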
- absl::optional EstimateClippedLevelStep(int channel, - int level, - int default_step, - int min_mic_level, - int max_mic_level) const { + std::optional EstimateClippedLevelStep(int channel, + int level, + int default_step, + int min_mic_level, + int max_mic_level) const { RTC_CHECK_GE(channel, 0); RTC_CHECK_LT(channel, ch_buffers_.size()); RTC_DCHECK_GE(level, 0); @@ -147,17 +147,17 @@ class ClippingEventPredictor : public ClippingPredictor { RTC_DCHECK_GE(max_mic_level, 0); RTC_DCHECK_LE(max_mic_level, 255); if (level <= min_mic_level) { - return absl::nullopt; + return std::nullopt; } if (PredictClippingEvent(channel)) { const int new_level = - rtc::SafeClamp(level - default_step, min_mic_level, max_mic_level); + SafeClamp(level - default_step, min_mic_level, max_mic_level); const int step = level - new_level; if (step > 0) { return step; } } - return absl::nullopt; + return std::nullopt; } private: @@ -271,11 +271,11 @@ class ClippingPeakPredictor : public ClippingPredictor { // least `GetMinFramesProcessed()` frames have been processed since the last // reset and a clipping event is predicted. `level`, `min_mic_level`, and // `max_mic_level` are limited to [0, 255] and `default_step` to [1, 255]. - absl::optional EstimateClippedLevelStep(int channel, - int level, - int default_step, - int min_mic_level, - int max_mic_level) const { + std::optional EstimateClippedLevelStep(int channel, + int level, + int default_step, + int min_mic_level, + int max_mic_level) const { RTC_DCHECK_GE(channel, 0); RTC_DCHECK_LT(channel, ch_buffers_.size()); RTC_DCHECK_GE(level, 0); @@ -287,29 +287,29 @@ class ClippingPeakPredictor : public ClippingPredictor { RTC_DCHECK_GE(max_mic_level, 0); RTC_DCHECK_LE(max_mic_level, 255); if (level <= min_mic_level) { - return absl::nullopt; + return std::nullopt; } - absl::optional estimate_db = EstimatePeakValue(channel); + std::optional estimate_db = EstimatePeakValue(channel); if (estimate_db.has_value() && estimate_db.value() > clipping_threshold_) { int step = 0; if (!adaptive_step_estimation_) { step = default_step; } else { const int estimated_gain_change = - rtc::SafeClamp(-static_cast(std::ceil(estimate_db.value())), - -kClippingPredictorMaxGainChange, 0); + SafeClamp(-static_cast(std::ceil(estimate_db.value())), + -kClippingPredictorMaxGainChange, 0); step = std::max(level - ComputeVolumeUpdate(estimated_gain_change, level, min_mic_level, max_mic_level), default_step); } const int new_level = - rtc::SafeClamp(level - step, min_mic_level, max_mic_level); + SafeClamp(level - step, min_mic_level, max_mic_level); if (level > new_level) { return level - new_level; } } - return absl::nullopt; + return std::nullopt; } private: @@ -319,18 +319,18 @@ class ClippingPeakPredictor : public ClippingPredictor { // Predicts clipping sample peaks based on the processed audio frames. // Returns the estimated peak value if clipping is predicted. Otherwise - // returns absl::nullopt. - absl::optional EstimatePeakValue(int channel) const { + // returns std::nullopt. 
+ std::optional EstimatePeakValue(int channel) const { const auto reference_metrics = ch_buffers_[channel]->ComputePartialMetrics( reference_window_delay_, reference_window_length_); if (!reference_metrics.has_value()) { - return absl::nullopt; + return std::nullopt; } const auto metrics = ch_buffers_[channel]->ComputePartialMetrics(0, window_length_); if (!metrics.has_value() || !(FloatS16ToDbfs(metrics.value().max) > clipping_threshold_)) { - return absl::nullopt; + return std::nullopt; } const float reference_crest_factor = ComputeCrestFactor(reference_metrics.value()); diff --git a/modules/audio_processing/agc2/clipping_predictor.h b/modules/audio_processing/agc2/clipping_predictor.h index 14612508c0..3fd1086631 100644 --- a/modules/audio_processing/agc2/clipping_predictor.h +++ b/modules/audio_processing/agc2/clipping_predictor.h @@ -12,11 +12,11 @@ #define MODULES_AUDIO_PROCESSING_AGC2_CLIPPING_PREDICTOR_H_ #include +#include #include -#include "absl/types/optional.h" +#include "api/audio/audio_processing.h" #include "modules/audio_processing/include/audio_frame_view.h" -#include "modules/audio_processing/include/audio_processing.h" namespace webrtc { @@ -35,12 +35,12 @@ class ClippingPredictor { // Predicts if clipping is going to occur for the specified `channel` in the // near-future and, if so, it returns a recommended analog mic level decrease - // step. Returns absl::nullopt if clipping is not predicted. + // step. Returns std::nullopt if clipping is not predicted. // `level` is the current analog mic level, `default_step` is the amount the // mic level is lowered by the analog controller with every clipping event and // `min_mic_level` and `max_mic_level` is the range of allowed analog mic // levels. - virtual absl::optional EstimateClippedLevelStep( + virtual std::optional EstimateClippedLevelStep( int channel, int level, int default_step, diff --git a/modules/audio_processing/agc2/clipping_predictor_level_buffer.cc b/modules/audio_processing/agc2/clipping_predictor_level_buffer.cc index fe4cf2a154..acc114c476 100644 --- a/modules/audio_processing/agc2/clipping_predictor_level_buffer.cc +++ b/modules/audio_processing/agc2/clipping_predictor_level_buffer.cc @@ -50,7 +50,7 @@ void ClippingPredictorLevelBuffer::Push(Level level) { } // TODO(bugs.webrtc.org/12774): Optimize partial computation for long buffers. 
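ComputePartialMetrics(), defined next, likewise returns a std::optional after the migration: callers get either an {average, max} level pair or std::nullopt when the buffer does not yet hold `delay + num_items` pushed levels. A small hedged sketch; the helper name and the dBFS conversion are illustrative additions, not taken from this patch:

#include <optional>

#include "common_audio/include/audio_util.h"
#include "modules/audio_processing/agc2/clipping_predictor_level_buffer.h"

// Hypothetical helper: max level (in dBFS) over the most recently pushed
// item, or std::nullopt when too few items are available.
std::optional<float> LatestMaxDbfs(
    const webrtc::ClippingPredictorLevelBuffer& buffer) {
  auto metrics = buffer.ComputePartialMetrics(/*delay=*/0, /*num_items=*/1);
  if (!metrics.has_value()) {
    return std::nullopt;
  }
  return webrtc::FloatS16ToDbfs(metrics->max);
}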
-absl::optional +std::optional ClippingPredictorLevelBuffer::ComputePartialMetrics(int delay, int num_items) const { RTC_DCHECK_GE(delay, 0); @@ -59,7 +59,7 @@ ClippingPredictorLevelBuffer::ComputePartialMetrics(int delay, RTC_DCHECK_LE(num_items, Capacity()); RTC_DCHECK_LE(delay + num_items, Capacity()); if (delay + num_items > Size()) { - return absl::nullopt; + return std::nullopt; } float sum = 0.0f; float max = 0.0f; @@ -71,7 +71,7 @@ ClippingPredictorLevelBuffer::ComputePartialMetrics(int delay, sum += data_[idx].average; max = std::fmax(data_[idx].max, max); } - return absl::optional({sum / static_cast(num_items), max}); + return std::optional({sum / static_cast(num_items), max}); } } // namespace webrtc diff --git a/modules/audio_processing/agc2/clipping_predictor_level_buffer.h b/modules/audio_processing/agc2/clipping_predictor_level_buffer.h index c9032773a6..21e9b461a7 100644 --- a/modules/audio_processing/agc2/clipping_predictor_level_buffer.h +++ b/modules/audio_processing/agc2/clipping_predictor_level_buffer.h @@ -12,10 +12,9 @@ #define MODULES_AUDIO_PROCESSING_AGC2_CLIPPING_PREDICTOR_LEVEL_BUFFER_H_ #include +#include #include -#include "absl/types/optional.h" - namespace webrtc { // A circular buffer to store frame-wise `Level` items for clipping prediction. @@ -58,7 +57,7 @@ class ClippingPredictorLevelBuffer { // from `delay` to `delay` - `num_items` (a delay equal to zero corresponds // to the most recently pushed item). The value of `delay` is limited to // [0, N] and `num_items` to [1, M] where N + M is the capacity of the buffer. - absl::optional ComputePartialMetrics(int delay, int num_items) const; + std::optional ComputePartialMetrics(int delay, int num_items) const; private: int tail_; diff --git a/modules/audio_processing/agc2/clipping_predictor_level_buffer_unittest.cc b/modules/audio_processing/agc2/clipping_predictor_level_buffer_unittest.cc index 7af9a436c9..4f20ddb0b7 100644 --- a/modules/audio_processing/agc2/clipping_predictor_level_buffer_unittest.cc +++ b/modules/audio_processing/agc2/clipping_predictor_level_buffer_unittest.cc @@ -108,9 +108,9 @@ TEST(ClippingPredictorLevelBufferTest, CheckMetricsAfterTooFewItems) { buffer.Push({1, 2}); buffer.Push({3, 6}); EXPECT_EQ(buffer.ComputePartialMetrics(/*delay=*/0, /*num_items=*/3), - absl::nullopt); + std::nullopt); EXPECT_EQ(buffer.ComputePartialMetrics(/*delay=*/2, /*num_items=*/1), - absl::nullopt); + std::nullopt); } TEST(ClippingPredictorLevelBufferTest, CheckMetricsAfterReset) { diff --git a/modules/audio_processing/agc2/clipping_predictor_unittest.cc b/modules/audio_processing/agc2/clipping_predictor_unittest.cc index af73107749..dbb6c23eb7 100644 --- a/modules/audio_processing/agc2/clipping_predictor_unittest.cc +++ b/modules/audio_processing/agc2/clipping_predictor_unittest.cc @@ -105,7 +105,7 @@ void CheckChannelEstimatesWithoutValue(int num_channels, SCOPED_TRACE(i); EXPECT_EQ(predictor.EstimateClippedLevelStep(i, level, default_step, min_mic_level, max_mic_level), - absl::nullopt); + std::nullopt); } } diff --git a/modules/audio_processing/agc2/cpu_features.cc b/modules/audio_processing/agc2/cpu_features.cc index cced7614bc..aa5a57834f 100644 --- a/modules/audio_processing/agc2/cpu_features.cc +++ b/modules/audio_processing/agc2/cpu_features.cc @@ -18,7 +18,7 @@ namespace webrtc { std::string AvailableCpuFeatures::ToString() const { char buf[64]; - rtc::SimpleStringBuilder builder(buf); + SimpleStringBuilder builder(buf); bool first = true; if (sse2) { builder << (first ? 
"SSE2" : "_SSE2"); diff --git a/modules/audio_processing/agc2/fixed_digital_level_estimator.cc b/modules/audio_processing/agc2/fixed_digital_level_estimator.cc index 1995b24913..20e8491c0e 100644 --- a/modules/audio_processing/agc2/fixed_digital_level_estimator.cc +++ b/modules/audio_processing/agc2/fixed_digital_level_estimator.cc @@ -14,6 +14,7 @@ #include #include "api/array_view.h" +#include "api/audio/audio_frame.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" @@ -34,14 +35,17 @@ constexpr float kDecayFilterConstant = 0.9971259f; } // namespace FixedDigitalLevelEstimator::FixedDigitalLevelEstimator( - int sample_rate_hz, + size_t samples_per_channel, ApmDataDumper* apm_data_dumper) : apm_data_dumper_(apm_data_dumper), filter_state_level_(kInitialFilterStateLevel) { - SetSampleRate(sample_rate_hz); + SetSamplesPerChannel(samples_per_channel); CheckParameterCombination(); RTC_DCHECK(apm_data_dumper_); - apm_data_dumper_->DumpRaw("agc2_level_estimator_samplerate", sample_rate_hz); + // Convert `samples_per_channel` to sample rate for + // `agc2_level_estimator_samplerate`. + apm_data_dumper_->DumpRaw("agc2_level_estimator_samplerate", + samples_per_channel * kDefaultAudioBuffersPerSec); } void FixedDigitalLevelEstimator::CheckParameterCombination() { @@ -52,15 +56,15 @@ void FixedDigitalLevelEstimator::CheckParameterCombination() { } std::array FixedDigitalLevelEstimator::ComputeLevel( - const AudioFrameView& float_frame) { + DeinterleavedView float_frame) { RTC_DCHECK_GT(float_frame.num_channels(), 0); RTC_DCHECK_EQ(float_frame.samples_per_channel(), samples_in_frame_); // Compute max envelope without smoothing. std::array envelope{}; - for (int channel_idx = 0; channel_idx < float_frame.num_channels(); + for (size_t channel_idx = 0; channel_idx < float_frame.num_channels(); ++channel_idx) { - const auto channel = float_frame.channel(channel_idx); + const auto channel = float_frame[channel_idx]; for (int sub_frame = 0; sub_frame < kSubFramesInFrame; ++sub_frame) { for (int sample_in_sub_frame = 0; sample_in_sub_frame < samples_in_sub_frame_; ++sample_in_sub_frame) { @@ -95,7 +99,7 @@ std::array FixedDigitalLevelEstimator::ComputeLevel( // Dump data for debug. RTC_DCHECK(apm_data_dumper_); - const auto channel = float_frame.channel(0); + const auto channel = float_frame[0]; apm_data_dumper_->DumpRaw("agc2_level_estimator_samples", samples_in_sub_frame_, &channel[sub_frame * samples_in_sub_frame_]); @@ -106,11 +110,10 @@ std::array FixedDigitalLevelEstimator::ComputeLevel( return envelope; } -void FixedDigitalLevelEstimator::SetSampleRate(int sample_rate_hz) { - samples_in_frame_ = - rtc::CheckedDivExact(sample_rate_hz * kFrameDurationMs, 1000); - samples_in_sub_frame_ = - rtc::CheckedDivExact(samples_in_frame_, kSubFramesInFrame); +void FixedDigitalLevelEstimator::SetSamplesPerChannel( + size_t samples_per_channel) { + samples_in_frame_ = static_cast(samples_per_channel); + samples_in_sub_frame_ = CheckedDivExact(samples_in_frame_, kSubFramesInFrame); CheckParameterCombination(); } diff --git a/modules/audio_processing/agc2/fixed_digital_level_estimator.h b/modules/audio_processing/agc2/fixed_digital_level_estimator.h index d26b55950c..1669acdc71 100644 --- a/modules/audio_processing/agc2/fixed_digital_level_estimator.h +++ b/modules/audio_processing/agc2/fixed_digital_level_estimator.h @@ -25,12 +25,16 @@ class ApmDataDumper; // filtering. 
class FixedDigitalLevelEstimator { public: - // Sample rates are allowed if the number of samples in a frame - // (sample_rate_hz * kFrameDurationMs / 1000) is divisible by + // `samples_per_channel` is expected to be derived from this formula: + // sample_rate_hz * kFrameDurationMs / 1000 + // or, for a 10ms duration: + // sample_rate_hz / 100 + // I.e. the number of samples for 10ms of the given sample rate. The + // expectation is that samples per channel is divisible by // kSubFramesInSample. For kFrameDurationMs=10 and - // kSubFramesInSample=20, this means that sample_rate_hz has to be - // divisible by 2000. - FixedDigitalLevelEstimator(int sample_rate_hz, + // kSubFramesInSample=20, this means that the original sample rate has to be + // divisible by 2000 and therefore `samples_per_channel` by 20. + FixedDigitalLevelEstimator(size_t samples_per_channel, ApmDataDumper* apm_data_dumper); FixedDigitalLevelEstimator(const FixedDigitalLevelEstimator&) = delete; @@ -42,11 +46,11 @@ class FixedDigitalLevelEstimator { // ms of audio produces a level estimates in the same scale. The // level estimate contains kSubFramesInFrame values. std::array ComputeLevel( - const AudioFrameView& float_frame); + DeinterleavedView float_frame); // Rate may be changed at any time (but not concurrently) from the // value passed to the constructor. The class is not thread safe. - void SetSampleRate(int sample_rate_hz); + void SetSamplesPerChannel(size_t samples_per_channel); // Resets the level estimator internal state. void Reset(); diff --git a/modules/audio_processing/agc2/fixed_digital_level_estimator_unittest.cc b/modules/audio_processing/agc2/fixed_digital_level_estimator_unittest.cc index 97b421d04c..c76db85a5c 100644 --- a/modules/audio_processing/agc2/fixed_digital_level_estimator_unittest.cc +++ b/modules/audio_processing/agc2/fixed_digital_level_estimator_unittest.cc @@ -12,6 +12,7 @@ #include +#include "api/audio/audio_frame.h" #include "common_audio/include/audio_util.h" #include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/agc2/agc2_testing_common.h" @@ -26,21 +27,21 @@ constexpr float kInputLevel = 10000.f; // Run audio at specified settings through the level estimator, and // verify that the output level falls within the bounds. -void TestLevelEstimator(int sample_rate_hz, +void TestLevelEstimator(size_t samples_per_channel, int num_channels, float input_level_linear_scale, float expected_min, float expected_max) { ApmDataDumper apm_data_dumper(0); - FixedDigitalLevelEstimator level_estimator(sample_rate_hz, &apm_data_dumper); + FixedDigitalLevelEstimator level_estimator(samples_per_channel, + &apm_data_dumper); const VectorFloatFrame vectors_with_float_frame( - num_channels, rtc::CheckedDivExact(sample_rate_hz, 100), - input_level_linear_scale); + num_channels, samples_per_channel, input_level_linear_scale); for (int i = 0; i < 500; ++i) { - const auto level = level_estimator.ComputeLevel( - vectors_with_float_frame.float_frame_view()); + const auto level = + level_estimator.ComputeLevel(vectors_with_float_frame.view()); // Give the estimator some time to ramp up. if (i < 50) { @@ -56,7 +57,7 @@ void TestLevelEstimator(int sample_rate_hz, // Returns time it takes for the level estimator to decrease its level // estimate by 'level_reduction_db'. 
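The constructor documented above now takes a per-channel frame size instead of a sample rate; for the 10 ms frames used here that is simply sample_rate_hz / 100 (the updated tests compute the same value through SampleRateToDefaultChannelSize()). A minimal construction sketch, assuming 48 kHz:

#include "modules/audio_processing/agc2/fixed_digital_level_estimator.h"
#include "modules/audio_processing/logging/apm_data_dumper.h"

void BuildLevelEstimator() {
  // 10 ms of audio at an assumed 48 kHz: 48000 * 10 / 1000 = 480 samples.
  constexpr size_t kSamplesPerChannel = 480;
  webrtc::ApmDataDumper data_dumper(0);
  webrtc::FixedDigitalLevelEstimator estimator(kSamplesPerChannel,
                                               &data_dumper);
  // ComputeLevel() now expects DeinterleavedView<float> frames whose
  // samples_per_channel() matches the value passed here.
}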
-float TimeMsToDecreaseLevel(int sample_rate_hz, +float TimeMsToDecreaseLevel(size_t samples_per_channel, int num_channels, float input_level_db, float level_reduction_db) { @@ -64,29 +65,30 @@ float TimeMsToDecreaseLevel(int sample_rate_hz, RTC_DCHECK_GT(level_reduction_db, 0); const VectorFloatFrame vectors_with_float_frame( - num_channels, rtc::CheckedDivExact(sample_rate_hz, 100), input_level); + num_channels, samples_per_channel, input_level); ApmDataDumper apm_data_dumper(0); - FixedDigitalLevelEstimator level_estimator(sample_rate_hz, &apm_data_dumper); + FixedDigitalLevelEstimator level_estimator(samples_per_channel, + &apm_data_dumper); // Give the LevelEstimator plenty of time to ramp up and stabilize float last_level = 0.f; for (int i = 0; i < 500; ++i) { - const auto level_envelope = level_estimator.ComputeLevel( - vectors_with_float_frame.float_frame_view()); + const auto level_envelope = + level_estimator.ComputeLevel(vectors_with_float_frame.view()); last_level = *level_envelope.rbegin(); } // Set input to 0. - VectorFloatFrame vectors_with_zero_float_frame( - num_channels, rtc::CheckedDivExact(sample_rate_hz, 100), 0); + VectorFloatFrame vectors_with_zero_float_frame(num_channels, + samples_per_channel, 0); const float reduced_level_linear = DbfsToFloatS16(input_level_db - level_reduction_db); int sub_frames_until_level_reduction = 0; while (last_level > reduced_level_linear) { - const auto level_envelope = level_estimator.ComputeLevel( - vectors_with_zero_float_frame.float_frame_view()); + const auto level_envelope = + level_estimator.ComputeLevel(vectors_with_zero_float_frame.view()); for (const auto& v : level_envelope) { EXPECT_LT(v, last_level); sub_frames_until_level_reduction++; @@ -102,21 +104,22 @@ float TimeMsToDecreaseLevel(int sample_rate_hz, } // namespace TEST(GainController2FixedDigitalLevelEstimator, EstimatorShouldNotCrash) { - TestLevelEstimator(8000, 1, 0, std::numeric_limits::lowest(), + TestLevelEstimator(SampleRateToDefaultChannelSize(8000u), 1, 0, + std::numeric_limits::lowest(), std::numeric_limits::max()); } TEST(GainController2FixedDigitalLevelEstimator, EstimatorShouldEstimateConstantLevel) { - TestLevelEstimator(10000, 1, kInputLevel, kInputLevel * 0.99, - kInputLevel * 1.01); + TestLevelEstimator(SampleRateToDefaultChannelSize(10000u), 1, kInputLevel, + kInputLevel * 0.99, kInputLevel * 1.01); } TEST(GainController2FixedDigitalLevelEstimator, EstimatorShouldEstimateConstantLevelForManyChannels) { constexpr size_t num_channels = 10; - TestLevelEstimator(20000, num_channels, kInputLevel, kInputLevel * 0.99, - kInputLevel * 1.01); + TestLevelEstimator(SampleRateToDefaultChannelSize(20000u), num_channels, + kInputLevel, kInputLevel * 0.99, kInputLevel * 1.01); } TEST(GainController2FixedDigitalLevelEstimator, TimeToDecreaseForLowLevel) { @@ -125,7 +128,8 @@ TEST(GainController2FixedDigitalLevelEstimator, TimeToDecreaseForLowLevel) { constexpr float kExpectedTime = kLevelReductionDb * test::kDecayMs; const float time_to_decrease = - TimeMsToDecreaseLevel(22000, 1, kInitialLowLevel, kLevelReductionDb); + TimeMsToDecreaseLevel(SampleRateToDefaultChannelSize(22000u), 1, + kInitialLowLevel, kLevelReductionDb); EXPECT_LE(kExpectedTime * 0.9, time_to_decrease); EXPECT_LE(time_to_decrease, kExpectedTime * 1.1); @@ -136,8 +140,8 @@ TEST(GainController2FixedDigitalLevelEstimator, constexpr float kLevelReductionDb = 25; constexpr float kExpectedTime = kLevelReductionDb * test::kDecayMs; - const float time_to_decrease = - TimeMsToDecreaseLevel(26000, 1, 0, 
kLevelReductionDb); + const float time_to_decrease = TimeMsToDecreaseLevel( + SampleRateToDefaultChannelSize(26000u), 1, 0, kLevelReductionDb); EXPECT_LE(kExpectedTime * 0.9, time_to_decrease); EXPECT_LE(time_to_decrease, kExpectedTime * 1.1); @@ -150,7 +154,8 @@ TEST(GainController2FixedDigitalLevelEstimator, constexpr size_t kNumChannels = 10; const float time_to_decrease = - TimeMsToDecreaseLevel(28000, kNumChannels, 0, kLevelReductionDb); + TimeMsToDecreaseLevel(SampleRateToDefaultChannelSize(28000u), + kNumChannels, 0, kLevelReductionDb); EXPECT_LE(kExpectedTime * 0.9, time_to_decrease); EXPECT_LE(time_to_decrease, kExpectedTime * 1.1); diff --git a/modules/audio_processing/agc2/gain_applier.cc b/modules/audio_processing/agc2/gain_applier.cc index f9e276d3a8..927bb554c4 100644 --- a/modules/audio_processing/agc2/gain_applier.cc +++ b/modules/audio_processing/agc2/gain_applier.cc @@ -10,7 +10,7 @@ #include "modules/audio_processing/agc2/gain_applier.h" -#include "api/array_view.h" +#include "api/audio/audio_view.h" #include "modules/audio_processing/agc2/agc2_common.h" #include "rtc_base/numerics/safe_minmax.h" @@ -24,11 +24,11 @@ bool GainCloseToOne(float gain_factor) { gain_factor <= 1.f + 1.f / kMaxFloatS16Value; } -void ClipSignal(AudioFrameView signal) { - for (int k = 0; k < signal.num_channels(); ++k) { - rtc::ArrayView channel_view = signal.channel(k); +void ClipSignal(DeinterleavedView signal) { + for (size_t k = 0; k < signal.num_channels(); ++k) { + MonoView channel_view = signal[k]; for (auto& sample : channel_view) { - sample = rtc::SafeClamp(sample, kMinFloatS16Value, kMaxFloatS16Value); + sample = SafeClamp(sample, kMinFloatS16Value, kMaxFloatS16Value); } } } @@ -36,7 +36,7 @@ void ClipSignal(AudioFrameView signal) { void ApplyGainWithRamping(float last_gain_linear, float gain_at_end_of_frame_linear, float inverse_samples_per_channel, - AudioFrameView float_frame) { + DeinterleavedView float_frame) { // Do not modify the signal. if (last_gain_linear == gain_at_end_of_frame_linear && GainCloseToOne(gain_at_end_of_frame_linear)) { @@ -45,8 +45,8 @@ void ApplyGainWithRamping(float last_gain_linear, // Gain is constant and different from 1. if (last_gain_linear == gain_at_end_of_frame_linear) { - for (int k = 0; k < float_frame.num_channels(); ++k) { - rtc::ArrayView channel_view = float_frame.channel(k); + for (size_t k = 0; k < float_frame.num_channels(); ++k) { + MonoView channel_view = float_frame[k]; for (auto& sample : channel_view) { sample *= gain_at_end_of_frame_linear; } @@ -57,12 +57,12 @@ void ApplyGainWithRamping(float last_gain_linear, // The gain changes. We have to change slowly to avoid discontinuities. 
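At the API level, the ramp described in the comment above is what a caller observes after SetGainFactor(): the new factor is approached linearly over the next ApplyGain() call. A usage sketch with the DeinterleavedView-based overload, reusing the VectorFloatFrame test helper; the function name, frame size and gain values are assumptions:

#include "modules/audio_processing/agc2/gain_applier.h"
#include "modules/audio_processing/agc2/vector_float_frame.h"

void RampGainOverOneFrame() {
  webrtc::VectorFloatFrame audio(/*num_channels=*/1,
                                 /*samples_per_channel=*/480, 1000.0f);
  webrtc::GainApplier gain_applier(/*hard_clip_samples=*/false,
                                   /*initial_gain_factor=*/1.0f);
  gain_applier.SetGainFactor(2.0f);
  // Within this call the applied gain moves linearly from 1.0 towards 2.0,
  // sample by sample; the next frame starts at the new factor.
  gain_applier.ApplyGain(audio.view());
}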
const float increment = (gain_at_end_of_frame_linear - last_gain_linear) * inverse_samples_per_channel; - float gain = last_gain_linear; - for (int i = 0; i < float_frame.samples_per_channel(); ++i) { - for (int ch = 0; ch < float_frame.num_channels(); ++ch) { - float_frame.channel(ch)[i] *= gain; + for (size_t ch = 0; ch < float_frame.num_channels(); ++ch) { + float gain = last_gain_linear; + for (float& sample : float_frame[ch]) { + sample *= gain; + gain += increment; } - gain += increment; } } @@ -73,7 +73,7 @@ GainApplier::GainApplier(bool hard_clip_samples, float initial_gain_factor) last_gain_factor_(initial_gain_factor), current_gain_factor_(initial_gain_factor) {} -void GainApplier::ApplyGain(AudioFrameView signal) { +void GainApplier::ApplyGain(DeinterleavedView signal) { if (static_cast(signal.samples_per_channel()) != samples_per_channel_) { Initialize(signal.samples_per_channel()); } diff --git a/modules/audio_processing/agc2/gain_applier.h b/modules/audio_processing/agc2/gain_applier.h index ba8a4a4cd2..82ae82eeef 100644 --- a/modules/audio_processing/agc2/gain_applier.h +++ b/modules/audio_processing/agc2/gain_applier.h @@ -13,6 +13,7 @@ #include +#include "api/audio/audio_view.h" #include "modules/audio_processing/include/audio_frame_view.h" namespace webrtc { @@ -20,10 +21,15 @@ class GainApplier { public: GainApplier(bool hard_clip_samples, float initial_gain_factor); - void ApplyGain(AudioFrameView signal); + void ApplyGain(DeinterleavedView signal); void SetGainFactor(float gain_factor); float GetGainFactor() const { return current_gain_factor_; } + [[deprecated("Use DeinterleavedView<> version")]] void ApplyGain( + AudioFrameView signal) { + ApplyGain(signal.view()); + } + private: void Initialize(int samples_per_channel); diff --git a/modules/audio_processing/agc2/gain_applier_unittest.cc b/modules/audio_processing/agc2/gain_applier_unittest.cc index 3296345e62..7548faa61b 100644 --- a/modules/audio_processing/agc2/gain_applier_unittest.cc +++ b/modules/audio_processing/agc2/gain_applier_unittest.cc @@ -15,6 +15,7 @@ #include #include +#include "api/audio/audio_view.h" #include "modules/audio_processing/agc2/vector_float_frame.h" #include "rtc_base/gunit.h" @@ -25,9 +26,9 @@ TEST(AutomaticGainController2GainApplier, InitialGainIsRespected) { VectorFloatFrame fake_audio(1, 1, initial_signal_level); GainApplier gain_applier(true, gain_factor); - gain_applier.ApplyGain(fake_audio.float_frame_view()); - EXPECT_NEAR(fake_audio.float_frame_view().channel(0)[0], - initial_signal_level * gain_factor, 0.1f); + auto fake_view = fake_audio.view(); + gain_applier.ApplyGain(fake_audio.view()); + EXPECT_NEAR(fake_view[0][0], initial_signal_level * gain_factor, 0.1f); } TEST(AutomaticGainController2GainApplier, ClippingIsDone) { @@ -36,9 +37,9 @@ TEST(AutomaticGainController2GainApplier, ClippingIsDone) { VectorFloatFrame fake_audio(1, 1, initial_signal_level); GainApplier gain_applier(true, gain_factor); - gain_applier.ApplyGain(fake_audio.float_frame_view()); - EXPECT_NEAR(fake_audio.float_frame_view().channel(0)[0], - std::numeric_limits::max(), 0.1f); + gain_applier.ApplyGain(fake_audio.view()); + EXPECT_NEAR(fake_audio.view()[0][0], std::numeric_limits::max(), + 0.1f); } TEST(AutomaticGainController2GainApplier, ClippingIsNotDone) { @@ -47,10 +48,10 @@ TEST(AutomaticGainController2GainApplier, ClippingIsNotDone) { VectorFloatFrame fake_audio(1, 1, initial_signal_level); GainApplier gain_applier(false, gain_factor); - gain_applier.ApplyGain(fake_audio.float_frame_view()); + 
gain_applier.ApplyGain(fake_audio.view()); - EXPECT_NEAR(fake_audio.float_frame_view().channel(0)[0], - initial_signal_level * gain_factor, 0.1f); + EXPECT_NEAR(fake_audio.view()[0][0], initial_signal_level * gain_factor, + 0.1f); } TEST(AutomaticGainController2GainApplier, RampingIsDone) { @@ -64,13 +65,13 @@ TEST(AutomaticGainController2GainApplier, RampingIsDone) { GainApplier gain_applier(false, initial_gain_factor); gain_applier.SetGainFactor(target_gain_factor); - gain_applier.ApplyGain(fake_audio.float_frame_view()); + gain_applier.ApplyGain(fake_audio.view()); // The maximal gain change should be close to that in linear interpolation. for (size_t channel = 0; channel < num_channels; ++channel) { float max_signal_change = 0.f; float last_signal_level = initial_signal_level; - for (const auto sample : fake_audio.float_frame_view().channel(channel)) { + for (const auto sample : fake_audio.view()[channel]) { const float current_change = fabs(last_signal_level - sample); max_signal_change = std::max(max_signal_change, current_change); last_signal_level = sample; @@ -84,10 +85,10 @@ TEST(AutomaticGainController2GainApplier, RampingIsDone) { // Next frame should have the desired level. VectorFloatFrame next_fake_audio_frame(num_channels, samples_per_channel, initial_signal_level); - gain_applier.ApplyGain(next_fake_audio_frame.float_frame_view()); + gain_applier.ApplyGain(next_fake_audio_frame.view()); // The last sample should have the new gain. - EXPECT_NEAR(next_fake_audio_frame.float_frame_view().channel(0)[0], + EXPECT_NEAR(next_fake_audio_frame.view()[0][0], initial_signal_level * target_gain_factor, 0.1f); } } // namespace webrtc diff --git a/modules/audio_processing/agc2/input_volume_controller.cc b/modules/audio_processing/agc2/input_volume_controller.cc index bcc650fb3e..557c1e67d1 100644 --- a/modules/audio_processing/agc2/input_volume_controller.cc +++ b/modules/audio_processing/agc2/input_volume_controller.cc @@ -116,8 +116,8 @@ int GetSpeechLevelRmsErrorDb(float speech_level_dbfs, constexpr float kMaxSpeechLevelDbfs = 30.0f; RTC_DCHECK_GE(speech_level_dbfs, kMinSpeechLevelDbfs); RTC_DCHECK_LE(speech_level_dbfs, kMaxSpeechLevelDbfs); - speech_level_dbfs = rtc::SafeClamp( - speech_level_dbfs, kMinSpeechLevelDbfs, kMaxSpeechLevelDbfs); + speech_level_dbfs = SafeClamp(speech_level_dbfs, kMinSpeechLevelDbfs, + kMaxSpeechLevelDbfs); int rms_error_db = 0; if (speech_level_dbfs > target_range_max_dbfs) { @@ -173,7 +173,7 @@ void MonoInputVolumeController::Initialize() { // previous update and the ratio of non-silence frames (i.e., frames with a // `speech_probability` higher than `speech_probability_threshold_`) is at least // `speech_ratio_threshold_`. -void MonoInputVolumeController::Process(absl::optional rms_error_db, +void MonoInputVolumeController::Process(std::optional rms_error_db, float speech_probability) { if (check_volume_on_next_process_) { check_volume_on_next_process_ = false; @@ -343,7 +343,7 @@ void MonoInputVolumeController::UpdateInputVolume(int rms_error_db) { // Prevent too large microphone input volume changes by clamping the RMS // error. 
rms_error_db = - rtc::SafeClamp(rms_error_db, -KMaxAbsRmsErrorDbfs, KMaxAbsRmsErrorDbfs); + SafeClamp(rms_error_db, -KMaxAbsRmsErrorDbfs, KMaxAbsRmsErrorDbfs); if (rms_error_db == 0) { return; } @@ -404,7 +404,7 @@ void InputVolumeController::Initialize() { clipping_rate_log_ = 0.0f; clipping_rate_log_counter_ = 0; - applied_input_volume_ = absl::nullopt; + applied_input_volume_ = std::nullopt; } void InputVolumeController::AnalyzeInputAudio(int applied_input_volume, @@ -498,13 +498,13 @@ void InputVolumeController::AnalyzeInputAudio(int applied_input_volume, AggregateChannelLevels(); } -absl::optional InputVolumeController::RecommendInputVolume( +std::optional InputVolumeController::RecommendInputVolume( float speech_probability, - absl::optional speech_level_dbfs) { + std::optional speech_level_dbfs) { // Only process if applied input volume is set. if (!applied_input_volume_.has_value()) { RTC_LOG(LS_ERROR) << "[AGC2] Applied input volume not set."; - return absl::nullopt; + return std::nullopt; } AggregateChannelLevels(); @@ -514,7 +514,7 @@ absl::optional InputVolumeController::RecommendInputVolume( return applied_input_volume_; } - absl::optional rms_error_db; + std::optional rms_error_db; if (speech_level_dbfs.has_value()) { // Compute the error for all frames (both speech and non-speech frames). rms_error_db = GetSpeechLevelRmsErrorDb( @@ -533,7 +533,7 @@ absl::optional InputVolumeController::RecommendInputVolume( recommended_input_volume_); } - applied_input_volume_ = absl::nullopt; + applied_input_volume_ = std::nullopt; return recommended_input_volume(); } diff --git a/modules/audio_processing/agc2/input_volume_controller.h b/modules/audio_processing/agc2/input_volume_controller.h index 21405542dc..60e76d8e04 100644 --- a/modules/audio_processing/agc2/input_volume_controller.h +++ b/modules/audio_processing/agc2/input_volume_controller.h @@ -12,13 +12,13 @@ #define MODULES_AUDIO_PROCESSING_AGC2_INPUT_VOLUME_CONTROLLER_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/audio/audio_processing.h" #include "modules/audio_processing/agc2/clipping_predictor.h" #include "modules/audio_processing/audio_buffer.h" -#include "modules/audio_processing/include/audio_processing.h" #include "rtc_base/gtest_prod_util.h" namespace webrtc { @@ -50,7 +50,7 @@ class InputVolumeController final { // Limited to values higher than 0. int clipped_wait_frames = 300; // Enables clipping prediction functionality. - bool enable_clipping_predictor = false; + bool enable_clipping_predictor = true; // Speech level target range (dBFS). If the speech level is in the range // [`target_range_min_dbfs`, `target_range_max_dbfs`], no input volume // adjustments are done based on the speech level. For speech levels below @@ -95,9 +95,9 @@ class InputVolumeController final { // suppression are applied. Returns a non-empty input volume recommendation if // available. If `capture_output_used_` is true, returns the applied input // volume. - absl::optional RecommendInputVolume( + std::optional RecommendInputVolume( float speech_probability, - absl::optional speech_level_dbfs); + std::optional speech_level_dbfs); // Stores whether the capture output will be used or not. Call when the // capture stream output has been flagged to be used/not-used. If unused, the @@ -155,7 +155,7 @@ class InputVolumeController final { int recommended_input_volume_ = 0; // Applied input volume. After `SetAppliedInputVolume()` is called it holds // the current applied volume. 
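RecommendInputVolume(), changed above to return std::optional<int> and to take an std::optional<float> speech level, is typically driven once per capture frame after AnalyzeInputAudio(). A hedged sketch of that call order; the driver function, the fixed speech metrics and the const-ness of the audio parameter are assumptions, not part of this patch:

#include <optional>

#include "modules/audio_processing/agc2/input_volume_controller.h"
#include "modules/audio_processing/audio_buffer.h"

// Applies the recommended input volume, if any, for one capture frame. The
// fixed speech metrics stand in for the values normally produced by the VAD
// and the speech level estimator.
void OnCaptureFrame(webrtc::InputVolumeController& controller,
                    const webrtc::AudioBuffer& audio_buffer,
                    int& applied_input_volume) {
  controller.AnalyzeInputAudio(applied_input_volume, audio_buffer);
  std::optional<int> recommended = controller.RecommendInputVolume(
      /*speech_probability=*/0.9f, /*speech_level_dbfs=*/-25.0f);
  if (recommended.has_value()) {
    applied_input_volume = *recommended;
  }
}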
- absl::optional applied_input_volume_; + std::optional applied_input_volume_; bool capture_output_used_; @@ -213,7 +213,7 @@ class MonoInputVolumeController { // result of `HandleClipping()` and on `rms_error_dbfs`. Updates are only // allowed for active speech segments and when `rms_error_dbfs` is not empty. // Must be called after `HandleClipping()`. - void Process(absl::optional rms_error_dbfs, float speech_probability); + void Process(std::optional rms_error_dbfs, float speech_probability); // Returns the recommended input volume. Must be called after `Process()`. int recommended_analog_level() const { return recommended_input_volume_; } diff --git a/modules/audio_processing/agc2/input_volume_controller_unittest.cc b/modules/audio_processing/agc2/input_volume_controller_unittest.cc index d1bdcf25a5..750246739e 100644 --- a/modules/audio_processing/agc2/input_volume_controller_unittest.cc +++ b/modules/audio_processing/agc2/input_volume_controller_unittest.cc @@ -11,15 +11,21 @@ #include "modules/audio_processing/agc2/input_volume_controller.h" #include +#include +#include #include +#include #include -#include +#include +#include +#include #include +#include "api/audio/audio_processing.h" +#include "modules/audio_processing/audio_buffer.h" +#include "rtc_base/checks.h" #include "rtc_base/numerics/safe_minmax.h" -#include "rtc_base/strings/string_builder.h" #include "system_wrappers/include/metrics.h" -#include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" @@ -35,17 +41,15 @@ namespace { constexpr int kSampleRateHz = 32000; constexpr int kNumChannels = 1; -constexpr int kInitialInputVolume = 128; +constexpr int kDefaultInitialInputVolume = 128; constexpr int kClippedMin = 165; // Arbitrary, but different from the default. constexpr float kAboveClippedThreshold = 0.2f; -constexpr int kMinMicLevel = 20; constexpr int kClippedLevelStep = 15; constexpr float kClippedRatioThreshold = 0.1f; constexpr int kClippedWaitFrames = 300; constexpr float kHighSpeechProbability = 0.7f; constexpr float kLowSpeechProbability = 0.1f; constexpr float kSpeechLevel = -25.0f; -constexpr float kSpeechProbabilityThreshold = 0.5f; constexpr float kSpeechRatioThreshold = 0.8f; constexpr float kMinSample = std::numeric_limits::min(); @@ -65,7 +69,7 @@ std::unique_ptr CreateInputVolumeController( bool enable_clipping_predictor = false, int update_input_volume_wait_frames = 0) { InputVolumeControllerConfig config{ - .min_input_volume = kMinMicLevel, + .min_input_volume = 20, .clipped_level_min = kClippedMin, .clipped_level_step = clipped_level_step, .clipped_ratio_threshold = clipped_ratio_threshold, @@ -74,7 +78,7 @@ std::unique_ptr CreateInputVolumeController( .target_range_max_dbfs = -18, .target_range_min_dbfs = -30, .update_input_volume_wait_frames = update_input_volume_wait_frames, - .speech_probability_threshold = kSpeechProbabilityThreshold, + .speech_probability_threshold = 0.5f, .speech_ratio_threshold = kSpeechRatioThreshold, }; @@ -156,7 +160,7 @@ class SpeechSamplesReader { int applied_input_volume, int gain_db, float speech_probability, - absl::optional speech_level_dbfs, + std::optional speech_level_dbfs, InputVolumeController& controller) { RTC_DCHECK(controller.capture_output_used()); @@ -173,8 +177,8 @@ class SpeechSamplesReader { // Apply gain and copy samples into `audio_buffer_`. 
std::transform(buffer_.begin(), buffer_.end(), audio_buffer_.channels()[0], [gain](int16_t v) -> float { - return rtc::SafeClamp(static_cast(v) * gain, - kMinSample, kMaxSample); + return SafeClamp(static_cast(v) * gain, + kMinSample, kMaxSample); }); controller.AnalyzeInputAudio(applied_input_volume, audio_buffer_); const auto recommended_input_volume = controller.RecommendInputVolume( @@ -201,7 +205,7 @@ class SpeechSamplesReader { float UpdateRecommendedInputVolume(MonoInputVolumeController& mono_controller, int applied_input_volume, float speech_probability, - absl::optional rms_error_dbfs) { + std::optional rms_error_dbfs) { mono_controller.set_stream_analog_level(applied_input_volume); EXPECT_EQ(mono_controller.recommended_analog_level(), applied_input_volume); mono_controller.Process(rms_error_dbfs, speech_probability); @@ -256,12 +260,12 @@ class InputVolumeControllerTestHelper { // - Uses `audio_buffer` to call `AnalyzeInputAudio()` and // `RecommendInputVolume()`; // Returns the recommended input volume. - absl::optional CallAgcSequence(int applied_input_volume, - float speech_probability, - absl::optional speech_level_dbfs, - int num_calls = 1) { + std::optional CallAgcSequence(int applied_input_volume, + float speech_probability, + std::optional speech_level_dbfs, + int num_calls = 1) { RTC_DCHECK_GE(num_calls, 1); - absl::optional volume = applied_input_volume; + std::optional volume = applied_input_volume; for (int i = 0; i < num_calls; ++i) { // Repeat the initial volume if `RecommendInputVolume()` doesn't return a // value. @@ -285,7 +289,7 @@ class InputVolumeControllerTestHelper { int CallRecommendInputVolume(int num_calls, int initial_volume, float speech_probability, - absl::optional speech_level_dbfs) { + std::optional speech_level_dbfs) { RTC_DCHECK(controller.capture_output_used()); // Create non-clipping audio for `AnalyzeInputAudio()`. @@ -487,7 +491,7 @@ TEST_P(InputVolumeControllerParametrizedTest, MicVolumeResponseToRmsError) { InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); config.min_input_volume = GetParam(); InputVolumeControllerTestHelper helper(config); - int volume = *helper.CallAgcSequence(kInitialInputVolume, + int volume = *helper.CallAgcSequence(kDefaultInitialInputVolume, kHighSpeechProbability, kSpeechLevel); // Inside the digital gain's window; no change of volume. @@ -532,7 +536,7 @@ TEST_P(InputVolumeControllerParametrizedTest, MicVolumeIsLimited) { const int min_input_volume = GetParam(); config.min_input_volume = min_input_volume; InputVolumeControllerTestHelper helper(config); - int volume = *helper.CallAgcSequence(kInitialInputVolume, + int volume = *helper.CallAgcSequence(kDefaultInitialInputVolume, kHighSpeechProbability, kSpeechLevel); // Maximum upwards change is limited. 
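Note on the hunks above: this CL migrates `InputVolumeController` from `absl::optional` to `std::optional`. The following is a minimal caller-side sketch of the resulting contract, assuming the signatures shown in the header diff; the helper name and the literal speech probability/level values are illustrative only and are not part of this CL. When `RecommendInputVolume()` returns `std::nullopt` (e.g. because the applied input volume was never set), the caller keeps the previously applied volume, mirroring what `CallAgcSequence()` does in the test helper above.

#include <optional>

#include "modules/audio_processing/agc2/input_volume_controller.h"
#include "modules/audio_processing/audio_buffer.h"

namespace webrtc {

// Hypothetical helper, for illustration only.
int UpdateInputVolumeOnce(InputVolumeController& controller,
                          const AudioBuffer& capture_buffer,
                          int applied_input_volume) {
  // Analyze the capture audio first, then ask for a recommendation.
  controller.AnalyzeInputAudio(applied_input_volume, capture_buffer);
  const std::optional<int> recommended = controller.RecommendInputVolume(
      /*speech_probability=*/0.9f,
      /*speech_level_dbfs=*/-25.0f);
  // No recommendation available: fall back to the applied volume.
  return recommended.value_or(applied_input_volume);
}

}  // namespace webrtc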
@@ -620,7 +624,7 @@ TEST_P(InputVolumeControllerParametrizedTest, UnmutingChecksVolumeWithoutRaising) { InputVolumeControllerTestHelper helper( /*config=*/{.min_input_volume = GetParam()}); - helper.CallAgcSequence(kInitialInputVolume, kHighSpeechProbability, + helper.CallAgcSequence(kDefaultInitialInputVolume, kHighSpeechProbability, kSpeechLevel); helper.controller.HandleCaptureOutputUsedChange(false); @@ -639,7 +643,7 @@ TEST_P(InputVolumeControllerParametrizedTest, UnmutingRaisesTooLowVolume) { const int min_input_volume = GetParam(); InputVolumeControllerTestHelper helper( /*config=*/{.min_input_volume = min_input_volume}); - helper.CallAgcSequence(kInitialInputVolume, kHighSpeechProbability, + helper.CallAgcSequence(kDefaultInitialInputVolume, kHighSpeechProbability, kSpeechLevel); helper.controller.HandleCaptureOutputUsedChange(false); @@ -658,7 +662,7 @@ TEST_P(InputVolumeControllerParametrizedTest, InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); config.min_input_volume = GetParam(); InputVolumeControllerTestHelper helper(config); - int volume = *helper.CallAgcSequence(kInitialInputVolume, + int volume = *helper.CallAgcSequence(kDefaultInitialInputVolume, kHighSpeechProbability, kSpeechLevel); // GetMicVolume returns a value outside of the quantization slack, indicating @@ -684,7 +688,7 @@ TEST_P(InputVolumeControllerParametrizedTest, InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); config.min_input_volume = GetParam(); InputVolumeControllerTestHelper helper(config); - int volume = *helper.CallAgcSequence(kInitialInputVolume, + int volume = *helper.CallAgcSequence(kDefaultInitialInputVolume, kHighSpeechProbability, kSpeechLevel); // Force the mic up to max volume. Takes a few steps due to the residual @@ -719,7 +723,7 @@ TEST_P(InputVolumeControllerParametrizedTest, InputVolumeControllerConfig config = GetInputVolumeControllerTestConfig(); config.min_input_volume = min_input_volume; InputVolumeControllerTestHelper helper(config); - int volume = *helper.CallAgcSequence(kInitialInputVolume, + int volume = *helper.CallAgcSequence(kDefaultInitialInputVolume, kHighSpeechProbability, kSpeechLevel); // Manual change below min, but strictly positive, otherwise no action will be @@ -751,7 +755,7 @@ TEST_P(InputVolumeControllerParametrizedTest, const int min_input_volume = GetParam(); InputVolumeControllerTestHelper helper( /*config=*/{.min_input_volume = min_input_volume}); - int volume = *helper.CallAgcSequence(kInitialInputVolume, + int volume = *helper.CallAgcSequence(kDefaultInitialInputVolume, kHighSpeechProbability, kSpeechLevel); // Manual change below min, but strictly positive, otherwise @@ -764,7 +768,7 @@ TEST_P(InputVolumeControllerParametrizedTest, TEST_P(InputVolumeControllerParametrizedTest, NoClippingHasNoImpact) { InputVolumeControllerTestHelper helper( /*config=*/{.min_input_volume = GetParam()}); - helper.CallAgcSequence(kInitialInputVolume, kHighSpeechProbability, + helper.CallAgcSequence(kDefaultInitialInputVolume, kHighSpeechProbability, kSpeechLevel); helper.CallAnalyzeInputAudio(/*num_calls=*/100, /*clipped_ratio=*/0); @@ -775,7 +779,7 @@ TEST_P(InputVolumeControllerParametrizedTest, ClippingUnderThresholdHasNoImpact) { InputVolumeControllerTestHelper helper( /*config=*/{.min_input_volume = GetParam()}); - helper.CallAgcSequence(kInitialInputVolume, kHighSpeechProbability, + helper.CallAgcSequence(kDefaultInitialInputVolume, kHighSpeechProbability, kSpeechLevel); helper.CallAnalyzeInputAudio(/*num_calls=*/1, 
/*clipped_ratio=*/0.099); @@ -912,7 +916,7 @@ TEST_P(InputVolumeControllerParametrizedTest, TEST_P(InputVolumeControllerParametrizedTest, TakesNoActionOnZeroMicVolume) { InputVolumeControllerTestHelper helper( /*config=*/{.min_input_volume = GetParam()}); - helper.CallAgcSequence(kInitialInputVolume, kHighSpeechProbability, + helper.CallAgcSequence(kDefaultInitialInputVolume, kHighSpeechProbability, kSpeechLevel); EXPECT_EQ( @@ -1188,17 +1192,17 @@ TEST_P(InputVolumeControllerParametrizedTest, EmptyRmsErrorHasNoEffect) { constexpr int kNumFrames = 125; constexpr int kGainDb = -20; SpeechSamplesReader reader; - int volume = reader.Feed(kNumFrames, kInitialInputVolume, kGainDb, - kLowSpeechProbability, absl::nullopt, controller); + int volume = reader.Feed(kNumFrames, kDefaultInitialInputVolume, kGainDb, + kLowSpeechProbability, std::nullopt, controller); // Check that no adaptation occurs. - ASSERT_EQ(volume, kInitialInputVolume); + ASSERT_EQ(volume, kDefaultInitialInputVolume); } // Checks that the recommended input volume is not updated unless enough // frames have been processed after the previous update. TEST(InputVolumeControllerTest, UpdateInputVolumeWaitFramesIsEffective) { - constexpr int kInputVolume = kInitialInputVolume; + constexpr int kInputVolume = kDefaultInitialInputVolume; std::unique_ptr controller_wait_0 = CreateInputVolumeController(kClippedLevelStep, kClippedRatioThreshold, kClippedWaitFrames, @@ -1276,7 +1280,7 @@ TEST(InputVolumeControllerTest, } TEST(InputVolumeControllerTest, SpeechRatioThresholdIsEffective) { - constexpr int kInputVolume = kInitialInputVolume; + constexpr int kInputVolume = kDefaultInitialInputVolume; // Create two input volume controllers with 10 frames between volume updates // and the minimum speech ratio of 0.8 and speech probability threshold 0.5. std::unique_ptr controller_1 = @@ -1327,7 +1331,7 @@ TEST(InputVolumeControllerTest, SpeechRatioThresholdIsEffective) { } TEST(InputVolumeControllerTest, SpeechProbabilityThresholdIsEffective) { - constexpr int kInputVolume = kInitialInputVolume; + constexpr int kInputVolume = kDefaultInitialInputVolume; // Create two input volume controllers with the exact same settings and // 10 frames between volume updates. 
std::unique_ptr controller_1 = @@ -1845,8 +1849,8 @@ TEST(MonoInputVolumeControllerTest, EXPECT_EQ(volume_1, kInitialInputVolume); EXPECT_EQ(volume_2, kInitialInputVolume); - volume_1 = UpdateRecommendedInputVolume( - mono_controller_1, volume_1, kHighSpeechProbability, absl::nullopt); + volume_1 = UpdateRecommendedInputVolume(mono_controller_1, volume_1, + kHighSpeechProbability, std::nullopt); volume_2 = UpdateRecommendedInputVolume(mono_controller_2, volume_2, kHighSpeechProbability, -10.0f); diff --git a/modules/audio_processing/agc2/input_volume_stats_reporter.cc b/modules/audio_processing/agc2/input_volume_stats_reporter.cc index 05624b1f92..a0f33a7311 100644 --- a/modules/audio_processing/agc2/input_volume_stats_reporter.cc +++ b/modules/audio_processing/agc2/input_volume_stats_reporter.cc @@ -52,7 +52,7 @@ constexpr absl::string_view MetricNamePrefix( metrics::Histogram* CreateVolumeHistogram(InputVolumeType input_volume_type) { char buffer[64]; - rtc::SimpleStringBuilder builder(buffer); + SimpleStringBuilder builder(buffer); builder << MetricNamePrefix(input_volume_type) << "OnChange"; return metrics::HistogramFactoryGetCountsLinear(/*name=*/builder.str(), /*min=*/1, @@ -63,7 +63,7 @@ metrics::Histogram* CreateVolumeHistogram(InputVolumeType input_volume_type) { metrics::Histogram* CreateRateHistogram(InputVolumeType input_volume_type, absl::string_view name) { char buffer[64]; - rtc::SimpleStringBuilder builder(buffer); + SimpleStringBuilder builder(buffer); builder << MetricNamePrefix(input_volume_type) << name; return metrics::HistogramFactoryGetCountsLinear(/*name=*/builder.str(), /*min=*/1, @@ -74,7 +74,7 @@ metrics::Histogram* CreateRateHistogram(InputVolumeType input_volume_type, metrics::Histogram* CreateAverageHistogram(InputVolumeType input_volume_type, absl::string_view name) { char buffer[64]; - rtc::SimpleStringBuilder builder(buffer); + SimpleStringBuilder builder(buffer); builder << MetricNamePrefix(input_volume_type) << name; return metrics::HistogramFactoryGetCountsLinear(/*name=*/builder.str(), /*min=*/1, diff --git a/modules/audio_processing/agc2/input_volume_stats_reporter.h b/modules/audio_processing/agc2/input_volume_stats_reporter.h index 31b110031c..2f31aa0791 100644 --- a/modules/audio_processing/agc2/input_volume_stats_reporter.h +++ b/modules/audio_processing/agc2/input_volume_stats_reporter.h @@ -11,7 +11,8 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC2_INPUT_VOLUME_STATS_REPORTER_H_ #define MODULES_AUDIO_PROCESSING_AGC2_INPUT_VOLUME_STATS_REPORTER_H_ -#include "absl/types/optional.h" +#include + #include "rtc_base/gtest_prod_util.h" #include "system_wrappers/include/metrics.h" @@ -83,7 +84,7 @@ class InputVolumeStatsReporter { const bool cannot_log_stats_; int log_volume_update_stats_counter_ = 0; - absl::optional previous_input_volume_ = absl::nullopt; + std::optional previous_input_volume_ = std::nullopt; }; // Updates the histogram that keeps track of recommended input volume changes diff --git a/modules/audio_processing/agc2/input_volume_stats_reporter_unittest.cc b/modules/audio_processing/agc2/input_volume_stats_reporter_unittest.cc index e762c1fb59..5db063d53d 100644 --- a/modules/audio_processing/agc2/input_volume_stats_reporter_unittest.cc +++ b/modules/audio_processing/agc2/input_volume_stats_reporter_unittest.cc @@ -32,37 +32,33 @@ class InputVolumeStatsReporterTest protected: InputVolumeType InputVolumeType() const { return GetParam(); } std::string VolumeLabel() const { - return (rtc::StringBuilder(kLabelPrefix) << VolumeTypeLabel() << 
"OnChange") + return (StringBuilder(kLabelPrefix) << VolumeTypeLabel() << "OnChange") .str(); } std::string DecreaseRateLabel() const { - return (rtc::StringBuilder(kLabelPrefix) - << VolumeTypeLabel() << "DecreaseRate") + return (StringBuilder(kLabelPrefix) << VolumeTypeLabel() << "DecreaseRate") .str(); } std::string DecreaseAverageLabel() const { - return (rtc::StringBuilder(kLabelPrefix) + return (StringBuilder(kLabelPrefix) << VolumeTypeLabel() << "DecreaseAverage") .str(); } std::string IncreaseRateLabel() const { - return (rtc::StringBuilder(kLabelPrefix) - << VolumeTypeLabel() << "IncreaseRate") + return (StringBuilder(kLabelPrefix) << VolumeTypeLabel() << "IncreaseRate") .str(); } std::string IncreaseAverageLabel() const { - return (rtc::StringBuilder(kLabelPrefix) + return (StringBuilder(kLabelPrefix) << VolumeTypeLabel() << "IncreaseAverage") .str(); } std::string UpdateRateLabel() const { - return (rtc::StringBuilder(kLabelPrefix) - << VolumeTypeLabel() << "UpdateRate") + return (StringBuilder(kLabelPrefix) << VolumeTypeLabel() << "UpdateRate") .str(); } std::string UpdateAverageLabel() const { - return (rtc::StringBuilder(kLabelPrefix) - << VolumeTypeLabel() << "UpdateAverage") + return (StringBuilder(kLabelPrefix) << VolumeTypeLabel() << "UpdateAverage") .str(); } diff --git a/modules/audio_processing/agc2/interpolated_gain_curve.cc b/modules/audio_processing/agc2/interpolated_gain_curve.cc index bb6e038514..d7651dddd7 100644 --- a/modules/audio_processing/agc2/interpolated_gain_curve.cc +++ b/modules/audio_processing/agc2/interpolated_gain_curve.cc @@ -34,16 +34,16 @@ InterpolatedGainCurve::InterpolatedGainCurve( ApmDataDumper* apm_data_dumper, absl::string_view histogram_name_prefix) : region_logger_( - (rtc::StringBuilder("WebRTC.Audio.") + (StringBuilder("WebRTC.Audio.") << histogram_name_prefix << ".FixedDigitalGainCurveRegion.Identity") .str(), - (rtc::StringBuilder("WebRTC.Audio.") + (StringBuilder("WebRTC.Audio.") << histogram_name_prefix << ".FixedDigitalGainCurveRegion.Knee") .str(), - (rtc::StringBuilder("WebRTC.Audio.") + (StringBuilder("WebRTC.Audio.") << histogram_name_prefix << ".FixedDigitalGainCurveRegion.Limiter") .str(), - (rtc::StringBuilder("WebRTC.Audio.") + (StringBuilder("WebRTC.Audio.") << histogram_name_prefix << ".FixedDigitalGainCurveRegion.Saturation") .str()), diff --git a/modules/audio_processing/agc2/limiter.cc b/modules/audio_processing/agc2/limiter.cc index 7a1e2202be..072b79ed48 100644 --- a/modules/audio_processing/agc2/limiter.cc +++ b/modules/audio_processing/agc2/limiter.cc @@ -35,8 +35,8 @@ constexpr float kAttackFirstSubframeInterpolationPower = 8.0f; void InterpolateFirstSubframe(float last_factor, float current_factor, - rtc::ArrayView subframe) { - const int n = rtc::dchecked_cast(subframe.size()); + ArrayView subframe) { + const int n = dchecked_cast(subframe.size()); constexpr float p = kAttackFirstSubframeInterpolationPower; for (int i = 0; i < n; ++i) { subframe[i] = std::pow(1.f - i / n, p) * (last_factor - current_factor) + @@ -46,22 +46,20 @@ void InterpolateFirstSubframe(float last_factor, void ComputePerSampleSubframeFactors( const std::array& scaling_factors, - int samples_per_channel, - rtc::ArrayView per_sample_scaling_factors) { - const int num_subframes = scaling_factors.size() - 1; - const int subframe_size = - rtc::CheckedDivExact(samples_per_channel, num_subframes); + MonoView per_sample_scaling_factors) { + const size_t num_subframes = scaling_factors.size() - 1; + const int subframe_size = CheckedDivExact( + 
SamplesPerChannel(per_sample_scaling_factors), num_subframes); // Handle first sub-frame differently in case of attack. const bool is_attack = scaling_factors[0] > scaling_factors[1]; if (is_attack) { InterpolateFirstSubframe( scaling_factors[0], scaling_factors[1], - rtc::ArrayView( - per_sample_scaling_factors.subview(0, subframe_size))); + per_sample_scaling_factors.subview(0, subframe_size)); } - for (int i = is_attack ? 1 : 0; i < num_subframes; ++i) { + for (size_t i = is_attack ? 1 : 0; i < num_subframes; ++i) { const int subframe_start = i * subframe_size; const float scaling_start = scaling_factors[i]; const float scaling_end = scaling_factors[i + 1]; @@ -73,39 +71,36 @@ void ComputePerSampleSubframeFactors( } } -void ScaleSamples(rtc::ArrayView per_sample_scaling_factors, - AudioFrameView signal) { +void ScaleSamples(MonoView per_sample_scaling_factors, + DeinterleavedView signal) { const int samples_per_channel = signal.samples_per_channel(); - RTC_DCHECK_EQ(samples_per_channel, per_sample_scaling_factors.size()); - for (int i = 0; i < signal.num_channels(); ++i) { - rtc::ArrayView channel = signal.channel(i); + RTC_DCHECK_EQ(samples_per_channel, + SamplesPerChannel(per_sample_scaling_factors)); + for (size_t i = 0; i < signal.num_channels(); ++i) { + MonoView channel = signal[i]; for (int j = 0; j < samples_per_channel; ++j) { - channel[j] = rtc::SafeClamp(channel[j] * per_sample_scaling_factors[j], - kMinFloatS16Value, kMaxFloatS16Value); + channel[j] = SafeClamp(channel[j] * per_sample_scaling_factors[j], + kMinFloatS16Value, kMaxFloatS16Value); } } } - -void CheckLimiterSampleRate(int sample_rate_hz) { - // Check that per_sample_scaling_factors_ is large enough. - RTC_DCHECK_LE(sample_rate_hz, - kMaximalNumberOfSamplesPerChannel * 1000 / kFrameDurationMs); -} - } // namespace -Limiter::Limiter(int sample_rate_hz, - ApmDataDumper* apm_data_dumper, +Limiter::Limiter(ApmDataDumper* apm_data_dumper, + size_t samples_per_channel, absl::string_view histogram_name) : interp_gain_curve_(apm_data_dumper, histogram_name), - level_estimator_(sample_rate_hz, apm_data_dumper), + level_estimator_(samples_per_channel, apm_data_dumper), apm_data_dumper_(apm_data_dumper) { - CheckLimiterSampleRate(sample_rate_hz); + RTC_DCHECK_LE(samples_per_channel, kMaximalNumberOfSamplesPerChannel); } Limiter::~Limiter() = default; -void Limiter::Process(AudioFrameView signal) { +void Limiter::Process(DeinterleavedView signal) { + RTC_DCHECK_LE(signal.samples_per_channel(), + kMaximalNumberOfSamplesPerChannel); + const std::array level_estimate = level_estimator_.ComputeLevel(signal); @@ -116,13 +111,9 @@ void Limiter::Process(AudioFrameView signal) { return interp_gain_curve_.LookUpGainToApply(x); }); - const int samples_per_channel = signal.samples_per_channel(); - RTC_DCHECK_LE(samples_per_channel, kMaximalNumberOfSamplesPerChannel); - - auto per_sample_scaling_factors = rtc::ArrayView( - &per_sample_scaling_factors_[0], samples_per_channel); - ComputePerSampleSubframeFactors(scaling_factors_, samples_per_channel, - per_sample_scaling_factors); + MonoView per_sample_scaling_factors(&per_sample_scaling_factors_[0], + signal.samples_per_channel()); + ComputePerSampleSubframeFactors(scaling_factors_, per_sample_scaling_factors); ScaleSamples(per_sample_scaling_factors, signal); last_scaling_factor_ = scaling_factors_.back(); @@ -139,9 +130,9 @@ InterpolatedGainCurve::Stats Limiter::GetGainCurveStats() const { return interp_gain_curve_.get_stats(); } -void Limiter::SetSampleRate(int sample_rate_hz) { - 
CheckLimiterSampleRate(sample_rate_hz); - level_estimator_.SetSampleRate(sample_rate_hz); +void Limiter::SetSamplesPerChannel(size_t samples_per_channel) { + RTC_DCHECK_LE(samples_per_channel, kMaximalNumberOfSamplesPerChannel); + level_estimator_.SetSamplesPerChannel(samples_per_channel); } void Limiter::Reset() { diff --git a/modules/audio_processing/agc2/limiter.h b/modules/audio_processing/agc2/limiter.h index d4d556349c..55cb1a5b15 100644 --- a/modules/audio_processing/agc2/limiter.h +++ b/modules/audio_processing/agc2/limiter.h @@ -14,6 +14,7 @@ #include #include "absl/strings/string_view.h" +#include "api/audio/audio_frame.h" #include "modules/audio_processing/agc2/fixed_digital_level_estimator.h" #include "modules/audio_processing/agc2/interpolated_gain_curve.h" #include "modules/audio_processing/include/audio_frame_view.h" @@ -23,23 +24,25 @@ class ApmDataDumper; class Limiter { public: - Limiter(int sample_rate_hz, - ApmDataDumper* apm_data_dumper, + // See `SetSamplesPerChannel()` for valid values for `samples_per_channel`. + Limiter(ApmDataDumper* apm_data_dumper, + size_t samples_per_channel, absl::string_view histogram_name_prefix); + Limiter(const Limiter& limiter) = delete; Limiter& operator=(const Limiter& limiter) = delete; ~Limiter(); // Applies limiter and hard-clipping to `signal`. - void Process(AudioFrameView signal); + void Process(DeinterleavedView signal); + InterpolatedGainCurve::Stats GetGainCurveStats() const; - // Supported rates must be - // * supported by FixedDigitalLevelEstimator - // * below kMaximalNumberOfSamplesPerChannel*1000/kFrameDurationMs - // so that samples_per_channel fit in the - // per_sample_scaling_factors_ array. - void SetSampleRate(int sample_rate_hz); + // Supported values must be + // * Supported by FixedDigitalLevelEstimator + // * Below or equal to kMaximalNumberOfSamplesPerChannel so that samples + // fit in the per_sample_scaling_factors_ array. + void SetSamplesPerChannel(size_t samples_per_channel); // Resets the internal state. 
void Reset(); diff --git a/modules/audio_processing/agc2/limiter_unittest.cc b/modules/audio_processing/agc2/limiter_unittest.cc index e662a7fc89..6c72e729ee 100644 --- a/modules/audio_processing/agc2/limiter_unittest.cc +++ b/modules/audio_processing/agc2/limiter_unittest.cc @@ -10,6 +10,8 @@ #include "modules/audio_processing/agc2/limiter.h" +#include + #include "common_audio/include/audio_util.h" #include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/agc2/agc2_testing_common.h" @@ -20,40 +22,40 @@ namespace webrtc { TEST(Limiter, LimiterShouldConstructAndRun) { - const int sample_rate_hz = 48000; + constexpr size_t kSamplesPerChannel = 480; ApmDataDumper apm_data_dumper(0); - Limiter limiter(sample_rate_hz, &apm_data_dumper, ""); + Limiter limiter(&apm_data_dumper, kSamplesPerChannel, ""); - VectorFloatFrame vectors_with_float_frame(1, sample_rate_hz / 100, - kMaxAbsFloatS16Value); - limiter.Process(vectors_with_float_frame.float_frame_view()); + std::array buffer; + buffer.fill(kMaxAbsFloatS16Value); + limiter.Process( + DeinterleavedView(buffer.data(), kSamplesPerChannel, 1)); } TEST(Limiter, OutputVolumeAboveThreshold) { - const int sample_rate_hz = 48000; + constexpr size_t kSamplesPerChannel = 480; const float input_level = (kMaxAbsFloatS16Value + DbfsToFloatS16(test::kLimiterMaxInputLevelDbFs)) / 2.f; ApmDataDumper apm_data_dumper(0); - Limiter limiter(sample_rate_hz, &apm_data_dumper, ""); + Limiter limiter(&apm_data_dumper, kSamplesPerChannel, ""); + + std::array buffer; // Give the level estimator time to adapt. for (int i = 0; i < 5; ++i) { - VectorFloatFrame vectors_with_float_frame(1, sample_rate_hz / 100, - input_level); - limiter.Process(vectors_with_float_frame.float_frame_view()); + std::fill(buffer.begin(), buffer.end(), input_level); + limiter.Process( + DeinterleavedView(buffer.data(), kSamplesPerChannel, 1)); } - VectorFloatFrame vectors_with_float_frame(1, sample_rate_hz / 100, - input_level); - limiter.Process(vectors_with_float_frame.float_frame_view()); - rtc::ArrayView channel = - vectors_with_float_frame.float_frame_view().channel(0); - - for (const auto& sample : channel) { - EXPECT_LT(0.9f * kMaxAbsFloatS16Value, sample); + std::fill(buffer.begin(), buffer.end(), input_level); + limiter.Process( + DeinterleavedView(buffer.data(), kSamplesPerChannel, 1)); + for (const auto& sample : buffer) { + ASSERT_LT(0.9f * kMaxAbsFloatS16Value, sample); } } diff --git a/modules/audio_processing/agc2/noise_level_estimator.cc b/modules/audio_processing/agc2/noise_level_estimator.cc index 691513b509..c43738aad3 100644 --- a/modules/audio_processing/agc2/noise_level_estimator.cc +++ b/modules/audio_processing/agc2/noise_level_estimator.cc @@ -16,7 +16,7 @@ #include #include -#include "api/array_view.h" +#include "api/audio/audio_view.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" @@ -25,11 +25,12 @@ namespace { constexpr int kFramesPerSecond = 100; -float FrameEnergy(const AudioFrameView& audio) { +float FrameEnergy(DeinterleavedView audio) { float energy = 0.0f; - for (int k = 0; k < audio.num_channels(); ++k) { + for (size_t k = 0; k < audio.num_channels(); ++k) { + MonoView ch = audio[k]; float channel_energy = - std::accumulate(audio.channel(k).begin(), audio.channel(k).end(), 0.0f, + std::accumulate(ch.begin(), ch.end(), 0.0f, [](float a, float b) -> float { return a + b * b; }); energy = std::max(channel_energy, energy); } @@ -81,7 +82,7 @@ class NoiseFloorEstimator : public 
NoiseLevelEstimator { NoiseFloorEstimator& operator=(const NoiseFloorEstimator&) = delete; ~NoiseFloorEstimator() = default; - float Analyze(const AudioFrameView& frame) override { + float Analyze(DeinterleavedView frame) override { // Detect sample rate changes. const int sample_rate_hz = static_cast(frame.samples_per_channel() * kFramesPerSecond); diff --git a/modules/audio_processing/agc2/noise_level_estimator.h b/modules/audio_processing/agc2/noise_level_estimator.h index 9f3b957486..8df4cbc93d 100644 --- a/modules/audio_processing/agc2/noise_level_estimator.h +++ b/modules/audio_processing/agc2/noise_level_estimator.h @@ -13,7 +13,7 @@ #include -#include "modules/audio_processing/include/audio_frame_view.h" +#include "api/audio/audio_view.h" namespace webrtc { class ApmDataDumper; @@ -24,7 +24,7 @@ class NoiseLevelEstimator { virtual ~NoiseLevelEstimator() = default; // Analyzes a 10 ms `frame`, updates the noise level estimation and returns // the value for the latter in dBFS. - virtual float Analyze(const AudioFrameView& frame) = 0; + virtual float Analyze(DeinterleavedView frame) = 0; }; // Creates a noise level estimator based on noise floor detection. diff --git a/modules/audio_processing/agc2/noise_level_estimator_unittest.cc b/modules/audio_processing/agc2/noise_level_estimator_unittest.cc index 8168c5a229..1825f5c29c 100644 --- a/modules/audio_processing/agc2/noise_level_estimator_unittest.cc +++ b/modules/audio_processing/agc2/noise_level_estimator_unittest.cc @@ -15,6 +15,7 @@ #include #include +#include "api/audio/audio_view.h" #include "api/function_view.h" #include "modules/audio_processing/agc2/agc2_testing_common.h" #include "modules/audio_processing/agc2/vector_float_frame.h" @@ -29,20 +30,20 @@ constexpr int kFramesPerSecond = 100; // Runs the noise estimator on audio generated by 'sample_generator' // for kNumIterations. Returns the last noise level estimate. -float RunEstimator(rtc::FunctionView sample_generator, +float RunEstimator(FunctionView sample_generator, NoiseLevelEstimator& estimator, int sample_rate_hz) { const int samples_per_channel = - rtc::CheckedDivExact(sample_rate_hz, kFramesPerSecond); + CheckedDivExact(sample_rate_hz, kFramesPerSecond); VectorFloatFrame signal(1, samples_per_channel, 0.0f); for (int i = 0; i < kNumIterations; ++i) { - AudioFrameView frame_view = signal.float_frame_view(); + DeinterleavedView frame_view = signal.view(); for (int j = 0; j < samples_per_channel; ++j) { - frame_view.channel(0)[j] = sample_generator(); + frame_view[0][j] = sample_generator(); } estimator.Analyze(frame_view); } - return estimator.Analyze(signal.float_frame_view()); + return estimator.Analyze(signal.view()); } class NoiseEstimatorParametrization : public ::testing::TestWithParam { diff --git a/modules/audio_processing/agc2/rnn_vad/BUILD.gn b/modules/audio_processing/agc2/rnn_vad/BUILD.gn index 9093a68cf3..025794d262 100644 --- a/modules/audio_processing/agc2/rnn_vad/BUILD.gn +++ b/modules/audio_processing/agc2/rnn_vad/BUILD.gn @@ -54,7 +54,6 @@ rtc_library("rnn_vad_auto_correlation") { } rtc_source_set("rnn_vad_common") { - # TODO(alessiob): Make this target visibility private. 
visibility = [ ":*", "..:vad_wrapper", @@ -100,12 +99,12 @@ rtc_source_set("rnn_vad_layers") { "../../../../api:function_view", "../../../../rtc_base:checks", "../../../../rtc_base:safe_conversions", + "//third_party/abseil-cpp/absl/strings:string_view", "//third_party/rnnoise:rnn_vad", ] if (current_cpu == "x86" || current_cpu == "x64") { deps += [ ":vector_math_avx2" ] } - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_source_set("vector_math") { @@ -229,8 +228,8 @@ if (rtc_include_tests) { "../../../../rtc_base:safe_compare", "../../../../test:fileutils", "../../../../test:test_support", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } unittest_resources = [ @@ -306,7 +305,6 @@ if (rtc_include_tests) { if (current_cpu == "x86" || current_cpu == "x64") { deps += [ ":vector_math_avx2" ] } - absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] data = unittest_resources if (is_ios) { deps += [ ":unittests_bundle_data" ] diff --git a/modules/audio_processing/agc2/rnn_vad/auto_correlation.cc b/modules/audio_processing/agc2/rnn_vad/auto_correlation.cc index 3ddeec8dba..f079b0107c 100644 --- a/modules/audio_processing/agc2/rnn_vad/auto_correlation.cc +++ b/modules/audio_processing/agc2/rnn_vad/auto_correlation.cc @@ -44,8 +44,8 @@ AutoCorrelationCalculator::~AutoCorrelationCalculator() = default; // inverted lag equal to 0 that corresponds to a lag equal to the maximum // pitch period. void AutoCorrelationCalculator::ComputeOnPitchBuffer( - rtc::ArrayView pitch_buf, - rtc::ArrayView auto_corr) { + ArrayView pitch_buf, + ArrayView auto_corr) { RTC_DCHECK_LT(auto_corr.size(), kMaxPitch12kHz); RTC_DCHECK_GT(pitch_buf.size(), kMaxPitch12kHz); constexpr int kFftFrameSize = 1 << kAutoCorrelationFftOrder; diff --git a/modules/audio_processing/agc2/rnn_vad/auto_correlation.h b/modules/audio_processing/agc2/rnn_vad/auto_correlation.h index 1ae5054567..127b259860 100644 --- a/modules/audio_processing/agc2/rnn_vad/auto_correlation.h +++ b/modules/audio_processing/agc2/rnn_vad/auto_correlation.h @@ -32,9 +32,8 @@ class AutoCorrelationCalculator { // Computes the auto-correlation coefficients for a target pitch interval. // `auto_corr` indexes are inverted lags. - void ComputeOnPitchBuffer( - rtc::ArrayView pitch_buf, - rtc::ArrayView auto_corr); + void ComputeOnPitchBuffer(ArrayView pitch_buf, + ArrayView auto_corr); private: Pffft fft_; diff --git a/modules/audio_processing/agc2/rnn_vad/features_extraction.cc b/modules/audio_processing/agc2/rnn_vad/features_extraction.cc index 502023428d..b090281493 100644 --- a/modules/audio_processing/agc2/rnn_vad/features_extraction.cc +++ b/modules/audio_processing/agc2/rnn_vad/features_extraction.cc @@ -50,8 +50,8 @@ void FeaturesExtractor::Reset() { } bool FeaturesExtractor::CheckSilenceComputeFeatures( - rtc::ArrayView samples, - rtc::ArrayView feature_vector) { + ArrayView samples, + ArrayView feature_vector) { // Pre-processing. if (use_high_pass_filter_) { std::array samples_filtered; diff --git a/modules/audio_processing/agc2/rnn_vad/features_extraction.h b/modules/audio_processing/agc2/rnn_vad/features_extraction.h index d47a85bfb0..3fe14a4990 100644 --- a/modules/audio_processing/agc2/rnn_vad/features_extraction.h +++ b/modules/audio_processing/agc2/rnn_vad/features_extraction.h @@ -36,8 +36,8 @@ class FeaturesExtractor { // `feature_vector` is partially written and therefore must not be used to // feed the VAD RNN. 
bool CheckSilenceComputeFeatures( - rtc::ArrayView samples, - rtc::ArrayView feature_vector); + ArrayView samples, + ArrayView feature_vector); private: const bool use_high_pass_filter_; @@ -46,11 +46,11 @@ class FeaturesExtractor { BiQuadFilter hpf_; SequenceBuffer pitch_buf_24kHz_; - rtc::ArrayView pitch_buf_24kHz_view_; + ArrayView pitch_buf_24kHz_view_; std::vector lp_residual_; - rtc::ArrayView lp_residual_view_; + ArrayView lp_residual_view_; PitchEstimator pitch_estimator_; - rtc::ArrayView reference_frame_view_; + ArrayView reference_frame_view_; SpectralFeaturesExtractor spectral_features_extractor_; int pitch_period_48kHz_; }; diff --git a/modules/audio_processing/agc2/rnn_vad/features_extraction_unittest.cc b/modules/audio_processing/agc2/rnn_vad/features_extraction_unittest.cc index 96f956adfe..f6a1392f62 100644 --- a/modules/audio_processing/agc2/rnn_vad/features_extraction_unittest.cc +++ b/modules/audio_processing/agc2/rnn_vad/features_extraction_unittest.cc @@ -41,8 +41,8 @@ bool PitchIsValid(float pitch_hz) { pitch_period <= kMaxPitch24kHz; } -void CreatePureTone(float amplitude, float freq_hz, rtc::ArrayView dst) { - for (int i = 0; rtc::SafeLt(i, dst.size()); ++i) { +void CreatePureTone(float amplitude, float freq_hz, ArrayView dst) { + for (int i = 0; SafeLt(i, dst.size()); ++i) { dst[i] = amplitude * std::sin(2.f * kPi * freq_hz * i / kSampleRate24kHz); } } @@ -51,8 +51,8 @@ void CreatePureTone(float amplitude, float freq_hz, rtc::ArrayView dst) { // For every frame, the output is written into `feature_vector`. Returns true // if silence is detected in the last frame. bool FeedTestData(FeaturesExtractor& features_extractor, - rtc::ArrayView samples, - rtc::ArrayView feature_vector) { + ArrayView samples, + ArrayView feature_vector) { // TODO(bugs.webrtc.org/8948): Add when the issue is fixed. // FloatingPointExceptionObserver fpe_observer; bool is_silence = true; @@ -78,8 +78,8 @@ TEST(RnnVadTest, FeatureExtractionLowHighPitch) { FeaturesExtractor features_extractor(cpu_features); std::vector samples(kNumTestDataSize); std::vector feature_vector(kFeatureVectorSize); - ASSERT_EQ(kFeatureVectorSize, rtc::dchecked_cast(feature_vector.size())); - rtc::ArrayView feature_vector_view( + ASSERT_EQ(kFeatureVectorSize, dchecked_cast(feature_vector.size())); + ArrayView feature_vector_view( feature_vector.data(), kFeatureVectorSize); // Extract the normalized scalar feature that is proportional to the estimated diff --git a/modules/audio_processing/agc2/rnn_vad/lp_residual.cc b/modules/audio_processing/agc2/rnn_vad/lp_residual.cc index 484bfba459..f942099b01 100644 --- a/modules/audio_processing/agc2/rnn_vad/lp_residual.cc +++ b/modules/audio_processing/agc2/rnn_vad/lp_residual.cc @@ -25,9 +25,8 @@ namespace { // Computes auto-correlation coefficients for `x` and writes them in // `auto_corr`. The lag values are in {0, ..., max_lag - 1}, where max_lag // equals the size of `auto_corr`. -void ComputeAutoCorrelation( - rtc::ArrayView x, - rtc::ArrayView auto_corr) { +void ComputeAutoCorrelation(ArrayView x, + ArrayView auto_corr) { constexpr int max_lag = auto_corr.size(); RTC_DCHECK_LT(max_lag, x.size()); for (int lag = 0; lag < max_lag; ++lag) { @@ -37,8 +36,7 @@ void ComputeAutoCorrelation( } // Applies denoising to the auto-correlation coefficients. -void DenoiseAutoCorrelation( - rtc::ArrayView auto_corr) { +void DenoiseAutoCorrelation(ArrayView auto_corr) { // Assume -40 dB white noise floor. 
auto_corr[0] *= 1.0001f; // Hard-coded values obtained as @@ -53,8 +51,8 @@ void DenoiseAutoCorrelation( // Computes the initial inverse filter coefficients given the auto-correlation // coefficients of an input frame. void ComputeInitialInverseFilterCoefficients( - rtc::ArrayView auto_corr, - rtc::ArrayView lpc_coeffs) { + ArrayView auto_corr, + ArrayView lpc_coeffs) { float error = auto_corr[0]; for (int i = 0; i < kNumLpcCoefficients - 1; ++i) { float reflection_coeff = 0.f; @@ -88,8 +86,8 @@ void ComputeInitialInverseFilterCoefficients( } // namespace void ComputeAndPostProcessLpcCoefficients( - rtc::ArrayView x, - rtc::ArrayView lpc_coeffs) { + ArrayView x, + ArrayView lpc_coeffs) { std::array auto_corr; ComputeAutoCorrelation(x, auto_corr); if (auto_corr[0] == 0.f) { // Empty frame. @@ -114,10 +112,9 @@ void ComputeAndPostProcessLpcCoefficients( static_assert(kNumLpcCoefficients == 5, "Update `lpc_coeffs(_pre)`."); } -void ComputeLpResidual( - rtc::ArrayView lpc_coeffs, - rtc::ArrayView x, - rtc::ArrayView y) { +void ComputeLpResidual(ArrayView lpc_coeffs, + ArrayView x, + ArrayView y) { RTC_DCHECK_GT(x.size(), kNumLpcCoefficients); RTC_DCHECK_EQ(x.size(), y.size()); // The code below implements the following operation: @@ -131,7 +128,7 @@ void ComputeLpResidual( } // Regular case. auto last = x.crend(); - for (int i = kNumLpcCoefficients; rtc::SafeLt(i, y.size()); ++i, --last) { + for (int i = kNumLpcCoefficients; SafeLt(i, y.size()); ++i, --last) { y[i] = std::inner_product(last - kNumLpcCoefficients, last, lpc_coeffs.cbegin(), x[i]); } diff --git a/modules/audio_processing/agc2/rnn_vad/lp_residual.h b/modules/audio_processing/agc2/rnn_vad/lp_residual.h index d04c536ec1..f29dfe03ad 100644 --- a/modules/audio_processing/agc2/rnn_vad/lp_residual.h +++ b/modules/audio_processing/agc2/rnn_vad/lp_residual.h @@ -24,16 +24,15 @@ constexpr int kNumLpcCoefficients = 5; // Given a frame `x`, computes a post-processed version of LPC coefficients // tailored for pitch estimation. void ComputeAndPostProcessLpcCoefficients( - rtc::ArrayView x, - rtc::ArrayView lpc_coeffs); + ArrayView x, + ArrayView lpc_coeffs); // Computes the LP residual for the input frame `x` and the LPC coefficients // `lpc_coeffs`. `y` and `x` can point to the same array for in-place // computation. 
-void ComputeLpResidual( - rtc::ArrayView lpc_coeffs, - rtc::ArrayView x, - rtc::ArrayView y); +void ComputeLpResidual(ArrayView lpc_coeffs, + ArrayView x, + ArrayView y); } // namespace rnn_vad } // namespace webrtc diff --git a/modules/audio_processing/agc2/rnn_vad/pitch_search.cc b/modules/audio_processing/agc2/rnn_vad/pitch_search.cc index 419620fc0c..3d4bb3f848 100644 --- a/modules/audio_processing/agc2/rnn_vad/pitch_search.cc +++ b/modules/audio_processing/agc2/rnn_vad/pitch_search.cc @@ -27,11 +27,11 @@ PitchEstimator::PitchEstimator(const AvailableCpuFeatures& cpu_features) PitchEstimator::~PitchEstimator() = default; int PitchEstimator::Estimate( - rtc::ArrayView pitch_buffer) { - rtc::ArrayView pitch_buffer_12kHz_view( + ArrayView pitch_buffer) { + ArrayView pitch_buffer_12kHz_view( pitch_buffer_12kHz_.data(), kBufSize12kHz); RTC_DCHECK_EQ(pitch_buffer_12kHz_.size(), pitch_buffer_12kHz_view.size()); - rtc::ArrayView auto_correlation_12kHz_view( + ArrayView auto_correlation_12kHz_view( auto_correlation_12kHz_.data(), kNumLags12kHz); RTC_DCHECK_EQ(auto_correlation_12kHz_.size(), auto_correlation_12kHz_view.size()); @@ -51,7 +51,7 @@ int PitchEstimator::Estimate( // Refine the initial pitch period estimation from 12 kHz to 48 kHz. // Pre-compute frame energies at 24 kHz. - rtc::ArrayView y_energy_24kHz_view( + ArrayView y_energy_24kHz_view( y_energy_24kHz_.data(), kRefineNumLags24kHz); RTC_DCHECK_EQ(y_energy_24kHz_.size(), y_energy_24kHz_view.size()); ComputeSlidingFrameSquareEnergies24kHz(pitch_buffer, y_energy_24kHz_view, diff --git a/modules/audio_processing/agc2/rnn_vad/pitch_search.h b/modules/audio_processing/agc2/rnn_vad/pitch_search.h index 42c448eb56..28f755e316 100644 --- a/modules/audio_processing/agc2/rnn_vad/pitch_search.h +++ b/modules/audio_processing/agc2/rnn_vad/pitch_search.h @@ -32,7 +32,7 @@ class PitchEstimator { PitchEstimator& operator=(const PitchEstimator&) = delete; ~PitchEstimator(); // Returns the estimated pitch period at 48 kHz. - int Estimate(rtc::ArrayView pitch_buffer); + int Estimate(ArrayView pitch_buffer); private: FRIEND_TEST_ALL_PREFIXES(RnnVadTest, PitchSearchWithinTolerance); diff --git a/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc b/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc index e8c912518d..ebb03bd854 100644 --- a/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc +++ b/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc @@ -28,10 +28,9 @@ namespace webrtc { namespace rnn_vad { namespace { -float ComputeAutoCorrelation( - int inverted_lag, - rtc::ArrayView pitch_buffer, - const VectorMath& vector_math) { +float ComputeAutoCorrelation(int inverted_lag, + ArrayView pitch_buffer, + const VectorMath& vector_math) { RTC_DCHECK_LT(inverted_lag, kBufSize24kHz); RTC_DCHECK_LT(inverted_lag, kRefineNumLags24kHz); static_assert(kMaxPitch24kHz < kBufSize24kHz, ""); @@ -68,7 +67,7 @@ int GetPitchPseudoInterpolationOffset(float prev_auto_correlation, // output sample rate is twice as that of `lag`. int PitchPseudoInterpolationLagPitchBuf( int lag, - rtc::ArrayView pitch_buffer, + ArrayView pitch_buffer, const VectorMath& vector_math) { int offset = 0; // Cannot apply pseudo-interpolation at the boundaries. @@ -154,8 +153,8 @@ class InvertedLagsIndex { // the inverted lags for the computed auto correlation values. 
void ComputeAutoCorrelation( Range inverted_lags, - rtc::ArrayView pitch_buffer, - rtc::ArrayView auto_correlation, + ArrayView pitch_buffer, + ArrayView auto_correlation, InvertedLagsIndex& inverted_lags_index, const VectorMath& vector_math) { // Check valid range. @@ -182,11 +181,11 @@ void ComputeAutoCorrelation( // Searches the strongest pitch period at 24 kHz and returns its inverted lag at // 48 kHz. int ComputePitchPeriod48kHz( - rtc::ArrayView pitch_buffer, - rtc::ArrayView inverted_lags, - rtc::ArrayView auto_correlation, - rtc::ArrayView y_energy, - const VectorMath& vector_math) { + ArrayView /* pitch_buffer */, + ArrayView inverted_lags, + ArrayView auto_correlation, + ArrayView y_energy, + const VectorMath& /* vector_math */) { static_assert(kMaxPitch24kHz > kInitialNumLags24kHz, ""); static_assert(kMaxPitch24kHz < kBufSize24kHz, ""); int best_inverted_lag = 0; // Pitch period. @@ -283,8 +282,8 @@ bool IsAlternativePitchStrongerThanInitial(PitchInfo last, } // namespace -void Decimate2x(rtc::ArrayView src, - rtc::ArrayView dst) { +void Decimate2x(ArrayView src, + ArrayView dst) { // TODO(bugs.webrtc.org/9076): Consider adding anti-aliasing filter. static_assert(2 * kBufSize12kHz == kBufSize24kHz, ""); for (int i = 0; i < kBufSize12kHz; ++i) { @@ -293,8 +292,8 @@ void Decimate2x(rtc::ArrayView src, } void ComputeSlidingFrameSquareEnergies24kHz( - rtc::ArrayView pitch_buffer, - rtc::ArrayView y_energy, + ArrayView pitch_buffer, + ArrayView y_energy, AvailableCpuFeatures cpu_features) { VectorMath vector_math(cpu_features); static_assert(kFrameSize20ms24kHz < kBufSize24kHz, ""); @@ -313,8 +312,8 @@ void ComputeSlidingFrameSquareEnergies24kHz( } CandidatePitchPeriods ComputePitchPeriod12kHz( - rtc::ArrayView pitch_buffer, - rtc::ArrayView auto_correlation, + ArrayView pitch_buffer, + ArrayView auto_correlation, AvailableCpuFeatures cpu_features) { static_assert(kMaxPitch12kHz > kNumLags12kHz, ""); static_assert(kMaxPitch12kHz < kBufSize12kHz, ""); @@ -370,8 +369,8 @@ CandidatePitchPeriods ComputePitchPeriod12kHz( } int ComputePitchPeriod48kHz( - rtc::ArrayView pitch_buffer, - rtc::ArrayView y_energy, + ArrayView pitch_buffer, + ArrayView y_energy, CandidatePitchPeriods pitch_candidates, AvailableCpuFeatures cpu_features) { // Compute the auto-correlation terms only for neighbors of the two pitch @@ -408,8 +407,8 @@ int ComputePitchPeriod48kHz( } PitchInfo ComputeExtendedPitchPeriod48kHz( - rtc::ArrayView pitch_buffer, - rtc::ArrayView y_energy, + ArrayView pitch_buffer, + ArrayView y_energy, int initial_pitch_period_48kHz, PitchInfo last_pitch_48kHz, AvailableCpuFeatures cpu_features) { diff --git a/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h b/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h index aa2dd13745..2366c5aa07 100644 --- a/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h +++ b/modules/audio_processing/agc2/rnn_vad/pitch_search_internal.h @@ -24,8 +24,8 @@ namespace webrtc { namespace rnn_vad { // Performs 2x decimation without any anti-aliasing filter. -void Decimate2x(rtc::ArrayView src, - rtc::ArrayView dst); +void Decimate2x(ArrayView src, + ArrayView dst); // Key concepts and keywords used below in this file. // @@ -65,8 +65,8 @@ void Decimate2x(rtc::ArrayView src, // Computes the sum of squared samples for every sliding frame `y` in the pitch // buffer. The indexes of `y_energy` are inverted lags. 
void ComputeSlidingFrameSquareEnergies24kHz( - rtc::ArrayView pitch_buffer, - rtc::ArrayView y_energy, + ArrayView pitch_buffer, + ArrayView y_energy, AvailableCpuFeatures cpu_features); // Top-2 pitch period candidates. Unit: number of samples - i.e., inverted lags. @@ -79,16 +79,16 @@ struct CandidatePitchPeriods { // pitch buffer and the auto-correlation values (having inverted lags as // indexes). CandidatePitchPeriods ComputePitchPeriod12kHz( - rtc::ArrayView pitch_buffer, - rtc::ArrayView auto_correlation, + ArrayView pitch_buffer, + ArrayView auto_correlation, AvailableCpuFeatures cpu_features); // Computes the pitch period at 48 kHz given a view on the 24 kHz pitch buffer, // the energies for the sliding frames `y` at 24 kHz and the pitch period // candidates at 24 kHz (encoded as inverted lag). int ComputePitchPeriod48kHz( - rtc::ArrayView pitch_buffer, - rtc::ArrayView y_energy, + ArrayView pitch_buffer, + ArrayView y_energy, CandidatePitchPeriods pitch_candidates_24kHz, AvailableCpuFeatures cpu_features); @@ -102,8 +102,8 @@ struct PitchInfo { // `y` at 24 kHz, the initial 48 kHz estimation (computed by // `ComputePitchPeriod48kHz()`) and the last estimated pitch. PitchInfo ComputeExtendedPitchPeriod48kHz( - rtc::ArrayView pitch_buffer, - rtc::ArrayView y_energy, + ArrayView pitch_buffer, + ArrayView y_energy, int initial_pitch_period_48kHz, PitchInfo last_pitch_48kHz, AvailableCpuFeatures cpu_features); diff --git a/modules/audio_processing/agc2/rnn_vad/pitch_search_internal_unittest.cc b/modules/audio_processing/agc2/rnn_vad/pitch_search_internal_unittest.cc index 2a6e68f157..fa879c1e49 100644 --- a/modules/audio_processing/agc2/rnn_vad/pitch_search_internal_unittest.cc +++ b/modules/audio_processing/agc2/rnn_vad/pitch_search_internal_unittest.cc @@ -33,7 +33,7 @@ constexpr float kTestPitchStrengthHigh = 0.75f; template std::string PrintTestIndexAndCpuFeatures( const ::testing::TestParamInfo& info) { - rtc::StringBuilder builder; + StringBuilder builder; builder << info.index << "_" << info.param.cpu_features.ToString(); return builder.str(); } @@ -90,8 +90,8 @@ TEST(RnnVadTest, ComputePitchPeriod48kHzBitExactness) { PitchTestData test_data; std::vector y_energy(kRefineNumLags24kHz); - rtc::ArrayView y_energy_view(y_energy.data(), - kRefineNumLags24kHz); + ArrayView y_energy_view(y_energy.data(), + kRefineNumLags24kHz); ComputeSlidingFrameSquareEnergies24kHz(test_data.PitchBuffer24kHzView(), y_energy_view, cpu_features); // TODO(bugs.webrtc.org/8948): Add when the issue is fixed. @@ -124,8 +124,8 @@ TEST_P(PitchCandidatesParametrization, PitchTestData test_data; std::vector y_energy(kRefineNumLags24kHz); - rtc::ArrayView y_energy_view(y_energy.data(), - kRefineNumLags24kHz); + ArrayView y_energy_view(y_energy.data(), + kRefineNumLags24kHz); ComputeSlidingFrameSquareEnergies24kHz(test_data.PitchBuffer24kHzView(), y_energy_view, params.cpu_features); EXPECT_EQ( @@ -171,8 +171,8 @@ TEST_P(ExtendedPitchPeriodSearchParametrizaion, PitchTestData test_data; std::vector y_energy(kRefineNumLags24kHz); - rtc::ArrayView y_energy_view(y_energy.data(), - kRefineNumLags24kHz); + ArrayView y_energy_view(y_energy.data(), + kRefineNumLags24kHz); ComputeSlidingFrameSquareEnergies24kHz(test_data.PitchBuffer24kHzView(), y_energy_view, params.cpu_features); // TODO(bugs.webrtc.org/8948): Add when the issue is fixed. 
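The pitch-search hunks above keep the documented convention that `y_energy` holds, for each inverted lag, the sum of squared samples of the corresponding sliding frame. Below is a minimal sketch of that per-frame quantity over a plain `ArrayView<const float>`; the helper name is an assumption for illustration and does not appear in this CL.

#include "api/array_view.h"

namespace webrtc {
namespace rnn_vad {

// Sum of squared samples for a single sliding frame; this is the kind of
// per-frame value that ComputeSlidingFrameSquareEnergies24kHz() writes into
// `y_energy`, indexed by inverted lag.
float FrameSquareEnergy(ArrayView<const float> frame) {
  float energy = 0.0f;
  for (float sample : frame) {
    energy += sample * sample;
  }
  return energy;
}

}  // namespace rnn_vad
}  // namespace webrtc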
diff --git a/modules/audio_processing/agc2/rnn_vad/ring_buffer.h b/modules/audio_processing/agc2/rnn_vad/ring_buffer.h index a6f7fdd1a6..a37df4e1cf 100644 --- a/modules/audio_processing/agc2/rnn_vad/ring_buffer.h +++ b/modules/audio_processing/agc2/rnn_vad/ring_buffer.h @@ -36,7 +36,7 @@ class RingBuffer { // Set the ring buffer values to zero. void Reset() { buffer_.fill(0); } // Replace the least recently pushed array in the buffer with `new_values`. - void Push(rtc::ArrayView new_values) { + void Push(ArrayView new_values) { std::memcpy(buffer_.data() + S * tail_, new_values.data(), S * sizeof(T)); tail_ += 1; if (tail_ == N) @@ -45,7 +45,7 @@ class RingBuffer { // Return an array view onto the array with a given delay. A view on the last // and least recently push array is returned when `delay` is 0 and N - 1 // respectively. - rtc::ArrayView GetArrayView(int delay) const { + ArrayView GetArrayView(int delay) const { RTC_DCHECK_LE(0, delay); RTC_DCHECK_LT(delay, N); int offset = tail_ - 1 - delay; diff --git a/modules/audio_processing/agc2/rnn_vad/ring_buffer_unittest.cc b/modules/audio_processing/agc2/rnn_vad/ring_buffer_unittest.cc index d11d4eac3e..e5dded51dc 100644 --- a/modules/audio_processing/agc2/rnn_vad/ring_buffer_unittest.cc +++ b/modules/audio_processing/agc2/rnn_vad/ring_buffer_unittest.cc @@ -18,7 +18,7 @@ namespace { // Compare the elements of two given array views. template -void ExpectEq(rtc::ArrayView a, rtc::ArrayView b) { +void ExpectEq(ArrayView a, ArrayView b) { for (int i = 0; i < S; ++i) { SCOPED_TRACE(i); EXPECT_EQ(a[i], b[i]); @@ -32,7 +32,7 @@ void TestRingBuffer() { SCOPED_TRACE(S); std::array prev_pushed_array; std::array pushed_array; - rtc::ArrayView pushed_array_view(pushed_array.data(), S); + ArrayView pushed_array_view(pushed_array.data(), S); // Init. RingBuffer ring_buf; diff --git a/modules/audio_processing/agc2/rnn_vad/rnn.cc b/modules/audio_processing/agc2/rnn_vad/rnn.cc index 475bef9775..394aea9ad1 100644 --- a/modules/audio_processing/agc2/rnn_vad/rnn.cc +++ b/modules/audio_processing/agc2/rnn_vad/rnn.cc @@ -74,7 +74,7 @@ void RnnVad::Reset() { } float RnnVad::ComputeVadProbability( - rtc::ArrayView feature_vector, + ArrayView feature_vector, bool is_silence) { if (is_silence) { Reset(); diff --git a/modules/audio_processing/agc2/rnn_vad/rnn.h b/modules/audio_processing/agc2/rnn_vad/rnn.h index 3148f1b3ff..2f9063a1a8 100644 --- a/modules/audio_processing/agc2/rnn_vad/rnn.h +++ b/modules/audio_processing/agc2/rnn_vad/rnn.h @@ -38,7 +38,7 @@ class RnnVad { // Observes `feature_vector` and `is_silence`, updates the RNN and returns the // current voice probability. float ComputeVadProbability( - rtc::ArrayView feature_vector, + ArrayView feature_vector, bool is_silence); private: diff --git a/modules/audio_processing/agc2/rnn_vad/rnn_fc.cc b/modules/audio_processing/agc2/rnn_vad/rnn_fc.cc index a13e77461a..b5a35b2a3d 100644 --- a/modules/audio_processing/agc2/rnn_vad/rnn_fc.cc +++ b/modules/audio_processing/agc2/rnn_vad/rnn_fc.cc @@ -22,7 +22,7 @@ namespace webrtc { namespace rnn_vad { namespace { -std::vector GetScaledParams(rtc::ArrayView params) { +std::vector GetScaledParams(ArrayView params) { std::vector scaled_params(params.size()); std::transform(params.begin(), params.end(), scaled_params.begin(), [](int8_t x) -> float { @@ -34,14 +34,14 @@ std::vector GetScaledParams(rtc::ArrayView params) { // TODO(bugs.chromium.org/10480): Hard-code optimized layout and remove this // function to improve setup time. 
// Casts and scales `weights` and re-arranges the layout. -std::vector PreprocessWeights(rtc::ArrayView weights, +std::vector PreprocessWeights(ArrayView weights, int output_size) { if (output_size == 1) { return GetScaledParams(weights); } // Transpose, scale and cast. - const int input_size = rtc::CheckedDivExact( - rtc::dchecked_cast(weights.size()), output_size); + const int input_size = + CheckedDivExact(dchecked_cast(weights.size()), output_size); std::vector w(weights.size()); for (int o = 0; o < output_size; ++o) { for (int i = 0; i < input_size; ++i) { @@ -52,7 +52,7 @@ std::vector PreprocessWeights(rtc::ArrayView weights, return w; } -rtc::FunctionView GetActivationFunction( +FunctionView GetActivationFunction( ActivationFunction activation_function) { switch (activation_function) { case ActivationFunction::kTansigApproximated: @@ -67,8 +67,8 @@ rtc::FunctionView GetActivationFunction( FullyConnectedLayer::FullyConnectedLayer( const int input_size, const int output_size, - const rtc::ArrayView bias, - const rtc::ArrayView weights, + const ArrayView bias, + const ArrayView weights, ActivationFunction activation_function, const AvailableCpuFeatures& cpu_features, absl::string_view layer_name) @@ -90,9 +90,9 @@ FullyConnectedLayer::FullyConnectedLayer( FullyConnectedLayer::~FullyConnectedLayer() = default; -void FullyConnectedLayer::ComputeOutput(rtc::ArrayView input) { +void FullyConnectedLayer::ComputeOutput(ArrayView input) { RTC_DCHECK_EQ(input.size(), input_size_); - rtc::ArrayView weights(weights_); + ArrayView weights(weights_); for (int o = 0; o < output_size_; ++o) { output_[o] = activation_function_( bias_[o] + vector_math_.DotProduct( diff --git a/modules/audio_processing/agc2/rnn_vad/rnn_fc.h b/modules/audio_processing/agc2/rnn_vad/rnn_fc.h index d23957a6f2..b802ea78e6 100644 --- a/modules/audio_processing/agc2/rnn_vad/rnn_fc.h +++ b/modules/audio_processing/agc2/rnn_vad/rnn_fc.h @@ -36,8 +36,8 @@ class FullyConnectedLayer { // Ctor. `output_size` cannot be greater than `kFullyConnectedLayerMaxUnits`. FullyConnectedLayer(int input_size, int output_size, - rtc::ArrayView bias, - rtc::ArrayView weights, + ArrayView bias, + ArrayView weights, ActivationFunction activation_function, const AvailableCpuFeatures& cpu_features, absl::string_view layer_name); @@ -53,7 +53,7 @@ class FullyConnectedLayer { int size() const { return output_size_; } // Computes the fully-connected layer output. - void ComputeOutput(rtc::ArrayView input); + void ComputeOutput(ArrayView input); private: const int input_size_; @@ -61,7 +61,7 @@ class FullyConnectedLayer { const std::vector bias_; const std::vector weights_; const VectorMath vector_math_; - rtc::FunctionView activation_function_; + FunctionView activation_function_; // Over-allocated array with size equal to `output_size_`. std::array output_; }; diff --git a/modules/audio_processing/agc2/rnn_vad/rnn_gru.cc b/modules/audio_processing/agc2/rnn_vad/rnn_gru.cc index ef37410caa..e463045a9f 100644 --- a/modules/audio_processing/agc2/rnn_vad/rnn_gru.cc +++ b/modules/audio_processing/agc2/rnn_vad/rnn_gru.cc @@ -21,12 +21,12 @@ namespace { constexpr int kNumGruGates = 3; // Update, reset, output. -std::vector PreprocessGruTensor(rtc::ArrayView tensor_src, +std::vector PreprocessGruTensor(ArrayView tensor_src, int output_size) { // Transpose, cast and scale. // `n` is the size of the first dimension of the 3-dim tensor `weights`. 
- const int n = rtc::CheckedDivExact(rtc::dchecked_cast(tensor_src.size()), - output_size * kNumGruGates); + const int n = CheckedDivExact(dchecked_cast(tensor_src.size()), + output_size * kNumGruGates); const int stride_src = kNumGruGates * output_size; const int stride_dst = n * output_size; std::vector tensor_dst(tensor_src.size()); @@ -54,12 +54,12 @@ std::vector PreprocessGruTensor(rtc::ArrayView tensor_src, void ComputeUpdateResetGate(int input_size, int output_size, const VectorMath& vector_math, - rtc::ArrayView input, - rtc::ArrayView state, - rtc::ArrayView bias, - rtc::ArrayView weights, - rtc::ArrayView recurrent_weights, - rtc::ArrayView gate) { + ArrayView input, + ArrayView state, + ArrayView bias, + ArrayView weights, + ArrayView recurrent_weights, + ArrayView gate) { RTC_DCHECK_EQ(input.size(), input_size); RTC_DCHECK_EQ(state.size(), output_size); RTC_DCHECK_EQ(bias.size(), output_size); @@ -90,13 +90,13 @@ void ComputeUpdateResetGate(int input_size, void ComputeStateGate(int input_size, int output_size, const VectorMath& vector_math, - rtc::ArrayView input, - rtc::ArrayView update, - rtc::ArrayView reset, - rtc::ArrayView bias, - rtc::ArrayView weights, - rtc::ArrayView recurrent_weights, - rtc::ArrayView state) { + ArrayView input, + ArrayView update, + ArrayView reset, + ArrayView bias, + ArrayView weights, + ArrayView recurrent_weights, + ArrayView state) { RTC_DCHECK_EQ(input.size(), input_size); RTC_DCHECK_GE(update.size(), output_size); // `update` is over-allocated. RTC_DCHECK_GE(reset.size(), output_size); // `reset` is over-allocated. @@ -124,9 +124,9 @@ void ComputeStateGate(int input_size, GatedRecurrentLayer::GatedRecurrentLayer( const int input_size, const int output_size, - const rtc::ArrayView bias, - const rtc::ArrayView weights, - const rtc::ArrayView recurrent_weights, + const ArrayView bias, + const ArrayView weights, + const ArrayView recurrent_weights, const AvailableCpuFeatures& cpu_features, absl::string_view layer_name) : input_size_(input_size), @@ -157,19 +157,19 @@ void GatedRecurrentLayer::Reset() { state_.fill(0.f); } -void GatedRecurrentLayer::ComputeOutput(rtc::ArrayView input) { +void GatedRecurrentLayer::ComputeOutput(ArrayView input) { RTC_DCHECK_EQ(input.size(), input_size_); // The tensors below are organized as a sequence of flattened tensors for the // `update`, `reset` and `state` gates. - rtc::ArrayView bias(bias_); - rtc::ArrayView weights(weights_); - rtc::ArrayView recurrent_weights(recurrent_weights_); + ArrayView bias(bias_); + ArrayView weights(weights_); + ArrayView recurrent_weights(recurrent_weights_); // Strides to access to the flattened tensors for a specific gate. const int stride_weights = input_size_ * output_size_; const int stride_recurrent_weights = output_size_ * output_size_; - rtc::ArrayView state(state_.data(), output_size_); + ArrayView state(state_.data(), output_size_); // Update gate. std::array update; diff --git a/modules/audio_processing/agc2/rnn_vad/rnn_gru.h b/modules/audio_processing/agc2/rnn_vad/rnn_gru.h index 3407dfcdf1..ae3063b357 100644 --- a/modules/audio_processing/agc2/rnn_vad/rnn_gru.h +++ b/modules/audio_processing/agc2/rnn_vad/rnn_gru.h @@ -32,9 +32,9 @@ class GatedRecurrentLayer { // Ctor. `output_size` cannot be greater than `kGruLayerMaxUnits`. 
GatedRecurrentLayer(int input_size, int output_size, - rtc::ArrayView bias, - rtc::ArrayView weights, - rtc::ArrayView recurrent_weights, + ArrayView bias, + ArrayView weights, + ArrayView recurrent_weights, const AvailableCpuFeatures& cpu_features, absl::string_view layer_name); GatedRecurrentLayer(const GatedRecurrentLayer&) = delete; @@ -51,7 +51,7 @@ class GatedRecurrentLayer { // Resets the GRU state. void Reset(); // Computes the recurrent layer output and updates the status. - void ComputeOutput(rtc::ArrayView input); + void ComputeOutput(ArrayView input); private: const int input_size_; diff --git a/modules/audio_processing/agc2/rnn_vad/rnn_gru_unittest.cc b/modules/audio_processing/agc2/rnn_vad/rnn_gru_unittest.cc index 88ae72803a..43b3e6885a 100644 --- a/modules/audio_processing/agc2/rnn_vad/rnn_gru_unittest.cc +++ b/modules/audio_processing/agc2/rnn_vad/rnn_gru_unittest.cc @@ -26,14 +26,13 @@ namespace webrtc { namespace rnn_vad { namespace { -void TestGatedRecurrentLayer( - GatedRecurrentLayer& gru, - rtc::ArrayView input_sequence, - rtc::ArrayView expected_output_sequence) { - const int input_sequence_length = rtc::CheckedDivExact( - rtc::dchecked_cast(input_sequence.size()), gru.input_size()); - const int output_sequence_length = rtc::CheckedDivExact( - rtc::dchecked_cast(expected_output_sequence.size()), gru.size()); +void TestGatedRecurrentLayer(GatedRecurrentLayer& gru, + ArrayView input_sequence, + ArrayView expected_output_sequence) { + const int input_sequence_length = CheckedDivExact( + dchecked_cast(input_sequence.size()), gru.input_size()); + const int output_sequence_length = CheckedDivExact( + dchecked_cast(expected_output_sequence.size()), gru.size()); ASSERT_EQ(input_sequence_length, output_sequence_length) << "The test data length is invalid."; // Feed the GRU layer and check the output at every step. @@ -135,7 +134,7 @@ TEST_P(RnnGruParametrization, DISABLED_BenchmarkGatedRecurrentLayer) { /*cpu_features=*/GetParam(), /*layer_name=*/"GRU"); - rtc::ArrayView input_sequence(gru_input_sequence); + ArrayView input_sequence(gru_input_sequence); ASSERT_EQ(input_sequence.size() % kInputLayerOutputSize, static_cast(0)); const int input_sequence_length = diff --git a/modules/audio_processing/agc2/rnn_vad/rnn_vad_tool.cc b/modules/audio_processing/agc2/rnn_vad/rnn_vad_tool.cc index a0e1242eb4..6686391a4e 100644 --- a/modules/audio_processing/agc2/rnn_vad/rnn_vad_tool.cc +++ b/modules/audio_processing/agc2/rnn_vad/rnn_vad_tool.cc @@ -33,7 +33,7 @@ namespace test { int main(int argc, char* argv[]) { absl::ParseCommandLine(argc, argv); - rtc::LogMessage::LogToDebug(rtc::LS_INFO); + LogMessage::LogToDebug(LS_INFO); // Open wav input file and check properties. const std::string input_wav_file = absl::GetFlag(FLAGS_i); @@ -58,8 +58,7 @@ int main(int argc, char* argv[]) { } // Initialize. - const int frame_size_10ms = - rtc::CheckedDivExact(wav_reader.sample_rate(), 100); + const int frame_size_10ms = CheckedDivExact(wav_reader.sample_rate(), 100); std::vector samples_10ms; samples_10ms.resize(frame_size_10ms); std::array samples_10ms_24kHz; @@ -74,7 +73,7 @@ int main(int argc, char* argv[]) { // Read frame at the input sample rate. const size_t read_samples = wav_reader.ReadSamples(frame_size_10ms, samples_10ms.data()); - if (rtc::SafeLt(read_samples, frame_size_10ms)) { + if (SafeLt(read_samples, frame_size_10ms)) { break; // EOF. } // Resample input. 
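The hunks above all apply the same mechanical change: `rtc::ArrayView`, `rtc::CheckedDivExact`, `rtc::dchecked_cast`, `rtc::SafeLt` and friends are now spelled unqualified and resolve in the `webrtc` namespace. As a minimal sketch of what a migrated call site looks like (hypothetical helper names, not part of this diff, assuming `api/array_view.h` still provides the view type):

#include <array>

#include "api/array_view.h"

namespace webrtc {

// Hypothetical helper: zeroes whatever the view points at.
void Zero(ArrayView<float> view) {
  for (float& v : view) {
    v = 0.f;
  }
}

void ExampleUsage() {
  std::array<float, 480> samples{};
  // A view can be built implicitly from a container...
  Zero(samples);
  // ...or explicitly from a pointer/size pair, as in the call sites above.
  Zero(ArrayView<float>(samples.data(), 10));
}

}  // namespace webrtc
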
diff --git a/modules/audio_processing/agc2/rnn_vad/sequence_buffer.h b/modules/audio_processing/agc2/rnn_vad/sequence_buffer.h index a7402788c8..c88683a2da 100644 --- a/modules/audio_processing/agc2/rnn_vad/sequence_buffer.h +++ b/modules/audio_processing/agc2/rnn_vad/sequence_buffer.h @@ -50,18 +50,16 @@ class SequenceBuffer { // Sets the sequence buffer values to zero. void Reset() { std::fill(buffer_.begin(), buffer_.end(), 0); } // Returns a view on the whole buffer. - rtc::ArrayView GetBufferView() const { - return {buffer_.data(), S}; - } + ArrayView GetBufferView() const { return {buffer_.data(), S}; } // Returns a view on the M most recent values of the buffer. - rtc::ArrayView GetMostRecentValuesView() const { + ArrayView GetMostRecentValuesView() const { static_assert(M <= S, "The number of most recent values cannot be larger than the " "sequence buffer size."); return {buffer_.data() + S - M, M}; } // Shifts left the buffer by N items and add new N items at the end. - void Push(rtc::ArrayView new_values) { + void Push(ArrayView new_values) { // Make space for the new values. if (S > N) std::memmove(buffer_.data(), buffer_.data() + N, (S - N) * sizeof(T)); diff --git a/modules/audio_processing/agc2/rnn_vad/spectral_features.cc b/modules/audio_processing/agc2/rnn_vad/spectral_features.cc index 96086babb6..3ad4cae4b9 100644 --- a/modules/audio_processing/agc2/rnn_vad/spectral_features.cc +++ b/modules/audio_processing/agc2/rnn_vad/spectral_features.cc @@ -27,7 +27,7 @@ constexpr float kSilenceThreshold = 0.04f; // Computes the new cepstral difference stats and pushes them into the passed // symmetric matrix buffer. void UpdateCepstralDifferenceStats( - rtc::ArrayView new_cepstral_coeffs, + ArrayView new_cepstral_coeffs, const RingBuffer& ring_buf, SymmetricMatrixBuffer* sym_matrix_buf) { RTC_DCHECK(sym_matrix_buf); @@ -64,7 +64,7 @@ std::array ComputeScaledHalfVorbisWindow( // applied. The Fourier coefficient corresponding to the Nyquist frequency is // set to zero (it is never used and this allows to simplify the code). void ComputeWindowedForwardFft( - rtc::ArrayView frame, + ArrayView frame, const std::array& half_window, Pffft::FloatBuffer* fft_input_buffer, Pffft::FloatBuffer* fft_output_buffer, @@ -72,8 +72,8 @@ void ComputeWindowedForwardFft( RTC_DCHECK_EQ(frame.size(), 2 * half_window.size()); // Apply windowing. auto in = fft_input_buffer->GetView(); - for (int i = 0, j = kFrameSize20ms24kHz - 1; - rtc::SafeLt(i, half_window.size()); ++i, --j) { + for (int i = 0, j = kFrameSize20ms24kHz - 1; SafeLt(i, half_window.size()); + ++i, --j) { in[i] = frame[i] * half_window[i]; in[j] = frame[j] * half_window[i]; } @@ -102,13 +102,13 @@ void SpectralFeaturesExtractor::Reset() { } bool SpectralFeaturesExtractor::CheckSilenceComputeFeatures( - rtc::ArrayView reference_frame, - rtc::ArrayView lagged_frame, - rtc::ArrayView higher_bands_cepstrum, - rtc::ArrayView average, - rtc::ArrayView first_derivative, - rtc::ArrayView second_derivative, - rtc::ArrayView bands_cross_corr, + ArrayView reference_frame, + ArrayView lagged_frame, + ArrayView higher_bands_cepstrum, + ArrayView average, + ArrayView first_derivative, + ArrayView second_derivative, + ArrayView bands_cross_corr, float* variability) { // Compute the Opus band energies for the reference frame. 
ComputeWindowedForwardFft(reference_frame, half_window_, fft_buffer_.get(), @@ -154,16 +154,16 @@ bool SpectralFeaturesExtractor::CheckSilenceComputeFeatures( } void SpectralFeaturesExtractor::ComputeAvgAndDerivatives( - rtc::ArrayView average, - rtc::ArrayView first_derivative, - rtc::ArrayView second_derivative) const { + ArrayView average, + ArrayView first_derivative, + ArrayView second_derivative) const { auto curr = cepstral_coeffs_ring_buf_.GetArrayView(0); auto prev1 = cepstral_coeffs_ring_buf_.GetArrayView(1); auto prev2 = cepstral_coeffs_ring_buf_.GetArrayView(2); RTC_DCHECK_EQ(average.size(), first_derivative.size()); RTC_DCHECK_EQ(first_derivative.size(), second_derivative.size()); RTC_DCHECK_LE(average.size(), curr.size()); - for (int i = 0; rtc::SafeLt(i, average.size()); ++i) { + for (int i = 0; SafeLt(i, average.size()); ++i) { // Average, kernel: [1, 1, 1]. average[i] = curr[i] + prev1[i] + prev2[i]; // First derivative, kernel: [1, 0, - 1]. @@ -174,12 +174,12 @@ void SpectralFeaturesExtractor::ComputeAvgAndDerivatives( } void SpectralFeaturesExtractor::ComputeNormalizedCepstralCorrelation( - rtc::ArrayView bands_cross_corr) { + ArrayView bands_cross_corr) { spectral_correlator_.ComputeCrossCorrelation( reference_frame_fft_->GetConstView(), lagged_frame_fft_->GetConstView(), bands_cross_corr_); // Normalize. - for (int i = 0; rtc::SafeLt(i, bands_cross_corr_.size()); ++i) { + for (int i = 0; SafeLt(i, bands_cross_corr_.size()); ++i) { bands_cross_corr_[i] = bands_cross_corr_[i] / std::sqrt(0.001f + reference_frame_bands_energy_[i] * diff --git a/modules/audio_processing/agc2/rnn_vad/spectral_features.h b/modules/audio_processing/agc2/rnn_vad/spectral_features.h index d327ef8e01..714999c46b 100644 --- a/modules/audio_processing/agc2/rnn_vad/spectral_features.h +++ b/modules/audio_processing/agc2/rnn_vad/spectral_features.h @@ -40,22 +40,22 @@ class SpectralFeaturesExtractor { // detects silence and computes features. If silence is detected, the output // is neither computed nor written. 
bool CheckSilenceComputeFeatures( - rtc::ArrayView reference_frame, - rtc::ArrayView lagged_frame, - rtc::ArrayView higher_bands_cepstrum, - rtc::ArrayView average, - rtc::ArrayView first_derivative, - rtc::ArrayView second_derivative, - rtc::ArrayView bands_cross_corr, + ArrayView reference_frame, + ArrayView lagged_frame, + ArrayView higher_bands_cepstrum, + ArrayView average, + ArrayView first_derivative, + ArrayView second_derivative, + ArrayView bands_cross_corr, float* variability); private: void ComputeAvgAndDerivatives( - rtc::ArrayView average, - rtc::ArrayView first_derivative, - rtc::ArrayView second_derivative) const; + ArrayView average, + ArrayView first_derivative, + ArrayView second_derivative) const; void ComputeNormalizedCepstralCorrelation( - rtc::ArrayView bands_cross_corr); + ArrayView bands_cross_corr); float ComputeVariability() const; const std::array half_window_; diff --git a/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc b/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc index a10b0f7ec9..c9aa26a707 100644 --- a/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc +++ b/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc @@ -92,15 +92,15 @@ SpectralCorrelator::SpectralCorrelator() SpectralCorrelator::~SpectralCorrelator() = default; void SpectralCorrelator::ComputeAutoCorrelation( - rtc::ArrayView x, - rtc::ArrayView auto_corr) const { + ArrayView x, + ArrayView auto_corr) const { ComputeCrossCorrelation(x, x, auto_corr); } void SpectralCorrelator::ComputeCrossCorrelation( - rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView cross_corr) const { + ArrayView x, + ArrayView y, + ArrayView cross_corr) const { RTC_DCHECK_EQ(x.size(), kFrameSize20ms24kHz); RTC_DCHECK_EQ(x.size(), y.size()); RTC_DCHECK_EQ(x[1], 0.f) << "The Nyquist coefficient must be zeroed."; @@ -123,8 +123,8 @@ void SpectralCorrelator::ComputeCrossCorrelation( } void ComputeSmoothedLogMagnitudeSpectrum( - rtc::ArrayView bands_energy, - rtc::ArrayView log_bands_energy) { + ArrayView bands_energy, + ArrayView log_bands_energy) { RTC_DCHECK_LE(bands_energy.size(), kNumBands); constexpr float kOneByHundred = 1e-2f; constexpr float kLogOneByHundred = -2.f; @@ -138,7 +138,7 @@ void ComputeSmoothedLogMagnitudeSpectrum( return x; }; // Smoothing over the bands for which the band energy is defined. - for (int i = 0; rtc::SafeLt(i, bands_energy.size()); ++i) { + for (int i = 0; SafeLt(i, bands_energy.size()); ++i) { log_bands_energy[i] = smooth(std::log10(kOneByHundred + bands_energy[i])); } // Smoothing over the remaining bands (zero energy). @@ -158,9 +158,9 @@ std::array ComputeDctTable() { return dct_table; } -void ComputeDct(rtc::ArrayView in, - rtc::ArrayView dct_table, - rtc::ArrayView out) { +void ComputeDct(ArrayView in, + ArrayView dct_table, + ArrayView out) { // DCT scaling factor - i.e., sqrt(2 / kNumBands). constexpr float kDctScalingFactor = 0.301511345f; constexpr float kDctScalingFactorError = @@ -174,9 +174,9 @@ void ComputeDct(rtc::ArrayView in, RTC_DCHECK_LE(in.size(), kNumBands); RTC_DCHECK_LE(1, out.size()); RTC_DCHECK_LE(out.size(), in.size()); - for (int i = 0; rtc::SafeLt(i, out.size()); ++i) { + for (int i = 0; SafeLt(i, out.size()); ++i) { out[i] = 0.f; - for (int j = 0; rtc::SafeLt(j, in.size()); ++j) { + for (int j = 0; SafeLt(j, in.size()); ++j) { out[i] += in[j] * dct_table[j * kNumBands + i]; } // TODO(bugs.webrtc.org/10480): Scaling factor in the DCT table. 
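A recurring pattern in the loops above is `for (int i = 0; SafeLt(i, x.size()); ++i)`: the index is a signed `int` while `size()` is unsigned, and the `Safe*` comparators from `rtc_base/numerics/safe_compare.h` compare the two without the implicit-conversion pitfalls of a plain `<`. A self-contained sketch (hypothetical function name, not taken from this diff):

#include <vector>

#include "rtc_base/numerics/safe_compare.h"

namespace webrtc {

// Hypothetical example: sums a vector using a signed loop index. SafeLt()
// yields a mathematically correct comparison between the signed index and
// the unsigned size, which `i < values.size()` does not guarantee when `i`
// is negative.
float Sum(const std::vector<float>& values) {
  float sum = 0.f;
  for (int i = 0; SafeLt(i, values.size()); ++i) {
    sum += values[i];
  }
  return sum;
}

}  // namespace webrtc
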
diff --git a/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.h b/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.h index f4b293a567..4d9fd52b97 100644 --- a/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.h +++ b/modules/audio_processing/agc2/rnn_vad/spectral_features_internal.h @@ -55,8 +55,8 @@ class SpectralCorrelator { // - be encoded as vectors of interleaved real-complex FFT coefficients // where x[1] = y[1] = 0 (the Nyquist frequency coefficient is omitted). void ComputeAutoCorrelation( - rtc::ArrayView x, - rtc::ArrayView auto_corr) const; + ArrayView x, + ArrayView auto_corr) const; // Computes the band-wise spectral cross-correlations. // `x` and `y` must: @@ -64,9 +64,9 @@ class SpectralCorrelator { // - be encoded as vectors of interleaved real-complex FFT coefficients where // x[1] = y[1] = 0 (the Nyquist frequency coefficient is omitted). void ComputeCrossCorrelation( - rtc::ArrayView x, - rtc::ArrayView y, - rtc::ArrayView cross_corr) const; + ArrayView x, + ArrayView y, + ArrayView cross_corr) const; private: const std::vector weights_; // Weights for each Fourier coefficient. @@ -77,8 +77,8 @@ class SpectralCorrelator { // computes the log magnitude spectrum applying smoothing both over time and // over frequency. Declared here for unit testing. void ComputeSmoothedLogMagnitudeSpectrum( - rtc::ArrayView bands_energy, - rtc::ArrayView log_bands_energy); + ArrayView bands_energy, + ArrayView log_bands_energy); // TODO(bugs.webrtc.org/10480): Move to anonymous namespace in // spectral_features.cc. Creates a DCT table for arrays having size equal to @@ -90,9 +90,9 @@ std::array ComputeDctTable(); // In-place computation is not allowed and `out` can be smaller than `in` in // order to only compute the first DCT coefficients. Declared here for unit // testing. 
-void ComputeDct(rtc::ArrayView in, - rtc::ArrayView dct_table, - rtc::ArrayView out); +void ComputeDct(ArrayView in, + ArrayView dct_table, + ArrayView out); } // namespace rnn_vad } // namespace webrtc diff --git a/modules/audio_processing/agc2/rnn_vad/spectral_features_internal_unittest.cc b/modules/audio_processing/agc2/rnn_vad/spectral_features_internal_unittest.cc index ece4eb5024..9c8226ac34 100644 --- a/modules/audio_processing/agc2/rnn_vad/spectral_features_internal_unittest.cc +++ b/modules/audio_processing/agc2/rnn_vad/spectral_features_internal_unittest.cc @@ -38,9 +38,9 @@ std::vector ComputeTriangularFiltersWeights() { kOpusScaleNumBins24kHz20ms.end(), 0); std::vector weights(num_weights); int next_fft_coeff_index = 0; - for (int band = 0; rtc::SafeLt(band, v.size()); ++band) { + for (int band = 0; SafeLt(band, v.size()); ++band) { const int band_size = v[band]; - for (int j = 0; rtc::SafeLt(j, band_size); ++j) { + for (int j = 0; SafeLt(j, band_size); ++j) { weights[next_fft_coeff_index + j] = static_cast(j) / band_size; } next_fft_coeff_index += band_size; @@ -56,7 +56,7 @@ TEST(RnnVadTest, TestOpusScaleBoundaries) { 3200, 4000, 4800, 5600, 6800, 8000, 9600, 12000, 15600, 20000}; constexpr auto kOpusScaleNumBins24kHz20ms = GetOpusScaleNumBins24kHz20ms(); int prev = 0; - for (int i = 0; rtc::SafeLt(i, kOpusScaleNumBins24kHz20ms.size()); ++i) { + for (int i = 0; SafeLt(i, kOpusScaleNumBins24kHz20ms.size()); ++i) { int boundary = kBandFrequencyBoundariesHz[i] * kFrameSize20ms24kHz / kSampleRate24kHz; EXPECT_EQ(kOpusScaleNumBins24kHz20ms[i], boundary - prev); @@ -73,7 +73,7 @@ TEST(RnnVadTest, DISABLED_TestOpusScaleWeights) { int i = 0; for (int band_size : GetOpusScaleNumBins24kHz20ms()) { SCOPED_TRACE(band_size); - rtc::ArrayView band_weights(weights.data() + i, band_size); + ArrayView band_weights(weights.data() + i, band_size); float prev = -1.f; for (float weight : band_weights) { EXPECT_LT(prev, weight); diff --git a/modules/audio_processing/agc2/rnn_vad/spectral_features_unittest.cc b/modules/audio_processing/agc2/rnn_vad/spectral_features_unittest.cc index 324d694957..df3f35a55c 100644 --- a/modules/audio_processing/agc2/rnn_vad/spectral_features_unittest.cc +++ b/modules/audio_processing/agc2/rnn_vad/spectral_features_unittest.cc @@ -26,33 +26,33 @@ namespace { constexpr int kTestFeatureVectorSize = kNumBands + 3 * kNumLowerBands + 1; // Writes non-zero sample values. 
-void WriteTestData(rtc::ArrayView samples) { - for (int i = 0; rtc::SafeLt(i, samples.size()); ++i) { +void WriteTestData(ArrayView samples) { + for (int i = 0; SafeLt(i, samples.size()); ++i) { samples[i] = i % 100; } } -rtc::ArrayView GetHigherBandsSpectrum( +ArrayView GetHigherBandsSpectrum( std::array* feature_vector) { return {feature_vector->data() + kNumLowerBands, kNumBands - kNumLowerBands}; } -rtc::ArrayView GetAverage( +ArrayView GetAverage( std::array* feature_vector) { return {feature_vector->data(), kNumLowerBands}; } -rtc::ArrayView GetFirstDerivative( +ArrayView GetFirstDerivative( std::array* feature_vector) { return {feature_vector->data() + kNumBands, kNumLowerBands}; } -rtc::ArrayView GetSecondDerivative( +ArrayView GetSecondDerivative( std::array* feature_vector) { return {feature_vector->data() + kNumBands + kNumLowerBands, kNumLowerBands}; } -rtc::ArrayView GetCepstralCrossCorrelation( +ArrayView GetCepstralCrossCorrelation( std::array* feature_vector) { return {feature_vector->data() + kNumBands + 2 * kNumLowerBands, kNumLowerBands}; @@ -71,7 +71,7 @@ TEST(RnnVadTest, SpectralFeaturesWithAndWithoutSilence) { // Initialize. SpectralFeaturesExtractor sfe; std::array samples; - rtc::ArrayView samples_view(samples); + ArrayView samples_view(samples); bool is_silence; std::array feature_vector; @@ -116,7 +116,7 @@ TEST(RnnVadTest, CepstralFeaturesConstantAverageZeroDerivative) { // Initialize. SpectralFeaturesExtractor sfe; std::array samples; - rtc::ArrayView samples_view(samples); + ArrayView samples_view(samples); WriteTestData(samples); // Fill the spectral features with test data. diff --git a/modules/audio_processing/agc2/rnn_vad/symmetric_matrix_buffer.h b/modules/audio_processing/agc2/rnn_vad/symmetric_matrix_buffer.h index d186479551..a84fa32b52 100644 --- a/modules/audio_processing/agc2/rnn_vad/symmetric_matrix_buffer.h +++ b/modules/audio_processing/agc2/rnn_vad/symmetric_matrix_buffer.h @@ -51,12 +51,12 @@ class SymmetricMatrixBuffer { // most recent one in the ring buffer, whereas the last element in `values` // must correspond to the comparison between the most recent item and the // oldest one in the ring buffer. - void Push(rtc::ArrayView values) { + void Push(ArrayView values) { // Move the lower-right sub-matrix of size (S-2) x (S-2) one row up and one // column left. std::memmove(buf_.data(), buf_.data() + S, (buf_.size() - S) * sizeof(T)); // Copy new values in the last column in the right order. 
- for (int i = 0; rtc::SafeLt(i, values.size()); ++i) { + for (int i = 0; SafeLt(i, values.size()); ++i) { const int index = (S - 1 - i) * (S - 1) - 1; RTC_DCHECK_GE(index, 0); RTC_DCHECK_LT(index, buf_.size()); diff --git a/modules/audio_processing/agc2/rnn_vad/test_utils.cc b/modules/audio_processing/agc2/rnn_vad/test_utils.cc index 857a9f2706..a3951bf260 100644 --- a/modules/audio_processing/agc2/rnn_vad/test_utils.cc +++ b/modules/audio_processing/agc2/rnn_vad/test_utils.cc @@ -44,7 +44,7 @@ class FloatFileReader : public FileReader { ~FloatFileReader() = default; int size() const override { return size_; } - bool ReadChunk(rtc::ArrayView dst) override { + bool ReadChunk(ArrayView dst) override { const std::streamsize bytes_to_read = dst.size() * sizeof(T); if (std::is_same::value) { is_.read(reinterpret_cast(dst.data()), bytes_to_read); @@ -70,20 +70,20 @@ class FloatFileReader : public FileReader { using webrtc::test::ResourcePath; -void ExpectEqualFloatArray(rtc::ArrayView expected, - rtc::ArrayView computed) { +void ExpectEqualFloatArray(ArrayView expected, + ArrayView computed) { ASSERT_EQ(expected.size(), computed.size()); - for (int i = 0; rtc::SafeLt(i, expected.size()); ++i) { + for (int i = 0; SafeLt(i, expected.size()); ++i) { SCOPED_TRACE(i); EXPECT_FLOAT_EQ(expected[i], computed[i]); } } -void ExpectNearAbsolute(rtc::ArrayView expected, - rtc::ArrayView computed, +void ExpectNearAbsolute(ArrayView expected, + ArrayView computed, float tolerance) { ASSERT_EQ(expected.size(), computed.size()); - for (int i = 0; rtc::SafeLt(i, expected.size()); ++i) { + for (int i = 0; SafeLt(i, expected.size()); ++i) { SCOPED_TRACE(i); EXPECT_NEAR(expected[i], computed[i], tolerance); } @@ -99,7 +99,7 @@ ChunksFileReader CreatePitchBuffer24kHzReader() { auto reader = std::make_unique>( /*filename=*/test::ResourcePath( "audio_processing/agc2/rnn_vad/pitch_buf_24k", "dat")); - const int num_chunks = rtc::CheckedDivExact(reader->size(), kBufSize24kHz); + const int num_chunks = CheckedDivExact(reader->size(), kBufSize24kHz); return {/*chunk_size=*/kBufSize24kHz, num_chunks, std::move(reader)}; } @@ -109,7 +109,7 @@ ChunksFileReader CreateLpResidualAndPitchInfoReader() { auto reader = std::make_unique>( /*filename=*/test::ResourcePath( "audio_processing/agc2/rnn_vad/pitch_lp_res", "dat")); - const int num_chunks = rtc::CheckedDivExact(reader->size(), kChunkSize); + const int num_chunks = CheckedDivExact(reader->size(), kChunkSize); return {kChunkSize, num_chunks, std::move(reader)}; } diff --git a/modules/audio_processing/agc2/rnn_vad/test_utils.h b/modules/audio_processing/agc2/rnn_vad/test_utils.h index e64b7b7ecd..60417826e3 100644 --- a/modules/audio_processing/agc2/rnn_vad/test_utils.h +++ b/modules/audio_processing/agc2/rnn_vad/test_utils.h @@ -27,15 +27,15 @@ namespace rnn_vad { constexpr float kFloatMin = std::numeric_limits::min(); -// Fails for every pair from two equally sized rtc::ArrayView views such -// that the values in the pair do not match. -void ExpectEqualFloatArray(rtc::ArrayView expected, - rtc::ArrayView computed); - -// Fails for every pair from two equally sized rtc::ArrayView views such -// that their absolute error is above a given threshold. -void ExpectNearAbsolute(rtc::ArrayView expected, - rtc::ArrayView computed, +// Fails for every pair from two equally sized webrtc::ArrayView views +// such that the values in the pair do not match. 
+void ExpectEqualFloatArray(ArrayView expected, + ArrayView computed); + +// Fails for every pair from two equally sized webrtc::ArrayView views +// such that their absolute error is above a given threshold. +void ExpectNearAbsolute(ArrayView expected, + ArrayView computed, float tolerance); // File reader interface. @@ -49,7 +49,7 @@ class FileReader { // values are correctly read. If the number of remaining bytes in the file is // not sufficient to read `dst.size()` float values, `dst` is partially // modified and false is returned. - virtual bool ReadChunk(rtc::ArrayView dst) = 0; + virtual bool ReadChunk(ArrayView dst) = 0; // Reads a single float value, advances the internal file position according // to the number of read bytes and returns true if the value is correctly // read. If the number of remaining bytes in the file is not sufficient to @@ -90,14 +90,13 @@ class PitchTestData { public: PitchTestData(); ~PitchTestData(); - rtc::ArrayView PitchBuffer24kHzView() const { + ArrayView PitchBuffer24kHzView() const { return pitch_buffer_24k_; } - rtc::ArrayView SquareEnergies24kHzView() - const { + ArrayView SquareEnergies24kHzView() const { return square_energies_24k_; } - rtc::ArrayView AutoCorrelation12kHzView() const { + ArrayView AutoCorrelation12kHzView() const { return auto_correlation_12k_; } @@ -115,7 +114,7 @@ class FileWriter { FileWriter(const FileWriter&) = delete; FileWriter& operator=(const FileWriter&) = delete; ~FileWriter() = default; - void WriteChunk(rtc::ArrayView value) { + void WriteChunk(ArrayView value) { const std::streamsize bytes_to_write = value.size() * sizeof(float); os_.write(reinterpret_cast(value.data()), bytes_to_write); } diff --git a/modules/audio_processing/agc2/rnn_vad/vector_math.h b/modules/audio_processing/agc2/rnn_vad/vector_math.h index 47f681196a..0cf4e34efb 100644 --- a/modules/audio_processing/agc2/rnn_vad/vector_math.h +++ b/modules/audio_processing/agc2/rnn_vad/vector_math.h @@ -40,8 +40,7 @@ class VectorMath { : cpu_features_(cpu_features) {} // Computes the dot product between two equally sized vectors. - float DotProduct(rtc::ArrayView x, - rtc::ArrayView y) const { + float DotProduct(ArrayView x, ArrayView y) const { RTC_DCHECK_EQ(x.size(), y.size()); #if defined(WEBRTC_ARCH_X86_FAMILY) if (cpu_features_.avx2) { @@ -67,8 +66,8 @@ class VectorMath { accumulator = _mm_add_ps(accumulator, high); float dot_product = _mm_cvtss_f32(accumulator); // Add the result for the last block if incomplete. - for (int i = incomplete_block_index; - i < rtc::dchecked_cast(x.size()); ++i) { + for (int i = incomplete_block_index; i < dchecked_cast(x.size()); + ++i) { dot_product += x[i] * y[i]; } return dot_product; @@ -92,7 +91,7 @@ class VectorMath { float dot_product = vget_lane_f32(vpadd_f32(tmp, vrev64_f32(tmp)), 0); // Add the result for the last block if incomplete. 
for (int i = incomplete_block_index; - i < rtc::dchecked_cast(x.size()); ++i) { + i < webrtc::dchecked_cast(x.size()); ++i) { dot_product += x[i] * y[i]; } return dot_product; @@ -102,8 +101,8 @@ class VectorMath { } private: - float DotProductAvx2(rtc::ArrayView x, - rtc::ArrayView y) const; + float DotProductAvx2(ArrayView x, + ArrayView y) const; const AvailableCpuFeatures cpu_features_; }; diff --git a/modules/audio_processing/agc2/rnn_vad/vector_math_avx2.cc b/modules/audio_processing/agc2/rnn_vad/vector_math_avx2.cc index a875e11daf..71466dab20 100644 --- a/modules/audio_processing/agc2/rnn_vad/vector_math_avx2.cc +++ b/modules/audio_processing/agc2/rnn_vad/vector_math_avx2.cc @@ -18,8 +18,8 @@ namespace webrtc { namespace rnn_vad { -float VectorMath::DotProductAvx2(rtc::ArrayView x, - rtc::ArrayView y) const { +float VectorMath::DotProductAvx2(ArrayView x, + ArrayView y) const { RTC_DCHECK(cpu_features_.avx2); RTC_DCHECK_EQ(x.size(), y.size()); __m256 accumulator = _mm256_setzero_ps(); @@ -43,8 +43,7 @@ float VectorMath::DotProductAvx2(rtc::ArrayView x, low = _mm_add_ss(high, low); float dot_product = _mm_cvtss_f32(low); // Add the result for the last block if incomplete. - for (int i = incomplete_block_index; i < rtc::dchecked_cast(x.size()); - ++i) { + for (int i = incomplete_block_index; i < dchecked_cast(x.size()); ++i) { dot_product += x[i] * y[i]; } return dot_product; diff --git a/modules/audio_processing/agc2/saturation_protector.cc b/modules/audio_processing/agc2/saturation_protector.cc index 961baf4cd3..fa35045662 100644 --- a/modules/audio_processing/agc2/saturation_protector.cc +++ b/modules/audio_processing/agc2/saturation_protector.cc @@ -64,7 +64,7 @@ void UpdateSaturationProtectorState(float peak_dbfs, // Get the max peak over `kPeakEnveloperSuperFrameLengthMs` ms. state.max_peaks_dbfs = std::max(state.max_peaks_dbfs, peak_dbfs); state.time_since_push_ms += kFrameDurationMs; - if (rtc::SafeGt(state.time_since_push_ms, kPeakEnveloperSuperFrameLengthMs)) { + if (SafeGt(state.time_since_push_ms, kPeakEnveloperSuperFrameLengthMs)) { // Push `max_peaks_dbfs` back into the ring buffer. state.peak_delay_buffer.PushBack(state.max_peaks_dbfs); // Reset. @@ -88,7 +88,7 @@ void UpdateSaturationProtectorState(float peak_dbfs, } state.headroom_db = - rtc::SafeClamp(state.headroom_db, kMinMarginDb, kMaxMarginDb); + SafeClamp(state.headroom_db, kMinMarginDb, kMaxMarginDb); } // Saturation protector which recommends a headroom based on the recent peaks. diff --git a/modules/audio_processing/agc2/saturation_protector_buffer.cc b/modules/audio_processing/agc2/saturation_protector_buffer.cc index 41efdad2c8..be0c121fab 100644 --- a/modules/audio_processing/agc2/saturation_protector_buffer.cc +++ b/modules/audio_processing/agc2/saturation_protector_buffer.cc @@ -54,24 +54,24 @@ void SaturationProtectorBuffer::PushBack(float v) { RTC_DCHECK_LT(next_, buffer_.size()); RTC_DCHECK_LE(size_, buffer_.size()); buffer_[next_++] = v; - if (rtc::SafeEq(next_, buffer_.size())) { + if (SafeEq(next_, buffer_.size())) { next_ = 0; } - if (rtc::SafeLt(size_, buffer_.size())) { + if (SafeLt(size_, buffer_.size())) { size_++; } } -absl::optional SaturationProtectorBuffer::Front() const { +std::optional SaturationProtectorBuffer::Front() const { if (size_ == 0) { - return absl::nullopt; + return std::nullopt; } RTC_DCHECK_LT(FrontIndex(), buffer_.size()); return buffer_[FrontIndex()]; } int SaturationProtectorBuffer::FrontIndex() const { - return rtc::SafeEq(size_, buffer_.size()) ? 
next_ : 0; + return SafeEq(size_, buffer_.size()) ? next_ : 0; } } // namespace webrtc diff --git a/modules/audio_processing/agc2/saturation_protector_buffer.h b/modules/audio_processing/agc2/saturation_protector_buffer.h index e17d0998c4..3965e93746 100644 --- a/modules/audio_processing/agc2/saturation_protector_buffer.h +++ b/modules/audio_processing/agc2/saturation_protector_buffer.h @@ -12,8 +12,8 @@ #define MODULES_AUDIO_PROCESSING_AGC2_SATURATION_PROTECTOR_BUFFER_H_ #include +#include -#include "absl/types/optional.h" #include "modules/audio_processing/agc2/agc2_common.h" namespace webrtc { @@ -43,7 +43,7 @@ class SaturationProtectorBuffer { // Returns the oldest item in the buffer. Returns an empty value if the // buffer is empty. - absl::optional Front() const; + std::optional Front() const; private: int FrontIndex() const; diff --git a/modules/audio_processing/agc2/speech_level_estimator.cc b/modules/audio_processing/agc2/speech_level_estimator.cc index 7bf3252116..f9354d1607 100644 --- a/modules/audio_processing/agc2/speech_level_estimator.cc +++ b/modules/audio_processing/agc2/speech_level_estimator.cc @@ -20,7 +20,7 @@ namespace webrtc { namespace { float ClampLevelEstimateDbfs(float level_estimate_dbfs) { - return rtc::SafeClamp(level_estimate_dbfs, -90.0f, 30.0f); + return SafeClamp(level_estimate_dbfs, -90.0f, 30.0f); } // Returns the initial speech level estimate needed to apply the initial gain. diff --git a/modules/audio_processing/agc2/speech_level_estimator.h b/modules/audio_processing/agc2/speech_level_estimator.h index 4d9f106ba9..adbf84c6c5 100644 --- a/modules/audio_processing/agc2/speech_level_estimator.h +++ b/modules/audio_processing/agc2/speech_level_estimator.h @@ -15,8 +15,8 @@ #include +#include "api/audio/audio_processing.h" #include "modules/audio_processing/agc2/agc2_common.h" -#include "modules/audio_processing/include/audio_processing.h" namespace webrtc { class ApmDataDumper; diff --git a/modules/audio_processing/agc2/speech_level_estimator_unittest.cc b/modules/audio_processing/agc2/speech_level_estimator_unittest.cc index e1c5f85434..939b1146de 100644 --- a/modules/audio_processing/agc2/speech_level_estimator_unittest.cc +++ b/modules/audio_processing/agc2/speech_level_estimator_unittest.cc @@ -12,8 +12,8 @@ #include +#include "api/audio/audio_processing.h" #include "modules/audio_processing/agc2/agc2_common.h" -#include "modules/audio_processing/include/audio_processing.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/gunit.h" @@ -152,7 +152,7 @@ TEST(GainController2SpeechLevelEstimator, ConvergenceSpeedAfterConfidence) { level_estimator.initial_speech_level_dbfs); ASSERT_TRUE(level_estimator.estimator->is_confident()); // After confidence. - constexpr float kConvergenceTimeAfterConfidenceNumFrames = 600; // 6 seconds. + constexpr float kConvergenceTimeAfterConfidenceNumFrames = 700; // 7 seconds. 
static_assert( kConvergenceTimeAfterConfidenceNumFrames > kNumFramesToConfidence, ""); RunOnConstantLevel( diff --git a/modules/audio_processing/agc2/vad_wrapper.cc b/modules/audio_processing/agc2/vad_wrapper.cc index af6325dea7..238c51e422 100644 --- a/modules/audio_processing/agc2/vad_wrapper.cc +++ b/modules/audio_processing/agc2/vad_wrapper.cc @@ -13,7 +13,6 @@ #include #include -#include "api/array_view.h" #include "common_audio/resampler/include/push_resampler.h" #include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/agc2/rnn_vad/common.h" @@ -36,7 +35,7 @@ class MonoVadImpl : public VoiceActivityDetectorWrapper::MonoVad { int SampleRateHz() const override { return rnn_vad::kSampleRate24kHz; } void Reset() override { rnn_vad_.Reset(); } - float Analyze(rtc::ArrayView frame) override { + float Analyze(MonoView frame) override { RTC_DCHECK_EQ(frame.size(), rnn_vad::kFrameSize10ms24kHz); std::array feature_vector; const bool is_silence = features_extractor_.CheckSilenceComputeFeatures( @@ -72,40 +71,34 @@ VoiceActivityDetectorWrapper::VoiceActivityDetectorWrapper( std::unique_ptr vad, int sample_rate_hz) : vad_reset_period_frames_( - rtc::CheckedDivExact(vad_reset_period_ms, kFrameDurationMs)), + CheckedDivExact(vad_reset_period_ms, kFrameDurationMs)), + frame_size_(CheckedDivExact(sample_rate_hz, kNumFramesPerSecond)), time_to_vad_reset_(vad_reset_period_frames_), - vad_(std::move(vad)) { - RTC_DCHECK(vad_); + vad_(std::move(vad)), + resampled_buffer_( + CheckedDivExact(vad_->SampleRateHz(), kNumFramesPerSecond)), + resampler_(frame_size_, + resampled_buffer_.size(), + /*num_channels=*/1) { RTC_DCHECK_GT(vad_reset_period_frames_, 1); - resampled_buffer_.resize( - rtc::CheckedDivExact(vad_->SampleRateHz(), kNumFramesPerSecond)); - Initialize(sample_rate_hz); + vad_->Reset(); } VoiceActivityDetectorWrapper::~VoiceActivityDetectorWrapper() = default; -void VoiceActivityDetectorWrapper::Initialize(int sample_rate_hz) { - RTC_DCHECK_GT(sample_rate_hz, 0); - frame_size_ = rtc::CheckedDivExact(sample_rate_hz, kNumFramesPerSecond); - int status = - resampler_.InitializeIfNeeded(sample_rate_hz, vad_->SampleRateHz(), - /*num_channels=*/1); - constexpr int kStatusOk = 0; - RTC_DCHECK_EQ(status, kStatusOk); - vad_->Reset(); -} - -float VoiceActivityDetectorWrapper::Analyze(AudioFrameView frame) { +float VoiceActivityDetectorWrapper::Analyze( + DeinterleavedView frame) { // Periodically reset the VAD. time_to_vad_reset_--; if (time_to_vad_reset_ <= 0) { vad_->Reset(); time_to_vad_reset_ = vad_reset_period_frames_; } + // Resample the first channel of `frame`. 
RTC_DCHECK_EQ(frame.samples_per_channel(), frame_size_); - resampler_.Resample(frame.channel(0).data(), frame_size_, - resampled_buffer_.data(), resampled_buffer_.size()); + MonoView dst(resampled_buffer_.data(), resampled_buffer_.size()); + resampler_.Resample(frame[0], dst); return vad_->Analyze(resampled_buffer_); } diff --git a/modules/audio_processing/agc2/vad_wrapper.h b/modules/audio_processing/agc2/vad_wrapper.h index 459c471630..025a48ef22 100644 --- a/modules/audio_processing/agc2/vad_wrapper.h +++ b/modules/audio_processing/agc2/vad_wrapper.h @@ -14,10 +14,9 @@ #include #include -#include "api/array_view.h" +#include "api/audio/audio_view.h" #include "common_audio/resampler/include/push_resampler.h" #include "modules/audio_processing/agc2/cpu_features.h" -#include "modules/audio_processing/include/audio_frame_view.h" namespace webrtc { @@ -37,7 +36,7 @@ class VoiceActivityDetectorWrapper { // Resets the internal state. virtual void Reset() = 0; // Analyzes an audio frame and returns the speech probability. - virtual float Analyze(rtc::ArrayView frame) = 0; + virtual float Analyze(MonoView frame) = 0; }; // Ctor. Uses `cpu_features` to instantiate the default VAD. @@ -60,21 +59,18 @@ class VoiceActivityDetectorWrapper { delete; ~VoiceActivityDetectorWrapper(); - // Initializes the VAD wrapper. - void Initialize(int sample_rate_hz); - // Analyzes the first channel of `frame` and returns the speech probability. // `frame` must be a 10 ms frame with the sample rate specified in the last // `Initialize()` call. - float Analyze(AudioFrameView frame); + float Analyze(DeinterleavedView frame); private: const int vad_reset_period_frames_; - int frame_size_; + const int frame_size_; int time_to_vad_reset_; - PushResampler resampler_; std::unique_ptr vad_; std::vector resampled_buffer_; + PushResampler resampler_; }; } // namespace webrtc diff --git a/modules/audio_processing/agc2/vad_wrapper_unittest.cc b/modules/audio_processing/agc2/vad_wrapper_unittest.cc index 91efdb566e..a4c1dcba21 100644 --- a/modules/audio_processing/agc2/vad_wrapper_unittest.cc +++ b/modules/audio_processing/agc2/vad_wrapper_unittest.cc @@ -16,8 +16,8 @@ #include #include +#include "api/audio/audio_view.h" #include "modules/audio_processing/agc2/agc2_common.h" -#include "modules/audio_processing/include/audio_frame_view.h" #include "rtc_base/checks.h" #include "rtc_base/gunit.h" #include "rtc_base/numerics/safe_compare.h" @@ -42,7 +42,10 @@ class MockVad : public VoiceActivityDetectorWrapper::MonoVad { public: MOCK_METHOD(int, SampleRateHz, (), (const, override)); MOCK_METHOD(void, Reset, (), (override)); - MOCK_METHOD(float, Analyze, (rtc::ArrayView frame), (override)); + MOCK_METHOD(float, + Analyze, + (webrtc::ArrayView frame), + (override)); }; // Checks that the ctor and `Initialize()` read the sample rate of the wrapped @@ -50,7 +53,7 @@ class MockVad : public VoiceActivityDetectorWrapper::MonoVad { TEST(GainController2VoiceActivityDetectorWrapper, CtorAndInitReadSampleRate) { auto vad = std::make_unique(); EXPECT_CALL(*vad, SampleRateHz) - .Times(2) + .Times(1) .WillRepeatedly(Return(kSampleRate8kHz)); EXPECT_CALL(*vad, Reset).Times(AnyNumber()); auto vad_wrapper = std::make_unique( @@ -83,13 +86,10 @@ std::unique_ptr CreateMockVadWrapper( struct FrameWithView { // Ctor. Initializes the frame samples with `value`. 
explicit FrameWithView(int sample_rate_hz) - : samples(rtc::CheckedDivExact(sample_rate_hz, kNumFramesPerSecond), - 0.0f), - channel0(samples.data()), - view(&channel0, /*num_channels=*/1, samples.size()) {} + : samples(CheckedDivExact(sample_rate_hz, kNumFramesPerSecond), 0.0f), + view(samples.data(), samples.size(), /*num_channels=*/1) {} std::vector samples; - const float* const channel0; - const AudioFrameView view; + const DeinterleavedView view; }; // Checks that the expected speech probabilities are returned. @@ -101,7 +101,7 @@ TEST(GainController2VoiceActivityDetectorWrapper, CheckSpeechProbabilities) { speech_probabilities, /*expected_vad_reset_calls=*/1); FrameWithView frame(kSampleRate8kHz); - for (int i = 0; rtc::SafeLt(i, speech_probabilities.size()); ++i) { + for (int i = 0; SafeLt(i, speech_probabilities.size()); ++i) { SCOPED_TRACE(i); EXPECT_EQ(speech_probabilities[i], vad_wrapper->Analyze(frame.view)); } @@ -161,9 +161,9 @@ TEST_P(VadResamplingParametrization, CheckResampledFrameSize) { .Times(AnyNumber()) .WillRepeatedly(Return(vad_sample_rate_hz())); EXPECT_CALL(*vad, Reset).Times(1); - EXPECT_CALL(*vad, Analyze(Truly([this](rtc::ArrayView frame) { - return rtc::SafeEq(frame.size(), rtc::CheckedDivExact(vad_sample_rate_hz(), - kNumFramesPerSecond)); + EXPECT_CALL(*vad, Analyze(Truly([this](ArrayView frame) { + return SafeEq(frame.size(), + CheckedDivExact(vad_sample_rate_hz(), kNumFramesPerSecond)); }))).Times(1); auto vad_wrapper = std::make_unique( kNoVadPeriodicReset, std::move(vad), input_sample_rate_hz()); diff --git a/modules/audio_processing/agc2/vector_float_frame.cc b/modules/audio_processing/agc2/vector_float_frame.cc index a70d815196..85dd7feb21 100644 --- a/modules/audio_processing/agc2/vector_float_frame.cc +++ b/modules/audio_processing/agc2/vector_float_frame.cc @@ -12,28 +12,20 @@ namespace webrtc { -namespace { - -std::vector ConstructChannelPointers( - std::vector>* x) { - std::vector channel_ptrs; - for (auto& v : *x) { - channel_ptrs.push_back(v.data()); - } - return channel_ptrs; -} -} // namespace - VectorFloatFrame::VectorFloatFrame(int num_channels, int samples_per_channel, float start_value) - : channels_(num_channels, - std::vector(samples_per_channel, start_value)), - channel_ptrs_(ConstructChannelPointers(&channels_)), - float_frame_view_(channel_ptrs_.data(), - channels_.size(), - samples_per_channel) {} + : channels_(num_channels * samples_per_channel, start_value), + view_(channels_.data(), samples_per_channel, num_channels) {} VectorFloatFrame::~VectorFloatFrame() = default; +AudioFrameView VectorFloatFrame::float_frame_view() { + return AudioFrameView(view_); +} + +AudioFrameView VectorFloatFrame::float_frame_view() const { + return AudioFrameView(view_); +} + } // namespace webrtc diff --git a/modules/audio_processing/agc2/vector_float_frame.h b/modules/audio_processing/agc2/vector_float_frame.h index b521f346f9..e2a3211313 100644 --- a/modules/audio_processing/agc2/vector_float_frame.h +++ b/modules/audio_processing/agc2/vector_float_frame.h @@ -13,6 +13,7 @@ #include +#include "api/audio/audio_view.h" #include "modules/audio_processing/include/audio_frame_view.h" namespace webrtc { @@ -24,17 +25,17 @@ class VectorFloatFrame { VectorFloatFrame(int num_channels, int samples_per_channel, float start_value); - const AudioFrameView& float_frame_view() { return float_frame_view_; } - AudioFrameView float_frame_view() const { - return float_frame_view_; - } - ~VectorFloatFrame(); + AudioFrameView float_frame_view(); + AudioFrameView 
float_frame_view() const; + + DeinterleavedView view() { return view_; } + DeinterleavedView view() const { return view_; } + private: - std::vector> channels_; - std::vector channel_ptrs_; - AudioFrameView float_frame_view_; + std::vector channels_; + DeinterleavedView view_; }; } // namespace webrtc diff --git a/modules/audio_processing/audio_buffer.cc b/modules/audio_processing/audio_buffer.cc index 3dbe1fe072..a1b55fe10a 100644 --- a/modules/audio_processing/audio_buffer.cc +++ b/modules/audio_processing/audio_buffer.cc @@ -15,7 +15,6 @@ #include #include "common_audio/channel_buffer.h" -#include "common_audio/include/audio_util.h" #include "common_audio/resampler/push_sinc_resampler.h" #include "modules/audio_processing/splitting_filter.h" #include "rtc_base/checks.h" @@ -25,7 +24,6 @@ namespace { constexpr size_t kSamplesPer32kHzChannel = 320; constexpr size_t kSamplesPer48kHzChannel = 480; -constexpr size_t kMaxSamplesPerChannel = AudioBuffer::kMaxSampleRate / 100; size_t NumBandsFromFramesPerChannel(size_t num_frames) { if (num_frames == kSamplesPer32kHzChannel) { @@ -44,7 +42,7 @@ AudioBuffer::AudioBuffer(size_t input_rate, size_t buffer_rate, size_t buffer_num_channels, size_t output_rate, - size_t output_num_channels) + size_t /* output_num_channels */) : input_num_frames_(static_cast(input_rate) / 100), input_num_channels_(input_num_channels), buffer_num_frames_(static_cast(buffer_rate) / 100), @@ -53,7 +51,7 @@ AudioBuffer::AudioBuffer(size_t input_rate, output_num_channels_(0), num_channels_(buffer_num_channels), num_bands_(NumBandsFromFramesPerChannel(buffer_num_frames_)), - num_split_frames_(rtc::CheckedDivExact(buffer_num_frames_, num_bands_)), + num_split_frames_(CheckedDivExact(buffer_num_frames_, num_bands_)), data_( new ChannelBuffer(buffer_num_frames_, buffer_num_channels_)) { RTC_DCHECK_GT(input_num_frames_, 0); @@ -110,9 +108,9 @@ void AudioBuffer::CopyFrom(const float* const* stacked_data, const bool resampling_needed = input_num_frames_ != buffer_num_frames_; if (downmix_needed) { - RTC_DCHECK_GE(kMaxSamplesPerChannel, input_num_frames_); + RTC_DCHECK_GE(kMaxSamplesPerChannel10ms, input_num_frames_); - std::array downmix; + std::array downmix; if (downmix_by_averaging_) { const float kOneByNumChannels = 1.f / input_num_channels_; for (size_t i = 0; i < input_num_frames_; ++i) { @@ -230,7 +228,7 @@ void AudioBuffer::CopyFrom(const int16_t* const interleaved_data, if (num_channels_ == 1) { if (input_num_channels_ == 1) { if (resampling_required) { - std::array float_buffer; + std::array float_buffer; S16ToFloatS16(interleaved, input_num_frames_, float_buffer.data()); input_resamplers_[0]->Resample(float_buffer.data(), input_num_frames_, data_->channels()[0], @@ -239,7 +237,7 @@ void AudioBuffer::CopyFrom(const int16_t* const interleaved_data, S16ToFloatS16(interleaved, input_num_frames_, data_->channels()[0]); } } else { - std::array float_buffer; + std::array float_buffer; float* downmixed_data = resampling_required ? 
float_buffer.data() : data_->channels()[0]; if (downmix_by_averaging_) { @@ -274,7 +272,7 @@ void AudioBuffer::CopyFrom(const int16_t* const interleaved_data, }; if (resampling_required) { - std::array float_buffer; + std::array float_buffer; for (size_t i = 0; i < num_channels_; ++i) { deinterleave_channel(i, num_channels_, input_num_frames_, interleaved, float_buffer.data()); @@ -302,7 +300,7 @@ void AudioBuffer::CopyTo(const StreamConfig& stream_config, int16_t* interleaved = interleaved_data; if (num_channels_ == 1) { - std::array float_buffer; + std::array float_buffer; if (resampling_required) { output_resamplers_[0]->Resample(data_->channels()[0], buffer_num_frames_, @@ -335,7 +333,7 @@ void AudioBuffer::CopyTo(const StreamConfig& stream_config, if (resampling_required) { for (size_t i = 0; i < num_channels_; ++i) { - std::array float_buffer; + std::array float_buffer; output_resamplers_[i]->Resample(data_->channels()[i], buffer_num_frames_, float_buffer.data(), output_num_frames_); diff --git a/modules/audio_processing/audio_buffer.h b/modules/audio_processing/audio_buffer.h index b9ea3000a2..9369572af8 100644 --- a/modules/audio_processing/audio_buffer.h +++ b/modules/audio_processing/audio_buffer.h @@ -17,8 +17,10 @@ #include #include +#include "api/audio/audio_processing.h" +#include "api/audio/audio_view.h" #include "common_audio/channel_buffer.h" -#include "modules/audio_processing/include/audio_processing.h" +#include "common_audio/include/audio_util.h" namespace webrtc { @@ -32,7 +34,8 @@ enum Band { kBand0To8kHz = 0, kBand8To16kHz = 1, kBand16To24kHz = 2 }; class AudioBuffer { public: static const int kSplitBandSize = 160; - static const int kMaxSampleRate = 384000; + // TODO(tommi): Remove this (`AudioBuffer::kMaxSampleRate`) constant. + static const int kMaxSampleRate = webrtc::kMaxSampleRateHz; AudioBuffer(size_t input_rate, size_t input_num_channels, size_t buffer_rate, @@ -56,6 +59,13 @@ class AudioBuffer { // reset at each call to CopyFrom or InterleaveFrom. void set_num_channels(size_t num_channels); + // Returns a DeinterleavedView<> over the channel data. + DeinterleavedView view() { + return DeinterleavedView( + num_channels_ && buffer_num_frames_ ? channels()[0] : nullptr, + buffer_num_frames_, num_channels_); + } + size_t num_channels() const { return num_channels_; } size_t num_frames() const { return buffer_num_frames_; } size_t num_frames_per_band() const { return num_split_frames_; } diff --git a/modules/audio_processing/audio_buffer_unittest.cc b/modules/audio_processing/audio_buffer_unittest.cc index f3b2ddc689..ef3479e4f5 100644 --- a/modules/audio_processing/audio_buffer_unittest.cc +++ b/modules/audio_processing/audio_buffer_unittest.cc @@ -12,6 +12,7 @@ #include +#include "api/audio/audio_view.h" #include "test/gtest.h" #include "test/testsupport/rtc_expect_death.h" @@ -90,4 +91,28 @@ TEST(AudioBufferTest, CopyWithResampling) { // Verify that energies match. EXPECT_NEAR(energy_ab1, energy_ab2 * 32000.f / 48000.f, .01f * energy_ab1); } + +TEST(AudioBufferTest, DeinterleavedView) { + AudioBuffer ab(48000, 2, 48000, 2, 48000, 2); + // Fill the buffer with data. + const float pi = std::acos(-1.f); + float* const* channels = ab.channels(); + for (size_t ch = 0; ch < ab.num_channels(); ++ch) { + for (size_t i = 0; i < ab.num_frames(); ++i) { + channels[ch][i] = std::sin(2 * pi * 100.f / 32000.f * i); + } + } + + // Verify that the DeinterleavedView correctly maps to channels. 
+ DeinterleavedView view = ab.view(); + ASSERT_EQ(view.num_channels(), ab.num_channels()); + for (size_t c = 0; c < view.num_channels(); ++c) { + MonoView channel = view[c]; + EXPECT_EQ(SamplesPerChannel(channel), ab.num_frames()); + for (size_t s = 0; s < SamplesPerChannel(channel); ++s) { + ASSERT_EQ(channel[s], channels[c][s]); + } + } +} + } // namespace webrtc diff --git a/modules/audio_processing/audio_frame_view_unittest.cc b/modules/audio_processing/audio_frame_view_unittest.cc index fd25bc3b0b..30f1d8e0c3 100644 --- a/modules/audio_processing/audio_frame_view_unittest.cc +++ b/modules/audio_processing/audio_frame_view_unittest.cc @@ -10,7 +10,11 @@ #include "modules/audio_processing/include/audio_frame_view.h" +#include + +#include "common_audio/channel_buffer.h" #include "modules/audio_processing/audio_buffer.h" +#include "rtc_base/arraysize.h" #include "test/gtest.h" namespace webrtc { @@ -19,8 +23,8 @@ TEST(AudioFrameTest, ConstructFromAudioBuffer) { constexpr int kNumChannels = 2; constexpr float kFloatConstant = 1272.f; constexpr float kIntConstant = 17252; - const webrtc::StreamConfig stream_config(kSampleRateHz, kNumChannels); - webrtc::AudioBuffer buffer( + const StreamConfig stream_config(kSampleRateHz, kNumChannels); + AudioBuffer buffer( stream_config.sample_rate_hz(), stream_config.num_channels(), stream_config.sample_rate_hz(), stream_config.num_channels(), stream_config.sample_rate_hz(), stream_config.num_channels()); @@ -48,4 +52,40 @@ TEST(AudioFrameTest, ConstructFromAudioBuffer) { non_const_float_view.channel(0)[0] = kIntConstant; EXPECT_EQ(buffer.channels()[0][0], kIntConstant); } + +TEST(AudioFrameTest, ConstructFromChannelBuffer) { + ChannelBuffer buffer(480, 2); + AudioFrameView view(buffer.channels(), buffer.num_channels(), + buffer.num_frames()); + EXPECT_EQ(view.num_channels(), 2); + EXPECT_EQ(view.samples_per_channel(), 480); +} + +TEST(AudioFrameTest, ToDeinterleavedView) { + ChannelBuffer buffer(480, 2); + AudioFrameView view(buffer.channels(), buffer.num_channels(), + buffer.num_frames()); + + DeinterleavedView non_const_view = view.view(); + DeinterleavedView const_view = + static_cast&>(view).view(); + + ASSERT_EQ(non_const_view.num_channels(), 2u); + ASSERT_EQ(const_view.num_channels(), 2u); + for (size_t i = 0; i < non_const_view.num_channels(); ++i) { + EXPECT_EQ(non_const_view[i].data(), const_view[i].data()); + EXPECT_EQ(non_const_view[i].data(), view.channel(i).data()); + } +} + +TEST(AudioFrameTest, FromDeinterleavedView) { + std::array buffer; + DeinterleavedView view(buffer.data(), buffer.size() / 2u, 2u); + AudioFrameView frame_view(view); + EXPECT_EQ(static_cast(frame_view.num_channels()), + view.num_channels()); + EXPECT_EQ(frame_view[0], view[0]); + EXPECT_EQ(frame_view[1], view[1]); +} + } // namespace webrtc diff --git a/modules/audio_processing/audio_processing_impl.cc b/modules/audio_processing/audio_processing_impl.cc index c80cc76a3d..56a9149551 100644 --- a/modules/audio_processing/audio_processing_impl.cc +++ b/modules/audio_processing/audio_processing_impl.cc @@ -14,29 +14,29 @@ #include #include #include +#include #include #include #include -#include "absl/strings/match.h" +#include "absl/base/nullability.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio/audio_frame.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" +#include "api/task_queue/task_queue_base.h" #include "common_audio/audio_converter.h" #include 
"common_audio/include/audio_util.h" #include "modules/audio_processing/aec_dump/aec_dump_factory.h" #include "modules/audio_processing/audio_buffer.h" #include "modules/audio_processing/include/audio_frame_view.h" #include "modules/audio_processing/logging/apm_data_dumper.h" -#include "modules/audio_processing/optionally_built_submodule_creators.h" #include "rtc_base/checks.h" -#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/denormal_disabler.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" #define RETURN_ON_ERR(expr) \ @@ -57,15 +57,15 @@ bool SampleRateSupportsMultiBand(int sample_rate_hz) { } // Checks whether the high-pass filter should be done in the full-band. -bool EnforceSplitBandHpf() { - return field_trial::IsEnabled("WebRTC-FullBandHpfKillSwitch"); +bool EnforceSplitBandHpf(const FieldTrialsView& field_trials) { + return field_trials.IsEnabled("WebRTC-FullBandHpfKillSwitch"); } // Checks whether AEC3 should be allowed to decide what the default // configuration should be based on the render and capture channel configuration // at hand. -bool UseSetupSpecificDefaultAec3Congfig() { - return !field_trial::IsEnabled( +bool UseSetupSpecificDefaultAec3Congfig(const FieldTrialsView& field_trials) { + return !field_trials.IsEnabled( "WebRTC-Aec3SetupSpecificDefaultConfigDefaultsKillSwitch"); } @@ -101,8 +101,8 @@ GainControl::Mode Agc1ConfigModeToInterfaceMode( RTC_CHECK_NOTREACHED(); } -bool MinimizeProcessingForUnusedOutput() { - return !field_trial::IsEnabled("WebRTC-MutedStateKillSwitch"); +bool MinimizeProcessingForUnusedOutput(const FieldTrialsView& field_trials) { + return !field_trials.IsEnabled("WebRTC-MutedStateKillSwitch"); } // Maximum lengths that frame of samples being passed from the render side to @@ -322,231 +322,6 @@ constexpr int kUnspecifiedDataDumpInputVolume = -100; // Throughout webrtc, it's assumed that success is represented by zero. static_assert(AudioProcessing::kNoError == 0, "kNoError must be zero"); -absl::optional -AudioProcessingImpl::GetGainController2ExperimentParams() { - constexpr char kFieldTrialName[] = "WebRTC-Audio-GainController2"; - - if (!field_trial::IsEnabled(kFieldTrialName)) { - return absl::nullopt; - } - - FieldTrialFlag enabled("Enabled", false); - - // Whether the gain control should switch to AGC2. Enabled by default. - FieldTrialParameter switch_to_agc2("switch_to_agc2", true); - - // AGC2 input volume controller configuration. 
- constexpr InputVolumeController::Config kDefaultInputVolumeControllerConfig; - FieldTrialConstrained min_input_volume( - "min_input_volume", kDefaultInputVolumeControllerConfig.min_input_volume, - 0, 255); - FieldTrialConstrained clipped_level_min( - "clipped_level_min", - kDefaultInputVolumeControllerConfig.clipped_level_min, 0, 255); - FieldTrialConstrained clipped_level_step( - "clipped_level_step", - kDefaultInputVolumeControllerConfig.clipped_level_step, 0, 255); - FieldTrialConstrained clipped_ratio_threshold( - "clipped_ratio_threshold", - kDefaultInputVolumeControllerConfig.clipped_ratio_threshold, 0, 1); - FieldTrialConstrained clipped_wait_frames( - "clipped_wait_frames", - kDefaultInputVolumeControllerConfig.clipped_wait_frames, 0, - absl::nullopt); - FieldTrialParameter enable_clipping_predictor( - "enable_clipping_predictor", - kDefaultInputVolumeControllerConfig.enable_clipping_predictor); - FieldTrialConstrained target_range_max_dbfs( - "target_range_max_dbfs", - kDefaultInputVolumeControllerConfig.target_range_max_dbfs, -90, 30); - FieldTrialConstrained target_range_min_dbfs( - "target_range_min_dbfs", - kDefaultInputVolumeControllerConfig.target_range_min_dbfs, -90, 30); - FieldTrialConstrained update_input_volume_wait_frames( - "update_input_volume_wait_frames", - kDefaultInputVolumeControllerConfig.update_input_volume_wait_frames, 0, - absl::nullopt); - FieldTrialConstrained speech_probability_threshold( - "speech_probability_threshold", - kDefaultInputVolumeControllerConfig.speech_probability_threshold, 0, 1); - FieldTrialConstrained speech_ratio_threshold( - "speech_ratio_threshold", - kDefaultInputVolumeControllerConfig.speech_ratio_threshold, 0, 1); - - // AGC2 adaptive digital controller configuration. - constexpr AudioProcessing::Config::GainController2::AdaptiveDigital - kDefaultAdaptiveDigitalConfig; - FieldTrialConstrained headroom_db( - "headroom_db", kDefaultAdaptiveDigitalConfig.headroom_db, 0, - absl::nullopt); - FieldTrialConstrained max_gain_db( - "max_gain_db", kDefaultAdaptiveDigitalConfig.max_gain_db, 0, - absl::nullopt); - FieldTrialConstrained initial_gain_db( - "initial_gain_db", kDefaultAdaptiveDigitalConfig.initial_gain_db, 0, - absl::nullopt); - FieldTrialConstrained max_gain_change_db_per_second( - "max_gain_change_db_per_second", - kDefaultAdaptiveDigitalConfig.max_gain_change_db_per_second, 0, - absl::nullopt); - FieldTrialConstrained max_output_noise_level_dbfs( - "max_output_noise_level_dbfs", - kDefaultAdaptiveDigitalConfig.max_output_noise_level_dbfs, absl::nullopt, - 0); - - // Transient suppressor. - FieldTrialParameter disallow_transient_suppressor_usage( - "disallow_transient_suppressor_usage", false); - - // Field-trial based override for the input volume controller and adaptive - // digital configs. - ParseFieldTrial( - {&enabled, &switch_to_agc2, &min_input_volume, &clipped_level_min, - &clipped_level_step, &clipped_ratio_threshold, &clipped_wait_frames, - &enable_clipping_predictor, &target_range_max_dbfs, - &target_range_min_dbfs, &update_input_volume_wait_frames, - &speech_probability_threshold, &speech_ratio_threshold, &headroom_db, - &max_gain_db, &initial_gain_db, &max_gain_change_db_per_second, - &max_output_noise_level_dbfs, &disallow_transient_suppressor_usage}, - field_trial::FindFullName(kFieldTrialName)); - // Checked already by `IsEnabled()` before parsing, therefore always true. 
- RTC_DCHECK(enabled); - - const bool do_not_change_agc_config = !switch_to_agc2.Get(); - if (do_not_change_agc_config && !disallow_transient_suppressor_usage.Get()) { - // Return an unspecifed value since, in this case, both the AGC2 and TS - // configurations won't be adjusted. - return absl::nullopt; - } - using Params = AudioProcessingImpl::GainController2ExperimentParams; - if (do_not_change_agc_config) { - // Return a value that leaves the AGC2 config unchanged and that always - // disables TS. - return Params{.agc2_config = absl::nullopt, - .disallow_transient_suppressor_usage = true}; - } - // Return a value that switches all the gain control to AGC2. - return Params{ - .agc2_config = - Params::Agc2Config{ - .input_volume_controller = - { - .min_input_volume = min_input_volume.Get(), - .clipped_level_min = clipped_level_min.Get(), - .clipped_level_step = clipped_level_step.Get(), - .clipped_ratio_threshold = - static_cast(clipped_ratio_threshold.Get()), - .clipped_wait_frames = clipped_wait_frames.Get(), - .enable_clipping_predictor = - enable_clipping_predictor.Get(), - .target_range_max_dbfs = target_range_max_dbfs.Get(), - .target_range_min_dbfs = target_range_min_dbfs.Get(), - .update_input_volume_wait_frames = - update_input_volume_wait_frames.Get(), - .speech_probability_threshold = static_cast( - speech_probability_threshold.Get()), - .speech_ratio_threshold = - static_cast(speech_ratio_threshold.Get()), - }, - .adaptive_digital_controller = - { - .headroom_db = static_cast(headroom_db.Get()), - .max_gain_db = static_cast(max_gain_db.Get()), - .initial_gain_db = - static_cast(initial_gain_db.Get()), - .max_gain_change_db_per_second = static_cast( - max_gain_change_db_per_second.Get()), - .max_output_noise_level_dbfs = - static_cast(max_output_noise_level_dbfs.Get()), - }}, - .disallow_transient_suppressor_usage = - disallow_transient_suppressor_usage.Get()}; -} - -AudioProcessing::Config AudioProcessingImpl::AdjustConfig( - const AudioProcessing::Config& config, - const absl::optional& - experiment_params) { - if (!experiment_params.has_value() || - (!experiment_params->agc2_config.has_value() && - !experiment_params->disallow_transient_suppressor_usage)) { - // When the experiment parameters are unspecified or when the AGC and TS - // configuration are not overridden, return the unmodified configuration. - return config; - } - - AudioProcessing::Config adjusted_config = config; - - // Override the transient suppressor configuration. - if (experiment_params->disallow_transient_suppressor_usage) { - adjusted_config.transient_suppression.enabled = false; - } - - // Override the auto gain control configuration if the AGC1 analog gain - // controller is active and `experiment_params->agc2_config` is specified. - const bool agc1_analog_enabled = - config.gain_controller1.enabled && - (config.gain_controller1.mode == - AudioProcessing::Config::GainController1::kAdaptiveAnalog || - config.gain_controller1.analog_gain_controller.enabled); - if (agc1_analog_enabled && experiment_params->agc2_config.has_value()) { - // Check that the unadjusted AGC config meets the preconditions. 
- const bool hybrid_agc_config_detected = - config.gain_controller1.enabled && - config.gain_controller1.analog_gain_controller.enabled && - !config.gain_controller1.analog_gain_controller - .enable_digital_adaptive && - config.gain_controller2.enabled && - config.gain_controller2.adaptive_digital.enabled; - const bool full_agc1_config_detected = - config.gain_controller1.enabled && - config.gain_controller1.analog_gain_controller.enabled && - config.gain_controller1.analog_gain_controller - .enable_digital_adaptive && - !config.gain_controller2.enabled; - const bool one_and_only_one_input_volume_controller = - hybrid_agc_config_detected != full_agc1_config_detected; - const bool agc2_input_volume_controller_enabled = - config.gain_controller2.enabled && - config.gain_controller2.input_volume_controller.enabled; - if (!one_and_only_one_input_volume_controller || - agc2_input_volume_controller_enabled) { - RTC_LOG(LS_ERROR) << "Cannot adjust AGC config (precondition failed)"; - if (!one_and_only_one_input_volume_controller) - RTC_LOG(LS_ERROR) - << "One and only one input volume controller must be enabled."; - if (agc2_input_volume_controller_enabled) - RTC_LOG(LS_ERROR) - << "The AGC2 input volume controller must be disabled."; - } else { - adjusted_config.gain_controller1.enabled = false; - adjusted_config.gain_controller1.analog_gain_controller.enabled = false; - - adjusted_config.gain_controller2.enabled = true; - adjusted_config.gain_controller2.input_volume_controller.enabled = true; - adjusted_config.gain_controller2.adaptive_digital = - experiment_params->agc2_config->adaptive_digital_controller; - adjusted_config.gain_controller2.adaptive_digital.enabled = true; - } - } - - return adjusted_config; -} - -bool AudioProcessingImpl::UseApmVadSubModule( - const AudioProcessing::Config& config, - const absl::optional& experiment_params) { - // The VAD as an APM sub-module is needed only in one case, that is when TS - // and AGC2 are both enabled and when the AGC2 experiment is running and its - // parameters require to fully switch the gain control to AGC2. 
- return config.transient_suppression.enabled && - config.gain_controller2.enabled && - (config.gain_controller2.input_volume_controller.enabled || - config.gain_controller2.adaptive_digital.enabled) && - experiment_params.has_value() && - experiment_params->agc2_config.has_value(); -} - AudioProcessingImpl::SubmoduleStates::SubmoduleStates( bool capture_post_processor_enabled, bool render_pre_processor_enabled, @@ -561,10 +336,8 @@ bool AudioProcessingImpl::SubmoduleStates::Update( bool noise_suppressor_enabled, bool adaptive_gain_controller_enabled, bool gain_controller2_enabled, - bool voice_activity_detector_enabled, bool gain_adjustment_enabled, - bool echo_controller_enabled, - bool transient_suppressor_enabled) { + bool echo_controller_enabled) { bool changed = false; changed |= (high_pass_filter_enabled != high_pass_filter_enabled_); changed |= @@ -573,21 +346,16 @@ bool AudioProcessingImpl::SubmoduleStates::Update( changed |= (adaptive_gain_controller_enabled != adaptive_gain_controller_enabled_); changed |= (gain_controller2_enabled != gain_controller2_enabled_); - changed |= - (voice_activity_detector_enabled != voice_activity_detector_enabled_); changed |= (gain_adjustment_enabled != gain_adjustment_enabled_); changed |= (echo_controller_enabled != echo_controller_enabled_); - changed |= (transient_suppressor_enabled != transient_suppressor_enabled_); if (changed) { high_pass_filter_enabled_ = high_pass_filter_enabled; mobile_echo_controller_enabled_ = mobile_echo_controller_enabled; noise_suppressor_enabled_ = noise_suppressor_enabled; adaptive_gain_controller_enabled_ = adaptive_gain_controller_enabled; gain_controller2_enabled_ = gain_controller2_enabled; - voice_activity_detector_enabled_ = voice_activity_detector_enabled; gain_adjustment_enabled_ = gain_adjustment_enabled; echo_controller_enabled_ = echo_controller_enabled; - transient_suppressor_enabled_ = transient_suppressor_enabled; } changed |= first_update_; @@ -644,8 +412,9 @@ bool AudioProcessingImpl::SubmoduleStates::HighPassFilteringRequired() const { noise_suppressor_enabled_; } -AudioProcessingImpl::AudioProcessingImpl() - : AudioProcessingImpl(/*config=*/{}, +AudioProcessingImpl::AudioProcessingImpl(const Environment& env) + : AudioProcessingImpl(env, + /*config=*/{}, /*capture_post_processor=*/nullptr, /*render_pre_processor=*/nullptr, /*echo_control_factory=*/nullptr, @@ -655,23 +424,23 @@ AudioProcessingImpl::AudioProcessingImpl() std::atomic AudioProcessingImpl::instance_count_(0); AudioProcessingImpl::AudioProcessingImpl( + const Environment& env, const AudioProcessing::Config& config, std::unique_ptr capture_post_processor, std::unique_ptr render_pre_processor, std::unique_ptr echo_control_factory, - rtc::scoped_refptr echo_detector, + scoped_refptr echo_detector, std::unique_ptr capture_analyzer) - : data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), + : env_(env), + data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)), use_setup_specific_default_aec3_config_( - UseSetupSpecificDefaultAec3Congfig()), - gain_controller2_experiment_params_(GetGainController2ExperimentParams()), - transient_suppressor_vad_mode_(TransientSuppressor::VadMode::kDefault), + UseSetupSpecificDefaultAec3Congfig(env.field_trials())), capture_runtime_settings_(RuntimeSettingQueueSize()), render_runtime_settings_(RuntimeSettingQueueSize()), capture_runtime_settings_enqueuer_(&capture_runtime_settings_), render_runtime_settings_enqueuer_(&render_runtime_settings_), 
echo_control_factory_(std::move(echo_control_factory)), - config_(AdjustConfig(config, gain_controller2_experiment_params_)), + config_(config), submodule_states_(!!capture_post_processor, !!render_pre_processor, !!capture_analyzer), @@ -679,13 +448,12 @@ AudioProcessingImpl::AudioProcessingImpl( std::move(render_pre_processor), std::move(echo_detector), std::move(capture_analyzer)), - constants_(!field_trial::IsEnabled( + constants_(!env.field_trials().IsEnabled( "WebRTC-ApmExperimentalMultiChannelRenderKillSwitch"), - !field_trial::IsEnabled( + !env.field_trials().IsEnabled( "WebRTC-ApmExperimentalMultiChannelCaptureKillSwitch"), - EnforceSplitBandHpf(), - MinimizeProcessingForUnusedOutput(), - field_trial::IsEnabled("WebRTC-TransientSuppressorForcedOff")), + EnforceSplitBandHpf(env.field_trials()), + MinimizeProcessingForUnusedOutput(env.field_trials())), capture_(), capture_nonlocked_(), applied_input_volume_stats_reporter_( @@ -806,12 +574,10 @@ void AudioProcessingImpl::InitializeLocked() { AllocateRenderQueue(); InitializeGainController1(); - InitializeTransientSuppressor(); InitializeHighPassFilter(true); InitializeResidualEchoDetector(); InitializeEchoController(); InitializeGainController2(); - InitializeVoiceActivityDetector(); InitializeNoiseSuppressor(); InitializeAnalyzer(); InitializePostProcessor(); @@ -819,7 +585,7 @@ void AudioProcessingImpl::InitializeLocked() { InitializeCaptureLevelsAdjuster(); if (aec_dump_) { - aec_dump_->WriteInitMessage(formats_.api_format, rtc::TimeUTCMillis()); + aec_dump_->WriteInitMessage(formats_.api_format, TimeUTCMillis()); } } @@ -906,52 +672,41 @@ void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) { MutexLock lock_render(&mutex_render_); MutexLock lock_capture(&mutex_capture_); - const auto adjusted_config = - AdjustConfig(config, gain_controller2_experiment_params_); - RTC_LOG(LS_INFO) << "AudioProcessing::ApplyConfig: " - << adjusted_config.ToString(); + RTC_LOG(LS_INFO) << "AudioProcessing::ApplyConfig: " << config.ToString(); const bool pipeline_config_changed = config_.pipeline.multi_channel_render != - adjusted_config.pipeline.multi_channel_render || + config.pipeline.multi_channel_render || config_.pipeline.multi_channel_capture != - adjusted_config.pipeline.multi_channel_capture || + config.pipeline.multi_channel_capture || config_.pipeline.maximum_internal_processing_rate != - adjusted_config.pipeline.maximum_internal_processing_rate || + config.pipeline.maximum_internal_processing_rate || config_.pipeline.capture_downmix_method != - adjusted_config.pipeline.capture_downmix_method; + config.pipeline.capture_downmix_method; const bool aec_config_changed = - config_.echo_canceller.enabled != - adjusted_config.echo_canceller.enabled || - config_.echo_canceller.mobile_mode != - adjusted_config.echo_canceller.mobile_mode; + config_.echo_canceller.enabled != config.echo_canceller.enabled || + config_.echo_canceller.mobile_mode != config.echo_canceller.mobile_mode; const bool agc1_config_changed = - config_.gain_controller1 != adjusted_config.gain_controller1; + config_.gain_controller1 != config.gain_controller1; const bool agc2_config_changed = - config_.gain_controller2 != adjusted_config.gain_controller2; + config_.gain_controller2 != config.gain_controller2; const bool ns_config_changed = - config_.noise_suppression.enabled != - adjusted_config.noise_suppression.enabled || - config_.noise_suppression.level != - adjusted_config.noise_suppression.level; - - const bool ts_config_changed = 
config_.transient_suppression.enabled != - adjusted_config.transient_suppression.enabled; + config_.noise_suppression.enabled != config.noise_suppression.enabled || + config_.noise_suppression.level != config.noise_suppression.level; const bool pre_amplifier_config_changed = - config_.pre_amplifier.enabled != adjusted_config.pre_amplifier.enabled || + config_.pre_amplifier.enabled != config.pre_amplifier.enabled || config_.pre_amplifier.fixed_gain_factor != - adjusted_config.pre_amplifier.fixed_gain_factor; + config.pre_amplifier.fixed_gain_factor; const bool gain_adjustment_config_changed = - config_.capture_level_adjustment != - adjusted_config.capture_level_adjustment; + config_.capture_level_adjustment != config.capture_level_adjustment; - config_ = adjusted_config; + config_ = config; if (aec_config_changed) { InitializeEchoController(); @@ -961,10 +716,6 @@ void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) { InitializeNoiseSuppressor(); } - if (ts_config_changed) { - InitializeTransientSuppressor(); - } - InitializeHighPassFilter(false); if (agc1_config_changed) { @@ -978,11 +729,8 @@ void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) { config_.gain_controller2 = AudioProcessing::Config::GainController2(); } - if (agc2_config_changed || ts_config_changed) { - // AGC2 also depends on TS because of the possible dependency on the APM VAD - // sub-module. + if (agc2_config_changed) { InitializeGainController2(); - InitializeVoiceActivityDetector(); } if (pre_amplifier_config_changed || gain_adjustment_config_changed) { @@ -996,12 +744,6 @@ void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) { } } -void AudioProcessingImpl::OverrideSubmoduleCreationForTesting( - const ApmSubmoduleCreationOverrides& overrides) { - MutexLock lock(&mutex_capture_); - submodule_creation_overrides_ = overrides; -} - int AudioProcessingImpl::proc_sample_rate_hz() const { // Used as callback from submodules, hence locking is not allowed. 
return capture_nonlocked_.capture_processing_format.sample_rate_hz(); @@ -1530,7 +1272,7 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { *capture_buffer); } - capture_input_rms_.Analyze(rtc::ArrayView( + capture_input_rms_.Analyze(ArrayView( capture_buffer->channels_const()[0], capture_nonlocked_.capture_processing_format.num_frames())); const bool log_rms = ++capture_rms_interval_counter_ >= 1000; @@ -1660,7 +1402,7 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { if (submodules_.agc_manager) { submodules_.agc_manager->Process(*capture_buffer); - absl::optional new_digital_gain = + std::optional new_digital_gain = submodules_.agc_manager->GetDigitalComressionGain(); if (new_digital_gain && submodules_.gain_control) { submodules_.gain_control->set_compression_gain_db(*new_digital_gain); @@ -1692,47 +1434,8 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { } if (submodules_.echo_detector) { - submodules_.echo_detector->AnalyzeCaptureAudio( - rtc::ArrayView(capture_buffer->channels()[0], - capture_buffer->num_frames())); - } - - absl::optional voice_probability; - if (!!submodules_.voice_activity_detector) { - voice_probability = submodules_.voice_activity_detector->Analyze( - AudioFrameView(capture_buffer->channels(), - capture_buffer->num_channels(), - capture_buffer->num_frames())); - } - - if (submodules_.transient_suppressor) { - float transient_suppressor_voice_probability = 1.0f; - switch (transient_suppressor_vad_mode_) { - case TransientSuppressor::VadMode::kDefault: - if (submodules_.agc_manager) { - transient_suppressor_voice_probability = - submodules_.agc_manager->voice_probability(); - } - break; - case TransientSuppressor::VadMode::kRnnVad: - RTC_DCHECK(voice_probability.has_value()); - transient_suppressor_voice_probability = *voice_probability; - break; - case TransientSuppressor::VadMode::kNoVad: - // The transient suppressor will ignore `voice_probability`. - break; - } - float delayed_voice_probability = - submodules_.transient_suppressor->Suppress( - capture_buffer->channels()[0], capture_buffer->num_frames(), - capture_buffer->num_channels(), - capture_buffer->split_bands_const(0)[kBand0To8kHz], - capture_buffer->num_frames_per_band(), - /*reference_data=*/nullptr, /*reference_length=*/0, - transient_suppressor_voice_probability, capture_.key_pressed); - if (voice_probability.has_value()) { - *voice_probability = delayed_voice_probability; - } + submodules_.echo_detector->AnalyzeCaptureAudio(ArrayView( + capture_buffer->channels()[0], capture_buffer->num_frames())); } // Experimental APM sub-module that analyzes `capture_buffer`. @@ -1744,15 +1447,15 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { // TODO(bugs.webrtc.org/7494): Let AGC2 detect applied input volume // changes. 
submodules_.gain_controller2->Process( - voice_probability, capture_.applied_input_volume_changed, - capture_buffer); + /*speech_probability=*/std::nullopt, + capture_.applied_input_volume_changed, capture_buffer); } if (submodules_.capture_post_processor) { submodules_.capture_post_processor->Process(capture_buffer); } - capture_output_rms_.Analyze(rtc::ArrayView( + capture_output_rms_.Analyze(ArrayView( capture_buffer->channels_const()[0], capture_nonlocked_.capture_processing_format.num_frames())); if (log_rms) { @@ -1812,8 +1515,8 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { if (!capture_.capture_output_used_last_frame && capture_.capture_output_used) { for (size_t ch = 0; ch < capture_buffer->num_channels(); ++ch) { - rtc::ArrayView channel_view(capture_buffer->channels()[ch], - capture_buffer->num_frames()); + ArrayView channel_view(capture_buffer->channels()[ch], + capture_buffer->num_frames()); std::fill(channel_view.begin(), channel_view.end(), 0.f); } } @@ -1877,8 +1580,8 @@ int AudioProcessingImpl::ProcessReverseStream(const float* const* src, int AudioProcessingImpl::AnalyzeReverseStreamLocked( const float* const* src, - const StreamConfig& input_config, - const StreamConfig& output_config) { + const StreamConfig& /* input_config */, + const StreamConfig& /* output_config */) { if (aec_dump_) { const size_t channel_size = formats_.api_format.reverse_input_stream().num_frames(); @@ -1976,7 +1679,7 @@ int AudioProcessingImpl::set_stream_delay_ms(int delay) { } bool AudioProcessingImpl::GetLinearAecOutput( - rtc::ArrayView> linear_output) const { + ArrayView> linear_output) const { MutexLock lock(&mutex_capture_); AudioBuffer* linear_aec_buffer = capture_.linear_aec_output.get(); @@ -1987,9 +1690,9 @@ bool AudioProcessingImpl::GetLinearAecOutput( for (size_t ch = 0; ch < linear_aec_buffer->num_channels(); ++ch) { RTC_DCHECK_EQ(linear_output[ch].size(), linear_aec_buffer->num_frames()); - rtc::ArrayView channel_view = - rtc::ArrayView(linear_aec_buffer->channels_const()[ch], - linear_aec_buffer->num_frames()); + ArrayView channel_view = + ArrayView(linear_aec_buffer->channels_const()[ch], + linear_aec_buffer->num_frames()); FloatS16ToFloat(channel_view.data(), channel_view.size(), linear_output[ch].data()); } @@ -2023,7 +1726,7 @@ void AudioProcessingImpl::set_stream_analog_level_locked(int level) { // Invalidate any previously recommended input volume which will be updated by // `ProcessStream()`. - capture_.recommended_input_volume = absl::nullopt; + capture_.recommended_input_volume = std::nullopt; if (submodules_.agc_manager) { submodules_.agc_manager->set_stream_analog_level(level); @@ -2056,7 +1759,7 @@ void AudioProcessingImpl::UpdateRecommendedInputVolumeLocked() { if (!capture_.applied_input_volume.has_value()) { // When `set_stream_analog_level()` is not called, no input level can be // recommended. 
- capture_.recommended_input_volume = absl::nullopt; + capture_.recommended_input_volume = std::nullopt; return; } @@ -2084,7 +1787,8 @@ void AudioProcessingImpl::UpdateRecommendedInputVolumeLocked() { bool AudioProcessingImpl::CreateAndAttachAecDump(absl::string_view file_name, int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue) { + TaskQueueBase* absl_nonnull + worker_queue) { std::unique_ptr aec_dump = AecDumpFactory::Create(file_name, max_log_size_bytes, worker_queue); if (!aec_dump) { @@ -2097,7 +1801,8 @@ bool AudioProcessingImpl::CreateAndAttachAecDump(absl::string_view file_name, bool AudioProcessingImpl::CreateAndAttachAecDump(FILE* handle, int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue) { + TaskQueueBase* absl_nonnull + worker_queue) { std::unique_ptr aec_dump = AecDumpFactory::Create(handle, max_log_size_bytes, worker_queue); if (!aec_dump) { @@ -2117,7 +1822,7 @@ void AudioProcessingImpl::AttachAecDump(std::unique_ptr aec_dump) { // 'aec_dump' parameter, which is after locks are released. aec_dump_.swap(aec_dump); WriteAecDumpConfigMessage(true); - aec_dump_->WriteInitMessage(formats_.api_format, rtc::TimeUTCMillis()); + aec_dump_->WriteInitMessage(formats_.api_format, TimeUTCMillis()); } void AudioProcessingImpl::DetachAecDump() { @@ -2142,43 +1847,9 @@ bool AudioProcessingImpl::UpdateActiveSubmoduleStates() { return submodule_states_.Update( config_.high_pass_filter.enabled, !!submodules_.echo_control_mobile, !!submodules_.noise_suppressor, !!submodules_.gain_control, - !!submodules_.gain_controller2, !!submodules_.voice_activity_detector, + !!submodules_.gain_controller2, config_.pre_amplifier.enabled || config_.capture_level_adjustment.enabled, - capture_nonlocked_.echo_controller_enabled, - !!submodules_.transient_suppressor); -} - -void AudioProcessingImpl::InitializeTransientSuppressor() { - // Choose the VAD mode for TS and detect a VAD mode change. - const TransientSuppressor::VadMode previous_vad_mode = - transient_suppressor_vad_mode_; - transient_suppressor_vad_mode_ = TransientSuppressor::VadMode::kDefault; - if (UseApmVadSubModule(config_, gain_controller2_experiment_params_)) { - transient_suppressor_vad_mode_ = TransientSuppressor::VadMode::kRnnVad; - } - const bool vad_mode_changed = - previous_vad_mode != transient_suppressor_vad_mode_; - - if (config_.transient_suppression.enabled && - !constants_.transient_suppressor_forced_off) { - // Attempt to create a transient suppressor, if one is not already created. - if (!submodules_.transient_suppressor || vad_mode_changed) { - submodules_.transient_suppressor = CreateTransientSuppressor( - submodule_creation_overrides_, transient_suppressor_vad_mode_, - proc_fullband_sample_rate_hz(), capture_nonlocked_.split_rate, - num_proc_channels()); - if (!submodules_.transient_suppressor) { - RTC_LOG(LS_WARNING) - << "No transient suppressor created (probably disabled)"; - } - } else { - submodules_.transient_suppressor->Initialize( - proc_fullband_sample_rate_hz(), capture_nonlocked_.split_rate, - num_proc_channels()); - } - } else { - submodules_.transient_suppressor.reset(); - } + capture_nonlocked_.echo_controller_enabled); } void AudioProcessingImpl::InitializeHighPassFilter(bool forced_reset) { @@ -2216,16 +1887,18 @@ void AudioProcessingImpl::InitializeEchoController() { // Create and activate the echo controller. 
if (echo_control_factory_) { submodules_.echo_controller = echo_control_factory_->Create( - proc_sample_rate_hz(), num_reverse_channels(), num_proc_channels()); + env_, proc_sample_rate_hz(), num_reverse_channels(), + num_proc_channels()); RTC_DCHECK(submodules_.echo_controller); } else { EchoCanceller3Config config; - absl::optional multichannel_config; + std::optional multichannel_config; if (use_setup_specific_default_aec3_config_) { - multichannel_config = EchoCanceller3::CreateDefaultMultichannelConfig(); + multichannel_config = + EchoCanceller3Config::CreateDefaultMultichannelConfig(); } submodules_.echo_controller = std::make_unique( - config, multichannel_config, proc_sample_rate_hz(), + env_, config, multichannel_config, proc_sample_rate_hz(), num_reverse_channels(), num_proc_channels()); } @@ -2344,8 +2017,9 @@ void AudioProcessingImpl::InitializeGainController1() { if (re_creation) { stream_analog_level = submodules_.agc_manager->recommended_analog_level(); } - submodules_.agc_manager.reset(new AgcManagerDirect( - num_proc_channels(), config_.gain_controller1.analog_gain_controller)); + submodules_.agc_manager = std::make_unique( + env_, num_proc_channels(), + config_.gain_controller1.analog_gain_controller); if (re_creation) { submodules_.agc_manager->set_stream_analog_level(stream_analog_level); } @@ -2361,46 +2035,19 @@ void AudioProcessingImpl::InitializeGainController2() { submodules_.gain_controller2.reset(); return; } - // Override the input volume controller configuration if the AGC2 experiment - // is running and its parameters require to fully switch the gain control to + // Input volume controller configuration if the AGC2 is running + // and its parameters require to fully switch the gain control to // AGC2. - const bool input_volume_controller_config_overridden = - gain_controller2_experiment_params_.has_value() && - gain_controller2_experiment_params_->agc2_config.has_value(); const InputVolumeController::Config input_volume_controller_config = - input_volume_controller_config_overridden - ? gain_controller2_experiment_params_->agc2_config - ->input_volume_controller - : InputVolumeController::Config{}; - // If the APM VAD sub-module is not used, let AGC2 use its internal VAD. - const bool use_internal_vad = - !UseApmVadSubModule(config_, gain_controller2_experiment_params_); + InputVolumeController::Config{}; submodules_.gain_controller2 = std::make_unique( - config_.gain_controller2, input_volume_controller_config, - proc_fullband_sample_rate_hz(), num_proc_channels(), use_internal_vad); + env_, config_.gain_controller2, input_volume_controller_config, + proc_fullband_sample_rate_hz(), num_output_channels(), + /*use_internal_vad=*/true); submodules_.gain_controller2->SetCaptureOutputUsed( capture_.capture_output_used); } -void AudioProcessingImpl::InitializeVoiceActivityDetector() { - if (!UseApmVadSubModule(config_, gain_controller2_experiment_params_)) { - submodules_.voice_activity_detector.reset(); - return; - } - - if (!submodules_.voice_activity_detector) { - RTC_DCHECK(!!submodules_.gain_controller2); - // TODO(bugs.webrtc.org/13663): Cache CPU features in APM and use here. 
- submodules_.voice_activity_detector = - std::make_unique( - submodules_.gain_controller2->GetCpuFeatures(), - proc_fullband_sample_rate_hz()); - } else { - submodules_.voice_activity_detector->Initialize( - proc_fullband_sample_rate_hz()); - } -} - void AudioProcessingImpl::InitializeNoiseSuppressor() { submodules_.noise_suppressor.reset(); @@ -2535,8 +2182,6 @@ void AudioProcessingImpl::WriteAecDumpConfigMessage(bool forced) { apm_config.ns_enabled = config_.noise_suppression.enabled; apm_config.ns_level = static_cast(config_.noise_suppression.level); - apm_config.transient_suppression_enabled = - config_.transient_suppression.enabled; apm_config.experiments_description = experiments_description; apm_config.pre_amplifier_enabled = config_.pre_amplifier.enabled; apm_config.pre_amplifier_fixed_gain_factor = diff --git a/modules/audio_processing/audio_processing_impl.h b/modules/audio_processing/audio_processing_impl.h index fe80e0d912..c148d9c8a8 100644 --- a/modules/audio_processing/audio_processing_impl.h +++ b/modules/audio_processing/audio_processing_impl.h @@ -16,13 +16,18 @@ #include #include #include +#include #include #include +#include "absl/base/nullability.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/audio/audio_processing.h" +#include "api/audio/audio_processing_statistics.h" +#include "api/environment/environment.h" #include "api/function_view.h" +#include "api/task_queue/task_queue_base.h" #include "modules/audio_processing/aec3/echo_canceller3.h" #include "modules/audio_processing/agc/agc_manager_direct.h" #include "modules/audio_processing/agc/gain_control.h" @@ -35,15 +40,10 @@ #include "modules/audio_processing/high_pass_filter.h" #include "modules/audio_processing/include/aec_dump.h" #include "modules/audio_processing/include/audio_frame_proxies.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "modules/audio_processing/include/audio_processing_statistics.h" #include "modules/audio_processing/ns/noise_suppressor.h" -#include "modules/audio_processing/optionally_built_submodule_creators.h" #include "modules/audio_processing/render_queue_item_verifier.h" #include "modules/audio_processing/rms_level.h" -#include "modules/audio_processing/transient/transient_suppressor.h" #include "rtc_base/gtest_prod_util.h" -#include "rtc_base/ignore_wundef.h" #include "rtc_base/swap_queue.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -61,12 +61,13 @@ class AudioProcessingImpl : public AudioProcessing { public: // Methods forcing APM to run in a single-threaded manner. // Acquires both the render and capture locks. 
- AudioProcessingImpl(); - AudioProcessingImpl(const AudioProcessing::Config& config, + explicit AudioProcessingImpl(const Environment& env); + AudioProcessingImpl(const Environment& env, + const AudioProcessing::Config& config, std::unique_ptr capture_post_processor, std::unique_ptr render_pre_processor, std::unique_ptr echo_control_factory, - rtc::scoped_refptr echo_detector, + scoped_refptr echo_detector, std::unique_ptr capture_analyzer); ~AudioProcessingImpl() override; int Initialize() override; @@ -74,10 +75,12 @@ class AudioProcessingImpl : public AudioProcessing { void ApplyConfig(const AudioProcessing::Config& config) override; bool CreateAndAttachAecDump(absl::string_view file_name, int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue) override; + TaskQueueBase* absl_nonnull + worker_queue) override; bool CreateAndAttachAecDump(FILE* handle, int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue) override; + TaskQueueBase* absl_nonnull + worker_queue) override; // TODO(webrtc:5298) Deprecated variant. void AttachAecDump(std::unique_ptr aec_dump) override; void DetachAecDump() override; @@ -95,7 +98,7 @@ class AudioProcessingImpl : public AudioProcessing { const StreamConfig& output_config, float* const* dest) override; bool GetLinearAecOutput( - rtc::ArrayView> linear_output) const override; + ArrayView> linear_output) const override; void set_output_will_be_muted(bool muted) override; void HandleCaptureOutputUsedSetting(bool capture_output_used) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); @@ -129,7 +132,7 @@ class AudioProcessingImpl : public AudioProcessing { size_t num_reverse_channels() const override; int stream_delay_ms() const override; - AudioProcessingStats GetStatistics(bool has_remote_tracks) override { + AudioProcessingStats GetStatistics(bool /* has_remote_tracks */) override { return GetStatistics(); } AudioProcessingStats GetStatistics() override { @@ -154,24 +157,12 @@ class AudioProcessingImpl : public AudioProcessing { FRIEND_TEST_ALL_PREFIXES(ApmConfiguration, DefaultBehavior); FRIEND_TEST_ALL_PREFIXES(ApmConfiguration, ValidConfigBehavior); FRIEND_TEST_ALL_PREFIXES(ApmConfiguration, InValidConfigBehavior); - FRIEND_TEST_ALL_PREFIXES(ApmWithSubmodulesExcludedTest, - ToggleTransientSuppressor); - FRIEND_TEST_ALL_PREFIXES(ApmWithSubmodulesExcludedTest, - ReinitializeTransientSuppressor); - FRIEND_TEST_ALL_PREFIXES(ApmWithSubmodulesExcludedTest, - BitexactWithDisabledModules); - FRIEND_TEST_ALL_PREFIXES( - AudioProcessingImplGainController2FieldTrialParametrizedTest, - ConfigAdjustedWhenExperimentEnabled); void set_stream_analog_level_locked(int level) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); void UpdateRecommendedInputVolumeLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); - void OverrideSubmoduleCreationForTesting( - const ApmSubmoduleCreationOverrides& overrides); - // Class providing thread-safe message pipe functionality for // `runtime_settings_`. class RuntimeSettingEnqueuer { @@ -187,53 +178,11 @@ class AudioProcessingImpl : public AudioProcessing { SwapQueue& runtime_settings_; }; + const Environment env_; const std::unique_ptr data_dumper_; static std::atomic instance_count_; const bool use_setup_specific_default_aec3_config_; - // Parameters for the "GainController2" experiment which determines whether - // the following APM sub-modules are created and, if so, their configurations: - // AGC2 (`gain_controller2`), AGC1 (`gain_control`, `agc_manager`) and TS - // (`transient_suppressor`). 
- // TODO(bugs.webrtc.org/7494): Remove when the "WebRTC-Audio-GainController2" - // field trial is removed. - struct GainController2ExperimentParams { - struct Agc2Config { - InputVolumeController::Config input_volume_controller; - AudioProcessing::Config::GainController2::AdaptiveDigital - adaptive_digital_controller; - }; - // When `agc2_config` is specified, all gain control switches to AGC2 and - // the configuration is overridden. - absl::optional agc2_config; - // When true, the transient suppressor submodule is never created regardless - // of the APM configuration. - bool disallow_transient_suppressor_usage; - }; - // Specified when the "WebRTC-Audio-GainController2" field trial is specified. - // TODO(bugs.webrtc.org/7494): Remove when the "WebRTC-Audio-GainController2" - // field trial is removed. - const absl::optional - gain_controller2_experiment_params_; - - // Parses the "WebRTC-Audio-GainController2" field trial. If disabled, returns - // an unspecified value. - static absl::optional - GetGainController2ExperimentParams(); - - // When `experiment_params` is specified, returns an APM configuration - // modified according to the experiment parameters. Otherwise returns - // `config`. - static AudioProcessing::Config AdjustConfig( - const AudioProcessing::Config& config, - const absl::optional& experiment_params); - // Returns true if the APM VAD sub-module should be used. - static bool UseApmVadSubModule( - const AudioProcessing::Config& config, - const absl::optional& experiment_params); - - TransientSuppressor::VadMode transient_suppressor_vad_mode_; - SwapQueue capture_runtime_settings_; SwapQueue render_runtime_settings_; @@ -254,10 +203,8 @@ class AudioProcessingImpl : public AudioProcessing { bool noise_suppressor_enabled, bool adaptive_gain_controller_enabled, bool gain_controller2_enabled, - bool voice_activity_detector_enabled, bool gain_adjustment_enabled, - bool echo_controller_enabled, - bool transient_suppressor_enabled); + bool echo_controller_enabled); bool CaptureMultiBandSubModulesActive() const; bool CaptureMultiBandProcessingPresent() const; bool CaptureMultiBandProcessingActive(bool ec_processing_active) const; @@ -276,11 +223,9 @@ class AudioProcessingImpl : public AudioProcessing { bool mobile_echo_controller_enabled_ = false; bool noise_suppressor_enabled_ = false; bool adaptive_gain_controller_enabled_ = false; - bool voice_activity_detector_enabled_ = false; bool gain_controller2_enabled_ = false; bool gain_adjustment_enabled_ = false; bool echo_controller_enabled_ = false; - bool transient_suppressor_enabled_ = false; bool first_update_ = true; }; @@ -317,18 +262,9 @@ class AudioProcessingImpl : public AudioProcessing { void InitializeHighPassFilter(bool forced_reset) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); void InitializeGainController1() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); - void InitializeTransientSuppressor() - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); // Initializes the `GainController2` sub-module. If the sub-module is enabled, // recreates it. void InitializeGainController2() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); - // Initializes the `VoiceActivityDetectorWrapper` sub-module. If the - // sub-module is enabled, recreates it. Call `InitializeGainController2()` - // first. - // TODO(bugs.webrtc.org/13663): Remove if TS is removed otherwise remove call - // order requirement - i.e., decouple from `InitializeGainController2()`. 
- void InitializeVoiceActivityDetector() - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); void InitializeNoiseSuppressor() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); void InitializeCaptureLevelsAdjuster() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); @@ -423,10 +359,6 @@ class AudioProcessingImpl : public AudioProcessing { // Struct containing the Config specifying the behavior of APM. AudioProcessing::Config config_; - // Overrides for testing the exclusion of some submodules from the build. - ApmSubmoduleCreationOverrides submodule_creation_overrides_ - RTC_GUARDED_BY(mutex_capture_); - // Class containing information about what submodules are active. SubmoduleStates submodule_states_; @@ -434,26 +366,24 @@ class AudioProcessingImpl : public AudioProcessing { struct Submodules { Submodules(std::unique_ptr capture_post_processor, std::unique_ptr render_pre_processor, - rtc::scoped_refptr echo_detector, + scoped_refptr echo_detector, std::unique_ptr capture_analyzer) : echo_detector(std::move(echo_detector)), capture_post_processor(std::move(capture_post_processor)), render_pre_processor(std::move(render_pre_processor)), capture_analyzer(std::move(capture_analyzer)) {} // Accessed internally from capture or during initialization. - const rtc::scoped_refptr echo_detector; + const scoped_refptr echo_detector; const std::unique_ptr capture_post_processor; const std::unique_ptr render_pre_processor; const std::unique_ptr capture_analyzer; std::unique_ptr agc_manager; std::unique_ptr gain_control; std::unique_ptr gain_controller2; - std::unique_ptr voice_activity_detector; std::unique_ptr high_pass_filter; std::unique_ptr echo_controller; std::unique_ptr echo_control_mobile; std::unique_ptr noise_suppressor; - std::unique_ptr transient_suppressor; std::unique_ptr capture_levels_adjuster; } submodules_; @@ -479,19 +409,16 @@ class AudioProcessingImpl : public AudioProcessing { ApmConstants(bool multi_channel_render_support, bool multi_channel_capture_support, bool enforce_split_band_hpf, - bool minimize_processing_for_unused_output, - bool transient_suppressor_forced_off) + bool minimize_processing_for_unused_output) : multi_channel_render_support(multi_channel_render_support), multi_channel_capture_support(multi_channel_capture_support), enforce_split_band_hpf(enforce_split_band_hpf), minimize_processing_for_unused_output( - minimize_processing_for_unused_output), - transient_suppressor_forced_off(transient_suppressor_forced_off) {} + minimize_processing_for_unused_output) {} bool multi_channel_render_support; bool multi_channel_capture_support; bool enforce_split_band_hpf; bool minimize_processing_for_unused_output; - bool transient_suppressor_forced_off; } constants_; struct ApmCaptureState { @@ -516,12 +443,12 @@ class AudioProcessingImpl : public AudioProcessing { AudioProcessingStats stats; // Input volume applied on the audio input device when the audio is // acquired. Unspecified when unknown. - absl::optional applied_input_volume; + std::optional applied_input_volume; bool applied_input_volume_changed; // Recommended input volume to apply on the audio input device the next time // that audio is acquired. Unspecified when no input volume can be // recommended. 
- absl::optional recommended_input_volume; + std::optional recommended_input_volume; } capture_ RTC_GUARDED_BY(mutex_capture_); struct ApmCaptureNonLockedState { diff --git a/modules/audio_processing/audio_processing_impl_locking_unittest.cc b/modules/audio_processing/audio_processing_impl_locking_unittest.cc index 3614b574df..dc1bf1287b 100644 --- a/modules/audio_processing/audio_processing_impl_locking_unittest.cc +++ b/modules/audio_processing/audio_processing_impl_locking_unittest.cc @@ -13,8 +13,9 @@ #include #include "api/array_view.h" +#include "api/audio/builtin_audio_processing_builder.h" +#include "api/environment/environment_factory.h" #include "modules/audio_processing/audio_processing_impl.h" -#include "modules/audio_processing/test/audio_processing_builder_for_testing.h" #include "modules/audio_processing/test/test_utils.h" #include "rtc_base/event.h" #include "rtc_base/platform_thread.h" @@ -226,8 +227,8 @@ struct TestConfig { auto available_rates = (test_config.aec_type == AecType::BasicWebRtcAecSettingsWithAecMobile - ? rtc::ArrayView(sample_rates, 2) - : rtc::ArrayView(sample_rates)); + ? ArrayView(sample_rates, 2) + : ArrayView(sample_rates)); for (auto rate : available_rates) { test_config.initial_sample_rate_hz = rate; @@ -304,8 +305,8 @@ class CaptureProcessor { public: CaptureProcessor(int max_frame_size, RandomGenerator* rand_gen, - rtc::Event* render_call_event, - rtc::Event* capture_call_event, + Event* render_call_event, + Event* capture_call_event, FrameCounters* shared_counters_state, const TestConfig* test_config, AudioProcessing* apm); @@ -321,8 +322,8 @@ class CaptureProcessor { void ApplyRuntimeSettingScheme(); RandomGenerator* const rand_gen_ = nullptr; - rtc::Event* const render_call_event_ = nullptr; - rtc::Event* const capture_call_event_ = nullptr; + Event* const render_call_event_ = nullptr; + Event* const capture_call_event_ = nullptr; FrameCounters* const frame_counters_ = nullptr; const TestConfig* const test_config_ = nullptr; AudioProcessing* const apm_ = nullptr; @@ -348,8 +349,8 @@ class RenderProcessor { public: RenderProcessor(int max_frame_size, RandomGenerator* rand_gen, - rtc::Event* render_call_event, - rtc::Event* capture_call_event, + Event* render_call_event, + Event* capture_call_event, FrameCounters* shared_counters_state, const TestConfig* test_config, AudioProcessing* apm); @@ -365,8 +366,8 @@ class RenderProcessor { void ApplyRuntimeSettingScheme(); RandomGenerator* const rand_gen_ = nullptr; - rtc::Event* const render_call_event_ = nullptr; - rtc::Event* const capture_call_event_ = nullptr; + Event* const render_call_event_ = nullptr; + Event* const capture_call_event_ = nullptr; FrameCounters* const frame_counters_ = nullptr; const TestConfig* const test_config_ = nullptr; AudioProcessing* const apm_ = nullptr; @@ -395,14 +396,14 @@ class AudioProcessingImplLockTest // Start the threads used in the test. 
void StartThreads() { const auto attributes = - rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime); - render_thread_ = rtc::PlatformThread::SpawnJoinable( + ThreadAttributes().SetPriority(ThreadPriority::kRealtime); + render_thread_ = PlatformThread::SpawnJoinable( [this] { while (!MaybeEndTest()) render_thread_state_.Process(); }, "render", attributes); - capture_thread_ = rtc::PlatformThread::SpawnJoinable( + capture_thread_ = PlatformThread::SpawnJoinable( [this] { while (!MaybeEndTest()) { capture_thread_state_.Process(); @@ -410,7 +411,7 @@ class AudioProcessingImplLockTest }, "capture", attributes); - stats_thread_ = rtc::PlatformThread::SpawnJoinable( + stats_thread_ = PlatformThread::SpawnJoinable( [this] { while (!MaybeEndTest()) stats_thread_state_.Process(); @@ -419,22 +420,22 @@ class AudioProcessingImplLockTest } // Event handlers for the test. - rtc::Event test_complete_; - rtc::Event render_call_event_; - rtc::Event capture_call_event_; + Event test_complete_; + Event render_call_event_; + Event capture_call_event_; // Thread related variables. mutable RandomGenerator rand_gen_; const TestConfig test_config_; - rtc::scoped_refptr apm_; + scoped_refptr apm_; FrameCounters frame_counters_; RenderProcessor render_thread_state_; CaptureProcessor capture_thread_state_; StatsProcessor stats_thread_state_; - rtc::PlatformThread render_thread_; - rtc::PlatformThread capture_thread_; - rtc::PlatformThread stats_thread_; + PlatformThread render_thread_; + PlatformThread capture_thread_; + PlatformThread stats_thread_; }; // Sleeps a random time between 0 and max_sleep milliseconds. @@ -461,7 +462,7 @@ void PopulateAudioFrame(float** frame, void PopulateAudioFrame(float amplitude, size_t num_channels, size_t samples_per_channel, - rtc::ArrayView frame, + ArrayView frame, RandomGenerator* rand_gen) { ASSERT_GT(amplitude, 0); ASSERT_LE(amplitude, 32767); @@ -488,9 +489,9 @@ AudioProcessing::Config GetApmTestConfig(AecType aec_type) { AudioProcessingImplLockTest::AudioProcessingImplLockTest() : test_config_(GetParam()), - apm_(AudioProcessingBuilderForTesting() + apm_(BuiltinAudioProcessingBuilder() .SetConfig(GetApmTestConfig(test_config_.aec_type)) - .Create()), + .Build(CreateEnvironment())), render_thread_state_(kMaxFrameSize, &rand_gen_, &render_call_event_, @@ -556,8 +557,8 @@ void StatsProcessor::Process() { CaptureProcessor::CaptureProcessor(int max_frame_size, RandomGenerator* rand_gen, - rtc::Event* render_call_event, - rtc::Event* capture_call_event, + Event* render_call_event, + Event* capture_call_event, FrameCounters* shared_counters_state, const TestConfig* test_config, AudioProcessing* apm) @@ -577,7 +578,7 @@ void CaptureProcessor::Process() { // Ensure that the number of render and capture calls do not // differ too much. if (frame_counters_->CaptureMinusRenderCounters() > kMaxCallDifference) { - render_call_event_->Wait(rtc::Event::kForever); + render_call_event_->Wait(Event::kForever); } // Apply any specified capture side APM non-processing runtime calls. 
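The hunk above shows the construction pattern these tests migrate to: AudioProcessingBuilderForTesting().Create() becomes BuiltinAudioProcessingBuilder().SetConfig(...).Build(CreateEnvironment()). A minimal sketch of that pattern follows, using only headers this patch already adds elsewhere; the config value is illustrative and not taken from the tests.

#include "api/audio/audio_processing.h"
#include "api/audio/builtin_audio_processing_builder.h"
#include "api/environment/environment_factory.h"

void BuildApmSketch() {
  // Configure first, then build against an Environment created on the spot,
  // as the migrated tests do.
  webrtc::AudioProcessing::Config config;
  config.echo_canceller.enabled = true;  // Illustrative setting only.
  auto apm = webrtc::BuiltinAudioProcessingBuilder()
                 .SetConfig(config)
                 .Build(webrtc::CreateEnvironment());
  // `apm` is a scoped_refptr<AudioProcessing>; use apm->ApplyConfig(...) etc.
}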
@@ -782,8 +783,8 @@ void CaptureProcessor::ApplyRuntimeSettingScheme() { RenderProcessor::RenderProcessor(int max_frame_size, RandomGenerator* rand_gen, - rtc::Event* render_call_event, - rtc::Event* capture_call_event, + Event* render_call_event, + Event* capture_call_event, FrameCounters* shared_counters_state, const TestConfig* test_config, AudioProcessing* apm) @@ -801,7 +802,7 @@ void RenderProcessor::Process() { // before the first render call is performed (implicitly // required by the APM API). if (first_render_call_) { - capture_call_event_->Wait(rtc::Event::kForever); + capture_call_event_->Wait(Event::kForever); first_render_call_ = false; } @@ -811,7 +812,7 @@ void RenderProcessor::Process() { // Ensure that the number of render and capture calls do not // differ too much. if (frame_counters_->RenderMinusCaptureCounters() > kMaxCallDifference) { - capture_call_event_->Wait(rtc::Event::kForever); + capture_call_event_->Wait(Event::kForever); } // Apply any specified render side APM non-processing runtime calls. diff --git a/modules/audio_processing/audio_processing_impl_unittest.cc b/modules/audio_processing/audio_processing_impl_unittest.cc index 9e50f994b1..5d4b04e692 100644 --- a/modules/audio_processing/audio_processing_impl_unittest.cc +++ b/modules/audio_processing/audio_processing_impl_unittest.cc @@ -13,14 +13,15 @@ #include #include #include +#include #include -#include "absl/types/optional.h" +#include "api/audio/audio_processing.h" +#include "api/audio/builtin_audio_processing_builder.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/make_ref_counted.h" #include "api/scoped_refptr.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "modules/audio_processing/optionally_built_submodule_creators.h" -#include "modules/audio_processing/test/audio_processing_builder_for_testing.h" #include "modules/audio_processing/test/echo_canceller_test_tools.h" #include "modules/audio_processing/test/echo_control_mock.h" #include "modules/audio_processing/test/test_utils.h" @@ -39,7 +40,7 @@ using ::testing::NotNull; class MockInitialize : public AudioProcessingImpl { public: - MockInitialize() : AudioProcessingImpl() {} + MockInitialize() : AudioProcessingImpl(CreateEnvironment()) {} MOCK_METHOD(void, InitializeLocked, (), (override)); void RealInitializeLocked() { @@ -48,7 +49,7 @@ class MockInitialize : public AudioProcessingImpl { } MOCK_METHOD(void, AddRef, (), (const, override)); - MOCK_METHOD(rtc::RefCountReleaseStatus, Release, (), (const, override)); + MOCK_METHOD(RefCountReleaseStatus, Release, (), (const, override)); }; // Creates MockEchoControl instances and provides a raw pointer access to @@ -63,9 +64,10 @@ class MockEchoControlFactory : public EchoControlFactory { MockEchoControlFactory() : next_mock_(std::make_unique()) {} // Returns a pointer to the next MockEchoControl that this factory creates. 
  MockEchoControl* GetNext() const { return next_mock_.get(); }
-  std::unique_ptr<EchoControl> Create(int sample_rate_hz,
-                                      int num_render_channels,
-                                      int num_capture_channels) override {
+  std::unique_ptr<EchoControl> Create(const Environment& /* env */,
+                                      int /* sample_rate_hz */,
+                                      int /* num_render_channels */,
+                                      int /* num_capture_channels */) override {
     std::unique_ptr<EchoControl> mock = std::move(next_mock_);
     next_mock_ = std::make_unique<MockEchoControl>();
     return mock;
   }
@@ -84,16 +86,16 @@ class TestEchoDetector : public EchoDetector {
       : analyze_render_audio_called_(false),
         last_render_audio_first_sample_(0.f) {}
   ~TestEchoDetector() override = default;
-  void AnalyzeRenderAudio(rtc::ArrayView<const float> render_audio) override {
+  void AnalyzeRenderAudio(ArrayView<const float> render_audio) override {
     last_render_audio_first_sample_ = render_audio[0];
     analyze_render_audio_called_ = true;
   }
-  void AnalyzeCaptureAudio(rtc::ArrayView<const float> capture_audio) override {
-  }
-  void Initialize(int capture_sample_rate_hz,
-                  int num_capture_channels,
-                  int render_sample_rate_hz,
-                  int num_render_channels) override {}
+  void AnalyzeCaptureAudio(
+      ArrayView<const float> /* capture_audio */) override {}
+  void Initialize(int /* capture_sample_rate_hz */,
+                  int /* num_capture_channels */,
+                  int /* render_sample_rate_hz */,
+                  int /* num_render_channels */) override {}
   EchoDetector::Metrics GetMetrics() const override { return {}; }
   // Returns true if AnalyzeRenderAudio() has been called at least once.
   bool analyze_render_audio_called() const {
@@ -116,17 +118,17 @@ class TestRenderPreProcessor : public CustomProcessing {
  public:
   TestRenderPreProcessor() = default;
   ~TestRenderPreProcessor() = default;
-  void Initialize(int sample_rate_hz, int num_channels) override {}
+  void Initialize(int /* sample_rate_hz */, int /* num_channels */) override {}
   void Process(AudioBuffer* audio) override {
     for (size_t k = 0; k < audio->num_channels(); ++k) {
-      rtc::ArrayView<float> channel_view(audio->channels()[k],
-                                         audio->num_frames());
+      ArrayView<float> channel_view(audio->channels()[k], audio->num_frames());
       std::transform(channel_view.begin(), channel_view.end(),
                      channel_view.begin(), ProcessSample);
     }
   }
   std::string ToString() const override { return "TestRenderPreProcessor"; }
-  void SetRuntimeSetting(AudioProcessing::RuntimeSetting setting) override {}
+  void SetRuntimeSetting(
+      AudioProcessing::RuntimeSetting /* setting */) override {}
   // Modifies a sample. This member is used in Process() to modify a frame and
   // it is publicly visible to enable tests.
   static constexpr float ProcessSample(float x) { return 2.f * x; }
@@ -168,9 +170,10 @@ TEST(AudioProcessingImplTest, AudioParameterChangeTriggersInit) {
   EXPECT_CALL(mock, InitializeLocked).Times(1);
   mock.Initialize();
 
-  constexpr size_t kMaxSampleRateHz = 32000;
-  constexpr size_t kMaxNumChannels = 2;
-  std::array<int16_t, kMaxNumChannels * kMaxSampleRateHz / 100> frame;
+  constexpr size_t kMaxTestedSampleRateHz = 32000;
+  constexpr size_t kMaxTestedNumChannels = 2;
+  std::array<int16_t, kMaxTestedNumChannels * kMaxTestedSampleRateHz / 100>
+      frame;
   frame.fill(0);
   StreamConfig config(16000, 1);
   // Call with the default parameters; there should be an init.
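The signature changes above (an Environment passed to EchoControlFactory::Create, field-trial helpers taking a FieldTrialsView) follow the same dependency-injection move made in audio_processing_impl.cc: global field_trial:: lookups are replaced by lookups on the injected Environment. A small sketch of that lookup pattern, assuming only the headers this patch already adds; the trial name here is hypothetical.

#include "api/environment/environment.h"
#include "api/field_trials_view.h"

// Returns true when the (hypothetical) kill-switch trial is enabled for this
// Environment, mirroring how EnforceSplitBandHpf() and friends now consult
// env.field_trials() instead of the global field_trial API.
bool IsKillSwitchEnabled(const webrtc::Environment& env) {
  const webrtc::FieldTrialsView& trials = env.field_trials();
  return trials.IsEnabled("WebRTC-SomeExampleKillSwitch");
}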
@@ -199,8 +202,8 @@ TEST(AudioProcessingImplTest, AudioParameterChangeTriggersInit) { } TEST(AudioProcessingImplTest, UpdateCapturePreGainRuntimeSetting) { - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting().Create(); + scoped_refptr apm = + BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); webrtc::AudioProcessing::Config apm_config; apm_config.pre_amplifier.enabled = true; apm_config.pre_amplifier.fixed_gain_factor = 1.f; @@ -232,8 +235,8 @@ TEST(AudioProcessingImplTest, UpdateCapturePreGainRuntimeSetting) { TEST(AudioProcessingImplTest, LevelAdjustmentUpdateCapturePreGainRuntimeSetting) { - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting().Create(); + scoped_refptr apm = + BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); webrtc::AudioProcessing::Config apm_config; apm_config.capture_level_adjustment.enabled = true; apm_config.capture_level_adjustment.pre_gain_factor = 1.f; @@ -265,8 +268,8 @@ TEST(AudioProcessingImplTest, TEST(AudioProcessingImplTest, LevelAdjustmentUpdateCapturePostGainRuntimeSetting) { - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting().Create(); + scoped_refptr apm = + BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); webrtc::AudioProcessing::Config apm_config; apm_config.capture_level_adjustment.enabled = true; apm_config.capture_level_adjustment.post_gain_factor = 1.f; @@ -303,10 +306,10 @@ TEST(AudioProcessingImplTest, EchoControllerObservesSetCaptureUsageChange) { const MockEchoControlFactory* echo_control_factory_ptr = echo_control_factory.get(); - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting() + scoped_refptr apm = + BuiltinAudioProcessingBuilder() .SetEchoControlFactory(std::move(echo_control_factory)) - .Create(); + .Build(CreateEnvironment()); constexpr int16_t kAudioLevel = 10000; constexpr int kSampleRateHz = 48000; @@ -385,10 +388,10 @@ TEST(AudioProcessingImplTest, auto echo_control_factory = std::make_unique(); const auto* echo_control_factory_ptr = echo_control_factory.get(); - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting() + scoped_refptr apm = + BuiltinAudioProcessingBuilder() .SetEchoControlFactory(std::move(echo_control_factory)) - .Create(); + .Build(CreateEnvironment()); // Disable AGC. webrtc::AudioProcessing::Config apm_config; apm_config.gain_controller1.enabled = false; @@ -428,10 +431,10 @@ TEST(AudioProcessingImplTest, auto echo_control_factory = std::make_unique(); const auto* echo_control_factory_ptr = echo_control_factory.get(); - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting() + scoped_refptr apm = + BuiltinAudioProcessingBuilder() .SetEchoControlFactory(std::move(echo_control_factory)) - .Create(); + .Build(CreateEnvironment()); // Disable AGC. webrtc::AudioProcessing::Config apm_config; apm_config.gain_controller1.enabled = false; @@ -471,10 +474,10 @@ TEST(AudioProcessingImplTest, auto echo_control_factory = std::make_unique(); const auto* echo_control_factory_ptr = echo_control_factory.get(); - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting() + scoped_refptr apm = + BuiltinAudioProcessingBuilder() .SetEchoControlFactory(std::move(echo_control_factory)) - .Create(); + .Build(CreateEnvironment()); webrtc::AudioProcessing::Config apm_config; // Enable AGC1. 
apm_config.gain_controller1.enabled = true; @@ -520,74 +523,16 @@ TEST(AudioProcessingImplTest, apm->ProcessStream(frame.data(), stream_config, stream_config, frame.data()); } -TEST(AudioProcessingImplTest, - ProcessWithAgc2AndTransientSuppressorVadModeDefault) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Disabled/"); - auto apm = AudioProcessingBuilder() - .SetConfig({.gain_controller1{.enabled = false}}) - .Create(); - ASSERT_EQ(apm->Initialize(), AudioProcessing::kNoError); - webrtc::AudioProcessing::Config apm_config; - apm_config.gain_controller1.enabled = false; - apm_config.gain_controller2.enabled = true; - apm_config.gain_controller2.adaptive_digital.enabled = true; - apm_config.transient_suppression.enabled = true; - apm->ApplyConfig(apm_config); - constexpr int kSampleRateHz = 48000; - constexpr int kNumChannels = 1; - std::array buffer; - float* channel_pointers[] = {buffer.data()}; - StreamConfig stream_config(/*sample_rate_hz=*/kSampleRateHz, - /*num_channels=*/kNumChannels); - Random random_generator(2341U); - constexpr int kFramesToProcess = 10; - for (int i = 0; i < kFramesToProcess; ++i) { - RandomizeSampleVector(&random_generator, buffer); - ASSERT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, - channel_pointers), - kNoErr); - } -} - -TEST(AudioProcessingImplTest, - ProcessWithAgc2AndTransientSuppressorVadModeRnnVad) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true/"); - rtc::scoped_refptr apm = AudioProcessingBuilder().Create(); - ASSERT_EQ(apm->Initialize(), AudioProcessing::kNoError); - webrtc::AudioProcessing::Config apm_config; - apm_config.gain_controller1.enabled = false; - apm_config.gain_controller2.enabled = true; - apm_config.gain_controller2.adaptive_digital.enabled = true; - apm_config.transient_suppression.enabled = true; - apm->ApplyConfig(apm_config); - constexpr int kSampleRateHz = 48000; - constexpr int kNumChannels = 1; - std::array buffer; - float* channel_pointers[] = {buffer.data()}; - StreamConfig stream_config(/*sample_rate_hz=*/kSampleRateHz, - /*num_channels=*/kNumChannels); - Random random_generator(2341U); - constexpr int kFramesToProcess = 10; - for (int i = 0; i < kFramesToProcess; ++i) { - RandomizeSampleVector(&random_generator, buffer); - ASSERT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, - channel_pointers), - kNoErr); - } -} - TEST(AudioProcessingImplTest, EchoControllerObservesPlayoutVolumeChange) { // Tests that the echo controller observes an echo path gain change when a // playout volume change is reported. auto echo_control_factory = std::make_unique(); const auto* echo_control_factory_ptr = echo_control_factory.get(); - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting() + scoped_refptr apm = + BuiltinAudioProcessingBuilder() .SetEchoControlFactory(std::move(echo_control_factory)) - .Create(); + .Build(CreateEnvironment()); // Disable AGC. webrtc::AudioProcessing::Config apm_config; apm_config.gain_controller1.enabled = false; @@ -637,15 +582,15 @@ TEST(AudioProcessingImplTest, EchoControllerObservesPlayoutVolumeChange) { TEST(AudioProcessingImplTest, RenderPreProcessorBeforeEchoDetector) { // Make sure that signal changes caused by a render pre-processing sub-module // take place before any echo detector analysis. 
- auto test_echo_detector = rtc::make_ref_counted(); + auto test_echo_detector = make_ref_counted(); std::unique_ptr test_render_pre_processor( new TestRenderPreProcessor()); // Create APM injecting the test echo detector and render pre-processor. - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting() + scoped_refptr apm = + BuiltinAudioProcessingBuilder() .SetEchoDetector(test_echo_detector) .SetRenderPreProcessing(std::move(test_render_pre_processor)) - .Create(); + .Build(CreateEnvironment()); webrtc::AudioProcessing::Config apm_config; apm_config.pre_amplifier.enabled = true; apm->ApplyConfig(apm_config); @@ -691,151 +636,6 @@ TEST(AudioProcessingImplTest, RenderPreProcessorBeforeEchoDetector) { test_echo_detector->last_render_audio_first_sample()); } -// Disabling build-optional submodules and trying to enable them via the APM -// config should be bit-exact with running APM with said submodules disabled. -// This mainly tests that SetCreateOptionalSubmodulesForTesting has an effect. -TEST(ApmWithSubmodulesExcludedTest, BitexactWithDisabledModules) { - auto apm = rtc::make_ref_counted(); - ASSERT_EQ(apm->Initialize(), AudioProcessing::kNoError); - - ApmSubmoduleCreationOverrides overrides; - overrides.transient_suppression = true; - apm->OverrideSubmoduleCreationForTesting(overrides); - - AudioProcessing::Config apm_config = apm->GetConfig(); - apm_config.transient_suppression.enabled = true; - apm->ApplyConfig(apm_config); - - rtc::scoped_refptr apm_reference = - AudioProcessingBuilder().Create(); - apm_config = apm_reference->GetConfig(); - apm_config.transient_suppression.enabled = false; - apm_reference->ApplyConfig(apm_config); - - constexpr int kSampleRateHz = 16000; - constexpr int kNumChannels = 1; - std::array buffer; - std::array buffer_reference; - float* channel_pointers[] = {buffer.data()}; - float* channel_pointers_reference[] = {buffer_reference.data()}; - StreamConfig stream_config(/*sample_rate_hz=*/kSampleRateHz, - /*num_channels=*/kNumChannels); - Random random_generator(2341U); - constexpr int kFramesToProcessPerConfiguration = 10; - - for (int i = 0; i < kFramesToProcessPerConfiguration; ++i) { - RandomizeSampleVector(&random_generator, buffer); - std::copy(buffer.begin(), buffer.end(), buffer_reference.begin()); - ASSERT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, - channel_pointers), - kNoErr); - ASSERT_EQ( - apm_reference->ProcessStream(channel_pointers_reference, stream_config, - stream_config, channel_pointers_reference), - kNoErr); - for (int j = 0; j < kSampleRateHz / 100; ++j) { - EXPECT_EQ(buffer[j], buffer_reference[j]); - } - } -} - -// Disable transient suppressor creation and run APM in ways that should trigger -// calls to the transient suppressor API. -TEST(ApmWithSubmodulesExcludedTest, ReinitializeTransientSuppressor) { - auto apm = rtc::make_ref_counted(); - ASSERT_EQ(apm->Initialize(), kNoErr); - - ApmSubmoduleCreationOverrides overrides; - overrides.transient_suppression = true; - apm->OverrideSubmoduleCreationForTesting(overrides); - - AudioProcessing::Config config = apm->GetConfig(); - config.transient_suppression.enabled = true; - apm->ApplyConfig(config); - // 960 samples per frame: 10 ms of <= 48 kHz audio with <= 2 channels. 
- float buffer[960]; - float* channel_pointers[] = {&buffer[0], &buffer[480]}; - Random random_generator(2341U); - constexpr int kFramesToProcessPerConfiguration = 3; - - StreamConfig initial_stream_config(/*sample_rate_hz=*/16000, - /*num_channels=*/1); - for (int i = 0; i < kFramesToProcessPerConfiguration; ++i) { - RandomizeSampleVector(&random_generator, buffer); - EXPECT_EQ(apm->ProcessStream(channel_pointers, initial_stream_config, - initial_stream_config, channel_pointers), - kNoErr); - } - - StreamConfig stereo_stream_config(/*sample_rate_hz=*/16000, - /*num_channels=*/2); - for (int i = 0; i < kFramesToProcessPerConfiguration; ++i) { - RandomizeSampleVector(&random_generator, buffer); - EXPECT_EQ(apm->ProcessStream(channel_pointers, stereo_stream_config, - stereo_stream_config, channel_pointers), - kNoErr); - } - - StreamConfig high_sample_rate_stream_config(/*sample_rate_hz=*/48000, - /*num_channels=*/2); - for (int i = 0; i < kFramesToProcessPerConfiguration; ++i) { - RandomizeSampleVector(&random_generator, buffer); - EXPECT_EQ( - apm->ProcessStream(channel_pointers, high_sample_rate_stream_config, - high_sample_rate_stream_config, channel_pointers), - kNoErr); - } -} - -// Disable transient suppressor creation and run APM in ways that should trigger -// calls to the transient suppressor API. -TEST(ApmWithSubmodulesExcludedTest, ToggleTransientSuppressor) { - auto apm = rtc::make_ref_counted(); - ASSERT_EQ(apm->Initialize(), AudioProcessing::kNoError); - - ApmSubmoduleCreationOverrides overrides; - overrides.transient_suppression = true; - apm->OverrideSubmoduleCreationForTesting(overrides); - - // 960 samples per frame: 10 ms of <= 48 kHz audio with <= 2 channels. - float buffer[960]; - float* channel_pointers[] = {&buffer[0], &buffer[480]}; - Random random_generator(2341U); - constexpr int kFramesToProcessPerConfiguration = 3; - StreamConfig stream_config(/*sample_rate_hz=*/16000, - /*num_channels=*/1); - - AudioProcessing::Config config = apm->GetConfig(); - config.transient_suppression.enabled = true; - apm->ApplyConfig(config); - for (int i = 0; i < kFramesToProcessPerConfiguration; ++i) { - RandomizeSampleVector(&random_generator, buffer); - EXPECT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, - channel_pointers), - kNoErr); - } - - config = apm->GetConfig(); - config.transient_suppression.enabled = false; - apm->ApplyConfig(config); - for (int i = 0; i < kFramesToProcessPerConfiguration; ++i) { - RandomizeSampleVector(&random_generator, buffer); - EXPECT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, - channel_pointers), - kNoErr); - } - - config = apm->GetConfig(); - config.transient_suppression.enabled = true; - apm->ApplyConfig(config); - for (int i = 0; i < kFramesToProcessPerConfiguration; ++i) { - RandomizeSampleVector(&random_generator, buffer); - EXPECT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, - channel_pointers), - kNoErr); - } -} - class StartupInputVolumeParameterizedTest : public ::testing::TestWithParam {}; @@ -846,7 +646,7 @@ TEST_P(StartupInputVolumeParameterizedTest, webrtc::AudioProcessing::Config config; config.gain_controller1.enabled = false; config.gain_controller2.enabled = false; - auto apm = AudioProcessingBuilder().SetConfig(config).Create(); + auto apm = BuiltinAudioProcessingBuilder(config).Build(CreateEnvironment()); int startup_volume = GetParam(); int recommended_volume = ProcessInputVolume( @@ -865,7 +665,7 @@ TEST(AudioProcessingImplTest, 
webrtc::AudioProcessing::Config config; config.gain_controller1.enabled = false; config.gain_controller2.enabled = false; - auto apm = AudioProcessingBuilder().SetConfig(config).Create(); + auto apm = BuiltinAudioProcessingBuilder(config).Build(CreateEnvironment()); Random rand_gen(42); for (int i = 0; i < 32; ++i) { @@ -910,7 +710,8 @@ class ApmInputVolumeControllerParametrizedTest TEST_P(ApmInputVolumeControllerParametrizedTest, EnforceMinInputVolumeAtStartupWithZeroVolume) { const StreamConfig stream_config(sample_rate_hz(), num_channels()); - auto apm = AudioProcessingBuilder().SetConfig(GetConfig()).Create(); + auto apm = + BuiltinAudioProcessingBuilder(GetConfig()).Build(CreateEnvironment()); apm->set_stream_analog_level(0); apm->ProcessStream(channel_pointers(), stream_config, stream_config, @@ -921,7 +722,8 @@ TEST_P(ApmInputVolumeControllerParametrizedTest, TEST_P(ApmInputVolumeControllerParametrizedTest, EnforceMinInputVolumeAtStartupWithNonZeroVolume) { const StreamConfig stream_config(sample_rate_hz(), num_channels()); - auto apm = AudioProcessingBuilder().SetConfig(GetConfig()).Create(); + auto apm = + BuiltinAudioProcessingBuilder(GetConfig()).Build(CreateEnvironment()); constexpr int kStartupVolume = 3; apm->set_stream_analog_level(kStartupVolume); @@ -939,7 +741,8 @@ TEST_P(ApmInputVolumeControllerParametrizedTest, GTEST_SKIP() << "Does not apply to AGC1"; } const StreamConfig stream_config(sample_rate_hz(), num_channels()); - auto apm = AudioProcessingBuilder().SetConfig(GetConfig()).Create(); + auto apm = + BuiltinAudioProcessingBuilder(GetConfig()).Build(CreateEnvironment()); apm->set_stream_analog_level(20); apm->ProcessStream(channel_pointers(), stream_config, stream_config, @@ -954,7 +757,8 @@ TEST_P(ApmInputVolumeControllerParametrizedTest, TEST_P(ApmInputVolumeControllerParametrizedTest, DoNotEnforceMinInputVolumeAtStartupWithHighVolume) { const StreamConfig stream_config(sample_rate_hz(), num_channels()); - auto apm = AudioProcessingBuilder().SetConfig(GetConfig()).Create(); + auto apm = + BuiltinAudioProcessingBuilder(GetConfig()).Build(CreateEnvironment()); constexpr int kStartupVolume = 200; apm->set_stream_analog_level(kStartupVolume); @@ -966,7 +770,8 @@ TEST_P(ApmInputVolumeControllerParametrizedTest, TEST_P(ApmInputVolumeControllerParametrizedTest, DoNotEnforceMinInputVolumeAfterManualVolumeAdjustmentToZero) { const StreamConfig stream_config(sample_rate_hz(), num_channels()); - auto apm = AudioProcessingBuilder().SetConfig(GetConfig()).Create(); + auto apm = + BuiltinAudioProcessingBuilder(GetConfig()).Build(CreateEnvironment()); apm->set_stream_analog_level(100); apm->ProcessStream(channel_pointers(), stream_config, stream_config, @@ -1004,10 +809,10 @@ INSTANTIATE_TEST_SUITE_P( // active, the recommended volume must always be the applied volume. TEST(AudioProcessingImplTest, RecommendAppliedInputVolumeWithNoAgcWithNoEmulation) { - auto apm = AudioProcessingBuilder() - .SetConfig({.capture_level_adjustment = {.enabled = false}, - .gain_controller1 = {.enabled = false}}) - .Create(); + auto apm = BuiltinAudioProcessingBuilder( + {.capture_level_adjustment = {.enabled = false}, + .gain_controller1 = {.enabled = false}}) + .Build(CreateEnvironment()); constexpr int kOneFrame = 1; EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/123), 123); @@ -1021,14 +826,13 @@ TEST(AudioProcessingImplTest, // TODO(bugs.webrtc.org/14581): Enable when APM fixed to let this test pass. 
TEST(AudioProcessingImplTest, DISABLED_RecommendAppliedInputVolumeWithNoAgcWithEmulation) { - auto apm = - AudioProcessingBuilder() - .SetConfig({.capture_level_adjustment = {.enabled = true, - .analog_mic_gain_emulation{ - .enabled = true, - .initial_level = 255}}, - .gain_controller1 = {.enabled = false}}) - .Create(); + auto apm = BuiltinAudioProcessingBuilder( + {.capture_level_adjustment = {.enabled = true, + .analog_mic_gain_emulation{ + .enabled = true, + .initial_level = 255}}, + .gain_controller1 = {.enabled = false}}) + .Build(CreateEnvironment()); constexpr int kOneFrame = 1; EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/123), 123); @@ -1043,16 +847,15 @@ TEST(AudioProcessingImplTest, // TODO(bugs.webrtc.org/14581): Enable when APM fixed to let this test pass. TEST(AudioProcessingImplTest, DISABLED_RecommendAppliedInputVolumeWithAgcWithEmulation) { - auto apm = - AudioProcessingBuilder() - .SetConfig({.capture_level_adjustment = {.enabled = true, - .analog_mic_gain_emulation{ - .enabled = true}}, - .gain_controller1 = {.enabled = true, - .analog_gain_controller{ - .enabled = true, - }}}) - .Create(); + auto apm = BuiltinAudioProcessingBuilder( + {.capture_level_adjustment = {.enabled = true, + .analog_mic_gain_emulation{ + .enabled = true}}, + .gain_controller1 = {.enabled = true, + .analog_gain_controller{ + .enabled = true, + }}}) + .Build(CreateEnvironment()); constexpr int kOneFrame = 1; EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/123), 123); @@ -1060,293 +863,12 @@ TEST(AudioProcessingImplTest, EXPECT_EQ(ProcessInputVolume(*apm, kOneFrame, /*initial_volume=*/135), 135); } -TEST(AudioProcessingImplTest, - Agc2FieldTrialDoNotSwitchToFullAgc2WhenNoAgcIsActive) { - constexpr AudioProcessing::Config kOriginal{ - .gain_controller1{.enabled = false}, - .gain_controller2{.enabled = false}, - }; - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true/"); - - // Test config application via `AudioProcessing` ctor. - auto adjusted = - AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); - EXPECT_EQ(adjusted.gain_controller1, kOriginal.gain_controller1); - EXPECT_EQ(adjusted.gain_controller2, kOriginal.gain_controller2); - - // Test config application via `AudioProcessing::ApplyConfig()`. - auto apm = AudioProcessingBuilder().Create(); - apm->ApplyConfig(kOriginal); - adjusted = apm->GetConfig(); - EXPECT_EQ(adjusted.gain_controller1, kOriginal.gain_controller1); - EXPECT_EQ(adjusted.gain_controller2, kOriginal.gain_controller2); -} - -TEST(AudioProcessingImplTest, - Agc2FieldTrialDoNotSwitchToFullAgc2WithAgc1Agc2InputVolumeControllers) { - constexpr AudioProcessing::Config kOriginal{ - .gain_controller1{.enabled = true, - .analog_gain_controller{.enabled = true}}, - .gain_controller2{.enabled = true, - .input_volume_controller{.enabled = true}}, - }; - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true/"); - - // Test config application via `AudioProcessing` ctor. - auto adjusted = - AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); - EXPECT_EQ(adjusted.gain_controller1, kOriginal.gain_controller1); - EXPECT_EQ(adjusted.gain_controller2, kOriginal.gain_controller2); - - // Test config application via `AudioProcessing::ApplyConfig()`. 
- auto apm = AudioProcessingBuilder().Create(); - apm->ApplyConfig(kOriginal); - adjusted = apm->GetConfig(); - EXPECT_EQ(adjusted.gain_controller1, kOriginal.gain_controller1); - EXPECT_EQ(adjusted.gain_controller2, kOriginal.gain_controller2); -} - -class Agc2FieldTrialParametrizedTest +class Agc2ParametrizedTest : public ::testing::TestWithParam {}; -TEST_P(Agc2FieldTrialParametrizedTest, DoNotChangeConfigIfDisabled) { - const AudioProcessing::Config original = GetParam(); - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Disabled/"); - - // Test config application via `AudioProcessing` ctor. - auto adjusted = - AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); - EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); - EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); - - // Test config application via `AudioProcessing::ApplyConfig()`. - auto apm = AudioProcessingBuilder().Create(); - apm->ApplyConfig(original); - adjusted = apm->GetConfig(); - EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); - EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); -} - -TEST_P(Agc2FieldTrialParametrizedTest, DoNotChangeConfigIfNoOverride) { - const AudioProcessing::Config original = GetParam(); - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Enabled," - "switch_to_agc2:false," - "disallow_transient_suppressor_usage:false/"); - - // Test config application via `AudioProcessing` ctor. - auto adjusted = - AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); - EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); - EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); - - // Test config application via `AudioProcessing::ApplyConfig()`. - auto apm = AudioProcessingBuilder().Create(); - apm->ApplyConfig(original); - adjusted = apm->GetConfig(); - EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); - EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); -} - -TEST_P(Agc2FieldTrialParametrizedTest, DoNotSwitchToFullAgc2) { - const AudioProcessing::Config original = GetParam(); - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:false/"); - - // Test config application via `AudioProcessing` ctor. - auto adjusted = - AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); - EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); - EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); - - // Test config application via `AudioProcessing::ApplyConfig()`. - auto apm = AudioProcessingBuilder().Create(); - apm->ApplyConfig(original); - adjusted = apm->GetConfig(); - EXPECT_EQ(adjusted.gain_controller1, original.gain_controller1); - EXPECT_EQ(adjusted.gain_controller2, original.gain_controller2); -} - -TEST_P(Agc2FieldTrialParametrizedTest, SwitchToFullAgc2) { - const AudioProcessing::Config original = GetParam(); - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true/"); - - // Test config application via `AudioProcessing` ctor. 
- auto adjusted = - AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); - EXPECT_FALSE(adjusted.gain_controller1.enabled); - EXPECT_TRUE(adjusted.gain_controller2.enabled); - EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); - EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); - - // Test config application via `AudioProcessing::ApplyConfig()`. - auto apm = AudioProcessingBuilder().Create(); - apm->ApplyConfig(original); - adjusted = apm->GetConfig(); - EXPECT_FALSE(adjusted.gain_controller1.enabled); - EXPECT_TRUE(adjusted.gain_controller2.enabled); - EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); - EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); -} - -TEST_P(Agc2FieldTrialParametrizedTest, - SwitchToFullAgc2AndOverrideInputVolumeControllerParameters) { - const AudioProcessing::Config original = GetParam(); - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true," - "min_input_volume:123," - "clipped_level_min:20," - "clipped_level_step:30," - "clipped_ratio_threshold:0.4," - "clipped_wait_frames:50," - "enable_clipping_predictor:true," - "target_range_max_dbfs:-6," - "target_range_min_dbfs:-70," - "update_input_volume_wait_frames:80," - "speech_probability_threshold:0.9," - "speech_ratio_threshold:1.0/"); - - // Test config application via `AudioProcessing` ctor. - auto adjusted = - AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); - EXPECT_FALSE(adjusted.gain_controller1.enabled); - EXPECT_TRUE(adjusted.gain_controller2.enabled); - EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); - EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); - - // Test config application via `AudioProcessing::ApplyConfig()`. - auto apm = AudioProcessingBuilder().Create(); - apm->ApplyConfig(original); - adjusted = apm->GetConfig(); - EXPECT_FALSE(adjusted.gain_controller1.enabled); - EXPECT_TRUE(adjusted.gain_controller2.enabled); - EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); - EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); -} - -TEST_P(Agc2FieldTrialParametrizedTest, - SwitchToFullAgc2AndOverrideAdaptiveDigitalControllerParameters) { - const AudioProcessing::Config original = GetParam(); - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Enabled,switch_to_agc2:true," - "headroom_db:10," - "max_gain_db:20," - "initial_gain_db:7," - "max_gain_change_db_per_second:5," - "max_output_noise_level_dbfs:-40/"); - - // Test config application via `AudioProcessing` ctor. 
- auto adjusted = - AudioProcessingBuilder().SetConfig(original).Create()->GetConfig(); - EXPECT_FALSE(adjusted.gain_controller1.enabled); - EXPECT_TRUE(adjusted.gain_controller2.enabled); - EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); - EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); - ASSERT_NE(adjusted.gain_controller2.adaptive_digital, - original.gain_controller2.adaptive_digital); - EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.headroom_db, 10); - EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.max_gain_db, 20); - EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.initial_gain_db, 7); - EXPECT_EQ( - adjusted.gain_controller2.adaptive_digital.max_gain_change_db_per_second, - 5); - EXPECT_EQ( - adjusted.gain_controller2.adaptive_digital.max_output_noise_level_dbfs, - -40); - - // Test config application via `AudioProcessing::ApplyConfig()`. - auto apm = AudioProcessingBuilder().Create(); - apm->ApplyConfig(original); - adjusted = apm->GetConfig(); - EXPECT_FALSE(adjusted.gain_controller1.enabled); - EXPECT_TRUE(adjusted.gain_controller2.enabled); - EXPECT_TRUE(adjusted.gain_controller2.input_volume_controller.enabled); - EXPECT_TRUE(adjusted.gain_controller2.adaptive_digital.enabled); - ASSERT_NE(adjusted.gain_controller2.adaptive_digital, - original.gain_controller2.adaptive_digital); - EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.headroom_db, 10); - EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.max_gain_db, 20); - EXPECT_EQ(adjusted.gain_controller2.adaptive_digital.initial_gain_db, 7); - EXPECT_EQ( - adjusted.gain_controller2.adaptive_digital.max_gain_change_db_per_second, - 5); - EXPECT_EQ( - adjusted.gain_controller2.adaptive_digital.max_output_noise_level_dbfs, - -40); -} - -TEST_P(Agc2FieldTrialParametrizedTest, ProcessSucceedsWithTs) { - AudioProcessing::Config config = GetParam(); - if (!config.transient_suppression.enabled) { - GTEST_SKIP() << "TS is disabled, skip."; - } - - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Disabled/"); - auto apm = AudioProcessingBuilder().SetConfig(config).Create(); - - constexpr int kSampleRateHz = 48000; - constexpr int kNumChannels = 1; - std::array buffer; - float* channel_pointers[] = {buffer.data()}; - StreamConfig stream_config(kSampleRateHz, kNumChannels); - Random random_generator(2341U); - constexpr int kFramesToProcess = 10; - int volume = 100; - for (int i = 0; i < kFramesToProcess; ++i) { - SCOPED_TRACE(i); - RandomizeSampleVector(&random_generator, buffer); - apm->set_stream_analog_level(volume); - ASSERT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, - channel_pointers), - kNoErr); - volume = apm->recommended_stream_analog_level(); - } -} - -TEST_P(Agc2FieldTrialParametrizedTest, ProcessSucceedsWithoutTs) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Enabled," - "switch_to_agc2:false," - "disallow_transient_suppressor_usage:true/"); - auto apm = AudioProcessingBuilder().SetConfig(GetParam()).Create(); - - constexpr int kSampleRateHz = 48000; - constexpr int kNumChannels = 1; - std::array buffer; - float* channel_pointers[] = {buffer.data()}; - StreamConfig stream_config(kSampleRateHz, kNumChannels); - Random random_generator(2341U); - constexpr int kFramesToProcess = 10; - int volume = 100; - for (int i = 0; i < kFramesToProcess; ++i) { - SCOPED_TRACE(i); - RandomizeSampleVector(&random_generator, buffer); - apm->set_stream_analog_level(volume); - 
ASSERT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, - channel_pointers), - kNoErr); - volume = apm->recommended_stream_analog_level(); - } -} - -TEST_P(Agc2FieldTrialParametrizedTest, - ProcessSucceedsWhenSwitchToFullAgc2WithTs) { - AudioProcessing::Config config = GetParam(); - if (!config.transient_suppression.enabled) { - GTEST_SKIP() << "TS is disabled, skip."; - } - - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Enabled," - "switch_to_agc2:true," - "disallow_transient_suppressor_usage:false/"); - auto apm = AudioProcessingBuilder().SetConfig(config).Create(); - +TEST_P(Agc2ParametrizedTest, ProcessSucceedsWhenOneAgcEnabled) { + auto apm = + BuiltinAudioProcessingBuilder(GetParam()).Build(CreateEnvironment()); constexpr int kSampleRateHz = 48000; constexpr int kNumChannels = 1; std::array buffer; @@ -1366,36 +888,57 @@ TEST_P(Agc2FieldTrialParametrizedTest, } } -TEST_P(Agc2FieldTrialParametrizedTest, - ProcessSucceedsWhenSwitchToFullAgc2WithoutTs) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Enabled," - "switch_to_agc2:true," - "disallow_transient_suppressor_usage:true/"); - auto apm = AudioProcessingBuilder().SetConfig(GetParam()).Create(); +TEST_P(Agc2ParametrizedTest, + BitExactWithAndWithoutTransientSuppressionEnabledInConfig) { + const Environment env = CreateEnvironment(); + // Enable transient suppression in the config (expect no effect). + auto config = GetParam(); + config.transient_suppression.enabled = true; + auto apm = BuiltinAudioProcessingBuilder(config).Build(env); + ASSERT_EQ(apm->Initialize(), AudioProcessing::kNoError); + // Disable transient suppression in the config. + auto config_reference = GetParam(); + config_reference.transient_suppression.enabled = false; + auto apm_reference = + BuiltinAudioProcessingBuilder(config_reference).Build(env); + ASSERT_EQ(apm_reference->Initialize(), AudioProcessing::kNoError); - constexpr int kSampleRateHz = 48000; + constexpr int kSampleRateHz = 16000; constexpr int kNumChannels = 1; std::array buffer; + std::array buffer_reference; float* channel_pointers[] = {buffer.data()}; - StreamConfig stream_config(kSampleRateHz, kNumChannels); + float* channel_pointers_reference[] = {buffer_reference.data()}; + StreamConfig stream_config(/*sample_rate_hz=*/kSampleRateHz, + /*num_channels=*/kNumChannels); Random random_generator(2341U); - constexpr int kFramesToProcess = 10; + constexpr int kFramesToProcessPerConfiguration = 100; int volume = 100; - for (int i = 0; i < kFramesToProcess; ++i) { - SCOPED_TRACE(i); + int volume_reference = 100; + for (int i = 0; i < kFramesToProcessPerConfiguration; ++i) { RandomizeSampleVector(&random_generator, buffer); + std::copy(buffer.begin(), buffer.end(), buffer_reference.begin()); apm->set_stream_analog_level(volume); + apm_reference->set_stream_analog_level(volume_reference); ASSERT_EQ(apm->ProcessStream(channel_pointers, stream_config, stream_config, channel_pointers), kNoErr); + ASSERT_EQ( + apm_reference->ProcessStream(channel_pointers_reference, stream_config, + stream_config, channel_pointers_reference), + kNoErr); volume = apm->recommended_stream_analog_level(); + volume_reference = apm_reference->recommended_stream_analog_level(); + for (int j = 0; j < kSampleRateHz / 100; ++j) { + // Expect no effect from transient suppression. 
+ EXPECT_EQ(buffer[j], buffer_reference[j]); + } } } INSTANTIATE_TEST_SUITE_P( AudioProcessingImplTest, - Agc2FieldTrialParametrizedTest, + Agc2ParametrizedTest, ::testing::Values( // Full AGC1, TS disabled. AudioProcessing::Config{ @@ -1405,14 +948,6 @@ INSTANTIATE_TEST_SUITE_P( .analog_gain_controller = {.enabled = true, .enable_digital_adaptive = true}}, .gain_controller2 = {.enabled = false}}, - // Full AGC1, TS enabled. - AudioProcessing::Config{ - .transient_suppression = {.enabled = true}, - .gain_controller1 = - {.enabled = true, - .analog_gain_controller = {.enabled = true, - .enable_digital_adaptive = true}}, - .gain_controller2 = {.enabled = false}}, // Hybrid AGC, TS disabled. AudioProcessing::Config{ .transient_suppression = {.enabled = false}, @@ -1422,142 +957,15 @@ INSTANTIATE_TEST_SUITE_P( .enable_digital_adaptive = false}}, .gain_controller2 = {.enabled = true, .adaptive_digital = {.enabled = true}}}, - // Hybrid AGC, TS enabled. + // Full AGC2, TS disabled. AudioProcessing::Config{ - .transient_suppression = {.enabled = true}, + .transient_suppression = {.enabled = false}, .gain_controller1 = - {.enabled = true, - .analog_gain_controller = {.enabled = true, + {.enabled = false, + .analog_gain_controller = {.enabled = false, .enable_digital_adaptive = false}}, .gain_controller2 = {.enabled = true, + .input_volume_controller = {.enabled = true}, .adaptive_digital = {.enabled = true}}})); -TEST(AudioProcessingImplTest, CanDisableTransientSuppressor) { - constexpr AudioProcessing::Config kOriginal = { - .transient_suppression = {.enabled = false}}; - - // Test config application via `AudioProcessing` ctor. - auto adjusted = - AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); - EXPECT_FALSE(adjusted.transient_suppression.enabled); - - // Test config application via `AudioProcessing::ApplyConfig()`. - auto apm = AudioProcessingBuilder().Create(); - apm->ApplyConfig(kOriginal); - adjusted = apm->GetConfig(); - EXPECT_FALSE(apm->GetConfig().transient_suppression.enabled); -} - -TEST(AudioProcessingImplTest, CanEnableTs) { - constexpr AudioProcessing::Config kOriginal = { - .transient_suppression = {.enabled = true}}; - - // Test config application via `AudioProcessing` ctor. - auto adjusted = - AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); - EXPECT_TRUE(adjusted.transient_suppression.enabled); - - // Test config application via `AudioProcessing::ApplyConfig()`. - auto apm = AudioProcessingBuilder().Create(); - apm->ApplyConfig(kOriginal); - adjusted = apm->GetConfig(); - EXPECT_TRUE(adjusted.transient_suppression.enabled); -} - -TEST(AudioProcessingImplTest, CanDisableTsWithAgc2FieldTrialDisabled) { - constexpr AudioProcessing::Config kOriginal = { - .transient_suppression = {.enabled = false}}; - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Disabled/"); - - // Test config application via `AudioProcessing` ctor. - auto adjusted = - AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); - EXPECT_FALSE(adjusted.transient_suppression.enabled); - - // Test config application via `AudioProcessing::ApplyConfig()`. 
- auto apm = AudioProcessingBuilder().Create(); - apm->ApplyConfig(kOriginal); - adjusted = apm->GetConfig(); - EXPECT_FALSE(apm->GetConfig().transient_suppression.enabled); -} - -TEST(AudioProcessingImplTest, CanEnableTsWithAgc2FieldTrialDisabled) { - constexpr AudioProcessing::Config kOriginal = { - .transient_suppression = {.enabled = true}}; - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Disabled/"); - - // Test config application via `AudioProcessing` ctor. - auto adjusted = - AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); - EXPECT_TRUE(adjusted.transient_suppression.enabled); - - // Test config application via `AudioProcessing::ApplyConfig()`. - auto apm = AudioProcessingBuilder().Create(); - apm->ApplyConfig(kOriginal); - adjusted = apm->GetConfig(); - EXPECT_TRUE(adjusted.transient_suppression.enabled); -} - -TEST(AudioProcessingImplTest, - CanDisableTsWithAgc2FieldTrialEnabledAndUsageAllowed) { - constexpr AudioProcessing::Config kOriginal = { - .transient_suppression = {.enabled = false}}; - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Enabled," - "disallow_transient_suppressor_usage:false/"); - - // Test config application via `AudioProcessing` ctor. - auto adjusted = - AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); - EXPECT_FALSE(adjusted.transient_suppression.enabled); - - // Test config application via `AudioProcessing::ApplyConfig()`. - auto apm = AudioProcessingBuilder().Create(); - apm->ApplyConfig(kOriginal); - adjusted = apm->GetConfig(); - EXPECT_FALSE(adjusted.transient_suppression.enabled); -} - -TEST(AudioProcessingImplTest, - CanEnableTsWithAgc2FieldTrialEnabledAndUsageAllowed) { - constexpr AudioProcessing::Config kOriginal = { - .transient_suppression = {.enabled = true}}; - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Enabled," - "disallow_transient_suppressor_usage:false/"); - - // Test config application via `AudioProcessing` ctor. - auto adjusted = - AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); - EXPECT_TRUE(adjusted.transient_suppression.enabled); - - // Test config application via `AudioProcessing::ApplyConfig()`. - auto apm = AudioProcessingBuilder().Create(); - apm->ApplyConfig(kOriginal); - adjusted = apm->GetConfig(); - EXPECT_TRUE(adjusted.transient_suppression.enabled); -} - -TEST(AudioProcessingImplTest, - CannotEnableTsWithAgc2FieldTrialEnabledAndUsageDisallowed) { - constexpr AudioProcessing::Config kOriginal = { - .transient_suppression = {.enabled = true}}; - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-Audio-GainController2/Enabled," - "disallow_transient_suppressor_usage:true/"); - - // Test config application via `AudioProcessing` ctor. - auto adjusted = - AudioProcessingBuilder().SetConfig(kOriginal).Create()->GetConfig(); - EXPECT_FALSE(adjusted.transient_suppression.enabled); - - // Test config application via `AudioProcessing::ApplyConfig()`. 
- auto apm = AudioProcessingBuilder().Create(); - apm->ApplyConfig(kOriginal); - adjusted = apm->GetConfig(); - EXPECT_FALSE(apm->GetConfig().transient_suppression.enabled); -} - } // namespace webrtc diff --git a/modules/audio_processing/audio_processing_performance_unittest.cc b/modules/audio_processing/audio_processing_performance_unittest.cc index 10d3d84951..d12302cb58 100644 --- a/modules/audio_processing/audio_processing_performance_unittest.cc +++ b/modules/audio_processing/audio_processing_performance_unittest.cc @@ -16,11 +16,12 @@ #include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/audio/builtin_audio_processing_builder.h" +#include "api/environment/environment_factory.h" #include "api/numerics/samples_stats_counter.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/metrics/metric.h" #include "modules/audio_processing/audio_processing_impl.h" -#include "modules/audio_processing/test/audio_processing_builder_for_testing.h" #include "modules/audio_processing/test/test_utils.h" #include "rtc_base/event.h" #include "rtc_base/numerics/safe_conversions.h" @@ -448,31 +449,31 @@ class CallSimulator : public ::testing::TestWithParam { int num_capture_channels = 1; switch (simulation_config_.simulation_settings) { case SettingsType::kDefaultApmMobile: { - apm_ = AudioProcessingBuilderForTesting().Create(); + apm_ = BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); ASSERT_TRUE(!!apm_); set_default_mobile_apm_runtime_settings(apm_.get()); break; } case SettingsType::kDefaultApmDesktop: { - apm_ = AudioProcessingBuilderForTesting().Create(); + apm_ = BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); ASSERT_TRUE(!!apm_); set_default_desktop_apm_runtime_settings(apm_.get()); break; } case SettingsType::kAllSubmodulesTurnedOff: { - apm_ = AudioProcessingBuilderForTesting().Create(); + apm_ = BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); ASSERT_TRUE(!!apm_); turn_off_default_apm_runtime_settings(apm_.get()); break; } case SettingsType::kDefaultApmDesktopWithoutDelayAgnostic: { - apm_ = AudioProcessingBuilderForTesting().Create(); + apm_ = BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); ASSERT_TRUE(!!apm_); set_default_desktop_apm_runtime_settings(apm_.get()); break; } case SettingsType::kDefaultApmDesktopWithoutExtendedFilter: { - apm_ = AudioProcessingBuilderForTesting().Create(); + apm_ = BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); ASSERT_TRUE(!!apm_); set_default_desktop_apm_runtime_settings(apm_.get()); break; @@ -492,14 +493,14 @@ class CallSimulator : public ::testing::TestWithParam { // Start the threads used in the test. void StartThreads() { const auto attributes = - rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime); - render_thread_ = rtc::PlatformThread::SpawnJoinable( + ThreadAttributes().SetPriority(ThreadPriority::kRealtime); + render_thread_ = PlatformThread::SpawnJoinable( [this] { while (render_thread_state_->Process()) { } }, "render", attributes); - capture_thread_ = rtc::PlatformThread::SpawnJoinable( + capture_thread_ = PlatformThread::SpawnJoinable( [this] { while (capture_thread_state_->Process()) { } @@ -508,19 +509,19 @@ class CallSimulator : public ::testing::TestWithParam { } // Event handler for the test. - rtc::Event test_complete_; + Event test_complete_; // Thread related variables. 
Random rand_gen_; - rtc::scoped_refptr apm_; + scoped_refptr apm_; const SimulationConfig simulation_config_; FrameCounters frame_counters_; LockedFlag capture_call_checker_; std::unique_ptr render_thread_state_; std::unique_ptr capture_thread_state_; - rtc::PlatformThread render_thread_; - rtc::PlatformThread capture_thread_; + PlatformThread render_thread_; + PlatformThread capture_thread_; }; // Implements the callback functionality for the threads. @@ -554,8 +555,7 @@ const float CallSimulator::kRenderInputFloatLevel = 0.5f; const float CallSimulator::kCaptureInputFloatLevel = 0.03125f; } // anonymous namespace -// TODO(peah): Reactivate once issue 7712 has been resolved. -TEST_P(CallSimulator, DISABLED_ApiCallDurationTest) { +TEST_P(CallSimulator, ApiCallDurationTest) { // Run test and verify that it did not time out. EXPECT_TRUE(Run()); } diff --git a/modules/audio_processing/audio_processing_unittest.cc b/modules/audio_processing/audio_processing_unittest.cc index e320e71405..8e32d34add 100644 --- a/modules/audio_processing/audio_processing_unittest.cc +++ b/modules/audio_processing/audio_processing_unittest.cc @@ -7,7 +7,7 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/audio_processing/include/audio_processing.h" +#include "api/audio/audio_processing.h" #include #include @@ -22,7 +22,9 @@ #include "absl/flags/flag.h" #include "absl/strings/string_view.h" +#include "api/audio/builtin_audio_processing_builder.h" #include "api/audio/echo_detector_creator.h" +#include "api/environment/environment_factory.h" #include "api/make_ref_counted.h" #include "common_audio/include/audio_util.h" #include "common_audio/resampler/include/push_resampler.h" @@ -31,14 +33,12 @@ #include "modules/audio_processing/aec_dump/aec_dump_factory.h" #include "modules/audio_processing/audio_processing_impl.h" #include "modules/audio_processing/include/mock_audio_processing.h" -#include "modules/audio_processing/test/audio_processing_builder_for_testing.h" #include "modules/audio_processing/test/protobuf_utils.h" #include "modules/audio_processing/test/test_utils.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" #include "rtc_base/fake_clock.h" #include "rtc_base/gtest_prod_util.h" -#include "rtc_base/ignore_wundef.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/numerics/safe_minmax.h" #include "rtc_base/protobuf_utils.h" @@ -51,14 +51,13 @@ #include "test/gtest.h" #include "test/testsupport/file_utils.h" -RTC_PUSH_IGNORING_WUNDEF() -#include "modules/audio_processing/debug.pb.h" #ifdef WEBRTC_ANDROID_PLATFORM_BUILD +#include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h" #include "external/webrtc/webrtc/modules/audio_processing/test/unittest.pb.h" #else +#include "modules/audio_processing/debug.pb.h" #include "modules/audio_processing/test/unittest.pb.h" #endif -RTC_POP_IGNORING_WUNDEF() ABSL_FLAG(bool, write_apm_ref_data, @@ -69,6 +68,9 @@ ABSL_FLAG(bool, namespace webrtc { namespace { +using ::testing::_; +using ::testing::WithoutArgs; + // All sample rates used by APM internally during processing. Other input / // output rates are resampled to / from one of these. 
const int kProcessSampleRates[] = {16000, 32000, 48000}; @@ -114,56 +116,6 @@ void VerifyChannelsAreEqual(const int16_t* stereo, size_t samples_per_channel) { } } -void SetFrameTo(Int16FrameData* frame, int16_t value) { - for (size_t i = 0; i < frame->samples_per_channel * frame->num_channels; - ++i) { - frame->data[i] = value; - } -} - -void SetFrameTo(Int16FrameData* frame, int16_t left, int16_t right) { - ASSERT_EQ(2u, frame->num_channels); - for (size_t i = 0; i < frame->samples_per_channel * 2; i += 2) { - frame->data[i] = left; - frame->data[i + 1] = right; - } -} - -void ScaleFrame(Int16FrameData* frame, float scale) { - for (size_t i = 0; i < frame->samples_per_channel * frame->num_channels; - ++i) { - frame->data[i] = FloatS16ToS16(frame->data[i] * scale); - } -} - -bool FrameDataAreEqual(const Int16FrameData& frame1, - const Int16FrameData& frame2) { - if (frame1.samples_per_channel != frame2.samples_per_channel) { - return false; - } - if (frame1.num_channels != frame2.num_channels) { - return false; - } - if (memcmp( - frame1.data.data(), frame2.data.data(), - frame1.samples_per_channel * frame1.num_channels * sizeof(int16_t))) { - return false; - } - return true; -} - -rtc::ArrayView GetMutableFrameData(Int16FrameData* frame) { - int16_t* ptr = frame->data.data(); - const size_t len = frame->samples_per_channel * frame->num_channels; - return rtc::ArrayView(ptr, len); -} - -rtc::ArrayView GetFrameData(const Int16FrameData& frame) { - const int16_t* ptr = frame.data.data(); - const size_t len = frame.samples_per_channel * frame.num_channels; - return rtc::ArrayView(ptr, len); -} - void EnableAllAPComponents(AudioProcessing* ap) { AudioProcessing::Config apm_config = ap->GetConfig(); apm_config.echo_canceller.enabled = true; @@ -195,9 +147,8 @@ T AbsValue(T a) { } int16_t MaxAudioFrame(const Int16FrameData& frame) { - const size_t length = frame.samples_per_channel * frame.num_channels; int16_t max_data = AbsValue(frame.data[0]); - for (size_t i = 1; i < length; i++) { + for (size_t i = 1; i < frame.size(); i++) { max_data = std::max(max_data, AbsValue(frame.data[i])); } @@ -209,7 +160,7 @@ void OpenFileAndWriteMessage(absl::string_view filename, FILE* file = fopen(std::string(filename).c_str(), "wb"); ASSERT_TRUE(file != NULL); - int32_t size = rtc::checked_cast(msg.ByteSizeLong()); + int32_t size = checked_cast(msg.ByteSizeLong()); ASSERT_GT(size, 0); std::unique_ptr array(new uint8_t[size]); ASSERT_TRUE(msg.SerializeToArray(array.get(), size)); @@ -221,7 +172,7 @@ void OpenFileAndWriteMessage(absl::string_view filename, } std::string ResourceFilePath(absl::string_view name, int sample_rate_hz) { - rtc::StringBuilder ss; + StringBuilder ss; // Resource files are all stereo. ss << name << sample_rate_hz / 1000 << "_stereo"; return test::ResourcePath(ss.str(), "pcm"); @@ -242,7 +193,7 @@ std::string OutputFilePath(absl::string_view name, size_t num_reverse_input_channels, size_t num_reverse_output_channels, StreamDirection file_direction) { - rtc::StringBuilder ss; + StringBuilder ss; ss << name << "_i" << num_input_channels << "_" << input_rate / 1000 << "_ir" << num_reverse_input_channels << "_" << reverse_input_rate / 1000 << "_"; if (num_output_channels == 1) { @@ -383,8 +334,8 @@ void ExpectEventFieldsEq(const audioproc::Event& actual, // and contain the same data. If they differ and `kDumpWhenExpectMessageEqFails` // is true, checks the equality of a subset of `audioproc::Event` (nested) // fields. 
-bool ExpectMessageEq(rtc::ArrayView actual, - rtc::ArrayView expected) { +bool ExpectMessageEq(ArrayView actual, + ArrayView expected) { EXPECT_EQ(actual.size(), expected.size()); if (actual.size() != expected.size()) { return false; @@ -457,7 +408,7 @@ class ApmTest : public ::testing::Test { const std::string output_path_; const std::string ref_filename_; - rtc::scoped_refptr apm_; + scoped_refptr apm_; Int16FrameData frame_; Int16FrameData revframe_; std::unique_ptr> float_cb_; @@ -477,7 +428,7 @@ ApmTest::ApmTest() far_file_(NULL), near_file_(NULL), out_file_(NULL) { - apm_ = AudioProcessingBuilderForTesting().Create(); + apm_ = BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); AudioProcessing::Config apm_config = apm_->GetConfig(); apm_config.gain_controller1.analog_gain_controller.enabled = false; apm_config.pipeline.maximum_internal_processing_rate = 48000; @@ -509,11 +460,11 @@ void ApmTest::TearDown() { void ApmTest::Init(AudioProcessing* ap) { ASSERT_EQ( - kNoErr, - ap->Initialize({{{frame_.sample_rate_hz, frame_.num_channels}, + AudioProcessing::kNoError, + ap->Initialize({{{frame_.sample_rate_hz, frame_.num_channels()}, {output_sample_rate_hz_, num_output_channels_}, - {revframe_.sample_rate_hz, revframe_.num_channels}, - {revframe_.sample_rate_hz, revframe_.num_channels}}})); + {revframe_.sample_rate_hz, revframe_.num_channels()}, + {revframe_.sample_rate_hz, revframe_.num_channels()}}})); } void ApmTest::Init(int sample_rate_hz, @@ -567,7 +518,7 @@ bool ApmTest::ReadFrame(FILE* file, Int16FrameData* frame, ChannelBuffer* cb) { // The files always contain stereo audio. - size_t frame_size = frame->samples_per_channel * 2; + size_t frame_size = frame->samples_per_channel() * 2; size_t read_count = fread(frame->data.data(), sizeof(int16_t), frame_size, file); if (read_count != frame_size) { @@ -576,9 +527,9 @@ bool ApmTest::ReadFrame(FILE* file, return false; // This is expected. } - if (frame->num_channels == 1) { + if (frame->num_channels() == 1) { MixStereoToMono(frame->data.data(), frame->data.data(), - frame->samples_per_channel); + frame->samples_per_channel()); } if (cb) { @@ -593,8 +544,8 @@ bool ApmTest::ReadFrame(FILE* file, Int16FrameData* frame) { // If the end of the file has been reached, rewind it and attempt to read the // frame again. 
-void ApmTest::ReadFrameWithRewind(FILE* file, - Int16FrameData* frame, +void ApmTest::ReadFrameWithRewind(FILE* /* file */, + Int16FrameData* /* frame */, ChannelBuffer* cb) { if (!ReadFrame(near_file_, &frame_, cb)) { rewind(near_file_); @@ -610,13 +561,13 @@ int ApmTest::ProcessStreamChooser(Format format) { if (format == kIntFormat) { return apm_->ProcessStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data()); } return apm_->ProcessStream( float_cb_->channels(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), StreamConfig(output_sample_rate_hz_, num_output_channels_), float_cb_->channels()); } @@ -625,13 +576,13 @@ int ApmTest::AnalyzeReverseStreamChooser(Format format) { if (format == kIntFormat) { return apm_->ProcessReverseStream( revframe_.data.data(), - StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels), - StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels), + StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels()), + StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels()), revframe_.data.data()); } return apm_->AnalyzeReverseStream( revfloat_cb_->channels(), - StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels)); + StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels())); } void ApmTest::ProcessDelayVerificationTest(int delay_ms, @@ -645,9 +596,9 @@ void ApmTest::ProcessDelayVerificationTest(int delay_ms, bool causal = true; tmp_frame.CopyFrom(revframe_); - SetFrameTo(&tmp_frame, 0); + tmp_frame.FillData(0); - EXPECT_EQ(apm_->kNoError, apm_->Initialize()); + EXPECT_EQ(AudioProcessing::kNoError, apm_->Initialize()); // Initialize the `frame_queue` with empty frames. int frame_delay = delay_ms / 10; while (frame_delay < 0) { @@ -683,20 +634,22 @@ void ApmTest::ProcessDelayVerificationTest(int delay_ms, process_frame = &tmp_frame; process_frame->CopyFrom(*frame); } - EXPECT_EQ(apm_->kNoError, apm_->ProcessReverseStream( - reverse_frame->data.data(), - StreamConfig(reverse_frame->sample_rate_hz, - reverse_frame->num_channels), - StreamConfig(reverse_frame->sample_rate_hz, - reverse_frame->num_channels), - reverse_frame->data.data())); - EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(system_delay_ms)); - EXPECT_EQ(apm_->kNoError, + EXPECT_EQ( + AudioProcessing::kNoError, + apm_->ProcessReverseStream(reverse_frame->data.data(), + StreamConfig(reverse_frame->sample_rate_hz, + reverse_frame->num_channels()), + StreamConfig(reverse_frame->sample_rate_hz, + reverse_frame->num_channels()), + reverse_frame->data.data())); + EXPECT_EQ(AudioProcessing::kNoError, + apm_->set_stream_delay_ms(system_delay_ms)); + EXPECT_EQ(AudioProcessing::kNoError, apm_->ProcessStream(process_frame->data.data(), StreamConfig(process_frame->sample_rate_hz, - process_frame->num_channels), + process_frame->num_channels()), StreamConfig(process_frame->sample_rate_hz, - process_frame->num_channels), + process_frame->num_channels()), process_frame->data.data())); frame = frame_queue.front(); frame_queue.pop(); @@ -717,15 +670,15 @@ void ApmTest::ProcessDelayVerificationTest(int delay_ms, // Calculate expected delay estimate and acceptable regions. Further, // limit them w.r.t. AEC delay estimation support. 
const size_t samples_per_ms = - rtc::SafeMin(16u, frame_.samples_per_channel / 10); + SafeMin(16u, frame_.samples_per_channel() / 10); const int expected_median = - rtc::SafeClamp(delay_ms - system_delay_ms, delay_min, delay_max); - const int expected_median_high = rtc::SafeClamp( - expected_median + rtc::dchecked_cast(96 / samples_per_ms), delay_min, - delay_max); - const int expected_median_low = rtc::SafeClamp( - expected_median - rtc::dchecked_cast(96 / samples_per_ms), delay_min, - delay_max); + SafeClamp(delay_ms - system_delay_ms, delay_min, delay_max); + const int expected_median_high = + SafeClamp(expected_median + dchecked_cast(96 / samples_per_ms), + delay_min, delay_max); + const int expected_median_low = + SafeClamp(expected_median - dchecked_cast(96 / samples_per_ms), + delay_min, delay_max); // Verify delay metrics. AudioProcessingStats stats = apm_->GetStatistics(); ASSERT_TRUE(stats.delay_median_ms.has_value()); @@ -736,7 +689,7 @@ void ApmTest::ProcessDelayVerificationTest(int delay_ms, void ApmTest::StreamParametersTest(Format format) { // No errors when the components are disabled. - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(format)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(format)); // -- Missing AGC level -- AudioProcessing::Config apm_config = apm_->GetConfig(); @@ -746,43 +699,43 @@ void ApmTest::StreamParametersTest(Format format) { // Resets after successful ProcessStream(). apm_->set_stream_analog_level(127); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(format)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(format)); EXPECT_EQ(apm_->kStreamParameterNotSetError, ProcessStreamChooser(format)); // Other stream parameters set correctly. apm_config.echo_canceller.enabled = true; apm_config.echo_canceller.mobile_mode = false; apm_->ApplyConfig(apm_config); - EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(100)); + EXPECT_EQ(AudioProcessing::kNoError, apm_->set_stream_delay_ms(100)); EXPECT_EQ(apm_->kStreamParameterNotSetError, ProcessStreamChooser(format)); apm_config.gain_controller1.enabled = false; apm_->ApplyConfig(apm_config); // -- Missing delay -- - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(format)); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(format)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(format)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(format)); // Resets after successful ProcessStream(). - EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(100)); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(format)); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(format)); + EXPECT_EQ(AudioProcessing::kNoError, apm_->set_stream_delay_ms(100)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(format)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(format)); // Other stream parameters set correctly. 
apm_config.gain_controller1.enabled = true; apm_->ApplyConfig(apm_config); apm_->set_stream_analog_level(127); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(format)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(format)); apm_config.gain_controller1.enabled = false; apm_->ApplyConfig(apm_config); // -- No stream parameters -- - EXPECT_EQ(apm_->kNoError, AnalyzeReverseStreamChooser(format)); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(format)); + EXPECT_EQ(AudioProcessing::kNoError, AnalyzeReverseStreamChooser(format)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(format)); // -- All there -- - EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(100)); + EXPECT_EQ(AudioProcessing::kNoError, apm_->set_stream_delay_ms(100)); apm_->set_stream_analog_level(127); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(format)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(format)); } TEST_F(ApmTest, StreamParametersInt) { @@ -796,19 +749,19 @@ TEST_F(ApmTest, StreamParametersFloat) { void ApmTest::TestChangingChannelsInt16Interface( size_t num_channels, AudioProcessing::Error expected_return) { - frame_.num_channels = num_channels; + frame_.set_num_channels(num_channels); EXPECT_EQ(expected_return, apm_->ProcessStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data())); EXPECT_EQ(expected_return, apm_->ProcessReverseStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data())); } @@ -847,7 +800,7 @@ TEST_F(ApmTest, ChannelsInt16Interface) { TestChangingChannelsInt16Interface(0, apm_->kBadNumberChannelsError); for (size_t i = 1; i < 4; i++) { - TestChangingChannelsInt16Interface(i, kNoErr); + TestChangingChannelsInt16Interface(i, AudioProcessing::kNoError); EXPECT_EQ(i, apm_->num_input_channels()); } } @@ -863,8 +816,8 @@ TEST_F(ApmTest, Channels) { for (size_t j = 0; j < 1; ++j) { // Output channels much be one or match input channels. if (j == 1 || i == j) { - TestChangingForwardChannels(i, j, kNoErr); - TestChangingReverseChannels(i, kNoErr); + TestChangingForwardChannels(i, j, AudioProcessing::kNoError); + TestChangingReverseChannels(i, AudioProcessing::kNoError); EXPECT_EQ(i, apm_->num_input_channels()); EXPECT_EQ(j, apm_->num_output_channels()); @@ -890,10 +843,10 @@ TEST_F(ApmTest, SampleRatesInt) { // number of frames, and checks that output signal has the right level. TEST_F(ApmTest, PreAmplifier) { // Fill the audio frame with a sawtooth pattern. 
- rtc::ArrayView frame_data = GetMutableFrameData(&frame_); - const size_t samples_per_channel = frame_.samples_per_channel; + InterleavedView frame_data = frame_.view(); + const size_t samples_per_channel = frame_.samples_per_channel(); for (size_t i = 0; i < samples_per_channel; i++) { - for (size_t ch = 0; ch < frame_.num_channels; ++ch) { + for (size_t ch = 0; ch < frame_.num_channels(); ++ch) { frame_data[i + ch * samples_per_channel] = 10000 * ((i % 3) - 1); } } @@ -902,7 +855,7 @@ TEST_F(ApmTest, PreAmplifier) { tmp_frame.CopyFrom(frame_); auto compute_power = [](const Int16FrameData& frame) { - rtc::ArrayView data = GetFrameData(frame); + ArrayView data = frame.view().data(); return std::accumulate(data.begin(), data.end(), 0.0f, [](float a, float b) { return a + b * b; }) / data.size() / 32768 / 32768; @@ -921,7 +874,7 @@ TEST_F(ApmTest, PreAmplifier) { for (int i = 0; i < 20; ++i) { frame_.CopyFrom(tmp_frame); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(kIntFormat)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(kIntFormat)); } float output_power = compute_power(frame_); EXPECT_NEAR(output_power, input_power, kEpsilon); @@ -934,7 +887,7 @@ TEST_F(ApmTest, PreAmplifier) { for (int i = 0; i < 20; ++i) { frame_.CopyFrom(tmp_frame); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(kIntFormat)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(kIntFormat)); } output_power = compute_power(frame_); EXPECT_NEAR(output_power, 4 * input_power, kEpsilon); @@ -947,7 +900,7 @@ TEST_F(ApmTest, PreAmplifier) { for (int i = 0; i < 20; ++i) { frame_.CopyFrom(tmp_frame); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(kIntFormat)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(kIntFormat)); } output_power = compute_power(frame_); EXPECT_NEAR(output_power, 2.25 * input_power, kEpsilon); @@ -959,10 +912,10 @@ TEST_F(ApmTest, PreAmplifier) { // crashing. TEST_F(ApmTest, AnalogMicGainEmulation) { // Fill the audio frame with a sawtooth pattern. - rtc::ArrayView frame_data = GetMutableFrameData(&frame_); - const size_t samples_per_channel = frame_.samples_per_channel; + InterleavedView frame_data = frame_.view(); + const size_t samples_per_channel = frame_.samples_per_channel(); for (size_t i = 0; i < samples_per_channel; i++) { - for (size_t ch = 0; ch < frame_.num_channels; ++ch) { + for (size_t ch = 0; ch < frame_.num_channels(); ++ch) { frame_data[i + ch * samples_per_channel] = 100 * ((i % 3) - 1); } } @@ -984,7 +937,7 @@ TEST_F(ApmTest, AnalogMicGainEmulation) { // Process a number of frames to ensure that the code runs without crashes. for (int i = 0; i < 20; ++i) { frame_.CopyFrom(tmp_frame); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(kIntFormat)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(kIntFormat)); } } @@ -993,10 +946,10 @@ TEST_F(ApmTest, AnalogMicGainEmulation) { // right level. TEST_F(ApmTest, CaptureLevelAdjustment) { // Fill the audio frame with a sawtooth pattern. 
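The compute_power lambda above normalizes the mean square of the int16 samples to the [-1, 1] range, and the PreAmplifier expectations follow from a linear gain g scaling that power by g squared: gain 2.0 gives 4x the input power and gain 1.5 gives 2.25x. A self-contained sketch of the same computation over a plain std::vector instead of the test's frame view:

#include <cstdint>
#include <numeric>
#include <vector>

// Normalized power of int16 audio, mirroring the compute_power lambda above:
// mean of squares, scaled from the int16 range down to [-1, 1].
float ComputePower(const std::vector<int16_t>& data) {
  const double sum_sq = std::accumulate(
      data.begin(), data.end(), 0.0,
      [](double acc, int16_t s) { return acc + static_cast<double>(s) * s; });
  return static_cast<float>(sum_sq / data.size() / 32768.0 / 32768.0);
}
// A linear pre-amplifier gain g multiplies this power by g * g, which is why
// the test expects 4 * input_power for gain 2.0 and 2.25 * input_power for 1.5.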
- rtc::ArrayView frame_data = GetMutableFrameData(&frame_); - const size_t samples_per_channel = frame_.samples_per_channel; + InterleavedView frame_data = frame_.view(); + const size_t samples_per_channel = frame_.samples_per_channel(); for (size_t i = 0; i < samples_per_channel; i++) { - for (size_t ch = 0; ch < frame_.num_channels; ++ch) { + for (size_t ch = 0; ch < frame_.num_channels(); ++ch) { frame_data[i + ch * samples_per_channel] = 100 * ((i % 3) - 1); } } @@ -1005,7 +958,7 @@ TEST_F(ApmTest, CaptureLevelAdjustment) { tmp_frame.CopyFrom(frame_); auto compute_power = [](const Int16FrameData& frame) { - rtc::ArrayView data = GetFrameData(frame); + ArrayView data = frame.view().data(); return std::accumulate(data.begin(), data.end(), 0.0f, [](float a, float b) { return a + b * b; }) / data.size() / 32768 / 32768; @@ -1030,7 +983,7 @@ TEST_F(ApmTest, CaptureLevelAdjustment) { for (int i = 0; i < 20; ++i) { frame_.CopyFrom(tmp_frame); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(kIntFormat)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(kIntFormat)); } float output_power = compute_power(frame_); EXPECT_NEAR(output_power, expected_output_power1, kEpsilon); @@ -1050,7 +1003,7 @@ TEST_F(ApmTest, CaptureLevelAdjustment) { for (int i = 0; i < 20; ++i) { frame_.CopyFrom(tmp_frame); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(kIntFormat)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(kIntFormat)); } output_power = compute_power(frame_); EXPECT_NEAR(output_power, expected_output_power2, kEpsilon); @@ -1071,7 +1024,7 @@ TEST_F(ApmTest, CaptureLevelAdjustment) { for (int i = 0; i < 20; ++i) { frame_.CopyFrom(tmp_frame); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(kIntFormat)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(kIntFormat)); } output_power = compute_power(frame_); EXPECT_NEAR(output_power, expected_output_power3, kEpsilon); @@ -1095,7 +1048,7 @@ TEST_F(ApmTest, GainControl) { config.gain_controller1.mode = mode; apm_->ApplyConfig(config); apm_->set_stream_analog_level(100); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(kFloatFormat)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(kFloatFormat)); } // Testing target levels @@ -1103,7 +1056,7 @@ TEST_F(ApmTest, GainControl) { config.gain_controller1.target_level_dbfs = target_level_dbfs; apm_->ApplyConfig(config); apm_->set_stream_analog_level(100); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(kFloatFormat)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(kFloatFormat)); } // Testing compression gains @@ -1111,7 +1064,7 @@ TEST_F(ApmTest, GainControl) { config.gain_controller1.compression_gain_db = compression_gain_db; apm_->ApplyConfig(config); apm_->set_stream_analog_level(100); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(kFloatFormat)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(kFloatFormat)); } // Testing limiter off/on @@ -1119,18 +1072,18 @@ TEST_F(ApmTest, GainControl) { config.gain_controller1.enable_limiter = enable; apm_->ApplyConfig(config); apm_->set_stream_analog_level(100); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(kFloatFormat)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(kFloatFormat)); } // Testing level limits. 
constexpr int kMinLevel = 0; constexpr int kMaxLevel = 255; apm_->set_stream_analog_level(kMinLevel); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(kFloatFormat)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(kFloatFormat)); apm_->set_stream_analog_level((kMinLevel + kMaxLevel) / 2); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(kFloatFormat)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(kFloatFormat)); apm_->set_stream_analog_level(kMaxLevel); - EXPECT_EQ(apm_->kNoError, ProcessStreamChooser(kFloatFormat)); + EXPECT_EQ(AudioProcessing::kNoError, ProcessStreamChooser(kFloatFormat)); } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) @@ -1191,15 +1144,15 @@ void ApmTest::RunQuantizedVolumeDoesNotGetStuckTest(int sample_rate) { for (int i = 0; i < 2000; ++i) { ReadFrameWithRewind(near_file_, &frame_); // Ensure the audio is at a low level, so the AGC will try to increase it. - ScaleFrame(&frame_, 0.25); + frame_.Scale(0.25f); // Always pass in the same volume. apm_->set_stream_analog_level(100); - EXPECT_EQ(apm_->kNoError, + EXPECT_EQ(AudioProcessing::kNoError, apm_->ProcessStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data())); out_analog_level = apm_->recommended_stream_analog_level(); } @@ -1229,14 +1182,14 @@ void ApmTest::RunManualVolumeChangeIsPossibleTest(int sample_rate) { for (int i = 0; i < 1000; ++i) { ReadFrameWithRewind(near_file_, &frame_); // Ensure the audio is at a low level, so the AGC will try to increase it. - ScaleFrame(&frame_, 0.25); + frame_.Scale(0.25f); apm_->set_stream_analog_level(out_analog_level); - EXPECT_EQ(apm_->kNoError, + EXPECT_EQ(AudioProcessing::kNoError, apm_->ProcessStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data())); out_analog_level = apm_->recommended_stream_analog_level(); } @@ -1249,14 +1202,14 @@ void ApmTest::RunManualVolumeChangeIsPossibleTest(int sample_rate) { for (int i = 0; i < 300; ++i) { ReadFrameWithRewind(near_file_, &frame_); - ScaleFrame(&frame_, 0.25); + frame_.Scale(0.25f); apm_->set_stream_analog_level(out_analog_level); - EXPECT_EQ(apm_->kNoError, + EXPECT_EQ(AudioProcessing::kNoError, apm_->ProcessStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data())); out_analog_level = apm_->recommended_stream_analog_level(); // Check that AGC respected the manually adjusted volume. 
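RunQuantizedVolumeDoesNotGetStuckTest and the manual-volume test that follows both exercise the analog-AGC feedback loop: report the currently applied volume, process a frame, then apply whatever level APM recommends next. A sketch of that loop, assuming a configured `apm`, a 10 ms interleaved int16 buffer `frame` (e.g. a std::vector<int16_t>), and a matching `config`; ReadNextFrame() is a hypothetical stand-in for the test's file reader, and the 0-255 range comes from the level-limit checks above.

int analog_level = 100;  // Applied mic volume, constrained to [0, 255].
for (int i = 0; i < 2000; ++i) {
  ReadNextFrame(frame);  // Hypothetical: refill `frame` with capture audio.
  apm->set_stream_analog_level(analog_level);
  if (apm->ProcessStream(frame.data(), config, config, frame.data()) !=
      webrtc::AudioProcessing::kNoError) {
    break;
  }
  // Feed the recommendation back as the applied level for the next frame.
  analog_level = apm->recommended_stream_analog_level();
}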
@@ -1302,24 +1255,24 @@ TEST_F(ApmTest, NoProcessingWhenAllComponentsDisabledInt) { for (size_t sample_rate_hz : kSampleRatesHz) { SCOPED_TRACE(::testing::Message() << "sample_rate_hz=" << sample_rate_hz); Init(sample_rate_hz, sample_rate_hz, sample_rate_hz, 2, 2, 2, false); - SetFrameTo(&frame_, 1000, 2000); + frame_.FillStereoData(1000, 2000); Int16FrameData frame_copy; frame_copy.CopyFrom(frame_); for (int j = 0; j < 1000; j++) { - EXPECT_EQ(apm_->kNoError, + EXPECT_EQ(AudioProcessing::kNoError, apm_->ProcessStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data())); - EXPECT_TRUE(FrameDataAreEqual(frame_, frame_copy)); - EXPECT_EQ(apm_->kNoError, + EXPECT_TRUE(frame_.IsEqual(frame_copy)); + EXPECT_EQ(AudioProcessing::kNoError, apm_->ProcessReverseStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data())); - EXPECT_TRUE(FrameDataAreEqual(frame_, frame_copy)); + EXPECT_TRUE(frame_.IsEqual(frame_copy)); } } } @@ -1335,7 +1288,7 @@ TEST_F(ApmTest, NoProcessingWhenAllComponentsDisabledFloat) { auto src_channels = &src[0]; auto dest_channels = &dest[0]; - apm_ = AudioProcessingBuilderForTesting().Create(); + apm_ = BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); EXPECT_NOERR(apm_->ProcessStream(&src_channels, StreamConfig(sample_rate, 1), StreamConfig(sample_rate, 1), &dest_channels)); @@ -1369,29 +1322,29 @@ TEST_F(ApmTest, IdenticalInputChannelsResultInIdenticalOutputChannels) { ASSERT_EQ(0, feof(near_file_)); while (ReadFrame(far_file_, &revframe_) && ReadFrame(near_file_, &frame_)) { CopyLeftToRightChannel(revframe_.data.data(), - revframe_.samples_per_channel); + revframe_.samples_per_channel()); ASSERT_EQ( - kNoErr, + AudioProcessing::kNoError, apm_->ProcessReverseStream( revframe_.data.data(), - StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels), - StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels), + StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels()), + StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels()), revframe_.data.data())); - CopyLeftToRightChannel(frame_.data.data(), frame_.samples_per_channel); + CopyLeftToRightChannel(frame_.data.data(), frame_.samples_per_channel()); - ASSERT_EQ(kNoErr, apm_->set_stream_delay_ms(0)); + ASSERT_EQ(AudioProcessing::kNoError, apm_->set_stream_delay_ms(0)); apm_->set_stream_analog_level(analog_level); - ASSERT_EQ(kNoErr, + ASSERT_EQ(AudioProcessing::kNoError, apm_->ProcessStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data())); analog_level = apm_->recommended_stream_analog_level(); - VerifyChannelsAreEqual(frame_.data.data(), frame_.samples_per_channel); + VerifyChannelsAreEqual(frame_.data.data(), frame_.samples_per_channel()); } rewind(far_file_); rewind(near_file_); @@ -1401,41 +1354,41 @@ TEST_F(ApmTest, IdenticalInputChannelsResultInIdenticalOutputChannels) { TEST_F(ApmTest, 
SplittingFilter) { // Verify the filter is not active through undistorted audio when: // 1. No components are enabled... - SetFrameTo(&frame_, 1000); + frame_.FillData(1000); Int16FrameData frame_copy; frame_copy.CopyFrom(frame_); - EXPECT_EQ(apm_->kNoError, + EXPECT_EQ(AudioProcessing::kNoError, apm_->ProcessStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data())); - EXPECT_EQ(apm_->kNoError, + EXPECT_EQ(AudioProcessing::kNoError, apm_->ProcessStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data())); - EXPECT_TRUE(FrameDataAreEqual(frame_, frame_copy)); + EXPECT_TRUE(frame_.IsEqual(frame_copy)); // 2. Only the level estimator is enabled... auto apm_config = apm_->GetConfig(); - SetFrameTo(&frame_, 1000); + frame_.FillData(1000); frame_copy.CopyFrom(frame_); apm_->ApplyConfig(apm_config); - EXPECT_EQ(apm_->kNoError, + EXPECT_EQ(AudioProcessing::kNoError, apm_->ProcessStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data())); - EXPECT_EQ(apm_->kNoError, + EXPECT_EQ(AudioProcessing::kNoError, apm_->ProcessStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data())); - EXPECT_TRUE(FrameDataAreEqual(frame_, frame_copy)); + EXPECT_TRUE(frame_.IsEqual(frame_copy)); apm_->ApplyConfig(apm_config); // Check the test is valid. We should have distortion from the filter @@ -1443,19 +1396,17 @@ TEST_F(ApmTest, SplittingFilter) { apm_config.echo_canceller.enabled = true; apm_config.echo_canceller.mobile_mode = false; apm_->ApplyConfig(apm_config); - frame_.samples_per_channel = 320; - frame_.num_channels = 2; - frame_.sample_rate_hz = 32000; - SetFrameTo(&frame_, 1000); + frame_.SetProperties(/* samples_per_channel=*/320, /* num_channels=*/2); + frame_.FillData(1000); frame_copy.CopyFrom(frame_); - EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(0)); - EXPECT_EQ(apm_->kNoError, + EXPECT_EQ(AudioProcessing::kNoError, apm_->set_stream_delay_ms(0)); + EXPECT_EQ(AudioProcessing::kNoError, apm_->ProcessStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data())); - EXPECT_FALSE(FrameDataAreEqual(frame_, frame_copy)); + EXPECT_FALSE(frame_.IsEqual(frame_copy)); } #ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP @@ -1487,8 +1438,8 @@ void ApmTest::ProcessDebugDump(absl::string_view in_filename, if (first_init) { // AttachAecDump() writes an additional init message. Don't start // recording until after the first init to avoid the extra message. 
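The splitting-filter and "all components disabled" tests above share one pass-through pattern: fill a frame with a known value, process it, and compare bit-exactly against an untouched copy. A sketch using the Int16FrameData helpers this CL introduces; FillData(), CopyFrom() and IsEqual() are taken from the hunks above, everything else is standard gtest, and `apm` is assumed to have no processing component enabled.

Int16FrameData frame;
frame.FillData(1000);
Int16FrameData copy;
copy.CopyFrom(frame);
const webrtc::StreamConfig config(frame.sample_rate_hz, frame.num_channels());
EXPECT_EQ(webrtc::AudioProcessing::kNoError,
          apm->ProcessStream(frame.data.data(), config, config,
                             frame.data.data()));
// With no enabled component the output must equal the input bit-exactly.
EXPECT_TRUE(frame.IsEqual(copy));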
- auto aec_dump = - AecDumpFactory::Create(out_filename, max_size_bytes, &worker_queue); + auto aec_dump = AecDumpFactory::Create(out_filename, max_size_bytes, + worker_queue.Get()); EXPECT_TRUE(aec_dump); apm_->AttachAecDump(std::move(aec_dump)); first_init = false; @@ -1498,7 +1449,7 @@ void ApmTest::ProcessDebugDump(absl::string_view in_filename, const audioproc::ReverseStream msg = event_msg.reverse_stream(); if (msg.channel_size() > 0) { - ASSERT_EQ(revframe_.num_channels, + ASSERT_EQ(revframe_.num_channels(), static_cast(msg.channel_size())); for (int i = 0; i < msg.channel_size(); ++i) { memcpy(revfloat_cb_->channels()[i], msg.channel(i).data(), @@ -1516,7 +1467,7 @@ void ApmTest::ProcessDebugDump(absl::string_view in_filename, } else if (event_msg.type() == audioproc::Event::STREAM) { const audioproc::Stream msg = event_msg.stream(); // ProcessStream could have changed this for the output frame. - frame_.num_channels = apm_->num_input_channels(); + frame_.set_num_channels(apm_->num_input_channels()); apm_->set_stream_analog_level(msg.applied_input_volume()); EXPECT_NOERR(apm_->set_stream_delay_ms(msg.delay())); @@ -1527,7 +1478,7 @@ void ApmTest::ProcessDebugDump(absl::string_view in_filename, } if (msg.input_channel_size() > 0) { - ASSERT_EQ(frame_.num_channels, + ASSERT_EQ(frame_.num_channels(), static_cast(msg.input_channel_size())); for (int i = 0; i < msg.input_channel_size(); ++i) { memcpy(float_cb_->channels()[i], msg.input_channel(i).data(), @@ -1549,7 +1500,7 @@ void ApmTest::ProcessDebugDump(absl::string_view in_filename, } void ApmTest::VerifyDebugDumpTest(Format format) { - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; const std::string in_filename = test::ResourcePath("ref03", "aecdump"); std::string format_string; switch (format) { @@ -1634,7 +1585,7 @@ TEST_F(ApmTest, DebugDump) { const std::string filename = test::TempFilename(test::OutputPath(), "debug_aec"); { - auto aec_dump = AecDumpFactory::Create("", -1, &worker_queue); + auto aec_dump = AecDumpFactory::Create("", -1, worker_queue.Get()); EXPECT_FALSE(aec_dump); } @@ -1642,21 +1593,22 @@ TEST_F(ApmTest, DebugDump) { // Stopping without having started should be OK. apm_->DetachAecDump(); - auto aec_dump = AecDumpFactory::Create(filename, -1, &worker_queue); + auto aec_dump = AecDumpFactory::Create(filename, -1, worker_queue.Get()); EXPECT_TRUE(aec_dump); apm_->AttachAecDump(std::move(aec_dump)); - EXPECT_EQ(apm_->kNoError, + EXPECT_EQ(AudioProcessing::kNoError, apm_->ProcessStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data())); - EXPECT_EQ(apm_->kNoError, - apm_->ProcessReverseStream( - revframe_.data.data(), - StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels), - StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels), - revframe_.data.data())); + EXPECT_EQ( + AudioProcessing::kNoError, + apm_->ProcessReverseStream( + revframe_.data.data(), + StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels()), + StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels()), + revframe_.data.data())); apm_->DetachAecDump(); // Verify the file has been written. @@ -1685,20 +1637,21 @@ TEST_F(ApmTest, DebugDumpFromFileHandle) { // Stopping without having started should be OK. 
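The AecDumpFactory::Create() call sites above now pass the worker task queue via worker_queue.Get() instead of a pointer to the wrapper object. A sketch of the attach/record/detach lifecycle the tests walk through; `worker_queue` stands in for whatever task-queue wrapper the caller owns, Create() returns null for an empty file name, and DetachAecDump() is safe without a prior attach (all of this is read off the hunks above).

auto aec_dump = webrtc::AecDumpFactory::Create(
    filename, /*max_size_bytes=*/-1, worker_queue.Get());
if (aec_dump) {
  apm->AttachAecDump(std::move(aec_dump));
}
// ProcessStream()/ProcessReverseStream() calls issued here are recorded.
apm->DetachAecDump();  // Also fine if nothing was ever attached.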
apm_->DetachAecDump(); - auto aec_dump = AecDumpFactory::Create(std::move(f), -1, &worker_queue); + auto aec_dump = AecDumpFactory::Create(std::move(f), -1, worker_queue.Get()); EXPECT_TRUE(aec_dump); apm_->AttachAecDump(std::move(aec_dump)); - EXPECT_EQ(apm_->kNoError, - apm_->ProcessReverseStream( - revframe_.data.data(), - StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels), - StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels), - revframe_.data.data())); - EXPECT_EQ(apm_->kNoError, + EXPECT_EQ( + AudioProcessing::kNoError, + apm_->ProcessReverseStream( + revframe_.data.data(), + StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels()), + StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels()), + revframe_.data.data())); + EXPECT_EQ(AudioProcessing::kNoError, apm_->ProcessStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data())); apm_->DetachAecDump(); @@ -1756,9 +1709,9 @@ TEST_F(ApmTest, Process) { if (test->num_input_channels() != test->num_output_channels()) continue; - apm_ = AudioProcessingBuilderForTesting() + apm_ = BuiltinAudioProcessingBuilder() .SetEchoDetector(CreateEchoDetector()) - .Create(); + .Build(CreateEnvironment()); AudioProcessing::Config apm_config = apm_->GetConfig(); apm_config.gain_controller1.analog_gain_controller.enabled = false; apm_->ApplyConfig(apm_config); @@ -1780,26 +1733,26 @@ TEST_F(ApmTest, Process) { while (ReadFrame(far_file_, &revframe_) && ReadFrame(near_file_, &frame_)) { EXPECT_EQ( - apm_->kNoError, + AudioProcessing::kNoError, apm_->ProcessReverseStream( revframe_.data.data(), - StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels), - StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels), + StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels()), + StreamConfig(revframe_.sample_rate_hz, revframe_.num_channels()), revframe_.data.data())); - EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(0)); + EXPECT_EQ(AudioProcessing::kNoError, apm_->set_stream_delay_ms(0)); apm_->set_stream_analog_level(analog_level); - EXPECT_EQ(apm_->kNoError, + EXPECT_EQ(AudioProcessing::kNoError, apm_->ProcessStream( frame_.data.data(), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), - StreamConfig(frame_.sample_rate_hz, frame_.num_channels), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), + StreamConfig(frame_.sample_rate_hz, frame_.num_channels()), frame_.data.data())); // Ensure the frame was downmixed properly. EXPECT_EQ(static_cast(test->num_output_channels()), - frame_.num_channels); + frame_.num_channels()); max_output_average += MaxAudioFrame(frame_); @@ -1807,13 +1760,12 @@ TEST_F(ApmTest, Process) { analog_level_average += analog_level; AudioProcessingStats stats = apm_->GetStatistics(); - size_t frame_size = frame_.samples_per_channel * frame_.num_channels; size_t write_count = - fwrite(frame_.data.data(), sizeof(int16_t), frame_size, out_file_); - ASSERT_EQ(frame_size, write_count); + fwrite(frame_.data.data(), sizeof(int16_t), frame_.size(), out_file_); + ASSERT_EQ(frame_.size(), write_count); // Reset in case of downmixing. 
- frame_.num_channels = static_cast(test->num_input_channels()); + frame_.set_num_channels(static_cast(test->num_input_channels())); frame_count++; #if defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE) @@ -1871,7 +1823,7 @@ TEST_F(ApmTest, Process) { const int kMaxOutputAverageNear = 26; #else const int kMaxOutputAverageOffset = 0; - const int kMaxOutputAverageNear = kIntNear; + const int kMaxOutputAverageNear = 7; #endif EXPECT_NEAR(test->analog_level_average(), analog_level_average, kIntNear); EXPECT_NEAR(test->max_output_average(), @@ -1995,8 +1947,9 @@ class AudioProcessingTest absl::string_view output_file_prefix) { AudioProcessing::Config apm_config; apm_config.gain_controller1.analog_gain_controller.enabled = false; - rtc::scoped_refptr ap = - AudioProcessingBuilderForTesting().SetConfig(apm_config).Create(); + scoped_refptr ap = BuiltinAudioProcessingBuilder() + .SetConfig(apm_config) + .Build(CreateEnvironment()); EnableAllAPComponents(ap.get()); @@ -2062,16 +2015,25 @@ class AudioProcessingTest StreamConfig(output_rate, num_output_channels), out_cb.channels())); // Dump forward output to file. - Interleave(out_cb.channels(), out_cb.num_frames(), out_cb.num_channels(), - float_data.get()); + RTC_DCHECK_EQ(out_cb.num_bands(), 1u); // Assumes full frequency band. + DeinterleavedView deinterleaved_src( + out_cb.channels()[0], out_cb.num_frames(), out_cb.num_channels()); + InterleavedView interleaved_dst( + float_data.get(), out_cb.num_frames(), out_cb.num_channels()); + Interleave(deinterleaved_src, interleaved_dst); size_t out_length = out_cb.num_channels() * out_cb.num_frames(); ASSERT_EQ(out_length, fwrite(float_data.get(), sizeof(float_data[0]), out_length, out_file)); // Dump reverse output to file. - Interleave(rev_out_cb.channels(), rev_out_cb.num_frames(), - rev_out_cb.num_channels(), float_data.get()); + RTC_DCHECK_EQ(rev_out_cb.num_bands(), 1u); + deinterleaved_src = DeinterleavedView( + rev_out_cb.channels()[0], rev_out_cb.num_frames(), + rev_out_cb.num_channels()); + interleaved_dst = InterleavedView( + float_data.get(), rev_out_cb.num_frames(), rev_out_cb.num_channels()); + Interleave(deinterleaved_src, interleaved_dst); size_t rev_out_length = rev_out_cb.num_channels() * rev_out_cb.num_frames(); @@ -2152,10 +2114,12 @@ TEST_P(AudioProcessingTest, Formats) { ASSERT_TRUE(out_file != NULL); ASSERT_TRUE(ref_file != NULL); - const size_t ref_length = - AudioProcessing::GetFrameSize(ref_rate) * out_num; - const size_t out_length = - AudioProcessing::GetFrameSize(out_rate) * out_num; + const size_t ref_samples_per_channel = + AudioProcessing::GetFrameSize(ref_rate); + const size_t ref_length = ref_samples_per_channel * out_num; + const size_t out_samples_per_channel = + AudioProcessing::GetFrameSize(out_rate); + const size_t out_length = out_samples_per_channel * out_num; // Data from the reference file. std::unique_ptr ref_data(new float[ref_length]); // Data from the output file. @@ -2164,8 +2128,8 @@ TEST_P(AudioProcessingTest, Formats) { // don't match. std::unique_ptr cmp_data(new float[ref_length]); - PushResampler resampler; - resampler.InitializeIfNeeded(out_rate, ref_rate, out_num); + PushResampler resampler(out_samples_per_channel, + ref_samples_per_channel, out_num); // Compute the resampling delay of the output relative to the reference, // to find the region over which we should search for the best SNR. @@ -2198,9 +2162,12 @@ TEST_P(AudioProcessingTest, Formats) { if (out_rate != ref_rate) { // Resample the output back to its internal processing rate if // necessary. 
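Interleave() now takes typed views instead of raw channel pointers plus sizes, which is what the DeinterleavedView/InterleavedView construction above does. A sketch over plain float buffers; the constructor order (pointer, samples per channel, channels) is taken from the hunk, while the api/audio/audio_view.h location of the view types is an assumption.

#include "api/audio/audio_view.h"  // Assumed home of the view types.

void InterleaveCopy(float* planar_src, float* interleaved_dst,
                    size_t samples_per_channel, size_t num_channels) {
  webrtc::DeinterleavedView<float> src(planar_src, samples_per_channel,
                                       num_channels);
  webrtc::InterleavedView<float> dst(interleaved_dst, samples_per_channel,
                                     num_channels);
  // Copies channel-major data in `src` into frame-major (interleaved) order.
  webrtc::Interleave(src, dst);
}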
+ InterleavedView src(out_ptr, out_samples_per_channel, + out_num); + InterleavedView dst(cmp_data.get(), ref_samples_per_channel, + out_num); ASSERT_EQ(ref_length, - static_cast(resampler.Resample( - out_ptr, out_length, cmp_data.get(), ref_length))); + static_cast(resampler.Resample(src, dst))); out_ptr = cmp_data.get(); } @@ -2374,7 +2341,7 @@ std::string ProduceDebugText(int render_input_sample_rate_hz, size_t render_output_num_channels, size_t capture_input_num_channels, size_t capture_output_num_channels) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Sample rates:" "\n Render input: " << render_input_sample_rate_hz @@ -2399,16 +2366,15 @@ std::string ProduceDebugText(int render_input_sample_rate_hz, // Validates that running the audio processing module using various combinations // of sample rates and number of channels works as intended. -void RunApmRateAndChannelTest( - rtc::ArrayView sample_rates_hz, - rtc::ArrayView render_channel_counts, - rtc::ArrayView capture_channel_counts) { +void RunApmRateAndChannelTest(ArrayView sample_rates_hz, + ArrayView render_channel_counts, + ArrayView capture_channel_counts) { webrtc::AudioProcessing::Config apm_config; apm_config.pipeline.multi_channel_render = true; apm_config.pipeline.multi_channel_capture = true; apm_config.echo_canceller.enabled = true; - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting().SetConfig(apm_config).Create(); + scoped_refptr apm = + BuiltinAudioProcessingBuilder(apm_config).Build(CreateEnvironment()); StreamConfig render_input_stream_config; StreamConfig render_output_stream_config; @@ -2496,8 +2462,6 @@ constexpr void Toggle(bool& b) { b ^= true; } -} // namespace - TEST(RuntimeSettingTest, TestDefaultCtor) { auto s = AudioProcessing::RuntimeSetting(); EXPECT_EQ(AudioProcessing::RuntimeSetting::Type::kNotSpecified, s.type()); @@ -2517,19 +2481,20 @@ TEST(ApmConfiguration, EnablePostProcessing) { new ::testing::NiceMock(); auto mock_post_processor = std::unique_ptr(mock_post_processor_ptr); - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting() + scoped_refptr apm = + BuiltinAudioProcessingBuilder() .SetCapturePostProcessing(std::move(mock_post_processor)) - .Create(); + .Build(CreateEnvironment()); Int16FrameData audio; - audio.num_channels = 1; - SetFrameSampleRate(&audio, AudioProcessing::NativeRate::kSampleRate16kHz); + audio.SetProperties(AudioProcessing::GetFrameSize( + AudioProcessing::NativeRate::kSampleRate16kHz), + /* num_channels=*/1); EXPECT_CALL(*mock_post_processor_ptr, Process(::testing::_)).Times(1); apm->ProcessStream(audio.data.data(), - StreamConfig(audio.sample_rate_hz, audio.num_channels), - StreamConfig(audio.sample_rate_hz, audio.num_channels), + StreamConfig(audio.sample_rate_hz, audio.num_channels()), + StreamConfig(audio.sample_rate_hz, audio.num_channels()), audio.data.data()); } @@ -2539,19 +2504,21 @@ TEST(ApmConfiguration, EnablePreProcessing) { new ::testing::NiceMock(); auto mock_pre_processor = std::unique_ptr(mock_pre_processor_ptr); - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting() + scoped_refptr apm = + BuiltinAudioProcessingBuilder() .SetRenderPreProcessing(std::move(mock_pre_processor)) - .Create(); + .Build(CreateEnvironment()); Int16FrameData audio; - audio.num_channels = 1; - SetFrameSampleRate(&audio, AudioProcessing::NativeRate::kSampleRate16kHz); + audio.SetProperties(AudioProcessing::GetFrameSize( + AudioProcessing::NativeRate::kSampleRate16kHz), + /* num_channels=*/1); EXPECT_CALL(*mock_pre_processor_ptr, 
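The PushResampler change above replaces InitializeIfNeeded() plus a pointer/length Resample() with a constructor taking per-channel frame sizes and a Resample(src, dst) call on views. A sketch for a mono 48 kHz to 16 kHz conversion; the <float> specialization and the parameter order (source frame size, destination frame size, channel count) are inferred from the hunk and should be treated as assumptions.

float input[480];   // 10 ms at 48 kHz, mono.
float output[160];  // 10 ms at 16 kHz, mono.
webrtc::PushResampler<float> resampler(/*src_samples_per_channel=*/480,
                                       /*dst_samples_per_channel=*/160,
                                       /*num_channels=*/1);
webrtc::InterleavedView<float> src(input, 480, 1);
webrtc::InterleavedView<float> dst(output, 160, 1);
resampler.Resample(src, dst);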
Process(::testing::_)).Times(1); apm->ProcessReverseStream( - audio.data.data(), StreamConfig(audio.sample_rate_hz, audio.num_channels), - StreamConfig(audio.sample_rate_hz, audio.num_channels), + audio.data.data(), + StreamConfig(audio.sample_rate_hz, audio.num_channels()), + StreamConfig(audio.sample_rate_hz, audio.num_channels()), audio.data.data()); } @@ -2561,19 +2528,20 @@ TEST(ApmConfiguration, EnableCaptureAnalyzer) { new ::testing::NiceMock(); auto mock_capture_analyzer = std::unique_ptr(mock_capture_analyzer_ptr); - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting() + scoped_refptr apm = + BuiltinAudioProcessingBuilder() .SetCaptureAnalyzer(std::move(mock_capture_analyzer)) - .Create(); + .Build(CreateEnvironment()); Int16FrameData audio; - audio.num_channels = 1; - SetFrameSampleRate(&audio, AudioProcessing::NativeRate::kSampleRate16kHz); + audio.SetProperties(AudioProcessing::GetFrameSize( + AudioProcessing::NativeRate::kSampleRate16kHz), + /* num_channels=*/1); EXPECT_CALL(*mock_capture_analyzer_ptr, Analyze(::testing::_)).Times(1); apm->ProcessStream(audio.data.data(), - StreamConfig(audio.sample_rate_hz, audio.num_channels), - StreamConfig(audio.sample_rate_hz, audio.num_channels), + StreamConfig(audio.sample_rate_hz, audio.num_channels()), + StreamConfig(audio.sample_rate_hz, audio.num_channels()), audio.data.data()); } @@ -2582,99 +2550,100 @@ TEST(ApmConfiguration, PreProcessingReceivesRuntimeSettings) { new ::testing::NiceMock(); auto mock_pre_processor = std::unique_ptr(mock_pre_processor_ptr); - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting() + scoped_refptr apm = + BuiltinAudioProcessingBuilder() .SetRenderPreProcessing(std::move(mock_pre_processor)) - .Create(); + .Build(CreateEnvironment()); apm->SetRuntimeSetting( AudioProcessing::RuntimeSetting::CreateCustomRenderSetting(0)); // RuntimeSettings forwarded during 'Process*Stream' calls. // Therefore we have to make one such call. Int16FrameData audio; - audio.num_channels = 1; - SetFrameSampleRate(&audio, AudioProcessing::NativeRate::kSampleRate16kHz); + audio.SetProperties(AudioProcessing::GetFrameSize( + AudioProcessing::NativeRate::kSampleRate16kHz), + /* num_channels=*/1); EXPECT_CALL(*mock_pre_processor_ptr, SetRuntimeSetting(::testing::_)) .Times(1); apm->ProcessReverseStream( - audio.data.data(), StreamConfig(audio.sample_rate_hz, audio.num_channels), - StreamConfig(audio.sample_rate_hz, audio.num_channels), + audio.data.data(), + StreamConfig(audio.sample_rate_hz, audio.num_channels()), + StreamConfig(audio.sample_rate_hz, audio.num_channels()), audio.data.data()); } -class MyEchoControlFactory : public EchoControlFactory { +class MockEchoControlFactory : public EchoControlFactory { public: - std::unique_ptr Create(int sample_rate_hz) { - auto ec = new test::MockEchoControl(); - EXPECT_CALL(*ec, AnalyzeRender(::testing::_)).Times(1); - EXPECT_CALL(*ec, AnalyzeCapture(::testing::_)).Times(2); - EXPECT_CALL(*ec, ProcessCapture(::testing::_, ::testing::_, ::testing::_)) - .Times(2); - return std::unique_ptr(ec); - } - - std::unique_ptr Create(int sample_rate_hz, - int num_render_channels, - int num_capture_channels) { - return Create(sample_rate_hz); - } + MOCK_METHOD(std::unique_ptr, + Create, + (const Environment&, int, int, int), + (override)); }; TEST(ApmConfiguration, EchoControlInjection) { // Verify that apm uses an injected echo controller if one is provided. 
- std::unique_ptr echo_control_factory( - new MyEchoControlFactory()); - - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting() + auto echo_control_factory = std::make_unique(); + EXPECT_CALL(*echo_control_factory, Create(_, _, _, _)) + .WillOnce(WithoutArgs([] { + auto ec = std::make_unique(); + EXPECT_CALL(*ec, AnalyzeRender).Times(1); + EXPECT_CALL(*ec, AnalyzeCapture).Times(2); + EXPECT_CALL(*ec, ProcessCapture(_, _, _)).Times(2); + return ec; + })); + + scoped_refptr apm = + BuiltinAudioProcessingBuilder() .SetEchoControlFactory(std::move(echo_control_factory)) - .Create(); + .Build(CreateEnvironment()); Int16FrameData audio; - audio.num_channels = 1; - SetFrameSampleRate(&audio, AudioProcessing::NativeRate::kSampleRate16kHz); + audio.SetProperties(AudioProcessing::GetFrameSize( + AudioProcessing::NativeRate::kSampleRate16kHz), + /* num_channels=*/1); apm->ProcessStream(audio.data.data(), - StreamConfig(audio.sample_rate_hz, audio.num_channels), - StreamConfig(audio.sample_rate_hz, audio.num_channels), + StreamConfig(audio.sample_rate_hz, audio.num_channels()), + StreamConfig(audio.sample_rate_hz, audio.num_channels()), audio.data.data()); apm->ProcessReverseStream( - audio.data.data(), StreamConfig(audio.sample_rate_hz, audio.num_channels), - StreamConfig(audio.sample_rate_hz, audio.num_channels), + audio.data.data(), + StreamConfig(audio.sample_rate_hz, audio.num_channels()), + StreamConfig(audio.sample_rate_hz, audio.num_channels()), audio.data.data()); apm->ProcessStream(audio.data.data(), - StreamConfig(audio.sample_rate_hz, audio.num_channels), - StreamConfig(audio.sample_rate_hz, audio.num_channels), + StreamConfig(audio.sample_rate_hz, audio.num_channels()), + StreamConfig(audio.sample_rate_hz, audio.num_channels()), audio.data.data()); } TEST(ApmConfiguration, EchoDetectorInjection) { using ::testing::_; - rtc::scoped_refptr mock_echo_detector = - rtc::make_ref_counted<::testing::StrictMock>(); + scoped_refptr mock_echo_detector = + make_ref_counted<::testing::StrictMock>(); EXPECT_CALL(*mock_echo_detector, Initialize(/*capture_sample_rate_hz=*/16000, _, /*render_sample_rate_hz=*/16000, _)) .Times(1); - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting() - .SetEchoDetector(mock_echo_detector) - .Create(); + scoped_refptr apm = BuiltinAudioProcessingBuilder() + .SetEchoDetector(mock_echo_detector) + .Build(CreateEnvironment()); // The echo detector is included in processing when enabled. 
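The mock above pins down the new EchoControlFactory::Create() signature: it now receives the Environment in addition to the sample rate and the render/capture channel counts. A sketch of a custom factory against that interface; MyEchoControl is a hypothetical EchoControl implementation, and the parameter meanings are taken from the removed overload.

class CustomEchoControlFactory : public webrtc::EchoControlFactory {
 public:
  std::unique_ptr<webrtc::EchoControl> Create(
      const webrtc::Environment& env,
      int sample_rate_hz,
      int num_render_channels,
      int num_capture_channels) override {
    // MyEchoControl is hypothetical; a real factory returns its own
    // EchoControl implementation here.
    return std::make_unique<MyEchoControl>(sample_rate_hz, num_render_channels,
                                           num_capture_channels);
  }
};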
EXPECT_CALL(*mock_echo_detector, AnalyzeRenderAudio(_)) - .WillOnce([](rtc::ArrayView render_audio) { + .WillOnce([](ArrayView render_audio) { EXPECT_EQ(render_audio.size(), 160u); }); EXPECT_CALL(*mock_echo_detector, AnalyzeCaptureAudio(_)) - .WillOnce([](rtc::ArrayView capture_audio) { + .WillOnce([](ArrayView capture_audio) { EXPECT_EQ(capture_audio.size(), 160u); }); EXPECT_CALL(*mock_echo_detector, GetMetrics()).Times(1); Int16FrameData frame; - frame.num_channels = 1; - SetFrameSampleRate(&frame, 16000); + frame.SetProperties(AudioProcessing::GetFrameSize( + AudioProcessing::NativeRate::kSampleRate16kHz), + /* num_channels=*/1); apm->ProcessReverseStream(frame.data.data(), StreamConfig(16000, 1), StreamConfig(16000, 1), frame.data.data()); @@ -2692,17 +2661,19 @@ TEST(ApmConfiguration, EchoDetectorInjection) { /*render_sample_rate_hz=*/48000, _)) .Times(1); EXPECT_CALL(*mock_echo_detector, AnalyzeRenderAudio(_)) - .WillOnce([](rtc::ArrayView render_audio) { + .WillOnce([](ArrayView render_audio) { EXPECT_EQ(render_audio.size(), 480u); }); EXPECT_CALL(*mock_echo_detector, AnalyzeCaptureAudio(_)) .Times(2) - .WillRepeatedly([](rtc::ArrayView capture_audio) { + .WillRepeatedly([](ArrayView capture_audio) { EXPECT_EQ(capture_audio.size(), 480u); }); EXPECT_CALL(*mock_echo_detector, GetMetrics()).Times(2); - SetFrameSampleRate(&frame, 48000); + frame.SetProperties(AudioProcessing::GetFrameSize( + AudioProcessing::NativeRate::kSampleRate48kHz), + frame.num_channels()); apm->ProcessStream(frame.data.data(), StreamConfig(48000, 1), StreamConfig(48000, 1), frame.data.data()); apm->ProcessReverseStream(frame.data.data(), StreamConfig(48000, 1), @@ -2711,12 +2682,12 @@ TEST(ApmConfiguration, EchoDetectorInjection) { StreamConfig(48000, 1), frame.data.data()); } -rtc::scoped_refptr CreateApm(bool mobile_aec) { +scoped_refptr CreateApm(bool mobile_aec) { // Enable residual echo detection, for stats. - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting() + scoped_refptr apm = + BuiltinAudioProcessingBuilder() .SetEchoDetector(CreateEchoDetector()) - .Create(); + .Build(CreateEnvironment()); if (!apm) { return apm; } @@ -2748,7 +2719,7 @@ rtc::scoped_refptr CreateApm(bool mobile_aec) { TEST(MAYBE_ApmStatistics, AECEnabledTest) { // Set up APM with AEC3 and process some audio. - rtc::scoped_refptr apm = CreateApm(false); + scoped_refptr apm = CreateApm(false); ASSERT_TRUE(apm); AudioProcessing::Config apm_config; apm_config.echo_canceller.enabled = true; @@ -2756,8 +2727,9 @@ TEST(MAYBE_ApmStatistics, AECEnabledTest) { // Set up an audioframe. Int16FrameData frame; - frame.num_channels = 1; - SetFrameSampleRate(&frame, AudioProcessing::NativeRate::kSampleRate32kHz); + frame.SetProperties(AudioProcessing::GetFrameSize( + AudioProcessing::NativeRate::kSampleRate32kHz), + /* num_channels=*/1); // Fill the audio frame with a sawtooth pattern. 
int16_t* ptr = frame.data.data(); @@ -2769,15 +2741,15 @@ TEST(MAYBE_ApmStatistics, AECEnabledTest) { for (int i = 0; i < 200; i++) { EXPECT_EQ(apm->ProcessReverseStream( frame.data.data(), - StreamConfig(frame.sample_rate_hz, frame.num_channels), - StreamConfig(frame.sample_rate_hz, frame.num_channels), + StreamConfig(frame.sample_rate_hz, frame.num_channels()), + StreamConfig(frame.sample_rate_hz, frame.num_channels()), frame.data.data()), 0); EXPECT_EQ(apm->set_stream_delay_ms(0), 0); EXPECT_EQ(apm->ProcessStream( frame.data.data(), - StreamConfig(frame.sample_rate_hz, frame.num_channels), - StreamConfig(frame.sample_rate_hz, frame.num_channels), + StreamConfig(frame.sample_rate_hz, frame.num_channels()), + StreamConfig(frame.sample_rate_hz, frame.num_channels()), frame.data.data()), 0); } @@ -2799,13 +2771,14 @@ TEST(MAYBE_ApmStatistics, AECEnabledTest) { TEST(MAYBE_ApmStatistics, AECMEnabledTest) { // Set up APM with AECM and process some audio. - rtc::scoped_refptr apm = CreateApm(true); + scoped_refptr apm = CreateApm(true); ASSERT_TRUE(apm); // Set up an audioframe. Int16FrameData frame; - frame.num_channels = 1; - SetFrameSampleRate(&frame, AudioProcessing::NativeRate::kSampleRate32kHz); + frame.SetProperties(AudioProcessing::GetFrameSize( + AudioProcessing::NativeRate::kSampleRate32kHz), + /* num_channels=*/1); // Fill the audio frame with a sawtooth pattern. int16_t* ptr = frame.data.data(); @@ -2817,15 +2790,15 @@ TEST(MAYBE_ApmStatistics, AECMEnabledTest) { for (int i = 0; i < 200; i++) { EXPECT_EQ(apm->ProcessReverseStream( frame.data.data(), - StreamConfig(frame.sample_rate_hz, frame.num_channels), - StreamConfig(frame.sample_rate_hz, frame.num_channels), + StreamConfig(frame.sample_rate_hz, frame.num_channels()), + StreamConfig(frame.sample_rate_hz, frame.num_channels()), frame.data.data()), 0); EXPECT_EQ(apm->set_stream_delay_ms(0), 0); EXPECT_EQ(apm->ProcessStream( frame.data.data(), - StreamConfig(frame.sample_rate_hz, frame.num_channels), - StreamConfig(frame.sample_rate_hz, frame.num_channels), + StreamConfig(frame.sample_rate_hz, frame.num_channels()), + StreamConfig(frame.sample_rate_hz, frame.num_channels()), frame.data.data()), 0); } @@ -2850,8 +2823,9 @@ TEST(ApmStatistics, DoNotReportVoiceDetectedStat) { // Set up an audioframe. Int16FrameData frame; - frame.num_channels = 1; - SetFrameSampleRate(&frame, AudioProcessing::NativeRate::kSampleRate32kHz); + frame.SetProperties(AudioProcessing::GetFrameSize( + AudioProcessing::NativeRate::kSampleRate32kHz), + /* num_channels=*/1); // Fill the audio frame with a sawtooth pattern. int16_t* ptr = frame.data.data(); @@ -2859,32 +2833,33 @@ TEST(ApmStatistics, DoNotReportVoiceDetectedStat) { ptr[i] = 10000 * ((i % 3) - 1); } - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting().Create(); + scoped_refptr apm = + BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); apm->Initialize(processing_config); // No metric should be reported. 
- EXPECT_EQ( - apm->ProcessStream(frame.data.data(), - StreamConfig(frame.sample_rate_hz, frame.num_channels), - StreamConfig(frame.sample_rate_hz, frame.num_channels), - frame.data.data()), - 0); + EXPECT_EQ(apm->ProcessStream( + frame.data.data(), + StreamConfig(frame.sample_rate_hz, frame.num_channels()), + StreamConfig(frame.sample_rate_hz, frame.num_channels()), + frame.data.data()), + 0); EXPECT_FALSE(apm->GetStatistics().voice_detected.has_value()); } TEST(ApmStatistics, GetStatisticsReportsNoEchoDetectorStatsWhenDisabled) { - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting().Create(); + scoped_refptr apm = + BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); Int16FrameData frame; - frame.num_channels = 1; - SetFrameSampleRate(&frame, AudioProcessing::NativeRate::kSampleRate32kHz); - ASSERT_EQ( - apm->ProcessStream(frame.data.data(), - StreamConfig(frame.sample_rate_hz, frame.num_channels), - StreamConfig(frame.sample_rate_hz, frame.num_channels), - frame.data.data()), - 0); + frame.SetProperties(AudioProcessing::GetFrameSize( + AudioProcessing::NativeRate::kSampleRate32kHz), + /* num_channels=*/1); + ASSERT_EQ(apm->ProcessStream( + frame.data.data(), + StreamConfig(frame.sample_rate_hz, frame.num_channels()), + StreamConfig(frame.sample_rate_hz, frame.num_channels()), + frame.data.data()), + 0); // Echo detector is disabled by default, no stats reported. AudioProcessingStats stats = apm->GetStatistics(); EXPECT_FALSE(stats.residual_echo_likelihood.has_value()); @@ -2893,20 +2868,21 @@ TEST(ApmStatistics, GetStatisticsReportsNoEchoDetectorStatsWhenDisabled) { TEST(ApmStatistics, GetStatisticsReportsEchoDetectorStatsWhenEnabled) { // Create APM with an echo detector injected. - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting() + scoped_refptr apm = + BuiltinAudioProcessingBuilder() .SetEchoDetector(CreateEchoDetector()) - .Create(); + .Build(CreateEnvironment()); Int16FrameData frame; - frame.num_channels = 1; - SetFrameSampleRate(&frame, AudioProcessing::NativeRate::kSampleRate32kHz); + frame.SetProperties(AudioProcessing::GetFrameSize( + AudioProcessing::NativeRate::kSampleRate32kHz), + /* num_channels=*/1); // Echo detector enabled: Report stats. - ASSERT_EQ( - apm->ProcessStream(frame.data.data(), - StreamConfig(frame.sample_rate_hz, frame.num_channels), - StreamConfig(frame.sample_rate_hz, frame.num_channels), - frame.data.data()), - 0); + ASSERT_EQ(apm->ProcessStream( + frame.data.data(), + StreamConfig(frame.sample_rate_hz, frame.num_channels()), + StreamConfig(frame.sample_rate_hz, frame.num_channels()), + frame.data.data()), + 0); AudioProcessingStats stats = apm->GetStatistics(); EXPECT_TRUE(stats.residual_echo_likelihood.has_value()); EXPECT_TRUE(stats.residual_echo_likelihood_recent_max.has_value()); @@ -3270,8 +3246,8 @@ TEST_P(ApmFormatHandlingTest, IntApi) { } // Call APM. - rtc::scoped_refptr ap = - AudioProcessingBuilderForTesting().Create(); + scoped_refptr ap = + BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); int error; if (stream_direction_ == kForward) { error = ap->ProcessStream(input_block.data(), test_params_.input_config, @@ -3349,8 +3325,8 @@ TEST_P(ApmFormatHandlingTest, FloatApi) { } // Call APM. 
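As the two GetStatistics tests above show, the echo-detector fields of AudioProcessingStats are populated only when a detector has been injected, so callers should treat them as optional. A sketch of the guarded read; the field names are the ones checked in the tests, and the reporting step is left as a comment.

const webrtc::AudioProcessingStats stats = apm->GetStatistics();
if (stats.residual_echo_likelihood.has_value()) {
  const double likelihood = *stats.residual_echo_likelihood;
  const double recent_max =
      stats.residual_echo_likelihood_recent_max.value_or(0.0);
  // ... report `likelihood` and `recent_max` to whatever stats sink applies ...
}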
- rtc::scoped_refptr ap = - AudioProcessingBuilderForTesting().Create(); + scoped_refptr ap = + BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); int error; if (stream_direction_ == kForward) { error = @@ -3425,8 +3401,8 @@ TEST(ApmAnalyzeReverseStreamFormatTest, AnalyzeReverseStream) { input_config.num_channels()); // Call APM. - rtc::scoped_refptr ap = - AudioProcessingBuilderForTesting().Create(); + scoped_refptr ap = + BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); int error = ap->AnalyzeReverseStream(input_block.channels(), input_config); // Check output. @@ -3438,4 +3414,5 @@ TEST(ApmAnalyzeReverseStreamFormatTest, AnalyzeReverseStream) { } } +} // namespace } // namespace webrtc diff --git a/modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.cc b/modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.cc index cb2336b87d..e9d765320e 100644 --- a/modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.cc +++ b/modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.cc @@ -39,8 +39,8 @@ void AudioSamplesScaler::Process(AudioBuffer& audio_buffer) { if (previous_gain_ == target_gain_) { // Apply a non-changing gain. for (size_t channel = 0; channel < audio_buffer.num_channels(); ++channel) { - rtc::ArrayView channel_view(audio_buffer.channels()[channel], - samples_per_channel_); + ArrayView channel_view(audio_buffer.channels()[channel], + samples_per_channel_); for (float& sample : channel_view) { sample *= gain; } @@ -54,8 +54,8 @@ void AudioSamplesScaler::Process(AudioBuffer& audio_buffer) { for (size_t channel = 0; channel < audio_buffer.num_channels(); ++channel) { gain = previous_gain_; - rtc::ArrayView channel_view(audio_buffer.channels()[channel], - samples_per_channel_); + ArrayView channel_view(audio_buffer.channels()[channel], + samples_per_channel_); for (float& sample : channel_view) { gain = std::min(gain + increment, target_gain_); sample *= gain; @@ -66,8 +66,8 @@ void AudioSamplesScaler::Process(AudioBuffer& audio_buffer) { for (size_t channel = 0; channel < audio_buffer.num_channels(); ++channel) { gain = previous_gain_; - rtc::ArrayView channel_view(audio_buffer.channels()[channel], - samples_per_channel_); + ArrayView channel_view(audio_buffer.channels()[channel], + samples_per_channel_); for (float& sample : channel_view) { gain = std::max(gain + increment, target_gain_); sample *= gain; @@ -79,12 +79,12 @@ void AudioSamplesScaler::Process(AudioBuffer& audio_buffer) { // Saturate the samples to be in the S16 range. 
for (size_t channel = 0; channel < audio_buffer.num_channels(); ++channel) { - rtc::ArrayView channel_view(audio_buffer.channels()[channel], - samples_per_channel_); + ArrayView channel_view(audio_buffer.channels()[channel], + samples_per_channel_); for (float& sample : channel_view) { constexpr float kMinFloatS16Value = -32768.f; constexpr float kMaxFloatS16Value = 32767.f; - sample = rtc::SafeClamp(sample, kMinFloatS16Value, kMaxFloatS16Value); + sample = SafeClamp(sample, kMinFloatS16Value, kMaxFloatS16Value); } } } diff --git a/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.cc b/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.cc index dfda582915..f65aae8923 100644 --- a/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.cc +++ b/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.cc @@ -80,7 +80,7 @@ void CaptureLevelsAdjuster::SetAnalogMicGainLevel(int level) { RTC_DCHECK_GE(level, kMinAnalogMicGainLevel); RTC_DCHECK_LE(level, kMaxAnalogMicGainLevel); int clamped_level = - rtc::SafeClamp(level, kMinAnalogMicGainLevel, kMaxAnalogMicGainLevel); + SafeClamp(level, kMinAnalogMicGainLevel, kMaxAnalogMicGainLevel); emulated_analog_mic_gain_level_ = clamped_level; UpdatePreAdjustmentGain(); diff --git a/modules/audio_processing/echo_control_mobile_bit_exact_unittest.cc b/modules/audio_processing/echo_control_mobile_bit_exact_unittest.cc index f351811e08..636ba6026e 100644 --- a/modules/audio_processing/echo_control_mobile_bit_exact_unittest.cc +++ b/modules/audio_processing/echo_control_mobile_bit_exact_unittest.cc @@ -62,12 +62,12 @@ void RunBitexactnessTest(int sample_rate_hz, int stream_delay_ms, EchoControlMobileImpl::RoutingMode routing_mode, bool comfort_noise_enabled, - const rtc::ArrayView& output_reference) { + const ArrayView& output_reference) { EchoControlMobileImpl echo_control_mobile; SetupComponent(sample_rate_hz, routing_mode, comfort_noise_enabled, &echo_control_mobile); - const int samples_per_channel = rtc::CheckedDivExact(sample_rate_hz, 100); + const int samples_per_channel = CheckedDivExact(sample_rate_hz, 100); const StreamConfig render_config(sample_rate_hz, num_channels); AudioBuffer render_buffer( render_config.sample_rate_hz(), render_config.num_channels(), diff --git a/modules/audio_processing/echo_control_mobile_impl.cc b/modules/audio_processing/echo_control_mobile_impl.cc index fa5cb8ffec..aff9d4e722 100644 --- a/modules/audio_processing/echo_control_mobile_impl.cc +++ b/modules/audio_processing/echo_control_mobile_impl.cc @@ -14,9 +14,9 @@ #include +#include "api/audio/audio_processing.h" #include "modules/audio_processing/aecm/echo_control_mobile.h" #include "modules/audio_processing/audio_buffer.h" -#include "modules/audio_processing/include/audio_processing.h" #include "rtc_base/checks.h" namespace webrtc { @@ -108,7 +108,7 @@ EchoControlMobileImpl::EchoControlMobileImpl() EchoControlMobileImpl::~EchoControlMobileImpl() {} void EchoControlMobileImpl::ProcessRenderAudio( - rtc::ArrayView packed_render_audio) { + ArrayView packed_render_audio) { RTC_DCHECK(stream_properties_); size_t buffer_index = 0; diff --git a/modules/audio_processing/echo_control_mobile_impl.h b/modules/audio_processing/echo_control_mobile_impl.h index f7f2626a0e..0d605fe42b 100644 --- a/modules/audio_processing/echo_control_mobile_impl.h +++ b/modules/audio_processing/echo_control_mobile_impl.h @@ -52,7 +52,7 @@ class EchoControlMobileImpl { int enable_comfort_noise(bool enable); bool 
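Several hunks above compute samples_per_channel as CheckedDivExact(sample_rate_hz, 100): APM always operates on 10 ms chunks, so the per-channel frame size is one hundredth of the sample rate, and CheckedDivExact() additionally asserts that the rate divides evenly. A short worked example; AudioProcessing::GetFrameSize(), used elsewhere in this CL, encodes the same relationship, and the webrtc:: qualification follows the namespace migration in this CL.

const int sample_rate_hz = 48000;
// 48000 samples/s * 0.010 s = 480 samples per channel per 10 ms frame.
const int samples_per_channel = webrtc::CheckedDivExact(sample_rate_hz, 100);
static_assert(48000 / 100 == 480, "10 ms at 48 kHz is 480 samples");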
is_comfort_noise_enabled() const; - void ProcessRenderAudio(rtc::ArrayView packed_render_audio); + void ProcessRenderAudio(ArrayView packed_render_audio); int ProcessCaptureAudio(AudioBuffer* audio, int stream_delay_ms); void Initialize(int sample_rate_hz, diff --git a/modules/audio_processing/echo_control_mobile_unittest.cc b/modules/audio_processing/echo_control_mobile_unittest.cc index ed0393043c..dc11a812d3 100644 --- a/modules/audio_processing/echo_control_mobile_unittest.cc +++ b/modules/audio_processing/echo_control_mobile_unittest.cc @@ -11,8 +11,8 @@ #include #include +#include "api/audio/audio_processing.h" #include "modules/audio_processing/echo_control_mobile_impl.h" -#include "modules/audio_processing/include/audio_processing.h" #include "test/gtest.h" namespace webrtc { diff --git a/modules/audio_processing/echo_detector/circular_buffer.cc b/modules/audio_processing/echo_detector/circular_buffer.cc index a6d10edfe2..c24f920c8e 100644 --- a/modules/audio_processing/echo_detector/circular_buffer.cc +++ b/modules/audio_processing/echo_detector/circular_buffer.cc @@ -28,9 +28,9 @@ void CircularBuffer::Push(float value) { RTC_DCHECK_LE(nr_elements_in_buffer_, buffer_.size()); } -absl::optional CircularBuffer::Pop() { +std::optional CircularBuffer::Pop() { if (nr_elements_in_buffer_ == 0) { - return absl::nullopt; + return std::nullopt; } const size_t index = (buffer_.size() + next_insertion_index_ - nr_elements_in_buffer_) % diff --git a/modules/audio_processing/echo_detector/circular_buffer.h b/modules/audio_processing/echo_detector/circular_buffer.h index db1aeaebf6..5cce0d7abe 100644 --- a/modules/audio_processing/echo_detector/circular_buffer.h +++ b/modules/audio_processing/echo_detector/circular_buffer.h @@ -13,10 +13,9 @@ #include +#include #include -#include "absl/types/optional.h" - namespace webrtc { // Ring buffer containing floating point values. @@ -26,7 +25,7 @@ struct CircularBuffer { ~CircularBuffer(); void Push(float value); - absl::optional Pop(); + std::optional Pop(); size_t Size() const { return nr_elements_in_buffer_; } // This function fills the buffer with zeros, but does not change its size. void Clear(); diff --git a/modules/audio_processing/echo_detector/circular_buffer_unittest.cc b/modules/audio_processing/echo_detector/circular_buffer_unittest.cc index 7a234d4a55..95bea5e2e2 100644 --- a/modules/audio_processing/echo_detector/circular_buffer_unittest.cc +++ b/modules/audio_processing/echo_detector/circular_buffer_unittest.cc @@ -47,7 +47,7 @@ TEST(CircularBufferTests, OverflowTest) { TEST(CircularBufferTests, ReadFromEmpty) { CircularBuffer test_buffer(3); - EXPECT_EQ(absl::nullopt, test_buffer.Pop()); + EXPECT_EQ(std::nullopt, test_buffer.Pop()); } } // namespace webrtc diff --git a/modules/audio_processing/g3doc/audio_processing_module.md b/modules/audio_processing/g3doc/audio_processing_module.md index a77f62fbaf..2e0a4b0d4e 100644 --- a/modules/audio_processing/g3doc/audio_processing_module.md +++ b/modules/audio_processing/g3doc/audio_processing_module.md @@ -10,8 +10,8 @@ microphone signal. These effects are required for VoIP calling and some examples include echo cancellation (AEC), noise suppression (NS) and automatic gain control (AGC). -The API for APM resides in [`/modules/audio_processing/include`][https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/modules/audio_processing/include]. 
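With the absl::optional to std::optional migration above, CircularBuffer::Pop() now returns std::nullopt on an empty buffer. A sketch of the calling pattern; the constructor size and oldest-first order match the implementation in circular_buffer.cc, and Consume() is a hypothetical sink.

#include <optional>

#include "modules/audio_processing/echo_detector/circular_buffer.h"

webrtc::CircularBuffer buffer(/*size=*/3);
buffer.Push(1.0f);
buffer.Push(2.0f);
// Oldest value first; the loop ends when Pop() returns std::nullopt.
while (std::optional<float> value = buffer.Pop()) {
  Consume(*value);  // Hypothetical consumer of the popped value.
}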
-APM is created using the [`AudioProcessingBuilder`][https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/modules/audio_processing/include/audio_processing.h] +The API for APM resides in [`/api/audio/audio_processing.h`][https://webrtc.googlesource.com/src/+/refs/heads/main/api/audio/audio_processing.h]. +APM is created using the [`BuiltinAudioProcessingBuilder`][https://webrtc.googlesource.com/src/+/refs/heads/main/api/audio/builtin_audio_processing_builder.h] builder that allows it to be customized and configured. Some specific aspects of APM include that: diff --git a/modules/audio_processing/gain_control_impl.cc b/modules/audio_processing/gain_control_impl.cc index 5f2b4872b9..f20548e019 100644 --- a/modules/audio_processing/gain_control_impl.cc +++ b/modules/audio_processing/gain_control_impl.cc @@ -11,15 +11,14 @@ #include "modules/audio_processing/gain_control_impl.h" #include +#include -#include "absl/types/optional.h" +#include "api/audio/audio_processing.h" #include "modules/audio_processing/agc/legacy/gain_control.h" #include "modules/audio_processing/audio_buffer.h" -#include "modules/audio_processing/include/audio_processing.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { @@ -106,7 +105,7 @@ GainControlImpl::GainControlImpl() GainControlImpl::~GainControlImpl() = default; void GainControlImpl::ProcessRenderAudio( - rtc::ArrayView packed_render_audio) { + ArrayView packed_render_audio) { for (size_t ch = 0; ch < mono_agcs_.size(); ++ch) { WebRtcAgc_AddFarend(mono_agcs_[ch]->state, packed_render_audio.data(), packed_render_audio.size()); @@ -119,8 +118,8 @@ void GainControlImpl::PackRenderAudioBuffer( RTC_DCHECK_GE(AudioBuffer::kMaxSplitFrameLength, audio.num_frames_per_band()); std::array mixed_16_kHz_render_data; - rtc::ArrayView mixed_16_kHz_render( - mixed_16_kHz_render_data.data(), audio.num_frames_per_band()); + ArrayView mixed_16_kHz_render(mixed_16_kHz_render_data.data(), + audio.num_frames_per_band()); if (audio.num_channels() == 1) { FloatS16ToS16(audio.split_bands_const(0)[kBand0To8kHz], audio.num_frames_per_band(), mixed_16_kHz_render_data.data()); diff --git a/modules/audio_processing/gain_control_impl.h b/modules/audio_processing/gain_control_impl.h index 8aea8f2e95..8a3e94d1e5 100644 --- a/modules/audio_processing/gain_control_impl.h +++ b/modules/audio_processing/gain_control_impl.h @@ -15,9 +15,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "modules/audio_processing/agc/gain_control.h" @@ -34,7 +34,7 @@ class GainControlImpl : public GainControl { ~GainControlImpl() override; - void ProcessRenderAudio(rtc::ArrayView packed_render_audio); + void ProcessRenderAudio(ArrayView packed_render_audio); int AnalyzeCaptureAudio(const AudioBuffer& audio); int ProcessCaptureAudio(AudioBuffer* audio, bool stream_has_echo); @@ -81,8 +81,8 @@ class GainControlImpl : public GainControl { std::vector> mono_agcs_; std::vector capture_levels_; - absl::optional num_proc_channels_; - absl::optional sample_rate_hz_; + std::optional num_proc_channels_; + std::optional sample_rate_hz_; static int instance_counter_; }; diff --git a/modules/audio_processing/gain_control_unittest.cc b/modules/audio_processing/gain_control_unittest.cc index 1662dc506f..d3549ab0f0 100644 --- a/modules/audio_processing/gain_control_unittest.cc +++ 
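To accompany the updated g3doc text above, this is the construction pattern the rest of the CL migrates to: the builder, an optional config, and the injected Environment. The scoped_refptr<AudioProcessing> result type and the environment_factory.h header for CreateEnvironment() are assumptions based on usage elsewhere in the diff.

#include "api/audio/audio_processing.h"
#include "api/audio/builtin_audio_processing_builder.h"
#include "api/environment/environment_factory.h"  // Assumed home of CreateEnvironment().

webrtc::scoped_refptr<webrtc::AudioProcessing> BuildDefaultApm() {
  webrtc::AudioProcessing::Config config;
  config.echo_canceller.enabled = true;
  config.echo_canceller.mobile_mode = false;
  return webrtc::BuiltinAudioProcessingBuilder()
      .SetConfig(config)
      .Build(webrtc::CreateEnvironment());
}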
b/modules/audio_processing/gain_control_unittest.cc @@ -70,13 +70,13 @@ void RunBitExactnessTest(int sample_rate_hz, int analog_level_min, int analog_level_max, int achieved_stream_analog_level_reference, - rtc::ArrayView output_reference) { + ArrayView output_reference) { GainControlImpl gain_controller; SetupComponent(sample_rate_hz, mode, target_level_dbfs, stream_analog_level, compression_gain_db, enable_limiter, analog_level_min, analog_level_max, &gain_controller); - const int samples_per_channel = rtc::CheckedDivExact(sample_rate_hz, 100); + const int samples_per_channel = CheckedDivExact(sample_rate_hz, 100); const StreamConfig render_config(sample_rate_hz, num_channels); AudioBuffer render_buffer( render_config.sample_rate_hz(), render_config.num_channels(), diff --git a/modules/audio_processing/gain_controller2.cc b/modules/audio_processing/gain_controller2.cc index dd3521268d..6e4a251456 100644 --- a/modules/audio_processing/gain_controller2.cc +++ b/modules/audio_processing/gain_controller2.cc @@ -13,6 +13,9 @@ #include #include +#include "api/audio/audio_frame.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" #include "common_audio/include/audio_util.h" #include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/agc2/cpu_features.h" @@ -22,7 +25,6 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { @@ -36,15 +38,16 @@ constexpr int kLogLimiterStatsPeriodNumFrames = kLogLimiterStatsPeriodMs / kFrameLengthMs; // Detects the available CPU features and applies any kill-switches. -AvailableCpuFeatures GetAllowedCpuFeatures() { +AvailableCpuFeatures GetAllowedCpuFeatures( + const FieldTrialsView& field_trials) { AvailableCpuFeatures features = GetAvailableCpuFeatures(); - if (field_trial::IsEnabled("WebRTC-Agc2SimdSse2KillSwitch")) { + if (field_trials.IsEnabled("WebRTC-Agc2SimdSse2KillSwitch")) { features.sse2 = false; } - if (field_trial::IsEnabled("WebRTC-Agc2SimdAvx2KillSwitch")) { + if (field_trials.IsEnabled("WebRTC-Agc2SimdAvx2KillSwitch")) { features.avx2 = false; } - if (field_trial::IsEnabled("WebRTC-Agc2SimdNeonKillSwitch")) { + if (field_trials.IsEnabled("WebRTC-Agc2SimdNeonKillSwitch")) { features.neon = false; } return features; @@ -63,11 +66,11 @@ struct SpeechLevel { }; // Computes the audio levels for the first channel in `frame`. 
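Editor's aside on the GetAllowedCpuFeatures() change above (not part of the patch): the AGC2 SIMD kill switches are no longer read through the global field_trial API but from whatever FieldTrialsView the injected Environment carries. A minimal sketch of that lookup, assuming the usual "Name/Enabled/" trial-string convention; the wrapper function name is a placeholder and a default CreateEnvironment() simply reports the trials as disabled.

#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"

void SketchAgc2KillSwitch() {
  const webrtc::Environment env = webrtc::CreateEnvironment();
  // Mirrors the checks in GetAllowedCpuFeatures(): a trial string such as
  // "WebRTC-Agc2SimdAvx2KillSwitch/Enabled/" supplied to the Environment
  // would make IsEnabled() return true, and the AVX2 path would be dropped
  // (features.avx2 = false in the patched code).
  if (env.field_trials().IsEnabled("WebRTC-Agc2SimdAvx2KillSwitch")) {
    // AVX2 disabled by kill switch.
  }
}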
-AudioLevels ComputeAudioLevels(AudioFrameView frame, +AudioLevels ComputeAudioLevels(DeinterleavedView frame, ApmDataDumper& data_dumper) { float peak = 0.0f; float rms = 0.0f; - for (const auto& x : frame.channel(0)) { + for (const auto& x : frame[0]) { peak = std::max(std::fabs(x), peak); rms += x * x; } @@ -84,17 +87,20 @@ AudioLevels ComputeAudioLevels(AudioFrameView frame, std::atomic GainController2::instance_count_(0); GainController2::GainController2( + const Environment& env, const Agc2Config& config, const InputVolumeControllerConfig& input_volume_controller_config, int sample_rate_hz, int num_channels, bool use_internal_vad) - : cpu_features_(GetAllowedCpuFeatures()), + : cpu_features_(GetAllowedCpuFeatures(env.field_trials())), data_dumper_(instance_count_.fetch_add(1) + 1), fixed_gain_applier_( /*hard_clip_samples=*/false, /*initial_gain_factor=*/DbToRatio(config.fixed_digital.gain_db)), - limiter_(sample_rate_hz, &data_dumper_, /*histogram_name_prefix=*/"Agc2"), + limiter_(&data_dumper_, + SampleRateToDefaultChannelSize(sample_rate_hz), + /*histogram_name_prefix=*/"Agc2"), calls_since_last_limiter_log_(0) { RTC_DCHECK(Validate(config)); data_dumper_.InitiateNewSetOfRecordings(); @@ -153,7 +159,7 @@ void GainController2::SetFixedGainDb(float gain_db) { void GainController2::Analyze(int applied_input_volume, const AudioBuffer& audio_buffer) { - recommended_input_volume_ = absl::nullopt; + recommended_input_volume_ = std::nullopt; RTC_DCHECK_GE(applied_input_volume, 0); RTC_DCHECK_LE(applied_input_volume, 255); @@ -164,10 +170,10 @@ void GainController2::Analyze(int applied_input_volume, } } -void GainController2::Process(absl::optional speech_probability, +void GainController2::Process(std::optional speech_probability, bool input_volume_changed, AudioBuffer* audio) { - recommended_input_volume_ = absl::nullopt; + recommended_input_volume_ = std::nullopt; data_dumper_.DumpRaw("agc2_applied_input_volume_changed", input_volume_changed); @@ -179,8 +185,8 @@ void GainController2::Process(absl::optional speech_probability, saturation_protector_->Reset(); } - AudioFrameView float_frame(audio->channels(), audio->num_channels(), - audio->num_frames()); + DeinterleavedView float_frame = audio->view(); + // Compute speech probability. if (vad_) { // When the VAD component runs, `speech_probability` should not be specified @@ -200,13 +206,13 @@ void GainController2::Process(absl::optional speech_probability, // Compute audio, noise and speech levels. AudioLevels audio_levels = ComputeAudioLevels(float_frame, data_dumper_); - absl::optional noise_rms_dbfs; + std::optional noise_rms_dbfs; if (noise_level_estimator_) { // TODO(bugs.webrtc.org/7494): Pass `audio_levels` to remove duplicated // computation in `noise_level_estimator_`. noise_rms_dbfs = noise_level_estimator_->Analyze(float_frame); } - absl::optional speech_level; + std::optional speech_level; if (speech_level_estimator_) { RTC_DCHECK(speech_probability.has_value()); speech_level_estimator_->Update( @@ -225,8 +231,8 @@ void GainController2::Process(absl::optional speech_probability, input_volume_controller_->RecommendInputVolume( *speech_probability, speech_level->is_confident - ? absl::optional(speech_level->rms_dbfs) - : absl::nullopt); + ? 
std::optional(speech_level->rms_dbfs) + : std::nullopt); } } diff --git a/modules/audio_processing/gain_controller2.h b/modules/audio_processing/gain_controller2.h index 43b5828d35..5594e80b0d 100644 --- a/modules/audio_processing/gain_controller2.h +++ b/modules/audio_processing/gain_controller2.h @@ -15,6 +15,8 @@ #include #include +#include "api/audio/audio_processing.h" +#include "api/environment/environment.h" #include "modules/audio_processing/agc2/adaptive_digital_gain_controller.h" #include "modules/audio_processing/agc2/cpu_features.h" #include "modules/audio_processing/agc2/gain_applier.h" @@ -24,7 +26,6 @@ #include "modules/audio_processing/agc2/saturation_protector.h" #include "modules/audio_processing/agc2/speech_level_estimator.h" #include "modules/audio_processing/agc2/vad_wrapper.h" -#include "modules/audio_processing/include/audio_processing.h" #include "modules/audio_processing/logging/apm_data_dumper.h" namespace webrtc { @@ -38,6 +39,7 @@ class GainController2 { // Ctor. If `use_internal_vad` is true, an internal voice activity // detector is used for digital adaptive gain. GainController2( + const Environment& env, const AudioProcessing::Config::GainController2& config, const InputVolumeController::Config& input_volume_controller_config, int sample_rate_hz, @@ -68,7 +70,8 @@ class GainController2 { // computes the speech probability via `vad_`. // Handles input volume changes; if the caller cannot determine whether an // input volume change occurred, set `input_volume_changed` to false. - void Process(absl::optional speech_probability, + // TODO(bugs.webrtc.org/7494): Remove `speech_probability`. + void Process(std::optional speech_probability, bool input_volume_changed, AudioBuffer* audio); @@ -76,7 +79,7 @@ class GainController2 { AvailableCpuFeatures GetCpuFeatures() const { return cpu_features_; } - absl::optional recommended_input_volume() const { + std::optional recommended_input_volume() const { return recommended_input_volume_; } @@ -102,7 +105,7 @@ class GainController2 { // Recommended input volume from `InputVolumecontroller`. Non-empty after // `Process()` if input volume controller is enabled and // `InputVolumeController::Process()` has returned a non-empty value. 
- absl::optional recommended_input_volume_; + std::optional recommended_input_volume_; }; } // namespace webrtc diff --git a/modules/audio_processing/gain_controller2_unittest.cc b/modules/audio_processing/gain_controller2_unittest.cc index 5023bab617..1d415a5de4 100644 --- a/modules/audio_processing/gain_controller2_unittest.cc +++ b/modules/audio_processing/gain_controller2_unittest.cc @@ -16,7 +16,8 @@ #include #include -#include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "modules/audio_processing/agc2/agc2_testing_common.h" #include "modules/audio_processing/audio_buffer.h" #include "modules/audio_processing/test/audio_buffer_tools.h" @@ -48,7 +49,7 @@ float RunAgc2WithConstantInput(GainController2& agc2, int sample_rate_hz, int num_channels = 1, int applied_initial_volume = 0) { - const int num_samples = rtc::CheckedDivExact(sample_rate_hz, 100); + const int num_samples = CheckedDivExact(sample_rate_hz, 100); AudioBuffer ab(sample_rate_hz, num_channels, sample_rate_hz, num_channels, sample_rate_hz, num_channels); @@ -58,7 +59,7 @@ float RunAgc2WithConstantInput(GainController2& agc2, const auto applied_volume = agc2.recommended_input_volume(); agc2.Analyze(applied_volume.value_or(applied_initial_volume), ab); - agc2.Process(/*speech_probability=*/absl::nullopt, + agc2.Process(/*speech_probability=*/std::nullopt, /*input_volume_changed=*/false, &ab); } @@ -73,10 +74,11 @@ std::unique_ptr CreateAgc2FixedDigitalMode( config.adaptive_digital.enabled = false; config.fixed_digital.gain_db = fixed_gain_db; EXPECT_TRUE(GainController2::Validate(config)); - return std::make_unique( - config, InputVolumeControllerConfig{}, sample_rate_hz, - /*num_channels=*/1, - /*use_internal_vad=*/true); + return std::make_unique(CreateEnvironment(), config, + InputVolumeControllerConfig{}, + sample_rate_hz, + /*num_channels=*/1, + /*use_internal_vad=*/true); } constexpr InputVolumeControllerConfig kTestInputVolumeControllerConfig{ @@ -175,7 +177,8 @@ TEST(GainController2, config.input_volume_controller.enabled = false; auto gain_controller = std::make_unique( - config, InputVolumeControllerConfig{}, kSampleRateHz, kNumChannels, + CreateEnvironment(), config, InputVolumeControllerConfig{}, kSampleRateHz, + kNumChannels, /*use_internal_vad=*/true); EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); @@ -207,7 +210,8 @@ TEST( config.input_volume_controller.enabled = false; auto gain_controller = std::make_unique( - config, kTestInputVolumeControllerConfig, kSampleRateHz, kNumChannels, + CreateEnvironment(), config, kTestInputVolumeControllerConfig, + kSampleRateHz, kNumChannels, /*use_internal_vad=*/true); EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); @@ -239,7 +243,8 @@ TEST(GainController2, config.adaptive_digital.enabled = true; auto gain_controller = std::make_unique( - config, InputVolumeControllerConfig{}, kSampleRateHz, kNumChannels, + CreateEnvironment(), config, InputVolumeControllerConfig{}, kSampleRateHz, + kNumChannels, /*use_internal_vad=*/true); EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); @@ -272,7 +277,8 @@ TEST( config.adaptive_digital.enabled = true; auto gain_controller = std::make_unique( - config, kTestInputVolumeControllerConfig, kSampleRateHz, kNumChannels, + CreateEnvironment(), config, kTestInputVolumeControllerConfig, + kSampleRateHz, kNumChannels, /*use_internal_vad=*/true); 
EXPECT_FALSE(gain_controller->recommended_input_volume().has_value()); @@ -293,7 +299,7 @@ TEST( // Checks that the default config is applied. TEST(GainController2, ApplyDefaultConfig) { auto gain_controller2 = std::make_unique( - Agc2Config{}, InputVolumeControllerConfig{}, + CreateEnvironment(), Agc2Config{}, InputVolumeControllerConfig{}, /*sample_rate_hz=*/16000, /*num_channels=*/2, /*use_internal_vad=*/true); EXPECT_TRUE(gain_controller2.get()); @@ -410,8 +416,9 @@ TEST(GainController2, CheckFinalGainWithAdaptiveDigitalController) { Agc2Config config; config.fixed_digital.gain_db = 0.0f; config.adaptive_digital.enabled = true; - GainController2 agc2(config, /*input_volume_controller_config=*/{}, - kSampleRateHz, kStereo, + GainController2 agc2(CreateEnvironment(), config, + /*input_volume_controller_config=*/{}, kSampleRateHz, + kStereo, /*use_internal_vad=*/true); test::InputAudioFile input_file( @@ -439,18 +446,18 @@ TEST(GainController2, CheckFinalGainWithAdaptiveDigitalController) { x *= gain; } test::CopyVectorToAudioBuffer(stream_config, frame, &audio_buffer); - agc2.Process(/*speech_probability=*/absl::nullopt, + agc2.Process(/*speech_probability=*/std::nullopt, /*input_volume_changed=*/false, &audio_buffer); } // Estimate the applied gain by processing a probing frame. SetAudioBufferSamples(/*value=*/1.0f, audio_buffer); - agc2.Process(/*speech_probability=*/absl::nullopt, + agc2.Process(/*speech_probability=*/std::nullopt, /*input_volume_changed=*/false, &audio_buffer); const float applied_gain_db = 20.0f * std::log10(audio_buffer.channels_const()[0][0]); - constexpr float kExpectedGainDb = 5.6f; + constexpr float kExpectedGainDb = 7.0f; constexpr float kToleranceDb = 0.3f; EXPECT_NEAR(applied_gain_db, kExpectedGainDb, kToleranceDb); } @@ -465,10 +472,10 @@ TEST(GainController2DeathTest, AudioBuffer audio_buffer(kSampleRateHz, kStereo, kSampleRateHz, kStereo, kSampleRateHz, kStereo); // Create AGC2 so that the interval VAD is also created. - GainController2 agc2(/*config=*/{.adaptive_digital = {.enabled = true}}, - /*input_volume_controller_config=*/{}, kSampleRateHz, - kStereo, - /*use_internal_vad=*/true); + GainController2 agc2( + CreateEnvironment(), /*config=*/{.adaptive_digital = {.enabled = true}}, + /*input_volume_controller_config=*/{}, kSampleRateHz, kStereo, + /*use_internal_vad=*/true); EXPECT_DEATH(agc2.Process(/*speech_probability=*/0.123f, /*input_volume_changed=*/false, &audio_buffer), @@ -484,13 +491,15 @@ TEST(GainController2, constexpr int kStereo = 2; // Create AGC2 enabling only the adaptive digital controller. + const Environment env = CreateEnvironment(); Agc2Config config; config.fixed_digital.gain_db = 0.0f; config.adaptive_digital.enabled = true; - GainController2 agc2(config, /*input_volume_controller_config=*/{}, + GainController2 agc2(env, config, /*input_volume_controller_config=*/{}, kSampleRateHz, kStereo, /*use_internal_vad=*/false); - GainController2 agc2_reference(config, /*input_volume_controller_config=*/{}, + GainController2 agc2_reference(env, config, + /*input_volume_controller_config=*/{}, kSampleRateHz, kStereo, /*use_internal_vad=*/true); @@ -528,17 +537,20 @@ TEST(GainController2, &audio_buffer); test::CopyVectorToAudioBuffer(stream_config, frame, &audio_buffer_reference); - agc2_reference.Process(/*speech_probability=*/absl::nullopt, + agc2_reference.Process(/*speech_probability=*/std::nullopt, /*input_volume_changed=*/false, &audio_buffer_reference); // Check the output buffers. 
- for (int i = 0; i < kStereo; ++i) { - for (int j = 0; j < static_cast(audio_buffer.num_frames()); ++j) { + for (int channel = 0; channel < kStereo; ++channel) { + for (int frame_num = 0; + frame_num < static_cast(audio_buffer.num_frames()); + ++frame_num) { all_samples_zero &= - fabs(audio_buffer.channels_const()[i][j]) < kEpsilon; + fabs(audio_buffer.channels_const()[channel][frame_num]) < kEpsilon; all_samples_equal &= - fabs(audio_buffer.channels_const()[i][j] - - audio_buffer_reference.channels_const()[i][j]) < kEpsilon; + fabs(audio_buffer.channels_const()[channel][frame_num] - + audio_buffer_reference.channels_const()[channel][frame_num]) < + kEpsilon; } } } @@ -555,13 +567,15 @@ TEST(GainController2, constexpr int kStereo = 2; // Create AGC2 enabling only the adaptive digital controller. + const Environment env = CreateEnvironment(); Agc2Config config; config.fixed_digital.gain_db = 0.0f; config.adaptive_digital.enabled = true; - GainController2 agc2(config, /*input_volume_controller_config=*/{}, + GainController2 agc2(env, config, /*input_volume_controller_config=*/{}, kSampleRateHz, kStereo, /*use_internal_vad=*/false); - GainController2 agc2_reference(config, /*input_volume_controller_config=*/{}, + GainController2 agc2_reference(env, config, + /*input_volume_controller_config=*/{}, kSampleRateHz, kStereo, /*use_internal_vad=*/true); VoiceActivityDetectorWrapper vad(GetAvailableCpuFeatures(), kSampleRateHz); @@ -593,19 +607,20 @@ TEST(GainController2, } test::CopyVectorToAudioBuffer(stream_config, frame, &audio_buffer_reference); - agc2_reference.Process(absl::nullopt, /*input_volume_changed=*/false, + agc2_reference.Process(std::nullopt, /*input_volume_changed=*/false, &audio_buffer_reference); test::CopyVectorToAudioBuffer(stream_config, frame, &audio_buffer); - float speech_probability = vad.Analyze(AudioFrameView( - audio_buffer.channels(), audio_buffer.num_channels(), - audio_buffer.num_frames())); + float speech_probability = vad.Analyze(audio_buffer.view()); agc2.Process(speech_probability, /*input_volume_changed=*/false, &audio_buffer); // Check the output buffer. 
- for (int i = 0; i < kStereo; ++i) { - for (int j = 0; j < static_cast(audio_buffer.num_frames()); ++j) { - EXPECT_FLOAT_EQ(audio_buffer.channels_const()[i][j], - audio_buffer_reference.channels_const()[i][j]); + for (int channel = 0; channel < kStereo; ++channel) { + for (int frame_num = 0; + frame_num < static_cast(audio_buffer.num_frames()); + ++frame_num) { + EXPECT_FLOAT_EQ( + audio_buffer.channels_const()[channel][frame_num], + audio_buffer_reference.channels_const()[channel][frame_num]); } } } diff --git a/modules/audio_processing/high_pass_filter.cc b/modules/audio_processing/high_pass_filter.cc index 3b4740f6a5..9d8659ee8c 100644 --- a/modules/audio_processing/high_pass_filter.cc +++ b/modules/audio_processing/high_pass_filter.cc @@ -19,18 +19,21 @@ namespace webrtc { namespace { // [B,A] = butter(2,100/8000,'high') constexpr CascadedBiQuadFilter::BiQuadCoefficients - kHighPassFilterCoefficients16kHz = {{0.97261f, -1.94523f, 0.97261f}, - {-1.94448f, 0.94598f}}; + kHighPassFilterCoefficients16kHz = { + {0.972613898f, -1.945227797f, 0.972613898f}, + {-1.944477658f, 0.945977936f}}; // [B,A] = butter(2,100/16000,'high') constexpr CascadedBiQuadFilter::BiQuadCoefficients - kHighPassFilterCoefficients32kHz = {{0.98621f, -1.97242f, 0.98621f}, - {-1.97223f, 0.97261f}}; + kHighPassFilterCoefficients32kHz = { + {0.986211925f, -1.972423849f, 0.986211925f}, + {-1.972233729f, 0.972613969f}}; // [B,A] = butter(2,100/24000,'high') constexpr CascadedBiQuadFilter::BiQuadCoefficients - kHighPassFilterCoefficients48kHz = {{0.99079f, -1.98157f, 0.99079f}, - {-1.98149f, 0.98166f}}; + kHighPassFilterCoefficients48kHz = { + {0.990786698f, -1.981573396f, 0.990786698f}, + {-1.981488509f, 0.981658283f}}; constexpr size_t kNumberOfHighPassBiQuads = 1; @@ -69,14 +72,14 @@ void HighPassFilter::Process(AudioBuffer* audio, bool use_split_band_data) { RTC_DCHECK_EQ(filters_.size(), audio->num_channels()); if (use_split_band_data) { for (size_t k = 0; k < audio->num_channels(); ++k) { - rtc::ArrayView channel_data = rtc::ArrayView( + ArrayView channel_data = ArrayView( audio->split_bands(k)[0], audio->num_frames_per_band()); filters_[k]->Process(channel_data); } } else { for (size_t k = 0; k < audio->num_channels(); ++k) { - rtc::ArrayView channel_data = - rtc::ArrayView(&audio->channels()[k][0], audio->num_frames()); + ArrayView channel_data = + ArrayView(&audio->channels()[k][0], audio->num_frames()); filters_[k]->Process(channel_data); } } diff --git a/modules/audio_processing/high_pass_filter_unittest.cc b/modules/audio_processing/high_pass_filter_unittest.cc index 9f3c8fe595..4fa7e7534e 100644 --- a/modules/audio_processing/high_pass_filter_unittest.cc +++ b/modules/audio_processing/high_pass_filter_unittest.cc @@ -121,13 +121,41 @@ void RunBitexactnessTest(int num_channels, // Method for forming a vector out of an array. // TODO(peah): Remove once braced initialization is allowed. -std::vector CreateVector(const rtc::ArrayView& array_view) { +std::vector CreateVector(const ArrayView& array_view) { std::vector v; for (auto value : array_view) { v.push_back(value); } return v; } + +float DcSignalAtenuation(float sampleRate) { + constexpr int kNumChannels = 1; + HighPassFilter high_pass_filter(sampleRate, kNumChannels); + std::vector> audio_data( + 1, std::vector(sampleRate / 100)); + + constexpr float kMaxDcLevel = 32767.0f; + float energy_before_filtering; + float energy_after_filtering; + for (int run = 0; run < 2; ++run) { + // Set input. 
+ energy_before_filtering = 0.0f; + for (unsigned int sample = 0; sample < audio_data[0].size(); ++sample) { + audio_data[0][sample] = kMaxDcLevel; + energy_before_filtering += audio_data[0][sample] * audio_data[0][sample]; + } + + high_pass_filter.Process(&audio_data); + energy_after_filtering = 0.0f; + for (unsigned int sample = 0; sample < audio_data[0].size(); ++sample) { + energy_after_filtering += audio_data[0][sample] * audio_data[0][sample]; + } + } + const float attenuation_db = + 10.0f * std::log10(energy_before_filtering / energy_after_filtering); + return attenuation_db; +} } // namespace TEST(HighPassFilterAccuracyTest, ResetWithAudioBufferInterface) { @@ -191,15 +219,14 @@ TEST(HighPassFilterAccuracyTest, MonoInitial) { -0.232329f, -0.273644f, -0.323162f, -0.149105f, -0.559646f, 0.269458f, 0.145333f, -0.005597f, -0.009717f, -0.223051f, 0.284676f, -0.037228f, -0.199679f, 0.377651f, -0.062813f, -0.164607f}; - const float kReference[] = {0.146139f, 0.490336f, -0.649520f, 0.233881f, - 0.073214f, -0.373256f, -0.115394f, 0.102109f, - 0.976217f, 0.702270f, -0.457697f, 0.757116f}; + const float kReference[] = {0.146139f, 0.490339f, -0.649516f, 0.233889f, + 0.073224f, -0.373246f, -0.115382f, 0.102119f, + 0.976229f, 0.702288f, -0.457669f, 0.757161f}; for (bool use_audio_buffer_interface : {true, false}) { - RunBitexactnessTest( - 1, use_audio_buffer_interface, - CreateVector(rtc::ArrayView(kReferenceInput)), - CreateVector(rtc::ArrayView(kReference))); + RunBitexactnessTest(1, use_audio_buffer_interface, + CreateVector(ArrayView(kReferenceInput)), + CreateVector(ArrayView(kReference))); } } @@ -286,16 +313,26 @@ TEST(HighPassFilterAccuracyTest, MonoConverged) { 0.263284f, 0.083972f, -0.104256f, 0.227892f, 0.223253f, 0.033592f, 0.159638f, 0.115358f, -0.275811f, 0.212265f, -0.183658f, -0.168768f}; - const float kReference[] = {-0.248836f, -0.086982f, 0.083715f, -0.036787f, - 0.127212f, 0.147464f, -0.221733f, -0.004484f, - -0.535107f, 0.385999f, -0.116346f, -0.265302f}; + const float kReference[] = {-0.248978f, -0.087127f, 0.083567f, -0.036940f, + 0.127056f, 0.147304f, -0.221897f, -0.004650f, + -0.535279f, 0.385823f, -0.116531f, -0.265494f}; for (bool use_audio_buffer_interface : {true, false}) { - RunBitexactnessTest( - 1, use_audio_buffer_interface, - CreateVector(rtc::ArrayView(kReferenceInput)), - CreateVector(rtc::ArrayView(kReference))); + RunBitexactnessTest(1, use_audio_buffer_interface, + CreateVector(ArrayView(kReferenceInput)), + CreateVector(ArrayView(kReference))); } } +TEST(HighPassFilterAccuracyTest, DcSignalAtenuation16) { + EXPECT_GE(DcSignalAtenuation(16000), 47.3f); +} + +TEST(HighPassFilterAccuracyTest, DcSignalAtenuation32) { + EXPECT_GE(DcSignalAtenuation(32000), 47.3f); +} + +TEST(HighPassFilterAccuracyTest, DcSignalAtenuation48) { + EXPECT_GE(DcSignalAtenuation(48000), 47.3f); +} } // namespace webrtc diff --git a/modules/audio_processing/include/aec_dump.h b/modules/audio_processing/include/aec_dump.h index 6f2eb64f3a..532fa210c7 100644 --- a/modules/audio_processing/include/aec_dump.h +++ b/modules/audio_processing/include/aec_dump.h @@ -13,12 +13,12 @@ #include +#include #include #include "absl/base/attributes.h" -#include "absl/types/optional.h" +#include "api/audio/audio_processing.h" #include "modules/audio_processing/include/audio_frame_view.h" -#include "modules/audio_processing/include/audio_processing.h" namespace webrtc { @@ -68,7 +68,7 @@ class AecDump { struct AudioProcessingState { int delay; int drift; - absl::optional applied_input_volume; + 
std::optional applied_input_volume; bool keypress; }; diff --git a/modules/audio_processing/include/audio_frame_proxies.cc b/modules/audio_processing/include/audio_frame_proxies.cc index 7cc4fb75e4..e37645ef20 100644 --- a/modules/audio_processing/include/audio_frame_proxies.cc +++ b/modules/audio_processing/include/audio_frame_proxies.cc @@ -11,7 +11,7 @@ #include "modules/audio_processing/include/audio_frame_proxies.h" #include "api/audio/audio_frame.h" -#include "modules/audio_processing/include/audio_processing.h" +#include "api/audio/audio_processing.h" namespace webrtc { diff --git a/modules/audio_processing/include/audio_frame_view.h b/modules/audio_processing/include/audio_frame_view.h index 164784a7cc..27e2009067 100644 --- a/modules/audio_processing/include/audio_frame_view.h +++ b/modules/audio_processing/include/audio_frame_view.h @@ -11,7 +11,7 @@ #ifndef MODULES_AUDIO_PROCESSING_INCLUDE_AUDIO_FRAME_VIEW_H_ #define MODULES_AUDIO_PROCESSING_INCLUDE_AUDIO_FRAME_VIEW_H_ -#include "api/array_view.h" +#include "api/audio/audio_view.h" namespace webrtc { @@ -22,46 +22,44 @@ class AudioFrameView { // `num_channels` and `channel_size` describe the T** // `audio_samples`. `audio_samples` is assumed to point to a // two-dimensional |num_channels * channel_size| array of floats. + // + // Note: The implementation now only requires the first channel pointer. + // The previous implementation retained a pointer to externally owned array + // of channel pointers, but since the channel size and count are provided + // and the array is assumed to be a single two-dimensional array, the other + // channel pointers can be calculated based on that (which is what the class + // now uses `DeinterleavedView<>` internally for). AudioFrameView(T* const* audio_samples, int num_channels, int channel_size) - : audio_samples_(audio_samples), - num_channels_(num_channels), - channel_size_(channel_size) { - RTC_DCHECK_GE(num_channels_, 0); - RTC_DCHECK_GE(channel_size_, 0); + : view_(num_channels && channel_size ? audio_samples[0] : nullptr, + channel_size, + num_channels) { + RTC_DCHECK_GE(view_.num_channels(), 0); + RTC_DCHECK_GE(view_.samples_per_channel(), 0); } - // Implicit cast to allow converting Frame to - // Frame. + // Implicit cast to allow converting AudioFrameView to + // AudioFrameView. template - AudioFrameView(AudioFrameView other) - : audio_samples_(other.data()), - num_channels_(other.num_channels()), - channel_size_(other.samples_per_channel()) {} + AudioFrameView(AudioFrameView other) : view_(other.view()) {} - AudioFrameView() = delete; - - int num_channels() const { return num_channels_; } - - int samples_per_channel() const { return channel_size_; } + // Allow constructing AudioFrameView from a DeinterleavedView. 
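A short editor's illustration (not part of the patch) of the constructor described in the comment above: because the view now assumes one contiguous num_channels * samples_per_channel float array, an AudioFrameView can be built from a DeinterleavedView over a flat buffer, and per-channel access returns MonoView spans into that buffer. Names other than the two view types are placeholders.

#include <vector>

#include "api/audio/audio_view.h"
#include "modules/audio_processing/include/audio_frame_view.h"

void SketchAudioFrameView() {
  constexpr size_t kChannels = 2;
  constexpr size_t kSamplesPerChannel = 480;  // 10 ms at 48 kHz.
  // One contiguous buffer holding all channels back to back.
  std::vector<float> data(kChannels * kSamplesPerChannel, 0.0f);
  webrtc::DeinterleavedView<float> view(data.data(), kSamplesPerChannel,
                                        kChannels);
  webrtc::AudioFrameView<float> frame(view);
  // Channel accessors return MonoView<float> into the same buffer.
  frame[0][0] = 1.0f;
  frame.channel(1)[0] = -1.0f;
}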
+ template + explicit AudioFrameView(DeinterleavedView view) : view_(view) {} - rtc::ArrayView channel(int idx) { - RTC_DCHECK_LE(0, idx); - RTC_DCHECK_LE(idx, num_channels_); - return rtc::ArrayView(audio_samples_[idx], channel_size_); - } + AudioFrameView() = delete; - rtc::ArrayView channel(int idx) const { - RTC_DCHECK_LE(0, idx); - RTC_DCHECK_LE(idx, num_channels_); - return rtc::ArrayView(audio_samples_[idx], channel_size_); - } + int num_channels() const { return view_.num_channels(); } + int samples_per_channel() const { return view_.samples_per_channel(); } + MonoView channel(int idx) { return view_[idx]; } + MonoView channel(int idx) const { return view_[idx]; } + MonoView operator[](int idx) { return view_[idx]; } + MonoView operator[](int idx) const { return view_[idx]; } - T* const* data() { return audio_samples_; } + DeinterleavedView view() { return view_; } + DeinterleavedView view() const { return view_; } private: - T* const* audio_samples_; - int num_channels_; - int channel_size_; + DeinterleavedView view_; }; } // namespace webrtc diff --git a/modules/audio_processing/include/audio_processing.h b/modules/audio_processing/include/audio_processing.h index f613a38de1..fe938f868f 100644 --- a/modules/audio_processing/include/audio_processing.h +++ b/modules/audio_processing/include/audio_processing.h @@ -11,931 +11,8 @@ #ifndef MODULES_AUDIO_PROCESSING_INCLUDE_AUDIO_PROCESSING_H_ #define MODULES_AUDIO_PROCESSING_INCLUDE_AUDIO_PROCESSING_H_ -// MSVC++ requires this to be set before any other includes to get M_PI. -#ifndef _USE_MATH_DEFINES -#define _USE_MATH_DEFINES -#endif - -#include -#include // size_t -#include // FILE -#include - -#include - -#include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/array_view.h" -#include "api/audio/echo_canceller3_config.h" -#include "api/audio/echo_control.h" -#include "api/scoped_refptr.h" -#include "modules/audio_processing/include/audio_processing_statistics.h" -#include "rtc_base/arraysize.h" -#include "rtc_base/ref_count.h" -#include "rtc_base/system/file_wrapper.h" -#include "rtc_base/system/rtc_export.h" - -namespace rtc { -class TaskQueue; -} // namespace rtc - -namespace webrtc { - -class AecDump; -class AudioBuffer; - -class StreamConfig; -class ProcessingConfig; - -class EchoDetector; -class CustomAudioAnalyzer; -class CustomProcessing; - -// The Audio Processing Module (APM) provides a collection of voice processing -// components designed for real-time communications software. -// -// APM operates on two audio streams on a frame-by-frame basis. Frames of the -// primary stream, on which all processing is applied, are passed to -// `ProcessStream()`. Frames of the reverse direction stream are passed to -// `ProcessReverseStream()`. On the client-side, this will typically be the -// near-end (capture) and far-end (render) streams, respectively. APM should be -// placed in the signal chain as close to the audio hardware abstraction layer -// (HAL) as possible. -// -// On the server-side, the reverse stream will normally not be used, with -// processing occurring on each incoming stream. -// -// Component interfaces follow a similar pattern and are accessed through -// corresponding getters in APM. All components are disabled at create-time, -// with default settings that are recommended for most situations. New settings -// can be applied without enabling a component. Enabling a component triggers -// memory allocation and initialization to allow it to start processing the -// streams. 
-// -// Thread safety is provided with the following assumptions to reduce locking -// overhead: -// 1. The stream getters and setters are called from the same thread as -// ProcessStream(). More precisely, stream functions are never called -// concurrently with ProcessStream(). -// 2. Parameter getters are never called concurrently with the corresponding -// setter. -// -// APM accepts only linear PCM audio data in chunks of ~10 ms (see -// AudioProcessing::GetFrameSize() for details) and sample rates ranging from -// 8000 Hz to 384000 Hz. The int16 interfaces use interleaved data, while the -// float interfaces use deinterleaved data. -// -// Usage example, omitting error checking: -// rtc::scoped_refptr apm = AudioProcessingBuilder().Create(); -// -// AudioProcessing::Config config; -// config.echo_canceller.enabled = true; -// config.echo_canceller.mobile_mode = false; -// -// config.gain_controller1.enabled = true; -// config.gain_controller1.mode = -// AudioProcessing::Config::GainController1::kAdaptiveAnalog; -// config.gain_controller1.analog_level_minimum = 0; -// config.gain_controller1.analog_level_maximum = 255; -// -// config.gain_controller2.enabled = true; -// -// config.high_pass_filter.enabled = true; -// -// apm->ApplyConfig(config) -// -// // Start a voice call... -// -// // ... Render frame arrives bound for the audio HAL ... -// apm->ProcessReverseStream(render_frame); -// -// // ... Capture frame arrives from the audio HAL ... -// // Call required set_stream_ functions. -// apm->set_stream_delay_ms(delay_ms); -// apm->set_stream_analog_level(analog_level); -// -// apm->ProcessStream(capture_frame); -// -// // Call required stream_ functions. -// analog_level = apm->recommended_stream_analog_level(); -// has_voice = apm->stream_has_voice(); -// -// // Repeat render and capture processing for the duration of the call... -// // Start a new call... -// apm->Initialize(); -// -// // Close the application... -// apm.reset(); -// -class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { - public: - // The struct below constitutes the new parameter scheme for the audio - // processing. It is being introduced gradually and until it is fully - // introduced, it is prone to change. - // TODO(peah): Remove this comment once the new config scheme is fully rolled - // out. - // - // The parameters and behavior of the audio processing module are controlled - // by changing the default values in the AudioProcessing::Config struct. - // The config is applied by passing the struct to the ApplyConfig method. - // - // This config is intended to be used during setup, and to enable/disable - // top-level processing effects. Use during processing may cause undesired - // submodule resets, affecting the audio quality. Use the RuntimeSetting - // construct for runtime configuration. - struct RTC_EXPORT Config { - // Sets the properties of the audio processing pipeline. - struct RTC_EXPORT Pipeline { - // Ways to downmix a multi-channel track to mono. - enum class DownmixMethod { - kAverageChannels, // Average across channels. - kUseFirstChannel // Use the first channel. - }; - - // Maximum allowed processing rate used internally. May only be set to - // 32000 or 48000 and any differing values will be treated as 48000. - int maximum_internal_processing_rate = 48000; - // Allow multi-channel processing of render audio. - bool multi_channel_render = false; - // Allow multi-channel processing of capture audio when AEC3 is active - // or a custom AEC is injected.. 
- bool multi_channel_capture = false; - // Indicates how to downmix multi-channel capture audio to mono (when - // needed). - DownmixMethod capture_downmix_method = DownmixMethod::kAverageChannels; - } pipeline; - - // Enabled the pre-amplifier. It amplifies the capture signal - // before any other processing is done. - // TODO(webrtc:5298): Deprecate and use the pre-gain functionality in - // capture_level_adjustment instead. - struct PreAmplifier { - bool enabled = false; - float fixed_gain_factor = 1.0f; - } pre_amplifier; - - // Functionality for general level adjustment in the capture pipeline. This - // should not be used together with the legacy PreAmplifier functionality. - struct CaptureLevelAdjustment { - bool operator==(const CaptureLevelAdjustment& rhs) const; - bool operator!=(const CaptureLevelAdjustment& rhs) const { - return !(*this == rhs); - } - bool enabled = false; - // The `pre_gain_factor` scales the signal before any processing is done. - float pre_gain_factor = 1.0f; - // The `post_gain_factor` scales the signal after all processing is done. - float post_gain_factor = 1.0f; - struct AnalogMicGainEmulation { - bool operator==(const AnalogMicGainEmulation& rhs) const; - bool operator!=(const AnalogMicGainEmulation& rhs) const { - return !(*this == rhs); - } - bool enabled = false; - // Initial analog gain level to use for the emulated analog gain. Must - // be in the range [0...255]. - int initial_level = 255; - } analog_mic_gain_emulation; - } capture_level_adjustment; - - struct HighPassFilter { - bool enabled = false; - bool apply_in_full_band = true; - } high_pass_filter; - - struct EchoCanceller { - bool enabled = false; - bool mobile_mode = false; - bool export_linear_aec_output = false; - // Enforce the highpass filter to be on (has no effect for the mobile - // mode). - bool enforce_high_pass_filtering = true; - } echo_canceller; - - // Enables background noise suppression. - struct NoiseSuppression { - bool enabled = false; - enum Level { kLow, kModerate, kHigh, kVeryHigh }; - Level level = kModerate; - bool analyze_linear_aec_output_when_available = false; - } noise_suppression; - - // Enables transient suppression. - struct TransientSuppression { - bool enabled = false; - } transient_suppression; - - // Enables automatic gain control (AGC) functionality. - // The automatic gain control (AGC) component brings the signal to an - // appropriate range. This is done by applying a digital gain directly and, - // in the analog mode, prescribing an analog gain to be applied at the audio - // HAL. - // Recommended to be enabled on the client-side. - struct RTC_EXPORT GainController1 { - bool operator==(const GainController1& rhs) const; - bool operator!=(const GainController1& rhs) const { - return !(*this == rhs); - } - - bool enabled = false; - enum Mode { - // Adaptive mode intended for use if an analog volume control is - // available on the capture device. It will require the user to provide - // coupling between the OS mixer controls and AGC through the - // stream_analog_level() functions. - // It consists of an analog gain prescription for the audio device and a - // digital compression stage. - kAdaptiveAnalog, - // Adaptive mode intended for situations in which an analog volume - // control is unavailable. It operates in a similar fashion to the - // adaptive analog mode, but with scaling instead applied in the digital - // domain. As with the analog mode, it additionally uses a digital - // compression stage. 
- kAdaptiveDigital, - // Fixed mode which enables only the digital compression stage also used - // by the two adaptive modes. - // It is distinguished from the adaptive modes by considering only a - // short time-window of the input signal. It applies a fixed gain - // through most of the input level range, and compresses (gradually - // reduces gain with increasing level) the input signal at higher - // levels. This mode is preferred on embedded devices where the capture - // signal level is predictable, so that a known gain can be applied. - kFixedDigital - }; - Mode mode = kAdaptiveAnalog; - // Sets the target peak level (or envelope) of the AGC in dBFs (decibels - // from digital full-scale). The convention is to use positive values. For - // instance, passing in a value of 3 corresponds to -3 dBFs, or a target - // level 3 dB below full-scale. Limited to [0, 31]. - int target_level_dbfs = 3; - // Sets the maximum gain the digital compression stage may apply, in dB. A - // higher number corresponds to greater compression, while a value of 0 - // will leave the signal uncompressed. Limited to [0, 90]. - // For updates after APM setup, use a RuntimeSetting instead. - int compression_gain_db = 9; - // When enabled, the compression stage will hard limit the signal to the - // target level. Otherwise, the signal will be compressed but not limited - // above the target level. - bool enable_limiter = true; - - // Enables the analog gain controller functionality. - struct AnalogGainController { - bool enabled = true; - // TODO(bugs.webrtc.org/7494): Deprecated. Stop using and remove. - int startup_min_volume = 0; - // Lowest analog microphone level that will be applied in response to - // clipping. - int clipped_level_min = 70; - // If true, an adaptive digital gain is applied. - bool enable_digital_adaptive = true; - // Amount the microphone level is lowered with every clipping event. - // Limited to (0, 255]. - int clipped_level_step = 15; - // Proportion of clipped samples required to declare a clipping event. - // Limited to (0.f, 1.f). - float clipped_ratio_threshold = 0.1f; - // Time in frames to wait after a clipping event before checking again. - // Limited to values higher than 0. - int clipped_wait_frames = 300; - - // Enables clipping prediction functionality. - struct ClippingPredictor { - bool enabled = false; - enum Mode { - // Clipping event prediction mode with fixed step estimation. - kClippingEventPrediction, - // Clipped peak estimation mode with adaptive step estimation. - kAdaptiveStepClippingPeakPrediction, - // Clipped peak estimation mode with fixed step estimation. - kFixedStepClippingPeakPrediction, - }; - Mode mode = kClippingEventPrediction; - // Number of frames in the sliding analysis window. - int window_length = 5; - // Number of frames in the sliding reference window. - int reference_window_length = 5; - // Reference window delay (unit: number of frames). - int reference_window_delay = 5; - // Clipping prediction threshold (dBFS). - float clipping_threshold = -1.0f; - // Crest factor drop threshold (dB). - float crest_factor_margin = 3.0f; - // If true, the recommended clipped level step is used to modify the - // analog gain. Otherwise, the predictor runs without affecting the - // analog gain. - bool use_predicted_step = true; - } clipping_predictor; - } analog_gain_controller; - } gain_controller1; - - // Parameters for AGC2, an Automatic Gain Control (AGC) sub-module which - // replaces the AGC sub-module parametrized by `gain_controller1`. 
- // AGC2 brings the captured audio signal to the desired level by combining - // three different controllers (namely, input volume controller, adapative - // digital controller and fixed digital controller) and a limiter. - // TODO(bugs.webrtc.org:7494): Name `GainController` when AGC1 removed. - struct RTC_EXPORT GainController2 { - bool operator==(const GainController2& rhs) const; - bool operator!=(const GainController2& rhs) const { - return !(*this == rhs); - } - - // AGC2 must be created if and only if `enabled` is true. - bool enabled = false; - - // Parameters for the input volume controller, which adjusts the input - // volume applied when the audio is captured (e.g., microphone volume on - // a soundcard, input volume on HAL). - struct InputVolumeController { - bool operator==(const InputVolumeController& rhs) const; - bool operator!=(const InputVolumeController& rhs) const { - return !(*this == rhs); - } - bool enabled = false; - } input_volume_controller; - - // Parameters for the adaptive digital controller, which adjusts and - // applies a digital gain after echo cancellation and after noise - // suppression. - struct RTC_EXPORT AdaptiveDigital { - bool operator==(const AdaptiveDigital& rhs) const; - bool operator!=(const AdaptiveDigital& rhs) const { - return !(*this == rhs); - } - bool enabled = false; - float headroom_db = 6.0f; - float max_gain_db = 30.0f; - float initial_gain_db = 8.0f; - float max_gain_change_db_per_second = 3.0f; - float max_output_noise_level_dbfs = -50.0f; - } adaptive_digital; - - // Parameters for the fixed digital controller, which applies a fixed - // digital gain after the adaptive digital controller and before the - // limiter. - struct FixedDigital { - // By setting `gain_db` to a value greater than zero, the limiter can be - // turned into a compressor that first applies a fixed gain. - float gain_db = 0.0f; - } fixed_digital; - } gain_controller2; - - std::string ToString() const; - }; - - // Specifies the properties of a setting to be passed to AudioProcessing at - // runtime. - class RuntimeSetting { - public: - enum class Type { - kNotSpecified, - kCapturePreGain, - kCaptureCompressionGain, - kCaptureFixedPostGain, - kPlayoutVolumeChange, - kCustomRenderProcessingRuntimeSetting, - kPlayoutAudioDeviceChange, - kCapturePostGain, - kCaptureOutputUsed - }; - - // Play-out audio device properties. - struct PlayoutAudioDeviceInfo { - int id; // Identifies the audio device. - int max_volume; // Maximum play-out volume. - }; - - RuntimeSetting() : type_(Type::kNotSpecified), value_(0.0f) {} - ~RuntimeSetting() = default; - - static RuntimeSetting CreateCapturePreGain(float gain) { - return {Type::kCapturePreGain, gain}; - } - - static RuntimeSetting CreateCapturePostGain(float gain) { - return {Type::kCapturePostGain, gain}; - } - - // Corresponds to Config::GainController1::compression_gain_db, but for - // runtime configuration. - static RuntimeSetting CreateCompressionGainDb(int gain_db) { - RTC_DCHECK_GE(gain_db, 0); - RTC_DCHECK_LE(gain_db, 90); - return {Type::kCaptureCompressionGain, static_cast(gain_db)}; - } - - // Corresponds to Config::GainController2::fixed_digital::gain_db, but for - // runtime configuration. - static RuntimeSetting CreateCaptureFixedPostGain(float gain_db) { - RTC_DCHECK_GE(gain_db, 0.0f); - RTC_DCHECK_LE(gain_db, 90.0f); - return {Type::kCaptureFixedPostGain, gain_db}; - } - - // Creates a runtime setting to notify play-out (aka render) audio device - // changes. 
- static RuntimeSetting CreatePlayoutAudioDeviceChange( - PlayoutAudioDeviceInfo audio_device) { - return {Type::kPlayoutAudioDeviceChange, audio_device}; - } - - // Creates a runtime setting to notify play-out (aka render) volume changes. - // `volume` is the unnormalized volume, the maximum of which - static RuntimeSetting CreatePlayoutVolumeChange(int volume) { - return {Type::kPlayoutVolumeChange, volume}; - } - - static RuntimeSetting CreateCustomRenderSetting(float payload) { - return {Type::kCustomRenderProcessingRuntimeSetting, payload}; - } - - static RuntimeSetting CreateCaptureOutputUsedSetting( - bool capture_output_used) { - return {Type::kCaptureOutputUsed, capture_output_used}; - } - - Type type() const { return type_; } - // Getters do not return a value but instead modify the argument to protect - // from implicit casting. - void GetFloat(float* value) const { - RTC_DCHECK(value); - *value = value_.float_value; - } - void GetInt(int* value) const { - RTC_DCHECK(value); - *value = value_.int_value; - } - void GetBool(bool* value) const { - RTC_DCHECK(value); - *value = value_.bool_value; - } - void GetPlayoutAudioDeviceInfo(PlayoutAudioDeviceInfo* value) const { - RTC_DCHECK(value); - *value = value_.playout_audio_device_info; - } - - private: - RuntimeSetting(Type id, float value) : type_(id), value_(value) {} - RuntimeSetting(Type id, int value) : type_(id), value_(value) {} - RuntimeSetting(Type id, PlayoutAudioDeviceInfo value) - : type_(id), value_(value) {} - Type type_; - union U { - U() {} - U(int value) : int_value(value) {} - U(float value) : float_value(value) {} - U(PlayoutAudioDeviceInfo value) : playout_audio_device_info(value) {} - float float_value; - int int_value; - bool bool_value; - PlayoutAudioDeviceInfo playout_audio_device_info; - } value_; - }; - - ~AudioProcessing() override {} - - // Initializes internal states, while retaining all user settings. This - // should be called before beginning to process a new audio stream. However, - // it is not necessary to call before processing the first stream after - // creation. - // - // It is also not necessary to call if the audio parameters (sample - // rate and number of channels) have changed. Passing updated parameters - // directly to `ProcessStream()` and `ProcessReverseStream()` is permissible. - // If the parameters are known at init-time though, they may be provided. - // TODO(webrtc:5298): Change to return void. - virtual int Initialize() = 0; - - // The int16 interfaces require: - // - only `NativeRate`s be used - // - that the input, output and reverse rates must match - // - that `processing_config.output_stream()` matches - // `processing_config.input_stream()`. - // - // The float interfaces accept arbitrary rates and support differing input and - // output layouts, but the output must have either one channel or the same - // number of channels as the input. - virtual int Initialize(const ProcessingConfig& processing_config) = 0; - - // TODO(peah): This method is a temporary solution used to take control - // over the parameters in the audio processing module and is likely to change. - virtual void ApplyConfig(const Config& config) = 0; - - // TODO(ajm): Only intended for internal use. Make private and friend the - // necessary classes? 
- virtual int proc_sample_rate_hz() const = 0; - virtual int proc_split_sample_rate_hz() const = 0; - virtual size_t num_input_channels() const = 0; - virtual size_t num_proc_channels() const = 0; - virtual size_t num_output_channels() const = 0; - virtual size_t num_reverse_channels() const = 0; - - // Set to true when the output of AudioProcessing will be muted or in some - // other way not used. Ideally, the captured audio would still be processed, - // but some components may change behavior based on this information. - // Default false. This method takes a lock. To achieve this in a lock-less - // manner the PostRuntimeSetting can instead be used. - virtual void set_output_will_be_muted(bool muted) = 0; - - // Enqueues a runtime setting. - virtual void SetRuntimeSetting(RuntimeSetting setting) = 0; - - // Enqueues a runtime setting. Returns a bool indicating whether the - // enqueueing was successfull. - virtual bool PostRuntimeSetting(RuntimeSetting setting) = 0; - - // Accepts and produces a ~10 ms frame of interleaved 16 bit integer audio as - // specified in `input_config` and `output_config`. `src` and `dest` may use - // the same memory, if desired. - virtual int ProcessStream(const int16_t* const src, - const StreamConfig& input_config, - const StreamConfig& output_config, - int16_t* const dest) = 0; - - // Accepts deinterleaved float audio with the range [-1, 1]. Each element of - // `src` points to a channel buffer, arranged according to `input_stream`. At - // output, the channels will be arranged according to `output_stream` in - // `dest`. - // - // The output must have one channel or as many channels as the input. `src` - // and `dest` may use the same memory, if desired. - virtual int ProcessStream(const float* const* src, - const StreamConfig& input_config, - const StreamConfig& output_config, - float* const* dest) = 0; - - // Accepts and produces a ~10 ms frame of interleaved 16 bit integer audio for - // the reverse direction audio stream as specified in `input_config` and - // `output_config`. `src` and `dest` may use the same memory, if desired. - virtual int ProcessReverseStream(const int16_t* const src, - const StreamConfig& input_config, - const StreamConfig& output_config, - int16_t* const dest) = 0; - - // Accepts deinterleaved float audio with the range [-1, 1]. Each element of - // `data` points to a channel buffer, arranged according to `reverse_config`. - virtual int ProcessReverseStream(const float* const* src, - const StreamConfig& input_config, - const StreamConfig& output_config, - float* const* dest) = 0; - - // Accepts deinterleaved float audio with the range [-1, 1]. Each element - // of `data` points to a channel buffer, arranged according to - // `reverse_config`. - virtual int AnalyzeReverseStream(const float* const* data, - const StreamConfig& reverse_config) = 0; - - // Returns the most recently produced ~10 ms of the linear AEC output at a - // rate of 16 kHz. If there is more than one capture channel, a mono - // representation of the input is returned. Returns true/false to indicate - // whether an output returned. - virtual bool GetLinearAecOutput( - rtc::ArrayView> linear_output) const = 0; - - // This must be called prior to ProcessStream() if and only if adaptive analog - // gain control is enabled, to pass the current analog level from the audio - // HAL. Must be within the range [0, 255]. 
- virtual void set_stream_analog_level(int level) = 0; - - // When an analog mode is set, this should be called after - // `set_stream_analog_level()` and `ProcessStream()` to obtain the recommended - // new analog level for the audio HAL. It is the user's responsibility to - // apply this level. - virtual int recommended_stream_analog_level() const = 0; - - // This must be called if and only if echo processing is enabled. - // - // Sets the `delay` in ms between ProcessReverseStream() receiving a far-end - // frame and ProcessStream() receiving a near-end frame containing the - // corresponding echo. On the client-side this can be expressed as - // delay = (t_render - t_analyze) + (t_process - t_capture) - // where, - // - t_analyze is the time a frame is passed to ProcessReverseStream() and - // t_render is the time the first sample of the same frame is rendered by - // the audio hardware. - // - t_capture is the time the first sample of a frame is captured by the - // audio hardware and t_process is the time the same frame is passed to - // ProcessStream(). - virtual int set_stream_delay_ms(int delay) = 0; - virtual int stream_delay_ms() const = 0; - - // Call to signal that a key press occurred (true) or did not occur (false) - // with this chunk of audio. - virtual void set_stream_key_pressed(bool key_pressed) = 0; - - // Creates and attaches an webrtc::AecDump for recording debugging - // information. - // The `worker_queue` may not be null and must outlive the created - // AecDump instance. |max_log_size_bytes == -1| means the log size - // will be unlimited. `handle` may not be null. The AecDump takes - // responsibility for `handle` and closes it in the destructor. A - // return value of true indicates that the file has been - // sucessfully opened, while a value of false indicates that - // opening the file failed. - virtual bool CreateAndAttachAecDump(absl::string_view file_name, - int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue) = 0; - virtual bool CreateAndAttachAecDump(FILE* handle, - int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue) = 0; - - // TODO(webrtc:5298) Deprecated variant. - // Attaches provided webrtc::AecDump for recording debugging - // information. Log file and maximum file size logic is supposed to - // be handled by implementing instance of AecDump. Calling this - // method when another AecDump is attached resets the active AecDump - // with a new one. This causes the d-tor of the earlier AecDump to - // be called. The d-tor call may block until all pending logging - // tasks are completed. - virtual void AttachAecDump(std::unique_ptr aec_dump) = 0; - - // If no AecDump is attached, this has no effect. If an AecDump is - // attached, it's destructor is called. The d-tor may block until - // all pending logging tasks are completed. - virtual void DetachAecDump() = 0; - - // Get audio processing statistics. - virtual AudioProcessingStats GetStatistics() = 0; - // TODO(webrtc:5298) Deprecated variant. The `has_remote_tracks` argument - // should be set if there are active remote tracks (this would usually be true - // during a call). If there are no remote tracks some of the stats will not be - // set by AudioProcessing, because they only make sense if there is at least - // one remote track. - virtual AudioProcessingStats GetStatistics(bool has_remote_tracks) = 0; - - // Returns the last applied configuration. - virtual AudioProcessing::Config GetConfig() const = 0; - - enum Error { - // Fatal errors. 
- kNoError = 0, - kUnspecifiedError = -1, - kCreationFailedError = -2, - kUnsupportedComponentError = -3, - kUnsupportedFunctionError = -4, - kNullPointerError = -5, - kBadParameterError = -6, - kBadSampleRateError = -7, - kBadDataLengthError = -8, - kBadNumberChannelsError = -9, - kFileError = -10, - kStreamParameterNotSetError = -11, - kNotEnabledError = -12, - - // Warnings are non-fatal. - // This results when a set_stream_ parameter is out of range. Processing - // will continue, but the parameter may have been truncated. - kBadStreamParameterWarning = -13 - }; - - // Native rates supported by the integer interfaces. - enum NativeRate { - kSampleRate8kHz = 8000, - kSampleRate16kHz = 16000, - kSampleRate32kHz = 32000, - kSampleRate48kHz = 48000 - }; - - // TODO(kwiberg): We currently need to support a compiler (Visual C++) that - // complains if we don't explicitly state the size of the array here. Remove - // the size when that's no longer the case. - static constexpr int kNativeSampleRatesHz[4] = { - kSampleRate8kHz, kSampleRate16kHz, kSampleRate32kHz, kSampleRate48kHz}; - static constexpr size_t kNumNativeSampleRates = - arraysize(kNativeSampleRatesHz); - static constexpr int kMaxNativeSampleRateHz = - kNativeSampleRatesHz[kNumNativeSampleRates - 1]; - - // APM processes audio in chunks of about 10 ms. See GetFrameSize() for - // details. - static constexpr int kChunkSizeMs = 10; - - // Returns floor(sample_rate_hz/100): the number of samples per channel used - // as input and output to the audio processing module in calls to - // ProcessStream, ProcessReverseStream, AnalyzeReverseStream, and - // GetLinearAecOutput. - // - // This is exactly 10 ms for sample rates divisible by 100. For example: - // - 48000 Hz (480 samples per channel), - // - 44100 Hz (441 samples per channel), - // - 16000 Hz (160 samples per channel). - // - // Sample rates not divisible by 100 are received/produced in frames of - // approximately 10 ms. For example: - // - 22050 Hz (220 samples per channel, or ~9.98 ms per frame), - // - 11025 Hz (110 samples per channel, or ~9.98 ms per frame). - // These nondivisible sample rates yield lower audio quality compared to - // multiples of 100. Internal resampling to 10 ms frames causes a simulated - // clock drift effect which impacts the performance of (for example) echo - // cancellation. - static int GetFrameSize(int sample_rate_hz) { return sample_rate_hz / 100; } -}; - -class RTC_EXPORT AudioProcessingBuilder { - public: - AudioProcessingBuilder(); - AudioProcessingBuilder(const AudioProcessingBuilder&) = delete; - AudioProcessingBuilder& operator=(const AudioProcessingBuilder&) = delete; - ~AudioProcessingBuilder(); - - // Sets the APM configuration. - AudioProcessingBuilder& SetConfig(const AudioProcessing::Config& config) { - config_ = config; - return *this; - } - - // Sets the echo controller factory to inject when APM is created. - AudioProcessingBuilder& SetEchoControlFactory( - std::unique_ptr echo_control_factory) { - echo_control_factory_ = std::move(echo_control_factory); - return *this; - } - - // Sets the capture post-processing sub-module to inject when APM is created. - AudioProcessingBuilder& SetCapturePostProcessing( - std::unique_ptr capture_post_processing) { - capture_post_processing_ = std::move(capture_post_processing); - return *this; - } - - // Sets the render pre-processing sub-module to inject when APM is created. 
- AudioProcessingBuilder& SetRenderPreProcessing( - std::unique_ptr render_pre_processing) { - render_pre_processing_ = std::move(render_pre_processing); - return *this; - } - - // Sets the echo detector to inject when APM is created. - AudioProcessingBuilder& SetEchoDetector( - rtc::scoped_refptr echo_detector) { - echo_detector_ = std::move(echo_detector); - return *this; - } - - // Sets the capture analyzer sub-module to inject when APM is created. - AudioProcessingBuilder& SetCaptureAnalyzer( - std::unique_ptr capture_analyzer) { - capture_analyzer_ = std::move(capture_analyzer); - return *this; - } - - // Creates an APM instance with the specified config or the default one if - // unspecified. Injects the specified components transferring the ownership - // to the newly created APM instance - i.e., except for the config, the - // builder is reset to its initial state. - rtc::scoped_refptr Create(); - - private: - AudioProcessing::Config config_; - std::unique_ptr echo_control_factory_; - std::unique_ptr capture_post_processing_; - std::unique_ptr render_pre_processing_; - rtc::scoped_refptr echo_detector_; - std::unique_ptr capture_analyzer_; -}; - -class StreamConfig { - public: - // sample_rate_hz: The sampling rate of the stream. - // num_channels: The number of audio channels in the stream. - StreamConfig(int sample_rate_hz = 0, size_t num_channels = 0) - : sample_rate_hz_(sample_rate_hz), - num_channels_(num_channels), - num_frames_(calculate_frames(sample_rate_hz)) {} - - void set_sample_rate_hz(int value) { - sample_rate_hz_ = value; - num_frames_ = calculate_frames(value); - } - void set_num_channels(size_t value) { num_channels_ = value; } - - int sample_rate_hz() const { return sample_rate_hz_; } - - // The number of channels in the stream. 
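For orientation, a hedged sketch of how the builder documented above is typically used; the config fields are assumed to exist as named, and the include relies on the transitional forwarding header introduced by this patch.

```cpp
// Hedged usage sketch; config field names are assumptions, not taken from the
// hunks above.
#include "modules/audio_processing/include/audio_processing.h"

rtc::scoped_refptr<webrtc::AudioProcessing> CreateConfiguredApm() {
  webrtc::AudioProcessing::Config config;
  config.echo_canceller.enabled = true;     // Assumed config field.
  config.noise_suppression.enabled = true;  // Assumed config field.
  return webrtc::AudioProcessingBuilder().SetConfig(config).Create();
}
```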
- size_t num_channels() const { return num_channels_; } - - size_t num_frames() const { return num_frames_; } - size_t num_samples() const { return num_channels_ * num_frames_; } - - bool operator==(const StreamConfig& other) const { - return sample_rate_hz_ == other.sample_rate_hz_ && - num_channels_ == other.num_channels_; - } - - bool operator!=(const StreamConfig& other) const { return !(*this == other); } - - private: - static size_t calculate_frames(int sample_rate_hz) { - return static_cast(AudioProcessing::GetFrameSize(sample_rate_hz)); - } - - int sample_rate_hz_; - size_t num_channels_; - size_t num_frames_; -}; - -class ProcessingConfig { - public: - enum StreamName { - kInputStream, - kOutputStream, - kReverseInputStream, - kReverseOutputStream, - kNumStreamNames, - }; - - const StreamConfig& input_stream() const { - return streams[StreamName::kInputStream]; - } - const StreamConfig& output_stream() const { - return streams[StreamName::kOutputStream]; - } - const StreamConfig& reverse_input_stream() const { - return streams[StreamName::kReverseInputStream]; - } - const StreamConfig& reverse_output_stream() const { - return streams[StreamName::kReverseOutputStream]; - } - - StreamConfig& input_stream() { return streams[StreamName::kInputStream]; } - StreamConfig& output_stream() { return streams[StreamName::kOutputStream]; } - StreamConfig& reverse_input_stream() { - return streams[StreamName::kReverseInputStream]; - } - StreamConfig& reverse_output_stream() { - return streams[StreamName::kReverseOutputStream]; - } - - bool operator==(const ProcessingConfig& other) const { - for (int i = 0; i < StreamName::kNumStreamNames; ++i) { - if (this->streams[i] != other.streams[i]) { - return false; - } - } - return true; - } - - bool operator!=(const ProcessingConfig& other) const { - return !(*this == other); - } - - StreamConfig streams[StreamName::kNumStreamNames]; -}; - -// Experimental interface for a custom analysis submodule. -class CustomAudioAnalyzer { - public: - // (Re-) Initializes the submodule. - virtual void Initialize(int sample_rate_hz, int num_channels) = 0; - // Analyzes the given capture or render signal. - virtual void Analyze(const AudioBuffer* audio) = 0; - // Returns a string representation of the module state. - virtual std::string ToString() const = 0; - - virtual ~CustomAudioAnalyzer() {} -}; - -// Interface for a custom processing submodule. -class CustomProcessing { - public: - // (Re-)Initializes the submodule. - virtual void Initialize(int sample_rate_hz, int num_channels) = 0; - // Processes the given capture or render signal. - virtual void Process(AudioBuffer* audio) = 0; - // Returns a string representation of the module state. - virtual std::string ToString() const = 0; - // Handles RuntimeSettings. TODO(webrtc:9262): make pure virtual - // after updating dependencies. - virtual void SetRuntimeSetting(AudioProcessing::RuntimeSetting setting); - - virtual ~CustomProcessing() {} -}; - -// Interface for an echo detector submodule. -class EchoDetector : public rtc::RefCountInterface { - public: - // (Re-)Initializes the submodule. - virtual void Initialize(int capture_sample_rate_hz, - int num_capture_channels, - int render_sample_rate_hz, - int num_render_channels) = 0; - - // Analysis (not changing) of the first channel of the render signal. - virtual void AnalyzeRenderAudio(rtc::ArrayView render_audio) = 0; - - // Analysis (not changing) of the capture signal. 
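A brief sketch of how the StreamConfig and ProcessingConfig types shown above fit together; the rates and channel counts are illustrative.

```cpp
// Illustrative rates and channel counts; relies on the transitional forwarding
// header for the type definitions shown above.
#include "modules/audio_processing/include/audio_processing.h"

webrtc::ProcessingConfig MakeStereoCaptureMonoRenderConfig() {
  webrtc::ProcessingConfig processing_config;
  processing_config.input_stream() = webrtc::StreamConfig(48000, 2);
  processing_config.output_stream() = webrtc::StreamConfig(48000, 2);
  processing_config.reverse_input_stream() = webrtc::StreamConfig(48000, 1);
  processing_config.reverse_output_stream() = webrtc::StreamConfig(48000, 1);
  // At 48 kHz each StreamConfig reports 480 frames per ~10 ms chunk.
  return processing_config;
}
```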
- virtual void AnalyzeCaptureAudio( - rtc::ArrayView capture_audio) = 0; - - struct Metrics { - absl::optional echo_likelihood; - absl::optional echo_likelihood_recent_max; - }; - - // Collect current metrics from the echo detector. - virtual Metrics GetMetrics() const = 0; -}; - -} // namespace webrtc +// This is a transitional header forwarding to the new version in the api/ +// folder. +#include "api/audio/audio_processing.h" #endif // MODULES_AUDIO_PROCESSING_INCLUDE_AUDIO_PROCESSING_H_ diff --git a/modules/audio_processing/include/audio_processing_statistics.h b/modules/audio_processing/include/audio_processing_statistics.h index 3b43319951..594d3f5a4d 100644 --- a/modules/audio_processing/include/audio_processing_statistics.h +++ b/modules/audio_processing/include/audio_processing_statistics.h @@ -11,57 +11,8 @@ #ifndef MODULES_AUDIO_PROCESSING_INCLUDE_AUDIO_PROCESSING_STATISTICS_H_ #define MODULES_AUDIO_PROCESSING_INCLUDE_AUDIO_PROCESSING_STATISTICS_H_ -#include - -#include "absl/types/optional.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { -// This version of the stats uses Optionals, it will replace the regular -// AudioProcessingStatistics struct. -struct RTC_EXPORT AudioProcessingStats { - AudioProcessingStats(); - AudioProcessingStats(const AudioProcessingStats& other); - ~AudioProcessingStats(); - - // Deprecated. - // TODO(bugs.webrtc.org/11226): Remove. - // True if voice is detected in the last capture frame, after processing. - // It is conservative in flagging audio as speech, with low likelihood of - // incorrectly flagging a frame as voice. - // Only reported if voice detection is enabled in AudioProcessing::Config. - absl::optional voice_detected; - - // AEC Statistics. - // ERL = 10log_10(P_far / P_echo) - absl::optional echo_return_loss; - // ERLE = 10log_10(P_echo / P_out) - absl::optional echo_return_loss_enhancement; - // Fraction of time that the AEC linear filter is divergent, in a 1-second - // non-overlapped aggregation window. - absl::optional divergent_filter_fraction; - - // The delay metrics consists of the delay median and standard deviation. It - // also consists of the fraction of delay estimates that can make the echo - // cancellation perform poorly. The values are aggregated until the first - // call to `GetStatistics()` and afterwards aggregated and updated every - // second. Note that if there are several clients pulling metrics from - // `GetStatistics()` during a session the first call from any of them will - // change to one second aggregation window for all. - absl::optional delay_median_ms; - absl::optional delay_standard_deviation_ms; - - // Residual echo detector likelihood. - absl::optional residual_echo_likelihood; - // Maximum residual echo likelihood from the last time period. - absl::optional residual_echo_likelihood_recent_max; - - // The instantaneous delay estimate produced in the AEC. The unit is in - // milliseconds and the value is the instantaneous value at the time of the - // call to `GetStatistics()`. - absl::optional delay_ms; -}; - -} // namespace webrtc +// This is a transitional header forwarding to the new version in the api/ +// folder. 
+#include "api/audio/audio_processing_statistics.h" #endif // MODULES_AUDIO_PROCESSING_INCLUDE_AUDIO_PROCESSING_STATISTICS_H_ diff --git a/modules/audio_processing/include/mock_audio_processing.h b/modules/audio_processing/include/mock_audio_processing.h index 2ea1a865c3..805aa411e8 100644 --- a/modules/audio_processing/include/mock_audio_processing.h +++ b/modules/audio_processing/include/mock_audio_processing.h @@ -13,10 +13,14 @@ #include +#include "absl/base/nullability.h" #include "absl/strings/string_view.h" +#include "api/audio/audio_processing.h" +#include "api/audio/audio_processing_statistics.h" +#include "api/environment/environment.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" #include "modules/audio_processing/include/aec_dump.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "modules/audio_processing/include/audio_processing_statistics.h" #include "test/gmock.h" namespace webrtc { @@ -80,11 +84,11 @@ class MockEchoDetector : public EchoDetector { (override)); MOCK_METHOD(void, AnalyzeRenderAudio, - (rtc::ArrayView render_audio), + (webrtc::ArrayView render_audio), (override)); MOCK_METHOD(void, AnalyzeCaptureAudio, - (rtc::ArrayView capture_audio), + (webrtc::ArrayView capture_audio), (override)); MOCK_METHOD(Metrics, GetMetrics, (), (const, override)); }; @@ -144,7 +148,7 @@ class MockAudioProcessing : public AudioProcessing { (override)); MOCK_METHOD(bool, GetLinearAecOutput, - ((rtc::ArrayView> linear_output)), + ((webrtc::ArrayView> linear_output)), (const, override)); MOCK_METHOD(int, set_stream_delay_ms, (int delay), (override)); MOCK_METHOD(int, stream_delay_ms, (), (const, override)); @@ -155,13 +159,13 @@ class MockAudioProcessing : public AudioProcessing { CreateAndAttachAecDump, (absl::string_view file_name, int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue), + TaskQueueBase* absl_nonnull worker_queue), (override)); MOCK_METHOD(bool, CreateAndAttachAecDump, (FILE * handle, int64_t max_log_size_bytes, - rtc::TaskQueue* worker_queue), + TaskQueueBase* absl_nonnull worker_queue), (override)); MOCK_METHOD(void, AttachAecDump, (std::unique_ptr), (override)); MOCK_METHOD(void, DetachAecDump, (), (override)); @@ -172,6 +176,14 @@ class MockAudioProcessing : public AudioProcessing { MOCK_METHOD(AudioProcessing::Config, GetConfig, (), (const, override)); }; +class MockAudioProcessingBuilder : public AudioProcessingBuilderInterface { + public: + MOCK_METHOD(scoped_refptr, + Build, + (const Environment&), + (override)); +}; + } // namespace test } // namespace webrtc diff --git a/modules/audio_processing/logging/apm_data_dumper.cc b/modules/audio_processing/logging/apm_data_dumper.cc index 65d2167d37..ec2cc7f958 100644 --- a/modules/audio_processing/logging/apm_data_dumper.cc +++ b/modules/audio_processing/logging/apm_data_dumper.cc @@ -36,7 +36,7 @@ std::string FormFileName(absl::string_view output_dir, int reinit_index, absl::string_view suffix) { char buf[1024]; - rtc::SimpleStringBuilder ss(buf); + webrtc::SimpleStringBuilder ss(buf); if (!output_dir.empty()) { ss << output_dir; if (output_dir.back() != kPathDelimiter) { @@ -54,14 +54,14 @@ std::string FormFileName(absl::string_view output_dir, ApmDataDumper::ApmDataDumper(int instance_index) : instance_index_(instance_index) {} #else -ApmDataDumper::ApmDataDumper(int instance_index) {} +ApmDataDumper::ApmDataDumper(int /* instance_index */) {} #endif ApmDataDumper::~ApmDataDumper() = default; #if WEBRTC_APM_DEBUG_DUMP == 1 bool 
ApmDataDumper::recording_activated_ = false; -absl::optional ApmDataDumper::dump_set_to_use_; +std::optional ApmDataDumper::dump_set_to_use_; char ApmDataDumper::output_dir_[] = ""; FILE* ApmDataDumper::GetRawFile(absl::string_view name) { diff --git a/modules/audio_processing/logging/apm_data_dumper.h b/modules/audio_processing/logging/apm_data_dumper.h index 4ab6baad83..a8178bf151 100644 --- a/modules/audio_processing/logging/apm_data_dumper.h +++ b/modules/audio_processing/logging/apm_data_dumper.h @@ -20,8 +20,9 @@ #include #endif +#include + #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #if WEBRTC_APM_DEBUG_DUMP == 1 #include "common_audio/wav_file.h" @@ -60,7 +61,7 @@ class ApmDataDumper { ~ApmDataDumper(); // Activates or deactivate the dumping functionality. - static void SetActivated(bool activated) { + static void SetActivated([[maybe_unused]] bool activated) { #if WEBRTC_APM_DEBUG_DUMP == 1 recording_activated_ = activated; #endif @@ -81,17 +82,18 @@ class ApmDataDumper { // Specifies what dump set to use. All dump commands with a different dump set // than the one specified will be discarded. If not specificed, all dump sets // will be used. - static void SetDumpSetToUse(int dump_set_to_use) { + static void SetDumpSetToUse([[maybe_unused]] int dump_set_to_use) { #if WEBRTC_APM_DEBUG_DUMP == 1 dump_set_to_use_ = dump_set_to_use; #endif } // Set an optional output directory. - static void SetOutputDirectory(absl::string_view output_dir) { + static void SetOutputDirectory( + [[maybe_unused]] absl::string_view output_dir) { #if WEBRTC_APM_DEBUG_DUMP == 1 RTC_CHECK_LT(output_dir.size(), kOutputDirMaxLength); - rtc::strcpyn(output_dir_, output_dir.size(), output_dir); + webrtc::strcpyn(output_dir_, kOutputDirMaxLength, output_dir); #endif } @@ -105,9 +107,9 @@ class ApmDataDumper { // Methods for performing dumping of data of various types into // various formats. 
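The SetActivated/SetDumpSetToUse/SetOutputDirectory hunks above, and the DumpRaw/DumpWav hunks that follow, all apply the same fix; a condensed, self-contained sketch of that pattern, using a stand-in macro and class name, is:

```cpp
// Stand-in names only; mirrors the pattern above, where the body compiles
// away when dumping is disabled and [[maybe_unused]] keeps the parameters
// warning-clean in that configuration.
#include <cstdio>

#define EXAMPLE_DEBUG_DUMP 0  // Stand-in for WEBRTC_APM_DEBUG_DUMP.

class ExampleDumper {
 public:
  void DumpRaw([[maybe_unused]] const char* name, [[maybe_unused]] float v) {
#if EXAMPLE_DEBUG_DUMP == 1
    std::printf("%s: %f\n", name, v);
#endif
  }
};
```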
- void DumpRaw(absl::string_view name, - double v, - int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] double v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -119,10 +121,10 @@ class ApmDataDumper { #endif } - void DumpRaw(absl::string_view name, - size_t v_length, - const double* v, - int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] size_t v_length, + [[maybe_unused]] const double* v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -134,9 +136,9 @@ class ApmDataDumper { #endif } - void DumpRaw(absl::string_view name, - rtc::ArrayView v, - int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] ArrayView v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -147,9 +149,9 @@ class ApmDataDumper { #endif } - void DumpRaw(absl::string_view name, - float v, - int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] float v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -161,10 +163,10 @@ class ApmDataDumper { #endif } - void DumpRaw(absl::string_view name, - size_t v_length, - const float* v, - int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] size_t v_length, + [[maybe_unused]] const float* v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -176,9 +178,9 @@ class ApmDataDumper { #endif } - void DumpRaw(absl::string_view name, - rtc::ArrayView v, - int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] ArrayView v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -189,7 +191,9 @@ class ApmDataDumper { #endif } - void DumpRaw(absl::string_view name, bool v, int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] bool v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -200,10 +204,10 @@ class ApmDataDumper { #endif } - void DumpRaw(absl::string_view name, - size_t v_length, - const bool* v, - int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] size_t v_length, + [[maybe_unused]] const bool* v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -218,9 +222,9 @@ class ApmDataDumper { #endif } - void DumpRaw(absl::string_view name, - rtc::ArrayView v, - int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] ArrayView v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -231,9 +235,9 @@ class ApmDataDumper { 
#endif } - void DumpRaw(absl::string_view name, - int16_t v, - int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] int16_t v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -245,10 +249,10 @@ class ApmDataDumper { #endif } - void DumpRaw(absl::string_view name, - size_t v_length, - const int16_t* v, - int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] size_t v_length, + [[maybe_unused]] const int16_t* v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -260,9 +264,9 @@ class ApmDataDumper { #endif } - void DumpRaw(absl::string_view name, - rtc::ArrayView v, - int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] ArrayView v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -273,9 +277,9 @@ class ApmDataDumper { #endif } - void DumpRaw(absl::string_view name, - int32_t v, - int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] int32_t v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -287,10 +291,10 @@ class ApmDataDumper { #endif } - void DumpRaw(absl::string_view name, - size_t v_length, - const int32_t* v, - int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] size_t v_length, + [[maybe_unused]] const int32_t* v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -302,9 +306,9 @@ class ApmDataDumper { #endif } - void DumpRaw(absl::string_view name, - size_t v, - int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] size_t v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -316,10 +320,10 @@ class ApmDataDumper { #endif } - void DumpRaw(absl::string_view name, - size_t v_length, - const size_t* v, - int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] size_t v_length, + [[maybe_unused]] const size_t* v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -331,9 +335,9 @@ class ApmDataDumper { #endif } - void DumpRaw(absl::string_view name, - rtc::ArrayView v, - int dump_set = kDefaultDumpSet) { + void DumpRaw([[maybe_unused]] absl::string_view name, + [[maybe_unused]] ArrayView v, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -345,7 +349,7 @@ class ApmDataDumper { } void DumpRaw(absl::string_view name, - rtc::ArrayView v, + ArrayView v, int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) @@ -355,12 +359,12 @@ class ApmDataDumper { #endif } - void DumpWav(absl::string_view name, - size_t v_length, - const float* v, - int sample_rate_hz, - int 
num_channels, - int dump_set = kDefaultDumpSet) { + void DumpWav([[maybe_unused]] absl::string_view name, + [[maybe_unused]] size_t v_length, + [[maybe_unused]] const float* v, + [[maybe_unused]] int sample_rate_hz, + [[maybe_unused]] int num_channels, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -373,11 +377,11 @@ class ApmDataDumper { #endif } - void DumpWav(absl::string_view name, - rtc::ArrayView v, - int sample_rate_hz, - int num_channels, - int dump_set = kDefaultDumpSet) { + void DumpWav([[maybe_unused]] absl::string_view name, + [[maybe_unused]] ArrayView v, + [[maybe_unused]] int sample_rate_hz, + [[maybe_unused]] int num_channels, + [[maybe_unused]] int dump_set = kDefaultDumpSet) { #if WEBRTC_APM_DEBUG_DUMP == 1 if (dump_set_to_use_ && *dump_set_to_use_ != dump_set) return; @@ -391,7 +395,7 @@ class ApmDataDumper { private: #if WEBRTC_APM_DEBUG_DUMP == 1 static bool recording_activated_; - static absl::optional dump_set_to_use_; + static std::optional dump_set_to_use_; static constexpr size_t kOutputDirMaxLength = 1024; static char output_dir_[kOutputDirMaxLength]; const int instance_index_; diff --git a/modules/audio_processing/ns/BUILD.gn b/modules/audio_processing/ns/BUILD.gn index d818e23f3c..31ad61156d 100644 --- a/modules/audio_processing/ns/BUILD.gn +++ b/modules/audio_processing/ns/BUILD.gn @@ -64,7 +64,6 @@ rtc_static_library("ns") { "../../../system_wrappers:metrics", "../utility:cascaded_biquad_filter", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } if (rtc_include_tests) { @@ -89,7 +88,6 @@ if (rtc_include_tests) { "../../../test:test_support", "../utility:cascaded_biquad_filter", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] defines = [] diff --git a/modules/audio_processing/ns/fast_math.cc b/modules/audio_processing/ns/fast_math.cc index d13110c43f..72f6fb2145 100644 --- a/modules/audio_processing/ns/fast_math.cc +++ b/modules/audio_processing/ns/fast_math.cc @@ -57,7 +57,7 @@ float LogApproximation(float x) { return FastLog2f(x) * kLogOf2; } -void LogApproximation(rtc::ArrayView x, rtc::ArrayView y) { +void LogApproximation(ArrayView x, ArrayView y) { for (size_t k = 0; k < x.size(); ++k) { y[k] = LogApproximation(x[k]); } @@ -68,14 +68,13 @@ float ExpApproximation(float x) { return PowApproximation(10.f, x * kLog10Ofe); } -void ExpApproximation(rtc::ArrayView x, rtc::ArrayView y) { +void ExpApproximation(ArrayView x, ArrayView y) { for (size_t k = 0; k < x.size(); ++k) { y[k] = ExpApproximation(x[k]); } } -void ExpApproximationSignFlip(rtc::ArrayView x, - rtc::ArrayView y) { +void ExpApproximationSignFlip(ArrayView x, ArrayView y) { for (size_t k = 0; k < x.size(); ++k) { y[k] = ExpApproximation(-x[k]); } diff --git a/modules/audio_processing/ns/fast_math.h b/modules/audio_processing/ns/fast_math.h index 0aefee940b..598b31cfd7 100644 --- a/modules/audio_processing/ns/fast_math.h +++ b/modules/audio_processing/ns/fast_math.h @@ -20,7 +20,7 @@ float SqrtFastApproximation(float f); // Log base conversion log(x) = log2(x)/log2(e). float LogApproximation(float x); -void LogApproximation(rtc::ArrayView x, rtc::ArrayView y); +void LogApproximation(ArrayView x, ArrayView y); // 2^x approximation. float Pow2Approximation(float p); @@ -30,9 +30,8 @@ float PowApproximation(float x, float p); // e^x approximation. 
float ExpApproximation(float x); -void ExpApproximation(rtc::ArrayView x, rtc::ArrayView y); -void ExpApproximationSignFlip(rtc::ArrayView x, - rtc::ArrayView y); +void ExpApproximation(ArrayView x, ArrayView y); +void ExpApproximationSignFlip(ArrayView x, ArrayView y); } // namespace webrtc #endif // MODULES_AUDIO_PROCESSING_NS_FAST_MATH_H_ diff --git a/modules/audio_processing/ns/histograms.h b/modules/audio_processing/ns/histograms.h index 9640e743cf..54efbe097b 100644 --- a/modules/audio_processing/ns/histograms.h +++ b/modules/audio_processing/ns/histograms.h @@ -36,11 +36,11 @@ class Histograms { void Update(const SignalModel& features_); // Methods for accessing the histograms. - rtc::ArrayView get_lrt() const { return lrt_; } - rtc::ArrayView get_spectral_flatness() const { + ArrayView get_lrt() const { return lrt_; } + ArrayView get_spectral_flatness() const { return spectral_flatness_; } - rtc::ArrayView get_spectral_diff() const { + ArrayView get_spectral_diff() const { return spectral_diff_; } diff --git a/modules/audio_processing/ns/noise_estimator.cc b/modules/audio_processing/ns/noise_estimator.cc index 5367545f25..bf21d38d0b 100644 --- a/modules/audio_processing/ns/noise_estimator.cc +++ b/modules/audio_processing/ns/noise_estimator.cc @@ -11,8 +11,14 @@ #include "modules/audio_processing/ns/noise_estimator.h" #include +#include +#include +#include +#include "api/array_view.h" #include "modules/audio_processing/ns/fast_math.h" +#include "modules/audio_processing/ns/ns_common.h" +#include "modules/audio_processing/ns/suppression_params.h" #include "rtc_base/checks.h" namespace webrtc { @@ -58,7 +64,7 @@ void NoiseEstimator::PrepareAnalysis() { void NoiseEstimator::PreUpdate( int32_t num_analyzed_frames, - rtc::ArrayView signal_spectrum, + ArrayView signal_spectrum, float signal_spectral_sum) { quantile_noise_estimator_.Estimate(signal_spectrum, noise_spectrum_); @@ -129,9 +135,9 @@ void NoiseEstimator::PreUpdate( } else { // Use pink noise estimate. float use_band = i < kStartBand ? kStartBand : i; - float denom = PowApproximation(use_band, parametric_exp); - RTC_DCHECK_NE(denom, 0.f); - parametric_noise_spectrum_[i] = parametric_num / denom; + float parametric_denom = PowApproximation(use_band, parametric_exp); + RTC_DCHECK_NE(parametric_denom, 0.f); + parametric_noise_spectrum_[i] = parametric_num / parametric_denom; } } @@ -147,8 +153,8 @@ void NoiseEstimator::PreUpdate( } void NoiseEstimator::PostUpdate( - rtc::ArrayView speech_probability, - rtc::ArrayView signal_spectrum) { + ArrayView speech_probability, + ArrayView signal_spectrum) { // Time-avg parameter for noise_spectrum update. constexpr float kNoiseUpdate = 0.9f; diff --git a/modules/audio_processing/ns/noise_estimator.h b/modules/audio_processing/ns/noise_estimator.h index 0c0466a679..124ab930f3 100644 --- a/modules/audio_processing/ns/noise_estimator.h +++ b/modules/audio_processing/ns/noise_estimator.h @@ -31,32 +31,30 @@ class NoiseEstimator { // Performs the first step of the estimator update. void PreUpdate(int32_t num_analyzed_frames, - rtc::ArrayView signal_spectrum, + ArrayView signal_spectrum, float signal_spectral_sum); // Performs the second step of the estimator update. - void PostUpdate( - rtc::ArrayView speech_probability, - rtc::ArrayView signal_spectrum); + void PostUpdate(ArrayView speech_probability, + ArrayView signal_spectrum); // Returns the noise spectral estimate. 
- rtc::ArrayView get_noise_spectrum() const { + ArrayView get_noise_spectrum() const { return noise_spectrum_; } // Returns the noise from the previous frame. - rtc::ArrayView get_prev_noise_spectrum() - const { + ArrayView get_prev_noise_spectrum() const { return prev_noise_spectrum_; } // Returns a noise spectral estimate based on white and pink noise parameters. - rtc::ArrayView get_parametric_noise_spectrum() + ArrayView get_parametric_noise_spectrum() const { return parametric_noise_spectrum_; } - rtc::ArrayView - get_conservative_noise_spectrum() const { + ArrayView get_conservative_noise_spectrum() + const { return conservative_noise_spectrum_; } diff --git a/modules/audio_processing/ns/noise_suppressor.cc b/modules/audio_processing/ns/noise_suppressor.cc index 7c524dadf3..070d046ccd 100644 --- a/modules/audio_processing/ns/noise_suppressor.cc +++ b/modules/audio_processing/ns/noise_suppressor.cc @@ -70,7 +70,7 @@ constexpr std::array kBlocks160w256FirstHalf = { 0.99986614f}; // Applies the filterbank window to a buffer. -void ApplyFilterBankWindow(rtc::ArrayView x) { +void ApplyFilterBankWindow(ArrayView x) { for (size_t i = 0; i < 96; ++i) { x[i] = kBlocks160w256FirstHalf[i] * x[i]; } @@ -82,9 +82,9 @@ void ApplyFilterBankWindow(rtc::ArrayView x) { } // Extends a frame with previous data. -void FormExtendedFrame(rtc::ArrayView frame, - rtc::ArrayView old_data, - rtc::ArrayView extended_frame) { +void FormExtendedFrame(ArrayView frame, + ArrayView old_data, + ArrayView extended_frame) { std::copy(old_data.begin(), old_data.end(), extended_frame.begin()); std::copy(frame.begin(), frame.end(), extended_frame.begin() + old_data.size()); @@ -93,9 +93,9 @@ void FormExtendedFrame(rtc::ArrayView frame, } // Uses overlap-and-add to produce an output frame. -void OverlapAndAdd(rtc::ArrayView extended_frame, - rtc::ArrayView overlap_memory, - rtc::ArrayView output_frame) { +void OverlapAndAdd(ArrayView extended_frame, + ArrayView overlap_memory, + ArrayView output_frame) { for (size_t i = 0; i < kOverlapSize; ++i) { output_frame[i] = overlap_memory[i] + extended_frame[i]; } @@ -107,9 +107,9 @@ void OverlapAndAdd(rtc::ArrayView extended_frame, } // Produces a delayed frame. -void DelaySignal(rtc::ArrayView frame, - rtc::ArrayView delay_buffer, - rtc::ArrayView delayed_frame) { +void DelaySignal(ArrayView frame, + ArrayView delay_buffer, + ArrayView delayed_frame) { constexpr size_t kSamplesFromFrame = kNsFrameSize - (kFftSize - kNsFrameSize); std::copy(delay_buffer.begin(), delay_buffer.end(), delayed_frame.begin()); std::copy(frame.begin(), frame.begin() + kSamplesFromFrame, @@ -120,7 +120,7 @@ void DelaySignal(rtc::ArrayView frame, } // Computes the energy of an extended frame. -float ComputeEnergyOfExtendedFrame(rtc::ArrayView x) { +float ComputeEnergyOfExtendedFrame(ArrayView x) { float energy = 0.f; for (float x_k : x) { energy += x_k * x_k; @@ -131,8 +131,8 @@ float ComputeEnergyOfExtendedFrame(rtc::ArrayView x) { // Computes the energy of an extended frame based on its subcomponents. float ComputeEnergyOfExtendedFrame( - rtc::ArrayView frame, - rtc::ArrayView old_data) { + ArrayView frame, + ArrayView old_data) { float energy = 0.f; for (float v : old_data) { energy += v * v; @@ -146,9 +146,9 @@ float ComputeEnergyOfExtendedFrame( // Computes the magnitude spectrum based on an FFT output. 
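As a side note on the overlap-and-add step touched above, a toy, self-contained version with made-up sizes shows how the carried overlap is consumed and refilled on each call; the tail handling here is the textbook variant and may differ in detail from the truncated WebRTC hunk.

```cpp
// Toy sizes only (the real code uses kNsFrameSize/kOverlapSize/kFftSize): add
// the carried overlap into the head of the output, pass the middle straight
// through, and stash the tail of the extended frame for the next call.
#include <algorithm>
#include <array>
#include <cstddef>

constexpr size_t kToyFrame = 4;
constexpr size_t kToyOverlap = 2;

void ToyOverlapAndAdd(const std::array<float, kToyFrame + kToyOverlap>& ext,
                      std::array<float, kToyOverlap>& overlap_memory,
                      std::array<float, kToyFrame>& output) {
  for (size_t i = 0; i < kToyOverlap; ++i) {
    output[i] = overlap_memory[i] + ext[i];
  }
  std::copy(ext.begin() + kToyOverlap, ext.begin() + kToyFrame,
            output.begin() + kToyOverlap);
  std::copy(ext.begin() + kToyFrame, ext.end(), overlap_memory.begin());
}
```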
void ComputeMagnitudeSpectrum( - rtc::ArrayView real, - rtc::ArrayView imag, - rtc::ArrayView signal_spectrum) { + ArrayView real, + ArrayView imag, + ArrayView signal_spectrum) { signal_spectrum[0] = fabsf(real[0]) + 1.f; signal_spectrum[kFftSizeBy2Plus1 - 1] = fabsf(real[kFftSizeBy2Plus1 - 1]) + 1.f; @@ -160,13 +160,13 @@ void ComputeMagnitudeSpectrum( } // Compute prior and post SNR. -void ComputeSnr(rtc::ArrayView filter, - rtc::ArrayView prev_signal_spectrum, - rtc::ArrayView signal_spectrum, - rtc::ArrayView prev_noise_spectrum, - rtc::ArrayView noise_spectrum, - rtc::ArrayView prior_snr, - rtc::ArrayView post_snr) { +void ComputeSnr(ArrayView filter, + ArrayView prev_signal_spectrum, + ArrayView signal_spectrum, + ArrayView prev_noise_spectrum, + ArrayView noise_spectrum, + ArrayView prior_snr, + ArrayView post_snr) { for (size_t i = 0; i < kFftSizeBy2Plus1; ++i) { // Previous post SNR. // Previous estimate: based on previous frame with gain filter. @@ -187,10 +187,10 @@ void ComputeSnr(rtc::ArrayView filter, // Computes the attenuating gain for the noise suppression of the upper bands. float ComputeUpperBandsGain( float minimum_attenuating_gain, - rtc::ArrayView filter, - rtc::ArrayView speech_probability, - rtc::ArrayView prev_analysis_signal_spectrum, - rtc::ArrayView signal_spectrum) { + ArrayView filter, + ArrayView speech_probability, + ArrayView prev_analysis_signal_spectrum, + ArrayView signal_spectrum) { // Average speech prob and filter gain for the end of the lowest band. constexpr int kNumAvgBins = 32; constexpr float kOneByNumAvgBins = 1.f / kNumAvgBins; @@ -271,13 +271,13 @@ NoiseSuppressor::NoiseSuppressor(const NsConfig& config, } void NoiseSuppressor::AggregateWienerFilters( - rtc::ArrayView filter) const { - rtc::ArrayView filter0 = + ArrayView filter) const { + ArrayView filter0 = channels_[0]->wiener_filter.get_filter(); std::copy(filter0.begin(), filter0.end(), filter.begin()); for (size_t ch = 1; ch < num_channels_; ++ch) { - rtc::ArrayView filter_ch = + ArrayView filter_ch = channels_[ch]->wiener_filter.get_filter(); for (size_t k = 0; k < kFftSizeBy2Plus1; ++k) { @@ -295,7 +295,7 @@ void NoiseSuppressor::Analyze(const AudioBuffer& audio) { // Check for zero frames. bool zero_frame = true; for (size_t ch = 0; ch < num_channels_; ++ch) { - rtc::ArrayView y_band0( + ArrayView y_band0( &audio.split_bands_const(ch)[0][0], kNsFrameSize); float energy = ComputeEnergyOfExtendedFrame( y_band0, channels_[ch]->analyze_analysis_memory); @@ -325,7 +325,7 @@ void NoiseSuppressor::Analyze(const AudioBuffer& audio) { // Analyze all channels. for (size_t ch = 0; ch < num_channels_; ++ch) { std::unique_ptr& ch_p = channels_[ch]; - rtc::ArrayView y_band0( + ArrayView y_band0( &audio.split_bands_const(ch)[0][0], kNsFrameSize); // Form an extended frame and apply analysis filter bank windowing. @@ -383,35 +383,35 @@ void NoiseSuppressor::Analyze(const AudioBuffer& audio) { void NoiseSuppressor::Process(AudioBuffer* audio) { // Select the space for storing data during the processing. 
std::array filter_bank_states_stack; - rtc::ArrayView filter_bank_states( - filter_bank_states_stack.data(), num_channels_); + ArrayView filter_bank_states(filter_bank_states_stack.data(), + num_channels_); std::array upper_band_gains_stack; - rtc::ArrayView upper_band_gains(upper_band_gains_stack.data(), - num_channels_); + ArrayView upper_band_gains(upper_band_gains_stack.data(), + num_channels_); std::array energies_before_filtering_stack; - rtc::ArrayView energies_before_filtering( + ArrayView energies_before_filtering( energies_before_filtering_stack.data(), num_channels_); std::array gain_adjustments_stack; - rtc::ArrayView gain_adjustments(gain_adjustments_stack.data(), - num_channels_); + ArrayView gain_adjustments(gain_adjustments_stack.data(), + num_channels_); if (NumChannelsOnHeap(num_channels_) > 0) { // If the stack-allocated space is too small, use the heap for storing the // data. - filter_bank_states = rtc::ArrayView( + filter_bank_states = ArrayView( filter_bank_states_heap_.data(), num_channels_); upper_band_gains = - rtc::ArrayView(upper_band_gains_heap_.data(), num_channels_); - energies_before_filtering = rtc::ArrayView( - energies_before_filtering_heap_.data(), num_channels_); + ArrayView(upper_band_gains_heap_.data(), num_channels_); + energies_before_filtering = + ArrayView(energies_before_filtering_heap_.data(), num_channels_); gain_adjustments = - rtc::ArrayView(gain_adjustments_heap_.data(), num_channels_); + ArrayView(gain_adjustments_heap_.data(), num_channels_); } // Compute the suppression filters for all channels. for (size_t ch = 0; ch < num_channels_; ++ch) { // Form an extended frame and apply analysis filter bank windowing. - rtc::ArrayView y_band0(&audio->split_bands(ch)[0][0], - kNsFrameSize); + ArrayView y_band0(&audio->split_bands(ch)[0][0], + kNsFrameSize); FormExtendedFrame(y_band0, channels_[ch]->process_analysis_memory, filter_bank_states[ch].extended_frame); @@ -457,7 +457,7 @@ void NoiseSuppressor::Process(AudioBuffer* audio) { // Aggregate the Wiener filters for all channels. std::array filter_data; - rtc::ArrayView filter = filter_data; + ArrayView filter = filter_data; if (num_channels_ == 1) { filter = channels_[0]->wiener_filter.get_filter(); } else { @@ -509,8 +509,8 @@ void NoiseSuppressor::Process(AudioBuffer* audio) { // Use overlap-and-add to form the output frame of the lowest band. for (size_t ch = 0; ch < num_channels_; ++ch) { - rtc::ArrayView y_band0(&audio->split_bands(ch)[0][0], - kNsFrameSize); + ArrayView y_band0(&audio->split_bands(ch)[0][0], + kNsFrameSize); OverlapAndAdd(filter_bank_states[ch].extended_frame, channels_[ch]->process_synthesis_memory, y_band0); } @@ -527,8 +527,8 @@ void NoiseSuppressor::Process(AudioBuffer* audio) { for (size_t b = 1; b < num_bands_; ++b) { // Delay the upper bands to match the delay of the filterbank applied to // the lowest band. - rtc::ArrayView y_band( - &audio->split_bands(ch)[b][0], kNsFrameSize); + ArrayView y_band(&audio->split_bands(ch)[b][0], + kNsFrameSize); std::array delayed_frame; DelaySignal(y_band, channels_[ch]->process_delay_memory[b - 1], delayed_frame); @@ -544,8 +544,8 @@ void NoiseSuppressor::Process(AudioBuffer* audio) { // Limit the output the allowed range. 
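The Process() hunks above keep the common case allocation-free; a condensed sketch of that stack-first, heap-fallback pattern follows, with an illustrative channel limit and placeholder per-channel work.

```cpp
// Illustrative limit and placeholder work; the ArrayView re-pointing mirrors
// the pattern in NoiseSuppressor::Process() above.
#include <algorithm>
#include <array>
#include <cstddef>
#include <vector>

#include "api/array_view.h"

constexpr size_t kMaxStackChannels = 2;  // Stand-in for the real stack limit.

void ApplyPerChannelGains(size_t num_channels,
                          std::vector<float>& heap_buffer) {
  std::array<float, kMaxStackChannels> stack_buffer;
  webrtc::ArrayView<float> gains(stack_buffer.data(),
                                 std::min(num_channels, kMaxStackChannels));
  if (num_channels > kMaxStackChannels) {
    heap_buffer.resize(num_channels);  // Heap only if the stack is too small.
    gains = webrtc::ArrayView<float>(heap_buffer.data(), num_channels);
  }
  for (float& gain : gains) {
    gain = 1.0f;  // Placeholder per-channel computation.
  }
}
```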
for (size_t ch = 0; ch < num_channels_; ++ch) { for (size_t b = 0; b < num_bands_; ++b) { - rtc::ArrayView y_band(&audio->split_bands(ch)[b][0], - kNsFrameSize); + ArrayView y_band(&audio->split_bands(ch)[b][0], + kNsFrameSize); for (size_t j = 0; j < kNsFrameSize; j++) { y_band[j] = std::min(std::max(y_band[j], -32768.f), 32767.f); } diff --git a/modules/audio_processing/ns/noise_suppressor.h b/modules/audio_processing/ns/noise_suppressor.h index 1e321cf4a2..8ca0735fce 100644 --- a/modules/audio_processing/ns/noise_suppressor.h +++ b/modules/audio_processing/ns/noise_suppressor.h @@ -83,8 +83,7 @@ class NoiseSuppressor { std::vector> channels_; // Aggregates the Wiener filters into a single filter to use. - void AggregateWienerFilters( - rtc::ArrayView filter) const; + void AggregateWienerFilters(ArrayView filter) const; }; } // namespace webrtc diff --git a/modules/audio_processing/ns/noise_suppressor_unittest.cc b/modules/audio_processing/ns/noise_suppressor_unittest.cc index 28ea63ae40..f1da06eabc 100644 --- a/modules/audio_processing/ns/noise_suppressor_unittest.cc +++ b/modules/audio_processing/ns/noise_suppressor_unittest.cc @@ -26,7 +26,7 @@ namespace { std::string ProduceDebugText(int sample_rate_hz, size_t num_channels, NsConfig::SuppressionLevel level) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Sample rate: " << sample_rate_hz << ", num_channels: " << num_channels << ", level: " << static_cast(level); return ss.Release(); @@ -48,7 +48,7 @@ void PopulateInputFrameWithIdenticalChannels(size_t num_channels, void VerifyIdenticalChannels(size_t num_channels, size_t num_bands, - size_t frame_index, + size_t /* frame_index */, const AudioBuffer& audio) { EXPECT_GT(num_channels, 1u); for (size_t ch = 1; ch < num_channels; ++ch) { @@ -74,7 +74,7 @@ TEST(NoiseSuppressor, IdenticalChannelEffects) { SCOPED_TRACE(ProduceDebugText(rate, num_channels, level)); const size_t num_bands = rate / 16000; - // const int frame_length = rtc::CheckedDivExact(rate, 100); + // const int frame_length = webrtc::CheckedDivExact(rate, 100); AudioBuffer audio(rate, num_channels, rate, num_channels, rate, num_channels); NsConfig cfg; diff --git a/modules/audio_processing/ns/ns_fft.cc b/modules/audio_processing/ns/ns_fft.cc index 264c46972c..bdbda8e17d 100644 --- a/modules/audio_processing/ns/ns_fft.cc +++ b/modules/audio_processing/ns/ns_fft.cc @@ -24,9 +24,9 @@ NrFft::NrFft() : bit_reversal_state_(kFftSize / 2), tables_(kFftSize / 2) { tables_.data()); } -void NrFft::Fft(rtc::ArrayView time_data, - rtc::ArrayView real, - rtc::ArrayView imag) { +void NrFft::Fft(ArrayView time_data, + ArrayView real, + ArrayView imag) { WebRtc_rdft(kFftSize, 1, time_data.data(), bit_reversal_state_.data(), tables_.data()); @@ -42,9 +42,9 @@ void NrFft::Fft(rtc::ArrayView time_data, } } -void NrFft::Ifft(rtc::ArrayView real, - rtc::ArrayView imag, - rtc::ArrayView time_data) { +void NrFft::Ifft(ArrayView real, + ArrayView imag, + ArrayView time_data) { time_data[0] = real[0]; time_data[1] = real[kFftSizeBy2Plus1 - 1]; for (size_t i = 1; i < kFftSizeBy2Plus1 - 1; ++i) { diff --git a/modules/audio_processing/ns/ns_fft.h b/modules/audio_processing/ns/ns_fft.h index 539251eef2..8cba188f80 100644 --- a/modules/audio_processing/ns/ns_fft.h +++ b/modules/audio_processing/ns/ns_fft.h @@ -26,14 +26,14 @@ class NrFft { NrFft& operator=(const NrFft&) = delete; // Transforms the signal from time to frequency domain. 
- void Fft(rtc::ArrayView time_data, - rtc::ArrayView real, - rtc::ArrayView imag); + void Fft(ArrayView time_data, + ArrayView real, + ArrayView imag); // Transforms the signal from frequency to time domain. - void Ifft(rtc::ArrayView real, - rtc::ArrayView imag, - rtc::ArrayView time_data); + void Ifft(ArrayView real, + ArrayView imag, + ArrayView time_data); private: std::vector bit_reversal_state_; diff --git a/modules/audio_processing/ns/prior_signal_model_estimator.cc b/modules/audio_processing/ns/prior_signal_model_estimator.cc index f77dcd6dac..3a03dc9f8a 100644 --- a/modules/audio_processing/ns/prior_signal_model_estimator.cc +++ b/modules/audio_processing/ns/prior_signal_model_estimator.cc @@ -24,7 +24,7 @@ namespace { // Identifies the first of the two largest peaks in the histogram. void FindFirstOfTwoLargestPeaks( float bin_size, - rtc::ArrayView spectral_flatness, + ArrayView spectral_flatness, float* peak_position, int* peak_weight) { RTC_DCHECK(peak_position); @@ -65,7 +65,7 @@ void FindFirstOfTwoLargestPeaks( } } -void UpdateLrt(rtc::ArrayView lrt_histogram, +void UpdateLrt(ArrayView lrt_histogram, float* prior_model_lrt, bool* low_lrt_fluctuations) { RTC_DCHECK(prior_model_lrt); diff --git a/modules/audio_processing/ns/quantile_noise_estimator.cc b/modules/audio_processing/ns/quantile_noise_estimator.cc index bab494ff21..b7847551d9 100644 --- a/modules/audio_processing/ns/quantile_noise_estimator.cc +++ b/modules/audio_processing/ns/quantile_noise_estimator.cc @@ -28,8 +28,8 @@ QuantileNoiseEstimator::QuantileNoiseEstimator() { } void QuantileNoiseEstimator::Estimate( - rtc::ArrayView signal_spectrum, - rtc::ArrayView noise_spectrum) { + ArrayView signal_spectrum, + ArrayView noise_spectrum) { std::array log_spectrum; LogApproximation(signal_spectrum, log_spectrum); @@ -77,8 +77,8 @@ void QuantileNoiseEstimator::Estimate( if (quantile_index_to_return >= 0) { ExpApproximation( - rtc::ArrayView(&log_quantile_[quantile_index_to_return], - kFftSizeBy2Plus1), + ArrayView(&log_quantile_[quantile_index_to_return], + kFftSizeBy2Plus1), quantile_); } diff --git a/modules/audio_processing/ns/quantile_noise_estimator.h b/modules/audio_processing/ns/quantile_noise_estimator.h index 55b0bfa3fe..79a1b3e1cc 100644 --- a/modules/audio_processing/ns/quantile_noise_estimator.h +++ b/modules/audio_processing/ns/quantile_noise_estimator.h @@ -30,8 +30,8 @@ class QuantileNoiseEstimator { QuantileNoiseEstimator& operator=(const QuantileNoiseEstimator&) = delete; // Estimate noise. - void Estimate(rtc::ArrayView signal_spectrum, - rtc::ArrayView noise_spectrum); + void Estimate(ArrayView signal_spectrum, + ArrayView noise_spectrum); private: std::array density_; diff --git a/modules/audio_processing/ns/signal_model_estimator.cc b/modules/audio_processing/ns/signal_model_estimator.cc index 67dd3bb687..55c36ef43a 100644 --- a/modules/audio_processing/ns/signal_model_estimator.cc +++ b/modules/audio_processing/ns/signal_model_estimator.cc @@ -21,8 +21,8 @@ constexpr float kOneByFftSizeBy2Plus1 = 1.f / kFftSizeBy2Plus1; // Computes the difference measure between input spectrum and a template/learned // noise spectrum. 
float ComputeSpectralDiff( - rtc::ArrayView conservative_noise_spectrum, - rtc::ArrayView signal_spectrum, + ArrayView conservative_noise_spectrum, + ArrayView signal_spectrum, float signal_spectral_sum, float diff_normalization) { // spectral_diff = var(signal_spectrum) - cov(signal_spectrum, magnAvgPause)^2 @@ -61,7 +61,7 @@ float ComputeSpectralDiff( // Updates the spectral flatness based on the input spectrum. void UpdateSpectralFlatness( - rtc::ArrayView signal_spectrum, + ArrayView signal_spectrum, float signal_spectral_sum, float* spectral_flatness) { RTC_DCHECK(spectral_flatness); @@ -94,9 +94,9 @@ void UpdateSpectralFlatness( } // Updates the log LRT measures. -void UpdateSpectralLrt(rtc::ArrayView prior_snr, - rtc::ArrayView post_snr, - rtc::ArrayView avg_log_lrt, +void UpdateSpectralLrt(ArrayView prior_snr, + ArrayView post_snr, + ArrayView avg_log_lrt, float* lrt) { RTC_DCHECK(lrt); @@ -129,10 +129,10 @@ void SignalModelEstimator::AdjustNormalization(int32_t num_analyzed_frames, // Update the noise features. void SignalModelEstimator::Update( - rtc::ArrayView prior_snr, - rtc::ArrayView post_snr, - rtc::ArrayView conservative_noise_spectrum, - rtc::ArrayView signal_spectrum, + ArrayView prior_snr, + ArrayView post_snr, + ArrayView conservative_noise_spectrum, + ArrayView signal_spectrum, float signal_spectral_sum, float signal_energy) { // Compute spectral flatness on input spectrum. diff --git a/modules/audio_processing/ns/signal_model_estimator.h b/modules/audio_processing/ns/signal_model_estimator.h index 58ce00acbf..834af1ede9 100644 --- a/modules/audio_processing/ns/signal_model_estimator.h +++ b/modules/audio_processing/ns/signal_model_estimator.h @@ -32,10 +32,10 @@ class SignalModelEstimator { void AdjustNormalization(int32_t num_analyzed_frames, float signal_energy); void Update( - rtc::ArrayView prior_snr, - rtc::ArrayView post_snr, - rtc::ArrayView conservative_noise_spectrum, - rtc::ArrayView signal_spectrum, + ArrayView prior_snr, + ArrayView post_snr, + ArrayView conservative_noise_spectrum, + ArrayView signal_spectrum, float signal_spectral_sum, float signal_energy); diff --git a/modules/audio_processing/ns/speech_probability_estimator.cc b/modules/audio_processing/ns/speech_probability_estimator.cc index 65f17f4af2..ddd20bddad 100644 --- a/modules/audio_processing/ns/speech_probability_estimator.cc +++ b/modules/audio_processing/ns/speech_probability_estimator.cc @@ -25,10 +25,10 @@ SpeechProbabilityEstimator::SpeechProbabilityEstimator() { void SpeechProbabilityEstimator::Update( int32_t num_analyzed_frames, - rtc::ArrayView prior_snr, - rtc::ArrayView post_snr, - rtc::ArrayView conservative_noise_spectrum, - rtc::ArrayView signal_spectrum, + ArrayView prior_snr, + ArrayView post_snr, + ArrayView conservative_noise_spectrum, + ArrayView signal_spectrum, float signal_spectral_sum, float signal_energy) { // Update models. diff --git a/modules/audio_processing/ns/speech_probability_estimator.h b/modules/audio_processing/ns/speech_probability_estimator.h index 259c3b6776..64ed60247c 100644 --- a/modules/audio_processing/ns/speech_probability_estimator.h +++ b/modules/audio_processing/ns/speech_probability_estimator.h @@ -30,15 +30,15 @@ class SpeechProbabilityEstimator { // Compute speech probability. 
void Update( int32_t num_analyzed_frames, - rtc::ArrayView prior_snr, - rtc::ArrayView post_snr, - rtc::ArrayView conservative_noise_spectrum, - rtc::ArrayView signal_spectrum, + ArrayView prior_snr, + ArrayView post_snr, + ArrayView conservative_noise_spectrum, + ArrayView signal_spectrum, float signal_spectral_sum, float signal_energy); float get_prior_probability() const { return prior_speech_prob_; } - rtc::ArrayView get_probability() { return speech_probability_; } + ArrayView get_probability() { return speech_probability_; } private: SignalModelEstimator signal_model_estimator_; diff --git a/modules/audio_processing/ns/wiener_filter.cc b/modules/audio_processing/ns/wiener_filter.cc index 1eb50a7166..83e40dcb07 100644 --- a/modules/audio_processing/ns/wiener_filter.cc +++ b/modules/audio_processing/ns/wiener_filter.cc @@ -30,10 +30,10 @@ WienerFilter::WienerFilter(const SuppressionParams& suppression_params) void WienerFilter::Update( int32_t num_analyzed_frames, - rtc::ArrayView noise_spectrum, - rtc::ArrayView prev_noise_spectrum, - rtc::ArrayView parametric_noise_spectrum, - rtc::ArrayView signal_spectrum) { + ArrayView noise_spectrum, + ArrayView prev_noise_spectrum, + ArrayView parametric_noise_spectrum, + ArrayView signal_spectrum) { for (size_t i = 0; i < kFftSizeBy2Plus1; ++i) { // Previous estimate based on previous frame with gain filter. float prev_tsa = spectrum_prev_process_[i] / diff --git a/modules/audio_processing/ns/wiener_filter.h b/modules/audio_processing/ns/wiener_filter.h index b55c5dc59d..cd45541eca 100644 --- a/modules/audio_processing/ns/wiener_filter.h +++ b/modules/audio_processing/ns/wiener_filter.h @@ -29,10 +29,10 @@ class WienerFilter { // Updates the filter estimate. void Update( int32_t num_analyzed_frames, - rtc::ArrayView noise_spectrum, - rtc::ArrayView prev_noise_spectrum, - rtc::ArrayView parametric_noise_spectrum, - rtc::ArrayView signal_spectrum); + ArrayView noise_spectrum, + ArrayView prev_noise_spectrum, + ArrayView parametric_noise_spectrum, + ArrayView signal_spectrum); // Compute an overall gain scaling factor. float ComputeOverallScalingFactor(int32_t num_analyzed_frames, @@ -41,7 +41,7 @@ class WienerFilter { float energy_after_filtering) const; // Returns the filter. - rtc::ArrayView get_filter() const { + ArrayView get_filter() const { return filter_; } diff --git a/modules/audio_processing/optionally_built_submodule_creators.cc b/modules/audio_processing/optionally_built_submodule_creators.cc deleted file mode 100644 index cea5c837dc..0000000000 --- a/modules/audio_processing/optionally_built_submodule_creators.cc +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_processing/optionally_built_submodule_creators.h" - -#include - -#include "modules/audio_processing/transient/transient_suppressor_impl.h" - -namespace webrtc { - -std::unique_ptr CreateTransientSuppressor( - const ApmSubmoduleCreationOverrides& overrides, - TransientSuppressor::VadMode vad_mode, - int sample_rate_hz, - int detection_rate_hz, - int num_channels) { -#ifdef WEBRTC_EXCLUDE_TRANSIENT_SUPPRESSOR - return nullptr; -#else - if (overrides.transient_suppression) { - return nullptr; - } - return std::make_unique( - vad_mode, sample_rate_hz, detection_rate_hz, num_channels); -#endif -} - -} // namespace webrtc diff --git a/modules/audio_processing/optionally_built_submodule_creators.h b/modules/audio_processing/optionally_built_submodule_creators.h deleted file mode 100644 index 1be2743986..0000000000 --- a/modules/audio_processing/optionally_built_submodule_creators.h +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_OPTIONALLY_BUILT_SUBMODULE_CREATORS_H_ -#define MODULES_AUDIO_PROCESSING_OPTIONALLY_BUILT_SUBMODULE_CREATORS_H_ - -#include - -#include "modules/audio_processing/transient/transient_suppressor.h" - -namespace webrtc { - -// These overrides are only to be used for testing purposes. -// Each flag emulates a preprocessor macro to exclude a submodule of APM from -// the build, e.g. WEBRTC_EXCLUDE_TRANSIENT_SUPPRESSOR. If the corresponding -// flag `transient_suppression` is enabled, then the creators will return -// nullptr instead of a submodule instance, as if the macro had been defined. -struct ApmSubmoduleCreationOverrides { - bool transient_suppression = false; -}; - -// Creates a transient suppressor. -// Will instead return nullptr if one of the following is true: -// * WEBRTC_EXCLUDE_TRANSIENT_SUPPRESSOR is defined -// * The corresponding override in `overrides` is enabled. 
-std::unique_ptr CreateTransientSuppressor( - const ApmSubmoduleCreationOverrides& overrides, - TransientSuppressor::VadMode vad_mode, - int sample_rate_hz, - int detection_rate_hz, - int num_channels); - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_OPTIONALLY_BUILT_SUBMODULE_CREATORS_H_ diff --git a/modules/audio_processing/residual_echo_detector.cc b/modules/audio_processing/residual_echo_detector.cc index 2a564fc233..437d857c22 100644 --- a/modules/audio_processing/residual_echo_detector.cc +++ b/modules/audio_processing/residual_echo_detector.cc @@ -11,9 +11,13 @@ #include "modules/audio_processing/residual_echo_detector.h" #include +#include +#include #include +#include -#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/audio/audio_processing.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -21,7 +25,7 @@ namespace { -float Power(rtc::ArrayView input) { +float Power(webrtc::ArrayView input) { if (input.empty()) { return 0.f; } @@ -54,7 +58,7 @@ ResidualEchoDetector::ResidualEchoDetector() ResidualEchoDetector::~ResidualEchoDetector() = default; void ResidualEchoDetector::AnalyzeRenderAudio( - rtc::ArrayView render_audio) { + ArrayView render_audio) { // Dump debug data assuming 48 kHz sample rate (if this assumption is not // valid the dumped audio will need to be converted offline accordingly). data_dumper_->DumpWav("ed_render", render_audio.size(), render_audio.data(), @@ -75,7 +79,7 @@ void ResidualEchoDetector::AnalyzeRenderAudio( } void ResidualEchoDetector::AnalyzeCaptureAudio( - rtc::ArrayView capture_audio) { + ArrayView capture_audio) { // Dump debug data assuming 48 kHz sample rate (if this assumption is not // valid the dumped audio will need to be converted offline accordingly). data_dumper_->DumpWav("ed_capture", capture_audio.size(), @@ -89,7 +93,7 @@ void ResidualEchoDetector::AnalyzeCaptureAudio( } // Get the next render value. - const absl::optional buffered_render_power = render_buffer_.Pop(); + const std::optional buffered_render_power = render_buffer_.Pop(); if (!buffered_render_power) { // This can happen in a few cases: at the start of a call, due to a glitch // or due to clock drift. The excess capture value will be ignored. @@ -135,25 +139,25 @@ void ResidualEchoDetector::AnalyzeCaptureAudio( if (echo_likelihood_ > 1.1f) { // Make sure we don't spam the log. 
if (log_counter_ < 5 && best_delay != -1) { - size_t read_index = kLookbackFrames + next_insertion_index_ - best_delay; - if (read_index >= kLookbackFrames) { - read_index -= kLookbackFrames; + size_t read_index_high_echo = + kLookbackFrames + next_insertion_index_ - best_delay; + if (read_index_high_echo >= kLookbackFrames) { + read_index_high_echo -= kLookbackFrames; } - RTC_DCHECK_LT(read_index, render_power_.size()); - RTC_LOG_F(LS_ERROR) << "Echo detector internal state: {" - "Echo likelihood: " - << echo_likelihood_ << ", Best Delay: " << best_delay - << ", Covariance: " - << covariances_[best_delay].covariance() - << ", Last capture power: " << capture_power - << ", Capture mean: " << capture_mean - << ", Capture_standard deviation: " - << capture_std_deviation << ", Last render power: " - << render_power_[read_index] - << ", Render mean: " << render_power_mean_[read_index] - << ", Render standard deviation: " - << render_power_std_dev_[read_index] - << ", Reliability: " << reliability_ << "}"; + RTC_DCHECK_LT(read_index_high_echo, render_power_.size()); + RTC_LOG_F(LS_ERROR) + << "Echo detector internal state: {" + "Echo likelihood: " + << echo_likelihood_ << ", Best Delay: " << best_delay + << ", Covariance: " << covariances_[best_delay].covariance() + << ", Last capture power: " << capture_power + << ", Capture mean: " << capture_mean + << ", Capture_standard deviation: " << capture_std_deviation + << ", Last render power: " << render_power_[read_index_high_echo] + << ", Render mean: " << render_power_mean_[read_index_high_echo] + << ", Render standard deviation: " + << render_power_std_dev_[read_index_high_echo] + << ", Reliability: " << reliability_ << "}"; log_counter_++; } } diff --git a/modules/audio_processing/residual_echo_detector.h b/modules/audio_processing/residual_echo_detector.h index ac554b17c4..495cc557b1 100644 --- a/modules/audio_processing/residual_echo_detector.h +++ b/modules/audio_processing/residual_echo_detector.h @@ -15,11 +15,11 @@ #include #include "api/array_view.h" +#include "api/audio/audio_processing.h" #include "modules/audio_processing/echo_detector/circular_buffer.h" #include "modules/audio_processing/echo_detector/mean_variance_estimator.h" #include "modules/audio_processing/echo_detector/moving_max.h" #include "modules/audio_processing/echo_detector/normalized_covariance_estimator.h" -#include "modules/audio_processing/include/audio_processing.h" namespace webrtc { @@ -32,10 +32,10 @@ class ResidualEchoDetector : public EchoDetector { ~ResidualEchoDetector() override; // This function should be called while holding the render lock. - void AnalyzeRenderAudio(rtc::ArrayView render_audio) override; + void AnalyzeRenderAudio(ArrayView render_audio) override; // This function should be called while holding the capture lock. - void AnalyzeCaptureAudio(rtc::ArrayView capture_audio) override; + void AnalyzeCaptureAudio(ArrayView capture_audio) override; // This function should be called while holding the capture lock. 
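The read_index_high_echo arithmetic above is a circular-buffer lookup: step best_delay slots back from the next insertion point and wrap within the lookback window. A standalone sketch with illustrative numbers:

```cpp
// Standalone sketch; `size` stands in for kLookbackFrames. Assumes
// next_insertion_index < size and delay <= size, as in the code above.
#include <cstddef>

size_t CircularIndexBack(size_t next_insertion_index,
                         size_t delay,
                         size_t size) {
  size_t index = size + next_insertion_index - delay;  // Avoids underflow.
  if (index >= size) {
    index -= size;
  }
  return index;
}
// Example: size = 100, next_insertion_index = 3, delay = 10 -> index 93.
```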
void Initialize(int capture_sample_rate_hz, diff --git a/modules/audio_processing/residual_echo_detector_unittest.cc b/modules/audio_processing/residual_echo_detector_unittest.cc index d8c227a443..f17ba5867f 100644 --- a/modules/audio_processing/residual_echo_detector_unittest.cc +++ b/modules/audio_processing/residual_echo_detector_unittest.cc @@ -18,7 +18,7 @@ namespace webrtc { TEST(ResidualEchoDetectorTests, Echo) { - auto echo_detector = rtc::make_ref_counted(); + auto echo_detector = make_ref_counted(); echo_detector->SetReliabilityForTest(1.0f); std::vector ones(160, 1.f); std::vector zeros(160, 0.f); @@ -45,7 +45,7 @@ TEST(ResidualEchoDetectorTests, Echo) { } TEST(ResidualEchoDetectorTests, NoEcho) { - auto echo_detector = rtc::make_ref_counted(); + auto echo_detector = make_ref_counted(); echo_detector->SetReliabilityForTest(1.0f); std::vector ones(160, 1.f); std::vector zeros(160, 0.f); @@ -67,7 +67,7 @@ TEST(ResidualEchoDetectorTests, NoEcho) { } TEST(ResidualEchoDetectorTests, EchoWithRenderClockDrift) { - auto echo_detector = rtc::make_ref_counted(); + auto echo_detector = make_ref_counted(); echo_detector->SetReliabilityForTest(1.0f); std::vector ones(160, 1.f); std::vector zeros(160, 0.f); @@ -104,7 +104,7 @@ TEST(ResidualEchoDetectorTests, EchoWithRenderClockDrift) { } TEST(ResidualEchoDetectorTests, EchoWithCaptureClockDrift) { - auto echo_detector = rtc::make_ref_counted(); + auto echo_detector = make_ref_counted(); echo_detector->SetReliabilityForTest(1.0f); std::vector ones(160, 1.f); std::vector zeros(160, 0.f); diff --git a/modules/audio_processing/rms_level.cc b/modules/audio_processing/rms_level.cc index b0a45cb403..ffb8e91414 100644 --- a/modules/audio_processing/rms_level.cc +++ b/modules/audio_processing/rms_level.cc @@ -54,10 +54,10 @@ void RmsLevel::Reset() { sum_square_ = 0.f; sample_count_ = 0; max_sum_square_ = 0.f; - block_size_ = absl::nullopt; + block_size_ = std::nullopt; } -void RmsLevel::Analyze(rtc::ArrayView data) { +void RmsLevel::Analyze(ArrayView data) { if (data.empty()) { return; } @@ -74,7 +74,7 @@ void RmsLevel::Analyze(rtc::ArrayView data) { max_sum_square_ = std::max(max_sum_square_, sum_square); } -void RmsLevel::Analyze(rtc::ArrayView data) { +void RmsLevel::Analyze(ArrayView data) { if (data.empty()) { return; } @@ -119,7 +119,7 @@ int RmsLevel::Average() { RmsLevel::Levels RmsLevel::AverageAndPeak() { // Note that block_size_ should by design always be non-empty when - // sample_count_ != 0. Also, the * operator of absl::optional enforces this + // sample_count_ != 0. Also, the * operator of std::optional enforces this // with a DCHECK. Levels levels = (sample_count_ == 0) ? Levels{RmsLevel::kMinLevelDb, RmsLevel::kMinLevelDb} diff --git a/modules/audio_processing/rms_level.h b/modules/audio_processing/rms_level.h index fbece19ecd..879fdff224 100644 --- a/modules/audio_processing/rms_level.h +++ b/modules/audio_processing/rms_level.h @@ -14,7 +14,8 @@ #include #include -#include "absl/types/optional.h" +#include + #include "api/array_view.h" namespace webrtc { @@ -44,8 +45,8 @@ class RmsLevel { void Reset(); // Pass each chunk of audio to Analyze() to accumulate the level. - void Analyze(rtc::ArrayView data); - void Analyze(rtc::ArrayView data); + void Analyze(ArrayView data); + void Analyze(ArrayView data); // If all samples with the given `length` have a magnitude of zero, this is // a shortcut to avoid some computation. 
@@ -69,7 +70,7 @@ class RmsLevel { float sum_square_; size_t sample_count_; float max_sum_square_; - absl::optional block_size_; + std::optional block_size_; }; } // namespace webrtc diff --git a/modules/audio_processing/rms_level_unittest.cc b/modules/audio_processing/rms_level_unittest.cc index 4cbad461e7..f9f1f9cbeb 100644 --- a/modules/audio_processing/rms_level_unittest.cc +++ b/modules/audio_processing/rms_level_unittest.cc @@ -25,7 +25,7 @@ namespace { constexpr int kSampleRateHz = 48000; constexpr size_t kBlockSizeSamples = kSampleRateHz / 100; -std::unique_ptr RunTest(rtc::ArrayView input) { +std::unique_ptr RunTest(ArrayView input) { std::unique_ptr level(new RmsLevel); for (size_t n = 0; n + kBlockSizeSamples <= input.size(); n += kBlockSizeSamples) { @@ -34,7 +34,7 @@ std::unique_ptr RunTest(rtc::ArrayView input) { return level; } -std::unique_ptr RunTest(rtc::ArrayView input) { +std::unique_ptr RunTest(ArrayView input) { std::unique_ptr level(new RmsLevel); for (size_t n = 0; n + kBlockSizeSamples <= input.size(); n += kBlockSizeSamples) { @@ -48,7 +48,7 @@ std::vector CreateInt16Sinusoid(int frequency_hz, size_t num_samples) { std::vector x(num_samples); for (size_t n = 0; n < num_samples; ++n) { - x[n] = rtc::saturated_cast( + x[n] = saturated_cast( amplitude * std::sin(2 * M_PI * n * frequency_hz / kSampleRateHz)); } return x; @@ -143,8 +143,8 @@ TEST(RmsLevelTest, Reset) { TEST(RmsLevelTest, ProcessMuted) { auto x = CreateInt16Sinusoid(1000, INT16_MAX, kSampleRateHz); auto level = RunTest(x); - const size_t kBlocksPerSecond = rtc::CheckedDivExact( - static_cast(kSampleRateHz), kBlockSizeSamples); + const size_t kBlocksPerSecond = + CheckedDivExact(static_cast(kSampleRateHz), kBlockSizeSamples); for (size_t i = 0; i < kBlocksPerSecond; ++i) { level->AnalyzeMuted(kBlockSizeSamples); } diff --git a/modules/audio_processing/splitting_filter.cc b/modules/audio_processing/splitting_filter.cc index d47090bc03..f607552301 100644 --- a/modules/audio_processing/splitting_filter.cc +++ b/modules/audio_processing/splitting_filter.cc @@ -27,7 +27,7 @@ constexpr size_t kTwoBandFilterSamplesPerFrame = 320; SplittingFilter::SplittingFilter(size_t num_channels, size_t num_bands, - size_t num_frames) + size_t /* num_frames */) : num_bands_(num_bands), two_bands_states_(num_bands_ == 2 ? num_channels : 0), three_band_filter_banks_(num_bands_ == 3 ? 
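The `CheckedDivExact(kSampleRateHz, kBlockSizeSamples)` call in the test above, like the `CheckedDivExact(sample_rate_hz, kChunksPerSecond)` calls later in this CL, expresses "divide, and the division must come out exact". A rough standalone equivalent, under the assumption that the real helper CHECK-fails on a non-zero remainder (assert() stands in for the RTC_CHECK-style macros):

#include <cassert>
#include <cstddef>

// Sketch of an exact-division helper: 48000 Hz with 10 ms blocks gives
// exactly 100 blocks per second; a non-zero remainder would indicate a
// configuration error, so it is asserted away here.
size_t DivExact(size_t numerator, size_t denominator) {
  assert(denominator != 0 && numerator % denominator == 0);
  return numerator / denominator;
}

// Usage, following the unit test above:
//   constexpr size_t kSampleRateHz = 48000;
//   constexpr size_t kBlockSizeSamples = kSampleRateHz / 100;          // 480
//   const size_t kBlocksPerSecond =
//       DivExact(kSampleRateHz, kBlockSizeSamples);                    // 100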
num_channels : 0) { @@ -110,11 +110,10 @@ void SplittingFilter::ThreeBandsAnalysis(const ChannelBuffer* data, for (size_t i = 0; i < three_band_filter_banks_.size(); ++i) { three_band_filter_banks_[i].Analysis( - rtc::ArrayView( + ArrayView( data->channels_view()[i].data(), ThreeBandFilterBank::kFullBandSize), - rtc::ArrayView, - ThreeBandFilterBank::kNumBands>( + ArrayView, ThreeBandFilterBank::kNumBands>( bands->bands_view(i).data(), ThreeBandFilterBank::kNumBands)); } } @@ -132,10 +131,9 @@ void SplittingFilter::ThreeBandsSynthesis(const ChannelBuffer* bands, for (size_t i = 0; i < data->num_channels(); ++i) { three_band_filter_banks_[i].Synthesis( - rtc::ArrayView, - ThreeBandFilterBank::kNumBands>( + ArrayView, ThreeBandFilterBank::kNumBands>( bands->bands_view(i).data(), ThreeBandFilterBank::kNumBands), - rtc::ArrayView( + ArrayView( data->channels_view()[i].data(), ThreeBandFilterBank::kFullBandSize)); } diff --git a/modules/audio_processing/test/aec_dump_based_simulator.cc b/modules/audio_processing/test/aec_dump_based_simulator.cc index 416e287751..896c4b8be3 100644 --- a/modules/audio_processing/test/aec_dump_based_simulator.cc +++ b/modules/audio_processing/test/aec_dump_based_simulator.cc @@ -10,13 +10,30 @@ #include "modules/audio_processing/test/aec_dump_based_simulator.h" +#include +#include +#include +#include +#include +#include #include #include - +#include +#include // no-presubmit-check TODO(webrtc:8982) +#include +#include + +#include "absl/base/nullability.h" +#include "api/audio/audio_processing.h" +#include "api/scoped_refptr.h" +#include "common_audio/channel_buffer.h" +#include "common_audio/wav_file.h" +#include "modules/audio_processing/debug.pb.h" #include "modules/audio_processing/echo_control_mobile_impl.h" -#include "modules/audio_processing/logging/apm_data_dumper.h" #include "modules/audio_processing/test/aec_dump_based_simulator.h" +#include "modules/audio_processing/test/audio_processing_simulator.h" #include "modules/audio_processing/test/protobuf_utils.h" +#include "modules/audio_processing/test/test_utils.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -34,7 +51,7 @@ bool VerifyFixedBitExactness(const webrtc::audioproc::Stream& msg, return false; } else { const int16_t* frame_data = frame.data.data(); - for (int k = 0; k < frame.num_channels * frame.samples_per_channel; ++k) { + for (int k = 0; k < frame.num_channels_ * frame.samples_per_channel_; ++k) { if (msg.output_data().data()[k] != frame_data[k]) { return false; } @@ -80,11 +97,8 @@ bool ReadNextMessage(bool use_dump_file, AecDumpBasedSimulator::AecDumpBasedSimulator( const SimulationSettings& settings, - rtc::scoped_refptr audio_processing, - std::unique_ptr ap_builder) - : AudioProcessingSimulator(settings, - std::move(audio_processing), - std::move(ap_builder)) { + absl_nonnull scoped_refptr audio_processing) + : AudioProcessingSimulator(settings, std::move(audio_processing)) { MaybeOpenCallOrderFile(); } @@ -129,7 +143,7 @@ void AecDumpBasedSimulator::PrepareProcessStreamCall( if (msg.has_input_data()) { int16_t* fwd_frame_data = fwd_frame_.data.data(); for (size_t k = 0; k < in_buf_->num_frames(); ++k) { - fwd_frame_data[k] = rtc::saturated_cast( + fwd_frame_data[k] = saturated_cast( fwd_frame_data[k] + static_cast(32767 * artificial_nearend_buf_->channels()[0][k])); @@ -177,8 +191,8 @@ void AecDumpBasedSimulator::PrepareProcessStreamCall( // Set the applied input level if available. 
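In `PrepareProcessStreamCall` above, the artificial near-end signal is scaled into int16 range and mixed into the capture frame through a saturating cast, so the sum clips at the int16 limits instead of wrapping around on overflow. A minimal sketch of that mixing step, with an explicit clamp standing in for `saturated_cast` (assumed behavior, not the real implementation):

#include <algorithm>
#include <cstdint>

// Mix a float signal in [-1, 1] into an int16 capture sample, clamping to
// the representable range instead of wrapping on overflow.
int16_t MixSaturated(int16_t capture_sample, float artificial_sample) {
  const float mixed =
      static_cast<float>(capture_sample) + 32767.0f * artificial_sample;
  const float clamped = std::min(32767.0f, std::max(-32768.0f, mixed));
  return static_cast<int16_t>(clamped);
}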
aec_dump_applied_input_level_ = msg.has_applied_input_volume() - ? absl::optional(msg.applied_input_volume()) - : absl::nullopt; + ? std::optional(msg.applied_input_volume()) + : std::nullopt; } void AecDumpBasedSimulator::VerifyProcessStreamBitExactness( @@ -241,7 +255,7 @@ void AecDumpBasedSimulator::Process() { artificial_nearend_buffer_reader_.reset( new ChannelBufferWavReader(std::move(artificial_nearend_file))); artificial_nearend_buf_.reset(new ChannelBuffer( - rtc::CheckedDivExact(sample_rate_hz, kChunksPerSecond), 1)); + CheckedDivExact(sample_rate_hz, kChunksPerSecond), 1)); } const bool use_dump_file = !settings_.aec_dump_input_string.has_value(); diff --git a/modules/audio_processing/test/aec_dump_based_simulator.h b/modules/audio_processing/test/aec_dump_based_simulator.h index e2c1f3e4ba..e5bee3375b 100644 --- a/modules/audio_processing/test/aec_dump_based_simulator.h +++ b/modules/audio_processing/test/aec_dump_based_simulator.h @@ -11,19 +11,22 @@ #ifndef MODULES_AUDIO_PROCESSING_TEST_AEC_DUMP_BASED_SIMULATOR_H_ #define MODULES_AUDIO_PROCESSING_TEST_AEC_DUMP_BASED_SIMULATOR_H_ +#include #include -#include +#include +#include "absl/base/nullability.h" +#include "api/audio/audio_processing.h" +#include "api/scoped_refptr.h" +#include "common_audio/channel_buffer.h" #include "modules/audio_processing/test/audio_processing_simulator.h" -#include "rtc_base/ignore_wundef.h" +#include "modules/audio_processing/test/test_utils.h" -RTC_PUSH_IGNORING_WUNDEF() #ifdef WEBRTC_ANDROID_PLATFORM_BUILD #include "external/webrtc/webrtc/modules/audio_processing/debug.pb.h" #else #include "modules/audio_processing/debug.pb.h" #endif -RTC_POP_IGNORING_WUNDEF() namespace webrtc { namespace test { @@ -31,9 +34,9 @@ namespace test { // Used to perform an audio processing simulation from an aec dump. class AecDumpBasedSimulator final : public AudioProcessingSimulator { public: - AecDumpBasedSimulator(const SimulationSettings& settings, - rtc::scoped_refptr audio_processing, - std::unique_ptr ap_builder); + AecDumpBasedSimulator( + const SimulationSettings& settings, + absl_nonnull scoped_refptr audio_processing); AecDumpBasedSimulator() = delete; AecDumpBasedSimulator(const AecDumpBasedSimulator&) = delete; diff --git a/modules/audio_processing/test/android/apmtest/AndroidManifest.xml b/modules/audio_processing/test/android/apmtest/AndroidManifest.xml deleted file mode 100644 index c6063b3d76..0000000000 --- a/modules/audio_processing/test/android/apmtest/AndroidManifest.xml +++ /dev/null @@ -1,30 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - diff --git a/modules/audio_processing/test/android/apmtest/default.properties b/modules/audio_processing/test/android/apmtest/default.properties deleted file mode 100644 index 9a2c9f6c88..0000000000 --- a/modules/audio_processing/test/android/apmtest/default.properties +++ /dev/null @@ -1,11 +0,0 @@ -# This file is automatically generated by Android Tools. -# Do not modify this file -- YOUR CHANGES WILL BE ERASED! -# -# This file must be checked in Version Control Systems. -# -# To customize properties used by the Ant build system use, -# "build.properties", and override values to adapt the script to your -# project structure. - -# Project target. 
-target=android-9 diff --git a/modules/audio_processing/test/android/apmtest/jni/main.c b/modules/audio_processing/test/android/apmtest/jni/main.c deleted file mode 100644 index 2e19635683..0000000000 --- a/modules/audio_processing/test/android/apmtest/jni/main.c +++ /dev/null @@ -1,307 +0,0 @@ -/* - * Copyright (C) 2010 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -//BEGIN_INCLUDE(all) -#include -#include - -#include -#include - -#include -#include -#include - -#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, "native-activity", __VA_ARGS__)) -#define LOGW(...) ((void)__android_log_print(ANDROID_LOG_WARN, "native-activity", __VA_ARGS__)) - -/** - * Our saved state data. - */ -struct saved_state { - float angle; - int32_t x; - int32_t y; -}; - -/** - * Shared state for our app. - */ -struct engine { - struct android_app* app; - - ASensorManager* sensorManager; - const ASensor* accelerometerSensor; - ASensorEventQueue* sensorEventQueue; - - int animating; - EGLDisplay display; - EGLSurface surface; - EGLContext context; - int32_t width; - int32_t height; - struct saved_state state; -}; - -/** - * Initialize an EGL context for the current display. - */ -static int engine_init_display(struct engine* engine) { - // initialize OpenGL ES and EGL - - /* - * Here specify the attributes of the desired configuration. - * Below, we select an EGLConfig with at least 8 bits per color - * component compatible with on-screen windows - */ - const EGLint attribs[] = { - EGL_SURFACE_TYPE, EGL_WINDOW_BIT, - EGL_BLUE_SIZE, 8, - EGL_GREEN_SIZE, 8, - EGL_RED_SIZE, 8, - EGL_NONE - }; - EGLint w, h, dummy, format; - EGLint numConfigs; - EGLConfig config; - EGLSurface surface; - EGLContext context; - - EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY); - - eglInitialize(display, 0, 0); - - /* Here, the application chooses the configuration it desires. In this - * sample, we have a very simplified selection process, where we pick - * the first EGLConfig that matches our criteria */ - eglChooseConfig(display, attribs, &config, 1, &numConfigs); - - /* EGL_NATIVE_VISUAL_ID is an attribute of the EGLConfig that is - * guaranteed to be accepted by ANativeWindow_setBuffersGeometry(). - * As soon as we picked a EGLConfig, we can safely reconfigure the - * ANativeWindow buffers to match, using EGL_NATIVE_VISUAL_ID. 
*/ - eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format); - - ANativeWindow_setBuffersGeometry(engine->app->window, 0, 0, format); - - surface = eglCreateWindowSurface(display, config, engine->app->window, NULL); - context = eglCreateContext(display, config, NULL, NULL); - - if (eglMakeCurrent(display, surface, surface, context) == EGL_FALSE) { - LOGW("Unable to eglMakeCurrent"); - return -1; - } - - eglQuerySurface(display, surface, EGL_WIDTH, &w); - eglQuerySurface(display, surface, EGL_HEIGHT, &h); - - engine->display = display; - engine->context = context; - engine->surface = surface; - engine->width = w; - engine->height = h; - engine->state.angle = 0; - - // Initialize GL state. - glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST); - glEnable(GL_CULL_FACE); - glShadeModel(GL_SMOOTH); - glDisable(GL_DEPTH_TEST); - - return 0; -} - -/** - * Just the current frame in the display. - */ -static void engine_draw_frame(struct engine* engine) { - if (engine->display == NULL) { - // No display. - return; - } - - // Just fill the screen with a color. - glClearColor(((float)engine->state.x)/engine->width, engine->state.angle, - ((float)engine->state.y)/engine->height, 1); - glClear(GL_COLOR_BUFFER_BIT); - - eglSwapBuffers(engine->display, engine->surface); -} - -/** - * Tear down the EGL context currently associated with the display. - */ -static void engine_term_display(struct engine* engine) { - if (engine->display != EGL_NO_DISPLAY) { - eglMakeCurrent(engine->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT); - if (engine->context != EGL_NO_CONTEXT) { - eglDestroyContext(engine->display, engine->context); - } - if (engine->surface != EGL_NO_SURFACE) { - eglDestroySurface(engine->display, engine->surface); - } - eglTerminate(engine->display); - } - engine->animating = 0; - engine->display = EGL_NO_DISPLAY; - engine->context = EGL_NO_CONTEXT; - engine->surface = EGL_NO_SURFACE; -} - -/** - * Process the next input event. - */ -static int32_t engine_handle_input(struct android_app* app, AInputEvent* event) { - struct engine* engine = (struct engine*)app->userData; - if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_MOTION) { - engine->animating = 1; - engine->state.x = AMotionEvent_getX(event, 0); - engine->state.y = AMotionEvent_getY(event, 0); - return 1; - } - return 0; -} - -/** - * Process the next main command. - */ -static void engine_handle_cmd(struct android_app* app, int32_t cmd) { - struct engine* engine = (struct engine*)app->userData; - switch (cmd) { - case APP_CMD_SAVE_STATE: - // The system has asked us to save our current state. Do so. - engine->app->savedState = malloc(sizeof(struct saved_state)); - *((struct saved_state*)engine->app->savedState) = engine->state; - engine->app->savedStateSize = sizeof(struct saved_state); - break; - case APP_CMD_INIT_WINDOW: - // The window is being shown, get it ready. - if (engine->app->window != NULL) { - engine_init_display(engine); - engine_draw_frame(engine); - } - break; - case APP_CMD_TERM_WINDOW: - // The window is being hidden or closed, clean it up. - engine_term_display(engine); - break; - case APP_CMD_GAINED_FOCUS: - // When our app gains focus, we start monitoring the accelerometer. - if (engine->accelerometerSensor != NULL) { - ASensorEventQueue_enableSensor(engine->sensorEventQueue, - engine->accelerometerSensor); - // We'd like to get 60 events per second (in us). 
- ASensorEventQueue_setEventRate(engine->sensorEventQueue, - engine->accelerometerSensor, (1000L/60)*1000); - } - break; - case APP_CMD_LOST_FOCUS: - // When our app loses focus, we stop monitoring the accelerometer. - // This is to avoid consuming battery while not being used. - if (engine->accelerometerSensor != NULL) { - ASensorEventQueue_disableSensor(engine->sensorEventQueue, - engine->accelerometerSensor); - } - // Also stop animating. - engine->animating = 0; - engine_draw_frame(engine); - break; - } -} - -/** - * This is the main entry point of a native application that is using - * android_native_app_glue. It runs in its own thread, with its own - * event loop for receiving input events and doing other things. - */ -void android_main(struct android_app* state) { - struct engine engine; - - // Make sure glue isn't stripped. - app_dummy(); - - memset(&engine, 0, sizeof(engine)); - state->userData = &engine; - state->onAppCmd = engine_handle_cmd; - state->onInputEvent = engine_handle_input; - engine.app = state; - - // Prepare to monitor accelerometer - engine.sensorManager = ASensorManager_getInstance(); - engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager, - ASENSOR_TYPE_ACCELEROMETER); - engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager, - state->looper, LOOPER_ID_USER, NULL, NULL); - - if (state->savedState != NULL) { - // We are starting with a previous saved state; restore from it. - engine.state = *(struct saved_state*)state->savedState; - } - - // loop waiting for stuff to do. - - while (1) { - // Read all pending events. - int ident; - int events; - struct android_poll_source* source; - - // If not animating, we will block forever waiting for events. - // If animating, we loop until all events are read, then continue - // to draw the next frame of animation. - while ((ident=ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events, - (void**)&source)) >= 0) { - - // Process this event. - if (source != NULL) { - source->process(state, source); - } - - // If a sensor has data, process it now. - if (ident == LOOPER_ID_USER) { - if (engine.accelerometerSensor != NULL) { - ASensorEvent event; - while (ASensorEventQueue_getEvents(engine.sensorEventQueue, - &event, 1) > 0) { - LOGI("accelerometer: x=%f y=%f z=%f", - event.acceleration.x, event.acceleration.y, - event.acceleration.z); - } - } - } - - // Check if we are exiting. - if (state->destroyRequested != 0) { - engine_term_display(&engine); - return; - } - } - - if (engine.animating) { - // Done with events; draw next animation frame. - engine.state.angle += .01f; - if (engine.state.angle > 1) { - engine.state.angle = 0; - } - - // Drawing is throttled to the screen update rate, so there - // is no need to do timing here. 
- engine_draw_frame(&engine); - } - } -} -//END_INCLUDE(all) diff --git a/modules/audio_processing/test/android/apmtest/res/values/strings.xml b/modules/audio_processing/test/android/apmtest/res/values/strings.xml deleted file mode 100644 index d0bd0f3051..0000000000 --- a/modules/audio_processing/test/android/apmtest/res/values/strings.xml +++ /dev/null @@ -1,4 +0,0 @@ - - - apmtest - diff --git a/modules/audio_processing/test/api_call_statistics.cc b/modules/audio_processing/test/api_call_statistics.cc index ee8a308596..f3fcb12c7f 100644 --- a/modules/audio_processing/test/api_call_statistics.cc +++ b/modules/audio_processing/test/api_call_statistics.cc @@ -52,12 +52,12 @@ void ApiCallStatistics::PrintReport() const { sum_capture += v.duration_nanos; } } - min_render /= rtc::kNumNanosecsPerMicrosec; - max_render /= rtc::kNumNanosecsPerMicrosec; - sum_render /= rtc::kNumNanosecsPerMicrosec; - min_capture /= rtc::kNumNanosecsPerMicrosec; - max_capture /= rtc::kNumNanosecsPerMicrosec; - sum_capture /= rtc::kNumNanosecsPerMicrosec; + min_render /= kNumNanosecsPerMicrosec; + max_render /= kNumNanosecsPerMicrosec; + sum_render /= kNumNanosecsPerMicrosec; + min_capture /= kNumNanosecsPerMicrosec; + max_capture /= kNumNanosecsPerMicrosec; + sum_capture /= kNumNanosecsPerMicrosec; avg_render = num_render > 0 ? sum_render / num_render : 0; avg_capture = num_capture > 0 ? sum_capture / num_capture : 0; @@ -83,7 +83,7 @@ void ApiCallStatistics::WriteReportToFile(absl::string_view filename) const { } else { *out << "capture, "; } - *out << (v.duration_nanos / rtc::kNumNanosecsPerMicrosec) << std::endl; + *out << (v.duration_nanos / kNumNanosecsPerMicrosec) << std::endl; } } diff --git a/modules/audio_processing/test/apmtest.m b/modules/audio_processing/test/apmtest.m index 1c8183c3ec..1367295d5d 100644 --- a/modules/audio_processing/test/apmtest.m +++ b/modules/audio_processing/test/apmtest.m @@ -1,3 +1,4 @@ +% // clang-format off % % Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. % diff --git a/modules/audio_processing/test/audio_buffer_tools.cc b/modules/audio_processing/test/audio_buffer_tools.cc index 64fb9c7ab1..10e6ad4585 100644 --- a/modules/audio_processing/test/audio_buffer_tools.cc +++ b/modules/audio_processing/test/audio_buffer_tools.cc @@ -27,7 +27,7 @@ void SetupFrame(const StreamConfig& stream_config, } void CopyVectorToAudioBuffer(const StreamConfig& stream_config, - rtc::ArrayView source, + ArrayView source, AudioBuffer* destination) { std::vector input; std::vector input_samples; diff --git a/modules/audio_processing/test/audio_buffer_tools.h b/modules/audio_processing/test/audio_buffer_tools.h index faac4bf9ff..a324bb23ca 100644 --- a/modules/audio_processing/test/audio_buffer_tools.h +++ b/modules/audio_processing/test/audio_buffer_tools.h @@ -14,15 +14,15 @@ #include #include "api/array_view.h" +#include "api/audio/audio_processing.h" #include "modules/audio_processing/audio_buffer.h" -#include "modules/audio_processing/include/audio_processing.h" namespace webrtc { namespace test { // Copies a vector into an audiobuffer. void CopyVectorToAudioBuffer(const StreamConfig& stream_config, - rtc::ArrayView source, + ArrayView source, AudioBuffer* destination); // Extracts a vector from an audiobuffer. 
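`ApiCallStatistics::PrintReport` above accumulates per-call durations in nanoseconds and only converts to microseconds (via `kNumNanosecsPerMicrosec`, i.e. 1000) when reporting. A small sketch of that min/max/average reduction, using hypothetical names rather than the real ApiCallStatistics types:

#include <algorithm>
#include <cstdint>
#include <vector>

constexpr int64_t kNanosPerMicro = 1000;

struct DurationReportMicros {
  int64_t min_us = 0;
  int64_t max_us = 0;
  int64_t avg_us = 0;
};

// Reduce nanosecond call durations to min/max/average in microseconds,
// mirroring how the render and capture calls are summarized above
// (sum is converted to microseconds before dividing by the call count).
DurationReportMicros Summarize(const std::vector<int64_t>& durations_ns) {
  DurationReportMicros report;
  if (durations_ns.empty()) {
    return report;
  }
  int64_t min_ns = durations_ns[0];
  int64_t max_ns = durations_ns[0];
  int64_t sum_ns = 0;
  for (int64_t d : durations_ns) {
    min_ns = std::min(min_ns, d);
    max_ns = std::max(max_ns, d);
    sum_ns += d;
  }
  report.min_us = min_ns / kNanosPerMicro;
  report.max_us = max_ns / kNanosPerMicro;
  report.avg_us = (sum_ns / kNanosPerMicro) /
                  static_cast<int64_t>(durations_ns.size());
  return report;
}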
diff --git a/modules/audio_processing/test/audio_processing_builder_for_testing.cc b/modules/audio_processing/test/audio_processing_builder_for_testing.cc deleted file mode 100644 index 6bd266dc58..0000000000 --- a/modules/audio_processing/test/audio_processing_builder_for_testing.cc +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/test/audio_processing_builder_for_testing.h" - -#include -#include - -#include "modules/audio_processing/audio_processing_impl.h" - -namespace webrtc { - -AudioProcessingBuilderForTesting::AudioProcessingBuilderForTesting() = default; -AudioProcessingBuilderForTesting::~AudioProcessingBuilderForTesting() = default; - -#ifdef WEBRTC_EXCLUDE_AUDIO_PROCESSING_MODULE - -rtc::scoped_refptr AudioProcessingBuilderForTesting::Create() { - return rtc::make_ref_counted( - config_, std::move(capture_post_processing_), - std::move(render_pre_processing_), std::move(echo_control_factory_), - std::move(echo_detector_), std::move(capture_analyzer_)); -} - -#else - -rtc::scoped_refptr AudioProcessingBuilderForTesting::Create() { - AudioProcessingBuilder builder; - TransferOwnershipsToBuilder(&builder); - return builder.SetConfig(config_).Create(); -} - -#endif - -void AudioProcessingBuilderForTesting::TransferOwnershipsToBuilder( - AudioProcessingBuilder* builder) { - builder->SetCapturePostProcessing(std::move(capture_post_processing_)); - builder->SetRenderPreProcessing(std::move(render_pre_processing_)); - builder->SetEchoControlFactory(std::move(echo_control_factory_)); - builder->SetEchoDetector(std::move(echo_detector_)); - builder->SetCaptureAnalyzer(std::move(capture_analyzer_)); -} - -} // namespace webrtc diff --git a/modules/audio_processing/test/audio_processing_simulator.cc b/modules/audio_processing/test/audio_processing_simulator.cc index 7497d49fde..a273083720 100644 --- a/modules/audio_processing/test/audio_processing_simulator.cc +++ b/modules/audio_processing/test/audio_processing_simulator.cc @@ -10,7 +10,8 @@ #include "modules/audio_processing/test/audio_processing_simulator.h" -#include +#include +#include #include #include #include @@ -18,52 +19,30 @@ #include #include +#include "absl/base/nullability.h" #include "absl/strings/string_view.h" -#include "api/audio/echo_canceller3_config_json.h" -#include "api/audio/echo_canceller3_factory.h" -#include "api/audio/echo_detector_creator.h" +#include "api/audio/audio_processing.h" +#include "api/scoped_refptr.h" +#include "common_audio/channel_buffer.h" +#include "common_audio/include/audio_util.h" +#include "common_audio/wav_file.h" #include "modules/audio_processing/aec_dump/aec_dump_factory.h" -#include "modules/audio_processing/echo_control_mobile_impl.h" -#include "modules/audio_processing/include/audio_processing.h" #include "modules/audio_processing/logging/apm_data_dumper.h" +#include "modules/audio_processing/test/api_call_statistics.h" #include "modules/audio_processing/test/fake_recording_device.h" +#include "modules/audio_processing/test/test_utils.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/strings/json.h" #include 
"rtc_base/strings/string_builder.h" +#include "rtc_base/time_utils.h" namespace webrtc { namespace test { namespace { -// Helper for reading JSON from a file and parsing it to an AEC3 configuration. -EchoCanceller3Config ReadAec3ConfigFromJsonFile(absl::string_view filename) { - std::string json_string; - std::string s; - std::ifstream f(std::string(filename).c_str()); - if (f.fail()) { - std::cout << "Failed to open the file " << filename << std::endl; - RTC_CHECK_NOTREACHED(); - } - while (std::getline(f, s)) { - json_string += s; - } - - bool parsing_successful; - EchoCanceller3Config cfg; - Aec3ConfigFromJsonString(json_string, &cfg, &parsing_successful); - if (!parsing_successful) { - std::cout << "Parsing of json string failed: " << std::endl - << json_string << std::endl; - RTC_CHECK_NOTREACHED(); - } - RTC_CHECK(EchoCanceller3Config::Validate(&cfg)); - - return cfg; -} std::string GetIndexedOutputWavFilename(absl::string_view wav_name, int counter) { - rtc::StringBuilder ss; + StringBuilder ss; ss << wav_name.substr(0, wav_name.size() - 4) << "_" << counter << wav_name.substr(wav_name.size() - 4); return ss.Release(); @@ -92,12 +71,12 @@ class ScopedTimer { public: ScopedTimer(ApiCallStatistics* api_call_statistics, ApiCallStatistics::CallType call_type) - : start_time_(rtc::TimeNanos()), + : start_time_(TimeNanos()), call_type_(call_type), api_call_statistics_(api_call_statistics) {} ~ScopedTimer() { - api_call_statistics_->Add(rtc::TimeNanos() - start_time_, call_type_); + api_call_statistics_->Add(TimeNanos() - start_time_, call_type_); } private: @@ -114,8 +93,7 @@ SimulationSettings::~SimulationSettings() = default; AudioProcessingSimulator::AudioProcessingSimulator( const SimulationSettings& settings, - rtc::scoped_refptr audio_processing, - std::unique_ptr ap_builder) + absl_nonnull scoped_refptr audio_processing) : settings_(settings), ap_(std::move(audio_processing)), applied_input_volume_(settings.initial_mic_level), @@ -149,55 +127,6 @@ AudioProcessingSimulator::AudioProcessingSimulator( if (settings_.simulate_mic_gain) RTC_LOG(LS_VERBOSE) << "Simulating analog mic gain"; - - // Create the audio processing object. - RTC_CHECK(!(ap_ && ap_builder)) - << "The AudioProcessing and the AudioProcessingBuilder cannot both be " - "specified at the same time."; - - if (ap_) { - RTC_CHECK(!settings_.aec_settings_filename); - RTC_CHECK(!settings_.print_aec_parameter_values); - } else { - // Use specied builder if such is provided, otherwise create a new builder. - std::unique_ptr builder = - !!ap_builder ? std::move(ap_builder) - : std::make_unique(); - - // Create and set an EchoCanceller3Factory if needed. - const bool use_aec = settings_.use_aec && *settings_.use_aec; - if (use_aec) { - EchoCanceller3Config cfg; - if (settings_.aec_settings_filename) { - if (settings_.use_verbose_logging) { - std::cout << "Reading AEC Parameters from JSON input." 
<< std::endl; - } - cfg = ReadAec3ConfigFromJsonFile(*settings_.aec_settings_filename); - } - - if (settings_.linear_aec_output_filename) { - cfg.filter.export_linear_aec_output = true; - } - - if (settings_.print_aec_parameter_values) { - if (!settings_.use_quiet_output) { - std::cout << "AEC settings:" << std::endl; - } - std::cout << Aec3ConfigToJsonString(cfg) << std::endl; - } - - auto echo_control_factory = std::make_unique(cfg); - builder->SetEchoControlFactory(std::move(echo_control_factory)); - } - - if (settings_.use_ed && *settings.use_ed) { - builder->SetEchoDetector(CreateEchoDetector()); - } - - // Create an audio processing object. - ap_ = builder->Create(); - RTC_CHECK(ap_); - } } AudioProcessingSimulator::~AudioProcessingSimulator() { @@ -362,24 +291,24 @@ void AudioProcessingSimulator::SetupBuffersConfigsOutputs( int reverse_output_num_channels) { in_config_ = StreamConfig(input_sample_rate_hz, input_num_channels); in_buf_.reset(new ChannelBuffer( - rtc::CheckedDivExact(input_sample_rate_hz, kChunksPerSecond), + CheckedDivExact(input_sample_rate_hz, kChunksPerSecond), input_num_channels)); reverse_in_config_ = StreamConfig(reverse_input_sample_rate_hz, reverse_input_num_channels); reverse_in_buf_.reset(new ChannelBuffer( - rtc::CheckedDivExact(reverse_input_sample_rate_hz, kChunksPerSecond), + CheckedDivExact(reverse_input_sample_rate_hz, kChunksPerSecond), reverse_input_num_channels)); out_config_ = StreamConfig(output_sample_rate_hz, output_num_channels); out_buf_.reset(new ChannelBuffer( - rtc::CheckedDivExact(output_sample_rate_hz, kChunksPerSecond), + CheckedDivExact(output_sample_rate_hz, kChunksPerSecond), output_num_channels)); reverse_out_config_ = StreamConfig(reverse_output_sample_rate_hz, reverse_output_num_channels); reverse_out_buf_.reset(new ChannelBuffer( - rtc::CheckedDivExact(reverse_output_sample_rate_hz, kChunksPerSecond), + CheckedDivExact(reverse_output_sample_rate_hz, kChunksPerSecond), reverse_output_num_channels)); fwd_frame_.SetFormat(input_sample_rate_hz, input_num_channels); @@ -387,7 +316,7 @@ void AudioProcessingSimulator::SetupBuffersConfigsOutputs( reverse_input_num_channels); if (settings_.use_verbose_logging) { - rtc::LogMessage::LogToDebug(rtc::LS_VERBOSE); + LogMessage::LogToDebug(LS_VERBOSE); std::cout << "Sample rates:" << std::endl; std::cout << " Forward input: " << input_sample_rate_hz << std::endl; @@ -515,6 +444,10 @@ void AudioProcessingSimulator::ConfigureAudioProcessor() { apm_config.gain_controller2.adaptive_digital.enabled = *settings_.agc2_use_adaptive_gain; } + if (settings_.agc2_use_input_volume_controller) { + apm_config.gain_controller2.input_volume_controller.enabled = + *settings_.agc2_use_input_volume_controller; + } } if (settings_.use_pre_amplifier) { apm_config.pre_amplifier.enabled = *settings_.use_pre_amplifier; @@ -622,7 +555,7 @@ void AudioProcessingSimulator::ConfigureAudioProcessor() { if (settings_.aec_dump_output_filename) { ap_->AttachAecDump(AecDumpFactory::Create( - *settings_.aec_dump_output_filename, -1, &worker_queue_)); + *settings_.aec_dump_output_filename, -1, worker_queue_.Get())); } } diff --git a/modules/audio_processing/test/audio_processing_simulator.h b/modules/audio_processing/test/audio_processing_simulator.h index e40d818bd8..3d78cb44ef 100644 --- a/modules/audio_processing/test/audio_processing_simulator.h +++ b/modules/audio_processing/test/audio_processing_simulator.h @@ -11,21 +11,27 @@ #ifndef MODULES_AUDIO_PROCESSING_TEST_AUDIO_PROCESSING_SIMULATOR_H_ #define 
MODULES_AUDIO_PROCESSING_TEST_AUDIO_PROCESSING_SIMULATOR_H_ -#include +#include +#include +#include #include -#include #include +#include #include +#include -#include "absl/types/optional.h" +#include "absl/base/nullability.h" +#include "absl/strings/string_view.h" +#include "api/audio/audio_processing.h" +#include "api/scoped_refptr.h" #include "common_audio/channel_buffer.h" #include "common_audio/include/audio_util.h" -#include "modules/audio_processing/include/audio_processing.h" +#include "common_audio/wav_file.h" #include "modules/audio_processing/test/api_call_statistics.h" #include "modules/audio_processing/test/fake_recording_device.h" #include "modules/audio_processing/test/test_utils.h" +#include "rtc_base/checks.h" #include "rtc_base/task_queue_for_test.h" -#include "rtc_base/time_utils.h" namespace webrtc { namespace test { @@ -34,41 +40,40 @@ static const int kChunksPerSecond = 1000 / AudioProcessing::kChunkSizeMs; struct Int16Frame { void SetFormat(int sample_rate_hz, int num_channels) { - this->sample_rate_hz = sample_rate_hz; - samples_per_channel = - rtc::CheckedDivExact(sample_rate_hz, kChunksPerSecond); - this->num_channels = num_channels; + sample_rate_hz_ = sample_rate_hz; + samples_per_channel_ = CheckedDivExact(sample_rate_hz, kChunksPerSecond); + num_channels_ = num_channels; config = StreamConfig(sample_rate_hz, num_channels); - data.resize(num_channels * samples_per_channel); + data.resize(num_channels * samples_per_channel_); } void CopyTo(ChannelBuffer* dest) { RTC_DCHECK(dest); - RTC_CHECK_EQ(num_channels, dest->num_channels()); - RTC_CHECK_EQ(samples_per_channel, dest->num_frames()); + RTC_CHECK_EQ(num_channels_, dest->num_channels()); + RTC_CHECK_EQ(samples_per_channel_, dest->num_frames()); // Copy the data from the input buffer. 
- std::vector tmp(samples_per_channel * num_channels); + std::vector tmp(samples_per_channel_ * num_channels_); S16ToFloat(data.data(), tmp.size(), tmp.data()); - Deinterleave(tmp.data(), samples_per_channel, num_channels, + Deinterleave(tmp.data(), samples_per_channel_, num_channels_, dest->channels()); } void CopyFrom(const ChannelBuffer& src) { - RTC_CHECK_EQ(src.num_channels(), num_channels); - RTC_CHECK_EQ(src.num_frames(), samples_per_channel); - data.resize(num_channels * samples_per_channel); + RTC_CHECK_EQ(src.num_channels(), num_channels_); + RTC_CHECK_EQ(src.num_frames(), samples_per_channel_); + data.resize(num_channels_ * samples_per_channel_); int16_t* dest_data = data.data(); - for (int ch = 0; ch < num_channels; ++ch) { - for (int sample = 0; sample < samples_per_channel; ++sample) { - dest_data[sample * num_channels + ch] = + for (int ch = 0; ch < num_channels_; ++ch) { + for (int sample = 0; sample < samples_per_channel_; ++sample) { + dest_data[sample * num_channels_ + ch] = src.channels()[ch][sample] * 32767; } } } - int sample_rate_hz; - int samples_per_channel; - int num_channels; + int sample_rate_hz_; + int samples_per_channel_; + int num_channels_; StreamConfig config; @@ -80,86 +85,87 @@ struct SimulationSettings { SimulationSettings(); SimulationSettings(const SimulationSettings&); ~SimulationSettings(); - absl::optional stream_delay; - absl::optional use_stream_delay; - absl::optional output_sample_rate_hz; - absl::optional output_num_channels; - absl::optional reverse_output_sample_rate_hz; - absl::optional reverse_output_num_channels; - absl::optional output_filename; - absl::optional reverse_output_filename; - absl::optional input_filename; - absl::optional reverse_input_filename; - absl::optional artificial_nearend_filename; - absl::optional linear_aec_output_filename; - absl::optional use_aec; - absl::optional use_aecm; - absl::optional use_ed; // Residual Echo Detector. - absl::optional ed_graph_output_filename; - absl::optional use_agc; - absl::optional use_agc2; - absl::optional use_pre_amplifier; - absl::optional use_capture_level_adjustment; - absl::optional use_analog_mic_gain_emulation; - absl::optional use_hpf; - absl::optional use_ns; - absl::optional use_ts; - absl::optional use_analog_agc; - absl::optional use_all; - absl::optional analog_agc_use_digital_adaptive_controller; - absl::optional agc_mode; - absl::optional agc_target_level; - absl::optional use_agc_limiter; - absl::optional agc_compression_gain; - absl::optional agc2_use_adaptive_gain; - absl::optional agc2_fixed_gain_db; - absl::optional pre_amplifier_gain_factor; - absl::optional pre_gain_factor; - absl::optional post_gain_factor; - absl::optional analog_mic_gain_emulation_initial_level; - absl::optional ns_level; - absl::optional ns_analysis_on_linear_aec_output; - absl::optional override_key_pressed; - absl::optional maximum_internal_processing_rate; + std::optional stream_delay; + std::optional use_stream_delay; + std::optional output_sample_rate_hz; + std::optional output_num_channels; + std::optional reverse_output_sample_rate_hz; + std::optional reverse_output_num_channels; + std::optional output_filename; + std::optional reverse_output_filename; + std::optional input_filename; + std::optional reverse_input_filename; + std::optional artificial_nearend_filename; + std::optional linear_aec_output_filename; + std::optional use_aec; + std::optional use_aecm; + std::optional use_ed; // Residual Echo Detector. 
+ std::optional ed_graph_output_filename; + std::optional use_agc; + std::optional use_agc2; + std::optional use_pre_amplifier; + std::optional use_capture_level_adjustment; + std::optional use_analog_mic_gain_emulation; + std::optional use_hpf; + std::optional use_ns; + std::optional use_ts; + std::optional use_analog_agc; + std::optional use_all; + std::optional analog_agc_use_digital_adaptive_controller; + std::optional agc_mode; + std::optional agc_target_level; + std::optional use_agc_limiter; + std::optional agc_compression_gain; + std::optional agc2_use_adaptive_gain; + std::optional agc2_fixed_gain_db; + std::optional agc2_use_input_volume_controller; + std::optional pre_amplifier_gain_factor; + std::optional pre_gain_factor; + std::optional post_gain_factor; + std::optional analog_mic_gain_emulation_initial_level; + std::optional ns_level; + std::optional ns_analysis_on_linear_aec_output; + std::optional override_key_pressed; + std::optional maximum_internal_processing_rate; int initial_mic_level; bool simulate_mic_gain = false; - absl::optional multi_channel_render; - absl::optional multi_channel_capture; - absl::optional simulated_mic_kind; - absl::optional frame_for_sending_capture_output_used_false; - absl::optional frame_for_sending_capture_output_used_true; + std::optional multi_channel_render; + std::optional multi_channel_capture; + std::optional simulated_mic_kind; + std::optional frame_for_sending_capture_output_used_false; + std::optional frame_for_sending_capture_output_used_true; bool report_performance = false; - absl::optional performance_report_output_filename; + std::optional performance_report_output_filename; bool report_bitexactness = false; bool use_verbose_logging = false; bool use_quiet_output = false; bool discard_all_settings_in_aecdump = true; - absl::optional aec_dump_input_filename; - absl::optional aec_dump_output_filename; + std::optional aec_dump_input_filename; + std::optional aec_dump_output_filename; bool fixed_interface = false; bool store_intermediate_output = false; bool print_aec_parameter_values = false; bool dump_internal_data = false; WavFile::SampleFormat wav_output_format = WavFile::SampleFormat::kInt16; - absl::optional dump_internal_data_output_dir; - absl::optional dump_set_to_use; - absl::optional call_order_input_filename; - absl::optional call_order_output_filename; - absl::optional aec_settings_filename; - absl::optional aec_dump_input_string; + std::optional dump_internal_data_output_dir; + std::optional dump_set_to_use; + std::optional call_order_input_filename; + std::optional call_order_output_filename; + std::optional aec_settings_filename; + std::optional aec_dump_input_string; std::vector* processed_capture_samples = nullptr; bool analysis_only = false; - absl::optional dump_start_frame; - absl::optional dump_end_frame; - absl::optional init_to_process; + std::optional dump_start_frame; + std::optional dump_end_frame; + std::optional init_to_process; }; // Provides common functionality for performing audioprocessing simulations. 
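SimulationSettings above is essentially a bag of std::optional fields: an unset optional means "leave the APM default alone", while a set one is applied verbatim, as in the `agc2_use_input_volume_controller` handling in ConfigureAudioProcessor earlier in this CL. A minimal sketch of that pattern with a hypothetical config struct (not the real AudioProcessing::Config):

#include <optional>

struct ExampleConfig {
  struct {
    bool enabled = false;
  } input_volume_controller;
};

struct ExampleSettings {
  // std::nullopt means "the flag was not given; do not touch the default".
  std::optional<bool> use_input_volume_controller;
};

// Apply only the settings that were explicitly specified on the command
// line; everything else keeps the config's default value.
void ApplySettings(const ExampleSettings& settings, ExampleConfig& config) {
  if (settings.use_input_volume_controller) {
    config.input_volume_controller.enabled =
        *settings.use_input_volume_controller;
  }
}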
class AudioProcessingSimulator { public: - AudioProcessingSimulator(const SimulationSettings& settings, - rtc::scoped_refptr audio_processing, - std::unique_ptr ap_builder); + AudioProcessingSimulator( + const SimulationSettings& settings, + absl_nonnull scoped_refptr audio_processing); AudioProcessingSimulator() = delete; AudioProcessingSimulator(const AudioProcessingSimulator&) = delete; @@ -203,7 +209,7 @@ class AudioProcessingSimulator { int capture_frames_since_init) const; const SimulationSettings settings_; - rtc::scoped_refptr ap_; + scoped_refptr ap_; std::unique_ptr> in_buf_; std::unique_ptr> out_buf_; @@ -219,7 +225,7 @@ class AudioProcessingSimulator { Int16Frame rev_frame_; Int16Frame fwd_frame_; bool bitexact_output_ = true; - absl::optional aec_dump_applied_input_level_ = 0; + std::optional aec_dump_applied_input_level_ = 0; protected: size_t output_reset_counter_ = 0; diff --git a/modules/audio_processing/test/audioproc_float_impl.cc b/modules/audio_processing/test/audioproc_float_impl.cc index c23ec74366..294162121f 100644 --- a/modules/audio_processing/test/audioproc_float_impl.cc +++ b/modules/audio_processing/test/audioproc_float_impl.cc @@ -10,24 +10,36 @@ #include "modules/audio_processing/test/audioproc_float_impl.h" -#include - +#include +#include +#include +#include #include #include +#include #include #include #include +#include "absl/base/nullability.h" #include "absl/flags/flag.h" #include "absl/flags/parse.h" #include "absl/strings/string_view.h" -#include "modules/audio_processing/include/audio_processing.h" +#include "api/audio/audio_processing.h" +#include "api/audio/builtin_audio_processing_builder.h" +#include "api/audio/echo_canceller3_config.h" +#include "api/audio/echo_canceller3_factory.h" +#include "api/audio/echo_detector_creator.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/field_trials.h" +#include "api/scoped_refptr.h" +#include "common_audio/wav_file.h" #include "modules/audio_processing/test/aec_dump_based_simulator.h" #include "modules/audio_processing/test/audio_processing_simulator.h" +#include "modules/audio_processing/test/echo_canceller3_config_json.h" #include "modules/audio_processing/test/wav_based_simulator.h" #include "rtc_base/checks.h" -#include "rtc_base/strings/string_builder.h" -#include "system_wrappers/include/field_trial.h" constexpr int kParameterNotSpecifiedValue = -10000; @@ -150,6 +162,10 @@ ABSL_FLAG(float, agc2_fixed_gain_db, kParameterNotSpecifiedValue, "AGC2 fixed gain (dB) to apply"); +ABSL_FLAG(int, + agc2_enable_input_volume_controller, + kParameterNotSpecifiedValue, + "Activate (1) or deactivate (0) the AGC2 input volume adjustments"); ABSL_FLAG(float, pre_amplifier_gain_factor, kParameterNotSpecifiedValue, @@ -335,19 +351,19 @@ const char kUsageDescription[] = "protobuf debug dump recordings.\n"; void SetSettingIfSpecified(absl::string_view value, - absl::optional* parameter) { + std::optional* parameter) { if (value.compare("") != 0) { *parameter = std::string(value); } } -void SetSettingIfSpecified(int value, absl::optional* parameter) { +void SetSettingIfSpecified(int value, std::optional* parameter) { if (value != kParameterNotSpecifiedValue) { *parameter = value; } } -void SetSettingIfSpecified(float value, absl::optional* parameter) { +void SetSettingIfSpecified(float value, std::optional* parameter) { constexpr float kFloatParameterNotSpecifiedValue = kParameterNotSpecifiedValue; if (value != kFloatParameterNotSpecifiedValue) { @@ -355,7 +371,7 
@@ void SetSettingIfSpecified(float value, absl::optional* parameter) { } } -void SetSettingIfFlagSet(int32_t flag, absl::optional* parameter) { +void SetSettingIfFlagSet(int32_t flag, std::optional* parameter) { if (flag == 0) { *parameter = false; } else if (flag == 1) { @@ -429,9 +445,10 @@ SimulationSettings CreateSettings() { &settings.agc_compression_gain); SetSettingIfFlagSet(absl::GetFlag(FLAGS_agc2_enable_adaptive_gain), &settings.agc2_use_adaptive_gain); - SetSettingIfSpecified(absl::GetFlag(FLAGS_agc2_fixed_gain_db), &settings.agc2_fixed_gain_db); + SetSettingIfFlagSet(absl::GetFlag(FLAGS_agc2_enable_input_volume_controller), + &settings.agc2_use_input_volume_controller); SetSettingIfSpecified(absl::GetFlag(FLAGS_pre_amplifier_gain_factor), &settings.pre_amplifier_gain_factor); SetSettingIfSpecified(absl::GetFlag(FLAGS_pre_gain_factor), @@ -502,14 +519,14 @@ SimulationSettings CreateSettings() { &settings.dump_end_frame); constexpr int kFramesPerSecond = 100; - absl::optional start_seconds; + std::optional start_seconds; SetSettingIfSpecified(absl::GetFlag(FLAGS_dump_start_seconds), &start_seconds); if (start_seconds) { settings.dump_start_frame = *start_seconds * kFramesPerSecond; } - absl::optional end_seconds; + std::optional end_seconds; SetSettingIfSpecified(absl::GetFlag(FLAGS_dump_end_seconds), &end_seconds); if (end_seconds) { settings.dump_end_frame = *end_seconds * kFramesPerSecond; @@ -528,10 +545,7 @@ void ReportConditionalErrorAndExit(bool condition, absl::string_view message) { } } -void PerformBasicParameterSanityChecks( - const SimulationSettings& settings, - bool pre_constructed_ap_provided, - bool pre_constructed_ap_builder_provided) { +void PerformBasicParameterSanityChecks(const SimulationSettings& settings) { if (settings.input_filename || settings.reverse_input_filename) { ReportConditionalErrorAndExit( !!settings.aec_dump_input_filename, @@ -716,23 +730,21 @@ void PerformBasicParameterSanityChecks( settings.pre_amplifier_gain_factor.has_value(), "Error: --pre_amplifier_gain_factor needs --pre_amplifier to be " "specified and set.\n"); +} +void CheckSettingsForBuiltinBuilderAreUnused( + const SimulationSettings& settings) { ReportConditionalErrorAndExit( - pre_constructed_ap_provided && pre_constructed_ap_builder_provided, - "Error: The AudioProcessing and the AudioProcessingBuilder cannot both " - "be specified at the same time.\n"); - - ReportConditionalErrorAndExit( - settings.aec_settings_filename && pre_constructed_ap_provided, + settings.aec_settings_filename.has_value(), "Error: The aec_settings_filename cannot be specified when a " "pre-constructed audio processing object is provided.\n"); ReportConditionalErrorAndExit( - settings.aec_settings_filename && pre_constructed_ap_provided, + settings.print_aec_parameter_values, "Error: The print_aec_parameter_values cannot be set when a " "pre-constructed audio processing object is provided.\n"); - if (settings.linear_aec_output_filename && pre_constructed_ap_provided) { + if (settings.linear_aec_output_filename) { std::cout << "Warning: For the linear AEC output to be stored, this must " "be configured in the AEC that is part of the provided " "AudioProcessing object." @@ -740,37 +752,94 @@ void PerformBasicParameterSanityChecks( } } -int RunSimulation(rtc::scoped_refptr audio_processing, - std::unique_ptr ap_builder, - int argc, - char* argv[], - absl::string_view input_aecdump, - std::vector* processed_capture_samples) { +// Helper for reading JSON from a file and parsing it to an AEC3 configuration. 
+EchoCanceller3Config ReadAec3ConfigFromJsonFile(absl::string_view filename) { + std::string json_string; + std::string s; + std::ifstream f(std::string(filename).c_str()); + if (f.fail()) { + std::cout << "Failed to open the file " << filename << std::endl; + RTC_CHECK_NOTREACHED(); + } + while (std::getline(f, s)) { + json_string += s; + } + + bool parsing_successful; + EchoCanceller3Config cfg; + Aec3ConfigFromJsonString(json_string, &cfg, &parsing_successful); + if (!parsing_successful) { + std::cout << "Parsing of json string failed: " << std::endl + << json_string << std::endl; + RTC_CHECK_NOTREACHED(); + } + RTC_CHECK(EchoCanceller3Config::Validate(&cfg)); + + return cfg; +} + +void SetDependencies(const SimulationSettings& settings, + BuiltinAudioProcessingBuilder& builder) { + // Create and set an EchoCanceller3Factory if needed. + if (settings.use_aec && *settings.use_aec) { + EchoCanceller3Config cfg; + if (settings.aec_settings_filename) { + if (settings.use_verbose_logging) { + std::cout << "Reading AEC Parameters from JSON input." << std::endl; + } + cfg = ReadAec3ConfigFromJsonFile(*settings.aec_settings_filename); + } + + if (settings.linear_aec_output_filename) { + cfg.filter.export_linear_aec_output = true; + } + + if (settings.print_aec_parameter_values) { + if (!settings.use_quiet_output) { + std::cout << "AEC settings:" << std::endl; + } + std::cout << Aec3ConfigToJsonString(cfg) << std::endl; + } + + builder.SetEchoControlFactory(std::make_unique(cfg)); + } + + if (settings.use_ed && *settings.use_ed) { + builder.SetEchoDetector(CreateEchoDetector()); + } +} + +int RunSimulation( + absl_nonnull std::unique_ptr ap_builder, + bool builtin_builder_provided, + int argc, + char* argv[]) { std::vector args = absl::ParseCommandLine(argc, argv); if (args.size() != 1) { printf("%s", kUsageDescription); return 1; } - // InitFieldTrialsFromString stores the char*, so the char array must - // outlive the application. 
- const std::string field_trials = absl::GetFlag(FLAGS_force_fieldtrials); - webrtc::field_trial::InitFieldTrialsFromString(field_trials.c_str()); + FieldTrials field_trials(absl::GetFlag(FLAGS_force_fieldtrials)); + const Environment env = CreateEnvironment(&field_trials); SimulationSettings settings = CreateSettings(); - if (!input_aecdump.empty()) { - settings.aec_dump_input_string = input_aecdump; - settings.processed_capture_samples = processed_capture_samples; - RTC_CHECK(settings.processed_capture_samples); + PerformBasicParameterSanityChecks(settings); + if (builtin_builder_provided) { + SetDependencies(settings, + static_cast(*ap_builder)); + } else { + CheckSettingsForBuiltinBuilderAreUnused(settings); } - PerformBasicParameterSanityChecks(settings, !!audio_processing, !!ap_builder); - std::unique_ptr processor; + scoped_refptr audio_processing = ap_builder->Build(env); + RTC_CHECK(audio_processing); + std::unique_ptr processor; if (settings.aec_dump_input_filename || settings.aec_dump_input_string) { - processor.reset(new AecDumpBasedSimulator( - settings, std::move(audio_processing), std::move(ap_builder))); + processor = std::make_unique( + settings, std::move(audio_processing)); } else { - processor.reset(new WavBasedSimulator(settings, std::move(audio_processing), - std::move(ap_builder))); + processor = std::make_unique( + settings, std::move(audio_processing)); } if (settings.analysis_only) { @@ -794,27 +863,25 @@ int RunSimulation(rtc::scoped_refptr audio_processing, std::cout << "The processing was not bitexact."; } } - return 0; } } // namespace -int AudioprocFloatImpl(rtc::scoped_refptr audio_processing, - int argc, - char* argv[]) { - return RunSimulation( - std::move(audio_processing), /*ap_builder=*/nullptr, argc, argv, - /*input_aecdump=*/"", /*processed_capture_samples=*/nullptr); +int AudioprocFloatImpl( + absl_nonnull std::unique_ptr ap_builder, + int argc, + char* argv[]) { + return RunSimulation(std::move(ap_builder), /*builtin_builder_provided=*/true, + argc, argv); } -int AudioprocFloatImpl(std::unique_ptr ap_builder, - int argc, - char* argv[], - absl::string_view input_aecdump, - std::vector* processed_capture_samples) { - return RunSimulation(/*audio_processing=*/nullptr, std::move(ap_builder), - argc, argv, input_aecdump, processed_capture_samples); +int AudioprocFloatImpl( + absl_nonnull std::unique_ptr ap_builder, + int argc, + char* argv[]) { + return RunSimulation(std::move(ap_builder), + /*builtin_builder_provided=*/false, argc, argv); } } // namespace test diff --git a/modules/audio_processing/test/audioproc_float_impl.h b/modules/audio_processing/test/audioproc_float_impl.h index 5ed3aefab7..6681605b3f 100644 --- a/modules/audio_processing/test/audioproc_float_impl.h +++ b/modules/audio_processing/test/audioproc_float_impl.h @@ -13,37 +13,22 @@ #include -#include "modules/audio_processing/include/audio_processing.h" +#include "absl/base/nullability.h" +#include "api/audio/audio_processing.h" +#include "api/audio/builtin_audio_processing_builder.h" namespace webrtc { namespace test { -// This function implements the audio processing simulation utility. Pass -// `input_aecdump` to provide the content of an AEC dump file as a string; if -// `input_aecdump` is not passed, a WAV or AEC input dump file must be specified -// via the `argv` argument. 
Pass `processed_capture_samples` to write in it the -// samples processed on the capture side; if `processed_capture_samples` is not -// passed, the output file can optionally be specified via the `argv` argument. -// Any audio_processing object specified in the input is used for the -// simulation. Note that when the audio_processing object is specified all -// functionality that relies on using the internal builder is deactivated, -// since the AudioProcessing object is already created and the builder is not -// used in the simulation. -int AudioprocFloatImpl(rtc::scoped_refptr audio_processing, - int argc, - char* argv[]); - -// This function implements the audio processing simulation utility. Pass -// `input_aecdump` to provide the content of an AEC dump file as a string; if -// `input_aecdump` is not passed, a WAV or AEC input dump file must be specified -// via the `argv` argument. Pass `processed_capture_samples` to write in it the -// samples processed on the capture side; if `processed_capture_samples` is not -// passed, the output file can optionally be specified via the `argv` argument. -int AudioprocFloatImpl(std::unique_ptr ap_builder, - int argc, - char* argv[], - absl::string_view input_aecdump, - std::vector* processed_capture_samples); +int AudioprocFloatImpl( + absl_nonnull std::unique_ptr ap_builder, + int argc, + char* argv[]); + +int AudioprocFloatImpl( + absl_nonnull std::unique_ptr ap_builder, + int argc, + char* argv[]); } // namespace test } // namespace webrtc diff --git a/modules/audio_processing/test/bitexactness_tools.cc b/modules/audio_processing/test/bitexactness_tools.cc index 0464345364..25d8da9fa9 100644 --- a/modules/audio_processing/test/bitexactness_tools.cc +++ b/modules/audio_processing/test/bitexactness_tools.cc @@ -57,7 +57,7 @@ std::string GetApmCaptureTestVectorFileName(int sample_rate_hz) { void ReadFloatSamplesFromStereoFile(size_t samples_per_channel, size_t num_channels, InputAudioFile* stereo_pcm_file, - rtc::ArrayView data) { + ArrayView data) { RTC_DCHECK_LE(num_channels, 2); RTC_DCHECK_EQ(data.size(), samples_per_channel * num_channels); std::vector read_samples(samples_per_channel * 2); @@ -75,14 +75,14 @@ void ReadFloatSamplesFromStereoFile(size_t samples_per_channel, ::testing::AssertionResult VerifyDeinterleavedArray( size_t samples_per_channel, size_t num_channels, - rtc::ArrayView reference, - rtc::ArrayView output, + ArrayView reference, + ArrayView output, float element_error_bound) { // Form vectors to compare the reference to. Only the first values of the // outputs are compared in order not having to specify all preceeding frames // as testvectors. const size_t reference_frame_length = - rtc::CheckedDivExact(reference.size(), num_channels); + CheckedDivExact(reference.size(), num_channels); std::vector output_to_verify; for (size_t channel_no = 0; channel_no < num_channels; ++channel_no) { @@ -95,8 +95,8 @@ ::testing::AssertionResult VerifyDeinterleavedArray( return VerifyArray(reference, output_to_verify, element_error_bound); } -::testing::AssertionResult VerifyArray(rtc::ArrayView reference, - rtc::ArrayView output, +::testing::AssertionResult VerifyArray(ArrayView reference, + ArrayView output, float element_error_bound) { // The vectors are deemed to be bitexact only if // a) output have a size at least as long as the reference. @@ -122,7 +122,7 @@ ::testing::AssertionResult VerifyArray(rtc::ArrayView reference, // Lambda function that produces a formatted string with the data in the // vector. 
- auto print_vector_in_c_format = [](rtc::ArrayView v, + auto print_vector_in_c_format = [](ArrayView v, size_t num_values_to_print) { std::string s = "{ "; for (size_t k = 0; k < std::min(num_values_to_print, v.size()); ++k) { diff --git a/modules/audio_processing/test/bitexactness_tools.h b/modules/audio_processing/test/bitexactness_tools.h index 2d3113276d..30b491609f 100644 --- a/modules/audio_processing/test/bitexactness_tools.h +++ b/modules/audio_processing/test/bitexactness_tools.h @@ -33,21 +33,21 @@ std::string GetApmCaptureTestVectorFileName(int sample_rate_hz); void ReadFloatSamplesFromStereoFile(size_t samples_per_channel, size_t num_channels, InputAudioFile* stereo_pcm_file, - rtc::ArrayView data); + ArrayView data); // Verifies a frame against a reference and returns the results as an // AssertionResult. ::testing::AssertionResult VerifyDeinterleavedArray( size_t samples_per_channel, size_t num_channels, - rtc::ArrayView reference, - rtc::ArrayView output, + ArrayView reference, + ArrayView output, float element_error_bound); // Verifies a vector against a reference and returns the results as an // AssertionResult. -::testing::AssertionResult VerifyArray(rtc::ArrayView reference, - rtc::ArrayView output, +::testing::AssertionResult VerifyArray(ArrayView reference, + ArrayView output, float element_error_bound); } // namespace test diff --git a/modules/audio_processing/test/conversational_speech/BUILD.gn b/modules/audio_processing/test/conversational_speech/BUILD.gn index 2c3678092e..b7a317db57 100644 --- a/modules/audio_processing/test/conversational_speech/BUILD.gn +++ b/modules/audio_processing/test/conversational_speech/BUILD.gn @@ -51,8 +51,8 @@ rtc_library("lib") { "../../../../rtc_base:safe_conversions", "../../../../rtc_base:stringutils", "../../../../test:fileutils", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] visibility = [ ":*" ] # Only targets in this file can depend on this. } @@ -70,12 +70,10 @@ rtc_library("unittest") { "../../../../api:array_view", "../../../../common_audio", "../../../../rtc_base:logging", + "../../../../rtc_base:safe_conversions", "../../../../test:fileutils", "../../../../test:test_support", "//testing/gtest", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } diff --git a/modules/audio_processing/test/conversational_speech/generator_unittest.cc b/modules/audio_processing/test/conversational_speech/generator_unittest.cc index 17714440d4..71acb15448 100644 --- a/modules/audio_processing/test/conversational_speech/generator_unittest.cc +++ b/modules/audio_processing/test/conversational_speech/generator_unittest.cc @@ -36,15 +36,20 @@ // MSVC++ requires this to be set before any other includes to get M_PI. 
#define _USE_MATH_DEFINES +#include #include #include +#include +#include #include #include +#include +#include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "common_audio/wav_file.h" #include "modules/audio_processing/test/conversational_speech/config.h" #include "modules/audio_processing/test/conversational_speech/mock_wavreader_factory.h" @@ -53,6 +58,7 @@ #include "modules/audio_processing/test/conversational_speech/timing.h" #include "modules/audio_processing/test/conversational_speech/wavreader_factory.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" @@ -101,17 +107,15 @@ std::unique_ptr CreateMockWavReaderFactory() { void CreateSineWavFile(absl::string_view filepath, const MockWavReaderFactory::Params& params, - float frequency = 440.0f) { - // Create samples. - constexpr double two_pi = 2.0 * M_PI; + float frequency_hz = 440.0f) { + const double phase_step = 2 * M_PI * frequency_hz / params.sample_rate; + double phase = 0.0; std::vector samples(params.num_samples); - for (std::size_t i = 0; i < params.num_samples; ++i) { - // TODO(alessiob): the produced tone is not pure, improve. - samples[i] = std::lround( - 32767.0f * std::sin(two_pi * i * frequency / params.sample_rate)); + for (size_t i = 0; i < params.num_samples; ++i) { + samples[i] = saturated_cast(32767.0f * std::sin(phase)); + phase += phase_step; } - // Write samples. WavWriter wav_writer(filepath, params.sample_rate, params.num_channels); wav_writer.WriteSamples(samples.data(), params.num_samples); } @@ -152,7 +156,7 @@ void DeleteFolderAndContents(absl::string_view dir) { if (!DirExists(dir)) { return; } - absl::optional> dir_content = ReadDirectory(dir); + std::optional> dir_content = ReadDirectory(dir); EXPECT_TRUE(dir_content); for (const auto& path : *dir_content) { if (DirExists(path)) { @@ -624,7 +628,7 @@ TEST(ConversationalSpeechTest, DISABLED_MultiEndCallSimulator) { // Simulated call (one character corresponding to 500 ms): // A 0*********...........2*********..... // B ...........1*********.....3********* - const std::vector expected_timing = { + const std::vector expected_timing_multiend = { {"A", "t5000_440.wav", 0, 0}, {"B", "t5000_880.wav", 500, 0}, {"A", "t5000_440.wav", 0, 0}, @@ -638,21 +642,23 @@ TEST(ConversationalSpeechTest, DISABLED_MultiEndCallSimulator) { {"t5000_440.wav", {{sample_rate, 1u, sample_rate * 5}, 440.0}}, {"t5000_880.wav", {{sample_rate, 1u, sample_rate * 5}, 880.0}}, }; - const std::string audiotracks_path = + const std::string audiotracks_path_multiend = CreateTemporarySineAudioTracks(sine_tracks_params); // Set up the multi-end call. auto wavreader_factory = std::unique_ptr(new WavReaderFactory()); - MultiEndCall multiend_call(expected_timing, audiotracks_path, + MultiEndCall multiend_call(expected_timing_multiend, + audiotracks_path_multiend, std::move(wavreader_factory)); // Simulate the call. 
- std::string output_path = JoinFilename(audiotracks_path, "output"); - CreateDir(output_path); - RTC_LOG(LS_VERBOSE) << "simulator output path: " << output_path; + std::string output_path_multiend = + JoinFilename(audiotracks_path_multiend, "output"); + CreateDir(output_path_multiend); + RTC_LOG(LS_VERBOSE) << "simulator output path: " << output_path_multiend; auto generated_audiotrak_pairs = - conversational_speech::Simulate(multiend_call, output_path); + conversational_speech::Simulate(multiend_call, output_path_multiend); EXPECT_EQ(2u, generated_audiotrak_pairs->size()); // Check the output. @@ -668,7 +674,7 @@ TEST(ConversationalSpeechTest, DISABLED_MultiEndCallSimulator) { } // Clean. - EXPECT_NO_FATAL_FAILURE(DeleteFolderAndContents(audiotracks_path)); + EXPECT_NO_FATAL_FAILURE(DeleteFolderAndContents(audiotracks_path_multiend)); } } // namespace test diff --git a/modules/audio_processing/test/conversational_speech/mock_wavreader.h b/modules/audio_processing/test/conversational_speech/mock_wavreader.h index 94e20b9ec6..298728c262 100644 --- a/modules/audio_processing/test/conversational_speech/mock_wavreader.h +++ b/modules/audio_processing/test/conversational_speech/mock_wavreader.h @@ -27,9 +27,11 @@ class MockWavReader : public WavReaderInterface { MockWavReader(int sample_rate, size_t num_channels, size_t num_samples); ~MockWavReader(); - // TODO(alessiob): use ON_CALL to return random samples if needed. - MOCK_METHOD(size_t, ReadFloatSamples, (rtc::ArrayView), (override)); - MOCK_METHOD(size_t, ReadInt16Samples, (rtc::ArrayView), (override)); + MOCK_METHOD(size_t, ReadFloatSamples, (webrtc::ArrayView), (override)); + MOCK_METHOD(size_t, + ReadInt16Samples, + (webrtc::ArrayView), + (override)); MOCK_METHOD(int, SampleRate, (), (const, override)); MOCK_METHOD(size_t, NumChannels, (), (const, override)); diff --git a/modules/audio_processing/test/conversational_speech/multiend_call.cc b/modules/audio_processing/test/conversational_speech/multiend_call.cc index 952114a78b..db92c5d701 100644 --- a/modules/audio_processing/test/conversational_speech/multiend_call.cc +++ b/modules/audio_processing/test/conversational_speech/multiend_call.cc @@ -22,7 +22,7 @@ namespace test { namespace conversational_speech { MultiEndCall::MultiEndCall( - rtc::ArrayView timing, + ArrayView timing, absl::string_view audiotracks_path, std::unique_ptr wavreader_abstract_factory) : timing_(timing), diff --git a/modules/audio_processing/test/conversational_speech/multiend_call.h b/modules/audio_processing/test/conversational_speech/multiend_call.h index 63283465fa..ffdbf16911 100644 --- a/modules/audio_processing/test/conversational_speech/multiend_call.h +++ b/modules/audio_processing/test/conversational_speech/multiend_call.h @@ -52,7 +52,7 @@ class MultiEndCall { }; MultiEndCall( - rtc::ArrayView timing, + ArrayView timing, absl::string_view audiotracks_path, std::unique_ptr wavreader_abstract_factory); ~MultiEndCall(); @@ -85,7 +85,7 @@ class MultiEndCall { // only up to 2 speakers. Rejects unordered turns and self cross-talk. 
bool CheckTiming(); - rtc::ArrayView timing_; + ArrayView timing_; std::string audiotracks_path_; std::unique_ptr wavreader_abstract_factory_; std::set speaker_names_; diff --git a/modules/audio_processing/test/conversational_speech/simulator.cc b/modules/audio_processing/test/conversational_speech/simulator.cc index 89bcd48d84..1294c551eb 100644 --- a/modules/audio_processing/test/conversational_speech/simulator.cc +++ b/modules/audio_processing/test/conversational_speech/simulator.cc @@ -129,7 +129,7 @@ std::unique_ptr>> PreloadAudioTracks( // previously written samples in `wav_writer` is less than `interval_begin`, it // adds zeros as left padding. The padding corresponds to intervals during which // a speaker is not active. -void PadLeftWriteChunk(rtc::ArrayView source_samples, +void PadLeftWriteChunk(ArrayView source_samples, size_t interval_begin, WavWriter* wav_writer) { // Add left padding. @@ -158,14 +158,14 @@ void PadRightWrite(WavWriter* wav_writer, size_t pad_samples) { } } -void ScaleSignal(rtc::ArrayView source_samples, +void ScaleSignal(ArrayView source_samples, int gain, - rtc::ArrayView output_samples) { + ArrayView output_samples) { const float gain_linear = DbToRatio(gain); RTC_DCHECK_EQ(source_samples.size(), output_samples.size()); std::transform(source_samples.begin(), source_samples.end(), output_samples.begin(), [gain_linear](int16_t x) -> int16_t { - return rtc::saturated_cast(x * gain_linear); + return saturated_cast(x * gain_linear); }); } @@ -187,13 +187,6 @@ std::unique_ptr> Simulate( const auto& audiotrack_readers = multiend_call.audiotrack_readers(); auto audiotracks = PreloadAudioTracks(audiotrack_readers); - // TODO(alessiob): When speaker_names.size() == 2, near-end and far-end - // across the 2 speakers are symmetric; hence, the code below could be - // replaced by only creating the near-end or the far-end. However, this would - // require to split the unit tests and document the behavior in README.md. - // In practice, it should not be an issue since the files are not expected to - // be signinificant. - // Write near-end and far-end output tracks. for (const auto& speaking_turn : multiend_call.speaking_turns()) { const std::string& active_speaker_name = speaking_turn.speaker_name; diff --git a/modules/audio_processing/test/conversational_speech/timing.cc b/modules/audio_processing/test/conversational_speech/timing.cc index 95ec9f542e..1fc62db9f6 100644 --- a/modules/audio_processing/test/conversational_speech/timing.cc +++ b/modules/audio_processing/test/conversational_speech/timing.cc @@ -30,15 +30,15 @@ bool Turn::operator==(const Turn& b) const { std::vector LoadTiming(absl::string_view timing_filepath) { // Line parser. auto parse_line = [](absl::string_view line) { - std::vector fields = rtc::split(line, ' '); + std::vector fields = split(line, ' '); RTC_CHECK_GE(fields.size(), 3); RTC_CHECK_LE(fields.size(), 4); int gain = 0; if (fields.size() == 4) { - gain = rtc::StringToNumber(fields[3]).value_or(0); + gain = StringToNumber(fields[3]).value_or(0); } return Turn(fields[0], fields[1], - rtc::StringToNumber(fields[2]).value_or(0), gain); + StringToNumber(fields[2]).value_or(0), gain); }; // Init. 
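
The hunk above drops the rtc:: qualification from split() and StringToNumber() now that these helpers are reached through the webrtc namespace. A minimal standalone sketch of the same parsing step follows, for illustration only (not part of this change): the include paths, the webrtc:: qualification, and the ParsedTurn/ParseTimingLine names are assumptions.

#include <string>
#include <vector>

#include "absl/strings/string_view.h"
#include "rtc_base/checks.h"
#include "rtc_base/string_encode.h"      // webrtc::split() (assumed header)
#include "rtc_base/string_to_number.h"   // webrtc::StringToNumber() (assumed header)

// Hypothetical standalone equivalent of the parse_line lambda above: turns one
// timing-file line such as "A a_440.wav 500 -3" into speaker name, audio track
// file name, offset in milliseconds and an optional gain in dB (default 0).
struct ParsedTurn {
  std::string speaker;
  std::string track;
  int offset_ms = 0;
  int gain_db = 0;
};

ParsedTurn ParseTimingLine(absl::string_view line) {
  std::vector<absl::string_view> fields = webrtc::split(line, ' ');
  RTC_CHECK_GE(fields.size(), 3u);
  RTC_CHECK_LE(fields.size(), 4u);
  ParsedTurn turn;
  turn.speaker = std::string(fields[0]);
  turn.track = std::string(fields[1]);
  turn.offset_ms = webrtc::StringToNumber<int>(fields[2]).value_or(0);
  if (fields.size() == 4) {
    turn.gain_db = webrtc::StringToNumber<int>(fields[3]).value_or(0);
  }
  return turn;
}
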
@@ -58,7 +58,7 @@ std::vector LoadTiming(absl::string_view timing_filepath) { } void SaveTiming(absl::string_view timing_filepath, - rtc::ArrayView timing) { + ArrayView timing) { std::ofstream outfile(std::string{timing_filepath}); RTC_CHECK(outfile.is_open()); for (const Turn& turn : timing) { diff --git a/modules/audio_processing/test/conversational_speech/timing.h b/modules/audio_processing/test/conversational_speech/timing.h index 9314f6fc43..56e19c6e8e 100644 --- a/modules/audio_processing/test/conversational_speech/timing.h +++ b/modules/audio_processing/test/conversational_speech/timing.h @@ -42,7 +42,7 @@ std::vector LoadTiming(absl::string_view timing_filepath); // Writes a list of turns into a file. void SaveTiming(absl::string_view timing_filepath, - rtc::ArrayView timing); + ArrayView timing); } // namespace conversational_speech } // namespace test diff --git a/modules/audio_processing/test/conversational_speech/wavreader_factory.cc b/modules/audio_processing/test/conversational_speech/wavreader_factory.cc index 99b1686484..b277c6dc7c 100644 --- a/modules/audio_processing/test/conversational_speech/wavreader_factory.cc +++ b/modules/audio_processing/test/conversational_speech/wavreader_factory.cc @@ -29,11 +29,11 @@ class WavReaderAdaptor final : public WavReaderInterface { : wav_reader_(filepath) {} ~WavReaderAdaptor() override = default; - size_t ReadFloatSamples(rtc::ArrayView samples) override { + size_t ReadFloatSamples(ArrayView samples) override { return wav_reader_.ReadSamples(samples.size(), samples.begin()); } - size_t ReadInt16Samples(rtc::ArrayView samples) override { + size_t ReadInt16Samples(ArrayView samples) override { return wav_reader_.ReadSamples(samples.size(), samples.begin()); } diff --git a/modules/audio_processing/test/conversational_speech/wavreader_interface.h b/modules/audio_processing/test/conversational_speech/wavreader_interface.h index c74f639461..e013726680 100644 --- a/modules/audio_processing/test/conversational_speech/wavreader_interface.h +++ b/modules/audio_processing/test/conversational_speech/wavreader_interface.h @@ -24,8 +24,8 @@ class WavReaderInterface { virtual ~WavReaderInterface() = default; // Returns the number of samples read. - virtual size_t ReadFloatSamples(rtc::ArrayView samples) = 0; - virtual size_t ReadInt16Samples(rtc::ArrayView samples) = 0; + virtual size_t ReadFloatSamples(ArrayView samples) = 0; + virtual size_t ReadInt16Samples(ArrayView samples) = 0; // Getters. virtual int SampleRate() const = 0; diff --git a/modules/audio_processing/test/debug_dump_replayer.cc b/modules/audio_processing/test/debug_dump_replayer.cc index 2f483f5077..bd3631d2c3 100644 --- a/modules/audio_processing/test/debug_dump_replayer.cc +++ b/modules/audio_processing/test/debug_dump_replayer.cc @@ -13,7 +13,8 @@ #include #include "absl/strings/string_view.h" -#include "modules/audio_processing/test/audio_processing_builder_for_testing.h" +#include "api/audio/builtin_audio_processing_builder.h" +#include "api/environment/environment_factory.h" #include "modules/audio_processing/test/protobuf_utils.h" #include "modules/audio_processing/test/runtime_setting_util.h" #include "rtc_base/checks.h" @@ -54,9 +55,9 @@ bool DebugDumpReplayer::SetDumpFile(absl::string_view filename) { } // Get next event that has not run. 
-absl::optional DebugDumpReplayer::GetNextEvent() const { +std::optional DebugDumpReplayer::GetNextEvent() const { if (!has_next_event_) - return absl::nullopt; + return std::nullopt; else return next_event_; } @@ -188,8 +189,8 @@ void DebugDumpReplayer::MaybeRecreateApm(const audioproc::Config& msg) { // We only create APM once, since changes on these fields should not // happen in current implementation. - if (!apm_.get()) { - apm_ = AudioProcessingBuilderForTesting().Create(); + if (apm_ == nullptr) { + apm_ = BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); } } diff --git a/modules/audio_processing/test/debug_dump_replayer.h b/modules/audio_processing/test/debug_dump_replayer.h index be21c68663..6101fc32fd 100644 --- a/modules/audio_processing/test/debug_dump_replayer.h +++ b/modules/audio_processing/test/debug_dump_replayer.h @@ -14,13 +14,11 @@ #include #include "absl/strings/string_view.h" +#include "api/audio/audio_processing.h" #include "common_audio/channel_buffer.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "rtc_base/ignore_wundef.h" -RTC_PUSH_IGNORING_WUNDEF() +// Generated at build-time by the protobuf compiler. #include "modules/audio_processing/debug.pb.h" -RTC_POP_IGNORING_WUNDEF() namespace webrtc { namespace test { @@ -34,7 +32,7 @@ class DebugDumpReplayer { bool SetDumpFile(absl::string_view filename); // Return next event. - absl::optional GetNextEvent() const; + std::optional GetNextEvent() const; // Run the next event. Returns true if succeeded. bool RunNextEvent(); @@ -60,7 +58,7 @@ class DebugDumpReplayer { std::unique_ptr> reverse_; std::unique_ptr> output_; - rtc::scoped_refptr apm_; + scoped_refptr apm_; FILE* debug_file_; diff --git a/modules/audio_processing/test/debug_dump_test.cc b/modules/audio_processing/test/debug_dump_test.cc index cded5de217..b6cb8a9716 100644 --- a/modules/audio_processing/test/debug_dump_test.cc +++ b/modules/audio_processing/test/debug_dump_test.cc @@ -15,10 +15,11 @@ #include #include "absl/strings/string_view.h" +#include "api/audio/builtin_audio_processing_builder.h" #include "api/audio/echo_canceller3_factory.h" +#include "api/environment/environment_factory.h" #include "modules/audio_coding/neteq/tools/resample_input_audio_file.h" #include "modules/audio_processing/aec_dump/aec_dump_factory.h" -#include "modules/audio_processing/test/audio_processing_builder_for_testing.h" #include "modules/audio_processing/test/debug_dump_replayer.h" #include "modules/audio_processing/test/test_utils.h" #include "rtc_base/task_queue_for_test.h" @@ -111,7 +112,7 @@ class DebugDumpGenerator { bool enable_pre_amplifier_; TaskQueueForTest worker_queue_; - rtc::scoped_refptr apm_; + scoped_refptr apm_; const std::string dump_file_name_; }; @@ -140,8 +141,7 @@ DebugDumpGenerator::DebugDumpGenerator(absl::string_view input_file_name, enable_pre_amplifier_(enable_pre_amplifier), worker_queue_("debug_dump_generator_worker_queue"), dump_file_name_(dump_file_name) { - AudioProcessingBuilderForTesting apm_builder; - apm_ = apm_builder.Create(); + apm_ = BuiltinAudioProcessingBuilder().Build(CreateEnvironment()); } DebugDumpGenerator::DebugDumpGenerator( @@ -197,7 +197,7 @@ void DebugDumpGenerator::SetOutputChannels(int channels) { void DebugDumpGenerator::StartRecording() { apm_->AttachAecDump( - AecDumpFactory::Create(dump_file_name_.c_str(), -1, &worker_queue_)); + AecDumpFactory::Create(dump_file_name_.c_str(), -1, worker_queue_.Get())); } void DebugDumpGenerator::Process(size_t num_blocks) { @@ -264,7 +264,7 @@ 
class DebugDumpTest : public ::testing::Test { void DebugDumpTest::VerifyDebugDump(absl::string_view in_filename) { ASSERT_TRUE(debug_dump_replayer_.SetDumpFile(in_filename)); - while (const absl::optional event = + while (const std::optional event = debug_dump_replayer_.GetNextEvent()) { debug_dump_replayer_.RunNextEvent(); if (event->type() == audioproc::Event::STREAM) { @@ -293,7 +293,12 @@ TEST_F(DebugDumpTest, SimpleCase) { VerifyDebugDump(generator.dump_file_name()); } +// TODO(bugs.webrtc.org/345674542): Fix/enable. +#if defined(__has_feature) && __has_feature(undefined_behavior_sanitizer) +TEST_F(DebugDumpTest, DISABLED_ChangeInputFormat) { +#else TEST_F(DebugDumpTest, ChangeInputFormat) { +#endif DebugDumpGenerator generator(/*apm_config=*/{}); generator.StartRecording(); @@ -310,7 +315,12 @@ TEST_F(DebugDumpTest, ChangeInputFormat) { VerifyDebugDump(generator.dump_file_name()); } +// TODO(bugs.webrtc.org/345674542): Fix/enable. +#if defined(__has_feature) && __has_feature(undefined_behavior_sanitizer) +TEST_F(DebugDumpTest, DISABLED_ChangeReverseFormat) { +#else TEST_F(DebugDumpTest, ChangeReverseFormat) { +#endif DebugDumpGenerator generator(/*apm_config=*/{}); generator.StartRecording(); generator.Process(100); @@ -361,7 +371,7 @@ TEST_F(DebugDumpTest, VerifyCombinedExperimentalStringInclusive) { ASSERT_TRUE(debug_dump_replayer_.SetDumpFile(generator.dump_file_name())); - while (const absl::optional event = + while (const std::optional event = debug_dump_replayer_.GetNextEvent()) { debug_dump_replayer_.RunNextEvent(); if (event->type() == audioproc::Event::CONFIG) { @@ -385,7 +395,7 @@ TEST_F(DebugDumpTest, VerifyCombinedExperimentalStringExclusive) { ASSERT_TRUE(debug_dump_replayer_.SetDumpFile(generator.dump_file_name())); - while (const absl::optional event = + while (const std::optional event = debug_dump_replayer_.GetNextEvent()) { debug_dump_replayer_.RunNextEvent(); if (event->type() == audioproc::Event::CONFIG) { @@ -410,7 +420,7 @@ TEST_F(DebugDumpTest, VerifyAec3ExperimentalString) { ASSERT_TRUE(debug_dump_replayer_.SetDumpFile(generator.dump_file_name())); - while (const absl::optional event = + while (const std::optional event = debug_dump_replayer_.GetNextEvent()) { debug_dump_replayer_.RunNextEvent(); if (event->type() == audioproc::Event::CONFIG) { @@ -432,7 +442,7 @@ TEST_F(DebugDumpTest, VerifyEmptyExperimentalString) { ASSERT_TRUE(debug_dump_replayer_.SetDumpFile(generator.dump_file_name())); - while (const absl::optional event = + while (const std::optional event = debug_dump_replayer_.GetNextEvent()) { debug_dump_replayer_.RunNextEvent(); if (event->type() == audioproc::Event::CONFIG) { diff --git a/api/audio/echo_canceller3_config_json.cc b/modules/audio_processing/test/echo_canceller3_config_json.cc similarity index 92% rename from api/audio/echo_canceller3_config_json.cc rename to modules/audio_processing/test/echo_canceller3_config_json.cc index 96e45ffe6d..4b89be6ee7 100644 --- a/api/audio/echo_canceller3_config_json.cc +++ b/modules/audio_processing/test/echo_canceller3_config_json.cc @@ -7,7 +7,7 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "api/audio/echo_canceller3_config_json.h" +#include "modules/audio_processing/test/echo_canceller3_config_json.h" #include @@ -25,7 +25,7 @@ namespace { void ReadParam(const Json::Value& root, std::string param_name, bool* param) { RTC_DCHECK(param); bool v; - if (rtc::GetBoolFromJsonObject(root, param_name, &v)) { + if (GetBoolFromJsonObject(root, param_name, &v)) { *param = v; } } @@ -33,7 +33,7 @@ void ReadParam(const Json::Value& root, std::string param_name, bool* param) { void ReadParam(const Json::Value& root, std::string param_name, size_t* param) { RTC_DCHECK(param); int v; - if (rtc::GetIntFromJsonObject(root, param_name, &v) && v >= 0) { + if (GetIntFromJsonObject(root, param_name, &v) && v >= 0) { *param = v; } } @@ -41,7 +41,7 @@ void ReadParam(const Json::Value& root, std::string param_name, size_t* param) { void ReadParam(const Json::Value& root, std::string param_name, int* param) { RTC_DCHECK(param); int v; - if (rtc::GetIntFromJsonObject(root, param_name, &v)) { + if (GetIntFromJsonObject(root, param_name, &v)) { *param = v; } } @@ -49,7 +49,7 @@ void ReadParam(const Json::Value& root, std::string param_name, int* param) { void ReadParam(const Json::Value& root, std::string param_name, float* param) { RTC_DCHECK(param); double v; - if (rtc::GetDoubleFromJsonObject(root, param_name, &v)) { + if (GetDoubleFromJsonObject(root, param_name, &v)) { *param = static_cast(v); } } @@ -59,9 +59,9 @@ void ReadParam(const Json::Value& root, EchoCanceller3Config::Filter::RefinedConfiguration* param) { RTC_DCHECK(param); Json::Value json_array; - if (rtc::GetValueFromJsonObject(root, param_name, &json_array)) { + if (GetValueFromJsonObject(root, param_name, &json_array)) { std::vector v; - rtc::JsonArrayToDoubleVector(json_array, &v); + JsonArrayToDoubleVector(json_array, &v); if (v.size() != 6) { RTC_LOG(LS_ERROR) << "Incorrect array size for " << param_name; return; @@ -80,9 +80,9 @@ void ReadParam(const Json::Value& root, EchoCanceller3Config::Filter::CoarseConfiguration* param) { RTC_DCHECK(param); Json::Value json_array; - if (rtc::GetValueFromJsonObject(root, param_name, &json_array)) { + if (GetValueFromJsonObject(root, param_name, &json_array)) { std::vector v; - rtc::JsonArrayToDoubleVector(json_array, &v); + JsonArrayToDoubleVector(json_array, &v); if (v.size() != 3) { RTC_LOG(LS_ERROR) << "Incorrect array size for " << param_name; return; @@ -99,7 +99,7 @@ void ReadParam(const Json::Value& root, RTC_DCHECK(param); Json::Value subsection; - if (rtc::GetValueFromJsonObject(root, param_name, &subsection)) { + if (GetValueFromJsonObject(root, param_name, &subsection)) { ReadParam(subsection, "downmix", ¶m->downmix); ReadParam(subsection, "adaptive_selection", ¶m->adaptive_selection); ReadParam(subsection, "activity_power_threshold", @@ -116,9 +116,9 @@ void ReadParam( param) { RTC_DCHECK(param); Json::Value json_array; - if (rtc::GetValueFromJsonObject(root, param_name, &json_array)) { + if (GetValueFromJsonObject(root, param_name, &json_array)) { std::vector v; - rtc::JsonArrayToIntVector(json_array, &v); + JsonArrayToIntVector(json_array, &v); if (v.size() != 2) { RTC_LOG(LS_ERROR) << "Incorrect array size for " << param_name; return; @@ -133,9 +133,9 @@ void ReadParam(const Json::Value& root, EchoCanceller3Config::Suppressor::MaskingThresholds* param) { RTC_DCHECK(param); Json::Value json_array; - if (rtc::GetValueFromJsonObject(root, param_name, &json_array)) { + if (GetValueFromJsonObject(root, param_name, &json_array)) { std::vector v; - 
rtc::JsonArrayToDoubleVector(json_array, &v); + JsonArrayToDoubleVector(json_array, &v); if (v.size() != 3) { RTC_LOG(LS_ERROR) << "Incorrect array size for " << param_name; return; @@ -170,7 +170,7 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, } Json::Value aec3_root; - success = rtc::GetValueFromJsonObject(root, "aec3", &aec3_root); + success = GetValueFromJsonObject(root, "aec3", &aec3_root); if (!success) { RTC_LOG(LS_ERROR) << "Missing AEC3 config field: " << json_string; *parsing_successful = false; @@ -178,14 +178,14 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, } Json::Value section; - if (rtc::GetValueFromJsonObject(aec3_root, "buffering", §ion)) { + if (GetValueFromJsonObject(aec3_root, "buffering", §ion)) { ReadParam(section, "excess_render_detection_interval_blocks", &cfg.buffering.excess_render_detection_interval_blocks); ReadParam(section, "max_allowed_excess_render_blocks", &cfg.buffering.max_allowed_excess_render_blocks); } - if (rtc::GetValueFromJsonObject(aec3_root, "delay", §ion)) { + if (GetValueFromJsonObject(aec3_root, "delay", §ion)) { ReadParam(section, "default_delay", &cfg.delay.default_delay); ReadParam(section, "down_sampling_factor", &cfg.delay.down_sampling_factor); ReadParam(section, "num_filters", &cfg.delay.num_filters); @@ -203,8 +203,8 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, &cfg.delay.delay_candidate_detection_threshold); Json::Value subsection; - if (rtc::GetValueFromJsonObject(section, "delay_selection_thresholds", - &subsection)) { + if (GetValueFromJsonObject(section, "delay_selection_thresholds", + &subsection)) { ReadParam(subsection, "initial", &cfg.delay.delay_selection_thresholds.initial); ReadParam(subsection, "converged", @@ -223,7 +223,7 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, ReadParam(section, "detect_pre_echo", &cfg.delay.detect_pre_echo); } - if (rtc::GetValueFromJsonObject(aec3_root, "filter", §ion)) { + if (GetValueFromJsonObject(aec3_root, "filter", §ion)) { ReadParam(section, "refined", &cfg.filter.refined); ReadParam(section, "coarse", &cfg.filter.coarse); ReadParam(section, "refined_initial", &cfg.filter.refined_initial); @@ -245,7 +245,7 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, &cfg.filter.export_linear_aec_output); } - if (rtc::GetValueFromJsonObject(aec3_root, "erle", §ion)) { + if (GetValueFromJsonObject(aec3_root, "erle", §ion)) { ReadParam(section, "min", &cfg.erle.min); ReadParam(section, "max_l", &cfg.erle.max_l); ReadParam(section, "max_h", &cfg.erle.max_h); @@ -257,7 +257,7 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, &cfg.erle.clamp_quality_estimate_to_one); } - if (rtc::GetValueFromJsonObject(aec3_root, "ep_strength", §ion)) { + if (GetValueFromJsonObject(aec3_root, "ep_strength", §ion)) { ReadParam(section, "default_gain", &cfg.ep_strength.default_gain); ReadParam(section, "default_len", &cfg.ep_strength.default_len); ReadParam(section, "nearend_len", &cfg.ep_strength.nearend_len); @@ -269,7 +269,7 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, &cfg.ep_strength.use_conservative_tail_frequency_response); } - if (rtc::GetValueFromJsonObject(aec3_root, "echo_audibility", §ion)) { + if (GetValueFromJsonObject(aec3_root, "echo_audibility", §ion)) { ReadParam(section, "low_render_limit", &cfg.echo_audibility.low_render_limit); ReadParam(section, "normal_render_limit", @@ -288,7 +288,7 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, 
&cfg.echo_audibility.use_stationarity_properties_at_init); } - if (rtc::GetValueFromJsonObject(aec3_root, "render_levels", §ion)) { + if (GetValueFromJsonObject(aec3_root, "render_levels", §ion)) { ReadParam(section, "active_render_limit", &cfg.render_levels.active_render_limit); ReadParam(section, "poor_excitation_render_limit", @@ -299,15 +299,14 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, &cfg.render_levels.render_power_gain_db); } - if (rtc::GetValueFromJsonObject(aec3_root, "echo_removal_control", - §ion)) { + if (GetValueFromJsonObject(aec3_root, "echo_removal_control", §ion)) { ReadParam(section, "has_clock_drift", &cfg.echo_removal_control.has_clock_drift); ReadParam(section, "linear_and_stable_echo_path", &cfg.echo_removal_control.linear_and_stable_echo_path); } - if (rtc::GetValueFromJsonObject(aec3_root, "echo_model", §ion)) { + if (GetValueFromJsonObject(aec3_root, "echo_model", §ion)) { Json::Value subsection; ReadParam(section, "noise_floor_hold", &cfg.echo_model.noise_floor_hold); ReadParam(section, "min_noise_floor_power", @@ -324,16 +323,16 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, &cfg.echo_model.model_reverb_in_nonlinear_mode); } - if (rtc::GetValueFromJsonObject(aec3_root, "comfort_noise", §ion)) { + if (GetValueFromJsonObject(aec3_root, "comfort_noise", §ion)) { ReadParam(section, "noise_floor_dbfs", &cfg.comfort_noise.noise_floor_dbfs); } Json::Value subsection; - if (rtc::GetValueFromJsonObject(aec3_root, "suppressor", §ion)) { + if (GetValueFromJsonObject(aec3_root, "suppressor", §ion)) { ReadParam(section, "nearend_average_blocks", &cfg.suppressor.nearend_average_blocks); - if (rtc::GetValueFromJsonObject(section, "normal_tuning", &subsection)) { + if (GetValueFromJsonObject(section, "normal_tuning", &subsection)) { ReadParam(subsection, "mask_lf", &cfg.suppressor.normal_tuning.mask_lf); ReadParam(subsection, "mask_hf", &cfg.suppressor.normal_tuning.mask_hf); ReadParam(subsection, "max_inc_factor", @@ -342,7 +341,7 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, &cfg.suppressor.normal_tuning.max_dec_factor_lf); } - if (rtc::GetValueFromJsonObject(section, "nearend_tuning", &subsection)) { + if (GetValueFromJsonObject(section, "nearend_tuning", &subsection)) { ReadParam(subsection, "mask_lf", &cfg.suppressor.nearend_tuning.mask_lf); ReadParam(subsection, "mask_hf", &cfg.suppressor.nearend_tuning.mask_hf); ReadParam(subsection, "max_inc_factor", @@ -360,8 +359,8 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, ReadParam(section, "last_lf_band", &cfg.suppressor.last_lf_band); ReadParam(section, "first_hf_band", &cfg.suppressor.first_hf_band); - if (rtc::GetValueFromJsonObject(section, "dominant_nearend_detection", - &subsection)) { + if (GetValueFromJsonObject(section, "dominant_nearend_detection", + &subsection)) { ReadParam(subsection, "enr_threshold", &cfg.suppressor.dominant_nearend_detection.enr_threshold); ReadParam(subsection, "enr_exit_threshold", @@ -380,8 +379,8 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, .use_unbounded_echo_spectrum); } - if (rtc::GetValueFromJsonObject(section, "subband_nearend_detection", - &subsection)) { + if (GetValueFromJsonObject(section, "subband_nearend_detection", + &subsection)) { ReadParam( subsection, "nearend_average_blocks", &cfg.suppressor.subband_nearend_detection.nearend_average_blocks); @@ -398,8 +397,8 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, ReadParam(section, "use_subband_nearend_detection", 
&cfg.suppressor.use_subband_nearend_detection); - if (rtc::GetValueFromJsonObject(section, "high_bands_suppression", - &subsection)) { + if (GetValueFromJsonObject(section, "high_bands_suppression", + &subsection)) { ReadParam(subsection, "enr_threshold", &cfg.suppressor.high_bands_suppression.enr_threshold); ReadParam(subsection, "max_gain_during_echo", @@ -417,7 +416,7 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, &cfg.suppressor.conservative_hf_suppression); } - if (rtc::GetValueFromJsonObject(aec3_root, "multi_channel", §ion)) { + if (GetValueFromJsonObject(aec3_root, "multi_channel", §ion)) { ReadParam(section, "detect_stereo_content", &cfg.multi_channel.detect_stereo_content); ReadParam(section, "stereo_detection_threshold", @@ -429,15 +428,8 @@ void Aec3ConfigFromJsonString(absl::string_view json_string, } } -EchoCanceller3Config Aec3ConfigFromJsonString(absl::string_view json_string) { - EchoCanceller3Config cfg; - bool not_used; - Aec3ConfigFromJsonString(json_string, &cfg, ¬_used); - return cfg; -} - std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) { - rtc::StringBuilder ost; + StringBuilder ost; ost << "{"; ost << "\"aec3\": {"; ost << "\"buffering\": {"; diff --git a/api/audio/echo_canceller3_config_json.h b/modules/audio_processing/test/echo_canceller3_config_json.h similarity index 54% rename from api/audio/echo_canceller3_config_json.h rename to modules/audio_processing/test/echo_canceller3_config_json.h index ecee9541c7..3db39a2691 100644 --- a/api/audio/echo_canceller3_config_json.h +++ b/modules/audio_processing/test/echo_canceller3_config_json.h @@ -8,14 +8,13 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef API_AUDIO_ECHO_CANCELLER3_CONFIG_JSON_H_ -#define API_AUDIO_ECHO_CANCELLER3_CONFIG_JSON_H_ +#ifndef MODULES_AUDIO_PROCESSING_TEST_ECHO_CANCELLER3_CONFIG_JSON_H_ +#define MODULES_AUDIO_PROCESSING_TEST_ECHO_CANCELLER3_CONFIG_JSON_H_ #include #include "absl/strings/string_view.h" #include "api/audio/echo_canceller3_config.h" -#include "rtc_base/system/rtc_export.h" namespace webrtc { // Parses a JSON-encoded string into an Aec3 config. Fields corresponds to @@ -23,23 +22,14 @@ namespace webrtc { // "aec3". Produces default config values for anything that cannot be parsed // from the string. If any error was found in the parsing, parsing_successful is // set to false. -RTC_EXPORT void Aec3ConfigFromJsonString(absl::string_view json_string, - EchoCanceller3Config* config, - bool* parsing_successful); - -// To be deprecated. -// Parses a JSON-encoded string into an Aec3 config. Fields corresponds to -// substruct names, with the addition that there must be a top-level node -// "aec3". Returns default config values for anything that cannot be parsed from -// the string. -RTC_EXPORT EchoCanceller3Config -Aec3ConfigFromJsonString(absl::string_view json_string); +void Aec3ConfigFromJsonString(absl::string_view json_string, + EchoCanceller3Config* config, + bool* parsing_successful); // Encodes an Aec3 config in JSON format. Fields corresponds to substruct names, // with the addition that the top-level node is named "aec3". 
-RTC_EXPORT std::string Aec3ConfigToJsonString( - const EchoCanceller3Config& config); +std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config); } // namespace webrtc -#endif // API_AUDIO_ECHO_CANCELLER3_CONFIG_JSON_H_ +#endif // MODULES_AUDIO_PROCESSING_TEST_ECHO_CANCELLER3_CONFIG_JSON_H_ diff --git a/api/audio/test/echo_canceller3_config_json_unittest.cc b/modules/audio_processing/test/echo_canceller3_config_json_unittest.cc similarity index 94% rename from api/audio/test/echo_canceller3_config_json_unittest.cc rename to modules/audio_processing/test/echo_canceller3_config_json_unittest.cc index 4146dda9fe..ce95d8179f 100644 --- a/api/audio/test/echo_canceller3_config_json_unittest.cc +++ b/modules/audio_processing/test/echo_canceller3_config_json_unittest.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "api/audio/echo_canceller3_config_json.h" +#include "modules/audio_processing/test/echo_canceller3_config_json.h" #include "api/audio/echo_canceller3_config.h" #include "test/gtest.h" @@ -36,8 +36,12 @@ TEST(EchoCanceller3JsonHelpers, ToStringAndParseJson) { cfg.multi_channel.stereo_detection_threshold += 1.0f; cfg.multi_channel.stereo_detection_timeout_threshold_seconds += 1; cfg.multi_channel.stereo_detection_hysteresis_seconds += 1; + std::string json_string = Aec3ConfigToJsonString(cfg); - EchoCanceller3Config cfg_transformed = Aec3ConfigFromJsonString(json_string); + EchoCanceller3Config cfg_transformed; + bool parsing_successful; + Aec3ConfigFromJsonString(json_string, &cfg_transformed, &parsing_successful); + ASSERT_TRUE(parsing_successful); // Expect unchanged values to remain default. EXPECT_EQ(cfg.ep_strength.default_len, diff --git a/modules/audio_processing/test/echo_canceller_test_tools.cc b/modules/audio_processing/test/echo_canceller_test_tools.cc index 1d36b954f9..f627d6783c 100644 --- a/modules/audio_processing/test/echo_canceller_test_tools.cc +++ b/modules/audio_processing/test/echo_canceller_test_tools.cc @@ -14,13 +14,13 @@ namespace webrtc { -void RandomizeSampleVector(Random* random_generator, rtc::ArrayView v) { +void RandomizeSampleVector(Random* random_generator, ArrayView v) { RandomizeSampleVector(random_generator, v, /*amplitude=*/32767.f); } void RandomizeSampleVector(Random* random_generator, - rtc::ArrayView v, + ArrayView v, float amplitude) { for (auto& v_k : v) { v_k = 2 * amplitude * random_generator->Rand() - amplitude; @@ -28,8 +28,7 @@ void RandomizeSampleVector(Random* random_generator, } template -void DelayBuffer::Delay(rtc::ArrayView x, - rtc::ArrayView x_delayed) { +void DelayBuffer::Delay(ArrayView x, ArrayView x_delayed) { RTC_DCHECK_EQ(x.size(), x_delayed.size()); if (buffer_.empty()) { std::copy(x.begin(), x.end(), x_delayed.begin()); diff --git a/modules/audio_processing/test/echo_canceller_test_tools.h b/modules/audio_processing/test/echo_canceller_test_tools.h index 0d70cd39c6..eb5c326d46 100644 --- a/modules/audio_processing/test/echo_canceller_test_tools.h +++ b/modules/audio_processing/test/echo_canceller_test_tools.h @@ -20,11 +20,11 @@ namespace webrtc { // Randomizes the elements in a vector with values -32767.f:32767.f. -void RandomizeSampleVector(Random* random_generator, rtc::ArrayView v); +void RandomizeSampleVector(Random* random_generator, ArrayView v); // Randomizes the elements in a vector with values -amplitude:amplitude. 
void RandomizeSampleVector(Random* random_generator, - rtc::ArrayView v, + ArrayView v, float amplitude); // Class for delaying a signal a fixed number of samples. @@ -35,7 +35,7 @@ class DelayBuffer { ~DelayBuffer() = default; // Produces a delayed signal copy of x. - void Delay(rtc::ArrayView x, rtc::ArrayView x_delayed); + void Delay(ArrayView x, ArrayView x_delayed); private: std::vector buffer_; diff --git a/modules/audio_processing/test/echo_canceller_test_tools_unittest.cc b/modules/audio_processing/test/echo_canceller_test_tools_unittest.cc index 164d28fa16..cc86a192ac 100644 --- a/modules/audio_processing/test/echo_canceller_test_tools_unittest.cc +++ b/modules/audio_processing/test/echo_canceller_test_tools_unittest.cc @@ -28,10 +28,10 @@ TEST(EchoCancellerTestTools, FloatDelayBuffer) { } std::vector v_delayed = v; constexpr size_t kBlockSize = 50; - for (size_t k = 0; k < rtc::CheckedDivExact(v.size(), kBlockSize); ++k) { + for (size_t k = 0; k < CheckedDivExact(v.size(), kBlockSize); ++k) { delay_buffer.Delay( - rtc::ArrayView(&v[k * kBlockSize], kBlockSize), - rtc::ArrayView(&v_delayed[k * kBlockSize], kBlockSize)); + ArrayView(&v[k * kBlockSize], kBlockSize), + ArrayView(&v_delayed[k * kBlockSize], kBlockSize)); } for (size_t k = kDelay; k < v.size(); ++k) { EXPECT_EQ(v[k - kDelay], v_delayed[k]); @@ -47,10 +47,9 @@ TEST(EchoCancellerTestTools, IntDelayBuffer) { } std::vector v_delayed = v; const size_t kBlockSize = 50; - for (size_t k = 0; k < rtc::CheckedDivExact(v.size(), kBlockSize); ++k) { - delay_buffer.Delay( - rtc::ArrayView(&v[k * kBlockSize], kBlockSize), - rtc::ArrayView(&v_delayed[k * kBlockSize], kBlockSize)); + for (size_t k = 0; k < CheckedDivExact(v.size(), kBlockSize); ++k) { + delay_buffer.Delay(ArrayView(&v[k * kBlockSize], kBlockSize), + ArrayView(&v_delayed[k * kBlockSize], kBlockSize)); } for (size_t k = kDelay; k < v.size(); ++k) { EXPECT_EQ(v[k - kDelay], v_delayed[k]); diff --git a/modules/audio_processing/test/fake_recording_device.cc b/modules/audio_processing/test/fake_recording_device.cc index 3fd80b2ede..dec9a93d45 100644 --- a/modules/audio_processing/test/fake_recording_device.cc +++ b/modules/audio_processing/test/fake_recording_device.cc @@ -12,8 +12,8 @@ #include #include +#include -#include "absl/types/optional.h" #include "modules/audio_processing/agc2/gain_map_internal.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -38,14 +38,14 @@ class FakeRecordingDeviceWorker { void set_mic_level(const int level) { mic_level_ = level; } void set_undo_mic_level(const int level) { undo_mic_level_ = level; } virtual ~FakeRecordingDeviceWorker() = default; - virtual void ModifyBufferInt16(rtc::ArrayView buffer) = 0; + virtual void ModifyBufferInt16(ArrayView buffer) = 0; virtual void ModifyBufferFloat(ChannelBuffer* buffer) = 0; protected: // Mic level to simulate. int mic_level_; // Optional mic level to undo. 
- absl::optional undo_mic_level_; + std::optional undo_mic_level_; }; namespace { @@ -57,8 +57,8 @@ class FakeRecordingDeviceIdentity final : public FakeRecordingDeviceWorker { explicit FakeRecordingDeviceIdentity(const int initial_mic_level) : FakeRecordingDeviceWorker(initial_mic_level) {} ~FakeRecordingDeviceIdentity() override = default; - void ModifyBufferInt16(rtc::ArrayView buffer) override {} - void ModifyBufferFloat(ChannelBuffer* buffer) override {} + void ModifyBufferInt16(ArrayView /* buffer */) override {} + void ModifyBufferFloat(ChannelBuffer* /* buffer */) override {} }; // Linear fake recording device. The gain curve is a linear function mapping the @@ -68,7 +68,7 @@ class FakeRecordingDeviceLinear final : public FakeRecordingDeviceWorker { explicit FakeRecordingDeviceLinear(const int initial_mic_level) : FakeRecordingDeviceWorker(initial_mic_level) {} ~FakeRecordingDeviceLinear() override = default; - void ModifyBufferInt16(rtc::ArrayView buffer) override { + void ModifyBufferInt16(ArrayView buffer) override { const size_t number_of_samples = buffer.size(); int16_t* data = buffer.data(); // If an undo level is specified, virtually restore the unmodified @@ -76,7 +76,7 @@ class FakeRecordingDeviceLinear final : public FakeRecordingDeviceWorker { const float divisor = (undo_mic_level_ && *undo_mic_level_ > 0) ? *undo_mic_level_ : 255.f; for (size_t i = 0; i < number_of_samples; ++i) { - data[i] = rtc::saturated_cast(data[i] * mic_level_ / divisor); + data[i] = saturated_cast(data[i] * mic_level_ / divisor); } } void ModifyBufferFloat(ChannelBuffer* buffer) override { @@ -87,14 +87,14 @@ class FakeRecordingDeviceLinear final : public FakeRecordingDeviceWorker { for (size_t c = 0; c < buffer->num_channels(); ++c) { for (size_t i = 0; i < buffer->num_frames(); ++i) { buffer->channels()[c][i] = - rtc::SafeClamp(buffer->channels()[c][i] * mic_level_ / divisor, - kFloatSampleMin, kFloatSampleMax); + SafeClamp(buffer->channels()[c][i] * mic_level_ / divisor, + kFloatSampleMin, kFloatSampleMax); } } } }; -float ComputeAgcLinearFactor(const absl::optional& undo_mic_level, +float ComputeAgcLinearFactor(const std::optional& undo_mic_level, int mic_level) { // If an undo level is specified, virtually restore the unmodified // microphone level; otherwise simulate the mic gain only. 
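
For reference, the int16 path of the linear fake device above reduces to a per-sample scale-and-saturate step. The sketch below restates it as a free function, assuming the webrtc::-qualified ArrayView and saturated_cast made available by the namespace migration in this change; ApplyLinearMicLevel is a hypothetical name used only for illustration.

#include <cstdint>

#include "api/array_view.h"
#include "rtc_base/numerics/safe_conversions.h"  // webrtc::saturated_cast (assumed)

// Scales int16 capture samples by mic_level/255 (or by mic_level/undo_mic_level
// when the unmodified device level is known), saturating to the int16 range,
// mirroring FakeRecordingDeviceLinear::ModifyBufferInt16() above.
void ApplyLinearMicLevel(webrtc::ArrayView<int16_t> buffer,
                         int mic_level,
                         int undo_mic_level /* <= 0 when unknown */) {
  const float divisor = undo_mic_level > 0 ? undo_mic_level : 255.f;
  for (int16_t& sample : buffer) {
    sample = webrtc::saturated_cast<int16_t>(sample * mic_level / divisor);
  }
}
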
@@ -110,13 +110,13 @@ class FakeRecordingDeviceAgc final : public FakeRecordingDeviceWorker { explicit FakeRecordingDeviceAgc(const int initial_mic_level) : FakeRecordingDeviceWorker(initial_mic_level) {} ~FakeRecordingDeviceAgc() override = default; - void ModifyBufferInt16(rtc::ArrayView buffer) override { + void ModifyBufferInt16(ArrayView buffer) override { const float scaling_factor = ComputeAgcLinearFactor(undo_mic_level_, mic_level_); const size_t number_of_samples = buffer.size(); int16_t* data = buffer.data(); for (size_t i = 0; i < number_of_samples; ++i) { - data[i] = rtc::saturated_cast(data[i] * scaling_factor); + data[i] = saturated_cast(data[i] * scaling_factor); } } void ModifyBufferFloat(ChannelBuffer* buffer) override { @@ -125,8 +125,8 @@ class FakeRecordingDeviceAgc final : public FakeRecordingDeviceWorker { for (size_t c = 0; c < buffer->num_channels(); ++c) { for (size_t i = 0; i < buffer->num_frames(); ++i) { buffer->channels()[c][i] = - rtc::SafeClamp(buffer->channels()[c][i] * scaling_factor, - kFloatSampleMin, kFloatSampleMax); + SafeClamp(buffer->channels()[c][i] * scaling_factor, + kFloatSampleMin, kFloatSampleMax); } } } @@ -169,14 +169,11 @@ void FakeRecordingDevice::SetMicLevel(const int level) { void FakeRecordingDevice::SetUndoMicLevel(const int level) { RTC_DCHECK(worker_); - // TODO(alessiob): The behavior with undo level equal to zero is not clear yet - // and will be defined in future CLs once more FakeRecordingDeviceWorker - // implementations need to be added. RTC_CHECK(level > 0) << "Zero undo mic level is unsupported"; worker_->set_undo_mic_level(level); } -void FakeRecordingDevice::SimulateAnalogGain(rtc::ArrayView buffer) { +void FakeRecordingDevice::SimulateAnalogGain(ArrayView buffer) { RTC_DCHECK(worker_); worker_->ModifyBufferInt16(buffer); } diff --git a/modules/audio_processing/test/fake_recording_device.h b/modules/audio_processing/test/fake_recording_device.h index da3c0cf794..439ab84151 100644 --- a/modules/audio_processing/test/fake_recording_device.h +++ b/modules/audio_processing/test/fake_recording_device.h @@ -55,7 +55,7 @@ class FakeRecordingDevice final { // If `real_device_level` is a valid level, the unmodified mic signal is // virtually restored. To skip the latter step set `real_device_level` to // an empty value. - void SimulateAnalogGain(rtc::ArrayView buffer); + void SimulateAnalogGain(ArrayView buffer); // Simulates the analog gain. 
// If `real_device_level` is a valid level, the unmodified mic signal is diff --git a/modules/audio_processing/test/fake_recording_device_unittest.cc b/modules/audio_processing/test/fake_recording_device_unittest.cc index 2ac8b1dc48..9da2903a01 100644 --- a/modules/audio_processing/test/fake_recording_device_unittest.cc +++ b/modules/audio_processing/test/fake_recording_device_unittest.cc @@ -98,13 +98,13 @@ void CheckSameSign(const ChannelBuffer* src, } std::string FakeRecordingDeviceKindToString(int fake_rec_device_kind) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "fake recording device: " << fake_rec_device_kind; return ss.Release(); } std::string AnalogLevelToString(int level) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "analog level: " << level; return ss.Release(); } diff --git a/modules/audio_processing/test/performance_timer.h b/modules/audio_processing/test/performance_timer.h index 5375ba74e8..b97a67d01b 100644 --- a/modules/audio_processing/test/performance_timer.h +++ b/modules/audio_processing/test/performance_timer.h @@ -11,9 +11,9 @@ #ifndef MODULES_AUDIO_PROCESSING_TEST_PERFORMANCE_TIMER_H_ #define MODULES_AUDIO_PROCESSING_TEST_PERFORMANCE_TIMER_H_ +#include #include -#include "absl/types/optional.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -37,7 +37,7 @@ class PerformanceTimer { private: webrtc::Clock* clock_; - absl::optional start_timestamp_us_; + std::optional start_timestamp_us_; std::vector timestamps_us_; }; diff --git a/modules/audio_processing/test/protobuf_utils.h b/modules/audio_processing/test/protobuf_utils.h index b9c2e819f9..eb93383f5a 100644 --- a/modules/audio_processing/test/protobuf_utils.h +++ b/modules/audio_processing/test/protobuf_utils.h @@ -14,12 +14,10 @@ #include #include // no-presubmit-check TODO(webrtc:8982) -#include "rtc_base/ignore_wundef.h" #include "rtc_base/protobuf_utils.h" -RTC_PUSH_IGNORING_WUNDEF() +// Generated at build-time by the protobuf compiler. #include "modules/audio_processing/debug.pb.h" -RTC_POP_IGNORING_WUNDEF() namespace webrtc { diff --git a/modules/audio_processing/test/py_quality_assessment/BUILD.gn b/modules/audio_processing/test/py_quality_assessment/BUILD.gn deleted file mode 100644 index e53a829623..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/BUILD.gn +++ /dev/null @@ -1,170 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. 
- -import("../../../../webrtc.gni") - -if (!build_with_chromium) { - group("py_quality_assessment") { - testonly = true - deps = [ - ":scripts", - ":unit_tests", - ] - } - - copy("scripts") { - testonly = true - sources = [ - "README.md", - "apm_quality_assessment.py", - "apm_quality_assessment.sh", - "apm_quality_assessment_boxplot.py", - "apm_quality_assessment_export.py", - "apm_quality_assessment_gencfgs.py", - "apm_quality_assessment_optimize.py", - ] - outputs = [ "$root_build_dir/py_quality_assessment/{{source_file_part}}" ] - deps = [ - ":apm_configs", - ":lib", - ":output", - "../../../../resources/audio_processing/test/py_quality_assessment:probing_signals", - "../../../../rtc_tools:audioproc_f", - ] - } - - copy("apm_configs") { - testonly = true - sources = [ "apm_configs/default.json" ] - visibility = [ ":*" ] # Only targets in this file can depend on this. - outputs = [ - "$root_build_dir/py_quality_assessment/apm_configs/{{source_file_part}}", - ] - } # apm_configs - - copy("lib") { - testonly = true - sources = [ - "quality_assessment/__init__.py", - "quality_assessment/annotations.py", - "quality_assessment/audioproc_wrapper.py", - "quality_assessment/collect_data.py", - "quality_assessment/data_access.py", - "quality_assessment/echo_path_simulation.py", - "quality_assessment/echo_path_simulation_factory.py", - "quality_assessment/eval_scores.py", - "quality_assessment/eval_scores_factory.py", - "quality_assessment/evaluation.py", - "quality_assessment/exceptions.py", - "quality_assessment/export.py", - "quality_assessment/export_unittest.py", - "quality_assessment/external_vad.py", - "quality_assessment/input_mixer.py", - "quality_assessment/input_signal_creator.py", - "quality_assessment/results.css", - "quality_assessment/results.js", - "quality_assessment/signal_processing.py", - "quality_assessment/simulation.py", - "quality_assessment/test_data_generation.py", - "quality_assessment/test_data_generation_factory.py", - ] - visibility = [ ":*" ] # Only targets in this file can depend on this. - outputs = [ "$root_build_dir/py_quality_assessment/quality_assessment/{{source_file_part}}" ] - deps = [ "../../../../resources/audio_processing/test/py_quality_assessment:noise_tracks" ] - } - - copy("output") { - testonly = true - sources = [ "output/README.md" ] - visibility = [ ":*" ] # Only targets in this file can depend on this. - outputs = - [ "$root_build_dir/py_quality_assessment/output/{{source_file_part}}" ] - } - - group("unit_tests") { - testonly = true - visibility = [ ":*" ] # Only targets in this file can depend on this. - deps = [ - ":apm_vad", - ":fake_polqa", - ":lib_unit_tests", - ":scripts_unit_tests", - ":vad", - ] - } - - rtc_executable("fake_polqa") { - testonly = true - sources = [ "quality_assessment/fake_polqa.cc" ] - visibility = [ ":*" ] # Only targets in this file can depend on this. 
- output_dir = "${root_out_dir}/py_quality_assessment/quality_assessment" - deps = [ - "../../../../rtc_base:checks", - "//third_party/abseil-cpp/absl/strings", - ] - } - - rtc_executable("vad") { - testonly = true - sources = [ "quality_assessment/vad.cc" ] - deps = [ - "../../../../common_audio", - "../../../../rtc_base:logging", - "//third_party/abseil-cpp/absl/flags:flag", - "//third_party/abseil-cpp/absl/flags:parse", - ] - } - - rtc_executable("apm_vad") { - testonly = true - sources = [ "quality_assessment/apm_vad.cc" ] - deps = [ - "../..", - "../../../../common_audio", - "../../../../rtc_base:logging", - "../../vad", - "//third_party/abseil-cpp/absl/flags:flag", - "//third_party/abseil-cpp/absl/flags:parse", - ] - } - - rtc_executable("sound_level") { - testonly = true - sources = [ "quality_assessment/sound_level.cc" ] - deps = [ - "../..", - "../../../../common_audio", - "../../../../rtc_base:logging", - "//third_party/abseil-cpp/absl/flags:flag", - "//third_party/abseil-cpp/absl/flags:parse", - ] - } - - copy("lib_unit_tests") { - testonly = true - sources = [ - "quality_assessment/annotations_unittest.py", - "quality_assessment/echo_path_simulation_unittest.py", - "quality_assessment/eval_scores_unittest.py", - "quality_assessment/fake_external_vad.py", - "quality_assessment/input_mixer_unittest.py", - "quality_assessment/signal_processing_unittest.py", - "quality_assessment/simulation_unittest.py", - "quality_assessment/test_data_generation_unittest.py", - ] - visibility = [ ":*" ] # Only targets in this file can depend on this. - outputs = [ "$root_build_dir/py_quality_assessment/quality_assessment/{{source_file_part}}" ] - } - - copy("scripts_unit_tests") { - testonly = true - sources = [ "apm_quality_assessment_unittest.py" ] - visibility = [ ":*" ] # Only targets in this file can depend on this. - outputs = [ "$root_build_dir/py_quality_assessment/{{source_file_part}}" ] - } -} diff --git a/modules/audio_processing/test/py_quality_assessment/OWNERS b/modules/audio_processing/test/py_quality_assessment/OWNERS deleted file mode 100644 index 9f56bb830d..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/OWNERS +++ /dev/null @@ -1,5 +0,0 @@ -aleloi@webrtc.org -alessiob@webrtc.org -henrik.lundin@webrtc.org -ivoc@webrtc.org -peah@webrtc.org diff --git a/modules/audio_processing/test/py_quality_assessment/README.md b/modules/audio_processing/test/py_quality_assessment/README.md deleted file mode 100644 index 4156112df2..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/README.md +++ /dev/null @@ -1,125 +0,0 @@ -# APM Quality Assessment tool - -Python wrapper of APM simulators (e.g., `audioproc_f`) with which quality -assessment can be automatized. The tool allows to simulate different noise -conditions, input signals, APM configurations and it computes different scores. -Once the scores are computed, the results can be easily exported to an HTML page -which allows to listen to the APM input and output signals and also the -reference one used for evaluation. 
- -## Dependencies - - OS: Linux - - Python 2.7 - - Python libraries: enum34, numpy, scipy, pydub (0.17.0+), pandas (0.20.1+), - pyquery (1.2+), jsmin (2.2+), csscompressor (0.9.4) - - It is recommended that a dedicated Python environment is used - - install `virtualenv` - - `$ sudo apt-get install python-virtualenv` - - setup a new Python environment (e.g., `my_env`) - - `$ cd ~ && virtualenv my_env` - - activate the new Python environment - - `$ source ~/my_env/bin/activate` - - add dependcies via `pip` - - `(my_env)$ pip install enum34 numpy pydub scipy pandas pyquery jsmin \` - `csscompressor` - - PolqaOem64 (see http://www.polqa.info/) - - Tested with POLQA Library v1.180 / P863 v2.400 - - Aachen Impulse Response (AIR) Database - - Download https://www2.iks.rwth-aachen.de/air/air_database_release_1_4.zip - - Input probing signals and noise tracks (you can make your own dataset - *1) - -## Build - - Compile WebRTC - - Go to `out/Default/py_quality_assessment` and check that - `apm_quality_assessment.py` exists - -## Unit tests - - Compile WebRTC - - Go to `out/Default/py_quality_assessment` - - Run `python -m unittest discover -p "*_unittest.py"` - -## First time setup - - Deploy PolqaOem64 and set the `POLQA_PATH` environment variable - - e.g., `$ export POLQA_PATH=/var/opt/PolqaOem64` - - Deploy the AIR Database and set the `AECHEN_IR_DATABASE_PATH` environment - variable - - e.g., `$ export AECHEN_IR_DATABASE_PATH=/var/opt/AIR_1_4` - - Deploy probing signal tracks into - - `out/Default/py_quality_assessment/probing_signals` (*1) - - Deploy noise tracks into - - `out/Default/py_quality_assessment/noise_tracks` (*1, *2) - -(*1) You can use custom files as long as they are mono tracks sampled at 48kHz -encoded in the 16 bit signed format (it is recommended that the tracks are -converted and exported with Audacity). - -## Usage (scores computation) - - Go to `out/Default/py_quality_assessment` - - Check the `apm_quality_assessment.sh` as an example script to parallelize the - experiments - - Adjust the script according to your preferences (e.g., output path) - - Run `apm_quality_assessment.sh` - - The script will end by opening the browser and showing ALL the computed - scores - -## Usage (export reports) -Showing all the results at once can be confusing. You therefore may want to -export separate reports. In this case, you can use the -`apm_quality_assessment_export.py` script as follows: - - - Set `--output_dir, -o` to the same value used in `apm_quality_assessment.sh` - - Use regular expressions to select/filter out scores by - - APM configurations: `--config_names, -c` - - capture signals: `--capture_names, -i` - - render signals: `--render_names, -r` - - echo simulator: `--echo_simulator_names, -e` - - test data generators: `--test_data_generators, -t` - - scores: `--eval_scores, -s` - - Assign a suffix to the report name using `-f ` - -For instance: - -``` -$ ./apm_quality_assessment_export.py \ - -o output/ \ - -c "(^default$)|(.*AE.*)" \ - -t \(white_noise\) \ - -s \(polqa\) \ - -f echo -``` - -## Usage (boxplot) -After generating stats, it can help to visualize how a score depends on a -certain APM simulator parameter. The `apm_quality_assessment_boxplot.py` script -helps with that, producing plots similar to [this -one](https://matplotlib.org/mpl_examples/pylab_examples/boxplot_demo_06.png). - -Suppose some scores come from running the APM simulator `audioproc_f` with -or without the level controller: `--lc=1` or `--lc=0`. 
Then two boxplots -side by side can be generated with - -``` -$ ./apm_quality_assessment_boxplot.py \ - -o /path/to/output - -v - -n /path/to/dir/with/apm_configs - -z lc -``` - -## Troubleshooting -The input wav file must be: - - sampled at a sample rate that is a multiple of 100 (required by POLQA) - - in the 16 bit format (required by `audioproc_f`) - - encoded in the Microsoft WAV signed 16 bit PCM format (Audacity default - when exporting) - -Depending on the license, the POLQA tool may take “breaks” as a way to limit the -throughput. When this happens, the APM Quality Assessment tool is slowed down. -For more details about this limitation, check Section 10.9.1 in the POLQA manual -v.1.18. - -In case of issues with the POLQA score computation, check -`py_quality_assessment/eval_scores.py` and adapt -`PolqaScore._parse_output_file()`. -The code can be also fixed directly into the build directory (namely, -`out/Default/py_quality_assessment/eval_scores.py`). diff --git a/modules/audio_processing/test/py_quality_assessment/apm_configs/default.json b/modules/audio_processing/test/py_quality_assessment/apm_configs/default.json deleted file mode 100644 index 5c3277bac0..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/apm_configs/default.json +++ /dev/null @@ -1 +0,0 @@ -{"-all_default": null} diff --git a/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment.py b/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment.py deleted file mode 100755 index e067ecb692..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment.py +++ /dev/null @@ -1,217 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Perform APM module quality assessment on one or more input files using one or - more APM simulator configuration files and one or more test data generators. - -Usage: apm_quality_assessment.py -i audio1.wav [audio2.wav ...] - -c cfg1.json [cfg2.json ...] - -n white [echo ...] - -e audio_level [polqa ...] 
- -o /path/to/output -""" - -import argparse -import logging -import os -import sys - -import quality_assessment.audioproc_wrapper as audioproc_wrapper -import quality_assessment.echo_path_simulation as echo_path_simulation -import quality_assessment.eval_scores as eval_scores -import quality_assessment.evaluation as evaluation -import quality_assessment.eval_scores_factory as eval_scores_factory -import quality_assessment.external_vad as external_vad -import quality_assessment.test_data_generation as test_data_generation -import quality_assessment.test_data_generation_factory as \ - test_data_generation_factory -import quality_assessment.simulation as simulation - -_ECHO_PATH_SIMULATOR_NAMES = ( - echo_path_simulation.EchoPathSimulator.REGISTERED_CLASSES) -_TEST_DATA_GENERATOR_CLASSES = ( - test_data_generation.TestDataGenerator.REGISTERED_CLASSES) -_TEST_DATA_GENERATORS_NAMES = _TEST_DATA_GENERATOR_CLASSES.keys() -_EVAL_SCORE_WORKER_CLASSES = eval_scores.EvaluationScore.REGISTERED_CLASSES -_EVAL_SCORE_WORKER_NAMES = _EVAL_SCORE_WORKER_CLASSES.keys() - -_DEFAULT_CONFIG_FILE = 'apm_configs/default.json' - -_POLQA_BIN_NAME = 'PolqaOem64' - - -def _InstanceArgumentsParser(): - """Arguments parser factory. - """ - parser = argparse.ArgumentParser(description=( - 'Perform APM module quality assessment on one or more input files using ' - 'one or more APM simulator configuration files and one or more ' - 'test data generators.')) - - parser.add_argument('-c', - '--config_files', - nargs='+', - required=False, - help=('path to the configuration files defining the ' - 'arguments with which the APM simulator tool is ' - 'called'), - default=[_DEFAULT_CONFIG_FILE]) - - parser.add_argument( - '-i', - '--capture_input_files', - nargs='+', - required=True, - help='path to the capture input wav files (one or more)') - - parser.add_argument('-r', - '--render_input_files', - nargs='+', - required=False, - help=('path to the render input wav files; either ' - 'omitted or one file for each file in ' - '--capture_input_files (files will be paired by ' - 'index)'), - default=None) - - parser.add_argument('-p', - '--echo_path_simulator', - required=False, - help=('custom echo path simulator name; required if ' - '--render_input_files is specified'), - choices=_ECHO_PATH_SIMULATOR_NAMES, - default=echo_path_simulation.NoEchoPathSimulator.NAME) - - parser.add_argument('-t', - '--test_data_generators', - nargs='+', - required=False, - help='custom list of test data generators to use', - choices=_TEST_DATA_GENERATORS_NAMES, - default=_TEST_DATA_GENERATORS_NAMES) - - parser.add_argument('--additive_noise_tracks_path', required=False, - help='path to the wav files for the additive', - default=test_data_generation. \ - AdditiveNoiseTestDataGenerator. \ - DEFAULT_NOISE_TRACKS_PATH) - - parser.add_argument('-e', - '--eval_scores', - nargs='+', - required=False, - help='custom list of evaluation scores to use', - choices=_EVAL_SCORE_WORKER_NAMES, - default=_EVAL_SCORE_WORKER_NAMES) - - parser.add_argument('-o', - '--output_dir', - required=False, - help=('base path to the output directory in which the ' - 'output wav files and the evaluation outcomes ' - 'are saved'), - default='output') - - parser.add_argument('--polqa_path', - required=True, - help='path to the POLQA tool') - - parser.add_argument('--air_db_path', - required=True, - help='path to the Aechen IR database') - - parser.add_argument('--apm_sim_path', required=False, - help='path to the APM simulator tool', - default=audioproc_wrapper. 
\ - AudioProcWrapper. \ - DEFAULT_APM_SIMULATOR_BIN_PATH) - - parser.add_argument('--echo_metric_tool_bin_path', - required=False, - help=('path to the echo metric binary ' - '(required for the echo eval score)'), - default=None) - - parser.add_argument( - '--copy_with_identity_generator', - required=False, - help=('If true, the identity test data generator makes a ' - 'copy of the clean speech input file.'), - default=False) - - parser.add_argument('--external_vad_paths', - nargs='+', - required=False, - help=('Paths to external VAD programs. Each must take' - '\'-i -o \' inputs'), - default=[]) - - parser.add_argument('--external_vad_names', - nargs='+', - required=False, - help=('Keys to the vad paths. Must be different and ' - 'as many as the paths.'), - default=[]) - - return parser - - -def _ValidateArguments(args, parser): - if args.capture_input_files and args.render_input_files and (len( - args.capture_input_files) != len(args.render_input_files)): - parser.error( - '--render_input_files and --capture_input_files must be lists ' - 'having the same length') - sys.exit(1) - - if args.render_input_files and not args.echo_path_simulator: - parser.error( - 'when --render_input_files is set, --echo_path_simulator is ' - 'also required') - sys.exit(1) - - if len(args.external_vad_names) != len(args.external_vad_paths): - parser.error('If provided, --external_vad_paths and ' - '--external_vad_names must ' - 'have the same number of arguments.') - sys.exit(1) - - -def main(): - # TODO(alessiob): level = logging.INFO once debugged. - logging.basicConfig(level=logging.DEBUG) - parser = _InstanceArgumentsParser() - args = parser.parse_args() - _ValidateArguments(args, parser) - - simulator = simulation.ApmModuleSimulator( - test_data_generator_factory=( - test_data_generation_factory.TestDataGeneratorFactory( - aechen_ir_database_path=args.air_db_path, - noise_tracks_path=args.additive_noise_tracks_path, - copy_with_identity=args.copy_with_identity_generator)), - evaluation_score_factory=eval_scores_factory. - EvaluationScoreWorkerFactory( - polqa_tool_bin_path=os.path.join(args.polqa_path, _POLQA_BIN_NAME), - echo_metric_tool_bin_path=args.echo_metric_tool_bin_path), - ap_wrapper=audioproc_wrapper.AudioProcWrapper(args.apm_sim_path), - evaluator=evaluation.ApmModuleEvaluator(), - external_vads=external_vad.ExternalVad.ConstructVadDict( - args.external_vad_paths, args.external_vad_names)) - simulator.Run(config_filepaths=args.config_files, - capture_input_filepaths=args.capture_input_files, - render_input_filepaths=args.render_input_files, - echo_path_simulator_name=args.echo_path_simulator, - test_data_generator_names=args.test_data_generators, - eval_score_names=args.eval_scores, - output_dir=args.output_dir) - sys.exit(0) - - -if __name__ == '__main__': - main() diff --git a/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment.sh b/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment.sh deleted file mode 100755 index aa563ee26b..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment.sh +++ /dev/null @@ -1,91 +0,0 @@ -#!/bin/bash -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. 
All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. - -# Path to the POLQA tool. -if [ -z ${POLQA_PATH} ]; then # Check if defined. - # Default location. - export POLQA_PATH='/var/opt/PolqaOem64' -fi -if [ -d "${POLQA_PATH}" ]; then - echo "POLQA found in ${POLQA_PATH}" -else - echo "POLQA not found in ${POLQA_PATH}" - exit 1 -fi - -# Path to the Aechen IR database. -if [ -z ${AECHEN_IR_DATABASE_PATH} ]; then # Check if defined. - # Default location. - export AECHEN_IR_DATABASE_PATH='/var/opt/AIR_1_4' -fi -if [ -d "${AECHEN_IR_DATABASE_PATH}" ]; then - echo "AIR database found in ${AECHEN_IR_DATABASE_PATH}" -else - echo "AIR database not found in ${AECHEN_IR_DATABASE_PATH}" - exit 1 -fi - -# Customize probing signals, test data generators and scores if needed. -CAPTURE_SIGNALS=(probing_signals/*.wav) -TEST_DATA_GENERATORS=( \ - "identity" \ - "white_noise" \ - # "environmental_noise" \ - # "reverberation" \ -) -SCORES=( \ - # "polqa" \ - "audio_level_peak" \ - "audio_level_mean" \ -) -OUTPUT_PATH=output - -# Generate standard APM config files. -chmod +x apm_quality_assessment_gencfgs.py -./apm_quality_assessment_gencfgs.py - -# Customize APM configurations if needed. -APM_CONFIGS=(apm_configs/*.json) - -# Add output path if missing. -if [ ! -d ${OUTPUT_PATH} ]; then - mkdir ${OUTPUT_PATH} -fi - -# Start one process for each "probing signal"-"test data source" pair. -chmod +x apm_quality_assessment.py -for capture_signal_filepath in "${CAPTURE_SIGNALS[@]}" ; do - probing_signal_name="$(basename $capture_signal_filepath)" - probing_signal_name="${probing_signal_name%.*}" - for test_data_gen_name in "${TEST_DATA_GENERATORS[@]}" ; do - LOG_FILE="${OUTPUT_PATH}/apm_qa-${probing_signal_name}-"` - `"${test_data_gen_name}.log" - echo "Starting ${probing_signal_name} ${test_data_gen_name} "` - `"(see ${LOG_FILE})" - ./apm_quality_assessment.py \ - --polqa_path ${POLQA_PATH}\ - --air_db_path ${AECHEN_IR_DATABASE_PATH}\ - -i ${capture_signal_filepath} \ - -o ${OUTPUT_PATH} \ - -t ${test_data_gen_name} \ - -c "${APM_CONFIGS[@]}" \ - -e "${SCORES[@]}" > $LOG_FILE 2>&1 & - done -done - -# Join Python processes running apm_quality_assessment.py. -wait - -# Export results. -chmod +x ./apm_quality_assessment_export.py -./apm_quality_assessment_export.py -o ${OUTPUT_PATH} - -# Show results in the browser. -RESULTS_FILE="$(realpath ${OUTPUT_PATH}/results.html)" -sensible-browser "file://${RESULTS_FILE}" > /dev/null 2>&1 & diff --git a/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_boxplot.py b/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_boxplot.py deleted file mode 100644 index c425885b95..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_boxplot.py +++ /dev/null @@ -1,154 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Shows boxplots of given score for different values of selected -parameters. Can be used to compare scores by audioproc_f flag. 
- -Usage: apm_quality_assessment_boxplot.py -o /path/to/output - -v polqa - -n /path/to/dir/with/apm_configs - -z audioproc_f_arg1 [arg2 ...] - -Arguments --config_names, --render_names, --echo_simulator_names, ---test_data_generators, --eval_scores can be used to filter the data -used for plotting. -""" - -import collections -import logging -import matplotlib.pyplot as plt -import os - -import quality_assessment.data_access as data_access -import quality_assessment.collect_data as collect_data - - -def InstanceArgumentsParser(): - """Arguments parser factory. - """ - parser = collect_data.InstanceArgumentsParser() - parser.description = ( - 'Shows boxplot of given score for different values of selected' - 'parameters. Can be used to compare scores by audioproc_f flag') - - parser.add_argument('-v', - '--eval_score', - required=True, - help=('Score name for constructing boxplots')) - - parser.add_argument( - '-n', - '--config_dir', - required=False, - help=('path to the folder with the configuration files'), - default='apm_configs') - - parser.add_argument('-z', - '--params_to_plot', - required=True, - nargs='+', - help=('audioproc_f parameter values' - 'by which to group scores (no leading dash)')) - - return parser - - -def FilterScoresByParams(data_frame, filter_params, score_name, config_dir): - """Filters data on the values of one or more parameters. - - Args: - data_frame: pandas.DataFrame of all used input data. - - filter_params: each config of the input data is assumed to have - exactly one parameter from `filter_params` defined. Every value - of the parameters in `filter_params` is a key in the returned - dict; the associated value is all cells of the data with that - value of the parameter. - - score_name: Name of score which value is boxplotted. Currently cannot do - more than one value. - - config_dir: path to dir with APM configs. - - Returns: dictionary, key is a param value, result is all scores for - that param value (see `filter_params` for explanation). - """ - results = collections.defaultdict(dict) - config_names = data_frame['apm_config'].drop_duplicates().values.tolist() - - for config_name in config_names: - config_json = data_access.AudioProcConfigFile.Load( - os.path.join(config_dir, config_name + '.json')) - data_with_config = data_frame[data_frame.apm_config == config_name] - data_cell_scores = data_with_config[data_with_config.eval_score_name == - score_name] - - # Exactly one of `params_to_plot` must match: - (matching_param, ) = [ - x for x in filter_params if '-' + x in config_json - ] - - # Add scores for every track to the result. - for capture_name in data_cell_scores.capture: - result_score = float(data_cell_scores[data_cell_scores.capture == - capture_name].score) - config_dict = results[config_json['-' + matching_param]] - if capture_name not in config_dict: - config_dict[capture_name] = {} - - config_dict[capture_name][matching_param] = result_score - - return results - - -def _FlattenToScoresList(config_param_score_dict): - """Extracts a list of scores from input data structure. - - Args: - config_param_score_dict: of the form {'capture_name': - {'param_name' : score_value,.. } ..} - - Returns: Plain list of all score value present in input data - structure - """ - result = [] - for capture_name in config_param_score_dict: - result += list(config_param_score_dict[capture_name].values()) - return result - - -def main(): - # Init. - # TODO(alessiob): INFO once debugged. 
- logging.basicConfig(level=logging.DEBUG) - parser = InstanceArgumentsParser() - args = parser.parse_args() - - # Get the scores. - src_path = collect_data.ConstructSrcPath(args) - logging.debug(src_path) - scores_data_frame = collect_data.FindScores(src_path, args) - - # Filter the data by `args.params_to_plot` - scores_filtered = FilterScoresByParams(scores_data_frame, - args.params_to_plot, - args.eval_score, args.config_dir) - - data_list = sorted(scores_filtered.items()) - data_values = [_FlattenToScoresList(x) for (_, x) in data_list] - data_labels = [x for (x, _) in data_list] - - _, axes = plt.subplots(nrows=1, ncols=1, figsize=(6, 6)) - axes.boxplot(data_values, labels=data_labels) - axes.set_ylabel(args.eval_score) - axes.set_xlabel('/'.join(args.params_to_plot)) - plt.show() - - -if __name__ == "__main__": - main() diff --git a/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_export.py b/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_export.py deleted file mode 100755 index c20accb9dc..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_export.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Export the scores computed by the apm_quality_assessment.py script into an - HTML file. -""" - -import logging -import os -import sys - -import quality_assessment.collect_data as collect_data -import quality_assessment.export as export - - -def _BuildOutputFilename(filename_suffix): - """Builds the filename for the exported file. - - Args: - filename_suffix: suffix for the output file name. - - Returns: - A string. - """ - if filename_suffix is None: - return 'results.html' - return 'results-{}.html'.format(filename_suffix) - - -def main(): - # Init. - logging.basicConfig( - level=logging.DEBUG) # TODO(alessio): INFO once debugged. - parser = collect_data.InstanceArgumentsParser() - parser.add_argument('-f', - '--filename_suffix', - help=('suffix of the exported file')) - parser.description = ('Exports pre-computed APM module quality assessment ' - 'results into HTML tables') - args = parser.parse_args() - - # Get the scores. - src_path = collect_data.ConstructSrcPath(args) - logging.debug(src_path) - scores_data_frame = collect_data.FindScores(src_path, args) - - # Export. - output_filepath = os.path.join(args.output_dir, - _BuildOutputFilename(args.filename_suffix)) - exporter = export.HtmlExport(output_filepath) - exporter.Export(scores_data_frame) - - logging.info('output file successfully written in %s', output_filepath) - sys.exit(0) - - -if __name__ == '__main__': - main() diff --git a/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_gencfgs.py b/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_gencfgs.py deleted file mode 100755 index ca80f85bd1..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_gencfgs.py +++ /dev/null @@ -1,128 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. 
-# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Generate .json files with which the APM module can be tested using the - apm_quality_assessment.py script and audioproc_f as APM simulator. -""" - -import logging -import os - -import quality_assessment.data_access as data_access - -OUTPUT_PATH = os.path.abspath('apm_configs') - - -def _GenerateDefaultOverridden(config_override): - """Generates one or more APM overriden configurations. - - For each item in config_override, it overrides the default configuration and - writes a new APM configuration file. - - The default settings are loaded via "-all_default". - Check "src/modules/audio_processing/test/audioproc_float.cc" and search - for "if (FLAG_all_default) {". - - For instance, in 55eb6d621489730084927868fed195d3645a9ec9 the default is this: - settings.use_aec = rtc::Optional(true); - settings.use_aecm = rtc::Optional(false); - settings.use_agc = rtc::Optional(true); - settings.use_bf = rtc::Optional(false); - settings.use_ed = rtc::Optional(false); - settings.use_hpf = rtc::Optional(true); - settings.use_le = rtc::Optional(true); - settings.use_ns = rtc::Optional(true); - settings.use_ts = rtc::Optional(true); - settings.use_vad = rtc::Optional(true); - - Args: - config_override: dict of APM configuration file names as keys; the values - are dict instances encoding the audioproc_f flags. - """ - for config_filename in config_override: - config = config_override[config_filename] - config['-all_default'] = None - - config_filepath = os.path.join( - OUTPUT_PATH, 'default-{}.json'.format(config_filename)) - logging.debug('config file <%s> | %s', config_filepath, config) - - data_access.AudioProcConfigFile.Save(config_filepath, config) - logging.info('config file created: <%s>', config_filepath) - - -def _GenerateAllDefaultButOne(): - """Disables the flags enabled by default one-by-one. - """ - config_sets = { - 'no_AEC': { - '-aec': 0, - }, - 'no_AGC': { - '-agc': 0, - }, - 'no_HP_filter': { - '-hpf': 0, - }, - 'no_level_estimator': { - '-le': 0, - }, - 'no_noise_suppressor': { - '-ns': 0, - }, - 'no_transient_suppressor': { - '-ts': 0, - }, - 'no_vad': { - '-vad': 0, - }, - } - _GenerateDefaultOverridden(config_sets) - - -def _GenerateAllDefaultPlusOne(): - """Enables the flags disabled by default one-by-one. - """ - config_sets = { - 'with_AECM': { - '-aec': 0, - '-aecm': 1, - }, # AEC and AECM are exclusive. 
- 'with_AGC_limiter': { - '-agc_limiter': 1, - }, - 'with_AEC_delay_agnostic': { - '-delay_agnostic': 1, - }, - 'with_drift_compensation': { - '-drift_compensation': 1, - }, - 'with_residual_echo_detector': { - '-ed': 1, - }, - 'with_AEC_extended_filter': { - '-extended_filter': 1, - }, - 'with_LC': { - '-lc': 1, - }, - 'with_refined_adaptive_filter': { - '-refined_adaptive_filter': 1, - }, - } - _GenerateDefaultOverridden(config_sets) - - -def main(): - logging.basicConfig(level=logging.INFO) - _GenerateAllDefaultPlusOne() - _GenerateAllDefaultButOne() - - -if __name__ == '__main__': - main() diff --git a/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_optimize.py b/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_optimize.py deleted file mode 100644 index ecae2ed995..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_optimize.py +++ /dev/null @@ -1,189 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Finds the APM configuration that maximizes a provided metric by -parsing the output generated apm_quality_assessment.py. -""" - -from __future__ import division - -import collections -import logging -import os - -import quality_assessment.data_access as data_access -import quality_assessment.collect_data as collect_data - - -def _InstanceArgumentsParser(): - """Arguments parser factory. Extends the arguments from 'collect_data' - with a few extra for selecting what parameters to optimize for. - """ - parser = collect_data.InstanceArgumentsParser() - parser.description = ( - 'Rudimentary optimization of a function over different parameter' - 'combinations.') - - parser.add_argument( - '-n', - '--config_dir', - required=False, - help=('path to the folder with the configuration files'), - default='apm_configs') - - parser.add_argument('-p', - '--params', - required=True, - nargs='+', - help=('parameters to parse from the config files in' - 'config_dir')) - - parser.add_argument( - '-z', - '--params_not_to_optimize', - required=False, - nargs='+', - default=[], - help=('parameters from `params` not to be optimized for')) - - return parser - - -def _ConfigurationAndScores(data_frame, params, params_not_to_optimize, - config_dir): - """Returns a list of all configurations and scores. - - Args: - data_frame: A pandas data frame with the scores and config name - returned by _FindScores. - params: The parameter names to parse from configs the config - directory - - params_not_to_optimize: The parameter names which shouldn't affect - the optimal parameter - selection. E.g., fixed settings and not - tunable parameters. - - config_dir: Path to folder with config files. - - Returns: - Dictionary of the form - {param_combination: [{params: {param1: value1, ...}, - scores: {score1: value1, ...}}]}. - - The key `param_combination` runs over all parameter combinations - of the parameters in `params` and not in - `params_not_to_optimize`. A corresponding value is a list of all - param combinations for params in `params_not_to_optimize` and - their scores. 
- """ - results = collections.defaultdict(list) - config_names = data_frame['apm_config'].drop_duplicates().values.tolist() - score_names = data_frame['eval_score_name'].drop_duplicates( - ).values.tolist() - - # Normalize the scores - normalization_constants = {} - for score_name in score_names: - scores = data_frame[data_frame.eval_score_name == score_name].score - normalization_constants[score_name] = max(scores) - - params_to_optimize = [p for p in params if p not in params_not_to_optimize] - param_combination = collections.namedtuple("ParamCombination", - params_to_optimize) - - for config_name in config_names: - config_json = data_access.AudioProcConfigFile.Load( - os.path.join(config_dir, config_name + ".json")) - scores = {} - data_cell = data_frame[data_frame.apm_config == config_name] - for score_name in score_names: - data_cell_scores = data_cell[data_cell.eval_score_name == - score_name].score - scores[score_name] = sum(data_cell_scores) / len(data_cell_scores) - scores[score_name] /= normalization_constants[score_name] - - result = {'scores': scores, 'params': {}} - config_optimize_params = {} - for param in params: - if param in params_to_optimize: - config_optimize_params[param] = config_json['-' + param] - else: - result['params'][param] = config_json['-' + param] - - current_param_combination = param_combination(**config_optimize_params) - results[current_param_combination].append(result) - return results - - -def _FindOptimalParameter(configs_and_scores, score_weighting): - """Finds the config producing the maximal score. - - Args: - configs_and_scores: structure of the form returned by - _ConfigurationAndScores - - score_weighting: a function to weight together all score values of - the form [{params: {param1: value1, ...}, scores: - {score1: value1, ...}}] into a numeric - value - Returns: - the config that has the largest values of `score_weighting` applied - to its scores. - """ - - min_score = float('+inf') - best_params = None - for config in configs_and_scores: - scores_and_params = configs_and_scores[config] - current_score = score_weighting(scores_and_params) - if current_score < min_score: - min_score = current_score - best_params = config - logging.debug("Score: %f", current_score) - logging.debug("Config: %s", str(config)) - return best_params - - -def _ExampleWeighting(scores_and_configs): - """Example argument to `_FindOptimalParameter` - Args: - scores_and_configs: a list of configs and scores, in the form - described in _FindOptimalParameter - Returns: - numeric value, the sum of all scores - """ - res = 0 - for score_config in scores_and_configs: - res += sum(score_config['scores'].values()) - return res - - -def main(): - # Init. - # TODO(alessiob): INFO once debugged. - logging.basicConfig(level=logging.DEBUG) - parser = _InstanceArgumentsParser() - args = parser.parse_args() - - # Get the scores. 
- src_path = collect_data.ConstructSrcPath(args) - logging.debug('Src path <%s>', src_path) - scores_data_frame = collect_data.FindScores(src_path, args) - all_scores = _ConfigurationAndScores(scores_data_frame, args.params, - args.params_not_to_optimize, - args.config_dir) - - opt_param = _FindOptimalParameter(all_scores, _ExampleWeighting) - - logging.info('Optimal parameter combination: <%s>', opt_param) - logging.info('It\'s score values: <%s>', all_scores[opt_param]) - - -if __name__ == "__main__": - main() diff --git a/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_unittest.py b/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_unittest.py deleted file mode 100644 index 80338c1373..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/apm_quality_assessment_unittest.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Unit tests for the apm_quality_assessment module. -""" - -import sys -import unittest - -import mock - -import apm_quality_assessment - - -class TestSimulationScript(unittest.TestCase): - """Unit tests for the apm_quality_assessment module. - """ - - def testMain(self): - # Exit with error code if no arguments are passed. - with self.assertRaises(SystemExit) as cm, mock.patch.object( - sys, 'argv', ['apm_quality_assessment.py']): - apm_quality_assessment.main() - self.assertGreater(cm.exception.code, 0) diff --git a/modules/audio_processing/test/py_quality_assessment/output/README.md b/modules/audio_processing/test/py_quality_assessment/output/README.md deleted file mode 100644 index 66e2a1c848..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/output/README.md +++ /dev/null @@ -1 +0,0 @@ -You can use this folder for the output generated by the apm_quality_assessment scripts. diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/annotations.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/annotations.py deleted file mode 100644 index 93a8248397..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/annotations.py +++ /dev/null @@ -1,296 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Extraction of annotations from audio files. -""" - -from __future__ import division -import logging -import os -import shutil -import struct -import subprocess -import sys -import tempfile - -try: - import numpy as np -except ImportError: - logging.critical('Cannot import the third-party Python package numpy') - sys.exit(1) - -from . import external_vad -from . import exceptions -from . import signal_processing - - -class AudioAnnotationsExtractor(object): - """Extracts annotations from audio files. 
- """ - - class VadType(object): - ENERGY_THRESHOLD = 1 # TODO(alessiob): Consider switching to P56 standard. - WEBRTC_COMMON_AUDIO = 2 # common_audio/vad/include/vad.h - WEBRTC_APM = 4 # modules/audio_processing/vad/vad.h - - def __init__(self, value): - if (not isinstance(value, int)) or not 0 <= value <= 7: - raise exceptions.InitializationException('Invalid vad type: ' + - value) - self._value = value - - def Contains(self, vad_type): - return self._value | vad_type == self._value - - def __str__(self): - vads = [] - if self.Contains(self.ENERGY_THRESHOLD): - vads.append("energy") - if self.Contains(self.WEBRTC_COMMON_AUDIO): - vads.append("common_audio") - if self.Contains(self.WEBRTC_APM): - vads.append("apm") - return "VadType({})".format(", ".join(vads)) - - _OUTPUT_FILENAME_TEMPLATE = '{}annotations.npz' - - # Level estimation params. - _ONE_DB_REDUCTION = np.power(10.0, -1.0 / 20.0) - _LEVEL_FRAME_SIZE_MS = 1.0 - # The time constants in ms indicate the time it takes for the level estimate - # to go down/up by 1 db if the signal is zero. - _LEVEL_ATTACK_MS = 5.0 - _LEVEL_DECAY_MS = 20.0 - - # VAD params. - _VAD_THRESHOLD = 1 - _VAD_WEBRTC_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), - os.pardir, os.pardir) - _VAD_WEBRTC_COMMON_AUDIO_PATH = os.path.join(_VAD_WEBRTC_PATH, 'vad') - - _VAD_WEBRTC_APM_PATH = os.path.join(_VAD_WEBRTC_PATH, 'apm_vad') - - def __init__(self, vad_type, external_vads=None): - self._signal = None - self._level = None - self._level_frame_size = None - self._common_audio_vad = None - self._energy_vad = None - self._apm_vad_probs = None - self._apm_vad_rms = None - self._vad_frame_size = None - self._vad_frame_size_ms = None - self._c_attack = None - self._c_decay = None - - self._vad_type = self.VadType(vad_type) - logging.info('VADs used for annotations: ' + str(self._vad_type)) - - if external_vads is None: - external_vads = {} - self._external_vads = external_vads - - assert len(self._external_vads) == len(external_vads), ( - 'The external VAD names must be unique.') - for vad in external_vads.values(): - if not isinstance(vad, external_vad.ExternalVad): - raise exceptions.InitializationException('Invalid vad type: ' + - str(type(vad))) - logging.info('External VAD used for annotation: ' + str(vad.name)) - - assert os.path.exists(self._VAD_WEBRTC_COMMON_AUDIO_PATH), \ - self._VAD_WEBRTC_COMMON_AUDIO_PATH - assert os.path.exists(self._VAD_WEBRTC_APM_PATH), \ - self._VAD_WEBRTC_APM_PATH - - @classmethod - def GetOutputFileNameTemplate(cls): - return cls._OUTPUT_FILENAME_TEMPLATE - - def GetLevel(self): - return self._level - - def GetLevelFrameSize(self): - return self._level_frame_size - - @classmethod - def GetLevelFrameSizeMs(cls): - return cls._LEVEL_FRAME_SIZE_MS - - def GetVadOutput(self, vad_type): - if vad_type == self.VadType.ENERGY_THRESHOLD: - return self._energy_vad - elif vad_type == self.VadType.WEBRTC_COMMON_AUDIO: - return self._common_audio_vad - elif vad_type == self.VadType.WEBRTC_APM: - return (self._apm_vad_probs, self._apm_vad_rms) - else: - raise exceptions.InitializationException('Invalid vad type: ' + - vad_type) - - def GetVadFrameSize(self): - return self._vad_frame_size - - def GetVadFrameSizeMs(self): - return self._vad_frame_size_ms - - def Extract(self, filepath): - # Load signal. - self._signal = signal_processing.SignalProcessingUtils.LoadWav( - filepath) - if self._signal.channels != 1: - raise NotImplementedError( - 'Multiple-channel annotations not implemented') - - # Level estimation params. 
- self._level_frame_size = int(self._signal.frame_rate / 1000 * - (self._LEVEL_FRAME_SIZE_MS)) - self._c_attack = 0.0 if self._LEVEL_ATTACK_MS == 0 else ( - self._ONE_DB_REDUCTION**(self._LEVEL_FRAME_SIZE_MS / - self._LEVEL_ATTACK_MS)) - self._c_decay = 0.0 if self._LEVEL_DECAY_MS == 0 else ( - self._ONE_DB_REDUCTION**(self._LEVEL_FRAME_SIZE_MS / - self._LEVEL_DECAY_MS)) - - # Compute level. - self._LevelEstimation() - - # Ideal VAD output, it requires clean speech with high SNR as input. - if self._vad_type.Contains(self.VadType.ENERGY_THRESHOLD): - # Naive VAD based on level thresholding. - vad_threshold = np.percentile(self._level, self._VAD_THRESHOLD) - self._energy_vad = np.uint8(self._level > vad_threshold) - self._vad_frame_size = self._level_frame_size - self._vad_frame_size_ms = self._LEVEL_FRAME_SIZE_MS - if self._vad_type.Contains(self.VadType.WEBRTC_COMMON_AUDIO): - # WebRTC common_audio/ VAD. - self._RunWebRtcCommonAudioVad(filepath, self._signal.frame_rate) - if self._vad_type.Contains(self.VadType.WEBRTC_APM): - # WebRTC modules/audio_processing/ VAD. - self._RunWebRtcApmVad(filepath) - for extvad_name in self._external_vads: - self._external_vads[extvad_name].Run(filepath) - - def Save(self, output_path, annotation_name=""): - ext_kwargs = { - 'extvad_conf-' + ext_vad: - self._external_vads[ext_vad].GetVadOutput() - for ext_vad in self._external_vads - } - np.savez_compressed(file=os.path.join( - output_path, - self.GetOutputFileNameTemplate().format(annotation_name)), - level=self._level, - level_frame_size=self._level_frame_size, - level_frame_size_ms=self._LEVEL_FRAME_SIZE_MS, - vad_output=self._common_audio_vad, - vad_energy_output=self._energy_vad, - vad_frame_size=self._vad_frame_size, - vad_frame_size_ms=self._vad_frame_size_ms, - vad_probs=self._apm_vad_probs, - vad_rms=self._apm_vad_rms, - **ext_kwargs) - - def _LevelEstimation(self): - # Read samples. - samples = signal_processing.SignalProcessingUtils.AudioSegmentToRawData( - self._signal).astype(np.float32) / 32768.0 - num_frames = len(samples) // self._level_frame_size - num_samples = num_frames * self._level_frame_size - - # Envelope. - self._level = np.max(np.reshape(np.abs(samples[:num_samples]), - (num_frames, self._level_frame_size)), - axis=1) - assert len(self._level) == num_frames - - # Envelope smoothing. - smooth = lambda curr, prev, k: (1 - k) * curr + k * prev - self._level[0] = smooth(self._level[0], 0.0, self._c_attack) - for i in range(1, num_frames): - self._level[i] = smooth( - self._level[i], self._level[i - 1], self._c_attack if - (self._level[i] > self._level[i - 1]) else self._c_decay) - - def _RunWebRtcCommonAudioVad(self, wav_file_path, sample_rate): - self._common_audio_vad = None - self._vad_frame_size = None - - # Create temporary output path. - tmp_path = tempfile.mkdtemp() - output_file_path = os.path.join( - tmp_path, - os.path.split(wav_file_path)[1] + '_vad.tmp') - - # Call WebRTC VAD. - try: - subprocess.call([ - self._VAD_WEBRTC_COMMON_AUDIO_PATH, '-i', wav_file_path, '-o', - output_file_path - ], - cwd=self._VAD_WEBRTC_PATH) - - # Read bytes. - with open(output_file_path, 'rb') as f: - raw_data = f.read() - - # Parse side information. - self._vad_frame_size_ms = struct.unpack('B', raw_data[0])[0] - self._vad_frame_size = self._vad_frame_size_ms * sample_rate / 1000 - assert self._vad_frame_size_ms in [10, 20, 30] - extra_bits = struct.unpack('B', raw_data[-1])[0] - assert 0 <= extra_bits <= 8 - - # Init VAD vector. 
- num_bytes = len(raw_data) - num_frames = 8 * (num_bytes - - 2) - extra_bits # 8 frames for each byte. - self._common_audio_vad = np.zeros(num_frames, np.uint8) - - # Read VAD decisions. - for i, byte in enumerate(raw_data[1:-1]): - byte = struct.unpack('B', byte)[0] - for j in range(8 if i < num_bytes - 3 else (8 - extra_bits)): - self._common_audio_vad[i * 8 + j] = int(byte & 1) - byte = byte >> 1 - except Exception as e: - logging.error('Error while running the WebRTC VAD (' + e.message + - ')') - finally: - if os.path.exists(tmp_path): - shutil.rmtree(tmp_path) - - def _RunWebRtcApmVad(self, wav_file_path): - # Create temporary output path. - tmp_path = tempfile.mkdtemp() - output_file_path_probs = os.path.join( - tmp_path, - os.path.split(wav_file_path)[1] + '_vad_probs.tmp') - output_file_path_rms = os.path.join( - tmp_path, - os.path.split(wav_file_path)[1] + '_vad_rms.tmp') - - # Call WebRTC VAD. - try: - subprocess.call([ - self._VAD_WEBRTC_APM_PATH, '-i', wav_file_path, '-o_probs', - output_file_path_probs, '-o_rms', output_file_path_rms - ], - cwd=self._VAD_WEBRTC_PATH) - - # Parse annotations. - self._apm_vad_probs = np.fromfile(output_file_path_probs, - np.double) - self._apm_vad_rms = np.fromfile(output_file_path_rms, np.double) - assert len(self._apm_vad_rms) == len(self._apm_vad_probs) - - except Exception as e: - logging.error('Error while running the WebRTC APM VAD (' + - e.message + ')') - finally: - if os.path.exists(tmp_path): - shutil.rmtree(tmp_path) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/annotations_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/annotations_unittest.py deleted file mode 100644 index 8230208808..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/annotations_unittest.py +++ /dev/null @@ -1,160 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Unit tests for the annotations module. -""" - -from __future__ import division -import logging -import os -import shutil -import tempfile -import unittest - -import numpy as np - -from . import annotations -from . import external_vad -from . import input_signal_creator -from . import signal_processing - - -class TestAnnotationsExtraction(unittest.TestCase): - """Unit tests for the annotations module. 
- """ - - _CLEAN_TMP_OUTPUT = True - _DEBUG_PLOT_VAD = False - _VAD_TYPE_CLASS = annotations.AudioAnnotationsExtractor.VadType - _ALL_VAD_TYPES = (_VAD_TYPE_CLASS.ENERGY_THRESHOLD - | _VAD_TYPE_CLASS.WEBRTC_COMMON_AUDIO - | _VAD_TYPE_CLASS.WEBRTC_APM) - - def setUp(self): - """Create temporary folder.""" - self._tmp_path = tempfile.mkdtemp() - self._wav_file_path = os.path.join(self._tmp_path, 'tone.wav') - pure_tone, _ = input_signal_creator.InputSignalCreator.Create( - 'pure_tone', [440, 1000]) - signal_processing.SignalProcessingUtils.SaveWav( - self._wav_file_path, pure_tone) - self._sample_rate = pure_tone.frame_rate - - def tearDown(self): - """Recursively delete temporary folder.""" - if self._CLEAN_TMP_OUTPUT: - shutil.rmtree(self._tmp_path) - else: - logging.warning(self.id() + ' did not clean the temporary path ' + - (self._tmp_path)) - - def testFrameSizes(self): - e = annotations.AudioAnnotationsExtractor(self._ALL_VAD_TYPES) - e.Extract(self._wav_file_path) - samples_to_ms = lambda n, sr: 1000 * n // sr - self.assertEqual( - samples_to_ms(e.GetLevelFrameSize(), self._sample_rate), - e.GetLevelFrameSizeMs()) - self.assertEqual(samples_to_ms(e.GetVadFrameSize(), self._sample_rate), - e.GetVadFrameSizeMs()) - - def testVoiceActivityDetectors(self): - for vad_type_value in range(0, self._ALL_VAD_TYPES + 1): - vad_type = self._VAD_TYPE_CLASS(vad_type_value) - e = annotations.AudioAnnotationsExtractor(vad_type=vad_type_value) - e.Extract(self._wav_file_path) - if vad_type.Contains(self._VAD_TYPE_CLASS.ENERGY_THRESHOLD): - # pylint: disable=unpacking-non-sequence - vad_output = e.GetVadOutput( - self._VAD_TYPE_CLASS.ENERGY_THRESHOLD) - self.assertGreater(len(vad_output), 0) - self.assertGreaterEqual( - float(np.sum(vad_output)) / len(vad_output), 0.95) - - if vad_type.Contains(self._VAD_TYPE_CLASS.WEBRTC_COMMON_AUDIO): - # pylint: disable=unpacking-non-sequence - vad_output = e.GetVadOutput( - self._VAD_TYPE_CLASS.WEBRTC_COMMON_AUDIO) - self.assertGreater(len(vad_output), 0) - self.assertGreaterEqual( - float(np.sum(vad_output)) / len(vad_output), 0.95) - - if vad_type.Contains(self._VAD_TYPE_CLASS.WEBRTC_APM): - # pylint: disable=unpacking-non-sequence - (vad_probs, - vad_rms) = e.GetVadOutput(self._VAD_TYPE_CLASS.WEBRTC_APM) - self.assertGreater(len(vad_probs), 0) - self.assertGreater(len(vad_rms), 0) - self.assertGreaterEqual( - float(np.sum(vad_probs)) / len(vad_probs), 0.5) - self.assertGreaterEqual( - float(np.sum(vad_rms)) / len(vad_rms), 20000) - - if self._DEBUG_PLOT_VAD: - frame_times_s = lambda num_frames, frame_size_ms: np.arange( - num_frames).astype(np.float32) * frame_size_ms / 1000.0 - level = e.GetLevel() - t_level = frame_times_s(num_frames=len(level), - frame_size_ms=e.GetLevelFrameSizeMs()) - t_vad = frame_times_s(num_frames=len(vad_output), - frame_size_ms=e.GetVadFrameSizeMs()) - import matplotlib.pyplot as plt - plt.figure() - plt.hold(True) - plt.plot(t_level, level) - plt.plot(t_vad, vad_output * np.max(level), '.') - plt.show() - - def testSaveLoad(self): - e = annotations.AudioAnnotationsExtractor(self._ALL_VAD_TYPES) - e.Extract(self._wav_file_path) - e.Save(self._tmp_path, "fake-annotation") - - data = np.load( - os.path.join( - self._tmp_path, - e.GetOutputFileNameTemplate().format("fake-annotation"))) - np.testing.assert_array_equal(e.GetLevel(), data['level']) - self.assertEqual(np.float32, data['level'].dtype) - np.testing.assert_array_equal( - e.GetVadOutput(self._VAD_TYPE_CLASS.ENERGY_THRESHOLD), - data['vad_energy_output']) - 
np.testing.assert_array_equal( - e.GetVadOutput(self._VAD_TYPE_CLASS.WEBRTC_COMMON_AUDIO), - data['vad_output']) - np.testing.assert_array_equal( - e.GetVadOutput(self._VAD_TYPE_CLASS.WEBRTC_APM)[0], - data['vad_probs']) - np.testing.assert_array_equal( - e.GetVadOutput(self._VAD_TYPE_CLASS.WEBRTC_APM)[1], - data['vad_rms']) - self.assertEqual(np.uint8, data['vad_energy_output'].dtype) - self.assertEqual(np.float64, data['vad_probs'].dtype) - self.assertEqual(np.float64, data['vad_rms'].dtype) - - def testEmptyExternalShouldNotCrash(self): - for vad_type_value in range(0, self._ALL_VAD_TYPES + 1): - annotations.AudioAnnotationsExtractor(vad_type_value, {}) - - def testFakeExternalSaveLoad(self): - def FakeExternalFactory(): - return external_vad.ExternalVad( - os.path.join(os.path.dirname(os.path.abspath(__file__)), - 'fake_external_vad.py'), 'fake') - - for vad_type_value in range(0, self._ALL_VAD_TYPES + 1): - e = annotations.AudioAnnotationsExtractor( - vad_type_value, {'fake': FakeExternalFactory()}) - e.Extract(self._wav_file_path) - e.Save(self._tmp_path, annotation_name="fake-annotation") - data = np.load( - os.path.join( - self._tmp_path, - e.GetOutputFileNameTemplate().format("fake-annotation"))) - self.assertEqual(np.float32, data['extvad_conf-fake'].dtype) - np.testing.assert_almost_equal(np.arange(100, dtype=np.float32), - data['extvad_conf-fake']) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/apm_configs/default.json b/modules/audio_processing/test/py_quality_assessment/quality_assessment/apm_configs/default.json deleted file mode 100644 index 5c3277bac0..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/apm_configs/default.json +++ /dev/null @@ -1 +0,0 @@ -{"-all_default": null} diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/apm_vad.cc b/modules/audio_processing/test/py_quality_assessment/quality_assessment/apm_vad.cc deleted file mode 100644 index 73ce4ed3f7..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/apm_vad.cc +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -// -// Use of this source code is governed by a BSD-style license -// that can be found in the LICENSE file in the root of the source -// tree. An additional intellectual property rights grant can be found -// in the file PATENTS. All contributing project authors may -// be found in the AUTHORS file in the root of the source tree. - -#include -#include -#include - -#include "absl/flags/flag.h" -#include "absl/flags/parse.h" -#include "common_audio/wav_file.h" -#include "modules/audio_processing/vad/voice_activity_detector.h" -#include "rtc_base/logging.h" - -ABSL_FLAG(std::string, i, "", "Input wav file"); -ABSL_FLAG(std::string, o_probs, "", "VAD probabilities output file"); -ABSL_FLAG(std::string, o_rms, "", "VAD output file"); - -namespace webrtc { -namespace test { -namespace { - -constexpr uint8_t kAudioFrameLengthMilliseconds = 10; -constexpr int kMaxSampleRate = 48000; -constexpr size_t kMaxFrameLen = - kAudioFrameLengthMilliseconds * kMaxSampleRate / 1000; - -int main(int argc, char* argv[]) { - absl::ParseCommandLine(argc, argv); - const std::string input_file = absl::GetFlag(FLAGS_i); - const std::string output_probs_file = absl::GetFlag(FLAGS_o_probs); - const std::string output_file = absl::GetFlag(FLAGS_o_rms); - // Open wav input file and check properties. 
- WavReader wav_reader(input_file); - if (wav_reader.num_channels() != 1) { - RTC_LOG(LS_ERROR) << "Only mono wav files supported"; - return 1; - } - if (wav_reader.sample_rate() > kMaxSampleRate) { - RTC_LOG(LS_ERROR) << "Beyond maximum sample rate (" << kMaxSampleRate - << ")"; - return 1; - } - const size_t audio_frame_len = rtc::CheckedDivExact( - kAudioFrameLengthMilliseconds * wav_reader.sample_rate(), 1000); - if (audio_frame_len > kMaxFrameLen) { - RTC_LOG(LS_ERROR) << "The frame size and/or the sample rate are too large."; - return 1; - } - - // Create output file and write header. - std::ofstream out_probs_file(output_probs_file, std::ofstream::binary); - std::ofstream out_rms_file(output_file, std::ofstream::binary); - - // Run VAD and write decisions. - VoiceActivityDetector vad; - std::array<int16_t, kMaxFrameLen> samples; - - while (true) { - // Process frame. - const auto read_samples = - wav_reader.ReadSamples(audio_frame_len, samples.data()); - if (read_samples < audio_frame_len) { - break; - } - vad.ProcessChunk(samples.data(), audio_frame_len, wav_reader.sample_rate()); - // Write output. - auto probs = vad.chunkwise_voice_probabilities(); - auto rms = vad.chunkwise_rms(); - RTC_CHECK_EQ(probs.size(), rms.size()); - RTC_CHECK_EQ(sizeof(double), 8); - - for (const auto& p : probs) { - out_probs_file.write(reinterpret_cast<const char*>(&p), 8); - } - for (const auto& r : rms) { - out_rms_file.write(reinterpret_cast<const char*>(&r), 8); - } - } - - out_probs_file.close(); - out_rms_file.close(); - return 0; -} - -} // namespace -} // namespace test -} // namespace webrtc - -int main(int argc, char* argv[]) { - return webrtc::test::main(argc, argv); -} diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/audioproc_wrapper.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/audioproc_wrapper.py deleted file mode 100644 index 04aeaa95b9..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/audioproc_wrapper.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Class implementing a wrapper for APM simulators. -""" - -import cProfile -import logging -import os -import subprocess - -from . import data_access -from . import exceptions - - -class AudioProcWrapper(object): - """Wrapper for APM simulators. - """ - - DEFAULT_APM_SIMULATOR_BIN_PATH = os.path.abspath( - os.path.join(os.pardir, 'audioproc_f')) - OUTPUT_FILENAME = 'output.wav' - - def __init__(self, simulator_bin_path): - """Ctor. - - Args: - simulator_bin_path: path to the APM simulator binary. - """ - self._simulator_bin_path = simulator_bin_path - self._config = None - self._output_signal_filepath = None - - # Profiler instance to measure running time. - self._profiler = cProfile.Profile() - - @property - def output_filepath(self): - return self._output_signal_filepath - - def Run(self, - config_filepath, - capture_input_filepath, - output_path, - render_input_filepath=None): - """Runs APM simulator. - - Args: - config_filepath: path to the configuration file specifying the arguments - for the APM simulator.
- capture_input_filepath: path to the capture audio track input file (aka - forward or near-end). - output_path: path of the audio track output file. - render_input_filepath: path to the render audio track input file (aka - reverse or far-end). - """ - # Init. - self._output_signal_filepath = os.path.join(output_path, - self.OUTPUT_FILENAME) - profiling_stats_filepath = os.path.join(output_path, 'profiling.stats') - - # Skip if the output has already been generated. - if os.path.exists(self._output_signal_filepath) and os.path.exists( - profiling_stats_filepath): - return - - # Load configuration. - self._config = data_access.AudioProcConfigFile.Load(config_filepath) - - # Set remaining parameters. - if not os.path.exists(capture_input_filepath): - raise exceptions.FileNotFoundError( - 'cannot find capture input file') - self._config['-i'] = capture_input_filepath - self._config['-o'] = self._output_signal_filepath - if render_input_filepath is not None: - if not os.path.exists(render_input_filepath): - raise exceptions.FileNotFoundError( - 'cannot find render input file') - self._config['-ri'] = render_input_filepath - - # Build arguments list. - args = [self._simulator_bin_path] - for param_name in self._config: - args.append(param_name) - if self._config[param_name] is not None: - args.append(str(self._config[param_name])) - logging.debug(' '.join(args)) - - # Run. - self._profiler.enable() - subprocess.call(args) - self._profiler.disable() - - # Save profiling stats. - self._profiler.dump_stats(profiling_stats_filepath) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/collect_data.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/collect_data.py deleted file mode 100644 index 38aac0cbe2..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/collect_data.py +++ /dev/null @@ -1,243 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Imports a filtered subset of the scores and configurations computed -by apm_quality_assessment.py into a pandas data frame. -""" - -import argparse -import glob -import logging -import os -import re -import sys - -try: - import pandas as pd -except ImportError: - logging.critical('Cannot import the third-party Python package pandas') - sys.exit(1) - -from . import data_access as data_access -from . import simulation as sim - -# Compiled regular expressions used to extract score descriptors. -RE_CONFIG_NAME = re.compile(sim.ApmModuleSimulator.GetPrefixApmConfig() + - r'(.+)') -RE_CAPTURE_NAME = re.compile(sim.ApmModuleSimulator.GetPrefixCapture() + - r'(.+)') -RE_RENDER_NAME = re.compile(sim.ApmModuleSimulator.GetPrefixRender() + r'(.+)') -RE_ECHO_SIM_NAME = re.compile(sim.ApmModuleSimulator.GetPrefixEchoSimulator() + - r'(.+)') -RE_TEST_DATA_GEN_NAME = re.compile( - sim.ApmModuleSimulator.GetPrefixTestDataGenerator() + r'(.+)') -RE_TEST_DATA_GEN_PARAMS = re.compile( - sim.ApmModuleSimulator.GetPrefixTestDataGeneratorParameters() + r'(.+)') -RE_SCORE_NAME = re.compile(sim.ApmModuleSimulator.GetPrefixScore() + - r'(.+)(\..+)') - - -def InstanceArgumentsParser(): - """Arguments parser factory. 
- """ - parser = argparse.ArgumentParser( - description=('Override this description in a user script by changing' - ' `parser.description` of the returned parser.')) - - parser.add_argument('-o', - '--output_dir', - required=True, - help=('the same base path used with the ' - 'apm_quality_assessment tool')) - - parser.add_argument( - '-c', - '--config_names', - type=re.compile, - help=('regular expression to filter the APM configuration' - ' names')) - - parser.add_argument( - '-i', - '--capture_names', - type=re.compile, - help=('regular expression to filter the capture signal ' - 'names')) - - parser.add_argument('-r', - '--render_names', - type=re.compile, - help=('regular expression to filter the render signal ' - 'names')) - - parser.add_argument( - '-e', - '--echo_simulator_names', - type=re.compile, - help=('regular expression to filter the echo simulator ' - 'names')) - - parser.add_argument('-t', - '--test_data_generators', - type=re.compile, - help=('regular expression to filter the test data ' - 'generator names')) - - parser.add_argument( - '-s', - '--eval_scores', - type=re.compile, - help=('regular expression to filter the evaluation score ' - 'names')) - - return parser - - -def _GetScoreDescriptors(score_filepath): - """Extracts a score descriptor from the given score file path. - - Args: - score_filepath: path to the score file. - - Returns: - A tuple of strings (APM configuration name, capture audio track name, - render audio track name, echo simulator name, test data generator name, - test data generator parameters as string, evaluation score name). - """ - fields = score_filepath.split(os.sep)[-7:] - extract_name = lambda index, reg_expr: (reg_expr.match(fields[index]). - groups(0)[0]) - return ( - extract_name(0, RE_CONFIG_NAME), - extract_name(1, RE_CAPTURE_NAME), - extract_name(2, RE_RENDER_NAME), - extract_name(3, RE_ECHO_SIM_NAME), - extract_name(4, RE_TEST_DATA_GEN_NAME), - extract_name(5, RE_TEST_DATA_GEN_PARAMS), - extract_name(6, RE_SCORE_NAME), - ) - - -def _ExcludeScore(config_name, capture_name, render_name, echo_simulator_name, - test_data_gen_name, score_name, args): - """Decides whether excluding a score. - - A set of optional regular expressions in args is used to determine if the - score should be excluded (depending on its |*_name| descriptors). - - Args: - config_name: APM configuration name. - capture_name: capture audio track name. - render_name: render audio track name. - echo_simulator_name: echo simulator name. - test_data_gen_name: test data generator name. - score_name: evaluation score name. - args: parsed arguments. - - Returns: - A boolean. - """ - value_regexpr_pairs = [ - (config_name, args.config_names), - (capture_name, args.capture_names), - (render_name, args.render_names), - (echo_simulator_name, args.echo_simulator_names), - (test_data_gen_name, args.test_data_generators), - (score_name, args.eval_scores), - ] - - # Score accepted if each value matches the corresponding regular expression. - for value, regexpr in value_regexpr_pairs: - if regexpr is None: - continue - if not regexpr.match(value): - return True - - return False - - -def FindScores(src_path, args): - """Given a search path, find scores and return a DataFrame object. - - Args: - src_path: Search path pattern. - args: parsed arguments. - - Returns: - A DataFrame object. - """ - # Get scores. - scores = [] - for score_filepath in glob.iglob(src_path): - # Extract score descriptor fields from the path. 
- (config_name, capture_name, render_name, echo_simulator_name, - test_data_gen_name, test_data_gen_params, - score_name) = _GetScoreDescriptors(score_filepath) - - # Ignore the score if required. - if _ExcludeScore(config_name, capture_name, render_name, - echo_simulator_name, test_data_gen_name, score_name, - args): - logging.info('ignored score: %s %s %s %s %s %s', config_name, - capture_name, render_name, echo_simulator_name, - test_data_gen_name, score_name) - continue - - # Read metadata and score. - metadata = data_access.Metadata.LoadAudioTestDataPaths( - os.path.split(score_filepath)[0]) - score = data_access.ScoreFile.Load(score_filepath) - - # Add a score with its descriptor fields. - scores.append(( - metadata['clean_capture_input_filepath'], - metadata['echo_free_capture_filepath'], - metadata['echo_filepath'], - metadata['render_filepath'], - metadata['capture_filepath'], - metadata['apm_output_filepath'], - metadata['apm_reference_filepath'], - config_name, - capture_name, - render_name, - echo_simulator_name, - test_data_gen_name, - test_data_gen_params, - score_name, - score, - )) - - return pd.DataFrame(data=scores, - columns=( - 'clean_capture_input_filepath', - 'echo_free_capture_filepath', - 'echo_filepath', - 'render_filepath', - 'capture_filepath', - 'apm_output_filepath', - 'apm_reference_filepath', - 'apm_config', - 'capture', - 'render', - 'echo_simulator', - 'test_data_gen', - 'test_data_gen_params', - 'eval_score_name', - 'score', - )) - - -def ConstructSrcPath(args): - return os.path.join( - args.output_dir, - sim.ApmModuleSimulator.GetPrefixApmConfig() + '*', - sim.ApmModuleSimulator.GetPrefixCapture() + '*', - sim.ApmModuleSimulator.GetPrefixRender() + '*', - sim.ApmModuleSimulator.GetPrefixEchoSimulator() + '*', - sim.ApmModuleSimulator.GetPrefixTestDataGenerator() + '*', - sim.ApmModuleSimulator.GetPrefixTestDataGeneratorParameters() + '*', - sim.ApmModuleSimulator.GetPrefixScore() + '*') diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/data_access.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/data_access.py deleted file mode 100644 index c1aebb67f1..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/data_access.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Data access utility functions and classes. -""" - -import json -import os - - -def MakeDirectory(path): - """Makes a directory recursively without rising exceptions if existing. - - Args: - path: path to the directory to be created. - """ - if os.path.exists(path): - return - os.makedirs(path) - - -class Metadata(object): - """Data access class to save and load metadata. - """ - - def __init__(self): - pass - - _GENERIC_METADATA_SUFFIX = '.mdata' - _AUDIO_TEST_DATA_FILENAME = 'audio_test_data.json' - - @classmethod - def LoadFileMetadata(cls, filepath): - """Loads generic metadata linked to a file. - - Args: - filepath: path to the metadata file to read. - - Returns: - A dict. 
- """ - with open(filepath + cls._GENERIC_METADATA_SUFFIX) as f: - return json.load(f) - - @classmethod - def SaveFileMetadata(cls, filepath, metadata): - """Saves generic metadata linked to a file. - - Args: - filepath: path to the metadata file to write. - metadata: a dict. - """ - with open(filepath + cls._GENERIC_METADATA_SUFFIX, 'w') as f: - json.dump(metadata, f) - - @classmethod - def LoadAudioTestDataPaths(cls, metadata_path): - """Loads the input and the reference audio track paths. - - Args: - metadata_path: path to the directory containing the metadata file. - - Returns: - Tuple with the paths to the input and output audio tracks. - """ - metadata_filepath = os.path.join(metadata_path, - cls._AUDIO_TEST_DATA_FILENAME) - with open(metadata_filepath) as f: - return json.load(f) - - @classmethod - def SaveAudioTestDataPaths(cls, output_path, **filepaths): - """Saves the input and the reference audio track paths. - - Args: - output_path: path to the directory containing the metadata file. - - Keyword Args: - filepaths: collection of audio track file paths to save. - """ - output_filepath = os.path.join(output_path, - cls._AUDIO_TEST_DATA_FILENAME) - with open(output_filepath, 'w') as f: - json.dump(filepaths, f) - - -class AudioProcConfigFile(object): - """Data access to load/save APM simulator argument lists. - - The arguments stored in the config files are used to control the APM flags. - """ - - def __init__(self): - pass - - @classmethod - def Load(cls, filepath): - """Loads a configuration file for an APM simulator. - - Args: - filepath: path to the configuration file. - - Returns: - A dict containing the configuration. - """ - with open(filepath) as f: - return json.load(f) - - @classmethod - def Save(cls, filepath, config): - """Saves a configuration file for an APM simulator. - - Args: - filepath: path to the configuration file. - config: a dict containing the configuration. - """ - with open(filepath, 'w') as f: - json.dump(config, f) - - -class ScoreFile(object): - """Data access class to save and load float scalar scores. - """ - - def __init__(self): - pass - - @classmethod - def Load(cls, filepath): - """Loads a score from file. - - Args: - filepath: path to the score file. - - Returns: - A float encoding the score. - """ - with open(filepath) as f: - return float(f.readline().strip()) - - @classmethod - def Save(cls, filepath, score): - """Saves a score into a file. - - Args: - filepath: path to the score file. - score: float encoding the score. - """ - with open(filepath, 'w') as f: - f.write('{0:f}\n'.format(score)) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/echo_path_simulation.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/echo_path_simulation.py deleted file mode 100644 index 65903ea32d..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/echo_path_simulation.py +++ /dev/null @@ -1,136 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Echo path simulation module. -""" - -import hashlib -import os - -from . 
import signal_processing - - -class EchoPathSimulator(object): - """Abstract class for the echo path simulators. - - In general, an echo path simulator is a function of the render signal and - simulates the propagation of the latter into the microphone (e.g., due to - mechanical or electrical paths). - """ - - NAME = None - REGISTERED_CLASSES = {} - - def __init__(self): - pass - - def Simulate(self, output_path): - """Creates the echo signal and stores it in an audio file (abstract method). - - Args: - output_path: Path in which any output can be saved. - - Returns: - Path to the generated audio track file or None if no echo is present. - """ - raise NotImplementedError() - - @classmethod - def RegisterClass(cls, class_to_register): - """Registers an EchoPathSimulator implementation. - - Decorator to automatically register the classes that extend - EchoPathSimulator. - Example usage: - - @EchoPathSimulator.RegisterClass - class NoEchoPathSimulator(EchoPathSimulator): - pass - """ - cls.REGISTERED_CLASSES[class_to_register.NAME] = class_to_register - return class_to_register - - -@EchoPathSimulator.RegisterClass -class NoEchoPathSimulator(EchoPathSimulator): - """Simulates absence of echo.""" - - NAME = 'noecho' - - def __init__(self): - EchoPathSimulator.__init__(self) - - def Simulate(self, output_path): - return None - - -@EchoPathSimulator.RegisterClass -class LinearEchoPathSimulator(EchoPathSimulator): - """Simulates linear echo path. - - This class applies a given impulse response to the render input and then it - sums the signal to the capture input signal. - """ - - NAME = 'linear' - - def __init__(self, render_input_filepath, impulse_response): - """ - Args: - render_input_filepath: Render audio track file. - impulse_response: list or numpy vector of float values. - """ - EchoPathSimulator.__init__(self) - self._render_input_filepath = render_input_filepath - self._impulse_response = impulse_response - - def Simulate(self, output_path): - """Simulates linear echo path.""" - # Form the file name with a hash of the impulse response. - impulse_response_hash = hashlib.sha256( - str(self._impulse_response).encode('utf-8', 'ignore')).hexdigest() - echo_filepath = os.path.join( - output_path, 'linear_echo_{}.wav'.format(impulse_response_hash)) - - # If the simulated echo audio track file does not exists, create it. - if not os.path.exists(echo_filepath): - render = signal_processing.SignalProcessingUtils.LoadWav( - self._render_input_filepath) - echo = signal_processing.SignalProcessingUtils.ApplyImpulseResponse( - render, self._impulse_response) - signal_processing.SignalProcessingUtils.SaveWav( - echo_filepath, echo) - - return echo_filepath - - -@EchoPathSimulator.RegisterClass -class RecordedEchoPathSimulator(EchoPathSimulator): - """Uses recorded echo. - - This class uses the clean capture input file name to build the file name of - the corresponding recording containing echo (a predefined suffix is used). - Such a file is expected to be already existing. 
- """ - - NAME = 'recorded' - - _FILE_NAME_SUFFIX = '_echo' - - def __init__(self, render_input_filepath): - EchoPathSimulator.__init__(self) - self._render_input_filepath = render_input_filepath - - def Simulate(self, output_path): - """Uses recorded echo path.""" - path, file_name_ext = os.path.split(self._render_input_filepath) - file_name, file_ext = os.path.splitext(file_name_ext) - echo_filepath = os.path.join( - path, '{}{}{}'.format(file_name, self._FILE_NAME_SUFFIX, file_ext)) - assert os.path.exists(echo_filepath), ( - 'cannot find the echo audio track file {}'.format(echo_filepath)) - return echo_filepath diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/echo_path_simulation_factory.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/echo_path_simulation_factory.py deleted file mode 100644 index 4b46b36b47..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/echo_path_simulation_factory.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Echo path simulation factory module. -""" - -import numpy as np - -from . import echo_path_simulation - - -class EchoPathSimulatorFactory(object): - - # TODO(alessiob): Replace 20 ms delay (at 48 kHz sample rate) with a more - # realistic impulse response. - _LINEAR_ECHO_IMPULSE_RESPONSE = np.array([0.0] * (20 * 48) + [0.15]) - - def __init__(self): - pass - - @classmethod - def GetInstance(cls, echo_path_simulator_class, render_input_filepath): - """Creates an EchoPathSimulator instance given a class object. - - Args: - echo_path_simulator_class: EchoPathSimulator class object (not an - instance). - render_input_filepath: Path to the render audio track file. - - Returns: - An EchoPathSimulator instance. - """ - assert render_input_filepath is not None or ( - echo_path_simulator_class == - echo_path_simulation.NoEchoPathSimulator) - - if echo_path_simulator_class == echo_path_simulation.NoEchoPathSimulator: - return echo_path_simulation.NoEchoPathSimulator() - elif echo_path_simulator_class == ( - echo_path_simulation.LinearEchoPathSimulator): - return echo_path_simulation.LinearEchoPathSimulator( - render_input_filepath, cls._LINEAR_ECHO_IMPULSE_RESPONSE) - else: - return echo_path_simulator_class(render_input_filepath) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/echo_path_simulation_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/echo_path_simulation_unittest.py deleted file mode 100644 index b6cc8abdde..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/echo_path_simulation_unittest.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Unit tests for the echo path simulation module. 
-""" - -import shutil -import os -import tempfile -import unittest - -import pydub - -from . import echo_path_simulation -from . import echo_path_simulation_factory -from . import signal_processing - - -class TestEchoPathSimulators(unittest.TestCase): - """Unit tests for the eval_scores module. - """ - - def setUp(self): - """Creates temporary data.""" - self._tmp_path = tempfile.mkdtemp() - - # Create and save white noise. - silence = pydub.AudioSegment.silent(duration=1000, frame_rate=48000) - white_noise = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - silence) - self._audio_track_num_samples = ( - signal_processing.SignalProcessingUtils.CountSamples(white_noise)) - self._audio_track_filepath = os.path.join(self._tmp_path, - 'white_noise.wav') - signal_processing.SignalProcessingUtils.SaveWav( - self._audio_track_filepath, white_noise) - - # Make a copy the white noise audio track file; it will be used by - # echo_path_simulation.RecordedEchoPathSimulator. - shutil.copy(self._audio_track_filepath, - os.path.join(self._tmp_path, 'white_noise_echo.wav')) - - def tearDown(self): - """Recursively deletes temporary folders.""" - shutil.rmtree(self._tmp_path) - - def testRegisteredClasses(self): - # Check that there is at least one registered echo path simulator. - registered_classes = ( - echo_path_simulation.EchoPathSimulator.REGISTERED_CLASSES) - self.assertIsInstance(registered_classes, dict) - self.assertGreater(len(registered_classes), 0) - - # Instance factory. - factory = echo_path_simulation_factory.EchoPathSimulatorFactory() - - # Try each registered echo path simulator. - for echo_path_simulator_name in registered_classes: - simulator = factory.GetInstance( - echo_path_simulator_class=registered_classes[ - echo_path_simulator_name], - render_input_filepath=self._audio_track_filepath) - - echo_filepath = simulator.Simulate(self._tmp_path) - if echo_filepath is None: - self.assertEqual(echo_path_simulation.NoEchoPathSimulator.NAME, - echo_path_simulator_name) - # No other tests in this case. - continue - - # Check that the echo audio track file exists and its length is greater or - # equal to that of the render audio track. - self.assertTrue(os.path.exists(echo_filepath)) - echo = signal_processing.SignalProcessingUtils.LoadWav( - echo_filepath) - self.assertGreaterEqual( - signal_processing.SignalProcessingUtils.CountSamples(echo), - self._audio_track_num_samples) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/eval_scores.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/eval_scores.py deleted file mode 100644 index 59c5f74be4..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/eval_scores.py +++ /dev/null @@ -1,427 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Evaluation score abstract class and implementations. -""" - -from __future__ import division -import logging -import os -import re -import subprocess -import sys - -try: - import numpy as np -except ImportError: - logging.critical('Cannot import the third-party Python package numpy') - sys.exit(1) - -from . import data_access -from . 
import exceptions -from . import signal_processing - - -class EvaluationScore(object): - - NAME = None - REGISTERED_CLASSES = {} - - def __init__(self, score_filename_prefix): - self._score_filename_prefix = score_filename_prefix - self._input_signal_metadata = None - self._reference_signal = None - self._reference_signal_filepath = None - self._tested_signal = None - self._tested_signal_filepath = None - self._output_filepath = None - self._score = None - self._render_signal_filepath = None - - @classmethod - def RegisterClass(cls, class_to_register): - """Registers an EvaluationScore implementation. - - Decorator to automatically register the classes that extend EvaluationScore. - Example usage: - - @EvaluationScore.RegisterClass - class AudioLevelScore(EvaluationScore): - pass - """ - cls.REGISTERED_CLASSES[class_to_register.NAME] = class_to_register - return class_to_register - - @property - def output_filepath(self): - return self._output_filepath - - @property - def score(self): - return self._score - - def SetInputSignalMetadata(self, metadata): - """Sets input signal metadata. - - Args: - metadata: dict instance. - """ - self._input_signal_metadata = metadata - - def SetReferenceSignalFilepath(self, filepath): - """Sets the path to the audio track used as reference signal. - - Args: - filepath: path to the reference audio track. - """ - self._reference_signal_filepath = filepath - - def SetTestedSignalFilepath(self, filepath): - """Sets the path to the audio track used as test signal. - - Args: - filepath: path to the test audio track. - """ - self._tested_signal_filepath = filepath - - def SetRenderSignalFilepath(self, filepath): - """Sets the path to the audio track used as render signal. - - Args: - filepath: path to the test audio track. - """ - self._render_signal_filepath = filepath - - def Run(self, output_path): - """Extracts the score for the set test data pair. - - Args: - output_path: path to the directory where the output is written. - """ - self._output_filepath = os.path.join( - output_path, self._score_filename_prefix + self.NAME + '.txt') - try: - # If the score has already been computed, load. - self._LoadScore() - logging.debug('score found and loaded') - except IOError: - # Compute the score. - logging.debug('score not found, compute') - self._Run(output_path) - - def _Run(self, output_path): - # Abstract method. - raise NotImplementedError() - - def _LoadReferenceSignal(self): - assert self._reference_signal_filepath is not None - self._reference_signal = signal_processing.SignalProcessingUtils.LoadWav( - self._reference_signal_filepath) - - def _LoadTestedSignal(self): - assert self._tested_signal_filepath is not None - self._tested_signal = signal_processing.SignalProcessingUtils.LoadWav( - self._tested_signal_filepath) - - def _LoadScore(self): - return data_access.ScoreFile.Load(self._output_filepath) - - def _SaveScore(self): - return data_access.ScoreFile.Save(self._output_filepath, self._score) - - -@EvaluationScore.RegisterClass -class AudioLevelPeakScore(EvaluationScore): - """Peak audio level score. - - Defined as the difference between the peak audio level of the tested and - the reference signals. 
-
-    Unit: dB
-    Ideal: 0 dB
-    Worst case: +/-inf dB
-    """
-
-    NAME = 'audio_level_peak'
-
-    def __init__(self, score_filename_prefix):
-        EvaluationScore.__init__(self, score_filename_prefix)
-
-    def _Run(self, output_path):
-        self._LoadReferenceSignal()
-        self._LoadTestedSignal()
-        self._score = self._tested_signal.dBFS - self._reference_signal.dBFS
-        self._SaveScore()
-
-
-@EvaluationScore.RegisterClass
-class MeanAudioLevelScore(EvaluationScore):
-    """Mean audio level score.
-
-    Defined as the difference between the mean audio level of the tested and
-    the reference signals.
-
-    Unit: dB
-    Ideal: 0 dB
-    Worst case: +/-inf dB
-    """
-
-    NAME = 'audio_level_mean'
-
-    def __init__(self, score_filename_prefix):
-        EvaluationScore.__init__(self, score_filename_prefix)
-
-    def _Run(self, output_path):
-        self._LoadReferenceSignal()
-        self._LoadTestedSignal()
-
-        dbfs_diffs_sum = 0.0
-        seconds = min(len(self._tested_signal), len(
-            self._reference_signal)) // 1000
-        for t in range(seconds):
-            t0 = t * seconds
-            t1 = t0 + seconds
-            dbfs_diffs_sum += (self._tested_signal[t0:t1].dBFS -
-                               self._reference_signal[t0:t1].dBFS)
-        self._score = dbfs_diffs_sum / float(seconds)
-        self._SaveScore()
-
-
-@EvaluationScore.RegisterClass
-class EchoMetric(EvaluationScore):
-    """Echo score.
-
-    Proportion of detected echo.
-
-    Unit: ratio
-    Ideal: 0
-    Worst case: 1
-    """
-
-    NAME = 'echo_metric'
-
-    def __init__(self, score_filename_prefix, echo_detector_bin_filepath):
-        EvaluationScore.__init__(self, score_filename_prefix)
-
-        # Echo detector binary file path.
-        self._echo_detector_bin_filepath = echo_detector_bin_filepath
-        if not os.path.exists(self._echo_detector_bin_filepath):
-            logging.error('cannot find EchoMetric tool binary file')
-            raise exceptions.FileNotFoundError()
-
-        self._echo_detector_bin_path, _ = os.path.split(
-            self._echo_detector_bin_filepath)
-
-    def _Run(self, output_path):
-        echo_detector_out_filepath = os.path.join(output_path,
-                                                  'echo_detector.out')
-        if os.path.exists(echo_detector_out_filepath):
-            os.unlink(echo_detector_out_filepath)
-
-        logging.debug("Render signal filepath: %s",
-                      self._render_signal_filepath)
-        if not os.path.exists(self._render_signal_filepath):
-            logging.error(
-                "Render input required for evaluating the echo metric.")
-
-        args = [
-            self._echo_detector_bin_filepath, '--output_file',
-            echo_detector_out_filepath, '--', '-i',
-            self._tested_signal_filepath, '-ri', self._render_signal_filepath
-        ]
-        logging.debug(' '.join(args))
-        subprocess.call(args, cwd=self._echo_detector_bin_path)
-
-        # Parse Echo detector tool output and extract the score.
-        self._score = self._ParseOutputFile(echo_detector_out_filepath)
-        self._SaveScore()
-
-    @classmethod
-    def _ParseOutputFile(cls, echo_metric_file_path):
-        """Parses the echo detector tool output.
-
-        Args:
-            echo_metric_file_path: path to the echo detector tool output file.
-
-        Returns:
-            The score as a number in [0, 1].
-        """
-        with open(echo_metric_file_path) as f:
-            return float(f.read())
-
-
-@EvaluationScore.RegisterClass
-class PolqaScore(EvaluationScore):
-    """POLQA score.
-
-    See http://www.polqa.info/.
-
-    Unit: MOS
-    Ideal: 4.5
-    Worst case: 1.0
-    """
-
-    NAME = 'polqa'
-
-    def __init__(self, score_filename_prefix, polqa_bin_filepath):
-        EvaluationScore.__init__(self, score_filename_prefix)
-
-        # POLQA binary file path.
- self._polqa_bin_filepath = polqa_bin_filepath - if not os.path.exists(self._polqa_bin_filepath): - logging.error('cannot find POLQA tool binary file') - raise exceptions.FileNotFoundError() - - # Path to the POLQA directory with binary and license files. - self._polqa_tool_path, _ = os.path.split(self._polqa_bin_filepath) - - def _Run(self, output_path): - polqa_out_filepath = os.path.join(output_path, 'polqa.out') - if os.path.exists(polqa_out_filepath): - os.unlink(polqa_out_filepath) - - args = [ - self._polqa_bin_filepath, - '-t', - '-q', - '-Overwrite', - '-Ref', - self._reference_signal_filepath, - '-Test', - self._tested_signal_filepath, - '-LC', - 'NB', - '-Out', - polqa_out_filepath, - ] - logging.debug(' '.join(args)) - subprocess.call(args, cwd=self._polqa_tool_path) - - # Parse POLQA tool output and extract the score. - polqa_output = self._ParseOutputFile(polqa_out_filepath) - self._score = float(polqa_output['PolqaScore']) - - self._SaveScore() - - @classmethod - def _ParseOutputFile(cls, polqa_out_filepath): - """ - Parses the POLQA tool output formatted as a table ('-t' option). - - Args: - polqa_out_filepath: path to the POLQA tool output file. - - Returns: - A dict. - """ - data = [] - with open(polqa_out_filepath) as f: - for line in f: - line = line.strip() - if len(line) == 0 or line.startswith('*'): - # Ignore comments. - continue - # Read fields. - data.append(re.split(r'\t+', line)) - - # Two rows expected (header and values). - assert len(data) == 2, 'Cannot parse POLQA output' - number_of_fields = len(data[0]) - assert number_of_fields == len(data[1]) - - # Build and return a dictionary with field names (header) as keys and the - # corresponding field values as values. - return { - data[0][index]: data[1][index] - for index in range(number_of_fields) - } - - -@EvaluationScore.RegisterClass -class TotalHarmonicDistorsionScore(EvaluationScore): - """Total harmonic distorsion plus noise score. - - Total harmonic distorsion plus noise score. - See "https://en.wikipedia.org/wiki/Total_harmonic_distortion#THD.2BN". - - Unit: -. - Ideal: 0. - Worst case: +inf - """ - - NAME = 'thd' - - def __init__(self, score_filename_prefix): - EvaluationScore.__init__(self, score_filename_prefix) - self._input_frequency = None - - def _Run(self, output_path): - self._CheckInputSignal() - - self._LoadTestedSignal() - if self._tested_signal.channels != 1: - raise exceptions.EvaluationScoreException( - 'unsupported number of channels') - samples = signal_processing.SignalProcessingUtils.AudioSegmentToRawData( - self._tested_signal) - - # Init. - num_samples = len(samples) - duration = len(self._tested_signal) / 1000.0 - scaling = 2.0 / num_samples - max_freq = self._tested_signal.frame_rate / 2 - f0_freq = float(self._input_frequency) - t = np.linspace(0, duration, num_samples) - - # Analyze harmonics. - b_terms = [] - n = 1 - while f0_freq * n < max_freq: - x_n = np.sum( - samples * np.sin(2.0 * np.pi * n * f0_freq * t)) * scaling - y_n = np.sum( - samples * np.cos(2.0 * np.pi * n * f0_freq * t)) * scaling - b_terms.append(np.sqrt(x_n**2 + y_n**2)) - n += 1 - - output_without_fundamental = samples - b_terms[0] * np.sin( - 2.0 * np.pi * f0_freq * t) - distortion_and_noise = np.sqrt( - np.sum(output_without_fundamental**2) * np.pi * scaling) - - # TODO(alessiob): Fix or remove if not needed. - # thd = np.sqrt(np.sum(b_terms[1:]**2)) / b_terms[0] - - # TODO(alessiob): Check the range of `thd_plus_noise` and update the class - # docstring above if accordingly. 
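# The quantity below is a THD+N-style measure: the energy of the residual left
# after subtracting the estimated fundamental (i.e. the harmonics plus noise),
# normalized by the fundamental amplitude b_terms[0]. Up to the constant
# scaling factors applied above, this is
#
#   THD+N = sqrt(sum(residual ** 2)) / b_terms[0]
#
# where residual = samples - b_terms[0] * sin(2 * pi * f0_freq * t).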
-        thd_plus_noise = distortion_and_noise / b_terms[0]
-
-        self._score = thd_plus_noise
-        self._SaveScore()
-
-    def _CheckInputSignal(self):
-        # Check input signal and get properties.
-        try:
-            if self._input_signal_metadata['signal'] != 'pure_tone':
-                raise exceptions.EvaluationScoreException(
-                    'The THD score requires a pure tone as input signal')
-            self._input_frequency = self._input_signal_metadata['frequency']
-            if self._input_signal_metadata[
-                    'test_data_gen_name'] != 'identity' or (
-                        self._input_signal_metadata['test_data_gen_config'] !=
-                        'default'):
-                raise exceptions.EvaluationScoreException(
-                    'The THD score cannot be used with any test data generator '
-                    'other than "identity"')
-        except TypeError:
-            raise exceptions.EvaluationScoreException(
-                'The THD score requires an input signal with associated metadata'
-            )
-        except KeyError:
-            raise exceptions.EvaluationScoreException(
-                'Invalid input signal metadata to compute the THD score')
diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/eval_scores_factory.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/eval_scores_factory.py
deleted file mode 100644
index 5749a8924b..0000000000
--- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/eval_scores_factory.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-"""EvaluationScore factory class.
-"""
-
-import logging
-
-from . import exceptions
-from . import eval_scores
-
-
-class EvaluationScoreWorkerFactory(object):
-    """Factory class used to instantiate evaluation score workers.
-
-    The ctor gets the parameters that are used to instantiate the evaluation
-    score workers.
-    """
-
-    def __init__(self, polqa_tool_bin_path, echo_metric_tool_bin_path):
-        self._score_filename_prefix = None
-        self._polqa_tool_bin_path = polqa_tool_bin_path
-        self._echo_metric_tool_bin_path = echo_metric_tool_bin_path
-
-    def SetScoreFilenamePrefix(self, prefix):
-        self._score_filename_prefix = prefix
-
-    def GetInstance(self, evaluation_score_class):
-        """Creates an EvaluationScore instance given a class object.
-
-        Args:
-            evaluation_score_class: EvaluationScore class object (not an instance).
-
-        Returns:
-            An EvaluationScore instance.
- """ - if self._score_filename_prefix is None: - raise exceptions.InitializationException( - 'The score file name prefix for evaluation score workers is not set' - ) - logging.debug('factory producing a %s evaluation score', - evaluation_score_class) - - if evaluation_score_class == eval_scores.PolqaScore: - return eval_scores.PolqaScore(self._score_filename_prefix, - self._polqa_tool_bin_path) - elif evaluation_score_class == eval_scores.EchoMetric: - return eval_scores.EchoMetric(self._score_filename_prefix, - self._echo_metric_tool_bin_path) - else: - return evaluation_score_class(self._score_filename_prefix) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/eval_scores_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/eval_scores_unittest.py deleted file mode 100644 index 12e043320e..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/eval_scores_unittest.py +++ /dev/null @@ -1,137 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Unit tests for the eval_scores module. -""" - -import os -import shutil -import tempfile -import unittest - -import pydub - -from . import data_access -from . import eval_scores -from . import eval_scores_factory -from . import signal_processing - - -class TestEvalScores(unittest.TestCase): - """Unit tests for the eval_scores module. - """ - - def setUp(self): - """Create temporary output folder and two audio track files.""" - self._output_path = tempfile.mkdtemp() - - # Create fake reference and tested (i.e., APM output) audio track files. - silence = pydub.AudioSegment.silent(duration=1000, frame_rate=48000) - fake_reference_signal = (signal_processing.SignalProcessingUtils. - GenerateWhiteNoise(silence)) - fake_tested_signal = (signal_processing.SignalProcessingUtils. - GenerateWhiteNoise(silence)) - - # Save fake audio tracks. - self._fake_reference_signal_filepath = os.path.join( - self._output_path, 'fake_ref.wav') - signal_processing.SignalProcessingUtils.SaveWav( - self._fake_reference_signal_filepath, fake_reference_signal) - self._fake_tested_signal_filepath = os.path.join( - self._output_path, 'fake_test.wav') - signal_processing.SignalProcessingUtils.SaveWav( - self._fake_tested_signal_filepath, fake_tested_signal) - - def tearDown(self): - """Recursively delete temporary folder.""" - shutil.rmtree(self._output_path) - - def testRegisteredClasses(self): - # Evaluation score names to exclude (tested separately). - exceptions = ['thd', 'echo_metric'] - - # Preliminary check. - self.assertTrue(os.path.exists(self._output_path)) - - # Check that there is at least one registered evaluation score worker. - registered_classes = eval_scores.EvaluationScore.REGISTERED_CLASSES - self.assertIsInstance(registered_classes, dict) - self.assertGreater(len(registered_classes), 0) - - # Instance evaluation score workers factory with fake dependencies. 
- eval_score_workers_factory = ( - eval_scores_factory.EvaluationScoreWorkerFactory( - polqa_tool_bin_path=os.path.join( - os.path.dirname(os.path.abspath(__file__)), 'fake_polqa'), - echo_metric_tool_bin_path=None)) - eval_score_workers_factory.SetScoreFilenamePrefix('scores-') - - # Try each registered evaluation score worker. - for eval_score_name in registered_classes: - if eval_score_name in exceptions: - continue - - # Instance evaluation score worker. - eval_score_worker = eval_score_workers_factory.GetInstance( - registered_classes[eval_score_name]) - - # Set fake input metadata and reference and test file paths, then run. - eval_score_worker.SetReferenceSignalFilepath( - self._fake_reference_signal_filepath) - eval_score_worker.SetTestedSignalFilepath( - self._fake_tested_signal_filepath) - eval_score_worker.Run(self._output_path) - - # Check output. - score = data_access.ScoreFile.Load( - eval_score_worker.output_filepath) - self.assertTrue(isinstance(score, float)) - - def testTotalHarmonicDistorsionScore(self): - # Init. - pure_tone_freq = 5000.0 - eval_score_worker = eval_scores.TotalHarmonicDistorsionScore('scores-') - eval_score_worker.SetInputSignalMetadata({ - 'signal': - 'pure_tone', - 'frequency': - pure_tone_freq, - 'test_data_gen_name': - 'identity', - 'test_data_gen_config': - 'default', - }) - template = pydub.AudioSegment.silent(duration=1000, frame_rate=48000) - - # Create 3 test signals: pure tone, pure tone + white noise, white noise - # only. - pure_tone = signal_processing.SignalProcessingUtils.GeneratePureTone( - template, pure_tone_freq) - white_noise = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - template) - noisy_tone = signal_processing.SignalProcessingUtils.MixSignals( - pure_tone, white_noise) - - # Compute scores for increasingly distorted pure tone signals. - scores = [None, None, None] - for index, tested_signal in enumerate( - [pure_tone, noisy_tone, white_noise]): - # Save signal. - tmp_filepath = os.path.join(self._output_path, 'tmp_thd.wav') - signal_processing.SignalProcessingUtils.SaveWav( - tmp_filepath, tested_signal) - - # Compute score. - eval_score_worker.SetTestedSignalFilepath(tmp_filepath) - eval_score_worker.Run(self._output_path) - scores[index] = eval_score_worker.score - - # Remove output file to avoid caching. - os.remove(eval_score_worker.output_filepath) - - # Validate scores (lowest score with a pure tone). - self.assertTrue(all([scores[i + 1] > scores[i] for i in range(2)])) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/evaluation.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/evaluation.py deleted file mode 100644 index 2599085329..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/evaluation.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Evaluator of the APM module. -""" - -import logging - - -class ApmModuleEvaluator(object): - """APM evaluator class. 
- """ - - def __init__(self): - pass - - @classmethod - def Run(cls, evaluation_score_workers, apm_input_metadata, - apm_output_filepath, reference_input_filepath, - render_input_filepath, output_path): - """Runs the evaluation. - - Iterates over the given evaluation score workers. - - Args: - evaluation_score_workers: list of EvaluationScore instances. - apm_input_metadata: dictionary with metadata of the APM input. - apm_output_filepath: path to the audio track file with the APM output. - reference_input_filepath: path to the reference audio track file. - output_path: output path. - - Returns: - A dict of evaluation score name and score pairs. - """ - # Init. - scores = {} - - for evaluation_score_worker in evaluation_score_workers: - logging.info(' computing <%s> score', - evaluation_score_worker.NAME) - evaluation_score_worker.SetInputSignalMetadata(apm_input_metadata) - evaluation_score_worker.SetReferenceSignalFilepath( - reference_input_filepath) - evaluation_score_worker.SetTestedSignalFilepath( - apm_output_filepath) - evaluation_score_worker.SetRenderSignalFilepath( - render_input_filepath) - - evaluation_score_worker.Run(output_path) - scores[ - evaluation_score_worker.NAME] = evaluation_score_worker.score - - return scores diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/exceptions.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/exceptions.py deleted file mode 100644 index 893901d359..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/exceptions.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Exception classes. -""" - - -class FileNotFoundError(Exception): - """File not found exception. - """ - pass - - -class SignalProcessingException(Exception): - """Signal processing exception. - """ - pass - - -class InputMixerException(Exception): - """Input mixer exception. - """ - pass - - -class InputSignalCreatorException(Exception): - """Input signal creator exception. - """ - pass - - -class EvaluationScoreException(Exception): - """Evaluation score exception. - """ - pass - - -class InitializationException(Exception): - """Initialization exception. - """ - pass diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/export.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/export.py deleted file mode 100644 index fe3a6c7cb9..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/export.py +++ /dev/null @@ -1,426 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. 
- -import functools -import hashlib -import logging -import os -import re -import sys - -try: - import csscompressor -except ImportError: - logging.critical( - 'Cannot import the third-party Python package csscompressor') - sys.exit(1) - -try: - import jsmin -except ImportError: - logging.critical('Cannot import the third-party Python package jsmin') - sys.exit(1) - - -class HtmlExport(object): - """HTML exporter class for APM quality scores.""" - - _NEW_LINE = '\n' - - # CSS and JS file paths. - _PATH = os.path.dirname(os.path.realpath(__file__)) - _CSS_FILEPATH = os.path.join(_PATH, 'results.css') - _CSS_MINIFIED = True - _JS_FILEPATH = os.path.join(_PATH, 'results.js') - _JS_MINIFIED = True - - def __init__(self, output_filepath): - self._scores_data_frame = None - self._output_filepath = output_filepath - - def Export(self, scores_data_frame): - """Exports scores into an HTML file. - - Args: - scores_data_frame: DataFrame instance. - """ - self._scores_data_frame = scores_data_frame - html = [ - '', - self._BuildHeader(), - (''), '', - self._BuildBody(), '', '' - ] - self._Save(self._output_filepath, self._NEW_LINE.join(html)) - - def _BuildHeader(self): - """Builds the section of the HTML file. - - The header contains the page title and either embedded or linked CSS and JS - files. - - Returns: - A string with ... HTML. - """ - html = ['', 'Results'] - - # Add Material Design hosted libs. - html.append('') - html.append( - '') - html.append( - '') - html.append('') - - # Embed custom JavaScript and CSS files. - html.append('') - html.append('') - - html.append('') - - return self._NEW_LINE.join(html) - - def _BuildBody(self): - """Builds the content of the section.""" - score_names = self._scores_data_frame[ - 'eval_score_name'].drop_duplicates().values.tolist() - - html = [ - ('
'), - '
', - '
', - 'APM QA results ({})'.format( - self._output_filepath), - '
', - ] - - # Tab selectors. - html.append('
') - for tab_index, score_name in enumerate(score_names): - is_active = tab_index == 0 - html.append('' - '{}'.format(tab_index, - ' is-active' if is_active else '', - self._FormatName(score_name))) - html.append('
') - - html.append('
') - html.append( - '
') - - # Tabs content. - for tab_index, score_name in enumerate(score_names): - html.append('
'.format( - ' is-active' if is_active else '', tab_index)) - html.append('
') - html.append( - self._BuildScoreTab(score_name, ('s{}'.format(tab_index), ))) - html.append('
') - html.append('
') - - html.append('
') - html.append('
') - - # Add snackbar for notifications. - html.append( - '
' - '
' - '' - '
') - - return self._NEW_LINE.join(html) - - def _BuildScoreTab(self, score_name, anchor_data): - """Builds the content of a tab.""" - # Find unique values. - scores = self._scores_data_frame[ - self._scores_data_frame.eval_score_name == score_name] - apm_configs = sorted(self._FindUniqueTuples(scores, ['apm_config'])) - test_data_gen_configs = sorted( - self._FindUniqueTuples(scores, - ['test_data_gen', 'test_data_gen_params'])) - - html = [ - '
', - '
', - '
', - (''), - ] - - # Header. - html.append('') - for test_data_gen_info in test_data_gen_configs: - html.append(''.format( - self._FormatName(test_data_gen_info[0]), - test_data_gen_info[1])) - html.append('') - - # Body. - html.append('') - for apm_config in apm_configs: - html.append('') - for test_data_gen_info in test_data_gen_configs: - dialog_id = self._ScoreStatsInspectorDialogId( - score_name, apm_config[0], test_data_gen_info[0], - test_data_gen_info[1]) - html.append( - ''. - format( - dialog_id, - self._BuildScoreTableCell(score_name, - test_data_gen_info[0], - test_data_gen_info[1], - apm_config[0]))) - html.append('') - html.append('') - - html.append( - '
APM config / Test data generator{} {}
' + self._FormatName(apm_config[0]) + '{}
') - - html.append( - self._BuildScoreStatsInspectorDialogs(score_name, apm_configs, - test_data_gen_configs, - anchor_data)) - - return self._NEW_LINE.join(html) - - def _BuildScoreTableCell(self, score_name, test_data_gen, - test_data_gen_params, apm_config): - """Builds the content of a table cell for a score table.""" - scores = self._SliceDataForScoreTableCell(score_name, apm_config, - test_data_gen, - test_data_gen_params) - stats = self._ComputeScoreStats(scores) - - html = [] - items_id_prefix = (score_name + test_data_gen + test_data_gen_params + - apm_config) - if stats['count'] == 1: - # Show the only available score. - item_id = hashlib.md5(items_id_prefix.encode('utf-8')).hexdigest() - html.append('
{1:f}
'.format( - item_id, scores['score'].mean())) - html.append( - '
{}' - '
'.format(item_id, 'single value')) - else: - # Show stats. - for stat_name in ['min', 'max', 'mean', 'std dev']: - item_id = hashlib.md5( - (items_id_prefix + stat_name).encode('utf-8')).hexdigest() - html.append('
{1:f}
'.format( - item_id, stats[stat_name])) - html.append( - '
{}' - '
'.format(item_id, stat_name)) - - return self._NEW_LINE.join(html) - - def _BuildScoreStatsInspectorDialogs(self, score_name, apm_configs, - test_data_gen_configs, anchor_data): - """Builds a set of score stats inspector dialogs.""" - html = [] - for apm_config in apm_configs: - for test_data_gen_info in test_data_gen_configs: - dialog_id = self._ScoreStatsInspectorDialogId( - score_name, apm_config[0], test_data_gen_info[0], - test_data_gen_info[1]) - - html.append(''.format(dialog_id)) - - # Content. - html.append('
') - html.append( - '
APM config preset: {}
' - 'Test data generator: {} ({})
'. - format(self._FormatName(apm_config[0]), - self._FormatName(test_data_gen_info[0]), - test_data_gen_info[1])) - html.append( - self._BuildScoreStatsInspectorDialog( - score_name, apm_config[0], test_data_gen_info[0], - test_data_gen_info[1], anchor_data + (dialog_id, ))) - html.append('
') - - # Actions. - html.append('
') - html.append('') - html.append('
') - - html.append('
') - - return self._NEW_LINE.join(html) - - def _BuildScoreStatsInspectorDialog(self, score_name, apm_config, - test_data_gen, test_data_gen_params, - anchor_data): - """Builds one score stats inspector dialog.""" - scores = self._SliceDataForScoreTableCell(score_name, apm_config, - test_data_gen, - test_data_gen_params) - - capture_render_pairs = sorted( - self._FindUniqueTuples(scores, ['capture', 'render'])) - echo_simulators = sorted( - self._FindUniqueTuples(scores, ['echo_simulator'])) - - html = [ - '' - ] - - # Header. - html.append('') - for echo_simulator in echo_simulators: - html.append('') - html.append('') - - # Body. - html.append('') - for row, (capture, render) in enumerate(capture_render_pairs): - html.append(''.format( - capture, render)) - for col, echo_simulator in enumerate(echo_simulators): - score_tuple = self._SliceDataForScoreStatsTableCell( - scores, capture, render, echo_simulator[0]) - cell_class = 'r{}c{}'.format(row, col) - html.append(''.format( - cell_class, - self._BuildScoreStatsInspectorTableCell( - score_tuple, anchor_data + (cell_class, )))) - html.append('') - html.append('') - - html.append('
Capture-Render / Echo simulator' + self._FormatName(echo_simulator[0]) + '
{}
{}
{}
') - - # Placeholder for the audio inspector. - html.append('
') - - return self._NEW_LINE.join(html) - - def _BuildScoreStatsInspectorTableCell(self, score_tuple, anchor_data): - """Builds the content of a cell of a score stats inspector.""" - anchor = '&'.join(anchor_data) - html = [('
{}
' - '').format(score_tuple.score, anchor)] - - # Add all the available file paths as hidden data. - for field_name in score_tuple.keys(): - if field_name.endswith('_filepath'): - html.append( - ''.format( - field_name, score_tuple[field_name])) - - return self._NEW_LINE.join(html) - - def _SliceDataForScoreTableCell(self, score_name, apm_config, - test_data_gen, test_data_gen_params): - """Slices `self._scores_data_frame` to extract the data for a tab.""" - masks = [] - masks.append(self._scores_data_frame.eval_score_name == score_name) - masks.append(self._scores_data_frame.apm_config == apm_config) - masks.append(self._scores_data_frame.test_data_gen == test_data_gen) - masks.append(self._scores_data_frame.test_data_gen_params == - test_data_gen_params) - mask = functools.reduce((lambda i1, i2: i1 & i2), masks) - del masks - return self._scores_data_frame[mask] - - @classmethod - def _SliceDataForScoreStatsTableCell(cls, scores, capture, render, - echo_simulator): - """Slices `scores` to extract the data for a tab.""" - masks = [] - - masks.append(scores.capture == capture) - masks.append(scores.render == render) - masks.append(scores.echo_simulator == echo_simulator) - mask = functools.reduce((lambda i1, i2: i1 & i2), masks) - del masks - - sliced_data = scores[mask] - assert len(sliced_data) == 1, 'single score is expected' - return sliced_data.iloc[0] - - @classmethod - def _FindUniqueTuples(cls, data_frame, fields): - """Slices `data_frame` to a list of fields and finds unique tuples.""" - return data_frame[fields].drop_duplicates().values.tolist() - - @classmethod - def _ComputeScoreStats(cls, data_frame): - """Computes score stats.""" - scores = data_frame['score'] - return { - 'count': scores.count(), - 'min': scores.min(), - 'max': scores.max(), - 'mean': scores.mean(), - 'std dev': scores.std(), - } - - @classmethod - def _ScoreStatsInspectorDialogId(cls, score_name, apm_config, - test_data_gen, test_data_gen_params): - """Assigns a unique name to a dialog.""" - return 'score-stats-dialog-' + hashlib.md5( - 'score-stats-inspector-{}-{}-{}-{}'.format( - score_name, apm_config, test_data_gen, - test_data_gen_params).encode('utf-8')).hexdigest() - - @classmethod - def _Save(cls, output_filepath, html): - """Writes the HTML file. - - Args: - output_filepath: output file path. - html: string with the HTML content. - """ - with open(output_filepath, 'w') as f: - f.write(html) - - @classmethod - def _FormatName(cls, name): - """Formats a name. - - Args: - name: a string. - - Returns: - A copy of name in which underscores and dashes are replaced with a space. - """ - return re.sub(r'[_\-]', ' ', name) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/export_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/export_unittest.py deleted file mode 100644 index 412aa7c4e7..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/export_unittest.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Unit tests for the export module. 
-""" - -import logging -import os -import shutil -import tempfile -import unittest - -import pyquery as pq - -from . import audioproc_wrapper -from . import collect_data -from . import eval_scores_factory -from . import evaluation -from . import export -from . import simulation -from . import test_data_generation_factory - - -class TestExport(unittest.TestCase): - """Unit tests for the export module. - """ - - _CLEAN_TMP_OUTPUT = True - - def setUp(self): - """Creates temporary data to export.""" - self._tmp_path = tempfile.mkdtemp() - - # Run a fake experiment to produce data to export. - simulator = simulation.ApmModuleSimulator( - test_data_generator_factory=( - test_data_generation_factory.TestDataGeneratorFactory( - aechen_ir_database_path='', - noise_tracks_path='', - copy_with_identity=False)), - evaluation_score_factory=( - eval_scores_factory.EvaluationScoreWorkerFactory( - polqa_tool_bin_path=os.path.join( - os.path.dirname(os.path.abspath(__file__)), - 'fake_polqa'), - echo_metric_tool_bin_path=None)), - ap_wrapper=audioproc_wrapper.AudioProcWrapper( - audioproc_wrapper.AudioProcWrapper. - DEFAULT_APM_SIMULATOR_BIN_PATH), - evaluator=evaluation.ApmModuleEvaluator()) - simulator.Run( - config_filepaths=['apm_configs/default.json'], - capture_input_filepaths=[ - os.path.join(self._tmp_path, 'pure_tone-440_1000.wav'), - os.path.join(self._tmp_path, 'pure_tone-880_1000.wav'), - ], - test_data_generator_names=['identity', 'white_noise'], - eval_score_names=['audio_level_peak', 'audio_level_mean'], - output_dir=self._tmp_path) - - # Export results. - p = collect_data.InstanceArgumentsParser() - args = p.parse_args(['--output_dir', self._tmp_path]) - src_path = collect_data.ConstructSrcPath(args) - self._data_to_export = collect_data.FindScores(src_path, args) - - def tearDown(self): - """Recursively deletes temporary folders.""" - if self._CLEAN_TMP_OUTPUT: - shutil.rmtree(self._tmp_path) - else: - logging.warning(self.id() + ' did not clean the temporary path ' + - (self._tmp_path)) - - def testCreateHtmlReport(self): - fn_out = os.path.join(self._tmp_path, 'results.html') - exporter = export.HtmlExport(fn_out) - exporter.Export(self._data_to_export) - - document = pq.PyQuery(filename=fn_out) - self.assertIsInstance(document, pq.PyQuery) - # TODO(alessiob): Use PyQuery API to check the HTML file. diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/external_vad.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/external_vad.py deleted file mode 100644 index a7db7b4840..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/external_vad.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. - -from __future__ import division - -import logging -import os -import subprocess -import shutil -import sys -import tempfile - -try: - import numpy as np -except ImportError: - logging.critical('Cannot import the third-party Python package numpy') - sys.exit(1) - -from . 
import signal_processing - - -class ExternalVad(object): - def __init__(self, path_to_binary, name): - """Args: - path_to_binary: path to binary that accepts '-i ', '-o - '. There must be one float value per - 10ms audio - name: a name to identify the external VAD. Used for saving - the output as extvad_output-. - """ - self._path_to_binary = path_to_binary - self.name = name - assert os.path.exists(self._path_to_binary), (self._path_to_binary) - self._vad_output = None - - def Run(self, wav_file_path): - _signal = signal_processing.SignalProcessingUtils.LoadWav( - wav_file_path) - if _signal.channels != 1: - raise NotImplementedError('Multiple-channel' - ' annotations not implemented') - if _signal.frame_rate != 48000: - raise NotImplementedError('Frame rates ' - 'other than 48000 not implemented') - - tmp_path = tempfile.mkdtemp() - try: - output_file_path = os.path.join(tmp_path, self.name + '_vad.tmp') - subprocess.call([ - self._path_to_binary, '-i', wav_file_path, '-o', - output_file_path - ]) - self._vad_output = np.fromfile(output_file_path, np.float32) - except Exception as e: - logging.error('Error while running the ' + self.name + ' VAD (' + - e.message + ')') - finally: - if os.path.exists(tmp_path): - shutil.rmtree(tmp_path) - - def GetVadOutput(self): - assert self._vad_output is not None - return self._vad_output - - @classmethod - def ConstructVadDict(cls, vad_paths, vad_names): - external_vads = {} - for path, name in zip(vad_paths, vad_names): - external_vads[name] = ExternalVad(path, name) - return external_vads diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/fake_external_vad.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/fake_external_vad.py deleted file mode 100755 index f679f8c94a..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/fake_external_vad.py +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/python -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -import argparse -import numpy as np - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument('-i', required=True) - parser.add_argument('-o', required=True) - - args = parser.parse_args() - - array = np.arange(100, dtype=np.float32) - array.tofile(open(args.o, 'w')) - - -if __name__ == '__main__': - main() diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/fake_polqa.cc b/modules/audio_processing/test/py_quality_assessment/quality_assessment/fake_polqa.cc deleted file mode 100644 index 6f3b2d1dd7..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/fake_polqa.cc +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include <fstream> -#include <iostream> -#include <string> - -#include "absl/strings/string_view.h" -#include "rtc_base/checks.h" - -namespace webrtc { -namespace test { -namespace { - -const char* const kErrorMessage = "-Out /path/to/output/file is mandatory"; - -// Writes fake output intended to be parsed by -// quality_assessment.eval_scores.PolqaScore. -void WriteOutputFile(absl::string_view output_file_path) { - RTC_CHECK_NE(output_file_path, ""); - std::ofstream out(std::string{output_file_path}); - RTC_CHECK(!out.bad()); - out << "* Fake Polqa output" << std::endl; - out << "FakeField1\tPolqaScore\tFakeField2" << std::endl; - out << "FakeValue1\t3.25\tFakeValue2" << std::endl; - out.close(); -} - -} // namespace - -int main(int argc, char* argv[]) { - // Find "-Out" and use its next argument as output file path. - RTC_CHECK_GE(argc, 3) << kErrorMessage; - const std::string kSoughtFlagName = "-Out"; - for (int i = 1; i < argc - 1; ++i) { - if (kSoughtFlagName.compare(argv[i]) == 0) { - WriteOutputFile(argv[i + 1]); - return 0; - } - } - RTC_FATAL() << kErrorMessage; -} - -} // namespace test -} // namespace webrtc - -int main(int argc, char* argv[]) { - return webrtc::test::main(argc, argv); -} diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer.py deleted file mode 100644 index af022bd461..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Input mixer module. -""" - -import logging -import os - -from . import exceptions -from . import signal_processing - - -class ApmInputMixer(object): - """Class to mix a set of audio segments down to the APM input.""" - - _HARD_CLIPPING_LOG_MSG = 'hard clipping detected in the mixed signal' - - def __init__(self): - pass - - @classmethod - def HardClippingLogMessage(cls): - """Returns the log message used when hard clipping is detected in the mix. - - This method is mainly intended to be used by the unit tests. - """ - return cls._HARD_CLIPPING_LOG_MSG - - @classmethod - def Mix(cls, output_path, capture_input_filepath, echo_filepath): - """Mixes capture and echo. - - Creates the overall capture input for APM by mixing the "echo-free" capture - signal with the echo signal (e.g., echo simulated via the - echo_path_simulation module). - - The echo signal cannot be shorter than the capture signal and the generated - mix will have the same duration of the capture signal. The latter property - is enforced in order to let the input of APM and the reference signal - created by TestDataGenerator have the same length (required for the - evaluation step). - - Hard-clipping may occur in the mix; a warning is raised when this happens. - - If `echo_filepath` is None, nothing is done and `capture_input_filepath` is - returned. - - Args: - speech: AudioSegment instance. - echo_path: AudioSegment instance or None. - - Returns: - Path to the mix audio track file.
- """ - if echo_filepath is None: - return capture_input_filepath - - # Build the mix output file name as a function of the echo file name. - # This ensures that if the internal parameters of the echo path simulator - # change, no erroneous cache hit occurs. - echo_file_name, _ = os.path.splitext(os.path.split(echo_filepath)[1]) - capture_input_file_name, _ = os.path.splitext( - os.path.split(capture_input_filepath)[1]) - mix_filepath = os.path.join( - output_path, - 'mix_capture_{}_{}.wav'.format(capture_input_file_name, - echo_file_name)) - - # Create the mix if not done yet. - mix = None - if not os.path.exists(mix_filepath): - echo_free_capture = signal_processing.SignalProcessingUtils.LoadWav( - capture_input_filepath) - echo = signal_processing.SignalProcessingUtils.LoadWav( - echo_filepath) - - if signal_processing.SignalProcessingUtils.CountSamples(echo) < ( - signal_processing.SignalProcessingUtils.CountSamples( - echo_free_capture)): - raise exceptions.InputMixerException( - 'echo cannot be shorter than capture') - - mix = echo_free_capture.overlay(echo) - signal_processing.SignalProcessingUtils.SaveWav(mix_filepath, mix) - - # Check if hard clipping occurs. - if mix is None: - mix = signal_processing.SignalProcessingUtils.LoadWav(mix_filepath) - if signal_processing.SignalProcessingUtils.DetectHardClipping(mix): - logging.warning(cls._HARD_CLIPPING_LOG_MSG) - - return mix_filepath diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer_unittest.py deleted file mode 100644 index 4fd5e4f1ee..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer_unittest.py +++ /dev/null @@ -1,140 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Unit tests for the input mixer module. -""" - -import logging -import os -import shutil -import tempfile -import unittest - -import mock - -from . import exceptions -from . import input_mixer -from . import signal_processing - - -class TestApmInputMixer(unittest.TestCase): - """Unit tests for the ApmInputMixer class. - """ - - # Audio track file names created in setUp(). - _FILENAMES = ['capture', 'echo_1', 'echo_2', 'shorter', 'longer'] - - # Target peak power level (dBFS) of each audio track file created in setUp(). - # These values are hand-crafted in order to make saturation happen when - # capture and echo_2 are mixed and the contrary for capture and echo_1. - # None means that the power is not changed. - _MAX_PEAK_POWER_LEVELS = [-10.0, -5.0, 0.0, None, None] - - # Audio track file durations in milliseconds. - _DURATIONS = [1000, 1000, 1000, 800, 1200] - - _SAMPLE_RATE = 48000 - - def setUp(self): - """Creates temporary data.""" - self._tmp_path = tempfile.mkdtemp() - - # Create audio track files. - self._audio_tracks = {} - for filename, peak_power, duration in zip(self._FILENAMES, - self._MAX_PEAK_POWER_LEVELS, - self._DURATIONS): - audio_track_filepath = os.path.join(self._tmp_path, - '{}.wav'.format(filename)) - - # Create a pure tone with the target peak power level. 
- template = signal_processing.SignalProcessingUtils.GenerateSilence( - duration=duration, sample_rate=self._SAMPLE_RATE) - signal = signal_processing.SignalProcessingUtils.GeneratePureTone( - template) - if peak_power is not None: - signal = signal.apply_gain(-signal.max_dBFS + peak_power) - - signal_processing.SignalProcessingUtils.SaveWav( - audio_track_filepath, signal) - self._audio_tracks[filename] = { - 'filepath': - audio_track_filepath, - 'num_samples': - signal_processing.SignalProcessingUtils.CountSamples(signal) - } - - def tearDown(self): - """Recursively deletes temporary folders.""" - shutil.rmtree(self._tmp_path) - - def testCheckMixSameDuration(self): - """Checks the duration when mixing capture and echo with same duration.""" - mix_filepath = input_mixer.ApmInputMixer.Mix( - self._tmp_path, self._audio_tracks['capture']['filepath'], - self._audio_tracks['echo_1']['filepath']) - self.assertTrue(os.path.exists(mix_filepath)) - - mix = signal_processing.SignalProcessingUtils.LoadWav(mix_filepath) - self.assertEqual( - self._audio_tracks['capture']['num_samples'], - signal_processing.SignalProcessingUtils.CountSamples(mix)) - - def testRejectShorterEcho(self): - """Rejects echo signals that are shorter than the capture signal.""" - try: - _ = input_mixer.ApmInputMixer.Mix( - self._tmp_path, self._audio_tracks['capture']['filepath'], - self._audio_tracks['shorter']['filepath']) - self.fail('no exception raised') - except exceptions.InputMixerException: - pass - - def testCheckMixDurationWithLongerEcho(self): - """Checks the duration when mixing an echo longer than the capture.""" - mix_filepath = input_mixer.ApmInputMixer.Mix( - self._tmp_path, self._audio_tracks['capture']['filepath'], - self._audio_tracks['longer']['filepath']) - self.assertTrue(os.path.exists(mix_filepath)) - - mix = signal_processing.SignalProcessingUtils.LoadWav(mix_filepath) - self.assertEqual( - self._audio_tracks['capture']['num_samples'], - signal_processing.SignalProcessingUtils.CountSamples(mix)) - - def testCheckOutputFileNamesConflict(self): - """Checks that different echo files lead to different output file names.""" - mix1_filepath = input_mixer.ApmInputMixer.Mix( - self._tmp_path, self._audio_tracks['capture']['filepath'], - self._audio_tracks['echo_1']['filepath']) - self.assertTrue(os.path.exists(mix1_filepath)) - - mix2_filepath = input_mixer.ApmInputMixer.Mix( - self._tmp_path, self._audio_tracks['capture']['filepath'], - self._audio_tracks['echo_2']['filepath']) - self.assertTrue(os.path.exists(mix2_filepath)) - - self.assertNotEqual(mix1_filepath, mix2_filepath) - - def testHardClippingLogExpected(self): - """Checks that hard clipping warning is raised when occurring.""" - logging.warning = mock.MagicMock(name='warning') - _ = input_mixer.ApmInputMixer.Mix( - self._tmp_path, self._audio_tracks['capture']['filepath'], - self._audio_tracks['echo_2']['filepath']) - logging.warning.assert_called_once_with( - input_mixer.ApmInputMixer.HardClippingLogMessage()) - - def testHardClippingLogNotExpected(self): - """Checks that hard clipping warning is not raised when not occurring.""" - logging.warning = mock.MagicMock(name='warning') - _ = input_mixer.ApmInputMixer.Mix( - self._tmp_path, self._audio_tracks['capture']['filepath'], - self._audio_tracks['echo_1']['filepath']) - self.assertNotIn( - mock.call(input_mixer.ApmInputMixer.HardClippingLogMessage()), - logging.warning.call_args_list) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_signal_creator.py 
b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_signal_creator.py deleted file mode 100644 index b64fdcca89..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_signal_creator.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Input signal creator module. -""" - -from . import exceptions -from . import signal_processing - - -class InputSignalCreator(object): - """Input signal creator class. - """ - - @classmethod - def Create(cls, name, raw_params): - """Creates a input signal and its metadata. - - Args: - name: Input signal creator name. - raw_params: Tuple of parameters to pass to the specific signal creator. - - Returns: - (AudioSegment, dict) tuple. - """ - try: - signal = {} - params = {} - - if name == 'pure_tone': - params['frequency'] = float(raw_params[0]) - params['duration'] = int(raw_params[1]) - signal = cls._CreatePureTone(params['frequency'], - params['duration']) - else: - raise exceptions.InputSignalCreatorException( - 'Invalid input signal creator name') - - # Complete metadata. - params['signal'] = name - - return signal, params - except (TypeError, AssertionError) as e: - raise exceptions.InputSignalCreatorException( - 'Invalid signal creator parameters: {}'.format(e)) - - @classmethod - def _CreatePureTone(cls, frequency, duration): - """ - Generates a pure tone at 48000 Hz. - - Args: - frequency: Float in (0-24000] (Hz). - duration: Integer (milliseconds). - - Returns: - AudioSegment instance. - """ - assert 0 < frequency <= 24000 - assert duration > 0 - template = signal_processing.SignalProcessingUtils.GenerateSilence( - duration) - return signal_processing.SignalProcessingUtils.GeneratePureTone( - template, frequency) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/results.css b/modules/audio_processing/test/py_quality_assessment/quality_assessment/results.css deleted file mode 100644 index 2f406bb002..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/results.css +++ /dev/null @@ -1,32 +0,0 @@ -/* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -td.selected-score { - background-color: #DDD; -} - -td.single-score-cell{ - text-align: center; -} - -.audio-inspector { - text-align: center; -} - -.audio-inspector div{ - margin-bottom: 0; - padding-bottom: 0; - padding-top: 0; -} - -.audio-inspector div div{ - margin-bottom: 0; - padding-bottom: 0; - padding-top: 0; -} diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/results.js b/modules/audio_processing/test/py_quality_assessment/quality_assessment/results.js deleted file mode 100644 index 8e47411058..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/results.js +++ /dev/null @@ -1,376 +0,0 @@ -// Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -// -// Use of this source code is governed by a BSD-style license -// that can be found in the LICENSE file in the root of the source -// tree. An additional intellectual property rights grant can be found -// in the file PATENTS. All contributing project authors may -// be found in the AUTHORS file in the root of the source tree. - -/** - * Opens the score stats inspector dialog. - * @param {String} dialogId: identifier of the dialog to show. - * @return {DOMElement} The dialog element that has been opened. - */ -function openScoreStatsInspector(dialogId) { - var dialog = document.getElementById(dialogId); - dialog.showModal(); - return dialog; -} - -/** - * Closes the score stats inspector dialog. - */ -function closeScoreStatsInspector() { - var dialog = document.querySelector('dialog[open]'); - if (dialog == null) - return; - dialog.close(); -} - -/** - * Audio inspector class. - * @constructor - */ -function AudioInspector() { - console.debug('Creating an AudioInspector instance.'); - this.audioPlayer_ = new Audio(); - this.metadata_ = {}; - this.currentScore_ = null; - this.audioInspector_ = null; - this.snackbarContainer_ = document.querySelector('#snackbar'); - - // Get base URL without anchors. - this.baseUrl_ = window.location.href; - var index = this.baseUrl_.indexOf('#'); - if (index > 0) - this.baseUrl_ = this.baseUrl_.substr(0, index) - console.info('Base URL set to "' + window.location.href + '".'); - - window.event.stopPropagation(); - this.createTextAreasForCopy_(); - this.createAudioInspector_(); - this.initializeEventHandlers_(); - - // When MDL is ready, parse the anchor (if any) to show the requested - // experiment. - var self = this; - document.querySelectorAll('header a')[0].addEventListener( - 'mdl-componentupgraded', function() { - if (!self.parseWindowAnchor()) { - // If not experiment is requested, open the first section. - console.info('No anchor parsing, opening the first section.'); - document.querySelectorAll('header a > span')[0].click(); - } - }); -} - -/** - * Parse the anchor in the window URL. - * @return {bool} True if the parsing succeeded. - */ -AudioInspector.prototype.parseWindowAnchor = function() { - var index = location.href.indexOf('#'); - if (index == -1) { - console.debug('No # found in the URL.'); - return false; - } - - var anchor = location.href.substr(index - location.href.length + 1); - console.info('Anchor changed: "' + anchor + '".'); - - var parts = anchor.split('&'); - if (parts.length != 3) { - console.info('Ignoring anchor with invalid number of fields.'); - return false; - } - - var openDialog = document.querySelector('dialog[open]'); - try { - // Open the requested dialog if not already open. 
- if (!openDialog || openDialog.id != parts[1]) { - !openDialog || openDialog.close(); - document.querySelectorAll('header a > span')[ - parseInt(parts[0].substr(1))].click(); - openDialog = openScoreStatsInspector(parts[1]); - } - - // Trigger click on cell. - var cell = openDialog.querySelector('td.' + parts[2]); - cell.focus(); - cell.click(); - - this.showNotification_('Experiment selected.'); - return true; - } catch (e) { - this.showNotification_('Cannot select experiment :('); - console.error('Exception caught while selecting experiment: "' + e + '".'); - } - - return false; -} - -/** - * Set up the inspector for a new score. - * @param {DOMElement} element: Element linked to the selected score. - */ -AudioInspector.prototype.selectedScoreChange = function(element) { - if (this.currentScore_ == element) { return; } - if (this.currentScore_ != null) { - this.currentScore_.classList.remove('selected-score'); - } - this.currentScore_ = element; - this.currentScore_.classList.add('selected-score'); - this.stopAudio(); - - // Read metadata. - var matches = element.querySelectorAll('input[type=hidden]'); - this.metadata_ = {}; - for (var index = 0; index < matches.length; ++index) { - this.metadata_[matches[index].name] = matches[index].value; - } - - // Show the audio inspector interface. - var container = element.parentNode.parentNode.parentNode.parentNode; - var audioInspectorPlaceholder = container.querySelector( - '.audio-inspector-placeholder'); - this.moveInspector_(audioInspectorPlaceholder); -}; - -/** - * Stop playing audio. - */ -AudioInspector.prototype.stopAudio = function() { - console.info('Pausing audio play out.'); - this.audioPlayer_.pause(); -}; - -/** - * Show a text message using the snackbar. - */ -AudioInspector.prototype.showNotification_ = function(text) { - try { - this.snackbarContainer_.MaterialSnackbar.showSnackbar({ - message: text, timeout: 2000}); - } catch (e) { - // Fallback to an alert. - alert(text); - console.warn('Cannot use snackbar: "' + e + '"'); - } -} - -/** - * Move the audio inspector DOM node into the given parent. - * @param {DOMElement} newParentNode: New parent for the inspector. - */ -AudioInspector.prototype.moveInspector_ = function(newParentNode) { - newParentNode.appendChild(this.audioInspector_); -}; - -/** - * Play audio file from url. - * @param {string} metadataFieldName: Metadata field name. - */ -AudioInspector.prototype.playAudio = function(metadataFieldName) { - if (this.metadata_[metadataFieldName] == undefined) { return; } - if (this.metadata_[metadataFieldName] == 'None') { - alert('The selected stream was not used during the experiment.'); - return; - } - this.stopAudio(); - this.audioPlayer_.src = this.metadata_[metadataFieldName]; - console.debug('Audio source URL: "' + this.audioPlayer_.src + '"'); - this.audioPlayer_.play(); - console.info('Playing out audio.'); -}; - -/** - * Create hidden text areas to copy URLs. - * - * For each dialog, one text area is created since it is not possible to select - * text on a text area outside of the active dialog. 
- */ -AudioInspector.prototype.createTextAreasForCopy_ = function() { - var self = this; - document.querySelectorAll('dialog.mdl-dialog').forEach(function(element) { - var textArea = document.createElement("textarea"); - textArea.classList.add('url-copy'); - textArea.style.position = 'fixed'; - textArea.style.bottom = 0; - textArea.style.left = 0; - textArea.style.width = '2em'; - textArea.style.height = '2em'; - textArea.style.border = 'none'; - textArea.style.outline = 'none'; - textArea.style.boxShadow = 'none'; - textArea.style.background = 'transparent'; - textArea.style.fontSize = '6px'; - element.appendChild(textArea); - }); -} - -/** - * Create audio inspector. - */ -AudioInspector.prototype.createAudioInspector_ = function() { - var buttonIndex = 0; - function getButtonHtml(icon, toolTipText, caption, metadataFieldName) { - var buttonId = 'audioInspectorButton' + buttonIndex++; - html = caption == null ? '' : caption; - html += '' - - return html; - } - - // TODO(alessiob): Add timeline and highlight current track by changing icon - // color. - - this.audioInspector_ = document.createElement('div'); - this.audioInspector_.classList.add('audio-inspector'); - this.audioInspector_.innerHTML = - '
' + - '
' + - '
' + - getButtonHtml('play_arrow', 'Simulated echo', 'Ein', - 'echo_filepath') + - '
' + - '
' + - getButtonHtml('stop', 'Stop playing [S]', null, '__stop__') + - '
' + - '
' + - getButtonHtml('play_arrow', 'Render stream', 'Rin', - 'render_filepath') + - '
' + - '
' + - '
' + - '
' + - '
' + - '
' + - getButtonHtml('play_arrow', 'Capture stream (APM input) [1]', - 'Y\'in', 'capture_filepath') + - '
' + - '
APM
' + - '
' + - getButtonHtml('play_arrow', 'APM output [2]', 'Yout', - 'apm_output_filepath') + - '
' + - '
' + - '
' + - '
' + - '
' + - '
' + - getButtonHtml('play_arrow', 'Echo-free capture stream', - 'Yin', 'echo_free_capture_filepath') + - '
' + - '
' + - getButtonHtml('play_arrow', 'Clean capture stream', - 'Yclean', 'clean_capture_input_filepath') + - '
' + - '
' + - getButtonHtml('play_arrow', 'APM reference [3]', 'Yref', - 'apm_reference_filepath') + - '
' + - '
' + - '
'; - - // Add an invisible node as initial container for the audio inspector. - var parent = document.createElement('div'); - parent.style.display = 'none'; - this.moveInspector_(parent); - document.body.appendChild(parent); -}; - -/** - * Initialize event handlers. - */ -AudioInspector.prototype.initializeEventHandlers_ = function() { - var self = this; - - // Score cells. - document.querySelectorAll('td.single-score-cell').forEach(function(element) { - element.onclick = function() { - self.selectedScoreChange(this); - } - }); - - // Copy anchor URLs icons. - if (document.queryCommandSupported('copy')) { - document.querySelectorAll('td.single-score-cell button').forEach( - function(element) { - element.onclick = function() { - // Find the text area in the dialog. - var textArea = element.closest('dialog').querySelector( - 'textarea.url-copy'); - - // Copy. - textArea.value = self.baseUrl_ + '#' + element.getAttribute( - 'data-anchor'); - textArea.select(); - try { - if (!document.execCommand('copy')) - throw 'Copy returned false'; - self.showNotification_('Experiment URL copied.'); - } catch (e) { - self.showNotification_('Cannot copy experiment URL :('); - console.error(e); - } - } - }); - } else { - self.showNotification_( - 'The copy command is disabled. URL copy is not enabled.'); - } - - // Audio inspector buttons. - this.audioInspector_.querySelectorAll('button').forEach(function(element) { - var target = element.querySelector('input[type=hidden]'); - if (target == null) { return; } - element.onclick = function() { - if (target.value == '__stop__') { - self.stopAudio(); - } else { - self.playAudio(target.value); - } - }; - }); - - // Dialog close handlers. - var dialogs = document.querySelectorAll('dialog').forEach(function(element) { - element.onclose = function() { - self.stopAudio(); - } - }); - - // Keyboard shortcuts. - window.onkeyup = function(e) { - var key = e.keyCode ? e.keyCode : e.which; - switch (key) { - case 49: // 1. - self.playAudio('capture_filepath'); - break; - case 50: // 2. - self.playAudio('apm_output_filepath'); - break; - case 51: // 3. - self.playAudio('apm_reference_filepath'); - break; - case 83: // S. - case 115: // s. - self.stopAudio(); - break; - } - }; - - // Hash change. - window.onhashchange = function(e) { - self.parseWindowAnchor(); - } -}; diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing.py deleted file mode 100644 index 95e801903d..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing.py +++ /dev/null @@ -1,359 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Signal processing utility module. 
-""" - -import array -import logging -import os -import sys -import enum - -try: - import numpy as np -except ImportError: - logging.critical('Cannot import the third-party Python package numpy') - sys.exit(1) - -try: - import pydub - import pydub.generators -except ImportError: - logging.critical('Cannot import the third-party Python package pydub') - sys.exit(1) - -try: - import scipy.signal - import scipy.fftpack -except ImportError: - logging.critical('Cannot import the third-party Python package scipy') - sys.exit(1) - -from . import exceptions - - -class SignalProcessingUtils(object): - """Collection of signal processing utilities. - """ - - @enum.unique - class MixPadding(enum.Enum): - NO_PADDING = 0 - ZERO_PADDING = 1 - LOOP = 2 - - def __init__(self): - pass - - @classmethod - def LoadWav(cls, filepath, channels=1): - """Loads wav file. - - Args: - filepath: path to the wav audio track file to load. - channels: number of channels (downmixing to mono by default). - - Returns: - AudioSegment instance. - """ - if not os.path.exists(filepath): - logging.error('cannot find the <%s> audio track file', filepath) - raise exceptions.FileNotFoundError() - return pydub.AudioSegment.from_file(filepath, - format='wav', - channels=channels) - - @classmethod - def SaveWav(cls, output_filepath, signal): - """Saves wav file. - - Args: - output_filepath: path to the wav audio track file to save. - signal: AudioSegment instance. - """ - return signal.export(output_filepath, format='wav') - - @classmethod - def CountSamples(cls, signal): - """Number of samples per channel. - - Args: - signal: AudioSegment instance. - - Returns: - An integer. - """ - number_of_samples = len(signal.get_array_of_samples()) - assert signal.channels > 0 - assert number_of_samples % signal.channels == 0 - return number_of_samples / signal.channels - - @classmethod - def GenerateSilence(cls, duration=1000, sample_rate=48000): - """Generates silence. - - This method can also be used to create a template AudioSegment instance. - A template can then be used with other Generate*() methods accepting an - AudioSegment instance as argument. - - Args: - duration: duration in ms. - sample_rate: sample rate. - - Returns: - AudioSegment instance. - """ - return pydub.AudioSegment.silent(duration, sample_rate) - - @classmethod - def GeneratePureTone(cls, template, frequency=440.0): - """Generates a pure tone. - - The pure tone is generated with the same duration and in the same format of - the given template signal. - - Args: - template: AudioSegment instance. - frequency: Frequency of the pure tone in Hz. - - Return: - AudioSegment instance. - """ - if frequency > template.frame_rate >> 1: - raise exceptions.SignalProcessingException('Invalid frequency') - - generator = pydub.generators.Sine(sample_rate=template.frame_rate, - bit_depth=template.sample_width * 8, - freq=frequency) - - return generator.to_audio_segment(duration=len(template), volume=0.0) - - @classmethod - def GenerateWhiteNoise(cls, template): - """Generates white noise. - - The white noise is generated with the same duration and in the same format - of the given template signal. - - Args: - template: AudioSegment instance. - - Return: - AudioSegment instance. 
- """ - generator = pydub.generators.WhiteNoise( - sample_rate=template.frame_rate, - bit_depth=template.sample_width * 8) - return generator.to_audio_segment(duration=len(template), volume=0.0) - - @classmethod - def AudioSegmentToRawData(cls, signal): - samples = signal.get_array_of_samples() - if samples.typecode != 'h': - raise exceptions.SignalProcessingException( - 'Unsupported samples type') - return np.array(signal.get_array_of_samples(), np.int16) - - @classmethod - def Fft(cls, signal, normalize=True): - if signal.channels != 1: - raise NotImplementedError('multiple-channel FFT not implemented') - x = cls.AudioSegmentToRawData(signal).astype(np.float32) - if normalize: - x /= max(abs(np.max(x)), 1.0) - y = scipy.fftpack.fft(x) - return y[:len(y) / 2] - - @classmethod - def DetectHardClipping(cls, signal, threshold=2): - """Detects hard clipping. - - Hard clipping is simply detected by counting samples that touch either the - lower or upper bound too many times in a row (according to `threshold`). - The presence of a single sequence of samples meeting such property is enough - to label the signal as hard clipped. - - Args: - signal: AudioSegment instance. - threshold: minimum number of samples at full-scale in a row. - - Returns: - True if hard clipping is detect, False otherwise. - """ - if signal.channels != 1: - raise NotImplementedError( - 'multiple-channel clipping not implemented') - if signal.sample_width != 2: # Note that signal.sample_width is in bytes. - raise exceptions.SignalProcessingException( - 'hard-clipping detection only supported for 16 bit samples') - samples = cls.AudioSegmentToRawData(signal) - - # Detect adjacent clipped samples. - samples_type_info = np.iinfo(samples.dtype) - mask_min = samples == samples_type_info.min - mask_max = samples == samples_type_info.max - - def HasLongSequence(vector, min_legth=threshold): - """Returns True if there are one or more long sequences of True flags.""" - seq_length = 0 - for b in vector: - seq_length = seq_length + 1 if b else 0 - if seq_length >= min_legth: - return True - return False - - return HasLongSequence(mask_min) or HasLongSequence(mask_max) - - @classmethod - def ApplyImpulseResponse(cls, signal, impulse_response): - """Applies an impulse response to a signal. - - Args: - signal: AudioSegment instance. - impulse_response: list or numpy vector of float values. - - Returns: - AudioSegment instance. - """ - # Get samples. - assert signal.channels == 1, ( - 'multiple-channel recordings not supported') - samples = signal.get_array_of_samples() - - # Convolve. - logging.info( - 'applying %d order impulse response to a signal lasting %d ms', - len(impulse_response), len(signal)) - convolved_samples = scipy.signal.fftconvolve(in1=samples, - in2=impulse_response, - mode='full').astype( - np.int16) - logging.info('convolution computed') - - # Cast. - convolved_samples = array.array(signal.array_type, convolved_samples) - - # Verify. - logging.debug('signal length: %d samples', len(samples)) - logging.debug('convolved signal length: %d samples', - len(convolved_samples)) - assert len(convolved_samples) > len(samples) - - # Generate convolved signal AudioSegment instance. 
- convolved_signal = pydub.AudioSegment(data=convolved_samples, - metadata={ - 'sample_width': - signal.sample_width, - 'frame_rate': - signal.frame_rate, - 'frame_width': - signal.frame_width, - 'channels': signal.channels, - }) - assert len(convolved_signal) > len(signal) - - return convolved_signal - - @classmethod - def Normalize(cls, signal): - """Normalizes a signal. - - Args: - signal: AudioSegment instance. - - Returns: - An AudioSegment instance. - """ - return signal.apply_gain(-signal.max_dBFS) - - @classmethod - def Copy(cls, signal): - """Makes a copy os a signal. - - Args: - signal: AudioSegment instance. - - Returns: - An AudioSegment instance. - """ - return pydub.AudioSegment(data=signal.get_array_of_samples(), - metadata={ - 'sample_width': signal.sample_width, - 'frame_rate': signal.frame_rate, - 'frame_width': signal.frame_width, - 'channels': signal.channels, - }) - - @classmethod - def MixSignals(cls, - signal, - noise, - target_snr=0.0, - pad_noise=MixPadding.NO_PADDING): - """Mixes `signal` and `noise` with a target SNR. - - Mix `signal` and `noise` with a desired SNR by scaling `noise`. - If the target SNR is +/- infinite, a copy of signal/noise is returned. - If `signal` is shorter than `noise`, the length of the mix equals that of - `signal`. Otherwise, the mix length depends on whether padding is applied. - When padding is not applied, that is `pad_noise` is set to NO_PADDING - (default), the mix length equals that of `noise` - i.e., `signal` is - truncated. Otherwise, `noise` is extended and the resulting mix has the same - length of `signal`. - - Args: - signal: AudioSegment instance (signal). - noise: AudioSegment instance (noise). - target_snr: float, numpy.Inf or -numpy.Inf (dB). - pad_noise: SignalProcessingUtils.MixPadding, default: NO_PADDING. - - Returns: - An AudioSegment instance. - """ - # Handle infinite target SNR. - if target_snr == -np.Inf: - # Return a copy of noise. - logging.warning('SNR = -Inf, returning noise') - return cls.Copy(noise) - elif target_snr == np.Inf: - # Return a copy of signal. - logging.warning('SNR = +Inf, returning signal') - return cls.Copy(signal) - - # Check signal and noise power. - signal_power = float(signal.dBFS) - noise_power = float(noise.dBFS) - if signal_power == -np.Inf: - logging.error('signal has -Inf power, cannot mix') - raise exceptions.SignalProcessingException( - 'cannot mix a signal with -Inf power') - if noise_power == -np.Inf: - logging.error('noise has -Inf power, cannot mix') - raise exceptions.SignalProcessingException( - 'cannot mix a signal with -Inf power') - - # Mix. - gain_db = signal_power - noise_power - target_snr - signal_duration = len(signal) - noise_duration = len(noise) - if signal_duration <= noise_duration: - # Ignore `pad_noise`, `noise` is truncated if longer that `signal`, the - # mix will have the same length of `signal`. - return signal.overlay(noise.apply_gain(gain_db)) - elif pad_noise == cls.MixPadding.NO_PADDING: - # `signal` is longer than `noise`, but no padding is applied to `noise`. - # Truncate `signal`. - return noise.overlay(signal, gain_during_overlay=gain_db) - elif pad_noise == cls.MixPadding.ZERO_PADDING: - # TODO(alessiob): Check that this works as expected. - return signal.overlay(noise.apply_gain(gain_db)) - elif pad_noise == cls.MixPadding.LOOP: - # `signal` is longer than `noise`, extend `noise` by looping. 
- return signal.overlay(noise.apply_gain(gain_db), loop=True) - else: - raise exceptions.SignalProcessingException('invalid padding type') diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing_unittest.py deleted file mode 100644 index 881fb66800..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing_unittest.py +++ /dev/null @@ -1,183 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Unit tests for the signal_processing module. -""" - -import unittest - -import numpy as np -import pydub - -from . import exceptions -from . import signal_processing - - -class TestSignalProcessing(unittest.TestCase): - """Unit tests for the signal_processing module. - """ - - def testMixSignals(self): - # Generate a template signal with which white noise can be generated. - silence = pydub.AudioSegment.silent(duration=1000, frame_rate=48000) - - # Generate two distinct AudioSegment instances with 1 second of white noise. - signal = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - silence) - noise = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - silence) - - # Extract samples. - signal_samples = signal.get_array_of_samples() - noise_samples = noise.get_array_of_samples() - - # Test target SNR -Inf (noise expected). - mix_neg_inf = signal_processing.SignalProcessingUtils.MixSignals( - signal, noise, -np.Inf) - self.assertTrue(len(noise), len(mix_neg_inf)) # Check duration. - mix_neg_inf_samples = mix_neg_inf.get_array_of_samples() - self.assertTrue( # Check samples. - all([x == y for x, y in zip(noise_samples, mix_neg_inf_samples)])) - - # Test target SNR 0.0 (different data expected). - mix_0 = signal_processing.SignalProcessingUtils.MixSignals( - signal, noise, 0.0) - self.assertTrue(len(signal), len(mix_0)) # Check duration. - self.assertTrue(len(noise), len(mix_0)) - mix_0_samples = mix_0.get_array_of_samples() - self.assertTrue( - any([x != y for x, y in zip(signal_samples, mix_0_samples)])) - self.assertTrue( - any([x != y for x, y in zip(noise_samples, mix_0_samples)])) - - # Test target SNR +Inf (signal expected). - mix_pos_inf = signal_processing.SignalProcessingUtils.MixSignals( - signal, noise, np.Inf) - self.assertTrue(len(signal), len(mix_pos_inf)) # Check duration. - mix_pos_inf_samples = mix_pos_inf.get_array_of_samples() - self.assertTrue( # Check samples. - all([x == y for x, y in zip(signal_samples, mix_pos_inf_samples)])) - - def testMixSignalsMinInfPower(self): - silence = pydub.AudioSegment.silent(duration=1000, frame_rate=48000) - signal = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - silence) - - with self.assertRaises(exceptions.SignalProcessingException): - _ = signal_processing.SignalProcessingUtils.MixSignals( - signal, silence, 0.0) - - with self.assertRaises(exceptions.SignalProcessingException): - _ = signal_processing.SignalProcessingUtils.MixSignals( - silence, signal, 0.0) - - def testMixSignalNoiseDifferentLengths(self): - # Test signals. 
- shorter = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - pydub.AudioSegment.silent(duration=1000, frame_rate=8000)) - longer = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - pydub.AudioSegment.silent(duration=2000, frame_rate=8000)) - - # When the signal is shorter than the noise, the mix length always equals - # that of the signal regardless of whether padding is applied. - # No noise padding, length of signal less than that of noise. - mix = signal_processing.SignalProcessingUtils.MixSignals( - signal=shorter, - noise=longer, - pad_noise=signal_processing.SignalProcessingUtils.MixPadding. - NO_PADDING) - self.assertEqual(len(shorter), len(mix)) - # With noise padding, length of signal less than that of noise. - mix = signal_processing.SignalProcessingUtils.MixSignals( - signal=shorter, - noise=longer, - pad_noise=signal_processing.SignalProcessingUtils.MixPadding. - ZERO_PADDING) - self.assertEqual(len(shorter), len(mix)) - - # When the signal is longer than the noise, the mix length depends on - # whether padding is applied. - # No noise padding, length of signal greater than that of noise. - mix = signal_processing.SignalProcessingUtils.MixSignals( - signal=longer, - noise=shorter, - pad_noise=signal_processing.SignalProcessingUtils.MixPadding. - NO_PADDING) - self.assertEqual(len(shorter), len(mix)) - # With noise padding, length of signal greater than that of noise. - mix = signal_processing.SignalProcessingUtils.MixSignals( - signal=longer, - noise=shorter, - pad_noise=signal_processing.SignalProcessingUtils.MixPadding. - ZERO_PADDING) - self.assertEqual(len(longer), len(mix)) - - def testMixSignalNoisePaddingTypes(self): - # Test signals. - shorter = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - pydub.AudioSegment.silent(duration=1000, frame_rate=8000)) - longer = signal_processing.SignalProcessingUtils.GeneratePureTone( - pydub.AudioSegment.silent(duration=2000, frame_rate=8000), 440.0) - - # Zero padding: expect pure tone only in 1-2s. - mix_zero_pad = signal_processing.SignalProcessingUtils.MixSignals( - signal=longer, - noise=shorter, - target_snr=-6, - pad_noise=signal_processing.SignalProcessingUtils.MixPadding. - ZERO_PADDING) - - # Loop: expect pure tone plus noise in 1-2s. - mix_loop = signal_processing.SignalProcessingUtils.MixSignals( - signal=longer, - noise=shorter, - target_snr=-6, - pad_noise=signal_processing.SignalProcessingUtils.MixPadding.LOOP) - - def Energy(signal): - samples = signal_processing.SignalProcessingUtils.AudioSegmentToRawData( - signal).astype(np.float32) - return np.sum(samples * samples) - - e_mix_zero_pad = Energy(mix_zero_pad[-1000:]) - e_mix_loop = Energy(mix_loop[-1000:]) - self.assertLess(0, e_mix_zero_pad) - self.assertLess(e_mix_zero_pad, e_mix_loop) - - def testMixSignalSnr(self): - # Test signals. 
- tone_low = signal_processing.SignalProcessingUtils.GeneratePureTone( - pydub.AudioSegment.silent(duration=64, frame_rate=8000), 250.0) - tone_high = signal_processing.SignalProcessingUtils.GeneratePureTone( - pydub.AudioSegment.silent(duration=64, frame_rate=8000), 3000.0) - - def ToneAmplitudes(mix): - """Returns the amplitude of the coefficients #16 and #192, which - correspond to the tones at 250 and 3k Hz respectively.""" - mix_fft = np.absolute( - signal_processing.SignalProcessingUtils.Fft(mix)) - return mix_fft[16], mix_fft[192] - - mix = signal_processing.SignalProcessingUtils.MixSignals( - signal=tone_low, noise=tone_high, target_snr=-6) - ampl_low, ampl_high = ToneAmplitudes(mix) - self.assertLess(ampl_low, ampl_high) - - mix = signal_processing.SignalProcessingUtils.MixSignals( - signal=tone_high, noise=tone_low, target_snr=-6) - ampl_low, ampl_high = ToneAmplitudes(mix) - self.assertLess(ampl_high, ampl_low) - - mix = signal_processing.SignalProcessingUtils.MixSignals( - signal=tone_low, noise=tone_high, target_snr=6) - ampl_low, ampl_high = ToneAmplitudes(mix) - self.assertLess(ampl_high, ampl_low) - - mix = signal_processing.SignalProcessingUtils.MixSignals( - signal=tone_high, noise=tone_low, target_snr=6) - ampl_low, ampl_high = ToneAmplitudes(mix) - self.assertLess(ampl_low, ampl_high) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation.py deleted file mode 100644 index 69b3a1624e..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation.py +++ /dev/null @@ -1,446 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""APM module simulator. -""" - -import logging -import os - -from . import annotations -from . import data_access -from . import echo_path_simulation -from . import echo_path_simulation_factory -from . import eval_scores -from . import exceptions -from . import input_mixer -from . import input_signal_creator -from . import signal_processing -from . import test_data_generation - - -class ApmModuleSimulator(object): - """Audio processing module (APM) simulator class. 
- """ - - _TEST_DATA_GENERATOR_CLASSES = ( - test_data_generation.TestDataGenerator.REGISTERED_CLASSES) - _EVAL_SCORE_WORKER_CLASSES = eval_scores.EvaluationScore.REGISTERED_CLASSES - - _PREFIX_APM_CONFIG = 'apmcfg-' - _PREFIX_CAPTURE = 'capture-' - _PREFIX_RENDER = 'render-' - _PREFIX_ECHO_SIMULATOR = 'echosim-' - _PREFIX_TEST_DATA_GEN = 'datagen-' - _PREFIX_TEST_DATA_GEN_PARAMS = 'datagen_params-' - _PREFIX_SCORE = 'score-' - - def __init__(self, - test_data_generator_factory, - evaluation_score_factory, - ap_wrapper, - evaluator, - external_vads=None): - if external_vads is None: - external_vads = {} - self._test_data_generator_factory = test_data_generator_factory - self._evaluation_score_factory = evaluation_score_factory - self._audioproc_wrapper = ap_wrapper - self._evaluator = evaluator - self._annotator = annotations.AudioAnnotationsExtractor( - annotations.AudioAnnotationsExtractor.VadType.ENERGY_THRESHOLD - | annotations.AudioAnnotationsExtractor.VadType.WEBRTC_COMMON_AUDIO - | annotations.AudioAnnotationsExtractor.VadType.WEBRTC_APM, - external_vads) - - # Init. - self._test_data_generator_factory.SetOutputDirectoryPrefix( - self._PREFIX_TEST_DATA_GEN_PARAMS) - self._evaluation_score_factory.SetScoreFilenamePrefix( - self._PREFIX_SCORE) - - # Properties for each run. - self._base_output_path = None - self._output_cache_path = None - self._test_data_generators = None - self._evaluation_score_workers = None - self._config_filepaths = None - self._capture_input_filepaths = None - self._render_input_filepaths = None - self._echo_path_simulator_class = None - - @classmethod - def GetPrefixApmConfig(cls): - return cls._PREFIX_APM_CONFIG - - @classmethod - def GetPrefixCapture(cls): - return cls._PREFIX_CAPTURE - - @classmethod - def GetPrefixRender(cls): - return cls._PREFIX_RENDER - - @classmethod - def GetPrefixEchoSimulator(cls): - return cls._PREFIX_ECHO_SIMULATOR - - @classmethod - def GetPrefixTestDataGenerator(cls): - return cls._PREFIX_TEST_DATA_GEN - - @classmethod - def GetPrefixTestDataGeneratorParameters(cls): - return cls._PREFIX_TEST_DATA_GEN_PARAMS - - @classmethod - def GetPrefixScore(cls): - return cls._PREFIX_SCORE - - def Run(self, - config_filepaths, - capture_input_filepaths, - test_data_generator_names, - eval_score_names, - output_dir, - render_input_filepaths=None, - echo_path_simulator_name=( - echo_path_simulation.NoEchoPathSimulator.NAME)): - """Runs the APM simulation. - - Initializes paths and required instances, then runs all the simulations. - The render input can be optionally added. If added, the number of capture - input audio tracks and the number of render input audio tracks have to be - equal. The two lists are used to form pairs of capture and render input. - - Args: - config_filepaths: set of APM configuration files to test. - capture_input_filepaths: set of capture input audio track files to test. - test_data_generator_names: set of test data generator names to test. - eval_score_names: set of evaluation score names to test. - output_dir: base path to the output directory for wav files and outcomes. - render_input_filepaths: set of render input audio track files to test. - echo_path_simulator_name: name of the echo path simulator to use when - render input is provided. 
- """ - assert render_input_filepaths is None or ( - len(capture_input_filepaths) == len(render_input_filepaths)), ( - 'render input set size not matching input set size') - assert render_input_filepaths is None or echo_path_simulator_name in ( - echo_path_simulation.EchoPathSimulator.REGISTERED_CLASSES), ( - 'invalid echo path simulator') - self._base_output_path = os.path.abspath(output_dir) - - # Output path used to cache the data shared across simulations. - self._output_cache_path = os.path.join(self._base_output_path, - '_cache') - - # Instance test data generators. - self._test_data_generators = [ - self._test_data_generator_factory.GetInstance( - test_data_generators_class=( - self._TEST_DATA_GENERATOR_CLASSES[name])) - for name in (test_data_generator_names) - ] - - # Instance evaluation score workers. - self._evaluation_score_workers = [ - self._evaluation_score_factory.GetInstance( - evaluation_score_class=self._EVAL_SCORE_WORKER_CLASSES[name]) - for (name) in eval_score_names - ] - - # Set APM configuration file paths. - self._config_filepaths = self._CreatePathsCollection(config_filepaths) - - # Set probing signal file paths. - if render_input_filepaths is None: - # Capture input only. - self._capture_input_filepaths = self._CreatePathsCollection( - capture_input_filepaths) - self._render_input_filepaths = None - else: - # Set both capture and render input signals. - self._SetTestInputSignalFilePaths(capture_input_filepaths, - render_input_filepaths) - - # Set the echo path simulator class. - self._echo_path_simulator_class = ( - echo_path_simulation.EchoPathSimulator. - REGISTERED_CLASSES[echo_path_simulator_name]) - - self._SimulateAll() - - def _SimulateAll(self): - """Runs all the simulations. - - Iterates over the combinations of APM configurations, probing signals, and - test data generators. This method is mainly responsible for the creation of - the cache and output directories required in order to call _Simulate(). - """ - without_render_input = self._render_input_filepaths is None - - # Try different APM config files. - for config_name in self._config_filepaths: - config_filepath = self._config_filepaths[config_name] - - # Try different capture-render pairs. - for capture_input_name in self._capture_input_filepaths: - # Output path for the capture signal annotations. - capture_annotations_cache_path = os.path.join( - self._output_cache_path, - self._PREFIX_CAPTURE + capture_input_name) - data_access.MakeDirectory(capture_annotations_cache_path) - - # Capture. - capture_input_filepath = self._capture_input_filepaths[ - capture_input_name] - if not os.path.exists(capture_input_filepath): - # If the input signal file does not exist, try to create using the - # available input signal creators. - self._CreateInputSignal(capture_input_filepath) - assert os.path.exists(capture_input_filepath) - self._ExtractCaptureAnnotations( - capture_input_filepath, capture_annotations_cache_path) - - # Render and simulated echo path (optional). - render_input_filepath = None if without_render_input else ( - self._render_input_filepaths[capture_input_name]) - render_input_name = '(none)' if without_render_input else ( - self._ExtractFileName(render_input_filepath)) - echo_path_simulator = (echo_path_simulation_factory. - EchoPathSimulatorFactory.GetInstance( - self._echo_path_simulator_class, - render_input_filepath)) - - # Try different test data generators. 
- for test_data_generators in self._test_data_generators: - logging.info( - 'APM config preset: <%s>, capture: <%s>, render: <%s>,' - 'test data generator: <%s>, echo simulator: <%s>', - config_name, capture_input_name, render_input_name, - test_data_generators.NAME, echo_path_simulator.NAME) - - # Output path for the generated test data. - test_data_cache_path = os.path.join( - capture_annotations_cache_path, - self._PREFIX_TEST_DATA_GEN + test_data_generators.NAME) - data_access.MakeDirectory(test_data_cache_path) - logging.debug('test data cache path: <%s>', - test_data_cache_path) - - # Output path for the echo simulator and APM input mixer output. - echo_test_data_cache_path = os.path.join( - test_data_cache_path, - 'echosim-{}'.format(echo_path_simulator.NAME)) - data_access.MakeDirectory(echo_test_data_cache_path) - logging.debug('echo test data cache path: <%s>', - echo_test_data_cache_path) - - # Full output path. - output_path = os.path.join( - self._base_output_path, - self._PREFIX_APM_CONFIG + config_name, - self._PREFIX_CAPTURE + capture_input_name, - self._PREFIX_RENDER + render_input_name, - self._PREFIX_ECHO_SIMULATOR + echo_path_simulator.NAME, - self._PREFIX_TEST_DATA_GEN + test_data_generators.NAME) - data_access.MakeDirectory(output_path) - logging.debug('output path: <%s>', output_path) - - self._Simulate(test_data_generators, - capture_input_filepath, - render_input_filepath, test_data_cache_path, - echo_test_data_cache_path, output_path, - config_filepath, echo_path_simulator) - - @staticmethod - def _CreateInputSignal(input_signal_filepath): - """Creates a missing input signal file. - - The file name is parsed to extract input signal creator and params. If a - creator is matched and the parameters are valid, a new signal is generated - and written in `input_signal_filepath`. - - Args: - input_signal_filepath: Path to the input signal audio file to write. - - Raises: - InputSignalCreatorException - """ - filename = os.path.splitext( - os.path.split(input_signal_filepath)[-1])[0] - filename_parts = filename.split('-') - - if len(filename_parts) < 2: - raise exceptions.InputSignalCreatorException( - 'Cannot parse input signal file name') - - signal, metadata = input_signal_creator.InputSignalCreator.Create( - filename_parts[0], filename_parts[1].split('_')) - - signal_processing.SignalProcessingUtils.SaveWav( - input_signal_filepath, signal) - data_access.Metadata.SaveFileMetadata(input_signal_filepath, metadata) - - def _ExtractCaptureAnnotations(self, - input_filepath, - output_path, - annotation_name=""): - self._annotator.Extract(input_filepath) - self._annotator.Save(output_path, annotation_name) - - def _Simulate(self, test_data_generators, clean_capture_input_filepath, - render_input_filepath, test_data_cache_path, - echo_test_data_cache_path, output_path, config_filepath, - echo_path_simulator): - """Runs a single set of simulation. - - Simulates a given combination of APM configuration, probing signal, and - test data generator. It iterates over the test data generator - internal configurations. - - Args: - test_data_generators: TestDataGenerator instance. - clean_capture_input_filepath: capture input audio track file to be - processed by a test data generator and - not affected by echo. - render_input_filepath: render input audio track file to test. - test_data_cache_path: path for the generated test audio track files. - echo_test_data_cache_path: path for the echo simulator. - output_path: base output path for the test data generator. 
- config_filepath: APM configuration file to test. - echo_path_simulator: EchoPathSimulator instance. - """ - # Generate pairs of noisy input and reference signal files. - test_data_generators.Generate( - input_signal_filepath=clean_capture_input_filepath, - test_data_cache_path=test_data_cache_path, - base_output_path=output_path) - - # Extract metadata linked to the clean input file (if any). - apm_input_metadata = None - try: - apm_input_metadata = data_access.Metadata.LoadFileMetadata( - clean_capture_input_filepath) - except IOError as e: - apm_input_metadata = {} - apm_input_metadata['test_data_gen_name'] = test_data_generators.NAME - apm_input_metadata['test_data_gen_config'] = None - - # For each test data pair, simulate a call and evaluate. - for config_name in test_data_generators.config_names: - logging.info(' - test data generator config: <%s>', config_name) - apm_input_metadata['test_data_gen_config'] = config_name - - # Paths to the test data generator output. - # Note that the reference signal does not depend on the render input - # which is optional. - noisy_capture_input_filepath = ( - test_data_generators.noisy_signal_filepaths[config_name]) - reference_signal_filepath = ( - test_data_generators.reference_signal_filepaths[config_name]) - - # Output path for the evaluation (e.g., APM output file). - evaluation_output_path = test_data_generators.apm_output_paths[ - config_name] - - # Paths to the APM input signals. - echo_path_filepath = echo_path_simulator.Simulate( - echo_test_data_cache_path) - apm_input_filepath = input_mixer.ApmInputMixer.Mix( - echo_test_data_cache_path, noisy_capture_input_filepath, - echo_path_filepath) - - # Extract annotations for the APM input mix. - apm_input_basepath, apm_input_filename = os.path.split( - apm_input_filepath) - self._ExtractCaptureAnnotations( - apm_input_filepath, apm_input_basepath, - os.path.splitext(apm_input_filename)[0] + '-') - - # Simulate a call using APM. - self._audioproc_wrapper.Run( - config_filepath=config_filepath, - capture_input_filepath=apm_input_filepath, - render_input_filepath=render_input_filepath, - output_path=evaluation_output_path) - - try: - # Evaluate. - self._evaluator.Run( - evaluation_score_workers=self._evaluation_score_workers, - apm_input_metadata=apm_input_metadata, - apm_output_filepath=self._audioproc_wrapper. - output_filepath, - reference_input_filepath=reference_signal_filepath, - render_input_filepath=render_input_filepath, - output_path=evaluation_output_path, - ) - - # Save simulation metadata. - data_access.Metadata.SaveAudioTestDataPaths( - output_path=evaluation_output_path, - clean_capture_input_filepath=clean_capture_input_filepath, - echo_free_capture_filepath=noisy_capture_input_filepath, - echo_filepath=echo_path_filepath, - render_filepath=render_input_filepath, - capture_filepath=apm_input_filepath, - apm_output_filepath=self._audioproc_wrapper. - output_filepath, - apm_reference_filepath=reference_signal_filepath, - apm_config_filepath=config_filepath, - ) - except exceptions.EvaluationScoreException as e: - logging.warning('the evaluation failed: %s', e.message) - continue - - def _SetTestInputSignalFilePaths(self, capture_input_filepaths, - render_input_filepaths): - """Sets input and render input file paths collections. - - Pairs the input and render input files by storing the file paths into two - collections. The key is the file name of the input file. - - Args: - capture_input_filepaths: list of file paths. - render_input_filepaths: list of file paths. 
- """ - self._capture_input_filepaths = {} - self._render_input_filepaths = {} - assert len(capture_input_filepaths) == len(render_input_filepaths) - for capture_input_filepath, render_input_filepath in zip( - capture_input_filepaths, render_input_filepaths): - name = self._ExtractFileName(capture_input_filepath) - self._capture_input_filepaths[name] = os.path.abspath( - capture_input_filepath) - self._render_input_filepaths[name] = os.path.abspath( - render_input_filepath) - - @classmethod - def _CreatePathsCollection(cls, filepaths): - """Creates a collection of file paths. - - Given a list of file paths, makes a collection with one item for each file - path. The value is absolute path, the key is the file name without - extenstion. - - Args: - filepaths: list of file paths. - - Returns: - A dict. - """ - filepaths_collection = {} - for filepath in filepaths: - name = cls._ExtractFileName(filepath) - filepaths_collection[name] = os.path.abspath(filepath) - return filepaths_collection - - @classmethod - def _ExtractFileName(cls, filepath): - return os.path.splitext(os.path.split(filepath)[-1])[0] diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation_unittest.py deleted file mode 100644 index 78ca17f589..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/simulation_unittest.py +++ /dev/null @@ -1,203 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Unit tests for the simulation module. -""" - -import logging -import os -import shutil -import tempfile -import unittest - -import mock -import pydub - -from . import audioproc_wrapper -from . import eval_scores_factory -from . import evaluation -from . import external_vad -from . import signal_processing -from . import simulation -from . import test_data_generation_factory - - -class TestApmModuleSimulator(unittest.TestCase): - """Unit tests for the ApmModuleSimulator class. - """ - - def setUp(self): - """Create temporary folders and fake audio track.""" - self._output_path = tempfile.mkdtemp() - self._tmp_path = tempfile.mkdtemp() - - silence = pydub.AudioSegment.silent(duration=1000, frame_rate=48000) - fake_signal = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - silence) - self._fake_audio_track_path = os.path.join(self._output_path, - 'fake.wav') - signal_processing.SignalProcessingUtils.SaveWav( - self._fake_audio_track_path, fake_signal) - - def tearDown(self): - """Recursively delete temporary folders.""" - shutil.rmtree(self._output_path) - shutil.rmtree(self._tmp_path) - - def testSimulation(self): - # Instance dependencies to mock and inject. - ap_wrapper = audioproc_wrapper.AudioProcWrapper( - audioproc_wrapper.AudioProcWrapper.DEFAULT_APM_SIMULATOR_BIN_PATH) - evaluator = evaluation.ApmModuleEvaluator() - ap_wrapper.Run = mock.MagicMock(name='Run') - evaluator.Run = mock.MagicMock(name='Run') - - # Instance non-mocked dependencies. 
- test_data_generator_factory = ( - test_data_generation_factory.TestDataGeneratorFactory( - aechen_ir_database_path='', - noise_tracks_path='', - copy_with_identity=False)) - evaluation_score_factory = eval_scores_factory.EvaluationScoreWorkerFactory( - polqa_tool_bin_path=os.path.join(os.path.dirname(__file__), - 'fake_polqa'), - echo_metric_tool_bin_path=None) - - # Instance simulator. - simulator = simulation.ApmModuleSimulator( - test_data_generator_factory=test_data_generator_factory, - evaluation_score_factory=evaluation_score_factory, - ap_wrapper=ap_wrapper, - evaluator=evaluator, - external_vads={ - 'fake': - external_vad.ExternalVad( - os.path.join(os.path.dirname(__file__), - 'fake_external_vad.py'), 'fake') - }) - - # What to simulate. - config_files = ['apm_configs/default.json'] - input_files = [self._fake_audio_track_path] - test_data_generators = ['identity', 'white_noise'] - eval_scores = ['audio_level_mean', 'polqa'] - - # Run all simulations. - simulator.Run(config_filepaths=config_files, - capture_input_filepaths=input_files, - test_data_generator_names=test_data_generators, - eval_score_names=eval_scores, - output_dir=self._output_path) - - # Check. - # TODO(alessiob): Once the TestDataGenerator classes can be configured by - # the client code (e.g., number of SNR pairs for the white noise test data - # generator), the exact number of calls to ap_wrapper.Run and evaluator.Run - # is known; use that with assertEqual. - min_number_of_simulations = len(config_files) * len(input_files) * len( - test_data_generators) - self.assertGreaterEqual(len(ap_wrapper.Run.call_args_list), - min_number_of_simulations) - self.assertGreaterEqual(len(evaluator.Run.call_args_list), - min_number_of_simulations) - - def testInputSignalCreation(self): - # Instance simulator. - simulator = simulation.ApmModuleSimulator( - test_data_generator_factory=( - test_data_generation_factory.TestDataGeneratorFactory( - aechen_ir_database_path='', - noise_tracks_path='', - copy_with_identity=False)), - evaluation_score_factory=( - eval_scores_factory.EvaluationScoreWorkerFactory( - polqa_tool_bin_path=os.path.join(os.path.dirname(__file__), - 'fake_polqa'), - echo_metric_tool_bin_path=None)), - ap_wrapper=audioproc_wrapper.AudioProcWrapper( - audioproc_wrapper.AudioProcWrapper. - DEFAULT_APM_SIMULATOR_BIN_PATH), - evaluator=evaluation.ApmModuleEvaluator()) - - # Inexistent input files to be silently created. - input_files = [ - os.path.join(self._tmp_path, 'pure_tone-440_1000.wav'), - os.path.join(self._tmp_path, 'pure_tone-1000_500.wav'), - ] - self.assertFalse( - any([os.path.exists(input_file) for input_file in (input_files)])) - - # The input files are created during the simulation. - simulator.Run(config_filepaths=['apm_configs/default.json'], - capture_input_filepaths=input_files, - test_data_generator_names=['identity'], - eval_score_names=['audio_level_peak'], - output_dir=self._output_path) - self.assertTrue( - all([os.path.exists(input_file) for input_file in (input_files)])) - - def testPureToneGenerationWithTotalHarmonicDistorsion(self): - logging.warning = mock.MagicMock(name='warning') - - # Instance simulator. 
- simulator = simulation.ApmModuleSimulator( - test_data_generator_factory=( - test_data_generation_factory.TestDataGeneratorFactory( - aechen_ir_database_path='', - noise_tracks_path='', - copy_with_identity=False)), - evaluation_score_factory=( - eval_scores_factory.EvaluationScoreWorkerFactory( - polqa_tool_bin_path=os.path.join(os.path.dirname(__file__), - 'fake_polqa'), - echo_metric_tool_bin_path=None)), - ap_wrapper=audioproc_wrapper.AudioProcWrapper( - audioproc_wrapper.AudioProcWrapper. - DEFAULT_APM_SIMULATOR_BIN_PATH), - evaluator=evaluation.ApmModuleEvaluator()) - - # What to simulate. - config_files = ['apm_configs/default.json'] - input_files = [os.path.join(self._tmp_path, 'pure_tone-440_1000.wav')] - eval_scores = ['thd'] - - # Should work. - simulator.Run(config_filepaths=config_files, - capture_input_filepaths=input_files, - test_data_generator_names=['identity'], - eval_score_names=eval_scores, - output_dir=self._output_path) - self.assertFalse(logging.warning.called) - - # Warning expected. - simulator.Run( - config_filepaths=config_files, - capture_input_filepaths=input_files, - test_data_generator_names=['white_noise'], # Not allowed with THD. - eval_score_names=eval_scores, - output_dir=self._output_path) - logging.warning.assert_called_with('the evaluation failed: %s', ( - 'The THD score cannot be used with any test data generator other than ' - '"identity"')) - - # # Init. - # generator = test_data_generation.IdentityTestDataGenerator('tmp') - # input_signal_filepath = os.path.join( - # self._test_data_cache_path, 'pure_tone-440_1000.wav') - - # # Check that the input signal is generated. - # self.assertFalse(os.path.exists(input_signal_filepath)) - # generator.Generate( - # input_signal_filepath=input_signal_filepath, - # test_data_cache_path=self._test_data_cache_path, - # base_output_path=self._base_output_path) - # self.assertTrue(os.path.exists(input_signal_filepath)) - - # # Check input signal properties. - # input_signal = signal_processing.SignalProcessingUtils.LoadWav( - # input_signal_filepath) - # self.assertEqual(1000, len(input_signal)) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/sound_level.cc b/modules/audio_processing/test/py_quality_assessment/quality_assessment/sound_level.cc deleted file mode 100644 index 1f24d9d370..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/sound_level.cc +++ /dev/null @@ -1,127 +0,0 @@ -// Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. -// -// Use of this source code is governed by a BSD-style license -// that can be found in the LICENSE file in the root of the source -// tree. An additional intellectual property rights grant can be found -// in the file PATENTS. All contributing project authors may -// be found in the AUTHORS file in the root of the source tree. 
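(For orientation: the deleted sound_level.cc below boils down to per-frame peak metering followed by one-pole attack/decay smoothing, where each smoothing coefficient is the ratio for a 1 dB step raised to frame_ms/time_constant_ms. A minimal Python sketch of that logic follows; the function name and the pure-Python loop are illustrative only, not part of the tool.)

    def smoothed_peak_levels(samples, frame_len, frame_ms=10, attack_ms=5.0, decay_ms=20.0):
        """Per-frame peak level in [0, 1] with attack/decay smoothing (illustrative)."""
        one_db = 10.0 ** (-1.0 / 20.0)  # Plays the same role as DbToRatio(-1.0) in the tool.
        attack = 0.0 if attack_ms == 0 else one_db ** (frame_ms / attack_ms)
        decay = 0.0 if decay_ms == 0 else one_db ** (frame_ms / decay_ms)
        level, levels = 0.0, []
        for start in range(0, len(samples) - frame_len + 1, frame_len):
            peak = max(abs(int(s)) for s in samples[start:start + frame_len]) / 32768.0
            coeff = attack if peak > level else decay  # Rising levels are tracked faster.
            level = (1.0 - coeff) * peak + coeff * level
            levels.append(level)
        return levels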
- -#include -#include -#include -#include - -#include "absl/flags/flag.h" -#include "absl/flags/parse.h" -#include "common_audio/include/audio_util.h" -#include "common_audio/wav_file.h" -#include "rtc_base/logging.h" - -ABSL_FLAG(std::string, i, "", "Input wav file"); -ABSL_FLAG(std::string, oc, "", "Config output file"); -ABSL_FLAG(std::string, ol, "", "Levels output file"); -ABSL_FLAG(float, a, 5.f, "Attack (ms)"); -ABSL_FLAG(float, d, 20.f, "Decay (ms)"); -ABSL_FLAG(int, f, 10, "Frame length (ms)"); - -namespace webrtc { -namespace test { -namespace { - -constexpr int kMaxSampleRate = 48000; -constexpr uint8_t kMaxFrameLenMs = 30; -constexpr size_t kMaxFrameLen = kMaxFrameLenMs * kMaxSampleRate / 1000; - -const double kOneDbReduction = DbToRatio(-1.0); - -int main(int argc, char* argv[]) { - absl::ParseCommandLine(argc, argv); - // Check parameters. - if (absl::GetFlag(FLAGS_f) < 1 || absl::GetFlag(FLAGS_f) > kMaxFrameLenMs) { - RTC_LOG(LS_ERROR) << "Invalid frame length (min: 1, max: " << kMaxFrameLenMs - << ")"; - return 1; - } - if (absl::GetFlag(FLAGS_a) < 0 || absl::GetFlag(FLAGS_d) < 0) { - RTC_LOG(LS_ERROR) << "Attack and decay must be non-negative"; - return 1; - } - - // Open wav input file and check properties. - const std::string input_file = absl::GetFlag(FLAGS_i); - const std::string config_output_file = absl::GetFlag(FLAGS_oc); - const std::string levels_output_file = absl::GetFlag(FLAGS_ol); - WavReader wav_reader(input_file); - if (wav_reader.num_channels() != 1) { - RTC_LOG(LS_ERROR) << "Only mono wav files supported"; - return 1; - } - if (wav_reader.sample_rate() > kMaxSampleRate) { - RTC_LOG(LS_ERROR) << "Beyond maximum sample rate (" << kMaxSampleRate - << ")"; - return 1; - } - - // Map from milliseconds to samples. - const size_t audio_frame_length = rtc::CheckedDivExact( - absl::GetFlag(FLAGS_f) * wav_reader.sample_rate(), 1000); - auto time_const = [](double c) { - return std::pow(kOneDbReduction, absl::GetFlag(FLAGS_f) / c); - }; - const float attack = - absl::GetFlag(FLAGS_a) == 0.0 ? 0.0 : time_const(absl::GetFlag(FLAGS_a)); - const float decay = - absl::GetFlag(FLAGS_d) == 0.0 ? 0.0 : time_const(absl::GetFlag(FLAGS_d)); - - // Write config to file. - std::ofstream out_config(config_output_file); - out_config << "{" - "'frame_len_ms': " - << absl::GetFlag(FLAGS_f) - << ", " - "'attack_ms': " - << absl::GetFlag(FLAGS_a) - << ", " - "'decay_ms': " - << absl::GetFlag(FLAGS_d) << "}\n"; - out_config.close(); - - // Measure level frame-by-frame. - std::ofstream out_levels(levels_output_file, std::ofstream::binary); - std::array samples; - float level_prev = 0.f; - while (true) { - // Process frame. - const auto read_samples = - wav_reader.ReadSamples(audio_frame_length, samples.data()); - if (read_samples < audio_frame_length) - break; // EOF. - - // Frame peak level. - std::transform(samples.begin(), samples.begin() + audio_frame_length, - samples.begin(), [](int16_t s) { return std::abs(s); }); - const int16_t peak_level = *std::max_element( - samples.cbegin(), samples.cbegin() + audio_frame_length); - const float level_curr = static_cast(peak_level) / 32768.f; - - // Temporal smoothing. - auto smooth = [&level_prev, &level_curr](float c) { - return (1.0 - c) * level_curr + c * level_prev; - }; - level_prev = smooth(level_curr > level_prev ? attack : decay); - - // Write output. 
- out_levels.write(reinterpret_cast(&level_prev), sizeof(float)); - } - out_levels.close(); - - return 0; -} - -} // namespace -} // namespace test -} // namespace webrtc - -int main(int argc, char* argv[]) { - return webrtc::test::main(argc, argv); -} diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation.py deleted file mode 100644 index 7e86faccec..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation.py +++ /dev/null @@ -1,526 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Test data generators producing signals pairs intended to be used to -test the APM module. Each pair consists of a noisy input and a reference signal. -The former is used as APM input and it is generated by adding noise to a -clean audio track. The reference is the expected APM output. - -Throughout this file, the following naming convention is used: - - input signal: the clean signal (e.g., speech), - - noise signal: the noise to be summed up to the input signal (e.g., white - noise, Gaussian noise), - - noisy signal: input + noise. -The noise signal may or may not be a function of the clean signal. For -instance, white noise is independently generated, whereas reverberation is -obtained by convolving the input signal with an impulse response. -""" - -import logging -import os -import shutil -import sys - -try: - import scipy.io -except ImportError: - logging.critical('Cannot import the third-party Python package scipy') - sys.exit(1) - -from . import data_access -from . import exceptions -from . import signal_processing - - -class TestDataGenerator(object): - """Abstract class responsible for the generation of noisy signals. - - Given a clean signal, it generates two streams named noisy signal and - reference. The former is the clean signal deteriorated by the noise source, - the latter goes through the same deterioration process, but more "gently". - Noisy signal and reference are produced so that the reference is the signal - expected at the output of the APM module when the latter is fed with the noisy - signal. - - An test data generator generates one or more pairs. - """ - - NAME = None - REGISTERED_CLASSES = {} - - def __init__(self, output_directory_prefix): - self._output_directory_prefix = output_directory_prefix - # Init dictionaries with one entry for each test data generator - # configuration (e.g., different SNRs). - # Noisy audio track files (stored separately in a cache folder). - self._noisy_signal_filepaths = None - # Path to be used for the APM simulation output files. - self._apm_output_paths = None - # Reference audio track files (stored separately in a cache folder). - self._reference_signal_filepaths = None - self.Clear() - - @classmethod - def RegisterClass(cls, class_to_register): - """Registers a TestDataGenerator implementation. - - Decorator to automatically register the classes that extend - TestDataGenerator. 
- Example usage: - - @TestDataGenerator.RegisterClass - class IdentityGenerator(TestDataGenerator): - pass - """ - cls.REGISTERED_CLASSES[class_to_register.NAME] = class_to_register - return class_to_register - - @property - def config_names(self): - return self._noisy_signal_filepaths.keys() - - @property - def noisy_signal_filepaths(self): - return self._noisy_signal_filepaths - - @property - def apm_output_paths(self): - return self._apm_output_paths - - @property - def reference_signal_filepaths(self): - return self._reference_signal_filepaths - - def Generate(self, input_signal_filepath, test_data_cache_path, - base_output_path): - """Generates a set of noisy input and reference audiotrack file pairs. - - This method initializes an empty set of pairs and calls the _Generate() - method implemented in a concrete class. - - Args: - input_signal_filepath: path to the clean input audio track file. - test_data_cache_path: path to the cache of the generated audio track - files. - base_output_path: base path where output is written. - """ - self.Clear() - self._Generate(input_signal_filepath, test_data_cache_path, - base_output_path) - - def Clear(self): - """Clears the generated output path dictionaries. - """ - self._noisy_signal_filepaths = {} - self._apm_output_paths = {} - self._reference_signal_filepaths = {} - - def _Generate(self, input_signal_filepath, test_data_cache_path, - base_output_path): - """Abstract method to be implemented in each concrete class. - """ - raise NotImplementedError() - - def _AddNoiseSnrPairs(self, base_output_path, noisy_mix_filepaths, - snr_value_pairs): - """Adds noisy-reference signal pairs. - - Args: - base_output_path: noisy tracks base output path. - noisy_mix_filepaths: nested dictionary of noisy signal paths organized - by noisy track name and SNR level. - snr_value_pairs: list of SNR pairs. - """ - for noise_track_name in noisy_mix_filepaths: - for snr_noisy, snr_refence in snr_value_pairs: - config_name = '{0}_{1:d}_{2:d}_SNR'.format( - noise_track_name, snr_noisy, snr_refence) - output_path = self._MakeDir(base_output_path, config_name) - self._AddNoiseReferenceFilesPair( - config_name=config_name, - noisy_signal_filepath=noisy_mix_filepaths[noise_track_name] - [snr_noisy], - reference_signal_filepath=noisy_mix_filepaths[ - noise_track_name][snr_refence], - output_path=output_path) - - def _AddNoiseReferenceFilesPair(self, config_name, noisy_signal_filepath, - reference_signal_filepath, output_path): - """Adds one noisy-reference signal pair. - - Args: - config_name: name of the APM configuration. - noisy_signal_filepath: path to noisy audio track file. - reference_signal_filepath: path to reference audio track file. - output_path: APM output path. - """ - assert config_name not in self._noisy_signal_filepaths - self._noisy_signal_filepaths[config_name] = os.path.abspath( - noisy_signal_filepath) - self._apm_output_paths[config_name] = os.path.abspath(output_path) - self._reference_signal_filepaths[config_name] = os.path.abspath( - reference_signal_filepath) - - def _MakeDir(self, base_output_path, test_data_generator_config_name): - output_path = os.path.join( - base_output_path, - self._output_directory_prefix + test_data_generator_config_name) - data_access.MakeDirectory(output_path) - return output_path - - -@TestDataGenerator.RegisterClass -class IdentityTestDataGenerator(TestDataGenerator): - """Generator that adds no noise. - - Both the noisy and the reference signals are the input signal. 
- """ - - NAME = 'identity' - - def __init__(self, output_directory_prefix, copy_with_identity): - TestDataGenerator.__init__(self, output_directory_prefix) - self._copy_with_identity = copy_with_identity - - @property - def copy_with_identity(self): - return self._copy_with_identity - - def _Generate(self, input_signal_filepath, test_data_cache_path, - base_output_path): - config_name = 'default' - output_path = self._MakeDir(base_output_path, config_name) - - if self._copy_with_identity: - input_signal_filepath_new = os.path.join( - test_data_cache_path, - os.path.split(input_signal_filepath)[1]) - logging.info('copying ' + input_signal_filepath + ' to ' + - (input_signal_filepath_new)) - shutil.copy(input_signal_filepath, input_signal_filepath_new) - input_signal_filepath = input_signal_filepath_new - - self._AddNoiseReferenceFilesPair( - config_name=config_name, - noisy_signal_filepath=input_signal_filepath, - reference_signal_filepath=input_signal_filepath, - output_path=output_path) - - -@TestDataGenerator.RegisterClass -class WhiteNoiseTestDataGenerator(TestDataGenerator): - """Generator that adds white noise. - """ - - NAME = 'white_noise' - - # Each pair indicates the clean vs. noisy and reference vs. noisy SNRs. - # The reference (second value of each pair) always has a lower amount of noise - # - i.e., the SNR is 10 dB higher. - _SNR_VALUE_PAIRS = [ - [20, 30], # Smallest noise. - [10, 20], - [5, 15], - [0, 10], # Largest noise. - ] - - _NOISY_SIGNAL_FILENAME_TEMPLATE = 'noise_{0:d}_SNR.wav' - - def __init__(self, output_directory_prefix): - TestDataGenerator.__init__(self, output_directory_prefix) - - def _Generate(self, input_signal_filepath, test_data_cache_path, - base_output_path): - # Load the input signal. - input_signal = signal_processing.SignalProcessingUtils.LoadWav( - input_signal_filepath) - - # Create the noise track. - noise_signal = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( - input_signal) - - # Create the noisy mixes (once for each unique SNR value). - noisy_mix_filepaths = {} - snr_values = set( - [snr for pair in self._SNR_VALUE_PAIRS for snr in pair]) - for snr in snr_values: - noisy_signal_filepath = os.path.join( - test_data_cache_path, - self._NOISY_SIGNAL_FILENAME_TEMPLATE.format(snr)) - - # Create and save if not done. - if not os.path.exists(noisy_signal_filepath): - # Create noisy signal. - noisy_signal = signal_processing.SignalProcessingUtils.MixSignals( - input_signal, noise_signal, snr) - - # Save. - signal_processing.SignalProcessingUtils.SaveWav( - noisy_signal_filepath, noisy_signal) - - # Add file to the collection of mixes. - noisy_mix_filepaths[snr] = noisy_signal_filepath - - # Add all the noisy-reference signal pairs. - for snr_noisy, snr_refence in self._SNR_VALUE_PAIRS: - config_name = '{0:d}_{1:d}_SNR'.format(snr_noisy, snr_refence) - output_path = self._MakeDir(base_output_path, config_name) - self._AddNoiseReferenceFilesPair( - config_name=config_name, - noisy_signal_filepath=noisy_mix_filepaths[snr_noisy], - reference_signal_filepath=noisy_mix_filepaths[snr_refence], - output_path=output_path) - - -# TODO(alessiob): remove comment when class implemented. -# @TestDataGenerator.RegisterClass -class NarrowBandNoiseTestDataGenerator(TestDataGenerator): - """Generator that adds narrow-band noise. 
- """ - - NAME = 'narrow_band_noise' - - def __init__(self, output_directory_prefix): - TestDataGenerator.__init__(self, output_directory_prefix) - - def _Generate(self, input_signal_filepath, test_data_cache_path, - base_output_path): - # TODO(alessiob): implement. - pass - - -@TestDataGenerator.RegisterClass -class AdditiveNoiseTestDataGenerator(TestDataGenerator): - """Generator that adds noise loops. - - This generator uses all the wav files in a given path (default: noise_tracks/) - and mixes them to the clean speech with different target SNRs (hard-coded). - """ - - NAME = 'additive_noise' - _NOISY_SIGNAL_FILENAME_TEMPLATE = '{0}_{1:d}_SNR.wav' - - DEFAULT_NOISE_TRACKS_PATH = os.path.join(os.path.dirname(__file__), - os.pardir, 'noise_tracks') - - # TODO(alessiob): Make the list of SNR pairs customizable. - # Each pair indicates the clean vs. noisy and reference vs. noisy SNRs. - # The reference (second value of each pair) always has a lower amount of noise - # - i.e., the SNR is 10 dB higher. - _SNR_VALUE_PAIRS = [ - [20, 30], # Smallest noise. - [10, 20], - [5, 15], - [0, 10], # Largest noise. - ] - - def __init__(self, output_directory_prefix, noise_tracks_path): - TestDataGenerator.__init__(self, output_directory_prefix) - self._noise_tracks_path = noise_tracks_path - self._noise_tracks_file_names = [ - n for n in os.listdir(self._noise_tracks_path) - if n.lower().endswith('.wav') - ] - if len(self._noise_tracks_file_names) == 0: - raise exceptions.InitializationException( - 'No wav files found in the noise tracks path %s' % - (self._noise_tracks_path)) - - def _Generate(self, input_signal_filepath, test_data_cache_path, - base_output_path): - """Generates test data pairs using environmental noise. - - For each noise track and pair of SNR values, the following two audio tracks - are created: the noisy signal and the reference signal. The former is - obtained by mixing the (clean) input signal to the corresponding noise - track enforcing the target SNR. - """ - # Init. - snr_values = set( - [snr for pair in self._SNR_VALUE_PAIRS for snr in pair]) - - # Load the input signal. - input_signal = signal_processing.SignalProcessingUtils.LoadWav( - input_signal_filepath) - - noisy_mix_filepaths = {} - for noise_track_filename in self._noise_tracks_file_names: - # Load the noise track. - noise_track_name, _ = os.path.splitext(noise_track_filename) - noise_track_filepath = os.path.join(self._noise_tracks_path, - noise_track_filename) - if not os.path.exists(noise_track_filepath): - logging.error('cannot find the <%s> noise track', - noise_track_filename) - raise exceptions.FileNotFoundError() - - noise_signal = signal_processing.SignalProcessingUtils.LoadWav( - noise_track_filepath) - - # Create the noisy mixes (once for each unique SNR value). - noisy_mix_filepaths[noise_track_name] = {} - for snr in snr_values: - noisy_signal_filepath = os.path.join( - test_data_cache_path, - self._NOISY_SIGNAL_FILENAME_TEMPLATE.format( - noise_track_name, snr)) - - # Create and save if not done. - if not os.path.exists(noisy_signal_filepath): - # Create noisy signal. - noisy_signal = signal_processing.SignalProcessingUtils.MixSignals( - input_signal, - noise_signal, - snr, - pad_noise=signal_processing.SignalProcessingUtils. - MixPadding.LOOP) - - # Save. - signal_processing.SignalProcessingUtils.SaveWav( - noisy_signal_filepath, noisy_signal) - - # Add file to the collection of mixes. - noisy_mix_filepaths[noise_track_name][ - snr] = noisy_signal_filepath - - # Add all the noise-SNR pairs. 
- self._AddNoiseSnrPairs(base_output_path, noisy_mix_filepaths, - self._SNR_VALUE_PAIRS) - - -@TestDataGenerator.RegisterClass -class ReverberationTestDataGenerator(TestDataGenerator): - """Generator that adds reverberation noise. - - TODO(alessiob): Make this class more generic since the impulse response can be - anything (not just reverberation); call it e.g., - ConvolutionalNoiseTestDataGenerator. - """ - - NAME = 'reverberation' - - _IMPULSE_RESPONSES = { - 'lecture': 'air_binaural_lecture_0_0_1.mat', # Long echo. - 'booth': 'air_binaural_booth_0_0_1.mat', # Short echo. - } - _MAX_IMPULSE_RESPONSE_LENGTH = None - - # Each pair indicates the clean vs. noisy and reference vs. noisy SNRs. - # The reference (second value of each pair) always has a lower amount of noise - # - i.e., the SNR is 5 dB higher. - _SNR_VALUE_PAIRS = [ - [3, 8], # Smallest noise. - [-3, 2], # Largest noise. - ] - - _NOISE_TRACK_FILENAME_TEMPLATE = '{0}.wav' - _NOISY_SIGNAL_FILENAME_TEMPLATE = '{0}_{1:d}_SNR.wav' - - def __init__(self, output_directory_prefix, aechen_ir_database_path): - TestDataGenerator.__init__(self, output_directory_prefix) - self._aechen_ir_database_path = aechen_ir_database_path - - def _Generate(self, input_signal_filepath, test_data_cache_path, - base_output_path): - """Generates test data pairs using reverberation noise. - - For each impulse response, one noise track is created. For each impulse - response and pair of SNR values, the following 2 audio tracks are - created: the noisy signal and the reference signal. The former is - obtained by mixing the (clean) input signal to the corresponding noise - track enforcing the target SNR. - """ - # Init. - snr_values = set( - [snr for pair in self._SNR_VALUE_PAIRS for snr in pair]) - - # Load the input signal. - input_signal = signal_processing.SignalProcessingUtils.LoadWav( - input_signal_filepath) - - noisy_mix_filepaths = {} - for impulse_response_name in self._IMPULSE_RESPONSES: - noise_track_filename = self._NOISE_TRACK_FILENAME_TEMPLATE.format( - impulse_response_name) - noise_track_filepath = os.path.join(test_data_cache_path, - noise_track_filename) - noise_signal = None - try: - # Load noise track. - noise_signal = signal_processing.SignalProcessingUtils.LoadWav( - noise_track_filepath) - except exceptions.FileNotFoundError: - # Generate noise track by applying the impulse response. - impulse_response_filepath = os.path.join( - self._aechen_ir_database_path, - self._IMPULSE_RESPONSES[impulse_response_name]) - noise_signal = self._GenerateNoiseTrack( - noise_track_filepath, input_signal, - impulse_response_filepath) - assert noise_signal is not None - - # Create the noisy mixes (once for each unique SNR value). - noisy_mix_filepaths[impulse_response_name] = {} - for snr in snr_values: - noisy_signal_filepath = os.path.join( - test_data_cache_path, - self._NOISY_SIGNAL_FILENAME_TEMPLATE.format( - impulse_response_name, snr)) - - # Create and save if not done. - if not os.path.exists(noisy_signal_filepath): - # Create noisy signal. - noisy_signal = signal_processing.SignalProcessingUtils.MixSignals( - input_signal, noise_signal, snr) - - # Save. - signal_processing.SignalProcessingUtils.SaveWav( - noisy_signal_filepath, noisy_signal) - - # Add file to the collection of mixes. - noisy_mix_filepaths[impulse_response_name][ - snr] = noisy_signal_filepath - - # Add all the noise-SNR pairs. 
- self._AddNoiseSnrPairs(base_output_path, noisy_mix_filepaths, - self._SNR_VALUE_PAIRS) - - def _GenerateNoiseTrack(self, noise_track_filepath, input_signal, - impulse_response_filepath): - """Generates noise track. - - Generate a signal by convolving input_signal with the impulse response in - impulse_response_filepath; then save to noise_track_filepath. - - Args: - noise_track_filepath: output file path for the noise track. - input_signal: (clean) input signal samples. - impulse_response_filepath: impulse response file path. - - Returns: - AudioSegment instance. - """ - # Load impulse response. - data = scipy.io.loadmat(impulse_response_filepath) - impulse_response = data['h_air'].flatten() - if self._MAX_IMPULSE_RESPONSE_LENGTH is not None: - logging.info('truncating impulse response from %d to %d samples', - len(impulse_response), - self._MAX_IMPULSE_RESPONSE_LENGTH) - impulse_response = impulse_response[:self. - _MAX_IMPULSE_RESPONSE_LENGTH] - - # Apply impulse response. - processed_signal = ( - signal_processing.SignalProcessingUtils.ApplyImpulseResponse( - input_signal, impulse_response)) - - # Save. - signal_processing.SignalProcessingUtils.SaveWav( - noise_track_filepath, processed_signal) - - return processed_signal diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_factory.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_factory.py deleted file mode 100644 index 948888e775..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_factory.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""TestDataGenerator factory class. -""" - -import logging - -from . import exceptions -from . import test_data_generation - - -class TestDataGeneratorFactory(object): - """Factory class used to create test data generators. - - Usage: Create a factory passing parameters to the ctor with which the - generators will be produced. - """ - - def __init__(self, aechen_ir_database_path, noise_tracks_path, - copy_with_identity): - """Ctor. - - Args: - aechen_ir_database_path: Path to the Aechen Impulse Response database. - noise_tracks_path: Path to the noise tracks to add. - copy_with_identity: Flag indicating whether the identity generator has to - make copies of the clean speech input files. - """ - self._output_directory_prefix = None - self._aechen_ir_database_path = aechen_ir_database_path - self._noise_tracks_path = noise_tracks_path - self._copy_with_identity = copy_with_identity - - def SetOutputDirectoryPrefix(self, prefix): - self._output_directory_prefix = prefix - - def GetInstance(self, test_data_generators_class): - """Creates an TestDataGenerator instance given a class object. - - Args: - test_data_generators_class: TestDataGenerator class object (not an - instance). - - Returns: - TestDataGenerator instance. 
- """ - if self._output_directory_prefix is None: - raise exceptions.InitializationException( - 'The output directory prefix for test data generators is not set' - ) - logging.debug('factory producing %s', test_data_generators_class) - - if test_data_generators_class == ( - test_data_generation.IdentityTestDataGenerator): - return test_data_generation.IdentityTestDataGenerator( - self._output_directory_prefix, self._copy_with_identity) - elif test_data_generators_class == ( - test_data_generation.ReverberationTestDataGenerator): - return test_data_generation.ReverberationTestDataGenerator( - self._output_directory_prefix, self._aechen_ir_database_path) - elif test_data_generators_class == ( - test_data_generation.AdditiveNoiseTestDataGenerator): - return test_data_generation.AdditiveNoiseTestDataGenerator( - self._output_directory_prefix, self._noise_tracks_path) - else: - return test_data_generators_class(self._output_directory_prefix) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_unittest.py deleted file mode 100644 index f75098ae2c..0000000000 --- a/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_unittest.py +++ /dev/null @@ -1,207 +0,0 @@ -# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Unit tests for the test_data_generation module. -""" - -import os -import shutil -import tempfile -import unittest - -import numpy as np -import scipy.io - -from . import test_data_generation -from . import test_data_generation_factory -from . import signal_processing - - -class TestTestDataGenerators(unittest.TestCase): - """Unit tests for the test_data_generation module. - """ - - def setUp(self): - """Create temporary folders.""" - self._base_output_path = tempfile.mkdtemp() - self._test_data_cache_path = tempfile.mkdtemp() - self._fake_air_db_path = tempfile.mkdtemp() - - # Fake AIR DB impulse responses. - # TODO(alessiob): ReverberationTestDataGenerator will change to allow custom - # impulse responses. When changed, the coupling below between - # impulse_response_mat_file_names and - # ReverberationTestDataGenerator._IMPULSE_RESPONSES can be removed. - impulse_response_mat_file_names = [ - 'air_binaural_lecture_0_0_1.mat', - 'air_binaural_booth_0_0_1.mat', - ] - for impulse_response_mat_file_name in impulse_response_mat_file_names: - data = {'h_air': np.random.rand(1, 1000).astype(' -#include -#include - -#include "absl/flags/flag.h" -#include "absl/flags/parse.h" -#include "common_audio/wav_file.h" -#include "rtc_base/logging.h" - -ABSL_FLAG(std::string, i, "", "Input wav file"); -ABSL_FLAG(std::string, o, "", "VAD output file"); - -namespace webrtc { -namespace test { -namespace { - -// The allowed values are 10, 20 or 30 ms. 
-constexpr uint8_t kAudioFrameLengthMilliseconds = 30; -constexpr int kMaxSampleRate = 48000; -constexpr size_t kMaxFrameLen = - kAudioFrameLengthMilliseconds * kMaxSampleRate / 1000; - -constexpr uint8_t kBitmaskBuffSize = 8; - -int main(int argc, char* argv[]) { - absl::ParseCommandLine(argc, argv); - const std::string input_file = absl::GetFlag(FLAGS_i); - const std::string output_file = absl::GetFlag(FLAGS_o); - // Open wav input file and check properties. - WavReader wav_reader(input_file); - if (wav_reader.num_channels() != 1) { - RTC_LOG(LS_ERROR) << "Only mono wav files supported"; - return 1; - } - if (wav_reader.sample_rate() > kMaxSampleRate) { - RTC_LOG(LS_ERROR) << "Beyond maximum sample rate (" << kMaxSampleRate - << ")"; - return 1; - } - const size_t audio_frame_length = rtc::CheckedDivExact( - kAudioFrameLengthMilliseconds * wav_reader.sample_rate(), 1000); - if (audio_frame_length > kMaxFrameLen) { - RTC_LOG(LS_ERROR) << "The frame size and/or the sample rate are too large."; - return 1; - } - - // Create output file and write header. - std::ofstream out_file(output_file, std::ofstream::binary); - const char audio_frame_length_ms = kAudioFrameLengthMilliseconds; - out_file.write(&audio_frame_length_ms, 1); // Header. - - // Run VAD and write decisions. - std::unique_ptr<Vad> vad = CreateVad(Vad::Aggressiveness::kVadNormal); - std::array<int16_t, kMaxFrameLen> samples; - char buff = 0; // Buffer to write one bit per frame. - uint8_t next = 0; // Points to the next bit to write in `buff`. - while (true) { - // Process frame. - const auto read_samples = - wav_reader.ReadSamples(audio_frame_length, samples.data()); - if (read_samples < audio_frame_length) - break; - const auto is_speech = vad->VoiceActivity( - samples.data(), audio_frame_length, wav_reader.sample_rate()); - - // Write output. - buff = is_speech ? buff | (1 << next) : buff & ~(1 << next); - if (++next == kBitmaskBuffSize) { - out_file.write(&buff, 1); // Flush. - buff = 0; // Reset. - next = 0; - } - } - - // Finalize. - char extra_bits = 0; - if (next > 0) { - extra_bits = kBitmaskBuffSize - next; - out_file.write(&buff, 1); // Flush. 
- } - out_file.write(&extra_bits, 1); - out_file.close(); - - return 0; -} - -} // namespace -} // namespace test -} // namespace webrtc - -int main(int argc, char* argv[]) { - return webrtc::test::main(argc, argv); -} diff --git a/modules/audio_processing/test/runtime_setting_util.h b/modules/audio_processing/test/runtime_setting_util.h index d8cbe82076..85ed5ecdab 100644 --- a/modules/audio_processing/test/runtime_setting_util.h +++ b/modules/audio_processing/test/runtime_setting_util.h @@ -11,7 +11,7 @@ #ifndef MODULES_AUDIO_PROCESSING_TEST_RUNTIME_SETTING_UTIL_H_ #define MODULES_AUDIO_PROCESSING_TEST_RUNTIME_SETTING_UTIL_H_ -#include "modules/audio_processing/include/audio_processing.h" +#include "api/audio/audio_processing.h" #include "modules/audio_processing/test/protobuf_utils.h" namespace webrtc { diff --git a/modules/audio_processing/test/simulator_buffers.cc b/modules/audio_processing/test/simulator_buffers.cc index 458f6ced76..a47604ee0d 100644 --- a/modules/audio_processing/test/simulator_buffers.cc +++ b/modules/audio_processing/test/simulator_buffers.cc @@ -57,7 +57,7 @@ void SimulatorBuffers::CreateConfigAndBuffer( StreamConfig* config, std::vector* buffer_data, std::vector* buffer_data_samples) { - int samples_per_channel = rtc::CheckedDivExact(sample_rate_hz, 100); + int samples_per_channel = CheckedDivExact(sample_rate_hz, 100); *config = StreamConfig(sample_rate_hz, num_channels); buffer->reset( new AudioBuffer(config->sample_rate_hz(), config->num_channels(), diff --git a/modules/audio_processing/test/simulator_buffers.h b/modules/audio_processing/test/simulator_buffers.h index 36dcf301a2..4e1b086f7b 100644 --- a/modules/audio_processing/test/simulator_buffers.h +++ b/modules/audio_processing/test/simulator_buffers.h @@ -14,8 +14,8 @@ #include #include +#include "api/audio/audio_processing.h" #include "modules/audio_processing/audio_buffer.h" -#include "modules/audio_processing/include/audio_processing.h" #include "rtc_base/random.h" namespace webrtc { diff --git a/modules/audio_processing/test/test_utils.cc b/modules/audio_processing/test/test_utils.cc index 9aeebe5155..fcbd5e495e 100644 --- a/modules/audio_processing/test/test_utils.cc +++ b/modules/audio_processing/test/test_utils.cc @@ -10,6 +10,7 @@ #include "modules/audio_processing/test/test_utils.h" +#include #include #include @@ -19,6 +20,52 @@ namespace webrtc { +void Int16FrameData::CopyFrom(const Int16FrameData& src) { + sample_rate_hz = src.sample_rate_hz; + view_ = InterleavedView(data.data(), src.samples_per_channel(), + src.num_channels()); + RTC_CHECK_LE(view_.size(), kMaxDataSizeSamples); + CopySamples(view_, src.view()); +} + +bool Int16FrameData::IsEqual(const Int16FrameData& frame) const { + return samples_per_channel() == frame.samples_per_channel() && + num_channels() == num_channels() && + memcmp(data.data(), frame.data.data(), + samples_per_channel() * num_channels() * sizeof(int16_t)) == 0; +} + +void Int16FrameData::Scale(float f) { + std::for_each(data.begin(), data.end(), + [f](int16_t& sample) { sample = FloatS16ToS16(sample * f); }); +} + +void Int16FrameData::SetProperties(size_t samples_per_channel, + size_t num_channels) { + sample_rate_hz = samples_per_channel * 100; + view_ = + InterleavedView(data.data(), samples_per_channel, num_channels); + RTC_CHECK_LE(view_.size(), kMaxDataSizeSamples); +} + +void Int16FrameData::set_num_channels(size_t num_channels) { + view_ = InterleavedView(data.data(), samples_per_channel(), + num_channels); + RTC_CHECK_LE(view_.size(), 
kMaxDataSizeSamples); +} + +void Int16FrameData::FillData(int16_t value) { + std::fill(&data[0], &data[size()], value); +} + +void Int16FrameData::FillStereoData(int16_t left, int16_t right) { + RTC_DCHECK_EQ(num_channels(), 2u); + for (size_t i = 0; i < samples_per_channel() * 2u; i += 2u) { + data[i] = left; + data[i + 1] = right; + } +} + ChannelBufferWavReader::ChannelBufferWavReader(std::unique_ptr file) : file_(std::move(file)) {} @@ -46,8 +93,12 @@ ChannelBufferWavWriter::~ChannelBufferWavWriter() = default; void ChannelBufferWavWriter::Write(const ChannelBuffer& buffer) { RTC_CHECK_EQ(file_->num_channels(), buffer.num_channels()); interleaved_.resize(buffer.size()); - Interleave(buffer.channels(), buffer.num_frames(), buffer.num_channels(), - &interleaved_[0]); + InterleavedView view(&interleaved_[0], buffer.num_frames(), + buffer.num_channels()); + const float* samples = buffer.channels()[0]; + DeinterleavedView source(samples, buffer.num_frames(), + buffer.num_channels()); + Interleave(source, view); FloatToFloatS16(&interleaved_[0], interleaved_.size(), &interleaved_[0]); file_->WriteSamples(&interleaved_[0], interleaved_.size()); } @@ -62,8 +113,10 @@ ChannelBufferVectorWriter::~ChannelBufferVectorWriter() = default; void ChannelBufferVectorWriter::Write(const ChannelBuffer& buffer) { // Account for sample rate changes throughout a simulation. interleaved_buffer_.resize(buffer.size()); + InterleavedView view(&interleaved_buffer_[0], buffer.num_frames(), + buffer.num_channels()); Interleave(buffer.channels(), buffer.num_frames(), buffer.num_channels(), - interleaved_buffer_.data()); + view); size_t old_size = output_->size(); output_->resize(old_size + interleaved_buffer_.size()); FloatToFloatS16(interleaved_buffer_.data(), interleaved_buffer_.size(), @@ -80,10 +133,4 @@ FILE* OpenFile(absl::string_view filename, absl::string_view mode) { return file; } -void SetFrameSampleRate(Int16FrameData* frame, int sample_rate_hz) { - frame->sample_rate_hz = sample_rate_hz; - frame->samples_per_channel = - AudioProcessing::kChunkSizeMs * sample_rate_hz / 1000; -} - } // namespace webrtc diff --git a/modules/audio_processing/test/test_utils.h b/modules/audio_processing/test/test_utils.h index bf82f9d66d..90cfb5ccc4 100644 --- a/modules/audio_processing/test/test_utils.h +++ b/modules/audio_processing/test/test_utils.h @@ -16,46 +16,55 @@ #include #include #include -#include // no-presubmit-check TODO(webrtc:8982) #include #include #include "absl/strings/string_view.h" +#include "api/audio/audio_frame.h" +#include "api/audio/audio_processing.h" +#include "api/audio/audio_view.h" #include "common_audio/channel_buffer.h" #include "common_audio/wav_file.h" -#include "modules/audio_processing/include/audio_processing.h" namespace webrtc { static const AudioProcessing::Error kNoErr = AudioProcessing::kNoError; -#define EXPECT_NOERR(expr) EXPECT_EQ(kNoErr, (expr)) +#define EXPECT_NOERR(expr) EXPECT_EQ(AudioProcessing::kNoError, (expr)) // Encapsulates samples and metadata for an integer frame. struct Int16FrameData { // Max data size that matches the data size of the AudioFrame class, providing // storage for 8 channels of 96 kHz data. 
- static const int kMaxDataSizeSamples = 7680; + static const int kMaxDataSizeSamples = AudioFrame::kMaxDataSizeSamples; - Int16FrameData() { - sample_rate_hz = 0; - num_channels = 0; - samples_per_channel = 0; - data.fill(0); - } + Int16FrameData() = default; - void CopyFrom(const Int16FrameData& src) { - samples_per_channel = src.samples_per_channel; - sample_rate_hz = src.sample_rate_hz; - num_channels = src.num_channels; + void CopyFrom(const Int16FrameData& src); + bool IsEqual(const Int16FrameData& frame) const; + void Scale(float f); - const size_t length = samples_per_channel * num_channels; - RTC_CHECK_LE(length, kMaxDataSizeSamples); - memcpy(data.data(), src.data.data(), sizeof(int16_t) * length); - } - std::array data; - int32_t sample_rate_hz; - size_t num_channels; - size_t samples_per_channel; + // Sets `samples_per_channel`, `num_channels` and, implicitly, the sample + // rate. The sample rate is set to 100x that of samples per channel. I.e. if + // samples_per_channel is 320, the sample rate will be set to 32000. + void SetProperties(size_t samples_per_channel, size_t num_channels); + + size_t size() const { return view_.size(); } + size_t samples_per_channel() const { return view_.samples_per_channel(); } + size_t num_channels() const { return view_.num_channels(); } + void set_num_channels(size_t num_channels); + + InterleavedView view() { return view_; } + InterleavedView view() const { return view_; } + + void FillData(int16_t value); + void FillStereoData(int16_t left, int16_t right); + + // public struct members. + std::array data = {}; + int32_t sample_rate_hz = 0; + + private: + InterleavedView view_; }; // Reads ChannelBuffers from a provided WavReader. @@ -115,16 +124,13 @@ class ChannelBufferVectorWriter final { // Exits on failure; do not use in unit tests. FILE* OpenFile(absl::string_view filename, absl::string_view mode); -void SetFrameSampleRate(Int16FrameData* frame, int sample_rate_hz); - template void SetContainerFormat(int sample_rate_hz, size_t num_channels, Int16FrameData* frame, std::unique_ptr >* cb) { - SetFrameSampleRate(frame, sample_rate_hz); - frame->num_channels = num_channels; - cb->reset(new ChannelBuffer(frame->samples_per_channel, num_channels)); + frame->SetProperties(sample_rate_hz / 100, num_channels); + cb->reset(new ChannelBuffer(frame->samples_per_channel(), num_channels)); } template @@ -149,22 +155,6 @@ float ComputeSNR(const T* ref, const T* test, size_t length, float* variance) { return snr; } -// Returns a vector parsed from whitespace delimited values in to_parse, -// or an empty vector if the string could not be parsed. 
-template <typename T> -std::vector<T> ParseList(absl::string_view to_parse) { - std::vector<T> values; - - std::istringstream str( // no-presubmit-check TODO(webrtc:8982) - std::string{to_parse}); - std::copy( - std::istream_iterator<T>(str), // no-presubmit-check TODO(webrtc:8982) - std::istream_iterator<T>(), // no-presubmit-check TODO(webrtc:8982) - std::back_inserter(values)); - - return values; -} - } // namespace webrtc #endif // MODULES_AUDIO_PROCESSING_TEST_TEST_UTILS_H_ diff --git a/modules/audio_processing/test/wav_based_simulator.cc b/modules/audio_processing/test/wav_based_simulator.cc index ee87f9e1a8..24d68b63d8 100644 --- a/modules/audio_processing/test/wav_based_simulator.cc +++ b/modules/audio_processing/test/wav_based_simulator.cc @@ -13,9 +13,16 @@ #include #include +#include +#include +#include +#include "absl/base/nullability.h" #include "absl/strings/string_view.h" -#include "modules/audio_processing/logging/apm_data_dumper.h" +#include "api/audio/audio_processing.h" +#include "api/scoped_refptr.h" +#include "common_audio/wav_file.h" +#include "modules/audio_processing/test/audio_processing_simulator.h" #include "modules/audio_processing/test/test_utils.h" #include "rtc_base/checks.h" #include "rtc_base/system/file_wrapper.h" @@ -56,11 +63,8 @@ WavBasedSimulator::GetCustomEventChain(absl::string_view filename) { WavBasedSimulator::WavBasedSimulator( const SimulationSettings& settings, - rtc::scoped_refptr<AudioProcessing> audio_processing, - std::unique_ptr<AudioProcessingBuilder> ap_builder) - : AudioProcessingSimulator(settings, - std::move(audio_processing), - std::move(ap_builder)) { + absl_nonnull scoped_refptr<AudioProcessing> audio_processing) + : AudioProcessingSimulator(settings, std::move(audio_processing)) { if (settings_.call_order_input_filename) { call_chain_ = WavBasedSimulator::GetCustomEventChain( *settings_.call_order_input_filename); diff --git a/modules/audio_processing/test/wav_based_simulator.h b/modules/audio_processing/test/wav_based_simulator.h index 44e9ee2b7f..b5399c68d6 100644 --- a/modules/audio_processing/test/wav_based_simulator.h +++ b/modules/audio_processing/test/wav_based_simulator.h @@ -13,7 +13,10 @@ #include +#include "absl/base/nullability.h" #include "absl/strings/string_view.h" +#include "api/audio/audio_processing.h" +#include "api/scoped_refptr.h" #include "modules/audio_processing/test/audio_processing_simulator.h" namespace webrtc { @@ -22,9 +25,9 @@ namespace test { // Used to perform an audio processing simulation from wav files. class WavBasedSimulator final : public AudioProcessingSimulator { public: - WavBasedSimulator(const SimulationSettings& settings, - rtc::scoped_refptr<AudioProcessing> audio_processing, - std::unique_ptr<AudioProcessingBuilder> ap_builder); + WavBasedSimulator( + const SimulationSettings& settings, + absl_nonnull scoped_refptr<AudioProcessing> audio_processing); WavBasedSimulator() = delete; WavBasedSimulator(const WavBasedSimulator&) = delete; diff --git a/modules/audio_processing/three_band_filter_bank.cc b/modules/audio_processing/three_band_filter_bank.cc index bd1c50477a..d69db9bf9a 100644 --- a/modules/audio_processing/three_band_filter_bank.cc +++ b/modules/audio_processing/three_band_filter_bank.cc @@ -102,12 +102,11 @@ const float kDctModulation[ThreeBandFilterBank::kNumNonZeroFilters][kDctSize] = // Filters the input signal `in` with the filter `filter` using a shift by // `in_shift`, taking into account the previous state. 
-void FilterCore( - rtc::ArrayView filter, - rtc::ArrayView in, - const int in_shift, - rtc::ArrayView out, - rtc::ArrayView state) { +void FilterCore(ArrayView filter, + ArrayView in, + const int in_shift, + ArrayView out, + ArrayView state) { constexpr int kMaxInShift = (kStride - 1); RTC_DCHECK_GE(in_shift, 0); RTC_DCHECK_LE(in_shift, kMaxInShift); @@ -170,9 +169,8 @@ ThreeBandFilterBank::~ThreeBandFilterBank() = default; // of `kSparsity`. // 3. Modulating with cosines and accumulating to get the desired band. void ThreeBandFilterBank::Analysis( - rtc::ArrayView in, - rtc::ArrayView, ThreeBandFilterBank::kNumBands> - out) { + ArrayView in, + ArrayView, ThreeBandFilterBank::kNumBands> out) { // Initialize the output to zero. for (int band = 0; band < ThreeBandFilterBank::kNumBands; ++band) { RTC_DCHECK_EQ(out[band].size(), kSplitBandSize); @@ -199,11 +197,10 @@ void ThreeBandFilterBank::Analysis( ? index : (index < kZeroFilterIndex2 ? index - 1 : index - 2); - rtc::ArrayView filter( - kFilterCoeffs[filter_index]); - rtc::ArrayView dct_modulation( + ArrayView filter(kFilterCoeffs[filter_index]); + ArrayView dct_modulation( kDctModulation[filter_index]); - rtc::ArrayView state(state_analysis_[filter_index]); + ArrayView state(state_analysis_[filter_index]); // Filter. std::array out_subsampled; @@ -227,9 +224,8 @@ void ThreeBandFilterBank::Analysis( // `kSparsity` signals with different delays. // 3. Parallel to serial upsampling by a factor of `kNumBands`. void ThreeBandFilterBank::Synthesis( - rtc::ArrayView, ThreeBandFilterBank::kNumBands> - in, - rtc::ArrayView out) { + ArrayView, ThreeBandFilterBank::kNumBands> in, + ArrayView out) { std::fill(out.begin(), out.end(), 0); for (int upsampling_index = 0; upsampling_index < kSubSampling; ++upsampling_index) { @@ -244,11 +240,10 @@ void ThreeBandFilterBank::Synthesis( ? index : (index < kZeroFilterIndex2 ? index - 1 : index - 2); - rtc::ArrayView filter( - kFilterCoeffs[filter_index]); - rtc::ArrayView dct_modulation( + ArrayView filter(kFilterCoeffs[filter_index]); + ArrayView dct_modulation( kDctModulation[filter_index]); - rtc::ArrayView state(state_synthesis_[filter_index]); + ArrayView state(state_synthesis_[filter_index]); // Prepare filter input by modulating the banded input. std::array in_subsampled; diff --git a/modules/audio_processing/three_band_filter_bank.h b/modules/audio_processing/three_band_filter_bank.h index db66caba4a..5c7dc1c834 100644 --- a/modules/audio_processing/three_band_filter_bank.h +++ b/modules/audio_processing/three_band_filter_bank.h @@ -57,13 +57,13 @@ class ThreeBandFilterBank final { // Splits `in` of size kFullBandSize into 3 downsampled frequency bands in // `out`, each of size 160. - void Analysis(rtc::ArrayView in, - rtc::ArrayView, kNumBands> out); + void Analysis(ArrayView in, + ArrayView, kNumBands> out); // Merges the 3 downsampled frequency bands in `in`, each of size 160, into // `out`, which is of size kFullBandSize. - void Synthesis(rtc::ArrayView, kNumBands> in, - rtc::ArrayView out); + void Synthesis(ArrayView, kNumBands> in, + ArrayView out); private: std::array, kNumNonZeroFilters> diff --git a/modules/audio_processing/transient/BUILD.gn b/modules/audio_processing/transient/BUILD.gn deleted file mode 100644 index 41aeab0abe..0000000000 --- a/modules/audio_processing/transient/BUILD.gn +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. 
-# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. - -import("../../../webrtc.gni") - -rtc_source_set("transient_suppressor_api") { - sources = [ "transient_suppressor.h" ] -} - -rtc_library("transient_suppressor_impl") { - visibility = [ - ":click_annotate", - ":transient_suppression_test", - ":transient_suppression_unittests", - "..:optionally_built_submodule_creators", - ] - sources = [ - "common.h", - "daubechies_8_wavelet_coeffs.h", - "dyadic_decimator.h", - "moving_moments.cc", - "moving_moments.h", - "transient_detector.cc", - "transient_detector.h", - "transient_suppressor_impl.cc", - "transient_suppressor_impl.h", - "windows_private.h", - "wpd_node.cc", - "wpd_node.h", - "wpd_tree.cc", - "wpd_tree.h", - ] - deps = [ - ":transient_suppressor_api", - ":voice_probability_delay_unit", - "../../../common_audio:common_audio", - "../../../common_audio:common_audio_c", - "../../../common_audio:fir_filter", - "../../../common_audio:fir_filter_factory", - "../../../common_audio/third_party/ooura:fft_size_256", - "../../../rtc_base:checks", - "../../../rtc_base:gtest_prod", - "../../../rtc_base:logging", - ] -} - -rtc_library("voice_probability_delay_unit") { - sources = [ - "voice_probability_delay_unit.cc", - "voice_probability_delay_unit.h", - ] - deps = [ "../../../rtc_base:checks" ] -} - -if (rtc_include_tests) { - if (!build_with_chromium) { - rtc_executable("click_annotate") { - testonly = true - sources = [ - "click_annotate.cc", - "file_utils.cc", - "file_utils.h", - ] - deps = [ - ":transient_suppressor_impl", - "..:audio_processing", - "../../../rtc_base/system:file_wrapper", - "../../../system_wrappers", - ] - } - - rtc_executable("transient_suppression_test") { - testonly = true - sources = [ - "file_utils.cc", - "file_utils.h", - "transient_suppression_test.cc", - "voice_probability_delay_unit_unittest.cc", - ] - deps = [ - ":transient_suppressor_api", - ":transient_suppressor_impl", - ":voice_probability_delay_unit", - "..:audio_processing", - "../../../common_audio", - "../../../rtc_base/system:file_wrapper", - "../../../system_wrappers", - "../../../test:fileutils", - "../../../test:test_support", - "../agc:level_estimation", - "//testing/gtest", - "//third_party/abseil-cpp/absl/flags:flag", - "//third_party/abseil-cpp/absl/flags:parse", - "//third_party/abseil-cpp/absl/types:optional", - ] - } - } - - rtc_library("transient_suppression_unittests") { - testonly = true - sources = [ - "dyadic_decimator_unittest.cc", - "file_utils.cc", - "file_utils.h", - "file_utils_unittest.cc", - "moving_moments_unittest.cc", - "transient_detector_unittest.cc", - "transient_suppressor_unittest.cc", - "voice_probability_delay_unit_unittest.cc", - "wpd_node_unittest.cc", - "wpd_tree_unittest.cc", - ] - deps = [ - ":transient_suppressor_api", - ":transient_suppressor_impl", - ":voice_probability_delay_unit", - "../../../rtc_base:stringutils", - "../../../rtc_base/system:file_wrapper", - "../../../test:fileutils", - "../../../test:test_support", - "//testing/gtest", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] - } -} diff --git a/modules/audio_processing/transient/click_annotate.cc b/modules/audio_processing/transient/click_annotate.cc deleted file 
mode 100644 index f3f040f9aa..0000000000 --- a/modules/audio_processing/transient/click_annotate.cc +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include -#include -#include -#include -#include - -#include "modules/audio_processing/transient/file_utils.h" -#include "modules/audio_processing/transient/transient_detector.h" -#include "rtc_base/system/file_wrapper.h" - -using webrtc::FileWrapper; -using webrtc::TransientDetector; - -// Application to generate a RTP timing file. -// Opens the PCM file and divides the signal in frames. -// Creates a send times array, one for each step. -// Each block that contains a transient, has an infinite send time. -// The resultant array is written to a DAT file -// Returns -1 on error or `lost_packets` otherwise. -int main(int argc, char* argv[]) { - if (argc != 5) { - printf("\n%s - Application to generate a RTP timing file.\n\n", argv[0]); - printf("%s PCMfile DATfile chunkSize sampleRate\n\n", argv[0]); - printf("Opens the PCMfile with sampleRate in Hertz.\n"); - printf("Creates a send times array, one for each chunkSize "); - printf("milliseconds step.\n"); - printf("Each block that contains a transient, has an infinite send time. "); - printf("The resultant array is written to a DATfile.\n\n"); - return 0; - } - - FileWrapper pcm_file = FileWrapper::OpenReadOnly(argv[1]); - if (!pcm_file.is_open()) { - printf("\nThe %s could not be opened.\n\n", argv[1]); - return -1; - } - - FileWrapper dat_file = FileWrapper::OpenWriteOnly(argv[2]); - if (!dat_file.is_open()) { - printf("\nThe %s could not be opened.\n\n", argv[2]); - return -1; - } - - int chunk_size_ms = atoi(argv[3]); - if (chunk_size_ms <= 0) { - printf("\nThe chunkSize must be a positive integer\n\n"); - return -1; - } - - int sample_rate_hz = atoi(argv[4]); - if (sample_rate_hz <= 0) { - printf("\nThe sampleRate must be a positive integer\n\n"); - return -1; - } - - TransientDetector detector(sample_rate_hz); - int lost_packets = 0; - size_t audio_buffer_length = chunk_size_ms * sample_rate_hz / 1000; - std::unique_ptr audio_buffer(new float[audio_buffer_length]); - std::vector send_times; - - // Read first buffer from the PCM test file. - size_t file_samples_read = ReadInt16FromFileToFloatBuffer( - &pcm_file, audio_buffer_length, audio_buffer.get()); - for (int time = 0; file_samples_read > 0; time += chunk_size_ms) { - // Pad the rest of the buffer with zeros. - for (size_t i = file_samples_read; i < audio_buffer_length; ++i) { - audio_buffer[i] = 0.0; - } - float value = - detector.Detect(audio_buffer.get(), audio_buffer_length, NULL, 0); - if (value < 0.5f) { - value = time; - } else { - value = FLT_MAX; - ++lost_packets; - } - send_times.push_back(value); - - // Read next buffer from the PCM test file. 
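For concreteness, the bookkeeping in the deleted click_annotate.cc tool above can be restated in a few lines: the chunk length in samples follows from the chunk duration and the sample rate, and every chunk whose detection value reaches 0.5 is recorded with an "infinite" (FLT_MAX) send time and counted as lost. The sketch below uses made-up detection values and an example 10 ms / 16 kHz configuration.

#include <cfloat>
#include <cstddef>
#include <cstdio>
#include <vector>

int main() {
  const int chunk_size_ms = 10;
  const int sample_rate_hz = 16000;
  const std::size_t audio_buffer_length =
      chunk_size_ms * sample_rate_hz / 1000;
  std::printf("samples per chunk: %zu\n", audio_buffer_length);  // 160

  // Made-up detection values, one per chunk, in [0, 1].
  const float detections[] = {0.1f, 0.7f, 0.3f, 0.9f};
  std::vector<float> send_times;
  int lost_packets = 0;
  int time_ms = 0;
  for (float detection : detections) {
    if (detection < 0.5f) {
      send_times.push_back(static_cast<float>(time_ms));  // normal send time
    } else {
      send_times.push_back(FLT_MAX);  // transient chunk: "infinite" send time
      ++lost_packets;
    }
    time_ms += chunk_size_ms;
  }
  std::printf("lost packets: %d of %zu\n", lost_packets, send_times.size());
  return 0;
}
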
- file_samples_read = ReadInt16FromFileToFloatBuffer( - &pcm_file, audio_buffer_length, audio_buffer.get()); - } - - size_t floats_written = - WriteFloatBufferToFile(&dat_file, send_times.size(), &send_times[0]); - - if (floats_written == 0) { - printf("\nThe send times could not be written to DAT file\n\n"); - return -1; - } - - pcm_file.Close(); - dat_file.Close(); - - return lost_packets; -} diff --git a/modules/audio_processing/transient/common.h b/modules/audio_processing/transient/common.h deleted file mode 100644 index 63c9a7b315..0000000000 --- a/modules/audio_processing/transient/common.h +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_COMMON_H_ -#define MODULES_AUDIO_PROCESSING_TRANSIENT_COMMON_H_ -namespace webrtc { -namespace ts { - -static const float kPi = 3.14159265358979323846f; -static const int kChunkSizeMs = 10; -enum { - kSampleRate8kHz = 8000, - kSampleRate16kHz = 16000, - kSampleRate32kHz = 32000, - kSampleRate48kHz = 48000 -}; - -} // namespace ts -} // namespace webrtc -#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_COMMON_H_ diff --git a/modules/audio_processing/transient/daubechies_8_wavelet_coeffs.h b/modules/audio_processing/transient/daubechies_8_wavelet_coeffs.h deleted file mode 100644 index 92233bfd74..0000000000 --- a/modules/audio_processing/transient/daubechies_8_wavelet_coeffs.h +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This header file defines the coefficients of the FIR based approximation of -// the Meyer Wavelet -#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_DAUBECHIES_8_WAVELET_COEFFS_H_ -#define MODULES_AUDIO_PROCESSING_TRANSIENT_DAUBECHIES_8_WAVELET_COEFFS_H_ - -// Decomposition coefficients Daubechies 8. 
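The two coefficient tables in this header (listed just below) are not independent: the low-pass filter is the time-reversed, sign-alternated copy of the high-pass filter, i.e. low[n] = (-1)^n * high[15 - n], the usual quadrature-mirror relation for an orthogonal wavelet's decomposition pair. A small self-contained check, with the coefficients copied from the header, is:

#include <cstdio>

int main() {
  constexpr int kLen = 16;
  // Coefficients copied verbatim from daubechies_8_wavelet_coeffs.h.
  const float kHighPass[kLen] = {
      -5.44158422430816093862e-02f, 3.12871590914465924627e-01f,
      -6.75630736298012846142e-01f, 5.85354683654869090148e-01f,
      1.58291052560238926228e-02f,  -2.84015542962428091389e-01f,
      -4.72484573997972536787e-04f, 1.28747426620186011803e-01f,
      1.73693010020221083600e-02f,  -4.40882539310647192377e-02f,
      -1.39810279170155156436e-02f, 8.74609404701565465445e-03f,
      4.87035299301066034600e-03f,  -3.91740372995977108837e-04f,
      -6.75449405998556772109e-04f, -1.17476784002281916305e-04f};
  const float kLowPass[kLen] = {
      -1.17476784002281916305e-04f, 6.75449405998556772109e-04f,
      -3.91740372995977108837e-04f, -4.87035299301066034600e-03f,
      8.74609404701565465445e-03f,  1.39810279170155156436e-02f,
      -4.40882539310647192377e-02f, -1.73693010020221083600e-02f,
      1.28747426620186011803e-01f,  4.72484573997972536787e-04f,
      -2.84015542962428091389e-01f, -1.58291052560238926228e-02f,
      5.85354683654869090148e-01f,  6.75630736298012846142e-01f,
      3.12871590914465924627e-01f,  5.44158422430816093862e-02f};
  for (int n = 0; n < kLen; ++n) {
    // Mirror the high-pass filter in time and alternate the sign.
    const float mirrored = (n % 2 == 0 ? 1.f : -1.f) * kHighPass[kLen - 1 - n];
    if (kLowPass[n] != mirrored) {
      std::printf("relation does not hold at n=%d\n", n);
      return 1;
    }
  }
  std::printf("low_pass[n] == (-1)^n * high_pass[%d - n] for all n\n",
              kLen - 1);
  return 0;
}
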
- -namespace webrtc { - -const int kDaubechies8CoefficientsLength = 16; - -const float kDaubechies8HighPassCoefficients[kDaubechies8CoefficientsLength] = { - -5.44158422430816093862e-02f, 3.12871590914465924627e-01f, - -6.75630736298012846142e-01f, 5.85354683654869090148e-01f, - 1.58291052560238926228e-02f, -2.84015542962428091389e-01f, - -4.72484573997972536787e-04f, 1.28747426620186011803e-01f, - 1.73693010020221083600e-02f, -4.40882539310647192377e-02f, - -1.39810279170155156436e-02f, 8.74609404701565465445e-03f, - 4.87035299301066034600e-03f, -3.91740372995977108837e-04f, - -6.75449405998556772109e-04f, -1.17476784002281916305e-04f}; - -const float kDaubechies8LowPassCoefficients[kDaubechies8CoefficientsLength] = { - -1.17476784002281916305e-04f, 6.75449405998556772109e-04f, - -3.91740372995977108837e-04f, -4.87035299301066034600e-03f, - 8.74609404701565465445e-03f, 1.39810279170155156436e-02f, - -4.40882539310647192377e-02f, -1.73693010020221083600e-02f, - 1.28747426620186011803e-01f, 4.72484573997972536787e-04f, - -2.84015542962428091389e-01f, -1.58291052560238926228e-02f, - 5.85354683654869090148e-01f, 6.75630736298012846142e-01f, - 3.12871590914465924627e-01f, 5.44158422430816093862e-02f}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_DAUBECHIES_8_WAVELET_COEFFS_H_ diff --git a/modules/audio_processing/transient/dyadic_decimator.h b/modules/audio_processing/transient/dyadic_decimator.h deleted file mode 100644 index 52467e8c25..0000000000 --- a/modules/audio_processing/transient/dyadic_decimator.h +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_DYADIC_DECIMATOR_H_ -#define MODULES_AUDIO_PROCESSING_TRANSIENT_DYADIC_DECIMATOR_H_ - -#include - -// Provides a set of static methods to perform dyadic decimations. - -namespace webrtc { - -// Returns the proper length of the output buffer that you should use for the -// given `in_length` and decimation `odd_sequence`. -// Return -1 on error. -inline size_t GetOutLengthToDyadicDecimate(size_t in_length, - bool odd_sequence) { - size_t out_length = in_length / 2; - - if (in_length % 2 == 1 && !odd_sequence) { - ++out_length; - } - - return out_length; -} - -// Performs a dyadic decimation: removes every odd/even member of a sequence -// halving its overall length. -// Arguments: -// in: array of `in_length`. -// odd_sequence: If false, the odd members will be removed (1, 3, 5, ...); -// if true, the even members will be removed (0, 2, 4, ...). -// out: array of `out_length`. `out_length` must be large enough to -// hold the decimated output. The necessary length can be provided by -// GetOutLengthToDyadicDecimate(). -// Must be previously allocated. -// Returns the number of output samples, -1 on error. -template -static size_t DyadicDecimate(const T* in, - size_t in_length, - bool odd_sequence, - T* out, - size_t out_length) { - size_t half_length = GetOutLengthToDyadicDecimate(in_length, odd_sequence); - - if (!in || !out || in_length <= 0 || out_length < half_length) { - return 0; - } - - size_t output_samples = 0; - size_t index_adjustment = odd_sequence ? 
1 : 0; - for (output_samples = 0; output_samples < half_length; ++output_samples) { - out[output_samples] = in[output_samples * 2 + index_adjustment]; - } - - return output_samples; -} - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_DYADIC_DECIMATOR_H_ diff --git a/modules/audio_processing/transient/dyadic_decimator_unittest.cc b/modules/audio_processing/transient/dyadic_decimator_unittest.cc deleted file mode 100644 index e4776d694f..0000000000 --- a/modules/audio_processing/transient/dyadic_decimator_unittest.cc +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/transient/dyadic_decimator.h" - -#include "test/gtest.h" - -namespace webrtc { - -static const size_t kEvenBufferLength = 6; -static const size_t kOddBufferLength = 5; -static const size_t kOutBufferLength = 3; - -int16_t const test_buffer_even_len[] = {0, 1, 2, 3, 4, 5}; -int16_t const test_buffer_odd_len[] = {0, 1, 2, 3, 4}; -int16_t test_buffer_out[kOutBufferLength]; - -TEST(DyadicDecimatorTest, GetOutLengthToDyadicDecimate) { - EXPECT_EQ(3u, GetOutLengthToDyadicDecimate(6, false)); - EXPECT_EQ(3u, GetOutLengthToDyadicDecimate(6, true)); - EXPECT_EQ(3u, GetOutLengthToDyadicDecimate(5, false)); - EXPECT_EQ(2u, GetOutLengthToDyadicDecimate(5, true)); -} - -TEST(DyadicDecimatorTest, DyadicDecimateErrorValues) { - size_t out_samples = 0; - - out_samples = DyadicDecimate(static_cast(NULL), kEvenBufferLength, - false, // Even sequence. - test_buffer_out, kOutBufferLength); - EXPECT_EQ(0u, out_samples); - - out_samples = DyadicDecimate(test_buffer_even_len, kEvenBufferLength, - false, // Even sequence. - static_cast(NULL), kOutBufferLength); - EXPECT_EQ(0u, out_samples); - - // Less than required `out_length`. - out_samples = DyadicDecimate(test_buffer_even_len, kEvenBufferLength, - false, // Even sequence. - test_buffer_out, 2); - EXPECT_EQ(0u, out_samples); -} - -TEST(DyadicDecimatorTest, DyadicDecimateEvenLengthEvenSequence) { - size_t expected_out_samples = - GetOutLengthToDyadicDecimate(kEvenBufferLength, false); - - size_t out_samples = DyadicDecimate(test_buffer_even_len, kEvenBufferLength, - false, // Even sequence. - test_buffer_out, kOutBufferLength); - - EXPECT_EQ(expected_out_samples, out_samples); - - EXPECT_EQ(0, test_buffer_out[0]); - EXPECT_EQ(2, test_buffer_out[1]); - EXPECT_EQ(4, test_buffer_out[2]); -} - -TEST(DyadicDecimatorTest, DyadicDecimateEvenLengthOddSequence) { - size_t expected_out_samples = - GetOutLengthToDyadicDecimate(kEvenBufferLength, true); - - size_t out_samples = DyadicDecimate(test_buffer_even_len, kEvenBufferLength, - true, // Odd sequence. - test_buffer_out, kOutBufferLength); - - EXPECT_EQ(expected_out_samples, out_samples); - - EXPECT_EQ(1, test_buffer_out[0]); - EXPECT_EQ(3, test_buffer_out[1]); - EXPECT_EQ(5, test_buffer_out[2]); -} - -TEST(DyadicDecimatorTest, DyadicDecimateOddLengthEvenSequence) { - size_t expected_out_samples = - GetOutLengthToDyadicDecimate(kOddBufferLength, false); - - size_t out_samples = DyadicDecimate(test_buffer_odd_len, kOddBufferLength, - false, // Even sequence. 
- test_buffer_out, kOutBufferLength); - - EXPECT_EQ(expected_out_samples, out_samples); - - EXPECT_EQ(0, test_buffer_out[0]); - EXPECT_EQ(2, test_buffer_out[1]); - EXPECT_EQ(4, test_buffer_out[2]); -} - -TEST(DyadicDecimatorTest, DyadicDecimateOddLengthOddSequence) { - size_t expected_out_samples = - GetOutLengthToDyadicDecimate(kOddBufferLength, true); - - size_t out_samples = DyadicDecimate(test_buffer_odd_len, kOddBufferLength, - true, // Odd sequence. - test_buffer_out, kOutBufferLength); - - EXPECT_EQ(expected_out_samples, out_samples); - - EXPECT_EQ(1, test_buffer_out[0]); - EXPECT_EQ(3, test_buffer_out[1]); -} - -} // namespace webrtc diff --git a/modules/audio_processing/transient/file_utils.cc b/modules/audio_processing/transient/file_utils.cc deleted file mode 100644 index 58f99325d1..0000000000 --- a/modules/audio_processing/transient/file_utils.cc +++ /dev/null @@ -1,257 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/transient/file_utils.h" - -#include - -#include "rtc_base/system/file_wrapper.h" - -namespace webrtc { - -int ConvertByteArrayToFloat(const uint8_t bytes[4], float* out) { - if (!bytes || !out) { - return -1; - } - - uint32_t binary_value = 0; - for (int i = 3; i >= 0; --i) { - binary_value <<= 8; - binary_value += bytes[i]; - } - - *out = bit_cast(binary_value); - - return 0; -} - -int ConvertByteArrayToDouble(const uint8_t bytes[8], double* out) { - if (!bytes || !out) { - return -1; - } - - uint64_t binary_value = 0; - for (int i = 7; i >= 0; --i) { - binary_value <<= 8; - binary_value += bytes[i]; - } - - *out = bit_cast(binary_value); - - return 0; -} - -int ConvertFloatToByteArray(float value, uint8_t out_bytes[4]) { - if (!out_bytes) { - return -1; - } - - uint32_t binary_value = bit_cast(value); - for (size_t i = 0; i < 4; ++i) { - out_bytes[i] = binary_value; - binary_value >>= 8; - } - - return 0; -} - -int ConvertDoubleToByteArray(double value, uint8_t out_bytes[8]) { - if (!out_bytes) { - return -1; - } - - uint64_t binary_value = bit_cast(value); - for (size_t i = 0; i < 8; ++i) { - out_bytes[i] = binary_value; - binary_value >>= 8; - } - - return 0; -} - -size_t ReadInt16BufferFromFile(FileWrapper* file, - size_t length, - int16_t* buffer) { - if (!file || !file->is_open() || !buffer || length <= 0) { - return 0; - } - - std::unique_ptr byte_array(new uint8_t[2]); - - size_t int16s_read = 0; - - while (int16s_read < length) { - size_t bytes_read = file->Read(byte_array.get(), 2); - if (bytes_read < 2) { - break; - } - int16_t value = byte_array[1]; - value <<= 8; - value += byte_array[0]; - buffer[int16s_read] = value; - ++int16s_read; - } - - return int16s_read; -} - -size_t ReadInt16FromFileToFloatBuffer(FileWrapper* file, - size_t length, - float* buffer) { - if (!file || !file->is_open() || !buffer || length <= 0) { - return 0; - } - - std::unique_ptr buffer16(new int16_t[length]); - - size_t int16s_read = ReadInt16BufferFromFile(file, length, buffer16.get()); - - for (size_t i = 0; i < int16s_read; ++i) { - buffer[i] = buffer16[i]; - } - - return int16s_read; -} - -size_t ReadInt16FromFileToDoubleBuffer(FileWrapper* file, - size_t length, - 
double* buffer) { - if (!file || !file->is_open() || !buffer || length <= 0) { - return 0; - } - - std::unique_ptr buffer16(new int16_t[length]); - - size_t int16s_read = ReadInt16BufferFromFile(file, length, buffer16.get()); - - for (size_t i = 0; i < int16s_read; ++i) { - buffer[i] = buffer16[i]; - } - - return int16s_read; -} - -size_t ReadFloatBufferFromFile(FileWrapper* file, - size_t length, - float* buffer) { - if (!file || !file->is_open() || !buffer || length <= 0) { - return 0; - } - - std::unique_ptr byte_array(new uint8_t[4]); - - size_t floats_read = 0; - - while (floats_read < length) { - size_t bytes_read = file->Read(byte_array.get(), 4); - if (bytes_read < 4) { - break; - } - ConvertByteArrayToFloat(byte_array.get(), &buffer[floats_read]); - ++floats_read; - } - - return floats_read; -} - -size_t ReadDoubleBufferFromFile(FileWrapper* file, - size_t length, - double* buffer) { - if (!file || !file->is_open() || !buffer || length <= 0) { - return 0; - } - - std::unique_ptr byte_array(new uint8_t[8]); - - size_t doubles_read = 0; - - while (doubles_read < length) { - size_t bytes_read = file->Read(byte_array.get(), 8); - if (bytes_read < 8) { - break; - } - ConvertByteArrayToDouble(byte_array.get(), &buffer[doubles_read]); - ++doubles_read; - } - - return doubles_read; -} - -size_t WriteInt16BufferToFile(FileWrapper* file, - size_t length, - const int16_t* buffer) { - if (!file || !file->is_open() || !buffer || length <= 0) { - return 0; - } - - std::unique_ptr byte_array(new uint8_t[2]); - - size_t int16s_written = 0; - - for (int16s_written = 0; int16s_written < length; ++int16s_written) { - // Get byte representation. - byte_array[0] = buffer[int16s_written] & 0xFF; - byte_array[1] = (buffer[int16s_written] >> 8) & 0xFF; - - file->Write(byte_array.get(), 2); - } - - file->Flush(); - - return int16s_written; -} - -size_t WriteFloatBufferToFile(FileWrapper* file, - size_t length, - const float* buffer) { - if (!file || !file->is_open() || !buffer || length <= 0) { - return 0; - } - - std::unique_ptr byte_array(new uint8_t[4]); - - size_t floats_written = 0; - - for (floats_written = 0; floats_written < length; ++floats_written) { - // Get byte representation. - ConvertFloatToByteArray(buffer[floats_written], byte_array.get()); - - file->Write(byte_array.get(), 4); - } - - file->Flush(); - - return floats_written; -} - -size_t WriteDoubleBufferToFile(FileWrapper* file, - size_t length, - const double* buffer) { - if (!file || !file->is_open() || !buffer || length <= 0) { - return 0; - } - - std::unique_ptr byte_array(new uint8_t[8]); - - size_t doubles_written = 0; - - for (doubles_written = 0; doubles_written < length; ++doubles_written) { - // Get byte representation. - ConvertDoubleToByteArray(buffer[doubles_written], byte_array.get()); - - file->Write(byte_array.get(), 8); - } - - file->Flush(); - - return doubles_written; -} - -} // namespace webrtc diff --git a/modules/audio_processing/transient/file_utils.h b/modules/audio_processing/transient/file_utils.h deleted file mode 100644 index b748337773..0000000000 --- a/modules/audio_processing/transient/file_utils.h +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_FILE_UTILS_H_ -#define MODULES_AUDIO_PROCESSING_TRANSIENT_FILE_UTILS_H_ - -#include - -#include "rtc_base/system/file_wrapper.h" - -namespace webrtc { - -// This is a copy of the cast included in the Chromium codebase here: -// http://cs.chromium.org/src/third_party/cld/base/casts.h -template -inline Dest bit_cast(const Source& source) { - // A compile error here means your Dest and Source have different sizes. - static_assert(sizeof(Dest) == sizeof(Source), - "Dest and Source have different sizes"); - - Dest dest; - memcpy(&dest, &source, sizeof(dest)); - return dest; -} - -// Converts the byte array with binary float representation to float. -// Bytes must be in little-endian order. -// Returns 0 if correct, -1 on error. -int ConvertByteArrayToFloat(const uint8_t bytes[4], float* out); - -// Converts the byte array with binary double representation to double. -// Bytes must be in little-endian order. -// Returns 0 if correct, -1 on error. -int ConvertByteArrayToDouble(const uint8_t bytes[8], double* out); - -// Converts a float to a byte array with binary float representation. -// Bytes will be in little-endian order. -// Returns 0 if correct, -1 on error. -int ConvertFloatToByteArray(float value, uint8_t out_bytes[4]); - -// Converts a double to a byte array with binary double representation. -// Bytes will be in little-endian order. -// Returns 0 if correct, -1 on error. -int ConvertDoubleToByteArray(double value, uint8_t out_bytes[8]); - -// Reads `length` 16-bit integers from `file` to `buffer`. -// `file` must be previously opened. -// Returns the number of 16-bit integers read or -1 on error. -size_t ReadInt16BufferFromFile(FileWrapper* file, - size_t length, - int16_t* buffer); - -// Reads `length` 16-bit integers from `file` and stores those values -// (converting them) in `buffer`. -// `file` must be previously opened. -// Returns the number of 16-bit integers read or -1 on error. -size_t ReadInt16FromFileToFloatBuffer(FileWrapper* file, - size_t length, - float* buffer); - -// Reads `length` 16-bit integers from `file` and stores those values -// (converting them) in `buffer`. -// `file` must be previously opened. -// Returns the number of 16-bit integers read or -1 on error. -size_t ReadInt16FromFileToDoubleBuffer(FileWrapper* file, - size_t length, - double* buffer); - -// Reads `length` floats in binary representation (4 bytes) from `file` to -// `buffer`. -// `file` must be previously opened. -// Returns the number of floats read or -1 on error. -size_t ReadFloatBufferFromFile(FileWrapper* file, size_t length, float* buffer); - -// Reads `length` doubles in binary representation (8 bytes) from `file` to -// `buffer`. -// `file` must be previously opened. -// Returns the number of doubles read or -1 on error. -size_t ReadDoubleBufferFromFile(FileWrapper* file, - size_t length, - double* buffer); - -// Writes `length` 16-bit integers from `buffer` in binary representation (2 -// bytes) to `file`. It flushes `file`, so after this call there are no -// writings pending. -// `file` must be previously opened. -// Returns the number of doubles written or -1 on error. -size_t WriteInt16BufferToFile(FileWrapper* file, - size_t length, - const int16_t* buffer); - -// Writes `length` floats from `buffer` in binary representation (4 bytes) to -// `file`. It flushes `file`, so after this call there are no writtings pending. 
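The conversion helpers declared above store floats and doubles byte by byte in little-endian order, using a memcpy-based bit_cast to move between the floating-point value and its integer bit pattern. A minimal stand-alone sketch of the same round trip (not the deleted functions themselves) is below; the byte pattern 0xDB 0x0F 0x49 0x40 is the little-endian encoding of pi as a float, the same constant the unit tests further down use.

#include <cstdint>
#include <cstdio>
#include <cstring>

// Serialize a float as 4 little-endian bytes (least significant byte first).
static void FloatToLittleEndianBytes(float value, uint8_t out[4]) {
  uint32_t bits;
  std::memcpy(&bits, &value, sizeof(bits));
  for (int i = 0; i < 4; ++i) {
    out[i] = static_cast<uint8_t>(bits & 0xFF);
    bits >>= 8;
  }
}

// Reassemble a float from its 4 little-endian bytes.
static float LittleEndianBytesToFloat(const uint8_t bytes[4]) {
  uint32_t bits = 0;
  for (int i = 3; i >= 0; --i) {
    bits = (bits << 8) | bytes[i];
  }
  float value;
  std::memcpy(&value, &bits, sizeof(value));
  return value;
}

int main() {
  const uint8_t kPiBytes[4] = {0xDB, 0x0F, 0x49, 0x40};  // float pi, LE
  const float pi = LittleEndianBytesToFloat(kPiBytes);
  uint8_t round_trip[4];
  FloatToLittleEndianBytes(pi, round_trip);
  std::printf("%.7f, round trip matches: %d\n", pi,
              std::memcmp(round_trip, kPiBytes, 4) == 0);
  return 0;
}
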
-// `file` must be previously opened. -// Returns the number of doubles written or -1 on error. -size_t WriteFloatBufferToFile(FileWrapper* file, - size_t length, - const float* buffer); - -// Writes `length` doubles from `buffer` in binary representation (8 bytes) to -// `file`. It flushes `file`, so after this call there are no writings pending. -// `file` must be previously opened. -// Returns the number of doubles written or -1 on error. -size_t WriteDoubleBufferToFile(FileWrapper* file, - size_t length, - const double* buffer); - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_FILE_UTILS_H_ diff --git a/modules/audio_processing/transient/file_utils_unittest.cc b/modules/audio_processing/transient/file_utils_unittest.cc deleted file mode 100644 index a9dddb1eda..0000000000 --- a/modules/audio_processing/transient/file_utils_unittest.cc +++ /dev/null @@ -1,501 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/transient/file_utils.h" - -#include - -#include -#include -#include - -#include "absl/strings/string_view.h" -#include "rtc_base/system/file_wrapper.h" -#include "test/gtest.h" -#include "test/testsupport/file_utils.h" - -namespace webrtc { - -static const uint8_t kPiBytesf[4] = {0xDB, 0x0F, 0x49, 0x40}; -static const uint8_t kEBytesf[4] = {0x54, 0xF8, 0x2D, 0x40}; -static const uint8_t kAvogadroBytesf[4] = {0x2F, 0x0C, 0xFF, 0x66}; - -static const uint8_t kPiBytes[8] = {0x18, 0x2D, 0x44, 0x54, - 0xFB, 0x21, 0x09, 0x40}; -static const uint8_t kEBytes[8] = {0x69, 0x57, 0x14, 0x8B, - 0x0A, 0xBF, 0x05, 0x40}; -static const uint8_t kAvogadroBytes[8] = {0xF4, 0xBC, 0xA8, 0xDF, - 0x85, 0xE1, 0xDF, 0x44}; - -static const double kPi = 3.14159265358979323846; -static const double kE = 2.71828182845904523536; -static const double kAvogadro = 602214100000000000000000.0; - -class TransientFileUtilsTest : public ::testing::Test { - protected: - TransientFileUtilsTest() - : kTestFileName( - test::ResourcePath("audio_processing/transient/double-utils", - "dat")), - kTestFileNamef( - test::ResourcePath("audio_processing/transient/float-utils", - "dat")) {} - - ~TransientFileUtilsTest() override { CleanupTempFiles(); } - - std::string CreateTempFilename(absl::string_view dir, - absl::string_view prefix) { - std::string filename = test::TempFilename(dir, prefix); - temp_filenames_.push_back(filename); - return filename; - } - - void CleanupTempFiles() { - for (const std::string& filename : temp_filenames_) { - remove(filename.c_str()); - } - temp_filenames_.clear(); - } - - // This file (used in some tests) contains binary data. The data correspond to - // the double representation of the constants: Pi, E, and the Avogadro's - // Number; - // appended in that order. - const std::string kTestFileName; - - // This file (used in some tests) contains binary data. The data correspond to - // the float representation of the constants: Pi, E, and the Avogadro's - // Number; - // appended in that order. - const std::string kTestFileNamef; - - // List of temporary filenames created by CreateTempFilename. 
- std::vector temp_filenames_; -}; - -#if defined(WEBRTC_IOS) -#define MAYBE_ConvertByteArrayToFloat DISABLED_ConvertByteArrayToFloat -#else -#define MAYBE_ConvertByteArrayToFloat ConvertByteArrayToFloat -#endif -TEST_F(TransientFileUtilsTest, MAYBE_ConvertByteArrayToFloat) { - float value = 0.0; - - EXPECT_EQ(0, ConvertByteArrayToFloat(kPiBytesf, &value)); - EXPECT_FLOAT_EQ(kPi, value); - - EXPECT_EQ(0, ConvertByteArrayToFloat(kEBytesf, &value)); - EXPECT_FLOAT_EQ(kE, value); - - EXPECT_EQ(0, ConvertByteArrayToFloat(kAvogadroBytesf, &value)); - EXPECT_FLOAT_EQ(kAvogadro, value); -} - -#if defined(WEBRTC_IOS) -#define MAYBE_ConvertByteArrayToDouble DISABLED_ConvertByteArrayToDouble -#else -#define MAYBE_ConvertByteArrayToDouble ConvertByteArrayToDouble -#endif -TEST_F(TransientFileUtilsTest, MAYBE_ConvertByteArrayToDouble) { - double value = 0.0; - - EXPECT_EQ(0, ConvertByteArrayToDouble(kPiBytes, &value)); - EXPECT_DOUBLE_EQ(kPi, value); - - EXPECT_EQ(0, ConvertByteArrayToDouble(kEBytes, &value)); - EXPECT_DOUBLE_EQ(kE, value); - - EXPECT_EQ(0, ConvertByteArrayToDouble(kAvogadroBytes, &value)); - EXPECT_DOUBLE_EQ(kAvogadro, value); -} - -#if defined(WEBRTC_IOS) -#define MAYBE_ConvertFloatToByteArray DISABLED_ConvertFloatToByteArray -#else -#define MAYBE_ConvertFloatToByteArray ConvertFloatToByteArray -#endif -TEST_F(TransientFileUtilsTest, MAYBE_ConvertFloatToByteArray) { - std::unique_ptr bytes(new uint8_t[4]); - - EXPECT_EQ(0, ConvertFloatToByteArray(kPi, bytes.get())); - EXPECT_EQ(0, memcmp(bytes.get(), kPiBytesf, 4)); - - EXPECT_EQ(0, ConvertFloatToByteArray(kE, bytes.get())); - EXPECT_EQ(0, memcmp(bytes.get(), kEBytesf, 4)); - - EXPECT_EQ(0, ConvertFloatToByteArray(kAvogadro, bytes.get())); - EXPECT_EQ(0, memcmp(bytes.get(), kAvogadroBytesf, 4)); -} - -#if defined(WEBRTC_IOS) -#define MAYBE_ConvertDoubleToByteArray DISABLED_ConvertDoubleToByteArray -#else -#define MAYBE_ConvertDoubleToByteArray ConvertDoubleToByteArray -#endif -TEST_F(TransientFileUtilsTest, MAYBE_ConvertDoubleToByteArray) { - std::unique_ptr bytes(new uint8_t[8]); - - EXPECT_EQ(0, ConvertDoubleToByteArray(kPi, bytes.get())); - EXPECT_EQ(0, memcmp(bytes.get(), kPiBytes, 8)); - - EXPECT_EQ(0, ConvertDoubleToByteArray(kE, bytes.get())); - EXPECT_EQ(0, memcmp(bytes.get(), kEBytes, 8)); - - EXPECT_EQ(0, ConvertDoubleToByteArray(kAvogadro, bytes.get())); - EXPECT_EQ(0, memcmp(bytes.get(), kAvogadroBytes, 8)); -} - -#if defined(WEBRTC_IOS) -#define MAYBE_ReadInt16BufferFromFile DISABLED_ReadInt16BufferFromFile -#else -#define MAYBE_ReadInt16BufferFromFile ReadInt16BufferFromFile -#endif -TEST_F(TransientFileUtilsTest, MAYBE_ReadInt16BufferFromFile) { - std::string test_filename = kTestFileName; - - FileWrapper file = FileWrapper::OpenReadOnly(test_filename); - ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" - << kTestFileName.c_str(); - - const size_t kBufferLength = 12; - std::unique_ptr buffer(new int16_t[kBufferLength]); - - EXPECT_EQ(kBufferLength, - ReadInt16BufferFromFile(&file, kBufferLength, buffer.get())); - EXPECT_EQ(22377, buffer[4]); - EXPECT_EQ(16389, buffer[7]); - EXPECT_EQ(17631, buffer[kBufferLength - 1]); - - file.Rewind(); - - // The next test is for checking the case where there are not as much data as - // needed in the file, but reads to the end, and it returns the number of - // int16s read. 
- const size_t kBufferLenghtLargerThanFile = kBufferLength * 2; - buffer.reset(new int16_t[kBufferLenghtLargerThanFile]); - EXPECT_EQ(kBufferLength, - ReadInt16BufferFromFile(&file, kBufferLenghtLargerThanFile, - buffer.get())); - EXPECT_EQ(11544, buffer[0]); - EXPECT_EQ(22377, buffer[4]); - EXPECT_EQ(16389, buffer[7]); - EXPECT_EQ(17631, buffer[kBufferLength - 1]); -} - -#if defined(WEBRTC_IOS) -#define MAYBE_ReadInt16FromFileToFloatBuffer \ - DISABLED_ReadInt16FromFileToFloatBuffer -#else -#define MAYBE_ReadInt16FromFileToFloatBuffer ReadInt16FromFileToFloatBuffer -#endif -TEST_F(TransientFileUtilsTest, MAYBE_ReadInt16FromFileToFloatBuffer) { - std::string test_filename = kTestFileName; - - FileWrapper file = FileWrapper::OpenReadOnly(test_filename); - ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" - << kTestFileName.c_str(); - - const size_t kBufferLength = 12; - std::unique_ptr buffer(new float[kBufferLength]); - - EXPECT_EQ(kBufferLength, - ReadInt16FromFileToFloatBuffer(&file, kBufferLength, buffer.get())); - - EXPECT_DOUBLE_EQ(11544, buffer[0]); - EXPECT_DOUBLE_EQ(22377, buffer[4]); - EXPECT_DOUBLE_EQ(16389, buffer[7]); - EXPECT_DOUBLE_EQ(17631, buffer[kBufferLength - 1]); - - file.Rewind(); - - // The next test is for checking the case where there are not as much data as - // needed in the file, but reads to the end, and it returns the number of - // int16s read. - const size_t kBufferLenghtLargerThanFile = kBufferLength * 2; - buffer.reset(new float[kBufferLenghtLargerThanFile]); - EXPECT_EQ(kBufferLength, - ReadInt16FromFileToFloatBuffer(&file, kBufferLenghtLargerThanFile, - buffer.get())); - EXPECT_DOUBLE_EQ(11544, buffer[0]); - EXPECT_DOUBLE_EQ(22377, buffer[4]); - EXPECT_DOUBLE_EQ(16389, buffer[7]); - EXPECT_DOUBLE_EQ(17631, buffer[kBufferLength - 1]); -} - -#if defined(WEBRTC_IOS) -#define MAYBE_ReadInt16FromFileToDoubleBuffer \ - DISABLED_ReadInt16FromFileToDoubleBuffer -#else -#define MAYBE_ReadInt16FromFileToDoubleBuffer ReadInt16FromFileToDoubleBuffer -#endif -TEST_F(TransientFileUtilsTest, MAYBE_ReadInt16FromFileToDoubleBuffer) { - std::string test_filename = kTestFileName; - - FileWrapper file = FileWrapper::OpenReadOnly(test_filename); - ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" - << kTestFileName.c_str(); - - const size_t kBufferLength = 12; - std::unique_ptr buffer(new double[kBufferLength]); - - EXPECT_EQ(kBufferLength, ReadInt16FromFileToDoubleBuffer(&file, kBufferLength, - buffer.get())); - EXPECT_DOUBLE_EQ(11544, buffer[0]); - EXPECT_DOUBLE_EQ(22377, buffer[4]); - EXPECT_DOUBLE_EQ(16389, buffer[7]); - EXPECT_DOUBLE_EQ(17631, buffer[kBufferLength - 1]); - - file.Rewind(); - - // The next test is for checking the case where there are not as much data as - // needed in the file, but reads to the end, and it returns the number of - // int16s read. 
- const size_t kBufferLenghtLargerThanFile = kBufferLength * 2; - buffer.reset(new double[kBufferLenghtLargerThanFile]); - EXPECT_EQ(kBufferLength, - ReadInt16FromFileToDoubleBuffer(&file, kBufferLenghtLargerThanFile, - buffer.get())); - EXPECT_DOUBLE_EQ(11544, buffer[0]); - EXPECT_DOUBLE_EQ(22377, buffer[4]); - EXPECT_DOUBLE_EQ(16389, buffer[7]); - EXPECT_DOUBLE_EQ(17631, buffer[kBufferLength - 1]); -} - -#if defined(WEBRTC_IOS) -#define MAYBE_ReadFloatBufferFromFile DISABLED_ReadFloatBufferFromFile -#else -#define MAYBE_ReadFloatBufferFromFile ReadFloatBufferFromFile -#endif -TEST_F(TransientFileUtilsTest, MAYBE_ReadFloatBufferFromFile) { - std::string test_filename = kTestFileNamef; - - FileWrapper file = FileWrapper::OpenReadOnly(test_filename); - ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" - << kTestFileNamef.c_str(); - - const size_t kBufferLength = 3; - std::unique_ptr buffer(new float[kBufferLength]); - - EXPECT_EQ(kBufferLength, - ReadFloatBufferFromFile(&file, kBufferLength, buffer.get())); - EXPECT_FLOAT_EQ(kPi, buffer[0]); - EXPECT_FLOAT_EQ(kE, buffer[1]); - EXPECT_FLOAT_EQ(kAvogadro, buffer[2]); - - file.Rewind(); - - // The next test is for checking the case where there are not as much data as - // needed in the file, but reads to the end, and it returns the number of - // doubles read. - const size_t kBufferLenghtLargerThanFile = kBufferLength * 2; - buffer.reset(new float[kBufferLenghtLargerThanFile]); - EXPECT_EQ(kBufferLength, - ReadFloatBufferFromFile(&file, kBufferLenghtLargerThanFile, - buffer.get())); - EXPECT_FLOAT_EQ(kPi, buffer[0]); - EXPECT_FLOAT_EQ(kE, buffer[1]); - EXPECT_FLOAT_EQ(kAvogadro, buffer[2]); -} - -#if defined(WEBRTC_IOS) -#define MAYBE_ReadDoubleBufferFromFile DISABLED_ReadDoubleBufferFromFile -#else -#define MAYBE_ReadDoubleBufferFromFile ReadDoubleBufferFromFile -#endif -TEST_F(TransientFileUtilsTest, MAYBE_ReadDoubleBufferFromFile) { - std::string test_filename = kTestFileName; - - FileWrapper file = FileWrapper::OpenReadOnly(test_filename); - ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" - << kTestFileName.c_str(); - - const size_t kBufferLength = 3; - std::unique_ptr buffer(new double[kBufferLength]); - - EXPECT_EQ(kBufferLength, - ReadDoubleBufferFromFile(&file, kBufferLength, buffer.get())); - EXPECT_DOUBLE_EQ(kPi, buffer[0]); - EXPECT_DOUBLE_EQ(kE, buffer[1]); - EXPECT_DOUBLE_EQ(kAvogadro, buffer[2]); - - file.Rewind(); - - // The next test is for checking the case where there are not as much data as - // needed in the file, but reads to the end, and it returns the number of - // doubles read. 
- const size_t kBufferLenghtLargerThanFile = kBufferLength * 2; - buffer.reset(new double[kBufferLenghtLargerThanFile]); - EXPECT_EQ(kBufferLength, - ReadDoubleBufferFromFile(&file, kBufferLenghtLargerThanFile, - buffer.get())); - EXPECT_DOUBLE_EQ(kPi, buffer[0]); - EXPECT_DOUBLE_EQ(kE, buffer[1]); - EXPECT_DOUBLE_EQ(kAvogadro, buffer[2]); -} - -#if defined(WEBRTC_IOS) -#define MAYBE_WriteInt16BufferToFile DISABLED_WriteInt16BufferToFile -#else -#define MAYBE_WriteInt16BufferToFile WriteInt16BufferToFile -#endif -TEST_F(TransientFileUtilsTest, MAYBE_WriteInt16BufferToFile) { - std::string kOutFileName = - CreateTempFilename(test::OutputPath(), "utils_test"); - - FileWrapper file = FileWrapper::OpenWriteOnly(kOutFileName); - ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" - << kOutFileName.c_str(); - - const size_t kBufferLength = 3; - std::unique_ptr written_buffer(new int16_t[kBufferLength]); - std::unique_ptr read_buffer(new int16_t[kBufferLength]); - - written_buffer[0] = 1; - written_buffer[1] = 2; - written_buffer[2] = 3; - - EXPECT_EQ(kBufferLength, - WriteInt16BufferToFile(&file, kBufferLength, written_buffer.get())); - - file.Close(); - - file = FileWrapper::OpenReadOnly(kOutFileName); - ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" - << kOutFileName.c_str(); - - EXPECT_EQ(kBufferLength, - ReadInt16BufferFromFile(&file, kBufferLength, read_buffer.get())); - EXPECT_EQ(0, memcmp(written_buffer.get(), read_buffer.get(), - kBufferLength * sizeof(written_buffer[0]))); -} - -#if defined(WEBRTC_IOS) -#define MAYBE_WriteFloatBufferToFile DISABLED_WriteFloatBufferToFile -#else -#define MAYBE_WriteFloatBufferToFile WriteFloatBufferToFile -#endif -TEST_F(TransientFileUtilsTest, MAYBE_WriteFloatBufferToFile) { - std::string kOutFileName = - CreateTempFilename(test::OutputPath(), "utils_test"); - - FileWrapper file = FileWrapper::OpenWriteOnly(kOutFileName); - ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" - << kOutFileName.c_str(); - - const size_t kBufferLength = 3; - std::unique_ptr written_buffer(new float[kBufferLength]); - std::unique_ptr read_buffer(new float[kBufferLength]); - - written_buffer[0] = static_cast(kPi); - written_buffer[1] = static_cast(kE); - written_buffer[2] = static_cast(kAvogadro); - - EXPECT_EQ(kBufferLength, - WriteFloatBufferToFile(&file, kBufferLength, written_buffer.get())); - - file.Close(); - - file = FileWrapper::OpenReadOnly(kOutFileName); - ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" - << kOutFileName.c_str(); - - EXPECT_EQ(kBufferLength, - ReadFloatBufferFromFile(&file, kBufferLength, read_buffer.get())); - EXPECT_EQ(0, memcmp(written_buffer.get(), read_buffer.get(), - kBufferLength * sizeof(written_buffer[0]))); -} - -#if defined(WEBRTC_IOS) -#define MAYBE_WriteDoubleBufferToFile DISABLED_WriteDoubleBufferToFile -#else -#define MAYBE_WriteDoubleBufferToFile WriteDoubleBufferToFile -#endif -TEST_F(TransientFileUtilsTest, MAYBE_WriteDoubleBufferToFile) { - std::string kOutFileName = - CreateTempFilename(test::OutputPath(), "utils_test"); - - FileWrapper file = FileWrapper::OpenWriteOnly(kOutFileName); - ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" - << kOutFileName.c_str(); - - const size_t kBufferLength = 3; - std::unique_ptr written_buffer(new double[kBufferLength]); - std::unique_ptr read_buffer(new double[kBufferLength]); - - written_buffer[0] = kPi; - written_buffer[1] = kE; - written_buffer[2] = kAvogadro; - - EXPECT_EQ(kBufferLength, WriteDoubleBufferToFile(&file, kBufferLength, 
- written_buffer.get())); - - file.Close(); - - file = FileWrapper::OpenReadOnly(kOutFileName); - ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" - << kOutFileName.c_str(); - - EXPECT_EQ(kBufferLength, - ReadDoubleBufferFromFile(&file, kBufferLength, read_buffer.get())); - EXPECT_EQ(0, memcmp(written_buffer.get(), read_buffer.get(), - kBufferLength * sizeof(written_buffer[0]))); -} - -#if defined(WEBRTC_IOS) -#define MAYBE_ExpectedErrorReturnValues DISABLED_ExpectedErrorReturnValues -#else -#define MAYBE_ExpectedErrorReturnValues ExpectedErrorReturnValues -#endif -TEST_F(TransientFileUtilsTest, MAYBE_ExpectedErrorReturnValues) { - std::string test_filename = kTestFileName; - - double value; - std::unique_ptr int16_buffer(new int16_t[1]); - std::unique_ptr double_buffer(new double[1]); - FileWrapper file; - - EXPECT_EQ(-1, ConvertByteArrayToDouble(NULL, &value)); - EXPECT_EQ(-1, ConvertByteArrayToDouble(kPiBytes, NULL)); - - EXPECT_EQ(-1, ConvertDoubleToByteArray(kPi, NULL)); - - // Tests with file not opened. - EXPECT_EQ(0u, ReadInt16BufferFromFile(&file, 1, int16_buffer.get())); - EXPECT_EQ(0u, ReadInt16FromFileToDoubleBuffer(&file, 1, double_buffer.get())); - EXPECT_EQ(0u, ReadDoubleBufferFromFile(&file, 1, double_buffer.get())); - EXPECT_EQ(0u, WriteInt16BufferToFile(&file, 1, int16_buffer.get())); - EXPECT_EQ(0u, WriteDoubleBufferToFile(&file, 1, double_buffer.get())); - - file = FileWrapper::OpenReadOnly(test_filename); - ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" - << kTestFileName.c_str(); - - EXPECT_EQ(0u, ReadInt16BufferFromFile(NULL, 1, int16_buffer.get())); - EXPECT_EQ(0u, ReadInt16BufferFromFile(&file, 1, NULL)); - EXPECT_EQ(0u, ReadInt16BufferFromFile(&file, 0, int16_buffer.get())); - - EXPECT_EQ(0u, ReadInt16FromFileToDoubleBuffer(NULL, 1, double_buffer.get())); - EXPECT_EQ(0u, ReadInt16FromFileToDoubleBuffer(&file, 1, NULL)); - EXPECT_EQ(0u, ReadInt16FromFileToDoubleBuffer(&file, 0, double_buffer.get())); - - EXPECT_EQ(0u, ReadDoubleBufferFromFile(NULL, 1, double_buffer.get())); - EXPECT_EQ(0u, ReadDoubleBufferFromFile(&file, 1, NULL)); - EXPECT_EQ(0u, ReadDoubleBufferFromFile(&file, 0, double_buffer.get())); - - EXPECT_EQ(0u, WriteInt16BufferToFile(NULL, 1, int16_buffer.get())); - EXPECT_EQ(0u, WriteInt16BufferToFile(&file, 1, NULL)); - EXPECT_EQ(0u, WriteInt16BufferToFile(&file, 0, int16_buffer.get())); - - EXPECT_EQ(0u, WriteDoubleBufferToFile(NULL, 1, double_buffer.get())); - EXPECT_EQ(0u, WriteDoubleBufferToFile(&file, 1, NULL)); - EXPECT_EQ(0u, WriteDoubleBufferToFile(&file, 0, double_buffer.get())); -} - -} // namespace webrtc diff --git a/modules/audio_processing/transient/moving_moments.cc b/modules/audio_processing/transient/moving_moments.cc deleted file mode 100644 index 83810bfe3c..0000000000 --- a/modules/audio_processing/transient/moving_moments.cc +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_processing/transient/moving_moments.h" - -#include - -#include "rtc_base/checks.h" - -namespace webrtc { - -MovingMoments::MovingMoments(size_t length) - : length_(length), queue_(), sum_(0.0), sum_of_squares_(0.0) { - RTC_DCHECK_GT(length, 0); - for (size_t i = 0; i < length; ++i) { - queue_.push(0.0); - } -} - -MovingMoments::~MovingMoments() {} - -void MovingMoments::CalculateMoments(const float* in, - size_t in_length, - float* first, - float* second) { - RTC_DCHECK(in); - RTC_DCHECK_GT(in_length, 0); - RTC_DCHECK(first); - RTC_DCHECK(second); - - for (size_t i = 0; i < in_length; ++i) { - const float old_value = queue_.front(); - queue_.pop(); - queue_.push(in[i]); - - sum_ += in[i] - old_value; - sum_of_squares_ += in[i] * in[i] - old_value * old_value; - first[i] = sum_ / length_; - second[i] = std::max(0.f, sum_of_squares_ / length_); - } -} - -} // namespace webrtc diff --git a/modules/audio_processing/transient/moving_moments.h b/modules/audio_processing/transient/moving_moments.h deleted file mode 100644 index 70451dcb71..0000000000 --- a/modules/audio_processing/transient/moving_moments.h +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_MOVING_MOMENTS_H_ -#define MODULES_AUDIO_PROCESSING_TRANSIENT_MOVING_MOMENTS_H_ - -#include - -#include - -namespace webrtc { - -// Calculates the first and second moments for each value of a buffer taking -// into account a given number of previous values. -// It preserves its state, so it can be multiple-called. -// TODO(chadan): Implement a function that takes a buffer of first moments and a -// buffer of second moments; and calculates the variances. When needed. -// TODO(chadan): Add functionality to update with a buffer but only output are -// the last values of the moments. When needed. -class MovingMoments { - public: - // Creates a Moving Moments object, that uses the last `length` values - // (including the new value introduced in every new calculation). - explicit MovingMoments(size_t length); - ~MovingMoments(); - - // Calculates the new values using `in`. Results will be in the out buffers. - // `first` and `second` must be allocated with at least `in_length`. - void CalculateMoments(const float* in, - size_t in_length, - float* first, - float* second); - - private: - size_t length_; - // A queue holding the `length_` latest input values. - std::queue queue_; - // Sum of the values of the queue. - float sum_; - // Sum of the squares of the values of the queue. - float sum_of_squares_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_MOVING_MOMENTS_H_ diff --git a/modules/audio_processing/transient/moving_moments_unittest.cc b/modules/audio_processing/transient/moving_moments_unittest.cc deleted file mode 100644 index b0e613e7ab..0000000000 --- a/modules/audio_processing/transient/moving_moments_unittest.cc +++ /dev/null @@ -1,207 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. 
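The moving-moments update deleted above is a fixed-length sliding window that starts filled with zeros; each new sample updates a running sum and sum of squares incrementally, and the outputs are the window mean and mean of squares. A compact restatement (using std::deque instead of the original std::queue), together with the constant-input case from the CorrectMomentsOfAConstantBuffer test below:

#include <cstddef>
#include <cstdio>
#include <deque>

class SlidingMoments {
 public:
  explicit SlidingMoments(std::size_t length)
      : length_(length), window_(length, 0.f) {}

  // Push one sample and output the mean and mean of squares of the window.
  void Push(float x, float* mean, float* mean_square) {
    const float old_value = window_.front();
    window_.pop_front();
    window_.push_back(x);
    sum_ += x - old_value;
    sum_of_squares_ += x * x - old_value * old_value;
    *mean = sum_ / length_;
    *mean_square = sum_of_squares_ / length_;
  }

 private:
  const std::size_t length_;
  std::deque<float> window_;
  float sum_ = 0.f;
  float sum_of_squares_ = 0.f;
};

int main() {
  // With a window of 5 and a constant input of 5.0, the mean ramps up as
  // 1, 2, 3, 4, 5, 5, ... and the mean of squares as 5, 10, 15, 20, 25, 25, ...
  SlidingMoments moments(5);
  for (int i = 0; i < 7; ++i) {
    float mean, mean_square;
    moments.Push(5.f, &mean, &mean_square);
    std::printf("%d: mean=%.1f mean_sq=%.1f\n", i, mean, mean_square);
  }
  return 0;
}
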
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/transient/moving_moments.h" - -#include - -#include "test/gtest.h" - -namespace webrtc { - -static const float kTolerance = 0.0001f; - -class MovingMomentsTest : public ::testing::Test { - protected: - static const size_t kMovingMomentsBufferLength = 5; - static const size_t kMaxOutputLength = 20; // Valid for this tests only. - - virtual void SetUp(); - // Calls CalculateMoments and verifies that it produces the expected - // outputs. - void CalculateMomentsAndVerify(const float* input, - size_t input_length, - const float* expected_mean, - const float* expected_mean_squares); - - std::unique_ptr moving_moments_; - float output_mean_[kMaxOutputLength]; - float output_mean_squares_[kMaxOutputLength]; -}; - -const size_t MovingMomentsTest::kMaxOutputLength; - -void MovingMomentsTest::SetUp() { - moving_moments_.reset(new MovingMoments(kMovingMomentsBufferLength)); -} - -void MovingMomentsTest::CalculateMomentsAndVerify( - const float* input, - size_t input_length, - const float* expected_mean, - const float* expected_mean_squares) { - ASSERT_LE(input_length, kMaxOutputLength); - - moving_moments_->CalculateMoments(input, input_length, output_mean_, - output_mean_squares_); - - for (size_t i = 1; i < input_length; ++i) { - EXPECT_NEAR(expected_mean[i], output_mean_[i], kTolerance); - EXPECT_NEAR(expected_mean_squares[i], output_mean_squares_[i], kTolerance); - } -} - -TEST_F(MovingMomentsTest, CorrectMomentsOfAnAllZerosBuffer) { - const float kInput[] = {0.f, 0.f, 0.f, 0.f, 0.f}; - const size_t kInputLength = sizeof(kInput) / sizeof(kInput[0]); - - const float expected_mean[kInputLength] = {0.f, 0.f, 0.f, 0.f, 0.f}; - const float expected_mean_squares[kInputLength] = {0.f, 0.f, 0.f, 0.f, 0.f}; - - CalculateMomentsAndVerify(kInput, kInputLength, expected_mean, - expected_mean_squares); -} - -TEST_F(MovingMomentsTest, CorrectMomentsOfAConstantBuffer) { - const float kInput[] = {5.f, 5.f, 5.f, 5.f, 5.f, 5.f, 5.f, 5.f, 5.f, 5.f}; - const size_t kInputLength = sizeof(kInput) / sizeof(kInput[0]); - - const float expected_mean[kInputLength] = {1.f, 2.f, 3.f, 4.f, 5.f, - 5.f, 5.f, 5.f, 5.f, 5.f}; - const float expected_mean_squares[kInputLength] = { - 5.f, 10.f, 15.f, 20.f, 25.f, 25.f, 25.f, 25.f, 25.f, 25.f}; - - CalculateMomentsAndVerify(kInput, kInputLength, expected_mean, - expected_mean_squares); -} - -TEST_F(MovingMomentsTest, CorrectMomentsOfAnIncreasingBuffer) { - const float kInput[] = {1.f, 2.f, 3.f, 4.f, 5.f, 6.f, 7.f, 8.f, 9.f}; - const size_t kInputLength = sizeof(kInput) / sizeof(kInput[0]); - - const float expected_mean[kInputLength] = {0.2f, 0.6f, 1.2f, 2.f, 3.f, - 4.f, 5.f, 6.f, 7.f}; - const float expected_mean_squares[kInputLength] = { - 0.2f, 1.f, 2.8f, 6.f, 11.f, 18.f, 27.f, 38.f, 51.f}; - - CalculateMomentsAndVerify(kInput, kInputLength, expected_mean, - expected_mean_squares); -} - -TEST_F(MovingMomentsTest, CorrectMomentsOfADecreasingBuffer) { - const float kInput[] = {-1.f, -2.f, -3.f, -4.f, -5.f, -6.f, -7.f, -8.f, -9.f}; - const size_t kInputLength = sizeof(kInput) / sizeof(kInput[0]); - - const float expected_mean[kInputLength] = {-0.2f, -0.6f, -1.2f, -2.f, -3.f, - -4.f, -5.f, -6.f, -7.f}; - const float 
expected_mean_squares[kInputLength] = { - 0.2f, 1.f, 2.8f, 6.f, 11.f, 18.f, 27.f, 38.f, 51.f}; - - CalculateMomentsAndVerify(kInput, kInputLength, expected_mean, - expected_mean_squares); -} - -TEST_F(MovingMomentsTest, CorrectMomentsOfAZeroMeanSequence) { - const size_t kMovingMomentsBufferLength = 4; - moving_moments_.reset(new MovingMoments(kMovingMomentsBufferLength)); - const float kInput[] = {1.f, -1.f, 1.f, -1.f, 1.f, - -1.f, 1.f, -1.f, 1.f, -1.f}; - const size_t kInputLength = sizeof(kInput) / sizeof(kInput[0]); - - const float expected_mean[kInputLength] = {0.25f, 0.f, 0.25f, 0.f, 0.f, - 0.f, 0.f, 0.f, 0.f, 0.f}; - const float expected_mean_squares[kInputLength] = { - 0.25f, 0.5f, 0.75f, 1.f, 1.f, 1.f, 1.f, 1.f, 1.f, 1.f}; - - CalculateMomentsAndVerify(kInput, kInputLength, expected_mean, - expected_mean_squares); -} - -TEST_F(MovingMomentsTest, CorrectMomentsOfAnArbitraryBuffer) { - const float kInput[] = {0.2f, 0.3f, 0.5f, 0.7f, 0.11f, - 0.13f, 0.17f, 0.19f, 0.23f}; - const size_t kInputLength = sizeof(kInput) / sizeof(kInput[0]); - - const float expected_mean[kInputLength] = { - 0.04f, 0.1f, 0.2f, 0.34f, 0.362f, 0.348f, 0.322f, 0.26f, 0.166f}; - const float expected_mean_squares[kInputLength] = {0.008f, 0.026f, 0.076f, - 0.174f, 0.1764f, 0.1718f, - 0.1596f, 0.1168f, 0.0294f}; - - CalculateMomentsAndVerify(kInput, kInputLength, expected_mean, - expected_mean_squares); -} - -TEST_F(MovingMomentsTest, MutipleCalculateMomentsCalls) { - const float kInputFirstCall[] = {0.2f, 0.3f, 0.5f, 0.7f, 0.11f, - 0.13f, 0.17f, 0.19f, 0.23f}; - const size_t kInputFirstCallLength = - sizeof(kInputFirstCall) / sizeof(kInputFirstCall[0]); - const float kInputSecondCall[] = {0.29f, 0.31f}; - const size_t kInputSecondCallLength = - sizeof(kInputSecondCall) / sizeof(kInputSecondCall[0]); - const float kInputThirdCall[] = {0.37f, 0.41f, 0.43f, 0.47f}; - const size_t kInputThirdCallLength = - sizeof(kInputThirdCall) / sizeof(kInputThirdCall[0]); - - const float expected_mean_first_call[kInputFirstCallLength] = { - 0.04f, 0.1f, 0.2f, 0.34f, 0.362f, 0.348f, 0.322f, 0.26f, 0.166f}; - const float expected_mean_squares_first_call[kInputFirstCallLength] = { - 0.008f, 0.026f, 0.076f, 0.174f, 0.1764f, - 0.1718f, 0.1596f, 0.1168f, 0.0294f}; - - const float expected_mean_second_call[kInputSecondCallLength] = {0.202f, - 0.238f}; - const float expected_mean_squares_second_call[kInputSecondCallLength] = { - 0.0438f, 0.0596f}; - - const float expected_mean_third_call[kInputThirdCallLength] = { - 0.278f, 0.322f, 0.362f, 0.398f}; - const float expected_mean_squares_third_call[kInputThirdCallLength] = { - 0.0812f, 0.1076f, 0.134f, 0.1614f}; - - CalculateMomentsAndVerify(kInputFirstCall, kInputFirstCallLength, - expected_mean_first_call, - expected_mean_squares_first_call); - - CalculateMomentsAndVerify(kInputSecondCall, kInputSecondCallLength, - expected_mean_second_call, - expected_mean_squares_second_call); - - CalculateMomentsAndVerify(kInputThirdCall, kInputThirdCallLength, - expected_mean_third_call, - expected_mean_squares_third_call); -} - -TEST_F(MovingMomentsTest, VerifySampleBasedVsBlockBasedCalculation) { - const float kInput[] = {0.2f, 0.3f, 0.5f, 0.7f, 0.11f, - 0.13f, 0.17f, 0.19f, 0.23f}; - const size_t kInputLength = sizeof(kInput) / sizeof(kInput[0]); - - float output_mean_block_based[kInputLength]; - float output_mean_squares_block_based[kInputLength]; - - float output_mean_sample_based; - float output_mean_squares_sample_based; - - moving_moments_->CalculateMoments(kInput, kInputLength, - 
output_mean_block_based, - output_mean_squares_block_based); - moving_moments_.reset(new MovingMoments(kMovingMomentsBufferLength)); - for (size_t i = 0; i < kInputLength; ++i) { - moving_moments_->CalculateMoments(&kInput[i], 1, &output_mean_sample_based, - &output_mean_squares_sample_based); - EXPECT_FLOAT_EQ(output_mean_block_based[i], output_mean_sample_based); - EXPECT_FLOAT_EQ(output_mean_squares_block_based[i], - output_mean_squares_sample_based); - } -} - -} // namespace webrtc diff --git a/modules/audio_processing/transient/test/plotDetection.m b/modules/audio_processing/transient/test/plotDetection.m deleted file mode 100644 index 8e12ab920b..0000000000 --- a/modules/audio_processing/transient/test/plotDetection.m +++ /dev/null @@ -1,22 +0,0 @@ -% -% Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. -% -% Use of this source code is governed by a BSD-style license -% that can be found in the LICENSE file in the root of the source -% tree. An additional intellectual property rights grant can be found -% in the file PATENTS. All contributing project authors may -% be found in the AUTHORS file in the root of the source tree. -% - -function [] = plotDetection(PCMfile, DATfile, fs, chunkSize) -%[] = plotDetection(PCMfile, DATfile, fs, chunkSize) -% -%Plots the signal alongside the detection values. -% -%PCMfile: The file of the input signal in PCM format. -%DATfile: The file containing the detection values in binary float format. -%fs: The sample rate of the signal in Hertz. -%chunkSize: The chunk size used to compute the detection values in seconds. -[x, tx] = readPCM(PCMfile, fs); -[d, td] = readDetection(DATfile, fs, chunkSize); -plot(tx, x, td, d); diff --git a/modules/audio_processing/transient/test/readDetection.m b/modules/audio_processing/transient/test/readDetection.m deleted file mode 100644 index 832bf31ec8..0000000000 --- a/modules/audio_processing/transient/test/readDetection.m +++ /dev/null @@ -1,26 +0,0 @@ -% -% Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. -% -% Use of this source code is governed by a BSD-style license -% that can be found in the LICENSE file in the root of the source -% tree. An additional intellectual property rights grant can be found -% in the file PATENTS. All contributing project authors may -% be found in the AUTHORS file in the root of the source tree. -% - -function [d, t] = readDetection(file, fs, chunkSize) -%[d, t] = readDetection(file, fs, chunkSize) -% -%Reads a detection signal from a DAT file. -% -%d: The detection signal. -%t: The respective time vector. -% -%file: The DAT file where the detection signal is stored in float format. -%fs: The signal sample rate in Hertz. -%chunkSize: The chunk size used for the detection in seconds. -fid = fopen(file); -d = fread(fid, inf, 'float'); -fclose(fid); -t = 0:(1 / fs):(length(d) * chunkSize - 1 / fs); -d = d(floor(t / chunkSize) + 1); diff --git a/modules/audio_processing/transient/test/readPCM.m b/modules/audio_processing/transient/test/readPCM.m deleted file mode 100644 index cd3cef8a3c..0000000000 --- a/modules/audio_processing/transient/test/readPCM.m +++ /dev/null @@ -1,26 +0,0 @@ -% -% Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. -% -% Use of this source code is governed by a BSD-style license -% that can be found in the LICENSE file in the root of the source -% tree. An additional intellectual property rights grant can be found -% in the file PATENTS. 
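The deleted readPCM.m script (whose body follows) normalizes the int16 signal by removing its mean and scaling it to unit peak before plotting. An equivalent C++ fragment, with made-up sample values standing in for the file contents, is:

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
  // Stand-in for samples read from a PCM file.
  const std::vector<int16_t> pcm = {0, 1000, -2000, 4000, -8000};
  std::vector<float> x(pcm.begin(), pcm.end());

  // Remove the DC offset.
  float mean = 0.f;
  for (float v : x) mean += v;
  mean /= x.size();
  for (float& v : x) v -= mean;

  // Scale so the peak magnitude is 1.
  float peak = 0.f;
  for (float v : x) peak = std::max(peak, std::fabs(v));
  if (peak > 0.f) {
    for (float& v : x) v /= peak;
  }

  for (float v : x) std::printf("%f\n", v);
  return 0;
}
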
All contributing project authors may -% be found in the AUTHORS file in the root of the source tree. -% - -function [x, t] = readPCM(file, fs) -%[x, t] = readPCM(file, fs) -% -%Reads a signal from a PCM file. -% -%x: The read signal after normalization. -%t: The respective time vector. -% -%file: The PCM file where the signal is stored in int16 format. -%fs: The signal sample rate in Hertz. -fid = fopen(file); -x = fread(fid, inf, 'int16'); -fclose(fid); -x = x - mean(x); -x = x / max(abs(x)); -t = 0:(1 / fs):((length(x) - 1) / fs); diff --git a/modules/audio_processing/transient/transient_detector.cc b/modules/audio_processing/transient/transient_detector.cc deleted file mode 100644 index 5c35505368..0000000000 --- a/modules/audio_processing/transient/transient_detector.cc +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/transient/transient_detector.h" - -#include -#include - -#include -#include - -#include "modules/audio_processing/transient/common.h" -#include "modules/audio_processing/transient/daubechies_8_wavelet_coeffs.h" -#include "modules/audio_processing/transient/moving_moments.h" -#include "modules/audio_processing/transient/wpd_node.h" -#include "modules/audio_processing/transient/wpd_tree.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -static const int kTransientLengthMs = 30; -static const int kChunksAtStartupLeftToDelete = - kTransientLengthMs / ts::kChunkSizeMs; -static const float kDetectThreshold = 16.f; - -TransientDetector::TransientDetector(int sample_rate_hz) - : samples_per_chunk_(sample_rate_hz * ts::kChunkSizeMs / 1000), - last_first_moment_(), - last_second_moment_(), - chunks_at_startup_left_to_delete_(kChunksAtStartupLeftToDelete), - reference_energy_(1.f), - using_reference_(false) { - RTC_DCHECK(sample_rate_hz == ts::kSampleRate8kHz || - sample_rate_hz == ts::kSampleRate16kHz || - sample_rate_hz == ts::kSampleRate32kHz || - sample_rate_hz == ts::kSampleRate48kHz); - int samples_per_transient = sample_rate_hz * kTransientLengthMs / 1000; - // Adjustment to avoid data loss while downsampling, making - // `samples_per_chunk_` and `samples_per_transient` always divisible by - // `kLeaves`. 
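To make the downsampling adjustment described above (and carried out just below) concrete: with kChunkSizeMs = 10 from transient/common.h, kTransientLengthMs = 30, and kLeaves = 1 << 3 = 8 from transient_detector.h, a 16 kHz input gives 160 samples per chunk and 480 samples per transient, both already divisible by 8, so each of the 8 tree leaves processes 20 samples per chunk with a 60-sample moving-moments window. Worked out in code (example rate only):

#include <cstdio>

int main() {
  const int sample_rate_hz = 16000;   // one of the supported rates
  const int kChunkSizeMs = 10;        // ts::kChunkSizeMs
  const int kTransientLengthMs = 30;  // kTransientLengthMs above
  const int kLeaves = 8;              // 1 << kLevels, kLevels = 3

  int samples_per_chunk = sample_rate_hz * kChunkSizeMs / 1000;           // 160
  int samples_per_transient = sample_rate_hz * kTransientLengthMs / 1000;  // 480
  samples_per_chunk -= samples_per_chunk % kLeaves;          // stays 160
  samples_per_transient -= samples_per_transient % kLeaves;  // stays 480

  std::printf("leaf data length per chunk: %d\n",
              samples_per_chunk / kLeaves);  // 20
  std::printf("moving-moments window:      %d\n",
              samples_per_transient / kLeaves);  // 60
  return 0;
}
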
- samples_per_chunk_ -= samples_per_chunk_ % kLeaves; - samples_per_transient -= samples_per_transient % kLeaves; - - tree_leaves_data_length_ = samples_per_chunk_ / kLeaves; - wpd_tree_.reset(new WPDTree(samples_per_chunk_, - kDaubechies8HighPassCoefficients, - kDaubechies8LowPassCoefficients, - kDaubechies8CoefficientsLength, kLevels)); - for (size_t i = 0; i < kLeaves; ++i) { - moving_moments_[i].reset( - new MovingMoments(samples_per_transient / kLeaves)); - } - - first_moments_.reset(new float[tree_leaves_data_length_]); - second_moments_.reset(new float[tree_leaves_data_length_]); - - for (int i = 0; i < kChunksAtStartupLeftToDelete; ++i) { - previous_results_.push_back(0.f); - } -} - -TransientDetector::~TransientDetector() {} - -float TransientDetector::Detect(const float* data, - size_t data_length, - const float* reference_data, - size_t reference_length) { - RTC_DCHECK(data); - RTC_DCHECK_EQ(samples_per_chunk_, data_length); - - // TODO(aluebs): Check if these errors can logically happen and if not assert - // on them. - if (wpd_tree_->Update(data, samples_per_chunk_) != 0) { - return -1.f; - } - - float result = 0.f; - - for (size_t i = 0; i < kLeaves; ++i) { - WPDNode* leaf = wpd_tree_->NodeAt(kLevels, i); - - moving_moments_[i]->CalculateMoments(leaf->data(), tree_leaves_data_length_, - first_moments_.get(), - second_moments_.get()); - - // Add value delayed (Use the last moments from the last call to Detect). - float unbiased_data = leaf->data()[0] - last_first_moment_[i]; - result += - unbiased_data * unbiased_data / (last_second_moment_[i] + FLT_MIN); - - // Add new values. - for (size_t j = 1; j < tree_leaves_data_length_; ++j) { - unbiased_data = leaf->data()[j] - first_moments_[j - 1]; - result += - unbiased_data * unbiased_data / (second_moments_[j - 1] + FLT_MIN); - } - - last_first_moment_[i] = first_moments_[tree_leaves_data_length_ - 1]; - last_second_moment_[i] = second_moments_[tree_leaves_data_length_ - 1]; - } - - result /= tree_leaves_data_length_; - - result *= ReferenceDetectionValue(reference_data, reference_length); - - if (chunks_at_startup_left_to_delete_ > 0) { - chunks_at_startup_left_to_delete_--; - result = 0.f; - } - - if (result >= kDetectThreshold) { - result = 1.f; - } else { - // Get proportional value. - // Proportion achieved with a squared raised cosine function with domain - // [0, kDetectThreshold) and image [0, 1), it's always increasing. - const float horizontal_scaling = ts::kPi / kDetectThreshold; - const float kHorizontalShift = ts::kPi; - const float kVerticalScaling = 0.5f; - const float kVerticalShift = 1.f; - - result = (std::cos(result * horizontal_scaling + kHorizontalShift) + - kVerticalShift) * - kVerticalScaling; - result *= result; - } - - previous_results_.pop_front(); - previous_results_.push_back(result); - - // In the current implementation we return the max of the current result and - // the previous results, so the high results have a width equals to - // `transient_length`. - return *std::max_element(previous_results_.begin(), previous_results_.end()); -} - -// Looks for the highest slope and compares it with the previous ones. -// An exponential transformation takes this to the [0, 1] range. This value is -// multiplied by the detection result to avoid false positives. 
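The proportional mapping applied in Detect() above (for results below kDetectThreshold) has a simple closed form: with the shift of pi it equals ((1 - cos(pi * r / 16)) / 2)^2, which grows monotonically from 0 at r = 0 to 1 at r = kDetectThreshold. A small standalone sketch, assuming ts::kPi is just pi, that checks the endpoints:

  #include <cmath>
  #include <cstdio>

  // Same shape as the mapping in TransientDetector::Detect(): a squared,
  // raised cosine on [0, kDetectThreshold) that grows from 0 to 1.
  float MapDetection(float result) {
    const float kDetectThreshold = 16.f;
    const float kPi = 3.14159265358979323846f;  // assumed value of ts::kPi
    if (result >= kDetectThreshold) return 1.f;
    const float horizontal_scaling = kPi / kDetectThreshold;
    float mapped = (std::cos(result * horizontal_scaling + kPi) + 1.f) * 0.5f;
    return mapped * mapped;  // equivalently pow(sin(kPi * result / 32), 4)
  }

  int main() {
    std::printf("f(0)=%f f(8)=%f f(15.9)=%f f(16)=%f\n", MapDetection(0.f),
                MapDetection(8.f), MapDetection(15.9f), MapDetection(16.f));
    return 0;  // prints 0, 0.25, ~1 and 1
  }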
-float TransientDetector::ReferenceDetectionValue(const float* data, - size_t length) { - if (data == NULL) { - using_reference_ = false; - return 1.f; - } - static const float kEnergyRatioThreshold = 0.2f; - static const float kReferenceNonLinearity = 20.f; - static const float kMemory = 0.99f; - float reference_energy = 0.f; - for (size_t i = 1; i < length; ++i) { - reference_energy += data[i] * data[i]; - } - if (reference_energy == 0.f) { - using_reference_ = false; - return 1.f; - } - RTC_DCHECK_NE(0, reference_energy_); - float result = 1.f / (1.f + std::exp(kReferenceNonLinearity * - (kEnergyRatioThreshold - - reference_energy / reference_energy_))); - reference_energy_ = - kMemory * reference_energy_ + (1.f - kMemory) * reference_energy; - - using_reference_ = true; - - return result; -} - -} // namespace webrtc diff --git a/modules/audio_processing/transient/transient_detector.h b/modules/audio_processing/transient/transient_detector.h deleted file mode 100644 index a3dbb7ffde..0000000000 --- a/modules/audio_processing/transient/transient_detector.h +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_DETECTOR_H_ -#define MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_DETECTOR_H_ - -#include - -#include -#include - -#include "modules/audio_processing/transient/moving_moments.h" -#include "modules/audio_processing/transient/wpd_tree.h" - -namespace webrtc { - -// This is an implementation of the transient detector described in "Causal -// Wavelet based transient detector". -// Calculates the log-likelihood of a transient to happen on a signal at any -// given time based on the previous samples; it uses a WPD tree to analyze the -// signal. It preserves its state, so it can be multiple-called. -class TransientDetector { - public: - // TODO(chadan): The only supported wavelet is Daubechies 8 using a WPD tree - // of 3 levels. Make an overloaded constructor to allow different wavelets and - // depths of the tree. When needed. - - // Creates a wavelet based transient detector. - TransientDetector(int sample_rate_hz); - - ~TransientDetector(); - - // Calculates the log-likelihood of the existence of a transient in `data`. - // `data_length` has to be equal to `samples_per_chunk_`. - // Returns a value between 0 and 1, as a non linear representation of this - // likelihood. - // Returns a negative value on error. - float Detect(const float* data, - size_t data_length, - const float* reference_data, - size_t reference_length); - - bool using_reference() { return using_reference_; } - - private: - float ReferenceDetectionValue(const float* data, size_t length); - - static const size_t kLevels = 3; - static const size_t kLeaves = 1 << kLevels; - - size_t samples_per_chunk_; - - std::unique_ptr wpd_tree_; - size_t tree_leaves_data_length_; - - // A MovingMoments object is needed for each leaf in the WPD tree. - std::unique_ptr moving_moments_[kLeaves]; - - std::unique_ptr first_moments_; - std::unique_ptr second_moments_; - - // Stores the last calculated moments from the previous detection. 
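Stepping back to ReferenceDetectionValue() in transient_detector.cc above: the logistic curve it applies to the energy ratio acts as a soft gate on the detection result. A small sketch of that curve with a few example ratios, using only the constants visible in the function:

  #include <cmath>
  #include <cstdio>

  // Mirrors the gating curve of ReferenceDetectionValue(): a logistic function
  // of the ratio between the current reference energy and its running average.
  float ReferenceGate(float energy_ratio) {
    const float kEnergyRatioThreshold = 0.2f;
    const float kReferenceNonLinearity = 20.f;
    return 1.f /
           (1.f + std::exp(kReferenceNonLinearity *
                           (kEnergyRatioThreshold - energy_ratio)));
  }

  int main() {
    // A quiet reference (~10% of its running average) nearly cancels the
    // detection; a reference at or above its average leaves it untouched.
    std::printf("ratio 0.1 -> %.3f\n", ReferenceGate(0.1f));  // ~0.12
    std::printf("ratio 0.2 -> %.3f\n", ReferenceGate(0.2f));  // 0.5
    std::printf("ratio 1.0 -> %.3f\n", ReferenceGate(1.0f));  // ~1.0
    return 0;
  }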
- float last_first_moment_[kLeaves]; - float last_second_moment_[kLeaves]; - - // We keep track of the previous results from the previous chunks, so it can - // be used to effectively give results according to the `transient_length`. - std::deque previous_results_; - - // Number of chunks that are going to return only zeros at the beginning of - // the detection. It helps to avoid infs and nans due to the lack of - // information. - int chunks_at_startup_left_to_delete_; - - float reference_energy_; - - bool using_reference_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_DETECTOR_H_ diff --git a/modules/audio_processing/transient/transient_detector_unittest.cc b/modules/audio_processing/transient/transient_detector_unittest.cc deleted file mode 100644 index a7364626fd..0000000000 --- a/modules/audio_processing/transient/transient_detector_unittest.cc +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/transient/transient_detector.h" - -#include -#include - -#include "modules/audio_processing/transient/common.h" -#include "modules/audio_processing/transient/file_utils.h" -#include "rtc_base/strings/string_builder.h" -#include "rtc_base/system/file_wrapper.h" -#include "test/gtest.h" -#include "test/testsupport/file_utils.h" - -namespace webrtc { - -static const int kSampleRatesHz[] = {ts::kSampleRate8kHz, ts::kSampleRate16kHz, - ts::kSampleRate32kHz, - ts::kSampleRate48kHz}; -static const size_t kNumberOfSampleRates = - sizeof(kSampleRatesHz) / sizeof(*kSampleRatesHz); - -// This test is for the correctness of the transient detector. -// Checks the results comparing them with the ones stored in the detect files in -// the directory: resources/audio_processing/transient/ -// The files contain all the results in double precision (Little endian). -// The audio files used with different sample rates are stored in the same -// directory. -#if defined(WEBRTC_IOS) -TEST(TransientDetectorTest, DISABLED_CorrectnessBasedOnFiles) { -#else -TEST(TransientDetectorTest, CorrectnessBasedOnFiles) { -#endif - for (size_t i = 0; i < kNumberOfSampleRates; ++i) { - int sample_rate_hz = kSampleRatesHz[i]; - - // Prepare detect file. - rtc::StringBuilder detect_file_name; - detect_file_name << "audio_processing/transient/detect" - << (sample_rate_hz / 1000) << "kHz"; - - FileWrapper detect_file = FileWrapper::OpenReadOnly( - test::ResourcePath(detect_file_name.str(), "dat")); - - bool file_opened = detect_file.is_open(); - ASSERT_TRUE(file_opened) << "File could not be opened.\n" - << detect_file_name.str().c_str(); - - // Prepare audio file. - rtc::StringBuilder audio_file_name; - audio_file_name << "audio_processing/transient/audio" - << (sample_rate_hz / 1000) << "kHz"; - - FileWrapper audio_file = FileWrapper::OpenReadOnly( - test::ResourcePath(audio_file_name.str(), "pcm")); - - // Create detector. 
- TransientDetector detector(sample_rate_hz); - - const size_t buffer_length = sample_rate_hz * ts::kChunkSizeMs / 1000; - std::unique_ptr buffer(new float[buffer_length]); - - const float kTolerance = 0.02f; - - size_t frames_read = 0; - - while (ReadInt16FromFileToFloatBuffer(&audio_file, buffer_length, - buffer.get()) == buffer_length) { - ++frames_read; - - float detector_value = - detector.Detect(buffer.get(), buffer_length, NULL, 0); - double file_value; - ASSERT_EQ(1u, ReadDoubleBufferFromFile(&detect_file, 1, &file_value)) - << "Detect test file is malformed.\n"; - - // Compare results with data from the matlab test file. - EXPECT_NEAR(file_value, detector_value, kTolerance) - << "Frame: " << frames_read; - } - - detect_file.Close(); - audio_file.Close(); - } -} - -} // namespace webrtc diff --git a/modules/audio_processing/transient/transient_suppression_test.cc b/modules/audio_processing/transient/transient_suppression_test.cc deleted file mode 100644 index 2d8baf9416..0000000000 --- a/modules/audio_processing/transient/transient_suppression_test.cc +++ /dev/null @@ -1,238 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include -#include -#include - -#include -#include -#include - -#include "absl/flags/flag.h" -#include "absl/flags/parse.h" -#include "common_audio/include/audio_util.h" -#include "modules/audio_processing/agc/agc.h" -#include "modules/audio_processing/transient/transient_suppressor.h" -#include "modules/audio_processing/transient/transient_suppressor_impl.h" -#include "test/gtest.h" -#include "test/testsupport/file_utils.h" - -ABSL_FLAG(std::string, in_file_name, "", "PCM file that contains the signal."); -ABSL_FLAG(std::string, - detection_file_name, - "", - "PCM file that contains the detection signal."); -ABSL_FLAG(std::string, - reference_file_name, - "", - "PCM file that contains the reference signal."); - -ABSL_FLAG(int, - chunk_size_ms, - 10, - "Time between each chunk of samples in milliseconds."); - -ABSL_FLAG(int, - sample_rate_hz, - 16000, - "Sampling frequency of the signal in Hertz."); -ABSL_FLAG(int, - detection_rate_hz, - 0, - "Sampling frequency of the detection signal in Hertz."); - -ABSL_FLAG(int, num_channels, 1, "Number of channels."); - -namespace webrtc { - -const char kUsage[] = - "\nDetects and suppresses transients from file.\n\n" - "This application loads the signal from the in_file_name with a specific\n" - "num_channels and sample_rate_hz, the detection signal from the\n" - "detection_file_name with a specific detection_rate_hz, and the reference\n" - "signal from the reference_file_name with sample_rate_hz, divides them\n" - "into chunk_size_ms blocks, computes its voice value and depending on the\n" - "voice_threshold does the respective restoration. You can always get the\n" - "all-voiced or all-unvoiced cases by setting the voice_threshold to 0 or\n" - "1 respectively.\n\n"; - -// Read next buffers from the test files (signed 16-bit host-endian PCM -// format). audio_buffer has int16 samples, detection_buffer has float samples -// with range [-32768,32767], and reference_buffer has float samples with range -// [-1,1]. 
Return true iff all the buffers were filled completely. -bool ReadBuffers(FILE* in_file, - size_t audio_buffer_size, - int num_channels, - int16_t* audio_buffer, - FILE* detection_file, - size_t detection_buffer_size, - float* detection_buffer, - FILE* reference_file, - float* reference_buffer) { - std::unique_ptr tmpbuf; - int16_t* read_ptr = audio_buffer; - if (num_channels > 1) { - tmpbuf.reset(new int16_t[num_channels * audio_buffer_size]); - read_ptr = tmpbuf.get(); - } - if (fread(read_ptr, sizeof(*read_ptr), num_channels * audio_buffer_size, - in_file) != num_channels * audio_buffer_size) { - return false; - } - // De-interleave. - if (num_channels > 1) { - for (int i = 0; i < num_channels; ++i) { - for (size_t j = 0; j < audio_buffer_size; ++j) { - audio_buffer[i * audio_buffer_size + j] = - read_ptr[i + j * num_channels]; - } - } - } - if (detection_file) { - std::unique_ptr ibuf(new int16_t[detection_buffer_size]); - if (fread(ibuf.get(), sizeof(ibuf[0]), detection_buffer_size, - detection_file) != detection_buffer_size) - return false; - for (size_t i = 0; i < detection_buffer_size; ++i) - detection_buffer[i] = ibuf[i]; - } - if (reference_file) { - std::unique_ptr ibuf(new int16_t[audio_buffer_size]); - if (fread(ibuf.get(), sizeof(ibuf[0]), audio_buffer_size, reference_file) != - audio_buffer_size) - return false; - S16ToFloat(ibuf.get(), audio_buffer_size, reference_buffer); - } - return true; -} - -// Write a number of samples to an open signed 16-bit host-endian PCM file. -static void WritePCM(FILE* f, - size_t num_samples, - int num_channels, - const float* buffer) { - std::unique_ptr ibuf(new int16_t[num_channels * num_samples]); - // Interleave. - for (int i = 0; i < num_channels; ++i) { - for (size_t j = 0; j < num_samples; ++j) { - ibuf[i + j * num_channels] = FloatS16ToS16(buffer[i * num_samples + j]); - } - } - fwrite(ibuf.get(), sizeof(ibuf[0]), num_channels * num_samples, f); -} - -// This application tests the transient suppression by providing a processed -// PCM file, which has to be listened to in order to evaluate the -// performance. -// It gets an audio file, and its voice gain information, and the suppressor -// process it giving the output file "suppressed_keystrokes.pcm". -void void_main() { - // TODO(aluebs): Remove all FileWrappers. - // Prepare the input file. - FILE* in_file = fopen(absl::GetFlag(FLAGS_in_file_name).c_str(), "rb"); - ASSERT_TRUE(in_file != NULL); - - // Prepare the detection file. - FILE* detection_file = NULL; - if (!absl::GetFlag(FLAGS_detection_file_name).empty()) { - detection_file = - fopen(absl::GetFlag(FLAGS_detection_file_name).c_str(), "rb"); - } - - // Prepare the reference file. - FILE* reference_file = NULL; - if (!absl::GetFlag(FLAGS_reference_file_name).empty()) { - reference_file = - fopen(absl::GetFlag(FLAGS_reference_file_name).c_str(), "rb"); - } - - // Prepare the output file. 
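Before moving on, a minimal sketch of the channel-layout conversion performed by ReadBuffers() and WritePCM() above: the PCM files store interleaved int16 frames, while the suppressor expects each channel's samples concatenated (channel-major).

  #include <cstdint>
  #include <cstdio>

  int main() {
    // Two channels, three samples per channel, interleaved as L0 R0 L1 R1 L2 R2.
    const int num_channels = 2;
    const int num_samples = 3;
    const int16_t interleaved[] = {10, 20, 11, 21, 12, 22};
    int16_t planar[num_channels * num_samples];
    // Same indexing as the de-interleave loop in ReadBuffers().
    for (int i = 0; i < num_channels; ++i) {
      for (int j = 0; j < num_samples; ++j) {
        planar[i * num_samples + j] = interleaved[i + j * num_channels];
      }
    }
    for (int k = 0; k < num_channels * num_samples; ++k) {
      std::printf("%d ", planar[k]);  // prints: 10 11 12 20 21 22
    }
    std::printf("\n");
    return 0;
  }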
- std::string out_file_name = test::OutputPath() + "suppressed_keystrokes.pcm"; - FILE* out_file = fopen(out_file_name.c_str(), "wb"); - ASSERT_TRUE(out_file != NULL); - - int detection_rate_hz = absl::GetFlag(FLAGS_detection_rate_hz); - if (detection_rate_hz == 0) { - detection_rate_hz = absl::GetFlag(FLAGS_sample_rate_hz); - } - - Agc agc; - - TransientSuppressorImpl suppressor(TransientSuppressor::VadMode::kDefault, - absl::GetFlag(FLAGS_sample_rate_hz), - detection_rate_hz, - absl::GetFlag(FLAGS_num_channels)); - - const size_t audio_buffer_size = absl::GetFlag(FLAGS_chunk_size_ms) * - absl::GetFlag(FLAGS_sample_rate_hz) / 1000; - const size_t detection_buffer_size = - absl::GetFlag(FLAGS_chunk_size_ms) * detection_rate_hz / 1000; - - // int16 and float variants of the same data. - std::unique_ptr audio_buffer_i( - new int16_t[absl::GetFlag(FLAGS_num_channels) * audio_buffer_size]); - std::unique_ptr audio_buffer_f( - new float[absl::GetFlag(FLAGS_num_channels) * audio_buffer_size]); - - std::unique_ptr detection_buffer, reference_buffer; - - if (detection_file) - detection_buffer.reset(new float[detection_buffer_size]); - if (reference_file) - reference_buffer.reset(new float[audio_buffer_size]); - - while (ReadBuffers( - in_file, audio_buffer_size, absl::GetFlag(FLAGS_num_channels), - audio_buffer_i.get(), detection_file, detection_buffer_size, - detection_buffer.get(), reference_file, reference_buffer.get())) { - agc.Process({audio_buffer_i.get(), audio_buffer_size}); - - for (size_t i = 0; - i < absl::GetFlag(FLAGS_num_channels) * audio_buffer_size; ++i) { - audio_buffer_f[i] = audio_buffer_i[i]; - } - - suppressor.Suppress(audio_buffer_f.get(), audio_buffer_size, - absl::GetFlag(FLAGS_num_channels), - detection_buffer.get(), detection_buffer_size, - reference_buffer.get(), audio_buffer_size, - agc.voice_probability(), true); - - // Write result to out file. - WritePCM(out_file, audio_buffer_size, absl::GetFlag(FLAGS_num_channels), - audio_buffer_f.get()); - } - - fclose(in_file); - if (detection_file) { - fclose(detection_file); - } - if (reference_file) { - fclose(reference_file); - } - fclose(out_file); -} - -} // namespace webrtc - -int main(int argc, char* argv[]) { - std::vector args = absl::ParseCommandLine(argc, argv); - if (args.size() != 1) { - printf("%s", webrtc::kUsage); - return 1; - } - RTC_CHECK_GT(absl::GetFlag(FLAGS_chunk_size_ms), 0); - RTC_CHECK_GT(absl::GetFlag(FLAGS_sample_rate_hz), 0); - RTC_CHECK_GT(absl::GetFlag(FLAGS_num_channels), 0); - - webrtc::void_main(); - return 0; -} diff --git a/modules/audio_processing/transient/transient_suppressor.h b/modules/audio_processing/transient/transient_suppressor.h deleted file mode 100644 index ecb3c3baab..0000000000 --- a/modules/audio_processing/transient/transient_suppressor.h +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_H_ -#define MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_H_ - -#include - -namespace webrtc { - -// Detects transients in an audio stream and suppress them using a simple -// restoration algorithm that attenuates unexpected spikes in the spectrum. -class TransientSuppressor { - public: - // Type of VAD used by the caller to compute the `voice_probability` argument - // `Suppress()`. - enum class VadMode { - // By default, `TransientSuppressor` assumes that `voice_probability` is - // computed by `AgcManagerDirect`. - kDefault = 0, - // Use this mode when `TransientSuppressor` must assume that - // `voice_probability` is computed by the RNN VAD. - kRnnVad, - // Use this mode to let `TransientSuppressor::Suppressor()` ignore - // `voice_probability` and behave as if voice information is unavailable - // (regardless of the passed value). - kNoVad, - }; - - virtual ~TransientSuppressor() {} - - virtual void Initialize(int sample_rate_hz, - int detector_rate_hz, - int num_channels) = 0; - - // Processes a `data` chunk, and returns it with keystrokes suppressed from - // it. The float format is assumed to be int16 ranged. If there are more than - // one channel, the chunks are concatenated one after the other in `data`. - // `data_length` must be equal to `data_length_`. - // `num_channels` must be equal to `num_channels_`. - // A sub-band, ideally the higher, can be used as `detection_data`. If it is - // NULL, `data` is used for the detection too. The `detection_data` is always - // assumed mono. - // If a reference signal (e.g. keyboard microphone) is available, it can be - // passed in as `reference_data`. It is assumed mono and must have the same - // length as `data`. NULL is accepted if unavailable. - // This suppressor performs better if voice information is available. - // `voice_probability` is the probability of voice being present in this chunk - // of audio. If voice information is not available, `voice_probability` must - // always be set to 1. - // `key_pressed` determines if a key was pressed on this audio chunk. - // Returns a delayed version of `voice_probability` according to the - // algorithmic delay introduced by this method. In this way, the modified - // `data` and the returned voice probability will be temporally aligned. - virtual float Suppress(float* data, - size_t data_length, - int num_channels, - const float* detection_data, - size_t detection_length, - const float* reference_data, - size_t reference_length, - float voice_probability, - bool key_pressed) = 0; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_H_ diff --git a/modules/audio_processing/transient/transient_suppressor_impl.cc b/modules/audio_processing/transient/transient_suppressor_impl.cc deleted file mode 100644 index 90428464e3..0000000000 --- a/modules/audio_processing/transient/transient_suppressor_impl.cc +++ /dev/null @@ -1,455 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_processing/transient/transient_suppressor_impl.h" - -#include - -#include -#include -#include -#include -#include -#include -#include - -#include "common_audio/include/audio_util.h" -#include "common_audio/signal_processing/include/signal_processing_library.h" -#include "common_audio/third_party/ooura/fft_size_256/fft4g.h" -#include "modules/audio_processing/transient/common.h" -#include "modules/audio_processing/transient/transient_detector.h" -#include "modules/audio_processing/transient/transient_suppressor.h" -#include "modules/audio_processing/transient/windows_private.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -static const float kMeanIIRCoefficient = 0.5f; - -// TODO(aluebs): Check if these values work also for 48kHz. -static const size_t kMinVoiceBin = 3; -static const size_t kMaxVoiceBin = 60; - -namespace { - -float ComplexMagnitude(float a, float b) { - return std::abs(a) + std::abs(b); -} - -std::string GetVadModeLabel(TransientSuppressor::VadMode vad_mode) { - switch (vad_mode) { - case TransientSuppressor::VadMode::kDefault: - return "default"; - case TransientSuppressor::VadMode::kRnnVad: - return "RNN VAD"; - case TransientSuppressor::VadMode::kNoVad: - return "no VAD"; - } -} - -} // namespace - -TransientSuppressorImpl::TransientSuppressorImpl(VadMode vad_mode, - int sample_rate_hz, - int detector_rate_hz, - int num_channels) - : vad_mode_(vad_mode), - voice_probability_delay_unit_(/*delay_num_samples=*/0, sample_rate_hz), - analyzed_audio_is_silent_(false), - data_length_(0), - detection_length_(0), - analysis_length_(0), - buffer_delay_(0), - complex_analysis_length_(0), - num_channels_(0), - window_(NULL), - detector_smoothed_(0.f), - keypress_counter_(0), - chunks_since_keypress_(0), - detection_enabled_(false), - suppression_enabled_(false), - use_hard_restoration_(false), - chunks_since_voice_change_(0), - seed_(182), - using_reference_(false) { - RTC_LOG(LS_INFO) << "VAD mode: " << GetVadModeLabel(vad_mode_); - Initialize(sample_rate_hz, detector_rate_hz, num_channels); -} - -TransientSuppressorImpl::~TransientSuppressorImpl() {} - -void TransientSuppressorImpl::Initialize(int sample_rate_hz, - int detection_rate_hz, - int num_channels) { - RTC_DCHECK(sample_rate_hz == ts::kSampleRate8kHz || - sample_rate_hz == ts::kSampleRate16kHz || - sample_rate_hz == ts::kSampleRate32kHz || - sample_rate_hz == ts::kSampleRate48kHz); - RTC_DCHECK(detection_rate_hz == ts::kSampleRate8kHz || - detection_rate_hz == ts::kSampleRate16kHz || - detection_rate_hz == ts::kSampleRate32kHz || - detection_rate_hz == ts::kSampleRate48kHz); - RTC_DCHECK_GT(num_channels, 0); - - switch (sample_rate_hz) { - case ts::kSampleRate8kHz: - analysis_length_ = 128u; - window_ = kBlocks80w128; - break; - case ts::kSampleRate16kHz: - analysis_length_ = 256u; - window_ = kBlocks160w256; - break; - case ts::kSampleRate32kHz: - analysis_length_ = 512u; - window_ = kBlocks320w512; - break; - case ts::kSampleRate48kHz: - analysis_length_ = 1024u; - window_ = kBlocks480w1024; - break; - default: - RTC_DCHECK_NOTREACHED(); - return; - } - - detector_.reset(new TransientDetector(detection_rate_hz)); - data_length_ = sample_rate_hz * ts::kChunkSizeMs / 1000; - RTC_DCHECK_LE(data_length_, analysis_length_); - buffer_delay_ = analysis_length_ - data_length_; - - voice_probability_delay_unit_.Initialize(/*delay_num_samples=*/buffer_delay_, - sample_rate_hz); - - complex_analysis_length_ = analysis_length_ / 2 + 1; - 
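Filling in the numbers used by Initialize() above (assuming 10 ms chunks): the analysis window is longer than one chunk, and the difference becomes the algorithmic delay that VoiceProbabilityDelayUnit later compensates. At 16 kHz, for instance, data_length_ = 160, analysis_length_ = 256, buffer_delay_ = 96 samples (6 ms) and complex_analysis_length_ = 129 bins, so with a 62.5 Hz bin spacing kMinVoiceBin = 3 and kMaxVoiceBin = 60 sit near 190 Hz and 3.75 kHz. A sketch of the same arithmetic for all supported rates:

  #include <cstdio>

  int main() {
    const int kChunkSizeMs = 10;  // assumed value of ts::kChunkSizeMs
    struct { int rate_hz; int analysis_length; } configs[] = {
        {8000, 128}, {16000, 256}, {32000, 512}, {48000, 1024}};
    for (const auto& c : configs) {
      const int data_length = c.rate_hz * kChunkSizeMs / 1000;
      const int buffer_delay = c.analysis_length - data_length;
      const int complex_length = c.analysis_length / 2 + 1;
      std::printf("%5d Hz: chunk=%4d window=%4d delay=%4d (%.1f ms) bins=%d\n",
                  c.rate_hz, data_length, c.analysis_length, buffer_delay,
                  1000.f * buffer_delay / c.rate_hz, complex_length);
    }
    return 0;
  }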
RTC_DCHECK_GE(complex_analysis_length_, kMaxVoiceBin); - num_channels_ = num_channels; - in_buffer_.reset(new float[analysis_length_ * num_channels_]); - memset(in_buffer_.get(), 0, - analysis_length_ * num_channels_ * sizeof(in_buffer_[0])); - detection_length_ = detection_rate_hz * ts::kChunkSizeMs / 1000; - detection_buffer_.reset(new float[detection_length_]); - memset(detection_buffer_.get(), 0, - detection_length_ * sizeof(detection_buffer_[0])); - out_buffer_.reset(new float[analysis_length_ * num_channels_]); - memset(out_buffer_.get(), 0, - analysis_length_ * num_channels_ * sizeof(out_buffer_[0])); - // ip[0] must be zero to trigger initialization using rdft(). - size_t ip_length = 2 + sqrtf(analysis_length_); - ip_.reset(new size_t[ip_length]()); - memset(ip_.get(), 0, ip_length * sizeof(ip_[0])); - wfft_.reset(new float[complex_analysis_length_ - 1]); - memset(wfft_.get(), 0, (complex_analysis_length_ - 1) * sizeof(wfft_[0])); - spectral_mean_.reset(new float[complex_analysis_length_ * num_channels_]); - memset(spectral_mean_.get(), 0, - complex_analysis_length_ * num_channels_ * sizeof(spectral_mean_[0])); - fft_buffer_.reset(new float[analysis_length_ + 2]); - memset(fft_buffer_.get(), 0, (analysis_length_ + 2) * sizeof(fft_buffer_[0])); - magnitudes_.reset(new float[complex_analysis_length_]); - memset(magnitudes_.get(), 0, - complex_analysis_length_ * sizeof(magnitudes_[0])); - mean_factor_.reset(new float[complex_analysis_length_]); - - static const float kFactorHeight = 10.f; - static const float kLowSlope = 1.f; - static const float kHighSlope = 0.3f; - for (size_t i = 0; i < complex_analysis_length_; ++i) { - mean_factor_[i] = - kFactorHeight / - (1.f + std::exp(kLowSlope * static_cast(i - kMinVoiceBin))) + - kFactorHeight / - (1.f + std::exp(kHighSlope * static_cast(kMaxVoiceBin - i))); - } - detector_smoothed_ = 0.f; - keypress_counter_ = 0; - chunks_since_keypress_ = 0; - detection_enabled_ = false; - suppression_enabled_ = false; - use_hard_restoration_ = false; - chunks_since_voice_change_ = 0; - seed_ = 182; - using_reference_ = false; -} - -float TransientSuppressorImpl::Suppress(float* data, - size_t data_length, - int num_channels, - const float* detection_data, - size_t detection_length, - const float* reference_data, - size_t reference_length, - float voice_probability, - bool key_pressed) { - if (!data || data_length != data_length_ || num_channels != num_channels_ || - detection_length != detection_length_ || voice_probability < 0 || - voice_probability > 1) { - // The audio is not modified, so the voice probability is returned as is - // (delay not applied). - return voice_probability; - } - - UpdateKeypress(key_pressed); - UpdateBuffers(data); - - if (detection_enabled_) { - UpdateRestoration(voice_probability); - - if (!detection_data) { - // Use the input data of the first channel if special detection data is - // not supplied. - detection_data = &in_buffer_[buffer_delay_]; - } - - float detector_result = detector_->Detect(detection_data, detection_length, - reference_data, reference_length); - if (detector_result < 0) { - // The audio is not modified, so the voice probability is returned as is - // (delay not applied). - return voice_probability; - } - - using_reference_ = detector_->using_reference(); - - // `detector_smoothed_` follows the `detector_result` when this last one is - // increasing, but has an exponential decaying tail to be able to suppress - // the ringing of keyclicks. - float smooth_factor = using_reference_ ? 
0.6 : 0.1; - detector_smoothed_ = detector_result >= detector_smoothed_ - ? detector_result - : smooth_factor * detector_smoothed_ + - (1 - smooth_factor) * detector_result; - - for (int i = 0; i < num_channels_; ++i) { - Suppress(&in_buffer_[i * analysis_length_], - &spectral_mean_[i * complex_analysis_length_], - &out_buffer_[i * analysis_length_]); - } - } - - // If the suppression isn't enabled, we use the in buffer to delay the signal - // appropriately. This also gives time for the out buffer to be refreshed with - // new data between detection and suppression getting enabled. - for (int i = 0; i < num_channels_; ++i) { - memcpy(&data[i * data_length_], - suppression_enabled_ ? &out_buffer_[i * analysis_length_] - : &in_buffer_[i * analysis_length_], - data_length_ * sizeof(*data)); - } - - // The audio has been modified, return the delayed voice probability. - return voice_probability_delay_unit_.Delay(voice_probability); -} - -// This should only be called when detection is enabled. UpdateBuffers() must -// have been called. At return, `out_buffer_` will be filled with the -// processed output. -void TransientSuppressorImpl::Suppress(float* in_ptr, - float* spectral_mean, - float* out_ptr) { - // Go to frequency domain. - for (size_t i = 0; i < analysis_length_; ++i) { - // TODO(aluebs): Rename windows - fft_buffer_[i] = in_ptr[i] * window_[i]; - } - - WebRtc_rdft(analysis_length_, 1, fft_buffer_.get(), ip_.get(), wfft_.get()); - - // Since WebRtc_rdft puts R[n/2] in fft_buffer_[1], we move it to the end - // for convenience. - fft_buffer_[analysis_length_] = fft_buffer_[1]; - fft_buffer_[analysis_length_ + 1] = 0.f; - fft_buffer_[1] = 0.f; - - for (size_t i = 0; i < complex_analysis_length_; ++i) { - magnitudes_[i] = - ComplexMagnitude(fft_buffer_[i * 2], fft_buffer_[i * 2 + 1]); - } - // Restore audio if necessary. - if (suppression_enabled_) { - if (use_hard_restoration_) { - HardRestoration(spectral_mean); - } else { - SoftRestoration(spectral_mean); - } - } - - // Update the spectral mean. - for (size_t i = 0; i < complex_analysis_length_; ++i) { - spectral_mean[i] = (1 - kMeanIIRCoefficient) * spectral_mean[i] + - kMeanIIRCoefficient * magnitudes_[i]; - } - - // Back to time domain. - // Put R[n/2] back in fft_buffer_[1]. - fft_buffer_[1] = fft_buffer_[analysis_length_]; - - WebRtc_rdft(analysis_length_, -1, fft_buffer_.get(), ip_.get(), wfft_.get()); - const float fft_scaling = 2.f / analysis_length_; - - for (size_t i = 0; i < analysis_length_; ++i) { - out_ptr[i] += fft_buffer_[i] * window_[i] * fft_scaling; - } -} - -void TransientSuppressorImpl::UpdateKeypress(bool key_pressed) { - const int kKeypressPenalty = 1000 / ts::kChunkSizeMs; - const int kIsTypingThreshold = 1000 / ts::kChunkSizeMs; - const int kChunksUntilNotTyping = 4000 / ts::kChunkSizeMs; // 4 seconds. 
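A hedged walk-through of these counters, again assuming 10 ms chunks so that kKeypressPenalty = kIsTypingThreshold = 100 and kChunksUntilNotTyping = 400: each key press adds 100 to keypress_counter_ while every chunk subtracts 1, so a second press within roughly a second pushes the counter past the threshold and enables suppression, and four seconds without presses disables detection again, which is exactly what transient_suppressor_unittest.cc further down expects. A standalone sketch of the enabling path:

  #include <algorithm>
  #include <cstdio>

  int main() {
    const int kKeypressPenalty = 100;  // 1000 ms / 10 ms chunks (assumed)
    const int kIsTypingThreshold = 100;
    int keypress_counter = 0;
    bool suppression_enabled = false;
    // Two key presses, 50 chunks (0.5 s) apart.
    for (int chunk = 0; chunk < 60; ++chunk) {
      const bool key_pressed = (chunk == 0 || chunk == 50);
      if (key_pressed) keypress_counter += kKeypressPenalty;
      keypress_counter = std::max(0, keypress_counter - 1);
      if (keypress_counter > kIsTypingThreshold) {
        suppression_enabled = true;
        keypress_counter = 0;
      }
    }
    std::printf("suppression enabled: %s\n", suppression_enabled ? "yes" : "no");
    return 0;  // prints "yes"
  }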
- - if (key_pressed) { - keypress_counter_ += kKeypressPenalty; - chunks_since_keypress_ = 0; - detection_enabled_ = true; - } - keypress_counter_ = std::max(0, keypress_counter_ - 1); - - if (keypress_counter_ > kIsTypingThreshold) { - if (!suppression_enabled_) { - RTC_LOG(LS_INFO) << "[ts] Transient suppression is now enabled."; - } - suppression_enabled_ = true; - keypress_counter_ = 0; - } - - if (detection_enabled_ && ++chunks_since_keypress_ > kChunksUntilNotTyping) { - if (suppression_enabled_) { - RTC_LOG(LS_INFO) << "[ts] Transient suppression is now disabled."; - } - detection_enabled_ = false; - suppression_enabled_ = false; - keypress_counter_ = 0; - } -} - -void TransientSuppressorImpl::UpdateRestoration(float voice_probability) { - bool not_voiced; - switch (vad_mode_) { - case TransientSuppressor::VadMode::kDefault: { - constexpr float kVoiceThreshold = 0.02f; - not_voiced = voice_probability < kVoiceThreshold; - break; - } - case TransientSuppressor::VadMode::kRnnVad: { - constexpr float kVoiceThreshold = 0.7f; - not_voiced = voice_probability < kVoiceThreshold; - break; - } - case TransientSuppressor::VadMode::kNoVad: - // Always assume that voice is detected. - not_voiced = false; - break; - } - - if (not_voiced == use_hard_restoration_) { - chunks_since_voice_change_ = 0; - } else { - ++chunks_since_voice_change_; - - // Number of 10 ms frames to wait to transition to and from hard - // restoration. - constexpr int kHardRestorationOffsetDelay = 3; - constexpr int kHardRestorationOnsetDelay = 80; - - if ((use_hard_restoration_ && - chunks_since_voice_change_ > kHardRestorationOffsetDelay) || - (!use_hard_restoration_ && - chunks_since_voice_change_ > kHardRestorationOnsetDelay)) { - use_hard_restoration_ = not_voiced; - chunks_since_voice_change_ = 0; - } - } -} - -// Shift buffers to make way for new data. Must be called after -// `detection_enabled_` is updated by UpdateKeypress(). -void TransientSuppressorImpl::UpdateBuffers(float* data) { - // TODO(aluebs): Change to ring buffer. - memmove(in_buffer_.get(), &in_buffer_[data_length_], - (buffer_delay_ + (num_channels_ - 1) * analysis_length_) * - sizeof(in_buffer_[0])); - // Copy new chunk to buffer. - for (int i = 0; i < num_channels_; ++i) { - memcpy(&in_buffer_[buffer_delay_ + i * analysis_length_], - &data[i * data_length_], data_length_ * sizeof(*data)); - } - if (detection_enabled_) { - // Shift previous chunk in out buffer. - memmove(out_buffer_.get(), &out_buffer_[data_length_], - (buffer_delay_ + (num_channels_ - 1) * analysis_length_) * - sizeof(out_buffer_[0])); - // Initialize new chunk in out buffer. - for (int i = 0; i < num_channels_; ++i) { - memset(&out_buffer_[buffer_delay_ + i * analysis_length_], 0, - data_length_ * sizeof(out_buffer_[0])); - } - } -} - -// Restores the unvoiced signal if a click is present. -// Attenuates by a certain factor every peak in the `fft_buffer_` that exceeds -// the spectral mean. The attenuation depends on `detector_smoothed_`. -// If a restoration takes place, the `magnitudes_` are updated to the new value. -void TransientSuppressorImpl::HardRestoration(float* spectral_mean) { - const float detector_result = - 1.f - std::pow(1.f - detector_smoothed_, using_reference_ ? 200.f : 50.f); - // To restore, we get the peaks in the spectrum. If higher than the previous - // spectral mean we adjust them. 
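A sketch of the per-bin update that follows: each peak above the running spectral mean is cross-faded toward a mean-magnitude component with a random phase, which removes the click while keeping the band energy close to the long-term average. The snippet below uses std::rand as a stand-in for WebRtcSpl_RandU and the same L1 magnitude proxy as ComplexMagnitude() above.

  #include <cmath>
  #include <cstdio>
  #include <cstdlib>

  int main() {
    const float kPi = 3.14159265358979323846f;
    // One spectral bin: current (re, im), its magnitude proxy and running mean.
    float re = 9.f, im = 3.f;
    float magnitude = std::abs(re) + std::abs(im);  // ComplexMagnitude() proxy
    const float spectral_mean = 2.f;
    // A confident detector pushes detector_result close to 1.
    const float detector_result = 1.f - std::pow(1.f - 0.9f, 50.f);
    const float phase =
        2.f * kPi * (std::rand() / static_cast<float>(RAND_MAX));
    const float scaled_mean = detector_result * spectral_mean;
    re = (1.f - detector_result) * re + scaled_mean * std::cos(phase);
    im = (1.f - detector_result) * im + scaled_mean * std::sin(phase);
    magnitude -= detector_result * (magnitude - spectral_mean);
    std::printf("restored bin: re=%.3f im=%.3f |.|~%.3f\n", re, im, magnitude);
    return 0;
  }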
- for (size_t i = 0; i < complex_analysis_length_; ++i) { - if (magnitudes_[i] > spectral_mean[i] && magnitudes_[i] > 0) { - // RandU() generates values on [0, int16::max()] - const float phase = 2 * ts::kPi * WebRtcSpl_RandU(&seed_) / - std::numeric_limits::max(); - const float scaled_mean = detector_result * spectral_mean[i]; - - fft_buffer_[i * 2] = (1 - detector_result) * fft_buffer_[i * 2] + - scaled_mean * cosf(phase); - fft_buffer_[i * 2 + 1] = (1 - detector_result) * fft_buffer_[i * 2 + 1] + - scaled_mean * sinf(phase); - magnitudes_[i] = magnitudes_[i] - - detector_result * (magnitudes_[i] - spectral_mean[i]); - } - } -} - -// Restores the voiced signal if a click is present. -// Attenuates by a certain factor every peak in the `fft_buffer_` that exceeds -// the spectral mean and that is lower than some function of the current block -// frequency mean. The attenuation depends on `detector_smoothed_`. -// If a restoration takes place, the `magnitudes_` are updated to the new value. -void TransientSuppressorImpl::SoftRestoration(float* spectral_mean) { - // Get the spectral magnitude mean of the current block. - float block_frequency_mean = 0; - for (size_t i = kMinVoiceBin; i < kMaxVoiceBin; ++i) { - block_frequency_mean += magnitudes_[i]; - } - block_frequency_mean /= (kMaxVoiceBin - kMinVoiceBin); - - // To restore, we get the peaks in the spectrum. If higher than the - // previous spectral mean and lower than a factor of the block mean - // we adjust them. The factor is a double sigmoid that has a minimum in the - // voice frequency range (300Hz - 3kHz). - for (size_t i = 0; i < complex_analysis_length_; ++i) { - if (magnitudes_[i] > spectral_mean[i] && magnitudes_[i] > 0 && - (using_reference_ || - magnitudes_[i] < block_frequency_mean * mean_factor_[i])) { - const float new_magnitude = - magnitudes_[i] - - detector_smoothed_ * (magnitudes_[i] - spectral_mean[i]); - const float magnitude_ratio = new_magnitude / magnitudes_[i]; - - fft_buffer_[i * 2] *= magnitude_ratio; - fft_buffer_[i * 2 + 1] *= magnitude_ratio; - magnitudes_[i] = new_magnitude; - } - } -} - -} // namespace webrtc diff --git a/modules/audio_processing/transient/transient_suppressor_impl.h b/modules/audio_processing/transient/transient_suppressor_impl.h deleted file mode 100644 index 4005a16b0a..0000000000 --- a/modules/audio_processing/transient/transient_suppressor_impl.h +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_IMPL_H_ -#define MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_IMPL_H_ - -#include -#include - -#include - -#include "modules/audio_processing/transient/transient_suppressor.h" -#include "modules/audio_processing/transient/voice_probability_delay_unit.h" -#include "rtc_base/gtest_prod_util.h" - -namespace webrtc { - -class TransientDetector; - -// Detects transients in an audio stream and suppress them using a simple -// restoration algorithm that attenuates unexpected spikes in the spectrum. 
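For context on the API being removed, a minimal and purely hypothetical caller sketch based on the declarations below and the contract documented in transient_suppressor.h: one mono 16 kHz stream fed in 10 ms chunks, with no dedicated detection or reference signal and no VAD.

  #include <vector>

  #include "modules/audio_processing/transient/transient_suppressor_impl.h"

  // Hypothetical caller, for illustration only.
  void SuppressKeyclicks(std::vector<float>& chunk /* 160 float samples */,
                         bool key_pressed) {
    static webrtc::TransientSuppressorImpl suppressor(
        webrtc::TransientSuppressor::VadMode::kDefault,
        /*sample_rate_hz=*/16000, /*detector_rate_hz=*/16000,
        /*num_channels=*/1);
    // Suppress() modifies the chunk in place and returns a delayed copy of the
    // voice probability; voice information is unavailable here, so 1.0f is
    // passed as documented in transient_suppressor.h.
    suppressor.Suppress(chunk.data(), chunk.size(), /*num_channels=*/1,
                        /*detection_data=*/nullptr,
                        /*detection_length=*/chunk.size(),
                        /*reference_data=*/nullptr,
                        /*reference_length=*/chunk.size(),
                        /*voice_probability=*/1.0f, key_pressed);
  }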
-class TransientSuppressorImpl : public TransientSuppressor { - public: - TransientSuppressorImpl(VadMode vad_mode, - int sample_rate_hz, - int detector_rate_hz, - int num_channels); - ~TransientSuppressorImpl() override; - - void Initialize(int sample_rate_hz, - int detector_rate_hz, - int num_channels) override; - - float Suppress(float* data, - size_t data_length, - int num_channels, - const float* detection_data, - size_t detection_length, - const float* reference_data, - size_t reference_length, - float voice_probability, - bool key_pressed) override; - - private: - FRIEND_TEST_ALL_PREFIXES(TransientSuppressorVadModeParametrization, - TypingDetectionLogicWorksAsExpectedForMono); - void Suppress(float* in_ptr, float* spectral_mean, float* out_ptr); - - void UpdateKeypress(bool key_pressed); - void UpdateRestoration(float voice_probability); - - void UpdateBuffers(float* data); - - void HardRestoration(float* spectral_mean); - void SoftRestoration(float* spectral_mean); - - const VadMode vad_mode_; - VoiceProbabilityDelayUnit voice_probability_delay_unit_; - - std::unique_ptr detector_; - - bool analyzed_audio_is_silent_; - - size_t data_length_; - size_t detection_length_; - size_t analysis_length_; - size_t buffer_delay_; - size_t complex_analysis_length_; - int num_channels_; - // Input buffer where the original samples are stored. - std::unique_ptr in_buffer_; - std::unique_ptr detection_buffer_; - // Output buffer where the restored samples are stored. - std::unique_ptr out_buffer_; - - // Arrays for fft. - std::unique_ptr ip_; - std::unique_ptr wfft_; - - std::unique_ptr spectral_mean_; - - // Stores the data for the fft. - std::unique_ptr fft_buffer_; - - std::unique_ptr magnitudes_; - - const float* window_; - - std::unique_ptr mean_factor_; - - float detector_smoothed_; - - int keypress_counter_; - int chunks_since_keypress_; - bool detection_enabled_; - bool suppression_enabled_; - - bool use_hard_restoration_; - int chunks_since_voice_change_; - - uint32_t seed_; - - bool using_reference_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_IMPL_H_ diff --git a/modules/audio_processing/transient/transient_suppressor_unittest.cc b/modules/audio_processing/transient/transient_suppressor_unittest.cc deleted file mode 100644 index ab48504af6..0000000000 --- a/modules/audio_processing/transient/transient_suppressor_unittest.cc +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/transient/transient_suppressor.h" - -#include - -#include "absl/types/optional.h" -#include "modules/audio_processing/transient/common.h" -#include "modules/audio_processing/transient/transient_suppressor_impl.h" -#include "test/gtest.h" - -namespace webrtc { -namespace { -constexpr int kMono = 1; - -// Returns the index of the first non-zero sample in `samples` or an unspecified -// value if no value is zero. 
-absl::optional FindFirstNonZeroSample(const std::vector& samples) { - for (size_t i = 0; i < samples.size(); ++i) { - if (samples[i] != 0.0f) { - return i; - } - } - return absl::nullopt; -} - -} // namespace - -class TransientSuppressorVadModeParametrization - : public ::testing::TestWithParam {}; - -TEST_P(TransientSuppressorVadModeParametrization, - TypingDetectionLogicWorksAsExpectedForMono) { - TransientSuppressorImpl ts(GetParam(), ts::kSampleRate16kHz, - ts::kSampleRate16kHz, kMono); - - // Each key-press enables detection. - EXPECT_FALSE(ts.detection_enabled_); - ts.UpdateKeypress(true); - EXPECT_TRUE(ts.detection_enabled_); - - // It takes four seconds without any key-press to disable the detection - for (int time_ms = 0; time_ms < 3990; time_ms += ts::kChunkSizeMs) { - ts.UpdateKeypress(false); - EXPECT_TRUE(ts.detection_enabled_); - } - ts.UpdateKeypress(false); - EXPECT_FALSE(ts.detection_enabled_); - - // Key-presses that are more than a second apart from each other don't enable - // suppression. - for (int i = 0; i < 100; ++i) { - EXPECT_FALSE(ts.suppression_enabled_); - ts.UpdateKeypress(true); - EXPECT_TRUE(ts.detection_enabled_); - EXPECT_FALSE(ts.suppression_enabled_); - for (int time_ms = 0; time_ms < 990; time_ms += ts::kChunkSizeMs) { - ts.UpdateKeypress(false); - EXPECT_TRUE(ts.detection_enabled_); - EXPECT_FALSE(ts.suppression_enabled_); - } - ts.UpdateKeypress(false); - } - - // Two consecutive key-presses is enough to enable the suppression. - ts.UpdateKeypress(true); - EXPECT_FALSE(ts.suppression_enabled_); - ts.UpdateKeypress(true); - EXPECT_TRUE(ts.suppression_enabled_); - - // Key-presses that are less than a second apart from each other don't disable - // detection nor suppression. - for (int i = 0; i < 100; ++i) { - for (int time_ms = 0; time_ms < 1000; time_ms += ts::kChunkSizeMs) { - ts.UpdateKeypress(false); - EXPECT_TRUE(ts.detection_enabled_); - EXPECT_TRUE(ts.suppression_enabled_); - } - ts.UpdateKeypress(true); - EXPECT_TRUE(ts.detection_enabled_); - EXPECT_TRUE(ts.suppression_enabled_); - } - - // It takes four seconds without any key-press to disable the detection and - // suppression. - for (int time_ms = 0; time_ms < 3990; time_ms += ts::kChunkSizeMs) { - ts.UpdateKeypress(false); - EXPECT_TRUE(ts.detection_enabled_); - EXPECT_TRUE(ts.suppression_enabled_); - } - for (int time_ms = 0; time_ms < 1000; time_ms += ts::kChunkSizeMs) { - ts.UpdateKeypress(false); - EXPECT_FALSE(ts.detection_enabled_); - EXPECT_FALSE(ts.suppression_enabled_); - } -} - -INSTANTIATE_TEST_SUITE_P( - TransientSuppressorImplTest, - TransientSuppressorVadModeParametrization, - ::testing::Values(TransientSuppressor::VadMode::kDefault, - TransientSuppressor::VadMode::kRnnVad, - TransientSuppressor::VadMode::kNoVad)); - -class TransientSuppressorSampleRateParametrization - : public ::testing::TestWithParam {}; - -// Checks that voice probability and processed audio data are temporally aligned -// after `Suppress()` is called. -TEST_P(TransientSuppressorSampleRateParametrization, - CheckAudioAndVoiceProbabilityTemporallyAligned) { - const int sample_rate_hz = GetParam(); - TransientSuppressorImpl ts(TransientSuppressor::VadMode::kDefault, - sample_rate_hz, - /*detection_rate_hz=*/sample_rate_hz, kMono); - - const int frame_size = sample_rate_hz * ts::kChunkSizeMs / 1000; - std::vector frame(frame_size); - - constexpr int kMaxAttempts = 3; - for (int i = 0; i < kMaxAttempts; ++i) { - SCOPED_TRACE(i); - - // Call `Suppress()` on frames of non-zero audio samples. 
- std::fill(frame.begin(), frame.end(), 1000.0f); - float delayed_voice_probability = ts.Suppress( - frame.data(), frame.size(), kMono, /*detection_data=*/nullptr, - /*detection_length=*/frame_size, /*reference_data=*/nullptr, - /*reference_length=*/frame_size, /*voice_probability=*/1.0f, - /*key_pressed=*/false); - - // Detect the algorithmic delay of `TransientSuppressorImpl`. - absl::optional frame_delay = FindFirstNonZeroSample(frame); - - // Check that the delayed voice probability is delayed according to the - // measured delay. - if (frame_delay.has_value()) { - if (*frame_delay == 0) { - // When the delay is a multiple integer of the frame duration, - // `Suppress()` returns a copy of a previously observed voice - // probability value. - EXPECT_EQ(delayed_voice_probability, 1.0f); - } else { - // Instead, when the delay is fractional, `Suppress()` returns an - // interpolated value. Since the exact value depends on the - // interpolation method, we only check that the delayed voice - // probability is not zero as it must converge towards the previoulsy - // observed value. - EXPECT_GT(delayed_voice_probability, 0.0f); - } - break; - } else { - // The algorithmic delay is longer than the duration of a single frame. - // Until the delay is detected, the delayed voice probability is zero. - EXPECT_EQ(delayed_voice_probability, 0.0f); - } - } -} - -INSTANTIATE_TEST_SUITE_P(TransientSuppressorImplTest, - TransientSuppressorSampleRateParametrization, - ::testing::Values(ts::kSampleRate8kHz, - ts::kSampleRate16kHz, - ts::kSampleRate32kHz, - ts::kSampleRate48kHz)); - -} // namespace webrtc diff --git a/modules/audio_processing/transient/voice_probability_delay_unit.cc b/modules/audio_processing/transient/voice_probability_delay_unit.cc deleted file mode 100644 index 27b2b42b38..0000000000 --- a/modules/audio_processing/transient/voice_probability_delay_unit.cc +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/transient/voice_probability_delay_unit.h" - -#include - -#include "rtc_base/checks.h" - -namespace webrtc { - -VoiceProbabilityDelayUnit::VoiceProbabilityDelayUnit(int delay_num_samples, - int sample_rate_hz) { - Initialize(delay_num_samples, sample_rate_hz); -} - -void VoiceProbabilityDelayUnit::Initialize(int delay_num_samples, - int sample_rate_hz) { - RTC_DCHECK_GE(delay_num_samples, 0); - RTC_DCHECK_LE(delay_num_samples, sample_rate_hz / 50) - << "The implementation does not support delays greater than 20 ms."; - int frame_size = rtc::CheckedDivExact(sample_rate_hz, 100); // 10 ms. - if (delay_num_samples <= frame_size) { - weights_[0] = 0.0f; - weights_[1] = static_cast(delay_num_samples) / frame_size; - weights_[2] = - static_cast(frame_size - delay_num_samples) / frame_size; - } else { - delay_num_samples -= frame_size; - weights_[0] = static_cast(delay_num_samples) / frame_size; - weights_[1] = - static_cast(frame_size - delay_num_samples) / frame_size; - weights_[2] = 0.0f; - } - - // Resets the delay unit. 
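Working the weight computation above through for the 6 ms case exercised in voice_probability_delay_unit_unittest.cc further down (288 samples at 48 kHz, frame_size = 480): delay_num_samples <= frame_size, so the weights become {0, 288/480, 192/480} = {0, 0.6, 0.4}, and after observing 1.0 the next call Delay(0.2) returns 0.6 * 1.0 + 0.4 * 0.2 = 0.68, the value FractionalDelayLessThan10ms expects. A standalone sketch of that trace:

  #include <array>
  #include <cstdio>

  int main() {
    // Weights for a 288-sample delay at 48 kHz (frame_size = 480).
    const std::array<float, 3> weights = {0.0f, 288.f / 480.f, 192.f / 480.f};
    std::array<float, 2> last = {0.0f, 0.0f};
    auto delay = [&](float p) {
      const float out =
          weights[0] * last[0] + weights[1] * last[1] + weights[2] * p;
      last[0] = last[1];
      last[1] = p;
      return out;
    };
    delay(1.0f);                          // first observation, output unused
    std::printf("%.2f\n", delay(0.2f));   // prints 0.68
    return 0;
  }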
- last_probabilities_.fill(0.0f); -} - -float VoiceProbabilityDelayUnit::Delay(float voice_probability) { - float weighted_probability = weights_[0] * last_probabilities_[0] + - weights_[1] * last_probabilities_[1] + - weights_[2] * voice_probability; - last_probabilities_[0] = last_probabilities_[1]; - last_probabilities_[1] = voice_probability; - return weighted_probability; -} - -} // namespace webrtc diff --git a/modules/audio_processing/transient/voice_probability_delay_unit.h b/modules/audio_processing/transient/voice_probability_delay_unit.h deleted file mode 100644 index 05961663e3..0000000000 --- a/modules/audio_processing/transient/voice_probability_delay_unit.h +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_VOICE_PROBABILITY_DELAY_UNIT_H_ -#define MODULES_AUDIO_PROCESSING_TRANSIENT_VOICE_PROBABILITY_DELAY_UNIT_H_ - -#include - -namespace webrtc { - -// Iteratively produces a sequence of delayed voice probability values given a -// fixed delay between 0 and 20 ms and given a sequence of voice probability -// values observed every 10 ms. Supports fractional delays, that are delays -// which are not a multiple integer of 10 ms. Applies interpolation with -// fractional delays; otherwise, returns a previously observed value according -// to the given fixed delay. -class VoiceProbabilityDelayUnit { - public: - // Ctor. `delay_num_samples` is the delay in number of samples and it must be - // non-negative and less than 20 ms. - VoiceProbabilityDelayUnit(int delay_num_samples, int sample_rate_hz); - - // Handles delay and sample rate changes and resets the delay unit. - void Initialize(int delay_num_samples, int sample_rate_hz); - - // Observes `voice_probability` and returns a delayed voice probability. - float Delay(float voice_probability); - - private: - std::array weights_; - std::array last_probabilities_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_VOICE_PROBABILITY_DELAY_UNIT_H_ diff --git a/modules/audio_processing/transient/voice_probability_delay_unit_unittest.cc b/modules/audio_processing/transient/voice_probability_delay_unit_unittest.cc deleted file mode 100644 index 04848e6f2c..0000000000 --- a/modules/audio_processing/transient/voice_probability_delay_unit_unittest.cc +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/transient/voice_probability_delay_unit.h" - -#include "test/gtest.h" - -namespace webrtc { -namespace { - -// Checks that with zero delay, the observed value is immediately returned as -// delayed value. 
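The same derivation covers the remaining cases in the tests that follow: a zero delay yields weights {0, 0, 1}, so Delay() returns its input immediately (the NoDelay test), while a 672-sample (14 ms) delay at 48 kHz yields {0.4, 0.6, 0}, so after observing 1.0 and 0.2 the next call Delay(1.0) returns 0.4 * 1.0 + 0.6 * 0.2 = 0.52, as FractionalDelayGreaterThan10ms checks. A sketch of the weight derivation itself, mirroring Initialize() above for a 48 kHz stream:

  #include <array>
  #include <cstdio>

  // Same weight derivation as VoiceProbabilityDelayUnit::Initialize(),
  // specialized to a 10 ms frame of 480 samples.
  std::array<float, 3> Weights(int delay_num_samples, int frame_size = 480) {
    if (delay_num_samples <= frame_size) {
      return {0.f, static_cast<float>(delay_num_samples) / frame_size,
              static_cast<float>(frame_size - delay_num_samples) / frame_size};
    }
    const int d = delay_num_samples - frame_size;
    return {static_cast<float>(d) / frame_size,
            static_cast<float>(frame_size - d) / frame_size, 0.f};
  }

  int main() {
    for (int delay : {0, 288, 672}) {
      const auto w = Weights(delay);
      std::printf("delay=%3d -> {%.2f, %.2f, %.2f}\n", delay, w[0], w[1], w[2]);
    }
    return 0;  // prints {0,0,1}, {0,0.6,0.4} and {0.4,0.6,0}
  }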
-TEST(VoiceProbabilityDelayUnit, NoDelay) { - VoiceProbabilityDelayUnit delay_unit(/*delay_num_samples=*/0, - /*sample_rate_hz=*/48000); - constexpr int kMax = 5; - for (int i = 0; i <= kMax; ++i) { - SCOPED_TRACE(i); - float voice_probability = static_cast(i) / kMax; - EXPECT_EQ(voice_probability, delay_unit.Delay(voice_probability)); - } -} - -// Checks that with integer delays, an exact copy of a previously observed value -// is returned. -TEST(VoiceProbabilityDelayUnit, IntegerDelay) { - VoiceProbabilityDelayUnit delay_unit_10ms(/*delay_num_samples=*/480, - /*sample_rate_hz=*/48000); - delay_unit_10ms.Delay(0.125f); - EXPECT_EQ(0.125f, delay_unit_10ms.Delay(0.9f)); - - VoiceProbabilityDelayUnit delay_unit_20ms(/*delay_num_samples=*/960, - /*sample_rate_hz=*/48000); - delay_unit_20ms.Delay(0.125f); - delay_unit_20ms.Delay(0.8f); - EXPECT_EQ(0.125f, delay_unit_20ms.Delay(0.9f)); -} - -// Checks that with a fractional delay < 10 ms, interpolation is applied. -TEST(VoiceProbabilityDelayUnit, FractionalDelayLessThan10ms) { - // Create delay unit with fractional delay of 6 ms. - VoiceProbabilityDelayUnit delay_unit(/*delay_num_samples=*/288, - /*sample_rate_hz=*/48000); - // frame 0 - // --------- frame 1 - // --------- - // 0000001111 - delay_unit.Delay(1.0f); - EXPECT_FLOAT_EQ(0.68f, delay_unit.Delay(0.2f)); -} - -// Checks that with a fractional delay > 10 ms, interpolation is applied. -TEST(VoiceProbabilityDelayUnit, FractionalDelayGreaterThan10ms) { - // Create delay unit with fractional delay of 14 ms. - VoiceProbabilityDelayUnit delay_unit(/*delay_num_samples=*/672, - /*sample_rate_hz=*/48000); - // frame 0 - // --------- frame 1 - // --------- frame 2 - // --------- - // 0000111111 - delay_unit.Delay(1.0f); - delay_unit.Delay(0.2f); - EXPECT_FLOAT_EQ(0.52f, delay_unit.Delay(1.0f)); -} - -// Checks that `Initialize()` resets the delay unit. -TEST(VoiceProbabilityDelayUnit, InitializeResetsDelayUnit) { - VoiceProbabilityDelayUnit delay_unit(/*delay_num_samples=*/960, - /*sample_rate_hz=*/48000); - delay_unit.Delay(1.0f); - delay_unit.Delay(0.9f); - - delay_unit.Initialize(/*delay_num_samples=*/160, /*sample_rate_hz=*/8000); - EXPECT_EQ(0.0f, delay_unit.Delay(0.1f)); - EXPECT_EQ(0.0f, delay_unit.Delay(0.2f)); - EXPECT_EQ(0.1f, delay_unit.Delay(0.3f)); -} - -// Checks that `Initialize()` handles delay changes. -TEST(VoiceProbabilityDelayUnit, InitializeHandlesDelayChanges) { - // Start with a 20 ms delay. - VoiceProbabilityDelayUnit delay_unit(/*delay_num_samples=*/960, - /*sample_rate_hz=*/48000); - delay_unit.Delay(1.0f); - delay_unit.Delay(0.9f); - - // Lower the delay to 10 ms. - delay_unit.Initialize(/*delay_num_samples=*/80, /*sample_rate_hz=*/8000); - EXPECT_EQ(0.0f, delay_unit.Delay(0.1f)); - EXPECT_EQ(0.1f, delay_unit.Delay(0.2f)); - - // Increase the delay to 15 ms. - delay_unit.Initialize(/*delay_num_samples=*/120, /*sample_rate_hz=*/8000); - EXPECT_EQ(0.0f, delay_unit.Delay(0.1f)); - EXPECT_EQ(0.05f, delay_unit.Delay(0.2f)); - EXPECT_EQ(0.15f, delay_unit.Delay(0.3f)); -} - -} // namespace -} // namespace webrtc diff --git a/modules/audio_processing/transient/windows_private.h b/modules/audio_processing/transient/windows_private.h deleted file mode 100644 index 54e3c25785..0000000000 --- a/modules/audio_processing/transient/windows_private.h +++ /dev/null @@ -1,557 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_WINDOWS_PRIVATE_H_ -#define MODULES_AUDIO_PROCESSING_TRANSIENT_WINDOWS_PRIVATE_H_ - -namespace webrtc { - -// Hanning window for 4ms 16kHz -static const float kHanning64w128[128] = { - 0.00000000000000f, 0.02454122852291f, 0.04906767432742f, 0.07356456359967f, - 0.09801714032956f, 0.12241067519922f, 0.14673047445536f, 0.17096188876030f, - 0.19509032201613f, 0.21910124015687f, 0.24298017990326f, 0.26671275747490f, - 0.29028467725446f, 0.31368174039889f, 0.33688985339222f, 0.35989503653499f, - 0.38268343236509f, 0.40524131400499f, 0.42755509343028f, 0.44961132965461f, - 0.47139673682600f, 0.49289819222978f, 0.51410274419322f, 0.53499761988710f, - 0.55557023301960f, 0.57580819141785f, 0.59569930449243f, 0.61523159058063f, - 0.63439328416365f, 0.65317284295378f, 0.67155895484702f, 0.68954054473707f, - 0.70710678118655f, 0.72424708295147f, 0.74095112535496f, 0.75720884650648f, - 0.77301045336274f, 0.78834642762661f, 0.80320753148064f, 0.81758481315158f, - 0.83146961230255f, 0.84485356524971f, 0.85772861000027f, 0.87008699110871f, - 0.88192126434835f, 0.89322430119552f, 0.90398929312344f, 0.91420975570353f, - 0.92387953251129f, 0.93299279883474f, 0.94154406518302f, 0.94952818059304f, - 0.95694033573221f, 0.96377606579544f, 0.97003125319454f, 0.97570213003853f, - 0.98078528040323f, 0.98527764238894f, 0.98917650996478f, 0.99247953459871f, - 0.99518472667220f, 0.99729045667869f, 0.99879545620517f, 0.99969881869620f, - 1.00000000000000f, 0.99969881869620f, 0.99879545620517f, 0.99729045667869f, - 0.99518472667220f, 0.99247953459871f, 0.98917650996478f, 0.98527764238894f, - 0.98078528040323f, 0.97570213003853f, 0.97003125319454f, 0.96377606579544f, - 0.95694033573221f, 0.94952818059304f, 0.94154406518302f, 0.93299279883474f, - 0.92387953251129f, 0.91420975570353f, 0.90398929312344f, 0.89322430119552f, - 0.88192126434835f, 0.87008699110871f, 0.85772861000027f, 0.84485356524971f, - 0.83146961230255f, 0.81758481315158f, 0.80320753148064f, 0.78834642762661f, - 0.77301045336274f, 0.75720884650648f, 0.74095112535496f, 0.72424708295147f, - 0.70710678118655f, 0.68954054473707f, 0.67155895484702f, 0.65317284295378f, - 0.63439328416365f, 0.61523159058063f, 0.59569930449243f, 0.57580819141785f, - 0.55557023301960f, 0.53499761988710f, 0.51410274419322f, 0.49289819222978f, - 0.47139673682600f, 0.44961132965461f, 0.42755509343028f, 0.40524131400499f, - 0.38268343236509f, 0.35989503653499f, 0.33688985339222f, 0.31368174039889f, - 0.29028467725446f, 0.26671275747490f, 0.24298017990326f, 0.21910124015687f, - 0.19509032201613f, 0.17096188876030f, 0.14673047445536f, 0.12241067519922f, - 0.09801714032956f, 0.07356456359967f, 0.04906767432742f, 0.02454122852291f}; - -// hybrib Hanning & flat window -static const float kBlocks80w128[128] = { - 0.00000000f, 0.03271908f, 0.06540313f, 0.09801714f, 0.13052619f, - 0.16289547f, 0.19509032f, 0.22707626f, 0.25881905f, 0.29028468f, - 0.32143947f, 0.35225005f, 0.38268343f, 0.41270703f, 0.44228869f, - 0.47139674f, 0.50000000f, 0.52806785f, 0.55557023f, 0.58247770f, - 0.60876143f, 0.63439328f, 0.65934582f, 0.68359230f, 0.70710678f, - 0.72986407f, 0.75183981f, 0.77301045f, 0.79335334f, 0.81284668f, - 
0.83146961f, 0.84920218f, 0.86602540f, 0.88192126f, 0.89687274f, - 0.91086382f, 0.92387953f, 0.93590593f, 0.94693013f, 0.95694034f, - 0.96592583f, 0.97387698f, 0.98078528f, 0.98664333f, 0.99144486f, - 0.99518473f, 0.99785892f, 0.99946459f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 0.99946459f, 0.99785892f, 0.99518473f, 0.99144486f, - 0.98664333f, 0.98078528f, 0.97387698f, 0.96592583f, 0.95694034f, - 0.94693013f, 0.93590593f, 0.92387953f, 0.91086382f, 0.89687274f, - 0.88192126f, 0.86602540f, 0.84920218f, 0.83146961f, 0.81284668f, - 0.79335334f, 0.77301045f, 0.75183981f, 0.72986407f, 0.70710678f, - 0.68359230f, 0.65934582f, 0.63439328f, 0.60876143f, 0.58247770f, - 0.55557023f, 0.52806785f, 0.50000000f, 0.47139674f, 0.44228869f, - 0.41270703f, 0.38268343f, 0.35225005f, 0.32143947f, 0.29028468f, - 0.25881905f, 0.22707626f, 0.19509032f, 0.16289547f, 0.13052619f, - 0.09801714f, 0.06540313f, 0.03271908f}; - -// hybrib Hanning & flat window -static const float kBlocks160w256[256] = { - 0.00000000f, 0.01636173f, 0.03271908f, 0.04906767f, 0.06540313f, - 0.08172107f, 0.09801714f, 0.11428696f, 0.13052619f, 0.14673047f, - 0.16289547f, 0.17901686f, 0.19509032f, 0.21111155f, 0.22707626f, - 0.24298018f, 0.25881905f, 0.27458862f, 0.29028468f, 0.30590302f, - 0.32143947f, 0.33688985f, 0.35225005f, 0.36751594f, 0.38268343f, - 0.39774847f, 0.41270703f, 0.42755509f, 0.44228869f, 0.45690388f, - 0.47139674f, 0.48576339f, 0.50000000f, 0.51410274f, 0.52806785f, - 0.54189158f, 0.55557023f, 0.56910015f, 0.58247770f, 0.59569930f, - 0.60876143f, 0.62166057f, 0.63439328f, 0.64695615f, 0.65934582f, - 0.67155895f, 0.68359230f, 0.69544264f, 0.70710678f, 0.71858162f, - 0.72986407f, 0.74095113f, 0.75183981f, 0.76252720f, 0.77301045f, - 0.78328675f, 0.79335334f, 0.80320753f, 0.81284668f, 0.82226822f, - 0.83146961f, 0.84044840f, 0.84920218f, 0.85772861f, 0.86602540f, - 0.87409034f, 0.88192126f, 0.88951608f, 0.89687274f, 0.90398929f, - 0.91086382f, 0.91749450f, 0.92387953f, 0.93001722f, 0.93590593f, - 0.94154407f, 0.94693013f, 0.95206268f, 0.95694034f, 0.96156180f, - 0.96592583f, 0.97003125f, 0.97387698f, 0.97746197f, 0.98078528f, - 0.98384601f, 0.98664333f, 0.98917651f, 0.99144486f, 0.99344778f, - 0.99518473f, 0.99665524f, 0.99785892f, 0.99879546f, 0.99946459f, - 0.99986614f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 
0.99986614f, 0.99946459f, 0.99879546f, 0.99785892f, - 0.99665524f, 0.99518473f, 0.99344778f, 0.99144486f, 0.98917651f, - 0.98664333f, 0.98384601f, 0.98078528f, 0.97746197f, 0.97387698f, - 0.97003125f, 0.96592583f, 0.96156180f, 0.95694034f, 0.95206268f, - 0.94693013f, 0.94154407f, 0.93590593f, 0.93001722f, 0.92387953f, - 0.91749450f, 0.91086382f, 0.90398929f, 0.89687274f, 0.88951608f, - 0.88192126f, 0.87409034f, 0.86602540f, 0.85772861f, 0.84920218f, - 0.84044840f, 0.83146961f, 0.82226822f, 0.81284668f, 0.80320753f, - 0.79335334f, 0.78328675f, 0.77301045f, 0.76252720f, 0.75183981f, - 0.74095113f, 0.72986407f, 0.71858162f, 0.70710678f, 0.69544264f, - 0.68359230f, 0.67155895f, 0.65934582f, 0.64695615f, 0.63439328f, - 0.62166057f, 0.60876143f, 0.59569930f, 0.58247770f, 0.56910015f, - 0.55557023f, 0.54189158f, 0.52806785f, 0.51410274f, 0.50000000f, - 0.48576339f, 0.47139674f, 0.45690388f, 0.44228869f, 0.42755509f, - 0.41270703f, 0.39774847f, 0.38268343f, 0.36751594f, 0.35225005f, - 0.33688985f, 0.32143947f, 0.30590302f, 0.29028468f, 0.27458862f, - 0.25881905f, 0.24298018f, 0.22707626f, 0.21111155f, 0.19509032f, - 0.17901686f, 0.16289547f, 0.14673047f, 0.13052619f, 0.11428696f, - 0.09801714f, 0.08172107f, 0.06540313f, 0.04906767f, 0.03271908f, - 0.01636173f}; - -// hybrib Hanning & flat window: for 20ms -static const float kBlocks320w512[512] = { - 0.00000000f, 0.00818114f, 0.01636173f, 0.02454123f, 0.03271908f, - 0.04089475f, 0.04906767f, 0.05723732f, 0.06540313f, 0.07356456f, - 0.08172107f, 0.08987211f, 0.09801714f, 0.10615561f, 0.11428696f, - 0.12241068f, 0.13052619f, 0.13863297f, 0.14673047f, 0.15481816f, - 0.16289547f, 0.17096189f, 0.17901686f, 0.18705985f, 0.19509032f, - 0.20310773f, 0.21111155f, 0.21910124f, 0.22707626f, 0.23503609f, - 0.24298018f, 0.25090801f, 0.25881905f, 0.26671276f, 0.27458862f, - 0.28244610f, 0.29028468f, 0.29810383f, 0.30590302f, 0.31368174f, - 0.32143947f, 0.32917568f, 0.33688985f, 0.34458148f, 0.35225005f, - 0.35989504f, 0.36751594f, 0.37511224f, 0.38268343f, 0.39022901f, - 0.39774847f, 0.40524131f, 0.41270703f, 0.42014512f, 0.42755509f, - 0.43493645f, 0.44228869f, 0.44961133f, 0.45690388f, 0.46416584f, - 0.47139674f, 0.47859608f, 0.48576339f, 0.49289819f, 0.50000000f, - 0.50706834f, 0.51410274f, 0.52110274f, 0.52806785f, 0.53499762f, - 0.54189158f, 0.54874927f, 0.55557023f, 0.56235401f, 0.56910015f, - 0.57580819f, 0.58247770f, 0.58910822f, 0.59569930f, 0.60225052f, - 0.60876143f, 0.61523159f, 0.62166057f, 0.62804795f, 0.63439328f, - 0.64069616f, 0.64695615f, 0.65317284f, 0.65934582f, 0.66547466f, - 0.67155895f, 0.67759830f, 0.68359230f, 0.68954054f, 0.69544264f, - 0.70129818f, 0.70710678f, 0.71286806f, 0.71858162f, 0.72424708f, - 0.72986407f, 0.73543221f, 0.74095113f, 0.74642045f, 0.75183981f, - 0.75720885f, 0.76252720f, 0.76779452f, 0.77301045f, 0.77817464f, - 0.78328675f, 0.78834643f, 0.79335334f, 0.79830715f, 0.80320753f, - 0.80805415f, 0.81284668f, 0.81758481f, 0.82226822f, 0.82689659f, - 0.83146961f, 0.83598698f, 0.84044840f, 0.84485357f, 0.84920218f, - 0.85349396f, 0.85772861f, 0.86190585f, 0.86602540f, 0.87008699f, - 0.87409034f, 0.87803519f, 0.88192126f, 0.88574831f, 0.88951608f, - 0.89322430f, 0.89687274f, 0.90046115f, 0.90398929f, 0.90745693f, - 0.91086382f, 0.91420976f, 0.91749450f, 0.92071783f, 0.92387953f, - 0.92697940f, 0.93001722f, 0.93299280f, 0.93590593f, 0.93875641f, - 0.94154407f, 0.94426870f, 0.94693013f, 0.94952818f, 0.95206268f, - 0.95453345f, 0.95694034f, 0.95928317f, 0.96156180f, 0.96377607f, - 0.96592583f, 0.96801094f, 0.97003125f, 
0.97198664f, 0.97387698f, - 0.97570213f, 0.97746197f, 0.97915640f, 0.98078528f, 0.98234852f, - 0.98384601f, 0.98527764f, 0.98664333f, 0.98794298f, 0.98917651f, - 0.99034383f, 0.99144486f, 0.99247953f, 0.99344778f, 0.99434953f, - 0.99518473f, 0.99595331f, 0.99665524f, 0.99729046f, 0.99785892f, - 0.99836060f, 0.99879546f, 0.99916346f, 0.99946459f, 0.99969882f, - 0.99986614f, 0.99996653f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, - 1.00000000f, 0.99996653f, 0.99986614f, 0.99969882f, 0.99946459f, - 0.99916346f, 0.99879546f, 0.99836060f, 0.99785892f, 0.99729046f, - 0.99665524f, 0.99595331f, 0.99518473f, 0.99434953f, 0.99344778f, - 0.99247953f, 0.99144486f, 0.99034383f, 0.98917651f, 0.98794298f, - 0.98664333f, 0.98527764f, 0.98384601f, 0.98234852f, 0.98078528f, - 0.97915640f, 0.97746197f, 0.97570213f, 0.97387698f, 0.97198664f, - 0.97003125f, 0.96801094f, 0.96592583f, 0.96377607f, 0.96156180f, - 0.95928317f, 0.95694034f, 0.95453345f, 0.95206268f, 0.94952818f, - 0.94693013f, 0.94426870f, 0.94154407f, 0.93875641f, 0.93590593f, - 0.93299280f, 0.93001722f, 0.92697940f, 0.92387953f, 0.92071783f, - 0.91749450f, 0.91420976f, 0.91086382f, 0.90745693f, 0.90398929f, - 0.90046115f, 0.89687274f, 0.89322430f, 0.88951608f, 0.88574831f, - 0.88192126f, 0.87803519f, 0.87409034f, 0.87008699f, 0.86602540f, - 0.86190585f, 0.85772861f, 0.85349396f, 0.84920218f, 0.84485357f, - 0.84044840f, 0.83598698f, 0.83146961f, 0.82689659f, 0.82226822f, - 0.81758481f, 0.81284668f, 0.80805415f, 0.80320753f, 0.79830715f, - 0.79335334f, 0.78834643f, 0.78328675f, 0.77817464f, 0.77301045f, - 0.76779452f, 0.76252720f, 0.75720885f, 0.75183981f, 0.74642045f, - 0.74095113f, 0.73543221f, 0.72986407f, 0.72424708f, 0.71858162f, - 0.71286806f, 0.70710678f, 0.70129818f, 0.69544264f, 0.68954054f, - 0.68359230f, 0.67759830f, 0.67155895f, 0.66547466f, 0.65934582f, - 0.65317284f, 0.64695615f, 0.64069616f, 
0.63439328f, 0.62804795f, - 0.62166057f, 0.61523159f, 0.60876143f, 0.60225052f, 0.59569930f, - 0.58910822f, 0.58247770f, 0.57580819f, 0.56910015f, 0.56235401f, - 0.55557023f, 0.54874927f, 0.54189158f, 0.53499762f, 0.52806785f, - 0.52110274f, 0.51410274f, 0.50706834f, 0.50000000f, 0.49289819f, - 0.48576339f, 0.47859608f, 0.47139674f, 0.46416584f, 0.45690388f, - 0.44961133f, 0.44228869f, 0.43493645f, 0.42755509f, 0.42014512f, - 0.41270703f, 0.40524131f, 0.39774847f, 0.39022901f, 0.38268343f, - 0.37511224f, 0.36751594f, 0.35989504f, 0.35225005f, 0.34458148f, - 0.33688985f, 0.32917568f, 0.32143947f, 0.31368174f, 0.30590302f, - 0.29810383f, 0.29028468f, 0.28244610f, 0.27458862f, 0.26671276f, - 0.25881905f, 0.25090801f, 0.24298018f, 0.23503609f, 0.22707626f, - 0.21910124f, 0.21111155f, 0.20310773f, 0.19509032f, 0.18705985f, - 0.17901686f, 0.17096189f, 0.16289547f, 0.15481816f, 0.14673047f, - 0.13863297f, 0.13052619f, 0.12241068f, 0.11428696f, 0.10615561f, - 0.09801714f, 0.08987211f, 0.08172107f, 0.07356456f, 0.06540313f, - 0.05723732f, 0.04906767f, 0.04089475f, 0.03271908f, 0.02454123f, - 0.01636173f, 0.00818114f}; - -// Hanning window: for 15ms at 16kHz with symmetric zeros -static const float kBlocks240w512[512] = { - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00654494f, 0.01308960f, 0.01963369f, - 0.02617695f, 0.03271908f, 0.03925982f, 0.04579887f, 0.05233596f, - 0.05887080f, 0.06540313f, 0.07193266f, 0.07845910f, 0.08498218f, - 0.09150162f, 0.09801714f, 0.10452846f, 0.11103531f, 0.11753740f, - 0.12403446f, 0.13052620f, 0.13701233f, 0.14349262f, 0.14996676f, - 0.15643448f, 0.16289547f, 0.16934951f, 0.17579629f, 0.18223552f, - 0.18866697f, 0.19509032f, 0.20150533f, 0.20791170f, 0.21430916f, - 0.22069745f, 0.22707628f, 0.23344538f, 0.23980446f, 0.24615330f, - 0.25249159f, 0.25881904f, 0.26513544f, 0.27144045f, 0.27773386f, - 0.28401536f, 0.29028466f, 0.29654160f, 0.30278578f, 0.30901700f, - 0.31523499f, 0.32143945f, 0.32763019f, 0.33380687f, 0.33996925f, - 0.34611708f, 0.35225007f, 0.35836795f, 0.36447051f, 0.37055743f, - 0.37662852f, 0.38268346f, 0.38872197f, 0.39474389f, 0.40074885f, - 0.40673664f, 0.41270703f, 0.41865975f, 0.42459452f, 0.43051112f, - 0.43640924f, 0.44228873f, 0.44814920f, 0.45399052f, 0.45981237f, - 0.46561453f, 0.47139674f, 0.47715878f, 0.48290035f, 0.48862126f, - 0.49432120f, 0.50000000f, 0.50565743f, 0.51129311f, 0.51690692f, - 0.52249855f, 0.52806789f, 0.53361452f, 0.53913832f, 0.54463905f, - 0.55011642f, 0.55557024f, 0.56100029f, 0.56640625f, 0.57178795f, - 0.57714522f, 0.58247769f, 0.58778524f, 0.59306765f, 0.59832460f, - 0.60355598f, 0.60876143f, 0.61394083f, 0.61909395f, 0.62422055f, - 0.62932038f, 0.63439333f, 0.63943899f, 0.64445734f, 0.64944810f, - 0.65441096f, 0.65934587f, 0.66425246f, 0.66913062f, 0.67398012f, - 0.67880076f, 0.68359232f, 0.68835455f, 0.69308740f, 0.69779050f, - 0.70246369f, 0.70710677f, 0.71171963f, 0.71630198f, 0.72085363f, - 0.72537440f, 0.72986406f, 0.73432255f, 0.73874950f, 0.74314487f, - 0.74750835f, 0.75183982f, 0.75613910f, 0.76040596f, 0.76464027f, - 0.76884186f, 0.77301043f, 0.77714598f, 0.78124821f, 0.78531694f, - 0.78935206f, 0.79335338f, 0.79732066f, 0.80125386f, 0.80515265f, - 0.80901700f, 0.81284672f, 0.81664157f, 0.82040149f, 0.82412618f, - 0.82781565f, 0.83146966f, 0.83508795f, 0.83867061f, 0.84221727f, - 0.84572780f, 0.84920216f, 0.85264021f, 
0.85604161f, 0.85940641f, - 0.86273444f, 0.86602545f, 0.86927933f, 0.87249607f, 0.87567532f, - 0.87881714f, 0.88192129f, 0.88498765f, 0.88801610f, 0.89100653f, - 0.89395881f, 0.89687276f, 0.89974827f, 0.90258533f, 0.90538365f, - 0.90814316f, 0.91086388f, 0.91354549f, 0.91618794f, 0.91879123f, - 0.92135513f, 0.92387950f, 0.92636442f, 0.92880958f, 0.93121493f, - 0.93358046f, 0.93590593f, 0.93819135f, 0.94043654f, 0.94264150f, - 0.94480604f, 0.94693011f, 0.94901365f, 0.95105654f, 0.95305866f, - 0.95501995f, 0.95694035f, 0.95881975f, 0.96065807f, 0.96245527f, - 0.96421117f, 0.96592581f, 0.96759909f, 0.96923089f, 0.97082120f, - 0.97236991f, 0.97387701f, 0.97534233f, 0.97676587f, 0.97814763f, - 0.97948742f, 0.98078531f, 0.98204112f, 0.98325491f, 0.98442656f, - 0.98555607f, 0.98664331f, 0.98768836f, 0.98869103f, 0.98965138f, - 0.99056935f, 0.99144489f, 0.99227792f, 0.99306846f, 0.99381649f, - 0.99452192f, 0.99518472f, 0.99580491f, 0.99638247f, 0.99691731f, - 0.99740952f, 0.99785894f, 0.99826562f, 0.99862951f, 0.99895066f, - 0.99922901f, 0.99946457f, 0.99965733f, 0.99980724f, 0.99991435f, - 0.99997860f, 1.00000000f, 0.99997860f, 0.99991435f, 0.99980724f, - 0.99965733f, 0.99946457f, 0.99922901f, 0.99895066f, 0.99862951f, - 0.99826562f, 0.99785894f, 0.99740946f, 0.99691731f, 0.99638247f, - 0.99580491f, 0.99518472f, 0.99452192f, 0.99381644f, 0.99306846f, - 0.99227792f, 0.99144489f, 0.99056935f, 0.98965138f, 0.98869103f, - 0.98768836f, 0.98664331f, 0.98555607f, 0.98442656f, 0.98325491f, - 0.98204112f, 0.98078525f, 0.97948742f, 0.97814757f, 0.97676587f, - 0.97534227f, 0.97387695f, 0.97236991f, 0.97082120f, 0.96923089f, - 0.96759909f, 0.96592581f, 0.96421117f, 0.96245521f, 0.96065807f, - 0.95881969f, 0.95694029f, 0.95501995f, 0.95305860f, 0.95105648f, - 0.94901365f, 0.94693011f, 0.94480604f, 0.94264150f, 0.94043654f, - 0.93819129f, 0.93590593f, 0.93358046f, 0.93121493f, 0.92880952f, - 0.92636436f, 0.92387950f, 0.92135507f, 0.91879123f, 0.91618794f, - 0.91354543f, 0.91086382f, 0.90814310f, 0.90538365f, 0.90258527f, - 0.89974827f, 0.89687276f, 0.89395875f, 0.89100647f, 0.88801610f, - 0.88498759f, 0.88192123f, 0.87881714f, 0.87567532f, 0.87249595f, - 0.86927933f, 0.86602539f, 0.86273432f, 0.85940641f, 0.85604161f, - 0.85264009f, 0.84920216f, 0.84572780f, 0.84221715f, 0.83867055f, - 0.83508795f, 0.83146954f, 0.82781565f, 0.82412612f, 0.82040137f, - 0.81664157f, 0.81284660f, 0.80901700f, 0.80515265f, 0.80125374f, - 0.79732066f, 0.79335332f, 0.78935200f, 0.78531694f, 0.78124815f, - 0.77714586f, 0.77301049f, 0.76884180f, 0.76464021f, 0.76040596f, - 0.75613904f, 0.75183970f, 0.74750835f, 0.74314481f, 0.73874938f, - 0.73432249f, 0.72986400f, 0.72537428f, 0.72085363f, 0.71630186f, - 0.71171951f, 0.70710677f, 0.70246363f, 0.69779032f, 0.69308734f, - 0.68835449f, 0.68359220f, 0.67880070f, 0.67398006f, 0.66913044f, - 0.66425240f, 0.65934575f, 0.65441096f, 0.64944804f, 0.64445722f, - 0.63943905f, 0.63439327f, 0.62932026f, 0.62422055f, 0.61909389f, - 0.61394072f, 0.60876143f, 0.60355592f, 0.59832448f, 0.59306765f, - 0.58778518f, 0.58247757f, 0.57714522f, 0.57178789f, 0.56640613f, - 0.56100023f, 0.55557019f, 0.55011630f, 0.54463905f, 0.53913826f, - 0.53361434f, 0.52806783f, 0.52249849f, 0.51690674f, 0.51129305f, - 0.50565726f, 0.50000006f, 0.49432117f, 0.48862115f, 0.48290038f, - 0.47715873f, 0.47139663f, 0.46561456f, 0.45981231f, 0.45399037f, - 0.44814920f, 0.44228864f, 0.43640912f, 0.43051112f, 0.42459446f, - 0.41865960f, 0.41270703f, 0.40673658f, 0.40074870f, 0.39474386f, - 0.38872188f, 0.38268328f, 0.37662849f, 
0.37055734f, 0.36447033f, - 0.35836792f, 0.35224995f, 0.34611690f, 0.33996922f, 0.33380675f, - 0.32763001f, 0.32143945f, 0.31523487f, 0.30901679f, 0.30278572f, - 0.29654145f, 0.29028472f, 0.28401530f, 0.27773371f, 0.27144048f, - 0.26513538f, 0.25881892f, 0.25249159f, 0.24615324f, 0.23980433f, - 0.23344538f, 0.22707619f, 0.22069728f, 0.21430916f, 0.20791161f, - 0.20150517f, 0.19509031f, 0.18866688f, 0.18223536f, 0.17579627f, - 0.16934940f, 0.16289529f, 0.15643445f, 0.14996666f, 0.14349243f, - 0.13701232f, 0.13052608f, 0.12403426f, 0.11753736f, 0.11103519f, - 0.10452849f, 0.09801710f, 0.09150149f, 0.08498220f, 0.07845904f, - 0.07193252f, 0.06540315f, 0.05887074f, 0.05233581f, 0.04579888f, - 0.03925974f, 0.03271893f, 0.02617695f, 0.01963361f, 0.01308943f, - 0.00654493f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f}; - -// Hanning window: for 30ms with 1024 fft with symmetric zeros at 16kHz -static const float kBlocks480w1024[1024] = { - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00000000f, 0.00327249f, 0.00654494f, - 0.00981732f, 0.01308960f, 0.01636173f, 0.01963369f, 0.02290544f, - 0.02617695f, 0.02944817f, 0.03271908f, 0.03598964f, 0.03925982f, - 0.04252957f, 0.04579887f, 0.04906768f, 0.05233596f, 0.05560368f, - 0.05887080f, 0.06213730f, 0.06540313f, 0.06866825f, 0.07193266f, - 0.07519628f, 0.07845910f, 0.08172107f, 0.08498218f, 0.08824237f, - 0.09150162f, 0.09475989f, 0.09801714f, 0.10127335f, 0.10452846f, - 0.10778246f, 0.11103531f, 0.11428697f, 0.11753740f, 0.12078657f, - 0.12403446f, 0.12728101f, 0.13052620f, 0.13376999f, 0.13701233f, - 0.14025325f, 0.14349262f, 0.14673047f, 0.14996676f, 0.15320145f, - 0.15643448f, 0.15966582f, 0.16289547f, 0.16612339f, 0.16934951f, - 0.17257382f, 0.17579629f, 0.17901687f, 0.18223552f, 0.18545224f, - 0.18866697f, 0.19187967f, 0.19509032f, 0.19829889f, 0.20150533f, - 0.20470962f, 0.20791170f, 0.21111156f, 0.21430916f, 0.21750447f, - 0.22069745f, 0.22388805f, 0.22707628f, 0.23026206f, 0.23344538f, - 0.23662618f, 0.23980446f, 0.24298020f, 0.24615330f, 0.24932377f, - 0.25249159f, 0.25565669f, 0.25881904f, 0.26197866f, 0.26513544f, - 0.26828939f, 0.27144045f, 0.27458861f, 0.27773386f, 0.28087610f, - 0.28401536f, 0.28715158f, 0.29028466f, 0.29341471f, 0.29654160f, - 0.29966527f, 0.30278578f, 0.30590302f, 0.30901700f, 0.31212768f, - 0.31523499f, 0.31833893f, 0.32143945f, 0.32453656f, 0.32763019f, - 0.33072028f, 0.33380687f, 0.33688986f, 0.33996925f, 0.34304500f, - 0.34611708f, 0.34918544f, 0.35225007f, 0.35531089f, 0.35836795f, - 0.36142117f, 0.36447051f, 0.36751595f, 0.37055743f, 0.37359497f, - 0.37662852f, 0.37965801f, 0.38268346f, 0.38570479f, 0.38872197f, - 0.39173502f, 0.39474389f, 0.39774847f, 0.40074885f, 0.40374491f, - 0.40673664f, 0.40972406f, 0.41270703f, 0.41568562f, 0.41865975f, - 0.42162940f, 0.42459452f, 0.42755508f, 0.43051112f, 0.43346250f, - 0.43640924f, 0.43935132f, 0.44228873f, 0.44522133f, 0.44814920f, - 0.45107228f, 0.45399052f, 0.45690390f, 0.45981237f, 0.46271592f, - 0.46561453f, 0.46850815f, 
0.47139674f, 0.47428030f, 0.47715878f, - 0.48003215f, 0.48290035f, 0.48576337f, 0.48862126f, 0.49147385f, - 0.49432120f, 0.49716330f, 0.50000000f, 0.50283140f, 0.50565743f, - 0.50847799f, 0.51129311f, 0.51410276f, 0.51690692f, 0.51970553f, - 0.52249855f, 0.52528602f, 0.52806789f, 0.53084403f, 0.53361452f, - 0.53637928f, 0.53913832f, 0.54189163f, 0.54463905f, 0.54738063f, - 0.55011642f, 0.55284631f, 0.55557024f, 0.55828828f, 0.56100029f, - 0.56370628f, 0.56640625f, 0.56910014f, 0.57178795f, 0.57446963f, - 0.57714522f, 0.57981455f, 0.58247769f, 0.58513463f, 0.58778524f, - 0.59042960f, 0.59306765f, 0.59569931f, 0.59832460f, 0.60094351f, - 0.60355598f, 0.60616195f, 0.60876143f, 0.61135441f, 0.61394083f, - 0.61652070f, 0.61909395f, 0.62166059f, 0.62422055f, 0.62677383f, - 0.62932038f, 0.63186020f, 0.63439333f, 0.63691956f, 0.63943899f, - 0.64195162f, 0.64445734f, 0.64695615f, 0.64944810f, 0.65193301f, - 0.65441096f, 0.65688187f, 0.65934587f, 0.66180271f, 0.66425246f, - 0.66669512f, 0.66913062f, 0.67155898f, 0.67398012f, 0.67639405f, - 0.67880076f, 0.68120021f, 0.68359232f, 0.68597710f, 0.68835455f, - 0.69072467f, 0.69308740f, 0.69544262f, 0.69779050f, 0.70013082f, - 0.70246369f, 0.70478904f, 0.70710677f, 0.70941699f, 0.71171963f, - 0.71401459f, 0.71630198f, 0.71858168f, 0.72085363f, 0.72311789f, - 0.72537440f, 0.72762316f, 0.72986406f, 0.73209721f, 0.73432255f, - 0.73653996f, 0.73874950f, 0.74095118f, 0.74314487f, 0.74533057f, - 0.74750835f, 0.74967808f, 0.75183982f, 0.75399351f, 0.75613910f, - 0.75827658f, 0.76040596f, 0.76252723f, 0.76464027f, 0.76674515f, - 0.76884186f, 0.77093029f, 0.77301043f, 0.77508241f, 0.77714598f, - 0.77920127f, 0.78124821f, 0.78328675f, 0.78531694f, 0.78733873f, - 0.78935206f, 0.79135692f, 0.79335338f, 0.79534125f, 0.79732066f, - 0.79929149f, 0.80125386f, 0.80320752f, 0.80515265f, 0.80708915f, - 0.80901700f, 0.81093621f, 0.81284672f, 0.81474853f, 0.81664157f, - 0.81852591f, 0.82040149f, 0.82226825f, 0.82412618f, 0.82597536f, - 0.82781565f, 0.82964706f, 0.83146966f, 0.83328325f, 0.83508795f, - 0.83688378f, 0.83867061f, 0.84044838f, 0.84221727f, 0.84397703f, - 0.84572780f, 0.84746957f, 0.84920216f, 0.85092574f, 0.85264021f, - 0.85434544f, 0.85604161f, 0.85772866f, 0.85940641f, 0.86107504f, - 0.86273444f, 0.86438453f, 0.86602545f, 0.86765707f, 0.86927933f, - 0.87089235f, 0.87249607f, 0.87409031f, 0.87567532f, 0.87725097f, - 0.87881714f, 0.88037390f, 0.88192129f, 0.88345921f, 0.88498765f, - 0.88650668f, 0.88801610f, 0.88951612f, 0.89100653f, 0.89248741f, - 0.89395881f, 0.89542055f, 0.89687276f, 0.89831537f, 0.89974827f, - 0.90117162f, 0.90258533f, 0.90398932f, 0.90538365f, 0.90676826f, - 0.90814316f, 0.90950841f, 0.91086388f, 0.91220951f, 0.91354549f, - 0.91487163f, 0.91618794f, 0.91749454f, 0.91879123f, 0.92007810f, - 0.92135513f, 0.92262226f, 0.92387950f, 0.92512691f, 0.92636442f, - 0.92759192f, 0.92880958f, 0.93001723f, 0.93121493f, 0.93240267f, - 0.93358046f, 0.93474817f, 0.93590593f, 0.93705362f, 0.93819135f, - 0.93931901f, 0.94043654f, 0.94154406f, 0.94264150f, 0.94372880f, - 0.94480604f, 0.94587320f, 0.94693011f, 0.94797695f, 0.94901365f, - 0.95004016f, 0.95105654f, 0.95206273f, 0.95305866f, 0.95404440f, - 0.95501995f, 0.95598525f, 0.95694035f, 0.95788521f, 0.95881975f, - 0.95974404f, 0.96065807f, 0.96156180f, 0.96245527f, 0.96333838f, - 0.96421117f, 0.96507370f, 0.96592581f, 0.96676767f, 0.96759909f, - 0.96842021f, 0.96923089f, 0.97003126f, 0.97082120f, 0.97160077f, - 0.97236991f, 0.97312868f, 0.97387701f, 0.97461486f, 0.97534233f, - 0.97605932f, 0.97676587f, 
0.97746199f, 0.97814763f, 0.97882277f, - 0.97948742f, 0.98014158f, 0.98078531f, 0.98141843f, 0.98204112f, - 0.98265332f, 0.98325491f, 0.98384601f, 0.98442656f, 0.98499662f, - 0.98555607f, 0.98610497f, 0.98664331f, 0.98717111f, 0.98768836f, - 0.98819500f, 0.98869103f, 0.98917651f, 0.98965138f, 0.99011570f, - 0.99056935f, 0.99101239f, 0.99144489f, 0.99186671f, 0.99227792f, - 0.99267852f, 0.99306846f, 0.99344778f, 0.99381649f, 0.99417448f, - 0.99452192f, 0.99485862f, 0.99518472f, 0.99550015f, 0.99580491f, - 0.99609905f, 0.99638247f, 0.99665523f, 0.99691731f, 0.99716878f, - 0.99740952f, 0.99763954f, 0.99785894f, 0.99806762f, 0.99826562f, - 0.99845290f, 0.99862951f, 0.99879545f, 0.99895066f, 0.99909520f, - 0.99922901f, 0.99935216f, 0.99946457f, 0.99956632f, 0.99965733f, - 0.99973762f, 0.99980724f, 0.99986613f, 0.99991435f, 0.99995178f, - 0.99997860f, 0.99999464f, 1.00000000f, 0.99999464f, 0.99997860f, - 0.99995178f, 0.99991435f, 0.99986613f, 0.99980724f, 0.99973762f, - 0.99965733f, 0.99956632f, 0.99946457f, 0.99935216f, 0.99922901f, - 0.99909520f, 0.99895066f, 0.99879545f, 0.99862951f, 0.99845290f, - 0.99826562f, 0.99806762f, 0.99785894f, 0.99763954f, 0.99740946f, - 0.99716872f, 0.99691731f, 0.99665523f, 0.99638247f, 0.99609905f, - 0.99580491f, 0.99550015f, 0.99518472f, 0.99485862f, 0.99452192f, - 0.99417448f, 0.99381644f, 0.99344778f, 0.99306846f, 0.99267852f, - 0.99227792f, 0.99186671f, 0.99144489f, 0.99101239f, 0.99056935f, - 0.99011564f, 0.98965138f, 0.98917651f, 0.98869103f, 0.98819494f, - 0.98768836f, 0.98717111f, 0.98664331f, 0.98610497f, 0.98555607f, - 0.98499656f, 0.98442656f, 0.98384601f, 0.98325491f, 0.98265326f, - 0.98204112f, 0.98141843f, 0.98078525f, 0.98014158f, 0.97948742f, - 0.97882277f, 0.97814757f, 0.97746193f, 0.97676587f, 0.97605932f, - 0.97534227f, 0.97461486f, 0.97387695f, 0.97312862f, 0.97236991f, - 0.97160077f, 0.97082120f, 0.97003126f, 0.96923089f, 0.96842015f, - 0.96759909f, 0.96676761f, 0.96592581f, 0.96507365f, 0.96421117f, - 0.96333838f, 0.96245521f, 0.96156180f, 0.96065807f, 0.95974404f, - 0.95881969f, 0.95788515f, 0.95694029f, 0.95598525f, 0.95501995f, - 0.95404440f, 0.95305860f, 0.95206267f, 0.95105648f, 0.95004016f, - 0.94901365f, 0.94797695f, 0.94693011f, 0.94587314f, 0.94480604f, - 0.94372880f, 0.94264150f, 0.94154406f, 0.94043654f, 0.93931895f, - 0.93819129f, 0.93705362f, 0.93590593f, 0.93474817f, 0.93358046f, - 0.93240267f, 0.93121493f, 0.93001723f, 0.92880952f, 0.92759192f, - 0.92636436f, 0.92512691f, 0.92387950f, 0.92262226f, 0.92135507f, - 0.92007804f, 0.91879123f, 0.91749448f, 0.91618794f, 0.91487157f, - 0.91354543f, 0.91220951f, 0.91086382f, 0.90950835f, 0.90814310f, - 0.90676820f, 0.90538365f, 0.90398932f, 0.90258527f, 0.90117157f, - 0.89974827f, 0.89831525f, 0.89687276f, 0.89542055f, 0.89395875f, - 0.89248741f, 0.89100647f, 0.88951600f, 0.88801610f, 0.88650662f, - 0.88498759f, 0.88345915f, 0.88192123f, 0.88037384f, 0.87881714f, - 0.87725091f, 0.87567532f, 0.87409031f, 0.87249595f, 0.87089223f, - 0.86927933f, 0.86765701f, 0.86602539f, 0.86438447f, 0.86273432f, - 0.86107504f, 0.85940641f, 0.85772860f, 0.85604161f, 0.85434544f, - 0.85264009f, 0.85092574f, 0.84920216f, 0.84746951f, 0.84572780f, - 0.84397697f, 0.84221715f, 0.84044844f, 0.83867055f, 0.83688372f, - 0.83508795f, 0.83328319f, 0.83146954f, 0.82964706f, 0.82781565f, - 0.82597530f, 0.82412612f, 0.82226813f, 0.82040137f, 0.81852591f, - 0.81664157f, 0.81474847f, 0.81284660f, 0.81093609f, 0.80901700f, - 0.80708915f, 0.80515265f, 0.80320752f, 0.80125374f, 0.79929143f, - 0.79732066f, 0.79534125f, 
0.79335332f, 0.79135686f, 0.78935200f, - 0.78733861f, 0.78531694f, 0.78328675f, 0.78124815f, 0.77920121f, - 0.77714586f, 0.77508223f, 0.77301049f, 0.77093029f, 0.76884180f, - 0.76674509f, 0.76464021f, 0.76252711f, 0.76040596f, 0.75827658f, - 0.75613904f, 0.75399339f, 0.75183970f, 0.74967796f, 0.74750835f, - 0.74533057f, 0.74314481f, 0.74095106f, 0.73874938f, 0.73653996f, - 0.73432249f, 0.73209721f, 0.72986400f, 0.72762305f, 0.72537428f, - 0.72311789f, 0.72085363f, 0.71858162f, 0.71630186f, 0.71401453f, - 0.71171951f, 0.70941705f, 0.70710677f, 0.70478898f, 0.70246363f, - 0.70013070f, 0.69779032f, 0.69544268f, 0.69308734f, 0.69072461f, - 0.68835449f, 0.68597704f, 0.68359220f, 0.68120021f, 0.67880070f, - 0.67639399f, 0.67398006f, 0.67155886f, 0.66913044f, 0.66669512f, - 0.66425240f, 0.66180259f, 0.65934575f, 0.65688181f, 0.65441096f, - 0.65193301f, 0.64944804f, 0.64695609f, 0.64445722f, 0.64195150f, - 0.63943905f, 0.63691956f, 0.63439327f, 0.63186014f, 0.62932026f, - 0.62677372f, 0.62422055f, 0.62166059f, 0.61909389f, 0.61652064f, - 0.61394072f, 0.61135429f, 0.60876143f, 0.60616189f, 0.60355592f, - 0.60094339f, 0.59832448f, 0.59569913f, 0.59306765f, 0.59042960f, - 0.58778518f, 0.58513451f, 0.58247757f, 0.57981461f, 0.57714522f, - 0.57446963f, 0.57178789f, 0.56910002f, 0.56640613f, 0.56370628f, - 0.56100023f, 0.55828822f, 0.55557019f, 0.55284619f, 0.55011630f, - 0.54738069f, 0.54463905f, 0.54189152f, 0.53913826f, 0.53637916f, - 0.53361434f, 0.53084403f, 0.52806783f, 0.52528596f, 0.52249849f, - 0.51970541f, 0.51690674f, 0.51410276f, 0.51129305f, 0.50847787f, - 0.50565726f, 0.50283122f, 0.50000006f, 0.49716327f, 0.49432117f, - 0.49147379f, 0.48862115f, 0.48576325f, 0.48290038f, 0.48003212f, - 0.47715873f, 0.47428021f, 0.47139663f, 0.46850798f, 0.46561456f, - 0.46271589f, 0.45981231f, 0.45690379f, 0.45399037f, 0.45107210f, - 0.44814920f, 0.44522130f, 0.44228864f, 0.43935123f, 0.43640912f, - 0.43346232f, 0.43051112f, 0.42755505f, 0.42459446f, 0.42162928f, - 0.41865960f, 0.41568545f, 0.41270703f, 0.40972400f, 0.40673658f, - 0.40374479f, 0.40074870f, 0.39774850f, 0.39474386f, 0.39173496f, - 0.38872188f, 0.38570464f, 0.38268328f, 0.37965804f, 0.37662849f, - 0.37359491f, 0.37055734f, 0.36751580f, 0.36447033f, 0.36142117f, - 0.35836792f, 0.35531086f, 0.35224995f, 0.34918529f, 0.34611690f, - 0.34304500f, 0.33996922f, 0.33688980f, 0.33380675f, 0.33072016f, - 0.32763001f, 0.32453656f, 0.32143945f, 0.31833887f, 0.31523487f, - 0.31212750f, 0.30901679f, 0.30590302f, 0.30278572f, 0.29966521f, - 0.29654145f, 0.29341453f, 0.29028472f, 0.28715155f, 0.28401530f, - 0.28087601f, 0.27773371f, 0.27458847f, 0.27144048f, 0.26828936f, - 0.26513538f, 0.26197854f, 0.25881892f, 0.25565651f, 0.25249159f, - 0.24932374f, 0.24615324f, 0.24298008f, 0.23980433f, 0.23662600f, - 0.23344538f, 0.23026201f, 0.22707619f, 0.22388794f, 0.22069728f, - 0.21750426f, 0.21430916f, 0.21111152f, 0.20791161f, 0.20470949f, - 0.20150517f, 0.19829892f, 0.19509031f, 0.19187963f, 0.18866688f, - 0.18545210f, 0.18223536f, 0.17901689f, 0.17579627f, 0.17257376f, - 0.16934940f, 0.16612324f, 0.16289529f, 0.15966584f, 0.15643445f, - 0.15320137f, 0.14996666f, 0.14673033f, 0.14349243f, 0.14025325f, - 0.13701232f, 0.13376991f, 0.13052608f, 0.12728085f, 0.12403426f, - 0.12078657f, 0.11753736f, 0.11428688f, 0.11103519f, 0.10778230f, - 0.10452849f, 0.10127334f, 0.09801710f, 0.09475980f, 0.09150149f, - 0.08824220f, 0.08498220f, 0.08172106f, 0.07845904f, 0.07519618f, - 0.07193252f, 0.06866808f, 0.06540315f, 0.06213728f, 0.05887074f, - 0.05560357f, 0.05233581f, 
0.04906749f, 0.04579888f, 0.04252954f, - 0.03925974f, 0.03598953f, 0.03271893f, 0.02944798f, 0.02617695f, - 0.02290541f, 0.01963361f, 0.01636161f, 0.01308943f, 0.00981712f, - 0.00654493f, 0.00327244f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, - 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_WINDOWS_PRIVATE_H_ diff --git a/modules/audio_processing/transient/wpd_node.cc b/modules/audio_processing/transient/wpd_node.cc deleted file mode 100644 index 2e0ee7e5b7..0000000000 --- a/modules/audio_processing/transient/wpd_node.cc +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/transient/wpd_node.h" - -#include -#include - -#include "common_audio/fir_filter.h" -#include "common_audio/fir_filter_factory.h" -#include "modules/audio_processing/transient/dyadic_decimator.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -WPDNode::WPDNode(size_t length, - const float* coefficients, - size_t coefficients_length) - : // The data buffer has parent data length to be able to contain and - // filter it. - data_(new float[2 * length + 1]), - length_(length), - filter_( - CreateFirFilter(coefficients, coefficients_length, 2 * length + 1)) { - RTC_DCHECK_GT(length, 0); - RTC_DCHECK(coefficients); - RTC_DCHECK_GT(coefficients_length, 0); - memset(data_.get(), 0.f, (2 * length + 1) * sizeof(data_[0])); -} - -WPDNode::~WPDNode() {} - -int WPDNode::Update(const float* parent_data, size_t parent_data_length) { - if (!parent_data || (parent_data_length / 2) != length_) { - return -1; - } - - // Filter data. - filter_->Filter(parent_data, parent_data_length, data_.get()); - - // Decimate data. - const bool kOddSequence = true; - size_t output_samples = DyadicDecimate(data_.get(), parent_data_length, - kOddSequence, data_.get(), length_); - if (output_samples != length_) { - return -1; - } - - // Get abs to all values. - for (size_t i = 0; i < length_; ++i) { - data_[i] = fabs(data_[i]); - } - - return 0; -} - -int WPDNode::set_data(const float* new_data, size_t length) { - if (!new_data || length != length_) { - return -1; - } - memcpy(data_.get(), new_data, length * sizeof(data_[0])); - return 0; -} - -} // namespace webrtc diff --git a/modules/audio_processing/transient/wpd_node.h b/modules/audio_processing/transient/wpd_node.h deleted file mode 100644 index 645bc5f2e8..0000000000 --- a/modules/audio_processing/transient/wpd_node.h +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_WPD_NODE_H_ -#define MODULES_AUDIO_PROCESSING_TRANSIENT_WPD_NODE_H_ - -#include <stddef.h> -#include <memory> - -namespace webrtc { - -class FIRFilter; - -// A single node of a Wavelet Packet Decomposition (WPD) tree. -class WPDNode { - public: - // Creates a WPDNode. The data vector will contain zeros. The filter will have - // the coefficients provided. - WPDNode(size_t length, const float* coefficients, size_t coefficients_length); - ~WPDNode(); - - // Updates the node data. `parent_data_length` / 2 must be equal to `length_`. - // Returns 0 if correct, and -1 otherwise. - int Update(const float* parent_data, size_t parent_data_length); - - const float* data() const { return data_.get(); } - // Returns 0 if correct, and -1 otherwise. - int set_data(const float* new_data, size_t length); - size_t length() const { return length_; } - - private: - std::unique_ptr<float[]> data_; - size_t length_; - std::unique_ptr<FIRFilter> filter_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_WPD_NODE_H_ diff --git a/modules/audio_processing/transient/wpd_node_unittest.cc b/modules/audio_processing/transient/wpd_node_unittest.cc deleted file mode 100644 index 5f9238255c..0000000000 --- a/modules/audio_processing/transient/wpd_node_unittest.cc +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree.
- */ - -#include "modules/audio_processing/transient/wpd_node.h" - -#include - -#include "test/gtest.h" - -namespace webrtc { - -static const size_t kDataLength = 5; -static const float kTolerance = 0.0001f; - -static const size_t kParentDataLength = kDataLength * 2; -static const float kParentData[kParentDataLength] = {1.f, 2.f, 3.f, 4.f, 5.f, - 6.f, 7.f, 8.f, 9.f, 10.f}; - -static const float kCoefficients[] = {0.2f, -0.3f, 0.5f, -0.7f, 0.11f}; -static const size_t kCoefficientsLength = - sizeof(kCoefficients) / sizeof(kCoefficients[0]); - -TEST(WPDNodeTest, Accessors) { - WPDNode node(kDataLength, kCoefficients, kCoefficientsLength); - EXPECT_EQ(0, node.set_data(kParentData, kDataLength)); - EXPECT_EQ(0, memcmp(node.data(), kParentData, - kDataLength * sizeof(node.data()[0]))); -} - -TEST(WPDNodeTest, UpdateThatOnlyDecimates) { - const float kIndentyCoefficient = 1.f; - WPDNode node(kDataLength, &kIndentyCoefficient, 1); - EXPECT_EQ(0, node.Update(kParentData, kParentDataLength)); - for (size_t i = 0; i < kDataLength; ++i) { - EXPECT_FLOAT_EQ(kParentData[i * 2 + 1], node.data()[i]); - } -} - -TEST(WPDNodeTest, UpdateWithArbitraryDataAndArbitraryFilter) { - WPDNode node(kDataLength, kCoefficients, kCoefficientsLength); - EXPECT_EQ(0, node.Update(kParentData, kParentDataLength)); - EXPECT_NEAR(0.1f, node.data()[0], kTolerance); - EXPECT_NEAR(0.2f, node.data()[1], kTolerance); - EXPECT_NEAR(0.18f, node.data()[2], kTolerance); - EXPECT_NEAR(0.56f, node.data()[3], kTolerance); - EXPECT_NEAR(0.94f, node.data()[4], kTolerance); -} - -TEST(WPDNodeTest, ExpectedErrorReturnValue) { - WPDNode node(kDataLength, kCoefficients, kCoefficientsLength); - EXPECT_EQ(-1, node.Update(kParentData, kParentDataLength - 1)); - EXPECT_EQ(-1, node.Update(NULL, kParentDataLength)); - EXPECT_EQ(-1, node.set_data(kParentData, kDataLength - 1)); - EXPECT_EQ(-1, node.set_data(NULL, kDataLength)); -} - -} // namespace webrtc diff --git a/modules/audio_processing/transient/wpd_tree.cc b/modules/audio_processing/transient/wpd_tree.cc deleted file mode 100644 index c8aa615881..0000000000 --- a/modules/audio_processing/transient/wpd_tree.cc +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_processing/transient/wpd_tree.h" - -#include - -#include "modules/audio_processing/transient/wpd_node.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -WPDTree::WPDTree(size_t data_length, - const float* high_pass_coefficients, - const float* low_pass_coefficients, - size_t coefficients_length, - int levels) - : data_length_(data_length), - levels_(levels), - num_nodes_((1 << (levels + 1)) - 1) { - RTC_DCHECK_GT(data_length, (static_cast(1) << levels)); - RTC_DCHECK(high_pass_coefficients); - RTC_DCHECK(low_pass_coefficients); - RTC_DCHECK_GT(levels, 0); - // Size is 1 more, so we can use the array as 1-based. nodes_[0] is never - // allocated. - nodes_.reset(new std::unique_ptr[num_nodes_ + 1]); - - // Create the first node - const float kRootCoefficient = 1.f; // Identity Coefficient. - nodes_[1].reset(new WPDNode(data_length, &kRootCoefficient, 1)); - // Variables used to create the rest of the nodes. 
- size_t index = 1; - size_t index_left_child = 0; - size_t index_right_child = 0; - - int num_nodes_at_curr_level = 0; - - // Branching each node in each level to create its children. The last level is - // not branched (all the nodes of that level are leaves). - for (int current_level = 0; current_level < levels; ++current_level) { - num_nodes_at_curr_level = 1 << current_level; - for (int i = 0; i < num_nodes_at_curr_level; ++i) { - index = (1 << current_level) + i; - // Obtain the index of the current node children. - index_left_child = index * 2; - index_right_child = index_left_child + 1; - nodes_[index_left_child].reset(new WPDNode(nodes_[index]->length() / 2, - low_pass_coefficients, - coefficients_length)); - nodes_[index_right_child].reset(new WPDNode(nodes_[index]->length() / 2, - high_pass_coefficients, - coefficients_length)); - } - } -} - -WPDTree::~WPDTree() {} - -WPDNode* WPDTree::NodeAt(int level, int index) { - if (level < 0 || level > levels_ || index < 0 || index >= 1 << level) { - return NULL; - } - - return nodes_[(1 << level) + index].get(); -} - -int WPDTree::Update(const float* data, size_t data_length) { - if (!data || data_length != data_length_) { - return -1; - } - - // Update the root node. - int update_result = nodes_[1]->set_data(data, data_length); - if (update_result != 0) { - return -1; - } - - // Variables used to update the rest of the nodes. - size_t index = 1; - size_t index_left_child = 0; - size_t index_right_child = 0; - - int num_nodes_at_curr_level = 0; - - for (int current_level = 0; current_level < levels_; ++current_level) { - num_nodes_at_curr_level = 1 << current_level; - for (int i = 0; i < num_nodes_at_curr_level; ++i) { - index = (1 << current_level) + i; - // Obtain the index of the current node children. - index_left_child = index * 2; - index_right_child = index_left_child + 1; - - update_result = nodes_[index_left_child]->Update(nodes_[index]->data(), - nodes_[index]->length()); - if (update_result != 0) { - return -1; - } - - update_result = nodes_[index_right_child]->Update( - nodes_[index]->data(), nodes_[index]->length()); - if (update_result != 0) { - return -1; - } - } - } - - return 0; -} - -} // namespace webrtc diff --git a/modules/audio_processing/transient/wpd_tree.h b/modules/audio_processing/transient/wpd_tree.h deleted file mode 100644 index 13cb8d9c2f..0000000000 --- a/modules/audio_processing/transient/wpd_tree.h +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_WPD_TREE_H_ -#define MODULES_AUDIO_PROCESSING_TRANSIENT_WPD_TREE_H_ - -#include - -#include - -#include "modules/audio_processing/transient/wpd_node.h" - -namespace webrtc { - -// Tree of a Wavelet Packet Decomposition (WPD). -// -// The root node contains all the data provided; for each node in the tree, the -// left child contains the approximation coefficients extracted from the node, -// and the right child contains the detail coefficients. -// It preserves its state, so it can be multiple-called. -// -// The number of nodes in the tree will be 2 ^ levels - 1. 
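The formulas listed immediately below map a (level, index) pair onto a flat, 1-based array, which is how the deleted tree avoids per-node parent/child pointers; note that with the root at level 0 and leaves at level `levels`, the deleted wpd_tree.cc constructor sizes the tree as (1 << (levels + 1)) - 1 nodes. As an editorial sketch only, using the hypothetical helper names `FlatIndex`, `LeftChild`, `RightChild` and `Parent`, the index arithmetic works out like this:

```cpp
// Index arithmetic for a complete binary tree stored in a 1-based flat array
// (sketch only; the deleted WPDTree stores std::unique_ptr<WPDNode> entries).

// Flat slot of the node at `index` within `level`; the root is (0, 0).
constexpr int FlatIndex(int level, int index) {
  return (1 << level) + index;
}

constexpr int LeftChild(int flat_index) { return 2 * flat_index; }
constexpr int RightChild(int flat_index) { return 2 * flat_index + 1; }
constexpr int Parent(int flat_index) { return flat_index / 2; }

static_assert(FlatIndex(0, 0) == 1, "the root lives in slot 1");
static_assert(LeftChild(FlatIndex(2, 1)) == FlatIndex(3, 2),
              "children of (2, 1) are (3, 2) and (3, 3)");
static_assert(Parent(FlatIndex(3, 3)) == FlatIndex(2, 1),
              "integer division recovers the parent");
```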
-// -// Implementation details: Since the tree will always be a complete binary tree, -// it is implemented using a single linear array instead of managing the -// relationships in each node. For convenience, it is better to use an array that -// starts at 1 (instead of 0). Taking that into account, the following formulas -// apply: -// Root node index: 1. -// Node(Level, Index in that level): 2 ^ Level + (Index in that level). -// Left Child: Current node index * 2. -// Right Child: Current node index * 2 + 1. -// Parent: Current Node Index / 2 (Integer division). -class WPDTree { - public: - // Creates a WPD tree using the data length and coefficients provided. - WPDTree(size_t data_length, - const float* high_pass_coefficients, - const float* low_pass_coefficients, - size_t coefficients_length, - int levels); - ~WPDTree(); - - // Returns the number of nodes at any given level. - static int NumberOfNodesAtLevel(int level) { return 1 << level; } - - // Returns a pointer to the node at the given level and index (of that level). - // Level goes from 0 to levels(). - // Index goes from 0 to NumberOfNodesAtLevel(level) - 1. - // - // You can use the following formulas to get any node within the tree: - // Notation: (Level, Index of node in that level). - // Root node: (0/0). - // Left Child: (Current node level + 1, Current node index * 2). - // Right Child: (Current node level + 1, Current node index * 2 + 1). - // Parent: (Current node level - 1, Current node index / 2) (Integer division) - // - // If level or index are out of bounds the function will return NULL. - WPDNode* NodeAt(int level, int index); - - // Updates all the nodes of the tree with the new data. `data_length` must be - // the same as was used for the creation of the tree. - // Returns 0 if correct, and -1 otherwise. - int Update(const float* data, size_t data_length); - - // Returns the total number of levels below the root. Root is considered level - // 0. - int levels() const { return levels_; } - - // Returns the total number of nodes. - int num_nodes() const { return num_nodes_; } - - // Returns the total number of leaves. - int num_leaves() const { return 1 << levels_; } - - private: - size_t data_length_; - int levels_; - int num_nodes_; - std::unique_ptr<std::unique_ptr<WPDNode>[]> nodes_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_WPD_TREE_H_ diff --git a/modules/audio_processing/transient/wpd_tree_unittest.cc b/modules/audio_processing/transient/wpd_tree_unittest.cc deleted file mode 100644 index bf3ff987d7..0000000000 --- a/modules/audio_processing/transient/wpd_tree_unittest.cc +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree.
- */ - -#include "modules/audio_processing/transient/wpd_tree.h" - -#include -#include - -#include "modules/audio_processing/transient/daubechies_8_wavelet_coeffs.h" -#include "modules/audio_processing/transient/file_utils.h" -#include "rtc_base/strings/string_builder.h" -#include "rtc_base/system/file_wrapper.h" -#include "test/gtest.h" -#include "test/testsupport/file_utils.h" - -namespace webrtc { - -TEST(WPDTreeTest, Construction) { - const size_t kTestBufferSize = 100; - const int kLevels = 5; - const int kExpectedNumberOfNodes = (1 << (kLevels + 1)) - 1; - - float test_buffer[kTestBufferSize]; - memset(test_buffer, 0.f, kTestBufferSize * sizeof(*test_buffer)); - float test_coefficients[] = {1.f, 2.f, 3.f, 4.f, 5.f}; - const size_t kTestCoefficientsLength = - sizeof(test_coefficients) / sizeof(test_coefficients[0]); - WPDTree tree(kTestBufferSize, test_coefficients, test_coefficients, - kTestCoefficientsLength, kLevels); - ASSERT_EQ(kExpectedNumberOfNodes, tree.num_nodes()); - // Checks for NodeAt(level, index). - int nodes_at_level = 0; - for (int level = 0; level <= kLevels; ++level) { - nodes_at_level = 1 << level; - for (int i = 0; i < nodes_at_level; ++i) { - ASSERT_TRUE(NULL != tree.NodeAt(level, i)); - } - // Out of bounds. - EXPECT_EQ(NULL, tree.NodeAt(level, -1)); - EXPECT_EQ(NULL, tree.NodeAt(level, -12)); - EXPECT_EQ(NULL, tree.NodeAt(level, nodes_at_level)); - EXPECT_EQ(NULL, tree.NodeAt(level, nodes_at_level + 5)); - } - // Out of bounds. - EXPECT_EQ(NULL, tree.NodeAt(-1, 0)); - EXPECT_EQ(NULL, tree.NodeAt(-12, 0)); - EXPECT_EQ(NULL, tree.NodeAt(kLevels + 1, 0)); - EXPECT_EQ(NULL, tree.NodeAt(kLevels + 5, 0)); - // Checks for Update(). - EXPECT_EQ(0, tree.Update(test_buffer, kTestBufferSize)); - EXPECT_EQ(-1, tree.Update(NULL, kTestBufferSize)); - EXPECT_EQ(-1, tree.Update(test_buffer, kTestBufferSize - 1)); -} - -// This test is for the correctness of the tree. -// Checks the results from the Matlab equivalent, it is done comparing the -// results that are stored in the output files from Matlab. -// It also writes the results in its own set of files in the out directory. -// Matlab and output files contain all the results in double precision (Little -// endian) appended. -#if defined(WEBRTC_IOS) -TEST(WPDTreeTest, DISABLED_CorrectnessBasedOnMatlabFiles) { -#else -TEST(WPDTreeTest, CorrectnessBasedOnMatlabFiles) { -#endif - // 10 ms at 16000 Hz. - const size_t kTestBufferSize = 160; - const int kLevels = 3; - const int kLeaves = 1 << kLevels; - const size_t kLeavesSamples = kTestBufferSize >> kLevels; - // Create tree with Discrete Meyer Wavelet Coefficients. - WPDTree tree(kTestBufferSize, kDaubechies8HighPassCoefficients, - kDaubechies8LowPassCoefficients, kDaubechies8CoefficientsLength, - kLevels); - // Allocate and open all matlab and out files. - FileWrapper matlab_files_data[kLeaves]; - FileWrapper out_files_data[kLeaves]; - - for (int i = 0; i < kLeaves; ++i) { - // Matlab files. - rtc::StringBuilder matlab_stream; - matlab_stream << "audio_processing/transient/wpd" << i; - std::string matlab_string = test::ResourcePath(matlab_stream.str(), "dat"); - matlab_files_data[i] = FileWrapper::OpenReadOnly(matlab_string); - - bool file_opened = matlab_files_data[i].is_open(); - ASSERT_TRUE(file_opened) << "File could not be opened.\n" << matlab_string; - - // Out files. 
- rtc::StringBuilder out_stream; - out_stream << test::OutputPath() << "wpd_" << i << ".out"; - std::string out_string = out_stream.str(); - - out_files_data[i] = FileWrapper::OpenWriteOnly(out_string); - - file_opened = out_files_data[i].is_open(); - ASSERT_TRUE(file_opened) << "File could not be opened.\n" << out_string; - } - - // Prepare the test file. - std::string test_file_name = test::ResourcePath( - "audio_processing/transient/ajm-macbook-1-spke16m", "pcm"); - - FileWrapper test_file = FileWrapper::OpenReadOnly(test_file_name); - - bool file_opened = test_file.is_open(); - ASSERT_TRUE(file_opened) << "File could not be opened.\n" << test_file_name; - - float test_buffer[kTestBufferSize]; - - // Only the first frames of the audio file are tested. The matlab files also - // only contains information about the first frames. - const size_t kMaxFramesToTest = 100; - const float kTolerance = 0.03f; - - size_t frames_read = 0; - - // Read first buffer from the PCM test file. - size_t file_samples_read = - ReadInt16FromFileToFloatBuffer(&test_file, kTestBufferSize, test_buffer); - while (file_samples_read > 0 && frames_read < kMaxFramesToTest) { - ++frames_read; - - if (file_samples_read < kTestBufferSize) { - // Pad the rest of the buffer with zeros. - for (size_t i = file_samples_read; i < kTestBufferSize; ++i) { - test_buffer[i] = 0.0; - } - } - tree.Update(test_buffer, kTestBufferSize); - double matlab_buffer[kTestBufferSize]; - - // Compare results with data from the matlab test files. - for (int i = 0; i < kLeaves; ++i) { - // Compare data values - size_t matlab_samples_read = ReadDoubleBufferFromFile( - &matlab_files_data[i], kLeavesSamples, matlab_buffer); - - ASSERT_EQ(kLeavesSamples, matlab_samples_read) - << "Matlab test files are malformed.\n" - "File: 3_" - << i; - // Get output data from the corresponding node - const float* node_data = tree.NodeAt(kLevels, i)->data(); - // Compare with matlab files. - for (size_t j = 0; j < kLeavesSamples; ++j) { - EXPECT_NEAR(matlab_buffer[j], node_data[j], kTolerance) - << "\nLeaf: " << i << "\nSample: " << j - << "\nFrame: " << frames_read - 1; - } - - // Write results to out files. - WriteFloatBufferToFile(&out_files_data[i], kLeavesSamples, node_data); - } - - // Read next buffer from the PCM test file. - file_samples_read = ReadInt16FromFileToFloatBuffer( - &test_file, kTestBufferSize, test_buffer); - } - - // Close all matlab and out files. 
- for (int i = 0; i < kLeaves; ++i) { - matlab_files_data[i].Close(); - out_files_data[i].Close(); - } - - test_file.Close(); -} - -} // namespace webrtc diff --git a/modules/audio_processing/utility/cascaded_biquad_filter.cc b/modules/audio_processing/utility/cascaded_biquad_filter.cc index 0d236ce0be..19faadfc96 100644 --- a/modules/audio_processing/utility/cascaded_biquad_filter.cc +++ b/modules/audio_processing/utility/cascaded_biquad_filter.cc @@ -71,8 +71,8 @@ CascadedBiQuadFilter::CascadedBiQuadFilter( CascadedBiQuadFilter::~CascadedBiQuadFilter() = default; -void CascadedBiQuadFilter::Process(rtc::ArrayView<const float> x, - rtc::ArrayView<float> y) { +void CascadedBiQuadFilter::Process(ArrayView<const float> x, + ArrayView<float> y) { if (biquads_.size() > 0) { ApplyBiQuad(x, y, &biquads_[0]); for (size_t k = 1; k < biquads_.size(); ++k) { @@ -83,7 +83,7 @@ void CascadedBiQuadFilter::Process(rtc::ArrayView<const float> x, } } -void CascadedBiQuadFilter::Process(rtc::ArrayView<float> y) { +void CascadedBiQuadFilter::Process(ArrayView<float> y) { for (auto& biquad : biquads_) { ApplyBiQuad(y, y, &biquad); } @@ -95,8 +95,8 @@ void CascadedBiQuadFilter::Reset() { } } -void CascadedBiQuadFilter::ApplyBiQuad(rtc::ArrayView<const float> x, - rtc::ArrayView<float> y, +void CascadedBiQuadFilter::ApplyBiQuad(ArrayView<const float> x, + ArrayView<float> y, CascadedBiQuadFilter::BiQuad* biquad) { RTC_DCHECK_EQ(x.size(), y.size()); const float c_a_0 = biquad->coefficients.a[0]; diff --git a/modules/audio_processing/utility/cascaded_biquad_filter.h b/modules/audio_processing/utility/cascaded_biquad_filter.h index 120b52aa57..799b439416 100644 --- a/modules/audio_processing/utility/cascaded_biquad_filter.h +++ b/modules/audio_processing/utility/cascaded_biquad_filter.h @@ -61,15 +61,15 @@ class CascadedBiQuadFilter { CascadedBiQuadFilter& operator=(const CascadedBiQuadFilter&) = delete; // Applies the biquads on the values in x in order to form the output in y. - void Process(rtc::ArrayView<const float> x, rtc::ArrayView<float> y); + void Process(ArrayView<const float> x, ArrayView<float> y); // Applies the biquads on the values in y in an in-place manner. - void Process(rtc::ArrayView<float> y); + void Process(ArrayView<float> y); // Resets the filter to its initial state. void Reset(); private: - void ApplyBiQuad(rtc::ArrayView<const float> x, - rtc::ArrayView<float> y, + void ApplyBiQuad(ArrayView<const float> x, + ArrayView<float> y, CascadedBiQuadFilter::BiQuad* biquad); std::vector<BiQuad> biquads_; diff --git a/modules/audio_processing/utility/pffft_wrapper.cc b/modules/audio_processing/utility/pffft_wrapper.cc index 88642fb12b..3f65941c76 100644 --- a/modules/audio_processing/utility/pffft_wrapper.cc +++ b/modules/audio_processing/utility/pffft_wrapper.cc @@ -34,11 +34,11 @@ Pffft::FloatBuffer::~FloatBuffer() { pffft_aligned_free(data_); } -rtc::ArrayView<const float> Pffft::FloatBuffer::GetConstView() const { +ArrayView<const float> Pffft::FloatBuffer::GetConstView() const { return {data_, size_}; } -rtc::ArrayView<float> Pffft::FloatBuffer::GetView() { +ArrayView<float> Pffft::FloatBuffer::GetView() { return {data_, size_}; } diff --git a/modules/audio_processing/utility/pffft_wrapper.h b/modules/audio_processing/utility/pffft_wrapper.h index 983c2fd1bc..b555b4b1e1 100644 --- a/modules/audio_processing/utility/pffft_wrapper.h +++ b/modules/audio_processing/utility/pffft_wrapper.h @@ -34,8 +34,8 @@ class Pffft { FloatBuffer& operator=(const FloatBuffer&) = delete; ~FloatBuffer(); - rtc::ArrayView<const float> GetConstView() const; - rtc::ArrayView<float> GetView(); + ArrayView<const float> GetConstView() const; + ArrayView<float> GetView(); private: friend class Pffft; @@ -67,7 +67,7 @@ class Pffft { // Creates a buffer of the right size.
std::unique_ptr CreateBuffer() const; - // TODO(https://crbug.com/webrtc/9577): Overload with rtc::ArrayView args. + // TODO(https://crbug.com/webrtc/9577): Overload with webrtc::ArrayView args. // Computes the forward fast Fourier transform. void ForwardTransform(const FloatBuffer& in, FloatBuffer* out, bool ordered); // Computes the backward fast Fourier transform. diff --git a/modules/audio_processing/utility/pffft_wrapper_unittest.cc b/modules/audio_processing/utility/pffft_wrapper_unittest.cc index 2ad6849cd4..71cda9333e 100644 --- a/modules/audio_processing/utility/pffft_wrapper_unittest.cc +++ b/modules/audio_processing/utility/pffft_wrapper_unittest.cc @@ -40,8 +40,8 @@ double frand() { return std::rand() / static_cast(RAND_MAX); } -void ExpectArrayViewsEquality(rtc::ArrayView a, - rtc::ArrayView b) { +void ExpectArrayViewsEquality(ArrayView a, + ArrayView b) { ASSERT_EQ(a.size(), b.size()); for (size_t i = 0; i < a.size(); ++i) { SCOPED_TRACE(i); @@ -74,8 +74,8 @@ void PffftValidateWrapper(size_t fft_size, bool complex_fft) { auto out_wrapper = pffft_wrapper.CreateBuffer(); // Input and output buffers views. - rtc::ArrayView in_view(in, num_floats); - rtc::ArrayView out_view(out, num_floats); + ArrayView in_view(in, num_floats); + ArrayView out_view(out, num_floats); auto in_wrapper_view = in_wrapper->GetView(); EXPECT_EQ(in_wrapper_view.size(), num_floats); auto out_wrapper_view = out_wrapper->GetConstView(); diff --git a/modules/audio_processing/vad/voice_activity_detector.h b/modules/audio_processing/vad/voice_activity_detector.h index 92b9a8c208..401acca699 100644 --- a/modules/audio_processing/vad/voice_activity_detector.h +++ b/modules/audio_processing/vad/voice_activity_detector.h @@ -33,7 +33,7 @@ class VoiceActivityDetector { ~VoiceActivityDetector(); // Processes each audio chunk and estimates the voice probability. - // TODO(bugs.webrtc.org/7494): Switch to rtc::ArrayView and remove + // TODO(bugs.webrtc.org/7494): Switch to webrtc::ArrayView and remove // `sample_rate_hz`. 
void ProcessChunk(const int16_t* audio, size_t length, int sample_rate_hz); diff --git a/modules/congestion_controller/BUILD.gn b/modules/congestion_controller/BUILD.gn index 9845754566..64dbdcd845 100644 --- a/modules/congestion_controller/BUILD.gn +++ b/modules/congestion_controller/BUILD.gn @@ -8,17 +8,8 @@ import("../../webrtc.gni") -config("bwe_test_logging") { - if (rtc_enable_bwe_test_logging) { - defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=1" ] - } else { - defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0" ] - } -} - rtc_library("congestion_controller") { visibility = [ "*" ] - configs += [ ":bwe_test_logging" ] sources = [ "include/receive_side_congestion_controller.h", "receive_side_congestion_controller.cc", @@ -27,17 +18,27 @@ rtc_library("congestion_controller") { ] deps = [ + "..:module_api", "../../api:rtp_parameters", + "../../api:sequence_checker", + "../../api/environment", "../../api/transport:network_control", "../../api/units:data_rate", + "../../api/units:data_size", "../../api/units:time_delta", "../../api/units:timestamp", "../../rtc_base:logging", "../../rtc_base:macromagic", + "../../rtc_base/experiments:field_trial_parser", "../../rtc_base/synchronization:mutex", + "../../system_wrappers", "../pacing", "../remote_bitrate_estimator", + "../remote_bitrate_estimator:congestion_control_feedback_generator", + "../remote_bitrate_estimator:transport_sequence_number_feedback_generator", "../rtp_rtcp:rtp_rtcp_format", + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/base:nullability", ] } @@ -51,13 +52,17 @@ if (rtc_include_tests && !build_with_chromium) { ] deps = [ ":congestion_controller", + "../../api:rtp_parameters", + "../../api/environment:environment_factory", "../../api/test/network_emulation", "../../api/test/network_emulation:create_cross_traffic", "../../api/units:data_rate", "../../api/units:data_size", "../../api/units:time_delta", "../../api/units:timestamp", + "../../rtc_base:buffer", "../../system_wrappers", + "../../test:explicit_key_value_config", "../../test:test_support", "../../test/scenario", "../pacing", diff --git a/modules/congestion_controller/DEPS b/modules/congestion_controller/DEPS index 2ed9952e22..4bb4026c37 100644 --- a/modules/congestion_controller/DEPS +++ b/modules/congestion_controller/DEPS @@ -3,3 +3,9 @@ include_rules = [ "+system_wrappers", "+video", ] +specific_include_rules = { + "goog_cc_network_control_unittest.cc": [ + "+call/video_receive_stream.h", + ], +} + diff --git a/modules/congestion_controller/goog_cc/BUILD.gn b/modules/congestion_controller/goog_cc/BUILD.gn index 150201e1bd..326168679d 100644 --- a/modules/congestion_controller/goog_cc/BUILD.gn +++ b/modules/congestion_controller/goog_cc/BUILD.gn @@ -8,16 +8,7 @@ import("../../../webrtc.gni") -config("bwe_test_logging") { - if (rtc_enable_bwe_test_logging) { - defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=1" ] - } else { - defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0" ] - } -} - rtc_library("goog_cc") { - configs += [ ":bwe_test_logging" ] sources = [ "goog_cc_network_control.cc", "goog_cc_network_control.h", @@ -33,8 +24,9 @@ rtc_library("goog_cc") { ":send_side_bwe", "../../../api:field_trials_view", "../../../api:network_state_predictor_api", + "../../../api/environment", "../../../api/rtc_event_log", - "../../../api/transport:field_trial_based_config", + "../../../api/transport:bandwidth_usage", "../../../api/transport:network_control", "../../../api/units:data_rate", "../../../api/units:data_size", @@ 
-49,10 +41,8 @@ rtc_library("goog_cc") { "../../../rtc_base/experiments:rate_control_settings", "../../../system_wrappers", "../../remote_bitrate_estimator", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -65,7 +55,6 @@ rtc_library("link_capacity_estimator") { "../../../api/units:data_rate", "../../../rtc_base:safe_minmax", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("pushback_controller") { @@ -80,10 +69,6 @@ rtc_library("pushback_controller") { "../../../rtc_base:checks", "../../../rtc_base/experiments:rate_control_settings", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("alr_detector") { @@ -103,10 +88,8 @@ rtc_library("alr_detector") { "../../../rtc_base/experiments:field_trial_parser", "../../pacing:interval_budget", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("estimators") { - configs += [ ":bwe_test_logging" ] sources = [ "acknowledged_bitrate_estimator.cc", "acknowledged_bitrate_estimator.h", @@ -127,6 +110,7 @@ rtc_library("estimators") { "../../../api:field_trials_view", "../../../api:network_state_predictor_api", "../../../api/rtc_event_log", + "../../../api/transport:bandwidth_usage", "../../../api/transport:network_control", "../../../api/units:data_rate", "../../../api/units:data_size", @@ -141,10 +125,7 @@ rtc_library("estimators") { "../../../rtc_base:safe_minmax", "../../../rtc_base/experiments:field_trial_parser", "../../remote_bitrate_estimator", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -156,7 +137,6 @@ rtc_library("loss_based_bwe_v2") { deps = [ "../../../api:array_view", "../../../api:field_trials_view", - "../../../api:network_state_predictor_api", "../../../api/transport:network_control", "../../../api/units:data_rate", "../../../api/units:data_size", @@ -165,15 +145,11 @@ rtc_library("loss_based_bwe_v2") { "../../../rtc_base:logging", "../../../rtc_base/experiments:field_trial_parser", "../../remote_bitrate_estimator", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/types:optional", ] } rtc_library("loss_based_bwe_v1") { - configs += [ ":bwe_test_logging" ] sources = [ "loss_based_bandwidth_estimation.cc", "loss_based_bandwidth_estimation.h", @@ -186,12 +162,12 @@ rtc_library("loss_based_bwe_v1") { "../../../api/units:timestamp", "../../../rtc_base:checks", "../../../rtc_base/experiments:field_trial_parser", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("send_side_bwe") { - configs += [ ":bwe_test_logging" ] sources = [ "send_side_bandwidth_estimation.cc", "send_side_bandwidth_estimation.h", @@ -200,8 +176,8 @@ rtc_library("send_side_bwe") { ":loss_based_bwe_v1", ":loss_based_bwe_v2", "../../../api:field_trials_view", - "../../../api:network_state_predictor_api", "../../../api/rtc_event_log", + "../../../api/transport:bandwidth_usage", "../../../api/transport:network_control", "../../../api/units:data_rate", "../../../api/units:time_delta", @@ -210,18 +186,13 @@ rtc_library("send_side_bwe") { "../../../rtc_base:checks", "../../../rtc_base:logging", "../../../rtc_base/experiments:field_trial_parser", - 
"../../../system_wrappers:field_trial", "../../../system_wrappers:metrics", "../../remote_bitrate_estimator", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } rtc_library("delay_based_bwe") { - configs += [ ":bwe_test_logging" ] sources = [ "delay_based_bwe.cc", "delay_based_bwe.h", @@ -231,10 +202,14 @@ rtc_library("delay_based_bwe") { deps = [ ":estimators", + ":link_capacity_estimator", "../../../api:field_trials_view", "../../../api:network_state_predictor_api", "../../../api/rtc_event_log", + "../../../api/transport:bandwidth_usage", "../../../api/transport:network_control", + "../../../api/units:data_rate", + "../../../api/units:data_size", "../../../api/units:time_delta", "../../../api/units:timestamp", "../../../logging:rtc_event_bwe", @@ -246,10 +221,6 @@ rtc_library("delay_based_bwe") { "../../pacing", "../../remote_bitrate_estimator", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("probe_controller") { @@ -274,11 +245,8 @@ rtc_library("probe_controller") { "../../../rtc_base:safe_conversions", "../../../rtc_base/experiments:field_trial_parser", "../../../system_wrappers:metrics", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -294,17 +262,18 @@ if (rtc_include_tests) { ":delay_based_bwe", ":estimators", ":goog_cc", + "../../../api:libjingle_logging_api", "../../../api/rtc_event_log", "../../../api/transport:goog_cc", "../../../api/transport:network_control", + "../../../api/units:data_rate", + "../../../api/units:data_size", + "../../../api/units:time_delta", "../../../api/units:timestamp", "../../../rtc_base:checks", "../../../test/logging:log_writer", "../../remote_bitrate_estimator", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } if (!build_with_chromium) { @@ -337,9 +306,12 @@ if (rtc_include_tests) { ":send_side_bwe", "../../../api:field_trials_view", "../../../api:network_state_predictor_api", + "../../../api/environment", + "../../../api/environment:environment_factory", "../../../api/rtc_event_log", "../../../api/test/network_emulation", "../../../api/test/network_emulation:create_cross_traffic", + "../../../api/transport:bandwidth_usage", "../../../api/transport:field_trial_based_config", "../../../api/transport:goog_cc", "../../../api/transport:network_control", @@ -347,6 +319,7 @@ if (rtc_include_tests) { "../../../api/units:data_size", "../../../api/units:time_delta", "../../../api/units:timestamp", + "../../../call:video_receive_stream_api", "../../../logging:mocks", "../../../logging:rtc_event_bwe", "../../../rtc_base:checks", @@ -359,11 +332,13 @@ if (rtc_include_tests) { "../../../test:explicit_key_value_config", "../../../test:field_trial", "../../../test:test_support", + "../../../test/network:emulated_network", "../../../test/scenario", + "../../../test/scenario:column_printer", "../../pacing", "//testing/gmock", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings" ] } } } diff --git a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.cc b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.cc index 08b42a8168..d330cad6e7 100644 --- 
a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.cc +++ b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.cc @@ -10,14 +10,19 @@ #include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h" -#include - #include #include +#include #include +#include +#include "api/field_trials_view.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/timestamp.h" +#include "modules/congestion_controller/goog_cc/bitrate_estimator.h" #include "rtc_base/checks.h" -#include "rtc_base/numerics/safe_conversions.h" namespace webrtc { @@ -30,7 +35,7 @@ AcknowledgedBitrateEstimator::AcknowledgedBitrateEstimator( AcknowledgedBitrateEstimator::~AcknowledgedBitrateEstimator() {} AcknowledgedBitrateEstimator::AcknowledgedBitrateEstimator( - const FieldTrialsView* key_value_config, + const FieldTrialsView* /* key_value_config */, std::unique_ptr bitrate_estimator) : in_alr_(false), bitrate_estimator_(std::move(bitrate_estimator)) {} @@ -51,11 +56,11 @@ void AcknowledgedBitrateEstimator::IncomingPacketFeedbackVector( } } -absl::optional AcknowledgedBitrateEstimator::bitrate() const { +std::optional AcknowledgedBitrateEstimator::bitrate() const { return bitrate_estimator_->bitrate(); } -absl::optional AcknowledgedBitrateEstimator::PeekRate() const { +std::optional AcknowledgedBitrateEstimator::PeekRate() const { return bitrate_estimator_->PeekRate(); } diff --git a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h index d10846ab3a..68766f3e15 100644 --- a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h +++ b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h @@ -12,12 +12,13 @@ #define MODULES_CONGESTION_CONTROLLER_GOOG_CC_ACKNOWLEDGED_BITRATE_ESTIMATOR_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/field_trials_view.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" +#include "api/units/timestamp.h" #include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h" #include "modules/congestion_controller/goog_cc/bitrate_estimator.h" @@ -36,13 +37,13 @@ class AcknowledgedBitrateEstimator void IncomingPacketFeedbackVector( const std::vector& packet_feedback_vector) override; - absl::optional bitrate() const override; - absl::optional PeekRate() const override; + std::optional bitrate() const override; + std::optional PeekRate() const override; void SetAlr(bool in_alr) override; void SetAlrEndedTime(Timestamp alr_ended_time) override; private: - absl::optional alr_ended_time_; + std::optional alr_ended_time_; bool in_alr_; std::unique_ptr bitrate_estimator_; }; diff --git a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc index c043353a7a..571bbff71a 100644 --- a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc +++ b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc @@ -11,10 +11,13 @@ #include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h" #include +#include +#include "api/field_trials_view.h" #include "api/units/time_delta.h" #include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h" #include 
"modules/congestion_controller/goog_cc/robust_throughput_estimator.h" +#include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/logging.h" namespace webrtc { diff --git a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h index 6184cdc114..4ff8dc13cb 100644 --- a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h +++ b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h @@ -11,12 +11,10 @@ #ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_ACKNOWLEDGED_BITRATE_ESTIMATOR_INTERFACE_H_ #define MODULES_CONGESTION_CONTROLLER_GOOG_CC_ACKNOWLEDGED_BITRATE_ESTIMATOR_INTERFACE_H_ -#include - #include +#include #include -#include "absl/types/optional.h" #include "api/field_trials_view.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" @@ -76,8 +74,8 @@ class AcknowledgedBitrateEstimatorInterface { virtual void IncomingPacketFeedbackVector( const std::vector& packet_feedback_vector) = 0; - virtual absl::optional bitrate() const = 0; - virtual absl::optional PeekRate() const = 0; + virtual std::optional bitrate() const = 0; + virtual std::optional PeekRate() const = 0; virtual void SetAlr(bool in_alr) = 0; virtual void SetAlrEndedTime(Timestamp alr_ended_time) = 0; }; diff --git a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc index e5b733b119..cc6e4a07e5 100644 --- a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc +++ b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc @@ -10,17 +10,23 @@ #include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h" +#include +#include #include +#include #include +#include #include "api/transport/field_trial_based_config.h" -#include "rtc_base/fake_clock.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/timestamp.h" +#include "modules/congestion_controller/goog_cc/bitrate_estimator.h" #include "test/gmock.h" #include "test/gtest.h" -using ::testing::_; using ::testing::InSequence; -using ::testing::NiceMock; using ::testing::Return; namespace webrtc { @@ -39,7 +45,7 @@ class MockBitrateEstimator : public BitrateEstimator { Update, (Timestamp at_time, DataSize data_size, bool in_alr), (override)); - MOCK_METHOD(absl::optional, bitrate, (), (const, override)); + MOCK_METHOD(std::optional, bitrate, (), (const, override)); MOCK_METHOD(void, ExpectFastRateChange, (), (override)); }; @@ -126,7 +132,7 @@ TEST(TestAcknowledgedBitrateEstimator, ExpectFastRateChangeWhenLeftAlr) { TEST(TestAcknowledgedBitrateEstimator, ReturnBitrate) { auto states = CreateTestStates(); - absl::optional return_value = DataRate::KilobitsPerSec(42); + std::optional return_value = DataRate::KilobitsPerSec(42); EXPECT_CALL(*states.mock_bitrate_estimator, bitrate()) .Times(1) .WillOnce(Return(return_value)); diff --git a/modules/congestion_controller/goog_cc/alr_detector.cc b/modules/congestion_controller/goog_cc/alr_detector.cc index f1e649b7cd..801d23a4be 100644 --- a/modules/congestion_controller/goog_cc/alr_detector.cc +++ b/modules/congestion_controller/goog_cc/alr_detector.cc @@ -13,12 +13,14 @@ #include #include #include +#include -#include "api/rtc_event_log/rtc_event.h" +#include 
"api/field_trials_view.h" #include "api/rtc_event_log/rtc_event_log.h" #include "logging/rtc_event_log/events/rtc_event_alr_state.h" #include "rtc_base/checks.h" -#include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/experiments/alr_experiment.h" +#include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/time_utils.h" namespace webrtc { @@ -26,7 +28,7 @@ namespace webrtc { namespace { AlrDetectorConfig GetConfigFromTrials(const FieldTrialsView* key_value_config) { RTC_CHECK(AlrExperimentSettings::MaxOneFieldTrialEnabled(*key_value_config)); - absl::optional experiment_settings = + std::optional experiment_settings = AlrExperimentSettings::CreateFromFieldTrial( *key_value_config, AlrExperimentSettings::kScreenshareProbingBweExperimentName); @@ -83,7 +85,7 @@ void AlrDetector::OnBytesSent(size_t bytes_sent, int64_t send_time_ms) { bool state_changed = false; if (alr_budget_.budget_ratio() > conf_.start_budget_level_ratio && !alr_started_time_ms_) { - alr_started_time_ms_.emplace(rtc::TimeMillis()); + alr_started_time_ms_.emplace(TimeMillis()); state_changed = true; } else if (alr_budget_.budget_ratio() < conf_.stop_budget_level_ratio && alr_started_time_ms_) { @@ -103,7 +105,7 @@ void AlrDetector::SetEstimatedBitrate(int bitrate_bps) { alr_budget_.set_target_rate_kbps(target_rate_kbps); } -absl::optional AlrDetector::GetApplicationLimitedRegionStartTime() +std::optional AlrDetector::GetApplicationLimitedRegionStartTime() const { return alr_started_time_ms_; } diff --git a/modules/congestion_controller/goog_cc/alr_detector.h b/modules/congestion_controller/goog_cc/alr_detector.h index 5e7a3e1075..e758b26c9a 100644 --- a/modules/congestion_controller/goog_cc/alr_detector.h +++ b/modules/congestion_controller/goog_cc/alr_detector.h @@ -15,11 +15,10 @@ #include #include +#include -#include "absl/types/optional.h" #include "api/field_trials_view.h" #include "modules/pacing/interval_budget.h" -#include "rtc_base/experiments/alr_experiment.h" #include "rtc_base/experiments/struct_parameters_parser.h" namespace webrtc { @@ -58,16 +57,16 @@ class AlrDetector { // Returns time in milliseconds when the current application-limited region // started or empty result if the sender is currently not application-limited. 
- absl::optional GetApplicationLimitedRegionStartTime() const; + std::optional GetApplicationLimitedRegionStartTime() const; private: friend class GoogCcStatePrinter; const AlrDetectorConfig conf_; - absl::optional last_send_time_ms_; + std::optional last_send_time_ms_; IntervalBudget alr_budget_; - absl::optional alr_started_time_ms_; + std::optional alr_started_time_ms_; RtcEventLog* event_log_; }; diff --git a/modules/congestion_controller/goog_cc/alr_detector_unittest.cc b/modules/congestion_controller/goog_cc/alr_detector_unittest.cc index eac19d0081..57c583dd67 100644 --- a/modules/congestion_controller/goog_cc/alr_detector_unittest.cc +++ b/modules/congestion_controller/goog_cc/alr_detector_unittest.cc @@ -10,6 +10,9 @@ #include "modules/congestion_controller/goog_cc/alr_detector.h" +#include +#include + #include "api/transport/field_trial_based_config.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/alr_experiment.h" @@ -65,8 +68,8 @@ class SimulateOutgoingTrafficIn { } AlrDetector* const alr_detector_; int64_t* timestamp_ms_; - absl::optional interval_ms_; - absl::optional usage_percentage_; + std::optional interval_ms_; + std::optional usage_percentage_; }; } // namespace @@ -155,7 +158,7 @@ TEST(AlrDetectorTest, BandwidthEstimateChanges) { TEST(AlrDetectorTest, ParseControlFieldTrial) { webrtc::test::ScopedFieldTrials scoped_field_trial( "WebRTC-ProbingScreenshareBwe/Control/"); - absl::optional parsed_params = + std::optional parsed_params = AlrExperimentSettings::CreateFromFieldTrial( FieldTrialBasedConfig(), "WebRTC-ProbingScreenshareBwe"); EXPECT_FALSE(static_cast(parsed_params)); @@ -164,7 +167,7 @@ TEST(AlrDetectorTest, ParseControlFieldTrial) { TEST(AlrDetectorTest, ParseActiveFieldTrial) { webrtc::test::ScopedFieldTrials scoped_field_trial( "WebRTC-ProbingScreenshareBwe/1.1,2875,85,20,-20,1/"); - absl::optional parsed_params = + std::optional parsed_params = AlrExperimentSettings::CreateFromFieldTrial( FieldTrialBasedConfig(), "WebRTC-ProbingScreenshareBwe"); ASSERT_TRUE(static_cast(parsed_params)); diff --git a/modules/congestion_controller/goog_cc/bitrate_estimator.cc b/modules/congestion_controller/goog_cc/bitrate_estimator.cc index 9c68e48886..9140b8ea04 100644 --- a/modules/congestion_controller/goog_cc/bitrate_estimator.cc +++ b/modules/congestion_controller/goog_cc/bitrate_estimator.cc @@ -10,15 +10,18 @@ #include "modules/congestion_controller/goog_cc/bitrate_estimator.h" -#include - #include #include -#include +#include +#include +#include "api/field_trials_view.h" #include "api/units/data_rate.h" -#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" -#include "rtc_base/logging.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" namespace webrtc { @@ -110,8 +113,6 @@ void BitrateEstimator::Update(Timestamp at_time, DataSize amount, bool in_alr) { std::max(bitrate_estimate_kbps_, estimate_floor_.Get().kbps()); bitrate_estimate_var_ = sample_var * pred_bitrate_estimate_var / (sample_var + pred_bitrate_estimate_var); - BWE_TEST_LOGGING_PLOT(1, "acknowledged_bitrate", at_time.ms(), - bitrate_estimate_kbps_ * 1000); } float BitrateEstimator::UpdateWindow(int64_t now_ms, @@ -145,16 +146,16 @@ float BitrateEstimator::UpdateWindow(int64_t now_ms, return bitrate_sample; } -absl::optional BitrateEstimator::bitrate() const { +std::optional BitrateEstimator::bitrate() const { if (bitrate_estimate_kbps_ < 0.f) 
- return absl::nullopt; + return std::nullopt; return DataRate::KilobitsPerSec(bitrate_estimate_kbps_); } -absl::optional BitrateEstimator::PeekRate() const { +std::optional BitrateEstimator::PeekRate() const { if (current_window_ms_ > 0) return DataSize::Bytes(sum_) / TimeDelta::Millis(current_window_ms_); - return absl::nullopt; + return std::nullopt; } void BitrateEstimator::ExpectFastRateChange() { diff --git a/modules/congestion_controller/goog_cc/bitrate_estimator.h b/modules/congestion_controller/goog_cc/bitrate_estimator.h index a6f985800e..a808a82cec 100644 --- a/modules/congestion_controller/goog_cc/bitrate_estimator.h +++ b/modules/congestion_controller/goog_cc/bitrate_estimator.h @@ -13,9 +13,11 @@ #include -#include "absl/types/optional.h" +#include + #include "api/field_trials_view.h" #include "api/units/data_rate.h" +#include "api/units/data_size.h" #include "api/units/timestamp.h" #include "rtc_base/experiments/field_trial_parser.h" @@ -32,8 +34,8 @@ class BitrateEstimator { virtual ~BitrateEstimator(); virtual void Update(Timestamp at_time, DataSize amount, bool in_alr); - virtual absl::optional bitrate() const; - absl::optional PeekRate() const; + virtual std::optional bitrate() const; + std::optional PeekRate() const; virtual void ExpectFastRateChange(); diff --git a/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc b/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc index 2f188f30ca..d3b791d3f4 100644 --- a/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc +++ b/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc @@ -10,30 +10,24 @@ #include "modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h" -#include -#include - #include -#include +#include -#include "absl/strings/match.h" -#include "rtc_base/checks.h" +#include "api/field_trials_view.h" +#include "api/units/data_size.h" #include "rtc_base/experiments/rate_control_settings.h" namespace webrtc { CongestionWindowPushbackController::CongestionWindowPushbackController( - const FieldTrialsView* key_value_config) - : add_pacing_( - absl::StartsWith(key_value_config->Lookup( - "WebRTC-AddPacingToCongestionWindowPushback"), - "Enabled")), + const FieldTrialsView& key_value_config) + : add_pacing_(key_value_config.IsEnabled( + "WebRTC-AddPacingToCongestionWindowPushback")), min_pushback_target_bitrate_bps_( - RateControlSettings::ParseFromKeyValueConfig(key_value_config) + RateControlSettings(key_value_config) .CongestionWindowMinPushbackTargetBitrateBps()), - current_data_window_( - RateControlSettings::ParseFromKeyValueConfig(key_value_config) - .CongestionWindowInitialDataWindow()) {} + current_data_window_(RateControlSettings(key_value_config) + .CongestionWindowInitialDataWindow()) {} void CongestionWindowPushbackController::UpdateOutstandingData( int64_t outstanding_bytes) { diff --git a/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h b/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h index ea9ed97c3d..208331fa28 100644 --- a/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h +++ b/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h @@ -11,10 +11,10 @@ #ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_CONGESTION_WINDOW_PUSHBACK_CONTROLLER_H_ #define MODULES_CONGESTION_CONTROLLER_GOOG_CC_CONGESTION_WINDOW_PUSHBACK_CONTROLLER_H_ -#include #include -#include 
"absl/types/optional.h" +#include + #include "api/field_trials_view.h" #include "api/units/data_size.h" @@ -28,7 +28,7 @@ namespace webrtc { class CongestionWindowPushbackController { public: explicit CongestionWindowPushbackController( - const FieldTrialsView* key_value_config); + const FieldTrialsView& key_value_config); void UpdateOutstandingData(int64_t outstanding_bytes); void UpdatePacingQueue(int64_t pacing_bytes); uint32_t UpdateTargetBitrate(uint32_t bitrate_bps); @@ -37,7 +37,7 @@ class CongestionWindowPushbackController { private: const bool add_pacing_; const uint32_t min_pushback_target_bitrate_bps_; - absl::optional current_data_window_; + std::optional current_data_window_; int64_t outstanding_bytes_ = 0; int64_t pacing_bytes_ = 0; double encoding_rate_ratio_ = 1.0; diff --git a/modules/congestion_controller/goog_cc/congestion_window_pushback_controller_unittest.cc b/modules/congestion_controller/goog_cc/congestion_window_pushback_controller_unittest.cc index 62dde02323..6f85c4c8d4 100644 --- a/modules/congestion_controller/goog_cc/congestion_window_pushback_controller_unittest.cc +++ b/modules/congestion_controller/goog_cc/congestion_window_pushback_controller_unittest.cc @@ -10,94 +10,90 @@ #include "modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h" -#include +#include -#include "api/transport/field_trial_based_config.h" -#include "test/field_trial.h" -#include "test/gmock.h" +#include "api/units/data_size.h" +#include "test/explicit_key_value_config.h" #include "test/gtest.h" -using ::testing::_; - namespace webrtc { namespace test { -class CongestionWindowPushbackControllerTest : public ::testing::Test { - public: - CongestionWindowPushbackControllerTest() { - cwnd_controller_.reset( - new CongestionWindowPushbackController(&field_trial_config_)); - } - - protected: - FieldTrialBasedConfig field_trial_config_; +TEST(CongestionWindowPushbackControllerTest, FullCongestionWindow) { + CongestionWindowPushbackController cwnd_controller( + ExplicitKeyValueConfig("")); - std::unique_ptr cwnd_controller_; -}; - -TEST_F(CongestionWindowPushbackControllerTest, FullCongestionWindow) { - cwnd_controller_->UpdateOutstandingData(100000); - cwnd_controller_->SetDataWindow(DataSize::Bytes(50000)); + cwnd_controller.UpdateOutstandingData(100000); + cwnd_controller.SetDataWindow(DataSize::Bytes(50000)); uint32_t bitrate_bps = 80000; - bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + bitrate_bps = cwnd_controller.UpdateTargetBitrate(bitrate_bps); EXPECT_EQ(72000u, bitrate_bps); - cwnd_controller_->SetDataWindow(DataSize::Bytes(50000)); - bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + cwnd_controller.SetDataWindow(DataSize::Bytes(50000)); + bitrate_bps = cwnd_controller.UpdateTargetBitrate(bitrate_bps); EXPECT_EQ(static_cast(72000 * 0.9 * 0.9), bitrate_bps); } -TEST_F(CongestionWindowPushbackControllerTest, NormalCongestionWindow) { - cwnd_controller_->UpdateOutstandingData(199999); - cwnd_controller_->SetDataWindow(DataSize::Bytes(200000)); +TEST(CongestionWindowPushbackControllerTest, NormalCongestionWindow) { + CongestionWindowPushbackController cwnd_controller( + ExplicitKeyValueConfig("")); + + cwnd_controller.UpdateOutstandingData(199999); + cwnd_controller.SetDataWindow(DataSize::Bytes(200000)); uint32_t bitrate_bps = 80000; - bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + bitrate_bps = cwnd_controller.UpdateTargetBitrate(bitrate_bps); EXPECT_EQ(80000u, bitrate_bps); } 
-TEST_F(CongestionWindowPushbackControllerTest, LowBitrate) { - cwnd_controller_->UpdateOutstandingData(100000); - cwnd_controller_->SetDataWindow(DataSize::Bytes(50000)); +TEST(CongestionWindowPushbackControllerTest, LowBitrate) { + CongestionWindowPushbackController cwnd_controller( + ExplicitKeyValueConfig("")); + + cwnd_controller.UpdateOutstandingData(100000); + cwnd_controller.SetDataWindow(DataSize::Bytes(50000)); uint32_t bitrate_bps = 35000; - bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + bitrate_bps = cwnd_controller.UpdateTargetBitrate(bitrate_bps); EXPECT_EQ(static_cast(35000 * 0.9), bitrate_bps); - cwnd_controller_->SetDataWindow(DataSize::Bytes(20000)); - bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + cwnd_controller.SetDataWindow(DataSize::Bytes(20000)); + bitrate_bps = cwnd_controller.UpdateTargetBitrate(bitrate_bps); EXPECT_EQ(30000u, bitrate_bps); } -TEST_F(CongestionWindowPushbackControllerTest, NoPushbackOnDataWindowUnset) { - cwnd_controller_->UpdateOutstandingData(1e8); // Large number +TEST(CongestionWindowPushbackControllerTest, NoPushbackOnDataWindowUnset) { + CongestionWindowPushbackController cwnd_controller( + ExplicitKeyValueConfig("")); + + cwnd_controller.UpdateOutstandingData(1e8); // Large number uint32_t bitrate_bps = 80000; - bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + bitrate_bps = cwnd_controller.UpdateTargetBitrate(bitrate_bps); EXPECT_EQ(80000u, bitrate_bps); } -TEST_F(CongestionWindowPushbackControllerTest, PushbackOnInititialDataWindow) { - test::ScopedFieldTrials trials("WebRTC-CongestionWindow/InitWin:100000/"); - cwnd_controller_.reset( - new CongestionWindowPushbackController(&field_trial_config_)); - cwnd_controller_->UpdateOutstandingData(1e8); // Large number +TEST(CongestionWindowPushbackControllerTest, PushbackOnInititialDataWindow) { + CongestionWindowPushbackController cwnd_controller( + ExplicitKeyValueConfig("WebRTC-CongestionWindow/InitWin:100000/")); + + cwnd_controller.UpdateOutstandingData(1e8); // Large number uint32_t bitrate_bps = 80000; - bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + bitrate_bps = cwnd_controller.UpdateTargetBitrate(bitrate_bps); EXPECT_GT(80000u, bitrate_bps); } -TEST_F(CongestionWindowPushbackControllerTest, PushbackDropFrame) { - test::ScopedFieldTrials trials("WebRTC-CongestionWindow/DropFrame:true/"); - cwnd_controller_.reset( - new CongestionWindowPushbackController(&field_trial_config_)); - cwnd_controller_->UpdateOutstandingData(1e8); // Large number - cwnd_controller_->SetDataWindow(DataSize::Bytes(50000)); +TEST(CongestionWindowPushbackControllerTest, PushbackDropFrame) { + CongestionWindowPushbackController cwnd_controller( + ExplicitKeyValueConfig("WebRTC-CongestionWindow/DropFrame:true/")); + + cwnd_controller.UpdateOutstandingData(1e8); // Large number + cwnd_controller.SetDataWindow(DataSize::Bytes(50000)); uint32_t bitrate_bps = 80000; - bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + bitrate_bps = cwnd_controller.UpdateTargetBitrate(bitrate_bps); EXPECT_GT(80000u, bitrate_bps); } diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe.cc b/modules/congestion_controller/goog_cc/delay_based_bwe.cc index f0562bc964..9769792de3 100644 --- a/modules/congestion_controller/goog_cc/delay_based_bwe.cc +++ b/modules/congestion_controller/goog_cc/delay_based_bwe.cc @@ -12,21 +12,29 @@ #include #include -#include #include -#include +#include #include +#include -#include 
"absl/strings/match.h" -#include "api/rtc_event_log/rtc_event.h" +#include "api/field_trials_view.h" +#include "api/network_state_predictor.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/transport/bandwidth_usage.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" +#include "modules/congestion_controller/goog_cc/delay_increase_detector_interface.h" +#include "modules/congestion_controller/goog_cc/inter_arrival_delta.h" #include "modules/congestion_controller/goog_cc/trendline_estimator.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" -#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/logging.h" +#include "rtc_base/race_checker.h" #include "system_wrappers/include/metrics.h" namespace webrtc { @@ -90,9 +98,9 @@ DelayBasedBwe::~DelayBasedBwe() {} DelayBasedBwe::Result DelayBasedBwe::IncomingPacketFeedbackVector( const TransportPacketsFeedback& msg, - absl::optional acked_bitrate, - absl::optional probe_bitrate, - absl::optional network_estimate, + std::optional acked_bitrate, + std::optional probe_bitrate, + std::optional network_estimate, bool in_alr) { RTC_DCHECK_RUNS_SERIALIZED(&network_race_); @@ -196,17 +204,17 @@ void DelayBasedBwe::IncomingPacketFeedback(const PacketResult& packet_feedback, } DataRate DelayBasedBwe::TriggerOveruse(Timestamp at_time, - absl::optional link_capacity) { + std::optional link_capacity) { RateControlInput input(BandwidthUsage::kBwOverusing, link_capacity); return rate_control_.Update(input, at_time); } DelayBasedBwe::Result DelayBasedBwe::MaybeUpdateEstimate( - absl::optional acked_bitrate, - absl::optional probe_bitrate, - absl::optional state_estimate, + std::optional acked_bitrate, + std::optional probe_bitrate, + std::optional /* state_estimate */, bool recovered_from_overuse, - bool in_alr, + bool /* in_alr */, Timestamp at_time) { Result result; @@ -244,8 +252,6 @@ DelayBasedBwe::Result DelayBasedBwe::MaybeUpdateEstimate( detector_state != prev_state_) { DataRate bitrate = result.updated ? 
result.target_bitrate : prev_bitrate_; - BWE_TEST_LOGGING_PLOT(1, "target_bitrate_bps", at_time.ms(), bitrate.bps()); - if (event_log_) { event_log_->Log(std::make_unique( bitrate.bps(), detector_state)); @@ -260,7 +266,7 @@ DelayBasedBwe::Result DelayBasedBwe::MaybeUpdateEstimate( } bool DelayBasedBwe::UpdateEstimate(Timestamp at_time, - absl::optional acked_bitrate, + std::optional acked_bitrate, DataRate* target_rate) { const RateControlInput input(active_delay_detector_->State(), acked_bitrate); *target_rate = rate_control_.Update(input, at_time); diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe.h b/modules/congestion_controller/goog_cc/delay_based_bwe.h index e91a1dff54..4f02e2e7fc 100644 --- a/modules/congestion_controller/goog_cc/delay_based_bwe.h +++ b/modules/congestion_controller/goog_cc/delay_based_bwe.h @@ -11,18 +11,22 @@ #ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_DELAY_BASED_BWE_H_ #define MODULES_CONGESTION_CONTROLLER_GOOG_CC_DELAY_BASED_BWE_H_ -#include #include #include +#include #include -#include "absl/types/optional.h" #include "api/field_trials_view.h" #include "api/network_state_predictor.h" +#include "api/transport/bandwidth_usage.h" #include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/congestion_controller/goog_cc/delay_increase_detector_interface.h" #include "modules/congestion_controller/goog_cc/inter_arrival_delta.h" +#include "modules/congestion_controller/goog_cc/link_capacity_estimator.h" #include "modules/congestion_controller/goog_cc/probe_bitrate_estimator.h" #include "modules/remote_bitrate_estimator/aimd_rate_control.h" #include "modules/remote_bitrate_estimator/inter_arrival.h" @@ -70,9 +74,9 @@ class DelayBasedBwe { Result IncomingPacketFeedbackVector( const TransportPacketsFeedback& msg, - absl::optional acked_bitrate, - absl::optional probe_bitrate, - absl::optional network_estimate, + std::optional acked_bitrate, + std::optional probe_bitrate, + std::optional network_estimate, bool in_alr); void OnRttUpdate(TimeDelta avg_rtt); bool LatestEstimate(std::vector* ssrcs, DataRate* bitrate) const; @@ -80,7 +84,7 @@ class DelayBasedBwe { void SetMinBitrate(DataRate min_bitrate); TimeDelta GetExpectedBwePeriod() const; DataRate TriggerOveruse(Timestamp at_time, - absl::optional link_capacity); + std::optional link_capacity); DataRate last_estimate() const { return prev_bitrate_; } BandwidthUsage last_state() const { return prev_state_; } @@ -88,20 +92,19 @@ class DelayBasedBwe { friend class GoogCcStatePrinter; void IncomingPacketFeedback(const PacketResult& packet_feedback, Timestamp at_time); - Result MaybeUpdateEstimate( - absl::optional acked_bitrate, - absl::optional probe_bitrate, - absl::optional state_estimate, - bool recovered_from_overuse, - bool in_alr, - Timestamp at_time); + Result MaybeUpdateEstimate(std::optional acked_bitrate, + std::optional probe_bitrate, + std::optional state_estimate, + bool recovered_from_overuse, + bool in_alr, + Timestamp at_time); // Updates the current remote rate estimate and returns true if a valid // estimate exists. 
bool UpdateEstimate(Timestamp at_time, - absl::optional acked_bitrate, + std::optional acked_bitrate, DataRate* target_rate); - rtc::RaceChecker network_race_; + RaceChecker network_race_; RtcEventLog* const event_log_; const FieldTrialsView* const key_value_config_; diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe_unittest.cc b/modules/congestion_controller/goog_cc/delay_based_bwe_unittest.cc index 5a4dbfdcc0..024e1962d6 100644 --- a/modules/congestion_controller/goog_cc/delay_based_bwe_unittest.cc +++ b/modules/congestion_controller/goog_cc/delay_based_bwe_unittest.cc @@ -10,10 +10,12 @@ #include "modules/congestion_controller/goog_cc/delay_based_bwe.h" -#include +#include +#include "api/transport/bandwidth_usage.h" #include "api/transport/network_types.h" -#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" #include "modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.h" #include "system_wrappers/include/clock.h" #include "test/gtest.h" diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.cc b/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.cc index 2730c5d49b..c93bf29c51 100644 --- a/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.cc +++ b/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.cc @@ -10,12 +10,23 @@ #include "modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.h" #include +#include #include #include - -#include "absl/strings/string_view.h" +#include +#include + +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h" #include "modules/congestion_controller/goog_cc/delay_based_bwe.h" +#include "modules/congestion_controller/goog_cc/probe_bitrate_estimator.h" #include "rtc_base/checks.h" +#include "test/field_trial.h" +#include "test/gtest.h" namespace webrtc { constexpr size_t kMtu = 1200; @@ -42,6 +53,7 @@ RtpStream::RtpStream(int fps, int bitrate_bps) // previous frame, no frame will be generated. The frame is split into // packets. int64_t RtpStream::GenerateFrame(int64_t time_now_us, + int64_t* next_sequence_number, std::vector* packets) { if (time_now_us < next_rtp_time_) { return next_rtp_time_; @@ -56,6 +68,7 @@ int64_t RtpStream::GenerateFrame(int64_t time_now_us, packet.sent_packet.send_time = Timestamp::Micros(time_now_us + kSendSideOffsetUs); packet.sent_packet.size = DataSize::Bytes(payload_size); + packet.sent_packet.sequence_number = (*next_sequence_number)++; packets->push_back(packet); } next_rtp_time_ = time_now_us + (1000000 + fps_ / 2) / fps_; @@ -121,14 +134,15 @@ void StreamGenerator::SetBitrateBps(int bitrate_bps) { // TODO(holmer): Break out the channel simulation part from this class to make // it possible to simulate different types of channels. 
-int64_t StreamGenerator::GenerateFrame(std::vector* packets, - int64_t time_now_us) { +int64_t StreamGenerator::GenerateFrame(int64_t time_now_us, + int64_t* next_sequence_number, + std::vector* packets) { RTC_CHECK(packets != NULL); RTC_CHECK(packets->empty()); RTC_CHECK_GT(capacity_, 0); auto it = std::min_element(streams_.begin(), streams_.end(), RtpStream::Compare); - (*it)->GenerateFrame(time_now_us, packets); + (*it)->GenerateFrame(time_now_us, next_sequence_number, packets); for (PacketResult& packet : *packets) { int capacity_bpus = capacity_ / 1000; int64_t required_network_time_us = @@ -156,6 +170,7 @@ DelayBasedBweTest::DelayBasedBweTest() stream_generator_(new test::StreamGenerator(1e6, // Capacity. clock_.TimeInMicroseconds())), arrival_time_offset_ms_(0), + next_sequence_number_(0), first_update_(true) {} DelayBasedBweTest::~DelayBasedBweTest() {} @@ -191,6 +206,7 @@ void DelayBasedBweTest::IncomingFeedback(Timestamp receive_time, packet.sent_packet.send_time = send_time; packet.sent_packet.size = DataSize::Bytes(payload_size); packet.sent_packet.pacing_info = pacing_info; + packet.sent_packet.sequence_number = next_sequence_number_++; if (packet.sent_packet.pacing_info.probe_cluster_id != PacedPacketInfo::kNotAProbe) probe_bitrate_estimator_->HandleProbeAndEstimateBitrate(packet); @@ -204,7 +220,7 @@ void DelayBasedBweTest::IncomingFeedback(Timestamp receive_time, bitrate_estimator_->IncomingPacketFeedbackVector( msg, acknowledged_bitrate_estimator_->bitrate(), probe_bitrate_estimator_->FetchAndResetLastEstimatedBitrate(), - /*network_estimate*/ absl::nullopt, /*in_alr*/ false); + /*network_estimate*/ std::nullopt, /*in_alr*/ false); if (result.updated) { bitrate_observer_.OnReceiveBitrateChanged(result.target_bitrate.bps()); } @@ -216,13 +232,13 @@ void DelayBasedBweTest::IncomingFeedback(Timestamp receive_time, // Returns true if an over-use was seen, false otherwise. // The StreamGenerator::updated() should be used to check for any changes in // target bitrate after the call to this function. 
-bool DelayBasedBweTest::GenerateAndProcessFrame(uint32_t ssrc, +bool DelayBasedBweTest::GenerateAndProcessFrame(uint32_t /* ssrc */, uint32_t bitrate_bps) { stream_generator_->SetBitrateBps(bitrate_bps); std::vector packets; - int64_t next_time_us = - stream_generator_->GenerateFrame(&packets, clock_.TimeInMicroseconds()); + int64_t next_time_us = stream_generator_->GenerateFrame( + clock_.TimeInMicroseconds(), &next_sequence_number_, &packets); if (packets.empty()) return false; @@ -248,7 +264,7 @@ bool DelayBasedBweTest::GenerateAndProcessFrame(uint32_t ssrc, bitrate_estimator_->IncomingPacketFeedbackVector( msg, acknowledged_bitrate_estimator_->bitrate(), probe_bitrate_estimator_->FetchAndResetLastEstimatedBitrate(), - /*network_estimate*/ absl::nullopt, /*in_alr*/ false); + /*network_estimate*/ std::nullopt, /*in_alr*/ false); if (result.updated) { bitrate_observer_.OnReceiveBitrateChanged(result.target_bitrate.bps()); if (!first_update_ && result.target_bitrate.bps() < bitrate_bps) @@ -403,7 +419,7 @@ void DelayBasedBweTest::RateIncreaseRtpTimestampsTestHelper( void DelayBasedBweTest::CapacityDropTestHelper( int number_of_streams, - bool wrap_time_stamp, + bool /* wrap_time_stamp */, uint32_t expected_bitrate_drop_delta, int64_t receiver_clock_offset_change_ms) { const int kFramerate = 30; diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.h b/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.h index 4b06173cdb..b885ef2d23 100644 --- a/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.h +++ b/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.h @@ -15,14 +15,14 @@ #include #include -#include #include -#include "absl/strings/string_view.h" #include "api/transport/field_trial_based_config.h" #include "api/transport/network_types.h" -#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h" +#include "api/units/timestamp.h" +#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h" #include "modules/congestion_controller/goog_cc/delay_based_bwe.h" +#include "modules/congestion_controller/goog_cc/probe_bitrate_estimator.h" #include "system_wrappers/include/clock.h" #include "test/field_trial.h" #include "test/gtest.h" @@ -61,6 +61,7 @@ class RtpStream { // previous frame, no frame will be generated. The frame is split into // packets. int64_t GenerateFrame(int64_t time_now_us, + int64_t* next_sequence_number, std::vector* packets); // The send-side time when the next frame can be generated. @@ -102,8 +103,9 @@ class StreamGenerator { // TODO(holmer): Break out the channel simulation part from this class to make // it possible to simulate different types of channels. - int64_t GenerateFrame(std::vector* packets, - int64_t time_now_us); + int64_t GenerateFrame(int64_t time_now_us, + int64_t* next_sequence_number, + std::vector* packets); private: // Capacity of the simulated channel in bits per second. 
@@ -180,6 +182,7 @@ class DelayBasedBweTest : public ::testing::Test { std::unique_ptr bitrate_estimator_; std::unique_ptr stream_generator_; int64_t arrival_time_offset_ms_; + int64_t next_sequence_number_; bool first_update_; }; diff --git a/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h b/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h index fc12cff7d5..977d320557 100644 --- a/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h +++ b/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h @@ -12,7 +12,9 @@ #include -#include "api/network_state_predictor.h" +#include + +#include "api/transport/bandwidth_usage.h" namespace webrtc { diff --git a/modules/congestion_controller/goog_cc/goog_cc_network_control.cc b/modules/congestion_controller/goog_cc/goog_cc_network_control.cc index 68e623189e..a50a9b6cc6 100644 --- a/modules/congestion_controller/goog_cc/goog_cc_network_control.cc +++ b/modules/congestion_controller/goog_cc/goog_cc_network_control.cc @@ -10,30 +10,39 @@ #include "modules/congestion_controller/goog_cc/goog_cc_network_control.h" -#include #include #include #include #include #include -#include +#include #include #include -#include "absl/strings/match.h" -#include "api/network_state_predictor.h" +#include "absl/strings/string_view.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" +#include "api/transport/bandwidth_usage.h" +#include "api/transport/network_control.h" +#include "api/transport/network_types.h" #include "api/units/data_rate.h" +#include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" +#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h" #include "modules/congestion_controller/goog_cc/alr_detector.h" +#include "modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h" +#include "modules/congestion_controller/goog_cc/delay_based_bwe.h" #include "modules/congestion_controller/goog_cc/loss_based_bwe_v2.h" +#include "modules/congestion_controller/goog_cc/probe_bitrate_estimator.h" #include "modules/congestion_controller/goog_cc/probe_controller.h" #include "modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" -#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" +#include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/logging.h" namespace webrtc { @@ -55,33 +64,26 @@ constexpr float kDefaultPaceMultiplier = 2.5f; // below the current throughput estimate to drain the network queues. 
constexpr double kProbeDropThroughputFraction = 0.85; -bool IsEnabled(const FieldTrialsView* config, absl::string_view key) { - return absl::StartsWith(config->Lookup(key), "Enabled"); -} - -bool IsNotDisabled(const FieldTrialsView* config, absl::string_view key) { - return !absl::StartsWith(config->Lookup(key), "Disabled"); -} - -BandwidthLimitedCause GetBandwidthLimitedCause( - LossBasedState loss_based_state, - bool is_rtt_above_limit, - BandwidthUsage bandwidth_usage, - bool not_probe_if_delay_increased) { - if (not_probe_if_delay_increased) { - if (bandwidth_usage == BandwidthUsage::kBwOverusing || - bandwidth_usage == BandwidthUsage::kBwUnderusing) { - return BandwidthLimitedCause::kDelayBasedLimitedDelayIncreased; - } else if (is_rtt_above_limit) { - return BandwidthLimitedCause::kRttBasedBackOffHighRtt; - } +BandwidthLimitedCause GetBandwidthLimitedCause(LossBasedState loss_based_state, + bool is_rtt_above_limit, + BandwidthUsage bandwidth_usage) { + if (bandwidth_usage == BandwidthUsage::kBwOverusing || + bandwidth_usage == BandwidthUsage::kBwUnderusing) { + return BandwidthLimitedCause::kDelayBasedLimitedDelayIncreased; + } else if (is_rtt_above_limit) { + return BandwidthLimitedCause::kRttBasedBackOffHighRtt; } switch (loss_based_state) { case LossBasedState::kDecreasing: - return BandwidthLimitedCause::kLossLimitedBweDecreasing; + // Probes may not be sent in this state. + return BandwidthLimitedCause::kLossLimitedBwe; + case webrtc::LossBasedState::kIncreaseUsingPadding: + // Probes may not be sent in this state. + return BandwidthLimitedCause::kLossLimitedBwe; case LossBasedState::kIncreasing: + // Probes may be sent in this state. return BandwidthLimitedCause::kLossLimitedBweIncreasing; - default: + case LossBasedState::kDelayBasedEstimate: return BandwidthLimitedCause::kDelayBasedLimited; } } @@ -90,50 +92,48 @@ BandwidthLimitedCause GetBandwidthLimitedCause( GoogCcNetworkController::GoogCcNetworkController(NetworkControllerConfig config, GoogCcConfig goog_cc_config) - : key_value_config_(config.key_value_config ? 
config.key_value_config - : &trial_based_config_), - event_log_(config.event_log), + : env_(config.env), packet_feedback_only_(goog_cc_config.feedback_only), safe_reset_on_route_change_("Enabled"), safe_reset_acknowledged_rate_("ack"), use_min_allocatable_as_lower_bound_( - IsNotDisabled(key_value_config_, "WebRTC-Bwe-MinAllocAsLowerBound")), - ignore_probes_lower_than_network_estimate_(IsNotDisabled( - key_value_config_, - "WebRTC-Bwe-IgnoreProbesLowerThanNetworkStateEstimate")), + !env_.field_trials().IsDisabled("WebRTC-Bwe-MinAllocAsLowerBound")), + ignore_probes_lower_than_network_estimate_( + !env_.field_trials().IsDisabled( + "WebRTC-Bwe-IgnoreProbesLowerThanNetworkStateEstimate")), limit_probes_lower_than_throughput_estimate_( - IsEnabled(key_value_config_, - "WebRTC-Bwe-LimitProbesLowerThanThroughputEstimate")), - rate_control_settings_( - RateControlSettings::ParseFromKeyValueConfig(key_value_config_)), - pace_at_max_of_bwe_and_lower_link_capacity_( - IsEnabled(key_value_config_, - "WebRTC-Bwe-PaceAtMaxOfBweAndLowerLinkCapacity")), + !env_.field_trials().IsDisabled( + "WebRTC-Bwe-LimitProbesLowerThanThroughputEstimate")), + rate_control_settings_(env_.field_trials()), + limit_pacingfactor_by_upper_link_capacity_estimate_( + env_.field_trials().IsEnabled( + "WebRTC-Bwe-LimitPacingFactorByUpperLinkCapacityEstimate")), probe_controller_( - new ProbeController(key_value_config_, config.event_log)), + new ProbeController(&env_.field_trials(), &env_.event_log())), congestion_window_pushback_controller_( rate_control_settings_.UseCongestionWindowPushback() ? std::make_unique( - key_value_config_) + env_.field_trials()) : nullptr), bandwidth_estimation_( - std::make_unique(key_value_config_, - event_log_)), - alr_detector_( - std::make_unique(key_value_config_, config.event_log)), - probe_bitrate_estimator_(new ProbeBitrateEstimator(config.event_log)), + std::make_unique(&env_.field_trials(), + &env_.event_log())), + alr_detector_(std::make_unique(&env_.field_trials(), + &env_.event_log())), + probe_bitrate_estimator_(new ProbeBitrateEstimator(&env_.event_log())), network_estimator_(std::move(goog_cc_config.network_state_estimator)), network_state_predictor_( std::move(goog_cc_config.network_state_predictor)), - delay_based_bwe_(new DelayBasedBwe(key_value_config_, - event_log_, + delay_based_bwe_(new DelayBasedBwe(&env_.field_trials(), + &env_.event_log(), network_state_predictor_.get())), acknowledged_bitrate_estimator_( - AcknowledgedBitrateEstimatorInterface::Create(key_value_config_)), + AcknowledgedBitrateEstimatorInterface::Create(&env_.field_trials())), initial_config_(config), last_loss_based_target_rate_(*config.constraints.starting_rate), last_pushback_target_rate_(last_loss_based_target_rate_), last_stable_target_rate_(last_loss_based_target_rate_), + last_loss_base_state_(LossBasedState::kDelayBasedEstimate), pacing_factor_(config.stream_based_config.pacing_factor.value_or( kDefaultPaceMultiplier)), min_total_allocated_bitrate_( @@ -144,7 +144,7 @@ GoogCcNetworkController::GoogCcNetworkController(NetworkControllerConfig config, RTC_DCHECK(config.constraints.at_time.IsFinite()); ParseFieldTrial( {&safe_reset_on_route_change_, &safe_reset_acknowledged_rate_}, - key_value_config_->Lookup("WebRTC-Bwe-SafeResetOnRouteChange")); + env_.field_trials().Lookup("WebRTC-Bwe-SafeResetOnRouteChange")); if (delay_based_bwe_) delay_based_bwe_->SetMinBitrate(kCongestionControllerMinBitrate); } @@ -161,7 +161,7 @@ NetworkControlUpdate GoogCcNetworkController::OnNetworkAvailability( 
NetworkControlUpdate GoogCcNetworkController::OnNetworkRouteChange( NetworkRouteChange msg) { if (safe_reset_on_route_change_) { - absl::optional estimated_bitrate; + std::optional estimated_bitrate; if (safe_reset_acknowledged_rate_) { estimated_bitrate = acknowledged_bitrate_estimator_->bitrate(); if (!estimated_bitrate) @@ -180,12 +180,12 @@ NetworkControlUpdate GoogCcNetworkController::OnNetworkRouteChange( } acknowledged_bitrate_estimator_ = - AcknowledgedBitrateEstimatorInterface::Create(key_value_config_); - probe_bitrate_estimator_.reset(new ProbeBitrateEstimator(event_log_)); + AcknowledgedBitrateEstimatorInterface::Create(&env_.field_trials()); + probe_bitrate_estimator_.reset(new ProbeBitrateEstimator(&env_.event_log())); if (network_estimator_) network_estimator_->OnRouteChange(msg); - delay_based_bwe_.reset(new DelayBasedBwe(key_value_config_, event_log_, - network_state_predictor_.get())); + delay_based_bwe_.reset(new DelayBasedBwe( + &env_.field_trials(), &env_.event_log(), network_state_predictor_.get())); bandwidth_estimation_->OnRouteChange(); probe_controller_->Reset(msg.at_time); NetworkControlUpdate update; @@ -206,7 +206,12 @@ NetworkControlUpdate GoogCcNetworkController::OnProcessInterval( probe_controller_->EnablePeriodicAlrProbing( *initial_config_->stream_based_config.requests_alr_probing); } - absl::optional total_bitrate = + if (initial_config_->stream_based_config.enable_repeated_initial_probing) { + probe_controller_->EnableRepeatedInitialProbing( + *initial_config_->stream_based_config + .enable_repeated_initial_probing); + } + std::optional total_bitrate = initial_config_->stream_based_config.max_total_allocated_bitrate; if (total_bitrate) { auto probes = probe_controller_->OnMaxTotalAllocatedBitrate( @@ -221,7 +226,7 @@ NetworkControlUpdate GoogCcNetworkController::OnProcessInterval( msg.pacer_queue->bytes()); } bandwidth_estimation_->UpdateEstimate(msg.at_time); - absl::optional start_time_ms = + std::optional start_time_ms = alr_detector_->GetApplicationLimitedRegionStartTime(); probe_controller_->SetAlrStartTimeMs(start_time_ms); @@ -230,7 +235,7 @@ NetworkControlUpdate GoogCcNetworkController::OnProcessInterval( probes.begin(), probes.end()); if (rate_control_settings_.UseCongestionWindow() && - last_packet_received_time_.IsFinite() && !feedback_max_rtts_.empty()) { + !feedback_max_rtts_.empty()) { UpdateCongestionWindowSize(); } if (congestion_window_pushback_controller_ && current_data_window_) { @@ -251,8 +256,6 @@ NetworkControlUpdate GoogCcNetworkController::OnRemoteBitrateReport( } bandwidth_estimation_->UpdateReceiverEstimate(msg.receive_time, msg.bandwidth); - BWE_TEST_LOGGING_PLOT(1, "REMB_kbps", msg.receive_time.ms(), - msg.bandwidth.bps() / 1000); return NetworkControlUpdate(); } @@ -295,8 +298,7 @@ NetworkControlUpdate GoogCcNetworkController::OnSentPacket( } NetworkControlUpdate GoogCcNetworkController::OnReceivedPacket( - ReceivedPacket received_packet) { - last_packet_received_time_ = received_packet.receive_time; + ReceivedPacket /* received_packet */) { return NetworkControlUpdate(); } @@ -490,7 +492,7 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback( lost_packets_since_last_loss_update_ = 0; } } - absl::optional alr_start_time = + std::optional alr_start_time = alr_detector_->GetApplicationLimitedRegionStartTime(); if (previously_in_alr_ && !alr_start_time.has_value()) { @@ -513,18 +515,9 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback( if (network_estimator_) { 
network_estimator_->OnTransportPacketsFeedback(report); - auto prev_estimate = estimate_; - estimate_ = network_estimator_->GetCurrentEstimate(); - // TODO(srte): Make OnTransportPacketsFeedback signal whether the state - // changed to avoid the need for this check. - if (estimate_ && (!prev_estimate || estimate_->last_feed_time != - prev_estimate->last_feed_time)) { - event_log_->Log(std::make_unique( - estimate_->link_capacity_lower, estimate_->link_capacity_upper)); - probe_controller_->SetNetworkStateEstimate(*estimate_); - } + SetNetworkStateEstimate(network_estimator_->GetCurrentEstimate()); } - absl::optional probe_bitrate = + std::optional probe_bitrate = probe_bitrate_estimator_->FetchAndResetLastEstimatedBitrate(); if (ignore_probes_lower_than_network_estimate_ && probe_bitrate && estimate_ && *probe_bitrate < delay_based_bwe_->last_estimate() && @@ -566,7 +559,6 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback( } bandwidth_estimation_->UpdateLossBasedEstimator( report, result.delay_detector_state, probe_bitrate, - estimate_ ? estimate_->link_capacity_upper : DataRate::PlusInfinity(), alr_start_time.has_value()); if (result.updated) { // Update the estimate in the ProbeController, in case we want to probe. @@ -600,10 +592,24 @@ NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback( NetworkControlUpdate GoogCcNetworkController::OnNetworkStateEstimate( NetworkStateEstimate msg) { - estimate_ = msg; + if (!network_estimator_) { + SetNetworkStateEstimate(msg); + } return NetworkControlUpdate(); } +void GoogCcNetworkController::SetNetworkStateEstimate( + std::optional estimate) { + auto prev_estimate = estimate_; + estimate_ = estimate; + if (estimate_ && (!prev_estimate || + estimate_->update_time != prev_estimate->update_time)) { + env_.event_log().Log(std::make_unique( + estimate_->link_capacity_lower, estimate_->link_capacity_upper)); + probe_controller_->SetNetworkStateEstimate(*estimate_); + } +} + NetworkControlUpdate GoogCcNetworkController::GetNetworkState( Timestamp at_time) const { NetworkControlUpdate update; @@ -631,14 +637,9 @@ void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( uint8_t fraction_loss = bandwidth_estimation_->fraction_loss(); TimeDelta round_trip_time = bandwidth_estimation_->round_trip_time(); DataRate loss_based_target_rate = bandwidth_estimation_->target_rate(); + LossBasedState loss_based_state = bandwidth_estimation_->loss_based_state(); DataRate pushback_target_rate = loss_based_target_rate; - BWE_TEST_LOGGING_PLOT(1, "fraction_loss_%", at_time.ms(), - (fraction_loss * 100) / 256); - BWE_TEST_LOGGING_PLOT(1, "rtt_ms", at_time.ms(), round_trip_time.ms()); - BWE_TEST_LOGGING_PLOT(1, "Target_bitrate_kbps", at_time.ms(), - loss_based_target_rate.kbps()); - double cwnd_reduce_ratio = 0.0; if (congestion_window_pushback_controller_) { int64_t pushback_rate = @@ -658,6 +659,7 @@ void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( stable_target_rate = std::min(stable_target_rate, pushback_target_rate); if ((loss_based_target_rate != last_loss_based_target_rate_) || + (loss_based_state != last_loss_base_state_) || (fraction_loss != last_estimated_fraction_loss_) || (round_trip_time != last_estimated_round_trip_time_) || (pushback_target_rate != last_pushback_target_rate_) || @@ -667,6 +669,7 @@ void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( last_estimated_fraction_loss_ = fraction_loss; last_estimated_round_trip_time_ = round_trip_time; last_stable_target_rate_ = stable_target_rate; + 
last_loss_base_state_ = loss_based_state; alr_detector_->SetEstimatedBitrate(loss_based_target_rate.bps()); @@ -690,11 +693,9 @@ void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( auto probes = probe_controller_->SetEstimatedBitrate( loss_based_target_rate, - GetBandwidthLimitedCause( - bandwidth_estimation_->loss_based_state(), - bandwidth_estimation_->IsRttAboveLimit(), - delay_based_bwe_->last_state(), - probe_controller_->DontProbeIfDelayIncreased()), + GetBandwidthLimitedCause(bandwidth_estimation_->loss_based_state(), + bandwidth_estimation_->IsRttAboveLimit(), + delay_based_bwe_->last_state()), at_time); update->probe_cluster_configs.insert(update->probe_cluster_configs.end(), probes.begin(), probes.end()); @@ -708,19 +709,23 @@ void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( PacerConfig GoogCcNetworkController::GetPacingRates(Timestamp at_time) const { // Pacing rate is based on target rate before congestion window pushback, // because we don't want to build queues in the pacer when pushback occurs. - DataRate pacing_rate = DataRate::Zero(); - if (pace_at_max_of_bwe_and_lower_link_capacity_ && estimate_) { - pacing_rate = - std::max({min_total_allocated_bitrate_, estimate_->link_capacity_lower, - last_loss_based_target_rate_}) * - pacing_factor_; - } else { + DataRate pacing_rate = + std::max(min_total_allocated_bitrate_, last_loss_based_target_rate_) * + pacing_factor_; + + if (limit_pacingfactor_by_upper_link_capacity_estimate_ && estimate_ && + estimate_->link_capacity_upper.IsFinite() && + pacing_rate > estimate_->link_capacity_upper) { pacing_rate = - std::max(min_total_allocated_bitrate_, last_loss_based_target_rate_) * - pacing_factor_; + std::max({estimate_->link_capacity_upper, min_total_allocated_bitrate_, + last_loss_based_target_rate_}); } + DataRate padding_rate = - std::min(max_padding_rate_, last_pushback_target_rate_); + (last_loss_base_state_ == LossBasedState::kIncreaseUsingPadding) + ? 
std::max(max_padding_rate_, last_loss_based_target_rate_) + : max_padding_rate_; + padding_rate = std::min(padding_rate, last_pushback_target_rate_); PacerConfig msg; msg.at_time = at_time; msg.time_window = TimeDelta::Seconds(1); diff --git a/modules/congestion_controller/goog_cc/goog_cc_network_control.h b/modules/congestion_controller/goog_cc/goog_cc_network_control.h index 37a064e37c..46d1cabe53 100644 --- a/modules/congestion_controller/goog_cc/goog_cc_network_control.h +++ b/modules/congestion_controller/goog_cc/goog_cc_network_control.h @@ -15,22 +15,23 @@ #include #include +#include #include -#include "absl/types/optional.h" -#include "api/field_trials_view.h" +#include "api/environment/environment.h" #include "api/network_state_predictor.h" -#include "api/rtc_event_log/rtc_event_log.h" -#include "api/transport/field_trial_based_config.h" #include "api/transport/network_control.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h" #include "modules/congestion_controller/goog_cc/alr_detector.h" #include "modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h" #include "modules/congestion_controller/goog_cc/delay_based_bwe.h" +#include "modules/congestion_controller/goog_cc/loss_based_bwe_v2.h" +#include "modules/congestion_controller/goog_cc/probe_bitrate_estimator.h" #include "modules/congestion_controller/goog_cc/probe_controller.h" #include "modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h" #include "rtc_base/experiments/field_trial_parser.h" @@ -82,10 +83,9 @@ class GoogCcNetworkController : public NetworkControllerInterface { Timestamp at_time); void UpdateCongestionWindowSize(); PacerConfig GetPacingRates(Timestamp at_time) const; - const FieldTrialBasedConfig trial_based_config_; + void SetNetworkStateEstimate(std::optional estimate); - const FieldTrialsView* const key_value_config_; - RtcEventLog* const event_log_; + const Environment env_; const bool packet_feedback_only_; FieldTrialFlag safe_reset_on_route_change_; FieldTrialFlag safe_reset_acknowledged_rate_; @@ -93,7 +93,7 @@ class GoogCcNetworkController : public NetworkControllerInterface { const bool ignore_probes_lower_than_network_estimate_; const bool limit_probes_lower_than_throughput_estimate_; const RateControlSettings rate_control_settings_; - const bool pace_at_max_of_bwe_and_lower_link_capacity_; + const bool limit_pacingfactor_by_upper_link_capacity_estimate_; const std::unique_ptr probe_controller_; const std::unique_ptr @@ -108,16 +108,16 @@ class GoogCcNetworkController : public NetworkControllerInterface { std::unique_ptr acknowledged_bitrate_estimator_; - absl::optional initial_config_; + std::optional initial_config_; DataRate min_target_rate_ = DataRate::Zero(); DataRate min_data_rate_ = DataRate::Zero(); DataRate max_data_rate_ = DataRate::PlusInfinity(); - absl::optional starting_rate_; + std::optional starting_rate_; bool first_packet_sent_ = false; - absl::optional estimate_; + std::optional estimate_; Timestamp next_loss_update_ = Timestamp::MinusInfinity(); int lost_packets_since_last_loss_update_ = 0; @@ -128,10 +128,10 @@ class GoogCcNetworkController : public NetworkControllerInterface { DataRate last_loss_based_target_rate_; DataRate last_pushback_target_rate_; DataRate last_stable_target_rate_; + LossBasedState last_loss_base_state_; 
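As an aside on the GetPacingRates() change above: the pacing rate is now the loss-based target (never below the minimum allocated bitrate) times the pacing factor, optionally capped by the upper link capacity estimate, and the padding rate is raised to the loss-based target whenever loss-based BWE is in kIncreaseUsingPadding. Below is a minimal standalone sketch of that arithmetic only, using plain kbps doubles instead of webrtc::DataRate; every name in it is invented for illustration and is not WebRTC API.

// Minimal sketch of the pacing/padding arithmetic shown in GetPacingRates
// above. Plain doubles (kbps) stand in for webrtc::DataRate; all names here
// are illustrative, not WebRTC API.
#include <algorithm>
#include <cstdio>
#include <limits>

struct PacingInputs {
  double min_total_allocated_kbps;
  double loss_based_target_kbps;
  double pushback_target_kbps;
  double max_padding_kbps;
  double pacing_factor;               // e.g. 2.5 (kDefaultPaceMultiplier).
  double upper_link_capacity_kbps;    // +inf when no estimate is available.
  bool limit_by_upper_link_capacity;  // Field-trial gate.
  bool increase_using_padding;        // Loss-based state.
};

struct PacingRates {
  double pacing_kbps;
  double padding_kbps;
};

PacingRates ComputePacingRates(const PacingInputs& in) {
  // Pacing rate: target (before pushback) times the pacing factor, but never
  // below the minimum allocated bitrate.
  double pacing = std::max(in.min_total_allocated_kbps,
                           in.loss_based_target_kbps) *
                  in.pacing_factor;
  // Optionally cap the pacing-factor head room at the upper link capacity
  // estimate, without going below the target or the minimum allocation.
  if (in.limit_by_upper_link_capacity &&
      in.upper_link_capacity_kbps < std::numeric_limits<double>::infinity() &&
      pacing > in.upper_link_capacity_kbps) {
    pacing = std::max({in.upper_link_capacity_kbps,
                       in.min_total_allocated_kbps,
                       in.loss_based_target_kbps});
  }
  // Padding: raised to the loss-based target while probing with padding,
  // but never above the pushback-limited target.
  double padding = in.increase_using_padding
                       ? std::max(in.max_padding_kbps, in.loss_based_target_kbps)
                       : in.max_padding_kbps;
  padding = std::min(padding, in.pushback_target_kbps);
  return {pacing, padding};
}

int main() {
  PacingRates r = ComputePacingRates({.min_total_allocated_kbps = 300,
                                      .loss_based_target_kbps = 1000,
                                      .pushback_target_kbps = 900,
                                      .max_padding_kbps = 0,
                                      .pacing_factor = 2.5,
                                      .upper_link_capacity_kbps = 1500,
                                      .limit_by_upper_link_capacity = true,
                                      .increase_using_padding = true});
  std::printf("pacing=%.0f kbps padding=%.0f kbps\n", r.pacing_kbps,
              r.padding_kbps);
}

With the sample values in main() this prints pacing=1500 kbps and padding=900 kbps, i.e. both the upper-capacity cap and the pushback cap take effect.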
- absl::optional last_estimated_fraction_loss_ = 0; + std::optional last_estimated_fraction_loss_ = 0; TimeDelta last_estimated_round_trip_time_ = TimeDelta::PlusInfinity(); - Timestamp last_packet_received_time_ = Timestamp::MinusInfinity(); double pacing_factor_; DataRate min_total_allocated_bitrate_; @@ -139,7 +139,7 @@ class GoogCcNetworkController : public NetworkControllerInterface { bool previously_in_alr_ = false; - absl::optional current_data_window_; + std::optional current_data_window_; }; } // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc b/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc index 0483dbf54d..abe9a9b53d 100644 --- a/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc +++ b/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc @@ -8,20 +8,38 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include +#include +#include +#include #include +#include +#include +#include #include "absl/strings/string_view.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/test/network_emulation/create_cross_traffic.h" #include "api/test/network_emulation/cross_traffic.h" #include "api/transport/goog_cc_factory.h" +#include "api/transport/network_control.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" +#include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "call/video_receive_stream.h" #include "logging/rtc_event_log/mock/mock_rtc_event_log.h" #include "test/field_trial.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "test/network/network_emulation.h" +#include "test/scenario/call_client.h" +#include "test/scenario/column_printer.h" #include "test/scenario/scenario.h" +#include "test/scenario/scenario_config.h" using ::testing::IsEmpty; using ::testing::NiceMock; @@ -78,9 +96,9 @@ CallClient* CreateVideoSendingClient( NetworkRouteChange CreateRouteChange( Timestamp time, - absl::optional start_rate = absl::nullopt, - absl::optional min_rate = absl::nullopt, - absl::optional max_rate = absl::nullopt) { + std::optional start_rate = std::nullopt, + std::optional min_rate = std::nullopt, + std::optional max_rate = std::nullopt) { NetworkRouteChange route_change; route_change.at_time = time; route_change.constraints.at_time = time; @@ -105,13 +123,13 @@ PacketResult CreatePacketResult(Timestamp arrival_time, // Simulate sending packets and receiving transport feedback during // `runtime_ms`, then return the final target birate. 
-absl::optional PacketTransmissionAndFeedbackBlock( +std::optional PacketTransmissionAndFeedbackBlock( NetworkControllerInterface* controller, int64_t runtime_ms, int64_t delay, Timestamp& current_time) { NetworkControlUpdate update; - absl::optional target_bitrate; + std::optional target_bitrate; int64_t delay_buildup = 0; int64_t start_time_ms = current_time.ms(); while (current_time.ms() - start_time_ms < runtime_ms) { @@ -260,7 +278,7 @@ class NetworkControllerTestFixture { int starting_bandwidth_kbps = kInitialBitrateKbps, int min_data_rate_kbps = 0, int max_data_rate_kbps = 5 * kInitialBitrateKbps) { - NetworkControllerConfig config; + NetworkControllerConfig config(env_); config.constraints.at_time = Timestamp::Zero(); config.constraints.min_data_rate = DataRate::KilobitsPerSec(min_data_rate_kbps); @@ -268,20 +286,23 @@ class NetworkControllerTestFixture { DataRate::KilobitsPerSec(max_data_rate_kbps); config.constraints.starting_rate = DataRate::KilobitsPerSec(starting_bandwidth_kbps); - config.event_log = &event_log_; return config; } NiceMock event_log_; + const Environment env_ = CreateEnvironment(&event_log_); GoogCcNetworkControllerFactory factory_; }; -TEST(GoogCcNetworkControllerTest, InitializeTargetRateOnFirstProcessInterval) { +TEST(GoogCcNetworkControllerTest, + InitializeTargetRateOnFirstProcessIntervalAfterNetworkAvailable) { NetworkControllerTestFixture fixture; std::unique_ptr controller = fixture.CreateController(); - NetworkControlUpdate update = + NetworkControlUpdate update = controller->OnNetworkAvailability( + {.at_time = Timestamp::Millis(123456), .network_available = true}); + update = controller->OnProcessInterval({.at_time = Timestamp::Millis(123456)}); EXPECT_EQ(update.target_rate->target_rate, kInitialBitrate); @@ -298,8 +319,9 @@ TEST(GoogCcNetworkControllerTest, ReactsToChangedNetworkConditions) { std::unique_ptr controller = fixture.CreateController(); Timestamp current_time = Timestamp::Millis(123); - NetworkControlUpdate update = - controller->OnProcessInterval({.at_time = current_time}); + NetworkControlUpdate update = controller->OnNetworkAvailability( + {.at_time = current_time, .network_available = true}); + update = controller->OnProcessInterval({.at_time = current_time}); update = controller->OnRemoteBitrateReport( {.receive_time = current_time, .bandwidth = kInitialBitrate * 2}); @@ -323,8 +345,11 @@ TEST(GoogCcNetworkControllerTest, OnNetworkRouteChanged) { std::unique_ptr controller = fixture.CreateController(); Timestamp current_time = Timestamp::Millis(123); + NetworkControlUpdate update = controller->OnNetworkAvailability( + {.at_time = current_time, .network_available = true}); DataRate new_bitrate = DataRate::BitsPerSec(200000); - NetworkControlUpdate update = controller->OnNetworkRouteChange( + + update = controller->OnNetworkRouteChange( CreateRouteChange(current_time, new_bitrate)); EXPECT_EQ(update.target_rate->target_rate, new_bitrate); EXPECT_EQ(update.pacer_config->data_rate(), new_bitrate * kDefaultPacingRate); @@ -345,7 +370,11 @@ TEST(GoogCcNetworkControllerTest, ProbeOnRouteChange) { std::unique_ptr controller = fixture.CreateController(); Timestamp current_time = Timestamp::Millis(123); - NetworkControlUpdate update = controller->OnNetworkRouteChange( + NetworkControlUpdate update = controller->OnNetworkAvailability( + {.at_time = current_time, .network_available = true}); + current_time += TimeDelta::Seconds(3); + + update = controller->OnNetworkRouteChange( CreateRouteChange(current_time, 2 * kInitialBitrate, 
DataRate::Zero(), 20 * kInitialBitrate)); @@ -390,35 +419,38 @@ TEST(GoogCcNetworkControllerTest, UpdatesDelayBasedEstimate) { fixture.CreateController(); const int64_t kRunTimeMs = 6000; Timestamp current_time = Timestamp::Millis(123); + NetworkControlUpdate update = controller->OnNetworkAvailability( + {.at_time = current_time, .network_available = true}); // The test must run and insert packets/feedback long enough that the // BWE computes a valid estimate. This is first done in an environment which // simulates no bandwidth limitation, and therefore not built-up delay. - absl::optional target_bitrate_before_delay = + std::optional target_bitrate_before_delay = PacketTransmissionAndFeedbackBlock(controller.get(), kRunTimeMs, 0, current_time); ASSERT_TRUE(target_bitrate_before_delay.has_value()); // Repeat, but this time with a building delay, and make sure that the // estimation is adjusted downwards. - absl::optional target_bitrate_after_delay = + std::optional target_bitrate_after_delay = PacketTransmissionAndFeedbackBlock(controller.get(), kRunTimeMs, 50, current_time); EXPECT_LT(*target_bitrate_after_delay, *target_bitrate_before_delay); } -TEST(GoogCcNetworkControllerTest, PaceAtMaxOfLowerLinkCapacityAndBwe) { +TEST(GoogCcNetworkControllerTest, LimitPacingFactorToUpperLinkCapacity) { ScopedFieldTrials trial( - "WebRTC-Bwe-PaceAtMaxOfBweAndLowerLinkCapacity/Enabled/"); + "WebRTC-Bwe-LimitPacingFactorByUpperLinkCapacityEstimate/Enabled/"); NetworkControllerTestFixture fixture; std::unique_ptr controller = fixture.CreateController(); Timestamp current_time = Timestamp::Millis(123); - NetworkControlUpdate update = - controller->OnProcessInterval({.at_time = current_time}); + NetworkControlUpdate update = controller->OnNetworkAvailability( + {.at_time = current_time, .network_available = true}); + update = controller->OnProcessInterval({.at_time = current_time}); current_time += TimeDelta::Millis(100); - NetworkStateEstimate network_estimate = {.link_capacity_lower = - 10 * kInitialBitrate}; + NetworkStateEstimate network_estimate = { + .link_capacity_upper = kInitialBitrate * kDefaultPacingRate / 2}; update = controller->OnNetworkStateEstimate(network_estimate); // OnNetworkStateEstimate does not trigger processing a new estimate. So add a // dummy loss report to trigger a BWE update in the next process interval. @@ -432,30 +464,9 @@ TEST(GoogCcNetworkControllerTest, PaceAtMaxOfLowerLinkCapacityAndBwe) { update = controller->OnProcessInterval({.at_time = current_time}); ASSERT_TRUE(update.pacer_config); ASSERT_TRUE(update.target_rate); - ASSERT_LT(update.target_rate->target_rate, - network_estimate.link_capacity_lower); - EXPECT_EQ(update.pacer_config->data_rate().kbps(), - network_estimate.link_capacity_lower.kbps() * kDefaultPacingRate); - - current_time += TimeDelta::Millis(100); - // Set a low link capacity estimate and verify that pacing rate is set - // relative to loss based/delay based estimate. - network_estimate = {.link_capacity_lower = 0.5 * kInitialBitrate}; - update = controller->OnNetworkStateEstimate(network_estimate); - // Again, we need to inject a dummy loss report to trigger an update of the - // BWE in the next process interval. 
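The reworked tests above all follow one driving sequence: declare the network available, run a process interval, and, because OnNetworkStateEstimate() alone does not recompute the target, inject a loss-free TransportLossReport before the next process interval. A hedged sketch of that sequence as a helper, assuming only the NetworkControllerInterface methods exactly as exercised in these tests; the helper name is invented.

// Illustrative helper (not part of this patch) capturing the test pattern:
// state estimate -> dummy loss-free loss report -> process interval.
#include "api/transport/network_control.h"
#include "api/transport/network_types.h"
#include "api/units/timestamp.h"

namespace webrtc {

NetworkControlUpdate KickEstimatorAfterStateEstimate(
    NetworkControllerInterface& controller,
    Timestamp at_time,
    NetworkStateEstimate estimate) {
  // OnNetworkStateEstimate alone does not produce a new target rate...
  controller.OnNetworkStateEstimate(estimate);
  // ...so feed a dummy, loss-free loss report covering `at_time`...
  TransportLossReport loss_report;
  loss_report.start_time = at_time;
  loss_report.end_time = at_time;
  loss_report.receive_time = at_time;
  loss_report.packets_received_delta = 50;
  loss_report.packets_lost_delta = 0;
  controller.OnTransportLossReport(loss_report);
  // ...and let the next process interval pick up the new estimate.
  return controller.OnProcessInterval({.at_time = at_time});
}

}  // namespace webrtc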
- loss_report.start_time = current_time; - loss_report.end_time = current_time; - loss_report.receive_time = current_time; - loss_report.packets_received_delta = 50; - loss_report.packets_lost_delta = 0; - update = controller->OnTransportLossReport(loss_report); - update = controller->OnProcessInterval({.at_time = current_time}); - ASSERT_TRUE(update.target_rate); - ASSERT_GT(update.target_rate->target_rate, - network_estimate.link_capacity_lower); - EXPECT_EQ(update.pacer_config->data_rate().kbps(), - update.target_rate->target_rate.kbps() * kDefaultPacingRate); + EXPECT_GE(update.target_rate->target_rate, kInitialBitrate); + EXPECT_EQ(update.pacer_config->data_rate(), + network_estimate.link_capacity_upper); } // Test congestion window pushback on network delay happens. @@ -738,6 +749,44 @@ TEST(GoogCcScenario, MaintainsLowRateInSafeResetTrial) { EXPECT_NEAR(client->send_bandwidth().kbps(), kLinkCapacity.kbps(), 50); } +TEST(GoogCcScenario, DoNotResetBweUnlessNetworkAdapterChangeOnRoutChange) { + ScopedFieldTrials trial("WebRTC-Bwe-ResetOnAdapterIdChange/Enabled/"); + Scenario s("googcc_unit/do_not_reset_bwe_unless_adapter_change"); + + const DataRate kLinkCapacity = DataRate::KilobitsPerSec(1000); + const DataRate kStartRate = DataRate::KilobitsPerSec(300); + + auto send_net = s.CreateSimulationNode([&](NetworkSimulationConfig* c) { + c->bandwidth = kLinkCapacity; + c->delay = TimeDelta::Millis(50); + }); + auto* client = s.CreateClient("send", [&](CallClientConfig* c) { + c->transport.rates.start_rate = kStartRate; + }); + client->UpdateNetworkAdapterId(0); + auto* route = s.CreateRoutes( + client, {send_net}, s.CreateClient("return", CallClientConfig()), + {s.CreateSimulationNode(NetworkSimulationConfig())}); + s.CreateVideoStream(route->forward(), VideoStreamConfig()); + // Allow the controller to stabilize. + s.RunFor(TimeDelta::Millis(500)); + EXPECT_NEAR(client->send_bandwidth().kbps(), kLinkCapacity.kbps(), 300); + s.ChangeRoute(route->forward(), {send_net}); + // Allow new settings to propagate. + s.RunFor(TimeDelta::Millis(50)); + // Under the trial, the target should not drop. + EXPECT_NEAR(client->send_bandwidth().kbps(), kLinkCapacity.kbps(), 300); + + s.RunFor(TimeDelta::Millis(500)); + // But if the adapter id changes, BWE should reset and start from the + // beginning when the network route changes. + client->UpdateNetworkAdapterId(1); + s.ChangeRoute(route->forward(), {send_net}); + // Allow new settings to propagate. + s.RunFor(TimeDelta::Millis(50)); + EXPECT_NEAR(client->send_bandwidth().kbps(), kStartRate.kbps(), 30); +} + TEST(GoogCcScenario, CutsHighRateInSafeResetTrial) { const DataRate kLinkCapacity = DataRate::KilobitsPerSec(1000); const DataRate kStartRate = DataRate::KilobitsPerSec(300); @@ -758,6 +807,7 @@ TEST(GoogCcScenario, CutsHighRateInSafeResetTrial) { // Allow the controller to stabilize. s.RunFor(TimeDelta::Millis(500)); EXPECT_NEAR(client->send_bandwidth().kbps(), kLinkCapacity.kbps(), 300); + client->UpdateNetworkAdapterId(1); s.ChangeRoute(route->forward(), {send_net}); // Allow new settings to propagate. s.RunFor(TimeDelta::Millis(50)); @@ -790,6 +840,7 @@ TEST(GoogCcScenario, DetectsHighRateInSafeResetTrial) { // Allow the controller to stabilize. s.RunFor(TimeDelta::Millis(2000)); EXPECT_NEAR(client->send_bandwidth().kbps(), kInitialLinkCapacity.kbps(), 50); + client->UpdateNetworkAdapterId(1); s.ChangeRoute(route->forward(), {new_net}); // Allow new settings to propagate, but not probes to be received.
s.RunFor(TimeDelta::Millis(50)); @@ -997,7 +1048,7 @@ TEST_P(GoogCcRttTest, CalculatesRttFromTransporFeedback) { fixture.CreateController(); Timestamp current_time = Timestamp::Millis(123); TimeDelta one_way_delay = TimeDelta::Millis(10); - absl::optional rtt = absl::nullopt; + std::optional rtt = std::nullopt; TransportPacketsFeedback feedback = CreateTransportPacketsFeedback( /*per_packet_network_delay=*/TimeDelta::Millis(50), one_way_delay, diff --git a/modules/congestion_controller/goog_cc/inter_arrival_delta.cc b/modules/congestion_controller/goog_cc/inter_arrival_delta.cc index 2d50d08e6a..0d69257355 100644 --- a/modules/congestion_controller/goog_cc/inter_arrival_delta.cc +++ b/modules/congestion_controller/goog_cc/inter_arrival_delta.cc @@ -11,9 +11,11 @@ #include "modules/congestion_controller/goog_cc/inter_arrival_delta.h" #include +#include #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { diff --git a/modules/congestion_controller/goog_cc/inter_arrival_delta.h b/modules/congestion_controller/goog_cc/inter_arrival_delta.h index 4046590eeb..cfa87f8631 100644 --- a/modules/congestion_controller/goog_cc/inter_arrival_delta.h +++ b/modules/congestion_controller/goog_cc/inter_arrival_delta.h @@ -11,6 +11,8 @@ #ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_INTER_ARRIVAL_DELTA_H_ #define MODULES_CONGESTION_CONTROLLER_GOOG_CC_INTER_ARRIVAL_DELTA_H_ +#include + #include "api/units/time_delta.h" #include "api/units/timestamp.h" diff --git a/modules/congestion_controller/goog_cc/link_capacity_estimator.cc b/modules/congestion_controller/goog_cc/link_capacity_estimator.cc index 9fd537a422..5f3e523c2b 100644 --- a/modules/congestion_controller/goog_cc/link_capacity_estimator.cc +++ b/modules/congestion_controller/goog_cc/link_capacity_estimator.cc @@ -10,7 +10,9 @@ #include "modules/congestion_controller/goog_cc/link_capacity_estimator.h" #include +#include +#include "api/units/data_rate.h" #include "rtc_base/numerics/safe_minmax.h" namespace webrtc { @@ -57,7 +59,7 @@ void LinkCapacityEstimator::Update(DataRate capacity_sample, double alpha) { (1 - alpha) * deviation_kbps_ + alpha * error_kbps * error_kbps / norm; // 0.4 ~= 14 kbit/s at 500 kbit/s // 2.5f ~= 35 kbit/s at 500 kbit/s - deviation_kbps_ = rtc::SafeClamp(deviation_kbps_, 0.4f, 2.5f); + deviation_kbps_ = SafeClamp(deviation_kbps_, 0.4f, 2.5f); } bool LinkCapacityEstimator::has_estimate() const { diff --git a/modules/congestion_controller/goog_cc/link_capacity_estimator.h b/modules/congestion_controller/goog_cc/link_capacity_estimator.h index aa23491d9d..5c4cbd4fe7 100644 --- a/modules/congestion_controller/goog_cc/link_capacity_estimator.h +++ b/modules/congestion_controller/goog_cc/link_capacity_estimator.h @@ -10,7 +10,8 @@ #ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_LINK_CAPACITY_ESTIMATOR_H_ #define MODULES_CONGESTION_CONTROLLER_GOOG_CC_LINK_CAPACITY_ESTIMATOR_H_ -#include "absl/types/optional.h" +#include + #include "api/units/data_rate.h" namespace webrtc { @@ -30,7 +31,7 @@ class LinkCapacityEstimator { void Update(DataRate capacity_sample, double alpha); double deviation_estimate_kbps() const; - absl::optional estimate_kbps_; + std::optional estimate_kbps_; double deviation_kbps_ = 0.4; }; } // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc b/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc index 7524c84d92..81cdad512e 100644 --- 
a/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc +++ b/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc @@ -11,12 +11,18 @@ #include "modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h" #include -#include +#include #include #include "absl/strings/match.h" +#include "absl/strings/string_view.h" +#include "api/field_trials_view.h" +#include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" namespace webrtc { namespace { diff --git a/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc b/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc index 4c0f5fc5ee..25978593ca 100644 --- a/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc +++ b/modules/congestion_controller/goog_cc/loss_based_bwe_v2.cc @@ -12,18 +12,15 @@ #include #include -#include #include #include #include -#include +#include #include #include "absl/algorithm/container.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/field_trials_view.h" -#include "api/network_state_predictor.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" @@ -38,11 +35,14 @@ namespace webrtc { namespace { +constexpr TimeDelta kInitHoldDuration = TimeDelta::Millis(300); +constexpr TimeDelta kMaxHoldDuration = TimeDelta::Seconds(60); + bool IsValid(DataRate datarate) { return datarate.IsFinite(); } -bool IsValid(absl::optional datarate) { +bool IsValid(std::optional datarate) { return datarate.has_value() && IsValid(datarate.value()); } @@ -50,33 +50,8 @@ bool IsValid(Timestamp timestamp) { return timestamp.IsFinite(); } -struct PacketResultsSummary { - int num_packets = 0; - int num_lost_packets = 0; - DataSize total_size = DataSize::Zero(); - Timestamp first_send_time = Timestamp::PlusInfinity(); - Timestamp last_send_time = Timestamp::MinusInfinity(); -}; - -// Returns a `PacketResultsSummary` where `first_send_time` is `PlusInfinity, -// and `last_send_time` is `MinusInfinity`, if `packet_results` is empty. 
-PacketResultsSummary GetPacketResultsSummary( - rtc::ArrayView packet_results) { - PacketResultsSummary packet_results_summary; - - packet_results_summary.num_packets = packet_results.size(); - for (const PacketResult& packet : packet_results) { - if (!packet.IsReceived()) { - packet_results_summary.num_lost_packets++; - } - packet_results_summary.total_size += packet.sent_packet.size; - packet_results_summary.first_send_time = std::min( - packet_results_summary.first_send_time, packet.sent_packet.send_time); - packet_results_summary.last_send_time = std::max( - packet_results_summary.last_send_time, packet.sent_packet.send_time); - } - - return packet_results_summary; +double ToKiloBytes(DataSize datasize) { + return datasize.bytes() / 1000.0; } double GetLossProbability(double inherent_loss, @@ -121,12 +96,14 @@ LossBasedBweV2::LossBasedBweV2(const FieldTrialsView* key_value_config) return; } - current_estimate_.inherent_loss = config_->initial_inherent_loss_estimate; + current_best_estimate_.inherent_loss = + config_->initial_inherent_loss_estimate; observations_.resize(config_->observation_window_size); temporal_weights_.resize(config_->observation_window_size); instant_upper_bound_temporal_weights_.resize( config_->observation_window_size); CalculateTemporalWeights(); + last_hold_info_.duration = kInitHoldDuration; } bool LossBasedBweV2::IsEnabled() const { @@ -134,47 +111,46 @@ bool LossBasedBweV2::IsEnabled() const { } bool LossBasedBweV2::IsReady() const { - return IsEnabled() && IsValid(current_estimate_.loss_limited_bandwidth) && - num_observations_ > 0; + return IsEnabled() && + IsValid(current_best_estimate_.loss_limited_bandwidth) && + num_observations_ >= config_->min_num_observations; +} + +bool LossBasedBweV2::ReadyToUseInStartPhase() const { + return IsReady() && config_->use_in_start_phase; +} + +bool LossBasedBweV2::UseInStartPhase() const { + return config_->use_in_start_phase; } LossBasedBweV2::Result LossBasedBweV2::GetLossBasedResult() const { - Result result; - result.state = current_state_; if (!IsReady()) { if (!IsEnabled()) { RTC_LOG(LS_WARNING) << "The estimator must be enabled before it can be used."; } else { - if (!IsValid(current_estimate_.loss_limited_bandwidth)) { + if (!IsValid(current_best_estimate_.loss_limited_bandwidth)) { RTC_LOG(LS_WARNING) << "The estimator must be initialized before it can be used."; } - if (num_observations_ <= 0) { + if (num_observations_ <= config_->min_num_observations) { RTC_LOG(LS_WARNING) << "The estimator must receive enough loss " "statistics before it can be used."; } } - result.bandwidth_estimate = IsValid(delay_based_estimate_) - ? delay_based_estimate_ - : DataRate::PlusInfinity(); - return result; - } - - if (IsValid(delay_based_estimate_)) { - result.bandwidth_estimate = - std::min({current_estimate_.loss_limited_bandwidth, - GetInstantUpperBound(), delay_based_estimate_}); - } else { - result.bandwidth_estimate = std::min( - current_estimate_.loss_limited_bandwidth, GetInstantUpperBound()); + return {.bandwidth_estimate = IsValid(delay_based_estimate_) + ? 
delay_based_estimate_ + : DataRate::PlusInfinity(), + .state = LossBasedState::kDelayBasedEstimate}; } - return result; + return loss_based_result_; } void LossBasedBweV2::SetAcknowledgedBitrate(DataRate acknowledged_bitrate) { if (IsValid(acknowledged_bitrate)) { acknowledged_bitrate_ = acknowledged_bitrate; + CalculateInstantLowerBound(); } else { RTC_LOG(LS_WARNING) << "The acknowledged bitrate must be finite: " << ToString(acknowledged_bitrate); @@ -183,7 +159,9 @@ void LossBasedBweV2::SetAcknowledgedBitrate(DataRate acknowledged_bitrate) { void LossBasedBweV2::SetBandwidthEstimate(DataRate bandwidth_estimate) { if (IsValid(bandwidth_estimate)) { - current_estimate_.loss_limited_bandwidth = bandwidth_estimate; + current_best_estimate_.loss_limited_bandwidth = bandwidth_estimate; + loss_based_result_ = {.bandwidth_estimate = bandwidth_estimate, + .state = LossBasedState::kDelayBasedEstimate}; } else { RTC_LOG(LS_WARNING) << "The bandwidth estimate must be finite: " << ToString(bandwidth_estimate); @@ -194,6 +172,7 @@ void LossBasedBweV2::SetMinMaxBitrate(DataRate min_bitrate, DataRate max_bitrate) { if (IsValid(min_bitrate)) { min_bitrate_ = min_bitrate; + CalculateInstantLowerBound(); } else { RTC_LOG(LS_WARNING) << "The min bitrate must be finite: " << ToString(min_bitrate); @@ -207,22 +186,11 @@ void LossBasedBweV2::SetMinMaxBitrate(DataRate min_bitrate, } } -void LossBasedBweV2::SetProbeBitrate(absl::optional probe_bitrate) { - if (probe_bitrate.has_value() && IsValid(probe_bitrate.value())) { - probe_bitrate_ = probe_bitrate.value(); - last_probe_timestamp_ = last_send_time_most_recent_observation_; - } -} - void LossBasedBweV2::UpdateBandwidthEstimate( - rtc::ArrayView packet_results, + ArrayView packet_results, DataRate delay_based_estimate, - BandwidthUsage delay_detector_state, - absl::optional probe_bitrate, - DataRate upper_link_capacity, bool in_alr) { delay_based_estimate_ = delay_based_estimate; - upper_link_capacity_ = upper_link_capacity; if (!IsEnabled()) { RTC_LOG(LS_WARNING) << "The estimator must be enabled before it can be used."; @@ -235,19 +203,22 @@ void LossBasedBweV2::UpdateBandwidthEstimate( return; } - if (!PushBackObservation(packet_results, delay_detector_state)) { + if (!PushBackObservation(packet_results)) { return; } - SetProbeBitrate(probe_bitrate); - - if (!IsValid(current_estimate_.loss_limited_bandwidth)) { - RTC_LOG(LS_VERBOSE) - << "The estimator must be initialized before it can be used."; - return; + if (!IsValid(current_best_estimate_.loss_limited_bandwidth)) { + if (!IsValid(delay_based_estimate)) { + RTC_LOG(LS_WARNING) << "The delay based estimate must be finite: " + << ToString(delay_based_estimate); + return; + } + current_best_estimate_.loss_limited_bandwidth = delay_based_estimate; + loss_based_result_ = {.bandwidth_estimate = delay_based_estimate, + .state = LossBasedState::kDelayBasedEstimate}; } - ChannelParameters best_candidate = current_estimate_; + ChannelParameters best_candidate = current_best_estimate_; double objective_max = std::numeric_limits::lowest(); for (ChannelParameters candidate : GetCandidates(in_alr)) { NewtonsMethodUpdate(candidate); @@ -259,21 +230,21 @@ void LossBasedBweV2::UpdateBandwidthEstimate( } } if (best_candidate.loss_limited_bandwidth < - current_estimate_.loss_limited_bandwidth) { + current_best_estimate_.loss_limited_bandwidth) { last_time_estimate_reduced_ = last_send_time_most_recent_observation_; } // Do not increase the estimate if the average loss is greater than current // inherent loss. 
- if (GetAverageReportedLossRatio() > best_candidate.inherent_loss && + if (average_reported_loss_ratio_ > best_candidate.inherent_loss && config_->not_increase_if_inherent_loss_less_than_average_loss && - current_estimate_.loss_limited_bandwidth < + current_best_estimate_.loss_limited_bandwidth < best_candidate.loss_limited_bandwidth) { best_candidate.loss_limited_bandwidth = - current_estimate_.loss_limited_bandwidth; + current_best_estimate_.loss_limited_bandwidth; } - if (IsBandwidthLimitedDueToLoss()) { + if (IsInLossLimitedState()) { // Bound the estimate increase if: // 1. The estimate has been increased for less than // `delayed_increase_window` ago, and @@ -287,72 +258,155 @@ void LossBasedBweV2::UpdateBandwidthEstimate( bandwidth_limit_in_current_window_; } - bool increasing_when_loss_limited = - IsEstimateIncreasingWhenLossLimited(best_candidate); - // Bound the best candidate by the acked bitrate unless there is a recent - // probe result. - if (increasing_when_loss_limited && !IsValid(probe_bitrate_) && - IsValid(acknowledged_bitrate_)) { + bool increasing_when_loss_limited = IsEstimateIncreasingWhenLossLimited( + /*old_estimate=*/current_best_estimate_.loss_limited_bandwidth, + /*new_estimate=*/best_candidate.loss_limited_bandwidth); + // Bound the best candidate by the acked bitrate. + if (increasing_when_loss_limited && IsValid(acknowledged_bitrate_)) { + double rampup_factor = config_->bandwidth_rampup_upper_bound_factor; + if (IsValid(last_hold_info_.rate) && + acknowledged_bitrate_ < + config_->bandwidth_rampup_hold_threshold * last_hold_info_.rate) { + rampup_factor = config_->bandwidth_rampup_upper_bound_factor_in_hold; + } + best_candidate.loss_limited_bandwidth = - IsValid(best_candidate.loss_limited_bandwidth) - ? std::min(best_candidate.loss_limited_bandwidth, - config_->bandwidth_rampup_upper_bound_factor * - (*acknowledged_bitrate_)) - : config_->bandwidth_rampup_upper_bound_factor * - (*acknowledged_bitrate_); + std::max(current_best_estimate_.loss_limited_bandwidth, + std::min(best_candidate.loss_limited_bandwidth, + rampup_factor * (*acknowledged_bitrate_))); + // Increase current estimate by at least 1kbps to make sure that the state + // will be switched to kIncreasing, thus padding is triggered. 
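The acked-rate clamp applied in the block above reduces to a single expression: the winning candidate may not exceed rampup_factor times the acknowledged bitrate, a separate factor applies while the acknowledged rate is still below hold_threshold times the last HOLD rate, and the bound never pushes the estimate below its current value. A small sketch in plain kbps doubles; the names are illustrative and the defaults in the comment are the field-trial defaults from this patch.

#include <algorithm>
#include <cmath>

// Defaults from this patch: rampup_factor = 1000000,
// rampup_factor_in_hold = 1000000, hold_threshold = 1.3. The last hold rate is
// +infinity until a HOLD has actually happened, so std::isfinite() stands in
// for IsValid() here.
double BoundIncreaseByAckedRate(double current_kbps,
                                double candidate_kbps,
                                double acked_kbps,
                                double last_hold_rate_kbps,
                                double rampup_factor,
                                double rampup_factor_in_hold,
                                double hold_threshold) {
  double factor = rampup_factor;
  if (std::isfinite(last_hold_rate_kbps) &&
      acked_kbps < hold_threshold * last_hold_rate_kbps) {
    factor = rampup_factor_in_hold;
  }
  return std::max(current_kbps,
                  std::min(candidate_kbps, factor * acked_kbps));
}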
+ if (loss_based_result_.state == LossBasedState::kDecreasing && + best_candidate.loss_limited_bandwidth == + current_best_estimate_.loss_limited_bandwidth) { + best_candidate.loss_limited_bandwidth = + current_best_estimate_.loss_limited_bandwidth + + DataRate::BitsPerSec(1); + } } } - if (IsEstimateIncreasingWhenLossLimited(best_candidate) && - best_candidate.loss_limited_bandwidth < delay_based_estimate_) { - current_state_ = LossBasedState::kIncreasing; - } else if (best_candidate.loss_limited_bandwidth < delay_based_estimate_) { - current_state_ = LossBasedState::kDecreasing; - } else if (best_candidate.loss_limited_bandwidth >= delay_based_estimate_) { - current_state_ = LossBasedState::kDelayBasedEstimate; + DataRate bounded_bandwidth_estimate = DataRate::PlusInfinity(); + if (IsValid(delay_based_estimate_)) { + bounded_bandwidth_estimate = + std::max(GetInstantLowerBound(), + std::min({best_candidate.loss_limited_bandwidth, + GetInstantUpperBound(), delay_based_estimate_})); + } else { + bounded_bandwidth_estimate = std::max( + GetInstantLowerBound(), std::min(best_candidate.loss_limited_bandwidth, + GetInstantUpperBound())); + } + if (config_->bound_best_candidate && + bounded_bandwidth_estimate < best_candidate.loss_limited_bandwidth) { + RTC_LOG(LS_INFO) << "Resetting loss based BWE to " + << bounded_bandwidth_estimate.kbps() + << "due to loss. Avg loss rate: " + << average_reported_loss_ratio_; + current_best_estimate_.loss_limited_bandwidth = bounded_bandwidth_estimate; + current_best_estimate_.inherent_loss = 0; + } else { + current_best_estimate_ = best_candidate; + if (config_->lower_bound_by_acked_rate_factor > 0.0) { + current_best_estimate_.loss_limited_bandwidth = + std::max(current_best_estimate_.loss_limited_bandwidth, + GetInstantLowerBound()); + } } - // Use probe bitrate as the estimate limit when probes are requested. - if (config_->probe_integration_enabled && IsValid(probe_bitrate_) && - IsRequestingProbe()) { - if (last_probe_timestamp_ + config_->probe_expiration >= - last_send_time_most_recent_observation_) { - best_candidate.loss_limited_bandwidth = - std::min(probe_bitrate_, best_candidate.loss_limited_bandwidth); + if (loss_based_result_.state == LossBasedState::kDecreasing && + last_hold_info_.timestamp > last_send_time_most_recent_observation_ && + bounded_bandwidth_estimate < delay_based_estimate_) { + // Ensure that acked rate is the lower bound of HOLD rate. + if (config_->lower_bound_by_acked_rate_factor > 0.0) { + last_hold_info_.rate = + std::max(GetInstantLowerBound(), last_hold_info_.rate); } + // BWE is not allowed to increase above the HOLD rate. The purpose of + // HOLD is to not immediately ramp up BWE to a rate that may cause loss. + loss_based_result_.bandwidth_estimate = + std::min(last_hold_info_.rate, bounded_bandwidth_estimate); + return; } - current_estimate_ = best_candidate; + if (IsEstimateIncreasingWhenLossLimited( + /*old_estimate=*/loss_based_result_.bandwidth_estimate, + /*new_estimate=*/bounded_bandwidth_estimate) && + CanKeepIncreasingState(bounded_bandwidth_estimate) && + bounded_bandwidth_estimate < delay_based_estimate_ && + bounded_bandwidth_estimate < max_bitrate_) { + if (config_->padding_duration > TimeDelta::Zero() && + bounded_bandwidth_estimate > last_padding_info_.padding_rate) { + // Start a new padding duration. 
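The HOLD bookkeeping introduced here can be summarized on its own: entering the decreasing state schedules a hold window whose duration grows by hold_duration_factor up to kMaxHoldDuration, and while the window is active the reported estimate may not exceed the recorded hold rate. A simplified sketch with int64 milliseconds and kbps doubles standing in for TimeDelta, Timestamp and DataRate; this is not WebRTC code.

#include <algorithm>
#include <cstdint>

struct HoldInfo {
  int64_t until_ms;     // Hold is active until this time.
  int64_t duration_ms;  // Duration to use for the next hold window.
  double rate_kbps;     // Estimate is capped at this rate while holding.
};

constexpr int64_t kInitHoldMs = 300;       // Mirrors kInitHoldDuration above.
constexpr int64_t kMaxHoldMs = 60 * 1000;  // Mirrors kMaxHoldDuration above.

// Called when the estimator switches into the decreasing state.
HoldInfo EnterHold(const HoldInfo& last,
                   int64_t now_ms,
                   double bounded_estimate_kbps,
                   double hold_duration_factor) {
  return {/*until_ms=*/now_ms + last.duration_ms,
          /*duration_ms=*/std::min<int64_t>(
              kMaxHoldMs,
              static_cast<int64_t>(last.duration_ms * hold_duration_factor)),
          /*rate_kbps=*/bounded_estimate_kbps};
}

// While holding (now_ms < hold.until_ms and the state is still decreasing),
// the reported estimate is capped at the recorded hold rate.
double HeldEstimate(const HoldInfo& hold, double bounded_estimate_kbps) {
  return std::min(hold.rate_kbps, bounded_estimate_kbps);
}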
+ last_padding_info_.padding_rate = bounded_bandwidth_estimate; + last_padding_info_.padding_timestamp = + last_send_time_most_recent_observation_; + } + loss_based_result_.state = config_->padding_duration > TimeDelta::Zero() + ? LossBasedState::kIncreaseUsingPadding + : LossBasedState::kIncreasing; + } else if (bounded_bandwidth_estimate < delay_based_estimate_ && + bounded_bandwidth_estimate < max_bitrate_) { + if (loss_based_result_.state != LossBasedState::kDecreasing && + config_->hold_duration_factor > 0) { + RTC_LOG(LS_INFO) << this << " " << "Switch to HOLD. Bounded BWE: " + << bounded_bandwidth_estimate.kbps() + << ", duration: " << last_hold_info_.duration.ms(); + last_hold_info_ = { + .timestamp = last_send_time_most_recent_observation_ + + last_hold_info_.duration, + .duration = + std::min(kMaxHoldDuration, last_hold_info_.duration * + config_->hold_duration_factor), + .rate = bounded_bandwidth_estimate}; + } + last_padding_info_ = PaddingInfo(); + loss_based_result_.state = LossBasedState::kDecreasing; + } else { + // Reset the HOLD info if delay based estimate works to avoid getting + // stuck in low bitrate. + last_hold_info_ = {.timestamp = Timestamp::MinusInfinity(), + .duration = kInitHoldDuration, + .rate = DataRate::PlusInfinity()}; + last_padding_info_ = PaddingInfo(); + loss_based_result_.state = LossBasedState::kDelayBasedEstimate; + } + loss_based_result_.bandwidth_estimate = bounded_bandwidth_estimate; - if (IsBandwidthLimitedDueToLoss() && + if (IsInLossLimitedState() && (recovering_after_loss_timestamp_.IsInfinite() || recovering_after_loss_timestamp_ + config_->delayed_increase_window < last_send_time_most_recent_observation_)) { bandwidth_limit_in_current_window_ = std::max(kCongestionControllerMinBitrate, - current_estimate_.loss_limited_bandwidth * + current_best_estimate_.loss_limited_bandwidth * config_->max_increase_factor); recovering_after_loss_timestamp_ = last_send_time_most_recent_observation_; } } bool LossBasedBweV2::IsEstimateIncreasingWhenLossLimited( - const ChannelParameters& best_candidate) { - return (current_estimate_.loss_limited_bandwidth < - best_candidate.loss_limited_bandwidth || - (current_estimate_.loss_limited_bandwidth == - best_candidate.loss_limited_bandwidth && - current_state_ == LossBasedState::kIncreasing)) && - IsBandwidthLimitedDueToLoss(); + DataRate old_estimate, + DataRate new_estimate) { + return (old_estimate < new_estimate || + (old_estimate == new_estimate && + (loss_based_result_.state == LossBasedState::kIncreasing || + loss_based_result_.state == + LossBasedState::kIncreaseUsingPadding))) && + IsInLossLimitedState(); } // Returns a `LossBasedBweV2::Config` iff the `key_value_config` specifies a // configuration for the `LossBasedBweV2` which is explicitly enabled. 
-absl::optional LossBasedBweV2::CreateConfig( +std::optional LossBasedBweV2::CreateConfig( const FieldTrialsView* key_value_config) { FieldTrialParameter enabled("Enabled", true); FieldTrialParameter bandwidth_rampup_upper_bound_factor( "BwRampupUpperBoundFactor", 1000000.0); + FieldTrialParameter bandwidth_rampup_upper_bound_factor_in_hold( + "BwRampupUpperBoundInHoldFactor", 1000000.0); + FieldTrialParameter bandwidth_rampup_hold_threshold( + "BwRampupUpperBoundHoldThreshold", 1.3); FieldTrialParameter rampup_acceleration_max_factor( "BwRampupAccelMaxFactor", 0.0); FieldTrialParameter rampup_acceleration_maxout_time( @@ -381,6 +435,8 @@ absl::optional LossBasedBweV2::CreateConfig( "AckedRateCandidate", true); FieldTrialParameter append_delay_based_estimate_candidate( "DelayBasedCandidate", true); + FieldTrialParameter append_upper_bound_candidate_in_alr( + "UpperBoundCandidateInAlr", false); FieldTrialParameter observation_duration_lower_bound( "ObservationDurationLowerBound", TimeDelta::Millis(250)); FieldTrialParameter observation_window_size("ObservationWindowSize", 20); @@ -396,35 +452,32 @@ absl::optional LossBasedBweV2::CreateConfig( 0.9); FieldTrialParameter bandwidth_backoff_lower_bound_factor( "BwBackoffLowerBoundFactor", 1.0); - FieldTrialParameter trendline_integration_enabled( - "TrendlineIntegrationEnabled", false); - FieldTrialParameter trendline_observations_window_size( - "TrendlineObservationsWindowSize", 20); FieldTrialParameter max_increase_factor("MaxIncreaseFactor", 1.3); FieldTrialParameter delayed_increase_window( "DelayedIncreaseWindow", TimeDelta::Millis(300)); - FieldTrialParameter use_acked_bitrate_only_when_overusing( - "UseAckedBitrateOnlyWhenOverusing", false); FieldTrialParameter not_increase_if_inherent_loss_less_than_average_loss( "NotIncreaseIfInherentLossLessThanAverageLoss", true); - FieldTrialParameter high_loss_rate_threshold("HighLossRateThreshold", - 1.0); - FieldTrialParameter bandwidth_cap_at_high_loss_rate( - "BandwidthCapAtHighLossRate", DataRate::KilobitsPerSec(500.0)); - FieldTrialParameter slope_of_bwe_high_loss_func( - "SlopeOfBweHighLossFunc", 1000); - FieldTrialParameter probe_integration_enabled("ProbeIntegrationEnabled", - false); - FieldTrialParameter probe_expiration("ProbeExpiration", - TimeDelta::Seconds(10)); - FieldTrialParameter bound_by_upper_link_capacity_when_loss_limited( - "BoundByUpperLinkCapacityWhenLossLimited", true); FieldTrialParameter not_use_acked_rate_in_alr("NotUseAckedRateInAlr", - false); + true); + FieldTrialParameter use_in_start_phase("UseInStartPhase", false); + FieldTrialParameter min_num_observations("MinNumObservations", 3); + FieldTrialParameter lower_bound_by_acked_rate_factor( + "LowerBoundByAckedRateFactor", 0.0); + FieldTrialParameter hold_duration_factor("HoldDurationFactor", 0.0); + FieldTrialParameter use_byte_loss_rate("UseByteLossRate", false); + FieldTrialParameter padding_duration("PaddingDuration", + TimeDelta::Zero()); + FieldTrialParameter bound_best_candidate("BoundBestCandidate", false); + FieldTrialParameter pace_at_loss_based_estimate( + "PaceAtLossBasedEstimate", false); + FieldTrialParameter median_sending_rate_factor( + "MedianSendingRateFactor", 2.0); if (key_value_config) { ParseFieldTrial({&enabled, &bandwidth_rampup_upper_bound_factor, + &bandwidth_rampup_upper_bound_factor_in_hold, + &bandwidth_rampup_hold_threshold, &rampup_acceleration_max_factor, &rampup_acceleration_maxout_time, &candidate_factors, @@ -440,6 +493,7 @@ absl::optional LossBasedBweV2::CreateConfig( 
&newton_step_size, &append_acknowledged_rate_candidate, &append_delay_based_estimate_candidate, + &append_upper_bound_candidate_in_alr, &observation_duration_lower_bound, &observation_window_size, &sending_rate_smoothing_factor, @@ -448,84 +502,85 @@ absl::optional LossBasedBweV2::CreateConfig( &instant_upper_bound_loss_offset, &temporal_weight_factor, &bandwidth_backoff_lower_bound_factor, - &trendline_integration_enabled, - &trendline_observations_window_size, &max_increase_factor, &delayed_increase_window, - &use_acked_bitrate_only_when_overusing, ¬_increase_if_inherent_loss_less_than_average_loss, - &probe_integration_enabled, - &probe_expiration, - &high_loss_rate_threshold, - &bandwidth_cap_at_high_loss_rate, - &slope_of_bwe_high_loss_func, - &bound_by_upper_link_capacity_when_loss_limited, - ¬_use_acked_rate_in_alr}, + ¬_use_acked_rate_in_alr, + &use_in_start_phase, + &min_num_observations, + &lower_bound_by_acked_rate_factor, + &hold_duration_factor, + &use_byte_loss_rate, + &padding_duration, + &bound_best_candidate, + &pace_at_loss_based_estimate, + &median_sending_rate_factor}, key_value_config->Lookup("WebRTC-Bwe-LossBasedBweV2")); } - absl::optional config; if (!enabled.Get()) { - return config; + return std::nullopt; } - config.emplace(); - config->bandwidth_rampup_upper_bound_factor = + Config config; + config.bandwidth_rampup_upper_bound_factor = bandwidth_rampup_upper_bound_factor.Get(); - config->rampup_acceleration_max_factor = rampup_acceleration_max_factor.Get(); - config->rampup_acceleration_maxout_time = + config.bandwidth_rampup_upper_bound_factor_in_hold = + bandwidth_rampup_upper_bound_factor_in_hold.Get(); + config.bandwidth_rampup_hold_threshold = + bandwidth_rampup_hold_threshold.Get(); + config.rampup_acceleration_max_factor = rampup_acceleration_max_factor.Get(); + config.rampup_acceleration_maxout_time = rampup_acceleration_maxout_time.Get(); - config->candidate_factors = candidate_factors.Get(); - config->higher_bandwidth_bias_factor = higher_bandwidth_bias_factor.Get(); - config->higher_log_bandwidth_bias_factor = + config.candidate_factors = candidate_factors.Get(); + config.higher_bandwidth_bias_factor = higher_bandwidth_bias_factor.Get(); + config.higher_log_bandwidth_bias_factor = higher_log_bandwidth_bias_factor.Get(); - config->inherent_loss_lower_bound = inherent_loss_lower_bound.Get(); - config->loss_threshold_of_high_bandwidth_preference = + config.inherent_loss_lower_bound = inherent_loss_lower_bound.Get(); + config.loss_threshold_of_high_bandwidth_preference = loss_threshold_of_high_bandwidth_preference.Get(); - config->bandwidth_preference_smoothing_factor = + config.bandwidth_preference_smoothing_factor = bandwidth_preference_smoothing_factor.Get(); - config->inherent_loss_upper_bound_bandwidth_balance = + config.inherent_loss_upper_bound_bandwidth_balance = inherent_loss_upper_bound_bandwidth_balance.Get(); - config->inherent_loss_upper_bound_offset = + config.inherent_loss_upper_bound_offset = inherent_loss_upper_bound_offset.Get(); - config->initial_inherent_loss_estimate = initial_inherent_loss_estimate.Get(); - config->newton_iterations = newton_iterations.Get(); - config->newton_step_size = newton_step_size.Get(); - config->append_acknowledged_rate_candidate = + config.initial_inherent_loss_estimate = initial_inherent_loss_estimate.Get(); + config.newton_iterations = newton_iterations.Get(); + config.newton_step_size = newton_step_size.Get(); + config.append_acknowledged_rate_candidate = append_acknowledged_rate_candidate.Get(); - 
config->append_delay_based_estimate_candidate = + config.append_delay_based_estimate_candidate = append_delay_based_estimate_candidate.Get(); - config->observation_duration_lower_bound = + config.append_upper_bound_candidate_in_alr = + append_upper_bound_candidate_in_alr.Get(); + config.observation_duration_lower_bound = observation_duration_lower_bound.Get(); - config->observation_window_size = observation_window_size.Get(); - config->sending_rate_smoothing_factor = sending_rate_smoothing_factor.Get(); - config->instant_upper_bound_temporal_weight_factor = + config.observation_window_size = observation_window_size.Get(); + config.sending_rate_smoothing_factor = sending_rate_smoothing_factor.Get(); + config.instant_upper_bound_temporal_weight_factor = instant_upper_bound_temporal_weight_factor.Get(); - config->instant_upper_bound_bandwidth_balance = + config.instant_upper_bound_bandwidth_balance = instant_upper_bound_bandwidth_balance.Get(); - config->instant_upper_bound_loss_offset = + config.instant_upper_bound_loss_offset = instant_upper_bound_loss_offset.Get(); - config->temporal_weight_factor = temporal_weight_factor.Get(); - config->bandwidth_backoff_lower_bound_factor = + config.temporal_weight_factor = temporal_weight_factor.Get(); + config.bandwidth_backoff_lower_bound_factor = bandwidth_backoff_lower_bound_factor.Get(); - config->trendline_integration_enabled = trendline_integration_enabled.Get(); - config->trendline_observations_window_size = - trendline_observations_window_size.Get(); - config->max_increase_factor = max_increase_factor.Get(); - config->delayed_increase_window = delayed_increase_window.Get(); - config->use_acked_bitrate_only_when_overusing = - use_acked_bitrate_only_when_overusing.Get(); - config->not_increase_if_inherent_loss_less_than_average_loss = + config.max_increase_factor = max_increase_factor.Get(); + config.delayed_increase_window = delayed_increase_window.Get(); + config.not_increase_if_inherent_loss_less_than_average_loss = not_increase_if_inherent_loss_less_than_average_loss.Get(); - config->high_loss_rate_threshold = high_loss_rate_threshold.Get(); - config->bandwidth_cap_at_high_loss_rate = - bandwidth_cap_at_high_loss_rate.Get(); - config->slope_of_bwe_high_loss_func = slope_of_bwe_high_loss_func.Get(); - config->probe_integration_enabled = probe_integration_enabled.Get(); - config->probe_expiration = probe_expiration.Get(); - config->bound_by_upper_link_capacity_when_loss_limited = - bound_by_upper_link_capacity_when_loss_limited.Get(); - config->not_use_acked_rate_in_alr = not_use_acked_rate_in_alr.Get(); - + config.not_use_acked_rate_in_alr = not_use_acked_rate_in_alr.Get(); + config.use_in_start_phase = use_in_start_phase.Get(); + config.min_num_observations = min_num_observations.Get(); + config.lower_bound_by_acked_rate_factor = + lower_bound_by_acked_rate_factor.Get(); + config.hold_duration_factor = hold_duration_factor.Get(); + config.use_byte_loss_rate = use_byte_loss_rate.Get(); + config.padding_duration = padding_duration.Get(); + config.bound_best_candidate = bound_best_candidate.Get(); + config.pace_at_loss_based_estimate = pace_at_loss_based_estimate.Get(); + config.median_sending_rate_factor = median_sending_rate_factor.Get(); return config; } @@ -542,6 +597,18 @@ bool LossBasedBweV2::IsConfigValid() const { << config_->bandwidth_rampup_upper_bound_factor; valid = false; } + if (config_->bandwidth_rampup_upper_bound_factor_in_hold <= 1.0) { + RTC_LOG(LS_WARNING) << "The bandwidth rampup upper bound factor in hold " + "must be 
greater than 1: " + << config_->bandwidth_rampup_upper_bound_factor_in_hold; + valid = false; + } + if (config_->bandwidth_rampup_hold_threshold < 0.0) { + RTC_LOG(LS_WARNING) << "The bandwidth rampup hold threshold must" + "must be non-negative.: " + << config_->bandwidth_rampup_hold_threshold; + valid = false; + } if (config_->rampup_acceleration_max_factor < 0.0) { RTC_LOG(LS_WARNING) << "The rampup acceleration max factor must be non-negative.: " @@ -689,11 +756,6 @@ bool LossBasedBweV2::IsConfigValid() const { << config_->bandwidth_backoff_lower_bound_factor; valid = false; } - if (config_->trendline_observations_window_size < 1) { - RTC_LOG(LS_WARNING) << "The trendline window size must be at least 1: " - << config_->trendline_observations_window_size; - valid = false; - } if (config_->max_increase_factor <= 0.0) { RTC_LOG(LS_WARNING) << "The maximum increase factor must be positive: " << config_->max_increase_factor; @@ -704,16 +766,28 @@ bool LossBasedBweV2::IsConfigValid() const { << config_->delayed_increase_window.ms(); valid = false; } - if (config_->high_loss_rate_threshold <= 0.0 || - config_->high_loss_rate_threshold > 1.0) { - RTC_LOG(LS_WARNING) << "The high loss rate threshold must be in (0, 1]: " - << config_->high_loss_rate_threshold; + if (config_->min_num_observations <= 0) { + RTC_LOG(LS_WARNING) << "The min number of observations must be positive: " + << config_->min_num_observations; + valid = false; + } + if (config_->lower_bound_by_acked_rate_factor < 0.0) { + RTC_LOG(LS_WARNING) + << "The estimate lower bound by acknowledged rate factor must be " + "non-negative: " + << config_->lower_bound_by_acked_rate_factor; valid = false; } return valid; } -double LossBasedBweV2::GetAverageReportedLossRatio() const { +void LossBasedBweV2::UpdateAverageReportedLossRatio() { + average_reported_loss_ratio_ = + (config_->use_byte_loss_rate ? 
CalculateAverageReportedByteLossRatio() + : CalculateAverageReportedPacketLossRatio()); +} + +double LossBasedBweV2::CalculateAverageReportedPacketLossRatio() const { if (num_observations_ <= 0) { return 0.0; } @@ -735,21 +809,64 @@ double LossBasedBweV2::GetAverageReportedLossRatio() const { return num_lost_packets / num_packets; } -DataRate LossBasedBweV2::GetCandidateBandwidthUpperBound() const { - DataRate candidate_bandwidth_upper_bound = max_bitrate_; - if (IsBandwidthLimitedDueToLoss() && - IsValid(bandwidth_limit_in_current_window_)) { - candidate_bandwidth_upper_bound = bandwidth_limit_in_current_window_; +double LossBasedBweV2::CalculateAverageReportedByteLossRatio() const { + if (num_observations_ <= 0) { + return 0.0; } - if (config_->trendline_integration_enabled) { - candidate_bandwidth_upper_bound = - std::min(GetInstantUpperBound(), candidate_bandwidth_upper_bound); - if (IsValid(delay_based_estimate_)) { - candidate_bandwidth_upper_bound = - std::min(delay_based_estimate_, candidate_bandwidth_upper_bound); + DataSize total_bytes = DataSize::Zero(); + DataSize lost_bytes = DataSize::Zero(); + double min_loss_rate = 1.0; + double max_loss_rate = 0.0; + DataSize min_lost_bytes = DataSize::Zero(); + DataSize max_lost_bytes = DataSize::Zero(); + DataSize min_bytes_received = DataSize::Zero(); + DataSize max_bytes_received = DataSize::Zero(); + DataRate send_rate_of_max_loss_observation = DataRate::Zero(); + for (const Observation& observation : observations_) { + if (!observation.IsInitialized()) { + continue; + } + + double instant_temporal_weight = + instant_upper_bound_temporal_weights_[(num_observations_ - 1) - + observation.id]; + total_bytes += instant_temporal_weight * observation.size; + lost_bytes += instant_temporal_weight * observation.lost_size; + + double loss_rate = !observation.size.IsZero() + ? observation.lost_size / observation.size + : 0.0; + if (num_observations_ > 3) { + if (loss_rate > max_loss_rate) { + max_loss_rate = loss_rate; + max_lost_bytes = instant_temporal_weight * observation.lost_size; + max_bytes_received = instant_temporal_weight * observation.size; + send_rate_of_max_loss_observation = observation.sending_rate; + } + if (loss_rate < min_loss_rate) { + min_loss_rate = loss_rate; + min_lost_bytes = instant_temporal_weight * observation.lost_size; + min_bytes_received = instant_temporal_weight * observation.size; + } } } + if (GetMedianSendingRate() * config_->median_sending_rate_factor <= + send_rate_of_max_loss_observation) { + // If the median sending rate is less than half of the sending rate of the + // observation with max loss rate, i.e. we suddenly send a lot of data, then + // the loss rate might not be due to a spike. 
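To make the spike filtering described in the comment above easier to follow outside the diff, here is a minimal self-contained sketch of the same idea. It uses plain doubles instead of DataSize/DataRate, ignores the temporal weights and the num_observations_ > 3 guard, and hard-codes 2.0 where the configurable median_sending_rate_factor would be; it illustrates the technique, it is not the production code.

#include <vector>

// One entry per observation: bytes sent, bytes lost, and the sending rate.
struct ObsSketch {
  double size_bytes = 0.0;
  double lost_bytes = 0.0;
  double send_rate_bps = 0.0;
};

// Unweighted approximation of the byte loss ratio. If the observation with
// the highest loss ratio was also sent much faster than the median rate, the
// loss is attributed to the send-rate burst and every observation is kept;
// otherwise the min- and max-loss observations are dropped to filter spikes.
double SketchAverageByteLossRatio(const std::vector<ObsSketch>& observations,
                                  double median_send_rate_bps) {
  double total = 0.0;
  double lost = 0.0;
  double min_ratio = 1.0;
  double max_ratio = 0.0;
  double min_size = 0.0, min_lost = 0.0;
  double max_size = 0.0, max_lost = 0.0;
  double max_loss_send_rate = 0.0;
  for (const ObsSketch& o : observations) {
    total += o.size_bytes;
    lost += o.lost_bytes;
    double ratio = o.size_bytes > 0.0 ? o.lost_bytes / o.size_bytes : 0.0;
    if (ratio > max_ratio) {
      max_ratio = ratio;
      max_size = o.size_bytes;
      max_lost = o.lost_bytes;
      max_loss_send_rate = o.send_rate_bps;
    }
    if (ratio < min_ratio) {
      min_ratio = ratio;
      min_size = o.size_bytes;
      min_lost = o.lost_bytes;
    }
  }
  if (total <= 0.0) {
    return 0.0;
  }
  if (median_send_rate_bps * 2.0 <= max_loss_send_rate) {
    // Loss coincided with a send-rate burst: keep all observations.
    return lost / total;
  }
  double denominator = total - max_size - min_size;
  return denominator > 0.0 ? (lost - max_lost - min_lost) / denominator
                           : lost / total;
}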
+ return lost_bytes / total_bytes; + } + return (lost_bytes - min_lost_bytes - max_lost_bytes) / + (total_bytes - max_bytes_received - min_bytes_received); +} + +DataRate LossBasedBweV2::GetCandidateBandwidthUpperBound() const { + DataRate candidate_bandwidth_upper_bound = max_bitrate_; + if (IsInLossLimitedState() && IsValid(bandwidth_limit_in_current_window_)) { + candidate_bandwidth_upper_bound = bandwidth_limit_in_current_window_; + } if (!acknowledged_bitrate_.has_value()) return candidate_bandwidth_upper_bound; @@ -771,20 +888,19 @@ DataRate LossBasedBweV2::GetCandidateBandwidthUpperBound() const { std::vector LossBasedBweV2::GetCandidates( bool in_alr) const { + ChannelParameters best_estimate = current_best_estimate_; std::vector bandwidths; - bool can_increase_bitrate = TrendlineEsimateAllowBitrateIncrease(); for (double candidate_factor : config_->candidate_factors) { - if (!can_increase_bitrate && candidate_factor > 1.0) { - continue; - } bandwidths.push_back(candidate_factor * - current_estimate_.loss_limited_bandwidth); + best_estimate.loss_limited_bandwidth); } if (acknowledged_bitrate_.has_value() && - config_->append_acknowledged_rate_candidate && - TrendlineEsimateAllowEmergencyBackoff()) { - if (!(config_->not_use_acked_rate_in_alr && in_alr)) { + config_->append_acknowledged_rate_candidate) { + if (!(config_->not_use_acked_rate_in_alr && in_alr) || + (config_->padding_duration > TimeDelta::Zero() && + last_padding_info_.padding_timestamp + config_->padding_duration >= + last_send_time_most_recent_observation_)) { bandwidths.push_back(*acknowledged_bitrate_ * config_->bandwidth_backoff_lower_bound_factor); } @@ -792,27 +908,26 @@ std::vector LossBasedBweV2::GetCandidates( if (IsValid(delay_based_estimate_) && config_->append_delay_based_estimate_candidate) { - if (can_increase_bitrate && - delay_based_estimate_ > current_estimate_.loss_limited_bandwidth) { + if (delay_based_estimate_ > best_estimate.loss_limited_bandwidth) { bandwidths.push_back(delay_based_estimate_); } } + if (in_alr && config_->append_upper_bound_candidate_in_alr && + best_estimate.loss_limited_bandwidth > GetInstantUpperBound()) { + bandwidths.push_back(GetInstantUpperBound()); + } + const DataRate candidate_bandwidth_upper_bound = GetCandidateBandwidthUpperBound(); std::vector candidates; candidates.resize(bandwidths.size()); for (size_t i = 0; i < bandwidths.size(); ++i) { - ChannelParameters candidate = current_estimate_; - if (config_->trendline_integration_enabled) { - candidate.loss_limited_bandwidth = - std::min(bandwidths[i], candidate_bandwidth_upper_bound); - } else { - candidate.loss_limited_bandwidth = std::min( - bandwidths[i], std::max(current_estimate_.loss_limited_bandwidth, - candidate_bandwidth_upper_bound)); - } + ChannelParameters candidate = best_estimate; + candidate.loss_limited_bandwidth = + std::min(bandwidths[i], std::max(best_estimate.loss_limited_bandwidth, + candidate_bandwidth_upper_bound)); candidate.inherent_loss = GetFeasibleInherentLoss(candidate); candidates[i] = candidate; } @@ -834,16 +949,29 @@ LossBasedBweV2::Derivatives LossBasedBweV2::GetDerivatives( double temporal_weight = temporal_weights_[(num_observations_ - 1) - observation.id]; - - derivatives.first += - temporal_weight * - ((observation.num_lost_packets / loss_probability) - - (observation.num_received_packets / (1.0 - loss_probability))); - derivatives.second -= - temporal_weight * - ((observation.num_lost_packets / std::pow(loss_probability, 2)) + - (observation.num_received_packets / - std::pow(1.0 - 
loss_probability, 2))); + if (config_->use_byte_loss_rate) { + derivatives.first += + temporal_weight * + ((ToKiloBytes(observation.lost_size) / loss_probability) - + (ToKiloBytes(observation.size - observation.lost_size) / + (1.0 - loss_probability))); + derivatives.second -= + temporal_weight * + ((ToKiloBytes(observation.lost_size) / + std::pow(loss_probability, 2)) + + (ToKiloBytes(observation.size - observation.lost_size) / + std::pow(1.0 - loss_probability, 2))); + } else { + derivatives.first += + temporal_weight * + ((observation.num_lost_packets / loss_probability) - + (observation.num_received_packets / (1.0 - loss_probability))); + derivatives.second -= + temporal_weight * + ((observation.num_lost_packets / std::pow(loss_probability, 2)) + + (observation.num_received_packets / + std::pow(1.0 - loss_probability, 2))); + } } if (derivatives.second >= 0.0) { @@ -886,11 +1014,10 @@ double LossBasedBweV2::AdjustBiasFactor(double loss_rate, double LossBasedBweV2::GetHighBandwidthBias(DataRate bandwidth) const { if (IsValid(bandwidth)) { - const double average_reported_loss_ratio = GetAverageReportedLossRatio(); - return AdjustBiasFactor(average_reported_loss_ratio, + return AdjustBiasFactor(average_reported_loss_ratio_, config_->higher_bandwidth_bias_factor) * bandwidth.kbps() + - AdjustBiasFactor(average_reported_loss_ratio, + AdjustBiasFactor(average_reported_loss_ratio_, config_->higher_log_bandwidth_bias_factor) * std::log(1.0 + bandwidth.kbps()); } @@ -915,13 +1042,23 @@ double LossBasedBweV2::GetObjective( double temporal_weight = temporal_weights_[(num_observations_ - 1) - observation.id]; - - objective += - temporal_weight * - ((observation.num_lost_packets * std::log(loss_probability)) + - (observation.num_received_packets * std::log(1.0 - loss_probability))); - objective += - temporal_weight * high_bandwidth_bias * observation.num_packets; + if (config_->use_byte_loss_rate) { + objective += + temporal_weight * + ((ToKiloBytes(observation.lost_size) * std::log(loss_probability)) + + (ToKiloBytes(observation.size - observation.lost_size) * + std::log(1.0 - loss_probability))); + objective += + temporal_weight * high_bandwidth_bias * ToKiloBytes(observation.size); + } else { + objective += + temporal_weight * + ((observation.num_lost_packets * std::log(loss_probability)) + + (observation.num_received_packets * + std::log(1.0 - loss_probability))); + objective += + temporal_weight * high_bandwidth_bias * observation.num_packets; + } } return objective; @@ -952,30 +1089,33 @@ DataRate LossBasedBweV2::GetInstantUpperBound() const { void LossBasedBweV2::CalculateInstantUpperBound() { DataRate instant_limit = max_bitrate_; - const double average_reported_loss_ratio = GetAverageReportedLossRatio(); - if (average_reported_loss_ratio > config_->instant_upper_bound_loss_offset) { + if (average_reported_loss_ratio_ > config_->instant_upper_bound_loss_offset) { instant_limit = config_->instant_upper_bound_bandwidth_balance / - (average_reported_loss_ratio - + (average_reported_loss_ratio_ - config_->instant_upper_bound_loss_offset); - if (average_reported_loss_ratio > config_->high_loss_rate_threshold) { - instant_limit = std::min( - instant_limit, DataRate::KilobitsPerSec(std::max( - static_cast(min_bitrate_.kbps()), - config_->bandwidth_cap_at_high_loss_rate.kbps() - - config_->slope_of_bwe_high_loss_func * - average_reported_loss_ratio))); - } } - if (IsBandwidthLimitedDueToLoss()) { - if (IsValid(upper_link_capacity_) && - config_->bound_by_upper_link_capacity_when_loss_limited) { - 
instant_limit = std::min(instant_limit, upper_link_capacity_); - } - } cached_instant_upper_bound_ = instant_limit; } +DataRate LossBasedBweV2::GetInstantLowerBound() const { + return cached_instant_lower_bound_.value_or(DataRate::Zero()); +} + +void LossBasedBweV2::CalculateInstantLowerBound() { + DataRate instance_lower_bound = DataRate::Zero(); + if (IsValid(acknowledged_bitrate_) && + config_->lower_bound_by_acked_rate_factor > 0.0) { + instance_lower_bound = config_->lower_bound_by_acked_rate_factor * + acknowledged_bitrate_.value(); + } + + if (IsValid(min_bitrate_)) { + instance_lower_bound = std::max(instance_lower_bound, min_bitrate_); + } + cached_instant_lower_bound_ = instance_lower_bound; +} + void LossBasedBweV2::CalculateTemporalWeights() { for (int i = 0; i < config_->observation_window_size; ++i) { temporal_weights_[i] = std::pow(config_->temporal_weight_factor, i); @@ -999,73 +1139,40 @@ void LossBasedBweV2::NewtonsMethodUpdate( } } -bool LossBasedBweV2::TrendlineEsimateAllowBitrateIncrease() const { - if (!config_->trendline_integration_enabled) { - return true; - } - - for (const auto& detector_state : delay_detector_states_) { - if (detector_state == BandwidthUsage::kBwOverusing || - detector_state == BandwidthUsage::kBwUnderusing) { - return false; - } - } - return true; -} - -bool LossBasedBweV2::TrendlineEsimateAllowEmergencyBackoff() const { - if (!config_->trendline_integration_enabled) { - return true; - } - - if (!config_->use_acked_bitrate_only_when_overusing) { - return true; - } - - for (const auto& detector_state : delay_detector_states_) { - if (detector_state == BandwidthUsage::kBwOverusing) { - return true; - } - } - - return false; -} - bool LossBasedBweV2::PushBackObservation( - rtc::ArrayView packet_results, - BandwidthUsage delay_detector_state) { - delay_detector_states_.push_front(delay_detector_state); - if (static_cast(delay_detector_states_.size()) > - config_->trendline_observations_window_size) { - delay_detector_states_.pop_back(); - } - + ArrayView packet_results) { if (packet_results.empty()) { return false; } - PacketResultsSummary packet_results_summary = - GetPacketResultsSummary(packet_results); - - partial_observation_.num_packets += packet_results_summary.num_packets; - partial_observation_.num_lost_packets += - packet_results_summary.num_lost_packets; - partial_observation_.size += packet_results_summary.total_size; + partial_observation_.num_packets += packet_results.size(); + Timestamp last_send_time = Timestamp::MinusInfinity(); + Timestamp first_send_time = Timestamp::PlusInfinity(); + for (const PacketResult& packet : packet_results) { + if (packet.IsReceived()) { + partial_observation_.lost_packets.erase( + packet.sent_packet.sequence_number); + } else { + partial_observation_.lost_packets.emplace( + packet.sent_packet.sequence_number, packet.sent_packet.size); + } + partial_observation_.size += packet.sent_packet.size; + last_send_time = std::max(last_send_time, packet.sent_packet.send_time); + first_send_time = std::min(first_send_time, packet.sent_packet.send_time); + } // This is the first packet report we have received. if (!IsValid(last_send_time_most_recent_observation_)) { - last_send_time_most_recent_observation_ = - packet_results_summary.first_send_time; + last_send_time_most_recent_observation_ = first_send_time; } - const Timestamp last_send_time = packet_results_summary.last_send_time; const TimeDelta observation_duration = last_send_time - last_send_time_most_recent_observation_; // Too small to be meaningful. 
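Since the feedback bookkeeping above is spread across several hunks, a compact sketch of the same flow under simplifying assumptions: plain structs and millisecond doubles stand in for PacketResult, DataSize and Timestamp, the caller is expected to reset the partial observation after a successful commit, and the names (FeedbackSketch, AccumulateFeedback) are illustrative rather than real WebRTC symbols.

#include <algorithm>
#include <cstdint>
#include <unordered_map>
#include <vector>

// Illustrative stand-ins for PacketResult and the partial observation.
struct FeedbackSketch {
  int64_t sequence_number = 0;
  double size_bytes = 0.0;
  double send_time_ms = 0.0;
  bool received = false;
};

struct PartialObservationSketch {
  int num_packets = 0;
  double size_bytes = 0.0;
  // Lost packets keyed by sequence number; a later report saying the packet
  // did arrive after all simply erases the entry again.
  std::unordered_map<int64_t, double> lost_bytes_by_sequence_number;
};

// Folds one batch of feedback into the running partial observation. Returns
// true once the covered sending time reaches the duration lower bound, i.e.
// once the caller may turn the partial observation into a full one (and then
// reset it).
bool AccumulateFeedback(const std::vector<FeedbackSketch>& feedback,
                        double duration_lower_bound_ms,
                        double& last_committed_send_time_ms,
                        PartialObservationSketch& partial) {
  double last_send_time_ms = last_committed_send_time_ms;
  for (const FeedbackSketch& packet : feedback) {
    ++partial.num_packets;
    partial.size_bytes += packet.size_bytes;
    if (packet.received) {
      partial.lost_bytes_by_sequence_number.erase(packet.sequence_number);
    } else {
      partial.lost_bytes_by_sequence_number.emplace(packet.sequence_number,
                                                    packet.size_bytes);
    }
    last_send_time_ms = std::max(last_send_time_ms, packet.send_time_ms);
  }
  // Too little sending time covered to be meaningful: keep accumulating.
  if (last_send_time_ms - last_committed_send_time_ms <
      duration_lower_bound_ms) {
    return false;
  }
  last_committed_send_time_ms = last_send_time_ms;
  return true;
}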
+ // To consider: what if it is too long?, i.e. we did not receive any packets + // for a long time, then all the packets we received are too old. if (observation_duration <= TimeDelta::Zero() || - (observation_duration < config_->observation_duration_lower_bound && - (delay_detector_state != BandwidthUsage::kBwOverusing || - !config_->trendline_integration_enabled))) { + observation_duration < config_->observation_duration_lower_bound) { return false; } @@ -1073,27 +1180,66 @@ bool LossBasedBweV2::PushBackObservation( Observation observation; observation.num_packets = partial_observation_.num_packets; - observation.num_lost_packets = partial_observation_.num_lost_packets; + observation.num_lost_packets = partial_observation_.lost_packets.size(); observation.num_received_packets = observation.num_packets - observation.num_lost_packets; observation.sending_rate = GetSendingRate(partial_observation_.size / observation_duration); + for (auto const& [key, packet_size] : partial_observation_.lost_packets) { + observation.lost_size += packet_size; + } + observation.size = partial_observation_.size; observation.id = num_observations_++; observations_[observation.id % config_->observation_window_size] = observation; partial_observation_ = PartialObservation(); - + UpdateAverageReportedLossRatio(); CalculateInstantUpperBound(); return true; } -bool LossBasedBweV2::IsBandwidthLimitedDueToLoss() const { - return current_state_ != LossBasedState::kDelayBasedEstimate; +bool LossBasedBweV2::IsInLossLimitedState() const { + return loss_based_result_.state != LossBasedState::kDelayBasedEstimate; +} + +bool LossBasedBweV2::CanKeepIncreasingState(DataRate estimate) const { + if (config_->padding_duration == TimeDelta::Zero() || + loss_based_result_.state != LossBasedState::kIncreaseUsingPadding) + return true; + + // Keep using the kIncreaseUsingPadding if either the state has been + // kIncreaseUsingPadding for less than kPaddingDuration or the estimate + // increases. 
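Restated outside the class for clarity, the rule in the comment above boils down to the following predicate; plain doubles stand in for Timestamp and DataRate, and the function name is illustrative only.

// True if the estimator may stay in LossBasedState::kIncreaseUsingPadding:
// either padding was requested recently (within padding_duration of the most
// recently observed send time) or the candidate estimate is still above the
// rate at which padding was requested.
bool CanKeepIncreasingWithPaddingSketch(double padding_timestamp_ms,
                                        double padding_duration_ms,
                                        double last_send_time_ms,
                                        double padding_rate_bps,
                                        double candidate_estimate_bps) {
  return padding_timestamp_ms + padding_duration_ms >= last_send_time_ms ||
         padding_rate_bps < candidate_estimate_bps;
}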
+ return last_padding_info_.padding_timestamp + config_->padding_duration >= + last_send_time_most_recent_observation_ || + last_padding_info_.padding_rate < estimate; +} + +bool LossBasedBweV2::PaceAtLossBasedEstimate() const { + return config_->pace_at_loss_based_estimate && + loss_based_result_.state != LossBasedState::kDelayBasedEstimate; } -bool LossBasedBweV2::IsRequestingProbe() const { - return current_state_ == LossBasedState::kIncreasing; +DataRate LossBasedBweV2::GetMedianSendingRate() const { + std::vector sending_rates; + for (const Observation& observation : observations_) { + if (!observation.IsInitialized() || !IsValid(observation.sending_rate) || + observation.sending_rate.IsZero()) { + continue; + } + sending_rates.push_back(observation.sending_rate); + } + if (sending_rates.empty()) { + return DataRate::Zero(); + } + absl::c_sort(sending_rates); + if (sending_rates.size() % 2 == 0) { + return (sending_rates[sending_rates.size() / 2 - 1] + + sending_rates[sending_rates.size() / 2]) / + 2; + } + return sending_rates[sending_rates.size() / 2]; } } // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h b/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h index f5a6396de2..5102f463f9 100644 --- a/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h +++ b/modules/congestion_controller/goog_cc/loss_based_bwe_v2.h @@ -11,14 +11,13 @@ #ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_LOSS_BASED_BWE_V2_H_ #define MODULES_CONGESTION_CONTROLLER_GOOG_CC_LOSS_BASED_BWE_V2_H_ -#include -#include +#include +#include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/field_trials_view.h" -#include "api/network_state_predictor.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" @@ -31,8 +30,12 @@ namespace webrtc { // when network is loss limited, or equal to the delay based estimate. enum class LossBasedState { kIncreasing = 0, - kDecreasing = 1, - kDelayBasedEstimate = 2 + // TODO(bugs.webrtc.org/12707): Remove one of the increasing states once we + // have decided if padding is usefull for ramping up when BWE is loss + // limited. + kIncreaseUsingPadding = 1, + kDecreasing = 2, + kDelayBasedEstimate = 3 }; class LossBasedBweV2 { @@ -58,19 +61,24 @@ class LossBasedBweV2 { // initialized with a BWE and then has received enough `PacketResult`s. bool IsReady() const; + // Returns true if loss based BWE is ready to be used in the start phase. + bool ReadyToUseInStartPhase() const; + + // Returns true if loss based BWE can be used in the start phase. + bool UseInStartPhase() const; + // Returns `DataRate::PlusInfinity` if no BWE can be calculated. Result GetLossBasedResult() const; void SetAcknowledgedBitrate(DataRate acknowledged_bitrate); - void SetBandwidthEstimate(DataRate bandwidth_estimate); void SetMinMaxBitrate(DataRate min_bitrate, DataRate max_bitrate); - void UpdateBandwidthEstimate( - rtc::ArrayView packet_results, - DataRate delay_based_estimate, - BandwidthUsage delay_detector_state, - absl::optional probe_bitrate, - DataRate upper_link_capacity, - bool in_alr); + void UpdateBandwidthEstimate(ArrayView packet_results, + DataRate delay_based_estimate, + bool in_alr); + bool PaceAtLossBasedEstimate() const; + + // For unit testing only. 
+ void SetBandwidthEstimate(DataRate bandwidth_estimate); private: struct ChannelParameters { @@ -80,6 +88,8 @@ class LossBasedBweV2 { struct Config { double bandwidth_rampup_upper_bound_factor = 0.0; + double bandwidth_rampup_upper_bound_factor_in_hold = 0; + double bandwidth_rampup_hold_threshold = 0; double rampup_acceleration_max_factor = 0.0; TimeDelta rampup_acceleration_maxout_time = TimeDelta::Zero(); std::vector candidate_factors; @@ -96,6 +106,7 @@ class LossBasedBweV2 { double newton_step_size = 0.0; bool append_acknowledged_rate_candidate = true; bool append_delay_based_estimate_candidate = false; + bool append_upper_bound_candidate_in_alr = false; TimeDelta observation_duration_lower_bound = TimeDelta::Zero(); int observation_window_size = 0; double sending_rate_smoothing_factor = 0.0; @@ -104,19 +115,19 @@ class LossBasedBweV2 { double instant_upper_bound_loss_offset = 0.0; double temporal_weight_factor = 0.0; double bandwidth_backoff_lower_bound_factor = 0.0; - bool trendline_integration_enabled = false; - int trendline_observations_window_size = 0; double max_increase_factor = 0.0; TimeDelta delayed_increase_window = TimeDelta::Zero(); - bool use_acked_bitrate_only_when_overusing = false; bool not_increase_if_inherent_loss_less_than_average_loss = false; - double high_loss_rate_threshold = 1.0; - DataRate bandwidth_cap_at_high_loss_rate = DataRate::MinusInfinity(); - double slope_of_bwe_high_loss_func = 1000.0; - bool probe_integration_enabled = false; - TimeDelta probe_expiration = TimeDelta::Zero(); - bool bound_by_upper_link_capacity_when_loss_limited = false; bool not_use_acked_rate_in_alr = false; + bool use_in_start_phase = false; + int min_num_observations = 0; + double lower_bound_by_acked_rate_factor = 0.0; + double hold_duration_factor = 0.0; + bool use_byte_loss_rate = false; + TimeDelta padding_duration = TimeDelta::Zero(); + bool bound_best_candidate = false; + bool pace_at_loss_based_estimate = false; + double median_sending_rate_factor = 0.0; }; struct Derivatives { @@ -131,21 +142,39 @@ class LossBasedBweV2 { int num_lost_packets = 0; int num_received_packets = 0; DataRate sending_rate = DataRate::MinusInfinity(); + DataSize size = DataSize::Zero(); + DataSize lost_size = DataSize::Zero(); int id = -1; }; struct PartialObservation { int num_packets = 0; - int num_lost_packets = 0; + std::unordered_map lost_packets; DataSize size = DataSize::Zero(); }; - static absl::optional CreateConfig( + struct PaddingInfo { + DataRate padding_rate = DataRate::MinusInfinity(); + Timestamp padding_timestamp = Timestamp::MinusInfinity(); + }; + + struct HoldInfo { + Timestamp timestamp = Timestamp::MinusInfinity(); + TimeDelta duration = TimeDelta::Zero(); + DataRate rate = DataRate::PlusInfinity(); + }; + + static std::optional CreateConfig( const FieldTrialsView* key_value_config); bool IsConfigValid() const; // Returns `0.0` if not enough loss statistics have been received. - double GetAverageReportedLossRatio() const; + void UpdateAverageReportedLossRatio(); + double CalculateAverageReportedPacketLossRatio() const; + // Calculates the average loss ratio over the last `observation_window_size` + // observations but skips the observation with min and max loss ratio in order + // to filter out loss spikes. 
+ double CalculateAverageReportedByteLossRatio() const; std::vector GetCandidates(bool in_alr) const; DataRate GetCandidateBandwidthUpperBound() const; Derivatives GetDerivatives(const ChannelParameters& channel_parameters) const; @@ -158,51 +187,41 @@ class LossBasedBweV2 { DataRate GetSendingRate(DataRate instantaneous_sending_rate) const; DataRate GetInstantUpperBound() const; void CalculateInstantUpperBound(); + DataRate GetInstantLowerBound() const; + void CalculateInstantLowerBound(); void CalculateTemporalWeights(); void NewtonsMethodUpdate(ChannelParameters& channel_parameters) const; - // Returns false if there exists a kBwOverusing or kBwUnderusing in the - // window. - bool TrendlineEsimateAllowBitrateIncrease() const; - - // Returns true if there exists an overusing state in the window. - bool TrendlineEsimateAllowEmergencyBackoff() const; - // Returns false if no observation was created. - bool PushBackObservation(rtc::ArrayView packet_results, - BandwidthUsage delay_detector_state); - void UpdateTrendlineEstimator( - const std::vector& packet_feedbacks, - Timestamp at_time); - void UpdateDelayDetector(BandwidthUsage delay_detector_state); - bool IsEstimateIncreasingWhenLossLimited( - const ChannelParameters& best_candidate); - bool IsBandwidthLimitedDueToLoss() const; - void SetProbeBitrate(absl::optional probe_bitrate); - bool IsRequestingProbe() const; - - absl::optional acknowledged_bitrate_; - absl::optional config_; - ChannelParameters current_estimate_; + bool PushBackObservation(ArrayView packet_results); + bool IsEstimateIncreasingWhenLossLimited(DataRate old_estimate, + DataRate new_estimate); + bool IsInLossLimitedState() const; + bool CanKeepIncreasingState(DataRate estimate) const; + DataRate GetMedianSendingRate() const; + + std::optional acknowledged_bitrate_; + std::optional config_; + ChannelParameters current_best_estimate_; int num_observations_ = 0; std::vector observations_; PartialObservation partial_observation_; Timestamp last_send_time_most_recent_observation_ = Timestamp::PlusInfinity(); Timestamp last_time_estimate_reduced_ = Timestamp::MinusInfinity(); - absl::optional cached_instant_upper_bound_; + std::optional cached_instant_upper_bound_; + std::optional cached_instant_lower_bound_; std::vector instant_upper_bound_temporal_weights_; std::vector temporal_weights_; - std::deque delay_detector_states_; Timestamp recovering_after_loss_timestamp_ = Timestamp::MinusInfinity(); DataRate bandwidth_limit_in_current_window_ = DataRate::PlusInfinity(); DataRate min_bitrate_ = DataRate::KilobitsPerSec(1); DataRate max_bitrate_ = DataRate::PlusInfinity(); - LossBasedState current_state_ = LossBasedState::kDelayBasedEstimate; - DataRate probe_bitrate_ = DataRate::PlusInfinity(); DataRate delay_based_estimate_ = DataRate::PlusInfinity(); - DataRate upper_link_capacity_ = DataRate::PlusInfinity(); - Timestamp last_probe_timestamp_ = Timestamp::MinusInfinity(); + LossBasedBweV2::Result loss_based_result_ = LossBasedBweV2::Result(); + HoldInfo last_hold_info_ = HoldInfo(); + PaddingInfo last_padding_info_ = PaddingInfo(); + double average_reported_loss_ratio_ = 0.0; }; } // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc b/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc index d745f37a5c..f900473db8 100644 --- a/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc +++ b/modules/congestion_controller/goog_cc/loss_based_bwe_v2_test.cc @@ -10,10 +10,10 @@ #include 
"modules/congestion_controller/goog_cc/loss_based_bwe_v2.h" +#include #include #include -#include "api/network_state_predictor.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" @@ -29,17 +29,16 @@ namespace { using ::webrtc::test::ExplicitKeyValueConfig; -constexpr TimeDelta kObservationDurationLowerBound = TimeDelta::Millis(200); +constexpr TimeDelta kObservationDurationLowerBound = TimeDelta::Millis(250); constexpr TimeDelta kDelayedIncreaseWindow = TimeDelta::Millis(300); constexpr double kMaxIncreaseFactor = 1.5; +constexpr int kPacketSize = 15'000; class LossBasedBweV2Test : public ::testing::TestWithParam { protected: - std::string Config(bool enabled, - bool valid, - bool trendline_integration_enabled) { + std::string Config(bool enabled, bool valid) { char buffer[1024]; - rtc::SimpleStringBuilder config_string(buffer); + SimpleStringBuilder config_string(buffer); config_string << "WebRTC-Bwe-LossBasedBweV2/"; @@ -54,16 +53,8 @@ class LossBasedBweV2Test : public ::testing::TestWithParam { } else { config_string << ",BwRampupUpperBoundFactor:0.0"; } - - if (trendline_integration_enabled) { - config_string << ",TrendlineIntegrationEnabled:true"; - } else { - config_string << ",TrendlineIntegrationEnabled:false"; - } - config_string << ",CandidateFactors:1.1|1.0|0.95,HigherBwBiasFactor:0.01," - "DelayBasedCandidate:true," "InherentLossLowerBound:0.001,InherentLossUpperBoundBwBalance:" "14kbps," "InherentLossUpperBoundOffset:0.9,InitialInherentLossEstimate:0.01," @@ -71,7 +62,8 @@ class LossBasedBweV2Test : public ::testing::TestWithParam { "SendingRateSmoothingFactor:0.01," "InstantUpperBoundTemporalWeightFactor:0.97," "InstantUpperBoundBwBalance:90kbps," - "InstantUpperBoundLossOffset:0.1,TemporalWeightFactor:0.98"; + "InstantUpperBoundLossOffset:0.1,TemporalWeightFactor:0.98," + "MinNumObservations:1"; config_string.AppendFormat( ",ObservationDurationLowerBound:%dms", @@ -85,11 +77,27 @@ class LossBasedBweV2Test : public ::testing::TestWithParam { return config_string.str(); } + std::string ShortObservationConfig(std::string custom_config) { + char buffer[1024]; + SimpleStringBuilder config_string(buffer); + + config_string << "WebRTC-Bwe-LossBasedBweV2/" + "MinNumObservations:1,ObservationWindowSize:2,"; + config_string << custom_config; + config_string << "/"; + + return config_string.str(); + } + std::vector CreatePacketResultsWithReceivedPackets( Timestamp first_packet_timestamp) { std::vector enough_feedback(2); - enough_feedback[0].sent_packet.size = DataSize::Bytes(15'000); - enough_feedback[1].sent_packet.size = DataSize::Bytes(15'000); + enough_feedback[0].sent_packet.sequence_number = + transport_sequence_number_++; + enough_feedback[1].sent_packet.sequence_number = + transport_sequence_number_++; + enough_feedback[0].sent_packet.size = DataSize::Bytes(kPacketSize); + enough_feedback[1].sent_packet.size = DataSize::Bytes(kPacketSize); enough_feedback[0].sent_packet.send_time = first_packet_timestamp; enough_feedback[1].sent_packet.send_time = first_packet_timestamp + kObservationDurationLowerBound; @@ -100,12 +108,14 @@ class LossBasedBweV2Test : public ::testing::TestWithParam { return enough_feedback; } - std::vector CreatePacketResultsWith10pLossRate( - Timestamp first_packet_timestamp) { + std::vector CreatePacketResultsWith10pPacketLossRate( + Timestamp first_packet_timestamp, + DataSize lost_packet_size = DataSize::Bytes(kPacketSize)) { std::vector enough_feedback(10); - enough_feedback[0].sent_packet.size = 
DataSize::Bytes(15'000); for (unsigned i = 0; i < enough_feedback.size(); ++i) { - enough_feedback[i].sent_packet.size = DataSize::Bytes(15'000); + enough_feedback[i].sent_packet.sequence_number = + transport_sequence_number_++; + enough_feedback[i].sent_packet.size = DataSize::Bytes(kPacketSize); enough_feedback[i].sent_packet.send_time = first_packet_timestamp + static_cast(i) * kObservationDurationLowerBound; @@ -114,14 +124,19 @@ class LossBasedBweV2Test : public ::testing::TestWithParam { static_cast(i + 1) * kObservationDurationLowerBound; } enough_feedback[9].receive_time = Timestamp::PlusInfinity(); + enough_feedback[9].sent_packet.size = lost_packet_size; return enough_feedback; } - std::vector CreatePacketResultsWith50pLossRate( + std::vector CreatePacketResultsWith50pPacketLossRate( Timestamp first_packet_timestamp) { std::vector enough_feedback(2); - enough_feedback[0].sent_packet.size = DataSize::Bytes(15'000); - enough_feedback[1].sent_packet.size = DataSize::Bytes(15'000); + enough_feedback[0].sent_packet.sequence_number = + transport_sequence_number_++; + enough_feedback[1].sent_packet.sequence_number = + transport_sequence_number_++; + enough_feedback[0].sent_packet.size = DataSize::Bytes(kPacketSize); + enough_feedback[1].sent_packet.size = DataSize::Bytes(kPacketSize); enough_feedback[0].sent_packet.send_time = first_packet_timestamp; enough_feedback[1].sent_packet.send_time = first_packet_timestamp + kObservationDurationLowerBound; @@ -132,148 +147,149 @@ class LossBasedBweV2Test : public ::testing::TestWithParam { } std::vector CreatePacketResultsWith100pLossRate( - Timestamp first_packet_timestamp) { - std::vector enough_feedback(2); - enough_feedback[0].sent_packet.size = DataSize::Bytes(15'000); - enough_feedback[1].sent_packet.size = DataSize::Bytes(15'000); - enough_feedback[0].sent_packet.send_time = first_packet_timestamp; - enough_feedback[1].sent_packet.send_time = + Timestamp first_packet_timestamp, + unsigned num_packets = 2) { + std::vector enough_feedback(num_packets); + for (unsigned i = 0; i < num_packets - 1; ++i) { + enough_feedback[i].sent_packet.sequence_number = + transport_sequence_number_++; + enough_feedback[i].sent_packet.size = DataSize::Bytes(kPacketSize); + enough_feedback[i].sent_packet.send_time = + first_packet_timestamp + TimeDelta::Millis(i * 10); + enough_feedback[i].receive_time = Timestamp::PlusInfinity(); + } + enough_feedback[num_packets - 1].sent_packet.sequence_number = + transport_sequence_number_++; + enough_feedback[num_packets - 1].sent_packet.size = + DataSize::Bytes(kPacketSize); + enough_feedback[num_packets - 1].sent_packet.send_time = first_packet_timestamp + kObservationDurationLowerBound; - enough_feedback[0].receive_time = Timestamp::PlusInfinity(); - enough_feedback[1].receive_time = Timestamp::PlusInfinity(); + enough_feedback[num_packets - 1].receive_time = Timestamp::PlusInfinity(); return enough_feedback; } + + private: + int64_t transport_sequence_number_ = 0; }; -TEST_P(LossBasedBweV2Test, EnabledWhenGivenValidConfigurationValues) { +TEST_F(LossBasedBweV2Test, EnabledWhenGivenValidConfigurationValues) { ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); EXPECT_TRUE(loss_based_bandwidth_estimator.IsEnabled()); } -TEST_P(LossBasedBweV2Test, DisabledWhenGivenDisabledConfiguration) { +TEST_F(LossBasedBweV2Test, 
DisabledWhenGivenDisabledConfiguration) { ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/false, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/false, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); EXPECT_FALSE(loss_based_bandwidth_estimator.IsEnabled()); } -TEST_P(LossBasedBweV2Test, DisabledWhenGivenNonValidConfigurationValues) { +TEST_F(LossBasedBweV2Test, DisabledWhenGivenNonValidConfigurationValues) { ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/false, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/false)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); EXPECT_FALSE(loss_based_bandwidth_estimator.IsEnabled()); } -TEST_P(LossBasedBweV2Test, DisabledWhenGivenNonPositiveCandidateFactor) { +TEST_F(LossBasedBweV2Test, DisabledWhenGivenNonPositiveCandidateFactor) { ExplicitKeyValueConfig key_value_config_negative_candidate_factor( - "WebRTC-Bwe-LossBasedBweV2/Enabled:true,CandidateFactors:-1.3|1.1/"); + "WebRTC-Bwe-LossBasedBweV2/CandidateFactors:-1.3|1.1/"); LossBasedBweV2 loss_based_bandwidth_estimator_1( &key_value_config_negative_candidate_factor); EXPECT_FALSE(loss_based_bandwidth_estimator_1.IsEnabled()); ExplicitKeyValueConfig key_value_config_zero_candidate_factor( - "WebRTC-Bwe-LossBasedBweV2/Enabled:true,CandidateFactors:0.0|1.1/"); + "WebRTC-Bwe-LossBasedBweV2/CandidateFactors:0.0|1.1/"); LossBasedBweV2 loss_based_bandwidth_estimator_2( &key_value_config_zero_candidate_factor); EXPECT_FALSE(loss_based_bandwidth_estimator_2.IsEnabled()); } -TEST_P(LossBasedBweV2Test, +TEST_F(LossBasedBweV2Test, DisabledWhenGivenConfigurationThatDoesNotAllowGeneratingCandidates) { ExplicitKeyValueConfig key_value_config( "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:1.0,AckedRateCandidate:false," + "CandidateFactors:1.0,AckedRateCandidate:false," "DelayBasedCandidate:false/"); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); EXPECT_FALSE(loss_based_bandwidth_estimator.IsEnabled()); } -TEST_P(LossBasedBweV2Test, ReturnsDelayBasedEstimateWhenDisabled) { +TEST_F(LossBasedBweV2Test, ReturnsDelayBasedEstimateWhenDisabled) { ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/false, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/false, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( /*packet_results=*/{}, /*delay_based_estimate=*/DataRate::KilobitsPerSec(100), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); EXPECT_EQ( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(100)); } -TEST_P(LossBasedBweV2Test, +TEST_F(LossBasedBweV2Test, ReturnsDelayBasedEstimateWhenWhenGivenNonValidConfigurationValues) { ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/false, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/false)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( /*packet_results=*/{}, /*delay_based_estimate=*/DataRate::KilobitsPerSec(100), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), 
/*in_alr=*/false); + + /*in_alr=*/false); EXPECT_EQ( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(100)); } -TEST_P(LossBasedBweV2Test, +TEST_F(LossBasedBweV2Test, BandwidthEstimateGivenInitializationAndThenFeedback) { std::vector enough_feedback = CreatePacketResultsWithReceivedPackets( /*first_packet_timestamp=*/Timestamp::Zero()); ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); EXPECT_TRUE(loss_based_bandwidth_estimator.IsReady()); EXPECT_TRUE(loss_based_bandwidth_estimator.GetLossBasedResult() .bandwidth_estimate.IsFinite()); } -TEST_P(LossBasedBweV2Test, NoBandwidthEstimateGivenNoInitialization) { +TEST_F(LossBasedBweV2Test, NoBandwidthEstimateGivenNoInitialization) { std::vector enough_feedback = CreatePacketResultsWithReceivedPackets( /*first_packet_timestamp=*/Timestamp::Zero()); ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); EXPECT_FALSE(loss_based_bandwidth_estimator.IsReady()); EXPECT_TRUE(loss_based_bandwidth_estimator.GetLossBasedResult() .bandwidth_estimate.IsPlusInfinity()); } -TEST_P(LossBasedBweV2Test, NoBandwidthEstimateGivenNotEnoughFeedback) { +TEST_F(LossBasedBweV2Test, NoBandwidthEstimateGivenNotEnoughFeedback) { // Create packet results where the observation duration is less than the lower // bound. 
PacketResult not_enough_feedback[2]; @@ -288,8 +304,7 @@ TEST_P(LossBasedBweV2Test, NoBandwidthEstimateGivenNotEnoughFeedback) { Timestamp::Zero() + kObservationDurationLowerBound; ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.SetBandwidthEstimate( @@ -301,16 +316,15 @@ TEST_P(LossBasedBweV2Test, NoBandwidthEstimateGivenNotEnoughFeedback) { loss_based_bandwidth_estimator.UpdateBandwidthEstimate( not_enough_feedback, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); EXPECT_FALSE(loss_based_bandwidth_estimator.IsReady()); EXPECT_TRUE(loss_based_bandwidth_estimator.GetLossBasedResult() .bandwidth_estimate.IsPlusInfinity()); } -TEST_P(LossBasedBweV2Test, +TEST_F(LossBasedBweV2Test, SetValueIsTheEstimateUntilAdditionalFeedbackHasBeenReceived) { std::vector enough_feedback_1 = CreatePacketResultsWithReceivedPackets( @@ -321,17 +335,15 @@ TEST_P(LossBasedBweV2Test, 2 * kObservationDurationLowerBound); ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); EXPECT_NE( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, @@ -346,16 +358,15 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); EXPECT_NE( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(600)); } -TEST_P(LossBasedBweV2Test, +TEST_F(LossBasedBweV2Test, SetAcknowledgedBitrateOnlyAffectsTheBweWhenAdditionalFeedbackIsGiven) { std::vector enough_feedback_1 = CreatePacketResultsWithReceivedPackets( @@ -366,8 +377,7 @@ TEST_P(LossBasedBweV2Test, 2 * kObservationDurationLowerBound); ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator_1(&key_value_config); LossBasedBweV2 loss_based_bandwidth_estimator_2(&key_value_config); @@ -377,14 +387,12 @@ TEST_P(LossBasedBweV2Test, DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator_1.UpdateBandwidthEstimate( enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); loss_based_bandwidth_estimator_2.UpdateBandwidthEstimate( enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), - 
BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); EXPECT_EQ( loss_based_bandwidth_estimator_1.GetLossBasedResult().bandwidth_estimate, @@ -399,29 +407,26 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator_1.UpdateBandwidthEstimate( enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); loss_based_bandwidth_estimator_2.UpdateBandwidthEstimate( enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); EXPECT_NE( loss_based_bandwidth_estimator_1.GetLossBasedResult().bandwidth_estimate, loss_based_bandwidth_estimator_2.GetLossBasedResult().bandwidth_estimate); } -TEST_P(LossBasedBweV2Test, +TEST_F(LossBasedBweV2Test, BandwidthEstimateIsCappedToBeTcpFairGivenTooHighLossRate) { std::vector enough_feedback_no_received_packets = CreatePacketResultsWith100pLossRate( /*first_packet_timestamp=*/Timestamp::Zero()); ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.SetBandwidthEstimate( @@ -429,54 +434,16 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_no_received_packets, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + /*in_alr=*/false); EXPECT_EQ( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(100)); } -TEST_P(LossBasedBweV2Test, BandwidthEstimateNotIncreaseWhenNetworkUnderusing) { - if (!GetParam()) { - GTEST_SKIP() << "This test should run only if " - "trendline_integration_enabled is enabled"; - } - std::vector enough_feedback_1 = - CreatePacketResultsWithReceivedPackets( - /*first_packet_timestamp=*/Timestamp::Zero()); - std::vector enough_feedback_2 = - CreatePacketResultsWithReceivedPackets( - /*first_packet_timestamp=*/Timestamp::Zero() + - 2 * kObservationDurationLowerBound); - - ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); - LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - - loss_based_bandwidth_estimator.SetBandwidthEstimate( - DataRate::KilobitsPerSec(600)); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwUnderusing, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); - EXPECT_LE( - loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, - DataRate::KilobitsPerSec(600)); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); - EXPECT_LE( - 
loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, - DataRate::KilobitsPerSec(600)); -} - // When network is normal, estimate can increase but never be higher than // the delay based estimate. -TEST_P(LossBasedBweV2Test, +TEST_F(LossBasedBweV2Test, BandwidthEstimateCappedByDelayBasedEstimateWhenNetworkNormal) { // Create two packet results, network is in normal state, 100% packets are // received, and no delay increase. @@ -488,17 +455,15 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero() + 2 * kObservationDurationLowerBound); ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); // If the delay based estimate is infinity, then loss based estimate increases // and not bounded by delay based estimate. EXPECT_GT( @@ -506,9 +471,8 @@ TEST_P(LossBasedBweV2Test, DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, /*delay_based_estimate=*/DataRate::KilobitsPerSec(500), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); // If the delay based estimate is not infinity, then loss based estimate is // bounded by delay based estimate. EXPECT_EQ( @@ -518,11 +482,11 @@ TEST_P(LossBasedBweV2Test, // When loss based bwe receives a strong signal of overusing and an increase in // loss rate, it should acked bitrate for emegency backoff. -TEST_P(LossBasedBweV2Test, UseAckedBitrateForEmegencyBackOff) { +TEST_F(LossBasedBweV2Test, UseAckedBitrateForEmegencyBackOff) { // Create two packet results, first packet has 50% loss rate, second packet // has 100% loss rate. std::vector enough_feedback_1 = - CreatePacketResultsWith50pLossRate( + CreatePacketResultsWith50pPacketLossRate( /*first_packet_timestamp=*/Timestamp::Zero()); std::vector enough_feedback_2 = CreatePacketResultsWith100pLossRate( @@ -530,8 +494,7 @@ TEST_P(LossBasedBweV2Test, UseAckedBitrateForEmegencyBackOff) { 2 * kObservationDurationLowerBound); ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.SetBandwidthEstimate( @@ -541,16 +504,12 @@ TEST_P(LossBasedBweV2Test, UseAckedBitrateForEmegencyBackOff) { // Update estimate when network is overusing, and 50% loss rate. loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwOverusing, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + /*in_alr=*/false); // Update estimate again when network is continuously overusing, and 100% // loss rate. 
loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwOverusing, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + /*in_alr=*/false); // The estimate bitrate now is backed off based on acked bitrate. EXPECT_LE( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, @@ -559,13 +518,12 @@ TEST_P(LossBasedBweV2Test, UseAckedBitrateForEmegencyBackOff) { // When receiving the same packet feedback, loss based bwe ignores the feedback // and returns the current estimate. -TEST_P(LossBasedBweV2Test, NoBweChangeIfObservationDurationUnchanged) { +TEST_F(LossBasedBweV2Test, NoBweChangeIfObservationDurationUnchanged) { std::vector enough_feedback_1 = CreatePacketResultsWithReceivedPackets( /*first_packet_timestamp=*/Timestamp::Zero()); ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); @@ -574,18 +532,16 @@ TEST_P(LossBasedBweV2Test, NoBweChangeIfObservationDurationUnchanged) { loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); DataRate estimate_1 = loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; // Use the same feedback and check if the estimate is unchanged. loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); DataRate estimate_2 = loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; EXPECT_EQ(estimate_2, estimate_1); @@ -594,7 +550,7 @@ TEST_P(LossBasedBweV2Test, NoBweChangeIfObservationDurationUnchanged) { // When receiving feedback of packets that were sent within an observation // duration, and network is in the normal state, loss based bwe returns the // current estimate. 
-TEST_P(LossBasedBweV2Test, +TEST_F(LossBasedBweV2Test, NoBweChangeIfObservationDurationIsSmallAndNetworkNormal) { std::vector enough_feedback_1 = CreatePacketResultsWithReceivedPackets( @@ -604,24 +560,21 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero() + kObservationDurationLowerBound - TimeDelta::Millis(1)); ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); DataRate estimate_1 = loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); DataRate estimate_2 = loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; EXPECT_EQ(estimate_2, estimate_1); @@ -630,7 +583,7 @@ TEST_P(LossBasedBweV2Test, // When receiving feedback of packets that were sent within an observation // duration, and network is in the underusing state, loss based bwe returns the // current estimate. -TEST_P(LossBasedBweV2Test, +TEST_F(LossBasedBweV2Test, NoBweIncreaseIfObservationDurationIsSmallAndNetworkUnderusing) { std::vector enough_feedback_1 = CreatePacketResultsWithReceivedPackets( @@ -640,72 +593,26 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero() + kObservationDurationLowerBound - TimeDelta::Millis(1)); ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); - DataRate estimate_1 = - loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; - - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwUnderusing, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); - DataRate estimate_2 = - loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; - EXPECT_LE(estimate_2, estimate_1); -} - -// When receiving feedback of packets that were sent within an observation -// duration, network is overusing, and trendline integration is enabled, loss -// based bwe updates its estimate. 
-TEST_P(LossBasedBweV2Test, - UpdateEstimateIfObservationDurationIsSmallAndNetworkOverusing) { - if (!GetParam()) { - GTEST_SKIP() << "This test should run only if " - "trendline_integration_enabled is enabled"; - } - std::vector enough_feedback_1 = - CreatePacketResultsWith50pLossRate( - /*first_packet_timestamp=*/Timestamp::Zero()); - std::vector enough_feedback_2 = - CreatePacketResultsWith100pLossRate( - /*first_packet_timestamp=*/Timestamp::Zero() + - kObservationDurationLowerBound - TimeDelta::Millis(1)); - ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); - LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - loss_based_bandwidth_estimator.SetBandwidthEstimate( - DataRate::KilobitsPerSec(600)); - loss_based_bandwidth_estimator.SetAcknowledgedBitrate( - DataRate::KilobitsPerSec(300)); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + /*in_alr=*/false); DataRate estimate_1 = loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_2, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwOverusing, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + /*in_alr=*/false); DataRate estimate_2 = loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; - EXPECT_LT(estimate_2, estimate_1); + EXPECT_LE(estimate_2, estimate_1); } -TEST_P(LossBasedBweV2Test, +TEST_F(LossBasedBweV2Test, IncreaseToDelayBasedEstimateIfNoLossOrDelayIncrease) { std::vector enough_feedback_1 = CreatePacketResultsWithReceivedPackets( @@ -715,38 +622,34 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero() + 2 * kObservationDurationLowerBound); ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate(enough_feedback_1, + delay_based_estimate, + /*in_alr=*/false); EXPECT_EQ( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, delay_based_estimate); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate(enough_feedback_2, + delay_based_estimate, + /*in_alr=*/false); EXPECT_EQ( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, delay_based_estimate); } -TEST_P(LossBasedBweV2Test, +TEST_F(LossBasedBweV2Test, IncreaseByMaxIncreaseFactorAfterLossBasedBweBacksOff) { - ExplicitKeyValueConfig key_value_config( - 
"WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:1.2|1|0.5,AckedRateCandidate:true," - "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," + ExplicitKeyValueConfig key_value_config(ShortObservationConfig( + "CandidateFactors:1.2|1|0.5," "InstantUpperBoundBwBalance:10000kbps," - "DelayBasedCandidate:true,MaxIncreaseFactor:1.5,BwRampupUpperBoundFactor:" - "2.0,NotIncreaseIfInherentLossLessThanAverageLoss:false/"); + "MaxIncreaseFactor:1.5,NotIncreaseIfInherentLossLessThanAverageLoss:" + "false")); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); DataRate acked_rate = DataRate::KilobitsPerSec(300); @@ -758,10 +661,9 @@ TEST_P(LossBasedBweV2Test, std::vector enough_feedback_1 = CreatePacketResultsWith100pLossRate( /*first_packet_timestamp=*/Timestamp::Zero()); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate(enough_feedback_1, + delay_based_estimate, + /*in_alr=*/false); LossBasedBweV2::Result result_at_loss = loss_based_bandwidth_estimator.GetLossBasedResult(); @@ -772,10 +674,9 @@ TEST_P(LossBasedBweV2Test, kObservationDurationLowerBound); loss_based_bandwidth_estimator.SetAcknowledgedBitrate( DataRate::KilobitsPerSec(600)); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate(enough_feedback_2, + delay_based_estimate, + /*in_alr=*/false); LossBasedBweV2::Result result_after_recovery = loss_based_bandwidth_estimator.GetLossBasedResult(); @@ -783,16 +684,13 @@ TEST_P(LossBasedBweV2Test, result_at_loss.bandwidth_estimate * 1.5); } -TEST_P(LossBasedBweV2Test, +TEST_F(LossBasedBweV2Test, LossBasedStateIsDelayBasedEstimateAfterNetworkRecovering) { - ExplicitKeyValueConfig key_value_config( - "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:100|1|0.5,AckedRateCandidate:true," - "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," + ExplicitKeyValueConfig key_value_config(ShortObservationConfig( + "CandidateFactors:100|1|0.5," "InstantUpperBoundBwBalance:10000kbps," - "DelayBasedCandidate:true,MaxIncreaseFactor:100," - "BwRampupUpperBoundFactor:" - "2.0,NotIncreaseIfInherentLossLessThanAverageLoss:false/"); + "MaxIncreaseFactor:100," + "NotIncreaseIfInherentLossLessThanAverageLoss:false")); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); DataRate delay_based_estimate = DataRate::KilobitsPerSec(600); DataRate acked_rate = DataRate::KilobitsPerSec(300); @@ -804,10 +702,9 @@ TEST_P(LossBasedBweV2Test, std::vector enough_feedback_1 = CreatePacketResultsWith100pLossRate( /*first_packet_timestamp=*/Timestamp::Zero()); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate(enough_feedback_1, + delay_based_estimate, + /*in_alr=*/false); ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, LossBasedState::kDecreasing); @@ -818,10 +715,9 @@ 
TEST_P(LossBasedBweV2Test, kObservationDurationLowerBound); loss_based_bandwidth_estimator.SetAcknowledgedBitrate( DataRate::KilobitsPerSec(600)); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate(enough_feedback_2, + delay_based_estimate, + /*in_alr=*/false); EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, LossBasedState::kDelayBasedEstimate); @@ -832,39 +728,31 @@ TEST_P(LossBasedBweV2Test, kObservationDurationLowerBound * 2); loss_based_bandwidth_estimator.SetAcknowledgedBitrate( DataRate::KilobitsPerSec(600)); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_3, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate(enough_feedback_3, + delay_based_estimate, + /*in_alr=*/false); EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, LossBasedState::kDelayBasedEstimate); } -TEST_P(LossBasedBweV2Test, - LossBasedStateIsNotDelayBasedEstimateIfDelayBasedEsimtateInfinite) { +TEST_F(LossBasedBweV2Test, + LossBasedStateIsNotDelayBasedEstimateIfDelayBasedEstimateInfinite) { ExplicitKeyValueConfig key_value_config( - "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:100|1|0.5,AckedRateCandidate:true," - "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," - "InstantUpperBoundBwBalance:10000kbps," - "DelayBasedCandidate:true,MaxIncreaseFactor:100," - "BwRampupUpperBoundFactor:" - "2.0/"); + ShortObservationConfig("CandidateFactors:100|1|0.5," + "InstantUpperBoundBwBalance:10000kbps," + "MaxIncreaseFactor:100")); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - DataRate delay_based_estimate = DataRate::PlusInfinity(); - DataRate acked_rate = DataRate::KilobitsPerSec(300); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); - loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); // Create some loss to create the loss limited scenario. std::vector enough_feedback_1 = CreatePacketResultsWith100pLossRate( /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + enough_feedback_1, + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, LossBasedState::kDecreasing); @@ -876,29 +764,25 @@ TEST_P(LossBasedBweV2Test, loss_based_bandwidth_estimator.SetAcknowledgedBitrate( DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + enough_feedback_2, + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); EXPECT_NE(loss_based_bandwidth_estimator.GetLossBasedResult().state, LossBasedState::kDelayBasedEstimate); } // After loss based bwe backs off, the next estimate is capped by // a factor of acked bitrate. 
-TEST_P(LossBasedBweV2Test, +TEST_F(LossBasedBweV2Test, IncreaseByFactorOfAckedBitrateAfterLossBasedBweBacksOff) { - ExplicitKeyValueConfig key_value_config( - "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,LossThresholdOfHighBandwidthPreference:0.99," + ExplicitKeyValueConfig key_value_config(ShortObservationConfig( + "LossThresholdOfHighBandwidthPreference:0.99," "BwRampupUpperBoundFactor:1.2," - "InherentLossUpperBoundOffset:0.9,ObservationDurationLowerBound:200ms/"); + // Set InstantUpperBoundBwBalance high to disable InstantUpperBound cap. + "InstantUpperBoundBwBalance:10000kbps,")); std::vector enough_feedback_1 = CreatePacketResultsWith100pLossRate( /*first_packet_timestamp=*/Timestamp::Zero()); - std::vector enough_feedback_2 = - CreatePacketResultsWith10pLossRate( - /*first_packet_timestamp=*/Timestamp::Zero() + - kObservationDurationLowerBound); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); @@ -906,35 +790,103 @@ TEST_P(LossBasedBweV2Test, DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.SetAcknowledgedBitrate( DataRate::KilobitsPerSec(300)); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate(enough_feedback_1, + delay_based_estimate, + /*in_alr=*/false); + ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + LossBasedBweV2::Result result = + loss_based_bandwidth_estimator.GetLossBasedResult(); + DataRate estimate_1 = result.bandwidth_estimate; + ASSERT_LT(estimate_1.kbps(), 600); - // Change the acked bitrate to make sure that the estimate is bounded by a - // factor of acked bitrate. - DataRate acked_bitrate = DataRate::KilobitsPerSec(50); - loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_bitrate); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate(estimate_1 * 0.9); + + int feedback_count = 1; + while (feedback_count < 5 && result.state != LossBasedState::kIncreasing) { + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + feedback_count++ * kObservationDurationLowerBound), + delay_based_estimate, + /*in_alr=*/false); + result = loss_based_bandwidth_estimator.GetLossBasedResult(); + } + ASSERT_EQ(result.state, LossBasedState::kIncreasing); // The estimate is capped by acked_bitrate * BwRampupUpperBoundFactor. - DataRate estimate_2 = - loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; - EXPECT_EQ(estimate_2, acked_bitrate * 1.2); + EXPECT_EQ(result.bandwidth_estimate, estimate_1 * 0.9 * 1.2); + + // But if acked bitrate decreases, BWE does not decrease when there is no + // loss. 
+ loss_based_bandwidth_estimator.SetAcknowledgedBitrate(estimate_1 * 0.9); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + feedback_count++ * kObservationDurationLowerBound), + delay_based_estimate, + /*in_alr=*/false); + EXPECT_EQ( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + result.bandwidth_estimate); +} + +// Ensure that the state can switch to kIncrease even when the bandwidth is +// bounded by acked bitrate. +TEST_F(LossBasedBweV2Test, EnsureIncreaseEvenIfAckedBitrateBound) { + ExplicitKeyValueConfig key_value_config(ShortObservationConfig( + "LossThresholdOfHighBandwidthPreference:0.99," + "BwRampupUpperBoundFactor:1.2," + // Set InstantUpperBoundBwBalance high to disable InstantUpperBound cap. + "InstantUpperBoundBwBalance:10000kbps,")); + std::vector enough_feedback_1 = + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); + + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate( + DataRate::KilobitsPerSec(300)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate(enough_feedback_1, + delay_based_estimate, + /*in_alr=*/false); + ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + LossBasedBweV2::Result result = + loss_based_bandwidth_estimator.GetLossBasedResult(); + DataRate estimate_1 = result.bandwidth_estimate; + ASSERT_LT(estimate_1.kbps(), 600); + + // Set a low acked bitrate. + loss_based_bandwidth_estimator.SetAcknowledgedBitrate(estimate_1 / 2); + + int feedback_count = 1; + while (feedback_count < 5 && result.state != LossBasedState::kIncreasing) { + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + feedback_count++ * kObservationDurationLowerBound), + delay_based_estimate, + /*in_alr=*/false); + result = loss_based_bandwidth_estimator.GetLossBasedResult(); + } + + ASSERT_EQ(result.state, LossBasedState::kIncreasing); + // The estimate increases by 1kbps. + EXPECT_EQ(result.bandwidth_estimate, estimate_1 + DataRate::BitsPerSec(1)); } // After loss based bwe backs off, the estimate is bounded during the delayed // window. 
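The two tests above pin down how the estimate may grow back after a loss-induced back-off: once the state reaches LossBasedState::kIncreasing, the new estimate is capped at the acknowledged bitrate times BwRampupUpperBoundFactor (1.2 here), and when that cap sits below the current estimate the estimate still nudges up (by DataRate::BitsPerSec(1) in the expectation) rather than stalling. A minimal sketch of the cap itself, under the simplifying assumption that it is a plain min() against acked_bitrate * factor rather than the estimator's full candidate search:

#include <algorithm>
#include <cstdint>

// Illustrative sketch only; not the LossBasedBweV2 implementation.
// Mirrors the bound checked by
// IncreaseByFactorOfAckedBitrateAfterLossBasedBweBacksOff: with
// acked = 0.9 * estimate_1 and factor = 1.2, the increased estimate lands at
// exactly estimate_1 * 0.9 * 1.2.
int64_t CapRampUpBps(int64_t candidate_bps,
                     int64_t acked_bitrate_bps,
                     double bw_rampup_upper_bound_factor) {
  const int64_t rampup_cap_bps = static_cast<int64_t>(
      acked_bitrate_bps * bw_rampup_upper_bound_factor);
  return std::min(candidate_bps, rampup_cap_bps);
}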
-TEST_P(LossBasedBweV2Test, +TEST_F(LossBasedBweV2Test, EstimateBitrateIsBoundedDuringDelayedWindowAfterLossBasedBweBacksOff) { std::vector enough_feedback_1 = CreatePacketResultsWithReceivedPackets( /*first_packet_timestamp=*/Timestamp::Zero()); std::vector enough_feedback_2 = - CreatePacketResultsWith50pLossRate( + CreatePacketResultsWith50pPacketLossRate( /*first_packet_timestamp=*/Timestamp::Zero() + kDelayedIncreaseWindow - TimeDelta::Millis(2)); std::vector enough_feedback_3 = @@ -942,8 +894,7 @@ TEST_P(LossBasedBweV2Test, /*first_packet_timestamp=*/Timestamp::Zero() + kDelayedIncreaseWindow - TimeDelta::Millis(1)); ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); @@ -951,28 +902,25 @@ TEST_P(LossBasedBweV2Test, DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.SetAcknowledgedBitrate( DataRate::KilobitsPerSec(300)); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate(enough_feedback_1, + delay_based_estimate, + /*in_alr=*/false); // Increase the acknowledged bitrate to make sure that the estimate is not // capped too low. loss_based_bandwidth_estimator.SetAcknowledgedBitrate( DataRate::KilobitsPerSec(5000)); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate(enough_feedback_2, + delay_based_estimate, + /*in_alr=*/false); // The estimate is capped by current_estimate * kMaxIncreaseFactor because // it recently backed off. DataRate estimate_2 = loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_3, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate(enough_feedback_3, + delay_based_estimate, + /*in_alr=*/false); // The latest estimate is the same as the previous estimate since the sent // packets were sent within the DelayedIncreaseWindow. EXPECT_EQ( @@ -981,7 +929,7 @@ TEST_P(LossBasedBweV2Test, } // The estimate is not bounded after the delayed increase window. 
-TEST_P(LossBasedBweV2Test, KeepIncreasingEstimateAfterDelayedIncreaseWindow) { +TEST_F(LossBasedBweV2Test, KeepIncreasingEstimateAfterDelayedIncreaseWindow) { std::vector enough_feedback_1 = CreatePacketResultsWithReceivedPackets( /*first_packet_timestamp=*/Timestamp::Zero()); @@ -994,8 +942,7 @@ TEST_P(LossBasedBweV2Test, KeepIncreasingEstimateAfterDelayedIncreaseWindow) { /*first_packet_timestamp=*/Timestamp::Zero() + kDelayedIncreaseWindow + TimeDelta::Millis(1)); ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + Config(/*enabled=*/true, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); @@ -1003,64 +950,56 @@ TEST_P(LossBasedBweV2Test, KeepIncreasingEstimateAfterDelayedIncreaseWindow) { DataRate::KilobitsPerSec(600)); loss_based_bandwidth_estimator.SetAcknowledgedBitrate( DataRate::KilobitsPerSec(300)); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate(enough_feedback_1, + delay_based_estimate, + /*in_alr=*/false); // Increase the acknowledged bitrate to make sure that the estimate is not // capped too low. loss_based_bandwidth_estimator.SetAcknowledgedBitrate( DataRate::KilobitsPerSec(5000)); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate(enough_feedback_2, + delay_based_estimate, + /*in_alr=*/false); // The estimate is capped by current_estimate * kMaxIncreaseFactor because it // recently backed off. DataRate estimate_2 = loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_3, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate(enough_feedback_3, + delay_based_estimate, + /*in_alr=*/false); // The estimate can continue increasing after the DelayedIncreaseWindow. 
EXPECT_GE( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, estimate_2); } -TEST_P(LossBasedBweV2Test, NotIncreaseIfInherentLossLessThanAverageLoss) { - ExplicitKeyValueConfig key_value_config( - "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:1.2,AckedRateCandidate:false," - "ObservationWindowSize:2," - "DelayBasedCandidate:true,InstantUpperBoundBwBalance:100kbps," - "ObservationDurationLowerBound:200ms," - "NotIncreaseIfInherentLossLessThanAverageLoss:true/"); +TEST_F(LossBasedBweV2Test, NotIncreaseIfInherentLossLessThanAverageLoss) { + ExplicitKeyValueConfig key_value_config(ShortObservationConfig( + "CandidateFactors:1.2," + "NotIncreaseIfInherentLossLessThanAverageLoss:true")); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); std::vector enough_feedback_10p_loss_1 = - CreatePacketResultsWith10pLossRate( + CreatePacketResultsWith10pPacketLossRate( /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_10p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + enough_feedback_10p_loss_1, + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); std::vector enough_feedback_10p_loss_2 = - CreatePacketResultsWith10pLossRate( + CreatePacketResultsWith10pPacketLossRate( /*first_packet_timestamp=*/Timestamp::Zero() + kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_10p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + enough_feedback_10p_loss_2, + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); // Do not increase the bitrate because inherent loss is less than average loss EXPECT_EQ( @@ -1068,16 +1007,11 @@ TEST_P(LossBasedBweV2Test, NotIncreaseIfInherentLossLessThanAverageLoss) { DataRate::KilobitsPerSec(600)); } -TEST_P(LossBasedBweV2Test, +TEST_F(LossBasedBweV2Test, SelectHighBandwidthCandidateIfLossRateIsLessThanThreshold) { - ExplicitKeyValueConfig key_value_config( - "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:1.2|0.8,AckedRateCandidate:false," - "ObservationWindowSize:2," - "DelayBasedCandidate:true,InstantUpperBoundBwBalance:100kbps," - "ObservationDurationLowerBound:200ms,HigherBwBiasFactor:1000," - "HigherLogBwBiasFactor:1000,LossThresholdOfHighBandwidthPreference:0." 
- "20,NotIncreaseIfInherentLossLessThanAverageLoss:false/"); + ExplicitKeyValueConfig key_value_config(ShortObservationConfig( + "LossThresholdOfHighBandwidthPreference:0.20," + "NotIncreaseIfInherentLossLessThanAverageLoss:false")); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); @@ -1085,21 +1019,21 @@ TEST_P(LossBasedBweV2Test, DataRate::KilobitsPerSec(600)); std::vector enough_feedback_10p_loss_1 = - CreatePacketResultsWith10pLossRate( + CreatePacketResultsWith10pPacketLossRate( /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); std::vector enough_feedback_10p_loss_2 = - CreatePacketResultsWith10pLossRate( + CreatePacketResultsWith10pPacketLossRate( /*first_packet_timestamp=*/Timestamp::Zero() + kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); // Because LossThresholdOfHighBandwidthPreference is 20%, the average loss is // 10%, bandwidth estimate should increase. @@ -1108,16 +1042,10 @@ TEST_P(LossBasedBweV2Test, DataRate::KilobitsPerSec(600)); } -TEST_P(LossBasedBweV2Test, +TEST_F(LossBasedBweV2Test, SelectLowBandwidthCandidateIfLossRateIsIsHigherThanThreshold) { ExplicitKeyValueConfig key_value_config( - "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:1.2|0.8,AckedRateCandidate:false," - "ObservationWindowSize:2," - "DelayBasedCandidate:true,InstantUpperBoundBwBalance:100kbps," - "ObservationDurationLowerBound:200ms,HigherBwBiasFactor:1000," - "HigherLogBwBiasFactor:1000,LossThresholdOfHighBandwidthPreference:0." - "05/"); + ShortObservationConfig("LossThresholdOfHighBandwidthPreference:0.05")); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); @@ -1125,21 +1053,21 @@ TEST_P(LossBasedBweV2Test, DataRate::KilobitsPerSec(600)); std::vector enough_feedback_10p_loss_1 = - CreatePacketResultsWith10pLossRate( + CreatePacketResultsWith10pPacketLossRate( /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); std::vector enough_feedback_10p_loss_2 = - CreatePacketResultsWith10pLossRate( + CreatePacketResultsWith10pPacketLossRate( /*first_packet_timestamp=*/Timestamp::Zero() + kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( enough_feedback_10p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + + /*in_alr=*/false); // Because LossThresholdOfHighBandwidthPreference is 5%, the average loss is // 10%, bandwidth estimate should decrease. 
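Most of the rewritten configurations above replace the long hand-written "WebRTC-Bwe-LossBasedBweV2/..." strings with a ShortObservationConfig(...) helper that is defined earlier in the test file, outside this excerpt. A rough sketch of what such a helper could look like; the defaults it bakes in below are assumptions for illustration, not taken from this diff:

#include <string>

// Hypothetical stand-in for the ShortObservationConfig helper used above.
// It prepends the field-trial group name plus a short observation window so
// each test only spells out the parameters it cares about. The concrete
// defaults here are assumed; the real helper lives earlier in
// loss_based_bwe_v2_unittest.cc.
std::string ShortObservationConfigSketch(const std::string& custom_config) {
  return "WebRTC-Bwe-LossBasedBweV2/"
         "Enabled:true,ObservationWindowSize:2,"
         "ObservationDurationLowerBound:200ms," +
         custom_config + "/";
}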
@@ -1148,519 +1076,902 @@ TEST_P(LossBasedBweV2Test, DataRate::KilobitsPerSec(600)); } -TEST_P(LossBasedBweV2Test, LimitByProbeResultWhenRecoveringFromLoss) { +TEST_F(LossBasedBweV2Test, EstimateIsNotHigherThanMaxBitrate) { ExplicitKeyValueConfig key_value_config( - "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:1.2|1|0.5,AckedRateCandidate:true," - "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," - "InstantUpperBoundBwBalance:10000kbps,DelayedIncreaseWindow:100s," - "DelayBasedCandidate:true,MaxIncreaseFactor:1.3," - "BwRampupUpperBoundFactor:2.0,ProbeIntegrationEnabled:true/"); + Config(/*enabled=*/true, /*valid=*/true)); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); - DataRate acked_rate = DataRate::KilobitsPerSec(300); + loss_based_bandwidth_estimator.SetMinMaxBitrate( + /*min_bitrate=*/DataRate::KilobitsPerSec(10), + /*max_bitrate=*/DataRate::KilobitsPerSec(1000)); loss_based_bandwidth_estimator.SetBandwidthEstimate( - DataRate::KilobitsPerSec(600)); - loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); - - // Create some loss to create the loss limited scenario. - std::vector enough_feedback_1 = - CreatePacketResultsWith100pLossRate( + DataRate::KilobitsPerSec(1000)); + std::vector enough_feedback = + CreatePacketResultsWithReceivedPackets( /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + enough_feedback, /*delay_based_estimate=*/DataRate::PlusInfinity(), - // Network recovers after loss - DataRate probe_estimate = DataRate::KilobitsPerSec(300); - std::vector enough_feedback_2 = - CreatePacketResultsWithReceivedPackets( - /*first_packet_timestamp=*/Timestamp::Zero() + - kObservationDurationLowerBound); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - probe_estimate, /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); - for (int i = 2; i < 5; ++i) { - enough_feedback_2 = CreatePacketResultsWithReceivedPackets( - /*first_packet_timestamp=*/Timestamp::Zero() + - kObservationDurationLowerBound * i); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), - /*in_alr=*/false); - LossBasedBweV2::Result result_after_recovery = - loss_based_bandwidth_estimator.GetLossBasedResult(); - EXPECT_LE(result_after_recovery.bandwidth_estimate, probe_estimate); - } + EXPECT_LE( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(1000)); } -TEST_P(LossBasedBweV2Test, NotLimitByProbeResultWhenProbeResultIsExpired) { +TEST_F(LossBasedBweV2Test, NotBackOffToAckedRateInAlr) { ExplicitKeyValueConfig key_value_config( - "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:1.2|1|0.5,AckedRateCandidate:true," - "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," - "InstantUpperBoundBwBalance:10000kbps,DelayedIncreaseWindow:100s," - "DelayBasedCandidate:true,MaxIncreaseFactor:1.3," - "BwRampupUpperBoundFactor:2.0,ProbeIntegrationEnabled:true," - "ProbeExpiration:10s/"); + ShortObservationConfig("InstantUpperBoundBwBalance:100kbps")); 
LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + loss_based_bandwidth_estimator.SetMinMaxBitrate( + /*min_bitrate=*/DataRate::KilobitsPerSec(10), + /*max_bitrate=*/DataRate::KilobitsPerSec(1000000)); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); - DataRate acked_rate = DataRate::KilobitsPerSec(300); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); - loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); - // Create some loss to create the loss limited scenario. - std::vector enough_feedback_1 = + DataRate acked_rate = DataRate::KilobitsPerSec(100); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); + std::vector enough_feedback_100p_loss_1 = CreatePacketResultsWith100pLossRate( /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + enough_feedback_100p_loss_1, delay_based_estimate, + /*in_alr=*/true); - // Network recovers after loss - DataRate probe_estimate = DataRate::KilobitsPerSec(300); - std::vector enough_feedback_2 = - CreatePacketResultsWithReceivedPackets( - /*first_packet_timestamp=*/Timestamp::Zero() + - kObservationDurationLowerBound); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - probe_estimate, /*upper_link_capacity=*/DataRate::PlusInfinity(), - /*in_alr=*/false); + // Make sure that the estimate decreases but higher than acked rate. + EXPECT_GT( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + acked_rate); - for (int i = 2; i < 5; ++i) { - enough_feedback_2 = CreatePacketResultsWithReceivedPackets( - /*first_packet_timestamp=*/Timestamp::Zero() + - kObservationDurationLowerBound * i); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), - /*in_alr=*/false); - } - - std::vector enough_feedback_3 = - CreatePacketResultsWithReceivedPackets( - /*first_packet_timestamp=*/Timestamp::Zero() + - kObservationDurationLowerBound + TimeDelta::Seconds(11)); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_3, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), - /*in_alr=*/false); - - // Probe result is expired after 10s. - LossBasedBweV2::Result result_after_recovery = - loss_based_bandwidth_estimator.GetLossBasedResult(); - EXPECT_GT(result_after_recovery.bandwidth_estimate, probe_estimate); + EXPECT_LT( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(600)); } -// If BoundByUpperLinkCapacityWhenLossLimited is enabled, the estimate is -// bounded by the upper link capacity when bandwidth is loss limited. 
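In NotBackOffToAckedRateInAlr above, the 100 kbps InstantUpperBoundBwBalance is what keeps the backed-off estimate just above the 100 kbps acked rate. Assuming the instant upper bound has the usual balance / (loss - offset) shape with a default loss offset around 0.05 (an assumption about the estimator's defaults, not something stated in this diff), 100% loss gives roughly 100 / 0.95, about 105 kbps: above the acked rate yet far below the 600 kbps starting estimate, which matches the EXPECT_GT/EXPECT_LT pair. A tiny sketch:

// Illustrative only: assumed shape of the instant upper bound,
//   bound = InstantUpperBoundBwBalance / (loss_ratio - loss_offset).
// The 0.05 offset is an assumed default, not taken from this diff.
double InstantUpperBoundKbps(double balance_kbps,
                             double loss_ratio,
                             double loss_offset = 0.05) {
  return loss_ratio > loss_offset
             ? balance_kbps / (loss_ratio - loss_offset)
             : 1e9;  // effectively unbounded when loss is below the offset
}
// 100 kbps balance, 100% loss -> ~105 kbps (just above the 100 kbps acked rate).
// 10 kbps balance,  10% loss  -> 200 kbps, the value expected later in
// HasDecreaseStateBecauseOfUpperBound.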
-TEST_P(LossBasedBweV2Test, BoundEstimateByUpperLinkCapacityWhenLossLimited) { +TEST_F(LossBasedBweV2Test, BackOffToAckedRateIfNotInAlr) { ExplicitKeyValueConfig key_value_config( - "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:1.2|1|0.5,AckedRateCandidate:true," - "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," - "InstantUpperBoundBwBalance:10000kbps," - "DelayBasedCandidate:true,MaxIncreaseFactor:1000," - "BwRampupUpperBoundFactor:2.0,BoundByUpperLinkCapacityWhenLossLimited:" - "true/"); + ShortObservationConfig("InstantUpperBoundBwBalance:100kbps")); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + loss_based_bandwidth_estimator.SetMinMaxBitrate( + /*min_bitrate=*/DataRate::KilobitsPerSec(10), + /*max_bitrate=*/DataRate::KilobitsPerSec(1000000)); DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); - DataRate acked_rate = DataRate::KilobitsPerSec(300); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); - loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); - // Create some loss to create the loss limited scenario. - std::vector enough_feedback_1 = + DataRate acked_rate = DataRate::KilobitsPerSec(100); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); + std::vector enough_feedback_100p_loss_1 = CreatePacketResultsWith100pLossRate( /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + enough_feedback_100p_loss_1, delay_based_estimate, - // Network recovers after loss. - DataRate upper_link_capacity = DataRate::KilobitsPerSec(10); - std::vector enough_feedback_2 = - CreatePacketResultsWithReceivedPackets( - /*first_packet_timestamp=*/Timestamp::Zero() + - kObservationDurationLowerBound); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, upper_link_capacity, /*in_alr=*/false); + /*in_alr=*/false); - LossBasedBweV2::Result result_after_recovery = - loss_based_bandwidth_estimator.GetLossBasedResult(); - EXPECT_EQ(result_after_recovery.bandwidth_estimate, upper_link_capacity); + // Make sure that the estimate decreases but higher than acked rate. + EXPECT_EQ( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + acked_rate); } -// If BoundByUpperLinkCapacityWhenLossLimited is enabled, the estimate is not -// bounded by the upper link capacity when bandwidth is not loss limited. 
-TEST_P(LossBasedBweV2Test, - NotBoundEstimateByUpperLinkCapacityWhenNotLossLimited) { +TEST_F(LossBasedBweV2Test, NotReadyToUseInStartPhase) { ExplicitKeyValueConfig key_value_config( - "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:1.2|1|0.5,AckedRateCandidate:true," - "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," - "InstantUpperBoundBwBalance:10000kbps," - "DelayBasedCandidate:true,MaxIncreaseFactor:1000," - "BwRampupUpperBoundFactor:2.0,BoundByUpperLinkCapacityWhenLossLimited:" - "true/"); + ShortObservationConfig("UseInStartPhase:true")); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); - DataRate acked_rate = DataRate::KilobitsPerSec(300); - loss_based_bandwidth_estimator.SetBandwidthEstimate( - DataRate::KilobitsPerSec(600)); - loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); + // Make sure that the estimator is not ready to use in start phase because of + // lacking TWCC feedback. + EXPECT_FALSE(loss_based_bandwidth_estimator.ReadyToUseInStartPhase()); +} - // Create a normal network without loss - std::vector enough_feedback_1 = +TEST_F(LossBasedBweV2Test, ReadyToUseInStartPhase) { + ExplicitKeyValueConfig key_value_config( + ShortObservationConfig("UseInStartPhase:true")); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + std::vector enough_feedback = CreatePacketResultsWithReceivedPackets( /*first_packet_timestamp=*/Timestamp::Zero()); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); - DataRate upper_link_capacity = DataRate::KilobitsPerSec(10); - std::vector enough_feedback_2 = - CreatePacketResultsWithReceivedPackets( - /*first_packet_timestamp=*/Timestamp::Zero() + - kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, upper_link_capacity, /*in_alr=*/false); - - LossBasedBweV2::Result loss_based_result = - loss_based_bandwidth_estimator.GetLossBasedResult(); - EXPECT_GT(loss_based_result.bandwidth_estimate, upper_link_capacity); + enough_feedback, /*delay_based_estimate=*/DataRate::KilobitsPerSec(600), + /*in_alr=*/false); + EXPECT_TRUE(loss_based_bandwidth_estimator.ReadyToUseInStartPhase()); } -// If BoundByUpperLinkCapacityWhenLossLimited is disabled, the estimate is not -// bounded by the upper link capacity. 
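The UseInStartPhase tests above only check the ReadyToUseInStartPhase() predicate itself. A hedged sketch of the kind of gating a caller could build on it (hypothetical consumer code, not how the send-side controller actually wires it up): keep the configured start bitrate until the loss-based estimator has seen transport feedback, then let its estimate take over.

#include "api/units/data_rate.h"
#include "modules/congestion_controller/goog_cc/loss_based_bwe_v2.h"

namespace webrtc {
// Hypothetical start-phase gating, for illustration only.
DataRate StartPhaseTargetRate(LossBasedBweV2& estimator,
                              DataRate configured_start_rate) {
  if (estimator.ReadyToUseInStartPhase()) {
    return estimator.GetLossBasedResult().bandwidth_estimate;
  }
  return configured_start_rate;
}
}  // namespace webrtc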
-TEST_P(LossBasedBweV2Test, NotBoundEstimateByUpperLinkCapacity) { +TEST_F(LossBasedBweV2Test, BoundEstimateByAckedRate) { ExplicitKeyValueConfig key_value_config( - "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:1.2|1|0.5,AckedRateCandidate:true," - "ObservationWindowSize:2,ObservationDurationLowerBound:200ms," - "InstantUpperBoundBwBalance:10000kbps," - "DelayBasedCandidate:true,MaxIncreaseFactor:1000," - "BwRampupUpperBoundFactor:2.0,BoundByUpperLinkCapacityWhenLossLimited:" - "false/"); + ShortObservationConfig("LowerBoundByAckedRateFactor:1.0")); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); - DataRate acked_rate = DataRate::KilobitsPerSec(300); + loss_based_bandwidth_estimator.SetMinMaxBitrate( + /*min_bitrate=*/DataRate::KilobitsPerSec(10), + /*max_bitrate=*/DataRate::KilobitsPerSec(1000000)); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); - loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate( + DataRate::KilobitsPerSec(500)); - // Create some loss to create the loss limited scenario. - std::vector enough_feedback_1 = + std::vector enough_feedback_100p_loss_1 = CreatePacketResultsWith100pLossRate( /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_1, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); - - // Network recovers after loss. - DataRate upper_link_capacity = DataRate::KilobitsPerSec(10); - std::vector enough_feedback_2 = - CreatePacketResultsWithReceivedPackets( - /*first_packet_timestamp=*/Timestamp::Zero() + - kObservationDurationLowerBound); - loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_2, delay_based_estimate, BandwidthUsage::kBwNormal, - /*probe_estimate=*/absl::nullopt, upper_link_capacity, /*in_alr=*/false); + enough_feedback_100p_loss_1, + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); - LossBasedBweV2::Result result_after_recovery = - loss_based_bandwidth_estimator.GetLossBasedResult(); - EXPECT_GT(result_after_recovery.bandwidth_estimate, upper_link_capacity); + EXPECT_EQ( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(500)); } -TEST_P(LossBasedBweV2Test, - StricterBoundUsingHighLossRateThresholdAt10pLossRate) { +TEST_F(LossBasedBweV2Test, NotBoundEstimateByAckedRate) { ExplicitKeyValueConfig key_value_config( - "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:1.0,AckedRateCandidate:false," - "ObservationWindowSize:2," - "DelayBasedCandidate:true,InstantUpperBoundBwBalance:100kbps," - "ObservationDurationLowerBound:200ms,HigherBwBiasFactor:1000," - "HigherLogBwBiasFactor:1000,LossThresholdOfHighBandwidthPreference:0." 
- "05,HighLossRateThreshold:0.09/"); + ShortObservationConfig("LowerBoundByAckedRateFactor:0.0")); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.SetMinMaxBitrate( /*min_bitrate=*/DataRate::KilobitsPerSec(10), /*max_bitrate=*/DataRate::KilobitsPerSec(1000000)); - DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); loss_based_bandwidth_estimator.SetBandwidthEstimate( DataRate::KilobitsPerSec(600)); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate( + DataRate::KilobitsPerSec(500)); - std::vector enough_feedback_10p_loss_1 = - CreatePacketResultsWith10pLossRate( + std::vector enough_feedback_100p_loss_1 = + CreatePacketResultsWith100pLossRate( /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_10p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + enough_feedback_100p_loss_1, + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); - std::vector enough_feedback_10p_loss_2 = - CreatePacketResultsWith10pLossRate( - /*first_packet_timestamp=*/Timestamp::Zero() + - kObservationDurationLowerBound); + EXPECT_LT( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(500)); +} + +TEST_F(LossBasedBweV2Test, HasDecreaseStateBecauseOfUpperBound) { + ExplicitKeyValueConfig key_value_config(ShortObservationConfig( + "CandidateFactors:1.0,InstantUpperBoundBwBalance:10kbps")); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + loss_based_bandwidth_estimator.SetMinMaxBitrate( + /*min_bitrate=*/DataRate::KilobitsPerSec(10), + /*max_bitrate=*/DataRate::KilobitsPerSec(1000000)); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(500)); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate( + DataRate::KilobitsPerSec(500)); + + std::vector enough_feedback_10p_loss_1 = + CreatePacketResultsWith10pPacketLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_10p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + enough_feedback_10p_loss_1, + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); - // At 10% loss rate and high loss rate threshold to be 10%, cap the estimate - // to be 500 * 1000-0.1 = 400kbps. + // Verify that the instant upper bound decreases the estimate, and state is + // updated to kDecreasing. EXPECT_EQ( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, - DataRate::KilobitsPerSec(400)); + DataRate::KilobitsPerSec(200)); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); } -TEST_P(LossBasedBweV2Test, - StricterBoundUsingHighLossRateThresholdAt50pLossRate) { - ExplicitKeyValueConfig key_value_config( - "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:1.0,AckedRateCandidate:false," - "ObservationWindowSize:2," - "DelayBasedCandidate:true,InstantUpperBoundBwBalance:100kbps," - "ObservationDurationLowerBound:200ms,HigherBwBiasFactor:1000," - "HigherLogBwBiasFactor:1000,LossThresholdOfHighBandwidthPreference:0." 
- "05,HighLossRateThreshold:0.3/"); +TEST_F(LossBasedBweV2Test, HasIncreaseStateBecauseOfLowerBound) { + ExplicitKeyValueConfig key_value_config(ShortObservationConfig( + "CandidateFactors:1.0,LowerBoundByAckedRateFactor:10.0")); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.SetMinMaxBitrate( /*min_bitrate=*/DataRate::KilobitsPerSec(10), /*max_bitrate=*/DataRate::KilobitsPerSec(1000000)); - DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); loss_based_bandwidth_estimator.SetBandwidthEstimate( - DataRate::KilobitsPerSec(600)); + DataRate::KilobitsPerSec(500)); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate( + DataRate::KilobitsPerSec(1)); + // Network has a high loss to create a loss scenario. std::vector enough_feedback_50p_loss_1 = - CreatePacketResultsWith50pLossRate( + CreatePacketResultsWith50pPacketLossRate( /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_50p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + enough_feedback_50p_loss_1, + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + + ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + // Network still has a high loss, but better acked rate. + loss_based_bandwidth_estimator.SetAcknowledgedBitrate( + DataRate::KilobitsPerSec(200)); std::vector enough_feedback_50p_loss_2 = - CreatePacketResultsWith50pLossRate( + CreatePacketResultsWith50pPacketLossRate( /*first_packet_timestamp=*/Timestamp::Zero() + kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_50p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + enough_feedback_50p_loss_2, + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); - // At 50% loss rate and high loss rate threshold to be 30%, cap the estimate - // to be the min bitrate. + // Verify that the instant lower bound increases the estimate, and state is + // updated to kIncreasing. EXPECT_EQ( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, - DataRate::KilobitsPerSec(10)); + DataRate::KilobitsPerSec(200) * 10); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kIncreasing); } -TEST_P(LossBasedBweV2Test, - StricterBoundUsingHighLossRateThresholdAt100pLossRate) { +TEST_F(LossBasedBweV2Test, + EstimateIncreaseSlowlyFromInstantUpperBoundInAlrIfFieldTrial) { ExplicitKeyValueConfig key_value_config( - "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:1.0,AckedRateCandidate:false," - "ObservationWindowSize:2," - "DelayBasedCandidate:true,InstantUpperBoundBwBalance:100kbps," - "ObservationDurationLowerBound:200ms,HigherBwBiasFactor:1000," - "HigherLogBwBiasFactor:1000,LossThresholdOfHighBandwidthPreference:0." 
- "05,HighLossRateThreshold:0.3/"); + ShortObservationConfig("UpperBoundCandidateInAlr:true")); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(1000)); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate( + DataRate::KilobitsPerSec(150)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith50pPacketLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/true); + LossBasedBweV2::Result result_after_loss = + loss_based_bandwidth_estimator.GetLossBasedResult(); + ASSERT_EQ(result_after_loss.state, LossBasedState::kDecreasing); + + for (int feedback_count = 1; feedback_count <= 3; ++feedback_count) { + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + feedback_count * kObservationDurationLowerBound), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/true); + } + // Expect less than 100% increase. + EXPECT_LT( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + 2 * result_after_loss.bandwidth_estimate); +} + +TEST_F(LossBasedBweV2Test, HasDelayBasedStateIfLossBasedBweIsMax) { + ExplicitKeyValueConfig key_value_config(ShortObservationConfig("")); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); loss_based_bandwidth_estimator.SetMinMaxBitrate( /*min_bitrate=*/DataRate::KilobitsPerSec(10), - /*max_bitrate=*/DataRate::KilobitsPerSec(1000000)); - DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); - loss_based_bandwidth_estimator.SetBandwidthEstimate( - DataRate::KilobitsPerSec(600)); + /*max_bitrate=*/DataRate::KilobitsPerSec(1000)); - std::vector enough_feedback_100p_loss_1 = - CreatePacketResultsWith100pLossRate( - /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_100p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + /*feedback = */ CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero()), + /*delay_based_estimate=*/DataRate::KilobitsPerSec(2000), + /*in_alr=*/false); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDelayBasedEstimate); + EXPECT_EQ( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(1000)); - std::vector enough_feedback_100p_loss_2 = - CreatePacketResultsWith100pLossRate( - /*first_packet_timestamp=*/Timestamp::Zero() + - kObservationDurationLowerBound); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_100p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + /*feedback=*/CreatePacketResultsWith50pPacketLossRate( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound), + /*delay_based_estimate=*/DataRate::KilobitsPerSec(2000), + /*in_alr=*/false); + LossBasedBweV2::Result result = + loss_based_bandwidth_estimator.GetLossBasedResult(); + ASSERT_EQ(result.state, LossBasedState::kDecreasing); + ASSERT_LT(result.bandwidth_estimate, DataRate::KilobitsPerSec(1000)); - // At 100% loss rate and high loss rate threshold to be 30%, cap the estimate - // to 
be the min bitrate. + // Eventually the estimator recovers to delay based state. + int feedback_count = 2; + while (feedback_count < 5 && + result.state != LossBasedState::kDelayBasedEstimate) { + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + /*feedback = */ CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + feedback_count++ * kObservationDurationLowerBound), + /*delay_based_estimate=*/DataRate::KilobitsPerSec(2000), + /*in_alr=*/false); + result = loss_based_bandwidth_estimator.GetLossBasedResult(); + } + EXPECT_EQ(result.state, LossBasedState::kDelayBasedEstimate); EXPECT_EQ( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, - DataRate::KilobitsPerSec(10)); + DataRate::KilobitsPerSec(1000)); } -TEST_P(LossBasedBweV2Test, EstimateRecoversAfterHighLoss) { +TEST_F(LossBasedBweV2Test, IncreaseUsingPaddingStateIfFieldTrial) { ExplicitKeyValueConfig key_value_config( - "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:1.1|1.0|0.9,AckedRateCandidate:false," - "ObservationWindowSize:2," - "DelayBasedCandidate:true,InstantUpperBoundBwBalance:100kbps," - "ObservationDurationLowerBound:200ms,HigherBwBiasFactor:1000," - "HigherLogBwBiasFactor:1000,LossThresholdOfHighBandwidthPreference:0." - "05,HighLossRateThreshold:0.3/"); + ShortObservationConfig("PaddingDuration:1000ms")); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - loss_based_bandwidth_estimator.SetMinMaxBitrate( - /*min_bitrate=*/DataRate::KilobitsPerSec(10), - /*max_bitrate=*/DataRate::KilobitsPerSec(1000000)); - DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); loss_based_bandwidth_estimator.SetBandwidthEstimate( - DataRate::KilobitsPerSec(600)); + DataRate::KilobitsPerSec(2500)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith50pPacketLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); - std::vector enough_feedback_100p_loss_1 = - CreatePacketResultsWith100pLossRate( - /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_100p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kIncreaseUsingPadding); +} - // Make sure that the estimate is set to min bitrate because of 100% loss - // rate. 
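IncreaseUsingPaddingStateIfFieldTrial above introduces the kIncreaseUsingPadding state that the PaddingDuration field trial enables. A sketch of how a consumer might interpret it (hypothetical code; the reaction to each state is an assumption, not taken from this diff): treat the loss-based estimate as the target as usual, and additionally request padding up to that rate while the estimator is probing for headroom via padding.

#include "api/units/data_rate.h"
#include "modules/congestion_controller/goog_cc/loss_based_bwe_v2.h"

namespace webrtc {
// Hypothetical consumer of LossBasedBweV2::Result, for illustration only.
void ApplyLossBasedResult(const LossBasedBweV2::Result& result,
                          DataRate& target_rate,
                          DataRate& padding_rate) {
  target_rate = result.bandwidth_estimate;
  // Pad up to the estimate only while the estimator asks to probe headroom
  // with padding; otherwise request no extra padding.
  padding_rate = result.state == LossBasedState::kIncreaseUsingPadding
                     ? result.bandwidth_estimate
                     : DataRate::Zero();
}
}  // namespace webrtc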
+TEST_F(LossBasedBweV2Test, BestCandidateResetsToUpperBoundInFieldTrial) { + ExplicitKeyValueConfig key_value_config( + ShortObservationConfig("PaddingDuration:1000ms,BoundBestCandidate:true")); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(2500)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith50pPacketLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/true); + LossBasedBweV2::Result result_after_loss = + loss_based_bandwidth_estimator.GetLossBasedResult(); + ASSERT_EQ(result_after_loss.state, LossBasedState::kDecreasing); + + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/true); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + 2 * kObservationDurationLowerBound), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/true); + // After a BWE decrease due to large loss, BWE is expected to ramp up slowly + // and follow the acked bitrate. + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kIncreaseUsingPadding); + EXPECT_NEAR(loss_based_bandwidth_estimator.GetLossBasedResult() + .bandwidth_estimate.kbps(), + result_after_loss.bandwidth_estimate.kbps(), 100); +} + +TEST_F(LossBasedBweV2Test, DecreaseToAckedCandidateIfPaddingInAlr) { + ExplicitKeyValueConfig key_value_config(ShortObservationConfig( + "PaddingDuration:1000ms," + // Set InstantUpperBoundBwBalance high to disable InstantUpperBound cap. + "InstantUpperBoundBwBalance:10000kbps")); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(1000)); + int feedback_id = 0; + while (loss_based_bandwidth_estimator.GetLossBasedResult().state != + LossBasedState::kDecreasing) { + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound * feedback_id), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/true); + feedback_id++; + } + + while (loss_based_bandwidth_estimator.GetLossBasedResult().state != + LossBasedState::kIncreaseUsingPadding) { + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound * feedback_id), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/true); + feedback_id++; + } + ASSERT_GT( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(900)); + + loss_based_bandwidth_estimator.SetAcknowledgedBitrate( + DataRate::KilobitsPerSec(100)); + // Padding is sent now, create some lost packets. 
+ loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound * feedback_id), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/true); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); EXPECT_EQ( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, - DataRate::KilobitsPerSec(10)); + DataRate::KilobitsPerSec(100)); +} + +TEST_F(LossBasedBweV2Test, DecreaseAfterPadding) { + ExplicitKeyValueConfig key_value_config(ShortObservationConfig( + "PaddingDuration:1000ms,BwRampupUpperBoundFactor:2.0")); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(2500)); + DataRate acknowledged_bitrate = DataRate::KilobitsPerSec(51); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acknowledged_bitrate); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith50pPacketLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + ASSERT_EQ( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + acknowledged_bitrate); + + acknowledged_bitrate = DataRate::KilobitsPerSec(26); + loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acknowledged_bitrate); + int feedback_id = 1; + while (loss_based_bandwidth_estimator.GetLossBasedResult().state != + LossBasedState::kIncreaseUsingPadding) { + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound * feedback_id), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + feedback_id++; + } + + const Timestamp estimate_increased = + Timestamp::Zero() + kObservationDurationLowerBound * feedback_id; + // The state is kIncreaseUsingPadding for a while without changing the + // estimate, which is limited by 2 * acked rate. + while (loss_based_bandwidth_estimator.GetLossBasedResult().state == + LossBasedState::kIncreaseUsingPadding) { + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound * feedback_id), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + feedback_id++; + } + + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + const Timestamp start_decreasing = + Timestamp::Zero() + kObservationDurationLowerBound * (feedback_id - 1); + EXPECT_EQ(start_decreasing - estimate_increased, TimeDelta::Seconds(1)); +} - // Create some feedbacks with 0 loss rate to simulate network recovering. 
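The numbers in DecreaseAfterPadding above are chosen so that the estimate is effectively pinned while padding: after the 50% loss the estimate equals the 51 kbps acknowledged bitrate, the acknowledged bitrate then drops to 26 kbps, and with BwRampupUpperBoundFactor:2.0 the ramp-up cap is 2 * 26 = 52 kbps, barely above the current estimate. The test then checks that the kIncreaseUsingPadding phase lasts exactly the configured PaddingDuration (1000 ms) before the state falls back to kDecreasing. Worked numbers, for reference:

// Worked numbers from DecreaseAfterPadding above.
constexpr int kEstimateAfterLossKbps = 51;    // equals the first acked bitrate
constexpr int kAckedWhilePaddingKbps = 26;
constexpr double kBwRampupUpperBoundFactor = 2.0;
constexpr double kRampupCapKbps =
    kAckedWhilePaddingKbps * kBwRampupUpperBoundFactor;  // 52 kbps
// 52 kbps is barely above 51 kbps, so the estimate cannot move meaningfully
// while the state stays kIncreaseUsingPadding; the test asserts that this
// phase lasts exactly PaddingDuration, i.e.
// start_decreasing - estimate_increased == TimeDelta::Seconds(1).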
- std::vector enough_feedback_0p_loss_1 = +TEST_F(LossBasedBweV2Test, IncreaseEstimateIfNotHold) { + ExplicitKeyValueConfig key_value_config( + ShortObservationConfig("HoldDurationFactor:0")); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(2500)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith50pPacketLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + DataRate estimate = + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; + + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( CreatePacketResultsWithReceivedPackets( /*first_packet_timestamp=*/Timestamp::Zero() + - kObservationDurationLowerBound); + kObservationDurationLowerBound), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kIncreasing); + EXPECT_GT( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + estimate); +} + +TEST_F(LossBasedBweV2Test, IncreaseEstimateAfterHoldDuration) { + ExplicitKeyValueConfig key_value_config( + ShortObservationConfig("HoldDurationFactor:10")); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(2500)); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_0p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + CreatePacketResultsWith50pPacketLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + DataRate estimate = + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; - std::vector enough_feedback_0p_loss_2 = + // During the hold duration, e.g. first 300ms, the estimate cannot increase. + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( CreatePacketResultsWithReceivedPackets( /*first_packet_timestamp=*/Timestamp::Zero() + - kObservationDurationLowerBound * 2); + kObservationDurationLowerBound), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + EXPECT_EQ( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + estimate); + + // After the hold duration, the estimate can increase. 
loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_0p_loss_2, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound * 2), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kIncreasing); + EXPECT_GE( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + estimate); - // The estimate increases as network recovers. - EXPECT_GT( + // Get another 50p packet loss. + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith50pPacketLossRate( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound * 3), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + DataRate estimate_at_hold = + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; + + // In the hold duration, e.g. next 3s, the estimate cannot increase above the + // hold rate. Get some lost packets to get lower estimate than the HOLD rate. + for (int i = 4; i <= 6; ++i) { + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound * i), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + EXPECT_LT( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + estimate_at_hold); + } + + int feedback_id = 7; + while (loss_based_bandwidth_estimator.GetLossBasedResult().state != + LossBasedState::kIncreasing) { + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound * feedback_id), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + if (loss_based_bandwidth_estimator.GetLossBasedResult().state == + LossBasedState::kDecreasing) { + // In the hold duration, the estimate can not go higher than estimate at + // hold. + EXPECT_LE(loss_based_bandwidth_estimator.GetLossBasedResult() + .bandwidth_estimate, + estimate_at_hold); + } else if (loss_based_bandwidth_estimator.GetLossBasedResult().state == + LossBasedState::kIncreasing) { + // After the hold duration, the estimate can increase again. 
+ EXPECT_GT(loss_based_bandwidth_estimator.GetLossBasedResult() + .bandwidth_estimate, + estimate_at_hold); + } + feedback_id++; + } +} + +TEST_F(LossBasedBweV2Test, HoldRateNotLowerThanAckedRate) { + ExplicitKeyValueConfig key_value_config(ShortObservationConfig( + "HoldDurationFactor:10,LowerBoundByAckedRateFactor:1.0")); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(2500)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith50pPacketLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + + // During the hold duration, hold rate is not lower than the acked rate. + loss_based_bandwidth_estimator.SetAcknowledgedBitrate( + DataRate::KilobitsPerSec(1000)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith50pPacketLossRate( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + EXPECT_EQ( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, - DataRate::KilobitsPerSec(10)); + DataRate::KilobitsPerSec(1000)); } -TEST_P(LossBasedBweV2Test, EstimateIsNotHigherThanMaxBitrate) { +TEST_F(LossBasedBweV2Test, EstimateNotLowerThanAckedRate) { ExplicitKeyValueConfig key_value_config( - Config(/*enabled=*/true, /*valid=*/true, - /*trendline_integration_enabled=*/GetParam())); + ShortObservationConfig("LowerBoundByAckedRateFactor:1.0")); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - loss_based_bandwidth_estimator.SetMinMaxBitrate( - /*min_bitrate=*/DataRate::KilobitsPerSec(10), - /*max_bitrate=*/DataRate::KilobitsPerSec(1000)); loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(2500)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + ASSERT_LT( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(1000)); - std::vector enough_feedback = + + loss_based_bandwidth_estimator.SetAcknowledgedBitrate( + DataRate::KilobitsPerSec(1000)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + EXPECT_EQ( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + DataRate::KilobitsPerSec(1000)); + + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( CreatePacketResultsWithReceivedPackets( - /*first_packet_timestamp=*/Timestamp::Zero()); + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound * 2), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback, /*delay_based_estimate=*/DataRate::PlusInfinity(), - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + 
CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound * 3), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); - EXPECT_LE( + // Verify that the estimate recovers from the acked rate. + EXPECT_GT( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, DataRate::KilobitsPerSec(1000)); } -TEST_P(LossBasedBweV2Test, NotBackOffToAckedRateInAlr) { +TEST_F(LossBasedBweV2Test, EndHoldDurationIfDelayBasedEstimateWorks) { ExplicitKeyValueConfig key_value_config( - "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:1.1|1.0|0.9,AckedRateCandidate:true," - "ObservationWindowSize:2," - "DelayBasedCandidate:true,InstantUpperBoundBwBalance:100kbps," - "ObservationDurationLowerBound:200ms,NotUseAckedRateInAlr:true/"); + ShortObservationConfig("HoldDurationFactor:3")); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - loss_based_bandwidth_estimator.SetMinMaxBitrate( - /*min_bitrate=*/DataRate::KilobitsPerSec(10), - /*max_bitrate=*/DataRate::KilobitsPerSec(1000000)); - DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); loss_based_bandwidth_estimator.SetBandwidthEstimate( - DataRate::KilobitsPerSec(600)); + DataRate::KilobitsPerSec(2500)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith50pPacketLossRate( + /*first_packet_timestamp=*/Timestamp::Zero()), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + ASSERT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + DataRate estimate = + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate; - DataRate acked_rate = DataRate::KilobitsPerSec(100); - loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); - std::vector enough_feedback_100p_loss_1 = - CreatePacketResultsWith100pLossRate( - /*first_packet_timestamp=*/Timestamp::Zero()); loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_100p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/true); + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound), + /*delay_based_estimate=*/estimate + DataRate::KilobitsPerSec(10), + /*in_alr=*/false); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDelayBasedEstimate); + EXPECT_EQ( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + estimate + DataRate::KilobitsPerSec(10)); +} - // Make sure that the estimate decreases but higher than acked rate. - EXPECT_GT( +TEST_F(LossBasedBweV2Test, UseByteLossRate) { + ExplicitKeyValueConfig key_value_config( + ShortObservationConfig("UseByteLossRate:true")); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + loss_based_bandwidth_estimator.SetBandwidthEstimate( + DataRate::KilobitsPerSec(500)); + // Create packet feedback having 10% packet loss but more than 50% byte loss. 
+ loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith10pPacketLossRate( + /*first_packet_timestamp=*/Timestamp::Zero(), + /*lost_packet_size=*/DataSize::Bytes(kPacketSize * 20)), + /*delay_based_estimate=*/DataRate::PlusInfinity(), + /*in_alr=*/false); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + // The estimate is bounded by the instant upper bound due to high loss. + EXPECT_LT( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, - acked_rate); + DataRate::KilobitsPerSec(150)); +} + +TEST_F(LossBasedBweV2Test, UseByteLossRateIgnoreLossSpike) { + ExplicitKeyValueConfig key_value_config( + "WebRTC-Bwe-LossBasedBweV2/" + "UseByteLossRate:true,ObservationWindowSize:5/"); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + const DataRate kDelayBasedEstimate = DataRate::KilobitsPerSec(500); + loss_based_bandwidth_estimator.SetBandwidthEstimate(kDelayBasedEstimate); + + // Fill the observation window. + for (int i = 0; i < 5; ++i) { + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + i * kObservationDurationLowerBound), + kDelayBasedEstimate, + /*in_alr=*/false); + } + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero() + + 5 * kObservationDurationLowerBound), + kDelayBasedEstimate, + /*in_alr=*/false); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + 6 * kObservationDurationLowerBound), + kDelayBasedEstimate, + /*in_alr=*/false); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDelayBasedEstimate); + EXPECT_EQ( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + kDelayBasedEstimate); + // But if more loss happen in a new observation, BWE back down. + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero() + + 7 * kObservationDurationLowerBound), + kDelayBasedEstimate, + /*in_alr=*/false); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); EXPECT_LT( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, - DataRate::KilobitsPerSec(600)); + kDelayBasedEstimate); } -TEST_P(LossBasedBweV2Test, BackOffToAckedRateIfNotInAlr) { +TEST_F(LossBasedBweV2Test, UseByteLossRateDoesNotIgnoreLossSpikeOnSendBurst) { ExplicitKeyValueConfig key_value_config( "WebRTC-Bwe-LossBasedBweV2/" - "Enabled:true,CandidateFactors:1.1|1.0|0.9,AckedRateCandidate:true," - "ObservationWindowSize:2," - "DelayBasedCandidate:true,InstantUpperBoundBwBalance:100kbps," - "ObservationDurationLowerBound:200ms,NotUseAckedRateInAlr:true/"); + "UseByteLossRate:true,ObservationWindowSize:5/"); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + const DataRate kDelayBasedEstimate = DataRate::KilobitsPerSec(500); + loss_based_bandwidth_estimator.SetBandwidthEstimate(kDelayBasedEstimate); + + // Fill the observation window. 
+ for (int i = 0; i < 5; ++i) { + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + i * kObservationDurationLowerBound), + kDelayBasedEstimate, + /*in_alr=*/false); + } + + // If the loss happens when increasing sending rate, then + // the BWE should back down. + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWith100pLossRate( + /*first_packet_timestamp=*/Timestamp::Zero() + + 5 * kObservationDurationLowerBound, + /*num_packets=*/5), + kDelayBasedEstimate, + /*in_alr=*/false); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + EXPECT_LE( + loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, + kDelayBasedEstimate); +} + +TEST_F(LossBasedBweV2Test, PaceAtLossBasedEstimate) { + ExplicitKeyValueConfig key_value_config(ShortObservationConfig( + "PaceAtLossBasedEstimate:true,PaddingDuration:1000ms")); LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); - loss_based_bandwidth_estimator.SetMinMaxBitrate( - /*min_bitrate=*/DataRate::KilobitsPerSec(10), - /*max_bitrate=*/DataRate::KilobitsPerSec(1000000)); - DataRate delay_based_estimate = DataRate::KilobitsPerSec(5000); loss_based_bandwidth_estimator.SetBandwidthEstimate( - DataRate::KilobitsPerSec(600)); + DataRate::KilobitsPerSec(1000)); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero()), + /*delay_based_estimate=*/DataRate::KilobitsPerSec(1000), + /*in_alr=*/false); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDelayBasedEstimate); + EXPECT_FALSE(loss_based_bandwidth_estimator.PaceAtLossBasedEstimate()); - DataRate acked_rate = DataRate::KilobitsPerSec(100); - loss_based_bandwidth_estimator.SetAcknowledgedBitrate(acked_rate); - std::vector enough_feedback_100p_loss_1 = + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( CreatePacketResultsWith100pLossRate( - /*first_packet_timestamp=*/Timestamp::Zero()); + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound), + /*delay_based_estimate=*/DataRate::KilobitsPerSec(1000), + /*in_alr=*/false); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kDecreasing); + EXPECT_TRUE(loss_based_bandwidth_estimator.PaceAtLossBasedEstimate()); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( - enough_feedback_100p_loss_1, delay_based_estimate, - BandwidthUsage::kBwNormal, /*probe_estimate=*/absl::nullopt, - /*upper_link_capacity=*/DataRate::PlusInfinity(), /*in_alr=*/false); + CreatePacketResultsWithReceivedPackets( + /*first_packet_timestamp=*/Timestamp::Zero() + + kObservationDurationLowerBound * 2), + /*delay_based_estimate=*/DataRate::KilobitsPerSec(1000), + /*in_alr=*/false); + EXPECT_EQ(loss_based_bandwidth_estimator.GetLossBasedResult().state, + LossBasedState::kIncreaseUsingPadding); + EXPECT_TRUE(loss_based_bandwidth_estimator.PaceAtLossBasedEstimate()); +} - // Make sure that the estimate decreases but higher than acked rate. 
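The UseByteLossRate tests above switch the loss statistic from packet count to bytes: losing one oversized packet is only a small fraction of the packets but a large fraction of the bytes, which is what pushes the estimate down. A standalone sketch of the two ratios (illustrative only; it assumes ten packets with one lost, the simplest 10% case, and uses 1000 bytes as a placeholder for kPacketSize while the 20x lost-packet size mirrors the test):

#include <cstdio>

int main() {
  const int total_packets = 10;
  const int lost_packets = 1;  // 10% packet loss, as in the test
  const double base_packet_bytes = 1000.0;  // placeholder for kPacketSize
  const double lost_packet_bytes = 20 * base_packet_bytes;  // kPacketSize * 20
  const double received_bytes =
      (total_packets - lost_packets) * base_packet_bytes;
  const double lost_bytes = lost_packets * lost_packet_bytes;
  std::printf("packet loss rate: %.1f%%\n",
              100.0 * lost_packets / total_packets);
  std::printf("byte loss rate:   %.1f%%\n",
              100.0 * lost_bytes / (lost_bytes + received_bytes));
  return 0;
}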
+TEST_F(LossBasedBweV2Test, + EstimateDoesNotBackOffDueToPacketReorderingBetweenFeedback) { + ExplicitKeyValueConfig key_value_config(ShortObservationConfig("")); + LossBasedBweV2 loss_based_bandwidth_estimator(&key_value_config); + const DataRate kStartBitrate = DataRate::KilobitsPerSec(2500); + loss_based_bandwidth_estimator.SetBandwidthEstimate(kStartBitrate); + + std::vector feedback_1(3); + feedback_1[0].sent_packet.sequence_number = 1; + feedback_1[0].sent_packet.size = DataSize::Bytes(kPacketSize); + feedback_1[0].sent_packet.send_time = Timestamp::Zero(); + feedback_1[0].receive_time = + feedback_1[0].sent_packet.send_time + TimeDelta::Millis(10); + feedback_1[1].sent_packet.sequence_number = 2; + feedback_1[1].sent_packet.size = DataSize::Bytes(kPacketSize); + feedback_1[1].sent_packet.send_time = Timestamp::Zero(); + // Lost or reordered + feedback_1[1].receive_time = Timestamp::PlusInfinity(); + + feedback_1[2].sent_packet.sequence_number = 3; + feedback_1[2].sent_packet.size = DataSize::Bytes(kPacketSize); + feedback_1[2].sent_packet.send_time = Timestamp::Zero(); + feedback_1[2].receive_time = + feedback_1[2].sent_packet.send_time + TimeDelta::Millis(10); + + std::vector feedback_2(3); + feedback_2[0].sent_packet.sequence_number = 2; + feedback_2[0].sent_packet.size = DataSize::Bytes(kPacketSize); + feedback_2[0].sent_packet.send_time = Timestamp::Zero(); + feedback_2[0].receive_time = + feedback_1[0].sent_packet.send_time + TimeDelta::Millis(10); + feedback_2[1].sent_packet.sequence_number = 4; + feedback_2[1].sent_packet.size = DataSize::Bytes(kPacketSize); + feedback_2[1].sent_packet.send_time = + Timestamp::Zero() + kObservationDurationLowerBound; + feedback_2[1].receive_time = + feedback_2[1].sent_packet.send_time + TimeDelta::Millis(10); + feedback_2[2].sent_packet.sequence_number = 5; + feedback_2[2].sent_packet.size = DataSize::Bytes(kPacketSize); + feedback_2[2].sent_packet.send_time = Timestamp::Zero(); + feedback_2[2].receive_time = + feedback_2[2].sent_packet.send_time + TimeDelta::Millis(10); + + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + feedback_1, + /*delay_based_estimate=*/kStartBitrate, + /*in_alr=*/false); + loss_based_bandwidth_estimator.UpdateBandwidthEstimate( + feedback_2, + /*delay_based_estimate=*/kStartBitrate, + /*in_alr=*/false); EXPECT_EQ( loss_based_bandwidth_estimator.GetLossBasedResult().bandwidth_estimate, - acked_rate); + kStartBitrate); } -INSTANTIATE_TEST_SUITE_P(LossBasedBweV2Tests, - LossBasedBweV2Test, - ::testing::Bool()); - } // namespace } // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/probe_bitrate_estimator.cc b/modules/congestion_controller/goog_cc/probe_bitrate_estimator.cc index a94f653157..62b6a6bca9 100644 --- a/modules/congestion_controller/goog_cc/probe_bitrate_estimator.cc +++ b/modules/congestion_controller/goog_cc/probe_bitrate_estimator.cc @@ -12,13 +12,18 @@ #include #include +#include #include "api/rtc_event_log/rtc_event_log.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h" #include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_conversions.h" namespace webrtc { namespace { @@ -59,7 +64,7 @@ ProbeBitrateEstimator::ProbeBitrateEstimator(RtcEventLog* event_log) 
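The reordering test above reports packet 2 as missing in the first transport feedback and as received in the second; counted by sequence number there is no real loss, so the estimate is expected to stay at kStartBitrate. A standalone sketch of that bookkeeping (illustrative only, not the estimator's actual implementation):

#include <cstdio>
#include <map>

int main() {
  // Sequence number -> whether the packet was ever reported received.
  std::map<int, bool> received;
  // feedback_1: packets 1 and 3 received, packet 2 missing (lost or reordered).
  received[1] = true;
  received[2] = false;
  received[3] = true;
  // feedback_2: packet 2 arrives late, together with packets 4 and 5.
  received[2] = true;
  received[4] = true;
  received[5] = true;
  int lost = 0;
  for (const auto& entry : received) {
    if (!entry.second) ++lost;
  }
  std::printf("packets lost across both feedbacks: %d of %zu\n", lost,
              received.size());
  return 0;
}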
ProbeBitrateEstimator::~ProbeBitrateEstimator() = default; -absl::optional ProbeBitrateEstimator::HandleProbeAndEstimateBitrate( +std::optional ProbeBitrateEstimator::HandleProbeAndEstimateBitrate( const PacketResult& packet_feedback) { int cluster_id = packet_feedback.sent_packet.pacing_info.probe_cluster_id; RTC_DCHECK_NE(cluster_id, PacedPacketInfo::kNotAProbe); @@ -98,7 +103,7 @@ absl::optional ProbeBitrateEstimator::HandleProbeAndEstimateBitrate( packet_feedback.sent_packet.pacing_info.probe_cluster_min_bytes) * kMinReceivedBytesRatio; if (cluster->num_probes < min_probes || cluster->size_total < min_size) - return absl::nullopt; + return std::nullopt; TimeDelta send_interval = cluster->last_send - cluster->first_send; TimeDelta receive_interval = cluster->last_receive - cluster->first_receive; @@ -117,7 +122,7 @@ absl::optional ProbeBitrateEstimator::HandleProbeAndEstimateBitrate( event_log_->Log(std::make_unique( cluster_id, ProbeFailureReason::kInvalidSendReceiveInterval)); } - return absl::nullopt; + return std::nullopt; } // Since the `send_interval` does not include the time it takes to actually // send the last packet the size of the last sent packet should not be @@ -154,7 +159,7 @@ absl::optional ProbeBitrateEstimator::HandleProbeAndEstimateBitrate( event_log_->Log(std::make_unique( cluster_id, ProbeFailureReason::kInvalidSendReceiveRatio)); } - return absl::nullopt; + return std::nullopt; } RTC_LOG(LS_INFO) << "Probing successful" " [cluster id: " @@ -182,9 +187,9 @@ absl::optional ProbeBitrateEstimator::HandleProbeAndEstimateBitrate( return estimated_data_rate_; } -absl::optional +std::optional ProbeBitrateEstimator::FetchAndResetLastEstimatedBitrate() { - absl::optional estimated_data_rate = estimated_data_rate_; + std::optional estimated_data_rate = estimated_data_rate_; estimated_data_rate_.reset(); return estimated_data_rate; } diff --git a/modules/congestion_controller/goog_cc/probe_bitrate_estimator.h b/modules/congestion_controller/goog_cc/probe_bitrate_estimator.h index d5a523b7f3..fdb554003e 100644 --- a/modules/congestion_controller/goog_cc/probe_bitrate_estimator.h +++ b/modules/congestion_controller/goog_cc/probe_bitrate_estimator.h @@ -11,12 +11,13 @@ #ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_PROBE_BITRATE_ESTIMATOR_H_ #define MODULES_CONGESTION_CONTROLLER_GOOG_CC_PROBE_BITRATE_ESTIMATOR_H_ -#include #include +#include -#include "absl/types/optional.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/timestamp.h" namespace webrtc { class RtcEventLog; @@ -28,10 +29,10 @@ class ProbeBitrateEstimator { // Should be called for every probe packet we receive feedback about. // Returns the estimated bitrate if the probe completes a valid cluster. 
- absl::optional HandleProbeAndEstimateBitrate( + std::optional HandleProbeAndEstimateBitrate( const PacketResult& packet_feedback); - absl::optional FetchAndResetLastEstimatedBitrate(); + std::optional FetchAndResetLastEstimatedBitrate(); private: struct AggregatedCluster { @@ -50,7 +51,7 @@ class ProbeBitrateEstimator { std::map clusters_; RtcEventLog* const event_log_; - absl::optional estimated_data_rate_; + std::optional estimated_data_rate_; }; } // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/probe_bitrate_estimator_unittest.cc b/modules/congestion_controller/goog_cc/probe_bitrate_estimator_unittest.cc index 6b4146d2bf..a8113c9722 100644 --- a/modules/congestion_controller/goog_cc/probe_bitrate_estimator_unittest.cc +++ b/modules/congestion_controller/goog_cc/probe_bitrate_estimator_unittest.cc @@ -12,7 +12,14 @@ #include +#include +#include + #include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "test/gtest.h" namespace webrtc { @@ -48,7 +55,7 @@ class TestProbeBitrateEstimator : public ::testing::Test { } protected: - absl::optional measured_data_rate_; + std::optional measured_data_rate_; ProbeBitrateEstimator probe_bitrate_estimator_; }; diff --git a/modules/congestion_controller/goog_cc/probe_controller.cc b/modules/congestion_controller/goog_cc/probe_controller.cc index 8fde515934..bf8253ded5 100644 --- a/modules/congestion_controller/goog_cc/probe_controller.cc +++ b/modules/congestion_controller/goog_cc/probe_controller.cc @@ -11,18 +11,23 @@ #include "modules/congestion_controller/goog_cc/probe_controller.h" #include +#include #include #include -#include +#include +#include #include "absl/strings/match.h" -#include "absl/types/optional.h" +#include "api/field_trials_view.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" #include "system_wrappers/include/metrics.h" @@ -83,6 +88,11 @@ ProbeControllerConfig::ProbeControllerConfig( second_exponential_probe_scale("p2", 6.0), further_exponential_probe_scale("step_size", 2), further_probe_threshold("further_probe_threshold", 0.7), + abort_further_probe_if_max_lower_than_current("abort_further", false), + repeated_initial_probing_time_period("initial_probing", + TimeDelta::Seconds(5)), + initial_probe_duration("initial_probe_duration", TimeDelta::Millis(100)), + initial_min_probe_delta("initial_min_probe_delta", TimeDelta::Millis(20)), alr_probing_interval("alr_interval", TimeDelta::Seconds(5)), alr_probe_scale("alr_scale", 2), network_state_estimate_probing_interval("network_state_interval", @@ -96,42 +106,46 @@ ProbeControllerConfig::ProbeControllerConfig( network_state_probe_scale("network_state_scale", 1.0), network_state_probe_duration("network_state_probe_duration", TimeDelta::Millis(15)), - + network_state_min_probe_delta("network_state_min_probe_delta", + TimeDelta::Millis(20)), probe_on_max_allocated_bitrate_change("probe_max_allocation", true), first_allocation_probe_scale("alloc_p1", 1), second_allocation_probe_scale("alloc_p2", 2), - allocation_allow_further_probing("alloc_probe_further", false), - 
allocation_probe_max("alloc_probe_max", DataRate::PlusInfinity()), + allocation_probe_limit_by_current_scale("alloc_current_bwe_limit", 2), min_probe_packets_sent("min_probe_packets_sent", 5), min_probe_duration("min_probe_duration", TimeDelta::Millis(15)), - limit_probe_target_rate_to_loss_bwe("limit_probe_target_rate_to_loss_bwe", - false), + min_probe_delta("min_probe_delta", TimeDelta::Millis(2)), loss_limited_probe_scale("loss_limited_scale", 1.5), skip_if_estimate_larger_than_fraction_of_max( "skip_if_est_larger_than_fraction_of_max", 0.0), - not_probe_if_delay_increased("not_probe_if_delay_increased", false) { + skip_probe_max_allocated_scale("skip_max_allocated_scale", 1.0) { ParseFieldTrial({&first_exponential_probe_scale, &second_exponential_probe_scale, &further_exponential_probe_scale, &further_probe_threshold, + &abort_further_probe_if_max_lower_than_current, + &repeated_initial_probing_time_period, + &initial_probe_duration, &alr_probing_interval, &alr_probe_scale, &probe_on_max_allocated_bitrate_change, &first_allocation_probe_scale, &second_allocation_probe_scale, - &allocation_allow_further_probing, + &allocation_probe_limit_by_current_scale, &min_probe_duration, + &min_probe_delta, + &initial_min_probe_delta, &network_state_estimate_probing_interval, + &network_state_min_probe_delta, &probe_if_estimate_lower_than_network_state_estimate_ratio, &estimate_lower_than_network_state_estimate_probing_interval, &network_state_probe_scale, &network_state_probe_duration, &min_probe_packets_sent, - &limit_probe_target_rate_to_loss_bwe, &loss_limited_probe_scale, &skip_if_estimate_larger_than_fraction_of_max, - ¬_probe_if_delay_increased}, + &skip_probe_max_allocated_scale}, key_value_config->Lookup("WebRTC-Bwe-ProbingConfiguration")); // Specialized keys overriding subsets of WebRTC-Bwe-ProbingConfiguration @@ -145,7 +159,7 @@ ProbeControllerConfig::ProbeControllerConfig( key_value_config->Lookup("WebRTC-Bwe-AlrProbing")); ParseFieldTrial( {&first_allocation_probe_scale, &second_allocation_probe_scale, - &allocation_allow_further_probing, &allocation_probe_max}, + &allocation_probe_limit_by_current_scale}, key_value_config->Lookup("WebRTC-Bwe-AllocationProbing")); ParseFieldTrial({&min_probe_packets_sent, &min_probe_duration}, key_value_config->Lookup("WebRTC-Bwe-ProbingBehavior")); @@ -157,7 +171,7 @@ ProbeControllerConfig::~ProbeControllerConfig() = default; ProbeController::ProbeController(const FieldTrialsView* key_value_config, RtcEventLog* event_log) - : network_available_(true), + : network_available_(false), enable_periodic_alr_probing_(false), in_rapid_recovery_experiment_(absl::StartsWith( key_value_config->Lookup(kBweRapidRecoveryExperiment), @@ -213,7 +227,6 @@ std::vector ProbeController::OnMaxTotalAllocatedBitrate( Timestamp at_time) { const bool in_alr = alr_start_time_.has_value(); const bool allow_allocation_probe = in_alr; - if (config_.probe_on_max_allocated_bitrate_change && state_ == State::kProbingComplete && max_total_allocated_bitrate != max_total_allocated_bitrate_ && @@ -227,20 +240,34 @@ std::vector ProbeController::OnMaxTotalAllocatedBitrate( DataRate first_probe_rate = max_total_allocated_bitrate * config_.first_allocation_probe_scale.Value(); - DataRate probe_cap = config_.allocation_probe_max.Get(); - first_probe_rate = std::min(first_probe_rate, probe_cap); + const DataRate current_bwe_limit = + config_.allocation_probe_limit_by_current_scale.Get() * + estimated_bitrate_; + bool limited_by_current_bwe = current_bwe_limit < first_probe_rate; + if 
(limited_by_current_bwe) { + first_probe_rate = current_bwe_limit; + } + std::vector probes = {first_probe_rate}; - if (config_.second_allocation_probe_scale) { + if (!limited_by_current_bwe && config_.second_allocation_probe_scale) { DataRate second_probe_rate = max_total_allocated_bitrate * config_.second_allocation_probe_scale.Value(); - second_probe_rate = std::min(second_probe_rate, probe_cap); + limited_by_current_bwe = current_bwe_limit < second_probe_rate; + if (limited_by_current_bwe) { + second_probe_rate = current_bwe_limit; + } if (second_probe_rate > first_probe_rate) probes.push_back(second_probe_rate); } - return InitiateProbing(at_time, probes, - config_.allocation_allow_further_probing.Get()); + bool allow_further_probing = limited_by_current_bwe; + + return InitiateProbing(at_time, probes, allow_further_probing); + } + if (!max_total_allocated_bitrate.IsZero()) { + last_allowed_repeated_initial_probe_ = at_time; } + max_total_allocated_bitrate_ = max_total_allocated_bitrate; return std::vector(); } @@ -259,6 +286,21 @@ std::vector ProbeController::OnNetworkAvailability( return std::vector(); } +void ProbeController::UpdateState(State new_state) { + switch (new_state) { + case State::kInit: + state_ = State::kInit; + break; + case State::kWaitingForProbingResult: + state_ = State::kWaitingForProbingResult; + break; + case State::kProbingComplete: + state_ = State::kProbingComplete; + min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); + break; + } +} + std::vector ProbeController::InitiateExponentialProbing( Timestamp at_time) { RTC_DCHECK(network_available_); @@ -274,6 +316,15 @@ std::vector ProbeController::InitiateExponentialProbing( probes.push_back(config_.second_exponential_probe_scale.Value() * start_bitrate_); } + if (repeated_initial_probing_enabled_ && + max_total_allocated_bitrate_.IsZero()) { + last_allowed_repeated_initial_probe_ = + at_time + config_.repeated_initial_probing_time_period; + RTC_LOG(LS_INFO) << "Repeated initial probing enabled, last allowed probe: " + << last_allowed_repeated_initial_probe_ + << " now: " << at_time; + } + return InitiateProbing(at_time, probes, true); } @@ -290,10 +341,18 @@ std::vector ProbeController::SetEstimatedBitrate( if (state_ == State::kWaitingForProbingResult) { // Continue probing if probing results indicate channel has greater - // capacity. + // capacity unless we already reached the needed bitrate. + if (config_.abort_further_probe_if_max_lower_than_current && + (bitrate > max_bitrate_ || + (!max_total_allocated_bitrate_.IsZero() && + bitrate > 2 * max_total_allocated_bitrate_))) { + // No need to continue probing. + min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); + } DataRate network_state_estimate_probe_further_limit = config_.network_state_estimate_probing_interval->IsFinite() && - network_estimate_ + network_estimate_ && + network_estimate_->link_capacity_upper.IsFinite() ? 
network_estimate_->link_capacity_upper * config_.further_probe_threshold : DataRate::PlusInfinity(); @@ -315,12 +374,16 @@ void ProbeController::EnablePeriodicAlrProbing(bool enable) { enable_periodic_alr_probing_ = enable; } +void ProbeController::EnableRepeatedInitialProbing(bool enable) { + repeated_initial_probing_enabled_ = enable; +} + void ProbeController::SetAlrStartTimeMs( - absl::optional alr_start_time_ms) { + std::optional alr_start_time_ms) { if (alr_start_time_ms) { alr_start_time_ = Timestamp::Millis(*alr_start_time_ms); } else { - alr_start_time_ = absl::nullopt; + alr_start_time_ = std::nullopt; } } void ProbeController::SetAlrEndedTimeMs(int64_t alr_end_time_ms) { @@ -374,7 +437,7 @@ void ProbeController::Reset(Timestamp at_time) { min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); time_last_probing_initiated_ = Timestamp::Zero(); estimated_bitrate_ = DataRate::Zero(); - network_estimate_ = absl::nullopt; + network_estimate_ = std::nullopt; start_bitrate_ = DataRate::Zero(); max_bitrate_ = kDefaultMaxProbingBitrate; Timestamp now = at_time; @@ -382,7 +445,6 @@ void ProbeController::Reset(Timestamp at_time) { alr_end_time_.reset(); time_of_last_large_drop_ = now; bitrate_before_last_large_drop_ = DataRate::Zero(); - max_total_allocated_bitrate_ = DataRate::Zero(); } bool ProbeController::TimeForAlrProbe(Timestamp at_time) const { @@ -427,18 +489,34 @@ bool ProbeController::TimeForNetworkStateProbe(Timestamp at_time) const { return false; } +bool ProbeController::TimeForNextRepeatedInitialProbe(Timestamp at_time) const { + if (state_ != State::kWaitingForProbingResult && + last_allowed_repeated_initial_probe_ > at_time) { + Timestamp next_probe_time = + time_last_probing_initiated_ + kMaxWaitingTimeForProbingResult; + if (at_time >= next_probe_time) { + return true; + } + } + return false; +} + std::vector ProbeController::Process(Timestamp at_time) { if (at_time - time_last_probing_initiated_ > kMaxWaitingTimeForProbingResult) { if (state_ == State::kWaitingForProbingResult) { RTC_LOG(LS_INFO) << "kWaitingForProbingResult: timeout"; - state_ = State::kProbingComplete; - min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); + UpdateState(State::kProbingComplete); } } if (estimated_bitrate_.IsZero() || state_ != State::kProbingComplete) { return {}; } + if (TimeForNextRepeatedInitialProbe(at_time)) { + return InitiateProbing( + at_time, {estimated_bitrate_ * config_.first_exponential_probe_scale}, + true); + } if (TimeForAlrProbe(at_time) || TimeForNetworkStateProbe(at_time)) { return InitiateProbing( at_time, {estimated_bitrate_ * config_.alr_probe_scale}, true); @@ -446,6 +524,31 @@ std::vector ProbeController::Process(Timestamp at_time) { return std::vector(); } +ProbeClusterConfig ProbeController::CreateProbeClusterConfig(Timestamp at_time, + DataRate bitrate) { + ProbeClusterConfig config; + config.at_time = at_time; + config.target_data_rate = bitrate; + if (network_estimate_ && + config_.network_state_estimate_probing_interval->IsFinite() && + network_estimate_->link_capacity_upper.IsFinite() && + network_estimate_->link_capacity_upper >= bitrate) { + config.target_duration = config_.network_state_probe_duration; + config.min_probe_delta = config_.network_state_min_probe_delta; + } else if (at_time < last_allowed_repeated_initial_probe_) { + config.target_duration = config_.initial_probe_duration; + config.min_probe_delta = config_.initial_min_probe_delta; + } else { + config.target_duration = config_.min_probe_duration; + config.min_probe_delta = 
config_.min_probe_delta; + } + config.target_probe_count = config_.min_probe_packets_sent; + config.id = next_probe_cluster_id_; + next_probe_cluster_id_++; + MaybeLogProbeClusterCreated(event_log_, config); + return config; +} + std::vector ProbeController::InitiateProbing( Timestamp now, std::vector bitrates_to_probe, @@ -457,11 +560,12 @@ std::vector ProbeController::InitiateProbing( DataRate max_probe_rate = max_total_allocated_bitrate_.IsZero() ? max_bitrate_ - : std::min(max_total_allocated_bitrate_, max_bitrate_); + : std::min(config_.skip_probe_max_allocated_scale * + max_total_allocated_bitrate_, + max_bitrate_); if (std::min(network_estimate, estimated_bitrate_) > config_.skip_if_estimate_larger_than_fraction_of_max * max_probe_rate) { - state_ = State::kProbingComplete; - min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); + UpdateState(State::kProbingComplete); return {}; } } @@ -478,30 +582,22 @@ std::vector ProbeController::InitiateProbing( std::min(max_probe_bitrate, max_total_allocated_bitrate_ * 2); } - DataRate estimate_capped_bitrate = DataRate::PlusInfinity(); - if (config_.limit_probe_target_rate_to_loss_bwe) { - switch (bandwidth_limited_cause_) { - case BandwidthLimitedCause::kLossLimitedBweDecreasing: - // If bandwidth estimate is decreasing because of packet loss, do not - // send probes. - return {}; - case BandwidthLimitedCause::kLossLimitedBweIncreasing: - estimate_capped_bitrate = - std::min(max_probe_bitrate, - estimated_bitrate_ * config_.loss_limited_probe_scale); - break; - case BandwidthLimitedCause::kDelayBasedLimited: - break; - default: - break; - } - } - if (config_.not_probe_if_delay_increased && - (bandwidth_limited_cause_ == - BandwidthLimitedCause::kDelayBasedLimitedDelayIncreased || - bandwidth_limited_cause_ == - BandwidthLimitedCause::kRttBasedBackOffHighRtt)) { - return {}; + switch (bandwidth_limited_cause_) { + case BandwidthLimitedCause::kRttBasedBackOffHighRtt: + case BandwidthLimitedCause::kDelayBasedLimitedDelayIncreased: + case BandwidthLimitedCause::kLossLimitedBwe: + RTC_LOG(LS_INFO) << "Not sending probe in bandwidth limited state. 
" + << static_cast(bandwidth_limited_cause_); + return {}; + case BandwidthLimitedCause::kLossLimitedBweIncreasing: + max_probe_bitrate = + std::min(max_probe_bitrate, + estimated_bitrate_ * config_.loss_limited_probe_scale); + break; + case BandwidthLimitedCause::kDelayBasedLimited: + break; + default: + break; } if (config_.network_state_estimate_probing_interval->IsFinite() && @@ -510,49 +606,30 @@ std::vector ProbeController::InitiateProbing( RTC_LOG(LS_INFO) << "Not sending probe, Network state estimate is zero"; return {}; } - estimate_capped_bitrate = - std::min({estimate_capped_bitrate, max_probe_bitrate, - network_estimate_->link_capacity_upper * - config_.network_state_probe_scale}); + max_probe_bitrate = std::min( + {max_probe_bitrate, + std::max(estimated_bitrate_, network_estimate_->link_capacity_upper * + config_.network_state_probe_scale)}); } std::vector pending_probes; for (DataRate bitrate : bitrates_to_probe) { RTC_DCHECK(!bitrate.IsZero()); - - bitrate = std::min(bitrate, estimate_capped_bitrate); - if (bitrate > max_probe_bitrate) { + if (bitrate >= max_probe_bitrate) { bitrate = max_probe_bitrate; probe_further = false; } - - ProbeClusterConfig config; - config.at_time = now; - config.target_data_rate = bitrate; - if (network_estimate_ && - config_.network_state_estimate_probing_interval->IsFinite()) { - config.target_duration = config_.network_state_probe_duration; - } else { - config.target_duration = config_.min_probe_duration; - } - - config.target_probe_count = config_.min_probe_packets_sent; - config.id = next_probe_cluster_id_; - next_probe_cluster_id_++; - MaybeLogProbeClusterCreated(event_log_, config); - pending_probes.push_back(config); + pending_probes.push_back(CreateProbeClusterConfig(now, bitrate)); } time_last_probing_initiated_ = now; if (probe_further) { - state_ = State::kWaitingForProbingResult; + UpdateState(State::kWaitingForProbingResult); // Dont expect probe results to be larger than a fraction of the actual // probe rate. 
- min_bitrate_to_probe_further_ = - std::min(estimate_capped_bitrate, (*(bitrates_to_probe.end() - 1))) * - config_.further_probe_threshold; + min_bitrate_to_probe_further_ = pending_probes.back().target_data_rate * + config_.further_probe_threshold; } else { - state_ = State::kProbingComplete; - min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); + UpdateState(State::kProbingComplete); } return pending_probes; } diff --git a/modules/congestion_controller/goog_cc/probe_controller.h b/modules/congestion_controller/goog_cc/probe_controller.h index ae00182c68..9c9c39ecad 100644 --- a/modules/congestion_controller/goog_cc/probe_controller.h +++ b/modules/congestion_controller/goog_cc/probe_controller.h @@ -13,16 +13,15 @@ #include -#include +#include #include #include "absl/base/attributes.h" -#include "absl/types/optional.h" #include "api/field_trials_view.h" #include "api/rtc_event_log/rtc_event_log.h" -#include "api/transport/network_control.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "rtc_base/experiments/field_trial_parser.h" @@ -43,14 +42,22 @@ struct ProbeControllerConfig { FieldTrialOptional second_exponential_probe_scale; FieldTrialParameter further_exponential_probe_scale; FieldTrialParameter further_probe_threshold; - + FieldTrialParameter abort_further_probe_if_max_lower_than_current; + // Duration of time from the first initial probe where repeated initial probes + // are sent if repeated initial probing is enabled. + FieldTrialParameter repeated_initial_probing_time_period; + // The minimum probing duration of an individual probe during + // the repeated_initial_probing_time_period. + FieldTrialParameter initial_probe_duration; + // Delta time between sent bursts of packets in a probe during + // the repeated_initial_probing_time_period. + FieldTrialParameter initial_min_probe_delta; // Configures how often we send ALR probes and how big they are. FieldTrialParameter alr_probing_interval; FieldTrialParameter alr_probe_scale; - // Configures how often we send probes if NetworkStateEstimate is available. FieldTrialParameter network_state_estimate_probing_interval; - // Periodically probe as long as the the ratio beteeen current estimate and + // Periodically probe as long as the ratio between current estimate and // NetworkStateEstimate is lower then this. FieldTrialParameter probe_if_estimate_lower_than_network_state_estimate_ratio; @@ -58,36 +65,41 @@ struct ProbeControllerConfig { estimate_lower_than_network_state_estimate_probing_interval; FieldTrialParameter network_state_probe_scale; // Overrides min_probe_duration if network_state_estimate_probing_interval - // is set and a network state estimate is known. + // is set and a network state estimate is known and equal or higher than the + // probe target. FieldTrialParameter network_state_probe_duration; + // Overrides min_probe_delta if network_state_estimate_probing_interval + // is set and a network state estimate is known and equal or higher than the + // probe target. + FieldTrialParameter network_state_min_probe_delta; // Configures the probes emitted by changed to the allocated bitrate. 
FieldTrialParameter probe_on_max_allocated_bitrate_change; FieldTrialOptional first_allocation_probe_scale; FieldTrialOptional second_allocation_probe_scale; - FieldTrialFlag allocation_allow_further_probing; - FieldTrialParameter allocation_probe_max; + FieldTrialParameter allocation_probe_limit_by_current_scale; // The minimum number probing packets used. FieldTrialParameter min_probe_packets_sent; // The minimum probing duration. FieldTrialParameter min_probe_duration; - // Periodically probe when bandwidth estimate is loss limited. - FieldTrialParameter limit_probe_target_rate_to_loss_bwe; + // Delta time between sent bursts of packets in a probe. + FieldTrialParameter min_probe_delta; FieldTrialParameter loss_limited_probe_scale; - // Dont send a probe if min(estimate, network state estimate) is larger than - // this fraction of the set max bitrate. + // Don't send a probe if min(estimate, network state estimate) is larger than + // this fraction of the set max or max allocated bitrate. FieldTrialParameter skip_if_estimate_larger_than_fraction_of_max; - // Do not send probes if either overusing/underusing network or high rtt. - FieldTrialParameter not_probe_if_delay_increased; + // Scale factor of the max allocated bitrate. Used when deciding if a probe + // can be skipped because the estimate is already high enough. + FieldTrialParameter skip_probe_max_allocated_scale; }; // Reason that bandwidth estimate is limited. Bandwidth estimate can be limited // by either delay based bwe, or loss based bwe when it increases/decreases the // estimate. -enum class BandwidthLimitedCause { +enum class BandwidthLimitedCause : int { kLossLimitedBweIncreasing = 0, - kLossLimitedBweDecreasing = 1, + kLossLimitedBwe = 1, kDelayBasedLimited = 2, kDelayBasedLimitedDelayIncreased = 3, kRttBasedBackOffHighRtt = 4 @@ -127,7 +139,14 @@ class ProbeController { void EnablePeriodicAlrProbing(bool enable); - void SetAlrStartTimeMs(absl::optional alr_start_time); + // Probes are sent periodically every 1s during the first 5s after the network + // becomes available or until OnMaxTotalAllocatedBitrate is invoked with a + // non-zero max_total_allocated_bitrate (there are active streams being + // sent). The probe rate is capped at the max bitrate configured via + // SetBitrates. + void EnableRepeatedInitialProbing(bool enable); + + void SetAlrStartTimeMs(std::optional alr_start_time); void SetAlrEndedTimeMs(int64_t alr_end_time); ABSL_MUST_USE_RESULT std::vector RequestProbe( @@ -136,18 +155,14 @@ void SetNetworkStateEstimate(webrtc::NetworkStateEstimate estimate); // Resets the ProbeController to a state equivalent to as if it was just - // created EXCEPT for `enable_periodic_alr_probing_` and - // `network_available_`. + // created EXCEPT for configuration settings like + // `enable_periodic_alr_probing_`, `network_available_` and + // `max_total_allocated_bitrate_`. void Reset(Timestamp at_time); ABSL_MUST_USE_RESULT std::vector Process( Timestamp at_time); - // Gets the value of field trial not_probe_if_delay_increased. - bool DontProbeIfDelayIncreased() { - return config_.not_probe_if_delay_increased; - } - private: enum class State { // Initial state where no probing has been triggered yet.
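Most of the new ProbeControllerConfig knobs above are read from the WebRTC-Bwe-ProbingConfiguration field trial; keys such as abort_further, initial_probing, min_probe_delta and skip_max_allocated_scale appear in the constructor earlier in this diff. A small sketch of wiring a controller up with such a trial string, assuming the WebRTC headers and test utilities referenced in this diff are available (the MakeProbeControllerForTest helper name and the example values are hypothetical):

#include <memory>

#include "api/rtc_event_log/rtc_event_log.h"
#include "modules/congestion_controller/goog_cc/probe_controller.h"
#include "test/explicit_key_value_config.h"

std::unique_ptr<webrtc::ProbeController> MakeProbeControllerForTest(
    webrtc::RtcEventLog* event_log) {
  // Keys below are parsed by ProbeControllerConfig; values are examples only.
  static const webrtc::test::ExplicitKeyValueConfig trials(
      "WebRTC-Bwe-ProbingConfiguration/"
      "abort_further:true,skip_max_allocated_scale:1.0/");
  return std::make_unique<webrtc::ProbeController>(&trials, event_log);
}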
@@ -158,6 +173,7 @@ class ProbeController { kProbingComplete, }; + void UpdateState(State new_state); ABSL_MUST_USE_RESULT std::vector InitiateExponentialProbing(Timestamp at_time); ABSL_MUST_USE_RESULT std::vector InitiateProbing( @@ -166,20 +182,25 @@ class ProbeController { bool probe_further); bool TimeForAlrProbe(Timestamp at_time) const; bool TimeForNetworkStateProbe(Timestamp at_time) const; + bool TimeForNextRepeatedInitialProbe(Timestamp at_time) const; + ProbeClusterConfig CreateProbeClusterConfig(Timestamp at_time, + DataRate bitrate); bool network_available_; + bool repeated_initial_probing_enabled_ = false; + Timestamp last_allowed_repeated_initial_probe_ = Timestamp::MinusInfinity(); BandwidthLimitedCause bandwidth_limited_cause_ = BandwidthLimitedCause::kDelayBasedLimited; State state_; DataRate min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); Timestamp time_last_probing_initiated_ = Timestamp::MinusInfinity(); DataRate estimated_bitrate_ = DataRate::Zero(); - absl::optional network_estimate_; + std::optional network_estimate_; DataRate start_bitrate_ = DataRate::Zero(); DataRate max_bitrate_ = DataRate::PlusInfinity(); Timestamp last_bwe_drop_probing_time_ = Timestamp::Zero(); - absl::optional alr_start_time_; - absl::optional alr_end_time_; + std::optional alr_start_time_; + std::optional alr_end_time_; bool enable_periodic_alr_probing_; Timestamp time_of_last_large_drop_ = Timestamp::MinusInfinity(); DataRate bitrate_before_last_large_drop_ = DataRate::Zero(); diff --git a/modules/congestion_controller/goog_cc/probe_controller_unittest.cc b/modules/congestion_controller/goog_cc/probe_controller_unittest.cc index 99efde80e0..39712ad4f6 100644 --- a/modules/congestion_controller/goog_cc/probe_controller_unittest.cc +++ b/modules/congestion_controller/goog_cc/probe_controller_unittest.cc @@ -10,19 +10,24 @@ #include "modules/congestion_controller/goog_cc/probe_controller.h" #include +#include +#include +#include "absl/strings/string_view.h" +#include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "logging/rtc_event_log/mock/mock_rtc_event_log.h" -#include "rtc_base/logging.h" #include "system_wrappers/include/clock.h" #include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" +using ::testing::Gt; using ::testing::IsEmpty; using ::testing::NiceMock; +using ::testing::SizeIs; namespace webrtc { namespace test { @@ -32,6 +37,7 @@ namespace { constexpr DataRate kMinBitrate = DataRate::BitsPerSec(100); constexpr DataRate kStartBitrate = DataRate::BitsPerSec(300); constexpr DataRate kMaxBitrate = DataRate::BitsPerSec(10000); +constexpr DataRate kMbpsMultiplier = DataRate::KilobitsPerSec(1000); constexpr TimeDelta kExponentialProbingTimeout = TimeDelta::Seconds(5); @@ -58,20 +64,38 @@ class ProbeControllerFixture { NiceMock mock_rtc_event_log; }; -TEST(ProbeControllerTest, InitiatesProbingAtStart) { +TEST(ProbeControllerTest, InitiatesProbingAfterSetBitrates) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); EXPECT_GE(probes.size(), 2u); } +TEST(ProbeControllerTest, InitiatesProbingWhenNetworkAvailable) { + ProbeControllerFixture fixture; + std::unique_ptr probe_controller = + 
fixture.CreateController(); + + std::vector probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + EXPECT_THAT(probes, IsEmpty()); + probes = probe_controller->OnNetworkAvailability({.network_available = true}); + EXPECT_GE(probes.size(), 2u); +} + TEST(ProbeControllerTest, SetsDefaultTargetDurationAndTargetProbeCount) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); std::vector probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); ASSERT_GE(probes.size(), 2u); @@ -87,6 +111,9 @@ TEST(ProbeControllerTest, "min_probe_packets_sent:2,min_probe_duration:123ms/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); std::vector probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); ASSERT_GE(probes.size(), 2u); @@ -113,6 +140,9 @@ TEST(ProbeControllerTest, CanConfigureInitialProbeRateFactor) { ProbeControllerFixture fixture("WebRTC-Bwe-ProbingConfiguration/p1:2,p2:3/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); EXPECT_EQ(probes.size(), 2u); @@ -124,6 +154,9 @@ TEST(ProbeControllerTest, DisableSecondInitialProbeIfRateFactorZero) { ProbeControllerFixture fixture("WebRTC-Bwe-ProbingConfiguration/p1:2,p2:0/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); EXPECT_EQ(probes.size(), 1u); @@ -134,6 +167,9 @@ TEST(ProbeControllerTest, InitiatesProbingOnMaxBitrateIncrease) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); // Long enough to time out exponential probing. @@ -153,6 +189,9 @@ TEST(ProbeControllerTest, ProbesOnMaxAllocatedBitrateIncreaseOnlyWhenInAlr) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( @@ -172,19 +211,56 @@ TEST(ProbeControllerTest, ProbesOnMaxAllocatedBitrateIncreaseOnlyWhenInAlr) { EXPECT_EQ(probes.at(0).target_data_rate, kMaxBitrate); // Do not probe when not in alr. 
- probe_controller->SetAlrStartTimeMs(absl::nullopt); + probe_controller->SetAlrStartTimeMs(std::nullopt); probes = probe_controller->OnMaxTotalAllocatedBitrate( kMaxBitrate + DataRate::BitsPerSec(2), fixture.CurrentTime()); EXPECT_TRUE(probes.empty()); } +TEST(ProbeControllerTest, ProbesOnMaxAllocatedBitrateLimitedByCurrentBwe) { + ProbeControllerFixture fixture(""); + + ASSERT_TRUE(kMaxBitrate > 1.5 * kStartBitrate); + std::unique_ptr probe_controller = + fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + probes = probe_controller->SetEstimatedBitrate( + kStartBitrate, BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + + // Wait long enough to time out exponential probing. + fixture.AdvanceTime(kExponentialProbingTimeout); + probes = probe_controller->Process(fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); + + // Probe when in alr. + probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); + probes = probe_controller->OnMaxTotalAllocatedBitrate(kMaxBitrate, + fixture.CurrentTime()); + EXPECT_EQ(probes.size(), 1u); + EXPECT_EQ(probes.at(0).target_data_rate, 2.0 * kStartBitrate); + + // Continue probing if probe succeeds. + probes = probe_controller->SetEstimatedBitrate( + 1.5 * kStartBitrate, BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + EXPECT_EQ(probes.size(), 1u); + EXPECT_GT(probes.at(0).target_data_rate, 1.5 * kStartBitrate); +} + TEST(ProbeControllerTest, CanDisableProbingOnMaxTotalAllocatedBitrateIncrease) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/" "probe_max_allocation:false/"); std::unique_ptr probe_controller = fixture.CreateController(); - + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( @@ -206,6 +282,9 @@ TEST(ProbeControllerTest, InitiatesProbingOnMaxBitrateIncreaseAtMaxBitrate) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); // Long enough to time out exponential probing. 
@@ -229,6 +308,9 @@ TEST(ProbeControllerTest, TestExponentialProbing) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); @@ -246,26 +328,165 @@ TEST(ProbeControllerTest, TestExponentialProbing) { EXPECT_EQ(probes[0].target_data_rate.bps(), 2 * 1800); } -TEST(ProbeControllerTest, TestExponentialProbingTimeout) { +TEST(ProbeControllerTest, ExponentialProbingStopIfMaxBitrateLow) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/abort_further:true/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + ASSERT_THAT(probes, SizeIs(Gt(0))); + + // Repeated probe normally is sent when estimated bitrate climbs above + // 0.7 * 6 * kStartBitrate = 1260. But since max bitrate is low, expect + // exponential probing to stop. + probes = probe_controller->SetBitrates(kMinBitrate, kStartBitrate, + /*max_bitrate=*/kStartBitrate, + fixture.CurrentTime()); + EXPECT_THAT(probes, IsEmpty()); + + probes = probe_controller->SetEstimatedBitrate( + DataRate::BitsPerSec(1800), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + EXPECT_THAT(probes, IsEmpty()); +} + +TEST(ProbeControllerTest, ExponentialProbingStopIfMaxAllocatedBitrateLow) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/abort_further:true/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + ASSERT_THAT(probes, SizeIs(Gt(0))); + + // Repeated probe normally is sent when estimated bitrate climbs above + // 0.7 * 6 * kStartBitrate = 1260. But since allocated bitrate is low, expect + // exponential probing to stop. + probes = probe_controller->OnMaxTotalAllocatedBitrate(kStartBitrate, + fixture.CurrentTime()); + EXPECT_THAT(probes, IsEmpty()); + + probes = probe_controller->SetEstimatedBitrate( + DataRate::BitsPerSec(1800), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + EXPECT_THAT(probes, IsEmpty()); +} + +TEST(ProbeControllerTest, InitialProbingToLowMaxAllocatedbitrate) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + ASSERT_THAT(probes, SizeIs(Gt(0))); + + // Repeated probe is sent when estimated bitrate climbs above + // 0.7 * 6 * kStartBitrate = 1260. + probes = probe_controller->OnMaxTotalAllocatedBitrate(kStartBitrate, + fixture.CurrentTime()); + EXPECT_THAT(probes, IsEmpty()); + + // If the initial probe result is received, a new probe is sent at 2x the + // needed max bitrate.
+ probes = probe_controller->SetEstimatedBitrate( + DataRate::BitsPerSec(1800), BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + ASSERT_EQ(probes.size(), 1u); + EXPECT_EQ(probes[0].target_data_rate.bps(), 2 * kStartBitrate.bps()); +} + +TEST(ProbeControllerTest, InitialProbingTimeout) { + ProbeControllerFixture fixture; + std::unique_ptr probe_controller = + fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + EXPECT_THAT(probes, SizeIs(Gt(0))); // Advance far enough to cause a time out in waiting for probing result. fixture.AdvanceTime(kExponentialProbingTimeout); probes = probe_controller->Process(fixture.CurrentTime()); - + EXPECT_THAT(probes, IsEmpty()); probes = probe_controller->SetEstimatedBitrate( DataRate::BitsPerSec(1800), BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); - EXPECT_TRUE(probes.empty()); + EXPECT_THAT(probes, IsEmpty()); +} + +TEST(ProbeControllerTest, RepeatedInitialProbingSendsNewProbeAfterTimeout) { + ProbeControllerFixture fixture; + std::unique_ptr probe_controller = + fixture.CreateController(); + probe_controller->EnableRepeatedInitialProbing(true); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + EXPECT_THAT(probes, SizeIs(Gt(0))); + Timestamp start_time = fixture.CurrentTime(); + Timestamp last_probe_time = fixture.CurrentTime(); + while (fixture.CurrentTime() < start_time + TimeDelta::Seconds(5)) { + fixture.AdvanceTime(TimeDelta::Millis(100)); + probes = probe_controller->Process(fixture.CurrentTime()); + if (!probes.empty()) { + // Expect a probe every second. + EXPECT_EQ(fixture.CurrentTime() - last_probe_time, + TimeDelta::Seconds(1.1)); + EXPECT_EQ(probes[0].min_probe_delta, TimeDelta::Millis(20)); + EXPECT_EQ(probes[0].target_duration, TimeDelta::Millis(100)); + last_probe_time = fixture.CurrentTime(); + } else { + EXPECT_LT(fixture.CurrentTime() - last_probe_time, + TimeDelta::Seconds(1.1)); + } + } + fixture.AdvanceTime(TimeDelta::Seconds(1)); + // After 5s, repeated initial probing stops. 
+ EXPECT_THAT(probe_controller->Process(fixture.CurrentTime()), IsEmpty()); +} + +TEST(ProbeControllerTest, RepeatedInitialProbingStopIfMaxAllocatedBitrateSet) { + ProbeControllerFixture fixture; + std::unique_ptr probe_controller = + fixture.CreateController(); + probe_controller->EnableRepeatedInitialProbing(true); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + EXPECT_THAT(probes, SizeIs(Gt(0))); + + fixture.AdvanceTime(TimeDelta::Millis(1100)); + probes = probe_controller->Process(fixture.CurrentTime()); + EXPECT_THAT(probes, SizeIs(1)); + probes = probe_controller->OnMaxTotalAllocatedBitrate(kMinBitrate, + fixture.CurrentTime()); + fixture.AdvanceTime(TimeDelta::Millis(1100)); + probes = probe_controller->Process(fixture.CurrentTime()); + EXPECT_THAT(probes, IsEmpty()); } TEST(ProbeControllerTest, RequestProbeInAlr) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); EXPECT_GE(probes.size(), 2u); @@ -289,6 +510,9 @@ TEST(ProbeControllerTest, RequestProbeWhenAlrEndedRecently) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); EXPECT_EQ(probes.size(), 2u); @@ -296,7 +520,7 @@ TEST(ProbeControllerTest, RequestProbeWhenAlrEndedRecently) { DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); - probe_controller->SetAlrStartTimeMs(absl::nullopt); + probe_controller->SetAlrStartTimeMs(std::nullopt); fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( @@ -314,6 +538,9 @@ TEST(ProbeControllerTest, RequestProbeWhenAlrNotEndedRecently) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); EXPECT_EQ(probes.size(), 2u); @@ -321,7 +548,7 @@ TEST(ProbeControllerTest, RequestProbeWhenAlrNotEndedRecently) { DataRate::BitsPerSec(500), BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); - probe_controller->SetAlrStartTimeMs(absl::nullopt); + probe_controller->SetAlrStartTimeMs(std::nullopt); fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); probes = probe_controller->Process(fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( @@ -337,6 +564,9 @@ TEST(ProbeControllerTest, RequestProbeWhenBweDropNotRecent) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); EXPECT_EQ(probes.size(), 2u); @@ -359,6 +589,9 @@ TEST(ProbeControllerTest, 
PeriodicProbing) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); probe_controller->EnablePeriodicAlrProbing(true); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); @@ -403,6 +636,9 @@ TEST(ProbeControllerTest, PeriodicProbingAfterReset) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); Timestamp alr_start_time = fixture.CurrentTime(); probe_controller->SetAlrStartTimeMs(alr_start_time.ms()); @@ -461,7 +697,9 @@ TEST(ProbeControllerTest, TestExponentialProbingOverflow) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); - const DataRate kMbpsMultiplier = DataRate::KilobitsPerSec(1000); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates(kMinBitrate, 10 * kMbpsMultiplier, 100 * kMbpsMultiplier, fixture.CurrentTime()); @@ -482,17 +720,19 @@ TEST(ProbeControllerTest, TestAllocatedBitrateCap) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); - const DataRate kMbpsMultiplier = DataRate::KilobitsPerSec(1000); - const DataRate kMaxBitrate = 100 * kMbpsMultiplier; - auto probes = probe_controller->SetBitrates( - kMinBitrate, 10 * kMbpsMultiplier, kMaxBitrate, fixture.CurrentTime()); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); + auto probes = probe_controller->SetBitrates(kMinBitrate, 10 * kMbpsMultiplier, + 100 * kMbpsMultiplier, + fixture.CurrentTime()); // Configure ALR for periodic probing. 
probe_controller->EnablePeriodicAlrProbing(true); Timestamp alr_start_time = fixture.CurrentTime(); probe_controller->SetAlrStartTimeMs(alr_start_time.ms()); - DataRate estimated_bitrate = kMaxBitrate / 10; + DataRate estimated_bitrate = 10 * kMbpsMultiplier; probes = probe_controller->SetEstimatedBitrate( estimated_bitrate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); @@ -524,9 +764,13 @@ TEST(ProbeControllerTest, ConfigurableProbingFieldTrial) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/" "p1:2,p2:5,step_size:3,further_probe_threshold:0.8," - "alloc_p1:2,alloc_p2,min_probe_packets_sent:2/"); + "alloc_p1:2,alloc_current_bwe_limit:1000.0,alloc_p2,min_probe_packets_" + "sent:2/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates(kMinBitrate, kStartBitrate, DataRate::KilobitsPerSec(5000), @@ -561,11 +805,12 @@ TEST(ProbeControllerTest, ConfigurableProbingFieldTrial) { } TEST(ProbeControllerTest, LimitAlrProbeWhenLossBasedBweLimited) { - ProbeControllerFixture fixture( - "WebRTC-Bwe-ProbingConfiguration/" - "limit_probe_target_rate_to_loss_bwe:true/"); + ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); probe_controller->EnablePeriodicAlrProbing(true); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); @@ -600,6 +845,9 @@ TEST(ProbeControllerTest, PeriodicProbeAtUpperNetworkStateEstimate) { "WebRTC-Bwe-ProbingConfiguration/network_state_interval:5s/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); @@ -625,9 +873,12 @@ TEST(ProbeControllerTest, LimitProbeAtUpperNetworkStateEstimateIfLossBasedLimited) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/" - "network_state_interval:5s,limit_probe_target_rate_to_loss_bwe:true/"); + "network_state_interval:5s/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); @@ -657,6 +908,9 @@ TEST(ProbeControllerTest, AlrProbesLimitedByNetworkStateEstimate) { "WebRTC-Bwe-ProbingConfiguration/network_state_interval:5s/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); probe_controller->EnablePeriodicAlrProbing(true); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); @@ -685,6 +939,9 @@ TEST(ProbeControllerTest, CanSetLongerProbeDurationAfterNetworkStateEstimate) { "network_state_interval:5s,network_state_probe_duration:100ms/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); @@ -704,11 +961,12 @@ 
TEST(ProbeControllerTest, CanSetLongerProbeDurationAfterNetworkStateEstimate) { } TEST(ProbeControllerTest, ProbeInAlrIfLossBasedIncreasing) { - ProbeControllerFixture fixture( - "WebRTC-Bwe-ProbingConfiguration/" - "limit_probe_target_rate_to_loss_bwe:true/"); + ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probe_controller->EnablePeriodicAlrProbing(true); @@ -729,49 +987,18 @@ TEST(ProbeControllerTest, ProbeInAlrIfLossBasedIncreasing) { EXPECT_EQ(probes.at(0).target_data_rate, 1.5 * kStartBitrate); } -TEST(ProbeControllerTest, ProbeFurtherInAlrIfLossBasedIncreasing) { - ProbeControllerFixture fixture( - "WebRTC-Bwe-ProbingConfiguration/" - "limit_probe_target_rate_to_loss_bwe:true/"); - std::unique_ptr probe_controller = - fixture.CreateController(); - auto probes = probe_controller->SetBitrates( - kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); - probe_controller->EnablePeriodicAlrProbing(true); - probes = probe_controller->SetEstimatedBitrate( - kStartBitrate, BandwidthLimitedCause::kLossLimitedBweIncreasing, - fixture.CurrentTime()); - - // Wait long enough to time out exponential probing. - fixture.AdvanceTime(kExponentialProbingTimeout); - probes = probe_controller->Process(fixture.CurrentTime()); - ASSERT_TRUE(probes.empty()); - - // Probe when in alr. - probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); - fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); - probes = probe_controller->Process(fixture.CurrentTime()); - ASSERT_EQ(probes.size(), 1u); - ASSERT_EQ(probes.at(0).target_data_rate, 1.5 * kStartBitrate); - - probes = probe_controller->SetEstimatedBitrate( - 1.5 * kStartBitrate, BandwidthLimitedCause::kLossLimitedBweIncreasing, - fixture.CurrentTime()); - ASSERT_EQ(probes.size(), 1u); - EXPECT_EQ(probes[0].target_data_rate, 1.5 * 1.5 * kStartBitrate); -} - TEST(ProbeControllerTest, NotProbeWhenInAlrIfLossBasedDecreases) { - ProbeControllerFixture fixture( - "WebRTC-Bwe-ProbingConfiguration/" - "network_state_interval:5s,limit_probe_target_rate_to_loss_bwe:true/"); + ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probe_controller->EnablePeriodicAlrProbing(true); probes = probe_controller->SetEstimatedBitrate( - kStartBitrate, BandwidthLimitedCause::kLossLimitedBweDecreasing, + kStartBitrate, BandwidthLimitedCause::kLossLimitedBwe, fixture.CurrentTime()); // Wait long enough to time out exponential probing. 
@@ -787,11 +1014,12 @@ TEST(ProbeControllerTest, NotProbeWhenInAlrIfLossBasedDecreases) { } TEST(ProbeControllerTest, NotProbeIfLossBasedIncreasingOutsideAlr) { - ProbeControllerFixture fixture( - "WebRTC-Bwe-ProbingConfiguration/" - "limit_probe_target_rate_to_loss_bwe:true/"); + ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probe_controller->EnablePeriodicAlrProbing(true); @@ -804,7 +1032,7 @@ TEST(ProbeControllerTest, NotProbeIfLossBasedIncreasingOutsideAlr) { probes = probe_controller->Process(fixture.CurrentTime()); ASSERT_TRUE(probes.empty()); - probe_controller->SetAlrStartTimeMs(absl::nullopt); + probe_controller->SetAlrStartTimeMs(std::nullopt); fixture.AdvanceTime(kAlrProbeInterval + TimeDelta::Millis(1)); probes = probe_controller->Process(fixture.CurrentTime()); EXPECT_TRUE(probes.empty()); @@ -813,9 +1041,12 @@ TEST(ProbeControllerTest, NotProbeIfLossBasedIncreasingOutsideAlr) { TEST(ProbeControllerTest, ProbeFurtherWhenLossBasedIsSameAsDelayBasedEstimate) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/" - "network_state_interval:5s,limit_probe_target_rate_to_loss_bwe:true/"); + "network_state_interval:5s/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); @@ -853,6 +1084,9 @@ TEST(ProbeControllerTest, ProbeIfEstimateLowerThanNetworkStateEstimate) { "target_rate_to_loss_bwe:true/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); @@ -894,9 +1128,12 @@ TEST(ProbeControllerTest, ProbeIfEstimateLowerThanNetworkStateEstimate) { TEST(ProbeControllerTest, DontProbeFurtherWhenLossLimited) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/" - "network_state_interval:5s,limit_probe_target_rate_to_loss_bwe:true/"); + "network_state_interval:5s/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); @@ -916,17 +1153,20 @@ TEST(ProbeControllerTest, DontProbeFurtherWhenLossLimited) { EXPECT_LT(probes[0].target_data_rate, state_estimate.link_capacity_upper); // Expect that no more probes are sent immediately if BWE is loss limited. 
probes = probe_controller->SetEstimatedBitrate( - probes[0].target_data_rate, - BandwidthLimitedCause::kLossLimitedBweDecreasing, fixture.CurrentTime()); + probes[0].target_data_rate, BandwidthLimitedCause::kLossLimitedBwe, + fixture.CurrentTime()); EXPECT_TRUE(probes.empty()); } TEST(ProbeControllerTest, ProbeFurtherWhenDelayBasedLimited) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/" - "network_state_interval:5s,limit_probe_target_rate_to_loss_bwe:true/"); + "network_state_interval:5s/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); @@ -953,12 +1193,16 @@ TEST(ProbeControllerTest, ProbeFurtherWhenDelayBasedLimited) { } TEST(ProbeControllerTest, - ProbeFurtherIfNetworkStateEstimateIncreaseAfterProbeSent) { + ProbeAfterTimeoutIfNetworkStateEstimateIncreaseAfterProbeSent) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/" - "network_state_interval:5s,limit_probe_target_rate_to_loss_bwe:true/"); + "network_state_interval:5s,est_lower_than_network_interval:3s,est_lower_" + "than_network_ratio:0.7/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); ASSERT_FALSE(probes.empty()); @@ -975,14 +1219,18 @@ TEST(ProbeControllerTest, probes = probe_controller->Process(fixture.CurrentTime()); ASSERT_FALSE(probes.empty()); EXPECT_LE(probes[0].target_data_rate, state_estimate.link_capacity_upper); - // If the network state estimate increase above the threshold to probe - // further, and the probe suceeed, expect a new probe. + // If the network state estimate increase, even before the probe result, + // expect a new probe after `est_lower_than_network_interval` timeout. state_estimate.link_capacity_upper = 3 * kStartBitrate; probe_controller->SetNetworkStateEstimate(state_estimate); probes = probe_controller->SetEstimatedBitrate( probes[0].target_data_rate, BandwidthLimitedCause::kDelayBasedLimited, fixture.CurrentTime()); - EXPECT_FALSE(probes.empty()); + EXPECT_THAT(probes, IsEmpty()); + + fixture.AdvanceTime(TimeDelta::Seconds(3)); + probes = probe_controller->Process(fixture.CurrentTime()); + EXPECT_THAT(probes, Not(IsEmpty())); // But no more probes if estimate is close to the link capacity. probes = probe_controller->SetEstimatedBitrate( @@ -991,12 +1239,75 @@ TEST(ProbeControllerTest, EXPECT_TRUE(probes.empty()); } +TEST(ProbeControllerTest, SkipProbeFurtherIfAlreadyProbedToMaxRate) { + ProbeControllerFixture fixture( + "WebRTC-Bwe-ProbingConfiguration/" + "network_state_interval:2s,skip_if_est_larger_than_fraction_of_max:0.9/"); + std::unique_ptr probe_controller = + fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + ASSERT_FALSE(probes.empty()); + + probe_controller->SetNetworkStateEstimate( + {.link_capacity_upper = 2 * kMaxBitrate}); + + // Attempt to probe up to max rate. 
+ probes = probe_controller->SetEstimatedBitrate( + kMaxBitrate * 0.8, BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + ASSERT_FALSE(probes.empty()); + EXPECT_EQ(probes[0].target_data_rate, kMaxBitrate); + + // If the probe result arrives, dont expect a new probe immediately since we + // already tried to probe at the max rate. + probes = probe_controller->SetEstimatedBitrate( + kMaxBitrate * 0.8, BandwidthLimitedCause::kDelayBasedLimited, + fixture.CurrentTime()); + EXPECT_TRUE(probes.empty()); + + fixture.AdvanceTime(TimeDelta::Millis(1000)); + probes = probe_controller->Process(fixture.CurrentTime()); + EXPECT_THAT(probes, IsEmpty()); + // But when enough time has passed, expect a new probe. + fixture.AdvanceTime(TimeDelta::Millis(1000)); + probes = probe_controller->Process(fixture.CurrentTime()); + EXPECT_THAT(probes, Not(IsEmpty())); +} + +TEST(ProbeControllerTest, MaxAllocatedBitrateNotReset) { + ProbeControllerFixture fixture; + std::unique_ptr probe_controller = + fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); + auto probes = probe_controller->SetBitrates( + kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); + ASSERT_FALSE(probes.empty()); + + probes = probe_controller->OnMaxTotalAllocatedBitrate(kStartBitrate / 4, + fixture.CurrentTime()); + probe_controller->Reset(fixture.CurrentTime()); + + probes = probe_controller->SetBitrates(kMinBitrate, kStartBitrate, + kMaxBitrate, fixture.CurrentTime()); + ASSERT_FALSE(probes.empty()); + EXPECT_EQ(probes[0].target_data_rate, kStartBitrate / 4 * 2); +} + TEST(ProbeControllerTest, SkipAlrProbeIfEstimateLargerThanMaxProbe) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/" "skip_if_est_larger_than_fraction_of_max:0.9/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); probe_controller->EnablePeriodicAlrProbing(true); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); @@ -1025,6 +1336,9 @@ TEST(ProbeControllerTest, "skip_if_est_larger_than_fraction_of_max:1.0/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); probe_controller->EnablePeriodicAlrProbing(true); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); @@ -1055,6 +1369,9 @@ TEST(ProbeControllerTest, SkipNetworkStateProbeIfEstimateLargerThanMaxProbe) { "network_state_interval:2s,skip_if_est_larger_than_fraction_of_max:0.9/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); ASSERT_FALSE(probes.empty()); @@ -1075,9 +1392,13 @@ TEST(ProbeControllerTest, SendsProbeIfNetworkStateEstimateLowerThanMaxProbe) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/" "network_state_interval:2s,skip_if_est_larger_than_fraction_of_max:0.9," - "/"); + "network_state_probe_duration:100ms,network_" + "state_min_probe_delta:20/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); 
auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); ASSERT_FALSE(probes.empty()); @@ -1099,14 +1420,25 @@ TEST(ProbeControllerTest, SendsProbeIfNetworkStateEstimateLowerThanMaxProbe) { {.link_capacity_upper = 2 * kStartBitrate}); probes = probe_controller->Process(fixture.CurrentTime()); EXPECT_FALSE(probes.empty()); + EXPECT_LE(probes[0].target_data_rate, 2 * kStartBitrate); + // Expect probe durations to be picked from the field trial if the probe target + // is lower than or equal to the network state estimate. + EXPECT_EQ(probes[0].min_probe_delta, TimeDelta::Millis(20)); + EXPECT_EQ(probes[0].target_duration, TimeDelta::Millis(100)); } -TEST(ProbeControllerTest, DontSendProbeIfNetworkStateEstimateIsZero) { +TEST(ProbeControllerTest, + ProbeNotLimitedByNetworkStateEsimateIfLowerThantCurrent) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/" - "network_state_interval:5s,limit_probe_target_rate_to_loss_bwe:true/"); + "network_state_interval:5s,network_state_probe_duration:100ms,network_" + "state_min_probe_delta:20/"); std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); + probe_controller->EnablePeriodicAlrProbing(true); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); probes = probe_controller->SetEstimatedBitrate( @@ -1119,21 +1451,26 @@ TEST(ProbeControllerTest, DontSendProbeIfNetworkStateEstimateIsZero) { probes = probe_controller->Process(fixture.CurrentTime()); ASSERT_TRUE(probes.empty()); + probe_controller->SetAlrStartTimeMs(fixture.CurrentTime().ms()); probe_controller->SetNetworkStateEstimate( - {.link_capacity_upper = DataRate::Zero()}); - probes = probe_controller->Process(fixture.CurrentTime()); - EXPECT_TRUE(probes.empty()); + {.link_capacity_upper = kStartBitrate / 2}); fixture.AdvanceTime(TimeDelta::Seconds(6)); probes = probe_controller->Process(fixture.CurrentTime()); - EXPECT_TRUE(probes.empty()); + ASSERT_FALSE(probes.empty()); + EXPECT_EQ(probes[0].target_data_rate, kStartBitrate); + // Expect probe durations to be the defaults since the network state estimate is lower + // than the probe rate.
+ EXPECT_EQ(probes[0].min_probe_delta, TimeDelta::Millis(2)); + EXPECT_EQ(probes[0].target_duration, TimeDelta::Millis(15)); } TEST(ProbeControllerTest, DontProbeIfDelayIncreased) { - ProbeControllerFixture fixture( - "WebRTC-Bwe-ProbingConfiguration/" - "network_state_interval:5s,not_probe_if_delay_increased:true/"); + ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); @@ -1158,11 +1495,12 @@ TEST(ProbeControllerTest, DontProbeIfDelayIncreased) { } TEST(ProbeControllerTest, DontProbeIfHighRtt) { - ProbeControllerFixture fixture( - "WebRTC-Bwe-ProbingConfiguration/" - "network_state_interval:5s,not_probe_if_delay_increased:true/"); + ProbeControllerFixture fixture; std::unique_ptr probe_controller = fixture.CreateController(); + ASSERT_THAT( + probe_controller->OnNetworkAvailability({.network_available = true}), + IsEmpty()); auto probes = probe_controller->SetBitrates( kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); diff --git a/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc b/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc index 3f66f7fdae..3aad0c954c 100644 --- a/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc +++ b/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc @@ -13,12 +13,16 @@ #include #include +#include #include +#include +#include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -97,9 +101,9 @@ void RobustThroughputEstimator::IncomingPacketFeedbackVector( } } -absl::optional RobustThroughputEstimator::bitrate() const { +std::optional RobustThroughputEstimator::bitrate() const { if (window_.empty() || window_.size() < settings_.required_packets) - return absl::nullopt; + return std::nullopt; TimeDelta largest_recv_gap(TimeDelta::Zero()); TimeDelta second_largest_recv_gap(TimeDelta::Zero()); diff --git a/modules/congestion_controller/goog_cc/robust_throughput_estimator.h b/modules/congestion_controller/goog_cc/robust_throughput_estimator.h index 9d89856496..a1dc13918a 100644 --- a/modules/congestion_controller/goog_cc/robust_throughput_estimator.h +++ b/modules/congestion_controller/goog_cc/robust_throughput_estimator.h @@ -12,9 +12,9 @@ #define MODULES_CONGESTION_CONTROLLER_GOOG_CC_ROBUST_THROUGHPUT_ESTIMATOR_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "api/units/timestamp.h" @@ -31,9 +31,9 @@ class RobustThroughputEstimator : public AcknowledgedBitrateEstimatorInterface { void IncomingPacketFeedbackVector( const std::vector& packet_feedback_vector) override; - absl::optional bitrate() const override; + std::optional bitrate() const override; - absl::optional PeekRate() const override { return bitrate(); } + std::optional PeekRate() const override { return bitrate(); } void SetAlr(bool /*in_alr*/) override {} void SetAlrEndedTime(Timestamp /*alr_ended_time*/) override {} diff --git a/modules/congestion_controller/goog_cc/robust_throughput_estimator_unittest.cc 
b/modules/congestion_controller/goog_cc/robust_throughput_estimator_unittest.cc index f41ee7f3d6..ef58263678 100644 --- a/modules/congestion_controller/goog_cc/robust_throughput_estimator_unittest.cc +++ b/modules/congestion_controller/goog_cc/robust_throughput_estimator_unittest.cc @@ -14,12 +14,15 @@ #include #include -#include +#include #include "absl/strings/string_view.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h" #include "test/explicit_key_value_config.h" #include "test/gtest.h" @@ -372,14 +375,16 @@ TEST(RobustThroughputEstimatorTest, DeepReordering) { // Since the window is 500 ms, the delayed packet was sent ~500 // ms before the second oldest packet. However, the send rate // should not drop. - delayed_packets.front().receive_time = - feedback_generator.CurrentReceiveClock(); - throughput_estimator.IncomingPacketFeedbackVector(delayed_packets); - auto throughput = throughput_estimator.bitrate(); - ASSERT_TRUE(throughput.has_value()); - EXPECT_NEAR(throughput.value().bytes_per_sec(), - send_rate.bytes_per_sec(), - 0.05 * send_rate.bytes_per_sec()); // Allow 5% error + { + delayed_packets.front().receive_time = + feedback_generator.CurrentReceiveClock(); + throughput_estimator.IncomingPacketFeedbackVector(delayed_packets); + auto throughput = throughput_estimator.bitrate(); + ASSERT_TRUE(throughput.has_value()); + EXPECT_NEAR(throughput.value().bytes_per_sec(), + send_rate.bytes_per_sec(), + 0.05 * send_rate.bytes_per_sec()); // Allow 5% error + } // Throughput should stay stable. for (int i = 0; i < 10; i++) { diff --git a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc index 55270be7f5..7fe27bf910 100644 --- a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc +++ b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc @@ -11,24 +11,28 @@ #include "modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h" #include +#include +#include #include #include #include +#include #include +#include -#include "absl/strings/match.h" #include "api/field_trials_view.h" -#include "api/network_state_predictor.h" -#include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log/rtc_event_log.h" +#include "api/transport/bandwidth_usage.h" +#include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" #include "modules/congestion_controller/goog_cc/loss_based_bwe_v2.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" namespace webrtc { @@ -62,21 +66,18 @@ const size_t kNumUmaRampupMetrics = const char kBweLosExperiment[] = "WebRTC-BweLossExperiment"; -bool BweLossExperimentIsEnabled() { - std::string experiment_string = - webrtc::field_trial::FindFullName(kBweLosExperiment); - // The experiment is enabled iff the field trial string begins with "Enabled".
- return absl::StartsWith(experiment_string, "Enabled"); +bool BweLossExperimentIsEnabled(const FieldTrialsView& field_trials) { + return field_trials.IsEnabled(kBweLosExperiment); } -bool ReadBweLossExperimentParameters(float* low_loss_threshold, +bool ReadBweLossExperimentParameters(const FieldTrialsView& field_trials, + float* low_loss_threshold, float* high_loss_threshold, uint32_t* bitrate_threshold_kbps) { RTC_DCHECK(low_loss_threshold); RTC_DCHECK(high_loss_threshold); RTC_DCHECK(bitrate_threshold_kbps); - std::string experiment_string = - webrtc::field_trial::FindFullName(kBweLosExperiment); + std::string experiment_string = field_trials.Lookup(kBweLosExperiment); int parsed_values = sscanf(experiment_string.c_str(), "Enabled-%f,%f,%u", low_loss_threshold, high_loss_threshold, bitrate_threshold_kbps); @@ -108,14 +109,6 @@ bool ReadBweLossExperimentParameters(float* low_loss_threshold, } } // namespace -LinkCapacityTracker::LinkCapacityTracker() - : tracking_rate("rate", TimeDelta::Seconds(10)) { - ParseFieldTrial({&tracking_rate}, - field_trial::FindFullName("WebRTC-Bwe-LinkCapacity")); -} - -LinkCapacityTracker::~LinkCapacityTracker() {} - void LinkCapacityTracker::UpdateDelayBasedEstimate( Timestamp at_time, DataRate delay_based_bitrate) { @@ -132,7 +125,7 @@ void LinkCapacityTracker::OnStartingRate(DataRate start_rate) { capacity_estimate_bps_ = start_rate.bps(); } -void LinkCapacityTracker::OnRateUpdate(absl::optional acknowledged, +void LinkCapacityTracker::OnRateUpdate(std::optional acknowledged, DataRate target, Timestamp at_time) { if (!acknowledged) @@ -140,7 +133,8 @@ void LinkCapacityTracker::OnRateUpdate(absl::optional acknowledged, DataRate acknowledged_target = std::min(*acknowledged, target); if (acknowledged_target.bps() > capacity_estimate_bps_) { TimeDelta delta = at_time - last_link_capacity_update_; - double alpha = delta.IsFinite() ? exp(-(delta / tracking_rate.Get())) : 0; + double alpha = + delta.IsFinite() ? 
exp(-(delta / TimeDelta::Seconds(10))) : 0; capacity_estimate_bps_ = alpha * capacity_estimate_bps_ + (1 - alpha) * acknowledged_target.bps(); } @@ -200,7 +194,8 @@ RttBasedBackoff::~RttBasedBackoff() = default; SendSideBandwidthEstimation::SendSideBandwidthEstimation( const FieldTrialsView* key_value_config, RtcEventLog* event_log) - : rtt_backoff_(key_value_config), + : key_value_config_(key_value_config), + rtt_backoff_(key_value_config), lost_packets_since_last_loss_update_(0), expected_packets_since_last_loss_update_(0), current_target_(DataRate::Zero()), @@ -229,15 +224,15 @@ SendSideBandwidthEstimation::SendSideBandwidthEstimation( high_loss_threshold_(kDefaultHighLossThreshold), bitrate_threshold_(kDefaultBitrateThreshold), loss_based_bandwidth_estimator_v1_(key_value_config), - loss_based_bandwidth_estimator_v2_(key_value_config), + loss_based_bandwidth_estimator_v2_(new LossBasedBweV2(key_value_config)), loss_based_state_(LossBasedState::kDelayBasedEstimate), disable_receiver_limit_caps_only_("Disabled") { RTC_DCHECK(event_log); - if (BweLossExperimentIsEnabled()) { + if (BweLossExperimentIsEnabled(*key_value_config_)) { uint32_t bitrate_threshold_kbps; - if (ReadBweLossExperimentParameters(&low_loss_threshold_, - &high_loss_threshold_, - &bitrate_threshold_kbps)) { + if (ReadBweLossExperimentParameters( + *key_value_config_, &low_loss_threshold_, &high_loss_threshold_, + &bitrate_threshold_kbps)) { RTC_LOG(LS_INFO) << "Enabled BweLossExperiment with parameters " << low_loss_threshold_ << ", " << high_loss_threshold_ << ", " << bitrate_threshold_kbps; @@ -247,7 +242,7 @@ SendSideBandwidthEstimation::SendSideBandwidthEstimation( ParseFieldTrial({&disable_receiver_limit_caps_only_}, key_value_config->Lookup("WebRTC-Bwe-ReceiverLimitCapsOnly")); if (LossBasedBandwidthEstimatorV2Enabled()) { - loss_based_bandwidth_estimator_v2_.SetMinMaxBitrate( + loss_based_bandwidth_estimator_v2_->SetMinMaxBitrate( min_bitrate_configured_, max_bitrate_configured_); } } @@ -276,10 +271,15 @@ void SendSideBandwidthEstimation::OnRouteChange() { uma_update_state_ = kNoUpdate; uma_rtt_state_ = kNoUpdate; last_rtc_event_log_ = Timestamp::MinusInfinity(); + if (LossBasedBandwidthEstimatorV2Enabled() && + loss_based_bandwidth_estimator_v2_->UseInStartPhase()) { + loss_based_bandwidth_estimator_v2_.reset( + new LossBasedBweV2(key_value_config_)); + } } void SendSideBandwidthEstimation::SetBitrates( - absl::optional send_bitrate, + std::optional send_bitrate, DataRate min_bitrate, DataRate max_bitrate, Timestamp at_time) { @@ -310,8 +310,8 @@ void SendSideBandwidthEstimation::SetMinMaxBitrate(DataRate min_bitrate, } else { max_bitrate_configured_ = kDefaultMaxBitrate; } - loss_based_bandwidth_estimator_v2_.SetMinMaxBitrate(min_bitrate_configured_, - max_bitrate_configured_); + loss_based_bandwidth_estimator_v2_->SetMinMaxBitrate(min_bitrate_configured_, + max_bitrate_configured_); } int SendSideBandwidthEstimation::GetMinBitrate() const { @@ -355,7 +355,7 @@ void SendSideBandwidthEstimation::UpdateDelayBasedEstimate(Timestamp at_time, } void SendSideBandwidthEstimation::SetAcknowledgedRate( - absl::optional acknowledged_rate, + std::optional acknowledged_rate, Timestamp at_time) { acknowledged_rate_ = acknowledged_rate; if (!acknowledged_rate.has_value()) { @@ -366,25 +366,23 @@ void SendSideBandwidthEstimation::SetAcknowledgedRate( *acknowledged_rate, at_time); } if (LossBasedBandwidthEstimatorV2Enabled()) { - loss_based_bandwidth_estimator_v2_.SetAcknowledgedBitrate( + 
loss_based_bandwidth_estimator_v2_->SetAcknowledgedBitrate( *acknowledged_rate); } } void SendSideBandwidthEstimation::UpdateLossBasedEstimator( const TransportPacketsFeedback& report, - BandwidthUsage delay_detector_state, - absl::optional probe_bitrate, - DataRate upper_link_capacity, + BandwidthUsage /* delay_detector_state */, + std::optional /* probe_bitrate */, bool in_alr) { if (LossBasedBandwidthEstimatorV1Enabled()) { loss_based_bandwidth_estimator_v1_.UpdateLossStatistics( report.packet_feedbacks, report.feedback_time); } if (LossBasedBandwidthEstimatorV2Enabled()) { - loss_based_bandwidth_estimator_v2_.UpdateBandwidthEstimate( - report.packet_feedbacks, delay_based_limit_, delay_detector_state, - probe_bitrate, upper_link_capacity, in_alr); + loss_based_bandwidth_estimator_v2_->UpdateBandwidthEstimate( + report.packet_feedbacks, delay_based_limit_, in_alr); UpdateEstimate(report.feedback_time); } } @@ -488,7 +486,8 @@ void SendSideBandwidthEstimation::UpdateEstimate(Timestamp at_time) { // We trust the REMB and/or delay-based estimate during the first 2 seconds if // we haven't had any packet loss reported, to allow startup bitrate probing. - if (last_fraction_loss_ == 0 && IsInStartPhase(at_time)) { + if (last_fraction_loss_ == 0 && IsInStartPhase(at_time) && + !loss_based_bandwidth_estimator_v2_->ReadyToUseInStartPhase()) { DataRate new_bitrate = current_target_; // TODO(srte): We should not allow the new_bitrate to be larger than the // receiver limit here. @@ -499,9 +498,6 @@ void SendSideBandwidthEstimation::UpdateEstimate(Timestamp at_time) { if (LossBasedBandwidthEstimatorV1Enabled()) { loss_based_bandwidth_estimator_v1_.Initialize(new_bitrate); } - if (LossBasedBandwidthEstimatorV2Enabled()) { - loss_based_bandwidth_estimator_v2_.SetBandwidthEstimate(new_bitrate); - } if (new_bitrate != current_target_) { min_bitrate_history_.clear(); @@ -533,7 +529,7 @@ void SendSideBandwidthEstimation::UpdateEstimate(Timestamp at_time) { if (LossBasedBandwidthEstimatorV2ReadyForUse()) { LossBasedBweV2::Result result = - loss_based_bandwidth_estimator_v2_.GetLossBasedResult(); + loss_based_bandwidth_estimator_v2_->GetLossBasedResult(); loss_based_state_ = result.state; UpdateTargetBitrate(result.bandwidth_estimate, at_time); return; @@ -689,13 +685,17 @@ bool SendSideBandwidthEstimation::LossBasedBandwidthEstimatorV1ReadyForUse() } bool SendSideBandwidthEstimation::LossBasedBandwidthEstimatorV2Enabled() const { - return loss_based_bandwidth_estimator_v2_.IsEnabled(); + return loss_based_bandwidth_estimator_v2_->IsEnabled(); } bool SendSideBandwidthEstimation::LossBasedBandwidthEstimatorV2ReadyForUse() const { - return LossBasedBandwidthEstimatorV2Enabled() && - loss_based_bandwidth_estimator_v2_.IsReady(); + return loss_based_bandwidth_estimator_v2_->IsReady(); +} + +bool SendSideBandwidthEstimation::PaceAtLossBasedEstimate() const { + return LossBasedBandwidthEstimatorV2ReadyForUse() && + loss_based_bandwidth_estimator_v2_->PaceAtLossBasedEstimate(); } } // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h index 5a037db045..9a56a280ab 100644 --- a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h +++ b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h @@ -16,12 +16,13 @@ #include #include +#include +#include #include #include -#include "absl/types/optional.h" #include "api/field_trials_view.h" -#include 
"api/network_state_predictor.h" +#include "api/transport/bandwidth_usage.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" @@ -36,20 +37,19 @@ class RtcEventLog; class LinkCapacityTracker { public: - LinkCapacityTracker(); - ~LinkCapacityTracker(); + LinkCapacityTracker() = default; + ~LinkCapacityTracker() = default; // Call when a new delay-based estimate is available. void UpdateDelayBasedEstimate(Timestamp at_time, DataRate delay_based_bitrate); void OnStartingRate(DataRate start_rate); - void OnRateUpdate(absl::optional acknowledged, + void OnRateUpdate(std::optional acknowledged, DataRate target, Timestamp at_time); void OnRttBackoff(DataRate backoff_rate, Timestamp at_time); DataRate estimate() const; private: - FieldTrialParameter tracking_rate; double capacity_estimate_bps_ = 0; Timestamp last_link_capacity_update_ = Timestamp::MinusInfinity(); DataRate last_delay_based_estimate_ = DataRate::PlusInfinity(); @@ -115,20 +115,20 @@ class SendSideBandwidthEstimation { // Call when we receive a RTCP message with a ReceiveBlock. void UpdateRtt(TimeDelta rtt, Timestamp at_time); - void SetBitrates(absl::optional send_bitrate, + void SetBitrates(std::optional send_bitrate, DataRate min_bitrate, DataRate max_bitrate, Timestamp at_time); void SetSendBitrate(DataRate bitrate, Timestamp at_time); void SetMinMaxBitrate(DataRate min_bitrate, DataRate max_bitrate); int GetMinBitrate() const; - void SetAcknowledgedRate(absl::optional acknowledged_rate, + void SetAcknowledgedRate(std::optional acknowledged_rate, Timestamp at_time); void UpdateLossBasedEstimator(const TransportPacketsFeedback& report, BandwidthUsage delay_detector_state, - absl::optional probe_bitrate, - DataRate upper_link_capacity, + std::optional probe_bitrate, bool in_alr); + bool PaceAtLossBasedEstimate() const; private: friend class GoogCcStatePrinter; @@ -168,6 +168,7 @@ class SendSideBandwidthEstimation { bool LossBasedBandwidthEstimatorV1ReadyForUse() const; bool LossBasedBandwidthEstimatorV2ReadyForUse() const; + const FieldTrialsView* key_value_config_; RttBasedBackoff rtt_backoff_; LinkCapacityTracker link_capacity_; @@ -177,7 +178,7 @@ class SendSideBandwidthEstimation { int lost_packets_since_last_loss_update_; int expected_packets_since_last_loss_update_; - absl::optional acknowledged_rate_; + std::optional acknowledged_rate_; DataRate current_target_; DataRate last_logged_target_; DataRate min_bitrate_configured_; @@ -209,7 +210,7 @@ class SendSideBandwidthEstimation { float high_loss_threshold_; DataRate bitrate_threshold_; LossBasedBandwidthEstimation loss_based_bandwidth_estimator_v1_; - LossBasedBweV2 loss_based_bandwidth_estimator_v2_; + std::unique_ptr loss_based_bandwidth_estimator_v2_; LossBasedState loss_based_state_; FieldTrialFlag disable_receiver_limit_caps_only_; }; diff --git a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation_unittest.cc b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation_unittest.cc index 1e4ca6a01d..866700b09a 100644 --- a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation_unittest.cc +++ b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation_unittest.cc @@ -10,7 +10,12 @@ #include "modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h" +#include + #include "api/rtc_event_log/rtc_event.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include 
"logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" #include "logging/rtc_event_log/mock/mock_rtc_event_log.h" #include "test/explicit_key_value_config.h" diff --git a/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc b/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc index 6a8849ed6d..cef4bf4158 100644 --- a/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc +++ b/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc @@ -11,15 +11,28 @@ #include +#include +#include +#include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/rtc_event_log_output.h" +#include "api/transport/goog_cc_factory.h" +#include "api/transport/network_control.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/congestion_controller/goog_cc/alr_detector.h" #include "modules/congestion_controller/goog_cc/delay_based_bwe.h" +#include "modules/congestion_controller/goog_cc/goog_cc_network_control.h" #include "modules/congestion_controller/goog_cc/trendline_estimator.h" #include "modules/remote_bitrate_estimator/aimd_rate_control.h" #include "rtc_base/checks.h" +#include "test/logging/log_writer.h" namespace webrtc { namespace { @@ -29,16 +42,16 @@ void WriteTypedValue(RtcEventLogOutput* out, int value) { void WriteTypedValue(RtcEventLogOutput* out, double value) { LogWriteFormat(out, "%.6f", value); } -void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { +void WriteTypedValue(RtcEventLogOutput* out, std::optional value) { LogWriteFormat(out, "%.0f", value ? value->bytes_per_sec() : NAN); } -void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { +void WriteTypedValue(RtcEventLogOutput* out, std::optional value) { LogWriteFormat(out, "%.0f", value ? value->bytes() : NAN); } -void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { +void WriteTypedValue(RtcEventLogOutput* out, std::optional value) { LogWriteFormat(out, "%.3f", value ? value->seconds() : NAN); } -void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { +void WriteTypedValue(RtcEventLogOutput* out, std::optional value) { LogWriteFormat(out, "%.3f", value ? 
value->seconds() : NAN); } @@ -91,13 +104,13 @@ std::deque GoogCcStatePrinter::CreateLoggers() { ->loss_based_bandwidth_estimator_v1_; }; std::deque loggers({ - Log("time", [=] { return target_.at_time; }), - Log("rtt", [=] { return target_.network_estimate.round_trip_time; }), - Log("target", [=] { return target_.target_rate; }), - Log("stable_target", [=] { return target_.stable_target_rate; }), - Log("pacing", [=] { return pacing_.data_rate(); }), - Log("padding", [=] { return pacing_.pad_rate(); }), - Log("window", [=] { return congestion_window_; }), + Log("time", [this] { return target_.at_time; }), + Log("rtt", [this] { return target_.network_estimate.round_trip_time; }), + Log("target", [this] { return target_.target_rate; }), + Log("stable_target", [this] { return target_.stable_target_rate; }), + Log("pacing", [this] { return pacing_.data_rate(); }), + Log("padding", [this] { return pacing_.pad_rate(); }), + Log("window", [this] { return congestion_window_; }), Log("rate_control_state", [=] { return rate_control_state(); }), Log("stable_estimate", [=] { return stable_estimate(); }), Log("trendline", [=] { return trend()->prev_trend_; }), @@ -105,15 +118,15 @@ std::deque GoogCcStatePrinter::CreateLoggers() { [=] { return trend()->prev_modified_trend_; }), Log("trendline_offset_threshold", [=] { return trend()->threshold_; }), Log("acknowledged_rate", [=] { return acknowledged_rate(); }), - Log("est_capacity", [=] { return est_.link_capacity; }), - Log("est_capacity_dev", [=] { return est_.link_capacity_std_dev; }), - Log("est_capacity_min", [=] { return est_.link_capacity_min; }), - Log("est_cross_traffic", [=] { return est_.cross_traffic_ratio; }), - Log("est_cross_delay", [=] { return est_.cross_delay_rate; }), - Log("est_spike_delay", [=] { return est_.spike_delay_rate; }), - Log("est_pre_buffer", [=] { return est_.pre_link_buffer_delay; }), - Log("est_post_buffer", [=] { return est_.post_link_buffer_delay; }), - Log("est_propagation", [=] { return est_.propagation_delay; }), + Log("est_capacity", [this] { return est_.link_capacity; }), + Log("est_capacity_dev", [this] { return est_.link_capacity_std_dev; }), + Log("est_capacity_min", [this] { return est_.link_capacity_min; }), + Log("est_cross_traffic", [this] { return est_.cross_traffic_ratio; }), + Log("est_cross_delay", [this] { return est_.cross_delay_rate; }), + Log("est_spike_delay", [this] { return est_.spike_delay_rate; }), + Log("est_pre_buffer", [this] { return est_.pre_link_buffer_delay; }), + Log("est_post_buffer", [this] { return est_.post_link_buffer_delay; }), + Log("est_propagation", [this] { return est_.propagation_delay; }), Log("loss_ratio", [=] { return loss_cont()->last_loss_ratio_; }), Log("loss_average", [=] { return loss_cont()->average_loss_; }), Log("loss_average_max", [=] { return loss_cont()->average_loss_max_; }), @@ -125,9 +138,9 @@ std::deque GoogCcStatePrinter::CreateLoggers() { Log("loss_based_rate", [=] { return loss_cont()->loss_based_bitrate_; }), Log("loss_ack_rate", [=] { return loss_cont()->acknowledged_bitrate_max_; }), - Log("data_window", [=] { return controller_->current_data_window_; }), + Log("data_window", [this] { return controller_->current_data_window_; }), Log("pushback_target", - [=] { return controller_->last_pushback_target_rate_; }), + [this] { return controller_->last_pushback_target_rate_; }), }); return loggers; } diff --git a/modules/congestion_controller/goog_cc/test/goog_cc_printer.h b/modules/congestion_controller/goog_cc/test/goog_cc_printer.h index 
16fa657e71..cdea139e40 100644 --- a/modules/congestion_controller/goog_cc/test/goog_cc_printer.h +++ b/modules/congestion_controller/goog_cc/test/goog_cc_printer.h @@ -14,13 +14,13 @@ #include #include -#include "api/rtc_event_log/rtc_event_log.h" +#include "api/rtc_event_log_output.h" #include "api/transport/goog_cc_factory.h" #include "api/transport/network_control.h" #include "api/transport/network_types.h" +#include "api/units/data_size.h" #include "api/units/timestamp.h" #include "modules/congestion_controller/goog_cc/goog_cc_network_control.h" -#include "test/logging/log_writer.h" namespace webrtc { diff --git a/modules/congestion_controller/goog_cc/trendline_estimator.cc b/modules/congestion_controller/goog_cc/trendline_estimator.cc index 88182d4f80..2f8835a3cd 100644 --- a/modules/congestion_controller/goog_cc/trendline_estimator.cc +++ b/modules/congestion_controller/goog_cc/trendline_estimator.cc @@ -13,12 +13,19 @@ #include #include +#include +#include +#include +#include +#include +#include #include +#include #include "absl/strings/match.h" -#include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/network_state_predictor.h" -#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" +#include "api/transport/bandwidth_usage.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/logging.h" @@ -50,7 +57,7 @@ size_t ReadTrendlineFilterWindowSize(const FieldTrialsView* key_value_config) { return TrendlineEstimatorSettings::kDefaultTrendlineWindowSize; } -absl::optional LinearFitSlope( +std::optional LinearFitSlope( const std::deque& packets) { RTC_DCHECK(packets.size() >= 2); // Compute the "center of mass". @@ -72,11 +79,11 @@ absl::optional LinearFitSlope( denominator += (x - x_avg) * (x - x_avg); } if (denominator == 0) - return absl::nullopt; + return std::nullopt; return numerator / denominator; } -absl::optional ComputeSlopeCap( +std::optional ComputeSlopeCap( const std::deque& packets, const TrendlineEstimatorSettings& settings) { RTC_DCHECK(1 <= settings.beginning_packets && @@ -97,7 +104,7 @@ absl::optional ComputeSlopeCap( late = packets[i]; } if (late.arrival_time_ms - early.arrival_time_ms < 1) { - return absl::nullopt; + return std::nullopt; } return (late.raw_delay_ms - early.raw_delay_ms) / (late.arrival_time_ms - early.arrival_time_ms) + @@ -192,9 +199,9 @@ TrendlineEstimator::~TrendlineEstimator() {} void TrendlineEstimator::UpdateTrendline(double recv_delta_ms, double send_delta_ms, - int64_t send_time_ms, + int64_t /* send_time_ms */, int64_t arrival_time_ms, - size_t packet_size) { + size_t /* packet_size */) { const double delta_ms = recv_delta_ms - send_delta_ms; ++num_of_deltas_; num_of_deltas_ = std::min(num_of_deltas_, kDeltaCounterMax); @@ -203,12 +210,8 @@ void TrendlineEstimator::UpdateTrendline(double recv_delta_ms, // Exponential backoff filter. 
accumulated_delay_ += delta_ms; - BWE_TEST_LOGGING_PLOT(1, "accumulated_delay_ms", arrival_time_ms, - accumulated_delay_); smoothed_delay_ = smoothing_coef_ * smoothed_delay_ + (1 - smoothing_coef_) * accumulated_delay_; - BWE_TEST_LOGGING_PLOT(1, "smoothed_delay_ms", arrival_time_ms, - smoothed_delay_); // Maintain packet window delay_hist_.emplace_back( @@ -235,7 +238,7 @@ void TrendlineEstimator::UpdateTrendline(double recv_delta_ms, // trend < 0 -> the delay decreases, queues are being emptied trend = LinearFitSlope(delay_hist_).value_or(trend); if (settings_.enable_cap) { - absl::optional cap = ComputeSlopeCap(delay_hist_, settings_); + std::optional cap = ComputeSlopeCap(delay_hist_, settings_); // We only use the cap to filter out overuse detections, not // to detect additional underuses. if (trend >= 0 && cap.has_value() && trend > cap.value()) { @@ -243,7 +246,6 @@ void TrendlineEstimator::UpdateTrendline(double recv_delta_ms, } } } - BWE_TEST_LOGGING_PLOT(1, "trendline_slope", arrival_time_ms, trend); Detect(trend, send_delta_ms, arrival_time_ms); } @@ -276,8 +278,6 @@ void TrendlineEstimator::Detect(double trend, double ts_delta, int64_t now_ms) { const double modified_trend = std::min(num_of_deltas_, kMinNumDeltas) * trend * threshold_gain_; prev_modified_trend_ = modified_trend; - BWE_TEST_LOGGING_PLOT(1, "T", now_ms, modified_trend); - BWE_TEST_LOGGING_PLOT(1, "threshold", now_ms, threshold_); if (modified_trend > threshold_) { if (time_over_using_ == -1) { // Initialize the timer. Assume that we've been @@ -325,7 +325,7 @@ void TrendlineEstimator::UpdateThreshold(double modified_trend, const int64_t kMaxTimeDeltaMs = 100; int64_t time_delta_ms = std::min(now_ms - last_update_ms_, kMaxTimeDeltaMs); threshold_ += k * (fabs(modified_trend) - threshold_) * time_delta_ms; - threshold_ = rtc::SafeClamp(threshold_, 6.f, 600.f); + threshold_ = SafeClamp(threshold_, 6.f, 600.f); last_update_ms_ = now_ms; } diff --git a/modules/congestion_controller/goog_cc/trendline_estimator.h b/modules/congestion_controller/goog_cc/trendline_estimator.h index ffda25df74..70336ea09f 100644 --- a/modules/congestion_controller/goog_cc/trendline_estimator.h +++ b/modules/congestion_controller/goog_cc/trendline_estimator.h @@ -15,10 +15,10 @@ #include #include -#include #include "api/field_trials_view.h" #include "api/network_state_predictor.h" +#include "api/transport/bandwidth_usage.h" #include "modules/congestion_controller/goog_cc/delay_increase_detector_interface.h" #include "rtc_base/experiments/struct_parameters_parser.h" @@ -91,7 +91,7 @@ class TrendlineEstimator : public DelayIncreaseDetectorInterface { friend class GoogCcStatePrinter; void Detect(double trend, double ts_delta, int64_t now_ms); - void UpdateThreshold(double modified_offset, int64_t now_ms); + void UpdateThreshold(double modified_trend, int64_t now_ms); // Parameters. 
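The trendline_estimator.cc hunks above migrate LinearFitSlope and ComputeSlopeCap from absl::optional to std::optional; underneath, the detector is an ordinary least-squares fit of smoothed delay against arrival time that yields no value when the fit is degenerate. A self-contained sketch of that pattern, with an illustrative Sample type rather than the estimator's own packet-timing entries:

// least_squares_slope_sketch.cc - illustrative only.
#include <deque>
#include <iostream>
#include <optional>

struct Sample {
  double x = 0.0;  // e.g. arrival time in ms
  double y = 0.0;  // e.g. smoothed accumulated delay in ms
};

// Least-squares slope of y over x; nullopt when every x coincides (the
// denominator would be zero), mirroring how LinearFitSlope bails out.
std::optional<double> Slope(const std::deque<Sample>& samples) {
  if (samples.size() < 2) {
    return std::nullopt;
  }
  double x_avg = 0.0;
  double y_avg = 0.0;
  for (const Sample& s : samples) {
    x_avg += s.x;
    y_avg += s.y;
  }
  x_avg /= samples.size();
  y_avg /= samples.size();
  double numerator = 0.0;
  double denominator = 0.0;
  for (const Sample& s : samples) {
    numerator += (s.x - x_avg) * (s.y - y_avg);
    denominator += (s.x - x_avg) * (s.x - x_avg);
  }
  if (denominator == 0.0) {
    return std::nullopt;
  }
  return numerator / denominator;
}

int main() {
  std::deque<Sample> window = {{0, 1}, {10, 2}, {20, 3}};
  std::cout << Slope(window).value_or(0.0) << "\n";  // 0.1: delay is growing
  return 0;
}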
TrendlineEstimatorSettings settings_; diff --git a/modules/congestion_controller/goog_cc/trendline_estimator_unittest.cc b/modules/congestion_controller/goog_cc/trendline_estimator_unittest.cc index b0195abdf5..2c52d7dd4b 100644 --- a/modules/congestion_controller/goog_cc/trendline_estimator_unittest.cc +++ b/modules/congestion_controller/goog_cc/trendline_estimator_unittest.cc @@ -11,11 +11,13 @@ #include "modules/congestion_controller/goog_cc/trendline_estimator.h" #include -#include +#include +#include #include +#include "api/transport/bandwidth_usage.h" #include "api/transport/field_trial_based_config.h" -#include "rtc_base/random.h" +#include "rtc_base/checks.h" #include "test/gtest.h" namespace webrtc { diff --git a/modules/congestion_controller/include/receive_side_congestion_controller.h b/modules/congestion_controller/include/receive_side_congestion_controller.h index 8d81ccbe69..2041404e2e 100644 --- a/modules/congestion_controller/include/receive_side_congestion_controller.h +++ b/modules/congestion_controller/include/receive_side_congestion_controller.h @@ -11,15 +11,18 @@ #ifndef MODULES_CONGESTION_CONTROLLER_INCLUDE_RECEIVE_SIDE_CONGESTION_CONTROLLER_H_ #define MODULES_CONGESTION_CONTROLLER_INCLUDE_RECEIVE_SIDE_CONGESTION_CONTROLLER_H_ +#include #include -#include -#include "api/transport/network_control.h" +#include "api/environment/environment.h" +#include "api/media_types.h" +#include "api/sequence_checker.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "modules/congestion_controller/remb_throttler.h" -#include "modules/pacing/packet_router.h" -#include "modules/remote_bitrate_estimator/remote_estimator_proxy.h" +#include "modules/include/module_common_types.h" +#include "modules/remote_bitrate_estimator/congestion_control_feedback_generator.h" +#include "modules/remote_bitrate_estimator/transport_sequence_number_feedback_generator.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -35,12 +38,13 @@ class RemoteBitrateEstimator; class ReceiveSideCongestionController : public CallStatsObserver { public: ReceiveSideCongestionController( - Clock* clock, - RemoteEstimatorProxy::TransportFeedbackSender feedback_sender, - RembThrottler::RembSender remb_sender, - NetworkStateEstimator* network_state_estimator); + const Environment& env, + TransportSequenceNumberFeedbackGenenerator::RtcpSender feedback_sender, + RembThrottler::RembSender remb_sender); - ~ReceiveSideCongestionController() override {} + ~ReceiveSideCongestionController() override = default; + + void EnableSendCongestionControlFeedbackAccordingToRfc8888(); void OnReceivedPacket(const RtpPacketReceived& packet, MediaType media_type); @@ -54,8 +58,6 @@ class ReceiveSideCongestionController : public CallStatsObserver { // `bitrate` using RTCP REMB. void SetMaxDesiredReceiveBitrate(DataRate bitrate); - void SetTransportOverhead(DataSize overhead_per_packet); - // Returns latest receive side bandwidth estimation. // Returns zero if receive side bandwidth estimation is unavailable. 
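Per the declarations above, the receive-side controller is now constructed from an Environment plus the two RTCP/REMB sender callbacks, and RFC 8888 (CCFB) feedback is opted into explicitly. A hedged usage sketch based only on this header and the updated unit tests later in the diff; it compiles against the WebRTC headers named here, and the placeholder lambdas stand in for real senders:

// receive_side_cc_usage_sketch.cc - illustrative only, not standalone.
#include "api/environment/environment_factory.h"
#include "api/media_types.h"
#include "api/units/time_delta.h"
#include "modules/congestion_controller/include/receive_side_congestion_controller.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "system_wrappers/include/clock.h"

void ReceiveSideCongestionSketch(webrtc::Clock* clock) {
  // Placeholder senders; real code forwards the RTCP packets / REMB values.
  auto send_rtcp = [](auto&&... /*args*/) {};
  auto send_remb = [](auto&&... /*args*/) {};
  webrtc::ReceiveSideCongestionController controller(
      webrtc::CreateEnvironment(clock), send_rtcp, send_remb);
  // Opt in to RFC 8888 feedback instead of relying on the field trial.
  controller.EnableSendCongestionControlFeedbackAccordingToRfc8888();

  webrtc::RtpPacketReceived packet;
  packet.set_arrival_time(clock->CurrentTime());
  controller.OnReceivedPacket(packet, webrtc::MediaType::VIDEO);

  // MaybeProcess() returns how long to wait before the next poll.
  webrtc::TimeDelta next_process = controller.MaybeProcess();
  (void)next_process;
}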
DataRate LatestReceiveSideEstimate() const; @@ -72,9 +74,20 @@ class ReceiveSideCongestionController : public CallStatsObserver { void PickEstimator(bool has_absolute_send_time) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - Clock& clock_; + const Environment env_; RembThrottler remb_throttler_; - RemoteEstimatorProxy remote_estimator_proxy_; + + // TODO: bugs.webrtc.org/42224904 - Use sequence checker for all usage of + // ReceiveSideCongestionController. At the time of + // writing OnReceivedPacket and MaybeProcess can unfortunately be called on an + // arbitrary thread by external projects. + SequenceChecker sequence_checker_; + + bool send_rfc8888_congestion_feedback_ = false; + TransportSequenceNumberFeedbackGenenerator + transport_sequence_number_feedback_generator_; + CongestionControlFeedbackGenerator congestion_control_feedback_generator_ + RTC_GUARDED_BY(sequence_checker_); mutable Mutex mutex_; std::unique_ptr rbe_ RTC_GUARDED_BY(mutex_); diff --git a/modules/congestion_controller/pcc/BUILD.gn b/modules/congestion_controller/pcc/BUILD.gn index 85b12b3771..5fce915259 100644 --- a/modules/congestion_controller/pcc/BUILD.gn +++ b/modules/congestion_controller/pcc/BUILD.gn @@ -37,7 +37,6 @@ rtc_library("pcc_controller") { "../../../rtc_base:checks", "../../../rtc_base:random", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("monitor_interval") { @@ -91,7 +90,6 @@ rtc_library("bitrate_controller") { "../../../api/transport:network_control", "../../../api/units:data_rate", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } if (rtc_include_tests && !build_with_chromium) { @@ -111,6 +109,8 @@ if (rtc_include_tests && !build_with_chromium) { ":pcc_controller", ":rtt_tracker", ":utility_function", + "../../../api/environment", + "../../../api/environment:environment_factory", "../../../api/transport:network_control", "../../../api/units:data_rate", "../../../api/units:data_size", diff --git a/modules/congestion_controller/pcc/bitrate_controller.cc b/modules/congestion_controller/pcc/bitrate_controller.cc index 1a9cddb519..bb3e3e2e77 100644 --- a/modules/congestion_controller/pcc/bitrate_controller.cc +++ b/modules/congestion_controller/pcc/bitrate_controller.cc @@ -12,11 +12,17 @@ #include #include +#include #include #include +#include #include #include +#include "api/units/data_rate.h" +#include "modules/congestion_controller/pcc/monitor_interval.h" +#include "modules/congestion_controller/pcc/utility_function.h" + namespace webrtc { namespace pcc { @@ -107,12 +113,11 @@ double PccBitrateController::ApplyDynamicBoundary(double rate_change, return rate_change; } -absl::optional -PccBitrateController::ComputeRateUpdateForSlowStartMode( +std::optional PccBitrateController::ComputeRateUpdateForSlowStartMode( const PccMonitorInterval& monitor_interval) { double utility_value = utility_function_->Compute(monitor_interval); if (previous_utility_.has_value() && utility_value <= previous_utility_) { - return absl::nullopt; + return std::nullopt; } previous_utility_ = utility_value; return monitor_interval.GetTargetSendingRate(); diff --git a/modules/congestion_controller/pcc/bitrate_controller.h b/modules/congestion_controller/pcc/bitrate_controller.h index fadeea1b55..a34d429820 100644 --- a/modules/congestion_controller/pcc/bitrate_controller.h +++ b/modules/congestion_controller/pcc/bitrate_controller.h @@ -14,9 +14,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/units/data_rate.h" #include 
"modules/congestion_controller/pcc/monitor_interval.h" #include "modules/congestion_controller/pcc/utility_function.h" @@ -42,7 +42,7 @@ class PccBitrateController { double dynamic_boundary_increment, std::unique_ptr utility_function); - absl::optional ComputeRateUpdateForSlowStartMode( + std::optional ComputeRateUpdateForSlowStartMode( const PccMonitorInterval& monitor_interval); DataRate ComputeRateUpdateForOnlineLearningMode( @@ -65,7 +65,7 @@ class PccBitrateController { int64_t step_size_adjustments_number_; const double initial_conversion_factor_; - absl::optional previous_utility_; + std::optional previous_utility_; }; } // namespace pcc diff --git a/modules/congestion_controller/pcc/bitrate_controller_unittest.cc b/modules/congestion_controller/pcc/bitrate_controller_unittest.cc index 957d99b1de..b16beaacea 100644 --- a/modules/congestion_controller/pcc/bitrate_controller_unittest.cc +++ b/modules/congestion_controller/pcc/bitrate_controller_unittest.cc @@ -10,10 +10,19 @@ #include "modules/congestion_controller/pcc/bitrate_controller.h" +#include #include +#include #include +#include +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/congestion_controller/pcc/monitor_interval.h" +#include "modules/congestion_controller/pcc/utility_function.h" #include "test/gmock.h" #include "test/gtest.h" @@ -253,7 +262,7 @@ TEST(PccBitrateControllerTest, SlowStartMode) { kTargetSendingRate * 2); EXPECT_EQ( bitrate_controller.ComputeRateUpdateForSlowStartMode(monitor_block[0]), - absl::nullopt); + std::nullopt); } TEST(PccBitrateControllerTest, StepSizeIncrease) { diff --git a/modules/congestion_controller/pcc/monitor_interval.cc b/modules/congestion_controller/pcc/monitor_interval.cc index de1e2d5e69..116dda6195 100644 --- a/modules/congestion_controller/pcc/monitor_interval.cc +++ b/modules/congestion_controller/pcc/monitor_interval.cc @@ -13,7 +13,13 @@ #include #include +#include +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "rtc_base/logging.h" namespace webrtc { diff --git a/modules/congestion_controller/pcc/monitor_interval_unittest.cc b/modules/congestion_controller/pcc/monitor_interval_unittest.cc index aaff57bd2a..1d33f0040b 100644 --- a/modules/congestion_controller/pcc/monitor_interval_unittest.cc +++ b/modules/congestion_controller/pcc/monitor_interval_unittest.cc @@ -12,6 +12,13 @@ #include +#include + +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "test/gtest.h" namespace webrtc { diff --git a/modules/congestion_controller/pcc/pcc_factory.cc b/modules/congestion_controller/pcc/pcc_factory.cc index c35c6e8ab2..149c990760 100644 --- a/modules/congestion_controller/pcc/pcc_factory.cc +++ b/modules/congestion_controller/pcc/pcc_factory.cc @@ -12,6 +12,8 @@ #include +#include "api/transport/network_control.h" +#include "api/units/time_delta.h" #include "modules/congestion_controller/pcc/pcc_network_controller.h" namespace webrtc { diff --git a/modules/congestion_controller/pcc/pcc_network_controller.cc b/modules/congestion_controller/pcc/pcc_network_controller.cc index 8653470955..4a643fc55f 100644 --- a/modules/congestion_controller/pcc/pcc_network_controller.cc 
+++ b/modules/congestion_controller/pcc/pcc_network_controller.cc @@ -11,9 +11,16 @@ #include "modules/congestion_controller/pcc/pcc_network_controller.h" #include +#include +#include +#include -#include "absl/types/optional.h" +#include "api/transport/network_control.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" #include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "rtc_base/checks.h" namespace webrtc { @@ -339,12 +346,12 @@ void PccNetworkController::UpdateSendingRateAndMode() { } NetworkControlUpdate PccNetworkController::OnNetworkAvailability( - NetworkAvailability msg) { + NetworkAvailability /* msg */) { return NetworkControlUpdate(); } NetworkControlUpdate PccNetworkController::OnNetworkRouteChange( - NetworkRouteChange msg) { + NetworkRouteChange /* msg */) { return NetworkControlUpdate(); } @@ -354,7 +361,7 @@ NetworkControlUpdate PccNetworkController::OnProcessInterval( } NetworkControlUpdate PccNetworkController::OnTargetRateConstraints( - TargetRateConstraints msg) { + TargetRateConstraints /* msg */) { return NetworkControlUpdate(); } @@ -373,17 +380,18 @@ NetworkControlUpdate PccNetworkController::OnTransportLossReport( return NetworkControlUpdate(); } -NetworkControlUpdate PccNetworkController::OnStreamsConfig(StreamsConfig msg) { +NetworkControlUpdate PccNetworkController::OnStreamsConfig( + StreamsConfig /* msg */) { return NetworkControlUpdate(); } NetworkControlUpdate PccNetworkController::OnReceivedPacket( - ReceivedPacket msg) { + ReceivedPacket /* msg */) { return NetworkControlUpdate(); } NetworkControlUpdate PccNetworkController::OnNetworkStateEstimate( - NetworkStateEstimate msg) { + NetworkStateEstimate /* msg */) { return NetworkControlUpdate(); } diff --git a/modules/congestion_controller/pcc/pcc_network_controller_unittest.cc b/modules/congestion_controller/pcc/pcc_network_controller_unittest.cc index 31bc13f824..6391f42368 100644 --- a/modules/congestion_controller/pcc/pcc_network_controller_unittest.cc +++ b/modules/congestion_controller/pcc/pcc_network_controller_unittest.cc @@ -10,12 +10,18 @@ #include "modules/congestion_controller/pcc/pcc_network_controller.h" -#include - +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/transport/network_control.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/congestion_controller/pcc/pcc_factory.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/scenario/scenario.h" +#include "test/scenario/scenario_config.h" using ::testing::AllOf; using ::testing::Field; @@ -41,35 +47,19 @@ inline Matcher TargetRateCloseTo(DataRate rate) { AllOf(Ge(min_data_rate), Le(max_data_rate))); } -NetworkControllerConfig InitialConfig( - int starting_bandwidth_kbps = kInitialBitrate.kbps(), - int min_data_rate_kbps = 0, - int max_data_rate_kbps = 5 * kInitialBitrate.kbps()) { - NetworkControllerConfig config; - config.constraints.at_time = kDefaultStartTime; - config.constraints.min_data_rate = - DataRate::KilobitsPerSec(min_data_rate_kbps); - config.constraints.max_data_rate = - DataRate::KilobitsPerSec(max_data_rate_kbps); - config.constraints.starting_rate = - DataRate::KilobitsPerSec(starting_bandwidth_kbps); - return config; -} - -ProcessInterval InitialProcessInterval() { - ProcessInterval process_interval; - process_interval.at_time = kDefaultStartTime; - return 
process_interval; -} - } // namespace TEST(PccNetworkControllerTest, SendsConfigurationOnFirstProcess) { - std::unique_ptr controller_; - controller_.reset(new pcc::PccNetworkController(InitialConfig())); + Environment env = CreateEnvironment(); + NetworkControllerConfig config(env); + config.constraints.at_time = kDefaultStartTime; + config.constraints.min_data_rate = DataRate::Zero(); + config.constraints.max_data_rate = 5 * kInitialBitrate; + config.constraints.starting_rate = kInitialBitrate; + pcc::PccNetworkController controller(config); NetworkControlUpdate update = - controller_->OnProcessInterval(InitialProcessInterval()); + controller.OnProcessInterval({.at_time = kDefaultStartTime}); EXPECT_THAT(*update.target_rate, TargetRateCloseTo(kInitialBitrate)); EXPECT_THAT(*update.pacer_config, Property(&PacerConfig::data_rate, Ge(kInitialBitrate))); @@ -112,7 +102,8 @@ TEST(PccNetworkControllerTest, UpdatesTargetSendRate) { ret_net->UpdateConfig( [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(200); }); s.RunFor(TimeDelta::Seconds(35)); - EXPECT_NEAR(client->target_rate().kbps(), 170, 50); + EXPECT_LE(client->target_rate().kbps(), 200); + EXPECT_GT(client->target_rate().kbps(), 90); } } // namespace test diff --git a/modules/congestion_controller/pcc/rtt_tracker.cc b/modules/congestion_controller/pcc/rtt_tracker.cc index af9dc8f11b..250a309547 100644 --- a/modules/congestion_controller/pcc/rtt_tracker.cc +++ b/modules/congestion_controller/pcc/rtt_tracker.cc @@ -11,6 +11,11 @@ #include "modules/congestion_controller/pcc/rtt_tracker.h" #include +#include + +#include "api/transport/network_types.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" namespace webrtc { namespace pcc { diff --git a/modules/congestion_controller/pcc/rtt_tracker_unittest.cc b/modules/congestion_controller/pcc/rtt_tracker_unittest.cc index 7d90e86822..dbe6497afb 100644 --- a/modules/congestion_controller/pcc/rtt_tracker_unittest.cc +++ b/modules/congestion_controller/pcc/rtt_tracker_unittest.cc @@ -10,6 +10,9 @@ #include "modules/congestion_controller/pcc/rtt_tracker.h" +#include "api/transport/network_types.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "test/gtest.h" namespace webrtc { diff --git a/modules/congestion_controller/pcc/utility_function.cc b/modules/congestion_controller/pcc/utility_function.cc index 006a2fccd9..7ab177f2de 100644 --- a/modules/congestion_controller/pcc/utility_function.cc +++ b/modules/congestion_controller/pcc/utility_function.cc @@ -14,6 +14,7 @@ #include #include "api/units/data_rate.h" +#include "modules/congestion_controller/pcc/monitor_interval.h" #include "rtc_base/checks.h" namespace webrtc { diff --git a/modules/congestion_controller/pcc/utility_function_unittest.cc b/modules/congestion_controller/pcc/utility_function_unittest.cc index 19b2d15920..8f4add6797 100644 --- a/modules/congestion_controller/pcc/utility_function_unittest.cc +++ b/modules/congestion_controller/pcc/utility_function_unittest.cc @@ -13,7 +13,6 @@ #include #include -#include #include #include "api/transport/network_types.h" @@ -21,6 +20,7 @@ #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "modules/congestion_controller/pcc/monitor_interval.h" #include "test/gtest.h" namespace webrtc { diff --git a/modules/congestion_controller/receive_side_congestion_controller.cc b/modules/congestion_controller/receive_side_congestion_controller.cc index e042678897..d6aec6639b 100644 --- 
a/modules/congestion_controller/receive_side_congestion_controller.cc +++ b/modules/congestion_controller/receive_side_congestion_controller.cc @@ -10,13 +10,27 @@ #include "modules/congestion_controller/include/receive_side_congestion_controller.h" +#include +#include +#include +#include + +#include "api/environment/environment.h" #include "api/media_types.h" +#include "api/sequence_checker.h" #include "api/units/data_rate.h" -#include "modules/pacing/packet_router.h" -#include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/congestion_controller/remb_throttler.h" +#include "modules/remote_bitrate_estimator/congestion_control_feedback_generator.h" #include "modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h" #include "modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h" +#include "modules/remote_bitrate_estimator/transport_sequence_number_feedback_generator.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -49,7 +63,7 @@ void ReceiveSideCongestionController::PickEstimator( << "WrappingBitrateEstimator: Switching to absolute send time RBE."; using_absolute_send_time_ = true; rbe_ = std::make_unique( - &remb_throttler_, &clock_); + env_, &remb_throttler_); } packets_since_absolute_send_time_ = 0; } else { @@ -62,24 +76,39 @@ void ReceiveSideCongestionController::PickEstimator( "time offset RBE."; using_absolute_send_time_ = false; rbe_ = std::make_unique( - &remb_throttler_, &clock_); + env_, &remb_throttler_); } } } } ReceiveSideCongestionController::ReceiveSideCongestionController( - Clock* clock, - RemoteEstimatorProxy::TransportFeedbackSender feedback_sender, - RembThrottler::RembSender remb_sender, - NetworkStateEstimator* network_state_estimator) - : clock_(*clock), - remb_throttler_(std::move(remb_sender), clock), - remote_estimator_proxy_(std::move(feedback_sender), - network_state_estimator), - rbe_(new RemoteBitrateEstimatorSingleStream(&remb_throttler_, clock)), + const Environment& env, + TransportSequenceNumberFeedbackGenenerator::RtcpSender feedback_sender, + RembThrottler::RembSender remb_sender) + : env_(env), + remb_throttler_(std::move(remb_sender), &env_.clock()), + transport_sequence_number_feedback_generator_(feedback_sender), + congestion_control_feedback_generator_(env, feedback_sender), + rbe_(std::make_unique( + env_, + &remb_throttler_)), using_absolute_send_time_(false), - packets_since_absolute_send_time_(0) {} + packets_since_absolute_send_time_(0) { + FieldTrialParameter force_send_rfc8888_feedback("force_send", false); + ParseFieldTrial( + {&force_send_rfc8888_feedback}, + env.field_trials().Lookup("WebRTC-RFC8888CongestionControlFeedback")); + if (force_send_rfc8888_feedback) { + EnableSendCongestionControlFeedbackAccordingToRfc8888(); + } +} + +void ReceiveSideCongestionController:: + EnableSendCongestionControlFeedbackAccordingToRfc8888() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + send_rfc8888_congestion_feedback_ = true; +} void ReceiveSideCongestionController::OnReceivedPacket( const RtpPacketReceived& packet, @@ -87,6 +116,19 @@ void ReceiveSideCongestionController::OnReceivedPacket( bool has_transport_sequence_number = packet.HasExtension() || packet.HasExtension(); + if 
(send_rfc8888_congestion_feedback_) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + congestion_control_feedback_generator_.OnReceivedPacket(packet); + // TODO(https://bugs.webrtc.org/374197376): Utilize RFC 8888 feedback, which + // provides comprehensive details similar to transport-cc. To ensure a + // smooth transition, we will continue using transport sequence number + // feedback temporarily. Once validation is complete, we will fully + // transition to using RFC 8888 feedback exclusively. + if (has_transport_sequence_number) { + transport_sequence_number_feedback_generator_.OnReceivedPacket(packet); + } + return; + } if (media_type == MediaType::AUDIO && !has_transport_sequence_number) { // For audio, we only support send side BWE. return; @@ -94,7 +136,7 @@ void ReceiveSideCongestionController::OnReceivedPacket( if (has_transport_sequence_number) { // Send-side BWE. - remote_estimator_proxy_.IncomingPacket(packet); + transport_sequence_number_feedback_generator_.OnReceivedPacket(packet); } else { // Receive-side BWE. MutexLock lock(&mutex_); @@ -104,15 +146,30 @@ void ReceiveSideCongestionController::OnReceivedPacket( } void ReceiveSideCongestionController::OnBitrateChanged(int bitrate_bps) { - remote_estimator_proxy_.OnBitrateChanged(bitrate_bps); + RTC_DCHECK_RUN_ON(&sequence_checker_); + DataRate send_bandwidth_estimate = DataRate::BitsPerSec(bitrate_bps); + transport_sequence_number_feedback_generator_.OnSendBandwidthEstimateChanged( + send_bandwidth_estimate); + congestion_control_feedback_generator_.OnSendBandwidthEstimateChanged( + send_bandwidth_estimate); } TimeDelta ReceiveSideCongestionController::MaybeProcess() { - Timestamp now = clock_.CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); + if (send_rfc8888_congestion_feedback_) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + TimeDelta time_until_cc_rep = + congestion_control_feedback_generator_.Process(now); + TimeDelta time_until_rep = + transport_sequence_number_feedback_generator_.Process(now); + TimeDelta time_until = std::min(time_until_cc_rep, time_until_rep); + return std::max(time_until, TimeDelta::Zero()); + } mutex_.Lock(); TimeDelta time_until_rbe = rbe_->Process(); mutex_.Unlock(); - TimeDelta time_until_rep = remote_estimator_proxy_.Process(now); + TimeDelta time_until_rep = + transport_sequence_number_feedback_generator_.Process(now); TimeDelta time_until = std::min(time_until_rbe, time_until_rep); return std::max(time_until, TimeDelta::Zero()); } @@ -122,9 +179,4 @@ void ReceiveSideCongestionController::SetMaxDesiredReceiveBitrate( remb_throttler_.SetMaxDesiredReceiveBitrate(bitrate); } -void ReceiveSideCongestionController::SetTransportOverhead( - DataSize overhead_per_packet) { - remote_estimator_proxy_.SetTransportOverhead(overhead_per_packet); -} - } // namespace webrtc diff --git a/modules/congestion_controller/receive_side_congestion_controller_unittest.cc b/modules/congestion_controller/receive_side_congestion_controller_unittest.cc index a0658476ca..0fb59147a6 100644 --- a/modules/congestion_controller/receive_side_congestion_controller_unittest.cc +++ b/modules/congestion_controller/receive_side_congestion_controller_unittest.cc @@ -10,20 +10,31 @@ #include "modules/congestion_controller/include/receive_side_congestion_controller.h" +#include +#include +#include + +#include "api/environment/environment_factory.h" +#include "api/media_types.h" #include "api/test/network_emulation/create_cross_traffic.h" #include "api/test/network_emulation/cross_traffic.h" #include "api/units/data_rate.h" 
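The constructor change above enables RFC 8888 feedback when the WebRTC-RFC8888CongestionControlFeedback trial carries force_send:true, parsed with FieldTrialParameter and ParseFieldTrial. A small sketch of that parsing pattern, assuming only the field_trial_parser.h helpers already used in the hunk; the function name is illustrative, and the input is the key:value group string that field_trials().Lookup() returns for the trial:

// field_trial_force_send_sketch.cc - illustrative only, not standalone.
#include <string>

#include "rtc_base/experiments/field_trial_parser.h"

// Returns true for a group string such as "force_send:true".
bool ForceRfc8888Feedback(const std::string& trial_group) {
  webrtc::FieldTrialParameter<bool> force_send("force_send", false);
  webrtc::ParseFieldTrial({&force_send}, trial_group);
  return force_send.Get();
}

The tests just below exercise the same path end to end by passing the full trial string "WebRTC-RFC8888CongestionControlFeedback/force_send:true/" through test::ExplicitKeyValueConfig.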
#include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "modules/pacing/packet_router.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" +#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "rtc_base/buffer.h" #include "system_wrappers/include/clock.h" +#include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/scenario/scenario.h" +#include "test/scenario/scenario_config.h" namespace webrtc { namespace test { @@ -33,6 +44,7 @@ using ::testing::_; using ::testing::AtLeast; using ::testing::ElementsAre; using ::testing::MockFunction; +using ::testing::SizeIs; constexpr DataRate kInitialBitrate = DataRate::BitsPerSec(60'000); @@ -41,11 +53,11 @@ TEST(ReceiveSideCongestionControllerTest, SendsRembWithAbsSendTime) { MockFunction>)> feedback_sender; MockFunction)> remb_sender; - SimulatedClock clock_(123456); + SimulatedClock clock(123456); - ReceiveSideCongestionController controller( - &clock_, feedback_sender.AsStdFunction(), remb_sender.AsStdFunction(), - nullptr); + ReceiveSideCongestionController controller(CreateEnvironment(&clock), + feedback_sender.AsStdFunction(), + remb_sender.AsStdFunction()); RtpHeaderExtensionMap extensions; extensions.Register(1); @@ -58,8 +70,8 @@ TEST(ReceiveSideCongestionControllerTest, SendsRembWithAbsSendTime) { .Times(AtLeast(1)); for (int i = 0; i < 10; ++i) { - clock_.AdvanceTime(kPayloadSize / kInitialBitrate); - Timestamp now = clock_.CurrentTime(); + clock.AdvanceTime(kPayloadSize / kInitialBitrate); + Timestamp now = clock.CurrentTime(); packet.SetExtension(AbsoluteSendTime::To24Bits(now)); packet.set_arrival_time(now); controller.OnReceivedPacket(packet, MediaType::VIDEO); @@ -71,15 +83,102 @@ TEST(ReceiveSideCongestionControllerTest, MockFunction>)> feedback_sender; MockFunction)> remb_sender; - SimulatedClock clock_(123456); + SimulatedClock clock(123456); - ReceiveSideCongestionController controller( - &clock_, feedback_sender.AsStdFunction(), remb_sender.AsStdFunction(), - nullptr); + ReceiveSideCongestionController controller(CreateEnvironment(&clock), + feedback_sender.AsStdFunction(), + remb_sender.AsStdFunction()); EXPECT_CALL(remb_sender, Call(123, _)); controller.SetMaxDesiredReceiveBitrate(DataRate::BitsPerSec(123)); } +void CheckRfc8888Feedback( + const std::vector>& rtcp_packets) { + ASSERT_THAT(rtcp_packets, SizeIs(1)); + Buffer buffer = rtcp_packets[0]->Build(); + rtcp::CommonHeader header; + EXPECT_TRUE(header.Parse(buffer.data(), buffer.size())); + // Check for RFC 8888 format message type 11(CCFB) + EXPECT_EQ(header.fmt(), + rtcp::CongestionControlFeedback::kFeedbackMessageType); +} + +TEST(ReceiveSideCongestionControllerTest, SendsRfc8888FeedbackIfForced) { + test::ExplicitKeyValueConfig field_trials( + "WebRTC-RFC8888CongestionControlFeedback/force_send:true/"); + MockFunction>)> + rtcp_sender; + MockFunction)> remb_sender; + SimulatedClock clock(123456); + ReceiveSideCongestionController controller( + CreateEnvironment(&clock, &field_trials), rtcp_sender.AsStdFunction(), + remb_sender.AsStdFunction()); + + // Expect that RTCP feedback is sent. 
+ EXPECT_CALL(rtcp_sender, Call) + .WillOnce( + [&](std::vector> rtcp_packets) { + CheckRfc8888Feedback(rtcp_packets); + }); + // Expect that REMB is not sent. + EXPECT_CALL(remb_sender, Call).Times(0); + + RtpPacketReceived packet; + packet.set_arrival_time(clock.CurrentTime()); + controller.OnReceivedPacket(packet, MediaType::VIDEO); + TimeDelta next_process = controller.MaybeProcess(); + clock.AdvanceTime(next_process); + next_process = controller.MaybeProcess(); +} + +TEST(ReceiveSideCongestionControllerTest, SendsRfc8888FeedbackIfEnabled) { + MockFunction>)> + rtcp_sender; + MockFunction)> remb_sender; + SimulatedClock clock(123456); + ReceiveSideCongestionController controller(CreateEnvironment(&clock), + rtcp_sender.AsStdFunction(), + remb_sender.AsStdFunction()); + controller.EnableSendCongestionControlFeedbackAccordingToRfc8888(); + + // Expect that RTCP feedback is sent. + EXPECT_CALL(rtcp_sender, Call) + .WillOnce( + [&](std::vector> rtcp_packets) { + CheckRfc8888Feedback(rtcp_packets); + }); + // Expect that REMB is not sent. + EXPECT_CALL(remb_sender, Call).Times(0); + + RtpPacketReceived packet; + packet.set_arrival_time(clock.CurrentTime()); + controller.OnReceivedPacket(packet, MediaType::VIDEO); + TimeDelta next_process = controller.MaybeProcess(); + clock.AdvanceTime(next_process); + next_process = controller.MaybeProcess(); +} + +TEST(ReceiveSideCongestionControllerTest, + SendsNoFeedbackIfNotRfcRfc8888EnabledAndNoTransportFeedback) { + MockFunction>)> + rtcp_sender; + MockFunction)> remb_sender; + SimulatedClock clock(123456); + ReceiveSideCongestionController controller(CreateEnvironment(&clock), + rtcp_sender.AsStdFunction(), + remb_sender.AsStdFunction()); + + // No Transport feedback is sent because received packet does not have + // transport sequence number rtp header extension. 
+ EXPECT_CALL(rtcp_sender, Call).Times(0); + RtpPacketReceived packet; + packet.set_arrival_time(clock.CurrentTime()); + controller.OnReceivedPacket(packet, MediaType::VIDEO); + TimeDelta next_process = controller.MaybeProcess(); + clock.AdvanceTime(next_process); + next_process = controller.MaybeProcess(); +} + TEST(ReceiveSideCongestionControllerTest, ConvergesToCapacity) { Scenario s("receive_cc_unit/converge"); NetworkSimulationConfig net_conf; diff --git a/modules/congestion_controller/remb_throttler.cc b/modules/congestion_controller/remb_throttler.cc index fcc30af9a8..d51897e025 100644 --- a/modules/congestion_controller/remb_throttler.cc +++ b/modules/congestion_controller/remb_throttler.cc @@ -11,7 +11,15 @@ #include "modules/congestion_controller/remb_throttler.h" #include +#include #include +#include + +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/synchronization/mutex.h" +#include "system_wrappers/include/clock.h" namespace webrtc { diff --git a/modules/congestion_controller/remb_throttler.h b/modules/congestion_controller/remb_throttler.h index 85292cbc09..4b0f0a246d 100644 --- a/modules/congestion_controller/remb_throttler.h +++ b/modules/congestion_controller/remb_throttler.h @@ -10,14 +10,16 @@ #ifndef MODULES_CONGESTION_CONTROLLER_REMB_THROTTLER_H_ #define MODULES_CONGESTION_CONTROLLER_REMB_THROTTLER_H_ +#include #include #include #include "api/units/data_rate.h" -#include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" +#include "system_wrappers/include/clock.h" namespace webrtc { diff --git a/modules/congestion_controller/remb_throttler_unittest.cc b/modules/congestion_controller/remb_throttler_unittest.cc index 3f8df8a7bb..4e4d0b63ac 100644 --- a/modules/congestion_controller/remb_throttler_unittest.cc +++ b/modules/congestion_controller/remb_throttler_unittest.cc @@ -9,10 +9,12 @@ */ #include "modules/congestion_controller/remb_throttler.h" +#include #include #include "api/units/data_rate.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "system_wrappers/include/clock.h" #include "test/gmock.h" #include "test/gtest.h" diff --git a/modules/congestion_controller/rtp/BUILD.gn b/modules/congestion_controller/rtp/BUILD.gn index cd13332b7f..302c3dee01 100644 --- a/modules/congestion_controller/rtp/BUILD.gn +++ b/modules/congestion_controller/rtp/BUILD.gn @@ -8,14 +8,6 @@ import("../../../webrtc.gni") -config("bwe_test_logging") { - if (rtc_enable_bwe_test_logging) { - defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=1" ] - } else { - defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0" ] - } -} - rtc_library("control_handler") { visibility = [ "*" ] sources = [ @@ -34,10 +26,8 @@ rtc_library("control_handler") { "../../../rtc_base:safe_conversions", "../../../rtc_base:safe_minmax", "../../../rtc_base/system:no_unique_address", - "../../../system_wrappers:field_trial", "../../pacing", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("transport_feedback") { visibility = [ "*" ] @@ -51,8 +41,10 @@ rtc_library("transport_feedback") { deps = [ "../..:module_api_public", "../../../api:sequence_checker", + "../../../api/transport:ecn_marking", "../../../api/transport:network_control", "../../../api/units:data_size", + "../../../api/units:time_delta", 
"../../../api/units:timestamp", "../../../rtc_base:checks", "../../../rtc_base:logging", @@ -64,11 +56,9 @@ rtc_library("transport_feedback") { "../../../rtc_base/system:no_unique_address", "../../../system_wrappers", "../../../system_wrappers:field_trial", + "../../rtp_rtcp:ntp_time_util", "../../rtp_rtcp:rtp_rtcp_format", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -83,9 +73,16 @@ if (rtc_include_tests) { deps = [ ":transport_feedback", "../:congestion_controller", + "../../../api:array_view", + "../../../api/transport:ecn_marking", "../../../api/transport:network_control", + "../../../api/units:data_size", + "../../../api/units:time_delta", + "../../../api/units:timestamp", "../../../logging:mocks", + "../../../rtc_base:buffer", "../../../rtc_base:checks", + "../../../rtc_base:logging", "../../../rtc_base:safe_conversions", "../../../rtc_base/network:sent_packet", "../../../system_wrappers", @@ -93,6 +90,7 @@ if (rtc_include_tests) { "../../../test:test_support", "../../pacing", "../../remote_bitrate_estimator", + "../../rtp_rtcp:ntp_time_util", "../../rtp_rtcp:rtp_rtcp_format", "//testing/gmock", ] diff --git a/modules/congestion_controller/rtp/control_handler.cc b/modules/congestion_controller/rtp/control_handler.cc index ffa373aeba..8322baae3b 100644 --- a/modules/congestion_controller/rtp/control_handler.cc +++ b/modules/congestion_controller/rtp/control_handler.cc @@ -10,31 +10,17 @@ #include "modules/congestion_controller/rtp/control_handler.h" -#include -#include +#include +#include "api/sequence_checker.h" +#include "api/transport/network_types.h" #include "api/units/data_rate.h" +#include "api/units/time_delta.h" #include "modules/pacing/pacing_controller.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/numerics/safe_minmax.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { -namespace { - -// By default, pacer emergency stops encoder when buffer reaches a high level. 
-bool IsPacerEmergencyStopDisabled() { - return field_trial::IsEnabled("WebRTC-DisablePacerEmergencyStop"); -} - -} // namespace -CongestionControlHandler::CongestionControlHandler() - : disable_pacer_emergency_stop_(IsPacerEmergencyStopDisabled()) { - sequenced_checker_.Detach(); -} - -CongestionControlHandler::~CongestionControlHandler() {} void CongestionControlHandler::SetTargetRate( TargetTransferRate new_target_rate) { @@ -53,18 +39,17 @@ void CongestionControlHandler::SetPacerQueue(TimeDelta expected_queue_time) { pacer_expected_queue_ms_ = expected_queue_time.ms(); } -absl::optional CongestionControlHandler::GetUpdate() { +std::optional CongestionControlHandler::GetUpdate() { RTC_DCHECK_RUN_ON(&sequenced_checker_); if (!last_incoming_.has_value()) - return absl::nullopt; + return std::nullopt; TargetTransferRate new_outgoing = *last_incoming_; DataRate log_target_rate = new_outgoing.target_rate; bool pause_encoding = false; if (!network_available_) { pause_encoding = true; - } else if (!disable_pacer_emergency_stop_ && - pacer_expected_queue_ms_ > - PacingController::kMaxExpectedQueueLength.ms()) { + } else if (pacer_expected_queue_ms_ > + PacingController::kMaxExpectedQueueLength.ms()) { pause_encoding = true; } if (pause_encoding) @@ -83,7 +68,7 @@ absl::optional CongestionControlHandler::GetUpdate() { last_reported_ = new_outgoing; return new_outgoing; } - return absl::nullopt; + return std::nullopt; } } // namespace webrtc diff --git a/modules/congestion_controller/rtp/control_handler.h b/modules/congestion_controller/rtp/control_handler.h index d8e7263a02..1ab246c12e 100644 --- a/modules/congestion_controller/rtp/control_handler.h +++ b/modules/congestion_controller/rtp/control_handler.h @@ -13,10 +13,10 @@ #include -#include "absl/types/optional.h" +#include + #include "api/sequence_checker.h" #include "api/transport/network_types.h" -#include "api/units/data_size.h" #include "api/units/time_delta.h" #include "rtc_base/system/no_unique_address.h" @@ -28,24 +28,24 @@ namespace webrtc { // destruction unless members are properly ordered. 
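With the hunk above, CongestionControlHandler drops the WebRTC-DisablePacerEmergencyStop escape hatch and keeps its state in std::optional, so GetUpdate() only yields a TargetTransferRate when the rate or the pause decision actually changed (network unavailable, or the pacer queue beyond PacingController::kMaxExpectedQueueLength). A hedged consumer sketch against the declarations in this header; the caller and the downstream hand-off are assumptions:

// control_handler_poll_sketch.cc - illustrative only, not standalone.
#include <optional>

#include "api/transport/network_types.h"
#include "api/units/time_delta.h"
#include "modules/congestion_controller/rtp/control_handler.h"

void OnControllerUpdate(webrtc::CongestionControlHandler& handler,
                        const webrtc::TargetTransferRate& new_target,
                        webrtc::TimeDelta expected_pacer_queue) {
  handler.SetTargetRate(new_target);
  handler.SetPacerQueue(expected_pacer_queue);
  // Empty unless something changed, so only real updates reach the encoder.
  if (std::optional<webrtc::TargetTransferRate> update = handler.GetUpdate()) {
    // Forward *update to the encoder/pacer here (placeholder).
    (void)*update;
  }
}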
class CongestionControlHandler { public: - CongestionControlHandler(); - ~CongestionControlHandler(); + CongestionControlHandler() = default; CongestionControlHandler(const CongestionControlHandler&) = delete; CongestionControlHandler& operator=(const CongestionControlHandler&) = delete; + ~CongestionControlHandler() = default; + void SetTargetRate(TargetTransferRate new_target_rate); void SetNetworkAvailability(bool network_available); void SetPacerQueue(TimeDelta expected_queue_time); - absl::optional GetUpdate(); + std::optional GetUpdate(); private: - absl::optional last_incoming_; - absl::optional last_reported_; + std::optional last_incoming_; + std::optional last_reported_; bool network_available_ = true; bool encoder_paused_in_last_report_ = false; - const bool disable_pacer_emergency_stop_; int64_t pacer_expected_queue_ms_ = 0; RTC_NO_UNIQUE_ADDRESS SequenceChecker sequenced_checker_; diff --git a/modules/congestion_controller/rtp/transport_feedback_adapter.cc b/modules/congestion_controller/rtp/transport_feedback_adapter.cc index be17e50472..53a12f1e67 100644 --- a/modules/congestion_controller/rtp/transport_feedback_adapter.cc +++ b/modules/congestion_controller/rtp/transport_feedback_adapter.cc @@ -13,15 +13,26 @@ #include #include -#include +#include +#include #include +#include #include "absl/algorithm/container.h" +#include "api/transport/ecn_marking.h" +#include "api/transport/network_types.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/ntp_time_util.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_route.h" namespace webrtc { @@ -52,7 +63,7 @@ void InFlightBytesTracker::RemoveInFlightPacketBytes( } DataSize InFlightBytesTracker::GetOutstandingData( - const rtc::NetworkRoute& network_route) const { + const NetworkRoute& network_route) const { auto it = in_flight_data_.find(network_route); if (it != in_flight_data_.end()) { return it->second; @@ -63,8 +74,8 @@ DataSize InFlightBytesTracker::GetOutstandingData( // Comparator for consistent map with NetworkRoute as key. 
bool InFlightBytesTracker::NetworkRouteComparator::operator()( - const rtc::NetworkRoute& a, - const rtc::NetworkRoute& b) const { + const NetworkRoute& a, + const NetworkRoute& b) const { if (a.local.network_id() != b.local.network_id()) return a.local.network_id() < b.local.network_id(); if (a.remote.network_id() != b.remote.network_id()) @@ -85,17 +96,26 @@ bool InFlightBytesTracker::NetworkRouteComparator::operator()( TransportFeedbackAdapter::TransportFeedbackAdapter() = default; -void TransportFeedbackAdapter::AddPacket(const RtpPacketSendInfo& packet_info, +void TransportFeedbackAdapter::AddPacket(const RtpPacketToSend& packet_to_send, + const PacedPacketInfo& pacing_info, size_t overhead_bytes, Timestamp creation_time) { - PacketFeedback packet; - packet.creation_time = creation_time; - packet.sent.sequence_number = - seq_num_unwrapper_.Unwrap(packet_info.transport_sequence_number); - packet.sent.size = DataSize::Bytes(packet_info.length + overhead_bytes); - packet.sent.audio = packet_info.packet_type == RtpPacketMediaType::kAudio; - packet.network_route = network_route_; - packet.sent.pacing_info = packet_info.pacing_info; + RTC_DCHECK(packet_to_send.transport_sequence_number()); + PacketFeedback feedback; + + feedback.creation_time = creation_time; + // Note, if transport sequence number header extension is used, transport + // sequence numbers are wrapped to 16 bit. See + // RtpSenderEgress::CompleteSendPacket. + feedback.sent.sequence_number = seq_num_unwrapper_.Unwrap( + packet_to_send.transport_sequence_number().value_or(0)); + feedback.sent.size = DataSize::Bytes(packet_to_send.size() + overhead_bytes); + feedback.sent.audio = + packet_to_send.packet_type() == RtpPacketMediaType::kAudio; + feedback.network_route = network_route_; + feedback.sent.pacing_info = pacing_info; + feedback.ssrc = packet_to_send.Ssrc(); + feedback.rtp_sequence_number = packet_to_send.SequenceNumber(); while (!history_.empty() && creation_time - history_.begin()->second.creation_time > @@ -103,13 +123,25 @@ void TransportFeedbackAdapter::AddPacket(const RtpPacketSendInfo& packet_info, // TODO(sprang): Warn if erasing (too many) old items? if (history_.begin()->second.sent.sequence_number > last_ack_seq_num_) in_flight_.RemoveInFlightPacketBytes(history_.begin()->second); + + const PacketFeedback& packet = history_.begin()->second; + rtp_to_transport_sequence_number_.erase( + {.ssrc = packet.ssrc, + .rtp_sequence_number = packet.rtp_sequence_number}); history_.erase(history_.begin()); } - history_.insert(std::make_pair(packet.sent.sequence_number, packet)); + // Note that it can happen that the same SSRC and sequence number is sent + // again. e.g, audio retransmission. + rtp_to_transport_sequence_number_.emplace( + SsrcAndRtpSequencenumber( + {.ssrc = feedback.ssrc, + .rtp_sequence_number = feedback.rtp_sequence_number}), + feedback.sent.sequence_number); + history_.emplace(feedback.sent.sequence_number, feedback); } -absl::optional TransportFeedbackAdapter::ProcessSentPacket( - const rtc::SentPacket& sent_packet) { +std::optional TransportFeedbackAdapter::ProcessSentPacket( + const SentPacketInfo& sent_packet) { auto send_time = Timestamp::Millis(sent_packet.send_time_ms); // TODO(srte): Only use one way to indicate that packet feedback is used. 
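AddPacket above now stores every sent packet under its unwrapped transport sequence number and also indexes it by (SSRC, RTP sequence number), so RFC 8888 feedback, which identifies packets that way, can reach the same history entry; stale entries and their secondary keys are pruned together. A self-contained sketch of such a double-keyed, age-bounded send history; the types, field names and window are illustrative, not the WebRTC ones:

// send_history_sketch.cc - illustrative only.
#include <cstdint>
#include <iostream>
#include <map>
#include <optional>
#include <utility>

struct SentPacket {
  int64_t transport_seq = 0;
  uint32_t ssrc = 0;
  uint16_t rtp_seq = 0;
  int64_t send_time_ms = 0;
};

class SendHistory {
 public:
  void Add(const SentPacket& packet, int64_t now_ms, int64_t max_age_ms) {
    // Drop entries older than the window together with their secondary key.
    while (!history_.empty() &&
           now_ms - history_.begin()->second.send_time_ms > max_age_ms) {
      const SentPacket& old = history_.begin()->second;
      by_rtp_.erase({old.ssrc, old.rtp_seq});
      history_.erase(history_.begin());
    }
    by_rtp_[{packet.ssrc, packet.rtp_seq}] = packet.transport_seq;
    history_[packet.transport_seq] = packet;
  }

  // Lookup path used by RFC 8888 feedback, which reports (SSRC, RTP seq).
  std::optional<SentPacket> Find(uint32_t ssrc, uint16_t rtp_seq) const {
    auto it = by_rtp_.find({ssrc, rtp_seq});
    if (it == by_rtp_.end()) return std::nullopt;
    auto hist_it = history_.find(it->second);
    if (hist_it == history_.end()) return std::nullopt;
    return hist_it->second;
  }

 private:
  std::map<int64_t, SentPacket> history_;  // keyed by transport sequence number
  std::map<std::pair<uint32_t, uint16_t>, int64_t> by_rtp_;  // (ssrc, rtp seq)
};

int main() {
  SendHistory history;
  history.Add({.transport_seq = 90, .ssrc = 1234, .rtp_seq = 7,
               .send_time_ms = 1000},
              /*now_ms=*/1000, /*max_age_ms=*/60000);
  std::cout << history.Find(1234, 7).has_value() << "\n";  // 1
  return 0;
}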
if (sent_packet.info.included_in_feedback || sent_packet.packet_id != -1) { @@ -144,58 +176,28 @@ absl::optional TransportFeedbackAdapter::ProcessSentPacket( DataSize::Bytes(sent_packet.info.packet_size_bytes); last_untracked_send_time_ = std::max(last_untracked_send_time_, send_time); } - return absl::nullopt; + return std::nullopt; } -absl::optional +std::optional TransportFeedbackAdapter::ProcessTransportFeedback( const rtcp::TransportFeedback& feedback, Timestamp feedback_receive_time) { if (feedback.GetPacketStatusCount() == 0) { RTC_LOG(LS_INFO) << "Empty transport feedback packet received."; - return absl::nullopt; - } - - TransportPacketsFeedback msg; - msg.feedback_time = feedback_receive_time; - - msg.prior_in_flight = in_flight_.GetOutstandingData(network_route_); - msg.packet_feedbacks = - ProcessTransportFeedbackInner(feedback, feedback_receive_time); - if (msg.packet_feedbacks.empty()) - return absl::nullopt; - - auto it = history_.find(last_ack_seq_num_); - if (it != history_.end()) { - msg.first_unacked_send_time = it->second.sent.send_time; + return std::nullopt; } - msg.data_in_flight = in_flight_.GetOutstandingData(network_route_); - - return msg; -} - -void TransportFeedbackAdapter::SetNetworkRoute( - const rtc::NetworkRoute& network_route) { - network_route_ = network_route; -} - -DataSize TransportFeedbackAdapter::GetOutstandingData() const { - return in_flight_.GetOutstandingData(network_route_); -} -std::vector -TransportFeedbackAdapter::ProcessTransportFeedbackInner( - const rtcp::TransportFeedback& feedback, - Timestamp feedback_receive_time) { // Add timestamp deltas to a local time base selected on first packet arrival. // This won't be the true time base, but makes it easier to manually inspect // time stamps. - if (last_timestamp_.IsInfinite()) { + if (last_transport_feedback_base_time_.IsInfinite()) { current_offset_ = feedback_receive_time; } else { // TODO(srte): We shouldn't need to do rounding here. - const TimeDelta delta = feedback.GetBaseDelta(last_timestamp_) - .RoundDownTo(TimeDelta::Millis(1)); + const TimeDelta delta = + feedback.GetBaseDelta(last_transport_feedback_base_time_) + .RoundDownTo(TimeDelta::Millis(1)); // Protect against assigning current_offset_ negative value. if (delta < Timestamp::Zero() - current_offset_) { RTC_LOG(LS_WARNING) << "Unexpected feedback timestamp received."; @@ -204,7 +206,7 @@ TransportFeedbackAdapter::ProcessTransportFeedbackInner( current_offset_ += delta; } } - last_timestamp_ = feedback.BaseTime(); + last_transport_feedback_base_time_ = feedback.BaseTime(); std::vector packet_result_vector; packet_result_vector.reserve(feedback.GetPacketStatusCount()); @@ -212,64 +214,191 @@ TransportFeedbackAdapter::ProcessTransportFeedbackInner( size_t failed_lookups = 0; size_t ignored = 0; - feedback.ForAllPackets( - [&](uint16_t sequence_number, TimeDelta delta_since_base) { - int64_t seq_num = seq_num_unwrapper_.Unwrap(sequence_number); - - if (seq_num > last_ack_seq_num_) { - // Starts at history_.begin() if last_ack_seq_num_ < 0, since any - // valid sequence number is >= 0. - for (auto it = history_.upper_bound(last_ack_seq_num_); - it != history_.upper_bound(seq_num); ++it) { - in_flight_.RemoveInFlightPacketBytes(it->second); - } - last_ack_seq_num_ = seq_num; - } - - auto it = history_.find(seq_num); - if (it == history_.end()) { - ++failed_lookups; - return; - } - - if (it->second.sent.send_time.IsInfinite()) { - // TODO(srte): Fix the tests that makes this happen and make this a - // DCHECK. 
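The rewritten ProcessTransportFeedback above maintains a local receive timeline: the first feedback pins current_offset_ to the local receive time, each later feedback advances it by GetBaseDelta() rounded down to one millisecond, and a delta that would drive the offset negative resets it instead. A self-contained sketch of that bookkeeping in plain int64 milliseconds rather than the Timestamp/TimeDelta types:

// feedback_base_time_sketch.cc - illustrative only.
#include <cstdint>
#include <iostream>
#include <optional>

class FeedbackTimeline {
 public:
  // `base_delta_ms` is the difference between this feedback's base time and
  // the previous one (what GetBaseDelta() reports); nullopt on first feedback.
  // Returns the local offset used to place per-packet receive times.
  int64_t OnFeedback(int64_t receive_time_ms,
                     std::optional<int64_t> base_delta_ms) {
    if (!base_delta_ms.has_value()) {
      current_offset_ms_ = receive_time_ms;  // first feedback seen
    } else if (current_offset_ms_ + *base_delta_ms < 0) {
      current_offset_ms_ = receive_time_ms;  // guard against a negative offset
    } else {
      current_offset_ms_ += *base_delta_ms;
    }
    return current_offset_ms_;
  }

  // Per-packet arrival time: offset plus the delta since the feedback base.
  int64_t ArrivalTimeMs(int64_t delta_since_base_ms) const {
    return current_offset_ms_ + delta_since_base_ms;
  }

 private:
  int64_t current_offset_ms_ = 0;
};

int main() {
  FeedbackTimeline timeline;
  timeline.OnFeedback(/*receive_time_ms=*/10'000, std::nullopt);
  timeline.OnFeedback(/*receive_time_ms=*/10'250, /*base_delta_ms=*/240);
  std::cout << timeline.ArrivalTimeMs(/*delta_since_base_ms=*/5) << "\n";  // 10245
  return 0;
}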
- RTC_DLOG(LS_ERROR) - << "Received feedback before packet was indicated as sent"; - return; - } - - PacketFeedback packet_feedback = it->second; - if (delta_since_base.IsFinite()) { - packet_feedback.receive_time = - current_offset_ + - delta_since_base.RoundDownTo(TimeDelta::Millis(1)); - // Note: Lost packets are not removed from history because they might - // be reported as received by a later feedback. - history_.erase(it); - } - if (packet_feedback.network_route == network_route_) { - PacketResult result; - result.sent_packet = packet_feedback.sent; - result.receive_time = packet_feedback.receive_time; - packet_result_vector.push_back(result); - } else { - ++ignored; - } - }); + feedback.ForAllPackets([&](uint16_t sequence_number, + TimeDelta delta_since_base) { + int64_t seq_num = seq_num_unwrapper_.Unwrap(sequence_number); + std::optional packet_feedback = RetrievePacketFeedback( + seq_num, /*received=*/delta_since_base.IsFinite()); + if (!packet_feedback) { + ++failed_lookups; + return; + } + if (delta_since_base.IsFinite()) { + packet_feedback->receive_time = + current_offset_ + delta_since_base.RoundDownTo(TimeDelta::Millis(1)); + } + if (packet_feedback->network_route == network_route_) { + PacketResult result; + result.sent_packet = packet_feedback->sent; + result.receive_time = packet_feedback->receive_time; + packet_result_vector.push_back(result); + } else { + ++ignored; + } + }); if (failed_lookups > 0) { - RTC_LOG(LS_WARNING) << "Failed to lookup send time for " << failed_lookups - << " packet" << (failed_lookups > 1 ? "s" : "") - << ". Send time history too small?"; + RTC_LOG(LS_WARNING) + << "Failed to lookup send time for " << failed_lookups << " packet" + << (failed_lookups > 1 ? "s" : "") + << ". Packets reordered or send time history too small?"; } if (ignored > 0) { RTC_LOG(LS_INFO) << "Ignoring " << ignored << " packets because they were sent on a different route."; } + return ToTransportFeedback(std::move(packet_result_vector), + feedback_receive_time, /*suports_ecn=*/false); +} + +std::optional +TransportFeedbackAdapter::ProcessCongestionControlFeedback( + const rtcp::CongestionControlFeedback& feedback, + Timestamp feedback_receive_time) { + if (feedback.packets().empty()) { + RTC_LOG(LS_INFO) << "Empty congestion control feedback packet received."; + return std::nullopt; + } + if (current_offset_.IsInfinite()) { + current_offset_ = feedback_receive_time; + } + TimeDelta feedback_delta = last_feedback_compact_ntp_time_ + ? 
CompactNtpIntervalToTimeDelta( + feedback.report_timestamp_compact_ntp() - + *last_feedback_compact_ntp_time_) + : TimeDelta::Zero(); + last_feedback_compact_ntp_time_ = feedback.report_timestamp_compact_ntp(); + if (feedback_delta < TimeDelta::Zero()) { + RTC_LOG(LS_WARNING) << "Unexpected feedback ntp time delta " + << feedback_delta << "."; + current_offset_ = feedback_receive_time; + } else { + current_offset_ += feedback_delta; + } + + int ignored_packets = 0; + int failed_lookups = 0; + bool supports_ecn = true; + std::vector packet_result_vector; + for (const rtcp::CongestionControlFeedback::PacketInfo& packet_info : + feedback.packets()) { + std::optional packet_feedback = RetrievePacketFeedback( + {.ssrc = packet_info.ssrc, + .rtp_sequence_number = packet_info.sequence_number}, + /*received=*/packet_info.arrival_time_offset.IsFinite()); + if (!packet_feedback) { + ++failed_lookups; + continue; + } + if (packet_feedback->network_route != network_route_) { + ++ignored_packets; + continue; + } + PacketResult result; + result.sent_packet = packet_feedback->sent; + if (packet_info.arrival_time_offset.IsFinite()) { + result.receive_time = current_offset_ - packet_info.arrival_time_offset; + supports_ecn &= packet_info.ecn != EcnMarking::kNotEct; + } + result.ecn = packet_info.ecn; + packet_result_vector.push_back(result); + } + + if (failed_lookups > 0) { + RTC_LOG(LS_WARNING) + << "Failed to lookup send time for " << failed_lookups << " packet" + << (failed_lookups > 1 ? "s" : "") + << ". Packets reordered or send time history too small?"; + } + if (ignored_packets > 0) { + RTC_LOG(LS_INFO) << "Ignoring " << ignored_packets + << " packets because they were sent on a different route."; + } + + // Feedback is expected to be sorted in send order. + absl::c_sort(packet_result_vector, [](const PacketResult& lhs, + const PacketResult& rhs) { + return lhs.sent_packet.sequence_number < rhs.sent_packet.sequence_number; + }); + return ToTransportFeedback(std::move(packet_result_vector), + feedback_receive_time, supports_ecn); +} + +std::optional +TransportFeedbackAdapter::ToTransportFeedback( + std::vector packet_results, + Timestamp feedback_receive_time, + bool supports_ecn) { + TransportPacketsFeedback msg; + msg.feedback_time = feedback_receive_time; + if (packet_results.empty()) { + return std::nullopt; + } + msg.packet_feedbacks = std::move(packet_results); + msg.data_in_flight = in_flight_.GetOutstandingData(network_route_); + msg.transport_supports_ecn = supports_ecn; + + return msg; +} + +void TransportFeedbackAdapter::SetNetworkRoute( + const NetworkRoute& network_route) { + network_route_ = network_route; +} + +DataSize TransportFeedbackAdapter::GetOutstandingData() const { + return in_flight_.GetOutstandingData(network_route_); +} + +std::optional TransportFeedbackAdapter::RetrievePacketFeedback( + const SsrcAndRtpSequencenumber& key, + bool received) { + auto it = rtp_to_transport_sequence_number_.find(key); + if (it == rtp_to_transport_sequence_number_.end()) { + return std::nullopt; + } + return RetrievePacketFeedback(it->second, received); +} - return packet_result_vector; +std::optional TransportFeedbackAdapter::RetrievePacketFeedback( + int64_t transport_seq_num, + bool received) { + if (transport_seq_num > last_ack_seq_num_) { + // Starts at history_.begin() if last_ack_seq_num_ < 0, since any + // valid sequence number is >= 0. 
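ProcessCongestionControlFeedback above advances the same local offset using the CCFB report timestamp: the difference of two 32-bit compact NTP values (Q16.16 seconds per RFC 3550) is turned into a TimeDelta by the tree's CompactNtpIntervalToTimeDelta helper, and each packet's receive time is then the offset minus its arrival_time_offset. A self-contained sketch of just that Q16.16 arithmetic (the helper name above is the real one; this stand-in handles only non-negative intervals):

// compact_ntp_interval_sketch.cc - illustrative only.
#include <cstdint>
#include <iostream>

// Compact NTP: upper 16 bits are whole seconds, lower 16 bits are the
// fraction in units of 1/65536 s, so an interval is a Q16.16 value.
int64_t CompactNtpIntervalToMs(uint32_t interval) {
  return (static_cast<int64_t>(interval) * 1000) >> 16;
}

int main() {
  uint32_t previous_report = 0x00018000;  // 1.5 s
  uint32_t current_report = 0x00028000;   // 2.5 s
  int64_t delta_ms = CompactNtpIntervalToMs(current_report - previous_report);
  std::cout << delta_ms << " ms\n";  // 1000 ms between the two reports
  return 0;
}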
+ for (auto it = history_.upper_bound(last_ack_seq_num_); + it != history_.upper_bound(transport_seq_num); ++it) { + in_flight_.RemoveInFlightPacketBytes(it->second); + } + last_ack_seq_num_ = transport_seq_num; + } + + auto it = history_.find(transport_seq_num); + if (it == history_.end()) { + RTC_LOG(LS_WARNING) << "Failed to lookup send time for packet with " + << transport_seq_num + << ". Send time history too small?"; + return std::nullopt; + } + + if (it->second.sent.send_time.IsInfinite()) { + // TODO(srte): Fix the tests that makes this happen and make this a + // DCHECK. + RTC_DLOG(LS_ERROR) + << "Received feedback before packet was indicated as sent"; + return std::nullopt; + } + + PacketFeedback packet_feedback = it->second; + if (received) { + // Note: Lost packets are not removed from history because they might + // be reported as received by a later feedback. + rtp_to_transport_sequence_number_.erase( + {.ssrc = packet_feedback.ssrc, + .rtp_sequence_number = packet_feedback.rtp_sequence_number}); + history_.erase(it); + } + return packet_feedback; } } // namespace webrtc diff --git a/modules/congestion_controller/rtp/transport_feedback_adapter.h b/modules/congestion_controller/rtp/transport_feedback_adapter.h index 7b1243b64b..7f6e9427ad 100644 --- a/modules/congestion_controller/rtp/transport_feedback_adapter.h +++ b/modules/congestion_controller/rtp/transport_feedback_adapter.h @@ -11,19 +11,22 @@ #ifndef MODULES_CONGESTION_CONTROLLER_RTP_TRANSPORT_FEEDBACK_ADAPTER_H_ #define MODULES_CONGESTION_CONTROLLER_RTP_TRANSPORT_FEEDBACK_ADAPTER_H_ -#include +#include +#include #include -#include +#include +#include #include -#include "api/sequence_checker.h" #include "api/transport/network_types.h" +#include "api/units/data_size.h" #include "api/units/timestamp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" -#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -38,63 +41,101 @@ struct PacketFeedback { Timestamp receive_time = Timestamp::PlusInfinity(); // The network route that this packet is associated with. - rtc::NetworkRoute network_route; + NetworkRoute network_route; + + uint32_t ssrc = 0; + uint16_t rtp_sequence_number = 0; }; class InFlightBytesTracker { public: void AddInFlightPacketBytes(const PacketFeedback& packet); void RemoveInFlightPacketBytes(const PacketFeedback& packet); - DataSize GetOutstandingData(const rtc::NetworkRoute& network_route) const; + DataSize GetOutstandingData(const NetworkRoute& network_route) const; private: struct NetworkRouteComparator { - bool operator()(const rtc::NetworkRoute& a, - const rtc::NetworkRoute& b) const; + bool operator()(const NetworkRoute& a, const NetworkRoute& b) const; }; - std::map in_flight_data_; + std::map in_flight_data_; }; +// TransportFeedbackAdapter converts RTCP feedback packets to RTCP agnostic per +// packet send/receive information. 
+// It supports rtcp::CongestionControlFeedback according to RFC 8888 and +// rtcp::TransportFeedback according to +// https://datatracker.ietf.org/doc/html/draft-holmer-rmcat-transport-wide-cc-extensions-01 class TransportFeedbackAdapter { public: TransportFeedbackAdapter(); - void AddPacket(const RtpPacketSendInfo& packet_info, + void AddPacket(const RtpPacketToSend& packet, + const PacedPacketInfo& pacing_info, size_t overhead_bytes, Timestamp creation_time); - absl::optional ProcessSentPacket( - const rtc::SentPacket& sent_packet); - absl::optional ProcessTransportFeedback( + std::optional ProcessSentPacket( + const SentPacketInfo& sent_packet); + + std::optional ProcessTransportFeedback( const rtcp::TransportFeedback& feedback, Timestamp feedback_receive_time); - void SetNetworkRoute(const rtc::NetworkRoute& network_route); + std::optional ProcessCongestionControlFeedback( + const rtcp::CongestionControlFeedback& feedback, + Timestamp feedback_receive_time); + + void SetNetworkRoute(const NetworkRoute& network_route); DataSize GetOutstandingData() const; private: enum class SendTimeHistoryStatus { kNotAdded, kOk, kDuplicate }; - std::vector ProcessTransportFeedbackInner( - const rtcp::TransportFeedback& feedback, - Timestamp feedback_receive_time); + struct SsrcAndRtpSequencenumber { + uint32_t ssrc; + uint16_t rtp_sequence_number; + + bool operator<(const SsrcAndRtpSequencenumber& other) const { + return std::tie(ssrc, rtp_sequence_number) < + std::tie(other.ssrc, other.rtp_sequence_number); + } + }; + + std::optional RetrievePacketFeedback( + int64_t transport_seq_num, + bool received); + std::optional RetrievePacketFeedback( + const SsrcAndRtpSequencenumber& key, + bool received); + std::optional ToTransportFeedback( + std::vector packet_results, + Timestamp feedback_receive_time, + bool supports_ecn); DataSize pending_untracked_size_ = DataSize::Zero(); Timestamp last_send_time_ = Timestamp::MinusInfinity(); Timestamp last_untracked_send_time_ = Timestamp::MinusInfinity(); RtpSequenceNumberUnwrapper seq_num_unwrapper_; - std::map history_; // Sequence numbers are never negative, using -1 as it always < a real // sequence number. int64_t last_ack_seq_num_ = -1; InFlightBytesTracker in_flight_; + NetworkRoute network_route_; Timestamp current_offset_ = Timestamp::MinusInfinity(); - Timestamp last_timestamp_ = Timestamp::MinusInfinity(); - rtc::NetworkRoute network_route_; + // `last_transport_feedback_base_time` is only used for transport feedback to + // track base time. + Timestamp last_transport_feedback_base_time_ = Timestamp::MinusInfinity(); + // Used by RFC 8888 congestion control feedback to track base time. + std::optional last_feedback_compact_ntp_time_; + + // Map SSRC and RTP sequence number to transport sequence number. 
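The lookup key for the map declared immediately below gets its ordering from the struct's operator<, built with std::tie so the comparison is lexicographic over (SSRC, RTP sequence number). The following is a self-contained sketch of the same pattern; SsrcAndSeq and the field values are illustrative stand-ins, not the adapter's types.

#include <cassert>
#include <cstdint>
#include <map>
#include <tuple>

struct SsrcAndSeq {
  uint32_t ssrc;
  uint16_t seq;
  // Lexicographic order: first by SSRC, then by RTP sequence number.
  bool operator<(const SsrcAndSeq& other) const {
    return std::tie(ssrc, seq) < std::tie(other.ssrc, other.seq);
  }
};

int main() {
  // Map a per-stream RTP key to a transport-wide sequence number.
  std::map<SsrcAndSeq, int64_t> to_transport_seq;
  SsrcAndSeq key{.ssrc = 3, .seq = 30};
  to_transport_seq[key] = 99;
  to_transport_seq[SsrcAndSeq{.ssrc = 3, .seq = 31}] = 100;
  assert(to_transport_seq.count(key) == 1);
  return 0;
}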
+ std::map + rtp_to_transport_sequence_number_; + std::map history_; }; } // namespace webrtc diff --git a/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc b/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc index 5aad74c46e..2ceceaea5d 100644 --- a/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc +++ b/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc @@ -10,33 +10,84 @@ #include "modules/congestion_controller/rtp/transport_feedback_adapter.h" -#include +#include +#include #include +#include +#include #include +#include "api/array_view.h" +#include "api/transport/ecn_marking.h" +#include "api/transport/network_types.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/ntp_time_util.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" -#include "rtc_base/checks.h" -#include "rtc_base/numerics/safe_conversions.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "rtc_base/buffer.h" +#include "rtc_base/network/sent_packet.h" #include "system_wrappers/include/clock.h" -#include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" -using ::testing::_; -using ::testing::Invoke; - namespace webrtc { - namespace { -constexpr uint32_t kSsrc = 8492; + +using ::testing::Bool; +using ::testing::NotNull; +using ::testing::SizeIs; +using ::testing::TestParamInfo; + const PacedPacketInfo kPacingInfo0(0, 5, 2000); -const PacedPacketInfo kPacingInfo1(1, 8, 4000); -const PacedPacketInfo kPacingInfo2(2, 14, 7000); -const PacedPacketInfo kPacingInfo3(3, 20, 10000); -const PacedPacketInfo kPacingInfo4(4, 22, 10000); -void ComparePacketFeedbackVectors(const std::vector& truth, +struct PacketTemplate { + uint32_t ssrc = 1; + int64_t transport_sequence_number = 0; + uint16_t rtp_sequence_number = 2; + RtpPacketMediaType media_type = RtpPacketMediaType::kVideo; + DataSize packet_size = DataSize::Bytes(100); + + EcnMarking ecn = EcnMarking::kNotEct; + Timestamp send_timestamp = Timestamp::Millis(0); + PacedPacketInfo pacing_info; + Timestamp receive_timestamp = Timestamp::MinusInfinity(); + + bool is_audio = false; +}; + +std::vector CreatePacketTemplates( + uint32_t number_of_ssrcs, + uint32_t packets_per_ssrc, + int64_t first_transport_sequence_number = 99) { + int64_t transport_sequence_number = first_transport_sequence_number; + Timestamp send_time = Timestamp::Millis(200); + Timestamp receive_time = Timestamp::Millis(100); + std::vector packets; + + for (uint32_t ssrc = 3; ssrc < 3 + number_of_ssrcs; ++ssrc) { + for (int rtp_sequence_number = ssrc * 10; + rtp_sequence_number < static_cast(ssrc * 10 + packets_per_ssrc); + ++rtp_sequence_number) { + packets.push_back({ + .ssrc = ssrc, + .transport_sequence_number = transport_sequence_number++, + .rtp_sequence_number = static_cast(rtp_sequence_number), + .send_timestamp = send_time, + .pacing_info = kPacingInfo0, + .receive_timestamp = receive_time, + }); + send_time += TimeDelta::Millis(10); + receive_time += TimeDelta::Millis(13); + } + } + return packets; +} + +void ComparePacketFeedbackVectors(const std::vector& truth, const std::vector& input) { ASSERT_EQ(truth.size(), input.size()); size_t len = truth.size(); @@ -47,355 +98,509 @@ void ComparePacketFeedbackVectors(const std::vector& truth, // base 
adjustment performed by the TransportFeedbackAdapter at the first // packet, the truth[x].arrival_time and input[x].arrival_time may not be // equal. However, the difference must be the same for all x. - TimeDelta arrival_time_delta = truth[0].receive_time - input[0].receive_time; + TimeDelta arrival_time_delta = + truth[0].receive_timestamp - input[0].receive_time; for (size_t i = 0; i < len; ++i) { - RTC_CHECK(truth[i].IsReceived()); + EXPECT_EQ(truth[i].receive_timestamp.IsFinite(), input[i].IsReceived()); if (input[i].IsReceived()) { - EXPECT_EQ(truth[i].receive_time - input[i].receive_time, + EXPECT_EQ(truth[i].receive_timestamp - input[i].receive_time, arrival_time_delta); } - EXPECT_EQ(truth[i].sent_packet.send_time, input[i].sent_packet.send_time); - EXPECT_EQ(truth[i].sent_packet.sequence_number, + EXPECT_EQ(truth[i].send_timestamp, input[i].sent_packet.send_time); + EXPECT_EQ(truth[i].transport_sequence_number, input[i].sent_packet.sequence_number); - EXPECT_EQ(truth[i].sent_packet.size, input[i].sent_packet.size); - EXPECT_EQ(truth[i].sent_packet.pacing_info, - input[i].sent_packet.pacing_info); + EXPECT_EQ(truth[i].packet_size, input[i].sent_packet.size); + EXPECT_EQ(truth[i].pacing_info, input[i].sent_packet.pacing_info); + EXPECT_EQ(truth[i].is_audio, input[i].sent_packet.audio); } } -PacketResult CreatePacket(int64_t receive_time_ms, - int64_t send_time_ms, - int64_t sequence_number, - size_t payload_size, - const PacedPacketInfo& pacing_info) { - PacketResult res; - res.receive_time = Timestamp::Millis(receive_time_ms); - res.sent_packet.send_time = Timestamp::Millis(send_time_ms); - res.sent_packet.sequence_number = sequence_number; - res.sent_packet.size = DataSize::Bytes(payload_size); - res.sent_packet.pacing_info = pacing_info; - return res; +RtpPacketToSend CreatePacketToSend(PacketTemplate packet) { + RtpPacketToSend send_packet(nullptr); + send_packet.SetSsrc(packet.ssrc); + send_packet.SetPayloadSize(packet.packet_size.bytes() - + send_packet.headers_size()); + send_packet.SetSequenceNumber(packet.rtp_sequence_number); + send_packet.set_transport_sequence_number(packet.transport_sequence_number); + send_packet.set_packet_type(packet.is_audio ? 
RtpPacketMediaType::kAudio + : RtpPacketMediaType::kVideo); + + return send_packet; } -class MockStreamFeedbackObserver : public webrtc::StreamFeedbackObserver { - public: - MOCK_METHOD(void, - OnPacketFeedbackVector, - (std::vector packet_feedback_vector), - (override)); -}; +rtcp::TransportFeedback BuildRtcpTransportFeedbackPacket( + ArrayView packets) { + rtcp::TransportFeedback feedback; + feedback.SetBase(packets[0].transport_sequence_number, + packets[0].receive_timestamp); -} // namespace + for (const PacketTemplate& packet : packets) { + if (packet.receive_timestamp.IsFinite()) { + EXPECT_TRUE(feedback.AddReceivedPacket(packet.transport_sequence_number, + packet.receive_timestamp)); + } + } + return feedback; +} -class TransportFeedbackAdapterTest : public ::testing::Test { - public: - TransportFeedbackAdapterTest() : clock_(0) {} - - virtual ~TransportFeedbackAdapterTest() {} - - virtual void SetUp() { adapter_.reset(new TransportFeedbackAdapter()); } - - virtual void TearDown() { adapter_.reset(); } - - protected: - void OnSentPacket(const PacketResult& packet_feedback) { - RtpPacketSendInfo packet_info; - packet_info.media_ssrc = kSsrc; - packet_info.transport_sequence_number = - packet_feedback.sent_packet.sequence_number; - packet_info.rtp_sequence_number = 0; - packet_info.length = packet_feedback.sent_packet.size.bytes(); - packet_info.pacing_info = packet_feedback.sent_packet.pacing_info; - packet_info.packet_type = RtpPacketMediaType::kVideo; - adapter_->AddPacket(RtpPacketSendInfo(packet_info), 0u, - clock_.CurrentTime()); - adapter_->ProcessSentPacket(rtc::SentPacket( - packet_feedback.sent_packet.sequence_number, - packet_feedback.sent_packet.send_time.ms(), rtc::PacketInfo())); +rtcp::CongestionControlFeedback BuildRtcpCongestionControlFeedbackPacket( + ArrayView packets) { + // Assume the feedback was sent when the last packet was received. 
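The helper that starts above encodes each received packet as an offset measured backwards from the feedback send time (arrival_time_offset = feedback_sent_time - receive_timestamp, filled in just below), and ProcessCongestionControlFeedback earlier in this change inverts it with receive_time = current_offset_ - arrival_time_offset. The following is a minimal model of that round trip, with timestamps reduced to plain millisecond integers for illustration only.

#include <cassert>
#include <cstdint>

int main() {
  // Receiver side: packets arrived at 100 ms and 113 ms; the RFC 8888
  // report is sent at 113 ms (the last arrival).
  const int64_t receive_ms[] = {100, 113};
  const int64_t feedback_sent_ms = 113;
  int64_t arrival_offset_ms[2];
  for (int i = 0; i < 2; ++i) {
    arrival_offset_ms[i] = feedback_sent_ms - receive_ms[i];
  }

  // Sender side: a local estimate of the feedback send time stands in for
  // current_offset_; subtracting the reported offsets recovers arrival
  // times on the sender's clock, preserving inter-packet spacing.
  const int64_t current_offset_ms = 500;  // arbitrary local base
  int64_t recovered_ms[2];
  for (int i = 0; i < 2; ++i) {
    recovered_ms[i] = current_offset_ms - arrival_offset_ms[i];
  }
  assert(recovered_ms[1] - recovered_ms[0] == receive_ms[1] - receive_ms[0]);
  return 0;
}

Only the inter-packet spacing survives the round trip; the absolute receive times are re-expressed against the sender's local offset, which is what the congestion controller needs.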
+ Timestamp feedback_sent_time = Timestamp::MinusInfinity(); + for (auto it = packets.crbegin(); it != packets.crend(); ++it) { + if (it->receive_timestamp.IsFinite()) { + feedback_sent_time = it->receive_timestamp; + break; + } } - SimulatedClock clock_; - std::unique_ptr adapter_; -}; + std::vector packet_infos; + for (const PacketTemplate& packet : packets) { + rtcp::CongestionControlFeedback::PacketInfo packet_info = { + .ssrc = packet.ssrc, + .sequence_number = packet.rtp_sequence_number, + .ecn = packet.ecn}; + if (packet.receive_timestamp.IsFinite()) { + packet_info.arrival_time_offset = + feedback_sent_time - packet.receive_timestamp; + } + packet_infos.push_back(packet_info); + } -TEST_F(TransportFeedbackAdapterTest, AdaptsFeedbackAndPopulatesSendTimes) { - std::vector packets; - packets.push_back(CreatePacket(100, 200, 0, 1500, kPacingInfo0)); - packets.push_back(CreatePacket(110, 210, 1, 1500, kPacingInfo0)); - packets.push_back(CreatePacket(120, 220, 2, 1500, kPacingInfo0)); - packets.push_back(CreatePacket(130, 230, 3, 1500, kPacingInfo1)); - packets.push_back(CreatePacket(140, 240, 4, 1500, kPacingInfo1)); + SimulatedClock clock(feedback_sent_time); + uint32_t compact_ntp = + CompactNtp(clock.ConvertTimestampToNtpTime(feedback_sent_time)); + return rtcp::CongestionControlFeedback(std::move(packet_infos), compact_ntp); +} - for (const auto& packet : packets) - OnSentPacket(packet); +Timestamp TimeNow() { + return Timestamp::Millis(1234); +} - rtcp::TransportFeedback feedback; - feedback.SetBase(packets[0].sent_packet.sequence_number, - packets[0].receive_time); +} // namespace - for (const auto& packet : packets) { - EXPECT_TRUE(feedback.AddReceivedPacket(packet.sent_packet.sequence_number, - packet.receive_time)); +class TransportFeedbackAdapterTest : public ::testing::TestWithParam { + public: + bool UseRfc8888CongestionControlFeedback() const { return GetParam(); } + + std::optional CreateAndProcessFeedback( + ArrayView packets, + TransportFeedbackAdapter& adapter) { + if (UseRfc8888CongestionControlFeedback()) { + rtcp::CongestionControlFeedback rtcp_feedback = + BuildRtcpCongestionControlFeedbackPacket(packets); + return adapter.ProcessCongestionControlFeedback(rtcp_feedback, TimeNow()); + } else { + rtcp::TransportFeedback rtcp_feedback = + BuildRtcpTransportFeedbackPacket(packets); + return adapter.ProcessTransportFeedback(rtcp_feedback, TimeNow()); + } } +}; - feedback.Build(); +INSTANTIATE_TEST_SUITE_P(FeedbackFormats, + TransportFeedbackAdapterTest, + Bool(), + [](TestParamInfo param) { + if (param.param) + return "CongestionControlFeedback"; + else + return "TransportFeedback"; + }); + +TEST_P(TransportFeedbackAdapterTest, AdaptsFeedbackAndPopulatesSendTimes) { + TransportFeedbackAdapter adapter; + std::vector packets = + CreatePacketTemplates(/*number_of_ssrcs=*/2, /*packets_per_ssrc=*/3); + + for (const PacketTemplate& packet : packets) { + adapter.AddPacket(CreatePacketToSend(packet), packet.pacing_info, + /*overhead=*/0u, TimeNow()); + adapter.ProcessSentPacket(SentPacketInfo(packet.transport_sequence_number, + packet.send_timestamp.ms())); + } - auto result = - adapter_->ProcessTransportFeedback(feedback, clock_.CurrentTime()); - ComparePacketFeedbackVectors(packets, result->packet_feedbacks); + std::optional adapted_feedback = + CreateAndProcessFeedback(packets, adapter); + ComparePacketFeedbackVectors(packets, adapted_feedback->packet_feedbacks); } -TEST_F(TransportFeedbackAdapterTest, FeedbackVectorReportsUnreceived) { - std::vector sent_packets = { - 
CreatePacket(100, 220, 0, 1500, kPacingInfo0), - CreatePacket(110, 210, 1, 1500, kPacingInfo0), - CreatePacket(120, 220, 2, 1500, kPacingInfo0), - CreatePacket(130, 230, 3, 1500, kPacingInfo0), - CreatePacket(140, 240, 4, 1500, kPacingInfo0), - CreatePacket(150, 250, 5, 1500, kPacingInfo0), - CreatePacket(160, 260, 6, 1500, kPacingInfo0)}; - - for (const auto& packet : sent_packets) - OnSentPacket(packet); +TEST_P(TransportFeedbackAdapterTest, FeedbackVectorReportsUnreceived) { + TransportFeedbackAdapter adapter; - // Note: Important to include the last packet, as only unreceived packets in - // between received packets can be inferred. - std::vector received_packets = { - sent_packets[0], sent_packets[2], sent_packets[6]}; - - rtcp::TransportFeedback feedback; - feedback.SetBase(received_packets[0].sent_packet.sequence_number, - received_packets[0].receive_time); + std::vector sent_packets = + CreatePacketTemplates(/*number_of_ssrcs=*/2, /*packets_per_ssrc=*/3); - for (const auto& packet : received_packets) { - EXPECT_TRUE(feedback.AddReceivedPacket(packet.sent_packet.sequence_number, - packet.receive_time)); + for (const PacketTemplate& packet : sent_packets) { + adapter.AddPacket(CreatePacketToSend(packet), packet.pacing_info, + /*overhead=*/0u, TimeNow()); + adapter.ProcessSentPacket(SentPacketInfo(packet.transport_sequence_number, + packet.send_timestamp.ms())); } - feedback.Build(); + // Note: Important to include the last packet per SSRC, as only unreceived + // packets in between received packets can be inferred. + sent_packets[1].receive_timestamp = Timestamp::PlusInfinity(); + sent_packets[4].receive_timestamp = Timestamp::PlusInfinity(); + std::optional adapted_feedback = + CreateAndProcessFeedback(sent_packets, adapter); - auto res = adapter_->ProcessTransportFeedback(feedback, clock_.CurrentTime()); - ComparePacketFeedbackVectors(sent_packets, res->packet_feedbacks); + ComparePacketFeedbackVectors(sent_packets, + adapted_feedback->packet_feedbacks); } -TEST_F(TransportFeedbackAdapterTest, HandlesDroppedPackets) { - std::vector packets; - packets.push_back(CreatePacket(100, 200, 0, 1500, kPacingInfo0)); - packets.push_back(CreatePacket(110, 210, 1, 1500, kPacingInfo1)); - packets.push_back(CreatePacket(120, 220, 2, 1500, kPacingInfo2)); - packets.push_back(CreatePacket(130, 230, 3, 1500, kPacingInfo3)); - packets.push_back(CreatePacket(140, 240, 4, 1500, kPacingInfo4)); +TEST_P(TransportFeedbackAdapterTest, HandlesDroppedPackets) { + TransportFeedbackAdapter adapter; + + std::vector packets = + CreatePacketTemplates(/*number_of_ssrcs=*/2, /*packets_per_ssrc=*/2, + /*first_transport_sequence_number=*/0); const uint16_t kSendSideDropBefore = 1; const uint16_t kReceiveSideDropAfter = 3; - for (const auto& packet : packets) { - if (packet.sent_packet.sequence_number >= kSendSideDropBefore) - OnSentPacket(packet); + std::vector sent_packets; + for (const PacketTemplate& packet : packets) { + if (packet.transport_sequence_number >= kSendSideDropBefore) { + sent_packets.push_back(packet); + } + } + for (const PacketTemplate& packet : sent_packets) { + adapter.AddPacket(CreatePacketToSend(packet), packet.pacing_info, + /*overhead=*/0u, TimeNow()); + adapter.ProcessSentPacket(SentPacketInfo(packet.transport_sequence_number, + packet.send_timestamp.ms())); } - rtcp::TransportFeedback feedback; - feedback.SetBase(packets[0].sent_packet.sequence_number, - packets[0].receive_time); - - for (const auto& packet : packets) { - if (packet.sent_packet.sequence_number <= kReceiveSideDropAfter) { 
- EXPECT_TRUE(feedback.AddReceivedPacket(packet.sent_packet.sequence_number, - packet.receive_time)); + std::vector received_packets; + for (const PacketTemplate& packet : packets) { + if (packet.transport_sequence_number <= kReceiveSideDropAfter) { + received_packets.push_back(packet); } } + std::optional adapted_feedback = + CreateAndProcessFeedback(received_packets, adapter); - feedback.Build(); - - std::vector expected_packets( + std::vector expected_packets( packets.begin() + kSendSideDropBefore, packets.begin() + kReceiveSideDropAfter + 1); // Packets that have timed out on the send-side have lost the // information stored on the send-side. And they will not be reported to // observers since we won't know that they come from the same networks. - - auto res = adapter_->ProcessTransportFeedback(feedback, clock_.CurrentTime()); - ComparePacketFeedbackVectors(expected_packets, res->packet_feedbacks); + ComparePacketFeedbackVectors(expected_packets, + adapted_feedback->packet_feedbacks); } -TEST_F(TransportFeedbackAdapterTest, SendTimeWrapsBothWays) { - TimeDelta kHighArrivalTime = - rtcp::TransportFeedback::kDeltaTick * (1 << 8) * ((1 << 23) - 1); - std::vector packets; - packets.push_back(CreatePacket(kHighArrivalTime.ms() + 64, 210, 0, 1500, - PacedPacketInfo())); - packets.push_back(CreatePacket(kHighArrivalTime.ms() - 64, 210, 1, 1500, - PacedPacketInfo())); - packets.push_back( - CreatePacket(kHighArrivalTime.ms(), 220, 2, 1500, PacedPacketInfo())); - - for (const auto& packet : packets) - OnSentPacket(packet); +TEST_P(TransportFeedbackAdapterTest, FeedbackReportsIfPacketIsAudio) { + TransportFeedbackAdapter adapter; - for (size_t i = 0; i < packets.size(); ++i) { - std::unique_ptr feedback( - new rtcp::TransportFeedback()); - feedback->SetBase(packets[i].sent_packet.sequence_number, - packets[i].receive_time); - - EXPECT_TRUE(feedback->AddReceivedPacket( - packets[i].sent_packet.sequence_number, packets[i].receive_time)); - - rtc::Buffer raw_packet = feedback->Build(); - feedback = rtcp::TransportFeedback::ParseFrom(raw_packet.data(), - raw_packet.size()); + PacketTemplate packets[] = { + {.receive_timestamp = TimeNow(), .is_audio = true}}; + PacketTemplate& packet = packets[0]; - std::vector expected_packets; - expected_packets.push_back(packets[i]); + adapter.AddPacket(CreatePacketToSend(packet), packet.pacing_info, + /*overhead=*/0u, TimeNow()); + adapter.ProcessSentPacket(SentPacketInfo(packet.transport_sequence_number, + packet.send_timestamp.ms())); - auto res = adapter_->ProcessTransportFeedback(*feedback.get(), - clock_.CurrentTime()); - ComparePacketFeedbackVectors(expected_packets, res->packet_feedbacks); - } + std::optional adapted_feedback = + CreateAndProcessFeedback(packets, adapter); + ASSERT_THAT(adapted_feedback->packet_feedbacks, SizeIs(1)); + EXPECT_TRUE(adapted_feedback->packet_feedbacks[0].sent_packet.audio); } -TEST_F(TransportFeedbackAdapterTest, HandlesArrivalReordering) { - std::vector packets; - packets.push_back(CreatePacket(120, 200, 0, 1500, kPacingInfo0)); - packets.push_back(CreatePacket(110, 210, 1, 1500, kPacingInfo0)); - packets.push_back(CreatePacket(100, 220, 2, 1500, kPacingInfo0)); +TEST_P(TransportFeedbackAdapterTest, ReceiveTimeWrapsBothWays) { + TransportFeedbackAdapter adapter; - for (const auto& packet : packets) - OnSentPacket(packet); + TimeDelta kHighArrivalTime = + rtcp::TransportFeedback::kDeltaTick * (1 << 8) * ((1 << 23) - 1); - rtcp::TransportFeedback feedback; - feedback.SetBase(packets[0].sent_packet.sequence_number, - 
packets[0].receive_time); + std::vector packets = { + {.transport_sequence_number = 0, + .rtp_sequence_number = 102, + .receive_timestamp = + Timestamp::Zero() + kHighArrivalTime + TimeDelta::Millis(64)}, + {.transport_sequence_number = 1, + .rtp_sequence_number = 103, + .receive_timestamp = + Timestamp::Zero() + kHighArrivalTime - TimeDelta::Millis(64)}, + {.transport_sequence_number = 2, + .rtp_sequence_number = 104, + .receive_timestamp = Timestamp::Zero() + kHighArrivalTime}}; + + for (const PacketTemplate& packet : packets) { + adapter.AddPacket(CreatePacketToSend(packet), packet.pacing_info, + /*overhead=*/0u, TimeNow()); + adapter.ProcessSentPacket(SentPacketInfo(packet.transport_sequence_number, + packet.send_timestamp.ms())); + } - for (const auto& packet : packets) { - EXPECT_TRUE(feedback.AddReceivedPacket(packet.sent_packet.sequence_number, - packet.receive_time)); + for (size_t i = 0; i < packets.size(); ++i) { + std::vector received_packets = {packets[i]}; + + std::optional result; + if (UseRfc8888CongestionControlFeedback()) { + rtcp::CongestionControlFeedback feedback = + BuildRtcpCongestionControlFeedbackPacket(received_packets); + Buffer raw_packet = feedback.Build(); + rtcp::CommonHeader header; + ASSERT_TRUE(header.Parse(raw_packet.data(), raw_packet.size())); + rtcp::CongestionControlFeedback parsed_feedback; + ASSERT_TRUE(parsed_feedback.Parse(header)); + result = + adapter.ProcessCongestionControlFeedback(parsed_feedback, TimeNow()); + } else { + rtcp::TransportFeedback feedback = + BuildRtcpTransportFeedbackPacket(received_packets); + Buffer raw_packet = feedback.Build(); + std::unique_ptr parsed_feedback = + rtcp::TransportFeedback::ParseFrom(raw_packet.data(), + raw_packet.size()); + ASSERT_THAT(parsed_feedback, NotNull()); + result = adapter.ProcessTransportFeedback(*parsed_feedback, TimeNow()); + } + ASSERT_TRUE(result.has_value()); + ComparePacketFeedbackVectors(received_packets, result->packet_feedbacks); } +} - feedback.Build(); +TEST_P(TransportFeedbackAdapterTest, HandlesArrivalReordering) { + TransportFeedbackAdapter adapter; + + std::vector packets = { + {.transport_sequence_number = 0, + .rtp_sequence_number = 101, + .send_timestamp = Timestamp::Millis(200), + .receive_timestamp = Timestamp::Millis(120)}, + {.transport_sequence_number = 1, + .rtp_sequence_number = 102, + .send_timestamp = Timestamp::Millis(210), + .receive_timestamp = Timestamp::Millis(110)}, + {.transport_sequence_number = 2, + .rtp_sequence_number = 103, + .send_timestamp = Timestamp::Millis(220), + .receive_timestamp = Timestamp::Millis(100)}}; + + for (const PacketTemplate& packet : packets) { + adapter.AddPacket(CreatePacketToSend(packet), packet.pacing_info, + /*overhead=*/0u, TimeNow()); + adapter.ProcessSentPacket(SentPacketInfo(packet.transport_sequence_number, + packet.send_timestamp.ms())); + } // Adapter keeps the packets ordered by sequence number (which is itself // assigned by the order of transmission). Reordering by some other criteria, // eg. arrival time, is up to the observers. - auto res = adapter_->ProcessTransportFeedback(feedback, clock_.CurrentTime()); - ComparePacketFeedbackVectors(packets, res->packet_feedbacks); -} - -TEST_F(TransportFeedbackAdapterTest, TimestampDeltas) { - std::vector sent_packets; - // TODO(srte): Consider using us resolution in the constants. 
- const TimeDelta kSmallDelta = (rtcp::TransportFeedback::kDeltaTick * 0xFF) - .RoundDownTo(TimeDelta::Millis(1)); - const TimeDelta kLargePositiveDelta = (rtcp::TransportFeedback::kDeltaTick * - std::numeric_limits::max()) - .RoundDownTo(TimeDelta::Millis(1)); - const TimeDelta kLargeNegativeDelta = (rtcp::TransportFeedback::kDeltaTick * - std::numeric_limits::min()) - .RoundDownTo(TimeDelta::Millis(1)); - - PacketResult packet_feedback; - packet_feedback.sent_packet.sequence_number = 1; - packet_feedback.sent_packet.send_time = Timestamp::Millis(100); - packet_feedback.receive_time = Timestamp::Millis(200); - packet_feedback.sent_packet.size = DataSize::Bytes(1500); - sent_packets.push_back(packet_feedback); - - // TODO(srte): This rounding maintains previous behavior, but should ot be - // required. - packet_feedback.sent_packet.send_time += kSmallDelta; - packet_feedback.receive_time += kSmallDelta; - ++packet_feedback.sent_packet.sequence_number; - sent_packets.push_back(packet_feedback); - - packet_feedback.sent_packet.send_time += kLargePositiveDelta; - packet_feedback.receive_time += kLargePositiveDelta; - ++packet_feedback.sent_packet.sequence_number; - sent_packets.push_back(packet_feedback); - - packet_feedback.sent_packet.send_time += kLargeNegativeDelta; - packet_feedback.receive_time += kLargeNegativeDelta; - ++packet_feedback.sent_packet.sequence_number; - sent_packets.push_back(packet_feedback); - - // Too large, delta - will need two feedback messages. - packet_feedback.sent_packet.send_time += - kLargePositiveDelta + TimeDelta::Millis(1); - packet_feedback.receive_time += kLargePositiveDelta + TimeDelta::Millis(1); - ++packet_feedback.sent_packet.sequence_number; - - // Packets will be added to send history. - for (const auto& packet : sent_packets) - OnSentPacket(packet); - OnSentPacket(packet_feedback); - - // Create expected feedback and send into adapter. - std::unique_ptr feedback( - new rtcp::TransportFeedback()); - feedback->SetBase(sent_packets[0].sent_packet.sequence_number, - sent_packets[0].receive_time); - - for (const auto& packet : sent_packets) { - EXPECT_TRUE(feedback->AddReceivedPacket(packet.sent_packet.sequence_number, - packet.receive_time)); - } - EXPECT_FALSE( - feedback->AddReceivedPacket(packet_feedback.sent_packet.sequence_number, - packet_feedback.receive_time)); - - rtc::Buffer raw_packet = feedback->Build(); - feedback = - rtcp::TransportFeedback::ParseFrom(raw_packet.data(), raw_packet.size()); - - std::vector received_feedback; - - EXPECT_TRUE(feedback.get() != nullptr); - auto res = - adapter_->ProcessTransportFeedback(*feedback.get(), clock_.CurrentTime()); - ComparePacketFeedbackVectors(sent_packets, res->packet_feedbacks); - - // Create a new feedback message and add the trailing item. 
- feedback.reset(new rtcp::TransportFeedback()); - feedback->SetBase(packet_feedback.sent_packet.sequence_number, - packet_feedback.receive_time); - EXPECT_TRUE( - feedback->AddReceivedPacket(packet_feedback.sent_packet.sequence_number, - packet_feedback.receive_time)); - raw_packet = feedback->Build(); - feedback = - rtcp::TransportFeedback::ParseFrom(raw_packet.data(), raw_packet.size()); - - EXPECT_TRUE(feedback.get() != nullptr); - { - auto res = adapter_->ProcessTransportFeedback(*feedback.get(), - clock_.CurrentTime()); - std::vector expected_packets; - expected_packets.push_back(packet_feedback); - ComparePacketFeedbackVectors(expected_packets, res->packet_feedbacks); - } + std::optional adapted_feedback = + CreateAndProcessFeedback(packets, adapter); + ComparePacketFeedbackVectors(packets, adapted_feedback->packet_feedbacks); } -TEST_F(TransportFeedbackAdapterTest, IgnoreDuplicatePacketSentCalls) { - auto packet = CreatePacket(100, 200, 0, 1500, kPacingInfo0); +TEST_P(TransportFeedbackAdapterTest, IgnoreDuplicatePacketSentCalls) { + TransportFeedbackAdapter adapter; + PacketTemplate packet = {}; // Add a packet and then mark it as sent. - RtpPacketSendInfo packet_info; - packet_info.media_ssrc = kSsrc; - packet_info.transport_sequence_number = packet.sent_packet.sequence_number; - packet_info.length = packet.sent_packet.size.bytes(); - packet_info.pacing_info = packet.sent_packet.pacing_info; - packet_info.packet_type = RtpPacketMediaType::kVideo; - adapter_->AddPacket(packet_info, 0u, clock_.CurrentTime()); - absl::optional sent_packet = adapter_->ProcessSentPacket( - rtc::SentPacket(packet.sent_packet.sequence_number, - packet.sent_packet.send_time.ms(), rtc::PacketInfo())); + adapter.AddPacket(CreatePacketToSend(packet), packet.pacing_info, 0u, + TimeNow()); + std::optional sent_packet = adapter.ProcessSentPacket( + SentPacketInfo(packet.transport_sequence_number, + packet.send_timestamp.ms(), PacketInfo())); EXPECT_TRUE(sent_packet.has_value()); // Call ProcessSentPacket() again with the same sequence number. This packet // has already been marked as sent and the call should be ignored. 
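The comment above states the contract: a repeated ProcessSentPacket() for the same transport sequence number must be ignored. One way such idempotence can be implemented is to keep only the first recorded send time per sequence number; the SendTimeRegistry below is purely illustrative and is not the adapter's actual bookkeeping.

#include <cassert>
#include <cstdint>
#include <map>
#include <optional>

// Illustrative only: maps a transport sequence number to its first send time.
class SendTimeRegistry {
 public:
  // Returns the send time if this is the first report for `seq`,
  // std::nullopt if the packet was already marked as sent.
  std::optional<int64_t> MarkSent(int64_t seq, int64_t send_time_ms) {
    auto [it, inserted] = first_send_time_ms_.emplace(seq, send_time_ms);
    if (!inserted) {
      return std::nullopt;  // Duplicate: keep the original send time.
    }
    return it->second;
  }

 private:
  std::map<int64_t, int64_t> first_send_time_ms_;
};

int main() {
  SendTimeRegistry registry;
  assert(registry.MarkSent(/*seq=*/7, /*send_time_ms=*/100).has_value());
  assert(!registry.MarkSent(/*seq=*/7, /*send_time_ms=*/105).has_value());
  return 0;
}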
-  absl::optional duplicate_packet = adapter_->ProcessSentPacket(
-      rtc::SentPacket(packet.sent_packet.sequence_number,
-                      packet.sent_packet.send_time.ms(), rtc::PacketInfo()));
+  std::optional<SentPacket> duplicate_packet = adapter.ProcessSentPacket(
+      SentPacketInfo(packet.transport_sequence_number,
+                     packet.send_timestamp.ms(), PacketInfo()));
   EXPECT_FALSE(duplicate_packet.has_value());
 }
 
+TEST_P(TransportFeedbackAdapterTest,
+       SendReceiveTimeDiffTimeContinuousBetweenFeedback) {
+  TransportFeedbackAdapter adapter;
+
+  PacketTemplate packets[] = {{.transport_sequence_number = 1,
+                               .rtp_sequence_number = 101,
+                               .send_timestamp = Timestamp::Millis(100),
+                               .pacing_info = kPacingInfo0,
+                               .receive_timestamp = Timestamp::Millis(200)},
+                              {.transport_sequence_number = 2,
+                               .rtp_sequence_number = 102,
+                               .send_timestamp = Timestamp::Millis(110),
+                               .pacing_info = kPacingInfo0,
+                               .receive_timestamp = Timestamp::Millis(210)}};
+
+  for (const PacketTemplate& packet : packets) {
+    adapter.AddPacket(CreatePacketToSend(packet), packet.pacing_info,
+                      /*overhead=*/0u, TimeNow());
+
+    adapter.ProcessSentPacket(SentPacketInfo(packet.transport_sequence_number,
+                                             packet.send_timestamp.ms()));
+  }
+
+  std::optional<TransportPacketsFeedback> adapted_feedback_1 =
+      CreateAndProcessFeedback(std::vector<PacketTemplate>({packets[0]}),
+                               adapter);
+  std::optional<TransportPacketsFeedback> adapted_feedback_2 =
+      CreateAndProcessFeedback(std::vector<PacketTemplate>({packets[1]}),
+                               adapter);
+
+  ASSERT_EQ(adapted_feedback_1->packet_feedbacks.size(),
+            adapted_feedback_2->packet_feedbacks.size());
+  ASSERT_THAT(adapted_feedback_1->packet_feedbacks, testing::SizeIs(1));
+  EXPECT_EQ((adapted_feedback_1->packet_feedbacks[0].receive_time -
+             adapted_feedback_1->packet_feedbacks[0].sent_packet.send_time)
+                .RoundTo(TimeDelta::Millis(1)),
+            (adapted_feedback_2->packet_feedbacks[0].receive_time -
+             adapted_feedback_2->packet_feedbacks[0].sent_packet.send_time)
+                .RoundTo(TimeDelta::Millis(1)));
+}
+
+TEST_P(TransportFeedbackAdapterTest, ProcessSentPacketIncreaseOutstandingData) {
+  TransportFeedbackAdapter adapter;
+
+  PacketTemplate packet_1 = {.transport_sequence_number = 1,
+                             .packet_size = DataSize::Bytes(200)};
+  PacketTemplate packet_2 = {.transport_sequence_number = 2,
+                             .packet_size = DataSize::Bytes(300)};
+  adapter.AddPacket(CreatePacketToSend(packet_1), packet_1.pacing_info,
+                    /*overhead=*/0u, TimeNow());
+  std::optional<SentPacket> sent_packet_1 =
+      adapter.ProcessSentPacket(SentPacketInfo(
+          packet_1.transport_sequence_number, packet_1.send_timestamp.ms()));
+
+  ASSERT_TRUE(sent_packet_1.has_value());
+  EXPECT_EQ(sent_packet_1->sequence_number, packet_1.transport_sequence_number);
+  // Only one packet in flight.
+  EXPECT_EQ(sent_packet_1->data_in_flight, packet_1.packet_size);
+  EXPECT_EQ(adapter.GetOutstandingData(), packet_1.packet_size);
+
+  adapter.AddPacket(CreatePacketToSend(packet_2), packet_2.pacing_info,
+                    /*overhead=*/0u, TimeNow());
+  std::optional<SentPacket> sent_packet_2 =
+      adapter.ProcessSentPacket(SentPacketInfo(
+          packet_2.transport_sequence_number, packet_2.send_timestamp.ms()));
+
+  ASSERT_TRUE(sent_packet_2.has_value());
+  // Two packets in flight.
+ EXPECT_EQ(sent_packet_2->data_in_flight, + packet_1.packet_size + packet_2.packet_size); + + EXPECT_EQ(adapter.GetOutstandingData(), + packet_1.packet_size + packet_2.packet_size); +} + +TEST_P(TransportFeedbackAdapterTest, TransportPacketFeedbackHasDataInFlight) { + TransportFeedbackAdapter adapter; + + const PacketTemplate packets[] = { + { + .transport_sequence_number = 1, + .rtp_sequence_number = 101, + .packet_size = DataSize::Bytes(200), + .send_timestamp = Timestamp::Millis(100), + .pacing_info = kPacingInfo0, + .receive_timestamp = Timestamp::Millis(200), + }, + { + .transport_sequence_number = 2, + .rtp_sequence_number = 102, + .packet_size = DataSize::Bytes(300), + .send_timestamp = Timestamp::Millis(110), + .pacing_info = kPacingInfo0, + .receive_timestamp = Timestamp::Millis(210), + }}; + + for (const PacketTemplate& packet : packets) { + adapter.AddPacket(CreatePacketToSend(packet), packet.pacing_info, + /*overhead=*/0u, TimeNow()); + + adapter.ProcessSentPacket(SentPacketInfo(packet.transport_sequence_number, + packet.send_timestamp.ms())); + } + + std::optional adapted_feedback_1 = + CreateAndProcessFeedback(MakeArrayView(&packets[0], 1), adapter); + std::optional adapted_feedback_2 = + CreateAndProcessFeedback(MakeArrayView(&packets[1], 1), adapter); + EXPECT_EQ(adapted_feedback_1->data_in_flight, packets[1].packet_size); + EXPECT_EQ(adapted_feedback_2->data_in_flight, DataSize::Zero()); +} + +TEST(TransportFeedbackAdapterCongestionFeedbackTest, + CongestionControlFeedbackResultHasEcn) { + TransportFeedbackAdapter adapter; + + const PacketTemplate packets[] = { + { + .transport_sequence_number = 1, + .rtp_sequence_number = 101, + .ecn = EcnMarking::kCe, + .send_timestamp = Timestamp::Millis(100), + .receive_timestamp = Timestamp::Millis(200), + }, + { + .transport_sequence_number = 2, + .rtp_sequence_number = 102, + .ecn = EcnMarking::kEct1, + .send_timestamp = Timestamp::Millis(110), + .receive_timestamp = Timestamp::Millis(210), + }}; + + for (const PacketTemplate& packet : packets) { + adapter.AddPacket(CreatePacketToSend(packet), packet.pacing_info, + /*overhead=*/0u, TimeNow()); + + adapter.ProcessSentPacket(SentPacketInfo(packet.transport_sequence_number, + packet.send_timestamp.ms())); + } + + rtcp::CongestionControlFeedback rtcp_feedback = + BuildRtcpCongestionControlFeedbackPacket(packets); + std::optional adapted_feedback = + adapter.ProcessCongestionControlFeedback(rtcp_feedback, TimeNow()); + + ASSERT_THAT(adapted_feedback->packet_feedbacks, SizeIs(2)); + EXPECT_THAT(adapted_feedback->packet_feedbacks[0].ecn, EcnMarking::kCe); + EXPECT_THAT(adapted_feedback->packet_feedbacks[1].ecn, EcnMarking::kEct1); + EXPECT_TRUE(adapted_feedback->transport_supports_ecn); +} + +TEST(TransportFeedbackAdapterCongestionFeedbackTest, + ReportTransportDoesNotSupportEcnIfFeedbackContainNotEctPacket) { + TransportFeedbackAdapter adapter; + + const PacketTemplate packets[] = { + { + .transport_sequence_number = 1, + .rtp_sequence_number = 101, + .ecn = EcnMarking::kCe, + .send_timestamp = Timestamp::Millis(100), + .receive_timestamp = Timestamp::Millis(200), + }, + { + .transport_sequence_number = 2, + .rtp_sequence_number = 102, + .ecn = EcnMarking::kNotEct, + .send_timestamp = Timestamp::Millis(110), + .receive_timestamp = Timestamp::Millis(210), + }}; + + for (const PacketTemplate& packet : packets) { + adapter.AddPacket(CreatePacketToSend(packet), packet.pacing_info, + /*overhead=*/0u, TimeNow()); + + adapter.ProcessSentPacket(SentPacketInfo(packet.transport_sequence_number, + 
packet.send_timestamp.ms())); + } + + rtcp::CongestionControlFeedback rtcp_feedback = + BuildRtcpCongestionControlFeedbackPacket(packets); + std::optional adapted_feedback = + adapter.ProcessCongestionControlFeedback(rtcp_feedback, TimeNow()); + EXPECT_FALSE(adapted_feedback->transport_supports_ecn); + ASSERT_THAT(adapted_feedback->packet_feedbacks, SizeIs(2)); +} + } // namespace webrtc diff --git a/modules/congestion_controller/rtp/transport_feedback_demuxer.cc b/modules/congestion_controller/rtp/transport_feedback_demuxer.cc index 5a6a2e1e9b..1c29108ffb 100644 --- a/modules/congestion_controller/rtp/transport_feedback_demuxer.cc +++ b/modules/congestion_controller/rtp/transport_feedback_demuxer.cc @@ -9,8 +9,17 @@ */ #include "modules/congestion_controller/rtp/transport_feedback_demuxer.h" +#include +#include +#include +#include + #include "absl/algorithm/container.h" +#include "api/sequence_checker.h" +#include "api/units/time_delta.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" +#include "rtc_base/checks.h" namespace webrtc { namespace { diff --git a/modules/congestion_controller/rtp/transport_feedback_demuxer.h b/modules/congestion_controller/rtp/transport_feedback_demuxer.h index 278c144b61..80661176d3 100644 --- a/modules/congestion_controller/rtp/transport_feedback_demuxer.h +++ b/modules/congestion_controller/rtp/transport_feedback_demuxer.h @@ -10,15 +10,16 @@ #ifndef MODULES_CONGESTION_CONTROLLER_RTP_TRANSPORT_FEEDBACK_DEMUXER_H_ #define MODULES_CONGESTION_CONTROLLER_RTP_TRANSPORT_FEEDBACK_DEMUXER_H_ +#include #include #include #include #include "api/sequence_checker.h" -#include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" #include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { diff --git a/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc b/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc index 52d8018bff..79f25a9879 100644 --- a/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc +++ b/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc @@ -9,6 +9,11 @@ */ #include "modules/congestion_controller/rtp/transport_feedback_demuxer.h" +#include +#include + +#include "api/units/timestamp.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" #include "test/gmock.h" #include "test/gtest.h" @@ -53,12 +58,13 @@ TEST(TransportFeedbackDemuxerTest, ObserverSanity) { const uint16_t kRtpStartSeq = 55; const int64_t kTransportStartSeq = 1; demuxer.AddPacket(CreatePacket(kSsrc, kRtpStartSeq, kTransportStartSeq, - /*is_retransmit=*/false)); + /*is_retransmission=*/false)); demuxer.AddPacket(CreatePacket(kSsrc, kRtpStartSeq + 1, kTransportStartSeq + 1, - /*is_retransmit=*/false)); - demuxer.AddPacket(CreatePacket( - kSsrc, kRtpStartSeq + 2, kTransportStartSeq + 2, /*is_retransmit=*/true)); + /*is_retransmission=*/false)); + demuxer.AddPacket(CreatePacket(kSsrc, kRtpStartSeq + 2, + kTransportStartSeq + 2, + /*is_retransmission=*/true)); rtcp::TransportFeedback feedback; feedback.SetBase(kTransportStartSeq, Timestamp::Millis(1)); diff --git a/modules/desktop_capture/BUILD.gn b/modules/desktop_capture/BUILD.gn index fcf1836930..4b004c6527 100644 --- a/modules/desktop_capture/BUILD.gn +++ 
b/modules/desktop_capture/BUILD.gn @@ -64,10 +64,10 @@ if (rtc_include_tests) { ":desktop_capture_mock", ":primitives", ":screen_drawer", - "../../rtc_base/third_party/base64", + "../../api:array_view", + "../../rtc_base:base64", "../../system_wrappers", "../../test:test_support", - "../../test:video_test_support", ] sources += [ "screen_capturer_integration_test.cc", @@ -236,8 +236,8 @@ if (rtc_include_tests) { "../../rtc_base:checks", "../../rtc_base:logging", "../../system_wrappers", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] if (is_posix || is_fuchsia) { sources += [ @@ -277,7 +277,11 @@ if ((is_linux || is_chromeos) && rtc_use_pipewire) { rtc_library("desktop_capture") { visibility = [ "*" ] defines = [] - deps = [] + deps = [ + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", + ] public_configs = [ ":x11_config" ] sources = [ "blank_detector_desktop_capturer_wrapper.cc", @@ -405,8 +409,11 @@ rtc_library("desktop_capture") { "Xdamage", "Xext", "Xfixes", - "Xrender", + + # Xrandr depends on Xrender and needs to be listed before its dependency. "Xrandr", + + "Xrender", "Xtst", ] } @@ -453,8 +460,7 @@ rtc_library("desktop_capture") { "../../rtc_base:divide_round", "//third_party/fuchsia-sdk/sdk/fidl/fuchsia.sysmem", "//third_party/fuchsia-sdk/sdk/fidl/fuchsia.ui.composition", - "//third_party/fuchsia-sdk/sdk/fidl/fuchsia.ui.scenic", - "//third_party/fuchsia-sdk/sdk/pkg/scenic_cpp", + "//third_party/fuchsia-sdk/sdk/fidl/fuchsia.ui.display.singleton", "//third_party/fuchsia-sdk/sdk/pkg/sys_cpp", ] } @@ -529,12 +535,6 @@ rtc_library("desktop_capture") { ] } - absl_deps = [ - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] - if (rtc_use_x11_extensions) { deps += [ "../../rtc_base:sanitizer" ] } @@ -628,8 +628,12 @@ if (is_mac) { "mac/desktop_frame_iosurface.mm", "mac/desktop_frame_provider.h", "mac/desktop_frame_provider.mm", + "mac/sck_picker_handle.h", + "mac/sck_picker_handle.mm", "mac/screen_capturer_mac.h", "mac/screen_capturer_mac.mm", + "mac/screen_capturer_sck.h", + "mac/screen_capturer_sck.mm", "mac/window_list_utils.h", "mouse_cursor.h", "mouse_cursor_monitor.h", @@ -652,16 +656,21 @@ if (is_mac) { "../../rtc_base:event_tracer", "../../rtc_base:logging", "../../rtc_base:macromagic", + "../../rtc_base:safe_conversions", "../../rtc_base:timeutils", "../../rtc_base/synchronization:mutex", "../../rtc_base/system:rtc_export", "../../sdk:helpers_objc", + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/strings:str_format", ] frameworks = [ "AppKit.framework", + "CoreVideo.framework", "IOKit.framework", "IOSurface.framework", ] + weak_frameworks = [ "ScreenCaptureKit.framework" ] # macOS 12.3 } } diff --git a/modules/desktop_capture/DEPS b/modules/desktop_capture/DEPS index 8c894c4430..d1d41b5ef0 100644 --- a/modules/desktop_capture/DEPS +++ b/modules/desktop_capture/DEPS @@ -16,4 +16,8 @@ specific_include_rules = { "screen_capturer_mac\.mm": [ "+sdk/objc", ], + "screen_capturer_sck\.mm": [ + "+absl/strings/str_format.h", + "+sdk/objc", + ], } diff --git a/modules/desktop_capture/cropping_window_capturer_win.cc b/modules/desktop_capture/cropping_window_capturer_win.cc index ab2f807d33..8376f1110a 100644 --- a/modules/desktop_capture/cropping_window_capturer_win.cc +++ 
b/modules/desktop_capture/cropping_window_capturer_win.cc @@ -152,7 +152,7 @@ class CroppingWindowCapturerWin : public CroppingWindowCapturer { bool enumerate_current_process_windows_; - rtc::scoped_refptr full_screen_window_detector_; + webrtc::scoped_refptr full_screen_window_detector_; // Used to make sure that we only log the usage of fullscreen detection once. mutable bool fullscreen_usage_logged_ = false; @@ -199,7 +199,7 @@ void CroppingWindowCapturerWin::CaptureFrame() { } bool CroppingWindowCapturerWin::ShouldUseScreenCapturer() { - if (rtc::rtc_win::GetVersion() < rtc::rtc_win::Version::VERSION_WIN8 && + if (webrtc::rtc_win::GetVersion() < webrtc::rtc_win::Version::VERSION_WIN8 && window_capture_helper_.IsAeroEnabled()) { return false; } diff --git a/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc b/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc index 179e002bc5..45673722fe 100644 --- a/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc +++ b/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc @@ -121,7 +121,9 @@ class FakeScreenCapturer : public DesktopCapturer { next_frame_ = std::move(next_frame); } - bool IsOccluded(const DesktopVector& pos) override { return is_occluded_; } + bool IsOccluded(const DesktopVector& /* pos */) override { + return is_occluded_; + } void set_is_occluded(bool value) { is_occluded_ = value; } @@ -147,7 +149,9 @@ class FakeMouseMonitor : public MouseCursorMonitor { hotspot_ = hotspot; } - void Init(Callback* callback, Mode mode) override { callback_ = callback; } + void Init(Callback* callback, Mode /* mode */) override { + callback_ = callback; + } void Capture() override { if (changed_) { @@ -209,7 +213,7 @@ class DesktopAndCursorComposerTest : public ::testing::Test, } // DesktopCapturer::Callback interface - void OnCaptureResult(DesktopCapturer::Result result, + void OnCaptureResult(DesktopCapturer::Result /* result */, std::unique_ptr frame) override { frame_ = std::move(frame); } diff --git a/modules/desktop_capture/desktop_capture_options.cc b/modules/desktop_capture/desktop_capture_options.cc index 22c59ef4cc..8d28d2bacc 100644 --- a/modules/desktop_capture/desktop_capture_options.cc +++ b/modules/desktop_capture/desktop_capture_options.cc @@ -46,13 +46,13 @@ DesktopCaptureOptions DesktopCaptureOptions::CreateDefault() { #endif #if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) result.set_configuration_monitor( - rtc::make_ref_counted()); + webrtc::make_ref_counted()); result.set_full_screen_window_detector( - rtc::make_ref_counted( + webrtc::make_ref_counted( CreateFullScreenMacApplicationHandler)); #elif defined(WEBRTC_WIN) result.set_full_screen_window_detector( - rtc::make_ref_counted( + webrtc::make_ref_counted( CreateFullScreenWinApplicationHandler)); #endif return result; diff --git a/modules/desktop_capture/desktop_capture_options.h b/modules/desktop_capture/desktop_capture_options.h index ee823d296f..dca0c6386f 100644 --- a/modules/desktop_capture/desktop_capture_options.h +++ b/modules/desktop_capture/desktop_capture_options.h @@ -47,10 +47,8 @@ class RTC_EXPORT DesktopCaptureOptions { DesktopCaptureOptions& operator=(DesktopCaptureOptions&& options); #if defined(WEBRTC_USE_X11) - const rtc::scoped_refptr& x_display() const { - return x_display_; - } - void set_x_display(rtc::scoped_refptr x_display) { + const scoped_refptr& x_display() const { return x_display_; } + void set_x_display(scoped_refptr x_display) { x_display_ = x_display; } #endif @@ -59,27 +57,42 @@ class 
RTC_EXPORT DesktopCaptureOptions { // TODO(zijiehe): Remove both DesktopConfigurationMonitor and // FullScreenChromeWindowDetector out of DesktopCaptureOptions. It's not // reasonable for external consumers to set these two parameters. - const rtc::scoped_refptr& configuration_monitor() - const { + const webrtc::scoped_refptr& + configuration_monitor() const { return configuration_monitor_; } // If nullptr is set, ScreenCapturer won't work and WindowCapturer may return // inaccurate result from IsOccluded() function. void set_configuration_monitor( - rtc::scoped_refptr m) { + webrtc::scoped_refptr m) { configuration_monitor_ = m; } bool allow_iosurface() const { return allow_iosurface_; } void set_allow_iosurface(bool allow) { allow_iosurface_ = allow; } + + // If this flag is set, and the system supports it, ScreenCaptureKit will be + // used for desktop capture. + // TODO: crbug.com/327458809 - Force the use of SCK and ignore this flag in + // new versions of macOS that remove support for the CGDisplay-based APIs. + bool allow_sck_capturer() const { return allow_sck_capturer_; } + void set_allow_sck_capturer(bool allow) { allow_sck_capturer_ = allow; } + + // If ScreenCaptureKit is used for desktop capture and this flag is + // set, the ScreenCaptureKit backend will use SCContentSharingPicker for + // picking source. + bool allow_sck_system_picker() const { return allow_sck_system_picker_; } + void set_allow_sck_system_picker(bool allow) { + allow_sck_system_picker_ = allow; + } #endif - const rtc::scoped_refptr& - full_screen_window_detector() const { + const scoped_refptr& full_screen_window_detector() + const { return full_screen_window_detector_; } void set_full_screen_window_detector( - rtc::scoped_refptr detector) { + scoped_refptr detector) { full_screen_window_detector_ = detector; } @@ -193,6 +206,12 @@ class RTC_EXPORT DesktopCaptureOptions { // The flag has no effect if the allow_wgc_capturer flag is false. bool allow_wgc_zero_hertz() const { return allow_wgc_zero_hertz_; } void set_allow_wgc_zero_hertz(bool allow) { allow_wgc_zero_hertz_ = allow; } + + // This flag controls whether the WGC capturer is required to draw a border + // around the captured window/screen. + // The flag has no effect if the allow_wgc_capturer flag is false. + bool wgc_require_border() const { return wgc_require_border_; } + void set_wgc_require_border(bool require) { wgc_require_border_ = require; } #endif // defined(RTC_ENABLE_WIN_WGC) #endif // defined(WEBRTC_WIN) @@ -200,11 +219,12 @@ class RTC_EXPORT DesktopCaptureOptions { bool allow_pipewire() const { return allow_pipewire_; } void set_allow_pipewire(bool allow) { allow_pipewire_ = allow; } - const rtc::scoped_refptr& screencast_stream() const { + const webrtc::scoped_refptr& screencast_stream() + const { return screencast_stream_; } void set_screencast_stream( - rtc::scoped_refptr stream) { + webrtc::scoped_refptr stream) { screencast_stream_ = stream; } @@ -224,20 +244,22 @@ class RTC_EXPORT DesktopCaptureOptions { private: #if defined(WEBRTC_USE_X11) - rtc::scoped_refptr x_display_; + scoped_refptr x_display_; #endif #if defined(WEBRTC_USE_PIPEWIRE) // An instance of shared PipeWire ScreenCast stream we share between // BaseCapturerPipeWire and MouseCursorMonitorPipeWire as cursor information // is sent together with screen content. 
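For embedders, the ScreenCaptureKit switches introduced above compose with the existing capturer factories. The sketch below assumes the WebRTC desktop_capture headers are available; CreateSckScreenCapturer is an illustrative name, and only setters visible in this header are used (the SCK setters are compiled only on macOS, non-iOS builds).

#include <memory>

#include "modules/desktop_capture/desktop_capture_options.h"
#include "modules/desktop_capture/desktop_capturer.h"

// Sketch: opt a macOS embedder into the ScreenCaptureKit backend and its
// system picker before creating the screen capturer.
std::unique_ptr<webrtc::DesktopCapturer> CreateSckScreenCapturer() {
  webrtc::DesktopCaptureOptions options =
      webrtc::DesktopCaptureOptions::CreateDefault();
  options.set_allow_iosurface(true);          // Existing CGDisplay/IOSurface path.
  options.set_allow_sck_capturer(true);       // Prefer ScreenCaptureKit when supported.
  options.set_allow_sck_system_picker(true);  // Use SCContentSharingPicker for source selection.
  return webrtc::DesktopCapturer::CreateScreenCapturer(options);
}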
- rtc::scoped_refptr screencast_stream_; + webrtc::scoped_refptr screencast_stream_; #endif #if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) - rtc::scoped_refptr configuration_monitor_; + webrtc::scoped_refptr configuration_monitor_; bool allow_iosurface_ = false; + bool allow_sck_capturer_ = false; + bool allow_sck_system_picker_ = false; #endif - rtc::scoped_refptr full_screen_window_detector_; + scoped_refptr full_screen_window_detector_; #if defined(WEBRTC_WIN) bool enumerate_current_process_windows_ = true; @@ -248,6 +270,7 @@ class RTC_EXPORT DesktopCaptureOptions { bool allow_wgc_window_capturer_ = false; bool allow_wgc_capturer_fallback_ = false; bool allow_wgc_zero_hertz_ = false; + bool wgc_require_border_ = false; #endif #endif #if defined(WEBRTC_USE_X11) diff --git a/modules/desktop_capture/desktop_capturer.cc b/modules/desktop_capture/desktop_capturer.cc index a52a76c262..94605f409c 100644 --- a/modules/desktop_capture/desktop_capturer.cc +++ b/modules/desktop_capture/desktop_capturer.cc @@ -30,6 +30,10 @@ #include "modules/desktop_capture/linux/wayland/base_capturer_pipewire.h" #endif +#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) +#include "modules/desktop_capture/mac/screen_capturer_sck.h" +#endif + namespace webrtc { void LogDesktopCapturerFullscreenDetectorUsage() { @@ -45,15 +49,15 @@ DesktopCapturer::GetDelegatedSourceListController() { } void DesktopCapturer::SetSharedMemoryFactory( - std::unique_ptr shared_memory_factory) {} + std::unique_ptr /* shared_memory_factory */) {} -void DesktopCapturer::SetExcludedWindow(WindowId window) {} +void DesktopCapturer::SetExcludedWindow(WindowId /* window */) {} -bool DesktopCapturer::GetSourceList(SourceList* sources) { +bool DesktopCapturer::GetSourceList(SourceList* /* sources */) { return true; } -bool DesktopCapturer::SelectSource(SourceId id) { +bool DesktopCapturer::SelectSource(SourceId /* id */) { return false; } @@ -61,7 +65,7 @@ bool DesktopCapturer::FocusOnSelectedSource() { return false; } -bool DesktopCapturer::IsOccluded(const DesktopVector& pos) { +bool DesktopCapturer::IsOccluded(const DesktopVector& /* pos */) { return false; } @@ -109,7 +113,7 @@ std::unique_ptr DesktopCapturer::CreateScreenCapturer( // static std::unique_ptr DesktopCapturer::CreateGenericCapturer( - const DesktopCaptureOptions& options) { + [[maybe_unused]] const DesktopCaptureOptions& options) { std::unique_ptr capturer; #if defined(WEBRTC_USE_PIPEWIRE) @@ -117,11 +121,13 @@ std::unique_ptr DesktopCapturer::CreateGenericCapturer( capturer = std::make_unique( options, CaptureType::kAnyScreenContent); } +#elif defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) + capturer = CreateGenericCapturerSck(options); +#endif if (capturer && options.detect_updated_region()) { capturer.reset(new DesktopCapturerDifferWrapper(std::move(capturer))); } -#endif // defined(WEBRTC_USE_PIPEWIRE) return capturer; } diff --git a/modules/desktop_capture/desktop_capturer.h b/modules/desktop_capture/desktop_capturer.h index 9c7ecc78f4..bb0742a410 100644 --- a/modules/desktop_capture/desktop_capturer.h +++ b/modules/desktop_capture/desktop_capturer.h @@ -109,7 +109,7 @@ class RTC_EXPORT DesktopCapturer { // supported by all capturers. This will only affect the frequency at which // new frames are available, not the frequency at which you are allowed to // capture the frames. 
- virtual void SetMaxFrameRate(uint32_t max_frame_rate) {} + virtual void SetMaxFrameRate(uint32_t /* max_frame_rate */) {} // Returns a valid pointer if the capturer requires the user to make a // selection from a source list provided by the capturer. diff --git a/modules/desktop_capture/desktop_capturer_differ_wrapper.cc b/modules/desktop_capture/desktop_capturer_differ_wrapper.cc index 77543e4060..6656fdda2f 100644 --- a/modules/desktop_capture/desktop_capturer_differ_wrapper.cc +++ b/modules/desktop_capture/desktop_capturer_differ_wrapper.cc @@ -195,7 +195,7 @@ DesktopCaptureMetadata DesktopCapturerDifferWrapper::GetMetadata() { void DesktopCapturerDifferWrapper::OnCaptureResult( Result result, std::unique_ptr input_frame) { - int64_t start_time_nanos = rtc::TimeNanos(); + int64_t start_time_nanos = TimeNanos(); if (!input_frame) { callback_->OnCaptureResult(result, nullptr); return; @@ -224,8 +224,8 @@ void DesktopCapturerDifferWrapper::OnCaptureResult( last_frame_ = frame->Share(); frame->set_capture_time_ms(frame->capture_time_ms() + - (rtc::TimeNanos() - start_time_nanos) / - rtc::kNumNanosecsPerMillisec); + (TimeNanos() - start_time_nanos) / + kNumNanosecsPerMillisec); callback_->OnCaptureResult(result, std::move(frame)); } diff --git a/modules/desktop_capture/desktop_capturer_differ_wrapper_unittest.cc b/modules/desktop_capture/desktop_capturer_differ_wrapper_unittest.cc index 9ccef3cc10..dba6c3041f 100644 --- a/modules/desktop_capture/desktop_capturer_differ_wrapper_unittest.cc +++ b/modules/desktop_capture/desktop_capturer_differ_wrapper_unittest.cc @@ -199,7 +199,7 @@ void ExecuteDifferWrapperTest(bool with_hints, frame_generator.size()->height())}, check_result, updated_region_should_exactly_match); - Random random(rtc::TimeMillis()); + Random random(TimeMillis()); // Fuzzing tests. 
for (int i = 0; i < 1000; i++) { if (enlarge_updated_region) { @@ -258,34 +258,34 @@ TEST(DesktopCapturerDifferWrapperTest, CaptureWithEnlargedAndRandomHints) { // [ RUN ] DISABLED_CaptureWithEnlargedAndRandomHintsPerf // [ OK ] DISABLED_CaptureWithEnlargedAndRandomHintsPerf (6347 ms) TEST(DesktopCapturerDifferWrapperTest, DISABLED_CaptureWithoutHintsPerf) { - int64_t started = rtc::TimeMillis(); + int64_t started = TimeMillis(); ExecuteDifferWrapperTest(false, false, false, false); - ASSERT_LE(rtc::TimeMillis() - started, 15000); + ASSERT_LE(TimeMillis() - started, 15000); } TEST(DesktopCapturerDifferWrapperTest, DISABLED_CaptureWithHintsPerf) { - int64_t started = rtc::TimeMillis(); + int64_t started = TimeMillis(); ExecuteDifferWrapperTest(true, false, false, false); - ASSERT_LE(rtc::TimeMillis() - started, 15000); + ASSERT_LE(TimeMillis() - started, 15000); } TEST(DesktopCapturerDifferWrapperTest, DISABLED_CaptureWithEnlargedHintsPerf) { - int64_t started = rtc::TimeMillis(); + int64_t started = TimeMillis(); ExecuteDifferWrapperTest(true, true, false, false); - ASSERT_LE(rtc::TimeMillis() - started, 15000); + ASSERT_LE(TimeMillis() - started, 15000); } TEST(DesktopCapturerDifferWrapperTest, DISABLED_CaptureWithRandomHintsPerf) { - int64_t started = rtc::TimeMillis(); + int64_t started = TimeMillis(); ExecuteDifferWrapperTest(true, false, true, false); - ASSERT_LE(rtc::TimeMillis() - started, 15000); + ASSERT_LE(TimeMillis() - started, 15000); } TEST(DesktopCapturerDifferWrapperTest, DISABLED_CaptureWithEnlargedAndRandomHintsPerf) { - int64_t started = rtc::TimeMillis(); + int64_t started = TimeMillis(); ExecuteDifferWrapperTest(true, true, true, false); - ASSERT_LE(rtc::TimeMillis() - started, 15000); + ASSERT_LE(TimeMillis() - started, 15000); } } // namespace webrtc diff --git a/modules/desktop_capture/desktop_frame.cc b/modules/desktop_capture/desktop_frame.cc index c2c8688990..00316e3fdd 100644 --- a/modules/desktop_capture/desktop_frame.cc +++ b/modules/desktop_capture/desktop_frame.cc @@ -138,6 +138,7 @@ void DesktopFrame::CopyFrameInfoFrom(const DesktopFrame& other) { set_top_left(other.top_left()); set_icc_profile(other.icc_profile()); set_may_contain_cursor(other.may_contain_cursor()); + set_device_scale_factor(other.device_scale_factor()); } void DesktopFrame::MoveFrameInfoFrom(DesktopFrame* other) { @@ -148,6 +149,7 @@ void DesktopFrame::MoveFrameInfoFrom(DesktopFrame* other) { set_top_left(other->top_left()); set_icc_profile(other->icc_profile()); set_may_contain_cursor(other->may_contain_cursor()); + set_device_scale_factor(other->device_scale_factor()); } bool DesktopFrame::FrameDataIsBlack() const { diff --git a/modules/desktop_capture/desktop_frame.h b/modules/desktop_capture/desktop_frame.h index 35ac8e2475..141ac92af7 100644 --- a/modules/desktop_capture/desktop_frame.h +++ b/modules/desktop_capture/desktop_frame.h @@ -14,6 +14,7 @@ #include #include +#include #include #include "modules/desktop_capture/desktop_geometry.h" @@ -74,6 +75,12 @@ class RTC_EXPORT DesktopFrame { const DesktopVector& dpi() const { return dpi_; } void set_dpi(const DesktopVector& dpi) { dpi_ = dpi; } + std::optional device_scale_factor() const { + return device_scale_factor_; + } + void set_device_scale_factor(std::optional device_scale_factor) { + device_scale_factor_ = device_scale_factor; + } // Indicates if this frame may have the mouse cursor in it. Capturers that // support cursor capture may set this to true. 
If the cursor was // outside of the captured area, this may be true even though the cursor is @@ -172,6 +179,10 @@ class RTC_EXPORT DesktopFrame { int64_t capture_time_ms_; uint32_t capturer_id_; std::vector icc_profile_; + // Currently only used on Windows. It stores the device scale factor of the + // captured surface and has distinct values possible in the range of + // [1,5]. + std::optional device_scale_factor_; }; // A DesktopFrame that stores data in the heap. diff --git a/modules/desktop_capture/desktop_frame_generator.cc b/modules/desktop_capture/desktop_frame_generator.cc index b5dfc28e46..7ec96c5126 100644 --- a/modules/desktop_capture/desktop_frame_generator.cc +++ b/modules/desktop_capture/desktop_frame_generator.cc @@ -35,7 +35,7 @@ void SetUpdatedRegion(DesktopFrame* frame, int enlarge_range, bool add_random_updated_region) { const DesktopRect screen_rect = DesktopRect::MakeSize(frame->size()); - Random random(rtc::TimeMicros()); + Random random(TimeMicros()); frame->mutable_updated_region()->Clear(); for (DesktopRegion::Iterator it(updated_region); !it.IsAtEnd(); it.Advance()) { diff --git a/modules/desktop_capture/desktop_frame_unittest.cc b/modules/desktop_capture/desktop_frame_unittest.cc index e690e6ae5b..77945f6be9 100644 --- a/modules/desktop_capture/desktop_frame_unittest.cc +++ b/modules/desktop_capture/desktop_frame_unittest.cc @@ -97,6 +97,19 @@ TEST(DesktopFrameTest, EmptyFrameIsNotBlack) { EXPECT_FALSE(frame->FrameDataIsBlack()); } +TEST(DesktopFrameTest, FrameHasDefaultDeviceScaleFactor) { + auto frame = std::make_unique(DesktopSize()); + EXPECT_EQ(frame->device_scale_factor(), std::nullopt); +} + +TEST(DesktopFrameTest, FrameSetsDeviceScaleFactorCorrectly) { + auto frame = std::make_unique(DesktopSize()); + EXPECT_EQ(frame->device_scale_factor(), std::nullopt); + float device_scale_factor = 1.5f; + frame->set_device_scale_factor(device_scale_factor); + EXPECT_EQ(frame->device_scale_factor(), device_scale_factor); +} + TEST(DesktopFrameTest, FrameDataSwitchesBetweenNonBlackAndBlack) { auto frame = CreateTestFrame(DesktopRect::MakeXYWH(0, 0, 10, 10), 0xff); EXPECT_FALSE(frame->FrameDataIsBlack()); @@ -234,7 +247,6 @@ TEST(DesktopFrameTest, CopyIntersectingPixelsFullyContainedRectsScaled) { RunTests(tests, arraysize(tests)); } - TEST(DesktopFrameTest, CopyIntersectingPixelsPartiallyContainedRects) { // clang-format off const TestData tests[] = { @@ -299,7 +311,6 @@ TEST(DesktopFrameTest, CopyIntersectingPixelsPartiallyContainedRectsScaled) { RunTests(tests, arraysize(tests)); } - TEST(DesktopFrameTest, CopyIntersectingPixelsUncontainedRects) { // clang-format off const TestData tests[] = { diff --git a/modules/desktop_capture/full_screen_application_handler.h b/modules/desktop_capture/full_screen_application_handler.h index b7e097a474..f816dd416c 100644 --- a/modules/desktop_capture/full_screen_application_handler.h +++ b/modules/desktop_capture/full_screen_application_handler.h @@ -41,7 +41,20 @@ class FullScreenApplicationHandler { // FullScreenApplicationHandler DesktopCapturer::SourceId GetSourceId() const; + void SetUseHeuristicFullscreenPowerPointWindows( + bool use_heuristic_fullscreen_powerpoint_windows) { + use_heuristic_fullscreen_powerpoint_windows_ = + use_heuristic_fullscreen_powerpoint_windows; + } + + bool UseHeuristicFullscreenPowerPointWindows() const { + return use_heuristic_fullscreen_powerpoint_windows_; + } + private: + // `use_heuristic_fullscreen_powerpoint_windows_` is used to implement a + // killswitch. 
+ bool use_heuristic_fullscreen_powerpoint_windows_ = true; const DesktopCapturer::SourceId source_id_; }; diff --git a/modules/desktop_capture/full_screen_window_detector.cc b/modules/desktop_capture/full_screen_window_detector.cc index 956a0b4663..b4eafc1771 100644 --- a/modules/desktop_capture/full_screen_window_detector.cc +++ b/modules/desktop_capture/full_screen_window_detector.cc @@ -33,7 +33,7 @@ DesktopCapturer::SourceId FullScreenWindowDetector::FindFullScreenWindow( void FullScreenWindowDetector::UpdateWindowListIfNeeded( DesktopCapturer::SourceId original_source_id, - rtc::FunctionView get_sources) { + FunctionView get_sources) { const bool skip_update = previous_source_id_ != original_source_id; previous_source_id_ = original_source_id; @@ -54,13 +54,13 @@ void FullScreenWindowDetector::UpdateWindowListIfNeeded( constexpr int64_t kUpdateIntervalMs = 500; - if ((rtc::TimeMillis() - last_update_time_ms_) <= kUpdateIntervalMs) { + if ((TimeMillis() - last_update_time_ms_) <= kUpdateIntervalMs) { return; } DesktopCapturer::SourceList window_list; if (get_sources(&window_list)) { - last_update_time_ms_ = rtc::TimeMillis(); + last_update_time_ms_ = TimeMillis(); window_list_.swap(window_list); } } @@ -79,6 +79,9 @@ void FullScreenWindowDetector::CreateApplicationHandlerIfNeeded( if (app_handler_ == nullptr) { no_handler_source_id_ = source_id; + } else { + app_handler_->SetUseHeuristicFullscreenPowerPointWindows( + use_heuristic_fullscreen_powerpoint_windows_); } } diff --git a/modules/desktop_capture/full_screen_window_detector.h b/modules/desktop_capture/full_screen_window_detector.h index 998b720d90..ad50c2ce47 100644 --- a/modules/desktop_capture/full_screen_window_detector.h +++ b/modules/desktop_capture/full_screen_window_detector.h @@ -33,7 +33,7 @@ namespace webrtc { // FullScreenApplicationHandler. class FullScreenWindowDetector - : public rtc::RefCountedNonVirtual { + : public RefCountedNonVirtual { public: using ApplicationHandlerFactory = std::function( @@ -55,10 +55,19 @@ class FullScreenWindowDetector // update internal state no often than twice per second void UpdateWindowListIfNeeded( DesktopCapturer::SourceId original_source_id, - rtc::FunctionView get_sources); + FunctionView get_sources); - static rtc::scoped_refptr + static scoped_refptr CreateFullScreenWindowDetector(); + void SetUseHeuristicFullscreenPowerPointWindows( + bool use_heuristic_fullscreen_powerpoint_windows) { + use_heuristic_fullscreen_powerpoint_windows_ = + use_heuristic_fullscreen_powerpoint_windows; + if (app_handler_) { + app_handler_->SetUseHeuristicFullscreenPowerPointWindows( + use_heuristic_fullscreen_powerpoint_windows); + } + } protected: std::unique_ptr app_handler_; @@ -67,6 +76,12 @@ class FullScreenWindowDetector void CreateApplicationHandlerIfNeeded(DesktopCapturer::SourceId source_id); ApplicationHandlerFactory application_handler_factory_; + // `use_heuristic_fullscreen_powerpoint_windows_` controls if we create the + // FullScreenPowerPointHandler class or not. + // TODO(crbug.com/409473386): Remove + // `use_heuristic_fullscreen_powerpoint_windows_` once the feature is + // available in stable for some milestones. 
+ bool use_heuristic_fullscreen_powerpoint_windows_ = true; int64_t last_update_time_ms_; DesktopCapturer::SourceId previous_source_id_; diff --git a/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc b/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc index 4ef00e68ab..7228063401 100644 --- a/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc +++ b/modules/desktop_capture/linux/wayland/base_capturer_pipewire.cc @@ -82,8 +82,10 @@ void BaseCapturerPipeWire::OnScreenCastRequestResult(RequestResponse result, << static_cast(result); } else if (ScreenCastPortal* screencast_portal = GetScreenCastPortal()) { if (!screencast_portal->RestoreToken().empty()) { + const SourceId token_id = + selected_source_id_ ? selected_source_id_ : source_id_; RestoreTokenManager::GetInstance().AddToken( - source_id_, screencast_portal->RestoreToken()); + token_id, screencast_portal->RestoreToken()); } } @@ -110,6 +112,7 @@ void BaseCapturerPipeWire::OnScreenCastSessionClosed() { if (!capturer_failed_) { options_.screencast_stream()->StopScreenCastStream(); } + capturer_failed_ = true; } void BaseCapturerPipeWire::UpdateResolution(uint32_t width, uint32_t height) { @@ -137,7 +140,7 @@ void BaseCapturerPipeWire::Start(Callback* callback) { ScreenCastPortal::PersistMode::kTransient); if (selected_source_id_) { screencast_portal->SetRestoreToken( - RestoreTokenManager::GetInstance().TakeToken(selected_source_id_)); + RestoreTokenManager::GetInstance().GetToken(selected_source_id_)); } } @@ -155,7 +158,7 @@ void BaseCapturerPipeWire::CaptureFrame() { return; } - int64_t capture_start_time_nanos = rtc::TimeNanos(); + int64_t capture_start_time_nanos = webrtc::TimeNanos(); std::unique_ptr frame = options_.screencast_stream()->CaptureFrame(); @@ -168,8 +171,8 @@ void BaseCapturerPipeWire::CaptureFrame() { // the frame, see ScreenCapturerX11::CaptureFrame. 
frame->set_capturer_id(DesktopCapturerId::kWaylandCapturerLinux); - frame->set_capture_time_ms((rtc::TimeNanos() - capture_start_time_nanos) / - rtc::kNumNanosecsPerMillisec); + frame->set_capture_time_ms((webrtc::TimeNanos() - capture_start_time_nanos) / + webrtc::kNumNanosecsPerMillisec); callback_->OnCaptureResult(Result::SUCCESS, std::move(frame)); } diff --git a/modules/desktop_capture/linux/wayland/egl_dmabuf.cc b/modules/desktop_capture/linux/wayland/egl_dmabuf.cc index b529077c6d..c67f126fad 100644 --- a/modules/desktop_capture/linux/wayland/egl_dmabuf.cc +++ b/modules/desktop_capture/linux/wayland/egl_dmabuf.cc @@ -10,6 +10,7 @@ #include "modules/desktop_capture/linux/wayland/egl_dmabuf.h" +#include #include #include #include @@ -19,8 +20,9 @@ #include #include +#include + #include "absl/memory/memory.h" -#include "absl/types/optional.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/sanitizer.h" @@ -369,7 +371,7 @@ EglDmaBuf::EglDmaBuf() { RTC_LOG(LS_ERROR) << "Failed to obtain default EGL display: " << FormatEGLError(EglGetError()) << "\n" << "Defaulting to using first available render node"; - absl::optional render_node = GetRenderNode(); + std::optional render_node = GetRenderNode(); if (!render_node) { return; } @@ -496,7 +498,7 @@ bool EglDmaBuf::GetClientExtensions(EGLDisplay dpy, EGLint name) { } std::vector client_extensions = - rtc::split(client_extensions_cstring, ' '); + webrtc::split(client_extensions_cstring, ' '); for (const auto& extension : client_extensions) { egl_.extensions.push_back(std::string(extension)); } @@ -724,19 +726,19 @@ std::vector EglDmaBuf::QueryDmaBufModifiers(uint32_t format) { return modifiers; } -absl::optional EglDmaBuf::GetRenderNode() { +std::optional EglDmaBuf::GetRenderNode() { int max_devices = drmGetDevices2(0, nullptr, 0); if (max_devices <= 0) { RTC_LOG(LS_ERROR) << "drmGetDevices2() has not found any devices (errno=" << -max_devices << ")"; - return absl::nullopt; + return std::nullopt; } std::vector devices(max_devices); int ret = drmGetDevices2(0, devices.data(), max_devices); if (ret < 0) { RTC_LOG(LS_ERROR) << "drmGetDevices2() returned an error " << ret; - return absl::nullopt; + return std::nullopt; } std::string render_node; diff --git a/modules/desktop_capture/linux/wayland/egl_dmabuf.h b/modules/desktop_capture/linux/wayland/egl_dmabuf.h index 22a8f5ab52..08df16bc63 100644 --- a/modules/desktop_capture/linux/wayland/egl_dmabuf.h +++ b/modules/desktop_capture/linux/wayland/egl_dmabuf.h @@ -11,15 +11,15 @@ #ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_EGL_DMABUF_H_ #define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_EGL_DMABUF_H_ -#include -#include +#include +#include #include #include +#include #include #include -#include "absl/types/optional.h" #include "modules/desktop_capture/desktop_geometry.h" namespace webrtc { @@ -66,7 +66,7 @@ class EglDmaBuf { GLuint texture_ = 0; EGLStruct egl_; - absl::optional GetRenderNode(); + std::optional GetRenderNode(); }; } // namespace webrtc diff --git a/modules/desktop_capture/linux/wayland/mouse_cursor_monitor_pipewire.cc b/modules/desktop_capture/linux/wayland/mouse_cursor_monitor_pipewire.cc index 3d33b0fbb8..21f550bea2 100644 --- a/modules/desktop_capture/linux/wayland/mouse_cursor_monitor_pipewire.cc +++ b/modules/desktop_capture/linux/wayland/mouse_cursor_monitor_pipewire.cc @@ -40,6 +40,14 @@ void MouseCursorMonitorPipeWire::Capture() { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DCHECK(callback_); + std::optional mouse_cursor_position = + 
      options_.screencast_stream()->CaptureCursorPosition();
+  // Invalid cursor or position
+  if (!mouse_cursor_position) {
+    callback_->OnMouseCursor(nullptr);
+    return;
+  }
+
   std::unique_ptr<MouseCursor> mouse_cursor =
       options_.screencast_stream()->CaptureCursor();
 
@@ -48,11 +56,7 @@
   }
 
   if (mode_ == SHAPE_AND_POSITION) {
-    absl::optional<DesktopVector> mouse_cursor_position =
-        options_.screencast_stream()->CaptureCursorPosition();
-    if (mouse_cursor_position) {
-      callback_->OnMouseCursorPosition(mouse_cursor_position.value());
-    }
+    callback_->OnMouseCursorPosition(mouse_cursor_position.value());
   }
 }
diff --git a/modules/desktop_capture/linux/wayland/restore_token_manager.cc b/modules/desktop_capture/linux/wayland/restore_token_manager.cc
index 5ca9b957a9..a17d9a49bb 100644
--- a/modules/desktop_capture/linux/wayland/restore_token_manager.cc
+++ b/modules/desktop_capture/linux/wayland/restore_token_manager.cc
@@ -23,10 +23,8 @@ void RestoreTokenManager::AddToken(DesktopCapturer::SourceId id,
   restore_tokens_.insert({id, token});
 }
 
-std::string RestoreTokenManager::TakeToken(DesktopCapturer::SourceId id) {
-  std::string token = restore_tokens_[id];
-  // Remove the token as it cannot be used anymore
-  restore_tokens_.erase(id);
+std::string RestoreTokenManager::GetToken(DesktopCapturer::SourceId id) {
+  const std::string token = restore_tokens_[id];
   return token;
 }
diff --git a/modules/desktop_capture/linux/wayland/restore_token_manager.h b/modules/desktop_capture/linux/wayland/restore_token_manager.h
index 174bef121f..ad4f74790f 100644
--- a/modules/desktop_capture/linux/wayland/restore_token_manager.h
+++ b/modules/desktop_capture/linux/wayland/restore_token_manager.h
@@ -27,7 +27,7 @@ class RestoreTokenManager {
   static RestoreTokenManager& GetInstance();
 
   void AddToken(DesktopCapturer::SourceId id, const std::string& token);
-  std::string TakeToken(DesktopCapturer::SourceId id);
+  std::string GetToken(DesktopCapturer::SourceId id);
 
   // Returns a source ID which does not have any token associated with it yet.
DesktopCapturer::SourceId GetUnusedId(); diff --git a/modules/desktop_capture/linux/wayland/screencast_stream_utils.cc b/modules/desktop_capture/linux/wayland/screencast_stream_utils.cc index 8177cfd211..e5eb267dcf 100644 --- a/modules/desktop_capture/linux/wayland/screencast_stream_utils.cc +++ b/modules/desktop_capture/linux/wayland/screencast_stream_utils.cc @@ -28,15 +28,15 @@ namespace webrtc { PipeWireVersion PipeWireVersion::Parse(const absl::string_view& version) { - std::vector parsed_version = rtc::split(version, '.'); + std::vector parsed_version = webrtc::split(version, '.'); if (parsed_version.size() != 3) { return {}; } - absl::optional major = rtc::StringToNumber(parsed_version.at(0)); - absl::optional minor = rtc::StringToNumber(parsed_version.at(1)); - absl::optional micro = rtc::StringToNumber(parsed_version.at(2)); + std::optional major = webrtc::StringToNumber(parsed_version.at(0)); + std::optional minor = webrtc::StringToNumber(parsed_version.at(1)); + std::optional micro = webrtc::StringToNumber(parsed_version.at(2)); // Return invalid version if we failed to parse it if (!major || !minor || !micro) { diff --git a/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc b/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc index 0c26e7a7d5..a022de6c6a 100644 --- a/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc +++ b/modules/desktop_capture/linux/wayland/shared_screencast_stream.cc @@ -14,7 +14,6 @@ #include #include #include -#include #include @@ -42,33 +41,6 @@ constexpr int CursorMetaSize(int w, int h) { constexpr PipeWireVersion kDmaBufModifierMinVersion = {0, 3, 33}; constexpr PipeWireVersion kDropSingleModifierMinVersion = {0, 3, 40}; -class ScopedBuf { - public: - ScopedBuf() {} - ScopedBuf(uint8_t* map, int map_size, int fd) - : map_(map), map_size_(map_size), fd_(fd) {} - ~ScopedBuf() { - if (map_ != MAP_FAILED) { - munmap(map_, map_size_); - } - } - - explicit operator bool() { return map_ != MAP_FAILED; } - - void initialize(uint8_t* map, int map_size, int fd) { - map_ = map; - map_size_ = map_size; - fd_ = fd; - } - - uint8_t* get() { return map_; } - - protected: - uint8_t* map_ = static_cast(MAP_FAILED); - int map_size_; - int fd_; -}; - class SharedScreenCastStreamPrivate { public: SharedScreenCastStreamPrivate(); @@ -101,7 +73,7 @@ class SharedScreenCastStreamPrivate { // Track damage region updates that were reported since the last time // frame was captured - DesktopRegion damage_region_; + DesktopRegion damage_region_ RTC_GUARDED_BY(&latest_frame_lock_); uint32_t pw_stream_node_id_ = 0; @@ -111,6 +83,9 @@ class SharedScreenCastStreamPrivate { webrtc::Mutex queue_lock_; ScreenCaptureFrameQueue queue_ RTC_GUARDED_BY(&queue_lock_); + webrtc::Mutex latest_frame_lock_ RTC_ACQUIRED_AFTER(queue_lock_); + SharedDesktopFrame* latest_available_frame_ + RTC_GUARDED_BY(&latest_frame_lock_) = nullptr; std::unique_ptr mouse_cursor_; DesktopVector mouse_cursor_position_ = DesktopVector(-1, -1); @@ -165,7 +140,6 @@ class SharedScreenCastStreamPrivate { void ConvertRGBxToBGRx(uint8_t* frame, uint32_t size); void UpdateFrameUpdatedRegions(const spa_buffer* spa_buffer, DesktopFrame& frame); - void NotifyCallbackOfNewFrame(std::unique_ptr frame); // PipeWire callbacks static void OnCoreError(void* data, @@ -351,6 +325,19 @@ void SharedScreenCastStreamPrivate::OnStreamProcess(void* data) { return; } + struct spa_meta_header* header = + static_cast(spa_buffer_find_meta_data( + buffer->buffer, SPA_META_Header, sizeof(*header))); + 
if (header && (header->flags & SPA_META_HEADER_FLAG_CORRUPTED)) { + RTC_LOG(LS_INFO) << "Dropping corrupted buffer"; + if (that->observer_) { + that->observer_->OnBufferCorruptedMetadata(); + } + // Queue buffer for reuse; it will not be processed further. + pw_stream_queue_buffer(that->pw_stream_, buffer); + return; + } + that->ProcessBuffer(buffer); pw_stream_queue_buffer(that->pw_stream_, buffer); @@ -600,6 +587,10 @@ void SharedScreenCastStreamPrivate::StopAndCleanupStream() { webrtc::MutexLock lock(&queue_lock_); queue_.Reset(); } + { + webrtc::MutexLock latest_frame_lock(&latest_frame_lock_); + latest_available_frame_ = nullptr; + } } if (pw_core_) { @@ -618,13 +609,13 @@ void SharedScreenCastStreamPrivate::StopAndCleanupStream() { std::unique_ptr SharedScreenCastStreamPrivate::CaptureFrame() { - webrtc::MutexLock lock(&queue_lock_); + webrtc::MutexLock latest_frame_lock(&latest_frame_lock_); - if (!pw_stream_ || !queue_.current_frame()) { + if (!pw_stream_ || !latest_available_frame_) { return std::unique_ptr{}; } - std::unique_ptr frame = queue_.current_frame()->Share(); + std::unique_ptr frame = latest_available_frame_->Share(); if (use_damage_region_) { frame->mutable_updated_region()->Swap(&damage_region_); damage_region_.Clear(); @@ -648,6 +639,8 @@ DesktopVector SharedScreenCastStreamPrivate::CaptureCursorPosition() { void SharedScreenCastStreamPrivate::UpdateFrameUpdatedRegions( const spa_buffer* spa_buffer, DesktopFrame& frame) { + latest_frame_lock_.AssertHeld(); + if (!use_damage_region_) { frame.mutable_updated_region()->SetRect( DesktopRect::MakeSize(frame.size())); @@ -676,25 +669,9 @@ void SharedScreenCastStreamPrivate::UpdateFrameUpdatedRegions( } } -void SharedScreenCastStreamPrivate::NotifyCallbackOfNewFrame( - std::unique_ptr frame) { - if (!pw_stream_ || !frame->data()) { - callback_->OnCaptureResult(DesktopCapturer::Result::ERROR_TEMPORARY, - nullptr); - return; - } - - if (use_damage_region_) { - frame->mutable_updated_region()->Swap(&damage_region_); - damage_region_.Clear(); - } - callback_->OnCaptureResult(DesktopCapturer::Result::SUCCESS, - std::move(frame)); -} - RTC_NO_SANITIZE("cfi-icall") void SharedScreenCastStreamPrivate::ProcessBuffer(pw_buffer* buffer) { - int64_t capture_start_time_nanos = rtc::TimeNanos(); + int64_t capture_start_time_nanos = webrtc::TimeNanos(); if (callback_) { callback_->OnFrameCaptureStart(); } @@ -707,37 +684,56 @@ void SharedScreenCastStreamPrivate::ProcessBuffer(pw_buffer* buffer) { const struct spa_meta_cursor* cursor = static_cast(spa_buffer_find_meta_data( spa_buffer, SPA_META_Cursor, sizeof(*cursor))); - if (cursor && spa_meta_cursor_is_valid(cursor)) { - struct spa_meta_bitmap* bitmap = nullptr; - - if (cursor->bitmap_offset) - bitmap = - SPA_MEMBER(cursor, cursor->bitmap_offset, struct spa_meta_bitmap); - - if (bitmap && bitmap->size.width > 0 && bitmap->size.height > 0) { - const uint8_t* bitmap_data = - SPA_MEMBER(bitmap, bitmap->offset, uint8_t); - BasicDesktopFrame* mouse_frame = new BasicDesktopFrame( - DesktopSize(bitmap->size.width, bitmap->size.height)); - mouse_frame->CopyPixelsFrom( - bitmap_data, bitmap->stride, - DesktopRect::MakeWH(bitmap->size.width, bitmap->size.height)); - mouse_cursor_ = std::make_unique( - mouse_frame, DesktopVector(cursor->hotspot.x, cursor->hotspot.y)); + + if (cursor) { + if (spa_meta_cursor_is_valid(cursor)) { + struct spa_meta_bitmap* bitmap = nullptr; + + if (cursor->bitmap_offset) + bitmap = + SPA_MEMBER(cursor, cursor->bitmap_offset, struct spa_meta_bitmap); + + if (bitmap && 
bitmap->size.width > 0 && bitmap->size.height > 0) { + const uint8_t* bitmap_data = + SPA_MEMBER(bitmap, bitmap->offset, uint8_t); + BasicDesktopFrame* mouse_frame = new BasicDesktopFrame( + DesktopSize(bitmap->size.width, bitmap->size.height)); + mouse_frame->CopyPixelsFrom( + bitmap_data, bitmap->stride, + DesktopRect::MakeWH(bitmap->size.width, bitmap->size.height)); + mouse_cursor_ = std::make_unique( + mouse_frame, DesktopVector(cursor->hotspot.x, cursor->hotspot.y)); + + if (observer_) { + observer_->OnCursorShapeChanged(); + } + } + mouse_cursor_position_.set(cursor->position.x, cursor->position.y); if (observer_) { - observer_->OnCursorShapeChanged(); + observer_->OnCursorPositionChanged(); } + } else { + // Indicate an invalid cursor + mouse_cursor_position_.set(-1, -1); } - mouse_cursor_position_.set(cursor->position.x, cursor->position.y); + } + } - if (observer_) { - observer_->OnCursorPositionChanged(); - } + if (spa_buffer->datas[0].chunk->flags & SPA_CHUNK_FLAG_CORRUPTED) { + RTC_LOG(LS_INFO) << "Dropping buffer with corrupted or missing data"; + if (observer_) { + observer_->OnBufferCorruptedData(); } + return; } - if (spa_buffer->datas[0].chunk->size == 0) { + if (spa_buffer->datas[0].type == SPA_DATA_MemFd && + spa_buffer->datas[0].chunk->size == 0) { + RTC_LOG(LS_INFO) << "Dropping buffer with empty data"; + if (observer_) { + observer_->OnEmptyBuffer(); + } return; } @@ -844,6 +840,8 @@ void SharedScreenCastStreamPrivate::ProcessBuffer(pw_buffer* buffer) { if (observer_) { observer_->OnFailedToProcessBuffer(); } + webrtc::MutexLock latest_frame_lock(&latest_frame_lock_); + latest_available_frame_ = nullptr; return; } @@ -862,15 +860,33 @@ void SharedScreenCastStreamPrivate::ProcessBuffer(pw_buffer* buffer) { observer_->OnDesktopFrameChanged(); } - UpdateFrameUpdatedRegions(spa_buffer, *queue_.current_frame()); - queue_.current_frame()->set_may_contain_cursor(is_cursor_embedded_); + std::unique_ptr frame; + { + webrtc::MutexLock latest_frame_lock(&latest_frame_lock_); - if (callback_) { - std::unique_ptr frame = queue_.current_frame()->Share(); + UpdateFrameUpdatedRegions(spa_buffer, *queue_.current_frame()); + queue_.current_frame()->set_may_contain_cursor(is_cursor_embedded_); + + latest_available_frame_ = queue_.current_frame(); + + if (!callback_) { + return; + } + + frame = latest_available_frame_->Share(); frame->set_capturer_id(DesktopCapturerId::kWaylandCapturerLinux); - frame->set_capture_time_ms((rtc::TimeNanos() - capture_start_time_nanos) / - rtc::kNumNanosecsPerMillisec); - NotifyCallbackOfNewFrame(std::move(frame)); + frame->set_capture_time_ms( + (webrtc::TimeNanos() - capture_start_time_nanos) / + webrtc::kNumNanosecsPerMillisec); + if (use_damage_region_) { + frame->mutable_updated_region()->Swap(&damage_region_); + damage_region_.Clear(); + } + } + + if (callback_) { + callback_->OnCaptureResult(DesktopCapturer::Result::SUCCESS, + std::move(frame)); } } @@ -970,10 +986,10 @@ SharedScreenCastStream::SharedScreenCastStream() SharedScreenCastStream::~SharedScreenCastStream() {} -rtc::scoped_refptr +webrtc::scoped_refptr SharedScreenCastStream::CreateDefault() { // Explicit new, to access non-public constructor. 
- return rtc::scoped_refptr( + return webrtc::scoped_refptr( new SharedScreenCastStream()); } @@ -1023,12 +1039,12 @@ std::unique_ptr SharedScreenCastStream::CaptureCursor() { return private_->CaptureCursor(); } -absl::optional SharedScreenCastStream::CaptureCursorPosition() { +std::optional SharedScreenCastStream::CaptureCursorPosition() { DesktopVector position = private_->CaptureCursorPosition(); // Consider only (x >= 0 and y >= 0) a valid position if (position.x() < 0 || position.y() < 0) { - return absl::nullopt; + return std::nullopt; } return position; diff --git a/modules/desktop_capture/linux/wayland/shared_screencast_stream.h b/modules/desktop_capture/linux/wayland/shared_screencast_stream.h index f57e22cb69..0043f7af77 100644 --- a/modules/desktop_capture/linux/wayland/shared_screencast_stream.h +++ b/modules/desktop_capture/linux/wayland/shared_screencast_stream.h @@ -12,8 +12,8 @@ #define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SHARED_SCREENCAST_STREAM_H_ #include +#include -#include "absl/types/optional.h" #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" #include "modules/desktop_capture/desktop_capturer.h" @@ -27,7 +27,7 @@ namespace webrtc { class SharedScreenCastStreamPrivate; class RTC_EXPORT SharedScreenCastStream - : public rtc::RefCountedNonVirtual { + : public webrtc::RefCountedNonVirtual { public: class Observer { public: @@ -35,6 +35,9 @@ class RTC_EXPORT SharedScreenCastStream virtual void OnCursorShapeChanged() = 0; virtual void OnDesktopFrameChanged() = 0; virtual void OnFailedToProcessBuffer() = 0; + virtual void OnBufferCorruptedMetadata() = 0; + virtual void OnBufferCorruptedData() = 0; + virtual void OnEmptyBuffer() = 0; virtual void OnStreamConfigured() = 0; virtual void OnFrameRateChanged(uint32_t frame_rate) = 0; @@ -43,7 +46,7 @@ class RTC_EXPORT SharedScreenCastStream virtual ~Observer() = default; }; - static rtc::scoped_refptr CreateDefault(); + static webrtc::scoped_refptr CreateDefault(); bool StartScreenCastStream(uint32_t stream_node_id); bool StartScreenCastStream(uint32_t stream_node_id, @@ -78,7 +81,7 @@ class RTC_EXPORT SharedScreenCastStream // Returns the most recent mouse cursor position. Will not return a value in // case we didn't manage to get it from PipeWire buffer. 
- absl::optional CaptureCursorPosition(); + std::optional CaptureCursorPosition(); ~SharedScreenCastStream(); diff --git a/modules/desktop_capture/linux/wayland/shared_screencast_stream_unittest.cc b/modules/desktop_capture/linux/wayland/shared_screencast_stream_unittest.cc index 6a72edd025..4ae7b78d6e 100644 --- a/modules/desktop_capture/linux/wayland/shared_screencast_stream_unittest.cc +++ b/modules/desktop_capture/linux/wayland/shared_screencast_stream_unittest.cc @@ -55,6 +55,9 @@ class PipeWireStreamTest : public ::testing::Test, MOCK_METHOD(void, OnCursorShapeChanged, (), (override)); MOCK_METHOD(void, OnDesktopFrameChanged, (), (override)); MOCK_METHOD(void, OnFailedToProcessBuffer, (), (override)); + MOCK_METHOD(void, OnBufferCorruptedMetadata, (), (override)); + MOCK_METHOD(void, OnBufferCorruptedData, (), (override)); + MOCK_METHOD(void, OnEmptyBuffer, (), (override)); MOCK_METHOD(void, OnStreamConfigured, (), (override)); MOCK_METHOD(void, OnFrameRateChanged, (uint32_t), (override)); @@ -74,15 +77,15 @@ class PipeWireStreamTest : public ::testing::Test, bool streaming_ = false; std::unique_ptr test_screencast_stream_provider_; - rtc::scoped_refptr shared_screencast_stream_; + webrtc::scoped_refptr shared_screencast_stream_; }; TEST_F(PipeWireStreamTest, TestPipeWire) { // Set expectations for PipeWire to successfully connect both streams - rtc::Event waitConnectEvent; - rtc::Event waitStartStreamingEvent; - rtc::Event waitStreamParamChangedEvent1; - rtc::Event waitStreamParamChangedEvent2; + webrtc::Event waitConnectEvent; + webrtc::Event waitStartStreamingEvent; + webrtc::Event waitStreamParamChangedEvent1; + webrtc::Event waitStreamParamChangedEvent2; EXPECT_CALL(*this, OnStreamReady(_)) .WillOnce(Invoke(this, &PipeWireStreamTest::StartScreenCastStream)); @@ -102,9 +105,10 @@ TEST_F(PipeWireStreamTest, TestPipeWire) { // Wait until we start streaming waitStartStreamingEvent.Wait(kShortWait); - rtc::Event frameRetrievedEvent; - EXPECT_CALL(*this, OnFrameRecorded).Times(3); + webrtc::Event frameRetrievedEvent; + EXPECT_CALL(*this, OnFrameRecorded).Times(6); EXPECT_CALL(*this, OnDesktopFrameChanged) + .Times(3) .WillRepeatedly([&frameRetrievedEvent] { frameRetrievedEvent.Set(); }); // Record a frame in FakePipeWireStream @@ -143,7 +147,7 @@ TEST_F(PipeWireStreamTest, TestPipeWire) { EXPECT_NE(frame->data(), frame2->data()); // This should result into overwriting a frame in use - rtc::Event frameRecordedEvent; + webrtc::Event frameRecordedEvent; RgbaColor blue_color(255, 0, 0); EXPECT_CALL(*this, OnFailedToProcessBuffer).WillOnce([&frameRecordedEvent] { frameRecordedEvent.Set(); @@ -156,6 +160,34 @@ TEST_F(PipeWireStreamTest, TestPipeWire) { frameRetrievedEvent.Wait(kShortWait); EXPECT_EQ(RgbaColor(frame->data()), blue_color); + // Check we don't process faulty buffers + webrtc::Event corruptedMetadataFrameEvent; + EXPECT_CALL(*this, OnBufferCorruptedMetadata) + .WillOnce([&corruptedMetadataFrameEvent] { + corruptedMetadataFrameEvent.Set(); + }); + + test_screencast_stream_provider_->RecordFrame( + blue_color, TestScreenCastStreamProvider::CorruptedMetadata); + corruptedMetadataFrameEvent.Wait(kShortWait); + + webrtc::Event corruptedDataFrameEvent; + EXPECT_CALL(*this, OnBufferCorruptedData) + .WillOnce([&corruptedDataFrameEvent] { corruptedDataFrameEvent.Set(); }); + + test_screencast_stream_provider_->RecordFrame( + blue_color, TestScreenCastStreamProvider::CorruptedData); + corruptedDataFrameEvent.Wait(kShortWait); + + webrtc::Event emptyFrameEvent; + EXPECT_CALL(*this, 
OnEmptyBuffer).WillOnce([&emptyFrameEvent] { + emptyFrameEvent.Set(); + }); + + test_screencast_stream_provider_->RecordFrame( + blue_color, TestScreenCastStreamProvider::EmptyData); + emptyFrameEvent.Wait(kShortWait); + // Update stream parameters. EXPECT_CALL(*this, OnFrameRateChanged(0)) .Times(1) diff --git a/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.cc b/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.cc index ee5c17e7d7..10551047a9 100644 --- a/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.cc +++ b/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.cc @@ -131,7 +131,8 @@ TestScreenCastStreamProvider::~TestScreenCastStreamProvider() { } } -void TestScreenCastStreamProvider::RecordFrame(RgbaColor rgba_color) { +void TestScreenCastStreamProvider::RecordFrame(RgbaColor rgba_color, + FrameDefect frame_defect) { const char* error; if (pw_stream_get_state(pw_stream_, &error) != PW_STREAM_STATE_STREAMING) { if (error) { @@ -163,13 +164,27 @@ void TestScreenCastStreamProvider::RecordFrame(RgbaColor rgba_color) { spa_data->chunk->size = height_ * stride; spa_data->chunk->stride = stride; - uint32_t color = rgba_color.ToUInt32(); - for (uint32_t i = 0; i < height_; i++) { - uint32_t* column = reinterpret_cast(data); - for (uint32_t j = 0; j < width_; j++) { - column[j] = color; + // Produce a frame with given defect + if (frame_defect == EmptyData) { + spa_data->chunk->size = 0; + } else if (frame_defect == CorruptedData) { + spa_data->chunk->flags = SPA_CHUNK_FLAG_CORRUPTED; + } else if (frame_defect == CorruptedMetadata) { + struct spa_meta_header* spa_header = + static_cast(spa_buffer_find_meta_data( + spa_buffer, SPA_META_Header, sizeof(spa_meta_header))); + if (spa_header) { + spa_header->flags = SPA_META_HEADER_FLAG_CORRUPTED; + } + } else { + uint32_t color = rgba_color.ToUInt32(); + for (uint32_t i = 0; i < height_; i++) { + uint32_t* column = reinterpret_cast(data); + for (uint32_t j = 0; j < width_; j++) { + column[j] = color; + } + data += stride; } - data += stride; } pw_stream_queue_buffer(pw_stream_, buffer); diff --git a/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.h b/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.h index d893aa63ab..f63a2e647c 100644 --- a/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.h +++ b/modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.h @@ -35,6 +35,8 @@ class TestScreenCastStreamProvider { virtual ~Observer() = default; }; + enum FrameDefect { None, EmptyData, CorruptedData, CorruptedMetadata }; + explicit TestScreenCastStreamProvider(Observer* observer, uint32_t width, uint32_t height); @@ -42,7 +44,7 @@ class TestScreenCastStreamProvider { uint32_t PipeWireNodeId(); - void RecordFrame(RgbaColor rgba_color); + void RecordFrame(RgbaColor rgba_color, FrameDefect frame_defect = None); void StartStreaming(); void StopStreaming(); diff --git a/modules/desktop_capture/linux/x11/mouse_cursor_monitor_x11.h b/modules/desktop_capture/linux/x11/mouse_cursor_monitor_x11.h index 980d254a0a..855d5c6e55 100644 --- a/modules/desktop_capture/linux/x11/mouse_cursor_monitor_x11.h +++ b/modules/desktop_capture/linux/x11/mouse_cursor_monitor_x11.h @@ -51,7 +51,7 @@ class MouseCursorMonitorX11 : public MouseCursorMonitor, // Captures current cursor shape and stores it in `cursor_shape_`. 
void CaptureCursor(); - rtc::scoped_refptr x_display_; + scoped_refptr x_display_; Callback* callback_; Mode mode_; Window window_; diff --git a/modules/desktop_capture/linux/x11/screen_capturer_x11.cc b/modules/desktop_capture/linux/x11/screen_capturer_x11.cc index d5dcd7af86..0702c755a1 100644 --- a/modules/desktop_capture/linux/x11/screen_capturer_x11.cc +++ b/modules/desktop_capture/linux/x11/screen_capturer_x11.cc @@ -239,7 +239,7 @@ void ScreenCapturerX11::Start(Callback* callback) { void ScreenCapturerX11::CaptureFrame() { TRACE_EVENT0("webrtc", "ScreenCapturerX11::CaptureFrame"); - int64_t capture_start_time_nanos = rtc::TimeNanos(); + int64_t capture_start_time_nanos = TimeNanos(); queue_.MoveToNextFrame(); if (queue_.current_frame() && queue_.current_frame()->IsShared()) { @@ -280,8 +280,8 @@ void ScreenCapturerX11::CaptureFrame() { } last_invalid_region_ = result->updated_region(); - result->set_capture_time_ms((rtc::TimeNanos() - capture_start_time_nanos) / - rtc::kNumNanosecsPerMillisec); + result->set_capture_time_ms((TimeNanos() - capture_start_time_nanos) / + kNumNanosecsPerMillisec); result->set_capturer_id(DesktopCapturerId::kX11CapturerLinux); callback_->OnCaptureResult(Result::SUCCESS, std::move(result)); } diff --git a/modules/desktop_capture/linux/x11/shared_x_display.cc b/modules/desktop_capture/linux/x11/shared_x_display.cc index d690b0e2ba..aee9183c53 100644 --- a/modules/desktop_capture/linux/x11/shared_x_display.cc +++ b/modules/desktop_capture/linux/x11/shared_x_display.cc @@ -31,7 +31,7 @@ SharedXDisplay::~SharedXDisplay() { } // static -rtc::scoped_refptr SharedXDisplay::Create( +scoped_refptr SharedXDisplay::Create( absl::string_view display_name) { Display* display = XOpenDisplay( display_name.empty() ? NULL : std::string(display_name).c_str()); @@ -39,11 +39,11 @@ rtc::scoped_refptr SharedXDisplay::Create( RTC_LOG(LS_ERROR) << "Unable to open display"; return nullptr; } - return rtc::scoped_refptr(new SharedXDisplay(display)); + return scoped_refptr(new SharedXDisplay(display)); } // static -rtc::scoped_refptr SharedXDisplay::CreateDefault() { +scoped_refptr SharedXDisplay::CreateDefault() { return Create(std::string()); } @@ -70,7 +70,7 @@ void SharedXDisplay::RemoveEventHandler(int type, XEventHandler* handler) { void SharedXDisplay::ProcessPendingXEvents() { // Hold reference to `this` to prevent it from being destroyed while // processing events. - rtc::scoped_refptr self(this); + scoped_refptr self(this); // Protect access to `event_handlers_` after incrementing the refcount for // `this` to ensure the instance is still valid when the lock is acquired. diff --git a/modules/desktop_capture/linux/x11/shared_x_display.h b/modules/desktop_capture/linux/x11/shared_x_display.h index c05fc46546..5c42b55b79 100644 --- a/modules/desktop_capture/linux/x11/shared_x_display.h +++ b/modules/desktop_capture/linux/x11/shared_x_display.h @@ -29,8 +29,7 @@ typedef union _XEvent XEvent; namespace webrtc { // A ref-counted object to store XDisplay connection. -class RTC_EXPORT SharedXDisplay - : public rtc::RefCountedNonVirtual { +class RTC_EXPORT SharedXDisplay : public RefCountedNonVirtual { public: class XEventHandler { public: @@ -43,12 +42,11 @@ class RTC_EXPORT SharedXDisplay // Creates a new X11 Display for the `display_name`. NULL is returned if X11 // connection failed. Equivalent to CreateDefault() when `display_name` is // empty. 
- static rtc::scoped_refptr Create( - absl::string_view display_name); + static scoped_refptr Create(absl::string_view display_name); // Creates X11 Display connection for the default display (e.g. specified in // DISPLAY). NULL is returned if X11 connection failed. - static rtc::scoped_refptr CreateDefault(); + static scoped_refptr CreateDefault(); Display* display() { return display_; } diff --git a/modules/desktop_capture/linux/x11/window_capturer_x11.h b/modules/desktop_capture/linux/x11/window_capturer_x11.h index ac591c272e..2605c54650 100644 --- a/modules/desktop_capture/linux/x11/window_capturer_x11.h +++ b/modules/desktop_capture/linux/x11/window_capturer_x11.h @@ -59,7 +59,7 @@ class WindowCapturerX11 : public DesktopCapturer, Callback* callback_ = nullptr; - rtc::scoped_refptr x_display_; + scoped_refptr x_display_; bool has_composite_extension_ = false; diff --git a/modules/desktop_capture/linux/x11/window_list_utils.cc b/modules/desktop_capture/linux/x11/window_list_utils.cc index ff2d467e29..41de4f84b4 100644 --- a/modules/desktop_capture/linux/x11/window_list_utils.cc +++ b/modules/desktop_capture/linux/x11/window_list_utils.cc @@ -120,8 +120,7 @@ int32_t GetWindowState(XAtomCache* cache, ::Window window) { return window_state.is_valid() ? *window_state.data() : WithdrawnState; } -bool GetWindowList(XAtomCache* cache, - rtc::FunctionView on_window) { +bool GetWindowList(XAtomCache* cache, FunctionView on_window) { RTC_DCHECK(cache); RTC_DCHECK(on_window); ::Display* const display = cache->display(); diff --git a/modules/desktop_capture/linux/x11/window_list_utils.h b/modules/desktop_capture/linux/x11/window_list_utils.h index 923842df14..d141b8ed5d 100644 --- a/modules/desktop_capture/linux/x11/window_list_utils.h +++ b/modules/desktop_capture/linux/x11/window_list_utils.h @@ -28,8 +28,7 @@ namespace webrtc { // native APIs failed. If multiple screens are attached to the `display`, this // function returns false only when native APIs failed on all screens. Menus, // panels and minimized windows will be ignored. -bool GetWindowList(XAtomCache* cache, - rtc::FunctionView on_window); +bool GetWindowList(XAtomCache* cache, FunctionView on_window); // Returns WM_STATE property of the `window`. This function returns // WithdrawnState if the `window` is missing. diff --git a/modules/desktop_capture/mac/desktop_configuration.mm b/modules/desktop_capture/mac/desktop_configuration.mm index 93fb3f6226..0b264d6f2c 100644 --- a/modules/desktop_capture/mac/desktop_configuration.mm +++ b/modules/desktop_capture/mac/desktop_configuration.mm @@ -10,22 +10,12 @@ #include "modules/desktop_capture/mac/desktop_configuration.h" +#include #include #include -#include #include "rtc_base/checks.h" -#if !defined(MAC_OS_X_VERSION_10_7) || \ - MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_7 - -@interface NSScreen (LionAPI) -- (CGFloat)backingScaleFactor; -- (NSRect)convertRectToBacking:(NSRect)aRect; -@end - -#endif // MAC_OS_X_VERSION_10_7 - namespace webrtc { namespace { @@ -40,12 +30,12 @@ DesktopRect NSRectToDesktopRect(const NSRect& ns_rect) { // Inverts the position of `rect` from bottom-up coordinates to top-down, // relative to `bounds`. 
-void InvertRectYOrigin(const DesktopRect& bounds, - DesktopRect* rect) { +void InvertRectYOrigin(const DesktopRect& bounds, DesktopRect* rect) { RTC_DCHECK_EQ(bounds.top(), 0); - *rect = DesktopRect::MakeXYWH( - rect->left(), bounds.bottom() - rect->bottom(), - rect->width(), rect->height()); + *rect = DesktopRect::MakeXYWH(rect->left(), + bounds.bottom() - rect->bottom(), + rect->width(), + rect->height()); } MacDisplayConfiguration GetConfigurationForScreen(NSScreen* screen) { @@ -60,16 +50,9 @@ MacDisplayConfiguration GetConfigurationForScreen(NSScreen* screen) { NSRect ns_bounds = [screen frame]; display_config.bounds = NSRectToDesktopRect(ns_bounds); - // If the host is running Mac OS X 10.7+ or later, query the scaling factor - // between logical and physical (aka "backing") pixels, otherwise assume 1:1. - if ([screen respondsToSelector:@selector(backingScaleFactor)] && - [screen respondsToSelector:@selector(convertRectToBacking:)]) { - display_config.dip_to_pixel_scale = [screen backingScaleFactor]; - NSRect ns_pixel_bounds = [screen convertRectToBacking: ns_bounds]; - display_config.pixel_bounds = NSRectToDesktopRect(ns_pixel_bounds); - } else { - display_config.pixel_bounds = display_config.bounds; - } + display_config.dip_to_pixel_scale = [screen backingScaleFactor]; + NSRect ns_pixel_bounds = [screen convertRectToBacking:ns_bounds]; + display_config.pixel_bounds = NSRectToDesktopRect(ns_pixel_bounds); // Determine if the display is built-in or external. display_config.is_builtin = CGDisplayIsBuiltin(display_config.id); @@ -114,7 +97,7 @@ MacDisplayConfiguration GetConfigurationForScreen(NSScreen* screen) { // DPI match that of the primary monitor. for (NSUInteger i = 0; i < [screens count]; ++i) { MacDisplayConfiguration display_config = - GetConfigurationForScreen([screens objectAtIndex: i]); + GetConfigurationForScreen([screens objectAtIndex:i]); if (i == 0) desktop_config.dip_to_pixel_scale = display_config.dip_to_pixel_scale; @@ -156,8 +139,7 @@ MacDisplayConfiguration GetConfigurationForScreen(NSScreen* screen) { // MacDesktopConfiguration::Equals. bool operator==(const MacDisplayConfiguration& left, const MacDisplayConfiguration& right) { - return left.id == right.id && - left.bounds.equals(right.bounds) && + return left.id == right.id && left.bounds.equals(right.bounds) && left.pixel_bounds.equals(right.pixel_bounds) && left.dip_to_pixel_scale == right.dip_to_pixel_scale; } @@ -170,18 +152,20 @@ MacDisplayConfiguration GetConfigurationForScreen(NSScreen* screen) { } const MacDisplayConfiguration* -MacDesktopConfiguration::FindDisplayConfigurationById( - CGDirectDisplayID id) { + MacDesktopConfiguration::FindDisplayConfigurationById( + CGDirectDisplayID id) { bool is_builtin = CGDisplayIsBuiltin(id); for (MacDisplayConfigurations::const_iterator it = displays.begin(); - it != displays.end(); ++it) { + it != displays.end(); + ++it) { // The MBP having both discrete and integrated graphic cards will do // automate graphics switching by default. When it switches from discrete to // integrated one, the current display ID of the built-in display will // change and this will cause screen capture stops. // So make screen capture of built-in display continuing even if its display // ID is changed. 
- if ((is_builtin && it->is_builtin) || (!is_builtin && it->id == id)) return &(*it); + if ((is_builtin && it->is_builtin) || (!is_builtin && it->id == id)) + return &(*it); } return NULL; } diff --git a/modules/desktop_capture/mac/desktop_configuration_monitor.h b/modules/desktop_capture/mac/desktop_configuration_monitor.h index 747295a538..b4650e31cc 100644 --- a/modules/desktop_capture/mac/desktop_configuration_monitor.h +++ b/modules/desktop_capture/mac/desktop_configuration_monitor.h @@ -25,7 +25,7 @@ namespace webrtc { // The class provides functions to synchronize capturing and display // reconfiguring across threads, and the up-to-date MacDesktopConfiguration. class DesktopConfigurationMonitor final - : public rtc::RefCountedNonVirtual { + : public webrtc::RefCountedNonVirtual { public: DesktopConfigurationMonitor(); ~DesktopConfigurationMonitor(); diff --git a/modules/desktop_capture/mac/desktop_frame_cgimage.h b/modules/desktop_capture/mac/desktop_frame_cgimage.h index fa1a6502d9..16db9e80a7 100644 --- a/modules/desktop_capture/mac/desktop_frame_cgimage.h +++ b/modules/desktop_capture/mac/desktop_frame_cgimage.h @@ -34,7 +34,7 @@ class RTC_EXPORT DesktopFrameCGImage final : public DesktopFrame { CGWindowID window_id); static std::unique_ptr CreateFromCGImage( - rtc::ScopedCFTypeRef cg_image); + webrtc::ScopedCFTypeRef cg_image); ~DesktopFrameCGImage() override; @@ -46,11 +46,11 @@ class RTC_EXPORT DesktopFrameCGImage final : public DesktopFrame { DesktopFrameCGImage(DesktopSize size, int stride, uint8_t* data, - rtc::ScopedCFTypeRef cg_image, - rtc::ScopedCFTypeRef cg_data); + webrtc::ScopedCFTypeRef cg_image, + webrtc::ScopedCFTypeRef cg_data); - const rtc::ScopedCFTypeRef cg_image_; - const rtc::ScopedCFTypeRef cg_data_; + const webrtc::ScopedCFTypeRef cg_image_; + const webrtc::ScopedCFTypeRef cg_data_; }; } // namespace webrtc diff --git a/modules/desktop_capture/mac/desktop_frame_cgimage.mm b/modules/desktop_capture/mac/desktop_frame_cgimage.mm index 0fb69b272d..1beb668ae5 100644 --- a/modules/desktop_capture/mac/desktop_frame_cgimage.mm +++ b/modules/desktop_capture/mac/desktop_frame_cgimage.mm @@ -21,7 +21,8 @@ std::unique_ptr DesktopFrameCGImage::CreateForDisplay( CGDirectDisplayID display_id) { // Create an image containing a snapshot of the display. - rtc::ScopedCFTypeRef cg_image(CGDisplayCreateImage(display_id)); + webrtc::ScopedCFTypeRef cg_image( + CGDisplayCreateImage(display_id)); if (!cg_image) { return nullptr; } @@ -30,8 +31,9 @@ } // static -std::unique_ptr DesktopFrameCGImage::CreateForWindow(CGWindowID window_id) { - rtc::ScopedCFTypeRef cg_image( +std::unique_ptr DesktopFrameCGImage::CreateForWindow( + CGWindowID window_id) { + webrtc::ScopedCFTypeRef cg_image( CGWindowListCreateImage(CGRectNull, kCGWindowListOptionIncludingWindow, window_id, @@ -45,11 +47,12 @@ // static std::unique_ptr DesktopFrameCGImage::CreateFromCGImage( - rtc::ScopedCFTypeRef cg_image) { + webrtc::ScopedCFTypeRef cg_image) { // Verify that the image has 32-bit depth. int bits_per_pixel = CGImageGetBitsPerPixel(cg_image.get()); if (bits_per_pixel / 8 != DesktopFrame::kBytesPerPixel) { - RTC_LOG(LS_ERROR) << "CGDisplayCreateImage() returned imaged with " << bits_per_pixel + RTC_LOG(LS_ERROR) << "CGDisplayCreateImage() returned imaged with " + << bits_per_pixel << " bits per pixel. 
Only 32-bit depth is supported."; return nullptr; } @@ -58,16 +61,19 @@ CGDataProviderRef cg_provider = CGImageGetDataProvider(cg_image.get()); RTC_DCHECK(cg_provider); - // CGDataProviderCopyData returns a new data object containing a copy of the provider’s - // data. - rtc::ScopedCFTypeRef cg_data(CGDataProviderCopyData(cg_provider)); + // CGDataProviderCopyData returns a new data object containing a copy of the + // provider’s data. + webrtc::ScopedCFTypeRef cg_data( + CGDataProviderCopyData(cg_provider)); RTC_DCHECK(cg_data); - // CFDataGetBytePtr returns a read-only pointer to the bytes of a CFData object. + // CFDataGetBytePtr returns a read-only pointer to the bytes of a CFData + // object. uint8_t* data = const_cast(CFDataGetBytePtr(cg_data.get())); RTC_DCHECK(data); - DesktopSize size(CGImageGetWidth(cg_image.get()), CGImageGetHeight(cg_image.get())); + DesktopSize size(CGImageGetWidth(cg_image.get()), + CGImageGetHeight(cg_image.get())); int stride = CGImageGetBytesPerRow(cg_image.get()); std::unique_ptr frame( @@ -75,17 +81,21 @@ CGColorSpaceRef cg_color_space = CGImageGetColorSpace(cg_image.get()); if (cg_color_space) { -#if !defined(MAC_OS_X_VERSION_10_13) || MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_13 - rtc::ScopedCFTypeRef cf_icc_profile(CGColorSpaceCopyICCProfile(cg_color_space)); +#if !defined(MAC_OS_X_VERSION_10_13) || \ + MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_13 + webrtc::ScopedCFTypeRef cf_icc_profile( + CGColorSpaceCopyICCProfile(cg_color_space)); #else - rtc::ScopedCFTypeRef cf_icc_profile(CGColorSpaceCopyICCData(cg_color_space)); + webrtc::ScopedCFTypeRef cf_icc_profile( + CGColorSpaceCopyICCData(cg_color_space)); #endif if (cf_icc_profile) { - const uint8_t* data_as_byte = - reinterpret_cast(CFDataGetBytePtr(cf_icc_profile.get())); + const uint8_t* data_as_byte = reinterpret_cast( + CFDataGetBytePtr(cf_icc_profile.get())); const size_t data_size = CFDataGetLength(cf_icc_profile.get()); if (data_as_byte && data_size > 0) { - frame->set_icc_profile(std::vector(data_as_byte, data_as_byte + data_size)); + frame->set_icc_profile( + std::vector(data_as_byte, data_as_byte + data_size)); } } } @@ -93,12 +103,15 @@ return frame; } -DesktopFrameCGImage::DesktopFrameCGImage(DesktopSize size, - int stride, - uint8_t* data, - rtc::ScopedCFTypeRef cg_image, - rtc::ScopedCFTypeRef cg_data) - : DesktopFrame(size, stride, data, nullptr), cg_image_(cg_image), cg_data_(cg_data) { +DesktopFrameCGImage::DesktopFrameCGImage( + DesktopSize size, + int stride, + uint8_t* data, + webrtc::ScopedCFTypeRef cg_image, + webrtc::ScopedCFTypeRef cg_data) + : DesktopFrame(size, stride, data, nullptr), + cg_image_(cg_image), + cg_data_(cg_data) { RTC_DCHECK(cg_image_); RTC_DCHECK(cg_data_); } diff --git a/modules/desktop_capture/mac/desktop_frame_iosurface.h b/modules/desktop_capture/mac/desktop_frame_iosurface.h index 73da0f693c..3dc2abad3e 100644 --- a/modules/desktop_capture/mac/desktop_frame_iosurface.h +++ b/modules/desktop_capture/mac/desktop_frame_iosurface.h @@ -24,9 +24,10 @@ namespace webrtc { class DesktopFrameIOSurface final : public DesktopFrame { public: // Lock an IOSurfaceRef containing a snapshot of a display. Return NULL if - // failed to lock. + // failed to lock. `rect` specifies the portion of the surface that the + // DesktopFrame should be cropped to. 
static std::unique_ptr Wrap( - rtc::ScopedCFTypeRef io_surface); + webrtc::ScopedCFTypeRef io_surface, CGRect rect = {}); ~DesktopFrameIOSurface() override; @@ -34,10 +35,18 @@ class DesktopFrameIOSurface final : public DesktopFrame { DesktopFrameIOSurface& operator=(const DesktopFrameIOSurface&) = delete; private: - // This constructor expects `io_surface` to hold a non-null IOSurfaceRef. - explicit DesktopFrameIOSurface(rtc::ScopedCFTypeRef io_surface); - - const rtc::ScopedCFTypeRef io_surface_; + // `io_surface` must hold a non-null IOSurfaceRef that is already locked. + // `data` is the address of the first byte of data in `io_surface`'s locked + // buffer. + // `width` and `height` make up the dimensions of `io_surface` in pixels. + // `stride` is the number of bytes of a single row of pixels in `data`. + DesktopFrameIOSurface(webrtc::ScopedCFTypeRef io_surface, + uint8_t* data, + int32_t width, + int32_t height, + int32_t stride); + + const webrtc::ScopedCFTypeRef io_surface_; }; } // namespace webrtc diff --git a/modules/desktop_capture/mac/desktop_frame_iosurface.mm b/modules/desktop_capture/mac/desktop_frame_iosurface.mm index b59b319db9..33f96da963 100644 --- a/modules/desktop_capture/mac/desktop_frame_iosurface.mm +++ b/modules/desktop_capture/mac/desktop_frame_iosurface.mm @@ -12,18 +12,20 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" namespace webrtc { // static std::unique_ptr DesktopFrameIOSurface::Wrap( - rtc::ScopedCFTypeRef io_surface) { + webrtc::ScopedCFTypeRef io_surface, CGRect rect) { if (!io_surface) { return nullptr; } IOSurfaceIncrementUseCount(io_surface.get()); - IOReturn status = IOSurfaceLock(io_surface.get(), kIOSurfaceLockReadOnly, nullptr); + IOReturn status = + IOSurfaceLock(io_surface.get(), kIOSurfaceLockReadOnly, nullptr); if (status != kIOReturnSuccess) { RTC_LOG(LS_ERROR) << "Failed to lock the IOSurface with status " << status; IOSurfaceDecrementUseCount(io_surface.get()); @@ -33,22 +35,58 @@ // Verify that the image has 32-bit depth. int bytes_per_pixel = IOSurfaceGetBytesPerElement(io_surface.get()); if (bytes_per_pixel != DesktopFrame::kBytesPerPixel) { - RTC_LOG(LS_ERROR) << "CGDisplayStream handler returned IOSurface with " << (8 * bytes_per_pixel) + RTC_LOG(LS_ERROR) << "CGDisplayStream handler returned IOSurface with " + << (8 * bytes_per_pixel) << " bits per pixel. 
Only 32-bit depth is supported."; IOSurfaceUnlock(io_surface.get(), kIOSurfaceLockReadOnly, nullptr); IOSurfaceDecrementUseCount(io_surface.get()); return nullptr; } - return std::unique_ptr(new DesktopFrameIOSurface(io_surface)); + const size_t surface_width = IOSurfaceGetWidth(io_surface.get()); + const size_t surface_height = IOSurfaceGetHeight(io_surface.get()); + const int32_t stride = + checked_cast(IOSurfaceGetBytesPerRow(io_surface.get())); + uint8_t* const data = + static_cast(IOSurfaceGetBaseAddress(io_surface.get())); + int32_t width = checked_cast(surface_width); + int32_t height = checked_cast(surface_height); + ptrdiff_t offset = 0; + ptrdiff_t offset_columns = 0; + ptrdiff_t offset_rows = 0; + if (rect.size.width > 0 && rect.size.height > 0) { + width = checked_cast(std::floor(rect.size.width)); + height = checked_cast(std::floor(rect.size.height)); + offset_columns = checked_cast(std::ceil(rect.origin.x)); + offset_rows = checked_cast(std::ceil(rect.origin.y)); + offset = stride * offset_rows + bytes_per_pixel * offset_columns; + } + + RTC_LOG(LS_VERBOSE) << "DesktopFrameIOSurface wrapping IOSurface with size " + << surface_width << "x" << surface_height + << ". Cropping to (" << offset_columns << "," + << offset_rows << "; " << width << "x" << height + << "). Stride=" << stride / bytes_per_pixel + << ", buffer-offset-px=" << offset / bytes_per_pixel + << ", buffer-offset-bytes=" << offset; + + RTC_CHECK_GE(surface_width, offset_columns + width); + RTC_CHECK_GE(surface_height, offset_rows + height); + RTC_CHECK_GE(offset, 0); + RTC_CHECK_LE(offset + ((height - 1) * stride) + (width * bytes_per_pixel) - 1, + IOSurfaceGetAllocSize(io_surface.get())); + + return std::unique_ptr(new DesktopFrameIOSurface( + io_surface, data + offset, width, height, stride)); } -DesktopFrameIOSurface::DesktopFrameIOSurface(rtc::ScopedCFTypeRef io_surface) - : DesktopFrame( - DesktopSize(IOSurfaceGetWidth(io_surface.get()), IOSurfaceGetHeight(io_surface.get())), - IOSurfaceGetBytesPerRow(io_surface.get()), - static_cast(IOSurfaceGetBaseAddress(io_surface.get())), - nullptr), +DesktopFrameIOSurface::DesktopFrameIOSurface( + webrtc::ScopedCFTypeRef io_surface, + uint8_t* data, + int32_t width, + int32_t height, + int32_t stride) + : DesktopFrame(DesktopSize(width, height), stride, data, nullptr), io_surface_(io_surface) { RTC_DCHECK(io_surface_); } diff --git a/modules/desktop_capture/mac/desktop_frame_provider.h b/modules/desktop_capture/mac/desktop_frame_provider.h index aad28d2f30..24276f1e4f 100644 --- a/modules/desktop_capture/mac/desktop_frame_provider.h +++ b/modules/desktop_capture/mac/desktop_frame_provider.h @@ -41,11 +41,13 @@ class DesktopFrameProvider { // OS sends the latest IOSurfaceRef through // CGDisplayStreamFrameAvailableHandler callback; we store it here. void InvalidateIOSurface(CGDirectDisplayID display_id, - rtc::ScopedCFTypeRef io_surface); + ScopedCFTypeRef io_surface); // Expected to be called before stopping the CGDisplayStreamRef streams. 
void Release(); + bool allow_iosurface() const { return allow_iosurface_; } + private: SequenceChecker thread_checker_; const bool allow_iosurface_; diff --git a/modules/desktop_capture/mac/desktop_frame_provider.mm b/modules/desktop_capture/mac/desktop_frame_provider.mm index 009504a22b..01c916cd53 100644 --- a/modules/desktop_capture/mac/desktop_frame_provider.mm +++ b/modules/desktop_capture/mac/desktop_frame_provider.mm @@ -41,8 +41,9 @@ return io_surfaces_[display_id]->Share(); } -void DesktopFrameProvider::InvalidateIOSurface(CGDirectDisplayID display_id, - rtc::ScopedCFTypeRef io_surface) { +void DesktopFrameProvider::InvalidateIOSurface( + CGDirectDisplayID display_id, + webrtc::ScopedCFTypeRef io_surface) { RTC_DCHECK(thread_checker_.IsCurrent()); if (!allow_iosurface_) { diff --git a/modules/desktop_capture/mac/desktop_frame_utils.cc b/modules/desktop_capture/mac/desktop_frame_utils.cc index 32ba67e999..a2f04c724a 100644 --- a/modules/desktop_capture/mac/desktop_frame_utils.cc +++ b/modules/desktop_capture/mac/desktop_frame_utils.cc @@ -15,7 +15,7 @@ namespace webrtc { std::unique_ptr CreateDesktopFrameFromCGImage( - rtc::ScopedCFTypeRef cg_image) { + webrtc::ScopedCFTypeRef cg_image) { return DesktopFrameCGImage::CreateFromCGImage(cg_image); } diff --git a/modules/desktop_capture/mac/desktop_frame_utils.h b/modules/desktop_capture/mac/desktop_frame_utils.h index 1be2436098..a7be7c5b8e 100644 --- a/modules/desktop_capture/mac/desktop_frame_utils.h +++ b/modules/desktop_capture/mac/desktop_frame_utils.h @@ -20,7 +20,7 @@ namespace webrtc { std::unique_ptr RTC_EXPORT -CreateDesktopFrameFromCGImage(rtc::ScopedCFTypeRef cg_image); +CreateDesktopFrameFromCGImage(webrtc::ScopedCFTypeRef cg_image); } // namespace webrtc diff --git a/modules/desktop_capture/mac/full_screen_mac_application_handler.cc b/modules/desktop_capture/mac/full_screen_mac_application_handler.cc index 45cd3223d2..48767ba6ab 100644 --- a/modules/desktop_capture/mac/full_screen_mac_application_handler.cc +++ b/modules/desktop_capture/mac/full_screen_mac_application_handler.cc @@ -68,7 +68,7 @@ class FullScreenMacApplicationHandler : public FullScreenApplicationHandler { protected: using CachePredicate = - rtc::FunctionView; + webrtc::FunctionView; void InvalidateCacheIfNeeded(const DesktopCapturer::SourceList& source_list, int64_t timestamp, diff --git a/modules/desktop_capture/mac/sck_picker_handle.h b/modules/desktop_capture/mac/sck_picker_handle.h new file mode 100644 index 0000000000..bc32dfd5f7 --- /dev/null +++ b/modules/desktop_capture/mac/sck_picker_handle.h @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_DESKTOP_CAPTURE_MAC_SCK_PICKER_HANDLE_H_ +#define MODULES_DESKTOP_CAPTURE_MAC_SCK_PICKER_HANDLE_H_ + +#include +#include +#include +#include "modules/desktop_capture/desktop_capturer.h" + +@class SCContentSharingPicker; +@class SCStream; + +namespace webrtc { + +// Helper class to manage multiple users of SCContentSharingPicker. +// +// The `active` and `maximumStreamCount` properties are automatically managed on +// `SCContentSharingPicker.sharedPicker`, which is what is returned from +// GetPicker(). 
+// +// When using this class, for stream limits to work, only create one stream per +// handle. +// +// Designed for single thread use. +class API_AVAILABLE(macos(14.0)) SckPickerHandleInterface { + public: + virtual ~SckPickerHandleInterface() = default; + // Effectively identical to `SCContentSharingPicker.sharedPicker`. + virtual SCContentSharingPicker* GetPicker() const = 0; + // A SourceId unique to this handle. + virtual DesktopCapturer::SourceId Source() const = 0; +}; + +// Returns a newly created picker handle if the stream count limit has not been +// reached, null otherwise. +std::unique_ptr API_AVAILABLE(macos(14.0)) + CreateSckPickerHandle(); + +} // namespace webrtc + +#endif // MODULES_DESKTOP_CAPTURE_MAC_SCK_PICKER_HANDLE_H_ diff --git a/modules/desktop_capture/mac/sck_picker_handle.mm b/modules/desktop_capture/mac/sck_picker_handle.mm new file mode 100644 index 0000000000..c1b4db19f6 --- /dev/null +++ b/modules/desktop_capture/mac/sck_picker_handle.mm @@ -0,0 +1,113 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sck_picker_handle.h" + +#import + +#include "absl/base/attributes.h" +#include "api/sequence_checker.h" + +#include +#include + +namespace webrtc { + +class SckPickerProxy; + +class API_AVAILABLE(macos(14.0)) SckPickerProxy { + public: + static SckPickerProxy* Get() { + static SckPickerProxy* g_picker = new SckPickerProxy(); + return g_picker; + } + + SckPickerProxy() : thread_checker_(SequenceChecker::kDetached) {} + + bool AtCapacity() const { + RTC_DCHECK_RUN_ON(&thread_checker_); + return handle_count_ == kMaximumStreamCount; + } + + SCContentSharingPicker* GetPicker() const { + return SCContentSharingPicker.sharedPicker; + } + + ABSL_MUST_USE_RESULT std::optional + AcquireSourceId() { + RTC_DCHECK_RUN_ON(&thread_checker_); + if (AtCapacity()) { + return std::nullopt; + } + if (handle_count_ == 0) { + auto* picker = GetPicker(); + picker.maximumStreamCount = + [NSNumber numberWithUnsignedInt:kMaximumStreamCount]; + picker.active = YES; + } + handle_count_ += 1; + unique_source_id_ += 1; + return unique_source_id_; + } + + void RelinquishSourceId(DesktopCapturer::SourceId source) { + RTC_DCHECK_RUN_ON(&thread_checker_); + handle_count_ -= 1; + if (handle_count_ > 0) { + return; + } + // Detach now in case the next user (possibly after a long time) uses a + // different thread. + thread_checker_.Detach(); + GetPicker().active = NO; + } + + private: + webrtc::SequenceChecker thread_checker_; + // 100 is an arbitrary number that seems high enough to never get reached, + // while still providing a reasonably low upper bound. 
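The proxy above is essentially a reference-counted on/off switch for the shared picker: the first AcquireSourceId() activates it and sets the stream budget, and the last RelinquishSourceId() deactivates it, with ids never reused. The same shape with the ScreenCaptureKit calls stripped out looks roughly like this (class name and the std::function callback are illustrative):

#include <cstddef>
#include <cstdint>
#include <functional>
#include <optional>
#include <utility>

// Hands out unique ids up to a fixed capacity and toggles an "active"
// callback when the first id is taken and the last one is returned.
class ActivationCounter {
 public:
  ActivationCounter(size_t capacity, std::function<void(bool)> set_active)
      : capacity_(capacity), set_active_(std::move(set_active)) {}

  std::optional<int64_t> Acquire() {
    if (count_ == capacity_) return std::nullopt;  // At capacity.
    if (count_ == 0) set_active_(true);            // First user: turn on.
    ++count_;
    return ++next_id_;                             // Ids are never reused.
  }

  void Release() {
    if (--count_ == 0) set_active_(false);         // Last user: turn off.
  }

 private:
  const size_t capacity_;
  const std::function<void(bool)> set_active_;
  size_t count_ = 0;
  int64_t next_id_ = 0;
};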
+ static constexpr size_t kMaximumStreamCount = 100; + size_t handle_count_ RTC_GUARDED_BY(thread_checker_) = 0; + DesktopCapturer::SourceId unique_source_id_ RTC_GUARDED_BY(thread_checker_) = + 0; +}; + +class API_AVAILABLE(macos(14.0)) SckPickerHandle + : public SckPickerHandleInterface { + public: + static std::unique_ptr Create(SckPickerProxy* proxy) { + std::optional id = proxy->AcquireSourceId(); + if (!id) { + return nullptr; + } + return std::unique_ptr(new SckPickerHandle(proxy, *id)); + } + + ~SckPickerHandle() { proxy_->RelinquishSourceId(source_); } + + SCContentSharingPicker* GetPicker() const override { + return proxy_->GetPicker(); + } + + DesktopCapturer::SourceId Source() const override { return source_; } + + private: + SckPickerHandle(SckPickerProxy* proxy, DesktopCapturer::SourceId source) + : proxy_(proxy), source_(source) {} + + SckPickerProxy* const proxy_; + const DesktopCapturer::SourceId source_; +}; + +std::unique_ptr CreateSckPickerHandle() { + return SckPickerHandle::Create(SckPickerProxy::Get()); +} + +} // namespace webrtc diff --git a/modules/desktop_capture/mac/screen_capturer_mac.h b/modules/desktop_capture/mac/screen_capturer_mac.h index 7e38b5bd08..127cf5ee24 100644 --- a/modules/desktop_capture/mac/screen_capturer_mac.h +++ b/modules/desktop_capture/mac/screen_capturer_mac.h @@ -36,9 +36,10 @@ class DisplayStreamManager; // A class to perform video frame capturing for mac. class ScreenCapturerMac final : public DesktopCapturer { public: - ScreenCapturerMac(rtc::scoped_refptr desktop_config_monitor, - bool detect_updated_region, - bool allow_iosurface); + ScreenCapturerMac( + scoped_refptr desktop_config_monitor, + bool detect_updated_region, + bool allow_iosurface); ~ScreenCapturerMac() override; ScreenCapturerMac(const ScreenCapturerMac&) = delete; @@ -101,7 +102,7 @@ class ScreenCapturerMac final : public DesktopCapturer { DesktopRegion last_invalid_region_; // Monitoring display reconfiguration. - rtc::scoped_refptr desktop_config_monitor_; + scoped_refptr desktop_config_monitor_; CGWindowID excluded_window_ = 0; diff --git a/modules/desktop_capture/mac/screen_capturer_mac.mm b/modules/desktop_capture/mac/screen_capturer_mac.mm index 8f0c68d48b..5f36971ea9 100644 --- a/modules/desktop_capture/mac/screen_capturer_mac.mm +++ b/modules/desktop_capture/mac/screen_capturer_mac.mm @@ -20,97 +20,17 @@ #include "rtc_base/trace_event.h" #include "sdk/objc/helpers/scoped_cftyperef.h" -// All these symbols have incorrect availability annotations in the 13.3 SDK. -// These have the correct annotation. See https://crbug.com/1431897. -// TODO(thakis): Remove this once FB12109479 is fixed and we updated to an SDK -// with the fix. 
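Callers of CreateSckPickerHandle() above are expected to treat a null return as "stream budget exhausted" and to keep the handle alive for the lifetime of its single stream. A hedged usage sketch (the unique_ptr element type is elided in the hunk above and assumed to be SckPickerHandleInterface):

#include <os/availability.h>

#include <memory>

#include "modules/desktop_capture/mac/sck_picker_handle.h"

// Null when the stream budget is exhausted. Keep the handle alive for as
// long as its stream runs; destroying the last live handle deactivates the
// shared picker again.
API_AVAILABLE(macos(14.0))
std::unique_ptr<webrtc::SckPickerHandleInterface> ReservePickerSlot() {
  return webrtc::CreateSckPickerHandle();
}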
- -static CGDisplayStreamRef __nullable - wrapCGDisplayStreamCreate(CGDirectDisplayID display, - size_t outputWidth, - size_t outputHeight, - int32_t pixelFormat, - CFDictionaryRef __nullable properties, - CGDisplayStreamFrameAvailableHandler __nullable handler) - CG_AVAILABLE_BUT_DEPRECATED( - 10.8, - 14.0, - "Please use ScreenCaptureKit API's initWithFilter:configuration:delegate: instead") { -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wunguarded-availability-new" - return CGDisplayStreamCreate( - display, outputWidth, outputHeight, pixelFormat, properties, handler); -#pragma clang diagnostic pop -} - -static CFRunLoopSourceRef __nullable - wrapCGDisplayStreamGetRunLoopSource(CGDisplayStreamRef cg_nullable displayStream) - CG_AVAILABLE_BUT_DEPRECATED(10.8, - 14.0, - "There is no direct replacement for this function. Please use " - "ScreenCaptureKit API's SCStream to replace CGDisplayStream") { -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wunguarded-availability-new" - return CGDisplayStreamGetRunLoopSource(displayStream); -#pragma clang diagnostic pop -} - -static CGError wrapCGDisplayStreamStart(CGDisplayStreamRef cg_nullable displayStream) - CG_AVAILABLE_BUT_DEPRECATED(10.8, - 14.0, - "Please use ScreenCaptureKit API's " - "startCaptureWithCompletionHandler: to start a stream instead") { -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wunguarded-availability-new" - return CGDisplayStreamStart(displayStream); -#pragma clang diagnostic pop -} - -static CGError wrapCGDisplayStreamStop(CGDisplayStreamRef cg_nullable displayStream) - CG_AVAILABLE_BUT_DEPRECATED(10.8, - 14.0, - "Please use ScreenCaptureKit API's " - "stopCaptureWithCompletionHandler: to stop a stream instead") { -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wunguarded-availability-new" - return CGDisplayStreamStop(displayStream); -#pragma clang diagnostic pop -} - -static CFStringRef wrapkCGDisplayStreamShowCursor() CG_AVAILABLE_BUT_DEPRECATED( - 10.8, - 14.0, - "Please use ScreenCaptureKit API's SCStreamConfiguration showsCursor property instead") { -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wunguarded-availability-new" - return kCGDisplayStreamShowCursor; -#pragma clang diagnostic pop -} - -static const CGRect* __nullable - wrapCGDisplayStreamUpdateGetRects(CGDisplayStreamUpdateRef __nullable updateRef, - CGDisplayStreamUpdateRectType rectType, - size_t* rectCount) - CG_AVAILABLE_BUT_DEPRECATED(10.8, - 14.0, - "Please use ScreenCaptureKit API's SCStreamFrameInfo with " - "SCStreamFrameInfoContentRect instead") { -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wunguarded-availability-new" - return CGDisplayStreamUpdateGetRects(updateRef, rectType, rectCount); -#pragma clang diagnostic pop -} - namespace webrtc { namespace { // Scales all coordinates of a rect by a specified factor. 
DesktopRect ScaleAndRoundCGRect(const CGRect& rect, float scale) { - return DesktopRect::MakeLTRB(static_cast(floor(rect.origin.x * scale)), - static_cast(floor(rect.origin.y * scale)), - static_cast(ceil((rect.origin.x + rect.size.width) * scale)), - static_cast(ceil((rect.origin.y + rect.size.height) * scale))); + return DesktopRect::MakeLTRB( + static_cast(floor(rect.origin.x * scale)), + static_cast(floor(rect.origin.y * scale)), + static_cast(ceil((rect.origin.x + rect.size.width) * scale)), + static_cast(ceil((rect.origin.y + rect.size.height) * scale))); } // Copy pixels in the `rect` from `src_place` to `dest_plane`. `rect` should be @@ -144,8 +64,8 @@ void CopyRect(const uint8_t* src_plane, CFArrayRef CreateWindowListWithExclusion(CGWindowID window_to_exclude) { if (!window_to_exclude) return nullptr; - CFArrayRef all_windows = - CGWindowListCopyWindowInfo(kCGWindowListOptionOnScreenOnly, kCGNullWindowID); + CFArrayRef all_windows = CGWindowListCopyWindowInfo( + kCGWindowListOptionOnScreenOnly, kCGNullWindowID); if (!all_windows) return nullptr; CFMutableArrayRef returned_array = @@ -153,8 +73,8 @@ CFArrayRef CreateWindowListWithExclusion(CGWindowID window_to_exclude) { bool found = false; for (CFIndex i = 0; i < CFArrayGetCount(all_windows); ++i) { - CFDictionaryRef window = - reinterpret_cast(CFArrayGetValueAtIndex(all_windows, i)); + CFDictionaryRef window = reinterpret_cast( + CFArrayGetValueAtIndex(all_windows, i)); CGWindowID id = GetWindowId(window); if (id == window_to_exclude) { @@ -174,7 +94,8 @@ CFArrayRef CreateWindowListWithExclusion(CGWindowID window_to_exclude) { // Returns the bounds of `window` in physical pixels, enlarged by a small amount // on four edges to take account of the border/shadow effects. -DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_scale) { +DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, + float dip_to_pixel_scale) { // The amount of pixels to add to the actual window bounds to take into // account of the border/shadow effects. static const int kBorderEffectSize = 20; @@ -184,13 +105,14 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s CFArrayRef window_id_array = CFArrayCreate(nullptr, reinterpret_cast(&ids), 1, nullptr); - CFArrayRef window_array = CGWindowListCreateDescriptionFromArray(window_id_array); + CFArrayRef window_array = + CGWindowListCreateDescriptionFromArray(window_id_array); if (CFArrayGetCount(window_array) > 0) { - CFDictionaryRef win = - reinterpret_cast(CFArrayGetValueAtIndex(window_array, 0)); - CFDictionaryRef bounds_ref = - reinterpret_cast(CFDictionaryGetValue(win, kCGWindowBounds)); + CFDictionaryRef win = reinterpret_cast( + CFArrayGetValueAtIndex(window_array, 0)); + CFDictionaryRef bounds_ref = reinterpret_cast( + CFDictionaryGetValue(win, kCGWindowBounds)); CGRectMakeWithDictionaryRepresentation(bounds_ref, &rect); } @@ -208,9 +130,10 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s // Create an image of the given region using the given `window_list`. // `pixel_bounds` should be in the primary display's coordinate in physical // pixels. -rtc::ScopedCFTypeRef CreateExcludedWindowRegionImage(const DesktopRect& pixel_bounds, - float dip_to_pixel_scale, - CFArrayRef window_list) { +webrtc::ScopedCFTypeRef CreateExcludedWindowRegionImage( + const DesktopRect& pixel_bounds, + float dip_to_pixel_scale, + CFArrayRef window_list) { CGRect window_bounds; // The origin is in DIP while the size is in physical pixels. 
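ScaleAndRoundCGRect() above floors the near edge and ceils the far edge so the resulting pixel rect covers every pixel the DIP rect touches, even when the rect is not aligned to the pixel grid. A small worked example of that rounding (illustrative function, assertions only for demonstration):

#include <CoreGraphics/CoreGraphics.h>

#include <cassert>
#include <cmath>

// A 1x1 DIP rect at x=10.3 on a 2x (Retina) display spans pixel columns
// 20..22, so the rounded pixel range must be [20, 23).
void DipRoundingExample() {
  const CGRect dip = CGRectMake(10.3, 4.0, 1.0, 1.0);
  const float scale = 2.0f;
  const int left = static_cast<int>(std::floor(dip.origin.x * scale));
  const int right =
      static_cast<int>(std::ceil((dip.origin.x + dip.size.width) * scale));
  assert(left == 20);
  assert(right == 23);
}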
That's what // CGWindowListCreateImageFromArray expects. @@ -219,14 +142,14 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s window_bounds.size.width = pixel_bounds.width(); window_bounds.size.height = pixel_bounds.height(); - return rtc::ScopedCFTypeRef( - CGWindowListCreateImageFromArray(window_bounds, window_list, kCGWindowImageDefault)); + return webrtc::ScopedCFTypeRef(CGWindowListCreateImageFromArray( + window_bounds, window_list, kCGWindowImageDefault)); } } // namespace ScreenCapturerMac::ScreenCapturerMac( - rtc::scoped_refptr desktop_config_monitor, + webrtc::scoped_refptr desktop_config_monitor, bool detect_updated_region, bool allow_iosurface) : detect_updated_region_(detect_updated_region), @@ -259,8 +182,11 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s RTC_DCHECK(thread_checker_.IsCurrent()); RTC_DCHECK(!callback_); RTC_DCHECK(callback); - TRACE_EVENT_INSTANT1( - "webrtc", "ScreenCapturermac::Start", "target display id ", current_display_); + TRACE_EVENT_INSTANT1("webrtc", + "ScreenCapturermac::Start", + TRACE_EVENT_SCOPE_GLOBAL, + "target display id ", + current_display_); callback_ = callback; // Start and operate CGDisplayStream handler all from capture thread. @@ -275,14 +201,15 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s void ScreenCapturerMac::CaptureFrame() { RTC_DCHECK(thread_checker_.IsCurrent()); TRACE_EVENT0("webrtc", "creenCapturerMac::CaptureFrame"); - int64_t capture_start_time_nanos = rtc::TimeNanos(); + int64_t capture_start_time_nanos = TimeNanos(); queue_.MoveToNextFrame(); if (queue_.current_frame() && queue_.current_frame()->IsShared()) { RTC_DLOG(LS_WARNING) << "Overwriting frame that is still shared."; } - MacDesktopConfiguration new_config = desktop_config_monitor_->desktop_configuration(); + MacDesktopConfiguration new_config = + desktop_config_monitor_->desktop_configuration(); if (!desktop_config_.Equals(new_config)) { desktop_config_ = new_config; // If the display configuraiton has changed then refresh capturer data @@ -314,7 +241,8 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s // If the current buffer is from an older generation then allocate a new one. // Note that we can't reallocate other buffers at this point, since the caller // may still be reading from them. 
- if (!queue_.current_frame()) queue_.ReplaceCurrentFrame(SharedDesktopFrame::Wrap(CreateFrame())); + if (!queue_.current_frame()) + queue_.ReplaceCurrentFrame(SharedDesktopFrame::Wrap(CreateFrame())); DesktopFrame* current_frame = queue_.current_frame(); @@ -326,22 +254,24 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s if (detect_updated_region_) { *new_frame->mutable_updated_region() = region; } else { - new_frame->mutable_updated_region()->AddRect(DesktopRect::MakeSize(new_frame->size())); + new_frame->mutable_updated_region()->AddRect( + DesktopRect::MakeSize(new_frame->size())); } if (current_display_) { const MacDisplayConfiguration* config = desktop_config_.FindDisplayConfigurationById(current_display_); if (config) { - new_frame->set_top_left( - config->bounds.top_left().subtract(desktop_config_.bounds.top_left())); + new_frame->set_top_left(config->bounds.top_left().subtract( + desktop_config_.bounds.top_left())); } } helper_.set_size_most_recent(new_frame->size()); - new_frame->set_capture_time_ms((rtc::TimeNanos() - capture_start_time_nanos) / - rtc::kNumNanosecsPerMillisec); + new_frame->set_capture_time_ms( + (webrtc::TimeNanos() - capture_start_time_nanos) / + webrtc::kNumNanosecsPerMillisec); callback_->OnCaptureResult(Result::SUCCESS, std::move(new_frame)); } @@ -365,7 +295,8 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s current_display_ = 0; } else { const MacDisplayConfiguration* config = - desktop_config_.FindDisplayConfigurationById(static_cast(id)); + desktop_config_.FindDisplayConfigurationById( + static_cast(id)); if (!config) return false; current_display_ = config->id; } @@ -374,11 +305,15 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s return true; } -bool ScreenCapturerMac::CgBlit(const DesktopFrame& frame, const DesktopRegion& region) { - // If not all screen region is dirty, copy the entire contents of the previous capture buffer, - // to capture over. - if (queue_.previous_frame() && !region.Equals(DesktopRegion(screen_pixel_bounds_))) { - memcpy(frame.data(), queue_.previous_frame()->data(), frame.stride() * frame.size().height()); +bool ScreenCapturerMac::CgBlit(const DesktopFrame& frame, + const DesktopRegion& region) { + // If not all screen region is dirty, copy the entire contents of the previous + // capture buffer, to capture over. + if (queue_.previous_frame() && + !region.Equals(DesktopRegion(screen_pixel_bounds_))) { + memcpy(frame.data(), + queue_.previous_frame()->data(), + frame.stride() * frame.size().height()); } MacDisplayConfigurations displays_to_capture; @@ -407,12 +342,15 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s // Capturing mixed-DPI on one surface is hard, so we only return displays // that match the "primary" display's DPI. The primary display is always // the first in the list. - if (i > 0 && display_config.dip_to_pixel_scale != displays_to_capture[0].dip_to_pixel_scale) { + if (i > 0 && + display_config.dip_to_pixel_scale != + displays_to_capture[0].dip_to_pixel_scale) { continue; } // Determine the display's position relative to the desktop, in pixels. DesktopRect display_bounds = display_config.pixel_bounds; - display_bounds.Translate(-screen_pixel_bounds_.left(), -screen_pixel_bounds_.top()); + display_bounds.Translate(-screen_pixel_bounds_.left(), + -screen_pixel_bounds_.top()); // Determine which parts of the blit region, if any, lay within the monitor. 
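CgBlit() above only repaints the dirty region, so when that region is not the whole screen it first copies the previous frame wholesale into the buffer being captured over; otherwise the untouched parts of the new frame would hold stale or uninitialized pixels. In isolation the seeding step is just (illustrative helper, assuming equally sized and equally strided frames):

#include <cstring>

#include "modules/desktop_capture/desktop_frame.h"

// Seed `dest` with the previous frame's pixels before painting only the
// dirty rectangles on top of it.
void SeedFromPreviousFrame(const webrtc::DesktopFrame& previous,
                           webrtc::DesktopFrame& dest) {
  std::memcpy(dest.data(), previous.data(),
              dest.stride() * dest.size().height());
}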
DesktopRegion copy_region = region; @@ -423,18 +361,20 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s copy_region.Translate(-display_bounds.left(), -display_bounds.top()); DesktopRect excluded_window_bounds; - rtc::ScopedCFTypeRef excluded_image; + webrtc::ScopedCFTypeRef excluded_image; if (excluded_window_ && window_list) { // Get the region of the excluded window relative the primary display. - excluded_window_bounds = - GetExcludedWindowPixelBounds(excluded_window_, display_config.dip_to_pixel_scale); + excluded_window_bounds = GetExcludedWindowPixelBounds( + excluded_window_, display_config.dip_to_pixel_scale); excluded_window_bounds.IntersectWith(display_config.pixel_bounds); // Create the image under the excluded window first, because it's faster // than captuing the whole display. if (!excluded_window_bounds.is_empty()) { - excluded_image = CreateExcludedWindowRegionImage( - excluded_window_bounds, display_config.dip_to_pixel_scale, window_list); + excluded_image = + CreateExcludedWindowRegionImage(excluded_window_bounds, + display_config.dip_to_pixel_scale, + window_list); } } @@ -448,8 +388,8 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s int src_bytes_per_row = frame_source->stride(); RTC_DCHECK(display_base_address); - // `frame_source` size may be different from display_bounds in case the screen was - // resized recently. + // `frame_source` size may be different from display_bounds in case the + // screen was resized recently. copy_region.IntersectWith(frame_source->rect()); // Copy the dirty region from the display buffer into our desktop buffer. @@ -465,7 +405,8 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s if (excluded_image) { CGDataProviderRef provider = CGImageGetDataProvider(excluded_image.get()); - rtc::ScopedCFTypeRef excluded_image_data(CGDataProviderCopyData(provider)); + webrtc::ScopedCFTypeRef excluded_image_data( + CGDataProviderCopyData(provider)); RTC_DCHECK(excluded_image_data); display_base_address = CFDataGetBytePtr(excluded_image_data.get()); src_bytes_per_row = CGImageGetBytesPerRow(excluded_image.get()); @@ -476,14 +417,18 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s window_bounds_relative_to_desktop.Translate(-screen_pixel_bounds_.left(), -screen_pixel_bounds_.top()); - DesktopRect rect_to_copy = DesktopRect::MakeSize(excluded_window_bounds.size()); - rect_to_copy.IntersectWith(DesktopRect::MakeWH(CGImageGetWidth(excluded_image.get()), - CGImageGetHeight(excluded_image.get()))); + DesktopRect rect_to_copy = + DesktopRect::MakeSize(excluded_window_bounds.size()); + rect_to_copy.IntersectWith( + DesktopRect::MakeWH(CGImageGetWidth(excluded_image.get()), + CGImageGetHeight(excluded_image.get()))); - if (CGImageGetBitsPerPixel(excluded_image.get()) / 8 == DesktopFrame::kBytesPerPixel) { + if (CGImageGetBitsPerPixel(excluded_image.get()) / 8 == + DesktopFrame::kBytesPerPixel) { CopyRect(display_base_address, src_bytes_per_row, - frame.GetFrameDataAtPos(window_bounds_relative_to_desktop.top_left()), + frame.GetFrameDataAtPos( + window_bounds_relative_to_desktop.top_left()), frame.stride(), DesktopFrame::kBytesPerPixel, rect_to_copy); @@ -520,6 +465,10 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s bool ScreenCapturerMac::RegisterRefreshAndMoveHandlers() { RTC_DCHECK(thread_checker_.IsCurrent()); + if (!desktop_frame_provider_.allow_iosurface()) { + return true; + } + 
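The dirty-region and excluded-window blits above both funnel into a stride-aware rectangular copy: one memcpy per row, with source and destination each advanced by their own stride. A minimal sketch of that inner loop (the parameter layout mirrors the CopyRect() helper used here, written out for clarity):

#include <cstdint>
#include <cstring>

// Copies a width x height block of 4-byte pixels between two buffers whose
// rows may be padded to different strides.
void CopyPixelRect(const uint8_t* src, int src_stride,
                   uint8_t* dest, int dest_stride,
                   int width, int height) {
  constexpr int kBytesPerPixel = 4;
  for (int y = 0; y < height; ++y) {
    std::memcpy(dest, src, width * kBytesPerPixel);
    src += src_stride;
    dest += dest_stride;
  }
}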
desktop_config_ = desktop_config_monitor_->desktop_configuration(); for (const auto& config : desktop_config_.displays) { size_t pixel_width = config.pixel_bounds.width(); @@ -528,10 +477,11 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s CGDirectDisplayID display_id = config.id; DesktopVector display_origin = config.pixel_bounds.top_left(); - CGDisplayStreamFrameAvailableHandler handler = ^(CGDisplayStreamFrameStatus status, - uint64_t display_time, - IOSurfaceRef frame_surface, - CGDisplayStreamUpdateRef updateRef) { + CGDisplayStreamFrameAvailableHandler handler = ^( + CGDisplayStreamFrameStatus status, + uint64_t /* display_time */, + IOSurfaceRef frame_surface, + CGDisplayStreamUpdateRef updateRef) { RTC_DCHECK(thread_checker_.IsCurrent()); if (status == kCGDisplayStreamFrameStatusStopped) return; @@ -539,8 +489,8 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s if (status != kCGDisplayStreamFrameStatusFrameComplete) return; size_t count = 0; - const CGRect* rects = - wrapCGDisplayStreamUpdateGetRects(updateRef, kCGDisplayStreamUpdateDirtyRects, &count); + const CGRect* rects = CGDisplayStreamUpdateGetRects( + updateRef, kCGDisplayStreamUpdateDirtyRects, &count); if (count != 0) { // According to CGDisplayStream.h, it's safe to call // CGDisplayStreamStop() from within the callback. @@ -548,22 +498,28 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s } }; - rtc::ScopedCFTypeRef properties_dict( + webrtc::ScopedCFTypeRef properties_dict( CFDictionaryCreate(kCFAllocatorDefault, - (const void*[]){wrapkCGDisplayStreamShowCursor()}, + (const void*[]){kCGDisplayStreamShowCursor}, (const void*[]){kCFBooleanFalse}, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks)); - CGDisplayStreamRef display_stream = wrapCGDisplayStreamCreate( - display_id, pixel_width, pixel_height, 'BGRA', properties_dict.get(), handler); + CGDisplayStreamRef display_stream = + CGDisplayStreamCreate(display_id, + pixel_width, + pixel_height, + 'BGRA', + properties_dict.get(), + handler); if (display_stream) { - CGError error = wrapCGDisplayStreamStart(display_stream); + CGError error = CGDisplayStreamStart(display_stream); if (error != kCGErrorSuccess) return false; - CFRunLoopSourceRef source = wrapCGDisplayStreamGetRunLoopSource(display_stream); + CFRunLoopSourceRef source = + CGDisplayStreamGetRunLoopSource(display_stream); CFRunLoopAddSource(CFRunLoopGetCurrent(), source, kCFRunLoopCommonModes); display_streams_.push_back(display_stream); } @@ -576,9 +532,9 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s RTC_DCHECK(thread_checker_.IsCurrent()); for (CGDisplayStreamRef stream : display_streams_) { - CFRunLoopSourceRef source = wrapCGDisplayStreamGetRunLoopSource(stream); + CFRunLoopSourceRef source = CGDisplayStreamGetRunLoopSource(stream); CFRunLoopRemoveSource(CFRunLoopGetCurrent(), source, kCFRunLoopCommonModes); - wrapCGDisplayStreamStop(stream); + CGDisplayStreamStop(stream); CFRelease(stream); } display_streams_.clear(); @@ -616,14 +572,17 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s // Always having the latest iosurface before invalidating a region. // See https://bugs.chromium.org/p/webrtc/issues/detail?id=8652 for details. 
desktop_frame_provider_.InvalidateIOSurface( - display_id, rtc::ScopedCFTypeRef(io_surface, rtc::RetainPolicy::RETAIN)); + display_id, + webrtc::ScopedCFTypeRef(io_surface, + webrtc::RetainPolicy::RETAIN)); helper_.InvalidateRegion(region); } std::unique_ptr ScreenCapturerMac::CreateFrame() { - std::unique_ptr frame(new BasicDesktopFrame(screen_pixel_bounds_.size())); - frame->set_dpi( - DesktopVector(kStandardDPI * dip_to_pixel_scale_, kStandardDPI * dip_to_pixel_scale_)); + std::unique_ptr frame( + new BasicDesktopFrame(screen_pixel_bounds_.size())); + frame->set_dpi(DesktopVector(kStandardDPI * dip_to_pixel_scale_, + kStandardDPI * dip_to_pixel_scale_)); return frame; } diff --git a/modules/desktop_capture/mac/screen_capturer_sck.h b/modules/desktop_capture/mac/screen_capturer_sck.h new file mode 100644 index 0000000000..e0c467714a --- /dev/null +++ b/modules/desktop_capture/mac/screen_capturer_sck.h @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_DESKTOP_CAPTURE_MAC_SCREEN_CAPTURER_SCK_H_ +#define MODULES_DESKTOP_CAPTURE_MAC_SCREEN_CAPTURER_SCK_H_ + +#include + +#include "modules/desktop_capture/desktop_capture_options.h" +#include "modules/desktop_capture/desktop_capturer.h" + +namespace webrtc { + +// Returns true if the ScreenCaptureKit capturer is available. +bool ScreenCapturerSckAvailable(); + +// Returns true if the ScreenCaptureKit capturer is available using +// SCContentSharingPicker for picking a generic source. +bool GenericCapturerSckWithPickerAvailable(); + +// A DesktopCapturer implementation that uses ScreenCaptureKit. +std::unique_ptr CreateScreenCapturerSck( + const DesktopCaptureOptions& options); + +std::unique_ptr CreateGenericCapturerSck( + const DesktopCaptureOptions& options); + +} // namespace webrtc + +#endif // MODULES_DESKTOP_CAPTURE_MAC_SCREEN_CAPTURER_SCK_H_ diff --git a/modules/desktop_capture/mac/screen_capturer_sck.mm b/modules/desktop_capture/mac/screen_capturer_sck.mm new file mode 100644 index 0000000000..d6e3c2eeee --- /dev/null +++ b/modules/desktop_capture/mac/screen_capturer_sck.mm @@ -0,0 +1,891 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
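The new screen_capturer_sck.h above pairs availability probes with factories, so a call site can pick the ScreenCaptureKit path at runtime and fall back to the CGDisplayStream-based capturer otherwise. A hedged usage sketch (the set_allow_sck_system_picker() setter is assumed to mirror the allow_sck_system_picker() getter used elsewhere in this patch):

#include <memory>

#include "modules/desktop_capture/desktop_capture_options.h"
#include "modules/desktop_capture/desktop_capturer.h"
#include "modules/desktop_capture/mac/screen_capturer_sck.h"

std::unique_ptr<webrtc::DesktopCapturer> MakeMacScreenCapturer() {
  webrtc::DesktopCaptureOptions options =
      webrtc::DesktopCaptureOptions::CreateDefault();
  if (webrtc::GenericCapturerSckWithPickerAvailable()) {
    options.set_allow_sck_system_picker(true);  // Assumed setter name.
    return webrtc::CreateGenericCapturerSck(options);
  }
  if (webrtc::ScreenCapturerSckAvailable()) {
    return webrtc::CreateScreenCapturerSck(options);
  }
  return nullptr;  // Fall back to the CGDisplayStream-based capturer.
}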
+ */ + +#include "modules/desktop_capture/mac/screen_capturer_sck.h" + +#import + +#include + +#include "absl/strings/str_format.h" +#include "api/sequence_checker.h" +#include "modules/desktop_capture/mac/desktop_frame_iosurface.h" +#include "modules/desktop_capture/shared_desktop_frame.h" +#include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" +#include "rtc_base/time_utils.h" +#include "sck_picker_handle.h" +#include "sdk/objc/helpers/scoped_cftyperef.h" + +using webrtc::DesktopFrameIOSurface; + +namespace webrtc { +class ScreenCapturerSck; +} // namespace webrtc + +// The ScreenCaptureKit API was available in macOS 12.3, but full-screen capture +// was reported to be broken before macOS 13 - see http://crbug.com/40234870. +// Also, the `SCContentFilter` fields `contentRect` and `pointPixelScale` were +// introduced in macOS 14. +API_AVAILABLE(macos(14.0)) +@interface SckHelper : NSObject + +- (instancetype)initWithCapturer:(webrtc::ScreenCapturerSck*)capturer; + +- (void)onShareableContentCreated:(SCShareableContent*)content + error:(NSError*)error; + +// Called just before the capturer is destroyed. This avoids a dangling pointer, +// and prevents any new calls into a deleted capturer. If any method-call on the +// capturer is currently running on a different thread, this blocks until it +// completes. +- (void)releaseCapturer; + +@end + +namespace webrtc { + +class API_AVAILABLE(macos(14.0)) ScreenCapturerSck final + : public DesktopCapturer { + public: + explicit ScreenCapturerSck(const DesktopCaptureOptions& options); + ScreenCapturerSck(const DesktopCaptureOptions& options, + SCContentSharingPickerMode modes); + ScreenCapturerSck(const ScreenCapturerSck&) = delete; + ScreenCapturerSck& operator=(const ScreenCapturerSck&) = delete; + + ~ScreenCapturerSck() override; + + // DesktopCapturer interface. All these methods run on the caller's thread. + void Start(DesktopCapturer::Callback* callback) override; + void SetMaxFrameRate(uint32_t max_frame_rate) override; + void CaptureFrame() override; + bool GetSourceList(SourceList* sources) override; + bool SelectSource(SourceId id) override; + // Prep for implementing DelegatedSourceListController interface, for now used + // by Start(). Triggers SCContentSharingPicker. Runs on the caller's thread. + void EnsureVisible(); + // Helper functions to forward SCContentSharingPickerObserver notifications to + // source_list_observer_. + void NotifySourceSelection(SCContentFilter* filter, SCStream* stream); + void NotifySourceCancelled(SCStream* stream); + void NotifySourceError(); + + // Called after a SCStreamDelegate stop notification. + void NotifyCaptureStopped(SCStream* stream); + + // Called by SckHelper when shareable content is returned by ScreenCaptureKit. + // `content` will be nil if an error occurred. May run on an arbitrary thread. + void OnShareableContentCreated(SCShareableContent* content, NSError* error); + + // Start capture with the given filter. Creates or updates stream_ as needed. + void StartWithFilter(SCContentFilter* filter) + RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); + + // Called by SckHelper to notify of a newly captured frame. May run on an + // arbitrary thread. + void OnNewIOSurface(IOSurfaceRef io_surface, NSDictionary* attachment); + + private: + // Called when starting the capturer or the configuration has changed (either + // from a SelectSource() call, or the screen-resolution has changed). 
This + // tells SCK to fetch new shareable content, and the completion-handler will + // either start a new stream, or reconfigure the existing stream. Runs on the + // caller's thread. + void StartOrReconfigureCapturer(); + + // Calls to the public API must happen on a single thread. + webrtc::SequenceChecker api_checker_; + + // Helper object to receive Objective-C callbacks from ScreenCaptureKit and + // call into this C++ object. The helper may outlive this C++ instance, if a + // completion-handler is passed to ScreenCaptureKit APIs and the C++ object is + // deleted before the handler executes. + SckHelper* __strong helper_; + + // Callback for returning captured frames, or errors, to the caller. + Callback* callback_ RTC_GUARDED_BY(api_checker_) = nullptr; + + // Helper class that tracks the number of capturers needing + // SCContentSharingPicker to stay active. + std::unique_ptr picker_handle_ + RTC_GUARDED_BY(api_checker_); + + // Flag to track if we have added ourselves as observer to picker_handle_. + bool picker_handle_registered_ RTC_GUARDED_BY(api_checker_) = false; + + // Options passed to the constructor. May be accessed on any thread, but the + // options are unchanged during the capturer's lifetime. + const DesktopCaptureOptions capture_options_; + + // Modes to use iff using the system picker. + // See docs on SCContentSharingPickerMode. + const SCContentSharingPickerMode picker_modes_; + + // Signals that a permanent error occurred. This may be set on any thread, and + // is read by CaptureFrame() which runs on the caller's thread. + std::atomic permanent_error_ = false; + + // Guards some variables that may be accessed on different threads. + Mutex lock_; + + // Provides captured desktop frames. + SCStream* __strong stream_ RTC_GUARDED_BY(lock_); + + // Current filter on stream_. + SCContentFilter* __strong filter_ RTC_GUARDED_BY(lock_); + + // Currently selected display, or 0 if the full desktop is selected. This + // capturer does not support full-desktop capture, and will fall back to the + // first display. + CGDirectDisplayID current_display_ RTC_GUARDED_BY(lock_) = 0; + + // Configured maximum frame rate in frames per second. + uint32_t max_frame_rate_ RTC_GUARDED_BY(lock_) = 0; + + // Used by CaptureFrame() to detect if the screen configuration has changed. + MacDesktopConfiguration desktop_config_ RTC_GUARDED_BY(api_checker_); + + Mutex latest_frame_lock_ RTC_ACQUIRED_AFTER(lock_); + std::unique_ptr latest_frame_ + RTC_GUARDED_BY(latest_frame_lock_); + + int32_t latest_frame_dpi_ RTC_GUARDED_BY(latest_frame_lock_) = kStandardDPI; + + // Tracks whether the latest frame contains new data since it was returned to + // the caller. This is used to set the DesktopFrame's `updated_region` + // property. The flag is cleared after the frame is sent to OnCaptureResult(), + // and is set when SCK reports a new frame with non-empty "dirty" rectangles. + // TODO: crbug.com/327458809 - Replace this flag with ScreenCapturerHelper to + // more accurately track the dirty rectangles from the + // SCStreamFrameInfoDirtyRects attachment. + bool frame_is_dirty_ RTC_GUARDED_BY(latest_frame_lock_) = true; + + // Tracks whether a reconfigure is needed. + bool frame_needs_reconfigure_ RTC_GUARDED_BY(latest_frame_lock_) = false; + // If a reconfigure is needed, this will be set to the size in pixels required + // to fit the entire source without downscaling. 
+ std::optional frame_reconfigure_img_size_ + RTC_GUARDED_BY(latest_frame_lock_); +}; + +/* Helper class for stringifying SCContentSharingPickerMode. Needed as + * SCContentSharingPickerMode is a typedef to NSUInteger which we cannot add a + * AbslStringify function for. */ +struct StringifiableSCContentSharingPickerMode { + const SCContentSharingPickerMode modes_; + + template + friend void AbslStringify(Sink& sink, + const StringifiableSCContentSharingPickerMode& m) { + auto modes = m.modes_; + if (@available(macos 14, *)) { + bool empty = true; + const std::tuple all_modes[] = { + {SCContentSharingPickerModeSingleWindow, "SingleWindow"}, + {SCContentSharingPickerModeMultipleWindows, "MultiWindow"}, + {SCContentSharingPickerModeSingleApplication, "SingleApp"}, + {SCContentSharingPickerModeMultipleApplications, "MultiApp"}, + {SCContentSharingPickerModeSingleDisplay, "SingleDisplay"}}; + for (const auto& [mode, text] : all_modes) { + if (modes & mode) { + modes = modes & (~mode); + absl::Format(&sink, "%s%s", empty ? "" : "|", text); + empty = false; + } + } + if (modes) { + absl::Format(&sink, "%sRemaining=%v", empty ? "" : "|", modes); + } + return; + } + absl::Format(&sink, "%v", modes); + } +}; + +ScreenCapturerSck::ScreenCapturerSck(const DesktopCaptureOptions& options, + SCContentSharingPickerMode modes) + : api_checker_(SequenceChecker::kDetached), + capture_options_(options), + picker_modes_(modes) { + if (capture_options_.allow_sck_system_picker()) { + picker_handle_ = CreateSckPickerHandle(); + } + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << this + << " created. allow_sck_system_picker=" + << capture_options_.allow_sck_system_picker() << ", source=" + << (picker_handle_ ? picker_handle_->Source() : -1) + << ", modes=" + << StringifiableSCContentSharingPickerMode{.modes_ = modes}; + helper_ = [[SckHelper alloc] initWithCapturer:this]; +} + +ScreenCapturerSck::ScreenCapturerSck(const DesktopCaptureOptions& options) + : ScreenCapturerSck(options, SCContentSharingPickerModeSingleDisplay) {} + +ScreenCapturerSck::~ScreenCapturerSck() { + RTC_DCHECK_RUN_ON(&api_checker_); + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << this << " destroyed."; + [stream_ stopCaptureWithCompletionHandler:nil]; + [helper_ releaseCapturer]; +} + +void ScreenCapturerSck::Start(DesktopCapturer::Callback* callback) { + RTC_DCHECK_RUN_ON(&api_checker_); + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << this << " " << __func__ << "."; + callback_ = callback; + desktop_config_ = + capture_options_.configuration_monitor()->desktop_configuration(); + if (capture_options_.allow_sck_system_picker()) { + EnsureVisible(); + return; + } + StartOrReconfigureCapturer(); +} + +void ScreenCapturerSck::SetMaxFrameRate(uint32_t max_frame_rate) { + RTC_DCHECK_RUN_ON(&api_checker_); + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << this << " SetMaxFrameRate(" + << max_frame_rate << ")."; + bool stream_started = false; + { + MutexLock lock(&lock_); + if (max_frame_rate_ == max_frame_rate) { + return; + } + + max_frame_rate_ = max_frame_rate; + stream_started = stream_; + } + if (stream_started) { + StartOrReconfigureCapturer(); + } +} + +void ScreenCapturerSck::CaptureFrame() { + RTC_DCHECK_RUN_ON(&api_checker_); + int64_t capture_start_time_millis = webrtc::TimeMillis(); + + if (permanent_error_) { + RTC_LOG(LS_VERBOSE) << "ScreenCapturerSck " << this + << " CaptureFrame() -> ERROR_PERMANENT"; + callback_->OnCaptureResult(Result::ERROR_PERMANENT, nullptr); + return; + } + + MacDesktopConfiguration new_config = + 
capture_options_.configuration_monitor()->desktop_configuration(); + if (!desktop_config_.Equals(new_config)) { + desktop_config_ = new_config; + StartOrReconfigureCapturer(); + } + + std::unique_ptr frame; + bool needs_reconfigure = false; + { + MutexLock lock(&latest_frame_lock_); + if (latest_frame_) { + frame = latest_frame_->Share(); + if (frame_is_dirty_) { + frame->mutable_updated_region()->AddRect( + DesktopRect::MakeSize(frame->size())); + frame_is_dirty_ = false; + } + } + needs_reconfigure = frame_needs_reconfigure_; + frame_needs_reconfigure_ = false; + } + + if (needs_reconfigure) { + StartOrReconfigureCapturer(); + } + + if (frame) { + RTC_LOG(LS_VERBOSE) << "ScreenCapturerSck " << this + << " CaptureFrame() -> SUCCESS"; + frame->set_capture_time_ms(webrtc::TimeSince(capture_start_time_millis)); + callback_->OnCaptureResult(Result::SUCCESS, std::move(frame)); + } else { + RTC_LOG(LS_VERBOSE) << "ScreenCapturerSck " << this + << " CaptureFrame() -> ERROR_TEMPORARY"; + callback_->OnCaptureResult(Result::ERROR_TEMPORARY, nullptr); + } +} + +void ScreenCapturerSck::EnsureVisible() { + RTC_DCHECK_RUN_ON(&api_checker_); + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << this << " " << __func__ << "."; + if (picker_handle_) { + if (!picker_handle_registered_) { + picker_handle_registered_ = true; + [picker_handle_->GetPicker() addObserver:helper_]; + } + } else { + // We reached the maximum number of streams. + RTC_LOG(LS_ERROR) + << "ScreenCapturerSck " << this + << " EnsureVisible() reached the maximum number of streams."; + permanent_error_ = true; + return; + } + SCContentSharingPicker* picker = picker_handle_->GetPicker(); + SCStream* stream; + { + MutexLock lock(&lock_); + stream = stream_; + stream_ = nil; + filter_ = nil; + MutexLock lock2(&latest_frame_lock_); + frame_needs_reconfigure_ = false; + frame_reconfigure_img_size_ = std::nullopt; + } + [stream removeStreamOutput:helper_ type:SCStreamOutputTypeScreen error:nil]; + [stream stopCaptureWithCompletionHandler:nil]; + SCContentSharingPickerConfiguration* config = picker.defaultConfiguration; + config.allowedPickerModes = picker_modes_; + picker.defaultConfiguration = config; + SCShareableContentStyle style = SCShareableContentStyleNone; + // Pick a sensible style to start out with, based on our current mode. + if (@available(macOS 15, *)) { + // Stick with None because if we use Display, the picker doesn't let us + // pick a window when first opened. Behaves like Window in 14 except doesn't + // change window focus. + } else { + // Default to Display because if using Window the picker automatically hides + // our current window to show others. Saves a click compared to None when + // picking a display. + style = SCShareableContentStyleDisplay; + } + if (picker_modes_ == SCContentSharingPickerModeSingleDisplay) { + style = SCShareableContentStyleDisplay; + } else if (picker_modes_ == SCContentSharingPickerModeSingleWindow || + picker_modes_ == SCContentSharingPickerModeMultipleWindows) { + style = SCShareableContentStyleWindow; + } else if (picker_modes_ == SCContentSharingPickerModeSingleApplication || + picker_modes_ == SCContentSharingPickerModeMultipleApplications) { + style = SCShareableContentStyleApplication; + } + // This dies silently if maximumStreamCount streams are already running. We + // need our own stream count bookkeeping because of this, and to be able to + // unset `active`. 
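CaptureFrame() above never waits on ScreenCaptureKit: it Share()s whatever frame the capture callback last published under latest_frame_lock_ and consumes the dirty flag. That producer/consumer handoff, reduced to a standalone class (illustrative name, same locking discipline as the members documented above):

#include <memory>
#include <utility>

#include "modules/desktop_capture/shared_desktop_frame.h"
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"

class LatestFrameSlot {
 public:
  // Producer side (the capture callback thread).
  void Publish(std::unique_ptr<webrtc::SharedDesktopFrame> frame) {
    webrtc::MutexLock lock(&lock_);
    dirty_ = true;
    latest_ = std::move(frame);
  }

  // Consumer side (CaptureFrame on the caller's thread). Returns a shared
  // reference to the latest frame and whether it changed since last taken.
  std::unique_ptr<webrtc::SharedDesktopFrame> Take(bool* was_dirty) {
    webrtc::MutexLock lock(&lock_);
    *was_dirty = dirty_;
    dirty_ = false;
    return latest_ ? latest_->Share() : nullptr;
  }

 private:
  webrtc::Mutex lock_;
  std::unique_ptr<webrtc::SharedDesktopFrame> latest_ RTC_GUARDED_BY(lock_);
  bool dirty_ RTC_GUARDED_BY(lock_) = false;
};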
+ [picker presentPickerForStream:stream usingContentStyle:style]; +} + +void ScreenCapturerSck::NotifySourceSelection(SCContentFilter* filter, + SCStream* stream) { + MutexLock lock(&lock_); + if (stream_ != stream) { + // The picker selected a source for another capturer. + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << this << " " << __func__ + << ". stream_ != stream."; + return; + } + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << this << " " << __func__ + << ". Starting."; + StartWithFilter(filter); +} + +void ScreenCapturerSck::NotifySourceCancelled(SCStream* stream) { + MutexLock lock(&lock_); + if (stream_ != stream) { + // The picker was cancelled for another capturer. + return; + } + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << this << " " << __func__ << "."; + if (!stream_) { + // The initial picker was cancelled. There is no stream to fall back to. + permanent_error_ = true; + } +} + +void ScreenCapturerSck::NotifySourceError() { + { + MutexLock lock(&lock_); + if (stream_) { + // The picker failed to start. But fear not, it was not our picker, + // we already have a stream! + return; + } + } + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << this << " " << __func__ << "."; + permanent_error_ = true; +} + +void ScreenCapturerSck::NotifyCaptureStopped(SCStream* stream) { + MutexLock lock(&lock_); + if (stream_ != stream) { + return; + } + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << this << " " << __func__ << "."; + permanent_error_ = true; +} + +bool ScreenCapturerSck::GetSourceList(SourceList* sources) { + RTC_DCHECK_RUN_ON(&api_checker_); + sources->clear(); + if (capture_options_.allow_sck_system_picker() && picker_handle_) { + sources->push_back({picker_handle_->Source(), std::string()}); + } + return true; +} + +bool ScreenCapturerSck::SelectSource(SourceId id) { + RTC_DCHECK_RUN_ON(&api_checker_); + + if (capture_options_.allow_sck_system_picker()) { + return true; + } + + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << this << " SelectSource(id=" << id + << ")."; + bool stream_started = false; + { + MutexLock lock(&lock_); + if (current_display_ == id) { + return true; + } + current_display_ = id; + + if (stream_) { + stream_started = true; + } + } + + // If the capturer was already started, reconfigure it. Otherwise, wait until + // Start() gets called. + if (stream_started) { + StartOrReconfigureCapturer(); + } + + return true; +} + +void ScreenCapturerSck::OnShareableContentCreated(SCShareableContent* content, + NSError* error) { + if (!content) { + RTC_LOG(LS_ERROR) << "ScreenCapturerSck " << this + << " getShareableContent failed with error code " + << (error ? error.code : 0) << "."; + permanent_error_ = true; + return; + } + + if (!content.displays.count) { + RTC_LOG(LS_ERROR) << "ScreenCapturerSck " << this + << " getShareableContent returned no displays."; + permanent_error_ = true; + return; + } + + MutexLock lock(&lock_); + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << this << " " << __func__ + << ". 
current_display_=" << current_display_; + SCDisplay* captured_display; + for (SCDisplay* display in content.displays) { + if (current_display_ == display.displayID) { + captured_display = display; + break; + } + } + if (!captured_display) { + if (current_display_ == + static_cast(kFullDesktopScreenId)) { + RTC_LOG(LS_WARNING) << "ScreenCapturerSck " << this + << " Full screen " + "capture is not supported, falling back to first " + "display."; + } else { + RTC_LOG(LS_WARNING) << "ScreenCapturerSck " << this << " Display " + << current_display_ + << " not found, falling back to " + "first display."; + } + captured_display = content.displays.firstObject; + } + + SCContentFilter* filter = + [[SCContentFilter alloc] initWithDisplay:captured_display + excludingWindows:@[]]; + StartWithFilter(filter); +} + +void ScreenCapturerSck::StartWithFilter(SCContentFilter* __strong filter) { + lock_.AssertHeld(); + SCStreamConfiguration* config = [[SCStreamConfiguration alloc] init]; + config.pixelFormat = kCVPixelFormatType_32BGRA; + config.colorSpaceName = kCGColorSpaceSRGB; + config.showsCursor = capture_options_.prefer_cursor_embedded(); + config.captureResolution = SCCaptureResolutionNominal; + config.minimumFrameInterval = max_frame_rate_ > 0 ? + CMTimeMake(1, static_cast(max_frame_rate_)) : + kCMTimeZero; + + { + MutexLock lock(&latest_frame_lock_); + latest_frame_dpi_ = filter.pointPixelScale * kStandardDPI; + if (filter_ != filter) { + frame_reconfigure_img_size_ = std::nullopt; + } + auto sourceImgRect = frame_reconfigure_img_size_.value_or( + CGSizeMake(filter.contentRect.size.width * filter.pointPixelScale, + filter.contentRect.size.height * filter.pointPixelScale)); + config.width = sourceImgRect.width; + config.height = sourceImgRect.height; + } + + filter_ = filter; + + if (stream_) { + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << this + << " Updating stream configuration to size=" + << config.width << "x" << config.height + << " and max_frame_rate=" << max_frame_rate_ << "."; + [stream_ updateContentFilter:filter completionHandler:nil]; + [stream_ updateConfiguration:config completionHandler:nil]; + } else { + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << this << " Creating new stream."; + stream_ = [[SCStream alloc] initWithFilter:filter + configuration:config + delegate:helper_]; + + // TODO: crbug.com/327458809 - Choose an appropriate sampleHandlerQueue for + // best performance. + NSError* add_stream_output_error; + bool add_stream_output_result = + [stream_ addStreamOutput:helper_ + type:SCStreamOutputTypeScreen + sampleHandlerQueue:nil + error:&add_stream_output_error]; + if (!add_stream_output_result) { + stream_ = nil; + filter_ = nil; + RTC_LOG(LS_ERROR) << "ScreenCapturerSck " << this + << " addStreamOutput failed."; + permanent_error_ = true; + return; + } + + auto handler = ^(NSError* error) { + if (error) { + // It should be safe to access `this` here, because the C++ destructor + // calls stopCaptureWithCompletionHandler on the stream, which cancels + // this handler. 
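Two small conversions in StartWithFilter() above are easy to get backwards: the frame-rate cap becomes a minimum frame interval of 1/max_fps seconds (kCMTimeZero meaning "uncapped"), and the stream's pixel size is the filter's point size multiplied by its point-to-pixel scale. Sketched on their own (illustrative helper names):

#include <CoreGraphics/CoreGraphics.h>
#include <CoreMedia/CoreMedia.h>

#include <cstdint>

// 0 means "no cap", which SCStreamConfiguration expresses as kCMTimeZero.
CMTime MinimumFrameInterval(uint32_t max_fps) {
  return max_fps > 0 ? CMTimeMake(1, static_cast<int32_t>(max_fps))
                     : kCMTimeZero;
}

// Capture surface size in device pixels for a filter whose contentRect is
// expressed in points.
CGSize CaptureSizeInPixels(CGRect content_rect_points,
                           float point_pixel_scale) {
  return CGSizeMake(content_rect_points.size.width * point_pixel_scale,
                    content_rect_points.size.height * point_pixel_scale);
}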
+ permanent_error_ = true; + RTC_LOG(LS_ERROR) << "ScreenCapturerSck " << this + << " Starting failed."; + } else { + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << this << " Capture started."; + } + }; + + [stream_ startCaptureWithCompletionHandler:handler]; + } +} + +void ScreenCapturerSck::OnNewIOSurface(IOSurfaceRef io_surface, + NSDictionary* attachment) { + bool has_frame_to_process = false; + if (auto status_nr = (NSNumber*)attachment[SCStreamFrameInfoStatus]) { + auto status = (SCFrameStatus)[status_nr integerValue]; + has_frame_to_process = + status == SCFrameStatusComplete || status == SCFrameStatusStarted; + } + if (!has_frame_to_process) { + return; + } + + double scale_factor = 1; + if (auto factor = (NSNumber*)attachment[SCStreamFrameInfoScaleFactor]) { + scale_factor = [factor floatValue]; + } + double content_scale = 1; + if (auto scale = (NSNumber*)attachment[SCStreamFrameInfoContentScale]) { + content_scale = [scale floatValue]; + } + CGRect content_rect = {}; + if (const auto* rect_dict = + (__bridge CFDictionaryRef)attachment[SCStreamFrameInfoContentRect]) { + if (!CGRectMakeWithDictionaryRepresentation(rect_dict, &content_rect)) { + content_rect = CGRect(); + } + } + CGRect bounding_rect = {}; + if (const auto* rect_dict = + (__bridge CFDictionaryRef)attachment[SCStreamFrameInfoBoundingRect]) { + if (!CGRectMakeWithDictionaryRepresentation(rect_dict, &bounding_rect)) { + bounding_rect = CGRect(); + } + } + CGRect overlay_rect = {}; + if (@available(macOS 14.2, *)) { + if (const auto* rect_dict = (__bridge CFDictionaryRef) + attachment[SCStreamFrameInfoPresenterOverlayContentRect]) { + if (!CGRectMakeWithDictionaryRepresentation(rect_dict, &overlay_rect)) { + overlay_rect = CGRect(); + } + } + } + const auto* dirty_rects = (NSArray*)attachment[SCStreamFrameInfoDirtyRects]; + + auto img_bounding_rect = CGRectMake(scale_factor * bounding_rect.origin.x, + scale_factor * bounding_rect.origin.y, + scale_factor * bounding_rect.size.width, + scale_factor * bounding_rect.size.height); + + webrtc::ScopedCFTypeRef scoped_io_surface( + io_surface, webrtc::RetainPolicy::RETAIN); + std::unique_ptr desktop_frame_io_surface = + DesktopFrameIOSurface::Wrap(scoped_io_surface, img_bounding_rect); + if (!desktop_frame_io_surface) { + RTC_LOG(LS_ERROR) << "Failed to lock IOSurface."; + return; + } + + const size_t width = IOSurfaceGetWidth(io_surface); + const size_t height = IOSurfaceGetHeight(io_surface); + + RTC_LOG(LS_VERBOSE) << "ScreenCapturerSck " << this << " " << __func__ + << ". New surface: width=" << width + << ", height=" << height << ", content_rect=" + << NSStringFromRect(content_rect).UTF8String + << ", bounding_rect=" + << NSStringFromRect(bounding_rect).UTF8String + << ", overlay_rect=(" + << NSStringFromRect(overlay_rect).UTF8String + << ", scale_factor=" << scale_factor + << ", content_scale=" << content_scale + << ". Cropping to rect " + << NSStringFromRect(img_bounding_rect).UTF8String << "."; + + std::unique_ptr frame = + SharedDesktopFrame::Wrap(std::move(desktop_frame_io_surface)); + + bool dirty; + { + MutexLock lock(&latest_frame_lock_); + // Mark the frame as dirty if it has a different size, and ignore any + // DirtyRects attachment in this case. This is because SCK does not apply a + // correct attachment to the frame in the case where the stream was + // reconfigured. 
+ dirty = !latest_frame_ || !latest_frame_->size().equals(frame->size()); + } + + if (!dirty) { + if (!dirty_rects) { + // This is never expected to happen - SCK attaches a non-empty dirty-rects + // list to every frame, even when nothing has changed. + return; + } + for (NSUInteger i = 0; i < dirty_rects.count; i++) { + const auto* rect_ptr = (__bridge CFDictionaryRef)dirty_rects[i]; + if (CFGetTypeID(rect_ptr) != CFDictionaryGetTypeID()) { + // This is never expected to happen - the dirty-rects attachment should + // always be an array of dictionaries. + return; + } + CGRect rect{}; + CGRectMakeWithDictionaryRepresentation(rect_ptr, &rect); + if (!CGRectIsEmpty(rect)) { + dirty = true; + break; + } + } + } + + MutexLock lock(&latest_frame_lock_); + if (content_scale > 0 && content_scale < 1) { + frame_needs_reconfigure_ = true; + double scale = 1 / content_scale; + frame_reconfigure_img_size_ = + CGSizeMake(std::ceil(scale * width), std::ceil(scale * height)); + } + if (dirty) { + frame->set_dpi(DesktopVector(latest_frame_dpi_, latest_frame_dpi_)); + frame->set_may_contain_cursor(capture_options_.prefer_cursor_embedded()); + + frame_is_dirty_ = true; + std::swap(latest_frame_, frame); + } +} + +void ScreenCapturerSck::StartOrReconfigureCapturer() { + if (capture_options_.allow_sck_system_picker()) { + MutexLock lock(&lock_); + if (filter_) { + StartWithFilter(filter_); + } + return; + } + + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << this << " " << __func__ << "."; + // The copy is needed to avoid capturing `this` in the Objective-C block. + // Accessing `helper_` inside the block is equivalent to `this->helper_` and + // would crash (UAF) if `this` is deleted before the block is executed. + SckHelper* local_helper = helper_; + auto handler = ^(SCShareableContent* content, NSError* error) { + [local_helper onShareableContentCreated:content error:error]; + }; + + [SCShareableContent getShareableContentWithCompletionHandler:handler]; +} + +bool ScreenCapturerSckAvailable() { + static bool available = ([] { + if (@available(macOS 14.0, *)) { + return true; + } + return false; + })(); + return available; +} + +std::unique_ptr CreateScreenCapturerSck( + const DesktopCaptureOptions& options) { + if (@available(macOS 14.0, *)) { + return std::make_unique(options); + } + return nullptr; +} + +bool GenericCapturerSckWithPickerAvailable() { + bool available = false; + if (@available(macOS 14.0, *)) { + available = true; + } + return available; +} + +std::unique_ptr CreateGenericCapturerSck( + const DesktopCaptureOptions& options) { + if (@available(macOS 14.0, *)) { + if (options.allow_sck_system_picker()) { + return std::make_unique( + options, + SCContentSharingPickerModeSingleDisplay | + SCContentSharingPickerModeMultipleWindows); + } + } + return nullptr; +} + +} // namespace webrtc + +@implementation SckHelper { + // This lock is to prevent the capturer being destroyed while an instance + // method is still running on another thread. 
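A content_scale below 1 above means ScreenCaptureKit delivered the frame downscaled, so the capturer schedules a reconfigure at the size that fits the source at 1:1: the delivered pixel size divided by the content scale, rounded up. As a standalone computation (illustrative helper):

#include <CoreGraphics/CoreGraphics.h>

#include <cmath>
#include <cstddef>
#include <optional>

// Surface size needed to capture the source without downscaling, or nullopt
// when the delivered frame is already at full resolution.
std::optional<CGSize> FullResolutionSize(size_t delivered_width,
                                         size_t delivered_height,
                                         double content_scale) {
  if (content_scale <= 0 || content_scale >= 1) return std::nullopt;
  const double upscale = 1.0 / content_scale;
  return CGSizeMake(std::ceil(upscale * delivered_width),
                    std::ceil(upscale * delivered_height));
}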
+ webrtc::Mutex _capturer_lock; + webrtc::ScreenCapturerSck* _capturer; +} + +- (instancetype)initWithCapturer:(webrtc::ScreenCapturerSck*)capturer { + self = [super init]; + if (self) { + _capturer = capturer; + } + return self; +} + +- (void)onShareableContentCreated:(SCShareableContent*)content + error:(NSError*)error { + webrtc::MutexLock lock(&_capturer_lock); + if (_capturer) { + _capturer->OnShareableContentCreated(content, error); + } +} + +- (void)stream:(SCStream*)stream didStopWithError:(NSError*)error { + webrtc::MutexLock lock(&_capturer_lock); + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << _capturer << " " << __func__ + << "."; + if (_capturer) { + _capturer->NotifyCaptureStopped(stream); + } +} + +- (void)userDidStopStream:(SCStream*)stream NS_SWIFT_NAME(userDidStopStream(_:)) + API_AVAILABLE(macos(14.4)) { + webrtc::MutexLock lock(&_capturer_lock); + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << _capturer << " " << __func__ + << "."; + if (_capturer) { + _capturer->NotifyCaptureStopped(stream); + } +} + +- (void)contentSharingPicker:(SCContentSharingPicker*)picker + didUpdateWithFilter:(SCContentFilter*)filter + forStream:(SCStream*)stream { + webrtc::MutexLock lock(&_capturer_lock); + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << _capturer << " " << __func__ + << "."; + if (_capturer) { + _capturer->NotifySourceSelection(filter, stream); + } +} + +- (void)contentSharingPicker:(SCContentSharingPicker*)picker + didCancelForStream:(SCStream*)stream { + webrtc::MutexLock lock(&_capturer_lock); + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << _capturer << " " << __func__ + << "."; + if (_capturer) { + _capturer->NotifySourceCancelled(stream); + } +} + +- (void)contentSharingPickerStartDidFailWithError:(NSError*)error { + webrtc::MutexLock lock(&_capturer_lock); + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << _capturer << " " << __func__ + << ". error.code=" << error.code; + if (_capturer) { + _capturer->NotifySourceError(); + } +} + +- (void)stream:(SCStream*)stream + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + ofType:(SCStreamOutputType)type { + CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + if (!pixelBuffer) { + return; + } + + IOSurfaceRef ioSurface = CVPixelBufferGetIOSurface(pixelBuffer); + if (!ioSurface) { + return; + } + + CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray( + sampleBuffer, /*createIfNecessary=*/false); + if (!attachmentsArray || CFArrayGetCount(attachmentsArray) <= 0) { + RTC_LOG(LS_ERROR) << "Discarding frame with no attachments."; + return; + } + + CFDictionaryRef attachment = + static_cast(CFArrayGetValueAtIndex(attachmentsArray, 0)); + + webrtc::MutexLock lock(&_capturer_lock); + if (_capturer) { + _capturer->OnNewIOSurface(ioSurface, (__bridge NSDictionary*)attachment); + } +} + +- (void)releaseCapturer { + webrtc::MutexLock lock(&_capturer_lock); + RTC_LOG(LS_INFO) << "ScreenCapturerSck " << _capturer << " " << __func__ + << "."; + _capturer = nullptr; +} + +@end diff --git a/modules/desktop_capture/mac/window_list_utils.cc b/modules/desktop_capture/mac/window_list_utils.cc index 5d881662ea..3aeb8dd02b 100644 --- a/modules/desktop_capture/mac/window_list_utils.cc +++ b/modules/desktop_capture/mac/window_list_utils.cc @@ -54,7 +54,7 @@ bool ToUtf8(const CFStringRef str16, std::string* str8) { // not represent a window. `on_window` will not be called if false is returned // from this function. 
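Every SckHelper method above follows the same discipline: take _capturer_lock, check that _capturer is still set, then call into the C++ object; releaseCapturer nulls the pointer under the same lock so late ScreenCaptureKit callbacks degrade to no-ops instead of use-after-free. The shape of that guard, in plain C++ with hypothetical names:

#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"

class Capturer;  // The C++ object that may be destroyed first.

class CallbackGate {
 public:
  explicit CallbackGate(Capturer* capturer) : capturer_(capturer) {}

  // Called from the capturer's destructor. Blocks until any in-flight
  // Invoke() finishes, then makes all later callbacks no-ops.
  void Release() {
    webrtc::MutexLock lock(&lock_);
    capturer_ = nullptr;
  }

  // Runs `fn(capturer)` only while the capturer is still alive.
  template <typename Fn>
  void Invoke(Fn&& fn) {
    webrtc::MutexLock lock(&lock_);
    if (capturer_) fn(capturer_);
  }

 private:
  webrtc::Mutex lock_;
  Capturer* capturer_ RTC_GUARDED_BY(lock_);
};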
bool GetWindowRef(CGWindowID id, - rtc::FunctionView on_window) { + webrtc::FunctionView on_window) { RTC_DCHECK(on_window); // TODO(zijiehe): `id` is a 32-bit integer, casting it to an array seems not @@ -85,7 +85,7 @@ bool GetWindowRef(CGWindowID id, } // namespace -bool GetWindowList(rtc::FunctionView on_window, +bool GetWindowList(webrtc::FunctionView on_window, bool ignore_minimized, bool only_zero_layer) { RTC_DCHECK(on_window); diff --git a/modules/desktop_capture/mac/window_list_utils.h b/modules/desktop_capture/mac/window_list_utils.h index 34d1313234..98666ec521 100644 --- a/modules/desktop_capture/mac/window_list_utils.h +++ b/modules/desktop_capture/mac/window_list_utils.h @@ -20,6 +20,7 @@ #include "modules/desktop_capture/desktop_capturer.h" #include "modules/desktop_capture/desktop_geometry.h" #include "modules/desktop_capture/mac/desktop_configuration.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -29,14 +30,15 @@ namespace webrtc { // failed. Menus, dock (if `only_zero_layer`), minimized windows (if // `ignore_minimized` is true) and any windows which do not have a valid window // id or title will be ignored. -bool GetWindowList(rtc::FunctionView on_window, - bool ignore_minimized, - bool only_zero_layer); +bool RTC_EXPORT +GetWindowList(webrtc::FunctionView on_window, + bool ignore_minimized, + bool only_zero_layer); // Another helper function to get the on-screen windows. -bool GetWindowList(DesktopCapturer::SourceList* windows, - bool ignore_minimized, - bool only_zero_layer); +bool RTC_EXPORT GetWindowList(DesktopCapturer::SourceList* windows, + bool ignore_minimized, + bool only_zero_layer); // Returns true if the window is occupying a full screen. bool IsWindowFullScreen(const MacDesktopConfiguration& desktop_config, diff --git a/modules/desktop_capture/mouse_cursor_monitor.h b/modules/desktop_capture/mouse_cursor_monitor.h index ad134df36d..31549cbc81 100644 --- a/modules/desktop_capture/mouse_cursor_monitor.h +++ b/modules/desktop_capture/mouse_cursor_monitor.h @@ -53,8 +53,8 @@ class MouseCursorMonitor { // Called in response to Capture(). `position` indicates cursor position // relative to the `window` specified in the constructor. // Deprecated: use the following overload instead. - virtual void OnMouseCursorPosition(CursorState state, - const DesktopVector& position) {} + virtual void OnMouseCursorPosition(CursorState /* state */, + const DesktopVector& /* position */) {} // Called in response to Capture(). `position` indicates cursor absolute // position on the system in fullscreen coordinate, i.e. the top-left @@ -64,7 +64,7 @@ class MouseCursorMonitor { // TODO(zijiehe): Ensure all implementations return the absolute position. // TODO(zijiehe): Current this overload works correctly only when capturing // mouse cursor against fullscreen. - virtual void OnMouseCursorPosition(const DesktopVector& position) {} + virtual void OnMouseCursorPosition(const DesktopVector& /* position */) {} protected: virtual ~Callback() {} diff --git a/modules/desktop_capture/mouse_cursor_monitor_mac.mm b/modules/desktop_capture/mouse_cursor_monitor_mac.mm index 3db4332cd1..248a782762 100644 --- a/modules/desktop_capture/mouse_cursor_monitor_mac.mm +++ b/modules/desktop_capture/mouse_cursor_monitor_mac.mm @@ -10,7 +10,6 @@ #include "modules/desktop_capture/mouse_cursor_monitor.h" - #include #include @@ -33,13 +32,14 @@ CGImageRef CreateScaledCGImage(CGImageRef image, int width, int height) { // Create context, keeping original image properties. 
CGColorSpaceRef colorspace = CGImageGetColorSpace(image); - CGContextRef context = CGBitmapContextCreate(nullptr, - width, - height, - CGImageGetBitsPerComponent(image), - width * DesktopFrame::kBytesPerPixel, - colorspace, - CGImageGetBitmapInfo(image)); + CGContextRef context = + CGBitmapContextCreate(nullptr, + width, + height, + CGImageGetBitsPerComponent(image), + width * DesktopFrame::kBytesPerPixel, + colorspace, + CGImageGetBitmapInfo(image)); if (!context) return nil; @@ -66,13 +66,13 @@ CGImageRef CreateScaledCGImage(CGImageRef image, int width, int height) { private: static void DisplaysReconfiguredCallback(CGDirectDisplayID display, CGDisplayChangeSummaryFlags flags, - void *user_parameter); + void* user_parameter); void DisplaysReconfigured(CGDirectDisplayID display, CGDisplayChangeSummaryFlags flags); void CaptureImage(float scale); - rtc::scoped_refptr configuration_monitor_; + webrtc::scoped_refptr configuration_monitor_; CGWindowID window_id_; ScreenId screen_id_; Callback* callback_ = NULL; @@ -80,9 +80,10 @@ void DisplaysReconfigured(CGDirectDisplayID display, __strong NSImage* last_cursor_ = NULL; }; -MouseCursorMonitorMac::MouseCursorMonitorMac(const DesktopCaptureOptions& options, - CGWindowID window_id, - ScreenId screen_id) +MouseCursorMonitorMac::MouseCursorMonitorMac( + const DesktopCaptureOptions& options, + CGWindowID window_id, + ScreenId screen_id) : configuration_monitor_(options.configuration_monitor()), window_id_(window_id), screen_id_(screen_id), @@ -115,8 +116,7 @@ void DisplaysReconfigured(CGDirectDisplayID display, CaptureImage(scale); - if (mode_ != SHAPE_AND_POSITION) - return; + if (mode_ != SHAPE_AND_POSITION) return; // Always report cursor position in DIP pixel. callback_->OnMouseCursorPosition( @@ -133,7 +133,8 @@ void DisplaysReconfigured(CGDirectDisplayID display, NSSize nssize = [nsimage size]; // DIP size // No need to caputre cursor image if it's unchanged since last capture. - if ([[nsimage TIFFRepresentation] isEqual:[last_cursor_ TIFFRepresentation]]) return; + if ([[nsimage TIFFRepresentation] isEqual:[last_cursor_ TIFFRepresentation]]) + return; last_cursor_ = nsimage; DesktopSize size(round(nssize.width * scale), @@ -144,17 +145,18 @@ DesktopVector hotspot( std::min(size.width(), static_cast(nshotspot.x * scale))), std::max(0, std::min(size.height(), static_cast(nshotspot.y * scale)))); - CGImageRef cg_image = - [nsimage CGImageForProposedRect:NULL context:nil hints:nil]; - if (!cg_image) - return; + CGImageRef cg_image = [nsimage CGImageForProposedRect:NULL + context:nil + hints:nil]; + if (!cg_image) return; // Before 10.12, OSX may report 1X cursor on Retina screen. (See // crbug.com/632995.) After 10.12, OSX may report 2X cursor on non-Retina // screen. (See crbug.com/671436.) So scaling the cursor if needed. 
CGImageRef scaled_cg_image = nil; if (CGImageGetWidth(cg_image) != static_cast(size.width())) { - scaled_cg_image = CreateScaledCGImage(cg_image, size.width(), size.height()); + scaled_cg_image = + CreateScaledCGImage(cg_image, size.width(), size.height()); if (scaled_cg_image != nil) { cg_image = scaled_cg_image; } @@ -199,8 +201,7 @@ DesktopVector hotspot( } MouseCursorMonitor* MouseCursorMonitor::CreateForScreen( - const DesktopCaptureOptions& options, - ScreenId screen) { + const DesktopCaptureOptions& options, ScreenId screen) { return new MouseCursorMonitorMac(options, kCGNullWindowID, screen); } diff --git a/modules/desktop_capture/mouse_cursor_monitor_win.cc b/modules/desktop_capture/mouse_cursor_monitor_win.cc index c892d59955..85b8c5cfab 100644 --- a/modules/desktop_capture/mouse_cursor_monitor_win.cc +++ b/modules/desktop_capture/mouse_cursor_monitor_win.cc @@ -20,6 +20,7 @@ #include "modules/desktop_capture/win/cursor.h" #include "modules/desktop_capture/win/screen_capture_utils.h" #include "modules/desktop_capture/win/window_capture_utils.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { diff --git a/modules/desktop_capture/screen_capturer_darwin.mm b/modules/desktop_capture/screen_capturer_darwin.mm index d5a7bb0522..95a877c45d 100644 --- a/modules/desktop_capture/screen_capturer_darwin.mm +++ b/modules/desktop_capture/screen_capturer_darwin.mm @@ -11,6 +11,7 @@ #include #include "modules/desktop_capture/mac/screen_capturer_mac.h" +#include "modules/desktop_capture/mac/screen_capturer_sck.h" namespace webrtc { @@ -21,9 +22,20 @@ return nullptr; } - std::unique_ptr capturer(new ScreenCapturerMac( - options.configuration_monitor(), options.detect_updated_region(), options.allow_iosurface())); - if (!capturer.get()->Init()) { + if (options.allow_sck_capturer()) { + // This will return nullptr on systems that don't support ScreenCaptureKit. 
+    std::unique_ptr<DesktopCapturer> sck_capturer =
+        CreateScreenCapturerSck(options);
+    if (sck_capturer) {
+      return sck_capturer;
+    }
+  }
+
+  auto capturer =
+      std::make_unique<ScreenCapturerMac>(options.configuration_monitor(),
+                                          options.detect_updated_region(),
+                                          options.allow_iosurface());
+  if (!capturer->Init()) {
     return nullptr;
   }
diff --git a/modules/desktop_capture/screen_capturer_fuchsia.cc b/modules/desktop_capture/screen_capturer_fuchsia.cc
index b67632d1f9..7294290ef8 100644
--- a/modules/desktop_capture/screen_capturer_fuchsia.cc
+++ b/modules/desktop_capture/screen_capturer_fuchsia.cc
@@ -10,9 +10,9 @@
 
 #include "modules/desktop_capture/screen_capturer_fuchsia.h"
 
-#include
+#include
 #include
-#include
+#include
 #include
 
 #include
@@ -42,10 +42,10 @@ static constexpr uint32_t kFuchsiaBytesPerPixel = 4;
 static constexpr DesktopCapturer::SourceId kFuchsiaScreenId = 1;
 // 500 milliseconds
 static constexpr zx::duration kEventDelay = zx::msec(500);
-static constexpr fuchsia::sysmem::ColorSpaceType kSRGBColorSpace =
-    fuchsia::sysmem::ColorSpaceType::SRGB;
-static constexpr fuchsia::sysmem::PixelFormatType kBGRA32PixelFormatType =
-    fuchsia::sysmem::PixelFormatType::BGRA32;
+static constexpr fuchsia::images2::ColorSpace kSRGBColorSpace =
+    fuchsia::images2::ColorSpace::SRGB;
+static constexpr fuchsia::images2::PixelFormat kBGRA32PixelFormatType =
+    fuchsia::images2::PixelFormat::B8G8R8A8;
 
 // Round |value| up to the closest multiple of |multiple|
 size_t RoundUpToMultiple(size_t value, size_t multiple) {
@@ -66,9 +66,10 @@ ScreenCapturerFuchsia::ScreenCapturerFuchsia()
 ScreenCapturerFuchsia::~ScreenCapturerFuchsia() {
   // unmap virtual memory mapped pointers
   uint32_t virt_mem_bytes =
-      buffer_collection_info_.settings.buffer_settings.size_bytes;
+      buffer_collection_info_.settings().buffer_settings().size_bytes();
   for (uint32_t buffer_index = 0;
-       buffer_index < buffer_collection_info_.buffer_count; buffer_index++) {
+       buffer_index < buffer_collection_info_.buffers().size();
+       buffer_index++) {
     uintptr_t address =
         reinterpret_cast<uintptr_t>(virtual_memory_mapped_addrs_[buffer_index]);
     zx_status_t status = zx::vmar::root_self()->unmap(address, virt_mem_bytes);
@@ -92,7 +93,7 @@ void ScreenCapturerFuchsia::CaptureFrame() {
     return;
   }
 
-  int64_t capture_start_time_nanos = rtc::TimeNanos();
+  int64_t capture_start_time_nanos = webrtc::TimeNanos();
 
   zx::event event;
   zx::event dup;
@@ -132,7 +133,7 @@ void ScreenCapturerFuchsia::CaptureFrame() {
       new BasicDesktopFrame(DesktopSize(width_, height_)));
 
   uint32_t pixels_per_row = GetPixelsPerRow(
-      buffer_collection_info_.settings.image_format_constraints);
+      buffer_collection_info_.settings().image_format_constraints());
   uint32_t stride = kFuchsiaBytesPerPixel * pixels_per_row;
   frame->CopyPixelsFrom(virtual_memory_mapped_addrs_[buffer_index], stride,
                         DesktopRect::MakeWH(width_, height_));
@@ -147,8 +148,8 @@ void ScreenCapturerFuchsia::CaptureFrame() {
                       << release_result.err();
   }
 
-  int capture_time_ms = (rtc::TimeNanos() - capture_start_time_nanos) /
-                        rtc::kNumNanosecsPerMillisec;
+  int capture_time_ms = (webrtc::TimeNanos() - capture_start_time_nanos) /
+                        webrtc::kNumNanosecsPerMillisec;
   frame->set_capture_time_ms(capture_time_ms);
   callback_->OnCaptureResult(Result::SUCCESS, std::move(frame));
 }
@@ -167,110 +168,117 @@ bool ScreenCapturerFuchsia::SelectSource(SourceId id) {
   return false;
 }
 
-fuchsia::sysmem::BufferCollectionConstraints
+fuchsia::sysmem2::BufferCollectionConstraints
 ScreenCapturerFuchsia::GetBufferConstraints() {
-  fuchsia::sysmem::BufferCollectionConstraints constraints;
-
constraints.usage.cpu = - fuchsia::sysmem::cpuUsageRead | fuchsia::sysmem::cpuUsageWrite; - constraints.min_buffer_count = kMinBufferCount; - - constraints.has_buffer_memory_constraints = true; - constraints.buffer_memory_constraints.ram_domain_supported = true; - constraints.buffer_memory_constraints.cpu_domain_supported = true; - - constraints.image_format_constraints_count = 1; - fuchsia::sysmem::ImageFormatConstraints& image_constraints = - constraints.image_format_constraints[0]; - image_constraints.color_spaces_count = 1; - image_constraints.color_space[0] = - fuchsia::sysmem::ColorSpace{.type = kSRGBColorSpace}; - image_constraints.pixel_format.type = kBGRA32PixelFormatType; - image_constraints.pixel_format.has_format_modifier = true; - image_constraints.pixel_format.format_modifier.value = - fuchsia::sysmem::FORMAT_MODIFIER_LINEAR; - - image_constraints.required_min_coded_width = width_; - image_constraints.required_min_coded_height = height_; - image_constraints.required_max_coded_width = width_; - image_constraints.required_max_coded_height = height_; - - image_constraints.bytes_per_row_divisor = kFuchsiaBytesPerPixel; + fuchsia::sysmem2::BufferCollectionConstraints constraints; + constraints.mutable_usage()->set_cpu(fuchsia::sysmem2::CPU_USAGE_READ | + fuchsia::sysmem2::CPU_USAGE_WRITE); + constraints.set_min_buffer_count(kMinBufferCount); + + auto& memory_constraints = *constraints.mutable_buffer_memory_constraints(); + memory_constraints.set_ram_domain_supported(true); + memory_constraints.set_cpu_domain_supported(true); + + fuchsia::sysmem2::ImageFormatConstraints& image_constraints = + constraints.mutable_image_format_constraints()->emplace_back(); + image_constraints.mutable_color_spaces()->emplace_back(kSRGBColorSpace); + image_constraints.set_pixel_format(kBGRA32PixelFormatType); + image_constraints.set_pixel_format_modifier( + fuchsia::images2::PixelFormatModifier::LINEAR); + + image_constraints.set_required_min_size( + fuchsia::math::SizeU{width_, height_}); + image_constraints.set_required_max_size( + fuchsia::math::SizeU{width_, height_}); + + image_constraints.set_bytes_per_row_divisor(kFuchsiaBytesPerPixel); return constraints; } void ScreenCapturerFuchsia::SetupBuffers() { - fuchsia::ui::scenic::ScenicSyncPtr scenic; - zx_status_t status = component_context_->svc()->Connect(scenic.NewRequest()); + fuchsia::ui::display::singleton::InfoSyncPtr display_info; + zx_status_t status = + component_context_->svc()->Connect(display_info.NewRequest()); if (status != ZX_OK) { fatal_error_ = true; - RTC_LOG(LS_ERROR) << "Failed to connect to Scenic: " << status; + RTC_LOG(LS_ERROR) + << "Failed to connect to fuchsia.ui.display.singleton.Info: " << status; return; } - fuchsia::ui::gfx::DisplayInfo display_info; - status = scenic->GetDisplayInfo(&display_info); + fuchsia::ui::display::singleton::Metrics metrics; + status = display_info->GetMetrics(&metrics); if (status != ZX_OK) { fatal_error_ = true; RTC_LOG(LS_ERROR) << "Failed to connect to get display dimensions: " << status; return; } - width_ = display_info.width_in_px; - height_ = display_info.height_in_px; + width_ = metrics.extent_in_px().width; + height_ = metrics.extent_in_px().height; status = component_context_->svc()->Connect(sysmem_allocator_.NewRequest()); if (status != ZX_OK) { fatal_error_ = true; - RTC_LOG(LS_ERROR) << "Failed to connect to Sysmem Allocator: " << status; + RTC_LOG(LS_ERROR) << "Failed to connect to fuchsia.sysmem2.Allocator: " + << status; return; } - 
fuchsia::sysmem::BufferCollectionTokenSyncPtr sysmem_token; - status = - sysmem_allocator_->AllocateSharedCollection(sysmem_token.NewRequest()); + fuchsia::sysmem2::BufferCollectionTokenSyncPtr sysmem_token; + status = sysmem_allocator_->AllocateSharedCollection( + std::move(fuchsia::sysmem2::AllocatorAllocateSharedCollectionRequest{} + .set_token_request(sysmem_token.NewRequest()))); if (status != ZX_OK) { fatal_error_ = true; RTC_LOG(LS_ERROR) - << "fuchsia.sysmem.Allocator.AllocateSharedCollection() failed: " + << "fuchsia.sysmem2.Allocator.AllocateSharedCollection() failed: " << status; return; } - fuchsia::sysmem::BufferCollectionTokenSyncPtr flatland_token; - status = sysmem_token->Duplicate(ZX_RIGHT_SAME_RIGHTS, - flatland_token.NewRequest()); + fuchsia::sysmem2::BufferCollectionTokenSyncPtr flatland_token; + status = sysmem_token->Duplicate( + std::move(fuchsia::sysmem2::BufferCollectionTokenDuplicateRequest{} + .set_rights_attenuation_mask(ZX_RIGHT_SAME_RIGHTS) + .set_token_request(flatland_token.NewRequest()))); if (status != ZX_OK) { fatal_error_ = true; RTC_LOG(LS_ERROR) - << "fuchsia.sysmem.BufferCollectionToken.Duplicate() failed: " + << "fuchsia.sysmem2.BufferCollectionToken.Duplicate() failed: " << status; return; } - status = sysmem_token->Sync(); + fuchsia::sysmem2::Node_Sync_Result sync_result; + status = sysmem_token->Sync(&sync_result); if (status != ZX_OK) { fatal_error_ = true; - RTC_LOG(LS_ERROR) << "fuchsia.sysmem.BufferCollectionToken.Sync() failed: " + RTC_LOG(LS_ERROR) << "fuchsia.sysmem2.BufferCollectionToken.Sync() failed: " << status; return; } - status = sysmem_allocator_->BindSharedCollection(std::move(sysmem_token), - collection_.NewRequest()); + status = sysmem_allocator_->BindSharedCollection( + std::move(fuchsia::sysmem2::AllocatorBindSharedCollectionRequest{} + .set_token(std::move(sysmem_token)) + .set_buffer_collection_request(collection_.NewRequest()))); if (status != ZX_OK) { fatal_error_ = true; RTC_LOG(LS_ERROR) - << "fuchsia.sysmem.Allocator.BindSharedCollection() failed: " << status; + << "fuchsia.sysmem2.Allocator.BindSharedCollection() failed: " + << status; return; } - status = collection_->SetConstraints(/*has_constraints=*/true, - GetBufferConstraints()); + status = collection_->SetConstraints(std::move( + fuchsia::sysmem2::BufferCollectionSetConstraintsRequest{}.set_constraints( + GetBufferConstraints()))); if (status != ZX_OK) { fatal_error_ = true; RTC_LOG(LS_ERROR) - << "fuchsia.sysmem.BufferCollection.SetConstraints() failed: " + << "fuchsia.sysmem2.BufferCollection.SetConstraints() failed: " << status; return; } @@ -295,7 +303,9 @@ void ScreenCapturerFuchsia::SetupBuffers() { fuchsia::ui::composition::RegisterBufferCollectionArgs buffer_collection_args; buffer_collection_args.set_export_token(std::move(export_token)); - buffer_collection_args.set_buffer_collection_token(std::move(flatland_token)); + buffer_collection_args.set_buffer_collection_token( + fuchsia::sysmem::BufferCollectionTokenHandle( + flatland_token.Unbind().TakeChannel())); buffer_collection_args.set_usage( fuchsia::ui::composition::RegisterBufferCollectionUsage::SCREENSHOT); @@ -310,21 +320,31 @@ void ScreenCapturerFuchsia::SetupBuffers() { return; } - zx_status_t allocation_status; - status = collection_->WaitForBuffersAllocated(&allocation_status, - &buffer_collection_info_); + fuchsia::sysmem2::BufferCollection_WaitForAllBuffersAllocated_Result + wait_result; + status = collection_->WaitForAllBuffersAllocated(&wait_result); if (status != ZX_OK) { fatal_error_ = 
true; RTC_LOG(LS_ERROR) << "Failed to wait for buffer collection info: " << status; return; } - if (allocation_status != ZX_OK) { + if (!wait_result.is_response()) { + if (wait_result.is_framework_err()) { + RTC_LOG(LS_ERROR) + << "Failed to allocate buffer collection (framework_err): " + << fidl::ToUnderlying(wait_result.framework_err()); + } else { + RTC_LOG(LS_ERROR) << "Failed to allocate buffer collection (err): " + << static_cast(wait_result.err()); + } fatal_error_ = true; - RTC_LOG(LS_ERROR) << "Failed to allocate buffer collection: " << status; return; } - status = collection_->Close(); + buffer_collection_info_ = + std::move(*wait_result.response().mutable_buffer_collection_info()); + + status = collection_->Release(); if (status != ZX_OK) { fatal_error_ = true; RTC_LOG(LS_ERROR) << "Failed to close buffer collection token: " << status; @@ -341,7 +361,7 @@ void ScreenCapturerFuchsia::SetupBuffers() { // Configure buffers in ScreenCapture client. fuchsia::ui::composition::ScreenCaptureConfig configure_args; configure_args.set_import_token(std::move(import_token)); - configure_args.set_buffer_count(buffer_collection_info_.buffer_count); + configure_args.set_buffer_count(buffer_collection_info_.buffers().size()); configure_args.set_size({width_, height_}); fuchsia::ui::composition::ScreenCapture_Configure_Result configure_result; @@ -359,11 +379,13 @@ void ScreenCapturerFuchsia::SetupBuffers() { // onto a pointer stored in virtual_memory_mapped_addrs_ which we can use to // access this data. uint32_t virt_mem_bytes = - buffer_collection_info_.settings.buffer_settings.size_bytes; + buffer_collection_info_.settings().buffer_settings().size_bytes(); RTC_DCHECK(virt_mem_bytes > 0); for (uint32_t buffer_index = 0; - buffer_index < buffer_collection_info_.buffer_count; buffer_index++) { - const zx::vmo& virt_mem = buffer_collection_info_.buffers[buffer_index].vmo; + buffer_index < buffer_collection_info_.buffers().size(); + buffer_index++) { + const zx::vmo& virt_mem = + buffer_collection_info_.buffers()[buffer_index].vmo(); virtual_memory_mapped_addrs_[buffer_index] = nullptr; auto status = zx::vmar::root_self()->map( ZX_VM_PERM_READ, /*vmar_offset*/ 0, virt_mem, @@ -379,10 +401,10 @@ void ScreenCapturerFuchsia::SetupBuffers() { } uint32_t ScreenCapturerFuchsia::GetPixelsPerRow( - const fuchsia::sysmem::ImageFormatConstraints& constraints) { + const fuchsia::sysmem2::ImageFormatConstraints& constraints) { uint32_t stride = RoundUpToMultiple( - std::max(constraints.min_bytes_per_row, width_ * kFuchsiaBytesPerPixel), - constraints.bytes_per_row_divisor); + std::max(constraints.min_bytes_per_row(), width_ * kFuchsiaBytesPerPixel), + constraints.bytes_per_row_divisor()); uint32_t pixels_per_row = stride / kFuchsiaBytesPerPixel; return pixels_per_row; diff --git a/modules/desktop_capture/screen_capturer_fuchsia.h b/modules/desktop_capture/screen_capturer_fuchsia.h index 6e0f87cc58..614da82e87 100644 --- a/modules/desktop_capture/screen_capturer_fuchsia.h +++ b/modules/desktop_capture/screen_capturer_fuchsia.h @@ -11,7 +11,7 @@ #ifndef MODULES_DESKTOP_CAPTURE_SCREEN_CAPTURER_FUCHSIA_H_ #define MODULES_DESKTOP_CAPTURE_SCREEN_CAPTURER_FUCHSIA_H_ -#include +#include #include #include @@ -36,19 +36,19 @@ class ScreenCapturerFuchsia final : public DesktopCapturer { bool SelectSource(SourceId id) override; private: - fuchsia::sysmem::BufferCollectionConstraints GetBufferConstraints(); + fuchsia::sysmem2::BufferCollectionConstraints GetBufferConstraints(); void SetupBuffers(); uint32_t 
GetPixelsPerRow( - const fuchsia::sysmem::ImageFormatConstraints& constraints); + const fuchsia::sysmem2::ImageFormatConstraints& constraints); Callback* callback_ = nullptr; std::unique_ptr component_context_; - fuchsia::sysmem::AllocatorSyncPtr sysmem_allocator_; + fuchsia::sysmem2::AllocatorSyncPtr sysmem_allocator_; fuchsia::ui::composition::AllocatorSyncPtr flatland_allocator_; fuchsia::ui::composition::ScreenCaptureSyncPtr screen_capture_; - fuchsia::sysmem::BufferCollectionSyncPtr collection_; - fuchsia::sysmem::BufferCollectionInfo_2 buffer_collection_info_; + fuchsia::sysmem2::BufferCollectionSyncPtr collection_; + fuchsia::sysmem2::BufferCollectionInfo buffer_collection_info_; std::unordered_map virtual_memory_mapped_addrs_; bool fatal_error_; diff --git a/modules/desktop_capture/screen_capturer_integration_test.cc b/modules/desktop_capture/screen_capturer_integration_test.cc index 13170bc288..76e010281f 100644 --- a/modules/desktop_capture/screen_capturer_integration_test.cc +++ b/modules/desktop_capture/screen_capturer_integration_test.cc @@ -11,22 +11,26 @@ #include #include +#include #include #include // TODO(zijiehe): Remove once flaky has been resolved. #include +#include #include +#include -// TODO(zijiehe): Remove once flaky has been resolved. +#include "api/array_view.h" #include "modules/desktop_capture/desktop_capture_options.h" #include "modules/desktop_capture/desktop_capturer.h" #include "modules/desktop_capture/desktop_frame.h" +#include "modules/desktop_capture/desktop_geometry.h" #include "modules/desktop_capture/desktop_region.h" #include "modules/desktop_capture/mock_desktop_capturer_callback.h" #include "modules/desktop_capture/rgba_color.h" #include "modules/desktop_capture/screen_drawer.h" +#include "rtc_base/base64.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/third_party/base64/base64.h" #include "test/gmock.h" #include "test/gtest.h" @@ -209,14 +213,13 @@ class ScreenCapturerIntegrationTest : public ::testing::Test { drawer->MayDrawIncompleteShapes())) { capturers[j] = nullptr; succeeded_capturers++; - } - // The following else if statement is for debugging purpose only, which - // should be removed after flaky of ScreenCapturerIntegrationTest has - // been resolved. - else if (i == wait_capture_round - 1) { - std::string result; - rtc::Base64::EncodeFromArray( - frame->data(), frame->size().height() * frame->stride(), &result); + } else if (i == wait_capture_round - 1) { + // The else if statement is for debugging purpose only, + // which should be removed after flakiness of + // ScreenCapturerIntegrationTest has been resolved. + ArrayView frame_data( + frame->data(), frame->size().height() * frame->stride()); + std::string result = Base64Encode(frame_data); std::cout << frame->size().width() << " x " << frame->size().height() << std::endl; // Split the entire string (can be over 4M) into several lines to @@ -326,7 +329,7 @@ TEST_F(ScreenCapturerIntegrationTest, DISABLED_TwoDirectxCapturers) { TEST_F(ScreenCapturerIntegrationTest, DISABLED_MaybeCaptureUpdatedRegionWithDirectxCapturer) { - if (rtc::rtc_win::GetVersion() < rtc::rtc_win::Version::VERSION_WIN8) { + if (rtc_win::GetVersion() < rtc_win::Version::VERSION_WIN8) { // ScreenCapturerWinGdi randomly returns blank screen, the root cause is // still unknown. Bug, // https://bugs.chromium.org/p/webrtc/issues/detail?id=6843. 
diff --git a/modules/desktop_capture/screen_capturer_mac_unittest.cc b/modules/desktop_capture/screen_capturer_mac_unittest.cc
index 96e844066a..7adbd8d3b7 100644
--- a/modules/desktop_capture/screen_capturer_mac_unittest.cc
+++ b/modules/desktop_capture/screen_capturer_mac_unittest.cc
@@ -10,8 +10,9 @@
 
 #include
 
+#include
 #include
-#include
+#include
 
 #include "modules/desktop_capture/desktop_capture_options.h"
 #include "modules/desktop_capture/desktop_capturer.h"
@@ -24,7 +25,8 @@
 
 using ::testing::_;
 using ::testing::AnyNumber;
-using ::testing::Return;
+using ::testing::AnyOf;
+using ::testing::InSequence;
 
 namespace webrtc {
@@ -49,6 +51,18 @@ class ScreenCapturerMacTest : public ::testing::Test {
   MockDesktopCapturerCallback callback_;
 };
 
+class ScreenCapturerSckTest : public ScreenCapturerMacTest {
+ protected:
+  void SetUp() override {
+    auto options = DesktopCaptureOptions::CreateDefault();
+    options.set_allow_sck_capturer(true);
+    capturer_ = DesktopCapturer::CreateScreenCapturer(options);
+  }
+
+  std::unique_ptr<DesktopCapturer> capturer_;
+  MockDesktopCapturerCallback callback_;
+};
+
 void ScreenCapturerMacTest::CaptureDoneCallback1(
     DesktopCapturer::Result result,
     std::unique_ptr<DesktopFrame>* frame) {
@@ -77,7 +91,11 @@ void ScreenCapturerMacTest::CaptureDoneCallback2(
   EXPECT_TRUE((*frame)->data() != NULL);
   // Depending on the capture method, the screen may be flipped or not, so
   // the stride may be positive or negative.
-  EXPECT_EQ(static_cast<int>(sizeof(uint32_t) * width),
+  // The stride may in theory be larger than the width due to alignment, but in
+  // other cases, like window capture, the stride normally matches the monitor
+  // resolution whereas the width matches the window region on said monitor.
+  // Make no assumptions.
+  EXPECT_LE(static_cast<int>(sizeof(uint32_t) * width),
             abs((*frame)->stride()));
 }
 
@@ -98,4 +116,53 @@ TEST_F(ScreenCapturerMacTest, Capture) {
   capturer_->CaptureFrame();
 }
 
+TEST_F(ScreenCapturerSckTest, Capture) {
+  if (!CGPreflightScreenCaptureAccess()) {
+    GTEST_SKIP()
+        << "ScreenCapturerSckTest needs TCC ScreenCapture authorization";
+  }
+
+  std::atomic<bool> done{false};
+  std::atomic<DesktopCapturer::Result> result{
+      DesktopCapturer::Result::ERROR_TEMPORARY};
+  InSequence s;
+  EXPECT_CALL(callback_,
+              OnCaptureResultPtr(DesktopCapturer::Result::ERROR_TEMPORARY, _))
+      .Times(AnyNumber());
+  EXPECT_CALL(callback_,
+              OnCaptureResultPtr(AnyOf(DesktopCapturer::Result::ERROR_PERMANENT,
+                                       DesktopCapturer::Result::SUCCESS),
+                                 _))
+      .WillOnce([this, &result](DesktopCapturer::Result res,
+                                std::unique_ptr<DesktopFrame>* frame) {
+        result = res;
+        if (res == DesktopCapturer::Result::SUCCESS) {
+          CaptureDoneCallback1(res, frame);
+        }
+      });
+  SCOPED_TRACE("");
+  capturer_->Start(&callback_);
+
+  while (result == DesktopCapturer::Result::ERROR_TEMPORARY) {
+    // Check that we get an initial full-screen updated.
+    capturer_->CaptureFrame();
+    std::this_thread::sleep_for(std::chrono::milliseconds(1));
+  }
+  ASSERT_NE(result, DesktopCapturer::Result::ERROR_PERMANENT);
+
+  EXPECT_CALL(callback_,
+              OnCaptureResultPtr(DesktopCapturer::Result::SUCCESS, _))
+      .Times(1)
+      .WillOnce([this, &done](auto res, auto frame) {
+        CaptureDoneCallback2(res, frame);
+        done = true;
+      });
+
+  while (!done) {
+    // Check that we get an initial full-screen updated.
+ capturer_->CaptureFrame(); + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + } +} + } // namespace webrtc diff --git a/modules/desktop_capture/screen_drawer_linux.cc b/modules/desktop_capture/screen_drawer_linux.cc index fce036b4aa..55b9570d9c 100644 --- a/modules/desktop_capture/screen_drawer_linux.cc +++ b/modules/desktop_capture/screen_drawer_linux.cc @@ -47,7 +47,7 @@ class ScreenDrawerLinux : public ScreenDrawer { // windows or shadow effect. void BringToFront(); - rtc::scoped_refptr display_; + scoped_refptr display_; int screen_num_; DesktopRect rect_; Window window_; diff --git a/modules/desktop_capture/screen_drawer_unittest.cc b/modules/desktop_capture/screen_drawer_unittest.cc index 584770dbf8..f674355eb0 100644 --- a/modules/desktop_capture/screen_drawer_unittest.cc +++ b/modules/desktop_capture/screen_drawer_unittest.cc @@ -33,7 +33,7 @@ namespace webrtc { namespace { void TestScreenDrawerLock( - rtc::FunctionView()> ctor) { + FunctionView()> ctor) { constexpr int kLockDurationMs = 100; std::atomic created(false); @@ -43,7 +43,7 @@ void TestScreenDrawerLock( public: Task(std::atomic* created, const std::atomic& ready, - rtc::FunctionView()> ctor) + FunctionView()> ctor) : created_(created), ready_(ready), ctor_(ctor) {} ~Task() = default; @@ -60,24 +60,24 @@ void TestScreenDrawerLock( // it's still possible the second lock won't be created before the // following sleep has been finished, the possibility will be // significantly reduced. - const int64_t current_ms = rtc::TimeMillis(); + const int64_t current_ms = TimeMillis(); // SleepMs() may return early. See // https://cs.chromium.org/chromium/src/third_party/webrtc/system_wrappers/include/sleep.h?rcl=4a604c80cecce18aff6fc5e16296d04675312d83&l=20 // But we need to ensure at least 100 ms has been passed before unlocking // `lock`. - while (rtc::TimeMillis() - current_ms < kLockDurationMs) { - SleepMs(kLockDurationMs - (rtc::TimeMillis() - current_ms)); + while (TimeMillis() - current_ms < kLockDurationMs) { + SleepMs(kLockDurationMs - (TimeMillis() - current_ms)); } } private: std::atomic* const created_; const std::atomic& ready_; - const rtc::FunctionView()> ctor_; + const FunctionView()> ctor_; } task(&created, ready, ctor); - auto lock_thread = rtc::PlatformThread::SpawnJoinable( - [&task] { task.RunTask(); }, "lock_thread"); + auto lock_thread = + PlatformThread::SpawnJoinable([&task] { task.RunTask(); }, "lock_thread"); // Wait for the first lock in Task::RunTask() to be created. // TODO(zijiehe): Find a better solution to wait for the creation of the first @@ -87,13 +87,13 @@ void TestScreenDrawerLock( SleepMs(1); } - const int64_t start_ms = rtc::TimeMillis(); + const int64_t start_ms = TimeMillis(); ready.store(true); // This is unlikely to fail, but just in case current thread is too laggy and // cause the SleepMs() in RunTask() to finish before we creating another lock. - ASSERT_GT(kLockDurationMs, rtc::TimeMillis() - start_ms); + ASSERT_GT(kLockDurationMs, TimeMillis() - start_ms); ctor(); - ASSERT_LE(kLockDurationMs, rtc::TimeMillis() - start_ms); + ASSERT_LE(kLockDurationMs, TimeMillis() - start_ms); } } // namespace @@ -118,7 +118,7 @@ TEST(ScreenDrawerTest, DISABLED_DrawRectangles) { } DesktopRect rect = drawer->DrawableRegion(); - Random random(rtc::TimeMicros()); + Random random(TimeMicros()); for (int i = 0; i < 100; i++) { // Make sure we at least draw one pixel. 
int left = random.Rand(rect.left(), rect.right() - 2); diff --git a/modules/desktop_capture/shared_desktop_frame.cc b/modules/desktop_capture/shared_desktop_frame.cc index e374038cbc..538e27b6dd 100644 --- a/modules/desktop_capture/shared_desktop_frame.cc +++ b/modules/desktop_capture/shared_desktop_frame.cc @@ -22,7 +22,7 @@ SharedDesktopFrame::~SharedDesktopFrame() {} std::unique_ptr SharedDesktopFrame::Wrap( std::unique_ptr desktop_frame) { return std::unique_ptr(new SharedDesktopFrame( - rtc::scoped_refptr(new Core(std::move(desktop_frame))))); + scoped_refptr(new Core(std::move(desktop_frame))))); } SharedDesktopFrame* SharedDesktopFrame::Wrap(DesktopFrame* desktop_frame) { @@ -47,7 +47,7 @@ bool SharedDesktopFrame::IsShared() { return !core_->HasOneRef(); } -SharedDesktopFrame::SharedDesktopFrame(rtc::scoped_refptr core) +SharedDesktopFrame::SharedDesktopFrame(scoped_refptr core) : DesktopFrame((*core)->size(), (*core)->stride(), (*core)->data(), diff --git a/modules/desktop_capture/shared_desktop_frame.h b/modules/desktop_capture/shared_desktop_frame.h index c6f52247f4..15b90148e5 100644 --- a/modules/desktop_capture/shared_desktop_frame.h +++ b/modules/desktop_capture/shared_desktop_frame.h @@ -53,11 +53,11 @@ class RTC_EXPORT SharedDesktopFrame final : public DesktopFrame { bool IsShared(); private: - typedef rtc::FinalRefCountedObject> Core; + typedef FinalRefCountedObject> Core; - SharedDesktopFrame(rtc::scoped_refptr core); + SharedDesktopFrame(scoped_refptr core); - const rtc::scoped_refptr core_; + const scoped_refptr core_; }; } // namespace webrtc diff --git a/modules/desktop_capture/win/OWNERS b/modules/desktop_capture/win/OWNERS new file mode 100644 index 0000000000..d312ad61cf --- /dev/null +++ b/modules/desktop_capture/win/OWNERS @@ -0,0 +1,2 @@ +alcooper@chromium.org +ilnik@webrtc.org diff --git a/modules/desktop_capture/win/cursor.cc b/modules/desktop_capture/win/cursor.cc index 1d645098e2..82fe43ee0c 100644 --- a/modules/desktop_capture/win/cursor.cc +++ b/modules/desktop_capture/win/cursor.cc @@ -28,13 +28,13 @@ namespace { #define RGBA(r, g, b, a) \ ((((a) << 24) & 0xff000000) | (((b) << 16) & 0xff0000) | \ - (((g) << 8) & 0xff00) | ((r)&0xff)) + (((g) << 8) & 0xff00) | ((r) & 0xff)) #else // !defined(WEBRTC_ARCH_LITTLE_ENDIAN) #define RGBA(r, g, b, a) \ ((((r) << 24) & 0xff000000) | (((g) << 16) & 0xff0000) | \ - (((b) << 8) & 0xff00) | ((a)&0xff)) + (((b) << 8) & 0xff00) | ((a) & 0xff)) #endif // !defined(WEBRTC_ARCH_LITTLE_ENDIAN) diff --git a/modules/desktop_capture/win/desktop.cc b/modules/desktop_capture/win/desktop.cc index 4a671dd9ae..b062ee0cc1 100644 --- a/modules/desktop_capture/win/desktop.cc +++ b/modules/desktop_capture/win/desktop.cc @@ -78,8 +78,8 @@ Desktop* Desktop::GetDesktop(const WCHAR* desktop_name) { DESKTOP_SWITCHDESKTOP | GENERIC_WRITE; HDESK desktop = OpenDesktopW(desktop_name, 0, FALSE, desired_access); if (desktop == NULL) { - RTC_LOG(LS_ERROR) << "Failed to open the desktop '" - << rtc::ToUtf8(desktop_name) << "': " << GetLastError(); + RTC_LOG(LS_ERROR) << "Failed to open the desktop '" << ToUtf8(desktop_name) + << "': " << GetLastError(); return NULL; } diff --git a/modules/desktop_capture/win/desktop_capture_utils.cc b/modules/desktop_capture/win/desktop_capture_utils.cc index 476ddc4aba..4acd31c5cb 100644 --- a/modules/desktop_capture/win/desktop_capture_utils.cc +++ b/modules/desktop_capture/win/desktop_capture_utils.cc @@ -19,7 +19,7 @@ namespace utils { // Generates a human-readable string from a COM error. 
std::string ComErrorToString(const _com_error& error) { char buffer[1024]; - rtc::SimpleStringBuilder string_builder(buffer); + webrtc::SimpleStringBuilder string_builder(buffer); // Use _bstr_t to simplify the wchar to char conversion for ErrorMessage(). _bstr_t error_message(error.ErrorMessage()); string_builder.AppendFormat("HRESULT: 0x%08X, Message: %s", error.Error(), diff --git a/modules/desktop_capture/win/dxgi_adapter_duplicator.cc b/modules/desktop_capture/win/dxgi_adapter_duplicator.cc index 88ec4e25bf..d3d7ea03a3 100644 --- a/modules/desktop_capture/win/dxgi_adapter_duplicator.cc +++ b/modules/desktop_capture/win/dxgi_adapter_duplicator.cc @@ -148,6 +148,14 @@ bool DxgiAdapterDuplicator::DuplicateMonitor(Context* context, DesktopVector(), target); } +std::optional DxgiAdapterDuplicator::GetDeviceScaleFactor( + int screen_id) const { + if (screen_id < 0 || static_cast(screen_id) >= duplicators_.size()) { + return std::nullopt; + } + return duplicators_[screen_id].device_scale_factor(); +} + DesktopRect DxgiAdapterDuplicator::ScreenRect(int id) const { RTC_DCHECK_GE(id, 0); RTC_DCHECK_LT(id, duplicators_.size()); @@ -164,12 +172,15 @@ int DxgiAdapterDuplicator::screen_count() const { return static_cast(duplicators_.size()); } -int64_t DxgiAdapterDuplicator::GetNumFramesCaptured() const { +int64_t DxgiAdapterDuplicator::GetNumFramesCaptured(int monitor_id) const { int64_t min = INT64_MAX; - for (const auto& duplicator : duplicators_) { - min = std::min(min, duplicator.num_frames_captured()); + if (monitor_id < 0) { + for (const auto& duplicator : duplicators_) { + min = std::min(min, duplicator.num_frames_captured()); + } + } else if (static_cast(monitor_id) < duplicators_.size()) { + min = duplicators_[monitor_id].num_frames_captured(); } - return min; } diff --git a/modules/desktop_capture/win/dxgi_adapter_duplicator.h b/modules/desktop_capture/win/dxgi_adapter_duplicator.h index 5931b51f9e..4779a3bfb9 100644 --- a/modules/desktop_capture/win/dxgi_adapter_duplicator.h +++ b/modules/desktop_capture/win/dxgi_adapter_duplicator.h @@ -55,6 +55,11 @@ class DxgiAdapterDuplicator { // Returns desktop rect covered by this DxgiAdapterDuplicator. DesktopRect desktop_rect() const { return desktop_rect_; } + // Returns the device scale factor of screen identified by `screen_id`, which + // is owned by this DxgiAdapterDuplicator. `screen_id` should be between [0, + // screen_count()). + std::optional GetDeviceScaleFactor(int screen_id) const; + // Returns the size of one screen owned by this DxgiAdapterDuplicator. `id` // should be between [0, screen_count()). DesktopRect ScreenRect(int id) const; @@ -73,7 +78,7 @@ class DxgiAdapterDuplicator { void Unregister(const Context* const context); // The minimum num_frames_captured() returned by `duplicators_`. - int64_t GetNumFramesCaptured() const; + int64_t GetNumFramesCaptured(int monitor_id) const; // Moves `desktop_rect_` and all underlying `duplicators_`. See // DxgiDuplicatorController::TranslateRect(). 
diff --git a/modules/desktop_capture/win/dxgi_duplicator_controller.cc b/modules/desktop_capture/win/dxgi_duplicator_controller.cc index 973aa3fd99..bee0ba72f2 100644 --- a/modules/desktop_capture/win/dxgi_duplicator_controller.cc +++ b/modules/desktop_capture/win/dxgi_duplicator_controller.cc @@ -67,12 +67,12 @@ std::string DxgiDuplicatorController::ResultName( } // static -rtc::scoped_refptr +webrtc::scoped_refptr DxgiDuplicatorController::Instance() { // The static instance won't be deleted to ensure it can be used by other // threads even during program exiting. static DxgiDuplicatorController* instance = new DxgiDuplicatorController(); - return rtc::scoped_refptr(instance); + return webrtc::scoped_refptr(instance); } // static @@ -194,6 +194,7 @@ DxgiDuplicatorController::Result DxgiDuplicatorController::DoDuplicate( } frame->frame()->mutable_updated_region()->Clear(); + frame->frame()->set_device_scale_factor(GetDeviceScaleFactor(monitor_id)); if (DoDuplicateUnlocked(frame->context(), monitor_id, frame->frame())) { succeeded_duplications_++; @@ -331,7 +332,7 @@ bool DxgiDuplicatorController::DoDuplicateUnlocked(Context* context, SharedDesktopFrame* target) { Setup(context); - if (!EnsureFrameCaptured(context, target)) { + if (!EnsureFrameCaptured(context, monitor_id, target)) { return false; } @@ -381,12 +382,21 @@ bool DxgiDuplicatorController::DoDuplicateOne(Context* context, return false; } -int64_t DxgiDuplicatorController::GetNumFramesCaptured() const { +int64_t DxgiDuplicatorController::GetNumFramesCaptured(int monitor_id) const { int64_t min = INT64_MAX; + if (monitor_id < 0) { + for (const auto& duplicator : duplicators_) { + min = std::min(min, duplicator.GetNumFramesCaptured(monitor_id)); + } + return min; + } for (const auto& duplicator : duplicators_) { - min = std::min(min, duplicator.GetNumFramesCaptured()); + if (monitor_id >= duplicator.screen_count()) { + monitor_id -= duplicator.screen_count(); + } else { + return duplicator.GetNumFramesCaptured(monitor_id); + } } - return min; } @@ -394,6 +404,21 @@ DesktopSize DxgiDuplicatorController::desktop_size() const { return desktop_rect_.size(); } +std::optional DxgiDuplicatorController::GetDeviceScaleFactor( + int monitor_id) const { + if (monitor_id < 0) { + return std::nullopt; + } + for (const auto& duplicator : duplicators_) { + if (monitor_id >= duplicator.screen_count()) { + monitor_id -= duplicator.screen_count(); + } else { + return duplicator.GetDeviceScaleFactor(monitor_id); + } + } + return std::nullopt; +} + DesktopRect DxgiDuplicatorController::ScreenRect(int id) const { RTC_DCHECK(id >= 0); for (size_t i = 0; i < duplicators_.size(); i++) { @@ -434,6 +459,7 @@ DesktopSize DxgiDuplicatorController::SelectedDesktopSize( } bool DxgiDuplicatorController::EnsureFrameCaptured(Context* context, + int monitor_id, SharedDesktopFrame* target) { // On a modern system, the FPS / monitor refresh rate is usually larger than // or equal to 60. So 17 milliseconds is enough to capture at least one frame. @@ -448,7 +474,7 @@ bool DxgiDuplicatorController::EnsureFrameCaptured(Context* context, // called. 500 milliseconds should be enough for ~30 frames. const int64_t timeout_ms = 500; - if (GetNumFramesCaptured() == 0 && !IsConsoleSession()) { + if (GetNumFramesCaptured(monitor_id) == 0 && !IsConsoleSession()) { // When capturing a console session, waiting for a single frame is // sufficient to ensure that DXGI output duplication is working. 
When the // session is not attached to the console, it has been observed that DXGI @@ -460,35 +486,42 @@ bool DxgiDuplicatorController::EnsureFrameCaptured(Context* context, frames_to_skip = 5; } - if (GetNumFramesCaptured() >= frames_to_skip) { + if (GetNumFramesCaptured(monitor_id) >= frames_to_skip) { return true; } std::unique_ptr fallback_frame; SharedDesktopFrame* shared_frame = nullptr; - if (target->size().width() >= desktop_size().width() && - target->size().height() >= desktop_size().height()) { - // `target` is large enough to cover entire screen, we do not need to use - // `fallback_frame`. + DesktopSize selected_size = SelectedDesktopSize(monitor_id); + if (target->size().width() >= selected_size.width() && + target->size().height() >= selected_size.height()) { + // `target` is large enough to cover the currently captured screen, + // we do not need to use `fallback_frame`. shared_frame = target; } else { fallback_frame = SharedDesktopFrame::Wrap( - std::unique_ptr(new BasicDesktopFrame(desktop_size()))); + std::unique_ptr(new BasicDesktopFrame(selected_size))); shared_frame = fallback_frame.get(); } - const int64_t start_ms = rtc::TimeMillis(); - while (GetNumFramesCaptured() < frames_to_skip) { - if (!DoDuplicateAll(context, shared_frame)) { - return false; + const int64_t start_ms = webrtc::TimeMillis(); + while (GetNumFramesCaptured(monitor_id) < frames_to_skip) { + if (monitor_id < 0) { + if (!DoDuplicateAll(context, shared_frame)) { + return false; + } + } else { + if (!DoDuplicateOne(context, monitor_id, shared_frame)) { + return false; + } } // Calling DoDuplicateAll() may change the number of frames captured. - if (GetNumFramesCaptured() >= frames_to_skip) { + if (GetNumFramesCaptured(monitor_id) >= frames_to_skip) { break; } - if (rtc::TimeMillis() - start_ms > timeout_ms) { + if (webrtc::TimeMillis() - start_ms > timeout_ms) { RTC_LOG(LS_ERROR) << "Failed to capture " << frames_to_skip << " frames " "within " @@ -500,6 +533,13 @@ bool DxgiDuplicatorController::EnsureFrameCaptured(Context* context, // ensure the video adapter has time to update the screen. webrtc::SleepMs(ms_per_frame); } + // When capturing multiple monitors, we need to update the captured region to + // prevent flickering by re-setting context. See + // https://crbug.com/webrtc/15718 for details. + if (shared_frame != target) { + context->Reset(); + Setup(context); + } return true; } diff --git a/modules/desktop_capture/win/dxgi_duplicator_controller.h b/modules/desktop_capture/win/dxgi_duplicator_controller.h index 2b1e0ab041..a63bafbf6e 100644 --- a/modules/desktop_capture/win/dxgi_duplicator_controller.h +++ b/modules/desktop_capture/win/dxgi_duplicator_controller.h @@ -82,7 +82,7 @@ class RTC_EXPORT DxgiDuplicatorController { static std::string ResultName(Result result); // Returns the singleton instance of DxgiDuplicatorController. - static rtc::scoped_refptr Instance(); + static webrtc::scoped_refptr Instance(); // See ScreenCapturerWinDirectx::IsCurrentSessionSupported(). static bool IsCurrentSessionSupported(); @@ -132,7 +132,7 @@ class RTC_EXPORT DxgiDuplicatorController { // scoped_refptr accesses private AddRef() and // Release() functions. - friend class rtc::scoped_refptr; + friend class webrtc::scoped_refptr; // A private constructor to ensure consumers to use // DxgiDuplicatorController::Instance(). 
@@ -198,12 +198,21 @@ class RTC_EXPORT DxgiDuplicatorController { SharedDesktopFrame* target) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - // The minimum GetNumFramesCaptured() returned by `duplicators_`. - int64_t GetNumFramesCaptured() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + // When monitor_id is kFullDesktopScreenId, meaning capturing all screens, + // the minimum GetNumFramesCaptured(int monitor_id) returned by duplicators_. + int64_t GetNumFramesCaptured(int monitor_id) const + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Returns a DesktopSize to cover entire `desktop_rect_`. DesktopSize desktop_size() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + // Returns the device scale factor of one screen. `monitor_id` should be >= 0. + // If system does not support DXGI based capturer, or `monitor_id` is greater + // than the total screen count of all the Duplicators, this function returns + // std::nullopt. + std::optional GetDeviceScaleFactor(int monitor_id) const + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + // Returns the size of one screen. `id` should be >= 0. If system does not // support DXGI based capturer, or `id` is greater than the total screen count // of all the Duplicators, this function returns an empty DesktopRect. @@ -224,7 +233,9 @@ class RTC_EXPORT DxgiDuplicatorController { // GetNumFramesCaptured() has never reached the requirement. // According to http://crbug.com/682112, dxgi capturer returns a black frame // during first several capture attempts. - bool EnsureFrameCaptured(Context* context, SharedDesktopFrame* target) + bool EnsureFrameCaptured(Context* context, + int monitor_id, + SharedDesktopFrame* target) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Moves `desktop_rect_` and all underlying `duplicators_`, putting top left diff --git a/modules/desktop_capture/win/dxgi_frame.cc b/modules/desktop_capture/win/dxgi_frame.cc index 13d5b4b62e..55495aec26 100644 --- a/modules/desktop_capture/win/dxgi_frame.cc +++ b/modules/desktop_capture/win/dxgi_frame.cc @@ -57,7 +57,6 @@ bool DxgiFrame::Prepare(DesktopSize size, DesktopCapturer::SourceId source_id) { } else { frame.reset(new BasicDesktopFrame(size)); } - frame_ = SharedDesktopFrame::Wrap(std::move(frame)); } diff --git a/modules/desktop_capture/win/dxgi_output_duplicator.cc b/modules/desktop_capture/win/dxgi_output_duplicator.cc index 9c64125b4e..706265a7ed 100644 --- a/modules/desktop_capture/win/dxgi_output_duplicator.cc +++ b/modules/desktop_capture/win/dxgi_output_duplicator.cc @@ -67,8 +67,9 @@ DxgiOutputDuplicator::DxgiOutputDuplicator(const D3dDevice& device, const DXGI_OUTPUT_DESC& desc) : device_(device), output_(output), - device_name_(rtc::ToUtf8(desc.DeviceName)), - desktop_rect_(RECTToDesktopRect(desc.DesktopCoordinates)) { + device_name_(webrtc::ToUtf8(desc.DeviceName)), + desktop_rect_(RECTToDesktopRect(desc.DesktopCoordinates)), + monitor_(desc.Monitor) { RTC_DCHECK(output_); RTC_DCHECK(!desktop_rect_.is_empty()); RTC_DCHECK_GT(desktop_rect_.width(), 0); @@ -112,9 +113,13 @@ bool DxgiOutputDuplicator::DuplicateOutput() { memset(&desc_, 0, sizeof(desc_)); duplication_->GetDesc(&desc_); - if (desc_.ModeDesc.Format != DXGI_FORMAT_B8G8R8A8_UNORM) { - RTC_LOG(LS_ERROR) << "IDXGIDuplicateOutput does not use RGBA (8 bit) " - << "format, which is required by downstream components, " + + // DXGI_FORMAT_R16G16B16A16_FLOAT is returned for HDR monitor, + // DXGI_FORMAT_B8G8R8A8_UNORM for others. 
+ if ((desc_.ModeDesc.Format != DXGI_FORMAT_B8G8R8A8_UNORM) && + (desc_.ModeDesc.Format != DXGI_FORMAT_R16G16B16A16_FLOAT)) { + RTC_LOG(LS_ERROR) << "IDXGIDuplicateOutput does not use RGBA (8, 16 bit)" + << "which is required by downstream components" << "format is " << desc_.ModeDesc.Format; return false; } @@ -414,6 +419,17 @@ int64_t DxgiOutputDuplicator::num_frames_captured() const { return num_frames_captured_; } +std::optional DxgiOutputDuplicator::device_scale_factor() const { + DEVICE_SCALE_FACTOR device_scale_factor = DEVICE_SCALE_FACTOR_INVALID; + HRESULT hr = GetScaleFactorForMonitor(monitor_, &device_scale_factor); + if (FAILED(hr)) { + RTC_LOG(LS_ERROR) << "Failed to get scale factor for monitor: " << hr; + return std::nullopt; + } + RTC_DCHECK(device_scale_factor != DEVICE_SCALE_FACTOR_INVALID); + return static_cast(device_scale_factor) / 100.0f; +} + void DxgiOutputDuplicator::TranslateRect(const DesktopVector& position) { desktop_rect_.Translate(position); RTC_DCHECK_GE(desktop_rect_.left(), 0); diff --git a/modules/desktop_capture/win/dxgi_output_duplicator.h b/modules/desktop_capture/win/dxgi_output_duplicator.h index a4ce035d8b..e2a87751d6 100644 --- a/modules/desktop_capture/win/dxgi_output_duplicator.h +++ b/modules/desktop_capture/win/dxgi_output_duplicator.h @@ -14,6 +14,7 @@ #include #include #include +#include #include #include @@ -83,6 +84,10 @@ class DxgiOutputDuplicator { // How many frames have been captured by this DxigOutputDuplicator. int64_t num_frames_captured() const; + // Device scale factor of the monitor associated with this + // DxigOutputDuplicator. + std::optional device_scale_factor() const; + // Moves `desktop_rect_`. See DxgiDuplicatorController::TranslateRect(). void TranslateRect(const DesktopVector& position); @@ -127,6 +132,7 @@ class DxgiOutputDuplicator { const Microsoft::WRL::ComPtr output_; const std::string device_name_; DesktopRect desktop_rect_; + const HMONITOR monitor_; Microsoft::WRL::ComPtr duplication_; DXGI_OUTDUPL_DESC desc_; std::vector metadata_; diff --git a/modules/desktop_capture/win/full_screen_win_application_handler.cc b/modules/desktop_capture/win/full_screen_win_application_handler.cc index 4222dfc01e..88ab8f617e 100644 --- a/modules/desktop_capture/win/full_screen_win_application_handler.cc +++ b/modules/desktop_capture/win/full_screen_win_application_handler.cc @@ -55,7 +55,7 @@ std::string WindowText(HWND window) { size_t copied = ::GetWindowTextW(window, buffer.data(), buffer.size()); if (copied == 0) return std::string(); - return rtc::ToUtf8(buffer.data(), copied); + return webrtc::ToUtf8(buffer.data(), copied); } DWORD WindowProcessId(HWND window) { @@ -99,35 +99,37 @@ class FullScreenPowerPointHandler : public FullScreenApplicationHandler { DesktopCapturer::SourceId FindFullScreenWindow( const DesktopCapturer::SourceList& window_list, int64_t timestamp) const override { + if (!UseHeuristicFullscreenPowerPointWindows()) { + return 0; + } + if (window_list.empty()) return 0; HWND original_window = reinterpret_cast(GetSourceId()); - DWORD process_id = WindowProcessId(original_window); + if (GetWindowType(original_window) != WindowType::kEditor) + return 0; - DesktopCapturer::SourceList powerpoint_windows = - GetProcessWindows(window_list, process_id, original_window); + DesktopCapturer::SourceList powerpoint_windows = GetProcessWindows( + window_list, WindowProcessId(original_window), original_window); + // No relevant window with the same process id as the `original_window` was + // found. 
if (powerpoint_windows.empty()) return 0; - if (GetWindowType(original_window) != WindowType::kEditor) - return 0; - - const auto original_document = GetDocumentFromEditorTitle(original_window); - + const std::string original_document_title = + GetDocumentTitleFromEditor(original_window); for (const auto& source : powerpoint_windows) { HWND window = reinterpret_cast(source.id); - // Looking for slide show window for the same document - if (GetWindowType(window) != WindowType::kSlideShow || - GetDocumentFromSlideShowTitle(window) != original_document) { - continue; + // Looking for fullscreen slide show window for the corresponding editor + // document + if (GetWindowType(window) == WindowType::kSlideShow && + GetDocumentTitleFromSlideShow(window) == original_document_title) { + return source.id; } - - return source.id; } - return 0; } @@ -143,33 +145,41 @@ class FullScreenPowerPointHandler : public FullScreenApplicationHandler { return WindowType::kOther; } - constexpr static char kDocumentTitleSeparator[] = " - "; + constexpr static char kDocumentTitleSeparator = '-'; - std::string GetDocumentFromEditorTitle(HWND window) const { + // This function extracts the title from the editor. It needs to be + // updated everytime PowerPoint changes its editor title format. Currently, it + // supports editor title in the format "Window - Title - PowerPoint". + std::string GetDocumentTitleFromEditor(HWND window) const { std::string title = WindowText(window); - auto position = title.find(kDocumentTitleSeparator); - return std::string(absl::StripAsciiWhitespace( - absl::string_view(title).substr(0, position))); + return std::string( + absl::StripAsciiWhitespace(absl::string_view(title).substr( + 0, title.rfind(kDocumentTitleSeparator)))); } - std::string GetDocumentFromSlideShowTitle(HWND window) const { + // This function extracts the title from the slideshow when PowerPoint goes + // fullscreen. This function needs to be updated whenever PowerPoint changes + // its title format. Currently, it supports Fullscreen titles of the format + // "PowerPoint Slide Show - [Window - Title]" or "PowerPoint Slide Show - + // Window - Title". 
+ std::string GetDocumentTitleFromSlideShow(HWND window) const { std::string title = WindowText(window); - auto left_pos = title.find(kDocumentTitleSeparator); - auto right_pos = title.rfind(kDocumentTitleSeparator); - constexpr size_t kSeparatorLength = arraysize(kDocumentTitleSeparator) - 1; - if (left_pos == std::string::npos || right_pos == std::string::npos) - return title; + auto position = title.find(kDocumentTitleSeparator); + if (position != std::string::npos) { + title = std::string(absl::StripAsciiWhitespace( + absl::string_view(title).substr(position + 1, std::wstring::npos))); + } - if (right_pos > left_pos + kSeparatorLength) { - auto result_len = right_pos - left_pos - kSeparatorLength; - auto document = absl::string_view(title).substr( - left_pos + kSeparatorLength, result_len); - return std::string(absl::StripAsciiWhitespace(document)); - } else { - auto document = absl::string_view(title).substr( - left_pos + kSeparatorLength, std::wstring::npos); - return std::string(absl::StripAsciiWhitespace(document)); + auto left_bracket_pos = title.find("["); + auto right_bracket_pos = title.rfind("]"); + if (left_bracket_pos == std::string::npos || + right_bracket_pos == std::string::npos || + right_bracket_pos <= left_bracket_pos) { + return title; } + + return std::string(absl::StripAsciiWhitespace(title.substr( + left_bracket_pos + 1, right_bracket_pos - left_bracket_pos - 1))); } bool IsEditorWindow(HWND window) const { diff --git a/modules/desktop_capture/win/screen_capture_utils.cc b/modules/desktop_capture/win/screen_capture_utils.cc index 3745e9cba5..30120db3f4 100644 --- a/modules/desktop_capture/win/screen_capture_utils.cc +++ b/modules/desktop_capture/win/screen_capture_utils.cc @@ -54,7 +54,7 @@ bool GetScreenList(DesktopCapturer::SourceList* screens, screens->push_back({device_index, std::string()}); if (device_names) { - device_names->push_back(rtc::ToUtf8(device.DeviceName)); + device_names->push_back(webrtc::ToUtf8(device.DeviceName)); } } return true; diff --git a/modules/desktop_capture/win/screen_capturer_win_directx.cc b/modules/desktop_capture/win/screen_capturer_win_directx.cc index 0ed2e12423..9bf22d8ac6 100644 --- a/modules/desktop_capture/win/screen_capturer_win_directx.cc +++ b/modules/desktop_capture/win/screen_capturer_win_directx.cc @@ -129,7 +129,7 @@ void ScreenCapturerWinDirectx::CaptureFrame() { RTC_DCHECK(callback_); TRACE_EVENT0("webrtc", "ScreenCapturerWinDirectx::CaptureFrame"); - int64_t capture_start_time_nanos = rtc::TimeNanos(); + int64_t capture_start_time_nanos = webrtc::TimeNanos(); // Note that the [] operator will create the ScreenCaptureFrameQueue if it // doesn't exist, so this is safe. 
@@ -190,8 +190,8 @@ void ScreenCapturerWinDirectx::CaptureFrame() { std::unique_ptr frame = frames.current_frame()->frame()->Share(); - int capture_time_ms = (rtc::TimeNanos() - capture_start_time_nanos) / - rtc::kNumNanosecsPerMillisec; + int capture_time_ms = (webrtc::TimeNanos() - capture_start_time_nanos) / + webrtc::kNumNanosecsPerMillisec; RTC_HISTOGRAM_COUNTS_1000( "WebRTC.DesktopCapture.Win.DirectXCapturerFrameTime", capture_time_ms); diff --git a/modules/desktop_capture/win/screen_capturer_win_directx.h b/modules/desktop_capture/win/screen_capturer_win_directx.h index a231643c33..ea1deb9293 100644 --- a/modules/desktop_capture/win/screen_capturer_win_directx.h +++ b/modules/desktop_capture/win/screen_capturer_win_directx.h @@ -88,7 +88,7 @@ class RTC_EXPORT ScreenCapturerWinDirectx : public DesktopCapturer { bool SelectSource(SourceId id) override; private: - const rtc::scoped_refptr controller_; + const webrtc::scoped_refptr controller_; DesktopCaptureOptions options_; // The underlying DxgiDuplicators may retain a reference to the frames that diff --git a/modules/desktop_capture/win/screen_capturer_win_gdi.cc b/modules/desktop_capture/win/screen_capturer_win_gdi.cc index 4d07b6b92f..24716813b5 100644 --- a/modules/desktop_capture/win/screen_capturer_win_gdi.cc +++ b/modules/desktop_capture/win/screen_capturer_win_gdi.cc @@ -75,7 +75,7 @@ void ScreenCapturerWinGdi::SetSharedMemoryFactory( void ScreenCapturerWinGdi::CaptureFrame() { TRACE_EVENT0("webrtc", "ScreenCapturerWinGdi::CaptureFrame"); - int64_t capture_start_time_nanos = rtc::TimeNanos(); + int64_t capture_start_time_nanos = webrtc::TimeNanos(); queue_.MoveToNextFrame(); if (queue_.current_frame() && queue_.current_frame()->IsShared()) { @@ -98,8 +98,8 @@ void ScreenCapturerWinGdi::CaptureFrame() { frame->mutable_updated_region()->SetRect( DesktopRect::MakeSize(frame->size())); - int capture_time_ms = (rtc::TimeNanos() - capture_start_time_nanos) / - rtc::kNumNanosecsPerMillisec; + int capture_time_ms = (webrtc::TimeNanos() - capture_start_time_nanos) / + webrtc::kNumNanosecsPerMillisec; RTC_HISTOGRAM_COUNTS_1000( "WebRTC.DesktopCapture.Win.ScreenGdiCapturerFrameTime", capture_time_ms); frame->set_capture_time_ms(capture_time_ms); diff --git a/modules/desktop_capture/win/screen_capturer_win_magnifier.cc b/modules/desktop_capture/win/screen_capturer_win_magnifier.cc index 214eb0e463..9a3c9d5f53 100644 --- a/modules/desktop_capture/win/screen_capturer_win_magnifier.cc +++ b/modules/desktop_capture/win/screen_capturer_win_magnifier.cc @@ -87,7 +87,7 @@ void ScreenCapturerWinMagnifier::CaptureFrame() { return; } - int64_t capture_start_time_nanos = rtc::TimeNanos(); + int64_t capture_start_time_nanos = webrtc::TimeNanos(); // Switch to the desktop receiving user input if different from the current // one. 
@@ -121,8 +121,8 @@ void ScreenCapturerWinMagnifier::CaptureFrame() { frame->mutable_updated_region()->SetRect( DesktopRect::MakeSize(frame->size())); - int capture_time_ms = (rtc::TimeNanos() - capture_start_time_nanos) / - rtc::kNumNanosecsPerMillisec; + int capture_time_ms = (webrtc::TimeNanos() - capture_start_time_nanos) / + webrtc::kNumNanosecsPerMillisec; RTC_HISTOGRAM_COUNTS_1000( "WebRTC.DesktopCapture.Win.MagnifierCapturerFrameTime", capture_time_ms); frame->set_capture_time_ms(capture_time_ms); diff --git a/modules/desktop_capture/win/wgc_capture_session.cc b/modules/desktop_capture/win/wgc_capture_session.cc index a291f20167..b4f9425699 100644 --- a/modules/desktop_capture/win/wgc_capture_session.cc +++ b/modules/desktop_capture/win/wgc_capture_session.cc @@ -22,6 +22,7 @@ #include #include +#include "modules/desktop_capture/win/screen_capture_utils.h" #include "modules/desktop_capture/win/wgc_desktop_frame.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -75,7 +76,8 @@ enum class GetFrameResult { kGetContentSizeFailed = 9, kResizeMappedTextureFailed = 10, kRecreateFramePoolFailed = 11, - kMaxValue = kRecreateFramePoolFailed + kFramePoolEmpty = 12, + kMaxValue = kFramePoolEmpty }; void RecordStartCaptureResult(StartCaptureResult error) { @@ -98,12 +100,16 @@ bool SizeHasChanged(ABI::Windows::Graphics::SizeInt32 size_new, } // namespace -WgcCaptureSession::WgcCaptureSession(ComPtr d3d11_device, +WgcCaptureSession::WgcCaptureSession(intptr_t source_id, + ComPtr d3d11_device, ComPtr item, ABI::Windows::Graphics::SizeInt32 size) : d3d11_device_(std::move(d3d11_device)), item_(std::move(item)), - size_(size) {} + size_(size), + source_id_(source_id) { + is_window_source_ = ::IsWindow(reinterpret_cast(source_id_)); +} WgcCaptureSession::~WgcCaptureSession() { RemoveEventHandler(); @@ -187,6 +193,18 @@ HRESULT WgcCaptureSession::StartCapture(const DesktopCaptureOptions& options) { } } + // By default, the WGC capture API adds a yellow border around the captured + // window or display to indicate that a capture is in progress. The section + // below is an attempt to remove this yellow border to make the capture + // experience more inline with the DXGI capture path. + // This requires 10.0.20348.0 or later, which practically means Windows 11. + ComPtr session3; + if (SUCCEEDED(session_->QueryInterface( + ABI::Windows::Graphics::Capture::IID_IGraphicsCaptureSession3, + &session3))) { + session3->put_IsBorderRequired(options.wgc_require_border()); + } + allow_zero_hertz_ = options.allow_wgc_zero_hertz(); hr = session_->StartCapture(); @@ -244,10 +262,22 @@ void WgcCaptureSession::EnsureFrame() { << "Unable to process a valid frame even after trying 10 times."; } -bool WgcCaptureSession::GetFrame(std::unique_ptr* output_frame) { +bool WgcCaptureSession::GetFrame(std::unique_ptr* output_frame, + bool source_should_be_capturable) { RTC_DCHECK_RUN_ON(&sequence_checker_); - EnsureFrame(); + if (item_closed_) { + RTC_LOG(LS_ERROR) << "The target source has been closed."; + RecordGetFrameResult(GetFrameResult::kItemClosed); + return false; + } + + // Try to process the captured frame and wait some if needed. Avoid trying + // if we know that the source will not be capturable. This can happen e.g. + // when captured window is minimized and if EnsureFrame() was called in this + // state a large amount of kFrameDropped errors would be logged. + if (source_should_be_capturable) + EnsureFrame(); // Return a NULL frame and false as `result` if we still don't have a valid // frame. 
This will lead to a DesktopCapturer::Result::ERROR_PERMANENT being @@ -297,12 +327,6 @@ HRESULT WgcCaptureSession::CreateMappedTexture( HRESULT WgcCaptureSession::ProcessFrame() { RTC_DCHECK_RUN_ON(&sequence_checker_); - if (item_closed_) { - RTC_LOG(LS_ERROR) << "The target source has been closed."; - RecordGetFrameResult(GetFrameResult::kItemClosed); - return E_ABORT; - } - RTC_DCHECK(is_capture_started_); ComPtr capture_frame; @@ -314,10 +338,15 @@ HRESULT WgcCaptureSession::ProcessFrame() { } if (!capture_frame) { - // Avoid logging errors until at least one valid frame has been captured. - if (queue_.current_frame()) { - RTC_DLOG(LS_WARNING) << "Frame pool was empty => kFrameDropped."; + if (!queue_.current_frame()) { + // The frame pool was empty and so is the external queue. + RTC_DLOG(LS_ERROR) << "Frame pool was empty => kFrameDropped."; RecordGetFrameResult(GetFrameResult::kFrameDropped); + } else { + // The frame pool was empty but there is still one old frame available in + // external the queue. + RTC_DLOG(LS_WARNING) << "Frame pool was empty => kFramePoolEmpty."; + RecordGetFrameResult(GetFrameResult::kFramePoolEmpty); } return E_FAIL; } @@ -434,6 +463,34 @@ HRESULT WgcCaptureSession::ProcessFrame() { DesktopFrame* current_frame = queue_.current_frame(); DesktopFrame* previous_frame = queue_.previous_frame(); + HMONITOR monitor; + if (is_window_source_) { + // If the captured window moves to another screen, the HMONITOR associated + // with the captured window will change. Therefore, we need to get the value + // of HMONITOR per frame. + monitor = MonitorFromWindow(reinterpret_cast(source_id_), + /*dwFlags=*/MONITOR_DEFAULTTONEAREST); + } else { + if (!GetHmonitorFromDeviceIndex(source_id_, &monitor)) { + RTC_LOG(LS_ERROR) << "Failed to get HMONITOR from device index."; + return E_FAIL; + } + } + + // Captures the device scale factor of the monitor where the frame is captured + // from. This value is the same as the scale from windows settings. Valid + // values are some distinct numbers in the range of [1,5], for example, + // 1, 1.5, 2.5, etc. + DEVICE_SCALE_FACTOR device_scale_factor = DEVICE_SCALE_FACTOR_INVALID; + HRESULT scale_factor_hr = + GetScaleFactorForMonitor(monitor, &device_scale_factor); + RTC_LOG_IF(LS_ERROR, FAILED(scale_factor_hr)) + << "Failed to get scale factor for monitor: " << scale_factor_hr; + if (device_scale_factor != DEVICE_SCALE_FACTOR_INVALID) { + current_frame->set_device_scale_factor( + static_cast(device_scale_factor) / 100.0f); + } + // Will be set to true while copying the frame data to the `current_frame` if // we can already determine that the content of the new frame differs from the // previous. The idea is to get a low-complexity indication of if the content diff --git a/modules/desktop_capture/win/wgc_capture_session.h b/modules/desktop_capture/win/wgc_capture_session.h index 499c75ee98..5d3f056945 100644 --- a/modules/desktop_capture/win/wgc_capture_session.h +++ b/modules/desktop_capture/win/wgc_capture_session.h @@ -12,6 +12,7 @@ #define MODULES_DESKTOP_CAPTURE_WIN_WGC_CAPTURE_SESSION_H_ #include +#include #include #include #include @@ -29,7 +30,12 @@ namespace webrtc { class WgcCaptureSession final { public: + // WgcCaptureSession supports capturing a window as well as a screen. + // If it is a window, `source_id` is the HWND of the window to be + // captured, which is never `0`'. If it is a screen, `source_id` is a number + // in a 0-based monitor index. 
WgcCaptureSession( + intptr_t source_id, Microsoft::WRL::ComPtr d3d11_device, Microsoft::WRL::ComPtr< ABI::Windows::Graphics::Capture::IGraphicsCaptureItem> item, @@ -44,17 +50,20 @@ class WgcCaptureSession final { HRESULT StartCapture(const DesktopCaptureOptions& options); // Returns a frame from the local frame queue, if any are present. - bool GetFrame(std::unique_ptr* output_frame); + bool GetFrame(std::unique_ptr* output_frame, + bool source_should_be_capturable); bool IsCaptureStarted() const { RTC_DCHECK_RUN_ON(&sequence_checker_); return is_capture_started_; } - // We only keep 1 buffer in the internal frame pool to reduce the latency as - // much as possible. + // We keep 2 buffers in the frame pool since it results in a good compromise + // between latency/capture-rate and the rate at which + // Direct3D11CaptureFramePool.TryGetNextFrame returns NULL and we have to fall + // back to providing a copy from our external queue instead. // We make this public for tests. - static constexpr int kNumBuffers = 1; + static constexpr int kNumBuffers = 2; private: // Initializes `mapped_texture_` with the properties of the `src_texture`, @@ -143,6 +152,13 @@ class WgcCaptureSession final { // false. DesktopRegion damage_region_; + // The unique id to represent a Source of current DesktopCapturer. + intptr_t source_id_; + + // The source type of the capture session. It can be either a window or a + // screen. + bool is_window_source_; + SequenceChecker sequence_checker_; }; diff --git a/modules/desktop_capture/win/wgc_capture_source.cc b/modules/desktop_capture/win/wgc_capture_source.cc index 24e6129ec7..1688878cde 100644 --- a/modules/desktop_capture/win/wgc_capture_source.cc +++ b/modules/desktop_capture/win/wgc_capture_source.cc @@ -29,6 +29,10 @@ WgcCaptureSource::WgcCaptureSource(DesktopCapturer::SourceId source_id) : source_id_(source_id) {} WgcCaptureSource::~WgcCaptureSource() = default; +bool WgcCaptureSource::ShouldBeCapturable() { + return true; +} + bool WgcCaptureSource::IsCapturable() { // If we can create a capture item, then we can capture it. Unfortunately, // we can't cache this item because it may be created in a different COM @@ -105,9 +109,14 @@ ABI::Windows::Graphics::SizeInt32 WgcWindowSource::GetSize() { window_rect.bottom - window_rect.top}; } +bool WgcWindowSource::ShouldBeCapturable() { + return IsWindowValidAndVisible(reinterpret_cast(GetSourceId())); +} + bool WgcWindowSource::IsCapturable() { - if (!IsWindowValidAndVisible(reinterpret_cast(GetSourceId()))) + if (!ShouldBeCapturable()) { return false; + } return WgcCaptureSource::IsCapturable(); } diff --git a/modules/desktop_capture/win/wgc_capture_source.h b/modules/desktop_capture/win/wgc_capture_source.h index d1275b6168..f155cdde1c 100644 --- a/modules/desktop_capture/win/wgc_capture_source.h +++ b/modules/desktop_capture/win/wgc_capture_source.h @@ -16,8 +16,8 @@ #include #include +#include -#include "absl/types/optional.h" #include "modules/desktop_capture/desktop_capturer.h" #include "modules/desktop_capture/desktop_geometry.h" @@ -33,6 +33,10 @@ class WgcCaptureSource { virtual ~WgcCaptureSource(); virtual DesktopVector GetTopLeft() = 0; + // Lightweight version of IsCapturable which avoids allocating/deallocating + // COM objects for each call. As such may return a different value than + // IsCapturable. 
+ virtual bool ShouldBeCapturable(); virtual bool IsCapturable(); virtual bool FocusOnSource(); virtual ABI::Windows::Graphics::SizeInt32 GetSize(); @@ -99,6 +103,7 @@ class WgcWindowSource final : public WgcCaptureSource { DesktopVector GetTopLeft() override; ABI::Windows::Graphics::SizeInt32 GetSize() override; + bool ShouldBeCapturable() override; bool IsCapturable() override; bool FocusOnSource() override; @@ -133,7 +138,7 @@ class WgcScreenSource final : public WgcCaptureSource { // device index as it's SourceId. However, WGC requires we use an HMONITOR to // describe which screen to capture. So, we internally convert the supplied // device index into an HMONITOR when `IsCapturable()` is called. - absl::optional hmonitor_; + std::optional hmonitor_; }; } // namespace webrtc diff --git a/modules/desktop_capture/win/wgc_capturer_win.cc b/modules/desktop_capture/win/wgc_capturer_win.cc index 20d4eb938d..f6e84658fc 100644 --- a/modules/desktop_capture/win/wgc_capturer_win.cc +++ b/modules/desktop_capture/win/wgc_capturer_win.cc @@ -38,6 +38,7 @@ constexpr wchar_t kCoreMessagingDll[] = L"CoreMessaging.dll"; constexpr wchar_t kWgcSessionType[] = L"Windows.Graphics.Capture.GraphicsCaptureSession"; constexpr wchar_t kApiContract[] = L"Windows.Foundation.UniversalApiContract"; +constexpr wchar_t kDirtyRegionMode[] = L"DirtyRegionMode"; constexpr UINT16 kRequiredApiContractVersion = 8; enum class WgcCapturerResult { @@ -58,6 +59,44 @@ void RecordWgcCapturerResult(WgcCapturerResult error) { static_cast(WgcCapturerResult::kMaxValue)); } +// Checks if the DirtyRegionMode property is present in GraphicsCaptureSession +// and logs a boolean histogram with the result. +// TODO(https://crbug.com/40259177): Detecting support for this property means +// that the WGC API supports dirty regions and it can be utilized to improve +// the capture performance and the existing zero-herz support. +void LogDirtyRegionSupport() { + ComPtr + api_info_statics; + HRESULT hr = GetActivationFactory< + ABI::Windows::Foundation::Metadata::IApiInformationStatics, + RuntimeClass_Windows_Foundation_Metadata_ApiInformation>( + &api_info_statics); + if (FAILED(hr)) { + return; + } + + HSTRING dirty_region_mode; + hr = webrtc::CreateHstring(kDirtyRegionMode, wcslen(kDirtyRegionMode), + &dirty_region_mode); + if (FAILED(hr)) { + webrtc::DeleteHstring(dirty_region_mode); + return; + } + + HSTRING wgc_session_type; + hr = webrtc::CreateHstring(kWgcSessionType, wcslen(kWgcSessionType), + &wgc_session_type); + if (SUCCEEDED(hr)) { + boolean is_dirty_region_mode_supported = + api_info_statics->IsPropertyPresent(wgc_session_type, dirty_region_mode, + &is_dirty_region_mode_supported); + RTC_HISTOGRAM_BOOLEAN("WebRTC.DesktopCapture.Win.WgcDirtyRegionSupport", + !!is_dirty_region_mode_supported); + } + webrtc::DeleteHstring(dirty_region_mode); + webrtc::DeleteHstring(wgc_session_type); +} + } // namespace bool IsWgcSupported(CaptureType capture_type) { @@ -71,7 +110,7 @@ bool IsWgcSupported(CaptureType capture_type) { // There is a bug in the DWM (Desktop Window Manager) that prevents it from // providing image data if there are no displays attached. This was fixed in // Windows 11. 
- if (rtc::rtc_win::GetVersion() < rtc::rtc_win::Version::VERSION_WIN11) + if (webrtc::rtc_win::GetVersion() < webrtc::rtc_win::Version::VERSION_WIN11) return false; } @@ -80,14 +119,15 @@ bool IsWgcSupported(CaptureType capture_type) { // we can't assert that we won't be asked to capture the entire virtual // screen, we report unsupported so we can fallback to another capturer. if (capture_type == CaptureType::kScreen && - rtc::rtc_win::GetVersion() < rtc::rtc_win::Version::VERSION_WIN10_20H1) { + webrtc::rtc_win::GetVersion() < + webrtc::rtc_win::Version::VERSION_WIN10_20H1) { return false; } if (!ResolveCoreWinRTDelayload()) return false; - // We need to check if the WGC APIs are presesnt on the system. Certain SKUs + // We need to check if the WGC APIs are present on the system. Certain SKUs // of Windows ship without these APIs. ComPtr api_info_statics; @@ -156,6 +196,7 @@ WgcCapturerWin::WgcCapturerWin( reinterpret_cast(GetProcAddress( core_messaging_library_, "CreateDispatcherQueueController")); } + LogDirtyRegionSupport(); } WgcCapturerWin::~WgcCapturerWin() { @@ -282,7 +323,7 @@ void WgcCapturerWin::CaptureFrame() { } } - int64_t capture_start_time_nanos = rtc::TimeNanos(); + int64_t capture_start_time_nanos = webrtc::TimeNanos(); WgcCaptureSession* capture_session = nullptr; std::map::iterator session_iter = @@ -302,8 +343,8 @@ void WgcCapturerWin::CaptureFrame() { iter_success_pair = ongoing_captures_.emplace( std::piecewise_construct, std::forward_as_tuple(capture_source_->GetSourceId()), - std::forward_as_tuple(d3d11_device_, item, - capture_source_->GetSize())); + std::forward_as_tuple(capture_source_->GetSourceId(), d3d11_device_, + item, capture_source_->GetSize())); RTC_DCHECK(iter_success_pair.second); capture_session = &iter_success_pair.first->second; } else { @@ -323,7 +364,8 @@ void WgcCapturerWin::CaptureFrame() { } std::unique_ptr frame; - if (!capture_session->GetFrame(&frame)) { + if (!capture_session->GetFrame(&frame, + capture_source_->ShouldBeCapturable())) { RTC_LOG(LS_ERROR) << "GetFrame failed."; ongoing_captures_.erase(capture_source_->GetSourceId()); callback_->OnCaptureResult(DesktopCapturer::Result::ERROR_PERMANENT, @@ -339,8 +381,8 @@ void WgcCapturerWin::CaptureFrame() { return; } - int capture_time_ms = (rtc::TimeNanos() - capture_start_time_nanos) / - rtc::kNumNanosecsPerMillisec; + int capture_time_ms = (webrtc::TimeNanos() - capture_start_time_nanos) / + webrtc::kNumNanosecsPerMillisec; RTC_HISTOGRAM_COUNTS_1000("WebRTC.DesktopCapture.Win.WgcCapturerFrameTime", capture_time_ms); frame->set_capture_time_ms(capture_time_ms); diff --git a/modules/desktop_capture/win/wgc_capturer_win_unittest.cc b/modules/desktop_capture/win/wgc_capturer_win_unittest.cc index a7b656fcfc..5d0884a398 100644 --- a/modules/desktop_capture/win/wgc_capturer_win_unittest.cc +++ b/modules/desktop_capture/win/wgc_capturer_win_unittest.cc @@ -126,7 +126,7 @@ class WgcCapturerWinTest : public ::testing::TestWithParam, // having GraphicsCaptureItem events (i.e. the Closed event) fire, and it more // closely resembles how capture works in the wild. 
void CreateWindowOnSeparateThread(int window_width, int window_height) { - window_thread_ = rtc::Thread::Create(); + window_thread_ = webrtc::Thread::Create(); window_thread_->SetName(kWindowThreadName, nullptr); window_thread_->Start(); SendTask(window_thread_.get(), [this, window_width, window_height]() { @@ -267,7 +267,7 @@ class WgcCapturerWinTest : public ::testing::TestWithParam, protected: std::unique_ptr com_initializer_; DWORD window_thread_id_; - std::unique_ptr window_thread_; + std::unique_ptr window_thread_; WindowInfo window_info_; intptr_t source_id_; bool window_open_ = false; @@ -331,11 +331,11 @@ TEST_P(WgcCapturerWinTest, CaptureTime) { capturer_->Start(this); int64_t start_time; - start_time = rtc::TimeNanos(); + start_time = webrtc::TimeNanos(); capturer_->CaptureFrame(); int capture_time_ms = - (rtc::TimeNanos() - start_time) / rtc::kNumNanosecsPerMillisec; + (webrtc::TimeNanos() - start_time) / webrtc::kNumNanosecsPerMillisec; EXPECT_EQ(result_, DesktopCapturer::Result::SUCCESS); EXPECT_TRUE(frame_); @@ -366,7 +366,7 @@ TEST(WgcCapturerNoMonitorTest, NoMonitors) { // A bug in the DWM (Desktop Window Manager) prevents it from providing image // data if there are no displays attached. This was fixed in Windows 11. - if (rtc::rtc_win::GetVersion() < rtc::rtc_win::Version::VERSION_WIN11) + if (webrtc::rtc_win::GetVersion() < webrtc::rtc_win::Version::VERSION_WIN11) EXPECT_FALSE(IsWgcSupported(CaptureType::kWindow)); else EXPECT_TRUE(IsWgcSupported(CaptureType::kWindow)); diff --git a/modules/desktop_capture/win/window_capture_utils.cc b/modules/desktop_capture/win/window_capture_utils.cc index ccfef49bc5..263b2c3780 100644 --- a/modules/desktop_capture/win/window_capture_utils.cc +++ b/modules/desktop_capture/win/window_capture_utils.cc @@ -104,7 +104,7 @@ BOOL CALLBACK GetWindowListHandler(HWND hwnd, LPARAM param) { WCHAR window_title[kTitleLength] = L""; if (GetWindowTextLength(hwnd) != 0 && GetWindowTextW(hwnd, window_title, kTitleLength) > 0) { - window.title = rtc::ToUtf8(window_title); + window.title = webrtc::ToUtf8(window_title); } } @@ -179,10 +179,17 @@ bool GetCroppedWindowRect(HWND window, // As of Windows8, transparent resize borders are added by the OS at // left/bottom/right sides of a resizeable window. If the cropped window // doesn't remove these borders, the background will be exposed a bit. - if (rtc::rtc_win::GetVersion() >= rtc::rtc_win::Version::VERSION_WIN8 || + if (webrtc::rtc_win::GetVersion() >= webrtc::rtc_win::Version::VERSION_WIN8 || is_maximized) { // Only apply this cropping to windows with a resize border (otherwise, // it'd clip the edges of captured pop-up windows without this border). 
+ RECT rect; + DwmGetWindowAttribute(window, DWMWA_EXTENDED_FRAME_BOUNDS, &rect, + sizeof(RECT)); + // If the left edges match, the window edge is not transparent + if (original_rect && rect.left == original_rect->left()) { + return true; + } LONG style = GetWindowLong(window, GWL_STYLE); if (style & WS_THICKFRAME || style & DS_MODALFRAME) { int width = GetSystemMetrics(SM_CXSIZEFRAME); @@ -312,7 +319,8 @@ WindowCaptureHelperWin::WindowCaptureHelperWin() { GetProcAddress(dwmapi_library_, "DwmGetWindowAttribute")); } - if (rtc::rtc_win::GetVersion() >= rtc::rtc_win::Version::VERSION_WIN10) { + if (webrtc::rtc_win::GetVersion() >= + webrtc::rtc_win::Version::VERSION_WIN10) { if (FAILED(::CoCreateInstance(__uuidof(VirtualDesktopManager), nullptr, CLSCTX_ALL, IID_PPV_ARGS(&virtual_desktop_manager_)))) { diff --git a/modules/desktop_capture/win/window_capture_utils_unittest.cc b/modules/desktop_capture/win/window_capture_utils_unittest.cc index 137440b09e..76cb537a87 100644 --- a/modules/desktop_capture/win/window_capture_utils_unittest.cc +++ b/modules/desktop_capture/win/window_capture_utils_unittest.cc @@ -28,10 +28,10 @@ namespace { const char kWindowThreadName[] = "window_capture_utils_test_thread"; const WCHAR kWindowTitle[] = L"Window Capture Utils Test"; -std::unique_ptr SetUpUnresponsiveWindow(std::mutex& mtx, - WindowInfo& info) { - std::unique_ptr window_thread; - window_thread = rtc::Thread::Create(); +std::unique_ptr SetUpUnresponsiveWindow(std::mutex& mtx, + WindowInfo& info) { + std::unique_ptr window_thread; + window_thread = webrtc::Thread::Create(); window_thread->SetName(kWindowThreadName, nullptr); window_thread->Start(); @@ -66,7 +66,7 @@ TEST(WindowCaptureUtilsTest, GetWindowList) { TEST(WindowCaptureUtilsTest, IncludeUnresponsiveWindows) { std::mutex mtx; WindowInfo info; - std::unique_ptr window_thread = + std::unique_ptr window_thread = SetUpUnresponsiveWindow(mtx, info); EXPECT_FALSE(IsWindowResponding(info.hwnd)); @@ -89,7 +89,7 @@ TEST(WindowCaptureUtilsTest, IgnoreUnresponsiveWindows) { std::mutex mtx; WindowInfo info; - std::unique_ptr window_thread = + std::unique_ptr window_thread = SetUpUnresponsiveWindow(mtx, info); EXPECT_FALSE(IsWindowResponding(info.hwnd)); diff --git a/modules/desktop_capture/win/window_capturer_win_gdi.cc b/modules/desktop_capture/win/window_capturer_win_gdi.cc index bc3a762264..610e058a9f 100644 --- a/modules/desktop_capture/win/window_capturer_win_gdi.cc +++ b/modules/desktop_capture/win/window_capturer_win_gdi.cc @@ -157,7 +157,7 @@ void WindowCapturerWinGdi::Start(Callback* callback) { void WindowCapturerWinGdi::CaptureFrame() { RTC_DCHECK(callback_); - int64_t capture_start_time_nanos = rtc::TimeNanos(); + int64_t capture_start_time_nanos = webrtc::TimeNanos(); CaptureResults results = CaptureFrame(/*capture_owned_windows*/ true); if (!results.frame) { @@ -168,8 +168,8 @@ void WindowCapturerWinGdi::CaptureFrame() { return; } - int capture_time_ms = (rtc::TimeNanos() - capture_start_time_nanos) / - rtc::kNumNanosecsPerMillisec; + int capture_time_ms = (webrtc::TimeNanos() - capture_start_time_nanos) / + webrtc::kNumNanosecsPerMillisec; RTC_HISTOGRAM_COUNTS_1000( "WebRTC.DesktopCapture.Win.WindowGdiCapturerFrameTime", capture_time_ms); results.frame->set_capture_time_ms(capture_time_ms); @@ -298,7 +298,7 @@ WindowCapturerWinGdi::CaptureResults WindowCapturerWinGdi::CaptureFrame( // on Windows 8.1 and later, PrintWindow is only used when the window is // occluded.
When the window is not occluded, it is much faster to capture // the screen and to crop it to the window position and size. - if (rtc::rtc_win::GetVersion() >= rtc::rtc_win::Version::VERSION_WIN8) { + if (webrtc::rtc_win::GetVersion() >= webrtc::rtc_win::Version::VERSION_WIN8) { // Special flag that makes PrintWindow to work on Windows 8.1 and later. // Indeed certain apps (e.g. those using DirectComposition rendering) can't // be captured using BitBlt or PrintWindow without this flag. Note that on @@ -365,6 +365,10 @@ WindowCapturerWinGdi::CaptureResults WindowCapturerWinGdi::CaptureFrame( for (auto it = owned_windows_.rbegin(); it != owned_windows_.rend(); it++) { HWND hwnd = *it; + LONG style = GetWindowLong(hwnd, GWL_EXSTYLE); + if (style & WS_EX_LAYERED) { + continue; + } if (owned_window_capturer_->SelectSource( reinterpret_cast(hwnd))) { CaptureResults results = owned_window_capturer_->CaptureFrame( diff --git a/modules/desktop_capture/window_capturer_mac.mm b/modules/desktop_capture/window_capturer_mac.mm index f99b4a74d1..a1e6157b87 100644 --- a/modules/desktop_capture/window_capturer_mac.mm +++ b/modules/desktop_capture/window_capturer_mac.mm @@ -47,8 +47,9 @@ bool IsWindowValid(CGWindowID id) { class WindowCapturerMac : public DesktopCapturer { public: explicit WindowCapturerMac( - rtc::scoped_refptr full_screen_window_detector, - rtc::scoped_refptr configuration_monitor); + webrtc::scoped_refptr + full_screen_window_detector, + webrtc::scoped_refptr configuration_monitor); ~WindowCapturerMac() override; WindowCapturerMac(const WindowCapturerMac&) = delete; @@ -68,9 +69,10 @@ explicit WindowCapturerMac( // The window being captured. CGWindowID window_id_ = 0; - rtc::scoped_refptr full_screen_window_detector_; + webrtc::scoped_refptr full_screen_window_detector_; - const rtc::scoped_refptr configuration_monitor_; + const webrtc::scoped_refptr + configuration_monitor_; WindowFinderMac window_finder_; @@ -79,8 +81,8 @@ explicit WindowCapturerMac( }; WindowCapturerMac::WindowCapturerMac( - rtc::scoped_refptr full_screen_window_detector, - rtc::scoped_refptr configuration_monitor) + webrtc::scoped_refptr full_screen_window_detector, + webrtc::scoped_refptr configuration_monitor) : full_screen_window_detector_(std::move(full_screen_window_detector)), configuration_monitor_(std::move(configuration_monitor)), window_finder_(configuration_monitor_) {} @@ -92,15 +94,13 @@ explicit WindowCapturerMac( } bool WindowCapturerMac::SelectSource(SourceId id) { - if (!IsWindowValid(id)) - return false; + if (!IsWindowValid(id)) return false; window_id_ = id; return true; } bool WindowCapturerMac::FocusOnSelectedSource() { - if (!window_id_) - return false; + if (!window_id_) return false; CGWindowID ids[1]; ids[0] = window_id_; @@ -126,9 +126,8 @@ explicit WindowCapturerMac( // TODO(jiayl): this will bring the process main window to the front. We // should find a way to bring only the window to the front. 
- bool result = - [[NSRunningApplication runningApplicationWithProcessIdentifier: pid] - activateWithOptions: NSApplicationActivateIgnoringOtherApps]; + bool result = [[NSRunningApplication + runningApplicationWithProcessIdentifier:pid] activateWithOptions:0]; CFRelease(window_id_array); CFRelease(window_array); @@ -171,7 +170,8 @@ explicit WindowCapturerMac( [sources](CFDictionaryRef window) { WindowId window_id = GetWindowId(window); if (window_id != kNullWindowId) { - sources->push_back(DesktopCapturer::Source{window_id, GetWindowTitle(window)}); + sources->push_back(DesktopCapturer::Source{ + window_id, GetWindowTitle(window)}); } return true; }, @@ -179,10 +179,12 @@ explicit WindowCapturerMac( false); }); - CGWindowID full_screen_window = full_screen_window_detector_->FindFullScreenWindow(window_id_); + CGWindowID full_screen_window = + full_screen_window_detector_->FindFullScreenWindow(window_id_); if (full_screen_window != kCGNullWindowID) { - // If this is the first time this happens, report to UMA that the feature is active. + // If this is the first time this happens, report to UMA that the feature + // is active. if (!fullscreen_usage_logged_) { LogDesktopCapturerFullscreenDetectorUsage(); fullscreen_usage_logged_ = true; @@ -191,7 +193,8 @@ explicit WindowCapturerMac( } } - std::unique_ptr frame = DesktopFrameCGImage::CreateForWindow(on_screen_window); + std::unique_ptr frame = + DesktopFrameCGImage::CreateForWindow(on_screen_window); if (!frame) { RTC_LOG(LS_WARNING) << "Temporarily failed to capture window."; callback_->OnCaptureResult(Result::ERROR_TEMPORARY, nullptr); @@ -203,7 +206,8 @@ explicit WindowCapturerMac( frame->set_top_left(GetWindowBounds(on_screen_window).top_left()); float scale_factor = GetWindowScaleFactor(window_id_, frame->size()); - frame->set_dpi(DesktopVector(kStandardDPI * scale_factor, kStandardDPI * scale_factor)); + frame->set_dpi( + DesktopVector(kStandardDPI * scale_factor, kStandardDPI * scale_factor)); callback_->OnCaptureResult(Result::SUCCESS, std::move(frame)); } diff --git a/modules/desktop_capture/window_capturer_unittest.cc b/modules/desktop_capture/window_capturer_unittest.cc index 519c04601b..3483d4d2c8 100644 --- a/modules/desktop_capture/window_capturer_unittest.cc +++ b/modules/desktop_capture/window_capturer_unittest.cc @@ -33,7 +33,7 @@ class WindowCapturerTest : public ::testing::Test, void TearDown() override {} // DesktopCapturer::Callback interface - void OnCaptureResult(DesktopCapturer::Result result, + void OnCaptureResult(DesktopCapturer::Result /* result */, std::unique_ptr frame) override { frame_ = std::move(frame); } diff --git a/modules/desktop_capture/window_capturer_win.cc b/modules/desktop_capture/window_capturer_win.cc index f289746e30..c525dcc5fe 100644 --- a/modules/desktop_capture/window_capturer_win.cc +++ b/modules/desktop_capture/window_capturer_win.cc @@ -28,7 +28,7 @@ std::unique_ptr DesktopCapturer::CreateRawWindowCapturer( WindowCapturerWinGdi::CreateRawWindowCapturer(options)); #if defined(RTC_ENABLE_WIN_WGC) if (options.allow_wgc_capturer_fallback() && - rtc::rtc_win::GetVersion() >= rtc::rtc_win::Version::VERSION_WIN11) { + rtc_win::GetVersion() >= rtc_win::Version::VERSION_WIN11) { // BlankDectector capturer will send an error when it detects a failed // GDI rendering, then Fallback capturer will try to capture it again with // WGC. 
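A small Win32-only sketch of the owned-window filter added to WindowCapturerWinGdi above: owned windows with the WS_EX_LAYERED extended style are now skipped before the capturer tries to capture them. The helper name is an assumption for illustration; only the GetWindowLong/WS_EX_LAYERED check itself comes from the patch, where it is inlined in the loop over owned_windows_.

#include <windows.h>

// Returns true if an owned window should still be captured by the GDI window
// capturer; layered (WS_EX_LAYERED) owned windows are skipped, matching the
// `continue` added in WindowCapturerWinGdi::CaptureFrame() above.
bool ShouldCaptureOwnedWindow(HWND hwnd) {
  const LONG ex_style = GetWindowLong(hwnd, GWL_EXSTYLE);
  return (ex_style & WS_EX_LAYERED) == 0;
}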
diff --git a/modules/desktop_capture/window_finder.h b/modules/desktop_capture/window_finder.h index 99e3cce559..1b79f932b7 100644 --- a/modules/desktop_capture/window_finder.h +++ b/modules/desktop_capture/window_finder.h @@ -50,7 +50,7 @@ class WindowFinder { XAtomCache* cache = nullptr; #endif #if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) - rtc::scoped_refptr configuration_monitor; + webrtc::scoped_refptr configuration_monitor; #endif }; diff --git a/modules/desktop_capture/window_finder_mac.h b/modules/desktop_capture/window_finder_mac.h index 988dd497dd..171f098396 100644 --- a/modules/desktop_capture/window_finder_mac.h +++ b/modules/desktop_capture/window_finder_mac.h @@ -22,14 +22,15 @@ class DesktopConfigurationMonitor; class WindowFinderMac final : public WindowFinder { public: explicit WindowFinderMac( - rtc::scoped_refptr configuration_monitor); + webrtc::scoped_refptr configuration_monitor); ~WindowFinderMac() override; // WindowFinder implementation. WindowId GetWindowUnderPoint(DesktopVector point) override; private: - const rtc::scoped_refptr configuration_monitor_; + const webrtc::scoped_refptr + configuration_monitor_; }; } // namespace webrtc diff --git a/modules/desktop_capture/window_finder_mac.mm b/modules/desktop_capture/window_finder_mac.mm index e1d0316c79..b6c49160d8 100644 --- a/modules/desktop_capture/window_finder_mac.mm +++ b/modules/desktop_capture/window_finder_mac.mm @@ -22,7 +22,7 @@ namespace webrtc { WindowFinderMac::WindowFinderMac( - rtc::scoped_refptr configuration_monitor) + webrtc::scoped_refptr configuration_monitor) : configuration_monitor_(std::move(configuration_monitor)) {} WindowFinderMac::~WindowFinderMac() = default; diff --git a/modules/desktop_capture/window_finder_unittest.cc b/modules/desktop_capture/window_finder_unittest.cc index ac13f124d3..e30d715494 100644 --- a/modules/desktop_capture/window_finder_unittest.cc +++ b/modules/desktop_capture/window_finder_unittest.cc @@ -40,7 +40,8 @@ namespace { // ScreenDrawerWin does not have a message loop, so it's unresponsive to user // inputs. WindowFinderWin cannot detect this kind of unresponsive windows. // Instead, console window is used to test WindowFinderWin. -TEST(WindowFinderTest, FindConsoleWindow) { +// TODO(b/373792116): Reenable once flakiness is fixed. +TEST(WindowFinderTest, DISABLED_FindConsoleWindow) { // Creates a ScreenDrawer to avoid this test from conflicting with // ScreenCapturerIntegrationTest: both tests require its window to be in // foreground. @@ -61,25 +62,35 @@ TEST(WindowFinderTest, FindConsoleWindow) { // Ensures that current console window is visible. ShowWindow(console_window, SW_MAXIMIZE); // Moves the window to the top-left of the display. - MoveWindow(console_window, 0, 0, kMaxSize, kMaxSize, true); + if (!MoveWindow(console_window, 0, 0, kMaxSize, kMaxSize, true)) { + FAIL() << "Failed to move window. Error code: " << GetLastError(); + } bool should_restore_notopmost = (GetWindowLong(console_window, GWL_EXSTYLE) & WS_EX_TOPMOST) == 0; // Brings console window to top. - SetWindowPos(console_window, HWND_TOPMOST, 0, 0, 0, 0, - SWP_NOMOVE | SWP_NOSIZE); - BringWindowToTop(console_window); + if (!SetWindowPos(console_window, HWND_TOPMOST, 0, 0, 0, 0, + SWP_NOMOVE | SWP_NOSIZE)) { + FAIL() << "Failed to bring window to top. Error code: " << GetLastError(); + } + if (!BringWindowToTop(console_window)) { + FAIL() << "Failed second attempt to bring window to top. 
Error code: " + << GetLastError(); + } bool success = false; WindowFinderWin finder; for (int i = 0; i < kMaxSize; i++) { const DesktopVector spot(i, i); const HWND id = reinterpret_cast(finder.GetWindowUnderPoint(spot)); + if (id == console_window) { success = true; break; } + RTC_LOG(LS_INFO) << "Expected window " << console_window + << ". Found window " << id; } if (should_restore_notopmost) SetWindowPos(console_window, HWND_NOTOPMOST, 0, 0, 0, 0, diff --git a/modules/include/module_common_types_public.h b/modules/include/module_common_types_public.h index 5a6f634df7..289b5d0b64 100644 --- a/modules/include/module_common_types_public.h +++ b/modules/include/module_common_types_public.h @@ -12,8 +12,7 @@ #define MODULES_INCLUDE_MODULE_COMMON_TYPES_PUBLIC_H_ #include - -#include "absl/types/optional.h" +#include namespace webrtc { diff --git a/modules/pacing/BUILD.gn b/modules/pacing/BUILD.gn index ea80c8c819..21d529ff04 100644 --- a/modules/pacing/BUILD.gn +++ b/modules/pacing/BUILD.gn @@ -30,9 +30,12 @@ rtc_library("pacing") { deps = [ ":interval_budget", + "../../api:array_view", "../../api:field_trials_view", "../../api:field_trials_view", "../../api:function_view", + "../../api:rtp_headers", + "../../api:rtp_packet_sender", "../../api:sequence_checker", "../../api/rtc_event_log", "../../api/task_queue:pending_task_safety_flag", @@ -50,7 +53,7 @@ rtc_library("pacing") { "../../rtc_base:logging", "../../rtc_base:macromagic", "../../rtc_base:rtc_numerics", - "../../rtc_base:rtc_task_queue", + "../../rtc_base:safe_conversions", "../../rtc_base:timeutils", "../../rtc_base/experiments:field_trial_parser", "../../rtc_base/synchronization:mutex", @@ -60,12 +63,10 @@ rtc_library("pacing") { "../rtp_rtcp", "../rtp_rtcp:rtp_rtcp_format", "../utility:utility", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/cleanup", - "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -96,7 +97,11 @@ if (rtc_include_tests) { deps = [ ":interval_budget", ":pacing", + "../../api:array_view", + "../../api:rtp_headers", + "../../api:sequence_checker", "../../api/task_queue:task_queue", + "../../api/transport:field_trial_based_config", "../../api/transport:network_control", "../../api/units:data_rate", "../../api/units:data_size", @@ -114,6 +119,5 @@ if (rtc_include_tests) { "../rtp_rtcp:mock_rtp_rtcp", "../rtp_rtcp:rtp_rtcp_format", ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable" ] } } diff --git a/modules/pacing/OWNERS b/modules/pacing/OWNERS index 7266156891..f709476d43 100644 --- a/modules/pacing/OWNERS +++ b/modules/pacing/OWNERS @@ -1,5 +1,5 @@ stefan@webrtc.org mflodman@webrtc.org philipel@webrtc.org -srte@webrtc.org sprang@webrtc.org +perkj@webrtc.org diff --git a/modules/pacing/bitrate_prober.cc b/modules/pacing/bitrate_prober.cc index 3151a35075..ba34546b2b 100644 --- a/modules/pacing/bitrate_prober.cc +++ b/modules/pacing/bitrate_prober.cc @@ -11,9 +11,17 @@ #include "modules/pacing/bitrate_prober.h" #include +#include +#include +#include "api/field_trials_view.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" #include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" namespace webrtc { @@ 
-26,10 +34,9 @@ constexpr size_t kMaxPendingProbeClusters = 5; BitrateProberConfig::BitrateProberConfig( const FieldTrialsView* key_value_config) - : min_probe_delta("min_probe_delta", TimeDelta::Millis(2)), - max_probe_delay("max_probe_delay", TimeDelta::Millis(10)), + : max_probe_delay("max_probe_delay", TimeDelta::Millis(10)), min_packet_size("min_packet_size", DataSize::Bytes(200)) { - ParseFieldTrial({&min_probe_delta, &max_probe_delay, &min_packet_size}, + ParseFieldTrial({&max_probe_delay, &min_packet_size}, key_value_config->Lookup("WebRTC-Bwe-ProbingBehavior")); } @@ -52,6 +59,18 @@ void BitrateProber::SetEnabled(bool enable) { } } +void BitrateProber::SetAllowProbeWithoutMediaPacket(bool allow) { + config_.allow_start_probing_immediately = allow; + MaybeSetActiveState(/*packet_size=*/DataSize::Zero()); +} + +void BitrateProber::MaybeSetActiveState(DataSize packet_size) { + if (ReadyToSetActiveState(packet_size)) { + next_probe_time_ = Timestamp::MinusInfinity(); + probing_state_ = ProbingState::kActive; + } +} + bool BitrateProber::ReadyToSetActiveState(DataSize packet_size) const { if (clusters_.empty()) { RTC_DCHECK(probing_state_ == ProbingState::kDisabled || @@ -63,24 +82,25 @@ bool BitrateProber::ReadyToSetActiveState(DataSize packet_size) const { case ProbingState::kActive: return false; case ProbingState::kInactive: - // If config_.min_packet_size > 0, a "large enough" packet must be sent - // first, before a probe can be generated and sent. Otherwise, send the - // probe asap. + if (config_.allow_start_probing_immediately) { + return true; + } + // If config_.min_packet_size > 0, a "large enough" packet must be + // sent first, before a probe can be generated and sent. Otherwise, + // send the probe asap. return packet_size >= std::min(RecommendedMinProbeSize(), config_.min_packet_size.Get()); } } void BitrateProber::OnIncomingPacket(DataSize packet_size) { - if (ReadyToSetActiveState(packet_size)) { - next_probe_time_ = Timestamp::MinusInfinity(); - probing_state_ = ProbingState::kActive; - } + MaybeSetActiveState(packet_size); } void BitrateProber::CreateProbeCluster( const ProbeClusterConfig& cluster_config) { RTC_DCHECK(probing_state_ != ProbingState::kDisabled); + RTC_DCHECK(cluster_config.min_probe_delta > TimeDelta::Zero()); while (!clusters_.empty() && (cluster_config.at_time - clusters_.front().requested_at > @@ -97,19 +117,18 @@ void BitrateProber::CreateProbeCluster( (cluster_config.target_data_rate * cluster_config.target_duration) .bytes(); RTC_DCHECK_GE(cluster.pace_info.probe_cluster_min_bytes, 0); - cluster.pace_info.send_bitrate_bps = cluster_config.target_data_rate.bps(); + cluster.min_probe_delta = cluster_config.min_probe_delta; + cluster.pace_info.send_bitrate = cluster_config.target_data_rate; cluster.pace_info.probe_cluster_id = cluster_config.id; clusters_.push(cluster); - if (ReadyToSetActiveState(/*packet_size=*/DataSize::Zero())) { - next_probe_time_ = Timestamp::MinusInfinity(); - probing_state_ = ProbingState::kActive; - } + MaybeSetActiveState(/*packet_size=*/DataSize::Zero()); + RTC_DCHECK(probing_state_ == ProbingState::kActive || probing_state_ == ProbingState::kInactive); RTC_LOG(LS_INFO) << "Probe cluster (bitrate_bps:min bytes:min packets): (" - << cluster.pace_info.send_bitrate_bps << ":" + << cluster.pace_info.send_bitrate << ":" << cluster.pace_info.probe_cluster_min_bytes << ":" << cluster.pace_info.probe_cluster_min_probes << ", " << (probing_state_ == ProbingState::kInactive ? 
"Inactive" @@ -117,7 +136,7 @@ void BitrateProber::CreateProbeCluster( << ")"; } -Timestamp BitrateProber::NextProbeTime(Timestamp now) const { +Timestamp BitrateProber::NextProbeTime(Timestamp /* now */) const { // Probing is not active or probing is already complete. if (probing_state_ != ProbingState::kActive || clusters_.empty()) { return Timestamp::PlusInfinity(); @@ -126,9 +145,9 @@ Timestamp BitrateProber::NextProbeTime(Timestamp now) const { return next_probe_time_; } -absl::optional BitrateProber::CurrentCluster(Timestamp now) { +std::optional BitrateProber::CurrentCluster(Timestamp now) { if (clusters_.empty() || probing_state_ != ProbingState::kActive) { - return absl::nullopt; + return std::nullopt; } if (next_probe_time_.IsFinite() && @@ -140,7 +159,7 @@ absl::optional BitrateProber::CurrentCluster(Timestamp now) { clusters_.pop(); if (clusters_.empty()) { probing_state_ = ProbingState::kInactive; - return absl::nullopt; + return std::nullopt; } } @@ -153,9 +172,8 @@ DataSize BitrateProber::RecommendedMinProbeSize() const { if (clusters_.empty()) { return DataSize::Zero(); } - DataRate send_rate = - DataRate::BitsPerSec(clusters_.front().pace_info.send_bitrate_bps); - return send_rate * config_.min_probe_delta; + DataRate send_rate = clusters_.front().pace_info.send_bitrate; + return send_rate * clusters_.front().min_probe_delta; } void BitrateProber::ProbeSent(Timestamp now, DataSize size) { @@ -183,14 +201,13 @@ void BitrateProber::ProbeSent(Timestamp now, DataSize size) { Timestamp BitrateProber::CalculateNextProbeTime( const ProbeCluster& cluster) const { - RTC_CHECK_GT(cluster.pace_info.send_bitrate_bps, 0); + RTC_CHECK_GT(cluster.pace_info.send_bitrate.bps(), 0); RTC_CHECK(cluster.started_at.IsFinite()); // Compute the time delta from the cluster start to ensure probe bitrate stays // close to the target bitrate. Result is in milliseconds. DataSize sent_bytes = DataSize::Bytes(cluster.sent_bytes); - DataRate send_bitrate = - DataRate::BitsPerSec(cluster.pace_info.send_bitrate_bps); + DataRate send_bitrate = cluster.pace_info.send_bitrate; TimeDelta delta = sent_bytes / send_bitrate; return cluster.started_at + delta; diff --git a/modules/pacing/bitrate_prober.h b/modules/pacing/bitrate_prober.h index 82aba6ee3a..86da9c1794 100644 --- a/modules/pacing/bitrate_prober.h +++ b/modules/pacing/bitrate_prober.h @@ -12,12 +12,15 @@ #define MODULES_PACING_BITRATE_PROBER_H_ #include -#include +#include #include -#include "api/transport/field_trial_based_config.h" +#include "api/field_trials_view.h" #include "api/transport/network_types.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "rtc_base/experiments/field_trial_parser.h" namespace webrtc { @@ -29,8 +32,6 @@ struct BitrateProberConfig { BitrateProberConfig& operator=(const BitrateProberConfig&) = default; ~BitrateProberConfig() = default; - // A minimum interval between probes to allow scheduling to be feasible. - FieldTrialParameter min_probe_delta; // Maximum amount of time each probe can be delayed. FieldTrialParameter max_probe_delay; // This is used to start sending a probe after a large enough packet. @@ -38,6 +39,9 @@ struct BitrateProberConfig { // This defines the max min packet size, meaning that on high bitrates // a packet of at least this size is needed to trigger sending a probe. FieldTrialParameter min_packet_size; + + // If true, `min_packet_size` is ignored. 
+ bool allow_start_probing_immediately = false; }; // Note that this class isn't thread-safe by itself and therefore relies @@ -48,6 +52,7 @@ class BitrateProber { ~BitrateProber() = default; void SetEnabled(bool enable); + void SetAllowProbeWithoutMediaPacket(bool allow); // Returns true if the prober is in a probing session, i.e., it currently // wants packets to be sent out according to the time returned by @@ -68,7 +73,7 @@ class BitrateProber { Timestamp NextProbeTime(Timestamp now) const; // Information about the current probing cluster. - absl::optional CurrentCluster(Timestamp now); + std::optional CurrentCluster(Timestamp now); // Returns the minimum number of bytes that the prober recommends for // the next probe, or zero if not probing. A probe can consist of multiple @@ -99,12 +104,14 @@ class BitrateProber { int sent_probes = 0; int sent_bytes = 0; + TimeDelta min_probe_delta = TimeDelta::Zero(); Timestamp requested_at = Timestamp::MinusInfinity(); Timestamp started_at = Timestamp::MinusInfinity(); - int retries = 0; }; Timestamp CalculateNextProbeTime(const ProbeCluster& cluster) const; + + void MaybeSetActiveState(DataSize packet_size); bool ReadyToSetActiveState(DataSize packet_size) const; ProbingState probing_state_; diff --git a/modules/pacing/bitrate_prober_unittest.cc b/modules/pacing/bitrate_prober_unittest.cc index 3c1c93f1f4..826e895aa4 100644 --- a/modules/pacing/bitrate_prober_unittest.cc +++ b/modules/pacing/bitrate_prober_unittest.cc @@ -12,8 +12,10 @@ #include +#include "api/transport/field_trial_based_config.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" +#include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "test/explicit_key_value_config.h" @@ -39,11 +41,13 @@ TEST(BitrateProberTest, VerifyStatesAndTimeBetweenProbes) { prober.CreateProbeCluster({.at_time = now, .target_data_rate = kTestBitrate1, .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 5, .id = 0}); prober.CreateProbeCluster({.at_time = now, .target_data_rate = kTestBitrate2, .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 5, .id = 1}); EXPECT_FALSE(prober.is_probing()); @@ -100,6 +104,7 @@ TEST(BitrateProberTest, DoesntProbeWithoutRecentPackets) { prober.CreateProbeCluster({.at_time = now, .target_data_rate = DataRate::KilobitsPerSec(900), .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 5, .id = 0}); EXPECT_FALSE(prober.is_probing()); @@ -125,6 +130,7 @@ TEST(BitrateProberTest, DiscardsDelayedProbes) { prober.CreateProbeCluster({.at_time = now, .target_data_rate = DataRate::KilobitsPerSec(900), .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 5, .id = 0}); @@ -154,6 +160,7 @@ TEST(BitrateProberTest, LimitsNumberOfPendingProbeClusters) { prober.CreateProbeCluster({.at_time = now, .target_data_rate = DataRate::KilobitsPerSec(900), .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 5, .id = 0}); prober.OnIncomingPacket(kProbeSize); @@ -165,6 +172,7 @@ TEST(BitrateProberTest, LimitsNumberOfPendingProbeClusters) { {.at_time = now, .target_data_rate = DataRate::KilobitsPerSec(900), .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 5, .id = i}); prober.OnIncomingPacket(kProbeSize); 
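As a concrete check of the refactored probe sizing above, the sketch below mirrors the arithmetic of RecommendedMinProbeSize() now that min_probe_delta travels in each ProbeClusterConfig instead of the WebRTC-Bwe-ProbingBehavior field trial. It uses plain integer arithmetic rather than the DataRate/TimeDelta types, and the example rate is an assumption.

#include <cstdint>
#include <cstdio>

int main() {
  // Example cluster: 10 Mbps target rate, min_probe_delta = 2 ms (the value
  // the updated unit tests pass in ProbeClusterConfig).
  const int64_t send_bitrate_bps = 10'000'000;
  const int64_t min_probe_delta_us = 2'000;

  // RecommendedMinProbeSize() == send_bitrate * min_probe_delta, i.e. the
  // number of bytes needed so that consecutive probes are at least
  // min_probe_delta apart at the cluster's send rate.
  const int64_t min_probe_size_bytes =
      send_bitrate_bps / 8 * min_probe_delta_us / 1'000'000;

  std::printf("recommended min probe size: %lld bytes\n",
              static_cast<long long>(min_probe_size_bytes));  // Prints 2500.
  return 0;
}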
@@ -190,6 +198,7 @@ TEST(BitrateProberTest, DoesntInitializeProbingForSmallPackets) { prober.CreateProbeCluster({.at_time = Timestamp::Zero(), .target_data_rate = DataRate::KilobitsPerSec(1000), .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 5, .id = 0}); prober.OnIncomingPacket(DataSize::Bytes(100)); @@ -208,6 +217,7 @@ TEST(BitrateProberTest, DoesInitializeProbingForSmallPacketsIfConfigured) { prober.CreateProbeCluster({.at_time = Timestamp::Zero(), .target_data_rate = DataRate::KilobitsPerSec(1000), .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 5, .id = 0}); prober.OnIncomingPacket(DataSize::Bytes(10)); @@ -224,6 +234,7 @@ TEST(BitrateProberTest, VerifyProbeSizeOnHighBitrate) { prober.CreateProbeCluster({.at_time = Timestamp::Zero(), .target_data_rate = kHighBitrate, .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 5, .id = 0}); // Probe size should ensure a minimum of 1 ms interval. @@ -231,10 +242,9 @@ TEST(BitrateProberTest, VerifyProbeSizeOnHighBitrate) { kHighBitrate * TimeDelta::Millis(1)); } -TEST(BitrateProberTest, ProbeSizeCanBeSetWithFieldTrial) { - const test::ExplicitKeyValueConfig trials( - "WebRTC-Bwe-ProbingBehavior/min_probe_delta:20ms/"); - BitrateProber prober(trials); +TEST(BitrateProberTest, ProbeSizeCanBeSetInProbeClusterConfig) { + const FieldTrialBasedConfig config; + BitrateProber prober(config); prober.SetEnabled(true); const DataRate kHighBitrate = DataRate::KilobitsPerSec(10000); // 10 Mbps @@ -242,6 +252,7 @@ TEST(BitrateProberTest, ProbeSizeCanBeSetWithFieldTrial) { prober.CreateProbeCluster({.at_time = Timestamp::Zero(), .target_data_rate = kHighBitrate, .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(20), .target_probe_count = 5, .id = 0}); EXPECT_EQ(prober.RecommendedMinProbeSize(), @@ -267,6 +278,7 @@ TEST(BitrateProberTest, MinumumNumberOfProbingPackets) { prober.CreateProbeCluster({.at_time = Timestamp::Zero(), .target_data_rate = kBitrate, .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 5, .id = 0}); @@ -290,6 +302,7 @@ TEST(BitrateProberTest, ScaleBytesUsedForProbing) { prober.CreateProbeCluster({.at_time = Timestamp::Zero(), .target_data_rate = kBitrate, .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 5, .id = 0}); prober.OnIncomingPacket(kPacketSize); @@ -314,6 +327,7 @@ TEST(BitrateProberTest, HighBitrateProbing) { prober.CreateProbeCluster({.at_time = Timestamp::Zero(), .target_data_rate = kBitrate, .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 5, .id = 0}); prober.OnIncomingPacket(kPacketSize); @@ -340,6 +354,7 @@ TEST(BitrateProberTest, ProbeClusterTimeout) { prober.CreateProbeCluster({.at_time = now, .target_data_rate = kBitrate, .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 5, .id = 0}); prober.OnIncomingPacket(kSmallPacketSize); @@ -348,6 +363,7 @@ TEST(BitrateProberTest, ProbeClusterTimeout) { prober.CreateProbeCluster({.at_time = now, .target_data_rate = kBitrate / 10, .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 5, .id = 1}); prober.OnIncomingPacket(kSmallPacketSize); @@ -356,6 +372,7 @@ TEST(BitrateProberTest, ProbeClusterTimeout) { 
prober.CreateProbeCluster({.at_time = now, .target_data_rate = kBitrate / 10, .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 5, .id = 2}); prober.OnIncomingPacket(kSmallPacketSize); @@ -378,6 +395,7 @@ TEST(BitrateProberTest, CanProbeImmediatelyIfConfigured) { prober.CreateProbeCluster({.at_time = Timestamp::Zero(), .target_data_rate = DataRate::KilobitsPerSec(300), .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 5, .id = 0}); EXPECT_TRUE(prober.is_probing()); @@ -392,6 +410,7 @@ TEST(BitrateProberTest, CanProbeImmediatelyAgainAfterProbeIfConfigured) { .at_time = Timestamp::Zero(), .target_data_rate = DataRate::KilobitsPerSec(300), .target_duration = TimeDelta::Millis(15), + .min_probe_delta = TimeDelta::Millis(2), .target_probe_count = 1, .id = 0}; prober.CreateProbeCluster(cluster_config); diff --git a/modules/pacing/g3doc/index.md b/modules/pacing/g3doc/index.md index 69f1e69513..bd17746704 100644 --- a/modules/pacing/g3doc/index.md +++ b/modules/pacing/g3doc/index.md @@ -157,7 +157,7 @@ Several methods are used to gather statistics in pacer state: * `ExpectedQueueTime()` total bytes in the queue divided by the send rate. [RTPSender]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h;drc=77ee8542dd35d5143b5788ddf47fb7cdb96eb08e -[RtpPacketSender]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/modules/rtp_rtcp/include/rtp_packet_sender.h;drc=ea55b0872f14faab23a4e5dbcb6956369c8ed5dc +[RtpPacketSender]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/rtp_packet_sender.h;drc=ea55b0872f14faab23a4e5dbcb6956369c8ed5dc [RtpPacketPacer]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/modules/pacing/rtp_packet_pacer.h;drc=e7bc3a347760023dd4840cf6ebdd1e6c8592f4d7 [PacketRouter]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/modules/pacing/packet_router.h;drc=3d2210876e31d0bb5c7de88b27fd02ceb1f4e03e [TaskQueuePacedSender]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/modules/pacing/task_queue_paced_sender.h;drc=5051693ada61bc7b78855c6fb3fa87a0394fa813 diff --git a/modules/pacing/interval_budget.cc b/modules/pacing/interval_budget.cc index 321ca46be4..b7f29acdb6 100644 --- a/modules/pacing/interval_budget.cc +++ b/modules/pacing/interval_budget.cc @@ -11,6 +11,8 @@ #include "modules/pacing/interval_budget.h" #include +#include +#include #include "rtc_base/numerics/safe_conversions.h" @@ -52,7 +54,7 @@ void IntervalBudget::UseBudget(size_t bytes) { } size_t IntervalBudget::bytes_remaining() const { - return rtc::saturated_cast(std::max(0, bytes_remaining_)); + return saturated_cast(std::max(0, bytes_remaining_)); } double IntervalBudget::budget_ratio() const { diff --git a/modules/pacing/interval_budget_unittest.cc b/modules/pacing/interval_budget_unittest.cc index e182d35510..8be0f6186d 100644 --- a/modules/pacing/interval_budget_unittest.cc +++ b/modules/pacing/interval_budget_unittest.cc @@ -10,6 +10,8 @@ #include "modules/pacing/interval_budget.h" +#include + #include "test/gtest.h" namespace webrtc { diff --git a/modules/pacing/pacing_controller.cc b/modules/pacing/pacing_controller.cc index 74def9c538..7e1fb3c60a 100644 --- a/modules/pacing/pacing_controller.cc +++ b/modules/pacing/pacing_controller.cc @@ -11,17 +11,29 @@ #include "modules/pacing/pacing_controller.h" 
#include +#include +#include +#include #include +#include #include #include #include "absl/cleanup/cleanup.h" #include "absl/strings/match.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/field_trials_view.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/pacing/bitrate_prober.h" -#include "modules/pacing/interval_budget.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/time_utils.h" +#include "rtc_base/numerics/safe_conversions.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -42,8 +54,6 @@ bool IsEnabled(const FieldTrialsView& field_trials, absl::string_view key) { } // namespace -const TimeDelta PacingController::kMaxExpectedQueueLength = - TimeDelta::Millis(2000); const TimeDelta PacingController::kPausedProcessInterval = kCongestedPacketInterval; const TimeDelta PacingController::kMinSleepTime = TimeDelta::Millis(1); @@ -55,11 +65,13 @@ const TimeDelta PacingController::kMaxEarlyProbeProcessing = PacingController::PacingController(Clock* clock, PacketSender* packet_sender, - const FieldTrialsView& field_trials) + const FieldTrialsView& field_trials, + Configuration configuration) : clock_(clock), packet_sender_(packet_sender), field_trials_(field_trials), drain_large_queues_( + configuration.drain_large_queues && !IsDisabled(field_trials_, "WebRTC-Pacer-DrainQueue")), send_padding_if_silent_( IsEnabled(field_trials_, "WebRTC-Pacer-PadInSilence")), @@ -69,9 +81,10 @@ PacingController::PacingController(Clock* clock, fast_retransmissions_( IsEnabled(field_trials_, "WebRTC-Pacer-FastRetransmissions")), keyframe_flushing_( + configuration.keyframe_flushing || IsEnabled(field_trials_, "WebRTC-Pacer-KeyframeFlushing")), transport_overhead_per_packet_(DataSize::Zero()), - send_burst_interval_(TimeDelta::Zero()), + send_burst_interval_(configuration.send_burst_interval), last_timestamp_(clock_->CurrentTime()), paused_(false), media_debt_(DataSize::Zero()), @@ -84,9 +97,11 @@ PacingController::PacingController(Clock* clock, last_process_time_(clock->CurrentTime()), last_send_time_(last_process_time_), seen_first_packet_(false), - packet_queue_(/*creation_time=*/last_process_time_), + packet_queue_(/*creation_time=*/last_process_time_, + configuration.prioritize_audio_retransmission, + configuration.packet_queue_ttl), congested_(false), - queue_time_limit_(kMaxExpectedQueueLength), + queue_time_limit_(configuration.queue_time_limit), account_for_audio_(false), include_overhead_(false), circuit_breaker_threshold_(1 << 16) { @@ -99,7 +114,7 @@ PacingController::PacingController(Clock* clock, PacingController::~PacingController() = default; void PacingController::CreateProbeClusters( - rtc::ArrayView probe_cluster_configs) { + ArrayView probe_cluster_configs) { for (const ProbeClusterConfig probe_cluster_config : probe_cluster_configs) { prober_.CreateProbeCluster(probe_cluster_config); } @@ -162,7 +177,6 @@ void PacingController::SetProbingEnabled(bool enabled) { void PacingController::SetPacingRates(DataRate pacing_rate, DataRate padding_rate) { - static constexpr DataRate kMaxRate = DataRate::KilobitsPerSec(100'000); RTC_CHECK_GT(pacing_rate, DataRate::Zero()); RTC_CHECK_GE(padding_rate, DataRate::Zero()); if (padding_rate > pacing_rate) { @@ -172,11 +186,12 @@ void PacingController::SetPacingRates(DataRate 
pacing_rate, padding_rate = pacing_rate; } - if (pacing_rate > kMaxRate || padding_rate > kMaxRate) { - RTC_LOG(LS_WARNING) << "Very high pacing rates ( > " << kMaxRate.kbps() + if (pacing_rate > max_rate || padding_rate > max_rate) { + RTC_LOG(LS_WARNING) << "Very high pacing rates ( > " << max_rate.kbps() << " kbps) configured: pacing = " << pacing_rate.kbps() << " kbps, padding = " << padding_rate.kbps() << " kbps."; + max_rate = std::max(pacing_rate, padding_rate) * 1.1; } pacing_rate_ = pacing_rate; padding_rate_ = padding_rate; @@ -199,7 +214,7 @@ void PacingController::EnqueuePacket(std::unique_ptr packet) { // queue). Flush any pending packets currently in the queue for that stream // in order to get the new keyframe out as quickly as possible. packet_queue_.RemovePacketsForSsrc(packet->Ssrc()); - absl::optional rtx_ssrc = + std::optional rtx_ssrc = packet_sender_->GetRtxSsrcForMedia(packet->Ssrc()); if (rtx_ssrc) { packet_queue_.RemovePacketsForSsrc(*rtx_ssrc); } @@ -247,13 +262,17 @@ void PacingController::SetSendBurstInterval(TimeDelta burst_interval) { send_burst_interval_ = burst_interval; } +void PacingController::SetAllowProbeWithoutMediaPacket(bool allow) { + prober_.SetAllowProbeWithoutMediaPacket(allow); +} + TimeDelta PacingController::ExpectedQueueTime() const { RTC_DCHECK_GT(adjusted_media_rate_, DataRate::Zero()); return QueueSizeData() / adjusted_media_rate_; } size_t PacingController::QueueSizePackets() const { - return rtc::checked_cast(packet_queue_.SizeInPackets()); + return checked_cast(packet_queue_.SizeInPackets()); } const std::array& @@ -274,7 +293,7 @@ DataSize PacingController::CurrentBufferLevel() const { return std::max(media_debt_, padding_debt_); } -absl::optional PacingController::FirstSentPacketTime() const { +std::optional PacingController::FirstSentPacketTime() const { return first_sent_packet_time_; } @@ -291,9 +310,9 @@ TimeDelta PacingController::UpdateTimeAndGetElapsed(Timestamp now) { TimeDelta elapsed_time = now - last_process_time_; last_process_time_ = now; if (elapsed_time > kMaxElapsedTime) { - RTC_LOG(LS_WARNING) << "Elapsed time (" << ToLogString(elapsed_time) + RTC_LOG(LS_WARNING) << "Elapsed time (" << elapsed_time ") longer than expected, limiting to " - << ToLogString(kMaxElapsedTime); + << kMaxElapsedTime; elapsed_time = kMaxElapsedTime; } return elapsed_time; } @@ -343,9 +362,13 @@ Timestamp PacingController::NextSendTime() const { // debt is allowed to grow up to one packet more than what can be sent // during 'send_burst_period_'. TimeDelta drain_time = media_debt_ / adjusted_media_rate_; + // Ensure that a burst of sent packets is not larger than kMaxBurstSize in + // order to not risk overfilling socket buffers at high bitrate. + TimeDelta send_burst_interval = + std::min(send_burst_interval_, kMaxBurstSize / adjusted_media_rate_); next_send_time = last_process_time_ + - ((send_burst_interval_ > drain_time) ? TimeDelta::Zero() : drain_time); + ((send_burst_interval > drain_time) ? TimeDelta::Zero() : drain_time); } else if (padding_rate_ > DataRate::Zero() && packet_queue_.Empty()) { // If we _don't_ have pending packets, check how long until we have // bandwidth for padding packets.
Both media and padding debts must @@ -392,8 +415,8 @@ void PacingController::ProcessPackets() { keepalive_data_sent += DataSize::Bytes(packet->payload_size() + packet->padding_size()); packet_sender_->SendPacket(std::move(packet), PacedPacketInfo()); - for (auto& packet : packet_sender_->FetchFec()) { - EnqueuePacket(std::move(packet)); + for (auto& fec_packet : packet_sender_->FetchFec()) { + EnqueuePacket(std::move(fec_packet)); } } } @@ -704,8 +727,7 @@ Timestamp PacingController::NextUnpacedSendTime() const { } if (fast_retransmissions_) { Timestamp leading_retransmission_send_time = - packet_queue_.LeadingPacketEnqueueTime( - RtpPacketMediaType::kRetransmission); + packet_queue_.LeadingPacketEnqueueTimeForRetransmission(); if (leading_retransmission_send_time.IsFinite()) { return leading_retransmission_send_time; } diff --git a/modules/pacing/pacing_controller.h b/modules/pacing/pacing_controller.h index 2145868a62..a745f4480f 100644 --- a/modules/pacing/pacing_controller.h +++ b/modules/pacing/pacing_controller.h @@ -15,24 +15,23 @@ #include #include -#include #include +#include #include -#include "absl/types/optional.h" +#include "api/array_view.h" #include "api/field_trials_view.h" -#include "api/function_view.h" -#include "api/transport/field_trial_based_config.h" +#include "api/rtp_packet_sender.h" #include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/pacing/bitrate_prober.h" -#include "modules/pacing/interval_budget.h" #include "modules/pacing/prioritized_packet_queue.h" -#include "modules/pacing/rtp_packet_pacer.h" -#include "modules/rtp_rtcp/include/rtp_packet_sender.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "rtc_base/experiments/field_trial_parser.h" -#include "rtc_base/thread_annotations.h" +#include "system_wrappers/include/clock.h" namespace webrtc { @@ -58,18 +57,14 @@ class PacingController { // TODO(bugs.webrtc.org/11340): Make pure virtual once downstream projects // have been updated. virtual void OnAbortedRetransmissions( - uint32_t ssrc, - rtc::ArrayView sequence_numbers) {} - virtual absl::optional GetRtxSsrcForMedia(uint32_t ssrc) const { - return absl::nullopt; + uint32_t /* ssrc */, + ArrayView /* sequence_numbers */) {} + virtual std::optional GetRtxSsrcForMedia( + uint32_t /* ssrc */) const { + return std::nullopt; } }; - // Expected max pacer delay. If ExpectedQueueTime() is higher than - // this value, the packet producers should wait (eg drop frames rather than - // encoding them). Bitrate sent may temporarily exceed target set by - // UpdateBitrate() so that this limit will be upheld. - static const TimeDelta kMaxExpectedQueueLength; // If no media or paused, wake up at least every `kPausedProcessIntervalMs` in // order to send a keep-alive packet so we don't get stuck in a bad state due // to lack of feedback. @@ -86,10 +81,50 @@ class PacingController { // set to 1ms as this is intended to allow times be rounded down to the // nearest millisecond. static const TimeDelta kMaxEarlyProbeProcessing; + // Max total size of packets expected to be sent in a burst in order to not + // risk losing packets due to too small send socket buffers. It puts an upper + // limit on the send burst interval. + // Ex: max send burst interval = 63Kb / 10Mbit/s = 50ms.
+ static constexpr DataSize kMaxBurstSize = DataSize::Bytes(63 * 1000); + + // Configuration default values. + static constexpr TimeDelta kDefaultBurstInterval = TimeDelta::Millis(40); + static constexpr TimeDelta kMaxExpectedQueueLength = TimeDelta::Millis(2000); + + struct Configuration { + // If the pacer queue grows longer than the configured max queue limit, + // the pacer sends at the minimum rate needed to keep to the max queue limit + // and ignores the current bandwidth estimate. + bool drain_large_queues = true; + // Expected max pacer delay. If ExpectedQueueTime() is higher than + // this value, the packet producers should wait (eg drop frames rather than + // encoding them). Bitrate sent may temporarily exceed target set by + // SetPacingRates() so that this limit will be upheld if + // `drain_large_queues` is set. + TimeDelta queue_time_limit = kMaxExpectedQueueLength; + // If the first packet of a keyframe is enqueued on an RTP stream, the pacer + // skips forward to that packet and drops other enqueued packets on that + // stream, unless a keyframe is already being paced. + bool keyframe_flushing = false; + // Audio retransmission is prioritized before video retransmission packets. + bool prioritize_audio_retransmission = false; + // Configure separate timeouts per priority. After a timeout, a packet of + // that sort will not be paced and instead dropped. + // Note: to set TTL on audio retransmission, + // `prioritize_audio_retransmission` must be true. + PacketQueueTTL packet_queue_ttl; + // The pacer is allowed to send enqueued packets in bursts and can build up + // a packet "debt" that corresponds to approximately the send rate during the + // burst interval. + TimeDelta send_burst_interval = kDefaultBurstInterval; + }; + + static Configuration DefaultConfiguration() { return Configuration{}; } PacingController(Clock* clock, PacketSender* packet_sender, - const FieldTrialsView& field_trials); + const FieldTrialsView& field_trials, + Configuration configuration = DefaultConfiguration()); ~PacingController(); @@ -98,7 +133,7 @@ class PacingController { void EnqueuePacket(std::unique_ptr packet); void CreateProbeClusters( - rtc::ArrayView probe_cluster_configs); + ArrayView probe_cluster_configs); void Pause(); // Temporarily pause all sending. void Resume(); // Resume sending packets. @@ -123,6 +158,9 @@ class PacingController { // 'burst_interval'. void SetSendBurstInterval(TimeDelta burst_interval); + // A probe may be sent without first waiting for a media packet. + void SetAllowProbeWithoutMediaPacket(bool allow); + // Returns the time when the oldest packet was queued. Timestamp OldestPacketEnqueueTime() const; @@ -139,7 +177,7 @@ class PacingController { DataSize CurrentBufferLevel() const; // Returns the time when the first packet was sent. - absl::optional FirstSentPacketTime() const; + std::optional FirstSentPacketTime() const; // Returns the number of milliseconds it will take to send the current // packets in the queue, given the current size and bitrate, ignoring prio.
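The Configuration struct above moves what used to be field-trial-only behaviour into explicit construction-time options. A minimal sketch of how a caller might fill it in, assuming the clock, packet sender and field-trials objects already exist (those surrounding names are illustrative, not part of this change):

  // Start from the defaults and override only what the caller cares about.
  PacingController::Configuration config =
      PacingController::DefaultConfiguration();
  config.keyframe_flushing = true;                         // Flush stale packets when a new keyframe is enqueued.
  config.prioritize_audio_retransmission = true;           // Audio RTX is paced ahead of video RTX.
  config.packet_queue_ttl.video = TimeDelta::Millis(400);  // Drop video packets queued longer than 400 ms.
  config.send_burst_interval = TimeDelta::Millis(20);      // Use smaller bursts than the 40 ms default.
  PacingController pacer(&clock, &packet_sender, field_trials, config);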
@@ -208,7 +246,7 @@ class PacingController { const bool ignore_transport_overhead_; const bool fast_retransmissions_; const bool keyframe_flushing_; - + DataRate max_rate = DataRate::BitsPerSec(100'000'000); DataSize transport_overhead_per_packet_; TimeDelta send_burst_interval_; @@ -235,7 +273,7 @@ class PacingController { Timestamp last_process_time_; Timestamp last_send_time_; - absl::optional first_sent_packet_time_; + std::optional first_sent_packet_time_; bool seen_first_packet_; PrioritizedPacketQueue packet_queue_; diff --git a/modules/pacing/pacing_controller_unittest.cc b/modules/pacing/pacing_controller_unittest.cc index ade71cd5f5..2b755ff507 100644 --- a/modules/pacing/pacing_controller_unittest.cc +++ b/modules/pacing/pacing_controller_unittest.cc @@ -11,17 +11,24 @@ #include "modules/pacing/pacing_controller.h" #include -#include +#include +#include +#include +#include #include -#include +#include #include #include +#include "api/array_view.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" +#include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "modules/pacing/packet_router.h" +#include "modules/pacing/bitrate_prober.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "system_wrappers/include/clock.h" #include "test/explicit_key_value_config.h" #include "test/gmock.h" @@ -30,6 +37,7 @@ using ::testing::_; using ::testing::AnyNumber; using ::testing::Field; +using ::testing::NiceMock; using ::testing::Pointee; using ::testing::Property; using ::testing::Return; @@ -98,7 +106,7 @@ class MediaStream { class MockPacingControllerCallback : public PacingController::PacketSender { public: void SendPacket(std::unique_ptr packet, - const PacedPacketInfo& cluster_info) override { + const PacedPacketInfo& /* cluster_info */) override { SendPacket(packet->Ssrc(), packet->SequenceNumber(), packet->capture_time().ms(), packet->packet_type() == RtpPacketMediaType::kRetransmission, @@ -132,9 +140,9 @@ class MockPacingControllerCallback : public PacingController::PacketSender { MOCK_METHOD(size_t, SendPadding, (size_t target_size)); MOCK_METHOD(void, OnAbortedRetransmissions, - (uint32_t, rtc::ArrayView), + (uint32_t, webrtc::ArrayView), (override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, GetRtxSsrcForMedia, (uint32_t), (const, override)); @@ -160,9 +168,9 @@ class MockPacketSender : public PacingController::PacketSender { (override)); MOCK_METHOD(void, OnAbortedRetransmissions, - (uint32_t, rtc::ArrayView), + (uint32_t, webrtc::ArrayView), (override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, GetRtxSsrcForMedia, (uint32_t), (const, override)); @@ -176,7 +184,7 @@ class PacingControllerPadding : public PacingController::PacketSender { PacingControllerPadding() : padding_sent_(0), total_bytes_sent_(0) {} void SendPacket(std::unique_ptr packet, - const PacedPacketInfo& pacing_info) override { + const PacedPacketInfo& /* pacing_info */) override { total_bytes_sent_ += packet->payload_size(); } @@ -198,10 +206,9 @@ class PacingControllerPadding : public PacingController::PacketSender { return packets; } - void OnAbortedRetransmissions(uint32_t, - rtc::ArrayView) override {} - absl::optional GetRtxSsrcForMedia(uint32_t) const override { - return absl::nullopt; + void OnAbortedRetransmissions(uint32_t, ArrayView) override {} + std::optional GetRtxSsrcForMedia(uint32_t) const override { + return std::nullopt; 
} void OnBatchComplete() override {} @@ -259,10 +266,9 @@ class PacingControllerProbing : public PacingController::PacketSender { return packets; } - void OnAbortedRetransmissions(uint32_t, - rtc::ArrayView) override {} - absl::optional GetRtxSsrcForMedia(uint32_t) const override { - return absl::nullopt; + void OnAbortedRetransmissions(uint32_t, ArrayView) override {} + std::optional GetRtxSsrcForMedia(uint32_t) const override { + return std::nullopt; } void OnBatchComplete() override {} @@ -427,6 +433,7 @@ TEST_F(PacingControllerTest, BudgetAffectsAudioInTrial) { DataRate pacing_rate = DataRate::BitsPerSec(kPacketSize / 3 * 8 * kProcessIntervalsPerSecond); pacer.SetPacingRates(pacing_rate, DataRate::Zero()); + pacer.SetSendBurstInterval(TimeDelta::Zero()); // Video fills budget for following process periods. pacer.EnqueuePacket(video_.BuildNextPacket(kPacketSize)); EXPECT_CALL(callback_, SendPacket).Times(1); @@ -484,7 +491,7 @@ TEST_F(PacingControllerTest, FirstSentPacketTimeIsSet) { EXPECT_EQ(kStartTime, pacer->FirstSentPacketTime()); } -TEST_F(PacingControllerTest, QueueAndPacePackets) { +TEST_F(PacingControllerTest, QueueAndPacePacketsWithZeroBurstPeriod) { const uint32_t kSsrc = 12345; uint16_t sequence_number = 1234; const DataSize kPackeSize = DataSize::Bytes(250); @@ -495,6 +502,7 @@ TEST_F(PacingControllerTest, QueueAndPacePackets) { const size_t kPacketsToSend = (kSendInterval * kTargetRate).bytes() * kPaceMultiplier / kPackeSize.bytes(); auto pacer = std::make_unique(&clock_, &callback_, trials_); + pacer->SetSendBurstInterval(TimeDelta::Zero()); pacer->SetPacingRates(kTargetRate * kPaceMultiplier, DataRate::Zero()); for (size_t i = 0; i < kPacketsToSend; ++i) { @@ -536,30 +544,30 @@ TEST_F(PacingControllerTest, PaceQueuedPackets) { auto pacer = std::make_unique(&clock_, &callback_, trials_); pacer->SetPacingRates(kTargetRate * kPaceMultiplier, DataRate::Zero()); - // Due to the multiplicative factor we can send 5 packets during a send - // interval. 
(network capacity * multiplier / (8 bits per byte * - // (packet size * #send intervals per second) - const size_t packets_to_send_per_interval = - kTargetRate.bps() * kPaceMultiplier / (8 * kPacketSize * 200); - for (size_t i = 0; i < packets_to_send_per_interval; ++i) { + const size_t packets_to_send_per_burst_interval = + (kTargetRate * kPaceMultiplier * PacingController::kDefaultBurstInterval) + .bytes() / + kPacketSize; + for (size_t i = 0; i < packets_to_send_per_burst_interval; ++i) { SendAndExpectPacket(pacer.get(), RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); } - for (size_t j = 0; j < packets_to_send_per_interval * 10; ++j) { + for (size_t j = 0; j < packets_to_send_per_burst_interval * 10; ++j) { pacer->EnqueuePacket(BuildPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize)); } - EXPECT_EQ(packets_to_send_per_interval + packets_to_send_per_interval * 10, + EXPECT_EQ(packets_to_send_per_burst_interval + + packets_to_send_per_burst_interval * 10, pacer->QueueSizePackets()); - while (pacer->QueueSizePackets() > packets_to_send_per_interval * 10) { + while (pacer->QueueSizePackets() > packets_to_send_per_burst_interval * 10) { AdvanceTimeUntil(pacer->NextSendTime()); pacer->ProcessPackets(); } - EXPECT_EQ(pacer->QueueSizePackets(), packets_to_send_per_interval * 10); + EXPECT_EQ(pacer->QueueSizePackets(), packets_to_send_per_burst_interval * 10); EXPECT_CALL(callback_, SendPadding).Times(0); EXPECT_CALL(callback_, SendPacket(ssrc, _, _, false, false)) @@ -582,12 +590,12 @@ TEST_F(PacingControllerTest, PaceQueuedPackets) { pacer->ProcessPackets(); // Send some more packet, just show that we can..? - for (size_t i = 0; i < packets_to_send_per_interval; ++i) { + for (size_t i = 0; i < packets_to_send_per_burst_interval; ++i) { SendAndExpectPacket(pacer.get(), RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), 250); } - EXPECT_EQ(packets_to_send_per_interval, pacer->QueueSizePackets()); - for (size_t i = 0; i < packets_to_send_per_interval; ++i) { + EXPECT_EQ(packets_to_send_per_burst_interval, pacer->QueueSizePackets()); + for (size_t i = 0; i < packets_to_send_per_burst_interval; ++i) { AdvanceTimeUntil(pacer->NextSendTime()); pacer->ProcessPackets(); } @@ -641,19 +649,23 @@ TEST_F(PacingControllerTest, TEST_F(PacingControllerTest, Padding) { uint32_t ssrc = 12345; uint16_t sequence_number = 1234; - const size_t kPacketSize = 250; + const size_t kPacketSize = 1000; auto pacer = std::make_unique(&clock_, &callback_, trials_); pacer->SetPacingRates(kTargetRate * kPaceMultiplier, kTargetRate); - const size_t kPacketsToSend = 20; + const size_t kPacketsToSend = 30; for (size_t i = 0; i < kPacketsToSend; ++i) { SendAndExpectPacket(pacer.get(), RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); } + + int expected_bursts = + floor(DataSize::Bytes(pacer->QueueSizePackets() * kPacketSize) / + (kPaceMultiplier * kTargetRate) / + PacingController::kDefaultBurstInterval); const TimeDelta expected_pace_time = - DataSize::Bytes(pacer->QueueSizePackets() * kPacketSize) / - (kPaceMultiplier * kTargetRate); + (expected_bursts - 1) * PacingController::kDefaultBurstInterval; EXPECT_CALL(callback_, SendPadding).Times(0); // Only the media packets should be sent. 
Timestamp start_time = clock_.CurrentTime(); @@ -663,7 +675,7 @@ TEST_F(PacingControllerTest, Padding) { } const TimeDelta actual_pace_time = clock_.CurrentTime() - start_time; EXPECT_LE((actual_pace_time - expected_pace_time).Abs(), - PacingController::kMinSleepTime); + PacingController::kDefaultBurstInterval); // Pacing media happens at 2.5x, but padding was configured with 1.0x // factor. We have to wait until the padding debt is gone before we start @@ -766,8 +778,8 @@ TEST_F(PacingControllerTest, VerifyAverageBitrateVaryingMediaPayload) { media_payload)); media_bytes += media_payload; } - - AdvanceTimeUntil(pacer->NextSendTime()); + AdvanceTimeUntil(std::min(clock_.CurrentTime() + TimeDelta::Millis(20), + pacer->NextSendTime())); pacer->ProcessPackets(); } @@ -805,20 +817,18 @@ TEST_F(PacingControllerTest, Priority) { // Expect all high and normal priority to be sent out first. EXPECT_CALL(callback_, SendPadding).Times(0); + testing::Sequence s; EXPECT_CALL(callback_, SendPacket(ssrc, _, capture_time_ms, _, _)) - .Times(packets_to_send_per_interval + 1); + .Times(packets_to_send_per_interval + 1) + .InSequence(s); + EXPECT_CALL(callback_, SendPacket(ssrc_low_priority, _, + capture_time_ms_low_priority, _, _)) + .InSequence(s); - while (pacer->QueueSizePackets() > 1) { + while (pacer->QueueSizePackets() > 0) { AdvanceTimeUntil(pacer->NextSendTime()); pacer->ProcessPackets(); } - - EXPECT_EQ(1u, pacer->QueueSizePackets()); - - EXPECT_CALL(callback_, SendPacket(ssrc_low_priority, _, - capture_time_ms_low_priority, _, _)); - AdvanceTimeUntil(pacer->NextSendTime()); - pacer->ProcessPackets(); } TEST_F(PacingControllerTest, RetransmissionPriority) { @@ -829,23 +839,22 @@ TEST_F(PacingControllerTest, RetransmissionPriority) { auto pacer = std::make_unique(&clock_, &callback_, trials_); pacer->SetPacingRates(kTargetRate * kPaceMultiplier, DataRate::Zero()); - // Due to the multiplicative factor we can send 5 packets during a send - // interval. (network capacity * multiplier / (8 bits per byte * - // (packet size * #send intervals per second) - const size_t packets_to_send_per_interval = - kTargetRate.bps() * kPaceMultiplier / (8 * 250 * 200); + const size_t packets_to_send_per_burst_interval = + (kTargetRate * kPaceMultiplier * PacingController::kDefaultBurstInterval) + .bytes() / + 250; pacer->ProcessPackets(); EXPECT_EQ(0u, pacer->QueueSizePackets()); // Alternate retransmissions and normal packets. - for (size_t i = 0; i < packets_to_send_per_interval; ++i) { + for (size_t i = 0; i < packets_to_send_per_burst_interval; ++i) { pacer->EnqueuePacket(BuildPacket(RtpPacketMediaType::kVideo, ssrc, sequence_number++, capture_time_ms, 250)); pacer->EnqueuePacket(BuildPacket(RtpPacketMediaType::kRetransmission, ssrc, sequence_number++, capture_time_ms_retransmission, 250)); } - EXPECT_EQ(2 * packets_to_send_per_interval, pacer->QueueSizePackets()); + EXPECT_EQ(2 * packets_to_send_per_burst_interval, pacer->QueueSizePackets()); // Expect all retransmissions to be sent out first despite having a later // capture time. 
@@ -853,19 +862,19 @@ TEST_F(PacingControllerTest, RetransmissionPriority) { EXPECT_CALL(callback_, SendPacket(_, _, _, false, _)).Times(0); EXPECT_CALL(callback_, SendPacket(ssrc, _, capture_time_ms_retransmission, true, _)) - .Times(packets_to_send_per_interval); + .Times(packets_to_send_per_burst_interval); - while (pacer->QueueSizePackets() > packets_to_send_per_interval) { + while (pacer->QueueSizePackets() > packets_to_send_per_burst_interval) { AdvanceTimeUntil(pacer->NextSendTime()); pacer->ProcessPackets(); } - EXPECT_EQ(packets_to_send_per_interval, pacer->QueueSizePackets()); + EXPECT_EQ(packets_to_send_per_burst_interval, pacer->QueueSizePackets()); // Expect the remaining (non-retransmission) packets to be sent. EXPECT_CALL(callback_, SendPadding).Times(0); EXPECT_CALL(callback_, SendPacket(_, _, _, true, _)).Times(0); EXPECT_CALL(callback_, SendPacket(ssrc, _, capture_time_ms, false, _)) - .Times(packets_to_send_per_interval); + .Times(packets_to_send_per_burst_interval); while (pacer->QueueSizePackets() > 0) { AdvanceTimeUntil(pacer->NextSendTime()); @@ -890,13 +899,13 @@ TEST_F(PacingControllerTest, HighPrioDoesntAffectBudget) { sequence_number++, capture_time_ms, kPacketSize); } pacer->ProcessPackets(); + EXPECT_EQ(pacer->QueueSizePackets(), 0u); // Low prio packets does affect the budget. - // Due to the multiplicative factor we can send 5 packets during a send - // interval. (network capacity * multiplier / (8 bits per byte * - // (packet size * #send intervals per second) - const size_t kPacketsToSendPerInterval = - kTargetRate.bps() * kPaceMultiplier / (8 * kPacketSize * 200); - for (size_t i = 0; i < kPacketsToSendPerInterval; ++i) { + const size_t kPacketsToSendPerBurstInterval = + (kTargetRate * kPaceMultiplier * PacingController::kDefaultBurstInterval) + .bytes() / + kPacketSize; + for (size_t i = 0; i < kPacketsToSendPerBurstInterval; ++i) { SendAndExpectPacket(pacer.get(), RtpPacketMediaType::kVideo, ssrc, sequence_number++, clock_.TimeInMilliseconds(), kPacketSize); @@ -904,16 +913,16 @@ TEST_F(PacingControllerTest, HighPrioDoesntAffectBudget) { // Send all packets and measure pace time. Timestamp start_time = clock_.CurrentTime(); + EXPECT_EQ(pacer->NextSendTime(), clock_.CurrentTime()); while (pacer->QueueSizePackets() > 0) { AdvanceTimeUntil(pacer->NextSendTime()); pacer->ProcessPackets(); } - // Measure pacing time. Expect only low-prio packets to affect this. + // Measure pacing time. TimeDelta pacing_time = clock_.CurrentTime() - start_time; - TimeDelta expected_pacing_time = - DataSize::Bytes(kPacketsToSendPerInterval * kPacketSize) / - (kTargetRate * kPaceMultiplier); + // All packets sent in one burst since audio packets are not accounted for. + TimeDelta expected_pacing_time = TimeDelta::Zero(); EXPECT_NEAR(pacing_time.us(), expected_pacing_time.us(), PacingController::kMinSleepTime.us()); } @@ -965,6 +974,7 @@ TEST_F(PacingControllerTest, DoesNotAllowOveruseAfterCongestion) { auto now_ms = [this] { return clock_.TimeInMilliseconds(); }; auto pacer = std::make_unique(&clock_, &callback_, trials_); pacer->SetPacingRates(kTargetRate * kPaceMultiplier, DataRate::Zero()); + pacer->SetSendBurstInterval(TimeDelta::Zero()); EXPECT_CALL(callback_, SendPadding).Times(0); // The pacing rate is low enough that the budget should not allow two packets // to be sent in a row. 
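Several of the tests above now call SetSendBurstInterval(TimeDelta::Zero()) before exercising budget behaviour: with the new 40 ms default burst interval the pacer may release several packets back to back, while a zero interval restores strict per-packet spacing. A rough sketch of the effect, with rates and sizes that are illustrative rather than taken from this diff:

  // With bursts disabled, each packet must wait for its own media debt to
  // drain, so packets are spaced by roughly packet_size / pacing_rate.
  pacer.SetSendBurstInterval(TimeDelta::Zero());
  pacer.SetPacingRates(/*pacing_rate=*/DataRate::KilobitsPerSec(800),
                       /*padding_rate=*/DataRate::Zero());
  // A 1000 byte packet carries 8000 bits, so at 800 kbps NextSendTime()
  // advances in ~10 ms steps instead of releasing a 40 ms burst at once.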
@@ -1362,10 +1372,9 @@ TEST_F(PacingControllerTest, CanProbeWithPaddingBeforeFirstMediaPacket) { const int kInitialBitrateBps = 300000; PacingControllerProbing packet_sender; - const test::ExplicitKeyValueConfig trials( - "WebRTC-Bwe-ProbingBehavior/min_packet_size:0/"); auto pacer = - std::make_unique(&clock_, &packet_sender, trials); + std::make_unique(&clock_, &packet_sender, trials_); + pacer->SetAllowProbeWithoutMediaPacket(true); std::vector probe_clusters = { {.at_time = clock_.CurrentTime(), .target_data_rate = kFirstClusterRate, @@ -1389,16 +1398,46 @@ TEST_F(PacingControllerTest, CanProbeWithPaddingBeforeFirstMediaPacket) { EXPECT_GT(packet_sender.padding_packets_sent(), 5); } +TEST_F(PacingControllerTest, ProbeSentAfterSetAllowProbeWithoutMediaPacket) { + const int kInitialBitrateBps = 300000; + + PacingControllerProbing packet_sender; + auto pacer = + std::make_unique(&clock_, &packet_sender, trials_); + std::vector probe_clusters = { + {.at_time = clock_.CurrentTime(), + .target_data_rate = kFirstClusterRate, + .target_duration = TimeDelta::Millis(15), + .target_probe_count = 5, + .id = 0}}; + pacer->CreateProbeClusters(probe_clusters); + + pacer->SetPacingRates( + DataRate::BitsPerSec(kInitialBitrateBps * kPaceMultiplier), + DataRate::Zero()); + + pacer->SetAllowProbeWithoutMediaPacket(true); + + Timestamp start = clock_.CurrentTime(); + Timestamp next_process = pacer->NextSendTime(); + while (clock_.CurrentTime() < start + TimeDelta::Millis(100) && + next_process.IsFinite()) { + AdvanceTimeUntil(next_process); + pacer->ProcessPackets(); + next_process = pacer->NextSendTime(); + } + EXPECT_GT(packet_sender.padding_packets_sent(), 5); +} + TEST_F(PacingControllerTest, CanNotProbeWithPaddingIfGeneratePaddingFails) { // const size_t kPacketSize = 1200; const int kInitialBitrateBps = 300000; PacingControllerProbing packet_sender; packet_sender.SetCanGeneratePadding(false); - const test::ExplicitKeyValueConfig trials( - "WebRTC-Bwe-ProbingBehavior/min_packet_size:0/"); auto pacer = - std::make_unique(&clock_, &packet_sender, trials); + std::make_unique(&clock_, &packet_sender, trials_); + pacer->SetAllowProbeWithoutMediaPacket(true); std::vector probe_clusters = { {.at_time = clock_.CurrentTime(), .target_data_rate = kFirstClusterRate, @@ -1524,7 +1563,7 @@ TEST_F(PacingControllerTest, ProbeClusterId) { }); bool non_probe_packet_seen = false; EXPECT_CALL(callback, SendPacket) - .WillOnce([&](std::unique_ptr packet, + .WillOnce([&](std::unique_ptr /* packet */, const PacedPacketInfo& cluster_info) { EXPECT_EQ(cluster_info.probe_cluster_id, kNotAProbe); non_probe_packet_seen = true; @@ -1606,7 +1645,7 @@ TEST_F(PacingControllerTest, SmallFirstProbePacket) { // Expect small padding packet to be requested. EXPECT_CALL(callback, GeneratePadding(DataSize::Bytes(1))) - .WillOnce([&](DataSize padding_size) { + .WillOnce([&](DataSize /* padding_size */) { std::vector> padding_packets; padding_packets.emplace_back( BuildPacket(RtpPacketMediaType::kPadding, kAudioSsrc, 1, @@ -1619,7 +1658,7 @@ TEST_F(PacingControllerTest, SmallFirstProbePacket) { EXPECT_CALL(callback, SendPacket) .Times(AnyNumber()) .WillRepeatedly([&](std::unique_ptr packet, - const PacedPacketInfo& cluster_info) { + const PacedPacketInfo& /* cluster_info */) { if (packets_sent == 0) { EXPECT_EQ(packet->packet_type(), RtpPacketMediaType::kPadding); } else { @@ -1853,6 +1892,7 @@ TEST_F(PacingControllerTest, AccountsForAudioEnqueueTime) { // Audio not paced, but still accounted for in budget. 
pacer->SetAccountForAudioPackets(true); pacer->SetPacingRates(kPacingDataRate, kPaddingDataRate); + pacer->SetSendBurstInterval(TimeDelta::Zero()); // Enqueue two audio packets, advance clock to where one packet // should have drained the buffer already, has they been sent @@ -1898,13 +1938,12 @@ TEST_F(PacingControllerTest, NextSendTimeAccountsForPadding) { EXPECT_EQ(pacer->NextSendTime() - clock_.CurrentTime(), PacingController::kPausedProcessInterval); - // Enqueue a new packet, that can't be sent until previous buffer has - // drained. + // Enqueue a new packet that can be sent immediately since the default burst + // interval is 40ms. SendAndExpectPacket(pacer.get(), RtpPacketMediaType::kVideo, kSsrc, sequnce_number++, clock_.TimeInMilliseconds(), kPacketSize.bytes()); - EXPECT_EQ(pacer->NextSendTime() - clock_.CurrentTime(), kPacketPacingTime); - clock_.AdvanceTime(kPacketPacingTime); + EXPECT_EQ(pacer->NextSendTime() - clock_.CurrentTime(), TimeDelta::Zero()); pacer->ProcessPackets(); ::testing::Mock::VerifyAndClearExpectations(&callback_); @@ -1916,11 +1955,13 @@ TEST_F(PacingControllerTest, NextSendTimeAccountsForPadding) { // previous debt has cleared. Since padding was disabled before, there // currently is no padding debt. pacer->SetPacingRates(kPacingDataRate, kPacingDataRate / 2); - EXPECT_EQ(pacer->NextSendTime() - clock_.CurrentTime(), kPacketPacingTime); + EXPECT_EQ(pacer->QueueSizePackets(), 0u); + EXPECT_LT(pacer->NextSendTime() - clock_.CurrentTime(), + PacingController::kDefaultBurstInterval); // Advance time, expect padding. EXPECT_CALL(callback_, SendPadding).WillOnce(Return(kPacketSize.bytes())); - clock_.AdvanceTime(kPacketPacingTime); + clock_.AdvanceTime(pacer->NextSendTime() - clock_.CurrentTime()); pacer->ProcessPackets(); ::testing::Mock::VerifyAndClearExpectations(&callback_); @@ -1933,7 +1974,7 @@ TEST_F(PacingControllerTest, NextSendTimeAccountsForPadding) { pacer->EnqueuePacket( BuildPacket(RtpPacketMediaType::kVideo, kSsrc, sequnce_number++, clock_.TimeInMilliseconds(), kPacketSize.bytes())); - EXPECT_EQ(pacer->NextSendTime() - clock_.CurrentTime(), kPacketPacingTime); + EXPECT_EQ(pacer->NextSendTime(), clock_.CurrentTime()); } TEST_F(PacingControllerTest, PaddingTargetAccountsForPaddingRate) { @@ -2011,8 +2052,8 @@ TEST_F(PacingControllerTest, SendsFecPackets) { TEST_F(PacingControllerTest, GapInPacingDoesntAccumulateBudget) { const uint32_t kSsrc = 12345; uint16_t sequence_number = 1234; - const DataSize kPackeSize = DataSize::Bytes(250); - const TimeDelta kPacketSendTime = TimeDelta::Millis(15); + const DataSize kPackeSize = DataSize::Bytes(1000); + const TimeDelta kPacketSendTime = TimeDelta::Millis(25); auto pacer = std::make_unique(&clock_, &callback_, trials_); pacer->SetPacingRates(kPackeSize / kPacketSendTime, @@ -2028,15 +2069,20 @@ TEST_F(PacingControllerTest, GapInPacingDoesntAccumulateBudget) { // Advance time kPacketSendTime past where the media debt should be 0. clock_.AdvanceTime(2 * kPacketSendTime); - // Enqueue two new packets. Expect only one to be sent one ProcessPackets(). + // Enqueue three new packets. Expect only two to be sent on ProcessPackets() + // since the default burst interval is 40ms.
+ SendAndExpectPacket(pacer.get(), RtpPacketMediaType::kVideo, kSsrc, + sequence_number++, clock_.TimeInMilliseconds(), + kPackeSize.bytes()); + SendAndExpectPacket(pacer.get(), RtpPacketMediaType::kVideo, kSsrc, + sequence_number++, clock_.TimeInMilliseconds(), + kPackeSize.bytes()); + EXPECT_CALL(callback_, SendPacket(kSsrc, sequence_number + 1, _, _, _)) + .Times(0); pacer->EnqueuePacket( BuildPacket(RtpPacketMediaType::kVideo, kSsrc, sequence_number + 1, clock_.TimeInMilliseconds(), kPackeSize.bytes())); - pacer->EnqueuePacket( - BuildPacket(RtpPacketMediaType::kVideo, kSsrc, sequence_number + 2, - clock_.TimeInMilliseconds(), kPackeSize.bytes())); - EXPECT_CALL(callback_, SendPacket(kSsrc, sequence_number + 1, - clock_.TimeInMilliseconds(), false, false)); + pacer->ProcessPackets(); } @@ -2044,6 +2090,7 @@ TEST_F(PacingControllerTest, HandlesSubMicrosecondSendIntervals) { static constexpr DataSize kPacketSize = DataSize::Bytes(1); static constexpr TimeDelta kPacketSendTime = TimeDelta::Micros(1); auto pacer = std::make_unique(&clock_, &callback_, trials_); + pacer->SetSendBurstInterval(TimeDelta::Zero()); // Set pacing rate such that a packet is sent in 0.5us. pacer->SetPacingRates(/*pacing_rate=*/2 * kPacketSize / kPacketSendTime, @@ -2146,6 +2193,36 @@ TEST_F(PacingControllerTest, RespectsTargetRateWhenSendingPacketsInBursts) { EXPECT_EQ(number_of_bursts, 4); } +TEST_F(PacingControllerTest, + MaxBurstSizeLimitedAtHighPacingRateWhenSendingPacketsInBursts) { + NiceMock callback; + PacingController pacer(&clock_, &callback, trials_); + pacer.SetSendBurstInterval(TimeDelta::Millis(100)); + pacer.SetPacingRates(DataRate::KilobitsPerSec(10'000), DataRate::Zero()); + + size_t sent_size_in_burst = 0; + EXPECT_CALL(callback, SendPacket) + .WillRepeatedly([&](std::unique_ptr packet, + const PacedPacketInfo& /* cluster_info */) { + sent_size_in_burst += packet->size(); + }); + + // Enqueue 200 packets from a 200Kb encoded frame. + for (int i = 0; i < 200; ++i) { + pacer.EnqueuePacket(video_.BuildNextPacket(1000)); + } + + while (pacer.QueueSizePackets() > 70) { + pacer.ProcessPackets(); + EXPECT_NEAR(sent_size_in_burst, PacingController::kMaxBurstSize.bytes(), + 1000); + sent_size_in_burst = 0; + TimeDelta time_to_next = pacer.NextSendTime() - clock_.CurrentTime(); + EXPECT_NEAR(time_to_next.ms(), 50, 2); + clock_.AdvanceTime(time_to_next); + } +} + TEST_F(PacingControllerTest, RespectsQueueTimeLimit) { static constexpr DataSize kPacketSize = DataSize::Bytes(100); static constexpr DataRate kNominalPacingRate = DataRate::KilobitsPerSec(200); @@ -2306,5 +2383,42 @@ TEST_F(PacingControllerTest, FlushesPacketsOnKeyFrames) { pacer->ProcessPackets(); } +TEST_F(PacingControllerTest, CanControlQueueSizeUsingTtl) { + const uint32_t kSsrc = 54321; + uint16_t sequence_number = 1234; + + PacingController::Configuration config; + config.drain_large_queues = false; + config.packet_queue_ttl.video = TimeDelta::Millis(500); + auto pacer = + std::make_unique(&clock_, &callback_, trials_, config); + pacer->SetPacingRates(DataRate::BitsPerSec(100'000), DataRate::Zero()); + + Timestamp send_time = Timestamp::Zero(); + for (int i = 0; i < 100; ++i) { + // Enqueue a new audio and video frame every 33ms. 
+ if (clock_.CurrentTime() - send_time > TimeDelta::Millis(33)) { + for (int j = 0; j < 3; ++j) { + auto packet = BuildPacket(RtpPacketMediaType::kVideo, kSsrc, + /*sequence_number=*/++sequence_number, + /*capture_time_ms=*/2, + /*size_bytes=*/1000); + pacer->EnqueuePacket(std::move(packet)); + } + auto packet = BuildPacket(RtpPacketMediaType::kAudio, kAudioSsrc, + /*sequence_number=*/++sequence_number, + /*capture_time_ms=*/2, + /*size_bytes=*/100); + pacer->EnqueuePacket(std::move(packet)); + send_time = clock_.CurrentTime(); + } + + EXPECT_LE(clock_.CurrentTime() - pacer->OldestPacketEnqueueTime(), + TimeDelta::Millis(500)); + clock_.AdvanceTime(pacer->NextSendTime() - clock_.CurrentTime()); + pacer->ProcessPackets(); + } +} + } // namespace } // namespace webrtc diff --git a/modules/pacing/packet_router.cc b/modules/pacing/packet_router.cc index 4c986ad027..58e2cb0acc 100644 --- a/modules/pacing/packet_router.cc +++ b/modules/pacing/packet_router.cc @@ -12,13 +12,20 @@ #include #include -#include #include +#include #include - -#include "absl/types/optional.h" +#include + +#include "absl/functional/any_invocable.h" +#include "api/array_view.h" +#include "api/rtp_headers.h" +#include "api/sequence_checker.h" +#include "api/transport/network_types.h" +#include "api/units/data_size.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -27,12 +34,10 @@ namespace webrtc { -PacketRouter::PacketRouter() : PacketRouter(0) {} - -PacketRouter::PacketRouter(uint16_t start_transport_seq) +PacketRouter::PacketRouter() : last_send_module_(nullptr), active_remb_module_(nullptr), - transport_seq_(start_transport_seq) {} + transport_seq_(1) {} PacketRouter::~PacketRouter() { RTC_DCHECK_RUN_ON(&thread_checker_); @@ -49,10 +54,10 @@ void PacketRouter::AddSendRtpModule(RtpRtcpInterface* rtp_module, RTC_DCHECK_RUN_ON(&thread_checker_); AddSendRtpModuleToMap(rtp_module, rtp_module->SSRC()); - if (absl::optional rtx_ssrc = rtp_module->RtxSsrc()) { + if (std::optional rtx_ssrc = rtp_module->RtxSsrc()) { AddSendRtpModuleToMap(rtp_module, *rtx_ssrc); } - if (absl::optional flexfec_ssrc = rtp_module->FlexfecSsrc()) { + if (std::optional flexfec_ssrc = rtp_module->FlexfecSsrc()) { AddSendRtpModuleToMap(rtp_module, *flexfec_ssrc); } @@ -65,6 +70,29 @@ void PacketRouter::AddSendRtpModule(RtpRtcpInterface* rtp_module, } } +bool PacketRouter::SupportsRtxPayloadPadding() const { + RTC_DCHECK_RUN_ON(&thread_checker_); + for (RtpRtcpInterface* rtp_module : send_modules_list_) { + if (rtp_module->SupportsRtxPayloadPadding()) { + return true; + } + } + return false; +} + +void PacketRouter::RegisterNotifyBweCallback( + absl::AnyInvocable callback) { + RTC_DCHECK_RUN_ON(&thread_checker_); + notify_bwe_callback_ = std::move(callback); +} + +void PacketRouter::ConfigureForRfc8888Feedback(bool send_rtp_packets_as_ect1) { + RTC_DCHECK_RUN_ON(&thread_checker_); + use_cc_feedback_according_to_rfc8888_ = true; + send_rtp_packets_as_ect1_ = send_rtp_packets_as_ect1; +} + void PacketRouter::AddSendRtpModuleToMap(RtpRtcpInterface* rtp_module, uint32_t ssrc) { RTC_DCHECK_RUN_ON(&thread_checker_); @@ -102,10 +130,10 @@ void PacketRouter::RemoveSendRtpModule(RtpRtcpInterface* rtp_module) { MaybeRemoveRembModuleCandidate(rtp_module, /* media_sender = */ true); RemoveSendRtpModuleFromMap(rtp_module->SSRC()); - 
if (absl::optional rtx_ssrc = rtp_module->RtxSsrc()) { + if (std::optional rtx_ssrc = rtp_module->RtxSsrc()) { RemoveSendRtpModuleFromMap(*rtx_ssrc); } - if (absl::optional flexfec_ssrc = rtp_module->FlexfecSsrc()) { + if (std::optional flexfec_ssrc = rtp_module->FlexfecSsrc()) { RemoveSendRtpModuleFromMap(*flexfec_ssrc); } @@ -146,15 +174,6 @@ void PacketRouter::SendPacket(std::unique_ptr packet, "sequence_number", packet->SequenceNumber(), "rtp_timestamp", packet->Timestamp()); - // With the new pacer code path, transport sequence numbers are only set here, - // on the pacer thread. Therefore we don't need atomics/synchronization. - bool assign_transport_sequence_number = - packet->HasExtension(); - if (assign_transport_sequence_number) { - packet->SetExtension((transport_seq_ + 1) & - 0xFFFF); - } - uint32_t ssrc = packet->Ssrc(); auto it = send_modules_map_.find(ssrc); if (it == send_modules_map_.end()) { @@ -166,26 +185,45 @@ } RtpRtcpInterface* rtp_module = it->second; - if (!rtp_module->TrySendPacket(std::move(packet), cluster_info)) { - RTC_LOG(LS_WARNING) << "Failed to send packet, rejected by RTP module."; + if (!packet || !rtp_module->CanSendPacket(*packet)) { + RTC_LOG(LS_WARNING) << "Failed to send packet, not sending media"; return; } - modules_used_in_current_batch_.insert(rtp_module); - // Sending succeeded. - - if (assign_transport_sequence_number) { - ++transport_seq_; + // Transport sequence numbers are used if send side bandwidth estimation is + // used. Send side BWE relies on RTCP feedback using either the format + // described in RFC 8888 or + // https://datatracker.ietf.org/doc/html/draft-holmer-rmcat-transport-wide-cc-extensions-01. + // If RFC 8888 feedback is used, a transport + // sequence number is created for all RTP packets, but not sent in the RTP + // packet. Otherwise, the transport sequence number is only created + // if the TransportSequenceNumber header extension is negotiated for the + // specific media type. Historically, WebRTC only used TransportSequenceNumber + // on video packets. if (use_cc_feedback_according_to_rfc8888_ || + packet->HasExtension()) { + packet->set_transport_sequence_number(transport_seq_++); + } + if (send_rtp_packets_as_ect1_) { + packet->set_send_as_ect1(); + } + rtp_module->AssignSequenceNumber(*packet); + if (notify_bwe_callback_) { + notify_bwe_callback_(*packet, cluster_info); } + rtp_module->SendPacket(std::move(packet), cluster_info); + modules_used_in_current_batch_.insert(rtp_module); + + // Sending succeeded. if (rtp_module->SupportsRtxPayloadPadding()) { // This is now the last module to send media, and has the desired // properties needed for payload based padding. Cache it for later use.
last_send_module_ = rtp_module; } - for (auto& packet : rtp_module->FetchFecPackets()) { - pending_fec_packets_.push_back(std::move(packet)); + for (auto& fec_packet : rtp_module->FetchFecPackets()) { + pending_fec_packets_.push_back(std::move(fec_packet)); } } @@ -253,7 +291,7 @@ std::vector> PacketRouter::GeneratePadding( void PacketRouter::OnAbortedRetransmissions( uint32_t ssrc, - rtc::ArrayView sequence_numbers) { + ArrayView sequence_numbers) { RTC_DCHECK_RUN_ON(&thread_checker_); auto it = send_modules_map_.find(ssrc); if (it != send_modules_map_.end()) { @@ -261,7 +299,7 @@ void PacketRouter::OnAbortedRetransmissions( } } -absl::optional PacketRouter::GetRtxSsrcForMedia(uint32_t ssrc) const { +std::optional PacketRouter::GetRtxSsrcForMedia(uint32_t ssrc) const { RTC_DCHECK_RUN_ON(&thread_checker_); auto it = send_modules_map_.find(ssrc); if (it != send_modules_map_.end() && it->second->SSRC() == ssrc) { @@ -269,12 +307,7 @@ absl::optional PacketRouter::GetRtxSsrcForMedia(uint32_t ssrc) const { // media SSRC for that RTP module. return it->second->RtxSsrc(); } - return absl::nullopt; -} - -uint16_t PacketRouter::CurrentTransportSequenceNumber() const { - RTC_DCHECK_RUN_ON(&thread_checker_); - return transport_seq_ & 0xFFFF; + return std::nullopt; } void PacketRouter::SendRemb(int64_t bitrate_bps, std::vector ssrcs) { diff --git a/modules/pacing/packet_router.h b/modules/pacing/packet_router.h index 61779f49e5..57f05a7d66 100644 --- a/modules/pacing/packet_router.h +++ b/modules/pacing/packet_router.h @@ -14,15 +14,19 @@ #include #include +#include #include #include +#include #include #include -#include #include +#include "absl/functional/any_invocable.h" +#include "api/array_view.h" #include "api/sequence_checker.h" #include "api/transport/network_types.h" +#include "api/units/data_size.h" #include "modules/pacing/pacing_controller.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" @@ -41,15 +45,27 @@ class RtpRtcpInterface; class PacketRouter : public PacingController::PacketSender { public: PacketRouter(); - explicit PacketRouter(uint16_t start_transport_seq); ~PacketRouter() override; PacketRouter(const PacketRouter&) = delete; PacketRouter& operator=(const PacketRouter&) = delete; + // Callback is invoked after pacing, before a packet is forwarded to the + // sending rtp module. + void RegisterNotifyBweCallback( + absl::AnyInvocable callback); + + // Ensures that PacketRouter generates transport sequence numbers for all RTP + // packets. If `send_rtp_packets_as_ect1` is true, packets will be requested + // to be sent as ect1. + void ConfigureForRfc8888Feedback(bool send_rtp_packets_as_ect1); + void AddSendRtpModule(RtpRtcpInterface* rtp_module, bool remb_candidate); void RemoveSendRtpModule(RtpRtcpInterface* rtp_module); + bool SupportsRtxPayloadPadding() const; + void AddReceiveRtpModule(RtcpFeedbackSenderInterface* rtcp_sender, bool remb_candidate); void RemoveReceiveRtpModule(RtcpFeedbackSenderInterface* rtcp_sender); @@ -61,12 +77,10 @@ class PacketRouter : public PacingController::PacketSender { DataSize size) override; void OnAbortedRetransmissions( uint32_t ssrc, - rtc::ArrayView sequence_numbers) override; - absl::optional GetRtxSsrcForMedia(uint32_t ssrc) const override; + ArrayView sequence_numbers) override; + std::optional GetRtxSsrcForMedia(uint32_t ssrc) const override; void OnBatchComplete() override; - uint16_t CurrentTransportSequenceNumber() const; - // Send REMB feedback. 
void SendRemb(int64_t bitrate_bps, std::vector ssrcs); @@ -107,6 +121,12 @@ class PacketRouter : public PacingController::PacketSender { RTC_GUARDED_BY(thread_checker_); uint64_t transport_seq_ RTC_GUARDED_BY(thread_checker_); + bool use_cc_feedback_according_to_rfc8888_ RTC_GUARDED_BY(thread_checker_) = + false; + bool send_rtp_packets_as_ect1_ RTC_GUARDED_BY(thread_checker_) = false; + absl::AnyInvocable + notify_bwe_callback_ RTC_GUARDED_BY(thread_checker_) = nullptr; std::vector> pending_fec_packets_ RTC_GUARDED_BY(thread_checker_); diff --git a/modules/pacing/packet_router_unittest.cc b/modules/pacing/packet_router_unittest.cc index af8534316c..7b9467edb7 100644 --- a/modules/pacing/packet_router_unittest.cc +++ b/modules/pacing/packet_router_unittest.cc @@ -13,12 +13,21 @@ #include #include #include +#include #include +#include +#include "api/rtp_headers.h" +#include "api/transport/network_types.h" +#include "api/units/data_size.h" #include "api/units/time_delta.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/mocks/mock_rtp_rtcp.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "rtc_base/checks.h" #include "rtc_base/fake_clock.h" #include "test/gmock.h" @@ -34,17 +43,13 @@ namespace webrtc { namespace { using ::testing::_; -using ::testing::AnyNumber; -using ::testing::AtLeast; using ::testing::ElementsAreArray; -using ::testing::Field; -using ::testing::Gt; -using ::testing::Le; +using ::testing::InSequence; +using ::testing::MockFunction; using ::testing::NiceMock; using ::testing::Pointee; using ::testing::Property; using ::testing::Return; -using ::testing::SaveArg; constexpr int kProbeMinProbes = 5; constexpr int kProbeMinBytes = 1000; @@ -113,7 +118,7 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesRtx) { const size_t kExpectedPaddingPackets = 1; EXPECT_CALL(rtp_1, GeneratePadding(_)).Times(0); EXPECT_CALL(rtp_2, GeneratePadding(kPaddingSize)) - .WillOnce([&](size_t padding_size) { + .WillOnce([&](size_t /* padding_size */) { return std::vector>( kExpectedPaddingPackets); }); @@ -125,6 +130,31 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesRtx) { packet_router_.RemoveSendRtpModule(&rtp_2); } +TEST_F(PacketRouterTest, SupportsRtxPayloadPaddingFalseIfNoRtxSendModule) { + EXPECT_FALSE(packet_router_.SupportsRtxPayloadPadding()); + + NiceMock none_rtx_module; + ON_CALL(none_rtx_module, SupportsRtxPayloadPadding()) + .WillByDefault(Return(false)); + + packet_router_.AddSendRtpModule(&none_rtx_module, false); + EXPECT_FALSE(packet_router_.SupportsRtxPayloadPadding()); + + packet_router_.RemoveSendRtpModule(&none_rtx_module); + EXPECT_FALSE(packet_router_.SupportsRtxPayloadPadding()); +} + +TEST_F(PacketRouterTest, SupportsRtxPayloadPaddingTrueIfRtxSendModule) { + NiceMock rtx_module; + ON_CALL(rtx_module, SupportsRtxPayloadPadding()).WillByDefault(Return(true)); + + packet_router_.AddSendRtpModule(&rtx_module, false); + EXPECT_TRUE(packet_router_.SupportsRtxPayloadPadding()); + + packet_router_.RemoveSendRtpModule(&rtx_module); + EXPECT_FALSE(packet_router_.SupportsRtxPayloadPadding()); +} + TEST_F(PacketRouterTest, GeneratePaddingPrioritizesVideo) { // Two RTP modules. Neither support RTX, both support padding, // but the first one is for audio and second for video. 
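The packet_router.h changes above add two hooks that the tests below exercise: RegisterNotifyBweCallback(), invoked after pacing and before the packet is forwarded to the sending RTP module, and ConfigureForRfc8888Feedback(), which makes the router assign a transport sequence number to every packet and optionally request packets be sent as ECT(1). A minimal usage sketch, where the bwe_controller consumer is a hypothetical stand-in and not part of this diff:

  PacketRouter packet_router;
  packet_router.RegisterNotifyBweCallback(
      [&](const RtpPacketToSend& packet, const PacedPacketInfo& pacing_info) {
        // Invoked after pacing, before the packet reaches the RTP module;
        // the router has already set the transport sequence number here
        // (when one is assigned).
        bwe_controller.OnPacketSent(packet, pacing_info);  // Hypothetical sink.
      });
  // Generate transport sequence numbers for all packets and send them as ECT(1).
  packet_router.ConfigureForRfc8888Feedback(/*send_rtp_packets_as_ect1=*/true);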
@@ -133,7 +163,7 @@ TEST_F(PacketRouterTest, GeneratePaddingPrioritizesVideo) { const size_t kPaddingSize = 123; const size_t kExpectedPaddingPackets = 1; - auto generate_padding = [&](size_t padding_size) { + auto generate_padding = [&](size_t /* padding_size */) { return std::vector>( kExpectedPaddingPackets); }; @@ -194,29 +224,38 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) { EXPECT_CALL(rtp_1, SSRC()).WillRepeatedly(Return(kSsrc1)); EXPECT_CALL(rtp_1, SupportsPadding).WillRepeatedly(Return(true)); EXPECT_CALL(rtp_1, SupportsRtxPayloadPadding).WillRepeatedly(Return(true)); - EXPECT_CALL(rtp_1, TrySendPacket).WillRepeatedly(Return(false)); - EXPECT_CALL(rtp_1, TrySendPacket( - Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc1)), _)) - .WillRepeatedly(Return(true)); + EXPECT_CALL(rtp_1, CanSendPacket) + .WillRepeatedly([&](const RtpPacketToSend& packet) { + if (packet.Ssrc() == kSsrc1) { + return true; + } + return false; + }); NiceMock rtp_2; EXPECT_CALL(rtp_2, SSRC()).WillRepeatedly(Return(kSsrc2)); EXPECT_CALL(rtp_2, SupportsPadding).WillRepeatedly(Return(true)); EXPECT_CALL(rtp_2, SupportsRtxPayloadPadding).WillRepeatedly(Return(true)); - EXPECT_CALL(rtp_2, TrySendPacket).WillRepeatedly(Return(false)); - EXPECT_CALL(rtp_2, TrySendPacket( - Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc2)), _)) - .WillRepeatedly(Return(true)); + EXPECT_CALL(rtp_2, CanSendPacket) + .WillRepeatedly([&](const RtpPacketToSend& packet) { + if (packet.Ssrc() == kSsrc2) { + return true; + } + return false; + }); // Third module is sending media, but does not support rtx. NiceMock rtp_3; EXPECT_CALL(rtp_3, SSRC()).WillRepeatedly(Return(kSsrc3)); EXPECT_CALL(rtp_3, SupportsPadding).WillRepeatedly(Return(true)); EXPECT_CALL(rtp_3, SupportsRtxPayloadPadding).WillRepeatedly(Return(false)); - EXPECT_CALL(rtp_3, TrySendPacket).WillRepeatedly(Return(false)); - EXPECT_CALL(rtp_3, TrySendPacket( - Pointee(Property(&RtpPacketToSend::Ssrc, kSsrc3)), _)) - .WillRepeatedly(Return(true)); + EXPECT_CALL(rtp_3, CanSendPacket) + .WillRepeatedly([&](const RtpPacketToSend& packet) { + if (packet.Ssrc() == kSsrc3) { + return true; + } + return false; + }); packet_router_.AddSendRtpModule(&rtp_1, false); packet_router_.AddSendRtpModule(&rtp_2, false); @@ -228,7 +267,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) { // and supports rtx. 
EXPECT_CALL(rtp_2, GeneratePadding(kPaddingBytes)) .Times(1) - .WillOnce([&](size_t target_size_bytes) { + .WillOnce([&](size_t /* target_size_bytes */) { std::vector> packets; packets.push_back(BuildRtpPacket(kSsrc2)); return packets; @@ -240,7 +279,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) { EXPECT_CALL(rtp_1, GeneratePadding(kPaddingBytes)) .Times(1) - .WillOnce([&](size_t target_size_bytes) { + .WillOnce([&](size_t /* target_size_bytes */) { std::vector> packets; packets.push_back(BuildRtpPacket(kSsrc1)); return packets; @@ -259,7 +298,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) { RtpRtcpInterface* last_send_module; EXPECT_CALL(rtp_1, GeneratePadding(kPaddingBytes)) .Times(1) - .WillOnce([&](size_t target_size_bytes) { + .WillOnce([&](size_t /* target_size_bytes */) { last_send_module = &rtp_1; std::vector> packets; packets.push_back(BuildRtpPacket(kSsrc1)); @@ -267,7 +306,7 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) { }); EXPECT_CALL(rtp_3, GeneratePadding(kPaddingBytes)) .Times(1) - .WillOnce([&](size_t target_size_bytes) { + .WillOnce([&](size_t /* target_size_bytes */) { last_send_module = &rtp_3; std::vector> packets; packets.push_back(BuildRtpPacket(kSsrc3)); @@ -282,25 +321,145 @@ TEST_F(PacketRouterTest, PadsOnLastActiveMediaStream) { } } +TEST_F(PacketRouterTest, AllocatesRtpSequenceNumbersIfPacketCanBeSent) { + const uint16_t kSsrc1 = 1234; + PacketRouter packet_router; + NiceMock rtp; + ON_CALL(rtp, SSRC()).WillByDefault(Return(kSsrc1)); + + InSequence s; + EXPECT_CALL(rtp, CanSendPacket).WillRepeatedly(Return(true)); + EXPECT_CALL(rtp, AssignSequenceNumber); + packet_router.AddSendRtpModule(&rtp, false); + packet_router.SendPacket(BuildRtpPacket(kSsrc1), PacedPacketInfo()); + + packet_router.OnBatchComplete(); + packet_router.RemoveSendRtpModule(&rtp); +} + +TEST_F(PacketRouterTest, DoNotAllocatesRtpSequenceNumbersIfPacketCanNotBeSent) { + const uint16_t kSsrc1 = 1234; + PacketRouter packet_router; + NiceMock rtp; + ON_CALL(rtp, SSRC()).WillByDefault(Return(kSsrc1)); + + EXPECT_CALL(rtp, CanSendPacket).WillRepeatedly(Return(false)); + EXPECT_CALL(rtp, AssignSequenceNumber).Times(0); + packet_router.AddSendRtpModule(&rtp, false); + packet_router.SendPacket(BuildRtpPacket(kSsrc1), PacedPacketInfo()); + + packet_router.OnBatchComplete(); + packet_router.RemoveSendRtpModule(&rtp); +} + TEST_F(PacketRouterTest, AllocatesTransportSequenceNumbers) { - const uint16_t kStartSeq = 0xFFF0; - const size_t kNumPackets = 32; const uint16_t kSsrc1 = 1234; - PacketRouter packet_router(kStartSeq - 1); + PacketRouter packet_router; + testing::MockFunction + notify_bwe_callback; + NiceMock rtp_1; + packet_router.RegisterNotifyBweCallback(notify_bwe_callback.AsStdFunction()); + + EXPECT_CALL(rtp_1, SSRC()).WillRepeatedly(Return(kSsrc1)); + EXPECT_CALL(rtp_1, CanSendPacket).WillRepeatedly(Return(true)); + + packet_router.AddSendRtpModule(&rtp_1, false); + + auto packet = BuildRtpPacket(kSsrc1); + EXPECT_TRUE(packet->ReserveExtension()); + EXPECT_CALL(notify_bwe_callback, Call) + .WillOnce([](const RtpPacketToSend& packet, + const PacedPacketInfo& /* pacing_info */) { + EXPECT_EQ(packet.transport_sequence_number(), 1); + }); + packet_router.SendPacket(std::move(packet), PacedPacketInfo()); + + packet_router.OnBatchComplete(); + packet_router.RemoveSendRtpModule(&rtp_1); +} + +TEST_F(PacketRouterTest, + DoesNotAllocateTransportSequenceNumberWithoutExtension) { + const uint16_t kSsrc1 = 1234; + + PacketRouter packet_router; + testing::MockFunction + 
notify_bwe_callback; NiceMock rtp_1; + packet_router.RegisterNotifyBweCallback(notify_bwe_callback.AsStdFunction()); + EXPECT_CALL(rtp_1, SSRC()).WillRepeatedly(Return(kSsrc1)); - EXPECT_CALL(rtp_1, TrySendPacket).WillRepeatedly(Return(true)); + EXPECT_CALL(rtp_1, CanSendPacket).WillRepeatedly(Return(true)); + packet_router.AddSendRtpModule(&rtp_1, false); - for (size_t i = 0; i < kNumPackets; ++i) { - auto packet = BuildRtpPacket(kSsrc1); - EXPECT_TRUE(packet->ReserveExtension()); - packet_router.SendPacket(std::move(packet), PacedPacketInfo()); - uint32_t expected_unwrapped_seq = static_cast(kStartSeq) + i; - EXPECT_EQ(static_cast(expected_unwrapped_seq & 0xFFFF), - packet_router.CurrentTransportSequenceNumber()); - } + auto packet = BuildRtpPacket(kSsrc1); + EXPECT_CALL(notify_bwe_callback, Call) + .WillOnce([](const RtpPacketToSend& packet, + const PacedPacketInfo& /* pacing_info */) { + EXPECT_EQ(packet.transport_sequence_number(), std::nullopt); + }); + packet_router.SendPacket(std::move(packet), PacedPacketInfo()); + + packet_router.OnBatchComplete(); + packet_router.RemoveSendRtpModule(&rtp_1); +} + +TEST_F(PacketRouterTest, + AllocateTransportSequenceNumberWithoutExtensionIfRfc8888Enabled) { + const uint16_t kSsrc1 = 1234; + + PacketRouter packet_router; + testing::MockFunction + notify_bwe_callback; + NiceMock rtp_1; + packet_router.RegisterNotifyBweCallback(notify_bwe_callback.AsStdFunction()); + + EXPECT_CALL(rtp_1, SSRC()).WillRepeatedly(Return(kSsrc1)); + EXPECT_CALL(rtp_1, CanSendPacket).WillRepeatedly(Return(true)); + + packet_router.AddSendRtpModule(&rtp_1, false); + packet_router.ConfigureForRfc8888Feedback(/*send_rtp_packets_as_ect1=*/false); + + auto packet = BuildRtpPacket(kSsrc1); + EXPECT_CALL(notify_bwe_callback, Call) + .WillOnce([](const RtpPacketToSend& packet, + const PacedPacketInfo& /* pacing_info */) { + EXPECT_EQ(packet.transport_sequence_number(), 1); + }); + packet_router.SendPacket(std::move(packet), PacedPacketInfo()); + + packet_router.OnBatchComplete(); + packet_router.RemoveSendRtpModule(&rtp_1); +} + +TEST_F(PacketRouterTest, SendPacketsAsEct1IfConfigured) { + const uint16_t kSsrc1 = 1234; + PacketRouter packet_router; + NiceMock rtp_1; + ON_CALL(rtp_1, SSRC()).WillByDefault(Return(kSsrc1)); + ON_CALL(rtp_1, CanSendPacket).WillByDefault(Return(kSsrc1)); + + packet_router.AddSendRtpModule(&rtp_1, false); + packet_router.ConfigureForRfc8888Feedback(/*send_rtp_packets_as_ect1=*/true); + + testing::Sequence s; + EXPECT_CALL( + rtp_1, + SendPacket(Pointee(Property(&RtpPacketToSend::send_as_ect1, true)), _)) + .InSequence(s); + EXPECT_CALL( + rtp_1, + SendPacket(Pointee(Property(&RtpPacketToSend::send_as_ect1, false)), _)) + .InSequence(s); + + packet_router.SendPacket(BuildRtpPacket(kSsrc1), PacedPacketInfo()); + packet_router.ConfigureForRfc8888Feedback(/*send_rtp_packets_as_ect1=*/false); + packet_router.SendPacket(BuildRtpPacket(kSsrc1), PacedPacketInfo()); packet_router.OnBatchComplete(); packet_router.RemoveSendRtpModule(&rtp_1); @@ -333,6 +492,7 @@ TEST_F(PacketRouterTest, SendPacketWithoutTransportSequenceNumbers) { NiceMock rtp_1; ON_CALL(rtp_1, SendingMedia).WillByDefault(Return(true)); ON_CALL(rtp_1, SSRC).WillByDefault(Return(kSsrc1)); + ON_CALL(rtp_1, CanSendPacket).WillByDefault(Return(true)); packet_router_.AddSendRtpModule(&rtp_1, false); // Send a packet without TransportSequenceNumber extension registered, @@ -342,59 +502,16 @@ TEST_F(PacketRouterTest, SendPacketWithoutTransportSequenceNumbers) { packet->SetSsrc(kSsrc1); EXPECT_CALL( 
rtp_1, - TrySendPacket( - Pointee(Property( - &RtpPacketToSend::HasExtension, false)), - _)) - .WillOnce(Return(true)); - packet_router_.SendPacket(std::move(packet), PacedPacketInfo()); - packet_router_.OnBatchComplete(); - packet_router_.RemoveSendRtpModule(&rtp_1); -} - -TEST_F(PacketRouterTest, SendPacketAssignsTransportSequenceNumbers) { - NiceMock rtp_1; - NiceMock rtp_2; - - const uint16_t kSsrc1 = 1234; - const uint16_t kSsrc2 = 2345; - - ON_CALL(rtp_1, SSRC).WillByDefault(Return(kSsrc1)); - ON_CALL(rtp_2, SSRC).WillByDefault(Return(kSsrc2)); - - packet_router_.AddSendRtpModule(&rtp_1, false); - packet_router_.AddSendRtpModule(&rtp_2, false); - - // Transport sequence numbers start at 1, for historical reasons. - uint16_t transport_sequence_number = 1; - - auto packet = BuildRtpPacket(kSsrc1); - EXPECT_TRUE(packet->ReserveExtension()); - EXPECT_CALL( - rtp_1, - TrySendPacket(Pointee(Property( - &RtpPacketToSend::GetExtension, - transport_sequence_number)), - _)) - .WillOnce(Return(true)); - packet_router_.SendPacket(std::move(packet), PacedPacketInfo()); - - ++transport_sequence_number; - packet = BuildRtpPacket(kSsrc2); - EXPECT_TRUE(packet->ReserveExtension()); - - EXPECT_CALL( - rtp_2, - TrySendPacket(Pointee(Property( - &RtpPacketToSend::GetExtension, - transport_sequence_number)), - _)) - .WillOnce(Return(true)); + SendPacket( + AllOf(Pointee(Property( + &RtpPacketToSend::HasExtension, + false)), + Pointee(Property(&RtpPacketToSend::transport_sequence_number, + std::nullopt))), + _)); packet_router_.SendPacket(std::move(packet), PacedPacketInfo()); - packet_router_.OnBatchComplete(); packet_router_.RemoveSendRtpModule(&rtp_1); - packet_router_.RemoveSendRtpModule(&rtp_2); } TEST_F(PacketRouterTest, DoesNotIncrementTransportSequenceNumberOnSendFailure) { @@ -410,13 +527,8 @@ TEST_F(PacketRouterTest, DoesNotIncrementTransportSequenceNumberOnSendFailure) { // Return failure status code to make sure sequence number is not incremented. 
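The tests above pin down the new send contract: CanSendPacket() is consulted first, and only when it succeeds are RTP and transport sequence numbers consumed before SendPacket() takes ownership. A minimal sketch of that flow, assuming the method shapes mocked above on RtpRtcpInterface; the setter name set_transport_sequence_number is an assumption (the tests only observe the getter), and the RFC 8888 no-extension case is left out:

#include <memory>
#include <utility>

// Sketch only: mirrors the contract exercised by the tests above, not the
// actual PacketRouter implementation.
void SendThroughModuleSketch(RtpRtcpInterface& rtp_module,
                             std::unique_ptr<RtpPacketToSend> packet,
                             const PacedPacketInfo& pacing_info,
                             int64_t& next_transport_sequence_number) {
  if (!rtp_module.CanSendPacket(*packet)) {
    // Nothing is consumed on failure; the next packet reuses the same RTP
    // and transport sequence numbers.
    return;
  }
  rtp_module.AssignSequenceNumber(*packet);
  if (packet->HasExtension<TransportSequenceNumber>()) {
    // Assumed setter; the BWE callback sees this value via
    // transport_sequence_number().
    packet->set_transport_sequence_number(next_transport_sequence_number++);
  }
  rtp_module.SendPacket(std::move(packet), pacing_info);
}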
auto packet = BuildRtpPacket(kSsrc); EXPECT_TRUE(packet->ReserveExtension()); - EXPECT_CALL( - rtp, - TrySendPacket(Pointee(Property( - &RtpPacketToSend::GetExtension, - kStartTransportSequenceNumber)), - _)) - .WillOnce(Return(false)); + EXPECT_CALL(rtp, CanSendPacket) + .WillOnce([&](const RtpPacketToSend& /* packet */) { return false; }); packet_router_.SendPacket(std::move(packet), PacedPacketInfo()); // Send another packet, verify transport sequence number is still at the @@ -424,13 +536,13 @@ TEST_F(PacketRouterTest, DoesNotIncrementTransportSequenceNumberOnSendFailure) { packet = BuildRtpPacket(kSsrc); EXPECT_TRUE(packet->ReserveExtension()); - EXPECT_CALL( - rtp, - TrySendPacket(Pointee(Property( - &RtpPacketToSend::GetExtension, - kStartTransportSequenceNumber)), - _)) - .WillOnce(Return(true)); + EXPECT_CALL(rtp, CanSendPacket).WillOnce(Return(true)); + EXPECT_CALL(rtp, SendPacket) + .WillOnce([&](std::unique_ptr packet, + const PacedPacketInfo& /* pacing_info */) { + EXPECT_EQ(packet->transport_sequence_number(), + kStartTransportSequenceNumber); + }); packet_router_.SendPacket(std::move(packet), PacedPacketInfo()); packet_router_.OnBatchComplete(); @@ -491,9 +603,9 @@ TEST_F(PacketRouterTest, ReportsRtxSsrc) { packet_router_.AddSendRtpModule(&rtp_2, false); EXPECT_EQ(packet_router_.GetRtxSsrcForMedia(kSsrc1), kRtxSsrc1); - EXPECT_EQ(packet_router_.GetRtxSsrcForMedia(kRtxSsrc1), absl::nullopt); - EXPECT_EQ(packet_router_.GetRtxSsrcForMedia(kSsrc2), absl::nullopt); - EXPECT_EQ(packet_router_.GetRtxSsrcForMedia(kInvalidSsrc), absl::nullopt); + EXPECT_EQ(packet_router_.GetRtxSsrcForMedia(kRtxSsrc1), std::nullopt); + EXPECT_EQ(packet_router_.GetRtxSsrcForMedia(kSsrc2), std::nullopt); + EXPECT_EQ(packet_router_.GetRtxSsrcForMedia(kInvalidSsrc), std::nullopt); packet_router_.RemoveSendRtpModule(&rtp_1); packet_router_.RemoveSendRtpModule(&rtp_2); @@ -508,7 +620,7 @@ TEST_F(PacketRouterTest, RoutesBatchCompleteToActiveModules) { ON_CALL(rtp_2, SSRC).WillByDefault(Return(kSsrc2)); packet_router_.AddSendRtpModule(&rtp_1, false); packet_router_.AddSendRtpModule(&rtp_2, false); - EXPECT_CALL(rtp_1, TrySendPacket).WillOnce(Return(true)); + EXPECT_CALL(rtp_1, CanSendPacket).WillOnce(Return(true)); packet_router_.SendPacket(BuildRtpPacket(kSsrc1), PacedPacketInfo()); EXPECT_CALL(rtp_1, OnBatchComplete); EXPECT_CALL(rtp_2, OnBatchComplete).Times(0); @@ -561,7 +673,7 @@ TEST_F(PacketRouterTest, DuplicateRemovalOfSendModuleIgnored) { } TEST(PacketRouterRembTest, ChangeSendRtpModuleChangeRembSender) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; NiceMock rtp_send; NiceMock rtp_recv; PacketRouter packet_router; @@ -586,7 +698,7 @@ TEST(PacketRouterRembTest, ChangeSendRtpModuleChangeRembSender) { // Only register receiving modules and make sure we fallback to trigger a REMB // packet on this one. 
TEST(PacketRouterRembTest, NoSendingRtpModule) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; NiceMock<MockRtpRtcpInterface> rtp; PacketRouter packet_router; @@ -607,7 +719,7 @@ TEST(PacketRouterRembTest, NoSendingRtpModule) { } TEST(PacketRouterRembTest, NonCandidateSendRtpModuleNotUsedForRemb) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; PacketRouter packet_router; NiceMock<MockRtpRtcpInterface> module; @@ -625,7 +737,7 @@ TEST(PacketRouterRembTest, NonCandidateSendRtpModuleNotUsedForRemb) { } TEST(PacketRouterRembTest, CandidateSendRtpModuleUsedForRemb) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; PacketRouter packet_router; NiceMock<MockRtpRtcpInterface> module; @@ -643,7 +755,7 @@ TEST(PacketRouterRembTest, CandidateSendRtpModuleUsedForRemb) { } TEST(PacketRouterRembTest, NonCandidateReceiveRtpModuleNotUsedForRemb) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; PacketRouter packet_router; NiceMock<MockRtpRtcpInterface> module; @@ -661,7 +773,7 @@ TEST(PacketRouterRembTest, NonCandidateReceiveRtpModuleNotUsedForRemb) { } TEST(PacketRouterRembTest, CandidateReceiveRtpModuleUsedForRemb) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; PacketRouter packet_router; NiceMock<MockRtpRtcpInterface> module; @@ -680,7 +792,7 @@ TEST(PacketRouterRembTest, CandidateReceiveRtpModuleUsedForRemb) { TEST(PacketRouterRembTest, SendCandidatePreferredOverReceiveCandidate_SendModuleAddedFirst) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; PacketRouter packet_router; NiceMock<MockRtpRtcpInterface> send_module; NiceMock<MockRtpRtcpInterface> receive_module; @@ -707,7 +819,7 @@ TEST(PacketRouterRembTest, TEST(PacketRouterRembTest, SendCandidatePreferredOverReceiveCandidate_ReceiveModuleAddedFirst) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; PacketRouter packet_router; NiceMock<MockRtpRtcpInterface> send_module; NiceMock<MockRtpRtcpInterface> receive_module; @@ -734,7 +846,7 @@ TEST(PacketRouterRembTest, } TEST(PacketRouterRembTest, ReceiveModuleTakesOverWhenLastSendModuleRemoved) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; PacketRouter packet_router; NiceMock<MockRtpRtcpInterface> send_module; NiceMock<MockRtpRtcpInterface> receive_module; diff --git a/modules/pacing/prioritized_packet_queue.cc b/modules/pacing/prioritized_packet_queue.cc index ea211ea683..136c878793 100644 --- a/modules/pacing/prioritized_packet_queue.cc +++ b/modules/pacing/prioritized_packet_queue.cc @@ -10,41 +10,75 @@ #include "modules/pacing/prioritized_packet_queue.h" +#include +#include +#include +#include +#include +#include +#include #include +#include "absl/container/inlined_vector.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "rtc_base/checks.h" +#include "rtc_base/logging.h" namespace webrtc { namespace { constexpr int kAudioPrioLevel = 0; -int GetPriorityForType(RtpPacketMediaType type) { +int GetPriorityForType( + RtpPacketMediaType type, + std::optional<RtpPacketToSend::OriginalType> original_type) { // Lower number takes priority over higher. switch (type) { case RtpPacketMediaType::kAudio: // Audio is always prioritized over other packet types. return kAudioPrioLevel; case RtpPacketMediaType::kRetransmission: - // Send retransmissions before new media. + // Send retransmissions before new media. If original_type is set, audio + // retransmission is prioritized over video retransmission.
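A small sketch of the ladder this produces, lower value meaning higher priority. It assumes GetPriorityForType() were visible outside this translation unit (it is file-local here), so it is illustrative only:

#include <cassert>
#include <optional>

void PriorityLadderSketch() {
  using OT = RtpPacketToSend::OriginalType;
  // 0: audio, 1: audio (or untyped) retransmission, 2: video retransmission,
  // 3: video / FEC, 4: padding.
  assert(GetPriorityForType(RtpPacketMediaType::kAudio, std::nullopt) <
         GetPriorityForType(RtpPacketMediaType::kRetransmission, OT::kAudio));
  assert(GetPriorityForType(RtpPacketMediaType::kRetransmission, OT::kAudio) <
         GetPriorityForType(RtpPacketMediaType::kRetransmission, OT::kVideo));
  assert(GetPriorityForType(RtpPacketMediaType::kRetransmission, OT::kVideo) <
         GetPriorityForType(RtpPacketMediaType::kVideo, std::nullopt));
  assert(GetPriorityForType(RtpPacketMediaType::kVideo, std::nullopt) <
         GetPriorityForType(RtpPacketMediaType::kPadding, std::nullopt));
}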
+ if (original_type == RtpPacketToSend::OriginalType::kVideo) { + return kAudioPrioLevel + 2; + } return kAudioPrioLevel + 1; case RtpPacketMediaType::kVideo: case RtpPacketMediaType::kForwardErrorCorrection: // Video has "normal" priority, in the old speak. // Send redundancy concurrently to video. If it is delayed it might have a // lower chance of being useful. - return kAudioPrioLevel + 2; + return kAudioPrioLevel + 3; case RtpPacketMediaType::kPadding: // Packets that are in themselves likely useless, only sent to keep the // BWE high. - return kAudioPrioLevel + 3; + return kAudioPrioLevel + 4; } RTC_CHECK_NOTREACHED(); } } // namespace +absl::InlinedVector +PrioritizedPacketQueue::ToTtlPerPrio(PacketQueueTTL packet_queue_ttl) { + absl::InlinedVector + ttl_per_prio(kNumPriorityLevels, TimeDelta::PlusInfinity()); + ttl_per_prio[GetPriorityForType(RtpPacketMediaType::kRetransmission, + RtpPacketToSend::OriginalType::kAudio)] = + packet_queue_ttl.audio_retransmission; + ttl_per_prio[GetPriorityForType(RtpPacketMediaType::kRetransmission, + RtpPacketToSend::OriginalType::kVideo)] = + packet_queue_ttl.video_retransmission; + ttl_per_prio[GetPriorityForType(RtpPacketMediaType::kVideo, std::nullopt)] = + packet_queue_ttl.video; + return ttl_per_prio; +} + DataSize PrioritizedPacketQueue::QueuedPacket::PacketSize() const { return DataSize::Bytes(packet->payload_size() + packet->padding_size()); } @@ -109,8 +143,13 @@ PrioritizedPacketQueue::StreamQueue::DequeueAll() { return packets_by_prio; } -PrioritizedPacketQueue::PrioritizedPacketQueue(Timestamp creation_time) - : queue_time_sum_(TimeDelta::Zero()), +PrioritizedPacketQueue::PrioritizedPacketQueue( + Timestamp creation_time, + bool prioritize_audio_retransmission, + PacketQueueTTL packet_queue_ttl) + : prioritize_audio_retransmission_(prioritize_audio_retransmission), + time_to_live_per_prio_(ToTtlPerPrio(packet_queue_ttl)), + queue_time_sum_(TimeDelta::Zero()), pause_time_sum_(TimeDelta::Zero()), size_packets_(0), size_packets_per_media_type_({}), @@ -133,7 +172,11 @@ void PrioritizedPacketQueue::Push(Timestamp enqueue_time, enqueue_times_.insert(enqueue_times_.end(), enqueue_time); RTC_DCHECK(packet->packet_type().has_value()); RtpPacketMediaType packet_type = packet->packet_type().value(); - int prio_level = GetPriorityForType(packet_type); + int prio_level = + GetPriorityForType(packet_type, prioritize_audio_retransmission_ + ? 
packet->original_packet_type() + : std::nullopt); + PurgeOldPacketsAtPriorityLevel(prio_level, enqueue_time); RTC_DCHECK_GE(prio_level, 0); RTC_DCHECK_LT(prio_level, kNumPriorityLevels); QueuedPacket queued_packed = {.packet = std::move(packet), @@ -161,12 +204,12 @@ void PrioritizedPacketQueue::Push(Timestamp enqueue_time, static constexpr TimeDelta kTimeout = TimeDelta::Millis(500); if (enqueue_time - last_culling_time_ > kTimeout) { - for (auto it = streams_.begin(); it != streams_.end();) { - if (it->second->IsEmpty() && - it->second->LastEnqueueTime() + kTimeout < enqueue_time) { - streams_.erase(it++); + for (auto stream_it = streams_.begin(); stream_it != streams_.end();) { + if (stream_it->second->IsEmpty() && + stream_it->second->LastEnqueueTime() + kTimeout < enqueue_time) { + streams_.erase(stream_it++); } else { - ++it; + ++stream_it; } } last_culling_time_ = enqueue_time; @@ -214,7 +257,8 @@ PrioritizedPacketQueue::SizeInPacketsPerRtpPacketMediaType() const { Timestamp PrioritizedPacketQueue::LeadingPacketEnqueueTime( RtpPacketMediaType type) const { - const int priority_level = GetPriorityForType(type); + RTC_DCHECK(type != RtpPacketMediaType::kRetransmission); + const int priority_level = GetPriorityForType(type, std::nullopt); if (streams_by_prio_[priority_level].empty()) { return Timestamp::MinusInfinity(); } @@ -222,6 +266,39 @@ Timestamp PrioritizedPacketQueue::LeadingPacketEnqueueTime( priority_level); } +Timestamp PrioritizedPacketQueue::LeadingPacketEnqueueTimeForRetransmission() + const { + if (!prioritize_audio_retransmission_) { + const int priority_level = + GetPriorityForType(RtpPacketMediaType::kRetransmission, std::nullopt); + if (streams_by_prio_[priority_level].empty()) { + return Timestamp::PlusInfinity(); + } + return streams_by_prio_[priority_level].front()->LeadingPacketEnqueueTime( + priority_level); + } + const int audio_priority_level = + GetPriorityForType(RtpPacketMediaType::kRetransmission, + RtpPacketToSend::OriginalType::kAudio); + const int video_priority_level = + GetPriorityForType(RtpPacketMediaType::kRetransmission, + RtpPacketToSend::OriginalType::kVideo); + + Timestamp next_audio = + streams_by_prio_[audio_priority_level].empty() + ? Timestamp::PlusInfinity() + : streams_by_prio_[audio_priority_level] + .front() + ->LeadingPacketEnqueueTime(audio_priority_level); + Timestamp next_video = + streams_by_prio_[video_priority_level].empty() + ? Timestamp::PlusInfinity() + : streams_by_prio_[video_priority_level] + .front() + ->LeadingPacketEnqueueTime(video_priority_level); + return std::min(next_audio, next_video); +} + Timestamp PrioritizedPacketQueue::OldestEnqueueTime() const { return enqueue_times_.empty() ? Timestamp::MinusInfinity() : enqueue_times_.front(); @@ -283,9 +360,6 @@ void PrioritizedPacketQueue::RemovePacketsForSsrc(uint32_t ssrc) { // Update the global top prio level if neccessary. RTC_DCHECK(streams_by_prio_[i].front() == &queue); streams_by_prio_[i].pop_front(); - if (i == top_active_prio_level_) { - MaybeUpdateTopPrioLevel(); - } } else { // More than stream had packets at this prio level, filter this one out. 
std::deque filtered_queue; @@ -298,6 +372,7 @@ void PrioritizedPacketQueue::RemovePacketsForSsrc(uint32_t ssrc) { } } } + MaybeUpdateTopPrioLevel(); } bool PrioritizedPacketQueue::HasKeyframePackets(uint32_t ssrc) const { @@ -340,18 +415,53 @@ void PrioritizedPacketQueue::DequeuePacketInternal(QueuedPacket& packet) { } void PrioritizedPacketQueue::MaybeUpdateTopPrioLevel() { - if (streams_by_prio_[top_active_prio_level_].empty()) { - // No stream queues have packets at this prio level, find top priority - // that is not empty. - if (size_packets_ == 0) { - top_active_prio_level_ = -1; + if (top_active_prio_level_ != -1 && + !streams_by_prio_[top_active_prio_level_].empty()) { + return; + } + // No stream queues have packets at top_active_prio_level_, find top priority + // that is not empty. + for (int i = 0; i < kNumPriorityLevels; ++i) { + PurgeOldPacketsAtPriorityLevel(i, last_update_time_); + if (!streams_by_prio_[i].empty()) { + top_active_prio_level_ = i; + break; + } + } + if (size_packets_ == 0) { + // There are no packets left to send. Last packet may have been purged. Prio + // will change when a new packet is pushed. + top_active_prio_level_ = -1; + } +} + +void PrioritizedPacketQueue::PurgeOldPacketsAtPriorityLevel(int prio_level, + Timestamp now) { + RTC_DCHECK(prio_level >= 0 && prio_level < kNumPriorityLevels); + TimeDelta time_to_live = time_to_live_per_prio_[prio_level]; + if (time_to_live.IsInfinite()) { + return; + } + + std::deque& queues = streams_by_prio_[prio_level]; + auto iter = queues.begin(); + while (iter != queues.end()) { + StreamQueue* queue_ptr = *iter; + while (queue_ptr->HasPacketsAtPrio(prio_level) && + (now - queue_ptr->LeadingPacketEnqueueTime(prio_level)) > + time_to_live) { + QueuedPacket packet = queue_ptr->DequeuePacket(prio_level); + RTC_LOG(LS_INFO) << "Dropping old packet on SSRC: " + << packet.packet->Ssrc() + << " seq:" << packet.packet->SequenceNumber() + << " time in queue:" << (now - packet.enqueue_time).ms() + << " ms"; + DequeuePacketInternal(packet); + } + if (!queue_ptr->HasPacketsAtPrio(prio_level)) { + iter = queues.erase(iter); } else { - for (int i = 0; i < kNumPriorityLevels; ++i) { - if (!streams_by_prio_[i].empty()) { - top_active_prio_level_ = i; - break; - } - } + ++iter; } } } diff --git a/modules/pacing/prioritized_packet_queue.h b/modules/pacing/prioritized_packet_queue.h index 935c530027..bb6465d755 100644 --- a/modules/pacing/prioritized_packet_queue.h +++ b/modules/pacing/prioritized_packet_queue.h @@ -14,22 +14,34 @@ #include #include +#include #include #include #include #include -#include +#include "absl/container/inlined_vector.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" namespace webrtc { +// Describes how long time a packet may stay in the queue before being dropped. 
+struct PacketQueueTTL { + TimeDelta audio_retransmission = TimeDelta::PlusInfinity(); + TimeDelta video_retransmission = TimeDelta::PlusInfinity(); + TimeDelta video = TimeDelta::PlusInfinity(); +}; + class PrioritizedPacketQueue { public: - explicit PrioritizedPacketQueue(Timestamp creation_time); + explicit PrioritizedPacketQueue( + Timestamp creation_time, + bool prioritize_audio_retransmission = false, + PacketQueueTTL packet_queue_ttl = PacketQueueTTL()); PrioritizedPacketQueue(const PrioritizedPacketQueue&) = delete; PrioritizedPacketQueue& operator=(const PrioritizedPacketQueue&) = delete; @@ -63,6 +75,7 @@ class PrioritizedPacketQueue { // method, for the given packet type. If queue has no packets, of that type, // returns Timestamp::MinusInfinity(). Timestamp LeadingPacketEnqueueTime(RtpPacketMediaType type) const; + Timestamp LeadingPacketEnqueueTimeForRetransmission() const; // Enqueue time of the oldest packet in the queue, // Timestamp::MinusInfinity() if queue is empty. @@ -90,7 +103,7 @@ class PrioritizedPacketQueue { bool HasKeyframePackets(uint32_t ssrc) const; private: - static constexpr int kNumPriorityLevels = 4; + static constexpr int kNumPriorityLevels = 5; class QueuedPacket { public: @@ -139,6 +152,15 @@ class PrioritizedPacketQueue { // if so move it to the lowest non-empty index. void MaybeUpdateTopPrioLevel(); + void PurgeOldPacketsAtPriorityLevel(int prio_level, Timestamp now); + + static absl::InlinedVector ToTtlPerPrio( + PacketQueueTTL); + + const bool prioritize_audio_retransmission_; + const absl::InlinedVector + time_to_live_per_prio_; + // Cumulative sum, over all packets, of time spent in the queue. TimeDelta queue_time_sum_; // Cumulative sum of time the queue has spent in a paused state. diff --git a/modules/pacing/prioritized_packet_queue_unittest.cc b/modules/pacing/prioritized_packet_queue_unittest.cc index 9ed19642c7..6370fac79c 100644 --- a/modules/pacing/prioritized_packet_queue_unittest.cc +++ b/modules/pacing/prioritized_packet_queue_unittest.cc @@ -10,13 +10,17 @@ #include "modules/pacing/prioritized_packet_queue.h" +#include +#include +#include #include +#include "api/units/data_size.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "rtc_base/checks.h" -#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { @@ -26,18 +30,39 @@ constexpr uint32_t kDefaultSsrc = 123; constexpr int kDefaultPayloadSize = 789; std::unique_ptr CreatePacket(RtpPacketMediaType type, - uint16_t sequence_number, + uint16_t seq, uint32_t ssrc = kDefaultSsrc, bool is_key_frame = false) { auto packet = std::make_unique(/*extensions=*/nullptr); packet->set_packet_type(type); packet->SetSsrc(ssrc); - packet->SetSequenceNumber(sequence_number); + packet->SetSequenceNumber(seq); packet->SetPayloadSize(kDefaultPayloadSize); packet->set_is_key_frame(is_key_frame); return packet; } +std::unique_ptr CreateRetransmissionPacket( + RtpPacketMediaType original_type, + uint16_t seq, + uint32_t ssrc = kDefaultSsrc) { + auto packet = std::make_unique(/*extensions=*/nullptr); + packet->set_packet_type(original_type); + packet->set_packet_type(RtpPacketMediaType::kRetransmission); + RTC_DCHECK(packet->packet_type() == RtpPacketMediaType::kRetransmission); + if (original_type == RtpPacketMediaType::kVideo) { + RTC_DCHECK(packet->original_packet_type() == + RtpPacketToSend::OriginalType::kVideo); + } else { + 
RTC_DCHECK(packet->original_packet_type() == + RtpPacketToSend::OriginalType::kAudio); + } + packet->SetSsrc(ssrc); + packet->SetSequenceNumber(seq); + packet->SetPayloadSize(kDefaultPayloadSize); + return packet; +} + } // namespace TEST(PrioritizedPacketQueue, ReturnsPacketsInPrioritizedOrder) { @@ -49,18 +74,42 @@ TEST(PrioritizedPacketQueue, ReturnsPacketsInPrioritizedOrder) { queue.Push(now, CreatePacket(RtpPacketMediaType::kVideo, /*seq=*/2)); queue.Push(now, CreatePacket(RtpPacketMediaType::kForwardErrorCorrection, /*seq=*/3)); - queue.Push(now, CreatePacket(RtpPacketMediaType::kRetransmission, /*seq=*/4)); - queue.Push(now, CreatePacket(RtpPacketMediaType::kAudio, /*seq=*/5)); + queue.Push(now, + CreateRetransmissionPacket(RtpPacketMediaType::kVideo, /*seq=*/4)); + queue.Push(now, + CreateRetransmissionPacket(RtpPacketMediaType::kAudio, /*seq=*/5)); + queue.Push(now, CreatePacket(RtpPacketMediaType::kAudio, /*seq=*/6)); // Packets should be returned in high to low order. - EXPECT_EQ(queue.Pop()->SequenceNumber(), 5); + EXPECT_EQ(queue.Pop()->SequenceNumber(), 6); + // Audio and video retransmission has same prio, but video was enqueued first. EXPECT_EQ(queue.Pop()->SequenceNumber(), 4); + EXPECT_EQ(queue.Pop()->SequenceNumber(), 5); // Video and FEC prioritized equally - but video was enqueued first. EXPECT_EQ(queue.Pop()->SequenceNumber(), 2); EXPECT_EQ(queue.Pop()->SequenceNumber(), 3); EXPECT_EQ(queue.Pop()->SequenceNumber(), 1); } +TEST(PrioritizedPacketQueue, + PrioritizeAudioRetransmissionBeforeVideoRetransmissionIfConfigured) { + Timestamp now = Timestamp::Zero(); + PrioritizedPacketQueue queue(now, /*prioritize_audio_retransmission=*/true); + + // Add packets in low to high packet order. + queue.Push(now, CreatePacket(RtpPacketMediaType::kVideo, /*seq=*/3)); + queue.Push(now, + CreateRetransmissionPacket(RtpPacketMediaType::kVideo, /*seq=*/4)); + queue.Push(now, + CreateRetransmissionPacket(RtpPacketMediaType::kAudio, /*seq=*/5)); + queue.Push(now, CreatePacket(RtpPacketMediaType::kAudio, /*seq=*/6)); + + // Packets should be returned in high to low order. + EXPECT_EQ(queue.Pop()->SequenceNumber(), 6); + EXPECT_EQ(queue.Pop()->SequenceNumber(), 5); + EXPECT_EQ(queue.Pop()->SequenceNumber(), 4); +} + TEST(PrioritizedPacketQueue, ReturnsEqualPrioPacketsInRoundRobinOrder) { Timestamp now = Timestamp::Zero(); PrioritizedPacketQueue queue(now); @@ -251,6 +300,26 @@ TEST(PrioritizedPacketQueue, ReportsLeadingPacketEnqueueTime) { Timestamp::MinusInfinity()); } +TEST(PrioritizedPacketQueue, ReportsLeadingPacketEnqueueTimeForRetransmission) { + PrioritizedPacketQueue queue(/*creation_time=*/Timestamp::Zero(), + /*prioritize_audio_retransmission=*/true); + EXPECT_EQ(queue.LeadingPacketEnqueueTimeForRetransmission(), + Timestamp::PlusInfinity()); + + queue.Push(Timestamp::Millis(10), + CreateRetransmissionPacket(RtpPacketMediaType::kVideo, /*seq=*/1)); + queue.Push(Timestamp::Millis(11), + CreateRetransmissionPacket(RtpPacketMediaType::kAudio, /*seq=*/2)); + EXPECT_EQ(queue.LeadingPacketEnqueueTimeForRetransmission(), + Timestamp::Millis(10)); + queue.Pop(); // Pop audio retransmission since it has higher prio. + EXPECT_EQ(queue.LeadingPacketEnqueueTimeForRetransmission(), + Timestamp::Millis(10)); + queue.Pop(); // Pop video retransmission. 
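For context, a compact usage sketch of the new constructor parameters exercised by the TTL tests below; the values are illustrative, not recommendations:

PacketQueueTTL ttl;
ttl.audio_retransmission = TimeDelta::Millis(200);
ttl.video_retransmission = TimeDelta::Millis(400);
ttl.video = TimeDelta::Seconds(1);
PrioritizedPacketQueue queue(/*creation_time=*/Timestamp::Zero(),
                             /*prioritize_audio_retransmission=*/true, ttl);
// Expired packets are purged lazily: on the next Push() at the same priority
// level, or when the top priority level is re-evaluated during Pop().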
+ EXPECT_EQ(queue.LeadingPacketEnqueueTimeForRetransmission(), + Timestamp::PlusInfinity()); +} + TEST(PrioritizedPacketQueue, PushAndPopUpdatesSizeInPacketsPerRtpPacketMediaType) { Timestamp now = Timestamp::Zero(); @@ -272,7 +341,7 @@ TEST(PrioritizedPacketQueue, RtpPacketMediaType::kVideo)], 1); - queue.Push(now, CreatePacket(RtpPacketMediaType::kRetransmission, 3)); + queue.Push(now, CreateRetransmissionPacket(RtpPacketMediaType::kVideo, 3)); EXPECT_EQ(queue.SizeInPacketsPerRtpPacketMediaType()[static_cast( RtpPacketMediaType::kRetransmission)], 1); @@ -326,6 +395,8 @@ TEST(PrioritizedPacketQueue, ClearsPackets) { // Remove all of them. queue.RemovePacketsForSsrc(kSsrc); EXPECT_TRUE(queue.Empty()); + queue.RemovePacketsForSsrc(kSsrc); + EXPECT_TRUE(queue.Empty()); } TEST(PrioritizedPacketQueue, ClearPacketsAffectsOnlySpecifiedSsrc) { @@ -338,16 +409,16 @@ TEST(PrioritizedPacketQueue, ClearPacketsAffectsOnlySpecifiedSsrc) { // ensuring they are first in line. queue.Push( now, CreatePacket(RtpPacketMediaType::kAudio, /*seq=*/1, kRemovingSsrc)); - queue.Push(now, CreatePacket(RtpPacketMediaType::kRetransmission, /*seq=*/2, - kRemovingSsrc)); + queue.Push(now, CreateRetransmissionPacket(RtpPacketMediaType::kVideo, + /*seq=*/2, kRemovingSsrc)); // Add a video packet and a retransmission for the SSRC that will remain. // The retransmission packets now both have pointers to their respective qeues // from the same prio level. queue.Push(now, CreatePacket(RtpPacketMediaType::kVideo, /*seq=*/3, kStayingSsrc)); - queue.Push(now, CreatePacket(RtpPacketMediaType::kRetransmission, /*seq=*/4, - kStayingSsrc)); + queue.Push(now, CreateRetransmissionPacket(RtpPacketMediaType::kVideo, + /*seq=*/4, kStayingSsrc)); EXPECT_EQ(queue.SizeInPackets(), 4); @@ -413,4 +484,87 @@ TEST(PrioritizedPacketQueue, ReportsKeyframePackets) { EXPECT_FALSE(queue.HasKeyframePackets(kVideoSsrc2)); } +TEST(PrioritizedPacketQueue, PacketsDroppedIfNotPulledWithinTttl) { + Timestamp now = Timestamp::Zero(); + PacketQueueTTL ttls; + ttls.audio_retransmission = TimeDelta::Millis(200); + PrioritizedPacketQueue queue(now, /*prioritize_audio_retransmission=*/true, + ttls); + + queue.Push(now, + CreateRetransmissionPacket(RtpPacketMediaType::kAudio, /*seq=*/1)); + now += ttls.audio_retransmission + TimeDelta::Millis(1); + EXPECT_EQ(queue.SizeInPackets(), 1); + queue.Push(now, + CreateRetransmissionPacket(RtpPacketMediaType::kAudio, /*seq=*/2)); + EXPECT_EQ(queue.SizeInPackets(), 1); + EXPECT_EQ(queue.Pop()->SequenceNumber(), 2); +} + +TEST(PrioritizedPacketQueue, DontSendPacketsAfterTttl) { + Timestamp now = Timestamp::Zero(); + PacketQueueTTL ttls; + ttls.audio_retransmission = TimeDelta::Millis(200); + PrioritizedPacketQueue queue(now, /*prioritize_audio_retransmission=*/true, + ttls); + + queue.Push(now, + CreateRetransmissionPacket(RtpPacketMediaType::kAudio, /*seq=*/1)); + now += ttls.audio_retransmission + TimeDelta::Millis(1); + EXPECT_EQ(queue.SizeInPackets(), 1); + queue.Push(now, CreatePacket(RtpPacketMediaType::kVideo, /*seq=*/2)); + queue.Push(now, CreatePacket(RtpPacketMediaType::kAudio, /*seq=*/3)); + // Expect the old packet to have been removed since it was not popped in time. 
+ EXPECT_EQ(queue.SizeInPackets(), 3); + EXPECT_EQ(queue.Pop()->SequenceNumber(), 3); + EXPECT_EQ(queue.SizeInPackets(), 1); + EXPECT_EQ(queue.Pop()->SequenceNumber(), 2); + EXPECT_EQ(queue.SizeInPackets(), 0); +} + +TEST(PrioritizedPacketQueue, SendsNewVideoPacketAfterPurgingLastOldRtxPacket) { + Timestamp now = Timestamp::Zero(); + PacketQueueTTL ttls; + ttls.video_retransmission = TimeDelta::Millis(400); + PrioritizedPacketQueue queue(now, /*prioritize_audio_retransmission=*/true, + ttls); + + queue.Push(now, + CreateRetransmissionPacket(RtpPacketMediaType::kVideo, /*seq=*/1)); + now += ttls.video_retransmission + TimeDelta::Millis(1); + queue.Push(now, CreatePacket(RtpPacketMediaType::kAudio, /*seq=*/2)); + EXPECT_EQ(queue.SizeInPackets(), 2); + // Expect the audio packet to be send and the video retransmission packet to + // be dropped since it is old. + EXPECT_EQ(queue.Pop()->SequenceNumber(), 2); + EXPECT_EQ(queue.SizeInPackets(), 0); + + queue.Push(now, CreatePacket(RtpPacketMediaType::kVideo, /*seq=*/3)); + EXPECT_EQ(queue.SizeInPackets(), 1); + EXPECT_EQ(queue.Pop()->SequenceNumber(), 3); + EXPECT_EQ(queue.SizeInPackets(), 0); +} + +TEST(PrioritizedPacketQueue, + SendsPacketsAfterTttlIfPrioHigherThanPushedPackets) { + Timestamp now = Timestamp::Zero(); + PacketQueueTTL ttls; + ttls.audio_retransmission = TimeDelta::Millis(200); + PrioritizedPacketQueue queue(now, /*prioritize_audio_retransmission=*/true, + ttls); + + queue.Push(now, + CreateRetransmissionPacket(RtpPacketMediaType::kAudio, /*seq=*/1)); + now += ttls.audio_retransmission + TimeDelta::Millis(1); + EXPECT_EQ(queue.SizeInPackets(), 1); + queue.Push(now, CreatePacket(RtpPacketMediaType::kVideo, /*seq=*/2)); + + // This test just show that TTL is not enforced strictly. If a new audio + // packet had been queued before a packet was popped, the audio retransmission + // packet would have been dropped. + EXPECT_EQ(queue.SizeInPackets(), 2); + EXPECT_EQ(queue.Pop()->SequenceNumber(), 1); + EXPECT_EQ(queue.SizeInPackets(), 1); +} + } // namespace webrtc diff --git a/modules/pacing/rtp_packet_pacer.h b/modules/pacing/rtp_packet_pacer.h index e2cf806385..5f12048d90 100644 --- a/modules/pacing/rtp_packet_pacer.h +++ b/modules/pacing/rtp_packet_pacer.h @@ -11,16 +11,14 @@ #ifndef MODULES_PACING_RTP_PACKET_PACER_H_ #define MODULES_PACING_RTP_PACKET_PACER_H_ -#include - +#include #include -#include "absl/types/optional.h" +#include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "modules/rtp_rtcp/include/rtp_packet_sender.h" namespace webrtc { @@ -49,7 +47,7 @@ class RtpPacketPacer { virtual DataSize QueueSizeData() const = 0; // Returns the time when the first packet was sent. 
- virtual absl::optional FirstSentPacketTime() const = 0; + virtual std::optional FirstSentPacketTime() const = 0; // Returns the expected number of milliseconds it will take to send the // current packets in the queue, given the current size and bitrate, ignoring diff --git a/modules/pacing/task_queue_paced_sender.cc b/modules/pacing/task_queue_paced_sender.cc index afa36ea88d..5207b786fc 100644 --- a/modules/pacing/task_queue_paced_sender.cc +++ b/modules/pacing/task_queue_paced_sender.cc @@ -11,41 +11,40 @@ #include "modules/pacing/task_queue_paced_sender.h" #include +#include +#include +#include +#include #include +#include #include "absl/cleanup/cleanup.h" +#include "api/field_trials_view.h" +#include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" #include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/pacing/pacing_controller.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "rtc_base/checks.h" -#include "rtc_base/experiments/field_trial_parser.h" -#include "rtc_base/experiments/field_trial_units.h" +#include "rtc_base/numerics/exp_filter.h" #include "rtc_base/trace_event.h" namespace webrtc { -namespace { - -constexpr const char* kBurstyPacerFieldTrial = "WebRTC-BurstyPacer"; - -} // namespace - const int TaskQueuePacedSender::kNoPacketHoldback = -1; -TaskQueuePacedSender::BurstyPacerFlags::BurstyPacerFlags( - const FieldTrialsView& field_trials) - : burst("burst") { - ParseFieldTrial({&burst}, field_trials.Lookup(kBurstyPacerFieldTrial)); -} - TaskQueuePacedSender::TaskQueuePacedSender( Clock* clock, PacingController::PacketSender* packet_sender, const FieldTrialsView& field_trials, TimeDelta max_hold_back_window, - int max_hold_back_window_in_packets, - absl::optional burst_interval) + int max_hold_back_window_in_packets) : clock_(clock), - bursty_pacer_flags_(field_trials), max_hold_back_window_(max_hold_back_window), max_hold_back_window_in_packets_(max_hold_back_window_in_packets), pacing_controller_(clock, packet_sender, field_trials), @@ -56,17 +55,6 @@ TaskQueuePacedSender::TaskQueuePacedSender( include_overhead_(false), task_queue_(TaskQueueBase::Current()) { RTC_DCHECK_GE(max_hold_back_window_, PacingController::kMinSleepTime); - // There are multiple field trials that can affect burst. If multiple bursts - // are specified we pick the largest of the values. - absl::optional burst = bursty_pacer_flags_.burst.GetOptional(); - // If not overriden by an experiment, the burst is specified by the - // `burst_interval` argument. 
- if (!burst.has_value()) { - burst = burst_interval; - } - if (burst.has_value()) { - pacing_controller_.SetSendBurstInterval(burst.value()); - } } TaskQueuePacedSender::~TaskQueuePacedSender() { @@ -74,6 +62,16 @@ TaskQueuePacedSender::~TaskQueuePacedSender() { is_shutdown_ = true; } +void TaskQueuePacedSender::SetSendBurstInterval(TimeDelta burst_interval) { + RTC_DCHECK_RUN_ON(task_queue_); + pacing_controller_.SetSendBurstInterval(burst_interval); +} + +void TaskQueuePacedSender::SetAllowProbeWithoutMediaPacket(bool allow) { + RTC_DCHECK_RUN_ON(task_queue_); + pacing_controller_.SetAllowProbeWithoutMediaPacket(allow); +} + void TaskQueuePacedSender::EnsureStarted() { RTC_DCHECK_RUN_ON(task_queue_); is_started_ = true; @@ -177,7 +175,7 @@ DataSize TaskQueuePacedSender::QueueSizeData() const { return GetStats().queue_size; } -absl::optional TaskQueuePacedSender::FirstSentPacketTime() const { +std::optional TaskQueuePacedSender::FirstSentPacketTime() const { return GetStats().first_sent_packet_time; } @@ -263,7 +261,7 @@ void TaskQueuePacedSender::MaybeProcessPackets( DataRate pacing_rate = pacing_controller_.pacing_rate(); if (max_hold_back_window_in_packets_ != kNoPacketHoldback && !pacing_rate.IsZero() && - packet_size_.filtered() != rtc::ExpFilter::kValueUndefined) { + packet_size_.filtered() != ExpFilter::kValueUndefined) { TimeDelta avg_packet_send_time = DataSize::Bytes(packet_size_.filtered()) / pacing_rate; hold_back_window = diff --git a/modules/pacing/task_queue_paced_sender.h b/modules/pacing/task_queue_paced_sender.h index fd71be1654..88db41b864 100644 --- a/modules/pacing/task_queue_paced_sender.h +++ b/modules/pacing/task_queue_paced_sender.h @@ -15,19 +15,22 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/field_trials_view.h" +#include "api/rtp_packet_sender.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "modules/pacing/pacing_controller.h" #include "modules/pacing/rtp_packet_pacer.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/exp_filter.h" #include "rtc_base/thread_annotations.h" @@ -45,23 +48,24 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { // processed. Increasing this reduces thread wakeups at the expense of higher // latency. // - // If the `burst_interval` parameter is set, the pacer is allowed to build up - // a packet "debt" that correspond to approximately the send rate during the - // specified interval. This greatly reduced wake ups by not pacing packets - // within the allowed burst budget. - // // The taskqueue used when constructing a TaskQueuePacedSender will also be // used for pacing. 
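Since the burst field trial and the burst_interval constructor argument are gone, burst and probe behavior are now configured through setters after construction. A minimal sketch, assuming a Clock* clock, a PacingController::PacketSender* packet_sender and a FieldTrialsView field_trials are in scope; the 40 ms burst is an arbitrary example value:

TaskQueuePacedSender pacer(clock, packet_sender, field_trials,
                           /*max_hold_back_window=*/PacingController::kMinSleepTime,
                           TaskQueuePacedSender::kNoPacketHoldback);
pacer.SetSendBurstInterval(TimeDelta::Millis(40));  // Replaces WebRTC-BurstyPacer.
pacer.SetAllowProbeWithoutMediaPacket(true);        // Opt-in behavior.
pacer.EnsureStarted();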
- TaskQueuePacedSender( - Clock* clock, - PacingController::PacketSender* packet_sender, - const FieldTrialsView& field_trials, - TimeDelta max_hold_back_window, - int max_hold_back_window_in_packets, - absl::optional<TimeDelta> burst_interval = absl::nullopt); + TaskQueuePacedSender(Clock* clock, + PacingController::PacketSender* packet_sender, + const FieldTrialsView& field_trials, + TimeDelta max_hold_back_window, + int max_hold_back_window_in_packets); ~TaskQueuePacedSender() override; + // The pacer is allowed to send enqueued packets in bursts and can build up a + // packet "debt" that corresponds to approximately the send rate during + // 'burst_interval'. + void SetSendBurstInterval(TimeDelta burst_interval); + + // A probe may be sent without first waiting for a media packet. + void SetAllowProbeWithoutMediaPacket(bool allow); + // Ensure that necessary delayed tasks are scheduled. void EnsureStarted(); @@ -106,7 +110,7 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { DataSize QueueSizeData() const override; // Returns the time when the first packet was sent; - absl::optional<Timestamp> FirstSentPacketTime() const override; + std::optional<Timestamp> FirstSentPacketTime() const override; // Returns the number of milliseconds it will take to send the current // packets in the queue, given the current size and bitrate, ignoring prio. @@ -126,7 +130,7 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { Timestamp oldest_packet_enqueue_time; DataSize queue_size; TimeDelta expected_queue_time; - absl::optional<Timestamp> first_sent_packet_time; + std::optional<Timestamp> first_sent_packet_time; }; void OnStatsUpdated(const Stats& stats); @@ -145,15 +149,6 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { Stats GetStats() const; Clock* const clock_; - struct BurstyPacerFlags { - // Parses `kBurstyPacerFieldTrial`. Example: - // --force-fieldtrials=WebRTC-BurstyPacer/burst:20ms/ - explicit BurstyPacerFlags(const FieldTrialsView& field_trials); - // If set, the pacer is allowed to build up a packet "debt" that correspond - // to approximately the send rate during the specified interval. - FieldTrialOptional<TimeDelta> burst; - }; - const BurstyPacerFlags bursty_pacer_flags_; // The holdback window prevents too frequent delayed MaybeProcessPackets() // calls. These are only applicable if `allow_low_precision` is false. @@ -179,7 +174,7 @@ class TaskQueuePacedSender : public RtpPacketPacer, public RtpPacketSender { bool is_shutdown_ RTC_GUARDED_BY(task_queue_); // Filtered size of enqueued packets, in bytes.
- rtc::ExpFilter packet_size_ RTC_GUARDED_BY(task_queue_); + ExpFilter packet_size_ RTC_GUARDED_BY(task_queue_); bool include_overhead_ RTC_GUARDED_BY(task_queue_); Stats current_stats_ RTC_GUARDED_BY(task_queue_); diff --git a/modules/pacing/task_queue_paced_sender_unittest.cc b/modules/pacing/task_queue_paced_sender_unittest.cc index 54347493e7..16590ad09b 100644 --- a/modules/pacing/task_queue_paced_sender_unittest.cc +++ b/modules/pacing/task_queue_paced_sender_unittest.cc @@ -11,19 +11,21 @@ #include "modules/pacing/task_queue_paced_sender.h" #include -#include -#include +#include +#include #include -#include #include #include +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/pacing/pacing_controller.h" #include "modules/pacing/packet_router.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "test/gmock.h" @@ -33,8 +35,8 @@ using ::testing::_; using ::testing::AtLeast; -using ::testing::Return; -using ::testing::SaveArg; +using ::testing::AtMost; +using ::testing::NiceMock; namespace webrtc { namespace { @@ -140,8 +142,8 @@ TEST(TaskQueuePacedSenderTest, PacesPackets) { size_t packets_sent = 0; Timestamp end_time = Timestamp::PlusInfinity(); EXPECT_CALL(packet_router, SendPacket) - .WillRepeatedly([&](std::unique_ptr packet, - const PacedPacketInfo& cluster_info) { + .WillRepeatedly([&](std::unique_ptr /* packet */, + const PacedPacketInfo& /* cluster_info */) { ++packets_sent; if (packets_sent == kPacketsToSend) { end_time = time_controller.GetClock()->CurrentTime(); @@ -167,9 +169,10 @@ TEST(TaskQueuePacedSenderTest, PacesPacketsWithBurst) { TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials, PacingController::kMinSleepTime, - TaskQueuePacedSender::kNoPacketHoldback, - // Half a second of bursting. - TimeDelta::Seconds(0.5)); + TaskQueuePacedSender::kNoPacketHoldback); + pacer.SetSendBurstInterval( + // Half a second of bursting. + TimeDelta::Seconds(0.5)); // Insert a number of packets, covering one second. 
static constexpr size_t kPacketsToSend = 42; @@ -185,8 +188,8 @@ TEST(TaskQueuePacedSenderTest, PacesPacketsWithBurst) { size_t packets_sent = 0; Timestamp end_time = Timestamp::PlusInfinity(); EXPECT_CALL(packet_router, SendPacket) - .WillRepeatedly([&](std::unique_ptr packet, - const PacedPacketInfo& cluster_info) { + .WillRepeatedly([&](std::unique_ptr /* packet */, + const PacedPacketInfo& /* cluster_info */) { ++packets_sent; if (packets_sent == kPacketsToSend) { end_time = time_controller.GetClock()->CurrentTime(); @@ -237,8 +240,8 @@ TEST(TaskQueuePacedSenderTest, ReschedulesProcessOnRateChange) { EXPECT_CALL(packet_router, SendPacket) .Times(3) - .WillRepeatedly([&](std::unique_ptr packet, - const PacedPacketInfo& cluster_info) { + .WillRepeatedly([&](std::unique_ptr /* packet */, + const PacedPacketInfo& /* cluster_info */) { if (first_packet_time.IsInfinite()) { first_packet_time = time_controller.GetClock()->CurrentTime(); } else if (second_packet_time.IsInfinite()) { @@ -262,7 +265,7 @@ TEST(TaskQueuePacedSenderTest, ReschedulesProcessOnRateChange) { TEST(TaskQueuePacedSenderTest, SendsAudioImmediately) { GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); - MockPacketRouter packet_router; + NiceMock packet_router; ScopedKeyValueConfig trials; TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials, @@ -270,21 +273,16 @@ TEST(TaskQueuePacedSenderTest, SendsAudioImmediately) { TaskQueuePacedSender::kNoPacketHoldback); const DataRate kPacingDataRate = DataRate::KilobitsPerSec(125); - const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize); - const TimeDelta kPacketPacingTime = kPacketSize / kPacingDataRate; pacer.SetPacingRates(kPacingDataRate, DataRate::Zero()); pacer.EnsureStarted(); - // Add some initial video packets, only one should be sent. - EXPECT_CALL(packet_router, SendPacket); + // Add some initial video packets. Not all should be sent immediately. + EXPECT_CALL(packet_router, SendPacket).Times(AtMost(9)); pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 10)); time_controller.AdvanceTime(TimeDelta::Zero()); ::testing::Mock::VerifyAndClearExpectations(&packet_router); - // Advance time, but still before next packet should be sent. - time_controller.AdvanceTime(kPacketPacingTime / 2); - // Insert an audio packet, it should be sent immediately. EXPECT_CALL(packet_router, SendPacket); pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kAudio, 1)); @@ -295,12 +293,13 @@ TEST(TaskQueuePacedSenderTest, SendsAudioImmediately) { TEST(TaskQueuePacedSenderTest, SleepsDuringCoalscingWindow) { const TimeDelta kCoalescingWindow = TimeDelta::Millis(5); GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); - MockPacketRouter packet_router; + NiceMock packet_router; ScopedKeyValueConfig trials; TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials, kCoalescingWindow, TaskQueuePacedSender::kNoPacketHoldback); + pacer.SetSendBurstInterval(TimeDelta::Zero()); // Set rates so one packet adds one ms of buffer level. const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize); @@ -310,9 +309,9 @@ TEST(TaskQueuePacedSenderTest, SleepsDuringCoalscingWindow) { pacer.SetPacingRates(kPacingDataRate, DataRate::Zero()); pacer.EnsureStarted(); - // Add 10 packets. The first should be sent immediately since the buffers - // are clear. - EXPECT_CALL(packet_router, SendPacket); + // Add 10 packets. The first burst should be sent immediately since the + // buffers are clear. 
+ EXPECT_CALL(packet_router, SendPacket).Times(AtMost(9)); pacer.EnqueuePackets(GeneratePackets(RtpPacketMediaType::kVideo, 10)); time_controller.AdvanceTime(TimeDelta::Zero()); ::testing::Mock::VerifyAndClearExpectations(&packet_router); @@ -370,11 +369,12 @@ TEST(TaskQueuePacedSenderTest, SchedulesProbeAtSentTime) { ScopedKeyValueConfig trials( "WebRTC-Bwe-ProbingBehavior/min_probe_delta:1ms/"); GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); - MockPacketRouter packet_router; + NiceMock packet_router; TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials, PacingController::kMinSleepTime, TaskQueuePacedSender::kNoPacketHoldback); + pacer.SetSendBurstInterval(TimeDelta::Zero()); // Set rates so one packet adds 4ms of buffer level. const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize); @@ -440,8 +440,7 @@ TEST(TaskQueuePacedSenderTest, SchedulesProbeAtSentTime) { TEST(TaskQueuePacedSenderTest, NoMinSleepTimeWhenProbing) { // Set min_probe_delta to be less than kMinSleepTime (1ms). const TimeDelta kMinProbeDelta = TimeDelta::Micros(200); - ScopedKeyValueConfig trials( - "WebRTC-Bwe-ProbingBehavior/min_probe_delta:200us/"); + ScopedKeyValueConfig trials; GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); MockPacketRouter packet_router; TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials, @@ -470,6 +469,7 @@ TEST(TaskQueuePacedSenderTest, NoMinSleepTimeWhenProbing) { {{.at_time = time_controller.GetClock()->CurrentTime(), .target_data_rate = kProbingRate, .target_duration = TimeDelta::Millis(15), + .min_probe_delta = kMinProbeDelta, .target_probe_count = 5, .id = kProbeClusterId}}); @@ -504,11 +504,12 @@ TEST(TaskQueuePacedSenderTest, PacketBasedCoalescing) { const int kPacketBasedHoldback = 5; GlobalSimulatedTimeController time_controller(Timestamp::Millis(1234)); - MockPacketRouter packet_router; + NiceMock packet_router; ScopedKeyValueConfig trials; TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials, kFixedCoalescingWindow, kPacketBasedHoldback); + pacer.SetSendBurstInterval(TimeDelta::Zero()); // Set rates so one packet adds one ms of buffer level. const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize); @@ -559,6 +560,7 @@ TEST(TaskQueuePacedSenderTest, FixedHoldBackHasPriorityOverPackets) { TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials, kFixedCoalescingWindow, kPacketBasedHoldback); + pacer.SetSendBurstInterval(TimeDelta::Zero()); // Set rates so one packet adds one ms of buffer level. const DataSize kPacketSize = DataSize::Bytes(kDefaultPacketSize); @@ -691,7 +693,7 @@ TEST(TaskQueuePacedSenderTest, PostedPacketsNotSendFromRemovePacketsForSsrc) { TEST(TaskQueuePacedSenderTest, Stats) { static constexpr Timestamp kStartTime = Timestamp::Millis(1234); GlobalSimulatedTimeController time_controller(kStartTime); - MockPacketRouter packet_router; + NiceMock packet_router; ScopedKeyValueConfig trials; TaskQueuePacedSender pacer(time_controller.GetClock(), &packet_router, trials, @@ -708,7 +710,8 @@ TEST(TaskQueuePacedSenderTest, Stats) { // Allowed `QueueSizeData` and `ExpectedQueueTime` deviation. 
static constexpr size_t kAllowedPacketsDeviation = 1; static constexpr DataSize kAllowedQueueSizeDeviation = - DataSize::Bytes(kDefaultPacketSize * kAllowedPacketsDeviation); + DataSize::Bytes(kDefaultPacketSize * kAllowedPacketsDeviation) + + kPacingRate * PacingController::kDefaultBurstInterval; static constexpr TimeDelta kAllowedQueueTimeDeviation = kAllowedQueueSizeDeviation / kPacingRate; diff --git a/modules/portal/BUILD.gn b/modules/portal/BUILD.gn index d0756f269b..de8a81be55 100644 --- a/modules/portal/BUILD.gn +++ b/modules/portal/BUILD.gn @@ -28,11 +28,12 @@ if ((is_linux || is_chromeos) && rtc_use_pipewire) { pkg_config("gbm") { packages = [ "gbm" ] } + pkg_config("gl") { + packages = [ "gl" ] + ignore_libs = true + } pkg_config("egl") { packages = [ "egl" ] - } - pkg_config("epoxy") { - packages = [ "epoxy" ] ignore_libs = true } pkg_config("libdrm") { @@ -72,8 +73,8 @@ if ((is_linux || is_chromeos) && rtc_use_pipewire) { configs = [ ":pipewire_base", ":gbm", + ":gl", ":egl", - ":epoxy", ":libdrm", ] } @@ -111,9 +112,10 @@ if ((is_linux || is_chromeos) && rtc_use_pipewire) { "../../rtc_base:checks", "../../rtc_base:logging", "../../rtc_base:sanitizer", + "../../rtc_base:stringutils", "../../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/strings", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] if (!rtc_link_pipewire) { defines = [ "WEBRTC_DLOPEN_PIPEWIRE" ] diff --git a/modules/portal/pipewire_utils.h b/modules/portal/pipewire_utils.h index 8344a8cefb..c1327b85c9 100644 --- a/modules/portal/pipewire_utils.h +++ b/modules/portal/pipewire_utils.h @@ -11,6 +11,21 @@ #ifndef MODULES_PORTAL_PIPEWIRE_UTILS_H_ #define MODULES_PORTAL_PIPEWIRE_UTILS_H_ +#include +#include +#include +#include + +// static +struct dma_buf_sync { + uint64_t flags; +}; +#define DMA_BUF_SYNC_READ (1 << 0) +#define DMA_BUF_SYNC_START (0 << 2) +#define DMA_BUF_SYNC_END (1 << 2) +#define DMA_BUF_BASE 'b' +#define DMA_BUF_IOCTL_SYNC _IOW(DMA_BUF_BASE, 0, struct dma_buf_sync) + struct pw_thread_loop; namespace webrtc { @@ -32,6 +47,66 @@ class PipeWireThreadLoopLock { pw_thread_loop* const loop_; }; +// We should synchronize DMA Buffer object access from CPU to avoid potential +// cache incoherency and data loss. 
+// See +// https://01.org/linuxgraphics/gfx-docs/drm/driver-api/dma-buf.html#cpu-access-to-dma-buffer-objects +static bool SyncDmaBuf(int fd, uint64_t start_or_end) { + struct dma_buf_sync sync = {0}; + + sync.flags = start_or_end | DMA_BUF_SYNC_READ; + + while (true) { + int ret; + ret = ioctl(fd, DMA_BUF_IOCTL_SYNC, &sync); + if (ret == -1 && errno == EINTR) { + continue; + } else if (ret == -1) { + return false; + } else { + break; + } + } + + return true; +} + +class ScopedBuf { + public: + ScopedBuf() {} + ScopedBuf(uint8_t* map, int map_size, int fd, bool is_dma_buf = false) + : map_(map), map_size_(map_size), fd_(fd), is_dma_buf_(is_dma_buf) {} + ~ScopedBuf() { + if (map_ != MAP_FAILED) { + if (is_dma_buf_) { + SyncDmaBuf(fd_, DMA_BUF_SYNC_END); + } + munmap(map_, map_size_); + } + } + + explicit operator bool() { return map_ != MAP_FAILED; } + + void initialize(uint8_t* map, int map_size, int fd, bool is_dma_buf = false) { + map_ = map; + map_size_ = map_size; + is_dma_buf_ = is_dma_buf; + fd_ = fd; + + if (is_dma_buf_) { + SyncDmaBuf(fd_, DMA_BUF_SYNC_START); + } + } + + uint8_t* get() { return map_; } + + protected: + uint8_t* map_ = static_cast(MAP_FAILED); + int map_size_; + int fd_; + bool is_dma_buf_; +}; + } // namespace webrtc #endif // MODULES_PORTAL_PIPEWIRE_UTILS_H_ diff --git a/modules/portal/xdg_desktop_portal_utils.cc b/modules/portal/xdg_desktop_portal_utils.cc index 271e084463..d911f0d106 100644 --- a/modules/portal/xdg_desktop_portal_utils.cc +++ b/modules/portal/xdg_desktop_portal_utils.cc @@ -11,9 +11,11 @@ #include +#include "absl/strings/str_replace.h" #include "absl/strings/string_view.h" #include "modules/portal/scoped_glib.h" #include "rtc_base/logging.h" +#include "rtc_base/strings/str_join.h" namespace webrtc { namespace xdg_portal { @@ -50,17 +52,20 @@ RequestResponse RequestResponseFromPortalResponse(uint32_t portal_response) { std::string PrepareSignalHandle(absl::string_view token, GDBusConnection* connection) { - Scoped sender( - g_strdup(g_dbus_connection_get_unique_name(connection) + 1)); - for (int i = 0; sender.get()[i]; ++i) { - if (sender.get()[i] == '.') { - sender.get()[i] = '_'; - } + const char* unique_name = g_dbus_connection_get_unique_name(connection); + if (unique_name == nullptr || *unique_name == '\0') { + return std::string(); + } + + absl::string_view unique_name_sv = unique_name; + if (!unique_name_sv.empty()) { + unique_name_sv.remove_prefix(1); } - const char* handle = - g_strconcat(kDesktopRequestObjectPath, "/", sender.get(), "/", - std::string(token).c_str(), /*end of varargs*/ nullptr); - return handle; + + std::string sender = absl::StrReplaceAll(unique_name_sv, {{".", "_"}}); + std::vector parts = {kDesktopRequestObjectPath, sender, + token}; + return webrtc::StrJoin(parts, "/"); } uint32_t SetupRequestResponseSignal(absl::string_view object_path, diff --git a/modules/portal/xdg_session_details.h b/modules/portal/xdg_session_details.h index ab52508c2f..779f5011c7 100644 --- a/modules/portal/xdg_session_details.h +++ b/modules/portal/xdg_session_details.h @@ -12,6 +12,7 @@ #define MODULES_PORTAL_XDG_SESSION_DETAILS_H_ #include +#include #include diff --git a/modules/remote_bitrate_estimator/BUILD.gn b/modules/remote_bitrate_estimator/BUILD.gn index b9346ae299..a79b4363e0 100644 --- a/modules/remote_bitrate_estimator/BUILD.gn +++ b/modules/remote_bitrate_estimator/BUILD.gn @@ -21,55 +21,103 @@ rtc_library("remote_bitrate_estimator") { "overuse_detector.h", "overuse_estimator.cc", "overuse_estimator.h", - 
"packet_arrival_map.cc", - "packet_arrival_map.h", "remote_bitrate_estimator_abs_send_time.cc", "remote_bitrate_estimator_abs_send_time.h", "remote_bitrate_estimator_single_stream.cc", "remote_bitrate_estimator_single_stream.h", - "remote_estimator_proxy.cc", - "remote_estimator_proxy.h", - "test/bwe_test_logging.h", ] deps = [ + "..:module_api", + "..:module_api_public", "../../api:field_trials_view", - "../../api:network_state_predictor_api", "../../api:rtp_headers", - "../../api/transport:field_trial_based_config", + "../../api/environment", + "../../api/transport:bandwidth_usage", "../../api/transport:network_control", "../../api/units:data_rate", "../../api/units:data_size", "../../api/units:time_delta", "../../api/units:timestamp", - "../../modules:module_api", - "../../modules:module_api_public", - "../../modules/congestion_controller/goog_cc:link_capacity_estimator", - "../../modules/rtp_rtcp:rtp_rtcp_format", "../../rtc_base:bitrate_tracker", "../../rtc_base:checks", "../../rtc_base:logging", - "../../rtc_base:rtc_numerics", "../../rtc_base:safe_minmax", - "../../rtc_base:stringutils", "../../rtc_base/experiments:field_trial_parser", "../../rtc_base/synchronization:mutex", "../../system_wrappers", - "../../system_wrappers:field_trial", "../../system_wrappers:metrics", - ] - absl_deps = [ + "../congestion_controller/goog_cc:link_capacity_estimator", + "../rtp_rtcp:rtp_rtcp_format", + "//third_party/abseil-cpp/absl/base:nullability", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] +} - if (rtc_enable_bwe_test_logging) { - defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=1" ] - sources += [ "test/bwe_test_logging.cc" ] - deps += [ "../../rtc_base:platform_thread" ] - } else { - defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0" ] - } +rtc_source_set("rtp_transport_feedback_generator") { + sources = [ "rtp_transport_feedback_generator.h" ] + deps = [ + "../../api/units:data_rate", + "../../api/units:data_size", + "../../api/units:time_delta", + "../../api/units:timestamp", + "../rtp_rtcp:rtp_rtcp_format", + ] +} + +rtc_library("transport_sequence_number_feedback_generator") { + sources = [ + "packet_arrival_map.cc", + "packet_arrival_map.h", + "transport_sequence_number_feedback_generator.cc", + "transport_sequence_number_feedback_generator.h", + ] + deps = [ + ":rtp_transport_feedback_generator", + "../../api:field_trials_view", + "../../api:rtp_headers", + "../../api/transport:network_control", + "../../api/units:data_rate", + "../../api/units:data_size", + "../../api/units:time_delta", + "../../api/units:timestamp", + "../../rtc_base:checks", + "../../rtc_base:logging", + "../../rtc_base:macromagic", + "../../rtc_base:rtc_numerics", + "../../rtc_base:safe_minmax", + "../../rtc_base/synchronization:mutex", + "../../system_wrappers", + "../rtp_rtcp:rtp_rtcp_format", + ] +} + +rtc_library("congestion_control_feedback_generator") { + sources = [ + "congestion_control_feedback_generator.cc", + "congestion_control_feedback_generator.h", + "congestion_control_feedback_tracker.cc", + "congestion_control_feedback_tracker.h", + ] + deps = [ + ":rtp_transport_feedback_generator", + "../../api:field_trials_view", + "../../api:sequence_checker", + "../../api/environment", + "../../api/units:data_rate", + "../../api/units:data_size", + "../../api/units:time_delta", + "../../api/units:timestamp", + "../../rtc_base:checks", + "../../rtc_base:logging", + "../../rtc_base:rtc_numerics", + "../../rtc_base/experiments:field_trial_parser", + 
"../../rtc_base/network:ecn_marking", + "../rtp_rtcp:ntp_time_util", + "../rtp_rtcp:rtp_rtcp_format", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/types:span", + ] } if (!build_with_chromium) { @@ -82,8 +130,6 @@ if (!build_with_chromium) { deps = [ "../../test:rtp_test_utils", "../rtp_rtcp:rtp_rtcp_format", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", ] @@ -108,6 +154,8 @@ if (rtc_include_tests) { sources = [ "aimd_rate_control_unittest.cc", + "congestion_control_feedback_generator_unittest.cc", + "congestion_control_feedback_tracker_unittest.cc", "inter_arrival_unittest.cc", "overuse_detector_unittest.cc", "packet_arrival_map_test.cc", @@ -115,19 +163,27 @@ if (rtc_include_tests) { "remote_bitrate_estimator_single_stream_unittest.cc", "remote_bitrate_estimator_unittest_helper.cc", "remote_bitrate_estimator_unittest_helper.h", - "remote_estimator_proxy_unittest.cc", + "transport_sequence_number_feedback_generator_unittest.cc", ] deps = [ + ":congestion_control_feedback_generator", ":remote_bitrate_estimator", + ":transport_sequence_number_feedback_generator", "..:module_api_public", + "../../api:rtp_headers", + "../../api/environment:environment_factory", + "../../api/transport:bandwidth_usage", "../../api/transport:mock_network_control", "../../api/transport:network_control", "../../api/units:data_rate", "../../api/units:data_size", "../../api/units:time_delta", "../../api/units:timestamp", + "../../rtc_base:buffer", "../../rtc_base:checks", + "../../rtc_base:logging", "../../rtc_base:random", + "../../rtc_base/network:ecn_marking", "../../system_wrappers", "../../test:explicit_key_value_config", "../../test:fileutils", @@ -135,6 +191,5 @@ if (rtc_include_tests) { "../pacing", "../rtp_rtcp:rtp_rtcp_format", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } } diff --git a/modules/remote_bitrate_estimator/aimd_rate_control.cc b/modules/remote_bitrate_estimator/aimd_rate_control.cc index 5ac4ce829d..e647207a2e 100644 --- a/modules/remote_bitrate_estimator/aimd_rate_control.cc +++ b/modules/remote_bitrate_estimator/aimd_rate_control.cc @@ -10,22 +10,23 @@ #include "modules/remote_bitrate_estimator/aimd_rate_control.h" -#include - #include #include #include +#include #include -#include "absl/strings/match.h" +#include "api/field_trials_view.h" +#include "api/transport/bandwidth_usage.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" -#include "modules/remote_bitrate_estimator/overuse_detector.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_minmax.h" namespace webrtc { namespace { @@ -183,7 +184,7 @@ void AimdRateControl::SetEstimate(DataRate bitrate, Timestamp at_time) { } void AimdRateControl::SetNetworkStateEstimate( - const absl::optional& estimate) { + const std::optional& estimate) { network_estimate_ = estimate; } @@ -221,7 +222,7 @@ TimeDelta AimdRateControl::GetExpectedBandwidthPeriod() const { void AimdRateControl::ChangeBitrate(const RateControlInput& input, Timestamp at_time) { - absl::optional new_bitrate; + std::optional new_bitrate; DataRate estimated_throughput = input.estimated_throughput.value_or(latest_estimated_throughput_); if (input.estimated_throughput) @@ 
-287,6 +288,10 @@ void AimdRateControl::ChangeBitrate(const RateControlInput& input, // Set bit rate to something slightly lower than the measured throughput // to get rid of any self-induced delay. decreased_bitrate = estimated_throughput * beta_; + if (decreased_bitrate > DataRate::KilobitsPerSec(5)) { + decreased_bitrate -= DataRate::KilobitsPerSec(5); + } + if (decreased_bitrate > current_bitrate_) { // TODO(terelius): The link_capacity estimate may be based on old // throughput measurements. Relying on them may lead to unnecessary diff --git a/modules/remote_bitrate_estimator/aimd_rate_control.h b/modules/remote_bitrate_estimator/aimd_rate_control.h index 4efde54410..e5d63915d0 100644 --- a/modules/remote_bitrate_estimator/aimd_rate_control.h +++ b/modules/remote_bitrate_estimator/aimd_rate_control.h @@ -13,10 +13,12 @@ #include -#include "absl/types/optional.h" +#include + #include "api/field_trials_view.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "modules/congestion_controller/goog_cc/link_capacity_estimator.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" @@ -57,7 +59,7 @@ class AimdRateControl { void SetInApplicationLimitedRegion(bool in_alr); void SetEstimate(DataRate bitrate, Timestamp at_time); void SetNetworkStateEstimate( - const absl::optional& estimate); + const std::optional& estimate); // Returns the increase rate when used bandwidth is near the link capacity. double GetNearMaxIncreaseRateBpsPerSecond() const; @@ -90,7 +92,7 @@ class AimdRateControl { DataRate current_bitrate_; DataRate latest_estimated_throughput_; LinkCapacityEstimator link_capacity_; - absl::optional network_estimate_; + std::optional network_estimate_; RateControlState rate_control_state_; Timestamp time_last_bitrate_change_; Timestamp time_last_bitrate_decrease_; @@ -106,8 +108,8 @@ class AimdRateControl { // If "Disabled", estimated link capacity is not used as upper bound. FieldTrialFlag disable_estimate_bounded_increase_{"Disabled"}; FieldTrialParameter use_current_estimate_as_min_upper_bound_{"c_upper", - false}; - absl::optional last_decrease_; + true}; + std::optional last_decrease_; }; } // namespace webrtc diff --git a/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc b/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc index 5b8b0caffe..26c0a869da 100644 --- a/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc +++ b/modules/remote_bitrate_estimator/aimd_rate_control_unittest.cc @@ -9,6 +9,10 @@ */ #include "modules/remote_bitrate_estimator/aimd_rate_control.h" +#include + +#include "api/transport/bandwidth_usage.h" +#include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -106,18 +110,22 @@ TEST(AimdRateControlTest, DefaultPeriodUntilFirstOveruse) { EXPECT_NE(aimd_rate_control.GetExpectedBandwidthPeriod(), kDefaultPeriod); } -TEST(AimdRateControlTest, ExpectedPeriodAfter20kbpsDropAnd5kbpsIncrease) { +TEST(AimdRateControlTest, ExpectedPeriodAfterTypicalDrop) { AimdRateControl aimd_rate_control(ExplicitKeyValueConfig("")); - constexpr DataRate kInitialBitrate = DataRate::BitsPerSec(110'000); + // The rate increase at 216 kbps should be 12 kbps. If we drop from + // 216 + 4*12 = 264 kbps, it should take 4 seconds to recover. Since we + // back off to 0.85*acked_rate-5kbps, the acked bitrate needs to be 260 + // kbps to end up at 216 kbps. 
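// For reference, a minimal standalone sketch (not part of this change) of the
// arithmetic in the comment above, using integer kbps so the checks are exact.
// The 0.85 backoff factor and the 5 kbps subtraction are taken from the
// comment and the ChangeBitrate() change above; the 12 kbps/s near-max
// increase rate is the value this test expects.
constexpr int kAckedKbps = 260;
constexpr int kBackedOffKbps = kAckedKbps * 85 / 100 - 5;  // 0.85 * 260 - 5 = 216.
constexpr int kStartKbps = 264;
constexpr int kNearMaxIncreaseKbpsPerSec = 12;
static_assert(kBackedOffKbps == 216, "Overuse backs the estimate off to 216 kbps.");
static_assert((kStartKbps - kBackedOffKbps) / kNearMaxIncreaseKbpsPerSec == 4,
              "Recovering the 48 kbps drop at 12 kbps/s takes 4 seconds.");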
+ constexpr DataRate kInitialBitrate = DataRate::BitsPerSec(264'000); + constexpr DataRate kUpdatedBitrate = DataRate::BitsPerSec(216'000); + const DataRate kAckedBitrate = + (kUpdatedBitrate + DataRate::BitsPerSec(5'000)) / kFractionAfterOveruse; Timestamp now = kInitialTime; aimd_rate_control.SetEstimate(kInitialBitrate, now); now += TimeDelta::Millis(100); - // Make the bitrate drop by 20 kbps to get to 90 kbps. - // The rate increase at 90 kbps should be 5 kbps, so the period should be 4 s. - const DataRate kAckedBitrate = - (kInitialBitrate - DataRate::BitsPerSec(20'000)) / kFractionAfterOveruse; aimd_rate_control.Update({BandwidthUsage::kBwOverusing, kAckedBitrate}, now); - EXPECT_EQ(aimd_rate_control.GetNearMaxIncreaseRateBpsPerSecond(), 5'000); + EXPECT_EQ(aimd_rate_control.LatestEstimate(), kUpdatedBitrate); + EXPECT_EQ(aimd_rate_control.GetNearMaxIncreaseRateBpsPerSecond(), 12'000); EXPECT_EQ(aimd_rate_control.GetExpectedBandwidthPeriod(), TimeDelta::Seconds(4)); } @@ -161,7 +169,7 @@ TEST(AimdRateControlTest, SendingRateBoundedWhenThroughputNotEstimated) { now += (kInitializationTime + TimeDelta::Millis(1)); aimd_rate_control.Update({BandwidthUsage::kBwNormal, kInitialBitrate}, now); for (int i = 0; i < 100; ++i) { - aimd_rate_control.Update({BandwidthUsage::kBwNormal, absl::nullopt}, now); + aimd_rate_control.Update({BandwidthUsage::kBwNormal, std::nullopt}, now); now += TimeDelta::Millis(100); } EXPECT_LE(aimd_rate_control.LatestEstimate(), @@ -183,7 +191,7 @@ TEST(AimdRateControlTest, EstimateDoesNotIncreaseInAlr) { ASSERT_EQ(aimd_rate_control.LatestEstimate(), kInitialBitrate); for (int i = 0; i < 100; ++i) { - aimd_rate_control.Update({BandwidthUsage::kBwNormal, absl::nullopt}, now); + aimd_rate_control.Update({BandwidthUsage::kBwNormal, std::nullopt}, now); now += TimeDelta::Millis(100); } EXPECT_EQ(aimd_rate_control.LatestEstimate(), kInitialBitrate); @@ -204,6 +212,7 @@ TEST(AimdRateControlTest, SetEstimateIncreaseBweInAlr) { TEST(AimdRateControlTest, SetEstimateUpperLimitedByNetworkEstimate) { AimdRateControl aimd_rate_control(ExplicitKeyValueConfig(""), /*send_side=*/true); + aimd_rate_control.SetEstimate(DataRate::BitsPerSec(300'000), kInitialTime); NetworkStateEstimate network_estimate; network_estimate.link_capacity_upper = DataRate::BitsPerSec(400'000); aimd_rate_control.SetNetworkStateEstimate(network_estimate); @@ -213,11 +222,9 @@ TEST(AimdRateControlTest, SetEstimateUpperLimitedByNetworkEstimate) { } TEST(AimdRateControlTest, - SetEstimateUpperLimitedByCurrentBitrateIfNetworkEstimateIsLow) { - AimdRateControl aimd_rate_control( - ExplicitKeyValueConfig( - "WebRTC-Bwe-EstimateBoundedIncrease/c_upper:true/"), - /*send_side=*/true); + SetEstimateDefaultUpperLimitedByCurrentBitrateIfNetworkEstimateIsLow) { + AimdRateControl aimd_rate_control(ExplicitKeyValueConfig(""), + /*send_side=*/true); aimd_rate_control.SetEstimate(DataRate::BitsPerSec(500'000), kInitialTime); ASSERT_EQ(aimd_rate_control.LatestEstimate(), DataRate::BitsPerSec(500'000)); @@ -229,9 +236,12 @@ TEST(AimdRateControlTest, } TEST(AimdRateControlTest, - SetEstimateDefaultNotUpperLimitedByCurrentBitrateIfNetworkEstimateIsLow) { - AimdRateControl aimd_rate_control(ExplicitKeyValueConfig(""), - /*send_side=*/true); + SetEstimateNotUpperLimitedByCurrentBitrateIfNetworkEstimateIsLowIf) { + AimdRateControl aimd_rate_control( + ExplicitKeyValueConfig( + "WebRTC-Bwe-EstimateBoundedIncrease/c_upper:false/"), + /*send_side=*/true); + aimd_rate_control.SetEstimate(DataRate::BitsPerSec(500'000), kInitialTime); 
ASSERT_EQ(aimd_rate_control.LatestEstimate(), DataRate::BitsPerSec(500'000)); @@ -281,7 +291,7 @@ TEST(AimdRateControlTest, EstimateIncreaseWhileNotInAlr) { aimd_rate_control.SetInApplicationLimitedRegion(false); aimd_rate_control.Update({BandwidthUsage::kBwNormal, kInitialBitrate}, now); for (int i = 0; i < 100; ++i) { - aimd_rate_control.Update({BandwidthUsage::kBwNormal, absl::nullopt}, now); + aimd_rate_control.Update({BandwidthUsage::kBwNormal, std::nullopt}, now); now += TimeDelta::Millis(100); } EXPECT_GT(aimd_rate_control.LatestEstimate(), kInitialBitrate); @@ -300,7 +310,7 @@ TEST(AimdRateControlTest, EstimateNotLimitedByNetworkEstimateIfDisabled) { aimd_rate_control.SetNetworkStateEstimate(network_estimate); for (int i = 0; i < 100; ++i) { - aimd_rate_control.Update({BandwidthUsage::kBwNormal, absl::nullopt}, now); + aimd_rate_control.Update({BandwidthUsage::kBwNormal, std::nullopt}, now); now += TimeDelta::Millis(100); } EXPECT_GT(aimd_rate_control.LatestEstimate(), diff --git a/modules/remote_bitrate_estimator/bwe_defines.cc b/modules/remote_bitrate_estimator/bwe_defines.cc index db92f46717..fa4ff0cecf 100644 --- a/modules/remote_bitrate_estimator/bwe_defines.cc +++ b/modules/remote_bitrate_estimator/bwe_defines.cc @@ -10,13 +10,18 @@ #include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include + +#include "api/transport/bandwidth_usage.h" +#include "api/units/data_rate.h" + namespace webrtc { const char kBweTypeHistogram[] = "WebRTC.BWE.Types"; RateControlInput::RateControlInput( BandwidthUsage bw_state, - const absl::optional& estimated_throughput) + const std::optional& estimated_throughput) : bw_state(bw_state), estimated_throughput(estimated_throughput) {} RateControlInput::~RateControlInput() = default; diff --git a/modules/remote_bitrate_estimator/congestion_control_feedback_generator.cc b/modules/remote_bitrate_estimator/congestion_control_feedback_generator.cc new file mode 100644 index 0000000000..bf13fa352f --- /dev/null +++ b/modules/remote_bitrate_estimator/congestion_control_feedback_generator.cc @@ -0,0 +1,123 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/remote_bitrate_estimator/congestion_control_feedback_generator.h" + +#include +#include +#include +#include +#include +#include + +#include "api/environment/environment.h" +#include "api/field_trials_view.h" +#include "api/sequence_checker.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/rtp_rtcp/source/ntp_time_util.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "rtc_base/checks.h" +#include "rtc_base/experiments/field_trial_parser.h" + +namespace webrtc { + +constexpr DataRate kMaxFeedbackRate = webrtc::DataRate::KilobitsPerSec(500); + +CongestionControlFeedbackGenerator::CongestionControlFeedbackGenerator( + const Environment& env, + RtcpSender rtcp_sender) + : env_(env), + rtcp_sender_(std::move(rtcp_sender)), + min_time_between_feedback_("min_send_delta", TimeDelta::Millis(25)), + max_time_to_wait_for_packet_with_marker_("max_wait_for_marker", + TimeDelta::Millis(25)), + max_time_between_feedback_("max_send_delta", TimeDelta::Millis(500)) { + ParseFieldTrial( + {&min_time_between_feedback_, &max_time_to_wait_for_packet_with_marker_, + &max_time_between_feedback_}, + env.field_trials().Lookup("WebRTC-RFC8888CongestionControlFeedback")); +} + +void CongestionControlFeedbackGenerator::OnReceivedPacket( + const RtpPacketReceived& packet) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + + marker_bit_seen_ |= packet.Marker(); + if (!first_arrival_time_since_feedback_) { + first_arrival_time_since_feedback_ = packet.arrival_time(); + } + feedback_trackers_[packet.Ssrc()].ReceivedPacket(packet); + if (NextFeedbackTime() < packet.arrival_time()) { + SendFeedback(env_.clock().CurrentTime()); + } +} + +Timestamp CongestionControlFeedbackGenerator::NextFeedbackTime() const { + if (!first_arrival_time_since_feedback_) { + return std::max(env_.clock().CurrentTime() + min_time_between_feedback_, + next_possible_feedback_send_time_); + } + + if (!marker_bit_seen_) { + return std::max(next_possible_feedback_send_time_, + *first_arrival_time_since_feedback_ + + max_time_to_wait_for_packet_with_marker_.Get()); + } + return next_possible_feedback_send_time_; +} + +TimeDelta CongestionControlFeedbackGenerator::Process(Timestamp now) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (NextFeedbackTime() <= now) { + SendFeedback(now); + } + return NextFeedbackTime() - now; +} + +void CongestionControlFeedbackGenerator::SendFeedback(Timestamp now) { + RTC_DCHECK_GE(now, next_possible_feedback_send_time_); + uint32_t compact_ntp = + CompactNtp(env_.clock().ConvertTimestampToNtpTime(now)); + std::vector rtcp_packet_info; + for (auto& [unused, tracker] : feedback_trackers_) { + tracker.AddPacketsToFeedback(now, rtcp_packet_info); + } + marker_bit_seen_ = false; + first_arrival_time_since_feedback_ = std::nullopt; + + auto feedback = std::make_unique( + std::move(rtcp_packet_info), compact_ntp); + CalculateNextPossibleSendTime(DataSize::Bytes(feedback->BlockLength()), now); + + std::vector> rtcp_packets; + rtcp_packets.push_back(std::move(feedback)); + rtcp_sender_(std::move(rtcp_packets)); +} + +void CongestionControlFeedbackGenerator::CalculateNextPossibleSendTime( + DataSize feedback_size, + Timestamp now) { + TimeDelta time_since_last_sent = now - last_feedback_sent_time_; + DataSize debt_payed = time_since_last_sent * 
kMaxFeedbackRate; + send_rate_debt_ = debt_payed > send_rate_debt_ ? DataSize::Zero() + : send_rate_debt_ - debt_payed; + send_rate_debt_ += feedback_size; + last_feedback_sent_time_ = now; + next_possible_feedback_send_time_ = + now + std::clamp(send_rate_debt_ / kMaxFeedbackRate, + min_time_between_feedback_.Get(), + max_time_between_feedback_.Get()); +} + +} // namespace webrtc diff --git a/modules/remote_bitrate_estimator/congestion_control_feedback_generator.h b/modules/remote_bitrate_estimator/congestion_control_feedback_generator.h new file mode 100644 index 0000000000..a39f1daaef --- /dev/null +++ b/modules/remote_bitrate_estimator/congestion_control_feedback_generator.h @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_REMOTE_BITRATE_ESTIMATOR_CONGESTION_CONTROL_FEEDBACK_GENERATOR_H_ +#define MODULES_REMOTE_BITRATE_ESTIMATOR_CONGESTION_CONTROL_FEEDBACK_GENERATOR_H_ + +#include +#include +#include + +#include "api/environment/environment.h" +#include "api/sequence_checker.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/remote_bitrate_estimator/congestion_control_feedback_tracker.h" +#include "modules/remote_bitrate_estimator/rtp_transport_feedback_generator.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "rtc_base/experiments/field_trial_parser.h" + +namespace webrtc { + +// The class is responsible for generating RTCP feedback packets based on +// incoming media packets. Feedback format will comply with RFC 8888. +// https://datatracker.ietf.org/doc/rfc8888/ + +// Min and max duration between feedback is configurable using field +// trials, but per default, min is 25ms and max is 500ms. +// +// RTCP should not use more than 5% of the uplink link capacity. +// However, there is no good way for a feedback sender to know the +// link capacity unless media is sent in both directions. So we just assume that +// the link capacity is 10 Mbit/s or more and allow sending 500 kbit/s of +// feedback packets. This allows an approximate receive rate of 200 +// Mbit/s with feedback every 25ms. (200 Mbit/s with average size of 800 bytes = +// 31250 packets/s => 40 feedback packets/s with feedback of 780 packets each) + +// If possible, given the other constraints, feedback will be sent when a packet +// with marker bit is received in order to provide feedback as soon as possible +// after receiving a complete video frame. If no packet with marker bit is +// received, feedback can be delayed up to 25ms after the first packet since the +// last sent feedback. On good networks, this means that a sender may receive +// feedback for every sent frame. 
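// For reference, a minimal standalone sketch (not part of the patch) of the
// pacing described above and implemented by CalculateNextPossibleSendTime().
// Function and parameter names here are illustrative; the 500 kbit/s budget
// and the 25 ms / 500 ms bounds are the defaults used by this class.
#include <algorithm>
#include <cstdint>

int64_t NextFeedbackDelayUs(int64_t debt_bytes,
                            int64_t elapsed_since_last_send_us,
                            int64_t new_feedback_bytes) {
  constexpr int64_t kMaxFeedbackBytesPerSec = 500'000 / 8;  // 500 kbit/s.
  constexpr int64_t kMinDeltaUs = 25'000;                   // 25 ms.
  constexpr int64_t kMaxDeltaUs = 500'000;                  // 500 ms.
  // Credit the debt for the time that has already passed since the last
  // feedback, then add the size of the report about to be sent.
  const int64_t paid_bytes =
      elapsed_since_last_send_us * kMaxFeedbackBytesPerSec / 1'000'000;
  debt_bytes = std::max<int64_t>(0, debt_bytes - paid_bytes) + new_feedback_bytes;
  // The remaining debt, drained at 500 kbit/s, decides the wait before the
  // next report, clamped to [25 ms, 500 ms].
  return std::clamp<int64_t>(debt_bytes * 1'000'000 / kMaxFeedbackBytesPerSec,
                             kMinDeltaUs, kMaxDeltaUs);
}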
+class CongestionControlFeedbackGenerator + : public RtpTransportFeedbackGenerator { + public: + CongestionControlFeedbackGenerator( + const Environment& env, + RtpTransportFeedbackGenerator::RtcpSender feedback_sender); + ~CongestionControlFeedbackGenerator() = default; + + void OnReceivedPacket(const RtpPacketReceived& packet) override; + + void OnSendBandwidthEstimateChanged(DataRate estimate) override {} + + TimeDelta Process(Timestamp now) override; + + private: + Timestamp NextFeedbackTime() const RTC_RUN_ON(sequence_checker_); + + void SendFeedback(Timestamp now) RTC_RUN_ON(sequence_checker_); + + void CalculateNextPossibleSendTime(DataSize feedback_size, Timestamp now) + RTC_RUN_ON(sequence_checker_); + + const Environment env_; + SequenceChecker sequence_checker_; + const RtcpSender rtcp_sender_; + + FieldTrialParameter min_time_between_feedback_; + FieldTrialParameter max_time_to_wait_for_packet_with_marker_; + FieldTrialParameter max_time_between_feedback_; + + DataSize packet_overhead_ = DataSize::Zero(); + DataSize send_rate_debt_ = DataSize::Zero(); + + std::map + feedback_trackers_; + + // std::vector packets_; + Timestamp last_feedback_sent_time_ = Timestamp::Zero(); + std::optional first_arrival_time_since_feedback_; + bool marker_bit_seen_ = false; + Timestamp next_possible_feedback_send_time_ = Timestamp::Zero(); +}; + +} // namespace webrtc + +#endif // MODULES_REMOTE_BITRATE_ESTIMATOR_CONGESTION_CONTROL_FEEDBACK_GENERATOR_H_ diff --git a/modules/remote_bitrate_estimator/congestion_control_feedback_generator_unittest.cc b/modules/remote_bitrate_estimator/congestion_control_feedback_generator_unittest.cc new file mode 100644 index 0000000000..1f91735df9 --- /dev/null +++ b/modules/remote_bitrate_estimator/congestion_control_feedback_generator_unittest.cc @@ -0,0 +1,321 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/remote_bitrate_estimator/congestion_control_feedback_generator.h" + +#include +#include +#include +#include +#include + +#include "api/environment/environment_factory.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" +#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "rtc_base/buffer.h" +#include "rtc_base/network/ecn_marking.h" +#include "system_wrappers/include/clock.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using rtcp::CongestionControlFeedback; +using ::testing::MockFunction; +using ::testing::SizeIs; +using ::testing::WithoutArgs; + +RtpPacketReceived CreatePacket(Timestamp arrival_time, + bool marker, + uint32_t ssrc = 1234, + uint16_t seq = 1, + EcnMarking /* ecn */ = EcnMarking::kNotEct) { + RtpPacketReceived packet; + packet.SetSsrc(ssrc); + packet.SetSequenceNumber(seq); + packet.SetMarker(marker); + packet.set_arrival_time(arrival_time); + return packet; +} + +// If possible feedback should be sent when a packet with marker bit is +// received in order to provide feedback as soon as possible after receiving a +// complete frame. On good networks, this means that a sender may receive +// feedback for every sent frame. +TEST(CongestionControlFeedbackGeneratorTest, + SendsFeedbackAfterPacketWithMarkerBitReceived) { + MockFunction>)> + rtcp_sender; + SimulatedClock clock(123456); + CongestionControlFeedbackGenerator generator(CreateEnvironment(&clock), + rtcp_sender.AsStdFunction()); + + EXPECT_GT(generator.Process(clock.CurrentTime()), TimeDelta::Millis(10)); + clock.AdvanceTimeMilliseconds(10); + + EXPECT_CALL(rtcp_sender, Call); + generator.OnReceivedPacket( + CreatePacket(clock.CurrentTime(), /*marker=*/false)); + generator.OnReceivedPacket( + CreatePacket(clock.CurrentTime(), /*marker=*/true)); +} + +TEST(CongestionControlFeedbackGeneratorTest, + SendsFeedbackDelayedIfNoPacketWithMarkerBitReceived) { + MockFunction>)> + rtcp_sender; + SimulatedClock clock(123456); + CongestionControlFeedbackGenerator generator(CreateEnvironment(&clock), + rtcp_sender.AsStdFunction()); + + TimeDelta time_to_next = generator.Process(clock.CurrentTime()); + EXPECT_EQ(time_to_next, TimeDelta::Millis(25)); + clock.AdvanceTimeMilliseconds(10); + generator.OnReceivedPacket( + CreatePacket(clock.CurrentTime(), /*marker=*/false)); + // Expect feedback to be delayed another 25ms since no packet with marker is + // received. 
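// For reference, a standalone sketch (not part of the patch) of the rule these
// tests exercise; it mirrors the logic of NextFeedbackTime() in
// congestion_control_feedback_generator.cc with timestamps reduced to plain
// microseconds. The 25 ms values are the defaults of the
// WebRTC-RFC8888CongestionControlFeedback field trial parameters.
#include <algorithm>
#include <cstdint>
#include <optional>

int64_t NextFeedbackTimeUs(std::optional<int64_t> first_arrival_us,
                           bool marker_seen,
                           int64_t now_us,
                           int64_t next_possible_send_us) {
  constexpr int64_t kMinSendDeltaUs = 25'000;      // min_send_delta.
  constexpr int64_t kMaxWaitForMarkerUs = 25'000;  // max_wait_for_marker.
  if (!first_arrival_us) {
    // Nothing received since the last feedback: wait at least the min delta.
    return std::max(now_us + kMinSendDeltaUs, next_possible_send_us);
  }
  if (!marker_seen) {
    // No end-of-frame marker yet: wait for one, but at most 25 ms after the
    // first packet received since the previous feedback.
    return std::max(next_possible_send_us,
                    *first_arrival_us + kMaxWaitForMarkerUs);
  }
  return next_possible_send_us;
}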
+  Timestamp expected_feedback_time =
+      clock.CurrentTime() + TimeDelta::Millis(25);
+  EXPECT_CALL(rtcp_sender, Call).WillOnce(WithoutArgs([&] {
+    EXPECT_EQ(clock.CurrentTime(), expected_feedback_time);
+  }));
+  clock.AdvanceTime(time_to_next - TimeDelta::Millis(10));
+  time_to_next = generator.Process(clock.CurrentTime());
+  clock.AdvanceTime(time_to_next);
+  time_to_next = generator.Process(clock.CurrentTime());
+}
+
+TEST(CongestionControlFeedbackGeneratorTest,
+     SendsFeedbackAfterMinTimeIfPacketsWithMarkerBitReceived) {
+  MockFunction>)>
+      rtcp_sender;
+  constexpr TimeDelta kSmallTimeInterval = TimeDelta::Millis(2);
+  SimulatedClock clock(123456);
+  CongestionControlFeedbackGenerator generator(CreateEnvironment(&clock),
+                                               rtcp_sender.AsStdFunction());
+
+  TimeDelta time_to_next_process = generator.Process(clock.CurrentTime());
+  Timestamp expected_feedback_time = clock.CurrentTime();
+  EXPECT_CALL(rtcp_sender, Call).Times(2).WillRepeatedly(WithoutArgs([&] {
+    EXPECT_EQ(clock.CurrentTime(), expected_feedback_time);
+    // Next feedback cannot be sent until 25ms after the previous one.
+    expected_feedback_time += TimeDelta::Millis(25);
+  }));
+
+  // 3 packets are received, with an interval kSmallTimeInterval.
+  for (int i = 0; i < 3; ++i) {
+    generator.OnReceivedPacket(
+        CreatePacket(clock.CurrentTime(), /*marker=*/true));
+    clock.AdvanceTime(kSmallTimeInterval);
+    time_to_next_process -= kSmallTimeInterval;
+  }
+  clock.AdvanceTime(time_to_next_process);
+  time_to_next_process = generator.Process(clock.CurrentTime());
+  clock.AdvanceTime(time_to_next_process);
+  time_to_next_process = generator.Process(clock.CurrentTime());
+}
+
+TEST(CongestionControlFeedbackGeneratorTest,
+     FeedbackFor30KPacketsUtilizeLessThan500kbitPerSecond) {
+  MockFunction>)>
+      rtcp_sender;
+  SimulatedClock clock(123456);
+  CongestionControlFeedbackGenerator generator(CreateEnvironment(&clock),
+                                               rtcp_sender.AsStdFunction());
+
+  int number_of_feedback_packets = 0;
+  DataSize total_feedback_size;
+  EXPECT_CALL(rtcp_sender, Call)
+      .WillRepeatedly(
+          [&](std::vector> rtcp_packets) {
+            ASSERT_THAT(rtcp_packets, SizeIs(1));
+            number_of_feedback_packets++;
+            total_feedback_size +=
+                DataSize::Bytes(rtcp_packets[0]->BlockLength());
+          });
+  Timestamp start_time = clock.CurrentTime();
+  Timestamp last_process_time = clock.CurrentTime();
+  TimeDelta time_to_next_process = generator.Process(clock.CurrentTime());
+  uint16_t rtp_sequence_number = 0;
+  // Receive 30 packets per ms for 1s => 30'000 packets.
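// For reference (not part of the patch): rough arithmetic for why exactly 40
// feedback packets are expected below. The 25 ms minimum interval caps the
// number of reports at 40 per second, and the resulting feedback rate stays
// just under the 500 kbit/s budget (RFC 8888 spends 2 bytes per reported
// packet plus a small per-report header, approximated here as 20 bytes).
constexpr int kPacketsPerSecond = 30 * 1000;  // 30 packets per ms for 1 s.
constexpr int kReportsPerSecond = 1000 / 25;  // One report per 25 ms.
constexpr int kPacketsPerReport = kPacketsPerSecond / kReportsPerSecond;  // 750.
constexpr int kApproxReportBytes = 2 * kPacketsPerReport + 20;
constexpr int kApproxFeedbackBitsPerSecond =
    8 * kApproxReportBytes * kReportsPerSecond;
static_assert(kReportsPerSecond == 40, "");
static_assert(kApproxFeedbackBitsPerSecond < 500'000,
              "Roughly 486 kbit/s, just below the 500 kbit/s budget.");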
+  while (clock.CurrentTime() < start_time + TimeDelta::Seconds(1)) {
+    for (int i = 0; i < 30; ++i) {
+      generator.OnReceivedPacket(CreatePacket(clock.CurrentTime(),
+                                              /*marker=*/true, /*ssrc=*/1234,
+                                              rtp_sequence_number++));
+    }
+    if (clock.CurrentTime() >= last_process_time + time_to_next_process) {
+      last_process_time = clock.CurrentTime();
+      time_to_next_process = generator.Process(clock.CurrentTime());
+    }
+    clock.AdvanceTime(TimeDelta::Millis(1));
+  }
+
+  EXPECT_LE(total_feedback_size / TimeDelta::Seconds(1),
+            DataRate::KilobitsPerSec(500));
+  EXPECT_EQ(number_of_feedback_packets, 40);
+}
+
+TEST(CongestionControlFeedbackGeneratorTest,
+     FeedbackFor60KPacketsUtilizeApproximately500kbitPerSecond) {
+  MockFunction>)>
+      rtcp_sender;
+  SimulatedClock clock(123456);
+  CongestionControlFeedbackGenerator generator(CreateEnvironment(&clock),
+                                               rtcp_sender.AsStdFunction());
+
+  int number_of_feedback_packets = 0;
+  DataSize total_feedback_size;
+  DataSize last_feedback_size;
+  EXPECT_CALL(rtcp_sender, Call)
+      .WillRepeatedly(
+          [&](std::vector> rtcp_packets) {
+            ASSERT_THAT(rtcp_packets, SizeIs(1));
+            number_of_feedback_packets++;
+            last_feedback_size =
+                DataSize::Bytes(rtcp_packets[0]->BlockLength());
+            total_feedback_size += last_feedback_size;
+          });
+  Timestamp start_time = clock.CurrentTime();
+  Timestamp last_process_time = clock.CurrentTime();
+  TimeDelta time_to_next_process = generator.Process(clock.CurrentTime());
+  uint16_t rtp_sequence_number = 0;
+  // Receive 60 packets per ms for 1s => 60'000 packets.
+  while (clock.CurrentTime() < start_time + TimeDelta::Seconds(1)) {
+    for (int i = 0; i < 60; ++i) {
+      generator.OnReceivedPacket(CreatePacket(clock.CurrentTime(),
+                                              /*marker=*/true, /*ssrc=*/1234,
+                                              rtp_sequence_number++));
+    }
+    if (clock.CurrentTime() >= last_process_time + time_to_next_process) {
+      last_process_time = clock.CurrentTime();
+      time_to_next_process = generator.Process(clock.CurrentTime());
+    }
+    clock.AdvanceTime(TimeDelta::Millis(1));
+  }
+  EXPECT_LE(total_feedback_size,
+            DataSize::Bytes(500'000 / 8) + last_feedback_size);
+  EXPECT_LT(number_of_feedback_packets, 40);
+}
+
+TEST(CongestionControlFeedbackGeneratorTest,
+     CanGenerateRtcpPacketFromTwoSsrcWithMissingPacketsAndWrap) {
+  MockFunction>)>
+      rtcp_sender;
+  SimulatedClock clock(123456);
+  constexpr TimeDelta kSmallTimeInterval = TimeDelta::Millis(2);
+  CongestionControlFeedbackGenerator generator(CreateEnvironment(&clock),
+                                               rtcp_sender.AsStdFunction());
+
+  TimeDelta time_to_next_process = generator.Process(clock.CurrentTime());
+
+  // Receive packets out of order, with missing packets (6 between 0xFFFA and
+  // 1, and 4 between 0xFFFC and 1) => a total of 14 packets are expected in
+  // the feedback.
+  const std::vector kReceivedPackets = {
+      // Reordered packet.
+      CreatePacket(clock.CurrentTime() + kSmallTimeInterval, /*marker*/ false,
+                   /*ssrc=*/123,
+                   /*seq=*/0xFFFA),
+      CreatePacket(clock.CurrentTime(), /*marker*/ false, /*ssrc=*/123,
+                   /*seq=*/1),
+      // Reordered packet.
+ CreatePacket(clock.CurrentTime() + kSmallTimeInterval, + /*marker*/ false, /*ssrc=*/ + /*ssrc=*/234, + /*seq=*/0xFFFC), + CreatePacket(clock.CurrentTime(), /*marker*/ false, /*ssrc=*/234, + /*seq=*/1), + }; + + EXPECT_CALL(rtcp_sender, Call) + .WillOnce( + [&](std::vector> rtcp_packets) { + ASSERT_THAT(rtcp_packets, SizeIs(1)); + rtcp::CongestionControlFeedback* rtcp = + static_cast( + rtcp_packets[0].get()); + + ASSERT_THAT(rtcp->packets(), SizeIs(14)); + Buffer buffer = rtcp->Build(); + CongestionControlFeedback parsed_fb; + rtcp::CommonHeader header; + EXPECT_TRUE(header.Parse(buffer.data(), buffer.size())); + EXPECT_TRUE(parsed_fb.Parse(header)); + EXPECT_THAT(parsed_fb.packets(), SizeIs(14)); + }); + + std::vector receive_time_sorted = kReceivedPackets; + std::sort(receive_time_sorted.begin(), receive_time_sorted.end(), + [](const RtpPacketReceived& a, const RtpPacketReceived& b) { + return a.arrival_time() < b.arrival_time(); + }); + for (const RtpPacketReceived& packet : receive_time_sorted) { + TimeDelta time_to_receive = packet.arrival_time() - clock.CurrentTime(); + time_to_next_process -= time_to_receive; + clock.AdvanceTime(time_to_receive); + generator.OnReceivedPacket(packet); + } + clock.AdvanceTime(time_to_next_process); + time_to_next_process = generator.Process(clock.CurrentTime()); + clock.AdvanceTime(time_to_next_process); + generator.Process(clock.CurrentTime()); +} + +TEST(CongestionControlFeedbackGeneratorTest, + ReportsFirstReceivedPacketArrivalTimeButEcnFromCePacketIfDuplicate) { + MockFunction>)> + rtcp_sender; + SimulatedClock clock(123456); + constexpr TimeDelta kSmallTimeInterval = TimeDelta::Millis(2); + CongestionControlFeedbackGenerator generator(CreateEnvironment(&clock), + rtcp_sender.AsStdFunction()); + + TimeDelta time_to_next_process = generator.Process(clock.CurrentTime()); + RtpPacketReceived packet_1 = + CreatePacket(clock.CurrentTime(), /*marker=*/false, /* ssrc=*/1, + /* seq=*/2, EcnMarking::kEct1); + generator.OnReceivedPacket(packet_1); + RtpPacketReceived packet_2 = packet_1; + packet_2.set_arrival_time(clock.CurrentTime() + kSmallTimeInterval); + packet_2.set_ecn(EcnMarking::kCe); + time_to_next_process -= kSmallTimeInterval; + clock.AdvanceTime(kSmallTimeInterval); + generator.OnReceivedPacket(packet_2); + + EXPECT_CALL(rtcp_sender, Call) + .WillOnce( + [&](std::vector> rtcp_packets) { + ASSERT_THAT(rtcp_packets, SizeIs(1)); + rtcp::CongestionControlFeedback* rtcp = + static_cast( + rtcp_packets[0].get()); + Timestamp feedback_send_time = clock.CurrentTime(); + ASSERT_THAT(rtcp->packets(), SizeIs(1)); + EXPECT_EQ(rtcp->packets()[0].ecn, EcnMarking::kCe); + EXPECT_EQ(rtcp->packets()[0].arrival_time_offset, + feedback_send_time - packet_1.arrival_time()); + }); + + clock.AdvanceTime(time_to_next_process); + time_to_next_process = generator.Process(clock.CurrentTime()); + clock.AdvanceTime(time_to_next_process); + generator.Process(clock.CurrentTime()); +} + +} // namespace +} // namespace webrtc diff --git a/modules/remote_bitrate_estimator/congestion_control_feedback_tracker.cc b/modules/remote_bitrate_estimator/congestion_control_feedback_tracker.cc new file mode 100644 index 0000000000..b871dd314b --- /dev/null +++ b/modules/remote_bitrate_estimator/congestion_control_feedback_tracker.cc @@ -0,0 +1,121 @@ + +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/remote_bitrate_estimator/congestion_control_feedback_tracker.h"
+
+#include
+#include
+#include
+
+#include "absl/algorithm/container.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/network/ecn_marking.h"
+
+namespace webrtc {
+
+constexpr int kMaxPacketsPerSsrc = 16384;
+
+void CongestionControlFeedbackTracker::ReceivedPacket(
+    const RtpPacketReceived& packet) {
+  if (packets_.size() > kMaxPacketsPerSsrc) {
+    RTC_LOG(LS_VERBOSE)
+        << "Unexpected number of packets without sending reports:"
+        << packets_.size();
+    return;
+  }
+  int64_t unwrapped_sequence_number =
+      unwrapper_.Unwrap(packet.SequenceNumber());
+  if (last_sequence_number_in_feedback_ &&
+      unwrapped_sequence_number < *last_sequence_number_in_feedback_ + 1) {
+    RTC_LOG(LS_WARNING)
+        << "Received packet out of order between feedback reports. SSRC: "
+        << packet.Ssrc() << " Seq: " << packet.SequenceNumber()
+        << " last feedback: "
+        << static_cast(*last_sequence_number_in_feedback_);
+    // TODO: bugs.webrtc.org/374550342 - According to the spec, the old packets
+    // should be reported again. But at the moment, we don't store a history of
+    // packets we already reported and thus they will be reported as lost.
+    // Note that this is likely not a problem in webrtc since the packets will
+    // also be removed from the send history when they are first reported as
+    // received.
+    last_sequence_number_in_feedback_ = unwrapped_sequence_number - 1;
+  }
+  packets_.push_back({.ssrc = packet.Ssrc(),
+                      .unwrapped_sequence_number = unwrapped_sequence_number,
+                      .arrival_time = packet.arrival_time(),
+                      .ecn = packet.ecn()});
+}
+
+void CongestionControlFeedbackTracker::AddPacketsToFeedback(
+    Timestamp feedback_time,
+    std::vector& packet_feedback) {
+  if (packets_.empty()) {
+    return;
+  }
+  absl::c_sort(packets_, [](const PacketInfo& a, const PacketInfo& b) {
+    return std::tie(a.unwrapped_sequence_number, a.arrival_time) <
+           std::tie(b.unwrapped_sequence_number, b.arrival_time);
+  });
+  if (!last_sequence_number_in_feedback_) {
+    last_sequence_number_in_feedback_ =
+        packets_.front().unwrapped_sequence_number - 1;
+  }
+
+  auto packet_it = packets_.begin();
+  uint32_t ssrc = packet_it->ssrc;
+  for (int64_t sequence_number = *last_sequence_number_in_feedback_ + 1;
+       sequence_number <= packets_.back().unwrapped_sequence_number &&
+       sequence_number <=
+           *last_sequence_number_in_feedback_ + kMaxPacketsPerSsrc;
+       ++sequence_number) {
+    RTC_DCHECK(packet_it != packets_.end());
+    RTC_DCHECK_EQ(ssrc, packet_it->ssrc);
+
+    EcnMarking ecn = EcnMarking::kNotEct;
+    TimeDelta arrival_time_offset = TimeDelta::MinusInfinity();
+
+    if (sequence_number == packet_it->unwrapped_sequence_number) {
+      arrival_time_offset = feedback_time - packet_it->arrival_time;
+      ecn = packet_it->ecn;
+      ++packet_it;
+      while (packet_it != packets_.end() &&
+             packet_it->unwrapped_sequence_number == sequence_number) {
+        // According to RFC 8888:
+        // If duplicate copies of a particular RTP packet are received, then
+        // the arrival time of the first copy to arrive MUST be reported.
+        // If any of the copies of the duplicated packet are ECN-CE marked,
+        // then an ECN-CE mark MUST be reported for that packet; otherwise,
+        // the ECN mark of the first copy to arrive is reported.
+        if (packet_it->ecn == EcnMarking::kCe) {
+          ecn = EcnMarking::kCe;
+        }
+        RTC_LOG(LS_WARNING) << "Received duplicate packet ssrc:" << ssrc
+                            << " seq:" << static_cast(sequence_number)
+                            << " ecn: " << static_cast(ecn);
+        ++packet_it;
+      }
+    }  // else - the packet has not been received yet.
+    packet_feedback.push_back(
+        {.ssrc = ssrc,
+         .sequence_number = static_cast(sequence_number),
+         .arrival_time_offset = arrival_time_offset,
+         .ecn = ecn});
+  }
+  last_sequence_number_in_feedback_ = packets_.back().unwrapped_sequence_number;
+  packets_.clear();
+}
+
+}  // namespace webrtc
diff --git a/modules/remote_bitrate_estimator/congestion_control_feedback_tracker.h b/modules/remote_bitrate_estimator/congestion_control_feedback_tracker.h
new file mode 100644
index 0000000000..f2b2727864
--- /dev/null
+++ b/modules/remote_bitrate_estimator/congestion_control_feedback_tracker.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef MODULES_REMOTE_BITRATE_ESTIMATOR_CONGESTION_CONTROL_FEEDBACK_TRACKER_H_
+#define MODULES_REMOTE_BITRATE_ESTIMATOR_CONGESTION_CONTROL_FEEDBACK_TRACKER_H_
+
+#include
+#include
+#include
+
+#include "api/units/timestamp.h"
+#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+#include "rtc_base/network/ecn_marking.h"
+#include "rtc_base/numerics/sequence_number_unwrapper.h"
+namespace webrtc {
+
+// CongestionControlFeedbackTracker is responsible for creating and keeping
+// track of feedback sent for a specific SSRC when feedback is sent according
+// to https://datatracker.ietf.org/doc/rfc8888/
+class CongestionControlFeedbackTracker {
+ public:
+  CongestionControlFeedbackTracker() = default;
+
+  void ReceivedPacket(const RtpPacketReceived& packet);
+
+  // Adds received packets to `packet_feedback`.
+  // RTP sequence numbers are continuous from the last created feedback unless
+  // reordering has occurred between feedback packets. If so, the sequence
+  // number range may overlap with previously sent feedback.
+  void AddPacketsToFeedback(
+      Timestamp feedback_time,
+      std::vector&
+          packet_feedback);
+
+ private:
+  struct PacketInfo {
+    uint32_t ssrc;
+    int64_t unwrapped_sequence_number = 0;
+    Timestamp arrival_time;
+    EcnMarking ecn = EcnMarking::kNotEct;
+  };
+
+  std::optional last_sequence_number_in_feedback_;
+  SeqNumUnwrapper unwrapper_;
+
+  std::vector packets_;
+};
+
+}  // namespace webrtc
+
+#endif  // MODULES_REMOTE_BITRATE_ESTIMATOR_CONGESTION_CONTROL_FEEDBACK_TRACKER_H_
diff --git a/modules/remote_bitrate_estimator/congestion_control_feedback_tracker_unittest.cc b/modules/remote_bitrate_estimator/congestion_control_feedback_tracker_unittest.cc
new file mode 100644
index 0000000000..e70e207a32
--- /dev/null
+++ b/modules/remote_bitrate_estimator/congestion_control_feedback_tracker_unittest.cc
@@ -0,0 +1,199 @@
+/*
+ * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved.
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/remote_bitrate_estimator/congestion_control_feedback_tracker.h" + +#include +#include + +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "rtc_base/network/ecn_marking.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::AllOf; +using ::testing::Field; +using ::testing::Property; +using ::testing::SizeIs; + +RtpPacketReceived CreatePacket(Timestamp arrival_time, + uint16_t seq = 1, + EcnMarking ecn = EcnMarking::kNotEct) { + RtpPacketReceived packet; + packet.SetSsrc(1234); + packet.SetSequenceNumber(seq); + packet.set_arrival_time(arrival_time); + packet.set_ecn(ecn); + return packet; +} + +TEST(CongestionControlFeedbackTrackerTest, + FeedbackIncludeReceivedPacketsInSequenceNumberOrder) { + RtpPacketReceived packet_1 = + CreatePacket(/*arrival_time=*/Timestamp::Millis(123), /*seq =*/2); + RtpPacketReceived packet_2 = + CreatePacket(/*arrival_time=*/Timestamp::Millis(125), /*seq=*/1); + + CongestionControlFeedbackTracker tracker; + tracker.ReceivedPacket(packet_1); + tracker.ReceivedPacket(packet_2); + + Timestamp feedback_time = Timestamp::Millis(567); + std::vector feedback_info; + tracker.AddPacketsToFeedback(feedback_time, feedback_info); + ASSERT_THAT(feedback_info, SizeIs(2)); + EXPECT_THAT( + feedback_info[0], + AllOf( + Field(&rtcp::CongestionControlFeedback::PacketInfo::sequence_number, + packet_2.SequenceNumber()), + Field( + &rtcp::CongestionControlFeedback::PacketInfo::arrival_time_offset, + feedback_time - packet_2.arrival_time()))); + EXPECT_THAT( + feedback_info[1], + AllOf( + Field(&rtcp::CongestionControlFeedback::PacketInfo::sequence_number, + packet_1.SequenceNumber()), + Field( + &rtcp::CongestionControlFeedback::PacketInfo::arrival_time_offset, + feedback_time - packet_1.arrival_time()))); +} + +TEST(CongestionControlFeedbackTrackerTest, + ReportsFirstReceivedPacketArrivalTimeButEcnFromCePacketIfDuplicate) { + RtpPacketReceived packet_1 = CreatePacket( + /*arrival_time=*/Timestamp::Millis(123), /*seq =*/1, EcnMarking::kEct1); + RtpPacketReceived packet_2 = CreatePacket( + /*arrival_time=*/Timestamp::Millis(125), /*seq=*/1, EcnMarking::kCe); + RtpPacketReceived packet_3 = CreatePacket( + /*arrival_time=*/Timestamp::Millis(126), /*seq=*/1, EcnMarking::kEct1); + + CongestionControlFeedbackTracker tracker; + tracker.ReceivedPacket(packet_1); + tracker.ReceivedPacket(packet_2); + tracker.ReceivedPacket(packet_3); + + Timestamp feedback_time = Timestamp::Millis(567); + std::vector feedback_info; + tracker.AddPacketsToFeedback(feedback_time, feedback_info); + ASSERT_THAT(feedback_info, SizeIs(1)); + EXPECT_THAT( + feedback_info[0], + AllOf( + Field( + &rtcp::CongestionControlFeedback::PacketInfo::arrival_time_offset, + feedback_time - packet_1.arrival_time()), + Field(&rtcp::CongestionControlFeedback::PacketInfo::ecn, + EcnMarking::kCe))); +} + +TEST(CongestionControlFeedbackTrackerTest, + FeedbackGeneratesContinouseSequenceNumbers) { + RtpPacketReceived packet_1 = + 
CreatePacket(/*arrival_time=*/Timestamp::Millis(123), /*seq =*/1); + // Packet with sequence number 2 is lost or reordered. + RtpPacketReceived packet_2 = CreatePacket( + /*arrival_time=*/Timestamp::Millis(125), /*seq=*/3); + + CongestionControlFeedbackTracker tracker; + tracker.ReceivedPacket(packet_1); + tracker.ReceivedPacket(packet_2); + + std::vector feedback_info; + Timestamp feedback_time = Timestamp::Millis(567); + tracker.AddPacketsToFeedback(feedback_time, feedback_info); + ASSERT_THAT(feedback_info, SizeIs(3)); + EXPECT_THAT(feedback_info[0].sequence_number, 1); + EXPECT_THAT(feedback_info[0].arrival_time_offset, + feedback_time - packet_1.arrival_time()); + EXPECT_THAT(feedback_info[1].sequence_number, 2); + EXPECT_THAT(feedback_info[1].arrival_time_offset, TimeDelta::MinusInfinity()); + EXPECT_THAT(feedback_info[2].sequence_number, 3); + EXPECT_THAT(feedback_info[2].arrival_time_offset, + feedback_time - packet_2.arrival_time()); +} + +TEST(CongestionControlFeedbackTrackerTest, + FeedbackGeneratesContinouseSequenceNumbersBetweenFeedbackPackets) { + RtpPacketReceived packet_1 = + CreatePacket(/*arrival_time=*/Timestamp::Millis(123), /*seq =*/1); + RtpPacketReceived packet_2 = CreatePacket( + /*arrival_time=*/Timestamp::Millis(125), /*seq=*/3); + + CongestionControlFeedbackTracker tracker; + tracker.ReceivedPacket(packet_1); + + std::vector feedback_info; + Timestamp feedback_time = Timestamp::Millis(567); + tracker.AddPacketsToFeedback(feedback_time, feedback_info); + ASSERT_THAT(feedback_info, SizeIs(1)); + EXPECT_THAT(feedback_info[0].sequence_number, 1); + EXPECT_THAT(feedback_info[0].arrival_time_offset, + feedback_time - packet_1.arrival_time()); + + feedback_info.clear(); + feedback_time = Timestamp::Millis(678); + tracker.ReceivedPacket(packet_2); + tracker.AddPacketsToFeedback(feedback_time, feedback_info); + ASSERT_THAT(feedback_info, SizeIs(2)); + EXPECT_THAT(feedback_info[0].sequence_number, 2); + EXPECT_THAT(feedback_info[0].arrival_time_offset, TimeDelta::MinusInfinity()); + EXPECT_THAT(feedback_info[1].sequence_number, 3); + EXPECT_THAT(feedback_info[1].arrival_time_offset, + feedback_time - packet_2.arrival_time()); +} + +TEST(CongestionControlFeedbackTrackerTest, + FeedbackGeneratesRepeatedSequenceNumbersOnReorderingBetweenFeedback) { + RtpPacketReceived packet_1 = + CreatePacket(/*arrival_time=*/Timestamp::Millis(123), /*seq =*/2); + RtpPacketReceived packet_2 = CreatePacket( + /*arrival_time=*/Timestamp::Millis(125), /*seq=*/1); + RtpPacketReceived packet_3 = CreatePacket( + /*arrival_time=*/Timestamp::Millis(125), /*seq=*/3); + + CongestionControlFeedbackTracker tracker; + tracker.ReceivedPacket(packet_1); + + std::vector feedback_info; + Timestamp feedback_time = Timestamp::Millis(567); + tracker.AddPacketsToFeedback(feedback_time, feedback_info); + ASSERT_THAT(feedback_info, SizeIs(1)); + EXPECT_THAT(feedback_info[0].sequence_number, 2); + EXPECT_THAT(feedback_info[0].arrival_time_offset, + feedback_time - packet_1.arrival_time()); + + feedback_info.clear(); + feedback_time = Timestamp::Millis(678); + tracker.ReceivedPacket(packet_2); + tracker.ReceivedPacket(packet_3); + tracker.AddPacketsToFeedback(feedback_time, feedback_info); + ASSERT_THAT(feedback_info, SizeIs(3)); + EXPECT_THAT(feedback_info[0].sequence_number, 1); + EXPECT_THAT(feedback_info[0].arrival_time_offset, + feedback_time - packet_2.arrival_time()); + EXPECT_THAT(feedback_info[1].sequence_number, 2); + // TODO: bugs.webrtc.org/374550342 - This is against the spec. 
According to + // the specification, we should have kept the history. + EXPECT_THAT(feedback_info[1].arrival_time_offset, TimeDelta::MinusInfinity()); + EXPECT_THAT(feedback_info[2].sequence_number, 3); + EXPECT_THAT(feedback_info[2].arrival_time_offset, + feedback_time - packet_3.arrival_time()); +} + +} // namespace +} // namespace webrtc diff --git a/modules/remote_bitrate_estimator/include/bwe_defines.h b/modules/remote_bitrate_estimator/include/bwe_defines.h index 4596b13863..e3f4e8c952 100644 --- a/modules/remote_bitrate_estimator/include/bwe_defines.h +++ b/modules/remote_bitrate_estimator/include/bwe_defines.h @@ -13,8 +13,9 @@ #include -#include "absl/types/optional.h" -#include "api/network_state_predictor.h" +#include + +#include "api/transport/bandwidth_usage.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" @@ -36,11 +37,11 @@ enum BweNames { struct RateControlInput { RateControlInput(BandwidthUsage bw_state, - const absl::optional& estimated_throughput); + const std::optional& estimated_throughput); ~RateControlInput(); BandwidthUsage bw_state; - absl::optional estimated_throughput; + std::optional estimated_throughput; }; } // namespace webrtc diff --git a/modules/remote_bitrate_estimator/inter_arrival.cc b/modules/remote_bitrate_estimator/inter_arrival.cc index 06ff580b92..875c1f830c 100644 --- a/modules/remote_bitrate_estimator/inter_arrival.cc +++ b/modules/remote_bitrate_estimator/inter_arrival.cc @@ -10,7 +10,11 @@ #include "modules/remote_bitrate_estimator/inter_arrival.h" +#include +#include + #include "modules/include/module_common_types_public.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { diff --git a/modules/remote_bitrate_estimator/inter_arrival_unittest.cc b/modules/remote_bitrate_estimator/inter_arrival_unittest.cc index 581963e96d..80cb74cc65 100644 --- a/modules/remote_bitrate_estimator/inter_arrival_unittest.cc +++ b/modules/remote_bitrate_estimator/inter_arrival_unittest.cc @@ -10,6 +10,8 @@ #include "modules/remote_bitrate_estimator/inter_arrival.h" +#include +#include #include #include "test/gtest.h" diff --git a/modules/remote_bitrate_estimator/overuse_detector.cc b/modules/remote_bitrate_estimator/overuse_detector.cc index 888f18cd9e..33996add7d 100644 --- a/modules/remote_bitrate_estimator/overuse_detector.cc +++ b/modules/remote_bitrate_estimator/overuse_detector.cc @@ -14,10 +14,9 @@ #include #include -#include +#include -#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" -#include "rtc_base/checks.h" +#include "api/transport/bandwidth_usage.h" #include "rtc_base/numerics/safe_minmax.h" namespace webrtc { @@ -45,8 +44,6 @@ BandwidthUsage OveruseDetector::Detect(double offset, return BandwidthUsage::kBwNormal; } const double T = std::min(num_of_deltas, kMaxNumDeltas) * offset; - BWE_TEST_LOGGING_PLOT(1, "T", now_ms, T); - BWE_TEST_LOGGING_PLOT(1, "threshold", now_ms, threshold_); if (T > threshold_) { if (time_over_using_ == -1) { // Initialize the timer. 
Assume that we've been @@ -96,7 +93,7 @@ void OveruseDetector::UpdateThreshold(double modified_offset, int64_t now_ms) { const int64_t kMaxTimeDeltaMs = 100; int64_t time_delta_ms = std::min(now_ms - last_update_ms_, kMaxTimeDeltaMs); threshold_ += k * (fabs(modified_offset) - threshold_) * time_delta_ms; - threshold_ = rtc::SafeClamp(threshold_, 6.f, 600.f); + threshold_ = SafeClamp(threshold_, 6.f, 600.f); last_update_ms_ = now_ms; } diff --git a/modules/remote_bitrate_estimator/overuse_detector.h b/modules/remote_bitrate_estimator/overuse_detector.h index 444e8eece5..8c1c63cda9 100644 --- a/modules/remote_bitrate_estimator/overuse_detector.h +++ b/modules/remote_bitrate_estimator/overuse_detector.h @@ -12,7 +12,7 @@ #include -#include "api/network_state_predictor.h" +#include "api/transport/bandwidth_usage.h" namespace webrtc { diff --git a/modules/remote_bitrate_estimator/overuse_detector_unittest.cc b/modules/remote_bitrate_estimator/overuse_detector_unittest.cc index c4c72b1de5..8b064bdbc4 100644 --- a/modules/remote_bitrate_estimator/overuse_detector_unittest.cc +++ b/modules/remote_bitrate_estimator/overuse_detector_unittest.cc @@ -14,9 +14,10 @@ #include #include +#include #include -#include +#include "api/transport/bandwidth_usage.h" #include "modules/remote_bitrate_estimator/inter_arrival.h" #include "modules/remote_bitrate_estimator/overuse_estimator.h" #include "rtc_base/random.h" diff --git a/modules/remote_bitrate_estimator/overuse_estimator.cc b/modules/remote_bitrate_estimator/overuse_estimator.cc index 5ca21d9629..f02ebca4e8 100644 --- a/modules/remote_bitrate_estimator/overuse_estimator.cc +++ b/modules/remote_bitrate_estimator/overuse_estimator.cc @@ -11,12 +11,12 @@ #include "modules/remote_bitrate_estimator/overuse_estimator.h" #include -#include #include +#include -#include "api/network_state_predictor.h" -#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" +#include "api/transport/bandwidth_usage.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { @@ -33,10 +33,9 @@ void OveruseEstimator::Update(int64_t t_delta, double ts_delta, int size_delta, BandwidthUsage current_hypothesis, - int64_t now_ms) { + int64_t /* now_ms */) { const double min_frame_period = UpdateMinFramePeriod(ts_delta); const double t_ts_delta = t_delta - ts_delta; - BWE_TEST_LOGGING_PLOT(1, "dm_ms", now_ms, t_ts_delta); double fs_delta = size_delta; ++num_of_deltas_; @@ -59,8 +58,6 @@ void OveruseEstimator::Update(int64_t t_delta, const double Eh[2] = {E_[0][0] * h[0] + E_[0][1] * h[1], E_[1][0] * h[0] + E_[1][1] * h[1]}; - BWE_TEST_LOGGING_PLOT(1, "d_ms", now_ms, slope_ * h[0] - offset_); - const double residual = t_ts_delta - slope_ * h[0] - offset_; const bool in_stable_state = @@ -104,11 +101,6 @@ void OveruseEstimator::Update(int64_t t_delta, slope_ = slope_ + K[0] * residual; prev_offset_ = offset_; offset_ = offset_ + K[1] * residual; - - BWE_TEST_LOGGING_PLOT(1, "kc", now_ms, K[0]); - BWE_TEST_LOGGING_PLOT(1, "km", now_ms, K[1]); - BWE_TEST_LOGGING_PLOT(1, "slope_1/bps", now_ms, slope_); - BWE_TEST_LOGGING_PLOT(1, "var_noise", now_ms, var_noise_); } double OveruseEstimator::UpdateMinFramePeriod(double ts_delta) { diff --git a/modules/remote_bitrate_estimator/overuse_estimator.h b/modules/remote_bitrate_estimator/overuse_estimator.h index d75ae04b58..edde6e1f55 100644 --- a/modules/remote_bitrate_estimator/overuse_estimator.h +++ b/modules/remote_bitrate_estimator/overuse_estimator.h @@ -14,7 +14,7 @@ #include -#include 
"api/network_state_predictor.h" +#include "api/transport/bandwidth_usage.h" namespace webrtc { diff --git a/modules/remote_bitrate_estimator/packet_arrival_map_test.cc b/modules/remote_bitrate_estimator/packet_arrival_map_test.cc index d86f0397e7..b30c7422ae 100644 --- a/modules/remote_bitrate_estimator/packet_arrival_map_test.cc +++ b/modules/remote_bitrate_estimator/packet_arrival_map_test.cc @@ -9,7 +9,9 @@ */ #include "modules/remote_bitrate_estimator/packet_arrival_map.h" -#include "test/gmock.h" +#include + +#include "api/units/timestamp.h" #include "test/gtest.h" namespace webrtc { diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc index fcfbb2ecb5..5847b088b9 100644 --- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc +++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc @@ -13,16 +13,25 @@ #include #include +#include +#include +#include #include -#include +#include +#include -#include "api/transport/field_trial_based_config.h" +#include "absl/base/nullability.h" +#include "api/environment/environment.h" +#include "api/rtp_headers.h" +#include "api/transport/bandwidth_usage.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" +#include "modules/remote_bitrate_estimator/inter_arrival.h" +#include "modules/remote_bitrate_estimator/overuse_estimator.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" @@ -88,11 +97,9 @@ void RemoteBitrateEstimatorAbsSendTime::MaybeAddCluster( } RemoteBitrateEstimatorAbsSendTime::RemoteBitrateEstimatorAbsSendTime( - RemoteBitrateObserver* observer, - Clock* clock) - : clock_(clock), observer_(observer), remote_rate_(field_trials_) { - RTC_DCHECK(clock_); - RTC_DCHECK(observer_); + const Environment& env, + RemoteBitrateObserver* absl_nonnull observer) + : env_(env), observer_(observer), remote_rate_(env_.field_trials()) { RTC_LOG(LS_INFO) << "RemoteBitrateEstimatorAbsSendTime: Instantiating."; } @@ -225,12 +232,12 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacket( Timestamp send_time = Timestamp::Millis(static_cast(timestamp) * kTimestampToMs); - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); // TODO(holmer): SSRCs are only needed for REMB, should be broken out from // here. // Check if incoming bitrate estimate is valid, and if it needs to be reset. 
- absl::optional incoming_bitrate = + std::optional incoming_bitrate = incoming_bitrate_.Rate(arrival_time); if (incoming_bitrate) { incoming_bitrate_initialized_ = true; @@ -303,7 +310,7 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacket( remote_rate_.GetFeedbackInterval().ms()) { update_estimate = true; } else if (detector_.State() == BandwidthUsage::kBwOverusing) { - absl::optional incoming_rate = + std::optional incoming_rate = incoming_bitrate_.Rate(arrival_time); if (incoming_rate.has_value() && remote_rate_.TimeToReduceFurther(now, *incoming_rate)) { diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h index 9bcdfb8aff..a2b781a376 100644 --- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h +++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h @@ -17,29 +17,29 @@ #include #include #include -#include -#include "api/rtp_headers.h" -#include "api/transport/field_trial_based_config.h" +#include "absl/base/nullability.h" +#include "api/environment/environment.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "modules/remote_bitrate_estimator/aimd_rate_control.h" +#include "modules/remote_bitrate_estimator/include/bwe_defines.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "modules/remote_bitrate_estimator/inter_arrival.h" #include "modules/remote_bitrate_estimator/overuse_detector.h" #include "modules/remote_bitrate_estimator/overuse_estimator.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/bitrate_tracker.h" -#include "rtc_base/checks.h" -#include "system_wrappers/include/clock.h" namespace webrtc { class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator { public: - RemoteBitrateEstimatorAbsSendTime(RemoteBitrateObserver* observer, - Clock* clock); + RemoteBitrateEstimatorAbsSendTime(const Environment& env, + RemoteBitrateObserver* absl_nonnull + observer); RemoteBitrateEstimatorAbsSendTime() = delete; RemoteBitrateEstimatorAbsSendTime(const RemoteBitrateEstimatorAbsSendTime&) = @@ -98,9 +98,8 @@ class RemoteBitrateEstimatorAbsSendTime : public RemoteBitrateEstimator { void TimeoutStreams(Timestamp now); - Clock* const clock_; - const FieldTrialBasedConfig field_trials_; - RemoteBitrateObserver* const observer_; + const Environment env_; + RemoteBitrateObserver* absl_nonnull const observer_; std::unique_ptr inter_arrival_; std::unique_ptr estimator_; OveruseDetector detector_; diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc index d8ef23cc92..4f6c94db69 100644 --- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc +++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time_unittest.cc @@ -10,6 +10,10 @@ #include "modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h" +#include +#include + +#include "api/environment/environment_factory.h" #include "modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h" #include "test/gtest.h" @@ -25,12 +29,10 @@ class RemoteBitrateEstimatorAbsSendTimeTest RemoteBitrateEstimatorAbsSendTimeTest& operator=( const RemoteBitrateEstimatorAbsSendTimeTest&) = delete; - virtual void SetUp() 
{ - bitrate_estimator_.reset(new RemoteBitrateEstimatorAbsSendTime( - bitrate_observer_.get(), &clock_)); + void SetUp() override { + bitrate_estimator_ = std::make_unique( + CreateEnvironment(&clock_), bitrate_observer_.get()); } - - protected: }; TEST_F(RemoteBitrateEstimatorAbsSendTimeTest, InitialBehavior) { diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc index 1605008299..b05610d4e9 100644 --- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc +++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc @@ -10,12 +10,20 @@ #include "modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h" +#include #include -#include - -#include "absl/types/optional.h" +#include +#include + +#include "absl/base/nullability.h" +#include "api/environment/environment.h" +#include "api/transport/bandwidth_usage.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/remote_bitrate_estimator/aimd_rate_control.h" #include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "modules/remote_bitrate_estimator/inter_arrival.h" #include "modules/remote_bitrate_estimator/overuse_detector.h" #include "modules/remote_bitrate_estimator/overuse_estimator.h" @@ -39,13 +47,13 @@ RemoteBitrateEstimatorSingleStream::Detector::Detector() inter_arrival(90 * kTimestampGroupLengthMs, kTimestampToMs) {} RemoteBitrateEstimatorSingleStream::RemoteBitrateEstimatorSingleStream( - RemoteBitrateObserver* observer, - Clock* clock) - : clock_(clock), + const Environment& env, + RemoteBitrateObserver* absl_nonnull observer) + : env_(env), + observer_(observer), incoming_bitrate_(kBitrateWindow), last_valid_incoming_bitrate_(DataRate::Zero()), - remote_rate_(field_trials_), - observer_(observer), + remote_rate_(env_.field_trials()), process_interval_(kProcessInterval), uma_recorded_(false) { RTC_LOG(LS_INFO) << "RemoteBitrateEstimatorSingleStream: Instantiating."; @@ -56,7 +64,7 @@ RemoteBitrateEstimatorSingleStream::~RemoteBitrateEstimatorSingleStream() = void RemoteBitrateEstimatorSingleStream::IncomingPacket( const RtpPacketReceived& rtp_packet) { - absl::optional transmission_time_offset = + std::optional transmission_time_offset = rtp_packet.GetExtension(); if (!uma_recorded_) { BweNames type = transmission_time_offset.has_value() @@ -68,12 +76,12 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket( uint32_t ssrc = rtp_packet.Ssrc(); uint32_t rtp_timestamp = rtp_packet.Timestamp() + transmission_time_offset.value_or(0); - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); Detector& estimator = overuse_detectors_[ssrc]; estimator.last_packet_time = now; // Check if incoming bitrate estimate is valid, and if it needs to be reset. 
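As an illustrative aside on the constructor migration shown above (a minimal sketch, not part of the patch): callers that used to pass a RemoteBitrateObserver* together with a Clock* now pass an Environment, which bundles the clock and field trials, exactly as the updated unit tests do with CreateEnvironment(&clock_). The observer class and factory function below are hypothetical names used only for illustration.

// Sketch only, assuming the Environment-based constructors introduced in this
// patch. MyObserver and MakeEstimator are illustrative, not part of WebRTC.
#include <cstdint>
#include <memory>
#include <vector>

#include "api/environment/environment_factory.h"
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h"
#include "system_wrappers/include/clock.h"

namespace webrtc {

// Hypothetical observer that simply records the latest estimate.
class MyObserver : public RemoteBitrateObserver {
 public:
  void OnReceiveBitrateChanged(const std::vector<uint32_t>& /*ssrcs*/,
                               uint32_t bitrate_bps) override {
    last_bitrate_bps_ = bitrate_bps;
  }
  uint32_t last_bitrate_bps_ = 0;
};

std::unique_ptr<RemoteBitrateEstimator> MakeEstimator(Clock* clock,
                                                      MyObserver* observer) {
  // The Environment now carries the clock and field trials that used to be
  // separate constructor dependencies (Clock* and FieldTrialBasedConfig).
  return std::make_unique<RemoteBitrateEstimatorSingleStream>(
      CreateEnvironment(clock), observer);
}

}  // namespace webrtc

The same pattern applies to RemoteBitrateEstimatorAbsSendTime, whose constructor changes identically in this patch.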
- absl::optional incoming_bitrate = incoming_bitrate_.Rate(now); + std::optional incoming_bitrate = incoming_bitrate_.Rate(now); if (incoming_bitrate) { last_valid_incoming_bitrate_ = *incoming_bitrate; } else if (last_valid_incoming_bitrate_ > DataRate::Zero()) { @@ -101,7 +109,7 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket( estimator.estimator.num_of_deltas(), now_ms); } if (estimator.detector.State() == BandwidthUsage::kBwOverusing) { - absl::optional incoming_bitrate = incoming_bitrate_.Rate(now); + incoming_bitrate = incoming_bitrate_.Rate(now); if (incoming_bitrate.has_value() && (prior_state != BandwidthUsage::kBwOverusing || remote_rate_.TimeToReduceFurther(now, *incoming_bitrate))) { @@ -114,7 +122,7 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket( } TimeDelta RemoteBitrateEstimatorSingleStream::Process() { - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); Timestamp next_process_time = last_process_time_.has_value() ? *last_process_time_ + process_interval_ : now; @@ -162,7 +170,7 @@ void RemoteBitrateEstimatorSingleStream::UpdateEstimate(Timestamp now) { } void RemoteBitrateEstimatorSingleStream::OnRttUpdate(int64_t avg_rtt_ms, - int64_t max_rtt_ms) { + int64_t /* max_rtt_ms */) { remote_rate_.SetRtt(TimeDelta::Millis(avg_rtt_ms)); } diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h index 44976caf5e..9f87823ae6 100644 --- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h +++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h @@ -15,10 +15,11 @@ #include #include +#include #include -#include "absl/types/optional.h" -#include "api/transport/field_trial_based_config.h" +#include "absl/base/nullability.h" +#include "api/environment/environment.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -27,17 +28,16 @@ #include "modules/remote_bitrate_estimator/inter_arrival.h" #include "modules/remote_bitrate_estimator/overuse_detector.h" #include "modules/remote_bitrate_estimator/overuse_estimator.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/bitrate_tracker.h" namespace webrtc { -class Clock; -struct RTPHeader; - class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator { public: - RemoteBitrateEstimatorSingleStream(RemoteBitrateObserver* observer, - Clock* clock); + RemoteBitrateEstimatorSingleStream(const Environment& env, + RemoteBitrateObserver* absl_nonnull + observer); RemoteBitrateEstimatorSingleStream() = delete; RemoteBitrateEstimatorSingleStream( @@ -68,14 +68,13 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator { std::vector GetSsrcs() const; - Clock* const clock_; - const FieldTrialBasedConfig field_trials_; + const Environment env_; + RemoteBitrateObserver* absl_nonnull const observer_; std::map overuse_detectors_; BitrateTracker incoming_bitrate_; DataRate last_valid_incoming_bitrate_; AimdRateControl remote_rate_; - RemoteBitrateObserver* const observer_; - absl::optional last_process_time_; + std::optional last_process_time_; TimeDelta process_interval_; bool uma_recorded_; }; diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc index 64ef39d935..15696dc34c 100644 --- 
a/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc +++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc @@ -10,6 +10,9 @@ #include "modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h" +#include + +#include "api/environment/environment_factory.h" #include "modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h" #include "test/gtest.h" @@ -24,12 +27,10 @@ class RemoteBitrateEstimatorSingleTest : public RemoteBitrateEstimatorTest { RemoteBitrateEstimatorSingleTest& operator=( const RemoteBitrateEstimatorSingleTest&) = delete; - virtual void SetUp() { - bitrate_estimator_.reset(new RemoteBitrateEstimatorSingleStream( - bitrate_observer_.get(), &clock_)); + void SetUp() override { + bitrate_estimator_ = std::make_unique( + CreateEnvironment(&clock_), bitrate_observer_.get()); } - - protected: }; TEST_F(RemoteBitrateEstimatorSingleTest, InitialBehavior) { @@ -53,7 +54,7 @@ TEST_F(RemoteBitrateEstimatorSingleTest, CapacityDropOneStreamWrap) { } TEST_F(RemoteBitrateEstimatorSingleTest, CapacityDropTwoStreamsWrap) { - CapacityDropTestHelper(2, true, 767, 0); + CapacityDropTestHelper(2, true, 567, 0); } TEST_F(RemoteBitrateEstimatorSingleTest, CapacityDropThreeStreamsWrap) { @@ -61,11 +62,11 @@ TEST_F(RemoteBitrateEstimatorSingleTest, CapacityDropThreeStreamsWrap) { } TEST_F(RemoteBitrateEstimatorSingleTest, CapacityDropThirteenStreamsWrap) { - CapacityDropTestHelper(13, true, 567, 0); + CapacityDropTestHelper(13, true, 767, 0); } TEST_F(RemoteBitrateEstimatorSingleTest, CapacityDropNineteenStreamsWrap) { - CapacityDropTestHelper(19, true, 700, 0); + CapacityDropTestHelper(19, true, 767, 0); } TEST_F(RemoteBitrateEstimatorSingleTest, CapacityDropThirtyStreamsWrap) { diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc b/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc index ee9644530a..5adbadd296 100644 --- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc +++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc @@ -10,13 +10,19 @@ #include "modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h" #include +#include +#include #include #include +#include +#include "api/units/data_rate.h" +#include "api/units/timestamp.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" +#include "test/gtest.h" namespace webrtc { @@ -29,7 +35,7 @@ const int kNumInitialPackets = 2; namespace testing { void TestBitrateObserver::OnReceiveBitrateChanged( - const std::vector& ssrcs, + const std::vector& /* ssrcs */, uint32_t bitrate) { latest_bitrate_ = bitrate; updated_ = true; @@ -251,7 +257,7 @@ void RemoteBitrateEstimatorTest::IncomingPacket(uint32_t ssrc, // Returns true if an over-use was seen, false otherwise. // The StreamGenerator::updated() should be used to check for any changes in // target bitrate after the call to this function. 
-bool RemoteBitrateEstimatorTest::GenerateAndProcessFrame(uint32_t ssrc,
+bool RemoteBitrateEstimatorTest::GenerateAndProcessFrame(uint32_t /* ssrc */,
uint32_t bitrate_bps) {
RTC_DCHECK_GT(bitrate_bps, 0);
stream_generator_->SetBitrateBps(bitrate_bps);
@@ -477,7 +483,8 @@ void RemoteBitrateEstimatorTest::CapacityDropTestHelper(
uint32_t bitrate_bps = SteadyStateRun(
kDefaultSsrc, steady_state_time * kFramerate, kStartBitrate,
kMinExpectedBitrate, kMaxExpectedBitrate, kInitialCapacityBps);
- EXPECT_NEAR(kInitialCapacityBps, bitrate_bps, 130000u);
+ EXPECT_GE(bitrate_bps, 0.85 * kInitialCapacityBps);
+ EXPECT_LE(bitrate_bps, 1.05 * kInitialCapacityBps);
bitrate_observer_->Reset();
// Add an offset to make sure the BWE can handle it.
diff --git a/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h b/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
index a3b1cfdb34..9c6d380fde 100644
--- a/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
+++ b/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
@@ -11,6 +11,8 @@
#ifndef MODULES_REMOTE_BITRATE_ESTIMATOR_REMOTE_BITRATE_ESTIMATOR_UNITTEST_HELPER_H_
#define MODULES_REMOTE_BITRATE_ESTIMATOR_REMOTE_BITRATE_ESTIMATOR_UNITTEST_HELPER_H_
+#include
+#include
#include
#include
#include
diff --git a/modules/remote_bitrate_estimator/rtp_transport_feedback_generator.h b/modules/remote_bitrate_estimator/rtp_transport_feedback_generator.h
new file mode 100644
index 0000000000..8dc956fec8
--- /dev/null
+++ b/modules/remote_bitrate_estimator/rtp_transport_feedback_generator.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_REMOTE_BITRATE_ESTIMATOR_RTP_TRANSPORT_FEEDBACK_GENERATOR_H_
+#define MODULES_REMOTE_BITRATE_ESTIMATOR_RTP_TRANSPORT_FEEDBACK_GENERATOR_H_
+
+#include <functional>
+#include <memory>
+#include <vector>
+
+#include "api/units/data_rate.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "modules/rtp_rtcp/source/rtcp_packet.h"
+#include "modules/rtp_rtcp/source/rtp_packet_received.h"
+
+namespace webrtc {
+
+class RtpTransportFeedbackGenerator {
+ public:
+ // Function intended to be used for sending RTCP messages generated by an
+ // implementation of this class.
+ using RtcpSender = std::function<void(std::vector<std::unique_ptr<rtcp::RtcpPacket>> packets)>;
+ virtual ~RtpTransportFeedbackGenerator() = default;
+
+ virtual void OnReceivedPacket(const RtpPacketReceived& packet) = 0;
+
+ // Sends periodic feedback if it is time to send it.
+ // Returns time until next call to Process should be made.
+ virtual TimeDelta Process(Timestamp now) = 0;
+
+ virtual void OnSendBandwidthEstimateChanged(DataRate estimate) = 0;
+};
+
+} // namespace webrtc
+
+#endif // MODULES_REMOTE_BITRATE_ESTIMATOR_RTP_TRANSPORT_FEEDBACK_GENERATOR_H_
diff --git a/modules/remote_bitrate_estimator/test/bwe_test_logging.cc b/modules/remote_bitrate_estimator/test/bwe_test_logging.cc
deleted file mode 100644
index c8f6faa127..0000000000
--- a/modules/remote_bitrate_estimator/test/bwe_test_logging.cc
+++ /dev/null
@@ -1,262 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" - -#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE - -#include -#include -#include - -#include - -#include "rtc_base/checks.h" -#include "rtc_base/platform_thread.h" -#include "rtc_base/strings/string_builder.h" - -namespace webrtc { -namespace testing { -namespace bwe { - -static std::string ToString(uint32_t v) { - rtc::StringBuilder ss; - ss << v; - return ss.Release(); -} - -Logging::ThreadState::ThreadState() = default; -Logging::ThreadState::~ThreadState() = default; - -Logging::Context::Context(uint32_t name, int64_t timestamp_ms, bool enabled) { - Logging::GetInstance()->PushState(ToString(name), timestamp_ms, enabled); -} - -Logging::Context::Context(const std::string& name, - int64_t timestamp_ms, - bool enabled) { - Logging::GetInstance()->PushState(name, timestamp_ms, enabled); -} - -Logging::Context::Context(const char* name, - int64_t timestamp_ms, - bool enabled) { - Logging::GetInstance()->PushState(name, timestamp_ms, enabled); -} - -Logging::Context::~Context() { - Logging::GetInstance()->PopState(); -} - -Logging* Logging::GetInstance() { - static Logging* logging = new Logging(); - return logging; -} - -void Logging::SetGlobalContext(uint32_t name) { - MutexLock lock(&mutex_); - thread_map_[rtc::CurrentThreadId()].global_state.tag = ToString(name); -} - -void Logging::SetGlobalContext(const std::string& name) { - MutexLock lock(&mutex_); - thread_map_[rtc::CurrentThreadId()].global_state.tag = name; -} - -void Logging::SetGlobalContext(const char* name) { - MutexLock lock(&mutex_); - thread_map_[rtc::CurrentThreadId()].global_state.tag = name; -} - -void Logging::SetGlobalEnable(bool enabled) { - MutexLock lock(&mutex_); - thread_map_[rtc::CurrentThreadId()].global_state.enabled = enabled; -} - -void Logging::Log(const char format[], ...) 
{ - MutexLock lock(&mutex_); - ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId()); - RTC_DCHECK(it != thread_map_.end()); - const State& state = it->second.stack.top(); - if (state.enabled) { - printf("%s\t", state.tag.c_str()); - va_list args; - va_start(args, format); - vprintf(format, args); - va_end(args); - printf("\n"); - } -} - -void Logging::Plot(int figure, const std::string& name, double value) { - Plot(figure, name, value, 0, "-"); -} - -void Logging::Plot(int figure, - const std::string& name, - double value, - uint32_t ssrc) { - Plot(figure, name, value, ssrc, "-"); -} - -void Logging::Plot(int figure, - const std::string& name, - double value, - const std::string& alg_name) { - Plot(figure, name, value, 0, alg_name); -} - -void Logging::Plot(int figure, - const std::string& name, - double value, - uint32_t ssrc, - const std::string& alg_name) { - MutexLock lock(&mutex_); - ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId()); - RTC_DCHECK(it != thread_map_.end()); - const State& state = it->second.stack.top(); - if (state.enabled) { - printf("PLOT\t%d\t%s:%" PRIu32 "@%s\t%f\t%f\n", figure, name.c_str(), ssrc, - alg_name.c_str(), state.timestamp_ms * 0.001, value); - } -} - -void Logging::PlotBar(int figure, - const std::string& name, - double value, - int flow_id) { - MutexLock lock(&mutex_); - ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId()); - RTC_DCHECK(it != thread_map_.end()); - const State& state = it->second.stack.top(); - if (state.enabled) { - printf("BAR\t%d\t%s_%d\t%f\n", figure, name.c_str(), flow_id, value); - } -} - -void Logging::PlotBaselineBar(int figure, - const std::string& name, - double value, - int flow_id) { - MutexLock lock(&mutex_); - ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId()); - RTC_DCHECK(it != thread_map_.end()); - const State& state = it->second.stack.top(); - if (state.enabled) { - printf("BASELINE\t%d\t%s_%d\t%f\n", figure, name.c_str(), flow_id, value); - } -} - -void Logging::PlotErrorBar(int figure, - const std::string& name, - double value, - double ylow, - double yhigh, - const std::string& error_title, - int flow_id) { - MutexLock lock(&mutex_); - ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId()); - RTC_DCHECK(it != thread_map_.end()); - const State& state = it->second.stack.top(); - if (state.enabled) { - printf("ERRORBAR\t%d\t%s_%d\t%f\t%f\t%f\t%s\n", figure, name.c_str(), - flow_id, value, ylow, yhigh, error_title.c_str()); - } -} - -void Logging::PlotLimitErrorBar(int figure, - const std::string& name, - double value, - double ylow, - double yhigh, - const std::string& error_title, - double ymax, - const std::string& limit_title, - int flow_id) { - MutexLock lock(&mutex_); - ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId()); - RTC_DCHECK(it != thread_map_.end()); - const State& state = it->second.stack.top(); - if (state.enabled) { - printf("LIMITERRORBAR\t%d\t%s_%d\t%f\t%f\t%f\t%s\t%f\t%s\n", figure, - name.c_str(), flow_id, value, ylow, yhigh, error_title.c_str(), ymax, - limit_title.c_str()); - } -} - -void Logging::PlotLabel(int figure, - const std::string& title, - const std::string& y_label, - int num_flows) { - MutexLock lock(&mutex_); - ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId()); - RTC_DCHECK(it != thread_map_.end()); - const State& state = it->second.stack.top(); - if (state.enabled) { - printf("LABEL\t%d\t%s\t%s\t%d\n", figure, title.c_str(), y_label.c_str(), - num_flows); - } -} - -Logging::Logging() : 
thread_map_() {} - -Logging::~Logging() = default; - -Logging::State::State() : tag(""), timestamp_ms(0), enabled(true) {} - -Logging::State::State(const std::string& tag, - int64_t timestamp_ms, - bool enabled) - : tag(tag), timestamp_ms(timestamp_ms), enabled(enabled) {} - -void Logging::State::MergePrevious(const State& previous) { - if (tag.empty()) { - tag = previous.tag; - } else if (!previous.tag.empty()) { - tag = previous.tag + "_" + tag; - } - timestamp_ms = std::max(previous.timestamp_ms, timestamp_ms); - enabled = previous.enabled && enabled; -} - -void Logging::PushState(const std::string& append_to_tag, - int64_t timestamp_ms, - bool enabled) { - MutexLock lock(&mutex_); - State new_state(append_to_tag, timestamp_ms, enabled); - ThreadState* thread_state = &thread_map_[rtc::CurrentThreadId()]; - std::stack* stack = &thread_state->stack; - if (stack->empty()) { - new_state.MergePrevious(thread_state->global_state); - } else { - new_state.MergePrevious(stack->top()); - } - stack->push(new_state); -} - -void Logging::PopState() { - MutexLock lock(&mutex_); - ThreadMap::iterator it = thread_map_.find(rtc::CurrentThreadId()); - RTC_DCHECK(it != thread_map_.end()); - std::stack* stack = &it->second.stack; - int64_t newest_timestamp_ms = stack->top().timestamp_ms; - stack->pop(); - if (!stack->empty()) { - State* state = &stack->top(); - // Update time so that next log/plot will use the latest time seen so far - // in this call tree. - state->timestamp_ms = std::max(state->timestamp_ms, newest_timestamp_ms); - } -} -} // namespace bwe -} // namespace testing -} // namespace webrtc - -#endif // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE diff --git a/modules/remote_bitrate_estimator/test/bwe_test_logging.h b/modules/remote_bitrate_estimator/test/bwe_test_logging.h deleted file mode 100644 index 49e1e716b2..0000000000 --- a/modules/remote_bitrate_estimator/test/bwe_test_logging.h +++ /dev/null @@ -1,360 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_TEST_LOGGING_H_ -#define MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_TEST_LOGGING_H_ - -// To enable BWE logging, run this command from trunk/ : -// build/gyp_chromium --depth=. webrtc/modules/modules.gyp -// -Denable_bwe_test_logging=1 -#ifndef BWE_TEST_LOGGING_COMPILE_TIME_ENABLE -#define BWE_TEST_LOGGING_COMPILE_TIME_ENABLE 0 -#endif // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE - -// BWE logging allows you to insert dynamically named log/plot points in the -// call tree. E.g. 
the function: -// void f1() { -// BWE_TEST_LOGGING_TIME(clock_->TimeInMilliseconds()); -// BWE_TEST_LOGGING_CONTEXT("stream"); -// for (uint32_t i=0; i<4; ++i) { -// BWE_TEST_LOGGING_ENABLE(i & 1); -// BWE_TEST_LOGGING_CONTEXT(i); -// BWE_TEST_LOGGING_LOG1("weight", "%f tonnes", weights_[i]); -// for (float j=0.0f; j<1.0; j+=0.4f) { -// BWE_TEST_LOGGING_PLOT(0, "bps", -1, j); -// } -// } -// } -// -// Might produce the output: -// stream_00000001_weight 13.000000 tonnes -// PLOT stream_00000001_bps 1.000000 0.000000 -// PLOT stream_00000001_bps 1.000000 0.400000 -// PLOT stream_00000001_bps 1.000000 0.800000 -// stream_00000003_weight 39.000000 tonnes -// PLOT stream_00000003_bps 1.000000 0.000000 -// PLOT stream_00000003_bps 1.000000 0.400000 -// PLOT stream_00000003_bps 1.000000 0.800000 -// -// Log *contexts* are names concatenated with '_' between them, with the name -// of the logged/plotted string/value last. Plot *time* is inherited down the -// tree. A branch is enabled by default but can be *disabled* to reduce output. -// The difference between the RTC_LOG and PLOT macros is that PLOT prefixes the -// line so it can be easily filtered, plus it outputs the current time. - -#if !(BWE_TEST_LOGGING_COMPILE_TIME_ENABLE) - -// Set a thread-global base logging context. This name will be prepended to all -// hierarchical contexts. -// `name` is a char*, std::string or uint32_t to name the context. -#define BWE_TEST_LOGGING_GLOBAL_CONTEXT(name) - -// Thread-globally allow/disallow logging. -// `enable` is expected to be a bool. -#define BWE_TEST_LOGGING_GLOBAL_ENABLE(enabled) - -// Insert a (hierarchical) logging context. -// `name` is a char*, std::string or uint32_t to name the context. -#define BWE_TEST_LOGGING_CONTEXT(name) - -// Allow/disallow logging down the call tree from this point. Logging must be -// enabled all the way to the root of the call tree to take place. -// `enable` is expected to be a bool. -#define BWE_TEST_LOGGING_ENABLE(enabled) - -// Set current time (only affects PLOT output). Down the call tree, the latest -// time set always takes precedence. -// `time` is an int64_t time in ms, or -1 to inherit time from previous context. -#define BWE_TEST_LOGGING_TIME(time) - -// Print to stdout, e.g.: -// Context1_Context2_Name printf-formated-string -// `name` is a char*, std::string or uint32_t to name the log line. -// `format` is a printf format string. -// |_1...| are arguments for printf. -#define BWE_TEST_LOGGING_LOG1(name, format, _1) -#define BWE_TEST_LOGGING_LOG2(name, format, _1, _2) -#define BWE_TEST_LOGGING_LOG3(name, format, _1, _2, _3) -#define BWE_TEST_LOGGING_LOG4(name, format, _1, _2, _3, _4) -#define BWE_TEST_LOGGING_LOG5(name, format, _1, _2, _3, _4, _5) - -// Print to stdout in tab-separated format suitable for plotting, e.g.: -// PLOT figure Context1_Context2_Name time value -// `figure` is a figure id. Different figures are plotted in different windows. -// `name` is a char*, std::string or uint32_t to name the plotted value. -// `time` is an int64_t time in ms, or -1 to inherit time from previous context. -// `value` is a double precision float to be plotted. 
-// `ssrc` identifies the source of a stream -// `alg_name` is an optional argument, a string -#define BWE_TEST_LOGGING_PLOT(figure, name, time, value) -#define BWE_TEST_LOGGING_PLOT_WITH_NAME(figure, name, time, value, alg_name) -#define BWE_TEST_LOGGING_PLOT_WITH_SSRC(figure, name, time, value, ssrc) -#define BWE_TEST_LOGGING_PLOT_WITH_NAME_AND_SSRC(figure, name, time, value, \ - ssrc, alg_name) - -// Print to stdout in tab-separated format suitable for plotting, e.g.: -// BAR figure Context1_Context2_Name x_left width value -// `figure` is a figure id. Different figures are plotted in different windows. -// `name` is a char*, std::string or uint32_t to name the plotted value. -// `value` is a double precision float to be plotted. -// `ylow` and `yhigh` are double precision float for the error line. -// `title` is a string and refers to the error label. -// `ymax` is a double precision float for the limit horizontal line. -// `limit_title` is a string and refers to the limit label. -#define BWE_TEST_LOGGING_BAR(figure, name, value, flow_id) -#define BWE_TEST_LOGGING_ERRORBAR(figure, name, value, ylow, yhigh, \ - error_title, flow_id) -#define BWE_TEST_LOGGING_LIMITERRORBAR( \ - figure, name, value, ylow, yhigh, error_title, ymax, limit_title, flow_id) - -#define BWE_TEST_LOGGING_BASELINEBAR(figure, name, value, flow_id) - -// `num_flows` is an integer refering to the number of RMCAT flows in the -// scenario. -// Define `x_label` and `y_label` for plots. -#define BWE_TEST_LOGGING_LABEL(figure, x_label, y_label, num_flows) - -#else // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE - -#include -#include -#include -#include - -#include "rtc_base/synchronization/mutex.h" - -#define BWE_TEST_LOGGING_GLOBAL_CONTEXT(name) \ - do { \ - webrtc::testing::bwe::Logging::GetInstance()->SetGlobalContext(name); \ - } while (0) - -#define BWE_TEST_LOGGING_GLOBAL_ENABLE(enabled) \ - do { \ - webrtc::testing::bwe::Logging::GetInstance()->SetGlobalEnable(enabled); \ - } while (0) - -#define __BWE_TEST_LOGGING_CONTEXT_NAME(ctx, line) ctx##line -#define __BWE_TEST_LOGGING_CONTEXT_DECLARE(ctx, line, name, time, enabled) \ - webrtc::testing::bwe::Logging::Context __BWE_TEST_LOGGING_CONTEXT_NAME( \ - ctx, line)(name, time, enabled) - -#define BWE_TEST_LOGGING_CONTEXT(name) \ - __BWE_TEST_LOGGING_CONTEXT_DECLARE(__bwe_log_, __LINE__, name, -1, true) -#define BWE_TEST_LOGGING_ENABLE(enabled) \ - __BWE_TEST_LOGGING_CONTEXT_DECLARE(__bwe_log_, __LINE__, "", -1, \ - static_cast(enabled)) -#define BWE_TEST_LOGGING_TIME(time) \ - __BWE_TEST_LOGGING_CONTEXT_DECLARE(__bwe_log_, __LINE__, "", \ - static_cast(time), true) - -#define BWE_TEST_LOGGING_LOG1(name, format, _1) \ - do { \ - BWE_TEST_LOGGING_CONTEXT(name); \ - webrtc::testing::bwe::Logging::GetInstance()->Log(format, _1); \ - } while (0) -#define BWE_TEST_LOGGING_LOG2(name, format, _1, _2) \ - do { \ - BWE_TEST_LOGGING_CONTEXT(name); \ - webrtc::testing::bwe::Logging::GetInstance()->Log(format, _1, _2); \ - } while (0) -#define BWE_TEST_LOGGING_LOG3(name, format, _1, _2, _3) \ - do { \ - BWE_TEST_LOGGING_CONTEXT(name); \ - webrtc::testing::bwe::Logging::GetInstance()->Log(format, _1, _2, _3); \ - } while (0) -#define BWE_TEST_LOGGING_LOG4(name, format, _1, _2, _3, _4) \ - do { \ - BWE_TEST_LOGGING_CONTEXT(name); \ - webrtc::testing::bwe::Logging::GetInstance()->Log(format, _1, _2, _3, _4); \ - } while (0) -#define BWE_TEST_LOGGING_LOG5(name, format, _1, _2, _3, _4, _5) \ - do { \ - BWE_TEST_LOGGING_CONTEXT(name); \ - 
webrtc::testing::bwe::Logging::GetInstance()->Log(format, _1, _2, _3, _4, \ - _5); \ - } while (0) - -#define BWE_TEST_LOGGING_PLOT(figure, name, time, value) \ - do { \ - __BWE_TEST_LOGGING_CONTEXT_DECLARE(__bwe_log_, __PLOT__, name, \ - static_cast(time), true); \ - webrtc::testing::bwe::Logging::GetInstance()->Plot(figure, name, value); \ - } while (0) - -#define BWE_TEST_LOGGING_PLOT_WITH_NAME(figure, name, time, value, alg_name) \ - do { \ - __BWE_TEST_LOGGING_CONTEXT_DECLARE(__bwe_log_, __PLOT__, name, \ - static_cast(time), true); \ - webrtc::testing::bwe::Logging::GetInstance()->Plot(figure, name, value, \ - alg_name); \ - } while (0) - -#define BWE_TEST_LOGGING_PLOT_WITH_SSRC(figure, name, time, value, ssrc) \ - do { \ - __BWE_TEST_LOGGING_CONTEXT_DECLARE(__bwe_log_, __PLOT__, name, \ - static_cast(time), true); \ - webrtc::testing::bwe::Logging::GetInstance()->Plot(figure, name, value, \ - ssrc); \ - } while (0) - -#define BWE_TEST_LOGGING_PLOT_WITH_NAME_AND_SSRC(figure, name, time, value, \ - ssrc, alg_name) \ - do { \ - __BWE_TEST_LOGGING_CONTEXT_DECLARE(__bwe_log_, __PLOT__, name, \ - static_cast(time), true); \ - webrtc::testing::bwe::Logging::GetInstance()->Plot(figure, name, value, \ - ssrc, alg_name); \ - } while (0) - -#define BWE_TEST_LOGGING_BAR(figure, name, value, flow_id) \ - do { \ - BWE_TEST_LOGGING_CONTEXT(name); \ - webrtc::testing::bwe::Logging::GetInstance()->PlotBar(figure, name, value, \ - flow_id); \ - } while (0) - -#define BWE_TEST_LOGGING_BASELINEBAR(figure, name, value, flow_id) \ - do { \ - BWE_TEST_LOGGING_CONTEXT(name); \ - webrtc::testing::bwe::Logging::GetInstance()->PlotBaselineBar( \ - figure, name, value, flow_id); \ - } while (0) - -#define BWE_TEST_LOGGING_ERRORBAR(figure, name, value, ylow, yhigh, title, \ - flow_id) \ - do { \ - BWE_TEST_LOGGING_CONTEXT(name); \ - webrtc::testing::bwe::Logging::GetInstance()->PlotErrorBar( \ - figure, name, value, ylow, yhigh, title, flow_id); \ - } while (0) - -#define BWE_TEST_LOGGING_LIMITERRORBAR( \ - figure, name, value, ylow, yhigh, error_title, ymax, limit_title, flow_id) \ - do { \ - BWE_TEST_LOGGING_CONTEXT(name); \ - webrtc::testing::bwe::Logging::GetInstance()->PlotLimitErrorBar( \ - figure, name, value, ylow, yhigh, error_title, ymax, limit_title, \ - flow_id); \ - } while (0) - -#define BWE_TEST_LOGGING_LABEL(figure, title, y_label, num_flows) \ - do { \ - BWE_TEST_LOGGING_CONTEXT(title); \ - webrtc::testing::bwe::Logging::GetInstance()->PlotLabel( \ - figure, title, y_label, num_flows); \ - } while (0) - -namespace webrtc { -namespace testing { -namespace bwe { - -class Logging { - public: - class Context { - public: - Context(uint32_t name, int64_t timestamp_ms, bool enabled); - Context(const std::string& name, int64_t timestamp_ms, bool enabled); - Context(const char* name, int64_t timestamp_ms, bool enabled); - - Context() = delete; - Context(const Context&) = delete; - Context& operator=(const Context&) = delete; - ~Context(); - }; - - static Logging* GetInstance(); - - void SetGlobalContext(uint32_t name); - void SetGlobalContext(const std::string& name); - void SetGlobalContext(const char* name); - void SetGlobalEnable(bool enabled); - -#if defined(__GNUC__) - // Note: Implicit `this` argument counts as the first argument. 
- __attribute__((__format__(__printf__, 2, 3))) -#endif - void - Log(const char format[], ...); - void Plot(int figure, const std::string& name, double value); - void Plot(int figure, - const std::string& name, - double value, - const std::string& alg_name); - void Plot(int figure, const std::string& name, double value, uint32_t ssrc); - void Plot(int figure, - const std::string& name, - double value, - uint32_t ssrc, - const std::string& alg_name); - void PlotBar(int figure, const std::string& name, double value, int flow_id); - void PlotBaselineBar(int figure, - const std::string& name, - double value, - int flow_id); - void PlotErrorBar(int figure, - const std::string& name, - double value, - double ylow, - double yhigh, - const std::string& error_title, - int flow_id); - - void PlotLimitErrorBar(int figure, - const std::string& name, - double value, - double ylow, - double yhigh, - const std::string& error_title, - double ymax, - const std::string& limit_title, - int flow_id); - void PlotLabel(int figure, - const std::string& title, - const std::string& y_label, - int num_flows); - - private: - struct State { - State(); - State(const std::string& new_tag, int64_t timestamp_ms, bool enabled); - void MergePrevious(const State& previous); - - std::string tag; - int64_t timestamp_ms; - bool enabled; - }; - struct ThreadState { - ThreadState(); - ~ThreadState(); - State global_state; - std::stack stack; - }; - typedef std::map ThreadMap; - - Logging(); - ~Logging(); - - Logging(const Logging&) = delete; - Logging& operator=(const Logging&) = delete; - - void PushState(const std::string& append_to_tag, - int64_t timestamp_ms, - bool enabled); - void PopState(); - - Mutex mutex_; - ThreadMap thread_map_; -}; -} // namespace bwe -} // namespace testing -} // namespace webrtc - -#endif // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE -#endif // MODULES_REMOTE_BITRATE_ESTIMATOR_TEST_BWE_TEST_LOGGING_H_ diff --git a/modules/remote_bitrate_estimator/tools/bwe_rtp.cc b/modules/remote_bitrate_estimator/tools/bwe_rtp.cc index 403f81fd03..539cadbd66 100644 --- a/modules/remote_bitrate_estimator/tools/bwe_rtp.cc +++ b/modules/remote_bitrate_estimator/tools/bwe_rtp.cc @@ -12,6 +12,9 @@ #include +#include +#include +#include #include #include #include @@ -19,6 +22,7 @@ #include "absl/flags/flag.h" #include "absl/flags/parse.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "test/rtp_file_reader.h" ABSL_FLAG(std::string, diff --git a/modules/remote_bitrate_estimator/tools/rtp_to_text.cc b/modules/remote_bitrate_estimator/tools/rtp_to_text.cc index e8dc59f740..d2cc762b9b 100644 --- a/modules/remote_bitrate_estimator/tools/rtp_to_text.cc +++ b/modules/remote_bitrate_estimator/tools/rtp_to_text.cc @@ -10,6 +10,8 @@ #include +#include +#include #include #include "modules/remote_bitrate_estimator/tools/bwe_rtp.h" @@ -47,7 +49,7 @@ int main(int argc, char* argv[]) { toffset != 0) ++non_zero_ts_offsets; if (arrival_time_only) { - rtc::StringBuilder ss; + webrtc::StringBuilder ss; ss << static_cast(packet.time_ms) * 1000000; fprintf(stdout, "%s\n", ss.str().c_str()); } else { diff --git a/modules/remote_bitrate_estimator/remote_estimator_proxy.cc b/modules/remote_bitrate_estimator/transport_sequence_number_feedback_generator.cc similarity index 71% rename from modules/remote_bitrate_estimator/remote_estimator_proxy.cc rename to modules/remote_bitrate_estimator/transport_sequence_number_feedback_generator.cc index e100995b8e..bab811282d 100644 
--- a/modules/remote_bitrate_estimator/remote_estimator_proxy.cc +++ b/modules/remote_bitrate_estimator/transport_sequence_number_feedback_generator.cc @@ -8,24 +8,28 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/remote_bitrate_estimator/remote_estimator_proxy.h" +#include "modules/remote_bitrate_estimator/transport_sequence_number_feedback_generator.h" #include #include -#include #include +#include #include +#include -#include "absl/types/optional.h" +#include "api/rtp_headers.h" +#include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" -#include "modules/rtp_rtcp/source/rtcp_packet/remote_estimate.h" +#include "api/units/timestamp.h" +#include "modules/remote_bitrate_estimator/packet_arrival_map.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_minmax.h" -#include "system_wrappers/include/clock.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { namespace { @@ -35,45 +39,27 @@ constexpr TimeDelta kMinInterval = TimeDelta::Millis(50); constexpr TimeDelta kMaxInterval = TimeDelta::Millis(250); constexpr TimeDelta kDefaultInterval = TimeDelta::Millis(100); -TimeDelta GetAbsoluteSendTimeDelta(uint32_t new_sendtime, - uint32_t previous_sendtime) { - static constexpr uint32_t kWrapAroundPeriod = 0x0100'0000; - RTC_DCHECK_LT(new_sendtime, kWrapAroundPeriod); - RTC_DCHECK_LT(previous_sendtime, kWrapAroundPeriod); - uint32_t delta = (new_sendtime - previous_sendtime) % kWrapAroundPeriod; - if (delta >= kWrapAroundPeriod / 2) { - // absolute send time wraps around, thus treat deltas larger than half of - // the wrap around period as negative. 
- delta = (previous_sendtime - new_sendtime) % kWrapAroundPeriod; - return TimeDelta::Micros(int64_t{delta} * -1'000'000 / (1 << 18)); - } - return TimeDelta::Micros(int64_t{delta} * 1'000'000 / (1 << 18)); -} } // namespace -RemoteEstimatorProxy::RemoteEstimatorProxy( - TransportFeedbackSender feedback_sender, - NetworkStateEstimator* network_state_estimator) +TransportSequenceNumberFeedbackGenenerator:: + TransportSequenceNumberFeedbackGenenerator(RtcpSender feedback_sender) : feedback_sender_(std::move(feedback_sender)), last_process_time_(Timestamp::MinusInfinity()), - network_state_estimator_(network_state_estimator), media_ssrc_(0), feedback_packet_count_(0), - packet_overhead_(DataSize::Zero()), send_interval_(kDefaultInterval), - send_periodic_feedback_(true), - previous_abs_send_time_(0), - abs_send_timestamp_(Timestamp::Zero()), - last_arrival_time_with_abs_send_time_(Timestamp::MinusInfinity()) { + send_periodic_feedback_(true) { RTC_LOG(LS_INFO) << "Maximum interval between transport feedback RTCP messages: " << kMaxInterval; } -RemoteEstimatorProxy::~RemoteEstimatorProxy() {} +TransportSequenceNumberFeedbackGenenerator:: + ~TransportSequenceNumberFeedbackGenenerator() {} -void RemoteEstimatorProxy::MaybeCullOldPackets(int64_t sequence_number, - Timestamp arrival_time) { +void TransportSequenceNumberFeedbackGenenerator::MaybeCullOldPackets( + int64_t sequence_number, + Timestamp arrival_time) { if (periodic_window_start_seq_ >= packet_arrival_times_.end_sequence_number() && arrival_time - Timestamp::Zero() >= kBackWindow) { @@ -83,14 +69,15 @@ void RemoteEstimatorProxy::MaybeCullOldPackets(int64_t sequence_number, } } -void RemoteEstimatorProxy::IncomingPacket(const RtpPacketReceived& packet) { +void TransportSequenceNumberFeedbackGenenerator::OnReceivedPacket( + const RtpPacketReceived& packet) { if (packet.arrival_time().IsInfinite()) { RTC_LOG(LS_WARNING) << "Arrival time not set."; return; } uint16_t seqnum = 0; - absl::optional feedback_request; + std::optional feedback_request; if (!packet.GetExtension(&seqnum) && !packet.GetExtension(&seqnum, &feedback_request)) { @@ -134,30 +121,9 @@ void RemoteEstimatorProxy::IncomingPacket(const RtpPacketReceived& packet) { // Send feedback packet immediately. 
SendFeedbackOnRequest(seq, *feedback_request); } - - absl::optional absolute_send_time_24bits = - packet.GetExtension(); - if (network_state_estimator_ && absolute_send_time_24bits.has_value()) { - PacketResult packet_result; - packet_result.receive_time = packet.arrival_time(); - if (packet.arrival_time() - last_arrival_time_with_abs_send_time_ < - TimeDelta::Seconds(10)) { - abs_send_timestamp_ += GetAbsoluteSendTimeDelta( - *absolute_send_time_24bits, previous_abs_send_time_); - } else { - abs_send_timestamp_ = packet.arrival_time(); - } - last_arrival_time_with_abs_send_time_ = packet.arrival_time(); - previous_abs_send_time_ = *absolute_send_time_24bits; - packet_result.sent_packet.send_time = abs_send_timestamp_; - packet_result.sent_packet.size = - DataSize::Bytes(packet.size()) + packet_overhead_; - packet_result.sent_packet.sequence_number = seq; - network_state_estimator_->OnReceivedPacket(packet_result); - } } -TimeDelta RemoteEstimatorProxy::Process(Timestamp now) { +TimeDelta TransportSequenceNumberFeedbackGenenerator::Process(Timestamp now) { MutexLock lock(&lock_); if (!send_periodic_feedback_) { // If TransportSequenceNumberV2 has been received in one packet, @@ -174,7 +140,8 @@ TimeDelta RemoteEstimatorProxy::Process(Timestamp now) { return next_process_time - now; } -void RemoteEstimatorProxy::OnBitrateChanged(int bitrate_bps) { +void TransportSequenceNumberFeedbackGenenerator::OnSendBandwidthEstimateChanged( + DataRate estimate) { // TwccReportSize = Ipv4(20B) + UDP(8B) + SRTP(10B) + // AverageTwccReport(30B) // TwccReport size at 50ms interval is 24 byte. @@ -184,7 +151,7 @@ void RemoteEstimatorProxy::OnBitrateChanged(int bitrate_bps) { constexpr DataRate kMinTwccRate = kTwccReportSize / kMaxInterval; // Let TWCC reports occupy 5% of total bandwidth. - DataRate twcc_bitrate = DataRate::BitsPerSec(0.05 * bitrate_bps); + DataRate twcc_bitrate = 0.05 * estimate; // Check upper send_interval bound by checking bitrate to avoid overflow when // dividing by small bitrate, in particular avoid dividing by zero bitrate. @@ -197,28 +164,13 @@ void RemoteEstimatorProxy::OnBitrateChanged(int bitrate_bps) { send_interval_ = send_interval; } -void RemoteEstimatorProxy::SetTransportOverhead(DataSize overhead_per_packet) { - MutexLock lock(&lock_); - packet_overhead_ = overhead_per_packet; -} - -void RemoteEstimatorProxy::SendPeriodicFeedbacks() { +void TransportSequenceNumberFeedbackGenenerator::SendPeriodicFeedbacks() { // `periodic_window_start_seq_` is the first sequence number to include in // the current feedback packet. Some older may still be in the map, in case // a reordering happens and we need to retransmit them. 
if (!periodic_window_start_seq_) return; - std::unique_ptr remote_estimate; - if (network_state_estimator_) { - absl::optional state_estimate = - network_state_estimator_->GetCurrentEstimate(); - if (state_estimate) { - remote_estimate = std::make_unique(); - remote_estimate->SetEstimate(state_estimate.value()); - } - } - int64_t packet_arrival_times_end_seq = packet_arrival_times_.end_sequence_number(); while (periodic_window_start_seq_ < packet_arrival_times_end_seq) { @@ -232,13 +184,8 @@ void RemoteEstimatorProxy::SendPeriodicFeedbacks() { } RTC_DCHECK(feedback_sender_ != nullptr); - std::vector> packets; - if (remote_estimate) { - packets.push_back(std::move(remote_estimate)); - } packets.push_back(std::move(feedback_packet)); - feedback_sender_(std::move(packets)); // Note: Don't erase items from packet_arrival_times_ after sending, in // case they need to be re-sent after a reordering. Removal will be @@ -246,7 +193,7 @@ void RemoteEstimatorProxy::SendPeriodicFeedbacks() { } } -void RemoteEstimatorProxy::SendFeedbackOnRequest( +void TransportSequenceNumberFeedbackGenenerator::SendFeedbackOnRequest( int64_t sequence_number, const FeedbackRequest& feedback_request) { if (feedback_request.sequence_count == 0) { @@ -260,8 +207,11 @@ void RemoteEstimatorProxy::SendFeedbackOnRequest( feedback_request.include_timestamps, first_sequence_number, sequence_number + 1, /*is_periodic_update=*/false); - // This is called when a packet has just been added. - RTC_DCHECK(feedback_packet != nullptr); + // Even though this is called when a packet has just been added, + // no feedback may be produced when that new packet is too old. + if (feedback_packet == nullptr) { + return; + } // Clear up to the first packet that is included in this feedback packet. packet_arrival_times_.EraseTo(first_sequence_number); @@ -273,7 +223,7 @@ void RemoteEstimatorProxy::SendFeedbackOnRequest( } std::unique_ptr -RemoteEstimatorProxy::MaybeBuildFeedbackPacket( +TransportSequenceNumberFeedbackGenenerator::MaybeBuildFeedbackPacket( bool include_timestamps, int64_t begin_sequence_number_inclusive, int64_t end_sequence_number_exclusive, diff --git a/modules/remote_bitrate_estimator/remote_estimator_proxy.h b/modules/remote_bitrate_estimator/transport_sequence_number_feedback_generator.h similarity index 53% rename from modules/remote_bitrate_estimator/remote_estimator_proxy.h rename to modules/remote_bitrate_estimator/transport_sequence_number_feedback_generator.h index b50d2f0db0..6bde614692 100644 --- a/modules/remote_bitrate_estimator/remote_estimator_proxy.h +++ b/modules/remote_bitrate_estimator/transport_sequence_number_feedback_generator.h @@ -8,51 +8,45 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef MODULES_REMOTE_BITRATE_ESTIMATOR_REMOTE_ESTIMATOR_PROXY_H_ -#define MODULES_REMOTE_BITRATE_ESTIMATOR_REMOTE_ESTIMATOR_PROXY_H_ +#ifndef MODULES_REMOTE_BITRATE_ESTIMATOR_TRANSPORT_SEQUENCE_NUMBER_FEEDBACK_GENERATOR_H_ +#define MODULES_REMOTE_BITRATE_ESTIMATOR_TRANSPORT_SEQUENCE_NUMBER_FEEDBACK_GENERATOR_H_ -#include -#include +#include #include -#include +#include -#include "absl/types/optional.h" -#include "api/field_trials_view.h" #include "api/rtp_headers.h" -#include "api/transport/network_control.h" -#include "api/units/data_size.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "modules/remote_bitrate_estimator/packet_arrival_map.h" -#include "modules/rtp_rtcp/source/rtcp_packet.h" +#include "modules/remote_bitrate_estimator/rtp_transport_feedback_generator.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { -// Class used when send-side BWE is enabled: This proxy is instantiated on the -// receive side. It buffers a number of receive timestamps and then sends -// transport feedback messages back too the send side. -class RemoteEstimatorProxy { +// Class used when send-side BWE is enabled. +// The class is responsible for generating RTCP feedback packets based on +// incoming media packets. Incoming packets must have a transport sequence +// number, Ie. either the extension +// http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01 or +// http://www.webrtc.org/experiments/rtp-hdrext/transport-wide-cc-02 must be +// used. +class TransportSequenceNumberFeedbackGenenerator + : public RtpTransportFeedbackGenerator { public: - // Used for sending transport feedback messages when send side - // BWE is used. - using TransportFeedbackSender = std::function> packets)>; - RemoteEstimatorProxy(TransportFeedbackSender feedback_sender, - NetworkStateEstimator* network_state_estimator); - ~RemoteEstimatorProxy(); + TransportSequenceNumberFeedbackGenenerator( + RtpTransportFeedbackGenerator::RtcpSender feedback_sender); + ~TransportSequenceNumberFeedbackGenenerator(); - void IncomingPacket(const RtpPacketReceived& packet); + void OnReceivedPacket(const RtpPacketReceived& packet) override; + void OnSendBandwidthEstimateChanged(DataRate estimate) override; - // Sends periodic feedback if it is time to send it. - // Returns time until next call to Process should be made. - TimeDelta Process(Timestamp now); - - void OnBitrateChanged(int bitrate); - void SetTransportOverhead(DataSize overhead_per_packet); + TimeDelta Process(Timestamp now) override; private: void MaybeCullOldPackets(int64_t sequence_number, Timestamp arrival_time) @@ -62,12 +56,12 @@ class RemoteEstimatorProxy { const FeedbackRequest& feedback_request) RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_); - // Returns a Transport Feedback packet with information about as many packets - // that has been received between [`begin_sequence_number_incl`, + // Returns a Transport Feedback packet with information about as many + // packets that has been received between [`begin_sequence_number_incl`, // `end_sequence_number_excl`) that can fit in it. 
If `is_periodic_update`, // this represents sending a periodic feedback message, which will make it - // update the `periodic_window_start_seq_` variable with the first packet that - // was not included in the feedback packet, so that the next update can + // update the `periodic_window_start_seq_` variable with the first packet + // that was not included in the feedback packet, so that the next update can // continue from that sequence number. // // If no incoming packets were added, nullptr is returned. @@ -80,34 +74,25 @@ class RemoteEstimatorProxy { int64_t end_sequence_number_exclusive, bool is_periodic_update) RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_); - const TransportFeedbackSender feedback_sender_; + const RtcpSender feedback_sender_; Timestamp last_process_time_; Mutex lock_; - // `network_state_estimator_` may be null. - NetworkStateEstimator* const network_state_estimator_ - RTC_PT_GUARDED_BY(&lock_); uint32_t media_ssrc_ RTC_GUARDED_BY(&lock_); uint8_t feedback_packet_count_ RTC_GUARDED_BY(&lock_); SeqNumUnwrapper unwrapper_ RTC_GUARDED_BY(&lock_); - DataSize packet_overhead_ RTC_GUARDED_BY(&lock_); // The next sequence number that should be the start sequence number during - // periodic reporting. Will be absl::nullopt before the first seen packet. - absl::optional periodic_window_start_seq_ RTC_GUARDED_BY(&lock_); + // periodic reporting. Will be std::nullopt before the first seen packet. + std::optional periodic_window_start_seq_ RTC_GUARDED_BY(&lock_); // Packet arrival times, by sequence number. PacketArrivalTimeMap packet_arrival_times_ RTC_GUARDED_BY(&lock_); TimeDelta send_interval_ RTC_GUARDED_BY(&lock_); bool send_periodic_feedback_ RTC_GUARDED_BY(&lock_); - - // Unwraps absolute send times. - uint32_t previous_abs_send_time_ RTC_GUARDED_BY(&lock_); - Timestamp abs_send_timestamp_ RTC_GUARDED_BY(&lock_); - Timestamp last_arrival_time_with_abs_send_time_ RTC_GUARDED_BY(&lock_); }; } // namespace webrtc -#endif // MODULES_REMOTE_BITRATE_ESTIMATOR_REMOTE_ESTIMATOR_PROXY_H_ +#endif // MODULES_REMOTE_BITRATE_ESTIMATOR_TRANSPORT_SEQUENCE_NUMBER_FEEDBACK_GENERATOR_H_ diff --git a/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc b/modules/remote_bitrate_estimator/transport_sequence_number_feedback_generator_unittest.cc similarity index 75% rename from modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc rename to modules/remote_bitrate_estimator/transport_sequence_number_feedback_generator_unittest.cc index 47ad457be9..3c908af6bc 100644 --- a/modules/remote_bitrate_estimator/remote_estimator_proxy_unittest.cc +++ b/modules/remote_bitrate_estimator/transport_sequence_number_feedback_generator_unittest.cc @@ -8,18 +8,20 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/remote_bitrate_estimator/remote_estimator_proxy.h" +#include "modules/remote_bitrate_estimator/transport_sequence_number_feedback_generator.h" #include #include -#include +#include +#include -#include "absl/types/optional.h" -#include "api/transport/network_types.h" -#include "api/transport/test/mock_network_control.h" +#include "api/rtp_headers.h" +#include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" @@ -76,16 +78,15 @@ std::vector Timestamps( return timestamps; } -class RemoteEstimatorProxyTest : public ::testing::Test { +class TransportSequenceNumberFeedbackGeneneratorTest : public ::testing::Test { public: - RemoteEstimatorProxyTest() - : clock_(0), - proxy_(feedback_sender_.AsStdFunction(), &network_state_estimator_) {} + TransportSequenceNumberFeedbackGeneneratorTest() + : clock_(0), feedback_generator_(feedback_sender_.AsStdFunction()) {} protected: void IncomingPacket(uint16_t seq, Timestamp arrival_time, - absl::optional abs_send_time = absl::nullopt) { + std::optional abs_send_time = std::nullopt) { RtpHeaderExtensionMap map; map.Register(1); map.Register(2); @@ -95,35 +96,35 @@ class RemoteEstimatorProxyTest : public ::testing::Test { if (abs_send_time) { packet.SetExtension(*abs_send_time); } - proxy_.IncomingPacket(packet); + feedback_generator_.OnReceivedPacket(packet); } void IncomingPacketV2( uint16_t seq, Timestamp arrival_time, - absl::optional feedback_request = absl::nullopt) { + std::optional feedback_request = std::nullopt) { RtpHeaderExtensionMap map; map.Register(1); RtpPacketReceived packet(&map, arrival_time); packet.SetSsrc(kMediaSsrc); packet.SetExtension(seq, feedback_request); - proxy_.IncomingPacket(packet); + feedback_generator_.OnReceivedPacket(packet); } void Process() { clock_.AdvanceTime(kDefaultSendInterval); - proxy_.Process(clock_.CurrentTime()); + feedback_generator_.Process(clock_.CurrentTime()); } SimulatedClock clock_; MockFunction>)> feedback_sender_; - ::testing::NiceMock network_state_estimator_; - RemoteEstimatorProxy proxy_; + TransportSequenceNumberFeedbackGenenerator feedback_generator_; }; -TEST_F(RemoteEstimatorProxyTest, SendsSinglePacketFeedback) { +TEST_F(TransportSequenceNumberFeedbackGeneneratorTest, + SendsSinglePacketFeedback) { IncomingPacket(kBaseSeq, kBaseTime); EXPECT_CALL(feedback_sender_, Call) @@ -143,7 +144,7 @@ TEST_F(RemoteEstimatorProxyTest, SendsSinglePacketFeedback) { Process(); } -TEST_F(RemoteEstimatorProxyTest, DuplicatedPackets) { +TEST_F(TransportSequenceNumberFeedbackGeneneratorTest, DuplicatedPackets) { IncomingPacket(kBaseSeq, kBaseTime); IncomingPacket(kBaseSeq, kBaseTime + TimeDelta::Seconds(1)); @@ -165,7 +166,8 @@ TEST_F(RemoteEstimatorProxyTest, DuplicatedPackets) { Process(); } -TEST_F(RemoteEstimatorProxyTest, FeedbackWithMissingStart) { +TEST_F(TransportSequenceNumberFeedbackGeneneratorTest, + FeedbackWithMissingStart) { // First feedback. 
IncomingPacket(kBaseSeq, kBaseTime); IncomingPacket(kBaseSeq + 1, kBaseTime + TimeDelta::Seconds(1)); @@ -193,7 +195,8 @@ TEST_F(RemoteEstimatorProxyTest, FeedbackWithMissingStart) { Process(); } -TEST_F(RemoteEstimatorProxyTest, SendsFeedbackWithVaryingDeltas) { +TEST_F(TransportSequenceNumberFeedbackGeneneratorTest, + SendsFeedbackWithVaryingDeltas) { IncomingPacket(kBaseSeq, kBaseTime); IncomingPacket(kBaseSeq + 1, kBaseTime + kMaxSmallDelta); IncomingPacket(kBaseSeq + 2, @@ -219,7 +222,8 @@ TEST_F(RemoteEstimatorProxyTest, SendsFeedbackWithVaryingDeltas) { Process(); } -TEST_F(RemoteEstimatorProxyTest, SendsFragmentedFeedback) { +TEST_F(TransportSequenceNumberFeedbackGeneneratorTest, + SendsFragmentedFeedback) { static constexpr TimeDelta kTooLargeDelta = rtcp::TransportFeedback::kDeltaTick * (1 << 16); @@ -256,7 +260,8 @@ TEST_F(RemoteEstimatorProxyTest, SendsFragmentedFeedback) { Process(); } -TEST_F(RemoteEstimatorProxyTest, HandlesReorderingAndWrap) { +TEST_F(TransportSequenceNumberFeedbackGeneneratorTest, + HandlesReorderingAndWrap) { const TimeDelta kDelta = TimeDelta::Seconds(1); const uint16_t kLargeSeq = 62762; IncomingPacket(kBaseSeq, kBaseTime); @@ -278,7 +283,8 @@ TEST_F(RemoteEstimatorProxyTest, HandlesReorderingAndWrap) { Process(); } -TEST_F(RemoteEstimatorProxyTest, HandlesMalformedSequenceNumbers) { +TEST_F(TransportSequenceNumberFeedbackGeneneratorTest, + HandlesMalformedSequenceNumbers) { // This test generates incoming packets with large jumps in sequence numbers. // When unwrapped, the sequeunce numbers of these 30 incoming packets, will // span a range of roughly 650k packets. Test that we only send feedback for @@ -308,7 +314,8 @@ TEST_F(RemoteEstimatorProxyTest, HandlesMalformedSequenceNumbers) { Process(); } -TEST_F(RemoteEstimatorProxyTest, HandlesBackwardsWrappingSequenceNumbers) { +TEST_F(TransportSequenceNumberFeedbackGeneneratorTest, + HandlesBackwardsWrappingSequenceNumbers) { // This test is like HandlesMalformedSequenceNumbers but for negative wrap // arounds. Test that we only send feedback for the packets with highest // sequence numbers. Test for regression found in chromium:949020. 
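For orientation, a minimal usage sketch of the renamed feedback generator that these tests exercise (the counting RtcpSender lambda and the CountFeedbackForOnePacket helper are hypothetical; the construction and call sequence mirror the test fixture above):

// Sketch only; not part of the patch. Shows the OnReceivedPacket/Process flow
// of TransportSequenceNumberFeedbackGenenerator with a trivial RTCP sender.
#include <cstddef>
#include <memory>
#include <vector>

#include "api/units/time_delta.h"
#include "modules/remote_bitrate_estimator/transport_sequence_number_feedback_generator.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/source/rtcp_packet.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "system_wrappers/include/clock.h"

namespace webrtc {

size_t CountFeedbackForOnePacket() {
  size_t feedback_packets = 0;
  // The RtcpSender callback is handed the generated RTCP feedback packets.
  TransportSequenceNumberFeedbackGenenerator generator(
      [&](std::vector<std::unique_ptr<rtcp::RtcpPacket>> packets) {
        feedback_packets += packets.size();
      });

  SimulatedClock clock(/*initial_time_us=*/0);
  RtpHeaderExtensionMap extensions;
  extensions.Register<TransportSequenceNumber>(/*id=*/1);

  // Packets must carry a transport-wide sequence number extension to be
  // reported in feedback.
  RtpPacketReceived packet(&extensions, /*arrival_time=*/clock.CurrentTime());
  packet.SetSsrc(0x12345678);
  packet.SetExtension<TransportSequenceNumber>(1);
  generator.OnReceivedPacket(packet);

  // Process() sends periodic feedback once enough time has passed and returns
  // the delay until it should be called again.
  clock.AdvanceTime(TimeDelta::Millis(100));
  generator.Process(clock.CurrentTime());
  return feedback_packets;
}

}  // namespace webrtc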
@@ -337,7 +344,8 @@ TEST_F(RemoteEstimatorProxyTest, HandlesBackwardsWrappingSequenceNumbers) { Process(); } -TEST_F(RemoteEstimatorProxyTest, ResendsTimestampsOnReordering) { +TEST_F(TransportSequenceNumberFeedbackGeneneratorTest, + ResendsTimestampsOnReordering) { IncomingPacket(kBaseSeq, kBaseTime); IncomingPacket(kBaseSeq + 2, kBaseTime + TimeDelta::Millis(2)); @@ -380,7 +388,8 @@ TEST_F(RemoteEstimatorProxyTest, ResendsTimestampsOnReordering) { Process(); } -TEST_F(RemoteEstimatorProxyTest, RemovesTimestampsOutOfScope) { +TEST_F(TransportSequenceNumberFeedbackGeneneratorTest, + RemovesTimestampsOutOfScope) { const Timestamp kTimeoutTime = kBaseTime + kBackWindow; IncomingPacket(kBaseSeq + 2, kBaseTime); @@ -438,42 +447,57 @@ TEST_F(RemoteEstimatorProxyTest, RemovesTimestampsOutOfScope) { Process(); } -TEST_F(RemoteEstimatorProxyTest, TimeUntilNextProcessIsDefaultOnUnkownBitrate) { - EXPECT_EQ(proxy_.Process(clock_.CurrentTime()), kDefaultSendInterval); +TEST_F(TransportSequenceNumberFeedbackGeneneratorTest, + TimeUntilNextProcessIsDefaultOnUnkownBitrate) { + EXPECT_EQ(feedback_generator_.Process(clock_.CurrentTime()), + kDefaultSendInterval); } -TEST_F(RemoteEstimatorProxyTest, TimeUntilNextProcessIsMinIntervalOn300kbps) { - proxy_.OnBitrateChanged(300'000); - EXPECT_EQ(proxy_.Process(clock_.CurrentTime()), kMinSendInterval); +TEST_F(TransportSequenceNumberFeedbackGeneneratorTest, + TimeUntilNextProcessIsMinIntervalOn300kbps) { + feedback_generator_.OnSendBandwidthEstimateChanged( + DataRate::BitsPerSec(300'000)); + EXPECT_EQ(feedback_generator_.Process(clock_.CurrentTime()), + kMinSendInterval); } -TEST_F(RemoteEstimatorProxyTest, TimeUntilNextProcessIsMaxIntervalOn0kbps) { +TEST_F(TransportSequenceNumberFeedbackGeneneratorTest, + TimeUntilNextProcessIsMaxIntervalOn0kbps) { // TimeUntilNextProcess should be limited by `kMaxSendIntervalMs` when // bitrate is small. We choose 0 bps as a special case, which also tests // erroneous behaviors like division-by-zero. - proxy_.OnBitrateChanged(0); - EXPECT_EQ(proxy_.Process(clock_.CurrentTime()), kMaxSendInterval); + feedback_generator_.OnSendBandwidthEstimateChanged(DataRate::Zero()); + EXPECT_EQ(feedback_generator_.Process(clock_.CurrentTime()), + kMaxSendInterval); } -TEST_F(RemoteEstimatorProxyTest, TimeUntilNextProcessIsMaxIntervalOn20kbps) { - proxy_.OnBitrateChanged(20'000); - EXPECT_EQ(proxy_.Process(clock_.CurrentTime()), kMaxSendInterval); +TEST_F(TransportSequenceNumberFeedbackGeneneratorTest, + TimeUntilNextProcessIsMaxIntervalOn20kbps) { + feedback_generator_.OnSendBandwidthEstimateChanged( + DataRate::BitsPerSec(20'000)); + EXPECT_EQ(feedback_generator_.Process(clock_.CurrentTime()), + kMaxSendInterval); } -TEST_F(RemoteEstimatorProxyTest, TwccReportsUse5PercentOfAvailableBandwidth) { - proxy_.OnBitrateChanged(80'000); +TEST_F(TransportSequenceNumberFeedbackGeneneratorTest, + TwccReportsUse5PercentOfAvailableBandwidth) { + feedback_generator_.OnSendBandwidthEstimateChanged( + DataRate::BitsPerSec(80'000)); // 80kbps * 0.05 = TwccReportSize(68B * 8b/B) * 1000ms / SendInterval(136ms) - EXPECT_EQ(proxy_.Process(clock_.CurrentTime()), TimeDelta::Millis(136)); + EXPECT_EQ(feedback_generator_.Process(clock_.CurrentTime()), + TimeDelta::Millis(136)); } ////////////////////////////////////////////////////////////////////////////// // Tests for the extended protocol where the feedback is explicitly requested // by the sender. 
////////////////////////////////////////////////////////////////////////////// -typedef RemoteEstimatorProxyTest RemoteEstimatorProxyOnRequestTest; +typedef TransportSequenceNumberFeedbackGeneneratorTest + RemoteEstimatorProxyOnRequestTest; TEST_F(RemoteEstimatorProxyOnRequestTest, DisablesPeriodicProcess) { IncomingPacketV2(kBaseSeq, kBaseTime); - EXPECT_EQ(proxy_.Process(clock_.CurrentTime()), TimeDelta::PlusInfinity()); + EXPECT_EQ(feedback_generator_.Process(clock_.CurrentTime()), + TimeDelta::PlusInfinity()); } TEST_F(RemoteEstimatorProxyOnRequestTest, ProcessDoesNotSendFeedback) { @@ -571,100 +595,5 @@ TEST_F(RemoteEstimatorProxyOnRequestTest, kFivePacketsFeedbackRequest); } -TEST_F(RemoteEstimatorProxyTest, ReportsIncomingPacketToNetworkStateEstimator) { - const DataSize kPacketOverhead = DataSize::Bytes(38); - proxy_.SetTransportOverhead(kPacketOverhead); - - EXPECT_CALL(network_state_estimator_, OnReceivedPacket) - .WillOnce([&](const PacketResult& packet) { - EXPECT_EQ(packet.receive_time, kBaseTime); - EXPECT_GT(packet.sent_packet.size, kPacketOverhead); - // Expect first send time to be equal to the arrival time. - EXPECT_EQ(packet.sent_packet.send_time, kBaseTime); - }); - IncomingPacket(kBaseSeq, kBaseTime, AbsoluteSendTime::To24Bits(kBaseTime)); -} - -TEST_F(RemoteEstimatorProxyTest, IncomingPacketHandlesWrapInAbsSendTime) { - // abs send time use 24bit precision. - const uint32_t kFirstAbsSendTime = - AbsoluteSendTime::To24Bits(Timestamp::Millis((1 << 24) - 30)); - // Second abs send time has wrapped. - const uint32_t kSecondAbsSendTime = - AbsoluteSendTime::To24Bits(Timestamp::Millis(1 << 24)); - const TimeDelta kExpectedAbsSendTimeDelta = TimeDelta::Millis(30); - - Timestamp first_send_timestamp = Timestamp::Zero(); - EXPECT_CALL(network_state_estimator_, OnReceivedPacket) - .WillOnce([&](const PacketResult& packet) { - EXPECT_EQ(packet.receive_time, kBaseTime); - first_send_timestamp = packet.sent_packet.send_time; - }); - IncomingPacket(kBaseSeq, kBaseTime, kFirstAbsSendTime); - - EXPECT_CALL(network_state_estimator_, OnReceivedPacket) - .WillOnce([&](const PacketResult& packet) { - EXPECT_EQ(packet.receive_time, kBaseTime + TimeDelta::Millis(123)); - EXPECT_EQ(packet.sent_packet.send_time.ms(), - (first_send_timestamp + kExpectedAbsSendTimeDelta).ms()); - }); - IncomingPacket(kBaseSeq + 1, kBaseTime + TimeDelta::Millis(123), - kSecondAbsSendTime); -} - -TEST_F(RemoteEstimatorProxyTest, IncomingPacketHandlesReorderedPackets) { - const uint32_t kFirstAbsSendTime = - AbsoluteSendTime::To24Bits(Timestamp::Millis((1 << 12))); - Timestamp first_send_timestamp = Timestamp::Zero(); - EXPECT_CALL(network_state_estimator_, OnReceivedPacket) - .WillOnce([&](const PacketResult& packet) { - EXPECT_EQ(packet.receive_time, kBaseTime); - first_send_timestamp = packet.sent_packet.send_time; - }); - IncomingPacket(kBaseSeq + 1, kBaseTime, kFirstAbsSendTime); - - const TimeDelta kExpectedAbsSendTimeDelta = -TimeDelta::Millis(30); - const uint32_t kSecondAbsSendTime = AbsoluteSendTime::To24Bits( - Timestamp::Millis(1 << 12) + kExpectedAbsSendTimeDelta); - EXPECT_CALL(network_state_estimator_, OnReceivedPacket) - .WillOnce([&](const PacketResult& packet) { - EXPECT_EQ(packet.sent_packet.send_time.ms(), - (first_send_timestamp + kExpectedAbsSendTimeDelta).ms()); - }); - IncomingPacket(kBaseSeq, kBaseTime + TimeDelta::Millis(123), - kSecondAbsSendTime); -} - -TEST_F(RemoteEstimatorProxyTest, - IncomingPacketResetSendTimeToArrivalTimeAfterLargeArrivaltimeDelta) { - const uint32_t 
kFirstAbsSendTime = - AbsoluteSendTime::To24Bits(Timestamp::Millis((1 << 12))); - EXPECT_CALL(network_state_estimator_, OnReceivedPacket) - .WillOnce([&](const PacketResult& packet) { - EXPECT_EQ(packet.receive_time, kBaseTime); - EXPECT_EQ(packet.sent_packet.send_time, kBaseTime); - }); - IncomingPacket(kBaseSeq + 1, kBaseTime, kFirstAbsSendTime); - - EXPECT_CALL(network_state_estimator_, OnReceivedPacket) - .WillOnce([&](const PacketResult& packet) { - EXPECT_EQ(packet.receive_time, kBaseTime + TimeDelta::Seconds(20)); - EXPECT_EQ(packet.sent_packet.send_time, - kBaseTime + TimeDelta::Seconds(20)); - }); - IncomingPacket(kBaseSeq, kBaseTime + TimeDelta::Seconds(20), - kFirstAbsSendTime + 123); -} - -TEST_F(RemoteEstimatorProxyTest, SendTransportFeedbackAndNetworkStateUpdate) { - IncomingPacket(kBaseSeq, kBaseTime, - AbsoluteSendTime::To24Bits(kBaseTime - TimeDelta::Millis(1))); - - EXPECT_CALL(network_state_estimator_, GetCurrentEstimate()) - .WillOnce(Return(NetworkStateEstimate())); - EXPECT_CALL(feedback_sender_, Call(SizeIs(2))); - Process(); -} - } // namespace } // namespace webrtc diff --git a/modules/rtp_rtcp/BUILD.gn b/modules/rtp_rtcp/BUILD.gn index 0fc9931f39..96ccf3713c 100644 --- a/modules/rtp_rtcp/BUILD.gn +++ b/modules/rtp_rtcp/BUILD.gn @@ -21,14 +21,15 @@ rtc_library("rtp_rtcp_format") { "include/rtcp_statistics.h", "include/rtp_cvo.h", "include/rtp_header_extension_map.h", - "include/rtp_packet_sender.h", "include/rtp_rtcp_defines.h", "source/byte_io.h", + "source/corruption_detection_extension.h", "source/rtcp_packet.h", "source/rtcp_packet/app.h", "source/rtcp_packet/bye.h", "source/rtcp_packet/common_header.h", "source/rtcp_packet/compound_packet.h", + "source/rtcp_packet/congestion_control_feedback.h", "source/rtcp_packet/dlrr.h", "source/rtcp_packet/extended_reports.h", "source/rtcp_packet/fir.h", @@ -63,11 +64,13 @@ rtc_library("rtp_rtcp_format") { sources = [ "include/report_block_data.cc", "include/rtp_rtcp_defines.cc", + "source/corruption_detection_extension.cc", "source/rtcp_packet.cc", "source/rtcp_packet/app.cc", "source/rtcp_packet/bye.cc", "source/rtcp_packet/common_header.cc", "source/rtcp_packet/compound_packet.cc", + "source/rtcp_packet/congestion_control_feedback.cc", "source/rtcp_packet/dlrr.cc", "source/rtcp_packet/extended_reports.cc", "source/rtcp_packet/fir.cc", @@ -112,14 +115,17 @@ rtc_library("rtp_rtcp_format") { "../../api:function_view", "../../api:refcountedbase", "../../api:rtp_headers", + "../../api:rtp_packet_sender", # For compatibility with downstream projects "../../api:rtp_parameters", "../../api:scoped_refptr", "../../api/audio_codecs:audio_codecs_api", "../../api/transport:network_control", + "../../api/transport/rtp:corruption_detection_message", "../../api/transport/rtp:dependency_descriptor", "../../api/units:data_rate", "../../api/units:time_delta", "../../api/units:timestamp", + "../../api/video:render_resolution", "../../api/video:video_layers_allocation", "../../api/video:video_rtp_headers", "../../rtc_base:bit_buffer", @@ -131,17 +137,16 @@ rtc_library("rtp_rtcp_format") { "../../rtc_base:event_tracer", "../../rtc_base:logging", "../../rtc_base:macromagic", + "../../rtc_base:safe_compare", "../../rtc_base:safe_conversions", "../../rtc_base:stringutils", + "../../rtc_base/network:ecn_marking", "../../system_wrappers", "../video_coding:codec_globals_headers", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/strings", - 
"//third_party/abseil-cpp/absl/types:optional", - "//third_party/abseil-cpp/absl/types:variant", + "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -209,6 +214,7 @@ rtc_library("rtp_rtcp") { "source/rtp_header_extension_size.h", "source/rtp_packet_history.cc", "source/rtp_packet_history.h", + "source/rtp_packet_send_info.cc", "source/rtp_packetizer_av1.cc", "source/rtp_packetizer_av1.h", "source/rtp_rtcp_config.h", @@ -231,8 +237,6 @@ rtc_library("rtp_rtcp") { "source/rtp_video_stream_receiver_frame_transformer_delegate.h", "source/source_tracker.cc", "source/source_tracker.h", - "source/time_util.cc", - "source/time_util.h", "source/tmmbr_help.cc", "source/tmmbr_help.h", "source/ulpfec_generator.cc", @@ -258,14 +262,19 @@ rtc_library("rtp_rtcp") { "source/video_rtp_depacketizer_vp9.h", ] - if (rtc_enable_bwe_test_logging) { - defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=1" ] - } else { - defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0" ] + if (rtc_use_h265) { + sources += [ + "source/rtp_packet_h265_common.h", + "source/rtp_packetizer_h265.cc", + "source/rtp_packetizer_h265.h", + "source/video_rtp_depacketizer_h265.cc", + "source/video_rtp_depacketizer_h265.h", + ] } deps = [ ":leb128", + ":ntp_time_util", ":rtp_rtcp_format", ":rtp_video_header", "..:module_api_public", @@ -274,17 +283,22 @@ rtc_library("rtp_rtcp") { "../../api:field_trials_view", "../../api:frame_transformer_interface", "../../api:function_view", + "../../api:make_ref_counted", "../../api:rtp_headers", "../../api:rtp_packet_info", + "../../api:rtp_packet_sender", "../../api:rtp_parameters", "../../api:scoped_refptr", "../../api:sequence_checker", "../../api:transport_api", "../../api/audio_codecs:audio_codecs_api", "../../api/crypto:frame_encryptor_interface", + "../../api/environment", "../../api/rtc_event_log", + "../../api/task_queue", "../../api/task_queue:pending_task_safety_flag", - "../../api/task_queue:task_queue", + "../../api/transport:network_control", + "../../api/transport/rtp:corruption_detection_message", "../../api/transport/rtp:dependency_descriptor", "../../api/transport/rtp:rtp_source", "../../api/units:data_rate", @@ -301,11 +315,12 @@ rtc_library("rtp_rtcp") { "../../api/video:video_frame_type", "../../api/video:video_layers_allocation", "../../api/video:video_rtp_headers", + "../../api/video_codecs:video_codecs_api", "../../call:rtp_interfaces", - "../../call:video_stream_api", "../../common_video", + "../../common_video:corruption_detection_converters", + "../../common_video:frame_instrumentation_data", "../../logging:rtc_event_rtp_rtcp", - "../../modules/audio_coding:audio_coding_module_typedefs", "../../rtc_base:bit_buffer", "../../rtc_base:bitrate_tracker", "../../rtc_base:bitstream_reader", @@ -313,7 +328,6 @@ rtc_library("rtp_rtcp") { "../../rtc_base:byte_buffer", "../../rtc_base:checks", "../../rtc_base:copy_on_write_buffer", - "../../rtc_base:divide_round", "../../rtc_base:event_tracer", "../../rtc_base:frequency_tracker", "../../rtc_base:gtest_prod", @@ -336,16 +350,14 @@ rtc_library("rtp_rtcp") { "../../rtc_base/task_utils:repeating_task", "../../system_wrappers", "../../system_wrappers:metrics", - "../remote_bitrate_estimator", + "../audio_coding:audio_coding_module_typedefs", "../video_coding:codec_globals_headers", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/container:inlined_vector", 
"//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - "//third_party/abseil-cpp/absl/types:variant", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -358,13 +370,19 @@ rtc_source_set("rtp_rtcp_legacy") { "source/rtp_rtcp_impl.h", ] deps = [ + ":ntp_time_util", ":rtp_rtcp", ":rtp_rtcp_format", "..:module_fec_api", + "../../api:array_view", "../../api:rtp_headers", + "../../api:rtp_packet_sender", "../../api:transport_api", + "../../api/environment", "../../api/rtc_event_log", + "../../api/transport:network_control", "../../api/units:data_rate", + "../../api/units:time_delta", "../../api/units:timestamp", "../../api/video:video_bitrate_allocation", "../../logging:rtc_event_rtp_rtcp", @@ -375,12 +393,24 @@ rtc_source_set("rtp_rtcp_legacy") { "../../rtc_base:macromagic", "../../rtc_base/synchronization:mutex", "../../system_wrappers", - "../remote_bitrate_estimator", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("ntp_time_util") { + visibility = [ "*" ] + sources = [ + "source/ntp_time_util.cc", + "source/ntp_time_util.h", + ] + deps = [ + "../../api/units:time_delta", + "../../rtc_base:checks", + "../../rtc_base:divide_round", + "../../rtc_base:safe_conversions", + "../../rtc_base:timeutils", + "../../system_wrappers", ] } @@ -397,6 +427,7 @@ rtc_library("rtcp_transceiver") { "source/rtcp_transceiver_impl.cc", ] deps = [ + ":ntp_time_util", ":rtp_rtcp", ":rtp_rtcp_format", "../../api:array_view", @@ -406,6 +437,7 @@ rtc_library("rtcp_transceiver") { "../../api/units:time_delta", "../../api/units:timestamp", "../../api/video:video_bitrate_allocation", + "../../api/video:video_codec_constants", "../../rtc_base:checks", "../../rtc_base:copy_on_write_buffer", "../../rtc_base:divide_round", @@ -415,13 +447,10 @@ rtc_library("rtcp_transceiver") { "../../rtc_base/containers:flat_map", "../../rtc_base/task_utils:repeating_task", "../../system_wrappers", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/cleanup", "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -438,12 +467,9 @@ rtc_library("rtp_video_header") { "../../api/video:video_frame_metadata", "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", + "../../common_video:frame_instrumentation_data", "../../modules/video_coding:codec_globals_headers", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/types:optional", - "//third_party/abseil-cpp/absl/types:variant", ] } @@ -452,9 +478,14 @@ rtc_source_set("rtp_video_header_unittest") { sources = [ "source/rtp_video_header_unittest.cc" ] deps = [ ":rtp_video_header", + "../../api/transport/rtp:dependency_descriptor", + "../../api/video:video_frame", "../../api/video:video_frame_metadata", "../../api/video:video_frame_type", + "../../api/video:video_rtp_headers", "../../test:test_support", + "../video_coding:codec_globals_headers", + "//third_party/abseil-cpp/absl/container:inlined_vector", ] } @@ -467,7 +498,9 @@ rtc_library("fec_test_helper") { deps = [ ":rtp_rtcp", ":rtp_rtcp_format", + "../../api:rtp_headers", "../../rtc_base:checks", + "../../rtc_base:copy_on_write_buffer", 
"../../rtc_base:random", ] } @@ -476,6 +509,7 @@ rtc_library("mock_rtp_rtcp") { testonly = true public = [ "mocks/mock_network_link_rtcp_observer.h", + "mocks/mock_network_state_estimator_observer.h", "mocks/mock_recovered_packet_receiver.h", "mocks/mock_rtcp_rtt_stats.h", "mocks/mock_rtp_rtcp.h", @@ -483,17 +517,17 @@ rtc_library("mock_rtp_rtcp") { deps = [ ":rtp_rtcp", ":rtp_rtcp_format", + "..:module_fec_api", "../../api:array_view", + "../../api:rtp_headers", + "../../api/transport:network_control", "../../api/units:data_rate", "../../api/units:time_delta", "../../api/units:timestamp", "../../api/video:video_bitrate_allocation", "../../rtc_base:checks", "../../test:test_support", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -517,6 +551,9 @@ if (rtc_include_tests) { deps = [ ":rtp_rtcp", + "..:module_fec_api", + "../../api:array_view", + "../../rtc_base:checks", "../../test:fileutils", "../../test:test_main", "../../test:test_support", @@ -532,6 +569,8 @@ if (rtc_include_tests) { deps = [ ":rtp_rtcp", ":rtp_rtcp_format", + "..:module_fec_api", + "../../rtc_base:checks", "../../rtc_base:random", "../../test:fileutils", "../../test:test_support", @@ -554,6 +593,7 @@ if (rtc_include_tests) { "source/flexfec_sender_unittest.cc", "source/leb128_unittest.cc", "source/nack_rtx_unittest.cc", + "source/ntp_time_util_unittest.cc", "source/packet_loss_stats_unittest.cc", "source/packet_sequencer_unittest.cc", "source/receive_statistics_unittest.cc", @@ -563,6 +603,7 @@ if (rtc_include_tests) { "source/rtcp_packet/bye_unittest.cc", "source/rtcp_packet/common_header_unittest.cc", "source/rtcp_packet/compound_packet_unittest.cc", + "source/rtcp_packet/congestion_control_feedback_unittest.cc", "source/rtcp_packet/dlrr_unittest.cc", "source/rtcp_packet/extended_reports_unittest.cc", "source/rtcp_packet/fir_unittest.cc", @@ -599,6 +640,7 @@ if (rtc_include_tests) { "source/rtp_header_extension_map_unittest.cc", "source/rtp_header_extension_size_unittest.cc", "source/rtp_packet_history_unittest.cc", + "source/rtp_packet_send_info_unittest.cc", "source/rtp_packet_unittest.cc", "source/rtp_packetizer_av1_unittest.cc", "source/rtp_rtcp_impl2_unittest.cc", @@ -613,7 +655,6 @@ if (rtc_include_tests) { "source/rtp_video_layers_allocation_extension_unittest.cc", "source/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc", "source/source_tracker_unittest.cc", - "source/time_util_unittest.cc", "source/ulpfec_generator_unittest.cc", "source/ulpfec_header_reader_writer_unittest.cc", "source/ulpfec_receiver_unittest.cc", @@ -624,33 +665,54 @@ if (rtc_include_tests) { "source/video_rtp_depacketizer_vp8_unittest.cc", "source/video_rtp_depacketizer_vp9_unittest.cc", ] + if (rtc_use_h265) { + sources += [ + "source/rtp_packetizer_h265_unittest.cc", + "source/video_rtp_depacketizer_h265_unittest.cc", + ] + } + deps = [ + ":corruption_detection_extension_unittest", ":fec_test_helper", ":frame_transformer_factory_unittest", ":leb128", ":mock_rtp_rtcp", + ":ntp_time_util", ":rtcp_transceiver", ":rtp_packetizer_av1_test_helper", ":rtp_rtcp", ":rtp_rtcp_format", ":rtp_rtcp_legacy", + ":rtp_video_header", ":rtp_video_header_unittest", + "..:module_fec_api", "../../api:array_view", "../../api:create_time_controller", "../../api:field_trials_registry", + "../../api:field_trials_view", "../../api:frame_transformer_factory", + "../../api:frame_transformer_interface", + 
"../../api:function_view", "../../api:make_ref_counted", "../../api:mock_frame_encryptor", + "../../api:mock_frame_transformer", "../../api:mock_transformable_video_frame", "../../api:rtp_headers", "../../api:rtp_packet_info", + "../../api:rtp_packet_sender", "../../api:rtp_parameters", "../../api:scoped_refptr", "../../api:time_controller", "../../api:transport_api", + "../../api/environment:environment", + "../../api/environment:environment_factory", "../../api/rtc_event_log", "../../api/task_queue", + "../../api/transport:network_control", + "../../api/transport/rtp:corruption_detection_message", "../../api/transport/rtp:dependency_descriptor", + "../../api/transport/rtp:rtp_source", "../../api/units:data_rate", "../../api/units:data_size", "../../api/units:frequency", @@ -661,12 +723,16 @@ if (rtc_include_tests) { "../../api/video:video_bitrate_allocator", "../../api/video:video_codec_constants", "../../api/video:video_frame", + "../../api/video:video_frame_metadata", + "../../api/video:video_frame_type", "../../api/video:video_layers_allocation", "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", + "../../call:rtp_interfaces", "../../call:rtp_receiver", - "../../call:video_stream_api", + "../../call:video_receive_stream_api", "../../common_video", + "../../common_video:frame_instrumentation_data", "../../common_video/generic_frame_descriptor", "../../common_video/test:utilities", "../../logging:mocks", @@ -685,22 +751,22 @@ if (rtc_include_tests) { "../../rtc_base:task_queue_for_test", "../../rtc_base:threading", "../../rtc_base:timeutils", + "../../rtc_base/network:ecn_marking", "../../system_wrappers", + "../../system_wrappers:metrics", "../../test:explicit_key_value_config", - "../../test:mock_frame_transformer", "../../test:mock_transport", "../../test:rtp_test_utils", "../../test:run_loop", "../../test:test_support", "../../test/time_controller:time_controller", + "../audio_coding:audio_coding_module_typedefs", "../video_coding:codec_globals_headers", - ] - absl_deps = [ + "//testing/gtest", "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -708,17 +774,29 @@ if (rtc_include_tests) { testonly = true sources = [ "source/frame_transformer_factory_unittest.cc" ] deps = [ + "../../api:array_view", "../../api:frame_transformer_factory", + "../../api:mock_frame_transformer", "../../api:mock_transformable_audio_frame", "../../api:mock_transformable_video_frame", "../../api:transport_api", - "../../call:video_stream_api", + "../../api/video:video_frame_metadata", + "../../call:video_receive_stream_api", "../../modules/rtp_rtcp", "../../rtc_base:rtc_event", - "../../test:mock_frame_transformer", "../../test:test_support", "../../video", + "//third_party/abseil-cpp/absl/memory", + ] + } + + rtc_library("corruption_detection_extension_unittest") { + testonly = true + sources = [ "source/corruption_detection_extension_unittest.cc" ] + deps = [ + ":rtp_rtcp_format", + "../../api/transport/rtp:corruption_detection_message", + "../../test:test_support", ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] } } diff --git a/modules/rtp_rtcp/include/flexfec_sender.h b/modules/rtp_rtcp/include/flexfec_sender.h index 8f21ab7517..9df7c89221 100644 --- a/modules/rtp_rtcp/include/flexfec_sender.h +++ 
b/modules/rtp_rtcp/include/flexfec_sender.h @@ -11,14 +11,20 @@ #ifndef MODULES_RTP_RTCP_INCLUDE_FLEXFEC_SENDER_H_ #define MODULES_RTP_RTCP_INCLUDE_FLEXFEC_SENDER_H_ +#include +#include #include +#include #include #include #include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/environment/environment.h" #include "api/rtp_parameters.h" +#include "api/units/data_rate.h" #include "api/units/timestamp.h" +#include "modules/include/module_fec_types.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_header_extension_size.h" @@ -27,10 +33,10 @@ #include "rtc_base/bitrate_tracker.h" #include "rtc_base/random.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { -class Clock; class RtpPacketToSend; // Note that this class is not thread safe, and thus requires external @@ -38,20 +44,20 @@ class RtpPacketToSend; class FlexfecSender : public VideoFecGenerator { public: - FlexfecSender(int payload_type, + FlexfecSender(const Environment& env, + int payload_type, uint32_t ssrc, uint32_t protected_media_ssrc, absl::string_view mid, const std::vector& rtp_header_extensions, - rtc::ArrayView extension_sizes, - const RtpState* rtp_state, - Clock* clock); + ArrayView extension_sizes, + const RtpState* rtp_state); ~FlexfecSender(); FecType GetFecType() const override { return VideoFecGenerator::FecType::kFlexFec; } - absl::optional FecSsrc() override { return ssrc_; } + std::optional FecSsrc() override { return ssrc_; } // Sets the FEC rate, max frames sent before FEC packets are sent, // and what type of generator matrices are used. @@ -72,11 +78,11 @@ class FlexfecSender : public VideoFecGenerator { DataRate CurrentFecRate() const override; // Only called on the VideoSendStream queue, after operation has shut down. - absl::optional GetRtpState() override; + std::optional GetRtpState() override; private: // Utility. - Clock* const clock_; + const Environment env_; Random random_; Timestamp last_generated_packet_ = Timestamp::MinusInfinity(); diff --git a/modules/rtp_rtcp/include/receive_statistics.h b/modules/rtp_rtcp/include/receive_statistics.h index 827fd3a7a8..32134faa70 100644 --- a/modules/rtp_rtcp/include/receive_statistics.h +++ b/modules/rtp_rtcp/include/receive_statistics.h @@ -11,13 +11,13 @@ #ifndef MODULES_RTP_RTCP_INCLUDE_RECEIVE_STATISTICS_H_ #define MODULES_RTP_RTCP_INCLUDE_RECEIVE_STATISTICS_H_ -#include +#include +#include #include +#include #include -#include "absl/types/optional.h" #include "call/rtp_packet_sink_interface.h" -#include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" @@ -41,7 +41,7 @@ class StreamStatistician { virtual RtpReceiveStats GetStats() const = 0; // Returns average over the stream life time. - virtual absl::optional GetFractionLostInPercent() const = 0; + virtual std::optional GetFractionLostInPercent() const = 0; // TODO(bugs.webrtc.org/10679): Delete, migrate users to the above GetStats // method (and extend RtpReceiveStats if needed). @@ -66,11 +66,6 @@ class ReceiveStatistics : public ReceiveStatisticsProvider, // Returns a pointer to the statistician of an ssrc. virtual StreamStatistician* GetStatistician(uint32_t ssrc) const = 0; - // TODO(bugs.webrtc.org/10669): Deprecated, delete as soon as downstream - // projects are updated. 
This method sets the max reordering threshold of all - // current and future streams. - virtual void SetMaxReorderingThreshold(int max_reordering_threshold) = 0; - // Sets the max reordering threshold in number of packets. virtual void SetMaxReorderingThreshold(uint32_t ssrc, int max_reordering_threshold) = 0; diff --git a/modules/rtp_rtcp/include/recovered_packet_receiver.h b/modules/rtp_rtcp/include/recovered_packet_receiver.h index 4e92c486e2..811bfc8cc2 100644 --- a/modules/rtp_rtcp/include/recovered_packet_receiver.h +++ b/modules/rtp_rtcp/include/recovered_packet_receiver.h @@ -11,7 +11,6 @@ #define MODULES_RTP_RTCP_INCLUDE_RECOVERED_PACKET_RECEIVER_H_ #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "rtc_base/checks.h" namespace webrtc { diff --git a/modules/rtp_rtcp/include/remote_ntp_time_estimator.h b/modules/rtp_rtcp/include/remote_ntp_time_estimator.h index 01d0c85f94..9c8404b38e 100644 --- a/modules/rtp_rtcp/include/remote_ntp_time_estimator.h +++ b/modules/rtp_rtcp/include/remote_ntp_time_estimator.h @@ -13,10 +13,12 @@ #include -#include "absl/types/optional.h" +#include + #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "rtc_base/numerics/moving_percentile_filter.h" +#include "system_wrappers/include/ntp_time.h" #include "system_wrappers/include/rtp_to_ntp_estimator.h" namespace webrtc { @@ -58,7 +60,7 @@ class RemoteNtpTimeEstimator { // local one. This is equal to local NTP clock - remote NTP clock. // The offset is returned in ntp time resolution, i.e. 1/2^32 sec ~= 0.2 ns. // Returns nullopt on failure. - absl::optional EstimateRemoteToLocalClockOffset(); + std::optional EstimateRemoteToLocalClockOffset(); private: Clock* clock_; diff --git a/modules/rtp_rtcp/include/report_block_data.cc b/modules/rtp_rtcp/include/report_block_data.cc index 0d4fed043f..90cd883be4 100644 --- a/modules/rtp_rtcp/include/report_block_data.cc +++ b/modules/rtp_rtcp/include/report_block_data.cc @@ -10,6 +10,11 @@ #include "modules/rtp_rtcp/include/report_block_data.h" +#include + +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "rtc_base/checks.h" namespace webrtc { @@ -21,9 +26,12 @@ TimeDelta ReportBlockData::jitter(int rtp_clock_rate_hz) const { return TimeDelta::Seconds(jitter()) / rtp_clock_rate_hz; } +// TODO: bugs.webrtc.org/370535296 - Remove the utc timestamp when linked +// issue is fixed. 
void ReportBlockData::SetReportBlock(uint32_t sender_ssrc, const rtcp::ReportBlock& report_block, - Timestamp report_block_timestamp_utc) { + Timestamp report_block_timestamp_utc, + Timestamp report_block_timestamp) { sender_ssrc_ = sender_ssrc; source_ssrc_ = report_block.source_ssrc(); fraction_lost_raw_ = report_block.fraction_lost(); @@ -31,6 +39,7 @@ void ReportBlockData::SetReportBlock(uint32_t sender_ssrc, extended_highest_sequence_number_ = report_block.extended_high_seq_num(); jitter_ = report_block.jitter(); report_block_timestamp_utc_ = report_block_timestamp_utc; + report_block_timestamp_ = report_block_timestamp; } void ReportBlockData::AddRoundTripTimeSample(TimeDelta rtt) { diff --git a/modules/rtp_rtcp/include/report_block_data.h b/modules/rtp_rtcp/include/report_block_data.h index 9987fc46b9..895ac0b00b 100644 --- a/modules/rtp_rtcp/include/report_block_data.h +++ b/modules/rtp_rtcp/include/report_block_data.h @@ -11,6 +11,9 @@ #ifndef MODULES_RTP_RTCP_INCLUDE_REPORT_BLOCK_DATA_H_ #define MODULES_RTP_RTCP_INCLUDE_REPORT_BLOCK_DATA_H_ +#include +#include + #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" @@ -38,7 +41,9 @@ class ReportBlockData { // The fraction of RTP data packets from 'source_ssrc()' lost since the // previous report block was sent. // Fraction loss in range [0.0, 1.0]. - float fraction_lost() const { return fraction_lost_raw() / 256.0; } + float fraction_lost() const { + return static_cast(fraction_lost_raw()) / 256.0f; + } // Fraction loss as was written in the raw packet: range is [0, 255] where 0 // represents no loss, and 255 represents 99.6% loss (255/256 * 100%). @@ -70,10 +75,15 @@ class ReportBlockData { TimeDelta jitter(int rtp_clock_rate_hz) const; // Time in utc epoch (Jan 1st, 1970) the report block was received. + // TODO: bugs.webrtc.org/370535296 - Remove the utc timestamp when linked + // issue is fixed. Timestamp report_block_timestamp_utc() const { return report_block_timestamp_utc_; } + // Monotonic time when the report block was received. + Timestamp report_block_timestamp() const { return report_block_timestamp_; } + // Round Trip Time measurments for given (sender_ssrc, source_ssrc) pair. // Min, max, sum, number of measurements are since beginning of the call. TimeDelta last_rtt() const { return last_rtt_; } @@ -89,13 +99,19 @@ class ReportBlockData { extended_highest_sequence_number_ = sn; } void set_jitter(uint32_t jitter) { jitter_ = jitter; } + // TODO: bugs.webrtc.org/370535296 - Remove the utc timestamp when linked + // issue is fixed. void set_report_block_timestamp_utc(Timestamp arrival_time) { report_block_timestamp_utc_ = arrival_time; } + void set_report_block_timestamp(Timestamp arrival_time) { + report_block_timestamp_ = arrival_time; + } void SetReportBlock(uint32_t sender_ssrc, const rtcp::ReportBlock& report_block, - Timestamp report_block_timestamp_utc); + Timestamp report_block_timestamp_utc, + Timestamp report_block_timestamp); void AddRoundTripTimeSample(TimeDelta rtt); private: @@ -105,7 +121,10 @@ class ReportBlockData { int32_t cumulative_lost_ = 0; uint32_t extended_highest_sequence_number_ = 0; uint32_t jitter_ = 0; + // TODO: bugs.webrtc.org/370535296 - Remove the utc timestamp when linked + // issue is fixed. 
Timestamp report_block_timestamp_utc_ = Timestamp::Zero(); + Timestamp report_block_timestamp_ = Timestamp::Zero(); TimeDelta last_rtt_ = TimeDelta::Zero(); TimeDelta sum_rtt_ = TimeDelta::Zero(); size_t num_rtts_ = 0; diff --git a/modules/rtp_rtcp/include/rtp_cvo.h b/modules/rtp_rtcp/include/rtp_cvo.h index 497946d6a7..2b31466a06 100644 --- a/modules/rtp_rtcp/include/rtp_cvo.h +++ b/modules/rtp_rtcp/include/rtp_cvo.h @@ -10,6 +10,8 @@ #ifndef MODULES_RTP_RTCP_INCLUDE_RTP_CVO_H_ #define MODULES_RTP_RTCP_INCLUDE_RTP_CVO_H_ +#include + #include "api/video/video_rotation.h" #include "rtc_base/checks.h" diff --git a/modules/rtp_rtcp/include/rtp_header_extension_map.h b/modules/rtp_rtcp/include/rtp_header_extension_map.h index ff1ea61f52..2975c4aebb 100644 --- a/modules/rtp_rtcp/include/rtp_header_extension_map.h +++ b/modules/rtp_rtcp/include/rtp_header_extension_map.h @@ -13,7 +13,6 @@ #include -#include #include "absl/strings/string_view.h" #include "api/array_view.h" @@ -30,9 +29,9 @@ class RtpHeaderExtensionMap { RtpHeaderExtensionMap(); explicit RtpHeaderExtensionMap(bool extmap_allow_mixed); - explicit RtpHeaderExtensionMap(rtc::ArrayView extensions); + explicit RtpHeaderExtensionMap(ArrayView extensions); - void Reset(rtc::ArrayView extensions); + void Reset(ArrayView extensions); template bool Register(int id) { diff --git a/modules/rtp_rtcp/include/rtp_rtcp.h b/modules/rtp_rtcp/include/rtp_rtcp.h index e56d5ef637..84a3c40b03 100644 --- a/modules/rtp_rtcp/include/rtp_rtcp.h +++ b/modules/rtp_rtcp/include/rtp_rtcp.h @@ -13,21 +13,18 @@ #include -#include "absl/base/attributes.h" +#include "api/environment/environment.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" namespace webrtc { -class ABSL_DEPRECATED("") RtpRtcp : public RtpRtcpInterface { +// A deprecated version of the RtpRtcp module. +class [[deprecated("bugs.webrtc.org/42224904")]] RtpRtcp + : public RtpRtcpInterface { public: - // Instantiates a deprecated version of the RtpRtcp module. - static std::unique_ptr ABSL_DEPRECATED("") - Create(const Configuration& configuration) { - return DEPRECATED_Create(configuration); - } - - static std::unique_ptr DEPRECATED_Create( - const Configuration& configuration); + [[deprecated("bugs.webrtc.org/42224904")]] // + static std::unique_ptr + Create(const Environment& env, const Configuration& configuration); // Process any pending tasks such as timeouts. 
virtual void Process() = 0; diff --git a/modules/rtp_rtcp/include/rtp_rtcp_defines.cc b/modules/rtp_rtcp/include/rtp_rtcp_defines.cc index 0d91ab9546..1e11367512 100644 --- a/modules/rtp_rtcp/include/rtp_rtcp_defines.cc +++ b/modules/rtp_rtcp/include/rtp_rtcp_defines.cc @@ -12,10 +12,11 @@ #include -#include +#include #include "absl/algorithm/container.h" -#include "api/array_view.h" +#include "absl/strings/string_view.h" +#include "api/units/time_delta.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" diff --git a/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/modules/rtp_rtcp/include/rtp_rtcp_defines.h index 698f284fa5..aae48bc268 100644 --- a/modules/rtp_rtcp/include/rtp_rtcp_defines.h +++ b/modules/rtp_rtcp/include/rtp_rtcp_defines.h @@ -13,23 +13,24 @@ #include +#include +#include +#include #include +#include #include #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "absl/types/variant.h" #include "api/array_view.h" -#include "api/audio_codecs/audio_format.h" -#include "api/rtp_headers.h" #include "api/transport/network_types.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "modules/rtp_rtcp/include/report_block_data.h" -#include "modules/rtp_rtcp/source/rtcp_packet/remote_estimate.h" -#include "system_wrappers/include/clock.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" +#include "rtc_base/checks.h" #define RTCP_CNAME_SIZE 256 // RFC 3550 page 44, including null termination #define IP_PACKET_SIZE 1500 // we assume ethernet @@ -82,6 +83,7 @@ enum RTPExtensionType : int { kRtpExtensionDependencyDescriptor, kRtpExtensionColorSpace, kRtpExtensionVideoFrameTrackingId, + kRtpExtensionCorruptionDetection, kRtpExtensionNumberOfExtensions // Must be the last entity in the enum. }; @@ -106,7 +108,7 @@ enum RTCPPacketType : uint32_t { kRtcpXrReceiverReferenceTime = 0x40000, kRtcpXrDlrrReportBlock = 0x80000, kRtcpTransportFeedback = 0x100000, - kRtcpXrTargetBitrate = 0x200000 + kRtcpXrTargetBitrate = 0x200000, }; enum class KeyFrameReqMethod : uint8_t { @@ -161,16 +163,21 @@ class NetworkLinkRtcpObserver { public: virtual ~NetworkLinkRtcpObserver() = default; - virtual void OnTransportFeedback(Timestamp receive_time, - const rtcp::TransportFeedback& feedback) {} - virtual void OnReceiverEstimatedMaxBitrate(Timestamp receive_time, - DataRate bitrate) {} + virtual void OnTransportFeedback( + Timestamp /* receive_time */, + const rtcp::TransportFeedback& /* feedback */) {} + // RFC 8888 congestion control feedback. + virtual void OnCongestionControlFeedback( + Timestamp /* receive_time */, + const rtcp::CongestionControlFeedback& /* feedback */) {} + virtual void OnReceiverEstimatedMaxBitrate(Timestamp /* receive_time */, + DataRate /* bitrate */) {} // Called on an RTCP packet with sender or receiver reports with non zero // report blocks. Report blocks are combined from all reports into one array. - virtual void OnReport(Timestamp receive_time, - rtc::ArrayView report_blocks) {} - virtual void OnRttUpdate(Timestamp receive_time, TimeDelta rtt) {} + virtual void OnReport(Timestamp /* receive_time */, + ArrayView /* report_blocks */) {} + virtual void OnRttUpdate(Timestamp /* receive_time */, TimeDelta /* rtt */) {} }; // NOTE! `kNumMediaTypes` must be kept in sync with RtpPacketMediaType! 
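The rtp_rtcp_defines.h hunk above extends NetworkLinkRtcpObserver with OnCongestionControlFeedback for RFC 8888 congestion control feedback, next to the existing transport feedback, REMB, report block and RTT hooks, all of which keep empty default bodies. A minimal sketch of a consumer overriding the new hook follows; only the callback signatures come from the header above, while the class name and the log lines are invented for illustration.

#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h"
#include "rtc_base/logging.h"

namespace webrtc {

// Illustrative observer: a real congestion controller would translate the
// per-packet arrival info carried by the RFC 8888 report into its own
// feedback structures instead of merely logging it.
class LoggingNetworkLinkRtcpObserver : public NetworkLinkRtcpObserver {
 public:
  void OnCongestionControlFeedback(
      Timestamp receive_time,
      const rtcp::CongestionControlFeedback& /* feedback */) override {
    RTC_LOG(LS_VERBOSE) << "RFC 8888 feedback received at "
                        << receive_time.ms() << " ms";
  }

  void OnRttUpdate(Timestamp /* receive_time */, TimeDelta rtt) override {
    RTC_LOG(LS_VERBOSE) << "RTT sample: " << rtt.ms() << " ms";
  }
};

}  // namespace webrtc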
@@ -185,12 +192,15 @@ enum class RtpPacketMediaType : size_t { }; struct RtpPacketSendInfo { + static RtpPacketSendInfo From(const RtpPacketToSend& rtp_packet_to_send, + const PacedPacketInfo& pacing_info); + uint16_t transport_sequence_number = 0; - absl::optional media_ssrc; + std::optional media_ssrc; uint16_t rtp_sequence_number = 0; // Only valid if `media_ssrc` is set. uint32_t rtp_timestamp = 0; size_t length = 0; - absl::optional packet_type; + std::optional packet_type; PacedPacketInfo pacing_info; }; @@ -227,7 +237,7 @@ class StreamFeedbackObserver { // `rtp_sequence_number` and `is_retransmission` are only valid if `ssrc` // is populated. - absl::optional ssrc; + std::optional ssrc; uint16_t rtp_sequence_number; bool is_retransmission; }; @@ -340,16 +350,8 @@ struct StreamDataCounters { }; class RtpSendRates { - template - constexpr std::array make_zero_array( - std::index_sequence) { - return {{(static_cast(Is), DataRate::Zero())...}}; - } - public: - RtpSendRates() - : send_rates_( - make_zero_array(std::make_index_sequence())) {} + constexpr RtpSendRates() = default; RtpSendRates(const RtpSendRates& rhs) = default; RtpSendRates& operator=(const RtpSendRates&) = default; @@ -372,6 +374,10 @@ class StreamDataCountersCallback { public: virtual ~StreamDataCountersCallback() {} + // TODO: webrtc:40644448 - Make this pure virtual. + virtual StreamDataCounters GetDataCounters(uint32_t ssrc) const { + RTC_CHECK_NOTREACHED(); + } virtual void DataCountersUpdated(const StreamDataCounters& counters, uint32_t ssrc) = 0; }; @@ -389,7 +395,7 @@ struct RtpReceiveStats { // Time of the last packet received in unix epoch, // i.e. Timestamp::Zero() represents 1st Jan 1970 00:00 - absl::optional last_packet_received; + std::optional last_packet_received; // Counters exposed in RTCInboundRtpStreamStats, see // https://w3c.github.io/webrtc-stats/#inboundrtpstats-dict* @@ -406,23 +412,12 @@ class BitrateStatisticsObserver { uint32_t ssrc) = 0; }; -// Callback, used to notify an observer whenever the send-side delay is updated. -class SendSideDelayObserver { - public: - virtual ~SendSideDelayObserver() {} - virtual void SendSideDelayUpdated(int avg_delay_ms, - int max_delay_ms, - uint32_t ssrc) = 0; -}; - // Callback, used to notify an observer whenever a packet is sent to the // transport. -// TODO(asapersson): This class will remove the need for SendSideDelayObserver. -// Remove SendSideDelayObserver once possible. 
class SendPacketObserver { public: virtual ~SendPacketObserver() = default; - virtual void OnSendPacket(uint16_t packet_id, + virtual void OnSendPacket(std::optional packet_id, Timestamp capture_time, uint32_t ssrc) = 0; }; diff --git a/modules/rtp_rtcp/mocks/mock_network_link_rtcp_observer.h b/modules/rtp_rtcp/mocks/mock_network_link_rtcp_observer.h index 16b7db7892..f2775b79d0 100644 --- a/modules/rtp_rtcp/mocks/mock_network_link_rtcp_observer.h +++ b/modules/rtp_rtcp/mocks/mock_network_link_rtcp_observer.h @@ -17,6 +17,7 @@ #include "api/units/timestamp.h" #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" #include "test/gmock.h" @@ -32,6 +33,11 @@ class MockNetworkLinkRtcpObserver : public NetworkLinkRtcpObserver { OnTransportFeedback, (Timestamp receive_time, const rtcp::TransportFeedback& feedback), (override)); + MOCK_METHOD(void, + OnCongestionControlFeedback, + (Timestamp receive_time, + const rtcp::CongestionControlFeedback& feedback), + (override)); MOCK_METHOD(void, OnReceiverEstimatedMaxBitrate, (Timestamp receive_time, DataRate bitrate), @@ -39,7 +45,7 @@ class MockNetworkLinkRtcpObserver : public NetworkLinkRtcpObserver { MOCK_METHOD(void, OnReport, (Timestamp receive_time, - rtc::ArrayView report_blocks), + ArrayView report_blocks), (override)); }; diff --git a/modules/rtp_rtcp/mocks/mock_network_state_estimator_observer.h b/modules/rtp_rtcp/mocks/mock_network_state_estimator_observer.h new file mode 100644 index 0000000000..f853c5e884 --- /dev/null +++ b/modules/rtp_rtcp/mocks/mock_network_state_estimator_observer.h @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef MODULES_RTP_RTCP_MOCKS_MOCK_NETWORK_STATE_ESTIMATOR_OBSERVER_H_ +#define MODULES_RTP_RTCP_MOCKS_MOCK_NETWORK_STATE_ESTIMATOR_OBSERVER_H_ + +#include "api/transport/network_types.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockNetworkStateEstimateObserver : public NetworkStateEstimateObserver { + public: + MOCK_METHOD(void, + OnRemoteNetworkEstimate, + (NetworkStateEstimate estimate), + (override)); +}; + +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_MOCKS_MOCK_NETWORK_STATE_ESTIMATOR_OBSERVER_H_ diff --git a/modules/rtp_rtcp/mocks/mock_recovered_packet_receiver.h b/modules/rtp_rtcp/mocks/mock_recovered_packet_receiver.h index d308b2cfa8..88472af7b9 100644 --- a/modules/rtp_rtcp/mocks/mock_recovered_packet_receiver.h +++ b/modules/rtp_rtcp/mocks/mock_recovered_packet_receiver.h @@ -11,7 +11,7 @@ #ifndef MODULES_RTP_RTCP_MOCKS_MOCK_RECOVERED_PACKET_RECEIVER_H_ #define MODULES_RTP_RTCP_MOCKS_MOCK_RECOVERED_PACKET_RECEIVER_H_ -#include "modules/rtp_rtcp/include/flexfec_receiver.h" +#include "modules/rtp_rtcp/include/recovered_packet_receiver.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "test/gmock.h" diff --git a/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h b/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h index e9a7d52691..f416c26d4f 100644 --- a/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h +++ b/modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h @@ -11,6 +11,8 @@ #ifndef MODULES_RTP_RTCP_MOCKS_MOCK_RTCP_RTT_STATS_H_ #define MODULES_RTP_RTCP_MOCKS_MOCK_RTCP_RTT_STATS_H_ +#include + #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "test/gmock.h" diff --git a/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h index 6872448f98..46163f7f35 100644 --- a/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h +++ b/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h @@ -11,18 +11,25 @@ #ifndef MODULES_RTP_RTCP_MOCKS_MOCK_RTP_RTCP_H_ #define MODULES_RTP_RTCP_MOCKS_MOCK_RTP_RTCP_H_ +#include +#include #include -#include -#include -#include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/rtp_headers.h" +#include "api/transport/network_types.h" +#include "api/units/time_delta.h" #include "api/video/video_bitrate_allocation.h" +#include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" +#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" #include "test/gmock.h" namespace webrtc { @@ -31,7 +38,7 @@ class MockRtpRtcpInterface : public RtpRtcpInterface { public: MOCK_METHOD(void, IncomingRtcpPacket, - (rtc::ArrayView packet), + (ArrayView packet), (override)); MOCK_METHOD(void, SetRemoteSSRC, (uint32_t ssrc), (override)); MOCK_METHOD(void, SetLocalSsrc, (uint32_t ssrc), (override)); @@ -69,9 +76,9 @@ class MockRtpRtcpInterface : public RtpRtcpInterface { MOCK_METHOD(void, SetMid, (absl::string_view mid), (override)); MOCK_METHOD(void, SetRtxSendStatus, (int modes), (override)); MOCK_METHOD(int, RtxSendStatus, (), (const, override)); - MOCK_METHOD(absl::optional, RtxSsrc, (), (const, override)); + MOCK_METHOD(std::optional, RtxSsrc, (), (const, override)); MOCK_METHOD(void, SetRtxSendPayloadType, (int, int), (override)); - 
MOCK_METHOD(absl::optional, FlexfecSsrc, (), (const, override)); + MOCK_METHOD(std::optional, FlexfecSsrc, (), (const, override)); MOCK_METHOD(int32_t, SetSendingStatus, (bool sending), (override)); MOCK_METHOD(bool, Sending, (), (const, override)); MOCK_METHOD(void, SetSendingMediaStatus, (bool sending), (override)); @@ -83,6 +90,19 @@ class MockRtpRtcpInterface : public RtpRtcpInterface { OnSendingRtpFrame, (uint32_t, int64_t, int, bool), (override)); + MOCK_METHOD(bool, + CanSendPacket, + (const RtpPacketToSend& packet), + (const, override)); + MOCK_METHOD(void, + AssignSequenceNumber, + (RtpPacketToSend & packet), + (override)); + MOCK_METHOD(void, + SendPacket, + (std::unique_ptr packet, + const PacedPacketInfo& pacing_info), + (override)); MOCK_METHOD(bool, TrySendPacket, (std::unique_ptr packet, @@ -100,11 +120,11 @@ class MockRtpRtcpInterface : public RtpRtcpInterface { (override)); MOCK_METHOD(void, OnAbortedRetransmissions, - (rtc::ArrayView), + (ArrayView), (override)); MOCK_METHOD(void, OnPacketsAcknowledged, - (rtc::ArrayView), + (ArrayView), (override)); MOCK_METHOD(std::vector>, GeneratePadding, @@ -112,14 +132,14 @@ class MockRtpRtcpInterface : public RtpRtcpInterface { (override)); MOCK_METHOD(std::vector, GetSentRtpPacketInfos, - (rtc::ArrayView sequence_numbers), + (ArrayView sequence_numbers), (const, override)); MOCK_METHOD(size_t, ExpectedPerPacketOverhead, (), (const, override)); MOCK_METHOD(void, OnPacketSendingThreadSwitched, (), (override)); MOCK_METHOD(RtcpMode, RTCP, (), (const, override)); MOCK_METHOD(void, SetRTCPStatus, (RtcpMode method), (override)); MOCK_METHOD(int32_t, SetCNAME, (absl::string_view cname), (override)); - MOCK_METHOD(absl::optional, LastRtt, (), (const, override)); + MOCK_METHOD(std::optional, LastRtt, (), (const, override)); MOCK_METHOD(TimeDelta, ExpectedRetransmissionTime, (), (const, override)); MOCK_METHOD(int32_t, SendRTCP, (RTCPPacketType packet_type), (override)); MOCK_METHOD(void, @@ -130,11 +150,11 @@ class MockRtpRtcpInterface : public RtpRtcpInterface { GetLatestReportBlockData, (), (const, override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, GetSenderReportStats, (), (const, override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, GetNonSenderRttStats, (), (const, override)); diff --git a/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc b/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc index 99fc030aca..ef7aa4ed6c 100644 --- a/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc +++ b/modules/rtp_rtcp/source/absolute_capture_time_interpolator.cc @@ -10,24 +10,28 @@ #include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h" -#include - +#include +#include +#include + +#include "api/array_view.h" +#include "api/rtp_headers.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "rtc_base/checks.h" +#include "rtc_base/synchronization/mutex.h" +#include "system_wrappers/include/clock.h" +#include "system_wrappers/include/metrics.h" +#include "system_wrappers/include/ntp_time.h" namespace webrtc { -namespace { - -constexpr Timestamp kInvalidLastReceiveTime = Timestamp::MinusInfinity(); -} // namespace - -constexpr TimeDelta AbsoluteCaptureTimeInterpolator::kInterpolationMaxInterval; AbsoluteCaptureTimeInterpolator::AbsoluteCaptureTimeInterpolator(Clock* clock) - : clock_(clock), last_receive_time_(kInvalidLastReceiveTime) {} + : clock_(clock) {} uint32_t AbsoluteCaptureTimeInterpolator::GetSource( uint32_t ssrc, - rtc::ArrayView 
csrcs) { + ArrayView csrcs) { if (csrcs.empty()) { return ssrc; } @@ -35,72 +39,100 @@ uint32_t AbsoluteCaptureTimeInterpolator::GetSource( return csrcs[0]; } -absl::optional +std::optional AbsoluteCaptureTimeInterpolator::OnReceivePacket( uint32_t source, uint32_t rtp_timestamp, - uint32_t rtp_clock_frequency, - const absl::optional& received_extension) { + int rtp_clock_frequency_hz, + const std::optional& received_extension) { const Timestamp receive_time = clock_->CurrentTime(); + if (!first_packet_time_) { + first_packet_time_ = receive_time; + } MutexLock lock(&mutex_); - AbsoluteCaptureTime extension; - if (received_extension == absl::nullopt) { + if (received_extension == std::nullopt) { if (!ShouldInterpolateExtension(receive_time, source, rtp_timestamp, - rtp_clock_frequency)) { - last_receive_time_ = kInvalidLastReceiveTime; - return absl::nullopt; + rtp_clock_frequency_hz)) { + last_receive_time_ = Timestamp::MinusInfinity(); + return std::nullopt; } - extension.absolute_capture_timestamp = InterpolateAbsoluteCaptureTimestamp( - rtp_timestamp, rtp_clock_frequency, last_rtp_timestamp_, - last_absolute_capture_timestamp_); - extension.estimated_capture_clock_offset = - last_estimated_capture_clock_offset_; + return AbsoluteCaptureTime{ + .absolute_capture_timestamp = InterpolateAbsoluteCaptureTimestamp( + rtp_timestamp, rtp_clock_frequency_hz, last_rtp_timestamp_, + last_received_extension_.absolute_capture_timestamp), + .estimated_capture_clock_offset = + last_received_extension_.estimated_capture_clock_offset, + }; } else { last_source_ = source; last_rtp_timestamp_ = rtp_timestamp; - last_rtp_clock_frequency_ = rtp_clock_frequency; - last_absolute_capture_timestamp_ = - received_extension->absolute_capture_timestamp; - last_estimated_capture_clock_offset_ = - received_extension->estimated_capture_clock_offset; + last_rtp_clock_frequency_hz_ = rtp_clock_frequency_hz; + last_received_extension_ = *received_extension; last_receive_time_ = receive_time; - - extension = *received_extension; + // Record statistics on the abs-capture-time extension + if (!first_extension_time_) { + RTC_HISTOGRAM_COUNTS_1M("WebRTC.Call.AbsCapture.ExtensionWait", + (receive_time - *first_packet_time_).ms()); + first_extension_time_ = receive_time; + } + int64_t ntp_delta = + uint64_t{clock_->ConvertTimestampToNtpTime(receive_time)} - + received_extension->absolute_capture_timestamp; + TimeDelta capture_delta = TimeDelta::Micros(Q32x32ToInt64Us(ntp_delta)); + RTC_HISTOGRAM_COUNTS_1G("WebRTC.Call.AbsCapture.Delta", + abs(capture_delta.us())); + if (previous_capture_delta_) { + RTC_HISTOGRAM_COUNTS_1G( + "WebRTC.Call.AbsCapture.DeltaDeviation", + abs((capture_delta - *previous_capture_delta_).us())); + } + previous_capture_delta_ = capture_delta; + if (received_extension->estimated_capture_clock_offset) { + if (!first_offset_time_) { + RTC_HISTOGRAM_COUNTS_1M("WebRTC.Call.AbsCapture.OffsetWait", + (receive_time - *first_packet_time_).ms()); + first_offset_time_ = receive_time; + } + TimeDelta offset_as_delta = TimeDelta::Micros( + Q32x32ToInt64Us(*received_extension->estimated_capture_clock_offset)); + RTC_HISTOGRAM_COUNTS_1G("WebRTC.Call.AbsCapture.Offset", + abs(offset_as_delta.us())); + if (previous_offset_as_delta_) { + RTC_HISTOGRAM_COUNTS_1G( + "WebRTC.Call.AbsCapture.OffsetDeviation", + abs((offset_as_delta - *previous_offset_as_delta_).us())); + } + previous_offset_as_delta_ = offset_as_delta; + } + return received_extension; } - - return extension; } uint64_t 
AbsoluteCaptureTimeInterpolator::InterpolateAbsoluteCaptureTimestamp( uint32_t rtp_timestamp, - uint32_t rtp_clock_frequency, + int rtp_clock_frequency_hz, uint32_t last_rtp_timestamp, uint64_t last_absolute_capture_timestamp) { - RTC_DCHECK_GT(rtp_clock_frequency, 0); + RTC_DCHECK_GT(rtp_clock_frequency_hz, 0); return last_absolute_capture_timestamp + - static_cast( - rtc::dchecked_cast(rtp_timestamp - last_rtp_timestamp) - << 32) / - rtp_clock_frequency; + static_cast(uint64_t{rtp_timestamp - last_rtp_timestamp} + << 32) / + rtp_clock_frequency_hz; } bool AbsoluteCaptureTimeInterpolator::ShouldInterpolateExtension( Timestamp receive_time, uint32_t source, - uint32_t rtp_timestamp, - uint32_t rtp_clock_frequency) const { - // Shouldn't if we don't have a previously received extension stored. - if (last_receive_time_ == kInvalidLastReceiveTime) { - return false; - } - - // Shouldn't if the last received extension is too old. - if ((receive_time - last_receive_time_) > kInterpolationMaxInterval) { + uint32_t /* rtp_timestamp */, + int rtp_clock_frequency_hz) const { + // Shouldn't if the last received extension is not eligible for interpolation, + // in particular if we don't have a previously received extension stored. + if (receive_time - last_receive_time_ > kInterpolationMaxInterval) { return false; } @@ -110,12 +142,12 @@ bool AbsoluteCaptureTimeInterpolator::ShouldInterpolateExtension( } // Shouldn't if the RTP clock frequency has changed. - if (last_rtp_clock_frequency_ != rtp_clock_frequency) { + if (last_rtp_clock_frequency_hz_ != rtp_clock_frequency_hz) { return false; } // Shouldn't if the RTP clock frequency is invalid. - if (rtp_clock_frequency <= 0) { + if (rtp_clock_frequency_hz <= 0) { return false; } diff --git a/modules/rtp_rtcp/source/absolute_capture_time_interpolator.h b/modules/rtp_rtcp/source/absolute_capture_time_interpolator.h index f5ec820dd5..c6212a8646 100644 --- a/modules/rtp_rtcp/source/absolute_capture_time_interpolator.h +++ b/modules/rtp_rtcp/source/absolute_capture_time_interpolator.h @@ -11,6 +11,9 @@ #ifndef MODULES_RTP_RTCP_SOURCE_ABSOLUTE_CAPTURE_TIME_INTERPOLATOR_H_ #define MODULES_RTP_RTCP_SOURCE_ABSOLUTE_CAPTURE_TIME_INTERPOLATOR_H_ +#include +#include + #include "api/array_view.h" #include "api/rtp_headers.h" #include "api/units/time_delta.h" @@ -35,51 +38,57 @@ namespace webrtc { // class AbsoluteCaptureTimeInterpolator { public: - static constexpr TimeDelta kInterpolationMaxInterval = - TimeDelta::Millis(5000); + static constexpr TimeDelta kInterpolationMaxInterval = TimeDelta::Seconds(5); explicit AbsoluteCaptureTimeInterpolator(Clock* clock); // Returns the source (i.e. SSRC or CSRC) of the capture system. - static uint32_t GetSource(uint32_t ssrc, - rtc::ArrayView csrcs); + static uint32_t GetSource(uint32_t ssrc, ArrayView csrcs); // Returns a received header extension, an interpolated header extension, or - // `absl::nullopt` if it's not possible to interpolate a header extension. - absl::optional OnReceivePacket( + // `std::nullopt` if it's not possible to interpolate a header extension. 
+ std::optional OnReceivePacket( uint32_t source, uint32_t rtp_timestamp, - uint32_t rtp_clock_frequency, - const absl::optional& received_extension); + int rtp_clock_frequency_hz, + const std::optional& received_extension); private: friend class AbsoluteCaptureTimeSender; static uint64_t InterpolateAbsoluteCaptureTimestamp( uint32_t rtp_timestamp, - uint32_t rtp_clock_frequency, + int rtp_clock_frequency_hz, uint32_t last_rtp_timestamp, uint64_t last_absolute_capture_timestamp); bool ShouldInterpolateExtension(Timestamp receive_time, uint32_t source, uint32_t rtp_timestamp, - uint32_t rtp_clock_frequency) const + int rtp_clock_frequency_hz) const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); Clock* const clock_; Mutex mutex_; - Timestamp last_receive_time_ RTC_GUARDED_BY(mutex_); + // Time of the last received header extension eligible for interpolation, + // MinusInfinity() if no extension was received, or last received one is + // not eligible for interpolation. + Timestamp last_receive_time_ RTC_GUARDED_BY(mutex_) = + Timestamp::MinusInfinity(); uint32_t last_source_ RTC_GUARDED_BY(mutex_); uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(mutex_); - uint32_t last_rtp_clock_frequency_ RTC_GUARDED_BY(mutex_); - uint64_t last_absolute_capture_timestamp_ RTC_GUARDED_BY(mutex_); - absl::optional last_estimated_capture_clock_offset_ - RTC_GUARDED_BY(mutex_); -}; // AbsoluteCaptureTimeInterpolator + int last_rtp_clock_frequency_hz_ RTC_GUARDED_BY(mutex_); + AbsoluteCaptureTime last_received_extension_ RTC_GUARDED_BY(mutex_); + // Variables used for statistics generation + std::optional first_packet_time_; + std::optional first_offset_time_; + std::optional first_extension_time_; + std::optional previous_capture_delta_; + std::optional previous_offset_as_delta_; +}; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/absolute_capture_time_interpolator_unittest.cc b/modules/rtp_rtcp/source/absolute_capture_time_interpolator_unittest.cc index 6a312f9b43..a13734622d 100644 --- a/modules/rtp_rtcp/source/absolute_capture_time_interpolator_unittest.cc +++ b/modules/rtp_rtcp/source/absolute_capture_time_interpolator_unittest.cc @@ -10,12 +10,24 @@ #include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h" +#include +#include + +#include "api/rtp_headers.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "system_wrappers/include/clock.h" +#include "system_wrappers/include/metrics.h" #include "system_wrappers/include/ntp_time.h" #include "test/gmock.h" #include "test/gtest.h" namespace webrtc { +using testing::AllOf; +using testing::Ge; +using testing::Le; + TEST(AbsoluteCaptureTimeInterpolatorTest, GetSourceWithoutCsrcs) { constexpr uint32_t kSsrc = 12; @@ -32,13 +44,13 @@ TEST(AbsoluteCaptureTimeInterpolatorTest, GetSourceWithCsrcs) { TEST(AbsoluteCaptureTimeInterpolatorTest, ReceiveExtensionReturnsExtension) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency = 64000; + constexpr int kRtpClockFrequency = 64'000; constexpr uint32_t kRtpTimestamp0 = 1020300000; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 + 1280; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9020), absl::nullopt}; + const AbsoluteCaptureTime kExtension0 = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension1 = {Int64MsToUQ32x32(9020), + std::nullopt}; SimulatedClock clock(0); 
AbsoluteCaptureTimeInterpolator interpolator(&clock); @@ -55,299 +67,357 @@ TEST(AbsoluteCaptureTimeInterpolatorTest, ReceiveExtensionReturnsExtension) { TEST(AbsoluteCaptureTimeInterpolatorTest, ReceiveNoExtensionReturnsNoExtension) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency = 64000; + constexpr int kRtpClockFrequency = 64'000; constexpr uint32_t kRtpTimestamp0 = 1020300000; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 + 1280; - static const absl::optional kExtension0 = absl::nullopt; - static const absl::optional kExtension1 = absl::nullopt; SimulatedClock clock(0); AbsoluteCaptureTimeInterpolator interpolator(&clock); - EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0, - kRtpClockFrequency, kExtension0), - absl::nullopt); + EXPECT_EQ( + interpolator.OnReceivePacket(kSource, kRtpTimestamp0, kRtpClockFrequency, + /*received_extension=*/std::nullopt), + std::nullopt); - EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp1, - kRtpClockFrequency, kExtension1), - absl::nullopt); + EXPECT_EQ( + interpolator.OnReceivePacket(kSource, kRtpTimestamp1, kRtpClockFrequency, + /*received_extension=*/std::nullopt), + std::nullopt); } TEST(AbsoluteCaptureTimeInterpolatorTest, InterpolateLaterPacketArrivingLater) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency = 64000; + constexpr int kRtpClockFrequency = 64'000; constexpr uint32_t kRtpTimestamp0 = 1020300000; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 + 1280; constexpr uint32_t kRtpTimestamp2 = kRtpTimestamp0 + 2560; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = absl::nullopt; - static const absl::optional kExtension2 = absl::nullopt; + const AbsoluteCaptureTime kExtension = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeInterpolator interpolator(&clock); EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0, - kRtpClockFrequency, kExtension0), - kExtension0); + kRtpClockFrequency, kExtension), + kExtension); - absl::optional extension = interpolator.OnReceivePacket( - kSource, kRtpTimestamp1, kRtpClockFrequency, kExtension1); - EXPECT_TRUE(extension.has_value()); + std::optional extension = + interpolator.OnReceivePacket(kSource, kRtpTimestamp1, kRtpClockFrequency, + /*received_extension=*/std::nullopt); + ASSERT_TRUE(extension.has_value()); EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp), - UQ32x32ToInt64Ms(kExtension0->absolute_capture_timestamp) + 20); + UQ32x32ToInt64Ms(kExtension.absolute_capture_timestamp) + 20); EXPECT_EQ(extension->estimated_capture_clock_offset, - kExtension0->estimated_capture_clock_offset); + kExtension.estimated_capture_clock_offset); - extension = interpolator.OnReceivePacket(kSource, kRtpTimestamp2, - kRtpClockFrequency, kExtension2); - EXPECT_TRUE(extension.has_value()); + extension = + interpolator.OnReceivePacket(kSource, kRtpTimestamp2, kRtpClockFrequency, + /*received_extension=*/std::nullopt); + ASSERT_TRUE(extension.has_value()); EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp), - UQ32x32ToInt64Ms(kExtension0->absolute_capture_timestamp) + 40); + UQ32x32ToInt64Ms(kExtension.absolute_capture_timestamp) + 40); EXPECT_EQ(extension->estimated_capture_clock_offset, - kExtension0->estimated_capture_clock_offset); + kExtension.estimated_capture_clock_offset); } TEST(AbsoluteCaptureTimeInterpolatorTest, 
InterpolateEarlierPacketArrivingLater) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency = 64000; + constexpr int kRtpClockFrequency = 64'000; constexpr uint32_t kRtpTimestamp0 = 1020300000; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 - 1280; constexpr uint32_t kRtpTimestamp2 = kRtpTimestamp0 - 2560; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = absl::nullopt; - static const absl::optional kExtension2 = absl::nullopt; + const AbsoluteCaptureTime kExtension = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeInterpolator interpolator(&clock); EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0, - kRtpClockFrequency, kExtension0), - kExtension0); + kRtpClockFrequency, kExtension), + kExtension); - absl::optional extension = interpolator.OnReceivePacket( - kSource, kRtpTimestamp1, kRtpClockFrequency, kExtension1); - EXPECT_TRUE(extension.has_value()); + std::optional extension = + interpolator.OnReceivePacket(kSource, kRtpTimestamp1, kRtpClockFrequency, + /*received_extension=*/std::nullopt); + ASSERT_TRUE(extension.has_value()); EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp), - UQ32x32ToInt64Ms(kExtension0->absolute_capture_timestamp) - 20); + UQ32x32ToInt64Ms(kExtension.absolute_capture_timestamp) - 20); EXPECT_EQ(extension->estimated_capture_clock_offset, - kExtension0->estimated_capture_clock_offset); + kExtension.estimated_capture_clock_offset); - extension = interpolator.OnReceivePacket(kSource, kRtpTimestamp2, - kRtpClockFrequency, kExtension2); - EXPECT_TRUE(extension.has_value()); + extension = + interpolator.OnReceivePacket(kSource, kRtpTimestamp2, kRtpClockFrequency, + /*received_extension=*/std::nullopt); + ASSERT_TRUE(extension.has_value()); EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp), - UQ32x32ToInt64Ms(kExtension0->absolute_capture_timestamp) - 40); + UQ32x32ToInt64Ms(kExtension.absolute_capture_timestamp) - 40); EXPECT_EQ(extension->estimated_capture_clock_offset, - kExtension0->estimated_capture_clock_offset); + kExtension.estimated_capture_clock_offset); } TEST(AbsoluteCaptureTimeInterpolatorTest, InterpolateLaterPacketArrivingLaterWithRtpTimestampWrapAround) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency = 64000; - constexpr uint32_t kRtpTimestamp0 = ~uint32_t{0} - 79; + constexpr int kRtpClockFrequency = 64'000; + constexpr uint32_t kRtpTimestamp0 = uint32_t{0} - 80; constexpr uint32_t kRtpTimestamp1 = 1280 - 80; constexpr uint32_t kRtpTimestamp2 = 2560 - 80; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = absl::nullopt; - static const absl::optional kExtension2 = absl::nullopt; + const AbsoluteCaptureTime kExtension = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeInterpolator interpolator(&clock); EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0, - kRtpClockFrequency, kExtension0), - kExtension0); + kRtpClockFrequency, kExtension), + kExtension); - absl::optional extension = interpolator.OnReceivePacket( - kSource, kRtpTimestamp1, kRtpClockFrequency, kExtension1); - EXPECT_TRUE(extension.has_value()); + std::optional extension = + interpolator.OnReceivePacket(kSource, kRtpTimestamp1, kRtpClockFrequency, + 
/*received_extension=*/std::nullopt); + ASSERT_TRUE(extension.has_value()); EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp), - UQ32x32ToInt64Ms(kExtension0->absolute_capture_timestamp) + 20); + UQ32x32ToInt64Ms(kExtension.absolute_capture_timestamp) + 20); EXPECT_EQ(extension->estimated_capture_clock_offset, - kExtension0->estimated_capture_clock_offset); + kExtension.estimated_capture_clock_offset); - extension = interpolator.OnReceivePacket(kSource, kRtpTimestamp2, - kRtpClockFrequency, kExtension2); - EXPECT_TRUE(extension.has_value()); + extension = + interpolator.OnReceivePacket(kSource, kRtpTimestamp2, kRtpClockFrequency, + /*received_extension=*/std::nullopt); + ASSERT_TRUE(extension.has_value()); EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp), - UQ32x32ToInt64Ms(kExtension0->absolute_capture_timestamp) + 40); + UQ32x32ToInt64Ms(kExtension.absolute_capture_timestamp) + 40); EXPECT_EQ(extension->estimated_capture_clock_offset, - kExtension0->estimated_capture_clock_offset); + kExtension.estimated_capture_clock_offset); } TEST(AbsoluteCaptureTimeInterpolatorTest, InterpolateEarlierPacketArrivingLaterWithRtpTimestampWrapAround) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency = 64000; + constexpr int kRtpClockFrequency = 64'000; constexpr uint32_t kRtpTimestamp0 = 799; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 - 1280; constexpr uint32_t kRtpTimestamp2 = kRtpTimestamp0 - 2560; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = absl::nullopt; - static const absl::optional kExtension2 = absl::nullopt; + const AbsoluteCaptureTime kExtension = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeInterpolator interpolator(&clock); EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0, - kRtpClockFrequency, kExtension0), - kExtension0); + kRtpClockFrequency, kExtension), + kExtension); - absl::optional extension = interpolator.OnReceivePacket( - kSource, kRtpTimestamp1, kRtpClockFrequency, kExtension1); - EXPECT_TRUE(extension.has_value()); + std::optional extension = + interpolator.OnReceivePacket(kSource, kRtpTimestamp1, kRtpClockFrequency, + /*received_extension=*/std::nullopt); + ASSERT_TRUE(extension.has_value()); EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp), - UQ32x32ToInt64Ms(kExtension0->absolute_capture_timestamp) - 20); + UQ32x32ToInt64Ms(kExtension.absolute_capture_timestamp) - 20); EXPECT_EQ(extension->estimated_capture_clock_offset, - kExtension0->estimated_capture_clock_offset); + kExtension.estimated_capture_clock_offset); - extension = interpolator.OnReceivePacket(kSource, kRtpTimestamp2, - kRtpClockFrequency, kExtension2); - EXPECT_TRUE(extension.has_value()); + extension = + interpolator.OnReceivePacket(kSource, kRtpTimestamp2, kRtpClockFrequency, + /*received_extension=*/std::nullopt); + ASSERT_TRUE(extension.has_value()); EXPECT_EQ(UQ32x32ToInt64Ms(extension->absolute_capture_timestamp), - UQ32x32ToInt64Ms(kExtension0->absolute_capture_timestamp) - 40); + UQ32x32ToInt64Ms(kExtension.absolute_capture_timestamp) - 40); EXPECT_EQ(extension->estimated_capture_clock_offset, - kExtension0->estimated_capture_clock_offset); + kExtension.estimated_capture_clock_offset); } TEST(AbsoluteCaptureTimeInterpolatorTest, SkipInterpolateIfTooLate) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency = 64000; + 
constexpr int kRtpClockFrequency = 64'000; constexpr uint32_t kRtpTimestamp0 = 1020300000; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 + 1280; constexpr uint32_t kRtpTimestamp2 = kRtpTimestamp1 + 1280; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = absl::nullopt; - static const absl::optional kExtension2 = absl::nullopt; + const AbsoluteCaptureTime kExtension = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeInterpolator interpolator(&clock); EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0, - kRtpClockFrequency, kExtension0), - kExtension0); + kRtpClockFrequency, kExtension), + kExtension); clock.AdvanceTime(AbsoluteCaptureTimeInterpolator::kInterpolationMaxInterval); - EXPECT_TRUE(interpolator - .OnReceivePacket(kSource, kRtpTimestamp1, kRtpClockFrequency, - kExtension1) - .has_value()); + EXPECT_NE( + interpolator.OnReceivePacket(kSource, kRtpTimestamp1, kRtpClockFrequency, + /*received_extension=*/std::nullopt), + std::nullopt); - clock.AdvanceTimeMilliseconds(1); + clock.AdvanceTime(TimeDelta::Millis(1)); - EXPECT_FALSE(interpolator - .OnReceivePacket(kSource, kRtpTimestamp2, kRtpClockFrequency, - kExtension2) - .has_value()); + EXPECT_EQ( + interpolator.OnReceivePacket(kSource, kRtpTimestamp2, kRtpClockFrequency, + /*received_extension=*/std::nullopt), + std::nullopt); } TEST(AbsoluteCaptureTimeInterpolatorTest, SkipInterpolateIfSourceChanged) { constexpr uint32_t kSource0 = 1337; constexpr uint32_t kSource1 = 1338; - constexpr uint32_t kRtpClockFrequency = 64000; + constexpr int kRtpClockFrequency = 64'000; constexpr uint32_t kRtpTimestamp0 = 1020300000; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 + 1280; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = absl::nullopt; + const AbsoluteCaptureTime kExtension = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeInterpolator interpolator(&clock); EXPECT_EQ(interpolator.OnReceivePacket(kSource0, kRtpTimestamp0, - kRtpClockFrequency, kExtension0), - kExtension0); + kRtpClockFrequency, kExtension), + kExtension); - EXPECT_FALSE(interpolator - .OnReceivePacket(kSource1, kRtpTimestamp1, - kRtpClockFrequency, kExtension1) - .has_value()); + EXPECT_EQ( + interpolator.OnReceivePacket(kSource1, kRtpTimestamp1, kRtpClockFrequency, + /*received_extension=*/std::nullopt), + std::nullopt); } TEST(AbsoluteCaptureTimeInterpolatorTest, SkipInterpolateIfRtpClockFrequencyChanged) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency0 = 64000; - constexpr uint32_t kRtpClockFrequency1 = 32000; + constexpr int kRtpClockFrequency0 = 64'000; + constexpr int kRtpClockFrequency1 = 32'000; constexpr uint32_t kRtpTimestamp0 = 1020300000; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 + 640; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = absl::nullopt; + const AbsoluteCaptureTime kExtension = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeInterpolator interpolator(&clock); EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0, - kRtpClockFrequency0, kExtension0), - kExtension0); + kRtpClockFrequency0, kExtension), + kExtension); - 
EXPECT_FALSE(interpolator - .OnReceivePacket(kSource, kRtpTimestamp1, - kRtpClockFrequency1, kExtension1) - .has_value()); + EXPECT_EQ( + interpolator.OnReceivePacket(kSource, kRtpTimestamp1, kRtpClockFrequency1, + /*received_extension=*/std::nullopt), + std::nullopt); } TEST(AbsoluteCaptureTimeInterpolatorTest, SkipInterpolateIfRtpClockFrequencyIsInvalid) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency = 0; constexpr uint32_t kRtpTimestamp0 = 1020300000; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 + 640; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = absl::nullopt; + const AbsoluteCaptureTime kExtension = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeInterpolator interpolator(&clock); - EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp0, - kRtpClockFrequency, kExtension0), - kExtension0); + EXPECT_EQ( + interpolator.OnReceivePacket(kSource, kRtpTimestamp0, + /*rtp_clock_frequency_hz=*/0, kExtension), + kExtension); - EXPECT_FALSE(interpolator - .OnReceivePacket(kSource, kRtpTimestamp1, kRtpClockFrequency, - kExtension1) - .has_value()); + EXPECT_EQ(interpolator.OnReceivePacket(kSource, kRtpTimestamp1, + /*rtp_clock_frequency_hz=*/0, + /*received_extension=*/std::nullopt), + std::nullopt); } TEST(AbsoluteCaptureTimeInterpolatorTest, SkipInterpolateIsSticky) { constexpr uint32_t kSource0 = 1337; constexpr uint32_t kSource1 = 1338; - constexpr uint32_t kSource2 = 1337; - constexpr uint32_t kRtpClockFrequency = 64000; + constexpr int kRtpClockFrequency = 64'000; constexpr uint32_t kRtpTimestamp0 = 1020300000; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 + 1280; constexpr uint32_t kRtpTimestamp2 = kRtpTimestamp1 + 1280; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = absl::nullopt; - static const absl::optional kExtension2 = absl::nullopt; + const AbsoluteCaptureTime kExtension = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeInterpolator interpolator(&clock); EXPECT_EQ(interpolator.OnReceivePacket(kSource0, kRtpTimestamp0, - kRtpClockFrequency, kExtension0), - kExtension0); + kRtpClockFrequency, kExtension), + kExtension); + + EXPECT_EQ( + interpolator.OnReceivePacket(kSource1, kRtpTimestamp1, kRtpClockFrequency, + /*received_extension=*/std::nullopt), + std::nullopt); + + EXPECT_EQ( + interpolator.OnReceivePacket(kSource0, kRtpTimestamp2, kRtpClockFrequency, + /*received_extension=*/std::nullopt), + std::nullopt); +} + +TEST(AbsoluteCaptureTimeInterpolatorTest, MetricsAreUpdated) { + constexpr uint32_t kRtpTimestamp0 = 102030000; + constexpr uint32_t kSource = 1234; + constexpr uint32_t kFrequency = 1000; + SimulatedClock clock(0); + AbsoluteCaptureTimeInterpolator interpolator(&clock); - EXPECT_FALSE(interpolator - .OnReceivePacket(kSource1, kRtpTimestamp1, - kRtpClockFrequency, kExtension1) - .has_value()); + metrics::Reset(); + // First packet has no extension. + interpolator.OnReceivePacket(kSource, kRtpTimestamp0, kFrequency, + std::nullopt); + EXPECT_METRIC_EQ(metrics::NumSamples("WebRTC.Call.AbsCapture.ExtensionWait"), + 0); + + // Second packet has extension, but no offset. 
+ clock.AdvanceTimeMilliseconds(10); + interpolator.OnReceivePacket( + kSource, kRtpTimestamp0 + 10, kFrequency, + AbsoluteCaptureTime{Int64MsToUQ32x32(5000), std::nullopt}); + EXPECT_METRIC_EQ(metrics::NumSamples("WebRTC.Call.AbsCapture.ExtensionWait"), + 1); + + // Third packet has extension with offset, value zero. + clock.AdvanceTimeMilliseconds(10); + interpolator.OnReceivePacket( + kSource, kRtpTimestamp0 + 20, kFrequency, + AbsoluteCaptureTime{Int64MsToUQ32x32(20), Int64MsToUQ32x32(0)}); + EXPECT_METRIC_EQ(metrics::NumSamples("WebRTC.Call.AbsCapture.Delta"), 2); + EXPECT_METRIC_EQ(metrics::NumSamples("WebRTC.Call.AbsCapture.DeltaDeviation"), + 1); +} + +TEST(AbsoluteCaptureTimeInterpolatorTest, DeltaRecordedCorrectly) { + constexpr uint32_t kRtpTimestamp0 = 102030000; + constexpr uint32_t kSource = 1234; + constexpr uint32_t kFrequency = 1000; + SimulatedClock clock(0); + AbsoluteCaptureTimeInterpolator interpolator(&clock); - EXPECT_FALSE(interpolator - .OnReceivePacket(kSource2, kRtpTimestamp2, - kRtpClockFrequency, kExtension2) - .has_value()); + metrics::Reset(); + clock.AdvanceTimeMilliseconds(10); + // Packet has extension, with delta 5 ms in the past. + interpolator.OnReceivePacket( + kSource, kRtpTimestamp0 + 10, kFrequency, + AbsoluteCaptureTime{ + uint64_t{clock.ConvertTimestampToNtpTime(Timestamp::Millis(5))}, + std::nullopt}); + + EXPECT_METRIC_EQ(metrics::NumSamples("WebRTC.Call.AbsCapture.ExtensionWait"), + 1); + int sample = metrics::MinSample("WebRTC.Call.AbsCapture.Delta"); + EXPECT_THAT(sample, AllOf(Ge(5000), Le(5000))); + + metrics::Reset(); + // Packet has extension, with timestamp 6 ms in the future. + interpolator.OnReceivePacket( + kSource, kRtpTimestamp0 + 15, kFrequency, + AbsoluteCaptureTime{ + uint64_t{clock.ConvertTimestampToNtpTime(Timestamp::Millis(16))}, + std::nullopt}); + + sample = metrics::MinSample("WebRTC.Call.AbsCapture.Delta"); + // Since we capture with abs(), this should also be recorded as 6 ms + EXPECT_THAT(sample, AllOf(Ge(6000), Le(6000))); } } // namespace webrtc diff --git a/modules/rtp_rtcp/source/absolute_capture_time_sender.cc b/modules/rtp_rtcp/source/absolute_capture_time_sender.cc index 28266769ff..61e321f6bb 100644 --- a/modules/rtp_rtcp/source/absolute_capture_time_sender.cc +++ b/modules/rtp_rtcp/source/absolute_capture_time_sender.cc @@ -10,20 +10,18 @@ #include "modules/rtp_rtcp/source/absolute_capture_time_sender.h" -#include +#include +#include +#include +#include "api/array_view.h" +#include "api/rtp_headers.h" +#include "api/units/timestamp.h" #include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h" +#include "system_wrappers/include/clock.h" #include "system_wrappers/include/ntp_time.h" namespace webrtc { -namespace { - -constexpr Timestamp kInvalidLastSendTime = Timestamp::MinusInfinity(); - -} // namespace - -constexpr TimeDelta AbsoluteCaptureTimeSender::kInterpolationMaxInterval; -constexpr TimeDelta AbsoluteCaptureTimeSender::kInterpolationMaxError; static_assert( AbsoluteCaptureTimeInterpolator::kInterpolationMaxInterval >= @@ -31,58 +29,61 @@ static_assert( "Receivers should be as willing to interpolate timestamps as senders."); AbsoluteCaptureTimeSender::AbsoluteCaptureTimeSender(Clock* clock) - : clock_(clock), last_send_time_(kInvalidLastSendTime) {} + : clock_(clock) {} -uint32_t AbsoluteCaptureTimeSender::GetSource( - uint32_t ssrc, - rtc::ArrayView csrcs) { +uint32_t AbsoluteCaptureTimeSender::GetSource(uint32_t ssrc, + ArrayView csrcs) { return 
AbsoluteCaptureTimeInterpolator::GetSource(ssrc, csrcs); } -absl::optional AbsoluteCaptureTimeSender::OnSendPacket( +std::optional AbsoluteCaptureTimeSender::OnSendPacket( uint32_t source, uint32_t rtp_timestamp, uint32_t rtp_clock_frequency, uint64_t absolute_capture_timestamp, - absl::optional estimated_capture_clock_offset) { - const Timestamp send_time = clock_->CurrentTime(); - - MutexLock lock(&mutex_); + std::optional estimated_capture_clock_offset) { + return OnSendPacket(source, rtp_timestamp, rtp_clock_frequency, + NtpTime(absolute_capture_timestamp), + estimated_capture_clock_offset, /*force=*/false); +} - if (!ShouldSendExtension(send_time, source, rtp_timestamp, - rtp_clock_frequency, absolute_capture_timestamp, - estimated_capture_clock_offset)) { - return absl::nullopt; +std::optional AbsoluteCaptureTimeSender::OnSendPacket( + uint32_t source, + uint32_t rtp_timestamp, + int rtp_clock_frequency_hz, + NtpTime absolute_capture_time, + std::optional estimated_capture_clock_offset, + bool force) { + Timestamp send_time = clock_->CurrentTime(); + if (!(force || ShouldSendExtension( + send_time, source, rtp_timestamp, rtp_clock_frequency_hz, + absolute_capture_time, estimated_capture_clock_offset))) { + return std::nullopt; } last_source_ = source; last_rtp_timestamp_ = rtp_timestamp; - last_rtp_clock_frequency_ = rtp_clock_frequency; - last_absolute_capture_timestamp_ = absolute_capture_timestamp; + last_rtp_clock_frequency_hz_ = rtp_clock_frequency_hz; + last_absolute_capture_time_ = absolute_capture_time; last_estimated_capture_clock_offset_ = estimated_capture_clock_offset; - last_send_time_ = send_time; - AbsoluteCaptureTime extension; - extension.absolute_capture_timestamp = absolute_capture_timestamp; - extension.estimated_capture_clock_offset = estimated_capture_clock_offset; - return extension; + return AbsoluteCaptureTime{ + .absolute_capture_timestamp = uint64_t{absolute_capture_time}, + .estimated_capture_clock_offset = estimated_capture_clock_offset, + }; } bool AbsoluteCaptureTimeSender::ShouldSendExtension( Timestamp send_time, uint32_t source, uint32_t rtp_timestamp, - uint32_t rtp_clock_frequency, - uint64_t absolute_capture_timestamp, - absl::optional estimated_capture_clock_offset) const { - // Should if we've never sent anything before. - if (last_send_time_ == kInvalidLastSendTime) { - return true; - } - - // Should if the last sent extension is too old. - if ((send_time - last_send_time_) > kInterpolationMaxInterval) { + int rtp_clock_frequency_hz, + NtpTime absolute_capture_time, + std::optional estimated_capture_clock_offset) const { + // Should if the last sent extension is too old, in particular if we've never + // sent anything before. + if (send_time - last_send_time_ > kInterpolationMaxInterval) { return true; } @@ -92,12 +93,12 @@ bool AbsoluteCaptureTimeSender::ShouldSendExtension( } // Should if the RTP clock frequency has changed. - if (last_rtp_clock_frequency_ != rtp_clock_frequency) { + if (last_rtp_clock_frequency_hz_ != rtp_clock_frequency_hz) { return true; } // Should if the RTP clock frequency is invalid. - if (rtp_clock_frequency <= 0) { + if (rtp_clock_frequency_hz <= 0) { return true; } @@ -109,8 +110,9 @@ bool AbsoluteCaptureTimeSender::ShouldSendExtension( // Should if interpolation would introduce too much error. 
const uint64_t interpolated_absolute_capture_timestamp = AbsoluteCaptureTimeInterpolator::InterpolateAbsoluteCaptureTimestamp( - rtp_timestamp, rtp_clock_frequency, last_rtp_timestamp_, - last_absolute_capture_timestamp_); + rtp_timestamp, rtp_clock_frequency_hz, last_rtp_timestamp_, + uint64_t{last_absolute_capture_time_}); + const uint64_t absolute_capture_timestamp = uint64_t{absolute_capture_time}; const int64_t interpolation_error_ms = UQ32x32ToInt64Ms(std::min( interpolated_absolute_capture_timestamp - absolute_capture_timestamp, absolute_capture_timestamp - interpolated_absolute_capture_timestamp)); diff --git a/modules/rtp_rtcp/source/absolute_capture_time_sender.h b/modules/rtp_rtcp/source/absolute_capture_time_sender.h index be5a77d5e1..352754c7b7 100644 --- a/modules/rtp_rtcp/source/absolute_capture_time_sender.h +++ b/modules/rtp_rtcp/source/absolute_capture_time_sender.h @@ -11,13 +11,15 @@ #ifndef MODULES_RTP_RTCP_SOURCE_ABSOLUTE_CAPTURE_TIME_SENDER_H_ #define MODULES_RTP_RTCP_SOURCE_ABSOLUTE_CAPTURE_TIME_SENDER_H_ +#include +#include + #include "api/array_view.h" #include "api/rtp_headers.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" +#include "system_wrappers/include/ntp_time.h" namespace webrtc { @@ -40,48 +42,67 @@ namespace webrtc { // class AbsoluteCaptureTimeSender { public: - static constexpr TimeDelta kInterpolationMaxInterval = - TimeDelta::Millis(1000); + static constexpr TimeDelta kInterpolationMaxInterval = TimeDelta::Seconds(1); static constexpr TimeDelta kInterpolationMaxError = TimeDelta::Millis(1); explicit AbsoluteCaptureTimeSender(Clock* clock); // Returns the source (i.e. SSRC or CSRC) of the capture system. - static uint32_t GetSource(uint32_t ssrc, - rtc::ArrayView csrcs); + static uint32_t GetSource(uint32_t ssrc, ArrayView csrcs); + + // Returns value to write into AbsoluteCaptureTime RTP header extension to be + // sent, or `std::nullopt` if the header extension shouldn't be attached to + // the outgoing packet. + // + // - `source` - id of the capture system. + // - `rtp_timestamp` - capture time represented as rtp timestamp in the + // outgoing packet. + // - `rtp_clock_frequency_hz` - description of the `rtp_timestamp` units - + // `rtp_timestamp` delta of `rtp_clock_frequency_hz` represents 1 second. + // - `absolute_capture_time` - time when a frame was captured by the capture + // system. + // - `estimated_capture_clock_offset` - estimated offset between capture + // system clock and local `clock` passed as the AbsoluteCaptureTimeSender + // construction parameter. Uses the same units as `absolute_capture_time`, + // i.e. delta of 2^32 represents 1 second. See AbsoluteCaptureTime type + // comments for more details. + // - `force` - when set to true, OnSendPacket is forced to return non-nullopt. + std::optional OnSendPacket( + uint32_t source, + uint32_t rtp_timestamp, + int rtp_clock_frequency_hz, + NtpTime absolute_capture_time, + std::optional estimated_capture_clock_offset, + bool force = false); - // Returns a header extension to be sent, or `absl::nullopt` if the header + // Returns a header extension to be sent, or `std::nullopt` if the header // extension shouldn't be sent.
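Before the deprecated legacy overload below, a short send-side sketch of the new NtpTime-based OnSendPacket documented above. The function name and the SSRC, timestamp and frequency values are illustrative and not part of the patch; only the sender API and Clock helpers come from the sources above.

#include <optional>

#include "api/rtp_headers.h"
#include "modules/rtp_rtcp/source/absolute_capture_time_sender.h"
#include "system_wrappers/include/clock.h"
#include "system_wrappers/include/ntp_time.h"

// Illustrative send path: attach the extension only when OnSendPacket says so.
std::optional<webrtc::AbsoluteCaptureTime> SketchSendPath(
    webrtc::Clock& clock, webrtc::AbsoluteCaptureTimeSender& sender) {
  const webrtc::NtpTime capture_time =
      clock.ConvertTimestampToNtpTime(clock.CurrentTime());
  // Returns std::nullopt when a receiver can interpolate the value from an
  // earlier packet; pass force=true to always emit the extension.
  return sender.OnSendPacket(/*source=*/1337,
                             /*rtp_timestamp=*/1020300000,
                             /*rtp_clock_frequency_hz=*/64'000,
                             capture_time,
                             /*estimated_capture_clock_offset=*/std::nullopt,
                             /*force=*/false);
}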
- absl::optional OnSendPacket( + [[deprecated]] std::optional OnSendPacket( uint32_t source, uint32_t rtp_timestamp, uint32_t rtp_clock_frequency, uint64_t absolute_capture_timestamp, - absl::optional estimated_capture_clock_offset); + std::optional estimated_capture_clock_offset); private: bool ShouldSendExtension( Timestamp send_time, uint32_t source, uint32_t rtp_timestamp, - uint32_t rtp_clock_frequency, - uint64_t absolute_capture_timestamp, - absl::optional estimated_capture_clock_offset) const - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + int rtp_clock_frequency_hz, + NtpTime absolute_capture_time, + std::optional estimated_capture_clock_offset) const; Clock* const clock_; - Mutex mutex_; - - Timestamp last_send_time_ RTC_GUARDED_BY(mutex_); + Timestamp last_send_time_ = Timestamp::MinusInfinity(); - uint32_t last_source_ RTC_GUARDED_BY(mutex_); - uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(mutex_); - uint32_t last_rtp_clock_frequency_ RTC_GUARDED_BY(mutex_); - uint64_t last_absolute_capture_timestamp_ RTC_GUARDED_BY(mutex_); - absl::optional last_estimated_capture_clock_offset_ - RTC_GUARDED_BY(mutex_); -}; // AbsoluteCaptureTimeSender + uint32_t last_source_; + uint32_t last_rtp_timestamp_; + int last_rtp_clock_frequency_hz_; + NtpTime last_absolute_capture_time_; + std::optional last_estimated_capture_clock_offset_; +}; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/absolute_capture_time_sender_unittest.cc b/modules/rtp_rtcp/source/absolute_capture_time_sender_unittest.cc index db3fc75100..2b8e240b94 100644 --- a/modules/rtp_rtcp/source/absolute_capture_time_sender_unittest.cc +++ b/modules/rtp_rtcp/source/absolute_capture_time_sender_unittest.cc @@ -10,8 +10,13 @@ #include "modules/rtp_rtcp/source/absolute_capture_time_sender.h" +#include +#include + +#include "api/rtp_headers.h" +#include "api/units/time_delta.h" +#include "system_wrappers/include/clock.h" #include "system_wrappers/include/ntp_time.h" -#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { @@ -19,7 +24,7 @@ namespace webrtc { TEST(AbsoluteCaptureTimeSenderTest, GetSourceWithoutCsrcs) { constexpr uint32_t kSsrc = 12; - EXPECT_EQ(AbsoluteCaptureTimeSender::GetSource(kSsrc, nullptr), kSsrc); + EXPECT_EQ(AbsoluteCaptureTimeSender::GetSource(kSsrc, {}), kSsrc); } TEST(AbsoluteCaptureTimeSenderTest, GetSourceWithCsrcs) { @@ -31,343 +36,368 @@ TEST(AbsoluteCaptureTimeSenderTest, GetSourceWithCsrcs) { TEST(AbsoluteCaptureTimeSenderTest, InterpolateLaterPacketSentLater) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency = 64000; + constexpr int kRtpClockFrequency = 64'000; constexpr uint32_t kRtpTimestamp0 = 1020300000; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 + 1280; constexpr uint32_t kRtpTimestamp2 = kRtpTimestamp0 + 2560; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 + 20), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension2 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 + 40), Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension0 = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension1 = {Int64MsToUQ32x32(9000 + 20), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension2 = {Int64MsToUQ32x32(9000 + 40), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeSender sender(&clock); EXPECT_EQ(sender.OnSendPacket(kSource, 
kRtpTimestamp0, kRtpClockFrequency, - kExtension0->absolute_capture_timestamp, - kExtension0->estimated_capture_clock_offset), + NtpTime(kExtension0.absolute_capture_timestamp), + kExtension0.estimated_capture_clock_offset), kExtension0); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp1, kRtpClockFrequency, - kExtension1->absolute_capture_timestamp, - kExtension1->estimated_capture_clock_offset), - absl::nullopt); + NtpTime(kExtension1.absolute_capture_timestamp), + kExtension1.estimated_capture_clock_offset), + std::nullopt); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp2, kRtpClockFrequency, - kExtension2->absolute_capture_timestamp, - kExtension2->estimated_capture_clock_offset), - absl::nullopt); + NtpTime(kExtension2.absolute_capture_timestamp), + kExtension2.estimated_capture_clock_offset), + std::nullopt); } TEST(AbsoluteCaptureTimeSenderTest, InterpolateEarlierPacketSentLater) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency = 64000; + constexpr int kRtpClockFrequency = 64'000; constexpr uint32_t kRtpTimestamp0 = 1020300000; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 - 1280; constexpr uint32_t kRtpTimestamp2 = kRtpTimestamp0 - 2560; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 - 20), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension2 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 - 40), Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension0 = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension1 = {Int64MsToUQ32x32(9000 - 20), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension2 = {Int64MsToUQ32x32(9000 - 40), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeSender sender(&clock); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp0, kRtpClockFrequency, - kExtension0->absolute_capture_timestamp, - kExtension0->estimated_capture_clock_offset), + NtpTime(kExtension0.absolute_capture_timestamp), + kExtension0.estimated_capture_clock_offset), kExtension0); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp1, kRtpClockFrequency, - kExtension1->absolute_capture_timestamp, - kExtension1->estimated_capture_clock_offset), - absl::nullopt); + NtpTime(kExtension1.absolute_capture_timestamp), + kExtension1.estimated_capture_clock_offset), + std::nullopt); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp2, kRtpClockFrequency, - kExtension2->absolute_capture_timestamp, - kExtension2->estimated_capture_clock_offset), - absl::nullopt); + NtpTime(kExtension2.absolute_capture_timestamp), + kExtension2.estimated_capture_clock_offset), + std::nullopt); } TEST(AbsoluteCaptureTimeSenderTest, InterpolateLaterPacketSentLaterWithRtpTimestampWrapAround) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency = 64000; - constexpr uint32_t kRtpTimestamp0 = ~uint32_t{0} - 79; + constexpr int kRtpClockFrequency = 64'000; + constexpr uint32_t kRtpTimestamp0 = uint32_t{0} - 80; constexpr uint32_t kRtpTimestamp1 = 1280 - 80; constexpr uint32_t kRtpTimestamp2 = 2560 - 80; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 + 20), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension2 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 + 40), 
Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension0 = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension1 = {Int64MsToUQ32x32(9000 + 20), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension2 = {Int64MsToUQ32x32(9000 + 40), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeSender sender(&clock); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp0, kRtpClockFrequency, - kExtension0->absolute_capture_timestamp, - kExtension0->estimated_capture_clock_offset), + NtpTime(kExtension0.absolute_capture_timestamp), + kExtension0.estimated_capture_clock_offset), kExtension0); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp1, kRtpClockFrequency, - kExtension1->absolute_capture_timestamp, - kExtension1->estimated_capture_clock_offset), - absl::nullopt); + NtpTime(kExtension1.absolute_capture_timestamp), + kExtension1.estimated_capture_clock_offset), + std::nullopt); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp2, kRtpClockFrequency, - kExtension2->absolute_capture_timestamp, - kExtension2->estimated_capture_clock_offset), - absl::nullopt); + NtpTime(kExtension2.absolute_capture_timestamp), + kExtension2.estimated_capture_clock_offset), + std::nullopt); } TEST(AbsoluteCaptureTimeSenderTest, InterpolateEarlierPacketSentLaterWithRtpTimestampWrapAround) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency = 64000; + constexpr int kRtpClockFrequency = 64'000; constexpr uint32_t kRtpTimestamp0 = 799; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 - 1280; constexpr uint32_t kRtpTimestamp2 = kRtpTimestamp0 - 2560; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 - 20), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension2 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 - 40), Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension0 = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension1 = {Int64MsToUQ32x32(9000 - 20), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension2 = {Int64MsToUQ32x32(9000 - 40), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeSender sender(&clock); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp0, kRtpClockFrequency, - kExtension0->absolute_capture_timestamp, - kExtension0->estimated_capture_clock_offset), + NtpTime(kExtension0.absolute_capture_timestamp), + kExtension0.estimated_capture_clock_offset), kExtension0); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp1, kRtpClockFrequency, - kExtension1->absolute_capture_timestamp, - kExtension1->estimated_capture_clock_offset), - absl::nullopt); + NtpTime(kExtension1.absolute_capture_timestamp), + kExtension1.estimated_capture_clock_offset), + std::nullopt); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp2, kRtpClockFrequency, - kExtension2->absolute_capture_timestamp, - kExtension2->estimated_capture_clock_offset), - absl::nullopt); + NtpTime(kExtension2.absolute_capture_timestamp), + kExtension2.estimated_capture_clock_offset), + std::nullopt); } TEST(AbsoluteCaptureTimeSenderTest, SkipInterpolateIfTooLate) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency = 64000; + constexpr int kRtpClockFrequency = 64'000; constexpr uint32_t kRtpTimestamp0 = 1020300000; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 + 1280; constexpr uint32_t 
kRtpTimestamp2 = kRtpTimestamp0 + 2560; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 + 20), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension2 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 + 40), Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension0 = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension1 = {Int64MsToUQ32x32(9000 + 20), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension2 = {Int64MsToUQ32x32(9000 + 40), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeSender sender(&clock); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp0, kRtpClockFrequency, - kExtension0->absolute_capture_timestamp, - kExtension0->estimated_capture_clock_offset), + NtpTime(kExtension0.absolute_capture_timestamp), + kExtension0.estimated_capture_clock_offset), kExtension0); clock.AdvanceTime(AbsoluteCaptureTimeSender::kInterpolationMaxInterval); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp1, kRtpClockFrequency, - kExtension1->absolute_capture_timestamp, - kExtension1->estimated_capture_clock_offset), - absl::nullopt); + NtpTime(kExtension1.absolute_capture_timestamp), + kExtension1.estimated_capture_clock_offset), + std::nullopt); - clock.AdvanceTimeMicroseconds(1); + clock.AdvanceTime(TimeDelta::Millis(1)); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp2, kRtpClockFrequency, - kExtension2->absolute_capture_timestamp, - kExtension2->estimated_capture_clock_offset), + NtpTime(kExtension2.absolute_capture_timestamp), + kExtension2.estimated_capture_clock_offset), kExtension2); } TEST(AbsoluteCaptureTimeSenderTest, SkipInterpolateIfSourceChanged) { constexpr uint32_t kSource0 = 1337; constexpr uint32_t kSource1 = 1338; - constexpr uint32_t kSource2 = 1338; - constexpr uint32_t kRtpClockFrequency = 64000; + constexpr int kRtpClockFrequency = 64'000; constexpr uint32_t kRtpTimestamp0 = 1020300000; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 + 1280; constexpr uint32_t kRtpTimestamp2 = kRtpTimestamp0 + 2560; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 + 20), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension2 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 + 40), Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension0 = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension1 = {Int64MsToUQ32x32(9000 + 20), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension2 = {Int64MsToUQ32x32(9000 + 40), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeSender sender(&clock); EXPECT_EQ(sender.OnSendPacket(kSource0, kRtpTimestamp0, kRtpClockFrequency, - kExtension0->absolute_capture_timestamp, - kExtension0->estimated_capture_clock_offset), + NtpTime(kExtension0.absolute_capture_timestamp), + kExtension0.estimated_capture_clock_offset), kExtension0); EXPECT_EQ(sender.OnSendPacket(kSource1, kRtpTimestamp1, kRtpClockFrequency, - kExtension1->absolute_capture_timestamp, - kExtension1->estimated_capture_clock_offset), + NtpTime(kExtension1.absolute_capture_timestamp), + kExtension1.estimated_capture_clock_offset), kExtension1); - EXPECT_EQ(sender.OnSendPacket(kSource2, kRtpTimestamp2, kRtpClockFrequency, - 
kExtension2->absolute_capture_timestamp, - kExtension2->estimated_capture_clock_offset), - absl::nullopt); + EXPECT_EQ(sender.OnSendPacket(kSource1, kRtpTimestamp2, kRtpClockFrequency, + NtpTime(kExtension2.absolute_capture_timestamp), + kExtension2.estimated_capture_clock_offset), + std::nullopt); +} + +TEST(AbsoluteCaptureTimeSenderTest, SkipInterpolateWhenForced) { + constexpr uint32_t kSource = 1337; + constexpr int kRtpClockFrequency = 64'000; + constexpr uint32_t kRtpTimestamp0 = 1020300000; + constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 + 1280; + constexpr uint32_t kRtpTimestamp2 = kRtpTimestamp0 + 2560; + const AbsoluteCaptureTime kExtension0 = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension1 = {Int64MsToUQ32x32(9000 + 20), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension2 = {Int64MsToUQ32x32(9000 + 40), + Int64MsToQ32x32(-350)}; + + SimulatedClock clock(0); + AbsoluteCaptureTimeSender sender(&clock); + + EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp0, kRtpClockFrequency, + NtpTime(kExtension0.absolute_capture_timestamp), + kExtension0.estimated_capture_clock_offset), + kExtension0); + + EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp1, kRtpClockFrequency, + NtpTime(kExtension1.absolute_capture_timestamp), + kExtension1.estimated_capture_clock_offset, + /*force=*/true), + kExtension1); + + EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp2, kRtpClockFrequency, + NtpTime(kExtension2.absolute_capture_timestamp), + kExtension2.estimated_capture_clock_offset, + /*force=*/false), + std::nullopt); } TEST(AbsoluteCaptureTimeSenderTest, SkipInterpolateIfRtpClockFrequencyChanged) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency0 = 64000; - constexpr uint32_t kRtpClockFrequency1 = 32000; - constexpr uint32_t kRtpClockFrequency2 = 32000; + constexpr int kRtpClockFrequency0 = 64'000; + constexpr int kRtpClockFrequency1 = 32'000; constexpr uint32_t kRtpTimestamp0 = 1020300000; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 + 640; constexpr uint32_t kRtpTimestamp2 = kRtpTimestamp0 + 1280; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 + 20), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension2 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 + 40), Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension0 = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension1 = {Int64MsToUQ32x32(9000 + 20), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension2 = {Int64MsToUQ32x32(9000 + 40), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeSender sender(&clock); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp0, kRtpClockFrequency0, - kExtension0->absolute_capture_timestamp, - kExtension0->estimated_capture_clock_offset), + NtpTime(kExtension0.absolute_capture_timestamp), + kExtension0.estimated_capture_clock_offset), kExtension0); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp1, kRtpClockFrequency1, - kExtension1->absolute_capture_timestamp, - kExtension1->estimated_capture_clock_offset), + NtpTime(kExtension1.absolute_capture_timestamp), + kExtension1.estimated_capture_clock_offset), kExtension1); - EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp2, kRtpClockFrequency2, - kExtension2->absolute_capture_timestamp, - 
kExtension2->estimated_capture_clock_offset), - absl::nullopt); + EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp2, kRtpClockFrequency1, + NtpTime(kExtension2.absolute_capture_timestamp), + kExtension2.estimated_capture_clock_offset), + std::nullopt); } TEST(AbsoluteCaptureTimeSenderTest, SkipInterpolateIfRtpClockFrequencyIsInvalid) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency0 = 0; - constexpr uint32_t kRtpClockFrequency1 = 0; - constexpr uint32_t kRtpClockFrequency2 = 0; - constexpr uint32_t kRtpTimestamp0 = 1020300000; - constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0; - constexpr uint32_t kRtpTimestamp2 = kRtpTimestamp0; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 + 20), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension2 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 + 40), Int64MsToQ32x32(-350)}; + constexpr int kRtpClockFrequency = 0; + constexpr uint32_t kRtpTimestamp = 1020300000; + const AbsoluteCaptureTime kExtension0 = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension1 = {Int64MsToUQ32x32(9000 + 20), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension2 = {Int64MsToUQ32x32(9000 + 40), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeSender sender(&clock); - EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp0, kRtpClockFrequency0, - kExtension0->absolute_capture_timestamp, - kExtension0->estimated_capture_clock_offset), + EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp, kRtpClockFrequency, + NtpTime(kExtension0.absolute_capture_timestamp), + kExtension0.estimated_capture_clock_offset), kExtension0); - EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp1, kRtpClockFrequency1, - kExtension1->absolute_capture_timestamp, - kExtension1->estimated_capture_clock_offset), + EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp, kRtpClockFrequency, + NtpTime(kExtension1.absolute_capture_timestamp), + kExtension1.estimated_capture_clock_offset), kExtension1); - EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp2, kRtpClockFrequency2, - kExtension2->absolute_capture_timestamp, - kExtension2->estimated_capture_clock_offset), + EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp, kRtpClockFrequency, + NtpTime(kExtension2.absolute_capture_timestamp), + kExtension2.estimated_capture_clock_offset), kExtension2); } TEST(AbsoluteCaptureTimeSenderTest, SkipInterpolateIfEstimatedCaptureClockOffsetChanged) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency = 64000; + constexpr int kRtpClockFrequency = 64'000; constexpr uint32_t kRtpTimestamp0 = 1020300000; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 + 1280; constexpr uint32_t kRtpTimestamp2 = kRtpTimestamp0 + 2560; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 + 20), Int64MsToQ32x32(370)}; - static const absl::optional kExtension2 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000 + 40), absl::nullopt}; + const AbsoluteCaptureTime kExtension0 = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension1 = {Int64MsToUQ32x32(9000 + 20), + Int64MsToQ32x32(370)}; + const AbsoluteCaptureTime kExtension2 = {Int64MsToUQ32x32(9000 + 40), + std::nullopt}; 
SimulatedClock clock(0); AbsoluteCaptureTimeSender sender(&clock); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp0, kRtpClockFrequency, - kExtension0->absolute_capture_timestamp, - kExtension0->estimated_capture_clock_offset), + NtpTime(kExtension0.absolute_capture_timestamp), + kExtension0.estimated_capture_clock_offset), kExtension0); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp1, kRtpClockFrequency, - kExtension1->absolute_capture_timestamp, - kExtension1->estimated_capture_clock_offset), + NtpTime(kExtension1.absolute_capture_timestamp), + kExtension1.estimated_capture_clock_offset), kExtension1); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp2, kRtpClockFrequency, - kExtension2->absolute_capture_timestamp, - kExtension2->estimated_capture_clock_offset), + NtpTime(kExtension2.absolute_capture_timestamp), + kExtension2.estimated_capture_clock_offset), kExtension2); } TEST(AbsoluteCaptureTimeSenderTest, SkipInterpolateIfTooMuchInterpolationError) { constexpr uint32_t kSource = 1337; - constexpr uint32_t kRtpClockFrequency = 64000; + constexpr int kRtpClockFrequency = 64'000; constexpr uint32_t kRtpTimestamp0 = 1020300000; constexpr uint32_t kRtpTimestamp1 = kRtpTimestamp0 + 1280; constexpr uint32_t kRtpTimestamp2 = kRtpTimestamp0 + 2560; - static const absl::optional kExtension0 = - AbsoluteCaptureTime{Int64MsToUQ32x32(9000), Int64MsToQ32x32(-350)}; - static const absl::optional kExtension1 = - AbsoluteCaptureTime{ - Int64MsToUQ32x32( - 9000 + 20 + - AbsoluteCaptureTimeSender::kInterpolationMaxError.ms()), - Int64MsToQ32x32(-350)}; - static const absl::optional kExtension2 = - AbsoluteCaptureTime{ - Int64MsToUQ32x32( - 9000 + 40 + - AbsoluteCaptureTimeSender::kInterpolationMaxError.ms() + 1), - Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension0 = {Int64MsToUQ32x32(9000), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension1 = { + Int64MsToUQ32x32(9000 + 20 + + AbsoluteCaptureTimeSender::kInterpolationMaxError.ms()), + Int64MsToQ32x32(-350)}; + const AbsoluteCaptureTime kExtension2 = { + Int64MsToUQ32x32(9000 + 40 + + AbsoluteCaptureTimeSender::kInterpolationMaxError.ms() + + 1), + Int64MsToQ32x32(-350)}; SimulatedClock clock(0); AbsoluteCaptureTimeSender sender(&clock); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp0, kRtpClockFrequency, - kExtension0->absolute_capture_timestamp, - kExtension0->estimated_capture_clock_offset), + NtpTime(kExtension0.absolute_capture_timestamp), + kExtension0.estimated_capture_clock_offset), kExtension0); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp1, kRtpClockFrequency, - kExtension1->absolute_capture_timestamp, - kExtension1->estimated_capture_clock_offset), - absl::nullopt); + NtpTime(kExtension1.absolute_capture_timestamp), + kExtension1.estimated_capture_clock_offset), + std::nullopt); EXPECT_EQ(sender.OnSendPacket(kSource, kRtpTimestamp2, kRtpClockFrequency, - kExtension2->absolute_capture_timestamp, - kExtension2->estimated_capture_clock_offset), + NtpTime(kExtension2.absolute_capture_timestamp), + kExtension2.estimated_capture_clock_offset), kExtension2); } diff --git a/modules/rtp_rtcp/source/active_decode_targets_helper.cc b/modules/rtp_rtcp/source/active_decode_targets_helper.cc index 71e7e8cf78..d158532482 100644 --- a/modules/rtp_rtcp/source/active_decode_targets_helper.cc +++ b/modules/rtp_rtcp/source/active_decode_targets_helper.cc @@ -12,6 +12,9 @@ #include +#include +#include + #include "api/array_view.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -24,7 +27,7 @@ 
namespace { // missing. That assumptions allows a simple detection when previous frame is // part of a chain. std::bitset<32> LastSendOnChain(int frame_diff, - rtc::ArrayView chain_diffs) { + ArrayView chain_diffs) { std::bitset<32> bitmask = 0; for (size_t i = 0; i < chain_diffs.size(); ++i) { if (frame_diff == chain_diffs[i]) { @@ -42,7 +45,7 @@ std::bitset<32> AllActive(size_t num) { // Returns bitmask of chains that protect at least one active decode target. std::bitset<32> ActiveChains( - rtc::ArrayView decode_target_protected_by_chain, + ArrayView decode_target_protected_by_chain, int num_chains, std::bitset<32> active_decode_targets) { std::bitset<32> active_chains = 0; @@ -60,11 +63,11 @@ std::bitset<32> ActiveChains( } // namespace void ActiveDecodeTargetsHelper::OnFrame( - rtc::ArrayView decode_target_protected_by_chain, + ArrayView decode_target_protected_by_chain, std::bitset<32> active_decode_targets, bool is_keyframe, int64_t frame_id, - rtc::ArrayView chain_diffs) { + ArrayView chain_diffs) { const int num_chains = chain_diffs.size(); if (num_chains == 0) { // Avoid printing the warning diff --git a/modules/rtp_rtcp/source/active_decode_targets_helper.h b/modules/rtp_rtcp/source/active_decode_targets_helper.h index 13755e8d80..293abafb73 100644 --- a/modules/rtp_rtcp/source/active_decode_targets_helper.h +++ b/modules/rtp_rtcp/source/active_decode_targets_helper.h @@ -14,8 +14,8 @@ #include #include +#include -#include "absl/types/optional.h" #include "api/array_view.h" namespace webrtc { @@ -34,16 +34,16 @@ class ActiveDecodeTargetsHelper { // Decides if active decode target bitmask should be attached to the frame // that is about to be sent. - void OnFrame(rtc::ArrayView decode_target_protected_by_chain, + void OnFrame(ArrayView decode_target_protected_by_chain, std::bitset<32> active_decode_targets, bool is_keyframe, int64_t frame_id, - rtc::ArrayView chain_diffs); + ArrayView chain_diffs); // Returns active decode target to attach to the dependency descriptor. 
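A short sketch of the call pattern around the accessor declared just below, mirroring the unit tests later in this patch; the function name and the chain/decode-target layout are illustrative only.

#include <cstdint>
#include <optional>

#include "modules/rtp_rtcp/source/active_decode_targets_helper.h"

// Illustrative call pattern: report each outgoing frame, then attach the
// bitmask to the dependency descriptor only while the helper still returns it.
std::optional<uint32_t> SketchOnFrame(
    webrtc::ActiveDecodeTargetsHelper& helper) {
  const int kDecodeTargetProtectedByChain[] = {0, 1};  // 2 targets, 2 chains.
  const int kChainDiffsKey[] = {0, 0};
  helper.OnFrame(kDecodeTargetProtectedByChain,
                 /*active_decode_targets=*/0b01,
                 /*is_keyframe=*/true,
                 /*frame_id=*/1, kChainDiffsKey);
  // A set value is written into the frame's dependency descriptor; nullopt
  // means the last sent bitmask still applies and can be omitted.
  return helper.ActiveDecodeTargetsBitmask();
}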
- absl::optional ActiveDecodeTargetsBitmask() const { + std::optional ActiveDecodeTargetsBitmask() const { if (unsent_on_chain_.none()) - return absl::nullopt; + return std::nullopt; return last_active_decode_targets_.to_ulong(); } diff --git a/modules/rtp_rtcp/source/active_decode_targets_helper_unittest.cc b/modules/rtp_rtcp/source/active_decode_targets_helper_unittest.cc index 6f64fd1418..e87ae1719d 100644 --- a/modules/rtp_rtcp/source/active_decode_targets_helper_unittest.cc +++ b/modules/rtp_rtcp/source/active_decode_targets_helper_unittest.cc @@ -10,9 +10,12 @@ #include "modules/rtp_rtcp/source/active_decode_targets_helper.h" +#include +#include +#include #include -#include "absl/types/optional.h" +#include "api/array_view.h" #include "test/gtest.h" namespace webrtc { @@ -29,7 +32,7 @@ TEST(ActiveDecodeTargetsHelperTest, /*active_decode_targets=*/0b11, /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs); - EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), std::nullopt); } TEST(ActiveDecodeTargetsHelperTest, @@ -50,7 +53,7 @@ TEST(ActiveDecodeTargetsHelperTest, /*active_decode_targets=*/0b11, /*is_keyframe=*/true, /*frame_id=*/3, chain_diffs_key); - EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), std::nullopt); } TEST(ActiveDecodeTargetsHelperTest, @@ -78,7 +81,7 @@ TEST(ActiveDecodeTargetsHelperTest, /*active_decode_targets=*/0b01, /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta); - ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), std::nullopt); helper.OnFrame(kDecodeTargetProtectedByChain, /*active_decode_targets=*/0b01, /*is_keyframe=*/true, /*frame_id=*/3, chain_diffs_key); @@ -94,12 +97,12 @@ TEST(ActiveDecodeTargetsHelperTest, helper.OnFrame(kDecodeTargetProtectedByChain, /*active_decode_targets=*/kAll, /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs); - EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), std::nullopt); helper.OnFrame(kDecodeTargetProtectedByChain, /*active_decode_targets=*/kAll, /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs); - EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), std::nullopt); } TEST(ActiveDecodeTargetsHelperTest, @@ -115,7 +118,7 @@ TEST(ActiveDecodeTargetsHelperTest, /*active_decode_targets=*/0b01, /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta); - EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), std::nullopt); } TEST(ActiveDecodeTargetsHelperTest, ReturnsNewBitmaskOnDeltaFrame) { @@ -125,7 +128,7 @@ TEST(ActiveDecodeTargetsHelperTest, ReturnsNewBitmaskOnDeltaFrame) { helper.OnFrame(kDecodeTargetProtectedByChain, /*active_decode_targets=*/0b11, /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key); - ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), std::nullopt); int chain_diffs_delta[] = {1}; helper.OnFrame(kDecodeTargetProtectedByChain, /*active_decode_targets=*/0b01, @@ -142,12 +145,12 @@ TEST(ActiveDecodeTargetsHelperTest, helper.OnFrame(kDecodeTargetProtectedByChain, /*active_decode_targets=*/0b01, /*is_keyframe=*/true, /*frame_id=*/1, chain_diffs_key); - ASSERT_NE(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + ASSERT_NE(helper.ActiveDecodeTargetsBitmask(), std::nullopt); int 
chain_diffs_delta[] = {1}; helper.OnFrame(kDecodeTargetProtectedByChain, /*active_decode_targets=*/0b01, /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta); - ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), std::nullopt); // Reactive all the decode targets helper.OnFrame(kDecodeTargetProtectedByChain, @@ -167,7 +170,7 @@ TEST(ActiveDecodeTargetsHelperTest, ReturnsNulloptAfterSentOnAllActiveChains) { /*active_decode_targets=*/0b111, /*is_keyframe=*/true, /*frame_id=*/0, chain_diffs_key); - ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + ASSERT_EQ(helper.ActiveDecodeTargetsBitmask(), std::nullopt); int chain_diffs_delta1[] = {1, 1, 1}; helper.OnFrame(kDecodeTargetProtectedByChain, @@ -190,7 +193,7 @@ TEST(ActiveDecodeTargetsHelperTest, ReturnsNulloptAfterSentOnAllActiveChains) { /*active_decode_targets=*/kSome, /*is_keyframe=*/false, /*frame_id=*/3, chain_diffs_delta3); - EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), std::nullopt); } TEST(ActiveDecodeTargetsHelperTest, ReturnsBitmaskWhenChanged) { @@ -225,20 +228,20 @@ TEST(ActiveDecodeTargetsHelperTest, ReturnsBitmaskWhenChanged) { } TEST(ActiveDecodeTargetsHelperTest, ReturnsNulloptWhenChainsAreNotUsed) { - const rtc::ArrayView kDecodeTargetProtectedByChain; - const rtc::ArrayView kNoChainDiffs; + const ArrayView kDecodeTargetProtectedByChain; + const ArrayView kNoChainDiffs; ActiveDecodeTargetsHelper helper; helper.OnFrame(kDecodeTargetProtectedByChain, /*active_decode_targets=*/kAll, /*is_keyframe=*/true, /*frame_id=*/0, kNoChainDiffs); - EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), std::nullopt); helper.OnFrame(kDecodeTargetProtectedByChain, /*active_decode_targets=*/0b101, /*is_keyframe=*/false, /*frame_id=*/1, kNoChainDiffs); - EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), std::nullopt); } TEST(ActiveDecodeTargetsHelperTest, Supports32DecodeTargets) { @@ -261,7 +264,7 @@ TEST(ActiveDecodeTargetsHelperTest, Supports32DecodeTargets) { /*active_decode_targets=*/some, /*is_keyframe=*/false, /*frame_id=*/2, chain_diffs_delta); - EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), absl::nullopt); + EXPECT_EQ(helper.ActiveDecodeTargetsBitmask(), std::nullopt); helper.OnFrame(decode_target_protected_by_chain, /*active_decode_targets=*/kAll, /*is_keyframe=*/false, diff --git a/modules/rtp_rtcp/source/byte_io_unittest.cc b/modules/rtp_rtcp/source/byte_io_unittest.cc index e4dea813b8..681915f122 100644 --- a/modules/rtp_rtcp/source/byte_io_unittest.cc +++ b/modules/rtp_rtcp/source/byte_io_unittest.cc @@ -10,6 +10,7 @@ #include "modules/rtp_rtcp/source/byte_io.h" +#include #include #include "test/gtest.h" diff --git a/modules/rtp_rtcp/source/capture_clock_offset_updater.cc b/modules/rtp_rtcp/source/capture_clock_offset_updater.cc index a5b12cb422..861f177739 100644 --- a/modules/rtp_rtcp/source/capture_clock_offset_updater.cc +++ b/modules/rtp_rtcp/source/capture_clock_offset_updater.cc @@ -10,14 +10,20 @@ #include "modules/rtp_rtcp/source/capture_clock_offset_updater.h" +#include +#include + +#include "api/units/time_delta.h" +#include "system_wrappers/include/ntp_time.h" + namespace webrtc { -absl::optional +std::optional CaptureClockOffsetUpdater::AdjustEstimatedCaptureClockOffset( - absl::optional remote_capture_clock_offset) const { - if 
(remote_capture_clock_offset == absl::nullopt || - remote_to_local_clock_offset_ == absl::nullopt) { - return absl::nullopt; + std::optional remote_capture_clock_offset) const { + if (remote_capture_clock_offset == std::nullopt || + remote_to_local_clock_offset_ == std::nullopt) { + return std::nullopt; } // Do calculations as "unsigned" to make overflows deterministic. @@ -25,8 +31,16 @@ CaptureClockOffsetUpdater::AdjustEstimatedCaptureClockOffset( static_cast(*remote_to_local_clock_offset_); } +std::optional CaptureClockOffsetUpdater::ConvertsToTimeDela( + std::optional q32x32) { + if (q32x32 == std::nullopt) { + return std::nullopt; + } + return TimeDelta::Millis(Q32x32ToInt64Ms(*q32x32)); +} + void CaptureClockOffsetUpdater::SetRemoteToLocalClockOffset( - absl::optional offset_q32x32) { + std::optional offset_q32x32) { remote_to_local_clock_offset_ = offset_q32x32; } diff --git a/modules/rtp_rtcp/source/capture_clock_offset_updater.h b/modules/rtp_rtcp/source/capture_clock_offset_updater.h index 71d3eb4831..5bae8b882c 100644 --- a/modules/rtp_rtcp/source/capture_clock_offset_updater.h +++ b/modules/rtp_rtcp/source/capture_clock_offset_updater.h @@ -13,7 +13,9 @@ #include -#include "absl/types/optional.h" +#include + +#include "api/units/time_delta.h" namespace webrtc { @@ -31,8 +33,8 @@ class CaptureClockOffsetUpdater { // Adjusts remote_capture_clock_offset, which originates from Absolute Capture // Time RTP header extension, to get the local clock offset against the // capturer's clock. - absl::optional AdjustEstimatedCaptureClockOffset( - absl::optional remote_capture_clock_offset) const; + std::optional AdjustEstimatedCaptureClockOffset( + std::optional remote_capture_clock_offset) const; // Sets the NTP clock offset between the sender system (which may be different // from the capture system) and the local system. This information is normally @@ -40,10 +42,14 @@ class CaptureClockOffsetUpdater { // by RTCP sender reports (see DLSR/DLRR). // // Note that the value must be in Q32.32-formatted fixed-point seconds. - void SetRemoteToLocalClockOffset(absl::optional offset_q32x32); + void SetRemoteToLocalClockOffset(std::optional offset_q32x32); + + // Converts a signed Q32.32-formatted fixed-point to a TimeDelta. 
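In Q32.32 the upper 32 bits hold whole seconds and the lower 32 bits hold fractional seconds in units of 1/2^32 s (NtpTime::kFractionsPerSecond per second), so -350 ms becomes roughly -350 * 2^32 / 1000, about -1503238554. The helper declared next simply runs that scaling in reverse and wraps the result in a TimeDelta. A small sketch, using only the Int64MsToQ32x32/Q32x32ToInt64Ms helpers from system_wrappers/include/ntp_time.h that the tests below also rely on:

#include <cstdint>

#include "api/units/time_delta.h"
#include "system_wrappers/include/ntp_time.h"

void Q32x32Example() {
  // -350 ms expressed as Q32.32 fixed-point seconds (about -0.35 * 2^32).
  const int64_t offset_q32x32 = webrtc::Int64MsToQ32x32(-350);

  // Back to integer milliseconds, then into a TimeDelta; this mirrors what
  // CaptureClockOffsetUpdater::ConvertsToTimeDela() does for a present value.
  const webrtc::TimeDelta offset =
      webrtc::TimeDelta::Millis(webrtc::Q32x32ToInt64Ms(offset_q32x32));
  // offset.ms() is -350, up to one millisecond of rounding.
  (void)offset;
}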
+ static std::optional ConvertsToTimeDela( + std::optional q32x32); private: - absl::optional remote_to_local_clock_offset_; + std::optional remote_to_local_clock_offset_; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/capture_clock_offset_updater_unittest.cc b/modules/rtp_rtcp/source/capture_clock_offset_updater_unittest.cc index 43e1dd1379..1723f8e20c 100644 --- a/modules/rtp_rtcp/source/capture_clock_offset_updater_unittest.cc +++ b/modules/rtp_rtcp/source/capture_clock_offset_updater_unittest.cc @@ -10,6 +10,10 @@ #include "modules/rtp_rtcp/source/capture_clock_offset_updater.h" +#include +#include + +#include "api/units/time_delta.h" #include "system_wrappers/include/ntp_time.h" #include "test/gmock.h" #include "test/gtest.h" @@ -18,24 +22,24 @@ namespace webrtc { TEST(AbsoluteCaptureTimeReceiverTest, SkipEstimatedCaptureClockOffsetIfRemoteToLocalClockOffsetIsUnknown) { - static const absl::optional kRemoteCaptureClockOffset = + static const std::optional kRemoteCaptureClockOffset = Int64MsToQ32x32(-350); CaptureClockOffsetUpdater updater; - updater.SetRemoteToLocalClockOffset(absl::nullopt); + updater.SetRemoteToLocalClockOffset(std::nullopt); EXPECT_EQ( updater.AdjustEstimatedCaptureClockOffset(kRemoteCaptureClockOffset), - absl::nullopt); + std::nullopt); } TEST(AbsoluteCaptureTimeReceiverTest, SkipEstimatedCaptureClockOffsetIfRemoteCaptureClockOffsetIsUnknown) { - static const absl::optional kCaptureClockOffsetNull = absl::nullopt; + static const std::optional kCaptureClockOffsetNull = std::nullopt; CaptureClockOffsetUpdater updater; updater.SetRemoteToLocalClockOffset(0); EXPECT_EQ(updater.AdjustEstimatedCaptureClockOffset(kCaptureClockOffsetNull), kCaptureClockOffsetNull); - static const absl::optional kRemoteCaptureClockOffset = + static const std::optional kRemoteCaptureClockOffset = Int64MsToQ32x32(-350); EXPECT_EQ( updater.AdjustEstimatedCaptureClockOffset(kRemoteCaptureClockOffset), @@ -43,9 +47,9 @@ TEST(AbsoluteCaptureTimeReceiverTest, } TEST(AbsoluteCaptureTimeReceiverTest, EstimatedCaptureClockOffsetArithmetic) { - static const absl::optional kRemoteCaptureClockOffset = + static const std::optional kRemoteCaptureClockOffset = Int64MsToQ32x32(-350); - static const absl::optional kRemoteToLocalClockOffset = + static const std::optional kRemoteToLocalClockOffset = Int64MsToQ32x32(-7000007); CaptureClockOffsetUpdater updater; updater.SetRemoteToLocalClockOffset(kRemoteToLocalClockOffset); @@ -55,4 +59,25 @@ TEST(AbsoluteCaptureTimeReceiverTest, EstimatedCaptureClockOffsetArithmetic) { *kRemoteToLocalClockOffset))); } +TEST(AbsoluteCaptureTimeReceiverTest, ConvertClockOffset) { + constexpr TimeDelta kNegative = TimeDelta::Millis(-350); + constexpr int64_t kNegativeQ32x32 = + kNegative.ms() * (NtpTime::kFractionsPerSecond / 1000); + constexpr TimeDelta kPositive = TimeDelta::Millis(400); + constexpr int64_t kPositiveQ32x32 = + kPositive.ms() * (NtpTime::kFractionsPerSecond / 1000); + constexpr TimeDelta kEpsilon = TimeDelta::Millis(1); + std::optional converted = + CaptureClockOffsetUpdater::ConvertsToTimeDela(kNegativeQ32x32); + EXPECT_GT(converted, kNegative - kEpsilon); + EXPECT_LT(converted, kNegative + kEpsilon); + + converted = CaptureClockOffsetUpdater::ConvertsToTimeDela(kPositiveQ32x32); + EXPECT_GT(converted, kPositive - kEpsilon); + EXPECT_LT(converted, kPositive + kEpsilon); + + EXPECT_FALSE( + CaptureClockOffsetUpdater::ConvertsToTimeDela(std::nullopt).has_value()); +} + } // namespace webrtc diff --git 
a/modules/rtp_rtcp/source/corruption_detection_extension.cc b/modules/rtp_rtcp/source/corruption_detection_extension.cc new file mode 100644 index 0000000000..934fb2c4f1 --- /dev/null +++ b/modules/rtp_rtcp/source/corruption_detection_extension.cc @@ -0,0 +1,90 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/rtp_rtcp/source/corruption_detection_extension.h" + +#include +#include +#include + +#include "absl/container/inlined_vector.h" +#include "api/array_view.h" +#include "api/transport/rtp/corruption_detection_message.h" + +namespace webrtc { +namespace { + +constexpr size_t kMandatoryPayloadBytes = 1; +constexpr size_t kConfigurationBytes = 3; +constexpr double kMaxValueForStdDev = 40.0; + +} // namespace + +// A description of the extension can be found at +// http://www.webrtc.org/experiments/rtp-hdrext/corruption-detection + +bool CorruptionDetectionExtension::Parse(ArrayView data, + CorruptionDetectionMessage* message) { + if (message == nullptr) { + return false; + } + if ((data.size() != kMandatoryPayloadBytes && + data.size() <= kConfigurationBytes) || + data.size() > kMaxValueSizeBytes) { + return false; + } + message->interpret_sequence_index_as_most_significant_bits_ = data[0] >> 7; + message->sequence_index_ = data[0] & 0b0111'1111; + if (data.size() == kMandatoryPayloadBytes) { + return true; + } + message->std_dev_ = data[1] * kMaxValueForStdDev / 255.0; + uint8_t channel_error_thresholds = data[2]; + message->luma_error_threshold_ = channel_error_thresholds >> 4; + message->chroma_error_threshold_ = channel_error_thresholds & 0xF; + message->sample_values_.assign(data.cbegin() + kConfigurationBytes, + data.cend()); + return true; +} + +bool CorruptionDetectionExtension::Write( + ArrayView data, + const CorruptionDetectionMessage& message) { + if (data.size() != ValueSize(message) || data.size() > kMaxValueSizeBytes) { + return false; + } + + data[0] = message.sequence_index() & 0b0111'1111; + if (message.interpret_sequence_index_as_most_significant_bits()) { + data[0] |= 0b1000'0000; + } + if (message.sample_values().empty()) { + return true; + } + data[1] = static_cast( + std::round(message.std_dev() / kMaxValueForStdDev * 255.0)); + data[2] = (message.luma_error_threshold() << 4) | + (message.chroma_error_threshold() & 0xF); + ArrayView sample_values = data.subview(kConfigurationBytes); + for (size_t i = 0; i < message.sample_values().size(); ++i) { + sample_values[i] = std::floor(message.sample_values()[i]); + } + return true; +} + +size_t CorruptionDetectionExtension::ValueSize( + const CorruptionDetectionMessage& message) { + if (message.sample_values_.empty()) { + return kMandatoryPayloadBytes; + } + return kConfigurationBytes + message.sample_values_.size(); +} + +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/corruption_detection_extension.h b/modules/rtp_rtcp/source/corruption_detection_extension.h new file mode 100644 index 0000000000..4dc6c55b39 --- /dev/null +++ b/modules/rtp_rtcp/source/corruption_detection_extension.h @@ -0,0 +1,51 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_RTP_RTCP_SOURCE_CORRUPTION_DETECTION_EXTENSION_H_ +#define MODULES_RTP_RTCP_SOURCE_CORRUPTION_DETECTION_EXTENSION_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/rtp_parameters.h" +#include "api/transport/rtp/corruption_detection_message.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" + +namespace webrtc { + +// RTP Corruption Detection Header Extension. +// +// The class reads and writes the corruption detection RTP header extension. +// The class implements traits so that the class is compatible with being an +// argument to the templated `RtpPacket::GetExtension` and +// `RtpPacketToSend::SetExtension` methods. +class CorruptionDetectionExtension { + public: + using value_type = CorruptionDetectionMessage; + + static constexpr RTPExtensionType kId = kRtpExtensionCorruptionDetection; + static constexpr uint8_t kMaxValueSizeBytes = 16; + + static constexpr absl::string_view Uri() { + return RtpExtension::kCorruptionDetectionUri; + } + static bool Parse(ArrayView data, + CorruptionDetectionMessage* message); + static bool Write(ArrayView data, + const CorruptionDetectionMessage& message); + // Size of the header extension in bytes. + static size_t ValueSize(const CorruptionDetectionMessage& message); +}; + +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_SOURCE_CORRUPTION_DETECTION_EXTENSION_H_ diff --git a/modules/rtp_rtcp/source/corruption_detection_extension_unittest.cc b/modules/rtp_rtcp/source/corruption_detection_extension_unittest.cc new file mode 100644 index 0000000000..58f888f65b --- /dev/null +++ b/modules/rtp_rtcp/source/corruption_detection_extension_unittest.cc @@ -0,0 +1,249 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
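Wire-format recap before the tests that follow: byte 0 carries the most-significant-bits flag in its top bit plus a 7-bit sequence index; when sample values are present, byte 1 is the standard deviation quantized linearly onto 0..255 over the range 0..40, byte 2 packs the luma error threshold in the high nibble and the chroma error threshold in the low nibble, and each remaining byte is one truncated sample value (at most 13, for a 16-byte extension). A small round-trip sketch built only from the builder methods and extension traits exercised by these tests; the concrete numbers are illustrative, not taken from the patch.

#include <cstdint>
#include <optional>

#include "api/transport/rtp/corruption_detection_message.h"
#include "modules/rtp_rtcp/source/corruption_detection_extension.h"

void CorruptionDetectionRoundTrip() {
  const double kSamples[] = {10.0, 20.0, 30.0};
  const std::optional<webrtc::CorruptionDetectionMessage> message =
      webrtc::CorruptionDetectionMessage::Builder()
          .WithSequenceIndex(5)
          .WithStdDev(20.0)             // Encoded as round(20 / 40 * 255) = 128.
          .WithLumaErrorThreshold(3)    // High nibble of byte 2.
          .WithChromaErrorThreshold(2)  // Low nibble of byte 2.
          .WithSampleValues(kSamples)
          .Build();
  if (!message.has_value()) {
    return;
  }
  // 3 configuration bytes + 3 samples = 6 bytes on the wire.
  uint8_t buffer[6] = {};
  if (webrtc::CorruptionDetectionExtension::Write(buffer, *message)) {
    // buffer now holds {0b0000'0101, 128, 0x32, 10, 20, 30}.
    webrtc::CorruptionDetectionMessage parsed;
    webrtc::CorruptionDetectionExtension::Parse(buffer, &parsed);
  }
}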
+ */ + +#include "modules/rtp_rtcp/source/corruption_detection_extension.h" + +#include +#include +#include + +#include "api/transport/rtp/corruption_detection_message.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::DoubleEq; +using ::testing::ElementsAre; +using ::testing::ElementsAreArray; + +TEST(CorruptionDetectionExtensionTest, ValueSizeIs1UnlessSamplesAreSpecified) { + const std::optional kMessage = + CorruptionDetectionMessage::Builder() + .WithSequenceIndex(0b0110'1111) + .WithInterpretSequenceIndexAsMostSignificantBits(true) + .WithStdDev(8.0) + .WithSampleValues({}) + .Build(); + + ASSERT_NE(kMessage, std::nullopt); + EXPECT_EQ(CorruptionDetectionExtension::ValueSize(*kMessage), size_t{1}); +} + +TEST(CorruptionDetectionExtensionTest, + GivenSamplesTheValueSizeIsTheSumOfTheNumberOfSamplesPlus3) { + const double kSampleValues[] = {1.0, 2.0, 3.0, 4.0}; + const std::optional kMessage = + CorruptionDetectionMessage::Builder() + .WithSampleValues(kSampleValues) + .Build(); + + ASSERT_NE(kMessage, std::nullopt); + EXPECT_EQ(CorruptionDetectionExtension::ValueSize(*kMessage), size_t{7}); +} + +TEST(CorruptionDetectionExtensionTest, + WritesMandatoryWhenEnoughMemoryIsAllocatedWithoutSamples) { + const std::optional kMessage = + CorruptionDetectionMessage::Builder() + .WithSequenceIndex(0b0110'1111) + .WithInterpretSequenceIndexAsMostSignificantBits(true) + .Build(); + uint8_t data[] = {0}; + + ASSERT_NE(kMessage, std::nullopt); + EXPECT_TRUE(CorruptionDetectionExtension::Write(data, *kMessage)); + EXPECT_THAT(data, ElementsAre(0b1110'1111)); +} + +TEST(CorruptionDetectionExtensionTest, + FailsToWriteWhenTooMuchMemoryIsAllocatedWithoutSamples) { + const std::optional kMessage = + CorruptionDetectionMessage::Builder() + .WithSequenceIndex(0b0110'1111) + .WithInterpretSequenceIndexAsMostSignificantBits(true) + .Build(); + uint8_t data[] = {0, 0, 0}; + + ASSERT_NE(kMessage, std::nullopt); + EXPECT_FALSE(CorruptionDetectionExtension::Write(data, *kMessage)); +} + +TEST(CorruptionDetectionExtensionTest, + FailsToWriteWhenTooMuchMemoryIsAllocatedWithSamples) { + const double kSampleValues[] = {1.0}; + const std::optional kMessage = + CorruptionDetectionMessage::Builder() + .WithSequenceIndex(0b0110'1111) + .WithInterpretSequenceIndexAsMostSignificantBits(true) + .WithStdDev(8.0) + .WithSampleValues(kSampleValues) + .Build(); + uint8_t data[] = {0, 0, 0, 0, 0}; + + ASSERT_NE(kMessage, std::nullopt); + EXPECT_FALSE(CorruptionDetectionExtension::Write(data, *kMessage)); +} + +TEST(CorruptionDetectionExtensionTest, + WritesEverythingWhenEnoughMemoryIsAllocatedWithSamples) { + const double kSampleValues[] = {1.0}; + const std::optional kMessage = + CorruptionDetectionMessage::Builder() + .WithSequenceIndex(0b0110'1111) + .WithInterpretSequenceIndexAsMostSignificantBits(true) + .WithStdDev(8.0) + .WithSampleValues(kSampleValues) + .Build(); + uint8_t data[] = {0, 0, 0, 0}; + + ASSERT_NE(kMessage, std::nullopt); + EXPECT_TRUE(CorruptionDetectionExtension::Write(data, *kMessage)); + EXPECT_THAT(data, ElementsAre(0b1110'1111, 51, 0, 1)); +} + +TEST(CorruptionDetectionExtensionTest, + WritesEverythingToExtensionWhenUpperBitsAreUsedForSequenceIndex) { + const double kSampleValues[] = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, + 8.0, 9.0, 10.0, 11.0, 12.0, 13.0}; + const std::optional kMessage = + CorruptionDetectionMessage::Builder() + .WithSequenceIndex(0b0110'1111) + .WithInterpretSequenceIndexAsMostSignificantBits(true) + .WithStdDev(34.5098) // 220 
/ 255.0 * 40.0 + .WithLumaErrorThreshold(0b1110) + .WithChromaErrorThreshold(0b1111) + .WithSampleValues(kSampleValues) + .Build(); + uint8_t data[16]; + + ASSERT_NE(kMessage, std::nullopt); + EXPECT_TRUE(CorruptionDetectionExtension::Write(data, *kMessage)); + EXPECT_THAT(data, ElementsAre(0b1110'1111, 220, 0b1110'1111, 1, 2, 3, 4, 5, 6, + 7, 8, 9, 10, 11, 12, 13)); +} + +TEST(CorruptionDetectionExtensionTest, + WritesEverythingToExtensionWhenLowerBitsAreUsedForSequenceIndex) { + const double kSampleValues[] = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, + 8.0, 9.0, 10.0, 11.0, 12.0, 13.0}; + const std::optional kMessage = + CorruptionDetectionMessage::Builder() + .WithSequenceIndex(0b0110'1111) + .WithInterpretSequenceIndexAsMostSignificantBits(false) + .WithStdDev(34.5098) // 220 / 255.0 * 40.0 + .WithLumaErrorThreshold(0b1110) + .WithChromaErrorThreshold(0b1111) + .WithSampleValues(kSampleValues) + .Build(); + uint8_t data[16]; + + ASSERT_NE(kMessage, std::nullopt); + EXPECT_TRUE(CorruptionDetectionExtension::Write(data, *kMessage)); + EXPECT_THAT(data, ElementsAre(0b0110'1111, 220, 0b1110'1111, 1, 2, 3, 4, 5, 6, + 7, 8, 9, 10, 11, 12, 13)); +} + +TEST(CorruptionDetectionExtensionTest, TruncatesSampleValuesWhenWriting) { + const double kSampleValues[] = {1.4, 2.5, 3.6}; + const std::optional kMessage = + CorruptionDetectionMessage::Builder() + .WithSampleValues(kSampleValues) + .Build(); + uint8_t data[6]; + + ASSERT_NE(kMessage, std::nullopt); + EXPECT_TRUE(CorruptionDetectionExtension::Write(data, *kMessage)); + EXPECT_THAT(data, ElementsAre(0, 0, 0, 1, 2, 3)); +} + +TEST(CorruptionDetectionExtensionTest, ParsesMandatoryFieldsFromExtension) { + CorruptionDetectionMessage message; + const uint8_t kData[] = {0b1110'1111}; + + EXPECT_TRUE(CorruptionDetectionExtension::Parse(kData, &message)); + EXPECT_EQ(message.sequence_index(), 0b0110'1111); + EXPECT_TRUE(message.interpret_sequence_index_as_most_significant_bits()); + EXPECT_THAT(message.std_dev(), DoubleEq(0.0)); + EXPECT_EQ(message.luma_error_threshold(), 0); + EXPECT_EQ(message.chroma_error_threshold(), 0); + EXPECT_THAT(message.sample_values(), ElementsAre()); +} + +TEST(CorruptionDetectionExtensionTest, FailsToParseWhenGivenTooFewFields) { + CorruptionDetectionMessage message; + const uint8_t kData[] = {0b1110'1111, 8, 0}; + + EXPECT_FALSE(CorruptionDetectionExtension::Parse(kData, &message)); +} + +TEST(CorruptionDetectionExtensionTest, + ParsesEverythingFromExtensionWhenUpperBitsAreUsedForSequenceIndex) { + CorruptionDetectionMessage message; + const uint8_t kSampleValues[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13}; + const uint8_t kData[] = {0b1100'0100, 220, + 0b1110'1111, kSampleValues[0], + kSampleValues[1], kSampleValues[2], + kSampleValues[3], kSampleValues[4], + kSampleValues[5], kSampleValues[6], + kSampleValues[7], kSampleValues[8], + kSampleValues[9], kSampleValues[10], + kSampleValues[11], kSampleValues[12]}; + + EXPECT_TRUE(CorruptionDetectionExtension::Parse(kData, &message)); + EXPECT_EQ(message.sequence_index(), 0b0100'0100); + EXPECT_TRUE(message.interpret_sequence_index_as_most_significant_bits()); + EXPECT_THAT(message.std_dev(), + DoubleEq(34.509803921568626)); // 220 / 255.0 * 40.0 + EXPECT_EQ(message.luma_error_threshold(), 0b1110); + EXPECT_EQ(message.chroma_error_threshold(), 0b1111); + EXPECT_THAT(message.sample_values(), ElementsAreArray(kSampleValues)); +} + +TEST(CorruptionDetectionExtensionTest, + ParsesEverythingFromExtensionWhenLowerBitsAreUsedForSequenceIndex) { + CorruptionDetectionMessage message; + 
const uint8_t kSampleValues[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13}; + const uint8_t kData[] = {0b0100'0100, 220, + 0b1110'1111, kSampleValues[0], + kSampleValues[1], kSampleValues[2], + kSampleValues[3], kSampleValues[4], + kSampleValues[5], kSampleValues[6], + kSampleValues[7], kSampleValues[8], + kSampleValues[9], kSampleValues[10], + kSampleValues[11], kSampleValues[12]}; + + EXPECT_TRUE(CorruptionDetectionExtension::Parse(kData, &message)); + EXPECT_EQ(message.sequence_index(), 0b0100'0100); + EXPECT_FALSE(message.interpret_sequence_index_as_most_significant_bits()); + EXPECT_THAT(message.std_dev(), + DoubleEq(34.509803921568626)); // 220 / 255.0 * 40.0 + EXPECT_EQ(message.luma_error_threshold(), 0b1110); + EXPECT_EQ(message.chroma_error_threshold(), 0b1111); + EXPECT_THAT(message.sample_values(), ElementsAreArray(kSampleValues)); +} + +TEST(CorruptionDetectionExtensionTest, FailsToParseWhenGivenNullptrAsOutput) { + const uint8_t kData[] = {0, 0, 0}; + + EXPECT_FALSE(CorruptionDetectionExtension::Parse(kData, nullptr)); +} + +TEST(CorruptionDetectionExtensionTest, + FailsToParseWhenTooManySamplesAreSpecified) { + CorruptionDetectionMessage message; + uint8_t data[17]; + + EXPECT_FALSE(CorruptionDetectionExtension::Parse(data, &message)); +} + +} // namespace +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc b/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc index f1e4eddb4b..cf4b8072ec 100644 --- a/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc +++ b/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc @@ -19,6 +19,10 @@ #include "modules/rtp_rtcp/source/video_rtp_depacketizer_h264.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h" +#include "rtc_base/checks.h" +#ifdef RTC_ENABLE_H265 +#include "modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h" +#endif namespace webrtc { @@ -33,8 +37,13 @@ std::unique_ptr CreateVideoRtpDepacketizer( return std::make_unique(); case kVideoCodecAV1: return std::make_unique(); + case kVideoCodecH265: +#ifdef RTC_ENABLE_H265 + return std::make_unique(); +#else + return nullptr; +#endif case kVideoCodecGeneric: - case kVideoCodecMultiplex: return std::make_unique(); } RTC_CHECK_NOTREACHED(); diff --git a/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc index e2531bb1b1..b6cf0d678f 100644 --- a/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc +++ b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc @@ -10,20 +10,35 @@ #include "modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h" -#include +#include +#include #include -#include - -#include "absl/strings/match.h" +#include +#include + +#include "api/array_view.h" +#include "api/call/transport.h" +#include "api/environment/environment.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" -#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/packet_sequencer.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet_history.h" +#include 
"modules/rtp_rtcp/source/rtp_rtcp_interface.h" +#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" +#include "rtc_base/bitrate_tracker.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { namespace { constexpr uint32_t kTimestampTicksPerMs = 90; -constexpr int kSendSideDelayWindowMs = 1000; constexpr TimeDelta kBitrateStatisticsWindow = TimeDelta::Seconds(1); constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13; @@ -59,29 +74,25 @@ void DEPRECATED_RtpSenderEgress::NonPacedPacketSender::EnqueuePackets( } DEPRECATED_RtpSenderEgress::DEPRECATED_RtpSenderEgress( + const Environment& env, const RtpRtcpInterface::Configuration& config, RtpPacketHistory* packet_history) - : ssrc_(config.local_media_ssrc), + : env_(env), + ssrc_(config.local_media_ssrc), rtx_ssrc_(config.rtx_send_ssrc), flexfec_ssrc_(config.fec_generator ? config.fec_generator->FecSsrc() - : absl::nullopt), + : std::nullopt), populate_network2_timestamp_(config.populate_network2_timestamp), - clock_(config.clock), packet_history_(packet_history), transport_(config.outgoing_transport), - event_log_(config.event_log), - is_audio_(config.audio), need_rtp_packet_infos_(config.need_rtp_packet_infos), transport_feedback_observer_(config.transport_feedback_callback), - send_side_delay_observer_(config.send_side_delay_observer), send_packet_observer_(config.send_packet_observer), rtp_stats_callback_(config.rtp_stats_callback), bitrate_callback_(config.send_bitrate_observer), media_has_been_sent_(false), force_part_of_allocation_(false), timestamp_offset_(0), - max_delay_it_(send_delays_.end()), - sum_delays_ms_(0), send_rates_(kNumMediaTypes, BitrateTracker(kBitrateStatisticsWindow)), rtp_sequence_number_map_(need_rtp_packet_infos_ ? std::make_unique( @@ -96,29 +107,8 @@ void DEPRECATED_RtpSenderEgress::SendPacket( const uint32_t packet_ssrc = packet->Ssrc(); RTC_DCHECK(packet->packet_type().has_value()); RTC_DCHECK(HasCorrectSsrc(*packet)); - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); int64_t now_ms = now.ms(); - - if (is_audio_) { -#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE - BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "AudioTotBitrate_kbps", now_ms, - GetSendRates().Sum().kbps(), packet_ssrc); - BWE_TEST_LOGGING_PLOT_WITH_SSRC( - 1, "AudioNackBitrate_kbps", now_ms, - GetSendRates()[RtpPacketMediaType::kRetransmission].kbps(), - packet_ssrc); -#endif - } else { -#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE - BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "VideoTotBitrate_kbps", now_ms, - GetSendRates().Sum().kbps(), packet_ssrc); - BWE_TEST_LOGGING_PLOT_WITH_SSRC( - 1, "VideoNackBitrate_kbps", now_ms, - GetSendRates()[RtpPacketMediaType::kRetransmission].kbps(), - packet_ssrc); -#endif - } - PacketOptions options; { MutexLock lock(&lock_); @@ -163,12 +153,9 @@ void DEPRECATED_RtpSenderEgress::SendPacket( } } - const bool is_media = packet->packet_type() == RtpPacketMediaType::kAudio || - packet->packet_type() == RtpPacketMediaType::kVideo; + options.is_media = packet->packet_type() == RtpPacketMediaType::kAudio || + packet->packet_type() == RtpPacketMediaType::kVideo; - // Downstream code actually uses this flag to distinguish between media and - // everything else. 
- options.is_retransmit = !is_media; if (auto packet_id = packet->GetExtension()) { options.packet_id = *packet_id; options.included_in_feedback = true; @@ -176,11 +163,8 @@ void DEPRECATED_RtpSenderEgress::SendPacket( AddPacketToTransportFeedback(*packet_id, *packet, pacing_info); } - options.additional_data = packet->additional_data(); - if (packet->packet_type() != RtpPacketMediaType::kPadding && packet->packet_type() != RtpPacketMediaType::kRetransmission) { - UpdateDelayStatistics(packet->capture_time().ms(), now_ms, packet_ssrc); UpdateOnSendPacket(options.packet_id, packet->capture_time().ms(), packet_ssrc); } @@ -189,7 +173,7 @@ void DEPRECATED_RtpSenderEgress::SendPacket( // Put packet in retransmission history or update pending status even if // actual sending fails. - if (is_media && packet->allow_retransmission()) { + if (options.is_media && packet->allow_retransmission()) { packet_history_->PutRtpPacket(std::make_unique(*packet), now); } else if (packet->retransmitted_sequence_number()) { @@ -220,7 +204,7 @@ RtpSendRates DEPRECATED_RtpSenderEgress::GetSendRates() const { } RtpSendRates DEPRECATED_RtpSenderEgress::GetSendRatesLocked() const { - const Timestamp now = clock_->CurrentTime(); + const Timestamp now = env_.clock().CurrentTime(); RtpSendRates current_rates; for (size_t i = 0; i < kNumMediaTypes; ++i) { RtpPacketMediaType type = static_cast(i); @@ -233,8 +217,15 @@ void DEPRECATED_RtpSenderEgress::GetDataCounters( StreamDataCounters* rtp_stats, StreamDataCounters* rtx_stats) const { MutexLock lock(&lock_); - *rtp_stats = rtp_stats_; - *rtx_stats = rtx_rtp_stats_; + if (rtp_stats_callback_) { + *rtp_stats = rtp_stats_callback_->GetDataCounters(ssrc_); + if (rtx_ssrc_.has_value()) { + *rtx_stats = rtp_stats_callback_->GetDataCounters(*rtx_ssrc_); + } + } else { + *rtp_stats = rtp_stats_; + *rtx_stats = rtx_rtp_stats_; + } } void DEPRECATED_RtpSenderEgress::ForceIncludeSendPacketsInAllocation( @@ -260,7 +251,7 @@ void DEPRECATED_RtpSenderEgress::SetTimestampOffset(uint32_t timestamp) { std::vector DEPRECATED_RtpSenderEgress::GetSentRtpPacketInfos( - rtc::ArrayView sequence_numbers) const { + ArrayView sequence_numbers) const { RTC_DCHECK(!sequence_numbers.empty()); if (!need_rtp_packet_infos_) { return std::vector(); @@ -317,88 +308,6 @@ void DEPRECATED_RtpSenderEgress::AddPacketToTransportFeedback( } } -void DEPRECATED_RtpSenderEgress::UpdateDelayStatistics(int64_t capture_time_ms, - int64_t now_ms, - uint32_t ssrc) { - if (!send_side_delay_observer_ || capture_time_ms <= 0) - return; - - int avg_delay_ms = 0; - int max_delay_ms = 0; - { - MutexLock lock(&lock_); - // Compute the max and average of the recent capture-to-send delays. - // The time complexity of the current approach depends on the distribution - // of the delay values. This could be done more efficiently. - - // Remove elements older than kSendSideDelayWindowMs. - auto lower_bound = - send_delays_.lower_bound(now_ms - kSendSideDelayWindowMs); - for (auto it = send_delays_.begin(); it != lower_bound; ++it) { - if (max_delay_it_ == it) { - max_delay_it_ = send_delays_.end(); - } - sum_delays_ms_ -= it->second; - } - send_delays_.erase(send_delays_.begin(), lower_bound); - if (max_delay_it_ == send_delays_.end()) { - // Removed the previous max. Need to recompute. - RecomputeMaxSendDelay(); - } - - // Add the new element. 
- RTC_DCHECK_GE(now_ms, 0); - RTC_DCHECK_LE(now_ms, std::numeric_limits::max() / 2); - RTC_DCHECK_GE(capture_time_ms, 0); - RTC_DCHECK_LE(capture_time_ms, std::numeric_limits::max() / 2); - int64_t diff_ms = now_ms - capture_time_ms; - RTC_DCHECK_GE(diff_ms, static_cast(0)); - RTC_DCHECK_LE(diff_ms, std::numeric_limits::max()); - int new_send_delay = rtc::dchecked_cast(now_ms - capture_time_ms); - SendDelayMap::iterator it; - bool inserted; - std::tie(it, inserted) = - send_delays_.insert(std::make_pair(now_ms, new_send_delay)); - if (!inserted) { - // TODO(terelius): If we have multiple delay measurements during the same - // millisecond then we keep the most recent one. It is not clear that this - // is the right decision, but it preserves an earlier behavior. - int previous_send_delay = it->second; - sum_delays_ms_ -= previous_send_delay; - it->second = new_send_delay; - if (max_delay_it_ == it && new_send_delay < previous_send_delay) { - RecomputeMaxSendDelay(); - } - } - if (max_delay_it_ == send_delays_.end() || - it->second >= max_delay_it_->second) { - max_delay_it_ = it; - } - sum_delays_ms_ += new_send_delay; - - size_t num_delays = send_delays_.size(); - RTC_DCHECK(max_delay_it_ != send_delays_.end()); - max_delay_ms = rtc::dchecked_cast(max_delay_it_->second); - int64_t avg_ms = (sum_delays_ms_ + num_delays / 2) / num_delays; - RTC_DCHECK_GE(avg_ms, static_cast(0)); - RTC_DCHECK_LE(avg_ms, - static_cast(std::numeric_limits::max())); - avg_delay_ms = - rtc::dchecked_cast((sum_delays_ms_ + num_delays / 2) / num_delays); - } - send_side_delay_observer_->SendSideDelayUpdated(avg_delay_ms, max_delay_ms, - ssrc); -} - -void DEPRECATED_RtpSenderEgress::RecomputeMaxSendDelay() { - max_delay_it_ = send_delays_.begin(); - for (auto it = send_delays_.begin(); it != send_delays_.end(); ++it) { - if (it->second >= max_delay_it_->second) { - max_delay_it_ = it; - } - } -} - void DEPRECATED_RtpSenderEgress::UpdateOnSendPacket(int packet_id, int64_t capture_time_ms, uint32_t ssrc) { @@ -414,29 +323,26 @@ bool DEPRECATED_RtpSenderEgress::SendPacketToNetwork( const RtpPacketToSend& packet, const PacketOptions& options, const PacedPacketInfo& pacing_info) { - int bytes_sent = -1; - if (transport_) { - bytes_sent = transport_->SendRtp(packet, options) - ? static_cast(packet.size()) - : -1; - if (event_log_ && bytes_sent > 0) { - event_log_->Log(std::make_unique( - packet, pacing_info.probe_cluster_id)); - } - } - - if (bytes_sent <= 0) { + if (transport_ == nullptr || !transport_->SendRtp(packet, options)) { RTC_LOG(LS_WARNING) << "Transport failed to send packet."; return false; } + + env_.event_log().Log(std::make_unique( + packet, pacing_info.probe_cluster_id)); return true; } void DEPRECATED_RtpSenderEgress::UpdateRtpStats(const RtpPacketToSend& packet) { - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); - StreamDataCounters* counters = - packet.Ssrc() == rtx_ssrc_ ? &rtx_rtp_stats_ : &rtp_stats_; + StreamDataCounters* counters = nullptr; + if (rtp_stats_callback_) { + rtp_stats_ = rtp_stats_callback_->GetDataCounters(packet.Ssrc()); + counters = &rtp_stats_; + } else { + counters = packet.Ssrc() == rtx_ssrc_ ? 
&rtx_rtp_stats_ : &rtp_stats_; + } counters->MaybeSetFirstPacketTime(now); diff --git a/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h index e786d90c2f..4a3e1f8bff 100644 --- a/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h +++ b/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h @@ -11,14 +11,16 @@ #ifndef MODULES_RTP_RTCP_SOURCE_DEPRECATED_DEPRECATED_RTP_SENDER_EGRESS_H_ #define MODULES_RTP_RTCP_SOURCE_DEPRECATED_DEPRECATED_RTP_SENDER_EGRESS_H_ -#include +#include #include +#include #include -#include "absl/types/optional.h" +#include "api/array_view.h" #include "api/call/transport.h" -#include "api/rtc_event_log/rtc_event_log.h" -#include "api/units/data_rate.h" +#include "api/environment/environment.h" +#include "api/rtp_packet_sender.h" +#include "api/transport/network_types.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/packet_sequencer.h" #include "modules/rtp_rtcp/source/rtp_packet_history.h" @@ -43,7 +45,7 @@ class DEPRECATED_RtpSenderEgress { void EnqueuePackets( std::vector> packets) override; - void RemovePacketsForSsrc(uint32_t ssrc) override {} + void RemovePacketsForSsrc(uint32_t /* ssrc */) override {} private: uint16_t transport_sequence_number_; @@ -51,15 +53,16 @@ class DEPRECATED_RtpSenderEgress { PacketSequencer* sequence_number_assigner_; }; - DEPRECATED_RtpSenderEgress(const RtpRtcpInterface::Configuration& config, + DEPRECATED_RtpSenderEgress(const Environment& env, + const RtpRtcpInterface::Configuration& config, RtpPacketHistory* packet_history); ~DEPRECATED_RtpSenderEgress() = default; void SendPacket(RtpPacketToSend* packet, const PacedPacketInfo& pacing_info) RTC_LOCKS_EXCLUDED(lock_); uint32_t Ssrc() const { return ssrc_; } - absl::optional RtxSsrc() const { return rtx_ssrc_; } - absl::optional FlexFecSsrc() const { return flexfec_ssrc_; } + std::optional RtxSsrc() const { return rtx_ssrc_; } + std::optional FlexFecSsrc() const { return flexfec_ssrc_; } void ProcessBitrateAndNotifyObservers() RTC_LOCKS_EXCLUDED(lock_); RtpSendRates GetSendRates() const RTC_LOCKS_EXCLUDED(lock_); @@ -79,24 +82,15 @@ class DEPRECATED_RtpSenderEgress { // recalled, return a vector with all of them (in corresponding order). // If any could not be recalled, return an empty vector. std::vector GetSentRtpPacketInfos( - rtc::ArrayView sequence_numbers) const + ArrayView sequence_numbers) const RTC_LOCKS_EXCLUDED(lock_); private: - // Maps capture time in milliseconds to send-side delay in milliseconds. - // Send-side delay is the difference between transmission time and capture - // time. 
- typedef std::map SendDelayMap; - RtpSendRates GetSendRatesLocked() const RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); bool HasCorrectSsrc(const RtpPacketToSend& packet) const; void AddPacketToTransportFeedback(uint16_t packet_id, const RtpPacketToSend& packet, const PacedPacketInfo& pacing_info); - void UpdateDelayStatistics(int64_t capture_time_ms, - int64_t now_ms, - uint32_t ssrc); - void RecomputeMaxSendDelay() RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); void UpdateOnSendPacket(int packet_id, int64_t capture_time_ms, uint32_t ssrc); @@ -107,19 +101,16 @@ class DEPRECATED_RtpSenderEgress { void UpdateRtpStats(const RtpPacketToSend& packet) RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); + const Environment env_; const uint32_t ssrc_; - const absl::optional rtx_ssrc_; - const absl::optional flexfec_ssrc_; + const std::optional rtx_ssrc_; + const std::optional flexfec_ssrc_; const bool populate_network2_timestamp_; - Clock* const clock_; RtpPacketHistory* const packet_history_; Transport* const transport_; - RtcEventLog* const event_log_; - const bool is_audio_; const bool need_rtp_packet_infos_; TransportFeedbackObserver* const transport_feedback_observer_; - SendSideDelayObserver* const send_side_delay_observer_; SendPacketObserver* const send_packet_observer_; StreamDataCountersCallback* const rtp_stats_callback_; BitrateStatisticsObserver* const bitrate_callback_; @@ -129,12 +120,10 @@ class DEPRECATED_RtpSenderEgress { bool force_part_of_allocation_ RTC_GUARDED_BY(lock_); uint32_t timestamp_offset_ RTC_GUARDED_BY(lock_); - SendDelayMap send_delays_ RTC_GUARDED_BY(lock_); - SendDelayMap::const_iterator max_delay_it_ RTC_GUARDED_BY(lock_); - // The sum of delays over a kSendSideDelayWindowMs sliding window. - int64_t sum_delays_ms_ RTC_GUARDED_BY(lock_); + // These counters are only used if `rtp_stats_callback_` is null. StreamDataCounters rtp_stats_ RTC_GUARDED_BY(lock_); StreamDataCounters rtx_rtp_stats_ RTC_GUARDED_BY(lock_); + // One element per value in RtpPacketMediaType, with index matching value. 
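The indexing convention noted here, and relied on by GetSendRatesLocked() earlier in the file, is that RtpPacketMediaType values are zero-based and contiguous, so a packet type can be cast directly to a vector index. A minimal sketch of that convention only; it assumes RtpPacketMediaType and kNumMediaTypes come from rtp_rtcp_defines.h and BitrateTracker from rtc_base/bitrate_tracker.h, as the .cc include list above shows, and MakeSendRateTrackers/TrackerFor are illustrative names.

#include <cstddef>
#include <vector>

#include "api/units/time_delta.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "rtc_base/bitrate_tracker.h"

namespace {

// Same 1 s window as kBitrateStatisticsWindow earlier in this file.
constexpr webrtc::TimeDelta kWindow = webrtc::TimeDelta::Seconds(1);

// One tracker per RtpPacketMediaType, constructed the same way as send_rates_.
std::vector<webrtc::BitrateTracker> MakeSendRateTrackers() {
  return std::vector<webrtc::BitrateTracker>(webrtc::kNumMediaTypes,
                                             webrtc::BitrateTracker(kWindow));
}

// Selecting the tracker for a packet type is a plain cast to an index.
webrtc::BitrateTracker& TrackerFor(
    std::vector<webrtc::BitrateTracker>& send_rates,
    webrtc::RtpPacketMediaType type) {
  return send_rates[static_cast<size_t>(type)];
}

}  // namespace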
std::vector send_rates_ RTC_GUARDED_BY(lock_); diff --git a/modules/rtp_rtcp/source/dtmf_queue.cc b/modules/rtp_rtcp/source/dtmf_queue.cc index df06d2a2f3..4c315bd8c2 100644 --- a/modules/rtp_rtcp/source/dtmf_queue.cc +++ b/modules/rtp_rtcp/source/dtmf_queue.cc @@ -13,6 +13,7 @@ #include #include "rtc_base/checks.h" +#include "rtc_base/synchronization/mutex.h" namespace { constexpr size_t kDtmfOutbandMax = 20; diff --git a/modules/rtp_rtcp/source/fec_private_tables_bursty.cc b/modules/rtp_rtcp/source/fec_private_tables_bursty.cc index 9dbc012368..d0b9da4a3f 100644 --- a/modules/rtp_rtcp/source/fec_private_tables_bursty.cc +++ b/modules/rtp_rtcp/source/fec_private_tables_bursty.cc @@ -10,6 +10,8 @@ #include "modules/rtp_rtcp/source/fec_private_tables_bursty.h" +#include + namespace { // clang-format off #define kMaskBursty1_1 \ diff --git a/modules/rtp_rtcp/source/fec_private_tables_bursty_unittest.cc b/modules/rtp_rtcp/source/fec_private_tables_bursty_unittest.cc index c62f7d5606..69fbde03ae 100644 --- a/modules/rtp_rtcp/source/fec_private_tables_bursty_unittest.cc +++ b/modules/rtp_rtcp/source/fec_private_tables_bursty_unittest.cc @@ -10,6 +10,11 @@ #include "modules/rtp_rtcp/source/fec_private_tables_bursty.h" +#include +#include + +#include "api/array_view.h" +#include "modules/include/module_fec_types.h" #include "modules/rtp_rtcp/source/fec_private_tables_random.h" #include "modules/rtp_rtcp/source/forward_error_correction_internal.h" #include "test/gtest.h" @@ -25,7 +30,7 @@ namespace fec_private_tables { using internal::LookUpInFecTable; TEST(FecTable, TestBurstyLookup) { - rtc::ArrayView result; + ArrayView result; result = LookUpInFecTable(&kPacketMaskBurstyTbl[0], 0, 0); // Should match kMaskBursty1_1. EXPECT_EQ(2u, result.size()); @@ -51,7 +56,7 @@ TEST(FecTable, TestBurstyLookup) { } TEST(FecTable, TestRandomLookup) { - rtc::ArrayView result; + ArrayView result; result = LookUpInFecTable(&kPacketMaskRandomTbl[0], 0, 0); EXPECT_EQ(2u, result.size()); EXPECT_EQ(0x80u, result[0]); @@ -70,7 +75,7 @@ TEST(FecTable, TestRandomGenerated) { int num_fec_packets = 6; size_t mask_size = sizeof(kMaskRandom15_6) / sizeof(uint8_t); internal::PacketMaskTable mask_table(fec_mask_type, num_media_packets); - rtc::ArrayView mask = + ArrayView mask = mask_table.LookUp(num_media_packets, num_fec_packets); EXPECT_EQ(mask.size(), mask_size); for (size_t i = 0; i < mask_size; ++i) { diff --git a/modules/rtp_rtcp/source/fec_private_tables_random.cc b/modules/rtp_rtcp/source/fec_private_tables_random.cc index 3cac5db17b..885aeb5ef2 100644 --- a/modules/rtp_rtcp/source/fec_private_tables_random.cc +++ b/modules/rtp_rtcp/source/fec_private_tables_random.cc @@ -10,6 +10,8 @@ #include "modules/rtp_rtcp/source/fec_private_tables_random.h" +#include + namespace { // clang-format off #define kMaskRandom1_1 \ diff --git a/modules/rtp_rtcp/source/fec_test_helper.cc b/modules/rtp_rtcp/source/fec_test_helper.cc index 23e66c23bf..3186a476b6 100644 --- a/modules/rtp_rtcp/source/fec_test_helper.cc +++ b/modules/rtp_rtcp/source/fec_test_helper.cc @@ -10,13 +10,20 @@ #include "modules/rtp_rtcp/source/fec_test_helper.h" +#include +#include +#include #include #include +#include "api/rtp_headers.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/byte_io.h" -#include "modules/rtp_rtcp/source/rtp_packet.h" +#include "modules/rtp_rtcp/source/forward_error_correction.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" +#include 
"rtc_base/copy_on_write_buffer.h" +#include "rtc_base/random.h" namespace webrtc { namespace test { @@ -187,7 +194,7 @@ RtpPacketReceived UlpfecPacketGenerator::BuildMediaRedPacket( const AugmentedPacket& packet, bool is_recovered) { // Create a temporary buffer used to wrap the media packet in RED. - rtc::CopyOnWriteBuffer red_buffer; + CopyOnWriteBuffer red_buffer; const size_t kHeaderLength = packet.header.headerLength; // Append header. red_buffer.SetData(packet.data.data(), kHeaderLength); diff --git a/modules/rtp_rtcp/source/fec_test_helper.h b/modules/rtp_rtcp/source/fec_test_helper.h index 92e09fd44f..8d848b66d1 100644 --- a/modules/rtp_rtcp/source/fec_test_helper.h +++ b/modules/rtp_rtcp/source/fec_test_helper.h @@ -11,8 +11,11 @@ #ifndef MODULES_RTP_RTCP_SOURCE_FEC_TEST_HELPER_H_ #define MODULES_RTP_RTCP_SOURCE_FEC_TEST_HELPER_H_ +#include +#include #include +#include "api/rtp_headers.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/random.h" diff --git a/modules/rtp_rtcp/source/flexfec_03_header_reader_writer.cc b/modules/rtp_rtcp/source/flexfec_03_header_reader_writer.cc index 87f3f10bfe..3ecbec91af 100644 --- a/modules/rtp_rtcp/source/flexfec_03_header_reader_writer.cc +++ b/modules/rtp_rtcp/source/flexfec_03_header_reader_writer.cc @@ -12,8 +12,12 @@ #include +#include + +#include "api/array_view.h" #include "api/scoped_refptr.h" #include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/forward_error_correction.h" #include "modules/rtp_rtcp/source/forward_error_correction_internal.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -246,7 +250,7 @@ size_t Flexfec03HeaderWriter::FecHeaderSize(size_t packet_mask_size) const { // FecHeaderSize(), so in this function we can be sure that we are // writing in space that is intended for the header. 
void Flexfec03HeaderWriter::FinalizeFecHeader( - rtc::ArrayView protected_streams, + ArrayView protected_streams, ForwardErrorCorrection::Packet& fec_packet) const { RTC_CHECK_EQ(protected_streams.size(), 1); uint32_t media_ssrc = protected_streams[0].ssrc; diff --git a/modules/rtp_rtcp/source/flexfec_03_header_reader_writer.h b/modules/rtp_rtcp/source/flexfec_03_header_reader_writer.h index 189561ed17..04dcf31ee1 100644 --- a/modules/rtp_rtcp/source/flexfec_03_header_reader_writer.h +++ b/modules/rtp_rtcp/source/flexfec_03_header_reader_writer.h @@ -14,6 +14,7 @@ #include #include +#include "api/array_view.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" namespace webrtc { @@ -76,7 +77,7 @@ class Flexfec03HeaderWriter : public FecHeaderWriter { size_t FecHeaderSize(size_t packet_mask_row_size) const override; void FinalizeFecHeader( - rtc::ArrayView protected_streams, + ArrayView protected_streams, ForwardErrorCorrection::Packet& fec_packet) const override; }; diff --git a/modules/rtp_rtcp/source/flexfec_03_header_reader_writer_unittest.cc b/modules/rtp_rtcp/source/flexfec_03_header_reader_writer_unittest.cc index ad68de4525..80b4ba3373 100644 --- a/modules/rtp_rtcp/source/flexfec_03_header_reader_writer_unittest.cc +++ b/modules/rtp_rtcp/source/flexfec_03_header_reader_writer_unittest.cc @@ -12,6 +12,7 @@ #include +#include #include #include @@ -19,7 +20,6 @@ #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" #include "modules/rtp_rtcp/source/forward_error_correction_internal.h" -#include "rtc_base/checks.h" #include "rtc_base/random.h" #include "test/gmock.h" #include "test/gtest.h" @@ -76,10 +76,10 @@ void SetBit(size_t index, uint8_t* packet_mask) { packet_mask[index / 8] |= (1 << (7 - index % 8)); } -rtc::scoped_refptr WriteHeader(const uint8_t* packet_mask, - size_t packet_mask_size) { +scoped_refptr WriteHeader(const uint8_t* packet_mask, + size_t packet_mask_size) { Flexfec03HeaderWriter writer; - rtc::scoped_refptr written_packet(new Packet()); + scoped_refptr written_packet(new Packet()); written_packet->data.SetSize(kMediaPacketLength); uint8_t* data = written_packet->data.MutableData(); for (size_t i = 0; i < written_packet->data.size(); ++i) { @@ -97,7 +97,7 @@ std::unique_ptr ReadHeader(const Packet& written_packet) { Flexfec03HeaderReader reader; std::unique_ptr read_packet(new ReceivedFecPacket()); read_packet->ssrc = kFlexfecSsrc; - read_packet->pkt = rtc::scoped_refptr(new Packet()); + read_packet->pkt = scoped_refptr(new Packet()); read_packet->pkt->data = written_packet.data; EXPECT_TRUE(reader.ReadFecHeader(read_packet.get())); return read_packet; @@ -188,7 +188,7 @@ TEST(Flexfec03HeaderReaderTest, ReadsHeaderWithKBit0Set) { kPayloadBits, kPayloadBits, kPayloadBits, kPayloadBits}; const size_t packet_length = sizeof(kPacketData); ReceivedFecPacket read_packet; - read_packet.pkt = rtc::scoped_refptr(new Packet()); + read_packet.pkt = scoped_refptr(new Packet()); read_packet.pkt->data.SetData(kPacketData, packet_length); Flexfec03HeaderReader reader; @@ -219,7 +219,7 @@ TEST(Flexfec03HeaderReaderTest, ReadsHeaderWithKBit1Set) { kPayloadBits, kPayloadBits, kPayloadBits, kPayloadBits}; const size_t packet_length = sizeof(kPacketData); ReceivedFecPacket read_packet; - read_packet.pkt = rtc::scoped_refptr(new Packet()); + read_packet.pkt = scoped_refptr(new Packet()); read_packet.pkt->data.SetData(kPacketData, packet_length); Flexfec03HeaderReader reader; @@ -257,7 +257,7 @@ 
TEST(Flexfec03HeaderReaderTest, ReadsHeaderWithKBit2Set) { kPayloadBits, kPayloadBits, kPayloadBits, kPayloadBits}; const size_t packet_length = sizeof(kPacketData); ReceivedFecPacket read_packet; - read_packet.pkt = rtc::scoped_refptr(new Packet()); + read_packet.pkt = scoped_refptr(new Packet()); read_packet.pkt->data.SetData(kPacketData, packet_length); Flexfec03HeaderReader reader; diff --git a/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc b/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc index cfca7cb066..d1e0b99d3b 100644 --- a/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc +++ b/modules/rtp_rtcp/source/flexfec_header_reader_writer.cc @@ -12,8 +12,12 @@ #include +#include + +#include "api/array_view.h" #include "api/scoped_refptr.h" #include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/forward_error_correction.h" #include "modules/rtp_rtcp/source/forward_error_correction_internal.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -138,9 +142,9 @@ bool FlexfecHeaderReader::ReadFecHeader( mask_part0 <<= 1; ByteWriter::WriteBigEndian(&data[byte_index], mask_part0); byte_index += kFlexfecPacketMaskSizes[0]; - if (k_bit0) { - // The first K-bit is set, and the packet mask is thus only 2 bytes long. - // We have finished reading the properties for current ssrc. + if (!k_bit0) { + // The first K-bit is clear, and the packet mask is thus only 2 bytes + // long. We have finished reading the properties for current ssrc. fec_packet->protected_streams[i].packet_mask_size = kFlexfecPacketMaskSizes[0]; } else { @@ -162,8 +166,8 @@ bool FlexfecHeaderReader::ReadFecHeader( mask_part1 <<= 2; ByteWriter::WriteBigEndian(&data[byte_index], mask_part1); byte_index += kFlexfecPacketMaskSizes[1] - kFlexfecPacketMaskSizes[0]; - if (k_bit1) { - // The first K-bit is clear, but the second K-bit is set. The packet + if (!k_bit1) { + // The first K-bit is set, but the second K-bit is clear. The packet // mask is thus 6 bytes long. We have finished reading the properties // for current ssrc. fec_packet->protected_streams[i].packet_mask_size = @@ -247,7 +251,7 @@ size_t FlexfecHeaderWriter::FecHeaderSize(size_t packet_mask_size) const { // TODO(brandtr): Update this function when we support offset-based masks // and retransmissions. void FlexfecHeaderWriter::FinalizeFecHeader( - rtc::ArrayView protected_streams, + ArrayView protected_streams, ForwardErrorCorrection::Packet& fec_packet) const { uint8_t* data = fec_packet.data.MutableData(); *data &= 0x7f; // Clear R bit. @@ -273,8 +277,9 @@ void FlexfecHeaderWriter::FinalizeFecHeader( tmp_mask_part0 >>= 1; // Shift, thus clearing K-bit 0. ByteWriter::WriteBigEndian(write_at, tmp_mask_part0); + *write_at |= 0x80; // Set K-bit 0. write_at += kFlexfecPacketMaskSizes[0]; - tmp_mask_part1 >>= 2; // Shift, thus clearing K-bit 1 and bit 15. + tmp_mask_part1 >>= 2; // Shift twice, thus clearing K-bit 1 and bit 15. ByteWriter::WriteBigEndian(write_at, tmp_mask_part1); bool bit15 = (protected_stream.packet_mask[1] & 0x01) != 0; @@ -284,9 +289,9 @@ void FlexfecHeaderWriter::FinalizeFecHeader( bool bit46 = (protected_stream.packet_mask[5] & 0x02) != 0; bool bit47 = (protected_stream.packet_mask[5] & 0x01) != 0; if (!bit46 && !bit47) { - *write_at |= 0x80; // Set K-bit 1. write_at += kFlexfecPacketMaskSizes[1] - kFlexfecPacketMaskSizes[0]; } else { + *write_at |= 0x80; // Set K-bit 1. write_at += kFlexfecPacketMaskSizes[1] - kFlexfecPacketMaskSizes[0]; // Clear all trailing bits. 
memset(write_at, 0, @@ -307,14 +312,13 @@ void FlexfecHeaderWriter::FinalizeFecHeader( ByteWriter::WriteBigEndian(write_at, tmp_mask_part0); bool bit15 = (protected_stream.packet_mask[1] & 0x01) != 0; if (!bit15) { - *write_at |= 0x80; // Set K-bit 0. write_at += kFlexfecPacketMaskSizes[0]; } else { + *write_at |= 0x80; // Set K-bit 0. write_at += kFlexfecPacketMaskSizes[0]; // Clear all trailing bits. memset(write_at, 0U, kFlexfecPacketMaskSizes[1] - kFlexfecPacketMaskSizes[0]); - *write_at |= 0x80; // Set K-bit 1. *write_at |= 0x40; // Set bit 15. write_at += kFlexfecPacketMaskSizes[1] - kFlexfecPacketMaskSizes[0]; } diff --git a/modules/rtp_rtcp/source/flexfec_header_reader_writer.h b/modules/rtp_rtcp/source/flexfec_header_reader_writer.h index 81103821e7..1f5202cd18 100644 --- a/modules/rtp_rtcp/source/flexfec_header_reader_writer.h +++ b/modules/rtp_rtcp/source/flexfec_header_reader_writer.h @@ -14,6 +14,7 @@ #include #include +#include "api/array_view.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" namespace webrtc { @@ -58,7 +59,7 @@ class FlexfecHeaderWriter : public FecHeaderWriter { size_t FecHeaderSize(size_t packet_mask_row_size) const override; void FinalizeFecHeader( - rtc::ArrayView protected_streams, + ArrayView protected_streams, ForwardErrorCorrection::Packet& fec_packet) const override; }; diff --git a/modules/rtp_rtcp/source/flexfec_header_reader_writer_unittest.cc b/modules/rtp_rtcp/source/flexfec_header_reader_writer_unittest.cc index 6995ba3871..241675abb4 100644 --- a/modules/rtp_rtcp/source/flexfec_header_reader_writer_unittest.cc +++ b/modules/rtp_rtcp/source/flexfec_header_reader_writer_unittest.cc @@ -12,17 +12,13 @@ #include -#include -#include +#include #include #include "api/array_view.h" #include "api/make_ref_counted.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" -#include "modules/rtp_rtcp/source/forward_error_correction_internal.h" -#include "rtc_base/checks.h" -#include "rtc_base/random.h" #include "test/gmock.h" #include "test/gtest.h" @@ -36,11 +32,12 @@ using ReceivedFecPacket = ForwardErrorCorrection::ReceivedFecPacket; using ::testing::Each; using ::testing::ElementsAreArray; -constexpr uint8_t kMask0[] = {0xAB, 0xCD}; // First K bit is set. -constexpr uint8_t kMask1[] = {0x12, 0x34, // First K bit cleared. - 0xF6, 0x78, 0x9A, 0xBC}; // Second K bit set. -constexpr uint8_t kMask2[] = {0x12, 0x34, // First K bit cleared. - 0x56, 0x78, 0x9A, 0xBC, // Second K bit cleared. +constexpr uint8_t kKBit = 1 << 7; +constexpr uint8_t kMask0[] = {0x2B, 0xCD}; // First K bit is cleared. +constexpr uint8_t kMask1[] = {0x92, 0x34, // First K bit set. + 0x76, 0x78, 0x9A, 0xBC}; // Second K bit cleared. +constexpr uint8_t kMask2[] = {0x92, 0x34, // First K bit set. + 0xD6, 0x78, 0x9A, 0xBC, // Second K bit set. 0xDE, 0xF0, 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC}; constexpr size_t kMediaPacketLength = 1234; @@ -58,13 +55,13 @@ constexpr uint8_t kPayloadBits = 0x00; struct FecPacketStreamReadProperties { ProtectedStream stream; - rtc::ArrayView mask; + ArrayView mask; }; struct FecPacketStreamWriteProperties { size_t byte_index; uint16_t seq_num_base; - rtc::ArrayView mask; + ArrayView mask; }; Packet WritePacket( @@ -97,9 +94,9 @@ void VerifyReadHeaders(size_t expected_fec_header_size, EXPECT_EQ(protected_stream.packet_mask_size, expected[i].stream.packet_mask_size); // Ensure that the K-bits are removed and the packet mask has been packed. 
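The K-bit convention implemented by the reader hunks above, and relied on by the check that follows this note, is: a set K-bit means the packet mask continues into the next, larger variant, while a cleared K-bit ends it, giving FlexFEC mask sizes of 2, 6 or 14 bytes. A small decision sketch, not code from the patch; the kFlexfecPacketMaskSizes values are assumed to be {2, 6, 14}, consistent with the single-stream header sizes (12, 16, 24 bytes) used in these tests.

#include <cstddef>

// Assumed values; the real constants live next to the FEC header reader.
constexpr size_t kFlexfecPacketMaskSizes[] = {2, 6, 14};

// Mirrors the branch structure of FlexfecHeaderReader::ReadFecHeader() after
// this change: K-bit set, keep reading; K-bit clear, the mask ends here.
// k_bit1 is only meaningful when k_bit0 is set.
size_t FlexfecMaskSizeBytes(bool k_bit0, bool k_bit1) {
  if (!k_bit0) {
    return kFlexfecPacketMaskSizes[0];  // 2-byte mask.
  }
  if (!k_bit1) {
    return kFlexfecPacketMaskSizes[1];  // 6-byte mask.
  }
  return kFlexfecPacketMaskSizes[2];  // 14-byte mask.
}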
- EXPECT_THAT(rtc::MakeArrayView(read_packet.pkt->data.cdata() + - protected_stream.packet_mask_offset, - protected_stream.packet_mask_size), + EXPECT_THAT(MakeArrayView(read_packet.pkt->data.cdata() + + protected_stream.packet_mask_offset, + protected_stream.packet_mask_size), ElementsAreArray(expected[i].mask)); } EXPECT_EQ(read_packet.pkt->data.size() - expected_fec_header_size, @@ -119,8 +116,8 @@ void VerifyFinalizedHeaders( ByteReader::ReadBigEndian(packet + expected[i].byte_index), expected[i].seq_num_base); // Verify mask. - EXPECT_THAT(rtc::MakeArrayView(packet + expected[i].byte_index + 2, - expected[i].mask.size()), + EXPECT_THAT(MakeArrayView(packet + expected[i].byte_index + 2, + expected[i].mask.size()), ElementsAreArray(expected[i].mask)); } } @@ -133,7 +130,7 @@ void VerifyWrittenAndReadHeaders( // Read FEC Header using written data. ReceivedFecPacket read_packet; - read_packet.pkt = rtc::make_ref_counted(); + read_packet.pkt = make_ref_counted(); read_packet.pkt->data = written_packet.data; for (const FecHeaderWriter::ProtectedStream& stream : write_protected_streams) { @@ -166,31 +163,29 @@ void VerifyWrittenAndReadHeaders( read_packet.pkt->data.cdata() + read_packet.protected_streams[i].packet_mask_offset; // Verify actual mask bits. - EXPECT_THAT(rtc::MakeArrayView(read_mask_ptr, mask_write_size), + EXPECT_THAT(MakeArrayView(read_mask_ptr, mask_write_size), ElementsAreArray(write_protected_streams[i].packet_mask)); // If read mask size is larger than written mask size, verify all other bits // are 0. - EXPECT_THAT(rtc::MakeArrayView(read_mask_ptr + mask_write_size, - expected_mask_read_size - mask_write_size), + EXPECT_THAT(MakeArrayView(read_mask_ptr + mask_write_size, + expected_mask_read_size - mask_write_size), Each(0)); } // Verify that the call to ReadFecHeader did not tamper with the payload. 
EXPECT_THAT( - rtc::MakeArrayView( - read_packet.pkt->data.cdata() + read_packet.fec_header_size, - read_packet.pkt->data.size() - read_packet.fec_header_size), + MakeArrayView(read_packet.pkt->data.cdata() + read_packet.fec_header_size, + read_packet.pkt->data.size() - read_packet.fec_header_size), ElementsAreArray(written_packet.data.cdata() + expected_header_size, written_packet.data.size() - expected_header_size)); } } // namespace -TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit0SetSingleStream) { - constexpr uint8_t kKBit0 = 1 << 7; +TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit0ClearSingleStream) { constexpr size_t kExpectedFecHeaderSize = 12; constexpr uint16_t kSnBase = 0x0102; - constexpr uint8_t kFlexfecPktMask[] = {kKBit0 | 0x08, 0x81}; + constexpr uint8_t kFlexfecPktMask[] = {0x08, 0x81}; constexpr uint8_t kUlpfecPacketMask[] = {0x11, 0x02}; constexpr uint8_t kPacketData[] = { kFlexible, kPtRecovery, kLengthRecovery[0], kLengthRecovery[1], @@ -198,7 +193,7 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit0SetSingleStream) { kSnBase >> 8, kSnBase & 0xFF, kFlexfecPktMask[0], kFlexfecPktMask[1], kPayloadBits, kPayloadBits, kPayloadBits, kPayloadBits}; ReceivedFecPacket read_packet; - read_packet.pkt = rtc::make_ref_counted(); + read_packet.pkt = make_ref_counted(); read_packet.pkt->data.SetData(kPacketData); read_packet.protected_streams = {{.ssrc = 0x01}}; @@ -215,13 +210,11 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit0SetSingleStream) { VerifyReadHeaders(kExpectedFecHeaderSize, read_packet, expected); } -TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit1SetSingleStream) { - constexpr uint8_t kKBit0 = 0 << 7; - constexpr uint8_t kKBit1 = 1 << 7; +TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit1ClearSingleStream) { constexpr size_t kExpectedFecHeaderSize = 16; constexpr uint16_t kSnBase = 0x0102; - constexpr uint8_t kFlexfecPktMask[] = {kKBit0 | 0x48, 0x81, // - kKBit1 | 0x02, 0x11, 0x00, 0x21}; + constexpr uint8_t kFlexfecPktMask[] = {kKBit | 0x48, 0x81, // + 0x02, 0x11, 0x00, 0x21}; constexpr uint8_t kUlpfecPacketMask[] = {0x91, 0x02, // 0x08, 0x44, 0x00, 0x84}; constexpr uint8_t kPacketData[] = { @@ -233,7 +226,7 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit1SetSingleStream) { kFlexfecPktMask[5], kPayloadBits, kPayloadBits, kPayloadBits, kPayloadBits}; ReceivedFecPacket read_packet; - read_packet.pkt = rtc::make_ref_counted(); + read_packet.pkt = make_ref_counted(); read_packet.pkt->data.SetData(kPacketData); read_packet.protected_streams = {{.ssrc = 0x01}}; @@ -250,15 +243,13 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit1SetSingleStream) { VerifyReadHeaders(kExpectedFecHeaderSize, read_packet, expected); } -TEST(FlexfecHeaderReaderTest, ReadsHeaderWithNoKBitsSetSingleStream) { - constexpr uint8_t kKBit0 = 0 << 7; - constexpr uint8_t kKBit1 = 0 << 7; +TEST(FlexfecHeaderReaderTest, ReadsHeaderWithBothKBitsSetSingleStream) { constexpr size_t kExpectedFecHeaderSize = 24; constexpr uint16_t kSnBase = 0x0102; - constexpr uint8_t kFlexfecPacketMask[] = {kKBit0 | 0x48, 0x81, // - kKBit1 | 0x02, 0x11, 0x00, 0x21, // - 0x01, 0x11, 0x11, 0x11, - 0x11, 0x11, 0x11, 0x11}; + constexpr uint8_t kFlexfecPacketMask[] = {kKBit | 0x48, 0x81, // + kKBit | 0x02, 0x11, 0x00, 0x21, // + 0x01, 0x11, 0x11, 0x11, + 0x11, 0x11, 0x11, 0x11}; constexpr uint8_t kUlpfecPacketMask[] = {0x91, 0x02, // 0x08, 0x44, 0x00, 0x84, // 0x04, 0x44, 0x44, 0x44, @@ -292,7 +283,7 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithNoKBitsSetSingleStream) { kPayloadBits, kPayloadBits}; 
ReceivedFecPacket read_packet; - read_packet.pkt = rtc::make_ref_counted(); + read_packet.pkt = make_ref_counted(); read_packet.pkt->data.SetData(kPacketData); read_packet.protected_streams = {{.ssrc = 0x01}}; @@ -309,14 +300,13 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithNoKBitsSetSingleStream) { VerifyReadHeaders(kExpectedFecHeaderSize, read_packet, expected); } -TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit0Set2Streams) { - constexpr uint8_t kKBit0 = 1 << 7; +TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit0Clear2Streams) { constexpr size_t kExpectedFecHeaderSize = 16; constexpr uint16_t kSnBase0 = 0x0102; constexpr uint16_t kSnBase1 = 0x0304; - constexpr uint8_t kFlexfecPktMask1[] = {kKBit0 | 0x08, 0x81}; + constexpr uint8_t kFlexfecPktMask1[] = {0x08, 0x81}; constexpr uint8_t kUlpfecPacketMask1[] = {0x11, 0x02}; - constexpr uint8_t kFlexfecPktMask2[] = {kKBit0 | 0x04, 0x41}; + constexpr uint8_t kFlexfecPktMask2[] = {0x04, 0x41}; constexpr uint8_t kUlpfecPacketMask2[] = {0x08, 0x82}; constexpr uint8_t kPacketData[] = { @@ -326,7 +316,7 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit0Set2Streams) { kSnBase1 >> 8, kSnBase1 & 0xFF, kFlexfecPktMask2[0], kFlexfecPktMask2[1], kPayloadBits, kPayloadBits, kPayloadBits, kPayloadBits}; ReceivedFecPacket read_packet; - read_packet.pkt = rtc::make_ref_counted(); + read_packet.pkt = make_ref_counted(); read_packet.pkt->data.SetData(kPacketData); read_packet.protected_streams = {{.ssrc = 0x01}, {.ssrc = 0x02}}; @@ -349,18 +339,16 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit0Set2Streams) { VerifyReadHeaders(kExpectedFecHeaderSize, read_packet, expected); } -TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit1Set2Streams) { - constexpr uint8_t kKBit0 = 0 << 7; - constexpr uint8_t kKBit1 = 1 << 7; +TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit1Clear2Streams) { constexpr size_t kExpectedFecHeaderSize = 24; constexpr uint16_t kSnBase0 = 0x0102; constexpr uint16_t kSnBase1 = 0x0304; - constexpr uint8_t kFlexfecPktMask1[] = {kKBit0 | 0x48, 0x81, // - kKBit1 | 0x02, 0x11, 0x00, 0x21}; + constexpr uint8_t kFlexfecPktMask1[] = {kKBit | 0x48, 0x81, // + 0x02, 0x11, 0x00, 0x21}; constexpr uint8_t kUlpfecPacketMask1[] = {0x91, 0x02, // 0x08, 0x44, 0x00, 0x84}; - constexpr uint8_t kFlexfecPktMask2[] = {kKBit0 | 0x57, 0x82, // - kKBit1 | 0x04, 0x33, 0x00, 0x51}; + constexpr uint8_t kFlexfecPktMask2[] = {kKBit | 0x57, 0x82, // + 0x04, 0x33, 0x00, 0x51}; constexpr uint8_t kUlpfecPacketMask2[] = {0xAF, 0x04, // 0x10, 0xCC, 0x01, 0x44}; constexpr uint8_t kPacketData[] = { @@ -375,7 +363,7 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit1Set2Streams) { kPayloadBits, kPayloadBits, kPayloadBits, kPayloadBits}; ReceivedFecPacket read_packet; - read_packet.pkt = rtc::make_ref_counted(); + read_packet.pkt = make_ref_counted(); read_packet.pkt->data.SetData(kPacketData); read_packet.protected_streams = {{.ssrc = 0x01}, {.ssrc = 0x02}}; @@ -398,24 +386,22 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithKBit1Set2Streams) { VerifyReadHeaders(kExpectedFecHeaderSize, read_packet, expected); } -TEST(FlexfecHeaderReaderTest, ReadsHeaderWithNoKBitsSet2Streams) { - constexpr uint8_t kKBit0 = 0 << 7; - constexpr uint8_t kKBit1 = 0 << 7; +TEST(FlexfecHeaderReaderTest, ReadsHeaderWithBothKBitsSet2Streams) { constexpr size_t kExpectedFecHeaderSize = 40; constexpr uint16_t kSnBase0 = 0x0102; constexpr uint16_t kSnBase1 = 0x0304; - constexpr uint8_t kFlexfecPktMask1[] = {kKBit0 | 0x48, 0x81, // - kKBit1 | 0x02, 0x11, 0x00, 0x21, // - 0x01, 0x11, 0x11, 0x11, - 
0x11, 0x11, 0x11, 0x11}; + constexpr uint8_t kFlexfecPktMask1[] = {kKBit | 0x48, 0x81, // + kKBit | 0x02, 0x11, 0x00, 0x21, // + 0x01, 0x11, 0x11, 0x11, + 0x11, 0x11, 0x11, 0x11}; constexpr uint8_t kUlpfecPacketMask1[] = {0x91, 0x02, // 0x08, 0x44, 0x00, 0x84, // 0x04, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44}; - constexpr uint8_t kFlexfecPktMask2[] = {kKBit0 | 0x32, 0x84, // - kKBit1 | 0x05, 0x23, 0x00, 0x55, // - 0xA3, 0x22, 0x22, 0x22, - 0x22, 0x22, 0x22, 0x35}; + constexpr uint8_t kFlexfecPktMask2[] = {kKBit | 0x32, 0x84, // + kKBit | 0x05, 0x23, 0x00, 0x55, // + 0xA3, 0x22, 0x22, 0x22, + 0x22, 0x22, 0x22, 0x35}; constexpr uint8_t kUlpfecPacketMask2[] = {0x65, 0x08, // 0x14, 0x8C, 0x01, 0x56, // 0x8C, 0x88, 0x88, 0x88, @@ -466,7 +452,7 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithNoKBitsSet2Streams) { kPayloadBits, kPayloadBits}; ReceivedFecPacket read_packet; - read_packet.pkt = rtc::make_ref_counted(); + read_packet.pkt = make_ref_counted(); read_packet.pkt->data.SetData(kPacketData); read_packet.protected_streams = {{.ssrc = 0x01}, {.ssrc = 0x02}}; @@ -490,29 +476,27 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithNoKBitsSet2Streams) { } TEST(FlexfecHeaderReaderTest, ReadsHeaderWithMultipleStreamsMultipleMasks) { - constexpr uint8_t kBit0 = 0 << 7; - constexpr uint8_t kBit1 = 1 << 7; constexpr size_t kExpectedFecHeaderSize = 44; constexpr uint16_t kSnBase0 = 0x0102; constexpr uint16_t kSnBase1 = 0x0304; constexpr uint16_t kSnBase2 = 0x0506; constexpr uint16_t kSnBase3 = 0x0708; - constexpr uint8_t kFlexfecPacketMask1[] = {kBit1 | 0x29, 0x91}; + constexpr uint8_t kFlexfecPacketMask1[] = {0x29, 0x91}; constexpr uint8_t kUlpfecPacketMask1[] = {0x53, 0x22}; - constexpr uint8_t kFlexfecPacketMask2[] = {kBit0 | 0x32, 0xA1, // - kBit1 | 0x02, 0x11, 0x00, 0x21}; + constexpr uint8_t kFlexfecPacketMask2[] = {kKBit | 0x32, 0xA1, // + 0x02, 0x11, 0x00, 0x21}; constexpr uint8_t kUlpfecPacketMask2[] = {0x65, 0x42, // 0x08, 0x44, 0x00, 0x84}; - constexpr uint8_t kFlexfecPacketMask3[] = {kBit0 | 0x48, 0x81, // - kBit0 | 0x02, 0x11, 0x00, 0x21, // + constexpr uint8_t kFlexfecPacketMask3[] = {kKBit | 0x48, 0x81, // + kKBit | 0x02, 0x11, 0x00, 0x21, // 0x01, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11}; constexpr uint8_t kUlpfecPacketMask3[] = {0x91, 0x02, // 0x08, 0x44, 0x00, 0x84, // 0x04, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44, 0x44}; - constexpr uint8_t kFlexfecPacketMask4[] = {kBit0 | 0x32, 0x84, // - kBit1 | 0x05, 0x23, 0x00, 0x55}; + constexpr uint8_t kFlexfecPacketMask4[] = {kKBit | 0x32, 0x84, // + 0x05, 0x23, 0x00, 0x55}; constexpr uint8_t kUlpfecPacketMask4[] = {0x65, 0x08, // 0x14, 0x8C, 0x01, 0x54}; constexpr uint8_t kPacketData[] = {kFlexible, @@ -564,7 +548,7 @@ TEST(FlexfecHeaderReaderTest, ReadsHeaderWithMultipleStreamsMultipleMasks) { kPayloadBits, kPayloadBits}; ReceivedFecPacket read_packet; - read_packet.pkt = rtc::make_ref_counted(); + read_packet.pkt = make_ref_counted(); read_packet.pkt->data.SetData(kPacketData); read_packet.protected_streams = { {.ssrc = 0x01}, {.ssrc = 0x02}, {.ssrc = 0x03}, {.ssrc = 0x04}}; @@ -603,7 +587,7 @@ TEST(FlexfecHeaderReaderTest, ReadPacketWithoutProtectedSsrcsShouldFail) { kFlexible, kPtRecovery, kLengthRecovery[0], kLengthRecovery[1], kTsRecovery[0], kTsRecovery[1], kTsRecovery[2], kTsRecovery[3]}; ReceivedFecPacket read_packet; - read_packet.pkt = rtc::make_ref_counted(); + read_packet.pkt = make_ref_counted(); read_packet.pkt->data.SetData(kPacketData); // No protected ssrcs. 
read_packet.protected_streams = {}; @@ -618,7 +602,7 @@ TEST(FlexfecHeaderReaderTest, ReadPacketWithoutStreamSpecificHeaderShouldFail) { kFlexible, kPtRecovery, kLengthRecovery[0], kLengthRecovery[1], kTsRecovery[0], kTsRecovery[1], kTsRecovery[2], kTsRecovery[3]}; ReceivedFecPacket read_packet; - read_packet.pkt = rtc::make_ref_counted(); + read_packet.pkt = make_ref_counted(); read_packet.pkt->data.SetData(kPacketData); read_packet.protected_streams = {{.ssrc = 0x01}}; @@ -633,7 +617,7 @@ TEST(FlexfecHeaderReaderTest, ReadShortPacketWithKBit0SetShouldFail) { kTsRecovery[0], kTsRecovery[1], kTsRecovery[2], kTsRecovery[3], kSnBases[0][0], kSnBases[0][1], kMask0[0], kMask0[1]}; ReceivedFecPacket read_packet; - read_packet.pkt = rtc::make_ref_counted(); + read_packet.pkt = make_ref_counted(); // Expected to have 2 bytes of mask but length of packet misses 1 byte. read_packet.pkt->data.SetData(kPacketData, sizeof(kPacketData) - 1); read_packet.protected_streams = {{.ssrc = 0x01}}; @@ -642,7 +626,7 @@ TEST(FlexfecHeaderReaderTest, ReadShortPacketWithKBit0SetShouldFail) { EXPECT_FALSE(reader.ReadFecHeader(&read_packet)); } -TEST(FlexfecHeaderReaderTest, ReadShortPacketWithKBit1SetShouldFail) { +TEST(FlexfecHeaderReaderTest, ReadShortPacketWithKBit1ClearShouldFail) { // Simulate short received packet. constexpr uint8_t kPacketData[] = { kFlexible, kPtRecovery, kLengthRecovery[0], kLengthRecovery[1], @@ -650,7 +634,7 @@ TEST(FlexfecHeaderReaderTest, ReadShortPacketWithKBit1SetShouldFail) { kSnBases[0][0], kSnBases[0][1], kMask1[0], kMask1[1], kMask1[2], kMask1[3], kMask1[4], kMask1[5]}; ReceivedFecPacket read_packet; - read_packet.pkt = rtc::make_ref_counted(); + read_packet.pkt = make_ref_counted(); // Expected to have 6 bytes of mask but length of packet misses 2 bytes. read_packet.pkt->data.SetData(kPacketData, sizeof(kPacketData) - 2); read_packet.protected_streams = {{.ssrc = 0x01}}; @@ -659,7 +643,7 @@ TEST(FlexfecHeaderReaderTest, ReadShortPacketWithKBit1SetShouldFail) { EXPECT_FALSE(reader.ReadFecHeader(&read_packet)); } -TEST(FlexfecHeaderReaderTest, ReadShortPacketWithKBit1ClearedShouldFail) { +TEST(FlexfecHeaderReaderTest, ReadShortPacketWithKBit1SetShouldFail) { // Simulate short received packet. constexpr uint8_t kPacketData[] = { kFlexible, kPtRecovery, kLengthRecovery[0], kLengthRecovery[1], @@ -669,7 +653,7 @@ TEST(FlexfecHeaderReaderTest, ReadShortPacketWithKBit1ClearedShouldFail) { kMask2[6], kMask2[7], kMask2[8], kMask2[9], kMask2[10], kMask2[11], kMask2[12], kMask2[13]}; ReceivedFecPacket read_packet; - read_packet.pkt = rtc::make_ref_counted(); + read_packet.pkt = make_ref_counted(); // Expected to have 14 bytes of mask but length of packet misses 2 bytes. 
read_packet.pkt->data.SetData(kPacketData, sizeof(kPacketData) - 2); read_packet.protected_streams = {{.ssrc = 0x01}}; @@ -689,7 +673,7 @@ TEST(FlexfecHeaderReaderTest, ReadShortPacketMultipleStreamsShouldFail) { kMask2[6], kMask2[7], kMask2[8], kMask2[9], kMask2[10], kMask2[11], kMask2[12], kMask2[13]}; ReceivedFecPacket read_packet; - read_packet.pkt = rtc::make_ref_counted(); + read_packet.pkt = make_ref_counted(); // Subtract 2 bytes from length, so the read will fail on parsing second read_packet.pkt->data.SetData(kPacketData, sizeof(kPacketData) - 2); read_packet.protected_streams = {{.ssrc = 0x01}, {.ssrc = 0x02}}; @@ -698,8 +682,8 @@ TEST(FlexfecHeaderReaderTest, ReadShortPacketMultipleStreamsShouldFail) { EXPECT_FALSE(reader.ReadFecHeader(&read_packet)); } -TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithKBit0SetSingleStream) { - constexpr uint8_t kFlexfecPacketMask[] = {0x88, 0x81}; +TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithKBit0ClearSingleStream) { + constexpr uint8_t kFlexfecPacketMask[] = {0x08, 0x81}; constexpr uint8_t kUlpfecPacketMask[] = {0x11, 0x02}; constexpr uint16_t kMediaStartSeqNum = 1234; Packet written_packet = WritePacket({{.ssrc = 0x01, @@ -714,8 +698,8 @@ TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithKBit0SetSingleStream) { VerifyFinalizedHeaders(written_packet, expected); } -TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithKBit1SetSingleStream) { - constexpr uint8_t kFlexfecPacketMask[] = {0x48, 0x81, 0x82, 0x11, 0x00, 0x21}; +TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithKBit1ClearSingleStream) { + constexpr uint8_t kFlexfecPacketMask[] = {0xC8, 0x81, 0x02, 0x11, 0x00, 0x21}; constexpr uint8_t kUlpfecPacketMask[] = {0x91, 0x02, 0x08, 0x44, 0x00, 0x84}; constexpr uint16_t kMediaStartSeqNum = 1234; Packet written_packet = WritePacket({{.ssrc = 0x01, @@ -730,10 +714,10 @@ TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithKBit1SetSingleStream) { VerifyFinalizedHeaders(written_packet, expected); } -TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithNoKBitsSetSingleStream) { +TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithBothKBitsSetSingleStream) { constexpr uint8_t kFlexfecPacketMask[] = { - 0x11, 0x11, // K-bit 0 clear. - 0x11, 0x11, 0x11, 0x10, // K-bit 1 clear. + 0x91, 0x11, // K-bit 0 set. + 0x91, 0x11, 0x11, 0x10, // K-bit 1 set. 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // }; constexpr uint8_t kUlpfecPacketMask[] = {0x22, 0x22, 0x44, 0x44, 0x44, 0x41}; @@ -752,22 +736,22 @@ TEST(FlexfecHeaderWriterTest, FinalizesHeaderWithNoKBitsSetSingleStream) { TEST(FlexfecHeaderWriterTest, FinalizesHeaderMultipleStreamsMultipleMasks) { constexpr uint8_t kFlexfecPacketMask1[] = { - 0x11, 0x11, // K-bit 0 clear. - 0x11, 0x11, 0x11, 0x10, // K-bit 1 clear. + 0x91, 0x11, // K-bit 0 set. + 0x91, 0x11, 0x11, 0x10, // K-bit 1 set. 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // }; constexpr uint8_t kUlpfecPacketMask1[] = {0x22, 0x22, 0x44, 0x44, 0x44, 0x41}; constexpr uint16_t kMediaStartSeqNum1 = 1234; - constexpr uint8_t kFlexfecPacketMask2[] = {0x88, 0x81}; + constexpr uint8_t kFlexfecPacketMask2[] = {0x08, 0x81}; constexpr uint8_t kUlpfecPacketMask2[] = {0x11, 0x02}; constexpr uint16_t kMediaStartSeqNum2 = 2345; - constexpr uint8_t kFlexfecPacketMask3[] = {0x48, 0x81, 0x82, + constexpr uint8_t kFlexfecPacketMask3[] = {0xC8, 0x81, 0x02, 0x11, 0x00, 0x21}; constexpr uint8_t kUlpfecPacketMask3[] = {0x91, 0x02, 0x08, 0x44, 0x00, 0x84}; constexpr uint16_t kMediaStartSeqNum3 = 3456; constexpr uint8_t kFlexfecPacketMask4[] = { - 0x55, 0xAA, // K-bit 0 clear. 
- 0x22, 0xAB, 0xCD, 0xEF, // K-bit 1 clear. + 0xD5, 0xAA, // K-bit 0 set. + 0xA2, 0xAB, 0xCD, 0xEF, // K-bit 1 set. 0xC0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 // }; constexpr uint8_t kUlpfecPacketMask4[] = {0xAB, 0x54, 0x8A, 0xAF, 0x37, 0xBF}; diff --git a/modules/rtp_rtcp/source/flexfec_receiver.cc b/modules/rtp_rtcp/source/flexfec_receiver.cc index 7f2cc0cb3c..c6465fbccf 100644 --- a/modules/rtp_rtcp/source/flexfec_receiver.cc +++ b/modules/rtp_rtcp/source/flexfec_receiver.cc @@ -12,10 +12,18 @@ #include -#include "api/array_view.h" +#include +#include + #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "modules/rtp_rtcp/include/recovered_packet_receiver.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/forward_error_correction.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/ulpfec_receiver.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -109,7 +117,7 @@ FlexfecReceiver::AddReceivedPacket(const RtpPacketReceived& packet) { ++packet_counter_.num_fec_packets; // Insert packet payload into erasure code. - received_packet->pkt = rtc::scoped_refptr( + received_packet->pkt = scoped_refptr( new ForwardErrorCorrection::Packet()); received_packet->pkt->data = packet.Buffer().Slice(packet.headers_size(), packet.payload_size()); @@ -123,7 +131,7 @@ FlexfecReceiver::AddReceivedPacket(const RtpPacketReceived& packet) { // Insert entire packet into erasure code. // Create a copy and fill with zeros all mutable extensions. - received_packet->pkt = rtc::scoped_refptr( + received_packet->pkt = scoped_refptr( new ForwardErrorCorrection::Packet()); RtpPacketReceived packet_copy(packet); packet_copy.ZeroMutableExtensions(); @@ -183,8 +191,7 @@ void FlexfecReceiver::ProcessReceivedPacket( bool should_log_periodically = now - last_recovered_packet_ > kPacketLogInterval; if (RTC_LOG_CHECK_LEVEL(LS_VERBOSE) || should_log_periodically) { - rtc::LoggingSeverity level = - should_log_periodically ? rtc::LS_INFO : rtc::LS_VERBOSE; + LoggingSeverity level = should_log_periodically ? 
LS_INFO : LS_VERBOSE; RTC_LOG_V(level) << "Recovered media packet with SSRC: " << parsed_packet.Ssrc() << " seq " << parsed_packet.SequenceNumber() << " recovered length " diff --git a/modules/rtp_rtcp/source/flexfec_receiver_unittest.cc b/modules/rtp_rtcp/source/flexfec_receiver_unittest.cc index 1243858b6b..da200ea0f5 100644 --- a/modules/rtp_rtcp/source/flexfec_receiver_unittest.cc +++ b/modules/rtp_rtcp/source/flexfec_receiver_unittest.cc @@ -10,13 +10,21 @@ #include "modules/rtp_rtcp/include/flexfec_receiver.h" -#include +#include +#include +#include +#include #include +#include +#include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/include/recovered_packet_receiver.h" #include "modules/rtp_rtcp/mocks/mock_recovered_packet_receiver.h" #include "modules/rtp_rtcp/source/fec_test_helper.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/ulpfec_receiver.h" +#include "rtc_base/checks.h" #include "test/gmock.h" #include "test/gtest.h" diff --git a/modules/rtp_rtcp/source/flexfec_sender.cc b/modules/rtp_rtcp/source/flexfec_sender.cc index 3a98778d16..9b5ceac577 100644 --- a/modules/rtp_rtcp/source/flexfec_sender.cc +++ b/modules/rtp_rtcp/source/flexfec_sender.cc @@ -12,18 +12,31 @@ #include +#include #include +#include +#include #include +#include #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/rtp_parameters.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" +#include "modules/rtp_rtcp/source/rtp_header_extension_size.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/race_checker.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -71,16 +84,16 @@ RtpHeaderExtensionMap RegisterSupportedExtensions( } // namespace FlexfecSender::FlexfecSender( + const Environment& env, int payload_type, uint32_t ssrc, uint32_t protected_media_ssrc, absl::string_view mid, const std::vector& rtp_header_extensions, - rtc::ArrayView extension_sizes, - const RtpState* rtp_state, - Clock* clock) - : clock_(clock), - random_(clock_->TimeInMicroseconds()), + ArrayView extension_sizes, + const RtpState* rtp_state) + : env_(env), + random_(env_.clock().TimeInMicroseconds()), payload_type_(payload_type), // Reset RTP state if this is not the first time we are operating. // Otherwise, randomize the initial timestamp offset and RTP sequence @@ -93,8 +106,8 @@ FlexfecSender::FlexfecSender( seq_num_(rtp_state ? 
rtp_state->sequence_number : random_.Rand(1, kMaxInitRtpSeqNumber)), ulpfec_generator_( - ForwardErrorCorrection::CreateFlexfec(ssrc, protected_media_ssrc), - clock_), + env_, + ForwardErrorCorrection::CreateFlexfec(ssrc, protected_media_ssrc)), rtp_header_extension_map_( RegisterSupportedExtensions(rtp_header_extensions)), header_extensions_size_( @@ -141,10 +154,10 @@ std::vector> FlexfecSender::GetFecPackets() { fec_packet_to_send->SetTimestamp( timestamp_offset_ + static_cast(kMsToRtpTimestamp * - clock_->TimeInMilliseconds())); + env_.clock().TimeInMilliseconds())); // Set "capture time" so that the TransmissionOffset header extension // can be set by the RTPSender. - fec_packet_to_send->set_capture_time(clock_->CurrentTime()); + fec_packet_to_send->set_capture_time(env_.clock().CurrentTime()); fec_packet_to_send->SetSsrc(ssrc_); // Reserve extensions, if registered. These will be set by the RTPSender. fec_packet_to_send->ReserveExtension(); @@ -169,7 +182,7 @@ std::vector> FlexfecSender::GetFecPackets() { ulpfec_generator_.ResetState(); } - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); if (!fec_packets_to_send.empty() && now - last_generated_packet_ > kPacketLogInterval) { RTC_LOG(LS_VERBOSE) << "Generated " << fec_packets_to_send.size() @@ -191,10 +204,11 @@ size_t FlexfecSender::MaxPacketOverhead() const { DataRate FlexfecSender::CurrentFecRate() const { MutexLock lock(&mutex_); - return fec_bitrate_.Rate(clock_->CurrentTime()).value_or(DataRate::Zero()); + return fec_bitrate_.Rate(env_.clock().CurrentTime()) + .value_or(DataRate::Zero()); } -absl::optional FlexfecSender::GetRtpState() { +std::optional FlexfecSender::GetRtpState() { RtpState rtp_state; rtp_state.sequence_number = seq_num_; rtp_state.start_timestamp = timestamp_offset_; diff --git a/modules/rtp_rtcp/source/flexfec_sender_unittest.cc b/modules/rtp_rtcp/source/flexfec_sender_unittest.cc index 19614d2bbd..9c35cc897f 100644 --- a/modules/rtp_rtcp/source/flexfec_sender_unittest.cc +++ b/modules/rtp_rtcp/source/flexfec_sender_unittest.cc @@ -10,11 +10,20 @@ #include "modules/rtp_rtcp/include/flexfec_sender.h" +#include +#include +#include +#include +#include #include +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/rtp_parameters.h" +#include "modules/include/module_fec_types.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/fec_test_helper.h" +#include "modules/rtp_rtcp/source/rtp_header_extension_size.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/rtp_sender.h" @@ -83,27 +92,30 @@ std::unique_ptr GenerateSingleFlexfecPacket( TEST(FlexfecSenderTest, Ssrc) { SimulatedClock clock(kInitialSimulatedClockTime); - FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid, - kNoRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, - nullptr /* rtp_state */, &clock); + const Environment env = CreateEnvironment(&clock); + FlexfecSender sender(env, kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, + kNoMid, kNoRtpHeaderExtensions, + kNoRtpHeaderExtensionSizes, nullptr /* rtp_state */); EXPECT_EQ(kFlexfecSsrc, sender.FecSsrc()); } TEST(FlexfecSenderTest, NoFecAvailableBeforeMediaAdded) { SimulatedClock clock(kInitialSimulatedClockTime); - FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid, - kNoRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, - nullptr /* 
rtp_state */, &clock); + const Environment env = CreateEnvironment(&clock); + FlexfecSender sender(env, kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, + kNoMid, kNoRtpHeaderExtensions, + kNoRtpHeaderExtensionSizes, nullptr /* rtp_state */); EXPECT_TRUE(sender.GetFecPackets().empty()); } TEST(FlexfecSenderTest, ProtectOneFrameWithOneFecPacket) { SimulatedClock clock(kInitialSimulatedClockTime); - FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid, - kNoRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, - nullptr /* rtp_state */, &clock); + const Environment env = CreateEnvironment(&clock); + FlexfecSender sender(env, kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, + kNoMid, kNoRtpHeaderExtensions, + kNoRtpHeaderExtensionSizes, nullptr /* rtp_state */); auto fec_packet = GenerateSingleFlexfecPacket(&sender); EXPECT_EQ(kRtpHeaderSize, fec_packet->headers_size()); @@ -116,6 +128,8 @@ TEST(FlexfecSenderTest, ProtectOneFrameWithOneFecPacket) { } TEST(FlexfecSenderTest, ProtectTwoFramesWithOneFecPacket) { + SimulatedClock clock(kInitialSimulatedClockTime); + const Environment env = CreateEnvironment(&clock); // FEC parameters selected to generate a single FEC packet per frame. FecProtectionParams params; params.fec_rate = 15; @@ -123,10 +137,9 @@ TEST(FlexfecSenderTest, ProtectTwoFramesWithOneFecPacket) { params.fec_mask_type = kFecMaskRandom; constexpr size_t kNumFrames = 2; constexpr size_t kNumPacketsPerFrame = 2; - SimulatedClock clock(kInitialSimulatedClockTime); - FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid, - kNoRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, - nullptr /* rtp_state */, &clock); + FlexfecSender sender(env, kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, + kNoMid, kNoRtpHeaderExtensions, + kNoRtpHeaderExtensionSizes, nullptr /* rtp_state */); sender.SetProtectionParameters(params, params); AugmentedPacketGenerator packet_generator(kMediaSsrc); @@ -155,6 +168,8 @@ TEST(FlexfecSenderTest, ProtectTwoFramesWithOneFecPacket) { } TEST(FlexfecSenderTest, ProtectTwoFramesWithTwoFecPackets) { + SimulatedClock clock(kInitialSimulatedClockTime); + const Environment env = CreateEnvironment(&clock); // FEC parameters selected to generate a single FEC packet per frame. FecProtectionParams params; params.fec_rate = 30; @@ -162,10 +177,9 @@ TEST(FlexfecSenderTest, ProtectTwoFramesWithTwoFecPackets) { params.fec_mask_type = kFecMaskRandom; constexpr size_t kNumFrames = 2; constexpr size_t kNumPacketsPerFrame = 2; - SimulatedClock clock(kInitialSimulatedClockTime); - FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid, - kNoRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, - nullptr /* rtp_state */, &clock); + FlexfecSender sender(env, kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, + kNoMid, kNoRtpHeaderExtensions, + kNoRtpHeaderExtensionSizes, nullptr /* rtp_state */); sender.SetProtectionParameters(params, params); AugmentedPacketGenerator packet_generator(kMediaSsrc); @@ -196,11 +210,12 @@ TEST(FlexfecSenderTest, ProtectTwoFramesWithTwoFecPackets) { // In the tests, we only consider RTP header extensions that are useful for BWE. 
TEST(FlexfecSenderTest, NoRtpHeaderExtensionsForBweByDefault) { - const std::vector kRtpHeaderExtensions{}; SimulatedClock clock(kInitialSimulatedClockTime); - FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid, - kRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, - nullptr /* rtp_state */, &clock); + const Environment env = CreateEnvironment(&clock); + const std::vector kRtpHeaderExtensions{}; + FlexfecSender sender(env, kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, + kNoMid, kRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, + nullptr /* rtp_state */); auto fec_packet = GenerateSingleFlexfecPacket(&sender); EXPECT_FALSE(fec_packet->HasExtension()); @@ -209,12 +224,13 @@ TEST(FlexfecSenderTest, NoRtpHeaderExtensionsForBweByDefault) { } TEST(FlexfecSenderTest, RegisterAbsoluteSendTimeRtpHeaderExtension) { + SimulatedClock clock(kInitialSimulatedClockTime); + const Environment env = CreateEnvironment(&clock); const std::vector kRtpHeaderExtensions{ {RtpExtension::kAbsSendTimeUri, 1}}; - SimulatedClock clock(kInitialSimulatedClockTime); - FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid, - kRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, - nullptr /* rtp_state */, &clock); + FlexfecSender sender(env, kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, + kNoMid, kRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, + nullptr /* rtp_state */); auto fec_packet = GenerateSingleFlexfecPacket(&sender); EXPECT_TRUE(fec_packet->HasExtension()); @@ -223,12 +239,13 @@ TEST(FlexfecSenderTest, RegisterAbsoluteSendTimeRtpHeaderExtension) { } TEST(FlexfecSenderTest, RegisterTransmissionOffsetRtpHeaderExtension) { + SimulatedClock clock(kInitialSimulatedClockTime); + const Environment env = CreateEnvironment(&clock); const std::vector kRtpHeaderExtensions{ {RtpExtension::kTimestampOffsetUri, 1}}; - SimulatedClock clock(kInitialSimulatedClockTime); - FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid, - kRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, - nullptr /* rtp_state */, &clock); + FlexfecSender sender(env, kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, + kNoMid, kRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, + nullptr /* rtp_state */); auto fec_packet = GenerateSingleFlexfecPacket(&sender); EXPECT_FALSE(fec_packet->HasExtension()); @@ -237,12 +254,13 @@ TEST(FlexfecSenderTest, RegisterTransmissionOffsetRtpHeaderExtension) { } TEST(FlexfecSenderTest, RegisterTransportSequenceNumberRtpHeaderExtension) { + SimulatedClock clock(kInitialSimulatedClockTime); + const Environment env = CreateEnvironment(&clock); const std::vector kRtpHeaderExtensions{ {RtpExtension::kTransportSequenceNumberUri, 1}}; - SimulatedClock clock(kInitialSimulatedClockTime); - FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid, - kRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, - nullptr /* rtp_state */, &clock); + FlexfecSender sender(env, kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, + kNoMid, kRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, + nullptr /* rtp_state */); auto fec_packet = GenerateSingleFlexfecPacket(&sender); EXPECT_FALSE(fec_packet->HasExtension()); @@ -251,14 +269,15 @@ TEST(FlexfecSenderTest, RegisterTransportSequenceNumberRtpHeaderExtension) { } TEST(FlexfecSenderTest, RegisterAllRtpHeaderExtensionsForBwe) { + SimulatedClock clock(kInitialSimulatedClockTime); + const Environment env = CreateEnvironment(&clock); const std::vector kRtpHeaderExtensions{ {RtpExtension::kAbsSendTimeUri, 1}, 
{RtpExtension::kTimestampOffsetUri, 2}, {RtpExtension::kTransportSequenceNumberUri, 3}}; - SimulatedClock clock(kInitialSimulatedClockTime); - FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid, - kRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, - nullptr /* rtp_state */, &clock); + FlexfecSender sender(env, kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, + kNoMid, kRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, + nullptr /* rtp_state */); auto fec_packet = GenerateSingleFlexfecPacket(&sender); EXPECT_TRUE(fec_packet->HasExtension()); @@ -268,19 +287,21 @@ TEST(FlexfecSenderTest, RegisterAllRtpHeaderExtensionsForBwe) { TEST(FlexfecSenderTest, MaxPacketOverhead) { SimulatedClock clock(kInitialSimulatedClockTime); - FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid, - kNoRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, - nullptr /* rtp_state */, &clock); + const Environment env = CreateEnvironment(&clock); + FlexfecSender sender(env, kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, + kNoMid, kNoRtpHeaderExtensions, + kNoRtpHeaderExtensionSizes, nullptr /* rtp_state */); EXPECT_EQ(kFlexfecMaxHeaderSize, sender.MaxPacketOverhead()); } TEST(FlexfecSenderTest, MaxPacketOverheadWithExtensions) { + SimulatedClock clock(kInitialSimulatedClockTime); + const Environment env = CreateEnvironment(&clock); const std::vector kRtpHeaderExtensions{ {RtpExtension::kAbsSendTimeUri, 1}, {RtpExtension::kTimestampOffsetUri, 2}, {RtpExtension::kTransportSequenceNumberUri, 3}}; - SimulatedClock clock(kInitialSimulatedClockTime); const size_t kExtensionHeaderLength = 1; const size_t kRtpOneByteHeaderLength = 4; const size_t kExtensionsTotalSize = @@ -288,22 +309,23 @@ TEST(FlexfecSenderTest, MaxPacketOverheadWithExtensions) { AbsoluteSendTime::kValueSizeBytes + kExtensionHeaderLength + TransmissionOffset::kValueSizeBytes + kExtensionHeaderLength + TransportSequenceNumber::kValueSizeBytes); - FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid, - kRtpHeaderExtensions, RTPSender::FecExtensionSizes(), - nullptr /* rtp_state */, &clock); + FlexfecSender sender(env, kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, + kNoMid, kRtpHeaderExtensions, + RTPSender::FecExtensionSizes(), nullptr /* rtp_state */); EXPECT_EQ(kExtensionsTotalSize + kFlexfecMaxHeaderSize, sender.MaxPacketOverhead()); } TEST(FlexfecSenderTest, MidIncludedInPacketsWhenSet) { + SimulatedClock clock(kInitialSimulatedClockTime); + const Environment env = CreateEnvironment(&clock); const std::vector kRtpHeaderExtensions{ {RtpExtension::kMidUri, 1}}; const char kMid[] = "mid"; - SimulatedClock clock(kInitialSimulatedClockTime); - FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kMid, + FlexfecSender sender(env, kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kMid, kRtpHeaderExtensions, RTPSender::FecExtensionSizes(), - nullptr /* rtp_state */, &clock); + nullptr /* rtp_state */); auto fec_packet = GenerateSingleFlexfecPacket(&sender); @@ -313,13 +335,14 @@ TEST(FlexfecSenderTest, MidIncludedInPacketsWhenSet) { } TEST(FlexfecSenderTest, SetsAndGetsRtpState) { + SimulatedClock clock(kInitialSimulatedClockTime); + const Environment env = CreateEnvironment(&clock); RtpState initial_rtp_state; initial_rtp_state.sequence_number = 100; initial_rtp_state.start_timestamp = 200; - SimulatedClock clock(kInitialSimulatedClockTime); - FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid, - kNoRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, - 
&initial_rtp_state, &clock); + FlexfecSender sender(env, kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, + kNoMid, kNoRtpHeaderExtensions, + kNoRtpHeaderExtensionSizes, &initial_rtp_state); auto fec_packet = GenerateSingleFlexfecPacket(&sender); EXPECT_EQ(initial_rtp_state.sequence_number, fec_packet->SequenceNumber()); diff --git a/modules/rtp_rtcp/source/forward_error_correction.cc b/modules/rtp_rtcp/source/forward_error_correction.cc index 15a0801ac0..39c098a65c 100644 --- a/modules/rtp_rtcp/source/forward_error_correction.cc +++ b/modules/rtp_rtcp/source/forward_error_correction.cc @@ -13,10 +13,15 @@ #include #include +#include +#include +#include #include +#include #include "absl/algorithm/container.h" #include "modules/include/module_common_types_public.h" +#include "modules/include/module_fec_types.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/flexfec_03_header_reader_writer.h" @@ -351,8 +356,15 @@ void ForwardErrorCorrection::InsertMediaPacket( RTC_DCHECK_EQ(received_packet.ssrc, protected_media_ssrc_); // Search for duplicate packets. - for (const auto& recovered_packet : *recovered_packets) { + auto insert_pos = recovered_packets->begin(); + for (auto it = recovered_packets->rbegin(), end = recovered_packets->rend(); + it != end; ++it) { + const auto& recovered_packet = *it; RTC_DCHECK_EQ(recovered_packet->ssrc, received_packet.ssrc); + if (SortablePacket::LessThan()(recovered_packet, &received_packet)) { + insert_pos = it.base(); + break; + } if (recovered_packet->seq_num == received_packet.seq_num) { // Duplicate packet, no need to add to list. return; @@ -367,11 +379,8 @@ void ForwardErrorCorrection::InsertMediaPacket( recovered_packet->ssrc = received_packet.ssrc; recovered_packet->seq_num = received_packet.seq_num; recovered_packet->pkt = received_packet.pkt; - // TODO(holmer): Consider replacing this with a binary search for the right - // position, and then just insert the new packet. Would get rid of the sort. RecoveredPacket* recovered_packet_ptr = recovered_packet.get(); - recovered_packets->push_back(std::move(recovered_packet)); - recovered_packets->sort(SortablePacket::LessThan()); + recovered_packets->insert(insert_pos, std::move(recovered_packet)); UpdateCoveringFecPackets(*recovered_packet_ptr); } @@ -395,8 +404,16 @@ void ForwardErrorCorrection::InsertFecPacket( RTC_DCHECK_EQ(received_packet.ssrc, ssrc_); // Check for duplicate. - for (const auto& existing_fec_packet : received_fec_packets_) { + auto insert_pos = received_fec_packets_.begin(); + for (auto it = received_fec_packets_.rbegin(), + end = received_fec_packets_.rend(); + it != end; ++it) { + const auto& existing_fec_packet = *it; RTC_DCHECK_EQ(existing_fec_packet->ssrc, received_packet.ssrc); + if (SortablePacket::LessThan()(existing_fec_packet, &received_packet)) { + insert_pos = it.base(); + break; + } if (existing_fec_packet->seq_num == received_packet.seq_num) { // Drop duplicate FEC packet data. return; @@ -456,10 +473,7 @@ void ForwardErrorCorrection::InsertFecPacket( RTC_LOG(LS_WARNING) << "Received FEC packet has an all-zero packet mask."; } else { AssignRecoveredPackets(recovered_packets, fec_packet.get()); - // TODO(holmer): Consider replacing this with a binary search for the right - // position, and then just insert the new packet. Would get rid of the sort. 
- received_fec_packets_.push_back(std::move(fec_packet)); - received_fec_packets_.sort(SortablePacket::LessThan()); + received_fec_packets_.insert(insert_pos, std::move(fec_packet)); const size_t max_fec_packets = fec_header_reader_->MaxFecPackets(); if (received_fec_packets_.size() > max_fec_packets) { received_fec_packets_.pop_front(); @@ -571,7 +585,7 @@ bool ForwardErrorCorrection::StartPacketRecovery( } bool ForwardErrorCorrection::FinishPacketRecovery( - const ReceivedFecPacket& fec_packet, + const ReceivedFecPacket& /* fec_packet */, RecoveredPacket* recovered_packet) { uint8_t* data = recovered_packet->pkt->data.MutableData(); // Set the RTP version to 2. @@ -690,11 +704,16 @@ size_t ForwardErrorCorrection::AttemptRecovery( auto* recovered_packet_ptr = recovered_packet.get(); // Add recovered packet to the list of recovered packets and update any // FEC packets covering this packet with a pointer to the data. - // TODO(holmer): Consider replacing this with a binary search for the - // right position, and then just insert the new packet. Would get rid of - // the sort. - recovered_packets->push_back(std::move(recovered_packet)); - recovered_packets->sort(SortablePacket::LessThan()); + auto insert_pos = recovered_packets->begin(); + for (auto it = recovered_packets->rbegin(), + end = recovered_packets->rend(); + it != end; ++it) { + if (!SortablePacket::LessThan()(recovered_packet, *it)) { + insert_pos = it.base(); + break; + } + } + recovered_packets->insert(insert_pos, std::move(recovered_packet)); UpdateCoveringFecPackets(*recovered_packet_ptr); DiscardOldRecoveredPackets(recovered_packets); fec_packet_it = received_fec_packets_.erase(fec_packet_it); @@ -709,7 +728,7 @@ size_t ForwardErrorCorrection::AttemptRecovery( // packet is old. We can discard this FEC packet. fec_packet_it = received_fec_packets_.erase(fec_packet_it); } else { - fec_packet_it++; + ++fec_packet_it; } } diff --git a/modules/rtp_rtcp/source/forward_error_correction.h b/modules/rtp_rtcp/source/forward_error_correction.h index 84278a8c5f..b4e1a50f6e 100644 --- a/modules/rtp_rtcp/source/forward_error_correction.h +++ b/modules/rtp_rtcp/source/forward_error_correction.h @@ -19,11 +19,10 @@ #include #include "absl/container/inlined_vector.h" +#include "api/array_view.h" #include "api/scoped_refptr.h" -#include "api/units/timestamp.h" #include "modules/include/module_fec_types.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/forward_error_correction_internal.h" #include "rtc_base/copy_on_write_buffer.h" @@ -56,7 +55,7 @@ class ForwardErrorCorrection { // reaches zero. virtual int32_t Release(); - rtc::CopyOnWriteBuffer data; // Packet data. + CopyOnWriteBuffer data; // Packet data. private: int32_t ref_count_; // Counts the number of references to a packet. @@ -87,7 +86,7 @@ class ForwardErrorCorrection { // otherwise. bool is_recovered; RtpHeaderExtensionMap extensions; - rtc::scoped_refptr pkt; // Pointer to the packet storage. + scoped_refptr pkt; // Pointer to the packet storage. }; // The recovered list parameter of DecodeFec() references structs of @@ -103,7 +102,7 @@ class ForwardErrorCorrection { // through the received packet list. bool returned; // True when the packet already has been returned to the // caller through the callback. - rtc::scoped_refptr pkt; // Pointer to the packet storage. + scoped_refptr pkt; // Pointer to the packet storage. 
}; // Used to link media packets to their protecting FEC packets. @@ -114,7 +113,7 @@ class ForwardErrorCorrection { ProtectedPacket(); ~ProtectedPacket(); - rtc::scoped_refptr pkt; + scoped_refptr pkt; }; using ProtectedPacketList = std::list>; @@ -149,7 +148,7 @@ class ForwardErrorCorrection { protected_streams; size_t protection_length; // Raw data. - rtc::scoped_refptr pkt; + scoped_refptr pkt; }; using PacketList = std::list>; @@ -399,7 +398,7 @@ class FecHeaderWriter { struct ProtectedStream { uint32_t ssrc = 0; uint16_t seq_num_base = 0; - rtc::ArrayView packet_mask; + ArrayView packet_mask; }; virtual ~FecHeaderWriter(); @@ -425,7 +424,7 @@ class FecHeaderWriter { // Writes FEC header. virtual void FinalizeFecHeader( - rtc::ArrayView protected_streams, + ArrayView protected_streams, ForwardErrorCorrection::Packet& fec_packet) const = 0; protected: diff --git a/modules/rtp_rtcp/source/forward_error_correction_internal.cc b/modules/rtp_rtcp/source/forward_error_correction_internal.cc index a10f2e6a21..98aa2b3ca5 100644 --- a/modules/rtp_rtcp/source/forward_error_correction_internal.cc +++ b/modules/rtp_rtcp/source/forward_error_correction_internal.cc @@ -13,7 +13,10 @@ #include #include +#include +#include "api/array_view.h" +#include "modules/include/module_fec_types.h" #include "modules/rtp_rtcp/source/fec_private_tables_bursty.h" #include "modules/rtp_rtcp/source/fec_private_tables_random.h" #include "rtc_base/checks.h" @@ -148,8 +151,8 @@ PacketMaskTable::PacketMaskTable(FecMaskType fec_mask_type, PacketMaskTable::~PacketMaskTable() = default; -rtc::ArrayView PacketMaskTable::LookUp(int num_media_packets, - int num_fec_packets) { +ArrayView PacketMaskTable::LookUp(int num_media_packets, + int num_fec_packets) { RTC_DCHECK_GT(num_media_packets, 0); RTC_DCHECK_GT(num_fec_packets, 0); RTC_DCHECK_LE(num_media_packets, kUlpfecMaxMediaPackets); @@ -244,7 +247,7 @@ void RemainingPacketProtection(int num_media_packets, PacketMaskSize(num_media_packets - num_fec_for_imp_packets); auto end_row = (num_fec_for_imp_packets + num_fec_remaining); - rtc::ArrayView packet_mask_sub_21 = mask_table->LookUp( + ArrayView packet_mask_sub_21 = mask_table->LookUp( num_media_packets - num_fec_for_imp_packets, num_fec_remaining); ShiftFitSubMask(num_mask_bytes, res_mask_bytes, num_fec_for_imp_packets, @@ -252,7 +255,7 @@ void RemainingPacketProtection(int num_media_packets, } else if (mode == kModeOverlap || mode == kModeBiasFirstPacket) { // sub_mask22 - rtc::ArrayView packet_mask_sub_22 = + ArrayView packet_mask_sub_22 = mask_table->LookUp(num_media_packets, num_fec_remaining); FitSubMask(num_mask_bytes, num_mask_bytes, num_fec_remaining, @@ -279,7 +282,7 @@ void ImportantPacketProtection(int num_fec_for_imp_packets, const int num_imp_mask_bytes = PacketMaskSize(num_imp_packets); // Get sub_mask1 from table - rtc::ArrayView packet_mask_sub_1 = + ArrayView packet_mask_sub_1 = mask_table->LookUp(num_imp_packets, num_fec_for_imp_packets); FitSubMask(num_mask_bytes, num_imp_mask_bytes, num_fec_for_imp_packets, @@ -408,9 +411,9 @@ void UnequalProtectionMask(int num_media_packets, // * For all entries: 2 * fec index (1 based) // * Size for kPacketMaskBurstyTbl: 2 bytes. // * For all entries: 2 * fec index (1 based) -rtc::ArrayView LookUpInFecTable(const uint8_t* table, - int media_packet_index, - int fec_index) { +ArrayView LookUpInFecTable(const uint8_t* table, + int media_packet_index, + int fec_index) { RTC_DCHECK_LT(media_packet_index, table[0]); // Skip over the table size. 
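A note on the forward_error_correction.cc hunks above: they replace the old "push_back then list::sort" pattern with an insert at the already-sorted position, scanning from the back of the list because a newly received packet usually carries the highest sequence number. The sketch below is not WebRTC code; it uses a plain std::list<int> and ordinary integer comparison instead of SortablePacket::LessThan (which also handles sequence-number wrap-around), but it shows the same reverse-iterator insert that the diff applies to recovered_packets and received_fec_packets_.

#include <list>

// Insert `value` into the ascending-sorted `sorted`, dropping duplicates.
// Scanning from the back makes the common in-order case constant time.
void InsertSorted(std::list<int>& sorted, int value) {
  auto insert_pos = sorted.begin();
  for (auto it = sorted.rbegin(), end = sorted.rend(); it != end; ++it) {
    if (*it < value) {
      // `it.base()` is the forward iterator one past `*it`, i.e. the first
      // position where inserting `value` keeps the list ordered.
      insert_pos = it.base();
      break;
    }
    if (*it == value) {
      return;  // Duplicate; mirrors the early return in the diff.
    }
  }
  sorted.insert(insert_pos, value);
}

Compared with re-sorting after every insertion, this keeps each insert linear in the worst case and constant in the in-order case, which appears to be the motivation for dropping the old TODO(holmer) comments.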
@@ -464,14 +467,14 @@ void GeneratePacketMasks(int num_media_packets, // Retrieve corresponding mask table directly:for equal-protection case. // Mask = (k,n-k), with protection factor = (n-k)/k, // where k = num_media_packets, n=total#packets, (n-k)=num_fec_packets. - rtc::ArrayView mask = + ArrayView mask = mask_table->LookUp(num_media_packets, num_fec_packets); memcpy(packet_mask, &mask[0], mask.size()); } else { // UEP case UnequalProtectionMask(num_media_packets, num_fec_packets, num_imp_packets, num_mask_bytes, packet_mask, mask_table); } // End of UEP modification -} // End of GetPacketMasks +} // End of GetPacketMasks size_t PacketMaskSize(size_t num_sequence_numbers) { RTC_DCHECK_LE(num_sequence_numbers, 8 * kUlpfecPacketMaskSizeLBitSet); diff --git a/modules/rtp_rtcp/source/forward_error_correction_internal.h b/modules/rtp_rtcp/source/forward_error_correction_internal.h index 31acf73e3e..e02ba499d8 100644 --- a/modules/rtp_rtcp/source/forward_error_correction_internal.h +++ b/modules/rtp_rtcp/source/forward_error_correction_internal.h @@ -43,8 +43,7 @@ class PacketMaskTable { PacketMaskTable(FecMaskType fec_mask_type, int num_media_packets); ~PacketMaskTable(); - rtc::ArrayView LookUp(int num_media_packets, - int num_fec_packets); + ArrayView LookUp(int num_media_packets, int num_fec_packets); private: static const uint8_t* PickTable(FecMaskType fec_mask_type, @@ -53,9 +52,9 @@ class PacketMaskTable { uint8_t fec_packet_mask_[kFECPacketMaskMaxSize]; }; -rtc::ArrayView LookUpInFecTable(const uint8_t* table, - int media_packet_index, - int fec_index); +ArrayView LookUpInFecTable(const uint8_t* table, + int media_packet_index, + int fec_index); // Returns an array of packet masks. The mask of a single FEC packet // corresponds to a number of mask bytes. The mask indicates which diff --git a/modules/rtp_rtcp/source/frame_object.cc b/modules/rtp_rtcp/source/frame_object.cc index 734f1b0a4f..d1ae7b98d5 100644 --- a/modules/rtp_rtcp/source/frame_object.cc +++ b/modules/rtp_rtcp/source/frame_object.cc @@ -10,13 +10,23 @@ #include "modules/rtp_rtcp/source/frame_object.h" -#include - +#include +#include #include +#include +#include "api/rtp_packet_infos.h" +#include "api/scoped_refptr.h" +#include "api/video/color_space.h" #include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame_metadata.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" #include "api/video/video_timing.h" -#include "rtc_base/checks.h" +#include "common_video/frame_instrumentation_data.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" namespace webrtc { RtpFrameObject::RtpFrameObject( @@ -34,9 +44,12 @@ RtpFrameObject::RtpFrameObject( VideoRotation rotation, VideoContentType content_type, const RTPVideoHeader& video_header, - const absl::optional& color_space, + const std::optional& color_space, + const std::optional< + std::variant>& + frame_instrumentation_data, RtpPacketInfos packet_infos, - rtc::scoped_refptr image_buffer) + scoped_refptr image_buffer) : image_buffer_(image_buffer), first_seq_num_(first_seq_num), last_seq_num_(last_seq_num), @@ -49,9 +62,10 @@ RtpFrameObject::RtpFrameObject( // TODO(philipel): Remove when encoded image is replaced by EncodedFrame. 
// VCMEncodedFrame members + _codecSpecificInfo.frame_instrumentation_data = frame_instrumentation_data; CopyCodecSpecific(&rtp_video_header_); _payloadType = payload_type; - SetTimestamp(rtp_timestamp); + SetRtpTimestamp(rtp_timestamp); ntp_time_ms_ = ntp_time_ms; _frameType = rtp_video_header_.frame_type; diff --git a/modules/rtp_rtcp/source/frame_object.h b/modules/rtp_rtcp/source/frame_object.h index 481c561795..44bbf6eec5 100644 --- a/modules/rtp_rtcp/source/frame_object.h +++ b/modules/rtp_rtcp/source/frame_object.h @@ -11,11 +11,24 @@ #ifndef MODULES_RTP_RTCP_SOURCE_FRAME_OBJECT_H_ #define MODULES_RTP_RTCP_SOURCE_FRAME_OBJECT_H_ +#include +#include +#include #include -#include "absl/types/optional.h" +#include "api/rtp_packet_infos.h" +#include "api/scoped_refptr.h" +#include "api/video/color_space.h" #include "api/video/encoded_frame.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" #include "api/video/video_frame_metadata.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" +#include "api/video/video_timing.h" +#include "common_video/frame_instrumentation_data.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" namespace webrtc { @@ -35,9 +48,12 @@ class RtpFrameObject : public EncodedFrame { VideoRotation rotation, VideoContentType content_type, const RTPVideoHeader& video_header, - const absl::optional& color_space, + const std::optional& color_space, + const std::optional>& + frame_instrumentation_data, RtpPacketInfos packet_infos, - rtc::scoped_refptr image_buffer); + scoped_refptr image_buffer); ~RtpFrameObject() override; uint16_t first_seq_num() const; @@ -62,7 +78,7 @@ class RtpFrameObject : public EncodedFrame { private: // Reference for mutable access. 
- rtc::scoped_refptr image_buffer_; + scoped_refptr image_buffer_; RTPVideoHeader rtp_video_header_; VideoCodecType codec_type_; uint16_t first_seq_num_; diff --git a/modules/rtp_rtcp/source/frame_transformer_factory_unittest.cc b/modules/rtp_rtcp/source/frame_transformer_factory_unittest.cc index a61179e9d3..312bdcbb86 100644 --- a/modules/rtp_rtcp/source/frame_transformer_factory_unittest.cc +++ b/modules/rtp_rtcp/source/frame_transformer_factory_unittest.cc @@ -10,21 +10,17 @@ #include "api/frame_transformer_factory.h" -#include +#include +#include #include -#include #include -#include "absl/memory/memory.h" -#include "api/call/transport.h" +#include "api/array_view.h" #include "api/test/mock_transformable_audio_frame.h" #include "api/test/mock_transformable_video_frame.h" -#include "call/video_receive_stream.h" -#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" -#include "rtc_base/event.h" +#include "api/video/video_frame_metadata.h" #include "test/gmock.h" #include "test/gtest.h" -#include "test/mock_frame_transformer.h" namespace webrtc { namespace { @@ -39,7 +35,7 @@ TEST(FrameTransformerFactory, CloneAudioFrame) { NiceMock original_frame; uint8_t data[10]; std::fill_n(data, 10, 5); - rtc::ArrayView data_view(data); + ArrayView data_view(data); ON_CALL(original_frame, GetData()).WillByDefault(Return(data_view)); auto cloned_frame = CloneAudioFrame(&original_frame); @@ -50,7 +46,7 @@ TEST(FrameTransformerFactory, CloneVideoFrame) { NiceMock original_frame; uint8_t data[10]; std::fill_n(data, 10, 5); - rtc::ArrayView data_view(data); + ArrayView data_view(data); EXPECT_CALL(original_frame, GetData()).WillRepeatedly(Return(data_view)); webrtc::VideoFrameMetadata metadata; std::vector csrcs{123, 321}; diff --git a/modules/rtp_rtcp/source/leb128_unittest.cc b/modules/rtp_rtcp/source/leb128_unittest.cc index dbabcb36f2..c58b8bc776 100644 --- a/modules/rtp_rtcp/source/leb128_unittest.cc +++ b/modules/rtp_rtcp/source/leb128_unittest.cc @@ -122,7 +122,7 @@ TEST(Leb128Test, WriteNearlyMaxValue) { uint8_t buffer[16]; EXPECT_EQ(WriteLeb128(0x7fff'ffff'ffff'ffff, buffer), 9); EXPECT_THAT( - rtc::MakeArrayView(buffer, 9), + MakeArrayView(buffer, 9), ElementsAre(0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f)); } @@ -130,7 +130,7 @@ TEST(Leb128Test, WriteMaxValue) { uint8_t buffer[16]; EXPECT_EQ(WriteLeb128(0xffff'ffff'ffff'ffff, buffer), 10); EXPECT_THAT( - rtc::MakeArrayView(buffer, 10), + MakeArrayView(buffer, 10), ElementsAre(0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01)); } diff --git a/modules/rtp_rtcp/source/nack_rtx_unittest.cc b/modules/rtp_rtcp/source/nack_rtx_unittest.cc index e578be86c4..28b75a7a25 100644 --- a/modules/rtp_rtcp/source/nack_rtx_unittest.cc +++ b/modules/rtp_rtcp/source/nack_rtx_unittest.cc @@ -8,23 +8,37 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include +#include #include #include +#include #include #include #include "absl/algorithm/container.h" +#include "api/array_view.h" #include "api/call/transport.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/rtp_headers.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" +#include "call/rtp_packet_sink_interface.h" #include "call/rtp_stream_receiver_controller.h" +#include "call/rtp_stream_receiver_controller_interface.h" #include "call/rtx_receive_stream.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/rtp_sender_video.h" #include "rtc_base/rate_limiter.h" #include "rtc_base/thread.h" -#include "test/explicit_key_value_config.h" +#include "system_wrappers/include/clock.h" #include "test/gtest.h" namespace webrtc { @@ -76,8 +90,8 @@ class RtxLoopBackTransport : public webrtc::Transport { packet_loss_ = 0; } - bool SendRtp(rtc::ArrayView data, - const PacketOptions& options) override { + bool SendRtp(ArrayView data, + const PacketOptions& /* options */) override { count_++; RtpPacketReceived packet; if (!packet.Parse(data)) @@ -101,7 +115,7 @@ class RtxLoopBackTransport : public webrtc::Transport { return true; } - bool SendRtcp(rtc::ArrayView data) override { + bool SendRtcp(ArrayView data) override { module_->IncomingRtcpPacket(data); return true; } @@ -119,29 +133,28 @@ class RtxLoopBackTransport : public webrtc::Transport { class RtpRtcpRtxNackTest : public ::testing::Test { protected: RtpRtcpRtxNackTest() - : rtp_rtcp_module_(nullptr), + : fake_clock_(123456), + env_(CreateEnvironment(&fake_clock_)), transport_(kTestRtxSsrc), rtx_stream_(&media_stream_, rtx_associated_payload_types_, kTestSsrc), - fake_clock(123456), - retransmission_rate_limiter_(&fake_clock, kMaxRttMs) {} + retransmission_rate_limiter_(&fake_clock_, kMaxRttMs) {} ~RtpRtcpRtxNackTest() override {} void SetUp() override { RtpRtcpInterface::Configuration configuration; configuration.audio = false; - configuration.clock = &fake_clock; - receive_statistics_ = ReceiveStatistics::Create(&fake_clock); + receive_statistics_ = ReceiveStatistics::Create(&fake_clock_); configuration.receive_statistics = receive_statistics_.get(); configuration.outgoing_transport = &transport_; configuration.retransmission_rate_limiter = &retransmission_rate_limiter_; configuration.local_media_ssrc = kTestSsrc; configuration.rtx_send_ssrc = kTestRtxSsrc; - rtp_rtcp_module_ = ModuleRtpRtcpImpl2::Create(configuration); - test::ExplicitKeyValueConfig field_trials(""); + rtp_rtcp_module_ = + std::make_unique(env_, configuration); RTPSenderVideo::Config video_config; - video_config.clock = &fake_clock; + video_config.clock = &fake_clock_; video_config.rtp_sender = rtp_rtcp_module_->RtpSender(); - video_config.field_trials = &field_trials; + video_config.field_trials = &env_.field_trials(); rtp_sender_video_ = std::make_unique(video_config); rtp_rtcp_module_->SetRTCPStatus(RtcpMode::kCompound); rtp_rtcp_module_->SetStorePacketsStatus(true, 600); @@ -212,18 +225,20 @@ class RtpRtcpRtxNackTest : public ::testing::Test { /*capture_time=*/Timestamp::Millis(timestamp / 90), payload_data, sizeof(payload_data), video_header, 
TimeDelta::Zero(), {})); // Min required delay until retransmit = 5 + RTT ms (RTT = 0). - fake_clock.AdvanceTimeMilliseconds(5); + fake_clock_.AdvanceTimeMilliseconds(5); int length = BuildNackList(nack_list); if (length > 0) rtp_rtcp_module_->SendNACK(nack_list, length); - fake_clock.AdvanceTimeMilliseconds(28); // 33ms - 5ms delay. + fake_clock_.AdvanceTimeMilliseconds(28); // 33ms - 5ms delay. // Prepare next frame. timestamp += 3000; } media_stream_.sequence_numbers_.sort(); } - rtc::AutoThread main_thread_; + AutoThread main_thread_; + SimulatedClock fake_clock_; + const Environment env_; std::unique_ptr receive_statistics_; std::unique_ptr rtp_rtcp_module_; std::unique_ptr rtp_sender_video_; @@ -233,7 +248,6 @@ class RtpRtcpRtxNackTest : public ::testing::Test { VerifyingMediaStream media_stream_; RtxReceiveStream rtx_stream_; uint8_t payload_data[65000]; - SimulatedClock fake_clock; RateLimiter retransmission_rate_limiter_; std::unique_ptr media_receiver_; std::unique_ptr rtx_receiver_; @@ -264,7 +278,7 @@ TEST_F(RtpRtcpRtxNackTest, LongNackList) { video_header, TimeDelta::Zero(), {})); // Prepare next frame. timestamp += 3000; - fake_clock.AdvanceTimeMilliseconds(33); + fake_clock_.AdvanceTimeMilliseconds(33); } EXPECT_FALSE(transport_.expected_sequence_numbers_.empty()); EXPECT_FALSE(media_stream_.sequence_numbers_.empty()); diff --git a/modules/rtp_rtcp/source/time_util.cc b/modules/rtp_rtcp/source/ntp_time_util.cc similarity index 74% rename from modules/rtp_rtcp/source/time_util.cc rename to modules/rtp_rtcp/source/ntp_time_util.cc index 44ca07dabe..6ef2e312a9 100644 --- a/modules/rtp_rtcp/source/time_util.cc +++ b/modules/rtp_rtcp/source/ntp_time_util.cc @@ -8,11 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/rtp_rtcp/source/time_util.h" +#include "modules/rtp_rtcp/source/ntp_time_util.h" #include +#include -#include "rtc_base/checks.h" +#include "api/units/time_delta.h" #include "rtc_base/numerics/divide_round.h" #include "rtc_base/time_utils.h" @@ -23,14 +24,26 @@ uint32_t SaturatedToCompactNtp(TimeDelta delta) { constexpr int kCompactNtpInSecond = 0x10000; if (delta <= TimeDelta::Zero()) return 0; - if (delta.us() >= - kMaxCompactNtp * rtc::kNumMicrosecsPerSec / kCompactNtpInSecond) + if (delta.us() >= kMaxCompactNtp * kNumMicrosecsPerSec / kCompactNtpInSecond) return kMaxCompactNtp; // To convert to compact ntp need to divide by 1e6 to get seconds, // then multiply by 0x10000 to get the final result. // To avoid float operations, multiplication and division swapped. return DivideRoundToNearest(delta.us() * kCompactNtpInSecond, - rtc::kNumMicrosecsPerSec); + kNumMicrosecsPerSec); +} + +TimeDelta CompactNtpIntervalToTimeDelta(uint32_t compact_ntp_interval) { + // Convert to 64bit value to avoid multiplication overflow. + int64_t value = int64_t{compact_ntp_interval}; + if (compact_ntp_interval > 0x8000'0000) { + value -= (int64_t{1} << 32); + } + // To convert to TimeDelta need to divide by 2^16 to get seconds, + // then multiply by 1'000'000 to get microseconds. To avoid float operations, + // multiplication and division are swapped. + int64_t us = DivideRoundToNearest(value * kNumMicrosecsPerSec, 1 << 16); + return TimeDelta::Micros(us); } TimeDelta CompactNtpRttToTimeDelta(uint32_t compact_ntp_interval) { @@ -40,15 +53,7 @@ TimeDelta CompactNtpRttToTimeDelta(uint32_t compact_ntp_interval) { // it might become negative that is indistinguishable from very large values. 
// Since very large RTT/delay is less likely than non-monotonic ntp clock, // such value is considered negative and converted to minimum value of 1ms. - if (compact_ntp_interval > 0x80000000) - return kMinRtt; - // Convert to 64bit value to avoid multiplication overflow. - int64_t value = static_cast(compact_ntp_interval); - // To convert to TimeDelta need to divide by 2^16 to get seconds, - // then multiply by 1'000'000 to get microseconds. To avoid float operations, - // multiplication and division are swapped. - int64_t us = DivideRoundToNearest(value * rtc::kNumMicrosecsPerSec, 1 << 16); // Small RTT value is considered too good to be true and increased to 1ms. - return std::max(TimeDelta::Micros(us), kMinRtt); + return std::max(CompactNtpIntervalToTimeDelta(compact_ntp_interval), kMinRtt); } } // namespace webrtc diff --git a/modules/rtp_rtcp/source/time_util.h b/modules/rtp_rtcp/source/ntp_time_util.h similarity index 80% rename from modules/rtp_rtcp/source/time_util.h rename to modules/rtp_rtcp/source/ntp_time_util.h index 9ff444b12e..7677fbddf8 100644 --- a/modules/rtp_rtcp/source/time_util.h +++ b/modules/rtp_rtcp/source/ntp_time_util.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_RTP_RTCP_SOURCE_TIME_UTIL_H_ -#define MODULES_RTP_RTCP_SOURCE_TIME_UTIL_H_ +#ifndef MODULES_RTP_RTCP_SOURCE_NTP_TIME_UTIL_H_ +#define MODULES_RTP_RTCP_SOURCE_NTP_TIME_UTIL_H_ #include @@ -42,10 +42,14 @@ inline constexpr int64_t ToNtpUnits(TimeDelta delta) { // then multiplaction and conversion to seconds are swapped to avoid float // arithmetic. // 2^31 us ~= 35.8 minutes. - return (rtc::saturated_cast(delta.us()) * (int64_t{1} << 32)) / - 1'000'000; + return (saturated_cast(delta.us()) * (int64_t{1} << 32)) / 1'000'000; } +// Converts interval from compact ntp (1/2^16 seconds) resolution to TimeDelta. +// This interval can be up to ~9.1 hours (2^15 seconds). +// Values close to 2^16 seconds are considered negative. +TimeDelta CompactNtpIntervalToTimeDelta(uint32_t compact_ntp_interval); + // Converts interval from compact ntp (1/2^16 seconds) resolution to TimeDelta. // This interval can be up to ~9.1 hours (2^15 seconds). // Values close to 2^16 seconds are considered negative and are converted to @@ -53,4 +57,4 @@ inline constexpr int64_t ToNtpUnits(TimeDelta delta) { TimeDelta CompactNtpRttToTimeDelta(uint32_t compact_ntp_interval); } // namespace webrtc -#endif // MODULES_RTP_RTCP_SOURCE_TIME_UTIL_H_ +#endif // MODULES_RTP_RTCP_SOURCE_NTP_TIME_UTIL_H_ diff --git a/modules/rtp_rtcp/source/time_util_unittest.cc b/modules/rtp_rtcp/source/ntp_time_util_unittest.cc similarity index 77% rename from modules/rtp_rtcp/source/time_util_unittest.cc rename to modules/rtp_rtcp/source/ntp_time_util_unittest.cc index b3d557fd83..f629481dcf 100644 --- a/modules/rtp_rtcp/source/time_util_unittest.cc +++ b/modules/rtp_rtcp/source/ntp_time_util_unittest.cc @@ -7,17 +7,18 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
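// Editor's aside (not part of the patch): the compact NTP interval consumed
// by CompactNtpIntervalToTimeDelta above is Q16.16 fixed point, i.e. units of
// 1/65536 s, and values above 0x8000'0000 wrap around to negative intervals.
// The patched helper stays in integer microseconds to avoid float; the sketch
// below uses double purely for readability.
#include <cstdint>

constexpr double CompactNtpIntervalToSeconds(uint32_t compact_ntp_interval) {
  int64_t value = compact_ntp_interval;
  if (compact_ntp_interval > 0x8000'0000) {
    value -= int64_t{1} << 32;  // e.g. 0xFFFF'0000 -> -65536 ticks.
  }
  return static_cast<double>(value) / 65536.0;
}
// 0x0001'8000 ticks is exactly 1.5 s; 0xFFFF'0000 is exactly -1 s, which
// CompactNtpRttToTimeDelta then clamps up to the 1 ms minimum RTT.
static_assert(CompactNtpIntervalToSeconds(0x0001'8000) == 1.5);
static_assert(CompactNtpIntervalToSeconds(0xFFFF'0000) == -1.0);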
*/ -#include "modules/rtp_rtcp/source/time_util.h" +#include "modules/rtp_rtcp/source/ntp_time_util.h" #include #include #include "api/units/time_delta.h" +#include "system_wrappers/include/ntp_time.h" #include "test/gtest.h" namespace webrtc { -TEST(TimeUtilTest, CompactNtp) { +TEST(NtpTimeUtilTest, CompactNtp) { const uint32_t kNtpSec = 0x12345678; const uint32_t kNtpFrac = 0x23456789; const NtpTime kNtp(kNtpSec, kNtpFrac); @@ -25,16 +26,16 @@ TEST(TimeUtilTest, CompactNtp) { EXPECT_EQ(kNtpMid, CompactNtp(kNtp)); } -TEST(TimeUtilTest, CompactNtpRttToTimeDelta) { +TEST(NtpTimeUtilTest, CompactNtpIntervalToTimeDelta) { const NtpTime ntp1(0x12345, 0x23456); const NtpTime ntp2(0x12654, 0x64335); int64_t ms_diff = ntp2.ToMs() - ntp1.ToMs(); uint32_t ntp_diff = CompactNtp(ntp2) - CompactNtp(ntp1); - EXPECT_NEAR(CompactNtpRttToTimeDelta(ntp_diff).ms(), ms_diff, 1); + EXPECT_NEAR(CompactNtpIntervalToTimeDelta(ntp_diff).ms(), ms_diff, 1); } -TEST(TimeUtilTest, CompactNtpRttToTimeDeltaWithWrap) { +TEST(NtpTimeUtilTest, CompactNtpIntervalToTimeDeltaWithWrap) { const NtpTime ntp1(0x1ffff, 0x23456); const NtpTime ntp2(0x20000, 0x64335); int64_t ms_diff = ntp2.ToMs() - ntp1.ToMs(); @@ -45,10 +46,10 @@ TEST(TimeUtilTest, CompactNtpRttToTimeDeltaWithWrap) { ASSERT_LT(CompactNtp(ntp2), CompactNtp(ntp1)); uint32_t ntp_diff = CompactNtp(ntp2) - CompactNtp(ntp1); - EXPECT_NEAR(CompactNtpRttToTimeDelta(ntp_diff).ms(), ms_diff, 1); + EXPECT_NEAR(CompactNtpIntervalToTimeDelta(ntp_diff).ms(), ms_diff, 1); } -TEST(TimeUtilTest, CompactNtpRttToTimeDeltaLarge) { +TEST(NtpTimeUtilTest, CompactNtpIntervalToTimeDeltaLarge) { const NtpTime ntp1(0x10000, 0x00006); const NtpTime ntp2(0x17fff, 0xffff5); int64_t ms_diff = ntp2.ToMs() - ntp1.ToMs(); @@ -58,17 +59,34 @@ TEST(TimeUtilTest, CompactNtpRttToTimeDeltaLarge) { EXPECT_NEAR(CompactNtpRttToTimeDelta(ntp_diff).ms(), ms_diff, 1); } -TEST(TimeUtilTest, CompactNtpRttToTimeDeltaNegative) { +TEST(NtpTimeUtilTest, CompactNtpIntervalToTimeDeltaNegative) { + const NtpTime ntp1(0x20000, 0x23456); + const NtpTime ntp2(0x1ffff, 0x64335); + int64_t ms_diff = ntp2.ToMs() - ntp1.ToMs(); + ASSERT_LT(ms_diff, 0); + // Ntp difference close to 2^16 seconds should be treated as negative. + uint32_t ntp_diff = CompactNtp(ntp2) - CompactNtp(ntp1); + EXPECT_NEAR(CompactNtpIntervalToTimeDelta(ntp_diff).ms(), ms_diff, 1); +} + +TEST(NtpTimeUtilTest, CompactNtpIntervalToTimeDeltaBorderToNegative) { + // Both +0x8000 and -x0x8000 seconds can be valid result when converting value + // exactly in the middle. + EXPECT_EQ(CompactNtpIntervalToTimeDelta(0x8000'0000).Abs(), + TimeDelta::Seconds(0x8000)); +} + +TEST(NtpTimeUtilTest, CompactNtpRttToTimeDeltaNegative) { const NtpTime ntp1(0x20000, 0x23456); const NtpTime ntp2(0x1ffff, 0x64335); int64_t ms_diff = ntp2.ToMs() - ntp1.ToMs(); - ASSERT_GT(0, ms_diff); + ASSERT_LT(ms_diff, 0); // Ntp difference close to 2^16 seconds should be treated as negative. uint32_t ntp_diff = CompactNtp(ntp2) - CompactNtp(ntp1); EXPECT_EQ(CompactNtpRttToTimeDelta(ntp_diff), TimeDelta::Millis(1)); } -TEST(TimeUtilTest, SaturatedToCompactNtp) { +TEST(NtpTimeUtilTest, SaturatedToCompactNtp) { // Converts negative to zero. 
EXPECT_EQ(SaturatedToCompactNtp(TimeDelta::Micros(-1)), 0u); EXPECT_EQ(SaturatedToCompactNtp(TimeDelta::Zero()), 0u); @@ -99,7 +117,7 @@ TEST(TimeUtilTest, SaturatedToCompactNtp) { 5'515, 16); } -TEST(TimeUtilTest, ToNtpUnits) { +TEST(NtpTimeUtilTest, ToNtpUnits) { EXPECT_EQ(ToNtpUnits(TimeDelta::Zero()), 0); EXPECT_EQ(ToNtpUnits(TimeDelta::Seconds(1)), int64_t{1} << 32); EXPECT_EQ(ToNtpUnits(TimeDelta::Seconds(-1)), -(int64_t{1} << 32)); diff --git a/modules/rtp_rtcp/source/packet_loss_stats.cc b/modules/rtp_rtcp/source/packet_loss_stats.cc index 36f0a63d59..df4c3fcb3d 100644 --- a/modules/rtp_rtcp/source/packet_loss_stats.cc +++ b/modules/rtp_rtcp/source/packet_loss_stats.cc @@ -12,15 +12,19 @@ #include #include +#include #include #include "rtc_base/checks.h" +namespace webrtc { +namespace { + // After this many packets are added, adding additional packets will cause the // oldest packets to be pruned from the buffer. -static const int kBufferSize = 100; +constexpr int kBufferSize = 100; -namespace webrtc { +} // namespace PacketLossStats::PacketLossStats() : single_loss_historic_count_(0), diff --git a/modules/rtp_rtcp/source/packet_sequencer.cc b/modules/rtp_rtcp/source/packet_sequencer.cc index 5f2f69f830..930ace9d38 100644 --- a/modules/rtp_rtcp/source/packet_sequencer.cc +++ b/modules/rtp_rtcp/source/packet_sequencer.cc @@ -10,8 +10,17 @@ #include "modules/rtp_rtcp/source/packet_sequencer.h" +#include +#include +#include + +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "rtc_base/checks.h" #include "rtc_base/random.h" +#include "system_wrappers/include/clock.h" namespace webrtc { @@ -24,7 +33,7 @@ constexpr uint32_t kTimestampTicksPerMs = 90; } // namespace PacketSequencer::PacketSequencer(uint32_t media_ssrc, - absl::optional rtx_ssrc, + std::optional rtx_ssrc, bool require_marker_before_media_padding, Clock* clock) : media_ssrc_(media_ssrc), @@ -38,9 +47,10 @@ PacketSequencer::PacketSequencer(uint32_t media_ssrc, last_packet_marker_bit_(false) { Random random(clock_->TimeInMicroseconds()); // Random start, 16 bits. Upper half of range is avoided in order to prevent - // wraparound issues during startup. Sequence number 0 is avoided for - // historical reasons, presumably to avoid debugability or test usage - // conflicts. + // SRTP wraparound issues during startup. See this unit test for details: + // SrtpSessionTest.ProtectUnprotectWrapAroundRocMismatch + // Sequence number 0 is avoided for historical reasons, presumably to avoid + // debugability or test usage conflicts. constexpr uint16_t kMaxInitRtpSeqNumber = 0x7fff; // 2^15 - 1. media_sequence_number_ = random.Rand(1, kMaxInitRtpSeqNumber); rtx_sequence_number_ = random.Rand(1, kMaxInitRtpSeqNumber); diff --git a/modules/rtp_rtcp/source/packet_sequencer.h b/modules/rtp_rtcp/source/packet_sequencer.h index 0ae069dabc..6f401ad316 100644 --- a/modules/rtp_rtcp/source/packet_sequencer.h +++ b/modules/rtp_rtcp/source/packet_sequencer.h @@ -11,6 +11,10 @@ #ifndef MODULES_RTP_RTCP_SOURCE_PACKET_SEQUENCER_H_ #define MODULES_RTP_RTCP_SOURCE_PACKET_SEQUENCER_H_ +#include +#include + +#include "api/units/timestamp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "system_wrappers/include/clock.h" @@ -28,7 +32,7 @@ class PacketSequencer { // last packets of a video frame). // Packets with unknown SSRCs will be ignored. 
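// Editor's aside (not part of the patch): the constructor declared just below
// draws both starting sequence numbers from [1, 0x7fff], as set up in the .cc
// change above. Staying in the lower half of the 16-bit space means early
// reordering around session start cannot be mistaken for a 0xffff -> 0x0000
// wrap, i.e. an SRTP rollover-counter (ROC) mismatch, which the referenced
// SrtpSessionTest.ProtectUnprotectWrapAroundRocMismatch test exercises.
// Hedged sketch of the draw, mirroring the constants above:
#include <cstdint>

#include "rtc_base/random.h"

uint16_t DrawInitialRtpSequenceNumber(webrtc::Random& random) {
  constexpr uint16_t kMaxInitRtpSeqNumber = 0x7fff;  // 2^15 - 1.
  // Zero is also avoided, per the historical note in PacketSequencer.
  return static_cast<uint16_t>(random.Rand(1, kMaxInitRtpSeqNumber));
}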
PacketSequencer(uint32_t media_ssrc, - absl::optional rtx_ssrc, + std::optional rtx_ssrc, bool require_marker_before_media_padding, Clock* clock); @@ -58,7 +62,7 @@ class PacketSequencer { void PopulatePaddingFields(RtpPacketToSend& packet); const uint32_t media_ssrc_; - const absl::optional rtx_ssrc_; + const std::optional rtx_ssrc_; const bool require_marker_before_media_padding_; Clock* const clock_; diff --git a/modules/rtp_rtcp/source/packet_sequencer_unittest.cc b/modules/rtp_rtcp/source/packet_sequencer_unittest.cc index d892863768..f37963e16d 100644 --- a/modules/rtp_rtcp/source/packet_sequencer_unittest.cc +++ b/modules/rtp_rtcp/source/packet_sequencer_unittest.cc @@ -10,10 +10,13 @@ #include "modules/rtp_rtcp/source/packet_sequencer.h" +#include + +#include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "system_wrappers/include/clock.h" -#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { diff --git a/modules/rtp_rtcp/source/receive_statistics_impl.cc b/modules/rtp_rtcp/source/receive_statistics_impl.cc index 0e5e40f502..3b1fbbd440 100644 --- a/modules/rtp_rtcp/source/receive_statistics_impl.cc +++ b/modules/rtp_rtcp/source/receive_statistics_impl.cc @@ -10,20 +10,29 @@ #include "modules/rtp_rtcp/source/receive_statistics_impl.h" +#include #include +#include #include +#include #include +#include #include #include +#include "api/units/data_rate.h" #include "api/units/time_delta.h" -#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" +#include "api/units/timestamp.h" +#include "modules/rtp_rtcp/include/receive_statistics.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/clock.h" +#include "system_wrappers/include/ntp_time.h" namespace webrtc { namespace { @@ -33,22 +42,19 @@ constexpr TimeDelta kStatisticsProcessInterval = TimeDelta::Seconds(1); TimeDelta UnixEpochDelta(Clock& clock) { Timestamp now = clock.CurrentTime(); NtpTime ntp_now = clock.ConvertTimestampToNtpTime(now); - return TimeDelta::Millis(ntp_now.ToMs() - now.ms() - - rtc::kNtpJan1970Millisecs); + return TimeDelta::Millis(ntp_now.ToMs() - now.ms() - kNtpJan1970Millisecs); } } // namespace StreamStatistician::~StreamStatistician() {} -StreamStatisticianImpl::StreamStatisticianImpl(uint32_t ssrc, - Clock* clock, - int max_reordering_threshold) +StreamStatisticianImpl::StreamStatisticianImpl(uint32_t ssrc, Clock* clock) : ssrc_(ssrc), clock_(clock), delta_internal_unix_epoch_(UnixEpochDelta(*clock_)), incoming_bitrate_(/*max_window_size=*/kStatisticsProcessInterval), - max_reordering_threshold_(max_reordering_threshold), + max_reordering_threshold_(kDefaultMaxReorderingThreshold), enable_retransmit_detection_(false), cumulative_loss_is_capped_(false), jitter_q4_(0), @@ -72,7 +78,7 @@ bool StreamStatisticianImpl::UpdateOutOfOrder(const RtpPacketReceived& packet, --cumulative_loss_; uint16_t expected_sequence_number = *received_seq_out_of_order_ + 1; - received_seq_out_of_order_ = absl::nullopt; + received_seq_out_of_order_ = std::nullopt; if (packet.SequenceNumber() == expected_sequence_number) { // Ignore sequence number gap caused by stream restart for 
packet loss // calculation, by setting received_seq_max_ to the sequence number just @@ -159,16 +165,15 @@ void StreamStatisticianImpl::UpdateJitter(const RtpPacketReceived& packet, int32_t time_diff_samples = receive_diff_rtp - (packet.Timestamp() - last_received_timestamp_); - time_diff_samples = std::abs(time_diff_samples); - ReviseFrequencyAndJitter(packet.payload_type_frequency()); // lib_jingle sometimes deliver crazy jumps in TS for the same stream. // If this happens, don't update jitter value. Use 5 secs video frequency // as the threshold. - if (time_diff_samples < 450000) { + if (time_diff_samples < 5 * kVideoPayloadTypeFrequency && + time_diff_samples > -5 * kVideoPayloadTypeFrequency) { // Note we calculate in Q4 to avoid using float. - int32_t jitter_diff_q4 = (time_diff_samples << 4) - jitter_q4_; + int32_t jitter_diff_q4 = (std::abs(time_diff_samples) << 4) - jitter_q4_; jitter_q4_ += ((jitter_diff_q4 + 8) >> 4); } } @@ -282,20 +287,15 @@ void StreamStatisticianImpl::MaybeAppendReportBlockAndReset( // Only for report blocks in RTCP SR and RR. last_report_cumulative_loss_ = cumulative_loss_; last_report_seq_max_ = received_seq_max_; - BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "cumulative_loss_pkts", now.ms(), - cumulative_loss_, ssrc_); - BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "received_seq_max_pkts", now.ms(), - (received_seq_max_ - received_seq_first_), - ssrc_); } -absl::optional StreamStatisticianImpl::GetFractionLostInPercent() const { +std::optional StreamStatisticianImpl::GetFractionLostInPercent() const { if (!ReceivedRtpPacket()) { - return absl::nullopt; + return std::nullopt; } int64_t expected_packets = 1 + received_seq_max_ - received_seq_first_; if (expected_packets <= 0) { - return absl::nullopt; + return std::nullopt; } if (cumulative_loss_ <= 0) { return 0; @@ -340,18 +340,16 @@ bool StreamStatisticianImpl::IsRetransmitOfOldPacket( std::unique_ptr ReceiveStatistics::Create(Clock* clock) { return std::make_unique( - clock, [](uint32_t ssrc, Clock* clock, int max_reordering_threshold) { - return std::make_unique( - ssrc, clock, max_reordering_threshold); + clock, [](uint32_t ssrc, Clock* clock) { + return std::make_unique(ssrc, clock); }); } std::unique_ptr ReceiveStatistics::CreateThreadCompatible( Clock* clock) { return std::make_unique( - clock, [](uint32_t ssrc, Clock* clock, int max_reordering_threshold) { - return std::make_unique( - ssrc, clock, max_reordering_threshold); + clock, [](uint32_t ssrc, Clock* clock) { + return std::make_unique(ssrc, clock); }); } @@ -359,12 +357,10 @@ ReceiveStatisticsImpl::ReceiveStatisticsImpl( Clock* clock, std::function( uint32_t ssrc, - Clock* clock, - int max_reordering_threshold)> stream_statistician_factory) + Clock* clock)> stream_statistician_factory) : clock_(clock), stream_statistician_factory_(std::move(stream_statistician_factory)), - last_returned_ssrc_idx_(0), - max_reordering_threshold_(kDefaultMaxReorderingThreshold) {} + last_returned_ssrc_idx_(0) {} void ReceiveStatisticsImpl::OnRtpPacket(const RtpPacketReceived& packet) { // StreamStatisticianImpl instance is created once and only destroyed when @@ -386,21 +382,12 @@ StreamStatisticianImplInterface* ReceiveStatisticsImpl::GetOrCreateStatistician( uint32_t ssrc) { std::unique_ptr& impl = statisticians_[ssrc]; if (impl == nullptr) { // new element - impl = - stream_statistician_factory_(ssrc, clock_, max_reordering_threshold_); + impl = stream_statistician_factory_(ssrc, clock_); all_ssrcs_.push_back(ssrc); } return impl.get(); } -void 
ReceiveStatisticsImpl::SetMaxReorderingThreshold( - int max_reordering_threshold) { - max_reordering_threshold_ = max_reordering_threshold; - for (auto& statistician : statisticians_) { - statistician.second->SetMaxReorderingThreshold(max_reordering_threshold); - } -} - void ReceiveStatisticsImpl::SetMaxReorderingThreshold( uint32_t ssrc, int max_reordering_threshold) { diff --git a/modules/rtp_rtcp/source/receive_statistics_impl.h b/modules/rtp_rtcp/source/receive_statistics_impl.h index ccac2d55d6..5a391d61cd 100644 --- a/modules/rtp_rtcp/source/receive_statistics_impl.h +++ b/modules/rtp_rtcp/source/receive_statistics_impl.h @@ -11,16 +11,19 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RECEIVE_STATISTICS_IMPL_H_ #define MODULES_RTP_RTCP_SOURCE_RECEIVE_STATISTICS_IMPL_H_ -#include +#include +#include #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "call/rtp_packet_sink_interface.h" #include "modules/rtp_rtcp/include/receive_statistics.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "rtc_base/bitrate_tracker.h" #include "rtc_base/containers/flat_map.h" @@ -44,14 +47,12 @@ class StreamStatisticianImplInterface : public StreamStatistician { // Thread-compatible implementation of StreamStatisticianImplInterface. class StreamStatisticianImpl : public StreamStatisticianImplInterface { public: - StreamStatisticianImpl(uint32_t ssrc, - Clock* clock, - int max_reordering_threshold); + StreamStatisticianImpl(uint32_t ssrc, Clock* clock); ~StreamStatisticianImpl() override; // Implements StreamStatistician RtpReceiveStats GetStats() const override; - absl::optional GetFractionLostInPercent() const override; + std::optional GetFractionLostInPercent() const override; StreamDataCounters GetReceiveStreamDataCounters() const override; uint32_t BitrateReceived() const override; @@ -96,14 +97,14 @@ class StreamStatisticianImpl : public StreamStatisticianImplInterface { // senders, in particular, our own loss-based bandwidth estimator. int32_t cumulative_loss_rtcp_offset_; - absl::optional last_receive_time_; + std::optional last_receive_time_; uint32_t last_received_timestamp_; RtpSequenceNumberUnwrapper seq_unwrapper_; int64_t received_seq_first_; int64_t received_seq_max_; // Assume that the other side restarted when there are two sequential packets // with large jump from received_seq_max_. - absl::optional received_seq_out_of_order_; + std::optional received_seq_out_of_order_; // Current counter values. StreamDataCounters receive_counters_; @@ -119,17 +120,14 @@ class StreamStatisticianImpl : public StreamStatisticianImplInterface { // Thread-safe implementation of StreamStatisticianImplInterface. 
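// Editor's aside (not part of the patch): the reworked UpdateJitter above is
// the RFC 3550 interarrival-jitter estimator J += (|D| - J) / 16, held in Q4
// fixed point (1/16 of an RTP tick) so no float is needed. Checking both
// signs of the difference before std::abs also keeps a wrap-around difference
// of 0x8000'0000 (INT32_MIN, whose std::abs is undefined) out of the update,
// which appears to be what the new TwoPacketsWithMaximumRtpTimestampDifference
// test pins down. Hedged standalone sketch of the same update:
#include <cstdint>
#include <cstdlib>

// `jitter_q4` is the running jitter estimate in Q4 RTP ticks.
void UpdateJitterQ4(int32_t time_diff_samples, int32_t& jitter_q4) {
  constexpr int32_t kMaxPlausibleDiff = 5 * 90'000;  // 5 s of 90 kHz ticks.
  if (time_diff_samples >= kMaxPlausibleDiff ||
      time_diff_samples <= -kMaxPlausibleDiff) {
    return;  // Timestamp jump too large; leave the estimate untouched.
  }
  int32_t jitter_diff_q4 = (std::abs(time_diff_samples) << 4) - jitter_q4;
  jitter_q4 += (jitter_diff_q4 + 8) >> 4;  // "+ 8" rounds to nearest in Q4.
}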
class StreamStatisticianLocked : public StreamStatisticianImplInterface { public: - StreamStatisticianLocked(uint32_t ssrc, - Clock* clock, - int max_reordering_threshold) - : impl_(ssrc, clock, max_reordering_threshold) {} + StreamStatisticianLocked(uint32_t ssrc, Clock* clock) : impl_(ssrc, clock) {} ~StreamStatisticianLocked() override = default; RtpReceiveStats GetStats() const override { MutexLock lock(&stream_lock_); return impl_.GetStats(); } - absl::optional GetFractionLostInPercent() const override { + std::optional GetFractionLostInPercent() const override { MutexLock lock(&stream_lock_); return impl_.GetFractionLostInPercent(); } @@ -171,8 +169,7 @@ class ReceiveStatisticsImpl : public ReceiveStatistics { Clock* clock, std::function( uint32_t ssrc, - Clock* clock, - int max_reordering_threshold)> stream_statistician_factory); + Clock* clock)> stream_statistician_factory); ~ReceiveStatisticsImpl() override = default; // Implements ReceiveStatisticsProvider. @@ -183,7 +180,6 @@ class ReceiveStatisticsImpl : public ReceiveStatistics { // Implements ReceiveStatistics. StreamStatistician* GetStatistician(uint32_t ssrc) const override; - void SetMaxReorderingThreshold(int max_reordering_threshold) override; void SetMaxReorderingThreshold(uint32_t ssrc, int max_reordering_threshold) override; void EnableRetransmitDetection(uint32_t ssrc, bool enable) override; @@ -192,15 +188,12 @@ class ReceiveStatisticsImpl : public ReceiveStatistics { StreamStatisticianImplInterface* GetOrCreateStatistician(uint32_t ssrc); Clock* const clock_; - std::function( - uint32_t ssrc, - Clock* clock, - int max_reordering_threshold)> + std::function(uint32_t ssrc, + Clock* clock)> stream_statistician_factory_; // The index within `all_ssrcs_` that was last returned. size_t last_returned_ssrc_idx_; std::vector all_ssrcs_; - int max_reordering_threshold_; flat_map> statisticians_; }; @@ -213,8 +206,7 @@ class ReceiveStatisticsLocked : public ReceiveStatistics { Clock* clock, std::function( uint32_t ssrc, - Clock* clock, - int max_reordering_threshold)> stream_statitician_factory) + Clock* clock)> stream_statitician_factory) : impl_(clock, std::move(stream_statitician_factory)) {} ~ReceiveStatisticsLocked() override = default; std::vector RtcpReportBlocks(size_t max_blocks) override { @@ -229,10 +221,6 @@ class ReceiveStatisticsLocked : public ReceiveStatistics { MutexLock lock(&receive_statistics_lock_); return impl_.GetStatistician(ssrc); } - void SetMaxReorderingThreshold(int max_reordering_threshold) override { - MutexLock lock(&receive_statistics_lock_); - return impl_.SetMaxReorderingThreshold(max_reordering_threshold); - } void SetMaxReorderingThreshold(uint32_t ssrc, int max_reordering_threshold) override { MutexLock lock(&receive_statistics_lock_); diff --git a/modules/rtp_rtcp/source/receive_statistics_unittest.cc b/modules/rtp_rtcp/source/receive_statistics_unittest.cc index a2558545f0..918c74de7f 100644 --- a/modules/rtp_rtcp/source/receive_statistics_unittest.cc +++ b/modules/rtp_rtcp/source/receive_statistics_unittest.cc @@ -10,13 +10,17 @@ #include "modules/rtp_rtcp/include/receive_statistics.h" +#include #include #include #include #include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "rtc_base/random.h" +#include "rtc_base/checks.h" #include "system_wrappers/include/clock.h" #include "test/gmock.h" 
#include "test/gtest.h" @@ -898,5 +902,22 @@ TEST(ReviseJitterTest, EXPECT_EQ(GetJitter(*statistics), 172U); } +TEST(ReviseJitterTest, TwoPacketsWithMaximumRtpTimestampDifference) { + SimulatedClock clock(0); + std::unique_ptr statistics = + ReceiveStatistics::Create(&clock); + RtpPacketReceived packet1 = MakeRtpPacket(/*payload_type_frequency=*/90'000, + /*timestamp=*/0x01234567); + RtpPacketReceived packet2 = + MakeNextRtpPacket(packet1, + /*payload_type_frequency=*/90'000, + /*timestamp=*/0x81234567); + statistics->OnRtpPacket(packet1); + statistics->OnRtpPacket(packet2); + + // Expect large jump in RTP timestamp is ignored for jitter calculation. + EXPECT_EQ(GetJitter(*statistics), 0U); +} + } // namespace } // namespace webrtc diff --git a/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc b/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc index 6f90cd175c..3faca87ca8 100644 --- a/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc +++ b/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc @@ -11,19 +11,23 @@ #include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h" #include +#include -#include "modules/rtp_rtcp/source/time_util.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/rtp_rtcp/source/ntp_time_util.h" #include "rtc_base/logging.h" #include "system_wrappers/include/clock.h" #include "system_wrappers/include/ntp_time.h" +#include "system_wrappers/include/rtp_to_ntp_estimator.h" namespace webrtc { namespace { -constexpr int kMinimumNumberOfSamples = 2; +constexpr int kMinimumNumberOfSamples = 3; constexpr TimeDelta kTimingLogInterval = TimeDelta::Seconds(10); -constexpr int kClocksOffsetSmoothingWindow = 100; +constexpr int kClocksOffsetSmoothingWindow = 7; // Subtracts two NtpTime values keeping maximum precision. int64_t Subtract(NtpTime minuend, NtpTime subtrahend) { @@ -97,11 +101,11 @@ NtpTime RemoteNtpTimeEstimator::EstimateNtp(uint32_t rtp_timestamp) { return receiver_capture; } -absl::optional +std::optional RemoteNtpTimeEstimator::EstimateRemoteToLocalClockOffset() { if (ntp_clocks_offset_estimator_.GetNumberOfSamplesStored() < kMinimumNumberOfSamples) { - return absl::nullopt; + return std::nullopt; } return ntp_clocks_offset_estimator_.GetFilteredValue(); } diff --git a/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc b/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc index 8dbfaec940..5bde3445a0 100644 --- a/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc +++ b/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc @@ -10,11 +10,14 @@ #include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h" -#include "absl/types/optional.h" -#include "modules/rtp_rtcp/source/time_util.h" +#include +#include + +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/rtp_rtcp/source/ntp_time_util.h" #include "system_wrappers/include/clock.h" #include "system_wrappers/include/ntp_time.h" -#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { @@ -26,6 +29,9 @@ constexpr Timestamp kRemoteClockInitialTime = Timestamp::Millis(373); constexpr uint32_t kTimestampOffset = 567; constexpr int64_t kRemoteToLocalClockOffsetNtp = ToNtpUnits(kLocalClockInitialTime - kRemoteClockInitialTime); +// There can be small rounding differences when converting to the +// sub nano second precision of the NTP timestamps. 
+constexpr int64_t kEpsilon = 1; class RemoteNtpTimeEstimatorTest : public ::testing::Test { protected: @@ -44,7 +50,7 @@ class RemoteNtpTimeEstimatorTest : public ::testing::Test { NtpTime ntp = remote_clock_.CurrentNtpTime(); AdvanceTime(kTestRtt / 2); - RTC_DCHECK(estimator_.UpdateRtcpTimestamp(kTestRtt, ntp, rtcp_timestamp)); + EXPECT_TRUE(estimator_.UpdateRtcpTimestamp(kTestRtt, ntp, rtcp_timestamp)); } void SendRtcpSrInaccurately(TimeDelta ntp_error, TimeDelta networking_delay) { @@ -53,7 +59,7 @@ class RemoteNtpTimeEstimatorTest : public ::testing::Test { NtpTime ntp(static_cast(remote_clock_.CurrentNtpTime()) + ntp_error_fractions); AdvanceTime(kTestRtt / 2 + networking_delay); - RTC_DCHECK(estimator_.UpdateRtcpTimestamp(kTestRtt, ntp, rtcp_timestamp)); + EXPECT_TRUE(estimator_.UpdateRtcpTimestamp(kTestRtt, ntp, rtcp_timestamp)); } SimulatedClock local_clock_{kLocalClockInitialTime}; @@ -78,16 +84,20 @@ TEST_F(RemoteNtpTimeEstimatorTest, Estimate) { // Local peer needs at least 2 RTCP SR to calculate the capture time. const int64_t kNotEnoughRtcpSr = -1; EXPECT_EQ(kNotEnoughRtcpSr, estimator_.Estimate(rtp_timestamp)); - EXPECT_EQ(estimator_.EstimateRemoteToLocalClockOffset(), absl::nullopt); + EXPECT_EQ(estimator_.EstimateRemoteToLocalClockOffset(), std::nullopt); AdvanceTime(TimeDelta::Millis(800)); // Remote sends second RTCP SR. SendRtcpSr(); + AdvanceTime(TimeDelta::Millis(800)); + // Remote sends third RTCP SR. + SendRtcpSr(); + // Local peer gets enough RTCP SR to calculate the capture time. EXPECT_EQ(capture_ntp_time_ms, estimator_.Estimate(rtp_timestamp)); - EXPECT_EQ(estimator_.EstimateRemoteToLocalClockOffset(), - kRemoteToLocalClockOffsetNtp); + EXPECT_NEAR(*estimator_.EstimateRemoteToLocalClockOffset(), + kRemoteToLocalClockOffsetNtp, kEpsilon); } TEST_F(RemoteNtpTimeEstimatorTest, AveragesErrorsOut) { @@ -102,8 +112,8 @@ TEST_F(RemoteNtpTimeEstimatorTest, AveragesErrorsOut) { int64_t capture_ntp_time_ms = local_clock_.CurrentNtpInMilliseconds(); // Local peer gets enough RTCP SR to calculate the capture time. EXPECT_EQ(capture_ntp_time_ms, estimator_.Estimate(rtp_timestamp)); - EXPECT_EQ(kRemoteToLocalClockOffsetNtp, - estimator_.EstimateRemoteToLocalClockOffset()); + EXPECT_NEAR(kRemoteToLocalClockOffsetNtp, + *estimator_.EstimateRemoteToLocalClockOffset(), kEpsilon); // Remote sends corrupted RTCP SRs AdvanceTime(TimeDelta::Seconds(1)); @@ -120,8 +130,8 @@ TEST_F(RemoteNtpTimeEstimatorTest, AveragesErrorsOut) { // Errors should be averaged out. 
EXPECT_EQ(capture_ntp_time_ms, estimator_.Estimate(rtp_timestamp)); - EXPECT_EQ(kRemoteToLocalClockOffsetNtp, - estimator_.EstimateRemoteToLocalClockOffset()); + EXPECT_NEAR(kRemoteToLocalClockOffsetNtp, + *estimator_.EstimateRemoteToLocalClockOffset(), kEpsilon); } } // namespace diff --git a/modules/rtp_rtcp/source/rtcp_nack_stats.cc b/modules/rtp_rtcp/source/rtcp_nack_stats.cc index 1d652d0b5b..ea4afd3b6f 100644 --- a/modules/rtp_rtcp/source/rtcp_nack_stats.cc +++ b/modules/rtp_rtcp/source/rtcp_nack_stats.cc @@ -10,6 +10,8 @@ #include "modules/rtp_rtcp/source/rtcp_nack_stats.h" +#include + #include "modules/include/module_common_types_public.h" namespace webrtc { diff --git a/modules/rtp_rtcp/source/rtcp_packet.cc b/modules/rtp_rtcp/source/rtcp_packet.cc index bac03e73d2..5af4ccb73b 100644 --- a/modules/rtp_rtcp/source/rtcp_packet.cc +++ b/modules/rtp_rtcp/source/rtcp_packet.cc @@ -10,15 +10,19 @@ #include "modules/rtp_rtcp/source/rtcp_packet.h" +#include +#include + +#include "api/array_view.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "rtc_base/buffer.h" #include "rtc_base/checks.h" namespace webrtc { namespace rtcp { -constexpr size_t RtcpPacket::kHeaderLength; -rtc::Buffer RtcpPacket::Build() const { - rtc::Buffer packet(BlockLength()); +Buffer RtcpPacket::Build() const { + Buffer packet(BlockLength()); size_t length = 0; bool created = Create(packet.data(), &length, packet.capacity(), nullptr); @@ -44,7 +48,7 @@ bool RtcpPacket::OnBufferFull(uint8_t* packet, if (*index == 0) return false; RTC_DCHECK(callback) << "Fragmentation not supported."; - callback(rtc::ArrayView(packet, *index)); + callback(ArrayView(packet, *index)); *index = 0; return true; } diff --git a/modules/rtp_rtcp/source/rtcp_packet.h b/modules/rtp_rtcp/source/rtcp_packet.h index 07deb0f9bd..079414694c 100644 --- a/modules/rtp_rtcp/source/rtcp_packet.h +++ b/modules/rtp_rtcp/source/rtcp_packet.h @@ -39,13 +39,13 @@ namespace rtcp { // uint8_t packet[kPacketSize]; // with sequence number 56. // fir.Build(packet, &length, kPacketSize); // -// rtc::Buffer packet = fir.Build(); // Returns a RawPacket holding -// // the built rtcp packet. +// Buffer packet = fir.Build(); // Returns a RawPacket holding +// // the built rtcp packet. // -// CompoundPacket compound; // Builds a compound RTCP packet with -// compound.Append(&rr); // a receiver report, report block -// compound.Append(&fir); // and fir message. -// rtc::Buffer packet = compound.Build(); +// CompoundPacket compound; // Builds a compound RTCP packet with +// compound.Append(&rr); // a receiver report, report block +// compound.Append(&fir); // and fir message. +// Buffer packet = compound.Build(); class RtcpPacket { public: @@ -54,7 +54,7 @@ class RtcpPacket { // max_length bytes, it will be fragmented and multiple calls to this // callback will be made. using PacketReadyCallback = - rtc::FunctionView packet)>; + FunctionView packet)>; virtual ~RtcpPacket() = default; @@ -63,7 +63,7 @@ class RtcpPacket { // Convenience method mostly used for test. Creates packet without // fragmentation using BlockLength() to allocate big enough buffer. - rtc::Buffer Build() const; + Buffer Build() const; // Returns true if call to Create succeeded. 
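// Editor's aside (not part of the patch): the Build overload declared just
// below is the size-capped path; when the serialized data would exceed
// max_length bytes, OnBufferFull flushes what has been written so far and the
// callback runs once per fragment (the compound-packet tests further down
// exercise exactly this). Hedged usage sketch; kMaxRtcpSize and the commented
// transport call are made up for illustration:
#include <cstddef>
#include <cstdint>

#include "api/array_view.h"
#include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h"

bool SendSizeCapped(const webrtc::rtcp::CompoundPacket& compound) {
  constexpr size_t kMaxRtcpSize = 1200;  // Hypothetical per-datagram budget.
  return compound.Build(
      kMaxRtcpSize, [](webrtc::ArrayView<const uint8_t> packet) {
        // Called once per chunk of whole RTCP packets, each chunk at most
        // kMaxRtcpSize bytes.
        // transport->SendRtcp(packet);  // Hypothetical transport hookup.
      });
}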
bool Build(size_t max_length, PacketReadyCallback callback) const; diff --git a/modules/rtp_rtcp/source/rtcp_packet/app.cc b/modules/rtp_rtcp/source/rtcp_packet/app.cc index d5734c6dd5..f90c84ed48 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/app.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/app.cc @@ -21,8 +21,6 @@ namespace webrtc { namespace rtcp { -constexpr uint8_t App::kPacketType; -constexpr size_t App::kMaxDataSize; // Application-Defined packet (APP) (RFC 3550). // // 0 1 2 3 diff --git a/modules/rtp_rtcp/source/rtcp_packet/app.h b/modules/rtp_rtcp/source/rtcp_packet/app.h index 4518792e5a..c27a38409c 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/app.h +++ b/modules/rtp_rtcp/source/rtcp_packet/app.h @@ -59,7 +59,7 @@ class App : public RtcpPacket { uint8_t sub_type_; uint32_t name_; - rtc::Buffer data_; + Buffer data_; }; } // namespace rtcp diff --git a/modules/rtp_rtcp/source/rtcp_packet/app_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/app_unittest.cc index 8690e8e5a0..4448f346fa 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/app_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/app_unittest.cc @@ -10,6 +10,9 @@ #include "modules/rtp_rtcp/source/rtcp_packet/app.h" +#include + +#include "rtc_base/buffer.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -57,7 +60,7 @@ TEST(RtcpPacketAppTest, CreateWithoutData) { app.SetSubType(kSubtype); app.SetName(kName); - rtc::Buffer raw = app.Build(); + Buffer raw = app.Build(); EXPECT_THAT(make_tuple(raw.data(), raw.size()), ElementsAreArray(kPacketWithoutData)); @@ -80,7 +83,7 @@ TEST(RtcpPacketAppTest, CreateWithData) { app.SetName(kName); app.SetData(kData, sizeof(kData)); - rtc::Buffer raw = app.Build(); + Buffer raw = app.Build(); EXPECT_THAT(make_tuple(raw.data(), raw.size()), ElementsAreArray(kPacketWithData)); diff --git a/modules/rtp_rtcp/source/rtcp_packet/bye.cc b/modules/rtp_rtcp/source/rtcp_packet/bye.cc index a6471772b1..d4c9107217 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/bye.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/bye.cc @@ -13,7 +13,9 @@ #include #include +#include #include +#include #include "absl/strings/string_view.h" #include "modules/rtp_rtcp/source/byte_io.h" @@ -23,7 +25,6 @@ namespace webrtc { namespace rtcp { -constexpr uint8_t Bye::kPacketType; // Bye packet (BYE) (RFC 3550). // // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 diff --git a/modules/rtp_rtcp/source/rtcp_packet/bye.h b/modules/rtp_rtcp/source/rtcp_packet/bye.h index d31205793a..af00b071bb 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/bye.h +++ b/modules/rtp_rtcp/source/rtcp_packet/bye.h @@ -12,6 +12,8 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_BYE_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_BYE_H_ +#include +#include #include #include @@ -46,7 +48,8 @@ class Bye : public RtcpPacket { PacketReadyCallback callback) const override; private: - static const int kMaxNumberOfCsrcs = 0x1f - 1; // First item is sender SSRC. + static constexpr int kMaxNumberOfCsrcs = + 0x1f - 1; // First item is sender SSRC. 
std::vector csrcs_; std::string reason_; diff --git a/modules/rtp_rtcp/source/rtcp_packet/bye_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/bye_unittest.cc index 448c2d4194..af47d67d0a 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/bye_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/bye_unittest.cc @@ -10,6 +10,12 @@ #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" +#include +#include +#include +#include + +#include "rtc_base/buffer.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -28,7 +34,7 @@ TEST(RtcpPacketByeTest, CreateAndParseWithoutReason) { Bye bye; bye.SetSenderSsrc(kSenderSsrc); - rtc::Buffer raw = bye.Build(); + Buffer raw = bye.Build(); Bye parsed_bye; EXPECT_TRUE(test::ParseSinglePacket(raw, &parsed_bye)); @@ -43,7 +49,7 @@ TEST(RtcpPacketByeTest, CreateAndParseWithCsrcs) { EXPECT_TRUE(bye.SetCsrcs({kCsrc1, kCsrc2})); EXPECT_TRUE(bye.reason().empty()); - rtc::Buffer raw = bye.Build(); + Buffer raw = bye.Build(); Bye parsed_bye; EXPECT_TRUE(test::ParseSinglePacket(raw, &parsed_bye)); @@ -60,7 +66,7 @@ TEST(RtcpPacketByeTest, CreateAndParseWithCsrcsAndAReason) { EXPECT_TRUE(bye.SetCsrcs({kCsrc1, kCsrc2})); bye.SetReason(kReason); - rtc::Buffer raw = bye.Build(); + Buffer raw = bye.Build(); Bye parsed_bye; EXPECT_TRUE(test::ParseSinglePacket(raw, &parsed_bye)); @@ -84,7 +90,7 @@ TEST(RtcpPacketByeTest, CreateAndParseWithAReason) { bye.SetSenderSsrc(kSenderSsrc); bye.SetReason(kReason); - rtc::Buffer raw = bye.Build(); + Buffer raw = bye.Build(); Bye parsed_bye; EXPECT_TRUE(test::ParseSinglePacket(raw, &parsed_bye)); @@ -102,7 +108,7 @@ TEST(RtcpPacketByeTest, CreateAndParseWithReasons) { bye.SetSenderSsrc(kSenderSsrc); bye.SetReason(kReason); - rtc::Buffer raw = bye.Build(); + Buffer raw = bye.Build(); Bye parsed_bye; EXPECT_TRUE(test::ParseSinglePacket(raw, &parsed_bye)); @@ -123,7 +129,7 @@ TEST(RtcpPacketByeTest, ParseFailOnInvalidSrcCount) { Bye bye; bye.SetSenderSsrc(kSenderSsrc); - rtc::Buffer raw = bye.Build(); + Buffer raw = bye.Build(); raw[0]++; // Damage the packet: increase ssrc count by one. 
Bye parsed_bye; @@ -135,7 +141,7 @@ TEST(RtcpPacketByeTest, ParseFailOnInvalidReasonLength) { bye.SetSenderSsrc(kSenderSsrc); bye.SetReason("18 characters long"); - rtc::Buffer raw = bye.Build(); + Buffer raw = bye.Build(); // Damage the packet: decrease payload size by 4 bytes raw[3]--; raw.SetSize(raw.size() - 4); diff --git a/modules/rtp_rtcp/source/rtcp_packet/common_header.cc b/modules/rtp_rtcp/source/rtcp_packet/common_header.cc index 5b54982220..58593b6b4e 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/common_header.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/common_header.cc @@ -10,12 +10,14 @@ #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include +#include + #include "modules/rtp_rtcp/source/byte_io.h" #include "rtc_base/logging.h" namespace webrtc { namespace rtcp { -constexpr size_t CommonHeader::kHeaderSizeBytes; // 0 1 1 2 3 // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ diff --git a/modules/rtp_rtcp/source/rtcp_packet/common_header_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/common_header_unittest.cc index e8b4c52c68..0432e9d052 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/common_header_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/common_header_unittest.cc @@ -10,6 +10,9 @@ #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include +#include + #include "test/gtest.h" using webrtc::rtcp::CommonHeader; diff --git a/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc b/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc index 54f3555fc6..cc89347a37 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc @@ -10,9 +10,12 @@ #include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h" +#include +#include #include #include +#include "modules/rtp_rtcp/source/rtcp_packet.h" #include "rtc_base/checks.h" namespace webrtc { diff --git a/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h b/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h index d98dbd088d..027016d24b 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h +++ b/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h @@ -12,6 +12,8 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_COMPOUND_PACKET_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_COMPOUND_PACKET_H_ +#include +#include #include #include diff --git a/modules/rtp_rtcp/source/rtcp_packet/compound_packet_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/compound_packet_unittest.cc index ba7c241215..7645c267ba 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/compound_packet_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/compound_packet_unittest.cc @@ -10,14 +10,19 @@ #include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h" +#include +#include #include #include +#include "api/array_view.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" #include "modules/rtp_rtcp/source/rtcp_packet/fir.h" #include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" +#include "rtc_base/buffer.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -50,7 +55,7 @@ TEST(RtcpCompoundPacketTest, AppendPacket) { compound.Append(std::move(rr)); compound.Append(std::move(fir)); - rtc::Buffer packet = compound.Build(); + Buffer 
packet = compound.Build(); RtcpPacketParser parser; parser.Parse(packet); EXPECT_EQ(1, parser.receiver_report()->num_packets()); @@ -78,7 +83,7 @@ TEST(RtcpCompoundPacketTest, AppendPacketWithOwnAppendedPacket) { root.Append(std::move(bye)); root.Append(std::move(leaf)); - rtc::Buffer packet = root.Build(); + Buffer packet = root.Build(); RtcpPacketParser parser; parser.Parse(packet); EXPECT_EQ(1, parser.sender_report()->num_packets()); @@ -104,9 +109,9 @@ TEST(RtcpCompoundPacketTest, BuildWithInputBuffer) { const size_t kFirLength = 20; const size_t kBufferSize = kRrLength + kReportBlockLength + kFirLength; - MockFunction)> callback; + MockFunction)> callback; EXPECT_CALL(callback, Call(_)) - .WillOnce(Invoke([&](rtc::ArrayView packet) { + .WillOnce(Invoke([&](ArrayView packet) { RtcpPacketParser parser; parser.Parse(packet); EXPECT_EQ(1, parser.receiver_report()->num_packets()); @@ -132,16 +137,16 @@ TEST(RtcpCompoundPacketTest, BuildWithTooSmallBuffer_FragmentedSend) { const size_t kReportBlockLength = 24; const size_t kBufferSize = kRrLength + kReportBlockLength; - MockFunction)> callback; + MockFunction)> callback; EXPECT_CALL(callback, Call(_)) - .WillOnce(Invoke([&](rtc::ArrayView packet) { + .WillOnce(Invoke([&](ArrayView packet) { RtcpPacketParser parser; parser.Parse(packet); EXPECT_EQ(1, parser.receiver_report()->num_packets()); EXPECT_EQ(1U, parser.receiver_report()->report_blocks().size()); EXPECT_EQ(0, parser.fir()->num_packets()); })) - .WillOnce(Invoke([&](rtc::ArrayView packet) { + .WillOnce(Invoke([&](ArrayView packet) { RtcpPacketParser parser; parser.Parse(packet); EXPECT_EQ(0, parser.receiver_report()->num_packets()); diff --git a/modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.cc b/modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.cc new file mode 100644 index 0000000000..ff37b00fa9 --- /dev/null +++ b/modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.cc @@ -0,0 +1,323 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" + +#include +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "rtc_base/checks.h" +#include "rtc_base/network/ecn_marking.h" + +namespace webrtc { +namespace rtcp { + +/* + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + |V=2|P| FMT=11 | PT = 205 | length | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | SSRC of RTCP packet sender | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | SSRC of 1st RTP Stream | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | begin_seq | num_reports | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + |R|ECN| Arrival time offset | ... . + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + . . + . . + . . 
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | SSRC of nth RTP Stream | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | begin_seq | num_reports | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + |R|ECN| Arrival time offset | ... | + . . + . . + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | Report Timestamp (32 bits) | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +*/ + +namespace { + +constexpr size_t kSenderSsrcLength = 4; +constexpr size_t kHeaderPerMediaSssrcLength = 8; +constexpr size_t kTimestampLength = 4; + +// RFC-3168, Section 5 +constexpr uint16_t kEcnEct1 = 0x01; +constexpr uint16_t kEcnEct0 = 0x02; +constexpr uint16_t kEcnCe = 0x03; + +// Arrival time offset (ATO, 13 bits): +// The arrival time of the RTP packet at the receiver, as an offset before the +// time represented by the Report Timestamp (RTS) field of this RTCP congestion +// control feedback report. The ATO field is in units of 1/1024 seconds (this +// unit is chosen to give exact offsets from the RTS field) so, for example, an +// ATO value of 512 indicates that the corresponding RTP packet arrived exactly +// half a second before the time instant represented by the RTS field. If the +// measured value is greater than 8189/1024 seconds (the value that would be +// coded as 0x1FFD), the value 0x1FFE MUST be reported to indicate an over-range +// measurement. If the measurement is unavailable or if the arrival time of the +// RTP packet is after the time represented by the RTS field, then an ATO value +// of 0x1FFF MUST be reported for the packet. +uint16_t To13bitAto(TimeDelta arrival_time_offset) { + if (arrival_time_offset < TimeDelta::Zero()) { + return 0x1FFF; + } + return std::min( + static_cast(1024 * arrival_time_offset.seconds()), + int64_t{0x1FFE}); +} + +TimeDelta AtoToTimeDelta(uint16_t receive_info) { + // receive_info + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // |R|ECN| Arrival time offset | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + const uint16_t ato = receive_info & 0x1FFF; + if (ato == 0x1FFE) { + return TimeDelta::PlusInfinity(); + } + if (ato == 0x1FFF) { + return TimeDelta::MinusInfinity(); + } + return TimeDelta::Seconds(ato) / 1024; +} + +uint16_t To2BitEcn(EcnMarking ecn_marking) { + switch (ecn_marking) { + case EcnMarking::kNotEct: + return 0; + case EcnMarking::kEct1: + return kEcnEct1 << 13; + case EcnMarking::kEct0: + return kEcnEct0 << 13; + case EcnMarking::kCe: + return kEcnCe << 13; + } +} + +EcnMarking ToEcnMarking(uint16_t receive_info) { + const uint16_t ecn = (receive_info >> 13) & 0b11; + if (ecn == kEcnEct1) { + return EcnMarking::kEct1; + } + if (ecn == kEcnEct0) { + return EcnMarking::kEct0; + } + if (ecn == kEcnCe) { + return EcnMarking::kCe; + } + return EcnMarking::kNotEct; +} + +} // namespace + +CongestionControlFeedback ::CongestionControlFeedback( + std::vector packets, + uint32_t compact_ntp_timestamp) + : packets_(std::move(packets)), + report_timestamp_compact_ntp_(compact_ntp_timestamp) {} + +bool CongestionControlFeedback::Create(uint8_t* buffer, + size_t* position, + size_t max_length, + PacketReadyCallback callback) const { + // Ensure there is enough room for this packet. 
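// Editor's aside (not part of the patch): worked values for the fields that
// To13bitAto/AtoToTimeDelta and To2BitEcn/ToEcnMarking above encode. The ATO
// is in 1/1024 s units, so 512 means the packet arrived 0.5 s before the
// Report Timestamp; 0x1FFD (8189/1024 s, just under 8 s) is the largest
// reportable offset, 0x1FFE flags an over-range measurement, and 0x1FFF means
// the measurement is unavailable or the packet arrived after the RTS. The two
// ECN bits follow RFC 3168: 01 = ECT(1), 10 = ECT(0), 11 = CE, 00 = Not-ECT.
// Hedged arithmetic check:
static_assert(512 * 1'000 / 1024 == 500);       // 512 ticks == 0.5 s.
static_assert(0x1FFD * 1'000 / 1024 == 7'997);  // ~7.997 s in ms.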
+ while (*position + BlockLength() > max_length) { + if (!OnBufferFull(buffer, position, callback)) + return false; + } + const size_t position_end = *position + BlockLength(); + + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // |V=2|P| FMT=11 | PT = 205 | length | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | SSRC of RTCP packet sender | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + CreateHeader(kFeedbackMessageType, kPacketType, HeaderLength(), buffer, + position); + ByteWriter::WriteBigEndian(&buffer[*position], sender_ssrc()); + *position += 4; + + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | SSRC of nth RTP Stream | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | begin_seq | num_reports | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // |R|ECN| Arrival time offset | ... . + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // . . + auto write_report_for_ssrc = [&](ArrayView packets) { + // SSRC of nth RTP stream. + ByteWriter::WriteBigEndian(&buffer[*position], packets[0].ssrc); + *position += 4; + + // begin_seq + ByteWriter::WriteBigEndian(&buffer[*position], + packets[0].sequence_number); + *position += 2; + // num_reports + uint16_t num_reports = packets.size(); + RTC_DCHECK_EQ(static_cast( + + packets[packets.size() - 1].sequence_number - + packets[0].sequence_number + 1), + packets.size()) + << "Expected continous rtp sequence numbers."; + + // Each report block MUST NOT include more than 16384 packet metric + // blocks (i.e., it MUST NOT report on more than one quarter of the + // sequence number space in a single report). + if (num_reports > 16384) { + RTC_DCHECK_NOTREACHED() << "Unexpected number of reports:" << num_reports; + return; + } + ByteWriter::WriteBigEndian(&buffer[*position], num_reports); + *position += 2; + + for (const PacketInfo& packet : packets) { + bool received = packet.arrival_time_offset.IsFinite(); + uint16_t packet_info = 0; + if (received) { + packet_info = 0x8000 | To2BitEcn(packet.ecn) | + To13bitAto(packet.arrival_time_offset); + } + ByteWriter::WriteBigEndian(&buffer[*position], packet_info); + *position += 2; + } + // 32bit align per SSRC block. + if (num_reports % 2 != 0) { + ByteWriter::WriteBigEndian(&buffer[*position], 0); + *position += 2; + } + }; + + ArrayView remaining(packets_); + while (!remaining.empty()) { + int number_of_packets_for_ssrc = 0; + uint32_t ssrc = remaining[0].ssrc; + for (const PacketInfo& packet_info : remaining) { + if (packet_info.ssrc != ssrc) { + break; + } + ++number_of_packets_for_ssrc; + } + write_report_for_ssrc(remaining.subview(0, number_of_packets_for_ssrc)); + remaining = remaining.subview(number_of_packets_for_ssrc); + } + + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | Report Timestamp (32 bits) | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + ByteWriter::WriteBigEndian(&buffer[*position], + report_timestamp_compact_ntp_); + *position += 4; + + RTC_DCHECK_EQ(*position, position_end); + return true; +} + +size_t CongestionControlFeedback::BlockLength() const { + // Total size of this packet + size_t total_size = kSenderSsrcLength + kHeaderLength + kTimestampLength; + if (packets_.empty()) { + return total_size; + } + + auto increase_size_per_ssrc = [](int number_of_packets_for_ssrc) { + // Each packet report needs two bytes. 
+ size_t packet_block_size = number_of_packets_for_ssrc * 2; + // 32 bit aligned. + return kHeaderPerMediaSssrcLength + packet_block_size + + ((number_of_packets_for_ssrc % 2) != 0 ? 2 : 0); + }; + + uint32_t ssrc = packets_.front().ssrc; + uint16_t first_sequence_number = packets_.front().sequence_number; + for (size_t i = 0; i < packets_.size(); ++i) { + if (packets_[i].ssrc != ssrc) { + uint16_t number_of_packets = + packets_[i - 1].sequence_number - first_sequence_number + 1; + total_size += increase_size_per_ssrc(number_of_packets); + ssrc = packets_[i].ssrc; + first_sequence_number = packets_[i].sequence_number; + } + } + uint16_t number_of_packets = + packets_.back().sequence_number - first_sequence_number + 1; + total_size += increase_size_per_ssrc(number_of_packets); + + return total_size; +} + +bool CongestionControlFeedback::Parse(const rtcp::CommonHeader& packet) { + const uint8_t* payload = packet.payload(); + const uint8_t* payload_end = packet.payload() + packet.payload_size_bytes(); + + if (packet.payload_size_bytes() % 4 != 0 || + packet.payload_size_bytes() < kSenderSsrcLength + kTimestampLength) { + return false; + } + + SetSenderSsrc(ByteReader::ReadBigEndian(payload)); + payload += 4; + + report_timestamp_compact_ntp_ = + ByteReader::ReadBigEndian(payload_end - 4); + payload_end -= 4; + + while (payload + kHeaderPerMediaSssrcLength < payload_end) { + uint32_t ssrc = ByteReader::ReadBigEndian(payload); + payload += 4; + + uint16_t base_seqno = ByteReader::ReadBigEndian(payload); + payload += 2; + uint16_t num_reports = ByteReader::ReadBigEndian(payload); + payload += 2; + + constexpr size_t kPerPacketLength = 2; + if (payload + kPerPacketLength * num_reports > payload_end) { + return false; + } + + for (int i = 0; i < num_reports; ++i) { + uint16_t packet_info = ByteReader::ReadBigEndian(payload); + payload += 2; + + uint16_t seq_no = base_seqno + i; + bool received = (packet_info & 0x8000); + packets_.push_back( + {.ssrc = ssrc, + .sequence_number = seq_no, + .arrival_time_offset = received ? AtoToTimeDelta(packet_info) + : TimeDelta::MinusInfinity(), + .ecn = ToEcnMarking(packet_info)}); + } + if (num_reports % 2) { + // 2 bytes padding + payload += 2; + } + } + return payload == payload_end; +} +} // namespace rtcp +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h b/modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h new file mode 100644 index 0000000000..137c3d1e16 --- /dev/null +++ b/modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_CONGESTION_CONTROL_FEEDBACK_H_ +#define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_CONGESTION_CONTROL_FEEDBACK_H_ + +#include +#include +#include + +#include "api/array_view.h" +#include "api/units/time_delta.h" +#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" +#include "rtc_base/network/ecn_marking.h" + +namespace webrtc { +namespace rtcp { + +// Congestion control feedback message as specified in +// https://www.rfc-editor.org/rfc/rfc8888.html +class CongestionControlFeedback : public Rtpfb { + public: + struct PacketInfo { + uint32_t ssrc = 0; + uint16_t sequence_number = 0; + // Time offset from report timestamp. Minus infinity if the packet has not + // been received. + TimeDelta arrival_time_offset = TimeDelta::MinusInfinity(); + EcnMarking ecn = EcnMarking::kNotEct; + }; + + static constexpr uint8_t kFeedbackMessageType = 11; + + // `Packets` MUST be sorted in sequence_number order per SSRC. There MUST not + // be missing sequence numbers between `Packets`. `Packets` MUST not include + // duplicate sequence numbers. + CongestionControlFeedback(std::vector packets, + uint32_t report_timestamp_compact_ntp); + CongestionControlFeedback() = default; + + bool Parse(const CommonHeader& packet); + + ArrayView packets() const { return packets_; } + + uint32_t report_timestamp_compact_ntp() const { + return report_timestamp_compact_ntp_; + } + + // Serialize the packet. + bool Create(uint8_t* packet, + size_t* position, + size_t max_length, + PacketReadyCallback callback) const override; + size_t BlockLength() const override; + + private: + std::vector packets_; + uint32_t report_timestamp_compact_ntp_ = 0; +}; + +} // namespace rtcp +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_CONGESTION_CONTROL_FEEDBACK_H_ diff --git a/modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback_unittest.cc new file mode 100644 index 0000000000..55e4aae926 --- /dev/null +++ b/modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback_unittest.cc @@ -0,0 +1,239 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" + +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/function_view.h" +#include "api/units/time_delta.h" +#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" +#include "rtc_base/buffer.h" +#include "rtc_base/logging.h" +#include "rtc_base/network/ecn_marking.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace rtcp { + +using ::testing::IsEmpty; + +// PacketInfo is equal after serializing-deserializing if members are equal +// except for arrival time offset that may differ because of conversion back and +// forth to CompactNtp. 
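To make the tolerance used below concrete: in the RFC 8888 wire format each per-packet metric block is 16 bits, a received flag (1 bit), the ECN marking (2 bits) and the arrival time offset (13 bits) counted in 1/1024-second ticks, so one serialize/parse round trip can move the offset by a little under a millisecond. A minimal sketch of that round trip, using hypothetical helper names (the real To13bitAto / AtoToTimeDelta helpers in the .cc above additionally handle clamping and the not-received encoding):

#include <cstdint>

// Encode a non-negative arrival time offset, given in seconds, into the
// 13-bit ATO field (clamping of out-of-range offsets is omitted here).
uint16_t EncodeAto(double offset_seconds) {
  return static_cast<uint16_t>(offset_seconds * 1024.0) & 0x1FFF;
}

// Decode the ATO field back to seconds. The result differs from the original
// offset by less than 1/1024 s, which is the tolerance PacketInfoEqual allows.
double DecodeAto(uint16_t packet_info) {
  return (packet_info & 0x1FFF) / 1024.0;
}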
+bool PacketInfoEqual(const CongestionControlFeedback::PacketInfo& a, + const CongestionControlFeedback::PacketInfo& b) { + bool arrival_time_offset_equal = + (a.arrival_time_offset.IsInfinite() && + b.arrival_time_offset.IsInfinite()) || + (a.arrival_time_offset.IsFinite() && b.arrival_time_offset.IsFinite() && + (a.arrival_time_offset - b.arrival_time_offset).Abs() < + TimeDelta::Seconds(1) / 1024); + + bool equal = a.ssrc == b.ssrc && a.sequence_number == b.sequence_number && + arrival_time_offset_equal && a.ecn == b.ecn; + RTC_LOG_IF(LS_INFO, !equal) + << " Not equal got ssrc: " << a.ssrc << ", seq: " << a.sequence_number + << " arrival_time_offset: " << a.arrival_time_offset.ms_or(-1) + << " ecn: " << a.ecn << " expected ssrc:" << b.ssrc + << ", seq: " << b.sequence_number + << " arrival_time_offset: " << b.arrival_time_offset.ms_or(-1) + << " ecn: " << b.ecn; + return equal; +} + +MATCHER_P(PacketInfoEqual, expected_vector, "") { + if (expected_vector.size() != arg.size()) { + RTC_LOG(LS_INFO) << " Wrong size, expected: " << expected_vector.size() + << " got: " << arg.size(); + return false; + } + for (size_t i = 0; i < expected_vector.size(); ++i) { + if (!PacketInfoEqual(arg[i], expected_vector[i])) { + return false; + } + } + return true; +} + +TEST(CongestionControlFeedbackTest, BlockLengthNoPackets) { + CongestionControlFeedback fb({}, /*compact_ntp_timestamp=*/1); + EXPECT_EQ(fb.BlockLength(), + /*common header */ 4u /*sender ssrc*/ + 4u + /*timestamp*/ 4u); +} + +TEST(CongestionControlFeedbackTest, BlockLengthTwoSsrcOnePacketEach) { + std::vector packets = { + {.ssrc = 1, .sequence_number = 1}, {.ssrc = 2, .sequence_number = 1}}; + + CongestionControlFeedback fb(std::move(packets), /*compact_ntp_timestamp=*/1); + EXPECT_EQ(fb.BlockLength(), + /*common header */ 4u + /*sender ssrc*/ + 4u + + /*timestamp*/ 4u + + /*per ssrc header*/ 2 * 8u + + /* padded packet info per ssrc*/ 2 * 4u); +} + +TEST(CongestionControlFeedbackTest, BlockLengthTwoSsrcTwoPacketsEach) { + std::vector packets = { + {.ssrc = 1, .sequence_number = 1}, + {.ssrc = 1, .sequence_number = 2}, + {.ssrc = 2, .sequence_number = 1}, + {.ssrc = 2, .sequence_number = 2}}; + + CongestionControlFeedback fb(std::move(packets), /*compact_ntp_timestamp=*/1); + EXPECT_EQ(fb.BlockLength(), + /*common header */ 4u + /*sender ssrc*/ + 4u + + /*timestamp*/ 4u + + /*per ssrc header*/ 2 * 8u + + /*packet info per ssrc*/ 2 * 4u); +} + +TEST(CongestionControlFeedbackTest, BlockLengthMissingPackets) { + std::vector packets = { + {.ssrc = 1, .sequence_number = 1}, + {.ssrc = 1, .sequence_number = 4}, + }; + + CongestionControlFeedback fb(std::move(packets), /*compact_ntp_timestamp=*/1); + EXPECT_EQ(fb.BlockLength(), + /*common header */ 4u + /*sender ssrc*/ + 4u + + /*timestamp*/ 4u + + /*per ssrc header*/ 1 * 8u + + /*packet info per ssrc*/ 2 * 4u); +} + +TEST(CongestionControlFeedbackTest, CreateReturnsTrueForBasicPacket) { + std::vector packets = { + {.ssrc = 1, + .sequence_number = 1, + .arrival_time_offset = TimeDelta::Millis(1)}, + {.ssrc = 2, + .sequence_number = 2, + .arrival_time_offset = TimeDelta::Millis(2)}}; + CongestionControlFeedback fb(std::move(packets), /*compact_ntp_timestamp=*/1); + + Buffer buf(fb.BlockLength()); + size_t position = 0; + FunctionView packet)> callback; + EXPECT_TRUE(fb.Create(buf.data(), &position, buf.capacity(), callback)); +} + +TEST(CongestionControlFeedbackTest, CanCreateAndParseWithoutPackets) { + const std::vector kPackets = {}; + uint32_t kCompactNtp = 1234; + CongestionControlFeedback 
fb(kPackets, kCompactNtp); + + Buffer buffer = fb.Build(); + CongestionControlFeedback parsed_fb; + CommonHeader header; + EXPECT_TRUE(header.Parse(buffer.data(), buffer.size())); + EXPECT_TRUE(parsed_fb.Parse(header)); + EXPECT_THAT(parsed_fb.packets(), IsEmpty()); + + EXPECT_EQ(parsed_fb.report_timestamp_compact_ntp(), kCompactNtp); + EXPECT_THAT(parsed_fb.packets(), PacketInfoEqual(kPackets)); +} + +TEST(CongestionControlFeedbackTest, CanCreateAndParsePacketsWithTwoSsrc) { + const std::vector kPackets = { + {.ssrc = 1, + .sequence_number = 1, + .arrival_time_offset = TimeDelta::Millis(1)}, + {.ssrc = 2, + .sequence_number = 1, + .arrival_time_offset = TimeDelta::Millis(3)}}; + uint32_t kCompactNtp = 1234; + CongestionControlFeedback fb(kPackets, kCompactNtp); + + Buffer buffer = fb.Build(); + CongestionControlFeedback parsed_fb; + CommonHeader header; + EXPECT_TRUE(header.Parse(buffer.data(), buffer.size())); + EXPECT_EQ(header.fmt(), CongestionControlFeedback::kFeedbackMessageType); + EXPECT_EQ(header.type(), Rtpfb::kPacketType); + EXPECT_TRUE(parsed_fb.Parse(header)); + + EXPECT_EQ(parsed_fb.report_timestamp_compact_ntp(), kCompactNtp); + EXPECT_THAT(parsed_fb.packets(), PacketInfoEqual(kPackets)); +} + +TEST(CongestionControlFeedbackTest, CanCreateAndParsePacketWithEcnCe) { + const std::vector kPackets = { + {.ssrc = 1, + .sequence_number = 1, + .arrival_time_offset = TimeDelta::Millis(1), + .ecn = EcnMarking::kCe}}; + uint32_t kCompactNtp = 1234; + CongestionControlFeedback fb(kPackets, kCompactNtp); + + Buffer buffer = fb.Build(); + CongestionControlFeedback parsed_fb; + CommonHeader header; + EXPECT_TRUE(header.Parse(buffer.data(), buffer.size())); + EXPECT_TRUE(parsed_fb.Parse(header)); + EXPECT_THAT(parsed_fb.packets(), PacketInfoEqual(kPackets)); +} + +TEST(CongestionControlFeedbackTest, CanCreateAndParsePacketWithEct1) { + const std::vector kPackets = { + {.ssrc = 1, + .sequence_number = 1, + .arrival_time_offset = TimeDelta::Millis(1), + .ecn = EcnMarking::kEct1}}; + uint32_t kCompactNtp = 1234; + CongestionControlFeedback fb(kPackets, kCompactNtp); + + Buffer buffer = fb.Build(); + CongestionControlFeedback parsed_fb; + CommonHeader header; + EXPECT_TRUE(header.Parse(buffer.data(), buffer.size())); + EXPECT_TRUE(parsed_fb.Parse(header)); + EXPECT_THAT(parsed_fb.packets(), PacketInfoEqual(kPackets)); +} + +TEST(CongestionControlFeedbackTest, CanCreateAndParseWithMissingPackets) { + const std::vector kPackets = { + {.ssrc = 1, + .sequence_number = 0xFFFE, + .arrival_time_offset = TimeDelta::Millis(1)}, + {.ssrc = 1, + .sequence_number = 0xFFFF, + // Packet lost + .arrival_time_offset = TimeDelta::MinusInfinity()}, + {.ssrc = 1, + .sequence_number = 0, + // Packet lost + .arrival_time_offset = TimeDelta::MinusInfinity()}, + {.ssrc = 1, + .sequence_number = 1, + .arrival_time_offset = TimeDelta::Millis(1)}}; + uint32_t kCompactNtp = 1234; + CongestionControlFeedback fb(kPackets, kCompactNtp); + + Buffer buffer = fb.Build(); + CongestionControlFeedback parsed_fb; + CommonHeader header; + EXPECT_TRUE(header.Parse(buffer.data(), buffer.size())); + EXPECT_TRUE(parsed_fb.Parse(header)); + EXPECT_THAT(parsed_fb.packets(), PacketInfoEqual(kPackets)); +} + +} // namespace rtcp +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtcp_packet/dlrr.cc b/modules/rtp_rtcp/source/rtcp_packet/dlrr.cc index 6863def2fe..87ac475ac7 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/dlrr.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/dlrr.cc @@ -10,6 +10,9 @@ #include 
"modules/rtp_rtcp/source/rtcp_packet/dlrr.h" +#include +#include + #include "modules/rtp_rtcp/source/byte_io.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -77,7 +80,7 @@ void Dlrr::Create(uint8_t* buffer) const { buffer[0] = kBlockType; buffer[1] = kReserved; ByteWriter::WriteBigEndian( - &buffer[2], rtc::dchecked_cast(3 * sub_blocks_.size())); + &buffer[2], dchecked_cast(3 * sub_blocks_.size())); // Create sub blocks. uint8_t* write_at = buffer + kBlockHeaderLength; for (const ReceiveTimeInfo& sub_block : sub_blocks_) { diff --git a/modules/rtp_rtcp/source/rtcp_packet/dlrr.h b/modules/rtp_rtcp/source/rtcp_packet/dlrr.h index ad91dfdcc6..d9c4123200 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/dlrr.h +++ b/modules/rtp_rtcp/source/rtcp_packet/dlrr.h @@ -42,7 +42,7 @@ inline bool operator!=(const ReceiveTimeInfo& lhs, const ReceiveTimeInfo& rhs) { // DLRR Report Block: Delay since the Last Receiver Report (RFC 3611). class Dlrr { public: - static const uint8_t kBlockType = 5; + static constexpr uint8_t kBlockType = 5; Dlrr(); Dlrr(const Dlrr& other); @@ -70,8 +70,8 @@ class Dlrr { const std::vector& sub_blocks() const { return sub_blocks_; } private: - static const size_t kBlockHeaderLength = 4; - static const size_t kSubBlockLength = 12; + static constexpr size_t kBlockHeaderLength = 4; + static constexpr size_t kSubBlockLength = 12; std::vector sub_blocks_; }; diff --git a/modules/rtp_rtcp/source/rtcp_packet/dlrr_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/dlrr_unittest.cc index 408d0011b8..751df2d2cc 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/dlrr_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/dlrr_unittest.cc @@ -10,6 +10,10 @@ #include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" +#include +#include +#include + #include "modules/rtp_rtcp/source/byte_io.h" #include "test/gtest.h" diff --git a/modules/rtp_rtcp/source/rtcp_packet/extended_reports.cc b/modules/rtp_rtcp/source/rtcp_packet/extended_reports.cc index ce57bd5a88..52be43bf6c 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/extended_reports.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/extended_reports.cc @@ -10,17 +10,21 @@ #include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" +#include +#include +#include #include #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" +#include "modules/rtp_rtcp/source/rtcp_packet/rrtr.h" +#include "modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { namespace rtcp { -constexpr uint8_t ExtendedReports::kPacketType; -constexpr size_t ExtendedReports::kMaxNumberOfDlrrItems; // From RFC 3611: RTP Control Protocol Extended Reports (RTCP XR). 
// // Format for XR packets: @@ -59,7 +63,7 @@ bool ExtendedReports::Parse(const CommonHeader& packet) { SetSenderSsrc(ByteReader::ReadBigEndian(packet.payload())); rrtr_block_.reset(); dlrr_block_.ClearItems(); - target_bitrate_ = absl::nullopt; + target_bitrate_ = std::nullopt; const uint8_t* current_block = packet.payload() + kXrBaseLength; const uint8_t* const packet_end = diff --git a/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h b/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h index 6c804bbc7b..4aa18211b7 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h +++ b/modules/rtp_rtcp/source/rtcp_packet/extended_reports.h @@ -11,9 +11,10 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_EXTENDED_REPORTS_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_EXTENDED_REPORTS_H_ -#include +#include +#include +#include -#include "absl/types/optional.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" #include "modules/rtp_rtcp/source/rtcp_packet/rrtr.h" @@ -40,9 +41,9 @@ class ExtendedReports : public RtcpPacket { bool AddDlrrItem(const ReceiveTimeInfo& time_info); void SetTargetBitrate(const TargetBitrate& target_bitrate); - const absl::optional& rrtr() const { return rrtr_block_; } + const std::optional& rrtr() const { return rrtr_block_; } const Dlrr& dlrr() const { return dlrr_block_; } - const absl::optional& target_bitrate() const { + const std::optional& target_bitrate() const { return target_bitrate_; } @@ -64,9 +65,9 @@ class ExtendedReports : public RtcpPacket { void ParseDlrrBlock(const uint8_t* block, uint16_t block_length); void ParseTargetBitrateBlock(const uint8_t* block, uint16_t block_length); - absl::optional rrtr_block_; + std::optional rrtr_block_; Dlrr dlrr_block_; // Dlrr without items treated same as no dlrr block. 
- absl::optional target_bitrate_; + std::optional target_bitrate_; }; } // namespace rtcp } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtcp_packet/extended_reports_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/extended_reports_unittest.cc index 3d9a2a3408..0161e40638 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/extended_reports_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/extended_reports_unittest.cc @@ -10,7 +10,14 @@ #include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" +#include +#include + +#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" +#include "modules/rtp_rtcp/source/rtcp_packet/rrtr.h" +#include "rtc_base/buffer.h" #include "rtc_base/random.h" +#include "system_wrappers/include/ntp_time.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -70,7 +77,7 @@ TEST_F(RtcpPacketExtendedReportsTest, CreateWithoutReportBlocks) { ExtendedReports xr; xr.SetSenderSsrc(kSenderSsrc); - rtc::Buffer packet = xr.Build(); + Buffer packet = xr.Build(); EXPECT_THAT(make_tuple(packet.data(), packet.size()), ElementsAreArray(kEmptyPacket)); @@ -89,7 +96,7 @@ TEST_F(RtcpPacketExtendedReportsTest, CreateAndParseWithRrtrBlock) { ExtendedReports xr; xr.SetSenderSsrc(kSenderSsrc); xr.SetRrtr(kRrtr); - rtc::Buffer packet = xr.Build(); + Buffer packet = xr.Build(); ExtendedReports mparsed; EXPECT_TRUE(test::ParseSinglePacket(packet, &mparsed)); @@ -105,7 +112,7 @@ TEST_F(RtcpPacketExtendedReportsTest, CreateAndParseWithDlrrWithOneSubBlock) { xr.SetSenderSsrc(kSenderSsrc); xr.AddDlrrItem(kTimeInfo); - rtc::Buffer packet = xr.Build(); + Buffer packet = xr.Build(); ExtendedReports mparsed; EXPECT_TRUE(test::ParseSinglePacket(packet, &mparsed)); @@ -123,7 +130,7 @@ TEST_F(RtcpPacketExtendedReportsTest, CreateAndParseWithDlrrWithTwoSubBlocks) { xr.AddDlrrItem(kTimeInfo1); xr.AddDlrrItem(kTimeInfo2); - rtc::Buffer packet = xr.Build(); + Buffer packet = xr.Build(); ExtendedReports mparsed; EXPECT_TRUE(test::ParseSinglePacket(packet, &mparsed)); @@ -154,7 +161,7 @@ TEST_F(RtcpPacketExtendedReportsTest, CreateAndParseWithMaximumReportBlocks) { for (size_t i = 0; i < ExtendedReports::kMaxNumberOfDlrrItems; ++i) xr.AddDlrrItem(Rand()); - rtc::Buffer packet = xr.Build(); + Buffer packet = xr.Build(); ExtendedReports mparsed; EXPECT_TRUE(test::ParseSinglePacket(packet, &mparsed)); diff --git a/modules/rtp_rtcp/source/rtcp_packet/fir.cc b/modules/rtp_rtcp/source/rtcp_packet/fir.cc index fd4a4c947a..a3ad832f43 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/fir.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/fir.cc @@ -10,14 +10,17 @@ #include "modules/rtp_rtcp/source/rtcp_packet/fir.h" +#include +#include + #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/psfb.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { namespace rtcp { -constexpr uint8_t Fir::kFeedbackMessageType; // RFC 4585: Feedback format. 
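A note on deletions like the "-constexpr uint8_t Fir::kFeedbackMessageType;" line just above, which recur throughout this patch (Pli, Psfb, Nack, Rtpfb, Sdes, ReceiverReport, SenderReport, Tmmbn, Tmmbr, TransportFeedback, among others): since C++17 a static constexpr data member is implicitly inline, so the separate out-of-class definition that earlier standards required when the member was ODR-used is redundant and can simply be dropped. The static const to static constexpr changes in the headers and the absl::optional to std::optional moves appear to be part of the same general shift onto the C++17 standard library. A minimal illustration with a hypothetical class:

#include <cstdint>

struct SomeRtcpPacket {
  // Implicitly an inline variable since C++17; no .cc definition needed.
  static constexpr uint8_t kFeedbackMessageType = 4;
};

// Pre-C++17, ODR-using the member (for example taking its address) also
// required an out-of-line definition in the .cc file:
//   constexpr uint8_t SomeRtcpPacket::kFeedbackMessageType;
// That is the kind of definition this patch removes.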
// Common packet format: // diff --git a/modules/rtp_rtcp/source/rtcp_packet/fir.h b/modules/rtp_rtcp/source/rtcp_packet/fir.h index 383dc96114..452b8572d3 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/fir.h +++ b/modules/rtp_rtcp/source/rtcp_packet/fir.h @@ -11,6 +11,8 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_FIR_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_FIR_H_ +#include +#include #include #include "modules/rtp_rtcp/source/rtcp_packet/psfb.h" diff --git a/modules/rtp_rtcp/source/rtcp_packet/fir_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/fir_unittest.cc index 01593e12ba..fe3e70f7af 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/fir_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/fir_unittest.cc @@ -10,6 +10,9 @@ #include "modules/rtp_rtcp/source/rtcp_packet/fir.h" +#include + +#include "rtc_base/buffer.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -50,7 +53,7 @@ TEST(RtcpPacketFirTest, Create) { fir.SetSenderSsrc(kSenderSsrc); fir.AddRequestTo(kRemoteSsrc, kSeqNr); - rtc::Buffer packet = fir.Build(); + Buffer packet = fir.Build(); EXPECT_THAT(make_tuple(packet.data(), packet.size()), ElementsAreArray(kPacket)); @@ -62,7 +65,7 @@ TEST(RtcpPacketFirTest, TwoFciEntries) { fir.AddRequestTo(kRemoteSsrc, kSeqNr); fir.AddRequestTo(kRemoteSsrc + 1, kSeqNr + 1); - rtc::Buffer packet = fir.Build(); + Buffer packet = fir.Build(); Fir parsed; EXPECT_TRUE(test::ParseSinglePacket(packet, &parsed)); diff --git a/modules/rtp_rtcp/source/rtcp_packet/loss_notification.cc b/modules/rtp_rtcp/source/rtcp_packet/loss_notification.cc index 0817846f95..f39237e4c6 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/loss_notification.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/loss_notification.cc @@ -10,10 +10,13 @@ #include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" +#include +#include + #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/psfb.h" #include "rtc_base/checks.h" -#include "rtc_base/logging.h" namespace webrtc { namespace rtcp { diff --git a/modules/rtp_rtcp/source/rtcp_packet/loss_notification.h b/modules/rtp_rtcp/source/rtcp_packet/loss_notification.h index 0f70cf75c3..6cbdcf962c 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/loss_notification.h +++ b/modules/rtp_rtcp/source/rtcp_packet/loss_notification.h @@ -11,6 +11,9 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_LOSS_NOTIFICATION_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_LOSS_NOTIFICATION_H_ +#include +#include + #include "absl/base/attributes.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" #include "modules/rtp_rtcp/source/rtcp_packet/psfb.h" diff --git a/modules/rtp_rtcp/source/rtcp_packet/loss_notification_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/loss_notification_unittest.cc index c38e7f4438..b7b9c54acf 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/loss_notification_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/loss_notification_unittest.cc @@ -10,6 +10,11 @@ #include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" +#include +#include + +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -58,7 +63,7 @@ TEST(RtcpPacketLossNotificationTest, CreateProducesExpectedWireFormat) { ASSERT_TRUE( loss_notification.Set(kLastDecoded, kLastReceived, kDecodabilityFlag)); - rtc::Buffer packet = loss_notification.Build(); 
+ Buffer packet = loss_notification.Build(); EXPECT_THAT(make_tuple(packet.data(), packet.size()), ElementsAreArray(kPacket)); diff --git a/modules/rtp_rtcp/source/rtcp_packet/nack.cc b/modules/rtp_rtcp/source/rtcp_packet/nack.cc index 6fe7eade62..e7966226d3 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/nack.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/nack.cc @@ -11,8 +11,10 @@ #include "modules/rtp_rtcp/source/rtcp_packet/nack.h" #include +#include #include #include +#include #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" @@ -21,8 +23,6 @@ namespace webrtc { namespace rtcp { -constexpr uint8_t Nack::kFeedbackMessageType; -constexpr size_t Nack::kNackItemLength; // RFC 4585: Feedback format. // // Common packet format: @@ -103,8 +103,7 @@ bool Nack::Create(uint8_t* packet, size_t payload_size_bytes = kCommonFeedbackLength + (num_nack_fields * kNackItemLength); - size_t payload_size_32bits = - rtc::CheckedDivExact(payload_size_bytes, 4); + size_t payload_size_32bits = CheckedDivExact(payload_size_bytes, 4); CreateHeader(kFeedbackMessageType, kPacketType, payload_size_32bits, packet, index); diff --git a/modules/rtp_rtcp/source/rtcp_packet/nack.h b/modules/rtp_rtcp/source/rtcp_packet/nack.h index 9153733fb9..f4288c23e6 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/nack.h +++ b/modules/rtp_rtcp/source/rtcp_packet/nack.h @@ -11,6 +11,8 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_NACK_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_NACK_H_ +#include +#include #include #include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" diff --git a/modules/rtp_rtcp/source/rtcp_packet/nack_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/nack_unittest.cc index aabae0dc48..de7b942ddf 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/nack_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/nack_unittest.cc @@ -10,6 +10,12 @@ #include "modules/rtp_rtcp/source/rtcp_packet/nack.h" +#include +#include +#include + +#include "api/array_view.h" +#include "rtc_base/buffer.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -29,12 +35,11 @@ using ::webrtc::rtcp::Nack; constexpr uint32_t kSenderSsrc = 0x12345678; constexpr uint32_t kRemoteSsrc = 0x23456789; +constexpr uint8_t kRtpVersionBits = 2 << 6; constexpr uint16_t kList[] = {0, 1, 3, 8, 16}; -constexpr size_t kListLength = sizeof(kList) / sizeof(kList[0]); -constexpr uint8_t kVersionBits = 2 << 6; // clang-format off constexpr uint8_t kPacket[] = { - kVersionBits | Nack::kFeedbackMessageType, Nack::kPacketType, 0, 3, + kRtpVersionBits | Nack::kFeedbackMessageType, Nack::kPacketType, 0, 3, 0x12, 0x34, 0x56, 0x78, 0x23, 0x45, 0x67, 0x89, 0x00, 0x00, 0x80, 0x85}; @@ -43,7 +48,7 @@ constexpr uint16_t kWrapList[] = {0xffdc, 0xffec, 0xfffe, 0xffff, 0x0000, 0x0001, 0x0003, 0x0014, 0x0064}; constexpr size_t kWrapListLength = sizeof(kWrapList) / sizeof(kWrapList[0]); constexpr uint8_t kWrapPacket[] = { - kVersionBits | Nack::kFeedbackMessageType, Nack::kPacketType, 0, 6, + kRtpVersionBits | Nack::kFeedbackMessageType, Nack::kPacketType, 0, 6, 0x12, 0x34, 0x56, 0x78, 0x23, 0x45, 0x67, 0x89, 0xff, 0xdc, 0x80, 0x00, @@ -51,7 +56,7 @@ constexpr uint8_t kWrapPacket[] = { 0x00, 0x14, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00}; constexpr uint8_t kTooSmallPacket[] = { - kVersionBits | Nack::kFeedbackMessageType, Nack::kPacketType, 0, 2, + kRtpVersionBits | Nack::kFeedbackMessageType, Nack::kPacketType, 0, 2, 0x12, 0x34, 0x56, 0x78, 0x23, 0x45, 0x67, 0x89}; // clang-format on @@ 
-61,9 +66,9 @@ TEST(RtcpPacketNackTest, Create) { Nack nack; nack.SetSenderSsrc(kSenderSsrc); nack.SetMediaSsrc(kRemoteSsrc); - nack.SetPacketIds(kList, kListLength); + nack.SetPacketIds(kList, std::size(kList)); - rtc::Buffer packet = nack.Build(); + Buffer packet = nack.Build(); EXPECT_THAT(make_tuple(packet.data(), packet.size()), ElementsAreArray(kPacket)); @@ -85,7 +90,7 @@ TEST(RtcpPacketNackTest, CreateWrap) { nack.SetMediaSsrc(kRemoteSsrc); nack.SetPacketIds(kWrapList, kWrapListLength); - rtc::Buffer packet = nack.Build(); + Buffer packet = nack.Build(); EXPECT_THAT(make_tuple(packet.data(), packet.size()), ElementsAreArray(kWrapPacket)); @@ -110,7 +115,7 @@ TEST(RtcpPacketNackTest, BadOrder) { nack.SetMediaSsrc(kRemoteSsrc); nack.SetPacketIds(kUnorderedList, kUnorderedListLength); - rtc::Buffer packet = nack.Build(); + Buffer packet = nack.Build(); Nack parsed; EXPECT_TRUE(test::ParseSinglePacket(packet, &parsed)); @@ -122,24 +127,23 @@ TEST(RtcpPacketNackTest, BadOrder) { TEST(RtcpPacketNackTest, CreateFragmented) { Nack nack; - const uint16_t kList[] = {1, 100, 200, 300, 400}; - const uint16_t kListLength = sizeof(kList) / sizeof(kList[0]); + const uint16_t kFragmentedList[] = {1, 100, 200, 300, 400}; nack.SetSenderSsrc(kSenderSsrc); nack.SetMediaSsrc(kRemoteSsrc); - nack.SetPacketIds(kList, kListLength); + nack.SetPacketIds(kFragmentedList, std::size(kFragmentedList)); const size_t kBufferSize = 12 + (3 * 4); // Fits common header + 3 nack items - MockFunction)> callback; + MockFunction)> callback; EXPECT_CALL(callback, Call(_)) - .WillOnce(Invoke([&](rtc::ArrayView packet) { + .WillOnce(Invoke([&](ArrayView packet) { Nack nack; EXPECT_TRUE(test::ParseSinglePacket(packet, &nack)); EXPECT_EQ(kSenderSsrc, nack.sender_ssrc()); EXPECT_EQ(kRemoteSsrc, nack.media_ssrc()); EXPECT_THAT(nack.packet_ids(), ElementsAre(1, 100, 200)); })) - .WillOnce(Invoke([&](rtc::ArrayView packet) { + .WillOnce(Invoke([&](ArrayView packet) { Nack nack; EXPECT_TRUE(test::ParseSinglePacket(packet, &nack)); EXPECT_EQ(kSenderSsrc, nack.sender_ssrc()); @@ -151,14 +155,14 @@ TEST(RtcpPacketNackTest, CreateFragmented) { } TEST(RtcpPacketNackTest, CreateFailsWithTooSmallBuffer) { - const uint16_t kList[] = {1}; + const uint16_t kSmallList[] = {1}; const size_t kMinNackBlockSize = 16; Nack nack; nack.SetSenderSsrc(kSenderSsrc); nack.SetMediaSsrc(kRemoteSsrc); - nack.SetPacketIds(kList, 1); + nack.SetPacketIds(kSmallList, std::size(kSmallList)); - MockFunction)> callback; + MockFunction)> callback; EXPECT_CALL(callback, Call(_)).Times(0); EXPECT_FALSE(nack.Build(kMinNackBlockSize - 1, callback.AsStdFunction())); } diff --git a/modules/rtp_rtcp/source/rtcp_packet/pli.cc b/modules/rtp_rtcp/source/rtcp_packet/pli.cc index 5b41aa5c2c..4d29ce4ce7 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/pli.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/pli.cc @@ -10,13 +10,15 @@ #include "modules/rtp_rtcp/source/rtcp_packet/pli.h" +#include +#include + #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { namespace rtcp { -constexpr uint8_t Pli::kFeedbackMessageType; // RFC 4585: Feedback format. 
// // Common packet format: diff --git a/modules/rtp_rtcp/source/rtcp_packet/pli.h b/modules/rtp_rtcp/source/rtcp_packet/pli.h index b9b9c45a9c..0ccb09f3e3 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/pli.h +++ b/modules/rtp_rtcp/source/rtcp_packet/pli.h @@ -10,6 +10,9 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_PLI_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_PLI_H_ +#include +#include + #include "modules/rtp_rtcp/source/rtcp_packet/psfb.h" namespace webrtc { diff --git a/modules/rtp_rtcp/source/rtcp_packet/pli_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/pli_unittest.cc index c971e22bc1..64d799e78a 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/pli_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/pli_unittest.cc @@ -10,6 +10,9 @@ #include "modules/rtp_rtcp/source/rtcp_packet/pli.h" +#include + +#include "rtc_base/buffer.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -41,7 +44,7 @@ TEST(RtcpPacketPliTest, Create) { pli.SetSenderSsrc(kSenderSsrc); pli.SetMediaSsrc(kRemoteSsrc); - rtc::Buffer packet = pli.Build(); + Buffer packet = pli.Build(); EXPECT_THAT(make_tuple(packet.data(), packet.size()), ElementsAreArray(kPacket)); diff --git a/modules/rtp_rtcp/source/rtcp_packet/psfb.cc b/modules/rtp_rtcp/source/rtcp_packet/psfb.cc index 384d8ba811..10336385fc 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/psfb.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/psfb.cc @@ -10,13 +10,12 @@ #include "modules/rtp_rtcp/source/rtcp_packet/psfb.h" +#include + #include "modules/rtp_rtcp/source/byte_io.h" namespace webrtc { namespace rtcp { -constexpr uint8_t Psfb::kPacketType; -constexpr uint8_t Psfb::kAfbMessageType; -constexpr size_t Psfb::kCommonFeedbackLength; // RFC 4585: Feedback format. // // Common packet format: diff --git a/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.cc b/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.cc index 8563c28373..c4c0946463 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.cc @@ -10,13 +10,15 @@ #include "modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h" +#include +#include + #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { namespace rtcp { -constexpr uint8_t RapidResyncRequest::kFeedbackMessageType; // RFC 4585: Feedback format. // Rapid Resynchronisation Request (draft-perkins-avt-rapid-rtp-sync-03). 
// diff --git a/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h b/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h index 1955b98f5c..b48e7ed0bd 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h +++ b/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h @@ -11,6 +11,9 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RAPID_RESYNC_REQUEST_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RAPID_RESYNC_REQUEST_H_ +#include +#include + #include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" namespace webrtc { diff --git a/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request_unittest.cc index d0e40fd83d..450b7cd075 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request_unittest.cc @@ -10,6 +10,9 @@ #include "modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h" +#include + +#include "rtc_base/buffer.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -41,7 +44,7 @@ TEST(RtcpPacketRapidResyncRequestTest, Create) { rrr.SetSenderSsrc(kSenderSsrc); rrr.SetMediaSsrc(kRemoteSsrc); - rtc::Buffer packet = rrr.Build(); + Buffer packet = rrr.Build(); EXPECT_THAT(make_tuple(packet.data(), packet.size()), ElementsAreArray(kPacket)); diff --git a/modules/rtp_rtcp/source/rtcp_packet/receiver_report.cc b/modules/rtp_rtcp/source/rtcp_packet/receiver_report.cc index 185011dff1..eb1c014a40 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/receiver_report.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/receiver_report.cc @@ -10,17 +10,19 @@ #include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" +#include +#include #include +#include #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { namespace rtcp { -constexpr uint8_t ReceiverReport::kPacketType; -constexpr size_t ReceiverReport::kMaxNumberOfReportBlocks; // RTCP receiver report (RFC 3550). 
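For context on kMaxNumberOfReportBlocks, whose out-of-line definition is removed above: the reception report count (RC) field in the RTCP header is only 5 bits wide, so a single receiver report can never carry more than 31 report blocks. A small illustrative constant (the name here is for illustration, not from the patch):

// V (2 bits) | P (1 bit) | RC (5 bits) make up the first RTCP header byte,
// so the report count saturates at 0b11111.
constexpr int kMaxReportBlocksPerRtcpPacket = (1 << 5) - 1;  // == 31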
// // 0 1 2 3 diff --git a/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h b/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h index b9c1c466c7..4ea4126ac1 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h +++ b/modules/rtp_rtcp/source/rtcp_packet/receiver_report.h @@ -50,7 +50,7 @@ class ReceiverReport : public RtcpPacket { PacketReadyCallback callback) const override; private: - static const size_t kRrBaseLength = 4; + static constexpr size_t kRrBaseLength = 4; std::vector report_blocks_; }; diff --git a/modules/rtp_rtcp/source/rtcp_packet/receiver_report_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/receiver_report_unittest.cc index 47f8eb13cb..98ff4d2989 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/receiver_report_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/receiver_report_unittest.cc @@ -10,8 +10,13 @@ #include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" +#include +#include #include +#include +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" +#include "rtc_base/buffer.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -59,7 +64,7 @@ TEST(RtcpPacketReceiverReportTest, ParseWithOneReportBlock) { } TEST(RtcpPacketReceiverReportTest, ParseFailsOnIncorrectSize) { - rtc::Buffer damaged_packet(kPacket); + Buffer damaged_packet(kPacket); damaged_packet[0]++; // Damage the packet: increase count field. ReceiverReport rr; EXPECT_FALSE(test::ParseSinglePacket(damaged_packet, &rr)); @@ -78,7 +83,7 @@ TEST(RtcpPacketReceiverReportTest, CreateWithOneReportBlock) { rb.SetDelayLastSr(kDelayLastSr); rr.AddReportBlock(rb); - rtc::Buffer raw = rr.Build(); + Buffer raw = rr.Build(); EXPECT_THAT(make_tuple(raw.data(), raw.size()), ElementsAreArray(kPacket)); } @@ -87,7 +92,7 @@ TEST(RtcpPacketReceiverReportTest, CreateAndParseWithoutReportBlocks) { ReceiverReport rr; rr.SetSenderSsrc(kSenderSsrc); - rtc::Buffer raw = rr.Build(); + Buffer raw = rr.Build(); ReceiverReport parsed; EXPECT_TRUE(test::ParseSinglePacket(raw, &parsed)); @@ -106,7 +111,7 @@ TEST(RtcpPacketReceiverReportTest, CreateAndParseWithTwoReportBlocks) { EXPECT_TRUE(rr.AddReportBlock(rb1)); EXPECT_TRUE(rr.AddReportBlock(rb2)); - rtc::Buffer raw = rr.Build(); + Buffer raw = rr.Build(); ReceiverReport parsed; EXPECT_TRUE(test::ParseSinglePacket(raw, &parsed)); diff --git a/modules/rtp_rtcp/source/rtcp_packet/remb.cc b/modules/rtp_rtcp/source/rtcp_packet/remb.cc index 1389ca7836..e178438a5a 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/remb.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/remb.cc @@ -10,11 +10,14 @@ #include "modules/rtp_rtcp/source/rtcp_packet/remb.h" +#include #include #include +#include #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/psfb.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" diff --git a/modules/rtp_rtcp/source/rtcp_packet/remb.h b/modules/rtp_rtcp/source/rtcp_packet/remb.h index b7075c0f23..c2a3d65957 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/remb.h +++ b/modules/rtp_rtcp/source/rtcp_packet/remb.h @@ -11,6 +11,8 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_REMB_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_REMB_H_ +#include +#include #include #include "modules/rtp_rtcp/source/rtcp_packet/psfb.h" diff --git a/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc index c439d9c5f6..34fe54cceb 100644 --- 
a/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/remb_unittest.cc @@ -10,6 +10,13 @@ #include "modules/rtp_rtcp/source/rtcp_packet/remb.h" +#include +#include +#include +#include +#include + +#include "rtc_base/buffer.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -39,7 +46,7 @@ TEST(RtcpPacketRembTest, Create) { std::vector(std::begin(kRemoteSsrcs), std::end(kRemoteSsrcs))); remb.SetBitrateBps(kBitrateBps); - rtc::Buffer packet = remb.Build(); + Buffer packet = remb.Build(); EXPECT_THAT(make_tuple(packet.data(), packet.size()), ElementsAreArray(kPacket)); @@ -59,7 +66,7 @@ TEST(RtcpPacketRembTest, CreateAndParseWithoutSsrcs) { Remb remb; remb.SetSenderSsrc(kSenderSsrc); remb.SetBitrateBps(kBitrateBps); - rtc::Buffer packet = remb.Build(); + Buffer packet = remb.Build(); Remb parsed; EXPECT_TRUE(test::ParseSinglePacket(packet, &parsed)); @@ -71,7 +78,7 @@ TEST(RtcpPacketRembTest, CreateAndParseWithoutSsrcs) { TEST(RtcpPacketRembTest, CreateAndParse64bitBitrate) { Remb remb; remb.SetBitrateBps(kBitrateBps64bit); - rtc::Buffer packet = remb.Build(); + Buffer packet = remb.Build(); Remb parsed; EXPECT_TRUE(test::ParseSinglePacket(packet, &parsed)); diff --git a/modules/rtp_rtcp/source/rtcp_packet/remote_estimate.cc b/modules/rtp_rtcp/source/rtcp_packet/remote_estimate.cc index ca59791248..30c02cf319 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/remote_estimate.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/remote_estimate.cc @@ -9,14 +9,19 @@ */ #include "modules/rtp_rtcp/source/rtcp_packet/remote_estimate.h" -#include -#include -#include +#include +#include +#include #include #include +#include "api/array_view.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" #include "modules/rtp_rtcp/source/byte_io.h" -#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/app.h" +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { @@ -79,10 +84,10 @@ class RemoteEstimateSerializerImpl : public RemoteEstimateSerializer { explicit RemoteEstimateSerializerImpl(std::vector fields) : fields_(fields) {} - rtc::Buffer Serialize(const NetworkStateEstimate& src) const override { + Buffer Serialize(const NetworkStateEstimate& src) const override { size_t max_size = fields_.size() * kFieldSize; size_t size = 0; - rtc::Buffer buf(max_size); + Buffer buf(max_size); for (const auto& field : fields_) { if (field.Write(src, buf.data() + size)) { size += kFieldSize; @@ -92,7 +97,7 @@ class RemoteEstimateSerializerImpl : public RemoteEstimateSerializer { return buf; } - bool Parse(rtc::ArrayView src, + bool Parse(ArrayView src, NetworkStateEstimate* target) const override { if (src.size() % kFieldSize != 0) return false; diff --git a/modules/rtp_rtcp/source/rtcp_packet/remote_estimate.h b/modules/rtp_rtcp/source/rtcp_packet/remote_estimate.h index 3400274568..4c8c29405f 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/remote_estimate.h +++ b/modules/rtp_rtcp/source/rtcp_packet/remote_estimate.h @@ -10,11 +10,13 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_REMOTE_ESTIMATE_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_REMOTE_ESTIMATE_H_ -#include -#include +#include +#include "api/array_view.h" #include "api/transport/network_types.h" +#include "api/units/time_delta.h" #include "modules/rtp_rtcp/source/rtcp_packet/app.h" +#include "rtc_base/buffer.h" namespace webrtc { namespace rtcp { @@ 
-22,9 +24,9 @@ namespace rtcp { class CommonHeader; class RemoteEstimateSerializer { public: - virtual bool Parse(rtc::ArrayView src, + virtual bool Parse(ArrayView src, NetworkStateEstimate* target) const = 0; - virtual rtc::Buffer Serialize(const NetworkStateEstimate& src) const = 0; + virtual Buffer Serialize(const NetworkStateEstimate& src) const = 0; virtual ~RemoteEstimateSerializer() = default; }; diff --git a/modules/rtp_rtcp/source/rtcp_packet/remote_estimate_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/remote_estimate_unittest.cc index bf0e0e2610..52b57b4412 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/remote_estimate_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/remote_estimate_unittest.cc @@ -9,6 +9,9 @@ */ #include "modules/rtp_rtcp/source/rtcp_packet/remote_estimate.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "rtc_base/buffer.h" #include "test/gtest.h" namespace webrtc { @@ -17,7 +20,7 @@ TEST(RemoteEstimateTest, EncodesCapacityBounds) { NetworkStateEstimate src; src.link_capacity_lower = DataRate::KilobitsPerSec(10); src.link_capacity_upper = DataRate::KilobitsPerSec(1000000); - rtc::Buffer data = GetRemoteEstimateSerializer()->Serialize(src); + Buffer data = GetRemoteEstimateSerializer()->Serialize(src); NetworkStateEstimate dst; EXPECT_TRUE(GetRemoteEstimateSerializer()->Parse(data, &dst)); EXPECT_EQ(src.link_capacity_lower, dst.link_capacity_lower); @@ -30,7 +33,7 @@ TEST(RemoteEstimateTest, ExpandsToPlusInfinity) { // with kbps resolution. We expected it be represented as plus infinity. src.link_capacity_lower = DataRate::KilobitsPerSec(2 << 24); src.link_capacity_upper = DataRate::PlusInfinity(); - rtc::Buffer data = GetRemoteEstimateSerializer()->Serialize(src); + Buffer data = GetRemoteEstimateSerializer()->Serialize(src); NetworkStateEstimate dst; EXPECT_TRUE(GetRemoteEstimateSerializer()->Parse(data, &dst)); @@ -42,7 +45,7 @@ TEST(RemoteEstimateTest, DoesNotEncodeNegative) { NetworkStateEstimate src; src.link_capacity_lower = DataRate::MinusInfinity(); src.link_capacity_upper = DataRate::MinusInfinity(); - rtc::Buffer data = GetRemoteEstimateSerializer()->Serialize(src); + Buffer data = GetRemoteEstimateSerializer()->Serialize(src); // Since MinusInfinity can't be represented, the buffer should be empty. EXPECT_EQ(data.size(), 0u); NetworkStateEstimate dst; diff --git a/modules/rtp_rtcp/source/rtcp_packet/report_block.cc b/modules/rtp_rtcp/source/rtcp_packet/report_block.cc index e7e92d2bf1..35676cb17d 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/report_block.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/report_block.cc @@ -10,6 +10,9 @@ #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" +#include +#include + #include "modules/rtp_rtcp/source/byte_io.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" diff --git a/modules/rtp_rtcp/source/rtcp_packet/report_block.h b/modules/rtp_rtcp/source/rtcp_packet/report_block.h index b49219eceb..9c94411f6b 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/report_block.h +++ b/modules/rtp_rtcp/source/rtcp_packet/report_block.h @@ -22,7 +22,7 @@ namespace rtcp { // RFC 3550 section 6.4.1. 
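As a reminder of why kLength below is 24: an RFC 3550 report block is a fixed-size structure made up of the source SSRC (4 bytes), fraction lost (1), cumulative number of packets lost (3), extended highest sequence number received (4), interarrival jitter (4), last SR timestamp (4) and delay since last SR (4). A one-line sanity check of that arithmetic:

static_assert(4 + 1 + 3 + 4 + 4 + 4 + 4 == 24,
              "RFC 3550 report block fields sum to 24 bytes");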
class ReportBlock { public: - static const size_t kLength = 24; + static constexpr size_t kLength = 24; ReportBlock(); ~ReportBlock() {} diff --git a/modules/rtp_rtcp/source/rtcp_packet/report_block_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/report_block_unittest.cc index 11031a059a..68399f0c0c 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/report_block_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/report_block_unittest.cc @@ -10,7 +10,9 @@ #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" -#include +#include +#include +#include #include "rtc_base/random.h" #include "test/gtest.h" diff --git a/modules/rtp_rtcp/source/rtcp_packet/rrtr.cc b/modules/rtp_rtcp/source/rtcp_packet/rrtr.cc index 95fc890b19..7933ec931c 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/rrtr.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/rrtr.cc @@ -10,6 +10,8 @@ #include "modules/rtp_rtcp/source/rtcp_packet/rrtr.h" +#include + #include "modules/rtp_rtcp/source/byte_io.h" #include "rtc_base/checks.h" diff --git a/modules/rtp_rtcp/source/rtcp_packet/rrtr.h b/modules/rtp_rtcp/source/rtcp_packet/rrtr.h index 827bd74399..47ac35c200 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/rrtr.h +++ b/modules/rtp_rtcp/source/rtcp_packet/rrtr.h @@ -22,9 +22,9 @@ namespace rtcp { class Rrtr { public: - static const uint8_t kBlockType = 4; - static const uint16_t kBlockLength = 2; - static const size_t kLength = 4 * (kBlockLength + 1); // 12 + static constexpr uint8_t kBlockType = 4; + static constexpr uint16_t kBlockLength = 2; + static constexpr size_t kLength = 4 * (kBlockLength + 1); // 12 Rrtr() {} Rrtr(const Rrtr&) = default; diff --git a/modules/rtp_rtcp/source/rtcp_packet/rrtr_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/rrtr_unittest.cc index 56622ea81a..ce8640c408 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/rrtr_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/rrtr_unittest.cc @@ -10,6 +10,11 @@ #include "modules/rtp_rtcp/source/rtcp_packet/rrtr.h" +#include +#include +#include + +#include "system_wrappers/include/ntp_time.h" #include "test/gtest.h" using webrtc::rtcp::Rrtr; diff --git a/modules/rtp_rtcp/source/rtcp_packet/rtpfb.cc b/modules/rtp_rtcp/source/rtcp_packet/rtpfb.cc index 18097de330..83057158a9 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/rtpfb.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/rtpfb.cc @@ -10,11 +10,12 @@ #include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" +#include + #include "modules/rtp_rtcp/source/byte_io.h" namespace webrtc { namespace rtcp { -constexpr uint8_t Rtpfb::kPacketType; // RFC 4585, Section 6.1: Feedback format. // // Common packet format: diff --git a/modules/rtp_rtcp/source/rtcp_packet/sdes.cc b/modules/rtp_rtcp/source/rtcp_packet/sdes.cc index f244ec5f37..d58ff62f51 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/sdes.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/sdes.cc @@ -12,18 +12,20 @@ #include +#include +#include #include +#include #include "absl/strings/string_view.h" #include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { namespace rtcp { -constexpr uint8_t Sdes::kPacketType; -constexpr size_t Sdes::kMaxNumberOfChunks; // Source Description (SDES) (RFC 3550). 
// // 0 1 2 3 diff --git a/modules/rtp_rtcp/source/rtcp_packet/sdes.h b/modules/rtp_rtcp/source/rtcp_packet/sdes.h index 36b63ba29f..af6c43588d 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/sdes.h +++ b/modules/rtp_rtcp/source/rtcp_packet/sdes.h @@ -11,6 +11,8 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_SDES_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_SDES_H_ +#include +#include #include #include diff --git a/modules/rtp_rtcp/source/rtcp_packet/sdes_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/sdes_unittest.cc index 15a39efe87..df1a7b441d 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/sdes_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/sdes_unittest.cc @@ -10,6 +10,11 @@ #include "modules/rtp_rtcp/source/rtcp_packet/sdes.h" +#include +#include +#include + +#include "rtc_base/buffer.h" #include "rtc_base/strings/string_builder.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -29,7 +34,7 @@ const uint8_t kEmailTag = 3; TEST(RtcpPacketSdesTest, CreateAndParseWithoutChunks) { Sdes sdes; - rtc::Buffer packet = sdes.Build(); + Buffer packet = sdes.Build(); Sdes parsed; EXPECT_TRUE(test::ParseSinglePacket(packet, &parsed)); @@ -42,7 +47,7 @@ TEST(RtcpPacketSdesTest, CreateAndParseWithOneChunk) { Sdes sdes; EXPECT_TRUE(sdes.AddCName(kSenderSsrc, kCname)); - rtc::Buffer packet = sdes.Build(); + Buffer packet = sdes.Build(); Sdes sdes_parsed; EXPECT_TRUE(test::ParseSinglePacket(packet, &sdes_parsed)); const Sdes& parsed = sdes_parsed; // Ensure accessors are const. @@ -61,7 +66,7 @@ TEST(RtcpPacketSdesTest, CreateAndParseWithMultipleChunks) { EXPECT_TRUE(sdes.AddCName(kSenderSsrc + 4, "abcde")); EXPECT_TRUE(sdes.AddCName(kSenderSsrc + 5, "abcdef")); - rtc::Buffer packet = sdes.Build(); + Buffer packet = sdes.Build(); Sdes parsed; EXPECT_TRUE(test::ParseSinglePacket(packet, &parsed)); @@ -75,7 +80,7 @@ TEST(RtcpPacketSdesTest, CreateWithTooManyChunks) { Sdes sdes; for (size_t i = 0; i < kMaxChunks; ++i) { uint32_t ssrc = kSenderSsrc + i; - rtc::StringBuilder oss; + StringBuilder oss; oss << "cname" << i; EXPECT_TRUE(sdes.AddCName(ssrc, oss.str())); } @@ -86,7 +91,7 @@ TEST(RtcpPacketSdesTest, CreateAndParseCnameItemWithEmptyString) { Sdes sdes; EXPECT_TRUE(sdes.AddCName(kSenderSsrc, "")); - rtc::Buffer packet = sdes.Build(); + Buffer packet = sdes.Build(); Sdes parsed; EXPECT_TRUE(test::ParseSinglePacket(packet, &parsed)); @@ -221,7 +226,7 @@ TEST(RtcpPacketSdesTest, ParsedSdesCanBeReusedForBuilding) { const std::string kBob = "bob@host"; source.AddCName(kSenderSsrc, kAlice); - rtc::Buffer packet1 = source.Build(); + Buffer packet1 = source.Build(); Sdes middle; test::ParseSinglePacket(packet1, &middle); @@ -229,7 +234,7 @@ TEST(RtcpPacketSdesTest, ParsedSdesCanBeReusedForBuilding) { middle.AddCName(kSenderSsrc + 1, kBob); - rtc::Buffer packet2 = middle.Build(); + Buffer packet2 = middle.Build(); Sdes destination; test::ParseSinglePacket(packet2, &destination); diff --git a/modules/rtp_rtcp/source/rtcp_packet/sender_report.cc b/modules/rtp_rtcp/source/rtcp_packet/sender_report.cc index 73738376c3..77ed87fb3e 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/sender_report.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/sender_report.cc @@ -10,18 +10,19 @@ #include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" +#include +#include #include +#include #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "rtc_base/checks.h" #include 
"rtc_base/logging.h" namespace webrtc { namespace rtcp { -constexpr uint8_t SenderReport::kPacketType; -constexpr size_t SenderReport::kMaxNumberOfReportBlocks; -constexpr size_t SenderReport::kSenderBaseLength; // Sender report (SR) (RFC 3550). // 0 1 2 3 // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 diff --git a/modules/rtp_rtcp/source/rtcp_packet/sender_report.h b/modules/rtp_rtcp/source/rtcp_packet/sender_report.h index 66ced31721..55ace6e124 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/sender_report.h +++ b/modules/rtp_rtcp/source/rtcp_packet/sender_report.h @@ -11,6 +11,8 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_SENDER_REPORT_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_SENDER_REPORT_H_ +#include +#include #include #include "modules/rtp_rtcp/source/rtcp_packet.h" diff --git a/modules/rtp_rtcp/source/rtcp_packet/sender_report_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/sender_report_unittest.cc index 37f268e6b4..f7d2478220 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/sender_report_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/sender_report_unittest.cc @@ -10,8 +10,14 @@ #include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" +#include +#include #include +#include +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" +#include "rtc_base/buffer.h" +#include "system_wrappers/include/ntp_time.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -43,7 +49,7 @@ TEST(RtcpPacketSenderReportTest, CreateWithoutReportBlocks) { sr.SetPacketCount(kPacketCount); sr.SetOctetCount(kOctetCount); - rtc::Buffer raw = sr.Build(); + Buffer raw = sr.Build(); EXPECT_THAT(make_tuple(raw.data(), raw.size()), ElementsAreArray(kPacket)); } @@ -67,7 +73,7 @@ TEST(RtcpPacketSenderReportTest, CreateAndParseWithOneReportBlock) { sr.SetSenderSsrc(kSenderSsrc); EXPECT_TRUE(sr.AddReportBlock(rb)); - rtc::Buffer raw = sr.Build(); + Buffer raw = sr.Build(); SenderReport parsed; EXPECT_TRUE(test::ParseSinglePacket(raw, &parsed)); @@ -87,7 +93,7 @@ TEST(RtcpPacketSenderReportTest, CreateAndParseWithTwoReportBlocks) { EXPECT_TRUE(sr.AddReportBlock(rb1)); EXPECT_TRUE(sr.AddReportBlock(rb2)); - rtc::Buffer raw = sr.Build(); + Buffer raw = sr.Build(); SenderReport parsed; EXPECT_TRUE(test::ParseSinglePacket(raw, &parsed)); diff --git a/modules/rtp_rtcp/source/rtcp_packet/target_bitrate.cc b/modules/rtp_rtcp/source/rtcp_packet/target_bitrate.cc index 601b24fe94..17d86a7f5f 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/target_bitrate.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/target_bitrate.cc @@ -10,15 +10,21 @@ #include "modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h" +#include +#include +#include + #include "modules/rtp_rtcp/source/byte_io.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" namespace webrtc { namespace rtcp { +namespace { + constexpr size_t kTargetBitrateHeaderSizeBytes = 4; -constexpr uint8_t TargetBitrate::kBlockType; -const size_t TargetBitrate::kBitrateItemSizeBytes = 4; + +} // namespace TargetBitrate::BitrateItem::BitrateItem() : spatial_layer(0), temporal_layer(0), target_bitrate_kbps(0) {} @@ -111,7 +117,7 @@ void TargetBitrate::Create(uint8_t* buffer) const { buffer[0] = kBlockType; buffer[1] = 0; // Reserved. 
uint16_t block_length_words = - rtc::dchecked_cast((BlockLength() / 4) - 1); + dchecked_cast((BlockLength() / 4) - 1); ByteWriter::WriteBigEndian(&buffer[2], block_length_words); size_t index = kTargetBitrateHeaderSizeBytes; diff --git a/modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h b/modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h index 07e5da1a49..5e0f1ca391 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h +++ b/modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h @@ -24,7 +24,7 @@ class TargetBitrate { // TODO(sprang): This block type is just a place holder. We need to get an // id assigned by IANA. static constexpr uint8_t kBlockType = 42; - static const size_t kBitrateItemSizeBytes; + static constexpr size_t kBitrateItemSizeBytes = 4; struct BitrateItem { BitrateItem(); diff --git a/modules/rtp_rtcp/source/rtcp_packet/target_bitrate_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/target_bitrate_unittest.cc index b16bb5beaa..dcaa85ed69 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/target_bitrate_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/target_bitrate_unittest.cc @@ -10,9 +10,13 @@ #include "modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h" -#include "modules/rtp_rtcp/source/byte_io.h" +#include +#include +#include +#include +#include + #include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" -#include "rtc_base/buffer.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -67,7 +71,7 @@ TEST(TargetBitrateTest, FullPacket) { rtcp::ExtendedReports xr; EXPECT_TRUE(ParseSinglePacket(kRtcpPacket, &xr)); EXPECT_EQ(kSsrc, xr.sender_ssrc()); - const absl::optional& target_bitrate = xr.target_bitrate(); + const std::optional& target_bitrate = xr.target_bitrate(); ASSERT_TRUE(static_cast(target_bitrate)); CheckBitrateItems(target_bitrate->GetTargetBitrates()); } diff --git a/modules/rtp_rtcp/source/rtcp_packet/tmmb_item.cc b/modules/rtp_rtcp/source/rtcp_packet/tmmb_item.cc index 810e1e267a..2eba31ef7d 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/tmmb_item.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/tmmb_item.cc @@ -10,6 +10,8 @@ #include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" +#include + #include "modules/rtp_rtcp/source/byte_io.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" diff --git a/modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h b/modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h index dc5d1b2c2d..6156ffa8e7 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h +++ b/modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h @@ -21,7 +21,7 @@ namespace rtcp { // Used both by TMMBR and TMMBN rtcp packets. 
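For reference on kLength below: an RFC 5104 TMMBR/TMMBN FCI entry is exactly 8 bytes, the media SSRC (32 bits) followed by a packed word holding the MxTBR exponent (6 bits), MxTBR mantissa (17 bits) and measured overhead (9 bits). A quick check of that packing:

static_assert(32 + 6 + 17 + 9 == 64,
              "a TMMBR/TMMBN FCI entry packs into two 32-bit words");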
class TmmbItem { public: - static const size_t kLength = 8; + static constexpr size_t kLength = 8; TmmbItem() : ssrc_(0), bitrate_bps_(0), packet_overhead_(0) {} TmmbItem(uint32_t ssrc, uint64_t bitrate_bps, uint16_t overhead); diff --git a/modules/rtp_rtcp/source/rtcp_packet/tmmbn.cc b/modules/rtp_rtcp/source/rtcp_packet/tmmbn.cc index f57e5749c2..24aa3a3384 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/tmmbn.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/tmmbn.cc @@ -10,13 +10,17 @@ #include "modules/rtp_rtcp/source/rtcp_packet/tmmbn.h" +#include +#include + #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" +#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { namespace rtcp { -constexpr uint8_t Tmmbn::kFeedbackMessageType; // RFC 4585: Feedback format. // Common packet format: // diff --git a/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h b/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h index ff7779d8ac..6f9904af36 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h +++ b/modules/rtp_rtcp/source/rtcp_packet/tmmbn.h @@ -11,6 +11,8 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_TMMBN_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_TMMBN_H_ +#include +#include #include #include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" diff --git a/modules/rtp_rtcp/source/rtcp_packet/tmmbn_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/tmmbn_unittest.cc index 3a37bb1c0e..ce1544c263 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/tmmbn_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/tmmbn_unittest.cc @@ -10,6 +10,10 @@ #include "modules/rtp_rtcp/source/rtcp_packet/tmmbn.h" +#include + +#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" +#include "rtc_base/buffer.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -36,7 +40,7 @@ TEST(RtcpPacketTmmbnTest, Create) { tmmbn.SetSenderSsrc(kSenderSsrc); tmmbn.AddTmmbr(TmmbItem(kRemoteSsrc, kBitrateBps, kOverhead)); - rtc::Buffer packet = tmmbn.Build(); + Buffer packet = tmmbn.Build(); EXPECT_THAT(make_tuple(packet.data(), packet.size()), ElementsAreArray(kPacket)); @@ -59,7 +63,7 @@ TEST(RtcpPacketTmmbnTest, CreateAndParseWithoutItems) { Tmmbn tmmbn; tmmbn.SetSenderSsrc(kSenderSsrc); - rtc::Buffer packet = tmmbn.Build(); + Buffer packet = tmmbn.Build(); Tmmbn parsed; EXPECT_TRUE(test::ParseSinglePacket(packet, &parsed)); @@ -73,7 +77,7 @@ TEST(RtcpPacketTmmbnTest, CreateAndParseWithTwoItems) { tmmbn.AddTmmbr(TmmbItem(kRemoteSsrc, kBitrateBps, kOverhead)); tmmbn.AddTmmbr(TmmbItem(kRemoteSsrc + 1, 4 * kBitrateBps, 40)); - rtc::Buffer packet = tmmbn.Build(); + Buffer packet = tmmbn.Build(); Tmmbn parsed; EXPECT_TRUE(test::ParseSinglePacket(packet, &parsed)); diff --git a/modules/rtp_rtcp/source/rtcp_packet/tmmbr.cc b/modules/rtp_rtcp/source/rtcp_packet/tmmbr.cc index 9dc745e509..9f2ad5cecf 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/tmmbr.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/tmmbr.cc @@ -10,13 +10,17 @@ #include "modules/rtp_rtcp/source/rtcp_packet/tmmbr.h" +#include +#include + #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" +#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { namespace rtcp { -constexpr uint8_t Tmmbr::kFeedbackMessageType; // RFC 4585: Feedback format. 
// Common packet format: // diff --git a/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h b/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h index 7482cb75cc..c3d3056138 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h +++ b/modules/rtp_rtcp/source/rtcp_packet/tmmbr.h @@ -11,6 +11,8 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_TMMBR_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_TMMBR_H_ +#include +#include #include #include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" diff --git a/modules/rtp_rtcp/source/rtcp_packet/tmmbr_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/tmmbr_unittest.cc index 1bac808aa9..4f45577e86 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/tmmbr_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/tmmbr_unittest.cc @@ -10,6 +10,10 @@ #include "modules/rtp_rtcp/source/rtcp_packet/tmmbr.h" +#include + +#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" +#include "rtc_base/buffer.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -35,7 +39,7 @@ TEST(RtcpPacketTmmbrTest, Create) { tmmbr.SetSenderSsrc(kSenderSsrc); tmmbr.AddTmmbr(TmmbItem(kRemoteSsrc, kBitrateBps, kOverhead)); - rtc::Buffer packet = tmmbr.Build(); + Buffer packet = tmmbr.Build(); EXPECT_THAT(make_tuple(packet.data(), packet.size()), ElementsAreArray(kPacket)); @@ -59,7 +63,7 @@ TEST(RtcpPacketTmmbrTest, CreateAndParseWithTwoEntries) { tmmbr.AddTmmbr(TmmbItem(kRemoteSsrc, kBitrateBps, kOverhead)); tmmbr.AddTmmbr(TmmbItem(kRemoteSsrc + 1, 4 * kBitrateBps, kOverhead + 1)); - rtc::Buffer packet = tmmbr.Build(); + Buffer packet = tmmbr.Build(); Tmmbr parsed; EXPECT_TRUE(test::ParseSinglePacket(packet, &parsed)); diff --git a/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc b/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc index 003effad29..e8c94119f1 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc @@ -11,13 +11,19 @@ #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" #include +#include #include -#include +#include #include +#include #include "absl/algorithm/container.h" +#include "api/function_view.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -73,13 +79,6 @@ constexpr TimeDelta kTimeWrapPeriod = kBaseTimeTick * (1 << 24); // | recv delta | recv delta | zero padding | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ } // namespace -constexpr uint8_t TransportFeedback::kFeedbackMessageType; -constexpr size_t TransportFeedback::kMaxReportedPackets; - -constexpr size_t TransportFeedback::LastChunk::kMaxRunLengthCapacity; -constexpr size_t TransportFeedback::LastChunk::kMaxOneBitCapacity; -constexpr size_t TransportFeedback::LastChunk::kMaxTwoBitCapacity; -constexpr size_t TransportFeedback::LastChunk::kMaxVectorCapacity; TransportFeedback::LastChunk::LastChunk() { Clear(); @@ -373,7 +372,7 @@ TransportFeedback::GetReceivedPackets() const { } void TransportFeedback::ForAllPackets( - rtc::FunctionView handler) const { + FunctionView handler) const { TimeDelta delta_since_base = TimeDelta::Zero(); auto received_it = received_packets_.begin(); const uint16_t last_seq_num = base_seq_no_ + num_seq_no_; @@ -582,9 +581,8 @@ bool 
TransportFeedback::IsConsistent() const { return false; } if (timestamp != last_timestamp_) { - RTC_LOG(LS_ERROR) << "Last timestamp mismatch. Calculated: " - << ToLogString(timestamp) - << ". Saved: " << ToLogString(last_timestamp_); + RTC_LOG(LS_ERROR) << "Last timestamp mismatch. Calculated: " << timestamp + << ". Saved: " << last_timestamp_; return false; } if (size_bytes_ != packet_size) { diff --git a/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h b/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h index 4d17b54c3a..6132a7bd5a 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h +++ b/modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h @@ -12,10 +12,11 @@ #define MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_TRANSPORT_FEEDBACK_H_ #include +#include +#include #include #include -#include "absl/base/attributes.h" #include "api/function_view.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -72,8 +73,8 @@ class TransportFeedback : public Rtpfb { // `BaseTime()`. For missed packets calls `handler` with `delta_since_base = // PlusInfinity()`. void ForAllPackets( - rtc::FunctionView handler) const; + FunctionView + handler) const; uint16_t GetBaseSequence() const; diff --git a/modules/rtp_rtcp/source/rtcp_packet/transport_feedback_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet/transport_feedback_unittest.cc index 4248a4d3ee..91a9450aec 100644 --- a/modules/rtp_rtcp/source/rtcp_packet/transport_feedback_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet/transport_feedback_unittest.cc @@ -10,15 +10,22 @@ #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" +#include +#include +#include #include #include +#include #include +#include #include "api/array_view.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" #include "test/gmock.h" #include "test/gtest.h" @@ -58,13 +65,13 @@ MATCHER(IsValidFeedback, "") { feedback.Parse(rtcp_header); } -TransportFeedback Parse(rtc::ArrayView buffer) { +TransportFeedback Parse(ArrayView buffer) { rtcp::CommonHeader header; - RTC_DCHECK(header.Parse(buffer.data(), buffer.size())); - RTC_DCHECK_EQ(header.type(), TransportFeedback::kPacketType); - RTC_DCHECK_EQ(header.fmt(), TransportFeedback::kFeedbackMessageType); + EXPECT_TRUE(header.Parse(buffer.data(), buffer.size())); + EXPECT_EQ(header.type(), TransportFeedback::kPacketType); + EXPECT_EQ(header.fmt(), TransportFeedback::kFeedbackMessageType); TransportFeedback feedback; - RTC_DCHECK(feedback.Parse(header)); + EXPECT_TRUE(feedback.Parse(header)); return feedback; } @@ -82,14 +89,14 @@ class FeedbackTester { void WithDefaultDelta(TimeDelta delta) { default_delta_ = delta; } - void WithInput(rtc::ArrayView received_seq, - rtc::ArrayView received_ts = {}) { + void WithInput(ArrayView received_seq, + ArrayView received_ts = {}) { std::vector temp_timestamps; if (received_ts.empty()) { temp_timestamps = GenerateReceiveTimestamps(received_seq); received_ts = temp_timestamps; } - RTC_DCHECK_EQ(received_seq.size(), received_ts.size()); + ASSERT_EQ(received_seq.size(), received_ts.size()); expected_deltas_.clear(); feedback_.emplace(include_timestamps_); @@ -149,8 +156,8 @@ class FeedbackTester { } std::vector GenerateReceiveTimestamps( - rtc::ArrayView seq_nums) { - RTC_DCHECK(!seq_nums.empty()); + ArrayView seq_nums) { + RTC_CHECK(!seq_nums.empty()); uint16_t 
last_seq = seq_nums[0]; Timestamp time = Timestamp::Zero(); std::vector result; @@ -169,8 +176,8 @@ class FeedbackTester { std::vector expected_deltas_; size_t expected_size_; TimeDelta default_delta_; - absl::optional feedback_; - rtc::Buffer serialized_; + std::optional feedback_; + Buffer serialized_; bool include_timestamps_; }; @@ -519,7 +526,7 @@ TEST(RtcpPacketTest, TransportFeedbackPadding) { feedback.SetBase(0, Timestamp::Zero()); EXPECT_TRUE(feedback.AddReceivedPacket(0, Timestamp::Zero())); - rtc::Buffer packet = feedback.Build(); + Buffer packet = feedback.Build(); EXPECT_EQ(kExpectedSizeWords * 4, packet.size()); ASSERT_GT(kExpectedSizeWords * 4, kExpectedSizeBytes); for (size_t i = kExpectedSizeBytes; i < (kExpectedSizeWords * 4 - 1); ++i) @@ -558,7 +565,7 @@ TEST(RtcpPacketTest, TransportFeedbackPaddingBackwardsCompatibility) { feedback.SetBase(0, Timestamp::Zero()); EXPECT_TRUE(feedback.AddReceivedPacket(0, Timestamp::Zero())); - rtc::Buffer packet = feedback.Build(); + Buffer packet = feedback.Build(); EXPECT_EQ(kExpectedSizeWords * 4, packet.size()); ASSERT_GT(kExpectedSizeWords * 4, kExpectedSizeBytes); for (size_t i = kExpectedSizeBytes; i < (kExpectedSizeWords * 4 - 1); ++i) diff --git a/modules/rtp_rtcp/source/rtcp_packet_unittest.cc b/modules/rtp_rtcp/source/rtcp_packet_unittest.cc index dccd1354a9..2a87759629 100644 --- a/modules/rtp_rtcp/source/rtcp_packet_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_packet_unittest.cc @@ -10,7 +10,12 @@ #include "modules/rtp_rtcp/source/rtcp_packet.h" +#include +#include + +#include "api/array_view.h" #include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "test/gmock.h" #include "test/gtest.h" @@ -33,7 +38,7 @@ TEST(RtcpPacketTest, BuildWithTooSmallBuffer) { const size_t kReportBlockLength = 24; // No packet. 
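The test-helper hunks above swap RTC_DCHECK for GoogleTest assertions: a dcheck is compiled out of release builds and aborts the process in debug builds, whereas EXPECT_*/ASSERT_* report a test failure. ASSERT_* expands to an early `return;`, so it only compiles in functions returning void, which is presumably why the value-returning Parse() helper uses EXPECT_* while the void WithInput() uses ASSERT_EQ. A small sketch of the distinction (the include path is illustrative; WebRTC wraps it as test/gtest.h):

#include <cstdint>
#include <vector>

#include <gtest/gtest.h>

// EXPECT_* records a failure and keeps going, so it is usable in helpers
// that must return a value.
std::vector<uint8_t> MakeNonEmptyPayload(size_t size) {
  EXPECT_GT(size, 0u);
  return std::vector<uint8_t>(size, 0);
}

// ASSERT_* bails out of the enclosing function on failure and therefore
// only compiles in functions returning void.
void CheckPayloadSize(const std::vector<uint8_t>& payload, size_t expected) {
  ASSERT_EQ(payload.size(), expected);
  EXPECT_EQ(payload.front(), 0u);
}

TEST(HelperStyleSketch, HelpersUseExpectAndAssert) {
  std::vector<uint8_t> payload = MakeNonEmptyPayload(4);
  CheckPayloadSize(payload, 4);
}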
- MockFunction)> callback; + MockFunction)> callback; EXPECT_CALL(callback, Call(_)).Times(0); const size_t kBufferSize = kRrLength + kReportBlockLength - 1; EXPECT_FALSE(rr.Build(kBufferSize, callback.AsStdFunction())); diff --git a/modules/rtp_rtcp/source/rtcp_receiver.cc b/modules/rtp_rtcp/source/rtcp_receiver.cc index ba5b951f4d..cf9975e764 100644 --- a/modules/rtp_rtcp/source/rtcp_receiver.cc +++ b/modules/rtp_rtcp/source/rtcp_receiver.cc @@ -13,41 +13,62 @@ #include #include +#include +#include +#include #include #include #include +#include #include #include +#include "absl/algorithm/container.h" +#include "absl/base/attributes.h" +#include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" +#include "api/sequence_checker.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_bitrate_allocator.h" +#include "api/video/video_codec_constants.h" +#include "modules/rtp_rtcp/include/report_block_data.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/ntp_time_util.h" +#include "modules/rtp_rtcp/source/rtcp_packet/app.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" -#include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" +#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" #include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" #include "modules/rtp_rtcp/source/rtcp_packet/fir.h" #include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" #include "modules/rtp_rtcp/source/rtcp_packet/nack.h" #include "modules/rtp_rtcp/source/rtcp_packet/pli.h" +#include "modules/rtp_rtcp/source/rtcp_packet/psfb.h" #include "modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h" #include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" #include "modules/rtp_rtcp/source/rtcp_packet/remb.h" #include "modules/rtp_rtcp/source/rtcp_packet/remote_estimate.h" +#include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" #include "modules/rtp_rtcp/source/rtcp_packet/sdes.h" #include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmbn.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmbr.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" -#include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" -#include "modules/rtp_rtcp/source/time_util.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/tmmbr_help.h" #include "rtc_base/checks.h" +#include "rtc_base/containers/flat_map.h" #include "rtc_base/logging.h" -#include "rtc_base/time_utils.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/trace_event.h" +#include "system_wrappers/include/clock.h" #include "system_wrappers/include/ntp_time.h" namespace webrtc { @@ -86,9 +107,6 @@ bool ResetTimestampIfExpired(const Timestamp now, } // namespace -constexpr size_t RTCPReceiver::RegisteredSsrcs::kMediaSsrcIndex; -constexpr size_t RTCPReceiver::RegisteredSsrcs::kMaxSsrcs; - RTCPReceiver::RegisteredSsrcs::RegisteredSsrcs( bool disable_sequence_checker, const RtpRtcpInterface::Configuration& config) @@ -99,13 +117,13 @@ RTCPReceiver::RegisteredSsrcs::RegisteredSsrcs( 
ssrcs_.push_back(*config.rtx_send_ssrc); } if (config.fec_generator) { - absl::optional flexfec_ssrc = config.fec_generator->FecSsrc(); + std::optional flexfec_ssrc = config.fec_generator->FecSsrc(); if (flexfec_ssrc) { ssrcs_.push_back(*flexfec_ssrc); } } // Ensure that the RegisteredSsrcs can inline the SSRCs. - RTC_DCHECK_LE(ssrcs_.size(), RTCPReceiver::RegisteredSsrcs::kMaxSsrcs); + RTC_DCHECK_LE(ssrcs_.size(), kMaxSimulcastStreams); } bool RTCPReceiver::RegisteredSsrcs::contains(uint32_t ssrc) const { @@ -129,18 +147,22 @@ struct RTCPReceiver::PacketInformation { uint32_t remote_ssrc = 0; std::vector nack_sequence_numbers; std::vector report_block_datas; - absl::optional rtt; + std::optional rtt; uint32_t receiver_estimated_max_bitrate_bps = 0; std::unique_ptr transport_feedback; - absl::optional target_bitrate_allocation; - absl::optional network_state_estimate; + std::optional congestion_control_feedback; + std::optional target_bitrate_allocation; + std::optional network_state_estimate; std::unique_ptr loss_notification; }; -RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config, +RTCPReceiver::RTCPReceiver(const Environment& env, + const RtpRtcpInterface::Configuration& config, ModuleRtpRtcpImpl2* owner) - : clock_(config.clock), + : env_(env), receiver_only_(config.receiver_only), + enable_congestion_controller_feedback_(env_.field_trials().IsEnabled( + "WebRTC-RFC8888CongestionControlFeedback")), rtp_rtcp_(owner), registered_ssrcs_(false, config), network_link_rtcp_observer_(config.network_link_rtcp_observer), @@ -160,14 +182,17 @@ RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config, report_block_data_observer_(config.report_block_data_observer), packet_type_counter_observer_(config.rtcp_packet_type_counter_observer), num_skipped_packets_(0), - last_skipped_packets_warning_(clock_->CurrentTime()) { + last_skipped_packets_warning_(env_.clock().CurrentTime()) { RTC_DCHECK(owner); } -RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config, +RTCPReceiver::RTCPReceiver(const Environment& env, + const RtpRtcpInterface::Configuration& config, ModuleRtpRtcp* owner) - : clock_(config.clock), + : env_(env), receiver_only_(config.receiver_only), + enable_congestion_controller_feedback_(env_.field_trials().IsEnabled( + "WebRTC-RFC8888CongestionControlFeedback")), rtp_rtcp_(owner), registered_ssrcs_(true, config), network_link_rtcp_observer_(config.network_link_rtcp_observer), @@ -187,7 +212,7 @@ RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config, report_block_data_observer_(config.report_block_data_observer), packet_type_counter_observer_(config.rtcp_packet_type_counter_observer), num_skipped_packets_(0), - last_skipped_packets_warning_(clock_->CurrentTime()) { + last_skipped_packets_warning_(env_.clock().CurrentTime()) { RTC_DCHECK(owner); // Dear reader - if you're here because of this log statement and are // wondering what this is about, chances are that you are using an instance @@ -206,7 +231,7 @@ RTCPReceiver::RTCPReceiver(const RtpRtcpInterface::Configuration& config, RTCPReceiver::~RTCPReceiver() {} -void RTCPReceiver::IncomingPacket(rtc::ArrayView packet) { +void RTCPReceiver::IncomingPacket(ArrayView packet) { if (packet.empty()) { RTC_LOG(LS_WARNING) << "Incoming empty RTCP packet"; return; @@ -228,7 +253,7 @@ int64_t RTCPReceiver::LastReceivedReportBlockMs() const { void RTCPReceiver::SetRemoteSSRC(uint32_t ssrc) { MutexLock lock(&rtcp_receiver_lock_); // New SSRC reset old reports. 
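The constructors above now take a webrtc::Environment and read the clock and field trials through it (env_.clock(), env_.field_trials()) instead of a raw Clock* from the config. The sketch below is a standalone analogue of that injection pattern; the type and member names are illustrative stand-ins, not the real Environment API.

#include <cstdint>
#include <functional>
#include <string>

// Hypothetical stand-ins for the injected services.
struct SketchClock {
  int64_t now_ms = 0;
};
struct SketchEnvironment {
  SketchClock* clock = nullptr;
  std::function<bool(const std::string& /*trial*/)> trial_enabled;
};

class SketchReceiver {
 public:
  explicit SketchReceiver(const SketchEnvironment& env)
      : env_(env),
        rfc8888_feedback_enabled_(
            env.trial_enabled("WebRTC-RFC8888CongestionControlFeedback")) {}

  int64_t NowMs() const { return env_.clock->now_ms; }
  bool rfc8888_feedback_enabled() const { return rfc8888_feedback_enabled_; }

 private:
  const SketchEnvironment env_;
  const bool rfc8888_feedback_enabled_;  // evaluated once at construction
};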
- remote_sender_.last_arrival_timestamp.Reset(); + remote_sender_.last_arrival_ntp_timestamp.Reset(); remote_ssrc_ = ssrc; } @@ -251,20 +276,20 @@ void RTCPReceiver::RttStats::AddRtt(TimeDelta rtt) { ++num_rtts_; } -absl::optional RTCPReceiver::AverageRtt() const { +std::optional RTCPReceiver::AverageRtt() const { MutexLock lock(&rtcp_receiver_lock_); auto it = rtts_.find(remote_ssrc_); if (it == rtts_.end()) { - return absl::nullopt; + return std::nullopt; } return it->second.average_rtt(); } -absl::optional RTCPReceiver::LastRtt() const { +std::optional RTCPReceiver::LastRtt() const { MutexLock lock(&rtcp_receiver_lock_); auto it = rtts_.find(remote_ssrc_); if (it == rtts_.end()) { - return absl::nullopt; + return std::nullopt; } return it->second.last_rtt(); } @@ -283,19 +308,18 @@ void RTCPReceiver::SetNonSenderRttMeasurement(bool enabled) { xr_rrtr_status_ = enabled; } -absl::optional RTCPReceiver::GetAndResetXrRrRtt() { +std::optional RTCPReceiver::GetAndResetXrRrRtt() { MutexLock lock(&rtcp_receiver_lock_); - absl::optional rtt = xr_rr_rtt_; - xr_rr_rtt_ = absl::nullopt; + std::optional rtt = xr_rr_rtt_; + xr_rr_rtt_ = std::nullopt; return rtt; } // Called regularly (1/sec) on the worker thread to do rtt calculations. -absl::optional RTCPReceiver::OnPeriodicRttUpdate( - Timestamp newer_than, - bool sending) { +std::optional RTCPReceiver::OnPeriodicRttUpdate(Timestamp newer_than, + bool sending) { // Running on the worker thread (same as construction thread). - absl::optional rtt; + std::optional rtt; if (sending) { // Check if we've received a report block within the last kRttUpdateInterval @@ -314,7 +338,7 @@ absl::optional RTCPReceiver::OnPeriodicRttUpdate( } // Check for expired timers and if so, log and reset. - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); if (RtcpRrTimeoutLocked(now)) { RTC_LOG_F(LS_WARNING) << "Timeout: No RTCP RR received."; } else if (RtcpRrSequenceNumberTimeoutLocked(now)) { @@ -329,11 +353,11 @@ absl::optional RTCPReceiver::OnPeriodicRttUpdate( return rtt; } -absl::optional +std::optional RTCPReceiver::GetSenderReportStats() const { MutexLock lock(&rtcp_receiver_lock_); - if (!remote_sender_.last_arrival_timestamp.Valid()) { - return absl::nullopt; + if (!remote_sender_.last_arrival_ntp_timestamp.Valid()) { + return std::nullopt; } return remote_sender_; @@ -348,7 +372,7 @@ RTCPReceiver::ConsumeReceivedXrReferenceTimeInfo() { std::vector last_xr_rtis; last_xr_rtis.reserve(last_xr_rtis_size); - const uint32_t now_ntp = CompactNtp(clock_->CurrentNtpTime()); + const uint32_t now_ntp = CompactNtp(env_.clock().CurrentNtpTime()); for (size_t i = 0; i < last_xr_rtis_size; ++i) { RrtrInformation& rrtr = received_rrtrs_.front(); @@ -370,7 +394,7 @@ std::vector RTCPReceiver::GetLatestReportBlockData() const { return result; } -bool RTCPReceiver::ParseCompoundPacket(rtc::ArrayView packet, +bool RTCPReceiver::ParseCompoundPacket(ArrayView packet, PacketInformation* packet_information) { MutexLock lock(&rtcp_receiver_lock_); @@ -439,6 +463,13 @@ bool RTCPReceiver::ParseCompoundPacket(rtc::ArrayView packet, case rtcp::TransportFeedback::kFeedbackMessageType: HandleTransportFeedback(rtcp_block, packet_information); break; + case rtcp::CongestionControlFeedback::kFeedbackMessageType: + if (enable_congestion_controller_feedback_) { + valid = HandleCongestionControlFeedback(rtcp_block, + packet_information); + break; + } + ABSL_FALLTHROUGH_INTENDED; default: ++num_skipped_packets_; break; @@ -467,7 +498,7 @@ bool 
RTCPReceiver::ParseCompoundPacket(rtc::ArrayView packet, } if (num_skipped_packets_ > 0) { - const Timestamp now = clock_->CurrentTime(); + const Timestamp now = env_.clock().CurrentTime(); if (now - last_skipped_packets_warning_ >= kMaxWarningLogInterval) { last_skipped_packets_warning_ = now; RTC_LOG(LS_WARNING) @@ -518,9 +549,10 @@ bool RTCPReceiver::HandleSenderReport(const CommonHeader& rtcp_block, // Only signal that we have received a SR when we accept one. packet_information->packet_type_flags |= kRtcpSr; - remote_sender_.last_remote_timestamp = sender_report.ntp(); + remote_sender_.last_remote_ntp_timestamp = sender_report.ntp(); remote_sender_.last_remote_rtp_timestamp = sender_report.rtp_timestamp(); - remote_sender_.last_arrival_timestamp = clock_->CurrentNtpTime(); + remote_sender_.last_arrival_timestamp = env_.clock().CurrentTime(); + remote_sender_.last_arrival_ntp_timestamp = env_.clock().CurrentNtpTime(); remote_sender_.packets_sent = sender_report.sender_packet_count(); remote_sender_.bytes_sent = sender_report.sender_octet_count(); remote_sender_.reports_count++; @@ -575,7 +607,7 @@ void RTCPReceiver::HandleReportBlock(const ReportBlock& report_block, if (!registered_ssrcs_.contains(report_block.source_ssrc())) return; - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); last_received_rb_ = now; ReportBlockData* report_block_data = @@ -586,12 +618,11 @@ void RTCPReceiver::HandleReportBlock(const ReportBlock& report_block, // the last RR was sent from the remote side. last_increased_sequence_number_ = last_received_rb_; } - NtpTime now_ntp = clock_->ConvertTimestampToNtpTime(now); + NtpTime now_ntp = env_.clock().ConvertTimestampToNtpTime(now); // Number of seconds since 1900 January 1 00:00 GMT (see // https://tools.ietf.org/html/rfc868). - report_block_data->SetReportBlock( - remote_ssrc, report_block, - Timestamp::Millis(now_ntp.ToMs() - rtc::kNtpJan1970Millisecs)); + report_block_data->SetReportBlock(remote_ssrc, report_block, + Clock::NtpToUtc(now_ntp), now); uint32_t send_time_ntp = report_block.last_sr(); // RFC3550, section 6.4.1, LSR field discription states: @@ -623,14 +654,14 @@ RTCPReceiver::TmmbrInformation* RTCPReceiver::FindOrCreateTmmbrInfo( // Create or find receive information. TmmbrInformation* tmmbr_info = &tmmbr_infos_[remote_ssrc]; // Update that this remote is alive. - tmmbr_info->last_time_received = clock_->CurrentTime(); + tmmbr_info->last_time_received = env_.clock().CurrentTime(); return tmmbr_info; } void RTCPReceiver::UpdateTmmbrRemoteIsAlive(uint32_t remote_ssrc) { auto tmmbr_it = tmmbr_infos_.find(remote_ssrc); if (tmmbr_it != tmmbr_infos_.end()) - tmmbr_it->second.last_time_received = clock_->CurrentTime(); + tmmbr_it->second.last_time_received = env_.clock().CurrentTime(); } RTCPReceiver::TmmbrInformation* RTCPReceiver::GetTmmbrInformation( @@ -647,18 +678,18 @@ RTCPReceiver::TmmbrInformation* RTCPReceiver::GetTmmbrInformation( // the worker thread and thus no locking is needed. 
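The ParseCompoundPacket() switch above adds a case for the RFC 8888 congestion control feedback format but only handles it when the field trial is enabled; otherwise the case deliberately falls through to the default branch that counts the packet as skipped, with ABSL_FALLTHROUGH_INTENDED silencing the implicit-fallthrough warning. A standalone sketch of the same pattern using the standard [[fallthrough]] attribute (the FMT constant below is a placeholder, not the real value):

#include <cstdint>

constexpr uint8_t kExperimentalFeedbackFmt = 11;  // placeholder FMT value

// Returns true if the message was handled, and bumps `num_skipped` otherwise.
bool DemuxRtpfb(uint8_t fmt, bool experiment_enabled, int& num_skipped) {
  switch (fmt) {
    case kExperimentalFeedbackFmt:
      if (experiment_enabled) {
        // ... parse and dispatch the experimental feedback message ...
        return true;
      }
      [[fallthrough]];  // ABSL_FALLTHROUGH_INTENDED in the hunk above
    default:
      ++num_skipped;
      return false;
  }
}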
bool RTCPReceiver::RtcpRrTimeout() { MutexLock lock(&rtcp_receiver_lock_); - return RtcpRrTimeoutLocked(clock_->CurrentTime()); + return RtcpRrTimeoutLocked(env_.clock().CurrentTime()); } bool RTCPReceiver::RtcpRrSequenceNumberTimeout() { MutexLock lock(&rtcp_receiver_lock_); - return RtcpRrSequenceNumberTimeoutLocked(clock_->CurrentTime()); + return RtcpRrSequenceNumberTimeoutLocked(env_.clock().CurrentTime()); } bool RTCPReceiver::UpdateTmmbrTimers() { MutexLock lock(&rtcp_receiver_lock_); - Timestamp timeout = clock_->CurrentTime() - kTmmbrTimeoutInterval; + Timestamp timeout = env_.clock().CurrentTime() - kTmmbrTimeoutInterval; if (oldest_tmmbr_info_ >= timeout) return false; @@ -785,7 +816,7 @@ bool RTCPReceiver::HandleBye(const CommonHeader& rtcp_block) { received_rrtrs_.erase(it->second); received_rrtrs_ssrc_it_.erase(it); } - xr_rr_rtt_ = absl::nullopt; + xr_rr_rtt_ = std::nullopt; return true; } @@ -816,7 +847,8 @@ bool RTCPReceiver::HandleXr(const CommonHeader& rtcp_block, void RTCPReceiver::HandleXrReceiveReferenceTime(uint32_t sender_ssrc, const rtcp::Rrtr& rrtr) { uint32_t received_remote_mid_ntp_time = CompactNtp(rrtr.ntp()); - uint32_t local_receive_mid_ntp_time = CompactNtp(clock_->CurrentNtpTime()); + uint32_t local_receive_mid_ntp_time = + CompactNtp(env_.clock().CurrentNtpTime()); auto it = received_rrtrs_ssrc_it_.find(sender_ssrc); if (it != received_rrtrs_ssrc_it_.end()) { @@ -856,7 +888,7 @@ void RTCPReceiver::HandleXrDlrrReportBlock(uint32_t sender_ssrc, } uint32_t delay_ntp = rti.delay_since_last_rr; - uint32_t now_ntp = CompactNtp(clock_->CurrentNtpTime()); + uint32_t now_ntp = CompactNtp(env_.clock().CurrentNtpTime()); uint32_t rtt_ntp = now_ntp - delay_ntp - send_time_ntp; TimeDelta rtt = CompactNtpRttToTimeDelta(rtt_ntp); @@ -999,7 +1031,7 @@ bool RTCPReceiver::HandleFir(const CommonHeader& rtcp_block, if (fir.requests().empty()) return true; - const Timestamp now = clock_->CurrentTime(); + const Timestamp now = env_.clock().CurrentTime(); for (const rtcp::Fir::Request& fir_request : fir.requests()) { // Is it our sender that is requested to generate a new keyframe. if (local_media_ssrc() != fir_request.ssrc) @@ -1049,6 +1081,22 @@ void RTCPReceiver::HandleTransportFeedback( } } +bool RTCPReceiver::HandleCongestionControlFeedback( + const CommonHeader& rtcp_block, + PacketInformation* packet_information) { + rtcp::CongestionControlFeedback feedback; + if (!feedback.Parse(rtcp_block) || feedback.packets().empty()) { + return false; + } + uint32_t first_media_source_ssrc = feedback.packets()[0].ssrc; + if (first_media_source_ssrc == local_media_ssrc() || + registered_ssrcs_.contains(first_media_source_ssrc)) { + packet_information->congestion_control_feedback.emplace( + std::move(feedback)); + } + return true; +} + void RTCPReceiver::NotifyTmmbrUpdated() { // Find bounding set. std::vector bounding = @@ -1059,7 +1107,7 @@ void RTCPReceiver::NotifyTmmbrUpdated() { uint64_t bitrate_bps = TMMBRHelp::CalcMinBitrateBps(bounding); if (bitrate_bps < std::numeric_limits::max()) { network_link_rtcp_observer_->OnReceiverEstimatedMaxBitrate( - clock_->CurrentTime(), DataRate::BitsPerSec(bitrate_bps)); + env_.clock().CurrentTime(), DataRate::BitsPerSec(bitrate_bps)); } } @@ -1119,9 +1167,16 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket( loss_notification->decodability_flag()); } } + // Network state estimate should be applied before other feedback since it may + // affect how other feedback is handled. 
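HandleXrDlrrReportBlock() above computes RTT the RFC 3611 way: the local receive time minus the peer's reported delay-since-last-RR minus the echoed last-RR send time, all three as compact NTP values (the middle 32 bits of an NTP timestamp, i.e. 16.16 fixed-point seconds), with unsigned arithmetic absorbing wraparound. A standalone sketch of that arithmetic:

#include <cstdint>

// All inputs are "compact NTP": 16.16 fixed-point seconds, so one second is
// 65536 units and wraparound happens modulo 2^32, matching the wire format.
double CompactNtpRttSeconds(uint32_t arrival_compact_ntp,
                            uint32_t delay_since_last_rr,
                            uint32_t last_rr_send_time) {
  const uint32_t rtt_compact =
      arrival_compact_ntp - delay_since_last_rr - last_rr_send_time;
  return static_cast<double>(rtt_compact) / 65536.0;
}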
+ if (network_state_estimate_observer_ && + packet_information.network_state_estimate) { + network_state_estimate_observer_->OnRemoteNetworkEstimate( + *packet_information.network_state_estimate); + } if (network_link_rtcp_observer_) { - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); if (packet_information.packet_type_flags & kRtcpRemb) { network_link_rtcp_observer_->OnReceiverEstimatedMaxBitrate( now, DataRate::BitsPerSec( @@ -1138,6 +1193,10 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket( network_link_rtcp_observer_->OnTransportFeedback( now, *packet_information.transport_feedback); } + if (packet_information.congestion_control_feedback) { + network_link_rtcp_observer_->OnCongestionControlFeedback( + now, *packet_information.congestion_control_feedback); + } } if ((packet_information.packet_type_flags & kRtcpSr) || @@ -1146,12 +1205,6 @@ void RTCPReceiver::TriggerCallbacksFromRtcpPacket( packet_information.report_block_datas); } - if (network_state_estimate_observer_ && - packet_information.network_state_estimate) { - network_state_estimate_observer_->OnRemoteNetworkEstimate( - *packet_information.network_state_estimate); - } - if (bitrate_allocation_observer_ && packet_information.target_bitrate_allocation) { bitrate_allocation_observer_->OnBitrateAllocationUpdated( @@ -1173,11 +1226,11 @@ std::vector RTCPReceiver::TmmbrReceived() { MutexLock lock(&rtcp_receiver_lock_); std::vector candidates; - Timestamp timeout = clock_->CurrentTime() - kTmmbrTimeoutInterval; + Timestamp now = env_.clock().CurrentTime(); for (auto& kv : tmmbr_infos_) { for (auto it = kv.second.tmmbr.begin(); it != kv.second.tmmbr.end();) { - if (it->second.last_updated < timeout) { + if (now - it->second.last_updated > kTmmbrTimeoutInterval) { // Erase timeout entries. 
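The TmmbrReceived() hunk above rewrites the timeout check from comparing against a precomputed cutoff (`last_updated < now - interval`) to comparing the elapsed time (`now - last_updated > interval`). The observable result is the same for ordinary clock values; the elapsed-time form presumably avoids computing `now - interval` when `now` is close to the clock origin, as with simulated clocks that start near zero. A small sketch of the two forms:

#include <cstdint>

constexpr int64_t kTimeoutMs = 5000;

// Cutoff form: precomputes `now - kTimeoutMs`, which can go negative (or,
// with unsigned time types, underflow) when `now` is smaller than the timeout.
bool TimedOutViaCutoff(int64_t now_ms, int64_t last_updated_ms) {
  const int64_t cutoff_ms = now_ms - kTimeoutMs;
  return last_updated_ms < cutoff_ms;
}

// Elapsed form: only subtracts two timestamps that are known to be ordered,
// so it stays well behaved near the clock origin.
bool TimedOutViaElapsed(int64_t now_ms, int64_t last_updated_ms) {
  return now_ms - last_updated_ms > kTimeoutMs;
}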
it = kv.second.tmmbr.erase(it); } else { diff --git a/modules/rtp_rtcp/source/rtcp_receiver.h b/modules/rtp_rtcp/source/rtcp_receiver.h index a6175d0774..1dab65f7f6 100644 --- a/modules/rtp_rtcp/source/rtcp_receiver.h +++ b/modules/rtp_rtcp/source/rtcp_receiver.h @@ -11,16 +11,20 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_H_ +#include +#include #include #include -#include +#include #include -#include "absl/types/optional.h" +#include "absl/container/inlined_vector.h" #include "api/array_view.h" +#include "api/environment/environment.h" #include "api/sequence_checker.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "api/video/video_codec_constants.h" #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -28,11 +32,11 @@ #include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" +#include "rtc_base/checks.h" #include "rtc_base/containers/flat_map.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" -#include "system_wrappers/include/ntp_time.h" namespace webrtc { @@ -56,7 +60,7 @@ class RTCPReceiver final { virtual void OnReceivedNack( const std::vector& nack_sequence_numbers) = 0; virtual void OnReceivedRtcpReportBlocks( - rtc::ArrayView report_blocks) = 0; + ArrayView report_blocks) = 0; protected: virtual ~ModuleRtpRtcp() = default; @@ -75,7 +79,7 @@ class RTCPReceiver final { } void Invalidate() { round_trip_time_.reset(); } // https://www.w3.org/TR/webrtc-stats/#dom-rtcremoteoutboundrtpstreamstats-roundtriptime - absl::optional round_trip_time() const { + std::optional round_trip_time() const { return round_trip_time_; } // https://www.w3.org/TR/webrtc-stats/#dom-rtcremoteoutboundrtpstreamstats-totalroundtriptime @@ -86,20 +90,22 @@ class RTCPReceiver final { } private: - absl::optional round_trip_time_; + std::optional round_trip_time_; TimeDelta total_round_trip_time_ = TimeDelta::Zero(); int round_trip_time_measurements_ = 0; }; - RTCPReceiver(const RtpRtcpInterface::Configuration& config, + RTCPReceiver(const Environment& env, + const RtpRtcpInterface::Configuration& config, ModuleRtpRtcp* owner); - RTCPReceiver(const RtpRtcpInterface::Configuration& config, + RTCPReceiver(const Environment& env, + const RtpRtcpInterface::Configuration& config, ModuleRtpRtcpImpl2* owner); ~RTCPReceiver(); - void IncomingPacket(rtc::ArrayView packet); + void IncomingPacket(ArrayView packet); int64_t LastReceivedReportBlockMs() const; @@ -112,24 +118,24 @@ class RTCPReceiver final { bool receiver_only() const { return receiver_only_; } // Returns stats based on the received RTCP Sender Reports. - absl::optional GetSenderReportStats() + std::optional GetSenderReportStats() const; std::vector ConsumeReceivedXrReferenceTimeInfo(); - absl::optional AverageRtt() const; - absl::optional LastRtt() const; + std::optional AverageRtt() const; + std::optional LastRtt() const; // Returns non-sender RTT metrics for the remote SSRC. NonSenderRttStats GetNonSenderRTT() const; void SetNonSenderRttMeasurement(bool enabled); - absl::optional GetAndResetXrRrRtt(); + std::optional GetAndResetXrRrRtt(); // Called once per second on the worker thread to do rtt calculations. // Returns an optional rtt value if one is available. 
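The nested stats class above keeps the latest round-trip time as an optional that can be invalidated, while the running total and the measurement count only grow, mirroring the roundTripTime, totalRoundTripTime and roundTripTimeMeasurements fields of the W3C webrtc-stats remote-outbound-rtp entry it links to. A standalone sketch of that accumulation pattern:

#include <cstdint>
#include <optional>

class RttStatsSketch {
 public:
  void AddRtt(int64_t rtt_ms) {
    last_rtt_ms_ = rtt_ms;    // latest value, reported while valid
    total_rtt_ms_ += rtt_ms;  // monotonic, survives invalidation
    ++num_measurements_;
  }
  // Called when a newer report makes the last RTT stale.
  void Invalidate() { last_rtt_ms_.reset(); }

  std::optional<int64_t> round_trip_time_ms() const { return last_rtt_ms_; }
  int64_t total_round_trip_time_ms() const { return total_rtt_ms_; }
  int round_trip_time_measurements() const { return num_measurements_; }

 private:
  std::optional<int64_t> last_rtt_ms_;
  int64_t total_rtt_ms_ = 0;
  int num_measurements_ = 0;
};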
- absl::optional OnPeriodicRttUpdate(Timestamp newer_than, - bool sending); + std::optional OnPeriodicRttUpdate(Timestamp newer_than, + bool sending); // A snapshot of Report Blocks with additional data of interest to statistics. // Within this list, the source SSRC is unique and ReportBlockData represents @@ -179,7 +185,6 @@ class RTCPReceiver final { class RegisteredSsrcs { public: static constexpr size_t kMediaSsrcIndex = 0; - static constexpr size_t kMaxSsrcs = 3; // Initializes the set of registered local SSRCS by extracting them from the // provided `config`. The `disable_sequence_checker` flag is a workaround // to be able to use a sequence checker without breaking downstream @@ -194,7 +199,7 @@ class RTCPReceiver final { private: RTC_NO_UNIQUE_ADDRESS CustomSequenceChecker packet_sequence_checker_; - absl::InlinedVector ssrcs_ + absl::InlinedVector ssrcs_ RTC_GUARDED_BY(packet_sequence_checker_); }; @@ -256,7 +261,7 @@ class RTCPReceiver final { size_t num_rtts_ = 0; }; - bool ParseCompoundPacket(rtc::ArrayView packet, + bool ParseCompoundPacket(ArrayView packet, PacketInformation* packet_information); void TriggerCallbacksFromRtcpPacket( @@ -343,6 +348,9 @@ class RTCPReceiver final { void HandleTransportFeedback(const rtcp::CommonHeader& rtcp_block, PacketInformation* packet_information) RTC_EXCLUSIVE_LOCKS_REQUIRED(rtcp_receiver_lock_); + bool HandleCongestionControlFeedback(const rtcp::CommonHeader& rtcp_block, + PacketInformation* packet_information) + RTC_EXCLUSIVE_LOCKS_REQUIRED(rtcp_receiver_lock_); bool RtcpRrTimeoutLocked(Timestamp now) RTC_EXCLUSIVE_LOCKS_REQUIRED(rtcp_receiver_lock_); @@ -350,8 +358,9 @@ class RTCPReceiver final { bool RtcpRrSequenceNumberTimeoutLocked(Timestamp now) RTC_EXCLUSIVE_LOCKS_REQUIRED(rtcp_receiver_lock_); - Clock* const clock_; + const Environment env_; const bool receiver_only_; + const bool enable_congestion_controller_feedback_; ModuleRtpRtcp* const rtp_rtcp_; // The set of registered local SSRCs. RegisteredSsrcs registered_ssrcs_; @@ -379,7 +388,7 @@ class RTCPReceiver final { // Estimated rtt, nullopt when there is no valid estimate. bool xr_rrtr_status_ RTC_GUARDED_BY(rtcp_receiver_lock_); - absl::optional xr_rr_rtt_; + std::optional xr_rr_rtt_; Timestamp oldest_tmmbr_info_ RTC_GUARDED_BY(rtcp_receiver_lock_); // Mapped by remote ssrc. 
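RegisteredSsrcs above keeps the local SSRCs in an absl::InlinedVector, and the constructor now checks the count against kMaxSimulcastStreams instead of a private kMaxSsrcs constant, so the small set stays in inline storage with no heap allocation. A standalone sketch of the idea (the capacity constant below is illustrative):

#include <cstddef>
#include <cstdint>

#include "absl/container/inlined_vector.h"

constexpr size_t kMaxLocalSsrcs = 3;  // illustrative bound, not WebRTC's value

class LocalSsrcSet {
 public:
  void Add(uint32_t ssrc) { ssrcs_.push_back(ssrc); }

  bool contains(uint32_t ssrc) const {
    for (uint32_t s : ssrcs_) {
      if (s == ssrc) return true;
    }
    return false;
  }

 private:
  // Stays in inline storage (no heap allocation) up to kMaxLocalSsrcs entries.
  absl::InlinedVector<uint32_t, kMaxLocalSsrcs> ssrcs_;
};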
diff --git a/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc b/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc index 1f5f138b83..4da2dceeb8 100644 --- a/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc @@ -10,22 +10,37 @@ #include "modules/rtp_rtcp/source/rtcp_receiver.h" +#include +#include #include +#include #include +#include #include +#include +#include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/environment/environment_factory.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_bitrate_allocator.h" +#include "api/video/video_codec_constants.h" #include "modules/rtp_rtcp/include/report_block_data.h" +#include "modules/rtp_rtcp/include/rtcp_statistics.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/mocks/mock_network_link_rtcp_observer.h" +#include "modules/rtp_rtcp/mocks/mock_network_state_estimator_observer.h" #include "modules/rtp_rtcp/source/byte_io.h" -#include "modules/rtp_rtcp/source/rtcp_packet.h" +#include "modules/rtp_rtcp/source/ntp_time_util.h" #include "modules/rtp_rtcp/source/rtcp_packet/app.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" #include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" +#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" #include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" #include "modules/rtp_rtcp/source/rtcp_packet/fir.h" #include "modules/rtp_rtcp/source/rtcp_packet/nack.h" @@ -33,15 +48,19 @@ #include "modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h" #include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" #include "modules/rtp_rtcp/source/rtcp_packet/remb.h" +#include "modules/rtp_rtcp/source/rtcp_packet/remote_estimate.h" +#include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" #include "modules/rtp_rtcp/source/rtcp_packet/sdes.h" #include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmbr.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" -#include "modules/rtp_rtcp/source/time_util.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/arraysize.h" -#include "rtc_base/fake_clock.h" +#include "rtc_base/buffer.h" #include "rtc_base/random.h" +#include "system_wrappers/include/clock.h" #include "system_wrappers/include/ntp_time.h" +#include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" @@ -64,6 +83,7 @@ using ::testing::SizeIs; using ::testing::StrEq; using ::testing::StrictMock; using ::testing::UnorderedElementsAre; +using ::webrtc::test::ExplicitKeyValueConfig; class MockRtcpPacketTypeCounterObserver : public RtcpPacketTypeCounterObserver { public: @@ -107,7 +127,7 @@ class MockModuleRtpRtcp : public RTCPReceiver::ModuleRtpRtcp { MOCK_METHOD(void, OnReceivedNack, (const std::vector&), (override)); MOCK_METHOD(void, OnReceivedRtcpReportBlocks, - (rtc::ArrayView), + (ArrayView), (override)); }; @@ -140,9 +160,8 @@ constexpr TimeDelta kEpsilon = TimeDelta::Millis(1); } // namespace struct ReceiverMocks { - ReceiverMocks() : clock(1335900000) {} - - SimulatedClock clock; + SimulatedClock clock{1335900000}; + std::string field_trials; // Callbacks to packet_type_counter_observer are frequent but 
most of the time // are not interesting. NiceMock packet_type_counter_observer; @@ -151,28 +170,32 @@ struct ReceiverMocks { StrictMock bitrate_allocation_observer; StrictMock rtp_rtcp_impl; NiceMock network_link_rtcp_observer; + NiceMock network_state_estimate_observer; + + RtpRtcpInterface::Configuration config = { + .receiver_only = false, + .intra_frame_callback = &intra_frame_observer, + .rtcp_loss_notification_observer = &rtcp_loss_notification_observer, + .network_link_rtcp_observer = &network_link_rtcp_observer, + .network_state_estimate_observer = &network_state_estimate_observer, + .bitrate_allocation_observer = &bitrate_allocation_observer, + .rtcp_packet_type_counter_observer = &packet_type_counter_observer, + .rtcp_report_interval_ms = kRtcpIntervalMs, + .local_media_ssrc = kReceiverMainSsrc, + .rtx_send_ssrc = kReceiverExtraSsrc}; }; -RtpRtcpInterface::Configuration DefaultConfiguration(ReceiverMocks* mocks) { - RtpRtcpInterface::Configuration config; - config.clock = &mocks->clock; - config.receiver_only = false; - config.rtcp_packet_type_counter_observer = - &mocks->packet_type_counter_observer; - config.network_link_rtcp_observer = &mocks->network_link_rtcp_observer; - config.intra_frame_callback = &mocks->intra_frame_observer; - config.rtcp_loss_notification_observer = - &mocks->rtcp_loss_notification_observer; - config.bitrate_allocation_observer = &mocks->bitrate_allocation_observer; - config.rtcp_report_interval_ms = kRtcpIntervalMs; - config.local_media_ssrc = kReceiverMainSsrc; - config.rtx_send_ssrc = kReceiverExtraSsrc; - return config; +RTCPReceiver Create(ReceiverMocks& mocks) { + return RTCPReceiver( + CreateEnvironment( + &mocks.clock, + std::make_unique(mocks.field_trials)), + mocks.config, &mocks.rtp_rtcp_impl); } TEST(RtcpReceiverTest, BrokenPacketIsIgnored) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); const uint8_t bad_packet[] = {0, 0, 0, 0}; EXPECT_CALL(mocks.packet_type_counter_observer, RtcpPacketTypesCounterUpdated) @@ -182,7 +205,7 @@ TEST(RtcpReceiverTest, BrokenPacketIsIgnored) { TEST(RtcpReceiverTest, InvalidFeedbackPacketIsIgnored) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); // Too short feedback packet. 
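The reworked test fixture above builds the RtpRtcpInterface::Configuration with C++20 designated initializers and replaces the DefaultConfiguration() factory with a Create() helper that assembles a test Environment from the simulated clock and a field-trial string; individual tests then only tweak mocks.config or mocks.field_trials before calling Create(). A standalone sketch of the fixture shape, with illustrative stand-in types:

#include <cstdint>
#include <string>

// Stand-ins for the real configuration and receiver types.
struct SketchConfig {
  bool receiver_only = false;
  int rtcp_report_interval_ms = 1000;
  uint32_t local_media_ssrc = 0;
};

struct SketchMocks {
  std::string field_trials;  // e.g. "Some-Trial/Enabled/"
  SketchConfig config = {
      // Designated initializers (C++20): fields must appear in declaration
      // order; everything not listed keeps its default.
      .receiver_only = false,
      .rtcp_report_interval_ms = 1000,
      .local_media_ssrc = 0x12345678,
  };
};

// Tests mutate the mocks first, then build the object under test, e.g.:
//   SketchMocks mocks;
//   mocks.config.receiver_only = true;
//   auto receiver = CreateReceiverForTest(mocks);  // hypothetical helper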
const uint8_t bad_packet[] = {0x81, rtcp::Rtpfb::kPacketType, 0, 0}; @@ -194,7 +217,7 @@ TEST(RtcpReceiverTest, InvalidFeedbackPacketIsIgnored) { TEST(RtcpReceiverTest, InjectSrPacket) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); EXPECT_FALSE(receiver.GetSenderReportStats()); @@ -210,7 +233,7 @@ TEST(RtcpReceiverTest, InjectSrPacket) { TEST(RtcpReceiverTest, InjectSrPacketFromUnknownSender) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::SenderReport sr; @@ -227,14 +250,14 @@ TEST(RtcpReceiverTest, InjectSrPacketFromUnknownSender) { TEST(RtcpReceiverTest, InjectSrPacketCalculatesRTT) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const TimeDelta kRtt = TimeDelta::Millis(123); const uint32_t kDelayNtp = 0x4321; const TimeDelta kDelay = CompactNtpRttToTimeDelta(kDelayNtp); - EXPECT_EQ(receiver.LastRtt(), absl::nullopt); + EXPECT_EQ(receiver.LastRtt(), std::nullopt); uint32_t sent_ntp = CompactNtp(mocks.clock.CurrentNtpTime()); mocks.clock.AdvanceTime(kRtt + kDelay); @@ -256,14 +279,14 @@ TEST(RtcpReceiverTest, InjectSrPacketCalculatesRTT) { TEST(RtcpReceiverTest, InjectSrPacketCalculatesNegativeRTTAsOneMs) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const TimeDelta kRtt = TimeDelta::Millis(-13); const uint32_t kDelayNtp = 0x4321; const TimeDelta kDelay = CompactNtpRttToTimeDelta(kDelayNtp); - EXPECT_EQ(receiver.LastRtt(), absl::nullopt); + EXPECT_EQ(receiver.LastRtt(), std::nullopt); uint32_t sent_ntp = CompactNtp(mocks.clock.CurrentNtpTime()); mocks.clock.AdvanceTime(kRtt + kDelay); @@ -286,7 +309,7 @@ TEST(RtcpReceiverTest, InjectSrPacketCalculatesNegativeRTTAsOneMs) { TEST(RtcpReceiverTest, TwoReportBlocksWithLastOneWithoutLastSrCalculatesRtt) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const TimeDelta kRtt = TimeDelta::Millis(125); @@ -314,7 +337,7 @@ TEST(RtcpReceiverTest, TwoReportBlocksWithLastOneWithoutLastSrCalculatesRtt) { TEST(RtcpReceiverTest, InjectRrPacket) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::ReceiverReport rr; @@ -328,7 +351,7 @@ TEST(RtcpReceiverTest, InjectRrPacket) { TEST(RtcpReceiverTest, InjectRrPacketWithReportBlockNotToUsIgnored) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::ReportBlock rb; @@ -347,7 +370,7 @@ TEST(RtcpReceiverTest, InjectRrPacketWithReportBlockNotToUsIgnored) { TEST(RtcpReceiverTest, InjectRrPacketWithOneReportBlock) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); Timestamp now = mocks.clock.CurrentTime(); @@ -368,7 +391,7 @@ TEST(RtcpReceiverTest, InjectRrPacketWithOneReportBlock) { 
TEST(RtcpReceiverTest, InjectSrPacketWithOneReportBlock) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); Timestamp now = mocks.clock.CurrentTime(); @@ -392,7 +415,7 @@ TEST(RtcpReceiverTest, InjectRrPacketWithTwoReportBlocks) { const uint32_t kCumLost[] = {13, 555}; const uint8_t kFracLost[] = {20, 11}; ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); Timestamp now = mocks.clock.CurrentTime(); @@ -470,7 +493,7 @@ TEST(RtcpReceiverTest, const int32_t kCumLost[] = {13, 555}; const uint8_t kFracLost[] = {20, 11}; ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::ReportBlock rb1; @@ -522,7 +545,7 @@ TEST(RtcpReceiverTest, TEST(RtcpReceiverTest, NotifiesNetworkLinkObserverOnReportBlocks) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::ReportBlock rb1; @@ -556,14 +579,13 @@ TEST(RtcpReceiverTest, GetRtt) { const uint32_t kSentCompactNtp = 0x1234; const uint32_t kDelayCompactNtp = 0x222; ReceiverMocks mocks; - RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks); - config.network_link_rtcp_observer = &mocks.network_link_rtcp_observer; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.network_link_rtcp_observer = &mocks.network_link_rtcp_observer; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); // No report block received. - EXPECT_EQ(receiver.LastRtt(), absl::nullopt); - EXPECT_EQ(receiver.AverageRtt(), absl::nullopt); + EXPECT_EQ(receiver.LastRtt(), std::nullopt); + EXPECT_EQ(receiver.AverageRtt(), std::nullopt); rtcp::ReportBlock rb; rb.SetMediaSsrc(kReceiverMainSsrc); @@ -581,14 +603,14 @@ TEST(RtcpReceiverTest, GetRtt) { receiver.IncomingPacket(rr.Build()); EXPECT_EQ(receiver.LastReceivedReportBlockMs(), now.ms()); - EXPECT_NE(receiver.LastRtt(), absl::nullopt); - EXPECT_NE(receiver.AverageRtt(), absl::nullopt); + EXPECT_NE(receiver.LastRtt(), std::nullopt); + EXPECT_NE(receiver.AverageRtt(), std::nullopt); } // App packets are ignored. 
TEST(RtcpReceiverTest, InjectApp) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::App app; @@ -603,9 +625,8 @@ TEST(RtcpReceiverTest, InjectApp) { TEST(RtcpReceiverTest, InjectSdesWithOneChunk) { ReceiverMocks mocks; MockCnameCallbackImpl callback; - RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks); - config.rtcp_cname_callback = &callback; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.rtcp_cname_callback = &callback; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const char kCname[] = "alice@host"; @@ -618,7 +639,7 @@ TEST(RtcpReceiverTest, InjectSdesWithOneChunk) { TEST(RtcpReceiverTest, InjectByePacketRemovesReportBlocks) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::ReportBlock rb1; @@ -652,7 +673,7 @@ TEST(RtcpReceiverTest, InjectByePacketRemovesReportBlocks) { TEST(RtcpReceiverTest, InjectByePacketRemovesReferenceTimeInfo) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::ExtendedReports xr; @@ -671,7 +692,7 @@ TEST(RtcpReceiverTest, InjectByePacketRemovesReferenceTimeInfo) { TEST(RtcpReceiverTest, InjectPliPacket) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::Pli pli; @@ -688,7 +709,7 @@ TEST(RtcpReceiverTest, InjectPliPacket) { TEST(RtcpReceiverTest, PliPacketNotToUsIgnored) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::Pli pli; @@ -704,7 +725,7 @@ TEST(RtcpReceiverTest, PliPacketNotToUsIgnored) { TEST(RtcpReceiverTest, InjectFirPacket) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::Fir fir; @@ -721,7 +742,7 @@ TEST(RtcpReceiverTest, InjectFirPacket) { TEST(RtcpReceiverTest, FirPacketNotToUsIgnored) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::Fir fir; @@ -733,7 +754,7 @@ TEST(RtcpReceiverTest, FirPacketNotToUsIgnored) { TEST(RtcpReceiverTest, ExtendedReportsPacketWithZeroReportBlocksIgnored) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::ExtendedReports xr; @@ -744,7 +765,7 @@ TEST(RtcpReceiverTest, ExtendedReportsPacketWithZeroReportBlocksIgnored) { TEST(RtcpReceiverTest, InjectExtendedReportsReceiverReferenceTimePacket) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const NtpTime kNtp(0x10203, 0x40506); @@ -769,10 +790,9 @@ TEST(RtcpReceiverTest, InjectExtendedReportsReceiverReferenceTimePacket) { TEST(RtcpReceiverTest, ExtendedReportsDlrrPacketNotToUsIgnored) { ReceiverMocks mocks; - auto config = 
DefaultConfiguration(&mocks); // Allow calculate rtt using dlrr/rrtr, simulating media receiver side. - config.non_sender_rtt_measurement = true; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.non_sender_rtt_measurement = true; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::ExtendedReports xr; @@ -791,9 +811,8 @@ TEST(RtcpReceiverTest, ExtendedReportsDlrrPacketNotToUsIgnored) { TEST(RtcpReceiverTest, InjectExtendedReportsDlrrPacketWithSubBlock) { ReceiverMocks mocks; - auto config = DefaultConfiguration(&mocks); - config.non_sender_rtt_measurement = true; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.non_sender_rtt_measurement = true; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const uint32_t kLastRR = 0x12345; @@ -819,9 +838,8 @@ TEST(RtcpReceiverTest, InjectExtendedReportsDlrrPacketWithSubBlock) { TEST(RtcpReceiverTest, InjectExtendedReportsDlrrPacketWithMultipleSubBlocks) { ReceiverMocks mocks; - auto config = DefaultConfiguration(&mocks); - config.non_sender_rtt_measurement = true; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.non_sender_rtt_measurement = true; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const uint32_t kLastRR = 0x12345; @@ -848,9 +866,8 @@ TEST(RtcpReceiverTest, InjectExtendedReportsDlrrPacketWithMultipleSubBlocks) { TEST(RtcpReceiverTest, InjectExtendedReportsPacketWithMultipleReportBlocks) { ReceiverMocks mocks; - auto config = DefaultConfiguration(&mocks); - config.non_sender_rtt_measurement = true; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.non_sender_rtt_measurement = true; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::Rrtr rrtr; @@ -869,9 +886,8 @@ TEST(RtcpReceiverTest, InjectExtendedReportsPacketWithMultipleReportBlocks) { TEST(RtcpReceiverTest, InjectExtendedReportsPacketWithUnknownReportBlock) { ReceiverMocks mocks; - auto config = DefaultConfiguration(&mocks); - config.non_sender_rtt_measurement = true; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.non_sender_rtt_measurement = true; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::Rrtr rrtr; @@ -880,7 +896,7 @@ TEST(RtcpReceiverTest, InjectExtendedReportsPacketWithUnknownReportBlock) { xr.SetRrtr(rrtr); xr.AddDlrrItem(ReceiveTimeInfo(kReceiverMainSsrc, 0x12345, 0x67890)); - rtc::Buffer packet = xr.Build(); + Buffer packet = xr.Build(); // Modify the DLRR block to have an unsupported block type, from 5 to 6. 
ASSERT_EQ(5, packet.data()[20]); packet.data()[20] = 6; @@ -901,9 +917,8 @@ TEST(RtcpReceiverTest, InjectExtendedReportsPacketWithUnknownReportBlock) { TEST(RtcpReceiverTest, TestExtendedReportsRrRttInitiallyFalse) { ReceiverMocks mocks; - auto config = DefaultConfiguration(&mocks); - config.non_sender_rtt_measurement = true; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.non_sender_rtt_measurement = true; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); EXPECT_FALSE(receiver.GetAndResetXrRrRtt()); @@ -916,9 +931,8 @@ TEST(RtcpReceiverTest, TestExtendedReportsRrRttInitiallyFalse) { TEST(RtcpReceiverTest, RttCalculatedAfterExtendedReportsDlrr) { ReceiverMocks mocks; - auto config = DefaultConfiguration(&mocks); - config.non_sender_rtt_measurement = true; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.non_sender_rtt_measurement = true; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); Random rand(0x0123456789abcdef); @@ -948,9 +962,8 @@ TEST(RtcpReceiverTest, RttCalculatedAfterExtendedReportsDlrr) { // the config struct. TEST(RtcpReceiverTest, SetterEnablesReceiverRtt) { ReceiverMocks mocks; - auto config = DefaultConfiguration(&mocks); - config.non_sender_rtt_measurement = false; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.non_sender_rtt_measurement = false; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); receiver.SetNonSenderRttMeasurement(true); @@ -981,9 +994,8 @@ TEST(RtcpReceiverTest, SetterEnablesReceiverRtt) { // the config struct. TEST(RtcpReceiverTest, DoesntCalculateRttOnReceivedDlrr) { ReceiverMocks mocks; - auto config = DefaultConfiguration(&mocks); - config.non_sender_rtt_measurement = true; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.non_sender_rtt_measurement = true; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); receiver.SetNonSenderRttMeasurement(false); @@ -1012,9 +1024,8 @@ TEST(RtcpReceiverTest, DoesntCalculateRttOnReceivedDlrr) { TEST(RtcpReceiverTest, XrDlrrCalculatesNegativeRttAsOneMillisecond) { ReceiverMocks mocks; - auto config = DefaultConfiguration(&mocks); - config.non_sender_rtt_measurement = true; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.non_sender_rtt_measurement = true; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); Random rand(0x0123456789abcdef); @@ -1043,9 +1054,8 @@ TEST(RtcpReceiverTest, XrDlrrCalculatesNegativeRttAsOneMillisecond) { // Test receiver RTT stats with multiple measurements. TEST(RtcpReceiverTest, ReceiverRttWithMultipleMeasurements) { ReceiverMocks mocks; - auto config = DefaultConfiguration(&mocks); - config.non_sender_rtt_measurement = true; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.non_sender_rtt_measurement = true; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); Random rand(0x0123456789abcdef); @@ -1098,9 +1108,8 @@ TEST(RtcpReceiverTest, ReceiverRttWithMultipleMeasurements) { // https://www.w3.org/TR/webrtc-stats/#dom-rtcremoteoutboundrtpstreamstats-roundtriptime. 
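The XrDlrrCalculatesNegativeRttAsOneMillisecond test above (like the earlier InjectSrPacketCalculatesNegativeRTTAsOneMs) pins down what happens when clock skew makes the computed RTT come out negative: it is reported as one millisecond rather than as a negative duration. An illustrative clamp, grounded only in the test names:

#include <cstdint>

// Clock skew between peers can make (arrival - delay - send_time) negative;
// the tests above expect such values to be reported as 1 ms.
int64_t ClampComputedRttMs(int64_t computed_rtt_ms) {
  return computed_rtt_ms < 0 ? 1 : computed_rtt_ms;
}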
TEST(RtcpReceiverTest, ReceiverRttResetOnSrWithoutXr) { ReceiverMocks mocks; - auto config = DefaultConfiguration(&mocks); - config.non_sender_rtt_measurement = true; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.non_sender_rtt_measurement = true; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); Random rand(0x0123456789abcdef); @@ -1142,9 +1151,8 @@ TEST(RtcpReceiverTest, ReceiverRttResetOnSrWithoutXr) { // https://www.w3.org/TR/webrtc-stats/#dom-rtcremoteoutboundrtpstreamstats-roundtriptime. TEST(RtcpReceiverTest, ReceiverRttResetOnDlrrWithZeroTimestamp) { ReceiverMocks mocks; - auto config = DefaultConfiguration(&mocks); - config.non_sender_rtt_measurement = true; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.non_sender_rtt_measurement = true; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); Random rand(0x0123456789abcdef); @@ -1181,9 +1189,8 @@ TEST(RtcpReceiverTest, ReceiverRttResetOnDlrrWithZeroTimestamp) { // Check that the receiver RTT works correctly when the remote SSRC changes. TEST(RtcpReceiverTest, ReceiverRttWithMultipleRemoteSsrcs) { ReceiverMocks mocks; - auto config = DefaultConfiguration(&mocks); - config.non_sender_rtt_measurement = false; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.non_sender_rtt_measurement = false; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); receiver.SetNonSenderRttMeasurement(true); @@ -1235,7 +1242,7 @@ TEST(RtcpReceiverTest, ReceiverRttWithMultipleRemoteSsrcs) { TEST(RtcpReceiverTest, ConsumeReceivedXrReferenceTimeInfoInitiallyEmpty) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); EXPECT_THAT(receiver.ConsumeReceivedXrReferenceTimeInfo(), IsEmpty()); @@ -1243,7 +1250,7 @@ TEST(RtcpReceiverTest, ConsumeReceivedXrReferenceTimeInfoInitiallyEmpty) { TEST(RtcpReceiverTest, ConsumeReceivedXrReferenceTimeInfo) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const NtpTime kNtp(0x10203, 0x40506); @@ -1270,7 +1277,7 @@ TEST(RtcpReceiverTest, ConsumeReceivedXrReferenceTimeInfo) { TEST(RtcpReceiverTest, ReceivedRrtrFromSameSsrcUpdatesReceivedReferenceTimeInfo) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const NtpTime kNtp1(0x10203, 0x40506); @@ -1300,7 +1307,7 @@ TEST(RtcpReceiverTest, TEST(RtcpReceiverTest, StoresLastReceivedRrtrPerSsrc) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const size_t kNumBufferedReports = 1; @@ -1332,7 +1339,7 @@ TEST(RtcpReceiverTest, StoresLastReceivedRrtrPerSsrc) { TEST(RtcpReceiverTest, ReceiveReportTimeout) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const uint16_t kSequenceNumber = 1234; @@ -1404,7 +1411,7 @@ TEST(RtcpReceiverTest, ReceiveReportTimeout) { TEST(RtcpReceiverTest, TmmbrReceivedWithNoIncomingPacket) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + 
RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); EXPECT_THAT(receiver.TmmbrReceived(), IsEmpty()); @@ -1412,7 +1419,7 @@ TEST(RtcpReceiverTest, TmmbrReceivedWithNoIncomingPacket) { TEST(RtcpReceiverTest, TmmbrPacketAccepted) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const DataRate kBitrate = DataRate::BitsPerSec(30'000); @@ -1440,7 +1447,7 @@ TEST(RtcpReceiverTest, TmmbrPacketAccepted) { TEST(RtcpReceiverTest, TmmbrPacketNotForUsIgnored) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const uint32_t kBitrateBps = 30000; @@ -1464,7 +1471,7 @@ TEST(RtcpReceiverTest, TmmbrPacketNotForUsIgnored) { TEST(RtcpReceiverTest, TmmbrPacketZeroRateIgnored) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); auto tmmbr = std::make_unique(); @@ -1486,7 +1493,7 @@ TEST(RtcpReceiverTest, TmmbrPacketZeroRateIgnored) { TEST(RtcpReceiverTest, TmmbrThreeConstraintsTimeOut) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); // Inject 3 packets "from" kSenderSsrc, kSenderSsrc+1, kSenderSsrc+2. @@ -1528,17 +1535,15 @@ TEST(RtcpReceiverTest, VerifyBlockAndTimestampObtainedFromReportBlockDataObserver) { ReceiverMocks mocks; MockReportBlockDataObserverImpl observer; - RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks); - config.report_block_data_observer = &observer; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.report_block_data_observer = &observer; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const uint8_t kFractionLoss = 3; const uint32_t kCumulativeLoss = 7; const uint32_t kJitter = 9; const uint16_t kSequenceNumber = 1234; - const int64_t kNtpNowMs = - mocks.clock.CurrentNtpInMilliseconds() - rtc::kNtpJan1970Millisecs; + const Timestamp kUtcNow = Clock::NtpToUtc(mocks.clock.CurrentNtpTime()); rtcp::ReportBlock rtcp_block; rtcp_block.SetMediaSsrc(kReceiverMainSsrc); @@ -1559,8 +1564,7 @@ TEST(RtcpReceiverTest, EXPECT_EQ(rtcp_block.extended_high_seq_num(), report_block.extended_highest_sequence_number()); EXPECT_EQ(rtcp_block.jitter(), report_block.jitter()); - EXPECT_EQ(report_block.report_block_timestamp_utc(), - Timestamp::Millis(kNtpNowMs)); + EXPECT_EQ(report_block.report_block_timestamp_utc(), kUtcNow); // No RTT is calculated in this test. 
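// ---------------------------------------------------------------------------
// Editor's note, not part of the patch: the test above swaps the manual
// NTP-to-UTC millisecond arithmetic (CurrentNtpInMilliseconds() minus
// rtc::kNtpJan1970Millisecs) for Clock::NtpToUtc(). A minimal sketch of the
// equivalence the test relies on, assuming only NtpTime::ToMs() and the
// 1900 -> 1970 epoch offset:
#include <cstdint>
#include "api/units/timestamp.h"
#include "system_wrappers/include/ntp_time.h"

namespace {
webrtc::Timestamp NtpToUtcSketch(webrtc::NtpTime ntp) {
  // 70 years between the NTP epoch (1900-01-01) and the Unix epoch
  // (1970-01-01), expressed in milliseconds.
  constexpr int64_t kNtpJan1970Ms = 2'208'988'800'000;
  return webrtc::Timestamp::Millis(ntp.ToMs() - kNtpJan1970Ms);
}
}  // namespace
// ---------------------------------------------------------------------------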
EXPECT_EQ(0u, report_block.num_rtts()); }); @@ -1571,9 +1575,8 @@ TEST(RtcpReceiverTest, TEST(RtcpReceiverTest, VerifyRttObtainedFromReportBlockDataObserver) { ReceiverMocks mocks; MockReportBlockDataObserverImpl observer; - RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks); - config.report_block_data_observer = &observer; - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + mocks.config.report_block_data_observer = &observer; + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); // To avoid issues with rounding due to different way to represent time units, @@ -1617,7 +1620,7 @@ TEST(RtcpReceiverTest, VerifyRttObtainedFromReportBlockDataObserver) { TEST(RtcpReceiverTest, GetReportBlockDataAfterOneReportBlock) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const uint16_t kSequenceNumber = 1234; @@ -1641,7 +1644,7 @@ TEST(RtcpReceiverTest, GetReportBlockDataAfterOneReportBlock) { TEST(RtcpReceiverTest, GetReportBlockDataAfterTwoReportBlocksOfSameSsrc) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const uint16_t kSequenceNumber1 = 1234; @@ -1679,7 +1682,7 @@ TEST(RtcpReceiverTest, GetReportBlockDataAfterTwoReportBlocksOfSameSsrc) { TEST(RtcpReceiverTest, GetReportBlockDataAfterTwoReportBlocksOfDifferentSsrcs) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const uint16_t kSequenceNumber1 = 1234; @@ -1719,12 +1722,11 @@ TEST(RtcpReceiverTest, GetReportBlockDataAfterTwoReportBlocksOfDifferentSsrcs) { TEST(RtcpReceiverTest, NotifiesNetworkLinkObserverOnTransportFeedback) { ReceiverMocks mocks; - RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks); - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::TransportFeedback packet; - packet.SetMediaSsrc(config.local_media_ssrc); + packet.SetMediaSsrc(mocks.config.local_media_ssrc); packet.SetSenderSsrc(kSenderSsrc); packet.SetBase(123, Timestamp::Millis(1)); packet.AddReceivedPacket(123, Timestamp::Millis(1)); @@ -1740,15 +1742,135 @@ TEST(RtcpReceiverTest, NotifiesNetworkLinkObserverOnTransportFeedback) { receiver.IncomingPacket(packet.Build()); } +TEST(RtcpReceiverTest, NotifiesNetworkLinkObserverOnCongestionControlFeedback) { + ReceiverMocks mocks; + mocks.field_trials = "WebRTC-RFC8888CongestionControlFeedback/Enabled/"; + RTCPReceiver receiver = Create(mocks); + + rtcp::CongestionControlFeedback packet( + {{ + .ssrc = mocks.config.local_media_ssrc, + .sequence_number = 1, + }}, + /*report_timestamp_compact_ntp=*/324); + packet.SetSenderSsrc(kSenderSsrc); + + EXPECT_CALL( + mocks.network_link_rtcp_observer, + OnCongestionControlFeedback( + mocks.clock.CurrentTime(), + Property(&rtcp::CongestionControlFeedback::packets, SizeIs(1)))); + receiver.IncomingPacket(packet.Build()); +} + +TEST(RtcpReceiverTest, FiltersCongestionControlFeedbackOnFirstSsrc) { + ReceiverMocks mocks_1; + mocks_1.field_trials = "WebRTC-RFC8888CongestionControlFeedback/Enabled/"; + RTCPReceiver receiver_1 = Create(mocks_1); + + ReceiverMocks mocks_2; + mocks_2.field_trials = "WebRTC-RFC8888CongestionControlFeedback/Enabled/"; + 
mocks_2.config.local_media_ssrc = 789; + mocks_2.config.rtx_send_ssrc = 345; + RTCPReceiver receiver_2 = Create(mocks_2); + + rtcp::CongestionControlFeedback packet( + {{ + .ssrc = mocks_2.config.local_media_ssrc, + .sequence_number = 1, + }}, + /*report_timestamp_compact_ntp=*/324); + packet.SetSenderSsrc(kSenderSsrc); + + EXPECT_CALL(mocks_1.network_link_rtcp_observer, OnCongestionControlFeedback) + .Times(0); + EXPECT_CALL(mocks_2.network_link_rtcp_observer, OnCongestionControlFeedback) + .Times(1); + receiver_1.IncomingPacket(packet.Build()); + receiver_2.IncomingPacket(packet.Build()); +} + +TEST(RtcpReceiverTest, + NotifiesNetworkStateEstimateObserverOnRemoteNetworkEstimate) { + ReceiverMocks mocks; + RTCPReceiver receiver = Create(mocks); + receiver.SetRemoteSSRC(kSenderSsrc); + + NetworkStateEstimate estimate; + estimate.link_capacity_lower = DataRate::BitsPerSec(1000); + estimate.link_capacity_upper = DataRate::BitsPerSec(10000); + rtcp::RemoteEstimate remote_estimate; + remote_estimate.SetEstimate(estimate); + + EXPECT_CALL(mocks.network_state_estimate_observer, + OnRemoteNetworkEstimate( + AllOf(Field(&NetworkStateEstimate::link_capacity_lower, + DataRate::BitsPerSec(1000)), + Field(&NetworkStateEstimate::link_capacity_upper, + DataRate::BitsPerSec(10000))))); + + receiver.IncomingPacket(remote_estimate.Build()); +} + +TEST(RtcpReceiverTest, + NotifiesNetworkStateEstimateObserverBeforeNetworkLinkObserver) { + ReceiverMocks mocks; + RTCPReceiver receiver = Create(mocks); + receiver.SetRemoteSSRC(kSenderSsrc); + + NetworkStateEstimate estimate; + estimate.link_capacity_lower = DataRate::BitsPerSec(1000); + estimate.link_capacity_upper = DataRate::BitsPerSec(10000); + std::unique_ptr remote_estimate = + std::make_unique(); + remote_estimate->SetEstimate(estimate); + std::unique_ptr feedback_packet = + std::make_unique(); + feedback_packet->SetMediaSsrc(mocks.config.local_media_ssrc); + feedback_packet->SetSenderSsrc(kSenderSsrc); + feedback_packet->SetBase(123, Timestamp::Millis(1)); + feedback_packet->AddReceivedPacket(123, Timestamp::Millis(1)); + rtcp::CompoundPacket compound; + compound.Append(std::move(remote_estimate)); + compound.Append(std::move(feedback_packet)); + + InSequence s; + EXPECT_CALL(mocks.network_state_estimate_observer, OnRemoteNetworkEstimate); + EXPECT_CALL(mocks.network_link_rtcp_observer, OnTransportFeedback); + receiver.IncomingPacket(compound.Build()); +} + +TEST(RtcpReceiverTest, HandlesInvalidCongestionControlFeedback) { + ReceiverMocks mocks; + mocks.field_trials = "WebRTC-RFC8888CongestionControlFeedback/Enabled/"; + RTCPReceiver receiver = Create(mocks); + receiver.SetRemoteSSRC(kSenderSsrc); + + rtcp::CongestionControlFeedback packet({{ + .ssrc = 123, + .sequence_number = 1, + }}, + /*report_timestamp_compact_ntp=*/324); + packet.SetSenderSsrc(kSenderSsrc); + Buffer built_packet = packet.Build(); + // Modify the CongestionControlFeedback packet so that it is invalid. 
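// ---------------------------------------------------------------------------
// Editor's sketch, not part of the patch: what a consumer of the new RFC 8888
// path might look like. The callback shape (a Timestamp plus the parsed
// rtcp::CongestionControlFeedback) is taken from the EXPECT_CALLs above and
// from RtcpTransceiverImpl::HandleCongestionControlFeedback further down.
// That the interface lives in rtp_rtcp_defines.h and that the other
// NetworkLinkRtcpObserver methods may be left un-overridden are assumptions.
#include "api/units/timestamp.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h"
#include "rtc_base/logging.h"

class LoggingLinkObserver : public webrtc::NetworkLinkRtcpObserver {
 public:
  void OnCongestionControlFeedback(
      webrtc::Timestamp receive_time,
      const webrtc::rtcp::CongestionControlFeedback& feedback) override {
    RTC_LOG(LS_INFO) << "RFC 8888 feedback at " << receive_time.ms() << " ms, "
                     << feedback.packets().size() << " packet report(s).";
  }
};
// ---------------------------------------------------------------------------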
+ const size_t kNumReportsOffset = 14; + ByteWriter::WriteBigEndian(&built_packet.data()[kNumReportsOffset], + 42); + + EXPECT_CALL(mocks.network_link_rtcp_observer, OnCongestionControlFeedback) + .Times(0); + receiver.IncomingPacket(built_packet); +} + TEST(RtcpReceiverTest, NotifiesNetworkLinkObserverOnTransportFeedbackOnRtxSsrc) { ReceiverMocks mocks; - RtpRtcpInterface::Configuration config = DefaultConfiguration(&mocks); - RTCPReceiver receiver(config, &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::TransportFeedback packet; - packet.SetMediaSsrc(*config.rtx_send_ssrc); + packet.SetMediaSsrc(*mocks.config.rtx_send_ssrc); packet.SetSenderSsrc(kSenderSsrc); packet.SetBase(1, Timestamp::Millis(1)); packet.AddReceivedPacket(1, Timestamp::Millis(1)); @@ -1760,7 +1882,7 @@ TEST(RtcpReceiverTest, TEST(RtcpReceiverTest, DoesNotNotifyNetworkLinkObserverOnTransportFeedbackForUnregistedSsrc) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::TransportFeedback packet; @@ -1775,7 +1897,7 @@ TEST(RtcpReceiverTest, TEST(RtcpReceiverTest, NotifiesNetworkLinkObserverOnRemb) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::Remb remb; @@ -1790,7 +1912,7 @@ TEST(RtcpReceiverTest, NotifiesNetworkLinkObserverOnRemb) { TEST(RtcpReceiverTest, HandlesInvalidTransportFeedback) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); // Send a compound packet with a TransportFeedback followed by something else. @@ -1807,7 +1929,7 @@ TEST(RtcpReceiverTest, HandlesInvalidTransportFeedback) { rtcp::CompoundPacket compound; compound.Append(std::move(packet)); compound.Append(std::move(remb)); - rtc::Buffer built_packet = compound.Build(); + Buffer built_packet = compound.Build(); // Modify the TransportFeedback packet so that it is invalid. 
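// ---------------------------------------------------------------------------
// Editor's sketch, not part of the patch: the corruption above is done with
// the ByteWriter helpers from modules/rtp_rtcp/source/byte_io.h. A minimal
// big-endian round trip, assuming a 16-bit field as in the test:
#include <cstdint>
#include "modules/rtp_rtcp/source/byte_io.h"
#include "rtc_base/checks.h"

namespace {
void ByteIoRoundTrip() {
  uint8_t buf[2];
  webrtc::ByteWriter<uint16_t>::WriteBigEndian(buf, 42);  // buf = {0x00, 0x2A}
  uint16_t value = webrtc::ByteReader<uint16_t>::ReadBigEndian(buf);
  RTC_DCHECK_EQ(value, 42);
}
}  // namespace
// ---------------------------------------------------------------------------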
const size_t kStatusCountOffset = 14; @@ -1824,7 +1946,7 @@ TEST(RtcpReceiverTest, HandlesInvalidTransportFeedback) { TEST(RtcpReceiverTest, Nack) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const uint16_t kNackList1[] = {1, 2, 3, 5}; @@ -1879,7 +2001,7 @@ TEST(RtcpReceiverTest, Nack) { TEST(RtcpReceiverTest, NackNotForUsIgnored) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); const uint16_t kNackList1[] = {1, 2, 3, 5}; @@ -1898,7 +2020,7 @@ TEST(RtcpReceiverTest, NackNotForUsIgnored) { TEST(RtcpReceiverTest, ForceSenderReport) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); rtcp::RapidResyncRequest rr; @@ -1911,7 +2033,7 @@ TEST(RtcpReceiverTest, ForceSenderReport) { TEST(RtcpReceiverTest, ReceivesTargetBitrate) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); VideoBitrateAllocation expected_allocation; @@ -1945,7 +2067,7 @@ TEST(RtcpReceiverTest, ReceivesTargetBitrate) { TEST(RtcpReceiverTest, HandlesIncorrectTargetBitrate) { ReceiverMocks mocks; - RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); VideoBitrateAllocation expected_allocation; @@ -1968,7 +2090,7 @@ TEST(RtcpReceiverTest, HandlesIncorrectTargetBitrate) { TEST(RtcpReceiverTest, ChangeLocalMediaSsrc) { ReceiverMocks mocks; // Construct a receiver with `kReceiverMainSsrc` (default) local media ssrc. 
- RTCPReceiver receiver(DefaultConfiguration(&mocks), &mocks.rtp_rtcp_impl); + RTCPReceiver receiver = Create(mocks); receiver.SetRemoteSSRC(kSenderSsrc); constexpr uint32_t kSecondarySsrc = kReceiverMainSsrc + 1; diff --git a/modules/rtp_rtcp/source/rtcp_sender.cc b/modules/rtp_rtcp/source/rtcp_sender.cc index 9ca092c0cf..c08fd08a73 100644 --- a/modules/rtp_rtcp/source/rtcp_sender.cc +++ b/modules/rtp_rtcp/source/rtcp_sender.cc @@ -13,20 +13,30 @@ #include // memcpy #include // std::min +#include #include +#include +#include #include +#include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/environment/environment.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/rtp_headers.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_codec_constants.h" #include "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/ntp_time_util.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/app.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" -#include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h" +#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" #include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" #include "modules/rtp_rtcp/source/rtcp_packet/fir.h" #include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" @@ -34,18 +44,21 @@ #include "modules/rtp_rtcp/source/rtcp_packet/pli.h" #include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" #include "modules/rtp_rtcp/source/rtcp_packet/remb.h" +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" +#include "modules/rtp_rtcp/source/rtcp_packet/rrtr.h" #include "modules/rtp_rtcp/source/rtcp_packet/sdes.h" #include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h" +#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmbn.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmbr.h" -#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" -#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" -#include "modules/rtp_rtcp/source/time_util.h" #include "modules/rtp_rtcp/source/tmmbr_help.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/trace_event.h" namespace webrtc { @@ -77,7 +90,7 @@ class RTCPSender::PacketSender { // Sends pending rtcp packet. 
void Send() { if (index_ > 0) { - callback_(rtc::ArrayView(buffer_, index_)); + callback_(ArrayView(buffer_, index_)); index_ = 0; } } @@ -124,10 +137,8 @@ RTCPSender::Configuration RTCPSender::Configuration::FromRtpRtcpConfiguration( RTCPSender::Configuration result; result.audio = configuration.audio; result.local_media_ssrc = configuration.local_media_ssrc; - result.clock = configuration.clock; result.outgoing_transport = configuration.outgoing_transport; result.non_sender_rtt_measurement = configuration.non_sender_rtt_measurement; - result.event_log = configuration.event_log; if (configuration.rtcp_report_interval_ms) { result.rtcp_report_interval = TimeDelta::Millis(configuration.rtcp_report_interval_ms); @@ -138,13 +149,12 @@ RTCPSender::Configuration RTCPSender::Configuration::FromRtpRtcpConfiguration( return result; } -RTCPSender::RTCPSender(Configuration config) - : audio_(config.audio), +RTCPSender::RTCPSender(const Environment& env, Configuration config) + : env_(env), + audio_(config.audio), ssrc_(config.local_media_ssrc), - clock_(config.clock), - random_(clock_->TimeInMicroseconds()), + random_(env_.clock().TimeInMicroseconds()), method_(RtcpMode::kOff), - event_log_(config.event_log), transport_(config.outgoing_transport), report_interval_(config.rtcp_report_interval.value_or( TimeDelta::Millis(config.audio ? kDefaultAudioReportInterval @@ -197,7 +207,7 @@ void RTCPSender::SetRTCPStatus(RtcpMode new_method) { MutexLock lock(&mutex_rtcp_sender_); if (new_method == RtcpMode::kOff) { - next_time_to_send_rtcp_ = absl::nullopt; + next_time_to_send_rtcp_ = std::nullopt; } else if (method_ == RtcpMode::kOff) { // When switching on, reschedule the next packet SetNextRtcpSendEvaluationDuration(report_interval_ / 2); @@ -210,25 +220,10 @@ bool RTCPSender::Sending() const { return sending_; } -void RTCPSender::SetSendingStatus(const FeedbackState& feedback_state, +void RTCPSender::SetSendingStatus(const FeedbackState& /* feedback_state */, bool sending) { - bool sendRTCPBye = false; - { - MutexLock lock(&mutex_rtcp_sender_); - - if (method_ != RtcpMode::kOff) { - if (sending == false && sending_ == true) { - // Trigger RTCP bye - sendRTCPBye = true; - } - } - sending_ = sending; - } - if (sendRTCPBye) { - if (SendRTCP(feedback_state, kRtcpBye) != 0) { - RTC_LOG(LS_WARNING) << "Failed to send RTCP BYE"; - } - } + MutexLock lock(&mutex_rtcp_sender_); + sending_ = sending; } void RTCPSender::SetNonSenderRttMeasurement(bool enabled) { @@ -242,14 +237,12 @@ int32_t RTCPSender::SendLossNotification(const FeedbackState& feedback_state, bool decodability_flag, bool buffering_allowed) { int32_t error_code = -1; - auto callback = [&](rtc::ArrayView packet) { + auto callback = [&](ArrayView packet) { transport_->SendRtcp(packet); error_code = 0; - if (event_log_) { - event_log_->Log(std::make_unique(packet)); - } + env_.event_log().Log(std::make_unique(packet)); }; - absl::optional sender; + std::optional sender; { MutexLock lock(&mutex_rtcp_sender_); @@ -317,8 +310,8 @@ void RTCPSender::SetTimestampOffset(uint32_t timestamp_offset) { } void RTCPSender::SetLastRtpTime(uint32_t rtp_timestamp, - absl::optional capture_time, - absl::optional payload_type) { + std::optional capture_time, + std::optional payload_type) { MutexLock lock(&mutex_rtcp_sender_); // For compatibility with clients who don't set payload type correctly on all // calls. 
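// ---------------------------------------------------------------------------
// Editor's note, not part of the patch: SetSendingStatus() above no longer
// sends an implicit RTCP BYE (see also the renamed
// StopSendingDoesNotTriggersBye test in rtcp_sender_unittest.cc below).
// A caller that still wants the old behaviour can request the BYE explicitly,
// mirroring the removed block; StopSendingWithBye is a hypothetical helper:
#include "modules/rtp_rtcp/source/rtcp_sender.h"
#include "rtc_base/logging.h"

void StopSendingWithBye(webrtc::RTCPSender& rtcp_sender,
                        const webrtc::RTCPSender::FeedbackState& state) {
  rtcp_sender.SetSendingStatus(state, /*sending=*/false);
  if (rtcp_sender.SendRTCP(state, webrtc::kRtcpBye) != 0) {
    RTC_LOG(LS_WARNING) << "Failed to send RTCP BYE";
  }
}
// ---------------------------------------------------------------------------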
@@ -328,7 +321,7 @@ void RTCPSender::SetLastRtpTime(uint32_t rtp_timestamp, last_rtp_timestamp_ = rtp_timestamp; if (!capture_time.has_value()) { // We don't currently get a capture time from VoiceEngine. - last_frame_capture_time_ = clock_->CurrentTime(); + last_frame_capture_time_ = env_.clock().CurrentTime(); } else { last_frame_capture_time_ = *capture_time; } @@ -362,7 +355,7 @@ int32_t RTCPSender::SetCNAME(absl::string_view c_name) { } bool RTCPSender::TimeToSendRTCPReport(bool send_keyframe_before_rtp) const { - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); MutexLock lock(&mutex_rtcp_sender_); RTC_DCHECK( @@ -402,7 +395,7 @@ void RTCPSender::BuildSR(const RtcpContext& ctx, PacketSender& sender) { rtcp::SenderReport report; report.SetSenderSsrc(ssrc_); - report.SetNtp(clock_->ConvertTimestampToNtpTime(ctx.now_)); + report.SetNtp(env_.clock().ConvertTimestampToNtpTime(ctx.now_)); report.SetRtpTimestamp(rtp_timestamp); report.SetPacketCount(ctx.feedback_state_.packets_sent); report.SetOctetCount(ctx.feedback_state_.media_bytes_sent); @@ -410,7 +403,7 @@ void RTCPSender::BuildSR(const RtcpContext& ctx, PacketSender& sender) { sender.AppendPacket(report); } -void RTCPSender::BuildSDES(const RtcpContext& ctx, PacketSender& sender) { +void RTCPSender::BuildSDES(const RtcpContext& /* ctx */, PacketSender& sender) { size_t length_cname = cname_.length(); RTC_CHECK_LT(length_cname, RTCP_CNAME_SIZE); @@ -428,7 +421,7 @@ void RTCPSender::BuildRR(const RtcpContext& ctx, PacketSender& sender) { } } -void RTCPSender::BuildPLI(const RtcpContext& ctx, PacketSender& sender) { +void RTCPSender::BuildPLI(const RtcpContext& /* ctx */, PacketSender& sender) { rtcp::Pli pli; pli.SetSenderSsrc(ssrc_); pli.SetMediaSsrc(remote_ssrc_); @@ -437,7 +430,7 @@ void RTCPSender::BuildPLI(const RtcpContext& ctx, PacketSender& sender) { sender.AppendPacket(pli); } -void RTCPSender::BuildFIR(const RtcpContext& ctx, PacketSender& sender) { +void RTCPSender::BuildFIR(const RtcpContext& /* ctx */, PacketSender& sender) { ++sequence_number_fir_; rtcp::Fir fir; @@ -448,7 +441,7 @@ void RTCPSender::BuildFIR(const RtcpContext& ctx, PacketSender& sender) { sender.AppendPacket(fir); } -void RTCPSender::BuildREMB(const RtcpContext& ctx, PacketSender& sender) { +void RTCPSender::BuildREMB(const RtcpContext& /* ctx */, PacketSender& sender) { rtcp::Remb remb; remb.SetSenderSsrc(ssrc_); remb.SetBitrateBps(remb_bitrate_); @@ -473,7 +466,7 @@ void RTCPSender::BuildTMMBR(const RtcpContext& ctx, PacketSender& sender) { bool tmmbr_owner = false; // holding mutex_rtcp_sender_ while calling RTCPreceiver which - // will accuire criticalSectionRTCPReceiver_ is a potental deadlock but + // will acquire criticalSectionRTCPReceiver_ is a potential deadlock but // since RTCPreceiver is not doing the reverse we should be fine std::vector candidates = ctx.feedback_state_.receiver->BoundingSet(&tmmbr_owner); @@ -515,7 +508,8 @@ void RTCPSender::BuildTMMBR(const RtcpContext& ctx, PacketSender& sender) { sender.AppendPacket(tmmbr); } -void RTCPSender::BuildTMMBN(const RtcpContext& ctx, PacketSender& sender) { +void RTCPSender::BuildTMMBN(const RtcpContext& /* ctx */, + PacketSender& sender) { rtcp::Tmmbn tmmbn; tmmbn.SetSenderSsrc(ssrc_); for (const rtcp::TmmbItem& tmmbr : tmmbn_to_send_) { @@ -526,13 +520,13 @@ void RTCPSender::BuildTMMBN(const RtcpContext& ctx, PacketSender& sender) { sender.AppendPacket(tmmbn); } -void RTCPSender::BuildAPP(const RtcpContext& ctx, PacketSender& sender) { +void 
RTCPSender::BuildAPP(const RtcpContext& /* ctx */, PacketSender& sender) { rtcp::App app; app.SetSenderSsrc(ssrc_); sender.AppendPacket(app); } -void RTCPSender::BuildLossNotification(const RtcpContext& ctx, +void RTCPSender::BuildLossNotification(const RtcpContext& /* ctx */, PacketSender& sender) { loss_notification_.SetSenderSsrc(ssrc_); loss_notification_.SetMediaSsrc(remote_ssrc_); @@ -556,7 +550,7 @@ void RTCPSender::BuildNACK(const RtcpContext& ctx, PacketSender& sender) { sender.AppendPacket(nack); } -void RTCPSender::BuildBYE(const RtcpContext& ctx, PacketSender& sender) { +void RTCPSender::BuildBYE(const RtcpContext& /* ctx */, PacketSender& sender) { rtcp::Bye bye; bye.SetSenderSsrc(ssrc_); bye.SetCsrcs(csrcs_); @@ -570,7 +564,7 @@ void RTCPSender::BuildExtendedReports(const RtcpContext& ctx, if (!sending_ && xr_send_receiver_reference_time_enabled_) { rtcp::Rrtr rrtr; - rrtr.SetNtp(clock_->ConvertTimestampToNtpTime(ctx.now_)); + rrtr.SetNtp(env_.clock().ConvertTimestampToNtpTime(ctx.now_)); xr.SetRrtr(rrtr); } @@ -601,15 +595,14 @@ int32_t RTCPSender::SendRTCP(const FeedbackState& feedback_state, int32_t nack_size, const uint16_t* nack_list) { int32_t error_code = -1; - auto callback = [&](rtc::ArrayView packet) { + auto callback = [&](ArrayView packet) { if (transport_->SendRtcp(packet)) { error_code = 0; - if (event_log_) { - event_log_->Log(std::make_unique(packet)); - } + env_.event_log().Log( + std::make_unique(packet)); } }; - absl::optional sender; + std::optional sender; { MutexLock lock(&mutex_rtcp_sender_); sender.emplace(callback, max_packet_size_); @@ -624,7 +617,7 @@ int32_t RTCPSender::SendRTCP(const FeedbackState& feedback_state, return error_code; } -absl::optional RTCPSender::ComputeCompoundRTCPPacket( +std::optional RTCPSender::ComputeCompoundRTCPPacket( const FeedbackState& feedback_state, RTCPPacketType packet_type, int32_t nack_size, @@ -656,7 +649,7 @@ absl::optional RTCPSender::ComputeCompoundRTCPPacket( // We need to send our NTP even if we haven't received any reports. RtcpContext context(feedback_state, nack_size, nack_list, - clock_->CurrentTime()); + env_.clock().CurrentTime()); PrepareReport(feedback_state); @@ -699,7 +692,7 @@ absl::optional RTCPSender::ComputeCompoundRTCPPacket( } RTC_DCHECK(AllVolatileFlagsConsumed()); - return absl::nullopt; + return std::nullopt; } TimeDelta RTCPSender::ComputeTimeUntilNextReport(DataRate send_bitrate) { @@ -732,7 +725,7 @@ TimeDelta RTCPSender::ComputeTimeUntilNextReport(DataRate send_bitrate) { // The interval between RTCP packets is varied randomly over the // range [1/2,3/2] times the calculated interval. - int min_interval_int = rtc::dchecked_cast(min_interval.ms()); + int min_interval_int = dchecked_cast(min_interval.ms()); TimeDelta time_to_next = TimeDelta::Millis( random_.Rand(min_interval_int * 1 / 2, min_interval_int * 3 / 2)); @@ -783,7 +776,7 @@ std::vector RTCPSender::CreateReportBlocks( if (!result.empty() && feedback_state.last_rr.Valid()) { // Get our NTP as late as possible to avoid a race. - uint32_t now = CompactNtp(clock_->CurrentNtpTime()); + uint32_t now = CompactNtp(env_.clock().CurrentNtpTime()); uint32_t receive_time = CompactNtp(feedback_state.last_rr); uint32_t delay_since_last_sr = now - receive_time; @@ -846,7 +839,7 @@ void RTCPSender::SetVideoBitrateAllocation( // Check if this allocation is first ever, or has a different set of // spatial/temporal layers signaled and enabled, if so trigger an rtcp report // as soon as possible. 
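// ---------------------------------------------------------------------------
// Editor's sketch, not part of the patch: the allocation handed to
// SetVideoBitrateAllocation() above is indexed by (spatial, temporal) layer.
// Minimal usage, assuming the public VideoBitrateAllocation API:
#include <cstdint>
#include "api/video/video_bitrate_allocation.h"

namespace {
uint32_t TotalTargetBps() {
  webrtc::VideoBitrateAllocation allocation;
  allocation.SetBitrate(/*spatial_index=*/0, /*temporal_index=*/0, 100'000);
  allocation.SetBitrate(/*spatial_index=*/0, /*temporal_index=*/1, 50'000);
  allocation.SetBitrate(/*spatial_index=*/1, /*temporal_index=*/0, 200'000);
  return allocation.get_sum_bps();  // 350'000 bps across all layers.
}
}  // namespace
// ---------------------------------------------------------------------------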
- absl::optional new_bitrate = + std::optional new_bitrate = CheckAndUpdateLayerStructure(bitrate); if (new_bitrate) { video_bitrate_allocation_ = *new_bitrate; @@ -862,9 +855,9 @@ void RTCPSender::SetVideoBitrateAllocation( SetFlag(kRtcpAnyExtendedReports, true); } -absl::optional RTCPSender::CheckAndUpdateLayerStructure( +std::optional RTCPSender::CheckAndUpdateLayerStructure( const VideoBitrateAllocation& bitrate) const { - absl::optional updated_bitrate; + std::optional updated_bitrate; for (size_t si = 0; si < kMaxSpatialLayers; ++si) { for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) { if (!updated_bitrate && @@ -900,10 +893,10 @@ void RTCPSender::SendCombinedRtcpPacket( ssrc = ssrc_; } RTC_DCHECK_LE(max_packet_size, IP_PACKET_SIZE); - auto callback = [&](rtc::ArrayView packet) { + auto callback = [&](ArrayView packet) { if (transport_->SendRtcp(packet)) { - if (event_log_) - event_log_->Log(std::make_unique(packet)); + env_.event_log().Log( + std::make_unique(packet)); } }; PacketSender sender(callback, max_packet_size); @@ -915,7 +908,7 @@ void RTCPSender::SendCombinedRtcpPacket( } void RTCPSender::SetNextRtcpSendEvaluationDuration(TimeDelta duration) { - next_time_to_send_rtcp_ = clock_->CurrentTime() + duration; + next_time_to_send_rtcp_ = env_.clock().CurrentTime() + duration; // TODO(bugs.webrtc.org/11581): make unconditional once downstream consumers // are using the callback method. if (schedule_next_rtcp_send_evaluation_function_) diff --git a/modules/rtp_rtcp/source/rtcp_sender.h b/modules/rtp_rtcp/source/rtcp_sender.h index 0ceec9a64a..307228134d 100644 --- a/modules/rtp_rtcp/source/rtcp_sender.h +++ b/modules/rtp_rtcp/source/rtcp_sender.h @@ -11,29 +11,34 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_SENDER_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_SENDER_H_ +#include +#include +#include #include #include +#include #include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/call/transport.h" +#include "api/environment/environment.h" +#include "api/rtp_headers.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "api/video/video_bitrate_allocation.h" -#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "modules/rtp_rtcp/include/receive_statistics.h" +#include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_nack_stats.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" -#include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" #include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" +#include "modules/rtp_rtcp/source/rtcp_receiver.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/random.h" #include "rtc_base/synchronization/mutex.h" @@ -42,9 +47,6 @@ namespace webrtc { -class RTCPReceiver; -class RtcEventLog; - class RTCPSender final { public: struct Configuration { @@ -59,8 +61,7 @@ class RTCPSender final { // SSRCs for media and retransmission, respectively. // FlexFec SSRC is fetched from `flexfec_sender`. uint32_t local_media_ssrc = 0; - // The clock to use to read time. If nullptr then system clock will be used. 
- Clock* clock = nullptr; + // Transport object that will be called when packets are ready to be sent // out on the network. Transport* outgoing_transport = nullptr; @@ -79,8 +80,7 @@ class RTCPSender final { // have migrated to the callback solution. std::function schedule_next_rtcp_send_evaluation_function; - RtcEventLog* event_log = nullptr; - absl::optional rtcp_report_interval; + std::optional rtcp_report_interval; ReceiveStatisticsProvider* receive_statistics = nullptr; RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer = nullptr; }; @@ -104,13 +104,13 @@ class RTCPSender final { RTCPReceiver* receiver; }; - explicit RTCPSender(Configuration config); + RTCPSender(const Environment& env, Configuration config); RTCPSender() = delete; RTCPSender(const RTCPSender&) = delete; RTCPSender& operator=(const RTCPSender&) = delete; - virtual ~RTCPSender(); + ~RTCPSender(); RtcpMode Status() const RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); void SetRTCPStatus(RtcpMode method) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); @@ -127,8 +127,8 @@ class RTCPSender final { RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); void SetLastRtpTime(uint32_t rtp_timestamp, - absl::optional capture_time, - absl::optional payload_type) + std::optional capture_time, + std::optional payload_type) RTC_LOCKS_EXCLUDED(mutex_rtcp_sender_); void SetRtpClockRate(int8_t payload_type, int rtp_clock_rate_hz) @@ -186,7 +186,7 @@ class RTCPSender final { class RtcpContext; class PacketSender; - absl::optional ComputeCompoundRTCPPacket( + std::optional ComputeCompoundRTCPPacket( const FeedbackState& feedback_state, RTCPPacketType packet_type, int32_t nack_size, @@ -235,17 +235,16 @@ class RTCPSender final { void SetNextRtcpSendEvaluationDuration(TimeDelta duration) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); + const Environment env_; const bool audio_; // TODO(bugs.webrtc.org/11581): `mutex_rtcp_sender_` shouldn't be required if // we consistently run network related operations on the network thread. // This is currently not possible due to callbacks from the process thread in // ModuleRtpRtcpImpl2. 
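// ---------------------------------------------------------------------------
// Editor's sketch, not part of the patch: with clock and event_log removed
// from RTCPSender::Configuration, both now come from the Environment passed to
// the constructor, mirroring env_(CreateEnvironment(&clock_)) in the updated
// unit test fixture below. MakeRtcpSenderForTest and its transport argument
// are placeholders for illustration:
#include <memory>
#include "api/call/transport.h"
#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"
#include "api/units/time_delta.h"
#include "modules/rtp_rtcp/source/rtcp_sender.h"
#include "system_wrappers/include/clock.h"

std::unique_ptr<webrtc::RTCPSender> MakeRtcpSenderForTest(
    webrtc::SimulatedClock& clock, webrtc::Transport& transport) {
  webrtc::Environment env = webrtc::CreateEnvironment(&clock);
  webrtc::RTCPSender::Configuration config;
  config.audio = false;
  config.local_media_ssrc = 0x11111111;
  config.outgoing_transport = &transport;
  config.rtcp_report_interval = webrtc::TimeDelta::Millis(1000);
  return std::make_unique<webrtc::RTCPSender>(env, config);
}
// ---------------------------------------------------------------------------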
uint32_t ssrc_ RTC_GUARDED_BY(mutex_rtcp_sender_); - Clock* const clock_; Random random_ RTC_GUARDED_BY(mutex_rtcp_sender_); RtcpMode method_ RTC_GUARDED_BY(mutex_rtcp_sender_); - RtcEventLog* const event_log_; Transport* const transport_; const TimeDelta report_interval_; @@ -257,12 +256,12 @@ class RTCPSender final { mutable Mutex mutex_rtcp_sender_; bool sending_ RTC_GUARDED_BY(mutex_rtcp_sender_); - absl::optional next_time_to_send_rtcp_ + std::optional next_time_to_send_rtcp_ RTC_GUARDED_BY(mutex_rtcp_sender_); uint32_t timestamp_offset_ RTC_GUARDED_BY(mutex_rtcp_sender_); uint32_t last_rtp_timestamp_ RTC_GUARDED_BY(mutex_rtcp_sender_); - absl::optional last_frame_capture_time_ + std::optional last_frame_capture_time_ RTC_GUARDED_BY(mutex_rtcp_sender_); // SSRC that we receive on our RTP channel uint32_t remote_ssrc_ RTC_GUARDED_BY(mutex_rtcp_sender_); @@ -304,7 +303,7 @@ class RTCPSender final { std::map rtp_clock_rates_khz_ RTC_GUARDED_BY(mutex_rtcp_sender_); int8_t last_payload_type_ RTC_GUARDED_BY(mutex_rtcp_sender_); - absl::optional CheckAndUpdateLayerStructure( + std::optional CheckAndUpdateLayerStructure( const VideoBitrateAllocation& bitrate) const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_rtcp_sender_); diff --git a/modules/rtp_rtcp/source/rtcp_sender_unittest.cc b/modules/rtp_rtcp/source/rtcp_sender_unittest.cc index 1dcb628722..ffba6d76dd 100644 --- a/modules/rtp_rtcp/source/rtcp_sender_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_sender_unittest.cc @@ -10,23 +10,48 @@ #include "modules/rtp_rtcp/source/rtcp_sender.h" +#include +#include #include +#include #include +#include #include "absl/base/macros.h" +#include "api/array_view.h" +#include "api/call/transport.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/rtp_headers.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/video_bitrate_allocation.h" +#include "modules/rtp_rtcp/include/receive_statistics.h" +#include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" +#include "modules/rtp_rtcp/source/rtcp_packet/remote_estimate.h" +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" +#include "modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h" +#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/rate_limiter.h" #include "rtc_base/thread.h" +#include "system_wrappers/include/clock.h" +#include "system_wrappers/include/ntp_time.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/mock_transport.h" #include "test/rtcp_packet_parser.h" +namespace webrtc { +namespace { + using ::testing::_; using ::testing::ElementsAre; using ::testing::Eq; @@ -34,8 +59,6 @@ using ::testing::Invoke; using ::testing::Property; using ::testing::SizeIs; -namespace webrtc { - class RtcpPacketTypeCounterObserverImpl : public RtcpPacketTypeCounterObserver { public: RtcpPacketTypeCounterObserverImpl() : ssrc_(0) {} @@ -54,49 +77,33 @@ class TestTransport : public Transport { public: TestTransport() {} - bool SendRtp(rtc::ArrayView /*data*/, - const PacketOptions& options) 
override { + bool SendRtp(ArrayView /*data*/, + const PacketOptions& /* options */) override { return false; } - bool SendRtcp(rtc::ArrayView data) override { + bool SendRtcp(ArrayView data) override { parser_.Parse(data); return true; } test::RtcpPacketParser parser_; }; -namespace { -static const uint32_t kSenderSsrc = 0x11111111; -static const uint32_t kRemoteSsrc = 0x22222222; -static const uint32_t kStartRtpTimestamp = 0x34567; -static const uint32_t kRtpTimestamp = 0x45678; - -std::unique_ptr CreateRtcpSender( - const RTCPSender::Configuration& config, - bool init_timestamps = true) { - auto rtcp_sender = std::make_unique(config); - rtcp_sender->SetRemoteSSRC(kRemoteSsrc); - if (init_timestamps) { - rtcp_sender->SetTimestampOffset(kStartRtpTimestamp); - rtcp_sender->SetLastRtpTime(kRtpTimestamp, config.clock->CurrentTime(), - /*payload_type=*/0); - } - return rtcp_sender; -} -} // namespace +constexpr uint32_t kSenderSsrc = 0x11111111; +constexpr uint32_t kRemoteSsrc = 0x22222222; +constexpr uint32_t kStartRtpTimestamp = 0x34567; +constexpr uint32_t kRtpTimestamp = 0x45678; class RtcpSenderTest : public ::testing::Test { protected: RtcpSenderTest() : clock_(1335900000), - receive_statistics_(ReceiveStatistics::Create(&clock_)) { - rtp_rtcp_impl_.reset(new ModuleRtpRtcpImpl2(GetDefaultRtpRtcpConfig())); - } + env_(CreateEnvironment(&clock_)), + receive_statistics_(ReceiveStatistics::Create(&clock_)), + rtp_rtcp_impl_(env_, GetDefaultRtpRtcpConfig()) {} RTCPSender::Configuration GetDefaultConfig() { RTCPSender::Configuration configuration; configuration.audio = false; - configuration.clock = &clock_; configuration.outgoing_transport = &test_transport_; configuration.rtcp_report_interval = TimeDelta::Millis(1000); configuration.receive_statistics = receive_statistics_.get(); @@ -108,7 +115,6 @@ class RtcpSenderTest : public ::testing::Test { RTCPSender::Configuration config = GetDefaultConfig(); RtpRtcpInterface::Configuration result; result.audio = config.audio; - result.clock = config.clock; result.outgoing_transport = config.outgoing_transport; result.rtcp_report_interval_ms = config.rtcp_report_interval->ms(); result.receive_statistics = config.receive_statistics; @@ -116,6 +122,19 @@ class RtcpSenderTest : public ::testing::Test { return result; } + std::unique_ptr CreateRtcpSender( + const RTCPSender::Configuration& config, + bool init_timestamps = true) { + auto rtcp_sender = std::make_unique(env_, config); + rtcp_sender->SetRemoteSSRC(kRemoteSsrc); + if (init_timestamps) { + rtcp_sender->SetTimestampOffset(kStartRtpTimestamp); + rtcp_sender->SetLastRtpTime(kRtpTimestamp, env_.clock().CurrentTime(), + /*payload_type=*/0); + } + return rtcp_sender; + } + void InsertIncomingPacket(uint32_t remote_ssrc, uint16_t seq_num) { RtpPacketReceived packet; packet.SetSsrc(remote_ssrc); @@ -128,14 +147,15 @@ class RtcpSenderTest : public ::testing::Test { test::RtcpPacketParser* parser() { return &test_transport_.parser_; } RTCPSender::FeedbackState feedback_state() { - return rtp_rtcp_impl_->GetFeedbackState(); + return rtp_rtcp_impl_.GetFeedbackState(); } - rtc::AutoThread main_thread_; + AutoThread main_thread_; SimulatedClock clock_; + const Environment env_; TestTransport test_transport_; std::unique_ptr receive_statistics_; - std::unique_ptr rtp_rtcp_impl_; + ModuleRtpRtcpImpl2 rtp_rtcp_impl_; }; TEST_F(RtcpSenderTest, SetRtcpStatus) { @@ -163,7 +183,7 @@ TEST_F(RtcpSenderTest, SendSr) { const uint32_t kOctetCount = 0x23456; auto rtcp_sender = CreateRtcpSender(GetDefaultConfig()); 
rtcp_sender->SetRTCPStatus(RtcpMode::kReducedSize); - RTCPSender::FeedbackState feedback_state = rtp_rtcp_impl_->GetFeedbackState(); + RTCPSender::FeedbackState feedback_state = rtp_rtcp_impl_.GetFeedbackState(); rtcp_sender->SetSendingStatus(feedback_state, true); feedback_state.packets_sent = kPacketCount; feedback_state.media_bytes_sent = kOctetCount; @@ -188,7 +208,7 @@ TEST_F(RtcpSenderTest, SendConsecutiveSrWithExactSlope) { // Make sure clock is not exactly at some milliseconds point. clock_.AdvanceTimeMicroseconds(kTimeBetweenSRsUs); rtcp_sender->SetRTCPStatus(RtcpMode::kReducedSize); - RTCPSender::FeedbackState feedback_state = rtp_rtcp_impl_->GetFeedbackState(); + RTCPSender::FeedbackState feedback_state = rtp_rtcp_impl_.GetFeedbackState(); rtcp_sender->SetSendingStatus(feedback_state, true); feedback_state.packets_sent = kPacketCount; feedback_state.media_bytes_sent = kOctetCount; @@ -215,7 +235,6 @@ TEST_F(RtcpSenderTest, SendConsecutiveSrWithExactSlope) { TEST_F(RtcpSenderTest, DoNotSendSrBeforeRtp) { RTCPSender::Configuration config; - config.clock = &clock_; config.receive_statistics = receive_statistics_.get(); config.outgoing_transport = &test_transport_; config.rtcp_report_interval = TimeDelta::Millis(1000); @@ -236,7 +255,6 @@ TEST_F(RtcpSenderTest, DoNotSendSrBeforeRtp) { TEST_F(RtcpSenderTest, DoNotSendCompundBeforeRtp) { RTCPSender::Configuration config; - config.clock = &clock_; config.receive_statistics = receive_statistics_.get(); config.outgoing_transport = &test_transport_; config.rtcp_report_interval = TimeDelta::Millis(1000); @@ -328,13 +346,12 @@ TEST_F(RtcpSenderTest, SendBye) { EXPECT_EQ(kSenderSsrc, parser()->bye()->sender_ssrc()); } -TEST_F(RtcpSenderTest, StopSendingTriggersBye) { +TEST_F(RtcpSenderTest, StopSendingDoesNotTriggersBye) { auto rtcp_sender = CreateRtcpSender(GetDefaultConfig()); rtcp_sender->SetRTCPStatus(RtcpMode::kReducedSize); rtcp_sender->SetSendingStatus(feedback_state(), true); rtcp_sender->SetSendingStatus(feedback_state(), false); - EXPECT_EQ(1, parser()->bye()->num_packets()); - EXPECT_EQ(kSenderSsrc, parser()->bye()->sender_ssrc()); + EXPECT_EQ(0, parser()->bye()->num_packets()); } TEST_F(RtcpSenderTest, SendFir) { @@ -481,7 +498,7 @@ TEST_F(RtcpSenderTest, RembIncludedInEachCompoundPacketAfterSet) { TEST_F(RtcpSenderTest, SendXrWithDlrr) { auto rtcp_sender = CreateRtcpSender(GetDefaultConfig()); rtcp_sender->SetRTCPStatus(RtcpMode::kCompound); - RTCPSender::FeedbackState feedback_state = rtp_rtcp_impl_->GetFeedbackState(); + RTCPSender::FeedbackState feedback_state = rtp_rtcp_impl_.GetFeedbackState(); rtcp::ReceiveTimeInfo last_xr_rr; last_xr_rr.ssrc = 0x11111111; last_xr_rr.last_rr = 0x22222222; @@ -501,7 +518,7 @@ TEST_F(RtcpSenderTest, SendXrWithMultipleDlrrSubBlocks) { const size_t kNumReceivers = 2; auto rtcp_sender = CreateRtcpSender(GetDefaultConfig()); rtcp_sender->SetRTCPStatus(RtcpMode::kCompound); - RTCPSender::FeedbackState feedback_state = rtp_rtcp_impl_->GetFeedbackState(); + RTCPSender::FeedbackState feedback_state = rtp_rtcp_impl_.GetFeedbackState(); for (size_t i = 0; i < kNumReceivers; ++i) { rtcp::ReceiveTimeInfo last_xr_rr; last_xr_rr.ssrc = i; @@ -591,7 +608,6 @@ TEST_F(RtcpSenderTest, TestNoXrRrtrSentIfNotEnabled) { TEST_F(RtcpSenderTest, TestRegisterRtcpPacketTypeObserver) { RtcpPacketTypeCounterObserverImpl observer; RTCPSender::Configuration config; - config.clock = &clock_; config.receive_statistics = receive_statistics_.get(); config.outgoing_transport = &test_transport_; 
config.rtcp_packet_type_counter_observer = &observer; @@ -665,7 +681,7 @@ TEST_F(RtcpSenderTest, SendsTmmbnIfSetAndEmpty) { TEST_F(RtcpSenderTest, ByeMustBeLast) { MockTransport mock_transport; EXPECT_CALL(mock_transport, SendRtcp(_)) - .WillOnce(Invoke([](rtc::ArrayView data) { + .WillOnce(Invoke([](ArrayView data) { const uint8_t* next_packet = data.data(); const uint8_t* const packet_end = data.data() + data.size(); rtcp::CommonHeader packet; @@ -686,7 +702,6 @@ TEST_F(RtcpSenderTest, ByeMustBeLast) { // Re-configure rtcp_sender with mock_transport_ RTCPSender::Configuration config; - config.clock = &clock_; config.receive_statistics = receive_statistics_.get(); config.outgoing_transport = &mock_transport; config.rtcp_report_interval = TimeDelta::Millis(1000); @@ -719,7 +734,7 @@ TEST_F(RtcpSenderTest, SendXrWithTargetBitrate) { EXPECT_EQ(0, rtcp_sender->SendRTCP(feedback_state(), kRtcpReport)); EXPECT_EQ(1, parser()->xr()->num_packets()); EXPECT_EQ(kSenderSsrc, parser()->xr()->sender_ssrc()); - const absl::optional& target_bitrate = + const std::optional& target_bitrate = parser()->xr()->target_bitrate(); ASSERT_TRUE(target_bitrate); const std::vector& bitrates = @@ -785,7 +800,7 @@ TEST_F(RtcpSenderTest, SendTargetBitrateExplicitZeroOnStreamRemoval) { allocation.SetBitrate(1, 0, 200000); rtcp_sender->SetVideoBitrateAllocation(allocation); EXPECT_EQ(0, rtcp_sender->SendRTCP(feedback_state(), kRtcpReport)); - absl::optional target_bitrate = + std::optional target_bitrate = parser()->xr()->target_bitrate(); ASSERT_TRUE(target_bitrate); std::vector bitrates = @@ -840,4 +855,5 @@ TEST_F(RtcpSenderTest, SendsCombinedRtcpPacket) { EXPECT_EQ(parser()->app()->sender_ssrc(), kSenderSsrc); } +} // namespace } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtcp_transceiver.cc b/modules/rtp_rtcp/source/rtcp_transceiver.cc index f265bd5825..9b0ed57d3c 100644 --- a/modules/rtp_rtcp/source/rtcp_transceiver.cc +++ b/modules/rtp_rtcp/source/rtcp_transceiver.cc @@ -10,15 +10,19 @@ #include "modules/rtp_rtcp/source/rtcp_transceiver.h" +#include #include #include #include #include "absl/cleanup/cleanup.h" +#include "absl/functional/any_invocable.h" #include "api/units/timestamp.h" -#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" +#include "modules/rtp_rtcp/source/rtcp_transceiver_config.h" +#include "modules/rtp_rtcp/source/rtcp_transceiver_impl.h" #include "rtc_base/checks.h" -#include "rtc_base/event.h" +#include "rtc_base/copy_on_write_buffer.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -79,7 +83,7 @@ void RtcpTransceiver::SetReadyToSend(bool ready) { task_queue_->PostTask([ptr, ready] { ptr->SetReadyToSend(ready); }); } -void RtcpTransceiver::ReceivePacket(rtc::CopyOnWriteBuffer packet) { +void RtcpTransceiver::ReceivePacket(CopyOnWriteBuffer packet) { RTC_CHECK(rtcp_transceiver_); RtcpTransceiverImpl* ptr = rtcp_transceiver_.get(); Timestamp now = clock_->CurrentTime(); diff --git a/modules/rtp_rtcp/source/rtcp_transceiver.h b/modules/rtp_rtcp/source/rtcp_transceiver.h index 22fcc73337..c1ff78bac6 100644 --- a/modules/rtp_rtcp/source/rtcp_transceiver.h +++ b/modules/rtp_rtcp/source/rtcp_transceiver.h @@ -11,12 +11,14 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_TRANSCEIVER_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_TRANSCEIVER_H_ +#include #include -#include #include #include "absl/functional/any_invocable.h" #include "api/task_queue/task_queue_base.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" 
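// ---------------------------------------------------------------------------
// Editor's sketch, not part of the patch: the ByeMustBeLast lambda above walks
// a compound packet with rtcp::CommonHeader. The same loop, factored out, can
// be reused when asserting on packet ordering; PacketTypesInOrder is a
// hypothetical helper, not an existing test utility:
#include <cstdint>
#include <vector>
#include "api/array_view.h"
#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h"

namespace {
std::vector<uint8_t> PacketTypesInOrder(
    webrtc::ArrayView<const uint8_t> compound) {
  std::vector<uint8_t> types;
  const uint8_t* next = compound.data();
  const uint8_t* const end = compound.data() + compound.size();
  webrtc::rtcp::CommonHeader header;
  while (next < end && header.Parse(next, end - next)) {
    types.push_back(header.type());
    next = header.NextPacket();
  }
  return types;
}
}  // namespace
// ---------------------------------------------------------------------------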
+#include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_transceiver_config.h" #include "modules/rtp_rtcp/source/rtcp_transceiver_impl.h" #include "rtc_base/copy_on_write_buffer.h" @@ -63,7 +65,7 @@ class RtcpTransceiver : public RtcpFeedbackSenderInterface { void SetReadyToSend(bool ready); // Handles incoming rtcp packets. - void ReceivePacket(rtc::CopyOnWriteBuffer packet); + void ReceivePacket(CopyOnWriteBuffer packet); // Sends RTCP packets starting with a sender or receiver report. void SendCompoundPacket(); diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_config.cc b/modules/rtp_rtcp/source/rtcp_transceiver_config.cc index 0f1e4def1c..806dfe4eee 100644 --- a/modules/rtp_rtcp/source/rtcp_transceiver_config.cc +++ b/modules/rtp_rtcp/source/rtcp_transceiver_config.cc @@ -10,6 +10,8 @@ #include "modules/rtp_rtcp/source/rtcp_transceiver_config.h" +#include "api/rtp_headers.h" +#include "api/units/time_delta.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/logging.h" diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_config.h b/modules/rtp_rtcp/source/rtcp_transceiver_config.h index 881666d704..37500fb13f 100644 --- a/modules/rtp_rtcp/source/rtcp_transceiver_config.h +++ b/modules/rtp_rtcp/source/rtcp_transceiver_config.h @@ -11,6 +11,9 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_TRANSCEIVER_CONFIG_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_TRANSCEIVER_CONFIG_H_ +#include +#include +#include #include #include "api/array_view.h" @@ -34,12 +37,13 @@ class MediaReceiverRtcpObserver { public: virtual ~MediaReceiverRtcpObserver() = default; - virtual void OnSenderReport(uint32_t sender_ssrc, - NtpTime ntp_time, - uint32_t rtp_time) {} - virtual void OnBye(uint32_t sender_ssrc) {} - virtual void OnBitrateAllocation(uint32_t sender_ssrc, - const VideoBitrateAllocation& allocation) {} + virtual void OnSenderReport(uint32_t /* sender_ssrc */, + NtpTime /* ntp_time */, + uint32_t /* rtp_time */) {} + virtual void OnBye(uint32_t /* sender_ssrc */) {} + virtual void OnBitrateAllocation( + uint32_t /* sender_ssrc */, + const VideoBitrateAllocation& /* allocation */) {} }; // Handles RTCP related messages for a single RTP stream (i.e. single SSRC) @@ -76,14 +80,14 @@ class RtpStreamRtcpHandler { }; virtual RtpStats SentStats() = 0; - virtual void OnNack(uint32_t sender_ssrc, - rtc::ArrayView sequence_numbers) {} - virtual void OnFir(uint32_t sender_ssrc) {} - virtual void OnPli(uint32_t sender_ssrc) {} + virtual void OnNack(uint32_t /* sender_ssrc */, + ArrayView /* sequence_numbers */) {} + virtual void OnFir(uint32_t /* sender_ssrc */) {} + virtual void OnPli(uint32_t /* sender_ssrc */) {} // Called on an RTCP packet with sender or receiver reports with a report // block for the handled RTP stream. - virtual void OnReport(const ReportBlockData& report_block) {} + virtual void OnReport(const ReportBlockData& /* report_block */) {} }; struct RtcpTransceiverConfig { @@ -113,7 +117,7 @@ struct RtcpTransceiverConfig { Clock* clock = nullptr; // Transport to send RTCP packets to. - std::function)> rtcp_transport; + std::function)> rtcp_transport; // Queue for scheduling delayed tasks, e.g. sending periodic compound packets. 
TaskQueueBase* task_queue = nullptr; diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc b/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc index 625cb7fefc..a8b3f2fb29 100644 --- a/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc +++ b/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc @@ -11,34 +11,56 @@ #include "modules/rtp_rtcp/source/rtcp_transceiver_impl.h" #include +#include +#include +#include +#include +#include +#include +#include #include +#include #include "absl/algorithm/container.h" -#include "absl/memory/memory.h" -#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/rtp_headers.h" +#include "api/task_queue/task_queue_base.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/video_bitrate_allocation.h" +#include "api/video/video_codec_constants.h" #include "modules/rtp_rtcp/include/receive_statistics.h" +#include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/ntp_time_util.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" +#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" #include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" #include "modules/rtp_rtcp/source/rtcp_packet/fir.h" #include "modules/rtp_rtcp/source/rtcp_packet/nack.h" #include "modules/rtp_rtcp/source/rtcp_packet/pli.h" +#include "modules/rtp_rtcp/source/rtcp_packet/psfb.h" #include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/remb.h" #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" +#include "modules/rtp_rtcp/source/rtcp_packet/rrtr.h" +#include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" #include "modules/rtp_rtcp/source/rtcp_packet/sdes.h" #include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" -#include "modules/rtp_rtcp/source/time_util.h" +#include "modules/rtp_rtcp/source/rtcp_transceiver_config.h" #include "rtc_base/checks.h" #include "rtc_base/containers/flat_map.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/divide_round.h" #include "rtc_base/task_utils/repeating_task.h" -#include "rtc_base/time_utils.h" #include "system_wrappers/include/clock.h" +#include "system_wrappers/include/ntp_time.h" namespace webrtc { namespace { @@ -48,7 +70,7 @@ struct SenderReportTimes { NtpTime remote_sent_time; }; -std::function)> GetRtcpTransport( +std::function)> GetRtcpTransport( const RtcpTransceiverConfig& config) { if (config.rtcp_transport != nullptr) { return config.rtcp_transport; @@ -56,7 +78,7 @@ std::function)> GetRtcpTransport( bool first = true; std::string log_prefix = config.debug_id; - return [first, log_prefix](rtc::ArrayView packet) mutable { + return [first, log_prefix](ArrayView /* packet */) mutable { if (first) { RTC_LOG(LS_ERROR) << log_prefix << "Sending RTCP packets is disabled."; first = false; @@ -68,7 +90,7 @@ std::function)> GetRtcpTransport( struct RtcpTransceiverImpl::RemoteSenderState { uint8_t fir_sequence_number = 0; - absl::optional last_received_sender_report; + std::optional last_received_sender_report; std::vector observers; }; @@ -104,7 +126,7 @@ class 
RtcpTransceiverImpl::PacketSender { // Sends pending rtcp compound packet. void Send() { if (index_ > 0) { - callback_(rtc::ArrayView(buffer_, index_)); + callback_(ArrayView(buffer_, index_)); index_ = 0; } } @@ -193,7 +215,7 @@ void RtcpTransceiverImpl::SetReadyToSend(bool ready) { ready_to_send_ = ready; } -void RtcpTransceiverImpl::ReceivePacket(rtc::ArrayView packet, +void RtcpTransceiverImpl::ReceivePacket(ArrayView packet, Timestamp now) { // Report blocks may be spread across multiple sender and receiver reports. std::vector report_blocks; @@ -234,7 +256,7 @@ void RtcpTransceiverImpl::SetRemb(int64_t bitrate_bps, // immideately on large bitrate change when there is one RtcpTransceiver per // rtp transport. if (send_now) { - absl::optional remb; + std::optional remb; remb.swap(remb_); SendImmediateFeedback(*remb); remb.swap(remb_); @@ -266,9 +288,8 @@ void RtcpTransceiverImpl::SendPictureLossIndication(uint32_t ssrc) { SendImmediateFeedback(pli); } -void RtcpTransceiverImpl::SendFullIntraRequest( - rtc::ArrayView ssrcs, - bool new_request) { +void RtcpTransceiverImpl::SendFullIntraRequest(ArrayView ssrcs, + bool new_request) { RTC_DCHECK(!ssrcs.empty()); if (!ready_to_send_) return; @@ -355,18 +376,17 @@ void RtcpTransceiverImpl::HandleReceiverReport( void RtcpTransceiverImpl::HandleReportBlocks( uint32_t sender_ssrc, Timestamp now, - rtc::ArrayView rtcp_report_blocks, + ArrayView rtcp_report_blocks, std::vector& report_blocks) { if (rtcp_report_blocks.empty()) { return; } NtpTime now_ntp = config_.clock->ConvertTimestampToNtpTime(now); uint32_t receive_time_ntp = CompactNtp(now_ntp); - Timestamp now_utc = - Timestamp::Millis(now_ntp.ToMs() - rtc::kNtpJan1970Millisecs); + Timestamp now_utc = Clock::NtpToUtc(now_ntp); for (const rtcp::ReportBlock& block : rtcp_report_blocks) { - absl::optional rtt; + std::optional rtt; if (block.last_sr() != 0) { rtt = CompactNtpRttToTimeDelta( receive_time_ntp - block.delay_since_last_sr() - block.last_sr()); @@ -375,7 +395,7 @@ void RtcpTransceiverImpl::HandleReportBlocks( auto sender_it = local_senders_by_ssrc_.find(block.source_ssrc()); if (sender_it != local_senders_by_ssrc_.end()) { LocalSenderState& state = *sender_it->second; - state.report_block.SetReportBlock(sender_ssrc, block, now_utc); + state.report_block.SetReportBlock(sender_ssrc, block, now_utc, now); if (rtt.has_value()) { state.report_block.AddRoundTripTimeSample(*rtt); } @@ -385,7 +405,7 @@ void RtcpTransceiverImpl::HandleReportBlocks( // No registered sender for this report block, still report it to the // network link. 
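// ---------------------------------------------------------------------------
// Editor's note, not part of the patch: HandleReportBlocks() above computes
// RTT per RFC 3550 as RTT = A - DLSR - LSR, where all three values are
// "compact NTP" (the middle 32 bits of the 64-bit NTP timestamp, i.e. Q16.16
// seconds). A self-contained worked example of that arithmetic:
#include <cstdint>

namespace {
constexpr uint32_t kLsr = 0x0001'0000;      // Our SR, timestamped at 1.00 s.
constexpr uint32_t kDlsr = 0x0000'8000;     // Remote held it for 0.50 s.
constexpr uint32_t kArrival = 0x0001'C000;  // Report block received at 1.75 s.
constexpr uint32_t kRttCompactNtp = kArrival - kDlsr - kLsr;  // 0x0000'4000
// Q16.16 seconds -> milliseconds: 0x4000 * 1000 / 65536 = 250 ms.
static_assert(kRttCompactNtp * 1000ull / 65536 == 250, "quarter second RTT");
}  // namespace
// ---------------------------------------------------------------------------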
ReportBlockData report_block; - report_block.SetReportBlock(sender_ssrc, block, now_utc); + report_block.SetReportBlock(sender_ssrc, block, now_utc, now); if (rtt.has_value()) { report_block.AddRoundTripTimeSample(*rtt); } @@ -464,6 +484,9 @@ void RtcpTransceiverImpl::HandleRtpFeedback( case rtcp::TransportFeedback::kFeedbackMessageType: HandleTransportFeedback(rtcp_packet_header, now); break; + case rtcp::CongestionControlFeedback::kFeedbackMessageType: + HandleCongestionControlFeedback(rtcp_packet_header, now); + break; } } @@ -493,6 +516,20 @@ void RtcpTransceiverImpl::HandleTransportFeedback( } } +void RtcpTransceiverImpl::HandleCongestionControlFeedback( + const rtcp::CommonHeader& rtcp_packet_header, + Timestamp now) { + RTC_DCHECK_EQ(rtcp_packet_header.fmt(), + rtcp::CongestionControlFeedback::kFeedbackMessageType); + if (config_.network_link_observer == nullptr) { + return; + } + rtcp::CongestionControlFeedback feedback; + if (feedback.Parse(rtcp_packet_header)) { + config_.network_link_observer->OnCongestionControlFeedback(now, feedback); + } +} + void RtcpTransceiverImpl::HandleExtendedReports( const rtcp::CommonHeader& rtcp_packet_header, Timestamp now) { @@ -537,7 +574,7 @@ void RtcpTransceiverImpl::HandleDlrr(const rtcp::Dlrr& dlrr, Timestamp now) { void RtcpTransceiverImpl::ProcessReportBlocks( Timestamp now, - rtc::ArrayView report_blocks) { + ArrayView report_blocks) { RTC_DCHECK(!report_blocks.empty()); if (config_.network_link_observer == nullptr) { return; @@ -736,7 +773,7 @@ void RtcpTransceiverImpl::CreateCompoundPacket(Timestamp now, PacketSender& sender) { RTC_DCHECK(sender.IsEmpty()); ReservedBytes reserved = {.per_packet = reserved_bytes}; - absl::optional sdes; + std::optional sdes; if (!config_.cname.empty()) { sdes.emplace(); bool added = sdes->AddCName(config_.feedback_ssrc, config_.cname); @@ -747,7 +784,7 @@ void RtcpTransceiverImpl::CreateCompoundPacket(Timestamp now, if (remb_.has_value()) { reserved.per_packet += remb_->BlockLength(); } - absl::optional xr_with_dlrr; + std::optional xr_with_dlrr; if (!received_rrtrs_.empty()) { RTC_DCHECK(config_.reply_to_non_sender_rtt_measurement); xr_with_dlrr.emplace(); @@ -799,7 +836,7 @@ void RtcpTransceiverImpl::CreateCompoundPacket(Timestamp now, sender.AppendPacket(xr_with_rrtr); } if (xr_with_dlrr.has_value()) { - rtc::ArrayView ssrcs(&sender_ssrc, 1); + ArrayView ssrcs(&sender_ssrc, 1); if (config_.reply_to_non_sender_rtt_mesaurments_on_all_ssrcs && !sender_ssrcs.empty()) { ssrcs = sender_ssrcs; diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_impl.h b/modules/rtp_rtcp/source/rtcp_transceiver_impl.h index c73b292e86..adc9cc5283 100644 --- a/modules/rtp_rtcp/source/rtcp_transceiver_impl.h +++ b/modules/rtp_rtcp/source/rtcp_transceiver_impl.h @@ -11,14 +11,19 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_TRANSCEIVER_IMPL_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_TRANSCEIVER_IMPL_H_ +#include +#include +#include #include #include -#include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "modules/rtp_rtcp/include/report_block_data.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" #include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" #include "modules/rtp_rtcp/source/rtcp_packet/remb.h" @@ -27,7 +32,6 @@ #include "modules/rtp_rtcp/source/rtcp_transceiver_config.h" #include "rtc_base/containers/flat_map.h" #include 
"rtc_base/task_utils/repeating_task.h" -#include "system_wrappers/include/ntp_time.h" namespace webrtc { // @@ -55,7 +59,7 @@ class RtcpTransceiverImpl { void SetReadyToSend(bool ready); - void ReceivePacket(rtc::ArrayView packet, Timestamp now); + void ReceivePacket(ArrayView packet, Timestamp now); void SendCompoundPacket(); @@ -67,8 +71,7 @@ class RtcpTransceiverImpl { void SendPictureLossIndication(uint32_t ssrc); // If new_request is true then requested sequence no. will increase for each // requested ssrc. - void SendFullIntraRequest(rtc::ArrayView ssrcs, - bool new_request); + void SendFullIntraRequest(ArrayView ssrcs, bool new_request); // SendCombinedRtcpPacket ignores rtcp mode and does not send a compound // message. https://tools.ietf.org/html/rfc4585#section-3.1 @@ -98,11 +101,10 @@ class RtcpTransceiverImpl { void HandleReceiverReport(const rtcp::CommonHeader& rtcp_packet_header, Timestamp now, std::vector& report_blocks); - void HandleReportBlocks( - uint32_t sender_ssrc, - Timestamp now, - rtc::ArrayView rtcp_report_blocks, - std::vector& report_blocks); + void HandleReportBlocks(uint32_t sender_ssrc, + Timestamp now, + ArrayView rtcp_report_blocks, + std::vector& report_blocks); void HandlePayloadSpecificFeedback( const rtcp::CommonHeader& rtcp_packet_header, Timestamp now); @@ -114,6 +116,9 @@ class RtcpTransceiverImpl { void HandleNack(const rtcp::CommonHeader& rtcp_packet_header); void HandleTransportFeedback(const rtcp::CommonHeader& rtcp_packet_header, Timestamp now); + void HandleCongestionControlFeedback( + const rtcp::CommonHeader& rtcp_packet_header, + Timestamp now); void HandleExtendedReports(const rtcp::CommonHeader& rtcp_packet_header, Timestamp now); // Extended Reports blocks handlers. @@ -121,7 +126,7 @@ class RtcpTransceiverImpl { void HandleTargetBitrate(const rtcp::TargetBitrate& target_bitrate, uint32_t remote_ssrc); void ProcessReportBlocks(Timestamp now, - rtc::ArrayView report_blocks); + ArrayView report_blocks); void ReschedulePeriodicCompoundPackets(); void SchedulePeriodicCompoundPackets(TimeDelta delay); @@ -151,10 +156,10 @@ class RtcpTransceiverImpl { size_t num_max_blocks); const RtcpTransceiverConfig config_; - std::function)> rtcp_transport_; + std::function)> rtcp_transport_; bool ready_to_send_; - absl::optional remb_; + std::optional remb_; // TODO(bugs.webrtc.org/8239): Remove entries from remote_senders_ that are no // longer needed. 
flat_map remote_senders_; diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc b/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc index e3f205dc1d..3a10304b4b 100644 --- a/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_transceiver_impl_unittest.cc @@ -10,27 +10,48 @@ #include "modules/rtp_rtcp/source/rtcp_transceiver_impl.h" +#include +#include +#include #include +#include #include #include -#include "absl/memory/memory.h" +#include "api/array_view.h" #include "api/rtp_headers.h" +#include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" #include "api/test/create_time_controller.h" #include "api/test/time_controller.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "api/video/video_bitrate_allocation.h" +#include "api/video/video_codec_constants.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/mocks/mock_network_link_rtcp_observer.h" -#include "modules/rtp_rtcp/mocks/mock_rtcp_rtt_stats.h" +#include "modules/rtp_rtcp/source/ntp_time_util.h" #include "modules/rtp_rtcp/source/rtcp_packet/app.h" #include "modules/rtp_rtcp/source/rtcp_packet/bye.h" #include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h" -#include "modules/rtp_rtcp/source/time_util.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" +#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" +#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" +#include "modules/rtp_rtcp/source/rtcp_packet/fir.h" +#include "modules/rtp_rtcp/source/rtcp_packet/nack.h" +#include "modules/rtp_rtcp/source/rtcp_packet/pli.h" +#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/remb.h" +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" +#include "modules/rtp_rtcp/source/rtcp_packet/rrtr.h" +#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/target_bitrate.h" +#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" +#include "modules/rtp_rtcp/source/rtcp_transceiver_config.h" #include "system_wrappers/include/clock.h" +#include "system_wrappers/include/ntp_time.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" @@ -87,7 +108,7 @@ class MockRtpStreamRtcpHandler : public RtpStreamRtcpHandler { MOCK_METHOD(RtpStats, SentStats, (), (override)); MOCK_METHOD(void, OnNack, - (uint32_t, rtc::ArrayView), + (uint32_t, ArrayView), (override)); MOCK_METHOD(void, OnFir, (uint32_t), (override)); MOCK_METHOD(void, OnPli, (uint32_t), (override)); @@ -110,8 +131,8 @@ class FakeRtcpTransport { public: explicit FakeRtcpTransport(TimeController& time) : time_(time) {} - std::function)> AsStdFunction() { - return [this](rtc::ArrayView) { sent_rtcp_ = true; }; + std::function)> AsStdFunction() { + return [this](ArrayView) { sent_rtcp_ = true; }; } // Returns true when packet was received by the transport. 
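Note on the transport hook these tests inject: the patch moves the RTCP transport callback from rtc::ArrayView to the webrtc-namespace ArrayView. A minimal sketch of the same pattern follows, using a transport that records the sent bytes instead of setting a flag; the class name RecordingRtcpTransport, the void return type, and the const uint8_t element type are illustrative assumptions, not part of the patch:

#include <cstdint>
#include <functional>
#include <vector>

#include "api/array_view.h"

namespace example {

// Adapts a callable to the config.rtcp_transport hook, the same way
// FakeRtcpTransport::AsStdFunction() does above, but keeps a copy of the
// last RTCP compound packet for later inspection.
class RecordingRtcpTransport {
 public:
  std::function<void(webrtc::ArrayView<const uint8_t>)> AsStdFunction() {
    return [this](webrtc::ArrayView<const uint8_t> packet) {
      last_packet_.assign(packet.begin(), packet.end());
    };
  }

  const std::vector<uint8_t>& last_packet() const { return last_packet_; }

 private:
  std::vector<uint8_t> last_packet_;
};

}  // namespace example

A test would wire it up as config.rtcp_transport = transport.AsStdFunction(), exactly like the fakes in this file, and inspect last_packet() after SendCompoundPacket().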
@@ -127,9 +148,9 @@ class FakeRtcpTransport { bool sent_rtcp_ = false; }; -std::function)> RtcpParserTransport( +std::function)> RtcpParserTransport( RtcpPacketParser& parser) { - return [&parser](rtc::ArrayView packet) { + return [&parser](ArrayView packet) { return parser.Parse(packet); }; } @@ -221,7 +242,7 @@ TEST_F(RtcpTransceiverImplTest, DelaysSendingFirstCompondPacket) { config.rtcp_transport = transport.AsStdFunction(); config.initial_report_delay = TimeDelta::Millis(10); config.task_queue = queue.get(); - absl::optional rtcp_transceiver; + std::optional rtcp_transceiver; Timestamp started = CurrentTime(); queue->PostTask([&] { rtcp_transceiver.emplace(config); }); @@ -248,7 +269,7 @@ TEST_F(RtcpTransceiverImplTest, PeriodicallySendsPackets) { config.initial_report_delay = TimeDelta::Zero(); config.report_period = kReportPeriod; config.task_queue = queue.get(); - absl::optional rtcp_transceiver; + std::optional rtcp_transceiver; Timestamp time_just_before_1st_packet = Timestamp::MinusInfinity(); queue->PostTask([&] { // Because initial_report_delay_ms is set to 0, time_just_before_the_packet @@ -283,7 +304,7 @@ TEST_F(RtcpTransceiverImplTest, SendCompoundPacketDelaysPeriodicSendPackets) { config.initial_report_delay = TimeDelta::Zero(); config.report_period = kReportPeriod; config.task_queue = queue.get(); - absl::optional rtcp_transceiver; + std::optional rtcp_transceiver; queue->PostTask([&] { rtcp_transceiver.emplace(config); }); // Wait for the first packet. @@ -322,7 +343,7 @@ TEST_F(RtcpTransceiverImplTest, SendCompoundPacketDelaysPeriodicSendPackets) { } TEST_F(RtcpTransceiverImplTest, SendsNoRtcpWhenNetworkStateIsDown) { - MockFunction)> mock_transport; + MockFunction)> mock_transport; RtcpTransceiverConfig config = DefaultTestConfig(); config.initial_ready_to_send = false; config.rtcp_transport = mock_transport.AsStdFunction(); @@ -339,7 +360,7 @@ TEST_F(RtcpTransceiverImplTest, SendsNoRtcpWhenNetworkStateIsDown) { } TEST_F(RtcpTransceiverImplTest, SendsRtcpWhenNetworkStateIsUp) { - MockFunction)> mock_transport; + MockFunction)> mock_transport; RtcpTransceiverConfig config = DefaultTestConfig(); config.initial_ready_to_send = false; config.rtcp_transport = mock_transport.AsStdFunction(); @@ -365,7 +386,7 @@ TEST_F(RtcpTransceiverImplTest, SendsPeriodicRtcpWhenNetworkStateIsUp) { config.initial_ready_to_send = false; config.rtcp_transport = transport.AsStdFunction(); config.task_queue = queue.get(); - absl::optional rtcp_transceiver; + std::optional rtcp_transceiver; rtcp_transceiver.emplace(config); queue->PostTask([&] { rtcp_transceiver->SetReadyToSend(true); }); @@ -405,7 +426,7 @@ TEST_F(RtcpTransceiverImplTest, SendsMinimalCompoundPacket) { } TEST_F(RtcpTransceiverImplTest, AvoidsEmptyPacketsInReducedMode) { - MockFunction)> transport; + MockFunction)> transport; EXPECT_CALL(transport, Call).Times(0); NiceMock receive_statistics; @@ -1403,6 +1424,31 @@ TEST_F(RtcpTransceiverImplTest, ParsesTransportFeedback) { rtcp_transceiver.ReceivePacket(tb.Build(), receive_time); } +TEST_F(RtcpTransceiverImplTest, ParsesCongestionControlFeedback) { + MockNetworkLinkRtcpObserver link_observer; + RtcpTransceiverConfig config = DefaultTestConfig(); + config.network_link_observer = &link_observer; + const uint32_t receive_time_ntp = 5678; + Timestamp receive_time = Timestamp::Seconds(9843); + RtcpTransceiverImpl rtcp_transceiver(config); + + EXPECT_CALL(link_observer, OnCongestionControlFeedback(receive_time, _)) + .WillOnce(WithArg<1>([](const rtcp::CongestionControlFeedback& 
message) { + EXPECT_EQ(message.report_timestamp_compact_ntp(), 5678u); + EXPECT_THAT(message.packets(), SizeIs(2)); + })); + + std::vector packets = { + {.ssrc = 1, + .sequence_number = 321, + .arrival_time_offset = TimeDelta::Millis(15)}, + {.ssrc = 1, + .sequence_number = 322, + .arrival_time_offset = TimeDelta::Millis(17)}}; + rtcp::CongestionControlFeedback ccfb(std::move(packets), receive_time_ntp); + rtcp_transceiver.ReceivePacket(ccfb.Build(), receive_time); +} + TEST_F(RtcpTransceiverImplTest, ParsesRemb) { MockNetworkLinkRtcpObserver link_observer; RtcpTransceiverConfig config = DefaultTestConfig(); @@ -1586,10 +1632,10 @@ TEST_F(RtcpTransceiverImplTest, RotatesSendersWhenAllSenderReportDoNotFit) { rtcp_receiver.AddMediaReceiverRtcpObserver(kSenderSsrc[i], &receiver[i]); } - MockFunction)> transport; + MockFunction)> transport; EXPECT_CALL(transport, Call) .Times(kNumSenders) - .WillRepeatedly([&](rtc::ArrayView packet) { + .WillRepeatedly([&](ArrayView packet) { rtcp_receiver.ReceivePacket(packet, CurrentTime()); return true; }); diff --git a/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc b/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc index 40930a0495..6219a7b796 100644 --- a/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc +++ b/modules/rtp_rtcp/source/rtcp_transceiver_unittest.cc @@ -10,17 +10,25 @@ #include "modules/rtp_rtcp/source/rtcp_transceiver.h" +#include #include #include +#include +#include "api/array_view.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/remote_estimate.h" #include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" #include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" +#include "modules/rtp_rtcp/source/rtcp_transceiver_config.h" +#include "rtc_base/buffer.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/event.h" #include "rtc_base/task_queue_for_test.h" #include "system_wrappers/include/clock.h" +#include "system_wrappers/include/ntp_time.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/mock_transport.h" @@ -57,14 +65,14 @@ class MockMediaReceiverRtcpObserver : public webrtc::MediaReceiverRtcpObserver { constexpr webrtc::TimeDelta kTimeout = webrtc::TimeDelta::Seconds(1); void WaitPostedTasks(TaskQueueForTest* queue) { - rtc::Event done; + webrtc::Event done; queue->PostTask([&done] { done.Set(); }); ASSERT_TRUE(done.Wait(kTimeout)); } TEST(RtcpTransceiverTest, SendsRtcpOnTaskQueueWhenCreatedOffTaskQueue) { SimulatedClock clock(0); - MockFunction)> outgoing_transport; + MockFunction)> outgoing_transport; TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config; config.clock = &clock; @@ -82,7 +90,7 @@ TEST(RtcpTransceiverTest, SendsRtcpOnTaskQueueWhenCreatedOffTaskQueue) { TEST(RtcpTransceiverTest, SendsRtcpOnTaskQueueWhenCreatedOnTaskQueue) { SimulatedClock clock(0); - MockFunction)> outgoing_transport; + MockFunction)> outgoing_transport; TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config; config.clock = &clock; @@ -103,7 +111,7 @@ TEST(RtcpTransceiverTest, SendsRtcpOnTaskQueueWhenCreatedOnTaskQueue) { TEST(RtcpTransceiverTest, CanBeDestroyedOnTaskQueue) { SimulatedClock clock(0); - MockFunction)> outgoing_transport; + MockFunction)> outgoing_transport; TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config; config.clock = &clock; @@ -128,8 +136,8 @@ TEST(RtcpTransceiverTest, CanBeDestroyedWithoutBlocking) { auto* rtcp_transceiver = new 
RtcpTransceiver(config); rtcp_transceiver->SendCompoundPacket(); - rtc::Event done; - rtc::Event heavy_task; + webrtc::Event done; + webrtc::Event heavy_task; queue.PostTask([&] { EXPECT_TRUE(heavy_task.Wait(kTimeout)); done.Set(); @@ -143,7 +151,7 @@ TEST(RtcpTransceiverTest, CanBeDestroyedWithoutBlocking) { TEST(RtcpTransceiverTest, MaySendPacketsAfterDestructor) { // i.e. Be careful! SimulatedClock clock(0); // Must outlive queue below. - NiceMock)>> transport; + NiceMock)>> transport; TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config; config.clock = &clock; @@ -151,7 +159,7 @@ TEST(RtcpTransceiverTest, MaySendPacketsAfterDestructor) { // i.e. Be careful! config.task_queue = queue.Get(); auto* rtcp_transceiver = new RtcpTransceiver(config); - rtc::Event heavy_task; + webrtc::Event heavy_task; queue.PostTask([&] { EXPECT_TRUE(heavy_task.Wait(kTimeout)); }); rtcp_transceiver->SendCompoundPacket(); delete rtcp_transceiver; @@ -163,14 +171,14 @@ TEST(RtcpTransceiverTest, MaySendPacketsAfterDestructor) { // i.e. Be careful! } // Use rtp timestamp to distinguish different incoming sender reports. -rtc::CopyOnWriteBuffer CreateSenderReport(uint32_t ssrc, uint32_t rtp_time) { +webrtc::CopyOnWriteBuffer CreateSenderReport(uint32_t ssrc, uint32_t rtp_time) { webrtc::rtcp::SenderReport sr; sr.SetSenderSsrc(ssrc); sr.SetRtpTimestamp(rtp_time); - rtc::Buffer buffer = sr.Build(); + webrtc::Buffer buffer = sr.Build(); // Switch to an efficient way creating CopyOnWriteBuffer from RtcpPacket when // there is one. Until then do not worry about extra memcpy in test. - return rtc::CopyOnWriteBuffer(buffer.data(), buffer.size()); + return webrtc::CopyOnWriteBuffer(buffer.data(), buffer.size()); } TEST(RtcpTransceiverTest, DoesntPostToRtcpObserverAfterCallToRemove) { @@ -181,7 +189,7 @@ TEST(RtcpTransceiverTest, DoesntPostToRtcpObserverAfterCallToRemove) { config.clock = &clock; config.task_queue = queue.Get(); RtcpTransceiver rtcp_transceiver(config); - rtc::Event observer_deleted; + webrtc::Event observer_deleted; auto observer = std::make_unique(); EXPECT_CALL(*observer, OnSenderReport(kRemoteSsrc, _, 1)); @@ -211,8 +219,8 @@ TEST(RtcpTransceiverTest, RemoveMediaReceiverRtcpObserverIsNonBlocking) { auto observer = std::make_unique(); rtcp_transceiver.AddMediaReceiverRtcpObserver(kRemoteSsrc, observer.get()); - rtc::Event queue_blocker; - rtc::Event observer_deleted; + webrtc::Event queue_blocker; + webrtc::Event observer_deleted; queue.PostTask([&] { EXPECT_TRUE(queue_blocker.Wait(kTimeout)); }); rtcp_transceiver.RemoveMediaReceiverRtcpObserver(kRemoteSsrc, observer.get(), /*on_removed=*/[&] { @@ -227,7 +235,7 @@ TEST(RtcpTransceiverTest, RemoveMediaReceiverRtcpObserverIsNonBlocking) { TEST(RtcpTransceiverTest, CanCallSendCompoundPacketFromAnyThread) { SimulatedClock clock(0); - MockFunction)> outgoing_transport; + MockFunction)> outgoing_transport; TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config; config.clock = &clock; @@ -258,7 +266,7 @@ TEST(RtcpTransceiverTest, CanCallSendCompoundPacketFromAnyThread) { TEST(RtcpTransceiverTest, DoesntSendPacketsAfterStopCallback) { SimulatedClock clock(0); - NiceMock)>> + NiceMock)>> outgoing_transport; TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config; @@ -268,7 +276,7 @@ TEST(RtcpTransceiverTest, DoesntSendPacketsAfterStopCallback) { config.schedule_periodic_compound_packets = true; auto rtcp_transceiver = std::make_unique(config); - rtc::Event done; + webrtc::Event done; rtcp_transceiver->SendCompoundPacket(); 
rtcp_transceiver->Stop([&] { EXPECT_CALL(outgoing_transport, Call).Times(0); @@ -282,7 +290,7 @@ TEST(RtcpTransceiverTest, SendsCombinedRtcpPacketOnTaskQueue) { static constexpr uint32_t kSenderSsrc = 12345; SimulatedClock clock(0); - MockFunction)> outgoing_transport; + MockFunction)> outgoing_transport; TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config; config.clock = &clock; @@ -293,7 +301,7 @@ TEST(RtcpTransceiverTest, SendsCombinedRtcpPacketOnTaskQueue) { RtcpTransceiver rtcp_transceiver(config); EXPECT_CALL(outgoing_transport, Call) - .WillOnce([&](rtc::ArrayView buffer) { + .WillOnce([&](webrtc::ArrayView buffer) { EXPECT_TRUE(queue.IsCurrent()); RtcpPacketParser rtcp_parser; rtcp_parser.Parse(buffer); @@ -321,7 +329,7 @@ TEST(RtcpTransceiverTest, SendFrameIntraRequestDefaultsToNewRequest) { static constexpr uint32_t kSenderSsrc = 12345; SimulatedClock clock(0); - MockFunction)> outgoing_transport; + MockFunction)> outgoing_transport; TaskQueueForTest queue("rtcp"); RtcpTransceiverConfig config; config.clock = &clock; @@ -333,7 +341,7 @@ TEST(RtcpTransceiverTest, SendFrameIntraRequestDefaultsToNewRequest) { uint8_t first_seq_nr; EXPECT_CALL(outgoing_transport, Call) - .WillOnce([&](rtc::ArrayView buffer) { + .WillOnce([&](webrtc::ArrayView buffer) { EXPECT_TRUE(queue.IsCurrent()); RtcpPacketParser rtcp_parser; rtcp_parser.Parse(buffer); @@ -341,7 +349,7 @@ TEST(RtcpTransceiverTest, SendFrameIntraRequestDefaultsToNewRequest) { first_seq_nr = rtcp_parser.fir()->requests()[0].seq_nr; return true; }) - .WillOnce([&](rtc::ArrayView buffer) { + .WillOnce([&](webrtc::ArrayView buffer) { EXPECT_TRUE(queue.IsCurrent()); RtcpPacketParser rtcp_parser; rtcp_parser.Parse(buffer); diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc index fd42b798d4..7a8b25d881 100644 --- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc +++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc @@ -11,22 +11,19 @@ #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include +#include #include #include "api/array_view.h" #include "api/transport/rtp/dependency_descriptor.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h" #include "rtc_base/numerics/divide_round.h" namespace webrtc { -constexpr RTPExtensionType RtpDependencyDescriptorExtension::kId; -constexpr std::bitset<32> RtpDependencyDescriptorExtension::kAllChainsAreActive; - bool RtpDependencyDescriptorExtension::Parse( - rtc::ArrayView data, + ArrayView data, const FrameDependencyStructure* structure, DependencyDescriptor* descriptor) { RtpDependencyDescriptorReader reader(data, structure, descriptor); @@ -43,7 +40,7 @@ size_t RtpDependencyDescriptorExtension::ValueSize( } bool RtpDependencyDescriptorExtension::Write( - rtc::ArrayView data, + ArrayView data, const FrameDependencyStructure& structure, std::bitset<32> active_chains, const DependencyDescriptor& descriptor) { @@ -52,4 +49,17 @@ bool RtpDependencyDescriptorExtension::Write( return writer.Write(); } +bool RtpDependencyDescriptorExtension::Parse( + ArrayView data, + DependencyDescriptorMandatory* descriptor) { + if (data.size() < 3) { + return false; + } + descriptor->set_first_packet_in_frame(data[0] & 0b1000'0000); + descriptor->set_last_packet_in_frame(data[0] & 0b0100'0000); + 
descriptor->set_template_id(data[0] & 0b0011'1111); + descriptor->set_frame_number((uint16_t{data[1]} << 8) | data[2]); + return true; +} + } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h index 8d6e4b8d37..97d421eb4d 100644 --- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h +++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h @@ -11,6 +11,7 @@ #define MODULES_RTP_RTCP_SOURCE_RTP_DEPENDENCY_DESCRIPTOR_EXTENSION_H_ #include +#include #include #include "absl/strings/string_view.h" @@ -29,10 +30,15 @@ class RtpDependencyDescriptorExtension { return RtpExtension::kDependencyDescriptorUri; } - static bool Parse(rtc::ArrayView data, + static bool Parse(ArrayView data, const FrameDependencyStructure* structure, DependencyDescriptor* descriptor); + // Reads the mandatory part of the descriptor. + // Such a read is stateless, i.e., it doesn't require a `FrameDependencyStructure`. + static bool Parse(ArrayView data, + DependencyDescriptorMandatory* descriptor); + static size_t ValueSize(const FrameDependencyStructure& structure, const DependencyDescriptor& descriptor) { return ValueSize(structure, kAllChainsAreActive, descriptor); @@ -40,12 +46,12 @@ class RtpDependencyDescriptorExtension { static size_t ValueSize(const FrameDependencyStructure& structure, std::bitset<32> active_chains, const DependencyDescriptor& descriptor); - static bool Write(rtc::ArrayView data, + static bool Write(ArrayView data, const FrameDependencyStructure& structure, const DependencyDescriptor& descriptor) { return Write(data, structure, kAllChainsAreActive, descriptor); } - static bool Write(rtc::ArrayView data, + static bool Write(ArrayView data, const FrameDependencyStructure& structure, std::bitset<32> active_chains, const DependencyDescriptor& descriptor); diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension_unittest.cc b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension_unittest.cc index 148e4f973b..ec64e819af 100644 --- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_extension_unittest.cc @@ -10,10 +10,15 @@ #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" +#include +#include +#include +#include + #include "api/array_view.h" #include "api/transport/rtp/dependency_descriptor.h" -#include "common_video/generic_frame_descriptor/generic_frame_info.h" #include "test/gmock.h" +#include "test/gtest.h" namespace webrtc { namespace { @@ -59,7 +64,7 @@ TEST(RtpDependencyDescriptorExtensionTest, WriteZeroInUnusedBits) { const uint8_t* unused_bytes = buffer + value_size; size_t num_unused_bytes = buffer + sizeof(buffer) - unused_bytes; // Check remaining bytes are zeroed.
- EXPECT_THAT(rtc::MakeArrayView(unused_bytes, num_unused_bytes), Each(0)); + EXPECT_THAT(MakeArrayView(unused_bytes, num_unused_bytes), Each(0)); } // In practice chain diff for inactive chain will grow uboundly because no @@ -132,5 +137,48 @@ TEST(RtpDependencyDescriptorExtensionTest, FailsToWriteInvalidDescriptor) { descriptor)); } +TEST(RtpDependencyDescriptorExtensionTest, + FailsToWriteWhenNumberOfChainsMismatch) { + uint8_t buffer[256]; + FrameDependencyStructure structure; + structure.num_decode_targets = 2; + structure.num_chains = 2; + structure.templates = { + FrameDependencyTemplate().T(0).Dtis("SR").ChainDiffs({2, 2})}; + DependencyDescriptor descriptor; + descriptor.frame_dependencies = structure.templates[0]; + + // Structure has 2 chains, but frame provide 1 chain diff, + descriptor.frame_dependencies.chain_diffs = {2}; + + EXPECT_EQ( + RtpDependencyDescriptorExtension::ValueSize(structure, 0b11, descriptor), + 0u); + EXPECT_FALSE(RtpDependencyDescriptorExtension::Write(buffer, structure, 0b11, + descriptor)); +} + +TEST(RtpDependencyDescriptorExtensionTest, + FailsToWriteWhenNumberOfDecodeTargetsMismatch) { + uint8_t buffer[256]; + FrameDependencyStructure structure; + structure.num_decode_targets = 2; + structure.num_chains = 2; + structure.templates = { + FrameDependencyTemplate().T(0).Dtis("SR").ChainDiffs({2, 2})}; + DependencyDescriptor descriptor; + descriptor.frame_dependencies = structure.templates[0]; + + // Structure has 2 decode targets, but frame provide 1 indication, + descriptor.frame_dependencies.decode_target_indications = { + DecodeTargetIndication::kSwitch}; + + EXPECT_EQ( + RtpDependencyDescriptorExtension::ValueSize(structure, 0b11, descriptor), + 0u); + EXPECT_FALSE(RtpDependencyDescriptorExtension::Write(buffer, structure, 0b11, + descriptor)); +} + } // namespace } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc b/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc index 1a56efd9b3..2d1f06df15 100644 --- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc +++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc @@ -9,10 +9,14 @@ */ #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.h" +#include +#include #include +#include #include #include +#include "api/array_view.h" #include "api/transport/rtp/dependency_descriptor.h" #include "rtc_base/bitstream_reader.h" #include "rtc_base/checks.h" @@ -20,7 +24,7 @@ namespace webrtc { RtpDependencyDescriptorReader::RtpDependencyDescriptorReader( - rtc::ArrayView raw_data, + ArrayView raw_data, const FrameDependencyStructure* structure, DependencyDescriptor* descriptor) : descriptor_(descriptor), buffer_(raw_data) { @@ -199,7 +203,7 @@ void RtpDependencyDescriptorReader::ReadFrameDependencyDefinition() { ReadFrameChains(); if (structure_->resolutions.empty()) { - descriptor_->resolution = absl::nullopt; + descriptor_->resolution = std::nullopt; } else { // Format guarantees that if there were resolutions in the last structure, // then each spatial layer got one. 
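As a usage sketch of the stateless Parse() overload added above: it reads only the three mandatory bytes of the Dependency Descriptor (start/end-of-frame flags, template id, frame number), so no FrameDependencyStructure is needed. PeekFrameNumber is a hypothetical helper; the ArrayView element type (assumed const uint8_t) and the DependencyDescriptorMandatory getters are assumed to mirror the setters used in the parsing code:

#include <cstdint>
#include <optional>

#include "api/array_view.h"
#include "api/transport/rtp/dependency_descriptor.h"
#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"

// Returns the frame number carried in the mandatory part of the extension
// payload, or nullopt when the payload is shorter than the 3 mandatory bytes
// and Parse() rejects it.
std::optional<uint16_t> PeekFrameNumber(
    webrtc::ArrayView<const uint8_t> raw_extension) {
  webrtc::DependencyDescriptorMandatory mandatory;
  if (!webrtc::RtpDependencyDescriptorExtension::Parse(raw_extension,
                                                       &mandatory)) {
    return std::nullopt;
  }
  return static_cast<uint16_t>(mandatory.frame_number());
}

This kind of lightweight inspection previously required running the full RtpDependencyDescriptorReader together with a known frame dependency structure.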
diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.h b/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.h index f79d3d1d07..a350428db0 100644 --- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.h +++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.h @@ -11,8 +11,6 @@ #define MODULES_RTP_RTCP_SOURCE_RTP_DEPENDENCY_DESCRIPTOR_READER_H_ #include -#include -#include #include "api/array_view.h" #include "api/transport/rtp/dependency_descriptor.h" @@ -23,7 +21,7 @@ namespace webrtc { class RtpDependencyDescriptorReader { public: // Parses the dependency descriptor. - RtpDependencyDescriptorReader(rtc::ArrayView raw_data, + RtpDependencyDescriptorReader(ArrayView raw_data, const FrameDependencyStructure* structure, DependencyDescriptor* descriptor); RtpDependencyDescriptorReader(const RtpDependencyDescriptorReader&) = delete; diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc b/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc index 31df783064..dd27525bea 100644 --- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc +++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc @@ -9,6 +9,7 @@ */ #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h" +#include #include #include #include @@ -18,8 +19,10 @@ #include "absl/algorithm/container.h" #include "api/array_view.h" #include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/render_resolution.h" #include "rtc_base/bit_buffer.h" #include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_compare.h" namespace webrtc { namespace { @@ -54,7 +57,7 @@ NextLayerIdc GetNextLayerIdc(const FrameDependencyTemplate& previous, } // namespace RtpDependencyDescriptorWriter::RtpDependencyDescriptorWriter( - rtc::ArrayView data, + ArrayView data, const FrameDependencyStructure& structure, std::bitset<32> active_chains, const DependencyDescriptor& descriptor) @@ -62,6 +65,16 @@ RtpDependencyDescriptorWriter::RtpDependencyDescriptorWriter( structure_(structure), active_chains_(active_chains), bit_writer_(data.data(), data.size()) { + if (SafeNe(descriptor.frame_dependencies.chain_diffs.size(), + structure_.num_chains)) { + build_failed_ = true; + return; + } + if (SafeNe(descriptor.frame_dependencies.decode_target_indications.size(), + structure_.num_decode_targets)) { + build_failed_ = true; + return; + } FindBestTemplate(); } @@ -113,12 +126,12 @@ int RtpDependencyDescriptorWriter::StructureSizeBits() const { for (const FrameDependencyTemplate& frame_template : structure_.templates) { bits += 5 * frame_template.frame_diffs.size(); } - bits += rtc::BitBufferWriter::SizeNonSymmetricBits( + bits += BitBufferWriter::SizeNonSymmetricBits( structure_.num_chains, structure_.num_decode_targets + 1); if (structure_.num_chains > 0) { for (int protected_by : structure_.decode_target_protected_by_chain) { - bits += rtc::BitBufferWriter::SizeNonSymmetricBits(protected_by, - structure_.num_chains); + bits += BitBufferWriter::SizeNonSymmetricBits(protected_by, + structure_.num_chains); } bits += 4 * structure_.templates.size() * structure_.num_chains; } diff --git a/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h b/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h index 568e0a8aab..4a6b362ade 100644 --- a/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h +++ b/modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.h @@ -24,7 +24,7 @@ class RtpDependencyDescriptorWriter { public: // Assumes `structure` and `descriptor` 
are valid and // `descriptor` matches the `structure`. - RtpDependencyDescriptorWriter(rtc::ArrayView data, + RtpDependencyDescriptorWriter(ArrayView data, const FrameDependencyStructure& structure, std::bitset<32> active_chains, const DependencyDescriptor& descriptor); @@ -80,7 +80,7 @@ class RtpDependencyDescriptorWriter { const DependencyDescriptor& descriptor_; const FrameDependencyStructure& structure_; std::bitset<32> active_chains_; - rtc::BitBufferWriter bit_writer_; + BitBufferWriter bit_writer_; TemplateMatch best_template_; }; diff --git a/modules/rtp_rtcp/source/rtp_fec_unittest.cc b/modules/rtp_rtcp/source/rtp_fec_unittest.cc index 7e5aef7634..8548dd5ba1 100644 --- a/modules/rtp_rtcp/source/rtp_fec_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_fec_unittest.cc @@ -8,15 +8,24 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include +#include #include #include +#include +#include #include "absl/algorithm/container.h" +#include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/fec_test_helper.h" #include "modules/rtp_rtcp/source/flexfec_03_header_reader_writer.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" +#include "modules/rtp_rtcp/source/forward_error_correction_internal.h" #include "modules/rtp_rtcp/source/ulpfec_header_reader_writer.h" +#include "rtc_base/checks.h" #include "rtc_base/random.h" #include "test/gtest.h" @@ -173,7 +182,7 @@ bool RtpFecTest::IsRecoveryComplete() { class FlexfecForwardErrorCorrection : public ForwardErrorCorrection { public: - static const uint32_t kFecSsrc = kFlexfecSsrc; + static constexpr uint32_t kFecSsrc = kFlexfecSsrc; FlexfecForwardErrorCorrection() : ForwardErrorCorrection( @@ -184,7 +193,7 @@ class FlexfecForwardErrorCorrection : public ForwardErrorCorrection { // For FlexFEC we let the FEC packet sequence numbers be independent of // the media packet sequence numbers. 
- static uint16_t GetFirstFecSeqNum(uint16_t next_media_seq_num) { + static uint16_t GetFirstFecSeqNum(uint16_t /* next_media_seq_num */) { Random random(0xbe110); return random.Rand(); } @@ -192,7 +201,7 @@ class FlexfecForwardErrorCorrection : public ForwardErrorCorrection { class UlpfecForwardErrorCorrection : public ForwardErrorCorrection { public: - static const uint32_t kFecSsrc = kMediaSsrc; + static constexpr uint32_t kFecSsrc = kMediaSsrc; UlpfecForwardErrorCorrection() : ForwardErrorCorrection( diff --git a/modules/rtp_rtcp/source/rtp_format.cc b/modules/rtp_rtcp/source/rtp_format.cc index 7550b70f69..010ac6eec8 100644 --- a/modules/rtp_rtcp/source/rtp_format.cc +++ b/modules/rtp_rtcp/source/rtp_format.cc @@ -10,9 +10,13 @@ #include "modules/rtp_rtcp/source/rtp_format.h" +#include #include +#include +#include -#include "absl/types/variant.h" +#include "api/array_view.h" +#include "api/video/video_codec_type.h" #include "modules/rtp_rtcp/source/rtp_format_h264.h" #include "modules/rtp_rtcp/source/rtp_format_video_generic.h" #include "modules/rtp_rtcp/source/rtp_format_vp8.h" @@ -22,12 +26,15 @@ #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "rtc_base/checks.h" +#ifdef RTC_ENABLE_H265 +#include "modules/rtp_rtcp/source/rtp_packetizer_h265.h" +#endif namespace webrtc { std::unique_ptr RtpPacketizer::Create( - absl::optional type, - rtc::ArrayView payload, + std::optional type, + ArrayView payload, PayloadSizeLimits limits, // Codec-specific details. const RTPVideoHeader& rtp_video_header) { @@ -39,24 +46,29 @@ std::unique_ptr RtpPacketizer::Create( switch (*type) { case kVideoCodecH264: { const auto& h264 = - absl::get(rtp_video_header.video_type_header); + std::get(rtp_video_header.video_type_header); return std::make_unique(payload, limits, h264.packetization_mode); } case kVideoCodecVP8: { const auto& vp8 = - absl::get(rtp_video_header.video_type_header); + std::get(rtp_video_header.video_type_header); return std::make_unique(payload, limits, vp8); } case kVideoCodecVP9: { const auto& vp9 = - absl::get(rtp_video_header.video_type_header); + std::get(rtp_video_header.video_type_header); return std::make_unique(payload, limits, vp9); } case kVideoCodecAV1: return std::make_unique( payload, limits, rtp_video_header.frame_type, rtp_video_header.is_last_frame_in_picture); +#ifdef RTC_ENABLE_H265 + case kVideoCodecH265: { + return std::make_unique(payload, limits); + } +#endif default: { return std::make_unique(payload, limits, rtp_video_header); diff --git a/modules/rtp_rtcp/source/rtp_format.h b/modules/rtp_rtcp/source/rtp_format.h index 19abd3feb2..05be298b70 100644 --- a/modules/rtp_rtcp/source/rtp_format.h +++ b/modules/rtp_rtcp/source/rtp_format.h @@ -13,11 +13,13 @@ #include +#include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/video/video_codec_type.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" namespace webrtc { @@ -36,8 +38,8 @@ class RtpPacketizer { // If type is not set, returns a raw packetizer. static std::unique_ptr Create( - absl::optional type, - rtc::ArrayView payload, + std::optional type, + ArrayView payload, PayloadSizeLimits limits, // Codec-specific details. 
const RTPVideoHeader& rtp_video_header); diff --git a/modules/rtp_rtcp/source/rtp_format_h264.cc b/modules/rtp_rtcp/source/rtp_format_h264.cc index cc8d1bff34..fd501a2f28 100644 --- a/modules/rtp_rtcp/source/rtp_format_h264.cc +++ b/modules/rtp_rtcp/source/rtp_format_h264.cc @@ -14,32 +14,27 @@ #include #include -#include -#include -#include #include -#include "absl/types/optional.h" -#include "absl/types/variant.h" +#include "absl/algorithm/container.h" +#include "api/array_view.h" #include "common_video/h264/h264_common.h" -#include "common_video/h264/pps_parser.h" -#include "common_video/h264/sps_parser.h" -#include "common_video/h264/sps_vui_rewriter.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { namespace { -static const size_t kNalHeaderSize = 1; -static const size_t kFuAHeaderSize = 2; -static const size_t kLengthFieldSize = 2; +constexpr size_t kNalHeaderSize = 1; +constexpr size_t kFuAHeaderSize = 2; +constexpr size_t kLengthFieldSize = 2; } // namespace -RtpPacketizerH264::RtpPacketizerH264(rtc::ArrayView payload, +RtpPacketizerH264::RtpPacketizerH264(ArrayView payload, PayloadSizeLimits limits, H264PacketizationMode packetization_mode) : limits_(limits), num_packets_left_(0) { @@ -47,16 +42,17 @@ RtpPacketizerH264::RtpPacketizerH264(rtc::ArrayView payload, RTC_CHECK(packetization_mode == H264PacketizationMode::NonInterleaved || packetization_mode == H264PacketizationMode::SingleNalUnit); - for (const auto& nalu : - H264::FindNaluIndices(payload.data(), payload.size())) { + for (const auto& nalu : H264::FindNaluIndices(payload)) { input_fragments_.push_back( payload.subview(nalu.payload_start_offset, nalu.payload_size)); } - - if (!GeneratePackets(packetization_mode)) { - // If failed to generate all the packets, discard already generated - // packets in case the caller would ignore return value and still try to - // call NextPacket(). + bool has_empty_fragments = absl::c_any_of( + input_fragments_, + [](const ArrayView fragment) { return fragment.empty(); }); + if (has_empty_fragments || !GeneratePackets(packetization_mode)) { + // If empty fragments were found or we failed to generate all the packets, + // discard already generated packets in case the caller would ignore the + // return value and still try to call NextPacket(). num_packets_left_ = 0; while (!packets_.empty()) { packets_.pop(); @@ -73,6 +69,7 @@ size_t RtpPacketizerH264::NumPackets() const { bool RtpPacketizerH264::GeneratePackets( H264PacketizationMode packetization_mode) { for (size_t i = 0; i < input_fragments_.size();) { + RTC_DCHECK(!input_fragments_[i].empty()); switch (packetization_mode) { case H264PacketizationMode::SingleNalUnit: if (!PacketizeSingleNalu(i)) @@ -104,7 +101,7 @@ bool RtpPacketizerH264::GeneratePackets( bool RtpPacketizerH264::PacketizeFuA(size_t fragment_index) { // Fragment payload into packets (FU-A). - rtc::ArrayView fragment = input_fragments_[fragment_index]; + ArrayView fragment = input_fragments_[fragment_index]; PayloadSizeLimits limits = limits_; // Leave room for the FU-A header. @@ -153,34 +150,31 @@ bool RtpPacketizerH264::PacketizeFuA(size_t fragment_index) { size_t RtpPacketizerH264::PacketizeStapA(size_t fragment_index) { // Aggregate fragments into one packet (STAP-A). 
size_t payload_size_left = limits_.max_payload_len; - if (input_fragments_.size() == 1) - payload_size_left -= limits_.single_packet_reduction_len; - else if (fragment_index == 0) - payload_size_left -= limits_.first_packet_reduction_len; int aggregated_fragments = 0; size_t fragment_headers_length = 0; - rtc::ArrayView fragment = input_fragments_[fragment_index]; + ArrayView fragment = input_fragments_[fragment_index]; RTC_CHECK_GE(payload_size_left, fragment.size()); ++num_packets_left_; + const bool has_first_fragment = fragment_index == 0; auto payload_size_needed = [&] { size_t fragment_size = fragment.size() + fragment_headers_length; - if (input_fragments_.size() == 1) { - // Single fragment, single packet, payload_size_left already adjusted - // with limits_.single_packet_reduction_len. - return fragment_size; - } - if (fragment_index == input_fragments_.size() - 1) { - // Last fragment, so STAP-A might be the last packet. + bool has_last_fragment = fragment_index == input_fragments_.size() - 1; + if (has_first_fragment && has_last_fragment) { + return fragment_size + limits_.single_packet_reduction_len; + } else if (has_first_fragment) { + return fragment_size + limits_.first_packet_reduction_len; + } else if (has_last_fragment) { return fragment_size + limits_.last_packet_reduction_len; + } else { + return fragment_size; } - return fragment_size; }; - while (payload_size_left >= payload_size_needed()) { RTC_CHECK_GT(fragment.size(), 0); - packets_.push(PacketUnit(fragment, aggregated_fragments == 0, false, true, - fragment[0])); + + packets_.push(PacketUnit(fragment, /*first=*/aggregated_fragments == 0, + /*last=*/false, /*aggregated=*/true, fragment[0])); payload_size_left -= fragment.size(); payload_size_left -= fragment_headers_length; @@ -212,7 +206,7 @@ bool RtpPacketizerH264::PacketizeSingleNalu(size_t fragment_index) { payload_size_left -= limits_.first_packet_reduction_len; else if (fragment_index + 1 == input_fragments_.size()) payload_size_left -= limits_.last_packet_reduction_len; - rtc::ArrayView fragment = input_fragments_[fragment_index]; + ArrayView fragment = input_fragments_[fragment_index]; if (payload_size_left < fragment.size()) { RTC_LOG(LS_ERROR) << "Failed to fit a fragment to packet in SingleNalu " "packetization mode. Payload size left " @@ -221,9 +215,9 @@ bool RtpPacketizerH264::PacketizeSingleNalu(size_t fragment_index) { << limits_.max_payload_len; return false; } - RTC_CHECK_GT(fragment.size(), 0u); - packets_.push(PacketUnit(fragment, true /* first */, true /* last */, - false /* aggregated */, fragment[0])); + RTC_CHECK(!fragment.empty()); + packets_.push(PacketUnit(fragment, /*first=*/true, /*last=*/true, + /*aggregated=*/false, fragment[0])); ++num_packets_left_; return true; } @@ -266,7 +260,7 @@ void RtpPacketizerH264::NextAggregatePacket(RtpPacketToSend* rtp_packet) { size_t index = kNalHeaderSize; bool is_last_fragment = packet->last_fragment; while (packet->aggregated) { - rtc::ArrayView fragment = packet->source_fragment; + ArrayView fragment = packet->source_fragment; RTC_CHECK_LE(index + kLengthFieldSize + fragment.size(), payload_capacity); // Add NAL unit length field. ByteWriter::WriteBigEndian(&buffer[index], fragment.size()); @@ -299,7 +293,7 @@ void RtpPacketizerH264::NextFragmentPacket(RtpPacketToSend* rtp_packet) { fu_header |= (packet->last_fragment ? 
kH264EBit : 0); uint8_t type = packet->header & kH264TypeMask; fu_header |= type; - rtc::ArrayView fragment = packet->source_fragment; + ArrayView fragment = packet->source_fragment; uint8_t* buffer = rtp_packet->AllocatePayload(kFuAHeaderSize + fragment.size()); buffer[0] = fu_indicator; diff --git a/modules/rtp_rtcp/source/rtp_format_h264.h b/modules/rtp_rtcp/source/rtp_format_h264.h index f95c3b6c6b..def175aaf9 100644 --- a/modules/rtp_rtcp/source/rtp_format_h264.h +++ b/modules/rtp_rtcp/source/rtp_format_h264.h @@ -15,14 +15,12 @@ #include #include -#include #include #include "api/array_view.h" #include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" -#include "rtc_base/buffer.h" namespace webrtc { @@ -40,7 +38,7 @@ class RtpPacketizerH264 : public RtpPacketizer { public: // Initialize with payload from encoder. // The payload_data must be exactly one encoded H264 frame. - RtpPacketizerH264(rtc::ArrayView payload, + RtpPacketizerH264(ArrayView payload, PayloadSizeLimits limits, H264PacketizationMode packetization_mode); @@ -64,7 +62,7 @@ class RtpPacketizerH264 : public RtpPacketizer { // packet unit may represent a single NAL unit or a STAP-A packet, of which // there may be multiple in a single RTP packet (if so, aggregated = true). struct PacketUnit { - PacketUnit(rtc::ArrayView source_fragment, + PacketUnit(ArrayView source_fragment, bool first_fragment, bool last_fragment, bool aggregated, @@ -75,7 +73,7 @@ class RtpPacketizerH264 : public RtpPacketizer { aggregated(aggregated), header(header) {} - rtc::ArrayView source_fragment; + ArrayView source_fragment; bool first_fragment; bool last_fragment; bool aggregated; @@ -92,7 +90,7 @@ class RtpPacketizerH264 : public RtpPacketizer { const PayloadSizeLimits limits_; size_t num_packets_left_; - std::deque> input_fragments_; + std::deque> input_fragments_; std::queue packets_; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc b/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc index 80d8801437..b4288b10cc 100644 --- a/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_format_h264_unittest.cc @@ -10,15 +10,19 @@ #include "modules/rtp_rtcp/source/rtp_format_h264.h" -#include +#include +#include +#include +#include #include #include "absl/algorithm/container.h" #include "api/array_view.h" -#include "common_video/h264/h264_common.h" -#include "modules/rtp_rtcp/mocks/mock_rtp_rtcp.h" -#include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" #include "test/gmock.h" #include "test/gtest.h" @@ -47,13 +51,13 @@ enum Nalu { kFuA = 28 }; -static const size_t kNalHeaderSize = 1; -static const size_t kFuAHeaderSize = 2; +constexpr size_t kNalHeaderSize = 1; +constexpr size_t kFuAHeaderSize = 2; // Creates Buffer that looks like nal unit of given size. -rtc::Buffer GenerateNalUnit(size_t size) { +Buffer GenerateNalUnit(size_t size) { RTC_CHECK_GT(size, 0); - rtc::Buffer buffer(size); + Buffer buffer(size); // Set some valid header. buffer[0] = kSlice; for (size_t i = 1; i < size; ++i) { @@ -66,10 +70,10 @@ rtc::Buffer GenerateNalUnit(size_t size) { } // Create frame consisting of nalus of given size. 
-rtc::Buffer CreateFrame(std::initializer_list nalu_sizes) { +Buffer CreateFrame(std::initializer_list nalu_sizes) { static constexpr int kStartCodeSize = 3; - rtc::Buffer frame(absl::c_accumulate(nalu_sizes, size_t{0}) + - kStartCodeSize * nalu_sizes.size()); + Buffer frame(absl::c_accumulate(nalu_sizes, size_t{0}) + + kStartCodeSize * nalu_sizes.size()); size_t offset = 0; for (size_t nalu_size : nalu_sizes) { EXPECT_GE(nalu_size, 1u); @@ -89,15 +93,15 @@ rtc::Buffer CreateFrame(std::initializer_list nalu_sizes) { } // Create frame consisting of given nalus. -rtc::Buffer CreateFrame(rtc::ArrayView nalus) { +Buffer CreateFrame(ArrayView nalus) { static constexpr int kStartCodeSize = 3; int frame_size = 0; - for (const rtc::Buffer& nalu : nalus) { + for (const Buffer& nalu : nalus) { frame_size += (kStartCodeSize + nalu.size()); } - rtc::Buffer frame(frame_size); + Buffer frame(frame_size); size_t offset = 0; - for (const rtc::Buffer& nalu : nalus) { + for (const Buffer& nalu : nalus) { // Insert nalu start code frame[offset] = 0; frame[offset + 1] = 0; @@ -116,6 +120,7 @@ std::vector FetchAllPackets(RtpPacketizerH264* packetizer) { RtpPacketToSend packet(kNoExtensions); while (packetizer->NextPacket(&packet)) { result.push_back(packet); + packet.Clear(); } EXPECT_THAT(result, SizeIs(num_packets)); return result; @@ -139,9 +144,8 @@ TEST_P(RtpPacketizerH264ModeTest, SingleNalu) { TEST_P(RtpPacketizerH264ModeTest, SingleNaluTwoPackets) { RtpPacketizer::PayloadSizeLimits limits; limits.max_payload_len = kMaxPayloadSize; - rtc::Buffer nalus[] = {GenerateNalUnit(kMaxPayloadSize), - GenerateNalUnit(100)}; - rtc::Buffer frame = CreateFrame(nalus); + Buffer nalus[] = {GenerateNalUnit(kMaxPayloadSize), GenerateNalUnit(100)}; + Buffer frame = CreateFrame(nalus); RtpPacketizerH264 packetizer(frame, limits, GetParam()); std::vector packets = FetchAllPackets(&packetizer); @@ -156,10 +160,10 @@ TEST_P(RtpPacketizerH264ModeTest, RtpPacketizer::PayloadSizeLimits limits; limits.max_payload_len = 200; limits.first_packet_reduction_len = 5; - rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/195), - GenerateNalUnit(/*size=*/200), - GenerateNalUnit(/*size=*/200)}; - rtc::Buffer frame = CreateFrame(nalus); + Buffer nalus[] = {GenerateNalUnit(/*size=*/195), + GenerateNalUnit(/*size=*/200), + GenerateNalUnit(/*size=*/200)}; + Buffer frame = CreateFrame(nalus); RtpPacketizerH264 packetizer(frame, limits, GetParam()); std::vector packets = FetchAllPackets(&packetizer); @@ -175,10 +179,10 @@ TEST_P(RtpPacketizerH264ModeTest, RtpPacketizer::PayloadSizeLimits limits; limits.max_payload_len = 200; limits.last_packet_reduction_len = 5; - rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/200), - GenerateNalUnit(/*size=*/200), - GenerateNalUnit(/*size=*/195)}; - rtc::Buffer frame = CreateFrame(nalus); + Buffer nalus[] = {GenerateNalUnit(/*size=*/200), + GenerateNalUnit(/*size=*/200), + GenerateNalUnit(/*size=*/195)}; + Buffer frame = CreateFrame(nalus); RtpPacketizerH264 packetizer(frame, limits, GetParam()); std::vector packets = FetchAllPackets(&packetizer); @@ -195,7 +199,7 @@ TEST_P(RtpPacketizerH264ModeTest, limits.max_payload_len = 200; limits.first_packet_reduction_len = 20; limits.last_packet_reduction_len = 30; - rtc::Buffer frame = CreateFrame({150}); + Buffer frame = CreateFrame({150}); RtpPacketizerH264 packetizer(frame, limits, GetParam()); std::vector packets = FetchAllPackets(&packetizer); @@ -211,10 +215,9 @@ INSTANTIATE_TEST_SUITE_P( // Aggregation tests. 
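As context for the aggregation tests that follow, here is a compact restatement of the budget rule that the reworked payload_size_needed logic in PacketizeStapA() now follows (and that StapARespectsSinglePacketReduction below verifies). PacketPayloadBudget is a hypothetical helper written only to spell the rule out; the fields are those of RtpPacketizer::PayloadSizeLimits:

#include "modules/rtp_rtcp/source/rtp_format.h"

// Effective payload budget for a packet, given its position within the frame.
// When a packet is both the first and the last one, single_packet_reduction_len
// applies on its own and may exceed the sum of the first and last reductions.
int PacketPayloadBudget(const webrtc::RtpPacketizer::PayloadSizeLimits& limits,
                        bool first_packet,
                        bool last_packet) {
  int budget = limits.max_payload_len;
  if (first_packet && last_packet) {
    budget -= limits.single_packet_reduction_len;
  } else if (first_packet) {
    budget -= limits.first_packet_reduction_len;
  } else if (last_packet) {
    budget -= limits.last_packet_reduction_len;
  }
  return budget;
}

With the limits used in StapARespectsSinglePacketReduction below (max_payload_len 1000, single_packet_reduction_len 8), the single-packet budget is 992 bytes, which is exactly the payload size that test expects.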
TEST(RtpPacketizerH264Test, StapA) { - rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/2), - GenerateNalUnit(/*size=*/2), - GenerateNalUnit(/*size=*/0x123)}; - rtc::Buffer frame = CreateFrame(nalus); + Buffer nalus[] = {GenerateNalUnit(/*size=*/2), GenerateNalUnit(/*size=*/2), + GenerateNalUnit(/*size=*/0x123)}; + Buffer frame = CreateFrame(nalus); RtpPacketizerH264 packetizer(frame, kNoLimits, H264PacketizationMode::NonInterleaved); @@ -247,7 +250,7 @@ TEST(RtpPacketizerH264Test, StapA) { TEST(RtpPacketizerH264Test, SingleNalUnitModeHasNoStapA) { // This is the same setup as for the StapA test. - rtc::Buffer frame = CreateFrame({2, 2, 0x123}); + Buffer frame = CreateFrame({2, 2, 0x123}); RtpPacketizerH264 packetizer(frame, kNoLimits, H264PacketizationMode::SingleNalUnit); @@ -266,10 +269,9 @@ TEST(RtpPacketizerH264Test, StapARespectsFirstPacketReduction) { limits.first_packet_reduction_len = 100; const size_t kFirstFragmentSize = limits.max_payload_len - limits.first_packet_reduction_len; - rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/kFirstFragmentSize), - GenerateNalUnit(/*size=*/2), - GenerateNalUnit(/*size=*/2)}; - rtc::Buffer frame = CreateFrame(nalus); + Buffer nalus[] = {GenerateNalUnit(/*size=*/kFirstFragmentSize), + GenerateNalUnit(/*size=*/2), GenerateNalUnit(/*size=*/2)}; + Buffer frame = CreateFrame(nalus); RtpPacketizerH264 packetizer(frame, limits, H264PacketizationMode::NonInterleaved); @@ -285,29 +287,64 @@ TEST(RtpPacketizerH264Test, StapARespectsFirstPacketReduction) { 0, 2, nalus[2][0], nalus[2][1])); } +TEST(RtpPacketizerH264Test, StapARespectsSinglePacketReduction) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 1000; + // It is possible for single_packet_reduction_len to be greater than + // first_packet_reduction_len + last_packet_reduction_len. Check that the + // right limit is used when first and last fragment go to one packet. + limits.first_packet_reduction_len = 4; + limits.last_packet_reduction_len = 0; + limits.single_packet_reduction_len = 8; + // 3 fragments of sizes 2 + 2 + 981, plus 7 bytes of headers, is expected to + // be packetized to single packet of size 992. + Buffer first_nalus[] = {GenerateNalUnit(/*size=*/2), + GenerateNalUnit(/*size=*/2), + GenerateNalUnit(/*size=*/981)}; + Buffer first_frame = CreateFrame(first_nalus); + + RtpPacketizerH264 first_packetizer(first_frame, limits, + H264PacketizationMode::NonInterleaved); + std::vector packets = FetchAllPackets(&first_packetizer); + + // Expect that everything fits in a single packet. + ASSERT_THAT(packets, SizeIs(1)); + EXPECT_EQ(packets[0].payload_size(), 992u); + + // Increasing the last fragment size by one exceeds + // single_packet_reduction_len and produces two packets. 
+ Buffer second_nalus[] = {GenerateNalUnit(/*size=*/2), + GenerateNalUnit(/*size=*/2), + GenerateNalUnit(/*size=*/982)}; + Buffer second_frame = CreateFrame(second_nalus); + RtpPacketizerH264 second_packetizer(second_frame, limits, + H264PacketizationMode::NonInterleaved); + packets = FetchAllPackets(&second_packetizer); + ASSERT_THAT(packets, SizeIs(2)); +} + TEST(RtpPacketizerH264Test, StapARespectsLastPacketReduction) { RtpPacketizer::PayloadSizeLimits limits; limits.max_payload_len = 1000; limits.last_packet_reduction_len = 100; + const size_t kFirstFragmentSize = 1000; const size_t kLastFragmentSize = - limits.max_payload_len - limits.last_packet_reduction_len; - rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/2), - GenerateNalUnit(/*size=*/2), - GenerateNalUnit(/*size=*/kLastFragmentSize)}; - rtc::Buffer frame = CreateFrame(nalus); + limits.max_payload_len - limits.last_packet_reduction_len + 1; + Buffer nalus[] = {GenerateNalUnit(/*size=*/kFirstFragmentSize), + GenerateNalUnit(/*size=*/kLastFragmentSize)}; + Buffer frame = CreateFrame(nalus); RtpPacketizerH264 packetizer(frame, limits, H264PacketizationMode::NonInterleaved); std::vector packets = FetchAllPackets(&packetizer); - ASSERT_THAT(packets, SizeIs(2)); - // Expect 1st packet is aggregate of 1st two fragments. - EXPECT_THAT(packets[0].payload(), - ElementsAre(kStapA, // - 0, 2, nalus[0][0], nalus[0][1], // - 0, 2, nalus[1][0], nalus[1][1])); - // Expect 2nd packet is single nalu. - EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[2])); + ASSERT_THAT(packets, SizeIs(3)); + // Expect 1st packet contains first fragment. + EXPECT_THAT(packets[0].payload()[0], kSlice); + // Expect 2nd and 3rd packets to be FU-A since last_packet_reduction_len + // was exceeded by one byte. + EXPECT_THAT(packets[1].payload()[0], kFuA); + EXPECT_THAT(packets[2].payload()[0], kFuA); } TEST(RtpPacketizerH264Test, TooSmallForStapAHeaders) { @@ -315,10 +352,9 @@ TEST(RtpPacketizerH264Test, TooSmallForStapAHeaders) { limits.max_payload_len = 1000; const size_t kLastFragmentSize = limits.max_payload_len - 3 * kLengthFieldLength - 4; - rtc::Buffer nalus[] = {GenerateNalUnit(/*size=*/2), - GenerateNalUnit(/*size=*/2), - GenerateNalUnit(/*size=*/kLastFragmentSize)}; - rtc::Buffer frame = CreateFrame(nalus); + Buffer nalus[] = {GenerateNalUnit(/*size=*/2), GenerateNalUnit(/*size=*/2), + GenerateNalUnit(/*size=*/kLastFragmentSize)}; + Buffer frame = CreateFrame(nalus); RtpPacketizerH264 packetizer(frame, limits, H264PacketizationMode::NonInterleaved); @@ -341,10 +377,10 @@ TEST(RtpPacketizerH264Test, MixedStapAFUA) { const size_t kFuaPayloadSize = 70; const size_t kFuaNaluSize = kNalHeaderSize + 2 * kFuaPayloadSize; const size_t kStapANaluSize = 20; - rtc::Buffer nalus[] = {GenerateNalUnit(kFuaNaluSize), - GenerateNalUnit(kStapANaluSize), - GenerateNalUnit(kStapANaluSize)}; - rtc::Buffer frame = CreateFrame(nalus); + Buffer nalus[] = {GenerateNalUnit(kFuaNaluSize), + GenerateNalUnit(kStapANaluSize), + GenerateNalUnit(kStapANaluSize)}; + Buffer frame = CreateFrame(nalus); RtpPacketizerH264 packetizer(frame, limits, H264PacketizationMode::NonInterleaved); @@ -385,7 +421,7 @@ TEST(RtpPacketizerH264Test, LastFragmentFitsInSingleButNotLastPacket) { limits.last_packet_reduction_len = 20; limits.single_packet_reduction_len = 20; // Actual sizes, which triggered this bug. 
- rtc::Buffer frame = CreateFrame({20, 8, 18, 1161}); + Buffer frame = CreateFrame({20, 8, 18, 1161}); RtpPacketizerH264 packetizer(frame, limits, H264PacketizationMode::NonInterleaved); @@ -402,8 +438,8 @@ TEST(RtpPacketizerH264Test, LastFragmentFitsInSingleButNotLastPacket) { // Returns sizes of the payloads excluding fua headers. std::vector TestFua(size_t frame_payload_size, const RtpPacketizer::PayloadSizeLimits& limits) { - rtc::Buffer nalu[] = {GenerateNalUnit(kNalHeaderSize + frame_payload_size)}; - rtc::Buffer frame = CreateFrame(nalu); + Buffer nalu[] = {GenerateNalUnit(kNalHeaderSize + frame_payload_size)}; + Buffer frame = CreateFrame(nalu); RtpPacketizerH264 packetizer(frame, limits, H264PacketizationMode::NonInterleaved); @@ -484,7 +520,7 @@ TEST(RtpPacketizerH264Test, FUABig) { TEST(RtpPacketizerH264Test, RejectsOverlongDataInPacketizationMode0) { RtpPacketizer::PayloadSizeLimits limits; - rtc::Buffer frame = CreateFrame({kMaxPayloadSize + 1}); + Buffer frame = CreateFrame({kMaxPayloadSize + 1}); RtpPacketizerH264 packetizer(frame, limits, H264PacketizationMode::SingleNalUnit); @@ -492,5 +528,190 @@ TEST(RtpPacketizerH264Test, RejectsOverlongDataInPacketizationMode0) { EXPECT_THAT(packets, IsEmpty()); } + +TEST(RtpPacketizerH264Test, DoesNotPacketizeWithEmptyNalUnit) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = kMaxPayloadSize; + + uint8_t empty_nal_input[] = {0x00, 0x00, 0x01, /* empty NAL unit data */ + 0x00, 0x00, 0x01, 0x01}; + RtpPacketizerH264 packetizer(empty_nal_input, limits, + H264PacketizationMode::NonInterleaved); + EXPECT_EQ(packetizer.NumPackets(), 0u); +} + +TEST(RtpPacketizerH264Test, MultipleStapA) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = kMaxPayloadSize; + limits.first_packet_reduction_len = 0; + limits.last_packet_reduction_len = 0; + limits.single_packet_reduction_len = 0; + // A lot of small NAL units that will result in two STAP-A being generated. + // Input data must exceed the size of a single RTP packet. 
+ uint8_t long_input[] = { + 0x19, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, + 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x04, + 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, + 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, + 0x00, 0x01, 0x00, 0xaf, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, + 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, + 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x01, + 0x04, 0x00, 0x19, 0x00, 0x00, 0x01, 0xf9, 0x01, 0x00, 0x00, 0x00, 0x01, + 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0xf7, 0x01, + 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0x0b, 0x01, 0x04, 0x00, + 0x19, 0x00, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x01, 0x04, 0x00, 0x19, 0x00, + 0x00, 0x01, 0x01, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x01, 0x04, 0x00, 0x19, 0x00, 0x00, + 0x01, 0x01, 0x02, 0x00, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, + 0x19, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, + 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x04, + 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x19, + 0x7a, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, + 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x04, + 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0x04, 0x00, 0x00, 0x01, 0x00, + 0xaf, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, + 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x04, + 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x00, 0xaf, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, + 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, + 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, + 0x00, 0x00, 0x11, 0xd4, 0x00, 0x19, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, + 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, + 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x00, 0x00, 0x01, + 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, + 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, + 0x00, 0x01, 0x00, 0xaf, 0x01, 0x04, 0x00, 0x19, 0x00, 0x01, 0x00, 0xaf, + 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x04, 0x00, + 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x10, 0x00, 0x00, + 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x01, 0x04, 0x00, 0x19, 0x00, + 0x00, 0x01, 0xf9, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x00, + 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x08, 0xfe, 0xfb, 0xff, 0xff, 0xf4, 0x01, 0x00, 
0x00, 0x01, 0x8e, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0x04, 0x00, + 0x00, 0x01, 0x00, 0xaf, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, + 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, + 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, + 0x00, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, + 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, + 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x11, 0xd4, 0x00, 0x19, 0x00, 0x00, + 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x01, + 0x04, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, + 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, + 0xaf, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x04, + 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, + 0x00, 0x00, 0x00, 0x00, 0x11, 0xd4, 0x00, 0x19, 0x00, 0x00, 0x01, 0x04, + 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x01, 0x04, 0x00, + 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x00, + 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, + 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, + 0x04, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x01, 0x04, 0x00, 0x19, 0x00, 0x00, + 0x01, 0xf9, 0x01, 0x00, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x01, 0x00, 0xf7, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, + 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, + 0x00, 0x01, 0x00, 0xaf, 0x01, 0x04, 0x00, 0x19, 0x00, 0x00, 0x01, 0x01, + 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x00, 0xaf, 0x01, 0x04, 0x00, 0x19, 0x00, 0x00, 0x01, 0x01, 0x02, + 0x00, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x00, 0xf7, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, + 0x00, 0xf7, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x26, 0x00, 0x00, 0x01, 0x00, + 0xaf, 0x01, 0x04, 0x00, 0x19, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, + 0x01, 0x00, 0xaf, 0x01, 0x04, 0x00, 0x19, 0x00, 0x00, 0x01, 0x00, 0x01, + 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, + 0xaf, 0x01, 0x04, 0x00, 0x19, 0x00, 0x00, 0x01, 0xf9, 0x01, 0x00, 0x00, + 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, + 0xf7, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0x0b, 0x2c, + 0x01, 0x04, 0x00, 0x19, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x01, 0x04, 0x00, 0x19, 0x00, + 0x00, 0x01, 0x01, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x01, 0x04, 0x00, 0x19, 0x00, 0x00, + 0x01, 0x01, 0x02, 0x00, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, + 0x19, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, + 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x04, + 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x01, 0x00, 0x00, 0x19, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, + 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, + 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 
0x00, 0x00, 0x01, 0x8e, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0xaf, + 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x04, 0x00, + 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x10, 0x00, 0x00, + 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x01, 0x04, 0x00, 0x19, 0x00, + 0x00, 0x01, 0xf9, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x00, + 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x08, 0xfe, 0xfb, 0xff, 0xff, 0xf4, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0x04, 0x00, + 0x00, 0x01, 0x00, 0xaf, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, + 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, + 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, + 0x00, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, + 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, + 0x04, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, + 0x8e, 0x00, 0x04, 0x00, 0x00, 0x01, 0x00, 0x04, 0x00, 0x00, 0x01, 0x00, + 0xaf, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x00, 0x00, 0x11, 0xd4, 0x00, + 0x19, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, + 0x8e, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, + 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, + 0x00, 0x01, 0x00, 0xaf, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, + 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, + 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x01, + 0x04, 0x00, 0x19, 0x00, 0x00, 0x01, 0xf9, 0x01, 0x00, 0x00, 0x00, 0x01, + 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0xf7, 0x01, + 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x01, 0x04, 0x00, + 0x19, 0x00, 0x00, 0x01, 0x01, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x01, 0x04, 0x00, 0x19, + 0x00, 0x00, 0x01, 0x01, 0x02, 0x00, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0xf7, 0x01, 0x04, 0x00, 0x00, + 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0xf7, 0x01, 0x04, 0x00, 0x00, 0x01, + 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, + 0x26, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x01, 0x04, 0x00, 0x19, 0x00, 0x00, + 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x01, 0x04, 0x00, 0x19, + 0x00, 0x00, 0x01, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, + 0x04, 0x00, 0x00, 0xaf, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, + 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, + 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, + 0x00, 0x00, 0x00, 0x11, 0xd4, 0x00, 0x19, 0x00, 0x00, 0x01, 0x04, 0x00, + 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x01, 0x04, 0x00, 0x00, + 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x00, 0xaf, 0x00, 0x00, + 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, + 0x01, 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, + 0x00, 0xaf, 0x01, 0x04, 0x00, 0x19, 0x00, 0x00, 0x01, 0xf9, 0x01, 0x00, + 0x00, 0x00, 0x01, 0x8e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, + 0x00, 0xf7, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, + 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x8e, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0x00, 0x01, 0x04, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x01}; + RtpPacketizerH264 packetizer(long_input, limits, + H264PacketizationMode::NonInterleaved); + EXPECT_EQ(packetizer.NumPackets(), 2u); + EXPECT_THAT(FetchAllPackets(&packetizer), SizeIs(2)); +} + } // namespace } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_format_unittest.cc b/modules/rtp_rtcp/source/rtp_format_unittest.cc index 53264c6609..d67e17a376 100644 --- a/modules/rtp_rtcp/source/rtp_format_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_format_unittest.cc @@ -10,8 +10,7 @@ #include "modules/rtp_rtcp/source/rtp_format.h" -#include -#include +#include #include "absl/algorithm/container.h" #include "test/gmock.h" diff --git a/modules/rtp_rtcp/source/rtp_format_video_generic.cc b/modules/rtp_rtcp/source/rtp_format_video_generic.cc index f5c7f2ee29..db64feb467 100644 --- a/modules/rtp_rtcp/source/rtp_format_video_generic.cc +++ b/modules/rtp_rtcp/source/rtp_format_video_generic.cc @@ -12,18 +12,25 @@ #include -#include "absl/types/optional.h" +#include +#include + +#include "api/array_view.h" +#include "api/video/video_frame_type.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" #include "rtc_base/checks.h" -#include "rtc_base/logging.h" namespace webrtc { +namespace { + +constexpr size_t kGenericHeaderLength = 1; +constexpr size_t kExtendedHeaderLength = 2; -static const size_t kGenericHeaderLength = 1; -static const size_t kExtendedHeaderLength = 2; +} // namespace RtpPacketizerGeneric::RtpPacketizerGeneric( - rtc::ArrayView payload, + ArrayView payload, PayloadSizeLimits limits, const RTPVideoHeader& rtp_video_header) : remaining_payload_(payload) { @@ -34,9 +41,8 @@ RtpPacketizerGeneric::RtpPacketizerGeneric( current_packet_ = payload_sizes_.begin(); } -RtpPacketizerGeneric::RtpPacketizerGeneric( - rtc::ArrayView payload, - PayloadSizeLimits limits) +RtpPacketizerGeneric::RtpPacketizerGeneric(ArrayView payload, + PayloadSizeLimits limits) : header_size_(0), remaining_payload_(payload) { payload_sizes_ = SplitAboutEqually(payload.size(), limits); current_packet_ = payload_sizes_.begin(); @@ -86,7 +92,7 @@ void RtpPacketizerGeneric::BuildHeader(const RTPVideoHeader& rtp_video_header) { if (rtp_video_header.frame_type == VideoFrameType::kVideoFrameKey) { header_[0] |= RtpFormatVideoGeneric::kKeyFrameBit; } - if (const auto* generic_header = absl::get_if( + if (const auto* generic_header = std::get_if( &rtp_video_header.video_type_header)) { // Store bottom 15 bits of 
the picture id. Only 15 bits are used for // compatibility with other packetizer implemenetations. diff --git a/modules/rtp_rtcp/source/rtp_format_video_generic.h b/modules/rtp_rtcp/source/rtp_format_video_generic.h index fd44bd1980..3e0c1e3f05 100644 --- a/modules/rtp_rtcp/source/rtp_format_video_generic.h +++ b/modules/rtp_rtcp/source/rtp_format_video_generic.h @@ -12,6 +12,7 @@ #include +#include #include #include "api/array_view.h" @@ -23,11 +24,11 @@ class RtpPacketToSend; struct RTPVideoHeader; namespace RtpFormatVideoGeneric { -static const uint8_t kKeyFrameBit = 0x01; -static const uint8_t kFirstPacketBit = 0x02; +inline constexpr uint8_t kKeyFrameBit = 0x01; +inline constexpr uint8_t kFirstPacketBit = 0x02; // If this bit is set, there will be an extended header contained in this // packet. This was added later so old clients will not send this. -static const uint8_t kExtendedHeaderBit = 0x04; +inline constexpr uint8_t kExtendedHeaderBit = 0x04; } // namespace RtpFormatVideoGeneric class RtpPacketizerGeneric : public RtpPacketizer { @@ -35,14 +36,14 @@ class RtpPacketizerGeneric : public RtpPacketizer { // Initialize with payload from encoder. // The payload_data must be exactly one encoded generic frame. // Packets returned by `NextPacket` will contain the generic payload header. - RtpPacketizerGeneric(rtc::ArrayView payload, + RtpPacketizerGeneric(ArrayView payload, PayloadSizeLimits limits, const RTPVideoHeader& rtp_video_header); // Initialize with payload from encoder. // The payload_data must be exactly one encoded generic frame. // Packets returned by `NextPacket` will contain raw payload without the // generic payload header. - RtpPacketizerGeneric(rtc::ArrayView payload, + RtpPacketizerGeneric(ArrayView payload, PayloadSizeLimits limits); ~RtpPacketizerGeneric() override; @@ -63,7 +64,7 @@ class RtpPacketizerGeneric : public RtpPacketizer { uint8_t header_[3]; size_t header_size_; - rtc::ArrayView remaining_payload_; + ArrayView remaining_payload_; std::vector payload_sizes_; std::vector::const_iterator current_packet_; }; diff --git a/modules/rtp_rtcp/source/rtp_format_video_generic_unittest.cc b/modules/rtp_rtcp/source/rtp_format_video_generic_unittest.cc index d83c3b03c9..e927038f04 100644 --- a/modules/rtp_rtcp/source/rtp_format_video_generic_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_format_video_generic_unittest.cc @@ -10,15 +10,15 @@ #include "modules/rtp_rtcp/source/rtp_format_video_generic.h" -#include -#include -#include +#include +#include #include #include "api/array_view.h" -#include "modules/rtp_rtcp/mocks/mock_rtp_rtcp.h" -#include "modules/rtp_rtcp/source/byte_io.h" +#include "api/video/video_frame_type.h" +#include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" #include "test/gmock.h" #include "test/gtest.h" @@ -82,7 +82,7 @@ TEST(RtpPacketizerVideoGeneric, WritesExtendedHeaderWhenPictureIdIsSet) { RtpPacketToSend packet(nullptr); ASSERT_TRUE(packetizer.NextPacket(&packet)); - rtc::ArrayView payload = packet.payload(); + ArrayView payload = packet.payload(); EXPECT_EQ(payload.size(), 3 + kPayloadSize); EXPECT_TRUE(payload[0] & 0x04); // Extended header bit is set. // Frame id is 37. 
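For context on the bit checks in these tests: the generic payload header sets 0x01 for key frames (kKeyFrameBit), 0x02 on the first packet of a frame (kFirstPacketBit), and 0x04 when two extra picture-id bytes follow (kExtendedHeaderBit), matching the constants declared in rtp_format_video_generic.h above. The sketch below is a hypothetical illustration of that layout; BuildGenericHeader is not a function from this patch.

#include <cstdint>
#include <vector>

// Hypothetical helper: assembles the 1- or 3-byte generic payload header
// whose bits the tests above inspect.
std::vector<uint8_t> BuildGenericHeader(bool key_frame,
                                        bool first_packet,
                                        int picture_id) {  // pass -1 for none
  uint8_t first_byte = 0;
  if (key_frame) first_byte |= 0x01;     // kKeyFrameBit
  if (first_packet) first_byte |= 0x02;  // kFirstPacketBit
  if (picture_id < 0) {
    return {first_byte};  // 1-byte header, no extended picture id.
  }
  first_byte |= 0x04;  // kExtendedHeaderBit
  // Only the bottom 15 bits of the picture id are carried, high byte first,
  // which is why frame id 0x137 appears as payload[1]=0x01, payload[2]=0x37.
  uint16_t pid = static_cast<uint16_t>(picture_id) & 0x7FFF;
  return {first_byte, static_cast<uint8_t>(pid >> 8),
          static_cast<uint8_t>(pid & 0xFF)};
}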
@@ -136,7 +136,7 @@ TEST(RtpPacketizerVideoGeneric, FrameIdOver15bitsWrapsAround) { RtpPacketToSend packet(nullptr); ASSERT_TRUE(packetizer.NextPacket(&packet)); - rtc::ArrayView payload = packet.payload(); + ArrayView payload = packet.payload(); EXPECT_TRUE(payload[0] & 0x04); // Extended header bit is set. // Frame id is 0x137. EXPECT_EQ(0x01u, payload[1]); @@ -152,7 +152,7 @@ TEST(RtpPacketizerVideoGeneric, NoFrameIdDoesNotWriteExtendedHeader) { RtpPacketToSend packet(nullptr); ASSERT_TRUE(packetizer.NextPacket(&packet)); - rtc::ArrayView payload = packet.payload(); + ArrayView payload = packet.payload(); EXPECT_FALSE(payload[0] & 0x04); } @@ -164,7 +164,7 @@ TEST(RtpPacketizerVideoGeneric, DoesNotWriteHeaderForRawPayload) { RtpPacketToSend packet(nullptr); ASSERT_TRUE(packetizer.NextPacket(&packet)); - rtc::ArrayView payload = packet.payload(); + ArrayView payload = packet.payload(); EXPECT_THAT(payload, ElementsAreArray(kPayload)); } diff --git a/modules/rtp_rtcp/source/rtp_format_vp8.cc b/modules/rtp_rtcp/source/rtp_format_vp8.cc index ae5f4e50a4..5550f48471 100644 --- a/modules/rtp_rtcp/source/rtp_format_vp8.cc +++ b/modules/rtp_rtcp/source/rtp_format_vp8.cc @@ -15,11 +15,11 @@ #include +#include "api/array_view.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h" #include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "rtc_base/checks.h" -#include "rtc_base/logging.h" namespace webrtc { namespace { @@ -58,12 +58,14 @@ bool ValidateHeader(const RTPVideoHeaderVP8& hdr_info) { } // namespace -RtpPacketizerVp8::RtpPacketizerVp8(rtc::ArrayView payload, +RtpPacketizerVp8::RtpPacketizerVp8(ArrayView payload, PayloadSizeLimits limits, const RTPVideoHeaderVP8& hdr_info) : hdr_(BuildHeader(hdr_info)), remaining_payload_(payload) { limits.max_payload_len -= hdr_.size(); - payload_sizes_ = SplitAboutEqually(payload.size(), limits); + if (!payload.empty()) { + payload_sizes_ = SplitAboutEqually(payload.size(), limits); + } current_packet_ = payload_sizes_.begin(); } diff --git a/modules/rtp_rtcp/source/rtp_format_vp8.h b/modules/rtp_rtcp/source/rtp_format_vp8.h index d1f569a946..36f4b8b3b3 100644 --- a/modules/rtp_rtcp/source/rtp_format_vp8.h +++ b/modules/rtp_rtcp/source/rtp_format_vp8.h @@ -43,7 +43,7 @@ class RtpPacketizerVp8 : public RtpPacketizer { public: // Initialize with payload from encoder. // The payload_data must be exactly one encoded VP8 frame. 
- RtpPacketizerVp8(rtc::ArrayView payload, + RtpPacketizerVp8(ArrayView payload, PayloadSizeLimits limits, const RTPVideoHeaderVP8& hdr_info); @@ -65,7 +65,7 @@ class RtpPacketizerVp8 : public RtpPacketizer { static RawHeader BuildHeader(const RTPVideoHeaderVP8& header); RawHeader hdr_; - rtc::ArrayView remaining_payload_; + ArrayView remaining_payload_; std::vector payload_sizes_; std::vector::const_iterator current_packet_; }; diff --git a/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.cc b/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.cc index 0088ff8f31..8dabcd8dc5 100644 --- a/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.cc +++ b/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.cc @@ -10,7 +10,14 @@ #include "modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h" +#include +#include + +#include "api/array_view.h" +#include "modules/rtp_rtcp/source/rtp_format_vp8.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "test/gmock.h" #include "test/gtest.h" @@ -57,7 +64,7 @@ RtpFormatVp8TestHelper::~RtpFormatVp8TestHelper() = default; void RtpFormatVp8TestHelper::GetAllPacketsAndCheck( RtpPacketizerVp8* packetizer, - rtc::ArrayView expected_sizes) { + ArrayView expected_sizes) { EXPECT_EQ(packetizer->NumPackets(), expected_sizes.size()); const uint8_t* data_ptr = payload_.begin(); RtpPacketToSend packet(kNoExtensions); @@ -77,7 +84,7 @@ void RtpFormatVp8TestHelper::GetAllPacketsAndCheck( EXPECT_EQ(payload_.end() - data_ptr, 0); } -int RtpFormatVp8TestHelper::CheckHeader(rtc::ArrayView buffer, +int RtpFormatVp8TestHelper::CheckHeader(ArrayView buffer, bool first) { int x_bit = Bit(buffer[0], 7); EXPECT_EQ(Bit(buffer[0], 6), 0); // Reserved. @@ -104,9 +111,8 @@ int RtpFormatVp8TestHelper::CheckHeader(rtc::ArrayView buffer, // Verify that the I bit and the PictureID field are both set in accordance // with the information in hdr_info_->pictureId. -void RtpFormatVp8TestHelper::CheckPictureID( - rtc::ArrayView buffer, - int* offset) { +void RtpFormatVp8TestHelper::CheckPictureID(ArrayView buffer, + int* offset) { int i_bit = Bit(buffer[1], 7); if (hdr_info_->pictureId != kNoPictureId) { EXPECT_EQ(i_bit, 1); @@ -122,9 +128,8 @@ void RtpFormatVp8TestHelper::CheckPictureID( // Verify that the L bit and the TL0PICIDX field are both set in accordance // with the information in hdr_info_->tl0PicIdx. -void RtpFormatVp8TestHelper::CheckTl0PicIdx( - rtc::ArrayView buffer, - int* offset) { +void RtpFormatVp8TestHelper::CheckTl0PicIdx(ArrayView buffer, + int* offset) { int l_bit = Bit(buffer[1], 6); if (hdr_info_->tl0PicIdx != kNoTl0PicIdx) { EXPECT_EQ(l_bit, 1); @@ -138,9 +143,8 @@ void RtpFormatVp8TestHelper::CheckTl0PicIdx( // Verify that the T bit and the TL0PICIDX field, and the K bit and KEYIDX // field are all set in accordance with the information in // hdr_info_->temporalIdx and hdr_info_->keyIdx, respectively. 
-void RtpFormatVp8TestHelper::CheckTIDAndKeyIdx( - rtc::ArrayView buffer, - int* offset) { +void RtpFormatVp8TestHelper::CheckTIDAndKeyIdx(ArrayView buffer, + int* offset) { int t_bit = Bit(buffer[1], 5); int k_bit = Bit(buffer[1], 4); if (hdr_info_->temporalIdx == kNoTemporalIdx && diff --git a/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h b/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h index 3ecaa476da..a364df0294 100644 --- a/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h +++ b/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h @@ -17,6 +17,9 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_ #define MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_ +#include +#include + #include "api/array_view.h" #include "modules/rtp_rtcp/source/rtp_format_vp8.h" #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" @@ -33,22 +36,21 @@ class RtpFormatVp8TestHelper { RtpFormatVp8TestHelper& operator=(const RtpFormatVp8TestHelper&) = delete; void GetAllPacketsAndCheck(RtpPacketizerVp8* packetizer, - rtc::ArrayView expected_sizes); + ArrayView expected_sizes); - rtc::ArrayView payload() const { return payload_; } + ArrayView payload() const { return payload_; } size_t payload_size() const { return payload_.size(); } private: // Returns header size, i.e. payload offset. - int CheckHeader(rtc::ArrayView rtp_payload, bool first); - void CheckPictureID(rtc::ArrayView rtp_payload, int* offset); - void CheckTl0PicIdx(rtc::ArrayView rtp_payload, int* offset); - void CheckTIDAndKeyIdx(rtc::ArrayView rtp_payload, - int* offset); + int CheckHeader(ArrayView rtp_payload, bool first); + void CheckPictureID(ArrayView rtp_payload, int* offset); + void CheckTl0PicIdx(ArrayView rtp_payload, int* offset); + void CheckTIDAndKeyIdx(ArrayView rtp_payload, int* offset); void CheckPayload(const uint8_t* data_ptr); const RTPVideoHeaderVP8* const hdr_info_; - rtc::Buffer payload_; + Buffer payload_; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc b/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc index 7934ff8ea9..304369ba49 100644 --- a/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc @@ -10,10 +10,11 @@ #include "modules/rtp_rtcp/source/rtp_format_vp8.h" -#include +#include +#include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h" -#include "test/gmock.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "test/gtest.h" namespace webrtc { @@ -21,6 +22,18 @@ namespace { constexpr RtpPacketizer::PayloadSizeLimits kNoSizeLimits; +TEST(RtpPacketizerVp8Test, EmptyPayload) { + RTPVideoHeaderVP8 hdr_info; + hdr_info.InitRTPVideoHeaderVP8(); + hdr_info.pictureId = 200; + RtpFormatVp8TestHelper helper(&hdr_info, /*payload_len=*/30); + + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 12; // Small enough to produce 4 packets. 
+ RtpPacketizerVp8 packetizer({}, limits, hdr_info); + EXPECT_EQ(packetizer.NumPackets(), 0u); +} + TEST(RtpPacketizerVp8Test, ResultPacketsAreAlmostEqualSize) { RTPVideoHeaderVP8 hdr_info; hdr_info.InitRTPVideoHeaderVP8(); diff --git a/modules/rtp_rtcp/source/rtp_format_vp9.cc b/modules/rtp_rtcp/source/rtp_format_vp9.cc index 15e059e85c..4e8237e30f 100644 --- a/modules/rtp_rtcp/source/rtp_format_vp9.cc +++ b/modules/rtp_rtcp/source/rtp_format_vp9.cc @@ -12,10 +12,12 @@ #include -#include "api/video/video_codec_constants.h" +#include + +#include "api/array_view.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h" #include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "rtc_base/bit_buffer.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -94,8 +96,8 @@ size_t RefIndicesLength(const RTPVideoHeaderVP9& hdr) { if (!hdr.inter_pic_predicted || !hdr.flexible_mode) return 0; - RTC_DCHECK_GT(hdr.num_ref_pics, 0U); - RTC_DCHECK_LE(hdr.num_ref_pics, kMaxVp9RefPics); + RTC_CHECK_GT(hdr.num_ref_pics, 0U); + RTC_CHECK_LE(hdr.num_ref_pics, kMaxVp9RefPics); return hdr.num_ref_pics; } @@ -123,9 +125,9 @@ size_t SsDataLength(const RTPVideoHeaderVP9& hdr) { if (!hdr.ss_data_available) return 0; - RTC_DCHECK_GT(hdr.num_spatial_layers, 0U); - RTC_DCHECK_LE(hdr.num_spatial_layers, kMaxVp9NumberOfSpatialLayers); - RTC_DCHECK_LE(hdr.gof.num_frames_in_gof, kMaxVp9FramesInGof); + RTC_CHECK_GT(hdr.num_spatial_layers, 0U); + RTC_CHECK_LE(hdr.num_spatial_layers, kMaxVp9NumberOfSpatialLayers); + RTC_CHECK_LE(hdr.gof.num_frames_in_gof, kMaxVp9FramesInGof); size_t length = 1; // V if (hdr.spatial_layer_resolution_present) { length += 4 * hdr.num_spatial_layers; // Y @@ -136,7 +138,7 @@ size_t SsDataLength(const RTPVideoHeaderVP9& hdr) { // N_G length += hdr.gof.num_frames_in_gof; // T, U, R for (size_t i = 0; i < hdr.gof.num_frames_in_gof; ++i) { - RTC_DCHECK_LE(hdr.gof.num_ref_pics[i], kMaxVp9RefPics); + RTC_CHECK_LE(hdr.gof.num_ref_pics[i], kMaxVp9RefPics); length += hdr.gof.num_ref_pics[i]; // R times } return length; @@ -155,8 +157,7 @@ size_t PayloadDescriptorLengthMinusSsData(const RTPVideoHeaderVP9& hdr) { // M: | EXTENDED PID | // +-+-+-+-+-+-+-+-+ // -bool WritePictureId(const RTPVideoHeaderVP9& vp9, - rtc::BitBufferWriter* writer) { +bool WritePictureId(const RTPVideoHeaderVP9& vp9, BitBufferWriter* writer) { bool m_bit = (PictureIdLength(vp9) == 2); RETURN_FALSE_ON_ERROR(writer->WriteBits(m_bit ? 1 : 0, 1)); RETURN_FALSE_ON_ERROR(writer->WriteBits(vp9.picture_id, m_bit ? 15 : 7)); @@ -172,7 +173,7 @@ bool WritePictureId(const RTPVideoHeaderVP9& vp9, // +-+-+-+-+-+-+-+-+ // bool WriteLayerInfoCommon(const RTPVideoHeaderVP9& vp9, - rtc::BitBufferWriter* writer) { + BitBufferWriter* writer) { RETURN_FALSE_ON_ERROR(writer->WriteBits(TemporalIdxField(vp9, 0), 3)); RETURN_FALSE_ON_ERROR(writer->WriteBits(vp9.temporal_up_switch ? 
1 : 0, 1)); RETURN_FALSE_ON_ERROR(writer->WriteBits(SpatialIdxField(vp9, 0), 3)); @@ -190,13 +191,12 @@ bool WriteLayerInfoCommon(const RTPVideoHeaderVP9& vp9, // +-+-+-+-+-+-+-+-+ // bool WriteLayerInfoNonFlexibleMode(const RTPVideoHeaderVP9& vp9, - rtc::BitBufferWriter* writer) { + BitBufferWriter* writer) { RETURN_FALSE_ON_ERROR(writer->WriteUInt8(Tl0PicIdxField(vp9, 0))); return true; } -bool WriteLayerInfo(const RTPVideoHeaderVP9& vp9, - rtc::BitBufferWriter* writer) { +bool WriteLayerInfo(const RTPVideoHeaderVP9& vp9, BitBufferWriter* writer) { if (!WriteLayerInfoCommon(vp9, writer)) return false; @@ -213,8 +213,7 @@ bool WriteLayerInfo(const RTPVideoHeaderVP9& vp9, // +-+-+-+-+-+-+-+-+ N=1: An additional P_DIFF follows // current P_DIFF. // -bool WriteRefIndices(const RTPVideoHeaderVP9& vp9, - rtc::BitBufferWriter* writer) { +bool WriteRefIndices(const RTPVideoHeaderVP9& vp9, BitBufferWriter* writer) { if (!PictureIdPresent(vp9) || vp9.num_ref_pics == 0 || vp9.num_ref_pics > kMaxVp9RefPics) { return false; @@ -247,10 +246,10 @@ bool WriteRefIndices(const RTPVideoHeaderVP9& vp9, // | P_DIFF | (OPTIONAL) . R times . // +-+-+-+-+-+-+-+-+ -| -| // -bool WriteSsData(const RTPVideoHeaderVP9& vp9, rtc::BitBufferWriter* writer) { - RTC_DCHECK_GT(vp9.num_spatial_layers, 0U); - RTC_DCHECK_LE(vp9.num_spatial_layers, kMaxVp9NumberOfSpatialLayers); - RTC_DCHECK_LE(vp9.gof.num_frames_in_gof, kMaxVp9FramesInGof); +bool WriteSsData(const RTPVideoHeaderVP9& vp9, BitBufferWriter* writer) { + RTC_CHECK_GT(vp9.num_spatial_layers, 0U); + RTC_CHECK_LE(vp9.num_spatial_layers, kMaxVp9NumberOfSpatialLayers); + RTC_CHECK_LE(vp9.gof.num_frames_in_gof, kMaxVp9FramesInGof); bool g_bit = vp9.gof.num_frames_in_gof > 0; RETURN_FALSE_ON_ERROR(writer->WriteBits(vp9.num_spatial_layers - 1, 3)); @@ -288,6 +287,8 @@ bool WriteSsData(const RTPVideoHeaderVP9& vp9, rtc::BitBufferWriter* writer) { // current API to invoke SVC is not flexible enough. 
RTPVideoHeaderVP9 RemoveInactiveSpatialLayers( const RTPVideoHeaderVP9& original_header) { + RTC_CHECK_LE(original_header.num_spatial_layers, + kMaxVp9NumberOfSpatialLayers); RTPVideoHeaderVP9 hdr(original_header); if (original_header.first_active_layer == 0) return hdr; @@ -307,20 +308,21 @@ RTPVideoHeaderVP9 RemoveInactiveSpatialLayers( } } // namespace -RtpPacketizerVp9::RtpPacketizerVp9(rtc::ArrayView payload, +RtpPacketizerVp9::RtpPacketizerVp9(ArrayView payload, PayloadSizeLimits limits, const RTPVideoHeaderVP9& hdr) : hdr_(RemoveInactiveSpatialLayers(hdr)), header_size_(PayloadDescriptorLengthMinusSsData(hdr_)), first_packet_extra_header_size_(SsDataLength(hdr_)), remaining_payload_(payload) { - RTC_DCHECK_EQ(hdr_.first_active_layer, 0); + RTC_CHECK_EQ(hdr_.first_active_layer, 0); limits.max_payload_len -= header_size_; limits.first_packet_reduction_len += first_packet_extra_header_size_; limits.single_packet_reduction_len += first_packet_extra_header_size_; - - payload_sizes_ = SplitAboutEqually(payload.size(), limits); + if (!payload.empty()) { + payload_sizes_ = SplitAboutEqually(payload.size(), limits); + } current_packet_ = payload_sizes_.begin(); } @@ -348,8 +350,7 @@ bool RtpPacketizerVp9::NextPacket(RtpPacketToSend* packet) { uint8_t* buffer = packet->AllocatePayload(header_size + packet_payload_len); RTC_CHECK(buffer); - if (!WriteHeader(layer_begin, layer_end, - rtc::MakeArrayView(buffer, header_size))) + if (!WriteHeader(layer_begin, layer_end, MakeArrayView(buffer, header_size))) return false; memcpy(buffer + header_size, remaining_payload_.data(), packet_payload_len); @@ -357,8 +358,8 @@ bool RtpPacketizerVp9::NextPacket(RtpPacketToSend* packet) { // Ensure end_of_picture is always set on top spatial layer when it is not // dropped. - RTC_DCHECK(hdr_.spatial_idx < hdr_.num_spatial_layers - 1 || - hdr_.end_of_picture); + RTC_CHECK(hdr_.spatial_idx < hdr_.num_spatial_layers - 1 || + hdr_.end_of_picture); packet->SetMarker(layer_end && hdr_.end_of_picture); return true; @@ -401,7 +402,7 @@ bool RtpPacketizerVp9::NextPacket(RtpPacketToSend* packet) { // +-+-+-+-+-+-+-+-+ bool RtpPacketizerVp9::WriteHeader(bool layer_begin, bool layer_end, - rtc::ArrayView buffer) const { + ArrayView buffer) const { // Required payload descriptor byte. bool i_bit = PictureIdPresent(hdr_); bool p_bit = hdr_.inter_pic_predicted; @@ -412,7 +413,7 @@ bool RtpPacketizerVp9::WriteHeader(bool layer_begin, bool v_bit = hdr_.ss_data_available && b_bit; bool z_bit = hdr_.non_ref_for_inter_layer_pred; - rtc::BitBufferWriter writer(buffer.data(), buffer.size()); + BitBufferWriter writer(buffer.data(), buffer.size()); RETURN_FALSE_ON_ERROR(writer.WriteBits(i_bit ? 1 : 0, 1)); RETURN_FALSE_ON_ERROR(writer.WriteBits(p_bit ? 1 : 0, 1)); RETURN_FALSE_ON_ERROR(writer.WriteBits(l_bit ? 1 : 0, 1)); diff --git a/modules/rtp_rtcp/source/rtp_format_vp9.h b/modules/rtp_rtcp/source/rtp_format_vp9.h index 3cf4dd56e5..80c43f35b0 100644 --- a/modules/rtp_rtcp/source/rtp_format_vp9.h +++ b/modules/rtp_rtcp/source/rtp_format_vp9.h @@ -36,7 +36,7 @@ namespace webrtc { class RtpPacketizerVp9 : public RtpPacketizer { public: // The `payload` must be one encoded VP9 layer frame. - RtpPacketizerVp9(rtc::ArrayView payload, + RtpPacketizerVp9(ArrayView payload, PayloadSizeLimits limits, const RTPVideoHeaderVP9& hdr); @@ -58,12 +58,12 @@ class RtpPacketizerVp9 : public RtpPacketizer { // the layer frame. Returns false on failure. 
bool WriteHeader(bool layer_begin, bool layer_end, - rtc::ArrayView rtp_payload) const; + ArrayView rtp_payload) const; const RTPVideoHeaderVP9 hdr_; const int header_size_; const int first_packet_extra_header_size_; - rtc::ArrayView remaining_payload_; + ArrayView remaining_payload_; std::vector payload_sizes_; std::vector::const_iterator current_packet_; }; diff --git a/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc b/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc index e18b8a803f..58789da14f 100644 --- a/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_format_vp9_unittest.cc @@ -10,13 +10,18 @@ #include "modules/rtp_rtcp/source/rtp_format_vp9.h" +#include +#include #include #include #include "api/array_view.h" +#include "api/video/video_codec_type.h" +#include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h" -#include "test/gmock.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "test/gtest.h" namespace webrtc { @@ -73,11 +78,11 @@ void ParseAndCheckPacket(const uint8_t* packet, size_t expected_length) { RTPVideoHeader video_header; EXPECT_EQ(VideoRtpDepacketizerVp9::ParseRtpPayload( - rtc::MakeArrayView(packet, expected_length), &video_header), + MakeArrayView(packet, expected_length), &video_header), expected_hdr_length); EXPECT_EQ(kVideoCodecVP9, video_header.codec); auto& vp9_header = - absl::get(video_header.video_type_header); + std::get(video_header.video_type_header); VerifyHeader(expected, vp9_header); } @@ -149,9 +154,8 @@ class RtpPacketizerVp9Test : public ::testing::Test { EXPECT_EQ(last, payload_pos_ == payload_.size()); } - void CreateParseAndCheckPackets( - rtc::ArrayView expected_hdr_sizes, - rtc::ArrayView expected_sizes) { + void CreateParseAndCheckPackets(ArrayView expected_hdr_sizes, + ArrayView expected_sizes) { ASSERT_EQ(expected_hdr_sizes.size(), expected_sizes.size()); ASSERT_TRUE(packetizer_ != nullptr); EXPECT_EQ(expected_sizes.size(), num_packets_); @@ -179,13 +183,18 @@ class RtpPacketizerVp9Test : public ::testing::Test { VideoRtpDepacketizerVp9::ParseRtpPayload(packet_.payload(), &video_header); const auto& vp9_header = - absl::get(video_header.video_type_header); + std::get(video_header.video_type_header); EXPECT_EQ(vp9_header.spatial_idx, expected_layer); EXPECT_EQ(vp9_header.num_spatial_layers, num_spatial_layers); } } }; +TEST_F(RtpPacketizerVp9Test, EmptyPayload) { + RTPVideoHeader video_header; + VideoRtpDepacketizerVp9::ParseRtpPayload({}, &video_header); +} + TEST_F(RtpPacketizerVp9Test, TestEqualSizedMode_OnePacket) { const size_t kFrameSize = 25; const size_t kPacketSize = 26; diff --git a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor.cc b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor.cc index 465308ec45..9bb88987b7 100644 --- a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor.cc +++ b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor.cc @@ -12,14 +12,11 @@ #include +#include "api/array_view.h" #include "rtc_base/checks.h" namespace webrtc { -constexpr int RtpGenericFrameDescriptor::kMaxNumFrameDependencies; -constexpr int RtpGenericFrameDescriptor::kMaxTemporalLayers; -constexpr int RtpGenericFrameDescriptor::kMaxSpatialLayers; - RtpGenericFrameDescriptor::RtpGenericFrameDescriptor() = default; RtpGenericFrameDescriptor::RtpGenericFrameDescriptor( const RtpGenericFrameDescriptor&) = default; 
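As a usage note for the frame-descriptor API touched here: dependencies are stored as positive diffs against the current frame id, and reading them back is only valid on the first packet of a subframe. The call pattern below is an assumed illustration, not code from the patch.

#include <cstdint>

#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h"

// Assumed usage sketch of RtpGenericFrameDescriptor.
void FillDescriptor(webrtc::RtpGenericFrameDescriptor& descriptor) {
  descriptor.SetFirstPacketInSubFrame(true);  // Diffs are read on this packet.
  descriptor.SetFrameId(100);
  // Frame 100 depends on frames 99 and 96, expressed as diffs 1 and 4.
  descriptor.AddFrameDependencyDiff(1);
  descriptor.AddFrameDependencyDiff(4);
  for (uint16_t diff : descriptor.FrameDependenciesDiffs()) {
    // Referenced frame ids can be recovered as FrameId() - diff (mod 2^16).
    (void)diff;
  }
}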
@@ -78,10 +75,10 @@ void RtpGenericFrameDescriptor::SetFrameId(uint16_t frame_id) { frame_id_ = frame_id; } -rtc::ArrayView -RtpGenericFrameDescriptor::FrameDependenciesDiffs() const { +ArrayView RtpGenericFrameDescriptor::FrameDependenciesDiffs() + const { RTC_DCHECK(FirstPacketInSubFrame()); - return rtc::MakeArrayView(frame_deps_id_diffs_, num_frame_deps_); + return MakeArrayView(frame_deps_id_diffs_, num_frame_deps_); } bool RtpGenericFrameDescriptor::AddFrameDependencyDiff(uint16_t fdiff) { diff --git a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h index 8760acca2a..7ecdb2a69e 100644 --- a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h +++ b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h @@ -13,9 +13,7 @@ #include #include -#include -#include "absl/types/optional.h" #include "api/array_view.h" namespace webrtc { @@ -56,7 +54,7 @@ class RtpGenericFrameDescriptor { uint16_t FrameId() const; void SetFrameId(uint16_t frame_id); - rtc::ArrayView FrameDependenciesDiffs() const; + ArrayView FrameDependenciesDiffs() const; void ClearFrameDependencies() { num_frame_deps_ = 0; } // Returns false on failure, i.e. number of dependencies is too large. bool AddFrameDependencyDiff(uint16_t fdiff); diff --git a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.cc b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.cc index 8a0810f445..e8ca3dd3b6 100644 --- a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.cc +++ b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.cc @@ -10,6 +10,11 @@ #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" +#include +#include + +#include "api/array_view.h" +#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" #include "rtc_base/checks.h" namespace webrtc { @@ -56,10 +61,9 @@ constexpr uint8_t kFlageXtendedOffset = 0x02; // +---------------+ // | ... 
| // +-+-+-+-+-+-+-+-+ -constexpr RTPExtensionType RtpGenericFrameDescriptorExtension00::kId; bool RtpGenericFrameDescriptorExtension00::Parse( - rtc::ArrayView data, + ArrayView data, RtpGenericFrameDescriptor* descriptor) { if (data.empty()) { return false; @@ -127,7 +131,7 @@ size_t RtpGenericFrameDescriptorExtension00::ValueSize( } bool RtpGenericFrameDescriptorExtension00::Write( - rtc::ArrayView data, + ArrayView data, const RtpGenericFrameDescriptor& descriptor) { RTC_CHECK_EQ(data.size(), ValueSize(descriptor)); uint8_t base_header = @@ -148,7 +152,7 @@ bool RtpGenericFrameDescriptorExtension00::Write( uint16_t frame_id = descriptor.FrameId(); data[2] = frame_id & 0xff; data[3] = frame_id >> 8; - rtc::ArrayView fdiffs = descriptor.FrameDependenciesDiffs(); + ArrayView fdiffs = descriptor.FrameDependenciesDiffs(); size_t offset = 4; if (descriptor.FirstPacketInSubFrame() && fdiffs.empty() && descriptor.Width() > 0 && descriptor.Height() > 0) { diff --git a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h index b4f686565f..8c6555564c 100644 --- a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h +++ b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h @@ -33,10 +33,10 @@ class RtpGenericFrameDescriptorExtension00 { } static constexpr int kMaxSizeBytes = 16; - static bool Parse(rtc::ArrayView data, + static bool Parse(ArrayView data, RtpGenericFrameDescriptor* descriptor); static size_t ValueSize(const RtpGenericFrameDescriptor& descriptor); - static bool Write(rtc::ArrayView data, + static bool Write(ArrayView data, const RtpGenericFrameDescriptor& descriptor); }; diff --git a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension_unittest.cc b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension_unittest.cc index 7c27326f75..4e3ac9be12 100644 --- a/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension_unittest.cc @@ -9,6 +9,9 @@ */ #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" +#include + +#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" #include "test/gmock.h" #include "test/gtest.h" diff --git a/modules/rtp_rtcp/source/rtp_header_extension_map.cc b/modules/rtp_rtcp/source/rtp_header_extension_map.cc index 4b8c7b5385..ef28dbae20 100644 --- a/modules/rtp_rtcp/source/rtp_header_extension_map.cc +++ b/modules/rtp_rtcp/source/rtp_header_extension_map.cc @@ -10,7 +10,13 @@ #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include + #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/rtp_parameters.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/corruption_detection_extension.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" @@ -34,7 +40,7 @@ constexpr ExtensionInfo CreateExtensionInfo() { constexpr ExtensionInfo kExtensions[] = { CreateExtensionInfo(), - CreateExtensionInfo(), + CreateExtensionInfo(), CreateExtensionInfo(), CreateExtensionInfo(), CreateExtensionInfo(), @@ -53,6 +59,7 @@ constexpr ExtensionInfo kExtensions[] = { CreateExtensionInfo(), CreateExtensionInfo(), CreateExtensionInfo(), + CreateExtensionInfo(), }; // Because of kRtpExtensionNone, NumberOfExtension is 1 
bigger than the actual @@ -63,9 +70,6 @@ static_assert(arraysize(kExtensions) == } // namespace -constexpr RTPExtensionType RtpHeaderExtensionMap::kInvalidType; -constexpr int RtpHeaderExtensionMap::kInvalidId; - RtpHeaderExtensionMap::RtpHeaderExtensionMap() : RtpHeaderExtensionMap(false) {} RtpHeaderExtensionMap::RtpHeaderExtensionMap(bool extmap_allow_mixed) @@ -75,14 +79,13 @@ RtpHeaderExtensionMap::RtpHeaderExtensionMap(bool extmap_allow_mixed) } RtpHeaderExtensionMap::RtpHeaderExtensionMap( - rtc::ArrayView extensions) + ArrayView extensions) : RtpHeaderExtensionMap(false) { for (const RtpExtension& extension : extensions) RegisterByUri(extension.id, extension.uri); } -void RtpHeaderExtensionMap::Reset( - rtc::ArrayView extensions) { +void RtpHeaderExtensionMap::Reset(ArrayView extensions) { for (auto& id : ids_) id = kInvalidId; for (const RtpExtension& extension : extensions) diff --git a/modules/rtp_rtcp/source/rtp_header_extension_map_unittest.cc b/modules/rtp_rtcp/source/rtp_header_extension_map_unittest.cc index 42842cc876..61188cd7c8 100644 --- a/modules/rtp_rtcp/source/rtp_header_extension_map_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_header_extension_map_unittest.cc @@ -11,7 +11,7 @@ #include -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "api/rtp_parameters.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "test/gtest.h" @@ -85,8 +85,8 @@ TEST(RtpHeaderExtensionTest, NonUniqueId) { RtpHeaderExtensionMap map; EXPECT_TRUE(map.Register(3)); - EXPECT_FALSE(map.Register(3)); - EXPECT_TRUE(map.Register(4)); + EXPECT_FALSE(map.Register(3)); + EXPECT_TRUE(map.Register(4)); } TEST(RtpHeaderExtensionTest, GetType) { diff --git a/modules/rtp_rtcp/source/rtp_header_extension_size.cc b/modules/rtp_rtcp/source/rtp_header_extension_size.cc index 4acbcf4e6b..b9016de16e 100644 --- a/modules/rtp_rtcp/source/rtp_header_extension_size.cc +++ b/modules/rtp_rtcp/source/rtp_header_extension_size.cc @@ -10,11 +10,13 @@ #include "modules/rtp_rtcp/source/rtp_header_extension_size.h" +#include "api/array_view.h" #include "api/rtp_parameters.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" namespace webrtc { -int RtpHeaderExtensionSize(rtc::ArrayView extensions, +int RtpHeaderExtensionSize(ArrayView extensions, const RtpHeaderExtensionMap& registered_extensions) { // RFC3550 Section 5.3.1 static constexpr int kExtensionBlockHeaderSize = 4; diff --git a/modules/rtp_rtcp/source/rtp_header_extension_size.h b/modules/rtp_rtcp/source/rtp_header_extension_size.h index 1fb2eb2a1e..b7fd960da1 100644 --- a/modules/rtp_rtcp/source/rtp_header_extension_size.h +++ b/modules/rtp_rtcp/source/rtp_header_extension_size.h @@ -24,7 +24,7 @@ struct RtpExtensionSize { // Calculates rtp header extension size in bytes assuming packet contain // all `extensions` with provided `value_size`. // Counts only extensions present among `registered_extensions`. 
-int RtpHeaderExtensionSize(rtc::ArrayView extensions, +int RtpHeaderExtensionSize(ArrayView extensions, const RtpHeaderExtensionMap& registered_extensions); } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_header_extension_size_unittest.cc b/modules/rtp_rtcp/source/rtp_header_extension_size_unittest.cc index 5cc26bc652..388b4d6269 100644 --- a/modules/rtp_rtcp/source/rtp_header_extension_size_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_header_extension_size_unittest.cc @@ -9,6 +9,7 @@ */ #include "modules/rtp_rtcp/source/rtp_header_extension_size.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "test/gtest.h" diff --git a/modules/rtp_rtcp/source/rtp_header_extensions.cc b/modules/rtp_rtcp/source/rtp_header_extensions.cc index e42a84bd06..3f9a852e18 100644 --- a/modules/rtp_rtcp/source/rtp_header_extensions.cc +++ b/modules/rtp_rtcp/source/rtp_header_extensions.cc @@ -16,13 +16,14 @@ #include #include #include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/rtp_headers.h" +#include "api/units/time_delta.h" #include "api/video/color_space.h" #include "api/video/hdr_metadata.h" #include "api/video/video_content_type.h" @@ -48,7 +49,7 @@ namespace webrtc { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // | ID | len=2 | absolute send time | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -bool AbsoluteSendTime::Parse(rtc::ArrayView data, +bool AbsoluteSendTime::Parse(ArrayView data, uint32_t* time_24bits) { if (data.size() != 3) return false; @@ -56,8 +57,7 @@ bool AbsoluteSendTime::Parse(rtc::ArrayView data, return true; } -bool AbsoluteSendTime::Write(rtc::ArrayView data, - uint32_t time_24bits) { +bool AbsoluteSendTime::Write(ArrayView data, uint32_t time_24bits) { RTC_DCHECK_EQ(data.size(), 3); RTC_DCHECK_LE(time_24bits, 0x00FFFFFF); ByteWriter::WriteBigEndian(data.data(), time_24bits); @@ -99,7 +99,7 @@ bool AbsoluteSendTime::Write(rtc::ArrayView data, // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // | ... 
(56-63) | // +-+-+-+-+-+-+-+-+ -bool AbsoluteCaptureTimeExtension::Parse(rtc::ArrayView data, +bool AbsoluteCaptureTimeExtension::Parse(ArrayView data, AbsoluteCaptureTime* extension) { if (data.size() != kValueSizeBytes && data.size() != kValueSizeBytesWithoutEstimatedCaptureClockOffset) { @@ -119,14 +119,14 @@ bool AbsoluteCaptureTimeExtension::Parse(rtc::ArrayView data, size_t AbsoluteCaptureTimeExtension::ValueSize( const AbsoluteCaptureTime& extension) { - if (extension.estimated_capture_clock_offset != absl::nullopt) { + if (extension.estimated_capture_clock_offset != std::nullopt) { return kValueSizeBytes; } else { return kValueSizeBytesWithoutEstimatedCaptureClockOffset; } } -bool AbsoluteCaptureTimeExtension::Write(rtc::ArrayView data, +bool AbsoluteCaptureTimeExtension::Write(ArrayView data, const AbsoluteCaptureTime& extension) { RTC_DCHECK_EQ(data.size(), ValueSize(extension)); @@ -160,24 +160,24 @@ bool AbsoluteCaptureTimeExtension::Write(rtc::ArrayView data, // | ID | len=1 |V| level | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // Sample Audio Level Encoding Using the Two-Byte Header Format -bool AudioLevel::Parse(rtc::ArrayView data, - bool* voice_activity, - uint8_t* audio_level) { +bool AudioLevelExtension::Parse(ArrayView data, + AudioLevel* extension) { // One-byte and two-byte format share the same data definition. if (data.size() != 1) return false; - *voice_activity = (data[0] & 0x80) != 0; - *audio_level = data[0] & 0x7F; + bool voice_activity = (data[0] & 0x80) != 0; + int audio_level = data[0] & 0x7F; + *extension = AudioLevel(voice_activity, audio_level); return true; } -bool AudioLevel::Write(rtc::ArrayView data, - bool voice_activity, - uint8_t audio_level) { +bool AudioLevelExtension::Write(ArrayView data, + const AudioLevel& extension) { // One-byte and two-byte format share the same data definition. RTC_DCHECK_EQ(data.size(), 1); - RTC_CHECK_LE(audio_level, 0x7f); - data[0] = (voice_activity ? 0x80 : 0x00) | audio_level; + RTC_CHECK_GE(extension.level(), 0); + RTC_CHECK_LE(extension.level(), 0x7f); + data[0] = (extension.voice_activity() ? 0x80 : 0x00) | extension.level(); return true; } @@ -202,7 +202,7 @@ bool AudioLevel::Write(rtc::ArrayView data, // |0| level 3 | 0 (pad) | ... 
| // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // Sample Audio Level Encoding Using the Two-Byte Header Format -bool CsrcAudioLevel::Parse(rtc::ArrayView data, +bool CsrcAudioLevel::Parse(ArrayView data, std::vector* csrc_audio_levels) { if (data.size() > kRtpCsrcSize) { return false; @@ -214,13 +214,12 @@ bool CsrcAudioLevel::Parse(rtc::ArrayView data, return true; } -size_t CsrcAudioLevel::ValueSize( - rtc::ArrayView csrc_audio_levels) { +size_t CsrcAudioLevel::ValueSize(ArrayView csrc_audio_levels) { return csrc_audio_levels.size(); } -bool CsrcAudioLevel::Write(rtc::ArrayView data, - rtc::ArrayView csrc_audio_levels) { +bool CsrcAudioLevel::Write(ArrayView data, + ArrayView csrc_audio_levels) { RTC_CHECK_LE(csrc_audio_levels.size(), kRtpCsrcSize); if (csrc_audio_levels.size() != data.size()) { return false; @@ -247,7 +246,7 @@ bool CsrcAudioLevel::Write(rtc::ArrayView data, // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // | ID | len=2 | transmission offset | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -bool TransmissionOffset::Parse(rtc::ArrayView data, +bool TransmissionOffset::Parse(ArrayView data, int32_t* rtp_time) { if (data.size() != 3) return false; @@ -255,7 +254,7 @@ bool TransmissionOffset::Parse(rtc::ArrayView data, return true; } -bool TransmissionOffset::Write(rtc::ArrayView data, int32_t rtp_time) { +bool TransmissionOffset::Write(ArrayView data, int32_t rtp_time) { RTC_DCHECK_EQ(data.size(), 3); RTC_DCHECK_LE(rtp_time, 0x00ffffff); ByteWriter::WriteBigEndian(data.data(), rtp_time); @@ -269,7 +268,7 @@ bool TransmissionOffset::Write(rtc::ArrayView data, int32_t rtp_time) { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // | ID | L=1 |transport-wide sequence number | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -bool TransportSequenceNumber::Parse(rtc::ArrayView data, +bool TransportSequenceNumber::Parse(ArrayView data, uint16_t* transport_sequence_number) { if (data.size() != kValueSizeBytes) return false; @@ -277,7 +276,7 @@ bool TransportSequenceNumber::Parse(rtc::ArrayView data, return true; } -bool TransportSequenceNumber::Write(rtc::ArrayView data, +bool TransportSequenceNumber::Write(ArrayView data, uint16_t transport_sequence_number) { RTC_DCHECK_EQ(data.size(), ValueSize(transport_sequence_number)); ByteWriter::WriteBigEndian(data.data(), transport_sequence_number); @@ -302,16 +301,16 @@ bool TransportSequenceNumber::Write(rtc::ArrayView data, // cover including the current packet. If `seq_count` is zero no feedback is // requested. 
bool TransportSequenceNumberV2::Parse( - rtc::ArrayView data, + ArrayView data, uint16_t* transport_sequence_number, - absl::optional* feedback_request) { + std::optional* feedback_request) { if (data.size() != kValueSizeBytes && data.size() != kValueSizeBytesWithoutFeedbackRequest) return false; *transport_sequence_number = ByteReader::ReadBigEndian(data.data()); - *feedback_request = absl::nullopt; + *feedback_request = std::nullopt; if (data.size() == kValueSizeBytes) { uint16_t feedback_request_raw = ByteReader::ReadBigEndian(data.data() + 2); @@ -328,9 +327,9 @@ bool TransportSequenceNumberV2::Parse( } bool TransportSequenceNumberV2::Write( - rtc::ArrayView data, + ArrayView data, uint16_t transport_sequence_number, - const absl::optional& feedback_request) { + const std::optional& feedback_request) { RTC_DCHECK_EQ(data.size(), ValueSize(transport_sequence_number, feedback_request)); @@ -358,7 +357,7 @@ bool TransportSequenceNumberV2::Write( // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // | ID | len=0 |0 0 0 0 C F R R| // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -bool VideoOrientation::Parse(rtc::ArrayView data, +bool VideoOrientation::Parse(ArrayView data, VideoRotation* rotation) { if (data.size() != 1) return false; @@ -366,22 +365,20 @@ bool VideoOrientation::Parse(rtc::ArrayView data, return true; } -bool VideoOrientation::Write(rtc::ArrayView data, - VideoRotation rotation) { +bool VideoOrientation::Write(ArrayView data, VideoRotation rotation) { RTC_DCHECK_EQ(data.size(), 1); data[0] = ConvertVideoRotationToCVOByte(rotation); return true; } -bool VideoOrientation::Parse(rtc::ArrayView data, - uint8_t* value) { +bool VideoOrientation::Parse(ArrayView data, uint8_t* value) { if (data.size() != 1) return false; *value = data[0]; return true; } -bool VideoOrientation::Write(rtc::ArrayView data, uint8_t value) { +bool VideoOrientation::Write(ArrayView data, uint8_t value) { RTC_DCHECK_EQ(data.size(), 1); data[0] = value; return true; @@ -392,7 +389,7 @@ bool VideoOrientation::Write(rtc::ArrayView data, uint8_t value) { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // | ID | len=2 | MIN delay | MAX delay | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -bool PlayoutDelayLimits::Parse(rtc::ArrayView data, +bool PlayoutDelayLimits::Parse(ArrayView data, VideoPlayoutDelay* playout_delay) { RTC_DCHECK(playout_delay); if (data.size() != 3) @@ -403,7 +400,7 @@ bool PlayoutDelayLimits::Parse(rtc::ArrayView data, return playout_delay->Set(min_raw * kGranularity, max_raw * kGranularity); } -bool PlayoutDelayLimits::Write(rtc::ArrayView data, +bool PlayoutDelayLimits::Write(ArrayView data, const VideoPlayoutDelay& playout_delay) { RTC_DCHECK_EQ(data.size(), 3); @@ -432,7 +429,7 @@ bool PlayoutDelayLimits::Write(rtc::ArrayView data, // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // | ID | len=0 | Content type | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -bool VideoContentTypeExtension::Parse(rtc::ArrayView data, +bool VideoContentTypeExtension::Parse(ArrayView data, VideoContentType* content_type) { if (data.size() == 1 && videocontenttypehelpers::IsValidContentType(data[0])) { @@ -446,7 +443,7 @@ bool VideoContentTypeExtension::Parse(rtc::ArrayView data, return false; } -bool VideoContentTypeExtension::Write(rtc::ArrayView data, +bool VideoContentTypeExtension::Write(ArrayView data, VideoContentType content_type) { RTC_DCHECK_EQ(data.size(), 1); data[0] = static_cast(content_type); @@ -474,7 +471,7 @@ bool VideoContentTypeExtension::Write(rtc::ArrayView data, // 
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // | network2 timestamp ms delta | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -bool VideoTimingExtension::Parse(rtc::ArrayView data, +bool VideoTimingExtension::Parse(ArrayView data, VideoSendTiming* timing) { RTC_DCHECK(timing); // TODO(sprang): Deprecate support for old wire format. @@ -506,7 +503,7 @@ bool VideoTimingExtension::Parse(rtc::ArrayView data, return true; } -bool VideoTimingExtension::Write(rtc::ArrayView data, +bool VideoTimingExtension::Write(ArrayView data, const VideoSendTiming& timing) { RTC_DCHECK_EQ(data.size(), 1 + 2 * 6); ByteWriter::WriteBigEndian(data.data() + kFlagsOffset, timing.flags); @@ -528,7 +525,7 @@ bool VideoTimingExtension::Write(rtc::ArrayView data, return true; } -bool VideoTimingExtension::Write(rtc::ArrayView data, +bool VideoTimingExtension::Write(ArrayView data, uint16_t time_delta_ms, uint8_t offset) { RTC_DCHECK_GE(data.size(), offset + 2); @@ -572,7 +569,7 @@ bool VideoTimingExtension::Write(rtc::ArrayView data, // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // |range+chr.sit. | // +-+-+-+-+-+-+-+-+ -bool ColorSpaceExtension::Parse(rtc::ArrayView data, +bool ColorSpaceExtension::Parse(ArrayView data, ColorSpace* color_space) { RTC_DCHECK(color_space); if (data.size() != kValueSizeBytes && @@ -612,7 +609,7 @@ bool ColorSpaceExtension::Parse(rtc::ArrayView data, return true; } -bool ColorSpaceExtension::Write(rtc::ArrayView data, +bool ColorSpaceExtension::Write(ArrayView data, const ColorSpace& color_space) { RTC_DCHECK_EQ(data.size(), ValueSize(color_space)); size_t offset = 0; @@ -650,7 +647,7 @@ uint8_t ColorSpaceExtension::CombineRangeAndChromaSiting( static_cast(chroma_siting_vertical); } -size_t ColorSpaceExtension::ParseHdrMetadata(rtc::ArrayView data, +size_t ColorSpaceExtension::ParseHdrMetadata(ArrayView data, HdrMetadata* hdr_metadata) { RTC_DCHECK_EQ(data.size(), kValueSizeBytes - kValueSizeBytesWithoutHdrMetadata); @@ -697,7 +694,7 @@ size_t ColorSpaceExtension::ParseLuminance(const uint8_t* data, return 2; // Return number of bytes read. } -size_t ColorSpaceExtension::WriteHdrMetadata(rtc::ArrayView data, +size_t ColorSpaceExtension::WriteHdrMetadata(ArrayView data, const HdrMetadata& hdr_metadata) { RTC_DCHECK_EQ(data.size(), kValueSizeBytes - kValueSizeBytesWithoutHdrMetadata); @@ -751,7 +748,7 @@ size_t ColorSpaceExtension::WriteLuminance(uint8_t* data, return 2; // Return number of bytes written. } -bool BaseRtpStringExtension::Parse(rtc::ArrayView data, +bool BaseRtpStringExtension::Parse(ArrayView data, std::string* str) { if (data.empty() || data[0] == 0) // Valid string extension can't be empty. return false; @@ -763,7 +760,7 @@ bool BaseRtpStringExtension::Parse(rtc::ArrayView data, return true; } -bool BaseRtpStringExtension::Write(rtc::ArrayView data, +bool BaseRtpStringExtension::Write(ArrayView data, absl::string_view str) { if (str.size() > kMaxValueSizeBytes) { return false; @@ -791,18 +788,18 @@ bool BaseRtpStringExtension::Write(rtc::ArrayView data, // | ID | len=1 |N| level | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // Sample Audio Level Encoding Using the Two-Byte Header Format -bool InbandComfortNoiseExtension::Parse(rtc::ArrayView data, - absl::optional* level) { +bool InbandComfortNoiseExtension::Parse(ArrayView data, + std::optional* level) { if (data.size() != kValueSizeBytes) return false; *level = (data[0] & 0b1000'0000) != 0 - ? absl::nullopt - : absl::make_optional(data[0] & 0b0111'1111); + ? 
std::nullopt + : std::make_optional(data[0] & 0b0111'1111); return true; } -bool InbandComfortNoiseExtension::Write(rtc::ArrayView data, - absl::optional level) { +bool InbandComfortNoiseExtension::Write(ArrayView data, + std::optional level) { RTC_DCHECK_EQ(data.size(), kValueSizeBytes); data[0] = 0b0000'0000; if (level) { @@ -821,7 +818,7 @@ bool InbandComfortNoiseExtension::Write(rtc::ArrayView data, // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // | ID | L=1 | video-frame-tracking-id | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -bool VideoFrameTrackingIdExtension::Parse(rtc::ArrayView data, +bool VideoFrameTrackingIdExtension::Parse(ArrayView data, uint16_t* video_frame_tracking_id) { if (data.size() != kValueSizeBytes) { return false; @@ -830,7 +827,7 @@ bool VideoFrameTrackingIdExtension::Parse(rtc::ArrayView data, return true; } -bool VideoFrameTrackingIdExtension::Write(rtc::ArrayView data, +bool VideoFrameTrackingIdExtension::Write(ArrayView data, uint16_t video_frame_tracking_id) { RTC_DCHECK_EQ(data.size(), kValueSizeBytes); ByteWriter::WriteBigEndian(data.data(), video_frame_tracking_id); diff --git a/modules/rtp_rtcp/source/rtp_header_extensions.h b/modules/rtp_rtcp/source/rtp_header_extensions.h index 739d4765d0..8fb26e5522 100644 --- a/modules/rtp_rtcp/source/rtp_header_extensions.h +++ b/modules/rtp_rtcp/source/rtp_header_extensions.h @@ -14,6 +14,7 @@ #include #include +#include #include #include @@ -21,12 +22,23 @@ #include "api/array_view.h" #include "api/rtp_headers.h" #include "api/rtp_parameters.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "api/video/color_space.h" +#include "api/video/hdr_metadata.h" #include "api/video/video_content_type.h" #include "api/video/video_rotation.h" #include "api/video/video_timing.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "rtc_base/checks.h" +#include "system_wrappers/include/ntp_time.h" + +// This file contains class definitions for reading/writing each RTP header +// extension. Each class must be defined such that it is compatible with being +// an argument to the templated RtpPacket::GetExtension and +// RtpPacketToSend::SetExtension methods. New header extensions must have class +// names ending with "Extension", for the purpose of avoiding collisions with +// RTP extension information exposed in the public API of WebRTC. 
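As a reading aid for the comment above, here is a minimal sketch of the shape such an extension class takes and how it plugs into the templated accessors. The class name, URI and id are hypothetical, and ByteReader/ByteWriter from byte_io.h are assumed for the serialization; the real classes in this header (for example AudioLevelExtension below) follow the same pattern.

// Hypothetical example only; not part of this change or the WebRTC API.
class MyCounterExtension {
 public:
  using value_type = uint16_t;
  static constexpr RTPExtensionType kId = kRtpExtensionNone;  // placeholder id
  static constexpr uint8_t kValueSizeBytes = 2;
  static constexpr absl::string_view Uri() { return "urn:example:my-counter"; }

  static bool Parse(ArrayView<const uint8_t> data, uint16_t* value) {
    if (data.size() != kValueSizeBytes)
      return false;
    *value = ByteReader<uint16_t>::ReadBigEndian(data.data());
    return true;
  }
  static size_t ValueSize(uint16_t /* value */) { return kValueSizeBytes; }
  static bool Write(ArrayView<uint8_t> data, uint16_t value) {
    RTC_DCHECK_EQ(data.size(), kValueSizeBytes);
    ByteWriter<uint16_t>::WriteBigEndian(data.data(), value);
    return true;
  }
};

// Usage through the templated accessors mentioned above:
//   send_packet.SetExtension<MyCounterExtension>(42);
//   std::optional<uint16_t> v = received_packet.GetExtension<MyCounterExtension>();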
namespace webrtc { @@ -39,9 +51,11 @@ class AbsoluteSendTime { return RtpExtension::kAbsSendTimeUri; } - static bool Parse(rtc::ArrayView data, uint32_t* time_24bits); - static size_t ValueSize(uint32_t time_24bits) { return kValueSizeBytes; } - static bool Write(rtc::ArrayView data, uint32_t time_24bits); + static bool Parse(ArrayView data, uint32_t* time_24bits); + static size_t ValueSize(uint32_t /* time_24bits */) { + return kValueSizeBytes; + } + static bool Write(ArrayView data, uint32_t time_24bits); static constexpr uint32_t To24Bits(Timestamp time) { int64_t time_us = time.us() % (int64_t{1 << 6} * 1'000'000); @@ -51,9 +65,14 @@ class AbsoluteSendTime { return static_cast(time6x18); } + static uint32_t To24Bits(NtpTime ntp_time) { + uint64_t ntp_time32x32 = static_cast(ntp_time); + return (ntp_time32x32 >> 14) & 0x00FF'FFFF; + } + static constexpr Timestamp ToTimestamp(uint32_t time_24bits) { RTC_DCHECK_LT(time_24bits, (1 << 24)); - return Timestamp::Micros((time_24bits* int64_t{1'000'000}) >> 18); + return Timestamp::Micros((time_24bits * int64_t{1'000'000}) >> 18); } }; @@ -68,30 +87,27 @@ class AbsoluteCaptureTimeExtension { return RtpExtension::kAbsoluteCaptureTimeUri; } - static bool Parse(rtc::ArrayView data, + static bool Parse(ArrayView data, AbsoluteCaptureTime* extension); static size_t ValueSize(const AbsoluteCaptureTime& extension); - static bool Write(rtc::ArrayView data, + static bool Write(ArrayView data, const AbsoluteCaptureTime& extension); }; -class AudioLevel { +class AudioLevelExtension { public: + using value_type = AudioLevel; static constexpr RTPExtensionType kId = kRtpExtensionAudioLevel; static constexpr uint8_t kValueSizeBytes = 1; static constexpr absl::string_view Uri() { return RtpExtension::kAudioLevelUri; } - static bool Parse(rtc::ArrayView data, - bool* voice_activity, - uint8_t* audio_level); - static size_t ValueSize(bool voice_activity, uint8_t audio_level) { + static bool Parse(ArrayView data, AudioLevel* extension); + static size_t ValueSize(const AudioLevel& /* extension */) { return kValueSizeBytes; } - static bool Write(rtc::ArrayView data, - bool voice_activity, - uint8_t audio_level); + static bool Write(ArrayView data, const AudioLevel& extension); }; class CsrcAudioLevel { @@ -102,11 +118,11 @@ class CsrcAudioLevel { return RtpExtension::kCsrcAudioLevelsUri; } - static bool Parse(rtc::ArrayView data, + static bool Parse(ArrayView data, std::vector* csrc_audio_levels); - static size_t ValueSize(rtc::ArrayView csrc_audio_levels); - static bool Write(rtc::ArrayView data, - rtc::ArrayView csrc_audio_levels); + static size_t ValueSize(ArrayView csrc_audio_levels); + static bool Write(ArrayView data, + ArrayView csrc_audio_levels); }; class TransmissionOffset { @@ -118,9 +134,9 @@ class TransmissionOffset { return RtpExtension::kTimestampOffsetUri; } - static bool Parse(rtc::ArrayView data, int32_t* rtp_time); - static size_t ValueSize(int32_t rtp_time) { return kValueSizeBytes; } - static bool Write(rtc::ArrayView data, int32_t rtp_time); + static bool Parse(ArrayView data, int32_t* rtp_time); + static size_t ValueSize(int32_t /* rtp_time */) { return kValueSizeBytes; } + static bool Write(ArrayView data, int32_t rtp_time); }; class TransportSequenceNumber { @@ -132,12 +148,12 @@ class TransportSequenceNumber { return RtpExtension::kTransportSequenceNumberUri; } - static bool Parse(rtc::ArrayView data, + static bool Parse(ArrayView data, uint16_t* transport_sequence_number); static size_t ValueSize(uint16_t /*transport_sequence_number*/) { 
return kValueSizeBytes; } - static bool Write(rtc::ArrayView data, + static bool Write(ArrayView data, uint16_t transport_sequence_number); }; @@ -151,18 +167,18 @@ class TransportSequenceNumberV2 { return RtpExtension::kTransportSequenceNumberV2Uri; } - static bool Parse(rtc::ArrayView data, + static bool Parse(ArrayView data, uint16_t* transport_sequence_number, - absl::optional* feedback_request); + std::optional* feedback_request); static size_t ValueSize( uint16_t /*transport_sequence_number*/, - const absl::optional& feedback_request) { + const std::optional& feedback_request) { return feedback_request ? kValueSizeBytes : kValueSizeBytesWithoutFeedbackRequest; } - static bool Write(rtc::ArrayView data, + static bool Write(ArrayView data, uint16_t transport_sequence_number, - const absl::optional& feedback_request); + const std::optional& feedback_request); private: static constexpr uint16_t kIncludeTimestampsBit = 1 << 15; @@ -177,12 +193,12 @@ class VideoOrientation { return RtpExtension::kVideoRotationUri; } - static bool Parse(rtc::ArrayView data, VideoRotation* value); + static bool Parse(ArrayView data, VideoRotation* value); static size_t ValueSize(VideoRotation) { return kValueSizeBytes; } - static bool Write(rtc::ArrayView data, VideoRotation value); - static bool Parse(rtc::ArrayView data, uint8_t* value); - static size_t ValueSize(uint8_t value) { return kValueSizeBytes; } - static bool Write(rtc::ArrayView data, uint8_t value); + static bool Write(ArrayView data, VideoRotation value); + static bool Parse(ArrayView data, uint8_t* value); + static size_t ValueSize(uint8_t /* value */) { return kValueSizeBytes; } + static bool Write(ArrayView data, uint8_t value); }; class PlayoutDelayLimits { @@ -201,10 +217,10 @@ class PlayoutDelayLimits { // Maximum playout delay value in milliseconds. static constexpr TimeDelta kMax = 0xfff * kGranularity; // 40950. - static bool Parse(rtc::ArrayView data, + static bool Parse(ArrayView data, VideoPlayoutDelay* playout_delay); static size_t ValueSize(const VideoPlayoutDelay&) { return kValueSizeBytes; } - static bool Write(rtc::ArrayView data, + static bool Write(ArrayView data, const VideoPlayoutDelay& playout_delay); }; @@ -217,11 +233,10 @@ class VideoContentTypeExtension { return RtpExtension::kVideoContentTypeUri; } - static bool Parse(rtc::ArrayView data, + static bool Parse(ArrayView data, VideoContentType* content_type); static size_t ValueSize(VideoContentType) { return kValueSizeBytes; } - static bool Write(rtc::ArrayView data, - VideoContentType content_type); + static bool Write(ArrayView data, VideoContentType content_type); }; class VideoTimingExtension { @@ -243,17 +258,15 @@ class VideoTimingExtension { static constexpr uint8_t kNetworkTimestampDeltaOffset = 9; static constexpr uint8_t kNetwork2TimestampDeltaOffset = 11; - static bool Parse(rtc::ArrayView data, - VideoSendTiming* timing); + static bool Parse(ArrayView data, VideoSendTiming* timing); static size_t ValueSize(const VideoSendTiming&) { return kValueSizeBytes; } - static bool Write(rtc::ArrayView data, - const VideoSendTiming& timing); + static bool Write(ArrayView data, const VideoSendTiming& timing); - static size_t ValueSize(uint16_t time_delta_ms, uint8_t idx) { + static size_t ValueSize(uint16_t /* time_delta_ms */, uint8_t /* idx */) { return kValueSizeBytes; } // Writes only single time delta to position idx. 
- static bool Write(rtc::ArrayView data, + static bool Write(ArrayView data, uint16_t time_delta_ms, uint8_t offset); }; @@ -268,14 +281,12 @@ class ColorSpaceExtension { return RtpExtension::kColorSpaceUri; } - static bool Parse(rtc::ArrayView data, - ColorSpace* color_space); + static bool Parse(ArrayView data, ColorSpace* color_space); static size_t ValueSize(const ColorSpace& color_space) { return color_space.hdr_metadata() ? kValueSizeBytes : kValueSizeBytesWithoutHdrMetadata; } - static bool Write(rtc::ArrayView data, - const ColorSpace& color_space); + static bool Write(ArrayView data, const ColorSpace& color_space); private: static constexpr int kChromaticityDenominator = 50000; // 0.00002 resolution. @@ -286,12 +297,12 @@ class ColorSpaceExtension { ColorSpace::RangeID range, ColorSpace::ChromaSiting chroma_siting_horizontal, ColorSpace::ChromaSiting chroma_siting_vertical); - static size_t ParseHdrMetadata(rtc::ArrayView data, + static size_t ParseHdrMetadata(ArrayView data, HdrMetadata* hdr_metadata); static size_t ParseChromaticity(const uint8_t* data, HdrMasteringMetadata::Chromaticity* p); static size_t ParseLuminance(const uint8_t* data, float* f, int denominator); - static size_t WriteHdrMetadata(rtc::ArrayView data, + static size_t WriteHdrMetadata(ArrayView data, const HdrMetadata& hdr_metadata); static size_t WriteChromaticity(uint8_t* data, const HdrMasteringMetadata::Chromaticity& p); @@ -307,9 +318,9 @@ class BaseRtpStringExtension { // maximum length that can be encoded with one-byte header extensions. static constexpr uint8_t kMaxValueSizeBytes = 16; - static bool Parse(rtc::ArrayView data, std::string* str); + static bool Parse(ArrayView data, std::string* str); static size_t ValueSize(absl::string_view str) { return str.size(); } - static bool Write(rtc::ArrayView data, absl::string_view str); + static bool Write(ArrayView data, absl::string_view str); }; class RtpStreamId : public BaseRtpStringExtension { @@ -334,7 +345,7 @@ class RtpMid : public BaseRtpStringExtension { class InbandComfortNoiseExtension { public: - using value_type = absl::optional; + using value_type = std::optional; static constexpr RTPExtensionType kId = kRtpExtensionInbandComfortNoise; static constexpr uint8_t kValueSizeBytes = 1; @@ -342,13 +353,12 @@ class InbandComfortNoiseExtension { "http://www.webrtc.org/experiments/rtp-hdrext/inband-cn"; static constexpr absl::string_view Uri() { return kUri; } - static bool Parse(rtc::ArrayView data, - absl::optional* level); - static size_t ValueSize(absl::optional level) { + static bool Parse(ArrayView data, + std::optional* level); + static size_t ValueSize(std::optional /* level */) { return kValueSizeBytes; } - static bool Write(rtc::ArrayView data, - absl::optional level); + static bool Write(ArrayView data, std::optional level); }; class VideoFrameTrackingIdExtension { @@ -360,13 +370,12 @@ class VideoFrameTrackingIdExtension { return RtpExtension::kVideoFrameTrackingIdUri; } - static bool Parse(rtc::ArrayView data, + static bool Parse(ArrayView data, uint16_t* video_frame_tracking_id); static size_t ValueSize(uint16_t /*video_frame_tracking_id*/) { return kValueSizeBytes; } - static bool Write(rtc::ArrayView data, - uint16_t video_frame_tracking_id); + static bool Write(ArrayView data, uint16_t video_frame_tracking_id); }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_packet.cc b/modules/rtp_rtcp/source/rtp_packet.cc index 2a95a3a816..fe9d3a7d95 100644 --- a/modules/rtp_rtcp/source/rtp_packet.cc +++ 
b/modules/rtp_rtcp/source/rtp_packet.cc @@ -12,11 +12,17 @@ #include #include +#include #include +#include +#include "api/array_view.h" +#include "api/rtp_parameters.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/strings/string_builder.h" @@ -87,11 +93,11 @@ bool RtpPacket::Parse(const uint8_t* buffer, size_t buffer_size) { return true; } -bool RtpPacket::Parse(rtc::ArrayView packet) { +bool RtpPacket::Parse(ArrayView packet) { return Parse(packet.data(), packet.size()); } -bool RtpPacket::Parse(rtc::CopyOnWriteBuffer buffer) { +bool RtpPacket::Parse(CopyOnWriteBuffer buffer) { if (!ParseBuffer(buffer.cdata(), buffer.size())) { Clear(); return false; @@ -190,6 +196,7 @@ void RtpPacket::ZeroMutableExtensions() { case RTPExtensionType::kRtpExtensionCsrcAudioLevel: case RTPExtensionType::kRtpExtensionAbsoluteCaptureTime: case RTPExtensionType::kRtpExtensionColorSpace: + case RTPExtensionType::kRtpExtensionCorruptionDetection: case RTPExtensionType::kRtpExtensionGenericFrameDescriptor: case RTPExtensionType::kRtpExtensionDependencyDescriptor: case RTPExtensionType::kRtpExtensionMid: @@ -209,14 +216,14 @@ void RtpPacket::ZeroMutableExtensions() { } } -void RtpPacket::SetCsrcs(rtc::ArrayView csrcs) { +void RtpPacket::SetCsrcs(ArrayView csrcs) { RTC_DCHECK_EQ(extensions_size_, 0); RTC_DCHECK_EQ(payload_size_, 0); RTC_DCHECK_EQ(padding_size_, 0); RTC_DCHECK_LE(csrcs.size(), 0x0fu); RTC_DCHECK_LE(kFixedHeaderSize + 4 * csrcs.size(), capacity()); payload_offset_ = kFixedHeaderSize + 4 * csrcs.size(); - WriteAt(0, (data()[0] & 0xF0) | rtc::dchecked_cast(csrcs.size())); + WriteAt(0, (data()[0] & 0xF0) | dchecked_cast(csrcs.size())); size_t offset = kFixedHeaderSize; for (uint32_t csrc : csrcs) { ByteWriter::WriteBigEndian(WriteAt(offset), csrc); @@ -225,7 +232,7 @@ void RtpPacket::SetCsrcs(rtc::ArrayView csrcs) { buffer_.SetSize(payload_offset_); } -rtc::ArrayView RtpPacket::AllocateRawExtension(int id, size_t length) { +ArrayView RtpPacket::AllocateRawExtension(int id, size_t length) { RTC_DCHECK_GE(id, RtpExtension::kMinId); RTC_DCHECK_LE(id, RtpExtension::kMaxId); RTC_DCHECK_GE(length, 1); @@ -234,7 +241,7 @@ rtc::ArrayView RtpPacket::AllocateRawExtension(int id, size_t length) { if (extension_entry != nullptr) { // Extension already reserved. Check if same length is used. if (extension_entry->length == length) - return rtc::MakeArrayView(WriteAt(extension_entry->offset), length); + return MakeArrayView(WriteAt(extension_entry->offset), length); RTC_LOG(LS_ERROR) << "Length mismatch for extension id " << id << ": expected " @@ -312,20 +319,20 @@ rtc::ArrayView RtpPacket::AllocateRawExtension(int id, size_t length) { } if (profile_id == kOneByteExtensionProfileId) { - uint8_t one_byte_header = rtc::dchecked_cast(id) << 4; - one_byte_header |= rtc::dchecked_cast(length - 1); + uint8_t one_byte_header = dchecked_cast(id) << 4; + one_byte_header |= dchecked_cast(length - 1); WriteAt(extensions_offset + extensions_size_, one_byte_header); } else { // TwoByteHeaderExtension. 
- uint8_t extension_id = rtc::dchecked_cast(id); + uint8_t extension_id = dchecked_cast(id); WriteAt(extensions_offset + extensions_size_, extension_id); - uint8_t extension_length = rtc::dchecked_cast(length); + uint8_t extension_length = dchecked_cast(length); WriteAt(extensions_offset + extensions_size_ + 1, extension_length); } - const uint16_t extension_info_offset = rtc::dchecked_cast( + const uint16_t extension_info_offset = dchecked_cast( extensions_offset + extensions_size_ + extension_header_size); - const uint8_t extension_info_length = rtc::dchecked_cast(length); + const uint8_t extension_info_length = dchecked_cast(length); extension_entries_.emplace_back(id, extension_info_length, extension_info_offset); @@ -335,8 +342,7 @@ rtc::ArrayView RtpPacket::AllocateRawExtension(int id, size_t length) { SetExtensionLengthMaybeAddZeroPadding(extensions_offset); payload_offset_ = extensions_offset + extensions_size_padded; buffer_.SetSize(payload_offset_); - return rtc::MakeArrayView(WriteAt(extension_info_offset), - extension_info_length); + return MakeArrayView(WriteAt(extension_info_offset), extension_info_length); } void RtpPacket::PromoteToTwoByteHeaderExtension() { @@ -356,7 +362,7 @@ void RtpPacket::PromoteToTwoByteHeaderExtension() { size_t read_index = extension_entry->offset; size_t write_index = read_index + write_read_delta; // Update offset. - extension_entry->offset = rtc::dchecked_cast(write_index); + extension_entry->offset = dchecked_cast(write_index); // Copy data. Use memmove since read/write regions may overlap. memmove(WriteAt(write_index), data() + read_index, extension_entry->length); // Rewrite id and length. @@ -378,8 +384,8 @@ void RtpPacket::PromoteToTwoByteHeaderExtension() { uint16_t RtpPacket::SetExtensionLengthMaybeAddZeroPadding( size_t extensions_offset) { // Update header length field. - uint16_t extensions_words = rtc::dchecked_cast( - (extensions_size_ + 3) / 4); // Wrap up to 32bit. + uint16_t extensions_words = + dchecked_cast((extensions_size_ + 3) / 4); // Wrap up to 32bit. ByteWriter::WriteBigEndian(WriteAt(extensions_offset - 2), extensions_words); // Fill extension padding place with zeroes. @@ -411,7 +417,7 @@ bool RtpPacket::SetPadding(size_t padding_bytes) { << " bytes left in buffer."; return false; } - padding_size_ = rtc::dchecked_cast(padding_bytes); + padding_size_ = dchecked_cast(padding_bytes); buffer_.SetSize(payload_offset_ + payload_size_ + padding_size_); if (padding_size_ > 0) { size_t padding_offset = payload_offset_ + payload_size_; @@ -532,7 +538,7 @@ bool RtpPacket::ParseBuffer(const uint8_t* buffer, size_t size) { size_t offset = extension_offset + extensions_size_ + extension_header_length; - if (!rtc::IsValueInRangeForNumericType(offset)) { + if (!IsValueInRangeForNumericType(offset)) { RTC_DLOG(LS_WARNING) << "Oversized rtp header extension."; break; } @@ -580,8 +586,7 @@ RtpPacket::ExtensionInfo& RtpPacket::FindOrCreateExtensionInfo(int id) { return extension_entries_.back(); } -rtc::ArrayView RtpPacket::FindExtension( - ExtensionType type) const { +ArrayView RtpPacket::FindExtension(ExtensionType type) const { uint8_t id = extensions_.GetId(type); if (id == ExtensionManager::kInvalidId) { // Extension not registered. 
@@ -591,12 +596,11 @@ rtc::ArrayView RtpPacket::FindExtension( if (extension_info == nullptr) { return nullptr; } - return rtc::MakeArrayView(data() + extension_info->offset, - extension_info->length); + return MakeArrayView(data() + extension_info->offset, extension_info->length); } -rtc::ArrayView RtpPacket::AllocateExtension(ExtensionType type, - size_t length) { +ArrayView RtpPacket::AllocateExtension(ExtensionType type, + size_t length) { // TODO(webrtc:7990): Add support for empty extensions (length==0). if (length == 0 || length > RtpExtension::kMaxValueSize || (!extensions_.ExtmapAllowMixed() && @@ -686,7 +690,7 @@ bool RtpPacket::RemoveExtension(ExtensionType type) { } std::string RtpPacket::ToString() const { - rtc::StringBuilder result; + StringBuilder result; result << "{payload_type=" << payload_type_ << ", marker=" << marker_ << ", sequence_number=" << sequence_number_ << ", padding_size=" << padding_size_ << ", timestamp=" << timestamp_ diff --git a/modules/rtp_rtcp/source/rtp_packet.h b/modules/rtp_rtcp/source/rtp_packet.h index e91ec6368b..9f98d5bf99 100644 --- a/modules/rtp_rtcp/source/rtp_packet.h +++ b/modules/rtp_rtcp/source/rtp_packet.h @@ -10,10 +10,14 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTP_PACKET_H_ #define MODULES_RTP_RTCP_SOURCE_RTP_PACKET_H_ +#include +#include +#include +#include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -48,10 +52,10 @@ class RtpPacket { // read or allocate extensions in methods GetExtension, AllocateExtension, // etc.) bool Parse(const uint8_t* buffer, size_t size); - bool Parse(rtc::ArrayView packet); + bool Parse(ArrayView packet); // Parse and move given buffer into Packet. - bool Parse(rtc::CopyOnWriteBuffer packet); + bool Parse(CopyOnWriteBuffer packet); // Maps extensions id to their types. void IdentifyExtensions(ExtensionManager extensions); @@ -73,15 +77,15 @@ class RtpPacket { size_t payload_size() const { return payload_size_; } bool has_padding() const { return buffer_[0] & 0x20; } size_t padding_size() const { return padding_size_; } - rtc::ArrayView payload() const { - return rtc::MakeArrayView(data() + payload_offset_, payload_size_); + ArrayView payload() const { + return MakeArrayView(data() + payload_offset_, payload_size_); } - rtc::CopyOnWriteBuffer PayloadBuffer() const { + CopyOnWriteBuffer PayloadBuffer() const { return buffer_.Slice(payload_offset_, payload_size_); } // Buffer. - rtc::CopyOnWriteBuffer Buffer() const { return buffer_; } + CopyOnWriteBuffer Buffer() const { return buffer_; } size_t capacity() const { return buffer_.capacity(); } size_t size() const { return payload_offset_ + payload_size_ + padding_size_; @@ -114,7 +118,7 @@ class RtpPacket { // Writes csrc list. Assumes: // a) There is enough room left in buffer. // b) Extension headers, payload or padding data has not already been added. - void SetCsrcs(rtc::ArrayView csrcs); + void SetCsrcs(ArrayView csrcs); // Header extensions. template @@ -127,31 +131,31 @@ class RtpPacket { bool IsRegistered() const; template - bool GetExtension(FirstValue, Values...) const; + bool GetExtension(FirstValue&&, Values&&...) const; template - absl::optional GetExtension() const; + std::optional GetExtension() const; // Returns view of the raw extension or empty view on failure. 
template - rtc::ArrayView GetRawExtension() const; + ArrayView GetRawExtension() const; template bool SetExtension(const Values&...); template - bool SetRawExtension(rtc::ArrayView data); + bool SetRawExtension(ArrayView data); template bool ReserveExtension(); // Find or allocate an extension `type`. Returns view of size `length` // to write raw extension to or an empty view on failure. - rtc::ArrayView AllocateExtension(ExtensionType type, size_t length); + ArrayView AllocateExtension(ExtensionType type, size_t length); // Find an extension `type`. // Returns view of the raw extension or empty view on failure. - rtc::ArrayView FindExtension(ExtensionType type) const; + ArrayView FindExtension(ExtensionType type) const; // Returns pointer to the payload of size at least `size_bytes`. // Keeps original payload, if any. If `size_bytes` is larger than current @@ -190,7 +194,7 @@ class RtpPacket { // Allocates and returns place to store rtp header extension. // Returns empty arrayview on failure. - rtc::ArrayView AllocateRawExtension(int id, size_t length); + ArrayView AllocateRawExtension(int id, size_t length); // Promotes existing one-byte header extensions to two-byte header extensions // by rewriting the data and updates the corresponding extension offsets. @@ -217,7 +221,7 @@ class RtpPacket { ExtensionManager extensions_; std::vector extension_entries_; size_t extensions_size_ = 0; // Unaligned. - rtc::CopyOnWriteBuffer buffer_; + CopyOnWriteBuffer buffer_; }; template @@ -231,24 +235,25 @@ bool RtpPacket::IsRegistered() const { } template -bool RtpPacket::GetExtension(FirstValue first, Values... values) const { +bool RtpPacket::GetExtension(FirstValue&& first, Values&&... values) const { auto raw = FindExtension(Extension::kId); if (raw.empty()) return false; - return Extension::Parse(raw, first, values...); + return Extension::Parse(raw, std::forward(first), + std::forward(values)...); } template -absl::optional RtpPacket::GetExtension() const { - absl::optional result; +std::optional RtpPacket::GetExtension() const { + std::optional result; auto raw = FindExtension(Extension::kId); if (raw.empty() || !Extension::Parse(raw, &result.emplace())) - result = absl::nullopt; + result = std::nullopt; return result; } template -rtc::ArrayView RtpPacket::GetRawExtension() const { +ArrayView RtpPacket::GetRawExtension() const { return FindExtension(Extension::kId); } @@ -262,9 +267,8 @@ bool RtpPacket::SetExtension(const Values&... values) { } template -bool RtpPacket::SetRawExtension(rtc::ArrayView data) { - rtc::ArrayView buffer = - AllocateExtension(Extension::kId, data.size()); +bool RtpPacket::SetRawExtension(ArrayView data) { + ArrayView buffer = AllocateExtension(Extension::kId, data.size()); if (buffer.empty()) { return false; } diff --git a/modules/rtp_rtcp/source/rtp_packet_h265_common.h b/modules/rtp_rtcp/source/rtp_packet_h265_common.h new file mode 100644 index 0000000000..904e1d235b --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_packet_h265_common.h @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef MODULES_RTP_RTCP_SOURCE_RTP_PACKET_H265_COMMON_H_ +#define MODULES_RTP_RTCP_SOURCE_RTP_PACKET_H265_COMMON_H_ + +#include +#include + +namespace webrtc { +// The payload header consists of the same +// fields (F, Type, LayerId and TID) as the NAL unit header. Refer to +// section 4.4 in RFC 7798. +constexpr size_t kH265PayloadHeaderSizeBytes = 2; +constexpr uint8_t kH265MaxLayerId = 127; +constexpr uint8_t kH265MaxTemporalId = 7; +// Unlike H.264, H.265 NAL header is 2-bytes. +constexpr size_t kH265NalHeaderSizeBytes = 2; +// H.265's FU is constructed of 2-byte payload header, 1-byte FU header and FU +// payload. +constexpr size_t kH265FuHeaderSizeBytes = 1; +// The NALU size for H.265 RTP aggregated packet indicates the size of the NAL +// unit is 2-bytes. +constexpr size_t kH265LengthFieldSizeBytes = 2; +constexpr size_t kH265ApHeaderSizeBytes = + kH265NalHeaderSizeBytes + kH265LengthFieldSizeBytes; + +// Bit masks for NAL headers. +enum NalHdrMasks { + kH265FBit = 0x80, + kH265TypeMask = 0x7E, + kH265LayerIDHMask = 0x1, + kH265LayerIDLMask = 0xF8, + kH265TIDMask = 0x7, + kH265TypeMaskN = 0x81, + kH265TypeMaskInFuHeader = 0x3F +}; + +// Bit masks for FU headers. +enum FuBitmasks { + kH265SBitMask = 0x80, + kH265EBitMask = 0x40, + kH265FuTypeBitMask = 0x3F +}; + +constexpr uint8_t kStartCode[] = {0, 0, 0, 1}; + +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_SOURCE_RTP_PACKET_H265_COMMON_H_ diff --git a/modules/rtp_rtcp/source/rtp_packet_history.cc b/modules/rtp_rtcp/source/rtp_packet_history.cc index 1e75e4787e..9b2db996dd 100644 --- a/modules/rtp_rtcp/source/rtp_packet_history.cc +++ b/modules/rtp_rtcp/source/rtp_packet_history.cc @@ -11,15 +11,23 @@ #include "modules/rtp_rtcp/source/rtp_packet_history.h" #include +#include #include #include #include +#include #include +#include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/function_view.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -46,37 +54,13 @@ RtpPacketHistory::StoredPacket& RtpPacketHistory::StoredPacket::operator=( RtpPacketHistory::StoredPacket&&) = default; RtpPacketHistory::StoredPacket::~StoredPacket() = default; -void RtpPacketHistory::StoredPacket::IncrementTimesRetransmitted( - PacketPrioritySet* priority_set) { - // Check if this StoredPacket is in the priority set. If so, we need to remove - // it before updating `times_retransmitted_` since that is used in sorting, - // and then add it back. - const bool in_priority_set = priority_set && priority_set->erase(this) > 0; +void RtpPacketHistory::StoredPacket::IncrementTimesRetransmitted() { ++times_retransmitted_; - if (in_priority_set) { - auto it = priority_set->insert(this); - RTC_DCHECK(it.second) - << "ERROR: Priority set already contains matching packet! In set: " - "insert order = " - << (*it.first)->insert_order_ - << ", times retransmitted = " << (*it.first)->times_retransmitted_ - << ". Trying to add: insert order = " << insert_order_ - << ", times retransmitted = " << times_retransmitted_; - } -} - -bool RtpPacketHistory::MoreUseful::operator()(StoredPacket* lhs, - StoredPacket* rhs) const { - // Prefer to send packets we haven't already sent as padding. 
- if (lhs->times_retransmitted() != rhs->times_retransmitted()) { - return lhs->times_retransmitted() < rhs->times_retransmitted(); - } - // All else being equal, prefer newer packets. - return lhs->insert_order() > rhs->insert_order(); } -RtpPacketHistory::RtpPacketHistory(Clock* clock, PaddingMode padding_mode) - : clock_(clock), +RtpPacketHistory::RtpPacketHistory(const Environment& env, + PaddingMode padding_mode) + : clock_(&env.clock()), padding_mode_(padding_mode), number_to_store_(0), mode_(StorageMode::kDisabled), @@ -163,14 +147,6 @@ void RtpPacketHistory::PutRtpPacket(std::unique_ptr packet, packet_history_[packet_index] = StoredPacket(std::move(packet), send_time, packets_inserted_++); - - if (padding_priority_enabled()) { - if (padding_priority_.size() >= kMaxPaddingHistory - 1) { - padding_priority_.erase(std::prev(padding_priority_.end())); - } - auto prio_it = padding_priority_.insert(&packet_history_[packet_index]); - RTC_DCHECK(prio_it.second) << "Failed to insert packet into prio set."; - } } std::unique_ptr RtpPacketHistory::GetPacketAndMarkAsPending( @@ -183,7 +159,7 @@ std::unique_ptr RtpPacketHistory::GetPacketAndMarkAsPending( std::unique_ptr RtpPacketHistory::GetPacketAndMarkAsPending( uint16_t sequence_number, - rtc::FunctionView(const RtpPacketToSend&)> + FunctionView(const RtpPacketToSend&)> encapsulate) { MutexLock lock(&lock_); if (mode_ == StorageMode::kDisabled) { @@ -230,8 +206,7 @@ void RtpPacketHistory::MarkPacketAsSent(uint16_t sequence_number) { // transmission count. packet->set_send_time(clock_->CurrentTime()); packet->pending_transmission_ = false; - packet->IncrementTimesRetransmitted( - padding_priority_enabled() ? &padding_priority_ : nullptr); + packet->IncrementTimesRetransmitted(); } bool RtpPacketHistory::GetPacketState(uint16_t sequence_number) const { @@ -278,7 +253,7 @@ std::unique_ptr RtpPacketHistory::GetPayloadPaddingPacket() { } std::unique_ptr RtpPacketHistory::GetPayloadPaddingPacket( - rtc::FunctionView(const RtpPacketToSend&)> + FunctionView(const RtpPacketToSend&)> encapsulate) { MutexLock lock(&lock_); if (mode_ == StorageMode::kDisabled) { @@ -290,11 +265,8 @@ std::unique_ptr RtpPacketHistory::GetPayloadPaddingPacket( } StoredPacket* best_packet = nullptr; - if (padding_priority_enabled() && !padding_priority_.empty()) { - auto best_packet_it = padding_priority_.begin(); - best_packet = *best_packet_it; - } else if (!padding_priority_enabled() && !packet_history_.empty()) { - // Prioritization not available, pick the last packet. + if (!packet_history_.empty()) { + // Pick the last packet. for (auto it = packet_history_.rbegin(); it != packet_history_.rend(); ++it) { if (it->packet_ != nullptr) { @@ -322,14 +294,12 @@ std::unique_ptr RtpPacketHistory::GetPayloadPaddingPacket( } best_packet->set_send_time(clock_->CurrentTime()); - best_packet->IncrementTimesRetransmitted( - padding_priority_enabled() ? 
&padding_priority_ : nullptr); - + best_packet->IncrementTimesRetransmitted(); return padding_packet; } void RtpPacketHistory::CullAcknowledgedPackets( - rtc::ArrayView sequence_numbers) { + ArrayView sequence_numbers) { MutexLock lock(&lock_); for (uint16_t sequence_number : sequence_numbers) { int packet_index = GetPacketIndex(sequence_number); @@ -348,8 +318,7 @@ void RtpPacketHistory::Clear() { void RtpPacketHistory::Reset() { packet_history_.clear(); - padding_priority_.clear(); - large_payload_packet_ = absl::nullopt; + large_payload_packet_ = std::nullopt; } void RtpPacketHistory::CullOldPackets() { @@ -396,12 +365,6 @@ std::unique_ptr RtpPacketHistory::RemovePacket( // Move the packet out from the StoredPacket container. std::unique_ptr rtp_packet = std::move(packet_history_[packet_index].packet_); - - // Erase from padding priority set, if eligible. - if (padding_mode_ == PaddingMode::kPriority) { - padding_priority_.erase(&packet_history_[packet_index]); - } - if (packet_index == 0) { while (!packet_history_.empty() && packet_history_.front().packet_ == nullptr) { @@ -449,8 +412,4 @@ RtpPacketHistory::StoredPacket* RtpPacketHistory::GetStoredPacket( return &packet_history_[index]; } -bool RtpPacketHistory::padding_priority_enabled() const { - return padding_mode_ == PaddingMode::kPriority; -} - } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_packet_history.h b/modules/rtp_rtcp/source/rtp_packet_history.h index 18310a8bd3..ebd2f9119a 100644 --- a/modules/rtp_rtcp/source/rtp_packet_history.h +++ b/modules/rtp_rtcp/source/rtp_packet_history.h @@ -11,14 +11,14 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTP_PACKET_HISTORY_H_ #define MODULES_RTP_RTCP_SOURCE_RTP_PACKET_HISTORY_H_ +#include +#include #include -#include #include -#include -#include -#include +#include -#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/environment/environment.h" #include "api/function_view.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -40,11 +40,8 @@ class RtpPacketHistory { }; enum class PaddingMode { - kDefault, // Last packet stored in the history that has not yet been - // culled. - kPriority, // Selects padding packets based on - // heuristics such as send time, retransmission count etc, in order to - // make padding potentially more useful. + kDefault, // Last packet stored in the history that has not yet been + // culled. kRecentLargePacket // Use the most recent large packet. Packet is kept for // padding even after it has been culled from history. }; @@ -59,11 +56,7 @@ class RtpPacketHistory { // With kStoreAndCull, always remove packets after 3x max(1000ms, 3x rtt). static constexpr int kPacketCullingDelayFactor = 3; - RtpPacketHistory(Clock* clock, bool enable_padding_prio) - : RtpPacketHistory(clock, - enable_padding_prio ? PaddingMode::kPriority - : PaddingMode::kDefault) {} - RtpPacketHistory(Clock* clock, PaddingMode padding_mode); + RtpPacketHistory(const Environment& env, PaddingMode padding_mode); RtpPacketHistory() = delete; RtpPacketHistory(const RtpPacketHistory&) = delete; @@ -97,8 +90,8 @@ class RtpPacketHistory { // packet will not be marked as pending. std::unique_ptr GetPacketAndMarkAsPending( uint16_t sequence_number, - rtc::FunctionView( - const RtpPacketToSend&)> encapsulate); + FunctionView(const RtpPacketToSend&)> + encapsulate); // Updates the send time for the given packet and increments the transmission // counter. Marks the packet as no longer being in the pacer queue. 
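Since the RtpPacketHistory constructor in this file now takes an Environment instead of a raw Clock*, and PaddingMode::kPriority is gone, here is a minimal construction sketch. It mirrors the updated unit tests later in this change and assumes CreateEnvironment() from api/environment/environment_factory.h.

// Sketch only; follows the updated tests in rtp_packet_history_unittest.cc.
SimulatedClock clock(/*initial_time_us=*/123456);
Environment env = CreateEnvironment(&clock);
RtpPacketHistory history(env, RtpPacketHistory::PaddingMode::kRecentLargePacket);
history.SetStorePacketsStatus(RtpPacketHistory::StorageMode::kStoreAndCull,
                              /*number_to_store=*/10);
// Padding now reuses the last stored (or most recent large) packet instead of
// consulting a priority set.
std::unique_ptr<RtpPacketToSend> padding = history.GetPayloadPaddingPacket();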
@@ -119,21 +112,17 @@ class RtpPacketHistory { // container, or to abort getting the packet if the function returns // nullptr. std::unique_ptr GetPayloadPaddingPacket( - rtc::FunctionView( - const RtpPacketToSend&)> encapsulate); + FunctionView(const RtpPacketToSend&)> + encapsulate); // Cull packets that have been acknowledged as received by the remote end. - void CullAcknowledgedPackets(rtc::ArrayView sequence_numbers); + void CullAcknowledgedPackets(ArrayView sequence_numbers); // Remove all pending packets from the history, but keep storage mode and // capacity. void Clear(); private: - struct MoreUseful; - class StoredPacket; - using PacketPrioritySet = std::set; - class StoredPacket { public: StoredPacket() = default; @@ -146,7 +135,7 @@ class RtpPacketHistory { uint64_t insert_order() const { return insert_order_; } size_t times_retransmitted() const { return times_retransmitted_; } - void IncrementTimesRetransmitted(PacketPrioritySet* priority_set); + void IncrementTimesRetransmitted(); // The time of last transmission, including retransmissions. Timestamp send_time() const { return send_time_; } @@ -168,11 +157,6 @@ class RtpPacketHistory { // Number of times RE-transmitted, ie excluding the first transmission. size_t times_retransmitted_; }; - struct MoreUseful { - bool operator()(StoredPacket* lhs, StoredPacket* rhs) const; - }; - - bool padding_priority_enabled() const; // Helper method to check if packet has too recently been sent. bool VerifyRtt(const StoredPacket& packet) const @@ -205,11 +189,8 @@ class RtpPacketHistory { // Total number of packets with inserted. uint64_t packets_inserted_ RTC_GUARDED_BY(lock_); - // Objects from `packet_history_` ordered by "most likely to be useful", used - // in GetPayloadPaddingPacket(). - PacketPrioritySet padding_priority_ RTC_GUARDED_BY(lock_); - absl::optional large_payload_packet_ RTC_GUARDED_BY(lock_); + std::optional large_payload_packet_ RTC_GUARDED_BY(lock_); }; } // namespace webrtc #endif // MODULES_RTP_RTCP_SOURCE_RTP_PACKET_HISTORY_H_ diff --git a/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc b/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc index 5019a72296..a5531effa6 100644 --- a/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc @@ -10,15 +10,20 @@ #include "modules/rtp_rtcp/source/rtp_packet_history.h" +#include #include #include #include #include +#include +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "rtc_base/copy_on_write_buffer.h" #include "system_wrappers/include/clock.h" #include "test/gmock.h" #include "test/gtest.h" @@ -57,9 +62,11 @@ class RtpPacketHistoryTest protected: RtpPacketHistoryTest() : fake_clock_(123456), - hist_(&fake_clock_, /*enable_padding_prio=*/GetParam()) {} + env_(CreateEnvironment(&fake_clock_)), + hist_(env_, /*enable_padding_prio=*/GetParam()) {} SimulatedClock fake_clock_; + Environment env_; RtpPacketHistory hist_; std::unique_ptr CreateRtpPacket(uint16_t seq_num) { @@ -145,7 +152,7 @@ TEST_P(RtpPacketHistoryTest, GetRtpPacket) { Timestamp capture_time = Timestamp::Millis(1); std::unique_ptr packet = CreateRtpPacket(kStartSeqNum); packet->set_capture_time(capture_time); - rtc::CopyOnWriteBuffer buffer = packet->Buffer(); + CopyOnWriteBuffer buffer = packet->Buffer(); 
hist_.PutRtpPacket(std::move(packet), /*send_time=*/fake_clock_.CurrentTime()); @@ -157,7 +164,7 @@ TEST_P(RtpPacketHistoryTest, GetRtpPacket) { } TEST_P(RtpPacketHistoryTest, MinResendTime) { - static const TimeDelta kMinRetransmitInterval = TimeDelta::Millis(100); + static constexpr TimeDelta kMinRetransmitInterval = TimeDelta::Millis(100); hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 10); hist_.SetRtt(kMinRetransmitInterval); @@ -244,42 +251,6 @@ TEST_P(RtpPacketHistoryTest, RemovesOldestPacketWhenAtMaxCapacity) { EXPECT_TRUE(hist_.GetPacketState(To16u(kStartSeqNum + 1))); } -TEST_P(RtpPacketHistoryTest, RemovesLowestPrioPaddingWhenAtMaxCapacity) { - if (GetParam() != RtpPacketHistory::PaddingMode::kPriority) { - GTEST_SKIP() << "Padding prioritization required for this test"; - } - - // Tests the absolute upper bound on number of packets in the prioritized - // set of potential padding packets. - const size_t kMaxNumPackets = RtpPacketHistory::kMaxPaddingHistory; - hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, kMaxNumPackets * 2); - hist_.SetRtt(TimeDelta::Millis(1)); - - // Add packets until the max is reached, and then yet another one. - for (size_t i = 0; i < kMaxNumPackets + 1; ++i) { - std::unique_ptr packet = - CreateRtpPacket(To16u(kStartSeqNum + i)); - // Don't mark packets as sent, preventing them from being removed. - hist_.PutRtpPacket(std::move(packet), fake_clock_.CurrentTime()); - } - - // Advance time to allow retransmission/padding. - fake_clock_.AdvanceTimeMilliseconds(1); - - // The oldest packet will be least prioritized and has fallen out of the - // priority set. - for (size_t i = kMaxNumPackets - 1; i > 0; --i) { - auto packet = hist_.GetPayloadPaddingPacket(); - ASSERT_TRUE(packet); - EXPECT_EQ(packet->SequenceNumber(), To16u(kStartSeqNum + i + 1)); - } - - // Wrap around to newest padding packet again. - auto packet = hist_.GetPayloadPaddingPacket(); - ASSERT_TRUE(packet); - EXPECT_EQ(packet->SequenceNumber(), To16u(kStartSeqNum + kMaxNumPackets)); -} - TEST_P(RtpPacketHistoryTest, DontRemoveTooRecentlyTransmittedPackets) { // Set size to remove old packets as soon as possible. hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 1); @@ -530,46 +501,6 @@ TEST_P(RtpPacketHistoryTest, DontRemovePendingTransmissions) { EXPECT_FALSE(hist_.GetPacketState(kStartSeqNum)); } -TEST_P(RtpPacketHistoryTest, PrioritizedPayloadPadding) { - if (GetParam() != RtpPacketHistory::PaddingMode::kPriority) { - GTEST_SKIP() << "Padding prioritization required for this test"; - } - - hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 1); - - // Add two sent packets, one millisecond apart. - hist_.PutRtpPacket(CreateRtpPacket(kStartSeqNum), fake_clock_.CurrentTime()); - fake_clock_.AdvanceTimeMilliseconds(1); - - hist_.PutRtpPacket(CreateRtpPacket(kStartSeqNum + 1), - fake_clock_.CurrentTime()); - fake_clock_.AdvanceTimeMilliseconds(1); - - // Latest packet given equal retransmission count. - EXPECT_EQ(hist_.GetPayloadPaddingPacket()->SequenceNumber(), - kStartSeqNum + 1); - - // Older packet has lower retransmission count. - EXPECT_EQ(hist_.GetPayloadPaddingPacket()->SequenceNumber(), kStartSeqNum); - - // Equal retransmission count again, use newest packet. - EXPECT_EQ(hist_.GetPayloadPaddingPacket()->SequenceNumber(), - kStartSeqNum + 1); - - // Older packet has lower retransmission count. - EXPECT_EQ(hist_.GetPayloadPaddingPacket()->SequenceNumber(), kStartSeqNum); - - // Remove newest packet. 
- hist_.CullAcknowledgedPackets(std::vector{kStartSeqNum + 1}); - - // Only older packet left. - EXPECT_EQ(hist_.GetPayloadPaddingPacket()->SequenceNumber(), kStartSeqNum); - - hist_.CullAcknowledgedPackets(std::vector{kStartSeqNum}); - - EXPECT_EQ(hist_.GetPayloadPaddingPacket(), nullptr); -} - TEST_P(RtpPacketHistoryTest, NoPendingPacketAsPadding) { hist_.SetStorePacketsStatus(StorageMode::kStoreAndCull, 1); @@ -651,7 +582,7 @@ TEST_P(RtpPacketHistoryTest, OutOfOrderInsertRemoval) { } } -TEST_P(RtpPacketHistoryTest, UsesLastPacketAsPaddingWithPrioOff) { +TEST_P(RtpPacketHistoryTest, UsesLastPacketAsPaddingWithDefaultMode) { if (GetParam() != RtpPacketHistory::PaddingMode::kDefault) { GTEST_SKIP() << "Default padding prioritization required for this test"; } @@ -697,13 +628,13 @@ INSTANTIATE_TEST_SUITE_P( WithAndWithoutPaddingPrio, RtpPacketHistoryTest, ::testing::Values(RtpPacketHistory::PaddingMode::kDefault, - RtpPacketHistory::PaddingMode::kPriority, RtpPacketHistory::PaddingMode::kRecentLargePacket)); TEST(RtpPacketHistoryRecentLargePacketMode, GetPayloadPaddingPacketAfterCullWithAcksReturnOldPacket) { SimulatedClock fake_clock(1234); - RtpPacketHistory history(&fake_clock, + Environment env = CreateEnvironment(&fake_clock); + RtpPacketHistory history(env, RtpPacketHistory::PaddingMode::kRecentLargePacket); history.SetStorePacketsStatus(StorageMode::kStoreAndCull, 10); @@ -723,7 +654,8 @@ TEST(RtpPacketHistoryRecentLargePacketMode, TEST(RtpPacketHistoryRecentLargePacketMode, GetPayloadPaddingPacketIgnoreSmallRecentPackets) { SimulatedClock fake_clock(1234); - RtpPacketHistory history(&fake_clock, + Environment env = CreateEnvironment(&fake_clock); + RtpPacketHistory history(env, RtpPacketHistory::PaddingMode::kRecentLargePacket); history.SetStorePacketsStatus(StorageMode::kStoreAndCull, 10); std::unique_ptr packet = CreatePacket(kStartSeqNum); @@ -744,7 +676,8 @@ TEST(RtpPacketHistoryRecentLargePacketMode, TEST(RtpPacketHistoryRecentLargePacketMode, GetPayloadPaddingPacketReturnsRecentPacketIfSizeNearMax) { SimulatedClock fake_clock(1234); - RtpPacketHistory history(&fake_clock, + Environment env = CreateEnvironment(&fake_clock); + RtpPacketHistory history(env, RtpPacketHistory::PaddingMode::kRecentLargePacket); history.SetStorePacketsStatus(StorageMode::kStoreAndCull, 10); std::unique_ptr packet = CreatePacket(kStartSeqNum); @@ -765,7 +698,8 @@ TEST(RtpPacketHistoryRecentLargePacketMode, TEST(RtpPacketHistoryRecentLargePacketMode, GetPayloadPaddingPacketReturnsLastPacketAfterLargeSequenceNumberGap) { SimulatedClock fake_clock(1234); - RtpPacketHistory history(&fake_clock, + Environment env = CreateEnvironment(&fake_clock); + RtpPacketHistory history(env, RtpPacketHistory::PaddingMode::kRecentLargePacket); history.SetStorePacketsStatus(StorageMode::kStoreAndCull, 10); uint16_t sequence_number = std::numeric_limits::max() - 50; diff --git a/modules/rtp_rtcp/source/rtp_packet_received.cc b/modules/rtp_rtcp/source/rtp_packet_received.cc index 9fa6197e7c..b8fd1e5b6d 100644 --- a/modules/rtp_rtcp/source/rtp_packet_received.cc +++ b/modules/rtp_rtcp/source/rtp_packet_received.cc @@ -15,7 +15,10 @@ #include #include +#include "api/rtp_headers.h" +#include "api/units/timestamp.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet.h" #include "rtc_base/numerics/safe_conversions.h" namespace webrtc { @@ -42,7 +45,7 @@ void RtpPacketReceived::GetHeader(RTPHeader* header) const { header->timestamp = Timestamp(); header->ssrc = Ssrc(); 
std::vector csrcs = Csrcs(); - header->numCSRCs = rtc::dchecked_cast(csrcs.size()); + header->numCSRCs = dchecked_cast(csrcs.size()); for (size_t i = 0; i < csrcs.size(); ++i) { header->arrOfCSRCs[i] = csrcs[i]; } @@ -61,8 +64,7 @@ void RtpPacketReceived::GetHeader(RTPHeader* header) const { &header->extension.feedback_request) || GetExtension( &header->extension.transportSequenceNumber); - header->extension.hasAudioLevel = GetExtension( - &header->extension.voiceActivity, &header->extension.audioLevel); + header->extension.set_audio_level(GetExtension()); header->extension.hasVideoRotation = GetExtension(&header->extension.videoRotation); header->extension.hasVideoContentType = diff --git a/modules/rtp_rtcp/source/rtp_packet_received.h b/modules/rtp_rtcp/source/rtp_packet_received.h index 51bd17d7bf..21a6955044 100644 --- a/modules/rtp_rtcp/source/rtp_packet_received.h +++ b/modules/rtp_rtcp/source/rtp_packet_received.h @@ -14,12 +14,12 @@ #include -#include "api/array_view.h" #include "api/ref_counted_base.h" #include "api/rtp_headers.h" #include "api/scoped_refptr.h" #include "api/units/timestamp.h" #include "modules/rtp_rtcp/source/rtp_packet.h" +#include "rtc_base/network/ecn_marking.h" namespace webrtc { // Class to hold rtp packet with metadata for receiver side. @@ -48,6 +48,11 @@ class RtpPacketReceived : public RtpPacket { webrtc::Timestamp arrival_time() const { return arrival_time_; } void set_arrival_time(webrtc::Timestamp time) { arrival_time_ = time; } + // Explicit Congestion Notification (ECN), RFC-3168, Section 5. + // Used by L4S: https://www.rfc-editor.org/rfc/rfc9331.html + EcnMarking ecn() const { return ecn_; } + void set_ecn(EcnMarking ecn) { ecn_ = ecn; } + // Flag if packet was recovered via RTX or FEC. bool recovered() const { return recovered_; } void set_recovered(bool value) { recovered_ = value; } @@ -59,18 +64,19 @@ class RtpPacketReceived : public RtpPacket { // An application can attach arbitrary data to an RTP packet using // `additional_data`. The additional data does not affect WebRTC processing. - rtc::scoped_refptr additional_data() const { + scoped_refptr additional_data() const { return additional_data_; } - void set_additional_data(rtc::scoped_refptr data) { + void set_additional_data(scoped_refptr data) { additional_data_ = std::move(data); } private: webrtc::Timestamp arrival_time_ = Timestamp::MinusInfinity(); + EcnMarking ecn_ = EcnMarking::kNotEct; int payload_type_frequency_ = 0; bool recovered_ = false; - rtc::scoped_refptr additional_data_; + scoped_refptr additional_data_; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_packet_send_info.cc b/modules/rtp_rtcp/source/rtp_packet_send_info.cc new file mode 100644 index 0000000000..f0e6091ea7 --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_packet_send_info.cc @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include +#include + +#include "api/transport/network_types.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +RtpPacketSendInfo RtpPacketSendInfo::From(const RtpPacketToSend& packet, + const PacedPacketInfo& pacing_info) { + RtpPacketSendInfo packet_info; + if (packet.transport_sequence_number()) { + packet_info.transport_sequence_number = + *packet.transport_sequence_number() & 0xFFFF; + } else { + std::optional packet_id = + packet.GetExtension(); + if (packet_id) { + packet_info.transport_sequence_number = *packet_id; + } + } + + packet_info.rtp_timestamp = packet.Timestamp(); + packet_info.length = packet.size(); + packet_info.pacing_info = pacing_info; + packet_info.packet_type = packet.packet_type(); + + switch (*packet_info.packet_type) { + case RtpPacketMediaType::kAudio: + case RtpPacketMediaType::kVideo: + packet_info.media_ssrc = packet.Ssrc(); + packet_info.rtp_sequence_number = packet.SequenceNumber(); + break; + case RtpPacketMediaType::kRetransmission: + RTC_DCHECK(packet.original_ssrc() && + packet.retransmitted_sequence_number()); + // For retransmissions, we want to remove the original media packet + // if the retransmit arrives - so populate that in the packet info. + packet_info.media_ssrc = packet.original_ssrc().value_or(0); + packet_info.rtp_sequence_number = + packet.retransmitted_sequence_number().value_or(0); + break; + case RtpPacketMediaType::kPadding: + case RtpPacketMediaType::kForwardErrorCorrection: + // We're not interested in feedback about these packets being received + // or lost. + break; + } + return packet_info; +} + +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_packet_send_info_unittest.cc b/modules/rtp_rtcp/source/rtp_packet_send_info_unittest.cc new file mode 100644 index 0000000000..f7858f948e --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_packet_send_info_unittest.cc @@ -0,0 +1,119 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree.
+ */ + +#include +#include + +#include "api/transport/network_types.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +RtpPacketToSend BuildPacket(RtpPacketMediaType type) { + RtpHeaderExtensionMap extension_manager; + RtpPacketToSend packet(&extension_manager); + + packet.SetSsrc(1); + packet.SetSequenceNumber(89); + if (type == RtpPacketMediaType::kRetransmission) { + packet.set_original_ssrc(2); + packet.set_retransmitted_sequence_number(678); + } + packet.set_transport_sequence_number(0xFFFFFFFF01); + packet.SetTimestamp(123); + packet.SetPayloadSize(5); + packet.set_packet_type(type); + return packet; +} + +void VerifyDefaultProperties(const RtpPacketSendInfo& send_info, + const RtpPacketToSend& packet, + const PacedPacketInfo& paced_info) { + EXPECT_EQ(send_info.length, packet.size()); + EXPECT_EQ(send_info.rtp_timestamp, packet.Timestamp()); + EXPECT_EQ(send_info.packet_type, packet.packet_type()); + EXPECT_EQ(send_info.pacing_info, paced_info); + if (packet.transport_sequence_number()) { + EXPECT_EQ(send_info.transport_sequence_number, + *packet.transport_sequence_number() & 0xFFFF); + } else { + EXPECT_EQ(send_info.transport_sequence_number, + *packet.GetExtension()); + } +} + +TEST(RtpPacketSendInfoTest, FromConvertsMediaPackets) { + RtpPacketToSend packet = BuildPacket(RtpPacketMediaType::kAudio); + PacedPacketInfo paced_info; + paced_info.probe_cluster_id = 8; + + RtpPacketSendInfo send_info = RtpPacketSendInfo::From(packet, paced_info); + EXPECT_EQ(send_info.media_ssrc, packet.Ssrc()); + VerifyDefaultProperties(send_info, packet, paced_info); +} + +TEST(RtpPacketSendInfoTest, FromConvertsPadding) { + RtpPacketToSend packet = BuildPacket(RtpPacketMediaType::kPadding); + PacedPacketInfo paced_info; + paced_info.probe_cluster_id = 8; + + RtpPacketSendInfo send_info = RtpPacketSendInfo::From(packet, paced_info); + EXPECT_EQ(send_info.media_ssrc, std::nullopt); + VerifyDefaultProperties(send_info, packet, paced_info); +} + +TEST(RtpPacketSendInfoTest, FromConvertsFec) { + RtpPacketToSend packet = + BuildPacket(RtpPacketMediaType::kForwardErrorCorrection); + PacedPacketInfo paced_info; + paced_info.probe_cluster_id = 8; + + RtpPacketSendInfo send_info = RtpPacketSendInfo::From(packet, paced_info); + EXPECT_EQ(send_info.media_ssrc, std::nullopt); + VerifyDefaultProperties(send_info, packet, paced_info); +} + +TEST(RtpPacketSendInfoTest, FromConvertsRetransmission) { + RtpPacketToSend packet = BuildPacket(RtpPacketMediaType::kRetransmission); + PacedPacketInfo paced_info; + paced_info.probe_cluster_id = 8; + + RtpPacketSendInfo send_info = RtpPacketSendInfo::From(packet, paced_info); + EXPECT_EQ(send_info.media_ssrc, *packet.original_ssrc()); + EXPECT_EQ(send_info.rtp_sequence_number, + *packet.retransmitted_sequence_number()); + VerifyDefaultProperties(send_info, packet, paced_info); +} + +TEST(RtpPacketSendInfoTest, FromFallbackToTranportSequenceHeaderExtension) { + RtpHeaderExtensionMap extension_manager; + extension_manager.Register(/*id=*/1); + PacedPacketInfo paced_info; + paced_info.probe_cluster_id = 8; + RtpPacketToSend packet(&extension_manager); + packet.SetSsrc(1); + packet.SetSequenceNumber(89); + const uint16_t kTransportSequenceNumber = 5555; + packet.SetExtension(kTransportSequenceNumber); + packet.SetTimestamp(123); + 
packet.AllocatePayload(5); + packet.set_packet_type(RtpPacketMediaType::kAudio); + + RtpPacketSendInfo send_info = RtpPacketSendInfo::From(packet, paced_info); + VerifyDefaultProperties(send_info, packet, paced_info); +} + +} // namespace +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_packet_to_send.cc b/modules/rtp_rtcp/source/rtp_packet_to_send.cc index b55e74aaf0..014de42f07 100644 --- a/modules/rtp_rtcp/source/rtp_packet_to_send.cc +++ b/modules/rtp_rtcp/source/rtp_packet_to_send.cc @@ -10,7 +10,9 @@ #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include +#include + +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" namespace webrtc { @@ -28,4 +30,13 @@ RtpPacketToSend& RtpPacketToSend::operator=(RtpPacketToSend&& packet) = default; RtpPacketToSend::~RtpPacketToSend() = default; +void RtpPacketToSend::set_packet_type(RtpPacketMediaType type) { + if (packet_type_ == RtpPacketMediaType::kAudio) { + original_packet_type_ = OriginalType::kAudio; + } else if (packet_type_ == RtpPacketMediaType::kVideo) { + original_packet_type_ = OriginalType::kVideo; + } + packet_type_ = type; +} + } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_packet_to_send.h b/modules/rtp_rtcp/source/rtp_packet_to_send.h index 438ca354ed..4e60c1203f 100644 --- a/modules/rtp_rtcp/source/rtp_packet_to_send.h +++ b/modules/rtp_rtcp/source/rtp_packet_to_send.h @@ -13,10 +13,9 @@ #include #include +#include #include -#include "absl/types/optional.h" -#include "api/array_view.h" #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" #include "api/units/time_delta.h" @@ -49,9 +48,14 @@ class RtpPacketToSend : public RtpPacket { webrtc::Timestamp capture_time() const { return capture_time_; } void set_capture_time(webrtc::Timestamp time) { capture_time_ = time; } - void set_packet_type(RtpPacketMediaType type) { packet_type_ = type; } - absl::optional packet_type() const { - return packet_type_; + void set_packet_type(RtpPacketMediaType type); + + std::optional packet_type() const { return packet_type_; } + + enum class OriginalType { kAudio, kVideo }; + // Original type does not change if packet type is changed to kRetransmission. + std::optional original_packet_type() const { + return original_packet_type_; } // If this is a retransmission, indicates the sequence number of the original @@ -60,10 +64,15 @@ class RtpPacketToSend : public RtpPacket { void set_retransmitted_sequence_number(uint16_t sequence_number) { retransmitted_sequence_number_ = sequence_number; } - absl::optional retransmitted_sequence_number() const { + std::optional retransmitted_sequence_number() const { return retransmitted_sequence_number_; } + // If this is a retransmission, indicates the SSRC of the original + // media packet that this packet represents. + void set_original_ssrc(uint32_t ssrc) { original_ssrc_ = ssrc; } + std::optional original_ssrc() const { return original_ssrc_; } + void set_allow_retransmission(bool allow_retransmission) { allow_retransmission_ = allow_retransmission; } @@ -71,10 +80,10 @@ class RtpPacketToSend : public RtpPacket { // An application can attach arbitrary data to an RTP packet using // `additional_data`. The additional data does not affect WebRTC processing. 
- rtc::scoped_refptr additional_data() const { + scoped_refptr additional_data() const { return additional_data_; } - void set_additional_data(rtc::scoped_refptr data) { + void set_additional_data(scoped_refptr data) { additional_data_ = std::move(data); } @@ -126,21 +135,38 @@ class RtpPacketToSend : public RtpPacket { void set_time_in_send_queue(TimeDelta time_in_send_queue) { time_in_send_queue_ = time_in_send_queue; } - absl::optional time_in_send_queue() const { + std::optional time_in_send_queue() const { return time_in_send_queue_; } + // A sequence number guaranteed to be monotically increasing by one for all + // packets where transport feedback is expected. + std::optional transport_sequence_number() const { + return transport_sequence_number_; + } + void set_transport_sequence_number(int64_t transport_sequence_number) { + transport_sequence_number_ = transport_sequence_number; + } + // Transport is capable of handling explicit congestion notification and the + // RTP packet should be sent as ect(1) + // https://www.rfc-editor.org/rfc/rfc9331.html + bool send_as_ect1() const { return send_as_ect1_; } + void set_send_as_ect1() { send_as_ect1_ = true; } private: webrtc::Timestamp capture_time_ = webrtc::Timestamp::Zero(); - absl::optional packet_type_; + std::optional packet_type_; + std::optional original_packet_type_; + std::optional original_ssrc_; + std::optional transport_sequence_number_; bool allow_retransmission_ = false; - absl::optional retransmitted_sequence_number_; - rtc::scoped_refptr additional_data_; + std::optional retransmitted_sequence_number_; + scoped_refptr additional_data_; bool is_first_packet_of_frame_ = false; bool is_key_frame_ = false; bool fec_protect_packet_ = false; bool is_red_ = false; - absl::optional time_in_send_queue_; + bool send_as_ect1_ = false; + std::optional time_in_send_queue_; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_packet_unittest.cc b/modules/rtp_rtcp/source/rtp_packet_unittest.cc index a1d1c9d4df..80fe006c01 100644 --- a/modules/rtp_rtcp/source/rtp_packet_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_packet_unittest.cc @@ -7,13 +7,30 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
*/ +#include "modules/rtp_rtcp/source/rtp_packet.h" + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/rtp_headers.h" +#include "api/units/time_delta.h" +#include "api/video/color_space.h" +#include "api/video/video_timing.h" #include "common_video/test/utilities.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "rtc_base/random.h" +#include "rtc_base/copy_on_write_buffer.h" #include "test/gmock.h" #include "test/gtest.h" @@ -230,14 +247,15 @@ TEST(RtpPacketTest, CreateWithExtension) { TEST(RtpPacketTest, CreateWith2Extensions) { RtpPacketToSend::ExtensionManager extensions; extensions.Register(kTransmissionOffsetExtensionId); - extensions.Register(kAudioLevelExtensionId); + extensions.Register(kAudioLevelExtensionId); RtpPacketToSend packet(&extensions); packet.SetPayloadType(kPayloadType); packet.SetSequenceNumber(kSeqNum); packet.SetTimestamp(kTimestamp); packet.SetSsrc(kSsrc); packet.SetExtension(kTimeOffset); - packet.SetExtension(kVoiceActive, kAudioLevel); + packet.SetExtension( + AudioLevel(kVoiceActive, kAudioLevel)); EXPECT_THAT(kPacketWithTOAndAL, ElementsAreArray(packet.data(), packet.size())); } @@ -245,7 +263,7 @@ TEST(RtpPacketTest, CreateWith2Extensions) { TEST(RtpPacketTest, CreateWithTwoByteHeaderExtensionFirst) { RtpPacketToSend::ExtensionManager extensions(/*extmap_allow_mixed=*/true); extensions.Register(kTransmissionOffsetExtensionId); - extensions.Register(kAudioLevelExtensionId); + extensions.Register(kAudioLevelExtensionId); extensions.Register(kTwoByteExtensionId); RtpPacketToSend packet(&extensions); packet.SetPayloadType(kPayloadType); @@ -257,7 +275,8 @@ TEST(RtpPacketTest, CreateWithTwoByteHeaderExtensionFirst) { TimeDelta::Millis(340)); ASSERT_TRUE(packet.SetExtension(playout_delay)); packet.SetExtension(kTimeOffset); - packet.SetExtension(kVoiceActive, kAudioLevel); + packet.SetExtension( + AudioLevel(kVoiceActive, kAudioLevel)); EXPECT_THAT(kPacketWithTwoByteExtensionIdFirst, ElementsAreArray(packet.data(), packet.size())); } @@ -266,7 +285,7 @@ TEST(RtpPacketTest, CreateWithTwoByteHeaderExtensionLast) { // This test will trigger RtpPacket::PromoteToTwoByteHeaderExtension(). RtpPacketToSend::ExtensionManager extensions(/*extmap_allow_mixed=*/true); extensions.Register(kTransmissionOffsetExtensionId); - extensions.Register(kAudioLevelExtensionId); + extensions.Register(kAudioLevelExtensionId); extensions.Register(kTwoByteExtensionId); RtpPacketToSend packet(&extensions); packet.SetPayloadType(kPayloadType); @@ -274,7 +293,8 @@ TEST(RtpPacketTest, CreateWithTwoByteHeaderExtensionLast) { packet.SetTimestamp(kTimestamp); packet.SetSsrc(kSsrc); packet.SetExtension(kTimeOffset); - packet.SetExtension(kVoiceActive, kAudioLevel); + packet.SetExtension( + AudioLevel(kVoiceActive, kAudioLevel)); EXPECT_THAT(kPacketWithTOAndAL, ElementsAreArray(packet.data(), packet.size())); // Set extension that requires two-byte header. 
@@ -322,27 +342,29 @@ TEST(RtpPacketTest, TryToCreateWithEmptyMid) { TEST(RtpPacketTest, TryToCreateWithLongMid) { RtpPacketToSend::ExtensionManager extensions; - constexpr char kLongMid[] = "LoooooooooonogMid"; - ASSERT_EQ(strlen(kLongMid), 17u); + constexpr char kOtherLongMid[] = "LoooooooooonogMid"; + ASSERT_EQ(strlen(kOtherLongMid), 17u); extensions.Register(kRtpMidExtensionId); RtpPacketToSend packet(&extensions); - EXPECT_FALSE(packet.SetExtension(kLongMid)); + EXPECT_FALSE(packet.SetExtension(kOtherLongMid)); } TEST(RtpPacketTest, TryToCreateTwoByteHeaderNotSupported) { RtpPacketToSend::ExtensionManager extensions; - extensions.Register(kTwoByteExtensionId); + extensions.Register(kTwoByteExtensionId); RtpPacketToSend packet(&extensions); // Set extension that requires two-byte header. - EXPECT_FALSE(packet.SetExtension(kVoiceActive, kAudioLevel)); + EXPECT_FALSE(packet.SetExtension( + AudioLevel(kVoiceActive, kAudioLevel))); } TEST(RtpPacketTest, CreateTwoByteHeaderSupportedIfExtmapAllowMixed) { RtpPacketToSend::ExtensionManager extensions(/*extmap_allow_mixed=*/true); - extensions.Register(kTwoByteExtensionId); + extensions.Register(kTwoByteExtensionId); RtpPacketToSend packet(&extensions); // Set extension that requires two-byte header. - EXPECT_TRUE(packet.SetExtension(kVoiceActive, kAudioLevel)); + EXPECT_TRUE(packet.SetExtension( + AudioLevel(kVoiceActive, kAudioLevel))); } TEST(RtpPacketTest, CreateWithMaxSizeHeaderExtension) { @@ -394,20 +416,21 @@ TEST(RtpPacketTest, FailsToSetUnregisteredExtension) { EXPECT_FALSE(packet.SetExtension(42)); EXPECT_FALSE(packet.HasExtension()); - EXPECT_EQ(packet.GetExtension(), absl::nullopt); + EXPECT_EQ(packet.GetExtension(), std::nullopt); } TEST(RtpPacketTest, SetReservedExtensionsAfterPayload) { const size_t kPayloadSize = 4; RtpPacketToSend::ExtensionManager extensions; extensions.Register(kTransmissionOffsetExtensionId); - extensions.Register(kAudioLevelExtensionId); + extensions.Register(kAudioLevelExtensionId); RtpPacketToSend packet(&extensions); EXPECT_TRUE(packet.ReserveExtension()); packet.SetPayloadSize(kPayloadSize); // Can't set extension after payload. - EXPECT_FALSE(packet.SetExtension(kVoiceActive, kAudioLevel)); + EXPECT_FALSE(packet.SetExtension( + AudioLevel(kVoiceActive, kAudioLevel))); // Unless reserved. 
EXPECT_TRUE(packet.SetExtension(kTimeOffset)); } @@ -453,8 +476,7 @@ TEST(RtpPacketTest, UsesZerosForPadding) { RtpPacket packet; EXPECT_TRUE(packet.SetPadding(kPaddingSize)); - EXPECT_THAT(rtc::MakeArrayView(packet.data() + 12, kPaddingSize - 1), - Each(0)); + EXPECT_THAT(MakeArrayView(packet.data() + 12, kPaddingSize - 1), Each(0)); } TEST(RtpPacketTest, CreateOneBytePadding) { @@ -488,7 +510,7 @@ TEST(RtpPacketTest, ParseMinimum) { } TEST(RtpPacketTest, ParseBuffer) { - rtc::CopyOnWriteBuffer unparsed(kMinimumPacket); + CopyOnWriteBuffer unparsed(kMinimumPacket); const uint8_t* raw = unparsed.data(); RtpPacketReceived packet; @@ -527,7 +549,7 @@ TEST(RtpPacketTest, ParseHeaderOnly) { // clang-format on RtpPacket packet; - EXPECT_TRUE(packet.Parse(rtc::CopyOnWriteBuffer(kPaddingHeader))); + EXPECT_TRUE(packet.Parse(CopyOnWriteBuffer(kPaddingHeader))); EXPECT_EQ(packet.PayloadType(), 0x62u); EXPECT_EQ(packet.SequenceNumber(), 0x3579u); EXPECT_EQ(packet.Timestamp(), 0x65431278u); @@ -547,7 +569,7 @@ TEST(RtpPacketTest, ParseHeaderOnlyWithPadding) { // clang-format on RtpPacket packet; - EXPECT_TRUE(packet.Parse(rtc::CopyOnWriteBuffer(kPaddingHeader))); + EXPECT_TRUE(packet.Parse(CopyOnWriteBuffer(kPaddingHeader))); EXPECT_TRUE(packet.has_padding()); EXPECT_EQ(packet.padding_size(), 0u); @@ -567,7 +589,7 @@ TEST(RtpPacketTest, ParseHeaderOnlyWithExtensionAndPadding) { RtpHeaderExtensionMap extensions; extensions.Register(1); RtpPacket packet(&extensions); - EXPECT_TRUE(packet.Parse(rtc::CopyOnWriteBuffer(kPaddingHeader))); + EXPECT_TRUE(packet.Parse(CopyOnWriteBuffer(kPaddingHeader))); EXPECT_TRUE(packet.has_padding()); EXPECT_TRUE(packet.HasExtension()); EXPECT_EQ(packet.padding_size(), 0u); @@ -583,7 +605,7 @@ TEST(RtpPacketTest, ParsePaddingOnlyPacket) { // clang-format on RtpPacket packet; - EXPECT_TRUE(packet.Parse(rtc::CopyOnWriteBuffer(kPaddingHeader))); + EXPECT_TRUE(packet.Parse(CopyOnWriteBuffer(kPaddingHeader))); EXPECT_TRUE(packet.has_padding()); EXPECT_EQ(packet.padding_size(), 3u); } @@ -599,7 +621,7 @@ TEST(RtpPacketTest, GetExtensionWithoutParametersReturnsOptionalValue) { auto time_offset = packet.GetExtension(); static_assert( std::is_same>::value, + std::optional>::value, ""); EXPECT_EQ(time_offset, kTimeOffset); EXPECT_FALSE(packet.GetExtension().has_value()); @@ -679,62 +701,59 @@ TEST(RtpPacketTest, ParseWithOverSizedExtension) { TEST(RtpPacketTest, ParseWith2Extensions) { RtpPacketToSend::ExtensionManager extensions; extensions.Register(kTransmissionOffsetExtensionId); - extensions.Register(kAudioLevelExtensionId); + extensions.Register(kAudioLevelExtensionId); RtpPacketReceived packet(&extensions); EXPECT_TRUE(packet.Parse(kPacketWithTOAndAL, sizeof(kPacketWithTOAndAL))); int32_t time_offset; EXPECT_TRUE(packet.GetExtension(&time_offset)); EXPECT_EQ(kTimeOffset, time_offset); - bool voice_active; - uint8_t audio_level; - EXPECT_TRUE(packet.GetExtension(&voice_active, &audio_level)); - EXPECT_EQ(kVoiceActive, voice_active); - EXPECT_EQ(kAudioLevel, audio_level); + AudioLevel audio_level; + EXPECT_TRUE(packet.GetExtension(&audio_level)); + EXPECT_EQ(kVoiceActive, audio_level.voice_activity()); + EXPECT_EQ(kAudioLevel, audio_level.level()); } TEST(RtpPacketTest, ParseSecondPacketWithFewerExtensions) { RtpPacketToSend::ExtensionManager extensions; extensions.Register(kTransmissionOffsetExtensionId); - extensions.Register(kAudioLevelExtensionId); + extensions.Register(kAudioLevelExtensionId); RtpPacketReceived packet(&extensions); 
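
Note: the repeated edits in these tests replace the (voice_active, level) parameter pair with a single AudioLevel value object. For reference, RFC 6464 packs both fields into one extension byte: the high bit is the voice-activity flag and the low seven bits are the audio level in -dBov. A standalone sketch of that packing, using an illustrative struct rather than the webrtc::AudioLevel class:

#include <cstdint>

struct AudioLevelValue {   // Illustrative stand-in for webrtc::AudioLevel.
  bool voice_activity = false;
  uint8_t level = 0;       // 0..127, in -dBov (larger means quieter).
};

uint8_t EncodeAudioLevel(const AudioLevelValue& value) {
  return static_cast<uint8_t>((value.voice_activity ? 0x80 : 0x00) |
                              (value.level & 0x7F));
}

AudioLevelValue DecodeAudioLevel(uint8_t byte) {
  return {.voice_activity = (byte & 0x80) != 0,
          .level = static_cast<uint8_t>(byte & 0x7F)};
}
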
EXPECT_TRUE(packet.Parse(kPacketWithTOAndAL, sizeof(kPacketWithTOAndAL))); EXPECT_TRUE(packet.HasExtension()); - EXPECT_TRUE(packet.HasExtension()); + EXPECT_TRUE(packet.HasExtension()); // Second packet without audio level. EXPECT_TRUE(packet.Parse(kPacketWithTO, sizeof(kPacketWithTO))); EXPECT_TRUE(packet.HasExtension()); - EXPECT_FALSE(packet.HasExtension()); + EXPECT_FALSE(packet.HasExtension()); } TEST(RtpPacketTest, ParseWith2ExtensionsInvalidPadding) { RtpPacketToSend::ExtensionManager extensions; extensions.Register(kTransmissionOffsetExtensionId); - extensions.Register(kAudioLevelExtensionId); + extensions.Register(kAudioLevelExtensionId); RtpPacketReceived packet(&extensions); EXPECT_TRUE(packet.Parse(kPacketWithTOAndALInvalidPadding, sizeof(kPacketWithTOAndALInvalidPadding))); int32_t time_offset; EXPECT_TRUE(packet.GetExtension(&time_offset)); EXPECT_EQ(kTimeOffset, time_offset); - bool voice_active; - uint8_t audio_level; - EXPECT_FALSE(packet.GetExtension(&voice_active, &audio_level)); + AudioLevel audio_level; + EXPECT_FALSE(packet.GetExtension(&audio_level)); } TEST(RtpPacketTest, ParseWith2ExtensionsReservedExtensionId) { RtpPacketToSend::ExtensionManager extensions; extensions.Register(kTransmissionOffsetExtensionId); - extensions.Register(kAudioLevelExtensionId); + extensions.Register(kAudioLevelExtensionId); RtpPacketReceived packet(&extensions); EXPECT_TRUE(packet.Parse(kPacketWithTOAndALReservedExtensionId, sizeof(kPacketWithTOAndALReservedExtensionId))); int32_t time_offset; EXPECT_TRUE(packet.GetExtension(&time_offset)); EXPECT_EQ(kTimeOffset, time_offset); - bool voice_active; - uint8_t audio_level; - EXPECT_FALSE(packet.GetExtension(&voice_active, &audio_level)); + AudioLevel audio_level; + EXPECT_FALSE(packet.GetExtension(&audio_level)); } TEST(RtpPacketTest, ParseWithAllFeatures) { @@ -778,7 +797,7 @@ TEST(RtpPacketTest, ParseLongTwoByteHeaderExtension) { TEST(RtpPacketTest, ParseTwoByteHeaderExtensionWithPadding) { RtpPacketToSend::ExtensionManager extensions; extensions.Register(kTwoByteExtensionId); - extensions.Register(kAudioLevelExtensionId); + extensions.Register(kAudioLevelExtensionId); RtpPacketReceived packet(&extensions); EXPECT_TRUE( packet.Parse(kPacketWithTwoByteHeaderExtensionWithPadding, @@ -786,11 +805,10 @@ TEST(RtpPacketTest, ParseTwoByteHeaderExtensionWithPadding) { int32_t time_offset; EXPECT_TRUE(packet.GetExtension(&time_offset)); EXPECT_EQ(kTimeOffset, time_offset); - bool voice_active; - uint8_t audio_level; - EXPECT_TRUE(packet.GetExtension(&voice_active, &audio_level)); - EXPECT_EQ(kVoiceActive, voice_active); - EXPECT_EQ(kAudioLevel, audio_level); + AudioLevel audio_level; + EXPECT_TRUE(packet.GetExtension(&audio_level)); + EXPECT_EQ(kVoiceActive, audio_level.voice_activity()); + EXPECT_EQ(kAudioLevel, audio_level.level()); } TEST(RtpPacketTest, ParseWithExtensionDelayed) { @@ -872,13 +890,13 @@ struct UncopyableExtension { static constexpr RTPExtensionType kId = kRtpExtensionDependencyDescriptor; static constexpr absl::string_view Uri() { return "uri"; } - static size_t ValueSize(const UncopyableValue& value) { return 1; } - static bool Write(rtc::ArrayView data, - const UncopyableValue& value) { + static size_t ValueSize(const UncopyableValue& /* value */) { return 1; } + static bool Write(ArrayView /* data */, + const UncopyableValue& /* value */) { return true; } - static bool Parse(rtc::ArrayView data, - UncopyableValue* value) { + static bool Parse(ArrayView /* data */, + UncopyableValue* /* value */) { return true; } }; @@ 
-903,6 +921,41 @@ TEST(RtpPacketTest, GetUncopyableExtension) { EXPECT_TRUE(rtp_packet.GetExtension(&value2)); } +struct ParseByReferenceExtension { + static constexpr RTPExtensionType kId = kRtpExtensionDependencyDescriptor; + static constexpr absl::string_view Uri() { return "uri"; } + + static size_t ValueSize(uint8_t /* value1 */, uint8_t /* value2 */) { + return 2; + } + static bool Write(ArrayView data, uint8_t value1, uint8_t value2) { + data[0] = value1; + data[1] = value2; + return true; + } + static bool Parse(ArrayView data, + uint8_t& value1, + uint8_t& value2) { + value1 = data[0]; + value2 = data[1]; + return true; + } +}; + +TEST(RtpPacketTest, GetExtensionByReference) { + RtpHeaderExtensionMap extensions; + extensions.Register(1); + RtpPacket rtp_packet(&extensions); + rtp_packet.SetExtension(13, 42); + + uint8_t value1 = 1; + uint8_t value2 = 1; + EXPECT_TRUE( + rtp_packet.GetExtension(value1, value2)); + EXPECT_EQ(int{value1}, 13); + EXPECT_EQ(int{value2}, 42); +} + TEST(RtpPacketTest, CreateAndParseTimingFrameExtension) { // Create a packet with video frame timing extension populated. RtpPacketToSend::ExtensionManager send_extensions; @@ -1006,7 +1059,7 @@ TEST(RtpPacketTest, constexpr AbsoluteCaptureTime kAbsoluteCaptureTime{ /*absolute_capture_timestamp=*/9876543210123456789ULL, - /*estimated_capture_clock_offset=*/absl::nullopt}; + /*estimated_capture_clock_offset=*/std::nullopt}; ASSERT_TRUE(send_packet.SetExtension( kAbsoluteCaptureTime)); @@ -1062,7 +1115,7 @@ TEST(RtpPacketTest, CreateAndParseTransportSequenceNumberV2) { constexpr int kTransportSequenceNumber = 12345; send_packet.SetExtension(kTransportSequenceNumber, - absl::nullopt); + std::nullopt); EXPECT_EQ(send_packet.GetRawExtension().size(), 2u); @@ -1071,7 +1124,7 @@ TEST(RtpPacketTest, CreateAndParseTransportSequenceNumberV2) { EXPECT_TRUE(receive_packet.Parse(send_packet.Buffer())); uint16_t received_transport_sequeunce_number; - absl::optional received_feedback_request; + std::optional received_feedback_request; EXPECT_TRUE(receive_packet.GetExtension( &received_transport_sequeunce_number, &received_feedback_request)); EXPECT_EQ(received_transport_sequeunce_number, kTransportSequenceNumber); @@ -1093,7 +1146,7 @@ TEST(RtpPacketTest, CreateAndParseTransportSequenceNumberV2Preallocated) { send_packet.SetSsrc(kSsrc); constexpr int kTransportSequenceNumber = 12345; - constexpr absl::optional kNoFeedbackRequest = + constexpr std::optional kNoFeedbackRequest = FeedbackRequest{/*include_timestamps=*/false, /*sequence_count=*/0}; send_packet.ReserveExtension(); send_packet.SetExtension(kTransportSequenceNumber, @@ -1106,7 +1159,7 @@ TEST(RtpPacketTest, CreateAndParseTransportSequenceNumberV2Preallocated) { EXPECT_TRUE(receive_packet.Parse(send_packet.Buffer())); uint16_t received_transport_sequeunce_number; - absl::optional received_feedback_request; + std::optional received_feedback_request; EXPECT_TRUE(receive_packet.GetExtension( &received_transport_sequeunce_number, &received_feedback_request)); EXPECT_EQ(received_transport_sequeunce_number, kTransportSequenceNumber); @@ -1126,7 +1179,7 @@ TEST(RtpPacketTest, send_packet.SetSsrc(kSsrc); constexpr int kTransportSequenceNumber = 12345; - constexpr absl::optional kFeedbackRequest = + constexpr std::optional kFeedbackRequest = FeedbackRequest{/*include_timestamps=*/true, /*sequence_count=*/3}; send_packet.SetExtension(kTransportSequenceNumber, kFeedbackRequest); @@ -1137,7 +1190,7 @@ TEST(RtpPacketTest, // Parse transport sequence number and feedback request. 
uint16_t received_transport_sequeunce_number; - absl::optional received_feedback_request; + std::optional received_feedback_request; EXPECT_TRUE(receive_packet.GetExtension( &received_transport_sequeunce_number, &received_feedback_request)); EXPECT_EQ(received_transport_sequeunce_number, kTransportSequenceNumber); @@ -1152,7 +1205,7 @@ TEST(RtpPacketTest, ReservedExtensionsCountedAsSetExtension) { // Register two extensions. RtpPacketToSend::ExtensionManager extensions; extensions.Register(kTransmissionOffsetExtensionId); - extensions.Register(kAudioLevelExtensionId); + extensions.Register(kAudioLevelExtensionId); RtpPacketReceived packet(&extensions); @@ -1164,7 +1217,7 @@ TEST(RtpPacketTest, ReservedExtensionsCountedAsSetExtension) { // Only the extension that is both registered and reserved matches // IsExtensionReserved(). EXPECT_FALSE(packet.HasExtension()); - EXPECT_FALSE(packet.HasExtension()); + EXPECT_FALSE(packet.HasExtension()); EXPECT_TRUE(packet.HasExtension()); } @@ -1172,14 +1225,15 @@ TEST(RtpPacketTest, ReservedExtensionsCountedAsSetExtension) { TEST(RtpPacketTest, RemoveMultipleExtensions) { RtpPacketToSend::ExtensionManager extensions; extensions.Register(kTransmissionOffsetExtensionId); - extensions.Register(kAudioLevelExtensionId); + extensions.Register(kAudioLevelExtensionId); RtpPacketToSend packet(&extensions); packet.SetPayloadType(kPayloadType); packet.SetSequenceNumber(kSeqNum); packet.SetTimestamp(kTimestamp); packet.SetSsrc(kSsrc); packet.SetExtension(kTimeOffset); - packet.SetExtension(kVoiceActive, kAudioLevel); + packet.SetExtension( + AudioLevel(kVoiceActive, kAudioLevel)); EXPECT_THAT(kPacketWithTOAndAL, ElementsAreArray(packet.data(), packet.size())); @@ -1200,21 +1254,22 @@ TEST(RtpPacketTest, RemoveMultipleExtensions) { TEST(RtpPacketTest, RemoveExtensionPreservesOtherUnregisteredExtensions) { RtpPacketToSend::ExtensionManager extensions; extensions.Register(kTransmissionOffsetExtensionId); - extensions.Register(kAudioLevelExtensionId); + extensions.Register(kAudioLevelExtensionId); RtpPacketToSend packet(&extensions); packet.SetPayloadType(kPayloadType); packet.SetSequenceNumber(kSeqNum); packet.SetTimestamp(kTimestamp); packet.SetSsrc(kSsrc); packet.SetExtension(kTimeOffset); - packet.SetExtension(kVoiceActive, kAudioLevel); + packet.SetExtension( + AudioLevel(kVoiceActive, kAudioLevel)); EXPECT_THAT(kPacketWithTOAndAL, ElementsAreArray(packet.data(), packet.size())); // "Unregister" kRtpExtensionTransmissionTimeOffset. RtpPacketToSend::ExtensionManager extensions1; - extensions1.Register(kAudioLevelExtensionId); + extensions1.Register(kAudioLevelExtensionId); packet.IdentifyExtensions(extensions1); // Make sure we can not delete extension which is set but not registered. 
@@ -1231,7 +1286,7 @@ TEST(RtpPacketTest, RemoveExtensionPreservesOtherUnregisteredExtensions) { TEST(RtpPacketTest, RemoveExtensionFailure) { RtpPacketToSend::ExtensionManager extensions; extensions.Register(kTransmissionOffsetExtensionId); - extensions.Register(kAudioLevelExtensionId); + extensions.Register(kAudioLevelExtensionId); RtpPacketToSend packet(&extensions); packet.SetPayloadType(kPayloadType); packet.SetSequenceNumber(kSeqNum); diff --git a/modules/rtp_rtcp/source/rtp_packetizer_av1.cc b/modules/rtp_rtcp/source/rtp_packetizer_av1.cc index 95dbaf364c..e5293a90e2 100644 --- a/modules/rtp_rtcp/source/rtp_packetizer_av1.cc +++ b/modules/rtp_rtcp/source/rtp_packetizer_av1.cc @@ -13,10 +13,13 @@ #include #include +#include +#include #include "api/array_view.h" #include "api/video/video_frame_type.h" #include "modules/rtp_rtcp/source/leb128.h" +#include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "rtc_base/byte_buffer.h" #include "rtc_base/checks.h" @@ -34,6 +37,12 @@ constexpr int kObuTypeTemporalDelimiter = 2; constexpr int kObuTypeTileList = 8; constexpr int kObuTypePadding = 15; +// Overhead introduced by "even distribution" of packet sizes. +constexpr size_t kBytesOverheadEvenDistribution = 1; +// Experimentally determined minimum amount of potential savings per packet to +// make "even distribution" of packet sizes worthwhile. +constexpr size_t kMinBytesSavedPerPacketWithEvenDistribution = 10; + bool ObuHasExtension(uint8_t obu_header) { return obu_header & 0b0'0000'100; } @@ -62,7 +71,7 @@ int MaxFragmentSize(int remaining_bytes) { } // namespace -RtpPacketizerAv1::RtpPacketizerAv1(rtc::ArrayView payload, +RtpPacketizerAv1::RtpPacketizerAv1(ArrayView payload, RtpPacketizer::PayloadSizeLimits limits, VideoFrameType frame_type, bool is_last_frame_in_picture) @@ -72,10 +81,9 @@ RtpPacketizerAv1::RtpPacketizerAv1(rtc::ArrayView payload, is_last_frame_in_picture_(is_last_frame_in_picture) {} std::vector RtpPacketizerAv1::ParseObus( - rtc::ArrayView payload) { + ArrayView payload) { std::vector result; - rtc::ByteBufferReader payload_reader( - reinterpret_cast(payload.data()), payload.size()); + ByteBufferReader payload_reader(payload); while (payload_reader.Length() > 0) { Obu obu; payload_reader.ReadUInt8(&obu.header); @@ -91,9 +99,9 @@ std::vector RtpPacketizerAv1::ParseObus( ++obu.size; } if (!ObuHasSize(obu.header)) { - obu.payload = rtc::MakeArrayView( - reinterpret_cast(payload_reader.Data()), - payload_reader.Length()); + obu.payload = + MakeArrayView(reinterpret_cast(payload_reader.Data()), + payload_reader.Length()); payload_reader.Consume(payload_reader.Length()); } else { uint64_t size = 0; @@ -104,7 +112,7 @@ std::vector RtpPacketizerAv1::ParseObus( << payload_reader.Length(); return {}; } - obu.payload = rtc::MakeArrayView( + obu.payload = MakeArrayView( reinterpret_cast(payload_reader.Data()), size); payload_reader.Consume(size); } @@ -139,8 +147,8 @@ int RtpPacketizerAv1::AdditionalBytesForPreviousObuElement( return Leb128Size(packet.last_obu_size); } -std::vector RtpPacketizerAv1::Packetize( - rtc::ArrayView obus, +std::vector RtpPacketizerAv1::PacketizeInternal( + ArrayView obus, PayloadSizeLimits limits) { std::vector packets; if (obus.empty()) { @@ -244,12 +252,12 @@ std::vector RtpPacketizerAv1::Packetize( obu_offset + limits.max_payload_len < obu.size; obu_offset += limits.max_payload_len) { packets.emplace_back(/*first_obu_index=*/obu_index); - Packet& packet = packets.back(); - 
packet.num_obu_elements = 1; - packet.first_obu_offset = obu_offset; + Packet& middle_packet = packets.back(); + middle_packet.num_obu_elements = 1; + middle_packet.first_obu_offset = obu_offset; int middle_fragment_size = limits.max_payload_len; - packet.last_obu_size = middle_fragment_size; - packet.packet_size = middle_fragment_size; + middle_packet.last_obu_size = middle_fragment_size; + middle_packet.packet_size = middle_fragment_size; } // Add the last fragment of the obu. @@ -274,11 +282,11 @@ std::vector RtpPacketizerAv1::Packetize( last_fragment_size -= semi_last_fragment_size; packets.emplace_back(/*first_obu_index=*/obu_index); - Packet& packet = packets.back(); - packet.num_obu_elements = 1; - packet.first_obu_offset = obu_offset; - packet.last_obu_size = semi_last_fragment_size; - packet.packet_size = semi_last_fragment_size; + Packet& second_last_packet = packets.back(); + second_last_packet.num_obu_elements = 1; + second_last_packet.first_obu_offset = obu_offset; + second_last_packet.last_obu_size = semi_last_fragment_size; + second_last_packet.packet_size = semi_last_fragment_size; obu_offset += semi_last_fragment_size; } packets.emplace_back(/*first_obu_index=*/obu_index); @@ -292,6 +300,54 @@ std::vector RtpPacketizerAv1::Packetize( return packets; } +std::vector RtpPacketizerAv1::Packetize( + ArrayView obus, + PayloadSizeLimits limits) { + std::vector packets = PacketizeInternal(obus, limits); + if (packets.size() <= 1) { + return packets; + } + size_t packet_index = 0; + size_t packet_size_left_unused = 0; + for (const auto& packet : packets) { + // Every packet has to have an aggregation header of size + // kAggregationHeaderSize. + int available_bytes = limits.max_payload_len - kAggregationHeaderSize; + + if (packet_index == 0) { + available_bytes -= limits.first_packet_reduction_len; + } else if (packet_index == packets.size() - 1) { + available_bytes -= limits.last_packet_reduction_len; + } + if (available_bytes >= packet.packet_size) { + packet_size_left_unused += (available_bytes - packet.packet_size); + } + packet_index++; + } + if (packet_size_left_unused > + packets.size() * kMinBytesSavedPerPacketWithEvenDistribution) { + // Calculate new limits with a reduced max_payload_len. + size_t size_reduction = packet_size_left_unused / packets.size(); + RTC_DCHECK_GT(limits.max_payload_len, size_reduction); + RTC_DCHECK_GT(size_reduction, kBytesOverheadEvenDistribution); + limits.max_payload_len -= (size_reduction - kBytesOverheadEvenDistribution); + if (limits.max_payload_len - limits.last_packet_reduction_len < 3 || + limits.max_payload_len - limits.first_packet_reduction_len < 3) { + return packets; + } + std::vector packets_even = PacketizeInternal(obus, limits); + // The number of packets should not change in the second pass. If it does, + // conservatively return the original packets. 
+ if (packets_even.size() == packets.size()) { + return packets_even; + } + RTC_LOG(LS_WARNING) << "AV1 even distribution caused a regression in " + "number of packets from " + << packets.size() << " to " << packets_even.size(); + } + return packets; +} + uint8_t RtpPacketizerAv1::AggregationHeader() const { const Packet& packet = packets_[packet_index_]; uint8_t aggregation_header = 0; diff --git a/modules/rtp_rtcp/source/rtp_packetizer_av1.h b/modules/rtp_rtcp/source/rtp_packetizer_av1.h index 520e746eac..b64418aace 100644 --- a/modules/rtp_rtcp/source/rtp_packetizer_av1.h +++ b/modules/rtp_rtcp/source/rtp_packetizer_av1.h @@ -24,7 +24,7 @@ namespace webrtc { class RtpPacketizerAv1 : public RtpPacketizer { public: - RtpPacketizerAv1(rtc::ArrayView payload, + RtpPacketizerAv1(ArrayView payload, PayloadSizeLimits limits, VideoFrameType frame_type, bool is_last_frame_in_picture); @@ -37,7 +37,7 @@ class RtpPacketizerAv1 : public RtpPacketizer { struct Obu { uint8_t header; uint8_t extension_header; // undefined if (header & kXbit) == 0 - rtc::ArrayView payload; + ArrayView payload; int size; // size of the header and payload combined. }; struct Packet { @@ -53,12 +53,16 @@ class RtpPacketizerAv1 : public RtpPacketizer { }; // Parses the payload into serie of OBUs. - static std::vector ParseObus(rtc::ArrayView payload); + static std::vector ParseObus(ArrayView payload); // Returns the number of additional bytes needed to store the previous OBU // element if an additonal OBU element is added to the packet. static int AdditionalBytesForPreviousObuElement(const Packet& packet); - static std::vector Packetize(rtc::ArrayView obus, + static std::vector PacketizeInternal(ArrayView obus, + PayloadSizeLimits limits); + // Packetize and try to distribute the payload evenly across packets. 
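
Note: the two-pass logic added to Packetize() above can be summarised as follows: measure how much space the greedy packetization leaves unused across all packets, and if the savings average more than kMinBytesSavedPerPacketWithEvenDistribution (10) bytes per packet, shrink the per-packet budget while keeping kBytesOverheadEvenDistribution (1) byte of slack, then packetize again; the second pass is kept only if the packet count does not grow. A compressed restatement of the budget calculation, as a sketch rather than the library code:

#include <cstddef>

// Returns the max_payload_len to use for a second packetization pass, or the
// unchanged limit when re-packetizing is not expected to pay off.
size_t EvenDistributionLimit(size_t unused_bytes_total,
                             size_t num_packets,
                             size_t max_payload_len) {
  constexpr size_t kMinSavingPerPacket = 10;  // kMinBytesSavedPerPacketWithEvenDistribution
  constexpr size_t kOverheadPerPacket = 1;    // kBytesOverheadEvenDistribution
  if (num_packets <= 1 ||
      unused_bytes_total <= num_packets * kMinSavingPerPacket) {
    return max_payload_len;
  }
  const size_t reduction = unused_bytes_total / num_packets;
  return max_payload_len - (reduction - kOverheadPerPacket);
}
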
+ static std::vector Packetize(ArrayView obus, PayloadSizeLimits limits); + uint8_t AggregationHeader() const; const VideoFrameType frame_type_; diff --git a/modules/rtp_rtcp/source/rtp_packetizer_av1_test_helper.cc b/modules/rtp_rtcp/source/rtp_packetizer_av1_test_helper.cc index 3d62bcef44..1e38352950 100644 --- a/modules/rtp_rtcp/source/rtp_packetizer_av1_test_helper.cc +++ b/modules/rtp_rtcp/source/rtp_packetizer_av1_test_helper.cc @@ -10,9 +10,11 @@ #include "modules/rtp_rtcp/source/rtp_packetizer_av1_test_helper.h" +#include #include #include +#include #include namespace webrtc { diff --git a/modules/rtp_rtcp/source/rtp_packetizer_av1_test_helper.h b/modules/rtp_rtcp/source/rtp_packetizer_av1_test_helper.h index 04a902fe56..7b0a0410c4 100644 --- a/modules/rtp_rtcp/source/rtp_packetizer_av1_test_helper.h +++ b/modules/rtp_rtcp/source/rtp_packetizer_av1_test_helper.h @@ -14,7 +14,6 @@ #include #include -#include #include namespace webrtc { diff --git a/modules/rtp_rtcp/source/rtp_packetizer_av1_unittest.cc b/modules/rtp_rtcp/source/rtp_packetizer_av1_unittest.cc index 2151a59295..bb46fcdf33 100644 --- a/modules/rtp_rtcp/source/rtp_packetizer_av1_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_packetizer_av1_unittest.cc @@ -20,6 +20,8 @@ #include "api/array_view.h" #include "api/scoped_refptr.h" #include "api/video/encoded_image.h" +#include "api/video/video_frame_type.h" +#include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/rtp_packetizer_av1_test_helper.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_av1.h" @@ -39,8 +41,8 @@ constexpr uint8_t kNewCodedVideoSequenceBit = 0b00'00'1000; // Wrapper around rtp_packet to make it look like container of payload bytes. struct RtpPayload { - using value_type = rtc::ArrayView::value_type; - using const_iterator = rtc::ArrayView::const_iterator; + using value_type = ArrayView::value_type; + using const_iterator = ArrayView::const_iterator; RtpPayload() : rtp_packet(/*extensions=*/nullptr) {} RtpPayload& operator=(RtpPayload&&) = default; @@ -63,7 +65,7 @@ class Av1Frame { using value_type = uint8_t; using const_iterator = const uint8_t*; - explicit Av1Frame(rtc::scoped_refptr frame) + explicit Av1Frame(scoped_refptr frame) : frame_(std::move(frame)) {} const_iterator begin() const { return frame_ ? 
frame_->data() : nullptr; } @@ -72,11 +74,11 @@ class Av1Frame { } private: - rtc::scoped_refptr frame_; + scoped_refptr frame_; }; std::vector Packetize( - rtc::ArrayView payload, + ArrayView payload, RtpPacketizer::PayloadSizeLimits limits, VideoFrameType frame_type = VideoFrameType::kVideoFrameDelta, bool is_last_frame_in_picture = true) { @@ -91,14 +93,20 @@ std::vector Packetize( return result; } -Av1Frame ReassembleFrame(rtc::ArrayView rtp_payloads) { - std::vector> payloads(rtp_payloads.size()); +Av1Frame ReassembleFrame(ArrayView rtp_payloads) { + std::vector> payloads(rtp_payloads.size()); for (size_t i = 0; i < rtp_payloads.size(); ++i) { payloads[i] = rtp_payloads[i]; } return Av1Frame(VideoRtpDepacketizerAv1().AssembleFrame(payloads)); } +TEST(RtpPacketizerAv1Test, EmptyPayload) { + RtpPacketizer::PayloadSizeLimits limits; + RtpPacketizerAv1 packetizer({}, limits, VideoFrameType::kVideoFrameKey, true); + EXPECT_EQ(packetizer.NumPackets(), 0u); +} + TEST(RtpPacketizerAv1Test, PacketizeOneObuWithoutSizeAndExtension) { auto kFrame = BuildAv1Frame({Av1Obu(kAv1ObuTypeFrame) .WithoutSize() @@ -338,5 +346,15 @@ TEST(RtpPacketizerAv1Test, EXPECT_THAT(ReassembleFrame(payloads), ElementsAreArray(kFrame)); } +TEST(RtpPacketizerAv1TestEven, EvenDistribution) { + auto kFrame = BuildAv1Frame({ + Av1Obu(kAv1ObuTypeFrame).WithPayload(std::vector(1206, 0)), + Av1Obu(kAv1ObuTypeFrame).WithPayload(std::vector(1476, 0)), + Av1Obu(kAv1ObuTypeFrame).WithPayload(std::vector(1431, 0)), + }); + EXPECT_THAT(Packetize(kFrame, {}), ElementsAre(SizeIs(1032), SizeIs(1032), + SizeIs(1032), SizeIs(1028))); +} + } // namespace } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_packetizer_h265.cc b/modules/rtp_rtcp/source/rtp_packetizer_h265.cc new file mode 100644 index 0000000000..0ae271c801 --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_packetizer_h265.cc @@ -0,0 +1,335 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/rtp_rtcp/source/rtp_packetizer_h265.h" + +#include +#include +#include +#include +#include + +#include "api/array_view.h" +#include "common_video/h264/h264_common.h" +#include "common_video/h265/h265_common.h" +#include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/rtp_packet_h265_common.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +RtpPacketizerH265::RtpPacketizerH265(ArrayView payload, + PayloadSizeLimits limits) + : limits_(limits), num_packets_left_(0) { + for (const auto& nalu : H264::FindNaluIndices(payload)) { + if (!nalu.payload_size) { + input_fragments_.clear(); + return; + } + input_fragments_.push_back( + payload.subview(nalu.payload_start_offset, nalu.payload_size)); + } + + if (!GeneratePackets()) { + // If failed to generate all the packets, discard already generated + // packets in case the caller would ignore return value and still try to + // call NextPacket(). 
+ num_packets_left_ = 0; + while (!packets_.empty()) { + packets_.pop(); + } + } +} + +RtpPacketizerH265::~RtpPacketizerH265() = default; + +size_t RtpPacketizerH265::NumPackets() const { + return num_packets_left_; +} + +bool RtpPacketizerH265::GeneratePackets() { + for (size_t i = 0; i < input_fragments_.size();) { + int fragment_len = input_fragments_[i].size(); + int single_packet_capacity = limits_.max_payload_len; + if (input_fragments_.size() == 1) { + single_packet_capacity -= limits_.single_packet_reduction_len; + } else if (i == 0) { + single_packet_capacity -= limits_.first_packet_reduction_len; + } else if (i + 1 == input_fragments_.size()) { + // Pretend that last fragment is larger instead of making last packet + // smaller. + single_packet_capacity -= limits_.last_packet_reduction_len; + } + if (fragment_len > single_packet_capacity) { + if (!PacketizeFu(i)) { + return false; + } + ++i; + } else { + i = PacketizeAp(i); + } + } + return true; +} + +bool RtpPacketizerH265::PacketizeFu(size_t fragment_index) { + // Fragment payload into packets (FU). + // Strip out the original header and leave room for the FU header. + ArrayView fragment = input_fragments_[fragment_index]; + PayloadSizeLimits limits = limits_; + // Refer to section 4.4.3 in RFC7798, each FU fragment will have a 2-bytes + // payload header and a one-byte FU header. DONL is not supported so ignore + // its size when calculating max_payload_len. + limits.max_payload_len -= + kH265FuHeaderSizeBytes + kH265PayloadHeaderSizeBytes; + + // Update single/first/last packet reductions unless it is single/first/last + // fragment. + if (input_fragments_.size() != 1) { + // if this fragment is put into a single packet, it might still be the + // first or the last packet in the whole sequence of packets. + if (fragment_index == input_fragments_.size() - 1) { + limits.single_packet_reduction_len = limits_.last_packet_reduction_len; + } else if (fragment_index == 0) { + limits.single_packet_reduction_len = limits_.first_packet_reduction_len; + } else { + limits.single_packet_reduction_len = 0; + } + } + if (fragment_index != 0) { + limits.first_packet_reduction_len = 0; + } + if (fragment_index != input_fragments_.size() - 1) { + limits.last_packet_reduction_len = 0; + } + + // Strip out the original header. + size_t payload_left = fragment.size() - kH265NalHeaderSizeBytes; + int offset = kH265NalHeaderSizeBytes; + + std::vector payload_sizes = SplitAboutEqually(payload_left, limits); + if (payload_sizes.empty()) { + return false; + } + + for (size_t i = 0; i < payload_sizes.size(); ++i) { + int packet_length = payload_sizes[i]; + RTC_CHECK_GT(packet_length, 0); + uint16_t header = (fragment[0] << 8) | fragment[1]; + packets_.push({.source_fragment = fragment.subview(offset, packet_length), + .first_fragment = (i == 0), + .last_fragment = (i == payload_sizes.size() - 1), + .aggregated = false, + .header = header}); + offset += packet_length; + payload_left -= packet_length; + } + num_packets_left_ += payload_sizes.size(); + RTC_CHECK_EQ(payload_left, 0); + return true; +} + +int RtpPacketizerH265::PacketizeAp(size_t fragment_index) { + // Aggregate fragments into one packet. 
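
Note: PacketizeFu() above never transmits the original two-byte NAL unit header; instead every fragment pays kH265PayloadHeaderSizeBytes (2) plus kH265FuHeaderSizeBytes (1) of overhead. A back-of-the-envelope helper for the minimum number of FU packets a single oversized NAL unit needs, ignoring the first/last packet reductions that the real code folds into SplitAboutEqually():

#include <cstddef>

constexpr size_t kNalHeaderBytes = 2;   // assumed value of kH265NalHeaderSizeBytes
constexpr size_t kFuOverheadBytes = 3;  // 2-byte PayloadHdr + 1-byte FU header

size_t MinFuPacketsNeeded(size_t nalu_size, size_t max_payload_len) {
  const size_t payload_bytes = nalu_size - kNalHeaderBytes;      // header is stripped
  const size_t per_packet = max_payload_len - kFuOverheadBytes;  // room per packet
  return (payload_bytes + per_packet - 1) / per_packet;          // ceiling division
}
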
+ size_t payload_size_left = limits_.max_payload_len; + if (input_fragments_.size() == 1) { + payload_size_left -= limits_.single_packet_reduction_len; + } else if (fragment_index == 0) { + payload_size_left -= limits_.first_packet_reduction_len; + } + int aggregated_fragments = 0; + size_t fragment_headers_length = 0; + ArrayView fragment = input_fragments_[fragment_index]; + RTC_CHECK_GE(payload_size_left, fragment.size()); + ++num_packets_left_; + + auto payload_size_needed = [&] { + size_t fragment_size = fragment.size() + fragment_headers_length; + if (input_fragments_.size() == 1) { + // Single fragment, single packet, payload_size_left already adjusted + // with limits_.single_packet_reduction_len. + return fragment_size; + } + if (fragment_index == input_fragments_.size() - 1) { + // Last fragment, so this might be the last packet. + return fragment_size + limits_.last_packet_reduction_len; + } + return fragment_size; + }; + + uint16_t header = (fragment[0] << 8) | fragment[1]; + while (payload_size_left >= payload_size_needed()) { + RTC_CHECK_GT(fragment.size(), 0); + packets_.push({.source_fragment = fragment, + .first_fragment = (aggregated_fragments == 0), + .last_fragment = false, + .aggregated = true, + .header = header}); + payload_size_left -= fragment.size(); + payload_size_left -= fragment_headers_length; + + fragment_headers_length = kH265LengthFieldSizeBytes; + // If we are going to try to aggregate more fragments into this packet + // we need to add the AP NALU header and a length field for the first + // NALU of this packet. + if (aggregated_fragments == 0) { + fragment_headers_length += + kH265PayloadHeaderSizeBytes + kH265LengthFieldSizeBytes; + } + ++aggregated_fragments; + + // Next fragment. + ++fragment_index; + if (fragment_index == input_fragments_.size()) { + break; + } + fragment = input_fragments_[fragment_index]; + } + RTC_CHECK_GT(aggregated_fragments, 0); + packets_.back().last_fragment = true; + return fragment_index; +} + +bool RtpPacketizerH265::NextPacket(RtpPacketToSend* rtp_packet) { + RTC_DCHECK(rtp_packet); + + if (packets_.empty()) { + return false; + } + + PacketUnit packet = packets_.front(); + + if (packet.first_fragment && packet.last_fragment) { + // Single NAL unit packet. Do not support DONL for single NAL unit packets, + // DONL field is not present. + size_t bytes_to_send = packet.source_fragment.size(); + uint8_t* buffer = rtp_packet->AllocatePayload(bytes_to_send); + memcpy(buffer, packet.source_fragment.data(), bytes_to_send); + packets_.pop(); + input_fragments_.pop_front(); + } else if (packet.aggregated) { + NextAggregatePacket(rtp_packet); + } else { + NextFragmentPacket(rtp_packet); + } + rtp_packet->SetMarker(packets_.empty()); + --num_packets_left_; + return true; +} + +void RtpPacketizerH265::NextAggregatePacket(RtpPacketToSend* rtp_packet) { + size_t payload_capacity = rtp_packet->FreeCapacity(); + RTC_CHECK_GE(payload_capacity, kH265PayloadHeaderSizeBytes); + uint8_t* buffer = rtp_packet->AllocatePayload(payload_capacity); + RTC_CHECK(buffer); + PacketUnit* packet = &packets_.front(); + RTC_CHECK(packet->first_fragment); + + /* + +---------------+---------------+ + |0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7| + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + |F| Type | LayerId | TID | + +-------------+-----------------+ + */ + // Refer to section 4.4.2 for aggregation packets and modify type to + // 48 in PayloadHdr for aggregate packet. Do not support DONL for aggregation + // packets, DONL field is not present. 
+ int index = kH265PayloadHeaderSizeBytes; + bool is_last_fragment = packet->last_fragment; + + // Refer to section 4.4.2 for aggregation packets and calculate the lowest + // value of LayerId and TID of all the aggregated NAL units + uint8_t layer_id_min = kH265MaxLayerId; + uint8_t temporal_id_min = kH265MaxTemporalId; + while (packet->aggregated) { + // Add NAL unit length field. + ArrayView fragment = packet->source_fragment; + uint8_t layer_id = ((fragment[0] & kH265LayerIDHMask) << 5) | + ((fragment[1] & kH265LayerIDLMask) >> 3); + layer_id_min = std::min(layer_id_min, layer_id); + uint8_t temporal_id = fragment[1] & kH265TIDMask; + temporal_id_min = std::min(temporal_id_min, temporal_id); + + ByteWriter::WriteBigEndian(&buffer[index], fragment.size()); + index += kH265LengthFieldSizeBytes; + // Add NAL unit. + memcpy(&buffer[index], fragment.data(), fragment.size()); + index += fragment.size(); + packets_.pop(); + input_fragments_.pop_front(); + if (is_last_fragment) { + break; + } + packet = &packets_.front(); + is_last_fragment = packet->last_fragment; + } + + buffer[0] = (H265::NaluType::kAp << 1) | (layer_id_min >> 5); + buffer[1] = (layer_id_min << 3) | temporal_id_min; + RTC_CHECK(is_last_fragment); + rtp_packet->SetPayloadSize(index); +} + +void RtpPacketizerH265::NextFragmentPacket(RtpPacketToSend* rtp_packet) { + PacketUnit* packet = &packets_.front(); + // NAL unit fragmented over multiple packets (FU). + // We do not send original NALU header, so it will be replaced by the + // PayloadHdr of the first packet. + /* + +---------------+---------------+ + |0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7| + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + |F| Type | LayerId | TID | + +-------------+-----------------+ + */ + // Refer to section section 4.4.3 for aggregation packets and modify type to + // 49 in PayloadHdr for aggregate packet. + uint8_t payload_hdr_h = + packet->header >> 8; // 1-bit F, 6-bit type, 1-bit layerID highest-bit + uint8_t payload_hdr_l = packet->header & 0xFF; + uint8_t layer_id_h = payload_hdr_h & kH265LayerIDHMask; + uint8_t fu_header = 0; + /* + +---------------+ + |0|1|2|3|4|5|6|7| + +-+-+-+-+-+-+-+-+ + |S|E| FuType | + +---------------+ + */ + // S bit indicates the start of a fragmented NAL unit. + // E bit indicates the end of a fragmented NAL unit. + // FuType must be equal to the field type value of the fragmented NAL unit. + fu_header |= (packet->first_fragment ? kH265SBitMask : 0); + fu_header |= (packet->last_fragment ? kH265EBitMask : 0); + uint8_t type = (payload_hdr_h & kH265TypeMask) >> 1; + fu_header |= type; + // Now update payload_hdr_h with FU type. + payload_hdr_h = (payload_hdr_h & kH265TypeMaskN) | + (H265::NaluType::kFu << 1) | layer_id_h; + ArrayView fragment = packet->source_fragment; + uint8_t* buffer = rtp_packet->AllocatePayload( + kH265FuHeaderSizeBytes + kH265PayloadHeaderSizeBytes + fragment.size()); + RTC_CHECK(buffer); + buffer[0] = payload_hdr_h; + buffer[1] = payload_hdr_l; + buffer[2] = fu_header; + + // Do not support DONL for fragmentation units, DONL field is not present. 
+ memcpy(buffer + kH265FuHeaderSizeBytes + kH265PayloadHeaderSizeBytes, + fragment.data(), fragment.size()); + if (packet->last_fragment) { + input_fragments_.pop_front(); + } + packets_.pop(); +} + +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_packetizer_h265.h b/modules/rtp_rtcp/source/rtp_packetizer_h265.h new file mode 100644 index 0000000000..9c57fd887d --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_packetizer_h265.h @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_RTP_RTCP_SOURCE_RTP_PACKETIZER_H265_H_ +#define MODULES_RTP_RTCP_SOURCE_RTP_PACKETIZER_H265_H_ + +#include +#include +#include +#include + +#include "api/array_view.h" +#include "modules/rtp_rtcp/source/rtp_format.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" + +namespace webrtc { + +class RtpPacketizerH265 : public RtpPacketizer { + public: + // Initialize with payload from encoder. + // The payload_data must be exactly one encoded H.265 frame. + // For H265 we only support tx-mode SRST. + RtpPacketizerH265(ArrayView payload, PayloadSizeLimits limits); + + RtpPacketizerH265(const RtpPacketizerH265&) = delete; + RtpPacketizerH265& operator=(const RtpPacketizerH265&) = delete; + + ~RtpPacketizerH265() override; + + size_t NumPackets() const override; + + // Get the next payload with H.265 payload header. + // Write payload and set marker bit of the `packet`. + // Returns true on success or false if there was no payload to packetize. + bool NextPacket(RtpPacketToSend* rtp_packet) override; + + private: + struct PacketUnit { + ArrayView source_fragment; + bool first_fragment = false; + bool last_fragment = false; + bool aggregated = false; + uint16_t header = 0; + }; + std::deque> input_fragments_; + std::queue packets_; + + bool GeneratePackets(); + bool PacketizeFu(size_t fragment_index); + int PacketizeAp(size_t fragment_index); + + void NextAggregatePacket(RtpPacketToSend* rtp_packet); + void NextFragmentPacket(RtpPacketToSend* rtp_packet); + + const PayloadSizeLimits limits_; + size_t num_packets_left_ = 0; +}; +} // namespace webrtc +#endif // MODULES_RTP_RTCP_SOURCE_RTP_PACKETIZER_H265_H_ diff --git a/modules/rtp_rtcp/source/rtp_packetizer_h265_unittest.cc b/modules/rtp_rtcp/source/rtp_packetizer_h265_unittest.cc new file mode 100644 index 0000000000..8602e173d4 --- /dev/null +++ b/modules/rtp_rtcp/source/rtp_packetizer_h265_unittest.cc @@ -0,0 +1,656 @@ +/* + * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/rtp_rtcp/source/rtp_packetizer_h265.h" + +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "api/array_view.h" +#include "common_video/h265/h265_common.h" +#include "modules/rtp_rtcp/source/rtp_format.h" +#include "modules/rtp_rtcp/source/rtp_packet_h265_common.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::Each; +using ::testing::ElementsAre; +using ::testing::ElementsAreArray; +using ::testing::Eq; +using ::testing::IsEmpty; +using ::testing::SizeIs; + +constexpr RtpPacketToSend::ExtensionManager* kNoExtensions = nullptr; +constexpr size_t kMaxPayloadSizeBytes = 1200; +constexpr size_t kH265LengthFieldSizeBytes = 2; +constexpr RtpPacketizer::PayloadSizeLimits kNoLimits; + +constexpr size_t kFuHeaderSizeBytes = + kH265FuHeaderSizeBytes + kH265PayloadHeaderSizeBytes; + +struct NalUnitHeader { + uint8_t forbidden_zero_bit = 0; + uint8_t nal_unit_type = 0; + uint8_t nuh_layer_id = 0; + uint8_t nuh_temporal_id_plus1 = 0; +}; + +// Creates Buffer that looks like nal unit of given header and size. +Buffer GenerateNalUnit(NalUnitHeader header, size_t size) { + RTC_CHECK_GT(size, 0); + Buffer buffer(size); + buffer[0] = (header.nal_unit_type << 1) | (header.nuh_layer_id >> 5); + buffer[1] = (header.nuh_layer_id << 3) | header.nuh_temporal_id_plus1; + for (size_t i = 2; i < size; ++i) { + buffer[i] = static_cast(i); + } + // Last byte shouldn't be 0, or it may be counted as part of next 4-byte start + // sequence. + buffer[size - 1] |= 0x10; + return buffer; +} + +// Create frame consisting of nalus of given size. +Buffer CreateFrame(std::initializer_list nalu_sizes) { + static constexpr int kStartCodeSize = 3; + Buffer frame(absl::c_accumulate(nalu_sizes, size_t{0}) + + kStartCodeSize * nalu_sizes.size()); + size_t offset = 0; + for (size_t nalu_size : nalu_sizes) { + EXPECT_GE(nalu_size, 1u); + // Insert nalu start code + frame[offset] = 0; + frame[offset + 1] = 0; + frame[offset + 2] = 1; + // Set some valid header. + frame[offset + 3] = 2; + // Fill payload avoiding accidental start codes + if (nalu_size > 1) { + memset(frame.data() + offset + 4, 0x3f, nalu_size - 1); + } + offset += (kStartCodeSize + nalu_size); + } + return frame; +} + +// Create frame consisting of given nalus. +Buffer CreateFrame(ArrayView nalus) { + static constexpr int kStartCodeSize = 3; + int frame_size = 0; + for (const Buffer& nalu : nalus) { + frame_size += (kStartCodeSize + nalu.size()); + } + Buffer frame(frame_size); + size_t offset = 0; + for (const Buffer& nalu : nalus) { + // Insert nalu start code + frame[offset] = 0; + frame[offset + 1] = 0; + frame[offset + 2] = 1; + // Copy the nalu unit. + memcpy(frame.data() + offset + 3, nalu.data(), nalu.size()); + offset += (kStartCodeSize + nalu.size()); + } + return frame; +} + +std::vector FetchAllPackets(RtpPacketizerH265* packetizer) { + std::vector result; + size_t num_packets = packetizer->NumPackets(); + result.reserve(num_packets); + RtpPacketToSend packet(kNoExtensions); + while (packetizer->NextPacket(&packet)) { + result.push_back(packet); + } + EXPECT_THAT(result, SizeIs(num_packets)); + return result; +} + +// Single nalu tests. 
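
Note: GenerateNalUnit() above packs the two-byte H.265 NAL unit header as byte0 = (type << 1) | (layer_id >> 5) and byte1 = (layer_id << 3) | temporal_id_plus1, and the tests below read those fields back through the kH265LayerIDHMask, kH265LayerIDLMask and kH265TIDMask constants. The inverse mapping, written out as a standalone sketch reusing the NalUnitHeader struct defined above (field widths per the HEVC NAL unit header: 1-bit forbidden_zero_bit, 6-bit type, 6-bit nuh_layer_id, 3-bit nuh_temporal_id_plus1):

#include <cstdint>

NalUnitHeader ParseH265NalHeader(uint8_t byte0, uint8_t byte1) {
  return {.forbidden_zero_bit = static_cast<uint8_t>(byte0 >> 7),
          .nal_unit_type = static_cast<uint8_t>((byte0 >> 1) & 0x3F),
          .nuh_layer_id =
              static_cast<uint8_t>(((byte0 & 0x01) << 5) | (byte1 >> 3)),
          .nuh_temporal_id_plus1 = static_cast<uint8_t>(byte1 & 0x07)};
}
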
+TEST(RtpPacketizerH265Test, SingleNalu) { + const uint8_t frame[] = {0, 0, 1, H265::kIdrWRadl, 0xFF}; + + RtpPacketizerH265 packetizer(frame, kNoLimits); + std::vector packets = FetchAllPackets(&packetizer); + + ASSERT_THAT(packets, SizeIs(1)); + EXPECT_THAT(packets[0].payload(), ElementsAre(H265::kIdrWRadl, 0xFF)); +} + +TEST(RtpPacketizerH265Test, SingleNaluTwoPackets) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = kMaxPayloadSizeBytes; + Buffer nalus[] = {GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + kMaxPayloadSizeBytes), + GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + 100)}; + Buffer frame = CreateFrame(nalus); + + RtpPacketizerH265 packetizer(frame, limits); + std::vector packets = FetchAllPackets(&packetizer); + + ASSERT_THAT(packets, SizeIs(2)); + EXPECT_THAT(packets[0].payload(), ElementsAreArray(nalus[0])); + EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[1])); +} + +TEST(RtpPacketizerH265Test, + SingleNaluFirstPacketReductionAppliesOnlyToFirstFragment) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 200; + limits.first_packet_reduction_len = 5; + Buffer nalus[] = {GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/195), + GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/200), + GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/200)}; + Buffer frame = CreateFrame(nalus); + + RtpPacketizerH265 packetizer(frame, limits); + std::vector packets = FetchAllPackets(&packetizer); + + ASSERT_THAT(packets, SizeIs(3)); + EXPECT_THAT(packets[0].payload(), ElementsAreArray(nalus[0])); + EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[1])); + EXPECT_THAT(packets[2].payload(), ElementsAreArray(nalus[2])); +} + +TEST(RtpPacketizerH265Test, + SingleNaluLastPacketReductionAppliesOnlyToLastFragment) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 200; + limits.last_packet_reduction_len = 5; + Buffer nalus[] = {GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/200), + GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/200), + GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/195)}; + Buffer frame = CreateFrame(nalus); + + RtpPacketizerH265 packetizer(frame, limits); + std::vector packets = FetchAllPackets(&packetizer); + + ASSERT_THAT(packets, SizeIs(3)); + EXPECT_THAT(packets[0].payload(), ElementsAreArray(nalus[0])); + EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[1])); + EXPECT_THAT(packets[2].payload(), ElementsAreArray(nalus[2])); +} + +TEST(RtpPacketizerH265Test, + SingleNaluFirstAndLastPacketReductionSumsForSinglePacket) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 200; + limits.first_packet_reduction_len = 20; + limits.last_packet_reduction_len = 30; + Buffer frame = CreateFrame({150}); + + RtpPacketizerH265 packetizer(frame, limits); + std::vector packets = FetchAllPackets(&packetizer); + + EXPECT_THAT(packets, SizeIs(1)); +} + +// Aggregation tests. 
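
Note: several of the aggregation tests below hard-code the expected AP PayloadHdr as the bytes 97 and 2. Those values follow directly from the packing in NextAggregatePacket(): NAL unit type kAp (48) shifted left by one, combined with the minimum nuh_layer_id (32) and the minimum 3-bit TID field (2) of the aggregated NAL units. The arithmetic, spelled out as a sketch:

#include <cstdint>

constexpr uint8_t kApNaluType = 48;  // H265::NaluType::kAp

constexpr uint8_t ApHeaderByte0(uint8_t min_layer_id) {
  return static_cast<uint8_t>((kApNaluType << 1) | (min_layer_id >> 5));
}
constexpr uint8_t ApHeaderByte1(uint8_t min_layer_id, uint8_t min_tid_field) {
  return static_cast<uint8_t>((min_layer_id << 3) | min_tid_field);
}

static_assert(ApHeaderByte0(32) == 97, "0b0110'0001");
static_assert(ApHeaderByte1(32, 2) == 2, "0b0000'0010");
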
+TEST(RtpPacketizerH265Test, ApRespectsNoPacketReduction) { + Buffer nalus[] = {GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/3), + GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/3), + GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/0x123)}; + Buffer frame = CreateFrame(nalus); + + RtpPacketizerH265 packetizer(frame, kNoLimits); + std::vector packets = FetchAllPackets(&packetizer); + + ASSERT_THAT(packets, SizeIs(1)); + auto payload = packets[0].payload(); + int type = H265::ParseNaluType(payload[0]); + EXPECT_EQ(payload.size(), kH265NalHeaderSizeBytes + + 3 * kH265LengthFieldSizeBytes + 3 + 3 + 0x123); + + EXPECT_EQ(type, H265::NaluType::kAp); + payload = payload.subview(kH265NalHeaderSizeBytes); + // 1st fragment. + EXPECT_THAT(payload.subview(0, kH265LengthFieldSizeBytes), + ElementsAre(0, 3)); // Size. + EXPECT_THAT(payload.subview(kH265LengthFieldSizeBytes, 3), + ElementsAreArray(nalus[0])); + payload = payload.subview(kH265LengthFieldSizeBytes + 3); + // 2nd fragment. + EXPECT_THAT(payload.subview(0, kH265LengthFieldSizeBytes), + ElementsAre(0, 3)); // Size. + EXPECT_THAT(payload.subview(kH265LengthFieldSizeBytes, 3), + ElementsAreArray(nalus[1])); + payload = payload.subview(kH265LengthFieldSizeBytes + 3); + // 3rd fragment. + EXPECT_THAT(payload.subview(0, kH265LengthFieldSizeBytes), + ElementsAre(0x1, 0x23)); // Size. + EXPECT_THAT(payload.subview(kH265LengthFieldSizeBytes), + ElementsAreArray(nalus[2])); +} + +TEST(RtpPacketizerH265Test, ApRespectsLayerIdAndTemporalId) { + // Generate 3 NALUs: NALU 1 with nuh_layer_id 2 and nuh_temporal_id_plus1 6, + // NALU 2 with nuh_layer_id 0 and nuh_temporal_id_plus1 1, + // NALU 3 with nuh_layer_id 32 and nuh_temporal_id_plus1 2, + // So in the AP packet header, nuh_layer_id should be 0 which is the lowest + // nuh_layer_id value of 3 NALUs and nuh_temporal_id_plus1 should be 1 which + // is the lowest nuh_temporal_id_plus1 value of 3 NALUs + Buffer nalus[] = {GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 2, + .nuh_temporal_id_plus1 = 6}, + /*size=*/3), + GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 0, + .nuh_temporal_id_plus1 = 1}, + /*size=*/3), + GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/0x123)}; + Buffer frame = CreateFrame(nalus); + + RtpPacketizerH265 packetizer(frame, kNoLimits); + std::vector packets = FetchAllPackets(&packetizer); + + ASSERT_THAT(packets, SizeIs(1)); + auto payload = packets[0].payload(); + uint8_t type = H265::ParseNaluType(payload[0]); + uint8_t layer_id = ((payload[0] & kH265LayerIDHMask) << 5) | + ((payload[1] & kH265LayerIDLMask) >> 3); + uint8_t temporal_id = payload[1] & kH265TIDMask; + EXPECT_EQ(payload.size(), kH265NalHeaderSizeBytes + + 3 * kH265LengthFieldSizeBytes + 3 + 3 + 0x123); + + EXPECT_EQ(type, H265::NaluType::kAp); + EXPECT_EQ(layer_id, 0); + EXPECT_EQ(temporal_id, 1); + payload = payload.subview(kH265NalHeaderSizeBytes); + // 1st fragment. + EXPECT_THAT(payload.subview(0, kH265LengthFieldSizeBytes), ElementsAre(0, 3)); + EXPECT_THAT(payload.subview(kH265LengthFieldSizeBytes, 3), + ElementsAreArray(nalus[0])); + payload = payload.subview(kH265LengthFieldSizeBytes + 3); + // 2nd fragment. 
+ EXPECT_THAT(payload.subview(0, kH265LengthFieldSizeBytes), ElementsAre(0, 3)); + EXPECT_THAT(payload.subview(kH265LengthFieldSizeBytes, 3), + ElementsAreArray(nalus[1])); + payload = payload.subview(kH265LengthFieldSizeBytes + 3); + // 3rd fragment. + EXPECT_THAT(payload.subview(0, kH265LengthFieldSizeBytes), + ElementsAre(0x1, 0x23)); + EXPECT_THAT(payload.subview(kH265LengthFieldSizeBytes), + ElementsAreArray(nalus[2])); +} + +TEST(RtpPacketizerH265Test, ApRespectsFirstPacketReduction) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 1000; + limits.first_packet_reduction_len = 100; + const size_t kFirstFragmentSize = + limits.max_payload_len - limits.first_packet_reduction_len; + Buffer nalus[] = {GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/kFirstFragmentSize), + GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/3), + GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/3)}; + Buffer frame = CreateFrame(nalus); + + RtpPacketizerH265 packetizer(frame, limits); + std::vector packets = FetchAllPackets(&packetizer); + + ASSERT_THAT(packets, SizeIs(2)); + // Expect 1st packet is single nalu. + EXPECT_THAT(packets[0].payload(), ElementsAreArray(nalus[0])); + // Expect 2nd packet is aggregate of last two fragments. + // The size of H265 nal_unit_header is 2 bytes, according to 7.3.1.2 + // in H265 spec. Aggregation packet type is 48, nuh_layer_id is 32 and + // nuh_temporal_id_plus1 is 2, so the nal_unit_header should be "01100001 + // 00000010", which is 97 and 2. + EXPECT_THAT(packets[1].payload(), + ElementsAre(97, 2, // + 0, 3, nalus[1][0], nalus[1][1], nalus[1][2], // + 0, 3, nalus[2][0], nalus[2][1], nalus[2][2])); +} + +TEST(RtpPacketizerH265Test, ApRespectsLastPacketReduction) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 1000; + limits.last_packet_reduction_len = 100; + const size_t kLastFragmentSize = + limits.max_payload_len - limits.last_packet_reduction_len; + Buffer nalus[] = {GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/3), + GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/3), + GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/kLastFragmentSize)}; + Buffer frame = CreateFrame(nalus); + + RtpPacketizerH265 packetizer(frame, limits); + std::vector packets = FetchAllPackets(&packetizer); + + ASSERT_THAT(packets, SizeIs(2)); + // Expect 1st packet is aggregate of 1st two fragments. + EXPECT_THAT(packets[0].payload(), + ElementsAre(97, 2, // + 0, 3, nalus[0][0], nalus[0][1], nalus[0][2], // + 0, 3, nalus[1][0], nalus[1][1], nalus[1][2])); + // Expect 2nd packet is single nalu. 
+ EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[2])); +} + +TEST(RtpPacketizerH265Test, TooSmallForApHeaders) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 1000; + const size_t kLastFragmentSize = + limits.max_payload_len - 3 * kH265LengthFieldSizeBytes - 4; + Buffer nalus[] = {GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/3), + GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/3), + GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + /*size=*/kLastFragmentSize)}; + Buffer frame = CreateFrame(nalus); + + RtpPacketizerH265 packetizer(frame, limits); + std::vector packets = FetchAllPackets(&packetizer); + + ASSERT_THAT(packets, SizeIs(2)); + // Expect 1st packet is aggregate of 1st two fragments. + EXPECT_THAT(packets[0].payload(), + ElementsAre(97, 2, // + 0, 3, nalus[0][0], nalus[0][1], nalus[0][2], // + 0, 3, nalus[1][0], nalus[1][1], nalus[1][2])); + // Expect 2nd packet is single nalu. + EXPECT_THAT(packets[1].payload(), ElementsAreArray(nalus[2])); +} + +TEST(RtpPacketizerH265Test, LastFragmentFitsInSingleButNotLastPacket) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 1178; + limits.first_packet_reduction_len = 0; + limits.last_packet_reduction_len = 20; + limits.single_packet_reduction_len = 20; + // Actual sizes, which triggered this bug. + Buffer frame = CreateFrame({20, 8, 18, 1161}); + + RtpPacketizerH265 packetizer(frame, limits); + std::vector packets = FetchAllPackets(&packetizer); + + // Last packet has to be of correct size. + // Incorrect implementation might miss this constraint and not split the last + // fragment in two packets. + EXPECT_LE(static_cast(packets.back().payload_size()), + limits.max_payload_len - limits.last_packet_reduction_len); +} + +// Splits frame with payload size `frame_payload_size` without fragmentation, +// Returns sizes of the payloads excluding FU headers. +std::vector TestFu(size_t frame_payload_size, + const RtpPacketizer::PayloadSizeLimits& limits) { + Buffer nalu[] = { + GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + kH265NalHeaderSizeBytes + frame_payload_size)}; + Buffer frame = CreateFrame(nalu); + + RtpPacketizerH265 packetizer(frame, limits); + std::vector packets = FetchAllPackets(&packetizer); + + EXPECT_GE(packets.size(), 2u); // Single packet indicates it is not FU. + std::vector fu_header; + std::vector payload_sizes; + + for (const RtpPacketToSend& packet : packets) { + auto payload = packet.payload(); + EXPECT_GT(payload.size(), kFuHeaderSizeBytes); + // FU header is after the 2-bytes size PayloadHdr according to 4.4.3 in spec + fu_header.push_back(payload[2]); + payload_sizes.push_back(payload.size() - kFuHeaderSizeBytes); + } + + EXPECT_TRUE(fu_header.front() & kH265SBitMask); + EXPECT_TRUE(fu_header.back() & kH265EBitMask); + // Clear S and E bits before testing all are duplicating same original header. + fu_header.front() &= ~kH265SBitMask; + fu_header.back() &= ~kH265EBitMask; + uint8_t nalu_type = (nalu[0][0] & kH265TypeMask) >> 1; + EXPECT_THAT(fu_header, Each(Eq(nalu_type))); + + return payload_sizes; +} + +// Fragmentation tests. 
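In the FU tests that follow, every FU packet spends 3 bytes on headers: the 2-byte PayloadHdr plus the 1-byte FU header (RFC 7798 section 4.4.3). When no first/last/single packet reduction is configured, the expected payload sizes amount to an even split of the NAL unit payload over the minimum number of packets, with the remainder spread over the trailing packets. A small sketch of that arithmetic; this is a hypothetical helper that reproduces the expectations of the no-reduction tests, not the packetizer's actual splitting code, which additionally honours the reduction limits:

#include <vector>

// Sketch: even split of `payload_size` bytes over FU packets that each
// lose 3 bytes to the PayloadHdr + FU header.
std::vector<int> EvenFuSplit(int payload_size, int max_payload_len) {
  const int capacity = max_payload_len - 3;  // Room left for payload.
  const int num_packets = (payload_size + capacity - 1) / capacity;
  const int base = payload_size / num_packets;
  const int remainder = payload_size % num_packets;
  std::vector<int> sizes(num_packets, base);
  // The last `remainder` packets carry one extra byte each.
  for (int i = num_packets - remainder; i < num_packets; ++i) {
    ++sizes[i];
  }
  return sizes;
}

For example, EvenFuSplit(1200, 1200) gives {600, 600} (FuOddSize), EvenFuSplit(1201, 1200) gives {600, 601} (FuEvenSize), and EvenFuSplit(10123, 1448) gives five packets of 1265 followed by three of 1266 (FuRounding).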
+TEST(RtpPacketizerH265Test, FuOddSize) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 1200; + EXPECT_THAT(TestFu(1200, limits), ElementsAre(600, 600)); +} + +TEST(RtpPacketizerH265Test, FuWithFirstPacketReduction) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 1200; + limits.first_packet_reduction_len = 4; + limits.single_packet_reduction_len = 4; + EXPECT_THAT(TestFu(1198, limits), ElementsAre(597, 601)); +} + +TEST(RtpPacketizerH265Test, FuWithLastPacketReduction) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 1200; + limits.last_packet_reduction_len = 4; + limits.single_packet_reduction_len = 4; + EXPECT_THAT(TestFu(1198, limits), ElementsAre(601, 597)); +} + +TEST(RtpPacketizerH265Test, FuWithSinglePacketReduction) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 1199; + limits.single_packet_reduction_len = 200; + EXPECT_THAT(TestFu(1000, limits), ElementsAre(500, 500)); +} + +TEST(RtpPacketizerH265Test, FuEvenSize) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 1200; + EXPECT_THAT(TestFu(1201, limits), ElementsAre(600, 601)); +} + +TEST(RtpPacketizerH265Test, FuRounding) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 1448; + EXPECT_THAT(TestFu(10123, limits), + ElementsAre(1265, 1265, 1265, 1265, 1265, 1266, 1266, 1266)); +} + +TEST(RtpPacketizerH265Test, FuBig) { + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 1200; + // Generate 10 full sized packets, leave room for FU headers. + EXPECT_THAT( + TestFu(10 * (1200 - kFuHeaderSizeBytes), limits), + ElementsAre(1197, 1197, 1197, 1197, 1197, 1197, 1197, 1197, 1197, 1197)); +} + +struct PacketInfo { + bool first_fragment = false; + bool last_fragment = false; + bool aggregated = false; + int nalu_index = 0; + int nalu_number = 0; + int payload_size = 0; + int start_offset = 0; +}; + +struct MixedApFuTestParams { + std::vector nalus; + int expect_packetsSize = 0; + std::vector expected_packets; +}; + +class RtpPacketizerH265ParametrizedTest + : public ::testing::TestWithParam {}; + +// Fragmentation + aggregation mixed testing. 
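The parametrized cases below mix both modes with max_payload_len = 100. The expected FU payload sizes can be read off with the same arithmetic as the EvenFuSplit sketch above, after subtracting the 2-byte NAL unit header that GenerateNalUnit prepends; for the first case:

// Using the EvenFuSplit sketch above (hypothetical helper):
//   EvenFuSplit(140 - 2, 100) == {69, 69}  // matches payload_size = 69 below
//   EvenFuSplit(160 - 2, 100) == {79, 79}  // matches payload_size = 79 below
// The two 20-byte NAL units in between fit together in a single AP packet.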
+TEST_P(RtpPacketizerH265ParametrizedTest, MixedApFu) { + RtpPacketizer::PayloadSizeLimits limits; + const MixedApFuTestParams params = GetParam(); + limits.max_payload_len = 100; + std::vector nalus; + nalus.reserve(params.nalus.size()); + + // Generate nalus according to size specified in paramters + for (size_t index = 0; index < params.nalus.size(); index++) { + nalus.push_back(GenerateNalUnit({.nal_unit_type = H265::NaluType::kIdrNLp, + .nuh_layer_id = 32, + .nuh_temporal_id_plus1 = 2}, + params.nalus[index])); + } + Buffer frame = CreateFrame(nalus); + + RtpPacketizerH265 packetizer(frame, limits); + std::vector packets = FetchAllPackets(&packetizer); + + ASSERT_THAT(packets, SizeIs(params.expect_packetsSize)); + for (int i = 0; i < params.expect_packetsSize; i++) { + PacketInfo expected_packet = params.expected_packets[i]; + if (expected_packet.aggregated) { + int type = H265::ParseNaluType(packets[i].payload()[0]); + EXPECT_THAT(type, H265::NaluType::kAp); + auto payload = packets[i].payload().subview(kH265NalHeaderSizeBytes); + int offset = 0; + // Generated AP packet header and payload align + for (int j = expected_packet.nalu_index; j < expected_packet.nalu_number; + j++) { + EXPECT_THAT(payload.subview(0, kH265LengthFieldSizeBytes), + ElementsAre(0, nalus[j].size())); + EXPECT_THAT(payload.subview(offset + kH265LengthFieldSizeBytes, + nalus[j].size()), + ElementsAreArray(nalus[j])); + offset += kH265LengthFieldSizeBytes + nalus[j].size(); + } + } else { + uint8_t fu_header = 0; + fu_header |= (expected_packet.first_fragment ? kH265SBitMask : 0); + fu_header |= (expected_packet.last_fragment ? kH265EBitMask : 0); + fu_header |= H265::NaluType::kIdrNLp; + EXPECT_THAT(packets[i].payload().subview(0, kFuHeaderSizeBytes), + ElementsAre(99, 2, fu_header)); + EXPECT_THAT(packets[i].payload().subview(kFuHeaderSizeBytes), + ElementsAreArray(nalus[expected_packet.nalu_index].data() + + kH265NalHeaderSizeBytes + + expected_packet.start_offset, + expected_packet.payload_size)); + } + } +} + +INSTANTIATE_TEST_SUITE_P( + RtpPacketizerH265Test, + RtpPacketizerH265ParametrizedTest, + testing::Values( + // FU + AP + FU. + // GenerateNalUnit will include 2 bytes nalu header, for FU packet split + // calculation, this 2-byte nalu header length should be excluded. 
+ MixedApFuTestParams{.nalus = {140, 20, 20, 160}, + .expect_packetsSize = 5, + .expected_packets = {{.first_fragment = true, + .nalu_index = 0, + .payload_size = 69, + .start_offset = 0}, + {.last_fragment = true, + .nalu_index = 0, + .payload_size = 69, + .start_offset = 69}, + {.aggregated = true, + .nalu_index = 1, + .nalu_number = 2}, + {.first_fragment = true, + .nalu_index = 3, + .payload_size = 79, + .start_offset = 0}, + {.last_fragment = true, + .nalu_index = 3, + .payload_size = 79, + .start_offset = 79}}}, + // AP + FU + AP + MixedApFuTestParams{ + .nalus = {20, 20, 160, 30, 30}, + .expect_packetsSize = 4, + .expected_packets = { + {.aggregated = true, .nalu_index = 0, .nalu_number = 2}, + {.first_fragment = true, + .nalu_index = 2, + .payload_size = 79, + .start_offset = 0}, + {.last_fragment = true, + .nalu_index = 2, + .payload_size = 79, + .start_offset = 79}, + {.aggregated = true, .nalu_index = 3, .nalu_number = 2}}})); + +} // namespace +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc index 3f9e093ff0..5bd844f3d4 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl.cc +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.cc @@ -12,22 +12,35 @@ #include -#include #include #include -#include -#include +#include #include +#include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" +#include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/rtp_headers.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/video_bitrate_allocation.h" +#include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/include/report_block_data.h" +#include "modules/rtp_rtcp/include/rtp_rtcp.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/ntp_time_util.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" +#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" #include "modules/rtp_rtcp/source/rtcp_sender.h" +#include "modules/rtp_rtcp/source/rtp_packet_history.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" -#include "modules/rtp_rtcp/source/time_util.h" +#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" #include "system_wrappers/include/ntp_time.h" #ifdef _WIN32 @@ -43,39 +56,42 @@ constexpr TimeDelta kDefaultExpectedRetransmissionTime = TimeDelta::Millis(125); } // namespace ModuleRtpRtcpImpl::RtpSenderContext::RtpSenderContext( + const Environment& env, const RtpRtcpInterface::Configuration& config) - : packet_history(config.clock, RtpPacketHistory::PaddingMode::kPriority), + : packet_history(env, RtpPacketHistory::PaddingMode::kRecentLargePacket), sequencer_(config.local_media_ssrc, config.rtx_send_ssrc, /*require_marker_before_media_padding=*/!config.audio, - config.clock), - packet_sender(config, &packet_history), + &env.clock()), + packet_sender(env, config, &packet_history), non_paced_sender(&packet_sender, &sequencer_), packet_generator( + env, config, &packet_history, config.paced_sender ? 
config.paced_sender : &non_paced_sender) {} -std::unique_ptr RtpRtcp::DEPRECATED_Create( - const Configuration& configuration) { - RTC_DCHECK(configuration.clock); - return std::make_unique(configuration); +std::unique_ptr RtpRtcp::Create(const Environment& env, + const Configuration& configuration) { + return std::make_unique(env, configuration); } -ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration) - : rtcp_sender_( +ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Environment& env, + const Configuration& configuration) + : env_(env), + rtcp_sender_( + env_, RTCPSender::Configuration::FromRtpRtcpConfiguration(configuration)), - rtcp_receiver_(configuration, this), - clock_(configuration.clock), - last_bitrate_process_time_(clock_->TimeInMilliseconds()), - last_rtt_process_time_(clock_->TimeInMilliseconds()), + rtcp_receiver_(env_, configuration, this), + last_bitrate_process_time_(env_.clock().TimeInMilliseconds()), + last_rtt_process_time_(env_.clock().TimeInMilliseconds()), packet_overhead_(28), // IPV4 UDP. nack_last_time_sent_full_ms_(0), nack_last_seq_number_sent_(0), rtt_stats_(configuration.rtt_stats), rtt_ms_(0) { if (!configuration.receiver_only) { - rtp_sender_ = std::make_unique(configuration); + rtp_sender_ = std::make_unique(env, configuration); // Make sure rtcp sender use same timestamp offset as rtp sender. rtcp_sender_.SetTimestampOffset( rtp_sender_->packet_generator.TimestampOffset()); @@ -92,7 +108,7 @@ ModuleRtpRtcpImpl::~ModuleRtpRtcpImpl() = default; // Process any pending tasks such as timeouts (non time critical events). void ModuleRtpRtcpImpl::Process() { - const int64_t now = clock_->TimeInMilliseconds(); + const int64_t now = env_.clock().TimeInMilliseconds(); if (rtp_sender_) { if (now >= last_bitrate_process_time_ + kRtpRtcpBitrateProcessTimeMs) { @@ -142,7 +158,7 @@ void ModuleRtpRtcpImpl::Process() { } else { // Report rtt from receiver. if (process_rtt && rtt_stats_ != nullptr) { - absl::optional rtt = rtcp_receiver_.GetAndResetXrRrRtt(); + std::optional rtt = rtcp_receiver_.GetAndResetXrRrRtt(); if (rtt.has_value()) { rtt_stats_->OnRttUpdate(rtt->ms()); } @@ -182,19 +198,19 @@ void ModuleRtpRtcpImpl::SetRtxSendPayloadType(int payload_type, associated_payload_type); } -absl::optional ModuleRtpRtcpImpl::RtxSsrc() const { - return rtp_sender_ ? rtp_sender_->packet_generator.RtxSsrc() : absl::nullopt; +std::optional ModuleRtpRtcpImpl::RtxSsrc() const { + return rtp_sender_ ? 
rtp_sender_->packet_generator.RtxSsrc() : std::nullopt; } -absl::optional ModuleRtpRtcpImpl::FlexfecSsrc() const { +std::optional ModuleRtpRtcpImpl::FlexfecSsrc() const { if (rtp_sender_) { return rtp_sender_->packet_generator.FlexfecSsrc(); } - return absl::nullopt; + return std::nullopt; } void ModuleRtpRtcpImpl::IncomingRtcpPacket( - rtc::ArrayView rtcp_packet) { + ArrayView rtcp_packet) { rtcp_receiver_.IncomingPacket(rtcp_packet); } @@ -203,7 +219,8 @@ void ModuleRtpRtcpImpl::RegisterSendPayloadFrequency(int payload_type, rtcp_sender_.SetRtpClockRate(payload_type, payload_frequency); } -int32_t ModuleRtpRtcpImpl::DeRegisterSendPayload(const int8_t payload_type) { +int32_t ModuleRtpRtcpImpl::DeRegisterSendPayload( + const int8_t /* payload_type */) { return 0; } @@ -282,11 +299,11 @@ RTCPSender::FeedbackState ModuleRtpRtcpImpl::GetFeedbackState() { } state.receiver = &rtcp_receiver_; - if (absl::optional last_sr = + if (std::optional last_sr = rtcp_receiver_.GetSenderReportStats(); last_sr.has_value()) { - state.remote_sr = CompactNtp(last_sr->last_remote_timestamp); - state.last_rr = last_sr->last_arrival_timestamp; + state.remote_sr = CompactNtp(last_sr->last_remote_ntp_timestamp); + state.last_rr = last_sr->last_arrival_ntp_timestamp; } state.last_xr_rtis = rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo(); @@ -296,7 +313,6 @@ RTCPSender::FeedbackState ModuleRtpRtcpImpl::GetFeedbackState() { int32_t ModuleRtpRtcpImpl::SetSendingStatus(const bool sending) { if (rtcp_sender_.Sending() != sending) { - // Sends RTCP BYE when going from true to false rtcp_sender_.SetSendingStatus(GetFeedbackState(), sending); } return 0; @@ -334,11 +350,11 @@ bool ModuleRtpRtcpImpl::OnSendingRtpFrame(uint32_t timestamp, // TODO(bugs.webrtc.org/12873): Migrate this method and it's users to use // optional Timestamps. 
- absl::optional capture_time; + std::optional capture_time; if (capture_time_ms > 0) { capture_time = Timestamp::Millis(capture_time_ms); } - absl::optional payload_type_optional; + std::optional payload_type_optional; if (payload_type >= 0) payload_type_optional = payload_type; rtcp_sender_.SetLastRtpTime(timestamp, capture_time, payload_type_optional); @@ -387,13 +403,13 @@ ModuleRtpRtcpImpl::FetchFecPackets() { } void ModuleRtpRtcpImpl::OnAbortedRetransmissions( - rtc::ArrayView sequence_numbers) { + ArrayView /* sequence_numbers */) { RTC_DCHECK_NOTREACHED() << "Stream flushing not supported with legacy rtp modules."; } void ModuleRtpRtcpImpl::OnPacketsAcknowledged( - rtc::ArrayView sequence_numbers) { + ArrayView sequence_numbers) { RTC_DCHECK(rtp_sender_); rtp_sender_->packet_history.CullAcknowledgedPackets(sequence_numbers); } @@ -419,7 +435,7 @@ ModuleRtpRtcpImpl::GeneratePadding(size_t target_size_bytes) { std::vector ModuleRtpRtcpImpl::GetSentRtpPacketInfos( - rtc::ArrayView sequence_numbers) const { + ArrayView sequence_numbers) const { RTC_DCHECK(rtp_sender_); return rtp_sender_->packet_sender.GetSentRtpPacketInfos(sequence_numbers); } @@ -463,8 +479,8 @@ int32_t ModuleRtpRtcpImpl::SetCNAME(absl::string_view c_name) { return rtcp_sender_.SetCNAME(c_name); } -absl::optional ModuleRtpRtcpImpl::LastRtt() const { - absl::optional rtt = rtcp_receiver_.LastRtt(); +std::optional ModuleRtpRtcpImpl::LastRtt() const { + std::optional rtt = rtcp_receiver_.LastRtt(); if (!rtt.has_value()) { MutexLock lock(&mutex_rtt_); if (rtt_ms_ > 0) { @@ -481,7 +497,7 @@ TimeDelta ModuleRtpRtcpImpl::ExpectedRetransmissionTime() const { } // No rtt available (`kRtpRtcpRttProcessTimeMs` not yet passed?), so try to // poll avg_rtt_ms directly from rtcp receiver. - if (absl::optional rtt = rtcp_receiver_.AverageRtt()) { + if (std::optional rtt = rtcp_receiver_.AverageRtt()) { return *rtt; } return kDefaultExpectedRetransmissionTime; @@ -505,15 +521,15 @@ std::vector ModuleRtpRtcpImpl::GetLatestReportBlockData() return rtcp_receiver_.GetLatestReportBlockData(); } -absl::optional +std::optional ModuleRtpRtcpImpl::GetSenderReportStats() const { return rtcp_receiver_.GetSenderReportStats(); } -absl::optional +std::optional ModuleRtpRtcpImpl::GetNonSenderRttStats() const { // This is not implemented for this legacy class. - return absl::nullopt; + return std::nullopt; } // (REMB) Receiver Estimated Max Bitrate. @@ -551,7 +567,7 @@ int32_t ModuleRtpRtcpImpl::SendNACK(const uint16_t* nack_list, const uint16_t size) { uint16_t nack_length = size; uint16_t start_id = 0; - int64_t now_ms = clock_->TimeInMilliseconds(); + int64_t now_ms = env_.clock().TimeInMilliseconds(); if (TimeToSendFullNackList(now_ms)) { nack_last_time_sent_full_ms_ = now_ms; } else { @@ -591,7 +607,7 @@ bool ModuleRtpRtcpImpl::TimeToSendFullNackList(int64_t now) const { // Use RTT from RtcpRttStats class if provided. int64_t rtt = rtt_ms(); if (rtt == 0) { - if (absl::optional average_rtt = rtcp_receiver_.AverageRtt()) { + if (std::optional average_rtt = rtcp_receiver_.AverageRtt()) { rtt = average_rtt->ms(); } } @@ -664,7 +680,7 @@ void ModuleRtpRtcpImpl::OnReceivedNack( // Use RTT from RtcpRttStats class if provided. 
int64_t rtt = rtt_ms(); if (rtt == 0) { - if (absl::optional average_rtt = rtcp_receiver_.AverageRtt()) { + if (std::optional average_rtt = rtcp_receiver_.AverageRtt()) { rtt = average_rtt->ms(); } } @@ -672,10 +688,10 @@ void ModuleRtpRtcpImpl::OnReceivedNack( } void ModuleRtpRtcpImpl::OnReceivedRtcpReportBlocks( - rtc::ArrayView report_blocks) { + ArrayView report_blocks) { if (rtp_sender_) { uint32_t ssrc = SSRC(); - absl::optional rtx_ssrc; + std::optional rtx_ssrc; if (rtp_sender_->packet_generator.RtxStatus() != kRtxOff) { rtx_ssrc = rtp_sender_->packet_generator.RtxSsrc(); } diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/modules/rtp_rtcp/source/rtp_rtcp_impl.h index d298081432..32aba69a9a 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl.h +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl.h @@ -15,31 +15,38 @@ #include #include -#include -#include +#include #include +#include "absl/base/attributes.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/environment/environment.h" #include "api/rtp_headers.h" +#include "api/units/time_delta.h" #include "api/video/video_bitrate_allocation.h" #include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" // RTCPPacketType #include "modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.h" #include "modules/rtp_rtcp/source/packet_sequencer.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" #include "modules/rtp_rtcp/source/rtcp_receiver.h" #include "modules/rtp_rtcp/source/rtcp_sender.h" #include "modules/rtp_rtcp/source/rtp_packet_history.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/rtp_sender.h" +#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" +#include "rtc_base/checks.h" #include "rtc_base/gtest_prod_util.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { -class Clock; struct PacedPacketInfo; struct RTPVideoHeader; @@ -50,8 +57,8 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl public RTCPReceiver::ModuleRtpRtcp { #pragma clang diagnostic pop public: - explicit ModuleRtpRtcpImpl( - const RtpRtcpInterface::Configuration& configuration); + ModuleRtpRtcpImpl(const Environment& env, + const RtpRtcpInterface::Configuration& configuration); ~ModuleRtpRtcpImpl() override; // Process any pending tasks such as timeouts. @@ -60,7 +67,7 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl // Receiver part. // Called when we receive an RTCP packet. 
- void IncomingRtcpPacket(rtc::ArrayView packet) override; + void IncomingRtcpPacket(ArrayView packet) override; void SetRemoteSSRC(uint32_t ssrc) override; void SetLocalSsrc(uint32_t ssrc) override; @@ -96,7 +103,7 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl RtpState GetRtpState() const override; RtpState GetRtxState() const override; - void SetNonSenderRttMeasurement(bool enabled) override {} + void SetNonSenderRttMeasurement(bool /* enabled */) override {} uint32_t SSRC() const override { return rtcp_sender_.SSRC(); } @@ -106,12 +113,12 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl void SetRtxSendStatus(int mode) override; int RtxSendStatus() const override; - absl::optional RtxSsrc() const override; + std::optional RtxSsrc() const override; void SetRtxSendPayloadType(int payload_type, int associated_payload_type) override; - absl::optional FlexfecSsrc() const override; + std::optional FlexfecSsrc() const override; // Sends kRtcpByeCode when going from true to false. int32_t SetSendingStatus(bool sending) override; @@ -135,6 +142,20 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl bool TrySendPacket(std::unique_ptr packet, const PacedPacketInfo& pacing_info) override; + bool CanSendPacket(const RtpPacketToSend& /* packet */) const override { + RTC_DCHECK_NOTREACHED() << "Not implemented"; + return false; + } + + void AssignSequenceNumber(RtpPacketToSend& /* packet */) override { + RTC_DCHECK_NOTREACHED() << "Not implemented"; + } + + void SendPacket(std::unique_ptr /* packet */, + const PacedPacketInfo& /* pacing_info */) override { + RTC_DCHECK_NOTREACHED() << "Not implemented"; + } + void OnBatchComplete() override {} void SetFecProtectionParams(const FecProtectionParams& delta_params, @@ -143,16 +164,16 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl std::vector> FetchFecPackets() override; void OnAbortedRetransmissions( - rtc::ArrayView sequence_numbers) override; + ArrayView sequence_numbers) override; void OnPacketsAcknowledged( - rtc::ArrayView sequence_numbers) override; + ArrayView sequence_numbers) override; std::vector> GeneratePadding( size_t target_size_bytes) override; std::vector GetSentRtpPacketInfos( - rtc::ArrayView sequence_numbers) const override; + ArrayView sequence_numbers) const override; size_t ExpectedPerPacketOverhead() const override; @@ -170,7 +191,7 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl int32_t SetCNAME(absl::string_view c_name) override; // Get RoundTripTime. - absl::optional LastRtt() const override; + std::optional LastRtt() const override; TimeDelta ExpectedRetransmissionTime() const override; @@ -187,10 +208,10 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl // Within this list, the `ReportBlockData::source_ssrc()`, which is the SSRC // of the corresponding outbound RTP stream, is unique. std::vector GetLatestReportBlockData() const override; - absl::optional GetSenderReportStats() const override; + std::optional GetSenderReportStats() const override; // Round trip time statistics computed from the XR block contained in the last // report. - absl::optional GetNonSenderRttStats() const override; + std::optional GetNonSenderRttStats() const override; // (REMB) Receiver Estimated Max Bitrate. 
void SetRemb(int64_t bitrate_bps, std::vector ssrcs) override; @@ -228,7 +249,7 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl void OnReceivedNack( const std::vector& nack_sequence_numbers) override; void OnReceivedRtcpReportBlocks( - rtc::ArrayView report_blocks) override; + ArrayView report_blocks) override; void OnRequestSendReport() override; void SetVideoBitrateAllocation( @@ -257,14 +278,13 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl rtp_sender_->packet_sender.SetMediaHasBeenSent(media_has_been_sent); } - Clock* clock() const { return clock_; } - private: FRIEND_TEST_ALL_PREFIXES(RtpRtcpImplTest, Rtt); FRIEND_TEST_ALL_PREFIXES(RtpRtcpImplTest, RttForReceiverOnly); struct RtpSenderContext { - explicit RtpSenderContext(const RtpRtcpInterface::Configuration& config); + RtpSenderContext(const Environment& env, + const RtpRtcpInterface::Configuration& config); // Storage of packets, for retransmissions and padding, if applicable. RtpPacketHistory packet_history; // Handles sequence number assignment and padding timestamp generation. @@ -290,13 +310,12 @@ class ABSL_DEPRECATED("") ModuleRtpRtcpImpl // Returns current Receiver Reference Time Report (RTTR) status. bool RtcpXrRrtrStatus() const; + const Environment env_; std::unique_ptr rtp_sender_; RTCPSender rtcp_sender_; RTCPReceiver rtcp_receiver_; - Clock* const clock_; - int64_t last_bitrate_process_time_; int64_t last_rtt_process_time_; uint16_t packet_overhead_; diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc index 800ec77d3e..f71ca3c1df 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.cc @@ -12,25 +12,41 @@ #include -#include #include +#include #include -#include -#include +#include #include +#include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/rtp_headers.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "modules/rtp_rtcp/source/rtcp_packet/dlrr.h" +#include "api/video/video_bitrate_allocation.h" +#include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/include/report_block_data.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/ntp_time_util.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" +#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" +#include "modules/rtp_rtcp/source/rtcp_receiver.h" +#include "modules/rtp_rtcp/source/rtcp_sender.h" #include "modules/rtp_rtcp/source/rtp_packet_history.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" -#include "modules/rtp_rtcp/source/time_util.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" +#include "modules/rtp_rtcp/source/rtp_sender.h" +#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/time_utils.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/task_utils/repeating_task.h" #include "system_wrappers/include/ntp_time.h" #ifdef _WIN32 @@ -51,41 +67,37 @@ RTCPSender::Configuration AddRtcpSendEvaluationCallback( return config; } -RtpPacketHistory::PaddingMode GetPaddingMode( - const FieldTrialsView* field_trials) { - if (field_trials && - 
field_trials->IsEnabled("WebRTC-PaddingMode-RecentLargePacket")) { - return RtpPacketHistory::PaddingMode::kRecentLargePacket; - } - return RtpPacketHistory::PaddingMode::kPriority; -} - } // namespace ModuleRtpRtcpImpl2::RtpSenderContext::RtpSenderContext( + const Environment& env, TaskQueueBase& worker_queue, const RtpRtcpInterface::Configuration& config) - : packet_history(config.clock, GetPaddingMode(config.field_trials)), + : packet_history(env, RtpPacketHistory::PaddingMode::kRecentLargePacket), sequencer(config.local_media_ssrc, config.rtx_send_ssrc, /*require_marker_before_media_padding=*/!config.audio, - config.clock), - packet_sender(config, &packet_history), + &env.clock()), + packet_sender(env, config, &packet_history), non_paced_sender(worker_queue, &packet_sender, &sequencer), packet_generator( + env, config, &packet_history, config.paced_sender ? config.paced_sender : &non_paced_sender) {} -ModuleRtpRtcpImpl2::ModuleRtpRtcpImpl2(const Configuration& configuration) - : worker_queue_(TaskQueueBase::Current()), - rtcp_sender_(AddRtcpSendEvaluationCallback( - RTCPSender::Configuration::FromRtpRtcpConfiguration(configuration), - [this](TimeDelta duration) { - ScheduleRtcpSendEvaluation(duration); - })), - rtcp_receiver_(configuration, this), - clock_(configuration.clock), +ModuleRtpRtcpImpl2::ModuleRtpRtcpImpl2(const Environment& env, + const Configuration& configuration) + : env_(env), + worker_queue_(TaskQueueBase::Current()), + rtcp_sender_(env_, + AddRtcpSendEvaluationCallback( + RTCPSender::Configuration::FromRtpRtcpConfiguration( + configuration), + [this](TimeDelta duration) { + ScheduleRtcpSendEvaluation(duration); + })), + rtcp_receiver_(env_, configuration, this), packet_overhead_(28), // IPV4 UDP. nack_last_time_sent_full_ms_(0), nack_last_seq_number_sent_(0), @@ -95,7 +107,7 @@ ModuleRtpRtcpImpl2::ModuleRtpRtcpImpl2(const Configuration& configuration) rtcp_thread_checker_.Detach(); if (!configuration.receiver_only) { rtp_sender_ = - std::make_unique(*worker_queue_, configuration); + std::make_unique(env_, *worker_queue_, configuration); rtp_sender_->sequencing_checker.Detach(); // Make sure rtcp sender use same timestamp offset as rtp sender. rtcp_sender_.SetTimestampOffset( @@ -121,14 +133,6 @@ ModuleRtpRtcpImpl2::~ModuleRtpRtcpImpl2() { rtt_update_task_.Stop(); } -// static -std::unique_ptr ModuleRtpRtcpImpl2::Create( - const Configuration& configuration) { - RTC_DCHECK(configuration.clock); - RTC_DCHECK(TaskQueueBase::Current()); - return std::make_unique(configuration); -} - void ModuleRtpRtcpImpl2::SetRtxSendStatus(int mode) { rtp_sender_->packet_generator.SetRtxStatus(mode); } @@ -143,19 +147,19 @@ void ModuleRtpRtcpImpl2::SetRtxSendPayloadType(int payload_type, associated_payload_type); } -absl::optional ModuleRtpRtcpImpl2::RtxSsrc() const { - return rtp_sender_ ? rtp_sender_->packet_generator.RtxSsrc() : absl::nullopt; +std::optional ModuleRtpRtcpImpl2::RtxSsrc() const { + return rtp_sender_ ? 
rtp_sender_->packet_generator.RtxSsrc() : std::nullopt; } -absl::optional ModuleRtpRtcpImpl2::FlexfecSsrc() const { +std::optional ModuleRtpRtcpImpl2::FlexfecSsrc() const { if (rtp_sender_) { return rtp_sender_->packet_generator.FlexfecSsrc(); } - return absl::nullopt; + return std::nullopt; } void ModuleRtpRtcpImpl2::IncomingRtcpPacket( - rtc::ArrayView rtcp_packet) { + ArrayView rtcp_packet) { RTC_DCHECK_RUN_ON(&rtcp_thread_checker_); rtcp_receiver_.IncomingPacket(rtcp_packet); } @@ -165,7 +169,8 @@ void ModuleRtpRtcpImpl2::RegisterSendPayloadFrequency(int payload_type, rtcp_sender_.SetRtpClockRate(payload_type, payload_frequency); } -int32_t ModuleRtpRtcpImpl2::DeRegisterSendPayload(const int8_t payload_type) { +int32_t ModuleRtpRtcpImpl2::DeRegisterSendPayload( + const int8_t /* payload_type */) { return 0; } @@ -260,15 +265,16 @@ RTCPSender::FeedbackState ModuleRtpRtcpImpl2::GetFeedbackState() { state.media_bytes_sent = rtp_stats.transmitted.payload_bytes + rtx_stats.transmitted.payload_bytes; state.send_bitrate = - rtp_sender_->packet_sender.GetSendRates(clock_->CurrentTime()).Sum(); + rtp_sender_->packet_sender.GetSendRates(env_.clock().CurrentTime()) + .Sum(); } state.receiver = &rtcp_receiver_; - if (absl::optional last_sr = + if (std::optional last_sr = rtcp_receiver_.GetSenderReportStats(); last_sr.has_value()) { - state.remote_sr = CompactNtp(last_sr->last_remote_timestamp); - state.last_rr = last_sr->last_arrival_timestamp; + state.remote_sr = CompactNtp(last_sr->last_remote_ntp_timestamp); + state.last_rr = last_sr->last_arrival_ntp_timestamp; } state.last_xr_rtis = rtcp_receiver_.ConsumeReceivedXrReferenceTimeInfo(); @@ -316,11 +322,11 @@ bool ModuleRtpRtcpImpl2::OnSendingRtpFrame(uint32_t timestamp, } // TODO(bugs.webrtc.org/12873): Migrate this method and it's users to use // optional Timestamps. - absl::optional capture_time; + std::optional capture_time; if (capture_time_ms > 0) { capture_time = Timestamp::Millis(capture_time_ms); } - absl::optional payload_type_optional; + std::optional payload_type_optional; if (payload_type >= 0) payload_type_optional = payload_type; @@ -340,27 +346,45 @@ bool ModuleRtpRtcpImpl2::OnSendingRtpFrame(uint32_t timestamp, return true; } -bool ModuleRtpRtcpImpl2::TrySendPacket(std::unique_ptr packet, - const PacedPacketInfo& pacing_info) { +bool ModuleRtpRtcpImpl2::CanSendPacket(const RtpPacketToSend& packet) const { RTC_DCHECK(rtp_sender_); RTC_DCHECK_RUN_ON(&rtp_sender_->sequencing_checker); if (!rtp_sender_->packet_generator.SendingMedia()) { return false; } - if (packet->packet_type() == RtpPacketMediaType::kPadding && - packet->Ssrc() == rtp_sender_->packet_generator.SSRC() && + if (packet.packet_type() == RtpPacketMediaType::kPadding && + packet.Ssrc() == rtp_sender_->packet_generator.SSRC() && !rtp_sender_->sequencer.CanSendPaddingOnMediaSsrc()) { // New media packet preempted this generated padding packet, discard it. 
return false; } + return true; +} + +void ModuleRtpRtcpImpl2::AssignSequenceNumber(RtpPacketToSend& packet) { + RTC_DCHECK_RUN_ON(&rtp_sender_->sequencing_checker); bool is_flexfec = - packet->packet_type() == RtpPacketMediaType::kForwardErrorCorrection && - packet->Ssrc() == rtp_sender_->packet_generator.FlexfecSsrc(); + packet.packet_type() == RtpPacketMediaType::kForwardErrorCorrection && + packet.Ssrc() == rtp_sender_->packet_generator.FlexfecSsrc(); if (!is_flexfec) { - rtp_sender_->sequencer.Sequence(*packet); + rtp_sender_->sequencer.Sequence(packet); } +} +void ModuleRtpRtcpImpl2::SendPacket(std::unique_ptr packet, + const PacedPacketInfo& pacing_info) { + RTC_DCHECK_RUN_ON(&rtp_sender_->sequencing_checker); + RTC_DCHECK(CanSendPacket(*packet)); rtp_sender_->packet_sender.SendPacket(std::move(packet), pacing_info); +} + +bool ModuleRtpRtcpImpl2::TrySendPacket(std::unique_ptr packet, + const PacedPacketInfo& pacing_info) { + if (!packet || !CanSendPacket(*packet)) { + return false; + } + AssignSequenceNumber(*packet); + SendPacket(std::move(packet), pacing_info); return true; } @@ -385,14 +409,14 @@ ModuleRtpRtcpImpl2::FetchFecPackets() { } void ModuleRtpRtcpImpl2::OnAbortedRetransmissions( - rtc::ArrayView sequence_numbers) { + ArrayView sequence_numbers) { RTC_DCHECK(rtp_sender_); RTC_DCHECK_RUN_ON(&rtp_sender_->sequencing_checker); rtp_sender_->packet_sender.OnAbortedRetransmissions(sequence_numbers); } void ModuleRtpRtcpImpl2::OnPacketsAcknowledged( - rtc::ArrayView sequence_numbers) { + ArrayView sequence_numbers) { RTC_DCHECK(rtp_sender_); rtp_sender_->packet_history.CullAcknowledgedPackets(sequence_numbers); } @@ -419,7 +443,7 @@ ModuleRtpRtcpImpl2::GeneratePadding(size_t target_size_bytes) { std::vector ModuleRtpRtcpImpl2::GetSentRtpPacketInfos( - rtc::ArrayView sequence_numbers) const { + ArrayView sequence_numbers) const { RTC_DCHECK(rtp_sender_); return rtp_sender_->packet_sender.GetSentRtpPacketInfos(sequence_numbers); } @@ -466,8 +490,8 @@ int32_t ModuleRtpRtcpImpl2::SetCNAME(absl::string_view c_name) { return rtcp_sender_.SetCNAME(c_name); } -absl::optional ModuleRtpRtcpImpl2::LastRtt() const { - absl::optional rtt = rtcp_receiver_.LastRtt(); +std::optional ModuleRtpRtcpImpl2::LastRtt() const { + std::optional rtt = rtcp_receiver_.LastRtt(); if (!rtt.has_value()) { MutexLock lock(&mutex_rtt_); if (rtt_ms_ > 0) { @@ -484,7 +508,7 @@ TimeDelta ModuleRtpRtcpImpl2::ExpectedRetransmissionTime() const { } // No rtt available (`kRttUpdateInterval` not yet passed?), so try to // poll avg_rtt_ms directly from rtcp receiver. 
- if (absl::optional rtt = rtcp_receiver_.AverageRtt()) { + if (std::optional rtt = rtcp_receiver_.AverageRtt()) { return *rtt; } return kDefaultExpectedRetransmissionTime; @@ -508,12 +532,12 @@ std::vector ModuleRtpRtcpImpl2::GetLatestReportBlockData() return rtcp_receiver_.GetLatestReportBlockData(); } -absl::optional +std::optional ModuleRtpRtcpImpl2::GetSenderReportStats() const { return rtcp_receiver_.GetSenderReportStats(); } -absl::optional +std::optional ModuleRtpRtcpImpl2::GetNonSenderRttStats() const { RTCPReceiver::NonSenderRttStats non_sender_rtt_stats = rtcp_receiver_.GetNonSenderRTT(); @@ -559,7 +583,7 @@ int32_t ModuleRtpRtcpImpl2::SendNACK(const uint16_t* nack_list, const uint16_t size) { uint16_t nack_length = size; uint16_t start_id = 0; - int64_t now_ms = clock_->TimeInMilliseconds(); + int64_t now_ms = env_.clock().TimeInMilliseconds(); if (TimeToSendFullNackList(now_ms)) { nack_last_time_sent_full_ms_ = now_ms; } else { @@ -599,7 +623,7 @@ bool ModuleRtpRtcpImpl2::TimeToSendFullNackList(int64_t now) const { // Use RTT from RtcpRttStats class if provided. int64_t rtt = rtt_ms(); if (rtt == 0) { - if (absl::optional average_rtt = rtcp_receiver_.AverageRtt()) { + if (std::optional average_rtt = rtcp_receiver_.AverageRtt()) { rtt = average_rtt->ms(); } } @@ -656,7 +680,7 @@ void ModuleRtpRtcpImpl2::SetLocalSsrc(uint32_t local_ssrc) { RtpSendRates ModuleRtpRtcpImpl2::GetSendRates() const { RTC_DCHECK_RUN_ON(&rtp_sender_->sequencing_checker); - return rtp_sender_->packet_sender.GetSendRates(clock_->CurrentTime()); + return rtp_sender_->packet_sender.GetSendRates(env_.clock().CurrentTime()); } void ModuleRtpRtcpImpl2::OnRequestSendReport() { @@ -674,7 +698,7 @@ void ModuleRtpRtcpImpl2::OnReceivedNack( // Use RTT from RtcpRttStats class if provided. 
int64_t rtt = rtt_ms(); if (rtt == 0) { - if (absl::optional average_rtt = rtcp_receiver_.AverageRtt()) { + if (std::optional average_rtt = rtcp_receiver_.AverageRtt()) { rtt = average_rtt->ms(); } } @@ -682,10 +706,10 @@ void ModuleRtpRtcpImpl2::OnReceivedNack( } void ModuleRtpRtcpImpl2::OnReceivedRtcpReportBlocks( - rtc::ArrayView report_blocks) { + ArrayView report_blocks) { if (rtp_sender_) { uint32_t ssrc = SSRC(); - absl::optional rtx_ssrc; + std::optional rtx_ssrc; if (rtp_sender_->packet_generator.RtxStatus() != kRtxOff) { rtx_ssrc = rtp_sender_->packet_generator.RtxSsrc(); } @@ -734,8 +758,8 @@ const RTPSender* ModuleRtpRtcpImpl2::RtpSender() const { void ModuleRtpRtcpImpl2::PeriodicUpdate() { RTC_DCHECK_RUN_ON(worker_queue_); - Timestamp check_since = clock_->CurrentTime() - kRttUpdateInterval; - absl::optional rtt = + Timestamp check_since = env_.clock().CurrentTime() - kRttUpdateInterval; + std::optional rtt = rtcp_receiver_.OnPeriodicRttUpdate(check_since, rtcp_sender_.Sending()); if (rtt) { if (rtt_stats_) { @@ -756,7 +780,7 @@ void ModuleRtpRtcpImpl2::MaybeSendRtcp() { void ModuleRtpRtcpImpl2::MaybeSendRtcpAtOrAfterTimestamp( Timestamp execution_time) { RTC_DCHECK_RUN_ON(worker_queue_); - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); if (now >= execution_time) { MaybeSendRtcp(); return; @@ -784,7 +808,7 @@ void ModuleRtpRtcpImpl2::ScheduleRtcpSendEvaluation(TimeDelta duration) { MaybeSendRtcp(); })); } else { - Timestamp execution_time = clock_->CurrentTime() + duration; + Timestamp execution_time = env_.clock().CurrentTime() + duration; ScheduleMaybeSendRtcpAtOrAfterTimestamp(execution_time, duration); } } diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2.h b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h index 36e6deffba..d603c454f5 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl2.h +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2.h @@ -15,28 +15,33 @@ #include #include -#include -#include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/environment/environment.h" #include "api/rtp_headers.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/video_bitrate_allocation.h" #include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" // RTCPPacketType #include "modules/rtp_rtcp/source/packet_sequencer.h" +#include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" #include "modules/rtp_rtcp/source/rtcp_receiver.h" #include "modules/rtp_rtcp/source/rtcp_sender.h" #include "modules/rtp_rtcp/source/rtp_packet_history.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/rtp_sender.h" #include "modules/rtp_rtcp/source/rtp_sender_egress.h" +#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" #include "rtc_base/gtest_prod_util.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" @@ -45,29 +50,20 @@ namespace webrtc { -class Clock; struct PacedPacketInfo; struct RTPVideoHeader; class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, public RTCPReceiver::ModuleRtpRtcp { public: - explicit ModuleRtpRtcpImpl2( - 
const RtpRtcpInterface::Configuration& configuration); + ModuleRtpRtcpImpl2(const Environment& env, + const RtpRtcpInterface::Configuration& configuration); ~ModuleRtpRtcpImpl2() override; - // This method is provided to easy with migrating away from the - // RtpRtcp::Create factory method. Since this is an internal implementation - // detail though, creating an instance of ModuleRtpRtcpImpl2 directly should - // be fine. - static std::unique_ptr Create( - const Configuration& configuration); - // Receiver part. // Called when we receive an RTCP packet. - void IncomingRtcpPacket( - rtc::ArrayView incoming_packet) override; + void IncomingRtcpPacket(ArrayView incoming_packet) override; void SetRemoteSSRC(uint32_t ssrc) override; @@ -118,12 +114,12 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, void SetRtxSendStatus(int mode) override; int RtxSendStatus() const override; - absl::optional RtxSsrc() const override; + std::optional RtxSsrc() const override; void SetRtxSendPayloadType(int payload_type, int associated_payload_type) override; - absl::optional FlexfecSsrc() const override; + std::optional FlexfecSsrc() const override; // Sends kRtcpByeCode when going from true to false. int32_t SetSendingStatus(bool sending) override; @@ -144,6 +140,13 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, int payload_type, bool force_sender_report) override; + bool CanSendPacket(const RtpPacketToSend& packet) const override; + + void AssignSequenceNumber(RtpPacketToSend& packet) override; + + void SendPacket(std::unique_ptr packet, + const PacedPacketInfo& pacing_info) override; + bool TrySendPacket(std::unique_ptr packet, const PacedPacketInfo& pacing_info) override; void OnBatchComplete() override; @@ -154,16 +157,16 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, std::vector> FetchFecPackets() override; void OnAbortedRetransmissions( - rtc::ArrayView sequence_numbers) override; + ArrayView sequence_numbers) override; void OnPacketsAcknowledged( - rtc::ArrayView sequence_numbers) override; + ArrayView sequence_numbers) override; std::vector> GeneratePadding( size_t target_size_bytes) override; std::vector GetSentRtpPacketInfos( - rtc::ArrayView sequence_numbers) const override; + ArrayView sequence_numbers) const override; size_t ExpectedPerPacketOverhead() const override; @@ -181,7 +184,7 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, int32_t SetCNAME(absl::string_view c_name) override; // Get RoundTripTime. - absl::optional LastRtt() const override; + std::optional LastRtt() const override; TimeDelta ExpectedRetransmissionTime() const override; @@ -199,8 +202,8 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, // Within this list, the `ReportBlockData::source_ssrc()`, which is the SSRC // of the corresponding outbound RTP stream, is unique. std::vector GetLatestReportBlockData() const override; - absl::optional GetSenderReportStats() const override; - absl::optional GetNonSenderRttStats() const override; + std::optional GetSenderReportStats() const override; + std::optional GetNonSenderRttStats() const override; // (REMB) Receiver Estimated Max Bitrate. 
void SetRemb(int64_t bitrate_bps, std::vector ssrcs) override; @@ -238,7 +241,7 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, void OnReceivedNack( const std::vector& nack_sequence_numbers) override; void OnReceivedRtcpReportBlocks( - rtc::ArrayView report_blocks) override; + ArrayView report_blocks) override; void OnRequestSendReport() override; void SetVideoBitrateAllocation( @@ -252,7 +255,8 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, FRIEND_TEST_ALL_PREFIXES(RtpRtcpImpl2Test, RttForReceiverOnly); struct RtpSenderContext { - explicit RtpSenderContext(TaskQueueBase& worker_queue, + explicit RtpSenderContext(const Environment& env, + TaskQueueBase& worker_queue, const RtpRtcpInterface::Configuration& config); // Storage of packets, for retransmissions and padding, if applicable. RtpPacketHistory packet_history; @@ -297,6 +301,7 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, void ScheduleMaybeSendRtcpAtOrAfterTimestamp(Timestamp execution_time, TimeDelta duration); + const Environment env_; TaskQueueBase* const worker_queue_; RTC_NO_UNIQUE_ADDRESS SequenceChecker rtcp_thread_checker_; @@ -304,8 +309,6 @@ class ModuleRtpRtcpImpl2 final : public RtpRtcpInterface, RTCPSender rtcp_sender_; RTCPReceiver rtcp_receiver_; - Clock* const clock_; - uint16_t packet_overhead_; // Send side diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc index 0821f6deb0..40bfcd3b4b 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl2_unittest.cc @@ -10,29 +10,54 @@ #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include +#include #include #include #include -#include +#include #include - -#include "absl/types/optional.h" -#include "api/field_trials_registry.h" +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/call/transport.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/rtp_headers.h" +#include "api/rtp_parameters.h" +#include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" +#include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/include/flexfec_sender.h" +#include "modules/rtp_rtcp/include/receive_statistics.h" +#include "modules/rtp_rtcp/include/report_block_data.h" +#include "modules/rtp_rtcp/include/rtcp_statistics.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/nack.h" +#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" +#include "modules/rtp_rtcp/source/rtp_header_extension_size.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/rtp_sender_video.h" -#include "rtc_base/logging.h" +#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" +#include "modules/rtp_rtcp/source/ulpfec_generator.h" +#include "modules/rtp_rtcp/source/video_fec_generator.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include 
"modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "rtc_base/buffer.h" #include "rtc_base/rate_limiter.h" -#include "rtc_base/strings/string_builder.h" -#include "test/explicit_key_value_config.h" +#include "system_wrappers/include/ntp_time.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" -#include "test/run_loop.h" #include "test/time_controller/simulated_time_controller.h" using ::testing::AllOf; @@ -44,8 +69,6 @@ using ::testing::Not; using ::testing::Optional; using ::testing::SizeIs; -using webrtc::test::ExplicitKeyValueConfig; - namespace webrtc { namespace { constexpr uint32_t kSenderSsrc = 0x12345; @@ -102,13 +125,13 @@ class SendTransport : public Transport, void SetRtpRtcpModule(ModuleRtpRtcpImpl2* receiver) { receiver_ = receiver; } void SimulateNetworkDelay(TimeDelta delay) { delay_ = delay; } - bool SendRtp(rtc::ArrayView data, - const PacketOptions& options) override { + bool SendRtp(ArrayView data, + const PacketOptions& /* options */) override { EXPECT_TRUE(last_packet_.Parse(data)); ++rtp_packets_sent_; return true; } - bool SendRtcp(rtc::ArrayView data) override { + bool SendRtcp(ArrayView data) override { test::RtcpPacketParser parser; parser.Parse(data); last_nack_list_ = parser.nack()->packet_ids(); @@ -158,7 +181,8 @@ class SendTransport : public Transport, }; class RtpRtcpModule : public RtcpPacketTypeCounterObserver, - public SendPacketObserver { + public SendPacketObserver, + public StreamDataCountersCallback { public: struct SentPacket { SentPacket(uint16_t packet_id, Timestamp capture_time, uint32_t ssrc) @@ -168,21 +192,18 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver, uint32_t ssrc; }; - RtpRtcpModule(GlobalSimulatedTimeController* time_controller, - bool is_sender, - const FieldTrialsRegistry& trials) - : time_controller_(time_controller), + RtpRtcpModule(const Environment& env, + GlobalSimulatedTimeController* time_controller, + bool is_sender) + : env_(env), is_sender_(is_sender), - trials_(trials), - receive_statistics_( - ReceiveStatistics::Create(time_controller->GetClock())), + receive_statistics_(ReceiveStatistics::Create(&env.clock())), transport_(kOneWayNetworkDelay, time_controller) { CreateModuleImpl(); } - TimeController* const time_controller_; + const Environment env_; const bool is_sender_; - const FieldTrialsRegistry& trials_; RtcpPacketTypeCounter packets_sent_; RtcpPacketTypeCounter packets_received_; std::unique_ptr receive_statistics_; @@ -196,13 +217,24 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver, counter_map_[ssrc] = packet_counter; } - void OnSendPacket(uint16_t packet_id, + void OnSendPacket(std::optional packet_id, Timestamp capture_time, uint32_t ssrc) override { - last_sent_packet_.emplace(packet_id, capture_time, ssrc); + if (packet_id.has_value()) { + last_sent_packet_.emplace(*packet_id, capture_time, ssrc); + } } - absl::optional last_sent_packet() const { + StreamDataCounters GetDataCounters(uint32_t ssrc) const override { + auto it = counters_by_ssrc.find(ssrc); + return it != counters_by_ssrc.end() ? 
it->second : StreamDataCounters(); + } + void DataCountersUpdated(const StreamDataCounters& counters, + uint32_t ssrc) override { + counters_by_ssrc[ssrc] = counters; + } + + std::optional last_sent_packet() const { return last_sent_packet_; } @@ -238,7 +270,6 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver, void CreateModuleImpl() { RtpRtcpInterface::Configuration config; config.audio = false; - config.clock = time_controller_->GetClock(); config.outgoing_transport = &transport_; config.receive_statistics = receive_statistics_.get(); config.rtcp_packet_type_counter_observer = this; @@ -246,20 +277,21 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver, config.rtcp_report_interval_ms = rtcp_report_interval_.ms(); config.local_media_ssrc = is_sender_ ? kSenderSsrc : kReceiverSsrc; config.rtx_send_ssrc = - is_sender_ ? absl::make_optional(kRtxSenderSsrc) : absl::nullopt; + is_sender_ ? std::make_optional(kRtxSenderSsrc) : std::nullopt; config.need_rtp_packet_infos = true; config.non_sender_rtt_measurement = true; - config.field_trials = &trials_; config.send_packet_observer = this; + config.rtp_stats_callback = this; config.fec_generator = fec_generator_; - impl_.reset(new ModuleRtpRtcpImpl2(config)); + impl_ = std::make_unique(env_, config); impl_->SetRemoteSSRC(is_sender_ ? kReceiverSsrc : kSenderSsrc); impl_->SetRTCPStatus(RtcpMode::kCompound); } private: std::map counter_map_; - absl::optional last_sent_packet_; + std::map counters_by_ssrc; + std::optional last_sent_packet_; VideoFecGenerator* fec_generator_ = nullptr; TimeDelta rtcp_report_interval_ = kDefaultReportInterval; }; @@ -269,13 +301,14 @@ class RtpRtcpImpl2Test : public ::testing::Test { protected: RtpRtcpImpl2Test() : time_controller_(Timestamp::Micros(133590000000000)), - field_trials_(""), - sender_(&time_controller_, - /*is_sender=*/true, - field_trials_), - receiver_(&time_controller_, - /*is_sender=*/false, - field_trials_) {} + env_(CreateEnvironment(time_controller_.GetClock(), + time_controller_.CreateTaskQueueFactory())), + sender_(env_, + &time_controller_, + /*is_sender=*/true), + receiver_(env_, + &time_controller_, + /*is_sender=*/false) {} void SetUp() override { // Send module. @@ -287,7 +320,7 @@ class RtpRtcpImpl2Test : public ::testing::Test { RTPSenderVideo::Config video_config; video_config.clock = time_controller_.GetClock(); video_config.rtp_sender = sender_.impl_->RtpSender(); - video_config.field_trials = &field_trials_; + video_config.field_trials = &env_.field_trials(); sender_video_ = std::make_unique(video_config); // Receive module. 
@@ -303,7 +336,7 @@ class RtpRtcpImpl2Test : public ::testing::Test { } void ReinitWithFec(VideoFecGenerator* fec_generator, - absl::optional red_payload_type) { + std::optional red_payload_type) { sender_.ReinintWithFec(fec_generator); EXPECT_EQ(0, sender_.impl_->SetSendingStatus(true)); sender_.impl_->SetSendingMediaStatus(true); @@ -314,7 +347,7 @@ class RtpRtcpImpl2Test : public ::testing::Test { RTPSenderVideo::Config video_config; video_config.clock = time_controller_.GetClock(); video_config.rtp_sender = sender_.impl_->RtpSender(); - video_config.field_trials = &field_trials_; + video_config.field_trials = &env_.field_trials(); video_config.fec_overhead_bytes = fec_generator->MaxPacketOverhead(); video_config.fec_type = fec_generator->GetFecType(); video_config.red_payload_type = red_payload_type; @@ -322,7 +355,7 @@ class RtpRtcpImpl2Test : public ::testing::Test { } GlobalSimulatedTimeController time_controller_; - test::ExplicitKeyValueConfig field_trials_; + const Environment env_; RtpRtcpModule sender_; std::unique_ptr sender_video_; RtpRtcpModule receiver_; @@ -374,7 +407,7 @@ class RtpRtcpImpl2Test : public ::testing::Test { nack.SetSenderSsrc(sender ? kReceiverSsrc : kSenderSsrc); nack.SetMediaSsrc(sender ? kSenderSsrc : kReceiverSsrc); nack.SetPacketIds(list, kListLength); - rtc::Buffer packet = nack.Build(); + Buffer packet = nack.Build(); module->impl_->IncomingRtcpPacket(packet); } }; @@ -766,7 +799,7 @@ TEST_F(RtpRtcpImpl2Test, StoresPacketInfoForSentPackets) { // Checks that the sender report stats are not available if no RTCP SR was sent. TEST_F(RtpRtcpImpl2Test, SenderReportStatsNotAvailable) { - EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), Eq(absl::nullopt)); + EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), Eq(std::nullopt)); } // Checks that the sender report stats are available if an RTCP SR was sent. @@ -776,7 +809,7 @@ TEST_F(RtpRtcpImpl2Test, SenderReportStatsAvailable) { // Send an SR. ASSERT_THAT(sender_.impl_->SendRTCP(kRtcpReport), Eq(0)); AdvanceTime(kOneWayNetworkDelay); - EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), Not(Eq(absl::nullopt))); + EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), Not(Eq(std::nullopt))); } // Checks that the sender report stats are not available if an RTCP SR with an @@ -793,7 +826,7 @@ TEST_F(RtpRtcpImpl2Test, SenderReportStatsNotUpdatedWithUnexpectedSsrc) { sr.SetOctetCount(456u); auto raw_packet = sr.Build(); receiver_.impl_->IncomingRtcpPacket(raw_packet); - EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), Eq(absl::nullopt)); + EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), Eq(std::nullopt)); } // Checks the stats derived from the last received RTCP SR are set correctly. @@ -812,11 +845,11 @@ TEST_F(RtpRtcpImpl2Test, SenderReportStatsCheckStatsFromLastReport) { auto raw_packet = sr.Build(); receiver_.impl_->IncomingRtcpPacket(raw_packet); - EXPECT_THAT( - receiver_.impl_->GetSenderReportStats(), - Optional(AllOf(Field(&SenderReportStats::last_remote_timestamp, Eq(ntp)), - Field(&SenderReportStats::packets_sent, Eq(kPacketCount)), - Field(&SenderReportStats::bytes_sent, Eq(kOctetCount))))); + EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), + Optional(AllOf( + Field(&SenderReportStats::last_remote_ntp_timestamp, Eq(ntp)), + Field(&SenderReportStats::packets_sent, Eq(kPacketCount)), + Field(&SenderReportStats::bytes_sent, Eq(kOctetCount))))); } // Checks that the sender report stats count equals the number of sent RTCP SRs. 
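Reviewer note: the assertions above rely on the renamed SenderReportStats fields (last_remote_timestamp becomes last_remote_ntp_timestamp, last_arrival_timestamp becomes last_arrival_ntp_timestamp, and a new Timestamp-valued last_arrival_timestamp on the environment clock is added; see the rtp_rtcp_interface.h hunk further down). A sketch of a caller consuming the stats after an SR has been received, illustrative only, where receiver is any RtpRtcpInterface implementation:

// Sketch only; field names are taken from this CL.
auto stats = receiver->GetSenderReportStats();  // optional, empty until an SR arrives
if (stats.has_value()) {
  // The NTP fields keep their previous meaning; only the names changed.
  RTC_DCHECK(stats->last_remote_ntp_timestamp.Valid());
  RTC_DCHECK(stats->last_arrival_ntp_timestamp.Valid());
  // New: arrival time expressed on the environment clock.
  Timestamp arrival = stats->last_arrival_timestamp;
  RTC_LOG(LS_INFO) << "SR arrived at " << arrival.ms()
                   << " ms, packets_sent=" << stats->packets_sent
                   << ", bytes_sent=" << stats->bytes_sent;
}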
@@ -845,8 +878,8 @@ TEST_F(RtpRtcpImpl2Test, SenderReportStatsArrivalTimestampSet) { ASSERT_THAT(sender_.impl_->SendRTCP(kRtcpReport), Eq(0)); AdvanceTime(kOneWayNetworkDelay); auto stats = receiver_.impl_->GetSenderReportStats(); - ASSERT_THAT(stats, Not(Eq(absl::nullopt))); - EXPECT_TRUE(stats->last_arrival_timestamp.Valid()); + ASSERT_THAT(stats, Not(Eq(std::nullopt))); + EXPECT_TRUE(stats->last_arrival_ntp_timestamp.Valid()); } // Checks that the packet and byte counters from an RTCP SR are not zero once @@ -1004,10 +1037,10 @@ TEST_F(RtpRtcpImpl2Test, GeneratesFlexfec) { const uint16_t fec_start_seq = sender_.impl_->SequenceNumber() + 100; RtpState start_state; start_state.sequence_number = fec_start_seq; - FlexfecSender flexfec_sender(kFlexfecPayloadType, kFlexfecSsrc, kSenderSsrc, - kNoMid, kNoRtpExtensions, kNoRtpExtensionSizes, - &start_state, time_controller_.GetClock()); - ReinitWithFec(&flexfec_sender, /*red_payload_type=*/absl::nullopt); + FlexfecSender flexfec_sender(env_, kFlexfecPayloadType, kFlexfecSsrc, + kSenderSsrc, kNoMid, kNoRtpExtensions, + kNoRtpExtensionSizes, &start_state); + ReinitWithFec(&flexfec_sender, /*red_payload_type=*/std::nullopt); // Parameters selected to generate a single FEC packet per media packet. FecProtectionParams params; @@ -1029,8 +1062,7 @@ TEST_F(RtpRtcpImpl2Test, GeneratesFlexfec) { TEST_F(RtpRtcpImpl2Test, GeneratesUlpfec) { constexpr int kUlpfecPayloadType = 118; constexpr int kRedPayloadType = 119; - UlpfecGenerator ulpfec_sender(kRedPayloadType, kUlpfecPayloadType, - time_controller_.GetClock()); + UlpfecGenerator ulpfec_sender(env_, kRedPayloadType, kUlpfecPayloadType); ReinitWithFec(&ulpfec_sender, kRedPayloadType); // Parameters selected to generate a single FEC packet per media packet. @@ -1147,4 +1179,58 @@ TEST_F(RtpRtcpImpl2Test, RtxRtpStateReflectsCurrentState) { EXPECT_EQ(rtx_state.sequence_number, rtx_packet.SequenceNumber() + 1); } +TEST_F(RtpRtcpImpl2Test, CanSendPacketReturnTrueForMediaPacketIfSendingMedia) { + RtpHeaderExtensionMap extensions; + RtpPacketToSend packet(&extensions); + packet.SetSsrc(sender_.impl_->SSRC()); + packet.set_packet_type(RtpPacketMediaType::kAudio); + sender_.impl_->SetSendingMediaStatus(true); + + EXPECT_TRUE(sender_.impl_->CanSendPacket(packet)); +} + +TEST_F(RtpRtcpImpl2Test, + CanSendPacketReturnFalseForMediaPacketIfNotSendingMedia) { + RtpHeaderExtensionMap extensions; + RtpPacketToSend packet(&extensions); + packet.SetSsrc(sender_.impl_->SSRC()); + packet.set_packet_type(RtpPacketMediaType::kAudio); + sender_.impl_->SetSendingMediaStatus(false); + + EXPECT_FALSE(sender_.impl_->CanSendPacket(packet)); +} + +TEST_F(RtpRtcpImpl2Test, + CanSendPacketReturnFalseForPaddingPacketOnMediaSsrcBeforeMediaPacket) { + RtpHeaderExtensionMap extensions; + RtpPacketToSend packet(&extensions); + packet.SetSsrc(sender_.impl_->SSRC()); + packet.set_packet_type(RtpPacketMediaType::kPadding); + sender_.impl_->SetSendingMediaStatus(true); + + EXPECT_FALSE(sender_.impl_->CanSendPacket(packet)); +} + +TEST_F(RtpRtcpImpl2Test, RtpSequenceNumberSetByAssignSequenceNumber) { + RtpHeaderExtensionMap extensions; + RtpPacketToSend packet(&extensions); + packet.SetSsrc(sender_.impl_->SSRC()); + + sender_.impl_->SetSequenceNumber(1); + sender_.impl_->AssignSequenceNumber(packet); + EXPECT_EQ(packet.SequenceNumber(), 1); + sender_.impl_->AssignSequenceNumber(packet); + EXPECT_EQ(packet.SequenceNumber(), 2); +} + +TEST_F(RtpRtcpImpl2Test, SendPacketSendsPacketOnTransport) { + RtpHeaderExtensionMap extensions; + auto packet = 
std::make_unique(&extensions); + packet->SetSsrc(sender_.impl_->SSRC()); + packet->set_packet_type(RtpPacketMediaType::kAudio); + + sender_.impl_->SendPacket(std::move(packet), PacedPacketInfo()); + EXPECT_EQ(sender_.RtpSent(), 1); +} + } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc index abf3b639f8..0fae24d0fb 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc @@ -10,17 +10,36 @@ #include "modules/rtp_rtcp/source/rtp_rtcp_impl.h" +#include +#include #include #include -#include - +#include +#include + +#include "api/array_view.h" +#include "api/call/transport.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/rtp_headers.h" #include "api/units/time_delta.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" +#include "modules/rtp_rtcp/include/receive_statistics.h" +#include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtcp_packet/nack.h" +#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/rtp_sender_video.h" -#include "rtc_base/rate_limiter.h" +#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "system_wrappers/include/clock.h" +#include "system_wrappers/include/ntp_time.h" #include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" @@ -76,15 +95,15 @@ class SendTransport : public Transport { clock_ = clock; delay_ms_ = delay_ms; } - bool SendRtp(rtc::ArrayView data, - const PacketOptions& options) override { + bool SendRtp(ArrayView data, + const PacketOptions& /* options */) override { RtpPacket packet; EXPECT_TRUE(packet.Parse(data)); ++rtp_packets_sent_; last_rtp_sequence_number_ = packet.SequenceNumber(); return true; } - bool SendRtcp(rtc::ArrayView data) override { + bool SendRtcp(ArrayView data) override { test::RtcpPacketParser parser; parser.Parse(data); last_nack_list_ = parser.nack()->packet_ids(); @@ -110,13 +129,14 @@ class SendTransport : public Transport { class RtpRtcpModule : public RtcpPacketTypeCounterObserver { public: RtpRtcpModule(SimulatedClock* clock, bool is_sender) - : is_sender_(is_sender), - receive_statistics_(ReceiveStatistics::Create(clock)), - clock_(clock) { + : env_(CreateEnvironment(clock)), + is_sender_(is_sender), + receive_statistics_(ReceiveStatistics::Create(clock)) { CreateModuleImpl(); transport_.SimulateNetworkDelay(kOneWayNetworkDelay.ms(), clock); } + const Environment env_; const bool is_sender_; RtcpPacketTypeCounter packets_sent_; RtcpPacketTypeCounter packets_received_; @@ -157,7 +177,6 @@ class RtpRtcpModule : public RtcpPacketTypeCounterObserver { void CreateModuleImpl() { RtpRtcpInterface::Configuration config; config.audio = false; - config.clock = clock_; config.outgoing_transport = &transport_; config.receive_statistics = receive_statistics_.get(); config.rtcp_packet_type_counter_observer = this; @@ -167,12 +186,11 @@ class 
RtpRtcpModule : public RtcpPacketTypeCounterObserver { config.need_rtp_packet_infos = true; config.non_sender_rtt_measurement = true; - impl_.reset(new ModuleRtpRtcpImpl(config)); + impl_.reset(new ModuleRtpRtcpImpl(env_, config)); impl_->SetRemoteSSRC(is_sender_ ? kReceiverSsrc : kSenderSsrc); impl_->SetRTCPStatus(RtcpMode::kCompound); } - SimulatedClock* const clock_; std::map counter_map_; }; } // namespace @@ -599,7 +617,7 @@ TEST_F(RtpRtcpImplTest, StoresPacketInfoForSentPackets) { // Checks that the remote sender stats are not available if no RTCP SR was sent. TEST_F(RtpRtcpImplTest, SenderReportStatsNotAvailable) { - EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), Eq(absl::nullopt)); + EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), Eq(std::nullopt)); } // Checks that the remote sender stats are available if an RTCP SR was sent. @@ -608,7 +626,7 @@ TEST_F(RtpRtcpImplTest, SenderReportStatsAvailable) { SendFrame(&sender_, sender_video_.get(), kBaseLayerTid); // Send an SR. ASSERT_THAT(sender_.impl_->SendRTCP(kRtcpReport), Eq(0)); - EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), Not(Eq(absl::nullopt))); + EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), Not(Eq(std::nullopt))); } // Checks that the remote sender stats are not available if an RTCP SR with an @@ -624,7 +642,7 @@ TEST_F(RtpRtcpImplTest, SenderReportStatsNotUpdatedWithUnexpectedSsrc) { sr.SetPacketCount(123u); sr.SetOctetCount(456u); receiver_.impl_->IncomingRtcpPacket(sr.Build()); - EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), Eq(absl::nullopt)); + EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), Eq(std::nullopt)); } // Checks the stats derived from the last received RTCP SR are set correctly. @@ -642,11 +660,11 @@ TEST_F(RtpRtcpImplTest, SenderReportStatsCheckStatsFromLastReport) { sr.SetOctetCount(kOctetCount); receiver_.impl_->IncomingRtcpPacket(sr.Build()); - EXPECT_THAT( - receiver_.impl_->GetSenderReportStats(), - Optional(AllOf(Field(&SenderReportStats::last_remote_timestamp, Eq(ntp)), - Field(&SenderReportStats::packets_sent, Eq(kPacketCount)), - Field(&SenderReportStats::bytes_sent, Eq(kOctetCount))))); + EXPECT_THAT(receiver_.impl_->GetSenderReportStats(), + Optional(AllOf( + Field(&SenderReportStats::last_remote_ntp_timestamp, Eq(ntp)), + Field(&SenderReportStats::packets_sent, Eq(kPacketCount)), + Field(&SenderReportStats::bytes_sent, Eq(kOctetCount))))); } // Checks that the remote sender stats count equals the number of sent RTCP SRs. @@ -672,8 +690,8 @@ TEST_F(RtpRtcpImplTest, SenderReportStatsArrivalTimestampSet) { // Send an SR. 
ASSERT_THAT(sender_.impl_->SendRTCP(kRtcpReport), Eq(0)); auto stats = receiver_.impl_->GetSenderReportStats(); - ASSERT_THAT(stats, Not(Eq(absl::nullopt))); - EXPECT_TRUE(stats->last_arrival_timestamp.Valid()); + ASSERT_THAT(stats, Not(Eq(std::nullopt))); + EXPECT_TRUE(stats->last_arrival_ntp_timestamp.Valid()); } // Checks that the packet and byte counters from an RTCP SR are not zero once diff --git a/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/modules/rtp_rtcp/source/rtp_rtcp_interface.h index d366bb77a2..ee577ea0eb 100644 --- a/modules/rtp_rtcp/source/rtp_rtcp_interface.h +++ b/modules/rtp_rtcp/source/rtp_rtcp_interface.h @@ -11,20 +11,27 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTP_RTCP_INTERFACE_H_ #define MODULES_RTP_RTCP_SOURCE_RTP_RTCP_INTERFACE_H_ +#include +#include #include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/field_trials_view.h" +#include "api/array_view.h" #include "api/frame_transformer_interface.h" +#include "api/rtp_headers.h" +#include "api/rtp_packet_sender.h" #include "api/scoped_refptr.h" +#include "api/transport/network_types.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/video_bitrate_allocation.h" +#include "modules/include/module_fec_types.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/report_block_data.h" -#include "modules/rtp_rtcp/include/rtp_packet_sender.h" +#include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" @@ -36,7 +43,6 @@ namespace webrtc { // Forward declarations. class FrameEncryptorInterface; class RateLimiter; -class RtcEventLog; class RTPSender; class Transport; class VideoBitrateAllocationObserver; @@ -44,20 +50,11 @@ class VideoBitrateAllocationObserver; class RtpRtcpInterface : public RtcpFeedbackSenderInterface { public: struct Configuration { - Configuration() = default; - Configuration(Configuration&& rhs) = default; - - Configuration(const Configuration&) = delete; - Configuration& operator=(const Configuration&) = delete; - // True for a audio version of the RTP/RTCP module object false will create // a video version. bool audio = false; bool receiver_only = false; - // The clock to use to read time. If nullptr then system clock will be used. - Clock* clock = nullptr; - ReceiveStatisticsProvider* receive_statistics = nullptr; // Transport object that will be called when packets are ready to be sent @@ -75,6 +72,11 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { NetworkLinkRtcpObserver* network_link_rtcp_observer = nullptr; NetworkStateEstimateObserver* network_state_estimate_observer = nullptr; + + // DEPRECATED, transport_feedback_callback is no longer invoked by the RTP + // module except from DEPRECATED_RtpSenderEgress. + // TODO: bugs.webrtc.org/15368 - Delete once DEPRECATED_RtpSenderEgress is + // deleted. 
TransportFeedbackObserver* transport_feedback_callback = nullptr; VideoBitrateAllocationObserver* bitrate_allocation_observer = nullptr; RtcpRttStats* rtt_stats = nullptr; @@ -95,8 +97,6 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { VideoFecGenerator* fec_generator = nullptr; BitrateStatisticsObserver* send_bitrate_observer = nullptr; - SendSideDelayObserver* send_side_delay_observer = nullptr; - RtcEventLog* event_log = nullptr; SendPacketObserver* send_packet_observer = nullptr; RateLimiter* retransmission_rate_limiter = nullptr; StreamDataCountersCallback* rtp_stats_callback = nullptr; @@ -106,7 +106,7 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // Update network2 instead of pacer_exit field of video timing extension. bool populate_network2_timestamp = false; - rtc::scoped_refptr frame_transformer; + scoped_refptr frame_transformer; // E2EE Custom Video Frame Encryption FrameEncryptorInterface* frame_encryptor = nullptr; @@ -123,13 +123,10 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // done by RTCP RR acking. bool always_send_mid_and_rid = false; - // If set, field trials are read from `field_trials`. - const FieldTrialsView* field_trials = nullptr; - // SSRCs for media and retransmission, respectively. // FlexFec SSRC is fetched from `flexfec_sender`. uint32_t local_media_ssrc = 0; - absl::optional rtx_send_ssrc; + std::optional rtx_send_ssrc; bool need_rtp_packet_infos = false; @@ -150,10 +147,14 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // Stats for RTCP sender reports (SR) for a specific SSRC. // Refer to https://tools.ietf.org/html/rfc3550#section-6.4.1. struct SenderReportStats { + // Arrival timestamp (enviroment clock) for the last received RTCP SR. + Timestamp last_arrival_timestamp = Timestamp::Zero(); // Arrival NTP timestamp for the last received RTCP SR. - NtpTime last_arrival_timestamp; + // TODO: bugs.webrtc.org/370535296 - Remove the ntp arrival timestamp when + // linked issue is fixed. + NtpTime last_arrival_ntp_timestamp; // Received (a.k.a., remote) NTP timestamp for the last received RTCP SR. - NtpTime last_remote_timestamp; + NtpTime last_remote_ntp_timestamp; // Received (a.k.a., remote) RTP timestamp from the last received RTCP SR. uint32_t last_remote_rtp_timestamp = 0; // Total number of RTP data packets transmitted by the sender since starting @@ -173,7 +174,7 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // Refer to https://datatracker.ietf.org/doc/html/rfc3611#section-2. struct NonSenderRttStats { // https://www.w3.org/TR/webrtc-stats/#dom-rtcremoteoutboundrtpstreamstats-roundtriptime - absl::optional round_trip_time; + std::optional round_trip_time; // https://www.w3.org/TR/webrtc-stats/#dom-rtcremoteoutboundrtpstreamstats-totalroundtriptime TimeDelta total_round_trip_time = TimeDelta::Zero(); // https://www.w3.org/TR/webrtc-stats/#dom-rtcremoteoutboundrtpstreamstats-roundtriptimemeasurements @@ -184,8 +185,7 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // Receiver functions // ************************************************************************** - virtual void IncomingRtcpPacket( - rtc::ArrayView incoming_packet) = 0; + virtual void IncomingRtcpPacket(ArrayView incoming_packet) = 0; virtual void SetRemoteSSRC(uint32_t ssrc) = 0; @@ -265,7 +265,7 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { virtual int RtxSendStatus() const = 0; // Returns the SSRC used for RTX if set, otherwise a nullopt. 
- virtual absl::optional RtxSsrc() const = 0; + virtual std::optional RtxSsrc() const = 0; // Sets the payload type to use when sending RTX packets. Note that this // doesn't enable RTX, only the payload type is set. @@ -273,9 +273,9 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { int associated_payload_type) = 0; // Returns the FlexFEC SSRC, if there is one. - virtual absl::optional FlexfecSsrc() const = 0; + virtual std::optional FlexfecSsrc() const = 0; - // Sets sending status. Sends kRtcpByeCode when going from true to false. + // Sets sending status. // Returns -1 on failure else 0. virtual int32_t SetSendingStatus(bool sending) = 0; @@ -314,8 +314,23 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { virtual bool TrySendPacket(std::unique_ptr packet, const PacedPacketInfo& pacing_info) = 0; - // Notifies that a batch of packet sends is completed. The implementation can - // use this to optimize packet sending. + // Returns true if the module can send media packets and the module is ready + // so send `packet` A RTP Sequence numbers may or may not have been assigned + // to the packet. + virtual bool CanSendPacket(const RtpPacketToSend& packet) const = 0; + + // Assigns continuous RTP sequence number to packet. + virtual void AssignSequenceNumber(RtpPacketToSend& packet) = 0; + + // Send the packet to transport. Before using this method, a caller must + // ensure the packet can be sent by first checking if the packet can be sent + // using CanSendPacket and the packet must be assigned a sequence number using + // AssignSequenceNumber. + virtual void SendPacket(std::unique_ptr packet, + const PacedPacketInfo& pacing_info) = 0; + + // Notifies that a batch of packet sends is completed. The implementation + // can use this to optimize packet sending. virtual void OnBatchComplete() = 0; // Update the FEC protection parameters to use for delta- and key-frames. @@ -330,16 +345,16 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { virtual std::vector> FetchFecPackets() = 0; virtual void OnAbortedRetransmissions( - rtc::ArrayView sequence_numbers) = 0; + ArrayView sequence_numbers) = 0; virtual void OnPacketsAcknowledged( - rtc::ArrayView sequence_numbers) = 0; + ArrayView sequence_numbers) = 0; virtual std::vector> GeneratePadding( size_t target_size_bytes) = 0; virtual std::vector GetSentRtpPacketInfos( - rtc::ArrayView sequence_numbers) const = 0; + ArrayView sequence_numbers) const = 0; // Returns an expected per packet overhead representing the main RTP header, // any CSRCs, and the registered header extensions that are expected on all @@ -373,7 +388,7 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { virtual int32_t SetCNAME(absl::string_view cname) = 0; // Returns current RTT (round-trip time) estimate. - virtual absl::optional LastRtt() const = 0; + virtual std::optional LastRtt() const = 0; // Returns the estimated RTT, with fallback to a default value. virtual TimeDelta ExpectedRetransmissionTime() const = 0; @@ -394,9 +409,9 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // that pair. virtual std::vector GetLatestReportBlockData() const = 0; // Returns stats based on the received RTCP SRs. - virtual absl::optional GetSenderReportStats() const = 0; + virtual std::optional GetSenderReportStats() const = 0; // Returns non-sender RTT stats, based on DLRR. 
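Reviewer note: the new CanSendPacket() / AssignSequenceNumber() / SendPacket() comments above prescribe a strict call order that the added unittests in rtp_rtcp_impl2_unittest.cc exercise one step at a time. A combined sketch of a caller (illustrative only; rtp_rtcp is any implementation of this interface and the extension map is left empty for brevity):

// Sketch of the intended call order on the new send path.
RtpHeaderExtensionMap extensions;
auto packet = std::make_unique<RtpPacketToSend>(&extensions);
packet->SetSsrc(rtp_rtcp->SSRC());
packet->set_packet_type(RtpPacketMediaType::kAudio);

if (rtp_rtcp->CanSendPacket(*packet)) {
  // 1. Module is sending media; padding on the media SSRC additionally
  //    requires that a media packet has already been sent.
  rtp_rtcp->AssignSequenceNumber(*packet);  // 2. assigns a contiguous sequence number
  rtp_rtcp->SendPacket(std::move(packet),   // 3. hands the packet to the transport
                       PacedPacketInfo());
}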
- virtual absl::optional GetNonSenderRttStats() const = 0; + virtual std::optional GetNonSenderRttStats() const = 0; // (REMB) Receiver Estimated Max Bitrate. // Schedules sending REMB on next and following sender/receiver reports. diff --git a/modules/rtp_rtcp/source/rtp_sender.cc b/modules/rtp_rtcp/source/rtp_sender.cc index d899b4f44e..888e601e7f 100644 --- a/modules/rtp_rtcp/source/rtp_sender.cc +++ b/modules/rtp_rtcp/source/rtp_sender.cc @@ -11,29 +11,39 @@ #include "modules/rtp_rtcp/source/rtp_sender.h" #include -#include +#include +#include +#include #include +#include #include #include +#include -#include "absl/strings/match.h" #include "absl/strings/string_view.h" #include "api/array_view.h" -#include "api/rtc_event_log/rtc_event_log.h" +#include "api/environment/environment.h" +#include "api/rtp_headers.h" +#include "api/rtp_packet_sender.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" -#include "modules/rtp_rtcp/include/rtp_cvo.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/corruption_detection_extension.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" +#include "modules/rtp_rtcp/source/rtp_header_extension_size.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet_history.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "modules/rtp_rtcp/source/time_util.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_minmax.h" #include "rtc_base/rate_limiter.h" -#include "rtc_base/time_utils.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -81,6 +91,7 @@ constexpr RtpExtensionSize kVideoExtensionSizes[] = { CreateMaxExtensionSize(), CreateMaxExtensionSize(), CreateMaxExtensionSize(), + CreateMaxExtensionSize(), {RtpGenericFrameDescriptorExtension00::kId, RtpGenericFrameDescriptorExtension00::kMaxSizeBytes}, }; @@ -89,7 +100,7 @@ constexpr RtpExtensionSize kVideoExtensionSizes[] = { constexpr RtpExtensionSize kAudioExtensionSizes[] = { CreateExtensionSize(), CreateExtensionSize(), - CreateExtensionSize(), + CreateExtensionSize(), CreateExtensionSize(), CreateExtensionSize(), CreateExtensionSize(), @@ -123,6 +134,7 @@ bool IsNonVolatile(RTPExtensionType type) { case kRtpExtensionVideoTiming: case kRtpExtensionColorSpace: case kRtpExtensionVideoFrameTrackingId: + case kRtpExtensionCorruptionDetection: return false; case kRtpExtensionNone: case kRtpExtensionNumberOfExtensions: @@ -141,16 +153,17 @@ bool HasBweExtension(const RtpHeaderExtensionMap& extensions_map) { } // namespace -RTPSender::RTPSender(const RtpRtcpInterface::Configuration& config, +RTPSender::RTPSender(const Environment& env, + const RtpRtcpInterface::Configuration& config, RtpPacketHistory* packet_history, RtpPacketSender* packet_sender) - : clock_(config.clock), + : clock_(&env.clock()), random_(clock_->TimeInMicroseconds()), audio_configured_(config.audio), ssrc_(config.local_media_ssrc), rtx_ssrc_(config.rtx_send_ssrc), flexfec_ssrc_(config.fec_generator ? 
config.fec_generator->FecSsrc() - : absl::nullopt), + : std::nullopt), packet_history_(packet_history), paced_sender_(packet_sender), sending_media_(true), // Default to sending media. @@ -186,19 +199,17 @@ RTPSender::~RTPSender() { // to understand performance attributes and possibly remove locks. } -rtc::ArrayView RTPSender::FecExtensionSizes() { - return rtc::MakeArrayView(kFecOrPaddingExtensionSizes, - arraysize(kFecOrPaddingExtensionSizes)); +ArrayView RTPSender::FecExtensionSizes() { + return MakeArrayView(kFecOrPaddingExtensionSizes, + arraysize(kFecOrPaddingExtensionSizes)); } -rtc::ArrayView RTPSender::VideoExtensionSizes() { - return rtc::MakeArrayView(kVideoExtensionSizes, - arraysize(kVideoExtensionSizes)); +ArrayView RTPSender::VideoExtensionSizes() { + return MakeArrayView(kVideoExtensionSizes, arraysize(kVideoExtensionSizes)); } -rtc::ArrayView RTPSender::AudioExtensionSizes() { - return rtc::MakeArrayView(kAudioExtensionSizes, - arraysize(kAudioExtensionSizes)); +ArrayView RTPSender::AudioExtensionSizes() { + return MakeArrayView(kAudioExtensionSizes, arraysize(kAudioExtensionSizes)); } void RTPSender::SetExtmapAllowMixed(bool extmap_allow_mixed) { @@ -291,6 +302,7 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id) { if (retransmit_packet) { retransmit_packet->set_retransmitted_sequence_number( stored_packet.SequenceNumber()); + retransmit_packet->set_original_ssrc(stored_packet.Ssrc()); } return retransmit_packet; }); @@ -313,7 +325,8 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id) { return packet_size; } -void RTPSender::OnReceivedAckOnSsrc(int64_t extended_highest_sequence_number) { +void RTPSender::OnReceivedAckOnSsrc( + int64_t /* extended_highest_sequence_number */) { MutexLock lock(&send_mutex_); bool update_required = !ssrc_has_acked_; ssrc_has_acked_ = true; @@ -323,7 +336,7 @@ void RTPSender::OnReceivedAckOnSsrc(int64_t extended_highest_sequence_number) { } void RTPSender::OnReceivedAckOnRtxSsrc( - int64_t extended_highest_sequence_number) { + int64_t /* extended_highest_sequence_number */) { MutexLock lock(&send_mutex_); bool update_required = !rtx_ssrc_has_acked_; rtx_ssrc_has_acked_ = true; @@ -405,15 +418,15 @@ std::vector> RTPSender::GeneratePadding( max_packet_size_ - max_padding_fec_packet_header_; if (audio_configured_) { // Allow smaller padding packets for audio. - padding_bytes_in_packet = rtc::SafeClamp( - bytes_left, kMinAudioPaddingLength, - rtc::SafeMin(max_payload_size, kMaxPaddingLength)); + padding_bytes_in_packet = + SafeClamp(bytes_left, kMinAudioPaddingLength, + SafeMin(max_payload_size, kMaxPaddingLength)); } else { // Always send full padding packets. This is accounted for by the // RtpPacketSender, which will make sure we don't send too much padding even // if a single packet is larger than requested. // We do this to avoid frequently sending small packets on higher bitrates. - padding_bytes_in_packet = rtc::SafeMin(max_payload_size, kMaxPaddingLength); + padding_bytes_in_packet = SafeMin(max_payload_size, kMaxPaddingLength); } while (bytes_left > 0) { @@ -426,6 +439,17 @@ std::vector> RTPSender::GeneratePadding( break; } padding_packet->SetSsrc(ssrc_); + + if (always_send_mid_and_rid_ || !ssrc_has_acked_) { + // These are no-ops if the corresponding header extension is not + // registered. 
+ if (!mid_.empty()) { + padding_packet->SetExtension(mid_); + } + if (!rid_.empty()) { + padding_packet->SetExtension(rid_); + } + } } else { // Without abs-send-time or transport sequence number a media packet // must be sent before padding so that the timestamps used for @@ -441,18 +465,21 @@ std::vector> RTPSender::GeneratePadding( RTC_DCHECK(!rtx_payload_type_map_.empty()); padding_packet->SetSsrc(*rtx_ssrc_); padding_packet->SetPayloadType(rtx_payload_type_map_.begin()->second); - } - if (rtp_header_extension_map_.IsRegistered(TransportSequenceNumber::kId)) { - padding_packet->ReserveExtension(); - } - if (rtp_header_extension_map_.IsRegistered(TransmissionOffset::kId)) { - padding_packet->ReserveExtension(); - } - if (rtp_header_extension_map_.IsRegistered(AbsoluteSendTime::kId)) { - padding_packet->ReserveExtension(); + if (always_send_mid_and_rid_ || !rtx_ssrc_has_acked_) { + if (!mid_.empty()) { + padding_packet->SetExtension(mid_); + } + if (!rid_.empty()) { + padding_packet->SetExtension(rid_); + } + } } + padding_packet->ReserveExtension(); + padding_packet->ReserveExtension(); + padding_packet->ReserveExtension(); + padding_packet->SetPadding(padding_bytes_in_packet); bytes_left -= std::min(bytes_left, padding_bytes_in_packet); padding_packets.push_back(std::move(padding_packet)); @@ -488,7 +515,7 @@ size_t RTPSender::ExpectedPerPacketOverhead() const { } std::unique_ptr RTPSender::AllocatePacket( - rtc::ArrayView csrcs) { + ArrayView csrcs) { MutexLock lock(&send_mutex_); RTC_DCHECK_LE(csrcs.size(), kRtpCsrcSize); if (csrcs.size() > max_num_csrcs_) { @@ -624,9 +651,9 @@ static void CopyHeaderAndExtensionsToRtxPacket(const RtpPacketToSend& packet, continue; } - rtc::ArrayView source = packet.FindExtension(extension); + ArrayView source = packet.FindExtension(extension); - rtc::ArrayView destination = + ArrayView destination = rtx_packet->AllocateExtension(extension, source.size()); // Could happen if any: diff --git a/modules/rtp_rtcp/source/rtp_sender.h b/modules/rtp_rtcp/source/rtp_sender.h index a398f16d46..80c1c84043 100644 --- a/modules/rtp_rtcp/source/rtp_sender.h +++ b/modules/rtp_rtcp/source/rtp_sender.h @@ -11,23 +11,23 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTP_SENDER_H_ #define MODULES_RTP_RTCP_SOURCE_RTP_SENDER_H_ +#include +#include #include #include +#include #include -#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" -#include "api/call/transport.h" -#include "api/field_trials_view.h" +#include "api/environment/environment.h" +#include "api/rtp_packet_sender.h" #include "modules/rtp_rtcp/include/flexfec_sender.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" -#include "modules/rtp_rtcp/include/rtp_packet_sender.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_header_extension_size.h" #include "modules/rtp_rtcp/source/rtp_packet_history.h" -#include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/random.h" #include "rtc_base/synchronization/mutex.h" @@ -37,7 +37,6 @@ namespace webrtc { class FrameEncryptorInterface; class RateLimiter; -class RtcEventLog; class RtpPacketToSend; // Maximum amount of padding in RFC 3550 is 255 bytes. 
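Reviewer note on the GeneratePadding() hunk above: padding generated before the first ACK on an SSRC (or always, when always_send_mid_and_rid is set) now repeats MID/RID, and the BWE-related extensions are reserved unconditionally instead of behind per-registration checks. A condensed restatement of that rule as a hypothetical helper; the extension type names are my reconstruction, since the template arguments were lost in this paste, and the RTX branch mirrors this with rtx_ssrc_has_acked_ and the repaired stream id:

// Hypothetical helper restating the new padding rule; not part of the CL.
void SetPaddingExtensions(RtpPacketToSend& padding,
                          const std::string& mid,
                          const std::string& rid,
                          bool always_send_mid_and_rid,
                          bool ssrc_has_acked) {
  if (always_send_mid_and_rid || !ssrc_has_acked) {
    // SetExtension() is a no-op if the extension was never registered.
    if (!mid.empty()) padding.SetExtension<RtpMid>(mid);
    if (!rid.empty()) padding.SetExtension<RtpStreamId>(rid);
  }
  // Reserved on every padding packet so the pacer/BWE fields can be written
  // at send time.
  padding.ReserveExtension<TransportSequenceNumber>();
  padding.ReserveExtension<TransmissionOffset>();
  padding.ReserveExtension<AbsoluteSendTime>();
}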
@@ -45,9 +44,11 @@ constexpr size_t kMaxPaddingLength = 255; class RTPSender { public: - RTPSender(const RtpRtcpInterface::Configuration& config, + RTPSender(const Environment& env, + const RtpRtcpInterface::Configuration& config, RtpPacketHistory* packet_history, RtpPacketSender* packet_sender); + RTPSender(const RTPSender&) = delete; RTPSender& operator=(const RTPSender&) = delete; @@ -102,7 +103,7 @@ class RTPSender { // RTX. void SetRtxStatus(int mode) RTC_LOCKS_EXCLUDED(send_mutex_); int RtxStatus() const RTC_LOCKS_EXCLUDED(send_mutex_); - absl::optional RtxSsrc() const RTC_LOCKS_EXCLUDED(send_mutex_) { + std::optional RtxSsrc() const RTC_LOCKS_EXCLUDED(send_mutex_) { return rtx_ssrc_; } // Returns expected size difference between an RTX packet and media packet @@ -113,22 +114,21 @@ class RTPSender { RTC_LOCKS_EXCLUDED(send_mutex_); // Size info for header extensions used by FEC packets. - static rtc::ArrayView FecExtensionSizes() + static ArrayView FecExtensionSizes() RTC_LOCKS_EXCLUDED(send_mutex_); // Size info for header extensions used by video packets. - static rtc::ArrayView VideoExtensionSizes() + static ArrayView VideoExtensionSizes() RTC_LOCKS_EXCLUDED(send_mutex_); // Size info for header extensions used by audio packets. - static rtc::ArrayView AudioExtensionSizes() + static ArrayView AudioExtensionSizes() RTC_LOCKS_EXCLUDED(send_mutex_); // Create empty packet, fills ssrc, csrcs and reserve place for header // extensions RtpSender updates before sending. std::unique_ptr AllocatePacket( - rtc::ArrayView csrcs = {}) - RTC_LOCKS_EXCLUDED(send_mutex_); + ArrayView csrcs = {}) RTC_LOCKS_EXCLUDED(send_mutex_); // Maximum header overhead per fec/padding packet. size_t FecOrPaddingPacketMaxRtpHeaderLength() const @@ -140,7 +140,7 @@ class RTPSender { uint32_t SSRC() const RTC_LOCKS_EXCLUDED(send_mutex_) { return ssrc_; } - absl::optional FlexfecSsrc() const RTC_LOCKS_EXCLUDED(send_mutex_) { + std::optional FlexfecSsrc() const RTC_LOCKS_EXCLUDED(send_mutex_) { return flexfec_ssrc_; } @@ -172,8 +172,8 @@ class RTPSender { const bool audio_configured_; const uint32_t ssrc_; - const absl::optional rtx_ssrc_; - const absl::optional flexfec_ssrc_; + const std::optional rtx_ssrc_; + const std::optional flexfec_ssrc_; RtpPacketHistory* const packet_history_; RtpPacketSender* const paced_sender_; diff --git a/modules/rtp_rtcp/source/rtp_sender_audio.cc b/modules/rtp_rtcp/source/rtp_sender_audio.cc index 62174e343c..e6406a35a0 100644 --- a/modules/rtp_rtcp/source/rtp_sender_audio.cc +++ b/modules/rtp_rtcp/source/rtp_sender_audio.cc @@ -12,44 +12,32 @@ #include +#include #include +#include #include #include #include "absl/strings/match.h" -#include "absl/types/optional.h" -#include "api/audio_codecs/audio_format.h" +#include "absl/strings/string_view.h" #include "api/rtp_headers.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/absolute_capture_time_sender.h" #include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/dtmf_queue.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "modules/rtp_rtcp/source/time_util.h" +#include "modules/rtp_rtcp/source/rtp_sender.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/trace_event.h" -#include "system_wrappers/include/ntp_time.h" +#include 
"rtc_base/numerics/safe_conversions.h" +#include "rtc_base/synchronization/mutex.h" +#include "system_wrappers/include/clock.h" namespace webrtc { -namespace { -[[maybe_unused]] const char* FrameTypeToString(AudioFrameType frame_type) { - switch (frame_type) { - case AudioFrameType::kEmptyFrame: - return "empty"; - case AudioFrameType::kAudioFrameSpeech: - return "audio_speech"; - case AudioFrameType::kAudioFrameCN: - return "audio_cn"; - } - RTC_CHECK_NOTREACHED(); -} - -} // namespace - RTPSenderAudio::RTPSenderAudio(Clock* clock, RTPSender* rtp_sender) : clock_(clock), rtp_sender_(rtp_sender), @@ -62,8 +50,8 @@ RTPSenderAudio::~RTPSenderAudio() {} int32_t RTPSenderAudio::RegisterAudioPayload(absl::string_view payload_name, const int8_t payload_type, const uint32_t frequency, - const size_t channels, - const uint32_t rate) { + const size_t /* channels */, + const uint32_t /* rate */) { if (absl::EqualsIgnoreCase(payload_name, "cn")) { MutexLock lock(&send_audio_mutex_); // we can have multiple CNG payload types @@ -92,7 +80,7 @@ int32_t RTPSenderAudio::RegisterAudioPayload(absl::string_view payload_name, return 0; } else if (payload_name == "audio") { MutexLock lock(&send_audio_mutex_); - encoder_rtp_timestamp_frequency_ = frequency; + encoder_rtp_timestamp_frequency_ = dchecked_cast(frequency); return 0; } return 0; @@ -141,40 +129,9 @@ bool RTPSenderAudio::MarkerBit(AudioFrameType frame_type, int8_t payload_type) { return marker_bit; } -bool RTPSenderAudio::SendAudio(AudioFrameType frame_type, - int8_t payload_type, - uint32_t rtp_timestamp, - const uint8_t* payload_data, - size_t payload_size) { - return SendAudio({.type = frame_type, - .payload{payload_data, payload_size}, - .payload_id = payload_type, - .rtp_timestamp = rtp_timestamp}); -} - -bool RTPSenderAudio::SendAudio(AudioFrameType frame_type, - int8_t payload_type, - uint32_t rtp_timestamp, - const uint8_t* payload_data, - size_t payload_size, - int64_t absolute_capture_timestamp_ms) { - RtpAudioFrame frame = { - .type = frame_type, - .payload{payload_data, payload_size}, - .payload_id = payload_type, - .rtp_timestamp = rtp_timestamp, - }; - if (absolute_capture_timestamp_ms > 0) { - frame.capture_time = Timestamp::Millis(absolute_capture_timestamp_ms); - } - return SendAudio(frame); -} - bool RTPSenderAudio::SendAudio(const RtpAudioFrame& frame) { RTC_DCHECK_GE(frame.payload_id, 0); RTC_DCHECK_LE(frame.payload_id, 127); - TRACE_EVENT_ASYNC_STEP1("webrtc", "Audio", frame.rtp_timestamp, "Send", - "type", FrameTypeToString(frame.type)); // From RFC 4733: // A source has wide latitude as to how often it sends event updates. A @@ -182,14 +139,23 @@ bool RTPSenderAudio::SendAudio(const RtpAudioFrame& frame) { // Alternatively, a source MAY decide to use a different spacing for event // updates, with a value of 50 ms RECOMMENDED. constexpr int kDtmfIntervalTimeMs = 50; - uint8_t audio_level_dbov = 0; uint32_t dtmf_payload_freq = 0; - absl::optional encoder_rtp_timestamp_frequency; + std::optional absolute_capture_time; { MutexLock lock(&send_audio_mutex_); - audio_level_dbov = audio_level_dbov_; dtmf_payload_freq = dtmf_payload_freq_; - encoder_rtp_timestamp_frequency = encoder_rtp_timestamp_frequency_; + if (frame.capture_time.has_value()) { + // Send absolute capture time periodically in order to optimize and save + // network traffic. Missing absolute capture times can be interpolated on + // the receiving end if sending intervals are small enough. 
+ absolute_capture_time = absolute_capture_time_sender_.OnSendPacket( + rtp_sender_->SSRC(), frame.rtp_timestamp, + // Replace missing value with 0 (invalid frequency), this will trigger + // absolute capture time sending. + encoder_rtp_timestamp_frequency_.value_or(0), + clock_->ConvertTimestampToNtpTime(*frame.capture_time), + /*estimated_capture_clock_offset=*/0); + } } // Check if we have pending DTMFs to send @@ -273,35 +239,21 @@ bool RTPSenderAudio::SendAudio(const RtpAudioFrame& frame) { return false; } - std::unique_ptr packet = rtp_sender_->AllocatePacket(); + std::unique_ptr packet = + rtp_sender_->AllocatePacket(frame.csrcs); packet->SetMarker(MarkerBit(frame.type, frame.payload_id)); packet->SetPayloadType(frame.payload_id); packet->SetTimestamp(frame.rtp_timestamp); packet->set_capture_time(clock_->CurrentTime()); - // Update audio level extension, if included. - packet->SetExtension( - frame.type == AudioFrameType::kAudioFrameSpeech, - frame.audio_level_dbov.value_or(audio_level_dbov)); - - if (frame.capture_time.has_value()) { - // Send absolute capture time periodically in order to optimize and save - // network traffic. Missing absolute capture times can be interpolated on - // the receiving end if sending intervals are small enough. - auto absolute_capture_time = absolute_capture_time_sender_.OnSendPacket( - AbsoluteCaptureTimeSender::GetSource(packet->Ssrc(), packet->Csrcs()), - packet->Timestamp(), - // Replace missing value with 0 (invalid frequency), this will trigger - // absolute capture time sending. - encoder_rtp_timestamp_frequency.value_or(0), - static_cast( - clock_->ConvertTimestampToNtpTime(*frame.capture_time)), - /*estimated_capture_clock_offset=*/0); - if (absolute_capture_time) { - // It also checks that extension was registered during SDP negotiation. If - // not then setter won't do anything. - packet->SetExtension( - *absolute_capture_time); - } + // Set audio level extension, if included. + packet->SetExtension( + AudioLevel(frame.type == AudioFrameType::kAudioFrameSpeech, + frame.audio_level_dbov.value_or(127))); + + if (absolute_capture_time.has_value()) { + // It also checks that extension was registered during SDP negotiation. If + // not then setter won't do anything. 
+ packet->SetExtension(*absolute_capture_time); } uint8_t* payload = packet->AllocatePayload(frame.payload.size()); @@ -312,9 +264,6 @@ bool RTPSenderAudio::SendAudio(const RtpAudioFrame& frame) { MutexLock lock(&send_audio_mutex_); last_payload_type_ = frame.payload_id; } - TRACE_EVENT_ASYNC_END2("webrtc", "Audio", frame.rtp_timestamp, "timestamp", - packet->Timestamp(), "seqnum", - packet->SequenceNumber()); packet->set_packet_type(RtpPacketMediaType::kAudio); packet->set_allow_retransmission(true); std::vector> packets(1); @@ -326,16 +275,6 @@ bool RTPSenderAudio::SendAudio(const RtpAudioFrame& frame) { return true; } -// Audio level magnitude and voice activity flag are set for each RTP packet -int32_t RTPSenderAudio::SetAudioLevel(uint8_t level_dbov) { - if (level_dbov > 127) { - return -1; - } - MutexLock lock(&send_audio_mutex_); - audio_level_dbov_ = level_dbov; - return 0; -} - // Send a TelephoneEvent tone using RFC 2833 (4733) int32_t RTPSenderAudio::SendTelephoneEvent(uint8_t key, uint16_t time_ms, diff --git a/modules/rtp_rtcp/source/rtp_sender_audio.h b/modules/rtp_rtcp/source/rtp_sender_audio.h index ee4e92635f..6a87d358cb 100644 --- a/modules/rtp_rtcp/source/rtp_sender_audio.h +++ b/modules/rtp_rtcp/source/rtp_sender_audio.h @@ -14,9 +14,11 @@ #include #include -#include +#include #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/units/timestamp.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/rtp_rtcp/source/absolute_capture_time_sender.h" #include "modules/rtp_rtcp/source/dtmf_queue.h" @@ -46,7 +48,7 @@ class RTPSenderAudio { struct RtpAudioFrame { AudioFrameType type = AudioFrameType::kAudioFrameSpeech; - rtc::ArrayView payload; + ArrayView payload; // Payload id to write to the payload type field of the rtp packet. int payload_id = -1; @@ -55,35 +57,18 @@ class RTPSenderAudio { uint32_t rtp_timestamp = 0; // capture time of the audio frame in the same epoch as `clock->CurrentTime` - absl::optional capture_time; + std::optional capture_time; // Audio level in dBov for // header-extension-for-audio-level-indication. // Valid range is [0,127]. Actual value is negative. - absl::optional audio_level_dbov; + std::optional audio_level_dbov; + + // Contributing sources list. + ArrayView csrcs; }; bool SendAudio(const RtpAudioFrame& frame); - [[deprecated]] bool SendAudio(AudioFrameType frame_type, - int8_t payload_type, - uint32_t rtp_timestamp, - const uint8_t* payload_data, - size_t payload_size); - - // `absolute_capture_timestamp_ms` and `Clock::CurrentTime` - // should be using the same epoch. - [[deprecated]] bool SendAudio(AudioFrameType frame_type, - int8_t payload_type, - uint32_t rtp_timestamp, - const uint8_t* payload_data, - size_t payload_size, - int64_t absolute_capture_timestamp_ms); - - // Store the audio level in dBov for - // header-extension-for-audio-level-indication. - // Valid range is [0,127]. Actual value is negative. - [[deprecated]] int32_t SetAudioLevel(uint8_t level_dbov); - // Send a DTMF tone using RFC 2833 (4733) int32_t SendTelephoneEvent(uint8_t key, uint16_t time_ms, uint8_t level); @@ -122,15 +107,13 @@ class RTPSenderAudio { int8_t cngfb_payload_type_ RTC_GUARDED_BY(send_audio_mutex_) = -1; int8_t last_payload_type_ RTC_GUARDED_BY(send_audio_mutex_) = -1; - // Audio level indication. 
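Reviewer note: with the RtpAudioFrame changes above, per-frame audio level and contributing sources replace the removed SetAudioLevel() setter and the deprecated SendAudio() overloads. A sketch of a caller, mirroring the SendsCsrcs test later in this CL (payload bytes, payload type and CSRC values are placeholders):

// Sketch: per-frame audio level and CSRCs via RtpAudioFrame.
uint8_t payload[] = {47, 11, 32, 93, 89};       // placeholder payload
std::vector<uint32_t> csrcs = {123, 456, 789};  // placeholder CSRCs
RTPSenderAudio::RtpAudioFrame frame = {
    .type = AudioFrameType::kAudioFrameSpeech,
    .payload = payload,
    .payload_id = 127,            // placeholder payload type
    .rtp_timestamp = 4711,
    .audio_level_dbov = 30,       // [0,127]; actual value is negative dBov
    .csrcs = csrcs,
};
bool sent = rtp_sender_audio->SendAudio(frame);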
- // (https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/) - uint8_t audio_level_dbov_ RTC_GUARDED_BY(send_audio_mutex_) = 127; OneTimeEvent first_packet_sent_; - absl::optional encoder_rtp_timestamp_frequency_ + std::optional encoder_rtp_timestamp_frequency_ RTC_GUARDED_BY(send_audio_mutex_); - AbsoluteCaptureTimeSender absolute_capture_time_sender_; + AbsoluteCaptureTimeSender absolute_capture_time_sender_ + RTC_GUARDED_BY(send_audio_mutex_); }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc index 0db610c149..6ce33aeb12 100644 --- a/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_sender_audio_unittest.cc @@ -10,15 +10,24 @@ #include "modules/rtp_rtcp/source/rtp_sender_audio.h" +#include #include #include +#include "api/array_view.h" +#include "api/call/transport.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/rtp_headers.h" +#include "api/units/timestamp.h" +#include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "rtc_base/thread.h" +#include "system_wrappers/include/clock.h" +#include "system_wrappers/include/ntp_time.h" #include "test/gmock.h" #include "test/gtest.h" @@ -39,18 +48,18 @@ using ::testing::ElementsAreArray; class LoopbackTransportTest : public webrtc::Transport { public: LoopbackTransportTest() { - receivers_extensions_.Register(kAudioLevelExtensionId); + receivers_extensions_.Register(kAudioLevelExtensionId); receivers_extensions_.Register( kAbsoluteCaptureTimeExtensionId); } - bool SendRtp(rtc::ArrayView data, + bool SendRtp(ArrayView data, const PacketOptions& /*options*/) override { sent_packets_.push_back(RtpPacketReceived(&receivers_extensions_)); EXPECT_TRUE(sent_packets_.back().Parse(data)); return true; } - bool SendRtcp(rtc::ArrayView data) override { return false; } + bool SendRtcp(ArrayView /* data */) override { return false; } const RtpPacketReceived& last_sent_packet() { return sent_packets_.back(); } int packets_sent() { return sent_packets_.size(); } @@ -65,24 +74,22 @@ class RtpSenderAudioTest : public ::testing::Test { public: RtpSenderAudioTest() : fake_clock_(kStartTime), - rtp_module_(ModuleRtpRtcpImpl2::Create([&] { - RtpRtcpInterface::Configuration config; - config.audio = true; - config.clock = &fake_clock_; - config.outgoing_transport = &transport_; - config.local_media_ssrc = kSsrc; - return config; - }())), + env_(CreateEnvironment(&fake_clock_)), + rtp_module_(env_, + {.audio = true, + .outgoing_transport = &transport_, + .local_media_ssrc = kSsrc}), rtp_sender_audio_( std::make_unique(&fake_clock_, - rtp_module_->RtpSender())) { - rtp_module_->SetSequenceNumber(kSeqNum); + rtp_module_.RtpSender())) { + rtp_module_.SetSequenceNumber(kSeqNum); } - rtc::AutoThread main_thread_; + AutoThread main_thread_; SimulatedClock fake_clock_; + const Environment env_; LoopbackTransportTest transport_; - std::unique_ptr rtp_module_; + ModuleRtpRtcpImpl2 rtp_module_; std::unique_ptr rtp_sender_audio_; }; @@ -102,8 +109,8 @@ TEST_F(RtpSenderAudioTest, SendAudio) { TEST_F(RtpSenderAudioTest, SendAudioWithAudioLevelExtension) { const uint8_t 
kAudioLevel = 0x5a; - rtp_module_->RegisterRtpHeaderExtension(AudioLevel::Uri(), - kAudioLevelExtensionId); + rtp_module_.RegisterRtpHeaderExtension(AudioLevelExtension::Uri(), + kAudioLevelExtensionId); const char payload_name[] = "PAYLOAD_NAME"; const uint8_t payload_type = 127; @@ -121,12 +128,11 @@ TEST_F(RtpSenderAudioTest, SendAudioWithAudioLevelExtension) { auto sent_payload = transport_.last_sent_packet().payload(); EXPECT_THAT(sent_payload, ElementsAreArray(payload)); // Verify AudioLevel extension. - bool voice_activity; - uint8_t audio_level; - EXPECT_TRUE(transport_.last_sent_packet().GetExtension( - &voice_activity, &audio_level)); - EXPECT_EQ(kAudioLevel, audio_level); - EXPECT_FALSE(voice_activity); + AudioLevel audio_level; + EXPECT_TRUE(transport_.last_sent_packet().GetExtension( + &audio_level)); + EXPECT_EQ(kAudioLevel, audio_level.level()); + EXPECT_FALSE(audio_level.voice_activity()); } TEST_F(RtpSenderAudioTest, SendAudioWithoutAbsoluteCaptureTime) { @@ -149,8 +155,8 @@ TEST_F(RtpSenderAudioTest, SendAudioWithoutAbsoluteCaptureTime) { TEST_F(RtpSenderAudioTest, SendAudioWithAbsoluteCaptureTimeWithCaptureClockOffset) { - rtp_module_->RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(), - kAbsoluteCaptureTimeExtensionId); + rtp_module_.RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(), + kAbsoluteCaptureTimeExtensionId); constexpr Timestamp kAbsoluteCaptureTimestamp = Timestamp::Millis(521); const char payload_name[] = "audio"; const uint8_t payload_type = 127; @@ -222,4 +228,19 @@ TEST_F(RtpSenderAudioTest, CheckMarkerBitForTelephoneEvents) { EXPECT_FALSE(transport_.last_sent_packet().Marker()); } +TEST_F(RtpSenderAudioTest, SendsCsrcs) { + const char payload_name[] = "audio"; + const uint8_t payload_type = 127; + ASSERT_EQ(0, rtp_sender_audio_->RegisterAudioPayload( + payload_name, payload_type, 48000, 0, 1500)); + uint8_t payload[] = {47, 11, 32, 93, 89}; + + std::vector csrcs({123, 456, 789}); + + ASSERT_TRUE(rtp_sender_audio_->SendAudio( + {.payload = payload, .payload_id = payload_type, .csrcs = csrcs})); + + EXPECT_EQ(transport_.last_sent_packet().Csrcs(), csrcs); +} + } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_sender_egress.cc b/modules/rtp_rtcp/source/rtp_sender_egress.cc index 7265f409c6..57758ad538 100644 --- a/modules/rtp_rtcp/source/rtp_sender_egress.cc +++ b/modules/rtp_rtcp/source/rtp_sender_egress.cc @@ -11,19 +11,42 @@ #include "modules/rtp_rtcp/source/rtp_sender_egress.h" #include -#include +#include +#include #include +#include #include - -#include "absl/strings/match.h" +#include + +#include "api/array_view.h" +#include "api/call/transport.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" +#include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/packet_sequencer.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet_history.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" +#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" +#include 
"rtc_base/bitrate_tracker.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" +#include "rtc_base/task_utils/repeating_task.h" namespace webrtc { namespace { constexpr uint32_t kTimestampTicksPerMs = 90; -constexpr TimeDelta kSendSideDelayWindow = TimeDelta::Seconds(1); constexpr TimeDelta kBitrateStatisticsWindow = TimeDelta::Seconds(1); constexpr size_t kRtpSequenceNumberMapMaxEntries = 1 << 13; constexpr TimeDelta kUpdateInterval = kBitrateStatisticsWindow; @@ -79,38 +102,39 @@ void RtpSenderEgress::NonPacedPacketSender::PrepareForSend( packet->ReserveExtension(); } -RtpSenderEgress::RtpSenderEgress(const RtpRtcpInterface::Configuration& config, +RtpSenderEgress::RtpSenderEgress(const Environment& env, + const RtpRtcpInterface::Configuration& config, RtpPacketHistory* packet_history) - : enable_send_packet_batching_(config.enable_send_packet_batching), + : env_(env), + enable_send_packet_batching_(config.enable_send_packet_batching), worker_queue_(TaskQueueBase::Current()), ssrc_(config.local_media_ssrc), rtx_ssrc_(config.rtx_send_ssrc), flexfec_ssrc_(config.fec_generator ? config.fec_generator->FecSsrc() - : absl::nullopt), + : std::nullopt), populate_network2_timestamp_(config.populate_network2_timestamp), - clock_(config.clock), packet_history_(packet_history), transport_(config.outgoing_transport), - event_log_(config.event_log), is_audio_(config.audio), need_rtp_packet_infos_(config.need_rtp_packet_infos), fec_generator_(config.fec_generator), - transport_feedback_observer_(config.transport_feedback_callback), - send_side_delay_observer_(config.send_side_delay_observer), send_packet_observer_(config.send_packet_observer), rtp_stats_callback_(config.rtp_stats_callback), bitrate_callback_(config.send_bitrate_observer), media_has_been_sent_(false), force_part_of_allocation_(false), timestamp_offset_(0), - max_delay_it_(send_delays_.end()), - sum_delays_(TimeDelta::Zero()), send_rates_(kNumMediaTypes, BitrateTracker(kBitrateStatisticsWindow)), rtp_sequence_number_map_(need_rtp_packet_infos_ ? std::make_unique( kRtpSequenceNumberMapMaxEntries) - : nullptr) { + : nullptr), + use_ntp_time_for_absolute_send_time_(!env_.field_trials().IsDisabled( + "WebRTC-UseNtpTimeAbsoluteSendTime")) { RTC_DCHECK(worker_queue_); + RTC_DCHECK(config.transport_feedback_callback == nullptr) + << "transport_feedback_callback is no longer used and will soon be " + "deleted."; if (bitrate_callback_) { update_task_ = RepeatingTaskHandle::DelayedStart(worker_queue_, kUpdateInterval, [this]() { @@ -151,10 +175,7 @@ void RtpSenderEgress::SendPacket(std::unique_ptr packet, RTC_DCHECK(packet->retransmitted_sequence_number().has_value()); } - const Timestamp now = clock_->CurrentTime(); -#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE - BweTestLoggingPlot(now, packet->Ssrc()); -#endif + const Timestamp now = env_.clock().CurrentTime(); if (need_rtp_packet_infos_ && packet->packet_type() == RtpPacketToSend::Type::kVideo) { // Last packet of a frame, add it to sequence number info map. @@ -169,7 +190,7 @@ void RtpSenderEgress::SendPacket(std::unique_ptr packet, // This packet should be protected by FEC, add it to packet generator. 
RTC_DCHECK(fec_generator_); RTC_DCHECK(packet->packet_type() == RtpPacketMediaType::kVideo); - absl::optional> + std::optional> new_fec_params; new_fec_params.swap(pending_fec_params_); if (new_fec_params) { @@ -179,7 +200,7 @@ void RtpSenderEgress::SendPacket(std::unique_ptr packet, if (packet->is_red()) { RtpPacketToSend unpacked_packet(*packet); - const rtc::CopyOnWriteBuffer buffer = packet->Buffer(); + const CopyOnWriteBuffer buffer = packet->Buffer(); // Grab media payload type from RED header. const size_t headers_size = packet->headers_size(); unpacked_packet.SetPayloadType(buffer[headers_size]); @@ -211,7 +232,17 @@ void RtpSenderEgress::SendPacket(std::unique_ptr packet, packet->SetExtension(kTimestampTicksPerMs * diff.ms()); } if (packet->HasExtension()) { - packet->SetExtension(AbsoluteSendTime::To24Bits(now)); + if (use_ntp_time_for_absolute_send_time_) { + packet->SetExtension(AbsoluteSendTime::To24Bits( + env_.clock().ConvertTimestampToNtpTime(now))); + } else { + packet->SetExtension(AbsoluteSendTime::To24Bits(now)); + } + } + if (packet->HasExtension() && + packet->transport_sequence_number()) { + packet->SetExtension( + *packet->transport_sequence_number() & 0xFFFF); } if (packet->HasExtension()) { @@ -243,37 +274,42 @@ void RtpSenderEgress::CompleteSendPacket(const Packet& compound_packet, RTC_DCHECK_RUN_ON(worker_queue_); auto& [packet, pacing_info, now] = compound_packet; RTC_CHECK(packet); - const bool is_media = packet->packet_type() == RtpPacketMediaType::kAudio || - packet->packet_type() == RtpPacketMediaType::kVideo; PacketOptions options; options.included_in_allocation = force_part_of_allocation_; - - // Downstream code actually uses this flag to distinguish between media and - // everything else. - options.is_retransmit = !is_media; - if (auto packet_id = packet->GetExtension()) { + options.is_media = packet->packet_type() == RtpPacketMediaType::kAudio || + packet->packet_type() == RtpPacketMediaType::kVideo; + + // Set Packet id from transport sequence number header extension if it is + // used. The source of the header extension is + // RtpPacketToSend::transport_sequence_number(), but the extension is only 16 + // bit and will wrap. We should be able to use the 64bit value as id, but in + // order to not change behaviour we use the 16bit extension value if it is + // used. 
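Reviewer note, restating the packet-id comment above since it is easy to misread in diff form: SendPacket() writes the header extension as the low 16 bits of the 64-bit RtpPacketToSend::transport_sequence_number(), and CompleteSendPacket() then prefers that wrapped extension value as PacketOptions::packet_id, falling back to the full 64-bit value only when the extension is not in use. A tiny worked example under those assumptions:

// Illustration only. With transport_sequence_number() == 65541 (0x10005):
//   - extension value on the wire: 65541 & 0xFFFF == 5
//   - options.packet_id: 5 if the TransportSequenceNumber extension is used,
//     otherwise 65541.
uint16_t wire_value = static_cast<uint16_t>(65541 & 0xFFFF);  // == 5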
+ std::optional packet_id = + packet->GetExtension(); + if (packet_id.has_value()) { options.packet_id = *packet_id; options.included_in_feedback = true; options.included_in_allocation = true; - AddPacketToTransportFeedback(*packet_id, *packet, pacing_info); + } else if (packet->transport_sequence_number()) { + options.packet_id = *packet->transport_sequence_number(); } - options.additional_data = packet->additional_data(); - - const uint32_t packet_ssrc = packet->Ssrc(); if (packet->packet_type() != RtpPacketMediaType::kPadding && - packet->packet_type() != RtpPacketMediaType::kRetransmission) { - UpdateDelayStatistics(packet->capture_time(), now, packet_ssrc); - UpdateOnSendPacket(options.packet_id, packet->capture_time(), packet_ssrc); + packet->packet_type() != RtpPacketMediaType::kRetransmission && + send_packet_observer_ != nullptr && packet->capture_time().IsFinite()) { + send_packet_observer_->OnSendPacket(packet_id, packet->capture_time(), + packet->Ssrc()); } + options.send_as_ect1 = packet->send_as_ect1(); options.batchable = enable_send_packet_batching_ && !is_audio_; options.last_packet_in_batch = last_in_batch; const bool send_success = SendPacketToNetwork(*packet, options, pacing_info); // Put packet in retransmission history or update pending status even if // actual sending fails. - if (is_media && packet->allow_retransmission()) { + if (options.is_media && packet->allow_retransmission()) { packet_history_->PutRtpPacket(std::make_unique(*packet), now); } else if (packet->retransmitted_sequence_number()) { @@ -292,8 +328,8 @@ void RtpSenderEgress::CompleteSendPacket(const Packet& compound_packet, RTC_DCHECK(packet->packet_type().has_value()); RtpPacketMediaType packet_type = *packet->packet_type(); RtpPacketCounter counter(*packet); - size_t size = packet->size(); - UpdateRtpStats(now, packet_ssrc, packet_type, std::move(counter), size); + UpdateRtpStats(now, packet->Ssrc(), packet_type, std::move(counter), + packet->size()); } } @@ -310,8 +346,15 @@ RtpSendRates RtpSenderEgress::GetSendRates(Timestamp now) const { void RtpSenderEgress::GetDataCounters(StreamDataCounters* rtp_stats, StreamDataCounters* rtx_stats) const { RTC_DCHECK_RUN_ON(worker_queue_); - *rtp_stats = rtp_stats_; - *rtx_stats = rtx_rtp_stats_; + if (rtp_stats_callback_) { + *rtp_stats = rtp_stats_callback_->GetDataCounters(ssrc_); + if (rtx_ssrc_.has_value()) { + *rtx_stats = rtp_stats_callback_->GetDataCounters(*rtx_ssrc_); + } + } else { + *rtp_stats = rtp_stats_; + *rtx_stats = rtx_rtp_stats_; + } } void RtpSenderEgress::ForceIncludeSendPacketsInAllocation( @@ -336,7 +379,7 @@ void RtpSenderEgress::SetTimestampOffset(uint32_t timestamp) { } std::vector RtpSenderEgress::GetSentRtpPacketInfos( - rtc::ArrayView sequence_numbers) const { + ArrayView sequence_numbers) const { RTC_DCHECK_RUN_ON(worker_queue_); RTC_DCHECK(!sequence_numbers.empty()); if (!need_rtp_packet_infos_) { @@ -376,7 +419,7 @@ RtpSenderEgress::FetchFecPackets() { } void RtpSenderEgress::OnAbortedRetransmissions( - rtc::ArrayView sequence_numbers) { + ArrayView sequence_numbers) { RTC_DCHECK_RUN_ON(worker_queue_); // Mark aborted retransmissions as sent, rather than leaving them in // a 'pending' state - otherwise they can not be requested again and @@ -403,138 +446,17 @@ bool RtpSenderEgress::HasCorrectSsrc(const RtpPacketToSend& packet) const { return false; } -void RtpSenderEgress::AddPacketToTransportFeedback( - uint16_t packet_id, - const RtpPacketToSend& packet, - const PacedPacketInfo& pacing_info) { - if 
(transport_feedback_observer_) { - RtpPacketSendInfo packet_info; - packet_info.transport_sequence_number = packet_id; - packet_info.rtp_timestamp = packet.Timestamp(); - packet_info.length = packet.size(); - packet_info.pacing_info = pacing_info; - packet_info.packet_type = packet.packet_type(); - - switch (*packet_info.packet_type) { - case RtpPacketMediaType::kAudio: - case RtpPacketMediaType::kVideo: - packet_info.media_ssrc = ssrc_; - packet_info.rtp_sequence_number = packet.SequenceNumber(); - break; - case RtpPacketMediaType::kRetransmission: - // For retransmissions, we're want to remove the original media packet - // if the retransmit arrives - so populate that in the packet info. - packet_info.media_ssrc = ssrc_; - packet_info.rtp_sequence_number = - *packet.retransmitted_sequence_number(); - break; - case RtpPacketMediaType::kPadding: - case RtpPacketMediaType::kForwardErrorCorrection: - // We're not interested in feedback about these packets being received - // or lost. - break; - } - - transport_feedback_observer_->OnAddPacket(packet_info); - } -} - -void RtpSenderEgress::UpdateDelayStatistics(Timestamp capture_time, - Timestamp now, - uint32_t ssrc) { - RTC_DCHECK_RUN_ON(worker_queue_); - if (!send_side_delay_observer_ || capture_time.IsInfinite()) - return; - - TimeDelta avg_delay = TimeDelta::Zero(); - TimeDelta max_delay = TimeDelta::Zero(); - { - // Compute the max and average of the recent capture-to-send delays. - // The time complexity of the current approach depends on the distribution - // of the delay values. This could be done more efficiently. - - // Remove elements older than kSendSideDelayWindowMs. - auto lower_bound = send_delays_.lower_bound(now - kSendSideDelayWindow); - for (auto it = send_delays_.begin(); it != lower_bound; ++it) { - if (max_delay_it_ == it) { - max_delay_it_ = send_delays_.end(); - } - sum_delays_ -= it->second; - } - send_delays_.erase(send_delays_.begin(), lower_bound); - if (max_delay_it_ == send_delays_.end()) { - // Removed the previous max. Need to recompute. - RecomputeMaxSendDelay(); - } - - // Add the new element. - TimeDelta new_send_delay = now - capture_time; - auto [it, inserted] = send_delays_.emplace(now, new_send_delay); - if (!inserted) { - // TODO(terelius): If we have multiple delay measurements during the same - // millisecond then we keep the most recent one. It is not clear that this - // is the right decision, but it preserves an earlier behavior. 
- TimeDelta previous_send_delay = it->second; - sum_delays_ -= previous_send_delay; - it->second = new_send_delay; - if (max_delay_it_ == it && new_send_delay < previous_send_delay) { - RecomputeMaxSendDelay(); - } - } - if (max_delay_it_ == send_delays_.end() || - it->second >= max_delay_it_->second) { - max_delay_it_ = it; - } - sum_delays_ += new_send_delay; - - size_t num_delays = send_delays_.size(); - RTC_DCHECK(max_delay_it_ != send_delays_.end()); - max_delay = max_delay_it_->second; - avg_delay = sum_delays_ / num_delays; - } - send_side_delay_observer_->SendSideDelayUpdated(avg_delay.ms(), - max_delay.ms(), ssrc); -} - -void RtpSenderEgress::RecomputeMaxSendDelay() { - RTC_DCHECK_RUN_ON(worker_queue_); - max_delay_it_ = send_delays_.begin(); - for (auto it = send_delays_.begin(); it != send_delays_.end(); ++it) { - if (it->second >= max_delay_it_->second) { - max_delay_it_ = it; - } - } -} - -void RtpSenderEgress::UpdateOnSendPacket(int packet_id, - Timestamp capture_time, - uint32_t ssrc) { - if (!send_packet_observer_ || capture_time.IsInfinite() || packet_id == -1) { - return; - } - - send_packet_observer_->OnSendPacket(packet_id, capture_time, ssrc); -} - bool RtpSenderEgress::SendPacketToNetwork(const RtpPacketToSend& packet, const PacketOptions& options, const PacedPacketInfo& pacing_info) { RTC_DCHECK_RUN_ON(worker_queue_); - int bytes_sent = -1; - if (transport_) { - bytes_sent = transport_->SendRtp(packet, options) - ? static_cast(packet.size()) - : -1; - if (event_log_ && bytes_sent > 0) { - event_log_->Log(std::make_unique( - packet, pacing_info.probe_cluster_id)); - } - } - - if (bytes_sent <= 0) { + if (transport_ == nullptr || !transport_->SendRtp(packet, options)) { RTC_LOG(LS_WARNING) << "Transport failed to send packet."; return false; } + + env_.event_log().Log(std::make_unique( + packet, pacing_info.probe_cluster_id)); return true; } @@ -549,8 +471,13 @@ void RtpSenderEgress::UpdateRtpStats(Timestamp now, // worker thread. RtpSendRates send_rates; - StreamDataCounters* counters = - packet_ssrc == rtx_ssrc_ ? &rtx_rtp_stats_ : &rtp_stats_; + StreamDataCounters* counters = nullptr; + if (rtp_stats_callback_) { + rtp_stats_ = rtp_stats_callback_->GetDataCounters(packet_ssrc); + counters = &rtp_stats_; + } else { + counters = packet_ssrc == rtx_ssrc_ ? &rtx_rtp_stats_ : &rtp_stats_; + } counters->MaybeSetFirstPacketTime(now); @@ -559,16 +486,16 @@ void RtpSenderEgress::UpdateRtpStats(Timestamp now, } else if (packet_type == RtpPacketMediaType::kRetransmission) { counters->retransmitted.Add(counter); } - counters->transmitted.Add(counter); + counters->transmitted.Add(counter); - send_rates_[static_cast(packet_type)].Update(packet_size, now); - if (bitrate_callback_) { + send_rates_[static_cast(packet_type)].Update(packet_size, now); + if (bitrate_callback_) { send_rates = GetSendRates(now); - } + } - if (rtp_stats_callback_) { - rtp_stats_callback_->DataCountersUpdated(*counters, packet_ssrc); - } + if (rtp_stats_callback_) { + rtp_stats_callback_->DataCountersUpdated(*counters, packet_ssrc); + } // The bitrate_callback_ and rtp_stats_callback_ pointers in practice point // to the same object, so these callbacks could be consolidated into one. 
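The counter handling shown above can be summarized with a simplified sketch; the types below are illustrative stand-ins rather than the real WebRTC classes. When a stats callback is configured it is treated as the source of truth for per-SSRC counters, and the locally held members are only a fallback.

#include <cstdint>
#include <map>

struct Counters {
  uint64_t packets = 0;
};

// Stand-in for a StreamDataCountersCallback-like owner of per-SSRC counters.
class StatsCallback {
 public:
  Counters Get(uint32_t ssrc) const {
    auto it = by_ssrc_.find(ssrc);
    return it != by_ssrc_.end() ? it->second : Counters();
  }
  void Update(uint32_t ssrc, const Counters& counters) {
    by_ssrc_[ssrc] = counters;
  }

 private:
  std::map<uint32_t, Counters> by_ssrc_;
};

// Prefer callback-owned counters when a callback exists, otherwise fall back
// to the locally kept counters.
Counters GetCountersFor(const StatsCallback* callback,
                        const Counters& local_fallback,
                        uint32_t ssrc) {
  return callback != nullptr ? callback->Get(ssrc) : local_fallback;
}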
@@ -582,30 +509,9 @@ void RtpSenderEgress::UpdateRtpStats(Timestamp now, void RtpSenderEgress::PeriodicUpdate() { RTC_DCHECK_RUN_ON(worker_queue_); RTC_DCHECK(bitrate_callback_); - RtpSendRates send_rates = GetSendRates(clock_->CurrentTime()); + RtpSendRates send_rates = GetSendRates(env_.clock().CurrentTime()); bitrate_callback_->Notify( send_rates.Sum().bps(), send_rates[RtpPacketMediaType::kRetransmission].bps(), ssrc_); } - -#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE -void RtpSenderEgress::BweTestLoggingPlot(Timestamp now, uint32_t packet_ssrc) { - RTC_DCHECK_RUN_ON(worker_queue_); - - const auto rates = GetSendRates(now); - if (is_audio_) { - BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "AudioTotBitrate_kbps", now.ms(), - rates.Sum().kbps(), packet_ssrc); - BWE_TEST_LOGGING_PLOT_WITH_SSRC( - 1, "AudioNackBitrate_kbps", now.ms(), - rates[RtpPacketMediaType::kRetransmission].kbps(), packet_ssrc); - } else { - BWE_TEST_LOGGING_PLOT_WITH_SSRC(1, "VideoTotBitrate_kbps", now.ms(), - rates.Sum().kbps(), packet_ssrc); - BWE_TEST_LOGGING_PLOT_WITH_SSRC( - 1, "VideoNackBitrate_kbps", now.ms(), - rates[RtpPacketMediaType::kRetransmission].kbps(), packet_ssrc); - } -} -#endif // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_sender_egress.h b/modules/rtp_rtcp/source/rtp_sender_egress.h index 3e5b2b21c3..6d3cdd38c1 100644 --- a/modules/rtp_rtcp/source/rtp_sender_egress.h +++ b/modules/rtp_rtcp/source/rtp_sender_egress.h @@ -11,30 +11,30 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTP_SENDER_EGRESS_H_ #define MODULES_RTP_RTCP_SOURCE_RTP_SENDER_EGRESS_H_ -#include +#include +#include #include +#include #include #include -#include "absl/types/optional.h" +#include "api/array_view.h" #include "api/call/transport.h" -#include "api/rtc_event_log/rtc_event_log.h" -#include "api/sequence_checker.h" +#include "api/environment/environment.h" +#include "api/rtp_packet_sender.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" -#include "api/units/data_rate.h" -#include "api/units/time_delta.h" +#include "api/transport/network_types.h" #include "api/units/timestamp.h" -#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" +#include "modules/include/module_fec_types.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/packet_sequencer.h" #include "modules/rtp_rtcp/source/rtp_packet_history.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" +#include "modules/rtp_rtcp/source/video_fec_generator.h" #include "rtc_base/bitrate_tracker.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" @@ -54,7 +54,7 @@ class RtpSenderEgress { void EnqueuePackets( std::vector> packets) override; // Since we don't pace packets, there's no pending packets to remove. 
- void RemovePacketsForSsrc(uint32_t ssrc) override {} + void RemovePacketsForSsrc(uint32_t /* ssrc */) override {} private: void PrepareForSend(RtpPacketToSend* packet); @@ -65,7 +65,8 @@ class RtpSenderEgress { ScopedTaskSafety task_safety_; }; - RtpSenderEgress(const RtpRtcpInterface::Configuration& config, + RtpSenderEgress(const Environment& env, + const RtpRtcpInterface::Configuration& config, RtpPacketHistory* packet_history); ~RtpSenderEgress(); @@ -73,8 +74,8 @@ class RtpSenderEgress { const PacedPacketInfo& pacing_info); void OnBatchComplete(); uint32_t Ssrc() const { return ssrc_; } - absl::optional RtxSsrc() const { return rtx_ssrc_; } - absl::optional FlexFecSsrc() const { return flexfec_ssrc_; } + std::optional RtxSsrc() const { return rtx_ssrc_; } + std::optional FlexFecSsrc() const { return flexfec_ssrc_; } RtpSendRates GetSendRates(Timestamp now) const; void GetDataCounters(StreamDataCounters* rtp_stats, @@ -92,15 +93,14 @@ class RtpSenderEgress { // recalled, return a vector with all of them (in corresponding order). // If any could not be recalled, return an empty vector. std::vector GetSentRtpPacketInfos( - rtc::ArrayView sequence_numbers) const; + ArrayView sequence_numbers) const; void SetFecProtectionParameters(const FecProtectionParams& delta_params, const FecProtectionParams& key_params); std::vector> FetchFecPackets(); // Clears pending status for these sequence numbers in the packet history. - void OnAbortedRetransmissions( - rtc::ArrayView sequence_numbers); + void OnAbortedRetransmissions(ArrayView sequence_numbers); private: struct Packet { @@ -110,14 +110,7 @@ class RtpSenderEgress { }; void CompleteSendPacket(const Packet& compound_packet, bool last_in_batch); bool HasCorrectSsrc(const RtpPacketToSend& packet) const; - void AddPacketToTransportFeedback(uint16_t packet_id, - const RtpPacketToSend& packet, - const PacedPacketInfo& pacing_info); - void UpdateDelayStatistics(Timestamp capture_time, - Timestamp now, - uint32_t ssrc); - void RecomputeMaxSendDelay(); - void UpdateOnSendPacket(int packet_id, Timestamp capture_time, uint32_t ssrc); + // Sends packet on to `transport_`, leaving the RTP module. bool SendPacketToNetwork(const RtpPacketToSend& packet, const PacketOptions& options, @@ -127,31 +120,25 @@ class RtpSenderEgress { RtpPacketMediaType packet_type, RtpPacketCounter counter, size_t packet_size); -#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE - void BweTestLoggingPlot(Timestamp now, uint32_t packet_ssrc); -#endif // Called on a timer, once a second, on the worker_queue_. 
void PeriodicUpdate(); + const Environment env_; const bool enable_send_packet_batching_; TaskQueueBase* const worker_queue_; const uint32_t ssrc_; - const absl::optional<uint32_t> rtx_ssrc_; - const absl::optional<uint32_t> flexfec_ssrc_; + const std::optional<uint32_t> rtx_ssrc_; + const std::optional<uint32_t> flexfec_ssrc_; const bool populate_network2_timestamp_; - Clock* const clock_; RtpPacketHistory* const packet_history_ RTC_GUARDED_BY(worker_queue_); Transport* const transport_; - RtcEventLog* const event_log_; const bool is_audio_; const bool need_rtp_packet_infos_; VideoFecGenerator* const fec_generator_ RTC_GUARDED_BY(worker_queue_); - absl::optional<uint16_t> last_sent_seq_ RTC_GUARDED_BY(worker_queue_); - absl::optional<uint16_t> last_sent_rtx_seq_ RTC_GUARDED_BY(worker_queue_); + std::optional<uint16_t> last_sent_seq_ RTC_GUARDED_BY(worker_queue_); + std::optional<uint16_t> last_sent_rtx_seq_ RTC_GUARDED_BY(worker_queue_); - TransportFeedbackObserver* const transport_feedback_observer_; - SendSideDelayObserver* const send_side_delay_observer_; SendPacketObserver* const send_packet_observer_; StreamDataCountersCallback* const rtp_stats_callback_; BitrateStatisticsObserver* const bitrate_callback_; @@ -160,18 +147,13 @@ class RtpSenderEgress { bool force_part_of_allocation_ RTC_GUARDED_BY(worker_queue_); uint32_t timestamp_offset_ RTC_GUARDED_BY(worker_queue_); - // Maps capture time to send-side delay. Send-side delay is the difference - // between transmission time and capture time. - std::map<Timestamp, TimeDelta> send_delays_ RTC_GUARDED_BY(worker_queue_); - std::map<Timestamp, TimeDelta>::const_iterator max_delay_it_ - RTC_GUARDED_BY(worker_queue_); - // The sum of delays over a kSendSideDelayWindowMs sliding window. - TimeDelta sum_delays_ RTC_GUARDED_BY(worker_queue_); + // These counters are only used if `rtp_stats_callback_` is null. StreamDataCounters rtp_stats_ RTC_GUARDED_BY(worker_queue_); StreamDataCounters rtx_rtp_stats_ RTC_GUARDED_BY(worker_queue_); + // One element per value in RtpPacketMediaType, with index matching value.
std::vector send_rates_ RTC_GUARDED_BY(worker_queue_); - absl::optional> + std::optional> pending_fec_params_ RTC_GUARDED_BY(worker_queue_); // Maps sent packets' sequence numbers to a tuple consisting of: @@ -183,6 +165,7 @@ class RtpSenderEgress { RepeatingTaskHandle update_task_ RTC_GUARDED_BY(worker_queue_); std::vector packets_to_send_ RTC_GUARDED_BY(worker_queue_); ScopedTaskSafety task_safety_; + const bool use_ntp_time_for_absolute_send_time_; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc index 6c798e4595..833a93bfce 100644 --- a/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_sender_egress_unittest.cc @@ -10,23 +10,32 @@ #include "modules/rtp_rtcp/source/rtp_sender_egress.h" -#include +#include +#include +#include +#include +#include +#include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/call/transport.h" -#include "api/field_trials_registry.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/transport/network_types.h" #include "api/units/data_size.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/mock/mock_rtc_event_log.h" +#include "api/video/video_timing.h" #include "modules/rtp_rtcp/include/flexfec_sender.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_header_extension_size.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_history.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "test/explicit_key_value_config.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" +#include "rtc_base/checks.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/time_controller/simulated_time_controller.h" @@ -36,10 +45,10 @@ namespace { using ::testing::_; using ::testing::AllOf; +using ::testing::Eq; using ::testing::Field; using ::testing::InSequence; using ::testing::NiceMock; -using ::testing::StrictMock; constexpr Timestamp kStartTime = Timestamp::Millis(123456789); constexpr int kDefaultPayloadType = 100; @@ -57,29 +66,33 @@ enum : int { class MockSendPacketObserver : public SendPacketObserver { public: - MOCK_METHOD(void, OnSendPacket, (uint16_t, Timestamp, uint32_t), (override)); -}; - -class MockTransportFeedbackObserver : public TransportFeedbackObserver { - public: - MOCK_METHOD(void, OnAddPacket, (const RtpPacketSendInfo&), (override)); + MOCK_METHOD(void, + OnSendPacket, + (std::optional, Timestamp, uint32_t), + (override)); }; class MockStreamDataCountersCallback : public StreamDataCountersCallback { public: MOCK_METHOD(void, - DataCountersUpdated, - (const StreamDataCounters& counters, uint32_t ssrc), - (override)); -}; + MockDataCountersUpdated, + (const StreamDataCounters& counters, uint32_t ssrc)); -class MockSendSideDelayObserver : public SendSideDelayObserver { - public: - MOCK_METHOD(void, SendSideDelayUpdated, (int, int, uint32_t), (override)); + StreamDataCounters GetDataCounters(uint32_t ssrc) const override { + auto it = counters_by_ssrc.find(ssrc); + return it != counters_by_ssrc.end() ? 
it->second : StreamDataCounters(); + } + void DataCountersUpdated(const StreamDataCounters& counters, + uint32_t ssrc) override { + MockDataCountersUpdated(counters, ssrc); + counters_by_ssrc[ssrc] = counters; + } + + std::map counters_by_ssrc; }; struct TransmittedPacket { - TransmittedPacket(rtc::ArrayView data, + TransmittedPacket(ArrayView data, const PacketOptions& packet_options, RtpHeaderExtensionMap* extensions) : packet(extensions), options(packet_options) { @@ -94,7 +107,7 @@ class TestTransport : public Transport { explicit TestTransport(RtpHeaderExtensionMap* extensions) : total_data_sent_(DataSize::Zero()), extensions_(extensions) {} MOCK_METHOD(void, SentRtp, (const PacketOptions& options), ()); - bool SendRtp(rtc::ArrayView packet, + bool SendRtp(ArrayView packet, const PacketOptions& options) override { total_data_sent_ += DataSize::Bytes(packet.size()); last_packet_.emplace(packet, options, extensions_); @@ -102,15 +115,13 @@ class TestTransport : public Transport { return true; } - bool SendRtcp(rtc::ArrayView) override { - RTC_CHECK_NOTREACHED(); - } + bool SendRtcp(ArrayView) override { RTC_CHECK_NOTREACHED(); } - absl::optional last_packet() { return last_packet_; } + std::optional last_packet() { return last_packet_; } private: DataSize total_data_sent_; - absl::optional last_packet_; + std::optional last_packet_; RtpHeaderExtensionMap* const extensions_; }; @@ -120,30 +131,27 @@ class RtpSenderEgressTest : public ::testing::Test { protected: RtpSenderEgressTest() : time_controller_(kStartTime), - clock_(time_controller_.GetClock()), + env_(CreateEnvironment(time_controller_.GetClock())), transport_(&header_extensions_), - packet_history_(clock_, /*enable_rtx_padding_prioritization=*/true), - trials_(""), + packet_history_(env_, + RtpPacketHistory::PaddingMode::kRecentLargePacket), sequence_number_(kStartSequenceNumber) {} std::unique_ptr CreateRtpSenderEgress() { - return std::make_unique(DefaultConfig(), &packet_history_); + return std::make_unique(env_, DefaultConfig(), + &packet_history_); } RtpRtcpInterface::Configuration DefaultConfig() { RtpRtcpInterface::Configuration config; config.audio = false; - config.clock = clock_; config.outgoing_transport = &transport_; config.local_media_ssrc = kSsrc; config.rtx_send_ssrc = kRtxSsrc; config.fec_generator = nullptr; - config.event_log = &mock_rtc_event_log_; config.send_packet_observer = &send_packet_observer_; config.rtp_stats_callback = &mock_rtp_stats_callback_; - config.transport_feedback_callback = &feedback_observer_; config.populate_network2_timestamp = false; - config.field_trials = &trials_; return config; } @@ -165,50 +173,19 @@ class RtpSenderEgressTest : public ::testing::Test { } std::unique_ptr BuildRtpPacket() { - return BuildRtpPacket(/*marker_bit=*/true, clock_->CurrentTime().ms()); + return BuildRtpPacket(/*marker_bit=*/true, env_.clock().CurrentTime().ms()); } GlobalSimulatedTimeController time_controller_; - Clock* const clock_; - NiceMock mock_rtc_event_log_; + const Environment env_; NiceMock mock_rtp_stats_callback_; NiceMock send_packet_observer_; - NiceMock feedback_observer_; RtpHeaderExtensionMap header_extensions_; NiceMock transport_; RtpPacketHistory packet_history_; - test::ExplicitKeyValueConfig trials_; uint16_t sequence_number_; }; -TEST_F(RtpSenderEgressTest, TransportFeedbackObserverGetsCorrectByteCount) { - constexpr size_t kRtpOverheadBytesPerPacket = 12 + 8; - constexpr size_t kPayloadSize = 1400; - const uint16_t kTransportSequenceNumber = 17; - - 
header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, - TransportSequenceNumber::Uri()); - - const size_t expected_bytes = kPayloadSize + kRtpOverheadBytesPerPacket; - - EXPECT_CALL( - feedback_observer_, - OnAddPacket(AllOf( - Field(&RtpPacketSendInfo::media_ssrc, kSsrc), - Field(&RtpPacketSendInfo::transport_sequence_number, - kTransportSequenceNumber), - Field(&RtpPacketSendInfo::rtp_sequence_number, kStartSequenceNumber), - Field(&RtpPacketSendInfo::length, expected_bytes), - Field(&RtpPacketSendInfo::pacing_info, PacedPacketInfo())))); - - std::unique_ptr packet = BuildRtpPacket(); - packet->SetExtension(kTransportSequenceNumber); - packet->AllocatePayload(kPayloadSize); - - std::unique_ptr sender = CreateRtpSenderEgress(); - sender->SendPacket(std::move(packet), PacedPacketInfo()); -} - TEST_F(RtpSenderEgressTest, SendsPacketsOneByOneWhenNotBatching) { std::unique_ptr sender = CreateRtpSenderEgress(); EXPECT_CALL(transport_, @@ -221,7 +198,8 @@ TEST_F(RtpSenderEgressTest, SendsPacketsOneByOneWhenBatchingWithAudio) { auto config = DefaultConfig(); config.enable_send_packet_batching = true; config.audio = true; - auto sender = std::make_unique(config, &packet_history_); + auto sender = + std::make_unique(env_, config, &packet_history_); EXPECT_CALL(transport_, SentRtp(AllOf(Field(&PacketOptions::last_packet_in_batch, false), Field(&PacketOptions::batchable, false)))) @@ -233,9 +211,9 @@ TEST_F(RtpSenderEgressTest, SendsPacketsOneByOneWhenBatchingWithAudio) { TEST_F(RtpSenderEgressTest, CollectsPacketsWhenBatchingWithVideo) { auto config = DefaultConfig(); config.enable_send_packet_batching = true; - auto sender = std::make_unique(config, &packet_history_); - sender->SendPacket(BuildRtpPacket(), PacedPacketInfo()); - sender->SendPacket(BuildRtpPacket(), PacedPacketInfo()); + RtpSenderEgress sender(env_, config, &packet_history_); + sender.SendPacket(BuildRtpPacket(), PacedPacketInfo()); + sender.SendPacket(BuildRtpPacket(), PacedPacketInfo()); InSequence s; EXPECT_CALL(transport_, SentRtp(AllOf(Field(&PacketOptions::last_packet_in_batch, false), @@ -243,23 +221,23 @@ TEST_F(RtpSenderEgressTest, CollectsPacketsWhenBatchingWithVideo) { EXPECT_CALL(transport_, SentRtp(AllOf(Field(&PacketOptions::last_packet_in_batch, true), Field(&PacketOptions::batchable, true)))); - sender->OnBatchComplete(); + sender.OnBatchComplete(); } -TEST_F(RtpSenderEgressTest, PacketOptionsIsRetransmitSetByPacketType) { +TEST_F(RtpSenderEgressTest, PacketOptionsIsMediaSetByPacketType) { std::unique_ptr sender = CreateRtpSenderEgress(); std::unique_ptr media_packet = BuildRtpPacket(); auto sequence_number = media_packet->SequenceNumber(); media_packet->set_packet_type(RtpPacketMediaType::kVideo); sender->SendPacket(std::move(media_packet), PacedPacketInfo()); - EXPECT_FALSE(transport_.last_packet()->options.is_retransmit); + EXPECT_TRUE(transport_.last_packet()->options.is_media); std::unique_ptr retransmission = BuildRtpPacket(); retransmission->set_packet_type(RtpPacketMediaType::kRetransmission); retransmission->set_retransmitted_sequence_number(sequence_number); sender->SendPacket(std::move(retransmission), PacedPacketInfo()); - EXPECT_TRUE(transport_.last_packet()->options.is_retransmit); + EXPECT_FALSE(transport_.last_packet()->options.is_media); } TEST_F(RtpSenderEgressTest, DoesnSetIncludedInAllocationByDefault) { @@ -278,6 +256,7 @@ TEST_F(RtpSenderEgressTest, header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); std::unique_ptr packet = 
BuildRtpPacket(); + packet->set_transport_sequence_number(1); sender->SendPacket(std::move(packet), PacedPacketInfo()); EXPECT_TRUE(transport_.last_packet()->options.included_in_feedback); } @@ -290,6 +269,7 @@ TEST_F( header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); std::unique_ptr packet = BuildRtpPacket(); + packet->set_transport_sequence_number(1); sender->SendPacket(std::move(packet), PacedPacketInfo()); EXPECT_TRUE(transport_.last_packet()->options.included_in_allocation); } @@ -320,53 +300,11 @@ TEST_F(RtpSenderEgressTest, std::unique_ptr sender = CreateRtpSenderEgress(); sender->SendPacket(std::move(padding), PacedPacketInfo()); - absl::optional offset = + std::optional offset = transport_.last_packet()->packet.GetExtension(); EXPECT_EQ(offset, 0); } -TEST_F(RtpSenderEgressTest, OnSendSideDelayUpdated) { - StrictMock send_side_delay_observer; - RtpRtcpInterface::Configuration config = DefaultConfig(); - config.send_side_delay_observer = &send_side_delay_observer; - auto sender = std::make_unique(config, &packet_history_); - - // Send packet with 10 ms send-side delay. The average, max and total should - // be 10 ms. - EXPECT_CALL(send_side_delay_observer, SendSideDelayUpdated(10, 10, kSsrc)); - int64_t capture_time_ms = clock_->TimeInMilliseconds(); - time_controller_.AdvanceTime(TimeDelta::Millis(10)); - sender->SendPacket(BuildRtpPacket(/*marker=*/true, capture_time_ms), - PacedPacketInfo()); - - // Send another packet with 20 ms delay. The average, max and total should be - // 15, 20 and 30 ms respectively. - EXPECT_CALL(send_side_delay_observer, SendSideDelayUpdated(15, 20, kSsrc)); - capture_time_ms = clock_->TimeInMilliseconds(); - time_controller_.AdvanceTime(TimeDelta::Millis(20)); - sender->SendPacket(BuildRtpPacket(/*marker=*/true, capture_time_ms), - PacedPacketInfo()); - - // Send another packet at the same time, which replaces the last packet. - // Since this packet has 0 ms delay, the average is now 5 ms and max is 10 ms. - // The total counter stays the same though. - // TODO(terelius): Is is not clear that this is the right behavior. - EXPECT_CALL(send_side_delay_observer, SendSideDelayUpdated(5, 10, kSsrc)); - capture_time_ms = clock_->TimeInMilliseconds(); - sender->SendPacket(BuildRtpPacket(/*marker=*/true, capture_time_ms), - PacedPacketInfo()); - - // Send a packet 1 second later. The earlier packets should have timed - // out, so both max and average should be the delay of this packet. The total - // keeps increasing. 
- time_controller_.AdvanceTime(TimeDelta::Seconds(1)); - EXPECT_CALL(send_side_delay_observer, SendSideDelayUpdated(1, 1, kSsrc)); - capture_time_ms = clock_->TimeInMilliseconds(); - time_controller_.AdvanceTime(TimeDelta::Millis(1)); - sender->SendPacket(BuildRtpPacket(/*marker=*/true, capture_time_ms), - PacedPacketInfo()); -} - TEST_F(RtpSenderEgressTest, WritesPacerExitToTimingExtension) { std::unique_ptr sender = CreateRtpSenderEgress(); header_extensions_.RegisterByUri(kVideoTimingExtensionId, @@ -390,7 +328,7 @@ TEST_F(RtpSenderEgressTest, WritesPacerExitToTimingExtension) { TEST_F(RtpSenderEgressTest, WritesNetwork2ToTimingExtension) { RtpRtcpInterface::Configuration rtp_config = DefaultConfig(); rtp_config.populate_network2_timestamp = true; - auto sender = std::make_unique(rtp_config, &packet_history_); + RtpSenderEgress sender(env_, rtp_config, &packet_history_); header_extensions_.RegisterByUri(kVideoTimingExtensionId, VideoTimingExtension::Uri()); @@ -402,7 +340,7 @@ TEST_F(RtpSenderEgressTest, WritesNetwork2ToTimingExtension) { const int kStoredTimeInMs = 100; time_controller_.AdvanceTime(TimeDelta::Millis(kStoredTimeInMs)); - sender->SendPacket(std::move(packet), PacedPacketInfo()); + sender.SendPacket(std::move(packet), PacedPacketInfo()); ASSERT_TRUE(transport_.last_packet().has_value()); VideoSendTiming video_timing; @@ -413,20 +351,46 @@ TEST_F(RtpSenderEgressTest, WritesNetwork2ToTimingExtension) { EXPECT_EQ(video_timing.pacer_exit_delta_ms, kPacerExitMs); } +TEST_F(RtpSenderEgressTest, WritesTransportSequenceNumberExtensionIfAllocated) { + RtpSenderEgress sender(env_, DefaultConfig(), &packet_history_); + header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, + TransportSequenceNumber::Uri()); + std::unique_ptr packet = BuildRtpPacket(); + ASSERT_TRUE(packet->HasExtension()); + const int64_t kTransportSequenceNumber = 0xFFFF000F; + packet->set_transport_sequence_number(kTransportSequenceNumber); + + sender.SendPacket(std::move(packet), PacedPacketInfo()); + + ASSERT_TRUE(transport_.last_packet().has_value()); + EXPECT_EQ( + transport_.last_packet()->packet.GetExtension(), + kTransportSequenceNumber & 0xFFFF); +} + TEST_F(RtpSenderEgressTest, OnSendPacketUpdated) { std::unique_ptr sender = CreateRtpSenderEgress(); header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); const uint16_t kTransportSequenceNumber = 1; - EXPECT_CALL( - send_packet_observer_, - OnSendPacket(kTransportSequenceNumber, clock_->CurrentTime(), kSsrc)); + EXPECT_CALL(send_packet_observer_, + OnSendPacket(Eq(kTransportSequenceNumber), + env_.clock().CurrentTime(), kSsrc)); std::unique_ptr packet = BuildRtpPacket(); - packet->SetExtension(kTransportSequenceNumber); + packet->set_transport_sequence_number(kTransportSequenceNumber); sender->SendPacket(std::move(packet), PacedPacketInfo()); } +TEST_F(RtpSenderEgressTest, OnSendPacketUpdatedWithoutTransportSequenceNumber) { + std::unique_ptr sender = CreateRtpSenderEgress(); + + EXPECT_CALL( + send_packet_observer_, + OnSendPacket(Eq(std::nullopt), env_.clock().CurrentTime(), kSsrc)); + sender->SendPacket(BuildRtpPacket(), PacedPacketInfo()); +} + TEST_F(RtpSenderEgressTest, OnSendPacketNotUpdatedForRetransmits) { std::unique_ptr sender = CreateRtpSenderEgress(); header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, @@ -435,9 +399,10 @@ TEST_F(RtpSenderEgressTest, OnSendPacketNotUpdatedForRetransmits) { const uint16_t kTransportSequenceNumber = 1; 
EXPECT_CALL(send_packet_observer_, OnSendPacket).Times(0); std::unique_ptr packet = BuildRtpPacket(); - packet->SetExtension(kTransportSequenceNumber); + packet->set_transport_sequence_number(kTransportSequenceNumber); packet->set_packet_type(RtpPacketMediaType::kRetransmission); packet->set_retransmitted_sequence_number(packet->SequenceNumber()); + packet->set_original_ssrc(packet->Ssrc()); sender->SendPacket(std::move(packet), PacedPacketInfo()); } @@ -480,7 +445,7 @@ TEST_F(RtpSenderEgressTest, BitrateCallbacks) { RtpRtcpInterface::Configuration config = DefaultConfig(); config.send_bitrate_observer = &observer; - auto sender = std::make_unique(config, &packet_history_); + RtpSenderEgress sender(env_, config, &packet_history_); // Simulate kNumPackets sent with kPacketInterval intervals, with the // number of packets selected so that we fill (but don't overflow) the one @@ -514,7 +479,7 @@ TEST_F(RtpSenderEgressTest, BitrateCallbacks) { EXPECT_NEAR(retransmission_bitrate_bps, expected_bitrate_bps, 500); }); - sender->SendPacket(std::move(packet), PacedPacketInfo()); + sender.SendPacket(std::move(packet), PacedPacketInfo()); time_controller_.AdvanceTime(kPacketInterval); } } @@ -616,14 +581,14 @@ TEST_F(RtpSenderEgressTest, StreamDataCountersCallbacks) { expected_transmitted_counter.header_bytes += media_packet->headers_size(); expected_transmitted_counter.total_packet_delay += TimeDelta::Millis(10); - EXPECT_CALL( - mock_rtp_stats_callback_, - DataCountersUpdated(AllOf(Field(&StreamDataCounters::transmitted, - expected_transmitted_counter), - Field(&StreamDataCounters::retransmitted, - expected_retransmission_counter), - Field(&StreamDataCounters::fec, kEmptyCounter)), - kSsrc)); + EXPECT_CALL(mock_rtp_stats_callback_, + MockDataCountersUpdated( + AllOf(Field(&StreamDataCounters::transmitted, + expected_transmitted_counter), + Field(&StreamDataCounters::retransmitted, + expected_retransmission_counter), + Field(&StreamDataCounters::fec, kEmptyCounter)), + kSsrc)); sender->SendPacket(std::move(media_packet), PacedPacketInfo()); time_controller_.AdvanceTime(TimeDelta::Zero()); @@ -649,14 +614,14 @@ TEST_F(RtpSenderEgressTest, StreamDataCountersCallbacks) { retransmission_packet->headers_size(); expected_retransmission_counter.total_packet_delay += TimeDelta::Millis(20); - EXPECT_CALL( - mock_rtp_stats_callback_, - DataCountersUpdated(AllOf(Field(&StreamDataCounters::transmitted, - expected_transmitted_counter), - Field(&StreamDataCounters::retransmitted, - expected_retransmission_counter), - Field(&StreamDataCounters::fec, kEmptyCounter)), - kSsrc)); + EXPECT_CALL(mock_rtp_stats_callback_, + MockDataCountersUpdated( + AllOf(Field(&StreamDataCounters::transmitted, + expected_transmitted_counter), + Field(&StreamDataCounters::retransmitted, + expected_retransmission_counter), + Field(&StreamDataCounters::fec, kEmptyCounter)), + kSsrc)); sender->SendPacket(std::move(retransmission_packet), PacedPacketInfo()); time_controller_.AdvanceTime(TimeDelta::Zero()); @@ -671,14 +636,14 @@ TEST_F(RtpSenderEgressTest, StreamDataCountersCallbacks) { expected_transmitted_counter.header_bytes += padding_packet->headers_size(); expected_transmitted_counter.total_packet_delay += TimeDelta::Millis(30); - EXPECT_CALL( - mock_rtp_stats_callback_, - DataCountersUpdated(AllOf(Field(&StreamDataCounters::transmitted, - expected_transmitted_counter), - Field(&StreamDataCounters::retransmitted, - expected_retransmission_counter), - Field(&StreamDataCounters::fec, kEmptyCounter)), - kSsrc)); + 
EXPECT_CALL(mock_rtp_stats_callback_, + MockDataCountersUpdated( + AllOf(Field(&StreamDataCounters::transmitted, + expected_transmitted_counter), + Field(&StreamDataCounters::retransmitted, + expected_retransmission_counter), + Field(&StreamDataCounters::fec, kEmptyCounter)), + kSsrc)); sender->SendPacket(std::move(padding_packet), PacedPacketInfo()); time_controller_.AdvanceTime(TimeDelta::Zero()); } @@ -699,7 +664,7 @@ TEST_F(RtpSenderEgressTest, StreamDataCountersCallbacksFec) { EXPECT_CALL( mock_rtp_stats_callback_, - DataCountersUpdated( + MockDataCountersUpdated( AllOf(Field(&StreamDataCounters::transmitted, expected_transmitted_counter), Field(&StreamDataCounters::retransmitted, kEmptyCounter), @@ -723,7 +688,7 @@ TEST_F(RtpSenderEgressTest, StreamDataCountersCallbacksFec) { EXPECT_CALL( mock_rtp_stats_callback_, - DataCountersUpdated( + MockDataCountersUpdated( AllOf(Field(&StreamDataCounters::transmitted, expected_transmitted_counter), Field(&StreamDataCounters::retransmitted, kEmptyCounter), @@ -791,7 +756,7 @@ TEST_F(RtpSenderEgressTest, SendPacketUpdatesExtensions) { std::unique_ptr sender = CreateRtpSenderEgress(); std::unique_ptr packet = BuildRtpPacket(); - packet->set_packetization_finish_time(clock_->CurrentTime()); + packet->set_packetization_finish_time(env_.clock().CurrentTime()); const int32_t kDiffMs = 10; time_controller_.AdvanceTime(TimeDelta::Millis(kDiffMs)); @@ -803,7 +768,7 @@ TEST_F(RtpSenderEgressTest, SendPacketUpdatesExtensions) { EXPECT_EQ(received_packet.GetExtension(), kDiffMs * 90); EXPECT_EQ(received_packet.GetExtension(), - AbsoluteSendTime::To24Bits(clock_->CurrentTime())); + AbsoluteSendTime::To24Bits(env_.clock().CurrentTime())); VideoSendTiming timing; EXPECT_TRUE(received_packet.GetExtension(&timing)); @@ -812,14 +777,16 @@ TEST_F(RtpSenderEgressTest, SendPacketUpdatesExtensions) { TEST_F(RtpSenderEgressTest, SendPacketSetsPacketOptions) { const uint16_t kPacketId = 42; + const uint16_t kSequenceNumber = 456; std::unique_ptr sender = CreateRtpSenderEgress(); header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); std::unique_ptr packet = BuildRtpPacket(); - packet->SetExtension(kPacketId); + uint32_t ssrc = packet->Ssrc(); + packet->SetSequenceNumber(kSequenceNumber); + packet->set_transport_sequence_number(kPacketId); EXPECT_CALL(send_packet_observer_, OnSendPacket); - auto packet_sequence_number = packet->SequenceNumber(); sender->SendPacket(std::move(packet), PacedPacketInfo()); PacketOptions packet_options = transport_.last_packet()->options; @@ -827,191 +794,122 @@ TEST_F(RtpSenderEgressTest, SendPacketSetsPacketOptions) { EXPECT_EQ(packet_options.packet_id, kPacketId); EXPECT_TRUE(packet_options.included_in_allocation); EXPECT_TRUE(packet_options.included_in_feedback); - EXPECT_FALSE(packet_options.is_retransmit); + EXPECT_TRUE(packet_options.is_media); // Send another packet as retransmission, verify options are populated. 
std::unique_ptr retransmission = BuildRtpPacket(); - retransmission->SetExtension(kPacketId + 1); + retransmission->set_transport_sequence_number(kPacketId + 1); retransmission->set_packet_type(RtpPacketMediaType::kRetransmission); - retransmission->set_retransmitted_sequence_number(packet_sequence_number); + retransmission->set_retransmitted_sequence_number(kSequenceNumber); + retransmission->set_original_ssrc(ssrc); sender->SendPacket(std::move(retransmission), PacedPacketInfo()); - EXPECT_TRUE(transport_.last_packet()->options.is_retransmit); + EXPECT_FALSE(transport_.last_packet()->options.is_media); +} + +TEST_F(RtpSenderEgressTest, SendPacketSetsPacketOptionsIdFromExtension) { + header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, + TransportSequenceNumber::Uri()); + RtpSenderEgress sender(env_, DefaultConfig(), &packet_history_); + + // 64-bit transport sequence number. + const int64_t kTransportSequenceNumber = 0xFFFF000F; + std::unique_ptr packet = BuildRtpPacket(); + packet->set_transport_sequence_number(kTransportSequenceNumber); + + EXPECT_CALL(send_packet_observer_, OnSendPacket); + sender.SendPacket(std::move(packet), PacedPacketInfo()); + + ASSERT_TRUE(transport_.last_packet().has_value()); + EXPECT_EQ( + transport_.last_packet()->packet.GetExtension(), + kTransportSequenceNumber & 0xFFFF); + PacketOptions packet_options = transport_.last_packet()->options; + // 16 bit packet id. + EXPECT_EQ(packet_options.packet_id, kTransportSequenceNumber & 0xFFFF); +} + +TEST_F(RtpSenderEgressTest, + SendPacketSetsPacketOptionsIdFromRtpSendPacketIfNotUsingExtension) { + RtpSenderEgress sender(env_, DefaultConfig(), &packet_history_); + // 64-bit transport sequence number. + const int64_t kTransportSequenceNumber = 0xFFFF000F; + std::unique_ptr packet = BuildRtpPacket(); + packet->set_transport_sequence_number(kTransportSequenceNumber); + + EXPECT_CALL(send_packet_observer_, OnSendPacket); + sender.SendPacket(std::move(packet), PacedPacketInfo()); + + ASSERT_TRUE(transport_.last_packet().has_value()); + ASSERT_FALSE( + transport_.last_packet()->packet.HasExtension()); + PacketOptions packet_options = transport_.last_packet()->options; + EXPECT_EQ(packet_options.packet_id, kTransportSequenceNumber); } TEST_F(RtpSenderEgressTest, SendPacketUpdatesStats) { const size_t kPayloadSize = 1000; - StrictMock send_side_delay_observer; - const rtc::ArrayView kNoRtpHeaderExtensionSizes; - FlexfecSender flexfec(kFlexfectPayloadType, kFlexFecSsrc, kSsrc, /*mid=*/"", + const ArrayView kNoRtpHeaderExtensionSizes; + FlexfecSender flexfec(env_, kFlexfectPayloadType, kFlexFecSsrc, kSsrc, + /*mid=*/"", /*header_extensions=*/{}, kNoRtpHeaderExtensionSizes, - /*rtp_state=*/nullptr, time_controller_.GetClock()); + /*rtp_state=*/nullptr); RtpRtcpInterface::Configuration config = DefaultConfig(); config.fec_generator = &flexfec; - config.send_side_delay_observer = &send_side_delay_observer; - auto sender = std::make_unique(config, &packet_history_); + RtpSenderEgress sender(env_, config, &packet_history_); header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, TransportSequenceNumber::Uri()); - const Timestamp capture_time = clock_->CurrentTime(); + const Timestamp capture_time = env_.clock().CurrentTime(); std::unique_ptr video_packet = BuildRtpPacket(); video_packet->set_packet_type(RtpPacketMediaType::kVideo); video_packet->SetPayloadSize(kPayloadSize); - video_packet->SetExtension(1); + video_packet->set_transport_sequence_number(1); std::unique_ptr rtx_packet = 
BuildRtpPacket(); rtx_packet->SetSsrc(kRtxSsrc); rtx_packet->set_packet_type(RtpPacketMediaType::kRetransmission); + rtx_packet->set_original_ssrc(video_packet->Ssrc()); rtx_packet->set_retransmitted_sequence_number(video_packet->SequenceNumber()); rtx_packet->SetPayloadSize(kPayloadSize); - rtx_packet->SetExtension(2); + rtx_packet->set_transport_sequence_number(2); std::unique_ptr fec_packet = BuildRtpPacket(); fec_packet->SetSsrc(kFlexFecSsrc); fec_packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection); fec_packet->SetPayloadSize(kPayloadSize); - fec_packet->SetExtension(3); + fec_packet->set_transport_sequence_number(3); const int64_t kDiffMs = 25; time_controller_.AdvanceTime(TimeDelta::Millis(kDiffMs)); - EXPECT_CALL(send_side_delay_observer, - SendSideDelayUpdated(kDiffMs, kDiffMs, kSsrc)); - EXPECT_CALL(send_side_delay_observer, - SendSideDelayUpdated(kDiffMs, kDiffMs, kFlexFecSsrc)); + EXPECT_CALL(send_packet_observer_, OnSendPacket(Eq(1), capture_time, kSsrc)); - EXPECT_CALL(send_packet_observer_, OnSendPacket(1, capture_time, kSsrc)); - - sender->SendPacket(std::move(video_packet), PacedPacketInfo()); + sender.SendPacket(std::move(video_packet), PacedPacketInfo()); // Send packet observer not called for padding/retransmissions. - EXPECT_CALL(send_packet_observer_, OnSendPacket(2, _, _)).Times(0); - sender->SendPacket(std::move(rtx_packet), PacedPacketInfo()); + EXPECT_CALL(send_packet_observer_, OnSendPacket(Eq(2), _, _)).Times(0); + sender.SendPacket(std::move(rtx_packet), PacedPacketInfo()); EXPECT_CALL(send_packet_observer_, - OnSendPacket(3, capture_time, kFlexFecSsrc)); - sender->SendPacket(std::move(fec_packet), PacedPacketInfo()); + OnSendPacket(Eq(3), capture_time, kFlexFecSsrc)); + sender.SendPacket(std::move(fec_packet), PacedPacketInfo()); time_controller_.AdvanceTime(TimeDelta::Zero()); - StreamDataCounters rtp_stats; - StreamDataCounters rtx_stats; - sender->GetDataCounters(&rtp_stats, &rtx_stats); - EXPECT_EQ(rtp_stats.transmitted.packets, 2u); - EXPECT_EQ(rtp_stats.fec.packets, 1u); + StreamDataCounters rtp_stats = + mock_rtp_stats_callback_.GetDataCounters(kSsrc); + StreamDataCounters rtx_stats = + mock_rtp_stats_callback_.GetDataCounters(kRtxSsrc); + StreamDataCounters fec_stats = + mock_rtp_stats_callback_.GetDataCounters(kFlexFecSsrc); + sender.GetDataCounters(&rtp_stats, &rtx_stats); + EXPECT_EQ(rtp_stats.transmitted.packets, 1u); + EXPECT_EQ(fec_stats.transmitted.packets, 1u); EXPECT_EQ(rtx_stats.retransmitted.packets, 1u); } -TEST_F(RtpSenderEgressTest, TransportFeedbackObserverWithRetransmission) { - const uint16_t kTransportSequenceNumber = 17; - header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, - TransportSequenceNumber::Uri()); - std::unique_ptr retransmission = BuildRtpPacket(); - retransmission->set_packet_type(RtpPacketMediaType::kRetransmission); - retransmission->SetExtension( - kTransportSequenceNumber); - uint16_t retransmitted_seq = retransmission->SequenceNumber() - 2; - retransmission->set_retransmitted_sequence_number(retransmitted_seq); - - std::unique_ptr sender = CreateRtpSenderEgress(); - EXPECT_CALL( - feedback_observer_, - OnAddPacket(AllOf( - Field(&RtpPacketSendInfo::media_ssrc, kSsrc), - Field(&RtpPacketSendInfo::rtp_sequence_number, retransmitted_seq), - Field(&RtpPacketSendInfo::transport_sequence_number, - kTransportSequenceNumber)))); - sender->SendPacket(std::move(retransmission), PacedPacketInfo()); -} - -TEST_F(RtpSenderEgressTest, TransportFeedbackObserverWithRtxRetransmission) { - const 
uint16_t kTransportSequenceNumber = 17; - header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, - TransportSequenceNumber::Uri()); - - std::unique_ptr rtx_retransmission = BuildRtpPacket(); - rtx_retransmission->SetSsrc(kRtxSsrc); - rtx_retransmission->SetExtension( - kTransportSequenceNumber); - rtx_retransmission->set_packet_type(RtpPacketMediaType::kRetransmission); - uint16_t rtx_retransmitted_seq = rtx_retransmission->SequenceNumber() - 2; - rtx_retransmission->set_retransmitted_sequence_number(rtx_retransmitted_seq); - - std::unique_ptr sender = CreateRtpSenderEgress(); - EXPECT_CALL( - feedback_observer_, - OnAddPacket(AllOf( - Field(&RtpPacketSendInfo::media_ssrc, kSsrc), - Field(&RtpPacketSendInfo::rtp_sequence_number, rtx_retransmitted_seq), - Field(&RtpPacketSendInfo::transport_sequence_number, - kTransportSequenceNumber)))); - sender->SendPacket(std::move(rtx_retransmission), PacedPacketInfo()); -} - -TEST_F(RtpSenderEgressTest, TransportFeedbackObserverPadding) { - const uint16_t kTransportSequenceNumber = 17; - header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, - TransportSequenceNumber::Uri()); - std::unique_ptr padding = BuildRtpPacket(); - padding->SetPadding(224); - padding->set_packet_type(RtpPacketMediaType::kPadding); - padding->SetExtension(kTransportSequenceNumber); - - std::unique_ptr sender = CreateRtpSenderEgress(); - EXPECT_CALL( - feedback_observer_, - OnAddPacket(AllOf(Field(&RtpPacketSendInfo::media_ssrc, absl::nullopt), - Field(&RtpPacketSendInfo::transport_sequence_number, - kTransportSequenceNumber)))); - sender->SendPacket(std::move(padding), PacedPacketInfo()); -} - -TEST_F(RtpSenderEgressTest, TransportFeedbackObserverRtxPadding) { - const uint16_t kTransportSequenceNumber = 17; - header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, - TransportSequenceNumber::Uri()); - - std::unique_ptr rtx_padding = BuildRtpPacket(); - rtx_padding->SetPadding(224); - rtx_padding->SetSsrc(kRtxSsrc); - rtx_padding->set_packet_type(RtpPacketMediaType::kPadding); - rtx_padding->SetExtension(kTransportSequenceNumber); - - std::unique_ptr sender = CreateRtpSenderEgress(); - EXPECT_CALL( - feedback_observer_, - OnAddPacket(AllOf(Field(&RtpPacketSendInfo::media_ssrc, absl::nullopt), - Field(&RtpPacketSendInfo::transport_sequence_number, - kTransportSequenceNumber)))); - sender->SendPacket(std::move(rtx_padding), PacedPacketInfo()); -} - -TEST_F(RtpSenderEgressTest, TransportFeedbackObserverFec) { - const uint16_t kTransportSequenceNumber = 17; - header_extensions_.RegisterByUri(kTransportSequenceNumberExtensionId, - TransportSequenceNumber::Uri()); - - std::unique_ptr fec_packet = BuildRtpPacket(); - fec_packet->SetSsrc(kFlexFecSsrc); - fec_packet->set_packet_type(RtpPacketMediaType::kForwardErrorCorrection); - fec_packet->SetExtension(kTransportSequenceNumber); - - const rtc::ArrayView kNoRtpHeaderExtensionSizes; - FlexfecSender flexfec(kFlexfectPayloadType, kFlexFecSsrc, kSsrc, /*mid=*/"", - /*header_extensions=*/{}, kNoRtpHeaderExtensionSizes, - /*rtp_state=*/nullptr, time_controller_.GetClock()); - RtpRtcpInterface::Configuration config = DefaultConfig(); - config.fec_generator = &flexfec; - auto sender = std::make_unique(config, &packet_history_); - EXPECT_CALL( - feedback_observer_, - OnAddPacket(AllOf(Field(&RtpPacketSendInfo::media_ssrc, absl::nullopt), - Field(&RtpPacketSendInfo::transport_sequence_number, - kTransportSequenceNumber)))); - sender->SendPacket(std::move(fec_packet), PacedPacketInfo()); -} - 
TEST_F(RtpSenderEgressTest, SupportsAbortingRetransmissions) { std::unique_ptr sender = CreateRtpSenderEgress(); packet_history_.SetStorePacketsStatus( diff --git a/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_unittest.cc index c47edfc8fc..eb9306bcab 100644 --- a/modules/rtp_rtcp/source/rtp_sender_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_sender_unittest.cc @@ -10,38 +10,38 @@ #include "modules/rtp_rtcp/source/rtp_sender.h" +#include +#include #include +#include +#include +#include #include #include "absl/strings/string_view.h" -#include "api/rtc_event_log/rtc_event.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/rtp_packet_sender.h" +#include "api/rtp_parameters.h" #include "api/units/frequency.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "api/video/video_codec_constants.h" -#include "api/video/video_timing.h" -#include "logging/rtc_event_log/mock/mock_rtc_event_log.h" -#include "modules/rtp_rtcp/include/rtp_cvo.h" -#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" -#include "modules/rtp_rtcp/include/rtp_packet_sender.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" +#include "modules/rtp_rtcp/include/flexfec_sender.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/packet_sequencer.h" #include "modules/rtp_rtcp/source/rtp_format_video_generic.h" -#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" -#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" +#include "modules/rtp_rtcp/source/rtp_header_extension_size.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" -#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_packet_history.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/rtp_sender_video.h" #include "modules/rtp_rtcp/source/video_fec_generator.h" -#include "rtc_base/arraysize.h" -#include "rtc_base/logging.h" #include "rtc_base/rate_limiter.h" -#include "rtc_base/strings/string_builder.h" -#include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" -#include "test/mock_transport.h" #include "test/time_controller/simulated_time_controller.h" namespace webrtc { @@ -75,18 +75,14 @@ constexpr absl::string_view kMid = "mid"; constexpr absl::string_view kRid = "f"; constexpr bool kMarkerBit = true; -using ::testing::_; using ::testing::AllOf; using ::testing::AtLeast; -using ::testing::Contains; using ::testing::Each; using ::testing::ElementsAre; using ::testing::ElementsAreArray; using ::testing::Eq; -using ::testing::Field; using ::testing::Gt; using ::testing::IsEmpty; -using ::testing::NiceMock; using ::testing::Not; using ::testing::Pointee; using ::testing::Property; @@ -116,16 +112,16 @@ class RtpSenderTest : public ::testing::Test { protected: RtpSenderTest() : time_controller_(Timestamp::Millis(kStartTime)), - clock_(time_controller_.GetClock()), - retransmission_rate_limiter_(clock_, 1000), - flexfec_sender_(0, + env_(CreateEnvironment(time_controller_.GetClock())), + retransmission_rate_limiter_(&env_.clock(), 1000), + flexfec_sender_(env_, + 0, kFlexFecSsrc, kSsrc, "", std::vector(), std::vector(), - nullptr, - clock_) {} + nullptr) {} void SetUp() override { SetUpRtpSender(true, false, nullptr); } @@ -141,13 +137,10 @@ class 
RtpSenderTest : public ::testing::Test { RtpRtcpInterface::Configuration GetDefaultConfig() { RtpRtcpInterface::Configuration config; - config.clock = clock_; config.local_media_ssrc = kSsrc; config.rtx_send_ssrc = kRtxSsrc; - config.event_log = &mock_rtc_event_log_; config.retransmission_rate_limiter = &retransmission_rate_limiter_; config.paced_sender = &mock_paced_sender_; - config.field_trials = &field_trials_; // Configure rid unconditionally, it has effect only if // corresponding header extension is enabled. config.rid = std::string(kRid); @@ -156,29 +149,26 @@ class RtpSenderTest : public ::testing::Test { void CreateSender(const RtpRtcpInterface::Configuration& config) { packet_history_ = std::make_unique( - config.clock, RtpPacketHistory::PaddingMode::kPriority); + env_, RtpPacketHistory::PaddingMode::kRecentLargePacket); sequencer_.emplace(kSsrc, kRtxSsrc, /*require_marker_before_media_padding=*/!config.audio, - clock_); - rtp_sender_ = std::make_unique(config, packet_history_.get(), - config.paced_sender); + &env_.clock()); + rtp_sender_ = std::make_unique( + env_, config, packet_history_.get(), config.paced_sender); sequencer_->set_media_sequence_number(kSeqNum); rtp_sender_->SetTimestampOffset(0); } GlobalSimulatedTimeController time_controller_; - Clock* const clock_; - NiceMock mock_rtc_event_log_; + const Environment env_; MockRtpPacketPacer mock_paced_sender_; RateLimiter retransmission_rate_limiter_; FlexfecSender flexfec_sender_; - absl::optional sequencer_; + std::optional sequencer_; std::unique_ptr packet_history_; std::unique_ptr rtp_sender_; - const test::ExplicitKeyValueConfig field_trials_{""}; - std::unique_ptr BuildRtpPacket(int payload_type, bool marker_bit, uint32_t rtp_timestamp, @@ -211,7 +201,7 @@ class RtpSenderTest : public ::testing::Test { // Use maximum allowed size to catch corner cases when packet is dropped // because of lack of capacity for the media packet, or for an rtx packet // containing the media packet. - return SendPacket(/*capture_time=*/clock_->CurrentTime(), + return SendPacket(/*capture_time=*/env_.clock().CurrentTime(), /*payload_length=*/rtp_sender_->MaxRtpPacketSize() - rtp_sender_->ExpectedPerPacketOverhead()); } @@ -290,8 +280,8 @@ TEST_F(RtpSenderTest, AllocatePacketReserveExtensions) { TransmissionOffset::Uri(), kTransmissionTimeOffsetExtensionId)); ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension( AbsoluteSendTime::Uri(), kAbsoluteSendTimeExtensionId)); - ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension(AudioLevel::Uri(), - kAudioLevelExtensionId)); + ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension( + AudioLevelExtension::Uri(), kAudioLevelExtensionId)); ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension( TransportSequenceNumber::Uri(), kTransportSequenceNumberExtensionId)); ASSERT_TRUE(rtp_sender_->RegisterRtpHeaderExtension( @@ -305,7 +295,7 @@ TEST_F(RtpSenderTest, AllocatePacketReserveExtensions) { EXPECT_TRUE(packet->HasExtension()); EXPECT_TRUE(packet->HasExtension()); // Do not allocate media specific extensions. 
- EXPECT_FALSE(packet->HasExtension()); + EXPECT_FALSE(packet->HasExtension()); EXPECT_FALSE(packet->HasExtension()); } @@ -345,7 +335,7 @@ TEST_F(RtpSenderTest, SendToNetworkForwardsPacketsToPacer) { std::vector> packets(1); packets[0] = BuildRtpPacket(kPayload, kMarkerBit, kTimestamp, Timestamp::Zero()); - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); EXPECT_CALL(mock_paced_sender_, EnqueuePackets(ElementsAre(AllOf( @@ -358,7 +348,7 @@ TEST_F(RtpSenderTest, SendToNetworkForwardsPacketsToPacer) { TEST_F(RtpSenderTest, ReSendPacketForwardsPacketsToPacer) { packet_history_->SetStorePacketsStatus( RtpPacketHistory::StorageMode::kStoreAndCull, 10); - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); auto packet = BuildRtpPacket(kPayload, kMarkerBit, kTimestamp, now); packet->SetSequenceNumber(kSeqNum); packet->set_allow_retransmission(true); @@ -380,8 +370,8 @@ TEST_F(RtpSenderTest, SendPadding) { constexpr int kNumPaddingPackets = 4; EXPECT_CALL(mock_paced_sender_, EnqueuePackets); std::unique_ptr media_packet = - SendPacket(/*capture_time=*/clock_->CurrentTime(), - /*payload_size=*/100); + SendPacket(/*capture_time=*/env_.clock().CurrentTime(), + /*payload_length=*/100); sequencer_->Sequence(*media_packet); // Wait 50 ms before generating each padding packet. @@ -413,8 +403,8 @@ TEST_F(RtpSenderTest, SendPadding) { &RtpPacketToSend::Timestamp, Gt(media_packet->Timestamp())))))); std::unique_ptr next_media_packet = - SendPacket(/*capture_time=*/clock_->CurrentTime(), - /*payload_size=*/100); + SendPacket(/*capture_time=*/env_.clock().CurrentTime(), + /*payload_length=*/100); } TEST_F(RtpSenderTest, NoPaddingAsFirstPacketWithoutBweExtensions) { @@ -435,8 +425,9 @@ TEST_F(RtpSenderTest, NoPaddingAsFirstPacketWithoutBweExtensions) { TEST_F(RtpSenderTest, RequiresRtxSsrcToEnableRtx) { RtpRtcpInterface::Configuration config = GetDefaultConfig(); - config.rtx_send_ssrc = absl::nullopt; - RTPSender rtp_sender(config, packet_history_.get(), config.paced_sender); + config.rtx_send_ssrc = std::nullopt; + RTPSender rtp_sender(env_, config, packet_history_.get(), + config.paced_sender); rtp_sender.SetRtxPayloadType(kRtxPayload, kPayload); rtp_sender.SetRtxStatus(kRtxRetransmitted); @@ -447,7 +438,8 @@ TEST_F(RtpSenderTest, RequiresRtxSsrcToEnableRtx) { TEST_F(RtpSenderTest, RequiresRtxPayloadTypesToEnableRtx) { RtpRtcpInterface::Configuration config = GetDefaultConfig(); config.rtx_send_ssrc = kRtxSsrc; - RTPSender rtp_sender(config, packet_history_.get(), config.paced_sender); + RTPSender rtp_sender(env_, config, packet_history_.get(), + config.paced_sender); rtp_sender.SetRtxStatus(kRtxRetransmitted); @@ -457,7 +449,8 @@ TEST_F(RtpSenderTest, RequiresRtxPayloadTypesToEnableRtx) { TEST_F(RtpSenderTest, CanEnableRtxWhenRtxSsrcAndPayloadTypeAreConfigured) { RtpRtcpInterface::Configuration config = GetDefaultConfig(); config.rtx_send_ssrc = kRtxSsrc; - RTPSender rtp_sender(config, packet_history_.get(), config.paced_sender); + RTPSender rtp_sender(env_, config, packet_history_.get(), + config.paced_sender); rtp_sender.SetRtxPayloadType(kRtxPayload, kPayload); ASSERT_EQ(rtp_sender.RtxStatus(), kRtxOff); @@ -511,7 +504,7 @@ TEST_F(RtpSenderTest, AllowPaddingAsFirstPacketOnRtxWithAbsSendTime) { TEST_F(RtpSenderTest, UpdatesTimestampsOnPlainRtxPadding) { EnableRtx(); // Timestamps as set based on capture time in RtpSenderTest. 
- const Timestamp start_time = clock_->CurrentTime(); + const Timestamp start_time = env_.clock().CurrentTime(); const uint32_t start_timestamp = ToRtpTimestamp(start_time); // Start by sending one media packet. @@ -522,7 +515,7 @@ TEST_F(RtpSenderTest, UpdatesTimestampsOnPlainRtxPadding) { Pointee(Property(&RtpPacketToSend::Timestamp, start_timestamp)), Pointee(Property(&RtpPacketToSend::capture_time, start_time)))))); std::unique_ptr media_packet = - SendPacket(start_time, /*payload_size=*/600); + SendPacket(start_time, /*payload_length=*/600); sequencer_->Sequence(*media_packet); // Advance time before sending padding. @@ -544,10 +537,9 @@ TEST_F(RtpSenderTest, KeepsTimestampsOnPayloadPadding) { TransportSequenceNumber::Uri(), kTransportSequenceNumberExtensionId)); EnableRtx(); // Timestamps as set based on capture time in RtpSenderTest. - const Timestamp start_time = clock_->CurrentTime(); + const Timestamp start_time = env_.clock().CurrentTime(); const uint32_t start_timestamp = ToRtpTimestamp(start_time); const size_t kPayloadSize = 200; - const size_t kRtxHeaderSize = 2; // Start by sending one media packet and putting in the packet history. EXPECT_CALL( @@ -610,7 +602,7 @@ TEST_F(RtpSenderTest, RidIncludedOnRtxSentPackets) { .WillOnce([&](std::vector> packets) { sequencer_->Sequence(*packets[0]); packet_history_->PutRtpPacket(std::move(packets[0]), - clock_->CurrentTime()); + env_.clock().CurrentTime()); }); SendGenericPacket(); @@ -681,7 +673,7 @@ TEST_F(RtpSenderTest, MidAndRidIncludedOnFirstRtxPacket) { EXPECT_CALL(mock_paced_sender_, EnqueuePackets(SizeIs(1))) .WillOnce([&](std::vector> packets) { packet_history_->PutRtpPacket(std::move(packets[0]), - clock_->CurrentTime()); + env_.clock().CurrentTime()); }); auto second_built_packet = SendGenericPacket(); @@ -707,7 +699,7 @@ TEST_F(RtpSenderTest, MidAndRidNotIncludedOnRtxPacketsAfterAck) { sequencer_->Sequence(*first_built_packet); packet_history_->PutRtpPacket( std::make_unique(*first_built_packet), - /*send_time=*/clock_->CurrentTime()); + /*send_time=*/env_.clock().CurrentTime()); rtp_sender_->OnReceivedAckOnSsrc(first_built_packet->SequenceNumber()); // The second packet will include neither since an ack was received. @@ -715,7 +707,7 @@ TEST_F(RtpSenderTest, MidAndRidNotIncludedOnRtxPacketsAfterAck) { sequencer_->Sequence(*second_built_packet); packet_history_->PutRtpPacket( std::make_unique(*second_built_packet), - /*send_time=*/clock_->CurrentTime()); + /*send_time=*/env_.clock().CurrentTime()); // The first RTX packet will include MID and RRID. 
EXPECT_CALL(mock_paced_sender_, EnqueuePackets(SizeIs(1))) @@ -753,7 +745,7 @@ TEST_F(RtpSenderTest, MidAndRidAlwaysIncludedOnRtxPacketsWhenConfigured) { .WillRepeatedly( [&](std::vector> packets) { packet_history_->PutRtpPacket(std::move(packets[0]), - clock_->CurrentTime()); + env_.clock().CurrentTime()); }); auto media_packet1 = SendGenericPacket(); rtp_sender_->OnReceivedAckOnSsrc(media_packet1->SequenceNumber()); @@ -815,7 +807,7 @@ TEST_F(RtpSenderTest, MidAndRridNotIncludedOnRtxPacketsAfterRtpStateRestored) { EXPECT_CALL(mock_paced_sender_, EnqueuePackets(SizeIs(1))) .WillOnce([&](std::vector> packets) { packet_history_->PutRtpPacket(std::move(packets[0]), - clock_->CurrentTime()); + env_.clock().CurrentTime()); }); auto built_packet = SendGenericPacket(); @@ -837,12 +829,12 @@ TEST_F(RtpSenderTest, RespectsNackBitrateLimit) { for (int32_t i = 0; i < kNumPackets; ++i) { std::unique_ptr packet = BuildRtpPacket(kPayload, /*marker_bit=*/true, /*rtp_timestamp=*/0, - /*capture_time=*/clock_->CurrentTime()); + /*capture_time=*/env_.clock().CurrentTime()); packet->set_allow_retransmission(true); sequencer_->Sequence(*packet); sequence_numbers.push_back(packet->SequenceNumber()); packet_history_->PutRtpPacket(std::move(packet), - /*send_time=*/clock_->CurrentTime()); + /*send_time=*/env_.clock().CurrentTime()); time_controller_.AdvanceTime(TimeDelta::Millis(1)); } @@ -989,10 +981,10 @@ TEST_F(RtpSenderTest, SendPacketHandlesRetransmissionHistory) { // Build a media packet and put in the packet history. std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, clock_->CurrentTime()); + BuildRtpPacket(kPayload, true, 0, env_.clock().CurrentTime()); const uint16_t media_sequence_number = packet->SequenceNumber(); packet->set_allow_retransmission(true); - packet_history_->PutRtpPacket(std::move(packet), clock_->CurrentTime()); + packet_history_->PutRtpPacket(std::move(packet), env_.clock().CurrentTime()); // Simulate successful retransmission request. time_controller_.AdvanceTime(TimeDelta::Millis(30)); @@ -1016,10 +1008,13 @@ TEST_F(RtpSenderTest, MarksRetransmittedPackets) { // Build a media packet and put in the packet history. std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, clock_->CurrentTime()); - const uint16_t media_sequence_number = packet->SequenceNumber(); + BuildRtpPacket(kPayload, true, 0, env_.clock().CurrentTime()); + const uint32_t kMediaSsrc = 567; + const uint16_t kMediaSequenceNumber = 123; + packet->SetSsrc(kMediaSsrc); + packet->SetSequenceNumber(kMediaSequenceNumber); packet->set_allow_retransmission(true); - packet_history_->PutRtpPacket(std::move(packet), clock_->CurrentTime()); + packet_history_->PutRtpPacket(std::move(packet), env_.clock().CurrentTime()); // Expect a retransmission packet marked with which packet it is a // retransmit of. 
@@ -1028,9 +1023,10 @@ TEST_F(RtpSenderTest, MarksRetransmittedPackets) { EnqueuePackets(ElementsAre(AllOf( Pointee(Property(&RtpPacketToSend::packet_type, RtpPacketMediaType::kRetransmission)), + Pointee(Property(&RtpPacketToSend::original_ssrc, kMediaSsrc)), Pointee(Property(&RtpPacketToSend::retransmitted_sequence_number, - Eq(media_sequence_number))))))); - EXPECT_THAT(rtp_sender_->ReSendPacket(media_sequence_number), Gt(0)); + Eq(kMediaSequenceNumber))))))); + EXPECT_THAT(rtp_sender_->ReSendPacket(kMediaSequenceNumber), Gt(0)); } TEST_F(RtpSenderTest, GeneratedPaddingHasBweExtensions) { @@ -1047,11 +1043,11 @@ TEST_F(RtpSenderTest, GeneratedPaddingHasBweExtensions) { // Put a packet in the history, in order to facilitate payload padding. std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, clock_->CurrentTime()); + BuildRtpPacket(kPayload, true, 0, env_.clock().CurrentTime()); packet->set_allow_retransmission(true); packet->SetPayloadSize(kMinPaddingSize); packet->set_packet_type(RtpPacketMediaType::kVideo); - packet_history_->PutRtpPacket(std::move(packet), clock_->CurrentTime()); + packet_history_->PutRtpPacket(std::move(packet), env_.clock().CurrentTime()); // Generate a plain padding packet, check that extensions are registered. std::vector> generated_packets = @@ -1075,6 +1071,43 @@ TEST_F(RtpSenderTest, GeneratedPaddingHasBweExtensions) { EXPECT_GT(payload_padding->payload_size(), 0u); } +TEST_F(RtpSenderTest, GeneratedPaddingHasMidRidExtensions) { + EnableMidSending("mid"); + EnableRidSending(); + + // Send a dummy video packet so it ends up in the packet history. Since we + // are not using RTX, it should never be used as padding. + packet_history_->SetStorePacketsStatus( + RtpPacketHistory::StorageMode::kStoreAndCull, 1); + std::unique_ptr packet = + BuildRtpPacket(kPayload, true, 0, env_.clock().CurrentTime()); + packet->set_allow_retransmission(true); + packet->SetPayloadSize(1234); + packet->set_packet_type(RtpPacketMediaType::kVideo); + sequencer_->Sequence(*packet); + packet_history_->PutRtpPacket(std::move(packet), env_.clock().CurrentTime()); + + std::vector> padding_packets = + GeneratePadding(/*target_size_bytes=*/1); + ASSERT_THAT(padding_packets, SizeIs(1)); + + EXPECT_TRUE(padding_packets[0]->HasExtension()); + EXPECT_TRUE(padding_packets[0]->HasExtension()); +} + +TEST_F(RtpSenderTest, GeneratedPaddingOnRtxHasMidRidExtensions) { + EnableRtx(); + EnableMidSending("mid"); + EnableRidSending(); + + std::vector> padding_packets = + GeneratePadding(/*target_size_bytes=*/1); + ASSERT_THAT(padding_packets, SizeIs(1)); + + EXPECT_TRUE(padding_packets[0]->HasExtension()); + EXPECT_TRUE(padding_packets[0]->HasExtension()); +} + TEST_F(RtpSenderTest, GeneratePaddingResendsOldPacketsWithRtx) { // Min requested size in order to use RTX payload. const size_t kMinPaddingSize = 50; @@ -1089,11 +1122,11 @@ TEST_F(RtpSenderTest, GeneratePaddingResendsOldPacketsWithRtx) { const size_t kPayloadPacketSize = kMinPaddingSize; std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, clock_->CurrentTime()); + BuildRtpPacket(kPayload, true, 0, env_.clock().CurrentTime()); packet->set_allow_retransmission(true); packet->SetPayloadSize(kPayloadPacketSize); packet->set_packet_type(RtpPacketMediaType::kVideo); - packet_history_->PutRtpPacket(std::move(packet), clock_->CurrentTime()); + packet_history_->PutRtpPacket(std::move(packet), env_.clock().CurrentTime()); // Generated padding has large enough budget that the video packet should be // retransmitted as padding. 
@@ -1112,12 +1145,12 @@ TEST_F(RtpSenderTest, GeneratePaddingResendsOldPacketsWithRtx) { size_t padding_bytes_generated = 0; generated_packets = GeneratePadding(kPaddingBytesRequested); EXPECT_EQ(generated_packets.size(), 1u); - for (auto& packet : generated_packets) { - EXPECT_EQ(packet->packet_type(), RtpPacketMediaType::kPadding); - EXPECT_EQ(packet->Ssrc(), kRtxSsrc); - EXPECT_EQ(packet->payload_size(), 0u); - EXPECT_GT(packet->padding_size(), 0u); - padding_bytes_generated += packet->padding_size(); + for (auto& generated_packet : generated_packets) { + EXPECT_EQ(generated_packet->packet_type(), RtpPacketMediaType::kPadding); + EXPECT_EQ(generated_packet->Ssrc(), kRtxSsrc); + EXPECT_EQ(generated_packet->payload_size(), 0u); + EXPECT_GT(generated_packet->padding_size(), 0u); + padding_bytes_generated += generated_packet->padding_size(); } EXPECT_EQ(padding_bytes_generated, kMaxPaddingLength); @@ -1138,11 +1171,11 @@ TEST_F(RtpSenderTest, LimitsPayloadPaddingSize) { // Send a dummy video packet so it ends up in the packet history. const size_t kPayloadPacketSize = 1234u; std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, clock_->CurrentTime()); + BuildRtpPacket(kPayload, true, 0, env_.clock().CurrentTime()); packet->set_allow_retransmission(true); packet->SetPayloadSize(kPayloadPacketSize); packet->set_packet_type(RtpPacketMediaType::kVideo); - packet_history_->PutRtpPacket(std::move(packet), clock_->CurrentTime()); + packet_history_->PutRtpPacket(std::move(packet), env_.clock().CurrentTime()); // Smallest target size that will result in the sent packet being returned as // padding. @@ -1178,12 +1211,12 @@ TEST_F(RtpSenderTest, GeneratePaddingCreatesPurePaddingWithoutRtx) { // Send a dummy video packet so it ends up in the packet history. Since we // are not using RTX, it should never be used as padding. std::unique_ptr packet = - BuildRtpPacket(kPayload, true, 0, clock_->CurrentTime()); + BuildRtpPacket(kPayload, true, 0, env_.clock().CurrentTime()); packet->set_allow_retransmission(true); packet->SetPayloadSize(kPayloadPacketSize); packet->set_packet_type(RtpPacketMediaType::kVideo); sequencer_->Sequence(*packet); - packet_history_->PutRtpPacket(std::move(packet), clock_->CurrentTime()); + packet_history_->PutRtpPacket(std::move(packet), env_.clock().CurrentTime()); // Payload padding not available without RTX, only generate plain padding on // the media SSRC. 
@@ -1197,15 +1230,15 @@ TEST_F(RtpSenderTest, GeneratePaddingCreatesPurePaddingWithoutRtx) { std::vector> padding_packets = GeneratePadding(kPaddingBytesRequested); EXPECT_EQ(padding_packets.size(), kExpectedNumPaddingPackets); - for (auto& packet : padding_packets) { - EXPECT_EQ(packet->packet_type(), RtpPacketMediaType::kPadding); - EXPECT_EQ(packet->Ssrc(), kSsrc); - EXPECT_EQ(packet->payload_size(), 0u); - EXPECT_GT(packet->padding_size(), 0u); - padding_bytes_generated += packet->padding_size(); - EXPECT_TRUE(packet->HasExtension()); - EXPECT_TRUE(packet->HasExtension()); - EXPECT_TRUE(packet->HasExtension()); + for (auto& generated_packet : padding_packets) { + EXPECT_EQ(generated_packet->packet_type(), RtpPacketMediaType::kPadding); + EXPECT_EQ(generated_packet->Ssrc(), kSsrc); + EXPECT_EQ(generated_packet->payload_size(), 0u); + EXPECT_GT(generated_packet->padding_size(), 0u); + padding_bytes_generated += generated_packet->padding_size(); + EXPECT_TRUE(generated_packet->HasExtension()); + EXPECT_TRUE(generated_packet->HasExtension()); + EXPECT_TRUE(generated_packet->HasExtension()); } EXPECT_EQ(padding_bytes_generated, @@ -1254,7 +1287,7 @@ TEST_F(RtpSenderTest, SetsCaptureTimeOnRtxRetransmissions) { EnableRtx(); // Put a packet in the packet history, with current time as capture time. - const Timestamp start_time = clock_->CurrentTime(); + const Timestamp start_time = env_.clock().CurrentTime(); std::unique_ptr packet = BuildRtpPacket(kPayload, kMarkerBit, /*rtp_timestamp=*/0, /*capture_time=*/start_time); @@ -1279,7 +1312,7 @@ TEST_F(RtpSenderTest, IgnoresNackAfterDisablingMedia) { packet_history_->SetRtt(kRtt); // Put a packet in the history. - const Timestamp start_time = clock_->CurrentTime(); + const Timestamp start_time = env_.clock().CurrentTime(); std::unique_ptr packet = BuildRtpPacket(kPayload, kMarkerBit, 0, /*capture_time=*/start_time); packet->set_allow_retransmission(true); @@ -1303,7 +1336,7 @@ TEST_F(RtpSenderTest, DoesntFecProtectRetransmissions) { packet_history_->SetRtt(kRtt); // Put a fec protected packet in the history. 
- const Timestamp start_time = clock_->CurrentTime(); + const Timestamp start_time = env_.clock().CurrentTime(); std::unique_ptr packet = BuildRtpPacket(kPayload, kMarkerBit, 0, start_time); packet->set_allow_retransmission(true); @@ -1323,13 +1356,13 @@ TEST_F(RtpSenderTest, DoesntFecProtectRetransmissions) { TEST_F(RtpSenderTest, MarksPacketsWithKeyframeStatus) { RTPSenderVideo::Config video_config; - video_config.clock = clock_; + video_config.clock = &env_.clock(); video_config.rtp_sender = rtp_sender_.get(); - video_config.field_trials = &field_trials_; + video_config.field_trials = &env_.field_trials(); RTPSenderVideo rtp_sender_video(video_config); const uint8_t kPayloadType = 127; - const absl::optional kCodecType = + const std::optional kCodecType = VideoCodecType::kVideoCodecGeneric; const uint32_t kCaptureTimeMsToRtpTimestamp = 90; // 90 kHz clock @@ -1341,7 +1374,7 @@ TEST_F(RtpSenderTest, MarksPacketsWithKeyframeStatus) { .Times(AtLeast(1)); RTPVideoHeader video_header; video_header.frame_type = VideoFrameType::kVideoFrameKey; - Timestamp capture_time = clock_->CurrentTime(); + Timestamp capture_time = env_.clock().CurrentTime(); EXPECT_TRUE(rtp_sender_video.SendVideo( kPayloadType, kCodecType, capture_time.ms() * kCaptureTimeMsToRtpTimestamp, capture_time, @@ -1358,7 +1391,7 @@ TEST_F(RtpSenderTest, MarksPacketsWithKeyframeStatus) { .Times(AtLeast(1)); RTPVideoHeader video_header; video_header.frame_type = VideoFrameType::kVideoFrameDelta; - Timestamp capture_time = clock_->CurrentTime(); + Timestamp capture_time = env_.clock().CurrentTime(); EXPECT_TRUE(rtp_sender_video.SendVideo( kPayloadType, kCodecType, capture_time.ms() * kCaptureTimeMsToRtpTimestamp, capture_time, diff --git a/modules/rtp_rtcp/source/rtp_sender_video.cc b/modules/rtp_rtcp/source/rtp_sender_video.cc index b846f7f7ea..acb4c7321c 100644 --- a/modules/rtp_rtcp/source/rtp_sender_video.cc +++ b/modules/rtp_rtcp/source/rtp_sender_video.cc @@ -13,36 +13,61 @@ #include #include -#include -#include +#include #include -#include +#include #include +#include +#include #include "absl/algorithm/container.h" #include "absl/memory/memory.h" #include "absl/strings/match.h" +#include "api/array_view.h" #include "api/crypto/frame_encryptor_interface.h" +#include "api/field_trials_view.h" +#include "api/make_ref_counted.h" +#include "api/media_types.h" +#include "api/transport/rtp/corruption_detection_message.h" #include "api/transport/rtp/dependency_descriptor.h" +#include "api/units/data_rate.h" #include "api/units/frequency.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_layers_allocation.h" +#include "api/video/video_rotation.h" +#include "api/video/video_timing.h" +#include "common_video/corruption_detection_converters.h" +#include "common_video/frame_instrumentation_data.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/absolute_capture_time_sender.h" -#include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/corruption_detection_extension.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" #include "modules/rtp_rtcp/source/rtp_format.h" +#include 
"modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h" -#include "modules/rtp_rtcp/source/time_util.h" +#include "modules/rtp_rtcp/source/video_fec_generator.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "rtc_base/buffer.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" -#include "rtc_base/trace_event.h" +#include "rtc_base/race_checker.h" +#include "rtc_base/synchronization/mutex.h" +#include "system_wrappers/include/ntp_time.h" namespace webrtc { @@ -65,7 +90,7 @@ void BuildRedPayload(const RtpPacketToSend& media_packet, bool MinimizeDescriptor(RTPVideoHeader* video_header) { if (auto* vp8 = - absl::get_if(&video_header->video_type_header)) { + std::get_if(&video_header->video_type_header)) { // Set minimum fields the RtpPacketizer is using to create vp8 packets. // nonReference is the only field that doesn't require extra space. bool non_reference = vp8->nonReference; @@ -77,44 +102,54 @@ bool MinimizeDescriptor(RTPVideoHeader* video_header) { } bool IsBaseLayer(const RTPVideoHeader& video_header) { + // For AV1 & H.265 we fetch temporal index from the generic descriptor. + if (video_header.generic) { + const auto& generic = video_header.generic.value(); + return (generic.temporal_index == 0 || + generic.temporal_index == kNoTemporalIdx); + } switch (video_header.codec) { case kVideoCodecVP8: { const auto& vp8 = - absl::get(video_header.video_type_header); + std::get(video_header.video_type_header); return (vp8.temporalIdx == 0 || vp8.temporalIdx == kNoTemporalIdx); } case kVideoCodecVP9: { const auto& vp9 = - absl::get(video_header.video_type_header); + std::get(video_header.video_type_header); return (vp9.temporal_idx == 0 || vp9.temporal_idx == kNoTemporalIdx); } case kVideoCodecH264: // TODO(kron): Implement logic for H264 once WebRTC supports temporal // layers for H264. break; - default: + // These codecs do not have codec-specifics, from which we can fetch + // temporal index. + case kVideoCodecH265: + case kVideoCodecAV1: + case kVideoCodecGeneric: break; } return true; } -absl::optional LoadVideoPlayoutDelayOverride( +std::optional LoadVideoPlayoutDelayOverride( const FieldTrialsView* key_value_config) { RTC_DCHECK(key_value_config); - FieldTrialOptional playout_delay_min_ms("min_ms", absl::nullopt); - FieldTrialOptional playout_delay_max_ms("max_ms", absl::nullopt); + FieldTrialOptional playout_delay_min_ms("min_ms", std::nullopt); + FieldTrialOptional playout_delay_max_ms("max_ms", std::nullopt); ParseFieldTrial({&playout_delay_max_ms, &playout_delay_min_ms}, key_value_config->Lookup("WebRTC-ForceSendPlayoutDelay")); return playout_delay_max_ms && playout_delay_min_ms - ? absl::make_optional( + ? 
std::make_optional( TimeDelta::Millis(*playout_delay_min_ms), TimeDelta::Millis(*playout_delay_max_ms)) - : absl::nullopt; + : std::nullopt; } // Some packets can be skipped and the stream can still be decoded. Those // packets are less likely to be retransmitted if they are lost. -bool PacketWillLikelyBeRequestedForRestransmitionIfLost( +bool PacketWillLikelyBeRequestedForRestransmissionIfLost( const RTPVideoHeader& video_header) { return IsBaseLayer(video_header) && !(video_header.generic.has_value() @@ -150,7 +185,7 @@ RTPSenderVideo::RTPSenderVideo(const Config& config) absolute_capture_time_sender_(config.clock), frame_transformer_delegate_( config.frame_transformer - ? rtc::make_ref_counted( + ? make_ref_counted( this, config.frame_transformer, rtp_sender_->SSRC(), @@ -438,7 +473,7 @@ void RTPSenderVideo::AddRtpHeaderExtensions(const RTPVideoHeader& video_header, first_packet && send_allocation_ != SendVideoLayersAllocation::kDontSend && (video_header.frame_type == VideoFrameType::kVideoFrameKey || - PacketWillLikelyBeRequestedForRestransmitionIfLost(video_header))) { + PacketWillLikelyBeRequestedForRestransmissionIfLost(video_header))) { VideoLayersAllocation allocation = allocation_.value(); allocation.resolution_and_frame_rate_is_valid = send_allocation_ == SendVideoLayersAllocation::kSendWithResolution; @@ -449,20 +484,40 @@ void RTPSenderVideo::AddRtpHeaderExtensions(const RTPVideoHeader& video_header, packet->SetExtension( *video_header.video_frame_tracking_id); } + + if (last_packet && video_header.frame_instrumentation_data) { + std::optional message; + if (const auto* data = std::get_if( + &(*video_header.frame_instrumentation_data))) { + message = + ConvertFrameInstrumentationDataToCorruptionDetectionMessage(*data); + } else if (const auto* sync_data = + std::get_if( + &(*video_header.frame_instrumentation_data))) { + message = ConvertFrameInstrumentationSyncDataToCorruptionDetectionMessage( + *sync_data); + } else { + RTC_DCHECK_NOTREACHED(); + } + + if (message.has_value()) { + packet->SetExtension(*message); + } else { + RTC_LOG(LS_WARNING) << "Failed to convert frame instrumentation data to " + "corruption detection message."; + } + } } bool RTPSenderVideo::SendVideo(int payload_type, - absl::optional codec_type, + std::optional codec_type, uint32_t rtp_timestamp, Timestamp capture_time, - rtc::ArrayView payload, + ArrayView payload, size_t encoder_output_size, RTPVideoHeader video_header, TimeDelta expected_retransmission_time, std::vector csrcs) { - TRACE_EVENT_ASYNC_STEP1( - "webrtc", "Video", capture_time.ms_or(0), "Send", "type", - std::string(VideoFrameTypeToString(video_header.frame_type))); RTC_CHECK_RUNS_SERIALIZED(&send_checker_); if (video_header.frame_type == VideoFrameType::kEmptyFrame) @@ -544,10 +599,10 @@ bool RTPSenderVideo::SendVideo(int payload_type, if (video_header.absolute_capture_time.has_value()) { video_header.absolute_capture_time = absolute_capture_time_sender_.OnSendPacket( - AbsoluteCaptureTimeSender::GetSource(single_packet->Ssrc(), - single_packet->Csrcs()), + AbsoluteCaptureTimeSender::GetSource(single_packet->Ssrc(), csrcs), single_packet->Timestamp(), kVideoPayloadTypeFrequency, - video_header.absolute_capture_time->absolute_capture_timestamp, + NtpTime( + video_header.absolute_capture_time->absolute_capture_timestamp), video_header.absolute_capture_time->estimated_capture_clock_offset); } @@ -565,6 +620,8 @@ bool RTPSenderVideo::SendVideo(int payload_type, // Disable attaching dependency descriptor to delta packets (including // 
non-first packet of a key frame) when it wasn't attached to a key frame, // as dependency descriptor can't be usable in such case. + // This can also happen when the descriptor is larger than 15 bytes and + // two-byte header extensions are not negotiated using extmap-allow-mixed. RTC_LOG(LS_WARNING) << "Disable dependency descriptor because failed to " "attach it to a key frame."; video_structure_ = nullptr; @@ -609,10 +666,10 @@ bool RTPSenderVideo::SendVideo(int payload_type, MinimizeDescriptor(&video_header); } - rtc::Buffer encrypted_video_payload; + Buffer encrypted_video_payload; if (frame_encryptor_ != nullptr) { const size_t max_ciphertext_size = - frame_encryptor_->GetMaxCiphertextByteSize(cricket::MEDIA_TYPE_VIDEO, + frame_encryptor_->GetMaxCiphertextByteSize(webrtc::MediaType::VIDEO, payload.size()); encrypted_video_payload.SetSize(max_ciphertext_size); @@ -625,7 +682,7 @@ bool RTPSenderVideo::SendVideo(int payload_type, } if (frame_encryptor_->Encrypt( - cricket::MEDIA_TYPE_VIDEO, first_packet->Ssrc(), additional_data, + webrtc::MediaType::VIDEO, first_packet->Ssrc(), additional_data, payload, encrypted_video_payload, &bytes_written) != 0) { return false; } @@ -729,7 +786,7 @@ bool RTPSenderVideo::SendVideo(int payload_type, } if (video_header.frame_type == VideoFrameType::kVideoFrameKey || - PacketWillLikelyBeRequestedForRestransmitionIfLost(video_header)) { + PacketWillLikelyBeRequestedForRestransmissionIfLost(video_header)) { // This frame will likely be delivered, no need to populate playout // delay extensions until it changes again. playout_delay_pending_ = false; @@ -739,13 +796,11 @@ bool RTPSenderVideo::SendVideo(int payload_type, send_allocation_ = SendVideoLayersAllocation::kDontSend; } - TRACE_EVENT_ASYNC_END1("webrtc", "Video", capture_time.ms_or(0), "timestamp", - rtp_timestamp); return true; } bool RTPSenderVideo::SendEncodedImage(int payload_type, - absl::optional codec_type, + std::optional codec_type, uint32_t rtp_timestamp, const EncodedImage& encoded_image, RTPVideoHeader video_header, @@ -804,9 +859,9 @@ uint8_t RTPSenderVideo::GetTemporalId(const RTPVideoHeader& header) { uint8_t operator()(const RTPVideoHeaderLegacyGeneric&) { return kNoTemporalIdx; } - uint8_t operator()(const absl::monostate&) { return kNoTemporalIdx; } + uint8_t operator()(const std::monostate&) { return kNoTemporalIdx; } }; - return absl::visit(TemporalIdGetter(), header.video_type_header); + return std::visit(TemporalIdGetter(), header.video_type_header); } bool RTPSenderVideo::UpdateConditionalRetransmit( @@ -831,7 +886,7 @@ bool RTPSenderVideo::UpdateConditionalRetransmit( Timestamp expected_next_frame_time = Timestamp::PlusInfinity(); for (int i = temporal_id - 1; i >= 0; --i) { TemporalLayerStats* stats = &frame_stats_by_temporal_layer_[i]; - absl::optional rate = stats->frame_rate.Rate(now); + std::optional rate = stats->frame_rate.Rate(now); if (rate > Frequency::Zero()) { Timestamp tl_next = stats->last_frame_time + 1 / *rate; if (tl_next - now > -expected_retransmission_time && @@ -855,7 +910,7 @@ bool RTPSenderVideo::UpdateConditionalRetransmit( void RTPSenderVideo::MaybeUpdateCurrentPlayoutDelay( const RTPVideoHeader& header) { - absl::optional requested_delay = + std::optional requested_delay = forced_playout_delay_.has_value() ? 
forced_playout_delay_ : header.playout_delay; diff --git a/modules/rtp_rtcp/source/rtp_sender_video.h b/modules/rtp_rtcp/source/rtp_sender_video.h index 5459e14888..86943db0e8 100644 --- a/modules/rtp_rtcp/source/rtp_sender_video.h +++ b/modules/rtp_rtcp/source/rtp_sender_video.h @@ -11,28 +11,31 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_H_ #define MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_H_ +#include +#include #include #include +#include #include -#include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/field_trials_view.h" #include "api/frame_transformer_interface.h" #include "api/scoped_refptr.h" -#include "api/sequence_checker.h" -#include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" #include "api/transport/rtp/dependency_descriptor.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "api/video/color_space.h" +#include "api/video/encoded_image.h" #include "api/video/video_codec_type.h" -#include "api/video/video_frame_type.h" #include "api/video/video_layers_allocation.h" +#include "api/video/video_rotation.h" +#include "api/video/video_timing.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/absolute_capture_time_sender.h" #include "modules/rtp_rtcp/source/active_decode_targets_helper.h" -#include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "modules/rtp_rtcp/source/rtp_sender.h" #include "modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" @@ -43,6 +46,7 @@ #include "rtc_base/race_checker.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" +#include "system_wrappers/include/clock.h" namespace webrtc { @@ -77,14 +81,14 @@ class RTPSenderVideo : public RTPVideoFrameSenderInterface { RTPSender* rtp_sender = nullptr; // Some FEC data is duplicated here in preparation of moving FEC to // the egress stage. - absl::optional fec_type; + std::optional fec_type; size_t fec_overhead_bytes = 0; // Per packet max FEC overhead. FrameEncryptorInterface* frame_encryptor = nullptr; bool require_frame_encryption = false; bool enable_retransmit_all_layers = false; - absl::optional red_payload_type; + std::optional red_payload_type; const FieldTrialsView* field_trials = nullptr; - rtc::scoped_refptr frame_transformer; + scoped_refptr frame_transformer; TaskQueueFactory* task_queue_factory = nullptr; }; @@ -98,17 +102,17 @@ class RTPSenderVideo : public RTPVideoFrameSenderInterface { // video encoder, excluding any additional overhead. // Calls to this method are assumed to be externally serialized. bool SendVideo(int payload_type, - absl::optional codec_type, + std::optional codec_type, uint32_t rtp_timestamp, Timestamp capture_time, - rtc::ArrayView payload, + ArrayView payload, size_t encoder_output_size, RTPVideoHeader video_header, TimeDelta expected_retransmission_time, std::vector csrcs) override; bool SendEncodedImage(int payload_type, - absl::optional codec_type, + std::optional codec_type, uint32_t rtp_timestamp, const EncodedImage& encoded_image, RTPVideoHeader video_header, @@ -196,35 +200,35 @@ class RTPSenderVideo : public RTPVideoFrameSenderInterface { // These members should only be accessed from within SendVideo() to avoid // potential race conditions. 
- rtc::RaceChecker send_checker_; + RaceChecker send_checker_; int32_t retransmission_settings_ RTC_GUARDED_BY(send_checker_); VideoRotation last_rotation_ RTC_GUARDED_BY(send_checker_); - absl::optional last_color_space_ RTC_GUARDED_BY(send_checker_); + std::optional last_color_space_ RTC_GUARDED_BY(send_checker_); bool transmit_color_space_next_frame_ RTC_GUARDED_BY(send_checker_); std::unique_ptr video_structure_ RTC_GUARDED_BY(send_checker_); - absl::optional allocation_ + std::optional allocation_ RTC_GUARDED_BY(send_checker_); // Flag indicating if we should send `allocation_`. SendVideoLayersAllocation send_allocation_ RTC_GUARDED_BY(send_checker_); - absl::optional last_full_sent_allocation_ + std::optional last_full_sent_allocation_ RTC_GUARDED_BY(send_checker_); // Current target playout delay. - absl::optional current_playout_delay_ + std::optional current_playout_delay_ RTC_GUARDED_BY(send_checker_); // Flag indicating if we need to send `current_playout_delay_` in order // to guarantee it gets delivered. bool playout_delay_pending_; // Set by the field trial WebRTC-ForceSendPlayoutDelay to override the playout // delay of outgoing video frames. - const absl::optional forced_playout_delay_; + const std::optional forced_playout_delay_; // Should never be held when calling out of this class. Mutex mutex_; - const absl::optional red_payload_type_; - absl::optional fec_type_; + const std::optional red_payload_type_; + std::optional fec_type_; const size_t fec_overhead_bytes_; // Per packet max FEC overhead. mutable Mutex stats_mutex_; @@ -244,12 +248,13 @@ class RTPSenderVideo : public RTPVideoFrameSenderInterface { // Set to true if the generic descriptor should be authenticated. const bool generic_descriptor_auth_experiment_; - AbsoluteCaptureTimeSender absolute_capture_time_sender_; + AbsoluteCaptureTimeSender absolute_capture_time_sender_ + RTC_GUARDED_BY(send_checker_); // Tracks updates to the active decode targets and decides when active decode // targets bitmask should be attached to the dependency descriptor. 
  ActiveDecodeTargetsHelper active_decode_targets_tracker_;
-  const rtc::scoped_refptr
+  const scoped_refptr
      frame_transformer_delegate_;
 };
diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
index 2281a2ae27..2d08bd6e8f 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
@@ -10,28 +10,54 @@
 #include "modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h"
+#include
+#include
+#include
+#include
+#include
 #include
 #include
+#include "api/array_view.h"
+#include "api/frame_transformer_interface.h"
+#include "api/scoped_refptr.h"
 #include "api/sequence_checker.h"
 #include "api/task_queue/task_queue_factory.h"
-#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h"
+#include "api/transport/rtp/dependency_descriptor.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "api/video/encoded_image.h"
+#include "api/video/video_codec_type.h"
+#include "api/video/video_frame_metadata.h"
+#include "api/video/video_frame_type.h"
+#include "api/video/video_layers_allocation.h"
+#include "api/video_codecs/video_codec.h"
+#include "modules/rtp_rtcp/source/rtp_video_header.h"
 #include "rtc_base/checks.h"
+#include "rtc_base/synchronization/mutex.h"
 namespace webrtc {
 namespace {
+// Using a reasonable default of 10ms for the retransmission delay for frames
+// not coming from this sender's encoder. This is usually taken from an
+// estimate of the RTT of the link, so 10ms should be a reasonable estimate for
+// frames being re-transmitted to a peer, probably on the same network.
+const TimeDelta kDefaultRetransmissionsTime = TimeDelta::Millis(10);
+}  // namespace
+
 class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
  public:
  TransformableVideoSenderFrame(const EncodedImage& encoded_image,
                                const RTPVideoHeader& video_header,
                                int payload_type,
-                               absl::optional codec_type,
+                               std::optional codec_type,
                                uint32_t rtp_timestamp,
                                TimeDelta expected_retransmission_time,
                                uint32_t ssrc,
                                std::vector csrcs)
-      : encoded_data_(encoded_image.GetEncodedData()),
+      : TransformableVideoFrameInterface(Passkey()),
+        encoded_data_(encoded_image.GetEncodedData()),
        pre_transform_payload_size_(encoded_image.size()),
        header_(video_header),
        frame_type_(encoded_image._frameType),
@@ -39,7 +65,7 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
        codec_type_(codec_type),
        timestamp_(rtp_timestamp),
        capture_time_(encoded_image.CaptureTime()),
-        capture_time_identifier_(encoded_image.CaptureTimeIdentifier()),
+        presentation_timestamp_(encoded_image.PresentationTimestamp()),
        expected_retransmission_time_(expected_retransmission_time),
        ssrc_(ssrc),
        csrcs_(csrcs) {
@@ -50,11 +76,9 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
  ~TransformableVideoSenderFrame() override = default;
  // Implements TransformableVideoFrameInterface.
- rtc::ArrayView GetData() const override { - return *encoded_data_; - } + ArrayView GetData() const override { return *encoded_data_; } - void SetData(rtc::ArrayView data) override { + void SetData(ArrayView data) override { encoded_data_ = EncodedImageBuffer::Create(data.data(), data.size()); } @@ -86,10 +110,12 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { const RTPVideoHeader& GetHeader() const { return header_; } uint8_t GetPayloadType() const override { return payload_type_; } - absl::optional GetCodecType() const { return codec_type_; } - Timestamp GetCaptureTime() const { return capture_time_; } - absl::optional GetCaptureTimeIdentifier() const override { - return capture_time_identifier_; + std::optional GetCodecType() const { return codec_type_; } + std::optional GetCaptureTimeIdentifier() const override { + return presentation_timestamp_; + } + std::optional GetPresentationTimestamp() const override { + return presentation_timestamp_; } TimeDelta GetExpectedRetransmissionTime() const { @@ -97,27 +123,43 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { } Direction GetDirection() const override { return Direction::kSender; } + std::string GetMimeType() const override { + if (!codec_type_.has_value()) { + return "video/x-unknown"; + } + std::string mime_type = "video/"; + return mime_type + CodecTypeToPayloadString(*codec_type_); + } + + std::optional ReceiveTime() const override { return std::nullopt; } + + std::optional CaptureTime() const override { + return capture_time_; + } + + std::optional SenderCaptureTimeOffset() const override { + return std::nullopt; + } private: - rtc::scoped_refptr encoded_data_; + scoped_refptr encoded_data_; const size_t pre_transform_payload_size_; RTPVideoHeader header_; const VideoFrameType frame_type_; const uint8_t payload_type_; - const absl::optional codec_type_ = absl::nullopt; + const std::optional codec_type_ = std::nullopt; uint32_t timestamp_; const Timestamp capture_time_; - const absl::optional capture_time_identifier_; + const std::optional presentation_timestamp_; const TimeDelta expected_retransmission_time_; uint32_t ssrc_; std::vector csrcs_; }; -} // namespace RTPSenderVideoFrameTransformerDelegate::RTPSenderVideoFrameTransformerDelegate( RTPVideoFrameSenderInterface* sender, - rtc::scoped_refptr frame_transformer, + scoped_refptr frame_transformer, uint32_t ssrc, TaskQueueFactory* task_queue_factory) : sender_(sender), @@ -129,16 +171,27 @@ RTPSenderVideoFrameTransformerDelegate::RTPSenderVideoFrameTransformerDelegate( void RTPSenderVideoFrameTransformerDelegate::Init() { frame_transformer_->RegisterTransformedFrameSinkCallback( - rtc::scoped_refptr(this), ssrc_); + scoped_refptr(this), ssrc_); } bool RTPSenderVideoFrameTransformerDelegate::TransformFrame( int payload_type, - absl::optional codec_type, + std::optional codec_type, uint32_t rtp_timestamp, const EncodedImage& encoded_image, RTPVideoHeader video_header, TimeDelta expected_retransmission_time) { + { + MutexLock lock(&sender_lock_); + if (short_circuit_) { + sender_->SendVideo(payload_type, codec_type, rtp_timestamp, + encoded_image.CaptureTime(), + *encoded_image.GetEncodedData(), encoded_image.size(), + video_header, expected_retransmission_time, + /*csrcs=*/{}); + return true; + } + } frame_transformer_->Transform(std::make_unique( encoded_image, video_header, payload_type, codec_type, rtp_timestamp, expected_retransmission_time, ssrc_, @@ -153,7 +206,7 @@ void 
RTPSenderVideoFrameTransformerDelegate::OnTransformedFrame( if (!sender_) { return; } - rtc::scoped_refptr delegate(this); + scoped_refptr delegate(this); transformation_queue_->PostTask( [delegate = std::move(delegate), frame = std::move(frame)]() mutable { RTC_DCHECK_RUN_ON(delegate->transformation_queue_.get()); @@ -161,6 +214,11 @@ void RTPSenderVideoFrameTransformerDelegate::OnTransformedFrame( }); } +void RTPSenderVideoFrameTransformerDelegate::StartShortCircuiting() { + MutexLock lock(&sender_lock_); + short_circuit_ = true; +} + void RTPSenderVideoFrameTransformerDelegate::SendVideo( std::unique_ptr transformed_frame) const { RTC_DCHECK_RUN_ON(transformation_queue_.get()); @@ -171,10 +229,11 @@ void RTPSenderVideoFrameTransformerDelegate::SendVideo( TransformableFrameInterface::Direction::kSender) { auto* transformed_video_frame = static_cast(transformed_frame.get()); + RTC_CHECK(transformed_video_frame->CaptureTime().has_value()); sender_->SendVideo(transformed_video_frame->GetPayloadType(), transformed_video_frame->GetCodecType(), transformed_video_frame->GetTimestamp(), - transformed_video_frame->GetCaptureTime(), + *transformed_video_frame->CaptureTime(), transformed_video_frame->GetData(), transformed_video_frame->GetPreTransformPayloadSize(), transformed_video_frame->GetHeader(), @@ -184,15 +243,17 @@ void RTPSenderVideoFrameTransformerDelegate::SendVideo( auto* transformed_video_frame = static_cast(transformed_frame.get()); VideoFrameMetadata metadata = transformed_video_frame->Metadata(); - sender_->SendVideo( - transformed_video_frame->GetPayloadType(), metadata.GetCodec(), - transformed_video_frame->GetTimestamp(), - /*capture_time=*/Timestamp::MinusInfinity(), - transformed_video_frame->GetData(), - transformed_video_frame->GetData().size(), - RTPVideoHeader::FromMetadata(metadata), - /*expected_retransmission_time=*/TimeDelta::PlusInfinity(), - metadata.GetCsrcs()); + // TODO(bugs.webrtc.org/14708): Use an actual RTT estimate for the + // retransmission time instead of a const default, in the same way as a + // locally encoded frame. + sender_->SendVideo(transformed_video_frame->GetPayloadType(), + metadata.GetCodec(), + transformed_video_frame->GetTimestamp(), + /*capture_time=*/Timestamp::MinusInfinity(), + transformed_video_frame->GetData(), + transformed_video_frame->GetData().size(), + RTPVideoHeader::FromMetadata(metadata), + kDefaultRetransmissionsTime, metadata.GetCsrcs()); } } @@ -229,13 +290,14 @@ std::unique_ptr CloneSenderVideoFrame( ? VideoFrameType::kVideoFrameKey : VideoFrameType::kVideoFrameDelta; // TODO(bugs.webrtc.org/14708): Fill in other EncodedImage parameters - + // TODO(bugs.webrtc.org/14708): Use an actual RTT estimate for the + // retransmission time instead of a const default, in the same way as a + // locally encoded frame. 
VideoFrameMetadata metadata = original->Metadata(); RTPVideoHeader new_header = RTPVideoHeader::FromMetadata(metadata); return std::make_unique( encoded_image, new_header, original->GetPayloadType(), new_header.codec, - original->GetTimestamp(), - /*expected_retransmission_time=*/TimeDelta::PlusInfinity(), + original->GetTimestamp(), kDefaultRetransmissionsTime, original->GetSsrc(), metadata.GetCsrcs()); } diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h index a333db235a..4302fb7e5f 100644 --- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h +++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h @@ -11,18 +11,27 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_FRAME_TRANSFORMER_DELEGATE_H_ #define MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_FRAME_TRANSFORMER_DELEGATE_H_ +#include +#include #include +#include #include +#include "api/array_view.h" #include "api/frame_transformer_interface.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" +#include "api/transport/rtp/dependency_descriptor.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" #include "api/video/video_layers_allocation.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -31,10 +40,10 @@ namespace webrtc { class RTPVideoFrameSenderInterface { public: virtual bool SendVideo(int payload_type, - absl::optional codec_type, + std::optional codec_type, uint32_t rtp_timestamp, Timestamp capture_time, - rtc::ArrayView payload, + ArrayView payload, size_t encoder_output_size, RTPVideoHeader video_header, TimeDelta expected_retransmission_time, @@ -56,7 +65,7 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { public: RTPSenderVideoFrameTransformerDelegate( RTPVideoFrameSenderInterface* sender, - rtc::scoped_refptr frame_transformer, + scoped_refptr frame_transformer, uint32_t ssrc, TaskQueueFactory* send_transport_queue); @@ -64,7 +73,7 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { // Delegates the call to FrameTransformerInterface::TransformFrame. bool TransformFrame(int payload_type, - absl::optional codec_type, + std::optional codec_type, uint32_t rtp_timestamp, const EncodedImage& encoded_image, RTPVideoHeader video_header, @@ -75,6 +84,8 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { void OnTransformedFrame( std::unique_ptr frame) override; + void StartShortCircuiting() override; + // Delegates the call to RTPSendVideo::SendVideo on the `encoder_queue_`. void SendVideo(std::unique_ptr frame) const RTC_RUN_ON(transformation_queue_); @@ -102,11 +113,12 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback { mutable Mutex sender_lock_; RTPVideoFrameSenderInterface* sender_ RTC_GUARDED_BY(sender_lock_); - rtc::scoped_refptr frame_transformer_; + scoped_refptr frame_transformer_; const uint32_t ssrc_; // Used when the encoded frames arrives without a current task queue. This can // happen if a hardware encoder was used. 
std::unique_ptr transformation_queue_; + bool short_circuit_ RTC_GUARDED_BY(sender_lock_) = false; }; // Method to support cloning a Sender frame from another frame diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate_unittest.cc index 54cfdbadd7..af3083bcc5 100644 --- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate_unittest.cc @@ -10,13 +10,32 @@ #include "modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.h" +#include +#include +#include +#include #include +#include +#include "api/array_view.h" +#include "api/frame_transformer_interface.h" +#include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" +#include "api/test/mock_frame_transformer.h" #include "api/test/mock_transformable_video_frame.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_metadata.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_layers_allocation.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "rtc_base/event.h" #include "test/gmock.h" #include "test/gtest.h" -#include "test/mock_frame_transformer.h" #include "test/time_controller/simulated_time_controller.h" namespace webrtc { @@ -33,10 +52,10 @@ class MockRTPVideoFrameSenderInterface : public RTPVideoFrameSenderInterface { MOCK_METHOD(bool, SendVideo, (int payload_type, - absl::optional codec_type, + std::optional codec_type, uint32_t rtp_timestamp, Timestamp capture_time, - rtc::ArrayView payload, + ArrayView payload, size_t encoder_output_size, RTPVideoHeader video_header, TimeDelta expected_retransmission_time, @@ -56,13 +75,13 @@ class MockRTPVideoFrameSenderInterface : public RTPVideoFrameSenderInterface { class RtpSenderVideoFrameTransformerDelegateTest : public ::testing::Test { protected: RtpSenderVideoFrameTransformerDelegateTest() - : frame_transformer_(rtc::make_ref_counted()), + : frame_transformer_(make_ref_counted()), time_controller_(Timestamp::Seconds(0)) {} ~RtpSenderVideoFrameTransformerDelegateTest() override = default; std::unique_ptr GetTransformableFrame( - rtc::scoped_refptr delegate, + scoped_refptr delegate, bool key_frame = false) { EncodedImage encoded_image; encoded_image.SetEncodedData(EncodedImageBuffer::Create(1)); @@ -74,21 +93,27 @@ class RtpSenderVideoFrameTransformerDelegateTest : public ::testing::Test { frame_to_transform) { frame = std::move(frame_to_transform); }); + RTPVideoHeader rtp_header; + + VideoFrameMetadata metadata; + metadata.SetCodec(VideoCodecType::kVideoCodecVP8); + metadata.SetRTPVideoHeaderCodecSpecifics(RTPVideoHeaderVP8()); + delegate->TransformFrame( /*payload_type=*/1, VideoCodecType::kVideoCodecVP8, /*rtp_timestamp=*/2, - encoded_image, RTPVideoHeader(), - /*expected_retransmission_time=*/TimeDelta::PlusInfinity()); + encoded_image, RTPVideoHeader::FromMetadata(metadata), + /*expected_retransmission_time=*/TimeDelta::Millis(10)); return frame; } MockRTPVideoFrameSenderInterface test_sender_; - rtc::scoped_refptr frame_transformer_; + scoped_refptr frame_transformer_; GlobalSimulatedTimeController time_controller_; }; TEST_F(RtpSenderVideoFrameTransformerDelegateTest, 
RegisterTransformedFrameCallbackSinkOnInit) { - auto delegate = rtc::make_ref_counted( + auto delegate = make_ref_counted( &test_sender_, frame_transformer_, /*ssrc=*/1111, time_controller_.CreateTaskQueueFactory().get()); EXPECT_CALL(*frame_transformer_, @@ -98,7 +123,7 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest, TEST_F(RtpSenderVideoFrameTransformerDelegateTest, UnregisterTransformedFrameSinkCallbackOnReset) { - auto delegate = rtc::make_ref_counted( + auto delegate = make_ref_counted( &test_sender_, frame_transformer_, /*ssrc=*/1111, time_controller_.CreateTaskQueueFactory().get()); EXPECT_CALL(*frame_transformer_, @@ -108,7 +133,7 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest, TEST_F(RtpSenderVideoFrameTransformerDelegateTest, TransformFrameCallsTransform) { - auto delegate = rtc::make_ref_counted( + auto delegate = make_ref_counted( &test_sender_, frame_transformer_, /*ssrc=*/1111, time_controller_.CreateTaskQueueFactory().get()); @@ -117,16 +142,16 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest, delegate->TransformFrame( /*payload_type=*/1, VideoCodecType::kVideoCodecVP8, /*rtp_timestamp=*/2, encoded_image, RTPVideoHeader(), - /*expected_retransmission_time=*/TimeDelta::PlusInfinity()); + /*expected_retransmission_time=*/TimeDelta::Millis(10)); } TEST_F(RtpSenderVideoFrameTransformerDelegateTest, OnTransformedFrameCallsSenderSendVideo) { - auto delegate = rtc::make_ref_counted( + auto delegate = make_ref_counted( &test_sender_, frame_transformer_, /*ssrc=*/1111, time_controller_.CreateTaskQueueFactory().get()); - rtc::scoped_refptr callback; + scoped_refptr callback; EXPECT_CALL(*frame_transformer_, RegisterTransformedFrameSinkCallback) .WillOnce(SaveArg<0>(&callback)); delegate->Init(); @@ -135,8 +160,9 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest, std::unique_ptr frame = GetTransformableFrame(delegate); ASSERT_TRUE(frame); + EXPECT_STRCASEEQ("video/VP8", frame->GetMimeType().c_str()); - rtc::Event event; + Event event; EXPECT_CALL(test_sender_, SendVideo).WillOnce(WithoutArgs([&] { event.Set(); return true; @@ -148,7 +174,7 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest, } TEST_F(RtpSenderVideoFrameTransformerDelegateTest, CloneSenderVideoFrame) { - auto delegate = rtc::make_ref_counted( + auto delegate = make_ref_counted( &test_sender_, frame_transformer_, /*ssrc=*/1111, time_controller_.CreateTaskQueueFactory().get()); @@ -162,13 +188,14 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest, CloneSenderVideoFrame) { EXPECT_EQ(clone->IsKeyFrame(), video_frame.IsKeyFrame()); EXPECT_EQ(clone->GetPayloadType(), video_frame.GetPayloadType()); + EXPECT_EQ(clone->GetMimeType(), video_frame.GetMimeType()); EXPECT_EQ(clone->GetSsrc(), video_frame.GetSsrc()); EXPECT_EQ(clone->GetTimestamp(), video_frame.GetTimestamp()); EXPECT_EQ(clone->Metadata(), video_frame.Metadata()); } TEST_F(RtpSenderVideoFrameTransformerDelegateTest, CloneKeyFrame) { - auto delegate = rtc::make_ref_counted( + auto delegate = make_ref_counted( &test_sender_, frame_transformer_, /*ssrc=*/1111, time_controller_.CreateTaskQueueFactory().get()); @@ -182,13 +209,14 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest, CloneKeyFrame) { EXPECT_EQ(clone->IsKeyFrame(), video_frame.IsKeyFrame()); EXPECT_EQ(clone->GetPayloadType(), video_frame.GetPayloadType()); + EXPECT_EQ(clone->GetMimeType(), video_frame.GetMimeType()); EXPECT_EQ(clone->GetSsrc(), video_frame.GetSsrc()); EXPECT_EQ(clone->GetTimestamp(), video_frame.GetTimestamp()); EXPECT_EQ(clone->Metadata(), video_frame.Metadata()); } 
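For reference, the GetMimeType() accessor exercised by the clone tests above maps the frame's optional codec type to a MIME string: "video/x-unknown" when no codec type is set, otherwise "video/" plus the codec name. The snippet below is a small standalone sketch of that mapping, not part of the patch; the SketchCodec enum and helper names are illustrative stand-ins rather than real WebRTC identifiers.

#include <optional>
#include <string>

// Illustrative stand-in for the codec enum; not the real WebRTC type.
enum class SketchCodec { kVP8, kVP9, kH264 };

std::string SketchCodecName(SketchCodec codec) {
  switch (codec) {
    case SketchCodec::kVP8:
      return "VP8";
    case SketchCodec::kVP9:
      return "VP9";
    case SketchCodec::kH264:
      return "H264";
  }
  return "unknown";
}

// Mirrors the behavior checked by EXPECT_STRCASEEQ("video/VP8", ...) above:
// no codec type -> "video/x-unknown", otherwise "video/" + codec name.
std::string SketchMimeType(std::optional<SketchCodec> codec) {
  if (!codec.has_value()) {
    return "video/x-unknown";
  }
  return "video/" + SketchCodecName(*codec);
}

With these assumptions, SketchMimeType(SketchCodec::kVP8) yields "video/VP8" and SketchMimeType(std::nullopt) yields "video/x-unknown", matching the expectations the delegate unittests place on cloned frames.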
 TEST_F(RtpSenderVideoFrameTransformerDelegateTest, MetadataAfterSetMetadata) {
-  auto delegate = rtc::make_ref_counted(
+  auto delegate = make_ref_counted(
      &test_sender_, frame_transformer_,
      /*ssrc=*/1111, time_controller_.CreateTaskQueueFactory().get());
@@ -216,7 +244,7 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest, MetadataAfterSetMetadata) {
 TEST_F(RtpSenderVideoFrameTransformerDelegateTest,
        ReceiverFrameConvertedToSenderFrame) {
-  auto delegate = rtc::make_ref_counted(
+  auto delegate = make_ref_counted(
      &test_sender_, frame_transformer_,
      /*ssrc=*/1111, time_controller_.CreateTaskQueueFactory().get());
@@ -233,25 +261,25 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest,
  metadata.SetRTPVideoHeaderCodecSpecifics(RTPVideoHeaderVP8());
  metadata.SetCsrcs(frame_csrcs);
  ON_CALL(*mock_receiver_frame, Metadata).WillByDefault(Return(metadata));
-  rtc::ArrayView buffer =
-      (rtc::ArrayView)*EncodedImageBuffer::Create(1);
+  ArrayView buffer =
+      (ArrayView)*EncodedImageBuffer::Create(1);
  ON_CALL(*mock_receiver_frame, GetData).WillByDefault(Return(buffer));
  ON_CALL(*mock_receiver_frame, GetPayloadType)
      .WillByDefault(Return(payload_type));
  ON_CALL(*mock_receiver_frame, GetTimestamp).WillByDefault(Return(timestamp));
-  rtc::scoped_refptr callback;
+  scoped_refptr callback;
  EXPECT_CALL(*frame_transformer_, RegisterTransformedFrameSinkCallback)
      .WillOnce(SaveArg<0>(&callback));
  delegate->Init();
  ASSERT_TRUE(callback);
-  rtc::Event event;
+  Event event;
  EXPECT_CALL(
      test_sender_,
-      SendVideo(payload_type, absl::make_optional(kVideoCodecVP8), timestamp,
+      SendVideo(payload_type, std::make_optional(kVideoCodecVP8), timestamp,
                /*capture_time=*/Timestamp::MinusInfinity(), buffer, _, _,
-                /*expected_retransmission_time=*/TimeDelta::PlusInfinity(),
+                /*expected_retransmission_time=*/TimeDelta::Millis(10),
                frame_csrcs))
      .WillOnce(WithoutArgs([&] {
        event.Set();
@@ -264,7 +292,7 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest,
 }

 TEST_F(RtpSenderVideoFrameTransformerDelegateTest, SettingRTPTimestamp) {
-  auto delegate = rtc::make_ref_counted(
+  auto delegate = make_ref_counted(
      &test_sender_, frame_transformer_,
      /*ssrc=*/1111, time_controller_.CreateTaskQueueFactory().get());
@@ -280,5 +308,29 @@ TEST_F(RtpSenderVideoFrameTransformerDelegateTest, SettingRTPTimestamp) {
  EXPECT_EQ(video_frame.GetTimestamp(), rtp_timestamp);
 }

+TEST_F(RtpSenderVideoFrameTransformerDelegateTest,
+       ShortCircuitingSkipsTransform) {
+  auto delegate = make_ref_counted(
+      &test_sender_, frame_transformer_,
+      /*ssrc=*/1111, time_controller_.CreateTaskQueueFactory().get());
+  EXPECT_CALL(*frame_transformer_,
+              RegisterTransformedFrameSinkCallback(_, 1111));
+  delegate->Init();
+
+  delegate->StartShortCircuiting();
+
+  // Will not call the actual transformer.
+  EXPECT_CALL(*frame_transformer_, Transform).Times(0);
+  // Will pass the frame straight to the receiver.
+ EXPECT_CALL(test_sender_, SendVideo); + + EncodedImage encoded_image; + encoded_image.SetEncodedData(EncodedImageBuffer::Create(1)); + delegate->TransformFrame( + /*payload_type=*/1, VideoCodecType::kVideoCodecVP8, /*rtp_timestamp=*/2, + encoded_image, RTPVideoHeader(), + /*expected_retransmission_time=*/TimeDelta::Millis(10)); +} + } // namespace } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc b/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc index 9641d617d9..b5045aee12 100644 --- a/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc @@ -10,23 +10,45 @@ #include "modules/rtp_rtcp/source/rtp_sender_video.h" +#include +#include #include -#include +#include #include #include #include "absl/memory/memory.h" +#include "api/array_view.h" +#include "api/call/transport.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/field_trials_view.h" #include "api/frame_transformer_factory.h" +#include "api/frame_transformer_interface.h" +#include "api/make_ref_counted.h" #include "api/rtp_headers.h" +#include "api/scoped_refptr.h" #include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" #include "api/test/mock_frame_encryptor.h" +#include "api/test/mock_frame_transformer.h" +#include "api/transport/rtp/corruption_detection_message.h" #include "api/transport/rtp/dependency_descriptor.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "api/video/encoded_image.h" #include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_layers_allocation.h" +#include "api/video/video_rotation.h" #include "api/video/video_timing.h" +#include "common_video/frame_instrumentation_data.h" #include "modules/rtp_rtcp/include/rtp_cvo.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/corruption_detection_extension.h" #include "modules/rtp_rtcp/source/rtcp_packet/nack.h" #include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" @@ -35,18 +57,22 @@ #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" +#include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "modules/rtp_rtcp/source/rtp_sender.h" #include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -#include "rtc_base/logging.h" #include "rtc_base/rate_limiter.h" #include "rtc_base/thread.h" -#include "test/explicit_key_value_config.h" +#include "system_wrappers/include/clock.h" +#include "system_wrappers/include/ntp_time.h" #include "test/gmock.h" #include "test/gtest.h" -#include "test/mock_frame_transformer.h" #include "test/time_controller/simulated_time_controller.h" namespace webrtc { @@ -60,7 +86,6 @@ using 
::testing::ElementsAreArray; using ::testing::IsEmpty; using ::testing::NiceMock; using ::testing::Not; -using ::testing::Return; using ::testing::ReturnArg; using ::testing::SaveArg; using ::testing::SizeIs; @@ -77,9 +102,10 @@ enum : int { // The first valid value is 1. kAbsoluteCaptureTimeExtensionId, kPlayoutDelayExtensionId, kVideoLayersAllocationExtensionId, + kCorruptionDetectionExtensionId, }; -constexpr int kPayload = 100; +constexpr int kPayloadType = 100; constexpr VideoCodecType kType = VideoCodecType::kVideoCodecGeneric; constexpr uint32_t kTimestamp = 10; constexpr uint16_t kSeqNum = 33; @@ -111,15 +137,17 @@ class LoopbackTransportTest : public webrtc::Transport { kPlayoutDelayExtensionId); receivers_extensions_.Register( kVideoLayersAllocationExtensionId); + receivers_extensions_.Register( + kCorruptionDetectionExtensionId); } - bool SendRtp(rtc::ArrayView data, - const PacketOptions& options) override { + bool SendRtp(ArrayView data, + const PacketOptions& /* options */) override { sent_packets_.push_back(RtpPacketReceived(&receivers_extensions_)); EXPECT_TRUE(sent_packets_.back().Parse(data)); return true; } - bool SendRtcp(rtc::ArrayView data) override { return false; } + bool SendRtcp(ArrayView /* data */) override { return false; } const RtpPacketReceived& last_sent_packet() { return sent_packets_.back(); } int packets_sent() { return sent_packets_.size(); } const std::vector& sent_packets() const { @@ -158,50 +186,46 @@ class RtpSenderVideoTest : public ::testing::Test { public: RtpSenderVideoTest() : fake_clock_(kStartTime), + env_(CreateEnvironment(&fake_clock_)), retransmission_rate_limiter_(&fake_clock_, 1000), - rtp_module_(ModuleRtpRtcpImpl2::Create([&] { - RtpRtcpInterface::Configuration config; - config.clock = &fake_clock_; - config.outgoing_transport = &transport_; - config.retransmission_rate_limiter = &retransmission_rate_limiter_; - config.field_trials = &field_trials_; - config.local_media_ssrc = kSsrc; - config.rtx_send_ssrc = kRtxSsrc; - config.rid = "rid"; - return config; - }())), + rtp_module_( + env_, + {.outgoing_transport = &transport_, + .retransmission_rate_limiter = &retransmission_rate_limiter_, + .local_media_ssrc = kSsrc, + .rtx_send_ssrc = kRtxSsrc, + .rid = "rid"}), rtp_sender_video_( std::make_unique(&fake_clock_, - rtp_module_->RtpSender(), - field_trials_)) { - rtp_module_->SetSequenceNumber(kSeqNum); - rtp_module_->SetStartTimestamp(0); + rtp_module_.RtpSender(), + env_.field_trials())) { + rtp_module_.SetSequenceNumber(kSeqNum); + rtp_module_.SetStartTimestamp(0); } void UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed( int version); protected: - rtc::AutoThread main_thread_; - const RtpRtcpInterface::Configuration config_; - test::ExplicitKeyValueConfig field_trials_{""}; + AutoThread main_thread_; SimulatedClock fake_clock_; + const Environment env_; LoopbackTransportTest transport_; RateLimiter retransmission_rate_limiter_; - std::unique_ptr rtp_module_; + ModuleRtpRtcpImpl2 rtp_module_; std::unique_ptr rtp_sender_video_; }; TEST_F(RtpSenderVideoTest, KeyFrameHasCVO) { uint8_t kFrame[kMaxPacketLength]; - rtp_module_->RegisterRtpHeaderExtension(VideoOrientation::Uri(), - kVideoRotationExtensionId); + rtp_module_.RegisterRtpHeaderExtension(VideoOrientation::Uri(), + kVideoRotationExtensionId); RTPVideoHeader hdr; hdr.rotation = kVideoRotation_0; hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, 
kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoRotation rotation; @@ -215,8 +239,8 @@ TEST_F(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) { const int64_t kPacketizationTimeMs = 100; const int64_t kEncodeStartDeltaMs = 10; const int64_t kEncodeFinishDeltaMs = 50; - rtp_module_->RegisterRtpHeaderExtension(VideoTimingExtension::Uri(), - kVideoTimingExtensionId); + rtp_module_.RegisterRtpHeaderExtension(VideoTimingExtension::Uri(), + kVideoTimingExtensionId); const Timestamp kCaptureTimestamp = fake_clock_.CurrentTime(); @@ -227,8 +251,8 @@ TEST_F(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) { fake_clock_.AdvanceTimeMilliseconds(kPacketizationTimeMs); hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, kCaptureTimestamp, - kFrame, sizeof(kFrame), hdr, + rtp_sender_video_->SendVideo(kPayloadType, kType, kTimestamp, + kCaptureTimestamp, kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoSendTiming timing; EXPECT_TRUE(transport_.last_sent_packet().GetExtension( @@ -238,22 +262,94 @@ TEST_F(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) { EXPECT_EQ(kEncodeFinishDeltaMs, timing.encode_finish_delta_ms); } +TEST_F(RtpSenderVideoTest, + WriteCorruptionExtensionIfHeaderContainsFrameInstrumentationData) { + uint8_t kFrame[kMaxPacketLength]; + rtp_module_.RegisterRtpHeaderExtension(CorruptionDetectionExtension::Uri(), + kCorruptionDetectionExtensionId); + RTPVideoHeader hdr; + hdr.frame_type = VideoFrameType::kVideoFrameKey; + hdr.frame_instrumentation_data = FrameInstrumentationData{ + .sequence_index = 130, // 128 + 2 + .communicate_upper_bits = false, + .std_dev = 2.0, + .luma_error_threshold = 3, + .chroma_error_threshold = 2, + .sample_values = {12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0}}; + CorruptionDetectionMessage message; + + rtp_sender_video_->SendVideo( + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); + + // Only written on last packet. + for (const RtpPacketReceived& packet : transport_.sent_packets()) { + if (&packet == &transport_.last_sent_packet()) { + EXPECT_TRUE(transport_.last_sent_packet() + .GetExtension(&message)); + } else { + EXPECT_FALSE(packet.HasExtension()); + } + } + EXPECT_EQ(message.sequence_index(), 2); + EXPECT_FALSE(message.interpret_sequence_index_as_most_significant_bits()); + EXPECT_NEAR(message.std_dev(), 2.0392156862745097, 0.041); // ~2% + EXPECT_EQ(message.luma_error_threshold(), 3); + EXPECT_EQ(message.chroma_error_threshold(), 2); + EXPECT_THAT(message.sample_values(), + ElementsAre(12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0)); +} + +TEST_F(RtpSenderVideoTest, + WriteCorruptionExtensionIfHeaderContainsFrameInstrumentationSyncData) { + uint8_t kFrame[kMaxPacketLength]; + rtp_module_.RegisterRtpHeaderExtension(CorruptionDetectionExtension::Uri(), + kCorruptionDetectionExtensionId); + RTPVideoHeader hdr; + hdr.frame_type = VideoFrameType::kVideoFrameKey; + hdr.frame_instrumentation_data = FrameInstrumentationSyncData{ + .sequence_index = 130, // 128 + 2 + .communicate_upper_bits = true, + }; + CorruptionDetectionMessage message; + + rtp_sender_video_->SendVideo( + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); + + // Only written on last packet. 
+ for (const RtpPacketReceived& packet : transport_.sent_packets()) { + if (&packet == &transport_.last_sent_packet()) { + EXPECT_TRUE(transport_.last_sent_packet() + .GetExtension(&message)); + } else { + EXPECT_FALSE(packet.HasExtension()); + } + } + EXPECT_EQ(message.sequence_index(), 1); + EXPECT_TRUE(message.interpret_sequence_index_as_most_significant_bits()); + EXPECT_DOUBLE_EQ(message.std_dev(), 0.0); + EXPECT_EQ(message.luma_error_threshold(), 0); + EXPECT_EQ(message.chroma_error_threshold(), 0); + EXPECT_THAT(message.sample_values(), IsEmpty()); +} + TEST_F(RtpSenderVideoTest, DeltaFrameHasCVOWhenChanged) { uint8_t kFrame[kMaxPacketLength]; - rtp_module_->RegisterRtpHeaderExtension(VideoOrientation::Uri(), - kVideoRotationExtensionId); + rtp_module_.RegisterRtpHeaderExtension(VideoOrientation::Uri(), + kVideoRotationExtensionId); RTPVideoHeader hdr; hdr.rotation = kVideoRotation_90; hdr.frame_type = VideoFrameType::kVideoFrameKey; EXPECT_TRUE(rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {})); hdr.rotation = kVideoRotation_0; hdr.frame_type = VideoFrameType::kVideoFrameDelta; EXPECT_TRUE(rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp + 1, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp + 1, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {})); VideoRotation rotation; @@ -264,19 +360,19 @@ TEST_F(RtpSenderVideoTest, DeltaFrameHasCVOWhenChanged) { TEST_F(RtpSenderVideoTest, DeltaFrameHasCVOWhenNonZero) { uint8_t kFrame[kMaxPacketLength]; - rtp_module_->RegisterRtpHeaderExtension(VideoOrientation::Uri(), - kVideoRotationExtensionId); + rtp_module_.RegisterRtpHeaderExtension(VideoOrientation::Uri(), + kVideoRotationExtensionId); RTPVideoHeader hdr; hdr.rotation = kVideoRotation_90; hdr.frame_type = VideoFrameType::kVideoFrameKey; EXPECT_TRUE(rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {})); hdr.frame_type = VideoFrameType::kVideoFrameDelta; EXPECT_TRUE(rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp + 1, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp + 1, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {})); VideoRotation rotation; @@ -489,15 +585,15 @@ TEST_F(RtpSenderVideoTest, constexpr int kRtxPayloadId = 101; constexpr size_t kMaxPacketSize = 1'000; - rtp_module_->SetMaxRtpPacketSize(kMaxPacketSize); - rtp_module_->RegisterRtpHeaderExtension(RtpMid::Uri(), 1); - rtp_module_->RegisterRtpHeaderExtension(RtpStreamId::Uri(), 2); - rtp_module_->RegisterRtpHeaderExtension(RepairedRtpStreamId::Uri(), 3); - rtp_module_->RegisterRtpHeaderExtension(AbsoluteSendTime::Uri(), 4); - rtp_module_->SetMid("long_mid"); - rtp_module_->SetRtxSendPayloadType(kRtxPayloadId, kMediaPayloadId); - rtp_module_->SetStorePacketsStatus(/*enable=*/true, 10); - rtp_module_->SetRtxSendStatus(kRtxRetransmitted); + rtp_module_.SetMaxRtpPacketSize(kMaxPacketSize); + rtp_module_.RegisterRtpHeaderExtension(RtpMid::Uri(), 1); + rtp_module_.RegisterRtpHeaderExtension(RtpStreamId::Uri(), 2); + rtp_module_.RegisterRtpHeaderExtension(RepairedRtpStreamId::Uri(), 3); + 
rtp_module_.RegisterRtpHeaderExtension(AbsoluteSendTime::Uri(), 4); + rtp_module_.SetMid("long_mid"); + rtp_module_.SetRtxSendPayloadType(kRtxPayloadId, kMediaPayloadId); + rtp_module_.SetStorePacketsStatus(/*enable=*/true, 10); + rtp_module_.SetRtxSendStatus(kRtxRetransmitted); RTPVideoHeader header; header.codec = kVideoCodecVP8; @@ -519,13 +615,13 @@ TEST_F(RtpSenderVideoTest, rb.SetMediaSsrc(kSsrc); rb.SetExtHighestSeqNum(transport_.last_sent_packet().SequenceNumber()); rr.AddReportBlock(rb); - rtp_module_->IncomingRtcpPacket(rr.Build()); + rtp_module_.IncomingRtcpPacket(rr.Build()); // Test for various frame size close to `kMaxPacketSize` to catch edge cases // when rtx packet barely fit. for (size_t frame_size = 800; frame_size < kMaxPacketSize; ++frame_size) { SCOPED_TRACE(frame_size); - rtc::ArrayView payload(kPayload, frame_size); + ArrayView payload(kPayload, frame_size); EXPECT_TRUE(rtp_sender_video_->SendVideo( kMediaPayloadId, /*codec_type=*/kVideoCodecVP8, /*rtp_timestamp=*/0, @@ -537,7 +633,7 @@ TEST_F(RtpSenderVideoTest, rtcp::Nack nack; nack.SetMediaSsrc(kSsrc); nack.SetPacketIds({media_packet.SequenceNumber()}); - rtp_module_->IncomingRtcpPacket(nack.Build()); + rtp_module_.IncomingRtcpPacket(nack.Build()); const RtpPacketReceived& rtx_packet = transport_.last_sent_packet(); EXPECT_EQ(rtx_packet.Ssrc(), kRtxSsrc); @@ -548,7 +644,7 @@ TEST_F(RtpSenderVideoTest, TEST_F(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; - rtp_module_->RegisterRtpHeaderExtension( + rtp_module_.RegisterRtpHeaderExtension( RtpDependencyDescriptorExtension::Uri(), kDependencyDescriptorId); FrameDependencyStructure video_structure; video_structure.num_decode_targets = 2; @@ -569,7 +665,7 @@ TEST_F(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) { DecodeTargetIndication::kSwitch}; hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_EQ(transport_.packets_sent(), 1); @@ -596,7 +692,7 @@ TEST_F(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) { DecodeTargetIndication::kRequired}; hdr.frame_type = VideoFrameType::kVideoFrameDelta; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_EQ(transport_.packets_sent(), 2); @@ -619,9 +715,9 @@ TEST_F(RtpSenderVideoTest, SkipsDependencyDescriptorOnDeltaFrameWhenFailedToAttachToKeyFrame) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; - rtp_module_->RegisterRtpHeaderExtension( + rtp_module_.RegisterRtpHeaderExtension( RtpDependencyDescriptorExtension::Uri(), kDependencyDescriptorId); - rtp_module_->SetExtmapAllowMixed(false); + rtp_module_.SetExtmapAllowMixed(false); FrameDependencyStructure video_structure; video_structure.num_decode_targets = 2; // Use many templates so that key dependency descriptor would be too large @@ -646,7 +742,7 @@ TEST_F(RtpSenderVideoTest, DecodeTargetIndication::kSwitch}; hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, 
kDefaultExpectedRetransmissionTime, {}); ASSERT_EQ(transport_.packets_sent(), 1); @@ -663,7 +759,7 @@ TEST_F(RtpSenderVideoTest, DecodeTargetIndication::kRequired}; hdr.frame_type = VideoFrameType::kVideoFrameDelta; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_EQ(transport_.packets_sent(), 2); @@ -674,7 +770,7 @@ TEST_F(RtpSenderVideoTest, TEST_F(RtpSenderVideoTest, PropagatesChainDiffsIntoDependencyDescriptor) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; - rtp_module_->RegisterRtpHeaderExtension( + rtp_module_.RegisterRtpHeaderExtension( RtpDependencyDescriptorExtension::Uri(), kDependencyDescriptorId); FrameDependencyStructure video_structure; video_structure.num_decode_targets = 2; @@ -693,7 +789,7 @@ TEST_F(RtpSenderVideoTest, PropagatesChainDiffsIntoDependencyDescriptor) { generic.chain_diffs = {2}; hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_EQ(transport_.packets_sent(), 1); @@ -709,7 +805,7 @@ TEST_F(RtpSenderVideoTest, PropagatesActiveDecodeTargetsIntoDependencyDescriptor) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; - rtp_module_->RegisterRtpHeaderExtension( + rtp_module_.RegisterRtpHeaderExtension( RtpDependencyDescriptorExtension::Uri(), kDependencyDescriptorId); FrameDependencyStructure video_structure; video_structure.num_decode_targets = 2; @@ -729,7 +825,7 @@ TEST_F(RtpSenderVideoTest, generic.chain_diffs = {1}; hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_EQ(transport_.packets_sent(), 1); @@ -744,7 +840,7 @@ TEST_F(RtpSenderVideoTest, SetDiffentVideoStructureAvoidsCollisionWithThePreviousStructure) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; - rtp_module_->RegisterRtpHeaderExtension( + rtp_module_.RegisterRtpHeaderExtension( RtpDependencyDescriptorExtension::Uri(), kDependencyDescriptorId); FrameDependencyStructure video_structure1; video_structure1.num_decode_targets = 2; @@ -768,7 +864,7 @@ TEST_F(RtpSenderVideoTest, hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SetVideoStructure(&video_structure1); rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); // Parse 1st extension. 
ASSERT_EQ(transport_.packets_sent(), 1); @@ -785,7 +881,7 @@ TEST_F(RtpSenderVideoTest, DecodeTargetIndication::kNotPresent}; hdr.frame_type = VideoFrameType::kVideoFrameDelta; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_EQ(transport_.packets_sent(), 2); @@ -798,7 +894,7 @@ TEST_F(RtpSenderVideoTest, hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SetVideoStructure(&video_structure2); rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); // Parse the 2nd key frame. ASSERT_EQ(transport_.packets_sent(), 3); @@ -822,20 +918,20 @@ TEST_F(RtpSenderVideoTest, static constexpr size_t kFrameSize = 100; uint8_t kFrame[kFrameSize] = {1, 2, 3, 4}; - rtp_module_->RegisterRtpHeaderExtension( + rtp_module_.RegisterRtpHeaderExtension( RtpDependencyDescriptorExtension::Uri(), kDependencyDescriptorId); - auto encryptor = rtc::make_ref_counted>(); + auto encryptor = make_ref_counted>(); ON_CALL(*encryptor, GetMaxCiphertextByteSize).WillByDefault(ReturnArg<1>()); ON_CALL(*encryptor, Encrypt) .WillByDefault(WithArgs<3, 5>( - [](rtc::ArrayView frame, size_t* bytes_written) { + [](ArrayView frame, size_t* bytes_written) { *bytes_written = frame.size(); return 0; })); RTPSenderVideo::Config config; config.clock = &fake_clock_; - config.rtp_sender = rtp_module_->RtpSender(); - config.field_trials = &field_trials_; + config.rtp_sender = rtp_module_.RtpSender(); + config.field_trials = &env_.field_trials(); config.frame_encryptor = encryptor.get(); RTPSenderVideo rtp_sender_video(config); @@ -852,7 +948,7 @@ TEST_F(RtpSenderVideoTest, EXPECT_CALL(*encryptor, Encrypt(_, _, Not(IsEmpty()), ElementsAreArray(kFrame), _, _)); - rtp_sender_video.SendVideo(kPayload, kType, kTimestamp, + rtp_sender_video.SendVideo(kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); // Double check packet with the dependency descriptor is sent. 
@@ -864,7 +960,7 @@ TEST_F(RtpSenderVideoTest, TEST_F(RtpSenderVideoTest, PopulateGenericFrameDescriptor) { const int64_t kFrameId = 100000; uint8_t kFrame[100]; - rtp_module_->RegisterRtpHeaderExtension( + rtp_module_.RegisterRtpHeaderExtension( RtpGenericFrameDescriptorExtension00::Uri(), kGenericDescriptorId); RTPVideoHeader hdr; @@ -876,7 +972,7 @@ TEST_F(RtpSenderVideoTest, PopulateGenericFrameDescriptor) { generic.dependencies.push_back(kFrameId - 500); hdr.frame_type = VideoFrameType::kVideoFrameDelta; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); RtpGenericFrameDescriptor descriptor_wire; @@ -892,12 +988,12 @@ TEST_F(RtpSenderVideoTest, PopulateGenericFrameDescriptor) { void RtpSenderVideoTest:: UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed( - int version) { + int /* version */) { const int64_t kFrameId = 100000; const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; - rtp_module_->RegisterRtpHeaderExtension( + rtp_module_.RegisterRtpHeaderExtension( RtpGenericFrameDescriptorExtension00::Uri(), kGenericDescriptorId); RTPVideoHeader hdr; @@ -910,7 +1006,7 @@ void RtpSenderVideoTest:: RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace(); generic.frame_id = kFrameId; hdr.frame_type = VideoFrameType::kVideoFrameDelta; - rtp_sender_video_->SendVideo(kPayload, VideoCodecType::kVideoCodecVP8, + rtp_sender_video_->SendVideo(kPayloadType, VideoCodecType::kVideoCodecVP8, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); @@ -933,7 +1029,7 @@ TEST_F(RtpSenderVideoTest, TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnKeyFrames) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; - rtp_module_->RegisterRtpHeaderExtension( + rtp_module_.RegisterRtpHeaderExtension( RtpVideoLayersAllocationExtension::Uri(), kVideoLayersAllocationExtensionId); @@ -950,7 +1046,7 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnKeyFrames) { RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoLayersAllocation sent_allocation; @@ -961,7 +1057,7 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnKeyFrames) { // Next key frame also have the allocation. 
rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_TRUE( transport_.last_sent_packet() @@ -972,7 +1068,7 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithoutResolutionSentOnDeltaWhenUpdated) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; - rtp_module_->RegisterRtpHeaderExtension( + rtp_module_.RegisterRtpHeaderExtension( RtpVideoLayersAllocationExtension::Uri(), kVideoLayersAllocationExtensionId); @@ -989,7 +1085,7 @@ TEST_F(RtpSenderVideoTest, RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_TRUE(transport_.last_sent_packet() .HasExtension()); @@ -997,7 +1093,7 @@ TEST_F(RtpSenderVideoTest, // No allocation sent on delta frame unless it has been updated. hdr.frame_type = VideoFrameType::kVideoFrameDelta; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_FALSE(transport_.last_sent_packet() .HasExtension()); @@ -1005,7 +1101,7 @@ TEST_F(RtpSenderVideoTest, // Update the allocation. rtp_sender_video_->SetVideoLayersAllocation(allocation); rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoLayersAllocation sent_allocation; @@ -1023,7 +1119,7 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnDeltaWhenSpatialLayerAdded) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; - rtp_module_->RegisterRtpHeaderExtension( + rtp_module_.RegisterRtpHeaderExtension( RtpVideoLayersAllocationExtension::Uri(), kVideoLayersAllocationExtensionId); @@ -1041,7 +1137,7 @@ TEST_F(RtpSenderVideoTest, RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_TRUE(transport_.last_sent_packet() .HasExtension()); @@ -1056,7 +1152,7 @@ TEST_F(RtpSenderVideoTest, rtp_sender_video_->SetVideoLayersAllocation(allocation); hdr.frame_type = VideoFrameType::kVideoFrameDelta; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoLayersAllocation sent_allocation; @@ -1071,7 +1167,7 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnLargeFrameRateChange) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; - rtp_module_->RegisterRtpHeaderExtension( + rtp_module_.RegisterRtpHeaderExtension( RtpVideoLayersAllocationExtension::Uri(), kVideoLayersAllocationExtensionId); @@ -1090,7 +1186,7 @@ TEST_F(RtpSenderVideoTest, RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, 
fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_TRUE(transport_.last_sent_packet() .HasExtension()); @@ -1100,7 +1196,7 @@ TEST_F(RtpSenderVideoTest, rtp_sender_video_->SetVideoLayersAllocation(allocation); hdr.frame_type = VideoFrameType::kVideoFrameDelta; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoLayersAllocation sent_allocation; @@ -1115,7 +1211,7 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithoutResolutionSentOnSmallFrameRateChange) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; - rtp_module_->RegisterRtpHeaderExtension( + rtp_module_.RegisterRtpHeaderExtension( RtpVideoLayersAllocationExtension::Uri(), kVideoLayersAllocationExtensionId); @@ -1134,7 +1230,7 @@ TEST_F(RtpSenderVideoTest, RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_TRUE(transport_.last_sent_packet() .HasExtension()); @@ -1144,7 +1240,7 @@ TEST_F(RtpSenderVideoTest, rtp_sender_video_->SetVideoLayersAllocation(allocation); hdr.frame_type = VideoFrameType::kVideoFrameDelta; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoLayersAllocation sent_allocation; @@ -1157,7 +1253,7 @@ TEST_F(RtpSenderVideoTest, TEST_F(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; - rtp_module_->RegisterRtpHeaderExtension( + rtp_module_.RegisterRtpHeaderExtension( RtpVideoLayersAllocationExtension::Uri(), kVideoLayersAllocationExtensionId); @@ -1173,7 +1269,7 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) { RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameDelta; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoLayersAllocation sent_allocation; @@ -1184,7 +1280,7 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) { // VideoLayersAllocation not sent on the next delta frame. rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_FALSE(transport_.last_sent_packet() .HasExtension()); @@ -1192,7 +1288,7 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) { // Update allocation. VideoLayesAllocation should be sent on the next frame. 
rtp_sender_video_->SetVideoLayersAllocation(allocation); rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_TRUE( transport_.last_sent_packet() @@ -1202,7 +1298,7 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) { TEST_F(RtpSenderVideoTest, VideoLayersAllocationNotSentOnHigherTemporalLayers) { const size_t kFrameSize = 100; uint8_t kFrame[kFrameSize]; - rtp_module_->RegisterRtpHeaderExtension( + rtp_module_.RegisterRtpHeaderExtension( RtpVideoLayersAllocationExtension::Uri(), kVideoLayersAllocationExtensionId); @@ -1223,7 +1319,7 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationNotSentOnHigherTemporalLayers) { vp8_header.temporalIdx = 1; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_FALSE(transport_.last_sent_packet() .HasExtension()); @@ -1231,7 +1327,7 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationNotSentOnHigherTemporalLayers) { // Send a delta frame on tl0. vp8_header.temporalIdx = 0; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_TRUE(transport_.last_sent_packet() .HasExtension()); @@ -1240,12 +1336,12 @@ TEST_F(RtpSenderVideoTest, VideoLayersAllocationNotSentOnHigherTemporalLayers) { TEST_F(RtpSenderVideoTest, AbsoluteCaptureTimeNotForwardedWhenImageHasNoCaptureTime) { uint8_t kFrame[kMaxPacketLength]; - rtp_module_->RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(), - kAbsoluteCaptureTimeExtensionId); + rtp_module_.RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(), + kAbsoluteCaptureTimeExtensionId); RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameKey; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, + rtp_sender_video_->SendVideo(kPayloadType, kType, kTimestamp, /*capture_time=*/Timestamp::MinusInfinity(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); @@ -1258,20 +1354,20 @@ TEST_F(RtpSenderVideoTest, TEST_F(RtpSenderVideoTest, AbsoluteCaptureTime) { rtp_sender_video_ = std::make_unique( - &fake_clock_, rtp_module_->RtpSender(), field_trials_); + &fake_clock_, rtp_module_.RtpSender(), env_.field_trials()); constexpr Timestamp kAbsoluteCaptureTimestamp = Timestamp::Millis(12345678); uint8_t kFrame[kMaxPacketLength]; - rtp_module_->RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(), - kAbsoluteCaptureTimeExtensionId); + rtp_module_.RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(), + kAbsoluteCaptureTimeExtensionId); RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, kAbsoluteCaptureTimestamp, kFrame, + kPayloadType, kType, kTimestamp, kAbsoluteCaptureTimestamp, kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); - absl::optional absolute_capture_time; + std::optional absolute_capture_time; // It is expected that one and only one of the packets sent on this video // frame has absolute capture time header extension. 
@@ -1296,21 +1392,21 @@ TEST_F(RtpSenderVideoTest, AbsoluteCaptureTime) { TEST_F(RtpSenderVideoTest, AbsoluteCaptureTimeWithExtensionProvided) { constexpr AbsoluteCaptureTime kAbsoluteCaptureTime = { 123, - absl::optional(456), + std::optional(456), }; uint8_t kFrame[kMaxPacketLength]; - rtp_module_->RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(), - kAbsoluteCaptureTimeExtensionId); + rtp_module_.RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(), + kAbsoluteCaptureTimeExtensionId); RTPVideoHeader hdr; hdr.frame_type = VideoFrameType::kVideoFrameKey; hdr.absolute_capture_time = kAbsoluteCaptureTime; - rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, + rtp_sender_video_->SendVideo(kPayloadType, kType, kTimestamp, /*capture_time=*/Timestamp::Millis(789), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); - absl::optional absolute_capture_time; + std::optional absolute_capture_time; // It is expected that one and only one of the packets sent on this video // frame has absolute capture time header extension. @@ -1331,8 +1427,8 @@ TEST_F(RtpSenderVideoTest, PopulatesPlayoutDelay) { // Single packet frames. constexpr size_t kPacketSize = 123; uint8_t kFrame[kPacketSize]; - rtp_module_->RegisterRtpHeaderExtension(PlayoutDelayLimits::Uri(), - kPlayoutDelayExtensionId); + rtp_module_.RegisterRtpHeaderExtension(PlayoutDelayLimits::Uri(), + kPlayoutDelayExtensionId); const VideoPlayoutDelay kExpectedDelay(TimeDelta::Millis(10), TimeDelta::Millis(20)); @@ -1344,7 +1440,7 @@ TEST_F(RtpSenderVideoTest, PopulatesPlayoutDelay) { vp8_header.temporalIdx = 0; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_FALSE( transport_.last_sent_packet().HasExtension()); @@ -1354,7 +1450,7 @@ TEST_F(RtpSenderVideoTest, PopulatesPlayoutDelay) { hdr.frame_type = VideoFrameType::kVideoFrameDelta; vp8_header.temporalIdx = 1; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); VideoPlayoutDelay received_delay = VideoPlayoutDelay(); ASSERT_TRUE(transport_.last_sent_packet().GetExtension( @@ -1363,10 +1459,10 @@ TEST_F(RtpSenderVideoTest, PopulatesPlayoutDelay) { // Set playout delay on a non-discardable frame, the extension should still // be populated since dilvery wasn't guaranteed on the last one. - hdr.playout_delay = absl::nullopt; // Indicates "no change". + hdr.playout_delay = std::nullopt; // Indicates "no change". vp8_header.temporalIdx = 0; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_TRUE(transport_.last_sent_packet().GetExtension( &received_delay)); @@ -1375,7 +1471,7 @@ TEST_F(RtpSenderVideoTest, PopulatesPlayoutDelay) { // The next frame does not need the extensions since it's delivery has // already been guaranteed. 
rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); EXPECT_FALSE( transport_.last_sent_packet().HasExtension()); @@ -1383,7 +1479,7 @@ TEST_F(RtpSenderVideoTest, PopulatesPlayoutDelay) { // Insert key-frame, we need to refresh the state here. hdr.frame_type = VideoFrameType::kVideoFrameKey; rtp_sender_video_->SendVideo( - kPayload, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, + kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); ASSERT_TRUE(transport_.last_sent_packet().GetExtension( &received_delay)); @@ -1391,7 +1487,7 @@ TEST_F(RtpSenderVideoTest, PopulatesPlayoutDelay) { } TEST_F(RtpSenderVideoTest, SendGenericVideo) { - const uint8_t kPayloadType = 127; + const uint8_t kPayloadTypeGeneric = 127; const VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric; const uint8_t kPayload[] = {47, 11, 32, 93, 89}; @@ -1399,10 +1495,10 @@ TEST_F(RtpSenderVideoTest, SendGenericVideo) { RTPVideoHeader video_header; video_header.frame_type = VideoFrameType::kVideoFrameKey; ASSERT_TRUE(rtp_sender_video_->SendVideo( - kPayloadType, kCodecType, 1234, fake_clock_.CurrentTime(), kPayload, - sizeof(kPayload), video_header, TimeDelta::PlusInfinity(), {})); + kPayloadTypeGeneric, kCodecType, 1234, fake_clock_.CurrentTime(), + kPayload, sizeof(kPayload), video_header, TimeDelta::PlusInfinity(), {})); - rtc::ArrayView sent_payload = + ArrayView sent_payload = transport_.last_sent_packet().payload(); uint8_t generic_header = sent_payload[0]; EXPECT_TRUE(generic_header & RtpFormatVideoGeneric::kKeyFrameBit); @@ -1413,8 +1509,9 @@ TEST_F(RtpSenderVideoTest, SendGenericVideo) { const uint8_t kDeltaPayload[] = {13, 42, 32, 93, 13}; video_header.frame_type = VideoFrameType::kVideoFrameDelta; ASSERT_TRUE(rtp_sender_video_->SendVideo( - kPayloadType, kCodecType, 1234, fake_clock_.CurrentTime(), kDeltaPayload, - sizeof(kDeltaPayload), video_header, TimeDelta::PlusInfinity(), {})); + kPayloadTypeGeneric, kCodecType, 1234, fake_clock_.CurrentTime(), + kDeltaPayload, sizeof(kDeltaPayload), video_header, + TimeDelta::PlusInfinity(), {})); sent_payload = sent_payload = transport_.last_sent_packet().payload(); generic_header = sent_payload[0]; @@ -1424,17 +1521,17 @@ TEST_F(RtpSenderVideoTest, SendGenericVideo) { } TEST_F(RtpSenderVideoTest, SendRawVideo) { - const uint8_t kPayloadType = 111; + const uint8_t kPayloadTypeRaw = 111; const uint8_t kPayload[] = {11, 22, 33, 44, 55}; // Send a frame. 
RTPVideoHeader video_header; video_header.frame_type = VideoFrameType::kVideoFrameKey; ASSERT_TRUE(rtp_sender_video_->SendVideo( - kPayloadType, absl::nullopt, 1234, fake_clock_.CurrentTime(), kPayload, + kPayloadTypeRaw, std::nullopt, 1234, fake_clock_.CurrentTime(), kPayload, sizeof(kPayload), video_header, TimeDelta::PlusInfinity(), {})); - rtc::ArrayView sent_payload = + ArrayView sent_payload = transport_.last_sent_packet().payload(); EXPECT_THAT(sent_payload, ElementsAreArray(kPayload)); } @@ -1443,26 +1540,24 @@ class RtpSenderVideoWithFrameTransformerTest : public ::testing::Test { public: RtpSenderVideoWithFrameTransformerTest() : time_controller_(kStartTime), + env_(CreateEnvironment(time_controller_.GetClock(), + time_controller_.GetTaskQueueFactory())), retransmission_rate_limiter_(time_controller_.GetClock(), 1000), - rtp_module_(ModuleRtpRtcpImpl2::Create([&] { - RtpRtcpInterface::Configuration config; - config.clock = time_controller_.GetClock(); - config.outgoing_transport = &transport_; - config.retransmission_rate_limiter = &retransmission_rate_limiter_; - config.field_trials = &field_trials_; - config.local_media_ssrc = kSsrc; - return config; - }())) { - rtp_module_->SetSequenceNumber(kSeqNum); - rtp_module_->SetStartTimestamp(0); + rtp_module_( + env_, + {.outgoing_transport = &transport_, + .retransmission_rate_limiter = &retransmission_rate_limiter_, + .local_media_ssrc = kSsrc}) { + rtp_module_.SetSequenceNumber(kSeqNum); + rtp_module_.SetStartTimestamp(0); } std::unique_ptr CreateSenderWithFrameTransformer( - rtc::scoped_refptr transformer) { + scoped_refptr transformer) { RTPSenderVideo::Config config; config.clock = time_controller_.GetClock(); - config.rtp_sender = rtp_module_->RtpSender(); - config.field_trials = &field_trials_; + config.rtp_sender = rtp_module_.RtpSender(); + config.field_trials = &env_.field_trials(); config.frame_transformer = transformer; config.task_queue_factory = time_controller_.GetTaskQueueFactory(); return std::make_unique(config); @@ -1470,10 +1565,10 @@ class RtpSenderVideoWithFrameTransformerTest : public ::testing::Test { protected: GlobalSimulatedTimeController time_controller_; - test::ExplicitKeyValueConfig field_trials_{""}; + const Environment env_; LoopbackTransportTest transport_; RateLimiter retransmission_rate_limiter_; - std::unique_ptr rtp_module_; + ModuleRtpRtcpImpl2 rtp_module_; }; std::unique_ptr CreateDefaultEncodedImage() { @@ -1487,7 +1582,7 @@ std::unique_ptr CreateDefaultEncodedImage() { TEST_F(RtpSenderVideoWithFrameTransformerTest, CreateSenderRegistersFrameTransformer) { auto mock_frame_transformer = - rtc::make_ref_counted>(); + make_ref_counted>(); EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback(_, kSsrc)); std::unique_ptr rtp_sender_video = @@ -1497,7 +1592,7 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, TEST_F(RtpSenderVideoWithFrameTransformerTest, DestroySenderUnregistersFrameTransformer) { auto mock_frame_transformer = - rtc::make_ref_counted>(); + make_ref_counted>(); std::unique_ptr rtp_sender_video = CreateSenderWithFrameTransformer(mock_frame_transformer); EXPECT_CALL(*mock_frame_transformer, @@ -1508,14 +1603,14 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, TEST_F(RtpSenderVideoWithFrameTransformerTest, SendEncodedImageTransformsFrame) { auto mock_frame_transformer = - rtc::make_ref_counted>(); + make_ref_counted>(); std::unique_ptr rtp_sender_video = CreateSenderWithFrameTransformer(mock_frame_transformer); auto encoded_image = CreateDefaultEncodedImage(); 
RTPVideoHeader video_header; EXPECT_CALL(*mock_frame_transformer, Transform); - rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, + rtp_sender_video->SendEncodedImage(kPayloadType, kType, kTimestamp, *encoded_image, video_header, kDefaultExpectedRetransmissionTime); } @@ -1523,7 +1618,7 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) TEST_F(RtpSenderVideoWithFrameTransformerTest, ValidPayloadTypes) { auto mock_frame_transformer = - rtc::make_ref_counted>(); + make_ref_counted>(); std::unique_ptr rtp_sender_video = CreateSenderWithFrameTransformer(mock_frame_transformer); auto encoded_image = CreateDefaultEncodedImage(); @@ -1548,8 +1643,8 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, ValidPayloadTypes) { TEST_F(RtpSenderVideoWithFrameTransformerTest, OnTransformedFrameSendsVideo) { auto mock_frame_transformer = - rtc::make_ref_counted>(); - rtc::scoped_refptr callback; + make_ref_counted>(); + scoped_refptr callback; EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback) .WillOnce(SaveArg<0>(&callback)); std::unique_ptr rtp_sender_video = @@ -1567,14 +1662,14 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, OnTransformedFrameSendsVideo) { auto encoder_queue = time_controller_.GetTaskQueueFactory()->CreateTaskQueue( "encoder_queue", TaskQueueFactory::Priority::NORMAL); encoder_queue->PostTask([&] { - rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, + rtp_sender_video->SendEncodedImage(kPayloadType, kType, kTimestamp, *encoded_image, video_header, kDefaultExpectedRetransmissionTime); }); time_controller_.AdvanceTime(TimeDelta::Zero()); EXPECT_EQ(transport_.packets_sent(), 1); encoder_queue->PostTask([&] { - rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, + rtp_sender_video->SendEncodedImage(kPayloadType, kType, kTimestamp, *encoded_image, video_header, kDefaultExpectedRetransmissionTime); }); @@ -1585,8 +1680,8 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, OnTransformedFrameSendsVideo) { TEST_F(RtpSenderVideoWithFrameTransformerTest, TransformOverheadCorrectlyAccountedFor) { auto mock_frame_transformer = - rtc::make_ref_counted>(); - rtc::scoped_refptr callback; + make_ref_counted>(); + scoped_refptr callback; EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback) .WillOnce(SaveArg<0>(&callback)); std::unique_ptr rtp_sender_video = @@ -1609,7 +1704,7 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, const int kFramesPerSecond = 25; for (int i = 0; i < kFramesPerSecond; ++i) { encoder_queue->PostTask([&] { - rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, + rtp_sender_video->SendEncodedImage(kPayloadType, kType, kTimestamp, *encoded_image, video_header, kDefaultExpectedRetransmissionTime); }); @@ -1622,7 +1717,7 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, TEST_F(RtpSenderVideoWithFrameTransformerTest, TransformableFrameMetadataHasCorrectValue) { auto mock_frame_transformer = - rtc::make_ref_counted>(); + make_ref_counted>(); std::unique_ptr rtp_sender_video = CreateSenderWithFrameTransformer(mock_frame_transformer); auto encoded_image = CreateDefaultEncodedImage(); @@ -1656,7 +1751,7 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, EXPECT_THAT(metadata.GetDecodeTargetIndications(), ElementsAre(DecodeTargetIndication::kSwitch)); }); - rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, + rtp_sender_video->SendEncodedImage(kPayloadType, kType, kTimestamp, *encoded_image, video_header, 
kDefaultExpectedRetransmissionTime); } @@ -1664,11 +1759,11 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, TEST_F(RtpSenderVideoWithFrameTransformerTest, TransformableFrameHasCorrectCaptureIdentifier) { auto mock_frame_transformer = - rtc::make_ref_counted>(); + make_ref_counted>(); std::unique_ptr rtp_sender_video = CreateSenderWithFrameTransformer(mock_frame_transformer); auto encoded_image = CreateDefaultEncodedImage(); - encoded_image->SetCaptureTimeIdentifier(Timestamp::Millis(1)); + encoded_image->SetPresentationTimestamp(Timestamp::Millis(1)); RTPVideoHeader video_header; EXPECT_CALL(*mock_frame_transformer, Transform) @@ -1677,10 +1772,10 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, auto* frame = static_cast( transformable_frame.get()); ASSERT_TRUE(frame); - EXPECT_EQ(frame->GetCaptureTimeIdentifier(), - encoded_image->CaptureTimeIdentifier()); + EXPECT_EQ(frame->GetPresentationTimestamp(), + encoded_image->PresentationTimestamp()); }); - rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, + rtp_sender_video->SendEncodedImage(kPayloadType, kType, kTimestamp, *encoded_image, video_header, kDefaultExpectedRetransmissionTime); } @@ -1688,8 +1783,8 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, TEST_F(RtpSenderVideoWithFrameTransformerTest, OnTransformedFrameSendsVideoWhenCloned) { auto mock_frame_transformer = - rtc::make_ref_counted>(); - rtc::scoped_refptr callback; + make_ref_counted>(); + scoped_refptr callback; EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback) .WillOnce(SaveArg<0>(&callback)); std::unique_ptr rtp_sender_video = @@ -1710,14 +1805,14 @@ TEST_F(RtpSenderVideoWithFrameTransformerTest, auto encoder_queue = time_controller_.GetTaskQueueFactory()->CreateTaskQueue( "encoder_queue", TaskQueueFactory::Priority::NORMAL); encoder_queue->PostTask([&] { - rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, + rtp_sender_video->SendEncodedImage(kPayloadType, kType, kTimestamp, *encoded_image, video_header, kDefaultExpectedRetransmissionTime); }); time_controller_.AdvanceTime(TimeDelta::Zero()); EXPECT_EQ(transport_.packets_sent(), 1); encoder_queue->PostTask([&] { - rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp, + rtp_sender_video->SendEncodedImage(kPayloadType, kType, kTimestamp, *encoded_image, video_header, kDefaultExpectedRetransmissionTime); }); diff --git a/modules/rtp_rtcp/source/rtp_sequence_number_map.cc b/modules/rtp_rtcp/source/rtp_sequence_number_map.cc index 441429d442..f4ffab753d 100644 --- a/modules/rtp_rtcp/source/rtp_sequence_number_map.cc +++ b/modules/rtp_rtcp/source/rtp_sequence_number_map.cc @@ -11,8 +11,12 @@ #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" #include +#include +#include +#include #include #include +#include #include "absl/algorithm/container.h" #include "rtc_base/checks.h" @@ -94,7 +98,7 @@ void RtpSequenceNumberMap::InsertFrame(uint16_t first_sequence_number, } } -absl::optional RtpSequenceNumberMap::Get( +std::optional RtpSequenceNumberMap::Get( uint16_t sequence_number) const { // To make the binary search easier to understand, we use the fact that // adding a constant offset to all elements, as well as to the searched @@ -105,7 +109,7 @@ absl::optional RtpSequenceNumberMap::Get( // element to 0 would serve this purpose. 
if (associations_.empty()) { - return absl::nullopt; + return std::nullopt; } const uint16_t offset = @@ -118,8 +122,8 @@ absl::optional RtpSequenceNumberMap::Get( const auto elem = absl::c_lower_bound(associations_, sequence_number, cmp); return elem != associations_.end() && elem->sequence_number == sequence_number - ? absl::optional(elem->info) - : absl::nullopt; + ? std::optional(elem->info) + : std::nullopt; } size_t RtpSequenceNumberMap::AssociationCountForTesting() const { diff --git a/modules/rtp_rtcp/source/rtp_sequence_number_map.h b/modules/rtp_rtcp/source/rtp_sequence_number_map.h index 8a036c25a4..d2bbd188dc 100644 --- a/modules/rtp_rtcp/source/rtp_sequence_number_map.h +++ b/modules/rtp_rtcp/source/rtp_sequence_number_map.h @@ -14,8 +14,7 @@ #include #include #include - -#include "absl/types/optional.h" +#include namespace webrtc { @@ -57,7 +56,7 @@ class RtpSequenceNumberMap final { size_t packet_count, uint32_t timestamp); - absl::optional Get(uint16_t sequence_number) const; + std::optional Get(uint16_t sequence_number) const; size_t AssociationCountForTesting() const; diff --git a/modules/rtp_rtcp/source/rtp_sequence_number_map_unittest.cc b/modules/rtp_rtcp/source/rtp_sequence_number_map_unittest.cc index 78c9e4a251..1598990ea7 100644 --- a/modules/rtp_rtcp/source/rtp_sequence_number_map_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_sequence_number_map_unittest.cc @@ -11,14 +11,13 @@ #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" #include +#include +#include #include #include -#include #include #include -#include "absl/memory/memory.h" -#include "absl/types/optional.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/sequence_number_util.h" #include "rtc_base/random.h" diff --git a/modules/rtp_rtcp/source/rtp_util.cc b/modules/rtp_rtcp/source/rtp_util.cc index cf1e54254a..4d802b6308 100644 --- a/modules/rtp_rtcp/source/rtp_util.cc +++ b/modules/rtp_rtcp/source/rtp_util.cc @@ -24,7 +24,7 @@ constexpr uint8_t kRtpVersion = 2; constexpr size_t kMinRtpPacketLen = 12; constexpr size_t kMinRtcpPacketLen = 4; -bool HasCorrectRtpVersion(rtc::ArrayView packet) { +bool HasCorrectRtpVersion(ArrayView packet) { return packet[0] >> 6 == kRtpVersion; } @@ -35,27 +35,27 @@ bool PayloadTypeIsReservedForRtcp(uint8_t payload_type) { } // namespace -bool IsRtpPacket(rtc::ArrayView packet) { +bool IsRtpPacket(ArrayView packet) { return packet.size() >= kMinRtpPacketLen && HasCorrectRtpVersion(packet) && !PayloadTypeIsReservedForRtcp(packet[1] & 0x7F); } -bool IsRtcpPacket(rtc::ArrayView packet) { +bool IsRtcpPacket(ArrayView packet) { return packet.size() >= kMinRtcpPacketLen && HasCorrectRtpVersion(packet) && PayloadTypeIsReservedForRtcp(packet[1] & 0x7F); } -int ParseRtpPayloadType(rtc::ArrayView rtp_packet) { +int ParseRtpPayloadType(ArrayView rtp_packet) { RTC_DCHECK(IsRtpPacket(rtp_packet)); return rtp_packet[1] & 0x7F; } -uint16_t ParseRtpSequenceNumber(rtc::ArrayView rtp_packet) { +uint16_t ParseRtpSequenceNumber(ArrayView rtp_packet) { RTC_DCHECK(IsRtpPacket(rtp_packet)); return ByteReader::ReadBigEndian(rtp_packet.data() + 2); } -uint32_t ParseRtpSsrc(rtc::ArrayView rtp_packet) { +uint32_t ParseRtpSsrc(ArrayView rtp_packet) { RTC_DCHECK(IsRtpPacket(rtp_packet)); return ByteReader::ReadBigEndian(rtp_packet.data() + 8); } diff --git a/modules/rtp_rtcp/source/rtp_util.h b/modules/rtp_rtcp/source/rtp_util.h index 835cfcd6c8..a183e0e3df 100644 --- a/modules/rtp_rtcp/source/rtp_util.h +++ b/modules/rtp_rtcp/source/rtp_util.h @@ -17,14 +17,14 @@ namespace webrtc { 
-bool IsRtcpPacket(rtc::ArrayView packet); -bool IsRtpPacket(rtc::ArrayView packet); +bool IsRtcpPacket(ArrayView packet); +bool IsRtpPacket(ArrayView packet); // Returns base rtp header fields of the rtp packet. // Behaviour is undefined when `!IsRtpPacket(rtp_packet)`. -int ParseRtpPayloadType(rtc::ArrayView rtp_packet); -uint16_t ParseRtpSequenceNumber(rtc::ArrayView rtp_packet); -uint32_t ParseRtpSsrc(rtc::ArrayView rtp_packet); +int ParseRtpPayloadType(ArrayView rtp_packet); +uint16_t ParseRtpSequenceNumber(ArrayView rtp_packet); +uint32_t ParseRtpSsrc(ArrayView rtp_packet); } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_util_unittest.cc b/modules/rtp_rtcp/source/rtp_util_unittest.cc index 3e23416ff4..1c72c259bc 100644 --- a/modules/rtp_rtcp/source/rtp_util_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_util_unittest.cc @@ -10,7 +10,9 @@ #include "modules/rtp_rtcp/source/rtp_util.h" -#include "test/gmock.h" +#include + +#include "test/gtest.h" namespace webrtc { namespace { diff --git a/modules/rtp_rtcp/source/rtp_video_header.cc b/modules/rtp_rtcp/source/rtp_video_header.cc index b07a7beec4..fd69e6a673 100644 --- a/modules/rtp_rtcp/source/rtp_video_header.cc +++ b/modules/rtp_rtcp/source/rtp_video_header.cc @@ -10,6 +10,14 @@ #include "modules/rtp_rtcp/source/rtp_video_header.h" +#include + +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_metadata.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" + namespace webrtc { RTPVideoHeader::GenericDescriptorInfo::GenericDescriptorInfo() = default; @@ -49,18 +57,20 @@ VideoFrameMetadata RTPVideoHeader::GetAsMetadata() const { switch (codec) { case VideoCodecType::kVideoCodecVP8: metadata.SetRTPVideoHeaderCodecSpecifics( - absl::get(video_type_header)); + std::get(video_type_header)); break; case VideoCodecType::kVideoCodecVP9: metadata.SetRTPVideoHeaderCodecSpecifics( - absl::get(video_type_header)); + std::get(video_type_header)); break; case VideoCodecType::kVideoCodecH264: metadata.SetRTPVideoHeaderCodecSpecifics( - absl::get(video_type_header)); + std::get(video_type_header)); break; - default: - // Codec-specifics are not supported for this codec. + // These codec types do not have codec-specifics. 
+ case VideoCodecType::kVideoCodecH265: + case VideoCodecType::kVideoCodecAV1: + case VideoCodecType::kVideoCodecGeneric: break; } return metadata; @@ -73,7 +83,7 @@ void RTPVideoHeader::SetFromMetadata(const VideoFrameMetadata& metadata) { rotation = metadata.GetRotation(); content_type = metadata.GetContentType(); if (!metadata.GetFrameId().has_value()) { - generic = absl::nullopt; + generic = std::nullopt; } else { generic.emplace(); generic->frame_id = metadata.GetFrameId().value(); @@ -90,15 +100,15 @@ void RTPVideoHeader::SetFromMetadata(const VideoFrameMetadata& metadata) { codec = metadata.GetCodec(); switch (codec) { case VideoCodecType::kVideoCodecVP8: - video_type_header = absl::get( + video_type_header = std::get( metadata.GetRTPVideoHeaderCodecSpecifics()); break; case VideoCodecType::kVideoCodecVP9: - video_type_header = absl::get( + video_type_header = std::get( metadata.GetRTPVideoHeaderCodecSpecifics()); break; case VideoCodecType::kVideoCodecH264: - video_type_header = absl::get( + video_type_header = std::get( metadata.GetRTPVideoHeaderCodecSpecifics()); break; default: diff --git a/modules/rtp_rtcp/source/rtp_video_header.h b/modules/rtp_rtcp/source/rtp_video_header.h index 3100d4d1e7..1e6579d447 100644 --- a/modules/rtp_rtcp/source/rtp_video_header.h +++ b/modules/rtp_rtcp/source/rtp_video_header.h @@ -12,10 +12,10 @@ #include #include +#include +#include #include "absl/container/inlined_vector.h" -#include "absl/types/optional.h" -#include "absl/types/variant.h" #include "api/rtp_headers.h" #include "api/transport/rtp/dependency_descriptor.h" #include "api/video/color_space.h" @@ -25,6 +25,7 @@ #include "api/video/video_frame_type.h" #include "api/video/video_rotation.h" #include "api/video/video_timing.h" +#include "common_video/frame_instrumentation_data.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" @@ -37,11 +38,11 @@ struct RTPVideoHeaderLegacyGeneric { uint16_t picture_id; }; -using RTPVideoTypeHeader = absl::variant; +using RTPVideoTypeHeader = std::variant; struct RTPVideoHeader { struct GenericDescriptorInfo { @@ -69,7 +70,7 @@ struct RTPVideoHeader { VideoFrameMetadata GetAsMetadata() const; void SetFromMetadata(const VideoFrameMetadata& metadata); - absl::optional generic; + std::optional generic; VideoFrameType frame_type = VideoFrameType::kEmptyFrame; uint16_t width = 0; @@ -82,18 +83,23 @@ struct RTPVideoHeader { uint8_t simulcastIdx = 0; VideoCodecType codec = VideoCodecType::kVideoCodecGeneric; - absl::optional playout_delay; + std::optional playout_delay; VideoSendTiming video_timing; - absl::optional color_space; + std::optional color_space; // This field is meant for media quality testing purpose only. When enabled it // carries the webrtc::VideoFrame id field from the sender to the receiver. - absl::optional video_frame_tracking_id; + std::optional video_frame_tracking_id; RTPVideoTypeHeader video_type_header; // When provided, is sent as is as an RTP header extension according to // http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time. // Otherwise, it is derived from other relevant information. - absl::optional absolute_capture_time; + std::optional absolute_capture_time; + + // Required for automatic corruption detection. 
+ std::optional< + std::variant> + frame_instrumentation_data; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_video_header_unittest.cc b/modules/rtp_rtcp/source/rtp_video_header_unittest.cc index 335fa1a8a0..b2e5bde16c 100644 --- a/modules/rtp_rtcp/source/rtp_video_header_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_video_header_unittest.cc @@ -10,8 +10,19 @@ #include "modules/rtp_rtcp/source/rtp_video_header.h" +#include +#include + +#include "absl/container/inlined_vector.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" #include "api/video/video_frame_metadata.h" #include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "test/gmock.h" #include "test/gtest.h" @@ -117,7 +128,7 @@ TEST(RTPVideoHeaderTest, FrameId_FromMetadata) { TEST(RTPVideoHeaderTest, FrameId_FromMetadataWhenFrameIdIsMissing) { VideoFrameMetadata metadata; - metadata.SetFrameId(absl::nullopt); + metadata.SetFrameId(std::nullopt); RTPVideoHeader video_header = RTPVideoHeader::FromMetadata(metadata); EXPECT_FALSE(video_header.generic.has_value()); } @@ -282,7 +293,7 @@ TEST(RTPVideoHeaderTest, RTPVideoHeaderCodecSpecifics_GetAsMetadata) { video_header.video_type_header = vp8_specifics; VideoFrameMetadata metadata = video_header.GetAsMetadata(); EXPECT_EQ( - absl::get(metadata.GetRTPVideoHeaderCodecSpecifics()) + std::get(metadata.GetRTPVideoHeaderCodecSpecifics()) .pictureId, vp8_specifics.pictureId); } @@ -294,7 +305,7 @@ TEST(RTPVideoHeaderTest, RTPVideoHeaderCodecSpecifics_GetAsMetadata) { video_header.video_type_header = vp9_specifics; VideoFrameMetadata metadata = video_header.GetAsMetadata(); EXPECT_EQ( - absl::get(metadata.GetRTPVideoHeaderCodecSpecifics()) + std::get(metadata.GetRTPVideoHeaderCodecSpecifics()) .max_picture_id, vp9_specifics.max_picture_id); } @@ -304,10 +315,10 @@ TEST(RTPVideoHeaderTest, RTPVideoHeaderCodecSpecifics_GetAsMetadata) { h264_specifics.nalu_type = 42; video_header.video_type_header = h264_specifics; VideoFrameMetadata metadata = video_header.GetAsMetadata(); - EXPECT_EQ(absl::get( - metadata.GetRTPVideoHeaderCodecSpecifics()) - .nalu_type, - h264_specifics.nalu_type); + EXPECT_EQ( + std::get(metadata.GetRTPVideoHeaderCodecSpecifics()) + .nalu_type, + h264_specifics.nalu_type); } } @@ -321,7 +332,7 @@ TEST(RTPVideoHeaderTest, RTPVideoHeaderCodecSpecifics_FromMetadata) { metadata.SetRTPVideoHeaderCodecSpecifics(vp8_specifics); RTPVideoHeader video_header = RTPVideoHeader::FromMetadata(metadata); EXPECT_EQ( - absl::get(video_header.video_type_header).pictureId, + std::get(video_header.video_type_header).pictureId, 42); } { @@ -331,7 +342,7 @@ TEST(RTPVideoHeaderTest, RTPVideoHeaderCodecSpecifics_FromMetadata) { vp9_specifics.max_picture_id = 42; metadata.SetRTPVideoHeaderCodecSpecifics(vp9_specifics); RTPVideoHeader video_header = RTPVideoHeader::FromMetadata(metadata); - EXPECT_EQ(absl::get(video_header.video_type_header) + EXPECT_EQ(std::get(video_header.video_type_header) .max_picture_id, 42); } @@ -342,7 +353,7 @@ TEST(RTPVideoHeaderTest, RTPVideoHeaderCodecSpecifics_FromMetadata) { metadata.SetRTPVideoHeaderCodecSpecifics(h264_specifics); RTPVideoHeader video_header = RTPVideoHeader::FromMetadata(metadata); EXPECT_EQ( - 
absl::get(video_header.video_type_header).nalu_type, + std::get(video_header.video_type_header).nalu_type, 42); } } diff --git a/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc b/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc index 6b86ee553b..57e0925a2f 100644 --- a/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc +++ b/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.cc @@ -13,16 +13,17 @@ #include #include +#include + #include "absl/algorithm/container.h" +#include "api/array_view.h" +#include "api/units/data_rate.h" #include "api/video/video_layers_allocation.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/leb128.h" #include "rtc_base/checks.h" namespace webrtc { - -constexpr RTPExtensionType RtpVideoLayersAllocationExtension::kId; - namespace { constexpr int kMaxNumRtpStreams = 4; @@ -109,7 +110,7 @@ SpatialLayersBitmasks SpatialLayersBitmasksPerRtpStream( // for the description of the format. bool RtpVideoLayersAllocationExtension::Write( - rtc::ArrayView data, + ArrayView data, const VideoLayersAllocation& allocation) { RTC_DCHECK(AllocationIsValid(allocation)); RTC_DCHECK_GE(data.size(), ValueSize(allocation)); @@ -178,7 +179,7 @@ bool RtpVideoLayersAllocationExtension::Write( } bool RtpVideoLayersAllocationExtension::Parse( - rtc::ArrayView data, + ArrayView data, VideoLayersAllocation* allocation) { if (data.empty() || allocation == nullptr) { return false; diff --git a/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h b/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h index d59c922b36..520a16371b 100644 --- a/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h +++ b/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h @@ -11,7 +11,11 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTP_VIDEO_LAYERS_ALLOCATION_EXTENSION_H_ #define MODULES_RTP_RTCP_SOURCE_RTP_VIDEO_LAYERS_ALLOCATION_EXTENSION_H_ +#include +#include + #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/rtp_parameters.h" #include "api/video/video_layers_allocation.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -26,10 +30,10 @@ class RtpVideoLayersAllocationExtension { return RtpExtension::kVideoLayersAllocationUri; } - static bool Parse(rtc::ArrayView data, + static bool Parse(ArrayView data, VideoLayersAllocation* allocation); static size_t ValueSize(const VideoLayersAllocation& allocation); - static bool Write(rtc::ArrayView data, + static bool Write(ArrayView data, const VideoLayersAllocation& allocation); }; diff --git a/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension_unittest.cc b/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension_unittest.cc index e05df1a266..a7a856d8be 100644 --- a/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_video_layers_allocation_extension_unittest.cc @@ -10,17 +10,19 @@ #include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h" +#include + +#include "api/units/data_rate.h" #include "api/video/video_layers_allocation.h" -#include "rtc_base/bit_buffer.h" #include "rtc_base/buffer.h" -#include "test/gmock.h" +#include "test/gtest.h" namespace webrtc { namespace { TEST(RtpVideoLayersAllocationExtension, WriteEmptyLayersAllocationReturnsTrue) { VideoLayersAllocation written_allocation; - rtc::Buffer buffer( + Buffer buffer( RtpVideoLayersAllocationExtension::ValueSize(written_allocation)); EXPECT_TRUE( 
RtpVideoLayersAllocationExtension::Write(buffer, written_allocation)); @@ -34,7 +36,7 @@ TEST(RtpVideoLayersAllocationExtension, written_allocation.resolution_and_frame_rate_is_valid = true; written_allocation.rtp_stream_index = 0; - rtc::Buffer buffer( + Buffer buffer( RtpVideoLayersAllocationExtension::ValueSize(written_allocation)); EXPECT_TRUE( RtpVideoLayersAllocationExtension::Write(buffer, written_allocation)); @@ -69,7 +71,7 @@ TEST(RtpVideoLayersAllocationExtension, /*frame_rate_fps*/ 0, }, }; - rtc::Buffer buffer( + Buffer buffer( RtpVideoLayersAllocationExtension::ValueSize(written_allocation)); EXPECT_TRUE( RtpVideoLayersAllocationExtension::Write(buffer, written_allocation)); @@ -103,7 +105,7 @@ TEST(RtpVideoLayersAllocationExtension, /*height*/ 0, /*frame_rate_fps*/ 0}, }; - rtc::Buffer buffer( + Buffer buffer( RtpVideoLayersAllocationExtension::ValueSize(written_allocation)); EXPECT_TRUE( RtpVideoLayersAllocationExtension::Write(buffer, written_allocation)); @@ -131,7 +133,7 @@ TEST(RtpVideoLayersAllocationExtension, /*height*/ 0, /*frame_rate_fps*/ 0}, }; - rtc::Buffer buffer( + Buffer buffer( RtpVideoLayersAllocationExtension::ValueSize(written_allocation)); EXPECT_TRUE( RtpVideoLayersAllocationExtension::Write(buffer, written_allocation)); @@ -159,7 +161,7 @@ TEST(RtpVideoLayersAllocationExtension, /*height*/ 0, /*frame_rate_fps*/ 0}, }; - rtc::Buffer buffer( + Buffer buffer( RtpVideoLayersAllocationExtension::ValueSize(written_allocation)); EXPECT_TRUE( RtpVideoLayersAllocationExtension::Write(buffer, written_allocation)); @@ -192,7 +194,7 @@ TEST(RtpVideoLayersAllocationExtension, /*frame_rate_fps*/ 0, }, }; - rtc::Buffer buffer( + Buffer buffer( RtpVideoLayersAllocationExtension::ValueSize(written_allocation)); EXPECT_TRUE( RtpVideoLayersAllocationExtension::Write(buffer, written_allocation)); @@ -228,7 +230,7 @@ TEST(RtpVideoLayersAllocationExtension, }, }; - rtc::Buffer buffer( + Buffer buffer( RtpVideoLayersAllocationExtension::ValueSize(written_allocation)); EXPECT_TRUE( RtpVideoLayersAllocationExtension::Write(buffer, written_allocation)); @@ -242,7 +244,7 @@ TEST(RtpVideoLayersAllocationExtension, WriteEmptyAllocationCanHaveAnyRtpStreamIndex) { VideoLayersAllocation written_allocation; written_allocation.rtp_stream_index = 1; - rtc::Buffer buffer( + Buffer buffer( RtpVideoLayersAllocationExtension::ValueSize(written_allocation)); EXPECT_TRUE( RtpVideoLayersAllocationExtension::Write(buffer, written_allocation)); @@ -270,7 +272,7 @@ TEST(RtpVideoLayersAllocationExtension, DiscardsInvalidHeight) { /*frame_rate_fps*/ 8, }, }; - rtc::Buffer buffer( + Buffer buffer( RtpVideoLayersAllocationExtension::ValueSize(written_allocation)); ASSERT_TRUE( RtpVideoLayersAllocationExtension::Write(buffer, written_allocation)); diff --git a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc index 4913445860..7b5f480aa1 100644 --- a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc +++ b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc @@ -10,24 +10,43 @@ #include "modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h" +#include +#include +#include +#include #include #include #include "absl/memory/memory.h" -#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" +#include "api/array_view.h" +#include "api/frame_transformer_interface.h" +#include "api/rtp_packet_infos.h" 
+#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/encoded_image.h" +#include "api/video/video_frame_metadata.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_timing.h" +#include "api/video_codecs/video_codec.h" +#include "modules/rtp_rtcp/source/frame_object.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" #include "rtc_base/checks.h" #include "rtc_base/thread.h" +#include "system_wrappers/include/clock.h" +#include "system_wrappers/include/ntp_time.h" namespace webrtc { -namespace { class TransformableVideoReceiverFrame : public TransformableVideoFrameInterface { public: TransformableVideoReceiverFrame(std::unique_ptr frame, uint32_t ssrc, RtpVideoFrameReceiver* receiver) - : frame_(std::move(frame)), + : TransformableVideoFrameInterface(Passkey()), + frame_(std::move(frame)), metadata_(frame_->GetRtpVideoHeader().GetAsMetadata()), receiver_(receiver) { metadata_.SetSsrc(ssrc); @@ -36,20 +55,20 @@ class TransformableVideoReceiverFrame ~TransformableVideoReceiverFrame() override = default; // Implements TransformableVideoFrameInterface. - rtc::ArrayView GetData() const override { + ArrayView GetData() const override { return *frame_->GetEncodedData(); } - void SetData(rtc::ArrayView data) override { + void SetData(ArrayView data) override { frame_->SetEncodedData( EncodedImageBuffer::Create(data.data(), data.size())); } uint8_t GetPayloadType() const override { return frame_->PayloadType(); } uint32_t GetSsrc() const override { return Metadata().GetSsrc(); } - uint32_t GetTimestamp() const override { return frame_->Timestamp(); } + uint32_t GetTimestamp() const override { return frame_->RtpTimestamp(); } void SetRTPTimestamp(uint32_t timestamp) override { - frame_->SetTimestamp(timestamp); + frame_->SetRtpTimestamp(timestamp); } bool IsKeyFrame() const override { @@ -75,6 +94,36 @@ class TransformableVideoReceiverFrame } Direction GetDirection() const override { return Direction::kReceiver; } + std::string GetMimeType() const override { + std::string mime_type = "video/"; + return mime_type + CodecTypeToPayloadString(frame_->codec_type()); + } + + std::optional ReceiveTime() const override { + return frame_->ReceivedTimestamp(); + } + + std::optional CaptureTime() const override { + if (auto& absolute_capture_time = + frame_->GetRtpVideoHeader().absolute_capture_time) { + if (absolute_capture_time->absolute_capture_timestamp) { + return Timestamp::Micros(UQ32x32ToInt64Us( + absolute_capture_time->absolute_capture_timestamp)); + } + } + return std::nullopt; + } + + std::optional SenderCaptureTimeOffset() const override { + if (auto& absolute_capture_time = + frame_->GetRtpVideoHeader().absolute_capture_time) { + if (absolute_capture_time->estimated_capture_clock_offset) { + return TimeDelta::Micros(Q32x32ToInt64Us( + *absolute_capture_time->estimated_capture_clock_offset)); + } + } + return std::nullopt; + } const RtpVideoFrameReceiver* Receiver() { return receiver_; } @@ -83,14 +132,13 @@ class TransformableVideoReceiverFrame VideoFrameMetadata metadata_; RtpVideoFrameReceiver* receiver_; }; -} // namespace RtpVideoStreamReceiverFrameTransformerDelegate:: RtpVideoStreamReceiverFrameTransformerDelegate( RtpVideoFrameReceiver* receiver, Clock* clock, - rtc::scoped_refptr frame_transformer, - rtc::Thread* network_thread, + scoped_refptr frame_transformer, + Thread* network_thread, uint32_t ssrc) : receiver_(receiver), 
frame_transformer_(std::move(frame_transformer)), @@ -101,7 +149,7 @@ RtpVideoStreamReceiverFrameTransformerDelegate:: void RtpVideoStreamReceiverFrameTransformerDelegate::Init() { RTC_DCHECK_RUN_ON(&network_sequence_checker_); frame_transformer_->RegisterTransformedFrameSinkCallback( - rtc::scoped_refptr(this), ssrc_); + scoped_refptr(this), ssrc_); } void RtpVideoStreamReceiverFrameTransformerDelegate::Reset() { @@ -114,21 +162,38 @@ void RtpVideoStreamReceiverFrameTransformerDelegate::Reset() { void RtpVideoStreamReceiverFrameTransformerDelegate::TransformFrame( std::unique_ptr frame) { RTC_DCHECK_RUN_ON(&network_sequence_checker_); - frame_transformer_->Transform( - std::make_unique(std::move(frame), ssrc_, - receiver_)); + if (short_circuit_) { + // Just pass the frame straight back. + receiver_->ManageFrame(std::move(frame)); + } else { + frame_transformer_->Transform( + std::make_unique(std::move(frame), + ssrc_, receiver_)); + } } void RtpVideoStreamReceiverFrameTransformerDelegate::OnTransformedFrame( std::unique_ptr frame) { - rtc::scoped_refptr delegate( - this); + scoped_refptr delegate(this); network_thread_->PostTask( [delegate = std::move(delegate), frame = std::move(frame)]() mutable { delegate->ManageFrame(std::move(frame)); }); } +void RtpVideoStreamReceiverFrameTransformerDelegate::StartShortCircuiting() { + scoped_refptr delegate(this); + network_thread_->PostTask([delegate = std::move(delegate)]() mutable { + delegate->StartShortCircuitingOnNetworkSequence(); + }); +} + +void RtpVideoStreamReceiverFrameTransformerDelegate:: + StartShortCircuitingOnNetworkSequence() { + RTC_DCHECK_RUN_ON(&network_sequence_checker_); + short_circuit_ = true; +} + void RtpVideoStreamReceiverFrameTransformerDelegate::ManageFrame( std::unique_ptr frame) { RTC_DCHECK_RUN_ON(&network_sequence_checker_); @@ -170,7 +235,7 @@ void RtpVideoStreamReceiverFrameTransformerDelegate::ManageFrame( VideoFrameMetadata metadata = transformed_frame->Metadata(); RTPVideoHeader video_header = RTPVideoHeader::FromMetadata(metadata); VideoSendTiming timing; - rtc::ArrayView data = transformed_frame->GetData(); + ArrayView data = transformed_frame->GetData(); int64_t receive_time = clock_->CurrentTime().ms(); receiver_->ManageFrame(std::make_unique( /*first_seq_num=*/metadata.GetFrameId().value_or(0), @@ -182,7 +247,8 @@ void RtpVideoStreamReceiverFrameTransformerDelegate::ManageFrame( /*rtp_timestamp=*/transformed_frame->GetTimestamp(), /*ntp_time_ms=*/0, timing, transformed_frame->GetPayloadType(), metadata.GetCodec(), metadata.GetRotation(), metadata.GetContentType(), - video_header, video_header.color_space, RtpPacketInfos(), + video_header, video_header.color_space, + video_header.frame_instrumentation_data, RtpPacketInfos(), EncodedImageBuffer::Create(data.data(), data.size()))); } } diff --git a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h index 62a42fdddf..1e6678b4ee 100644 --- a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h +++ b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h @@ -11,13 +11,16 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTP_VIDEO_STREAM_RECEIVER_FRAME_TRANSFORMER_DELEGATE_H_ #define MODULES_RTP_RTCP_SOURCE_RTP_VIDEO_STREAM_RECEIVER_FRAME_TRANSFORMER_DELEGATE_H_ +#include #include #include "api/frame_transformer_interface.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include 
"modules/rtp_rtcp/source/frame_object.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -40,8 +43,8 @@ class RtpVideoStreamReceiverFrameTransformerDelegate RtpVideoStreamReceiverFrameTransformerDelegate( RtpVideoFrameReceiver* receiver, Clock* clock, - rtc::scoped_refptr frame_transformer, - rtc::Thread* network_thread, + scoped_refptr frame_transformer, + Thread* network_thread, uint32_t ssrc); void Init(); @@ -55,6 +58,8 @@ class RtpVideoStreamReceiverFrameTransformerDelegate void OnTransformedFrame( std::unique_ptr frame) override; + void StartShortCircuiting() override; + // Delegates the call to RtpVideoFrameReceiver::ManageFrame on the // `network_thread_`. void ManageFrame(std::unique_ptr frame); @@ -63,13 +68,16 @@ class RtpVideoStreamReceiverFrameTransformerDelegate ~RtpVideoStreamReceiverFrameTransformerDelegate() override = default; private: + void StartShortCircuitingOnNetworkSequence(); + RTC_NO_UNIQUE_ADDRESS SequenceChecker network_sequence_checker_; RtpVideoFrameReceiver* receiver_ RTC_GUARDED_BY(network_sequence_checker_); - rtc::scoped_refptr frame_transformer_ + scoped_refptr frame_transformer_ RTC_GUARDED_BY(network_sequence_checker_); - rtc::Thread* const network_thread_; + Thread* const network_thread_; const uint32_t ssrc_; Clock* const clock_; + bool short_circuit_ RTC_GUARDED_BY(network_sequence_checker_) = false; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc index bbc1b62b5e..92860b1acf 100644 --- a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc +++ b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate_unittest.cc @@ -10,21 +10,39 @@ #include "modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h" -#include +#include #include +#include #include #include #include "absl/memory/memory.h" -#include "api/call/transport.h" +#include "api/array_view.h" +#include "api/frame_transformer_interface.h" +#include "api/make_ref_counted.h" +#include "api/rtp_headers.h" +#include "api/rtp_packet_info.h" +#include "api/rtp_packet_infos.h" +#include "api/scoped_refptr.h" +#include "api/test/mock_frame_transformer.h" #include "api/test/mock_transformable_video_frame.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "call/video_receive_stream.h" -#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" -#include "rtc_base/event.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame_metadata.h" +#include "api/video/video_rotation.h" +#include "api/video/video_timing.h" +#include "modules/rtp_rtcp/source/frame_object.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "rtc_base/thread.h" +#include "system_wrappers/include/clock.h" +#include "system_wrappers/include/ntp_time.h" #include "test/gmock.h" #include "test/gtest.h" -#include "test/mock_frame_transformer.h" namespace webrtc { namespace { @@ -50,8 +68,8 @@ std::unique_ptr CreateRtpFrameObject( /*last_packet_received_time=*/5, /*rtp_timestamp=*/6, 
/*ntp_time_ms=*/7, VideoSendTiming(), /*payload_type=*/8, video_header.codec, kVideoRotation_0, VideoContentType::UNSPECIFIED, video_header, - absl::nullopt, RtpPacketInfos({packet_info}), - EncodedImageBuffer::Create(0)); + /*color_space=*/std::nullopt, /*frame_instrumentation_data=*/std::nullopt, + RtpPacketInfos({packet_info}), EncodedImageBuffer::Create(0)); } std::unique_ptr CreateRtpFrameObject() { @@ -72,11 +90,11 @@ class TestRtpVideoFrameReceiver : public RtpVideoFrameReceiver { TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, RegisterTransformedFrameCallbackSinkOnInit) { TestRtpVideoFrameReceiver receiver; - auto frame_transformer(rtc::make_ref_counted()); + auto frame_transformer(make_ref_counted()); SimulatedClock clock(0); auto delegate( - rtc::make_ref_counted( - &receiver, &clock, frame_transformer, rtc::Thread::Current(), + make_ref_counted( + &receiver, &clock, frame_transformer, Thread::Current(), /*remote_ssrc*/ 1111)); EXPECT_CALL(*frame_transformer, RegisterTransformedFrameSinkCallback(testing::_, 1111)); @@ -86,11 +104,11 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, UnregisterTransformedFrameSinkCallbackOnReset) { TestRtpVideoFrameReceiver receiver; - auto frame_transformer(rtc::make_ref_counted()); + auto frame_transformer(make_ref_counted()); SimulatedClock clock(0); auto delegate( - rtc::make_ref_counted( - &receiver, &clock, frame_transformer, rtc::Thread::Current(), + make_ref_counted( + &receiver, &clock, frame_transformer, Thread::Current(), /*remote_ssrc*/ 1111)); EXPECT_CALL(*frame_transformer, UnregisterTransformedFrameSinkCallback(1111)); delegate->Reset(); @@ -99,11 +117,11 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, TransformFrame) { TestRtpVideoFrameReceiver receiver; auto frame_transformer( - rtc::make_ref_counted>()); + make_ref_counted>()); SimulatedClock clock(0); auto delegate( - rtc::make_ref_counted( - &receiver, &clock, frame_transformer, rtc::Thread::Current(), + make_ref_counted( + &receiver, &clock, frame_transformer, Thread::Current(), /*remote_ssrc*/ 1111)); auto frame = CreateRtpFrameObject(); EXPECT_CALL(*frame_transformer, Transform); @@ -112,18 +130,18 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, TransformFrame) { TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, ManageFrameOnTransformedFrame) { - rtc::AutoThread main_thread_; + AutoThread main_thread_; TestRtpVideoFrameReceiver receiver; auto mock_frame_transformer( - rtc::make_ref_counted>()); + make_ref_counted>()); SimulatedClock clock(0); std::vector csrcs = {234, 345, 456}; auto delegate = - rtc::make_ref_counted( - &receiver, &clock, mock_frame_transformer, rtc::Thread::Current(), + make_ref_counted( + &receiver, &clock, mock_frame_transformer, Thread::Current(), /*remote_ssrc*/ 1111); - rtc::scoped_refptr callback; + scoped_refptr callback; EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback) .WillOnce(SaveArg<0>(&callback)); delegate->Init(); @@ -138,26 +156,35 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, ON_CALL(*mock_frame_transformer, Transform) .WillByDefault( [&callback](std::unique_ptr frame) { + EXPECT_STRCASEEQ("video/Generic", frame->GetMimeType().c_str()); callback->OnTransformedFrame(std::move(frame)); }); delegate->TransformFrame(CreateRtpFrameObject(RTPVideoHeader(), csrcs)); - rtc::ThreadManager::ProcessAllMessageQueuesForTesting(); + 
ThreadManager::ProcessAllMessageQueuesForTesting(); } TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, TransformableFrameMetadataHasCorrectValue) { TestRtpVideoFrameReceiver receiver; auto mock_frame_transformer = - rtc::make_ref_counted>(); + make_ref_counted>(); SimulatedClock clock(0); auto delegate = - rtc::make_ref_counted( - &receiver, &clock, mock_frame_transformer, rtc::Thread::Current(), - 1111); + make_ref_counted( + &receiver, &clock, mock_frame_transformer, Thread::Current(), 1111); delegate->Init(); RTPVideoHeader video_header; video_header.width = 1280u; video_header.height = 720u; + + Timestamp capture_time = Timestamp::Millis(1234); + TimeDelta sender_capture_time_offset = TimeDelta::Millis(56); + AbsoluteCaptureTime absolute_capture_time = { + .absolute_capture_timestamp = Int64MsToUQ32x32(capture_time.ms()), + .estimated_capture_clock_offset = + Int64MsToQ32x32(sender_capture_time_offset.ms())}; + video_header.absolute_capture_time = absolute_capture_time; + RTPVideoHeader::GenericDescriptorInfo& generic = video_header.generic.emplace(); generic.frame_id = 10; @@ -187,24 +214,60 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, EXPECT_THAT(metadata.GetDecodeTargetIndications(), ElementsAre(DecodeTargetIndication::kSwitch)); EXPECT_EQ(metadata.GetCsrcs(), csrcs); + ASSERT_TRUE(frame->ReceiveTime().has_value()); + EXPECT_GE(frame->ReceiveTime()->us(), 0); + EXPECT_EQ(frame->CaptureTime(), capture_time); + EXPECT_EQ(frame->SenderCaptureTimeOffset(), sender_capture_time_offset); }); // The delegate creates a transformable frame from the RtpFrameObject. delegate->TransformFrame(CreateRtpFrameObject(video_header, csrcs)); } +TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, + TransformableFrameWithNegativeSenderCaptureTimeOffsetIsCorrect) { + TestRtpVideoFrameReceiver receiver; + auto mock_frame_transformer = + make_ref_counted>(); + SimulatedClock clock(0); + auto delegate = + make_ref_counted( + &receiver, &clock, mock_frame_transformer, Thread::Current(), 1111); + delegate->Init(); + RTPVideoHeader video_header; + Timestamp capture_time = Timestamp::Millis(1234); + TimeDelta sender_capture_time_offset = TimeDelta::Millis(-56); + AbsoluteCaptureTime absolute_capture_time = { + .absolute_capture_timestamp = Int64MsToUQ32x32(capture_time.ms()), + .estimated_capture_clock_offset = + Int64MsToQ32x32(sender_capture_time_offset.ms())}; + video_header.absolute_capture_time = absolute_capture_time; + + EXPECT_CALL(*mock_frame_transformer, Transform) + .WillOnce([&](std::unique_ptr + transformable_frame) { + auto frame = + absl::WrapUnique(static_cast( + transformable_frame.release())); + ASSERT_TRUE(frame); + EXPECT_GE(frame->ReceiveTime()->us(), 0); + EXPECT_EQ(frame->CaptureTime(), capture_time); + EXPECT_EQ(frame->SenderCaptureTimeOffset(), sender_capture_time_offset); + }); + delegate->TransformFrame(CreateRtpFrameObject(video_header, /*csrcs=*/{})); +} + TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, TransformableFrameMetadataHasCorrectValueAfterSetMetadata) { - rtc::AutoThread main_thread; + AutoThread main_thread; TestRtpVideoFrameReceiver receiver; auto mock_frame_transformer = - rtc::make_ref_counted>(); + make_ref_counted>(); SimulatedClock clock(1000); auto delegate = - rtc::make_ref_counted( - &receiver, &clock, mock_frame_transformer, rtc::Thread::Current(), - 1111); + make_ref_counted( + &receiver, &clock, mock_frame_transformer, Thread::Current(), 1111); - rtc::scoped_refptr callback; + scoped_refptr callback; 
EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback) .WillOnce(SaveArg<0>(&callback)); delegate->Init(); @@ -221,8 +284,8 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, // Checks that the recieved RTPFrameObject has the new metadata. EXPECT_CALL(receiver, ManageFrame) .WillOnce([&](std::unique_ptr frame) { - const absl::optional& - descriptor = frame->GetRtpVideoHeader().generic; + const std::optional& descriptor = + frame->GetRtpVideoHeader().generic; if (!descriptor.has_value()) { ADD_FAILURE() << "GenericDescriptorInfo in RTPVideoHeader doesn't " "have a value."; @@ -253,19 +316,19 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, // The delegate creates a transformable frame from the RtpFrameObject. delegate->TransformFrame(CreateRtpFrameObject(video_header, csrcs)); - rtc::ThreadManager::ProcessAllMessageQueuesForTesting(); + ThreadManager::ProcessAllMessageQueuesForTesting(); } TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, SenderFramesAreConvertedToReceiverFrames) { - rtc::AutoThread main_thread_; + AutoThread main_thread_; TestRtpVideoFrameReceiver receiver; auto mock_frame_transformer = - rtc::make_ref_counted>(); + make_ref_counted>(); SimulatedClock clock(/*initial_timestamp_us=*/12345000); auto delegate = - rtc::make_ref_counted( - &receiver, &clock, mock_frame_transformer, rtc::Thread::Current(), + make_ref_counted( + &receiver, &clock, mock_frame_transformer, Thread::Current(), /*remote_ssrc*/ 1111); auto mock_sender_frame = @@ -276,12 +339,12 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, metadata.SetCodec(kVideoCodecVP8); metadata.SetRTPVideoHeaderCodecSpecifics(RTPVideoHeaderVP8()); ON_CALL(*mock_sender_frame, Metadata).WillByDefault(Return(metadata)); - rtc::scoped_refptr buffer = + scoped_refptr buffer = EncodedImageBuffer::Create(1); ON_CALL(*mock_sender_frame, GetData) - .WillByDefault(Return(rtc::ArrayView(*buffer))); + .WillByDefault(Return(ArrayView(*buffer))); - rtc::scoped_refptr callback; + scoped_refptr callback; EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback) .WillOnce(SaveArg<0>(&callback)); delegate->Init(); @@ -293,34 +356,34 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, EXPECT_EQ(frame->ReceivedTime(), 12345); }); callback->OnTransformedFrame(std::move(mock_sender_frame)); - rtc::ThreadManager::ProcessAllMessageQueuesForTesting(); + ThreadManager::ProcessAllMessageQueuesForTesting(); } TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, ManageFrameFromDifferentReceiver) { - rtc::AutoThread main_thread_; + AutoThread main_thread_; std::vector csrcs = {234, 345, 456}; const int frame_id = 11; TestRtpVideoFrameReceiver receiver1; auto mock_frame_transformer1( - rtc::make_ref_counted>()); + make_ref_counted>()); SimulatedClock clock(0); auto delegate1 = - rtc::make_ref_counted( - &receiver1, &clock, mock_frame_transformer1, rtc::Thread::Current(), + make_ref_counted( + &receiver1, &clock, mock_frame_transformer1, Thread::Current(), /*remote_ssrc*/ 1111); TestRtpVideoFrameReceiver receiver2; auto mock_frame_transformer2( - rtc::make_ref_counted>()); + make_ref_counted>()); auto delegate2 = - rtc::make_ref_counted( - &receiver2, &clock, mock_frame_transformer2, rtc::Thread::Current(), + make_ref_counted( + &receiver2, &clock, mock_frame_transformer2, Thread::Current(), /*remote_ssrc*/ 1111); delegate1->Init(); - rtc::scoped_refptr callback_for_2; + scoped_refptr callback_for_2; EXPECT_CALL(*mock_frame_transformer2, 
RegisterTransformedFrameSinkCallback) .WillOnce(SaveArg<0>(&callback_for_2)); delegate2->Init(); @@ -345,7 +408,29 @@ TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, CreateRtpFrameObject(RTPVideoHeader(), csrcs); untransformed_frame->SetId(frame_id); delegate1->TransformFrame(std::move(untransformed_frame)); - rtc::ThreadManager::ProcessAllMessageQueuesForTesting(); + ThreadManager::ProcessAllMessageQueuesForTesting(); +} + +TEST(RtpVideoStreamReceiverFrameTransformerDelegateTest, + ShortCircuitingSkipsTransform) { + AutoThread main_thread_; + TestRtpVideoFrameReceiver receiver; + auto mock_frame_transformer = + make_ref_counted>(); + SimulatedClock clock(0); + auto delegate = + make_ref_counted( + &receiver, &clock, mock_frame_transformer, Thread::Current(), 1111); + delegate->Init(); + + delegate->StartShortCircuiting(); + ThreadManager::ProcessAllMessageQueuesForTesting(); + + // Will not call the actual transformer. + EXPECT_CALL(*mock_frame_transformer, Transform).Times(0); + // Will pass the frame straight to the reciever. + EXPECT_CALL(receiver, ManageFrame); + delegate->TransformFrame(CreateRtpFrameObject()); } } // namespace diff --git a/modules/rtp_rtcp/source/source_tracker.cc b/modules/rtp_rtcp/source/source_tracker.cc index 51e8f1cd38..fdd465249d 100644 --- a/modules/rtp_rtcp/source/source_tracker.cc +++ b/modules/rtp_rtcp/source/source_tracker.cc @@ -10,44 +10,40 @@ #include "modules/rtp_rtcp/source/source_tracker.h" -#include +#include #include +#include +#include "api/rtp_packet_info.h" +#include "api/rtp_packet_infos.h" +#include "api/transport/rtp/rtp_source.h" +#include "api/units/timestamp.h" +#include "rtc_base/checks.h" #include "rtc_base/trace_event.h" +#include "system_wrappers/include/clock.h" namespace webrtc { -SourceTracker::SourceTracker(Clock* clock) - : worker_thread_(TaskQueueBase::Current()), clock_(clock) { - RTC_DCHECK(worker_thread_); +SourceTracker::SourceTracker(Clock* clock) : clock_(clock) { RTC_DCHECK(clock_); } -void SourceTracker::OnFrameDelivered(RtpPacketInfos packet_infos) { +void SourceTracker::OnFrameDelivered(const RtpPacketInfos& packet_infos, + Timestamp delivery_time) { + TRACE_EVENT0("webrtc", "SourceTracker::OnFrameDelivered"); if (packet_infos.empty()) { return; } - - Timestamp now = clock_->CurrentTime(); - worker_thread_->PostTask( - SafeTask(worker_safety_.flag(), - [this, packet_infos = std::move(packet_infos), now]() { - RTC_DCHECK_RUN_ON(worker_thread_); - OnFrameDeliveredInternal(now, packet_infos); - })); -} - -void SourceTracker::OnFrameDeliveredInternal( - Timestamp now, - const RtpPacketInfos& packet_infos) { - TRACE_EVENT0("webrtc", "SourceTracker::OnFrameDelivered"); + if (delivery_time.IsInfinite()) { + delivery_time = clock_->CurrentTime(); + } for (const RtpPacketInfo& packet_info : packet_infos) { for (uint32_t csrc : packet_info.csrcs()) { SourceKey key(RtpSourceType::CSRC, csrc); SourceEntry& entry = UpdateEntry(key); - entry.timestamp = now; + entry.timestamp = delivery_time; entry.audio_level = packet_info.audio_level(); entry.absolute_capture_time = packet_info.absolute_capture_time(); entry.local_capture_clock_offset = @@ -58,19 +54,17 @@ void SourceTracker::OnFrameDeliveredInternal( SourceKey key(RtpSourceType::SSRC, packet_info.ssrc()); SourceEntry& entry = UpdateEntry(key); - entry.timestamp = now; + entry.timestamp = delivery_time; entry.audio_level = packet_info.audio_level(); entry.absolute_capture_time = packet_info.absolute_capture_time(); entry.local_capture_clock_offset = 
packet_info.local_capture_clock_offset(); entry.rtp_timestamp = packet_info.rtp_timestamp(); } - PruneEntries(now); + PruneEntries(delivery_time); } std::vector SourceTracker::GetSources() const { - RTC_DCHECK_RUN_ON(worker_thread_); - PruneEntries(clock_->CurrentTime()); std::vector sources; @@ -107,6 +101,9 @@ SourceTracker::SourceEntry& SourceTracker::UpdateEntry(const SourceKey& key) { } void SourceTracker::PruneEntries(Timestamp now) const { + if (now < Timestamp::Zero() + kTimeout) { + return; + } Timestamp prune = now - kTimeout; while (!list_.empty() && list_.back().second.timestamp < prune) { map_.erase(list_.back().first); diff --git a/modules/rtp_rtcp/source/source_tracker.h b/modules/rtp_rtcp/source/source_tracker.h index 30a5b8a4fa..9b599383c5 100644 --- a/modules/rtp_rtcp/source/source_tracker.h +++ b/modules/rtp_rtcp/source/source_tracker.h @@ -11,20 +11,19 @@ #ifndef MODULES_RTP_RTCP_SOURCE_SOURCE_TRACKER_H_ #define MODULES_RTP_RTCP_SOURCE_SOURCE_TRACKER_H_ +#include #include #include +#include #include #include #include -#include "absl/types/optional.h" +#include "api/rtp_headers.h" #include "api/rtp_packet_infos.h" -#include "api/task_queue/pending_task_safety_flag.h" -#include "api/task_queue/task_queue_base.h" #include "api/transport/rtp/rtp_source.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "rtc_base/time_utils.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -34,6 +33,7 @@ namespace webrtc { // - https://w3c.github.io/webrtc-pc/#dom-rtcrtpcontributingsource // - https://w3c.github.io/webrtc-pc/#dom-rtcrtpsynchronizationsource // +// This class is thread unsafe. class SourceTracker { public: // Amount of time before the entry associated with an update is removed. See: @@ -49,7 +49,8 @@ class SourceTracker { // Updates the source entries when a frame is delivered to the // RTCRtpReceiver's MediaStreamTrack. - void OnFrameDelivered(RtpPacketInfos packet_infos); + void OnFrameDelivered(const RtpPacketInfos& packet_infos, + Timestamp delivery_time = Timestamp::MinusInfinity()); // Returns an `RtpSource` for each unique SSRC and CSRC identifier updated in // the last `kTimeoutMs` milliseconds. Entries appear in reverse chronological @@ -91,19 +92,19 @@ class SourceTracker { // the most recent packet used to assemble the frame associated with // `timestamp`. May be absent. Only relevant for audio receivers. See the // specs for `RTCRtpContributingSource` for more info. - absl::optional audio_level; + std::optional audio_level; // Absolute capture time header extension received or interpolated from the // most recent packet used to assemble the frame. For more info see // https://webrtc.org/experiments/rtp-hdrext/abs-capture-time/ - absl::optional absolute_capture_time; + std::optional absolute_capture_time; // Clock offset between the local clock and the capturer's clock. // Do not confuse with `AbsoluteCaptureTime::estimated_capture_clock_offset` // which instead represents the clock offset between a remote sender and the // capturer. The following holds: // Capture's NTP Clock = Local NTP Clock + Local-Capture Clock Offset - absl::optional local_capture_clock_offset; + std::optional local_capture_clock_offset; // RTP timestamp of the most recent packet used to assemble the frame // associated with `timestamp`. 
@@ -116,27 +117,21 @@ class SourceTracker { SourceKeyHasher, SourceKeyComparator>; - void OnFrameDeliveredInternal(Timestamp now, - const RtpPacketInfos& packet_infos) - RTC_RUN_ON(worker_thread_); - // Updates an entry by creating it (if it didn't previously exist) and moving // it to the front of the list. Returns a reference to the entry. - SourceEntry& UpdateEntry(const SourceKey& key) RTC_RUN_ON(worker_thread_); + SourceEntry& UpdateEntry(const SourceKey& key); // Removes entries that have timed out. Marked as "const" so that we can do // pruning in getters. - void PruneEntries(Timestamp now) const RTC_RUN_ON(worker_thread_); + void PruneEntries(Timestamp now) const; - TaskQueueBase* const worker_thread_; Clock* const clock_; // Entries are stored in reverse chronological order (i.e. with the most // recently updated entries appearing first). Mutability is needed for timeout // pruning in const functions. - mutable SourceList list_ RTC_GUARDED_BY(worker_thread_); - mutable SourceMap map_ RTC_GUARDED_BY(worker_thread_); - ScopedTaskSafety worker_safety_; + mutable SourceList list_; + mutable SourceMap map_; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/source_tracker_unittest.cc b/modules/rtp_rtcp/source/source_tracker_unittest.cc index e14a389534..8c6cc500b6 100644 --- a/modules/rtp_rtcp/source/source_tracker_unittest.cc +++ b/modules/rtp_rtcp/source/source_tracker_unittest.cc @@ -10,18 +10,23 @@ #include "modules/rtp_rtcp/source/source_tracker.h" -#include +#include +#include #include +#include #include #include #include #include #include -#include "absl/types/optional.h" #include "api/rtp_headers.h" #include "api/rtp_packet_info.h" #include "api/rtp_packet_infos.h" +#include "api/transport/rtp/rtp_source.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "system_wrappers/include/clock.h" #include "system_wrappers/include/ntp_time.h" #include "test/gmock.h" #include "test/gtest.h" @@ -167,9 +172,9 @@ class SourceTrackerRandomTest return std::uniform_int_distribution()(generator_); } - absl::optional GenerateAudioLevel() { + std::optional GenerateAudioLevel() { if (std::bernoulli_distribution(0.25)(generator_)) { - return absl::nullopt; + return std::nullopt; } // Workaround for std::uniform_int_distribution not being allowed. 
@@ -177,9 +182,9 @@ class SourceTrackerRandomTest std::uniform_int_distribution()(generator_)); } - absl::optional GenerateAbsoluteCaptureTime() { + std::optional GenerateAbsoluteCaptureTime() { if (std::bernoulli_distribution(0.25)(generator_)) { - return absl::nullopt; + return std::nullopt; } AbsoluteCaptureTime value; @@ -188,7 +193,7 @@ class SourceTrackerRandomTest std::uniform_int_distribution()(generator_); if (std::bernoulli_distribution(0.5)(generator_)) { - value.estimated_capture_clock_offset = absl::nullopt; + value.estimated_capture_clock_offset = std::nullopt; } else { value.estimated_capture_clock_offset = std::uniform_int_distribution()(generator_); @@ -197,9 +202,9 @@ class SourceTrackerRandomTest return value; } - absl::optional GenerateLocalCaptureClockOffset() { + std::optional GenerateLocalCaptureClockOffset() { if (std::bernoulli_distribution(0.5)(generator_)) { - return absl::nullopt; + return std::nullopt; } return TimeDelta::Millis( UQ32x32ToInt64Ms(std::uniform_int_distribution()(generator_))); @@ -263,12 +268,12 @@ TEST(SourceTrackerTest, OnFrameDeliveredRecordsSourcesDistinctSsrcs) { constexpr uint32_t kCsrcs2 = 22; constexpr uint32_t kRtpTimestamp0 = 40; constexpr uint32_t kRtpTimestamp1 = 50; - constexpr absl::optional kAudioLevel0 = 50; - constexpr absl::optional kAudioLevel1 = 20; - constexpr absl::optional kAbsoluteCaptureTime = + constexpr std::optional kAudioLevel0 = 50; + constexpr std::optional kAudioLevel1 = 20; + constexpr std::optional kAbsoluteCaptureTime = AbsoluteCaptureTime{/*absolute_capture_timestamp=*/12, - /*estimated_capture_clock_offset=*/absl::nullopt}; - constexpr absl::optional kLocalCaptureClockOffset = absl::nullopt; + /*estimated_capture_clock_offset=*/std::nullopt}; + constexpr std::optional kLocalCaptureClockOffset = std::nullopt; constexpr Timestamp kReceiveTime0 = Timestamp::Millis(60); constexpr Timestamp kReceiveTime1 = Timestamp::Millis(70); @@ -318,13 +323,13 @@ TEST(SourceTrackerTest, OnFrameDeliveredRecordsSourcesSameSsrc) { constexpr uint32_t kRtpTimestamp0 = 40; constexpr uint32_t kRtpTimestamp1 = 45; constexpr uint32_t kRtpTimestamp2 = 50; - constexpr absl::optional kAudioLevel0 = 50; - constexpr absl::optional kAudioLevel1 = 20; - constexpr absl::optional kAudioLevel2 = 10; - constexpr absl::optional kAbsoluteCaptureTime = + constexpr std::optional kAudioLevel0 = 50; + constexpr std::optional kAudioLevel1 = 20; + constexpr std::optional kAudioLevel2 = 10; + constexpr std::optional kAbsoluteCaptureTime = AbsoluteCaptureTime{/*absolute_capture_timestamp=*/12, - /*estimated_capture_clock_offset=*/absl::nullopt}; - constexpr absl::optional kLocalCaptureClockOffset = absl::nullopt; + /*estimated_capture_clock_offset=*/std::nullopt}; + constexpr std::optional kLocalCaptureClockOffset = std::nullopt; constexpr Timestamp kReceiveTime0 = Timestamp::Millis(60); constexpr Timestamp kReceiveTime1 = Timestamp::Millis(70); constexpr Timestamp kReceiveTime2 = Timestamp::Millis(80); @@ -382,20 +387,20 @@ TEST(SourceTrackerTest, OnFrameDeliveredUpdatesSources) { constexpr uint32_t kRtpTimestamp0 = 40; constexpr uint32_t kRtpTimestamp1 = 41; constexpr uint32_t kRtpTimestamp2 = 42; - constexpr absl::optional kAudioLevel0 = 50; - constexpr absl::optional kAudioLevel1 = absl::nullopt; - constexpr absl::optional kAudioLevel2 = 10; - constexpr absl::optional kAbsoluteCaptureTime0 = + constexpr std::optional kAudioLevel0 = 50; + constexpr std::optional kAudioLevel1 = std::nullopt; + constexpr std::optional kAudioLevel2 = 10; + constexpr 
std::optional kAbsoluteCaptureTime0 = AbsoluteCaptureTime{12, 34}; - constexpr absl::optional kAbsoluteCaptureTime1 = + constexpr std::optional kAbsoluteCaptureTime1 = AbsoluteCaptureTime{56, 78}; - constexpr absl::optional kAbsoluteCaptureTime2 = + constexpr std::optional kAbsoluteCaptureTime2 = AbsoluteCaptureTime{89, 90}; - constexpr absl::optional kLocalCaptureClockOffset0 = + constexpr std::optional kLocalCaptureClockOffset0 = TimeDelta::Millis(123); - constexpr absl::optional kLocalCaptureClockOffset1 = + constexpr std::optional kLocalCaptureClockOffset1 = TimeDelta::Millis(456); - constexpr absl::optional kLocalCaptureClockOffset2 = + constexpr std::optional kLocalCaptureClockOffset2 = TimeDelta::Millis(789); constexpr Timestamp kReceiveTime0 = Timestamp::Millis(60); constexpr Timestamp kReceiveTime1 = Timestamp::Millis(61); @@ -487,15 +492,15 @@ TEST(SourceTrackerTest, TimedOutSourcesAreRemoved) { constexpr uint32_t kCsrcs2 = 22; constexpr uint32_t kRtpTimestamp0 = 40; constexpr uint32_t kRtpTimestamp1 = 41; - constexpr absl::optional kAudioLevel0 = 50; - constexpr absl::optional kAudioLevel1 = absl::nullopt; - constexpr absl::optional kAbsoluteCaptureTime0 = + constexpr std::optional kAudioLevel0 = 50; + constexpr std::optional kAudioLevel1 = std::nullopt; + constexpr std::optional kAbsoluteCaptureTime0 = AbsoluteCaptureTime{12, 34}; - constexpr absl::optional kAbsoluteCaptureTime1 = + constexpr std::optional kAbsoluteCaptureTime1 = AbsoluteCaptureTime{56, 78}; - constexpr absl::optional kLocalCaptureClockOffset0 = + constexpr std::optional kLocalCaptureClockOffset0 = TimeDelta::Millis(123); - constexpr absl::optional kLocalCaptureClockOffset1 = + constexpr std::optional kLocalCaptureClockOffset1 = TimeDelta::Millis(456); constexpr Timestamp kReceiveTime0 = Timestamp::Millis(60); constexpr Timestamp kReceiveTime1 = Timestamp::Millis(61); @@ -535,4 +540,12 @@ TEST(SourceTrackerTest, TimedOutSourcesAreRemoved) { kRtpTimestamp1, extensions1))); } +TEST(SourceTrackerTest, AvoidNegativeTimestamp) { + SimulatedClock clock(Timestamp::Zero()); + SourceTracker tracker(&clock); + tracker.OnFrameDelivered(RtpPacketInfos( + {RtpPacketInfo(/*ssrc=*/111, /*csrcs=*/{}, /*rtp_timestamp=*/0, + /*receive_time=*/Timestamp::Zero())})); +} + } // namespace webrtc diff --git a/modules/rtp_rtcp/source/tmmbr_help.cc b/modules/rtp_rtcp/source/tmmbr_help.cc index 569ed4d8e0..0eb7d602c7 100644 --- a/modules/rtp_rtcp/source/tmmbr_help.cc +++ b/modules/rtp_rtcp/source/tmmbr_help.cc @@ -12,9 +12,12 @@ #include +#include #include +#include #include "absl/algorithm/container.h" +#include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" #include "rtc_base/checks.h" namespace webrtc { @@ -87,13 +90,13 @@ std::vector TMMBRHelp::FindBoundingSet( bounding_set.push_back(*min_bitrate_it); intersection[0] = 0; // Calculate its maximum packet rate (where its line crosses x-axis). - uint16_t packet_overhead = bounding_set.back().packet_overhead(); - if (packet_overhead == 0) { + if (bounding_set.back().packet_overhead() == 0) { // Avoid division by zero. max_packet_rate[0] = std::numeric_limits::max(); } else { max_packet_rate[0] = - bounding_set.back().bitrate_bps() / static_cast(packet_overhead); + bounding_set.back().bitrate_bps() / + static_cast(bounding_set.back().packet_overhead()); } // Remove from candidate list. 
min_bitrate_it->set_bitrate_bps(0); diff --git a/modules/rtp_rtcp/source/ulpfec_generator.cc b/modules/rtp_rtcp/source/ulpfec_generator.cc index cae659cdd7..a29aeb5352 100644 --- a/modules/rtp_rtcp/source/ulpfec_generator.cc +++ b/modules/rtp_rtcp/source/ulpfec_generator.cc @@ -15,12 +15,17 @@ #include #include #include +#include +#include "api/environment/environment.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "modules/include/module_fec_types.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" #include "modules/rtp_rtcp/source/forward_error_correction_internal.h" #include "rtc_base/checks.h" +#include "rtc_base/race_checker.h" #include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -67,12 +72,12 @@ UlpfecGenerator::Params::Params(FecProtectionParams delta_params, FecProtectionParams keyframe_params) : delta_params(delta_params), keyframe_params(keyframe_params) {} -UlpfecGenerator::UlpfecGenerator(int red_payload_type, - int ulpfec_payload_type, - Clock* clock) - : red_payload_type_(red_payload_type), +UlpfecGenerator::UlpfecGenerator(const Environment& env, + int red_payload_type, + int ulpfec_payload_type) + : env_(env), + red_payload_type_(red_payload_type), ulpfec_payload_type_(ulpfec_payload_type), - clock_(clock), fec_(ForwardErrorCorrection::CreateUlpfec(kUnknownSsrc)), num_protected_frames_(0), min_num_media_packets_(1), @@ -80,11 +85,11 @@ UlpfecGenerator::UlpfecGenerator(int red_payload_type, fec_bitrate_(/*max_window_size=*/TimeDelta::Seconds(1)) {} // Used by FlexFecSender, payload types are unused. -UlpfecGenerator::UlpfecGenerator(std::unique_ptr fec, - Clock* clock) - : red_payload_type_(0), +UlpfecGenerator::UlpfecGenerator(const Environment& env, + std::unique_ptr fec) + : env_(env), + red_payload_type_(0), ulpfec_payload_type_(0), - clock_(clock), fec_(std::move(fec)), num_protected_frames_(0), min_num_media_packets_(1), @@ -235,14 +240,15 @@ std::vector> UlpfecGenerator::GetFecPackets() { ResetState(); MutexLock lock(&mutex_); - fec_bitrate_.Update(total_fec_size_bytes, clock_->CurrentTime()); + fec_bitrate_.Update(total_fec_size_bytes, env_.clock().CurrentTime()); return fec_packets; } DataRate UlpfecGenerator::CurrentFecRate() const { MutexLock lock(&mutex_); - return fec_bitrate_.Rate(clock_->CurrentTime()).value_or(DataRate::Zero()); + return fec_bitrate_.Rate(env_.clock().CurrentTime()) + .value_or(DataRate::Zero()); } int UlpfecGenerator::Overhead() const { diff --git a/modules/rtp_rtcp/source/ulpfec_generator.h b/modules/rtp_rtcp/source/ulpfec_generator.h index 0058847357..b524b1c776 100644 --- a/modules/rtp_rtcp/source/ulpfec_generator.h +++ b/modules/rtp_rtcp/source/ulpfec_generator.h @@ -16,14 +16,20 @@ #include #include +#include #include +#include "api/environment/environment.h" +#include "api/units/data_rate.h" #include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/video_fec_generator.h" #include "rtc_base/bitrate_tracker.h" #include "rtc_base/race_checker.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -33,13 +39,15 @@ class UlpfecGenerator : public VideoFecGenerator { friend class FlexfecSender; public: - UlpfecGenerator(int 
red_payload_type, int ulpfec_payload_type, Clock* clock); + UlpfecGenerator(const Environment& env, + int red_payload_type, + int ulpfec_payload_type); ~UlpfecGenerator(); FecType GetFecType() const override { return VideoFecGenerator::FecType::kUlpFec; } - absl::optional FecSsrc() override { return absl::nullopt; } + std::optional FecSsrc() override { return std::nullopt; } void SetProtectionParameters(const FecProtectionParams& delta_params, const FecProtectionParams& key_params) override; @@ -57,7 +65,7 @@ class UlpfecGenerator : public VideoFecGenerator { // Current rate of FEC packets generated, including all RTP-level headers. DataRate CurrentFecRate() const override; - absl::optional GetRtpState() override { return absl::nullopt; } + std::optional GetRtpState() override { return std::nullopt; } // Currently used protection params. const FecProtectionParams& CurrentParams() const; @@ -72,7 +80,8 @@ class UlpfecGenerator : public VideoFecGenerator { FecProtectionParams keyframe_params; }; - UlpfecGenerator(std::unique_ptr fec, Clock* clock); + UlpfecGenerator(const Environment& env, + std::unique_ptr fec); // Overhead is defined as relative to the number of media packets, and not // relative to total number of packets. This definition is inherited from the @@ -95,16 +104,16 @@ class UlpfecGenerator : public VideoFecGenerator { void ResetState(); + const Environment env_; const int red_payload_type_; const int ulpfec_payload_type_; - Clock* const clock_; - rtc::RaceChecker race_checker_; + RaceChecker race_checker_; const std::unique_ptr fec_ RTC_GUARDED_BY(race_checker_); ForwardErrorCorrection::PacketList media_packets_ RTC_GUARDED_BY(race_checker_); - absl::optional last_media_packet_ + std::optional last_media_packet_ RTC_GUARDED_BY(race_checker_); std::list generated_fec_packets_ RTC_GUARDED_BY(race_checker_); @@ -114,7 +123,7 @@ class UlpfecGenerator : public VideoFecGenerator { bool media_contains_keyframe_ RTC_GUARDED_BY(race_checker_); mutable Mutex mutex_; - absl::optional pending_params_ RTC_GUARDED_BY(mutex_); + std::optional pending_params_ RTC_GUARDED_BY(mutex_); BitrateTracker fec_bitrate_ RTC_GUARDED_BY(mutex_); }; diff --git a/modules/rtp_rtcp/source/ulpfec_generator_unittest.cc b/modules/rtp_rtcp/source/ulpfec_generator_unittest.cc index 18f5685791..69ad59de23 100644 --- a/modules/rtp_rtcp/source/ulpfec_generator_unittest.cc +++ b/modules/rtp_rtcp/source/ulpfec_generator_unittest.cc @@ -10,14 +10,20 @@ #include "modules/rtp_rtcp/source/ulpfec_generator.h" -#include +#include +#include #include -#include #include -#include "modules/rtp_rtcp/source/byte_io.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/fec_test_helper.h" -#include "modules/rtp_rtcp/source/forward_error_correction.h" +#include "modules/rtp_rtcp/source/forward_error_correction_internal.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "system_wrappers/include/clock.h" #include "test/gtest.h" namespace webrtc { @@ -36,7 +42,7 @@ void VerifyHeader(uint16_t seq_num, int red_payload_type, int fec_payload_type, bool marker_bit, - const rtc::CopyOnWriteBuffer& data) { + const CopyOnWriteBuffer& data) { // Marker bit not set. EXPECT_EQ(marker_bit ? 
0x80 : 0, data[1] & 0x80); EXPECT_EQ(red_payload_type, data[1] & 0x7F); @@ -50,11 +56,11 @@ void VerifyHeader(uint16_t seq_num, class UlpfecGeneratorTest : public ::testing::Test { protected: UlpfecGeneratorTest() - : fake_clock_(1), - ulpfec_generator_(kRedPayloadType, kFecPayloadType, &fake_clock_), + : env_(CreateEnvironment(std::make_unique(1))), + ulpfec_generator_(env_, kRedPayloadType, kFecPayloadType), packet_generator_(kMediaSsrc) {} - SimulatedClock fake_clock_; + const Environment env_; UlpfecGenerator ulpfec_generator_; AugmentedPacketGenerator packet_generator_; }; diff --git a/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc b/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc index f57f31115c..5bd4488573 100644 --- a/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc +++ b/modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc @@ -12,8 +12,12 @@ #include +#include + +#include "api/array_view.h" #include "api/scoped_refptr.h" #include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/forward_error_correction.h" #include "modules/rtp_rtcp/source/forward_error_correction_internal.h" #include "rtc_base/checks.h" @@ -98,7 +102,7 @@ UlpfecHeaderWriter::~UlpfecHeaderWriter() = default; // returns a bound on the sequence number spread), if logic is added to // UlpfecHeaderWriter::FinalizeFecHeader to truncate packet masks which end // in a string of zeroes. (Similar to how it is done in the FlexFEC case.) -size_t UlpfecHeaderWriter::MinPacketMaskSize(const uint8_t* packet_mask, +size_t UlpfecHeaderWriter::MinPacketMaskSize(const uint8_t* /* packet_mask */, size_t packet_mask_size) const { return packet_mask_size; } @@ -108,7 +112,7 @@ size_t UlpfecHeaderWriter::FecHeaderSize(size_t packet_mask_size) const { } void UlpfecHeaderWriter::FinalizeFecHeader( - rtc::ArrayView protected_streams, + ArrayView protected_streams, ForwardErrorCorrection::Packet& fec_packet) const { RTC_CHECK_EQ(protected_streams.size(), 1); uint16_t seq_num_base = protected_streams[0].seq_num_base; diff --git a/modules/rtp_rtcp/source/ulpfec_header_reader_writer.h b/modules/rtp_rtcp/source/ulpfec_header_reader_writer.h index 1d823b937a..b08725eebf 100644 --- a/modules/rtp_rtcp/source/ulpfec_header_reader_writer.h +++ b/modules/rtp_rtcp/source/ulpfec_header_reader_writer.h @@ -14,6 +14,7 @@ #include #include +#include "api/array_view.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" namespace webrtc { @@ -57,7 +58,7 @@ class UlpfecHeaderWriter : public FecHeaderWriter { size_t FecHeaderSize(size_t packet_mask_row_size) const override; void FinalizeFecHeader( - rtc::ArrayView protected_streams, + ArrayView protected_streams, ForwardErrorCorrection::Packet& fec_packet) const override; }; diff --git a/modules/rtp_rtcp/source/ulpfec_header_reader_writer_unittest.cc b/modules/rtp_rtcp/source/ulpfec_header_reader_writer_unittest.cc index f0b78e8d87..99ac1127a4 100644 --- a/modules/rtp_rtcp/source/ulpfec_header_reader_writer_unittest.cc +++ b/modules/rtp_rtcp/source/ulpfec_header_reader_writer_unittest.cc @@ -12,14 +12,13 @@ #include +#include #include -#include #include "api/scoped_refptr.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" #include "modules/rtp_rtcp/source/forward_error_correction_internal.h" -#include "rtc_base/checks.h" #include "rtc_base/random.h" #include "test/gmock.h" #include "test/gtest.h" @@ -71,7 +70,7 @@ std::unique_ptr ReadHeader(const Packet& written_packet) { UlpfecHeaderReader 
reader; std::unique_ptr read_packet(new ReceivedFecPacket()); read_packet->ssrc = kMediaSsrc; - read_packet->pkt = rtc::scoped_refptr(new Packet()); + read_packet->pkt = scoped_refptr(new Packet()); read_packet->pkt->data = written_packet.data; EXPECT_TRUE(reader.ReadFecHeader(read_packet.get())); return read_packet; @@ -115,7 +114,7 @@ TEST(UlpfecHeaderReaderTest, ReadsSmallHeader) { }; const size_t packet_length = sizeof(packet); ReceivedFecPacket read_packet; - read_packet.pkt = rtc::scoped_refptr(new Packet()); + read_packet.pkt = scoped_refptr(new Packet()); read_packet.pkt->data.SetData(packet, packet_length); UlpfecHeaderReader reader; @@ -139,7 +138,7 @@ TEST(UlpfecHeaderReaderTest, ReadsLargeHeader) { }; const size_t packet_length = sizeof(packet); ReceivedFecPacket read_packet; - read_packet.pkt = rtc::scoped_refptr(new Packet()); + read_packet.pkt = scoped_refptr(new Packet()); read_packet.pkt->data.SetData(packet, packet_length); UlpfecHeaderReader reader; diff --git a/modules/rtp_rtcp/source/ulpfec_receiver.cc b/modules/rtp_rtcp/source/ulpfec_receiver.cc index 7f74a18a87..b823ca1750 100644 --- a/modules/rtp_rtcp/source/ulpfec_receiver.cc +++ b/modules/rtp_rtcp/source/ulpfec_receiver.cc @@ -10,13 +10,24 @@ #include "modules/rtp_rtcp/source/ulpfec_receiver.h" +#include +#include #include #include +#include #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/rtp_rtcp/include/recovered_packet_receiver.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/forward_error_correction.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/time_utils.h" +#include "system_wrappers/include/clock.h" #include "system_wrappers/include/metrics.h" namespace webrtc { diff --git a/modules/rtp_rtcp/source/ulpfec_receiver.h b/modules/rtp_rtcp/source/ulpfec_receiver.h index 6afb422718..689d2bad13 100644 --- a/modules/rtp_rtcp/source/ulpfec_receiver.h +++ b/modules/rtp_rtcp/source/ulpfec_receiver.h @@ -18,11 +18,12 @@ #include #include "api/sequence_checker.h" +#include "api/units/timestamp.h" #include "modules/rtp_rtcp/include/recovered_packet_receiver.h" -#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" namespace webrtc { diff --git a/modules/rtp_rtcp/source/ulpfec_receiver_unittest.cc b/modules/rtp_rtcp/source/ulpfec_receiver_unittest.cc index 676e20c795..d36ebf559a 100644 --- a/modules/rtp_rtcp/source/ulpfec_receiver_unittest.cc +++ b/modules/rtp_rtcp/source/ulpfec_receiver_unittest.cc @@ -12,16 +12,22 @@ #include +#include +#include #include #include #include +#include "api/units/timestamp.h" +#include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/include/recovered_packet_receiver.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/mocks/mock_recovered_packet_receiver.h" -#include "modules/rtp_rtcp/mocks/mock_rtp_rtcp.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/fec_test_helper.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" #include 
"modules/rtp_rtcp/source/rtp_packet_received.h" +#include "system_wrappers/include/clock.h" #include "test/gmock.h" #include "test/gtest.h" @@ -41,7 +47,7 @@ constexpr uint32_t kMediaSsrc = 835424; class NullRecoveredPacketReceiver : public RecoveredPacketReceiver { public: - void OnRecoveredPacket(const RtpPacketReceived& packet) override {} + void OnRecoveredPacket(const RtpPacketReceived& /* packet */) override {} }; } // namespace diff --git a/modules/rtp_rtcp/source/video_fec_generator.h b/modules/rtp_rtcp/source/video_fec_generator.h index 38e4103cb6..f4a492e14d 100644 --- a/modules/rtp_rtcp/source/video_fec_generator.h +++ b/modules/rtp_rtcp/source/video_fec_generator.h @@ -11,11 +11,15 @@ #ifndef MODULES_RTP_RTCP_SOURCE_VIDEO_FEC_GENERATOR_H_ #define MODULES_RTP_RTCP_SOURCE_VIDEO_FEC_GENERATOR_H_ +#include +#include #include +#include #include #include "api/units/data_rate.h" #include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" namespace webrtc { @@ -28,7 +32,7 @@ class VideoFecGenerator { enum class FecType { kFlexFec, kUlpFec }; virtual FecType GetFecType() const = 0; // Returns the SSRC used for FEC packets (i.e. FlexFec SSRC). - virtual absl::optional FecSsrc() = 0; + virtual std::optional FecSsrc() = 0; // Returns the overhead, in bytes per packet, for FEC (and possibly RED). virtual size_t MaxPacketOverhead() const = 0; // Current rate of FEC packets generated, including all RTP-level headers. @@ -47,7 +51,7 @@ class VideoFecGenerator { virtual std::vector> GetFecPackets() = 0; // Only called on the VideoSendStream queue, after operation has shut down, // and only populated if there is an RtpState (e.g. FlexFec). - virtual absl::optional GetRtpState() = 0; + virtual std::optional GetRtpState() = 0; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer.cc index bb0bf09e90..74a7522546 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer.cc +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer.cc @@ -13,6 +13,8 @@ #include #include +#include + #include "api/array_view.h" #include "api/scoped_refptr.h" #include "api/video/encoded_image.h" @@ -20,18 +22,18 @@ namespace webrtc { -rtc::scoped_refptr VideoRtpDepacketizer::AssembleFrame( - rtc::ArrayView> rtp_payloads) { +scoped_refptr VideoRtpDepacketizer::AssembleFrame( + ArrayView> rtp_payloads) { size_t frame_size = 0; - for (rtc::ArrayView payload : rtp_payloads) { + for (ArrayView payload : rtp_payloads) { frame_size += payload.size(); } - rtc::scoped_refptr bitstream = + scoped_refptr bitstream = EncodedImageBuffer::Create(frame_size); uint8_t* write_at = bitstream->data(); - for (rtc::ArrayView payload : rtp_payloads) { + for (ArrayView payload : rtp_payloads) { memcpy(write_at, payload.data(), payload.size()); write_at += payload.size(); } diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer.h b/modules/rtp_rtcp/source/video_rtp_depacketizer.h index 2266120799..487be38f62 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer.h +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer.h @@ -13,7 +13,8 @@ #include -#include "absl/types/optional.h" +#include + #include "api/array_view.h" #include "api/scoped_refptr.h" #include "api/video/encoded_image.h" @@ -26,14 +27,14 @@ class VideoRtpDepacketizer { public: struct ParsedRtpPayload { RTPVideoHeader video_header; - rtc::CopyOnWriteBuffer video_payload; + CopyOnWriteBuffer 
video_payload; }; virtual ~VideoRtpDepacketizer() = default; - virtual absl::optional Parse( - rtc::CopyOnWriteBuffer rtp_payload) = 0; - virtual rtc::scoped_refptr AssembleFrame( - rtc::ArrayView> rtp_payloads); + virtual std::optional Parse( + CopyOnWriteBuffer rtp_payload) = 0; + virtual scoped_refptr AssembleFrame( + ArrayView> rtp_payloads); }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc index 870f788538..fd8d647045 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc @@ -13,12 +13,24 @@ #include #include +#include +#include +#include +#include #include +#include "absl/container/inlined_vector.h" +#include "api/array_view.h" +#include "api/scoped_refptr.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "modules/rtp_rtcp/source/leb128.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "rtc_base/byte_buffer.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -82,7 +94,7 @@ class ArrayOfArrayViews { } private: - using Storage = absl::InlinedVector, 2>; + using Storage = absl::InlinedVector, 2>; size_t size_ = 0; Storage data_; @@ -184,12 +196,11 @@ int RtpStartsNewCodedVideoSequence(uint8_t aggregation_header) { // fills ObuInfo::data field. // Returns empty vector on error. VectorObuInfo ParseObus( - rtc::ArrayView> rtp_payloads) { + ArrayView> rtp_payloads) { VectorObuInfo obu_infos; bool expect_continues_obu = false; - for (rtc::ArrayView rtp_payload : rtp_payloads) { - rtc::ByteBufferReader payload( - reinterpret_cast(rtp_payload.data()), rtp_payload.size()); + for (ArrayView rtp_payload : rtp_payloads) { + ByteBufferReader payload(rtp_payload); uint8_t aggregation_header; if (!payload.ReadUInt8(&aggregation_header)) { RTC_DLOG(LS_WARNING) @@ -319,15 +330,15 @@ bool CalculateObuSizes(ObuInfo* obu_info) { } obu_info->payload_offset = it; obu_info->prefix_size += - WriteLeb128(rtc::dchecked_cast(obu_info->payload_size), + WriteLeb128(dchecked_cast(obu_info->payload_size), obu_info->prefix.data() + obu_info->prefix_size); return true; } } // namespace -rtc::scoped_refptr VideoRtpDepacketizerAv1::AssembleFrame( - rtc::ArrayView> rtp_payloads) { +scoped_refptr VideoRtpDepacketizerAv1::AssembleFrame( + ArrayView> rtp_payloads) { VectorObuInfo obu_infos = ParseObus(rtp_payloads); if (obu_infos.empty()) { return nullptr; @@ -341,7 +352,7 @@ rtc::scoped_refptr VideoRtpDepacketizerAv1::AssembleFrame( frame_size += (obu_info.prefix_size + obu_info.payload_size); } - rtc::scoped_refptr bitstream = + scoped_refptr bitstream = EncodedImageBuffer::Create(frame_size); uint8_t* write_at = bitstream->data(); for (const ObuInfo& obu_info : obu_infos) { @@ -356,19 +367,19 @@ rtc::scoped_refptr VideoRtpDepacketizerAv1::AssembleFrame( return bitstream; } -absl::optional -VideoRtpDepacketizerAv1::Parse(rtc::CopyOnWriteBuffer rtp_payload) { +std::optional +VideoRtpDepacketizerAv1::Parse(CopyOnWriteBuffer rtp_payload) { if (rtp_payload.size() == 0) { RTC_DLOG(LS_ERROR) << "Empty rtp payload."; - return absl::nullopt; + return std::nullopt; } uint8_t aggregation_header = rtp_payload.cdata()[0]; if (RtpStartsNewCodedVideoSequence(aggregation_header) && 
RtpStartsWithFragment(aggregation_header)) { // new coded video sequence can't start from an OBU fragment. - return absl::nullopt; + return std::nullopt; } - absl::optional parsed(absl::in_place); + std::optional parsed(std::in_place); // To assemble frame, all of the rtp payload is required, including // aggregation header. diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.h b/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.h index ac8c7e6d11..4acf209d9c 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.h +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_av1.h @@ -14,7 +14,8 @@ #include #include -#include "absl/types/optional.h" +#include + #include "api/array_view.h" #include "api/scoped_refptr.h" #include "api/video/encoded_image.h" @@ -30,12 +31,10 @@ class VideoRtpDepacketizerAv1 : public VideoRtpDepacketizer { VideoRtpDepacketizerAv1& operator=(const VideoRtpDepacketizerAv1&) = delete; ~VideoRtpDepacketizerAv1() override = default; - rtc::scoped_refptr AssembleFrame( - rtc::ArrayView> rtp_payloads) - override; + scoped_refptr AssembleFrame( + ArrayView> rtp_payloads) override; - absl::optional Parse( - rtc::CopyOnWriteBuffer rtp_payload) override; + std::optional Parse(CopyOnWriteBuffer rtp_payload) override; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_av1_unittest.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_av1_unittest.cc index e9ad1a1b8e..aa3a735fca 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_av1_unittest.cc +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_av1_unittest.cc @@ -10,6 +10,14 @@ #include "modules/rtp_rtcp/source/video_rtp_depacketizer_av1.h" +#include +#include +#include + +#include "api/array_view.h" +#include "api/video/video_frame_type.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" +#include "rtc_base/copy_on_write_buffer.h" #include "test/gmock.h" #include "test/gtest.h" @@ -28,9 +36,9 @@ constexpr uint8_t kObuHeaderHasSize = 0b0'0000'010; TEST(VideoRtpDepacketizerAv1Test, ParsePassFullRtpPayloadAsCodecPayload) { const uint8_t packet[] = {(uint8_t{1} << 7) | kObuCountOne, 1, 2, 3, 4}; - rtc::CopyOnWriteBuffer rtp_payload(packet); + CopyOnWriteBuffer rtp_payload(packet); VideoRtpDepacketizerAv1 depacketizer; - absl::optional parsed = + std::optional parsed = depacketizer.Parse(rtp_payload); ASSERT_TRUE(parsed); EXPECT_EQ(parsed->video_payload.size(), sizeof(packet)); @@ -44,8 +52,8 @@ TEST(VideoRtpDepacketizerAv1Test, kObuHeaderFrame}; // Value doesn't matter since it is a // continuation of the OBU from previous packet. 
VideoRtpDepacketizerAv1 depacketizer; - absl::optional parsed = - depacketizer.Parse(rtc::CopyOnWriteBuffer(packet)); + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(packet)); ASSERT_TRUE(parsed); EXPECT_FALSE(parsed->video_header.is_first_packet_in_frame); } @@ -54,17 +62,17 @@ TEST(VideoRtpDepacketizerAv1Test, ParseTreatsNoContinuationFlagAsBeginningOfFrame) { const uint8_t packet[] = {(uint8_t{0} << 7) | kObuCountOne, kObuHeaderFrame}; VideoRtpDepacketizerAv1 depacketizer; - absl::optional parsed = - depacketizer.Parse(rtc::CopyOnWriteBuffer(packet)); + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(packet)); ASSERT_TRUE(parsed); EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); } TEST(VideoRtpDepacketizerAv1Test, ParseTreatsWillContinueFlagAsNotEndOfFrame) { const uint8_t packet[] = {(uint8_t{1} << 6) | kObuCountOne, kObuHeaderFrame}; - rtc::CopyOnWriteBuffer rtp_payload(packet); + CopyOnWriteBuffer rtp_payload(packet); VideoRtpDepacketizerAv1 depacketizer; - absl::optional parsed = + std::optional parsed = depacketizer.Parse(rtp_payload); ASSERT_TRUE(parsed); EXPECT_FALSE(parsed->video_header.is_last_packet_in_frame); @@ -73,8 +81,8 @@ TEST(VideoRtpDepacketizerAv1Test, ParseTreatsWillContinueFlagAsNotEndOfFrame) { TEST(VideoRtpDepacketizerAv1Test, ParseTreatsNoWillContinueFlagAsEndOfFrame) { const uint8_t packet[] = {(uint8_t{0} << 6) | kObuCountOne, kObuHeaderFrame}; VideoRtpDepacketizerAv1 depacketizer; - absl::optional parsed = - depacketizer.Parse(rtc::CopyOnWriteBuffer(packet)); + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(packet)); ASSERT_TRUE(parsed); EXPECT_TRUE(parsed->video_header.is_last_packet_in_frame); } @@ -84,8 +92,8 @@ TEST(VideoRtpDepacketizerAv1Test, const uint8_t packet[] = {(uint8_t{1} << 3) | kObuCountOne, kObuHeaderSequenceHeader}; VideoRtpDepacketizerAv1 depacketizer; - absl::optional parsed = - depacketizer.Parse(rtc::CopyOnWriteBuffer(packet)); + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(packet)); ASSERT_TRUE(parsed); EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); EXPECT_TRUE(parsed->video_header.frame_type == @@ -97,8 +105,8 @@ TEST(VideoRtpDepacketizerAv1Test, const uint8_t packet[] = {(uint8_t{0} << 3) | kObuCountOne, kObuHeaderSequenceHeader}; VideoRtpDepacketizerAv1 depacketizer; - absl::optional parsed = - depacketizer.Parse(rtc::CopyOnWriteBuffer(packet)); + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(packet)); ASSERT_TRUE(parsed); EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); EXPECT_TRUE(parsed->video_header.frame_type == @@ -110,17 +118,17 @@ TEST(VideoRtpDepacketizerAv1Test, const uint8_t packet[] = {0b10'00'1000 | kObuCountOne, kObuHeaderSequenceHeader}; VideoRtpDepacketizerAv1 depacketizer; - ASSERT_FALSE(depacketizer.Parse(rtc::CopyOnWriteBuffer(packet))); + ASSERT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(packet))); } TEST(VideoRtpDepacketizerAv1Test, AssembleFrameSetsOBUPayloadSizeWhenAbsent) { const uint8_t payload1[] = {0b00'01'0000, // aggregation header 0b0'0110'000, // / Frame 20, 30, 40}; // \ OBU - rtc::ArrayView payloads[] = {payload1}; + ArrayView payloads[] = {payload1}; auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); - rtc::ArrayView frame_view(*frame); + ArrayView frame_view(*frame); EXPECT_TRUE(frame_view[0] & kObuHeaderHasSize); EXPECT_EQ(frame_view[1], 3); } @@ -132,10 +140,10 @@ TEST(VideoRtpDepacketizerAv1Test, AssembleFrameSetsOBUPayloadSizeWhenPresent) { 20, 30, 40}; 
// \ obu_payload - rtc::ArrayView payloads[] = {payload1}; + ArrayView payloads[] = {payload1}; auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); - rtc::ArrayView frame_view(*frame); + ArrayView frame_view(*frame); EXPECT_TRUE(frame_view[0] & kObuHeaderHasSize); EXPECT_EQ(frame_view[1], 3); } @@ -146,10 +154,10 @@ TEST(VideoRtpDepacketizerAv1Test, 0b0'0110'100, // / Frame 0b010'01'000, // | extension_header 20, 30, 40}; // \ OBU - rtc::ArrayView payloads[] = {payload1}; + ArrayView payloads[] = {payload1}; auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); - rtc::ArrayView frame_view(*frame); + ArrayView frame_view(*frame); EXPECT_TRUE(frame_view[0] & kObuHeaderHasSize); EXPECT_EQ(frame_view[2], 3); } @@ -163,10 +171,10 @@ TEST(VideoRtpDepacketizerAv1Test, 20, 30, 40}; // \ obu_payload - rtc::ArrayView payloads[] = {payload1}; + ArrayView payloads[] = {payload1}; auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); - rtc::ArrayView frame_view(*frame); + ArrayView frame_view(*frame); EXPECT_TRUE(frame_view[0] & kObuHeaderHasSize); EXPECT_EQ(frame_view[2], 3); } @@ -175,10 +183,10 @@ TEST(VideoRtpDepacketizerAv1Test, AssembleFrameFromOnePacketWithOneObu) { const uint8_t payload1[] = {0b00'01'0000, // aggregation header 0b0'0110'000, // / Frame 20}; // \ OBU - rtc::ArrayView payloads[] = {payload1}; + ArrayView payloads[] = {payload1}; auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); - EXPECT_THAT(rtc::ArrayView(*frame), + EXPECT_THAT(ArrayView(*frame), ElementsAre(0b0'0110'010, 1, 20)); } @@ -189,10 +197,10 @@ TEST(VideoRtpDepacketizerAv1Test, AssembleFrameFromOnePacketWithTwoObus) { 10, // \ OBU 0b0'0110'000, // / Frame 20}; // \ OBU - rtc::ArrayView payloads[] = {payload1}; + ArrayView payloads[] = {payload1}; auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); - EXPECT_THAT(rtc::ArrayView(*frame), + EXPECT_THAT(ArrayView(*frame), ElementsAre(0b0'0001'010, 1, 10, // Sequence Header OBU 0b0'0110'010, 1, 20)); // Frame OBU } @@ -202,10 +210,10 @@ TEST(VideoRtpDepacketizerAv1Test, AssembleFrameFromTwoPacketsWithOneObu) { 0b0'0110'000, 20, 30}; const uint8_t payload2[] = {0b10'01'0000, // aggregation header 40}; - rtc::ArrayView payloads[] = {payload1, payload2}; + ArrayView payloads[] = {payload1, payload2}; auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); - EXPECT_THAT(rtc::ArrayView(*frame), + EXPECT_THAT(ArrayView(*frame), ElementsAre(0b0'0110'010, 3, 20, 30, 40)); } @@ -219,10 +227,10 @@ TEST(VideoRtpDepacketizerAv1Test, AssembleFrameFromTwoPacketsWithTwoObu) { 30}; // const uint8_t payload2[] = {0b10'01'0000, // aggregation header 40}; // - rtc::ArrayView payloads[] = {payload1, payload2}; + ArrayView payloads[] = {payload1, payload2}; auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); - EXPECT_THAT(rtc::ArrayView(*frame), + EXPECT_THAT(ArrayView(*frame), ElementsAre(0b0'0001'010, 1, 10, // SH 0b0'0110'010, 3, 20, 30, 40)); // Frame } @@ -250,10 +258,10 @@ TEST(VideoRtpDepacketizerAv1Test, const uint8_t payload2[] = {0b10'01'0000, // aggregation header 70, 80, 90}; // \ tail of the frame OBU - rtc::ArrayView payloads[] = {payload1, payload2}; + ArrayView payloads[] = {payload1, payload2}; auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); - EXPECT_THAT(rtc::ArrayView(*frame), + EXPECT_THAT(ArrayView(*frame), ElementsAre( // 
Sequence header OBU 0b0'0001'010, 1, 10, // Metadata OBU without extension @@ -274,11 +282,11 @@ TEST(VideoRtpDepacketizerAv1Test, AssembleFrameWithOneObuFromManyPackets) { const uint8_t payload4[] = {0b10'01'0000, // aggregation header 18}; - rtc::ArrayView payloads[] = {payload1, payload2, payload3, - payload4}; + ArrayView payloads[] = {payload1, payload2, payload3, + payload4}; auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); - EXPECT_THAT(rtc::ArrayView(*frame), + EXPECT_THAT(ArrayView(*frame), ElementsAre(0b0'0110'010, 8, 11, 12, 13, 14, 15, 16, 17, 18)); } @@ -306,11 +314,11 @@ TEST(VideoRtpDepacketizerAv1Test, 32}; const uint8_t payload4[] = {0b10'01'0000, // aggregation header 33, 34, 35, 36}; - rtc::ArrayView payloads[] = {payload1, payload2, payload3, - payload4}; + ArrayView payloads[] = {payload1, payload2, payload3, + payload4}; auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); - EXPECT_THAT(rtc::ArrayView(*frame), + EXPECT_THAT(ArrayView(*frame), ElementsAre(0b0'0011'010, 2, 11, 12, // Frame header 0b0'0100'010, 7, 21, 22, 23, 24, 25, 26, 27, // 0b0'0111'010, 2, 11, 12, // @@ -326,11 +334,11 @@ TEST(VideoRtpDepacketizerAv1Test, payload1[2] = 0x01; // in two bytes payload1[3] = 0b0'0110'000; // obu_header with size and extension bits unset. payload1[4 + 42] = 0x42; - rtc::ArrayView payloads[] = {payload1}; + ArrayView payloads[] = {payload1}; auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); EXPECT_EQ(frame->size(), 2 + 127u); - rtc::ArrayView frame_view(*frame); + ArrayView frame_view(*frame); EXPECT_EQ(frame_view[0], 0b0'0110'010); // obu_header with size bit set. EXPECT_EQ(frame_view[1], 127); // obu payload size, 1 byte enough to encode. // Check 'random' byte from the payload is at the same 'random' offset. @@ -351,11 +359,11 @@ TEST(VideoRtpDepacketizerAv1Test, payload2[1] = 96; // leb128 encoded size of 96 bytes in one byte payload2[2 + 20] = 0x20; - rtc::ArrayView payloads[] = {payload1, payload2}; + ArrayView payloads[] = {payload1, payload2}; auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); EXPECT_EQ(frame->size(), 3 + 128u); - rtc::ArrayView frame_view(*frame); + ArrayView frame_view(*frame); EXPECT_EQ(frame_view[0], 0b0'0110'010); // obu_header with size bit set. EXPECT_EQ(frame_view[1], 0x80); // obu payload size of 128 bytes. 
EXPECT_EQ(frame_view[2], 0x01); // encoded in two byes @@ -368,11 +376,11 @@ TEST(VideoRtpDepacketizerAv1Test, AssembleFrameFromAlmostEmptyPacketStartingAnOBU) { const uint8_t payload1[] = {0b01'01'0000}; const uint8_t payload2[] = {0b10'01'0000, 0b0'0110'000, 10, 20, 30}; - rtc::ArrayView payloads[] = {payload1, payload2}; + ArrayView payloads[] = {payload1, payload2}; auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); - EXPECT_THAT(rtc::ArrayView(*frame), + EXPECT_THAT(ArrayView(*frame), ElementsAre(0b0'0110'010, 3, 10, 20, 30)); } @@ -380,11 +388,11 @@ TEST(VideoRtpDepacketizerAv1Test, AssembleFrameFromAlmostEmptyPacketFinishingAnOBU) { const uint8_t payload1[] = {0b01'01'0000, 0b0'0110'000, 10, 20, 30}; const uint8_t payload2[] = {0b10'01'0000}; - rtc::ArrayView payloads[] = {payload1, payload2}; + ArrayView payloads[] = {payload1, payload2}; auto frame = VideoRtpDepacketizerAv1().AssembleFrame(payloads); ASSERT_TRUE(frame); - EXPECT_THAT(rtc::ArrayView(*frame), + EXPECT_THAT(ArrayView(*frame), ElementsAre(0b0'0110'010, 3, 10, 20, 30)); } diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc index 6010771318..1ac96a2f2e 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc @@ -13,9 +13,11 @@ #include #include +#include #include -#include "absl/types/optional.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "rtc_base/copy_on_write_buffer.h" @@ -33,13 +35,13 @@ constexpr size_t kGenericHeaderLength = 1; constexpr size_t kExtendedHeaderLength = 2; } // namespace -absl::optional -VideoRtpDepacketizerGeneric::Parse(rtc::CopyOnWriteBuffer rtp_payload) { +std::optional +VideoRtpDepacketizerGeneric::Parse(CopyOnWriteBuffer rtp_payload) { if (rtp_payload.size() == 0) { RTC_LOG(LS_WARNING) << "Empty payload."; - return absl::nullopt; + return std::nullopt; } - absl::optional parsed(absl::in_place); + std::optional parsed(std::in_place); const uint8_t* payload_data = rtp_payload.cdata(); uint8_t generic_header = payload_data[0]; @@ -57,7 +59,7 @@ VideoRtpDepacketizerGeneric::Parse(rtc::CopyOnWriteBuffer rtp_payload) { if (generic_header & kExtendedHeaderBit) { if (rtp_payload.size() < offset + kExtendedHeaderLength) { RTC_LOG(LS_WARNING) << "Too short payload for generic header."; - return absl::nullopt; + return std::nullopt; } parsed->video_header.video_type_header .emplace() diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.h b/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.h index 27056da481..91b34fb6f9 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.h +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_generic.h @@ -11,7 +11,8 @@ #ifndef MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_GENERIC_H_ #define MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_GENERIC_H_ -#include "absl/types/optional.h" +#include + #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "rtc_base/copy_on_write_buffer.h" @@ -21,8 +22,7 @@ class VideoRtpDepacketizerGeneric : public VideoRtpDepacketizer { public: ~VideoRtpDepacketizerGeneric() override = default; - absl::optional Parse( - rtc::CopyOnWriteBuffer rtp_payload) override; + std::optional Parse(CopyOnWriteBuffer rtp_payload) override; }; } // namespace webrtc 
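// --- Illustrative sketch (not part of the patch): how the one-byte generic
// payload descriptor handled by VideoRtpDepacketizerGeneric::Parse above is
// laid out. kGenericHeaderLength and kExtendedHeaderLength mirror the
// constants visible in this diff; the extended-header bit value, the 15-bit
// picture id layout, and the helper name ParseGenericDescriptorSketch are
// assumptions inferred from the unit test expectation that payload
// {0x05, 0x13, 0x37} yields picture_id 0x1337 -- not the library's real API.
#include <cstddef>
#include <cstdint>
#include <optional>

namespace {

constexpr uint8_t kSketchExtendedHeaderBit = 0x04;  // Assumed from the tests.
constexpr size_t kSketchGenericHeaderLength = 1;
constexpr size_t kSketchExtendedHeaderLength = 2;

struct GenericDescriptorSketch {
  bool has_picture_id = false;
  uint16_t picture_id = 0;    // 15-bit picture id, if present.
  size_t payload_offset = 0;  // Where the codec payload starts.
};

// Parses the one-byte generic header and, when the extended-header bit is
// set, the two bytes of picture id that follow it.
std::optional<GenericDescriptorSketch> ParseGenericDescriptorSketch(
    const uint8_t* data, size_t size) {
  if (size < kSketchGenericHeaderLength) {
    return std::nullopt;  // Empty payloads are rejected, as in Parse() above.
  }
  GenericDescriptorSketch descriptor;
  descriptor.payload_offset = kSketchGenericHeaderLength;
  if (data[0] & kSketchExtendedHeaderBit) {
    if (size < kSketchGenericHeaderLength + kSketchExtendedHeaderLength) {
      return std::nullopt;  // Too short for the extended header.
    }
    descriptor.has_picture_id = true;
    descriptor.picture_id =
        static_cast<uint16_t>(((data[1] & 0x7F) << 8) | data[2]);
    descriptor.payload_offset += kSketchExtendedHeaderLength;
  }
  return descriptor;
}

}  // namespace
// --- end of sketch.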
diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_generic_unittest.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_generic_unittest.cc index 860ddab4fd..e808effca1 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_generic_unittest.cc +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_generic_unittest.cc @@ -12,7 +12,12 @@ #include -#include "absl/types/optional.h" +#include +#include +#include + +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "rtc_base/copy_on_write_buffer.h" #include "test/gmock.h" #include "test/gtest.h" @@ -25,28 +30,28 @@ using ::testing::SizeIs; TEST(VideoRtpDepacketizerGeneric, NonExtendedHeaderNoFrameId) { const size_t kRtpPayloadSize = 10; const uint8_t kPayload[kRtpPayloadSize] = {0x01}; - rtc::CopyOnWriteBuffer rtp_payload(kPayload); + CopyOnWriteBuffer rtp_payload(kPayload); VideoRtpDepacketizerGeneric depacketizer; - absl::optional parsed = + std::optional parsed = depacketizer.Parse(rtp_payload); ASSERT_TRUE(parsed); - EXPECT_EQ(parsed->video_header.generic, absl::nullopt); + EXPECT_EQ(parsed->video_header.generic, std::nullopt); EXPECT_THAT(parsed->video_payload, SizeIs(kRtpPayloadSize - 1)); } TEST(VideoRtpDepacketizerGeneric, ExtendedHeaderParsesFrameId) { const size_t kRtpPayloadSize = 10; const uint8_t kPayload[kRtpPayloadSize] = {0x05, 0x13, 0x37}; - rtc::CopyOnWriteBuffer rtp_payload(kPayload); + CopyOnWriteBuffer rtp_payload(kPayload); VideoRtpDepacketizerGeneric depacketizer; - absl::optional parsed = + std::optional parsed = depacketizer.Parse(rtp_payload); ASSERT_TRUE(parsed); - const auto* generic_header = absl::get_if( + const auto* generic_header = std::get_if( &parsed->video_header.video_type_header); ASSERT_TRUE(generic_header); EXPECT_EQ(generic_header->picture_id, 0x1337); @@ -55,10 +60,10 @@ TEST(VideoRtpDepacketizerGeneric, ExtendedHeaderParsesFrameId) { TEST(VideoRtpDepacketizerGeneric, PassRtpPayloadAsVideoPayload) { const uint8_t kPayload[] = {0x01, 0x25, 0x52}; - rtc::CopyOnWriteBuffer rtp_payload(kPayload); + CopyOnWriteBuffer rtp_payload(kPayload); VideoRtpDepacketizerGeneric depacketizer; - absl::optional parsed = + std::optional parsed = depacketizer.Parse(rtp_payload); ASSERT_TRUE(parsed); diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc index 9978e5f5fc..54b85e5989 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc @@ -12,11 +12,13 @@ #include #include +#include #include #include -#include "absl/types/optional.h" -#include "absl/types/variant.h" +#include "api/array_view.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "common_video/h264/h264_common.h" #include "common_video/h264/pps_parser.h" #include "common_video/h264/sps_parser.h" @@ -24,6 +26,9 @@ #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtp_format_h264.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "rtc_base/buffer.h" +#include "rtc_base/byte_buffer.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" @@ -34,158 +39,145 @@ namespace { constexpr size_t kNalHeaderSize = 1; constexpr size_t kFuAHeaderSize = 2; constexpr size_t kLengthFieldSize = 2; -constexpr size_t kStapAHeaderSize = kNalHeaderSize + 
kLengthFieldSize; -// TODO(pbos): Avoid parsing this here as well as inside the jitter buffer. -bool ParseStapAStartOffsets(const uint8_t* nalu_ptr, - size_t length_remaining, - std::vector* offsets) { - size_t offset = 0; - while (length_remaining > 0) { - // Buffer doesn't contain room for additional nalu length. - if (length_remaining < sizeof(uint16_t)) - return false; - uint16_t nalu_size = ByteReader::ReadBigEndian(nalu_ptr); - nalu_ptr += sizeof(uint16_t); - length_remaining -= sizeof(uint16_t); - if (nalu_size > length_remaining) - return false; - nalu_ptr += nalu_size; - length_remaining -= nalu_size; +std::vector> ParseStapA( + ArrayView data) { + std::vector> nal_units; + ByteBufferReader reader(data); + if (!reader.Consume(kNalHeaderSize)) { + return nal_units; + } - offsets->push_back(offset + kStapAHeaderSize); - offset += kLengthFieldSize + nalu_size; + while (reader.Length() > 0) { + uint16_t nalu_size; + if (!reader.ReadUInt16(&nalu_size)) { + return {}; + } + if (nalu_size == 0 || nalu_size > reader.Length()) { + return {}; + } + nal_units.emplace_back(reader.Data(), nalu_size); + reader.Consume(nalu_size); } - return true; + return nal_units; } -absl::optional ProcessStapAOrSingleNalu( - rtc::CopyOnWriteBuffer rtp_payload) { - const uint8_t* const payload_data = rtp_payload.cdata(); - absl::optional parsed_payload( - absl::in_place); +std::optional ProcessStapAOrSingleNalu( + CopyOnWriteBuffer rtp_payload) { + ArrayView payload_data(rtp_payload); + std::optional parsed_payload( + std::in_place); bool modified_buffer = false; + Buffer output_buffer; parsed_payload->video_payload = rtp_payload; parsed_payload->video_header.width = 0; parsed_payload->video_header.height = 0; parsed_payload->video_header.codec = kVideoCodecH264; parsed_payload->video_header.simulcastIdx = 0; - parsed_payload->video_header.is_first_packet_in_frame = true; + parsed_payload->video_header.is_first_packet_in_frame = false; auto& h264_header = parsed_payload->video_header.video_type_header .emplace(); - const uint8_t* nalu_start = payload_data + kNalHeaderSize; - const size_t nalu_length = rtp_payload.size() - kNalHeaderSize; uint8_t nal_type = payload_data[0] & kH264TypeMask; - std::vector nalu_start_offsets; + std::vector> nal_units; if (nal_type == H264::NaluType::kStapA) { - // Skip the StapA header (StapA NAL type + length). - if (rtp_payload.size() <= kStapAHeaderSize) { - RTC_LOG(LS_ERROR) << "StapA header truncated."; - return absl::nullopt; - } - - if (!ParseStapAStartOffsets(nalu_start, nalu_length, &nalu_start_offsets)) { - RTC_LOG(LS_ERROR) << "StapA packet with incorrect NALU packet lengths."; - return absl::nullopt; + nal_units = ParseStapA(payload_data); + if (nal_units.empty()) { + RTC_LOG(LS_ERROR) << "Incorrect StapA packet."; + return std::nullopt; } - h264_header.packetization_type = kH264StapA; - nal_type = payload_data[kStapAHeaderSize] & kH264TypeMask; + h264_header.nalu_type = nal_units[0][0] & kH264TypeMask; } else { h264_header.packetization_type = kH264SingleNalu; - nalu_start_offsets.push_back(0); + h264_header.nalu_type = nal_type; + nal_units.push_back(payload_data); } - h264_header.nalu_type = nal_type; - parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameDelta; - nalu_start_offsets.push_back(rtp_payload.size() + - kLengthFieldSize); // End offset. 
- for (size_t i = 0; i < nalu_start_offsets.size() - 1; ++i) { - size_t start_offset = nalu_start_offsets[i]; - // End offset is actually start offset for next unit, excluding length field - // so remove that from this units length. - size_t end_offset = nalu_start_offsets[i + 1] - kLengthFieldSize; - if (end_offset - start_offset < H264::kNaluTypeSize) { - RTC_LOG(LS_ERROR) << "STAP-A packet too short"; - return absl::nullopt; - } + parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameDelta; + for (const ArrayView& nal_unit : nal_units) { NaluInfo nalu; - nalu.type = payload_data[start_offset] & kH264TypeMask; + nalu.type = nal_unit[0] & kH264TypeMask; nalu.sps_id = -1; nalu.pps_id = -1; - start_offset += H264::kNaluTypeSize; + ArrayView nalu_data = nal_unit.subview(H264::kNaluTypeSize); + + if (nalu_data.empty()) { + RTC_LOG(LS_WARNING) << "Skipping empty NAL unit."; + continue; + } switch (nalu.type) { case H264::NaluType::kSps: { // Check if VUI is present in SPS and if it needs to be modified to - // avoid - // excessive decoder latency. + // avoid excessive decoder latency. // Copy any previous data first (likely just the first header). - rtc::Buffer output_buffer; - if (start_offset) - output_buffer.AppendData(payload_data, start_offset); + output_buffer.Clear(); + size_t start_offset = nalu_data.data() - payload_data.data(); + size_t end_offset = start_offset + nalu_data.size(); + if (start_offset) { + output_buffer.AppendData(payload_data.data(), start_offset); + } - absl::optional sps; + std::optional sps; SpsVuiRewriter::ParseResult result = SpsVuiRewriter::ParseAndRewriteSps( - &payload_data[start_offset], end_offset - start_offset, &sps, - nullptr, &output_buffer, SpsVuiRewriter::Direction::kIncoming); - - if (result == SpsVuiRewriter::ParseResult::kVuiRewritten) { - if (modified_buffer) { - RTC_LOG(LS_WARNING) - << "More than one H264 SPS NAL units needing " - "rewriting found within a single STAP-A packet. " - "Keeping the first and rewriting the last."; - } - - // Rewrite length field to new SPS size. - if (h264_header.packetization_type == kH264StapA) { - size_t length_field_offset = - start_offset - (H264::kNaluTypeSize + kLengthFieldSize); - // Stap-A Length includes payload data and type header. - size_t rewritten_size = - output_buffer.size() - start_offset + H264::kNaluTypeSize; - ByteWriter::WriteBigEndian( - &output_buffer[length_field_offset], rewritten_size); - } + nalu_data, &sps, nullptr, &output_buffer, + SpsVuiRewriter::Direction::kIncoming); + switch (result) { + case SpsVuiRewriter::ParseResult::kFailure: + RTC_LOG(LS_WARNING) << "Failed to parse SPS NAL unit."; + return std::nullopt; + case SpsVuiRewriter::ParseResult::kVuiRewritten: + if (modified_buffer) { + RTC_LOG(LS_WARNING) + << "More than one H264 SPS NAL units needing " + "rewriting found within a single STAP-A packet. " + "Keeping the first and rewriting the last."; + } - parsed_payload->video_payload.SetData(output_buffer.data(), - output_buffer.size()); - // Append rest of packet. - parsed_payload->video_payload.AppendData( - &payload_data[end_offset], - nalu_length + kNalHeaderSize - end_offset); + // Rewrite length field to new SPS size. + if (h264_header.packetization_type == kH264StapA) { + size_t length_field_offset = + start_offset - (H264::kNaluTypeSize + kLengthFieldSize); + // Stap-A Length includes payload data and type header. 
+ size_t rewritten_size = + output_buffer.size() - start_offset + H264::kNaluTypeSize; + ByteWriter::WriteBigEndian( + &output_buffer[length_field_offset], rewritten_size); + } - modified_buffer = true; - } + // Append rest of packet. + output_buffer.AppendData(payload_data.subview(end_offset)); - if (sps) { - parsed_payload->video_header.width = sps->width; - parsed_payload->video_header.height = sps->height; - nalu.sps_id = sps->id; - } else { - RTC_LOG(LS_WARNING) << "Failed to parse SPS id from SPS slice."; + modified_buffer = true; + [[fallthrough]]; + case SpsVuiRewriter::ParseResult::kVuiOk: + RTC_DCHECK(sps); + nalu.sps_id = sps->id; + parsed_payload->video_header.width = sps->width; + parsed_payload->video_header.height = sps->height; + parsed_payload->video_header.frame_type = + VideoFrameType::kVideoFrameKey; + break; } - parsed_payload->video_header.frame_type = - VideoFrameType::kVideoFrameKey; + parsed_payload->video_header.is_first_packet_in_frame = true; break; } case H264::NaluType::kPps: { uint32_t pps_id; uint32_t sps_id; - if (PpsParser::ParsePpsIds(&payload_data[start_offset], - end_offset - start_offset, &pps_id, - &sps_id)) { + if (PpsParser::ParsePpsIds(nalu_data, &pps_id, &sps_id)) { nalu.pps_id = pps_id; nalu.sps_id = sps_id; } else { RTC_LOG(LS_WARNING) << "Failed to parse PPS id and SPS id from PPS slice."; + return std::nullopt; } + parsed_payload->video_header.is_first_packet_in_frame = true; break; } case H264::NaluType::kIdr: @@ -193,67 +185,78 @@ absl::optional ProcessStapAOrSingleNalu( VideoFrameType::kVideoFrameKey; [[fallthrough]]; case H264::NaluType::kSlice: { - absl::optional pps_id = PpsParser::ParsePpsIdFromSlice( - &payload_data[start_offset], end_offset - start_offset); - if (pps_id) { - nalu.pps_id = *pps_id; + std::optional slice_header = + PpsParser::ParseSliceHeader(nalu_data); + if (slice_header) { + nalu.pps_id = slice_header->pic_parameter_set_id; + if (slice_header->first_mb_in_slice == 0) { + parsed_payload->video_header.is_first_packet_in_frame = true; + } } else { RTC_LOG(LS_WARNING) << "Failed to parse PPS id from slice of type: " << static_cast(nalu.type); + return std::nullopt; } break; } - // Slices below don't contain SPS or PPS ids. case H264::NaluType::kAud: + parsed_payload->video_header.is_first_packet_in_frame = true; + break; + case H264::NaluType::kSei: + parsed_payload->video_header.is_first_packet_in_frame = true; + break; + // Slices below don't contain SPS or PPS ids. case H264::NaluType::kEndOfSequence: case H264::NaluType::kEndOfStream: case H264::NaluType::kFiller: - case H264::NaluType::kSei: break; case H264::NaluType::kStapA: case H264::NaluType::kFuA: RTC_LOG(LS_WARNING) << "Unexpected STAP-A or FU-A received."; - return absl::nullopt; + return std::nullopt; } - if (h264_header.nalus_length == kMaxNalusPerPacket) { - RTC_LOG(LS_WARNING) - << "Received packet containing more than " << kMaxNalusPerPacket - << " NAL units. 
Will not keep track sps and pps ids for all of them."; - } else { - h264_header.nalus[h264_header.nalus_length++] = nalu; - } + h264_header.nalus.push_back(nalu); } + if (modified_buffer) { + parsed_payload->video_payload.SetData(output_buffer.data(), + output_buffer.size()); + } return parsed_payload; } -absl::optional ParseFuaNalu( - rtc::CopyOnWriteBuffer rtp_payload) { +std::optional ParseFuaNalu( + CopyOnWriteBuffer rtp_payload) { if (rtp_payload.size() < kFuAHeaderSize) { RTC_LOG(LS_ERROR) << "FU-A NAL units truncated."; - return absl::nullopt; + return std::nullopt; } - absl::optional parsed_payload( - absl::in_place); + std::optional parsed_payload( + std::in_place); uint8_t fnri = rtp_payload.cdata()[0] & (kH264FBit | kH264NriMask); uint8_t original_nal_type = rtp_payload.cdata()[1] & kH264TypeMask; bool first_fragment = (rtp_payload.cdata()[1] & kH264SBit) > 0; + bool is_first_packet_in_frame = false; NaluInfo nalu; nalu.type = original_nal_type; nalu.sps_id = -1; nalu.pps_id = -1; if (first_fragment) { - absl::optional pps_id = - PpsParser::ParsePpsIdFromSlice(rtp_payload.cdata() + 2 * kNalHeaderSize, - rtp_payload.size() - 2 * kNalHeaderSize); - if (pps_id) { - nalu.pps_id = *pps_id; - } else { - RTC_LOG(LS_WARNING) - << "Failed to parse PPS from first fragment of FU-A NAL " - "unit with original type: " - << static_cast(nalu.type); + if (original_nal_type == H264::NaluType::kIdr || + original_nal_type == H264::NaluType::kSlice) { + std::optional slice_header = + PpsParser::ParseSliceHeader(ArrayView(rtp_payload) + .subview(2 * kNalHeaderSize)); + if (slice_header) { + nalu.pps_id = slice_header->pic_parameter_set_id; + is_first_packet_in_frame = slice_header->first_mb_in_slice == 0; + } else { + RTC_LOG(LS_WARNING) + << "Failed to parse PPS from first fragment of FU-A NAL " + "unit with original type: " + << static_cast(nalu.type); + } } uint8_t original_nal_header = fnri | original_nal_type; rtp_payload = @@ -274,25 +277,25 @@ absl::optional ParseFuaNalu( parsed_payload->video_header.height = 0; parsed_payload->video_header.codec = kVideoCodecH264; parsed_payload->video_header.simulcastIdx = 0; - parsed_payload->video_header.is_first_packet_in_frame = first_fragment; + parsed_payload->video_header.is_first_packet_in_frame = + is_first_packet_in_frame; auto& h264_header = parsed_payload->video_header.video_type_header .emplace(); h264_header.packetization_type = kH264FuA; h264_header.nalu_type = original_nal_type; if (first_fragment) { - h264_header.nalus[h264_header.nalus_length] = nalu; - h264_header.nalus_length = 1; + h264_header.nalus = {nalu}; } return parsed_payload; } } // namespace -absl::optional -VideoRtpDepacketizerH264::Parse(rtc::CopyOnWriteBuffer rtp_payload) { +std::optional +VideoRtpDepacketizerH264::Parse(CopyOnWriteBuffer rtp_payload) { if (rtp_payload.size() == 0) { RTC_LOG(LS_ERROR) << "Empty payload."; - return absl::nullopt; + return std::nullopt; } uint8_t nal_type = rtp_payload.cdata()[0] & kH264TypeMask; diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.h b/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.h index cbea860049..3415afe1e8 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.h +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.h @@ -11,7 +11,8 @@ #ifndef MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_H264_H_ #define MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_H264_H_ -#include "absl/types/optional.h" +#include + #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include 
"rtc_base/copy_on_write_buffer.h" @@ -20,8 +21,7 @@ class VideoRtpDepacketizerH264 : public VideoRtpDepacketizer { public: ~VideoRtpDepacketizerH264() override = default; - absl::optional Parse( - rtc::CopyOnWriteBuffer rtp_payload) override; + std::optional Parse(CopyOnWriteBuffer rtp_payload) override; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_h264_unittest.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_h264_unittest.cc index f569c45fd3..c2ff14413f 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_h264_unittest.cc +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_h264_unittest.cc @@ -11,14 +11,18 @@ #include "modules/rtp_rtcp/source/video_rtp_depacketizer_h264.h" #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "common_video/h264/h264_common.h" -#include "modules/rtp_rtcp/mocks/mock_rtp_rtcp.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtp_format_h264.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "rtc_base/buffer.h" #include "rtc_base/copy_on_write_buffer.h" #include "test/gmock.h" #include "test/gtest.h" @@ -33,30 +37,26 @@ using ::testing::Eq; using ::testing::IsEmpty; using ::testing::SizeIs; -enum Nalu { - kSlice = 1, - kIdr = 5, - kSei = 6, - kSps = 7, - kPps = 8, - kStapA = 24, - kFuA = 28 +// clang-format off: split example data on NAL unit boundaries. +constexpr uint8_t kOriginalSps[] = { + H264::kSps, 0x00, 0x00, 0x03, 0x03, + 0xF4, 0x05, 0x03, 0xC7, 0xC0 }; - -constexpr uint8_t kOriginalSps[] = {kSps, 0x00, 0x00, 0x03, 0x03, - 0xF4, 0x05, 0x03, 0xC7, 0xC0}; -constexpr uint8_t kRewrittenSps[] = {kSps, 0x00, 0x00, 0x03, 0x03, - 0xF4, 0x05, 0x03, 0xC7, 0xE0, - 0x1B, 0x41, 0x10, 0x8D, 0x00}; -constexpr uint8_t kIdrOne[] = {kIdr, 0xFF, 0x00, 0x00, 0x04}; -constexpr uint8_t kIdrTwo[] = {kIdr, 0xFF, 0x00, 0x11}; +constexpr uint8_t kRewrittenSps[] = { + H264::kSps, 0x00, 0x00, 0x03, 0x03, + 0xF4, 0x05, 0x03, 0xC7, 0xE0, + 0x1B, 0x41, 0x10, 0x8D, 0x00 +}; +constexpr uint8_t kIdrOne[] = {H264::kIdr, 0xFF, 0x00, 0x00, 0x04}; +constexpr uint8_t kIdrTwo[] = {H264::kIdr, 0xFF, 0x00, 0x11}; +// clang-format on TEST(VideoRtpDepacketizerH264Test, SingleNalu) { - uint8_t packet[2] = {0x05, 0xFF}; // F=0, NRI=0, Type=5 (IDR). - rtc::CopyOnWriteBuffer rtp_payload(packet); + const uint8_t kPayload[] = {H264::kIdr, 0xFF}; // F=0, NRI=0, Type=5 (IDR). 
+ CopyOnWriteBuffer rtp_payload(kPayload); VideoRtpDepacketizerH264 depacketizer; - absl::optional parsed = + std::optional parsed = depacketizer.Parse(rtp_payload); ASSERT_TRUE(parsed); @@ -65,19 +65,24 @@ TEST(VideoRtpDepacketizerH264Test, SingleNalu) { EXPECT_EQ(parsed->video_header.codec, kVideoCodecH264); EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); const RTPVideoHeaderH264& h264 = - absl::get(parsed->video_header.video_type_header); + std::get(parsed->video_header.video_type_header); EXPECT_EQ(h264.packetization_type, kH264SingleNalu); - EXPECT_EQ(h264.nalu_type, kIdr); + EXPECT_EQ(h264.nalu_type, H264::kIdr); } TEST(VideoRtpDepacketizerH264Test, SingleNaluSpsWithResolution) { - uint8_t packet[] = {kSps, 0x7A, 0x00, 0x1F, 0xBC, 0xD9, 0x40, 0x50, - 0x05, 0xBA, 0x10, 0x00, 0x00, 0x03, 0x00, 0xC0, - 0x00, 0x00, 0x03, 0x2A, 0xE0, 0xF1, 0x83, 0x25}; - rtc::CopyOnWriteBuffer rtp_payload(packet); + // clang-format off: split example data on NAL unit boundaries. + const uint8_t kPayload[] = { + H264::kSps, 0x7A, 0x00, 0x1F, 0xBC, 0xD9, + 0x40, 0x50, 0x05, 0xBA, 0x10, 0x00, + 0x00, 0x03, 0x00, 0xC0, 0x00, 0x00, + 0x03, 0x2A, 0xE0, 0xF1, 0x83, 0x25 + }; + // clang-format on + CopyOnWriteBuffer rtp_payload(kPayload); VideoRtpDepacketizerH264 depacketizer; - absl::optional parsed = + std::optional parsed = depacketizer.Parse(rtp_payload); ASSERT_TRUE(parsed); @@ -88,31 +93,33 @@ TEST(VideoRtpDepacketizerH264Test, SingleNaluSpsWithResolution) { EXPECT_EQ(parsed->video_header.width, 1280u); EXPECT_EQ(parsed->video_header.height, 720u); const auto& h264 = - absl::get(parsed->video_header.video_type_header); + std::get(parsed->video_header.video_type_header); EXPECT_EQ(h264.packetization_type, kH264SingleNalu); } TEST(VideoRtpDepacketizerH264Test, StapAKey) { - // clang-format off - const NaluInfo kExpectedNalus[] = { {H264::kSps, 0, -1}, - {H264::kPps, 1, 2}, - {H264::kIdr, -1, 0} }; - uint8_t packet[] = {kStapA, // F=0, NRI=0, Type=24. - // Length, nal header, payload. - 0, 0x18, kExpectedNalus[0].type, - 0x7A, 0x00, 0x1F, 0xBC, 0xD9, 0x40, 0x50, 0x05, 0xBA, - 0x10, 0x00, 0x00, 0x03, 0x00, 0xC0, 0x00, 0x00, 0x03, - 0x2A, 0xE0, 0xF1, 0x83, 0x25, - 0, 0xD, kExpectedNalus[1].type, - 0x69, 0xFC, 0x0, 0x0, 0x3, 0x0, 0x7, 0xFF, 0xFF, 0xFF, - 0xF6, 0x40, - 0, 0xB, kExpectedNalus[2].type, - 0x85, 0xB8, 0x0, 0x4, 0x0, 0x0, 0x13, 0x93, 0x12, 0x0}; + const NaluInfo kExpectedNalus[] = { + {H264::kSps, 0, -1}, {H264::kPps, 1, 2}, {H264::kIdr, -1, 0}}; + // clang-format off: split example data on NAL unit boundaries. + const uint8_t kPayload[] = { + H264::kStapA, // F=0, NRI=0, Type=24. + // Length (2 bytes), nal header, payload. 
+ 0x00, 0x18, + kExpectedNalus[0].type, 0x7A, 0x00, 0x1F, 0xBC, 0xD9, 0x40, 0x50, 0x05, + 0xBA, 0x10, 0x00, 0x00, 0x03, 0x00, 0xC0, 0x00, 0x00, 0x03, 0x2A, 0xE0, + 0xF1, 0x83, 0x25, + 0x00, 0xD, + kExpectedNalus[1].type, 0x69, 0xFC, 0x0, 0x0, 0x3, 0x0, 0x7, 0xFF, 0xFF, + 0xFF, 0xF6, 0x40, + 0x00, 0xB, + kExpectedNalus[2].type, 0x85, 0xB8, 0x0, 0x4, 0x0, 0x0, 0x13, 0x93, 0x12, + 0x0 + }; // clang-format on - rtc::CopyOnWriteBuffer rtp_payload(packet); + CopyOnWriteBuffer rtp_payload(kPayload); VideoRtpDepacketizerH264 depacketizer; - absl::optional parsed = + std::optional parsed = depacketizer.Parse(rtp_payload); ASSERT_TRUE(parsed); @@ -121,33 +128,31 @@ TEST(VideoRtpDepacketizerH264Test, StapAKey) { EXPECT_EQ(parsed->video_header.codec, kVideoCodecH264); EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); const auto& h264 = - absl::get(parsed->video_header.video_type_header); - EXPECT_EQ(h264.packetization_type, kH264StapA); + std::get(parsed->video_header.video_type_header); + EXPECT_EQ(h264.packetization_type, H264PacketizationTypes::kH264StapA); // NALU type for aggregated packets is the type of the first packet only. - EXPECT_EQ(h264.nalu_type, kSps); - ASSERT_EQ(h264.nalus_length, 3u); - for (size_t i = 0; i < h264.nalus_length; ++i) { - EXPECT_EQ(h264.nalus[i].type, kExpectedNalus[i].type) - << "Failed parsing nalu " << i; - EXPECT_EQ(h264.nalus[i].sps_id, kExpectedNalus[i].sps_id) - << "Failed parsing nalu " << i; - EXPECT_EQ(h264.nalus[i].pps_id, kExpectedNalus[i].pps_id) - << "Failed parsing nalu " << i; - } + EXPECT_EQ(h264.nalu_type, H264::kSps); + EXPECT_THAT(h264.nalus, ElementsAreArray(kExpectedNalus)); } TEST(VideoRtpDepacketizerH264Test, StapANaluSpsWithResolution) { - uint8_t packet[] = {kStapA, // F=0, NRI=0, Type=24. - // Length (2 bytes), nal header, payload. - 0x00, 0x19, kSps, 0x7A, 0x00, 0x1F, 0xBC, 0xD9, 0x40, - 0x50, 0x05, 0xBA, 0x10, 0x00, 0x00, 0x03, 0x00, 0xC0, - 0x00, 0x00, 0x03, 0x2A, 0xE0, 0xF1, 0x83, 0x25, 0x80, - 0x00, 0x03, kIdr, 0xFF, 0x00, 0x00, 0x04, kIdr, 0xFF, - 0x00, 0x11}; - rtc::CopyOnWriteBuffer rtp_payload(packet); + // clang-format off: split example data on NAL unit boundaries. + const uint8_t kPayload[] = { + H264::kStapA, // F=0, NRI=0, Type=24. + // Length (2 bytes), nal header, payload. 
+ 0x00, 0x19, + H264::kSps, 0x7A, 0x00, 0x1F, 0xBC, 0xD9, 0x40, 0x50, 0x05, 0xBA, 0x10, + 0x00, 0x00, 0x03, 0x00, 0xC0, 0x00, 0x00, 0x03, 0x2A, 0xE0, 0xF1, 0x83, + 0x25, 0x80, 0x00, 0x03, + H264::kIdr, 0xFF, 0x00, + 0x00, 0x04, + H264::kIdr, 0xFF, 0x00, 0x11 + }; + // clang-format on + CopyOnWriteBuffer rtp_payload(kPayload); VideoRtpDepacketizerH264 depacketizer; - absl::optional parsed = + std::optional parsed = depacketizer.Parse(rtp_payload); ASSERT_TRUE(parsed); @@ -158,33 +163,47 @@ TEST(VideoRtpDepacketizerH264Test, StapANaluSpsWithResolution) { EXPECT_EQ(parsed->video_header.width, 1280u); EXPECT_EQ(parsed->video_header.height, 720u); const auto& h264 = - absl::get(parsed->video_header.video_type_header); - EXPECT_EQ(h264.packetization_type, kH264StapA); + std::get(parsed->video_header.video_type_header); + EXPECT_EQ(h264.packetization_type, H264PacketizationTypes::kH264StapA); } TEST(VideoRtpDepacketizerH264Test, EmptyStapARejected) { - uint8_t lone_empty_packet[] = {kStapA, 0x00, 0x00}; - uint8_t leading_empty_packet[] = {kStapA, 0x00, 0x00, 0x00, 0x04, - kIdr, 0xFF, 0x00, 0x11}; - uint8_t middle_empty_packet[] = {kStapA, 0x00, 0x03, kIdr, 0xFF, 0x00, 0x00, - 0x00, 0x00, 0x04, kIdr, 0xFF, 0x00, 0x11}; - uint8_t trailing_empty_packet[] = {kStapA, 0x00, 0x03, kIdr, - 0xFF, 0x00, 0x00, 0x00}; + // clang-format off: split example data on NAL unit boundaries. + uint8_t lone_empty_packet[] = { + H264::kStapA, 0x00, 0x00 + }; + uint8_t leading_empty_packet[] = { + H264::kStapA, + 0x00, 0x00, // Empty STAP-A is invalid. + 0x00, 0x04, + H264::kIdr, 0xFF, 0x00, 0x11 + }; + uint8_t middle_empty_packet[] = { + H264::kStapA, + 0x00, 0x03, + H264::kIdr, 0xFF, 0x00, 0x00, 0x00, + 0x00, 0x04, + H264::kIdr, 0xFF, 0x00, 0x11 + }; + uint8_t trailing_empty_packet[] = { + H264::kStapA, + 0x00, 0x03, + H264::kIdr, 0xFF, 0x00, 0x00, 0x00 + }; + // clang-format on VideoRtpDepacketizerH264 depacketizer; - EXPECT_FALSE(depacketizer.Parse(rtc::CopyOnWriteBuffer(lone_empty_packet))); - EXPECT_FALSE( - depacketizer.Parse(rtc::CopyOnWriteBuffer(leading_empty_packet))); - EXPECT_FALSE(depacketizer.Parse(rtc::CopyOnWriteBuffer(middle_empty_packet))); - EXPECT_FALSE( - depacketizer.Parse(rtc::CopyOnWriteBuffer(trailing_empty_packet))); + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(lone_empty_packet))); + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(leading_empty_packet))); + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(middle_empty_packet))); + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(trailing_empty_packet))); } TEST(VideoRtpDepacketizerH264Test, DepacketizeWithRewriting) { - rtc::CopyOnWriteBuffer in_buffer; - rtc::Buffer out_buffer; + CopyOnWriteBuffer in_buffer; + Buffer out_buffer; - uint8_t kHeader[2] = {kStapA}; + uint8_t kHeader[] = {H264::kStapA, 0x00}; in_buffer.AppendData(kHeader, 1); out_buffer.AppendData(kHeader, 1); @@ -210,16 +229,16 @@ TEST(VideoRtpDepacketizerH264Test, DepacketizeWithRewriting) { VideoRtpDepacketizerH264 depacketizer; auto parsed = depacketizer.Parse(in_buffer); ASSERT_TRUE(parsed); - EXPECT_THAT(rtc::MakeArrayView(parsed->video_payload.cdata(), - parsed->video_payload.size()), + EXPECT_THAT(MakeArrayView(parsed->video_payload.cdata(), + parsed->video_payload.size()), ElementsAreArray(out_buffer)); } TEST(VideoRtpDepacketizerH264Test, DepacketizeWithDoubleRewriting) { - rtc::CopyOnWriteBuffer in_buffer; - rtc::Buffer out_buffer; + CopyOnWriteBuffer in_buffer; + Buffer out_buffer; - uint8_t kHeader[2] = {kStapA}; + uint8_t kHeader[] = {H264::kStapA, 0x00}; 
in_buffer.AppendData(kHeader, 1); out_buffer.AppendData(kHeader, 1); @@ -255,20 +274,28 @@ TEST(VideoRtpDepacketizerH264Test, DepacketizeWithDoubleRewriting) { ASSERT_TRUE(parsed); std::vector expected_packet_payload( out_buffer.data(), &out_buffer.data()[out_buffer.size()]); - EXPECT_THAT(rtc::MakeArrayView(parsed->video_payload.cdata(), - parsed->video_payload.size()), + EXPECT_THAT(MakeArrayView(parsed->video_payload.cdata(), + parsed->video_payload.size()), ElementsAreArray(out_buffer)); } TEST(VideoRtpDepacketizerH264Test, StapADelta) { - uint8_t packet[16] = {kStapA, // F=0, NRI=0, Type=24. - // Length, nal header, payload. - 0, 0x02, kSlice, 0xFF, 0, 0x03, kSlice, 0xFF, 0x00, 0, - 0x04, kSlice, 0xFF, 0x00, 0x11}; - rtc::CopyOnWriteBuffer rtp_payload(packet); + // clang-format off: split example data on NAL unit boundaries. + const uint8_t kPayload[] = { + H264::kStapA, // F=0, NRI=0, Type=24. + // Length (2 bytes), nal header, payload. + 0x00, 0x02, + H264::kSlice, 0xFF, + 0x00, 0x03, + H264::kSlice, 0xFF, 0x00, + 0x00, 0x04, + H264::kSlice, 0xFF, 0x00, 0x11 + }; + // clang-format on + CopyOnWriteBuffer rtp_payload(kPayload); VideoRtpDepacketizerH264 depacketizer; - absl::optional parsed = + std::optional parsed = depacketizer.Parse(rtp_payload); ASSERT_TRUE(parsed); @@ -279,95 +306,96 @@ TEST(VideoRtpDepacketizerH264Test, StapADelta) { EXPECT_EQ(parsed->video_header.codec, kVideoCodecH264); EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); const RTPVideoHeaderH264& h264 = - absl::get(parsed->video_header.video_type_header); - EXPECT_EQ(h264.packetization_type, kH264StapA); + std::get(parsed->video_header.video_type_header); + EXPECT_EQ(h264.packetization_type, H264PacketizationTypes::kH264StapA); // NALU type for aggregated packets is the type of the first packet only. - EXPECT_EQ(h264.nalu_type, kSlice); + EXPECT_EQ(h264.nalu_type, H264::kSlice); } TEST(VideoRtpDepacketizerH264Test, FuA) { - // clang-format off - uint8_t packet1[] = { - kFuA, // F=0, NRI=0, Type=28. - kH264SBit | kIdr, // FU header. - 0x85, 0xB8, 0x0, 0x4, 0x0, 0x0, 0x13, 0x93, 0x12, 0x0 // Payload. + // clang-format off: split example data on NAL unit boundaries. + const uint8_t kPayload1[] = { + H264::kFuA, // F=0, NRI=0, Type=28. + kH264SBit | H264::kIdr, // FU header. + 0x85, 0xB8, 0x00, 0x04, 0x00, 0x00, 0x13, 0x93, 0x12, 0x00 // Payload. }; - // clang-format on - const uint8_t kExpected1[] = {kIdr, 0x85, 0xB8, 0x0, 0x4, 0x0, - 0x0, 0x13, 0x93, 0x12, 0x0}; - - uint8_t packet2[] = { - kFuA, // F=0, NRI=0, Type=28. - kIdr, // FU header. - 0x02 // Payload. + const uint8_t kExpected1[] = { + H264::kIdr, 0x85, 0xB8, 0x00, 0x04, 0x00, + 0x00, 0x13, 0x93, 0x12, 0x00}; + + const uint8_t kPayload2[] = { + H264::kFuA, // F=0, NRI=0, Type=28. + H264::kIdr, // FU header. + 0x02 // Payload. }; const uint8_t kExpected2[] = {0x02}; - uint8_t packet3[] = { - kFuA, // F=0, NRI=0, Type=28. - kH264EBit | kIdr, // FU header. - 0x03 // Payload. + const uint8_t kPayload3[] = { + H264::kFuA, // F=0, NRI=0, Type=28. + kH264EBit | H264::kIdr, // FU header. + 0x03 // Payload. }; const uint8_t kExpected3[] = {0x03}; + // clang-format on VideoRtpDepacketizerH264 depacketizer; - absl::optional parsed1 = - depacketizer.Parse(rtc::CopyOnWriteBuffer(packet1)); + std::optional parsed1 = + depacketizer.Parse(CopyOnWriteBuffer(kPayload1)); ASSERT_TRUE(parsed1); // We expect that the first packet is one byte shorter since the FU-A header // has been replaced by the original nal header. 
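// --------------------------------------------------------------------------
// Editorial sketch: the bit manipulation behind the comment above. For the
// first FU-A fragment, the two-byte FU indicator + FU header are dropped and
// a single reconstructed NAL header is emitted instead (RFC 6184 section
// 5.8): F and NRI come from the FU indicator, the 5-bit type from the FU
// header. Names below are illustrative, not the production helpers.
#include <cstdint>

constexpr uint8_t kFuaSBit = 0x80;  // Start-of-fragment bit in the FU header.
constexpr uint8_t kFuaEBit = 0x40;  // End-of-fragment bit in the FU header.

bool IsFirstFragment(uint8_t fu_header) { return (fu_header & kFuaSBit) != 0; }
bool IsLastFragment(uint8_t fu_header) { return (fu_header & kFuaEBit) != 0; }

uint8_t RebuildH264NalHeader(uint8_t fu_indicator, uint8_t fu_header) {
  // Keep F (bit 7) and NRI (bits 6-5), take the NAL type from the FU header.
  return static_cast<uint8_t>((fu_indicator & 0xE0) | (fu_header & 0x1F));
}
// Example from the test above: fu_indicator = kFuA (0x1C, F=0, NRI=0) and
// fu_header = kH264SBit | kIdr (0x85) rebuild to 0x05, i.e. kIdr.
// --------------------------------------------------------------------------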
- EXPECT_THAT(rtc::MakeArrayView(parsed1->video_payload.cdata(), - parsed1->video_payload.size()), + EXPECT_THAT(MakeArrayView(parsed1->video_payload.cdata(), + parsed1->video_payload.size()), ElementsAreArray(kExpected1)); EXPECT_EQ(parsed1->video_header.frame_type, VideoFrameType::kVideoFrameKey); EXPECT_EQ(parsed1->video_header.codec, kVideoCodecH264); EXPECT_TRUE(parsed1->video_header.is_first_packet_in_frame); { const RTPVideoHeaderH264& h264 = - absl::get(parsed1->video_header.video_type_header); - EXPECT_EQ(h264.packetization_type, kH264FuA); - EXPECT_EQ(h264.nalu_type, kIdr); - ASSERT_EQ(h264.nalus_length, 1u); - EXPECT_EQ(h264.nalus[0].type, static_cast(kIdr)); + std::get(parsed1->video_header.video_type_header); + EXPECT_EQ(h264.packetization_type, H264PacketizationTypes::kH264FuA); + EXPECT_EQ(h264.nalu_type, H264::kIdr); + ASSERT_THAT(h264.nalus, SizeIs(1)); + EXPECT_EQ(h264.nalus[0].type, H264::kIdr); EXPECT_EQ(h264.nalus[0].sps_id, -1); EXPECT_EQ(h264.nalus[0].pps_id, 0); } // Following packets will be 2 bytes shorter since they will only be appended // onto the first packet. - auto parsed2 = depacketizer.Parse(rtc::CopyOnWriteBuffer(packet2)); - EXPECT_THAT(rtc::MakeArrayView(parsed2->video_payload.cdata(), - parsed2->video_payload.size()), + auto parsed2 = depacketizer.Parse(CopyOnWriteBuffer(kPayload2)); + EXPECT_THAT(MakeArrayView(parsed2->video_payload.cdata(), + parsed2->video_payload.size()), ElementsAreArray(kExpected2)); EXPECT_FALSE(parsed2->video_header.is_first_packet_in_frame); EXPECT_EQ(parsed2->video_header.codec, kVideoCodecH264); { const RTPVideoHeaderH264& h264 = - absl::get(parsed2->video_header.video_type_header); - EXPECT_EQ(h264.packetization_type, kH264FuA); - EXPECT_EQ(h264.nalu_type, kIdr); + std::get(parsed2->video_header.video_type_header); + EXPECT_EQ(h264.packetization_type, H264PacketizationTypes::kH264FuA); + EXPECT_EQ(h264.nalu_type, H264::kIdr); // NALU info is only expected for the first FU-A packet. - EXPECT_EQ(h264.nalus_length, 0u); + EXPECT_THAT(h264.nalus, IsEmpty()); } - auto parsed3 = depacketizer.Parse(rtc::CopyOnWriteBuffer(packet3)); - EXPECT_THAT(rtc::MakeArrayView(parsed3->video_payload.cdata(), - parsed3->video_payload.size()), + auto parsed3 = depacketizer.Parse(CopyOnWriteBuffer(kPayload3)); + EXPECT_THAT(MakeArrayView(parsed3->video_payload.cdata(), + parsed3->video_payload.size()), ElementsAreArray(kExpected3)); EXPECT_FALSE(parsed3->video_header.is_first_packet_in_frame); EXPECT_EQ(parsed3->video_header.codec, kVideoCodecH264); { const RTPVideoHeaderH264& h264 = - absl::get(parsed3->video_header.video_type_header); - EXPECT_EQ(h264.packetization_type, kH264FuA); - EXPECT_EQ(h264.nalu_type, kIdr); + std::get(parsed3->video_header.video_type_header); + EXPECT_EQ(h264.packetization_type, H264PacketizationTypes::kH264FuA); + EXPECT_EQ(h264.nalu_type, H264::kIdr); // NALU info is only expected for the first FU-A packet. 
- ASSERT_EQ(h264.nalus_length, 0u); + EXPECT_THAT(h264.nalus, IsEmpty()); } } TEST(VideoRtpDepacketizerH264Test, EmptyPayload) { - rtc::CopyOnWriteBuffer empty; + CopyOnWriteBuffer empty; VideoRtpDepacketizerH264 depacketizer; EXPECT_FALSE(depacketizer.Parse(empty)); } @@ -375,51 +403,200 @@ TEST(VideoRtpDepacketizerH264Test, EmptyPayload) { TEST(VideoRtpDepacketizerH264Test, TruncatedFuaNalu) { const uint8_t kPayload[] = {0x9c}; VideoRtpDepacketizerH264 depacketizer; - EXPECT_FALSE(depacketizer.Parse(rtc::CopyOnWriteBuffer(kPayload))); + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(kPayload))); } TEST(VideoRtpDepacketizerH264Test, TruncatedSingleStapANalu) { const uint8_t kPayload[] = {0xd8, 0x27}; VideoRtpDepacketizerH264 depacketizer; - EXPECT_FALSE(depacketizer.Parse(rtc::CopyOnWriteBuffer(kPayload))); + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(kPayload))); } TEST(VideoRtpDepacketizerH264Test, StapAPacketWithTruncatedNalUnits) { const uint8_t kPayload[] = {0x58, 0xCB, 0xED, 0xDF}; VideoRtpDepacketizerH264 depacketizer; - EXPECT_FALSE(depacketizer.Parse(rtc::CopyOnWriteBuffer(kPayload))); + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(kPayload))); } TEST(VideoRtpDepacketizerH264Test, TruncationJustAfterSingleStapANalu) { const uint8_t kPayload[] = {0x38, 0x27, 0x27}; VideoRtpDepacketizerH264 depacketizer; - EXPECT_FALSE(depacketizer.Parse(rtc::CopyOnWriteBuffer(kPayload))); -} - -TEST(VideoRtpDepacketizerH264Test, ShortSpsPacket) { - const uint8_t kPayload[] = {0x27, 0x80, 0x00}; - VideoRtpDepacketizerH264 depacketizer; - EXPECT_TRUE(depacketizer.Parse(rtc::CopyOnWriteBuffer(kPayload))); + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(kPayload))); } TEST(VideoRtpDepacketizerH264Test, SeiPacket) { const uint8_t kPayload[] = { - kSei, // F=0, NRI=0, Type=6. + H264::kSei, // F=0, NRI=0, Type=6. 0x03, 0x03, 0x03, 0x03 // Payload. }; VideoRtpDepacketizerH264 depacketizer; - auto parsed = depacketizer.Parse(rtc::CopyOnWriteBuffer(kPayload)); + auto parsed = depacketizer.Parse(CopyOnWriteBuffer(kPayload)); ASSERT_TRUE(parsed); const RTPVideoHeaderH264& h264 = - absl::get(parsed->video_header.video_type_header); + std::get(parsed->video_header.video_type_header); EXPECT_EQ(parsed->video_header.frame_type, VideoFrameType::kVideoFrameDelta); EXPECT_EQ(h264.packetization_type, kH264SingleNalu); - EXPECT_EQ(h264.nalu_type, kSei); - ASSERT_EQ(h264.nalus_length, 1u); - EXPECT_EQ(h264.nalus[0].type, static_cast(kSei)); + EXPECT_EQ(h264.nalu_type, H264::kSei); + ASSERT_THAT(h264.nalus, SizeIs(1)); + EXPECT_EQ(h264.nalus[0].type, H264::kSei); EXPECT_EQ(h264.nalus[0].sps_id, -1); EXPECT_EQ(h264.nalus[0].pps_id, -1); } +TEST(VideoRtpDepacketizerH264Test, ShortSpsPacket) { + const uint8_t kPayload[] = {0x27, 0x80, 0x00}; + VideoRtpDepacketizerH264 depacketizer; + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(kPayload))); +} + +TEST(VideoRtpDepacketizerH264Test, BadSps) { + const uint8_t kPayload[] = { + H264::kSps, 0x42, 0x41, 0x2a, 0xd3, 0x93, 0xd3, 0x3b // Payload. + }; + VideoRtpDepacketizerH264 depacketizer; + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(kPayload))); +} + +TEST(VideoRtpDepacketizerH264Test, BadPps) { + const uint8_t kPayload[] = { + H264::kPps, + 0x00 // Payload. + }; + VideoRtpDepacketizerH264 depacketizer; + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(kPayload))); +} + +TEST(VideoRtpDepacketizerH264Test, BadSlice) { + const uint8_t kPayload[] = { + H264::kIdr, + 0xc0 // Payload. 
+ }; + VideoRtpDepacketizerH264 depacketizer; + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(kPayload))); +} + +TEST(VideoRtpDepacketizerH264Test, StapASpsPpsMultiSlice) { + // A STAP-A containing a black 320x192 key frame with multiple slices. + // clang-format off: split example data on NAL unit boundaries. + const uint8_t kPayload[] = { + H264::kStapA, // F=0, NRI=0, Type=24. + 0x00, 0x10, + 0x67, 0x42, 0xc0, 0x15, 0x8c, 0x68, 0x14, 0x19, // SPS. + 0x79, 0xe0, 0x1e, 0x11, 0x08, 0xd4, 0x00, 0x04, + 0x00, 0x06, + 0x68, 0xce, 0x3c, 0x80, 0x00, 0x2e, // PPS. + // Slices. + 0x00, 0x30, + 0x65, 0xb8, 0x00, 0x04, 0x08, 0x79, 0x31, 0x40, 0x00, 0x42, 0xae, 0x4d, + 0xc9, 0xc9, 0xc9, 0xc9, 0xc9, 0xc9, 0xc9, 0xc9, 0xc9, 0xc9, 0xc9, 0xc9, + 0xc9, 0xc9, 0xc9, 0xc9, 0xc9, 0xc9, 0xd6, 0xeb, 0xae, 0xba, 0xeb, 0xae, + 0xba, 0xeb, 0xae, 0xba, 0xeb, 0xae, 0xba, 0xeb, 0xae, 0xbc, 0x00, 0x2f, + 0x00, 0x2f, + 0x65, 0x05, 0x2e, 0x00, 0x01, 0x02, 0x1e, 0x4c, 0x50, 0x00, 0x10, 0xab, + 0x93, 0x72, 0x72, 0x72, 0x72, 0x72, 0x72, 0x72, 0x72, 0x72, 0x72, 0x72, + 0x72, 0x72, 0x72, 0x72, 0x72, 0x72, 0x72, 0x75, 0xba, 0xeb, 0xae, 0xba, + 0xeb, 0xae, 0xba, 0xeb, 0xae, 0xba, 0xeb, 0xae, 0xba, 0xeb, 0xaf, + 0x00, 0x30, + 0x65, 0x02, 0x8b, 0x80, 0x00, 0x40, 0x87, 0x93, 0x14, 0x00, 0x04, 0x2a, + 0xe4, 0xdc, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, + 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x9d, 0x6e, 0xba, 0xeb, 0xae, + 0xba, 0xeb, 0xae, 0xba, 0xeb, 0xae, 0xba, 0xeb, 0xae, 0xba, 0xeb, 0xc0, + 0x00, 0x30, + 0x65, 0x03, 0xcb, 0x80, 0x00, 0x40, 0x87, 0x93, 0x14, 0x00, 0x04, 0x2a, + 0xe4, 0xdc, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, + 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x9c, 0x9d, 0x6e, 0xba, 0xeb, 0xae, + 0xba, 0xeb, 0xae, 0xba, 0xeb, 0xae, 0xba, 0xeb, 0xae, 0xba, 0xeb, 0xc0, + 0x00, 0x30, + 0x65, 0x01, 0x42, 0xe0, 0x00, 0x10, 0x21, 0xe4, 0xc5, 0x00, 0x01, 0x0a, + 0xb9, 0x37, 0x27, 0x27, 0x27, 0x27, 0x27, 0x27, 0x27, 0x27, 0x27, 0x27, + 0x27, 0x27, 0x27, 0x27, 0x27, 0x27, 0x27, 0x27, 0x5b, 0xae, 0xba, 0xeb, + 0xae, 0xba, 0xeb, 0xae, 0xba, 0xeb, 0xae, 0xba, 0xeb, 0xae, 0xba, 0xf0, + 0x00, 0x30, + 0x65, 0x01, 0x92, 0xe0, 0x00, 0x10, 0x21, 0xe4, 0xc5, 0x00, 0x01, 0x0a, + 0xb9, 0x37, 0x27, 0x27, 0x27, 0x27, 0x27, 0x27, 0x27, 0x27, 0x27, 0x27, + 0x27, 0x27, 0x27, 0x27, 0x27, 0x27, 0x27, 0x27, 0x5b, 0xae, 0xba, 0xeb, + 0xae, 0xba, 0xeb, 0xae, 0xba, 0xeb, 0xae, 0xba, 0xeb, 0xae, 0xba, 0xf0 + }; + // clang-format on + + VideoRtpDepacketizerH264 depacketizer; + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(kPayload)); + ASSERT_TRUE(parsed); + EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH264Test, SecondSliceIdrNalu) { + // First few bytes of a second slice of an IDR nalu with + // first_mb_in_slice = 480. + // clang-format off: split example data on NAL unit boundaries. 
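// --------------------------------------------------------------------------
// Editorial sketch tied to the comment above: first_mb_in_slice is the first
// Exp-Golomb (ue(v)) element of an H.264 slice header, which starts right
// after the one-byte NAL header. A nonzero value means the slice is not the
// first slice of the picture, so the packet is not the first packet in the
// frame. This minimal reader ignores RBSP emulation-prevention bytes, which
// cannot occur this early in the payload below; names are illustrative.
#include <cstddef>
#include <cstdint>
#include <optional>

class UeReader {
 public:
  UeReader(const uint8_t* data, size_t size) : data_(data), size_(size) {}

  // Unsigned Exp-Golomb: N leading zero bits, a one bit, then N info bits.
  std::optional<uint32_t> ReadUe() {
    int zeros = 0;
    while (true) {
      std::optional<int> bit = ReadBit();
      if (!bit) return std::nullopt;
      if (*bit == 1) break;
      ++zeros;
    }
    uint32_t suffix = 0;
    for (int i = 0; i < zeros; ++i) {
      std::optional<int> bit = ReadBit();
      if (!bit) return std::nullopt;
      suffix = (suffix << 1) | static_cast<uint32_t>(*bit);
    }
    return (1u << zeros) - 1 + suffix;
  }

 private:
  std::optional<int> ReadBit() {
    if (pos_ >= size_ * 8) return std::nullopt;
    int bit = (data_[pos_ / 8] >> (7 - pos_ % 8)) & 1;
    ++pos_;
    return bit;
  }
  const uint8_t* data_;
  size_t size_;
  size_t pos_ = 0;
};
// For the payload below, UeReader(kPayload + 1, sizeof(kPayload) - 1).ReadUe()
// yields 480, matching the first_mb_in_slice value stated above.
// --------------------------------------------------------------------------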
+ const uint8_t kPayload[] = { + 0x65, 0x00, 0xf0, 0x88, 0x82, 0x01, 0x3b, 0xff, 0xdf, 0xfe, 0x0b, 0xbb, + 0xfc, 0xb4, 0x30, 0xd1, 0x00, 0xef, 0xfd, 0xef, 0x0e, 0x79, 0x8b, 0x74, + 0x9b, 0x44, 0xf3, 0xb8, 0x65, 0x8f, 0xa1, 0x92, 0x30, 0xf9, 0x40, 0x06, + 0xb0, 0x00, 0x00, 0x03, 0x00, 0x00, 0x03, 0x00, 0x00, 0x03, 0x00, 0x00, + 0x03, 0x00, 0x18, 0x87, 0x4f, 0x6a, 0xfe, 0x60, 0x03, 0x9f, 0xfe, 0xd8, + 0x8b, 0xa6, 0x67, 0x31 + }; + // clang-format on + + VideoRtpDepacketizerH264 depacketizer; + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(kPayload)); + ASSERT_TRUE(parsed); + EXPECT_FALSE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH264Test, AudSetsFirstPacketInFrame) { + // clang-format off: split example data on NAL unit boundaries. + const uint8_t kPayload[] = { + H264::kAud, 0x10 + }; + // clang-format on + + VideoRtpDepacketizerH264 depacketizer; + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(kPayload)); + ASSERT_TRUE(parsed); + EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH264Test, PpsSetsFirstPacketInFrame) { + // clang-format off: split example data on NAL unit boundaries. + const uint8_t kPayload[] = { + H264::kPps, 0x69, 0xFC, 0x00, 0x00, 0x03, 0x00, + 0x07, 0xFF, 0xFF, 0xFF, 0xF6, 0x40 + }; + // clang-format on + + VideoRtpDepacketizerH264 depacketizer; + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(kPayload)); + ASSERT_TRUE(parsed); + EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH264Test, SeiSetsFirstPacketInFrame) { + // clang-format off: split example data on NAL unit boundaries. + const uint8_t kPayload[] = { + H264::kSei, 0x05, 0x04, 0xDE, 0xAD, 0xBE, 0xEF, 0x80 + }; + // clang-format on + + VideoRtpDepacketizerH264 depacketizer; + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(kPayload)); + ASSERT_TRUE(parsed); + EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH264Test, EmptyNaluPayload) { + const uint8_t kPayload[] = {H264::kEndOfSequence}; + VideoRtpDepacketizerH264 depacketizer; + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(kPayload)); + ASSERT_TRUE(parsed); +} + } // namespace } // namespace webrtc diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.cc new file mode 100644 index 0000000000..4af35dc2c4 --- /dev/null +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.cc @@ -0,0 +1,278 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h" + +#include +#include +#include +#include +#include + +#include "absl/base/attributes.h" +#include "api/array_view.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" +#include "common_video/h265/h265_bitstream_parser.h" +#include "common_video/h265/h265_common.h" +#include "common_video/h265/h265_sps_parser.h" +#include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/rtp_packet_h265_common.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/logging.h" + +// RTP Payload Format for HEVC: https://datatracker.ietf.org/doc/html/rfc7798 + +namespace webrtc { +namespace { + +bool ParseApStartOffsets(const uint8_t* nalu_ptr, + size_t length_remaining, + std::vector* offsets) { + size_t offset = 0; + while (length_remaining > 0) { + // Buffer doesn't contain room for additional NALU length. + if (length_remaining < kH265LengthFieldSizeBytes) + return false; + // Read 16-bit NALU size defined in RFC7798 section 4.4.2. + uint16_t nalu_size = ByteReader::ReadBigEndian(nalu_ptr); + nalu_ptr += kH265LengthFieldSizeBytes; + length_remaining -= kH265LengthFieldSizeBytes; + if (nalu_size > length_remaining) + return false; + nalu_ptr += nalu_size; + length_remaining -= nalu_size; + + offsets->push_back(offset + kH265ApHeaderSizeBytes); + offset += kH265LengthFieldSizeBytes + nalu_size; + } + return true; +} + +// Single NALU packet structure +// https://datatracker.ietf.org/doc/html/rfc7798#section-4.4.1 +// Aggregation Packet (AP) strcture +// https://datatracker.ietf.org/doc/html/rfc7798#section-4.4.2 +std::optional ProcessApOrSingleNalu( + CopyOnWriteBuffer rtp_payload) { + if (rtp_payload.size() < kH265PayloadHeaderSizeBytes) { + RTC_LOG(LS_ERROR) << "RTP payload truncated."; + return std::nullopt; + } + const uint8_t* const payload_data = rtp_payload.cdata(); + std::optional parsed_payload( + std::in_place); + parsed_payload->video_header.width = 0; + parsed_payload->video_header.height = 0; + parsed_payload->video_header.codec = kVideoCodecH265; + parsed_payload->video_header.is_first_packet_in_frame = false; + + const uint8_t* nalu_start = payload_data + kH265PayloadHeaderSizeBytes; + const size_t nalu_length = rtp_payload.size() - kH265PayloadHeaderSizeBytes; + uint8_t nal_type = (payload_data[0] & kH265TypeMask) >> 1; + std::vector nalu_start_offsets; + CopyOnWriteBuffer video_payload; + if (nal_type == H265::NaluType::kAp) { + // Skip the aggregated packet header (Aggregated packet NAL type + length). + if (rtp_payload.size() <= kH265ApHeaderSizeBytes) { + RTC_LOG(LS_ERROR) << "Aggregated packet header truncated."; + return std::nullopt; + } + + if (!ParseApStartOffsets(nalu_start, nalu_length, &nalu_start_offsets)) { + RTC_LOG(LS_ERROR) + << "Aggregated packet with incorrect NALU packet lengths."; + return std::nullopt; + } + + nal_type = (payload_data[kH265ApHeaderSizeBytes] & kH265TypeMask) >> 1; + } else { + nalu_start_offsets.push_back(0); + } + parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameDelta; + + nalu_start_offsets.push_back(rtp_payload.size() + + kH265LengthFieldSizeBytes); // End offset. + for (size_t i = 0; i < nalu_start_offsets.size() - 1; ++i) { + size_t start_offset = nalu_start_offsets[i]; + // End offset is actually start offset for next unit, excluding length field + // so remove that from this units length. 
+ size_t end_offset = nalu_start_offsets[i + 1] - kH265LengthFieldSizeBytes; + if (end_offset - start_offset < kH265NalHeaderSizeBytes) { + RTC_LOG(LS_ERROR) << "Aggregated packet too short"; + return std::nullopt; + } + + // Insert start code before each NALU in aggregated packet. + video_payload.AppendData(kStartCode); + video_payload.AppendData(&payload_data[start_offset], + end_offset - start_offset); + + uint8_t nalu_type = (payload_data[start_offset] & kH265TypeMask) >> 1; + start_offset += kH265NalHeaderSizeBytes; + ArrayView nalu_data(&payload_data[start_offset], + end_offset - start_offset); + switch (nalu_type) { + case H265::NaluType::kBlaWLp: + case H265::NaluType::kBlaWRadl: + case H265::NaluType::kBlaNLp: + case H265::NaluType::kIdrWRadl: + case H265::NaluType::kIdrNLp: + case H265::NaluType::kCra: + // Mark IRAP(Intra Random Access Point) frames as key frames. Their NALU + // types are in the range of BLA_W_LP (16) to CRA (21), inclusive. + // https://datatracker.ietf.org/doc/html/rfc7798#section-3.1.1 + parsed_payload->video_header.frame_type = + VideoFrameType::kVideoFrameKey; + break; + case H265::NaluType::kSps: { + std::optional sps = + H265SpsParser::ParseSps(nalu_data); + + if (sps) { + // TODO(bugs.webrtc.org/13485): Implement the size calculation taking + // VPS->vui_parameters.def_disp_win_xx_offset into account. + parsed_payload->video_header.width = sps->width; + parsed_payload->video_header.height = sps->height; + } else { + RTC_LOG(LS_WARNING) << "Failed to parse SPS from SPS slice."; + } + } + ABSL_FALLTHROUGH_INTENDED; + case H265::NaluType::kVps: + case H265::NaluType::kPps: + case H265::NaluType::kTrailN: + case H265::NaluType::kTrailR: + case H265::NaluType::kTsaN: + case H265::NaluType::kTsaR: + case H265::NaluType::kStsaN: + case H265::NaluType::kStsaR: + case H265::NaluType::kRadlN: + case H265::NaluType::kRadlR: + // Slices below don't contain SPS or PPS ids. + case H265::NaluType::kAud: + case H265::NaluType::kPrefixSei: + case H265::NaluType::kSuffixSei: + break; + case H265::NaluType::kAp: + case H265::NaluType::kFu: + case H265::NaluType::kPaci: + RTC_LOG(LS_WARNING) << "Unexpected AP, FU or PACI received."; + return std::nullopt; + } + + // Spec 7.4.2.4.4: Order of NAL units and codec pictures. 
+ if ((nalu_type >= H265::NaluType::kVps && + nalu_type <= H265::NaluType::kAud) || + nalu_type == H265::NaluType::kPrefixSei) { + parsed_payload->video_header.is_first_packet_in_frame = true; + } else if (nalu_type >= H265::NaluType::kTrailN && + nalu_type <= H265::NaluType::kRsvVcl31) { + std::optional first_slice_segment_in_pic_flag = + H265BitstreamParser::IsFirstSliceSegmentInPic(nalu_data); + if (first_slice_segment_in_pic_flag.value_or(false)) { + parsed_payload->video_header.is_first_packet_in_frame = true; + } + } + } + parsed_payload->video_payload = video_payload; + return parsed_payload; +} + +// Fragmentation Unit (FU) structure: +// https://datatracker.ietf.org/doc/html/rfc7798#section-4.4.3 +std::optional ParseFuNalu( + CopyOnWriteBuffer rtp_payload) { + if (rtp_payload.size() < kH265FuHeaderSizeBytes + kH265NalHeaderSizeBytes) { + RTC_LOG(LS_ERROR) << "FU NAL units truncated."; + return std::nullopt; + } + std::optional parsed_payload( + std::in_place); + + uint8_t f = rtp_payload.cdata()[0] & kH265FBit; + uint8_t layer_id_h = rtp_payload.cdata()[0] & kH265LayerIDHMask; + uint8_t layer_id_l_unshifted = rtp_payload.cdata()[1] & kH265LayerIDLMask; + uint8_t tid = rtp_payload.cdata()[1] & kH265TIDMask; + + uint8_t original_nal_type = rtp_payload.cdata()[2] & kH265TypeMaskInFuHeader; + bool first_fragment = rtp_payload.cdata()[2] & kH265SBitMask; + bool is_first_packet_in_frame = false; + if (first_fragment) { + if (original_nal_type >= H265::NaluType::kTrailN && + original_nal_type <= H265::NaluType::kRsvVcl31) { + size_t slice_offset = + kH265FuHeaderSizeBytes + kH265PayloadHeaderSizeBytes; + std::optional first_slice_segment_in_pic_flag = + H265BitstreamParser::IsFirstSliceSegmentInPic( + ArrayView(rtp_payload.cdata() + slice_offset, + rtp_payload.size() - slice_offset)); + if (first_slice_segment_in_pic_flag.value_or(false)) { + is_first_packet_in_frame = true; + } + } + rtp_payload = rtp_payload.Slice( + kH265FuHeaderSizeBytes, rtp_payload.size() - kH265FuHeaderSizeBytes); + rtp_payload.MutableData()[0] = f | original_nal_type << 1 | layer_id_h; + rtp_payload.MutableData()[1] = layer_id_l_unshifted | tid; + CopyOnWriteBuffer video_payload; + // Insert start code before the first fragment in FU. + video_payload.AppendData(kStartCode); + video_payload.AppendData(rtp_payload); + parsed_payload->video_payload = video_payload; + } else { + parsed_payload->video_payload = rtp_payload.Slice( + kH265NalHeaderSizeBytes + kH265FuHeaderSizeBytes, + rtp_payload.size() - kH265NalHeaderSizeBytes - kH265FuHeaderSizeBytes); + } + + if (original_nal_type >= H265::NaluType::kBlaWLp && + original_nal_type <= H265::NaluType::kRsvIrapVcl23) { + // IRAP picture. + // https://datatracker.ietf.org/doc/html/rfc7798#section-3.1.1 + parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameKey; + } else { + parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameDelta; + } + parsed_payload->video_header.width = 0; + parsed_payload->video_header.height = 0; + parsed_payload->video_header.codec = kVideoCodecH265; + parsed_payload->video_header.is_first_packet_in_frame = + is_first_packet_in_frame; + + return parsed_payload; +} + +} // namespace + +std::optional +VideoRtpDepacketizerH265::Parse(CopyOnWriteBuffer rtp_payload) { + if (rtp_payload.empty()) { + RTC_LOG(LS_ERROR) << "Empty payload."; + return std::nullopt; + } + + uint8_t nal_type = (rtp_payload.cdata()[0] & kH265TypeMask) >> 1; + + if (nal_type == H265::NaluType::kFu) { + // Fragmented NAL units (FU). 
+ return ParseFuNalu(std::move(rtp_payload)); + } else if (nal_type == H265::NaluType::kPaci) { + // TODO(bugs.webrtc.org/13485): Implement PACI parse for H265 + RTC_LOG(LS_ERROR) << "Not support type:" << nal_type; + return std::nullopt; + } else { + // Single NAL unit packet or Aggregated packets (AP). + return ProcessApOrSingleNalu(std::move(rtp_payload)); + } +} + +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h b/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h new file mode 100644 index 0000000000..d9e075ce2f --- /dev/null +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_H265_H_ +#define MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_H265_H_ + +#include + +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" +#include "rtc_base/copy_on_write_buffer.h" + +namespace webrtc { +class VideoRtpDepacketizerH265 : public VideoRtpDepacketizer { + public: + ~VideoRtpDepacketizerH265() override = default; + + std::optional Parse(CopyOnWriteBuffer rtp_payload) override; +}; +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_H265_H_ diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_h265_unittest.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_h265_unittest.cc new file mode 100644 index 0000000000..ba7348aaa0 --- /dev/null +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_h265_unittest.cc @@ -0,0 +1,641 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h" + +#include +#include + +#include "api/array_view.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" +#include "common_video/h265/h265_common.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" +#include "rtc_base/buffer.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::Each; +using ::testing::ElementsAre; +using ::testing::ElementsAreArray; +using ::testing::Eq; +using ::testing::IsEmpty; +using ::testing::SizeIs; + +TEST(VideoRtpDepacketizerH265Test, SingleNalu) { + uint8_t packet[3] = {0x26, 0x02, + 0xFF}; // F=0, Type=19 (Idr), LayerId=0, TID=2. 
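// --------------------------------------------------------------------------
// Editorial sketch: decoding the two-byte HEVC NAL unit header referenced in
// the comment above (RFC 7798 section 1.1.4): forbidden_zero_bit(1),
// nal_unit_type(6), nuh_layer_id(6), nuh_temporal_id_plus1(3). The bytes
// 0x26 0x02 in the test decode to F=0, Type=19 (IDR_W_RADL), LayerId=0,
// TID=2. Struct and function names are illustrative.
#include <cstdint>

struct HevcNalHeader {
  bool forbidden_zero_bit;
  uint8_t type;      // 6 bits.
  uint8_t layer_id;  // 6 bits.
  uint8_t tid;       // 3 bits, nuh_temporal_id_plus1 (TemporalId + 1).
};

HevcNalHeader ParseHevcNalHeader(uint8_t b0, uint8_t b1) {
  HevcNalHeader h;
  h.forbidden_zero_bit = (b0 & 0x80) != 0;
  h.type = (b0 & 0x7E) >> 1;
  h.layer_id = static_cast<uint8_t>(((b0 & 0x01) << 5) | (b1 >> 3));
  h.tid = b1 & 0x07;
  return h;
}
// --------------------------------------------------------------------------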
+ uint8_t expected_packet[] = {0x00, 0x00, 0x00, 0x01, 0x26, 0x02, 0xff}; + CopyOnWriteBuffer rtp_payload(packet); + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(rtp_payload); + ASSERT_TRUE(parsed); + + EXPECT_THAT(MakeArrayView(parsed->video_payload.cdata(), + parsed->video_payload.size()), + ElementsAreArray(expected_packet)); + EXPECT_EQ(parsed->video_header.frame_type, VideoFrameType::kVideoFrameKey); + EXPECT_EQ(parsed->video_header.codec, kVideoCodecH265); + EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH265Test, SingleNaluSpsWithResolution) { + // SPS for a 1280x720 camera capture from ffmpeg on linux. Contains + // emulation bytes but no cropping. This buffer is generated + // with following command: + // 1) ffmpeg -i /dev/video0 -r 30 -c:v libx265 -s 1280x720 camera.h265 + // + // 2) Open camera.h265 and find the SPS, generally everything between the + // second and third start codes (0 0 0 1 or 0 0 1). The first two bytes + // 0x42 and 0x02 shows the nal header of SPS. + uint8_t packet[] = {0x42, 0x02, 0x01, 0x04, 0x08, 0x00, 0x00, 0x03, + 0x00, 0x9d, 0x08, 0x00, 0x00, 0x03, 0x00, 0x00, + 0x5d, 0xb0, 0x02, 0x80, 0x80, 0x2d, 0x16, 0x59, + 0x59, 0xa4, 0x93, 0x2b, 0x80, 0x40, 0x00, 0x00, + 0x03, 0x00, 0x40, 0x00, 0x00, 0x07, 0x82}; + uint8_t expected_packet[] = { + 0x00, 0x00, 0x00, 0x01, 0x42, 0x02, 0x01, 0x04, 0x08, 0x00, 0x00, + 0x03, 0x00, 0x9d, 0x08, 0x00, 0x00, 0x03, 0x00, 0x00, 0x5d, 0xb0, + 0x02, 0x80, 0x80, 0x2d, 0x16, 0x59, 0x59, 0xa4, 0x93, 0x2b, 0x80, + 0x40, 0x00, 0x00, 0x03, 0x00, 0x40, 0x00, 0x00, 0x07, 0x82}; + CopyOnWriteBuffer rtp_payload(packet); + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(rtp_payload); + ASSERT_TRUE(parsed); + + EXPECT_THAT(MakeArrayView(parsed->video_payload.cdata(), + parsed->video_payload.size()), + ElementsAreArray(expected_packet)); + EXPECT_EQ(parsed->video_header.codec, kVideoCodecH265); + EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); + EXPECT_EQ(parsed->video_header.width, 1280u); + EXPECT_EQ(parsed->video_header.height, 720u); +} + +TEST(VideoRtpDepacketizerH265Test, PaciPackets) { + uint8_t packet[2] = {0x64, 0x02}; // F=0, Type=50 (PACI), LayerId=0, TID=2. + CopyOnWriteBuffer rtp_payload(packet); + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(rtp_payload); + ASSERT_FALSE(parsed); +} + +TEST(VideoRtpDepacketizerH265Test, ApKey) { + uint8_t payload_header[] = {0x60, 0x02}; + uint8_t vps_nalu_size[] = {0, 0x17}; + uint8_t sps_nalu_size[] = {0, 0x27}; + uint8_t pps_nalu_size[] = {0, 0x32}; + uint8_t slice_nalu_size[] = {0, 0xa}; + uint8_t start_code[] = {0x00, 0x00, 0x00, 0x01}; + // VPS/SPS/PPS/IDR for a 1280x720 camera capture from ffmpeg on linux. + // Contains emulation bytes but no cropping. This buffer is generated with + // following command: 1) ffmpeg -i /dev/video0 -r 30 -c:v libx265 -s 1280x720 + // camera.h265 + // + // 2) Open camera.h265 and find: + // VPS - generally everything between the first and second start codes (0 0 0 + // 1 or 0 0 1). The first two bytes 0x40 and 0x02 shows the nal header of VPS. + // SPS - generally everything between the + // second and third start codes (0 0 0 1 or 0 0 1). The first two bytes + // 0x42 and 0x02 shows the nal header of SPS. + // PPS - generally everything between the third and fourth start codes (0 0 0 + // 1 or 0 0 1). The first two bytes 0x44 and 0x02 shows the nal header of PPS. 
+ // IDR - Part of the keyframe bitstream (no need to show all the bytes for + // depacketizer testing). The first two bytes 0x26 and 0x02 shows the nal + // header of IDR frame. + uint8_t vps[] = { + 0x40, 0x02, 0x1c, 0x01, 0xff, 0xff, 0x04, 0x08, 0x00, 0x00, 0x03, 0x00, + 0x9d, 0x08, 0x00, 0x00, 0x03, 0x00, 0x00, 0x78, 0x95, 0x98, 0x09, + }; + uint8_t sps[] = {0x42, 0x02, 0x01, 0x04, 0x08, 0x00, 0x00, 0x03, 0x00, 0x9d, + 0x08, 0x00, 0x00, 0x03, 0x00, 0x00, 0x5d, 0xb0, 0x02, 0x80, + 0x80, 0x2d, 0x16, 0x59, 0x59, 0xa4, 0x93, 0x2b, 0x80, 0x40, + 0x00, 0x00, 0x03, 0x00, 0x40, 0x00, 0x00, 0x07, 0x82}; + uint8_t pps[] = {0x44, 0x02, 0xa4, 0x04, 0x55, 0xa2, 0x6d, 0xce, 0xc0, 0xc3, + 0xed, 0x0b, 0xac, 0xbc, 0x00, 0xc4, 0x44, 0x2e, 0xf7, 0x55, + 0xfd, 0x05, 0x86, 0x92, 0x19, 0xdf, 0x58, 0xec, 0x38, 0x36, + 0xb7, 0x7c, 0x00, 0x15, 0x33, 0x78, 0x03, 0x67, 0x26, 0x0f, + 0x7b, 0x30, 0x1c, 0xd7, 0xd4, 0x3a, 0xec, 0xad, 0xef, 0x73}; + uint8_t idr[] = {0x26, 0x02, 0xaf, 0x08, 0x4a, 0x31, 0x11, 0x15, 0xe5, 0xc0}; + + Buffer packet; + packet.AppendData(payload_header); + packet.AppendData(vps_nalu_size); + packet.AppendData(vps); + packet.AppendData(sps_nalu_size); + packet.AppendData(sps); + packet.AppendData(pps_nalu_size); + packet.AppendData(pps); + packet.AppendData(slice_nalu_size); + packet.AppendData(idr); + + Buffer expected_packet; + expected_packet.AppendData(start_code); + expected_packet.AppendData(vps); + expected_packet.AppendData(start_code); + expected_packet.AppendData(sps); + expected_packet.AppendData(start_code); + expected_packet.AppendData(pps); + expected_packet.AppendData(start_code); + expected_packet.AppendData(idr); + + // clang-format on + CopyOnWriteBuffer rtp_payload(packet); + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(rtp_payload); + ASSERT_TRUE(parsed); + + EXPECT_THAT(MakeArrayView(parsed->video_payload.cdata(), + parsed->video_payload.size()), + ElementsAreArray(expected_packet)); + EXPECT_EQ(parsed->video_header.frame_type, VideoFrameType::kVideoFrameKey); + EXPECT_EQ(parsed->video_header.codec, kVideoCodecH265); + EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH265Test, ApNaluSpsWithResolution) { + uint8_t payload_header[] = {0x60, 0x02}; + uint8_t vps_nalu_size[] = {0, 0x17}; + uint8_t sps_nalu_size[] = {0, 0x27}; + uint8_t pps_nalu_size[] = {0, 0x32}; + uint8_t slice_nalu_size[] = {0, 0xa}; + uint8_t start_code[] = {0x00, 0x00, 0x00, 0x01}; + // The VPS/SPS/PPS/IDR bytes are generated using the same way as above case. 
+ uint8_t vps[] = { + 0x40, 0x02, 0x1c, 0x01, 0xff, 0xff, 0x04, 0x08, 0x00, 0x00, 0x03, 0x00, + 0x9d, 0x08, 0x00, 0x00, 0x03, 0x00, 0x00, 0x78, 0x95, 0x98, 0x09, + }; + uint8_t sps[] = {0x42, 0x02, 0x01, 0x04, 0x08, 0x00, 0x00, 0x03, 0x00, 0x9d, + 0x08, 0x00, 0x00, 0x03, 0x00, 0x00, 0x5d, 0xb0, 0x02, 0x80, + 0x80, 0x2d, 0x16, 0x59, 0x59, 0xa4, 0x93, 0x2b, 0x80, 0x40, + 0x00, 0x00, 0x03, 0x00, 0x40, 0x00, 0x00, 0x07, 0x82}; + uint8_t pps[] = {0x44, 0x02, 0xa4, 0x04, 0x55, 0xa2, 0x6d, 0xce, 0xc0, 0xc3, + 0xed, 0x0b, 0xac, 0xbc, 0x00, 0xc4, 0x44, 0x2e, 0xf7, 0x55, + 0xfd, 0x05, 0x86, 0x92, 0x19, 0xdf, 0x58, 0xec, 0x38, 0x36, + 0xb7, 0x7c, 0x00, 0x15, 0x33, 0x78, 0x03, 0x67, 0x26, 0x0f, + 0x7b, 0x30, 0x1c, 0xd7, 0xd4, 0x3a, 0xec, 0xad, 0xef, 0x73}; + uint8_t idr[] = {0x26, 0x02, 0xaf, 0x08, 0x4a, 0x31, 0x11, 0x15, 0xe5, 0xc0}; + + Buffer packet; + packet.AppendData(payload_header); + packet.AppendData(vps_nalu_size); + packet.AppendData(vps); + packet.AppendData(sps_nalu_size); + packet.AppendData(sps); + packet.AppendData(pps_nalu_size); + packet.AppendData(pps); + packet.AppendData(slice_nalu_size); + packet.AppendData(idr); + + Buffer expected_packet; + expected_packet.AppendData(start_code); + expected_packet.AppendData(vps); + expected_packet.AppendData(start_code); + expected_packet.AppendData(sps); + expected_packet.AppendData(start_code); + expected_packet.AppendData(pps); + expected_packet.AppendData(start_code); + expected_packet.AppendData(idr); + + CopyOnWriteBuffer rtp_payload(packet); + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(rtp_payload); + ASSERT_TRUE(parsed); + + EXPECT_THAT(MakeArrayView(parsed->video_payload.cdata(), + parsed->video_payload.size()), + ElementsAreArray(expected_packet)); + EXPECT_EQ(parsed->video_header.frame_type, VideoFrameType::kVideoFrameKey); + EXPECT_EQ(parsed->video_header.codec, kVideoCodecH265); + EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); + EXPECT_EQ(parsed->video_header.width, 1280u); + EXPECT_EQ(parsed->video_header.height, 720u); +} + +TEST(VideoRtpDepacketizerH265Test, EmptyApRejected) { + uint8_t lone_empty_packet[] = {0x60, 0x02, // F=0, Type=48 (kH265Ap). + 0x00, 0x00}; + uint8_t leading_empty_packet[] = {0x60, 0x02, // F=0, Type=48 (kH265Ap). + 0x00, 0x00, 0x00, 0x05, 0x26, + 0x02, 0xFF, 0x00, 0x11}; // kIdrWRadl + uint8_t middle_empty_packet[] = {0x60, 0x02, // F=0, Type=48 (kH265Ap). + 0x00, 0x04, 0x26, 0x02, 0xFF, + 0x00, 0x00, 0x00, 0x00, 0x05, + 0x26, 0x02, 0xFF, 0x00, 0x11}; // kIdrWRadl + uint8_t trailing_empty_packet[] = {0x60, 0x02, // F=0, Type=48 (kH265Ap). + 0x00, 0x04, 0x26, + 0x02, 0xFF, 0x00, // kIdrWRadl + 0x00, 0x00}; + + VideoRtpDepacketizerH265 depacketizer; + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(lone_empty_packet))); + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(leading_empty_packet))); + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(middle_empty_packet))); + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(trailing_empty_packet))); +} + +TEST(VideoRtpDepacketizerH265Test, ApDelta) { + uint8_t packet[20] = {0x60, 0x02, // F=0, Type=48 (kH265Ap). + // Length, nal header, payload. 
+ 0, 0x03, 0x02, 0x02, 0xFF, // TrailR + 0, 0x04, 0x02, 0x02, 0xFF, 0x00, // TrailR + 0, 0x05, 0x02, 0x02, 0xFF, 0x00, 0x11}; // TrailR + uint8_t expected_packet[] = { + 0x00, 0x00, 0x00, 0x01, 0x02, 0x02, 0xFF, // TrailR + 0x00, 0x00, 0x00, 0x01, 0x02, 0x02, 0xFF, 0x00, // TrailR + 0x00, 0x00, 0x00, 0x01, 0x02, 0x02, 0xFF, 0x00, 0x11}; // TrailR + CopyOnWriteBuffer rtp_payload(packet); + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(rtp_payload); + ASSERT_TRUE(parsed); + + EXPECT_THAT(MakeArrayView(parsed->video_payload.cdata(), + parsed->video_payload.size()), + ElementsAreArray(expected_packet)); + + EXPECT_EQ(parsed->video_header.frame_type, VideoFrameType::kVideoFrameDelta); + EXPECT_EQ(parsed->video_header.codec, kVideoCodecH265); + EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH265Test, Fu) { + // clang-format off + uint8_t packet1[] = { + 0x62, 0x02, // F=0, Type=49 (kH265Fu). + 0x93, // FU header kH265SBitMask | H265::kIdrWRadl. + 0xaf, 0x08, 0x4a, 0x31, 0x11, 0x15, 0xe5, 0xc0 // Payload. + }; + // clang-format on + // F=0, Type=19, (kIdrWRadl), tid=1, nalu header: 00100110 00000010, which is + // 0x26, 0x02 + const uint8_t kExpected1[] = {0x00, 0x00, 0x00, 0x01, 0x26, 0x02, 0xaf, + 0x08, 0x4a, 0x31, 0x11, 0x15, 0xe5, 0xc0}; + + uint8_t packet2[] = { + 0x62, 0x02, // F=0, Type=49 (kH265Fu). + H265::kBlaWLp, // FU header. + 0x02 // Payload. + }; + const uint8_t kExpected2[] = {0x02}; + + uint8_t packet3[] = { + 0x62, 0x02, // F=0, Type=49 (kH265Fu). + 0x53, // FU header kH265EBitMask | H265::kIdrWRadl. + 0x03 // Payload. + }; + const uint8_t kExpected3[] = {0x03}; + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed1 = + depacketizer.Parse(CopyOnWriteBuffer(packet1)); + ASSERT_TRUE(parsed1); + // We expect that the first packet is one byte shorter since the FU header + // has been replaced by the original nal header. + EXPECT_THAT(MakeArrayView(parsed1->video_payload.cdata(), + parsed1->video_payload.size()), + ElementsAreArray(kExpected1)); + EXPECT_EQ(parsed1->video_header.frame_type, VideoFrameType::kVideoFrameKey); + EXPECT_EQ(parsed1->video_header.codec, kVideoCodecH265); + EXPECT_TRUE(parsed1->video_header.is_first_packet_in_frame); + + // Following packets will be 2 bytes shorter since they will only be appended + // onto the first packet. 
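// --------------------------------------------------------------------------
// Editorial sketch: why the first FU fragment above turns 0x62 0x02 0x93 ...
// into 0x26 0x02 ... . Per RFC 7798 section 4.4.3, the start fragment keeps
// F, the LayerId bits and TID from the FU payload header but takes the 6-bit
// type from FuType in the FU header. Names below are illustrative.
#include <cstdint>

// Rebuilds the two-byte NAL unit header of the original NAL unit from the
// FU payload header bytes and the FU header byte.
void RebuildHevcNalHeader(uint8_t payload_hdr0,
                          uint8_t payload_hdr1,
                          uint8_t fu_header,
                          uint8_t out[2]) {
  // Keep F (0x80) and the high LayerId bit (0x01); replace the 6-bit type.
  out[0] = static_cast<uint8_t>((payload_hdr0 & 0x81) |
                                ((fu_header & 0x3F) << 1));
  out[1] = payload_hdr1;  // Low LayerId bits and TID are unchanged.
}
// Example: payload header 0x62 0x02 (Type=49) with FU header 0x93
// (S bit | FuType=19) rebuilds to 0x26 0x02, i.e. IDR_W_RADL.
// --------------------------------------------------------------------------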
+ auto parsed2 = depacketizer.Parse(CopyOnWriteBuffer(packet2)); + EXPECT_THAT(MakeArrayView(parsed2->video_payload.cdata(), + parsed2->video_payload.size()), + ElementsAreArray(kExpected2)); + EXPECT_FALSE(parsed2->video_header.is_first_packet_in_frame); + EXPECT_EQ(parsed2->video_header.frame_type, VideoFrameType::kVideoFrameKey); + EXPECT_EQ(parsed2->video_header.codec, kVideoCodecH265); + + auto parsed3 = depacketizer.Parse(CopyOnWriteBuffer(packet3)); + EXPECT_THAT(MakeArrayView(parsed3->video_payload.cdata(), + parsed3->video_payload.size()), + ElementsAreArray(kExpected3)); + EXPECT_FALSE(parsed3->video_header.is_first_packet_in_frame); + EXPECT_EQ(parsed3->video_header.frame_type, VideoFrameType::kVideoFrameKey); + EXPECT_EQ(parsed3->video_header.codec, kVideoCodecH265); +} + +TEST(VideoRtpDepacketizerH265Test, EmptyPayload) { + CopyOnWriteBuffer empty; + VideoRtpDepacketizerH265 depacketizer; + EXPECT_FALSE(depacketizer.Parse(empty)); +} + +TEST(VideoRtpDepacketizerH265Test, TruncatedFuNalu) { + const uint8_t kPayload[] = {0x62}; + VideoRtpDepacketizerH265 depacketizer; + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(kPayload))); +} + +TEST(VideoRtpDepacketizerH265Test, TruncatedSingleApNalu) { + const uint8_t kPayload[] = {0xe0, 0x02, 0x40}; + VideoRtpDepacketizerH265 depacketizer; + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(kPayload))); +} + +TEST(VideoRtpDepacketizerH265Test, ApPacketWithTruncatedNalUnits) { + const uint8_t kPayload[] = {0x60, 0x02, 0xED, 0xDF}; + VideoRtpDepacketizerH265 depacketizer; + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(kPayload))); +} + +TEST(VideoRtpDepacketizerH265Test, TruncationJustAfterSingleApNalu) { + const uint8_t kPayload[] = {0x60, 0x02, 0x40, 0x40}; + VideoRtpDepacketizerH265 depacketizer; + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(kPayload))); +} + +TEST(VideoRtpDepacketizerH265Test, ShortSpsPacket) { + const uint8_t kPayload[] = {0x40, 0x80, 0x00}; + VideoRtpDepacketizerH265 depacketizer; + EXPECT_TRUE(depacketizer.Parse(CopyOnWriteBuffer(kPayload))); +} + +TEST(VideoRtpDepacketizerH265Test, InvalidNaluSizeApNalu) { + const uint8_t kPayload[] = {0x60, 0x02, // F=0, Type=48 (kH265Ap). + // Length, nal header, payload. + 0, 0xff, 0x02, 0x02, 0xFF, // TrailR + 0, 0x05, 0x02, 0x02, 0xFF, 0x00, + 0x11}; // TrailR; + VideoRtpDepacketizerH265 depacketizer; + EXPECT_FALSE(depacketizer.Parse(CopyOnWriteBuffer(kPayload))); +} + +TEST(VideoRtpDepacketizerH265Test, PrefixSeiSetsFirstPacketInFrame) { + const uint8_t kPayload[] = { + 0x4e, 0x02, // F=0, Type=39 (H265::kPrefixSei). + 0x03, 0x03, 0x03, 0x03 // Payload. + }; + VideoRtpDepacketizerH265 depacketizer; + auto parsed = depacketizer.Parse(CopyOnWriteBuffer(kPayload)); + ASSERT_TRUE(parsed.has_value()); + EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH265Test, ApVpsSpsPpsMultiIdrSlices) { + uint8_t payload_header[] = {0x60, 0x02}; + uint8_t vps_nalu_size[] = {0, 0x17}; + uint8_t sps_nalu_size[] = {0, 0x27}; + uint8_t pps_nalu_size[] = {0, 0x32}; + uint8_t slice_nalu_size[] = {0, 0xa}; + uint8_t start_code[] = {0x00, 0x00, 0x00, 0x01}; + // The VPS/SPS/PPS/IDR bytes are generated using the same way as above case. + // Slices are truncated to contain enough data for test. 
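// --------------------------------------------------------------------------
// Editorial sketch of what the test body below assembles by hand: an HEVC
// aggregation packet is a two-byte payload header with type 48 followed by
// [16-bit big-endian size | NAL unit] entries (RFC 7798 section 4.4.2).
// The helper name is illustrative.
#include <cstddef>
#include <cstdint>
#include <vector>

void AppendApEntry(std::vector<uint8_t>& ap, const uint8_t* nalu,
                   size_t size) {
  ap.push_back(static_cast<uint8_t>(size >> 8));
  ap.push_back(static_cast<uint8_t>(size & 0xFF));
  ap.insert(ap.end(), nalu, nalu + size);
}
// Usage: start with {0x60, 0x02} (F=0, Type=48, TID=2) and append the VPS,
// SPS, PPS and slice NAL units declared below.
// --------------------------------------------------------------------------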
+ uint8_t vps[] = {0x40, 0x02, 0x1c, 0x01, 0xff, 0xff, 0x04, 0x08, + 0x00, 0x00, 0x03, 0x00, 0x9d, 0x08, 0x00, 0x00, + 0x03, 0x00, 0x00, 0x78, 0x95, 0x98, 0x09}; + uint8_t sps[] = {0x42, 0x02, 0x01, 0x04, 0x08, 0x00, 0x00, 0x03, 0x00, 0x9d, + 0x08, 0x00, 0x00, 0x03, 0x00, 0x00, 0x5d, 0xb0, 0x02, 0x80, + 0x80, 0x2d, 0x16, 0x59, 0x59, 0xa4, 0x93, 0x2b, 0x80, 0x40, + 0x00, 0x00, 0x03, 0x00, 0x40, 0x00, 0x00, 0x07, 0x82}; + uint8_t pps[] = {0x44, 0x02, 0xa4, 0x04, 0x55, 0xa2, 0x6d, 0xce, 0xc0, 0xc3, + 0xed, 0x0b, 0xac, 0xbc, 0x00, 0xc4, 0x44, 0x2e, 0xf7, 0x55, + 0xfd, 0x05, 0x86, 0x92, 0x19, 0xdf, 0x58, 0xec, 0x38, 0x36, + 0xb7, 0x7c, 0x00, 0x15, 0x33, 0x78, 0x03, 0x67, 0x26, 0x0f, + 0x7b, 0x30, 0x1c, 0xd7, 0xd4, 0x3a, 0xec, 0xad, 0xef, 0x73}; + uint8_t idr_slice1[] = {0x28, 0x01, 0xac, 0x6d, 0xa0, + 0x7b, 0x4c, 0xe2, 0x09, 0xef}; + uint8_t idr_slice2[] = {0x28, 0x01, 0x27, 0xf8, 0x63, + 0x6d, 0x7b, 0x6f, 0xcf, 0xff}; + + CopyOnWriteBuffer rtp_payload; + rtp_payload.AppendData(payload_header); + rtp_payload.AppendData(vps_nalu_size); + rtp_payload.AppendData(vps); + rtp_payload.AppendData(sps_nalu_size); + rtp_payload.AppendData(sps); + rtp_payload.AppendData(pps_nalu_size); + rtp_payload.AppendData(pps); + rtp_payload.AppendData(slice_nalu_size); + rtp_payload.AppendData(idr_slice1); + rtp_payload.AppendData(slice_nalu_size); + rtp_payload.AppendData(idr_slice2); + + Buffer expected_packet; + expected_packet.AppendData(start_code); + expected_packet.AppendData(vps); + expected_packet.AppendData(start_code); + expected_packet.AppendData(sps); + expected_packet.AppendData(start_code); + expected_packet.AppendData(pps); + expected_packet.AppendData(start_code); + expected_packet.AppendData(idr_slice1); + expected_packet.AppendData(start_code); + expected_packet.AppendData(idr_slice2); + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(rtp_payload); + ASSERT_TRUE(parsed.has_value()); + + EXPECT_THAT(MakeArrayView(parsed->video_payload.cdata(), + parsed->video_payload.size()), + ElementsAreArray(expected_packet)); + EXPECT_EQ(parsed->video_header.frame_type, VideoFrameType::kVideoFrameKey); + EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH265Test, ApMultiNonFirstSlicesFromSingleNonIdrFrame) { + uint8_t payload_header[] = {0x60, 0x02}; + uint8_t slice_nalu_size[] = {0, 0xa}; + uint8_t start_code[] = {0x00, 0x00, 0x00, 0x01}; + // First few bytes of two non-IDR slices from the same frame, both with the + // first_slice_segment_in_pic_flag set to 0. 
+ uint8_t non_idr_slice1[] = {0x02, 0x01, 0x23, 0xfc, 0x20, + 0x42, 0xad, 0x1b, 0x68, 0xdf}; + uint8_t non_idr_slice2[] = {0x02, 0x01, 0x27, 0xf8, 0x20, + 0x42, 0xad, 0x1b, 0x68, 0xe0}; + + CopyOnWriteBuffer rtp_payload; + rtp_payload.AppendData(payload_header); + rtp_payload.AppendData(slice_nalu_size); + rtp_payload.AppendData(non_idr_slice1); + rtp_payload.AppendData(slice_nalu_size); + rtp_payload.AppendData(non_idr_slice2); + + Buffer expected_packet; + expected_packet.AppendData(start_code); + expected_packet.AppendData(non_idr_slice1); + expected_packet.AppendData(start_code); + expected_packet.AppendData(non_idr_slice2); + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(rtp_payload); + ASSERT_TRUE(parsed.has_value()); + + EXPECT_THAT(MakeArrayView(parsed->video_payload.cdata(), + parsed->video_payload.size()), + ElementsAreArray(expected_packet)); + EXPECT_EQ(parsed->video_header.frame_type, VideoFrameType::kVideoFrameDelta); + EXPECT_FALSE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH265Test, ApFirstTwoSlicesFromSingleNonIdrFrame) { + uint8_t payload_header[] = {0x60, 0x02}; + uint8_t slice_nalu_size[] = {0, 0xa}; + uint8_t start_code[] = {0x00, 0x00, 0x00, 0x01}; + // First few bytes of two non-IDR slices from the same frame, with the first + // slice's first_slice_segment_in_pic_flag set to 1, and second set to 0. + uint8_t non_idr_slice1[] = {0x02, 0x01, 0xa4, 0x08, 0x55, + 0xa3, 0x6d, 0xcc, 0xcf, 0x26}; + uint8_t non_idr_slice2[] = {0x02, 0x01, 0x23, 0xfc, 0x20, + 0x42, 0xad, 0x1b, 0x68, 0xdf}; + + CopyOnWriteBuffer rtp_payload; + rtp_payload.AppendData(payload_header); + rtp_payload.AppendData(slice_nalu_size); + rtp_payload.AppendData(non_idr_slice1); + rtp_payload.AppendData(slice_nalu_size); + rtp_payload.AppendData(non_idr_slice2); + + Buffer expected_packet; + expected_packet.AppendData(start_code); + expected_packet.AppendData(non_idr_slice1); + expected_packet.AppendData(start_code); + expected_packet.AppendData(non_idr_slice2); + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(rtp_payload); + ASSERT_TRUE(parsed.has_value()); + + EXPECT_THAT(MakeArrayView(parsed->video_payload.cdata(), + parsed->video_payload.size()), + ElementsAreArray(expected_packet)); + EXPECT_EQ(parsed->video_header.frame_type, VideoFrameType::kVideoFrameDelta); + EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH265Test, SingleNaluFromIdrSecondSlice) { + // First few bytes of the second slice of an IDR_N_LP nalu with + // first_slice_segment_in_pic_flag set to 0. + const uint8_t kPayload[] = {0x28, 0x01, 0x27, 0xf8, 0x63, 0x6d, 0x7b, 0x6f, + 0xcf, 0xff, 0x0d, 0xf5, 0xc7, 0xfe, 0x57, 0x77, + 0xdc, 0x29, 0x24, 0x89, 0x89, 0xea, 0xd1, 0x88}; + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(kPayload)); + ASSERT_TRUE(parsed.has_value()); + EXPECT_EQ(parsed->video_header.frame_type, VideoFrameType::kVideoFrameKey); + EXPECT_FALSE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH265Test, SingleNaluFromNonIdrSecondSlice) { + // First few bytes of the second slice of an TRAIL_R nalu with + // first_slice_segment_in_pic_flag set to 0. 
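// --------------------------------------------------------------------------
// Editorial sketch for the flag mentioned above: in HEVC, the
// slice_segment_header starts with first_slice_segment_in_pic_flag as its
// very first bit (H.265 section 7.3.6.1), i.e. the most significant bit of
// the byte that follows the two-byte NAL unit header. Emulation-prevention
// bytes cannot occur that early, so a direct byte test suffices here. The
// helper name is illustrative.
#include <cstddef>
#include <cstdint>
#include <optional>

std::optional<bool> FirstSliceSegmentInPic(const uint8_t* vcl_nalu,
                                           size_t size) {
  if (size < 3) return std::nullopt;  // 2-byte header + at least one byte.
  return (vcl_nalu[2] & 0x80) != 0;
}
// For the payload below, the third byte is 0x23, so the flag is 0 and the
// slice does not start the picture.
// --------------------------------------------------------------------------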
+ const uint8_t kPayload[] = {0x02, 0x01, 0x23, 0xfc, 0x20, 0x22, 0xad, 0x13, + 0x68, 0xce, 0xc3, 0x5a, 0x00, 0xdc, 0xeb, 0x86, + 0x4b, 0x0b, 0xa7, 0x6a, 0xe1, 0x9c, 0x5c, 0xea}; + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(kPayload)); + ASSERT_TRUE(parsed.has_value()); + EXPECT_EQ(parsed->video_header.frame_type, VideoFrameType::kVideoFrameDelta); + EXPECT_FALSE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH265Test, FuFromIdrFrameSecondSlice) { + // First few bytes of the second slice of an IDR_N_LP nalu with + // first_slice_segment_in_pic_flag set to 0. + const uint8_t kPayload[] = { + 0x62, 0x02, // F=0, Type=49 (H265::kFu). + 0x93, // FU header kH265SBitMask | H265::kIdrWRadl. + 0x23, 0xfc, 0x20, 0x22, 0xad, 0x13, 0x68, 0xce, 0xc3, 0x5a, 0x00, 0xdc}; + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(kPayload)); + ASSERT_TRUE(parsed.has_value()); + EXPECT_EQ(parsed->video_header.frame_type, VideoFrameType::kVideoFrameKey); + EXPECT_FALSE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH265Test, FuFromNonIdrFrameSecondSlice) { + // First few bytes of the second slice of an TRAIL_R nalu with + // first_slice_segment_in_pic_flag set to 0. + const uint8_t kPayload[] = {0x62, 0x02, // F=0, Type=49 (H265::kFu). + 0x80, // FU header kH265SBitMask | H265::kTrailR. + 0x23, 0xfc, 0x20, 0x22, 0xad, 0x13, + 0x68, 0xce, 0xc3, 0x5a, 0x00, 0xdc}; + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(kPayload)); + ASSERT_TRUE(parsed.has_value()); + EXPECT_EQ(parsed->video_header.frame_type, VideoFrameType::kVideoFrameDelta); + EXPECT_FALSE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH265Test, AudSetsFirstPacketInFrame) { + const uint8_t kPayload[] = {0x46, 0x01, 0x10}; + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(kPayload)); + ASSERT_TRUE(parsed.has_value()); + EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH265Test, PpsSetsFirstPacketInFrame) { + const uint8_t kPayload[] = { + 0x44, 0x02, 0xa4, 0x04, 0x55, 0xa2, 0x6d, 0xce, 0xc0, 0xc3, + 0xed, 0x0b, 0xac, 0xbc, 0x00, 0xc4, 0x44, 0x2e, 0xf7, 0x55, + 0xfd, 0x05, 0x86, 0x92, 0x19, 0xdf, 0x58, 0xec, 0x38, 0x36, + 0xb7, 0x7c, 0x00, 0x15, 0x33, 0x78, 0x03, 0x67, 0x26, 0x0f, + 0x7b, 0x30, 0x1c, 0xd7, 0xd4, 0x3a, 0xec, 0xad, 0xef, 0x73}; + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(kPayload)); + ASSERT_TRUE(parsed.has_value()); + EXPECT_TRUE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH265Test, SuffixSeiNotSetFirstPacketInFrame) { + const uint8_t kPayload[] = {0x50, 0x01, 0x81, 0x01, 0x03, 0x80}; + + VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(kPayload)); + ASSERT_TRUE(parsed.has_value()); + EXPECT_FALSE(parsed->video_header.is_first_packet_in_frame); +} + +TEST(VideoRtpDepacketizerH265Test, EmptyNaluPayload) { + const uint8_t kPayload[] = {0x48, 0x00}; // F=0, Type=36 (H265::kEos). 
+ VideoRtpDepacketizerH265 depacketizer; + std::optional parsed = + depacketizer.Parse(CopyOnWriteBuffer(kPayload)); + ASSERT_TRUE(parsed.has_value()); +} + +} // namespace +} // namespace webrtc diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc index 81b4e4ab53..6af4238ee9 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc @@ -10,17 +10,17 @@ #include "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h" +#include #include -#include "absl/types/optional.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "rtc_base/copy_on_write_buffer.h" namespace webrtc { -absl::optional -VideoRtpDepacketizerRaw::Parse(rtc::CopyOnWriteBuffer rtp_payload) { - absl::optional parsed(absl::in_place); +std::optional +VideoRtpDepacketizerRaw::Parse(CopyOnWriteBuffer rtp_payload) { + std::optional parsed(std::in_place); parsed->video_payload = std::move(rtp_payload); return parsed; } diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h b/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h index 59c8695352..2ca09641e1 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h @@ -11,7 +11,8 @@ #ifndef MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_RAW_H_ #define MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_RAW_H_ -#include "absl/types/optional.h" +#include + #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "rtc_base/copy_on_write_buffer.h" @@ -21,8 +22,7 @@ class VideoRtpDepacketizerRaw : public VideoRtpDepacketizer { public: ~VideoRtpDepacketizerRaw() override = default; - absl::optional Parse( - rtc::CopyOnWriteBuffer rtp_payload) override; + std::optional Parse(CopyOnWriteBuffer rtp_payload) override; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_raw_unittest.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_raw_unittest.cc index 36c826ab84..126ec86158 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_raw_unittest.cc +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_raw_unittest.cc @@ -11,8 +11,10 @@ #include "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h" #include +#include -#include "absl/types/optional.h" +#include "api/video/video_codec_type.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "rtc_base/copy_on_write_buffer.h" #include "test/gtest.h" @@ -21,10 +23,10 @@ namespace { TEST(VideoRtpDepacketizerRaw, PassRtpPayloadAsVideoPayload) { const uint8_t kPayload[] = {0x05, 0x25, 0x52}; - rtc::CopyOnWriteBuffer rtp_payload(kPayload); + CopyOnWriteBuffer rtp_payload(kPayload); VideoRtpDepacketizerRaw depacketizer; - absl::optional parsed = + std::optional parsed = depacketizer.Parse(rtp_payload); ASSERT_TRUE(parsed); @@ -36,10 +38,10 @@ TEST(VideoRtpDepacketizerRaw, PassRtpPayloadAsVideoPayload) { TEST(VideoRtpDepacketizerRaw, UsesDefaultValuesForVideoHeader) { const uint8_t kPayload[] = {0x05, 0x25, 0x52}; - rtc::CopyOnWriteBuffer rtp_payload(kPayload); + CopyOnWriteBuffer rtp_payload(kPayload); VideoRtpDepacketizerRaw depacketizer; - absl::optional parsed = + std::optional parsed = depacketizer.Parse(rtp_payload); ASSERT_TRUE(parsed); diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc index d6bd33c24d..8d9bf6da7b 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc +++ 
b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc @@ -13,10 +13,17 @@ #include #include -#include "absl/types/optional.h" +#include +#include + #include "api/array_view.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" // VP8 payload descriptor @@ -131,14 +138,13 @@ int ParseVP8Descriptor(RTPVideoHeaderVP8* vp8, } // namespace -absl::optional -VideoRtpDepacketizerVp8::Parse(rtc::CopyOnWriteBuffer rtp_payload) { - rtc::ArrayView payload(rtp_payload.cdata(), - rtp_payload.size()); - absl::optional result(absl::in_place); +std::optional +VideoRtpDepacketizerVp8::Parse(CopyOnWriteBuffer rtp_payload) { + ArrayView payload(rtp_payload.cdata(), rtp_payload.size()); + std::optional result(std::in_place); int offset = ParseRtpPayload(payload, &result->video_header); if (offset == kFailedToParse) - return absl::nullopt; + return std::nullopt; RTC_DCHECK_LT(offset, rtp_payload.size()); result->video_payload = rtp_payload.Slice(offset, rtp_payload.size() - offset); @@ -146,7 +152,7 @@ VideoRtpDepacketizerVp8::Parse(rtc::CopyOnWriteBuffer rtp_payload) { } int VideoRtpDepacketizerVp8::ParseRtpPayload( - rtc::ArrayView rtp_payload, + ArrayView rtp_payload, RTPVideoHeader* video_header) { RTC_DCHECK(video_header); if (rtp_payload.empty()) { diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h index 3d7cb3291d..a01db9a04c 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h @@ -12,8 +12,8 @@ #define MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_VP8_H_ #include +#include -#include "absl/types/optional.h" #include "api/array_view.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" @@ -30,11 +30,10 @@ class VideoRtpDepacketizerVp8 : public VideoRtpDepacketizer { // Parses vp8 rtp payload descriptor. // Returns zero on error or vp8 payload header offset on success. 
- static int ParseRtpPayload(rtc::ArrayView rtp_payload, + static int ParseRtpPayload(ArrayView rtp_payload, RTPVideoHeader* video_header); - absl::optional Parse( - rtc::CopyOnWriteBuffer rtp_payload) override; + std::optional Parse(CopyOnWriteBuffer rtp_payload) override; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8_unittest.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8_unittest.cc index 77469cf935..9b7eee1e17 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8_unittest.cc +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp8_unittest.cc @@ -10,11 +10,20 @@ #include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h" +#include +#include +#include + #include "api/array_view.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "modules/rtp_rtcp/source/rtp_format_vp8.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "rtc_base/copy_on_write_buffer.h" -#include "test/gmock.h" #include "test/gtest.h" // VP8 payload descriptor @@ -60,7 +69,7 @@ TEST(VideoRtpDepacketizerVp8Test, BasicHeader) { EXPECT_EQ(video_header.frame_type, VideoFrameType::kVideoFrameDelta); EXPECT_EQ(video_header.codec, kVideoCodecVP8); const auto& vp8_header = - absl::get(video_header.video_type_header); + std::get(video_header.video_type_header); EXPECT_FALSE(vp8_header.nonReference); EXPECT_TRUE(vp8_header.beginningOfPartition); EXPECT_EQ(vp8_header.partitionId, 4); @@ -82,7 +91,7 @@ TEST(VideoRtpDepacketizerVp8Test, OneBytePictureID) { EXPECT_EQ(offset, 3); const auto& vp8_header = - absl::get(video_header.video_type_header); + std::get(video_header.video_type_header); EXPECT_EQ(vp8_header.pictureId, kPictureId); } @@ -99,7 +108,7 @@ TEST(VideoRtpDepacketizerVp8Test, TwoBytePictureID) { EXPECT_EQ(offset, 4); const auto& vp8_header = - absl::get(video_header.video_type_header); + std::get(video_header.video_type_header); EXPECT_EQ(vp8_header.pictureId, kPictureId); } @@ -115,7 +124,7 @@ TEST(VideoRtpDepacketizerVp8Test, Tl0PicIdx) { EXPECT_EQ(offset, 3); const auto& vp8_header = - absl::get(video_header.video_type_header); + std::get(video_header.video_type_header); EXPECT_EQ(vp8_header.tl0PicIdx, kTl0PicIdx); } @@ -130,7 +139,7 @@ TEST(VideoRtpDepacketizerVp8Test, TIDAndLayerSync) { EXPECT_EQ(offset, 3); const auto& vp8_header = - absl::get(video_header.video_type_header); + std::get(video_header.video_type_header); EXPECT_EQ(vp8_header.temporalIdx, 2); EXPECT_FALSE(vp8_header.layerSync); } @@ -147,7 +156,7 @@ TEST(VideoRtpDepacketizerVp8Test, KeyIdx) { EXPECT_EQ(offset, 3); const auto& vp8_header = - absl::get(video_header.video_type_header); + std::get(video_header.video_type_header); EXPECT_EQ(vp8_header.keyIdx, kKeyIdx); } @@ -165,7 +174,7 @@ TEST(VideoRtpDepacketizerVp8Test, MultipleExtensions) { EXPECT_EQ(offset, 6); const auto& vp8_header = - absl::get(video_header.video_type_header); + std::get(video_header.video_type_header); EXPECT_TRUE(vp8_header.nonReference); EXPECT_EQ(vp8_header.partitionId, 0b0110); EXPECT_EQ(vp8_header.pictureId, 0x1234); @@ -201,13 +210,13 @@ TEST(VideoRtpDepacketizerVp8Test, WithPacketizer) { ASSERT_TRUE(packetizer.NextPacket(&packet)); VideoRtpDepacketizerVp8 depacketizer; - absl::optional parsed = + 
std::optional parsed = depacketizer.Parse(packet.PayloadBuffer()); ASSERT_TRUE(parsed); EXPECT_EQ(parsed->video_header.codec, kVideoCodecVP8); const auto& vp8_header = - absl::get(parsed->video_header.video_type_header); + std::get(parsed->video_header.video_type_header); EXPECT_EQ(vp8_header.nonReference, input_header.nonReference); EXPECT_EQ(vp8_header.pictureId, input_header.pictureId); EXPECT_EQ(vp8_header.tl0PicIdx, input_header.tl0PicIdx); @@ -223,9 +232,9 @@ TEST(VideoRtpDepacketizerVp8Test, ReferencesInputCopyOnWriteBuffer) { packet[1] = 0b1111'0000; // with all extensions, packet[2] = 15; // and one-byte picture id. - rtc::CopyOnWriteBuffer rtp_payload(packet); + CopyOnWriteBuffer rtp_payload(packet); VideoRtpDepacketizerVp8 depacketizer; - absl::optional parsed = + std::optional parsed = depacketizer.Parse(rtp_payload); ASSERT_TRUE(parsed); @@ -235,7 +244,7 @@ TEST(VideoRtpDepacketizerVp8Test, ReferencesInputCopyOnWriteBuffer) { } TEST(VideoRtpDepacketizerVp8Test, FailsOnEmptyPayload) { - rtc::ArrayView empty; + ArrayView empty; RTPVideoHeader video_header; EXPECT_EQ(VideoRtpDepacketizerVp8::ParseRtpPayload(empty, &video_header), 0); } diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc index 41f363d221..a0d21f4fdc 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc @@ -12,12 +12,21 @@ #include +#include +#include +#include + +#include "api/array_view.h" #include "api/video/video_codec_constants.h" -#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "rtc_base/bitstream_reader.h" #include "rtc_base/checks.h" -#include "rtc_base/logging.h" +#include "rtc_base/copy_on_write_buffer.h" namespace webrtc { namespace { @@ -146,12 +155,12 @@ void ParseSsData(BitstreamReader& parser, RTPVideoHeaderVP9* vp9) { } } // namespace -absl::optional -VideoRtpDepacketizerVp9::Parse(rtc::CopyOnWriteBuffer rtp_payload) { - absl::optional result(absl::in_place); +std::optional +VideoRtpDepacketizerVp9::Parse(CopyOnWriteBuffer rtp_payload) { + std::optional result(std::in_place); int offset = ParseRtpPayload(rtp_payload, &result->video_header); if (offset == 0) - return absl::nullopt; + return std::nullopt; RTC_DCHECK_LT(offset, rtp_payload.size()); result->video_payload = rtp_payload.Slice(offset, rtp_payload.size() - offset); @@ -159,7 +168,7 @@ VideoRtpDepacketizerVp9::Parse(rtc::CopyOnWriteBuffer rtp_payload) { } int VideoRtpDepacketizerVp9::ParseRtpPayload( - rtc::ArrayView rtp_payload, + ArrayView rtp_payload, RTPVideoHeader* video_header) { RTC_DCHECK(video_header); // Parse mandatory first byte of payload descriptor. @@ -180,9 +189,6 @@ int VideoRtpDepacketizerVp9::ParseRtpPayload( video_header->simulcastIdx = 0; video_header->codec = kVideoCodecVP9; - video_header->frame_type = - p_bit ? 
VideoFrameType::kVideoFrameDelta : VideoFrameType::kVideoFrameKey; - auto& vp9_header = video_header->video_type_header.emplace(); vp9_header.InitRTPVideoHeaderVP9(); @@ -211,6 +217,9 @@ int VideoRtpDepacketizerVp9::ParseRtpPayload( video_header->height = vp9_header.height[0]; } } + video_header->frame_type = p_bit || vp9_header.inter_layer_predicted + ? VideoFrameType::kVideoFrameDelta + : VideoFrameType::kVideoFrameKey; video_header->is_first_packet_in_frame = b_bit; video_header->is_last_packet_in_frame = e_bit; diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h index 4bb358a15f..d5d45ec2c4 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h @@ -12,8 +12,8 @@ #define MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_VP9_H_ #include +#include -#include "absl/types/optional.h" #include "api/array_view.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" @@ -30,11 +30,10 @@ class VideoRtpDepacketizerVp9 : public VideoRtpDepacketizer { // Parses vp9 rtp payload descriptor. // Returns zero on error or vp9 payload header offset on success. - static int ParseRtpPayload(rtc::ArrayView rtp_payload, + static int ParseRtpPayload(ArrayView rtp_payload, RTPVideoHeader* video_header); - absl::optional Parse( - rtc::CopyOnWriteBuffer rtp_payload) override; + std::optional Parse(CopyOnWriteBuffer rtp_payload) override; }; } // namespace webrtc diff --git a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9_unittest.cc b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9_unittest.cc index 36af59a779..91bd7f5143 100644 --- a/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9_unittest.cc +++ b/modules/rtp_rtcp/source/video_rtp_depacketizer_vp9_unittest.cc @@ -10,11 +10,16 @@ #include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h" -#include -#include +#include +#include +#include #include "api/array_view.h" -#include "test/gmock.h" +#include "api/video/video_frame_type.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "rtc_base/copy_on_write_buffer.h" #include "test/gtest.h" namespace webrtc { @@ -79,7 +84,7 @@ TEST(VideoRtpDepacketizerVp9Test, ParseBasicHeader) { expected.beginning_of_frame = true; expected.end_of_frame = true; VerifyHeader(expected, - absl::get(video_header.video_type_header)); + std::get(video_header.video_type_header)); } TEST(VideoRtpDepacketizerVp9Test, ParseOneBytePictureId) { @@ -96,7 +101,7 @@ TEST(VideoRtpDepacketizerVp9Test, ParseOneBytePictureId) { expected.picture_id = kMaxOneBytePictureId; expected.max_picture_id = kMaxOneBytePictureId; VerifyHeader(expected, - absl::get(video_header.video_type_header)); + std::get(video_header.video_type_header)); } TEST(VideoRtpDepacketizerVp9Test, ParseTwoBytePictureId) { @@ -114,7 +119,7 @@ TEST(VideoRtpDepacketizerVp9Test, ParseTwoBytePictureId) { expected.picture_id = kMaxTwoBytePictureId; expected.max_picture_id = kMaxTwoBytePictureId; VerifyHeader(expected, - absl::get(video_header.video_type_header)); + std::get(video_header.video_type_header)); } TEST(VideoRtpDepacketizerVp9Test, ParseLayerInfoWithNonFlexibleMode) { @@ -142,7 +147,7 @@ TEST(VideoRtpDepacketizerVp9Test, ParseLayerInfoWithNonFlexibleMode) { expected.inter_layer_predicted = kDbit ? 
true : false; expected.tl0_pic_idx = kTl0PicIdx; VerifyHeader(expected, - absl::get(video_header.video_type_header)); + std::get(video_header.video_type_header)); } TEST(VideoRtpDepacketizerVp9Test, ParseLayerInfoWithFlexibleMode) { @@ -169,7 +174,7 @@ TEST(VideoRtpDepacketizerVp9Test, ParseLayerInfoWithFlexibleMode) { expected.spatial_idx = kSpatialIdx; expected.inter_layer_predicted = kDbit ? true : false; VerifyHeader(expected, - absl::get(video_header.video_type_header)); + std::get(video_header.video_type_header)); } TEST(VideoRtpDepacketizerVp9Test, ParseRefIdx) { @@ -209,7 +214,7 @@ TEST(VideoRtpDepacketizerVp9Test, ParseRefIdx) { expected.ref_picture_id[1] = 0x7FFF; expected.ref_picture_id[2] = 32658; VerifyHeader(expected, - absl::get(video_header.video_type_header)); + std::get(video_header.video_type_header)); } TEST(VideoRtpDepacketizerVp9Test, ParseRefIdxFailsWithNoPictureId) { @@ -267,7 +272,7 @@ TEST(VideoRtpDepacketizerVp9Test, ParseSsData) { expected.gof.num_ref_pics[1] = 1; expected.gof.pid_diff[1][0] = 33; VerifyHeader(expected, - absl::get(video_header.video_type_header)); + std::get(video_header.video_type_header)); } TEST(VideoRtpDepacketizerVp9Test, ParseFirstPacketInKeyFrame) { @@ -317,7 +322,7 @@ TEST(VideoRtpDepacketizerVp9Test, ParseResolution) { } TEST(VideoRtpDepacketizerVp9Test, ParseFailsForNoPayloadLength) { - rtc::ArrayView empty; + ArrayView empty; RTPVideoHeader video_header; EXPECT_EQ(VideoRtpDepacketizerVp9::ParseRtpPayload(empty, &video_header), 0); @@ -342,7 +347,7 @@ TEST(VideoRtpDepacketizerVp9Test, ParseNonRefForInterLayerPred) { expected.beginning_of_frame = true; expected.non_ref_for_inter_layer_pred = false; VerifyHeader(expected, - absl::get(video_header.video_type_header)); + std::get(video_header.video_type_header)); packet[0] = 0x05; // I:0 P:0 L:0 F:0 B:0 E:1 V:0 Z:1 VideoRtpDepacketizerVp9::ParseRtpPayload(packet, &video_header); @@ -351,7 +356,7 @@ TEST(VideoRtpDepacketizerVp9Test, ParseNonRefForInterLayerPred) { expected.end_of_frame = true; expected.non_ref_for_inter_layer_pred = true; VerifyHeader(expected, - absl::get(video_header.video_type_header)); + std::get(video_header.video_type_header)); } TEST(VideoRtpDepacketizerVp9Test, ReferencesInputCopyOnWriteBuffer) { @@ -359,9 +364,9 @@ TEST(VideoRtpDepacketizerVp9Test, ReferencesInputCopyOnWriteBuffer) { uint8_t packet[4] = {0}; packet[0] = 0x0C; // I:0 P:0 L:0 F:0 B:1 E:1 V:0 Z:0 - rtc::CopyOnWriteBuffer rtp_payload(packet); + CopyOnWriteBuffer rtp_payload(packet); VideoRtpDepacketizerVp9 depacketizer; - absl::optional parsed = + std::optional parsed = depacketizer.Parse(rtp_payload); ASSERT_TRUE(parsed); @@ -369,5 +374,18 @@ TEST(VideoRtpDepacketizerVp9Test, ReferencesInputCopyOnWriteBuffer) { // Compare pointers to check there was no copy on write buffer unsharing. EXPECT_EQ(parsed->video_payload.cdata(), rtp_payload.cdata() + kHeaderSize); } + +TEST(VideoRtpDepacketizerVp9Test, InterLayerPredOnlyFrameMarkedAsDelta) { + // Set P=0 and D=1 and verify that the depacketizer marks this packet as a + // part of a delta frame (not a keyframe). 
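+  // Informal sketch of the descriptor bits exercised here (labels follow the
+  // inline comments below):
+  //   byte 0: I P L F B E V Z    -> P = inter-picture predicted
+  //   byte 1 (present when L=1): T T T U S S S D -> D = inter-layer dependency
+  // With P=0 and D=1 the parser evaluates
+  //   frame_type = (p_bit || vp9_header.inter_layer_predicted)
+  //                    ? VideoFrameType::kVideoFrameDelta
+  //                    : VideoFrameType::kVideoFrameKey;
+  // so the packet must not be reported as a keyframe.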
+ uint8_t packet[13] = {0}; + packet[0] = 0b0010'0000; // I:0 P:0 L:1 F:0 B:0 E:0 V:0 Z:0 + packet[1] = 0b0000'0001; // T:000 U:0 S:000 D:1 + packet[2] = 0; // TL0PICIDX + + RTPVideoHeader video_header; + VideoRtpDepacketizerVp9::ParseRtpPayload(packet, &video_header); + EXPECT_EQ(video_header.frame_type, VideoFrameType::kVideoFrameDelta); +} } // namespace } // namespace webrtc diff --git a/modules/rtp_rtcp/test/testFec/test_fec.cc b/modules/rtp_rtcp/test/testFec/test_fec.cc index 5ac8feca21..bd6998d71f 100644 --- a/modules/rtp_rtcp/test/testFec/test_fec.cc +++ b/modules/rtp_rtcp/test/testFec/test_fec.cc @@ -16,11 +16,20 @@ #include #include +#include +#include #include +#include +#include +#include +#include +#include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" #include "modules/rtp_rtcp/source/forward_error_correction_internal.h" +#include "rtc_base/checks.h" #include "rtc_base/random.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" diff --git a/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc b/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc index 25ceee585a..21591b42ba 100644 --- a/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc +++ b/modules/rtp_rtcp/test/testFec/test_packet_masks_metrics.cc @@ -44,10 +44,17 @@ */ #include +#include +#include +#include #include +#include +#include "api/array_view.h" +#include "modules/include/module_fec_types.h" #include "modules/rtp_rtcp/source/forward_error_correction_internal.h" #include "modules/rtp_rtcp/test/testFec/average_residual_loss_xor_codes.h" +#include "rtc_base/checks.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" @@ -693,7 +700,7 @@ class FecPacketMaskMetricsTest : public ::testing::Test { int num_media_packets, int num_fec_packets, int mask_bytes_fec_packet, - CodeType code_type) { + CodeType /* code_type */) { for (int i = 0; i < num_fec_packets; i++) { for (int j = 0; j < num_media_packets; j++) { const uint8_t byte_mask = @@ -723,7 +730,7 @@ class FecPacketMaskMetricsTest : public ::testing::Test { for (int num_fec_packets = 1; num_fec_packets <= num_media_packets; num_fec_packets++) { memset(packet_mask.get(), 0, num_media_packets * mask_bytes_fec_packet); - rtc::ArrayView mask = + ArrayView mask = mask_table.LookUp(num_media_packets, num_fec_packets); memcpy(packet_mask.get(), &mask[0], mask.size()); // Convert to bit mask. diff --git a/modules/third_party/.clang-format b/modules/third_party/.clang-format new file mode 100644 index 0000000000..e3845288a2 --- /dev/null +++ b/modules/third_party/.clang-format @@ -0,0 +1 @@ +DisableFormat: true diff --git a/modules/third_party/fft/README.chromium b/modules/third_party/fft/README.chromium index 0c79ef8a6a..214600fb7b 100644 --- a/modules/third_party/fft/README.chromium +++ b/modules/third_party/fft/README.chromium @@ -1,7 +1,7 @@ Name: fft Short Name: fft -URL: -Version: 0 +URL: This is the canonical repository. +Version: N/A Date: 2018-07-26 License: Custom license License File: LICENSE @@ -11,3 +11,11 @@ Shipped: yes Description: Multivariate complex Fourier transform, computed in place using mixed-radix Fast Fourier Transform algorithm. + +This contribution has no upstream repo and is not pulled. +It is maintained solely in the webrtc repo, and is solely +used by the audio_processing/vad function. 
+ +Bug for removal: https://issues.webrtc.org/42234774 + + diff --git a/modules/third_party/g711/README.chromium b/modules/third_party/g711/README.chromium index 675572d720..09e781fdc6 100644 --- a/modules/third_party/g711/README.chromium +++ b/modules/third_party/g711/README.chromium @@ -1,7 +1,7 @@ Name: In line A-law and u-law conversion routines Short Name: g711 -URL: -Version: 0 +URL: https://www.soft-switch.org/downloads/spandsp/ +Version: N/A Date: 2018-06-25 License: Custom license License File: LICENSE diff --git a/modules/third_party/g722/README.chromium b/modules/third_party/g722/README.chromium index ba2234f4f1..869d17c77f 100644 --- a/modules/third_party/g722/README.chromium +++ b/modules/third_party/g722/README.chromium @@ -1,7 +1,7 @@ Name: The ITU G.722 codec, encode and decode part. Short Name: g722 -URL: -Version: 0 +URL: https://www.soft-switch.org/downloads/spandsp/ +Version: N/A Date: 2018-06-25 License: Custom license License File: LICENSE diff --git a/modules/third_party/portaudio/README.chromium b/modules/third_party/portaudio/README.chromium index 3f7beef655..d912d912e2 100644 --- a/modules/third_party/portaudio/README.chromium +++ b/modules/third_party/portaudio/README.chromium @@ -3,7 +3,7 @@ Short Name: portaudio URL: https://github.com/PortAudio/portaudio/tree/master/src/common Version: 9d8563100d841300f1689b186d131347ad43a0f6 Date: 2022-04-12 -License: Custom license +License: MIT License File: LICENSE Security Critical: yes Shipped: yes diff --git a/modules/utility/source/helpers_android.cc b/modules/utility/source/helpers_android.cc index 9cfee8a2af..74e6ed914e 100644 --- a/modules/utility/source/helpers_android.cc +++ b/modules/utility/source/helpers_android.cc @@ -94,7 +94,7 @@ AttachThreadScoped::AttachThreadScoped(JavaVM* jvm) // Adding debug log here so we can track down potential leaks and figure // out why we sometimes see "Native thread exiting without having called // DetachCurrentThread" in logcat outputs. 
- ALOGD("Attaching thread to JVM[tid=%d]", rtc::CurrentThreadId()); + ALOGD("Attaching thread to JVM[tid=%d]", CurrentThreadId()); jint res = jvm->AttachCurrentThread(&env_, NULL); attached_ = (res == JNI_OK); RTC_CHECK(attached_) << "AttachCurrentThread failed: " << res; @@ -103,7 +103,7 @@ AttachThreadScoped::AttachThreadScoped(JavaVM* jvm) AttachThreadScoped::~AttachThreadScoped() { if (attached_) { - ALOGD("Detaching thread from JVM[tid=%d]", rtc::CurrentThreadId()); + ALOGD("Detaching thread from JVM[tid=%d]", CurrentThreadId()); jint res = jvm_->DetachCurrentThread(); RTC_CHECK(res == JNI_OK) << "DetachCurrentThread failed: " << res; RTC_CHECK(!GetEnv(jvm_)); diff --git a/modules/utility/source/jvm_android.cc b/modules/utility/source/jvm_android.cc index e0c66d5fe1..017920ea22 100644 --- a/modules/utility/source/jvm_android.cc +++ b/modules/utility/source/jvm_android.cc @@ -26,8 +26,7 @@ JVM* g_jvm; struct { const char* name; jclass clazz; -} loaded_classes[] = { -}; +} loaded_classes[] = {}; // Android's FindClass() is trickier than usual because the app-specific // ClassLoader is not consulted when there is no app-specific frame on the diff --git a/modules/video_capture/BUILD.gn b/modules/video_capture/BUILD.gn index 730ec9bfdd..22f5ff2acc 100644 --- a/modules/video_capture/BUILD.gn +++ b/modules/video_capture/BUILD.gn @@ -33,7 +33,7 @@ rtc_library("video_capture_module") { "../../api/video:video_frame", "../../api/video:video_rtp_headers", "../../common_video", - "../../media:rtc_media_base", + "../../rtc_base:checks", "../../rtc_base:event_tracer", "../../rtc_base:logging", "../../rtc_base:macromagic", @@ -44,9 +44,10 @@ rtc_library("video_capture_module") { "../../rtc_base/synchronization:mutex", "../../rtc_base/system:rtc_export", "../../system_wrappers", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", "//third_party/libyuv", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } if (!build_with_chromium || is_linux || is_chromeos) { @@ -56,6 +57,7 @@ if (!build_with_chromium || is_linux || is_chromeos) { ":video_capture_module", "../../api:scoped_refptr", "../../api:sequence_checker", + "../../media:video_common", "../../rtc_base:checks", "../../rtc_base:logging", "../../rtc_base:macromagic", @@ -80,7 +82,10 @@ if (!build_with_chromium || is_linux || is_chromeos) { "linux/video_capture_v4l2.cc", "linux/video_capture_v4l2.h", ] - deps += [ "../../media:rtc_media_base" ] + deps += [ + "../../media:rtc_media_base", + "../../rtc_base:sanitizer", + ] if (rtc_use_pipewire) { sources += [ @@ -174,10 +179,14 @@ if (!build_with_chromium || is_linux || is_chromeos) { deps = [ ":video_capture_internal_impl", ":video_capture_module", + "../../api:rtc_error_matchers", "../../api:scoped_refptr", + "../../api/units:time_delta", "../../api/video:video_frame", "../../api/video:video_rtp_headers", "../../common_video", + "../../rtc_base:checks", + "../../rtc_base:gunit_helpers", "../../rtc_base:timeutils", "../../rtc_base/synchronization:mutex", "../../system_wrappers", @@ -185,6 +194,7 @@ if (!build_with_chromium || is_linux || is_chromeos) { "../../test:test_main", "../../test:test_support", "../../test:video_test_common", + "../../test:wait_until", "//testing/gtest", "//third_party/abseil-cpp/absl/memory", ] diff --git a/modules/video_capture/device_info_impl.cc b/modules/video_capture/device_info_impl.cc index ff32a78580..dc49d8abb9 100644 --- a/modules/video_capture/device_info_impl.cc +++ b/modules/video_capture/device_info_impl.cc 
@@ -12,9 +12,16 @@ #include +#include + #include "absl/strings/match.h" #include "absl/strings/string_view.h" +#include "api/video/video_rotation.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "modules/video_capture/video_capture_defines.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" #ifndef abs #define abs(a) (a >= 0 ? a : -a) @@ -158,6 +165,7 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability( if (capability.height == requested.height && capability.width == requested.width && capability.maxFPS >= requested.maxFPS) { + bestVideoType = capability.videoType; bestformatIndex = tmp; } } else // Better frame rate @@ -202,7 +210,7 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability( } // Default implementation. This should be overridden by Mobile implementations. -int32_t DeviceInfoImpl::GetOrientation(const char* deviceUniqueIdUTF8, +int32_t DeviceInfoImpl::GetOrientation(const char* /* deviceUniqueIdUTF8 */, VideoRotation& orientation) { orientation = kVideoRotation_0; return -1; diff --git a/modules/video_capture/linux/camera_portal.cc b/modules/video_capture/linux/camera_portal.cc index 85b9f20228..106ca1682c 100644 --- a/modules/video_capture/linux/camera_portal.cc +++ b/modules/video_capture/linux/camera_portal.cc @@ -15,6 +15,7 @@ #include "modules/portal/pipewire_utils.h" #include "modules/portal/xdg_desktop_portal_utils.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -54,7 +55,9 @@ class CameraPortalPrivate { GAsyncResult* result, gpointer user_data); - CameraPortal::PortalNotifier* notifier_ = nullptr; + webrtc::Mutex notifier_lock_; + CameraPortal::PortalNotifier* notifier_ RTC_GUARDED_BY(¬ifier_lock_) = + nullptr; GDBusConnection* connection_ = nullptr; GDBusProxy* proxy_ = nullptr; @@ -66,6 +69,11 @@ CameraPortalPrivate::CameraPortalPrivate(CameraPortal::PortalNotifier* notifier) : notifier_(notifier) {} CameraPortalPrivate::~CameraPortalPrivate() { + { + webrtc::MutexLock lock(¬ifier_lock_); + notifier_ = nullptr; + } + if (access_request_signal_id_) { g_dbus_connection_signal_unsubscribe(connection_, access_request_signal_id_); @@ -229,7 +237,11 @@ void CameraPortalPrivate::OnOpenResponse(GDBusProxy* proxy, } void CameraPortalPrivate::OnPortalDone(RequestResponse result, int fd) { - notifier_->OnCameraRequestResult(result, fd); + webrtc::MutexLock lock(¬ifier_lock_); + if (notifier_) { + notifier_->OnCameraRequestResult(result, fd); + notifier_ = nullptr; + } } CameraPortal::CameraPortal(PortalNotifier* notifier) diff --git a/modules/video_capture/linux/device_info_pipewire.cc b/modules/video_capture/linux/device_info_pipewire.cc index 1dee78f5ee..0ec0da3933 100644 --- a/modules/video_capture/linux/device_info_pipewire.cc +++ b/modules/video_capture/linux/device_info_pipewire.cc @@ -20,10 +20,10 @@ #include +#include "modules/video_capture/linux/pipewire_session.h" #include "modules/video_capture/video_capture.h" #include "modules/video_capture/video_capture_defines.h" #include "modules/video_capture/video_capture_impl.h" -#include "modules/video_capture/video_capture_options.h" #include "rtc_base/logging.h" namespace webrtc { @@ -38,6 +38,8 @@ int32_t DeviceInfoPipeWire::Init() { DeviceInfoPipeWire::~DeviceInfoPipeWire() = default; uint32_t DeviceInfoPipeWire::NumberOfDevices() { + RTC_CHECK(pipewire_session_); + return pipewire_session_->nodes().size(); } @@ -48,34 +50,36 @@ int32_t DeviceInfoPipeWire::GetDeviceName(uint32_t deviceNumber, uint32_t deviceUniqueIdUTF8Length, 
char* productUniqueIdUTF8, uint32_t productUniqueIdUTF8Length) { + RTC_CHECK(pipewire_session_); + if (deviceNumber >= NumberOfDevices()) return -1; - const PipeWireNode& node = pipewire_session_->nodes().at(deviceNumber); + const auto& node = pipewire_session_->nodes().at(deviceNumber); - if (deviceNameLength <= node.display_name().length()) { + if (deviceNameLength <= node->display_name().length()) { RTC_LOG(LS_INFO) << "deviceNameUTF8 buffer passed is too small"; return -1; } - if (deviceUniqueIdUTF8Length <= node.unique_id().length()) { + if (deviceUniqueIdUTF8Length <= node->unique_id().length()) { RTC_LOG(LS_INFO) << "deviceUniqueIdUTF8 buffer passed is too small"; return -1; } if (productUniqueIdUTF8 && - productUniqueIdUTF8Length <= node.model_id().length()) { + productUniqueIdUTF8Length <= node->model_id().length()) { RTC_LOG(LS_INFO) << "productUniqueIdUTF8 buffer passed is too small"; return -1; } memset(deviceNameUTF8, 0, deviceNameLength); - node.display_name().copy(deviceNameUTF8, deviceNameLength); + node->display_name().copy(deviceNameUTF8, deviceNameLength); memset(deviceUniqueIdUTF8, 0, deviceUniqueIdUTF8Length); - node.unique_id().copy(deviceUniqueIdUTF8, deviceUniqueIdUTF8Length); + node->unique_id().copy(deviceUniqueIdUTF8, deviceUniqueIdUTF8Length); if (productUniqueIdUTF8) { memset(productUniqueIdUTF8, 0, productUniqueIdUTF8Length); - node.model_id().copy(productUniqueIdUTF8, productUniqueIdUTF8Length); + node->model_id().copy(productUniqueIdUTF8, productUniqueIdUTF8Length); } return 0; @@ -83,12 +87,14 @@ int32_t DeviceInfoPipeWire::GetDeviceName(uint32_t deviceNumber, int32_t DeviceInfoPipeWire::CreateCapabilityMap( const char* deviceUniqueIdUTF8) { + RTC_CHECK(pipewire_session_); + for (auto& node : pipewire_session_->nodes()) { - if (node.unique_id().compare(deviceUniqueIdUTF8) != 0) + if (node->unique_id().compare(deviceUniqueIdUTF8) != 0) continue; - _captureCapabilities = node.capabilities(); - _lastUsedDeviceNameLength = node.display_name().length(); + _captureCapabilities = node->capabilities(); + _lastUsedDeviceNameLength = node->unique_id().length(); _lastUsedDeviceName = static_cast( realloc(_lastUsedDeviceName, _lastUsedDeviceNameLength + 1)); memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8, diff --git a/modules/video_capture/linux/device_info_pipewire.h b/modules/video_capture/linux/device_info_pipewire.h index a006c85d1b..0187e47e5f 100644 --- a/modules/video_capture/linux/device_info_pipewire.h +++ b/modules/video_capture/linux/device_info_pipewire.h @@ -14,7 +14,7 @@ #include #include "modules/video_capture/device_info_impl.h" -#include "modules/video_capture/linux/pipewire_session.h" +#include "modules/video_capture/video_capture_options.h" namespace webrtc { namespace videocapturemodule { @@ -44,7 +44,7 @@ class DeviceInfoPipeWire : public DeviceInfoImpl { int32_t Init() override; private: - rtc::scoped_refptr pipewire_session_; + webrtc::scoped_refptr pipewire_session_; }; } // namespace videocapturemodule } // namespace webrtc diff --git a/modules/video_capture/linux/pipewire_session.cc b/modules/video_capture/linux/pipewire_session.cc index 3f52b3dd61..990bfde912 100644 --- a/modules/video_capture/linux/pipewire_session.cc +++ b/modules/video_capture/linux/pipewire_session.cc @@ -16,9 +16,12 @@ #include #include +#include + #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/video_capture/device_info_impl.h" #include "rtc_base/logging.h" +#include "rtc_base/sanitizer.h" #include "rtc_base/string_encode.h" #include 
"rtc_base/string_to_number.h" @@ -35,20 +38,44 @@ VideoType PipeWireRawFormatToVideoType(uint32_t id) { return VideoType::kYUY2; case SPA_VIDEO_FORMAT_UYVY: return VideoType::kUYVY; + case SPA_VIDEO_FORMAT_RGB16: + return VideoType::kRGB565; case SPA_VIDEO_FORMAT_RGB: + return VideoType::kBGR24; + case SPA_VIDEO_FORMAT_BGR: return VideoType::kRGB24; + case SPA_VIDEO_FORMAT_BGRA: + return VideoType::kARGB; + case SPA_VIDEO_FORMAT_RGBA: + return VideoType::kABGR; + case SPA_VIDEO_FORMAT_ARGB: + return VideoType::kBGRA; default: return VideoType::kUnknown; } } +void PipeWireNode::PipeWireNodeDeleter::operator()( + PipeWireNode* node) const noexcept { + spa_hook_remove(&node->node_listener_); + pw_proxy_destroy(node->proxy_); +} + +// static +PipeWireNode::PipeWireNodePtr PipeWireNode::Create(PipeWireSession* session, + uint32_t id, + const spa_dict* props) { + return PipeWireNodePtr(new PipeWireNode(session, id, props)); +} + +RTC_NO_SANITIZE("cfi-icall") PipeWireNode::PipeWireNode(PipeWireSession* session, uint32_t id, const spa_dict* props) : session_(session), id_(id), display_name_(spa_dict_lookup(props, PW_KEY_NODE_DESCRIPTION)), - unique_id_(rtc::ToString(id)) { + unique_id_(spa_dict_lookup(props, PW_KEY_NODE_NAME)) { RTC_LOG(LS_VERBOSE) << "Found Camera: " << display_name_; proxy_ = static_cast(pw_registry_bind( @@ -60,28 +87,24 @@ PipeWireNode::PipeWireNode(PipeWireSession* session, .param = OnNodeParam, }; - pw_node_add_listener(proxy_, &node_listener_, &node_events, this); -} - -PipeWireNode::~PipeWireNode() { - pw_proxy_destroy(proxy_); - spa_hook_remove(&node_listener_); + pw_node_add_listener(reinterpret_cast(proxy_), &node_listener_, &node_events, this); } // static +RTC_NO_SANITIZE("cfi-icall") void PipeWireNode::OnNodeInfo(void* data, const pw_node_info* info) { PipeWireNode* that = static_cast(data); if (info->change_mask & PW_NODE_CHANGE_MASK_PROPS) { const char* vid_str; const char* pid_str; - absl::optional vid; - absl::optional pid; + std::optional vid; + std::optional pid; vid_str = spa_dict_lookup(info->props, SPA_KEY_DEVICE_VENDOR_ID); pid_str = spa_dict_lookup(info->props, SPA_KEY_DEVICE_PRODUCT_ID); - vid = vid_str ? rtc::StringToNumber(vid_str) : absl::nullopt; - pid = pid_str ? rtc::StringToNumber(pid_str) : absl::nullopt; + vid = vid_str ? webrtc::StringToNumber(vid_str) : std::nullopt; + pid = pid_str ? 
webrtc::StringToNumber(pid_str) : std::nullopt; if (vid && pid) { char model_str[10]; @@ -89,12 +112,14 @@ void PipeWireNode::OnNodeInfo(void* data, const pw_node_info* info) { pid.value()); that->model_id_ = model_str; } - } else if (info->change_mask & PW_NODE_CHANGE_MASK_PARAMS) { + } + + if (info->change_mask & PW_NODE_CHANGE_MASK_PARAMS) { for (uint32_t i = 0; i < info->n_params; i++) { uint32_t id = info->params[i].id; if (id == SPA_PARAM_EnumFormat && info->params[i].flags & SPA_PARAM_INFO_READ) { - pw_node_enum_params(that->proxy_, 0, id, 0, UINT32_MAX, nullptr); + pw_node_enum_params(reinterpret_cast(that->proxy_), 0, id, 0, UINT32_MAX, nullptr); break; } } @@ -103,6 +128,7 @@ void PipeWireNode::OnNodeInfo(void* data, const pw_node_info* info) { } // static +RTC_NO_SANITIZE("cfi-icall") void PipeWireNode::OnNodeParam(void* data, int seq, uint32_t id, @@ -127,9 +153,15 @@ void PipeWireNode::OnNodeParam(void* data, fract = static_cast(SPA_POD_BODY(val)); - if (choice == SPA_CHOICE_None) + if (choice == SPA_CHOICE_None) { cap.maxFPS = 1.0 * fract[0].num / fract[0].denom; - else if (choice == SPA_CHOICE_Range && fract[1].num > 0) + } else if (choice == SPA_CHOICE_Enum) { + for (uint32_t i = 1; i < n_items; i++) { + cap.maxFPS = std::max( + static_cast(1.0 * fract[i].num / fract[i].denom), + cap.maxFPS); + } + } else if (choice == SPA_CHOICE_Range && fract[1].num > 0) cap.maxFPS = 1.0 * fract[1].num / fract[1].denom; } } @@ -254,6 +286,7 @@ void PipeWireSession::InitPipeWire(int fd) { Finish(VideoCaptureOptions::Status::ERROR); } +RTC_NO_SANITIZE("cfi-icall") bool PipeWireSession::StartPipeWire(int fd) { pw_init(/*argc=*/nullptr, /*argv=*/nullptr); @@ -320,6 +353,7 @@ void PipeWireSession::StopPipeWire() { } } +RTC_NO_SANITIZE("cfi-icall") void PipeWireSession::PipeWireSync() { sync_seq_ = pw_core_sync(pw_core_, PW_ID_CORE, sync_seq_); } @@ -340,12 +374,21 @@ void PipeWireSession::OnCoreDone(void* data, uint32_t id, int seq) { if (id == PW_ID_CORE) { if (seq == that->sync_seq_) { RTC_LOG(LS_VERBOSE) << "Enumerating PipeWire camera devices complete."; + + // Remove camera devices with no capabilities + auto it = std::remove_if(that->nodes_.begin(), that->nodes_.end(), + [](const PipeWireNode::PipeWireNodePtr& node) { + return node->capabilities().empty(); + }); + that->nodes_.erase(it, that->nodes_.end()); + that->Finish(VideoCaptureOptions::Status::SUCCESS); } } } // static +RTC_NO_SANITIZE("cfi-icall") void PipeWireSession::OnRegistryGlobal(void* data, uint32_t id, uint32_t permissions, @@ -354,13 +397,24 @@ void PipeWireSession::OnRegistryGlobal(void* data, const spa_dict* props) { PipeWireSession* that = static_cast(data); + // Skip already added nodes to avoid duplicate camera entries + if (std::find_if(that->nodes_.begin(), that->nodes_.end(), + [id](const PipeWireNode::PipeWireNodePtr& node) { + return node->id() == id; + }) != that->nodes_.end()) + return; + if (type != absl::string_view(PW_TYPE_INTERFACE_Node)) return; if (!spa_dict_lookup(props, PW_KEY_NODE_DESCRIPTION)) return; - that->nodes_.emplace_back(that, id, props); + auto node_role = spa_dict_lookup(props, PW_KEY_MEDIA_ROLE); + if (!node_role || strcmp(node_role, "Camera")) + return; + + that->nodes_.push_back(PipeWireNode::Create(that, id, props)); that->PipeWireSync(); } @@ -368,15 +422,16 @@ void PipeWireSession::OnRegistryGlobal(void* data, void PipeWireSession::OnRegistryGlobalRemove(void* data, uint32_t id) { PipeWireSession* that = static_cast(data); - for (auto it = that->nodes_.begin(); it != 
that->nodes().end(); ++it) { - if ((*it).id() == id) { - that->nodes_.erase(it); - break; - } - } + auto it = std::remove_if(that->nodes_.begin(), that->nodes_.end(), + [id](const PipeWireNode::PipeWireNodePtr& node) { + return node->id() == id; + }); + that->nodes_.erase(it, that->nodes_.end()); } void PipeWireSession::Finish(VideoCaptureOptions::Status status) { + status_ = status; + webrtc::MutexLock lock(&callback_lock_); if (callback_) { diff --git a/modules/video_capture/linux/pipewire_session.h b/modules/video_capture/linux/pipewire_session.h index fdc06a6b2a..aec268e008 100644 --- a/modules/video_capture/linux/pipewire_session.h +++ b/modules/video_capture/linux/pipewire_session.h @@ -11,7 +11,6 @@ #ifndef MODULES_VIDEO_CAPTURE_LINUX_PIPEWIRE_SESSION_H_ #define MODULES_VIDEO_CAPTURE_LINUX_PIPEWIRE_SESSION_H_ -#include #include #include @@ -37,8 +36,15 @@ class VideoCaptureModulePipeWire; // So they all represent one camera that is available via PipeWire. class PipeWireNode { public: - PipeWireNode(PipeWireSession* session, uint32_t id, const spa_dict* props); - ~PipeWireNode(); + struct PipeWireNodeDeleter { + void operator()(PipeWireNode* node) const noexcept; + }; + + using PipeWireNodePtr = + std::unique_ptr; + static PipeWireNodePtr Create(PipeWireSession* session, + uint32_t id, + const spa_dict* props); uint32_t id() const { return id_; } std::string display_name() const { return display_name_; } @@ -48,6 +54,9 @@ class PipeWireNode { return capabilities_; } + protected: + PipeWireNode(PipeWireSession* session, uint32_t id, const spa_dict* props); + private: static void OnNodeInfo(void* data, const pw_node_info* info); static void OnNodeParam(void* data, @@ -80,15 +89,16 @@ class CameraPortalNotifier : public CameraPortal::PortalNotifier { PipeWireSession* session_; }; -class PipeWireSession : public rtc::RefCountedNonVirtual { +class PipeWireSession : public webrtc::RefCountedNonVirtual { public: PipeWireSession(); ~PipeWireSession(); void Init(VideoCaptureOptions::Callback* callback, int fd = kInvalidPipeWireFd); - - const std::deque& nodes() const { return nodes_; } + const std::deque& nodes() const { + return nodes_; + } friend class CameraPortalNotifier; friend class PipeWireNode; @@ -134,7 +144,7 @@ class PipeWireSession : public rtc::RefCountedNonVirtual { int sync_seq_ = 0; - std::deque nodes_; + std::deque nodes_; std::unique_ptr portal_; std::unique_ptr portal_notifier_; }; diff --git a/modules/video_capture/linux/video_capture_linux.cc b/modules/video_capture/linux/video_capture_linux.cc index a2ea218a8b..0b57cfe90c 100644 --- a/modules/video_capture/linux/video_capture_linux.cc +++ b/modules/video_capture/linux/video_capture_linux.cc @@ -34,9 +34,9 @@ namespace webrtc { namespace videocapturemodule { -rtc::scoped_refptr VideoCaptureImpl::Create( +scoped_refptr VideoCaptureImpl::Create( const char* deviceUniqueId) { - auto implementation = rtc::make_ref_counted(); + auto implementation = make_ref_counted(); if (implementation->Init(deviceUniqueId) != 0) return nullptr; @@ -44,20 +44,20 @@ rtc::scoped_refptr VideoCaptureImpl::Create( return implementation; } -rtc::scoped_refptr VideoCaptureImpl::Create( +scoped_refptr VideoCaptureImpl::Create( VideoCaptureOptions* options, const char* deviceUniqueId) { #if defined(WEBRTC_USE_PIPEWIRE) if (options->allow_pipewire()) { auto implementation = - rtc::make_ref_counted(options); + webrtc::make_ref_counted(options); if (implementation->Init(deviceUniqueId) == 0) return implementation; } #endif if (options->allow_v4l2()) { - 
auto implementation = rtc::make_ref_counted(); + auto implementation = make_ref_counted(); if (implementation->Init(deviceUniqueId) == 0) return implementation; diff --git a/modules/video_capture/linux/video_capture_pipewire.cc b/modules/video_capture/linux/video_capture_pipewire.cc index 46cfe9bc15..f6cd57ac36 100644 --- a/modules/video_capture/linux/video_capture_pipewire.cc +++ b/modules/video_capture/linux/video_capture_pipewire.cc @@ -20,6 +20,7 @@ #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/portal/pipewire_utils.h" #include "rtc_base/logging.h" +#include "rtc_base/sanitizer.h" #include "rtc_base/string_to_number.h" namespace webrtc { @@ -33,7 +34,16 @@ struct { {SPA_VIDEO_FORMAT_NV12, VideoType::kNV12}, {SPA_VIDEO_FORMAT_YUY2, VideoType::kYUY2}, {SPA_VIDEO_FORMAT_UYVY, VideoType::kUYVY}, - {SPA_VIDEO_FORMAT_RGB, VideoType::kRGB24}, + // PipeWire is big-endian for the formats, while libyuv is little-endian + // This means that BGRA == ARGB, RGBA == ABGR and similar + // This follows mapping in libcamera PipeWire plugin: + // https://gitlab.freedesktop.org/pipewire/pipewire/-/blob/master/spa/plugins/libcamera/libcamera-utils.cpp + {SPA_VIDEO_FORMAT_BGRA, VideoType::kARGB}, + {SPA_VIDEO_FORMAT_RGBA, VideoType::kABGR}, + {SPA_VIDEO_FORMAT_ARGB, VideoType::kBGRA}, + {SPA_VIDEO_FORMAT_RGB, VideoType::kBGR24}, + {SPA_VIDEO_FORMAT_BGR, VideoType::kRGB24}, + {SPA_VIDEO_FORMAT_RGB16, VideoType::kRGB565}, }; VideoType VideoCaptureModulePipeWire::PipeWireRawFormatToVideoType( @@ -42,13 +52,26 @@ VideoType VideoCaptureModulePipeWire::PipeWireRawFormatToVideoType( if (spa_and_pixel_format.spa_format == spa_format) return spa_and_pixel_format.video_type; } - RTC_LOG(LS_INFO) << "Unsupported pixel format: " << spa_format; + RTC_LOG(LS_WARNING) << "Unsupported pixel format: " << spa_format; return VideoType::kUnknown; } +uint32_t VideoCaptureModulePipeWire::VideoTypeToPipeWireRawFormat( + VideoType type) { + for (const auto& spa_and_pixel_format : kSupportedFormats) { + if (spa_and_pixel_format.video_type == type) + return spa_and_pixel_format.spa_format; + } + RTC_LOG(LS_WARNING) << "Unsupported video type: " << static_cast(type); + return SPA_VIDEO_FORMAT_UNKNOWN; +} + VideoCaptureModulePipeWire::VideoCaptureModulePipeWire( VideoCaptureOptions* options) - : VideoCaptureImpl(), session_(options->pipewire_session()) {} + : VideoCaptureImpl(), + session_(options->pipewire_session()), + initialized_(false), + started_(false) {} VideoCaptureModulePipeWire::~VideoCaptureModulePipeWire() { RTC_DCHECK_RUN_ON(&api_checker_); @@ -60,12 +83,15 @@ int32_t VideoCaptureModulePipeWire::Init(const char* deviceUniqueId) { RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); RTC_DCHECK_RUN_ON(&api_checker_); - absl::optional id; - id = rtc::StringToNumber(deviceUniqueId); - if (id == absl::nullopt) + auto node = + std::find_if(session_->nodes_.begin(), session_->nodes_.end(), + [deviceUniqueId](const PipeWireNode::PipeWireNodePtr& node) { + return node->unique_id() == deviceUniqueId; + }); + if (node == session_->nodes_.end()) return -1; - node_id_ = id.value(); + node_id_ = (*node)->id(); const int len = strlen(deviceUniqueId); _deviceUniqueId = new (std::nothrow) char[len + 1]; @@ -75,57 +101,81 @@ int32_t VideoCaptureModulePipeWire::Init(const char* deviceUniqueId) { } static spa_pod* BuildFormat(spa_pod_builder* builder, - uint32_t format, + VideoType video_type, uint32_t width, uint32_t height, float frame_rate) { - spa_pod_frame frames[2]; + spa_pod_frame frame; - 
spa_pod_builder_push_object(builder, &frames[0], SPA_TYPE_OBJECT_Format, + const uint32_t media_subtype = video_type == VideoType::kMJPEG + ? SPA_MEDIA_SUBTYPE_mjpg + : SPA_MEDIA_SUBTYPE_raw; + + spa_pod_builder_push_object(builder, &frame, SPA_TYPE_OBJECT_Format, SPA_PARAM_EnumFormat); spa_pod_builder_add(builder, SPA_FORMAT_mediaType, SPA_POD_Id(SPA_MEDIA_TYPE_video), SPA_FORMAT_mediaSubtype, - SPA_POD_Id(format), 0); - - if (format == SPA_MEDIA_SUBTYPE_raw) { - spa_pod_builder_prop(builder, SPA_FORMAT_VIDEO_format, 0); - spa_pod_builder_push_choice(builder, &frames[1], SPA_CHOICE_Enum, 0); - spa_pod_builder_id(builder, kSupportedFormats[0].spa_format); - for (const auto& spa_and_pixel_format : kSupportedFormats) - spa_pod_builder_id(builder, spa_and_pixel_format.spa_format); - spa_pod_builder_pop(builder, &frames[1]); - } + SPA_POD_Id(media_subtype), 0); - spa_rectangle preferred_size = spa_rectangle{width, height}; - spa_rectangle min_size = spa_rectangle{1, 1}; - spa_rectangle max_size = spa_rectangle{4096, 4096}; - spa_pod_builder_add( - builder, SPA_FORMAT_VIDEO_size, - SPA_POD_CHOICE_RANGE_Rectangle(&preferred_size, &min_size, &max_size), 0); + if (media_subtype == SPA_MEDIA_SUBTYPE_raw) { + const uint32_t format = + VideoCaptureModulePipeWire::VideoTypeToPipeWireRawFormat(video_type); + RTC_CHECK(format != SPA_VIDEO_FORMAT_UNKNOWN); + spa_pod_builder_add(builder, SPA_FORMAT_VIDEO_format, SPA_POD_Id(format), + 0); + } - spa_fraction preferred_frame_rate = - spa_fraction{static_cast(frame_rate), 1}; - spa_fraction min_frame_rate = spa_fraction{0, 1}; - spa_fraction max_frame_rate = spa_fraction{INT32_MAX, 1}; - spa_pod_builder_add( - builder, SPA_FORMAT_VIDEO_framerate, - SPA_POD_CHOICE_RANGE_Fraction(&preferred_frame_rate, &min_frame_rate, - &max_frame_rate), - 0); + spa_rectangle resolution = spa_rectangle{width, height}; + spa_pod_builder_add(builder, SPA_FORMAT_VIDEO_size, + SPA_POD_Rectangle(&resolution), 0); + + // Framerate can be also set to 0 to be unspecified + if (frame_rate) { + spa_fraction framerate = spa_fraction{static_cast(frame_rate), 1}; + spa_pod_builder_add(builder, SPA_FORMAT_VIDEO_framerate, + SPA_POD_Fraction(&framerate), 0); + } else { + // Default to some reasonable values + spa_fraction preferred_frame_rate = + spa_fraction{static_cast(30), 1}; + spa_fraction min_frame_rate = spa_fraction{1, 1}; + spa_fraction max_frame_rate = spa_fraction{30, 1}; + spa_pod_builder_add( + builder, SPA_FORMAT_VIDEO_framerate, + SPA_POD_CHOICE_RANGE_Fraction(&preferred_frame_rate, &min_frame_rate, + &max_frame_rate), + 0); + } - return static_cast(spa_pod_builder_pop(builder, &frames[0])); + return static_cast(spa_pod_builder_pop(builder, &frame)); } +RTC_NO_SANITIZE("cfi-icall") int32_t VideoCaptureModulePipeWire::StartCapture( const VideoCaptureCapability& capability) { - RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); RTC_DCHECK_RUN_ON(&api_checker_); + if (initialized_) { + if (capability == _requestedCapability) { + return 0; + } else { + StopCapture(); + } + } + uint8_t buffer[1024] = {}; + // We don't want members above to be guarded by capture_checker_ as + // it's meant to be for members that are accessed on the API thread + // only when we are not capturing. The code above can be called many + // times while sharing instance of VideoCapturePipeWire between + // websites and therefore it would not follow the requirements of this + // checker. 
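+  // In short: state that is only touched on the API thread is checked first
+  // (and may early-return), and everything below runs serialized on
+  // capture_checker_ while holding the PipeWire thread-loop lock before
+  // stream_ is created.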
+ RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + PipeWireThreadLoopLock thread_loop_lock(session_->pw_main_loop_); + RTC_LOG(LS_VERBOSE) << "Creating new PipeWire stream for node " << node_id_; - PipeWireThreadLoopLock thread_loop_lock(session_->pw_main_loop_); pw_properties* reuse_props = pw_properties_new_string("pipewire.client.reuse=1"); stream_ = pw_stream_new(session_->pw_core_, "camera-stream", reuse_props); @@ -149,19 +199,15 @@ int32_t VideoCaptureModulePipeWire::StartCapture( uint32_t width = capability.width; uint32_t height = capability.height; uint32_t frame_rate = capability.maxFPS; - bool prefer_jpeg = (width > 640) || (height > 480); + VideoType video_type = capability.videoType; params.push_back( - BuildFormat(&builder, SPA_MEDIA_SUBTYPE_raw, width, height, frame_rate)); - params.insert( - prefer_jpeg ? params.begin() : params.end(), - BuildFormat(&builder, SPA_MEDIA_SUBTYPE_mjpg, width, height, frame_rate)); + BuildFormat(&builder, video_type, width, height, frame_rate)); int res = pw_stream_connect( stream_, PW_DIRECTION_INPUT, node_id_, static_cast(PW_STREAM_FLAG_AUTOCONNECT | - PW_STREAM_FLAG_DONT_RECONNECT | - PW_STREAM_FLAG_MAP_BUFFERS), + PW_STREAM_FLAG_DONT_RECONNECT), params.data(), params.size()); if (res != 0) { RTC_LOG(LS_ERROR) << "Could not connect to camera stream: " @@ -170,14 +216,19 @@ int32_t VideoCaptureModulePipeWire::StartCapture( } _requestedCapability = capability; + initialized_ = true; + return 0; } int32_t VideoCaptureModulePipeWire::StopCapture() { - RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); RTC_DCHECK_RUN_ON(&api_checker_); PipeWireThreadLoopLock thread_loop_lock(session_->pw_main_loop_); + // PipeWireSession is guarded by API checker so just make sure we do + // race detection when the PipeWire loop is locked/stopped to not run + // any callback at this point. 
+ RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); if (stream_) { pw_stream_destroy(stream_); stream_ = nullptr; @@ -216,6 +267,7 @@ void VideoCaptureModulePipeWire::OnStreamParamChanged( that->OnFormatChanged(format); } +RTC_NO_SANITIZE("cfi-icall") void VideoCaptureModulePipeWire::OnFormatChanged(const struct spa_pod* format) { RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); @@ -276,11 +328,18 @@ void VideoCaptureModulePipeWire::OnFormatChanged(const struct spa_pod* format) { break; case VideoType::kYUY2: case VideoType::kUYVY: + case VideoType::kRGB565: stride = configured_capability_.width * 2; break; case VideoType::kRGB24: + case VideoType::kBGR24: stride = configured_capability_.width * 3; break; + case VideoType::kARGB: + case VideoType::kABGR: + case VideoType::kBGRA: + stride = configured_capability_.width * 4; + break; default: RTC_LOG(LS_ERROR) << "Unsupported video format."; return; @@ -289,11 +348,11 @@ void VideoCaptureModulePipeWire::OnFormatChanged(const struct spa_pod* format) { 0); } + const int buffer_types = + (1 << SPA_DATA_DmaBuf) | (1 << SPA_DATA_MemFd) | (1 << SPA_DATA_MemPtr); spa_pod_builder_add( &builder, SPA_PARAM_BUFFERS_buffers, SPA_POD_CHOICE_RANGE_Int(8, 1, 32), - SPA_PARAM_BUFFERS_dataType, - SPA_POD_CHOICE_FLAGS_Int((1 << SPA_DATA_MemFd) | (1 << SPA_DATA_MemPtr)), - 0); + SPA_PARAM_BUFFERS_dataType, SPA_POD_CHOICE_FLAGS_Int(buffer_types), 0); params.push_back( static_cast(spa_pod_builder_pop(&builder, &frame))); @@ -301,6 +360,10 @@ void VideoCaptureModulePipeWire::OnFormatChanged(const struct spa_pod* format) { &builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type, SPA_POD_Id(SPA_META_Header), SPA_PARAM_META_size, SPA_POD_Int(sizeof(struct spa_meta_header))))); + params.push_back(reinterpret_cast(spa_pod_builder_add_object( + &builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type, + SPA_POD_Id(SPA_META_VideoTransform), SPA_PARAM_META_size, + SPA_POD_Int(sizeof(struct spa_meta_videotransform))))); pw_stream_update_params(stream_, params.data(), params.size()); } @@ -312,7 +375,6 @@ void VideoCaptureModulePipeWire::OnStreamStateChanged( VideoCaptureModulePipeWire* that = static_cast(data); RTC_DCHECK(that); - RTC_CHECK_RUNS_SERIALIZED(&that->capture_checker_); MutexLock lock(&that->api_lock_); switch (state) { @@ -341,21 +403,70 @@ void VideoCaptureModulePipeWire::OnStreamProcess(void* data) { that->ProcessBuffers(); } +static VideoRotation VideorotationFromPipeWireTransform(uint32_t transform) { + switch (transform) { + case SPA_META_TRANSFORMATION_90: + return kVideoRotation_90; + case SPA_META_TRANSFORMATION_180: + return kVideoRotation_180; + case SPA_META_TRANSFORMATION_270: + return kVideoRotation_270; + default: + return kVideoRotation_0; + } +} + +RTC_NO_SANITIZE("cfi-icall") void VideoCaptureModulePipeWire::ProcessBuffers() { RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); while (pw_buffer* buffer = pw_stream_dequeue_buffer(stream_)) { + spa_buffer* spaBuffer = buffer->buffer; struct spa_meta_header* h; h = static_cast( - spa_buffer_find_meta_data(buffer->buffer, SPA_META_Header, sizeof(*h))); + spa_buffer_find_meta_data(spaBuffer, SPA_META_Header, sizeof(*h))); + + struct spa_meta_videotransform* videotransform; + videotransform = + static_cast(spa_buffer_find_meta_data( + spaBuffer, SPA_META_VideoTransform, sizeof(*videotransform))); + if (videotransform) { + VideoRotation rotation = + VideorotationFromPipeWireTransform(videotransform->transform); + SetCaptureRotation(rotation); + SetApplyRotation(rotation != 
kVideoRotation_0); + } if (h->flags & SPA_META_HEADER_FLAG_CORRUPTED) { RTC_LOG(LS_INFO) << "Dropping corruped frame."; - } else { - IncomingFrame(static_cast(buffer->buffer->datas[0].data), - buffer->buffer->datas[0].chunk->size, - configured_capability_); + pw_stream_queue_buffer(stream_, buffer); + continue; } + + if (spaBuffer->datas[0].type == SPA_DATA_DmaBuf || + spaBuffer->datas[0].type == SPA_DATA_MemFd) { + ScopedBuf frame; + frame.initialize( + static_cast( + mmap(nullptr, spaBuffer->datas[0].maxsize, PROT_READ, MAP_SHARED, + spaBuffer->datas[0].fd, spaBuffer->datas[0].mapoffset)), + spaBuffer->datas[0].maxsize, spaBuffer->datas[0].fd, + spaBuffer->datas[0].type == SPA_DATA_DmaBuf); + + if (!frame) { + RTC_LOG(LS_ERROR) << "Failed to mmap the memory: " + << std::strerror(errno); + return; + } + + IncomingFrame( + SPA_MEMBER(frame.get(), spaBuffer->datas[0].mapoffset, uint8_t), + spaBuffer->datas[0].chunk->size, configured_capability_); + } else { // SPA_DATA_MemPtr + IncomingFrame(static_cast(spaBuffer->datas[0].data), + spaBuffer->datas[0].chunk->size, configured_capability_); + } + pw_stream_queue_buffer(stream_, buffer); } } diff --git a/modules/video_capture/linux/video_capture_pipewire.h b/modules/video_capture/linux/video_capture_pipewire.h index 316fb2449d..240e7da52b 100644 --- a/modules/video_capture/linux/video_capture_pipewire.h +++ b/modules/video_capture/linux/video_capture_pipewire.h @@ -28,6 +28,7 @@ class VideoCaptureModulePipeWire : public VideoCaptureImpl { int32_t CaptureSettings(VideoCaptureCapability& settings) override; static VideoType PipeWireRawFormatToVideoType(uint32_t format); + static uint32_t VideoTypeToPipeWireRawFormat(VideoType type); private: static void OnStreamParamChanged(void* data, @@ -43,12 +44,13 @@ class VideoCaptureModulePipeWire : public VideoCaptureImpl { void OnFormatChanged(const struct spa_pod* format); void ProcessBuffers(); - const rtc::scoped_refptr session_ - RTC_GUARDED_BY(capture_checker_); + const webrtc::scoped_refptr session_ + RTC_GUARDED_BY(api_checker_); + bool initialized_ RTC_GUARDED_BY(api_checker_); + bool started_ RTC_GUARDED_BY(api_lock_); int node_id_ RTC_GUARDED_BY(capture_checker_); VideoCaptureCapability configured_capability_ RTC_GUARDED_BY(capture_checker_); - bool started_ RTC_GUARDED_BY(api_lock_); struct pw_stream* stream_ RTC_GUARDED_BY(capture_checker_) = nullptr; struct spa_hook stream_listener_ RTC_GUARDED_BY(capture_checker_); diff --git a/modules/video_capture/linux/video_capture_v4l2.cc b/modules/video_capture/linux/video_capture_v4l2.cc index 7a70c2ff88..33aa452a07 100644 --- a/modules/video_capture/linux/video_capture_v4l2.cc +++ b/modules/video_capture/linux/video_capture_v4l2.cc @@ -112,7 +112,6 @@ VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2() { int32_t VideoCaptureModuleV4L2::StartCapture( const VideoCaptureCapability& capability) { RTC_DCHECK_RUN_ON(&api_checker_); - RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); if (_captureStarted) { if (capability == _requestedCapability) { @@ -122,6 +121,13 @@ int32_t VideoCaptureModuleV4L2::StartCapture( } } + // We don't want members above to be guarded by capture_checker_ as + // it's meant to be for members that are accessed on the API thread + // only when we are not capturing. The code above can be called many + // times while sharing instance of VideoCaptureV4L2 between websites + // and therefore it would not follow the requirements of this checker. + RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + // Set a baseline of configured parameters. 
It is updated here during // configuration, then read from the capture thread. configured_capability_ = capability; @@ -166,8 +172,7 @@ int32_t VideoCaptureModuleV4L2::StartCapture( fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; RTC_LOG(LS_INFO) << "Video Capture enumerats supported image formats:"; while (ioctl(_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) { - RTC_LOG(LS_INFO) << " { pixelformat = " - << cricket::GetFourccName(fmt.pixelformat) + RTC_LOG(LS_INFO) << " { pixelformat = " << GetFourccName(fmt.pixelformat) << ", description = '" << fmt.description << "' }"; // Match the preferred order. for (int i = 0; i < nFormats; i++) { @@ -182,8 +187,7 @@ int32_t VideoCaptureModuleV4L2::StartCapture( RTC_LOG(LS_INFO) << "no supporting video formats found"; return -1; } else { - RTC_LOG(LS_INFO) << "We prefer format " - << cricket::GetFourccName(fmts[fmtsIdx]); + RTC_LOG(LS_INFO) << "We prefer format " << GetFourccName(fmts[fmtsIdx]); } struct v4l2_format video_fmt; @@ -283,17 +287,17 @@ int32_t VideoCaptureModuleV4L2::StartCapture( _requestedCapability = capability; _captureStarted = true; + _streaming = true; // start capture thread; if (_captureThread.empty()) { quit_ = false; - _captureThread = rtc::PlatformThread::SpawnJoinable( + _captureThread = PlatformThread::SpawnJoinable( [this] { while (CaptureProcess()) { } }, - "CaptureThread", - rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kHigh)); + "CaptureThread", ThreadAttributes().SetPriority(ThreadPriority::kHigh)); } return 0; } @@ -310,10 +314,12 @@ int32_t VideoCaptureModuleV4L2::StopCapture() { _captureThread.Finalize(); } + _captureStarted = false; + RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); MutexLock lock(&capture_lock_); - if (_captureStarted) { - _captureStarted = false; + if (_streaming) { + _streaming = false; DeAllocateVideoBuffers(); close(_deviceFd); @@ -397,7 +403,7 @@ bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers() { } bool VideoCaptureModuleV4L2::CaptureStarted() { - RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); + RTC_DCHECK_RUN_ON(&api_checker_); return _captureStarted; } @@ -434,7 +440,7 @@ bool VideoCaptureModuleV4L2::CaptureProcess() { return true; } - if (_captureStarted) { + if (_streaming) { struct v4l2_buffer buf; memset(&buf, 0, sizeof(struct v4l2_buffer)); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; diff --git a/modules/video_capture/linux/video_capture_v4l2.h b/modules/video_capture/linux/video_capture_v4l2.h index 0191e41876..2611192d55 100644 --- a/modules/video_capture/linux/video_capture_v4l2.h +++ b/modules/video_capture/linux/video_capture_v4l2.h @@ -41,7 +41,7 @@ class VideoCaptureModuleV4L2 : public VideoCaptureImpl { bool AllocateVideoBuffers() RTC_EXCLUSIVE_LOCKS_REQUIRED(capture_lock_); bool DeAllocateVideoBuffers() RTC_EXCLUSIVE_LOCKS_REQUIRED(capture_lock_); - rtc::PlatformThread _captureThread RTC_GUARDED_BY(api_checker_); + PlatformThread _captureThread RTC_GUARDED_BY(api_checker_); Mutex capture_lock_ RTC_ACQUIRED_BEFORE(api_lock_); bool quit_ RTC_GUARDED_BY(capture_lock_); int32_t _deviceId RTC_GUARDED_BY(api_checker_); @@ -50,7 +50,8 @@ class VideoCaptureModuleV4L2 : public VideoCaptureImpl { int32_t _buffersAllocatedByDevice RTC_GUARDED_BY(capture_lock_); VideoCaptureCapability configured_capability_ RTC_GUARDED_BY(capture_checker_); - bool _captureStarted RTC_GUARDED_BY(capture_checker_); + bool _streaming RTC_GUARDED_BY(capture_checker_); + bool _captureStarted RTC_GUARDED_BY(api_checker_); struct Buffer { void* start; size_t length; diff --git 
a/modules/video_capture/test/video_capture_unittest.cc b/modules/video_capture/test/video_capture_unittest.cc index c8af222b57..2019f0bf4e 100644 --- a/modules/video_capture/test/video_capture_unittest.cc +++ b/modules/video_capture/test/video_capture_unittest.cc @@ -12,45 +12,34 @@ #include +#include #include #include #include +#include +#include -#include "absl/memory/memory.h" #include "api/scoped_refptr.h" -#include "api/video/i420_buffer.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" #include "api/video/video_frame.h" -#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "api/video/video_rotation.h" +#include "api/video/video_sink_interface.h" +#include "modules/video_capture/video_capture_defines.h" #include "modules/video_capture/video_capture_factory.h" +#include "rtc_base/checks.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" -#include "system_wrappers/include/sleep.h" #include "test/frame_utils.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" -using webrtc::SleepMs; +using ::testing::Ge; using webrtc::VideoCaptureCapability; using webrtc::VideoCaptureFactory; using webrtc::VideoCaptureModule; -#define WAIT_(ex, timeout, res) \ - do { \ - res = (ex); \ - int64_t start = rtc::TimeMillis(); \ - while (!res && rtc::TimeMillis() < start + timeout) { \ - SleepMs(5); \ - res = (ex); \ - } \ - } while (0) - -#define EXPECT_TRUE_WAIT(ex, timeout) \ - do { \ - bool res; \ - WAIT_(ex, timeout, res); \ - if (!res) \ - EXPECT_TRUE(ex); \ - } while (0) - static const int kTimeOut = 5000; #ifdef WEBRTC_MAC static const int kTestHeight = 288; @@ -59,7 +48,7 @@ static const int kTestFramerate = 30; #endif class TestVideoCaptureCallback - : public rtc::VideoSinkInterface { + : public webrtc::VideoSinkInterface { public: TestVideoCaptureCallback() : last_render_time_ms_(0), @@ -87,8 +76,8 @@ class TestVideoCaptureCallback EXPECT_EQ(rotate_frame_, videoFrame.rotation()); #endif // RenderTimstamp should be the time now. 
- EXPECT_TRUE(videoFrame.render_time_ms() >= rtc::TimeMillis() - 30 && - videoFrame.render_time_ms() <= rtc::TimeMillis()); + EXPECT_TRUE(videoFrame.render_time_ms() >= webrtc::TimeMillis() - 30 && + videoFrame.render_time_ms() <= webrtc::TimeMillis()); if ((videoFrame.render_time_ms() > last_render_time_ms_ + (1000 * 1.1) / capability_.maxFPS && @@ -141,7 +130,7 @@ class TestVideoCaptureCallback int64_t last_render_time_ms_; int incoming_frames_; int timing_warnings_; - rtc::scoped_refptr last_frame_; + webrtc::scoped_refptr last_frame_; webrtc::VideoRotation rotate_frame_; }; @@ -156,16 +145,16 @@ class VideoCaptureTest : public ::testing::Test { ASSERT_GT(number_of_devices_, 0u); } - rtc::scoped_refptr OpenVideoCaptureDevice( + webrtc::scoped_refptr OpenVideoCaptureDevice( unsigned int device, - rtc::VideoSinkInterface* callback) { + webrtc::VideoSinkInterface* callback) { char device_name[256]; char unique_name[256]; EXPECT_EQ(0, device_info_->GetDeviceName(device, device_name, 256, unique_name, 256)); - rtc::scoped_refptr module( + webrtc::scoped_refptr module( VideoCaptureFactory::Create(unique_name)); if (module.get() == NULL) return nullptr; @@ -200,9 +189,9 @@ class VideoCaptureTest : public ::testing::Test { #endif TEST_F(VideoCaptureTest, MAYBE_CreateDelete) { for (int i = 0; i < 5; ++i) { - int64_t start_time = rtc::TimeMillis(); + int64_t start_time = webrtc::TimeMillis(); TestVideoCaptureCallback capture_observer; - rtc::scoped_refptr module( + webrtc::scoped_refptr module( OpenVideoCaptureDevice(0, &capture_observer)); ASSERT_TRUE(module.get() != NULL); @@ -219,17 +208,20 @@ TEST_F(VideoCaptureTest, MAYBE_CreateDelete) { ASSERT_NO_FATAL_FAILURE(StartCapture(module.get(), capability)); // Less than 4s to start the camera. - EXPECT_LE(rtc::TimeMillis() - start_time, 4000); + EXPECT_LE(webrtc::TimeMillis() - start_time, 4000); // Make sure 5 frames are captured. - EXPECT_TRUE_WAIT(capture_observer.incoming_frames() >= 5, kTimeOut); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return capture_observer.incoming_frames(); }, Ge(5), + {.timeout = webrtc::TimeDelta::Millis(kTimeOut)}), + webrtc::IsRtcOk()); - int64_t stop_time = rtc::TimeMillis(); + int64_t stop_time = webrtc::TimeMillis(); EXPECT_EQ(0, module->StopCapture()); EXPECT_FALSE(module->CaptureStarted()); // Less than 3s to stop the camera. - EXPECT_LE(rtc::TimeMillis() - stop_time, 3000); + EXPECT_LE(webrtc::TimeMillis() - stop_time, 3000); } } @@ -243,7 +235,7 @@ TEST_F(VideoCaptureTest, MAYBE_CreateDelete) { TEST_F(VideoCaptureTest, MAYBE_Capabilities) { TestVideoCaptureCallback capture_observer; - rtc::scoped_refptr module( + webrtc::scoped_refptr module( OpenVideoCaptureDevice(0, &capture_observer)); ASSERT_TRUE(module.get() != NULL); @@ -273,7 +265,10 @@ TEST_F(VideoCaptureTest, MAYBE_Capabilities) { capture_observer.SetExpectedCapability(capability); ASSERT_NO_FATAL_FAILURE(StartCapture(module.get(), capability)); // Make sure at least one frame is captured. 
- EXPECT_TRUE_WAIT(capture_observer.incoming_frames() >= 1, kTimeOut); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return capture_observer.incoming_frames(); }, Ge(1), + {.timeout = webrtc::TimeDelta::Millis(kTimeOut)}), + webrtc::IsRtcOk()); EXPECT_EQ(0, module->StopCapture()); } @@ -304,7 +299,7 @@ TEST_F(VideoCaptureTest, DISABLED_TestTwoCameras) { } TestVideoCaptureCallback capture_observer1; - rtc::scoped_refptr module1( + webrtc::scoped_refptr module1( OpenVideoCaptureDevice(0, &capture_observer1)); ASSERT_TRUE(module1.get() != NULL); VideoCaptureCapability capability1; @@ -319,7 +314,7 @@ TEST_F(VideoCaptureTest, DISABLED_TestTwoCameras) { capture_observer1.SetExpectedCapability(capability1); TestVideoCaptureCallback capture_observer2; - rtc::scoped_refptr module2( + webrtc::scoped_refptr module2( OpenVideoCaptureDevice(1, &capture_observer2)); ASSERT_TRUE(module1.get() != NULL); @@ -336,8 +331,14 @@ TEST_F(VideoCaptureTest, DISABLED_TestTwoCameras) { ASSERT_NO_FATAL_FAILURE(StartCapture(module1.get(), capability1)); ASSERT_NO_FATAL_FAILURE(StartCapture(module2.get(), capability2)); - EXPECT_TRUE_WAIT(capture_observer1.incoming_frames() >= 5, kTimeOut); - EXPECT_TRUE_WAIT(capture_observer2.incoming_frames() >= 5, kTimeOut); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return capture_observer1.incoming_frames(); }, Ge(5), + {.timeout = webrtc::TimeDelta::Millis(kTimeOut)}), + webrtc::IsRtcOk()); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return capture_observer2.incoming_frames(); }, Ge(5), + {.timeout = webrtc::TimeDelta::Millis(kTimeOut)}), + webrtc::IsRtcOk()); EXPECT_EQ(0, module2->StopCapture()); EXPECT_EQ(0, module1->StopCapture()); } @@ -350,7 +351,7 @@ TEST_F(VideoCaptureTest, DISABLED_TestTwoCameras) { #endif TEST_F(VideoCaptureTest, MAYBE_ConcurrentAccess) { TestVideoCaptureCallback capture_observer1; - rtc::scoped_refptr module1( + webrtc::scoped_refptr module1( OpenVideoCaptureDevice(0, &capture_observer1)); ASSERT_TRUE(module1.get() != NULL); VideoCaptureCapability capability; @@ -358,19 +359,25 @@ TEST_F(VideoCaptureTest, MAYBE_ConcurrentAccess) { capture_observer1.SetExpectedCapability(capability); TestVideoCaptureCallback capture_observer2; - rtc::scoped_refptr module2( + webrtc::scoped_refptr module2( OpenVideoCaptureDevice(0, &capture_observer2)); ASSERT_TRUE(module2.get() != NULL); capture_observer2.SetExpectedCapability(capability); // Starting module1 should work. ASSERT_NO_FATAL_FAILURE(StartCapture(module1.get(), capability)); - EXPECT_TRUE_WAIT(capture_observer1.incoming_frames() >= 5, kTimeOut); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return capture_observer1.incoming_frames(); }, Ge(5), + {.timeout = webrtc::TimeDelta::Millis(kTimeOut)}), + webrtc::IsRtcOk()); // When module1 is stopped, starting module2 for the same device should work. 
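
The EXPECT_TRUE_WAIT macro removed above is replaced throughout this test by the WaitUntil helper; a minimal sketch of the new polling pattern, reusing the same names that appear in this diff (capture_observer, kTimeOut):

    // Poll the lambda until the Ge(5) matcher is satisfied or the timeout
    // expires; IsRtcOk() turns a timeout into a test failure. This mirrors
    // the calls introduced by this patch.
    EXPECT_THAT(webrtc::WaitUntil(
                    [&] { return capture_observer.incoming_frames(); }, Ge(5),
                    {.timeout = webrtc::TimeDelta::Millis(kTimeOut)}),
                webrtc::IsRtcOk());
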
EXPECT_EQ(0, module1->StopCapture()); ASSERT_NO_FATAL_FAILURE(StartCapture(module2.get(), capability)); - EXPECT_TRUE_WAIT(capture_observer2.incoming_frames() >= 5, kTimeOut); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return capture_observer2.incoming_frames(); }, Ge(5), + {.timeout = webrtc::TimeDelta::Millis(kTimeOut)}), + webrtc::IsRtcOk()); EXPECT_EQ(0, module2->StopCapture()); } diff --git a/modules/video_capture/video_capture.h b/modules/video_capture/video_capture.h index eddc31414a..fe5c09bd87 100644 --- a/modules/video_capture/video_capture.h +++ b/modules/video_capture/video_capture.h @@ -18,7 +18,7 @@ namespace webrtc { -class VideoCaptureModule : public rtc::RefCountInterface { +class VideoCaptureModule : public RefCountInterface { public: // Interface for receiving information about available camera devices. class DeviceInfo { @@ -74,7 +74,7 @@ class VideoCaptureModule : public rtc::RefCountInterface { // Register capture data callback virtual void RegisterCaptureDataCallback( - rtc::VideoSinkInterface* dataCallback) = 0; + VideoSinkInterface* dataCallback) = 0; virtual void RegisterCaptureDataCallback( RawVideoSinkInterface* dataCallback) = 0; diff --git a/modules/video_capture/video_capture_factory.cc b/modules/video_capture/video_capture_factory.cc index 563ef5abd2..8354067f93 100644 --- a/modules/video_capture/video_capture_factory.cc +++ b/modules/video_capture/video_capture_factory.cc @@ -14,8 +14,8 @@ namespace webrtc { -rtc::scoped_refptr VideoCaptureFactory::Create( - const char* deviceUniqueIdUTF8) { +scoped_refptr VideoCaptureFactory::Create( + [[maybe_unused]] const char* deviceUniqueIdUTF8) { #if defined(WEBRTC_ANDROID) || defined(WEBRTC_MAC) return nullptr; #else @@ -23,9 +23,9 @@ rtc::scoped_refptr VideoCaptureFactory::Create( #endif } -rtc::scoped_refptr VideoCaptureFactory::Create( - VideoCaptureOptions* options, - const char* deviceUniqueIdUTF8) { +scoped_refptr VideoCaptureFactory::Create( + [[maybe_unused]] VideoCaptureOptions* options, + [[maybe_unused]] const char* deviceUniqueIdUTF8) { // This is only implemented on pure Linux and WEBRTC_LINUX is defined for // Android as well #if !defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID) @@ -45,7 +45,7 @@ VideoCaptureModule::DeviceInfo* VideoCaptureFactory::CreateDeviceInfo() { } VideoCaptureModule::DeviceInfo* VideoCaptureFactory::CreateDeviceInfo( - VideoCaptureOptions* options) { + [[maybe_unused]] VideoCaptureOptions* options) { // This is only implemented on pure Linux and WEBRTC_LINUX is defined for // Android as well #if !defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID) diff --git a/modules/video_capture/video_capture_factory.h b/modules/video_capture/video_capture_factory.h index 62b4067c3a..c24eaf559f 100644 --- a/modules/video_capture/video_capture_factory.h +++ b/modules/video_capture/video_capture_factory.h @@ -29,9 +29,9 @@ class RTC_EXPORT VideoCaptureFactory { // id - unique identifier of this video capture module object. // deviceUniqueIdUTF8 - name of the device. 
// Available names can be found by using GetDeviceName - static rtc::scoped_refptr Create( + static scoped_refptr Create( const char* deviceUniqueIdUTF8); - static rtc::scoped_refptr Create( + static scoped_refptr Create( VideoCaptureOptions* options, const char* deviceUniqueIdUTF8); diff --git a/modules/video_capture/video_capture_factory_null.cc b/modules/video_capture/video_capture_factory_null.cc index 7808d19851..77b7f4b75e 100644 --- a/modules/video_capture/video_capture_factory_null.cc +++ b/modules/video_capture/video_capture_factory_null.cc @@ -18,7 +18,7 @@ VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() { return nullptr; } -rtc::scoped_refptr VideoCaptureImpl::Create( +webrtc::scoped_refptr VideoCaptureImpl::Create( const char* device_id) { return nullptr; } diff --git a/modules/video_capture/video_capture_impl.cc b/modules/video_capture/video_capture_impl.cc index a1461fb687..06f9bd3165 100644 --- a/modules/video_capture/video_capture_impl.cc +++ b/modules/video_capture/video_capture_impl.cc @@ -75,11 +75,11 @@ int32_t VideoCaptureImpl::RotationInDegrees(VideoRotation rotation, VideoCaptureImpl::VideoCaptureImpl() : _deviceUniqueId(NULL), _requestedCapability(), - _lastProcessTimeNanos(rtc::TimeNanos()), - _lastFrameRateCallbackTimeNanos(rtc::TimeNanos()), + _lastProcessTimeNanos(TimeNanos()), + _lastFrameRateCallbackTimeNanos(TimeNanos()), _dataCallBack(NULL), _rawDataCallBack(NULL), - _lastProcessFrameTimeNanos(rtc::TimeNanos()), + _lastProcessFrameTimeNanos(TimeNanos()), _rotateFrame(kVideoRotation_0), apply_rotation_(false) { _requestedCapability.width = kDefaultWidth; @@ -97,7 +97,7 @@ VideoCaptureImpl::~VideoCaptureImpl() { } void VideoCaptureImpl::RegisterCaptureDataCallback( - rtc::VideoSinkInterface* dataCallBack) { + VideoSinkInterface* dataCallBack) { MutexLock lock(&api_lock_); RTC_DCHECK(!_rawDataCallBack); _dataCallBack = dataCallBack; @@ -186,7 +186,7 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame, // Setting absolute height (in case it was negative). // In Windows, the image starts bottom left, instead of top left. // Setting a negative source height, inverts the image (within LibYuv). - rtc::scoped_refptr buffer = I420Buffer::Create( + scoped_refptr buffer = I420Buffer::Create( target_width, target_height, stride_y, stride_uv, stride_uv); libyuv::RotationMode rotation_mode = libyuv::kRotate0; @@ -214,7 +214,7 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame, buffer.get()->StrideV(), 0, 0, // No Cropping width, height, target_width, target_height, rotation_mode, ConvertVideoType(frameInfo.videoType)); - if (conversionResult < 0) { + if (conversionResult != 0) { RTC_LOG(LS_ERROR) << "Failed to convert capture frame from type " << static_cast(frameInfo.videoType) << "to I420."; return -1; @@ -223,8 +223,8 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame, VideoFrame captureFrame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(0) - .set_timestamp_ms(rtc::TimeMillis()) + .set_rtp_timestamp(0) + .set_timestamp_ms(TimeMillis()) .set_rotation(!apply_rotation_ ? 
_rotateFrame : kVideoRotation_0) .build(); captureFrame.set_ntp_time_ms(captureTime); @@ -274,7 +274,7 @@ bool VideoCaptureImpl::GetApplyRotation() { void VideoCaptureImpl::UpdateFrameCount() { RTC_CHECK_RUNS_SERIALIZED(&capture_checker_); - if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0) { + if (_incomingFrameTimesNanos[0] / kNumNanosecsPerMicrosec == 0) { // first no shift } else { // shift @@ -282,7 +282,7 @@ void VideoCaptureImpl::UpdateFrameCount() { _incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i]; } } - _incomingFrameTimesNanos[0] = rtc::TimeNanos(); + _incomingFrameTimesNanos[0] = TimeNanos(); } uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns) { @@ -292,8 +292,7 @@ uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns) { int32_t nrOfFrames = 0; for (num = 1; num < (kFrameRateCountHistorySize - 1); ++num) { if (_incomingFrameTimesNanos[num] <= 0 || - (now_ns - _incomingFrameTimesNanos[num]) / - rtc::kNumNanosecsPerMillisec > + (now_ns - _incomingFrameTimesNanos[num]) / kNumNanosecsPerMillisec > kFrameRateHistoryWindowMs) { // don't use data older than 2sec break; } else { @@ -301,8 +300,8 @@ uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns) { } } if (num > 1) { - int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) / - rtc::kNumNanosecsPerMillisec; + int64_t diff = + (now_ns - _incomingFrameTimesNanos[num - 1]) / kNumNanosecsPerMillisec; if (diff > 0) { return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f); } diff --git a/modules/video_capture/video_capture_impl.h b/modules/video_capture/video_capture_impl.h index 5ec1fd4a83..3ec832f871 100644 --- a/modules/video_capture/video_capture_impl.h +++ b/modules/video_capture/video_capture_impl.h @@ -45,9 +45,9 @@ class RTC_EXPORT VideoCaptureImpl : public VideoCaptureModule { * deviceUniqueIdUTF8 - name of the device. Available names can be found by * using GetDeviceName */ - static rtc::scoped_refptr Create( + static scoped_refptr Create( const char* deviceUniqueIdUTF8); - static rtc::scoped_refptr Create( + static scoped_refptr Create( VideoCaptureOptions* options, const char* deviceUniqueIdUTF8); @@ -61,7 +61,7 @@ class RTC_EXPORT VideoCaptureImpl : public VideoCaptureModule { // Call backs void RegisterCaptureDataCallback( - rtc::VideoSinkInterface* dataCallback) override; + VideoSinkInterface* dataCallback) override; virtual void RegisterCaptureDataCallback( RawVideoSinkInterface* dataCallback) override; void DeRegisterCaptureDataCallback() override; @@ -92,7 +92,7 @@ class RTC_EXPORT VideoCaptureImpl : public VideoCaptureModule { SequenceChecker api_checker_; // RaceChecker for members that can be accessed on the API thread while // capture is not happening, and on a callback thread otherwise. - rtc::RaceChecker capture_checker_; + RaceChecker capture_checker_; // current Device unique name; char* _deviceUniqueId RTC_GUARDED_BY(api_checker_); Mutex api_lock_; @@ -115,7 +115,7 @@ class RTC_EXPORT VideoCaptureImpl : public VideoCaptureModule { // last time the frame rate callback function was called. 
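
As the comment above notes, capture_checker_ is a RaceChecker rather than a lock; a rough illustration of how members tagged RTC_GUARDED_BY(capture_checker_) are meant to be accessed, following the StartCapture change earlier in this diff (sketch only, not new API):

    // Assert that no other thread is concurrently inside a region serialized
    // on capture_checker_, then touch the guarded member.
    RTC_CHECK_RUNS_SERIALIZED(&capture_checker_);
    configured_capability_ = capability;
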
int64_t _lastFrameRateCallbackTimeNanos RTC_GUARDED_BY(capture_checker_); - rtc::VideoSinkInterface* _dataCallBack RTC_GUARDED_BY(api_lock_); + VideoSinkInterface* _dataCallBack RTC_GUARDED_BY(api_lock_); RawVideoSinkInterface* _rawDataCallBack RTC_GUARDED_BY(api_lock_); int64_t _lastProcessFrameTimeNanos RTC_GUARDED_BY(capture_checker_); diff --git a/modules/video_capture/video_capture_options.cc b/modules/video_capture/video_capture_options.cc index 203d0a604b..64cd6216e7 100644 --- a/modules/video_capture/video_capture_options.cc +++ b/modules/video_capture/video_capture_options.cc @@ -32,7 +32,7 @@ void VideoCaptureOptions::Init(Callback* callback) { #if defined(WEBRTC_USE_PIPEWIRE) if (allow_pipewire_) { pipewire_session_ = - rtc::make_ref_counted(); + webrtc::make_ref_counted(); pipewire_session_->Init(callback, pipewire_fd_); return; } @@ -46,7 +46,7 @@ void VideoCaptureOptions::Init(Callback* callback) { } #if defined(WEBRTC_USE_PIPEWIRE) -rtc::scoped_refptr +webrtc::scoped_refptr VideoCaptureOptions::pipewire_session() { return pipewire_session_; } diff --git a/modules/video_capture/video_capture_options.h b/modules/video_capture/video_capture_options.h index 6f72f7927e..d1d4ebe4b9 100644 --- a/modules/video_capture/video_capture_options.h +++ b/modules/video_capture/video_capture_options.h @@ -64,7 +64,7 @@ class RTC_EXPORT VideoCaptureOptions { bool allow_pipewire() const { return allow_pipewire_; } void set_allow_pipewire(bool allow) { allow_pipewire_ = allow; } void set_pipewire_fd(int fd) { pipewire_fd_ = fd; } - rtc::scoped_refptr pipewire_session(); + webrtc::scoped_refptr pipewire_session(); #endif private: @@ -74,7 +74,7 @@ class RTC_EXPORT VideoCaptureOptions { #if defined(WEBRTC_USE_PIPEWIRE) bool allow_pipewire_ = false; int pipewire_fd_ = kInvalidPipeWireFd; - rtc::scoped_refptr pipewire_session_; + webrtc::scoped_refptr pipewire_session_; #endif }; diff --git a/modules/video_capture/windows/device_info_ds.cc b/modules/video_capture/windows/device_info_ds.cc index d0d274cb02..c4bef2d6ca 100644 --- a/modules/video_capture/windows/device_info_ds.cc +++ b/modules/video_capture/windows/device_info_ds.cc @@ -74,7 +74,7 @@ DeviceInfoDS::DeviceInfoDS() RTC_DLOG(LS_INFO) << __FUNCTION__ << ": CoInitializeEx(NULL, COINIT_APARTMENTTHREADED)" " => RPC_E_CHANGED_MODE, error 0x" - << rtc::ToHex(hr); + << webrtc::ToHex(hr); } } } @@ -92,7 +92,7 @@ int32_t DeviceInfoDS::Init() { IID_ICreateDevEnum, (void**)&_dsDevEnum); if (hr != NOERROR) { RTC_LOG(LS_INFO) << "Failed to create CLSID_SystemDeviceEnum, error 0x" - << rtc::ToHex(hr); + << webrtc::ToHex(hr); return -1; } return 0; @@ -131,7 +131,7 @@ int32_t DeviceInfoDS::GetDeviceInfo(uint32_t deviceNumber, &_dsMonikerDevEnum, 0); if (hr != NOERROR) { RTC_LOG(LS_INFO) << "Failed to enumerate CLSID_SystemDeviceEnum, error 0x" - << rtc::ToHex(hr) << ". No webcam exist?"; + << webrtc::ToHex(hr) << ". No webcam exist?"; return 0; } @@ -223,7 +223,7 @@ IBaseFilter* DeviceInfoDS::GetDeviceFilter(const char* deviceUniqueIdUTF8, &_dsMonikerDevEnum, 0); if (hr != NOERROR) { RTC_LOG(LS_INFO) << "Failed to enumerate CLSID_SystemDeviceEnum, error 0x" - << rtc::ToHex(hr) << ". No webcam exist?"; + << webrtc::ToHex(hr) << ". 
No webcam exist?"; return 0; } _dsMonikerDevEnum->Reset(); diff --git a/modules/video_capture/windows/help_functions_ds.h b/modules/video_capture/windows/help_functions_ds.h index 29479157a8..aaec288ee5 100644 --- a/modules/video_capture/windows/help_functions_ds.h +++ b/modules/video_capture/windows/help_functions_ds.h @@ -62,14 +62,14 @@ void FreeMediaType(AM_MEDIA_TYPE* media_type); HRESULT CopyMediaType(AM_MEDIA_TYPE* target, const AM_MEDIA_TYPE* source); // Helper function to make using scoped_refptr with COM interface pointers -// a little less awkward. rtc::scoped_refptr doesn't support the & operator +// a little less awkward. webrtc::scoped_refptr doesn't support the & operator // or a way to receive values via an out ptr. // The function is intentionally not called QueryInterface to make things less // confusing for the compiler to figure out what the caller wants to do when // called from within the context of a class that also implements COM // interfaces. template -HRESULT GetComInterface(IUnknown* object, rtc::scoped_refptr* ptr) { +HRESULT GetComInterface(IUnknown* object, webrtc::scoped_refptr* ptr) { // This helper function is not meant to magically free ptr. If we do that // we add code bloat to most places where it's not needed and make the code // less readable since it's not clear at the call site that the pointer @@ -99,7 +99,7 @@ class ComRefCount : public T { STDMETHOD_(ULONG, Release)() override { const auto status = ref_count_.DecRef(); - if (status == rtc::RefCountReleaseStatus::kDroppedLastRef) { + if (status == RefCountReleaseStatus::kDroppedLastRef) { delete this; return 0; } diff --git a/modules/video_capture/windows/sink_filter_ds.cc b/modules/video_capture/windows/sink_filter_ds.cc index 290a45affb..33e7a269bb 100644 --- a/modules/video_capture/windows/sink_filter_ds.cc +++ b/modules/video_capture/windows/sink_filter_ds.cc @@ -92,7 +92,7 @@ class EnumPins : public IEnumPins { return S_OK; } - rtc::scoped_refptr pin_; + webrtc::scoped_refptr pin_; int pos_ = 0; }; @@ -143,7 +143,7 @@ BYTE* AllocMediaTypeFormatBuffer(AM_MEDIA_TYPE* media_type, ULONG length) { } void GetSampleProperties(IMediaSample* sample, AM_SAMPLE2_PROPERTIES* props) { - rtc::scoped_refptr sample2; + webrtc::scoped_refptr sample2; if (SUCCEEDED(GetComInterface(sample, &sample2))) { sample2->GetProperties(sizeof(*props), reinterpret_cast(props)); return; @@ -197,7 +197,7 @@ bool TranslateMediaTypeToVideoCaptureCapability( RTC_LOG(LS_INFO) << "TranslateMediaTypeToVideoCaptureCapability width:" << bih->biWidth << " height:" << bih->biHeight - << " Compression:0x" << rtc::ToHex(bih->biCompression); + << " Compression:0x" << webrtc::ToHex(bih->biCompression); const GUID& sub_type = media_type->subtype; if (sub_type == MEDIASUBTYPE_MJPG && @@ -255,7 +255,7 @@ class MediaTypesEnum : public IEnumMediaTypes { } } else { RTC_LOG(LS_WARNING) << "Unsupported video type: " - << rtc::ToString( + << absl::StrCat( static_cast(capability_.videoType)) << ", using default preference list."; } @@ -745,7 +745,7 @@ CaptureInputPin::Receive(IMediaSample* media_sample) { if (!capture_thread_id_) { // Make sure we set the thread name only once. capture_thread_id_ = GetCurrentThreadId(); - rtc::SetCurrentThreadName("webrtc_video_capture"); + webrtc::SetCurrentThreadName("webrtc_video_capture"); } AM_SAMPLE2_PROPERTIES sample_props = {}; @@ -900,7 +900,7 @@ CaptureSinkFilter::JoinFilterGraph(IFilterGraph* graph, LPCWSTR name) { if (info_.pGraph) { // make sure we don't hold on to the reference we may receive. 
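
For context, the GetComInterface helper documented above is used in the style of GetSampleProperties from this same file; a minimal sketch, assuming `sample` is an IMediaSample* as in that function:

    // Receive a COM interface into a scoped_refptr without needing operator&.
    AM_SAMPLE2_PROPERTIES props = {};
    webrtc::scoped_refptr<IMediaSample2> sample2;
    if (SUCCEEDED(GetComInterface(sample, &sample2))) {
      sample2->GetProperties(sizeof(props), reinterpret_cast<BYTE*>(&props));
    }
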
// Note that this assumes the same object identity, but so be it. - rtc::scoped_refptr sink; + webrtc::scoped_refptr sink; GetComInterface(info_.pGraph, &sink); sink_ = sink.get(); } diff --git a/modules/video_capture/windows/sink_filter_ds.h b/modules/video_capture/windows/sink_filter_ds.h index b0fabda3cd..4cc5670e6f 100644 --- a/modules/video_capture/windows/sink_filter_ds.h +++ b/modules/video_capture/windows/sink_filter_ds.h @@ -97,8 +97,8 @@ class CaptureInputPin : public IMemInputPin, public IPin { // running), otherwise accessed on the capture thread. VideoCaptureCapability resulting_capability_; DWORD capture_thread_id_ = 0; - rtc::scoped_refptr allocator_ RTC_GUARDED_BY(main_checker_); - rtc::scoped_refptr receive_pin_ RTC_GUARDED_BY(main_checker_); + webrtc::scoped_refptr allocator_ RTC_GUARDED_BY(main_checker_); + webrtc::scoped_refptr receive_pin_ RTC_GUARDED_BY(main_checker_); std::atomic_bool flushing_{false}; std::atomic_bool runtime_error_{false}; // Holds a referenceless pointer to the owning filter, the name and @@ -148,7 +148,7 @@ class CaptureSinkFilter : public IBaseFilter { private: SequenceChecker main_checker_; - const rtc::scoped_refptr> input_pin_; + const webrtc::scoped_refptr> input_pin_; VideoCaptureImpl* const capture_observer_; FILTER_INFO info_ RTC_GUARDED_BY(main_checker_) = {}; // Set/cleared in JoinFilterGraph. The filter must be stopped (no capture) diff --git a/modules/video_capture/windows/video_capture_ds.h b/modules/video_capture/windows/video_capture_ds.h index d6897155b0..5157115045 100644 --- a/modules/video_capture/windows/video_capture_ds.h +++ b/modules/video_capture/windows/video_capture_ds.h @@ -60,7 +60,7 @@ class VideoCaptureDS : public VideoCaptureImpl { IBaseFilter* _captureFilter RTC_GUARDED_BY(api_checker_); IGraphBuilder* _graphBuilder RTC_GUARDED_BY(api_checker_); IMediaControl* _mediaControl RTC_GUARDED_BY(api_checker_); - rtc::scoped_refptr sink_filter_ + webrtc::scoped_refptr sink_filter_ RTC_GUARDED_BY(api_checker_); IPin* _inputSendPin RTC_GUARDED_BY(api_checker_); IPin* _outputCapturePin RTC_GUARDED_BY(api_checker_); diff --git a/modules/video_capture/windows/video_capture_factory_windows.cc b/modules/video_capture/windows/video_capture_factory_windows.cc index 481326c1d2..8cf328e918 100644 --- a/modules/video_capture/windows/video_capture_factory_windows.cc +++ b/modules/video_capture/windows/video_capture_factory_windows.cc @@ -20,13 +20,13 @@ VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() { return DeviceInfoDS::Create(); } -rtc::scoped_refptr VideoCaptureImpl::Create( +webrtc::scoped_refptr VideoCaptureImpl::Create( const char* device_id) { if (device_id == nullptr) return nullptr; // TODO(tommi): Use Media Foundation implementation for Vista and up. - auto capture = rtc::make_ref_counted(); + auto capture = webrtc::make_ref_counted(); if (capture->Init(device_id) != 0) { return nullptr; } diff --git a/modules/video_coding/BUILD.gn b/modules/video_coding/BUILD.gn index 738d3d4edf..d308040153 100644 --- a/modules/video_coding/BUILD.gn +++ b/modules/video_coding/BUILD.gn @@ -6,6 +6,7 @@ # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. 
+import("//build/config/linux/pkg_config.gni") import("//third_party/libaom/options.gni") import("../../webrtc.gni") @@ -20,19 +21,15 @@ rtc_library("encoded_frame") { ":video_codec_interface", "../../api/video:encoded_image", "../../api/video:video_frame", + "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", "../../modules:module_api_public", "../../modules/rtp_rtcp:rtp_video_header", "../../rtc_base:checks", "../../rtc_base/experiments:alr_experiment", - "../../rtc_base/experiments:rtt_mult_experiment", "../../rtc_base/system:rtc_export", "../../system_wrappers", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/types:optional", - "//third_party/abseil-cpp/absl/types:variant", - ] } rtc_library("chain_diff_calculator") { @@ -44,10 +41,7 @@ rtc_library("chain_diff_calculator") { deps = [ "../../rtc_base:checks", "../../rtc_base:logging", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -62,11 +56,8 @@ rtc_library("frame_dependencies_calculator") { "../../common_video/generic_frame_descriptor", "../../rtc_base:checks", "../../rtc_base:logging", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -89,8 +80,10 @@ rtc_library("nack_requester") { "../../rtc_base:checks", "../../rtc_base:logging", "../../rtc_base:macromagic", + "../../rtc_base:mod_ops", "../../rtc_base:rtc_numerics", "../../rtc_base/experiments:field_trial_parser", + "../../rtc_base/system:no_unique_address", "../../rtc_base/task_utils:repeating_task", "../../system_wrappers", ] @@ -107,6 +100,7 @@ rtc_library("packet_buffer") { "../../api:rtp_packet_info", "../../api/units:timestamp", "../../api/video:encoded_image", + "../../api/video:video_frame", "../../api/video:video_frame_type", "../../common_video", "../../rtc_base:checks", @@ -117,25 +111,24 @@ rtc_library("packet_buffer") { "../../rtc_base:rtc_numerics", "../rtp_rtcp:rtp_rtcp_format", "../rtp_rtcp:rtp_video_header", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/types:variant", ] } -rtc_library("h264_packet_buffer") { +rtc_library("h26x_packet_buffer") { sources = [ - "h264_packet_buffer.cc", - "h264_packet_buffer.h", + "h26x_packet_buffer.cc", + "h26x_packet_buffer.h", ] deps = [ ":codec_globals_headers", + ":h264_sprop_parameter_sets", ":packet_buffer", "../../api:array_view", "../../api:rtp_packet_info", "../../api/units:timestamp", "../../api/video:encoded_image", + "../../api/video:video_frame", "../../api/video:video_frame_type", "../../common_video", "../../rtc_base:checks", @@ -144,10 +137,8 @@ rtc_library("h264_packet_buffer") { "../../rtc_base:rtc_numerics", "../rtp_rtcp:rtp_rtcp_format", "../rtp_rtcp:rtp_video_header", - ] - absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -157,10 +148,25 @@ rtc_library("frame_helpers") { "frame_helpers.h", ] deps = [ + "../../api/units:time_delta", + "../../api/units:timestamp", "../../api/video:encoded_frame", + "../../api/video:encoded_image", + "../../rtc_base:checks", + "../../rtc_base:logging", + "//third_party/abseil-cpp/absl/container:inlined_vector", + ] +} + +rtc_library("h264_sprop_parameter_sets") { + sources = [ + "h264_sprop_parameter_sets.cc", + "h264_sprop_parameter_sets.h", + ] + deps 
= [ + "../../rtc_base:base64", "../../rtc_base:logging", ] - absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector" ] } rtc_library("video_coding") { @@ -173,8 +179,6 @@ rtc_library("video_coding") { "fec_rate_table.h", "generic_decoder.cc", "generic_decoder.h", - "h264_sprop_parameter_sets.cc", - "h264_sprop_parameter_sets.h", "h264_sps_pps_tracker.cc", "h264_sps_pps_tracker.h", "include/video_codec_initializer.h", @@ -204,6 +208,7 @@ rtc_library("video_coding") { ":codec_globals_headers", ":encoded_frame", ":frame_helpers", + ":h264_sprop_parameter_sets", ":video_codec_interface", ":video_coding_utility", ":webrtc_vp8_scalability", @@ -218,6 +223,7 @@ rtc_library("video_coding") { "../../api:rtp_packet_info", "../../api:scoped_refptr", "../../api:sequence_checker", + "../../api/environment", "../../api/task_queue", "../../api/units:data_rate", "../../api/units:data_size", @@ -227,21 +233,30 @@ rtc_library("video_coding") { "../../api/video:builtin_video_bitrate_allocator_factory", "../../api/video:encoded_frame", "../../api/video:encoded_image", + "../../api/video:render_resolution", "../../api/video:video_adaptation", "../../api/video:video_adaptation", "../../api/video:video_bitrate_allocation", "../../api/video:video_bitrate_allocator", "../../api/video:video_bitrate_allocator_factory", + "../../api/video:video_codec_constants", "../../api/video:video_frame", "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", + "../../api/video_codecs:scalability_mode", "../../api/video_codecs:video_codecs_api", "../../common_video", + "../../common_video:corruption_score_calculator", + "../../common_video:frame_instrumentation_data", + "../../rtc_base:base64", + "../../rtc_base:buffer", + "../../rtc_base:byte_buffer", "../../rtc_base:checks", "../../rtc_base:copy_on_write_buffer", "../../rtc_base:event_tracer", "../../rtc_base:logging", "../../rtc_base:macromagic", + "../../rtc_base:mod_ops", "../../rtc_base:rtc_event", "../../rtc_base:rtc_numerics", "../../rtc_base:safe_conversions", @@ -252,15 +267,13 @@ rtc_library("video_coding") { "../../rtc_base/experiments:field_trial_parser", "../../rtc_base/experiments:min_video_bitrate_experiment", "../../rtc_base/experiments:rate_control_settings", - "../../rtc_base/experiments:rtt_mult_experiment", "../../rtc_base/synchronization:mutex", "../../rtc_base/system:no_unique_address", "../../rtc_base/task_utils:repeating_task", - "../../rtc_base/third_party/base64", "../../system_wrappers", - "../../system_wrappers:field_trial", "../../system_wrappers:metrics", "../../video/config:encoder_config", + "../../video/corruption_detection:frame_instrumentation_evaluation", "../rtp_rtcp", "../rtp_rtcp:rtp_rtcp_format", "../rtp_rtcp:rtp_video_header", @@ -270,13 +283,8 @@ rtc_library("video_coding") { "timing:jitter_estimator", "timing:rtt_filter", "timing:timing_module", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/types:optional", - "//third_party/abseil-cpp/absl/types:variant", ] } @@ -287,19 +295,25 @@ rtc_library("video_codec_interface") { "include/video_codec_interface.h", "include/video_coding_defines.h", "include/video_error_codes.h", + "include/video_error_codes_utils.cc", + "include/video_error_codes_utils.h", "video_coding_defines.cc", ] deps = [ ":codec_globals_headers", + "../../api/transport/rtp:dependency_descriptor", + 
"../../api/units:time_delta", "../../api/video:video_frame", + "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", "../../api/video_codecs:scalability_mode", "../../api/video_codecs:video_codecs_api", "../../common_video", + "../../common_video:frame_instrumentation_data", "../../common_video/generic_frame_descriptor", + "../../rtc_base:checks", "../../rtc_base/system:rtc_export", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("video_coding_legacy") { @@ -321,14 +335,16 @@ rtc_library("video_coding_legacy") { "../../api:rtp_headers", "../../api:rtp_packet_info", "../../api:sequence_checker", - "../../api/transport:field_trial_based_config", + "../../api/environment", "../../api/units:timestamp", "../../api/video:encoded_image", + "../../api/video:render_resolution", "../../api/video:video_frame", "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", "../../common_video", + "../../common_video:corruption_score_calculator", "../../modules/rtp_rtcp:rtp_video_header", "../../rtc_base:checks", "../../rtc_base:event_tracer", @@ -337,7 +353,6 @@ rtc_library("video_coding_legacy") { "../../rtc_base:one_time_event", "../../rtc_base:rtc_event", "../../rtc_base:safe_conversions", - "../../rtc_base/memory:always_valid_pointer", "../../rtc_base/synchronization:mutex", "../../system_wrappers", "../rtp_rtcp:rtp_rtcp_format", @@ -354,11 +369,6 @@ rtc_library("video_coding_legacy") { "timing:jitter_estimator", "timing:timing_module", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/types:optional", - "//third_party/abseil-cpp/absl/types:variant", - ] } rtc_source_set("codec_globals_headers") { @@ -373,11 +383,19 @@ rtc_source_set("codec_globals_headers") { deps = [ "../../rtc_base:checks" ] } +if (rtc_use_h264 && rtc_system_openh264) { + pkg_config("openh264") { + packages = [ "openh264" ] + } +} + rtc_library("video_coding_utility") { visibility = [ "*" ] sources = [ "utility/bandwidth_quality_scaler.cc", "utility/bandwidth_quality_scaler.h", + "utility/corruption_detection_settings_generator.cc", + "utility/corruption_detection_settings_generator.h", "utility/decoded_frames_history.cc", "utility/decoded_frames_history.h", "utility/frame_dropper.cc", @@ -411,7 +429,11 @@ rtc_library("video_coding_utility") { "../../api:field_trials_view", "../../api:scoped_refptr", "../../api:sequence_checker", + "../../api/environment", + "../../api/task_queue", + "../../api/units:data_rate", "../../api/units:time_delta", + "../../api/video:corruption_detection_filter_settings", "../../api/video:encoded_frame", "../../api/video:encoded_image", "../../api/video:video_adaptation", @@ -419,19 +441,20 @@ rtc_library("video_coding_utility") { "../../api/video:video_bitrate_allocator", "../../api/video:video_codec_constants", "../../api/video:video_frame", + "../../api/video:video_frame_type", "../../api/video_codecs:video_codecs_api", "../../common_video", "../../modules/rtp_rtcp", "../../rtc_base:bitstream_reader", "../../rtc_base:checks", "../../rtc_base:logging", + "../../rtc_base:macromagic", "../../rtc_base:rate_statistics", "../../rtc_base:refcount", "../../rtc_base:rtc_numerics", "../../rtc_base:stringutils", "../../rtc_base:timeutils", "../../rtc_base:weak_ptr", - "../../rtc_base/experiments:bandwidth_quality_scaler_settings", "../../rtc_base/experiments:encoder_info_settings", "../../rtc_base/experiments:quality_scaler_settings", 
"../../rtc_base/experiments:quality_scaling_experiment", @@ -441,15 +464,13 @@ rtc_library("video_coding_utility") { "../../rtc_base/system:arch", "../../rtc_base/system:file_wrapper", "../../rtc_base/system:no_unique_address", + "../../rtc_base/system:rtc_export", "../../rtc_base/task_utils:repeating_task", - "../../system_wrappers:field_trial", + "../../video/config:encoder_config", "../rtp_rtcp:rtp_rtcp_format", "svc:scalability_mode_util", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/numeric:bits", - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -468,17 +489,25 @@ rtc_library("webrtc_h264") { defines = [] deps = [ + ":codec_globals_headers", ":video_codec_interface", ":video_coding_utility", + "../../api:scoped_refptr", + "../../api/environment", "../../api/transport/rtp:dependency_descriptor", + "../../api/units:data_rate", + "../../api/video:encoded_image", + "../../api/video:render_resolution", + "../../api/video:video_bitrate_allocation", + "../../api/video:video_bitrate_allocator", "../../api/video:video_codec_constants", "../../api/video:video_frame", "../../api/video:video_frame_i010", + "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", "../../api/video_codecs:scalability_mode", "../../api/video_codecs:video_codecs_api", "../../common_video", - "../../media:codec", "../../media:media_constants", "../../media:rtc_media_base", "../../rtc_base:checks", @@ -486,59 +515,28 @@ rtc_library("webrtc_h264") { "../../rtc_base:logging", "../../rtc_base:timeutils", "../../rtc_base/system:rtc_export", - "../../system_wrappers:field_trial", "../../system_wrappers:metrics", "svc:scalability_structures", "svc:scalable_video_controller", - "//third_party/libyuv", - ] - absl_deps = [ + "//third_party/abseil-cpp/absl/base:nullability", "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/libyuv", ] if (rtc_use_h264) { - deps += [ - "//third_party/ffmpeg", - "//third_party/openh264:encoder", - ] + deps += [ "//third_party/ffmpeg" ] + if (rtc_system_openh264) { + configs += [ ":openh264" ] + } else { + deps += [ "//third_party/openh264:encoder" ] + } if (!build_with_mozilla) { deps += [ "../../media:rtc_media_base" ] } } } -rtc_library("webrtc_multiplex") { - sources = [ - "codecs/multiplex/augmented_video_frame_buffer.cc", - "codecs/multiplex/include/augmented_video_frame_buffer.h", - "codecs/multiplex/include/multiplex_decoder_adapter.h", - "codecs/multiplex/include/multiplex_encoder_adapter.h", - "codecs/multiplex/multiplex_decoder_adapter.cc", - "codecs/multiplex/multiplex_encoded_image_packer.cc", - "codecs/multiplex/multiplex_encoded_image_packer.h", - "codecs/multiplex/multiplex_encoder_adapter.cc", - ] - - deps = [ - ":video_codec_interface", - ":video_coding_utility", - "../../api:fec_controller_api", - "../../api:scoped_refptr", - "../../api/video:encoded_image", - "../../api/video:video_frame", - "../../api/video:video_rtp_headers", - "../../api/video_codecs:video_codecs_api", - "../../common_video", - "../../media:rtc_media_base", - "../../rtc_base:checks", - "../../rtc_base:logging", - "../../rtc_base/synchronization:mutex", - "../rtp_rtcp:rtp_rtcp_format", - ] -} - # This target defines a bare-bones interface towards libvpx, used by the # VP8 and VP9 wrappers below. 
rtc_library("webrtc_libvpx_interface") { @@ -582,9 +580,18 @@ rtc_library("webrtc_vp8") { ":webrtc_vp8_scalability", ":webrtc_vp8_temporal_layers", "../../api:fec_controller_api", + "../../api:field_trials_view", "../../api:scoped_refptr", + "../../api/environment", + "../../api/units:time_delta", + "../../api/units:timestamp", "../../api/video:encoded_image", + "../../api/video:render_resolution", + "../../api/video:video_bitrate_allocation", + "../../api/video:video_bitrate_allocator", + "../../api/video:video_codec_constants", "../../api/video:video_frame", + "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", "../../api/video_codecs:scalability_mode", "../../api/video_codecs:video_codecs_api", @@ -594,20 +601,18 @@ rtc_library("webrtc_vp8") { "../../rtc_base:event_tracer", "../../rtc_base:logging", "../../rtc_base:rtc_numerics", + "../../rtc_base:safe_conversions", "../../rtc_base:timeutils", - "../../rtc_base/experiments:cpu_speed_experiment", "../../rtc_base/experiments:encoder_info_settings", "../../rtc_base/experiments:field_trial_parser", "../../rtc_base/experiments:rate_control_settings", - "../../system_wrappers:field_trial", "../../system_wrappers:metrics", "svc:scalability_mode_util", - "//third_party/libyuv", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/base:nullability", + "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/strings:string_view", + "//third_party/libyuv", ] if (rtc_build_libvpx) { deps += [ rtc_libvpx_dir ] @@ -630,7 +635,6 @@ rtc_library("webrtc_vp8_temporal_layers") { "codecs/vp8/include/temporal_layers_checker.h", "codecs/vp8/screenshare_layers.cc", "codecs/vp8/screenshare_layers.h", - "codecs/vp8/temporal_layers.h", "codecs/vp8/temporal_layers_checker.cc", ] @@ -639,17 +643,20 @@ rtc_library("webrtc_vp8_temporal_layers") { ":video_codec_interface", ":video_coding_utility", "../../api:fec_controller_api", + "../../api/transport/rtp:dependency_descriptor", + "../../api/video:video_codec_constants", "../../api/video_codecs:video_codecs_api", + "../../common_video/generic_frame_descriptor", "../../rtc_base:checks", "../../rtc_base:logging", "../../rtc_base:macromagic", "../../rtc_base:rate_statistics", "../../rtc_base:rtc_numerics", "../../rtc_base:timeutils", - "../../system_wrappers:field_trial", "../../system_wrappers:metrics", + "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } # This target includes VP9 files that may be used for any VP9 codec, internal SW or external HW. 
@@ -665,9 +672,11 @@ rtc_library("webrtc_vp9_helpers") { "../../api/video:video_bitrate_allocation", "../../api/video:video_bitrate_allocator", "../../api/video:video_codec_constants", + "../../api/video:video_frame", + "../../api/video_codecs:scalability_mode", "../../api/video_codecs:video_codecs_api", "../../common_video", - "../../media:rtc_media_base", + "../../media:video_common", "../../rtc_base:checks", "../../rtc_base:logging", "../../rtc_base/experiments:stable_target_rate_experiment", @@ -675,7 +684,6 @@ rtc_library("webrtc_vp9_helpers") { "svc:scalability_structures", "svc:scalable_video_controller", ] - absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector" ] } rtc_library("webrtc_vp9") { @@ -693,49 +701,52 @@ rtc_library("webrtc_vp9") { ] deps = [ + ":codec_globals_headers", ":video_codec_interface", ":video_coding_utility", ":webrtc_libvpx_interface", - ":webrtc_vp9_helpers", + "../../api:array_view", "../../api:fec_controller_api", "../../api:field_trials_view", "../../api:refcountedbase", "../../api:scoped_refptr", - "../../api/transport:field_trial_based_config", + "../../api/environment", + "../../api/transport/rtp:dependency_descriptor", + "../../api/video:encoded_image", + "../../api/video:render_resolution", + "../../api/video:video_bitrate_allocation", + "../../api/video:video_bitrate_allocator", + "../../api/video:video_codec_constants", "../../api/video:video_frame", "../../api/video:video_frame_i010", + "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", "../../api/video_codecs:scalability_mode", "../../api/video_codecs:video_codecs_api", "../../common_video", - "../../media:codec", - "../../media:media_constants", - "../../media:rtc_media_base", "../../rtc_base:buffer", "../../rtc_base:checks", "../../rtc_base:event_tracer", "../../rtc_base:logging", + "../../rtc_base:macromagic", + "../../rtc_base:safe_conversions", "../../rtc_base:stringutils", - "../../rtc_base:timeutils", "../../rtc_base/containers:flat_map", "../../rtc_base/experiments:encoder_info_settings", "../../rtc_base/experiments:field_trial_parser", "../../rtc_base/experiments:rate_control_settings", "../../rtc_base/synchronization:mutex", - "../../system_wrappers:field_trial", "../rtp_rtcp:rtp_rtcp_format", "svc:scalability_mode_util", "svc:scalability_structures", "svc:scalable_video_controller", + "svc:simulcast_to_svc_converter", "svc:svc_rate_allocator", - "//third_party/libyuv", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/base:nullability", "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings", + "//third_party/libyuv", ] if (rtc_build_libvpx) { deps += [ rtc_libvpx_dir ] @@ -754,7 +765,6 @@ if (rtc_include_tests) { "../../api/video_codecs:video_codecs_api", "../../modules/utility:utility", "../../rtc_base:checks", - "../../rtc_base:ignore_wundef", "../../sdk/android:internal_jni", "../../sdk/android:native_api_base", "../../sdk/android:native_api_codecs", @@ -775,7 +785,6 @@ if (rtc_include_tests) { "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", "../../media:rtc_audio_video", - "../../media:rtc_media_base", "../../sdk:native_api", "../../sdk:peerconnectionfactory_base_objc", "../../sdk:videocodec_objc", @@ -796,7 +805,9 @@ if (rtc_include_tests) { 
"../../api:create_frame_generator", "../../api:frame_generator_api", "../../api/transport/rtp:dependency_descriptor", + "../../api/units:timestamp", "../../api/video:encoded_image", + "../../api/video:render_resolution", "../../api/video:video_frame", "../../api/video:video_frame_type", "../../api/video_codecs:video_codecs_api", @@ -817,9 +828,14 @@ if (rtc_include_tests) { ":video_coding_utility", "../../api:mock_video_decoder", "../../api:mock_video_encoder", + "../../api:scoped_refptr", "../../api:simulcast_test_fixture_api", + "../../api/environment", + "../../api/environment:environment_factory", "../../api/video:encoded_image", + "../../api/video:video_bitrate_allocator", "../../api/video:video_frame", + "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", "../../common_video", @@ -849,13 +865,15 @@ if (rtc_include_tests) { "../../api:frame_generator_api", "../../api:scoped_refptr", "../../api:sequence_checker", - "../../api:video_codec_stats_api", - "../../api:video_codec_tester_api", "../../api:videocodec_test_fixture_api", + "../../api:videocodec_test_stats_api", + "../../api/environment", + "../../api/environment:environment_factory", "../../api/numerics:numerics", "../../api/task_queue", "../../api/task_queue:default_task_queue_factory", "../../api/test/metrics:global_metrics_logger_and_exporter", + "../../api/units:time_delta", "../../api/video:builtin_video_bitrate_allocator_factory", "../../api/video:encoded_image", "../../api/video:resolution", @@ -864,6 +882,7 @@ if (rtc_include_tests) { "../../api/video:video_bitrate_allocator_factory", "../../api/video:video_codec_constants", "../../api/video:video_frame", + "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", "../../api/video_codecs:video_codecs_api", "../../common_video", @@ -876,12 +895,12 @@ if (rtc_include_tests) { "../../rtc_base/synchronization:mutex", "../../rtc_base/system:no_unique_address", "../../test:test_support", + "../../test:video_frame_writer", "../../test:video_test_common", "../../test:video_test_support", "../rtp_rtcp:rtp_rtcp_format", "//third_party/libyuv", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } video_coding_modules_tests_resources = [] @@ -931,12 +950,22 @@ if (rtc_include_tests) { ":videocodec_test_stats_impl", ":webrtc_vp9_helpers", "../../api:array_view", + "../../api:make_ref_counted", + "../../api:rtp_parameters", "../../api:videocodec_test_fixture_api", + "../../api:videocodec_test_stats_api", + "../../api/environment", + "../../api/environment:environment_factory", "../../api/test/metrics:global_metrics_logger_and_exporter", "../../api/test/metrics:metric", "../../api/test/video:function_video_factory", "../../api/transport:field_trial_based_config", + "../../api/video:encoded_image", + "../../api/video:resolution", "../../api/video:video_bitrate_allocation", + "../../api/video:video_codec_constants", + "../../api/video:video_frame", + "../../api/video:video_frame_type", "../../api/video_codecs:video_codecs_api", "../../api/video_codecs:video_decoder_factory_template", "../../api/video_codecs:video_decoder_factory_template_dav1d_adapter", @@ -948,28 +977,26 @@ if (rtc_include_tests) { "../../api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter", "../../api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter", "../../api/video_codecs:video_encoder_factory_template_open_h264_adapter", - "../../call:video_stream_api", "../../common_video", 
"../../media:media_constants", "../../media:rtc_audio_video", - "../../media:rtc_media_base", "../../rtc_base:checks", "../../rtc_base:logging", "../../rtc_base:rtc_base_tests_utils", "../../rtc_base:stringutils", "../../rtc_base:task_queue_for_test", "../../rtc_base:timeutils", + "../../rtc_base/system:file_wrapper", "../../system_wrappers", "../../test:fileutils", "../../test:test_support", + "../../test:video_frame_writer", "../../test:video_test_common", "../../test:video_test_support", "../../video/config:encoder_config", "../../video/config:streams_config", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -984,6 +1011,9 @@ if (rtc_include_tests) { "../../api/numerics", "../../api/test/metrics:global_metrics_logger_and_exporter", "../../api/test/metrics:metric", + "../../api/units:data_rate", + "../../api/units:frequency", + "../../api/video:video_frame_type", "../../rtc_base:checks", "../../rtc_base:rtc_numerics", "../../rtc_base:stringutils", @@ -992,46 +1022,6 @@ if (rtc_include_tests) { ] } - rtc_library("video_codec_tester") { - testonly = true - sources = [ - "codecs/test/video_codec_analyzer.cc", - "codecs/test/video_codec_analyzer.h", - "codecs/test/video_codec_stats_impl.cc", - "codecs/test/video_codec_stats_impl.h", - "codecs/test/video_codec_tester_impl.cc", - "codecs/test/video_codec_tester_impl.h", - ] - - deps = [ - ":video_coding_utility", - "../../api:sequence_checker", - "../../api:video_codec_stats_api", - "../../api:video_codec_tester_api", - "../../api/numerics:numerics", - "../../api/task_queue:default_task_queue_factory", - "../../api/test/metrics:metrics_logger", - "../../api/units:data_rate", - "../../api/units:frequency", - "../../api/units:time_delta", - "../../api/units:timestamp", - "../../api/video:encoded_image", - "../../api/video:resolution", - "../../api/video:video_codec_constants", - "../../api/video:video_frame", - "../../rtc_base:checks", - "../../rtc_base:rtc_event", - "../../rtc_base:task_queue_for_test", - "../../rtc_base:timeutils", - "../../rtc_base/system:no_unique_address", - "../../system_wrappers", - "../../test:video_test_support", - "//third_party/libyuv", - ] - - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] - } - rtc_test("video_codec_perf_tests") { testonly = true @@ -1039,28 +1029,29 @@ if (rtc_include_tests) { deps = [ ":video_codec_interface", - ":video_codec_tester", - "../../api:create_video_codec_tester_api", - "../../api:video_codec_tester_api", - "../../api:videocodec_test_stats_api", + "../../api/environment", + "../../api/environment:environment_factory", "../../api/test/metrics:global_metrics_logger_and_exporter", "../../api/units:data_rate", "../../api/units:frequency", - "../../api/video:encoded_image", "../../api/video:resolution", - "../../api/video:video_frame", + "../../api/video_codecs:builtin_video_decoder_factory", + "../../api/video_codecs:builtin_video_encoder_factory", "../../api/video_codecs:scalability_mode", "../../api/video_codecs:video_codecs_api", - "../../media:rtc_internal_video_codecs", + "../../modules/video_coding/svc:scalability_mode_util", + "../../rtc_base:checks", "../../rtc_base:logging", - "../../system_wrappers:field_trial", + "../../rtc_base:stringutils", + "../../test:explicit_key_value_config", + "../../test:field_trial", "../../test:fileutils", + "../../test:test_flags", "../../test:test_main", 
"../../test:test_support", - "../../test:video_test_support", - "../rtp_rtcp:rtp_rtcp_format", - "svc:scalability_mode_util", - "//third_party/libyuv", + "../../test:video_codec_tester", + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] if (is_android) { @@ -1068,14 +1059,10 @@ if (rtc_include_tests) { deps += [ ":android_codec_factory_helper", "../../sdk/android:hwcodecs_java", - "//sdk/android:native_test_jni_onload", - "//testing/android/native_test:native_test_support", ] shard_timeout = 900 } - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable" ] - data = [ "../../resources/FourPeople_1280x720_30.yuv" ] } @@ -1085,7 +1072,6 @@ if (rtc_include_tests) { sources = [ "codecs/h264/test/h264_impl_unittest.cc", - "codecs/multiplex/test/multiplex_adapter_unittest.cc", "codecs/test/video_encoder_decoder_instantiation_tests.cc", "codecs/test/videocodec_test_av1.cc", "codecs/test/videocodec_test_libvpx.cc", @@ -1101,6 +1087,7 @@ if (rtc_include_tests) { } deps = [ + ":codec_globals_headers", ":encoded_video_frame_producer", ":mock_libvpx_interface", ":video_codec_interface", @@ -1109,10 +1096,10 @@ if (rtc_include_tests) { ":videocodec_test_impl", ":webrtc_h264", ":webrtc_libvpx_interface", - ":webrtc_multiplex", ":webrtc_vp8", ":webrtc_vp9", ":webrtc_vp9_helpers", + "../../api:array_view", "../../api:create_frame_generator", "../../api:create_videocodec_test_fixture_api", "../../api:frame_generator_api", @@ -1122,36 +1109,45 @@ if (rtc_include_tests) { "../../api:scoped_refptr", "../../api:videocodec_test_fixture_api", "../../api:videocodec_test_stats_api", + "../../api/environment", + "../../api/environment:environment_factory", "../../api/test/metrics:global_metrics_logger_and_exporter", "../../api/test/video:function_video_factory", + "../../api/units:data_rate", + "../../api/units:time_delta", + "../../api/units:timestamp", "../../api/video:encoded_image", + "../../api/video:render_resolution", + "../../api/video:video_bitrate_allocation", + "../../api/video:video_codec_constants", "../../api/video:video_frame", + "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", "../../api/video_codecs:rtc_software_fallback_wrappers", + "../../api/video_codecs:scalability_mode", "../../api/video_codecs:video_codecs_api", "../../common_video", "../../common_video/test:utilities", "../../media:codec", "../../media:media_constants", "../../media:rtc_internal_video_codecs", - "../../media:rtc_media_base", "../../media:rtc_simulcast_encoder_adapter", + "../../rtc_base:checks", "../../rtc_base:refcount", "../../rtc_base:stringutils", "../../rtc_base:timeutils", "../../test:explicit_key_value_config", "../../test:field_trial", "../../test:fileutils", + "../../test:scoped_key_value_config", "../../test:test_support", "../../test:video_test_common", "../rtp_rtcp:rtp_rtcp_format", "codecs/av1:dav1d_decoder", "svc:scalability_mode_util", - "//third_party/libyuv", - ] - absl_deps = [ + "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/libyuv", ] data = video_coding_modules_tests_resources @@ -1185,9 +1181,6 @@ if (rtc_include_tests) { sources = [ "chain_diff_calculator_unittest.cc", - "codecs/test/video_codec_analyzer_unittest.cc", - "codecs/test/video_codec_stats_impl_unittest.cc", - "codecs/test/video_codec_tester_impl_unittest.cc", "codecs/test/videocodec_test_fixture_config_unittest.cc", 
"codecs/test/videocodec_test_stats_impl_unittest.cc", "codecs/test/videoprocessor_unittest.cc", @@ -1200,9 +1193,9 @@ if (rtc_include_tests) { "frame_dependencies_calculator_unittest.cc", "frame_helpers_unittest.cc", "generic_decoder_unittest.cc", - "h264_packet_buffer_unittest.cc", "h264_sprop_parameter_sets_unittest.cc", "h264_sps_pps_tracker_unittest.cc", + "h26x_packet_buffer_unittest.cc", "histogram_unittest.cc", "loss_notification_controller_unittest.cc", "nack_requester_unittest.cc", @@ -1211,6 +1204,7 @@ if (rtc_include_tests) { "rtp_vp8_ref_finder_unittest.cc", "rtp_vp9_ref_finder_unittest.cc", "utility/bandwidth_quality_scaler_unittest.cc", + "utility/corruption_detection_settings_generator_unittest.cc", "utility/decoded_frames_history_unittest.cc", "utility/frame_dropper_unittest.cc", "utility/framerate_controller_deprecated_unittest.cc", @@ -1237,12 +1231,12 @@ if (rtc_include_tests) { ":encoded_frame", ":frame_dependencies_calculator", ":frame_helpers", - ":h264_packet_buffer", + ":h264_sprop_parameter_sets", + ":h26x_packet_buffer", ":nack_requester", ":packet_buffer", ":simulcast_test_fixture_impl", ":video_codec_interface", - ":video_codec_tester", ":video_codecs_test_framework", ":video_coding", ":video_coding_legacy", @@ -1254,44 +1248,58 @@ if (rtc_include_tests) { ":webrtc_vp8_temporal_layers", ":webrtc_vp9", ":webrtc_vp9_helpers", + "..:module_api", "..:module_fec_api", "../../api:array_view", "../../api:create_simulcast_test_fixture_api", "../../api:fec_controller_api", + "../../api:field_trials_view", + "../../api:make_ref_counted", "../../api:mock_fec_controller_override", "../../api:mock_video_decoder", "../../api:mock_video_encoder", + "../../api:rtp_headers", "../../api:rtp_packet_info", "../../api:scoped_refptr", "../../api:simulcast_test_fixture_api", - "../../api:video_codec_tester_api", "../../api:videocodec_test_fixture_api", + "../../api/environment", + "../../api/environment:environment_factory", "../../api/task_queue", "../../api/task_queue:default_task_queue_factory", "../../api/test/video:function_video_factory", + "../../api/transport/rtp:dependency_descriptor", + "../../api/units:data_rate", "../../api/units:data_size", "../../api/units:frequency", "../../api/units:time_delta", "../../api/units:timestamp", "../../api/video:builtin_video_bitrate_allocator_factory", + "../../api/video:corruption_detection_filter_settings", "../../api/video:encoded_frame", "../../api/video:encoded_image", + "../../api/video:frame_buffer", "../../api/video:render_resolution", "../../api/video:video_adaptation", "../../api/video:video_bitrate_allocation", "../../api/video:video_bitrate_allocator", "../../api/video:video_bitrate_allocator_factory", + "../../api/video:video_codec_constants", "../../api/video:video_frame", "../../api/video:video_frame_type", "../../api/video:video_rtp_headers", + "../../api/video_codecs:scalability_mode", "../../api/video_codecs:video_codecs_api", "../../api/video_codecs:vp8_temporal_layers_factory", "../../common_video", + "../../common_video:corruption_score_calculator", + "../../common_video:frame_instrumentation_data", "../../common_video/generic_frame_descriptor", "../../common_video/test:utilities", "../../media:media_constants", - "../../media:rtc_media_base", + "../../media:rtc_internal_video_codecs", "../../rtc_base:checks", + "../../rtc_base:copy_on_write_buffer", "../../rtc_base:gunit_helpers", "../../rtc_base:histogram_percentile_counter", "../../rtc_base:platform_thread", @@ -1302,13 +1310,15 @@ if (rtc_include_tests) { 
"../../rtc_base:rtc_numerics", "../../rtc_base:stringutils", "../../rtc_base:task_queue_for_test", + "../../rtc_base:threading", "../../rtc_base:timeutils", "../../rtc_base/experiments:encoder_info_settings", "../../rtc_base/synchronization:mutex", + "../../rtc_base/system:file_wrapper", "../../rtc_base/system:unused", "../../system_wrappers", - "../../system_wrappers:field_trial", "../../system_wrappers:metrics", + "../../test:explicit_key_value_config", "../../test:fake_encoded_frame", "../../test:fake_video_codecs", "../../test:field_trial", @@ -1320,6 +1330,7 @@ if (rtc_include_tests) { "../../test:video_test_support", "../../test/time_controller:time_controller", "../../third_party/libyuv:libyuv", + "../../video/config:encoder_config", "../rtp_rtcp", "../rtp_rtcp:rtp_rtcp_format", "../rtp_rtcp:rtp_video_header", @@ -1330,14 +1341,11 @@ if (rtc_include_tests) { "deprecated:deprecated_session_info", "deprecated:deprecated_stream_generator", "svc:scalability_structure_tests", + "svc:simulcast_to_svc_converter_tests", "svc:svc_rate_allocator_tests", "timing:jitter_estimator", "timing:timing_module", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/types:optional", - "//third_party/abseil-cpp/absl/types:variant", + "//third_party/abseil-cpp/absl/container:inlined_vector", ] if (rtc_build_libvpx) { deps += [ rtc_libvpx_dir ] diff --git a/modules/video_coding/DEPS b/modules/video_coding/DEPS index d62707c2f9..49c640bd54 100644 --- a/modules/video_coding/DEPS +++ b/modules/video_coding/DEPS @@ -11,9 +11,6 @@ include_rules = [ ] specific_include_rules = { - "android_codec_factory_helper\.cc": [ - "+base/android", - ], "multiplex_encoder_adapter\.cc": [ "+media/base", ], diff --git a/modules/video_coding/OWNERS b/modules/video_coding/OWNERS index 2e4d968c98..5073079d34 100644 --- a/modules/video_coding/OWNERS +++ b/modules/video_coding/OWNERS @@ -4,4 +4,5 @@ ilnik@webrtc.org marpan@webrtc.org philipel@webrtc.org sprang@webrtc.org +ssilkin@webrtc.org stefan@webrtc.org diff --git a/modules/video_coding/chain_diff_calculator.cc b/modules/video_coding/chain_diff_calculator.cc index 5f852717b5..d8ce44acbd 100644 --- a/modules/video_coding/chain_diff_calculator.cc +++ b/modules/video_coding/chain_diff_calculator.cc @@ -13,10 +13,10 @@ #include #include +#include #include #include "absl/container/inlined_vector.h" -#include "absl/types/optional.h" #include "rtc_base/logging.h" namespace webrtc { @@ -25,7 +25,7 @@ void ChainDiffCalculator::Reset(const std::vector& chains) { last_frame_in_chain_.resize(chains.size()); for (size_t i = 0; i < chains.size(); ++i) { if (chains[i]) { - last_frame_in_chain_[i] = absl::nullopt; + last_frame_in_chain_[i] = std::nullopt; } } } diff --git a/modules/video_coding/chain_diff_calculator.h b/modules/video_coding/chain_diff_calculator.h index bca7340c6f..b757544607 100644 --- a/modules/video_coding/chain_diff_calculator.h +++ b/modules/video_coding/chain_diff_calculator.h @@ -13,10 +13,10 @@ #include +#include #include #include "absl/container/inlined_vector.h" -#include "absl/types/optional.h" namespace webrtc { @@ -38,7 +38,7 @@ class ChainDiffCalculator { private: absl::InlinedVector ChainDiffs(int64_t frame_id) const; - absl::InlinedVector, 4> last_frame_in_chain_; + absl::InlinedVector, 4> last_frame_in_chain_; }; } // namespace webrtc diff --git a/modules/video_coding/codecs/av1/BUILD.gn b/modules/video_coding/codecs/av1/BUILD.gn index 6465306731..6b97218c84 100644 --- a/modules/video_coding/codecs/av1/BUILD.gn +++ 
b/modules/video_coding/codecs/av1/BUILD.gn @@ -15,6 +15,8 @@ rtc_library("av1_svc_config") { "av1_svc_config.h", ] deps = [ + "../../../../api/video:video_frame", + "../../../../api/video_codecs:scalability_mode", "../../../../api/video_codecs:video_codecs_api", "../../../../rtc_base:checks", "../../../../rtc_base:logging", @@ -22,9 +24,8 @@ rtc_library("av1_svc_config") { "../../svc:scalability_mode_util", "../../svc:scalability_structures", "../../svc:scalable_video_controller", + "//third_party/abseil-cpp/absl/container:inlined_vector", ] - - absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector" ] } rtc_library("dav1d_decoder") { @@ -35,7 +36,9 @@ rtc_library("dav1d_decoder") { deps = [ "../..:video_codec_interface", + "../../../../api:refcountedbase", "../../../../api:scoped_refptr", + "../../../../api/environment", "../../../../api/video:encoded_image", "../../../../api/video:video_frame", "../../../../api/video_codecs:video_codecs_api", @@ -44,7 +47,6 @@ rtc_library("dav1d_decoder") { "//third_party/dav1d", "//third_party/libyuv", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("libaom_av1_encoder") { @@ -56,25 +58,30 @@ rtc_library("libaom_av1_encoder") { "../..:video_codec_interface", "../../../../api:field_trials_view", "../../../../api:scoped_refptr", - "../../../../api/transport:field_trial_based_config", + "../../../../api/environment", "../../../../api/video:encoded_image", + "../../../../api/video:render_resolution", + "../../../../api/video:video_codec_constants", "../../../../api/video:video_frame", + "../../../../api/video:video_frame_type", + "../../../../api/video:video_rtp_headers", "../../../../api/video_codecs:scalability_mode", "../../../../api/video_codecs:video_codecs_api", "../../../../common_video", + "../../../../common_video/generic_frame_descriptor", + "../../../../modules/rtp_rtcp:rtp_rtcp_format", "../../../../rtc_base:checks", "../../../../rtc_base:logging", "../../../../rtc_base:rtc_numerics", "../../../../rtc_base/experiments:encoder_info_settings", "../../svc:scalability_structures", "../../svc:scalable_video_controller", - "//third_party/libaom", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/base:nullability", + "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/strings", + "//third_party/libaom", ] } @@ -82,10 +89,30 @@ if (rtc_include_tests) { rtc_library("video_coding_codecs_av1_tests") { testonly = true - sources = [ "av1_svc_config_unittest.cc" ] + sources = [ + "av1_svc_config_unittest.cc", + "dav1d_decoder_unittest.cc", + ] + deps = [ ":av1_svc_config", + ":dav1d_decoder", + "../..:video_codec_interface", + "../../../../api:array_view", + "../../../../api/environment", + "../../../../api/environment:environment_factory", + "../../../../api/transport/rtp:dependency_descriptor", + "../../../../api/units:data_rate", + "../../../../api/units:timestamp", + "../../../../api/video:encoded_image", + "../../../../api/video:render_resolution", + "../../../../api/video:video_bitrate_allocation", + "../../../../api/video:video_frame", + "../../../../api/video:video_frame_type", + "../../../../api/video_codecs:scalability_mode", "../../../../api/video_codecs:video_codecs_api", + "../../../../rtc_base:checks", + "../../../../test:explicit_key_value_config", 
"../../../../test:test_support", ] @@ -95,22 +122,21 @@ if (rtc_include_tests) { "libaom_av1_unittest.cc", ] deps += [ - ":dav1d_decoder", ":libaom_av1_encoder", "../..:encoded_video_frame_producer", - "../..:video_codec_interface", "../../../../api:create_frame_generator", "../../../../api:frame_generator_api", "../../../../api:mock_video_encoder", "../../../../api/units:data_size", "../../../../api/units:time_delta", - "../../../../api/video:video_frame", - "../../../../test:field_trial", + "../../../../modules/rtp_rtcp:rtp_rtcp_format", + "../../../../test:fileutils", + "../../../../test:scoped_key_value_config", + "../../../../test:video_test_support", "../../svc:scalability_mode_util", "../../svc:scalability_structures", "../../svc:scalable_video_controller", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } } } diff --git a/modules/video_coding/codecs/av1/av1_svc_config.cc b/modules/video_coding/codecs/av1/av1_svc_config.cc index 43dcf96ab7..fa3b93e49e 100644 --- a/modules/video_coding/codecs/av1/av1_svc_config.cc +++ b/modules/video_coding/codecs/av1/av1_svc_config.cc @@ -13,7 +13,13 @@ #include #include #include +#include +#include "absl/container/inlined_vector.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/spatial_layer.h" +#include "api/video_codecs/video_codec.h" #include "modules/video_coding/svc/create_scalability_structure.h" #include "modules/video_coding/svc/scalability_mode_util.h" #include "modules/video_coding/svc/scalable_video_controller.h" @@ -23,10 +29,26 @@ namespace webrtc { namespace { -absl::optional BuildScalabilityMode(int num_temporal_layers, - int num_spatial_layers) { +const int kMinAv1SpatialLayerLongSideLength = 240; +const int kMinAv1SpatialLayerShortSideLength = 135; + +int GetLimitedNumSpatialLayers(int width, int height) { + const bool is_landscape = width >= height; + const int min_width = is_landscape ? kMinAv1SpatialLayerLongSideLength + : kMinAv1SpatialLayerShortSideLength; + const int min_height = is_landscape ? kMinAv1SpatialLayerShortSideLength + : kMinAv1SpatialLayerLongSideLength; + const int num_layers_fit_horz = static_cast( + std::floor(1 + std::max(0.0f, std::log2(1.0f * width / min_width)))); + const int num_layers_fit_vert = static_cast( + std::floor(1 + std::max(0.0f, std::log2(1.0f * height / min_height)))); + return std::min(num_layers_fit_horz, num_layers_fit_vert); +} + +std::optional BuildScalabilityMode(int num_temporal_layers, + int num_spatial_layers) { char name[20]; - rtc::SimpleStringBuilder ss(name); + SimpleStringBuilder ss(name); ss << "L" << num_spatial_layers << "T" << num_temporal_layers; if (num_spatial_layers > 1) { ss << "_KEY"; @@ -40,7 +62,7 @@ absl::InlinedVector LibaomAv1EncoderSupportedScalabilityModes() { absl::InlinedVector scalability_modes; for (ScalabilityMode scalability_mode : kAllScalabilityModes) { - if (ScalabilityStructureConfig(scalability_mode) != absl::nullopt) { + if (ScalabilityStructureConfig(scalability_mode) != std::nullopt) { scalability_modes.push_back(scalability_mode); } } @@ -50,7 +72,7 @@ LibaomAv1EncoderSupportedScalabilityModes() { bool LibaomAv1EncoderSupportsScalabilityMode(ScalabilityMode scalability_mode) { // For libaom AV1, the scalability mode is supported if we can create the // scalability structure. 
- return ScalabilityStructureConfig(scalability_mode) != absl::nullopt; + return ScalabilityStructureConfig(scalability_mode) != std::nullopt; } bool SetAv1SvcConfig(VideoCodec& video_codec, @@ -58,7 +80,7 @@ bool SetAv1SvcConfig(VideoCodec& video_codec, int num_spatial_layers) { RTC_DCHECK_EQ(video_codec.codecType, kVideoCodecAV1); - absl::optional scalability_mode = + std::optional scalability_mode = video_codec.GetScalabilityMode(); if (!scalability_mode.has_value()) { scalability_mode = @@ -69,6 +91,19 @@ bool SetAv1SvcConfig(VideoCodec& video_codec, } } + bool requested_single_spatial_layer = + ScalabilityModeToNumSpatialLayers(*scalability_mode) == 1; + + if (ScalabilityMode reduced = LimitNumSpatialLayers( + *scalability_mode, + GetLimitedNumSpatialLayers(video_codec.width, video_codec.height)); + *scalability_mode != reduced) { + RTC_LOG(LS_WARNING) << "Reduced number of spatial layers from " + << ScalabilityModeToString(*scalability_mode) << " to " + << ScalabilityModeToString(reduced); + scalability_mode = reduced; + } + std::unique_ptr structure = CreateScalabilityStructure(*scalability_mode); if (structure == nullptr) { @@ -92,7 +127,7 @@ bool SetAv1SvcConfig(VideoCodec& video_codec, spatial_layer.active = true; } - if (info.num_spatial_layers == 1) { + if (requested_single_spatial_layer) { SpatialLayer& spatial_layer = video_codec.spatialLayers[0]; spatial_layer.minBitrate = video_codec.minBitrate; spatial_layer.maxBitrate = video_codec.maxBitrate; @@ -103,10 +138,8 @@ bool SetAv1SvcConfig(VideoCodec& video_codec, for (int sl_idx = 0; sl_idx < info.num_spatial_layers; ++sl_idx) { SpatialLayer& spatial_layer = video_codec.spatialLayers[sl_idx]; - // minBitrate and maxBitrate formulas are copied from vp9 settings and - // are not yet tuned for av1. 
const int num_pixels = spatial_layer.width * spatial_layer.height; - int min_bitrate_kbps = (600.0 * std::sqrt(num_pixels) - 95'000.0) / 1000.0; + int min_bitrate_kbps = (480.0 * std::sqrt(num_pixels) - 95'000.0) / 1000.0; spatial_layer.minBitrate = std::max(min_bitrate_kbps, 20); spatial_layer.maxBitrate = 50 + static_cast(1.6 * num_pixels / 1000.0); spatial_layer.targetBitrate = diff --git a/modules/video_coding/codecs/av1/av1_svc_config.h b/modules/video_coding/codecs/av1/av1_svc_config.h index 05b886b9f4..218712d48b 100644 --- a/modules/video_coding/codecs/av1/av1_svc_config.h +++ b/modules/video_coding/codecs/av1/av1_svc_config.h @@ -10,9 +10,9 @@ #ifndef MODULES_VIDEO_CODING_CODECS_AV1_AV1_SVC_CONFIG_H_ #define MODULES_VIDEO_CODING_CODECS_AV1_AV1_SVC_CONFIG_H_ -#include #include "absl/container/inlined_vector.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_codec.h" namespace webrtc { diff --git a/modules/video_coding/codecs/av1/av1_svc_config_unittest.cc b/modules/video_coding/codecs/av1/av1_svc_config_unittest.cc index 9f1da9865c..331b43daa7 100644 --- a/modules/video_coding/codecs/av1/av1_svc_config_unittest.cc +++ b/modules/video_coding/codecs/av1/av1_svc_config_unittest.cc @@ -10,17 +10,25 @@ #include "modules/video_coding/codecs/av1/av1_svc_config.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_codec.h" -#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { namespace { constexpr int kDontCare = 0; -TEST(Av1SvcConfigTest, TreatsEmptyAsL1T1) { +VideoCodec GetDefaultVideoCodec() { VideoCodec video_codec; video_codec.codecType = kVideoCodecAV1; + video_codec.width = 1280; + video_codec.height = 720; + return video_codec; +} + +TEST(Av1SvcConfigTest, TreatsEmptyAsL1T1) { + VideoCodec video_codec = GetDefaultVideoCodec(); EXPECT_TRUE(SetAv1SvcConfig(video_codec, /*num_temporal_layers=*/kDontCare, /*num_spatial_layers=*/kDontCare)); @@ -31,8 +39,7 @@ TEST(Av1SvcConfigTest, TreatsEmptyAsL1T1) { } TEST(Av1SvcConfigTest, ScalabilityModeFromNumberOfTemporalLayers) { - VideoCodec video_codec; - video_codec.codecType = kVideoCodecAV1; + VideoCodec video_codec = GetDefaultVideoCodec(); EXPECT_TRUE(SetAv1SvcConfig(video_codec, /*num_temporal_layers=*/3, /*num_spatial_layers=*/1)); @@ -40,8 +47,7 @@ TEST(Av1SvcConfigTest, ScalabilityModeFromNumberOfTemporalLayers) { } TEST(Av1SvcConfigTest, ScalabilityModeFromNumberOfSpatialLayers) { - VideoCodec video_codec; - video_codec.codecType = kVideoCodecAV1; + VideoCodec video_codec = GetDefaultVideoCodec(); EXPECT_TRUE(SetAv1SvcConfig(video_codec, /*num_temporal_layers=*/3, /*num_spatial_layers=*/2)); @@ -52,8 +58,7 @@ TEST(Av1SvcConfigTest, ScalabilityModeFromNumberOfSpatialLayers) { } TEST(Av1SvcConfigTest, SetsActiveSpatialLayersFromScalabilityMode) { - VideoCodec video_codec; - video_codec.codecType = kVideoCodecAV1; + VideoCodec video_codec = GetDefaultVideoCodec(); video_codec.SetScalabilityMode(ScalabilityMode::kL2T1); EXPECT_TRUE(SetAv1SvcConfig(video_codec, /*num_temporal_layers=*/kDontCare, @@ -98,9 +103,7 @@ TEST(Av1SvcConfigTest, ConfiguresSmallResolutionRatioFromScalabilityMode) { } TEST(Av1SvcConfigTest, CopiesFramrate) { - VideoCodec video_codec; - video_codec.codecType = kVideoCodecAV1; - // h mode uses 1.5:1 ratio + VideoCodec video_codec = GetDefaultVideoCodec(); video_codec.SetScalabilityMode(ScalabilityMode::kL2T1); video_codec.maxFramerate = 27; @@ -112,8 +115,7 @@ TEST(Av1SvcConfigTest, CopiesFramrate) { } 
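A worked example of the retuned per-spatial-layer bitrate limits introduced above (the 480.0 multiplier) together with the new 240x135 minimum layer size; the helper names below are illustrative assumptions, not part of the patch:

#include <algorithm>
#include <cmath>

// Mirrors the per-spatial-layer arithmetic in SetAv1SvcConfig after this
// change; values are in kbps.
int MinBitrateKbps(int width, int height) {
  const int num_pixels = width * height;
  const int min_bitrate_kbps =
      static_cast<int>((480.0 * std::sqrt(num_pixels) - 95'000.0) / 1000.0);
  return std::max(min_bitrate_kbps, 20);
}

int MaxBitrateKbps(int width, int height) {
  const int num_pixels = width * height;
  return 50 + static_cast<int>(1.6 * num_pixels / 1000.0);
}

// For an L2T2 640x360 input:
//   layer 0 (320x180): min = 20 kbps,  max = 142 kbps
//   layer 1 (640x360): min = 135 kbps, max = 418 kbps
// matching SetsBitratesForMultipleSpatialLayers below. With a 240x135
// minimum per layer, 640x360 also fits at most two spatial layers, which
// is why kL3T3 is reduced to kL2T3 in
// ReduceSpatialLayersOnInsufficentInputResolution.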
TEST(Av1SvcConfigTest, SetsNumberOfTemporalLayers) { - VideoCodec video_codec; - video_codec.codecType = kVideoCodecAV1; + VideoCodec video_codec = GetDefaultVideoCodec(); video_codec.SetScalabilityMode(ScalabilityMode::kL1T3); EXPECT_TRUE(SetAv1SvcConfig(video_codec, /*num_temporal_layers=*/kDontCare, @@ -143,28 +145,30 @@ TEST(Av1SvcConfigTest, CopiesMinMaxBitrateForSingleSpatialLayer) { TEST(Av1SvcConfigTest, SetsBitratesForMultipleSpatialLayers) { VideoCodec video_codec; video_codec.codecType = kVideoCodecAV1; - video_codec.SetScalabilityMode(ScalabilityMode::kL3T3); + video_codec.width = 640; + video_codec.height = 360; + video_codec.SetScalabilityMode(ScalabilityMode::kL2T2); EXPECT_TRUE(SetAv1SvcConfig(video_codec, /*num_temporal_layers=*/kDontCare, /*num_spatial_layers=*/kDontCare)); - EXPECT_GT(video_codec.spatialLayers[0].minBitrate, 0u); - EXPECT_LE(video_codec.spatialLayers[0].minBitrate, - video_codec.spatialLayers[0].targetBitrate); - EXPECT_LE(video_codec.spatialLayers[0].targetBitrate, - video_codec.spatialLayers[0].maxBitrate); + EXPECT_EQ(video_codec.spatialLayers[0].minBitrate, 20u); + EXPECT_EQ(video_codec.spatialLayers[0].maxBitrate, 142u); + + EXPECT_EQ(video_codec.spatialLayers[1].minBitrate, 135u); + EXPECT_EQ(video_codec.spatialLayers[1].maxBitrate, 418u); +} + +TEST(Av1SvcConfigTest, ReduceSpatialLayersOnInsufficentInputResolution) { + VideoCodec video_codec = GetDefaultVideoCodec(); + video_codec.width = 640; + video_codec.height = 360; + video_codec.SetScalabilityMode(ScalabilityMode::kL3T3); + + EXPECT_TRUE(SetAv1SvcConfig(video_codec, /*num_temporal_layers=*/kDontCare, + /*num_spatial_layers=*/kDontCare)); - EXPECT_GT(video_codec.spatialLayers[1].minBitrate, 0u); - EXPECT_LE(video_codec.spatialLayers[1].minBitrate, - video_codec.spatialLayers[1].targetBitrate); - EXPECT_LE(video_codec.spatialLayers[1].targetBitrate, - video_codec.spatialLayers[1].maxBitrate); - - EXPECT_GT(video_codec.spatialLayers[2].minBitrate, 0u); - EXPECT_LE(video_codec.spatialLayers[2].minBitrate, - video_codec.spatialLayers[2].targetBitrate); - EXPECT_LE(video_codec.spatialLayers[2].targetBitrate, - video_codec.spatialLayers[2].maxBitrate); + EXPECT_EQ(*video_codec.GetScalabilityMode(), ScalabilityMode::kL2T3); } } // namespace diff --git a/modules/video_coding/codecs/av1/dav1d_decoder.cc b/modules/video_coding/codecs/av1/dav1d_decoder.cc index 3100c0d41b..a9384b0c25 100644 --- a/modules/video_coding/codecs/av1/dav1d_decoder.cc +++ b/modules/video_coding/codecs/av1/dav1d_decoder.cc @@ -11,16 +11,24 @@ #include "modules/video_coding/codecs/av1/dav1d_decoder.h" #include +#include +#include +#include +#include "api/environment/environment.h" +#include "api/ref_counted_base.h" #include "api/scoped_refptr.h" #include "api/video/encoded_image.h" +#include "api/video/video_frame.h" #include "api/video/video_frame_buffer.h" +#include "api/video_codecs/video_decoder.h" #include "common_video/include/video_frame_buffer.h" #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/logging.h" +#include "third_party/dav1d/libdav1d/include/dav1d/data.h" #include "third_party/dav1d/libdav1d/include/dav1d/dav1d.h" -#include "third_party/libyuv/include/libyuv/convert.h" -#include "third_party/libyuv/include/libyuv/planar_functions.h" +#include "third_party/dav1d/libdav1d/include/dav1d/headers.h" +#include "third_party/dav1d/libdav1d/include/dav1d/picture.h" namespace webrtc { namespace { @@ -28,6 +36,7 @@ namespace { class Dav1dDecoder : public VideoDecoder { public: Dav1dDecoder(); 
+ explicit Dav1dDecoder(const Environment& env); Dav1dDecoder(const Dav1dDecoder&) = delete; Dav1dDecoder& operator=(const Dav1dDecoder&) = delete; @@ -45,6 +54,8 @@ class Dav1dDecoder : public VideoDecoder { private: Dav1dContext* context_ = nullptr; DecodedImageCallback* decode_complete_callback_ = nullptr; + + const bool crop_to_render_resolution_ = false; }; class ScopedDav1dData { @@ -57,13 +68,12 @@ class ScopedDav1dData { Dav1dData data_ = {}; }; -class ScopedDav1dPicture - : public rtc::RefCountedNonVirtual { +class ScopedDav1dPicture : public RefCountedNonVirtual { public: ~ScopedDav1dPicture() { dav1d_picture_unref(&picture_); } Dav1dPicture& Picture() { return picture_; } - using rtc::RefCountedNonVirtual::HasOneRef; + using RefCountedNonVirtual::HasOneRef; private: Dav1dPicture picture_ = {}; @@ -72,10 +82,14 @@ class ScopedDav1dPicture constexpr char kDav1dName[] = "dav1d"; // Calling `dav1d_data_wrap` requires a `free_callback` to be registered. -void NullFreeCallback(const uint8_t* buffer, void* opaque) {} +void NullFreeCallback(const uint8_t* /* buffer */, void* /* opaque */) {} Dav1dDecoder::Dav1dDecoder() = default; +Dav1dDecoder::Dav1dDecoder(const Environment& env) + : crop_to_render_resolution_(env.field_trials().IsEnabled( + "WebRTC-Dav1dDecoder-CropToRenderResolution")) {} + Dav1dDecoder::~Dav1dDecoder() { Release(); } @@ -84,9 +98,11 @@ bool Dav1dDecoder::Configure(const Settings& settings) { Dav1dSettings s; dav1d_default_settings(&s); - s.n_threads = std::max(2, settings.number_of_cores()); - s.max_frame_delay = 1; // For low latency decoding. - s.all_layers = 0; // Don't output a frame for every spatial layer. + s.n_threads = std::clamp(settings.number_of_cores(), 1, DAV1D_MAX_THREADS); + s.max_frame_delay = 1; // For low latency decoding. + s.all_layers = 0; // Don't output a frame for every spatial layer. + // Limit max frame size to avoid OOM'ing fuzzers. crbug.com/325284120. + s.frame_size_limit = 16384 * 16384; s.operating_point = 31; // Decode all operating points. return dav1d_open(&context_, &s) == 0; @@ -136,7 +152,7 @@ int32_t Dav1dDecoder::Decode(const EncodedImage& encoded_image, return WEBRTC_VIDEO_CODEC_ERROR; } - rtc::scoped_refptr scoped_dav1d_picture( + scoped_refptr scoped_dav1d_picture( new ScopedDav1dPicture{}); Dav1dPicture& dav1d_picture = scoped_dav1d_picture->Picture(); if (int get_picture_res = dav1d_get_picture(context_, &dav1d_picture)) { @@ -153,21 +169,38 @@ int32_t Dav1dDecoder::Decode(const EncodedImage& encoded_image, return WEBRTC_VIDEO_CODEC_ERROR; } - rtc::scoped_refptr wrapped_buffer; + int width = dav1d_picture.p.w; + int height = dav1d_picture.p.h; + + if (crop_to_render_resolution_ && dav1d_picture.frame_hdr) { + // Interpret render_width/height as resolution decoded frame should be + // cropped to. 
+ if (dav1d_picture.frame_hdr->render_width > 0 && + dav1d_picture.frame_hdr->render_height > 0) { + width = std::min(width, dav1d_picture.frame_hdr->render_width); + height = std::min(height, dav1d_picture.frame_hdr->render_height); + } else { + RTC_LOG(LS_WARNING) << "Dav1dDecoder::Decode invalid render resolution " + << dav1d_picture.frame_hdr->render_width << "x" + << dav1d_picture.frame_hdr->render_height; + } + } + + scoped_refptr wrapped_buffer; if (dav1d_picture.p.layout == DAV1D_PIXEL_LAYOUT_I420) { wrapped_buffer = WrapI420Buffer( - dav1d_picture.p.w, dav1d_picture.p.h, - static_cast(dav1d_picture.data[0]), dav1d_picture.stride[0], - static_cast(dav1d_picture.data[1]), dav1d_picture.stride[1], - static_cast(dav1d_picture.data[2]), dav1d_picture.stride[1], + width, height, static_cast(dav1d_picture.data[0]), + dav1d_picture.stride[0], static_cast(dav1d_picture.data[1]), + dav1d_picture.stride[1], static_cast(dav1d_picture.data[2]), + dav1d_picture.stride[1], // To keep |scoped_dav1d_picture.Picture()| alive [scoped_dav1d_picture] {}); } else if (dav1d_picture.p.layout == DAV1D_PIXEL_LAYOUT_I444) { wrapped_buffer = WrapI444Buffer( - dav1d_picture.p.w, dav1d_picture.p.h, - static_cast(dav1d_picture.data[0]), dav1d_picture.stride[0], - static_cast(dav1d_picture.data[1]), dav1d_picture.stride[1], - static_cast(dav1d_picture.data[2]), dav1d_picture.stride[1], + width, height, static_cast(dav1d_picture.data[0]), + dav1d_picture.stride[0], static_cast(dav1d_picture.data[1]), + dav1d_picture.stride[1], static_cast(dav1d_picture.data[2]), + dav1d_picture.stride[1], // To keep |scoped_dav1d_picture.Picture()| alive [scoped_dav1d_picture] {}); } else { @@ -181,15 +214,23 @@ int32_t Dav1dDecoder::Decode(const EncodedImage& encoded_image, return WEBRTC_VIDEO_CODEC_ERROR; } - VideoFrame decoded_frame = VideoFrame::Builder() - .set_video_frame_buffer(wrapped_buffer) - .set_timestamp_rtp(encoded_image.Timestamp()) - .set_ntp_time_ms(encoded_image.ntp_time_ms_) - .set_color_space(encoded_image.ColorSpace()) - .build(); - - decode_complete_callback_->Decoded(decoded_frame, absl::nullopt, - absl::nullopt); + VideoFrame decoded_frame = + VideoFrame::Builder() + .set_video_frame_buffer(wrapped_buffer) + .set_rtp_timestamp(encoded_image.RtpTimestamp()) + .set_ntp_time_ms(encoded_image.ntp_time_ms_) + .set_color_space(encoded_image.ColorSpace()) + .build(); + + // Corresponds to QP_base in + // J. Han et al., "A Technical Overview of AV1," in Proceedings of the IEEE, + // vol. 109, no. 9, pp. 1435-1462, Sept. 2021, + // doi: 10.1109/JPROC.2021.3058584. 
keywords: + // {Encoding;Codecs;Decoding;Streaming media;Video compression;Media;Alliance + // of Open Media;AV1;video compression}, + std::optional qp = dav1d_picture.frame_hdr->quant.yac; + decode_complete_callback_->Decoded(decoded_frame, + /*decode_time_ms=*/std::nullopt, qp); return WEBRTC_VIDEO_CODEC_OK; } @@ -200,4 +241,8 @@ std::unique_ptr CreateDav1dDecoder() { return std::make_unique(); } +std::unique_ptr CreateDav1dDecoder(const Environment& env) { + return std::make_unique(env); +} + } // namespace webrtc diff --git a/modules/video_coding/codecs/av1/dav1d_decoder.h b/modules/video_coding/codecs/av1/dav1d_decoder.h index c9396d1e03..3761295321 100644 --- a/modules/video_coding/codecs/av1/dav1d_decoder.h +++ b/modules/video_coding/codecs/av1/dav1d_decoder.h @@ -12,12 +12,17 @@ #include +#include "api/environment/environment.h" #include "api/video_codecs/video_decoder.h" namespace webrtc { +// TODO: b/405341160 - Delete after downstream projects switched to version with +// `Environment`. std::unique_ptr CreateDav1dDecoder(); +std::unique_ptr CreateDav1dDecoder(const Environment& env); + } // namespace webrtc #endif // MODULES_VIDEO_CODING_CODECS_AV1_DAV1D_DECODER_H_ diff --git a/modules/video_coding/codecs/av1/dav1d_decoder_unittest.cc b/modules/video_coding/codecs/av1/dav1d_decoder_unittest.cc new file mode 100644 index 0000000000..f2ad3a57a6 --- /dev/null +++ b/modules/video_coding/codecs/av1/dav1d_decoder_unittest.cc @@ -0,0 +1,120 @@ +/* + * Copyright (c) 2025 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/codecs/av1/dav1d_decoder.h" + +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/video/encoded_image.h" +#include "api/video/video_frame.h" +#include "api/video_codecs/video_decoder.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "test/explicit_key_value_config.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace test { +namespace { + +using ::testing::Eq; +using ::testing::Not; +using ::testing::NotNull; + +constexpr uint8_t kAv1FrameWith36x20EncodededAnd32x16RenderResolution[] = { + 0x12, 0x00, 0x0a, 0x06, 0x18, 0x15, 0x23, 0x9f, 0x60, 0x10, 0x32, 0x18, + 0x20, 0x03, 0xe0, 0x01, 0xf2, 0xb0, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, + 0x00, 0xf2, 0x44, 0xd6, 0xa5, 0x3b, 0x7c, 0x8b, 0x7c, 0x8c, 0x6b, 0x9a}; + +EncodedImage CreateEncodedImage(ArrayView data) { + EncodedImage image; + image.SetEncodedData(EncodedImageBuffer::Create(data.data(), data.size())); + return image; +} + +class TestAv1Decoder : public DecodedImageCallback { + public: + explicit TestAv1Decoder(const Environment& env) + : decoder_(CreateDav1dDecoder(env)) { + if (decoder_ == nullptr) { + ADD_FAILURE() << "Failed to create decoder"; + return; + } + EXPECT_TRUE(decoder_->Configure({})); + EXPECT_EQ(decoder_->RegisterDecodeCompleteCallback(this), + WEBRTC_VIDEO_CODEC_OK); + } + // This class requires pointer stability and thus not copyable nor movable. 
+ TestAv1Decoder(const TestAv1Decoder&) = delete; + TestAv1Decoder& operator=(const TestAv1Decoder&) = delete; + + void Decode(const EncodedImage& image) { + ASSERT_THAT(decoder_, NotNull()); + decoded_frame_ = std::nullopt; + int32_t error = + decoder_->Decode(image, /*render_time_ms=*/image.capture_time_ms_); + ASSERT_EQ(error, WEBRTC_VIDEO_CODEC_OK); + ASSERT_THAT(decoded_frame_, Not(Eq(std::nullopt))); + } + + VideoFrame& decoded_frame() { return *decoded_frame_; } + + private: + int32_t Decoded(VideoFrame& decoded_frame) override { + decoded_frame_ = std::move(decoded_frame); + return 0; + } + void Decoded(VideoFrame& decoded_frame, + std::optional /*decode_time_ms*/, + std::optional /*qp*/) override { + Decoded(decoded_frame); + } + + const std::unique_ptr decoder_; + std::optional decoded_frame_; +}; + +TEST(Dav1dDecoderTest, KeepsDecodedResolutionByDefault) { + TestAv1Decoder decoder(CreateEnvironment()); + decoder.Decode( + CreateEncodedImage(kAv1FrameWith36x20EncodededAnd32x16RenderResolution)); + EXPECT_EQ(decoder.decoded_frame().width(), 36); + EXPECT_EQ(decoder.decoded_frame().height(), 20); +} + +TEST(Dav1dDecoderTest, CropsToRenderResolutionWhenCropIsEnabled) { + TestAv1Decoder decoder( + CreateEnvironment(std::make_unique( + "WebRTC-Dav1dDecoder-CropToRenderResolution/Enabled/"))); + decoder.Decode( + CreateEncodedImage(kAv1FrameWith36x20EncodededAnd32x16RenderResolution)); + EXPECT_EQ(decoder.decoded_frame().width(), 32); + EXPECT_EQ(decoder.decoded_frame().height(), 16); +} + +TEST(Dav1dDecoderTest, DoesNotCropToRenderResolutionWhenCropIsDisabled) { + TestAv1Decoder decoder( + CreateEnvironment(std::make_unique( + "WebRTC-Dav1dDecoder-CropToRenderResolution/Disabled/"))); + decoder.Decode( + CreateEncodedImage(kAv1FrameWith36x20EncodededAnd32x16RenderResolution)); + EXPECT_EQ(decoder.decoded_frame().width(), 36); + EXPECT_EQ(decoder.decoded_frame().height(), 20); +} + +} // namespace +} // namespace test +} // namespace webrtc diff --git a/modules/video_coding/codecs/av1/libaom_av1_encoder.cc b/modules/video_coding/codecs/av1/libaom_av1_encoder.cc index 28a8e5f846..d0c513e91b 100644 --- a/modules/video_coding/codecs/av1/libaom_av1_encoder.cc +++ b/modules/video_coding/codecs/av1/libaom_av1_encoder.cc @@ -13,34 +13,48 @@ #include #include +#include #include #include #include "absl/algorithm/container.h" #include "absl/base/macros.h" -#include "absl/strings/match.h" -#include "absl/types/optional.h" +#include "absl/base/nullability.h" +#include "absl/container/inlined_vector.h" +#include "api/environment/environment.h" #include "api/field_trials_view.h" #include "api/scoped_refptr.h" -#include "api/transport/field_trial_based_config.h" #include "api/video/encoded_image.h" -#include "api/video/i420_buffer.h" +#include "api/video/render_resolution.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" #include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_timing.h" #include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_error_codes.h" #include "modules/video_coding/svc/create_scalability_structure.h" 
#include "modules/video_coding/svc/scalable_video_controller.h" -#include "modules/video_coding/svc/scalable_video_controller_no_layering.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/encoder_info_settings.h" #include "rtc_base/logging.h" #include "third_party/libaom/source/libaom/aom/aom_codec.h" #include "third_party/libaom/source/libaom/aom/aom_encoder.h" +#include "third_party/libaom/source/libaom/aom/aom_image.h" #include "third_party/libaom/source/libaom/aom/aomcx.h" +#if (defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64)) && \ + (defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)) +#define MOBILE_ARM +#endif + #define SET_ENCODER_PARAM_OR_RETURN_ERROR(param_id, param_value) \ do { \ if (!SetEncoderControlParameters(param_id, param_value)) { \ @@ -52,14 +66,14 @@ namespace webrtc { namespace { // Encoder configuration parameters -constexpr int kQpMin = 10; +constexpr int kMinQp = 10; +constexpr int kMinQindex = 40; // Min qindex corresponding to kMinQp. constexpr int kUsageProfile = AOM_USAGE_REALTIME; -constexpr int kMinQindex = 145; // Min qindex threshold for QP scaling. -constexpr int kMaxQindex = 205; // Max qindex threshold for QP scaling. +constexpr int kLowQindex = 145; // Low qindex threshold for QP scaling. +constexpr int kHighQindex = 205; // High qindex threshold for QP scaling. constexpr int kBitDepth = 8; constexpr int kLagInFrames = 0; // No look ahead. -constexpr int kRtpTicksPerSecond = 90000; -constexpr double kMinimumFrameRate = 1.0; +constexpr double kMinFrameRateFps = 1.0; aom_superblock_size_t GetSuperblockSize(int width, int height, int threads) { int resolution = width * height; @@ -71,8 +85,7 @@ aom_superblock_size_t GetSuperblockSize(int width, int height, int threads) { class LibaomAv1Encoder final : public VideoEncoder { public: - LibaomAv1Encoder(const absl::optional& aux_config, - const FieldTrialsView& trials); + LibaomAv1Encoder(const Environment& env, LibaomAv1EncoderSettings settings); ~LibaomAv1Encoder(); int InitEncode(const VideoCodec* codec_settings, @@ -102,7 +115,8 @@ class LibaomAv1Encoder final : public VideoEncoder { bool SvcEnabled() const { return svc_params_.has_value(); } // Fills svc_params_ memeber value. Returns false on error. - bool SetSvcParams(ScalableVideoController::StreamLayersConfig svc_config); + bool SetSvcParams(ScalableVideoController::StreamLayersConfig svc_config, + const aom_codec_enc_cfg_t& encoder_config); // Configures the encoder with layer for the next frame. void SetSvcLayerId( const ScalableVideoController::LayerFrameConfig& layer_frame); @@ -113,21 +127,22 @@ class LibaomAv1Encoder final : public VideoEncoder { void MaybeRewrapImgWithFormat(const aom_img_fmt_t fmt); std::unique_ptr svc_controller_; - absl::optional scalability_mode_; + std::optional scalability_mode_; bool inited_; bool rates_configured_; - absl::optional svc_params_; + std::optional svc_params_; VideoCodec encoder_settings_; - absl::optional aux_config_; + LibaomAv1EncoderSettings settings_; aom_image_t* frame_for_encode_; aom_codec_ctx_t ctx_; aom_codec_enc_cfg_t cfg_; EncodedImageCallback* encoded_image_callback_; + double framerate_fps_; // Current target frame rate. int64_t timestamp_; const LibaomAv1EncoderInfoSettings encoder_info_override_; - // TODO(webrtc:15225): Kill switch for disabling frame dropping. Remove it - // after frame dropping is fully rolled out. - bool disable_frame_dropping_; + // TODO(webrtc:351644568): Remove this kill-switch after the feature is fully + // deployed. 
+ const bool post_encode_frame_drop_; }; int32_t VerifyCodecSettings(const VideoCodec& codec_settings) { @@ -152,24 +167,24 @@ int32_t VerifyCodecSettings(const VideoCodec& codec_settings) { if (codec_settings.maxFramerate < 1) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - if (codec_settings.qpMax < kQpMin || codec_settings.qpMax > 63) { + if (codec_settings.qpMax < kMinQp || codec_settings.qpMax > 63) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } return WEBRTC_VIDEO_CODEC_OK; } -LibaomAv1Encoder::LibaomAv1Encoder( - const absl::optional& aux_config, - const FieldTrialsView& trials) +LibaomAv1Encoder::LibaomAv1Encoder(const Environment& env, + LibaomAv1EncoderSettings settings) : inited_(false), rates_configured_(false), - aux_config_(aux_config), + settings_(std::move(settings)), frame_for_encode_(nullptr), encoded_image_callback_(nullptr), + framerate_fps_(0), timestamp_(0), - disable_frame_dropping_(absl::StartsWith( - trials.Lookup("WebRTC-LibaomAv1Encoder-DisableFrameDropping"), - "Enabled")) {} + encoder_info_override_(env.field_trials()), + post_encode_frame_drop_(!env.field_trials().IsDisabled( + "WebRTC-LibaomAv1Encoder-PostEncodeFrameDrop")) {} LibaomAv1Encoder::~LibaomAv1Encoder() { Release(); @@ -214,10 +229,6 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - if (!SetSvcParams(svc_controller_->StreamConfig())) { - return WEBRTC_VIDEO_CODEC_ERROR; - } - // Initialize encoder configuration structure with default values aom_codec_err_t ret = aom_codec_enc_config_default(aom_codec_av1_cx(), &cfg_, kUsageProfile); @@ -233,14 +244,12 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, cfg_.g_threads = NumberOfThreads(cfg_.g_w, cfg_.g_h, settings.number_of_cores); cfg_.g_timebase.num = 1; - cfg_.g_timebase.den = kRtpTicksPerSecond; + cfg_.g_timebase.den = kVideoPayloadTypeFrequency; cfg_.rc_target_bitrate = encoder_settings_.startBitrate; // kilobits/sec. - cfg_.rc_dropframe_thresh = - (!disable_frame_dropping_ && encoder_settings_.GetFrameDropEnabled()) ? 30 - : 0; + cfg_.rc_dropframe_thresh = encoder_settings_.GetFrameDropEnabled() ? 30 : 0; cfg_.g_input_bit_depth = kBitDepth; cfg_.kf_mode = AOM_KF_DISABLED; - cfg_.rc_min_quantizer = kQpMin; + cfg_.rc_min_quantizer = kMinQp; cfg_.rc_max_quantizer = encoder_settings_.qpMax; cfg_.rc_undershoot_pct = 50; cfg_.rc_overshoot_pct = 50; @@ -269,6 +278,11 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, << " on aom_codec_enc_init."; return WEBRTC_VIDEO_CODEC_ERROR; } + + if (!SetSvcParams(svc_controller_->StreamConfig(), cfg_)) { + return WEBRTC_VIDEO_CODEC_ERROR; + } + inited_ = true; // Set control parameters @@ -292,21 +306,7 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_PALETTE, 0); } - if (cfg_.g_threads == 8) { - // Values passed to AV1E_SET_TILE_ROWS and AV1E_SET_TILE_COLUMNS are log2() - // based. - // Use 4 tile columns x 2 tile rows for 8 threads. - SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_TILE_ROWS, 1); - SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_TILE_COLUMNS, 2); - } else if (cfg_.g_threads == 4) { - // Use 2 tile columns x 2 tile rows for 4 threads. 
- SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_TILE_ROWS, 1); - SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_TILE_COLUMNS, 1); - } else { - SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_TILE_COLUMNS, - static_cast(log2(cfg_.g_threads))); - } - + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_AUTO_TILES, 1); SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ROW_MT, 1); SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_OBMC, 0); SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_NOISE_SENSITIVITY, 0); @@ -337,6 +337,11 @@ int LibaomAv1Encoder::InitEncode(const VideoCodec* codec_settings, SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_SMOOTH_INTERINTRA, 0); SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_ENABLE_TX64, 0); SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_MAX_REFERENCE_FRAMES, 3); + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_MAX_CONSEC_FRAME_DROP_MS_CBR, 250); + + if (post_encode_frame_drop_) { + SET_ENCODER_PARAM_OR_RETURN_ERROR(AV1E_SET_POSTENCODE_DROP_RTC, 1); + } return WEBRTC_VIDEO_CODEC_OK; } @@ -353,18 +358,22 @@ bool LibaomAv1Encoder::SetEncoderControlParameters(int param_id, return error_code == AOM_CODEC_OK; } -// Only positive speeds, range for real-time coding currently is: 6 - 8. +// Only positive speeds, range for real-time coding currently is: 6 - 10. +// Speed 11 is used for screen sharing. // Lower means slower/better quality, higher means fastest/lower quality. int LibaomAv1Encoder::GetCpuSpeed(int width, int height) { - if (aux_config_) { - if (auto it = aux_config_->max_pixel_count_to_cpu_speed.lower_bound(width * - height); - it != aux_config_->max_pixel_count_to_cpu_speed.end()) { + if (!settings_.max_pixel_count_to_cpu_speed.empty()) { + if (auto it = + settings_.max_pixel_count_to_cpu_speed.lower_bound(width * height); + it != settings_.max_pixel_count_to_cpu_speed.end()) { return it->second; } return 10; } else { + if (encoder_settings_.mode == VideoCodecMode::kScreensharing) { + return 11; + } // For smaller resolutions, use lower speed setting (get some coding gain at // the cost of increased encoding complexity). switch (encoder_settings_.GetVideoEncoderComplexity()) { @@ -413,8 +422,7 @@ int LibaomAv1Encoder::NumberOfThreads(int width, return 2; } else { // Use 2 threads for low res on ARM. -#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || \ - defined(WEBRTC_ANDROID) +#ifdef MOBILE_ARM if (width * height >= 320 * 180 && number_of_cores > 2) { return 2; } @@ -425,11 +433,12 @@ int LibaomAv1Encoder::NumberOfThreads(int width, } bool LibaomAv1Encoder::SetSvcParams( - ScalableVideoController::StreamLayersConfig svc_config) { + ScalableVideoController::StreamLayersConfig svc_config, + const aom_codec_enc_cfg_t& encoder_config) { bool svc_enabled = svc_config.num_spatial_layers > 1 || svc_config.num_temporal_layers > 1; if (!svc_enabled) { - svc_params_ = absl::nullopt; + svc_params_ = std::nullopt; return true; } if (svc_config.num_spatial_layers < 1 || svc_config.num_spatial_layers > 4) { @@ -450,8 +459,8 @@ bool LibaomAv1Encoder::SetSvcParams( int num_layers = svc_config.num_spatial_layers * svc_config.num_temporal_layers; for (int i = 0; i < num_layers; ++i) { - svc_params.min_quantizers[i] = kQpMin; - svc_params.max_quantizers[i] = encoder_settings_.qpMax; + svc_params.min_quantizers[i] = encoder_config.rc_min_quantizer; + svc_params.max_quantizers[i] = encoder_config.rc_max_quantizer; } // Assume each temporal layer doubles framerate. 
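A minimal sketch of the "each temporal layer doubles framerate" assumption noted at the end of the hunk above; only the arithmetic is implied by the patch, and the function name is an illustrative assumption:

// Framerate available to a receiver that decodes temporal layers 0..tid,
// assuming each additional temporal layer doubles the frame rate.
double CumulativeLayerFramerateFps(double base_framerate_fps,
                                   int num_temporal_layers,
                                   int tid) {
  return base_framerate_fps / (1 << (num_temporal_layers - 1 - tid));
}

// Example: at 30 fps with three temporal layers,
//   T0 only      ->  7.5 fps
//   T0 + T1      -> 15.0 fps
//   T0 + T1 + T2 -> 30.0 fps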
@@ -565,11 +574,11 @@ int32_t LibaomAv1Encoder::Encode( return WEBRTC_VIDEO_CODEC_ERROR; } - rtc::scoped_refptr buffer = frame.video_frame_buffer(); + scoped_refptr buffer = frame.video_frame_buffer(); absl::InlinedVector supported_formats = {VideoFrameBuffer::Type::kI420, VideoFrameBuffer::Type::kNV12}; - rtc::scoped_refptr mapped_buffer; + scoped_refptr mapped_buffer; if (buffer->type() != VideoFrameBuffer::Type::kNative) { // `buffer` is already mapped. mapped_buffer = buffer; @@ -583,7 +592,7 @@ int32_t LibaomAv1Encoder::Encode( (absl::c_find(supported_formats, mapped_buffer->type()) == supported_formats.end() && mapped_buffer->type() != VideoFrameBuffer::Type::kI420A)) { - rtc::scoped_refptr converted_buffer(buffer->ToI420()); + scoped_refptr converted_buffer(buffer->ToI420()); if (!converted_buffer) { RTC_LOG(LS_ERROR) << "Failed to convert " << VideoFrameBufferTypeToString( @@ -604,6 +613,8 @@ int32_t LibaomAv1Encoder::Encode( MaybeRewrapImgWithFormat(AOM_IMG_FMT_I420); auto i420_buffer = mapped_buffer->GetI420(); RTC_DCHECK(i420_buffer); + RTC_CHECK_EQ(i420_buffer->width(), frame_for_encode_->d_w); + RTC_CHECK_EQ(i420_buffer->height(), frame_for_encode_->d_h); frame_for_encode_->planes[AOM_PLANE_Y] = const_cast(i420_buffer->DataY()); frame_for_encode_->planes[AOM_PLANE_U] = @@ -619,6 +630,8 @@ int32_t LibaomAv1Encoder::Encode( MaybeRewrapImgWithFormat(AOM_IMG_FMT_NV12); const NV12BufferInterface* nv12_buffer = mapped_buffer->GetNV12(); RTC_DCHECK(nv12_buffer); + RTC_CHECK_EQ(nv12_buffer->width(), frame_for_encode_->d_w); + RTC_CHECK_EQ(nv12_buffer->height(), frame_for_encode_->d_h); frame_for_encode_->planes[AOM_PLANE_Y] = const_cast(nv12_buffer->DataY()); frame_for_encode_->planes[AOM_PLANE_U] = @@ -633,18 +646,18 @@ int32_t LibaomAv1Encoder::Encode( return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE; } - const uint32_t duration = - kRtpTicksPerSecond / static_cast(encoder_settings_.maxFramerate); + const uint32_t duration = kVideoPayloadTypeFrequency / framerate_fps_; timestamp_ += duration; const size_t num_spatial_layers = svc_params_ ? svc_params_->number_spatial_layers : 1; auto next_layer_frame = layer_frames.begin(); + std::vector> encoded_images; for (size_t i = 0; i < num_spatial_layers; ++i) { // The libaom AV1 encoder requires that `aom_codec_encode` is called for // every spatial layer, even if the configured bitrate for that layer is // zero. For zero bitrate spatial layers no frames will be produced. - absl::optional + std::optional non_encoded_layer_frame; ScalableVideoController::LayerFrameConfig* layer_frame; if (next_layer_frame != layer_frames.end() && @@ -704,8 +717,8 @@ int32_t LibaomAv1Encoder::Encode( encoded_image._frameType = layer_frame->IsKeyframe() ? 
VideoFrameType::kVideoFrameKey : VideoFrameType::kVideoFrameDelta; - encoded_image.SetTimestamp(frame.timestamp()); - encoded_image.SetCaptureTimeIdentifier(frame.capture_time_identifier()); + encoded_image.SetRtpTimestamp(frame.rtp_timestamp()); + encoded_image.SetPresentationTimestamp(frame.presentation_timestamp()); encoded_image.capture_time_ms_ = frame.render_time_ms(); encoded_image.rotation_ = frame.rotation(); encoded_image.content_type_ = VideoContentType::UNSPECIFIED; @@ -758,10 +771,17 @@ int32_t LibaomAv1Encoder::Encode( resolutions = {RenderResolution(cfg_.g_w, cfg_.g_h)}; } } - encoded_image_callback_->OnEncodedImage(encoded_image, - &codec_specific_info); + encoded_images.emplace_back(std::move(encoded_image), + std::move(codec_specific_info)); } } + if (!encoded_images.empty()) { + encoded_images.back().second.end_of_picture = true; + } + for (auto& [encoded_image, codec_specific_info] : encoded_images) { + encoded_image_callback_->OnEncodedImage(encoded_image, + &codec_specific_info); + } return WEBRTC_VIDEO_CODEC_OK; } @@ -771,9 +791,9 @@ void LibaomAv1Encoder::SetRates(const RateControlParameters& parameters) { RTC_LOG(LS_WARNING) << "SetRates() while encoder is not initialized"; return; } - if (parameters.framerate_fps < kMinimumFrameRate) { + if (parameters.framerate_fps < kMinFrameRateFps) { RTC_LOG(LS_WARNING) << "Unsupported framerate (must be >= " - << kMinimumFrameRate + << kMinFrameRateFps << " ): " << parameters.framerate_fps; return; } @@ -796,26 +816,20 @@ void LibaomAv1Encoder::SetRates(const RateControlParameters& parameters) { if (SvcEnabled()) { for (int sid = 0; sid < svc_params_->number_spatial_layers; ++sid) { // libaom bitrate for spatial id S and temporal id T means bitrate - // of frames with spatial_id=S and temporal_id<=T - // while `parameters.bitrate` provdies bitrate of frames with - // spatial_id=S and temporal_id=T - int accumulated_bitrate_bps = 0; + // of frames with spatial_id=S and temporal_id<=T. for (int tid = 0; tid < svc_params_->number_temporal_layers; ++tid) { int layer_index = sid * svc_params_->number_temporal_layers + tid; - accumulated_bitrate_bps += parameters.bitrate.GetBitrate(sid, tid); // `svc_params_->layer_target_bitrate` expects bitrate in kbps. svc_params_->layer_target_bitrate[layer_index] = - accumulated_bitrate_bps / 1000; + parameters.bitrate.GetTemporalLayerSum(sid, tid) / 1000; } } SetEncoderControlParameters(AV1E_SET_SVC_PARAMS, &*svc_params_); } - rates_configured_ = true; + framerate_fps_ = parameters.framerate_fps; - // Set frame rate to closest integer value. - encoder_settings_.maxFramerate = - static_cast(parameters.framerate_fps + 0.5); + rates_configured_ = true; } VideoEncoder::EncoderInfo LibaomAv1Encoder::GetEncoderInfo() const { @@ -824,7 +838,10 @@ VideoEncoder::EncoderInfo LibaomAv1Encoder::GetEncoderInfo() const { info.implementation_name = "libaom"; info.has_trusted_rate_controller = true; info.is_hardware_accelerated = false; - info.scaling_settings = VideoEncoder::ScalingSettings(kMinQindex, kMaxQindex); + info.scaling_settings = + (inited_ && !encoder_settings_.AV1().automatic_resize_on) + ? 
VideoEncoder::ScalingSettings::kOff + : VideoEncoder::ScalingSettings(kLowQindex, kHighQindex); info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420, VideoFrameBuffer::Type::kNV12}; if (SvcEnabled()) { @@ -840,20 +857,17 @@ VideoEncoder::EncoderInfo LibaomAv1Encoder::GetEncoderInfo() const { info.resolution_bitrate_limits = encoder_info_override_.resolution_bitrate_limits(); } + + info.min_qp = kMinQindex; return info; } } // namespace -std::unique_ptr CreateLibaomAv1Encoder() { - return std::make_unique(absl::nullopt, - FieldTrialBasedConfig()); -} - -std::unique_ptr CreateLibaomAv1Encoder( - const LibaomAv1EncoderAuxConfig& aux_config) { - return std::make_unique(aux_config, - FieldTrialBasedConfig()); +absl_nonnull std::unique_ptr CreateLibaomAv1Encoder( + const Environment& env, + LibaomAv1EncoderSettings settings) { + return std::make_unique(env, std::move(settings)); } } // namespace webrtc diff --git a/modules/video_coding/codecs/av1/libaom_av1_encoder.h b/modules/video_coding/codecs/av1/libaom_av1_encoder.h index 2fd1d5a754..e2becda952 100644 --- a/modules/video_coding/codecs/av1/libaom_av1_encoder.h +++ b/modules/video_coding/codecs/av1/libaom_av1_encoder.h @@ -13,18 +13,19 @@ #include #include -#include "absl/strings/string_view.h" +#include "absl/base/nullability.h" +#include "api/environment/environment.h" #include "api/video_codecs/video_encoder.h" namespace webrtc { -struct LibaomAv1EncoderAuxConfig { + +struct LibaomAv1EncoderSettings { // A map of max pixel count --> cpu speed. std::map max_pixel_count_to_cpu_speed; }; - -std::unique_ptr CreateLibaomAv1Encoder(); -std::unique_ptr CreateLibaomAv1Encoder( - const LibaomAv1EncoderAuxConfig& aux_config); +absl_nonnull std::unique_ptr CreateLibaomAv1Encoder( + const Environment& env, + LibaomAv1EncoderSettings settings = {}); } // namespace webrtc diff --git a/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc b/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc index 09bf1bf1ca..33b750cdc6 100644 --- a/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc +++ b/modules/video_coding/codecs/av1/libaom_av1_encoder_unittest.cc @@ -10,32 +10,56 @@ #include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" +#include +#include +#include +#include #include #include +#include +#include #include -#include "absl/types/optional.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/test/create_frame_generator.h" #include "api/test/frame_generator_interface.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/encoded_image.h" +#include "api/video/render_resolution.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/video_coding/codecs/test/encoded_video_frame_producer.h" #include "modules/video_coding/include/video_error_codes.h" -#include "test/field_trial.h" +#include "rtc_base/checks.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/scoped_key_value_config.h" +#include "test/testsupport/file_utils.h" +#include "test/testsupport/frame_reader.h" namespace webrtc { namespace { +using test::ScopedKeyValueConfig; using 
::testing::ElementsAre; using ::testing::Eq; using ::testing::Field; using ::testing::IsEmpty; using ::testing::SizeIs; +using ::testing::Values; VideoCodec DefaultCodecSettings() { VideoCodec codec_settings; + codec_settings.codecType = kVideoCodecAV1; codec_settings.width = 320; codec_settings.height = 180; codec_settings.maxFramerate = 30; @@ -51,12 +75,14 @@ VideoEncoder::Settings DefaultEncoderSettings() { } TEST(LibaomAv1EncoderTest, CanCreate) { - std::unique_ptr encoder = CreateLibaomAv1Encoder(); + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); EXPECT_TRUE(encoder); } TEST(LibaomAv1EncoderTest, InitAndRelease) { - std::unique_ptr encoder = CreateLibaomAv1Encoder(); + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); ASSERT_TRUE(encoder); VideoCodec codec_settings = DefaultCodecSettings(); EXPECT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), @@ -66,7 +92,8 @@ TEST(LibaomAv1EncoderTest, InitAndRelease) { TEST(LibaomAv1EncoderTest, NoBitrateOnTopLayerRefecltedInActiveDecodeTargets) { // Configure encoder with 2 temporal layers. - std::unique_ptr encoder = CreateLibaomAv1Encoder(); + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); codec_settings.SetScalabilityMode(ScalabilityMode::kL1T2); ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), @@ -82,7 +109,7 @@ TEST(LibaomAv1EncoderTest, NoBitrateOnTopLayerRefecltedInActiveDecodeTargets) { EncodedVideoFrameProducer(*encoder).SetNumInputFrames(1).Encode(); ASSERT_THAT(encoded_frames, SizeIs(1)); ASSERT_NE(encoded_frames[0].codec_specific_info.generic_frame_info, - absl::nullopt); + std::nullopt); // Assuming L1T2 structure uses 1st decode target for T0 and 2nd decode target // for T0+T1 frames, expect only 1st decode target is active. EXPECT_EQ(encoded_frames[0] @@ -92,7 +119,8 @@ TEST(LibaomAv1EncoderTest, NoBitrateOnTopLayerRefecltedInActiveDecodeTargets) { TEST(LibaomAv1EncoderTest, SpatialScalabilityInTemporalUnitReportedAsDeltaFrame) { - std::unique_ptr encoder = CreateLibaomAv1Encoder(); + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); codec_settings.SetScalabilityMode(ScalabilityMode::kL2T1); ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), @@ -114,7 +142,8 @@ TEST(LibaomAv1EncoderTest, } TEST(LibaomAv1EncoderTest, NoBitrateOnTopSpatialLayerProduceDeltaFrames) { - std::unique_ptr encoder = CreateLibaomAv1Encoder(); + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); codec_settings.SetScalabilityMode(ScalabilityMode::kL2T1); ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), @@ -141,7 +170,8 @@ TEST(LibaomAv1EncoderTest, SetsEndOfPictureForLastFrameInTemporalUnit) { allocation.SetBitrate(1, 0, 40000); allocation.SetBitrate(2, 0, 30000); - std::unique_ptr encoder = CreateLibaomAv1Encoder(); + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); // Configure encoder with 3 spatial layers. 
codec_settings.SetScalabilityMode(ScalabilityMode::kL3T1); @@ -163,12 +193,42 @@ TEST(LibaomAv1EncoderTest, SetsEndOfPictureForLastFrameInTemporalUnit) { EXPECT_TRUE(encoded_frames[5].codec_specific_info.end_of_picture); } +TEST(LibaomAv1EncoderTest, + SetsEndOfPictureForLastFrameInTemporalUnitWhenLayerDrop) { + VideoBitrateAllocation allocation; + allocation.SetBitrate(0, 0, 30000); + allocation.SetBitrate(1, 0, 40000); + // Lower bitrate for the last spatial layer to provoke layer drop. + allocation.SetBitrate(2, 0, 500); + + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); + VideoCodec codec_settings = DefaultCodecSettings(); + // Configure encoder with 3 spatial layers. + codec_settings.SetScalabilityMode(ScalabilityMode::kL3T1); + codec_settings.startBitrate = allocation.get_sum_kbps(); + ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), + WEBRTC_VIDEO_CODEC_OK); + + encoder->SetRates(VideoEncoder::RateControlParameters( + allocation, codec_settings.maxFramerate)); + + std::vector encoded_frames = + EncodedVideoFrameProducer(*encoder).SetNumInputFrames(2).Encode(); + ASSERT_THAT(encoded_frames, SizeIs(4)); + EXPECT_FALSE(encoded_frames[0].codec_specific_info.end_of_picture); + EXPECT_TRUE(encoded_frames[1].codec_specific_info.end_of_picture); + EXPECT_FALSE(encoded_frames[2].codec_specific_info.end_of_picture); + EXPECT_TRUE(encoded_frames[3].codec_specific_info.end_of_picture); +} + TEST(LibaomAv1EncoderTest, CheckOddDimensionsWithSpatialLayers) { VideoBitrateAllocation allocation; allocation.SetBitrate(0, 0, 30000); allocation.SetBitrate(1, 0, 40000); allocation.SetBitrate(2, 0, 30000); - std::unique_ptr encoder = CreateLibaomAv1Encoder(); + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); // Configure encoder with 3 spatial layers. codec_settings.SetScalabilityMode(ScalabilityMode::kL3T1); @@ -187,19 +247,69 @@ TEST(LibaomAv1EncoderTest, CheckOddDimensionsWithSpatialLayers) { ASSERT_THAT(encoded_frames, SizeIs(6)); } +class LibaomAv1EncoderMaxConsecDropTest + : public ::testing::TestWithParam {}; + +TEST_P(LibaomAv1EncoderMaxConsecDropTest, MaxConsecDrops) { + VideoBitrateAllocation allocation; + allocation.SetBitrate(0, 0, + 2000); // A low bitrate to provoke frame drops. + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); + VideoCodec codec_settings = DefaultCodecSettings(); + codec_settings.SetFrameDropEnabled(true); + codec_settings.SetScalabilityMode(ScalabilityMode::kL1T1); + codec_settings.startBitrate = allocation.get_sum_kbps(); + codec_settings.maxFramerate = GetParam(); + ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), + WEBRTC_VIDEO_CODEC_OK); + encoder->SetRates(VideoEncoder::RateControlParameters( + allocation, codec_settings.maxFramerate)); + std::vector encoded_frames = + EncodedVideoFrameProducer(*encoder) + .SetNumInputFrames(60) + .SetFramerateFps(codec_settings.maxFramerate) + .SetResolution(RenderResolution{320, 180}) + .Encode(); + ASSERT_GE(encoded_frames.size(), 2u); + + int max_consec_drops = 0; + for (size_t i = 1; i < encoded_frames.size(); ++i) { + uint32_t frame_duration_rtp = + encoded_frames[i].encoded_image.RtpTimestamp() - + encoded_frames[i - 1].encoded_image.RtpTimestamp(); + // X consecutive drops result in a freeze of (X + 1) frame duration. + // Subtract 1 to get pure number of drops. 
+ int num_drops = frame_duration_rtp * codec_settings.maxFramerate / + kVideoPayloadTypeFrequency - + 1; + max_consec_drops = std::max(max_consec_drops, num_drops); + } + + const int expected_max_consec_drops = + std::ceil(0.25 * codec_settings.maxFramerate); + EXPECT_EQ(max_consec_drops, expected_max_consec_drops); +} + +INSTANTIATE_TEST_SUITE_P(LibaomAv1EncoderMaxConsecDropTests, + LibaomAv1EncoderMaxConsecDropTest, + Values(1, 2, 5, 15, 30, 60)); + TEST(LibaomAv1EncoderTest, EncoderInfoWithoutResolutionBitrateLimits) { - std::unique_ptr encoder = CreateLibaomAv1Encoder(); + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); EXPECT_TRUE(encoder->GetEncoderInfo().resolution_bitrate_limits.empty()); } TEST(LibaomAv1EncoderTest, EncoderInfoWithBitrateLimitsFromFieldTrial) { - test::ScopedFieldTrials field_trials( + auto field_trials = std::make_unique( "WebRTC-Av1-GetEncoderInfoOverride/" "frame_size_pixels:123|456|789," "min_start_bitrate_bps:11000|22000|33000," "min_bitrate_bps:44000|55000|66000," "max_bitrate_bps:77000|88000|99000/"); - std::unique_ptr encoder = CreateLibaomAv1Encoder(); + const Environment env = CreateEnvironment(std::move(field_trials)); + std::unique_ptr encoder = CreateLibaomAv1Encoder(env); EXPECT_THAT( encoder->GetEncoderInfo().resolution_bitrate_limits, @@ -210,7 +320,8 @@ TEST(LibaomAv1EncoderTest, EncoderInfoWithBitrateLimitsFromFieldTrial) { } TEST(LibaomAv1EncoderTest, EncoderInfoProvidesFpsAllocation) { - std::unique_ptr encoder = CreateLibaomAv1Encoder(); + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); codec_settings.SetScalabilityMode(ScalabilityMode::kL3T3); codec_settings.maxFramerate = 60; @@ -232,7 +343,8 @@ TEST(LibaomAv1EncoderTest, PopulatesEncodedFrameSize) { allocation.SetBitrate(0, 0, 30000); allocation.SetBitrate(1, 0, 40000); allocation.SetBitrate(2, 0, 30000); - std::unique_ptr encoder = CreateLibaomAv1Encoder(); + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); codec_settings.startBitrate = allocation.get_sum_kbps(); ASSERT_GT(codec_settings.width, 4); @@ -265,7 +377,8 @@ TEST(LibaomAv1EncoderTest, PopulatesEncodedFrameSize) { } TEST(LibaomAv1EncoderTest, RtpTimestampWrap) { - std::unique_ptr encoder = CreateLibaomAv1Encoder(); + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); codec_settings.SetScalabilityMode(ScalabilityMode::kL1T1); ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), @@ -288,9 +401,10 @@ TEST(LibaomAv1EncoderTest, RtpTimestampWrap) { Eq(VideoFrameType::kVideoFrameDelta)); } -TEST(LibaomAv1EncoderTest, TestCaptureTimeId) { - std::unique_ptr encoder = CreateLibaomAv1Encoder(); - const Timestamp capture_time_id = Timestamp::Micros(2000); +TEST(LibaomAv1EncoderTest, TestPresentationTimestamp) { + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); + const Timestamp presentation_timestamp = Timestamp::Micros(2000); VideoCodec codec_settings = DefaultCodecSettings(); codec_settings.SetScalabilityMode(ScalabilityMode::kL2T1); ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), @@ -307,21 +421,22 @@ TEST(LibaomAv1EncoderTest, TestCaptureTimeId) { std::vector encoded_frames = EncodedVideoFrameProducer(*encoder) .SetNumInputFrames(1) - .SetCaptureTimeIdentifier(capture_time_id) + 
.SetPresentationTimestamp(presentation_timestamp) .Encode(); ASSERT_THAT(encoded_frames, SizeIs(2)); ASSERT_TRUE( - encoded_frames[0].encoded_image.CaptureTimeIdentifier().has_value()); + encoded_frames[0].encoded_image.PresentationTimestamp().has_value()); ASSERT_TRUE( - encoded_frames[1].encoded_image.CaptureTimeIdentifier().has_value()); - EXPECT_EQ(encoded_frames[0].encoded_image.CaptureTimeIdentifier()->us(), - capture_time_id.us()); - EXPECT_EQ(encoded_frames[1].encoded_image.CaptureTimeIdentifier()->us(), - capture_time_id.us()); + encoded_frames[1].encoded_image.PresentationTimestamp().has_value()); + EXPECT_EQ(encoded_frames[0].encoded_image.PresentationTimestamp()->us(), + presentation_timestamp.us()); + EXPECT_EQ(encoded_frames[1].encoded_image.PresentationTimestamp()->us(), + presentation_timestamp.us()); } TEST(LibaomAv1EncoderTest, AdheresToTargetBitrateDespiteUnevenFrameTiming) { - std::unique_ptr encoder = CreateLibaomAv1Encoder(); + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); codec_settings.SetScalabilityMode(ScalabilityMode::kL1T1); codec_settings.startBitrate = 300; // kbps @@ -345,7 +460,7 @@ TEST(LibaomAv1EncoderTest, AdheresToTargetBitrateDespiteUnevenFrameTiming) { private: Result OnEncodedImage( const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info) override { + const CodecSpecificInfo* /* codec_specific_info */) override { bytes_encoded_ += DataSize::Bytes(encoded_image.size()); return Result(Result::Error::OK); } @@ -381,7 +496,7 @@ TEST(LibaomAv1EncoderTest, AdheresToTargetBitrateDespiteUnevenFrameTiming) { VideoFrame frame = VideoFrame::Builder() .set_video_frame_buffer( frame_buffer_generator->NextFrame().buffer) - .set_timestamp_rtp(rtp_timestamp) + .set_rtp_timestamp(rtp_timestamp) .build(); RTC_CHECK_EQ(encoder->Encode(frame, &frame_types), WEBRTC_VIDEO_CODEC_OK); @@ -398,5 +513,84 @@ TEST(LibaomAv1EncoderTest, AdheresToTargetBitrateDespiteUnevenFrameTiming) { kTargetBitrateBps, kTargetBitrateBps / 10); } +TEST(LibaomAv1EncoderTest, DisableAutomaticResize) { + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); + ASSERT_TRUE(encoder); + VideoCodec codec_settings = DefaultCodecSettings(); + codec_settings.AV1()->automatic_resize_on = false; + EXPECT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), + WEBRTC_VIDEO_CODEC_OK); + EXPECT_EQ(encoder->GetEncoderInfo().scaling_settings.thresholds, + std::nullopt); +} + +TEST(LibaomAv1EncoderTest, PostEncodeFrameDrop) { + // To trigger post-encode frame drop, encode a frame of a high complexity + // using a medium bitrate, then reduce the bitrate and encode the same frame + // again. + // Using a medium bitrate for the first frame prevents quality and QP + // saturation. Encoding the same content twice prevents scene change + // detection. The second frame overshoots RC buffer and provokes post-encode + // drop. 
+ VideoFrame input_frame = + VideoFrame::Builder() + .set_video_frame_buffer( + test::CreateYuvFrameReader( + test::ResourcePath("photo_1850_1110", "yuv"), + {.width = 1850, .height = 1110}) + ->PullFrame()) + .build(); + + VideoBitrateAllocation allocation; + allocation.SetBitrate(/*spatial_index=*/0, /*temporal_index=*/0, + /*bitrate_bps=*/10000000); + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); + VideoCodec codec_settings = DefaultCodecSettings(); + codec_settings.width = input_frame.width(); + codec_settings.height = input_frame.height(); + codec_settings.startBitrate = allocation.get_sum_kbps(); + codec_settings.SetFrameDropEnabled(true); + codec_settings.SetScalabilityMode(ScalabilityMode::kL1T1); + ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), + WEBRTC_VIDEO_CODEC_OK); + encoder->SetRates(VideoEncoder::RateControlParameters( + allocation, codec_settings.maxFramerate)); + + class EncoderCallback : public EncodedImageCallback { + public: + EncoderCallback() = default; + int frames_encoded() const { return frames_encoded_; } + + private: + Result OnEncodedImage( + const EncodedImage& encoded_image, + const CodecSpecificInfo* /* codec_specific_info */) override { + frames_encoded_++; + return Result(Result::Error::OK); + } + + int frames_encoded_ = 0; + } callback; + encoder->RegisterEncodeCompleteCallback(&callback); + + input_frame.set_rtp_timestamp(1 * kVideoPayloadTypeFrequency / + codec_settings.maxFramerate); + RTC_CHECK_EQ(encoder->Encode(input_frame, /*frame_types=*/nullptr), + WEBRTC_VIDEO_CODEC_OK); + + allocation.SetBitrate(/*spatial_index=*/0, /*temporal_index=*/0, + /*bitrate_bps=*/1000); + encoder->SetRates(VideoEncoder::RateControlParameters( + allocation, codec_settings.maxFramerate)); + + input_frame.set_rtp_timestamp(2 * kVideoPayloadTypeFrequency / + codec_settings.maxFramerate); + RTC_CHECK_EQ(encoder->Encode(input_frame, /*frame_types=*/nullptr), + WEBRTC_VIDEO_CODEC_OK); + RTC_CHECK_EQ(callback.frames_encoded(), 1); +} + } // namespace } // namespace webrtc diff --git a/modules/video_coding/codecs/av1/libaom_av1_unittest.cc b/modules/video_coding/codecs/av1/libaom_av1_unittest.cc index 766b7660e4..2993e3994d 100644 --- a/modules/video_coding/codecs/av1/libaom_av1_unittest.cc +++ b/modules/video_coding/codecs/av1/libaom_av1_unittest.cc @@ -13,14 +13,25 @@ #include #include +#include #include +#include #include +#include #include -#include "absl/types/optional.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" +#include "api/video/encoded_image.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_frame.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_encoder.h" #include "modules/video_coding/codecs/av1/dav1d_decoder.h" #include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" @@ -30,7 +41,7 @@ #include "modules/video_coding/svc/create_scalability_structure.h" #include "modules/video_coding/svc/scalability_mode_util.h" #include "modules/video_coding/svc/scalable_video_controller.h" -#include "modules/video_coding/svc/scalable_video_controller_no_layering.h" +#include "rtc_base/checks.h" #include "test/gmock.h" #include "test/gtest.h" @@ -62,6 +73,7 @@ VideoCodec 
DefaultCodecSettings() { codec_settings.height = kHeight; codec_settings.maxFramerate = kFramerate; codec_settings.maxBitrate = 1000; + codec_settings.startBitrate = 1; codec_settings.qpMax = 63; return codec_settings; } @@ -73,8 +85,8 @@ VideoEncoder::Settings DefaultEncoderSettings() { class TestAv1Decoder { public: - explicit TestAv1Decoder(int decoder_id) - : decoder_id_(decoder_id), decoder_(CreateDav1dDecoder()) { + explicit TestAv1Decoder(const Environment& env, int decoder_id) + : decoder_id_(decoder_id), decoder_(CreateDav1dDecoder(env)) { if (decoder_ == nullptr) { ADD_FAILURE() << "Failed to create a decoder#" << decoder_id_; return; @@ -118,8 +130,8 @@ class TestAv1Decoder { return 0; } void Decoded(VideoFrame& /*decoded_image*/, - absl::optional /*decode_time_ms*/, - absl::optional /*qp*/) override { + std::optional /*decode_time_ms*/, + std::optional /*qp*/) override { ++num_called_; } @@ -133,8 +145,9 @@ class TestAv1Decoder { }; TEST(LibaomAv1Test, EncodeDecode) { - TestAv1Decoder decoder(0); - std::unique_ptr encoder = CreateLibaomAv1Encoder(); + const Environment env = CreateEnvironment(); + TestAv1Decoder decoder(env, /*decoder_id=*/0); + std::unique_ptr encoder = CreateLibaomAv1Encoder(env); VideoCodec codec_settings = DefaultCodecSettings(); ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), WEBRTC_VIDEO_CODEC_OK); @@ -178,7 +191,7 @@ struct LayerId { struct SvcTestParam { ScalabilityMode GetScalabilityMode() const { - absl::optional scalability_mode = + std::optional scalability_mode = ScalabilityModeFromString(name); RTC_CHECK(scalability_mode.has_value()); return *scalability_mode; @@ -215,7 +228,8 @@ TEST_P(LibaomAv1SvcTest, EncodeAndDecodeAllDecodeTargets) { size_t num_decode_targets = svc_controller->DependencyStructure().num_decode_targets; - std::unique_ptr encoder = CreateLibaomAv1Encoder(); + const Environment env = CreateEnvironment(); + std::unique_ptr encoder = CreateLibaomAv1Encoder(env); VideoCodec codec_settings = DefaultCodecSettings(); codec_settings.SetScalabilityMode(GetParam().GetScalabilityMode()); ASSERT_EQ(encoder->InitEncode(&codec_settings, DefaultEncoderSettings()), @@ -237,7 +251,7 @@ TEST_P(LibaomAv1SvcTest, EncodeAndDecodeAllDecodeTargets) { }))); for (size_t dt = 0; dt < num_decode_targets; ++dt) { - TestAv1Decoder decoder(dt); + TestAv1Decoder decoder(env, dt); std::vector requested_ids; for (int64_t frame_id = 0; frame_id < static_cast(encoded_frames.size()); ++frame_id) { @@ -286,7 +300,8 @@ TEST_P(LibaomAv1SvcTest, SetRatesMatchMeasuredBitrate) { kv.second.bps()); } - std::unique_ptr encoder = CreateLibaomAv1Encoder(); + std::unique_ptr encoder = + CreateLibaomAv1Encoder(CreateEnvironment()); ASSERT_TRUE(encoder); VideoCodec codec_settings = DefaultCodecSettings(); codec_settings.SetScalabilityMode(param.GetScalabilityMode()); @@ -350,7 +365,8 @@ INSTANTIATE_TEST_SUITE_P( SvcTestParam{"L3T1", /*num_frames_to_generate=*/3}, SvcTestParam{"L3T3", /*num_frames_to_generate=*/8}, SvcTestParam{"S2T1", /*num_frames_to_generate=*/3}, - SvcTestParam{"S3T3", /*num_frames_to_generate=*/8}, + // TODO: bugs.webrtc.org/15715 - Re-enable once AV1 is fixed. 
+ // SvcTestParam{"S3T3", /*num_frames_to_generate=*/8}, SvcTestParam{"L2T2", /*num_frames_to_generate=*/4}, SvcTestParam{"L2T2_KEY", /*num_frames_to_generate=*/4}, SvcTestParam{"L2T2_KEY_SHIFT", diff --git a/modules/video_coding/codecs/h264/h264.cc b/modules/video_coding/codecs/h264/h264.cc index 5b9f0338a9..ffee2af7f5 100644 --- a/modules/video_coding/codecs/h264/h264.cc +++ b/modules/video_coding/codecs/h264/h264.cc @@ -12,10 +12,10 @@ #include "modules/video_coding/codecs/h264/include/h264.h" #include +#include #include #include "absl/container/inlined_vector.h" -#include "absl/types/optional.h" #include "api/video_codecs/sdp_video_format.h" #include "media/base/media_constants.h" #include "rtc_base/trace_event.h" @@ -54,7 +54,7 @@ SdpVideoFormat CreateH264Format(H264Profile profile, H264Level level, const std::string& packetization_mode, bool add_scalability_modes) { - const absl::optional profile_string = + const std::optional profile_string = H264ProfileLevelIdToString(H264ProfileLevelId(profile, level)); RTC_CHECK(profile_string); absl::InlinedVector scalability_modes; @@ -63,12 +63,11 @@ SdpVideoFormat CreateH264Format(H264Profile profile, scalability_modes.push_back(scalability_mode); } } - return SdpVideoFormat( - cricket::kH264CodecName, - {{cricket::kH264FmtpProfileLevelId, *profile_string}, - {cricket::kH264FmtpLevelAsymmetryAllowed, "1"}, - {cricket::kH264FmtpPacketizationMode, packetization_mode}}, - scalability_modes); + return SdpVideoFormat(kH264CodecName, + {{kH264FmtpProfileLevelId, *profile_string}, + {kH264FmtpLevelAsymmetryAllowed, "1"}, + {kH264FmtpPacketizationMode, packetization_mode}}, + scalability_modes); } void DisableRtcUseH264() { @@ -121,20 +120,27 @@ std::vector SupportedH264DecoderCodecs() { return supportedCodecs; } -std::unique_ptr H264Encoder::Create() { - return Create(cricket::CreateVideoCodec(cricket::kH264CodecName)); +H264EncoderSettings H264EncoderSettings::Parse(const SdpVideoFormat& format) { + if (auto it = format.parameters.find(kH264FmtpPacketizationMode); + it != format.parameters.end()) { + if (it->second == "0") { + return {.packetization_mode = H264PacketizationMode::SingleNalUnit}; + } else if (it->second == "1") { + return {.packetization_mode = H264PacketizationMode::NonInterleaved}; + } + } + return {}; } -std::unique_ptr H264Encoder::Create( - const cricket::VideoCodec& codec) { - RTC_DCHECK(H264Encoder::IsSupported()); +absl_nonnull std::unique_ptr CreateH264Encoder( + [[maybe_unused]] const Environment& env, + [[maybe_unused]] H264EncoderSettings settings) { #if defined(WEBRTC_USE_H264) RTC_CHECK(g_rtc_use_h264); RTC_LOG(LS_INFO) << "Creating H264EncoderImpl."; - return std::make_unique(codec); + return std::make_unique(env, settings); #else - RTC_DCHECK_NOTREACHED(); - return nullptr; + RTC_CHECK_NOTREACHED(); #endif } diff --git a/modules/video_coding/codecs/h264/h264_color_space.h b/modules/video_coding/codecs/h264/h264_color_space.h index 392ccaf563..5a93575184 100644 --- a/modules/video_coding/codecs/h264/h264_color_space.h +++ b/modules/video_coding/codecs/h264/h264_color_space.h @@ -20,12 +20,12 @@ #error "See: bugs.webrtc.org/9213#c13." #endif -#include "api/video/color_space.h" - extern "C" { -#include "third_party/ffmpeg/libavcodec/avcodec.h" +#include } // extern "C" +#include "api/video/color_space.h" + namespace webrtc { // Helper class for extracting color space information from H264 stream. 
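
A note on the factory signatures changed earlier in this patch (CreateLibaomAv1Encoder and CreateH264Encoder): both now take a webrtc::Environment instead of constructing a FieldTrialBasedConfig internally, and the per-codec options move into plain settings structs. The sketch below is illustrative only and not part of the patch; it assumes the H264 SdpVideoFormat comes from SDP negotiation elsewhere and uses the include paths introduced above.

// Sketch only; not code from this patch. `h264_format` is assumed to come
// from SDP negotiation.
#include <memory>
#include <vector>

#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_encoder.h"
#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h"
#include "modules/video_coding/codecs/h264/include/h264.h"

std::vector<std::unique_ptr<webrtc::VideoEncoder>> CreateEncoders(
    const webrtc::SdpVideoFormat& h264_format) {
  const webrtc::Environment env = webrtc::CreateEnvironment();
  std::vector<std::unique_ptr<webrtc::VideoEncoder>> encoders;
  // AV1: the settings argument is optional; a default-constructed
  // LibaomAv1EncoderSettings leaves max_pixel_count_to_cpu_speed empty.
  encoders.push_back(webrtc::CreateLibaomAv1Encoder(env));
  // H264: the packetization mode now travels in H264EncoderSettings instead
  // of a cricket::VideoCodec; Parse() derives it from the negotiated format.
  encoders.push_back(webrtc::CreateH264Encoder(
      env, webrtc::H264EncoderSettings::Parse(h264_format)));
  return encoders;
}

Both factories still return plain VideoEncoder instances, so existing InitEncode/Encode call sites do not need to change.
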
diff --git a/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/modules/video_coding/codecs/h264/h264_decoder_impl.cc index f67718cb23..f3737a1edd 100644 --- a/modules/video_coding/codecs/h264/h264_decoder_impl.cc +++ b/modules/video_coding/codecs/h264/h264_decoder_impl.cc @@ -16,21 +16,22 @@ #include "modules/video_coding/codecs/h264/h264_decoder_impl.h" +extern "C" { +#include +#include +#include +} // extern "C" + #include #include #include -extern "C" { -#include "third_party/ffmpeg/libavcodec/avcodec.h" -#include "third_party/ffmpeg/libavformat/avformat.h" -#include "third_party/ffmpeg/libavutil/imgutils.h" -} // extern "C" - #include "api/video/color_space.h" #include "api/video/i010_buffer.h" #include "api/video/i420_buffer.h" #include "common_video/include/video_frame_buffer.h" #include "modules/video_coding/codecs/h264/h264_color_space.h" +#include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "system_wrappers/include/metrics.h" @@ -80,7 +81,11 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context, kPixelFormatsSupported.begin(), kPixelFormatsSupported.end(), [context](AVPixelFormat format) { return context->pix_fmt == format; }); - RTC_CHECK(pixelFormatSupported != kPixelFormatsSupported.end()); + if (pixelFormatSupported == kPixelFormatsSupported.end()) { + RTC_LOG(LS_ERROR) << "Unsupported pixel format: " << context->pix_fmt; + decoder->ReportError(); + return -1; + } // `av_frame->width` and `av_frame->height` are set by FFmpeg. These are the // actual image's dimensions and may be different from `context->width` and @@ -114,13 +119,13 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context, // http://crbug.com/390941. Our pool is set up to zero-initialize new buffers. // TODO(https://crbug.com/390941): Delete that feature from the video pool, // instead add an explicit call to InitializeData here. - rtc::scoped_refptr frame_buffer; - rtc::scoped_refptr i444_buffer; - rtc::scoped_refptr i420_buffer; - rtc::scoped_refptr i422_buffer; - rtc::scoped_refptr i010_buffer; - rtc::scoped_refptr i210_buffer; - rtc::scoped_refptr i410_buffer; + webrtc::scoped_refptr frame_buffer; + webrtc::scoped_refptr i444_buffer; + webrtc::scoped_refptr i420_buffer; + webrtc::scoped_refptr i422_buffer; + webrtc::scoped_refptr i010_buffer; + webrtc::scoped_refptr i210_buffer; + webrtc::scoped_refptr i410_buffer; int bytes_per_pixel = 1; switch (context->pix_fmt) { case AV_PIX_FMT_YUV420P: @@ -229,7 +234,6 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context, int total_size = y_size + 2 * uv_size; av_frame->format = context->pix_fmt; - av_frame->reordered_opaque = context->reordered_opaque; // Create a VideoFrame object, to keep a reference to the buffer. // TODO(nisse): The VideoFrame's timestamp and rotation info is not used. 
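
For context on the AVGetBuffer2 guard above: the `decoder` it reports errors through is recovered from AVCodecContext::opaque, which is set when the decoder installs its custom allocator. The snippet below is a self-contained illustration of that standard FFmpeg pattern with hypothetical names; it is not a quote of h264_decoder_impl.cc.

extern "C" {
#include <libavcodec/avcodec.h>
}

namespace example {  // hypothetical namespace, for illustration only

struct OwningDecoder;  // stands in for H264DecoderImpl; only its address is used

// Matches the AVCodecContext::get_buffer2 signature:
// int (*)(AVCodecContext*, AVFrame*, int).
int GetBuffer2(AVCodecContext* context, AVFrame* frame, int flags) {
  auto* owner = static_cast<OwningDecoder*>(context->opaque);
  (void)owner;  // a real callback would allocate frame->buf[0] from its own pool
  return avcodec_default_get_buffer2(context, frame, flags);
}

void InstallCustomAllocator(AVCodecContext* context, OwningDecoder* owner) {
  context->opaque = owner;          // lets the static callback find its decoder
  context->get_buffer2 = &GetBuffer2;
}

}  // namespace example
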
@@ -324,7 +328,7 @@ bool H264DecoderImpl::Configure(const Settings& settings) { av_frame_.reset(av_frame_alloc()); - if (absl::optional buffer_pool_size = settings.buffer_pool_size()) { + if (std::optional buffer_pool_size = settings.buffer_pool_size()) { if (!ffmpeg_buffer_pool_.Resize(*buffer_pool_size)) { return false; } @@ -377,8 +381,6 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image, return WEBRTC_VIDEO_CODEC_ERROR; } packet->size = static_cast(input_image.size()); - int64_t frame_timestamp_us = input_image.ntp_time_ms_ * 1000; // ms -> μs - av_context_->reordered_opaque = frame_timestamp_us; int result = avcodec_send_packet(av_context_.get(), packet.get()); @@ -395,19 +397,15 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image, return WEBRTC_VIDEO_CODEC_ERROR; } - // We don't expect reordering. Decoded frame timestamp should match - // the input one. - RTC_DCHECK_EQ(av_frame_->reordered_opaque, frame_timestamp_us); - // TODO(sakal): Maybe it is possible to get QP directly from FFmpeg. h264_bitstream_parser_.ParseBitstream(input_image); - absl::optional qp = h264_bitstream_parser_.GetLastSliceQp(); + std::optional qp = h264_bitstream_parser_.GetLastSliceQp(); // Obtain the `video_frame` containing the decoded image. VideoFrame* input_frame = static_cast(av_buffer_get_opaque(av_frame_->buf[0])); RTC_DCHECK(input_frame); - rtc::scoped_refptr frame_buffer = + webrtc::scoped_refptr frame_buffer = input_frame->video_frame_buffer(); // Instantiate Planar YUV buffer according to video frame buffer type @@ -532,7 +530,7 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image, return WEBRTC_VIDEO_CODEC_ERROR; } - rtc::scoped_refptr cropped_buffer; + webrtc::scoped_refptr cropped_buffer; switch (video_frame_buffer_type) { case VideoFrameBuffer::Type::kI420: cropped_buffer = WrapI420Buffer( @@ -612,14 +610,14 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image, VideoFrame decoded_frame = VideoFrame::Builder() .set_video_frame_buffer(cropped_buffer) - .set_timestamp_rtp(input_image.Timestamp()) + .set_rtp_timestamp(input_image.RtpTimestamp()) .set_color_space(color_space) .build(); // Return decoded frame. // TODO(nisse): Timestamp and rotation are all zero here. Change decoder // interface to pass a VideoFrameBuffer instead of a VideoFrame? - decoded_image_callback_->Decoded(decoded_frame, absl::nullopt, qp); + decoded_image_callback_->Decoded(decoded_frame, std::nullopt, qp); // Stop referencing it, possibly freeing `input_frame`. av_frame_unref(av_frame_.get()); diff --git a/modules/video_coding/codecs/h264/h264_decoder_impl.h b/modules/video_coding/codecs/h264/h264_decoder_impl.h index 97d091cf4b..e9da6a7a0d 100644 --- a/modules/video_coding/codecs/h264/h264_decoder_impl.h +++ b/modules/video_coding/codecs/h264/h264_decoder_impl.h @@ -21,10 +21,6 @@ #error "See: bugs.webrtc.org/9213#c13." #endif -#include - -#include "modules/video_coding/codecs/h264/include/h264.h" - // CAVEAT: According to ffmpeg docs for avcodec_send_packet, ffmpeg requires a // few extra padding bytes after the end of input. And in addition, docs for // AV_INPUT_BUFFER_PADDING_SIZE says "If the first 23 bits of the additional @@ -40,11 +36,14 @@ // passed to ffmpeg. 
extern "C" { -#include "third_party/ffmpeg/libavcodec/avcodec.h" +#include } // extern "C" +#include + #include "common_video/h264/h264_bitstream_parser.h" #include "common_video/include/video_frame_buffer_pool.h" +#include "modules/video_coding/codecs/h264/include/h264.h" namespace webrtc { diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/modules/video_coding/codecs/h264/h264_encoder_impl.cc index b6023ac502..7792d53e32 100644 --- a/modules/video_coding/codecs/h264/h264_encoder_impl.cc +++ b/modules/video_coding/codecs/h264/h264_encoder_impl.cc @@ -18,13 +18,15 @@ #include #include +#include #include #include "absl/strings/match.h" -#include "absl/types/optional.h" #include "api/video/video_codec_constants.h" #include "api/video_codecs/scalability_mode.h" #include "common_video/libyuv/include/webrtc_libyuv.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_error_codes.h" #include "modules/video_coding/svc/create_scalability_structure.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "modules/video_coding/utility/simulcast_utility.h" @@ -56,7 +58,7 @@ enum H264EncoderImplEvent { kH264EncoderEventMax = 16, }; -int NumberOfThreads(absl::optional encoder_thread_limit, +int NumberOfThreads(std::optional encoder_thread_limit, int width, int height, int number_of_cores) { @@ -100,7 +102,7 @@ VideoFrameType ConvertToVideoFrameType(EVideoFrameType type) { return VideoFrameType::kEmptyFrame; } -absl::optional ScalabilityModeFromTemporalLayers( +std::optional ScalabilityModeFromTemporalLayers( int num_temporal_layers) { switch (num_temporal_layers) { case 0: @@ -114,7 +116,7 @@ absl::optional ScalabilityModeFromTemporalLayers( default: RTC_DCHECK_NOTREACHED(); } - return absl::nullopt; + return std::nullopt; } } // namespace @@ -170,20 +172,15 @@ static void RtpFragmentize(EncodedImage* encoded_image, SFrameBSInfo* info) { } } -H264EncoderImpl::H264EncoderImpl(const cricket::VideoCodec& codec) - : packetization_mode_(H264PacketizationMode::SingleNalUnit), +H264EncoderImpl::H264EncoderImpl(const Environment& env, + H264EncoderSettings settings) + : env_(env), + packetization_mode_(settings.packetization_mode), max_payload_size_(0), number_of_cores_(0), encoded_image_callback_(nullptr), has_reported_init_(false), has_reported_error_(false) { - RTC_CHECK(absl::EqualsIgnoreCase(codec.name, cricket::kH264CodecName)); - std::string packetization_mode_string; - if (codec.GetParam(cricket::kH264FmtpPacketizationMode, - &packetization_mode_string) && - packetization_mode_string == "1") { - packetization_mode_ = H264PacketizationMode::NonInterleaved; - } downscaled_buffers_.reserve(kMaxSimulcastStreams - 1); encoded_images_.reserve(kMaxSimulcastStreams); encoders_.reserve(kMaxSimulcastStreams); @@ -330,7 +327,7 @@ int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst, } } - SimulcastRateAllocator init_allocator(codec_); + SimulcastRateAllocator init_allocator(env_, codec_); VideoBitrateAllocation allocation = init_allocator.Allocate(VideoBitrateAllocationParameters( DataRate::KilobitsPerSec(codec_.startBitrate), codec_.maxFramerate)); @@ -423,7 +420,7 @@ int32_t H264EncoderImpl::Encode( return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } - rtc::scoped_refptr frame_buffer = + webrtc::scoped_refptr frame_buffer = input_frame.video_frame_buffer()->ToI420(); if (!frame_buffer) { RTC_LOG(LS_ERROR) << "Failed to convert " @@ -533,7 +530,7 @@ int32_t H264EncoderImpl::Encode( encoded_images_[i]._encodedWidth = 
configurations_[i].width; encoded_images_[i]._encodedHeight = configurations_[i].height; - encoded_images_[i].SetTimestamp(input_frame.timestamp()); + encoded_images_[i].SetRtpTimestamp(input_frame.rtp_timestamp()); encoded_images_[i].SetColorSpace(input_frame.color_space()); encoded_images_[i]._frameType = ConvertToVideoFrameType(info.eFrameType); encoded_images_[i].SetSimulcastIndex(configurations_[i].simulcast_idx); @@ -565,10 +562,20 @@ int32_t H264EncoderImpl::Encode( codec_specific.codecSpecific.H264.base_layer_sync = tid > 0 && tid < tl0sync_limit_[i]; if (svc_controllers_[i]) { + if (encoded_images_[i]._frameType == VideoFrameType::kVideoFrameKey) { + // Reset the ScalableVideoController on key frame + // to reset the expected dependency structure. + layer_frames = + svc_controllers_[i]->NextFrameConfig(/* restart= */ true); + RTC_CHECK_EQ(layer_frames.size(), 1); + RTC_DCHECK_EQ(layer_frames[0].TemporalId(), 0); + RTC_DCHECK_EQ(layer_frames[0].IsKeyframe(), true); + } + if (layer_frames[0].TemporalId() != tid) { RTC_LOG(LS_WARNING) - << "Encoder produced a frame for layer S" << (i + 1) << "T" - << tid + 1 << " that wasn't requested."; + << "Encoder produced a frame with temporal id " << tid + << ", expected " << layer_frames[0].TemporalId() << "."; continue; } encoded_images_[i].SetTemporalIndex(tid); @@ -583,7 +590,8 @@ int32_t H264EncoderImpl::Encode( if (svc_controllers_[i]) { codec_specific.generic_frame_info = svc_controllers_[i]->OnEncodeDone(layer_frames[0]); - if (send_key_frame && codec_specific.generic_frame_info.has_value()) { + if (encoded_images_[i]._frameType == VideoFrameType::kVideoFrameKey && + codec_specific.generic_frame_info.has_value()) { codec_specific.template_structure = svc_controllers_[i]->DependencyStructure(); } diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.h b/modules/video_coding/codecs/h264/h264_encoder_impl.h index 19c16f3d8b..2d4a95311d 100644 --- a/modules/video_coding/codecs/h264/h264_encoder_impl.h +++ b/modules/video_coding/codecs/h264/h264_encoder_impl.h @@ -40,7 +40,7 @@ class ISVCEncoder; namespace webrtc { -class H264EncoderImpl : public H264Encoder { +class H264EncoderImpl : public VideoEncoder { public: struct LayerConfig { int simulcast_idx = 0; @@ -58,8 +58,8 @@ class H264EncoderImpl : public H264Encoder { void SetStreamState(bool send_stream); }; - public: - explicit H264EncoderImpl(const cricket::VideoCodec& codec); + H264EncoderImpl(const Environment& env, H264EncoderSettings settings); + ~H264EncoderImpl() override; // `settings.max_payload_size` is ignored. 
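
Since H264EncoderImpl now derives from VideoEncoder directly (no longer from H264Encoder) and is constructed from an Environment plus H264EncoderSettings, callers that previously passed a cricket::VideoCodec construct it as sketched below. This mirrors the updated unit tests later in the patch; the wrapper function itself is illustrative only.

#include "api/environment/environment_factory.h"
#include "modules/video_coding/codecs/h264/h264_encoder_impl.h"

void MakeSingleNalUnitEncoder() {  // illustrative wrapper, not in the patch
  webrtc::H264EncoderImpl encoder(
      webrtc::CreateEnvironment(),
      {.packetization_mode = webrtc::H264PacketizationMode::SingleNalUnit});
  // InitEncode()/Encode() are unchanged. Passing {} for the settings keeps the
  // struct's default, H264PacketizationMode::NonInterleaved.
}
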
@@ -99,18 +99,19 @@ class H264EncoderImpl : public H264Encoder { std::vector encoders_; std::vector pictures_; - std::vector> downscaled_buffers_; + std::vector> downscaled_buffers_; std::vector configurations_; std::vector encoded_images_; std::vector> svc_controllers_; - absl::InlinedVector, kMaxSimulcastStreams> + absl::InlinedVector, kMaxSimulcastStreams> scalability_modes_; + const Environment env_; VideoCodec codec_; H264PacketizationMode packetization_mode_; size_t max_payload_size_; int32_t number_of_cores_; - absl::optional encoder_thread_limit_; + std::optional encoder_thread_limit_; EncodedImageCallback* encoded_image_callback_; bool has_reported_init_; diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl_unittest.cc b/modules/video_coding/codecs/h264/h264_encoder_impl_unittest.cc index 3a139ab1c3..e714e277e8 100644 --- a/modules/video_coding/codecs/h264/h264_encoder_impl_unittest.cc +++ b/modules/video_coding/codecs/h264/h264_encoder_impl_unittest.cc @@ -11,7 +11,9 @@ #include "modules/video_coding/codecs/h264/h264_encoder_impl.h" +#include "api/environment/environment_factory.h" #include "api/video_codecs/video_encoder.h" +#include "modules/video_coding/include/video_error_codes.h" #include "test/gtest.h" namespace webrtc { @@ -39,7 +41,7 @@ void SetDefaultSettings(VideoCodec* codec_settings) { } TEST(H264EncoderImplTest, CanInitializeWithDefaultParameters) { - H264EncoderImpl encoder(cricket::CreateVideoCodec("H264")); + H264EncoderImpl encoder(CreateEnvironment(), {}); VideoCodec codec_settings; SetDefaultSettings(&codec_settings); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, @@ -49,9 +51,9 @@ TEST(H264EncoderImplTest, CanInitializeWithDefaultParameters) { } TEST(H264EncoderImplTest, CanInitializeWithNonInterleavedModeExplicitly) { - cricket::VideoCodec codec = cricket::CreateVideoCodec("H264"); - codec.SetParam(cricket::kH264FmtpPacketizationMode, "1"); - H264EncoderImpl encoder(codec); + H264EncoderImpl encoder( + CreateEnvironment(), + {.packetization_mode = H264PacketizationMode::NonInterleaved}); VideoCodec codec_settings; SetDefaultSettings(&codec_settings); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, @@ -61,21 +63,9 @@ TEST(H264EncoderImplTest, CanInitializeWithNonInterleavedModeExplicitly) { } TEST(H264EncoderImplTest, CanInitializeWithSingleNalUnitModeExplicitly) { - cricket::VideoCodec codec = cricket::CreateVideoCodec("H264"); - codec.SetParam(cricket::kH264FmtpPacketizationMode, "0"); - H264EncoderImpl encoder(codec); - VideoCodec codec_settings; - SetDefaultSettings(&codec_settings); - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, - encoder.InitEncode(&codec_settings, kSettings)); - EXPECT_EQ(H264PacketizationMode::SingleNalUnit, - encoder.PacketizationModeForTesting()); -} - -TEST(H264EncoderImplTest, CanInitializeWithRemovedParameter) { - cricket::VideoCodec codec = cricket::CreateVideoCodec("H264"); - codec.RemoveParam(cricket::kH264FmtpPacketizationMode); - H264EncoderImpl encoder(codec); + H264EncoderImpl encoder( + CreateEnvironment(), + {.packetization_mode = H264PacketizationMode::SingleNalUnit}); VideoCodec codec_settings; SetDefaultSettings(&codec_settings); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, diff --git a/modules/video_coding/codecs/h264/h264_simulcast_unittest.cc b/modules/video_coding/codecs/h264/h264_simulcast_unittest.cc index e191f51f0a..b9ea324ca2 100644 --- a/modules/video_coding/codecs/h264/h264_simulcast_unittest.cc +++ b/modules/video_coding/codecs/h264/h264_simulcast_unittest.cc @@ -24,13 +24,15 @@ namespace { std::unique_ptr CreateSpecificSimulcastTestFixture() { 
std::unique_ptr encoder_factory = std::make_unique( - []() { return H264Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateH264Encoder(env); + }); std::unique_ptr decoder_factory = std::make_unique( []() { return H264Decoder::Create(); }); return CreateSimulcastTestFixture(std::move(encoder_factory), std::move(decoder_factory), - SdpVideoFormat("H264")); + SdpVideoFormat::H264()); } } // namespace diff --git a/modules/video_coding/codecs/h264/include/h264.h b/modules/video_coding/codecs/h264/include/h264.h index 025a6ba1f0..6255ce95cb 100644 --- a/modules/video_coding/codecs/h264/include/h264.h +++ b/modules/video_coding/codecs/h264/include/h264.h @@ -16,16 +16,18 @@ #include #include +#include "absl/base/nullability.h" +#include "api/environment/environment.h" #include "api/video_codecs/h264_profile_level_id.h" #include "api/video_codecs/scalability_mode.h" -#include "media/base/codec.h" -#include "modules/video_coding/include/video_codec_interface.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder.h" +#include "api/video_codecs/video_encoder.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { -struct SdpVideoFormat; - // Creates an H264 SdpVideoFormat entry with specified paramters. RTC_EXPORT SdpVideoFormat CreateH264Format(H264Profile profile, @@ -49,16 +51,24 @@ std::vector SupportedH264Codecs( // only connections. std::vector SupportedH264DecoderCodecs(); -class RTC_EXPORT H264Encoder : public VideoEncoder { +class RTC_EXPORT H264Encoder { public: - static std::unique_ptr Create(const cricket::VideoCodec& codec); - static std::unique_ptr Create(); // If H.264 is supported (any implementation). static bool IsSupported(); static bool SupportsScalabilityMode(ScalabilityMode scalability_mode); +}; + +struct H264EncoderSettings { + // Use factory function rather than constructor to allow to create + // `H264EncoderSettings` with designated initializers. + static H264EncoderSettings Parse(const SdpVideoFormat& format); - ~H264Encoder() override {} + H264PacketizationMode packetization_mode = + H264PacketizationMode::NonInterleaved; }; +absl_nonnull std::unique_ptr CreateH264Encoder( + const Environment& env, + H264EncoderSettings settings = {}); class RTC_EXPORT H264Decoder : public VideoDecoder { public: diff --git a/modules/video_coding/codecs/h264/include/h264_globals.h b/modules/video_coding/codecs/h264/include/h264_globals.h index 6a1de382dc..dac36dd734 100644 --- a/modules/video_coding/codecs/h264/include/h264_globals.h +++ b/modules/video_coding/codecs/h264/include/h264_globals.h @@ -16,6 +16,7 @@ #include #include +#include #include "modules/video_coding/codecs/interface/common_constants.h" #include "rtc_base/checks.h" @@ -72,8 +73,6 @@ struct NaluInfo { } }; -const size_t kMaxNalusPerPacket = 10; - struct RTPVideoHeaderH264 { // The NAL unit type. If this is a header for a // fragmented packet, it's the NAL unit type of @@ -83,8 +82,7 @@ struct RTPVideoHeaderH264 { uint8_t nalu_type; // The packetization type of this buffer - single, aggregated or fragmented. H264PacketizationTypes packetization_type; - NaluInfo nalus[kMaxNalusPerPacket]; - size_t nalus_length; + std::vector nalus; // The packetization mode of this transport. Packetization mode // determines which packetization types are allowed when packetizing. 
H264PacketizationMode packetization_mode; @@ -93,8 +91,7 @@ struct RTPVideoHeaderH264 { const RTPVideoHeaderH264& rhs) { return lhs.nalu_type == rhs.nalu_type && lhs.packetization_type == rhs.packetization_type && - std::equal(lhs.nalus, lhs.nalus + lhs.nalus_length, rhs.nalus, - rhs.nalus + rhs.nalus_length) && + lhs.nalus == rhs.nalus && lhs.packetization_mode == rhs.packetization_mode; } diff --git a/modules/video_coding/codecs/h264/test/h264_impl_unittest.cc b/modules/video_coding/codecs/h264/test/h264_impl_unittest.cc index b8a9addb2b..985224d07e 100644 --- a/modules/video_coding/codecs/h264/test/h264_impl_unittest.cc +++ b/modules/video_coding/codecs/h264/test/h264_impl_unittest.cc @@ -11,8 +11,8 @@ #include #include +#include -#include "absl/types/optional.h" #include "api/video/color_space.h" #include "api/video/encoded_image.h" #include "api/video/video_frame.h" @@ -34,7 +34,7 @@ namespace webrtc { class TestH264Impl : public VideoCodecUnitTest { protected: std::unique_ptr CreateEncoder() override { - return H264Encoder::Create(); + return CreateH264Encoder(env_); } std::unique_ptr CreateDecoder() override { @@ -64,7 +64,7 @@ TEST_F(TestH264Impl, MAYBE_EncodeDecode) { encoded_frame._frameType = VideoFrameType::kVideoFrameKey; EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, 0)); std::unique_ptr decoded_frame; - absl::optional decoded_qp; + std::optional decoded_qp; ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp)); ASSERT_TRUE(decoded_frame); EXPECT_GT(I420PSNR(&input_frame, decoded_frame.get()), 36); @@ -89,7 +89,7 @@ TEST_F(TestH264Impl, MAYBE_DecodedQpEqualsEncodedQp) { encoded_frame._frameType = VideoFrameType::kVideoFrameKey; EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, 0)); std::unique_ptr decoded_frame; - absl::optional decoded_qp; + std::optional decoded_qp; ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp)); ASSERT_TRUE(decoded_frame); ASSERT_TRUE(decoded_qp); diff --git a/modules/video_coding/codecs/interface/libvpx_interface.cc b/modules/video_coding/codecs/interface/libvpx_interface.cc index 4f33bef2ba..73c23e71b9 100644 --- a/modules/video_coding/codecs/interface/libvpx_interface.cc +++ b/modules/video_coding/codecs/interface/libvpx_interface.cc @@ -10,9 +10,15 @@ #include "modules/video_coding/codecs/interface/libvpx_interface.h" +#include #include #include "rtc_base/checks.h" +#include "vpx/vp8cx.h" +#include "vpx/vpx_codec.h" +#include "vpx/vpx_encoder.h" +#include "vpx/vpx_ext_ratectrl.h" +#include "vpx/vpx_image.h" namespace webrtc { namespace { diff --git a/modules/video_coding/codecs/interface/libvpx_interface.h b/modules/video_coding/codecs/interface/libvpx_interface.h index 3dea24dd6d..26ddca7289 100644 --- a/modules/video_coding/codecs/interface/libvpx_interface.h +++ b/modules/video_coding/codecs/interface/libvpx_interface.h @@ -18,6 +18,7 @@ #include "vpx/vp8cx.h" #include "vpx/vpx_codec.h" #include "vpx/vpx_encoder.h" +#include "vpx/vpx_ext_ratectrl.h" #include "vpx/vpx_image.h" namespace webrtc { diff --git a/modules/video_coding/codecs/interface/mock_libvpx_interface.h b/modules/video_coding/codecs/interface/mock_libvpx_interface.h index 6dfe733dd0..0f92e42e05 100644 --- a/modules/video_coding/codecs/interface/mock_libvpx_interface.h +++ b/modules/video_coding/codecs/interface/mock_libvpx_interface.h @@ -11,9 +11,15 @@ #ifndef MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_LIBVPX_INTERFACE_H_ #define MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_LIBVPX_INTERFACE_H_ +#include + #include 
"modules/video_coding/codecs/interface/libvpx_interface.h" #include "test/gmock.h" -#include "test/gtest.h" +#include "vpx/vp8cx.h" +#include "vpx/vpx_codec.h" +#include "vpx/vpx_encoder.h" +#include "vpx/vpx_ext_ratectrl.h" +#include "vpx/vpx_image.h" namespace webrtc { diff --git a/modules/video_coding/codecs/multiplex/augmented_video_frame_buffer.cc b/modules/video_coding/codecs/multiplex/augmented_video_frame_buffer.cc deleted file mode 100644 index 8740884f5b..0000000000 --- a/modules/video_coding/codecs/multiplex/augmented_video_frame_buffer.cc +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h" - -#include - -#include - -#include "api/video/video_frame_buffer.h" - -namespace webrtc { - -AugmentedVideoFrameBuffer::AugmentedVideoFrameBuffer( - const rtc::scoped_refptr& video_frame_buffer, - std::unique_ptr augmenting_data, - uint16_t augmenting_data_size) - : augmenting_data_size_(augmenting_data_size), - augmenting_data_(std::move(augmenting_data)), - video_frame_buffer_(video_frame_buffer) {} - -rtc::scoped_refptr -AugmentedVideoFrameBuffer::GetVideoFrameBuffer() const { - return video_frame_buffer_; -} - -uint8_t* AugmentedVideoFrameBuffer::GetAugmentingData() const { - return augmenting_data_.get(); -} - -uint16_t AugmentedVideoFrameBuffer::GetAugmentingDataSize() const { - return augmenting_data_size_; -} - -VideoFrameBuffer::Type AugmentedVideoFrameBuffer::type() const { - return video_frame_buffer_->type(); -} - -int AugmentedVideoFrameBuffer::width() const { - return video_frame_buffer_->width(); -} - -int AugmentedVideoFrameBuffer::height() const { - return video_frame_buffer_->height(); -} - -rtc::scoped_refptr AugmentedVideoFrameBuffer::ToI420() { - return video_frame_buffer_->ToI420(); -} - -const I420BufferInterface* AugmentedVideoFrameBuffer::GetI420() const { - // TODO(https://crbug.com/webrtc/12021): When AugmentedVideoFrameBuffer is - // updated to implement the buffer interfaces of relevant - // VideoFrameBuffer::Types, stop overriding GetI420() as a workaround to - // AugmentedVideoFrameBuffer not being the type that is returned by type(). - return video_frame_buffer_->GetI420(); -} -} // namespace webrtc diff --git a/modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h b/modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h deleted file mode 100644 index d711cd07da..0000000000 --- a/modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_VIDEO_CODING_CODECS_MULTIPLEX_INCLUDE_AUGMENTED_VIDEO_FRAME_BUFFER_H_ -#define MODULES_VIDEO_CODING_CODECS_MULTIPLEX_INCLUDE_AUGMENTED_VIDEO_FRAME_BUFFER_H_ - -#include -#include - -#include "api/scoped_refptr.h" -#include "api/video/video_frame_buffer.h" - -namespace webrtc { -class AugmentedVideoFrameBuffer : public VideoFrameBuffer { - public: - AugmentedVideoFrameBuffer( - const rtc::scoped_refptr& video_frame_buffer, - std::unique_ptr augmenting_data, - uint16_t augmenting_data_size); - - // Retrieves the underlying VideoFrameBuffer without the augmented data - rtc::scoped_refptr GetVideoFrameBuffer() const; - - // Gets a pointer to the augmenting data and moves ownership to the caller - uint8_t* GetAugmentingData() const; - - // Get the size of the augmenting data - uint16_t GetAugmentingDataSize() const; - - // Returns the type of the underlying VideoFrameBuffer - Type type() const final; - - // Returns the width of the underlying VideoFrameBuffer - int width() const final; - - // Returns the height of the underlying VideoFrameBuffer - int height() const final; - - // Get the I140 Buffer from the underlying frame buffer - rtc::scoped_refptr ToI420() final; - // Returns GetI420() of the underlying VideoFrameBuffer. - // TODO(hbos): AugmentedVideoFrameBuffer should not return a type (such as - // kI420) without also implementing that type's interface (i.e. - // I420BufferInterface). Either implement all possible Type's interfaces or - // return kNative. - const I420BufferInterface* GetI420() const final; - - private: - uint16_t augmenting_data_size_; - std::unique_ptr augmenting_data_; - rtc::scoped_refptr video_frame_buffer_; -}; -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_CODECS_MULTIPLEX_INCLUDE_AUGMENTED_VIDEO_FRAME_BUFFER_H_ diff --git a/modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h b/modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h deleted file mode 100644 index d58981e4b2..0000000000 --- a/modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_CODING_CODECS_MULTIPLEX_INCLUDE_MULTIPLEX_DECODER_ADAPTER_H_ -#define MODULES_VIDEO_CODING_CODECS_MULTIPLEX_INCLUDE_MULTIPLEX_DECODER_ADAPTER_H_ - -#include -#include -#include - -#include "api/video_codecs/sdp_video_format.h" -#include "api/video_codecs/video_decoder.h" -#include "api/video_codecs/video_decoder_factory.h" -#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h" - -namespace webrtc { - -class MultiplexDecoderAdapter : public VideoDecoder { - public: - // `factory` is not owned and expected to outlive this class. 
- MultiplexDecoderAdapter(VideoDecoderFactory* factory, - const SdpVideoFormat& associated_format, - bool supports_augmenting_data = false); - virtual ~MultiplexDecoderAdapter(); - - // Implements VideoDecoder - bool Configure(const Settings& settings) override; - int32_t Decode(const EncodedImage& input_image, - int64_t render_time_ms) override; - int32_t RegisterDecodeCompleteCallback( - DecodedImageCallback* callback) override; - int32_t Release() override; - - void Decoded(AlphaCodecStream stream_idx, - VideoFrame* decoded_image, - absl::optional decode_time_ms, - absl::optional qp); - - private: - // Wrapper class that redirects Decoded() calls. - class AdapterDecodedImageCallback; - - // Holds the decoded image output of a frame. - struct DecodedImageData; - - // Holds the augmenting data of an image - struct AugmentingData; - - void MergeAlphaImages(VideoFrame* decoded_image, - const absl::optional& decode_time_ms, - const absl::optional& qp, - VideoFrame* multiplex_decoded_image, - const absl::optional& multiplex_decode_time_ms, - const absl::optional& multiplex_qp, - std::unique_ptr augmenting_data, - uint16_t augmenting_data_length); - - VideoDecoderFactory* const factory_; - const SdpVideoFormat associated_format_; - std::vector> decoders_; - std::vector> adapter_callbacks_; - DecodedImageCallback* decoded_complete_callback_; - - // Holds YUV or AXX decode output of a frame that is identified by timestamp. - std::map decoded_data_; - std::map decoded_augmenting_data_; - const bool supports_augmenting_data_; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_CODECS_MULTIPLEX_INCLUDE_MULTIPLEX_DECODER_ADAPTER_H_ diff --git a/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h b/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h deleted file mode 100644 index 2e5aad8a5b..0000000000 --- a/modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_CODING_CODECS_MULTIPLEX_INCLUDE_MULTIPLEX_ENCODER_ADAPTER_H_ -#define MODULES_VIDEO_CODING_CODECS_MULTIPLEX_INCLUDE_MULTIPLEX_ENCODER_ADAPTER_H_ - -#include -#include -#include - -#include "api/fec_controller_override.h" -#include "api/video_codecs/sdp_video_format.h" -#include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/video_encoder_factory.h" -#include "modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h" -#include "modules/video_coding/include/video_codec_interface.h" -#include "rtc_base/synchronization/mutex.h" - -namespace webrtc { - -enum AlphaCodecStream { - kYUVStream = 0, - kAXXStream = 1, - kAlphaCodecStreams = 2, -}; - -class MultiplexEncoderAdapter : public VideoEncoder { - public: - // `factory` is not owned and expected to outlive this class. 
- MultiplexEncoderAdapter(VideoEncoderFactory* factory, - const SdpVideoFormat& associated_format, - bool supports_augmenting_data = false); - virtual ~MultiplexEncoderAdapter(); - - // Implements VideoEncoder - void SetFecControllerOverride( - FecControllerOverride* fec_controller_override) override; - int InitEncode(const VideoCodec* inst, - const VideoEncoder::Settings& settings) override; - int Encode(const VideoFrame& input_image, - const std::vector* frame_types) override; - int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override; - void SetRates(const RateControlParameters& parameters) override; - void OnPacketLossRateUpdate(float packet_loss_rate) override; - void OnRttUpdate(int64_t rtt_ms) override; - void OnLossNotification(const LossNotification& loss_notification) override; - int Release() override; - EncoderInfo GetEncoderInfo() const override; - - EncodedImageCallback::Result OnEncodedImage( - AlphaCodecStream stream_idx, - const EncodedImage& encodedImage, - const CodecSpecificInfo* codecSpecificInfo); - - private: - // Wrapper class that redirects OnEncodedImage() calls. - class AdapterEncodedImageCallback; - - VideoEncoderFactory* const factory_; - const SdpVideoFormat associated_format_; - std::vector> encoders_; - std::vector> adapter_callbacks_; - EncodedImageCallback* encoded_complete_callback_; - - std::map stashed_images_ - RTC_GUARDED_BY(mutex_); - - uint16_t picture_index_ = 0; - std::vector multiplex_dummy_planes_; - - int key_frame_interval_; - EncodedImage combined_image_; - - Mutex mutex_; - - const bool supports_augmented_data_; - int augmenting_data_size_ = 0; - - EncoderInfo encoder_info_; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_CODECS_MULTIPLEX_INCLUDE_MULTIPLEX_ENCODER_ADAPTER_H_ diff --git a/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc b/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc deleted file mode 100644 index 9641df3c2e..0000000000 --- a/modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc +++ /dev/null @@ -1,264 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h" - -#include "api/video/encoded_image.h" -#include "api/video/i420_buffer.h" -#include "api/video/video_frame_buffer.h" -#include "common_video/include/video_frame_buffer.h" -#include "common_video/libyuv/include/webrtc_libyuv.h" -#include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h" -#include "modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -class MultiplexDecoderAdapter::AdapterDecodedImageCallback - : public webrtc::DecodedImageCallback { - public: - AdapterDecodedImageCallback(webrtc::MultiplexDecoderAdapter* adapter, - AlphaCodecStream stream_idx) - : adapter_(adapter), stream_idx_(stream_idx) {} - - void Decoded(VideoFrame& decoded_image, - absl::optional decode_time_ms, - absl::optional qp) override { - if (!adapter_) - return; - adapter_->Decoded(stream_idx_, &decoded_image, decode_time_ms, qp); - } - int32_t Decoded(VideoFrame& decoded_image) override { - RTC_DCHECK_NOTREACHED(); - return WEBRTC_VIDEO_CODEC_OK; - } - int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override { - RTC_DCHECK_NOTREACHED(); - return WEBRTC_VIDEO_CODEC_OK; - } - - private: - MultiplexDecoderAdapter* adapter_; - const AlphaCodecStream stream_idx_; -}; - -struct MultiplexDecoderAdapter::DecodedImageData { - explicit DecodedImageData(AlphaCodecStream stream_idx) - : stream_idx_(stream_idx), - decoded_image_( - VideoFrame::Builder() - .set_video_frame_buffer( - I420Buffer::Create(1 /* width */, 1 /* height */)) - .set_timestamp_rtp(0) - .set_timestamp_us(0) - .set_rotation(kVideoRotation_0) - .build()) { - RTC_DCHECK_EQ(kAXXStream, stream_idx); - } - DecodedImageData(AlphaCodecStream stream_idx, - const VideoFrame& decoded_image, - const absl::optional& decode_time_ms, - const absl::optional& qp) - : stream_idx_(stream_idx), - decoded_image_(decoded_image), - decode_time_ms_(decode_time_ms), - qp_(qp) {} - - DecodedImageData() = delete; - DecodedImageData(const DecodedImageData&) = delete; - DecodedImageData& operator=(const DecodedImageData&) = delete; - - const AlphaCodecStream stream_idx_; - VideoFrame decoded_image_; - const absl::optional decode_time_ms_; - const absl::optional qp_; -}; - -struct MultiplexDecoderAdapter::AugmentingData { - AugmentingData(std::unique_ptr augmenting_data, uint16_t data_size) - : data_(std::move(augmenting_data)), size_(data_size) {} - AugmentingData() = delete; - AugmentingData(const AugmentingData&) = delete; - AugmentingData& operator=(const AugmentingData&) = delete; - - std::unique_ptr data_; - const uint16_t size_; -}; - -MultiplexDecoderAdapter::MultiplexDecoderAdapter( - VideoDecoderFactory* factory, - const SdpVideoFormat& associated_format, - bool supports_augmenting_data) - : factory_(factory), - associated_format_(associated_format), - supports_augmenting_data_(supports_augmenting_data) {} - -MultiplexDecoderAdapter::~MultiplexDecoderAdapter() { - Release(); -} - -bool MultiplexDecoderAdapter::Configure(const Settings& settings) { - RTC_DCHECK_EQ(settings.codec_type(), kVideoCodecMultiplex); - Settings associated_settings = settings; - associated_settings.set_codec_type( - PayloadStringToCodecType(associated_format_.name)); - for (size_t i = 0; i < kAlphaCodecStreams; ++i) { - std::unique_ptr decoder = - factory_->CreateVideoDecoder(associated_format_); - if (!decoder->Configure(associated_settings)) { - return false; - } - 
adapter_callbacks_.emplace_back( - new MultiplexDecoderAdapter::AdapterDecodedImageCallback( - this, static_cast(i))); - decoder->RegisterDecodeCompleteCallback(adapter_callbacks_.back().get()); - decoders_.emplace_back(std::move(decoder)); - } - return true; -} - -int32_t MultiplexDecoderAdapter::Decode(const EncodedImage& input_image, - int64_t render_time_ms) { - MultiplexImage image = MultiplexEncodedImagePacker::Unpack(input_image); - - if (supports_augmenting_data_) { - RTC_DCHECK(decoded_augmenting_data_.find(input_image.Timestamp()) == - decoded_augmenting_data_.end()); - decoded_augmenting_data_.emplace( - std::piecewise_construct, - std::forward_as_tuple(input_image.Timestamp()), - std::forward_as_tuple(std::move(image.augmenting_data), - image.augmenting_data_size)); - } - - if (image.component_count == 1) { - RTC_DCHECK(decoded_data_.find(input_image.Timestamp()) == - decoded_data_.end()); - decoded_data_.emplace(std::piecewise_construct, - std::forward_as_tuple(input_image.Timestamp()), - std::forward_as_tuple(kAXXStream)); - } - int32_t rv = 0; - for (size_t i = 0; i < image.image_components.size(); i++) { - rv = decoders_[image.image_components[i].component_index]->Decode( - image.image_components[i].encoded_image, render_time_ms); - if (rv != WEBRTC_VIDEO_CODEC_OK) - return rv; - } - return rv; -} - -int32_t MultiplexDecoderAdapter::RegisterDecodeCompleteCallback( - DecodedImageCallback* callback) { - decoded_complete_callback_ = callback; - return WEBRTC_VIDEO_CODEC_OK; -} - -int32_t MultiplexDecoderAdapter::Release() { - for (auto& decoder : decoders_) { - const int32_t rv = decoder->Release(); - if (rv) - return rv; - } - decoders_.clear(); - adapter_callbacks_.clear(); - return WEBRTC_VIDEO_CODEC_OK; -} - -void MultiplexDecoderAdapter::Decoded(AlphaCodecStream stream_idx, - VideoFrame* decoded_image, - absl::optional decode_time_ms, - absl::optional qp) { - const auto& other_decoded_data_it = - decoded_data_.find(decoded_image->timestamp()); - const auto& augmenting_data_it = - decoded_augmenting_data_.find(decoded_image->timestamp()); - const bool has_augmenting_data = - augmenting_data_it != decoded_augmenting_data_.end(); - if (other_decoded_data_it != decoded_data_.end()) { - uint16_t augmenting_data_size = - has_augmenting_data ? augmenting_data_it->second.size_ : 0; - std::unique_ptr augmenting_data = - has_augmenting_data ? 
std::move(augmenting_data_it->second.data_) - : nullptr; - auto& other_image_data = other_decoded_data_it->second; - if (stream_idx == kYUVStream) { - RTC_DCHECK_EQ(kAXXStream, other_image_data.stream_idx_); - MergeAlphaImages(decoded_image, decode_time_ms, qp, - &other_image_data.decoded_image_, - other_image_data.decode_time_ms_, other_image_data.qp_, - std::move(augmenting_data), augmenting_data_size); - } else { - RTC_DCHECK_EQ(kYUVStream, other_image_data.stream_idx_); - RTC_DCHECK_EQ(kAXXStream, stream_idx); - MergeAlphaImages(&other_image_data.decoded_image_, - other_image_data.decode_time_ms_, other_image_data.qp_, - decoded_image, decode_time_ms, qp, - std::move(augmenting_data), augmenting_data_size); - } - decoded_data_.erase(decoded_data_.begin(), other_decoded_data_it); - if (has_augmenting_data) { - decoded_augmenting_data_.erase(decoded_augmenting_data_.begin(), - augmenting_data_it); - } - return; - } - RTC_DCHECK(decoded_data_.find(decoded_image->timestamp()) == - decoded_data_.end()); - decoded_data_.emplace( - std::piecewise_construct, - std::forward_as_tuple(decoded_image->timestamp()), - std::forward_as_tuple(stream_idx, *decoded_image, decode_time_ms, qp)); -} - -void MultiplexDecoderAdapter::MergeAlphaImages( - VideoFrame* decoded_image, - const absl::optional& decode_time_ms, - const absl::optional& qp, - VideoFrame* alpha_decoded_image, - const absl::optional& alpha_decode_time_ms, - const absl::optional& alpha_qp, - std::unique_ptr augmenting_data, - uint16_t augmenting_data_length) { - rtc::scoped_refptr merged_buffer; - if (!alpha_decoded_image->timestamp()) { - merged_buffer = decoded_image->video_frame_buffer(); - } else { - rtc::scoped_refptr yuv_buffer = - decoded_image->video_frame_buffer()->ToI420(); - rtc::scoped_refptr alpha_buffer = - alpha_decoded_image->video_frame_buffer()->ToI420(); - RTC_DCHECK_EQ(yuv_buffer->width(), alpha_buffer->width()); - RTC_DCHECK_EQ(yuv_buffer->height(), alpha_buffer->height()); - merged_buffer = WrapI420ABuffer( - yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(), - yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(), - yuv_buffer->DataV(), yuv_buffer->StrideV(), alpha_buffer->DataY(), - alpha_buffer->StrideY(), - // To keep references alive. - [yuv_buffer, alpha_buffer] {}); - } - if (supports_augmenting_data_) { - merged_buffer = rtc::make_ref_counted( - merged_buffer, std::move(augmenting_data), augmenting_data_length); - } - - VideoFrame merged_image = VideoFrame::Builder() - .set_video_frame_buffer(merged_buffer) - .set_timestamp_rtp(decoded_image->timestamp()) - .set_timestamp_us(0) - .set_rotation(decoded_image->rotation()) - .set_id(decoded_image->id()) - .set_packet_infos(decoded_image->packet_infos()) - .build(); - decoded_complete_callback_->Decoded(merged_image, decode_time_ms, qp); -} - -} // namespace webrtc diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc deleted file mode 100644 index 0f05d1a89c..0000000000 --- a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc +++ /dev/null @@ -1,277 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h" - -#include -#include - -#include "modules/rtp_rtcp/source/byte_io.h" -#include "rtc_base/checks.h" - -namespace webrtc { -int PackHeader(uint8_t* buffer, MultiplexImageHeader header) { - int offset = 0; - ByteWriter::WriteBigEndian(buffer + offset, header.component_count); - offset += sizeof(uint8_t); - - ByteWriter::WriteBigEndian(buffer + offset, header.image_index); - offset += sizeof(uint16_t); - - ByteWriter::WriteBigEndian(buffer + offset, - header.augmenting_data_size); - offset += sizeof(uint16_t); - - ByteWriter::WriteBigEndian(buffer + offset, - header.augmenting_data_offset); - offset += sizeof(uint32_t); - - ByteWriter::WriteBigEndian(buffer + offset, - header.first_component_header_offset); - offset += sizeof(uint32_t); - - RTC_DCHECK_EQ(offset, kMultiplexImageHeaderSize); - return offset; -} - -MultiplexImageHeader UnpackHeader(const uint8_t* buffer) { - MultiplexImageHeader header; - int offset = 0; - header.component_count = ByteReader::ReadBigEndian(buffer + offset); - offset += sizeof(uint8_t); - - header.image_index = ByteReader::ReadBigEndian(buffer + offset); - offset += sizeof(uint16_t); - - header.augmenting_data_size = - ByteReader::ReadBigEndian(buffer + offset); - offset += sizeof(uint16_t); - - header.augmenting_data_offset = - ByteReader::ReadBigEndian(buffer + offset); - offset += sizeof(uint32_t); - - header.first_component_header_offset = - ByteReader::ReadBigEndian(buffer + offset); - offset += sizeof(uint32_t); - - RTC_DCHECK_EQ(offset, kMultiplexImageHeaderSize); - return header; -} - -int PackFrameHeader(uint8_t* buffer, - MultiplexImageComponentHeader frame_header) { - int offset = 0; - ByteWriter::WriteBigEndian( - buffer + offset, frame_header.next_component_header_offset); - offset += sizeof(uint32_t); - - ByteWriter::WriteBigEndian(buffer + offset, - frame_header.component_index); - offset += sizeof(uint8_t); - - ByteWriter::WriteBigEndian(buffer + offset, - frame_header.bitstream_offset); - offset += sizeof(uint32_t); - - ByteWriter::WriteBigEndian(buffer + offset, - frame_header.bitstream_length); - offset += sizeof(uint32_t); - - ByteWriter::WriteBigEndian(buffer + offset, frame_header.codec_type); - offset += sizeof(uint8_t); - - ByteWriter::WriteBigEndian( - buffer + offset, static_cast(frame_header.frame_type)); - offset += sizeof(uint8_t); - - RTC_DCHECK_EQ(offset, kMultiplexImageComponentHeaderSize); - return offset; -} - -MultiplexImageComponentHeader UnpackFrameHeader(const uint8_t* buffer) { - MultiplexImageComponentHeader frame_header; - int offset = 0; - - frame_header.next_component_header_offset = - ByteReader::ReadBigEndian(buffer + offset); - offset += sizeof(uint32_t); - - frame_header.component_index = - ByteReader::ReadBigEndian(buffer + offset); - offset += sizeof(uint8_t); - - frame_header.bitstream_offset = - ByteReader::ReadBigEndian(buffer + offset); - offset += sizeof(uint32_t); - - frame_header.bitstream_length = - ByteReader::ReadBigEndian(buffer + offset); - offset += sizeof(uint32_t); - - // This makes the wire format depend on the numeric values of the - // VideoCodecType and VideoFrameType enum constants. 
- frame_header.codec_type = static_cast( - ByteReader::ReadBigEndian(buffer + offset)); - offset += sizeof(uint8_t); - - frame_header.frame_type = static_cast( - ByteReader::ReadBigEndian(buffer + offset)); - offset += sizeof(uint8_t); - - RTC_DCHECK_EQ(offset, kMultiplexImageComponentHeaderSize); - return frame_header; -} - -void PackBitstream(uint8_t* buffer, MultiplexImageComponent image) { - memcpy(buffer, image.encoded_image.data(), image.encoded_image.size()); -} - -MultiplexImage::MultiplexImage(uint16_t picture_index, - uint8_t frame_count, - std::unique_ptr augmenting_data, - uint16_t augmenting_data_size) - : image_index(picture_index), - component_count(frame_count), - augmenting_data_size(augmenting_data_size), - augmenting_data(std::move(augmenting_data)) {} - -EncodedImage MultiplexEncodedImagePacker::PackAndRelease( - const MultiplexImage& multiplex_image) { - MultiplexImageHeader header; - std::vector frame_headers; - - header.component_count = multiplex_image.component_count; - header.image_index = multiplex_image.image_index; - int header_offset = kMultiplexImageHeaderSize; - header.first_component_header_offset = header_offset; - header.augmenting_data_offset = - header_offset + - kMultiplexImageComponentHeaderSize * header.component_count; - header.augmenting_data_size = multiplex_image.augmenting_data_size; - int bitstream_offset = - header.augmenting_data_offset + header.augmenting_data_size; - - const std::vector& images = - multiplex_image.image_components; - EncodedImage combined_image = images[0].encoded_image; - for (size_t i = 0; i < images.size(); i++) { - MultiplexImageComponentHeader frame_header; - header_offset += kMultiplexImageComponentHeaderSize; - frame_header.next_component_header_offset = - (i == images.size() - 1) ? 0 : header_offset; - frame_header.component_index = images[i].component_index; - - frame_header.bitstream_offset = bitstream_offset; - frame_header.bitstream_length = - static_cast(images[i].encoded_image.size()); - bitstream_offset += frame_header.bitstream_length; - - frame_header.codec_type = images[i].codec_type; - frame_header.frame_type = images[i].encoded_image._frameType; - - // As long as one component is delta frame, we have to mark the combined - // frame as delta frame, because it is necessary for all components to be - // key frame so as to decode the whole image without previous frame data. - // Thus only when all components are key frames, we can mark the combined - // frame as key frame. - if (frame_header.frame_type == VideoFrameType::kVideoFrameDelta) { - combined_image._frameType = VideoFrameType::kVideoFrameDelta; - } - - frame_headers.push_back(frame_header); - } - - auto buffer = EncodedImageBuffer::Create(bitstream_offset); - combined_image.SetEncodedData(buffer); - - // header - header_offset = PackHeader(buffer->data(), header); - RTC_DCHECK_EQ(header.first_component_header_offset, - kMultiplexImageHeaderSize); - - // Frame Header - for (size_t i = 0; i < images.size(); i++) { - int relative_offset = - PackFrameHeader(buffer->data() + header_offset, frame_headers[i]); - RTC_DCHECK_EQ(relative_offset, kMultiplexImageComponentHeaderSize); - - header_offset = frame_headers[i].next_component_header_offset; - RTC_DCHECK_EQ(header_offset, - (i == images.size() - 1) - ? 
0 - : (kMultiplexImageHeaderSize + - kMultiplexImageComponentHeaderSize * (i + 1))); - } - - // Augmenting Data - if (multiplex_image.augmenting_data_size != 0) { - memcpy(buffer->data() + header.augmenting_data_offset, - multiplex_image.augmenting_data.get(), - multiplex_image.augmenting_data_size); - } - - // Bitstreams - for (size_t i = 0; i < images.size(); i++) { - PackBitstream(buffer->data() + frame_headers[i].bitstream_offset, - images[i]); - } - - return combined_image; -} - -MultiplexImage MultiplexEncodedImagePacker::Unpack( - const EncodedImage& combined_image) { - const MultiplexImageHeader& header = UnpackHeader(combined_image.data()); - - std::vector frame_headers; - int header_offset = header.first_component_header_offset; - - while (header_offset > 0) { - frame_headers.push_back( - UnpackFrameHeader(combined_image.data() + header_offset)); - header_offset = frame_headers.back().next_component_header_offset; - } - - RTC_DCHECK_LE(frame_headers.size(), header.component_count); - std::unique_ptr augmenting_data = nullptr; - if (header.augmenting_data_size != 0) { - augmenting_data = - std::unique_ptr(new uint8_t[header.augmenting_data_size]); - memcpy(augmenting_data.get(), - combined_image.data() + header.augmenting_data_offset, - header.augmenting_data_size); - } - - MultiplexImage multiplex_image(header.image_index, header.component_count, - std::move(augmenting_data), - header.augmenting_data_size); - - for (size_t i = 0; i < frame_headers.size(); i++) { - MultiplexImageComponent image_component; - image_component.component_index = frame_headers[i].component_index; - image_component.codec_type = frame_headers[i].codec_type; - - EncodedImage encoded_image = combined_image; - encoded_image.SetTimestamp(combined_image.Timestamp()); - encoded_image._frameType = frame_headers[i].frame_type; - encoded_image.SetEncodedData(EncodedImageBuffer::Create( - combined_image.data() + frame_headers[i].bitstream_offset, - frame_headers[i].bitstream_length)); - - image_component.encoded_image = encoded_image; - - multiplex_image.image_components.push_back(image_component); - } - - return multiplex_image; -} - -} // namespace webrtc diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h b/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h deleted file mode 100644 index 299a0159d5..0000000000 --- a/modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_CODING_CODECS_MULTIPLEX_MULTIPLEX_ENCODED_IMAGE_PACKER_H_ -#define MODULES_VIDEO_CODING_CODECS_MULTIPLEX_MULTIPLEX_ENCODED_IMAGE_PACKER_H_ - -#include -#include -#include - -#include "api/video/encoded_image.h" -#include "api/video_codecs/video_codec.h" - -namespace webrtc { - -// Struct describing the whole bundle of multiple frames of an image. -// This struct is expected to be the set in the beginning of a picture's -// bitstream data. -struct MultiplexImageHeader { - // The number of frame components making up the complete picture data. 
- // For example, `frame_count` = 2 for the case of YUV frame with Alpha frame. - uint8_t component_count; - - // The increasing image ID given by the encoder. For different components - // of a single picture, they have the same `picture_index`. - uint16_t image_index; - - // The location of the first MultiplexImageComponentHeader in the bitstream, - // in terms of bytes from the beginning of the bitstream. - uint32_t first_component_header_offset; - - // The location of the augmenting data in the bitstream, in terms of bytes - // from the beginning of the bitstream. - uint32_t augmenting_data_offset; - - // The size of the augmenting data in the bitstream, in terms of bytes. - uint16_t augmenting_data_size; -}; -const int kMultiplexImageHeaderSize = - sizeof(uint8_t) + 2 * sizeof(uint16_t) + 2 * sizeof(uint32_t); - -// Struct describing the individual image component's content. -struct MultiplexImageComponentHeader { - // The location of the next MultiplexImageComponentHeader in the bitstream, - // in terms of bytes from the beginning of the bitstream. - uint32_t next_component_header_offset; - - // Identifies which component this frame represents, i.e. YUV frame vs Alpha - // frame. - uint8_t component_index; - - // The location of the real encoded image data of the frame in the bitstream, - // in terms of bytes from the beginning of the bitstream. - uint32_t bitstream_offset; - - // Indicates the number of bytes of the encoded image data. - uint32_t bitstream_length; - - // Indicates the underlying VideoCodecType of the frame, i.e. VP9 or VP8 etc. - VideoCodecType codec_type; - - // Indicates whether the underlying frame is a key frame or a delta frame. - VideoFrameType frame_type; -}; -const int kMultiplexImageComponentHeaderSize = - sizeof(uint32_t) + sizeof(uint8_t) + sizeof(uint32_t) + sizeof(uint32_t) + - sizeof(uint8_t) + sizeof(uint8_t); - -// Struct holding the encoded image for one component. -struct MultiplexImageComponent { - // Indicates the underlying VideoCodecType of the frame, i.e. VP9 or VP8 etc. - VideoCodecType codec_type; - - // Identifies which component this frame represents, i.e. YUV frame vs Alpha - // frame. - uint8_t component_index; - - // Stores the actual frame data of the encoded image. - EncodedImage encoded_image; -}; - -// Struct holding the whole frame bundle of components of an image. -struct MultiplexImage { - uint16_t image_index; - uint8_t component_count; - uint16_t augmenting_data_size; - std::unique_ptr<uint8_t[]> augmenting_data; - std::vector<MultiplexImageComponent> image_components; - - MultiplexImage(uint16_t picture_index, - uint8_t component_count, - std::unique_ptr<uint8_t[]> augmenting_data, - uint16_t augmenting_data_size); -}; - -// A utility class providing conversion between two representations of a -// multiplex image frame: -// 1. Packed version is just one encoded image; we pack all necessary metadata -// in the bitstream as headers. -// 2. Unpacked version is essentially a list of encoded images, one per -// component. -class MultiplexEncodedImagePacker { - public: - // Note: It is the caller's responsibility to release the buffer of the result. - static EncodedImage PackAndRelease(const MultiplexImage& image); - - // Note: The image components just share the memory with `combined_image`.
- static MultiplexImage Unpack(const EncodedImage& combined_image); -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_CODECS_MULTIPLEX_MULTIPLEX_ENCODED_IMAGE_PACKER_H_ diff --git a/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc b/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc deleted file mode 100644 index 80744e2d8c..0000000000 --- a/modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc +++ /dev/null @@ -1,353 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h" - -#include - -#include "api/video/encoded_image.h" -#include "api/video_codecs/video_encoder.h" -#include "common_video/include/video_frame_buffer.h" -#include "common_video/libyuv/include/webrtc_libyuv.h" -#include "media/base/video_common.h" -#include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -// Callback wrapper that helps distinguish returned results from `encoders_` -// instances. -class MultiplexEncoderAdapter::AdapterEncodedImageCallback - : public webrtc::EncodedImageCallback { - public: - AdapterEncodedImageCallback(webrtc::MultiplexEncoderAdapter* adapter, - AlphaCodecStream stream_idx) - : adapter_(adapter), stream_idx_(stream_idx) {} - - EncodedImageCallback::Result OnEncodedImage( - const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info) override { - if (!adapter_) - return Result(Result::OK); - return adapter_->OnEncodedImage(stream_idx_, encoded_image, - codec_specific_info); - } - - private: - MultiplexEncoderAdapter* adapter_; - const AlphaCodecStream stream_idx_; -}; - -MultiplexEncoderAdapter::MultiplexEncoderAdapter( - VideoEncoderFactory* factory, - const SdpVideoFormat& associated_format, - bool supports_augmented_data) - : factory_(factory), - associated_format_(associated_format), - encoded_complete_callback_(nullptr), - key_frame_interval_(0), - supports_augmented_data_(supports_augmented_data) {} - -MultiplexEncoderAdapter::~MultiplexEncoderAdapter() { - Release(); -} - -void MultiplexEncoderAdapter::SetFecControllerOverride( - FecControllerOverride* fec_controller_override) { - // Ignored. -} - -int MultiplexEncoderAdapter::InitEncode( - const VideoCodec* inst, - const VideoEncoder::Settings& settings) { - const size_t buffer_size = - CalcBufferSize(VideoType::kI420, inst->width, inst->height); - multiplex_dummy_planes_.resize(buffer_size); - // It is more expensive to encode 0x00, so use 0x80 instead. - std::fill(multiplex_dummy_planes_.begin(), multiplex_dummy_planes_.end(), - 0x80); - - RTC_DCHECK_EQ(kVideoCodecMultiplex, inst->codecType); - VideoCodec video_codec = *inst; - video_codec.codecType = PayloadStringToCodecType(associated_format_.name); - - // Take over the key frame interval at adapter level, because we have to - // sync the key frames for both sub-encoders. 
- switch (video_codec.codecType) { - case kVideoCodecVP8: - key_frame_interval_ = video_codec.VP8()->keyFrameInterval; - video_codec.VP8()->keyFrameInterval = 0; - break; - case kVideoCodecVP9: - key_frame_interval_ = video_codec.VP9()->keyFrameInterval; - video_codec.VP9()->keyFrameInterval = 0; - break; - case kVideoCodecH264: - key_frame_interval_ = video_codec.H264()->keyFrameInterval; - video_codec.H264()->keyFrameInterval = 0; - break; - default: - break; - } - - encoder_info_ = EncoderInfo(); - encoder_info_.implementation_name = "MultiplexEncoderAdapter ("; - encoder_info_.requested_resolution_alignment = 1; - encoder_info_.apply_alignment_to_all_simulcast_layers = false; - // This needs to be false so that we can do the split in Encode(). - encoder_info_.supports_native_handle = false; - - for (size_t i = 0; i < kAlphaCodecStreams; ++i) { - std::unique_ptr encoder = - factory_->CreateVideoEncoder(associated_format_); - const int rv = encoder->InitEncode(&video_codec, settings); - if (rv) { - RTC_LOG(LS_ERROR) << "Failed to create multiplex codec index " << i; - return rv; - } - adapter_callbacks_.emplace_back(new AdapterEncodedImageCallback( - this, static_cast(i))); - encoder->RegisterEncodeCompleteCallback(adapter_callbacks_.back().get()); - - const EncoderInfo& encoder_impl_info = encoder->GetEncoderInfo(); - encoder_info_.implementation_name += encoder_impl_info.implementation_name; - if (i != kAlphaCodecStreams - 1) { - encoder_info_.implementation_name += ", "; - } - // Uses hardware support if any of the encoders uses it. - // For example, if we are having issues with down-scaling due to - // pipelining delay in HW encoders we need higher encoder usage - // thresholds in CPU adaptation. - if (i == 0) { - encoder_info_.is_hardware_accelerated = - encoder_impl_info.is_hardware_accelerated; - } else { - encoder_info_.is_hardware_accelerated |= - encoder_impl_info.is_hardware_accelerated; - } - - encoder_info_.requested_resolution_alignment = cricket::LeastCommonMultiple( - encoder_info_.requested_resolution_alignment, - encoder_impl_info.requested_resolution_alignment); - - if (encoder_impl_info.apply_alignment_to_all_simulcast_layers) { - encoder_info_.apply_alignment_to_all_simulcast_layers = true; - } - - encoders_.emplace_back(std::move(encoder)); - } - encoder_info_.implementation_name += ")"; - - return WEBRTC_VIDEO_CODEC_OK; -} - -int MultiplexEncoderAdapter::Encode( - const VideoFrame& input_image, - const std::vector* frame_types) { - if (!encoded_complete_callback_) { - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - } - - // The input image is forwarded as-is, unless it is a native buffer and - // `supports_augmented_data_` is true in which case we need to map it in order - // to access the underlying AugmentedVideoFrameBuffer. - VideoFrame forwarded_image = input_image; - if (supports_augmented_data_ && - forwarded_image.video_frame_buffer()->type() == - VideoFrameBuffer::Type::kNative) { - auto info = GetEncoderInfo(); - rtc::scoped_refptr mapped_buffer = - forwarded_image.video_frame_buffer()->GetMappedFrameBuffer( - info.preferred_pixel_formats); - if (!mapped_buffer) { - // Unable to map the buffer. 
- return WEBRTC_VIDEO_CODEC_ERROR; - } - forwarded_image.set_video_frame_buffer(std::move(mapped_buffer)); - } - - std::vector adjusted_frame_types; - if (key_frame_interval_ > 0 && picture_index_ % key_frame_interval_ == 0) { - adjusted_frame_types.push_back(VideoFrameType::kVideoFrameKey); - } else { - adjusted_frame_types.push_back(VideoFrameType::kVideoFrameDelta); - } - const bool has_alpha = forwarded_image.video_frame_buffer()->type() == - VideoFrameBuffer::Type::kI420A; - std::unique_ptr augmenting_data = nullptr; - uint16_t augmenting_data_length = 0; - AugmentedVideoFrameBuffer* augmented_video_frame_buffer = nullptr; - if (supports_augmented_data_) { - augmented_video_frame_buffer = static_cast( - forwarded_image.video_frame_buffer().get()); - augmenting_data_length = - augmented_video_frame_buffer->GetAugmentingDataSize(); - augmenting_data = - std::unique_ptr(new uint8_t[augmenting_data_length]); - memcpy(augmenting_data.get(), - augmented_video_frame_buffer->GetAugmentingData(), - augmenting_data_length); - augmenting_data_size_ = augmenting_data_length; - } - - { - MutexLock lock(&mutex_); - stashed_images_.emplace( - std::piecewise_construct, - std::forward_as_tuple(forwarded_image.timestamp()), - std::forward_as_tuple( - picture_index_, has_alpha ? kAlphaCodecStreams : 1, - std::move(augmenting_data), augmenting_data_length)); - } - - ++picture_index_; - - // Encode YUV - int rv = - encoders_[kYUVStream]->Encode(forwarded_image, &adjusted_frame_types); - - // If we do not receive an alpha frame, we send a single frame for this - // `picture_index_`. The receiver will receive `frame_count` as 1 which - // specifies this case. - if (rv || !has_alpha) - return rv; - - // Encode AXX - rtc::scoped_refptr frame_buffer = - supports_augmented_data_ - ? augmented_video_frame_buffer->GetVideoFrameBuffer() - : forwarded_image.video_frame_buffer(); - const I420ABufferInterface* yuva_buffer = frame_buffer->GetI420A(); - rtc::scoped_refptr alpha_buffer = - WrapI420Buffer(forwarded_image.width(), forwarded_image.height(), - yuva_buffer->DataA(), yuva_buffer->StrideA(), - multiplex_dummy_planes_.data(), yuva_buffer->StrideU(), - multiplex_dummy_planes_.data(), yuva_buffer->StrideV(), - // To keep reference alive. - [frame_buffer] {}); - VideoFrame alpha_image = - VideoFrame::Builder() - .set_video_frame_buffer(alpha_buffer) - .set_timestamp_rtp(forwarded_image.timestamp()) - .set_timestamp_ms(forwarded_image.render_time_ms()) - .set_rotation(forwarded_image.rotation()) - .set_id(forwarded_image.id()) - .set_packet_infos(forwarded_image.packet_infos()) - .build(); - rv = encoders_[kAXXStream]->Encode(alpha_image, &adjusted_frame_types); - return rv; -} - -int MultiplexEncoderAdapter::RegisterEncodeCompleteCallback( - EncodedImageCallback* callback) { - encoded_complete_callback_ = callback; - return WEBRTC_VIDEO_CODEC_OK; -} - -void MultiplexEncoderAdapter::SetRates( - const RateControlParameters& parameters) { - VideoBitrateAllocation bitrate_allocation(parameters.bitrate); - bitrate_allocation.SetBitrate( - 0, 0, parameters.bitrate.GetBitrate(0, 0) - augmenting_data_size_); - for (auto& encoder : encoders_) { - // TODO(emircan): `framerate` is used to calculate duration in encoder - // instances. We report the total frame rate to keep real time for now. - // Remove this after refactoring duration logic. 
- encoder->SetRates(RateControlParameters( - bitrate_allocation, - static_cast(encoders_.size() * parameters.framerate_fps), - parameters.bandwidth_allocation - - DataRate::BitsPerSec(augmenting_data_size_))); - } -} - -void MultiplexEncoderAdapter::OnPacketLossRateUpdate(float packet_loss_rate) { - for (auto& encoder : encoders_) { - encoder->OnPacketLossRateUpdate(packet_loss_rate); - } -} - -void MultiplexEncoderAdapter::OnRttUpdate(int64_t rtt_ms) { - for (auto& encoder : encoders_) { - encoder->OnRttUpdate(rtt_ms); - } -} - -void MultiplexEncoderAdapter::OnLossNotification( - const LossNotification& loss_notification) { - for (auto& encoder : encoders_) { - encoder->OnLossNotification(loss_notification); - } -} - -int MultiplexEncoderAdapter::Release() { - for (auto& encoder : encoders_) { - const int rv = encoder->Release(); - if (rv) - return rv; - } - encoders_.clear(); - adapter_callbacks_.clear(); - MutexLock lock(&mutex_); - stashed_images_.clear(); - - return WEBRTC_VIDEO_CODEC_OK; -} - -VideoEncoder::EncoderInfo MultiplexEncoderAdapter::GetEncoderInfo() const { - return encoder_info_; -} - -EncodedImageCallback::Result MultiplexEncoderAdapter::OnEncodedImage( - AlphaCodecStream stream_idx, - const EncodedImage& encodedImage, - const CodecSpecificInfo* codecSpecificInfo) { - // Save the image - MultiplexImageComponent image_component; - image_component.component_index = stream_idx; - image_component.codec_type = - PayloadStringToCodecType(associated_format_.name); - image_component.encoded_image = encodedImage; - - MutexLock lock(&mutex_); - const auto& stashed_image_itr = - stashed_images_.find(encodedImage.Timestamp()); - const auto& stashed_image_next_itr = std::next(stashed_image_itr, 1); - RTC_DCHECK(stashed_image_itr != stashed_images_.end()); - MultiplexImage& stashed_image = stashed_image_itr->second; - const uint8_t frame_count = stashed_image.component_count; - - stashed_image.image_components.push_back(image_component); - - if (stashed_image.image_components.size() == frame_count) { - // Complete case - for (auto iter = stashed_images_.begin(); - iter != stashed_images_.end() && iter != stashed_image_next_itr; - iter++) { - // No image at all, skip. - if (iter->second.image_components.size() == 0) - continue; - - // We have to send out those stashed frames, otherwise the delta frame - // dependency chain is broken. - combined_image_ = - MultiplexEncodedImagePacker::PackAndRelease(iter->second); - - CodecSpecificInfo codec_info = *codecSpecificInfo; - codec_info.codecType = kVideoCodecMultiplex; - encoded_complete_callback_->OnEncodedImage(combined_image_, &codec_info); - } - - stashed_images_.erase(stashed_images_.begin(), stashed_image_next_itr); - } - return EncodedImageCallback::Result(EncodedImageCallback::Result::OK); -} - -} // namespace webrtc diff --git a/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc b/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc deleted file mode 100644 index a2f36a306d..0000000000 --- a/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc +++ /dev/null @@ -1,319 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/scoped_refptr.h" -#include "api/test/mock_video_decoder_factory.h" -#include "api/test/mock_video_encoder_factory.h" -#include "api/video/encoded_image.h" -#include "api/video/video_frame.h" -#include "api/video/video_frame_buffer.h" -#include "api/video/video_rotation.h" -#include "api/video_codecs/sdp_video_format.h" -#include "api/video_codecs/video_codec.h" -#include "api/video_codecs/video_decoder.h" -#include "api/video_codecs/video_encoder.h" -#include "common_video/include/video_frame_buffer.h" -#include "common_video/libyuv/include/webrtc_libyuv.h" -#include "media/base/media_constants.h" -#include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h" -#include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h" -#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h" -#include "modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h" -#include "modules/video_coding/codecs/test/video_codec_unittest.h" -#include "modules/video_coding/codecs/vp9/include/vp9.h" -#include "modules/video_coding/include/video_codec_interface.h" -#include "modules/video_coding/include/video_error_codes.h" -#include "test/gmock.h" -#include "test/gtest.h" -#include "test/video_codec_settings.h" - -using ::testing::_; -using ::testing::Return; - -namespace webrtc { - -constexpr const char* kMultiplexAssociatedCodecName = cricket::kVp9CodecName; -const VideoCodecType kMultiplexAssociatedCodecType = - PayloadStringToCodecType(kMultiplexAssociatedCodecName); - -class TestMultiplexAdapter : public VideoCodecUnitTest, - public ::testing::WithParamInterface< - bool /* supports_augmenting_data */> { - public: - TestMultiplexAdapter() - : decoder_factory_(new webrtc::MockVideoDecoderFactory), - encoder_factory_(new webrtc::MockVideoEncoderFactory), - supports_augmenting_data_(GetParam()) {} - - protected: - std::unique_ptr CreateDecoder() override { - return std::make_unique( - decoder_factory_.get(), SdpVideoFormat(kMultiplexAssociatedCodecName), - supports_augmenting_data_); - } - - std::unique_ptr CreateEncoder() override { - return std::make_unique( - encoder_factory_.get(), SdpVideoFormat(kMultiplexAssociatedCodecName), - supports_augmenting_data_); - } - - void ModifyCodecSettings(VideoCodec* codec_settings) override { - webrtc::test::CodecSettings(kMultiplexAssociatedCodecType, codec_settings); - codec_settings->VP9()->numberOfTemporalLayers = 1; - codec_settings->VP9()->numberOfSpatialLayers = 1; - codec_settings->codecType = webrtc::kVideoCodecMultiplex; - } - - std::unique_ptr CreateDataAugmentedInputFrame( - VideoFrame* video_frame) { - rtc::scoped_refptr video_buffer = - video_frame->video_frame_buffer(); - std::unique_ptr data = - std::unique_ptr(new uint8_t[16]); - for (int i = 0; i < 16; i++) { - data[i] = i; - } - auto augmented_video_frame_buffer = - rtc::make_ref_counted(video_buffer, - std::move(data), 16); - return std::make_unique( - VideoFrame::Builder() - .set_video_frame_buffer(augmented_video_frame_buffer) - .set_timestamp_rtp(video_frame->timestamp()) - .set_timestamp_ms(video_frame->render_time_ms()) - .set_rotation(video_frame->rotation()) - .set_id(video_frame->id()) - .build()); - } - - std::unique_ptr CreateI420AInputFrame() { - VideoFrame input_frame = NextInputFrame(); - 
rtc::scoped_refptr yuv_buffer = - input_frame.video_frame_buffer()->ToI420(); - rtc::scoped_refptr yuva_buffer = WrapI420ABuffer( - yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(), - yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(), - yuv_buffer->DataV(), yuv_buffer->StrideV(), yuv_buffer->DataY(), - yuv_buffer->StrideY(), - // To keep reference alive. - [yuv_buffer] {}); - return std::make_unique(VideoFrame::Builder() - .set_video_frame_buffer(yuva_buffer) - .set_timestamp_rtp(123) - .set_timestamp_ms(345) - .set_rotation(kVideoRotation_0) - .build()); - } - - std::unique_ptr CreateInputFrame(bool contains_alpha) { - std::unique_ptr video_frame; - if (contains_alpha) { - video_frame = CreateI420AInputFrame(); - } else { - VideoFrame next_frame = NextInputFrame(); - video_frame = std::make_unique( - VideoFrame::Builder() - .set_video_frame_buffer(next_frame.video_frame_buffer()) - .set_timestamp_rtp(next_frame.timestamp()) - .set_timestamp_ms(next_frame.render_time_ms()) - .set_rotation(next_frame.rotation()) - .set_id(next_frame.id()) - .build()); - } - if (supports_augmenting_data_) { - video_frame = CreateDataAugmentedInputFrame(video_frame.get()); - } - - return video_frame; - } - - void CheckData(rtc::scoped_refptr video_frame_buffer) { - if (!supports_augmenting_data_) { - return; - } - AugmentedVideoFrameBuffer* augmented_buffer = - static_cast(video_frame_buffer.get()); - EXPECT_EQ(augmented_buffer->GetAugmentingDataSize(), 16); - uint8_t* data = augmented_buffer->GetAugmentingData(); - for (int i = 0; i < 16; i++) { - EXPECT_EQ(data[i], i); - } - } - - std::unique_ptr ExtractAXXFrame(const VideoFrame& video_frame) { - rtc::scoped_refptr video_frame_buffer = - video_frame.video_frame_buffer(); - if (supports_augmenting_data_) { - AugmentedVideoFrameBuffer* augmentedBuffer = - static_cast(video_frame_buffer.get()); - video_frame_buffer = augmentedBuffer->GetVideoFrameBuffer(); - } - const I420ABufferInterface* yuva_buffer = video_frame_buffer->GetI420A(); - rtc::scoped_refptr axx_buffer = WrapI420Buffer( - yuva_buffer->width(), yuva_buffer->height(), yuva_buffer->DataA(), - yuva_buffer->StrideA(), yuva_buffer->DataU(), yuva_buffer->StrideU(), - yuva_buffer->DataV(), yuva_buffer->StrideV(), [video_frame_buffer] {}); - return std::make_unique(VideoFrame::Builder() - .set_video_frame_buffer(axx_buffer) - .set_timestamp_rtp(123) - .set_timestamp_ms(345) - .set_rotation(kVideoRotation_0) - .build()); - } - - private: - void SetUp() override { - EXPECT_CALL(*decoder_factory_, Die); - // The decoders/encoders will be owned by the caller of - // CreateVideoDecoder()/CreateVideoEncoder(). - EXPECT_CALL(*decoder_factory_, CreateVideoDecoder) - .Times(2) - .WillRepeatedly([] { return VP9Decoder::Create(); }); - - EXPECT_CALL(*encoder_factory_, Die); - EXPECT_CALL(*encoder_factory_, CreateVideoEncoder) - .Times(2) - .WillRepeatedly([] { return VP9Encoder::Create(); }); - - VideoCodecUnitTest::SetUp(); - } - - const std::unique_ptr decoder_factory_; - const std::unique_ptr encoder_factory_; - const bool supports_augmenting_data_; -}; - -// TODO(emircan): Currently VideoCodecUnitTest tests do a complete setup -// step that goes beyond constructing `decoder_`. Simplify these tests to do -// less. 
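// A rough sketch, not taken from the deleted sources, of the pack/unpack round
// trip that the bitstream tests below (CheckSingleFrameEncodedBitstream,
// CheckDoubleFramesEncodedBitstream) exercise through the encoder adapter. The
// payload bytes are arbitrary illustration values; everything else uses the
// types declared in multiplex_encoded_image_packer.h above.
uint8_t payload[] = {0xde, 0xad, 0xbe, 0xef};
EncodedImage component_image;
component_image.SetEncodedData(
    EncodedImageBuffer::Create(payload, sizeof(payload)));
component_image._frameType = VideoFrameType::kVideoFrameKey;

MultiplexImageComponent component;
component.component_index = kYUVStream;
component.codec_type = kVideoCodecVP9;
component.encoded_image = component_image;

MultiplexImage image(/*picture_index=*/0, /*frame_count=*/1,
                     /*augmenting_data=*/nullptr,
                     /*augmenting_data_size=*/0);
image.image_components.push_back(component);

EncodedImage packed = MultiplexEncodedImagePacker::PackAndRelease(image);
MultiplexImage unpacked = MultiplexEncodedImagePacker::Unpack(packed);
// unpacked.component_count == 1, and the single component's bitstream is
// byte-for-byte equal to `payload`.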
-TEST_P(TestMultiplexAdapter, ConstructAndDestructDecoder) { - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Release()); -} - -TEST_P(TestMultiplexAdapter, ConstructAndDestructEncoder) { - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release()); -} - -TEST_P(TestMultiplexAdapter, EncodeDecodeI420Frame) { - std::unique_ptr input_frame = CreateInputFrame(false); - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*input_frame, nullptr)); - EncodedImage encoded_frame; - CodecSpecificInfo codec_specific_info; - ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); - EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType); - - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, -1)); - std::unique_ptr decoded_frame; - absl::optional decoded_qp; - ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp)); - ASSERT_TRUE(decoded_frame); - EXPECT_GT(I420PSNR(input_frame.get(), decoded_frame.get()), 36); - CheckData(decoded_frame->video_frame_buffer()); -} - -TEST_P(TestMultiplexAdapter, EncodeDecodeI420AFrame) { - std::unique_ptr yuva_frame = CreateInputFrame(true); - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*yuva_frame, nullptr)); - EncodedImage encoded_frame; - CodecSpecificInfo codec_specific_info; - ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); - EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType); - - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, 0)); - std::unique_ptr decoded_frame; - absl::optional decoded_qp; - ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp)); - ASSERT_TRUE(decoded_frame); - EXPECT_GT(I420PSNR(yuva_frame.get(), decoded_frame.get()), 36); - - // Find PSNR for AXX bits. - std::unique_ptr input_axx_frame = ExtractAXXFrame(*yuva_frame); - std::unique_ptr output_axx_frame = - ExtractAXXFrame(*decoded_frame); - EXPECT_GT(I420PSNR(input_axx_frame.get(), output_axx_frame.get()), 47); - - CheckData(decoded_frame->video_frame_buffer()); -} - -TEST_P(TestMultiplexAdapter, CheckSingleFrameEncodedBitstream) { - std::unique_ptr input_frame = CreateInputFrame(false); - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*input_frame, nullptr)); - EncodedImage encoded_frame; - CodecSpecificInfo codec_specific_info; - ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); - EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType); - EXPECT_FALSE(encoded_frame.SpatialIndex()); - - const MultiplexImage& unpacked_frame = - MultiplexEncodedImagePacker::Unpack(encoded_frame); - EXPECT_EQ(0, unpacked_frame.image_index); - EXPECT_EQ(1, unpacked_frame.component_count); - const MultiplexImageComponent& component = unpacked_frame.image_components[0]; - EXPECT_EQ(0, component.component_index); - EXPECT_NE(nullptr, component.encoded_image.data()); - EXPECT_EQ(VideoFrameType::kVideoFrameKey, component.encoded_image._frameType); -} - -TEST_P(TestMultiplexAdapter, CheckDoubleFramesEncodedBitstream) { - std::unique_ptr yuva_frame = CreateInputFrame(true); - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*yuva_frame, nullptr)); - EncodedImage encoded_frame; - CodecSpecificInfo codec_specific_info; - ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); - EXPECT_EQ(kVideoCodecMultiplex, codec_specific_info.codecType); - EXPECT_FALSE(encoded_frame.SpatialIndex()); - - const MultiplexImage& unpacked_frame = - MultiplexEncodedImagePacker::Unpack(encoded_frame); - EXPECT_EQ(0, unpacked_frame.image_index); - EXPECT_EQ(2, unpacked_frame.component_count); - 
EXPECT_EQ(unpacked_frame.image_components.size(), - unpacked_frame.component_count); - for (int i = 0; i < unpacked_frame.component_count; ++i) { - const MultiplexImageComponent& component = - unpacked_frame.image_components[i]; - EXPECT_EQ(i, component.component_index); - EXPECT_NE(nullptr, component.encoded_image.data()); - EXPECT_EQ(VideoFrameType::kVideoFrameKey, - component.encoded_image._frameType); - } -} - -TEST_P(TestMultiplexAdapter, ImageIndexIncreases) { - std::unique_ptr yuva_frame = CreateInputFrame(true); - const size_t expected_num_encoded_frames = 3; - for (size_t i = 0; i < expected_num_encoded_frames; ++i) { - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*yuva_frame, nullptr)); - EncodedImage encoded_frame; - CodecSpecificInfo codec_specific_info; - ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); - const MultiplexImage& unpacked_frame = - MultiplexEncodedImagePacker::Unpack(encoded_frame); - EXPECT_EQ(i, unpacked_frame.image_index); - EXPECT_EQ( - i ? VideoFrameType::kVideoFrameDelta : VideoFrameType::kVideoFrameKey, - encoded_frame._frameType); - } -} - -INSTANTIATE_TEST_SUITE_P(TestMultiplexAdapter, - TestMultiplexAdapter, - ::testing::Bool()); - -} // namespace webrtc diff --git a/modules/video_coding/codecs/test/encoded_video_frame_producer.cc b/modules/video_coding/codecs/test/encoded_video_frame_producer.cc index be2f2bfcba..25becc1fde 100644 --- a/modules/video_coding/codecs/test/encoded_video_frame_producer.cc +++ b/modules/video_coding/codecs/test/encoded_video_frame_producer.cc @@ -10,12 +10,14 @@ #include "modules/video_coding/codecs/test/encoded_video_frame_producer.h" +#include #include +#include #include #include "api/test/create_frame_generator.h" #include "api/test/frame_generator_interface.h" -#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/encoded_image.h" #include "api/video/video_frame.h" #include "api/video/video_frame_type.h" #include "api/video_codecs/video_encoder.h" @@ -49,7 +51,7 @@ EncodedVideoFrameProducer::Encode() { std::unique_ptr frame_buffer_generator = test::CreateSquareFrameGenerator( resolution_.Width(), resolution_.Height(), - test::FrameGeneratorInterface::OutputType::kI420, absl::nullopt); + test::FrameGeneratorInterface::OutputType::kI420, std::nullopt); std::vector encoded_frames; EncoderCallback encoder_callback(encoded_frames); @@ -61,8 +63,8 @@ EncodedVideoFrameProducer::Encode() { VideoFrame frame = VideoFrame::Builder() .set_video_frame_buffer(frame_buffer_generator->NextFrame().buffer) - .set_timestamp_rtp(rtp_timestamp_) - .set_capture_time_identifier(capture_time_identifier_) + .set_rtp_timestamp(rtp_timestamp_) + .set_presentation_timestamp(presentation_timestamp_) .build(); rtp_timestamp_ += rtp_tick; RTC_CHECK_EQ(encoder_.Encode(frame, &next_frame_type_), diff --git a/modules/video_coding/codecs/test/encoded_video_frame_producer.h b/modules/video_coding/codecs/test/encoded_video_frame_producer.h index 063cfd4efe..12fe37de1a 100644 --- a/modules/video_coding/codecs/test/encoded_video_frame_producer.h +++ b/modules/video_coding/codecs/test/encoded_video_frame_producer.h @@ -15,10 +15,13 @@ #include -#include "api/transport/rtp/dependency_descriptor.h" +#include "api/units/timestamp.h" #include "api/video/encoded_image.h" +#include "api/video/render_resolution.h" +#include "api/video/video_frame_type.h" #include "api/video_codecs/video_encoder.h" #include "modules/video_coding/include/video_codec_interface.h" +#include "rtc_base/checks.h" namespace webrtc { @@ -49,7 
+52,7 @@ class EncodedVideoFrameProducer { EncodedVideoFrameProducer& SetRtpTimestamp(uint32_t value); - EncodedVideoFrameProducer& SetCaptureTimeIdentifier(Timestamp value); + EncodedVideoFrameProducer& SetPresentationTimestamp(Timestamp value); // Generates input video frames and encodes them with `encoder` provided // in the constructor. Returns frame passed to the `OnEncodedImage` by @@ -60,7 +63,7 @@ class EncodedVideoFrameProducer { VideoEncoder& encoder_; uint32_t rtp_timestamp_ = 1000; - Timestamp capture_time_identifier_ = Timestamp::Micros(1000); + Timestamp presentation_timestamp_ = Timestamp::Micros(1000); int num_input_frames_ = 1; int framerate_fps_ = 30; RenderResolution resolution_ = {320, 180}; @@ -100,8 +103,8 @@ inline EncodedVideoFrameProducer& EncodedVideoFrameProducer::SetRtpTimestamp( } inline EncodedVideoFrameProducer& -EncodedVideoFrameProducer::SetCaptureTimeIdentifier(Timestamp value) { - capture_time_identifier_ = value; +EncodedVideoFrameProducer::SetPresentationTimestamp(Timestamp value) { + presentation_timestamp_ = value; return *this; } } // namespace webrtc diff --git a/modules/video_coding/codecs/test/objc_codec_factory_helper.mm b/modules/video_coding/codecs/test/objc_codec_factory_helper.mm index ed82376251..7862b7eee6 100644 --- a/modules/video_coding/codecs/test/objc_codec_factory_helper.mm +++ b/modules/video_coding/codecs/test/objc_codec_factory_helper.mm @@ -19,11 +19,13 @@ namespace test { std::unique_ptr CreateObjCEncoderFactory() { - return ObjCToNativeVideoEncoderFactory([[RTC_OBJC_TYPE(RTCVideoEncoderFactoryH264) alloc] init]); + return ObjCToNativeVideoEncoderFactory( + [[RTC_OBJC_TYPE(RTCVideoEncoderFactoryH264) alloc] init]); } std::unique_ptr CreateObjCDecoderFactory() { - return ObjCToNativeVideoDecoderFactory([[RTC_OBJC_TYPE(RTCVideoDecoderFactoryH264) alloc] init]); + return ObjCToNativeVideoDecoderFactory( + [[RTC_OBJC_TYPE(RTCVideoDecoderFactoryH264) alloc] init]); } } // namespace test diff --git a/modules/video_coding/codecs/test/video_codec_analyzer.cc b/modules/video_coding/codecs/test/video_codec_analyzer.cc deleted file mode 100644 index 6e0ea9f6b4..0000000000 --- a/modules/video_coding/codecs/test/video_codec_analyzer.cc +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/video_coding/codecs/test/video_codec_analyzer.h" - -#include - -#include "api/task_queue/default_task_queue_factory.h" -#include "api/video/i420_buffer.h" -#include "api/video/video_codec_constants.h" -#include "api/video/video_frame.h" -#include "rtc_base/checks.h" -#include "rtc_base/event.h" -#include "rtc_base/time_utils.h" -#include "third_party/libyuv/include/libyuv/compare.h" - -namespace webrtc { -namespace test { - -namespace { -using Psnr = VideoCodecStats::Frame::Psnr; - -Psnr CalcPsnr(const I420BufferInterface& ref_buffer, - const I420BufferInterface& dec_buffer) { - RTC_CHECK_EQ(ref_buffer.width(), dec_buffer.width()); - RTC_CHECK_EQ(ref_buffer.height(), dec_buffer.height()); - - uint64_t sse_y = libyuv::ComputeSumSquareErrorPlane( - dec_buffer.DataY(), dec_buffer.StrideY(), ref_buffer.DataY(), - ref_buffer.StrideY(), dec_buffer.width(), dec_buffer.height()); - - uint64_t sse_u = libyuv::ComputeSumSquareErrorPlane( - dec_buffer.DataU(), dec_buffer.StrideU(), ref_buffer.DataU(), - ref_buffer.StrideU(), dec_buffer.width() / 2, dec_buffer.height() / 2); - - uint64_t sse_v = libyuv::ComputeSumSquareErrorPlane( - dec_buffer.DataV(), dec_buffer.StrideV(), ref_buffer.DataV(), - ref_buffer.StrideV(), dec_buffer.width() / 2, dec_buffer.height() / 2); - - int num_y_samples = dec_buffer.width() * dec_buffer.height(); - Psnr psnr; - psnr.y = libyuv::SumSquareErrorToPsnr(sse_y, num_y_samples); - psnr.u = libyuv::SumSquareErrorToPsnr(sse_u, num_y_samples / 4); - psnr.v = libyuv::SumSquareErrorToPsnr(sse_v, num_y_samples / 4); - - return psnr; -} - -} // namespace - -VideoCodecAnalyzer::VideoCodecAnalyzer( - ReferenceVideoSource* reference_video_source) - : reference_video_source_(reference_video_source), num_frames_(0) { - sequence_checker_.Detach(); -} - -void VideoCodecAnalyzer::StartEncode(const VideoFrame& input_frame) { - int64_t encode_start_us = rtc::TimeMicros(); - task_queue_.PostTask( - [this, timestamp_rtp = input_frame.timestamp(), encode_start_us]() { - RTC_DCHECK_RUN_ON(&sequence_checker_); - - RTC_CHECK(frame_num_.find(timestamp_rtp) == frame_num_.end()); - frame_num_[timestamp_rtp] = num_frames_++; - - stats_.AddFrame({.frame_num = frame_num_[timestamp_rtp], - .timestamp_rtp = timestamp_rtp, - .encode_start = Timestamp::Micros(encode_start_us)}); - }); -} - -void VideoCodecAnalyzer::FinishEncode(const EncodedImage& frame) { - int64_t encode_finished_us = rtc::TimeMicros(); - - task_queue_.PostTask([this, timestamp_rtp = frame.Timestamp(), - spatial_idx = frame.SpatialIndex().value_or(0), - temporal_idx = frame.TemporalIndex().value_or(0), - width = frame._encodedWidth, - height = frame._encodedHeight, - frame_type = frame._frameType, - frame_size_bytes = frame.size(), qp = frame.qp_, - encode_finished_us]() { - RTC_DCHECK_RUN_ON(&sequence_checker_); - - if (spatial_idx > 0) { - VideoCodecStats::Frame* base_frame = - stats_.GetFrame(timestamp_rtp, /*spatial_idx=*/0); - - stats_.AddFrame({.frame_num = base_frame->frame_num, - .timestamp_rtp = timestamp_rtp, - .spatial_idx = spatial_idx, - .encode_start = base_frame->encode_start}); - } - - VideoCodecStats::Frame* fs = stats_.GetFrame(timestamp_rtp, spatial_idx); - fs->spatial_idx = spatial_idx; - fs->temporal_idx = temporal_idx; - fs->width = width; - fs->height = height; - fs->frame_size = DataSize::Bytes(frame_size_bytes); - fs->qp = qp; - fs->keyframe = frame_type == VideoFrameType::kVideoFrameKey; - fs->encode_time = Timestamp::Micros(encode_finished_us) - fs->encode_start; - fs->encoded = true; - 
}); -} - -void VideoCodecAnalyzer::StartDecode(const EncodedImage& frame) { - int64_t decode_start_us = rtc::TimeMicros(); - task_queue_.PostTask([this, timestamp_rtp = frame.Timestamp(), - spatial_idx = frame.SpatialIndex().value_or(0), - frame_size_bytes = frame.size(), decode_start_us]() { - RTC_DCHECK_RUN_ON(&sequence_checker_); - - VideoCodecStats::Frame* fs = stats_.GetFrame(timestamp_rtp, spatial_idx); - if (fs == nullptr) { - if (frame_num_.find(timestamp_rtp) == frame_num_.end()) { - frame_num_[timestamp_rtp] = num_frames_++; - } - stats_.AddFrame({.frame_num = frame_num_[timestamp_rtp], - .timestamp_rtp = timestamp_rtp, - .spatial_idx = spatial_idx, - .frame_size = DataSize::Bytes(frame_size_bytes)}); - fs = stats_.GetFrame(timestamp_rtp, spatial_idx); - } - - fs->decode_start = Timestamp::Micros(decode_start_us); - }); -} - -void VideoCodecAnalyzer::FinishDecode(const VideoFrame& frame, - int spatial_idx) { - int64_t decode_finished_us = rtc::TimeMicros(); - task_queue_.PostTask([this, timestamp_rtp = frame.timestamp(), spatial_idx, - width = frame.width(), height = frame.height(), - decode_finished_us]() { - RTC_DCHECK_RUN_ON(&sequence_checker_); - VideoCodecStats::Frame* fs = stats_.GetFrame(timestamp_rtp, spatial_idx); - fs->decode_time = Timestamp::Micros(decode_finished_us) - fs->decode_start; - - if (!fs->encoded) { - fs->width = width; - fs->height = height; - } - - fs->decoded = true; - }); - - if (reference_video_source_ != nullptr) { - // Copy hardware-backed frame into main memory to release output buffers - // which number may be limited in hardware decoders. - rtc::scoped_refptr decoded_buffer = - frame.video_frame_buffer()->ToI420(); - - task_queue_.PostTask([this, decoded_buffer, - timestamp_rtp = frame.timestamp(), spatial_idx]() { - RTC_DCHECK_RUN_ON(&sequence_checker_); - VideoFrame ref_frame = reference_video_source_->GetFrame( - timestamp_rtp, {.width = decoded_buffer->width(), - .height = decoded_buffer->height()}); - rtc::scoped_refptr ref_buffer = - ref_frame.video_frame_buffer()->ToI420(); - - Psnr psnr = CalcPsnr(*decoded_buffer, *ref_buffer); - - VideoCodecStats::Frame* fs = - this->stats_.GetFrame(timestamp_rtp, spatial_idx); - fs->psnr = psnr; - }); - } -} - -std::unique_ptr VideoCodecAnalyzer::GetStats() { - std::unique_ptr stats; - rtc::Event ready; - task_queue_.PostTask([this, &stats, &ready]() mutable { - RTC_DCHECK_RUN_ON(&sequence_checker_); - stats.reset(new VideoCodecStatsImpl(stats_)); - ready.Set(); - }); - ready.Wait(rtc::Event::kForever); - return stats; -} - -} // namespace test -} // namespace webrtc diff --git a/modules/video_coding/codecs/test/video_codec_analyzer.h b/modules/video_coding/codecs/test/video_codec_analyzer.h deleted file mode 100644 index 29ca8ee2ff..0000000000 --- a/modules/video_coding/codecs/test/video_codec_analyzer.h +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_ANALYZER_H_ -#define MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_ANALYZER_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/sequence_checker.h" -#include "api/test/video_codec_tester.h" -#include "api/video/encoded_image.h" -#include "api/video/resolution.h" -#include "api/video/video_frame.h" -#include "modules/video_coding/codecs/test/video_codec_stats_impl.h" -#include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_queue_for_test.h" - -namespace webrtc { -namespace test { - -// Analyzer measures and collects metrics necessary for evaluation of video -// codec quality and performance. This class is thread-safe. -class VideoCodecAnalyzer { - public: - // An interface that provides reference frames for spatial quality analysis. - class ReferenceVideoSource { - public: - virtual ~ReferenceVideoSource() = default; - - virtual VideoFrame GetFrame(uint32_t timestamp_rtp, - Resolution resolution) = 0; - }; - - explicit VideoCodecAnalyzer( - ReferenceVideoSource* reference_video_source = nullptr); - - void StartEncode(const VideoFrame& frame); - - void FinishEncode(const EncodedImage& frame); - - void StartDecode(const EncodedImage& frame); - - void FinishDecode(const VideoFrame& frame, int spatial_idx); - - std::unique_ptr GetStats(); - - protected: - TaskQueueForTest task_queue_; - - ReferenceVideoSource* const reference_video_source_; - - VideoCodecStatsImpl stats_ RTC_GUARDED_BY(sequence_checker_); - - // Map from RTP timestamp to frame number. - std::map frame_num_ RTC_GUARDED_BY(sequence_checker_); - - // Processed frames counter. - int num_frames_ RTC_GUARDED_BY(sequence_checker_); - - RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; -}; - -} // namespace test -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_ANALYZER_H_ diff --git a/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc b/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc deleted file mode 100644 index d7c5fe28a4..0000000000 --- a/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/video_coding/codecs/test/video_codec_analyzer.h" - -#include "absl/types/optional.h" -#include "api/video/i420_buffer.h" -#include "test/gmock.h" -#include "test/gtest.h" -#include "third_party/libyuv/include/libyuv/planar_functions.h" - -namespace webrtc { -namespace test { - -namespace { -using ::testing::Return; -using ::testing::Values; -using Psnr = VideoCodecStats::Frame::Psnr; - -const uint32_t kTimestamp = 3000; -const int kSpatialIdx = 2; - -class MockReferenceVideoSource - : public VideoCodecAnalyzer::ReferenceVideoSource { - public: - MOCK_METHOD(VideoFrame, GetFrame, (uint32_t, Resolution), (override)); -}; - -VideoFrame CreateVideoFrame(uint32_t timestamp_rtp, - uint8_t y = 0, - uint8_t u = 0, - uint8_t v = 0) { - rtc::scoped_refptr buffer(I420Buffer::Create(2, 2)); - - libyuv::I420Rect(buffer->MutableDataY(), buffer->StrideY(), - buffer->MutableDataU(), buffer->StrideU(), - buffer->MutableDataV(), buffer->StrideV(), 0, 0, - buffer->width(), buffer->height(), y, u, v); - - return VideoFrame::Builder() - .set_video_frame_buffer(buffer) - .set_timestamp_rtp(timestamp_rtp) - .build(); -} - -EncodedImage CreateEncodedImage(uint32_t timestamp_rtp, int spatial_idx = 0) { - EncodedImage encoded_image; - encoded_image.SetTimestamp(timestamp_rtp); - encoded_image.SetSpatialIndex(spatial_idx); - return encoded_image; -} -} // namespace - -TEST(VideoCodecAnalyzerTest, StartEncode) { - VideoCodecAnalyzer analyzer; - analyzer.StartEncode(CreateVideoFrame(kTimestamp)); - - auto fs = analyzer.GetStats()->Slice(); - EXPECT_EQ(1u, fs.size()); - EXPECT_EQ(fs[0].timestamp_rtp, kTimestamp); -} - -TEST(VideoCodecAnalyzerTest, FinishEncode) { - VideoCodecAnalyzer analyzer; - analyzer.StartEncode(CreateVideoFrame(kTimestamp)); - - EncodedImage encoded_frame = CreateEncodedImage(kTimestamp, kSpatialIdx); - analyzer.FinishEncode(encoded_frame); - - auto fs = analyzer.GetStats()->Slice(); - EXPECT_EQ(2u, fs.size()); - EXPECT_EQ(kSpatialIdx, fs[1].spatial_idx); -} - -TEST(VideoCodecAnalyzerTest, StartDecode) { - VideoCodecAnalyzer analyzer; - analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx)); - - auto fs = analyzer.GetStats()->Slice(); - EXPECT_EQ(1u, fs.size()); - EXPECT_EQ(kTimestamp, fs[0].timestamp_rtp); -} - -TEST(VideoCodecAnalyzerTest, FinishDecode) { - VideoCodecAnalyzer analyzer; - analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx)); - VideoFrame decoded_frame = CreateVideoFrame(kTimestamp); - analyzer.FinishDecode(decoded_frame, kSpatialIdx); - - auto fs = analyzer.GetStats()->Slice(); - EXPECT_EQ(1u, fs.size()); - EXPECT_EQ(decoded_frame.width(), fs[0].width); - EXPECT_EQ(decoded_frame.height(), fs[0].height); -} - -TEST(VideoCodecAnalyzerTest, ReferenceVideoSource) { - MockReferenceVideoSource reference_video_source; - VideoCodecAnalyzer analyzer(&reference_video_source); - analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx)); - - EXPECT_CALL(reference_video_source, GetFrame) - .WillOnce(Return(CreateVideoFrame(kTimestamp, /*y=*/0, - /*u=*/0, /*v=*/0))); - - analyzer.FinishDecode( - CreateVideoFrame(kTimestamp, /*value_y=*/1, /*value_u=*/2, /*value_v=*/3), - kSpatialIdx); - - auto fs = analyzer.GetStats()->Slice(); - EXPECT_EQ(1u, fs.size()); - EXPECT_TRUE(fs[0].psnr.has_value()); - - const Psnr& psnr = *fs[0].psnr; - EXPECT_NEAR(psnr.y, 48, 1); - EXPECT_NEAR(psnr.u, 42, 1); - EXPECT_NEAR(psnr.v, 38, 1); -} - -} // namespace test -} // namespace webrtc diff --git a/modules/video_coding/codecs/test/video_codec_stats_impl.cc 
b/modules/video_coding/codecs/test/video_codec_stats_impl.cc deleted file mode 100644 index 9808e2a601..0000000000 --- a/modules/video_coding/codecs/test/video_codec_stats_impl.cc +++ /dev/null @@ -1,278 +0,0 @@ -/* - * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/video_coding/codecs/test/video_codec_stats_impl.h" - -#include - -#include "api/numerics/samples_stats_counter.h" -#include "api/test/metrics/metrics_logger.h" -#include "rtc_base/checks.h" -#include "rtc_base/time_utils.h" - -namespace webrtc { -namespace test { -namespace { -using Frame = VideoCodecStats::Frame; -using Stream = VideoCodecStats::Stream; - -constexpr Frequency k90kHz = Frequency::Hertz(90000); - -class LeakyBucket { - public: - LeakyBucket() : level_bits_(0) {} - - // Updates bucket level and returns its current level in bits. Data is remove - // from bucket with rate equal to target bitrate of previous frame. Bucket - // level is tracked with floating point precision. Returned value of bucket - // level is rounded up. - int Update(const Frame& frame) { - RTC_CHECK(frame.target_bitrate) << "Bitrate must be specified."; - - if (prev_frame_) { - RTC_CHECK_GT(frame.timestamp_rtp, prev_frame_->timestamp_rtp) - << "Timestamp must increase."; - TimeDelta passed = - (frame.timestamp_rtp - prev_frame_->timestamp_rtp) / k90kHz; - level_bits_ -= - prev_frame_->target_bitrate->bps() * passed.us() / 1000000.0; - level_bits_ = std::max(level_bits_, 0.0); - } - - prev_frame_ = frame; - - level_bits_ += frame.frame_size.bytes() * 8; - return static_cast(std::ceil(level_bits_)); - } - - private: - absl::optional prev_frame_; - double level_bits_; -}; - -// Merges spatial layer frames into superframes. -std::vector Merge(const std::vector& frames) { - std::vector superframes; - // Map from frame timestamp to index in `superframes` vector. 
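Aside: the PSNR expectations at the end of the deleted analyzer unit test above (about 48, 42 and 38 dB, each with a tolerance of 1) follow directly from the constant per-plane offsets of 1, 2 and 3 between the decoded and the reference frame: a plane that is off by a constant d has MSE = d*d, so PSNR = 10 * log10(255^2 / d^2). A small stand-alone sketch of that arithmetic (not part of the patch; the function name is illustrative):

#include <cmath>
#include <cstdio>

// PSNR of an 8-bit plane whose every sample differs from the reference by a
// constant offset d: MSE = d*d, PSNR = 10 * log10(255^2 / MSE).
double ConstantOffsetPsnrDb(int d) {
  return 10.0 * std::log10(255.0 * 255.0 / (d * d));
}

int main() {
  // Matches the Y/U/V fill values 1, 2 and 3 used by the test above.
  std::printf("Y: %.2f dB\n", ConstantOffsetPsnrDb(1));  // ~48.13
  std::printf("U: %.2f dB\n", ConstantOffsetPsnrDb(2));  // ~42.11
  std::printf("V: %.2f dB\n", ConstantOffsetPsnrDb(3));  // ~38.59
}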
- std::map index; - - for (const auto& f : frames) { - if (index.find(f.timestamp_rtp) == index.end()) { - index[f.timestamp_rtp] = static_cast(superframes.size()); - superframes.push_back(f); - continue; - } - - Frame& sf = superframes[index[f.timestamp_rtp]]; - - sf.width = std::max(sf.width, f.width); - sf.height = std::max(sf.height, f.height); - sf.frame_size += f.frame_size; - sf.keyframe |= f.keyframe; - - sf.encode_time = std::max(sf.encode_time, f.encode_time); - sf.decode_time = std::max(sf.decode_time, f.decode_time); - - if (f.spatial_idx > sf.spatial_idx) { - if (f.qp) { - sf.qp = f.qp; - } - if (f.psnr) { - sf.psnr = f.psnr; - } - } - - sf.spatial_idx = std::max(sf.spatial_idx, f.spatial_idx); - sf.temporal_idx = std::max(sf.temporal_idx, f.temporal_idx); - - sf.encoded |= f.encoded; - sf.decoded |= f.decoded; - } - - return superframes; -} - -Timestamp RtpToTime(uint32_t timestamp_rtp) { - return Timestamp::Micros((timestamp_rtp / k90kHz).us()); -} - -SamplesStatsCounter::StatsSample StatsSample(double value, Timestamp time) { - return SamplesStatsCounter::StatsSample{value, time}; -} - -TimeDelta CalcTotalDuration(const std::vector& frames) { - RTC_CHECK(!frames.empty()); - TimeDelta duration = TimeDelta::Zero(); - if (frames.size() > 1) { - duration += - (frames.rbegin()->timestamp_rtp - frames.begin()->timestamp_rtp) / - k90kHz; - } - - // Add last frame duration. If target frame rate is provided, calculate frame - // duration from it. Otherwise, assume duration of last frame is the same as - // duration of preceding frame. - if (frames.rbegin()->target_framerate) { - duration += 1 / *frames.rbegin()->target_framerate; - } else { - RTC_CHECK_GT(frames.size(), 1u); - duration += (frames.rbegin()->timestamp_rtp - - std::next(frames.rbegin())->timestamp_rtp) / - k90kHz; - } - - return duration; -} -} // namespace - -std::vector VideoCodecStatsImpl::Slice( - absl::optional filter) const { - std::vector frames; - for (const auto& [frame_id, f] : frames_) { - if (filter.has_value()) { - if (filter->first_frame.has_value() && - f.frame_num < *filter->first_frame) { - continue; - } - if (filter->last_frame.has_value() && f.frame_num > *filter->last_frame) { - continue; - } - if (filter->spatial_idx.has_value() && - f.spatial_idx != *filter->spatial_idx) { - continue; - } - if (filter->temporal_idx.has_value() && - f.temporal_idx > *filter->temporal_idx) { - continue; - } - } - frames.push_back(f); - } - return frames; -} - -Stream VideoCodecStatsImpl::Aggregate(const std::vector& frames) const { - std::vector superframes = Merge(frames); - RTC_CHECK(!superframes.empty()); - - LeakyBucket leacky_bucket; - Stream stream; - for (size_t i = 0; i < superframes.size(); ++i) { - Frame& f = superframes[i]; - Timestamp time = RtpToTime(f.timestamp_rtp); - - if (!f.frame_size.IsZero()) { - stream.width.AddSample(StatsSample(f.width, time)); - stream.height.AddSample(StatsSample(f.height, time)); - stream.frame_size_bytes.AddSample( - StatsSample(f.frame_size.bytes(), time)); - stream.keyframe.AddSample(StatsSample(f.keyframe, time)); - if (f.qp) { - stream.qp.AddSample(StatsSample(*f.qp, time)); - } - } - - if (f.encoded) { - stream.encode_time_ms.AddSample(StatsSample(f.encode_time.ms(), time)); - } - - if (f.decoded) { - stream.decode_time_ms.AddSample(StatsSample(f.decode_time.ms(), time)); - } - - if (f.psnr) { - stream.psnr.y.AddSample(StatsSample(f.psnr->y, time)); - stream.psnr.u.AddSample(StatsSample(f.psnr->u, time)); - stream.psnr.v.AddSample(StatsSample(f.psnr->v, time)); - } - 
- if (f.target_framerate) { - stream.target_framerate_fps.AddSample( - StatsSample(f.target_framerate->millihertz() / 1000.0, time)); - } - - if (f.target_bitrate) { - stream.target_bitrate_kbps.AddSample( - StatsSample(f.target_bitrate->bps() / 1000.0, time)); - - int buffer_level_bits = leacky_bucket.Update(f); - stream.transmission_time_ms.AddSample( - StatsSample(buffer_level_bits * rtc::kNumMillisecsPerSec / - f.target_bitrate->bps(), - RtpToTime(f.timestamp_rtp))); - } - } - - TimeDelta duration = CalcTotalDuration(superframes); - DataRate encoded_bitrate = - DataSize::Bytes(stream.frame_size_bytes.GetSum()) / duration; - - int num_encoded_frames = stream.frame_size_bytes.NumSamples(); - Frequency encoded_framerate = num_encoded_frames / duration; - - absl::optional bitrate_mismatch_pct; - if (auto target_bitrate = superframes.begin()->target_bitrate; - target_bitrate) { - bitrate_mismatch_pct = 100.0 * - (encoded_bitrate.bps() - target_bitrate->bps()) / - target_bitrate->bps(); - } - - absl::optional framerate_mismatch_pct; - if (auto target_framerate = superframes.begin()->target_framerate; - target_framerate) { - framerate_mismatch_pct = - 100.0 * - (encoded_framerate.millihertz() - target_framerate->millihertz()) / - target_framerate->millihertz(); - } - - for (auto& f : superframes) { - Timestamp time = RtpToTime(f.timestamp_rtp); - stream.encoded_bitrate_kbps.AddSample( - StatsSample(encoded_bitrate.bps() / 1000.0, time)); - - stream.encoded_framerate_fps.AddSample( - StatsSample(encoded_framerate.millihertz() / 1000.0, time)); - - if (bitrate_mismatch_pct) { - stream.bitrate_mismatch_pct.AddSample( - StatsSample(*bitrate_mismatch_pct, time)); - } - - if (framerate_mismatch_pct) { - stream.framerate_mismatch_pct.AddSample( - StatsSample(*framerate_mismatch_pct, time)); - } - } - - return stream; -} - -void VideoCodecStatsImpl::AddFrame(const Frame& frame) { - FrameId frame_id{.timestamp_rtp = frame.timestamp_rtp, - .spatial_idx = frame.spatial_idx}; - RTC_CHECK(frames_.find(frame_id) == frames_.end()) - << "Frame with timestamp_rtp=" << frame.timestamp_rtp - << " and spatial_idx=" << frame.spatial_idx << " already exists"; - - frames_[frame_id] = frame; -} - -Frame* VideoCodecStatsImpl::GetFrame(uint32_t timestamp_rtp, int spatial_idx) { - FrameId frame_id{.timestamp_rtp = timestamp_rtp, .spatial_idx = spatial_idx}; - if (frames_.find(frame_id) == frames_.end()) { - return nullptr; - } - return &frames_.find(frame_id)->second; -} - -} // namespace test -} // namespace webrtc diff --git a/modules/video_coding/codecs/test/video_codec_stats_impl.h b/modules/video_coding/codecs/test/video_codec_stats_impl.h deleted file mode 100644 index 77471d2ecd..0000000000 --- a/modules/video_coding/codecs/test/video_codec_stats_impl.h +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
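Aside: the transmission_time_ms samples produced in Aggregate() above are the least obvious metric in the deleted stats implementation. A leaky bucket is filled with each frame's size in bits, drained at the previous frame's target bitrate, and the remaining level is converted to milliseconds at the current target bitrate. A self-contained sketch of that arithmetic, fed with the same inputs as the AggregateTransmissionTime unit test further down (2- and 3-byte frames, a 1 byte/s target, frames 90000 RTP ticks apart); the struct and names are illustrative, not part of the patch:

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>
#include <vector>

// Simplified stand-in for the fields of VideoCodecStats::Frame used here.
struct FrameSample {
  uint32_t timestamp_rtp;  // 90 kHz clock.
  int frame_size_bytes;
  int target_bitrate_bps;
};

int main() {
  const std::vector<FrameSample> frames = {
      {0, 2, 8},      // 2 bytes at a 1 byte/s (8 bps) target.
      {90000, 3, 8},  // 3 bytes, one second later.
  };

  double level_bits = 0.0;
  const FrameSample* prev = nullptr;
  for (const FrameSample& f : frames) {
    if (prev != nullptr) {
      // Drain the bucket at the previous frame's target bitrate.
      double passed_s = (f.timestamp_rtp - prev->timestamp_rtp) / 90000.0;
      level_bits =
          std::max(level_bits - prev->target_bitrate_bps * passed_s, 0.0);
    }
    // Add the new frame, then convert the bucket level to a transmission time.
    level_bits += f.frame_size_bytes * 8;
    int buffer_level_bits = static_cast<int>(std::ceil(level_bits));
    double transmission_time_ms =
        buffer_level_bits * 1000.0 / f.target_bitrate_bps;
    std::printf("rtp=%u transmission_time_ms=%.0f\n",
                static_cast<unsigned>(f.timestamp_rtp), transmission_time_ms);
    prev = &f;
  }
  // Prints 2000 and then 4000, matching the unit test's expectations.
}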
- */ - -#ifndef MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_STATS_IMPL_H_ -#define MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_STATS_IMPL_H_ - -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/test/video_codec_stats.h" - -namespace webrtc { -namespace test { - -// Implementation of `VideoCodecStats`. This class is not thread-safe. -class VideoCodecStatsImpl : public VideoCodecStats { - public: - std::vector Slice( - absl::optional filter = absl::nullopt) const override; - - Stream Aggregate(const std::vector& frames) const override; - - void AddFrame(const Frame& frame); - - // Returns raw pointers to previously added frame. If frame does not exist, - // returns `nullptr`. - Frame* GetFrame(uint32_t timestamp_rtp, int spatial_idx); - - private: - struct FrameId { - uint32_t timestamp_rtp; - int spatial_idx; - - bool operator==(const FrameId& o) const { - return timestamp_rtp == o.timestamp_rtp && spatial_idx == o.spatial_idx; - } - - bool operator<(const FrameId& o) const { - if (timestamp_rtp < o.timestamp_rtp) - return true; - if (timestamp_rtp == o.timestamp_rtp && spatial_idx < o.spatial_idx) - return true; - return false; - } - }; - - std::map frames_; -}; - -} // namespace test -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_STATS_IMPL_H_ diff --git a/modules/video_coding/codecs/test/video_codec_stats_impl_unittest.cc b/modules/video_coding/codecs/test/video_codec_stats_impl_unittest.cc deleted file mode 100644 index ce11d5abe6..0000000000 --- a/modules/video_coding/codecs/test/video_codec_stats_impl_unittest.cc +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/video_coding/codecs/test/video_codec_stats_impl.h" - -#include - -#include "absl/types/optional.h" -#include "test/gmock.h" -#include "test/gtest.h" - -namespace webrtc { -namespace test { - -namespace { -using ::testing::Return; -using ::testing::Values; -using Filter = VideoCodecStats::Filter; -using Frame = VideoCodecStatsImpl::Frame; -using Stream = VideoCodecStats::Stream; -} // namespace - -TEST(VideoCodecStatsImpl, AddAndGetFrame) { - VideoCodecStatsImpl stats; - stats.AddFrame({.timestamp_rtp = 0, .spatial_idx = 0}); - stats.AddFrame({.timestamp_rtp = 0, .spatial_idx = 1}); - stats.AddFrame({.timestamp_rtp = 1, .spatial_idx = 0}); - - Frame* fs = stats.GetFrame(/*timestamp_rtp=*/0, /*spatial_idx=*/0); - ASSERT_NE(fs, nullptr); - EXPECT_EQ(fs->timestamp_rtp, 0u); - EXPECT_EQ(fs->spatial_idx, 0); - - fs = stats.GetFrame(/*timestamp_rtp=*/0, /*spatial_idx=*/1); - ASSERT_NE(fs, nullptr); - EXPECT_EQ(fs->timestamp_rtp, 0u); - EXPECT_EQ(fs->spatial_idx, 1); - - fs = stats.GetFrame(/*timestamp_rtp=*/1, /*spatial_idx=*/0); - ASSERT_NE(fs, nullptr); - EXPECT_EQ(fs->timestamp_rtp, 1u); - EXPECT_EQ(fs->spatial_idx, 0); - - fs = stats.GetFrame(/*timestamp_rtp=*/1, /*spatial_idx=*/1); - EXPECT_EQ(fs, nullptr); -} - -class VideoCodecStatsImplSlicingTest - : public ::testing::TestWithParam>> {}; - -TEST_P(VideoCodecStatsImplSlicingTest, Slice) { - Filter filter = std::get<0>(GetParam()); - std::vector expected_frames = std::get<1>(GetParam()); - std::vector frames = { - {.frame_num = 0, .timestamp_rtp = 0, .spatial_idx = 0, .temporal_idx = 0}, - {.frame_num = 0, .timestamp_rtp = 0, .spatial_idx = 1, .temporal_idx = 0}, - {.frame_num = 1, .timestamp_rtp = 1, .spatial_idx = 0, .temporal_idx = 1}, - {.frame_num = 1, - .timestamp_rtp = 1, - .spatial_idx = 1, - .temporal_idx = 1}}; - - VideoCodecStatsImpl stats; - stats.AddFrame(frames[0]); - stats.AddFrame(frames[1]); - stats.AddFrame(frames[2]); - stats.AddFrame(frames[3]); - - std::vector slice = stats.Slice(filter); - ASSERT_EQ(slice.size(), expected_frames.size()); - for (size_t i = 0; i < expected_frames.size(); ++i) { - Frame& expected = frames[expected_frames[i]]; - EXPECT_EQ(slice[i].frame_num, expected.frame_num); - EXPECT_EQ(slice[i].timestamp_rtp, expected.timestamp_rtp); - EXPECT_EQ(slice[i].spatial_idx, expected.spatial_idx); - EXPECT_EQ(slice[i].temporal_idx, expected.temporal_idx); - } -} - -INSTANTIATE_TEST_SUITE_P( - All, - VideoCodecStatsImplSlicingTest, - ::testing::Values( - std::make_tuple(Filter{}, std::vector{0, 1, 2, 3}), - std::make_tuple(Filter{.first_frame = 1}, std::vector{2, 3}), - std::make_tuple(Filter{.last_frame = 0}, std::vector{0, 1}), - std::make_tuple(Filter{.spatial_idx = 0}, std::vector{0, 2}), - std::make_tuple(Filter{.temporal_idx = 1}, - std::vector{0, 1, 2, 3}))); - -TEST(VideoCodecStatsImpl, AggregateBitrate) { - std::vector frames = { - {.frame_num = 0, - .timestamp_rtp = 0, - .frame_size = DataSize::Bytes(1000), - .target_bitrate = DataRate::BytesPerSec(1000)}, - {.frame_num = 1, - .timestamp_rtp = 90000, - .frame_size = DataSize::Bytes(2000), - .target_bitrate = DataRate::BytesPerSec(1000)}}; - - Stream stream = VideoCodecStatsImpl().Aggregate(frames); - EXPECT_EQ(stream.encoded_bitrate_kbps.GetAverage(), 12.0); - EXPECT_EQ(stream.bitrate_mismatch_pct.GetAverage(), 50.0); -} - -TEST(VideoCodecStatsImpl, AggregateFramerate) { - std::vector frames = { - {.frame_num = 0, - .timestamp_rtp = 0, - .frame_size = DataSize::Bytes(1), - .target_framerate = Frequency::Hertz(1)}, - 
{.frame_num = 1, - .timestamp_rtp = 90000, - .frame_size = DataSize::Zero(), - .target_framerate = Frequency::Hertz(1)}}; - - Stream stream = VideoCodecStatsImpl().Aggregate(frames); - EXPECT_EQ(stream.encoded_framerate_fps.GetAverage(), 0.5); - EXPECT_EQ(stream.framerate_mismatch_pct.GetAverage(), -50.0); -} - -TEST(VideoCodecStatsImpl, AggregateTransmissionTime) { - std::vector frames = { - {.frame_num = 0, - .timestamp_rtp = 0, - .frame_size = DataSize::Bytes(2), - .target_bitrate = DataRate::BytesPerSec(1)}, - {.frame_num = 1, - .timestamp_rtp = 90000, - .frame_size = DataSize::Bytes(3), - .target_bitrate = DataRate::BytesPerSec(1)}}; - - Stream stream = VideoCodecStatsImpl().Aggregate(frames); - ASSERT_EQ(stream.transmission_time_ms.NumSamples(), 2); - ASSERT_EQ(stream.transmission_time_ms.GetSamples()[0], 2000); - ASSERT_EQ(stream.transmission_time_ms.GetSamples()[1], 4000); -} - -} // namespace test -} // namespace webrtc diff --git a/modules/video_coding/codecs/test/video_codec_test.cc b/modules/video_coding/codecs/test/video_codec_test.cc index 587af46a07..a19bd5995b 100644 --- a/modules/video_coding/codecs/test/video_codec_test.cc +++ b/modules/video_coding/codecs/test/video_codec_test.cc @@ -8,40 +8,88 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "api/video_codecs/video_codec.h" - -#include +#include +#include +#include +#include +#include #include +#include #include +#include +#include #include -#include "absl/functional/any_invocable.h" -#include "api/test/create_video_codec_tester.h" +#include "absl/flags/flag.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" -#include "api/test/video_codec_tester.h" -#include "api/test/videocodec_test_stats.h" #include "api/units/data_rate.h" #include "api/units/frequency.h" -#include "api/video/encoded_image.h" -#include "api/video/i420_buffer.h" #include "api/video/resolution.h" -#include "api/video/video_frame.h" +#include "api/video_codecs/builtin_video_decoder_factory.h" +#include "api/video_codecs/builtin_video_encoder_factory.h" #include "api/video_codecs/scalability_mode.h" -#include "api/video_codecs/video_decoder.h" -#include "api/video_codecs/video_encoder.h" -#include "media/engine/internal_decoder_factory.h" -#include "media/engine/internal_encoder_factory.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/video_coding/include/video_error_codes.h" -#include "modules/video_coding/svc/scalability_mode_util.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "rtc_base/checks.h" #if defined(WEBRTC_ANDROID) #include "modules/video_coding/codecs/test/android_codec_factory_helper.h" #endif +#include "modules/video_coding/svc/scalability_mode_util.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" +#include "rtc_base/strings/string_builder.h" +#include "test/explicit_key_value_config.h" +#include "test/field_trial.h" #include "test/gtest.h" +#include "test/test_flags.h" #include "test/testsupport/file_utils.h" -#include "test/testsupport/frame_reader.h" +#include "test/video_codec_tester.h" + +ABSL_FLAG(std::string, + input_path, + webrtc::test::ResourcePath("FourPeople_1280x720_30", "yuv"), + "Path to input video file."); +ABSL_FLAG(int, input_width, 1280, "Input video width."); +ABSL_FLAG(int, input_height, 720, "Input 
video height."); +ABSL_FLAG(double, input_framerate_fps, 30, "Input video framerate, fps."); +ABSL_FLAG(std::string, + encoder, + "libaom-av1", + "Encoder: libaom-av1, libvpx-vp9, libvpx-vp8, openh264, hw-vp8, " + "hw-vp9, hw-av1, hw-h264, hw-h265"); +ABSL_FLAG(std::string, + decoder, + "dav1d", + "Decoder: dav1d, libvpx-vp9, libvpx-vp8, ffmpeg-h264, hw-vp8, " + "hw-vp9, hw-av1, hw-h264, hw-h265"); +ABSL_FLAG(std::string, scalability_mode, "L1T1", "Scalability mode."); +ABSL_FLAG(std::optional, width, std::nullopt, "Encode width."); +ABSL_FLAG(std::optional, height, std::nullopt, "Encode height."); +ABSL_FLAG(std::vector, + bitrate_kbps, + {"1024"}, + "Encode target bitrate per layer (l0t0,l0t1,...l1t0,l1t1 and so on) " + "in kbps."); +ABSL_FLAG(std::optional, + framerate_fps, + std::nullopt, + "Encode target frame rate of the top temporal layer in fps."); +ABSL_FLAG(bool, screencast, false, "Enable screen encoding mode."); +ABSL_FLAG(bool, frame_drop, true, "Enable frame dropping."); +ABSL_FLAG(int, + key_interval, + std::numeric_limits::max(), + "Keyframe interval in frames."); +ABSL_FLAG(int, num_frames, 300, "Number of frames to encode and/or decode."); +ABSL_FLAG(std::string, field_trials, "", "Field trials to apply."); +ABSL_FLAG(std::string, test_name, "", "Test name."); +ABSL_FLAG(bool, dump_decoder_input, false, "Dump decoder input."); +ABSL_FLAG(bool, dump_decoder_output, false, "Dump decoder output."); +ABSL_FLAG(bool, dump_encoder_input, false, "Dump encoder input."); +ABSL_FLAG(bool, dump_encoder_output, false, "Dump encoder output."); +ABSL_FLAG(bool, write_csv, false, "Write metrics to a CSV file."); namespace webrtc { namespace test { @@ -49,6 +97,10 @@ namespace test { namespace { using ::testing::Combine; using ::testing::Values; +using VideoSourceSettings = VideoCodecTester::VideoSourceSettings; +using EncodingSettings = VideoCodecTester::EncodingSettings; +using VideoCodecStats = VideoCodecTester::VideoCodecStats; +using Filter = VideoCodecStats::Filter; using PacingMode = VideoCodecTester::PacingSettings::PacingMode; struct VideoInfo { @@ -57,405 +109,82 @@ struct VideoInfo { Frequency framerate; }; -struct LayerId { - int spatial_idx; - int temporal_idx; - - bool operator==(const LayerId& o) const { - return spatial_idx == o.spatial_idx && temporal_idx == o.temporal_idx; - } - - bool operator<(const LayerId& o) const { - if (spatial_idx < o.spatial_idx) - return true; - if (spatial_idx == o.spatial_idx && temporal_idx < o.temporal_idx) - return true; - return false; - } -}; - -struct EncodingSettings { - ScalabilityMode scalability_mode; - struct LayerSettings { - Resolution resolution; - Frequency framerate; - DataRate bitrate; - }; - std::map layer_settings; - - bool IsSameSettings(const EncodingSettings& other) const { - if (scalability_mode != other.scalability_mode) { - return false; - } - - for (auto [layer_id, layer] : layer_settings) { - const auto& other_layer = other.layer_settings.at(layer_id); - if (layer.resolution != other_layer.resolution) { - return false; - } - } - - return true; - } - - bool IsSameRate(const EncodingSettings& other) const { - for (auto [layer_id, layer] : layer_settings) { - const auto& other_layer = other.layer_settings.at(layer_id); - if (layer.bitrate != other_layer.bitrate || - layer.framerate != other_layer.framerate) { - return false; - } - } - - return true; - } -}; - -const VideoInfo kFourPeople_1280x720_30 = { +VideoInfo kFourPeople_1280x720_30 = { .name = "FourPeople_1280x720_30", .resolution = {.width = 1280, .height = 
720}, .framerate = Frequency::Hertz(30)}; -class TestRawVideoSource : public VideoCodecTester::RawVideoSource { - public: - static constexpr Frequency k90kHz = Frequency::Hertz(90000); - - TestRawVideoSource(VideoInfo video_info, - const std::map& frame_settings, - int num_frames) - : video_info_(video_info), - frame_settings_(frame_settings), - num_frames_(num_frames), - frame_num_(0), - // Start with non-zero timestamp to force using frame RTP timestamps in - // IvfFrameWriter. - timestamp_rtp_(90000) { - // Ensure settings for the first frame are provided. - RTC_CHECK_GT(frame_settings_.size(), 0u); - RTC_CHECK_EQ(frame_settings_.begin()->first, 0); - - frame_reader_ = CreateYuvFrameReader( - ResourcePath(video_info_.name, "yuv"), video_info_.resolution, - YuvFrameReaderImpl::RepeatMode::kPingPong); - RTC_CHECK(frame_reader_); - } - - // Pulls next frame. Frame RTP timestamp is set accordingly to - // `EncodingSettings::framerate`. - absl::optional PullFrame() override { - if (frame_num_ >= num_frames_) { - return absl::nullopt; // End of stream. - } - - const EncodingSettings& encoding_settings = - std::prev(frame_settings_.upper_bound(frame_num_))->second; - - Resolution resolution = - encoding_settings.layer_settings.begin()->second.resolution; - Frequency framerate = - encoding_settings.layer_settings.begin()->second.framerate; - - int pulled_frame; - auto buffer = frame_reader_->PullFrame( - &pulled_frame, resolution, - {.num = static_cast(framerate.millihertz()), - .den = static_cast(video_info_.framerate.millihertz())}); - RTC_CHECK(buffer) << "Cannot pull frame " << frame_num_; - - auto frame = VideoFrame::Builder() - .set_video_frame_buffer(buffer) - .set_timestamp_rtp(timestamp_rtp_) - .set_timestamp_us((timestamp_rtp_ / k90kHz).us()) - .build(); - - pulled_frames_[timestamp_rtp_] = pulled_frame; - timestamp_rtp_ += k90kHz / framerate; - ++frame_num_; - - return frame; - } - - // Reads frame specified by `timestamp_rtp`, scales it to `resolution` and - // returns. Frame with the given `timestamp_rtp` is expected to be pulled - // before. - VideoFrame GetFrame(uint32_t timestamp_rtp, Resolution resolution) override { - RTC_CHECK(pulled_frames_.find(timestamp_rtp) != pulled_frames_.end()) - << "Frame with RTP timestamp " << timestamp_rtp - << " was not pulled before"; - auto buffer = - frame_reader_->ReadFrame(pulled_frames_[timestamp_rtp], resolution); - return VideoFrame::Builder() - .set_video_frame_buffer(buffer) - .set_timestamp_rtp(timestamp_rtp) - .build(); - } - - protected: - VideoInfo video_info_; - std::unique_ptr frame_reader_; - const std::map& frame_settings_; - int num_frames_; - int frame_num_; - uint32_t timestamp_rtp_; - std::map pulled_frames_; -}; - -class TestEncoder : public VideoCodecTester::Encoder, - public EncodedImageCallback { - public: - TestEncoder(std::unique_ptr encoder, - const std::string codec_type, - const std::map& frame_settings) - : encoder_(std::move(encoder)), - codec_type_(codec_type), - frame_settings_(frame_settings), - frame_num_(0) { - // Ensure settings for the first frame is provided. 
- RTC_CHECK_GT(frame_settings_.size(), 0u); - RTC_CHECK_EQ(frame_settings_.begin()->first, 0); - - encoder_->RegisterEncodeCompleteCallback(this); - } - - void Initialize() override { - const EncodingSettings& first_frame_settings = frame_settings_.at(0); - Configure(first_frame_settings); - SetRates(first_frame_settings); - } - - void Encode(const VideoFrame& frame, EncodeCallback callback) override { - { - MutexLock lock(&mutex_); - callbacks_[frame.timestamp()] = std::move(callback); - } - - if (auto fs = frame_settings_.find(frame_num_); - fs != frame_settings_.begin() && fs != frame_settings_.end()) { - if (!fs->second.IsSameSettings(std::prev(fs)->second)) { - Configure(fs->second); - } else if (!fs->second.IsSameRate(std::prev(fs)->second)) { - SetRates(fs->second); - } - } - - encoder_->Encode(frame, nullptr); - ++frame_num_; - } - - void Flush() override { - // TODO(webrtc:14852): For codecs which buffer frames we need a to - // flush them to get last frames. Add such functionality to VideoEncoder - // API. On Android it will map directly to `MediaCodec.flush()`. - encoder_->Release(); - } - - VideoEncoder* encoder() { return encoder_.get(); } - - protected: - Result OnEncodedImage(const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info) override { - MutexLock lock(&mutex_); - auto cb = callbacks_.find(encoded_image.Timestamp()); - RTC_CHECK(cb != callbacks_.end()); - cb->second(encoded_image); - - callbacks_.erase(callbacks_.begin(), cb); - return Result(Result::Error::OK); - } - - void Configure(const EncodingSettings& es) { - VideoCodec vc; - const EncodingSettings::LayerSettings& layer_settings = - es.layer_settings.begin()->second; - vc.width = layer_settings.resolution.width; - vc.height = layer_settings.resolution.height; - const DataRate& bitrate = layer_settings.bitrate; - vc.startBitrate = bitrate.kbps(); - vc.maxBitrate = bitrate.kbps(); - vc.minBitrate = 0; - vc.maxFramerate = static_cast(layer_settings.framerate.hertz()); - vc.active = true; - vc.qpMax = 63; - vc.numberOfSimulcastStreams = 0; - vc.mode = webrtc::VideoCodecMode::kRealtimeVideo; - vc.SetFrameDropEnabled(true); - vc.SetScalabilityMode(es.scalability_mode); - - vc.codecType = PayloadStringToCodecType(codec_type_); - if (vc.codecType == kVideoCodecVP8) { - *(vc.VP8()) = VideoEncoder::GetDefaultVp8Settings(); - } else if (vc.codecType == kVideoCodecVP9) { - *(vc.VP9()) = VideoEncoder::GetDefaultVp9Settings(); - } else if (vc.codecType == kVideoCodecH264) { - *(vc.H264()) = VideoEncoder::GetDefaultH264Settings(); - } +static constexpr Frequency k90kHz = Frequency::Hertz(90000); - VideoEncoder::Settings ves( - VideoEncoder::Capabilities(/*loss_notification=*/false), - /*number_of_cores=*/1, - /*max_payload_size=*/1440); - - int result = encoder_->InitEncode(&vc, ves); - ASSERT_EQ(result, WEBRTC_VIDEO_CODEC_OK); +VideoSourceSettings ToSourceSettings(VideoInfo video_info) { + return VideoSourceSettings{.file_path = ResourcePath(video_info.name, "yuv"), + .resolution = video_info.resolution, + .framerate = video_info.framerate}; +} - SetRates(es); +std::string CodecNameToCodecType(std::string name) { + if (name.find("av1") != std::string::npos) { + return "AV1"; } - - void SetRates(const EncodingSettings& es) { - VideoEncoder::RateControlParameters rc; - int num_spatial_layers = - ScalabilityModeToNumSpatialLayers(es.scalability_mode); - int num_temporal_layers = - ScalabilityModeToNumSpatialLayers(es.scalability_mode); - for (int sidx = 0; sidx < num_spatial_layers; ++sidx) { - for 
(int tidx = 0; tidx < num_temporal_layers; ++tidx) { - auto layer_settings = - es.layer_settings.find({.spatial_idx = sidx, .temporal_idx = tidx}); - RTC_CHECK(layer_settings != es.layer_settings.end()) - << "Bitrate for layer S=" << sidx << " T=" << tidx << " is not set"; - rc.bitrate.SetBitrate(sidx, tidx, layer_settings->second.bitrate.bps()); - } - } - - rc.framerate_fps = - es.layer_settings.begin()->second.framerate.millihertz() / 1000.0; - encoder_->SetRates(rc); + if (name.find("vp9") != std::string::npos) { + return "VP9"; } - - std::unique_ptr encoder_; - const std::string codec_type_; - const std::map& frame_settings_; - int frame_num_; - std::map callbacks_ RTC_GUARDED_BY(mutex_); - Mutex mutex_; -}; - -class TestDecoder : public VideoCodecTester::Decoder, - public DecodedImageCallback { - public: - TestDecoder(std::unique_ptr decoder, - const std::string codec_type) - : decoder_(std::move(decoder)), codec_type_(codec_type) { - decoder_->RegisterDecodeCompleteCallback(this); - } - - void Initialize() override { - VideoDecoder::Settings ds; - ds.set_codec_type(PayloadStringToCodecType(codec_type_)); - ds.set_number_of_cores(1); - ds.set_max_render_resolution({1280, 720}); - - bool result = decoder_->Configure(ds); - ASSERT_TRUE(result); + if (name.find("vp8") != std::string::npos) { + return "VP8"; } - - void Decode(const EncodedImage& frame, DecodeCallback callback) override { - { - MutexLock lock(&mutex_); - callbacks_[frame.Timestamp()] = std::move(callback); - } - - decoder_->Decode(frame, /*render_time_ms=*/0); + if (name.find("h264") != std::string::npos) { + return "H264"; } - - void Flush() override { - // TODO(webrtc:14852): For codecs which buffer frames we need a to - // flush them to get last frames. Add such functionality to VideoDecoder - // API. On Android it will map directly to `MediaCodec.flush()`. - decoder_->Release(); + if (name.find("h265") != std::string::npos) { + return "H265"; } + RTC_CHECK_NOTREACHED(); +} - VideoDecoder* decoder() { return decoder_.get(); } - - protected: - int Decoded(VideoFrame& decoded_frame) override { - MutexLock lock(&mutex_); - auto cb = callbacks_.find(decoded_frame.timestamp()); - RTC_CHECK(cb != callbacks_.end()); - cb->second(decoded_frame); - - callbacks_.erase(callbacks_.begin(), cb); - return WEBRTC_VIDEO_CODEC_OK; +// TODO(webrtc:14852): Make Create[Encoder,Decoder]Factory to work with codec +// name directly. 
+std::string CodecNameToCodecImpl(std::string name) { + if (name.find("hw") != std::string::npos) { + return "mediacodec"; } - - std::unique_ptr decoder_; - const std::string codec_type_; - std::map callbacks_ RTC_GUARDED_BY(mutex_); - Mutex mutex_; -}; - -std::unique_ptr CreateVideoSource( - const VideoInfo& video, - const std::map& frame_settings, - int num_frames) { - return std::make_unique(video, frame_settings, - num_frames); + return "builtin"; } -std::unique_ptr CreateEncoder( - std::string type, - std::string impl, - const std::map& frame_settings) { - std::unique_ptr factory; +std::unique_ptr CreateEncoderFactory(std::string impl) { if (impl == "builtin") { - factory = std::make_unique(); - } else if (impl == "mediacodec") { + return CreateBuiltinVideoEncoderFactory(); + } #if defined(WEBRTC_ANDROID) - InitializeAndroidObjects(); - factory = CreateAndroidEncoderFactory(); + InitializeAndroidObjects(); + return CreateAndroidEncoderFactory(); +#else + return nullptr; #endif - } - std::unique_ptr encoder = - factory->CreateVideoEncoder(SdpVideoFormat(type)); - if (encoder == nullptr) { - return nullptr; - } - return std::make_unique(std::move(encoder), type, - frame_settings); } -std::unique_ptr CreateDecoder(std::string type, std::string impl) { - std::unique_ptr factory; +std::unique_ptr CreateDecoderFactory(std::string impl) { if (impl == "builtin") { - factory = std::make_unique(); - } else if (impl == "mediacodec") { + return CreateBuiltinVideoDecoderFactory(); + } #if defined(WEBRTC_ANDROID) - InitializeAndroidObjects(); - factory = CreateAndroidDecoderFactory(); + InitializeAndroidObjects(); + return CreateAndroidDecoderFactory(); +#else + return nullptr; #endif - } - std::unique_ptr decoder = - factory->CreateVideoDecoder(SdpVideoFormat(type)); - if (decoder == nullptr) { - return nullptr; - } - return std::make_unique(std::move(decoder), type); } -void SetTargetRates(const std::map& frame_settings, - std::vector& frames) { - for (VideoCodecStats::Frame& f : frames) { - const EncodingSettings& encoding_settings = - std::prev(frame_settings.upper_bound(f.frame_num))->second; - LayerId layer_id = {.spatial_idx = f.spatial_idx, - .temporal_idx = f.temporal_idx}; - RTC_CHECK(encoding_settings.layer_settings.find(layer_id) != - encoding_settings.layer_settings.end()) - << "Frame frame_num=" << f.frame_num - << " belongs to spatial_idx=" << f.spatial_idx - << " temporal_idx=" << f.temporal_idx - << " but settings for this layer are not provided."; - const EncodingSettings::LayerSettings& layer_settings = - encoding_settings.layer_settings.at(layer_id); - f.target_bitrate = layer_settings.bitrate; - f.target_framerate = layer_settings.framerate; +std::string TestName() { + std::string test_name = absl::GetFlag(FLAGS_test_name); + if (!test_name.empty()) { + return test_name; } + return ::testing::UnitTest::GetInstance()->current_test_info()->name(); } std::string TestOutputPath() { std::string output_path = - OutputPath() + - ::testing::UnitTest::GetInstance()->current_test_info()->name(); + (StringBuilder() << OutputPath() << TestName()).str(); std::string output_dir = DirName(output_path); bool result = CreateDir(output_dir); RTC_CHECK(result) << "Cannot create " << output_dir; @@ -464,116 +193,119 @@ std::string TestOutputPath() { } // namespace std::unique_ptr RunEncodeDecodeTest( - std::string codec_type, - std::string codec_impl, - const VideoInfo& video_info, - const std::map& frame_settings, - int num_frames, - bool save_codec_input, - bool save_codec_output) { - 
std::unique_ptr video_source = - CreateVideoSource(video_info, frame_settings, num_frames); - - std::unique_ptr encoder = - CreateEncoder(codec_type, codec_impl, frame_settings); - if (encoder == nullptr) { + const Environment& env, + std::string encoder_impl, + std::string decoder_impl, + const VideoSourceSettings& source_settings, + const std::map& encoding_settings) { + const SdpVideoFormat& sdp_video_format = + encoding_settings.begin()->second.sdp_video_format; + + std::unique_ptr encoder_factory = + CreateEncoderFactory(encoder_impl); + if (!encoder_factory + ->QueryCodecSupport(sdp_video_format, + /*scalability_mode=*/std::nullopt) + .is_supported) { + RTC_LOG(LS_WARNING) << "No " << encoder_impl << " encoder for video format " + << sdp_video_format.ToString(); return nullptr; } - std::unique_ptr decoder = CreateDecoder(codec_type, codec_impl); - if (decoder == nullptr) { - // If platform decoder is not available try built-in one. - if (codec_impl == "builtin") { - return nullptr; - } - - decoder = CreateDecoder(codec_type, "builtin"); - if (decoder == nullptr) { + std::unique_ptr decoder_factory = + CreateDecoderFactory(decoder_impl); + if (!decoder_factory + ->QueryCodecSupport(sdp_video_format, + /*reference_scaling=*/false) + .is_supported) { + RTC_LOG(LS_WARNING) << "No " << decoder_impl << " decoder for video format " + << sdp_video_format.ToString() + << ". Trying built-in decoder."; + // TODO(ssilkin): No H264 support in ffmpeg on ARM. Consider trying HW + // decoder. + decoder_factory = CreateDecoderFactory("builtin"); + if (!decoder_factory + ->QueryCodecSupport(sdp_video_format, + /*reference_scaling=*/false) + .is_supported) { + RTC_LOG(LS_WARNING) << "No " << decoder_impl + << " decoder for video format " + << sdp_video_format.ToString(); return nullptr; } } - RTC_LOG(LS_INFO) << "Encoder implementation: " - << encoder->encoder()->GetEncoderInfo().implementation_name; - RTC_LOG(LS_INFO) << "Decoder implementation: " - << decoder->decoder()->GetDecoderInfo().implementation_name; + std::string output_path = TestOutputPath(); VideoCodecTester::EncoderSettings encoder_settings; - encoder_settings.pacing.mode = - encoder->encoder()->GetEncoderInfo().is_hardware_accelerated - ? PacingMode::kRealTime - : PacingMode::kNoPacing; + encoder_settings.pacing_settings.mode = + encoder_impl == "builtin" ? PacingMode::kNoPacing : PacingMode::kRealTime; + if (absl::GetFlag(FLAGS_dump_encoder_input)) { + encoder_settings.encoder_input_base_path = output_path + "_enc_input"; + } + if (absl::GetFlag(FLAGS_dump_encoder_output)) { + encoder_settings.encoder_output_base_path = output_path + "_enc_output"; + } VideoCodecTester::DecoderSettings decoder_settings; - decoder_settings.pacing.mode = - decoder->decoder()->GetDecoderInfo().is_hardware_accelerated - ? PacingMode::kRealTime - : PacingMode::kNoPacing; - - std::string output_path = TestOutputPath(); - if (save_codec_input) { - encoder_settings.encoder_input_base_path = output_path + "_enc_input"; + decoder_settings.pacing_settings.mode = + decoder_impl == "builtin" ? 
PacingMode::kNoPacing : PacingMode::kRealTime; + if (absl::GetFlag(FLAGS_dump_decoder_input)) { decoder_settings.decoder_input_base_path = output_path + "_dec_input"; } - if (save_codec_output) { - encoder_settings.encoder_output_base_path = output_path + "_enc_output"; + if (absl::GetFlag(FLAGS_dump_decoder_output)) { decoder_settings.decoder_output_base_path = output_path + "_dec_output"; } - std::unique_ptr tester = CreateVideoCodecTester(); - return tester->RunEncodeDecodeTest(video_source.get(), encoder.get(), - decoder.get(), encoder_settings, - decoder_settings); + return VideoCodecTester::RunEncodeDecodeTest( + env, source_settings, encoder_factory.get(), decoder_factory.get(), + encoder_settings, decoder_settings, encoding_settings); } std::unique_ptr RunEncodeTest( - std::string codec_type, - std::string codec_impl, - const VideoInfo& video_info, - const std::map& frame_settings, - int num_frames, - bool save_codec_input, - bool save_codec_output) { - std::unique_ptr video_source = - CreateVideoSource(video_info, frame_settings, num_frames); - - std::unique_ptr encoder = - CreateEncoder(codec_type, codec_impl, frame_settings); - if (encoder == nullptr) { + const Environment& env, + std::string encoder_impl, + const VideoSourceSettings& source_settings, + const std::map& encoding_settings) { + const SdpVideoFormat& sdp_video_format = + encoding_settings.begin()->second.sdp_video_format; + + std::unique_ptr encoder_factory = + CreateEncoderFactory(encoder_impl); + if (!encoder_factory + ->QueryCodecSupport(sdp_video_format, + /*scalability_mode=*/std::nullopt) + .is_supported) { + RTC_LOG(LS_WARNING) << "No encoder for video format " + << sdp_video_format.ToString(); return nullptr; } - RTC_LOG(LS_INFO) << "Encoder implementation: " - << encoder->encoder()->GetEncoderInfo().implementation_name; - - VideoCodecTester::EncoderSettings encoder_settings; - encoder_settings.pacing.mode = - encoder->encoder()->GetEncoderInfo().is_hardware_accelerated - ? PacingMode::kRealTime - : PacingMode::kNoPacing; - std::string output_path = TestOutputPath(); - if (save_codec_input) { + VideoCodecTester::EncoderSettings encoder_settings; + encoder_settings.pacing_settings.mode = + encoder_impl == "builtin" ? 
PacingMode::kNoPacing : PacingMode::kRealTime; + if (absl::GetFlag(FLAGS_dump_encoder_input)) { encoder_settings.encoder_input_base_path = output_path + "_enc_input"; } - if (save_codec_output) { + if (absl::GetFlag(FLAGS_dump_encoder_output)) { encoder_settings.encoder_output_base_path = output_path + "_enc_output"; } - std::unique_ptr tester = CreateVideoCodecTester(); - return tester->RunEncodeTest(video_source.get(), encoder.get(), - encoder_settings); + return VideoCodecTester::RunEncodeTest(env, source_settings, + encoder_factory.get(), + encoder_settings, encoding_settings); } -class SpatialQualityTest : public ::testing::TestWithParam< - std::tuple>> { +class SpatialQualityTest : public ::testing::TestWithParam>> { public: static std::string TestParamsToString( const ::testing::TestParamInfo& info) { @@ -588,42 +320,48 @@ class SpatialQualityTest : public ::testing::TestWithParam< }; TEST_P(SpatialQualityTest, SpatialQuality) { + const Environment env = CreateEnvironment(); auto [codec_type, codec_impl, video_info, coding_settings] = GetParam(); - auto [width, height, framerate_fps, bitrate_kbps, psnr] = coding_settings; - - std::map frame_settings = { - {0, - {.scalability_mode = ScalabilityMode::kL1T1, - .layer_settings = { - {LayerId{.spatial_idx = 0, .temporal_idx = 0}, - {.resolution = {.width = width, .height = height}, - .framerate = Frequency::MilliHertz(1000 * framerate_fps), - .bitrate = DataRate::KilobitsPerSec(bitrate_kbps)}}}}}}; - + auto [width, height, framerate_fps, bitrate_kbps, expected_min_psnr] = + coding_settings; int duration_s = 10; int num_frames = duration_s * framerate_fps; + VideoSourceSettings source_settings = ToSourceSettings(video_info); + + EncodingSettings encoding_settings = VideoCodecTester::CreateEncodingSettings( + env, codec_type, /*scalability_mode=*/"L1T1", width, height, + {DataRate::KilobitsPerSec(bitrate_kbps)}, + Frequency::Hertz(framerate_fps)); + + std::map frame_settings = + VideoCodecTester::CreateFrameSettings(encoding_settings, num_frames); + std::unique_ptr stats = RunEncodeDecodeTest( - codec_type, codec_impl, video_info, frame_settings, num_frames, - /*save_codec_input=*/false, /*save_codec_output=*/false); + env, codec_impl, codec_impl, source_settings, frame_settings); VideoCodecStats::Stream stream; if (stats != nullptr) { - std::vector frames = stats->Slice(); - SetTargetRates(frame_settings, frames); - stream = stats->Aggregate(frames); - if (field_trial::IsEnabled("WebRTC-QuickPerfTest")) { - EXPECT_GE(stream.psnr.y.GetAverage(), psnr); + stream = stats->Aggregate(Filter{}); + if (absl::GetFlag(FLAGS_webrtc_quick_perf_test)) { + EXPECT_GE(stream.psnr.y.GetAverage(), expected_min_psnr); } } stream.LogMetrics( GetGlobalMetricsLogger(), ::testing::UnitTest::GetInstance()->current_test_info()->name(), + /*prefix=*/"", /*metadata=*/ - {{"codec_type", codec_type}, - {"codec_impl", codec_impl}, - {"video_name", video_info.name}}); + {{"video_name", video_info.name}, + {"codec_type", codec_type}, + {"codec_impl", codec_impl}}); + + if (absl::GetFlag(FLAGS_write_csv)) { + stats->LogMetrics((StringBuilder() << TestOutputPath() << ".csv").str(), + stats->Slice(Filter{}, /*merge=*/false), /*metadata=*/ + {{"test_name", TestName()}}); + } } INSTANTIATE_TEST_SUITE_P( @@ -636,15 +374,15 @@ INSTANTIATE_TEST_SUITE_P( Values("builtin"), #endif Values(kFourPeople_1280x720_30), - Values(std::make_tuple(320, 180, 30, 32, 28), - std::make_tuple(320, 180, 30, 64, 30), - std::make_tuple(320, 180, 30, 128, 33), + Values(std::make_tuple(320, 180, 
30, 32, 26), + std::make_tuple(320, 180, 30, 64, 29), + std::make_tuple(320, 180, 30, 128, 32), std::make_tuple(320, 180, 30, 256, 36), - std::make_tuple(640, 360, 30, 128, 31), + std::make_tuple(640, 360, 30, 128, 29), std::make_tuple(640, 360, 30, 256, 33), std::make_tuple(640, 360, 30, 384, 35), std::make_tuple(640, 360, 30, 512, 36), - std::make_tuple(1280, 720, 30, 256, 32), + std::make_tuple(1280, 720, 30, 256, 30), std::make_tuple(1280, 720, 30, 512, 34), std::make_tuple(1280, 720, 30, 1024, 37), std::make_tuple(1280, 720, 30, 2048, 39))), @@ -668,36 +406,45 @@ class BitrateAdaptationTest TEST_P(BitrateAdaptationTest, BitrateAdaptation) { auto [codec_type, codec_impl, video_info, bitrate_kbps] = GetParam(); + const Environment env = CreateEnvironment(); int duration_s = 10; // Duration of fixed rate interval. - int first_frame = duration_s * video_info.framerate.millihertz() / 1000; - int num_frames = 2 * duration_s * video_info.framerate.millihertz() / 1000; - - std::map frame_settings = { - {0, - {.layer_settings = {{LayerId{.spatial_idx = 0, .temporal_idx = 0}, - {.resolution = {.width = 640, .height = 360}, - .framerate = video_info.framerate, - .bitrate = DataRate::KilobitsPerSec( - bitrate_kbps.first)}}}}}, - {first_frame, - {.layer_settings = { - {LayerId{.spatial_idx = 0, .temporal_idx = 0}, - {.resolution = {.width = 640, .height = 360}, - .framerate = video_info.framerate, - .bitrate = DataRate::KilobitsPerSec(bitrate_kbps.second)}}}}}}; - - std::unique_ptr stats = RunEncodeTest( - codec_type, codec_impl, video_info, frame_settings, num_frames, - /*save_codec_input=*/false, /*save_codec_output=*/false); + int num_frames = + static_cast(duration_s * video_info.framerate.hertz()); + + VideoSourceSettings source_settings = ToSourceSettings(video_info); + + EncodingSettings encoding_settings = VideoCodecTester::CreateEncodingSettings( + env, codec_type, /*scalability_mode=*/"L1T1", + /*width=*/640, /*height=*/360, + {DataRate::KilobitsPerSec(bitrate_kbps.first)}, + /*framerate=*/Frequency::Hertz(30)); + + EncodingSettings encoding_settings2 = + VideoCodecTester::CreateEncodingSettings( + env, codec_type, /*scalability_mode=*/"L1T1", + /*width=*/640, /*height=*/360, + {DataRate::KilobitsPerSec(bitrate_kbps.second)}, + /*framerate=*/Frequency::Hertz(30)); + + std::map frame_settings = + VideoCodecTester::CreateFrameSettings(encoding_settings, num_frames); + + uint32_t timestamp_rtp = + frame_settings.rbegin()->first + k90kHz / Frequency::Hertz(30); + std::map frame_settings2 = + VideoCodecTester::CreateFrameSettings(encoding_settings2, num_frames, + timestamp_rtp); + + frame_settings.merge(frame_settings2); + + std::unique_ptr stats = + RunEncodeTest(env, codec_impl, source_settings, frame_settings); VideoCodecStats::Stream stream; if (stats != nullptr) { - std::vector frames = - stats->Slice(VideoCodecStats::Filter{.first_frame = first_frame}); - SetTargetRates(frame_settings, frames); - stream = stats->Aggregate(frames); - if (field_trial::IsEnabled("WebRTC-QuickPerfTest")) { + stream = stats->Aggregate({.min_timestamp_rtp = timestamp_rtp}); + if (absl::GetFlag(FLAGS_webrtc_quick_perf_test)) { EXPECT_NEAR(stream.bitrate_mismatch_pct.GetAverage(), 0, 10); EXPECT_NEAR(stream.framerate_mismatch_pct.GetAverage(), 0, 10); } @@ -706,12 +453,19 @@ TEST_P(BitrateAdaptationTest, BitrateAdaptation) { stream.LogMetrics( GetGlobalMetricsLogger(), ::testing::UnitTest::GetInstance()->current_test_info()->name(), + /*prefix=*/"", /*metadata=*/ {{"codec_type", codec_type}, {"codec_impl", 
codec_impl}, {"video_name", video_info.name}, {"rate_profile", std::to_string(bitrate_kbps.first) + "," + std::to_string(bitrate_kbps.second)}}); + + if (absl::GetFlag(FLAGS_write_csv)) { + stats->LogMetrics((StringBuilder() << TestOutputPath() << ".csv").str(), + stats->Slice(Filter{}, /*merge=*/false), /*metadata=*/ + {{"test_name", TestName()}}); + } } INSTANTIATE_TEST_SUITE_P(All, @@ -746,37 +500,46 @@ class FramerateAdaptationTest TEST_P(FramerateAdaptationTest, FramerateAdaptation) { auto [codec_type, codec_impl, video_info, framerate_fps] = GetParam(); + const Environment env = CreateEnvironment(); int duration_s = 10; // Duration of fixed rate interval. - int first_frame = static_cast(duration_s * framerate_fps.first); - int num_frames = static_cast( - duration_s * (framerate_fps.first + framerate_fps.second)); - - std::map frame_settings = { - {0, - {.layer_settings = {{LayerId{.spatial_idx = 0, .temporal_idx = 0}, - {.resolution = {.width = 640, .height = 360}, - .framerate = Frequency::MilliHertz( - 1000 * framerate_fps.first), - .bitrate = DataRate::KilobitsPerSec(512)}}}}}, - {first_frame, - {.layer_settings = { - {LayerId{.spatial_idx = 0, .temporal_idx = 0}, - {.resolution = {.width = 640, .height = 360}, - .framerate = Frequency::MilliHertz(1000 * framerate_fps.second), - .bitrate = DataRate::KilobitsPerSec(512)}}}}}}; - - std::unique_ptr stats = RunEncodeTest( - codec_type, codec_impl, video_info, frame_settings, num_frames, - /*save_codec_input=*/false, /*save_codec_output=*/false); + + VideoSourceSettings source_settings = ToSourceSettings(video_info); + + EncodingSettings encoding_settings = VideoCodecTester::CreateEncodingSettings( + env, codec_type, /*scalability_mode=*/"L1T1", + /*width=*/640, /*height=*/360, + /*bitrate=*/{DataRate::KilobitsPerSec(512)}, + Frequency::Hertz(framerate_fps.first)); + + EncodingSettings encoding_settings2 = + VideoCodecTester::CreateEncodingSettings( + env, codec_type, /*scalability_mode=*/"L1T1", + /*width=*/640, /*height=*/360, + /*bitrate=*/{DataRate::KilobitsPerSec(512)}, + Frequency::Hertz(framerate_fps.second)); + + int num_frames = static_cast(duration_s * framerate_fps.first); + std::map frame_settings = + VideoCodecTester::CreateFrameSettings(encoding_settings, num_frames); + + uint32_t timestamp_rtp = frame_settings.rbegin()->first + + k90kHz / Frequency::Hertz(framerate_fps.first); + + num_frames = static_cast(duration_s * framerate_fps.second); + std::map frame_settings2 = + VideoCodecTester::CreateFrameSettings(encoding_settings2, num_frames, + timestamp_rtp); + + frame_settings.merge(frame_settings2); + + std::unique_ptr stats = + RunEncodeTest(env, codec_impl, source_settings, frame_settings); VideoCodecStats::Stream stream; if (stats != nullptr) { - std::vector frames = - stats->Slice(VideoCodecStats::Filter{.first_frame = first_frame}); - SetTargetRates(frame_settings, frames); - stream = stats->Aggregate(frames); - if (field_trial::IsEnabled("WebRTC-QuickPerfTest")) { + stream = stats->Aggregate({.min_timestamp_rtp = timestamp_rtp}); + if (absl::GetFlag(FLAGS_webrtc_quick_perf_test)) { EXPECT_NEAR(stream.bitrate_mismatch_pct.GetAverage(), 0, 10); EXPECT_NEAR(stream.framerate_mismatch_pct.GetAverage(), 0, 10); } @@ -785,12 +548,19 @@ TEST_P(FramerateAdaptationTest, FramerateAdaptation) { stream.LogMetrics( GetGlobalMetricsLogger(), ::testing::UnitTest::GetInstance()->current_test_info()->name(), + /*prefix=*/"", /*metadata=*/ {{"codec_type", codec_type}, {"codec_impl", codec_impl}, {"video_name", video_info.name}, 
{"rate_profile", std::to_string(framerate_fps.first) + "," + std::to_string(framerate_fps.second)}}); + + if (absl::GetFlag(FLAGS_write_csv)) { + stats->LogMetrics((StringBuilder() << TestOutputPath() << ".csv").str(), + stats->Slice(Filter{}, /*merge=*/false), /*metadata=*/ + {{"test_name", TestName()}}); + } } INSTANTIATE_TEST_SUITE_P(All, @@ -805,6 +575,95 @@ INSTANTIATE_TEST_SUITE_P(All, Values(std::pair(30, 15), std::pair(15, 30))), FramerateAdaptationTest::TestParamsToString); +TEST(VideoCodecTest, DISABLED_EncodeDecode) { + ScopedFieldTrials field_trials(absl::GetFlag(FLAGS_field_trials)); + const Environment env = + CreateEnvironment(std::make_unique( + absl::GetFlag(FLAGS_field_trials))); + + VideoSourceSettings source_settings{ + .file_path = absl::GetFlag(FLAGS_input_path), + .resolution = {.width = absl::GetFlag(FLAGS_input_width), + .height = absl::GetFlag(FLAGS_input_height)}, + .framerate = + Frequency::Hertz(absl::GetFlag(FLAGS_input_framerate_fps))}; + + std::vector bitrate_str = absl::GetFlag(FLAGS_bitrate_kbps); + std::vector bitrate; + std::transform(bitrate_str.begin(), bitrate_str.end(), + std::back_inserter(bitrate), [](const std::string& str) { + return DataRate::KilobitsPerSec(std::stoi(str)); + }); + + Frequency framerate = Frequency::Hertz( + absl::GetFlag(FLAGS_framerate_fps) + .value_or(absl::GetFlag(FLAGS_input_framerate_fps))); + + EncodingSettings encoding_settings = VideoCodecTester::CreateEncodingSettings( + env, CodecNameToCodecType(absl::GetFlag(FLAGS_encoder)), + absl::GetFlag(FLAGS_scalability_mode), + absl::GetFlag(FLAGS_width).value_or(absl::GetFlag(FLAGS_input_width)), + absl::GetFlag(FLAGS_height).value_or(absl::GetFlag(FLAGS_input_height)), + {bitrate}, framerate, absl::GetFlag(FLAGS_screencast), + absl::GetFlag(FLAGS_frame_drop)); + + int num_frames = absl::GetFlag(FLAGS_num_frames); + int key_interval = absl::GetFlag(FLAGS_key_interval); + uint32_t timestamp_rtp = 90000; + std::map frame_settings; + for (int frame_num = 0; frame_num < num_frames; ++frame_num) { + encoding_settings.keyframe = + (key_interval > 0 && (frame_num % key_interval) == 0); + frame_settings.emplace(timestamp_rtp, encoding_settings); + timestamp_rtp += k90kHz / framerate; + } + + std::unique_ptr stats; + std::string decoder = absl::GetFlag(FLAGS_decoder); + if (decoder == "null") { + stats = + RunEncodeTest(env, CodecNameToCodecImpl(absl::GetFlag(FLAGS_encoder)), + source_settings, frame_settings); + } else { + // TODO(webrtc:14852): Pass encoder and decoder names directly, and update + // logged test name (implies lossing history in the chromeperf dashboard). + // Sync with changes in Stream::LogMetrics (see TODOs there). + stats = RunEncodeDecodeTest( + env, CodecNameToCodecImpl(absl::GetFlag(FLAGS_encoder)), + CodecNameToCodecImpl(decoder), source_settings, frame_settings); + } + ASSERT_NE(nullptr, stats); + + // Log unsliced metrics. + VideoCodecStats::Stream stream = stats->Aggregate(Filter{}); + stream.LogMetrics(GetGlobalMetricsLogger(), TestName(), /*prefix=*/"", + /*metadata=*/{}); + + // Log metrics sliced on spatial and temporal layer. 
+ ScalabilityMode scalability_mode = + *ScalabilityModeFromString(absl::GetFlag(FLAGS_scalability_mode)); + int num_spatial_layers = ScalabilityModeToNumSpatialLayers(scalability_mode); + int num_temporal_layers = + ScalabilityModeToNumTemporalLayers(scalability_mode); + for (int sidx = 0; sidx < num_spatial_layers; ++sidx) { + for (int tidx = 0; tidx < num_temporal_layers; ++tidx) { + std::string metric_name_prefix = + (StringBuilder() << "s" << sidx << "t" << tidx << "_").str(); + stream = stats->Aggregate( + {.layer_id = {{.spatial_idx = sidx, .temporal_idx = tidx}}}); + stream.LogMetrics(GetGlobalMetricsLogger(), TestName(), + metric_name_prefix, + /*metadata=*/{}); + } + } + + if (absl::GetFlag(FLAGS_write_csv)) { + stats->LogMetrics((StringBuilder() << TestOutputPath() << ".csv").str(), + stats->Slice(Filter{}, /*merge=*/false), /*metadata=*/ + {{"test_name", TestName()}}); + } +} + } // namespace test } // namespace webrtc diff --git a/modules/video_coding/codecs/test/video_codec_tester_impl.cc b/modules/video_coding/codecs/test/video_codec_tester_impl.cc deleted file mode 100644 index fdfee3d028..0000000000 --- a/modules/video_coding/codecs/test/video_codec_tester_impl.cc +++ /dev/null @@ -1,437 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/video_coding/codecs/test/video_codec_tester_impl.h" - -#include -#include -#include -#include - -#include "api/task_queue/default_task_queue_factory.h" -#include "api/units/frequency.h" -#include "api/units/time_delta.h" -#include "api/units/timestamp.h" -#include "api/video/encoded_image.h" -#include "api/video/i420_buffer.h" -#include "api/video/video_codec_type.h" -#include "api/video/video_frame.h" -#include "modules/video_coding/codecs/test/video_codec_analyzer.h" -#include "modules/video_coding/utility/ivf_file_writer.h" -#include "rtc_base/event.h" -#include "rtc_base/time_utils.h" -#include "system_wrappers/include/sleep.h" -#include "test/testsupport/video_frame_writer.h" - -namespace webrtc { -namespace test { - -namespace { -using RawVideoSource = VideoCodecTester::RawVideoSource; -using CodedVideoSource = VideoCodecTester::CodedVideoSource; -using Decoder = VideoCodecTester::Decoder; -using Encoder = VideoCodecTester::Encoder; -using EncoderSettings = VideoCodecTester::EncoderSettings; -using DecoderSettings = VideoCodecTester::DecoderSettings; -using PacingSettings = VideoCodecTester::PacingSettings; -using PacingMode = PacingSettings::PacingMode; - -constexpr Frequency k90kHz = Frequency::Hertz(90000); - -// A thread-safe wrapper for video source to be shared with the quality analyzer -// that reads reference frames from a separate thread. 
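The --write_csv block is repeated verbatim at the end of each test above. As a sketch only (the free function and its name are assumptions, not part of the patch), it could be factored into one helper that uses calls already present in this file:

// Sketch only: wraps the repeated `--write_csv` dump from the tests above.
void MaybeWriteCsv(VideoCodecStats& stats) {
  if (!absl::GetFlag(FLAGS_write_csv)) {
    return;
  }
  stats.LogMetrics((StringBuilder() << TestOutputPath() << ".csv").str(),
                   stats.Slice(Filter{}, /*merge=*/false),
                   /*metadata=*/{{"test_name", TestName()}});
}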
-class SyncRawVideoSource : public VideoCodecAnalyzer::ReferenceVideoSource { - public: - explicit SyncRawVideoSource(RawVideoSource* video_source) - : video_source_(video_source) {} - - absl::optional PullFrame() { - MutexLock lock(&mutex_); - return video_source_->PullFrame(); - } - - VideoFrame GetFrame(uint32_t timestamp_rtp, Resolution resolution) override { - MutexLock lock(&mutex_); - return video_source_->GetFrame(timestamp_rtp, resolution); - } - - protected: - RawVideoSource* const video_source_ RTC_GUARDED_BY(mutex_); - Mutex mutex_; -}; - -// Pacer calculates delay necessary to keep frame encode or decode call spaced -// from the previous calls by the pacing time. `Delay` is expected to be called -// as close as possible to posting frame encode or decode task. This class is -// not thread safe. -class Pacer { - public: - explicit Pacer(PacingSettings settings) - : settings_(settings), delay_(TimeDelta::Zero()) {} - Timestamp Schedule(Timestamp timestamp) { - Timestamp now = Timestamp::Micros(rtc::TimeMicros()); - if (settings_.mode == PacingMode::kNoPacing) { - return now; - } - - Timestamp scheduled = now; - if (prev_scheduled_) { - scheduled = *prev_scheduled_ + PacingTime(timestamp); - if (scheduled < now) { - scheduled = now; - } - } - - prev_timestamp_ = timestamp; - prev_scheduled_ = scheduled; - return scheduled; - } - - private: - TimeDelta PacingTime(Timestamp timestamp) { - if (settings_.mode == PacingMode::kRealTime) { - return timestamp - *prev_timestamp_; - } - RTC_CHECK_EQ(PacingMode::kConstantRate, settings_.mode); - return 1 / settings_.constant_rate; - } - - PacingSettings settings_; - absl::optional prev_timestamp_; - absl::optional prev_scheduled_; - TimeDelta delay_; -}; - -// Task queue that keeps the number of queued tasks below a certain limit. If -// the limit is reached, posting of a next task is blocked until execution of a -// previously posted task starts. This class is not thread-safe. -class LimitedTaskQueue { - public: - // The codec tester reads frames from video source in the main thread. - // Encoding and decoding are done in separate threads. If encoding or - // decoding is slow, the reading may go far ahead and may buffer too many - // frames in memory. To prevent this we limit the encoding/decoding queue - // size. When the queue is full, the main thread and, hence, reading frames - // from video source is blocked until a previously posted encoding/decoding - // task starts. 
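The comment above describes a back-pressure mechanism: once too many encode or decode tasks are queued, the thread that reads frames from the video source is blocked, so a fast reader cannot buffer an unbounded number of frames ahead of a slow codec. The deleted LimitedTaskQueue below implements this with rtc::Event and TaskQueueForTest; the following standard-library-only sketch (hypothetical, not WebRTC API) shows the same idea in isolation:

#include <condition_variable>
#include <cstddef>
#include <deque>
#include <functional>
#include <mutex>
#include <utility>

// Sketch only: a task queue that blocks the producer once `max_size` tasks
// are pending, illustrating the throttling described in the comment above.
class BoundedTaskQueue {
 public:
  explicit BoundedTaskQueue(size_t max_size) : max_size_(max_size) {}

  // Producer side: blocks while the queue is full, which throttles the thread
  // that reads frames from the video source.
  void Post(std::function<void()> task) {
    std::unique_lock<std::mutex> lock(mutex_);
    not_full_.wait(lock, [this] { return tasks_.size() < max_size_; });
    tasks_.push_back(std::move(task));
  }

  // Consumer side: runs one queued task if available and frees a slot.
  bool RunOne() {
    std::function<void()> task;
    {
      std::lock_guard<std::mutex> lock(mutex_);
      if (tasks_.empty())
        return false;
      task = std::move(tasks_.front());
      tasks_.pop_front();
    }
    not_full_.notify_one();
    task();
    return true;
  }

 private:
  const size_t max_size_;
  std::mutex mutex_;
  std::condition_variable not_full_;
  std::deque<std::function<void()>> tasks_;
};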
- static constexpr int kMaxTaskQueueSize = 3; - - LimitedTaskQueue() : queue_size_(0) {} - - void PostScheduledTask(absl::AnyInvocable task, Timestamp start) { - ++queue_size_; - task_queue_.PostTask([this, task = std::move(task), start]() mutable { - int wait_ms = static_cast(start.ms() - rtc::TimeMillis()); - if (wait_ms > 0) { - SleepMs(wait_ms); - } - - std::move(task)(); - --queue_size_; - task_executed_.Set(); - }); - - task_executed_.Reset(); - if (queue_size_ > kMaxTaskQueueSize) { - task_executed_.Wait(rtc::Event::kForever); - } - RTC_CHECK(queue_size_ <= kMaxTaskQueueSize); - } - - void WaitForPreviouslyPostedTasks() { - task_queue_.SendTask([] {}); - } - - TaskQueueForTest task_queue_; - std::atomic_int queue_size_; - rtc::Event task_executed_; -}; - -class TesterY4mWriter { - public: - explicit TesterY4mWriter(absl::string_view base_path) - : base_path_(base_path) {} - - ~TesterY4mWriter() { - task_queue_.SendTask([] {}); - } - - void Write(const VideoFrame& frame, int spatial_idx) { - task_queue_.PostTask([this, frame, spatial_idx] { - if (y4m_writers_.find(spatial_idx) == y4m_writers_.end()) { - std::string file_path = - base_path_ + "_s" + std::to_string(spatial_idx) + ".y4m"; - - Y4mVideoFrameWriterImpl* y4m_writer = new Y4mVideoFrameWriterImpl( - file_path, frame.width(), frame.height(), /*fps=*/30); - RTC_CHECK(y4m_writer); - - y4m_writers_[spatial_idx] = - std::unique_ptr(y4m_writer); - } - - y4m_writers_.at(spatial_idx)->WriteFrame(frame); - }); - } - - protected: - std::string base_path_; - std::map> y4m_writers_; - TaskQueueForTest task_queue_; -}; - -class TesterIvfWriter { - public: - explicit TesterIvfWriter(absl::string_view base_path) - : base_path_(base_path) {} - - ~TesterIvfWriter() { - task_queue_.SendTask([] {}); - } - - void Write(const EncodedImage& encoded_frame) { - task_queue_.PostTask([this, encoded_frame] { - int spatial_idx = encoded_frame.SpatialIndex().value_or(0); - if (ivf_file_writers_.find(spatial_idx) == ivf_file_writers_.end()) { - std::string ivf_path = - base_path_ + "_s" + std::to_string(spatial_idx) + ".ivf"; - - FileWrapper ivf_file = FileWrapper::OpenWriteOnly(ivf_path); - RTC_CHECK(ivf_file.is_open()); - - std::unique_ptr ivf_writer = - IvfFileWriter::Wrap(std::move(ivf_file), /*byte_limit=*/0); - RTC_CHECK(ivf_writer); - - ivf_file_writers_[spatial_idx] = std::move(ivf_writer); - } - - // To play: ffplay -vcodec vp8|vp9|av1|hevc|h264 filename - ivf_file_writers_.at(spatial_idx) - ->WriteFrame(encoded_frame, VideoCodecType::kVideoCodecGeneric); - }); - } - - protected: - std::string base_path_; - std::map> ivf_file_writers_; - TaskQueueForTest task_queue_; -}; - -class TesterDecoder { - public: - TesterDecoder(Decoder* decoder, - VideoCodecAnalyzer* analyzer, - const DecoderSettings& settings) - : decoder_(decoder), - analyzer_(analyzer), - settings_(settings), - pacer_(settings.pacing) { - RTC_CHECK(analyzer_) << "Analyzer must be provided"; - - if (settings.decoder_input_base_path) { - input_writer_ = - std::make_unique(*settings.decoder_input_base_path); - } - - if (settings.decoder_output_base_path) { - output_writer_ = - std::make_unique(*settings.decoder_output_base_path); - } - } - - void Initialize() { - task_queue_.PostScheduledTask([this] { decoder_->Initialize(); }, - Timestamp::Zero()); - task_queue_.WaitForPreviouslyPostedTasks(); - } - - void Decode(const EncodedImage& input_frame) { - Timestamp timestamp = - Timestamp::Micros((input_frame.Timestamp() / k90kHz).us()); - - task_queue_.PostScheduledTask( - [this, 
input_frame] { - analyzer_->StartDecode(input_frame); - - decoder_->Decode( - input_frame, - [this, spatial_idx = input_frame.SpatialIndex().value_or(0)]( - const VideoFrame& output_frame) { - analyzer_->FinishDecode(output_frame, spatial_idx); - - if (output_writer_) { - output_writer_->Write(output_frame, spatial_idx); - } - }); - - if (input_writer_) { - input_writer_->Write(input_frame); - } - }, - pacer_.Schedule(timestamp)); - } - - void Flush() { - task_queue_.PostScheduledTask([this] { decoder_->Flush(); }, - Timestamp::Zero()); - task_queue_.WaitForPreviouslyPostedTasks(); - } - - protected: - Decoder* const decoder_; - VideoCodecAnalyzer* const analyzer_; - const DecoderSettings& settings_; - Pacer pacer_; - LimitedTaskQueue task_queue_; - std::unique_ptr input_writer_; - std::unique_ptr output_writer_; -}; - -class TesterEncoder { - public: - TesterEncoder(Encoder* encoder, - TesterDecoder* decoder, - VideoCodecAnalyzer* analyzer, - const EncoderSettings& settings) - : encoder_(encoder), - decoder_(decoder), - analyzer_(analyzer), - settings_(settings), - pacer_(settings.pacing) { - RTC_CHECK(analyzer_) << "Analyzer must be provided"; - if (settings.encoder_input_base_path) { - input_writer_ = - std::make_unique(*settings.encoder_input_base_path); - } - - if (settings.encoder_output_base_path) { - output_writer_ = - std::make_unique(*settings.encoder_output_base_path); - } - } - - void Initialize() { - task_queue_.PostScheduledTask([this] { encoder_->Initialize(); }, - Timestamp::Zero()); - task_queue_.WaitForPreviouslyPostedTasks(); - } - - void Encode(const VideoFrame& input_frame) { - Timestamp timestamp = - Timestamp::Micros((input_frame.timestamp() / k90kHz).us()); - - task_queue_.PostScheduledTask( - [this, input_frame] { - analyzer_->StartEncode(input_frame); - encoder_->Encode(input_frame, - [this](const EncodedImage& encoded_frame) { - analyzer_->FinishEncode(encoded_frame); - - if (decoder_ != nullptr) { - decoder_->Decode(encoded_frame); - } - - if (output_writer_ != nullptr) { - output_writer_->Write(encoded_frame); - } - }); - - if (input_writer_) { - input_writer_->Write(input_frame, /*spatial_idx=*/0); - } - }, - pacer_.Schedule(timestamp)); - } - - void Flush() { - task_queue_.PostScheduledTask([this] { encoder_->Flush(); }, - Timestamp::Zero()); - task_queue_.WaitForPreviouslyPostedTasks(); - } - - protected: - Encoder* const encoder_; - TesterDecoder* const decoder_; - VideoCodecAnalyzer* const analyzer_; - const EncoderSettings& settings_; - std::unique_ptr input_writer_; - std::unique_ptr output_writer_; - Pacer pacer_; - LimitedTaskQueue task_queue_; -}; - -} // namespace - -std::unique_ptr VideoCodecTesterImpl::RunDecodeTest( - CodedVideoSource* video_source, - Decoder* decoder, - const DecoderSettings& decoder_settings) { - VideoCodecAnalyzer perf_analyzer; - TesterDecoder tester_decoder(decoder, &perf_analyzer, decoder_settings); - - tester_decoder.Initialize(); - - while (auto frame = video_source->PullFrame()) { - tester_decoder.Decode(*frame); - } - - tester_decoder.Flush(); - - return perf_analyzer.GetStats(); -} - -std::unique_ptr VideoCodecTesterImpl::RunEncodeTest( - RawVideoSource* video_source, - Encoder* encoder, - const EncoderSettings& encoder_settings) { - SyncRawVideoSource sync_source(video_source); - VideoCodecAnalyzer perf_analyzer; - TesterEncoder tester_encoder(encoder, /*decoder=*/nullptr, &perf_analyzer, - encoder_settings); - - tester_encoder.Initialize(); - - while (auto frame = sync_source.PullFrame()) { - 
tester_encoder.Encode(*frame); - } - - tester_encoder.Flush(); - - return perf_analyzer.GetStats(); -} - -std::unique_ptr VideoCodecTesterImpl::RunEncodeDecodeTest( - RawVideoSource* video_source, - Encoder* encoder, - Decoder* decoder, - const EncoderSettings& encoder_settings, - const DecoderSettings& decoder_settings) { - SyncRawVideoSource sync_source(video_source); - VideoCodecAnalyzer perf_analyzer(&sync_source); - TesterDecoder tester_decoder(decoder, &perf_analyzer, decoder_settings); - TesterEncoder tester_encoder(encoder, &tester_decoder, &perf_analyzer, - encoder_settings); - - tester_encoder.Initialize(); - tester_decoder.Initialize(); - - while (auto frame = sync_source.PullFrame()) { - tester_encoder.Encode(*frame); - } - - tester_encoder.Flush(); - tester_decoder.Flush(); - - return perf_analyzer.GetStats(); -} - -} // namespace test -} // namespace webrtc diff --git a/modules/video_coding/codecs/test/video_codec_tester_impl.h b/modules/video_coding/codecs/test/video_codec_tester_impl.h deleted file mode 100644 index 32191b5a98..0000000000 --- a/modules/video_coding/codecs/test/video_codec_tester_impl.h +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_TESTER_IMPL_H_ -#define MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_TESTER_IMPL_H_ - -#include - -#include "api/test/video_codec_tester.h" - -namespace webrtc { -namespace test { - -// A stateless implementation of `VideoCodecTester`. This class is thread safe. -class VideoCodecTesterImpl : public VideoCodecTester { - public: - std::unique_ptr RunDecodeTest( - CodedVideoSource* video_source, - Decoder* decoder, - const DecoderSettings& decoder_settings) override; - - std::unique_ptr RunEncodeTest( - RawVideoSource* video_source, - Encoder* encoder, - const EncoderSettings& encoder_settings) override; - - std::unique_ptr RunEncodeDecodeTest( - RawVideoSource* video_source, - Encoder* encoder, - Decoder* decoder, - const EncoderSettings& encoder_settings, - const DecoderSettings& decoder_settings) override; -}; - -} // namespace test -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_TESTER_IMPL_H_ diff --git a/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc b/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc deleted file mode 100644 index 524134bd42..0000000000 --- a/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/video_coding/codecs/test/video_codec_tester_impl.h" - -#include -#include -#include -#include - -#include "api/units/frequency.h" -#include "api/units/time_delta.h" -#include "api/video/encoded_image.h" -#include "api/video/i420_buffer.h" -#include "api/video/video_frame.h" -#include "rtc_base/fake_clock.h" -#include "rtc_base/gunit.h" -#include "rtc_base/task_queue_for_test.h" -#include "rtc_base/time_utils.h" -#include "test/gmock.h" -#include "test/gtest.h" - -namespace webrtc { -namespace test { - -namespace { -using ::testing::_; -using ::testing::Invoke; -using ::testing::InvokeWithoutArgs; -using ::testing::Return; - -using Decoder = VideoCodecTester::Decoder; -using Encoder = VideoCodecTester::Encoder; -using CodedVideoSource = VideoCodecTester::CodedVideoSource; -using RawVideoSource = VideoCodecTester::RawVideoSource; -using DecoderSettings = VideoCodecTester::DecoderSettings; -using EncoderSettings = VideoCodecTester::EncoderSettings; -using PacingSettings = VideoCodecTester::PacingSettings; -using PacingMode = PacingSettings::PacingMode; - -constexpr Frequency k90kHz = Frequency::Hertz(90000); - -struct PacingTestParams { - PacingSettings pacing_settings; - Frequency framerate; - int num_frames; - std::vector expected_delta_ms; -}; - -VideoFrame CreateVideoFrame(uint32_t timestamp_rtp) { - rtc::scoped_refptr buffer(I420Buffer::Create(2, 2)); - return VideoFrame::Builder() - .set_video_frame_buffer(buffer) - .set_timestamp_rtp(timestamp_rtp) - .build(); -} - -EncodedImage CreateEncodedImage(uint32_t timestamp_rtp) { - EncodedImage encoded_image; - encoded_image.SetTimestamp(timestamp_rtp); - return encoded_image; -} - -class MockRawVideoSource : public RawVideoSource { - public: - MockRawVideoSource(int num_frames, Frequency framerate) - : num_frames_(num_frames), frame_num_(0), framerate_(framerate) {} - - absl::optional PullFrame() override { - if (frame_num_ >= num_frames_) { - return absl::nullopt; - } - uint32_t timestamp_rtp = frame_num_ * k90kHz / framerate_; - ++frame_num_; - return CreateVideoFrame(timestamp_rtp); - } - - MOCK_METHOD(VideoFrame, - GetFrame, - (uint32_t timestamp_rtp, Resolution), - (override)); - - private: - int num_frames_; - int frame_num_; - Frequency framerate_; -}; - -class MockCodedVideoSource : public CodedVideoSource { - public: - MockCodedVideoSource(int num_frames, Frequency framerate) - : num_frames_(num_frames), frame_num_(0), framerate_(framerate) {} - - absl::optional PullFrame() override { - if (frame_num_ >= num_frames_) { - return absl::nullopt; - } - uint32_t timestamp_rtp = frame_num_ * k90kHz / framerate_; - ++frame_num_; - return CreateEncodedImage(timestamp_rtp); - } - - private: - int num_frames_; - int frame_num_; - Frequency framerate_; -}; - -class MockDecoder : public Decoder { - public: - MOCK_METHOD(void, Initialize, (), (override)); - MOCK_METHOD(void, - Decode, - (const EncodedImage& frame, DecodeCallback callback), - (override)); - MOCK_METHOD(void, Flush, (), (override)); -}; - -class MockEncoder : public Encoder { - public: - MOCK_METHOD(void, Initialize, (), (override)); - MOCK_METHOD(void, - Encode, - (const VideoFrame& frame, EncodeCallback callback), - (override)); - MOCK_METHOD(void, Flush, (), (override)); -}; - -} // namespace - -class VideoCodecTesterImplPacingTest - : public ::testing::TestWithParam { - public: - VideoCodecTesterImplPacingTest() : test_params_(GetParam()) {} - - protected: - PacingTestParams test_params_; -}; - -TEST_P(VideoCodecTesterImplPacingTest, PaceEncode) { - 
MockRawVideoSource video_source(test_params_.num_frames, - test_params_.framerate); - MockEncoder encoder; - EncoderSettings encoder_settings; - encoder_settings.pacing = test_params_.pacing_settings; - - VideoCodecTesterImpl tester; - auto fs = - tester.RunEncodeTest(&video_source, &encoder, encoder_settings)->Slice(); - ASSERT_EQ(static_cast(fs.size()), test_params_.num_frames); - - for (size_t i = 1; i < fs.size(); ++i) { - int delta_ms = (fs[i].encode_start - fs[i - 1].encode_start).ms(); - EXPECT_NEAR(delta_ms, test_params_.expected_delta_ms[i - 1], 10); - } -} - -TEST_P(VideoCodecTesterImplPacingTest, PaceDecode) { - MockCodedVideoSource video_source(test_params_.num_frames, - test_params_.framerate); - MockDecoder decoder; - DecoderSettings decoder_settings; - decoder_settings.pacing = test_params_.pacing_settings; - - VideoCodecTesterImpl tester; - auto fs = - tester.RunDecodeTest(&video_source, &decoder, decoder_settings)->Slice(); - ASSERT_EQ(static_cast(fs.size()), test_params_.num_frames); - - for (size_t i = 1; i < fs.size(); ++i) { - int delta_ms = (fs[i].decode_start - fs[i - 1].decode_start).ms(); - EXPECT_NEAR(delta_ms, test_params_.expected_delta_ms[i - 1], 20); - } -} - -INSTANTIATE_TEST_SUITE_P( - DISABLED_All, - VideoCodecTesterImplPacingTest, - ::testing::ValuesIn( - {// No pacing. - PacingTestParams({.pacing_settings = {.mode = PacingMode::kNoPacing}, - .framerate = Frequency::Hertz(10), - .num_frames = 3, - .expected_delta_ms = {0, 0}}), - // Real-time pacing. - PacingTestParams({.pacing_settings = {.mode = PacingMode::kRealTime}, - .framerate = Frequency::Hertz(10), - .num_frames = 3, - .expected_delta_ms = {100, 100}}), - // Pace with specified constant rate. - PacingTestParams( - {.pacing_settings = {.mode = PacingMode::kConstantRate, - .constant_rate = Frequency::Hertz(20)}, - .framerate = Frequency::Hertz(10), - .num_frames = 3, - .expected_delta_ms = {50, 50}})})); -} // namespace test -} // namespace webrtc diff --git a/modules/video_coding/codecs/test/video_codec_unittest.cc b/modules/video_coding/codecs/test/video_codec_unittest.cc index a4a8b253fc..54be5b3c99 100644 --- a/modules/video_coding/codecs/test/video_codec_unittest.cc +++ b/modules/video_coding/codecs/test/video_codec_unittest.cc @@ -10,12 +10,27 @@ #include "modules/video_coding/codecs/test/video_codec_unittest.h" +#include +#include +#include +#include #include +#include #include "api/test/create_frame_generator.h" +#include "api/test/frame_generator_interface.h" +#include "api/units/time_delta.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_encoder.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/video_coding/include/video_error_codes.h" +#include "rtc_base/checks.h" +#include "rtc_base/synchronization/mutex.h" +#include "test/gtest.h" #include "test/video_codec_settings.h" static constexpr webrtc::TimeDelta kEncodeTimeout = @@ -57,8 +72,8 @@ VideoCodecUnitTest::FakeEncodeCompleteCallback::OnEncodedImage( void VideoCodecUnitTest::FakeDecodeCompleteCallback::Decoded( VideoFrame& frame, - absl::optional decode_time_ms, - absl::optional qp) { + std::optional /* decode_time_ms */, + std::optional qp) { MutexLock lock(&test_->decoded_frame_section_); test_->decoded_frame_.emplace(frame); test_->decoded_qp_ = qp; @@ -77,7 +92,7 @@ void VideoCodecUnitTest::SetUp() { 
input_frame_generator_ = test::CreateSquareFrameGenerator( codec_settings_.width, codec_settings_.height, - test::FrameGeneratorInterface::OutputType::kI420, absl::optional()); + test::FrameGeneratorInterface::OutputType::kI420, std::optional()); encoder_ = CreateEncoder(); decoder_ = CreateDecoder(); @@ -97,7 +112,8 @@ void VideoCodecUnitTest::SetUp() { EXPECT_TRUE(decoder_->Configure(decoder_settings)); } -void VideoCodecUnitTest::ModifyCodecSettings(VideoCodec* codec_settings) {} +void VideoCodecUnitTest::ModifyCodecSettings(VideoCodec* /* codec_settings */) { +} VideoFrame VideoCodecUnitTest::NextInputFrame() { test::FrameGeneratorInterface::VideoFrameData frame_data = @@ -110,7 +126,8 @@ VideoFrame VideoCodecUnitTest::NextInputFrame() { const uint32_t timestamp = last_input_frame_timestamp_ + kVideoPayloadTypeFrequency / codec_settings_.maxFramerate; - input_frame.set_timestamp(timestamp); + input_frame.set_rtp_timestamp(timestamp); + input_frame.set_timestamp_us(timestamp * (1000 / 90)); last_input_frame_timestamp_ = timestamp; return input_frame; @@ -158,7 +175,7 @@ bool VideoCodecUnitTest::WaitForEncodedFrames( } bool VideoCodecUnitTest::WaitForDecodedFrame(std::unique_ptr* frame, - absl::optional* qp) { + std::optional* qp) { bool ret = decoded_frame_event_.Wait(kDecodeTimeout); EXPECT_TRUE(ret) << "Timed out while waiting for a decoded frame."; // This becomes unsafe if there are multiple threads waiting for frames. diff --git a/modules/video_coding/codecs/test/video_codec_unittest.h b/modules/video_coding/codecs/test/video_codec_unittest.h index 7d05882b63..90b01bfa2a 100644 --- a/modules/video_coding/codecs/test/video_codec_unittest.h +++ b/modules/video_coding/codecs/test/video_codec_unittest.h @@ -11,15 +11,22 @@ #ifndef MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_UNITTEST_H_ #define MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_UNITTEST_H_ +#include +#include #include +#include #include +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/test/frame_generator_interface.h" +#include "api/video/encoded_image.h" +#include "api/video/video_frame.h" +#include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_encoder.h" #include "modules/video_coding/include/video_codec_interface.h" -#include "modules/video_coding/utility/vp8_header_parser.h" -#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" +#include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -30,7 +37,8 @@ namespace webrtc { class VideoCodecUnitTest : public ::testing::Test { public: VideoCodecUnitTest() - : encode_complete_callback_(this), + : env_(CreateEnvironment()), + encode_complete_callback_(this), decode_complete_callback_(this), wait_for_encoded_frames_threshold_(1), last_input_frame_timestamp_(0) {} @@ -53,17 +61,18 @@ class VideoCodecUnitTest : public ::testing::Test { explicit FakeDecodeCompleteCallback(VideoCodecUnitTest* test) : test_(test) {} - int32_t Decoded(VideoFrame& frame) override { + int32_t Decoded(VideoFrame& /* frame */) override { RTC_DCHECK_NOTREACHED(); return -1; } - int32_t Decoded(VideoFrame& frame, int64_t decode_time_ms) override { + int32_t Decoded(VideoFrame& /* frame */, + int64_t /* decode_time_ms */) override { RTC_DCHECK_NOTREACHED(); return -1; } void Decoded(VideoFrame& frame, - absl::optional decode_time_ms, - absl::optional qp) override; + std::optional 
decode_time_ms, + std::optional qp) override; private: VideoCodecUnitTest* const test_; @@ -92,10 +101,11 @@ class VideoCodecUnitTest : public ::testing::Test { // Helper method for waiting a single decoded frame. bool WaitForDecodedFrame(std::unique_ptr* frame, - absl::optional* qp); + std::optional* qp); size_t GetNumEncodedFrames(); + const Environment env_; VideoCodec codec_settings_; std::unique_ptr encoder_; @@ -106,7 +116,7 @@ class VideoCodecUnitTest : public ::testing::Test { FakeEncodeCompleteCallback encode_complete_callback_; FakeDecodeCompleteCallback decode_complete_callback_; - rtc::Event encoded_frame_event_; + Event encoded_frame_event_; Mutex encoded_frame_section_; size_t wait_for_encoded_frames_threshold_; std::vector encoded_frames_ @@ -114,11 +124,11 @@ class VideoCodecUnitTest : public ::testing::Test { std::vector codec_specific_infos_ RTC_GUARDED_BY(encoded_frame_section_); - rtc::Event decoded_frame_event_; + Event decoded_frame_event_; Mutex decoded_frame_section_; - absl::optional decoded_frame_ + std::optional decoded_frame_ RTC_GUARDED_BY(decoded_frame_section_); - absl::optional decoded_qp_ RTC_GUARDED_BY(decoded_frame_section_); + std::optional decoded_qp_ RTC_GUARDED_BY(decoded_frame_section_); uint32_t last_input_frame_timestamp_; }; diff --git a/modules/video_coding/codecs/test/video_encoder_decoder_instantiation_tests.cc b/modules/video_coding/codecs/test/video_encoder_decoder_instantiation_tests.cc index 41f2304748..b0af26f494 100644 --- a/modules/video_coding/codecs/test/video_encoder_decoder_instantiation_tests.cc +++ b/modules/video_coding/codecs/test/video_encoder_decoder_instantiation_tests.cc @@ -8,14 +8,21 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include #include +#include #include +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/video/video_codec_type.h" #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" +#include "rtc_base/checks.h" #if defined(WEBRTC_ANDROID) #include "modules/video_coding/codecs/test/android_codec_factory_helper.h" #elif defined(WEBRTC_IOS) @@ -86,6 +93,8 @@ class VideoEncoderDecoderInstantiationTest } } + const Environment env_ = CreateEnvironment(); + const SdpVideoFormat vp8_format_; const SdpVideoFormat vp9_format_; const SdpVideoFormat h264cbp_format_; @@ -119,14 +128,14 @@ INSTANTIATE_TEST_SUITE_P(MultipleEncodersDecoders, TEST_P(VideoEncoderDecoderInstantiationTest, DISABLED_InstantiateVp8Codecs) { for (int i = 0; i < num_encoders_; ++i) { std::unique_ptr encoder = - encoder_factory_->CreateVideoEncoder(vp8_format_); + encoder_factory_->Create(env_, vp8_format_); EXPECT_EQ(0, InitEncoder(kVideoCodecVP8, encoder.get())); encoders_.emplace_back(std::move(encoder)); } for (int i = 0; i < num_decoders_; ++i) { std::unique_ptr decoder = - decoder_factory_->CreateVideoDecoder(vp8_format_); + decoder_factory_->Create(env_, vp8_format_); ASSERT_THAT(decoder, NotNull()); EXPECT_TRUE(decoder->Configure(DecoderSettings(kVideoCodecVP8))); decoders_.emplace_back(std::move(decoder)); @@ -137,14 +146,14 @@ TEST_P(VideoEncoderDecoderInstantiationTest, DISABLED_InstantiateH264CBPCodecs) { for (int i = 0; i < num_encoders_; ++i) { std::unique_ptr encoder = - encoder_factory_->CreateVideoEncoder(h264cbp_format_); + 
encoder_factory_->Create(env_, h264cbp_format_); EXPECT_EQ(0, InitEncoder(kVideoCodecH264, encoder.get())); encoders_.emplace_back(std::move(encoder)); } for (int i = 0; i < num_decoders_; ++i) { std::unique_ptr decoder = - decoder_factory_->CreateVideoDecoder(h264cbp_format_); + decoder_factory_->Create(env_, h264cbp_format_); ASSERT_THAT(decoder, NotNull()); EXPECT_TRUE(decoder->Configure(DecoderSettings(kVideoCodecH264))); decoders_.push_back(std::move(decoder)); diff --git a/modules/video_coding/codecs/test/videocodec_test_av1.cc b/modules/video_coding/codecs/test/videocodec_test_av1.cc index 9189f5abe5..b3538d6138 100644 --- a/modules/video_coding/codecs/test/videocodec_test_av1.cc +++ b/modules/video_coding/codecs/test/videocodec_test_av1.cc @@ -9,15 +9,13 @@ */ #include +#include #include #include "api/test/create_videocodec_test_fixture.h" -#include "api/test/video/function_video_encoder_factory.h" -#include "api/video_codecs/sdp_video_format.h" +#include "api/test/videocodec_test_fixture.h" +#include "api/video_codecs/scalability_mode.h" #include "media/base/media_constants.h" -#include "media/engine/internal_decoder_factory.h" -#include "media/engine/internal_encoder_factory.h" -#include "media/engine/simulcast_encoder_adapter.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" @@ -40,8 +38,8 @@ VideoCodecTestFixture::Config CreateConfig(std::string filename) { TEST(VideoCodecTestAv1, HighBitrate) { auto config = CreateConfig("foreman_cif"); - config.SetCodecSettings(cricket::kAv1CodecName, 1, 1, 1, false, true, true, - kCifWidth, kCifHeight); + config.SetCodecSettings(kAv1CodecName, 1, 1, 1, false, true, true, kCifWidth, + kCifHeight); config.codec_settings.SetScalabilityMode(ScalabilityMode::kL1T1); config.num_frames = kNumFramesLong; auto fixture = CreateVideoCodecTestFixture(config); @@ -58,8 +56,8 @@ TEST(VideoCodecTestAv1, HighBitrate) { TEST(VideoCodecTestAv1, VeryLowBitrate) { auto config = CreateConfig("foreman_cif"); - config.SetCodecSettings(cricket::kAv1CodecName, 1, 1, 1, false, true, true, - kCifWidth, kCifHeight); + config.SetCodecSettings(kAv1CodecName, 1, 1, 1, false, true, true, kCifWidth, + kCifHeight); config.codec_settings.SetScalabilityMode(ScalabilityMode::kL1T1); auto fixture = CreateVideoCodecTestFixture(config); @@ -78,8 +76,8 @@ constexpr int kHdWidth = 1280; constexpr int kHdHeight = 720; TEST(VideoCodecTestAv1, Hd) { auto config = CreateConfig("ConferenceMotion_1280_720_50"); - config.SetCodecSettings(cricket::kAv1CodecName, 1, 1, 1, false, true, true, - kHdWidth, kHdHeight); + config.SetCodecSettings(kAv1CodecName, 1, 1, 1, false, true, true, kHdWidth, + kHdHeight); config.codec_settings.SetScalabilityMode(ScalabilityMode::kL1T1); config.num_frames = kNumFramesLong; auto fixture = CreateVideoCodecTestFixture(config); diff --git a/modules/video_coding/codecs/test/videocodec_test_fixture_config_unittest.cc b/modules/video_coding/codecs/test/videocodec_test_fixture_config_unittest.cc index 126aa93ee8..d9d43e067b 100644 --- a/modules/video_coding/codecs/test/videocodec_test_fixture_config_unittest.cc +++ b/modules/video_coding/codecs/test/videocodec_test_fixture_config_unittest.cc @@ -11,6 +11,7 @@ #include #include "api/test/videocodec_test_fixture.h" +#include "api/video/video_codec_type.h" #include "api/video_codecs/video_codec.h" #include "test/gmock.h" #include "test/gtest.h" diff --git a/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc b/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc index 
7543372e21..5974266285 100644 --- a/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc +++ b/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc @@ -15,28 +15,43 @@ #include #include +#include #include +#include #include #include #include +#include "absl/strings/match.h" #include "absl/strings/str_replace.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/environment/environment_factory.h" +#include "api/make_ref_counted.h" +#include "api/rtp_parameters.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/metrics/metric.h" +#include "api/test/videocodec_test_fixture.h" +#include "api/test/videocodec_test_stats.h" #include "api/transport/field_trial_based_config.h" -#include "api/video/video_bitrate_allocation.h" +#include "api/video/encoded_image.h" +#include "api/video/resolution.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "api/video_codecs/h264_profile_level_id.h" #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/simulcast_stream.h" +#include "api/video_codecs/spatial_layer.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_decoder.h" +#include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_decoder_factory_template.h" #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h" +#include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" #include "api/video_codecs/video_encoder_factory_template.h" #include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h" @@ -46,20 +61,25 @@ #include "common_video/h264/h264_common.h" #include "media/base/media_constants.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "modules/video_coding/codecs/test/videoprocessor.h" #include "modules/video_coding/codecs/vp9/svc_config.h" #include "modules/video_coding/utility/ivf_file_writer.h" #include "rtc_base/checks.h" #include "rtc_base/cpu_time.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/system/file_wrapper.h" +#include "rtc_base/system_time.h" +#include "rtc_base/task_queue_for_test.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/cpu_info.h" #include "system_wrappers/include/sleep.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" +#include "test/testsupport/frame_reader.h" #include "test/testsupport/frame_writer.h" #include "test/video_codec_settings.h" -#include "video/config/simulcast.h" +#include "video/config/encoder_stream_factory.h" #include "video/config/video_encoder_config.h" namespace webrtc { @@ -69,16 +89,20 @@ namespace { using VideoStatistics = VideoCodecTestStats::VideoStatistics; const int kBaseKeyFrameInterval = 3000; -const double kBitratePriority = 1.0; const int kDefaultMaxFramerateFps = 30; const int kMaxQp = 56; void ConfigureSimulcast(VideoCodec* codec_settings) { FieldTrialBasedConfig trials; - const std::vector streams = cricket::GetSimulcastConfig( - /*min_layer=*/1, codec_settings->numberOfSimulcastStreams, - codec_settings->width, codec_settings->height, 
kBitratePriority, kMaxQp, - /* is_screenshare = */ false, true, trials); + VideoEncoderConfig encoder_config; + encoder_config.codec_type = codec_settings->codecType; + encoder_config.number_of_streams = codec_settings->numberOfSimulcastStreams; + encoder_config.simulcast_layers.resize( + codec_settings->numberOfSimulcastStreams); + VideoEncoder::EncoderInfo encoder_info; + auto stream_factory = make_ref_counted(encoder_info); + const std::vector streams = stream_factory->CreateEncoderStreams( + trials, codec_settings->width, codec_settings->height, encoder_config); for (size_t i = 0; i < streams.size(); ++i) { SimulcastStream* ss = &codec_settings->simulcastStream[i]; @@ -89,7 +113,7 @@ void ConfigureSimulcast(VideoCodec* codec_settings) { ss->maxBitrate = streams[i].max_bitrate_bps / 1000; ss->targetBitrate = streams[i].target_bitrate_bps / 1000; ss->minBitrate = streams[i].min_bitrate_bps / 1000; - ss->qpMax = streams[i].max_qp; + ss->qpMax = kMaxQp; ss->active = true; } } @@ -112,7 +136,7 @@ void ConfigureSvc(VideoCodec* codec_settings) { std::string CodecSpecificToString(const VideoCodec& codec) { char buf[1024]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); switch (codec.codecType) { case kVideoCodecVP8: ss << "\nnum_temporal_layers: " @@ -137,6 +161,9 @@ std::string CodecSpecificToString(const VideoCodec& codec) { ss << "\nnum_temporal_layers: " << static_cast(codec.H264().numberOfTemporalLayers); break; + case kVideoCodecH265: + // TODO(bugs.webrtc.org/13485) + break; default: break; } @@ -164,16 +191,23 @@ SdpVideoFormat CreateSdpVideoFormat( H264PacketizationMode::NonInterleaved ? "1" : "0"; - SdpVideoFormat::Parameters codec_params = { - {cricket::kH264FmtpProfileLevelId, + CodecParameterMap codec_params = { + {kH264FmtpProfileLevelId, *H264ProfileLevelIdToString(H264ProfileLevelId( config.h264_codec_settings.profile, H264Level::kLevel3_1))}, - {cricket::kH264FmtpPacketizationMode, packetization_mode}, - {cricket::kH264FmtpLevelAsymmetryAllowed, "1"}}; + {kH264FmtpPacketizationMode, packetization_mode}, + {kH264FmtpLevelAsymmetryAllowed, "1"}}; return SdpVideoFormat(config.codec_name, codec_params); } else if (config.codec_settings.codecType == kVideoCodecVP9) { - return SdpVideoFormat(config.codec_name, {{"profile-id", "0"}}); + return SdpVideoFormat::VP9Profile0(); + } else if (config.codec_settings.codecType == kVideoCodecAV1) { + // Extra condition to not fallback to the default creation of + // SdpVideoFormat. This is needed for backwards compatibility in downstream + // projects that still use the preliminary codec name AV1X. 
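For illustration, the formats produced by CreateSdpVideoFormat above look as follows. This is a sketch, not part of the patch; the concrete profile-level-id string is standard H.264 SDP knowledge (Constrained Baseline, level 3.1, the level hard-coded above) rather than something taken from this file.

// Sketch only: example outputs of CreateSdpVideoFormat.
//  - VP9  -> SdpVideoFormat::VP9Profile0()  ("profile-id" = "0")
//  - AV1  -> SdpVideoFormat::AV1Profile0()
//  - AV1X -> SdpVideoFormat("AV1X")  (legacy name, intentionally falls
//            through to the plain constructor)
SdpVideoFormat ExampleH264CbpFormat() {
  return SdpVideoFormat(
      "H264", {{kH264FmtpProfileLevelId, "42e01f"},  // CB profile, level 3.1.
               {kH264FmtpPacketizationMode, "1"},    // Non-interleaved.
               {kH264FmtpLevelAsymmetryAllowed, "1"}});
}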
+ if (absl::EqualsIgnoreCase(config.codec_name, kAv1CodecName)) { + return SdpVideoFormat::AV1Profile0(); + } } return SdpVideoFormat(config.codec_name); @@ -184,7 +218,7 @@ SdpVideoFormat CreateSdpVideoFormat( VideoCodecTestFixtureImpl::Config::Config() = default; void VideoCodecTestFixtureImpl::Config::SetCodecSettings( - std::string codec_name, + std::string codec_name_to_set, size_t num_simulcast_streams, size_t num_spatial_layers, size_t num_temporal_layers, @@ -193,7 +227,7 @@ void VideoCodecTestFixtureImpl::Config::SetCodecSettings( bool spatial_resize_on, size_t width, size_t height) { - this->codec_name = codec_name; + codec_name = codec_name_to_set; VideoCodecType codec_type = PayloadStringToCodecType(codec_name); webrtc::test::CodecSettings(codec_type, &codec_settings); @@ -246,6 +280,9 @@ void VideoCodecTestFixtureImpl::Config::SetCodecSettings( codec_settings.H264()->numberOfTemporalLayers = static_cast(num_temporal_layers); break; + case kVideoCodecH265: + // TODO(bugs.webrtc.org/13485) + break; default: break; } @@ -288,7 +325,7 @@ size_t VideoCodecTestFixtureImpl::Config::NumberOfSimulcastStreams() const { std::string VideoCodecTestFixtureImpl::Config::ToString() const { std::string codec_type = CodecTypeToPayloadString(codec_settings.codecType); - rtc::StringBuilder ss; + StringBuilder ss; ss << "test_name: " << test_name; ss << "\nfilename: " << filename; ss << "\nnum_frames: " << num_frames; @@ -360,7 +397,7 @@ void VideoCodecTestFixtureImpl::H264KeyframeChecker::CheckEncodedFrame( bool contains_pps = false; bool contains_idr = false; const std::vector nalu_indices = - webrtc::H264::FindNaluIndices(encoded_frame.data(), encoded_frame.size()); + webrtc::H264::FindNaluIndices(encoded_frame); for (const webrtc::H264::NaluIndex& index : nalu_indices) { webrtc::H264::NaluType nalu_type = webrtc::H264::ParseNaluType( encoded_frame.data()[index.payload_start_offset]); @@ -392,14 +429,14 @@ class VideoCodecTestFixtureImpl::CpuProcessTime final { void Start() { if (config_.measure_cpu) { - cpu_time_ -= rtc::GetProcessCpuTimeNanos(); - wallclock_time_ -= rtc::SystemTimeNanos(); + cpu_time_ -= GetProcessCpuTimeNanos(); + wallclock_time_ -= SystemTimeNanos(); } } void Stop() { if (config_.measure_cpu) { - cpu_time_ += rtc::GetProcessCpuTimeNanos(); - wallclock_time_ += rtc::SystemTimeNanos(); + cpu_time_ += GetProcessCpuTimeNanos(); + wallclock_time_ += SystemTimeNanos(); } } void Print() const { @@ -430,6 +467,7 @@ VideoCodecTestFixtureImpl::VideoCodecTestFixtureImpl(Config config) webrtc::LibvpxVp9DecoderTemplateAdapter, webrtc::OpenH264DecoderTemplateAdapter, webrtc::Dav1dDecoderTemplateAdapter>>()), + env_(CreateEnvironment()), config_(config) {} VideoCodecTestFixtureImpl::VideoCodecTestFixtureImpl( @@ -438,6 +476,7 @@ VideoCodecTestFixtureImpl::VideoCodecTestFixtureImpl( std::unique_ptr encoder_factory) : encoder_factory_(std::move(encoder_factory)), decoder_factory_(std::move(decoder_factory)), + env_(CreateEnvironment()), config_(config) {} VideoCodecTestFixtureImpl::~VideoCodecTestFixtureImpl() = default; @@ -497,7 +536,7 @@ void VideoCodecTestFixtureImpl::ProcessAllFrames( if (RunEncodeInRealTime(config_)) { // Roughly pace the frames. 
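The H264KeyframeChecker above verifies that every key frame carries SPS, PPS and IDR NAL units, now using the array-view overload of H264::FindNaluIndices. As a sketch only (the helper name is hypothetical), the same scanning pattern looks like this in isolation:

// Sketch only: returns true if the encoded access unit contains an IDR NALU,
// using the same FindNaluIndices/ParseNaluType calls as the checker above.
bool ContainsIdr(const EncodedImage& encoded_frame) {
  for (const webrtc::H264::NaluIndex& index :
       webrtc::H264::FindNaluIndices(encoded_frame)) {
    if (webrtc::H264::ParseNaluType(
            encoded_frame.data()[index.payload_start_offset]) ==
        webrtc::H264::kIdr) {
      return true;
    }
  }
  return false;
}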
const int frame_duration_ms = - std::ceil(rtc::kNumMillisecsPerSec / rate_profile->input_fps); + std::ceil(kNumMillisecsPerSec / rate_profile->input_fps); SleepMs(frame_duration_ms); } } @@ -509,7 +548,7 @@ void VideoCodecTestFixtureImpl::ProcessAllFrames( // Give the VideoProcessor pipeline some time to process the last frame, // and then release the codecs. - SleepMs(1 * rtc::kNumMillisecsPerSec); + SleepMs(1 * kNumMillisecsPerSec); cpu_process_time_->Stop(); } @@ -540,7 +579,7 @@ void VideoCodecTestFixtureImpl::AnalyzeAllFrames( // For perf dashboard. char modifier_buf[256]; - rtc::SimpleStringBuilder modifier(modifier_buf); + SimpleStringBuilder modifier(modifier_buf); modifier << "_r" << rate_profile_idx << "_sl" << layer_stat.spatial_idx; auto PrintResultHelper = [&modifier, this]( @@ -548,7 +587,7 @@ void VideoCodecTestFixtureImpl::AnalyzeAllFrames( Unit unit, absl::string_view non_standard_unit_suffix, ImprovementDirection improvement_direction) { - rtc::StringBuilder metric_name(measurement); + StringBuilder metric_name(measurement); metric_name << modifier.str() << non_standard_unit_suffix; GetGlobalMetricsLogger()->LogSingleValueMetric( metric_name.str(), config_.test_name, value, unit, @@ -693,7 +732,7 @@ bool VideoCodecTestFixtureImpl::CreateEncoderAndDecoder() { decoder_format = *config_.decoder_format; } - encoder_ = encoder_factory_->CreateVideoEncoder(encoder_format); + encoder_ = encoder_factory_->Create(env_, encoder_format); EXPECT_TRUE(encoder_) << "Encoder not successfully created."; if (encoder_ == nullptr) { return false; @@ -703,7 +742,7 @@ bool VideoCodecTestFixtureImpl::CreateEncoderAndDecoder() { config_.NumberOfSimulcastStreams(), config_.NumberOfSpatialLayers()); for (size_t i = 0; i < num_simulcast_or_spatial_layers; ++i) { std::unique_ptr decoder = - decoder_factory_->CreateVideoDecoder(decoder_format); + decoder_factory_->Create(env_, decoder_format); EXPECT_TRUE(decoder) << "Decoder not successfully created."; if (decoder == nullptr) { return false; @@ -800,7 +839,7 @@ bool VideoCodecTestFixtureImpl::SetUpAndInitObjects( task_queue->SendTask([this]() { processor_ = std::make_unique( - encoder_.get(), &decoders_, source_frame_reader_.get(), config_, + env_, encoder_.get(), &decoders_, source_frame_reader_.get(), config_, &stats_, &encoded_frame_writers_, decoded_frame_writers_.empty() ? 
nullptr : &decoded_frame_writers_); }); diff --git a/modules/video_coding/codecs/test/videocodec_test_fixture_impl.h b/modules/video_coding/codecs/test/videocodec_test_fixture_impl.h index 005b7c0a8e..b36b80adc5 100644 --- a/modules/video_coding/codecs/test/videocodec_test_fixture_impl.h +++ b/modules/video_coding/codecs/test/videocodec_test_fixture_impl.h @@ -11,20 +11,23 @@ #ifndef MODULES_VIDEO_CODING_CODECS_TEST_VIDEOCODEC_TEST_FIXTURE_IMPL_H_ #define MODULES_VIDEO_CODING_CODECS_TEST_VIDEOCODEC_TEST_FIXTURE_IMPL_H_ +#include #include #include #include +#include "api/environment/environment.h" #include "api/test/videocodec_test_fixture.h" +#include "api/test/videocodec_test_stats.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" #include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" -#include "common_video/h264/h264_common.h" #include "modules/video_coding/codecs/test/videocodec_test_stats_impl.h" #include "modules/video_coding/codecs/test/videoprocessor.h" -#include "modules/video_coding/utility/ivf_file_writer.h" #include "rtc_base/task_queue_for_test.h" #include "test/testsupport/frame_reader.h" -#include "test/testsupport/frame_writer.h" namespace webrtc { namespace test { @@ -92,6 +95,7 @@ class VideoCodecTestFixtureImpl : public VideoCodecTestFixture { VideoProcessor::VideoDecoderList decoders_; // Helper objects. + const Environment env_; Config config_; VideoCodecTestStatsImpl stats_; std::unique_ptr source_frame_reader_; diff --git a/modules/video_coding/codecs/test/videocodec_test_libvpx.cc b/modules/video_coding/codecs/test/videocodec_test_libvpx.cc index 062375bd60..481fc775c1 100644 --- a/modules/video_coding/codecs/test/videocodec_test_libvpx.cc +++ b/modules/video_coding/codecs/test/videocodec_test_libvpx.cc @@ -8,11 +8,20 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include +#include +#include #include +#include #include +#include "api/environment/environment.h" #include "api/test/create_videocodec_test_fixture.h" #include "api/test/video/function_video_encoder_factory.h" +#include "api/test/videocodec_test_fixture.h" +#include "api/test/videocodec_test_stats.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" #include "api/video_codecs/sdp_video_format.h" #include "media/base/media_constants.h" #include "media/engine/internal_decoder_factory.h" @@ -20,6 +29,7 @@ #include "media/engine/simulcast_encoder_adapter.h" #include "modules/video_coding/utility/vp8_header_parser.h" #include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" +#include "rtc_base/checks.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" @@ -91,8 +101,8 @@ void PrintRdPerf(std::map> rd_stats) { #if defined(RTC_ENABLE_VP9) TEST(VideoCodecTestLibvpx, HighBitrateVP9) { auto config = CreateConfig(); - config.SetCodecSettings(cricket::kVp9CodecName, 1, 1, 1, false, true, false, - kCifWidth, kCifHeight); + config.SetCodecSettings(kVp9CodecName, 1, 1, 1, false, true, false, kCifWidth, + kCifHeight); config.num_frames = kNumFramesShort; const auto frame_checker = std::make_unique(); config.encoded_frame_checker = frame_checker.get(); @@ -110,8 +120,8 @@ TEST(VideoCodecTestLibvpx, HighBitrateVP9) { TEST(VideoCodecTestLibvpx, ChangeBitrateVP9) { auto config = CreateConfig(); - config.SetCodecSettings(cricket::kVp9CodecName, 1, 1, 1, false, true, false, - kCifWidth, kCifHeight); + config.SetCodecSettings(kVp9CodecName, 1, 1, 1, false, true, false, kCifWidth, + kCifHeight); const auto frame_checker = std::make_unique(); config.encoded_frame_checker = frame_checker.get(); auto fixture = CreateVideoCodecTestFixture(config); @@ -134,8 +144,8 @@ TEST(VideoCodecTestLibvpx, ChangeBitrateVP9) { TEST(VideoCodecTestLibvpx, ChangeFramerateVP9) { auto config = CreateConfig(); - config.SetCodecSettings(cricket::kVp9CodecName, 1, 1, 1, false, true, false, - kCifWidth, kCifHeight); + config.SetCodecSettings(kVp9CodecName, 1, 1, 1, false, true, false, kCifWidth, + kCifHeight); const auto frame_checker = std::make_unique(); config.encoded_frame_checker = frame_checker.get(); auto fixture = CreateVideoCodecTestFixture(config); @@ -160,8 +170,8 @@ TEST(VideoCodecTestLibvpx, ChangeFramerateVP9) { TEST(VideoCodecTestLibvpx, DenoiserOnVP9) { auto config = CreateConfig(); - config.SetCodecSettings(cricket::kVp9CodecName, 1, 1, 1, true, true, false, - kCifWidth, kCifHeight); + config.SetCodecSettings(kVp9CodecName, 1, 1, 1, true, true, false, kCifWidth, + kCifHeight); config.num_frames = kNumFramesShort; const auto frame_checker = std::make_unique(); config.encoded_frame_checker = frame_checker.get(); @@ -179,8 +189,8 @@ TEST(VideoCodecTestLibvpx, DenoiserOnVP9) { TEST(VideoCodecTestLibvpx, VeryLowBitrateVP9) { auto config = CreateConfig(); - config.SetCodecSettings(cricket::kVp9CodecName, 1, 1, 1, false, true, true, - kCifWidth, kCifHeight); + config.SetCodecSettings(kVp9CodecName, 1, 1, 1, false, true, true, kCifWidth, + kCifHeight); const auto frame_checker = std::make_unique(); config.encoded_frame_checker = frame_checker.get(); auto fixture = CreateVideoCodecTestFixture(config); @@ -202,8 +212,8 @@ TEST(VideoCodecTestLibvpx, VeryLowBitrateVP9) { TEST(VideoCodecTestLibvpx, HighBitrateVP8) { auto config = CreateConfig(); - config.SetCodecSettings(cricket::kVp8CodecName, 1, 1, 1, true, true, false, - kCifWidth, kCifHeight); + 
config.SetCodecSettings(kVp8CodecName, 1, 1, 1, true, true, false, kCifWidth, + kCifHeight); config.num_frames = kNumFramesShort; const auto frame_checker = std::make_unique(); config.encoded_frame_checker = frame_checker.get(); @@ -224,8 +234,8 @@ TEST(VideoCodecTestLibvpx, HighBitrateVP8) { TEST(VideoCodecTestLibvpx, MAYBE_ChangeBitrateVP8) { auto config = CreateConfig(); - config.SetCodecSettings(cricket::kVp8CodecName, 1, 1, 1, true, true, false, - kCifWidth, kCifHeight); + config.SetCodecSettings(kVp8CodecName, 1, 1, 1, true, true, false, kCifWidth, + kCifHeight); const auto frame_checker = std::make_unique(); config.encoded_frame_checker = frame_checker.get(); auto fixture = CreateVideoCodecTestFixture(config); @@ -252,8 +262,8 @@ TEST(VideoCodecTestLibvpx, MAYBE_ChangeBitrateVP8) { TEST(VideoCodecTestLibvpx, MAYBE_ChangeFramerateVP8) { auto config = CreateConfig(); - config.SetCodecSettings(cricket::kVp8CodecName, 1, 1, 1, true, true, false, - kCifWidth, kCifHeight); + config.SetCodecSettings(kVp8CodecName, 1, 1, 1, true, true, false, kCifWidth, + kCifHeight); const auto frame_checker = std::make_unique(); config.encoded_frame_checker = frame_checker.get(); auto fixture = CreateVideoCodecTestFixture(config); @@ -292,8 +302,8 @@ TEST(VideoCodecTestLibvpx, MAYBE_ChangeFramerateVP8) { #endif TEST(VideoCodecTestLibvpx, MAYBE_TemporalLayersVP8) { auto config = CreateConfig(); - config.SetCodecSettings(cricket::kVp8CodecName, 1, 1, 3, true, true, false, - kCifWidth, kCifHeight); + config.SetCodecSettings(kVp8CodecName, 1, 1, 3, true, true, false, kCifWidth, + kCifHeight); const auto frame_checker = std::make_unique(); config.encoded_frame_checker = frame_checker.get(); auto fixture = CreateVideoCodecTestFixture(config); @@ -328,8 +338,7 @@ TEST(VideoCodecTestLibvpx, MAYBE_MultiresVP8) { config.filename = "ConferenceMotion_1280_720_50"; config.filepath = ResourcePath(config.filename, "yuv"); config.num_frames = 100; - config.SetCodecSettings(cricket::kVp8CodecName, 3, 1, 3, true, true, false, - 1280, 720); + config.SetCodecSettings(kVp8CodecName, 3, 1, 3, true, true, false, 1280, 720); const auto frame_checker = std::make_unique(); config.encoded_frame_checker = frame_checker.get(); auto fixture = CreateVideoCodecTestFixture(config); @@ -357,19 +366,17 @@ TEST(VideoCodecTestLibvpx, MAYBE_SimulcastVP8) { config.filename = "ConferenceMotion_1280_720_50"; config.filepath = ResourcePath(config.filename, "yuv"); config.num_frames = 100; - config.SetCodecSettings(cricket::kVp8CodecName, 3, 1, 3, true, true, false, - 1280, 720); + config.SetCodecSettings(kVp8CodecName, 3, 1, 3, true, true, false, 1280, 720); const auto frame_checker = std::make_unique(); config.encoded_frame_checker = frame_checker.get(); InternalEncoderFactory internal_encoder_factory; - std::unique_ptr adapted_encoder_factory = - std::make_unique([&]() { + auto adapted_encoder_factory = std::make_unique( + [&](const Environment& env, const SdpVideoFormat& /* format */) { return std::make_unique( - &internal_encoder_factory, SdpVideoFormat(cricket::kVp8CodecName)); + env, &internal_encoder_factory, nullptr, SdpVideoFormat::VP8()); }); - std::unique_ptr internal_decoder_factory( - new InternalDecoderFactory()); + auto internal_decoder_factory = std::make_unique(); auto fixture = CreateVideoCodecTestFixture(config, std::move(internal_decoder_factory), @@ -394,8 +401,7 @@ TEST(VideoCodecTestLibvpx, MAYBE_SvcVP9) { config.filename = "ConferenceMotion_1280_720_50"; config.filepath = ResourcePath(config.filename, "yuv"); 
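Because the positional arguments in these SetCodecSettings calls are easy to misread, here is the MAYBE_TemporalLayersVP8 call above rewritten with the parameter names from Config::SetCodecSettings (see its definition earlier in this patch). This is purely illustrative; the wrapper function is hypothetical and the behavior is identical.

// Sketch only: the MAYBE_TemporalLayersVP8 configuration with its positional
// arguments labeled.
void ExampleTemporalLayersVp8Settings(VideoCodecTestFixture::Config& config) {
  config.SetCodecSettings(kVp8CodecName,
                          /*num_simulcast_streams=*/1,
                          /*num_spatial_layers=*/1,
                          /*num_temporal_layers=*/3,
                          /*denoising_on=*/true,
                          /*frame_dropper_on=*/true,
                          /*spatial_resize_on=*/false,
                          /*width=*/kCifWidth,
                          /*height=*/kCifHeight);
}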
config.num_frames = 100; - config.SetCodecSettings(cricket::kVp9CodecName, 1, 3, 3, true, true, false, - 1280, 720); + config.SetCodecSettings(kVp9CodecName, 1, 3, 3, true, true, false, 1280, 720); const auto frame_checker = std::make_unique(); config.encoded_frame_checker = frame_checker.get(); auto fixture = CreateVideoCodecTestFixture(config); @@ -415,8 +421,7 @@ TEST(VideoCodecTestLibvpx, DISABLED_MultiresVP8RdPerf) { config.filepath = ResourcePath(config.filename, "yuv"); config.num_frames = 300; config.print_frame_level_stats = true; - config.SetCodecSettings(cricket::kVp8CodecName, 3, 1, 3, true, true, false, - 1280, 720); + config.SetCodecSettings(kVp8CodecName, 3, 1, 3, true, true, false, 1280, 720); const auto frame_checker = std::make_unique(); config.encoded_frame_checker = frame_checker.get(); auto fixture = CreateVideoCodecTestFixture(config); @@ -441,8 +446,7 @@ TEST(VideoCodecTestLibvpx, DISABLED_SvcVP9RdPerf) { config.filepath = ResourcePath(config.filename, "yuv"); config.num_frames = 300; config.print_frame_level_stats = true; - config.SetCodecSettings(cricket::kVp9CodecName, 1, 3, 3, true, true, false, - 1280, 720); + config.SetCodecSettings(kVp9CodecName, 1, 3, 3, true, true, false, 1280, 720); const auto frame_checker = std::make_unique(); config.encoded_frame_checker = frame_checker.get(); auto fixture = CreateVideoCodecTestFixture(config); diff --git a/modules/video_coding/codecs/test/videocodec_test_mediacodec.cc b/modules/video_coding/codecs/test/videocodec_test_mediacodec.cc index fce21544b4..79ff245c55 100644 --- a/modules/video_coding/codecs/test/videocodec_test_mediacodec.cc +++ b/modules/video_coding/codecs/test/videocodec_test_mediacodec.cc @@ -8,12 +8,17 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include +#include #include #include #include +#include #include #include "api/test/create_videocodec_test_fixture.h" +#include "api/test/videocodec_test_fixture.h" +#include "api/video_codecs/h264_profile_level_id.h" #include "media/base/media_constants.h" #include "modules/video_coding/codecs/test/android_codec_factory_helper.h" #include "modules/video_coding/codecs/test/videocodec_test_fixture_impl.h" @@ -93,7 +98,7 @@ std::unique_ptr CreateTestFixtureWithConfig( TEST(VideoCodecTestMediaCodec, ForemanCif500kbpsVp8) { auto config = CreateConfig(); - config.SetCodecSettings(cricket::kVp8CodecName, 1, 1, 1, false, false, false, + config.SetCodecSettings(webrtc::kVp8CodecName, 1, 1, 1, false, false, false, 352, 288); auto fixture = CreateTestFixtureWithConfig(config); @@ -115,7 +120,7 @@ TEST(VideoCodecTestMediaCodec, ForemanCif500kbpsH264CBP) { const auto frame_checker = std::make_unique(); config.encoded_frame_checker = frame_checker.get(); - config.SetCodecSettings(cricket::kH264CodecName, 1, 1, 1, false, false, false, + config.SetCodecSettings(webrtc::kH264CodecName, 1, 1, 1, false, false, false, 352, 288); auto fixture = CreateTestFixtureWithConfig(config); @@ -141,7 +146,7 @@ TEST(VideoCodecTestMediaCodec, DISABLED_ForemanCif500kbpsH264CHP) { config.h264_codec_settings.profile = H264Profile::kProfileConstrainedHigh; config.encoded_frame_checker = frame_checker.get(); - config.SetCodecSettings(cricket::kH264CodecName, 1, 1, 1, false, false, false, + config.SetCodecSettings(webrtc::kH264CodecName, 1, 1, 1, false, false, false, 352, 288); auto fixture = CreateTestFixtureWithConfig(config); @@ -161,8 +166,8 @@ TEST(VideoCodecTestMediaCodec, DISABLED_ForemanCif500kbpsH264CHP) { TEST(VideoCodecTestMediaCodec, ForemanMixedRes100kbpsVp8H264) { auto config = CreateConfig(); const int kNumFrames = 30; - const std::vector codecs = {cricket::kVp8CodecName, - cricket::kH264CodecName}; + const std::vector codecs = {webrtc::kVp8CodecName, + webrtc::kH264CodecName}; const std::vector> resolutions = { {128, 96}, {176, 144}, {320, 240}, {480, 272}}; const std::vector rate_profiles = { @@ -196,7 +201,7 @@ class VideoCodecTestMediaCodecRateAdaptation const ::testing::TestParamInfo< VideoCodecTestMediaCodecRateAdaptation::ParamType>& info) { char buf[512]; - rtc::SimpleStringBuilder ss(buf); + webrtc::SimpleStringBuilder ss(buf); ss << std::get<0>(info.param).name << "_" << std::get<1>(info.param); return ss.str(); } @@ -258,9 +263,9 @@ INSTANTIATE_TEST_SUITE_P( kBitRateHighLowHigh, kFrameRateLowHighLow, kFrameRateHighLowHigh), - ::testing::Values(cricket::kVp8CodecName, - cricket::kVp9CodecName, - cricket::kH264CodecName)), + ::testing::Values(webrtc::kVp8CodecName, + webrtc::kVp9CodecName, + webrtc::kH264CodecName)), VideoCodecTestMediaCodecRateAdaptation::ParamInfoToStr); } // namespace test diff --git a/modules/video_coding/codecs/test/videocodec_test_openh264.cc b/modules/video_coding/codecs/test/videocodec_test_openh264.cc index 6513074bad..07da31ccad 100644 --- a/modules/video_coding/codecs/test/videocodec_test_openh264.cc +++ b/modules/video_coding/codecs/test/videocodec_test_openh264.cc @@ -12,7 +12,9 @@ #include #include "api/test/create_videocodec_test_fixture.h" +#include "api/test/videocodec_test_fixture.h" #include "media/base/media_constants.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" #include "modules/video_coding/codecs/test/videocodec_test_fixture_impl.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" @@ -41,7 +43,7 @@ 
TEST(VideoCodecTestOpenH264, ConstantHighBitrate) { auto frame_checker = std::make_unique(); auto config = CreateConfig(); - config.SetCodecSettings(cricket::kH264CodecName, 1, 1, 1, false, true, false, + config.SetCodecSettings(webrtc::kH264CodecName, 1, 1, 1, false, true, false, kCifWidth, kCifHeight); config.encoded_frame_checker = frame_checker.get(); auto fixture = CreateVideoCodecTestFixture(config); @@ -65,7 +67,7 @@ TEST(VideoCodecTestOpenH264, SingleNalUnit) { config.h264_codec_settings.packetization_mode = H264PacketizationMode::SingleNalUnit; config.max_payload_size_bytes = 500; - config.SetCodecSettings(cricket::kH264CodecName, 1, 1, 1, false, true, false, + config.SetCodecSettings(webrtc::kH264CodecName, 1, 1, 1, false, true, false, kCifWidth, kCifHeight); config.encoded_frame_checker = frame_checker.get(); auto fixture = CreateVideoCodecTestFixture(config); diff --git a/modules/video_coding/codecs/test/videocodec_test_stats_impl.cc b/modules/video_coding/codecs/test/videocodec_test_stats_impl.cc index 390348b97a..fb2ccfebb5 100644 --- a/modules/video_coding/codecs/test/videocodec_test_stats_impl.cc +++ b/modules/video_coding/codecs/test/videocodec_test_stats_impl.cc @@ -12,14 +12,19 @@ #include #include +#include #include #include -#include +#include +#include +#include "api/test/videocodec_test_stats.h" +#include "api/units/data_rate.h" +#include "api/units/frequency.h" +#include "api/video/video_frame_type.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/running_statistics.h" -#include "rtc_base/strings/string_builder.h" namespace webrtc { namespace test { @@ -106,7 +111,7 @@ VideoCodecTestStatsImpl::SliceAndCalcLayerVideoStatistic( ++temporal_idx) { VideoStatistics layer_stat = SliceAndCalcVideoStatistic( first_frame_num, last_frame_num, spatial_idx, temporal_idx, false, - /*target_bitrate=*/absl::nullopt, /*target_framerate=*/absl::nullopt); + /*target_bitrate=*/std::nullopt, /*target_framerate=*/std::nullopt); layer_stats.push_back(layer_stat); } } @@ -126,8 +131,8 @@ VideoStatistics VideoCodecTestStatsImpl::SliceAndCalcAggregatedVideoStatistic( return SliceAndCalcVideoStatistic( first_frame_num, last_frame_num, num_spatial_layers - 1, - num_temporal_layers - 1, true, /*target_bitrate=*/absl::nullopt, - /*target_framerate=*/absl::nullopt); + num_temporal_layers - 1, true, /*target_bitrate=*/std::nullopt, + /*target_framerate=*/std::nullopt); } VideoStatistics VideoCodecTestStatsImpl::CalcVideoStatistic( @@ -205,8 +210,8 @@ VideoStatistics VideoCodecTestStatsImpl::SliceAndCalcVideoStatistic( size_t spatial_idx, size_t temporal_idx, bool aggregate_independent_layers, - absl::optional target_bitrate, - absl::optional target_framerate) { + std::optional target_bitrate, + std::optional target_framerate) { VideoStatistics video_stat; float buffer_level_bits = 0.0f; diff --git a/modules/video_coding/codecs/test/videocodec_test_stats_impl.h b/modules/video_coding/codecs/test/videocodec_test_stats_impl.h index 1a7980aa0a..ec6869b7d4 100644 --- a/modules/video_coding/codecs/test/videocodec_test_stats_impl.h +++ b/modules/video_coding/codecs/test/videocodec_test_stats_impl.h @@ -14,10 +14,12 @@ #include #include -#include +#include #include #include "api/test/videocodec_test_stats.h" // NOLINT(build/include) +#include "api/units/data_rate.h" +#include "api/units/frequency.h" namespace webrtc { namespace test { @@ -75,8 +77,8 @@ class VideoCodecTestStatsImpl : public VideoCodecTestStats { size_t spatial_idx, size_t 
temporal_idx, bool aggregate_independent_layers, - absl::optional target_bitrate, - absl::optional target_framerate); + std::optional target_bitrate, + std::optional target_framerate); void GetNumberOfEncodedLayers(size_t first_frame_num, size_t last_frame_num, diff --git a/modules/video_coding/codecs/test/videocodec_test_stats_impl_unittest.cc b/modules/video_coding/codecs/test/videocodec_test_stats_impl_unittest.cc index 89e7d2e1c4..a68d8ac0b9 100644 --- a/modules/video_coding/codecs/test/videocodec_test_stats_impl_unittest.cc +++ b/modules/video_coding/codecs/test/videocodec_test_stats_impl_unittest.cc @@ -10,6 +10,7 @@ #include "modules/video_coding/codecs/test/videocodec_test_stats_impl.h" +#include #include #include "test/gmock.h" diff --git a/modules/video_coding/codecs/test/videocodec_test_videotoolbox.cc b/modules/video_coding/codecs/test/videocodec_test_videotoolbox.cc index 6df974362f..1c5d2c8321 100644 --- a/modules/video_coding/codecs/test/videocodec_test_videotoolbox.cc +++ b/modules/video_coding/codecs/test/videocodec_test_videotoolbox.cc @@ -9,9 +9,12 @@ */ #include +#include #include #include "api/test/create_videocodec_test_fixture.h" +#include "api/test/videocodec_test_fixture.h" +#include "api/video_codecs/h264_profile_level_id.h" #include "media/base/media_constants.h" #include "modules/video_coding/codecs/test/objc_codec_factory_helper.h" #include "modules/video_coding/codecs/test/videocodec_test_fixture_impl.h" @@ -55,7 +58,7 @@ MAYBE_TEST(VideoCodecTestVideoToolbox, ForemanCif500kbpsH264CBP) { const auto frame_checker = std::make_unique(); auto config = CreateConfig(); - config.SetCodecSettings(cricket::kH264CodecName, 1, 1, 1, false, false, false, + config.SetCodecSettings(webrtc::kH264CodecName, 1, 1, 1, false, false, false, 352, 288); config.encoded_frame_checker = frame_checker.get(); auto fixture = CreateTestFixtureWithConfig(config); @@ -72,7 +75,7 @@ MAYBE_TEST(VideoCodecTestVideoToolbox, ForemanCif500kbpsH264CHP) { std::make_unique(); auto config = CreateConfig(); config.h264_codec_settings.profile = H264Profile::kProfileConstrainedHigh; - config.SetCodecSettings(cricket::kH264CodecName, 1, 1, 1, false, false, false, + config.SetCodecSettings(webrtc::kH264CodecName, 1, 1, 1, false, false, false, 352, 288); config.encoded_frame_checker = frame_checker.get(); auto fixture = CreateTestFixtureWithConfig(config); diff --git a/modules/video_coding/codecs/test/videoprocessor.cc b/modules/video_coding/codecs/test/videoprocessor.cc index 3d7f6ff8a0..5b5c2bb80a 100644 --- a/modules/video_coding/codecs/test/videoprocessor.cc +++ b/modules/video_coding/codecs/test/videoprocessor.cc @@ -14,26 +14,45 @@ #include #include +#include +#include #include #include #include +#include +#include "api/environment/environment.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/test/videocodec_test_fixture.h" +#include "api/test/videocodec_test_stats.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" +#include "api/video/encoded_image.h" #include "api/video/i420_buffer.h" +#include "api/video/resolution.h" +#include "api/video/video_bitrate_allocator.h" #include "api/video/video_bitrate_allocator_factory.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" #include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" #include "api/video/video_rotation.h" #include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_decoder.h" 
#include "api/video_codecs/video_encoder.h" #include "common_video/h264/h264_common.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/test/videocodec_test_stats_impl.h" +#include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/checks.h" #include "rtc_base/time_utils.h" #include "test/gtest.h" +#include "test/testsupport/frame_reader.h" +#include "test/testsupport/frame_writer.h" #include "third_party/libyuv/include/libyuv/compare.h" #include "third_party/libyuv/include/libyuv/scale.h" @@ -52,7 +71,7 @@ size_t GetMaxNaluSizeBytes(const EncodedImage& encoded_frame, return 0; std::vector nalu_indices = - webrtc::H264::FindNaluIndices(encoded_frame.data(), encoded_frame.size()); + webrtc::H264::FindNaluIndices(encoded_frame); RTC_CHECK(!nalu_indices.empty()); @@ -77,7 +96,7 @@ size_t GetTemporalLayerIndex(const CodecSpecificInfo& codec_specific) { } int GetElapsedTimeMicroseconds(int64_t start_ns, int64_t stop_ns) { - int64_t diff_us = (stop_ns - start_ns) / rtc::kNumNanosecsPerMicrosec; + int64_t diff_us = (stop_ns - start_ns) / kNumNanosecsPerMicrosec; RTC_DCHECK_GE(diff_us, std::numeric_limits::min()); RTC_DCHECK_LE(diff_us, std::numeric_limits::max()); return static_cast(diff_us); @@ -92,7 +111,7 @@ void CalculateFrameQuality(const I420BufferInterface& ref_buffer, RTC_CHECK_GE(ref_buffer.width(), dec_buffer.width()); RTC_CHECK_GE(ref_buffer.height(), dec_buffer.height()); // Downscale reference frame. - rtc::scoped_refptr scaled_buffer = + scoped_refptr scaled_buffer = I420Buffer::Create(dec_buffer.width(), dec_buffer.height()); I420Scale(ref_buffer.DataY(), ref_buffer.StrideY(), ref_buffer.DataU(), ref_buffer.StrideU(), ref_buffer.DataV(), ref_buffer.StrideV(), @@ -135,7 +154,8 @@ void CalculateFrameQuality(const I420BufferInterface& ref_buffer, } // namespace -VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder, +VideoProcessor::VideoProcessor(const Environment& env, + webrtc::VideoEncoder* encoder, VideoDecoderList* decoders, FrameReader* input_frame_reader, const VideoCodecTestFixture::Config& config, @@ -150,9 +170,9 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder, stats_(stats), encoder_(encoder), decoders_(decoders), - bitrate_allocator_( - CreateBuiltinVideoBitrateAllocatorFactory() - ->CreateVideoBitrateAllocator(config_.codec_settings)), + bitrate_allocator_(CreateBuiltinVideoBitrateAllocatorFactory()->Create( + env, + config_.codec_settings)), encode_callback_(this), input_frame_reader_(input_frame_reader), merged_encoded_frames_(num_simulcast_or_spatial_layers_), @@ -243,9 +263,8 @@ void VideoProcessor::ProcessFrame() { FrameReader::Ratio framerate_scale = FrameReader::Ratio( {.num = config_.clip_fps.value_or(config_.codec_settings.maxFramerate), .den = static_cast(config_.codec_settings.maxFramerate)}); - rtc::scoped_refptr buffer = - input_frame_reader_->PullFrame( - /*frame_num*/ nullptr, resolution, framerate_scale); + scoped_refptr buffer = input_frame_reader_->PullFrame( + /*frame_num*/ nullptr, resolution, framerate_scale); RTC_CHECK(buffer) << "Tried to read too many frames from the file."; const size_t timestamp = @@ -254,7 +273,7 @@ void VideoProcessor::ProcessFrame() { VideoFrame input_frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(static_cast(timestamp)) 
+ .set_rtp_timestamp(static_cast(timestamp)) .set_timestamp_ms(static_cast(timestamp / kMsToRtpTimestamp)) .set_rotation(webrtc::kVideoRotation_0) .build(); @@ -267,7 +286,7 @@ void VideoProcessor::ProcessFrame() { if (config_.reference_width != -1 && config_.reference_height != -1 && (input_frame.width() != config_.reference_width || input_frame.height() != config_.reference_height)) { - rtc::scoped_refptr scaled_buffer = I420Buffer::Create( + scoped_refptr scaled_buffer = I420Buffer::Create( config_.codec_settings.width, config_.codec_settings.height); scaled_buffer->ScaleFrom(*input_frame.video_frame_buffer()->ToI420()); @@ -297,7 +316,7 @@ void VideoProcessor::ProcessFrame() { // For the highest measurement accuracy of the encode time, the start/stop // time recordings should wrap the Encode call as tightly as possible. - const int64_t encode_start_ns = rtc::TimeNanos(); + const int64_t encode_start_ns = TimeNanos(); for (size_t i = 0; i < num_simulcast_or_spatial_layers_; ++i) { FrameStatistics* frame_stat = stats_->GetFrame(frame_number, i); frame_stat->encode_start_ns = encode_start_ns; @@ -305,7 +324,7 @@ void VideoProcessor::ProcessFrame() { if (input_frame.width() != config_.codec_settings.width || input_frame.height() != config_.codec_settings.height) { - rtc::scoped_refptr scaled_buffer = I420Buffer::Create( + scoped_refptr scaled_buffer = I420Buffer::Create( config_.codec_settings.width, config_.codec_settings.height); scaled_buffer->ScaleFrom(*input_frame.video_frame_buffer()->ToI420()); input_frame.set_video_frame_buffer(scaled_buffer); @@ -352,7 +371,7 @@ int32_t VideoProcessor::VideoProcessorDecodeCompleteCallback::Decoded( .set_timestamp_us(image.timestamp_us()) .set_id(image.id()) .build(); - copy.set_timestamp(image.timestamp()); + copy.set_rtp_timestamp(image.rtp_timestamp()); task_queue_->PostTask([this, copy]() { video_processor_->FrameDecoded(copy, simulcast_svc_idx_); @@ -370,7 +389,7 @@ void VideoProcessor::FrameEncoded( // For the highest measurement accuracy of the encode time, the start/stop // time recordings should wrap the Encode call as tightly as possible. - const int64_t encode_stop_ns = rtc::TimeNanos(); + const int64_t encode_stop_ns = TimeNanos(); const VideoCodecType codec_type = codec_specific.codecType; if (config_.encoded_frame_checker) { @@ -387,7 +406,7 @@ void VideoProcessor::FrameEncoded( size_t temporal_idx = GetTemporalLayerIndex(codec_specific); FrameStatistics* frame_stat = - stats_->GetFrameWithTimestamp(encoded_image.Timestamp(), stream_idx); + stats_->GetFrameWithTimestamp(encoded_image.RtpTimestamp(), stream_idx); const size_t frame_number = frame_stat->frame_number; // Ensure that the encode order is monotonically increasing, within this @@ -466,7 +485,7 @@ void VideoProcessor::FrameEncoded( if (!layer_dropped) { base_image = &merged_encoded_frames_[i]; base_stat = - stats_->GetFrameWithTimestamp(encoded_image.Timestamp(), i); + stats_->GetFrameWithTimestamp(encoded_image.RtpTimestamp(), i); } else if (base_image && !base_stat->non_ref_for_inter_layer_pred) { DecodeFrame(*base_image, i); } @@ -492,7 +511,7 @@ void VideoProcessor::FrameEncoded( if (!config_.encode_in_real_time) { // To get pure encode time for next layers, measure time spent in encode // callback and subtract it from encode time of next layers. 
- post_encode_time_ns_ += rtc::TimeNanos() - encode_stop_ns; + post_encode_time_ns_ += TimeNanos() - encode_stop_ns; } } @@ -518,7 +537,7 @@ void VideoProcessor::WriteDecodedFrame(const I420BufferInterface& decoded_frame, int input_video_width = config_.codec_settings.width; int input_video_height = config_.codec_settings.height; - rtc::scoped_refptr scaled_buffer; + scoped_refptr scaled_buffer; const I420BufferInterface* scaled_frame; if (decoded_frame.width() == input_video_width && @@ -552,10 +571,10 @@ void VideoProcessor::FrameDecoded(const VideoFrame& decoded_frame, // For the highest measurement accuracy of the decode time, the start/stop // time recordings should wrap the Decode call as tightly as possible. - const int64_t decode_stop_ns = rtc::TimeNanos(); + const int64_t decode_stop_ns = TimeNanos(); FrameStatistics* frame_stat = - stats_->GetFrameWithTimestamp(decoded_frame.timestamp(), spatial_idx); + stats_->GetFrameWithTimestamp(decoded_frame.rtp_timestamp(), spatial_idx); const size_t frame_number = frame_stat->frame_number; if (!first_decoded_frame_[spatial_idx]) { @@ -596,7 +615,7 @@ void VideoProcessor::FrameDecoded(const VideoFrame& decoded_frame, // Skip quality metrics calculation to not affect CPU usage. if (analyze_frame_quality_ || decoded_frame_writers_) { // Save last decoded frame to handle possible future drops. - rtc::scoped_refptr i420buffer = + scoped_refptr i420buffer = decoded_frame.video_frame_buffer()->ToI420(); // Copy decoded frame to a buffer without padding/stride such that we can @@ -634,17 +653,17 @@ void VideoProcessor::DecodeFrame(const EncodedImage& encoded_image, size_t spatial_idx) { RTC_DCHECK_RUN_ON(&sequence_checker_); FrameStatistics* frame_stat = - stats_->GetFrameWithTimestamp(encoded_image.Timestamp(), spatial_idx); + stats_->GetFrameWithTimestamp(encoded_image.RtpTimestamp(), spatial_idx); - frame_stat->decode_start_ns = rtc::TimeNanos(); + frame_stat->decode_start_ns = TimeNanos(); frame_stat->decode_return_code = decoders_->at(spatial_idx)->Decode(encoded_image, 0); } const webrtc::EncodedImage* VideoProcessor::BuildAndStoreSuperframe( const EncodedImage& encoded_image, - const VideoCodecType codec, - size_t frame_number, + const VideoCodecType /* codec */, + size_t /* frame_number */, size_t spatial_idx, bool inter_layer_predicted) { // Should only be called for SVC. 
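VideoProcessor now receives a webrtc::Environment and forwards it to the bitrate allocator factory, whose Environment-taking Create() overload replaces CreateVideoBitrateAllocator(). An illustrative sketch of the new allocation path, assuming codec_settings is an already populated VideoCodec and using a hypothetical helper name:

#include <memory>

#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"
#include "api/video/builtin_video_bitrate_allocator_factory.h"
#include "api/video/video_bitrate_allocator.h"
#include "api/video_codecs/video_codec.h"

// Hypothetical helper, for illustration only.
std::unique_ptr<webrtc::VideoBitrateAllocator> MakeAllocator(
    const webrtc::VideoCodec& codec_settings) {
  const webrtc::Environment env = webrtc::CreateEnvironment();
  // The factory method now takes the Environment alongside the codec config.
  return webrtc::CreateBuiltinVideoBitrateAllocatorFactory()->Create(
      env, codec_settings);
}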
@@ -659,7 +678,7 @@ const webrtc::EncodedImage* VideoProcessor::BuildAndStoreSuperframe( for (int base_idx = static_cast(spatial_idx) - 1; base_idx >= 0; --base_idx) { EncodedImage lower_layer = merged_encoded_frames_.at(base_idx); - if (lower_layer.Timestamp() == encoded_image.Timestamp()) { + if (lower_layer.RtpTimestamp() == encoded_image.RtpTimestamp()) { base_image = lower_layer; break; } diff --git a/modules/video_coding/codecs/test/videoprocessor.h b/modules/video_coding/codecs/test/videoprocessor.h index 502fa3d0fa..200d5bb154 100644 --- a/modules/video_coding/codecs/test/videoprocessor.h +++ b/modules/video_coding/codecs/test/videoprocessor.h @@ -16,26 +16,27 @@ #include #include +#include #include #include -#include "absl/types/optional.h" +#include "api/environment/environment.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "api/test/videocodec_test_fixture.h" +#include "api/test/videocodec_test_stats.h" #include "api/video/encoded_image.h" #include "api/video/i420_buffer.h" -#include "api/video/resolution.h" -#include "api/video/video_bitrate_allocation.h" #include "api/video/video_bitrate_allocator.h" +#include "api/video/video_codec_type.h" #include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" #include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_encoder.h" -#include "modules/include/module_common_types.h" #include "modules/video_coding/codecs/test/videocodec_test_stats_impl.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/utility/ivf_file_writer.h" -#include "rtc_base/buffer.h" #include "rtc_base/checks.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" @@ -61,7 +62,8 @@ class VideoProcessor { using FrameWriterList = std::vector>; using FrameStatistics = VideoCodecTestStats::FrameStatistics; - VideoProcessor(webrtc::VideoEncoder* encoder, + VideoProcessor(const Environment& env, + VideoEncoder* encoder, VideoDecoderList* decoders, FrameReader* input_frame_reader, const VideoCodecTestFixture::Config& config, @@ -139,13 +141,13 @@ class VideoProcessor { int32_t Decoded(webrtc::VideoFrame& image) override; int32_t Decoded(webrtc::VideoFrame& image, - int64_t decode_time_ms) override { + int64_t /* decode_time_ms */) override { return Decoded(image); } void Decoded(webrtc::VideoFrame& image, - absl::optional decode_time_ms, - absl::optional qp) override { + std::optional /* decode_time_ms */, + std::optional /* qp */) override { Decoded(image); } @@ -243,7 +245,7 @@ class VideoProcessor { // simulcast_svc_idx -> frame_number. std::vector last_decoded_frame_num_ RTC_GUARDED_BY(sequence_checker_); // simulcast_svc_idx -> buffer. - std::vector> last_decoded_frame_buffer_ + std::vector> last_decoded_frame_buffer_ RTC_GUARDED_BY(sequence_checker_); // Time spent in frame encode callback. 
It is accumulated for layers and diff --git a/modules/video_coding/codecs/test/videoprocessor_unittest.cc b/modules/video_coding/codecs/test/videoprocessor_unittest.cc index 40cb5b6395..66f36d6854 100644 --- a/modules/video_coding/codecs/test/videoprocessor_unittest.cc +++ b/modules/video_coding/codecs/test/videoprocessor_unittest.cc @@ -10,13 +10,18 @@ #include "modules/video_coding/codecs/test/videoprocessor.h" +#include #include +#include -#include "api/scoped_refptr.h" +#include "api/environment/environment_factory.h" #include "api/test/mock_video_decoder.h" #include "api/test/mock_video_encoder.h" #include "api/test/videocodec_test_fixture.h" #include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "api/video_codecs/video_decoder.h" +#include "api/video_codecs/video_encoder.h" #include "media/base/media_constants.h" #include "modules/video_coding/codecs/test/videocodec_test_stats_impl.h" #include "rtc_base/task_queue_for_test.h" @@ -44,8 +49,8 @@ const int kHeight = 288; class VideoProcessorTest : public ::testing::Test { protected: VideoProcessorTest() : q_("VP queue") { - config_.SetCodecSettings(cricket::kVp8CodecName, 1, 1, 1, false, false, - false, kWidth, kHeight); + config_.SetCodecSettings(kVp8CodecName, 1, 1, 1, false, false, false, + kWidth, kHeight); decoder_mock_ = new MockVideoDecoder(); decoders_.push_back(std::unique_ptr(decoder_mock_)); @@ -53,8 +58,9 @@ class VideoProcessorTest : public ::testing::Test { ExpectInit(); q_.SendTask([this] { video_processor_ = std::make_unique( - &encoder_mock_, &decoders_, &frame_reader_mock_, config_, &stats_, - &encoded_frame_writers_, /*decoded_frame_writers=*/nullptr); + CreateEnvironment(), &encoder_mock_, &decoders_, &frame_reader_mock_, + config_, &stats_, &encoded_frame_writers_, + /*decoded_frame_writers=*/nullptr); }); } @@ -101,19 +107,20 @@ TEST_F(VideoProcessorTest, ProcessFrames_FixedFramerate) { SetRates(Field(&VideoEncoder::RateControlParameters::framerate_fps, static_cast(kFramerateFps)))) .Times(1); - q_.SendTask([=] { video_processor_->SetRates(kBitrateKbps, kFramerateFps); }); + q_.SendTask( + [this] { video_processor_->SetRates(kBitrateKbps, kFramerateFps); }); EXPECT_CALL(frame_reader_mock_, PullFrame(_, _, _)) .WillRepeatedly(Return(I420Buffer::Create(kWidth, kHeight))); - EXPECT_CALL( - encoder_mock_, - Encode(Property(&VideoFrame::timestamp, 1 * 90000 / kFramerateFps), _)) + EXPECT_CALL(encoder_mock_, Encode(Property(&VideoFrame::rtp_timestamp, + 1 * 90000 / kFramerateFps), + _)) .Times(1); q_.SendTask([this] { video_processor_->ProcessFrame(); }); - EXPECT_CALL( - encoder_mock_, - Encode(Property(&VideoFrame::timestamp, 2 * 90000 / kFramerateFps), _)) + EXPECT_CALL(encoder_mock_, Encode(Property(&VideoFrame::rtp_timestamp, + 2 * 90000 / kFramerateFps), + _)) .Times(1); q_.SendTask([this] { video_processor_->ProcessFrame(); }); @@ -130,12 +137,12 @@ TEST_F(VideoProcessorTest, ProcessFrames_VariableFramerate) { static_cast(kStartFramerateFps)))) .Times(1); q_.SendTask( - [=] { video_processor_->SetRates(kBitrateKbps, kStartFramerateFps); }); + [this] { video_processor_->SetRates(kBitrateKbps, kStartFramerateFps); }); EXPECT_CALL(frame_reader_mock_, PullFrame(_, _, _)) .WillRepeatedly(Return(I420Buffer::Create(kWidth, kHeight))); EXPECT_CALL(encoder_mock_, - Encode(Property(&VideoFrame::timestamp, kStartTimestamp), _)) + Encode(Property(&VideoFrame::rtp_timestamp, kStartTimestamp), _)) .Times(1); q_.SendTask([this] { video_processor_->ProcessFrame(); }); @@ -146,10 +153,10 @@ 
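Two mechanical renames recur in these tests: VideoFrame's RTP timestamp accessors are now set_rtp_timestamp()/rtp_timestamp(), so the gMock Property matchers change with them, and the task lambdas capture `this` explicitly rather than via `[=]`, whose implicit `this` capture is deprecated in C++20. A small sketch of the renamed builder/accessor pair, with a hypothetical helper name:

#include <cstdint>

#include "api/video/i420_buffer.h"
#include "api/video/video_frame.h"
#include "rtc_base/checks.h"

// Hypothetical helper, for illustration only.
webrtc::VideoFrame MakeFrame(uint32_t rtp_timestamp) {
  webrtc::VideoFrame frame =
      webrtc::VideoFrame::Builder()
          .set_video_frame_buffer(webrtc::I420Buffer::Create(352, 288))
          .set_rtp_timestamp(rtp_timestamp)  // Was set_timestamp_rtp().
          .build();
  RTC_DCHECK_EQ(frame.rtp_timestamp(), rtp_timestamp);  // Was timestamp().
  return frame;
}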
TEST_F(VideoProcessorTest, ProcessFrames_VariableFramerate) { static_cast(kNewFramerateFps)))) .Times(1); q_.SendTask( - [=] { video_processor_->SetRates(kBitrateKbps, kNewFramerateFps); }); + [this] { video_processor_->SetRates(kBitrateKbps, kNewFramerateFps); }); EXPECT_CALL(encoder_mock_, - Encode(Property(&VideoFrame::timestamp, + Encode(Property(&VideoFrame::rtp_timestamp, kStartTimestamp + 90000 / kNewFramerateFps), _)) .Times(1); @@ -172,7 +179,8 @@ TEST_F(VideoProcessorTest, SetRates) { Field(&VideoEncoder::RateControlParameters::framerate_fps, static_cast(kFramerateFps))))) .Times(1); - q_.SendTask([=] { video_processor_->SetRates(kBitrateKbps, kFramerateFps); }); + q_.SendTask( + [this] { video_processor_->SetRates(kBitrateKbps, kFramerateFps); }); const uint32_t kNewBitrateKbps = 456; const int kNewFramerateFps = 34; @@ -186,8 +194,9 @@ TEST_F(VideoProcessorTest, SetRates) { Field(&VideoEncoder::RateControlParameters::framerate_fps, static_cast(kNewFramerateFps))))) .Times(1); - q_.SendTask( - [=] { video_processor_->SetRates(kNewBitrateKbps, kNewFramerateFps); }); + q_.SendTask([this] { + video_processor_->SetRates(kNewBitrateKbps, kNewFramerateFps); + }); ExpectRelease(); } diff --git a/modules/video_coding/codecs/vp8/default_temporal_layers.cc b/modules/video_coding/codecs/vp8/default_temporal_layers.cc index 94860da1b6..cd55e0ff37 100644 --- a/modules/video_coding/codecs/vp8/default_temporal_layers.cc +++ b/modules/video_coding/codecs/vp8/default_temporal_layers.cc @@ -13,16 +13,27 @@ #include #include +#include +#include +#include #include #include #include #include +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_codec_constants.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/vp8_frame_buffer_controller.h" +#include "api/video_codecs/vp8_frame_config.h" +#include "api/video_codecs/vp8_temporal_layers.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/vp8/include/temporal_layers_checker.h" #include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { DefaultTemporalLayers::PendingFrame::PendingFrame() = default; @@ -141,78 +152,33 @@ DefaultTemporalLayers::GetDependencyInfo(size_t num_layers) { // that the 'alt' buffer reference is effectively the last keyframe. // TL0 also references and updates the 'last' buffer. // TL1 also references 'last' and references and updates 'golden'. - if (!field_trial::IsDisabled("WebRTC-UseShortVP8TL2Pattern")) { - // Shortened 4-frame pattern: - // 1---1 1---1 ... - // / / / / - // 0---0---0---0 ... - return {{"SS", {kReferenceAndUpdate, kNone, kNone}}, - {"-S", {kReference, kUpdate, kNone}}, - {"SR", {kReferenceAndUpdate, kNone, kNone}}, - {"-D", {kReference, kReference, kNone, kFreezeEntropy}}}; - } else { - // "Default" 8-frame pattern: - // 1---1---1---1 1---1---1---1 ... - // / / / / / / / / - // 0---0---0---0---0---0---0---0 ... 
- return {{"SS", {kReferenceAndUpdate, kNone, kNone}}, - {"-S", {kReference, kUpdate, kNone}}, - {"SR", {kReferenceAndUpdate, kNone, kNone}}, - {"-R", {kReference, kReferenceAndUpdate, kNone}}, - {"SR", {kReferenceAndUpdate, kNone, kNone}}, - {"-R", {kReference, kReferenceAndUpdate, kNone}}, - {"SR", {kReferenceAndUpdate, kNone, kNone}}, - {"-D", {kReference, kReference, kNone, kFreezeEntropy}}}; - } + // 1---1 1---1 ... + // / / / / + // 0---0---0---0 ... + return {{"SS", {kReferenceAndUpdate, kNone, kNone}}, + {"-S", {kReference, kUpdate, kNone}}, + {"SR", {kReferenceAndUpdate, kNone, kNone}}, + {"-D", {kReference, kReference, kNone, kFreezeEntropy}}}; case 3: - if (field_trial::IsEnabled("WebRTC-UseShortVP8TL3Pattern")) { - // This field trial is intended to check if it is worth using a shorter - // temporal pattern, trading some coding efficiency for less risk of - // dropped frames. - // The coding efficiency will decrease somewhat since the higher layer - // state is more volatile, but it will be offset slightly by updating - // the altref buffer with TL2 frames, instead of just referencing lower - // layers. - // If a frame is dropped in a higher layer, the jitter - // buffer on the receive side won't be able to decode any higher layer - // frame until the next sync frame. So we expect a noticeable decrease - // in frame drops on links with high packet loss. - - // TL0 references and updates the 'last' buffer. - // TL1 references 'last' and references and updates 'golden'. - // TL2 references both 'last' & 'golden' and references and updates - // 'arf'. - // 2-------2 2-------2 2 - // / __/ / __/ / - // / __1 / __1 / - // /___/ /___/ / - // 0---------------0---------------0----- - // 0 1 2 3 4 5 6 7 8 9 ... - return {{"SSS", {kReferenceAndUpdate, kNone, kNone}}, - {"--S", {kReference, kNone, kUpdate}}, - {"-DR", {kReference, kUpdate, kNone}}, - {"--D", {kReference, kReference, kReference, kFreezeEntropy}}}; - } else { - // All layers can reference but not update the 'alt' buffer, this means - // that the 'alt' buffer reference is effectively the last keyframe. - // TL0 also references and updates the 'last' buffer. - // TL1 also references 'last' and references and updates 'golden'. - // TL2 references both 'last' and 'golden' but updates no buffer. - // 2 __2 _____2 __2 2 - // / /____/ / / / - // / 1---------/-----1 / - // /_____/ /_____/ / - // 0---------------0---------------0----- - // 0 1 2 3 4 5 6 7 8 9 ... - return {{"SSS", {kReferenceAndUpdate, kNone, kNone}}, - {"--D", {kReference, kNone, kNone, kFreezeEntropy}}, - {"-SS", {kReference, kUpdate, kNone}}, - {"--D", {kReference, kReference, kNone, kFreezeEntropy}}, - {"SRR", {kReferenceAndUpdate, kNone, kNone}}, - {"--D", {kReference, kReference, kNone, kFreezeEntropy}}, - {"-DS", {kReference, kReferenceAndUpdate, kNone}}, - {"--D", {kReference, kReference, kNone, kFreezeEntropy}}}; - } + // All layers can reference but not update the 'alt' buffer, this means + // that the 'alt' buffer reference is effectively the last keyframe. + // TL0 also references and updates the 'last' buffer. + // TL1 also references 'last' and references and updates 'golden'. + // TL2 references both 'last' and 'golden' but updates no buffer. + // 2 __2 _____2 __2 2 + // / /____/ / / / + // / 1---------/-----1 / + // /_____/ /_____/ / + // 0---------------0---------------0----- + // 0 1 2 3 4 5 6 7 8 9 ... 
+ return {{"SSS", {kReferenceAndUpdate, kNone, kNone}}, + {"--D", {kReference, kNone, kNone, kFreezeEntropy}}, + {"-SS", {kReference, kUpdate, kNone}}, + {"--D", {kReference, kReference, kNone, kFreezeEntropy}}, + {"SRR", {kReferenceAndUpdate, kNone, kNone}}, + {"--D", {kReference, kReference, kNone, kFreezeEntropy}}, + {"-DS", {kReference, kReferenceAndUpdate, kNone}}, + {"--D", {kReference, kReference, kNone, kFreezeEntropy}}}; case 4: // TL0 references and updates only the 'last' buffer. // TL1 references 'last' and updates and references 'golden'. @@ -286,8 +252,8 @@ DefaultTemporalLayers::DefaultTemporalLayers(int number_of_temporal_layers) DefaultTemporalLayers::~DefaultTemporalLayers() = default; void DefaultTemporalLayers::SetQpLimits(size_t stream_index, - int min_qp, - int max_qp) { + int /* min_qp */, + int /* max_qp */) { RTC_DCHECK_LT(stream_index, StreamCount()); // Ignore. } @@ -306,7 +272,7 @@ bool DefaultTemporalLayers::SupportsEncoderFrameDropping( void DefaultTemporalLayers::OnRatesUpdated( size_t stream_index, const std::vector& bitrates_bps, - int framerate_fps) { + int /* framerate_fps */) { RTC_DCHECK_LT(stream_index, StreamCount()); RTC_DCHECK_GT(bitrates_bps.size(), 0); RTC_DCHECK_LE(bitrates_bps.size(), num_layers_); @@ -517,7 +483,7 @@ void DefaultTemporalLayers::OnEncodeDone(size_t stream_index, uint32_t rtp_timestamp, size_t size_bytes, bool is_keyframe, - int qp, + int /* qp */, CodecSpecificInfo* info) { RTC_DCHECK_LT(stream_index, StreamCount()); RTC_DCHECK_GT(num_layers_, 0); @@ -622,7 +588,7 @@ void DefaultTemporalLayers::OnEncodeDone(size_t stream_index, pending_frames_.pop_front(); } -void DefaultTemporalLayers::OnFrameDropped(size_t stream_index, +void DefaultTemporalLayers::OnFrameDropped(size_t /* stream_index */, uint32_t rtp_timestamp) { CullPendingFramesBefore(rtp_timestamp); RTC_CHECK(!pending_frames_.empty()); @@ -630,12 +596,13 @@ void DefaultTemporalLayers::OnFrameDropped(size_t stream_index, pending_frames_.pop_front(); } -void DefaultTemporalLayers::OnPacketLossRateUpdate(float packet_loss_rate) {} +void DefaultTemporalLayers::OnPacketLossRateUpdate( + float /* packet_loss_rate */) {} -void DefaultTemporalLayers::OnRttUpdate(int64_t rtt_ms) {} +void DefaultTemporalLayers::OnRttUpdate(int64_t /* rtt_ms */) {} void DefaultTemporalLayers::OnLossNotification( - const VideoEncoder::LossNotification& loss_notification) {} + const VideoEncoder::LossNotification& /* loss_notification */) {} FrameDependencyStructure DefaultTemporalLayers::GetTemplateStructure( int num_layers) const { @@ -662,23 +629,14 @@ FrameDependencyStructure DefaultTemporalLayers::GetTemplateStructure( return template_structure; } case 3: { - if (field_trial::IsEnabled("WebRTC-UseShortVP8TL3Pattern")) { - template_structure.templates.resize(5); - template_structure.templates[0].T(0).Dtis("SSS"); - template_structure.templates[1].T(0).Dtis("SSS").FrameDiffs({4}); - template_structure.templates[2].T(1).Dtis("-DR").FrameDiffs({2}); - template_structure.templates[3].T(2).Dtis("--S").FrameDiffs({1}); - template_structure.templates[4].T(2).Dtis("--D").FrameDiffs({2, 1}); - } else { - template_structure.templates.resize(7); - template_structure.templates[0].T(0).Dtis("SSS"); - template_structure.templates[1].T(0).Dtis("SSS").FrameDiffs({4}); - template_structure.templates[2].T(0).Dtis("SRR").FrameDiffs({4}); - template_structure.templates[3].T(1).Dtis("-SS").FrameDiffs({2}); - template_structure.templates[4].T(1).Dtis("-DS").FrameDiffs({4, 2}); - 
template_structure.templates[5].T(2).Dtis("--D").FrameDiffs({1}); - template_structure.templates[6].T(2).Dtis("--D").FrameDiffs({3, 1}); - } + template_structure.templates.resize(7); + template_structure.templates[0].T(0).Dtis("SSS"); + template_structure.templates[1].T(0).Dtis("SSS").FrameDiffs({4}); + template_structure.templates[2].T(0).Dtis("SRR").FrameDiffs({4}); + template_structure.templates[3].T(1).Dtis("-SS").FrameDiffs({2}); + template_structure.templates[4].T(1).Dtis("-DS").FrameDiffs({4, 2}); + template_structure.templates[5].T(2).Dtis("--D").FrameDiffs({1}); + template_structure.templates[6].T(2).Dtis("--D").FrameDiffs({3, 1}); return template_structure; } case 4: { @@ -708,17 +666,9 @@ std::vector> GetTemporalDependencies( case 1: return {{0}}; case 2: - if (!field_trial::IsDisabled("WebRTC-UseShortVP8TL2Pattern")) { - return {{2}, {0}, {0}, {1, 2}}; - } else { - return {{6}, {0}, {0}, {1, 2}, {2}, {3, 4}, {4}, {5, 6}}; - } + return {{2}, {0}, {0}, {1, 2}}; case 3: - if (field_trial::IsEnabled("WebRTC-UseShortVP8TL3Pattern")) { - return {{0}, {0}, {0}, {0, 1, 2}}; - } else { - return {{4}, {0}, {0}, {0, 2}, {0}, {2, 4}, {2, 4}, {4, 6}}; - } + return {{4}, {0}, {0}, {0, 2}, {0}, {2, 4}, {2, 4}, {4, 6}}; case 4: return {{8}, {0}, {0}, {0, 2}, {0}, {0, 2, 4}, {0, 2, 4}, {0, 4, 6}, diff --git a/modules/video_coding/codecs/vp8/default_temporal_layers.h b/modules/video_coding/codecs/vp8/default_temporal_layers.h index bc6574c54c..32b3ab39b2 100644 --- a/modules/video_coding/codecs/vp8/default_temporal_layers.h +++ b/modules/video_coding/codecs/vp8/default_temporal_layers.h @@ -15,17 +15,20 @@ #include #include +#include #include #include -#include #include +#include #include -#include #include -#include "absl/types/optional.h" +#include "absl/container/inlined_vector.h" +#include "absl/strings/string_view.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/vp8_frame_buffer_controller.h" #include "api/video_codecs/vp8_frame_config.h" -#include "api/video_codecs/vp8_temporal_layers.h" #include "modules/video_coding/codecs/vp8/include/temporal_layers_checker.h" #include "modules/video_coding/include/video_codec_interface.h" @@ -124,7 +127,7 @@ class DefaultTemporalLayers final : public Vp8FrameBufferController { uint8_t pattern_idx_; // Updated cumulative bitrates, per temporal layer. 
- absl::optional> new_bitrates_bps_; + std::optional> new_bitrates_bps_; // Status for each pending frame, in std::deque pending_frames_; diff --git a/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc b/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc index ae027a9d8a..503c6a19ff 100644 --- a/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc +++ b/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc @@ -10,16 +10,24 @@ #include "modules/video_coding/codecs/vp8/default_temporal_layers.h" +#include +#include #include #include +#include +#include "api/environment/environment_factory.h" +#include "api/transport/rtp/dependency_descriptor.h" #include "api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" #include "api/video_codecs/video_codec.h" +#include "api/video_codecs/vp8_frame_buffer_controller.h" #include "api/video_codecs/vp8_frame_config.h" #include "modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" -#include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" #include "vpx/vp8cx.h" @@ -99,7 +107,7 @@ std::vector GetTemporalLayerRates(int target_bitrate_kbps, codec.simulcastStream[0].maxBitrate = target_bitrate_kbps; codec.simulcastStream[0].numberOfTemporalLayers = num_temporal_layers; codec.simulcastStream[0].active = true; - SimulcastRateAllocator allocator(codec); + SimulcastRateAllocator allocator(CreateEnvironment(), codec); return allocator .Allocate( VideoBitrateAllocationParameters(target_bitrate_kbps, framerate_fps)) @@ -226,221 +234,6 @@ TEST_F(TemporalLayersTest, 3Layers) { } } -TEST_F(TemporalLayersTest, Alternative3Layers) { - constexpr int kNumLayers = 3; - ScopedFieldTrials field_trial("WebRTC-UseShortVP8TL3Pattern/Enabled/"); - DefaultTemporalLayers tl(kNumLayers); - DefaultTemporalLayersChecker checker(kNumLayers); - tl.OnRatesUpdated(0, - GetTemporalLayerRates(kDefaultBytesPerFrame, - kDefaultFramerate, kNumLayers), - kDefaultFramerate); - tl.UpdateConfiguration(0); - - int expected_flags[8] = {kTemporalUpdateLast, - kTemporalUpdateAltrefWithoutDependency, - kTemporalUpdateGoldenWithoutDependency, - kTemporalUpdateNone, - kTemporalUpdateLast, - kTemporalUpdateAltrefWithoutDependency, - kTemporalUpdateGoldenWithoutDependency, - kTemporalUpdateNone}; - int expected_temporal_idx[8] = {0, 2, 1, 2, 0, 2, 1, 2}; - - bool expected_layer_sync[8] = {false, true, true, false, - false, true, true, false}; - - unsigned int timestamp = 0; - for (int i = 0; i < 8; ++i) { - const bool is_keyframe = (i == 0); - CodecSpecificInfo info; - Vp8FrameConfig tl_config = tl.NextFrameConfig(0, timestamp); - EXPECT_EQ(is_keyframe ? 
kKeyFrameFlags : expected_flags[i], - LibvpxVp8Encoder::EncodeFlags(tl_config)) - << i; - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, is_keyframe, - kDefaultQp, &info); - EXPECT_TRUE(checker.CheckTemporalConfig(is_keyframe, tl_config)); - EXPECT_EQ(expected_temporal_idx[i], info.codecSpecific.VP8.temporalIdx); - EXPECT_EQ(expected_temporal_idx[i], tl_config.packetizer_temporal_idx); - EXPECT_EQ(expected_temporal_idx[i], tl_config.encoder_layer_id); - EXPECT_EQ(is_keyframe || expected_layer_sync[i], - info.codecSpecific.VP8.layerSync); - EXPECT_EQ(expected_layer_sync[i], tl_config.layer_sync); - timestamp += 3000; - } -} - -TEST_F(TemporalLayersTest, SearchOrder) { - constexpr int kNumLayers = 3; - ScopedFieldTrials field_trial("WebRTC-UseShortVP8TL3Pattern/Enabled/"); - DefaultTemporalLayers tl(kNumLayers); - DefaultTemporalLayersChecker checker(kNumLayers); - tl.OnRatesUpdated(0, - GetTemporalLayerRates(kDefaultBytesPerFrame, - kDefaultFramerate, kNumLayers), - kDefaultFramerate); - tl.UpdateConfiguration(0); - - // Use a repeating pattern of tl 0, 2, 1, 2. - // Tl 0, 1, 2 update last, golden, altref respectively. - - // Start with a key-frame. tl_config flags can be ignored. - uint32_t timestamp = 0; - Vp8FrameConfig tl_config = tl.NextFrameConfig(0, timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, true, kDefaultQp, - IgnoredCodecSpecificInfo()); - - // TL2 frame. First one only references TL0. Updates altref. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - EXPECT_EQ(tl_config.first_reference, Vp8BufferReference::kLast); - EXPECT_EQ(tl_config.second_reference, Vp8BufferReference::kNone); - - // TL1 frame. Can only reference TL0. Updated golden. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - EXPECT_EQ(tl_config.first_reference, Vp8BufferReference::kLast); - EXPECT_EQ(tl_config.second_reference, Vp8BufferReference::kNone); - - // TL2 frame. Can reference all three buffers. Golden was the last to be - // updated, the next to last was altref. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - EXPECT_EQ(tl_config.first_reference, Vp8BufferReference::kGolden); - EXPECT_EQ(tl_config.second_reference, Vp8BufferReference::kAltref); -} - -TEST_F(TemporalLayersTest, SearchOrderWithDrop) { - constexpr int kNumLayers = 3; - ScopedFieldTrials field_trial("WebRTC-UseShortVP8TL3Pattern/Enabled/"); - DefaultTemporalLayers tl(kNumLayers); - DefaultTemporalLayersChecker checker(kNumLayers); - tl.OnRatesUpdated(0, - GetTemporalLayerRates(kDefaultBytesPerFrame, - kDefaultFramerate, kNumLayers), - kDefaultFramerate); - tl.UpdateConfiguration(0); - - // Use a repeating pattern of tl 0, 2, 1, 2. - // Tl 0, 1, 2 update last, golden, altref respectively. - - // Start with a key-frame. tl_config flags can be ignored. - uint32_t timestamp = 0; - Vp8FrameConfig tl_config = tl.NextFrameConfig(0, timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, true, kDefaultQp, - IgnoredCodecSpecificInfo()); - - // TL2 frame. First one only references TL0. Updates altref. 
- tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - EXPECT_EQ(tl_config.first_reference, Vp8BufferReference::kLast); - EXPECT_EQ(tl_config.second_reference, Vp8BufferReference::kNone); - - // Dropped TL1 frame. Can only reference TL0. Should have updated golden. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, 0, false, 0, nullptr); - - // TL2 frame. Can normally reference all three buffers, but golden has not - // been populated this cycle. Altref was last to be updated, before that last. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - EXPECT_EQ(tl_config.first_reference, Vp8BufferReference::kAltref); - EXPECT_EQ(tl_config.second_reference, Vp8BufferReference::kLast); -} - -TEST_F(TemporalLayersTest, DoesNotReferenceDroppedFrames) { - constexpr int kNumLayers = 3; - // Use a repeating pattern of tl 0, 2, 1, 2. - // Tl 0, 1, 2 update last, golden, altref respectively. - ScopedFieldTrials field_trial("WebRTC-UseShortVP8TL3Pattern/Enabled/"); - DefaultTemporalLayers tl(kNumLayers); - DefaultTemporalLayersChecker checker(kNumLayers); - tl.OnRatesUpdated(0, - GetTemporalLayerRates(kDefaultBytesPerFrame, - kDefaultFramerate, kNumLayers), - kDefaultFramerate); - tl.UpdateConfiguration(0); - - // Start with a keyframe. - uint32_t timestamp = 0; - Vp8FrameConfig tl_config = tl.NextFrameConfig(0, timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, true, kDefaultQp, - IgnoredCodecSpecificInfo()); - - // Dropped TL2 frame. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, 0, false, 0, nullptr); - - // Dropped TL1 frame. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, 0, false, 0, nullptr); - - // TL2 frame. Can reference all three buffers, valid since golden and altref - // both contain the last keyframe. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - EXPECT_TRUE(tl_config.last_buffer_flags & BufferFlags::kReference); - EXPECT_TRUE(tl_config.golden_buffer_flags & BufferFlags::kReference); - EXPECT_TRUE(tl_config.arf_buffer_flags & BufferFlags::kReference); - - // Restart of cycle! - - // TL0 base layer frame, updating and referencing last. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - - // TL2 frame, updating altref. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - - // TL1 frame, updating golden. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - - // TL2 frame. Can still reference all buffer since they have been update this - // cycle. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - EXPECT_TRUE(tl_config.last_buffer_flags & BufferFlags::kReference); - EXPECT_TRUE(tl_config.golden_buffer_flags & BufferFlags::kReference); - EXPECT_TRUE(tl_config.arf_buffer_flags & BufferFlags::kReference); - - // Restart of cycle! 
- - // TL0 base layer frame, updating and referencing last. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - - // Dropped TL2 frame. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, 0, false, 0, nullptr); - - // Dropped TL1 frame. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, 0, false, 0, nullptr); - - // TL2 frame. This time golden and altref contain data from the previous cycle - // and cannot be referenced. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - EXPECT_TRUE(tl_config.last_buffer_flags & BufferFlags::kReference); - EXPECT_FALSE(tl_config.golden_buffer_flags & BufferFlags::kReference); - EXPECT_FALSE(tl_config.arf_buffer_flags & BufferFlags::kReference); -} - TEST_F(TemporalLayersTest, DoesNotReferenceUnlessGuaranteedToExist) { constexpr int kNumLayers = 3; // Use a repeating pattern of tl 0, 2, 1, 2. @@ -509,69 +302,6 @@ TEST_F(TemporalLayersTest, DoesNotReferenceUnlessGuaranteedToExist) { EXPECT_FALSE(tl_config.arf_buffer_flags & BufferFlags::kReference); } -TEST_F(TemporalLayersTest, DoesNotReferenceUnlessGuaranteedToExistLongDelay) { - constexpr int kNumLayers = 3; - // Use a repeating pattern of tl 0, 2, 1, 2. - // Tl 0, 1 updates last, golden, altref respectively. - ScopedFieldTrials field_trial("WebRTC-UseShortVP8TL3Pattern/Enabled/"); - DefaultTemporalLayers tl(kNumLayers); - DefaultTemporalLayersChecker checker(kNumLayers); - tl.OnRatesUpdated(0, - GetTemporalLayerRates(kDefaultBytesPerFrame, - kDefaultFramerate, kNumLayers), - kDefaultFramerate); - tl.UpdateConfiguration(0); - - // Start with a keyframe. - uint32_t timestamp = 0; - Vp8FrameConfig tl_config = tl.NextFrameConfig(0, timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, true, kDefaultQp, - IgnoredCodecSpecificInfo()); - - // Do a full cycle of the pattern. - for (int i = 0; i < 3; ++i) { - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - } - - // TL0 base layer frame, starting the cycle over. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - - // TL2 frame. - tl_config = tl.NextFrameConfig(0, ++timestamp); - tl.OnEncodeDone(0, timestamp, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - - // Encoder has a hiccup and builds a queue, so frame encoding is delayed. - // Encoded, but delayed frames in TL 1, 2. - tl_config = tl.NextFrameConfig(0, timestamp + 1); - tl_config = tl.NextFrameConfig(0, timestamp + 2); - - // Restart of the pattern! - - // Encoded, but delayed frames in TL 2, 1. - tl_config = tl.NextFrameConfig(0, timestamp + 3); - tl_config = tl.NextFrameConfig(0, timestamp + 4); - - // TL1 frame from last cycle is ready. - tl.OnEncodeDone(0, timestamp + 1, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - // TL2 frame from last cycle is ready. - tl.OnEncodeDone(0, timestamp + 2, kDefaultBytesPerFrame, false, kDefaultQp, - IgnoredCodecSpecificInfo()); - - // TL2 frame, that should be referencing all buffers, but altref and golden - // haven not been updated this cycle. (Don't be fooled by the late frames from - // the last cycle!) 
- tl_config = tl.NextFrameConfig(0, timestamp + 5); - EXPECT_TRUE(tl_config.last_buffer_flags & BufferFlags::kReference); - EXPECT_FALSE(tl_config.golden_buffer_flags & BufferFlags::kReference); - EXPECT_FALSE(tl_config.arf_buffer_flags & BufferFlags::kReference); -} - TEST_F(TemporalLayersTest, KeyFrame) { constexpr int kNumLayers = 3; DefaultTemporalLayers tl(kNumLayers); diff --git a/modules/video_coding/codecs/vp8/include/vp8.h b/modules/video_coding/codecs/vp8/include/vp8.h index 2fc647874f..3fdffa08f6 100644 --- a/modules/video_coding/codecs/vp8/include/vp8.h +++ b/modules/video_coding/codecs/vp8/include/vp8.h @@ -14,36 +14,23 @@ #include #include +#include "absl/base/nullability.h" +#include "api/environment/environment.h" +#include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/vp8_frame_buffer_controller.h" -#include "modules/video_coding/include/video_codec_interface.h" namespace webrtc { -// TODO(brandtr): Move these interfaces to the api/ folder. -class VP8Encoder { - public: - struct Settings { - // Allows for overriding the Vp8FrameBufferController used by the encoder. - // If unset, a default Vp8FrameBufferController will be instantiated - // internally. - std::unique_ptr - frame_buffer_controller_factory = nullptr; - - // Allows for overriding the resolution/bitrate limits exposed through - // VideoEncoder::GetEncoderInfo(). No override is done if empty. - std::vector - resolution_bitrate_limits = {}; - }; - - static std::unique_ptr Create(); - static std::unique_ptr Create(Settings settings); +struct Vp8EncoderSettings { + // Allows for overriding the resolution/bitrate limits exposed through + // VideoEncoder::GetEncoderInfo(). No override is done if empty. + std::vector resolution_bitrate_limits; }; +absl_nonnull std::unique_ptr CreateVp8Encoder( + const Environment& env, + Vp8EncoderSettings settings = {}); -class VP8Decoder { - public: - static std::unique_ptr Create(); -}; +std::unique_ptr CreateVp8Decoder(const Environment& env); } // namespace webrtc diff --git a/modules/video_coding/codecs/vp8/include/vp8_globals.h b/modules/video_coding/codecs/vp8/include/vp8_globals.h index 8b772f8666..8bd99d4bae 100644 --- a/modules/video_coding/codecs/vp8/include/vp8_globals.h +++ b/modules/video_coding/codecs/vp8/include/vp8_globals.h @@ -14,6 +14,8 @@ #ifndef MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_GLOBALS_H_ #define MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_GLOBALS_H_ +#include + #include "modules/video_coding/codecs/interface/common_constants.h" namespace webrtc { diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc index 01cedb5316..3eef8e5c11 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc @@ -14,26 +14,32 @@ #include #include +#include #include +#include #include -#include "absl/types/optional.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" #include "api/scoped_refptr.h" +#include "api/video/color_space.h" +#include "api/video/encoded_image.h" #include "api/video/i420_buffer.h" #include "api/video/video_frame.h" #include "api/video/video_frame_buffer.h" -#include "api/video/video_rotation.h" +#include "api/video/video_frame_type.h" +#include "api/video_codecs/video_decoder.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/checks.h" #include 
"rtc_base/numerics/exp_filter.h" #include "rtc_base/time_utils.h" -#include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" #include "third_party/libyuv/include/libyuv/convert.h" #include "vpx/vp8.h" #include "vpx/vp8dx.h" #include "vpx/vpx_decoder.h" +#include "vpx/vpx_image.h" namespace webrtc { namespace { @@ -52,16 +58,16 @@ constexpr bool kIsArm = true; constexpr bool kIsArm = false; #endif -absl::optional DefaultDeblockParams() { +std::optional DefaultDeblockParams() { return LibvpxVp8Decoder::DeblockParams(/*max_level=*/8, /*degrade_qp=*/60, /*min_qp=*/30); } -absl::optional -GetPostProcParamsFromFieldTrialGroup() { - std::string group = webrtc::field_trial::FindFullName( - kIsArm ? kVp8PostProcArmFieldTrial : kVp8PostProcFieldTrial); +std::optional +GetPostProcParamsFromFieldTrialGroup(const FieldTrialsView& field_trials) { + std::string group = field_trials.Lookup(kIsArm ? kVp8PostProcArmFieldTrial + : kVp8PostProcFieldTrial); if (group.empty()) { return DefaultDeblockParams(); } @@ -85,22 +91,21 @@ GetPostProcParamsFromFieldTrialGroup() { } // namespace -std::unique_ptr VP8Decoder::Create() { - return std::make_unique(); +std::unique_ptr CreateVp8Decoder(const Environment& env) { + return std::make_unique(env); } class LibvpxVp8Decoder::QpSmoother { public: - QpSmoother() : last_sample_ms_(rtc::TimeMillis()), smoother_(kAlpha) {} + QpSmoother() : last_sample_ms_(TimeMillis()), smoother_(kAlpha) {} int GetAvg() const { float value = smoother_.filtered(); - return (value == rtc::ExpFilter::kValueUndefined) ? 0 - : static_cast(value); + return (value == ExpFilter::kValueUndefined) ? 0 : static_cast(value); } void Add(float sample) { - int64_t now_ms = rtc::TimeMillis(); + int64_t now_ms = TimeMillis(); smoother_.Apply(static_cast(now_ms - last_sample_ms_), sample); last_sample_ms_ = now_ms; } @@ -110,12 +115,12 @@ class LibvpxVp8Decoder::QpSmoother { private: const float kAlpha = 0.95f; int64_t last_sample_ms_; - rtc::ExpFilter smoother_; + ExpFilter smoother_; }; -LibvpxVp8Decoder::LibvpxVp8Decoder() +LibvpxVp8Decoder::LibvpxVp8Decoder(const Environment& env) : use_postproc_( - kIsArm ? webrtc::field_trial::IsEnabled(kVp8PostProcArmFieldTrial) + kIsArm ? env.field_trials().IsEnabled(kVp8PostProcArmFieldTrial) : true), buffer_pool_(false, 300 /* max_number_of_buffers*/), decode_complete_callback_(NULL), @@ -124,8 +129,9 @@ LibvpxVp8Decoder::LibvpxVp8Decoder() last_frame_width_(0), last_frame_height_(0), key_frame_required_(true), - deblock_params_(use_postproc_ ? GetPostProcParamsFromFieldTrialGroup() - : absl::nullopt), + deblock_params_(use_postproc_ ? GetPostProcParamsFromFieldTrialGroup( + env.field_trials()) + : std::nullopt), qp_smoother_(use_postproc_ ? new QpSmoother() : nullptr) {} LibvpxVp8Decoder::~LibvpxVp8Decoder() { @@ -158,7 +164,7 @@ bool LibvpxVp8Decoder::Configure(const Settings& settings) { // Always start with a complete key frame. 
key_frame_required_ = true; - if (absl::optional buffer_pool_size = settings.buffer_pool_size()) { + if (std::optional buffer_pool_size = settings.buffer_pool_size()) { if (!buffer_pool_.Resize(*buffer_pool_size)) { return false; } @@ -249,8 +255,8 @@ int LibvpxVp8Decoder::Decode(const EncodedImage& input_image, vpx_codec_err_t vpx_ret = vpx_codec_control(decoder_, VPXD_GET_LAST_QUANTIZER, &qp); RTC_DCHECK_EQ(vpx_ret, VPX_CODEC_OK); - int ret = - ReturnFrame(img, input_image.Timestamp(), qp, input_image.ColorSpace()); + int ret = ReturnFrame(img, input_image.RtpTimestamp(), qp, + input_image.ColorSpace()); if (ret != 0) { return ret; } @@ -276,9 +282,9 @@ int LibvpxVp8Decoder::ReturnFrame( last_frame_width_ = img->d_w; last_frame_height_ = img->d_h; // Allocate memory for decoded image. - rtc::scoped_refptr buffer; + scoped_refptr buffer; - rtc::scoped_refptr i420_buffer = + scoped_refptr i420_buffer = buffer_pool_.CreateI420Buffer(img->d_w, img->d_h); buffer = i420_buffer; if (i420_buffer.get()) { @@ -300,10 +306,10 @@ int LibvpxVp8Decoder::ReturnFrame( VideoFrame decoded_image = VideoFrame::Builder() .set_video_frame_buffer(buffer) - .set_timestamp_rtp(timestamp) + .set_rtp_timestamp(timestamp) .set_color_space(explicit_color_space) .build(); - decode_complete_callback_->Decoded(decoded_image, absl::nullopt, qp); + decode_complete_callback_->Decoded(decoded_image, std::nullopt, qp); return WEBRTC_VIDEO_CODEC_OK; } diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h index 74f4dc7c89..58c209de99 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h @@ -11,22 +11,23 @@ #ifndef MODULES_VIDEO_CODING_CODECS_VP8_LIBVPX_VP8_DECODER_H_ #define MODULES_VIDEO_CODING_CODECS_VP8_LIBVPX_VP8_DECODER_H_ +#include #include +#include -#include "absl/types/optional.h" +#include "api/environment/environment.h" +#include "api/video/color_space.h" #include "api/video/encoded_image.h" #include "api/video_codecs/video_decoder.h" #include "common_video/include/video_frame_buffer_pool.h" -#include "modules/video_coding/codecs/vp8/include/vp8.h" -#include "modules/video_coding/include/video_codec_interface.h" -#include "vpx/vp8dx.h" #include "vpx/vpx_decoder.h" +#include "vpx/vpx_image.h" namespace webrtc { class LibvpxVp8Decoder : public VideoDecoder { public: - LibvpxVp8Decoder(); + explicit LibvpxVp8Decoder(const Environment& env); ~LibvpxVp8Decoder() override; bool Configure(const Settings& settings) override; @@ -56,6 +57,7 @@ class LibvpxVp8Decoder : public VideoDecoder { private: class QpSmoother; + int ReturnFrame(const vpx_image_t* img, uint32_t timeStamp, int qp, @@ -69,7 +71,7 @@ class LibvpxVp8Decoder : public VideoDecoder { int last_frame_width_; int last_frame_height_; bool key_frame_required_; - const absl::optional deblock_params_; + const std::optional deblock_params_; const std::unique_ptr qp_smoother_; }; diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc index 5457402542..1ec4ab985b 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc @@ -13,36 +13,62 @@ #include #include +#include #include #include #include +#include #include #include #include #include "absl/algorithm/container.h" +#include "absl/container/inlined_vector.h" +#include "api/environment/environment.h" +#include "api/fec_controller_override.h" 
+#include "api/field_trials_view.h" #include "api/scoped_refptr.h" -#include "api/video/video_content_type.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/encoded_image.h" +#include "api/video/render_resolution.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" #include "api/video/video_frame_buffer.h" -#include "api/video/video_timing.h" +#include "api/video/video_frame_type.h" #include "api/video_codecs/scalability_mode.h" -#include "api/video_codecs/vp8_temporal_layers.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/vp8_frame_buffer_controller.h" +#include "api/video_codecs/vp8_frame_config.h" #include "api/video_codecs/vp8_temporal_layers_factory.h" #include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/interface/libvpx_interface.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/codecs/vp8/vp8_scalability.h" +#include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_error_codes.h" -#include "modules/video_coding/svc/scalability_mode_util.h" +#include "modules/video_coding/utility/corruption_detection_settings_generator.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "modules/video_coding/utility/simulcast_utility.h" +#include "modules/video_coding/utility/vp8_constants.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" -#include "rtc_base/experiments/field_trial_units.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/trace_event.h" -#include "system_wrappers/include/field_trial.h" -#include "third_party/libyuv/include/libyuv/scale.h" #include "vpx/vp8cx.h" +#include "vpx/vpx_codec.h" +#include "vpx/vpx_encoder.h" +#include "vpx/vpx_image.h" + +#if (defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64)) && \ + (defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)) +#define MOBILE_ARM +#endif namespace webrtc { namespace { @@ -52,6 +78,11 @@ constexpr char kVP8IosMaxNumberOfThreadFieldTrial[] = constexpr char kVP8IosMaxNumberOfThreadFieldTrialParameter[] = "max_thread"; #endif +namespace variable_framerate_screenshare { +static constexpr double kMinFps = 5.0; +static constexpr int kUndershootPct = 30; +} // namespace variable_framerate_screenshare + constexpr char kVp8ForcePartitionResilience[] = "WebRTC-VP8-ForcePartitionResilience"; @@ -59,6 +90,7 @@ constexpr char kVp8ForcePartitionResilience[] = // bitstream range of [0, 127] and not the user-level range of [0,63]. constexpr int kLowVp8QpThreshold = 29; constexpr int kHighVp8QpThreshold = 95; +constexpr int kScreenshareMinQp = 15; constexpr int kTokenPartitions = VP8_ONE_TOKENPARTITION; constexpr uint32_t kVp832ByteAlign = 32u; @@ -66,6 +98,13 @@ constexpr uint32_t kVp832ByteAlign = 32u; constexpr int kRtpTicksPerSecond = 90000; constexpr int kRtpTicksPerMs = kRtpTicksPerSecond / 1000; +// If internal frame dropping is enabled, force the encoder to output a frame +// on an encode request after this timeout even if this causes some +// bitrate overshoot compared to the nominal target. 
Otherwise we risk the
+// receivers incorrectly identifying the gap as a fault and they may needlessly
+// send keyframe requests to recover.
+constexpr TimeDelta kDefaultMaxFrameDropInterval = TimeDelta::Seconds(2);
+
 // VP8 denoiser states.
 enum denoiserState : uint32_t {
   kDenoiserOff,
@@ -100,8 +139,8 @@ static_assert(Vp8EncoderConfig::TemporalLayerConfig::kMaxLayers ==
 // Allow a newer value to override a current value only if the new value
 // is set.
 template <typename T>
-bool MaybeSetNewValue(const absl::optional<T>& new_value,
-                      absl::optional<T>* base_value) {
+bool MaybeSetNewValue(const std::optional<T>& new_value,
+                      std::optional<T>* base_value) {
   if (new_value.has_value() && new_value != *base_value) {
     *base_value = new_value;
     return true;
@@ -210,17 +249,53 @@ void SetRawImagePlanes(vpx_image_t* raw_image, VideoFrameBuffer* buffer) {
   }
 }
 
-}  // namespace
+// Helper class used to temporarily change the frame drop threshold for an
+// encoder. Returns the setting to its previous value upon destruction.
+class FrameDropConfigOverride {
+ public:
+  FrameDropConfigOverride(LibvpxInterface* libvpx,
+                          vpx_codec_ctx_t* encoder,
+                          vpx_codec_enc_cfg_t* config,
+                          uint32_t temporary_frame_drop_threshold)
+      : libvpx_(libvpx),
+        encoder_(encoder),
+        config_(config),
+        original_frame_drop_threshold_(config->rc_dropframe_thresh) {
+    config_->rc_dropframe_thresh = temporary_frame_drop_threshold;
+    libvpx_->codec_enc_config_set(encoder_, config_);
+  }
+  ~FrameDropConfigOverride() {
+    config_->rc_dropframe_thresh = original_frame_drop_threshold_;
+    libvpx_->codec_enc_config_set(encoder_, config_);
+  }
+
+ private:
+  LibvpxInterface* const libvpx_;
+  vpx_codec_ctx_t* const encoder_;
+  vpx_codec_enc_cfg_t* const config_;
+  const uint32_t original_frame_drop_threshold_;
+};
 
-std::unique_ptr<VideoEncoder> VP8Encoder::Create() {
-  return std::make_unique<LibvpxVp8Encoder>(LibvpxInterface::Create(),
-                                            VP8Encoder::Settings());
+std::optional<TimeDelta> ParseFrameDropInterval(
+    const FieldTrialsView& field_trials) {
+  FieldTrialFlag disabled = FieldTrialFlag("Disabled");
+  FieldTrialParameter<TimeDelta> interval("interval",
+                                          kDefaultMaxFrameDropInterval);
+  ParseFieldTrial({&disabled, &interval},
+                  field_trials.Lookup("WebRTC-VP8-MaxFrameInterval"));
+  if (disabled.Get()) {
+    // Kill switch set, don't use any max frame interval.
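+    // (The kill switch is the "Disabled" flag, i.e. a field trial string like
+    // "WebRTC-VP8-MaxFrameInterval/Disabled/"; no frame is forced in that
+    // case.)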
+ return std::nullopt; + } + return interval.Get(); } -std::unique_ptr VP8Encoder::Create( - VP8Encoder::Settings settings) { - return std::make_unique(LibvpxInterface::Create(), - std::move(settings)); +} // namespace + +std::unique_ptr CreateVp8Encoder(const Environment& env, + Vp8EncoderSettings settings) { + return std::make_unique(env, std::move(settings), + LibvpxInterface::Create()); } vpx_enc_frame_flags_t LibvpxVp8Encoder::EncodeFlags( @@ -252,17 +327,21 @@ vpx_enc_frame_flags_t LibvpxVp8Encoder::EncodeFlags( return flags; } -LibvpxVp8Encoder::LibvpxVp8Encoder(std::unique_ptr interface, - VP8Encoder::Settings settings) - : libvpx_(std::move(interface)), - rate_control_settings_(RateControlSettings::ParseFromFieldTrials()), - frame_buffer_controller_factory_( - std::move(settings.frame_buffer_controller_factory)), +LibvpxVp8Encoder::LibvpxVp8Encoder(const Environment& env, + Vp8EncoderSettings settings, + std::unique_ptr interface) + : env_(env), + libvpx_(std::move(interface)), + rate_control_settings_(env_.field_trials()), resolution_bitrate_limits_(std::move(settings.resolution_bitrate_limits)), key_frame_request_(kMaxSimulcastStreams, false), - variable_framerate_experiment_(ParseVariableFramerateConfig( - "WebRTC-VP8VariableFramerateScreenshare")), - framerate_controller_(variable_framerate_experiment_.framerate_limit) { + last_encoder_output_time_(kMaxSimulcastStreams, + Timestamp::MinusInfinity()), + framerate_controller_(variable_framerate_screenshare::kMinFps), + encoder_info_override_(env_.field_trials()), + max_frame_drop_interval_(ParseFrameDropInterval(env_.field_trials())), + android_specific_threading_settings_(env_.field_trials().IsEnabled( + "WebRTC-LibvpxVp8Encoder-AndroidSpecificThreadingSettings")) { // TODO(eladalon/ilnik): These reservations might be wasting memory. // InitEncode() is resizing to the actual size, which might be smaller. raw_images_.reserve(kMaxSimulcastStreams); @@ -437,7 +516,7 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - if (absl::optional scalability_mode = + if (std::optional scalability_mode = inst->GetScalabilityMode(); scalability_mode.has_value() && !VP8SupportsScalabilityMode(*scalability_mode)) { @@ -474,14 +553,9 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, } RTC_DCHECK(!frame_buffer_controller_); - if (frame_buffer_controller_factory_) { - frame_buffer_controller_ = frame_buffer_controller_factory_->Create( - *inst, settings, fec_controller_override_); - } else { - Vp8TemporalLayersFactory factory; - frame_buffer_controller_ = - factory.Create(*inst, settings, fec_controller_override_); - } + Vp8TemporalLayersFactory factory; + frame_buffer_controller_ = + factory.Create(*inst, settings, fec_controller_override_); RTC_DCHECK(frame_buffer_controller_); number_of_cores_ = settings.number_of_cores; @@ -505,6 +579,8 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, send_stream_[0] = true; // For non-simulcast case. cpu_speed_.resize(number_of_streams); std::fill(key_frame_request_.begin(), key_frame_request_.end(), false); + std::fill(last_encoder_output_time_.begin(), last_encoder_output_time_.end(), + Timestamp::MinusInfinity()); int idx = number_of_streams - 1; for (int i = 0; i < (number_of_streams - 1); ++i, --idx) { @@ -538,7 +614,7 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, // Override the error resilience mode if this is not simulcast, but we are // using temporal layers. 
-  if (field_trial::IsEnabled(kVp8ForcePartitionResilience) &&
+  if (env_.field_trials().IsEnabled(kVp8ForcePartitionResilience) &&
       (number_of_streams == 1) &&
       (SimulcastUtility::NumberOfTemporalLayers(*inst, 0) > 1)) {
     RTC_LOG(LS_INFO) << "Overriding g_error_resilient from "
@@ -625,7 +701,7 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst,
   // Note the order we use is different from webm, we have lowest resolution
   // at position 0 and they have highest resolution at position 0.
   const size_t stream_idx_cfg_0 = encoders_.size() - 1;
-  SimulcastRateAllocator init_allocator(codec_);
+  SimulcastRateAllocator init_allocator(env_, codec_);
   VideoBitrateAllocation allocation =
       init_allocator.Allocate(VideoBitrateAllocationParameters(
           inst->startBitrate * 1000, inst->maxFramerate));
@@ -688,22 +764,39 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst,
     UpdateVpxConfiguration(stream_idx);
   }
 
+  corruption_detection_settings_generator_ =
+      std::make_unique<CorruptionDetectionSettingsGenerator>(
+          CorruptionDetectionSettingsGenerator::ExponentialFunctionParameters{
+              .scale = 0.006,
+              .exponent_factor = 0.01857465,
+              .exponent_offset = -4.26470513},
+          CorruptionDetectionSettingsGenerator::ErrorThresholds{.luma = 5,
+                                                                .chroma = 6},
+          // On large changes, increase error threshold by one and std_dev
+          // by 2.0. Trigger on qp changes larger than 30, and fade down the
+          // adjusted value over 4 * num_temporal_layers to allow the base layer
+          // to converge somewhat. Set a minimum filter size of 1.25 since some
+          // outlier pixels deviate a bit from truth even at very low QP,
+          // seemingly by bleeding into neighbours.
+          webrtc::CorruptionDetectionSettingsGenerator::TransientParameters{
+              .max_qp = 127,
+              .keyframe_threshold_offset = 1,
+              .keyframe_stddev_offset = 2.0,
+              .keyframe_offset_duration_frames =
+                  std::max(1,
+                           SimulcastUtility::NumberOfTemporalLayers(*inst, 0)) *
+                  4,
+              .large_qp_change_threshold = 30,
+              .std_dev_lower_bound = 1.25});
+
   return InitAndSetControlSettings();
 }
 
 int LibvpxVp8Encoder::GetCpuSpeed(int width, int height) {
-#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || \
-    defined(WEBRTC_ANDROID)
+#ifdef MOBILE_ARM
   // On mobile platform, use a lower speed setting for lower resolutions for
   // CPUs with 4 or more cores.
   RTC_DCHECK_GT(number_of_cores_, 0);
-  if (experimental_cpu_speed_config_arm_
-          .GetValue(width * height, number_of_cores_)
-          .has_value()) {
-    return experimental_cpu_speed_config_arm_
-        .GetValue(width * height, number_of_cores_)
-        .value();
-  }
   if (number_of_cores_ <= 3)
     return -12;
@@ -727,22 +820,23 @@ int LibvpxVp8Encoder::GetCpuSpeed(int width, int height) {
 
 int LibvpxVp8Encoder::NumberOfThreads(int width, int height, int cpus) {
 #if defined(WEBRTC_ANDROID)
-  if (width * height >= 320 * 180) {
-    if (cpus >= 4) {
-      // 3 threads for CPUs with 4 and more cores since most of times only 4
-      // cores will be active.
-      return 3;
-    } else if (cpus == 3 || cpus == 2) {
-      return 2;
-    } else {
-      return 1;
+  if (android_specific_threading_settings_) {
+    if (width * height >= 320 * 180) {
+      if (cpus >= 4) {
+        // 3 threads for CPUs with 4 and more cores since most of the time
+        // only 4 cores will be active.
+ return 3; + } else if (cpus == 3 || cpus == 2) { + return 2; + } else { + return 1; + } } + return 1; } - return 1; -#else -#if defined(WEBRTC_IOS) +#elif defined(WEBRTC_IOS) std::string trial_string = - field_trial::FindFullName(kVP8IosMaxNumberOfThreadFieldTrial); + env_.field_trials().Lookup(kVP8IosMaxNumberOfThreadFieldTrial); FieldTrialParameter max_thread_number( kVP8IosMaxNumberOfThreadFieldTrialParameter, 0); ParseFieldTrial({&max_thread_number}, trial_string); @@ -767,11 +861,10 @@ int LibvpxVp8Encoder::NumberOfThreads(int width, int height, int cpus) { return 3; } return 2; - } else { - // 1 thread for VGA or less. - return 1; } -#endif + + // 1 thread for VGA or less. + return 1; } int LibvpxVp8Encoder::InitAndSetControlSettings() { @@ -798,12 +891,10 @@ int LibvpxVp8Encoder::InitAndSetControlSettings() { // for getting the denoised frame from the encoder and using that // when encoding lower resolution streams. Would it work with the // multi-res encoding feature? +#ifdef MOBILE_ARM denoiserState denoiser_state = kDenoiserOnYOnly; -#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || \ - defined(WEBRTC_ANDROID) - denoiser_state = kDenoiserOnYOnly; #else - denoiser_state = kDenoiserOnAdaptive; + denoiserState denoiser_state = kDenoiserOnAdaptive; #endif libvpx_->codec_control( &encoders_[0], VP8E_SET_NOISE_SENSITIVITY, @@ -892,9 +983,7 @@ size_t LibvpxVp8Encoder::SteadyStateSize(int sid, int tid) { return 0; return static_cast( bitrate_bps / (8 * fps) * - (100 - - variable_framerate_experiment_.steady_state_undershoot_percentage) / - 100 + + (100 - variable_framerate_screenshare::kUndershootPct) / 100 + 0.5); } @@ -953,13 +1042,31 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame, } } + // Check if any encoder risks timing out and force a frame in that case. 
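+  // Each entry in `frame_drop_overrides_` is an RAII FrameDropConfigOverride:
+  // constructing it sets rc_dropframe_thresh to 0 for that encoder (so this
+  // Encode() call cannot drop the frame) and pushes the config to libvpx; the
+  // original threshold is restored when the vector goes out of scope at the
+  // end of this call. The timeout defaults to kDefaultMaxFrameDropInterval
+  // (2 seconds) and can be tuned or disabled via the
+  // "WebRTC-VP8-MaxFrameInterval" field trial (e.g. "interval:1s").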
+ std::vector frame_drop_overrides_; + if (max_frame_drop_interval_.has_value()) { + Timestamp now = Timestamp::Micros(frame.timestamp_us()); + for (size_t i = 0; i < send_stream_.size(); ++i) { + if (send_stream_[i] && FrameDropThreshold(i) > 0 && + last_encoder_output_time_[i].IsFinite() && + (now - last_encoder_output_time_[i]) >= *max_frame_drop_interval_) { + RTC_LOG(LS_INFO) << "Forcing frame to avoid timeout for stream " << i; + size_t encoder_idx = encoders_.size() - 1 - i; + frame_drop_overrides_.emplace_back(libvpx_.get(), + &encoders_[encoder_idx], + &vpx_configs_[encoder_idx], 0); + } + } + } + if (frame.update_rect().IsEmpty() && num_steady_state_frames_ >= 3 && !key_frame_requested) { - if (variable_framerate_experiment_.enabled && - framerate_controller_.DropFrame(frame.timestamp() / kRtpTicksPerMs)) { + if (framerate_controller_.DropFrame(frame.rtp_timestamp() / + kRtpTicksPerMs) && + frame_drop_overrides_.empty()) { return WEBRTC_VIDEO_CODEC_OK; } - framerate_controller_.AddFrame(frame.timestamp() / kRtpTicksPerMs); + framerate_controller_.AddFrame(frame.rtp_timestamp() / kRtpTicksPerMs); } bool send_key_frame = key_frame_requested; @@ -968,7 +1075,7 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame, Vp8FrameConfig tl_configs[kMaxSimulcastStreams]; for (size_t i = 0; i < encoders_.size(); ++i) { tl_configs[i] = - frame_buffer_controller_->NextFrameConfig(i, frame.timestamp()); + frame_buffer_controller_->NextFrameConfig(i, frame.rtp_timestamp()); send_key_frame |= tl_configs[i].IntraFrame(); drop_frame |= tl_configs[i].drop_frame; RTC_DCHECK(i == 0 || @@ -989,7 +1096,7 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame, // Because `raw_images_` are set to hold pointers to the prepared buffers, we // need to keep these buffers alive through reference counting until after // encoding is complete. - std::vector> prepared_buffers = + std::vector> prepared_buffers = PrepareBuffers(frame.video_frame_buffer()); if (prepared_buffers.empty()) { return WEBRTC_VIDEO_CODEC_ERROR; @@ -997,7 +1104,7 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame, struct CleanUpOnExit { explicit CleanUpOnExit( vpx_image_t* raw_image, - std::vector> prepared_buffers) + std::vector> prepared_buffers) : raw_image_(raw_image), prepared_buffers_(std::move(prepared_buffers)) {} ~CleanUpOnExit() { @@ -1006,7 +1113,7 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame, raw_image_->planes[VPX_PLANE_V] = nullptr; } vpx_image_t* raw_image_; - std::vector> prepared_buffers_; + std::vector> prepared_buffers_; } clean_up_on_exit(&raw_images_[0], std::move(prepared_buffers)); if (send_key_frame) { @@ -1095,7 +1202,7 @@ void LibvpxVp8Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, frame_buffer_controller_->OnEncodeDone(stream_idx, timestamp, encoded_images_[encoder_idx].size(), is_keyframe, qp, codec_specific); - if (is_keyframe && codec_specific->template_structure != absl::nullopt) { + if (is_keyframe && codec_specific->template_structure != std::nullopt) { // Number of resolutions must match number of spatial layers, VP8 structures // expected to use single spatial layer. 
Templates must be ordered by // spatial_id, so assumption there is exactly one spatial layer is same as @@ -1171,7 +1278,7 @@ int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image, encoded_images_[encoder_idx].set_size(encoded_pos); encoded_images_[encoder_idx].SetSimulcastIndex(stream_idx); PopulateCodecSpecific(&codec_specific, *pkt, stream_idx, encoder_idx, - input_image.timestamp()); + input_image.rtp_timestamp()); if (codec_specific.codecSpecific.VP8.temporalIdx != kNoTemporalIdx) { encoded_images_[encoder_idx].SetTemporalIndex( codec_specific.codecSpecific.VP8.temporalIdx); @@ -1179,9 +1286,9 @@ int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image, break; } } - encoded_images_[encoder_idx].SetTimestamp(input_image.timestamp()); - encoded_images_[encoder_idx].SetCaptureTimeIdentifier( - input_image.capture_time_identifier()); + encoded_images_[encoder_idx].SetRtpTimestamp(input_image.rtp_timestamp()); + encoded_images_[encoder_idx].SetPresentationTimestamp( + input_image.presentation_timestamp()); encoded_images_[encoder_idx].SetColorSpace(input_image.color_space()); encoded_images_[encoder_idx].SetRetransmissionAllowed( retransmission_allowed); @@ -1198,11 +1305,20 @@ int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image, libvpx_->codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER, &qp_128); encoded_images_[encoder_idx].qp_ = qp_128; + last_encoder_output_time_[stream_idx] = + Timestamp::Micros(input_image.timestamp_us()); + + encoded_images_[encoder_idx].set_corruption_detection_filter_settings( + corruption_detection_settings_generator_->OnFrame( + encoded_images_[encoder_idx].FrameType() == + VideoFrameType::kVideoFrameKey, + qp_128)); + encoded_complete_callback_->OnEncodedImage(encoded_images_[encoder_idx], &codec_specific); const size_t steady_state_size = SteadyStateSize( stream_idx, codec_specific.codecSpecific.VP8.temporalIdx); - if (qp_128 > variable_framerate_experiment_.steady_state_qp || + if (qp_128 > kVp8SteadyStateQpThreshold || encoded_images_[encoder_idx].size() > steady_state_size) { num_steady_state_frames_ = 0; } else { @@ -1214,7 +1330,7 @@ int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image, if (encoded_images_[encoder_idx].size() == 0) { // Dropped frame that will be re-encoded. 
frame_buffer_controller_->OnFrameDropped(stream_idx, - input_image.timestamp()); + input_image.rtp_timestamp()); } } } @@ -1279,13 +1395,17 @@ VideoEncoder::EncoderInfo LibvpxVp8Encoder::GetEncoderInfo() const { for (size_t ti = 0; ti < vpx_configs_[encoder_idx].ts_number_layers; ++ti) { RTC_DCHECK_GT(vpx_configs_[encoder_idx].ts_rate_decimator[ti], 0); - info.fps_allocation[si].push_back(rtc::saturated_cast( + info.fps_allocation[si].push_back(saturated_cast( EncoderInfo::kMaxFramerateFraction / vpx_configs_[encoder_idx].ts_rate_decimator[ti] + 0.5)); } } } + + if (codec_.mode == VideoCodecMode::kScreensharing) { + info.min_qp = kScreenshareMinQp; + } } return info; @@ -1322,15 +1442,15 @@ void LibvpxVp8Encoder::MaybeUpdatePixelFormat(vpx_img_fmt fmt) { } } -std::vector> -LibvpxVp8Encoder::PrepareBuffers(rtc::scoped_refptr buffer) { +std::vector> LibvpxVp8Encoder::PrepareBuffers( + scoped_refptr buffer) { RTC_DCHECK_EQ(buffer->width(), raw_images_[0].d_w); RTC_DCHECK_EQ(buffer->height(), raw_images_[0].d_h); absl::InlinedVector supported_formats = {VideoFrameBuffer::Type::kI420, VideoFrameBuffer::Type::kNV12}; - rtc::scoped_refptr mapped_buffer; + scoped_refptr mapped_buffer; if (buffer->type() != VideoFrameBuffer::Type::kNative) { // `buffer` is already mapped. mapped_buffer = buffer; @@ -1375,7 +1495,7 @@ LibvpxVp8Encoder::PrepareBuffers(rtc::scoped_refptr buffer) { // Prepare `raw_images_` from `mapped_buffer` and, if simulcast, scaled // versions of `buffer`. - std::vector> prepared_buffers; + std::vector> prepared_buffers; SetRawImagePlanes(&raw_images_[0], mapped_buffer.get()); prepared_buffers.push_back(mapped_buffer); for (size_t i = 1; i < encoders_.size(); ++i) { @@ -1425,22 +1545,4 @@ LibvpxVp8Encoder::PrepareBuffers(rtc::scoped_refptr buffer) { return prepared_buffers; } -// static -LibvpxVp8Encoder::VariableFramerateExperiment -LibvpxVp8Encoder::ParseVariableFramerateConfig(std::string group_name) { - FieldTrialFlag disabled = FieldTrialFlag("Disabled"); - FieldTrialParameter framerate_limit("min_fps", 5.0); - FieldTrialParameter qp("min_qp", 15); - FieldTrialParameter undershoot_percentage("undershoot", 30); - ParseFieldTrial({&disabled, &framerate_limit, &qp, &undershoot_percentage}, - field_trial::FindFullName(group_name)); - VariableFramerateExperiment config; - config.enabled = !disabled.Get(); - config.framerate_limit = framerate_limit.Get(); - config.steady_state_qp = qp.Get(); - config.steady_state_undershoot_percentage = undershoot_percentage.Get(); - - return config; -} - } // namespace webrtc diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h index 74477eac7e..02f36e8d15 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h @@ -11,33 +11,43 @@ #ifndef MODULES_VIDEO_CODING_CODECS_VP8_LIBVPX_VP8_ENCODER_H_ #define MODULES_VIDEO_CODING_CODECS_VP8_LIBVPX_VP8_ENCODER_H_ +#include +#include #include -#include +#include #include +#include "api/environment/environment.h" #include "api/fec_controller_override.h" +#include "api/scoped_refptr.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/encoded_image.h" #include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" +#include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/vp8_frame_buffer_controller.h" #include 
"api/video_codecs/vp8_frame_config.h" #include "modules/video_coding/codecs/interface/libvpx_interface.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/utility/corruption_detection_settings_generator.h" #include "modules/video_coding/utility/framerate_controller_deprecated.h" -#include "modules/video_coding/utility/vp8_constants.h" -#include "rtc_base/experiments/cpu_speed_experiment.h" #include "rtc_base/experiments/encoder_info_settings.h" #include "rtc_base/experiments/rate_control_settings.h" -#include "vpx/vp8cx.h" #include "vpx/vpx_encoder.h" +#include "vpx/vpx_image.h" namespace webrtc { class LibvpxVp8Encoder : public VideoEncoder { public: - LibvpxVp8Encoder(std::unique_ptr interface, - VP8Encoder::Settings settings); + LibvpxVp8Encoder(const Environment& env, + Vp8EncoderSettings settings, + std::unique_ptr interface); + ~LibvpxVp8Encoder() override; int Release() override; @@ -101,12 +111,12 @@ class LibvpxVp8Encoder : public VideoEncoder { // as a result, allowing the caller to keep references to them until after // encoding has finished. On failure to convert the buffer, an empty list is // returned. - std::vector> PrepareBuffers( - rtc::scoped_refptr buffer); + std::vector> PrepareBuffers( + scoped_refptr buffer); + const Environment env_; const std::unique_ptr libvpx_; - const CpuSpeedExperiment experimental_cpu_speed_config_arm_; const RateControlSettings rate_control_settings_; EncodedImageCallback* encoded_complete_callback_ = nullptr; @@ -118,8 +128,6 @@ class LibvpxVp8Encoder : public VideoEncoder { int number_of_cores_ = 0; uint32_t rc_max_intra_target_ = 0; int num_active_streams_ = 0; - const std::unique_ptr - frame_buffer_controller_factory_; std::unique_ptr frame_buffer_controller_; const std::vector resolution_bitrate_limits_; @@ -132,26 +140,21 @@ class LibvpxVp8Encoder : public VideoEncoder { std::vector vpx_configs_; std::vector config_overrides_; std::vector downsampling_factors_; + std::vector last_encoder_output_time_; - // Variable frame-rate screencast related fields and methods. - const struct VariableFramerateExperiment { - bool enabled = false; - // Framerate is limited to this value in steady state. - float framerate_limit = 5.0; - // This qp or below is considered a steady state. - int steady_state_qp = kVp8SteadyStateQpThreshold; - // Frames of at least this percentage below ideal for configured bitrate are - // considered in a steady state. 
- int steady_state_undershoot_percentage = 30; - } variable_framerate_experiment_; - static VariableFramerateExperiment ParseVariableFramerateConfig( - std::string group_name); FramerateControllerDeprecated framerate_controller_; int num_steady_state_frames_ = 0; FecControllerOverride* fec_controller_override_ = nullptr; const LibvpxVp8EncoderInfoSettings encoder_info_override_; + + std::optional max_frame_drop_interval_; + + bool android_specific_threading_settings_; + + std::unique_ptr + corruption_detection_settings_generator_; }; } // namespace webrtc diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_simulcast_test.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_simulcast_test.cc index 4ca3de20d5..a5ed2f63fd 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_simulcast_test.cc +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_simulcast_test.cc @@ -9,11 +9,16 @@ */ #include +#include +#include "api/environment/environment.h" #include "api/test/create_simulcast_test_fixture.h" #include "api/test/simulcast_test_fixture.h" #include "api/test/video/function_video_decoder_factory.h" #include "api/test/video/function_video_encoder_factory.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "test/gtest.h" @@ -24,13 +29,17 @@ namespace { std::unique_ptr CreateSpecificSimulcastTestFixture() { std::unique_ptr encoder_factory = std::make_unique( - []() { return VP8Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& /* format */) { + return CreateVp8Encoder(env); + }); std::unique_ptr decoder_factory = std::make_unique( - []() { return VP8Decoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& /* format */) { + return CreateVp8Decoder(env); + }); return CreateSimulcastTestFixture(std::move(encoder_factory), std::move(decoder_factory), - SdpVideoFormat("VP8")); + SdpVideoFormat::VP8()); } } // namespace diff --git a/modules/video_coding/codecs/vp8/screenshare_layers.cc b/modules/video_coding/codecs/vp8/screenshare_layers.cc index 71db0b22c2..f89c7364ad 100644 --- a/modules/video_coding/codecs/vp8/screenshare_layers.cc +++ b/modules/video_coding/codecs/vp8/screenshare_layers.cc @@ -12,8 +12,19 @@ #include #include +#include #include - +#include +#include + +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/vp8_frame_buffer_controller.h" +#include "api/video_codecs/vp8_frame_config.h" +#include "api/video_codecs/vp8_temporal_layers.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/vp8/include/temporal_layers_checker.h" #include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" @@ -118,7 +129,7 @@ Vp8FrameConfig ScreenshareLayers::NextFrameConfig(size_t stream_index, return dependency_info.frame_config; } - const int64_t now_ms = rtc::TimeMillis(); + const int64_t now_ms = TimeMillis(); int64_t unwrapped_timestamp = time_wrap_handler_.Unwrap(timestamp); int64_t ts_diff; @@ -302,7 +313,7 @@ void ScreenshareLayers::OnEncodeDone(size_t stream_index, return; } - absl::optional dependency_info; + std::optional dependency_info; auto it = pending_frame_configs_.find(rtp_timestamp); if (it != pending_frame_configs_.end()) { 
dependency_info = it->second; @@ -383,7 +394,7 @@ void ScreenshareLayers::OnEncodeDone(size_t stream_index, } } - encode_framerate_.Update(1, rtc::TimeMillis()); + encode_framerate_.Update(1, TimeMillis()); if (number_of_temporal_layers_ == 1) return; @@ -410,18 +421,18 @@ void ScreenshareLayers::OnEncodeDone(size_t stream_index, } } -void ScreenshareLayers::OnFrameDropped(size_t stream_index, - uint32_t rtp_timestamp) { +void ScreenshareLayers::OnFrameDropped(size_t /* stream_index */, + uint32_t /* rtp_timestamp */) { layers_[active_layer_].state = TemporalLayer::State::kDropped; ++stats_.num_overshoots_; } -void ScreenshareLayers::OnPacketLossRateUpdate(float packet_loss_rate) {} +void ScreenshareLayers::OnPacketLossRateUpdate(float /* packet_loss_rate */) {} -void ScreenshareLayers::OnRttUpdate(int64_t rtt_ms) {} +void ScreenshareLayers::OnRttUpdate(int64_t /* rtt_ms */) {} void ScreenshareLayers::OnLossNotification( - const VideoEncoder::LossNotification& loss_notification) {} + const VideoEncoder::LossNotification& /* loss_notification */) {} FrameDependencyStructure ScreenshareLayers::GetTemplateStructure( int num_layers) const { @@ -510,9 +521,8 @@ Vp8EncoderConfig ScreenshareLayers::UpdateConfiguration(size_t stream_index) { static_cast(*capture_framerate_) / *target_framerate_; } - if (bitrate_updated_ || - encoder_config_.rc_target_bitrate != - absl::make_optional(encoder_config_bitrate_kbps)) { + if (bitrate_updated_ || encoder_config_.rc_target_bitrate != + std::make_optional(encoder_config_bitrate_kbps)) { encoder_config_.rc_target_bitrate = encoder_config_bitrate_kbps; // Don't reconfigure qp limits during quality boost frames. @@ -587,7 +597,7 @@ void ScreenshareLayers::UpdateHistograms() { if (stats_.first_frame_time_ms_ == -1) return; int64_t duration_sec = - (rtc::TimeMillis() - stats_.first_frame_time_ms_ + 500) / 1000; + (TimeMillis() - stats_.first_frame_time_ms_ + 500) / 1000; if (duration_sec >= metrics::kMinRunTimeInSeconds) { RTC_HISTOGRAM_COUNTS_10000( "WebRTC.Video.Screenshare.Layer0.FrameRate", diff --git a/modules/video_coding/codecs/vp8/screenshare_layers.h b/modules/video_coding/codecs/vp8/screenshare_layers.h index 47d6b401f4..8f4c0ab3da 100644 --- a/modules/video_coding/codecs/vp8/screenshare_layers.h +++ b/modules/video_coding/codecs/vp8/screenshare_layers.h @@ -9,16 +9,21 @@ #ifndef MODULES_VIDEO_CODING_CODECS_VP8_SCREENSHARE_LAYERS_H_ #define MODULES_VIDEO_CODING_CODECS_VP8_SCREENSHARE_LAYERS_H_ +#include +#include #include #include -#include +#include #include +#include "absl/container/inlined_vector.h" +#include "absl/strings/string_view.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/vp8_frame_buffer_controller.h" #include "api/video_codecs/vp8_frame_config.h" -#include "api/video_codecs/vp8_temporal_layers.h" #include "modules/video_coding/codecs/vp8/include/temporal_layers_checker.h" #include "modules/video_coding/include/video_codec_interface.h" -#include "modules/video_coding/utility/frame_dropper.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" #include "rtc_base/rate_statistics.h" @@ -91,8 +96,8 @@ class ScreenshareLayers final : public Vp8FrameBufferController { const int number_of_temporal_layers_; // TODO(eladalon/sprang): These should be made into const-int set in the ctor. 
- absl::optional min_qp_; - absl::optional max_qp_; + std::optional min_qp_; + std::optional max_qp_; int active_layer_; int64_t last_timestamp_; @@ -105,9 +110,9 @@ class ScreenshareLayers final : public Vp8FrameBufferController { std::map pending_frame_configs_; // Configured max framerate. - absl::optional target_framerate_; + std::optional target_framerate_; // Incoming framerate from capturer. - absl::optional capture_framerate_; + std::optional capture_framerate_; // Tracks what framerate we actually encode, and drops frames on overshoot. RateStatistics encode_framerate_; diff --git a/modules/video_coding/codecs/vp8/screenshare_layers_unittest.cc b/modules/video_coding/codecs/vp8/screenshare_layers_unittest.cc index e5b3bd4fdf..8158b0dcad 100644 --- a/modules/video_coding/codecs/vp8/screenshare_layers_unittest.cc +++ b/modules/video_coding/codecs/vp8/screenshare_layers_unittest.cc @@ -15,14 +15,18 @@ #include #include +#include #include +#include "api/units/time_delta.h" +#include "api/video_codecs/vp8_frame_buffer_controller.h" #include "api/video_codecs/vp8_frame_config.h" #include "modules/video_coding/codecs/interface/common_constants.h" #include "modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h" #include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/checks.h" #include "rtc_base/fake_clock.h" +#include "rtc_base/time_utils.h" #include "system_wrappers/include/metrics.h" #include "test/gmock.h" #include "test/gtest.h" @@ -84,7 +88,7 @@ class ScreenshareLayerTest : public ::testing::Test { return flags; } - int ConfigureFrame(bool key_frame) { + int ConfigureFrame(bool /* key_frame */) { tl_config_ = NextFrameConfig(0, timestamp_); EXPECT_EQ(0, tl_config_.encoder_layer_id) << "ScreenshareLayers always encodes using the bitrate allocator for " @@ -113,7 +117,7 @@ class ScreenshareLayerTest : public ::testing::Test { Vp8FrameConfig NextFrameConfig(size_t stream_index, uint32_t timestamp) { int64_t timestamp_ms = timestamp / 90; - clock_.AdvanceTime(TimeDelta::Millis(timestamp_ms - rtc::TimeMillis())); + clock_.AdvanceTime(TimeDelta::Millis(timestamp_ms - TimeMillis())); return layers_->NextFrameConfig(stream_index, timestamp); } @@ -161,12 +165,10 @@ class ScreenshareLayerTest : public ::testing::Test { // Adds frames until we get one in the specified temporal layer. The last // FrameEncoded() call will be omitted and needs to be done by the caller. // Returns the flags for the last frame. - int SkipUntilTl(int layer) { - return SkipUntilTlAndSync(layer, absl::nullopt); - } + int SkipUntilTl(int layer) { return SkipUntilTlAndSync(layer, std::nullopt); } // Same as SkipUntilTl, but also waits until the sync bit condition is met. - int SkipUntilTlAndSync(int layer, absl::optional sync) { + int SkipUntilTlAndSync(int layer, std::optional sync) { int flags = 0; const int kMaxFramesToSkip = 1 + (sync.value_or(false) ? kMaxSyncPeriodSeconds : 1) * kFrameRate; @@ -194,7 +196,7 @@ class ScreenshareLayerTest : public ::testing::Test { int min_qp_; uint32_t max_qp_; int frame_size_; - rtc::ScopedFakeClock clock_; + ScopedFakeClock clock_; std::unique_ptr layers_; uint32_t timestamp_; diff --git a/modules/video_coding/codecs/vp8/temporal_layers.h b/modules/video_coding/codecs/vp8/temporal_layers.h deleted file mode 100644 index 9576fb27be..0000000000 --- a/modules/video_coding/codecs/vp8/temporal_layers.h +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_CODING_CODECS_VP8_TEMPORAL_LAYERS_H_ -#define MODULES_VIDEO_CODING_CODECS_VP8_TEMPORAL_LAYERS_H_ - -// TODO(webrtc:9012) Remove this file when downstream projects have updated. -#include "api/video_codecs/vp8_temporal_layers.h" - -#endif // MODULES_VIDEO_CODING_CODECS_VP8_TEMPORAL_LAYERS_H_ diff --git a/modules/video_coding/codecs/vp8/temporal_layers_checker.cc b/modules/video_coding/codecs/vp8/temporal_layers_checker.cc index 5aebd2c526..7c2251adca 100644 --- a/modules/video_coding/codecs/vp8/temporal_layers_checker.cc +++ b/modules/video_coding/codecs/vp8/temporal_layers_checker.cc @@ -10,10 +10,14 @@ #include "modules/video_coding/codecs/vp8/include/temporal_layers_checker.h" +#include #include +#include "api/video_codecs/vp8_frame_config.h" +#include "api/video_codecs/vp8_temporal_layers.h" #include "modules/video_coding/codecs/interface/common_constants.h" #include "modules/video_coding/codecs/vp8/default_temporal_layers.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { diff --git a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc index 14ac8aab5b..c0aca08ca1 100644 --- a/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc +++ b/modules/video_coding/codecs/vp8/test/vp8_impl_unittest.cc @@ -10,25 +10,53 @@ #include +#include +#include #include - +#include +#include +#include +#include +#include + +#include "absl/container/inlined_vector.h" +#include "absl/memory/memory.h" +#include "api/environment/environment_factory.h" +#include "api/scoped_refptr.h" #include "api/test/create_frame_generator.h" #include "api/test/frame_generator_interface.h" #include "api/test/mock_video_decoder.h" #include "api/test/mock_video_encoder.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/encoded_image.h" +#include "api/video/render_resolution.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/vp8_temporal_layers.h" #include "common_video/libyuv/include/webrtc_libyuv.h" -#include "common_video/test/utilities.h" #include "modules/video_coding/codecs/interface/mock_libvpx_interface.h" #include "modules/video_coding/codecs/test/video_codec_unittest.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h" +#include "modules/video_coding/include/video_error_codes.h" #include "modules/video_coding/utility/vp8_header_parser.h" #include "rtc_base/time_utils.h" #include "test/field_trial.h" +#include "test/gmock.h" +#include "test/gtest.h" #include "test/mappable_native_buffer.h" +#include "test/scoped_key_value_config.h" #include "test/video_codec_settings.h" +#include "vpx/vpx_codec.h" +#include "vpx/vpx_encoder.h" +#include "vpx/vpx_image.h" namespace webrtc { @@ -40,6 +68,8 @@ 
using ::testing::Field; using ::testing::Invoke; using ::testing::NiceMock; using ::testing::Return; +using ::testing::Values; +using ::testing::WithParamInterface; using EncoderInfo = webrtc::VideoEncoder::EncoderInfo; using FramerateFractions = absl::InlinedVector; @@ -65,11 +95,11 @@ const VideoEncoder::Settings kSettings(kCapabilities, class TestVp8Impl : public VideoCodecUnitTest { protected: std::unique_ptr CreateEncoder() override { - return VP8Encoder::Create(); + return CreateVp8Encoder(env_); } std::unique_ptr CreateDecoder() override { - return VP8Decoder::Create(); + return CreateVp8Decoder(env_); } void ModifyCodecSettings(VideoCodec* codec_settings) override { @@ -120,8 +150,7 @@ TEST_F(TestVp8Impl, ErrorResilienceDisabledForNoTemporalLayers) { codec_settings_.simulcastStream[0].numberOfTemporalLayers = 1; auto* const vpx = new NiceMock(); - LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), - VP8Encoder::Settings()); + LibvpxVp8Encoder encoder(CreateEnvironment(), {}, absl::WrapUnique(vpx)); EXPECT_CALL(*vpx, codec_enc_init( _, _, Field(&vpx_codec_enc_cfg_t::g_error_resilient, 0), _)); @@ -134,8 +163,7 @@ TEST_F(TestVp8Impl, DefaultErrorResilienceEnabledForTemporalLayers) { codec_settings_.VP8()->numberOfTemporalLayers = 2; auto* const vpx = new NiceMock(); - LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), - VP8Encoder::Settings()); + LibvpxVp8Encoder encoder(CreateEnvironment(), {}, absl::WrapUnique(vpx)); EXPECT_CALL(*vpx, codec_enc_init(_, _, Field(&vpx_codec_enc_cfg_t::g_error_resilient, @@ -153,8 +181,7 @@ TEST_F(TestVp8Impl, codec_settings_.VP8()->numberOfTemporalLayers = 2; auto* const vpx = new NiceMock(); - LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), - VP8Encoder::Settings()); + LibvpxVp8Encoder encoder(CreateEnvironment(), {}, absl::WrapUnique(vpx)); EXPECT_CALL(*vpx, codec_enc_init(_, _, Field(&vpx_codec_enc_cfg_t::g_error_resilient, @@ -167,8 +194,7 @@ TEST_F(TestVp8Impl, TEST_F(TestVp8Impl, SetRates) { codec_settings_.SetFrameDropEnabled(true); auto* const vpx = new NiceMock(); - LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), - VP8Encoder::Settings()); + LibvpxVp8Encoder encoder(CreateEnvironment(), {}, absl::WrapUnique(vpx)); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder.InitEncode(&codec_settings_, VideoEncoder::Settings(kCapabilities, 1, 1000))); @@ -214,7 +240,7 @@ TEST_F(TestVp8Impl, EncodeNv12FrameSimulcast) { CodecSpecificInfo codec_specific_info; input_frame_generator_ = test::CreateSquareFrameGenerator( kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kNV12, - absl::nullopt); + std::nullopt); EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release()); @@ -231,11 +257,11 @@ TEST_F(TestVp8Impl, EncodeI420FrameAfterNv12Frame) { CodecSpecificInfo codec_specific_info; input_frame_generator_ = test::CreateSquareFrameGenerator( kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kNV12, - absl::nullopt); + std::nullopt); EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info); input_frame_generator_ = test::CreateSquareFrameGenerator( kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kI420, - absl::nullopt); + std::nullopt); EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release()); @@ -249,22 +275,21 @@ TEST_F(TestVp8Impl, Configure) { } TEST_F(TestVp8Impl, OnEncodedImageReportsInfo) { - constexpr Timestamp kCaptureTimeIdentifier = Timestamp::Micros(1000); + 
constexpr Timestamp kPresentationTimestamp = Timestamp::Micros(1000); VideoFrame input_frame = NextInputFrame(); - input_frame.set_timestamp(kInitialTimestampRtp); - input_frame.set_timestamp_us(kInitialTimestampMs * - rtc::kNumMicrosecsPerMillisec); - input_frame.set_capture_time_identifier(kCaptureTimeIdentifier); + input_frame.set_rtp_timestamp(kInitialTimestampRtp); + input_frame.set_timestamp_us(kInitialTimestampMs * kNumMicrosecsPerMillisec); + input_frame.set_presentation_timestamp(kPresentationTimestamp); EncodedImage encoded_frame; CodecSpecificInfo codec_specific_info; EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info); - EXPECT_EQ(kInitialTimestampRtp, encoded_frame.Timestamp()); + EXPECT_EQ(kInitialTimestampRtp, encoded_frame.RtpTimestamp()); EXPECT_EQ(kWidth, static_cast(encoded_frame._encodedWidth)); EXPECT_EQ(kHeight, static_cast(encoded_frame._encodedHeight)); - ASSERT_TRUE(encoded_frame.CaptureTimeIdentifier().has_value()); - EXPECT_EQ(kCaptureTimeIdentifier.us(), - encoded_frame.CaptureTimeIdentifier()->us()); + ASSERT_TRUE(encoded_frame.PresentationTimestamp().has_value()); + EXPECT_EQ(kPresentationTimestamp.us(), + encoded_frame.PresentationTimestamp()->us()); } TEST_F(TestVp8Impl, @@ -288,7 +313,7 @@ TEST_F(TestVp8Impl, DecodedQpEqualsEncodedQp) { encoded_frame._frameType = VideoFrameType::kVideoFrameKey; EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, -1)); std::unique_ptr decoded_frame; - absl::optional decoded_qp; + std::optional decoded_qp; ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp)); ASSERT_TRUE(decoded_frame); ASSERT_TRUE(decoded_qp); @@ -491,9 +516,8 @@ TEST_F(TestVp8Impl, ChecksSimulcastSettings) { #endif TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) { VideoFrame input_frame = NextInputFrame(); - input_frame.set_timestamp(kInitialTimestampRtp); - input_frame.set_timestamp_us(kInitialTimestampMs * - rtc::kNumMicrosecsPerMillisec); + input_frame.set_rtp_timestamp(kInitialTimestampRtp); + input_frame.set_timestamp_us(kInitialTimestampMs * kNumMicrosecsPerMillisec); EncodedImage encoded_frame; CodecSpecificInfo codec_specific_info; EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info); @@ -504,12 +528,12 @@ TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) { EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, -1)); std::unique_ptr decoded_frame; - absl::optional decoded_qp; + std::optional decoded_qp; ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp)); ASSERT_TRUE(decoded_frame); // Compute PSNR on all planes (faster than SSIM). 
EXPECT_GT(I420PSNR(&input_frame, decoded_frame.get()), 36); - EXPECT_EQ(kInitialTimestampRtp, decoded_frame->timestamp()); + EXPECT_EQ(kInitialTimestampRtp, decoded_frame->rtp_timestamp()); } TEST_F(TestVp8Impl, EncoderWith2TemporalLayers) { @@ -574,7 +598,7 @@ TEST_F(TestVp8Impl, DontDropKeyframes) { input_frame_generator_ = test::CreateSquareFrameGenerator( codec_settings_.width, codec_settings_.height, test::FrameGeneratorInterface::OutputType::kI420, - /* num_squares = */ absl::optional(300)); + /* num_squares = */ std::optional(300)); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->InitEncode(&codec_settings_, kSettings)); @@ -596,8 +620,7 @@ TEST_F(TestVp8Impl, DontDropKeyframes) { TEST_F(TestVp8Impl, KeepsTimestampOnReencode) { auto* const vpx = new NiceMock(); - LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), - VP8Encoder::Settings()); + LibvpxVp8Encoder encoder(CreateEnvironment(), {}, absl::WrapUnique(vpx)); // Settings needed to trigger ScreenshareLayers usage, which is required for // overshoot-drop-reencode logic. @@ -608,7 +631,7 @@ TEST_F(TestVp8Impl, KeepsTimestampOnReencode) { EXPECT_CALL(*vpx, img_wrap(_, _, _, _, _, _)) .WillOnce(Invoke([](vpx_image_t* img, vpx_img_fmt_t fmt, unsigned int d_w, - unsigned int d_h, unsigned int stride_align, + unsigned int d_h, unsigned int /* stride_align */, unsigned char* img_data) { img->fmt = fmt; img->d_w = d_w; @@ -634,10 +657,22 @@ TEST_F(TestVp8Impl, KeepsTimestampOnReencode) { encoder.Encode(NextInputFrame(), &delta_frame); } +TEST_F(TestVp8Impl, PopulatesFilterSettings) { + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release()); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + encoder_->InitEncode(&codec_settings_, kSettings)); + + EncodedImage encoded_frame; + CodecSpecificInfo codec_specific_info; + EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info); + + ASSERT_TRUE(encoded_frame.corruption_detection_filter_settings().has_value()); + EXPECT_GT(encoded_frame.corruption_detection_filter_settings()->std_dev, 0.0); +} + TEST(LibvpxVp8EncoderTest, GetEncoderInfoReturnsStaticInformation) { auto* const vpx = new NiceMock(); - LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), - VP8Encoder::Settings()); + LibvpxVp8Encoder encoder(CreateEnvironment(), {}, absl::WrapUnique(vpx)); const auto info = encoder.GetEncoderInfo(); @@ -652,13 +687,13 @@ TEST(LibvpxVp8EncoderTest, GetEncoderInfoReturnsStaticInformation) { } TEST(LibvpxVp8EncoderTest, RequestedResolutionAlignmentFromFieldTrial) { - test::ScopedFieldTrials field_trials( + test::ScopedKeyValueConfig field_trials( "WebRTC-VP8-GetEncoderInfoOverride/" "requested_resolution_alignment:10/"); auto* const vpx = new NiceMock(); - LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), - VP8Encoder::Settings()); + LibvpxVp8Encoder encoder(CreateEnvironment(&field_trials), {}, + absl::WrapUnique(vpx)); EXPECT_EQ(encoder.GetEncoderInfo().requested_resolution_alignment, 10u); EXPECT_FALSE( @@ -667,7 +702,7 @@ TEST(LibvpxVp8EncoderTest, RequestedResolutionAlignmentFromFieldTrial) { } TEST(LibvpxVp8EncoderTest, ResolutionBitrateLimitsFromFieldTrial) { - test::ScopedFieldTrials field_trials( + test::ScopedKeyValueConfig field_trials( "WebRTC-VP8-GetEncoderInfoOverride/" "frame_size_pixels:123|456|789," "min_start_bitrate_bps:11000|22000|33000," @@ -675,12 +710,12 @@ TEST(LibvpxVp8EncoderTest, ResolutionBitrateLimitsFromFieldTrial) { "max_bitrate_bps:77000|88000|99000/"); auto* const vpx = new NiceMock(); - LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), - VP8Encoder::Settings()); + 
LibvpxVp8Encoder encoder(CreateEnvironment(&field_trials), {}, + absl::WrapUnique(vpx)); EXPECT_THAT( encoder.GetEncoderInfo().resolution_bitrate_limits, - ::testing::ElementsAre( + ElementsAre( VideoEncoder::ResolutionBitrateLimits{123, 11000, 44000, 77000}, VideoEncoder::ResolutionBitrateLimits{456, 22000, 55000, 88000}, VideoEncoder::ResolutionBitrateLimits{789, 33000, 66000, 99000})); @@ -689,8 +724,7 @@ TEST(LibvpxVp8EncoderTest, ResolutionBitrateLimitsFromFieldTrial) { TEST(LibvpxVp8EncoderTest, GetEncoderInfoReturnsEmptyResolutionBitrateLimitsByDefault) { auto* const vpx = new NiceMock(); - LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), - VP8Encoder::Settings()); + LibvpxVp8Encoder encoder(CreateEnvironment(), {}, absl::WrapUnique(vpx)); const auto info = encoder.GetEncoderInfo(); @@ -705,12 +739,12 @@ TEST(LibvpxVp8EncoderTest, /*min_bitrate_bps=*/100, /*max_bitrate_bps=*/1000), VideoEncoder::ResolutionBitrateLimits(320 * 180, 100, 30, 500)}; - VP8Encoder::Settings settings; + Vp8EncoderSettings settings; settings.resolution_bitrate_limits = resolution_bitrate_limits; auto* const vpx = new NiceMock(); - LibvpxVp8Encoder encoder((std::unique_ptr(vpx)), - std::move(settings)); + LibvpxVp8Encoder encoder(CreateEnvironment(), std::move(settings), + absl::WrapUnique(vpx)); const auto info = encoder.GetEncoderInfo(); @@ -722,7 +756,7 @@ TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationNoLayers) { FramerateFractions(1, EncoderInfo::kMaxFramerateFraction)}; EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation, - ::testing::ElementsAreArray(expected_fps_allocation)); + ElementsAreArray(expected_fps_allocation)); } TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationTwoTemporalLayers) { @@ -740,7 +774,7 @@ TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationTwoTemporalLayers) { expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction); EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation, - ::testing::ElementsAreArray(expected_fps_allocation)); + ElementsAreArray(expected_fps_allocation)); } TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationThreeTemporalLayers) { @@ -759,7 +793,7 @@ TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationThreeTemporalLayers) { expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction); EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation, - ::testing::ElementsAreArray(expected_fps_allocation)); + ElementsAreArray(expected_fps_allocation)); } TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationScreenshareLayers) { @@ -780,7 +814,7 @@ TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationScreenshareLayers) { // Expect empty vector, since this mode doesn't have a fixed framerate. FramerateFractions expected_fps_allocation[kMaxSpatialLayers]; EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation, - ::testing::ElementsAreArray(expected_fps_allocation)); + ElementsAreArray(expected_fps_allocation)); } TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationSimulcastVideo) { @@ -812,7 +846,7 @@ TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationSimulcastVideo) { expected_fps_allocation[1] = expected_fps_allocation[0]; expected_fps_allocation[2] = expected_fps_allocation[0]; EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation, - ::testing::ElementsAreArray(expected_fps_allocation)); + ElementsAreArray(expected_fps_allocation)); // Release encoder and re-init without temporal layers. 
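  // After the re-init each active stream has a single temporal layer, so its
  // fps_allocation is expected to collapse to one full-rate entry
  // (kMaxFramerateFraction), as checked below.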
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release()); @@ -821,7 +855,7 @@ TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationSimulcastVideo) { FramerateFractions default_fps_fraction[kMaxSpatialLayers]; default_fps_fraction[0].push_back(EncoderInfo::kMaxFramerateFraction); EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation, - ::testing::ElementsAreArray(default_fps_fraction)); + ElementsAreArray(default_fps_fraction)); for (int i = 0; i < codec_settings_.numberOfSimulcastStreams; ++i) { codec_settings_.simulcastStream[i].numberOfTemporalLayers = 1; @@ -834,12 +868,151 @@ TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationSimulcastVideo) { expected_fps_allocation[i].push_back(EncoderInfo::kMaxFramerateFraction); } EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation, - ::testing::ElementsAreArray(expected_fps_allocation)); + ElementsAreArray(expected_fps_allocation)); +} + +class TestVp8ImplWithMaxFrameDropTrial + : public TestVp8Impl, + public WithParamInterface> { + public: + TestVp8ImplWithMaxFrameDropTrial() + : TestVp8Impl(), trials_(std::get<0>(GetParam())) {} + + protected: + test::ScopedFieldTrials trials_; +}; + +TEST_P(TestVp8ImplWithMaxFrameDropTrial, EnforcesMaxFrameDropInterval) { + static constexpr int kFps = 5; + auto [trial_string, max_interval_config, min_expected_interval] = GetParam(); + + // Allow one frame interval over the configured max frame drop interval. + TimeDelta max_frame_delta = + max_interval_config + (TimeDelta::Seconds(1) / kFps); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release()); + + // Set up low-bitrate screenshare stream. + codec_settings_.numberOfSimulcastStreams = 1; + codec_settings_.legacy_conference_mode = false; + codec_settings_.mode = VideoCodecMode::kScreensharing; + codec_settings_.maxFramerate = kFps; + codec_settings_.width = 2880; + codec_settings_.height = 1800; + codec_settings_.minBitrate = 30; + codec_settings_.maxBitrate = 420; + codec_settings_.SetFrameDropEnabled(true); + + codec_settings_.simulcastStream[0].active = true; + codec_settings_.simulcastStream[0].minBitrate = codec_settings_.minBitrate; + codec_settings_.simulcastStream[0].targetBitrate = codec_settings_.maxBitrate; + codec_settings_.simulcastStream[0].maxBitrate = codec_settings_.maxBitrate; + codec_settings_.simulcastStream[0].numberOfTemporalLayers = 2; + codec_settings_.simulcastStream[0].width = codec_settings_.width; + codec_settings_.simulcastStream[0].height = codec_settings_.height; + codec_settings_.simulcastStream[0].maxFramerate = + codec_settings_.maxFramerate; + + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + encoder_->InitEncode(&codec_settings_, kSettings)); + + // Allocate a very constained amount of bitrate to increase risk of frame + // drops. + VideoBitrateAllocation bitrate_allocation; + bitrate_allocation.SetBitrate(0, 0, 50'000); + bitrate_allocation.SetBitrate(0, 1, 50'000); + encoder_->SetRates( + VideoEncoder::RateControlParameters(bitrate_allocation, 5.0)); + + EncodedImage encoded_frame; + CodecSpecificInfo codec_specific_info; + // Create a low-complexity 1 square test sequence. 
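+  // Note: with only ~100 kbps allocated for 2880x1800 at 5 fps, the nominal
+  // budget is about 100000 / 8 / 5 = 2500 bytes per frame, so the encoder is
+  // expected to drop frames aggressively once the content becomes complex.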
+ input_frame_generator_ = test::CreateSquareFrameGenerator( + codec_settings_.width, codec_settings_.height, + test::FrameGeneratorInterface::OutputType::kI420, + /*num_squares=*/1); + + class Callback : public EncodedImageCallback { + public: + Callback() : last_callback_(Timestamp::MinusInfinity()) {} + + const std::vector& GetCallbackDeltas() const { + return callback_deltas_; + } + void ClearCallbackDeltas() { callback_deltas_.clear(); } + + protected: + Result OnEncodedImage(const EncodedImage& encoded_image, + const CodecSpecificInfo* /* codec_specific_info */) { + Timestamp timestamp = + Timestamp::Millis(encoded_image.RtpTimestamp() / 90); + if (last_callback_.IsFinite()) { + callback_deltas_.push_back(timestamp - last_callback_); + } + last_callback_ = timestamp; + return Result(Result::Error::OK); + } + + private: + std::vector callback_deltas_; + Timestamp last_callback_; + } callback; + + encoder_->RegisterEncodeCompleteCallback(&callback); + std::vector frame_types = {VideoFrameType::kVideoFrameKey}; + EXPECT_EQ(encoder_->Encode(NextInputFrame(), &frame_types), + WEBRTC_VIDEO_CODEC_OK); + frame_types[0] = VideoFrameType::kVideoFrameDelta; + + // Encode a couple of frames and verify reasonable frame spacing. + for (uint32_t i = 0; i < codec_settings_.maxFramerate * 10; ++i) { + EXPECT_EQ(encoder_->Encode(NextInputFrame(), &frame_types), + WEBRTC_VIDEO_CODEC_OK); + } + auto deltas = callback.GetCallbackDeltas(); + ASSERT_FALSE(deltas.empty()); + EXPECT_LE(*std::max_element(deltas.begin(), deltas.end()), max_frame_delta); + + // Switch to a much more complex input. Verify time deltas are still OK. + input_frame_generator_ = test::CreateSquareFrameGenerator( + codec_settings_.width, codec_settings_.height, + test::FrameGeneratorInterface::OutputType::kI420, + /*num_squares=*/5000); + callback.ClearCallbackDeltas(); + for (uint32_t i = 0; i < codec_settings_.maxFramerate * 10; ++i) { + EXPECT_EQ(encoder_->Encode(NextInputFrame(), &frame_types), + WEBRTC_VIDEO_CODEC_OK); + } + deltas = callback.GetCallbackDeltas(); + ASSERT_FALSE(deltas.empty()); + EXPECT_LE(*std::max_element(deltas.begin(), deltas.end()), max_frame_delta); + + // Check that encoder is causing the expected long frame drop intervals. + EXPECT_GT(*std::max_element(deltas.begin(), deltas.end()), + min_expected_interval); + + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release()); } +INSTANTIATE_TEST_SUITE_P( + All, + TestVp8ImplWithMaxFrameDropTrial, + Values( + // Tuple of { + // trial string, + // configured max frame interval, + // lower bound on expected frame drop intervals + // } + std::make_tuple("WebRTC-VP8-MaxFrameInterval/Disabled/", + TimeDelta::PlusInfinity(), + TimeDelta::Seconds(2)), + std::make_tuple("WebRTC-VP8-MaxFrameInterval/interval:1s/", + TimeDelta::Seconds(1), + TimeDelta::Seconds(0)), + std::make_tuple("", TimeDelta::Seconds(2), TimeDelta::Seconds(1)))); + class TestVp8ImplForPixelFormat : public TestVp8Impl, - public ::testing::WithParamInterface { + public WithParamInterface { public: TestVp8ImplForPixelFormat() : TestVp8Impl(), mappable_type_(GetParam()) {} @@ -891,9 +1064,9 @@ TEST_P(TestVp8ImplForPixelFormat, EncodeNativeFrameSimulcast) { EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info); // After encoding, we expect one mapping per simulcast layer. 
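  // (Each simulcast stream maps or scales the native buffer to its own
  // resolution, so three streams should produce three mapped buffers in the
  // pixel format this test is parameterized on.)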
- rtc::scoped_refptr mappable_buffer = + scoped_refptr mappable_buffer = test::GetMappableNativeBufferFromVideoFrame(input_frame); - std::vector> mapped_buffers = + std::vector> mapped_buffers = mappable_buffer->GetMappedFramedBuffers(); ASSERT_EQ(mapped_buffers.size(), 3u); EXPECT_EQ(mapped_buffers[0]->type(), mappable_type_); @@ -912,7 +1085,7 @@ TEST_P(TestVp8ImplForPixelFormat, EncodeNativeFrameSimulcast) { INSTANTIATE_TEST_SUITE_P(All, TestVp8ImplForPixelFormat, - ::testing::Values(VideoFrameBuffer::Type::kI420, - VideoFrameBuffer::Type::kNV12)); + Values(VideoFrameBuffer::Type::kI420, + VideoFrameBuffer::Type::kNV12)); } // namespace webrtc diff --git a/modules/video_coding/codecs/vp8/vp8_scalability.cc b/modules/video_coding/codecs/vp8/vp8_scalability.cc index 9c7495ddf7..b271ec6e42 100644 --- a/modules/video_coding/codecs/vp8/vp8_scalability.cc +++ b/modules/video_coding/codecs/vp8/vp8_scalability.cc @@ -10,6 +10,8 @@ #include "modules/video_coding/codecs/vp8/vp8_scalability.h" +#include "api/video_codecs/scalability_mode.h" + namespace webrtc { bool VP8SupportsScalabilityMode(ScalabilityMode scalability_mode) { diff --git a/modules/video_coding/codecs/vp9/include/vp9.h b/modules/video_coding/codecs/vp9/include/vp9.h index 79d403ded3..61f4763874 100644 --- a/modules/video_coding/codecs/vp9/include/vp9.h +++ b/modules/video_coding/codecs/vp9/include/vp9.h @@ -15,10 +15,13 @@ #include #include +#include "absl/base/nullability.h" +#include "api/environment/environment.h" #include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/sdp_video_format.h" -#include "media/base/codec.h" -#include "modules/video_coding/include/video_codec_interface.h" +#include "api/video_codecs/video_decoder.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/vp9_profile.h" namespace webrtc { @@ -31,16 +34,16 @@ std::vector SupportedVP9Codecs( // preference. These will be availble for receive-only connections. std::vector SupportedVP9DecoderCodecs(); -class VP9Encoder : public VideoEncoder { +struct Vp9EncoderSettings { + VP9Profile profile = VP9Profile::kProfile0; +}; +absl_nonnull std::unique_ptr CreateVp9Encoder( + const Environment& env, + Vp9EncoderSettings settings = {}); + +class VP9Encoder { public: - // Deprecated. Returns default implementation using VP9 Profile 0. - // TODO(emircan): Remove once this is no longer used. - static std::unique_ptr Create(); - // Parses VP9 Profile from `codec` and returns the appropriate implementation. 
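A sketch of how a caller might use the new factory declared in vp9.h above, replacing the removed VP9Encoder::Create(const cricket::VideoCodec&) path. CreateEnvironment() from api/environment/environment_factory.h is assumed to be available in the build and is not part of this change:

#include <memory>

#include "api/environment/environment_factory.h"  // CreateEnvironment(), assumed available.
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/vp9_profile.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"

namespace webrtc {

// The VP9 profile is now passed explicitly through Vp9EncoderSettings instead
// of being parsed from SDP parameters inside the encoder.
std::unique_ptr<VideoEncoder> MakeProfile2Vp9Encoder() {
  const Environment env = CreateEnvironment();
  Vp9EncoderSettings settings;
  settings.profile = VP9Profile::kProfile2;
  return CreateVp9Encoder(env, settings);
}

}  // namespace webrtc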
- static std::unique_ptr Create(const cricket::VideoCodec& codec); static bool SupportsScalabilityMode(ScalabilityMode scalability_mode); - - ~VP9Encoder() override {} }; class VP9Decoder : public VideoDecoder { diff --git a/modules/video_coding/codecs/vp9/include/vp9_globals.h b/modules/video_coding/codecs/vp9/include/vp9_globals.h index 0614a3c83f..b5fbc2ba11 100644 --- a/modules/video_coding/codecs/vp9/include/vp9_globals.h +++ b/modules/video_coding/codecs/vp9/include/vp9_globals.h @@ -16,6 +16,8 @@ #include +#include + #include "modules/video_coding/codecs/interface/common_constants.h" #include "rtc_base/checks.h" @@ -30,8 +32,8 @@ const size_t kMaxVp9RefPics = 3; const size_t kMaxVp9FramesInGof = 0xFF; // 8 bits const size_t kMaxVp9NumberOfSpatialLayers = 8; -const size_t kMinVp9SpatialLayerLongSideLength = 240; -const size_t kMinVp9SpatialLayerShortSideLength = 135; +const int kMinVp9SpatialLayerLongSideLength = 240; +const int kMinVp9SpatialLayerShortSideLength = 135; enum TemporalStructureMode { kTemporalStructureMode1, // 1 temporal layer structure - i.e., IPPP... diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc index 2dec061836..4d4e147d50 100644 --- a/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc +++ b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc @@ -14,18 +14,29 @@ #include "modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h" #include +#include +#include +#include -#include "absl/strings/match.h" -#include "api/transport/field_trial_based_config.h" +#include "api/array_view.h" +#include "api/scoped_refptr.h" #include "api/video/color_space.h" -#include "api/video/i010_buffer.h" +#include "api/video/encoded_image.h" +#include "api/video/render_resolution.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" +#include "api/video_codecs/video_decoder.h" #include "common_video/include/video_frame_buffer.h" +#include "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h" +#include "modules/video_coding/include/video_error_codes.h" #include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "third_party/libyuv/include/libyuv/convert.h" #include "vpx/vp8dx.h" #include "vpx/vpx_decoder.h" +#include "vpx/vpx_encoder.h" +#include "vpx/vpx_image.h" namespace webrtc { namespace { @@ -170,7 +181,7 @@ bool LibvpxVp9Decoder::Configure(const Settings& settings) { inited_ = true; // Always start with a complete key frame. 
key_frame_required_ = true; - if (absl::optional buffer_pool_size = settings.buffer_pool_size()) { + if (std::optional buffer_pool_size = settings.buffer_pool_size()) { if (!libvpx_buffer_pool_.Resize(*buffer_pool_size)) { return false; } @@ -197,9 +208,9 @@ int LibvpxVp9Decoder::Decode(const EncodedImage& input_image, } if (input_image._frameType == VideoFrameType::kVideoFrameKey) { - absl::optional frame_info = + std::optional frame_info = ParseUncompressedVp9Header( - rtc::MakeArrayView(input_image.data(), input_image.size())); + MakeArrayView(input_image.data(), input_image.size())); if (frame_info) { RenderResolution frame_resolution(frame_info->frame_width, frame_info->frame_height); @@ -246,8 +257,8 @@ int LibvpxVp9Decoder::Decode(const EncodedImage& input_image, vpx_codec_err_t vpx_ret = vpx_codec_control(decoder_, VPXD_GET_LAST_QUANTIZER, &qp); RTC_DCHECK_EQ(vpx_ret, VPX_CODEC_OK); - int ret = - ReturnFrame(img, input_image.Timestamp(), qp, input_image.ColorSpace()); + int ret = ReturnFrame(img, input_image.RtpTimestamp(), qp, + input_image.ColorSpace()); if (ret != 0) { return ret; } @@ -267,12 +278,12 @@ int LibvpxVp9Decoder::ReturnFrame( // This buffer contains all of `img`'s image data, a reference counted // Vp9FrameBuffer. (libvpx is done with the buffers after a few // vpx_codec_decode calls or vpx_codec_destroy). - rtc::scoped_refptr img_buffer( + scoped_refptr img_buffer( static_cast(img->fb_priv)); // The buffer can be used directly by the VideoFrame (without copy) by // using a Wrapped*Buffer. - rtc::scoped_refptr img_wrapped_buffer; + scoped_refptr img_wrapped_buffer; switch (img->fmt) { case VPX_IMG_FMT_I420: img_wrapped_buffer = WrapI420Buffer( @@ -345,7 +356,7 @@ int LibvpxVp9Decoder::ReturnFrame( auto builder = VideoFrame::Builder() .set_video_frame_buffer(img_wrapped_buffer) - .set_timestamp_rtp(timestamp); + .set_rtp_timestamp(timestamp); if (explicit_color_space) { builder.set_color_space(*explicit_color_space); } else { @@ -354,7 +365,7 @@ int LibvpxVp9Decoder::ReturnFrame( } VideoFrame decoded_image = builder.build(); - decode_complete_callback_->Decoded(decoded_image, absl::nullopt, qp); + decode_complete_callback_->Decoded(decoded_image, std::nullopt, qp); return WEBRTC_VIDEO_CODEC_OK; } diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h index 4275836286..773b94cd3f 100644 --- a/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h +++ b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h @@ -14,10 +14,15 @@ #ifdef RTC_ENABLE_VP9 +#include + +#include "api/video/color_space.h" +#include "api/video/encoded_image.h" #include "api/video_codecs/video_decoder.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h" -#include "vpx/vp8cx.h" +#include "vpx/vpx_codec.h" +#include "vpx/vpx_image.h" namespace webrtc { diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc index e460625aea..ce540775f4 100644 --- a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc +++ b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc @@ -9,43 +9,75 @@ * */ -#include #ifdef RTC_ENABLE_VP9 +#include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h" + #include -#include -#include +#include +#include +#include +#include +#include +#include +#include #include #include #include "absl/algorithm/container.h" -#include "absl/memory/memory.h" -#include "absl/strings/match.h" 
-#include "absl/types/optional.h" -#include "api/video/color_space.h" +#include "absl/container/inlined_vector.h" +#include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/fec_controller_override.h" +#include "api/field_trials_view.h" +#include "api/scoped_refptr.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/encoded_image.h" #include "api/video/i010_buffer.h" +#include "api/video/render_resolution.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" #include "api/video_codecs/scalability_mode.h" -#include "common_video/include/video_frame_buffer.h" -#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/vp9_profile.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/interface/libvpx_interface.h" +#include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_error_codes.h" #include "modules/video_coding/svc/create_scalability_structure.h" #include "modules/video_coding/svc/scalability_mode_util.h" #include "modules/video_coding/svc/scalable_video_controller.h" #include "modules/video_coding/svc/scalable_video_controller_no_layering.h" +#include "modules/video_coding/svc/simulcast_to_svc_converter.h" #include "modules/video_coding/svc/svc_rate_allocator.h" -#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" +#include "modules/video_coding/utility/framerate_controller_deprecated.h" +#include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "rtc_base/checks.h" +#include "rtc_base/containers/flat_map.h" #include "rtc_base/experiments/field_trial_list.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" -#include "third_party/libyuv/include/libyuv/convert.h" #include "vpx/vp8cx.h" #include "vpx/vpx_encoder.h" +#include "vpx/vpx_image.h" + +#if (defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64)) && \ + (defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)) +#define MOBILE_ARM +#endif namespace webrtc { @@ -58,6 +90,13 @@ uint8_t kUpdBufIdx[4] = {0, 0, 1, 0}; // Maximum allowed PID difference for differnet per-layer frame-rate case. const int kMaxAllowedPidDiff = 30; +namespace variable_framerate_screenshare { +constexpr double kMinFps = 5.0; +constexpr int kMinQP = 32; +constexpr int kUndershootPct = 30; +constexpr int kFramesBeforeSteadyState = 5; +} // namespace variable_framerate_screenshare + // TODO(ilink): Tune these thresholds further. // Selected using ConverenceMotion_1280_720_50.yuv clip. // No toggling observed on any link capacity from 100-2000kbps. 
@@ -82,25 +121,21 @@ std::pair GetActiveLayers( return {0, 0}; } -using Vp9ScalabilityStructure = - std::tuple, ScalabilityMode>; -absl::optional CreateVp9ScalabilityStructure( +std::unique_ptr CreateVp9ScalabilityStructure( const VideoCodec& codec) { int num_spatial_layers = codec.VP9().numberOfSpatialLayers; int num_temporal_layers = std::max(1, int{codec.VP9().numberOfTemporalLayers}); if (num_spatial_layers == 1 && num_temporal_layers == 1) { - return absl::make_optional( - std::make_unique(), - ScalabilityMode::kL1T1); + return std::make_unique(); } char name[20]; - rtc::SimpleStringBuilder ss(name); + SimpleStringBuilder ss(name); if (codec.mode == VideoCodecMode::kScreensharing) { // TODO(bugs.webrtc.org/11999): Compose names of the structures when they // are implemented. - return absl::nullopt; + return nullptr; } else if (codec.VP9().interLayerPred == InterLayerPredMode::kOn || num_spatial_layers == 1) { ss << "L" << num_spatial_layers << "T" << num_temporal_layers; @@ -112,12 +147,12 @@ absl::optional CreateVp9ScalabilityStructure( } // Check spatial ratio. - if (num_spatial_layers > 1 && codec.spatialLayers[0].targetBitrate > 0) { + if (num_spatial_layers > 1) { if (codec.width != codec.spatialLayers[num_spatial_layers - 1].width || codec.height != codec.spatialLayers[num_spatial_layers - 1].height) { RTC_LOG(LS_WARNING) << "Top layer resolution expected to match overall resolution"; - return absl::nullopt; + return nullptr; } // Check if the ratio is one of the supported. int numerator; @@ -135,7 +170,7 @@ absl::optional CreateVp9ScalabilityStructure( RTC_LOG(LS_WARNING) << "Unsupported scalability ratio " << codec.spatialLayers[0].width << ":" << codec.spatialLayers[1].width; - return absl::nullopt; + return nullptr; } // Validate ratio is consistent for all spatial layer transitions. for (int sid = 1; sid < num_spatial_layers; ++sid) { @@ -145,16 +180,16 @@ absl::optional CreateVp9ScalabilityStructure( codec.spatialLayers[sid - 1].height * denominator) { RTC_LOG(LS_WARNING) << "Inconsistent scalability ratio " << numerator << ":" << denominator; - return absl::nullopt; + return nullptr; } } } - absl::optional scalability_mode = + std::optional scalability_mode = ScalabilityModeFromString(name); if (!scalability_mode.has_value()) { RTC_LOG(LS_WARNING) << "Invalid scalability mode " << name; - return absl::nullopt; + return nullptr; } auto scalability_structure_controller = CreateScalabilityStructure(*scalability_mode); @@ -163,12 +198,11 @@ absl::optional CreateVp9ScalabilityStructure( } else { RTC_LOG(LS_INFO) << "Created scalability structure " << name; } - return absl::make_optional( - std::move(scalability_structure_controller), *scalability_mode); + return scalability_structure_controller; } vpx_svc_ref_frame_config_t Vp9References( - rtc::ArrayView layers) { + ArrayView layers) { vpx_svc_ref_frame_config_t ref_config = {}; for (const ScalableVideoController::LayerFrameConfig& layer_frame : layers) { const auto& buffers = layer_frame.Buffers(); @@ -201,13 +235,12 @@ vpx_svc_ref_frame_config_t Vp9References( } bool AllowDenoising() { - // Do not enable the denoiser on ARM since optimization is pending. - // Denoiser is on by default on other platforms. -#if !defined(WEBRTC_ARCH_ARM) && !defined(WEBRTC_ARCH_ARM64) && \ - !defined(ANDROID) - return true; -#else +#ifdef MOBILE_ARM + // Keep the denoiser disabled on mobile ARM devices. It increases encode time + // by up to 16%. 
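For the CreateVp9ScalabilityStructure() rewrite above, the structure name is composed from the layer counts and the inter-layer prediction mode and then resolved through ScalabilityModeFromString(). A standalone sketch of the naming scheme; the "_KEY" and "S"-prefix branches fall outside this hunk and are assumptions based on the existing upstream names:

#include <cstdio>
#include <string>

enum class InterLayerPred { kOn, kOnKeyPic, kOff };

// Mirrors the SimpleStringBuilder logic above: full inter-layer prediction (or
// a single spatial layer) yields "L<S>T<T>"; prediction restricted to key
// pictures is assumed to append "_KEY"; no prediction is assumed to use "S".
std::string ScalabilityName(int spatial_layers, int temporal_layers,
                            InterLayerPred pred) {
  char name[20];
  if (pred == InterLayerPred::kOn || spatial_layers == 1) {
    std::snprintf(name, sizeof(name), "L%dT%d", spatial_layers, temporal_layers);
  } else if (pred == InterLayerPred::kOnKeyPic) {
    std::snprintf(name, sizeof(name), "L%dT%d_KEY", spatial_layers,
                  temporal_layers);
  } else {  // InterLayerPred::kOff
    std::snprintf(name, sizeof(name), "S%dT%d", spatial_layers, temporal_layers);
  }
  return name;
}

int main() {
  // 3 spatial x 3 temporal layers with full prediction -> "L3T3".
  std::printf("%s\n", ScalabilityName(3, 3, InterLayerPred::kOn).c_str());
  std::printf("%s\n", ScalabilityName(2, 3, InterLayerPred::kOnKeyPic).c_str());
}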
return false; +#else + return true; #endif } @@ -219,14 +252,14 @@ void LibvpxVp9Encoder::EncoderOutputCodedPacketCallback(vpx_codec_cx_pkt* pkt, enc->GetEncodedLayerFrame(pkt); } -LibvpxVp9Encoder::LibvpxVp9Encoder(const cricket::VideoCodec& codec, - std::unique_ptr interface, - const FieldTrialsView& trials) - : libvpx_(std::move(interface)), +LibvpxVp9Encoder::LibvpxVp9Encoder(const Environment& env, + Vp9EncoderSettings settings, + std::unique_ptr interface) + : env_(env), + libvpx_(std::move(interface)), encoded_image_(), encoded_complete_callback_(nullptr), - profile_( - ParseSdpForVP9Profile(codec.params).value_or(VP9Profile::kProfile0)), + profile_(settings.profile), inited_(false), timestamp_(0), rc_max_intra_target_(0), @@ -240,29 +273,24 @@ LibvpxVp9Encoder::LibvpxVp9Encoder(const cricket::VideoCodec& codec, num_spatial_layers_(0), num_active_spatial_layers_(0), first_active_layer_(0), - layer_deactivation_requires_key_frame_(absl::StartsWith( - trials.Lookup("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation"), - "Enabled")), + layer_deactivation_requires_key_frame_(env.field_trials().IsEnabled( + "WebRTC-Vp9IssueKeyFrameOnLayerDeactivation")), is_svc_(false), inter_layer_pred_(InterLayerPredMode::kOn), - external_ref_control_(false), // Set in InitEncode because of tests. - trusted_rate_controller_( - RateControlSettings::ParseFromKeyValueConfig(&trials) - .LibvpxVp9TrustedRateController()), + trusted_rate_controller_(RateControlSettings(env.field_trials()) + .LibvpxVp9TrustedRateController()), first_frame_in_picture_(true), ss_info_needed_(false), force_all_active_layers_(false), + enable_svc_for_simulcast_( + !env.field_trials().IsDisabled("WebRTC-VP9-SvcForSimulcast")), is_flexible_mode_(false), - variable_framerate_experiment_(ParseVariableFramerateConfig(trials)), - variable_framerate_controller_( - variable_framerate_experiment_.framerate_limit), - quality_scaler_experiment_(ParseQualityScalerConfig(trials)), - external_ref_ctrl_( - !absl::StartsWith(trials.Lookup("WebRTC-Vp9ExternalRefCtrl"), - "Disabled")), - performance_flags_(ParsePerformanceFlagsFromTrials(trials)), + variable_framerate_controller_(variable_framerate_screenshare::kMinFps), + quality_scaler_experiment_(ParseQualityScalerConfig(env.field_trials())), + performance_flags_(ParsePerformanceFlagsFromTrials(env.field_trials())), num_steady_state_frames_(0), - config_changed_(true) { + config_changed_(true), + encoder_info_override_(env.field_trials()) { codec_ = {}; memset(&svc_params_, 0, sizeof(vpx_svc_extra_cfg_t)); } @@ -299,12 +327,6 @@ int LibvpxVp9Encoder::Release() { return ret_val; } -bool LibvpxVp9Encoder::ExplicitlyConfiguredSpatialLayers() const { - // We check target_bitrate_bps of the 0th layer to see if the spatial layers - // (i.e. bitrates) were explicitly configured. 
- return codec_.spatialLayers[0].targetBitrate > 0; -} - bool LibvpxVp9Encoder::SetSvcRates( const VideoBitrateAllocation& bitrate_allocation) { std::pair current_layers = @@ -331,66 +353,23 @@ bool LibvpxVp9Encoder::SetSvcRates( config_->rc_target_bitrate = bitrate_allocation.get_sum_kbps(); - if (ExplicitlyConfiguredSpatialLayers()) { - for (size_t sl_idx = 0; sl_idx < num_spatial_layers_; ++sl_idx) { - const bool was_layer_active = (config_->ss_target_bitrate[sl_idx] > 0); - config_->ss_target_bitrate[sl_idx] = - bitrate_allocation.GetSpatialLayerSum(sl_idx) / 1000; - - for (size_t tl_idx = 0; tl_idx < num_temporal_layers_; ++tl_idx) { - config_->layer_target_bitrate[sl_idx * num_temporal_layers_ + tl_idx] = - bitrate_allocation.GetTemporalLayerSum(sl_idx, tl_idx) / 1000; - } - - if (!was_layer_active) { - // Reset frame rate controller if layer is resumed after pause. - framerate_controller_[sl_idx].Reset(); - } - - framerate_controller_[sl_idx].SetTargetRate( - codec_.spatialLayers[sl_idx].maxFramerate); - } - } else { - float rate_ratio[VPX_MAX_LAYERS] = {0}; - float total = 0; - for (int i = 0; i < num_spatial_layers_; ++i) { - if (svc_params_.scaling_factor_num[i] <= 0 || - svc_params_.scaling_factor_den[i] <= 0) { - RTC_LOG(LS_ERROR) << "Scaling factors not specified!"; - return false; - } - rate_ratio[i] = static_cast(svc_params_.scaling_factor_num[i]) / - svc_params_.scaling_factor_den[i]; - total += rate_ratio[i]; + for (size_t sl_idx = 0; sl_idx < num_spatial_layers_; ++sl_idx) { + if (config_->ss_target_bitrate[sl_idx] == 0) { + // Reset frame rate controller if layer is resumed after pause. + framerate_controller_[sl_idx].Reset(); } - for (int i = 0; i < num_spatial_layers_; ++i) { - RTC_CHECK_GT(total, 0); - config_->ss_target_bitrate[i] = static_cast( - config_->rc_target_bitrate * rate_ratio[i] / total); - if (num_temporal_layers_ == 1) { - config_->layer_target_bitrate[i] = config_->ss_target_bitrate[i]; - } else if (num_temporal_layers_ == 2) { - config_->layer_target_bitrate[i * num_temporal_layers_] = - config_->ss_target_bitrate[i] * 2 / 3; - config_->layer_target_bitrate[i * num_temporal_layers_ + 1] = - config_->ss_target_bitrate[i]; - } else if (num_temporal_layers_ == 3) { - config_->layer_target_bitrate[i * num_temporal_layers_] = - config_->ss_target_bitrate[i] / 2; - config_->layer_target_bitrate[i * num_temporal_layers_ + 1] = - config_->layer_target_bitrate[i * num_temporal_layers_] + - (config_->ss_target_bitrate[i] / 4); - config_->layer_target_bitrate[i * num_temporal_layers_ + 2] = - config_->ss_target_bitrate[i]; - } else { - RTC_LOG(LS_ERROR) << "Unsupported number of temporal layers: " - << num_temporal_layers_; - return false; - } + config_->ss_target_bitrate[sl_idx] = + bitrate_allocation.GetSpatialLayerSum(sl_idx) / 1000; - framerate_controller_[i].SetTargetRate(codec_.maxFramerate); + for (size_t tl_idx = 0; tl_idx < num_temporal_layers_; ++tl_idx) { + config_->layer_target_bitrate[sl_idx * num_temporal_layers_ + tl_idx] = + bitrate_allocation.GetTemporalLayerSum(sl_idx, tl_idx) / 1000; } + + framerate_controller_[sl_idx].SetTargetRate( + num_spatial_layers_ > 1 ? 
codec_.spatialLayers[sl_idx].maxFramerate + : codec_.maxFramerate); } num_active_spatial_layers_ = 0; @@ -453,6 +432,36 @@ bool LibvpxVp9Encoder::SetSvcRates( return true; } +void LibvpxVp9Encoder::AdjustScalingFactorsForTopActiveLayer() { + if (num_active_spatial_layers_ == 0 || num_spatial_layers_ <= 1 || !is_svc_ || + static_cast(config_->g_w) == + codec_.spatialLayers[num_active_spatial_layers_ - 1].width) { + return; + } + + config_->g_w = codec_.spatialLayers[num_active_spatial_layers_ - 1].width; + config_->g_h = codec_.spatialLayers[num_active_spatial_layers_ - 1].height; + + // Recalculate scaling factors ignoring top inactive layers. + // Divide all by scaling factor of the last active layer. + for (int i = 0; i < num_active_spatial_layers_; ++i) { + int n = scaling_factors_num_[i] * + scaling_factors_den_[num_active_spatial_layers_ - 1]; + int d = scaling_factors_den_[i] * + scaling_factors_num_[num_active_spatial_layers_ - 1]; + int gcd = std::gcd(n, d); + svc_params_.scaling_factor_num[i] = n / gcd; + svc_params_.scaling_factor_den[i] = d / gcd; + } + for (int i = num_active_spatial_layers_; i < num_spatial_layers_; ++i) { + svc_params_.scaling_factor_num[i] = 1; + svc_params_.scaling_factor_den[i] = 1; + } + + libvpx_->codec_control(encoder_, VP9E_SET_SVC_PARAMETERS, &svc_params_); + config_changed_ = true; +} + void LibvpxVp9Encoder::DisableSpatialLayer(int sid) { RTC_DCHECK_LT(sid, num_spatial_layers_); if (config_->ss_target_bitrate[sid] == 0) { @@ -522,6 +531,7 @@ void LibvpxVp9Encoder::SetRates(const RateControlParameters& parameters) { bool res = SetSvcRates(parameters.bitrate); RTC_DCHECK(res) << "Failed to set new bitrate allocation"; + AdjustScalingFactorsForTopActiveLayer(); config_changed_ = true; } @@ -552,9 +562,6 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - absl::optional previous_img_fmt = - raw_ ? absl::make_optional(raw_->fmt) : absl::nullopt; - int ret_val = Release(); if (ret_val < 0) { return ret_val; @@ -571,12 +578,32 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, if (&codec_ != inst) { codec_ = *inst; } + + if (enable_svc_for_simulcast_ && codec_.numberOfSimulcastStreams > 1) { + if (!SimulcastToSvcConverter::IsConfigSupported(codec_)) { + return WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED; + } + RTC_LOG(LS_INFO) << "Rewriting simulcast config to SVC."; + current_bitrate_allocation_ = + SimulcastRateAllocator(env_, codec_) + .Allocate(VideoBitrateAllocationParameters( + codec_.startBitrate * 1000, codec_.maxFramerate)); + simulcast_to_svc_converter_.emplace(codec_); + codec_ = simulcast_to_svc_converter_->GetConfig(); + } else { + current_bitrate_allocation_ = + SvcRateAllocator(codec_, env_.field_trials()) + .Allocate(VideoBitrateAllocationParameters( + codec_.startBitrate * 1000, codec_.maxFramerate)); + simulcast_to_svc_converter_ = std::nullopt; + } + memset(&svc_params_, 0, sizeof(vpx_svc_extra_cfg_t)); force_key_frame_ = true; pics_since_key_ = 0; - scalability_mode_ = inst->GetScalabilityMode(); + scalability_mode_ = codec_.GetScalabilityMode(); if (scalability_mode_.has_value()) { // Use settings from `ScalabilityMode` identifier. 
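A worked example of the renormalization done by AdjustScalingFactorsForTopActiveLayer() above, using hypothetical configured factors of 1/4, 1/2 and 1/1 for three spatial layers with the top layer deactivated:

#include <cstdio>
#include <numeric>
#include <vector>

// Mirrors the arithmetic above: every configured factor is divided by the
// factor of the top *active* layer and reduced with std::gcd, so the top
// active layer ends up at 1/1 relative to the (already downscaled) input.
int main() {
  std::vector<int> num = {1, 1, 1};  // Hypothetical numerators.
  std::vector<int> den = {4, 2, 1};  // Hypothetical denominators.
  int num_active = 2;                // Top layer off; index 1 is now on top.

  for (int i = 0; i < num_active; ++i) {
    int n = num[i] * den[num_active - 1];
    int d = den[i] * num[num_active - 1];
    int g = std::gcd(n, d);
    std::printf("layer %d: %d/%d -> %d/%d\n", i, num[i], den[i], n / g, d / g);
  }
  // Prints: layer 0: 1/4 -> 1/2, layer 1: 1/2 -> 1/1.
}

Combined with the Scale() call added to Encode(), this keeps g_w/g_h at the top active layer's resolution instead of encoding padded, inactive top layers.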
RTC_LOG(LS_INFO) << "Create scalability structure " @@ -592,21 +619,14 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, num_temporal_layers_ = info.num_temporal_layers; inter_layer_pred_ = ScalabilityModeToInterLayerPredMode(*scalability_mode_); } else { - num_spatial_layers_ = inst->VP9().numberOfSpatialLayers; + num_spatial_layers_ = codec_.VP9()->numberOfSpatialLayers; RTC_DCHECK_GT(num_spatial_layers_, 0); - num_temporal_layers_ = inst->VP9().numberOfTemporalLayers; + num_temporal_layers_ = codec_.VP9()->numberOfTemporalLayers; if (num_temporal_layers_ == 0) { num_temporal_layers_ = 1; } - inter_layer_pred_ = inst->VP9().interLayerPred; - auto vp9_scalability = CreateVp9ScalabilityStructure(*inst); - if (vp9_scalability.has_value()) { - std::tie(svc_controller_, scalability_mode_) = - std::move(vp9_scalability.value()); - } else { - svc_controller_ = nullptr; - scalability_mode_ = absl::nullopt; - } + inter_layer_pred_ = codec_.VP9()->interLayerPred; + svc_controller_ = CreateVp9ScalabilityStructure(codec_); } framerate_controller_ = std::vector( @@ -619,12 +639,8 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, return WEBRTC_VIDEO_CODEC_ERROR; } - vpx_img_fmt img_fmt = VPX_IMG_FMT_NONE; - unsigned int bits_for_storage = 8; switch (profile_) { case VP9Profile::kProfile0: - img_fmt = previous_img_fmt.value_or(VPX_IMG_FMT_I420); - bits_for_storage = 8; config_->g_bit_depth = VPX_BITS_8; config_->g_profile = 0; config_->g_input_bit_depth = 8; @@ -635,8 +651,6 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, RTC_DCHECK_NOTREACHED(); break; case VP9Profile::kProfile2: - img_fmt = VPX_IMG_FMT_I42016; - bits_for_storage = 16; config_->g_bit_depth = VPX_BITS_10; config_->g_profile = 2; config_->g_input_bit_depth = 10; @@ -647,16 +661,9 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, break; } - // Creating a wrapper to the image - setting image data to nullptr. Actual - // pointer will be set in encode. Setting align to 1, as it is meaningless - // (actual memory is not allocated). - raw_ = libvpx_->img_wrap(nullptr, img_fmt, codec_.width, codec_.height, 1, - nullptr); - raw_->bit_depth = bits_for_storage; - config_->g_w = codec_.width; config_->g_h = codec_.height; - config_->rc_target_bitrate = inst->startBitrate; // in kbit/s + config_->rc_target_bitrate = codec_.startBitrate; // in kbit/s config_->g_error_resilient = is_svc_ ? VPX_ERROR_RESILIENT_DEFAULT : 0; // Setting the time base of the codec. config_->g_timebase.num = 1; @@ -664,7 +671,7 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, config_->g_lag_in_frames = 0; // 0- no frame lagging config_->g_threads = 1; // Rate control settings. - config_->rc_dropframe_thresh = inst->GetFrameDropEnabled() ? 30 : 0; + config_->rc_dropframe_thresh = codec_.GetFrameDropEnabled() ? 30 : 0; config_->rc_end_usage = VPX_CBR; config_->g_pass = VPX_RC_ONE_PASS; config_->rc_min_quantizer = @@ -681,20 +688,20 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, config_->kf_mode = VPX_KF_DISABLED; // TODO(webm:1592): work-around for libvpx issue, as it can still // put some key-frames at will even in VPX_KF_DISABLED kf_mode. - config_->kf_max_dist = inst->VP9().keyFrameInterval; + config_->kf_max_dist = codec_.VP9()->keyFrameInterval; config_->kf_min_dist = config_->kf_max_dist; if (quality_scaler_experiment_.enabled) { // In that experiment webrtc wide quality scaler is used instead of libvpx // internal scaler. 
config_->rc_resize_allowed = 0; } else { - config_->rc_resize_allowed = inst->VP9().automaticResizeOn ? 1 : 0; + config_->rc_resize_allowed = codec_.VP9()->automaticResizeOn ? 1 : 0; } // Determine number of threads based on the image size and #cores. config_->g_threads = NumberOfThreads(config_->g_w, config_->g_h, settings.number_of_cores); - is_flexible_mode_ = inst->VP9().flexibleMode; + is_flexible_mode_ = codec_.VP9()->flexibleMode; if (num_spatial_layers_ > 1 && codec_.mode == VideoCodecMode::kScreensharing && !is_flexible_mode_) { @@ -703,13 +710,6 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - // External reference control is required for different frame rate on spatial - // layers because libvpx generates rtp incompatible references in this case. - external_ref_control_ = external_ref_ctrl_ || - (num_spatial_layers_ > 1 && - codec_.mode == VideoCodecMode::kScreensharing) || - inter_layer_pred_ == InterLayerPredMode::kOn; - if (num_temporal_layers_ == 1) { gof_.SetGofInfoVP9(kTemporalStructureMode1); config_->temporal_layering_mode = VP9E_TEMPORAL_LAYERING_MODE_NOLAYERING; @@ -742,18 +742,16 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst, return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - if (external_ref_control_) { - config_->temporal_layering_mode = VP9E_TEMPORAL_LAYERING_MODE_BYPASS; - if (num_temporal_layers_ > 1 && num_spatial_layers_ > 1 && - codec_.mode == VideoCodecMode::kScreensharing) { - // External reference control for several temporal layers with different - // frame rates on spatial layers is not implemented yet. - return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; - } + config_->temporal_layering_mode = VP9E_TEMPORAL_LAYERING_MODE_BYPASS; + if (num_temporal_layers_ > 1 && num_spatial_layers_ > 1 && + codec_.mode == VideoCodecMode::kScreensharing) { + // External reference control for several temporal layers with different + // frame rates on spatial layers is not implemented yet. + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } ref_buf_ = {}; - return InitAndSetControlSettings(inst); + return InitAndSetControlSettings(); } int LibvpxVp9Encoder::NumberOfThreads(int width, @@ -766,9 +764,8 @@ int LibvpxVp9Encoder::NumberOfThreads(int width, } else if (width * height >= 640 * 360 && number_of_cores > 2) { return 2; } else { -// Use 2 threads for low res on ARM. -#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || \ - defined(WEBRTC_ANDROID) +// Use 2 threads for low res on mobile ARM. +#ifdef MOBILE_ARM if (width * height >= 320 * 180 && number_of_cores > 2) { return 2; } @@ -778,9 +775,11 @@ int LibvpxVp9Encoder::NumberOfThreads(int width, } } -int LibvpxVp9Encoder::InitAndSetControlSettings(const VideoCodec* inst) { +int LibvpxVp9Encoder::InitAndSetControlSettings() { // Set QP-min/max per spatial and temporal layer. 
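The NumberOfThreads() change above only retargets the low-resolution branch at mobile ARM via the new MOBILE_ARM macro. A standalone sketch of the branches visible in this hunk; the higher-resolution tiers sit above the hunk and are deliberately omitted:

#include <cstdio>

// Low/mid-resolution branches of NumberOfThreads() as they appear here.
int NumberOfThreadsSketch(int width, int height, int cores, bool mobile_arm) {
  if (width * height >= 640 * 360 && cores > 2) {
    return 2;
  }
  if (mobile_arm && width * height >= 320 * 180 && cores > 2) {
    return 2;  // MOBILE_ARM branch: 2 threads for low res on mobile ARM.
  }
  return 1;  // Single thread otherwise.
}

int main() {
  std::printf("%d\n", NumberOfThreadsSketch(640, 360, 4, /*mobile_arm=*/false));  // 2
  std::printf("%d\n", NumberOfThreadsSketch(320, 180, 4, /*mobile_arm=*/true));   // 2
  std::printf("%d\n", NumberOfThreadsSketch(320, 180, 4, /*mobile_arm=*/false));  // 1
}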
int tot_num_layers = num_spatial_layers_ * num_temporal_layers_; + scaling_factors_num_.resize(num_spatial_layers_); + scaling_factors_den_.resize(num_spatial_layers_); for (int i = 0; i < tot_num_layers; ++i) { svc_params_.max_quantizers[i] = config_->rc_max_quantizer; svc_params_.min_quantizers[i] = config_->rc_min_quantizer; @@ -789,10 +788,12 @@ int LibvpxVp9Encoder::InitAndSetControlSettings(const VideoCodec* inst) { if (svc_controller_) { auto stream_config = svc_controller_->StreamConfig(); for (int i = 0; i < stream_config.num_spatial_layers; ++i) { - svc_params_.scaling_factor_num[i] = stream_config.scaling_factor_num[i]; - svc_params_.scaling_factor_den[i] = stream_config.scaling_factor_den[i]; + scaling_factors_num_[i] = svc_params_.scaling_factor_num[i] = + stream_config.scaling_factor_num[i]; + scaling_factors_den_[i] = svc_params_.scaling_factor_den[i] = + stream_config.scaling_factor_den[i]; } - } else if (ExplicitlyConfiguredSpatialLayers()) { + } else if (num_spatial_layers_ > 1) { for (int i = 0; i < num_spatial_layers_; ++i) { const auto& layer = codec_.spatialLayers[i]; RTC_CHECK_GT(layer.width, 0); @@ -815,8 +816,9 @@ int LibvpxVp9Encoder::InitAndSetControlSettings(const VideoCodec* inst) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - svc_params_.scaling_factor_num[i] = 1; - svc_params_.scaling_factor_den[i] = scale_factor; + scaling_factors_num_[i] = svc_params_.scaling_factor_num[i] = 1; + scaling_factors_den_[i] = svc_params_.scaling_factor_den[i] = + scale_factor; RTC_DCHECK_GT(codec_.spatialLayers[i].maxFramerate, 0); RTC_DCHECK_LE(codec_.spatialLayers[i].maxFramerate, codec_.maxFramerate); @@ -827,23 +829,14 @@ int LibvpxVp9Encoder::InitAndSetControlSettings(const VideoCodec* inst) { codec_.spatialLayers[i - 1].maxFramerate); } } - } else { - int scaling_factor_num = 256; - for (int i = num_spatial_layers_ - 1; i >= 0; --i) { - // 1:2 scaling in each dimension. - svc_params_.scaling_factor_num[i] = scaling_factor_num; - svc_params_.scaling_factor_den[i] = 256; - } } UpdatePerformanceFlags(); RTC_DCHECK_EQ(performance_flags_by_spatial_index_.size(), static_cast(num_spatial_layers_)); - SvcRateAllocator init_allocator(codec_); - current_bitrate_allocation_ = - init_allocator.Allocate(VideoBitrateAllocationParameters( - inst->startBitrate * 1000, inst->maxFramerate)); + // `current_bitrate_allocation_` is set in InitEncode and may have used + // simulcast configuration. if (!SetSvcRates(current_bitrate_allocation_)) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } @@ -864,7 +857,7 @@ int LibvpxVp9Encoder::InitAndSetControlSettings(const VideoCodec* inst) { performance_flags_by_spatial_index_[si].deblock_mode; } bool denoiser_on = - AllowDenoising() && inst->VP9().denoisingOn && + AllowDenoising() && codec_.VP9()->denoisingOn && performance_flags_by_spatial_index_[num_spatial_layers_ - 1] .allow_denoising; libvpx_->codec_control(encoder_, VP9E_SET_NOISE_SENSITIVITY, @@ -874,7 +867,7 @@ int LibvpxVp9Encoder::InitAndSetControlSettings(const VideoCodec* inst) { libvpx_->codec_control(encoder_, VP8E_SET_MAX_INTRA_BITRATE_PCT, rc_max_intra_target_); libvpx_->codec_control(encoder_, VP9E_SET_AQ_MODE, - inst->VP9().adaptiveQpMode ? 3 : 0); + codec_.VP9()->adaptiveQpMode ? 
3 : 0); libvpx_->codec_control(encoder_, VP9E_SET_FRAME_PARALLEL_DECODING, 0); libvpx_->codec_control(encoder_, VP9E_SET_SVC_GF_TEMPORAL_REF, 0); @@ -907,8 +900,7 @@ int LibvpxVp9Encoder::InitAndSetControlSettings(const VideoCodec* inst) { memset(&svc_drop_frame_, 0, sizeof(svc_drop_frame_)); const bool reverse_constrained_drop_mode = inter_layer_pred_ == InterLayerPredMode::kOn && - codec_.mode == VideoCodecMode::kScreensharing && - num_spatial_layers_ > 1; + codec_.mode == VideoCodecMode::kScreensharing; if (reverse_constrained_drop_mode) { // Screenshare dropping mode: drop a layer only together with all lower // layers. This ensures that drops on lower layers won't reduce frame-rate @@ -919,11 +911,19 @@ int LibvpxVp9Encoder::InitAndSetControlSettings(const VideoCodec* inst) { svc_drop_frame_.framedrop_thresh[i] = config_->rc_dropframe_thresh; } } else { - // Configure encoder to drop entire superframe whenever it needs to drop - // a layer. This mode is preferred over per-layer dropping which causes - // quality flickering and is not compatible with RTP non-flexible mode. - svc_drop_frame_.framedrop_mode = FULL_SUPERFRAME_DROP; - svc_drop_frame_.max_consec_drop = std::numeric_limits::max(); + if (is_flexible_mode_ && svc_controller_ && + (inter_layer_pred_ == InterLayerPredMode::kOff || + inter_layer_pred_ == InterLayerPredMode::kOnKeyPic)) { + // SVC controller is required since it properly accounts for dropped + // refs (unlike SetReferences(), which assumes full superframe drop). + svc_drop_frame_.framedrop_mode = LAYER_DROP; + } else { + // Configure encoder to drop entire superframe whenever it needs to drop + // a layer. This mode is preferred over per-layer dropping which causes + // quality flickering and is not compatible with RTP non-flexible mode. + svc_drop_frame_.framedrop_mode = FULL_SUPERFRAME_DROP; + } + svc_drop_frame_.max_consec_drop = 2; for (size_t i = 0; i < num_spatial_layers_; ++i) { svc_drop_frame_.framedrop_thresh[i] = config_->rc_dropframe_thresh; } @@ -951,7 +951,7 @@ int LibvpxVp9Encoder::InitAndSetControlSettings(const VideoCodec* inst) { if (AllowDenoising() && !performance_flags_.use_per_layer_speed) { libvpx_->codec_control(encoder_, VP9E_SET_NOISE_SENSITIVITY, - inst->VP9().denoisingOn ? 1 : 0); + codec_.VP9()->denoisingOn ? 1 : 0); } if (codec_.mode == VideoCodecMode::kScreensharing) { @@ -960,6 +960,10 @@ int LibvpxVp9Encoder::InitAndSetControlSettings(const VideoCodec* inst) { } // Enable encoder skip of static/low content blocks. libvpx_->codec_control(encoder_, VP8E_SET_STATIC_THRESHOLD, 1); + + // This has to be done after the initial setup is completed. + AdjustScalingFactorsForTopActiveLayer(); + inited_ = true; config_changed_ = true; return WEBRTC_VIDEO_CODEC_OK; @@ -1007,6 +1011,9 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, if (svc_controller_) { layer_frames_ = svc_controller_->NextFrameConfig(force_key_frame_); + if (simulcast_to_svc_converter_) { + simulcast_to_svc_converter_->EncodeStarted(force_key_frame_); + } if (layer_frames_.empty()) { return WEBRTC_VIDEO_CODEC_ERROR; } @@ -1022,16 +1029,15 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, if (codec_.mode == VideoCodecMode::kScreensharing) { const uint32_t frame_timestamp_ms = - 1000 * input_image.timestamp() / kVideoPayloadTypeFrequency; + 1000 * input_image.rtp_timestamp() / kVideoPayloadTypeFrequency; // To ensure that several rate-limiters with different limits don't // interfere, they must be queried in order of increasing limit. 
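The svc_drop_frame_ setup earlier in this hunk now chooses between three drop behaviours. A condensed restatement of that decision with local stand-in enums (the names below are illustrative, not libvpx's):

enum class InterLayerPred { kOn, kOnKeyPic, kOff };
enum class DropMode { kDropWithAllLowerLayers, kLayerDrop, kFullSuperframeDrop };

DropMode SelectDropMode(bool screenshare_with_on_prediction,
                        bool flexible_mode,
                        bool has_svc_controller,
                        InterLayerPred pred) {
  if (screenshare_with_on_prediction) {
    // Screenshare: a layer may only be dropped together with all lower layers.
    return DropMode::kDropWithAllLowerLayers;
  }
  if (flexible_mode && has_svc_controller &&
      (pred == InterLayerPred::kOff || pred == InterLayerPred::kOnKeyPic)) {
    // The SVC controller accounts for dropped refs, so per-layer drop is safe.
    return DropMode::kLayerDrop;
  }
  // Drop the whole superframe: avoids quality flicker and stays compatible
  // with RTP non-flexible mode.
  return DropMode::kFullSuperframeDrop;
}

int main() {
  return SelectDropMode(false, true, true, InterLayerPred::kOnKeyPic) ==
                 DropMode::kLayerDrop
             ? 0
             : 1;
}

In both non-screenshare cases max_consec_drop is now capped at 2, where the superframe-drop path previously allowed unlimited consecutive drops.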
bool use_steady_state_limiter = - variable_framerate_experiment_.enabled && input_image.update_rect().IsEmpty() && num_steady_state_frames_ >= - variable_framerate_experiment_.frames_before_steady_state; + variable_framerate_screenshare::kFramesBeforeSteadyState; // Need to check all frame limiters, even if lower layers are disabled, // because variable frame-rate limiter should be checked after the first @@ -1042,7 +1048,7 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, framerate_controller_[layer_id.spatial_layer_id].GetTargetRate(); // Use steady state rate-limiter at the correct place. if (use_steady_state_limiter && - layer_fps > variable_framerate_experiment_.framerate_limit - 1e-9) { + layer_fps > variable_framerate_screenshare::kMinFps - 1e-9) { if (variable_framerate_controller_.DropFrame(frame_timestamp_ms)) { layer_id.spatial_layer_id = num_active_spatial_layers_; } @@ -1132,10 +1138,10 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, // resolution instead and base the speed on that. for (int i = num_spatial_layers_ - 1; i >= 0; --i) { if (config_->ss_target_bitrate[i] > 0) { - int width = (svc_params_.scaling_factor_num[i] * config_->g_w) / - svc_params_.scaling_factor_den[i]; - int height = (svc_params_.scaling_factor_num[i] * config_->g_h) / - svc_params_.scaling_factor_den[i]; + int width = (scaling_factors_num_[i] * codec_.width) / + scaling_factors_den_[i]; + int height = (scaling_factors_num_[i] * codec_.height) / + scaling_factors_den_[i]; int speed = std::prev(performance_flags_.settings_by_resolution.lower_bound( width * height)) @@ -1148,24 +1154,32 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, config_changed_ = false; } - RTC_DCHECK_EQ(input_image.width(), raw_->d_w); - RTC_DCHECK_EQ(input_image.height(), raw_->d_h); - // Set input image for use in the callback. // This was necessary since you need some information from input_image. // You can save only the necessary information (such as timestamp) instead of // doing this. input_image_ = &input_image; + scoped_refptr scaled_image; + if (!is_svc_ || num_active_spatial_layers_ == num_spatial_layers_) { + scaled_image = input_image.video_frame_buffer(); + } else { + scaled_image = input_image.video_frame_buffer()->Scale( + codec_.spatialLayers[num_active_spatial_layers_ - 1].width, + codec_.spatialLayers[num_active_spatial_layers_ - 1].height); + } + + RTC_DCHECK_EQ(scaled_image->width(), config_->g_w); + RTC_DCHECK_EQ(scaled_image->height(), config_->g_h); + // In case we need to map the buffer, `mapped_buffer` is used to keep it alive // through reference counting until after encoding has finished. - rtc::scoped_refptr mapped_buffer; + scoped_refptr mapped_buffer; const I010BufferInterface* i010_buffer; - rtc::scoped_refptr i010_copy; + scoped_refptr i010_copy; switch (profile_) { case VP9Profile::kProfile0: { - mapped_buffer = - PrepareBufferForProfile0(input_image.video_frame_buffer()); + mapped_buffer = PrepareBufferForProfile0(scaled_image); if (!mapped_buffer) { return WEBRTC_VIDEO_CODEC_ERROR; } @@ -1180,11 +1194,11 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, // should be converted to it. 
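The per-layer speed below is now derived from codec_.width/height and the saved scaling factors. The lookup itself, std::prev(lower_bound(...)) on a map keyed by pixel count, can be sketched standalone; the tiers and speed values here are hypothetical:

#include <cstdio>
#include <map>

// std::prev(lower_bound(pixels)) returns the entry with the largest key
// strictly below the requested pixel count.
int main() {
  const std::map<int, int> speed_by_min_pixels = {
      {0, 9},          // hypothetical tier
      {352 * 288, 8},  // hypothetical tier
      {640 * 360, 7},  // hypothetical tier
  };

  auto speed_for = [&](int width, int height) {
    return std::prev(speed_by_min_pixels.lower_bound(width * height))->second;
  };

  std::printf("1280x720 -> speed %d\n", speed_for(1280, 720));  // 7
  std::printf("320x180  -> speed %d\n", speed_for(320, 180));   // 9
}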
switch (input_image.video_frame_buffer()->type()) { case VideoFrameBuffer::Type::kI010: { - i010_buffer = input_image.video_frame_buffer()->GetI010(); + i010_buffer = scaled_image->GetI010(); break; } default: { - auto i420_buffer = input_image.video_frame_buffer()->ToI420(); + auto i420_buffer = scaled_image->ToI420(); if (!i420_buffer) { RTC_LOG(LS_ERROR) << "Failed to convert " << VideoFrameBufferTypeToString( @@ -1196,6 +1210,8 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, i010_buffer = i010_copy.get(); } } + MaybeRewrapRawWithFormat(VPX_IMG_FMT_I42016, i010_buffer->width(), + i010_buffer->height()); raw_->planes[VPX_PLANE_Y] = const_cast( reinterpret_cast(i010_buffer->DataY())); raw_->planes[VPX_PLANE_U] = const_cast( @@ -1222,7 +1238,7 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, vpx_svc_ref_frame_config_t ref_config = Vp9References(layer_frames_); libvpx_->codec_control(encoder_, VP9E_SET_SVC_REF_FRAME_CONFIG, &ref_config); - } else if (external_ref_control_) { + } else { vpx_svc_ref_frame_config_t ref_config = SetReferences(force_key_frame_, layer_id.spatial_layer_id); @@ -1271,8 +1287,8 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, } bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, - absl::optional* spatial_idx, - absl::optional* temporal_idx, + std::optional* spatial_idx, + std::optional* temporal_idx, const vpx_codec_cx_pkt& pkt) { RTC_CHECK(codec_specific != nullptr); codec_specific->codecType = kVideoCodecVP9; @@ -1298,14 +1314,14 @@ bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, if (num_temporal_layers_ == 1) { RTC_CHECK_EQ(layer_id.temporal_layer_id, 0); vp9_info->temporal_idx = kNoTemporalIdx; - *temporal_idx = absl::nullopt; + *temporal_idx = std::nullopt; } else { vp9_info->temporal_idx = layer_id.temporal_layer_id; *temporal_idx = layer_id.temporal_layer_id; } if (num_active_spatial_layers_ == 1) { RTC_CHECK_EQ(layer_id.spatial_layer_id, 0); - *spatial_idx = absl::nullopt; + *spatial_idx = std::nullopt; } else { *spatial_idx = layer_id.spatial_layer_id; } @@ -1380,10 +1396,10 @@ bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, vp9_info->height[i] = 0; } for (size_t i = first_active_layer_; i < num_active_spatial_layers_; ++i) { - vp9_info->width[i] = codec_.width * svc_params_.scaling_factor_num[i] / - svc_params_.scaling_factor_den[i]; - vp9_info->height[i] = codec_.height * svc_params_.scaling_factor_num[i] / - svc_params_.scaling_factor_den[i]; + vp9_info->width[i] = + codec_.width * scaling_factors_num_[i] / scaling_factors_den_[i]; + vp9_info->height[i] = + codec_.height * scaling_factors_num_[i] / scaling_factors_den_[i]; } if (vp9_info->flexible_mode) { vp9_info->gof.num_frames_in_gof = 0; @@ -1420,10 +1436,10 @@ bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, resolutions.resize(num_spatial_layers_); for (int sid = 0; sid < num_spatial_layers_; ++sid) { resolutions[sid] = RenderResolution( - /*width=*/codec_.width * svc_params_.scaling_factor_num[sid] / - svc_params_.scaling_factor_den[sid], - /*height=*/codec_.height * svc_params_.scaling_factor_num[sid] / - svc_params_.scaling_factor_den[sid]); + /*width=*/codec_.width * scaling_factors_num_[sid] / + scaling_factors_den_[sid], + /*height=*/codec_.height * scaling_factors_num_[sid] / + scaling_factors_den_[sid]); } } if (is_flexible_mode_) { @@ -1444,7 +1460,19 @@ bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* 
codec_specific, } } } - codec_specific->scalability_mode = scalability_mode_; + // If returned the configured scalability mode in standard mode, otherwise + // create one if it is based on layer activation. + if (scalability_mode_) { + codec_specific->scalability_mode = scalability_mode_; + } else { + codec_specific_.scalability_mode = MakeScalabilityMode( + num_active_spatial_layers_, num_temporal_layers_, inter_layer_pred_, + num_active_spatial_layers_ > 1 + ? std::make_optional(ScalabilityModeResolutionRatio::kTwoToOne) + : std::nullopt, + /*shift=*/false); + } + return true; } @@ -1554,7 +1582,7 @@ void LibvpxVp9Encoder::FillReferenceIndices(const vpx_codec_cx_pkt& pkt, } } -void LibvpxVp9Encoder::UpdateReferenceBuffers(const vpx_codec_cx_pkt& pkt, +void LibvpxVp9Encoder::UpdateReferenceBuffers(const vpx_codec_cx_pkt& /* pkt */, const size_t pic_num) { vpx_svc_layer_id_t layer_id = {0}; libvpx_->codec_control(encoder_, VP9E_GET_SVC_LAYER_ID, &layer_id); @@ -1609,7 +1637,7 @@ vpx_svc_ref_frame_config_t LibvpxVp9Encoder::SetReferences( const bool is_inter_layer_pred_allowed = inter_layer_pred_ == InterLayerPredMode::kOn || (inter_layer_pred_ == InterLayerPredMode::kOnKeyPic && is_key_pic); - absl::optional last_updated_buf_idx; + std::optional last_updated_buf_idx; // Put temporal reference to LAST and spatial reference to GOLDEN. Update // frame buffer (i.e. store encoded frame) if current frame is a temporal @@ -1705,8 +1733,8 @@ void LibvpxVp9Encoder::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { static_cast(pkt->data.frame.buf), pkt->data.frame.sz)); codec_specific_ = {}; - absl::optional spatial_index; - absl::optional temporal_index; + std::optional spatial_index; + std::optional temporal_index; if (!PopulateCodecSpecific(&codec_specific_, &spatial_index, &temporal_index, *pkt)) { // Drop the frame. @@ -1733,9 +1761,9 @@ void LibvpxVp9Encoder::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { UpdateReferenceBuffers(*pkt, pics_since_key_); TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_.size()); - encoded_image_.SetTimestamp(input_image_->timestamp()); - encoded_image_.SetCaptureTimeIdentifier( - input_image_->capture_time_identifier()); + encoded_image_.SetRtpTimestamp(input_image_->rtp_timestamp()); + encoded_image_.SetPresentationTimestamp( + input_image_->presentation_timestamp()); encoded_image_.SetColorSpace(input_image_->color_space()); encoded_image_._encodedHeight = pkt->data.frame.height[layer_id.spatial_layer_id]; @@ -1762,13 +1790,20 @@ void LibvpxVp9Encoder::DeliverBufferedFrame(bool end_of_picture) { codec_specific_.end_of_picture = end_of_picture; + if (!simulcast_to_svc_converter_) { + encoded_image_.SetSimulcastIndex(std::nullopt); + } else { + simulcast_to_svc_converter_->ConvertFrame(encoded_image_, + codec_specific_); + } + encoded_complete_callback_->OnEncodedImage(encoded_image_, &codec_specific_); if (codec_.mode == VideoCodecMode::kScreensharing) { const uint8_t spatial_idx = encoded_image_.SpatialIndex().value_or(0); const uint32_t frame_timestamp_ms = - 1000 * encoded_image_.Timestamp() / kVideoPayloadTypeFrequency; + 1000 * encoded_image_.RtpTimestamp() / kVideoPayloadTypeFrequency; framerate_controller_[spatial_idx].AddFrame(frame_timestamp_ms); const size_t steady_state_size = SteadyStateSize( @@ -1777,9 +1812,8 @@ void LibvpxVp9Encoder::DeliverBufferedFrame(bool end_of_picture) { // Only frames on spatial layers, which may be limited in a steady state // are considered for steady state detection. 
if (framerate_controller_[spatial_idx].GetTargetRate() > - variable_framerate_experiment_.framerate_limit + 1e-9) { - if (encoded_image_.qp_ <= - variable_framerate_experiment_.steady_state_qp && + variable_framerate_screenshare::kMinFps + 1e-9) { + if (encoded_image_.qp_ <= variable_framerate_screenshare::kMinQP && encoded_image_.size() <= steady_state_size) { ++num_steady_state_frames_; } else { @@ -1800,6 +1834,7 @@ int LibvpxVp9Encoder::RegisterEncodeCompleteCallback( VideoEncoder::EncoderInfo LibvpxVp9Encoder::GetEncoderInfo() const { EncoderInfo info; info.supports_native_handle = false; + info.supports_simulcast = true; info.implementation_name = "libvpx"; if (quality_scaler_experiment_.enabled && inited_ && codec_.VP9().automaticResizeOn) { @@ -1834,14 +1869,18 @@ VideoEncoder::EncoderInfo LibvpxVp9Encoder::GetEncoderInfo() const { num_temporal_layers_ <= 1 ? 1 : config_->ts_rate_decimator[ti]; RTC_DCHECK_GT(decimator, 0); info.fps_allocation[si].push_back( - rtc::saturated_cast(EncoderInfo::kMaxFramerateFraction * - (sl_fps_fraction / decimator))); + saturated_cast(EncoderInfo::kMaxFramerateFraction * + (sl_fps_fraction / decimator))); } } if (profile_ == VP9Profile::kProfile0) { info.preferred_pixel_formats = {VideoFrameBuffer::Type::kI420, VideoFrameBuffer::Type::kNV12}; } + + if (codec_.mode == VideoCodecMode::kScreensharing) { + info.min_qp = variable_framerate_screenshare::kMinQP; + } } if (!encoder_info_override_.resolution_bitrate_limits().empty()) { info.resolution_bitrate_limits = @@ -1859,34 +1898,10 @@ size_t LibvpxVp9Encoder::SteadyStateSize(int sid, int tid) { : codec_.maxFramerate; return static_cast( bitrate_bps / (8 * fps) * - (100 - - variable_framerate_experiment_.steady_state_undershoot_percentage) / - 100 + + (100 - variable_framerate_screenshare::kUndershootPct) / 100 + 0.5); } -// static -LibvpxVp9Encoder::VariableFramerateExperiment -LibvpxVp9Encoder::ParseVariableFramerateConfig(const FieldTrialsView& trials) { - FieldTrialFlag enabled = FieldTrialFlag("Enabled"); - FieldTrialParameter framerate_limit("min_fps", 5.0); - FieldTrialParameter qp("min_qp", 32); - FieldTrialParameter undershoot_percentage("undershoot", 30); - FieldTrialParameter frames_before_steady_state( - "frames_before_steady_state", 5); - ParseFieldTrial({&enabled, &framerate_limit, &qp, &undershoot_percentage, - &frames_before_steady_state}, - trials.Lookup("WebRTC-VP9VariableFramerateScreenshare")); - VariableFramerateExperiment config; - config.enabled = enabled.Get(); - config.framerate_limit = framerate_limit.Get(); - config.steady_state_qp = qp.Get(); - config.steady_state_undershoot_percentage = undershoot_percentage.Get(); - config.frames_before_steady_state = frames_before_steady_state.Get(); - - return config; -} - // static LibvpxVp9Encoder::QualityScalerExperiment LibvpxVp9Encoder::ParseQualityScalerConfig(const FieldTrialsView& trials) { @@ -1998,7 +2013,7 @@ LibvpxVp9Encoder::PerformanceFlags LibvpxVp9Encoder::GetDefaultPerformanceFlags() { PerformanceFlags flags; flags.use_per_layer_speed = true; -#if defined(WEBRTC_ARCH_ARM) || defined(WEBRTC_ARCH_ARM64) || defined(ANDROID) +#ifdef MOBILE_ARM // Speed 8 on all layers for all resolutions. 
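SteadyStateSize() above now uses the fixed variable_framerate_screenshare constants instead of values parsed from the retired WebRTC-VP9VariableFramerateScreenshare trial. A worked example with an assumed 150 kbps layer target:

#include <cstddef>
#include <cstdio>

// Reproduces the SteadyStateSize() arithmetic: a frame counts towards steady
// state when its QP is at most kMinQP (32) and its size undershoots the
// per-frame budget by at least kUndershootPct (30%).
int main() {
  constexpr int kUndershootPct = 30;
  const int bitrate_bps = 150'000;  // Hypothetical per-layer target.
  const float fps = 5.0f;           // kMinFps, the steady-state frame rate cap.

  const size_t steady_state_size = static_cast<size_t>(
      bitrate_bps / (8 * fps) * (100 - kUndershootPct) / 100 + 0.5);

  // 150 kbps at 5 fps is 3750 bytes per frame; 70% of that is 2625 bytes.
  std::printf("steady-state frame size threshold: %zu bytes\n",
              steady_state_size);
}

Once kFramesBeforeSteadyState (5) consecutive frames meet both the QP and the size criteria, Encode() starts consulting variable_framerate_controller_ and caps the layer at kMinFps while the update rect stays empty.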
flags.settings_by_resolution[0] = {.base_layer_speed = 8, .high_layer_speed = 8, @@ -2036,27 +2051,32 @@ LibvpxVp9Encoder::GetDefaultPerformanceFlags() { return flags; } -void LibvpxVp9Encoder::MaybeRewrapRawWithFormat(const vpx_img_fmt fmt) { +void LibvpxVp9Encoder::MaybeRewrapRawWithFormat(const vpx_img_fmt fmt, + unsigned int width, + unsigned int height) { if (!raw_) { - raw_ = libvpx_->img_wrap(nullptr, fmt, codec_.width, codec_.height, 1, - nullptr); - } else if (raw_->fmt != fmt) { + raw_ = libvpx_->img_wrap(nullptr, fmt, width, height, 1, nullptr); + RTC_LOG(LS_INFO) << "Configured VP9 encoder pixel format to " + << (fmt == VPX_IMG_FMT_NV12 ? "NV12" : "I420") << " " + << width << "x" << height; + } else if (raw_->fmt != fmt || raw_->d_w != width || raw_->d_h != height) { RTC_LOG(LS_INFO) << "Switching VP9 encoder pixel format to " - << (fmt == VPX_IMG_FMT_NV12 ? "NV12" : "I420"); + << (fmt == VPX_IMG_FMT_NV12 ? "NV12" : "I420") << " " + << width << "x" << height; libvpx_->img_free(raw_); - raw_ = libvpx_->img_wrap(nullptr, fmt, codec_.width, codec_.height, 1, - nullptr); + raw_ = libvpx_->img_wrap(nullptr, fmt, width, height, 1, nullptr); } // else no-op since the image is already in the right format. + raw_->bit_depth = (fmt == VPX_IMG_FMT_I42016) ? 16 : 8; } -rtc::scoped_refptr LibvpxVp9Encoder::PrepareBufferForProfile0( - rtc::scoped_refptr buffer) { +scoped_refptr LibvpxVp9Encoder::PrepareBufferForProfile0( + scoped_refptr buffer) { absl::InlinedVector supported_formats = {VideoFrameBuffer::Type::kI420, VideoFrameBuffer::Type::kNV12}; - rtc::scoped_refptr mapped_buffer; + scoped_refptr mapped_buffer; if (buffer->type() != VideoFrameBuffer::Type::kNative) { // `buffer` is already mapped. mapped_buffer = buffer; @@ -2088,7 +2108,8 @@ rtc::scoped_refptr LibvpxVp9Encoder::PrepareBufferForProfile0( switch (mapped_buffer->type()) { case VideoFrameBuffer::Type::kI420: case VideoFrameBuffer::Type::kI420A: { - MaybeRewrapRawWithFormat(VPX_IMG_FMT_I420); + MaybeRewrapRawWithFormat(VPX_IMG_FMT_I420, mapped_buffer->width(), + mapped_buffer->height()); const I420BufferInterface* i420_buffer = mapped_buffer->GetI420(); RTC_DCHECK(i420_buffer); raw_->planes[VPX_PLANE_Y] = const_cast(i420_buffer->DataY()); @@ -2100,7 +2121,8 @@ rtc::scoped_refptr LibvpxVp9Encoder::PrepareBufferForProfile0( break; } case VideoFrameBuffer::Type::kNV12: { - MaybeRewrapRawWithFormat(VPX_IMG_FMT_NV12); + MaybeRewrapRawWithFormat(VPX_IMG_FMT_NV12, mapped_buffer->width(), + mapped_buffer->height()); const NV12BufferInterface* nv12_buffer = mapped_buffer->GetNV12(); RTC_DCHECK(nv12_buffer); raw_->planes[VPX_PLANE_Y] = const_cast(nv12_buffer->DataY()); diff --git a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h index 0474e7bc17..aa54caa6b2 100644 --- a/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h +++ b/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h @@ -15,31 +15,46 @@ #ifdef RTC_ENABLE_VP9 #include +#include +#include #include +#include #include +#include "api/environment/environment.h" #include "api/fec_controller_override.h" #include "api/field_trials_view.h" +#include "api/scoped_refptr.h" +#include "api/video/encoded_image.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" #include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" #include 
"api/video_codecs/vp9_profile.h" -#include "common_video/include/video_frame_buffer_pool.h" #include "modules/video_coding/codecs/interface/libvpx_interface.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" -#include "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/svc/scalable_video_controller.h" +#include "modules/video_coding/svc/simulcast_to_svc_converter.h" #include "modules/video_coding/utility/framerate_controller_deprecated.h" #include "rtc_base/containers/flat_map.h" #include "rtc_base/experiments/encoder_info_settings.h" #include "vpx/vp8cx.h" +#include "vpx/vpx_codec.h" +#include "vpx/vpx_encoder.h" +#include "vpx/vpx_image.h" namespace webrtc { -class LibvpxVp9Encoder : public VP9Encoder { +class LibvpxVp9Encoder : public VideoEncoder { public: - LibvpxVp9Encoder(const cricket::VideoCodec& codec, - std::unique_ptr interface, - const FieldTrialsView& trials); + LibvpxVp9Encoder(const Environment& env, + Vp9EncoderSettings settings, + std::unique_ptr interface); ~LibvpxVp9Encoder() override; @@ -65,11 +80,11 @@ class LibvpxVp9Encoder : public VP9Encoder { int NumberOfThreads(int width, int height, int number_of_cores); // Call encoder initialize function and set control settings. - int InitAndSetControlSettings(const VideoCodec* inst); + int InitAndSetControlSettings(); bool PopulateCodecSpecific(CodecSpecificInfo* codec_specific, - absl::optional* spatial_idx, - absl::optional* temporal_idx, + std::optional* spatial_idx, + std::optional* temporal_idx, const vpx_codec_cx_pkt& pkt); void FillReferenceIndices(const vpx_codec_cx_pkt& pkt, size_t pic_num, @@ -82,6 +97,10 @@ class LibvpxVp9Encoder : public VP9Encoder { bool ExplicitlyConfiguredSpatialLayers() const; bool SetSvcRates(const VideoBitrateAllocation& bitrate_allocation); + // Adjust sclaing factors assuming that the top active SVC layer + // will be the input resolution. + void AdjustScalingFactorsForTopActiveLayer(); + // Configures which spatial layers libvpx should encode according to // configuration provided by svc_controller_. void EnableSpatialLayer(int sid); @@ -108,14 +127,17 @@ class LibvpxVp9Encoder : public VP9Encoder { size_t SteadyStateSize(int sid, int tid); - void MaybeRewrapRawWithFormat(vpx_img_fmt fmt); + void MaybeRewrapRawWithFormat(const vpx_img_fmt fmt, + unsigned int width, + unsigned int height); // Prepares `raw_` to reference image data of `buffer`, or of mapped or scaled // versions of `buffer`. Returns the buffer that got referenced as a result, // allowing the caller to keep a reference to it until after encoding has // finished. On failure to convert the buffer, null is returned. 
- rtc::scoped_refptr PrepareBufferForProfile0( - rtc::scoped_refptr buffer); + scoped_refptr PrepareBufferForProfile0( + scoped_refptr buffer); + const Environment env_; const std::unique_ptr libvpx_; EncodedImage encoded_image_; CodecSpecificInfo codec_specific_; @@ -138,10 +160,10 @@ class LibvpxVp9Encoder : public VP9Encoder { uint8_t num_spatial_layers_; // Number of configured SLs uint8_t num_active_spatial_layers_; // Number of actively encoded SLs uint8_t first_active_layer_; + uint8_t last_active_layer_; bool layer_deactivation_requires_key_frame_; bool is_svc_; InterLayerPredMode inter_layer_pred_; - bool external_ref_control_; const bool trusted_rate_controller_; vpx_svc_frame_drop_t svc_drop_frame_; bool first_frame_in_picture_; @@ -149,8 +171,11 @@ class LibvpxVp9Encoder : public VP9Encoder { bool ss_info_needed_; bool force_all_active_layers_; + const bool enable_svc_for_simulcast_; + std::optional simulcast_to_svc_converter_; + std::unique_ptr svc_controller_; - absl::optional scalability_mode_; + std::optional scalability_mode_; std::vector framerate_controller_; // Used for flexible mode. @@ -168,24 +193,12 @@ class LibvpxVp9Encoder : public VP9Encoder { std::array ref_buf_; std::vector layer_frames_; - // Variable frame-rate related fields and methods. - const struct VariableFramerateExperiment { - bool enabled; - // Framerate is limited to this value in steady state. - float framerate_limit; - // This qp or below is considered a steady state. - int steady_state_qp; - // Frames of at least this percentage below ideal for configured bitrate are - // considered in a steady state. - int steady_state_undershoot_percentage; - // Number of consecutive frames with good QP and size required to detect - // the steady state. - int frames_before_steady_state; - } variable_framerate_experiment_; - static VariableFramerateExperiment ParseVariableFramerateConfig( - const FieldTrialsView& trials); FramerateControllerDeprecated variable_framerate_controller_; + // Original scaling factors for all configured layers active and inactive. + // `svc_config_` stores factors ignoring top inactive layers. + std::vector scaling_factors_num_, scaling_factors_den_; + const struct QualityScalerExperiment { int low_qp; int high_qp; @@ -193,7 +206,6 @@ class LibvpxVp9Encoder : public VP9Encoder { } quality_scaler_experiment_; static QualityScalerExperiment ParseQualityScalerConfig( const FieldTrialsView& trials); - const bool external_ref_ctrl_; // Flags that can affect speed vs quality tradeoff, and are configureable per // resolution ranges. 
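The header hunk above adds AdjustScalingFactorsForTopActiveLayer() together with the scaling_factors_num_/scaling_factors_den_ members that remember the originally configured per-layer scaling fractions. Judging from the ScalesInputToActiveResolution expectations further down in this patch, the adjustment renormalizes those fractions so that the highest still-active spatial layer is encoded at the full input resolution. Below is a minimal standalone sketch of that renormalization; the free-function form, the helper name and the ScalingFactors struct are illustrative only, not the actual member implementation.

#include <cstddef>
#include <numeric>
#include <vector>

struct ScalingFactors {
  std::vector<int> num;  // Per-spatial-layer numerators, e.g. {1, 1, 1}.
  std::vector<int> den;  // Per-spatial-layer denominators, e.g. {4, 2, 1}.
};

// Rescale every layer up to and including `top_active_layer` so that the top
// active layer gets factor 1/1, i.e. is encoded at the input resolution.
ScalingFactors AdjustForTopActiveLayer(const ScalingFactors& original,
                                       size_t top_active_layer) {
  ScalingFactors adjusted = original;
  const int top_num = original.num[top_active_layer];
  const int top_den = original.den[top_active_layer];
  for (size_t sid = 0; sid <= top_active_layer; ++sid) {
    // original[sid] / original[top], reduced to lowest terms.
    int num = original.num[sid] * top_den;
    int den = original.den[sid] * top_num;
    const int divisor = std::gcd(num, den);
    adjusted.num[sid] = num / divisor;
    adjusted.den[sid] = den / divisor;
  }
  return adjusted;
}

With numerators {1, 1, 1} and denominators {4, 2, 1}, deactivating the top spatial layer (top_active_layer == 1) yields {1/2, 1/1}, which matches the scaling_factor_den pattern {2, 1, ...} that the ScalesInputToActiveResolution test later in this patch expects libvpx to receive via VP9E_SET_SVC_PARAMETERS.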
diff --git a/modules/video_coding/codecs/vp9/svc_config.cc b/modules/video_coding/codecs/vp9/svc_config.cc index 43def0f6f3..2b99f7f852 100644 --- a/modules/video_coding/codecs/vp9/svc_config.cc +++ b/modules/video_coding/codecs/vp9/svc_config.cc @@ -12,13 +12,19 @@ #include #include -#include +#include +#include +#include #include -#include "media/base/video_common.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/spatial_layer.h" +#include "api/video_codecs/video_codec.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/svc/create_scalability_structure.h" #include "modules/video_coding/svc/scalability_mode_util.h" +#include "modules/video_coding/svc/scalable_video_controller.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -83,7 +89,7 @@ std::vector ConfigureSvcNormalVideo( size_t first_active_layer, size_t num_spatial_layers, size_t num_temporal_layers, - absl::optional config) { + std::optional config) { RTC_DCHECK_LT(first_active_layer, num_spatial_layers); // Limit number of layers for given resolution. @@ -105,8 +111,8 @@ std::vector ConfigureSvcNormalVideo( if (config) { required_divisiblity = 1; for (size_t sl_idx = 0; sl_idx < num_spatial_layers; ++sl_idx) { - required_divisiblity = cricket::LeastCommonMultiple( - required_divisiblity, config->scaling_factor_den[sl_idx]); + required_divisiblity = + std::lcm(required_divisiblity, config->scaling_factor_den[sl_idx]); } } input_width = input_width - input_width % required_divisiblity; @@ -169,9 +175,12 @@ std::vector ConfigureSvcNormalVideo( std::vector GetVp9SvcConfig(VideoCodec& codec) { RTC_DCHECK_EQ(codec.codecType, kVideoCodecVP9); - absl::optional scalability_mode = codec.GetScalabilityMode(); + std::optional scalability_mode = codec.GetScalabilityMode(); RTC_DCHECK(scalability_mode.has_value()); + bool requested_single_spatial_layer = + ScalabilityModeToNumSpatialLayers(*scalability_mode) == 1; + // Limit number of spatial layers for given resolution. int limited_num_spatial_layers = GetLimitedNumSpatialLayers(codec.width, codec.height); @@ -187,7 +196,10 @@ std::vector GetVp9SvcConfig(VideoCodec& codec) { codec.SetScalabilityMode(limited_scalability_mode); } - absl::optional info = + codec.VP9()->interLayerPred = + ScalabilityModeToInterLayerPredMode(*scalability_mode); + + std::optional info = ScalabilityStructureConfig(*scalability_mode); if (!info.has_value()) { RTC_LOG(LS_WARNING) << "Failed to create structure " @@ -200,11 +212,19 @@ std::vector GetVp9SvcConfig(VideoCodec& codec) { GetSvcConfig(codec.width, codec.height, codec.maxFramerate, /*first_active_layer=*/0, info->num_spatial_layers, info->num_temporal_layers, /*is_screen_sharing=*/false, - codec.GetScalabilityMode() ? info : absl::nullopt); + codec.GetScalabilityMode() ? info : std::nullopt); RTC_DCHECK(!spatial_layers.empty()); spatial_layers[0].minBitrate = kMinVp9SvcBitrateKbps; + // Use codec bitrate limits if spatial layering is not requested. 
+ if (requested_single_spatial_layer) { + SpatialLayer& spatial_layer = spatial_layers[0]; + spatial_layer.minBitrate = codec.minBitrate; + spatial_layer.maxBitrate = codec.maxBitrate; + spatial_layer.targetBitrate = codec.maxBitrate; + } + return spatial_layers; } @@ -216,7 +236,7 @@ std::vector GetSvcConfig( size_t num_spatial_layers, size_t num_temporal_layers, bool is_screen_sharing, - absl::optional config) { + std::optional config) { RTC_DCHECK_GT(input_width, 0); RTC_DCHECK_GT(input_height, 0); RTC_DCHECK_GT(num_spatial_layers, 0); diff --git a/modules/video_coding/codecs/vp9/svc_config.h b/modules/video_coding/codecs/vp9/svc_config.h index adeaf0f161..eda0634670 100644 --- a/modules/video_coding/codecs/vp9/svc_config.h +++ b/modules/video_coding/codecs/vp9/svc_config.h @@ -12,6 +12,7 @@ #include +#include #include #include "api/video_codecs/spatial_layer.h" @@ -31,8 +32,8 @@ std::vector GetSvcConfig( size_t num_spatial_layers, size_t num_temporal_layers, bool is_screen_sharing, - absl::optional config = - absl::nullopt); + std::optional config = + std::nullopt); } // namespace webrtc diff --git a/modules/video_coding/codecs/vp9/svc_config_unittest.cc b/modules/video_coding/codecs/vp9/svc_config_unittest.cc index 762fd39287..8a1c08f626 100644 --- a/modules/video_coding/codecs/vp9/svc_config_unittest.cc +++ b/modules/video_coding/codecs/vp9/svc_config_unittest.cc @@ -13,6 +13,10 @@ #include #include +#include "api/video/video_codec_type.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/spatial_layer.h" +#include "api/video_codecs/video_codec.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "test/gmock.h" #include "test/gtest.h" @@ -65,6 +69,25 @@ TEST(SvcConfig, NumSpatialLayersWithScalabilityMode) { EXPECT_EQ(codec.GetScalabilityMode(), ScalabilityMode::kL3T3_KEY); } +TEST(SvcConfig, UpdatesInterLayerPredModeBasedOnScalabilityMode) { + VideoCodec codec; + codec.codecType = kVideoCodecVP9; + codec.width = 1280; + codec.height = 720; + codec.SetScalabilityMode(ScalabilityMode::kL3T3_KEY); + + std::vector spatial_layers = GetVp9SvcConfig(codec); + EXPECT_EQ(codec.VP9()->interLayerPred, InterLayerPredMode::kOnKeyPic); + + codec.SetScalabilityMode(ScalabilityMode::kL3T3); + spatial_layers = GetVp9SvcConfig(codec); + EXPECT_EQ(codec.VP9()->interLayerPred, InterLayerPredMode::kOn); + + codec.SetScalabilityMode(ScalabilityMode::kS3T3); + spatial_layers = GetVp9SvcConfig(codec); + EXPECT_EQ(codec.VP9()->interLayerPred, InterLayerPredMode::kOff); +} + TEST(SvcConfig, NumSpatialLayersLimitedWithScalabilityMode) { VideoCodec codec; codec.codecType = kVideoCodecVP9; @@ -154,7 +177,7 @@ TEST(SvcConfig, AlwaysSendsAtLeastOneLayerPortrait) { TEST(SvcConfig, EnforcesMinimalRequiredParity) { const size_t max_num_spatial_layers = 3; - const size_t kOddSize = 1023; + const int kOddSize = 1023; std::vector spatial_layers = GetSvcConfig(kOddSize, kOddSize, 30, @@ -265,6 +288,21 @@ TEST(SvcConfig, BitrateThresholdsWithScalabilityMode) { } } +TEST(SvcConfig, CopiesMinMaxBitrateForSingleSpatialLayer) { + VideoCodec codec; + codec.codecType = kVideoCodecVP9; + codec.SetScalabilityMode(ScalabilityMode::kL1T3); + codec.width = 1280; + codec.height = 720; + codec.minBitrate = 100; + codec.maxBitrate = 500; + + std::vector spatial_layers = GetVp9SvcConfig(codec); + EXPECT_EQ(spatial_layers[0].minBitrate, 100u); + EXPECT_EQ(spatial_layers[0].maxBitrate, 500u); + EXPECT_LE(spatial_layers[0].targetBitrate, 500u); +} + TEST(SvcConfig, ScreenSharing) { 
std::vector spatial_layers = GetSvcConfig(1920, 1080, 30, 1, 3, 3, true); diff --git a/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc b/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc index 993fd245ad..3152ccbb1e 100644 --- a/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc +++ b/modules/video_coding/codecs/vp9/test/vp9_impl_unittest.cc @@ -8,32 +8,65 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/container/inlined_vector.h" #include "absl/memory/memory.h" +#include "api/array_view.h" +#include "api/environment/environment_factory.h" +#include "api/scoped_refptr.h" #include "api/test/create_frame_generator.h" #include "api/test/frame_generator_interface.h" #include "api/test/mock_video_encoder.h" +#include "api/units/data_rate.h" +#include "api/units/timestamp.h" #include "api/video/color_space.h" -#include "api/video/i420_buffer.h" +#include "api/video/encoded_image.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/spatial_layer.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/vp9_profile.h" #include "common_video/libyuv/include/webrtc_libyuv.h" -#include "media/base/codec.h" -#include "media/base/media_constants.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/video_coding/codecs/interface/common_constants.h" #include "modules/video_coding/codecs/interface/libvpx_interface.h" #include "modules/video_coding/codecs/interface/mock_libvpx_interface.h" #include "modules/video_coding/codecs/test/encoded_video_frame_producer.h" #include "modules/video_coding/codecs/test/video_codec_unittest.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h" #include "modules/video_coding/codecs/vp9/svc_config.h" -#include "rtc_base/strings/string_builder.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/svc/scalability_mode_util.h" +#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" #include "test/explicit_key_value_config.h" #include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/mappable_native_buffer.h" #include "test/video_codec_settings.h" +#include "vpx/vp8cx.h" +#include "vpx/vpx_codec.h" +#include "vpx/vpx_encoder.h" +#include "vpx/vpx_image.h" namespace webrtc { namespace { @@ -66,6 +99,7 @@ using FramerateFractions = constexpr size_t kWidth = 1280; constexpr size_t kHeight = 720; +constexpr int kBitrateKbps = 2048; const VideoEncoder::Capabilities kCapabilities(false); const VideoEncoder::Settings kSettings(kCapabilities, @@ -77,17 +111,18 @@ VideoCodec DefaultCodecSettings() { webrtc::test::CodecSettings(kVideoCodecVP9, &codec_settings); codec_settings.width = kWidth; codec_settings.height = kHeight; + codec_settings.startBitrate = kBitrateKbps; + codec_settings.maxBitrate = kBitrateKbps; 
codec_settings.VP9()->numberOfTemporalLayers = 1; codec_settings.VP9()->numberOfSpatialLayers = 1; return codec_settings; } void ConfigureSvc(VideoCodec& codec_settings, - int num_spatial_layers, + int num_spatial_layers = 1, int num_temporal_layers = 1) { codec_settings.VP9()->numberOfSpatialLayers = num_spatial_layers; codec_settings.VP9()->numberOfTemporalLayers = num_temporal_layers; - codec_settings.SetFrameDropEnabled(false); std::vector layers = GetSvcConfig( codec_settings.width, codec_settings.height, codec_settings.maxFramerate, @@ -102,7 +137,7 @@ void ConfigureSvc(VideoCodec& codec_settings, class TestVp9Impl : public VideoCodecUnitTest { protected: std::unique_ptr CreateEncoder() override { - return VP9Encoder::Create(); + return CreateVp9Encoder(env_); } std::unique_ptr CreateDecoder() override { @@ -113,8 +148,7 @@ class TestVp9Impl : public VideoCodecUnitTest { webrtc::test::CodecSettings(kVideoCodecVP9, codec_settings); codec_settings->width = kWidth; codec_settings->height = kHeight; - codec_settings->VP9()->numberOfTemporalLayers = 1; - codec_settings->VP9()->numberOfSpatialLayers = 1; + ConfigureSvc(*codec_settings); } }; @@ -125,7 +159,7 @@ class TestVp9ImplForPixelFormat protected: void SetUp() override { input_frame_generator_ = test::CreateSquareFrameGenerator( - kWidth, kHeight, GetParam(), absl::optional()); + kWidth, kHeight, GetParam(), std::optional()); TestVp9Impl::SetUp(); } }; @@ -145,7 +179,7 @@ TEST_P(TestVp9ImplForPixelFormat, EncodeDecode) { encoded_frame._frameType = VideoFrameType::kVideoFrameKey; EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, 0)); std::unique_ptr decoded_frame; - absl::optional decoded_qp; + std::optional decoded_qp; ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp)); ASSERT_TRUE(decoded_frame); EXPECT_GT(I420PSNR(&input_frame, decoded_frame.get()), 36); @@ -175,9 +209,9 @@ TEST_P(TestVp9ImplForPixelFormat, EncodeNativeBuffer) { ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); // After encoding, we would expect a single mapping to have happened. - rtc::scoped_refptr mappable_buffer = + scoped_refptr mappable_buffer = test::GetMappableNativeBufferFromVideoFrame(input_frame); - std::vector> mapped_buffers = + std::vector> mapped_buffers = mappable_buffer->GetMappedFramedBuffers(); ASSERT_EQ(mapped_buffers.size(), 1u); EXPECT_EQ(mapped_buffers[0]->type(), mappable_buffer->mappable_type()); @@ -195,7 +229,7 @@ TEST_P(TestVp9ImplForPixelFormat, DecodedColorSpaceFromBitstream) { // Encoded frame without explicit color space information. EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, 0)); std::unique_ptr decoded_frame; - absl::optional decoded_qp; + std::optional decoded_qp; ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp)); ASSERT_TRUE(decoded_frame); // Color space present from encoded bitstream. 
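The tests in this file switch from VP9Encoder::Create() to the Environment-based CreateVp9Encoder() factory, and the reworked ConfigureSvc() helper now defaults to a single spatial and temporal layer while always filling codec.spatialLayers via GetSvcConfig(). A condensed sketch of the resulting setup pattern, assuming it sits in the same namespace as the other tests and reuses the DefaultCodecSettings(), ConfigureSvc() and kSettings helpers defined earlier in this file (the test name is illustrative):

#include <memory>

#include "api/environment/environment_factory.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "test/gtest.h"

TEST(Vp9ImplTest, SketchOfEnvironmentBasedSetup) {
  // The factory takes an Environment instead of a cricket::VideoCodec.
  std::unique_ptr<VideoEncoder> encoder =
      CreateVp9Encoder(CreateEnvironment());

  // DefaultCodecSettings()/ConfigureSvc() are the helpers defined above;
  // ConfigureSvc() derives codec.spatialLayers from GetSvcConfig().
  VideoCodec codec = DefaultCodecSettings();
  ConfigureSvc(codec, /*num_spatial_layers=*/2, /*num_temporal_layers=*/3);

  EXPECT_EQ(encoder->InitEncode(&codec, kSettings), WEBRTC_VIDEO_CODEC_OK);
  encoder->Release();
}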
@@ -213,24 +247,24 @@ TEST_P(TestVp9ImplForPixelFormat, DecodedQpEqualsEncodedQp) { encoded_frame._frameType = VideoFrameType::kVideoFrameKey; EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, 0)); std::unique_ptr decoded_frame; - absl::optional decoded_qp; + std::optional decoded_qp; ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp)); ASSERT_TRUE(decoded_frame); ASSERT_TRUE(decoded_qp); EXPECT_EQ(encoded_frame.qp_, *decoded_qp); } -TEST_P(TestVp9ImplForPixelFormat, CheckCaptureTimeID) { - constexpr Timestamp kCaptureTimeIdentifier = Timestamp::Micros(1000); +TEST_P(TestVp9ImplForPixelFormat, CheckPresentationTimestamp) { + constexpr Timestamp kPresentationTimestamp = Timestamp::Micros(1000); VideoFrame input_frame = NextInputFrame(); - input_frame.set_capture_time_identifier(kCaptureTimeIdentifier); + input_frame.set_presentation_timestamp(kPresentationTimestamp); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(input_frame, nullptr)); EncodedImage encoded_frame; CodecSpecificInfo codec_specific_info; ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); - ASSERT_TRUE(encoded_frame.CaptureTimeIdentifier().has_value()); - EXPECT_EQ(kCaptureTimeIdentifier.us(), - encoded_frame.CaptureTimeIdentifier()->us()); + ASSERT_TRUE(encoded_frame.PresentationTimestamp().has_value()); + EXPECT_EQ(kPresentationTimestamp.us(), + encoded_frame.PresentationTimestamp()->us()); } TEST_F(TestVp9Impl, SwitchInputPixelFormatsWithoutReconfigure) { @@ -242,20 +276,20 @@ TEST_F(TestVp9Impl, SwitchInputPixelFormatsWithoutReconfigure) { // Change the input frame type from I420 to NV12, encoding should still work. input_frame_generator_ = test::CreateSquareFrameGenerator( kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kNV12, - absl::optional()); + std::optional()); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr)); ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); // Flipping back to I420, encoding should still work. input_frame_generator_ = test::CreateSquareFrameGenerator( kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kI420, - absl::optional()); + std::optional()); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr)); ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info)); } TEST(Vp9ImplTest, ParserQpEqualsEncodedQp) { - std::unique_ptr encoder = VP9Encoder::Create(); + std::unique_ptr encoder = CreateVp9Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); encoder->InitEncode(&codec_settings, kSettings); @@ -272,7 +306,7 @@ TEST(Vp9ImplTest, ParserQpEqualsEncodedQp) { } TEST(Vp9ImplTest, EncodeAttachesTemplateStructureWithSvcController) { - std::unique_ptr encoder = VP9Encoder::Create(); + std::unique_ptr encoder = CreateVp9Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings), WEBRTC_VIDEO_CODEC_OK); @@ -292,11 +326,12 @@ TEST(Vp9ImplTest, EncodeAttachesTemplateStructureWithSvcController) { } TEST(Vp9ImplTest, EncoderWith2TemporalLayers) { - std::unique_ptr encoder = VP9Encoder::Create(); + std::unique_ptr encoder = CreateVp9Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); - codec_settings.VP9()->numberOfTemporalLayers = 2; // Tl0PidIdx is only used in non-flexible mode. 
codec_settings.VP9()->flexibleMode = false; + ConfigureSvc(codec_settings, /*num_spatial_layers=*/1, + /*num_temporal_layers=*/2); EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings), WEBRTC_VIDEO_CODEC_OK); @@ -314,9 +349,10 @@ TEST(Vp9ImplTest, EncoderWith2TemporalLayers) { } TEST(Vp9ImplTest, EncodeTemporalLayersWithSvcController) { - std::unique_ptr encoder = VP9Encoder::Create(); + std::unique_ptr encoder = CreateVp9Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); - codec_settings.VP9()->numberOfTemporalLayers = 2; + ConfigureSvc(codec_settings, /*num_spatial_layers=*/1, + /*num_temporal_layers=*/2); EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings), WEBRTC_VIDEO_CODEC_OK); @@ -343,9 +379,9 @@ TEST(Vp9ImplTest, EncodeTemporalLayersWithSvcController) { } TEST(Vp9ImplTest, EncoderWith2SpatialLayers) { - std::unique_ptr encoder = VP9Encoder::Create(); + std::unique_ptr encoder = CreateVp9Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); - codec_settings.VP9()->numberOfSpatialLayers = 2; + ConfigureSvc(codec_settings, /*num_spatial_layers=*/2); EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings), WEBRTC_VIDEO_CODEC_OK); @@ -361,9 +397,9 @@ TEST(Vp9ImplTest, EncoderWith2SpatialLayers) { } TEST(Vp9ImplTest, EncodeSpatialLayersWithSvcController) { - std::unique_ptr encoder = VP9Encoder::Create(); + std::unique_ptr encoder = CreateVp9Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); - codec_settings.VP9()->numberOfSpatialLayers = 2; + ConfigureSvc(codec_settings, /*num_spatial_layers=*/2); EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings), WEBRTC_VIDEO_CODEC_OK); @@ -390,39 +426,12 @@ TEST(Vp9ImplTest, EncodeSpatialLayersWithSvcController) { } TEST_F(TestVp9Impl, EncoderExplicitLayering) { - // Override default settings. - codec_settings_.VP9()->numberOfTemporalLayers = 1; - codec_settings_.VP9()->numberOfSpatialLayers = 2; - - codec_settings_.width = 960; - codec_settings_.height = 540; - codec_settings_.spatialLayers[0].minBitrate = 200; - codec_settings_.spatialLayers[0].maxBitrate = 500; - codec_settings_.spatialLayers[0].targetBitrate = - (codec_settings_.spatialLayers[0].minBitrate + - codec_settings_.spatialLayers[0].maxBitrate) / - 2; - codec_settings_.spatialLayers[0].active = true; - - codec_settings_.spatialLayers[1].minBitrate = 400; - codec_settings_.spatialLayers[1].maxBitrate = 1500; - codec_settings_.spatialLayers[1].targetBitrate = - (codec_settings_.spatialLayers[1].minBitrate + - codec_settings_.spatialLayers[1].maxBitrate) / - 2; - codec_settings_.spatialLayers[1].active = true; - - codec_settings_.spatialLayers[0].width = codec_settings_.width / 2; - codec_settings_.spatialLayers[0].height = codec_settings_.height / 2; - codec_settings_.spatialLayers[0].maxFramerate = codec_settings_.maxFramerate; - codec_settings_.spatialLayers[1].width = codec_settings_.width; - codec_settings_.spatialLayers[1].height = codec_settings_.height; - codec_settings_.spatialLayers[1].maxFramerate = codec_settings_.maxFramerate; + ConfigureSvc(codec_settings_, /*num_spatial_layers=*/2); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->InitEncode(&codec_settings_, kSettings)); - // Ensure it fails if scaling factors in horz/vert dimentions are different. + // Ensure it fails if scaling factors in horz/vert dimensions are different. 
codec_settings_.spatialLayers[0].width = codec_settings_.width; codec_settings_.spatialLayers[0].height = codec_settings_.height / 2; codec_settings_.spatialLayers[1].width = codec_settings_.width; @@ -439,6 +448,154 @@ TEST_F(TestVp9Impl, EncoderExplicitLayering) { encoder_->InitEncode(&codec_settings_, kSettings)); } +TEST_F(TestVp9Impl, EncoderAcceptsSvcLikeSimulcast) { + // Override default settings. + codec_settings_.VP9()->numberOfTemporalLayers = 3; + codec_settings_.VP9()->numberOfSpatialLayers = 1; + codec_settings_.numberOfSimulcastStreams = 3; + + codec_settings_.width = 1280; + codec_settings_.height = 720; + codec_settings_.simulcastStream[0].minBitrate = 30; + codec_settings_.simulcastStream[0].maxBitrate = 150; + codec_settings_.simulcastStream[0].targetBitrate = + (codec_settings_.simulcastStream[0].minBitrate + + codec_settings_.simulcastStream[0].maxBitrate) / + 2; + codec_settings_.simulcastStream[0].numberOfTemporalLayers = 3; + codec_settings_.simulcastStream[0].active = true; + + codec_settings_.simulcastStream[1].minBitrate = 200; + codec_settings_.simulcastStream[1].maxBitrate = 500; + codec_settings_.simulcastStream[1].targetBitrate = + (codec_settings_.simulcastStream[1].minBitrate + + codec_settings_.simulcastStream[1].maxBitrate) / + 2; + codec_settings_.simulcastStream[1].numberOfTemporalLayers = 3; + codec_settings_.simulcastStream[1].active = true; + + codec_settings_.simulcastStream[2].minBitrate = 600; + codec_settings_.simulcastStream[2].maxBitrate = 1200; + codec_settings_.simulcastStream[2].targetBitrate = + (codec_settings_.simulcastStream[2].minBitrate + + codec_settings_.simulcastStream[2].maxBitrate) / + 2; + codec_settings_.simulcastStream[2].numberOfTemporalLayers = 3; + codec_settings_.simulcastStream[2].active = true; + + codec_settings_.simulcastStream[0].width = codec_settings_.width / 4; + codec_settings_.simulcastStream[0].height = codec_settings_.height / 4; + codec_settings_.simulcastStream[0].maxFramerate = + codec_settings_.maxFramerate; + codec_settings_.simulcastStream[1].width = codec_settings_.width / 2; + codec_settings_.simulcastStream[1].height = codec_settings_.height / 2; + codec_settings_.simulcastStream[1].maxFramerate = + codec_settings_.maxFramerate; + codec_settings_.simulcastStream[2].width = codec_settings_.width; + codec_settings_.simulcastStream[2].height = codec_settings_.height; + codec_settings_.simulcastStream[2].maxFramerate = + codec_settings_.maxFramerate; + + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + encoder_->InitEncode(&codec_settings_, kSettings)); + + // Ensure it fails if temporal configs are different. + codec_settings_.simulcastStream[0].numberOfTemporalLayers = 1; + EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED, + encoder_->InitEncode(&codec_settings_, kSettings)); + + // Restore for following tests. + codec_settings_.simulcastStream[0].numberOfTemporalLayers = 3; + + // Ensure it fails if scaling factors in horz/vert dimensions are different. + codec_settings_.simulcastStream[0].width = codec_settings_.width / 4; + codec_settings_.simulcastStream[0].height = codec_settings_.height / 16; + codec_settings_.simulcastStream[1].width = codec_settings_.width / 2; + codec_settings_.simulcastStream[1].height = codec_settings_.height / 4; + EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED, + encoder_->InitEncode(&codec_settings_, kSettings)); + + // Ensure it fails if the scaling factor is not a power of two.
+ codec_settings_.simulcastStream[0].width = codec_settings_.width / 9; + codec_settings_.simulcastStream[0].height = codec_settings_.height / 9; + codec_settings_.simulcastStream[1].width = codec_settings_.width / 3; + codec_settings_.simulcastStream[1].height = codec_settings_.height / 3; + EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED, + encoder_->InitEncode(&codec_settings_, kSettings)); +} + +TEST_F(TestVp9Impl, SvcSimulcastThenSinglecastWithCorrectSimulcastIndex) { + const int kTargetBitrate = 1200; + const int kMaxFramerate = 30; + + // Configure 720p 4:2:1 + codec_settings_.VP9()->numberOfTemporalLayers = 1; + codec_settings_.VP9()->numberOfSpatialLayers = 1; + codec_settings_.numberOfSimulcastStreams = 3; + codec_settings_.width = 1280; + codec_settings_.height = 720; + codec_settings_.simulcastStream[0].width = codec_settings_.width / 4; + codec_settings_.simulcastStream[0].height = codec_settings_.height / 4; + codec_settings_.simulcastStream[0].maxFramerate = kMaxFramerate; + codec_settings_.simulcastStream[0].minBitrate = kTargetBitrate / 2; + codec_settings_.simulcastStream[0].maxBitrate = kTargetBitrate; + codec_settings_.simulcastStream[0].targetBitrate = kTargetBitrate; + codec_settings_.simulcastStream[0].active = true; + codec_settings_.simulcastStream[1].width = codec_settings_.width / 2; + codec_settings_.simulcastStream[1].height = codec_settings_.height / 2; + codec_settings_.simulcastStream[1].maxFramerate = kMaxFramerate; + codec_settings_.simulcastStream[1].minBitrate = kTargetBitrate / 2; + codec_settings_.simulcastStream[1].maxBitrate = kTargetBitrate; + codec_settings_.simulcastStream[1].targetBitrate = kTargetBitrate; + codec_settings_.simulcastStream[1].active = true; + codec_settings_.simulcastStream[2].width = codec_settings_.width; + codec_settings_.simulcastStream[2].height = codec_settings_.height; + codec_settings_.simulcastStream[2].maxFramerate = kMaxFramerate; + codec_settings_.simulcastStream[2].minBitrate = kTargetBitrate / 2; + codec_settings_.simulcastStream[2].maxBitrate = kTargetBitrate; + codec_settings_.simulcastStream[2].targetBitrate = kTargetBitrate; + codec_settings_.simulcastStream[2].active = true; + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + encoder_->InitEncode(&codec_settings_, kSettings)); + + // Bitrate must be set for all layers to be produced. + VideoBitrateAllocation bitrate_allocation; + bitrate_allocation.SetBitrate(0, 0, kTargetBitrate * 1000); + bitrate_allocation.SetBitrate(1, 0, kTargetBitrate * 1000); + bitrate_allocation.SetBitrate(2, 0, kTargetBitrate * 1000); + encoder_->SetRates( + VideoEncoder::RateControlParameters(bitrate_allocation, kMaxFramerate)); + + // Encode a frame and confirm simulcast index is set for all layers. + { + SetWaitForEncodedFramesThreshold(3); + std::vector encoded_frame; + std::vector codec_specific_info; + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + encoder_->Encode(NextInputFrame(), nullptr)); + ASSERT_TRUE(WaitForEncodedFrames(&encoded_frame, &codec_specific_info)); + EXPECT_EQ(encoded_frame[0].SimulcastIndex().value_or(-1), 0); + EXPECT_EQ(encoded_frame[1].SimulcastIndex().value_or(-1), 1); + EXPECT_EQ(encoded_frame[2].SimulcastIndex().value_or(-1), 2); + } + + // Reconfigure 720p singlecast. + codec_settings_.numberOfSimulcastStreams = 1; + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + encoder_->InitEncode(&codec_settings_, kSettings)); + + // Encode a frame and confirm simulcast index is not set. 
+ { + SetWaitForEncodedFramesThreshold(1); + std::vector encoded_frame; + std::vector codec_specific_info; + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + encoder_->Encode(NextInputFrame(), nullptr)); + ASSERT_TRUE(WaitForEncodedFrames(&encoded_frame, &codec_specific_info)); + EXPECT_FALSE(encoded_frame[0].SimulcastIndex().has_value()); + } +} + TEST_F(TestVp9Impl, EnableDisableSpatialLayers) { // Configure encoder to produce N spatial layers. Encode frames of layer 0 // then enable layer 1 and encode more frames and so on until layer N-1. @@ -503,7 +660,7 @@ TEST(Vp9ImplTest, EnableDisableSpatialLayersWithSvcController) { // Note: bit rate allocation is high to avoid frame dropping due to rate // control, the encoder should always produce a frame. A dropped // frame indicates a problem and the test will fail. - std::unique_ptr encoder = VP9Encoder::Create(); + std::unique_ptr encoder = CreateVp9Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); ConfigureSvc(codec_settings, num_spatial_layers); codec_settings.SetFrameDropEnabled(true); @@ -556,7 +713,7 @@ TEST(Vp9ImplTest, EnableDisableSpatialLayersWithSvcController) { } MATCHER_P2(GenericLayerIs, spatial_id, temporal_id, "") { - if (arg.codec_specific_info.generic_frame_info == absl::nullopt) { + if (arg.codec_specific_info.generic_frame_info == std::nullopt) { *result_listener << " miss generic_frame_info"; return false; } @@ -570,7 +727,7 @@ MATCHER_P2(GenericLayerIs, spatial_id, temporal_id, "") { } TEST(Vp9ImplTest, SpatialUpswitchNotAtGOFBoundary) { - std::unique_ptr encoder = VP9Encoder::Create(); + std::unique_ptr encoder = CreateVp9Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); ConfigureSvc(codec_settings, /*num_spatial_layers=*/3, /*num_temporal_layers=*/3); @@ -608,12 +765,10 @@ TEST(Vp9ImplTest, SpatialUpswitchNotAtGOFBoundary) { EXPECT_THAT(producer.SetNumInputFrames(1).Encode(), ElementsAre(GenericLayerIs(0, 0), GenericLayerIs(1, 0))); } -// TODO(bugs.webrtc.org/13442) Enable once a forward fix has landed in WebRTC. -TEST_F(TestVp9Impl, DISABLED_DisableEnableBaseLayerTriggersKeyFrame) { + +TEST_F(TestVp9Impl, DisableEnableBaseLayerTriggersKeyFrame) { // Configure encoder to produce N spatial layers. Encode frames for all // layers. Then disable all but the last layer. Then reenable all back again. - test::ScopedFieldTrials override_field_trials( - "WebRTC-Vp9ExternalRefCtrl/Enabled/"); const size_t num_spatial_layers = 3; const size_t num_temporal_layers = 3; // Must not be multiple of temporal period to exercise all code paths. @@ -685,16 +840,18 @@ TEST_F(TestVp9Impl, DISABLED_DisableEnableBaseLayerTriggersKeyFrame) { EXPECT_TRUE(seen_ss_data); // Force key-frame. - std::vector frame_types = {VideoFrameType::kVideoFrameKey}; - SetWaitForEncodedFramesThreshold(1); - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, - encoder_->Encode(NextInputFrame(), &frame_types)); - std::vector encoded_frame; - std::vector codec_specific_info; - ASSERT_TRUE(WaitForEncodedFrames(&encoded_frame, &codec_specific_info)); - // Key-frame should be produced. 
- EXPECT_EQ(encoded_frame[0]._frameType, VideoFrameType::kVideoFrameKey); - EXPECT_EQ(encoded_frame[0].SpatialIndex().value_or(-1), 2); + { + std::vector frame_types = {VideoFrameType::kVideoFrameKey}; + SetWaitForEncodedFramesThreshold(1); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + encoder_->Encode(NextInputFrame(), &frame_types)); + std::vector encoded_frame; + std::vector codec_specific_info; + ASSERT_TRUE(WaitForEncodedFrames(&encoded_frame, &codec_specific_info)); + // Key-frame should be produced. + EXPECT_EQ(encoded_frame[0]._frameType, VideoFrameType::kVideoFrameKey); + EXPECT_EQ(encoded_frame[0].SpatialIndex().value_or(-1), 2); + } // Encode some more frames. for (size_t frame_num = 0; frame_num < num_frames_to_encode; ++frame_num) { @@ -764,9 +921,8 @@ TEST_F(TestVp9Impl, DISABLED_DisableEnableBaseLayerTriggersKeyFrame) { EXPECT_EQ(encoded_frame[0]._frameType, expected_type); } } -// TODO(bugs.webrtc.org/13442) Enable once a forward fix has landed in WebRTC. -TEST(Vp9ImplTest, - DISABLED_DisableEnableBaseLayerWithSvcControllerTriggersKeyFrame) { + +TEST(Vp9ImplTest, DisableEnableBaseLayerWithSvcControllerTriggersKeyFrame) { // Configure encoder to produce N spatial layers. Encode frames for all // layers. Then disable all but the last layer. Then reenable all back again. const size_t num_spatial_layers = 3; @@ -774,7 +930,7 @@ TEST(Vp9ImplTest, // Must not be multiple of temporal period to exercise all code paths. const size_t num_frames_to_encode = 5; - std::unique_ptr encoder = VP9Encoder::Create(); + std::unique_ptr encoder = CreateVp9Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); ConfigureSvc(codec_settings, num_spatial_layers, num_temporal_layers); codec_settings.SetFrameDropEnabled(false); @@ -944,22 +1100,24 @@ TEST_F(TestVp9Impl, DisableEnableBaseLayerTriggersKeyFrameForScreenshare) { } // Force key-frame. - std::vector frame_types = {VideoFrameType::kVideoFrameKey}; - SetWaitForEncodedFramesThreshold(1); - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, - encoder_->Encode(NextInputFrame(), &frame_types)); - std::vector encoded_frame; - std::vector codec_specific_info; - ASSERT_TRUE(WaitForEncodedFrames(&encoded_frame, &codec_specific_info)); - // Key-frame should be produced. - EXPECT_EQ(encoded_frame[0]._frameType, VideoFrameType::kVideoFrameKey); + { + std::vector frame_types = {VideoFrameType::kVideoFrameKey}; + SetWaitForEncodedFramesThreshold(1); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + encoder_->Encode(NextInputFrame(), &frame_types)); + std::vector encoded_frame; + std::vector codec_specific_info; + ASSERT_TRUE(WaitForEncodedFrames(&encoded_frame, &codec_specific_info)); + // Key-frame should be produced. + EXPECT_EQ(encoded_frame[0]._frameType, VideoFrameType::kVideoFrameKey); - // Enable the second layer back. - // Allocate high bit rate to avoid frame dropping due to rate control. - bitrate_allocation.SetBitrate( - 1, 0, codec_settings_.spatialLayers[0].targetBitrate * 1000 * 2); - encoder_->SetRates(VideoEncoder::RateControlParameters( - bitrate_allocation, codec_settings_.maxFramerate)); + // Enable the second layer back. + // Allocate high bit rate to avoid frame dropping due to rate control. 
+ bitrate_allocation.SetBitrate( + 1, 0, codec_settings_.spatialLayers[0].targetBitrate * 1000 * 2); + encoder_->SetRates(VideoEncoder::RateControlParameters( + bitrate_allocation, codec_settings_.maxFramerate)); + } for (size_t frame_num = 0; frame_num < num_frames_to_encode; ++frame_num) { SetWaitForEncodedFramesThreshold(2); @@ -1493,17 +1651,20 @@ TEST_F(TestVp9Impl, ScreenshareFrameDropping) { EXPECT_TRUE(frame_dropped); // Enable the last layer. - bitrate_allocation.SetBitrate( - 2, 0, codec_settings_.spatialLayers[2].targetBitrate * 1000); - encoder_->SetRates(VideoEncoder::RateControlParameters( - bitrate_allocation, codec_settings_.maxFramerate)); - SetWaitForEncodedFramesThreshold(1); - EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(NextInputFrame(), nullptr)); - std::vector encoded_frames; - std::vector codec_specific_info; - ASSERT_TRUE(WaitForEncodedFrames(&encoded_frames, &codec_specific_info)); - // No drop allowed. - EXPECT_EQ(encoded_frames.size(), 3u); + { + bitrate_allocation.SetBitrate( + 2, 0, codec_settings_.spatialLayers[2].targetBitrate * 1000); + encoder_->SetRates(VideoEncoder::RateControlParameters( + bitrate_allocation, codec_settings_.maxFramerate)); + SetWaitForEncodedFramesThreshold(1); + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, + encoder_->Encode(NextInputFrame(), nullptr)); + std::vector encoded_frames; + std::vector codec_specific_info; + ASSERT_TRUE(WaitForEncodedFrames(&encoded_frames, &codec_specific_info)); + // No drop allowed. + EXPECT_EQ(encoded_frames.size(), 3u); + } // Verify that frame-dropping is re-enabled back. frame_dropped = false; @@ -1832,18 +1993,14 @@ TEST_F(TestVp9Impl, EncoderInfoFpsAllocationFlexibleMode) { } class Vp9ImplWithLayeringTest - : public ::testing::TestWithParam> { + : public ::testing::TestWithParam> { protected: Vp9ImplWithLayeringTest() : num_spatial_layers_(std::get<0>(GetParam())), - num_temporal_layers_(std::get<1>(GetParam())), - override_field_trials_(std::get<2>(GetParam()) - ? "WebRTC-Vp9ExternalRefCtrl/Enabled/" - : "") {} + num_temporal_layers_(std::get<1>(GetParam())) {} const uint8_t num_spatial_layers_; const uint8_t num_temporal_layers_; - const test::ScopedFieldTrials override_field_trials_; }; TEST_P(Vp9ImplWithLayeringTest, FlexibleMode) { @@ -1851,12 +2008,11 @@ TEST_P(Vp9ImplWithLayeringTest, FlexibleMode) { // encoder and writes it into RTP payload descriptor. Check that reference // list in payload descriptor matches the predefined one, which is used // in non-flexible mode. - std::unique_ptr encoder = VP9Encoder::Create(); + std::unique_ptr encoder = CreateVp9Encoder(CreateEnvironment()); VideoCodec codec_settings = DefaultCodecSettings(); codec_settings.VP9()->flexibleMode = true; codec_settings.SetFrameDropEnabled(false); - codec_settings.VP9()->numberOfSpatialLayers = num_spatial_layers_; - codec_settings.VP9()->numberOfTemporalLayers = num_temporal_layers_; + ConfigureSvc(codec_settings, num_spatial_layers_, num_temporal_layers_); EXPECT_EQ(encoder->InitEncode(&codec_settings, kSettings), WEBRTC_VIDEO_CODEC_OK); @@ -1888,8 +2044,8 @@ TEST_P(Vp9ImplWithLayeringTest, FlexibleMode) { frame.codec_specific_info.codecSpecific.VP9; EXPECT_EQ(frame.encoded_image.SpatialIndex(), num_spatial_layers_ == 1 - ? absl::nullopt - : absl::optional(i % num_spatial_layers_)) + ? std::nullopt + : std::optional(i % num_spatial_layers_)) << "Frame " << i; EXPECT_EQ(vp9.temporal_idx, num_temporal_layers_ == 1 ? 
kNoTemporalIdx @@ -1900,7 +2056,7 @@ TEST_P(Vp9ImplWithLayeringTest, FlexibleMode) { if (picture_idx == 0) { EXPECT_EQ(vp9.num_ref_pics, 0) << "Frame " << i; } else { - EXPECT_THAT(rtc::MakeArrayView(vp9.p_diff, vp9.num_ref_pics), + EXPECT_THAT(MakeArrayView(vp9.p_diff, vp9.num_ref_pics), UnorderedElementsAreArray(gof.pid_diff[gof_idx], gof.num_ref_pics[gof_idx])) << "Frame " << i; @@ -1911,8 +2067,7 @@ TEST_P(Vp9ImplWithLayeringTest, FlexibleMode) { INSTANTIATE_TEST_SUITE_P(All, Vp9ImplWithLayeringTest, ::testing::Combine(::testing::Values(1, 2, 3), - ::testing::Values(1, 2, 3), - ::testing::Bool())); + ::testing::Values(1, 2, 3))); class TestVp9ImplFrameDropping : public TestVp9Impl { protected: @@ -1934,15 +2089,16 @@ TEST_F(TestVp9ImplFrameDropping, PreEncodeFrameDropping) { const float max_abs_framerate_error_fps = expected_framerate_fps * 0.1f; codec_settings_.maxFramerate = static_cast(expected_framerate_fps); + ConfigureSvc(codec_settings_); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->InitEncode(&codec_settings_, kSettings)); VideoFrame input_frame = NextInputFrame(); for (size_t frame_num = 0; frame_num < num_frames_to_encode; ++frame_num) { EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(input_frame, nullptr)); - const size_t timestamp = input_frame.timestamp() + + const size_t timestamp = input_frame.rtp_timestamp() + kVideoPayloadTypeFrequency / input_framerate_fps; - input_frame.set_timestamp(static_cast(timestamp)); + input_frame.set_rtp_timestamp(static_cast(timestamp)); } const size_t num_encoded_frames = GetNumEncodedFrames(); @@ -1992,9 +2148,9 @@ TEST_F(TestVp9ImplFrameDropping, DifferentFrameratePerSpatialLayer) { VideoFrame input_frame = NextInputFrame(); for (size_t frame_num = 0; frame_num < num_input_frames; ++frame_num) { EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(input_frame, nullptr)); - const size_t timestamp = input_frame.timestamp() + + const size_t timestamp = input_frame.rtp_timestamp() + kVideoPayloadTypeFrequency / input_framerate_fps; - input_frame.set_timestamp(static_cast(timestamp)); + input_frame.set_rtp_timestamp(static_cast(timestamp)); } std::vector encoded_frames; @@ -2035,16 +2191,11 @@ class TestVp9ImplProfile2 : public TestVp9Impl { TestVp9Impl::SetUp(); input_frame_generator_ = test::CreateSquareFrameGenerator( codec_settings_.width, codec_settings_.height, - test::FrameGeneratorInterface::OutputType::kI010, - absl::optional()); + test::FrameGeneratorInterface::OutputType::kI010, std::optional()); } std::unique_ptr CreateEncoder() override { - cricket::VideoCodec profile2_codec = - cricket::CreateVideoCodec(cricket::kVp9CodecName); - profile2_codec.SetParam(kVP9FmtpProfileId, - VP9ProfileToString(VP9Profile::kProfile2)); - return VP9Encoder::Create(profile2_codec); + return CreateVp9Encoder(env_, {.profile = VP9Profile::kProfile2}); } std::unique_ptr CreateDecoder() override { @@ -2065,7 +2216,7 @@ TEST_F(TestVp9ImplProfile2, EncodeDecode) { encoded_frame._frameType = VideoFrameType::kVideoFrameKey; EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, 0)); std::unique_ptr decoded_frame; - absl::optional decoded_qp; + std::optional decoded_qp; ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp)); ASSERT_TRUE(decoded_frame); @@ -2209,6 +2360,104 @@ GetWrapImageFunction(vpx_image_t* img) { }; } +TEST_F(TestVp9Impl, ScalesInputToActiveResolution) { + // Keep a raw pointer for EXPECT calls and the like. Ownership is otherwise + // passed on to LibvpxVp9Encoder. 
+ auto* const vpx = new NiceMock(); + LibvpxVp9Encoder encoder(CreateEnvironment(), {}, + absl::WrapUnique(vpx)); + + VideoCodec settings = DefaultCodecSettings(); + settings.width = 1280; + settings.height = 720; + constexpr int kNumSpatialLayers = 3; + constexpr int kNumTemporalLayers = 3; + ConfigureSvc(settings, kNumSpatialLayers, kNumTemporalLayers); + VideoBitrateAllocation bitrate_allocation; + for (int si = 0; si < kNumSpatialLayers; ++si) { + for (int ti = 0; ti < kNumTemporalLayers; ++ti) { + uint32_t bitrate_bps = + settings.spatialLayers[si].targetBitrate * 1'000 / kNumTemporalLayers; + bitrate_allocation.SetBitrate(si, ti, bitrate_bps); + } + } + vpx_image_t img; + + ON_CALL(*vpx, img_wrap).WillByDefault(GetWrapImageFunction(&img)); + ON_CALL(*vpx, codec_enc_init) + .WillByDefault(WithArg<0>([](vpx_codec_ctx_t* ctx) { + memset(ctx, 0, sizeof(*ctx)); + return VPX_CODEC_OK; + })); + ON_CALL(*vpx, codec_enc_config_default) + .WillByDefault(DoAll(WithArg<1>([](vpx_codec_enc_cfg_t* cfg) { + memset(cfg, 0, sizeof(vpx_codec_enc_cfg_t)); + }), + Return(VPX_CODEC_OK))); + + vpx_codec_priv_output_cx_pkt_cb_pair_t callback_pointer = {}; + EXPECT_CALL(*vpx, codec_control(_, VP9E_REGISTER_CX_CALLBACK, A())) + .WillOnce(WithArg<2>([&](void* cbp) { + callback_pointer = + *reinterpret_cast(cbp); + return VPX_CODEC_OK; + })); + + EXPECT_CALL( + *vpx, + codec_control( + _, VP9E_SET_SVC_PARAMETERS, + SafeMatcherCast(AllOf( + Field(&vpx_svc_extra_cfg_t::scaling_factor_num, + ElementsAreArray({1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0})), + Field(&vpx_svc_extra_cfg_t::scaling_factor_den, + ElementsAreArray({4, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0})))))); + + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder.InitEncode(&settings, kSettings)); + + Mock::VerifyAndClearExpectations(vpx); + + // All layers active. + encoder.SetRates(VideoEncoder::RateControlParameters(bitrate_allocation, + settings.maxFramerate)); + + // Deactivate SL2 + bitrate_allocation.SetBitrate(2, 0, 0); + bitrate_allocation.SetBitrate(2, 1, 0); + bitrate_allocation.SetBitrate(2, 2, 0); + + EXPECT_CALL( + *vpx, + codec_control( + _, VP9E_SET_SVC_PARAMETERS, + SafeMatcherCast(AllOf( + Field(&vpx_svc_extra_cfg_t::scaling_factor_num, + ElementsAreArray({1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0})), + Field(&vpx_svc_extra_cfg_t::scaling_factor_den, + ElementsAreArray({2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0})))))); + + encoder.SetRates(VideoEncoder::RateControlParameters(bitrate_allocation, + settings.maxFramerate)); + + // Deactivate SL1 + bitrate_allocation.SetBitrate(1, 0, 0); + bitrate_allocation.SetBitrate(1, 1, 0); + bitrate_allocation.SetBitrate(1, 2, 0); + + EXPECT_CALL( + *vpx, + codec_control( + _, VP9E_SET_SVC_PARAMETERS, + SafeMatcherCast(AllOf( + Field(&vpx_svc_extra_cfg_t::scaling_factor_num, + ElementsAreArray({1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0})), + Field(&vpx_svc_extra_cfg_t::scaling_factor_den, + ElementsAreArray({1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0})))))); + + encoder.SetRates(VideoEncoder::RateControlParameters(bitrate_allocation, + settings.maxFramerate)); +} + TEST(Vp9SpeedSettingsTrialsTest, NoSvcUsesGlobalSpeedFromTl0InLayerConfig) { // TL0 speed 8 at >= 480x270, 5 if below that. test::ExplicitKeyValueConfig trials( @@ -2222,8 +2471,8 @@ TEST(Vp9SpeedSettingsTrialsTest, NoSvcUsesGlobalSpeedFromTl0InLayerConfig) { // Keep a raw pointer for EXPECT calls and the like. Ownership is otherwise // passed on to LibvpxVp9Encoder. 
auto* const vpx = new NiceMock(); - LibvpxVp9Encoder encoder(cricket::CreateVideoCodec(cricket::kVp9CodecName), - absl::WrapUnique(vpx), trials); + LibvpxVp9Encoder encoder(CreateEnvironment(&trials), {}, + absl::WrapUnique(vpx)); VideoCodec settings = DefaultCodecSettings(); settings.width = 480; @@ -2266,8 +2515,8 @@ TEST(Vp9SpeedSettingsTrialsTest, // Keep a raw pointer for EXPECT calls and the like. Ownership is otherwise // passed on to LibvpxVp9Encoder. auto* const vpx = new NiceMock(); - LibvpxVp9Encoder encoder(cricket::CreateVideoCodec(cricket::kVp9CodecName), - absl::WrapUnique(vpx), trials); + LibvpxVp9Encoder encoder(CreateEnvironment(&trials), {}, + absl::WrapUnique(vpx)); VideoCodec settings = DefaultCodecSettings(); settings.width = 480; @@ -2324,8 +2573,8 @@ TEST(Vp9SpeedSettingsTrialsTest, DefaultPerLayerFlagsWithSvc) { // Keep a raw pointer for EXPECT calls and the like. Ownership is otherwise // passed on to LibvpxVp9Encoder. auto* const vpx = new NiceMock(); - LibvpxVp9Encoder encoder(cricket::CreateVideoCodec(cricket::kVp9CodecName), - absl::WrapUnique(vpx), trials); + LibvpxVp9Encoder encoder(CreateEnvironment(&trials), {}, + absl::WrapUnique(vpx)); VideoCodec settings = DefaultCodecSettings(); constexpr int kNumSpatialLayers = 3; @@ -2459,4 +2708,87 @@ TEST(Vp9SpeedSettingsTrialsTest, DefaultPerLayerFlagsWithSvc) { } } +struct SvcFrameDropConfigTestParameters { + bool flexible_mode; + std::optional scalability_mode; + std::string field_trial; + int expected_framedrop_mode; + int expected_max_consec_drop; +}; + +class TestVp9ImplSvcFrameDropConfig + : public ::testing::TestWithParam {}; + +TEST_P(TestVp9ImplSvcFrameDropConfig, SvcFrameDropConfig) { + SvcFrameDropConfigTestParameters test_params = GetParam(); + auto* const vpx = new NiceMock(); + LibvpxVp9Encoder encoder( + CreateEnvironment(std::make_unique( + test_params.field_trial)), + {}, absl::WrapUnique(vpx)); + + vpx_image_t img; + ON_CALL(*vpx, img_wrap).WillByDefault(GetWrapImageFunction(&img)); + + EXPECT_CALL(*vpx, + codec_control(_, VP9E_SET_SVC_FRAME_DROP_LAYER, + SafeMatcherCast(AllOf( + Field(&vpx_svc_frame_drop_t::framedrop_mode, + test_params.expected_framedrop_mode), + Field(&vpx_svc_frame_drop_t::max_consec_drop, + test_params.expected_max_consec_drop))))); + + VideoCodec settings = DefaultCodecSettings(); + settings.VP9()->flexibleMode = test_params.flexible_mode; + + int num_spatial_layers = 3; + if (test_params.scalability_mode.has_value()) { + settings.SetScalabilityMode(*test_params.scalability_mode); + num_spatial_layers = + ScalabilityModeToNumSpatialLayers(*test_params.scalability_mode); + } else { + num_spatial_layers = + 3; // to execute SVC code paths even when scalability_mode is not set. + } + ConfigureSvc(settings, num_spatial_layers); + + EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder.InitEncode(&settings, kSettings)); +} + +INSTANTIATE_TEST_SUITE_P( + All, + TestVp9ImplSvcFrameDropConfig, + ::testing::Values( + // Flexible mode is disabled, KSVC. Layer drop is not allowed. + SvcFrameDropConfigTestParameters{ + .flexible_mode = false, + .scalability_mode = ScalabilityMode::kL3T3_KEY, + .expected_framedrop_mode = FULL_SUPERFRAME_DROP, + .expected_max_consec_drop = 2}, + // Flexible mode is enabled, KSVC. Layer drop is enabled. + SvcFrameDropConfigTestParameters{ + .flexible_mode = true, + .scalability_mode = ScalabilityMode::kL3T3_KEY, + .expected_framedrop_mode = LAYER_DROP, + .expected_max_consec_drop = 2}, + // Flexible mode is enabled, simulcast. Layer drop is enabled. 
+ SvcFrameDropConfigTestParameters{ + .flexible_mode = true, + .scalability_mode = ScalabilityMode::kS3T3, + .expected_framedrop_mode = LAYER_DROP, + .expected_max_consec_drop = 2}, + // Flexible mode is disabled, full SVC. Layer drop is not allowed. + SvcFrameDropConfigTestParameters{ + .flexible_mode = false, + .scalability_mode = ScalabilityMode::kL3T3, + .expected_framedrop_mode = FULL_SUPERFRAME_DROP, + .expected_max_consec_drop = 2}, + // Flexible mode is enabled, scalability mode is not set (i.e., SVC + // controller is not enabled). Layer drop is not allowed. + SvcFrameDropConfigTestParameters{ + .flexible_mode = true, + .scalability_mode = std::nullopt, + .expected_framedrop_mode = FULL_SUPERFRAME_DROP, + .expected_max_consec_drop = 2})); + } // namespace webrtc diff --git a/modules/video_coding/codecs/vp9/vp9.cc b/modules/video_coding/codecs/vp9/vp9.cc index c1dbf3a451..cc773f24a0 100644 --- a/modules/video_coding/codecs/vp9/vp9.cc +++ b/modules/video_coding/codecs/vp9/vp9.cc @@ -11,17 +11,18 @@ #include "modules/video_coding/codecs/vp9/include/vp9.h" #include +#include +#include "absl/base/nullability.h" #include "absl/container/inlined_vector.h" -#include "api/transport/field_trial_based_config.h" +#include "api/environment/environment.h" #include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/sdp_video_format.h" -#include "api/video_codecs/vp9_profile.h" -#include "media/base/media_constants.h" +#include "api/video_codecs/video_encoder.h" +#include "modules/video_coding/codecs/interface/libvpx_interface.h" #include "modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h" #include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h" #include "modules/video_coding/svc/create_scalability_structure.h" -#include "rtc_base/checks.h" #include "vpx/vp8cx.h" #include "vpx/vp8dx.h" #include "vpx/vpx_codec.h" @@ -46,15 +47,11 @@ std::vector SupportedVP9Codecs(bool add_scalability_modes) { } } } - std::vector supported_formats{SdpVideoFormat( - cricket::kVp9CodecName, - {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}}, - scalability_modes)}; + std::vector supported_formats{ + SdpVideoFormat(SdpVideoFormat::VP9Profile0(), scalability_modes)}; if (vpx_supports_high_bit_depth) { - supported_formats.push_back(SdpVideoFormat( - cricket::kVp9CodecName, - {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}}, - scalability_modes)); + supported_formats.push_back( + SdpVideoFormat(SdpVideoFormat::VP9Profile2(), scalability_modes)); } return supported_formats; @@ -69,37 +66,22 @@ std::vector SupportedVP9DecoderCodecs() { // The WebRTC internal decoder supports VP9 profile 1 and 3. However, there's // currently no way of sending VP9 profile 1 or 3 using the internal encoder. // It would require extended support for I444, I422, and I440 buffers.
- supported_formats.push_back(SdpVideoFormat( - cricket::kVp9CodecName, - {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile1)}})); - supported_formats.push_back(SdpVideoFormat( - cricket::kVp9CodecName, - {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile3)}})); + supported_formats.push_back(SdpVideoFormat::VP9Profile1()); + supported_formats.push_back(SdpVideoFormat::VP9Profile3()); return supported_formats; #else return std::vector(); #endif } -std::unique_ptr VP9Encoder::Create() { +absl_nonnull std::unique_ptr CreateVp9Encoder( + const Environment& env, + Vp9EncoderSettings settings) { #ifdef RTC_ENABLE_VP9 - return std::make_unique( - cricket::CreateVideoCodec(cricket::kVp9CodecName), - LibvpxInterface::Create(), FieldTrialBasedConfig()); + return std::make_unique(env, settings, + LibvpxInterface::Create()); #else - RTC_DCHECK_NOTREACHED(); - return nullptr; -#endif -} - -std::unique_ptr VP9Encoder::Create( - const cricket::VideoCodec& codec) { -#ifdef RTC_ENABLE_VP9 - return std::make_unique(codec, LibvpxInterface::Create(), - FieldTrialBasedConfig()); -#else - RTC_DCHECK_NOTREACHED(); - return nullptr; + RTC_CHECK_NOTREACHED(); #endif } diff --git a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc index 181550ce91..d8d91bc1d6 100644 --- a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc +++ b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc @@ -13,8 +13,13 @@ #include "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h" +#include +#include + +#include "api/scoped_refptr.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" #include "vpx/vpx_codec.h" #include "vpx/vpx_decoder.h" #include "vpx/vpx_frame_buffer.h" @@ -52,10 +57,10 @@ bool Vp9FrameBufferPool::InitializeVpxUsePool( return true; } -rtc::scoped_refptr +scoped_refptr Vp9FrameBufferPool::GetFrameBuffer(size_t min_size) { RTC_DCHECK_GT(min_size, 0); - rtc::scoped_refptr available_buffer = nullptr; + scoped_refptr available_buffer = nullptr; { MutexLock lock(&buffers_lock_); // Do we have a buffer we can recycle? @@ -149,7 +154,7 @@ int32_t Vp9FrameBufferPool::VpxGetFrameBuffer(void* user_priv, Vp9FrameBufferPool* pool = static_cast(user_priv); - rtc::scoped_refptr buffer = pool->GetFrameBuffer(min_size); + scoped_refptr buffer = pool->GetFrameBuffer(min_size); fb->data = buffer->GetData(); fb->size = buffer->GetDataSize(); // Store Vp9FrameBuffer* in `priv` for use in VpxReleaseFrameBuffer. 
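For context on the pool touched above: Vp9FrameBufferPool plugs into libvpx's external frame-buffer mechanism, where the decoder asks the application for output memory instead of allocating it internally. The following is a generic sketch of that contract using the public libvpx API that InitializeVpxUsePool() wraps; the callback bodies are simplified stand-ins for the VpxGetFrameBuffer/VpxReleaseFrameBuffer implementations shown in the hunk above.

#include <cstddef>
#include <cstdint>
#include <cstdlib>

#include "vpx/vpx_decoder.h"
#include "vpx/vpx_frame_buffer.h"

// Get callback: hand libvpx a buffer of at least `min_size` bytes and stash
// a cookie in fb->priv that comes back in the release callback.
int GetFrameBuffer(void* /*user_priv*/, size_t min_size,
                   vpx_codec_frame_buffer_t* fb) {
  // Zero-initialize to be safe; libvpx's own examples use calloc here.
  fb->data = static_cast<uint8_t*>(std::calloc(min_size, 1));
  fb->size = min_size;
  fb->priv = fb->data;
  return fb->data != nullptr ? 0 : -1;  // 0 on success, < 0 on failure.
}

// Release callback: libvpx no longer needs the buffer for decoding, so the
// memory can be recycled (or, as here, simply freed).
int ReleaseFrameBuffer(void* /*user_priv*/, vpx_codec_frame_buffer_t* fb) {
  std::free(fb->priv);
  fb->priv = nullptr;
  return 0;
}

// Equivalent of what InitializeVpxUsePool() does for the pool: register both
// callbacks plus a user_priv pointer (the pool itself in WebRTC) with the
// decoder context.
bool UseExternalFrameBuffers(vpx_codec_ctx_t* decoder_ctx) {
  return vpx_codec_set_frame_buffer_functions(decoder_ctx, &GetFrameBuffer,
                                              &ReleaseFrameBuffer,
                                              /*cb_priv=*/nullptr) ==
         VPX_CODEC_OK;
}

In the real pool, GetFrameBuffer() recycles a Vp9FrameBuffer that has only one reference left, and `priv` carries the Vp9FrameBuffer* so VpxReleaseFrameBuffer() can drop that reference and return the buffer to the pool.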
diff --git a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h index f46f1b7ea2..592c13da2a 100644 --- a/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h +++ b/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h @@ -14,12 +14,15 @@ #ifdef RTC_ENABLE_VP9 +#include +#include #include #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" #include "rtc_base/buffer.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" struct vpx_codec_ctx; struct vpx_codec_frame_buffer; @@ -65,18 +68,17 @@ constexpr size_t kDefaultMaxNumBuffers = 68; // vpx_codec_destroy(decoder_ctx); class Vp9FrameBufferPool { public: - class Vp9FrameBuffer final - : public rtc::RefCountedNonVirtual { + class Vp9FrameBuffer final : public RefCountedNonVirtual { public: uint8_t* GetData(); size_t GetDataSize() const; void SetSize(size_t size); - using rtc::RefCountedNonVirtual::HasOneRef; + using RefCountedNonVirtual::HasOneRef; private: // Data as an easily resizable buffer. - rtc::Buffer data_; + Buffer data_; }; // Configures libvpx to, in the specified context, use this memory pool for @@ -86,7 +88,7 @@ class Vp9FrameBufferPool { // Gets a frame buffer of at least `min_size`, recycling an available one or // creating a new one. When no longer referenced from the outside the buffer // becomes recyclable. - rtc::scoped_refptr GetFrameBuffer(size_t min_size); + scoped_refptr GetFrameBuffer(size_t min_size); // Gets the number of buffers currently in use (not ready to be recycled). int GetNumBuffersInUse() const; // Changes the max amount of buffers in the pool to the new value. @@ -122,7 +124,7 @@ class Vp9FrameBufferPool { // Protects `allocated_buffers_`. mutable Mutex buffers_lock_; // All buffers, in use or ready to be recycled. - std::vector> allocated_buffers_ + std::vector> allocated_buffers_ RTC_GUARDED_BY(buffers_lock_); size_t max_num_buffers_ = kDefaultMaxNumBuffers; }; diff --git a/modules/video_coding/decoder_database.cc b/modules/video_coding/decoder_database.cc index dabef41f95..f20c793598 100644 --- a/modules/video_coding/decoder_database.cc +++ b/modules/video_coding/decoder_database.cc @@ -10,9 +10,17 @@ #include "modules/video_coding/decoder_database.h" +#include #include +#include #include +#include "api/sequence_checker.h" +#include "api/video/encoded_frame.h" +#include "api/video/render_resolution.h" +#include "api/video_codecs/video_decoder.h" +#include "modules/video_coding/generic_decoder.h" +#include "modules/video_coding/include/video_coding_defines.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -34,7 +42,7 @@ void VCMDecoderDatabase::DeregisterExternalDecoder(uint8_t payload_type) { // frame after RegisterReceiveCodec). if (current_decoder_ && current_decoder_->IsSameDecoder(it->second.get())) { // Release it if it was registered and in use. - current_decoder_ = absl::nullopt; + current_decoder_ = std::nullopt; } decoders_.erase(it); } @@ -64,7 +72,7 @@ void VCMDecoderDatabase::RegisterReceiveCodec( const VideoDecoder::Settings& settings) { // If payload value already exists, erase old and insert new. if (payload_type == current_payload_type_) { - current_payload_type_ = absl::nullopt; + current_payload_type_ = std::nullopt; } decoder_settings_[payload_type] = settings; } @@ -75,13 +83,13 @@ bool VCMDecoderDatabase::DeregisterReceiveCodec(uint8_t payload_type) { } if (payload_type == current_payload_type_) { // This codec is currently in use. 
- current_payload_type_ = absl::nullopt; + current_payload_type_ = std::nullopt; } return true; } void VCMDecoderDatabase::DeregisterReceiveCodecs() { - current_payload_type_ = absl::nullopt; + current_payload_type_ = std::nullopt; decoder_settings_.clear(); } @@ -96,12 +104,12 @@ VCMGenericDecoder* VCMDecoderDatabase::GetDecoder( } // If decoder exists - delete. if (current_decoder_.has_value()) { - current_decoder_ = absl::nullopt; - current_payload_type_ = absl::nullopt; + current_decoder_ = std::nullopt; + current_payload_type_ = std::nullopt; } CreateAndInitDecoder(frame); - if (current_decoder_ == absl::nullopt) { + if (current_decoder_ == std::nullopt) { return nullptr; } @@ -109,7 +117,7 @@ VCMGenericDecoder* VCMDecoderDatabase::GetDecoder( callback->OnIncomingPayloadType(payload_type); if (current_decoder_->RegisterDecodeCompleteCallback(decoded_frame_callback) < 0) { - current_decoder_ = absl::nullopt; + current_decoder_ = std::nullopt; return nullptr; } @@ -144,7 +152,7 @@ void VCMDecoderDatabase::CreateAndInitDecoder(const EncodedFrame& frame) { decoder_item->second.set_max_render_resolution(frame_resolution); } if (!current_decoder_->Configure(decoder_item->second)) { - current_decoder_ = absl::nullopt; + current_decoder_ = std::nullopt; RTC_LOG(LS_ERROR) << "Failed to initialize decoder."; } } diff --git a/modules/video_coding/decoder_database.h b/modules/video_coding/decoder_database.h index 87edcd05df..df289d291d 100644 --- a/modules/video_coding/decoder_database.h +++ b/modules/video_coding/decoder_database.h @@ -15,12 +15,13 @@ #include #include +#include -#include "absl/types/optional.h" #include "api/sequence_checker.h" #include "api/video/encoded_frame.h" #include "api/video_codecs/video_decoder.h" #include "modules/video_coding/generic_decoder.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -58,8 +59,8 @@ class VCMDecoderDatabase { SequenceChecker decoder_sequence_checker_; - absl::optional current_payload_type_; - absl::optional current_decoder_ + std::optional current_payload_type_; + std::optional current_decoder_ RTC_GUARDED_BY(decoder_sequence_checker_); // Initialization paramaters for decoders keyed by payload type. 
std::map decoder_settings_; diff --git a/modules/video_coding/decoder_database_unittest.cc b/modules/video_coding/decoder_database_unittest.cc index 2e9c91b1c4..485fd60f1e 100644 --- a/modules/video_coding/decoder_database_unittest.cc +++ b/modules/video_coding/decoder_database_unittest.cc @@ -14,6 +14,8 @@ #include #include "api/test/mock_video_decoder.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/video_decoder.h" #include "test/gmock.h" #include "test/gtest.h" diff --git a/modules/video_coding/deprecated/BUILD.gn b/modules/video_coding/deprecated/BUILD.gn index 7812092136..9b68262c0a 100644 --- a/modules/video_coding/deprecated/BUILD.gn +++ b/modules/video_coding/deprecated/BUILD.gn @@ -23,8 +23,12 @@ rtc_library("deprecated_decoding_state") { ":deprecated_frame_buffer", ":deprecated_jitter_buffer_common", ":deprecated_packet", + "..:codec_globals_headers", + "../../../api/video:video_frame", + "../../../api/video:video_frame_type", "../../../common_video", "../../../modules:module_api_public", + "../../../rtc_base:checks", "../../../rtc_base:logging", ] } @@ -34,7 +38,10 @@ rtc_library("deprecated_event_wrapper") { "event_wrapper.cc", "event_wrapper.h", ] - deps = [ "../../../rtc_base:rtc_event" ] + deps = [ + "../../../api/units:time_delta", + "../../../rtc_base:rtc_event", + ] } rtc_library("deprecated_jitter_buffer_common") { @@ -52,8 +59,11 @@ rtc_library("deprecated_jitter_buffer") { ":deprecated_frame_buffer", ":deprecated_jitter_buffer_common", ":deprecated_packet", + ":deprecated_session_info", "../../../api:field_trials_view", + "../../../api/units:data_size", "../../../api/units:timestamp", + "../../../api/video:video_frame_type", "../../../modules:module_api", "../../../modules:module_api_public", "../../../modules/video_coding:video_codec_interface", @@ -65,7 +75,6 @@ rtc_library("deprecated_jitter_buffer") { "../../../rtc_base/synchronization:mutex", "../../../system_wrappers", ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] } rtc_library("deprecated_frame_buffer") { @@ -77,7 +86,9 @@ rtc_library("deprecated_frame_buffer") { ":deprecated_jitter_buffer_common", ":deprecated_packet", ":deprecated_session_info", + "../../../api:scoped_refptr", "../../../api/video:encoded_image", + "../../../api/video:video_frame_type", "../../../api/video:video_rtp_headers", "../../../modules/video_coding:codec_globals_headers", "../../../modules/video_coding:encoded_frame", @@ -96,11 +107,11 @@ rtc_library("deprecated_packet") { "../../../api:rtp_headers", "../../../api:rtp_packet_info", "../../../api/units:timestamp", + "../../../api/video:video_frame", "../../../api/video:video_frame_type", "../../../modules/rtp_rtcp:rtp_rtcp_format", "../../../modules/rtp_rtcp:rtp_video_header", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("deprecated_receiver") { @@ -114,7 +125,10 @@ rtc_library("deprecated_receiver") { ":deprecated_jitter_buffer_common", ":deprecated_packet", "../../../api:field_trials_view", + "../../../api/units:time_delta", + "../../../api/units:timestamp", "../../../api/video:encoded_image", + "../../../api/video:video_rtp_headers", "../../../modules/video_coding", "../../../modules/video_coding:encoded_frame", "../../../modules/video_coding:video_codec_interface", @@ -123,29 +137,33 @@ rtc_library("deprecated_receiver") { "../../../rtc_base:logging", "../../../rtc_base:safe_conversions", "../../../system_wrappers", + "//third_party/abseil-cpp/absl/memory", ] - absl_deps = [ 
"//third_party/abseil-cpp/absl/memory" ] } rtc_library("deprecated_session_info") { deps = [ ":deprecated_jitter_buffer_common", ":deprecated_packet", + "../../../api/video:video_frame", + "../../../api/video:video_frame_type", "../../../modules:module_api", "../../../modules:module_api_public", "../../../modules/video_coding:codec_globals_headers", + "../../../rtc_base:checks", "../../../rtc_base:logging", + "//third_party/abseil-cpp/absl/algorithm:container", ] sources = [ "session_info.cc", "session_info.h", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:variant" ] } rtc_library("deprecated_stream_generator") { deps = [ ":deprecated_packet", + "../../../api/video:video_frame_type", "../../../rtc_base:checks", ] sources = [ @@ -165,6 +183,7 @@ rtc_library("deprecated_unittests") { visibility += [ "../../../modules/*" ] deps = [ ":deprecated_decoding_state", + ":deprecated_event_wrapper", ":deprecated_frame_buffer", ":deprecated_jitter_buffer", ":deprecated_jitter_buffer_common", @@ -172,6 +191,10 @@ rtc_library("deprecated_unittests") { ":deprecated_receiver", ":deprecated_session_info", ":deprecated_stream_generator", + "../../../api:rtp_headers", + "../../../api/units:time_delta", + "../../../api/video:video_frame", + "../../../api/video:video_frame_type", "../../../common_video", "../../../modules/rtp_rtcp:rtp_video_header", "../../../modules/video_coding:codec_globals_headers", @@ -181,6 +204,6 @@ rtc_library("deprecated_unittests") { "../../../system_wrappers", "../../../test:scoped_key_value_config", "../../../test:test_support", + "//third_party/abseil-cpp/absl/memory", ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] } diff --git a/modules/video_coding/deprecated/decoding_state.cc b/modules/video_coding/deprecated/decoding_state.cc index 1ec8e107a9..6711cd5daf 100644 --- a/modules/video_coding/deprecated/decoding_state.cc +++ b/modules/video_coding/deprecated/decoding_state.cc @@ -10,11 +10,20 @@ #include "modules/video_coding/deprecated/decoding_state.h" +#include +#include +#include +#include +#include + +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "common_video/h264/h264_common.h" #include "modules/include/module_common_types_public.h" +#include "modules/video_coding/codecs/interface/common_constants.h" #include "modules/video_coding/deprecated/frame_buffer.h" -#include "modules/video_coding/deprecated/jitter_buffer_common.h" #include "modules/video_coding/deprecated/packet.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { @@ -58,7 +67,7 @@ bool VCMDecodingState::IsOldFrame(const VCMFrameBuffer* frame) const { RTC_DCHECK(frame); if (in_initial_state_) return false; - return !IsNewerTimestamp(frame->Timestamp(), time_stamp_); + return !IsNewerTimestamp(frame->RtpTimestamp(), time_stamp_); } bool VCMDecodingState::IsOldPacket(const VCMPacket* packet) const { @@ -74,7 +83,7 @@ void VCMDecodingState::SetState(const VCMFrameBuffer* frame) { if (!UsingFlexibleMode(frame)) UpdateSyncState(frame); sequence_num_ = static_cast(frame->GetHighSeqNum()); - time_stamp_ = frame->Timestamp(); + time_stamp_ = frame->RtpTimestamp(); picture_id_ = frame->PictureId(); temporal_id_ = frame->TemporalId(); tl0_pic_id_ = frame->Tl0PicId(); @@ -144,7 +153,7 @@ bool VCMDecodingState::UpdateEmptyFrame(const VCMFrameBuffer* frame) { // Continuous empty packets or continuous frames can be dropped if we // advance the sequence number. 
sequence_num_ = frame->GetHighSeqNum(); - time_stamp_ = frame->Timestamp(); + time_stamp_ = frame->RtpTimestamp(); return true; } return false; diff --git a/modules/video_coding/deprecated/decoding_state_unittest.cc b/modules/video_coding/deprecated/decoding_state_unittest.cc index 1b589b05ab..a32583d4dc 100644 --- a/modules/video_coding/deprecated/decoding_state_unittest.cc +++ b/modules/video_coding/deprecated/decoding_state_unittest.cc @@ -10,6 +10,10 @@ #include "modules/video_coding/deprecated/decoding_state.h" +#include + +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/codecs/interface/common_constants.h" #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" diff --git a/modules/video_coding/deprecated/event_wrapper.cc b/modules/video_coding/deprecated/event_wrapper.cc index 16321b6fc6..709ec1c4d2 100644 --- a/modules/video_coding/deprecated/event_wrapper.cc +++ b/modules/video_coding/deprecated/event_wrapper.cc @@ -10,6 +10,7 @@ #include "modules/video_coding/deprecated/event_wrapper.h" +#include "api/units/time_delta.h" #include "rtc_base/event.h" namespace webrtc { @@ -30,7 +31,7 @@ class EventWrapperImpl : public EventWrapper { } private: - rtc::Event event_; + Event event_; }; // static diff --git a/modules/video_coding/deprecated/frame_buffer.cc b/modules/video_coding/deprecated/frame_buffer.cc index 76345ac6e4..395f596f46 100644 --- a/modules/video_coding/deprecated/frame_buffer.cc +++ b/modules/video_coding/deprecated/frame_buffer.cc @@ -12,9 +12,18 @@ #include +#include +#include + #include "api/video/encoded_image.h" +#include "api/video/video_frame_type.h" #include "api/video/video_timing.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "modules/video_coding/deprecated/jitter_buffer_common.h" #include "modules/video_coding/deprecated/packet.h" +#include "modules/video_coding/deprecated/session_info.h" +#include "modules/video_coding/encoded_frame.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/trace_event.h" @@ -81,7 +90,7 @@ VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(const VCMPacket& packet, if (kStateEmpty == _state) { // First packet (empty and/or media) inserted into this frame. // store some info and set some initial values. - SetTimestamp(packet.timestamp); + SetRtpTimestamp(packet.timestamp); // We only take the ntp timestamp of the first packet of a frame. ntp_time_ms_ = packet.ntp_time_ms_; _codec = packet.codec(); diff --git a/modules/video_coding/deprecated/frame_buffer.h b/modules/video_coding/deprecated/frame_buffer.h index de8d0ab7cb..9aae5dbf9d 100644 --- a/modules/video_coding/deprecated/frame_buffer.h +++ b/modules/video_coding/deprecated/frame_buffer.h @@ -16,6 +16,10 @@ #include +#include "api/scoped_refptr.h" +#include "api/video/encoded_image.h" +#include "api/video/video_frame_type.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/deprecated/jitter_buffer_common.h" #include "modules/video_coding/deprecated/packet.h" @@ -76,7 +80,7 @@ class VCMFrameBuffer : public VCMEncodedFrame { VCMFrameBufferStateEnum _state; // Current state of the frame // Set with SetEncodedData, but keep pointer to the concrete class here, to // enable reallocation and mutation. 
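// Illustrative note, not part of the original patch: the member change just
// below is one instance of a patch-wide cleanup that drops the legacy rtc::
// qualifier, so rtc::scoped_refptr, rtc::Buffer, rtc::Event and
// rtc::RefCountedNonVirtual appear under their plain webrtc-namespace
// spellings. A minimal sketch of the pattern, assuming "api/scoped_refptr.h"
// exposes the unqualified name during the migration:
//   rtc::scoped_refptr<EncodedImageBuffer> buffer;  // before
//   scoped_refptr<EncodedImageBuffer> buffer;       // after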
- rtc::scoped_refptr encoded_image_buffer_; + scoped_refptr encoded_image_buffer_; VCMSessionInfo _sessionInfo; uint16_t _nackCount; int64_t _latestPacketTimeMs; diff --git a/modules/video_coding/deprecated/jitter_buffer.cc b/modules/video_coding/deprecated/jitter_buffer.cc index bae4bac9f8..2e6bd97a9b 100644 --- a/modules/video_coding/deprecated/jitter_buffer.cc +++ b/modules/video_coding/deprecated/jitter_buffer.cc @@ -10,17 +10,29 @@ #include "modules/video_coding/deprecated/jitter_buffer.h" #include -#include +#include +#include +#include +#include #include +#include +#include "api/field_trials_view.h" +#include "api/units/data_size.h" #include "api/units/timestamp.h" +#include "api/video/video_frame_type.h" +#include "modules/include/module_common_types_public.h" +#include "modules/video_coding/deprecated/decoding_state.h" +#include "modules/video_coding/deprecated/event_wrapper.h" #include "modules/video_coding/deprecated/frame_buffer.h" #include "modules/video_coding/deprecated/jitter_buffer_common.h" #include "modules/video_coding/deprecated/packet.h" +#include "modules/video_coding/deprecated/session_info.h" #include "modules/video_coding/timing/inter_frame_delay_variation_calculator.h" #include "modules/video_coding/timing/jitter_estimator.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -38,7 +50,7 @@ bool HasNonEmptyState(FrameListPair pair) { } void FrameList::InsertFrame(VCMFrameBuffer* frame) { - insert(rbegin().base(), FrameListPair(frame->Timestamp(), frame)); + insert(rbegin().base(), FrameListPair(frame->RtpTimestamp(), frame)); } VCMFrameBuffer* FrameList::PopFrame(uint32_t timestamp) { @@ -286,7 +298,7 @@ VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) { // Wait for this one to get complete. 
waiting_for_completion_.frame_size = frame->size(); waiting_for_completion_.latest_packet_time = frame->LatestPacketTimeMs(); - waiting_for_completion_.timestamp = frame->Timestamp(); + waiting_for_completion_.timestamp = frame->RtpTimestamp(); } } @@ -521,7 +533,8 @@ bool VCMJitterBuffer::IsContinuous(const VCMFrameBuffer& frame) const { for (FrameList::const_iterator it = decodable_frames_.begin(); it != decodable_frames_.end(); ++it) { VCMFrameBuffer* decodable_frame = it->second; - if (IsNewerTimestamp(decodable_frame->Timestamp(), frame.Timestamp())) { + if (IsNewerTimestamp(decodable_frame->RtpTimestamp(), + frame.RtpTimestamp())) { break; } decoding_state.SetState(decodable_frame); @@ -555,7 +568,7 @@ void VCMJitterBuffer::FindAndInsertContinuousFramesWithState( it != incomplete_frames_.end();) { VCMFrameBuffer* frame = it->second; if (IsNewerTimestamp(original_decoded_state.time_stamp(), - frame->Timestamp())) { + frame->RtpTimestamp())) { ++it; continue; } @@ -574,7 +587,7 @@ void VCMJitterBuffer::FindAndInsertContinuousFramesWithState( uint32_t VCMJitterBuffer::EstimatedJitterMs() { MutexLock lock(&mutex_); const double rtt_mult = 1.0f; - return jitter_estimate_.GetJitterEstimate(rtt_mult, absl::nullopt).ms(); + return jitter_estimate_.GetJitterEstimate(rtt_mult, std::nullopt).ms(); } void VCMJitterBuffer::SetNackSettings(size_t max_nack_list_size, @@ -592,11 +605,11 @@ int VCMJitterBuffer::NonContinuousOrIncompleteDuration() { if (incomplete_frames_.empty()) { return 0; } - uint32_t start_timestamp = incomplete_frames_.Front()->Timestamp(); + uint32_t start_timestamp = incomplete_frames_.Front()->RtpTimestamp(); if (!decodable_frames_.empty()) { - start_timestamp = decodable_frames_.Back()->Timestamp(); + start_timestamp = decodable_frames_.Back()->RtpTimestamp(); } - return incomplete_frames_.Back()->Timestamp() - start_timestamp; + return incomplete_frames_.Back()->RtpTimestamp() - start_timestamp; } uint16_t VCMJitterBuffer::EstimatedLowSequenceNumber( @@ -861,7 +874,7 @@ void VCMJitterBuffer::UpdateJitterEstimate(const VCMFrameBuffer& frame, } // No retransmitted frames should be a part of the jitter // estimate. 
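// Illustrative note, not part of the original patch: the rename just below
// follows the EncodedImage accessor change visible throughout this patch,
// where Timestamp()/SetTimestamp() become RtpTimestamp()/SetRtpTimestamp()
// (see the using-declarations in the encoded_frame.h hunk further down).
// A minimal sketch of the pattern:
//   image.SetRtpTimestamp(90000);        // was image.SetTimestamp(90000);
//   uint32_t ts = image.RtpTimestamp();  // was image.Timestamp();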
- UpdateJitterEstimate(frame.LatestPacketTimeMs(), frame.Timestamp(), + UpdateJitterEstimate(frame.LatestPacketTimeMs(), frame.RtpTimestamp(), frame.size(), incomplete_frame); } diff --git a/modules/video_coding/deprecated/jitter_buffer.h b/modules/video_coding/deprecated/jitter_buffer.h index 49af9c7b1e..1657f46573 100644 --- a/modules/video_coding/deprecated/jitter_buffer.h +++ b/modules/video_coding/deprecated/jitter_buffer.h @@ -11,6 +11,8 @@ #ifndef MODULES_VIDEO_CODING_DEPRECATED_JITTER_BUFFER_H_ #define MODULES_VIDEO_CODING_DEPRECATED_JITTER_BUFFER_H_ +#include +#include #include #include #include @@ -18,12 +20,10 @@ #include #include "api/field_trials_view.h" -#include "modules/include/module_common_types.h" #include "modules/include/module_common_types_public.h" #include "modules/video_coding/deprecated/decoding_state.h" #include "modules/video_coding/deprecated/event_wrapper.h" #include "modules/video_coding/deprecated/jitter_buffer_common.h" -#include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/timing/inter_frame_delay_variation_calculator.h" #include "modules/video_coding/timing/jitter_estimator.h" #include "rtc_base/synchronization/mutex.h" diff --git a/modules/video_coding/deprecated/jitter_buffer_unittest.cc b/modules/video_coding/deprecated/jitter_buffer_unittest.cc index 81483a1e2c..cc0eaacec7 100644 --- a/modules/video_coding/deprecated/jitter_buffer_unittest.cc +++ b/modules/video_coding/deprecated/jitter_buffer_unittest.cc @@ -10,18 +10,26 @@ #include "modules/video_coding/deprecated/jitter_buffer.h" -#include +#include +#include +#include #include -#include #include #include "absl/memory/memory.h" +#include "api/rtp_headers.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "common_video/h264/h264_common.h" -#include "modules/video_coding/deprecated/frame_buffer.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "modules/video_coding/deprecated/event_wrapper.h" +#include "modules/video_coding/deprecated/jitter_buffer_common.h" #include "modules/video_coding/deprecated/packet.h" #include "modules/video_coding/deprecated/stream_generator.h" +#include "modules/video_coding/encoded_frame.h" #include "system_wrappers/include/clock.h" -#include "test/gmock.h" #include "test/gtest.h" #include "test/scoped_key_value_config.h" @@ -70,7 +78,7 @@ class TestBasicJitterBuffer : public ::testing::Test { VCMEncodedFrame* found_frame = jitter_buffer_->NextCompleteFrame(10); if (!found_frame) return nullptr; - return jitter_buffer_->ExtractAndSetDecode(found_frame->Timestamp()); + return jitter_buffer_->ExtractAndSetDecode(found_frame->RtpTimestamp()); } void CheckOutFrame(VCMEncodedFrame* frame_out, @@ -203,7 +211,7 @@ class TestRunningJitterBuffer : public ::testing::Test { return false; VCMEncodedFrame* frame = - jitter_buffer_->ExtractAndSetDecode(found_frame->Timestamp()); + jitter_buffer_->ExtractAndSetDecode(found_frame->RtpTimestamp()); bool ret = (frame != NULL); jitter_buffer_->ReleaseFrame(frame); return ret; @@ -691,12 +699,12 @@ TEST_F(TestBasicJitterBuffer, TestSkipForwardVp9) { EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re)); VCMEncodedFrame* frame_out = DecodeCompleteFrame(); - EXPECT_EQ(1000U, frame_out->Timestamp()); + EXPECT_EQ(1000U, frame_out->RtpTimestamp()); EXPECT_EQ(VideoFrameType::kVideoFrameKey, 
frame_out->FrameType()); jitter_buffer_->ReleaseFrame(frame_out); frame_out = DecodeCompleteFrame(); - EXPECT_EQ(13000U, frame_out->Timestamp()); + EXPECT_EQ(13000U, frame_out->RtpTimestamp()); EXPECT_EQ(VideoFrameType::kVideoFrameDelta, frame_out->FrameType()); jitter_buffer_->ReleaseFrame(frame_out); } @@ -755,7 +763,7 @@ TEST_F(TestBasicJitterBuffer, ReorderedVp9SsData_3TlLayers) { EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re)); VCMEncodedFrame* frame_out = DecodeCompleteFrame(); - EXPECT_EQ(3000U, frame_out->Timestamp()); + EXPECT_EQ(3000U, frame_out->RtpTimestamp()); EXPECT_EQ(VideoFrameType::kVideoFrameKey, frame_out->FrameType()); EXPECT_EQ(0, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx); EXPECT_FALSE( @@ -763,14 +771,14 @@ TEST_F(TestBasicJitterBuffer, ReorderedVp9SsData_3TlLayers) { jitter_buffer_->ReleaseFrame(frame_out); frame_out = DecodeCompleteFrame(); - EXPECT_EQ(6000U, frame_out->Timestamp()); + EXPECT_EQ(6000U, frame_out->RtpTimestamp()); EXPECT_EQ(VideoFrameType::kVideoFrameDelta, frame_out->FrameType()); EXPECT_EQ(2, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx); EXPECT_TRUE(frame_out->CodecSpecific()->codecSpecific.VP9.temporal_up_switch); jitter_buffer_->ReleaseFrame(frame_out); frame_out = DecodeCompleteFrame(); - EXPECT_EQ(9000U, frame_out->Timestamp()); + EXPECT_EQ(9000U, frame_out->RtpTimestamp()); EXPECT_EQ(VideoFrameType::kVideoFrameDelta, frame_out->FrameType()); EXPECT_EQ(1, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx); EXPECT_TRUE(frame_out->CodecSpecific()->codecSpecific.VP9.temporal_up_switch); @@ -848,7 +856,7 @@ TEST_F(TestBasicJitterBuffer, ReorderedVp9SsData_2Tl2SLayers) { EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &re)); VCMEncodedFrame* frame_out = DecodeCompleteFrame(); - EXPECT_EQ(3000U, frame_out->Timestamp()); + EXPECT_EQ(3000U, frame_out->RtpTimestamp()); EXPECT_EQ(VideoFrameType::kVideoFrameKey, frame_out->FrameType()); EXPECT_EQ(0, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx); EXPECT_FALSE( @@ -856,7 +864,7 @@ TEST_F(TestBasicJitterBuffer, ReorderedVp9SsData_2Tl2SLayers) { jitter_buffer_->ReleaseFrame(frame_out); frame_out = DecodeCompleteFrame(); - EXPECT_EQ(6000U, frame_out->Timestamp()); + EXPECT_EQ(6000U, frame_out->RtpTimestamp()); EXPECT_EQ(VideoFrameType::kVideoFrameDelta, frame_out->FrameType()); EXPECT_EQ(1, frame_out->CodecSpecific()->codecSpecific.VP9.temporal_idx); EXPECT_TRUE(frame_out->CodecSpecific()->codecSpecific.VP9.temporal_up_switch); @@ -903,10 +911,8 @@ TEST_F(TestBasicJitterBuffer, SpsAndPpsHandling) { packet_->markerBit = true; packet_->video_header.codec = kVideoCodecH264; h264_header.nalu_type = H264::NaluType::kIdr; - h264_header.nalus[0].type = H264::NaluType::kIdr; - h264_header.nalus[0].sps_id = -1; - h264_header.nalus[0].pps_id = 0; - h264_header.nalus_length = 1; + h264_header.nalus = { + {.type = H264::NaluType::kIdr, .sps_id = -1, .pps_id = 0}}; bool retransmitted = false; EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &retransmitted)); @@ -922,13 +928,9 @@ TEST_F(TestBasicJitterBuffer, SpsAndPpsHandling) { packet_->markerBit = false; packet_->video_header.codec = kVideoCodecH264; h264_header.nalu_type = H264::NaluType::kStapA; - h264_header.nalus[0].type = H264::NaluType::kSps; - h264_header.nalus[0].sps_id = 0; - h264_header.nalus[0].pps_id = -1; - h264_header.nalus[1].type = H264::NaluType::kPps; - h264_header.nalus[1].sps_id = 0; - h264_header.nalus[1].pps_id = 0; - 
h264_header.nalus_length = 2; + h264_header.nalus = { + {.type = H264::NaluType::kSps, .sps_id = 0, .pps_id = -1}, + {.type = H264::NaluType::kPps, .sps_id = 0, .pps_id = 0}}; // Not complete since the marker bit hasn't been received. EXPECT_EQ(kIncomplete, jitter_buffer_->InsertPacket(*packet_, &retransmitted)); @@ -940,10 +942,8 @@ TEST_F(TestBasicJitterBuffer, SpsAndPpsHandling) { packet_->markerBit = true; packet_->video_header.codec = kVideoCodecH264; h264_header.nalu_type = H264::NaluType::kIdr; - h264_header.nalus[0].type = H264::NaluType::kIdr; - h264_header.nalus[0].sps_id = -1; - h264_header.nalus[0].pps_id = 0; - h264_header.nalus_length = 1; + h264_header.nalus = { + {.type = H264::NaluType::kIdr, .sps_id = -1, .pps_id = 0}}; // Complete and decodable since the pps and sps are received in the first // packet of this frame. EXPECT_EQ(kCompleteSession, @@ -961,10 +961,9 @@ TEST_F(TestBasicJitterBuffer, SpsAndPpsHandling) { packet_->markerBit = true; packet_->video_header.codec = kVideoCodecH264; h264_header.nalu_type = H264::NaluType::kSlice; - h264_header.nalus[0].type = H264::NaluType::kSlice; - h264_header.nalus[0].sps_id = -1; - h264_header.nalus[0].pps_id = 0; - h264_header.nalus_length = 1; + h264_header.nalus = { + {.type = H264::NaluType::kIdr, .sps_id = -1, .pps_id = 0}}; + // Complete and decodable since sps, pps and key frame has been received. EXPECT_EQ(kCompleteSession, jitter_buffer_->InsertPacket(*packet_, &retransmitted)); @@ -1089,7 +1088,7 @@ TEST_F(TestBasicJitterBuffer, TestInsertOldFrame) { jitter_buffer_->InsertPacket(*packet_, &retransmitted)); VCMEncodedFrame* frame_out = DecodeCompleteFrame(); - EXPECT_EQ(3000u, frame_out->Timestamp()); + EXPECT_EQ(3000u, frame_out->RtpTimestamp()); CheckOutFrame(frame_out, size_, false); EXPECT_EQ(VideoFrameType::kVideoFrameKey, frame_out->FrameType()); jitter_buffer_->ReleaseFrame(frame_out); @@ -1124,7 +1123,7 @@ TEST_F(TestBasicJitterBuffer, TestInsertOldFrameWithSeqNumWrap) { jitter_buffer_->InsertPacket(*packet_, &retransmitted)); VCMEncodedFrame* frame_out = DecodeCompleteFrame(); - EXPECT_EQ(timestamp_, frame_out->Timestamp()); + EXPECT_EQ(timestamp_, frame_out->RtpTimestamp()); CheckOutFrame(frame_out, size_, false); @@ -1234,13 +1233,13 @@ TEST_F(TestBasicJitterBuffer, 2FrameWithTimestampWrap) { jitter_buffer_->InsertPacket(*packet_, &retransmitted)); VCMEncodedFrame* frame_out = DecodeCompleteFrame(); - EXPECT_EQ(0xffffff00, frame_out->Timestamp()); + EXPECT_EQ(0xffffff00, frame_out->RtpTimestamp()); CheckOutFrame(frame_out, size_, false); EXPECT_EQ(VideoFrameType::kVideoFrameKey, frame_out->FrameType()); jitter_buffer_->ReleaseFrame(frame_out); VCMEncodedFrame* frame_out2 = DecodeCompleteFrame(); - EXPECT_EQ(2700u, frame_out2->Timestamp()); + EXPECT_EQ(2700u, frame_out2->RtpTimestamp()); CheckOutFrame(frame_out2, size_, false); EXPECT_EQ(VideoFrameType::kVideoFrameDelta, frame_out2->FrameType()); jitter_buffer_->ReleaseFrame(frame_out2); @@ -1277,13 +1276,13 @@ TEST_F(TestBasicJitterBuffer, Insert2FramesReOrderedWithTimestampWrap) { jitter_buffer_->InsertPacket(*packet_, &retransmitted)); VCMEncodedFrame* frame_out = DecodeCompleteFrame(); - EXPECT_EQ(0xffffff00, frame_out->Timestamp()); + EXPECT_EQ(0xffffff00, frame_out->RtpTimestamp()); CheckOutFrame(frame_out, size_, false); EXPECT_EQ(VideoFrameType::kVideoFrameKey, frame_out->FrameType()); jitter_buffer_->ReleaseFrame(frame_out); VCMEncodedFrame* frame_out2 = DecodeCompleteFrame(); - EXPECT_EQ(2700u, frame_out2->Timestamp()); + EXPECT_EQ(2700u, 
frame_out2->RtpTimestamp()); CheckOutFrame(frame_out2, size_, false); EXPECT_EQ(VideoFrameType::kVideoFrameDelta, frame_out2->FrameType()); jitter_buffer_->ReleaseFrame(frame_out2); @@ -1377,7 +1376,7 @@ TEST_F(TestBasicJitterBuffer, ExceedNumOfFrameWithSeqNumWrap) { jitter_buffer_->InsertPacket(*packet_, &retransmitted)); VCMEncodedFrame* frame_out = DecodeCompleteFrame(); - EXPECT_EQ(first_key_frame_timestamp, frame_out->Timestamp()); + EXPECT_EQ(first_key_frame_timestamp, frame_out->RtpTimestamp()); CheckOutFrame(frame_out, size_, false); EXPECT_EQ(VideoFrameType::kVideoFrameKey, frame_out->FrameType()); jitter_buffer_->ReleaseFrame(frame_out); diff --git a/modules/video_coding/deprecated/packet.cc b/modules/video_coding/deprecated/packet.cc index 110f38e0fc..6b55bc45d3 100644 --- a/modules/video_coding/deprecated/packet.cc +++ b/modules/video_coding/deprecated/packet.cc @@ -10,7 +10,14 @@ #include "modules/video_coding/deprecated/packet.h" +#include +#include +#include + #include "api/rtp_headers.h" +#include "api/units/timestamp.h" +#include "api/video/video_codec_type.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" namespace webrtc { @@ -25,8 +32,7 @@ VCMPacket::VCMPacket() timesNacked(-1), completeNALU(kNaluUnset), insertStartCode(false), - video_header() { -} + video_header() {} VCMPacket::VCMPacket(const uint8_t* ptr, size_t size, @@ -59,7 +65,7 @@ VCMPacket::VCMPacket(const uint8_t* ptr, // Playout decisions are made entirely based on first packet in a frame. if (!is_first_packet_in_frame()) { - video_header.playout_delay = absl::nullopt; + video_header.playout_delay = std::nullopt; } } diff --git a/modules/video_coding/deprecated/packet.h b/modules/video_coding/deprecated/packet.h index de69ff4302..7bb1ee6fa3 100644 --- a/modules/video_coding/deprecated/packet.h +++ b/modules/video_coding/deprecated/packet.h @@ -14,11 +14,12 @@ #include #include -#include "absl/types/optional.h" +#include + #include "api/rtp_headers.h" #include "api/rtp_packet_info.h" #include "api/units/timestamp.h" -#include "api/video/video_frame_type.h" +#include "api/video/video_codec_type.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" @@ -71,7 +72,7 @@ class VCMPacket { bool insertStartCode; // True if a start code should be inserted before this // packet. 
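// Illustrative note, not part of the original patch: the generic_descriptor
// member a couple of lines below is one instance of the patch-wide move from
// absl::optional to std::optional, which also swaps the
// "absl/types/optional.h" include for <optional> and absl::nullopt for
// std::nullopt. A minimal sketch of the pattern:
//   #include <optional>
//   std::optional<uint32_t> value;  // was absl::optional<uint32_t>
//   value = std::nullopt;           // was value = absl::nullopt;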
RTPVideoHeader video_header; - absl::optional generic_descriptor; + std::optional generic_descriptor; RtpPacketInfo packet_info; }; diff --git a/modules/video_coding/deprecated/receiver.cc b/modules/video_coding/deprecated/receiver.cc index 44a041d0d1..c9b41ddd14 100644 --- a/modules/video_coding/deprecated/receiver.cc +++ b/modules/video_coding/deprecated/receiver.cc @@ -12,14 +12,24 @@ #include #include +#include +#include #include #include #include "absl/memory/memory.h" +#include "api/field_trials_view.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/encoded_image.h" +#include "api/video/video_timing.h" +#include "modules/video_coding/deprecated/event_wrapper.h" #include "modules/video_coding/deprecated/jitter_buffer_common.h" +#include "modules/video_coding/deprecated/packet.h" #include "modules/video_coding/encoded_frame.h" +#include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/internal_defines.h" +#include "modules/video_coding/timing/timing.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/trace_event.h" @@ -88,12 +98,11 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms, if (found_frame == nullptr) { return nullptr; } - uint32_t frame_timestamp = found_frame->Timestamp(); + uint32_t frame_timestamp = found_frame->RtpTimestamp(); - if (absl::optional playout_delay = + if (std::optional playout_delay = found_frame->EncodedImage().PlayoutDelay()) { - timing_->set_min_playout_delay(playout_delay->min()); - timing_->set_max_playout_delay(playout_delay->max()); + timing_->set_playout_delay(*playout_delay); } // We have a frame - Set timing and render timestamp. @@ -138,7 +147,7 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms, static_cast(clock_->TimeInMilliseconds() - start_time_ms); uint16_t new_max_wait_time = static_cast(VCM_MAX(available_wait_time, 0)); - uint32_t wait_time_ms = rtc::saturated_cast( + uint32_t wait_time_ms = saturated_cast( timing_ ->MaxWaitingTime(Timestamp::Millis(render_time_ms), clock_->CurrentTime(), @@ -161,8 +170,9 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(uint16_t max_wait_time_ms, return NULL; } frame->SetRenderTime(render_time_ms); - TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", frame->Timestamp(), "SetRenderTS", - "render_time", frame->RenderTimeMs()); + TRACE_EVENT_ASYNC_STEP_INTO1("webrtc", "Video", frame->RtpTimestamp(), + "SetRenderTS", "render_time", + frame->RenderTimeMs()); return frame; } diff --git a/modules/video_coding/deprecated/receiver.h b/modules/video_coding/deprecated/receiver.h index e6f3d70e11..f537952abe 100644 --- a/modules/video_coding/deprecated/receiver.h +++ b/modules/video_coding/deprecated/receiver.h @@ -11,6 +11,8 @@ #ifndef MODULES_VIDEO_CODING_DEPRECATED_RECEIVER_H_ #define MODULES_VIDEO_CODING_DEPRECATED_RECEIVER_H_ +#include +#include #include #include @@ -18,7 +20,6 @@ #include "modules/video_coding/deprecated/event_wrapper.h" #include "modules/video_coding/deprecated/jitter_buffer.h" #include "modules/video_coding/deprecated/packet.h" -#include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/timing/timing.h" namespace webrtc { diff --git a/modules/video_coding/deprecated/receiver_unittest.cc b/modules/video_coding/deprecated/receiver_unittest.cc index 8845a47c3a..0d41a111d7 100644 --- a/modules/video_coding/deprecated/receiver_unittest.cc +++ b/modules/video_coding/deprecated/receiver_unittest.cc 
@@ -16,6 +16,9 @@ #include #include +#include "api/units/time_delta.h" +#include "api/video/video_frame_type.h" +#include "modules/video_coding/deprecated/event_wrapper.h" #include "modules/video_coding/deprecated/jitter_buffer_common.h" #include "modules/video_coding/deprecated/packet.h" #include "modules/video_coding/deprecated/stream_generator.h" diff --git a/modules/video_coding/deprecated/session_info.cc b/modules/video_coding/deprecated/session_info.cc index b15dc0a9ff..c351425dec 100644 --- a/modules/video_coding/deprecated/session_info.cc +++ b/modules/video_coding/deprecated/session_info.cc @@ -12,15 +12,22 @@ #include +#include +#include +#include #include -#include "absl/types/variant.h" -#include "modules/include/module_common_types.h" +#include "absl/algorithm/container.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "modules/include/module_common_types_public.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" #include "modules/video_coding/codecs/interface/common_constants.h" #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/deprecated/jitter_buffer_common.h" #include "modules/video_coding/deprecated/packet.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { @@ -71,11 +78,11 @@ int VCMSessionInfo::PictureId() const { if (packets_.empty()) return kNoPictureId; if (packets_.front().video_header.codec == kVideoCodecVP8) { - return absl::get( + return std::get( packets_.front().video_header.video_type_header) .pictureId; } else if (packets_.front().video_header.codec == kVideoCodecVP9) { - return absl::get( + return std::get( packets_.front().video_header.video_type_header) .picture_id; } else { @@ -87,11 +94,11 @@ int VCMSessionInfo::TemporalId() const { if (packets_.empty()) return kNoTemporalIdx; if (packets_.front().video_header.codec == kVideoCodecVP8) { - return absl::get( + return std::get( packets_.front().video_header.video_type_header) .temporalIdx; } else if (packets_.front().video_header.codec == kVideoCodecVP9) { - return absl::get( + return std::get( packets_.front().video_header.video_type_header) .temporal_idx; } else { @@ -103,11 +110,11 @@ bool VCMSessionInfo::LayerSync() const { if (packets_.empty()) return false; if (packets_.front().video_header.codec == kVideoCodecVP8) { - return absl::get( + return std::get( packets_.front().video_header.video_type_header) .layerSync; } else if (packets_.front().video_header.codec == kVideoCodecVP9) { - return absl::get( + return std::get( packets_.front().video_header.video_type_header) .temporal_up_switch; } else { @@ -119,11 +126,11 @@ int VCMSessionInfo::Tl0PicId() const { if (packets_.empty()) return kNoTl0PicIdx; if (packets_.front().video_header.codec == kVideoCodecVP8) { - return absl::get( + return std::get( packets_.front().video_header.video_type_header) .tl0PicIdx; } else if (packets_.front().video_header.codec == kVideoCodecVP9) { - return absl::get( + return std::get( packets_.front().video_header.video_type_header) .tl0_pic_idx; } else { @@ -138,10 +145,8 @@ std::vector VCMSessionInfo::GetNaluInfos() const { std::vector nalu_infos; for (const VCMPacket& packet : packets_) { const auto& h264 = - absl::get(packet.video_header.video_type_header); - for (size_t i = 0; i < h264.nalus_length; ++i) { - nalu_infos.push_back(h264.nalus[i]); - } + std::get(packet.video_header.video_type_header); + 
absl::c_copy(h264.nalus, std::back_inserter(nalu_infos)); } return nalu_infos; } @@ -150,7 +155,7 @@ void VCMSessionInfo::SetGofInfo(const GofInfoVP9& gof_info, size_t idx) { if (packets_.empty()) return; - auto* vp9_header = absl::get_if( + auto* vp9_header = std::get_if( &packets_.front().video_header.video_type_header); if (!vp9_header || vp9_header->flexible_mode) return; @@ -206,7 +211,7 @@ size_t VCMSessionInfo::InsertBuffer(uint8_t* frame_buffer, const size_t kH264NALHeaderLengthInBytes = 1; const size_t kLengthFieldLength = 2; const auto* h264 = - absl::get_if(&packet.video_header.video_type_header); + std::get_if(&packet.video_header.video_type_header); if (h264 && h264->packetization_type == kH264StapA) { size_t required_length = 0; const uint8_t* nalu_ptr = packet_buffer + kH264NALHeaderLengthInBytes; @@ -339,7 +344,7 @@ size_t VCMSessionInfo::DeletePacketData(PacketIterator start, VCMSessionInfo::PacketIterator VCMSessionInfo::FindNextPartitionBeginning( PacketIterator it) const { while (it != packets_.end()) { - if (absl::get((*it).video_header.video_type_header) + if (std::get((*it).video_header.video_type_header) .beginningOfPartition) { return it; } @@ -353,14 +358,14 @@ VCMSessionInfo::PacketIterator VCMSessionInfo::FindPartitionEnd( RTC_DCHECK_EQ((*it).codec(), kVideoCodecVP8); PacketIterator prev_it = it; const int partition_id = - absl::get((*it).video_header.video_type_header) + std::get((*it).video_header.video_type_header) .partitionId; while (it != packets_.end()) { bool beginning = - absl::get((*it).video_header.video_type_header) + std::get((*it).video_header.video_type_header) .beginningOfPartition; int current_partition_id = - absl::get((*it).video_header.video_type_header) + std::get((*it).video_header.video_type_header) .partitionId; bool packet_loss_found = (!beginning && !InSequence(it, prev_it)); if (packet_loss_found || @@ -421,7 +426,7 @@ bool VCMSessionInfo::HaveLastPacket() const { int VCMSessionInfo::InsertPacket(const VCMPacket& packet, uint8_t* frame_buffer, - const FrameData& frame_data) { + const FrameData& /* frame_data */) { if (packet.video_header.frame_type == VideoFrameType::kEmptyFrame) { // Update sequence number of an empty packet. // Only media packets are inserted into the packet list. 
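The session_info.cc hunks above, together with the jitter_buffer_unittest.cc changes earlier in this patch, show two related migrations: video_type_header is now read with std::get/std::get_if instead of absl::get/absl::get_if, and the fixed nalus[] array plus nalus_length counter on the H.264 header is replaced by a container that is assigned with designated initializers and iterated directly (for example via absl::c_copy). A minimal sketch of the new usage follows; the include paths mirror the ones added by the patch, and treating nalus as a vector-like range is an assumption.

// Sketch of the post-patch usage; the exact container type of `nalus` is
// assumed to be range-like, everything else mirrors the hunks above.
#include <iterator>
#include <variant>
#include <vector>

#include "absl/algorithm/container.h"
#include "common_video/h264/h264_common.h"
#include "modules/rtp_rtcp/source/rtp_video_header.h"
#include "modules/video_coding/codecs/h264/include/h264_globals.h"

std::vector<webrtc::NaluInfo> CollectNalus(const webrtc::RTPVideoHeader& header) {
  const auto& h264 =
      std::get<webrtc::RTPVideoHeaderH264>(header.video_type_header);
  std::vector<webrtc::NaluInfo> nalu_infos;
  // Old style: for (size_t i = 0; i < h264.nalus_length; ++i) { ... }
  absl::c_copy(h264.nalus, std::back_inserter(nalu_infos));
  return nalu_infos;
}

void SetSingleIdrNalu(webrtc::RTPVideoHeaderH264& h264) {
  // Old style: h264.nalus[0] = ...; h264.nalus_length = 1;
  h264.nalus = {
      {.type = webrtc::H264::NaluType::kIdr, .sps_id = -1, .pps_id = 0}};
}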
diff --git a/modules/video_coding/deprecated/session_info.h b/modules/video_coding/deprecated/session_info.h index dfc5d06931..735d649c6d 100644 --- a/modules/video_coding/deprecated/session_info.h +++ b/modules/video_coding/deprecated/session_info.h @@ -17,6 +17,7 @@ #include #include +#include "api/video/video_frame_type.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/deprecated/packet.h" diff --git a/modules/video_coding/deprecated/session_info_unittest.cc b/modules/video_coding/deprecated/session_info_unittest.cc index 0a789d5f2d..598a2ef1d5 100644 --- a/modules/video_coding/deprecated/session_info_unittest.cc +++ b/modules/video_coding/deprecated/session_info_unittest.cc @@ -12,6 +12,10 @@ #include +#include + +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "modules/video_coding/deprecated/packet.h" #include "test/gtest.h" diff --git a/modules/video_coding/deprecated/stream_generator.cc b/modules/video_coding/deprecated/stream_generator.cc index 503ada2b37..09be3382fe 100644 --- a/modules/video_coding/deprecated/stream_generator.cc +++ b/modules/video_coding/deprecated/stream_generator.cc @@ -12,8 +12,10 @@ #include +#include #include +#include "api/video/video_frame_type.h" #include "modules/video_coding/deprecated/packet.h" #include "rtc_base/checks.h" diff --git a/modules/video_coding/deprecated/stream_generator.h b/modules/video_coding/deprecated/stream_generator.h index 1a86f69937..e6a90aa7be 100644 --- a/modules/video_coding/deprecated/stream_generator.h +++ b/modules/video_coding/deprecated/stream_generator.h @@ -14,6 +14,7 @@ #include +#include "api/video/video_frame_type.h" #include "modules/video_coding/deprecated/packet.h" namespace webrtc { diff --git a/modules/video_coding/encoded_frame.cc b/modules/video_coding/encoded_frame.cc index 637a20cfc9..e79e3f6c1d 100644 --- a/modules/video_coding/encoded_frame.cc +++ b/modules/video_coding/encoded_frame.cc @@ -12,8 +12,16 @@ #include -#include "absl/types/variant.h" +#include +#include + +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" #include "api/video/video_timing.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/codecs/interface/common_constants.h" #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" @@ -36,8 +44,8 @@ VCMEncodedFrame::~VCMEncodedFrame() { } void VCMEncodedFrame::Reset() { - SetTimestamp(0); - SetSpatialIndex(absl::nullopt); + SetRtpTimestamp(0); + SetSpatialIndex(std::nullopt); _renderTimeMs = -1; _payloadType = 0; _frameType = VideoFrameType::kVideoFrameDelta; @@ -57,7 +65,7 @@ void VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header) { switch (header->codec) { case kVideoCodecVP8: { const auto& vp8_header = - absl::get(header->video_type_header); + std::get(header->video_type_header); if (_codecSpecificInfo.codecType != kVideoCodecVP8) { // This is the first packet for this frame. 
_codecSpecificInfo.codecSpecific.VP8.temporalIdx = 0; @@ -79,7 +87,7 @@ void VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header) { } case kVideoCodecVP9: { const auto& vp9_header = - absl::get(header->video_type_header); + std::get(header->video_type_header); if (_codecSpecificInfo.codecType != kVideoCodecVP9) { // This is the first packet for this frame. _codecSpecificInfo.codecSpecific.VP9.temporal_idx = 0; @@ -140,6 +148,10 @@ void VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header) { _codecSpecificInfo.codecType = kVideoCodecAV1; break; } + case kVideoCodecH265: { + _codecSpecificInfo.codecType = kVideoCodecH265; + break; + } default: { _codecSpecificInfo.codecType = kVideoCodecGeneric; break; diff --git a/modules/video_coding/encoded_frame.h b/modules/video_coding/encoded_frame.h index b8c9d14653..b9c528f722 100644 --- a/modules/video_coding/encoded_frame.h +++ b/modules/video_coding/encoded_frame.h @@ -11,12 +11,12 @@ #ifndef MODULES_VIDEO_CODING_ENCODED_FRAME_H_ #define MODULES_VIDEO_CODING_ENCODED_FRAME_H_ -#include +#include #include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/include/video_codec_interface.h" -#include "modules/video_coding/include/video_coding_defines.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -46,17 +46,17 @@ class RTC_EXPORT VCMEncodedFrame : public EncodedImage { using EncodedImage::GetEncodedData; using EncodedImage::NtpTimeMs; using EncodedImage::PacketInfos; + using EncodedImage::RtpTimestamp; using EncodedImage::set_size; using EncodedImage::SetColorSpace; using EncodedImage::SetEncodedData; using EncodedImage::SetPacketInfos; + using EncodedImage::SetRtpTimestamp; using EncodedImage::SetSpatialIndex; using EncodedImage::SetSpatialLayerFrameSize; - using EncodedImage::SetTimestamp; using EncodedImage::size; using EncodedImage::SpatialIndex; using EncodedImage::SpatialLayerFrameSize; - using EncodedImage::Timestamp; /** * Get render time in milliseconds diff --git a/modules/video_coding/fec_controller_default.cc b/modules/video_coding/fec_controller_default.cc index f204b01c7c..31c5f09687 100644 --- a/modules/video_coding/fec_controller_default.cc +++ b/modules/video_coding/fec_controller_default.cc @@ -13,32 +13,35 @@ #include #include +#include #include +#include +#include "api/environment/environment.h" +#include "api/fec_controller.h" +#include "api/field_trials_view.h" +#include "api/video/video_frame_type.h" #include "modules/include/module_fec_types.h" +#include "modules/video_coding/media_opt_util.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" +#include "rtc_base/synchronization/mutex.h" +#include "system_wrappers/include/clock.h" namespace webrtc { const float kProtectionOverheadRateThreshold = 0.5; FecControllerDefault::FecControllerDefault( - Clock* clock, + const Environment& env, VCMProtectionCallback* protection_callback) - : clock_(clock), + : env_(env), protection_callback_(protection_callback), - loss_prot_logic_(new media_optimization::VCMLossProtectionLogic( - clock_->TimeInMilliseconds())), + loss_prot_logic_(new media_optimization::VCMLossProtectionLogic(env_)), max_payload_size_(1460), overhead_threshold_(GetProtectionOverheadRateThreshold()) {} -FecControllerDefault::FecControllerDefault(Clock* clock) - : clock_(clock), - loss_prot_logic_(new media_optimization::VCMLossProtectionLogic( - clock_->TimeInMilliseconds())), - 
max_payload_size_(1460), - overhead_threshold_(GetProtectionOverheadRateThreshold()) {} +FecControllerDefault::FecControllerDefault(const Environment& env) + : FecControllerDefault(env, nullptr) {} FecControllerDefault::~FecControllerDefault(void) { loss_prot_logic_->Release(); @@ -61,8 +64,8 @@ void FecControllerDefault::SetEncodingData(size_t width, float FecControllerDefault::GetProtectionOverheadRateThreshold() { float overhead_threshold = - strtof(webrtc::field_trial::FindFullName( - "WebRTC-ProtectionOverheadRateThreshold") + strtof(env_.field_trials() + .Lookup("WebRTC-ProtectionOverheadRateThreshold") .c_str(), nullptr); if (overhead_threshold > 0 && overhead_threshold <= 1) { @@ -83,7 +86,7 @@ uint32_t FecControllerDefault::UpdateFecRates( uint32_t estimated_bitrate_bps, int actual_framerate_fps, uint8_t fraction_lost, - std::vector loss_mask_vector, + std::vector /* loss_mask_vector */, int64_t round_trip_time_ms) { float target_bitrate_kbps = static_cast(estimated_bitrate_bps) / 1000.0f; @@ -107,7 +110,7 @@ uint32_t FecControllerDefault::UpdateFecRates( media_optimization::FilterPacketLossMode filter_mode = media_optimization::kMaxFilter; uint8_t packet_loss_enc = loss_prot_logic_->FilteredLoss( - clock_->TimeInMilliseconds(), filter_mode, fraction_lost); + env_.clock().TimeInMilliseconds(), filter_mode, fraction_lost); // For now use the filtered loss for computing the robustness settings. loss_prot_logic_->UpdateFilteredLossPr(packet_loss_enc); if (loss_prot_logic_->SelectedType() == media_optimization::kNone) { @@ -191,11 +194,11 @@ void FecControllerDefault::UpdateWithEncodedData( const float min_packets_per_frame = encoded_length / static_cast(max_payload_size_); if (delta_frame) { - loss_prot_logic_->UpdatePacketsPerFrame(min_packets_per_frame, - clock_->TimeInMilliseconds()); + loss_prot_logic_->UpdatePacketsPerFrame( + min_packets_per_frame, env_.clock().TimeInMilliseconds()); } else { loss_prot_logic_->UpdatePacketsPerFrameKey( - min_packets_per_frame, clock_->TimeInMilliseconds()); + min_packets_per_frame, env_.clock().TimeInMilliseconds()); } } if (!delta_frame && encoded_length > 0) { diff --git a/modules/video_coding/fec_controller_default.h b/modules/video_coding/fec_controller_default.h index a97dea011b..c4b7044d9b 100644 --- a/modules/video_coding/fec_controller_default.h +++ b/modules/video_coding/fec_controller_default.h @@ -17,24 +17,26 @@ #include #include +#include "api/environment/environment.h" #include "api/fec_controller.h" +#include "api/video/video_frame_type.h" #include "modules/video_coding/media_opt_util.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" -#include "system_wrappers/include/clock.h" namespace webrtc { class FecControllerDefault : public FecController { public: - FecControllerDefault(Clock* clock, + FecControllerDefault(const Environment& env, VCMProtectionCallback* protection_callback); - explicit FecControllerDefault(Clock* clock); - ~FecControllerDefault() override; + explicit FecControllerDefault(const Environment& env); FecControllerDefault(const FecControllerDefault&) = delete; FecControllerDefault& operator=(const FecControllerDefault&) = delete; + ~FecControllerDefault() override; + void SetProtectionCallback( VCMProtectionCallback* protection_callback) override; void SetProtectionMethod(bool enable_fec, bool enable_nack) override; @@ -54,7 +56,7 @@ class FecControllerDefault : public FecController { private: enum { kBitrateAverageWinMs = 1000 }; - Clock* const clock_; + const Environment 
env_; VCMProtectionCallback* protection_callback_; Mutex mutex_; std::unique_ptr loss_prot_logic_ diff --git a/modules/video_coding/fec_controller_unittest.cc b/modules/video_coding/fec_controller_unittest.cc index fa3cc7a93b..30d3d90a62 100644 --- a/modules/video_coding/fec_controller_unittest.cc +++ b/modules/video_coding/fec_controller_unittest.cc @@ -14,6 +14,7 @@ #include +#include "api/environment/environment_factory.h" #include "modules/include/module_fec_types.h" #include "modules/video_coding/fec_controller_default.h" #include "system_wrappers/include/clock.h" @@ -31,8 +32,8 @@ class ProtectionBitrateCalculatorTest : public ::testing::Test { class ProtectionCallback : public VCMProtectionCallback { public: - int ProtectionRequest(const FecProtectionParams* delta_params, - const FecProtectionParams* key_params, + int ProtectionRequest(const FecProtectionParams* /* delta_params */, + const FecProtectionParams* /* key_params */, uint32_t* sent_video_rate_bps, uint32_t* sent_nack_rate_bps, uint32_t* sent_fec_rate_bps) override { @@ -41,7 +42,7 @@ class ProtectionBitrateCalculatorTest : public ::testing::Test { *sent_fec_rate_bps = fec_rate_bps_; return 0; } - void SetRetransmissionMode(int retransmission_mode) {} + void SetRetransmissionMode(int /* retransmission_mode */) {} uint32_t fec_rate_bps_ = 0; uint32_t nack_rate_bps_ = 0; @@ -50,7 +51,8 @@ class ProtectionBitrateCalculatorTest : public ::testing::Test { // Note: simulated clock starts at 1 seconds, since parts of webrtc use 0 as // a special case (e.g. frame rate in media optimization). ProtectionBitrateCalculatorTest() - : clock_(1000), fec_controller_(&clock_, &protection_callback_) {} + : clock_(1000), + fec_controller_(CreateEnvironment(&clock_), &protection_callback_) {} SimulatedClock clock_; ProtectionCallback protection_callback_; diff --git a/modules/video_coding/frame_dependencies_calculator.cc b/modules/video_coding/frame_dependencies_calculator.cc index 7ca59f779a..64a44762f4 100644 --- a/modules/video_coding/frame_dependencies_calculator.cc +++ b/modules/video_coding/frame_dependencies_calculator.cc @@ -11,12 +11,15 @@ #include +#include #include +#include #include #include "absl/algorithm/container.h" #include "absl/container/inlined_vector.h" #include "api/array_view.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -24,7 +27,7 @@ namespace webrtc { absl::InlinedVector FrameDependenciesCalculator::FromBuffersUsage( int64_t frame_id, - rtc::ArrayView buffers_usage) { + ArrayView buffers_usage) { absl::InlinedVector dependencies; RTC_DCHECK_GT(buffers_usage.size(), 0); for (const CodecBufferUsage& buffer_usage : buffers_usage) { @@ -41,7 +44,7 @@ absl::InlinedVector FrameDependenciesCalculator::FromBuffersUsage( continue; } const BufferUsage& buffer = buffers_[buffer_usage.id]; - if (buffer.frame_id == absl::nullopt) { + if (buffer.frame_id == std::nullopt) { RTC_LOG(LS_ERROR) << "Odd configuration: frame " << frame_id << " references buffer #" << buffer_usage.id << " that was never updated."; diff --git a/modules/video_coding/frame_dependencies_calculator.h b/modules/video_coding/frame_dependencies_calculator.h index 2c4a8502e1..3a354c563a 100644 --- a/modules/video_coding/frame_dependencies_calculator.h +++ b/modules/video_coding/frame_dependencies_calculator.h @@ -13,10 +13,9 @@ #include -#include +#include #include "absl/container/inlined_vector.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include 
"common_video/generic_frame_descriptor/generic_frame_info.h" @@ -33,11 +32,11 @@ class FrameDependenciesCalculator { // Calculates frame dependencies based on previous encoder buffer usage. absl::InlinedVector FromBuffersUsage( int64_t frame_id, - rtc::ArrayView buffers_usage); + ArrayView buffers_usage); private: struct BufferUsage { - absl::optional frame_id; + std::optional frame_id; absl::InlinedVector dependencies; }; diff --git a/modules/video_coding/frame_helpers.cc b/modules/video_coding/frame_helpers.cc index e25eac8a18..1ce54c08fd 100644 --- a/modules/video_coding/frame_helpers.cc +++ b/modules/video_coding/frame_helpers.cc @@ -10,8 +10,18 @@ #include "modules/video_coding/frame_helpers.h" +#include +#include +#include +#include #include +#include "absl/container/inlined_vector.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/encoded_frame.h" +#include "api/video/encoded_image.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { @@ -74,6 +84,15 @@ std::unique_ptr CombineAndDeleteFrames( // Spatial index of combined frame is set equal to spatial index of its top // spatial layer. first_frame->SetSpatialIndex(last_frame.SpatialIndex().value_or(0)); + // Each spatial layer (at the same rtp_timestamp) sends corruption data. + // Reconstructed (combined) frame will be of resolution of the highest spatial + // layer and that's why the corruption data for the highest layer should be + // used to calculate the metric on the combined frame for the best outcome. + // + // TODO: bugs.webrtc.org/358039777 - Fix for LxTy scalability, currently only + // works for LxTy_KEY and L1Ty. + first_frame->SetFrameInstrumentationData( + last_frame.CodecSpecific()->frame_instrumentation_data); first_frame->video_timing_mutable()->network2_timestamp_ms = last_frame.video_timing().network2_timestamp_ms; diff --git a/modules/video_coding/frame_helpers.h b/modules/video_coding/frame_helpers.h index 56ee593678..724058a636 100644 --- a/modules/video_coding/frame_helpers.h +++ b/modules/video_coding/frame_helpers.h @@ -14,6 +14,8 @@ #include #include "absl/container/inlined_vector.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/encoded_frame.h" namespace webrtc { diff --git a/modules/video_coding/frame_helpers_unittest.cc b/modules/video_coding/frame_helpers_unittest.cc index 1f73689c0a..ed319b68a3 100644 --- a/modules/video_coding/frame_helpers_unittest.cc +++ b/modules/video_coding/frame_helpers_unittest.cc @@ -10,12 +10,48 @@ #include "modules/video_coding/frame_helpers.h" +#include +#include +#include +#include +#include +#include + +#include "absl/container/inlined_vector.h" +#include "api/scoped_refptr.h" #include "api/units/timestamp.h" +#include "api/video/encoded_frame.h" +#include "api/video/encoded_image.h" +#include "common_video/frame_instrumentation_data.h" +#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { namespace { +using ::testing::ElementsAre; + +constexpr uint32_t kRtpTimestamp = 123456710; + +webrtc::scoped_refptr CreateEncodedImageBufferOfSizeN( + size_t n, + uint8_t x) { + webrtc::scoped_refptr buffer = + EncodedImageBuffer::Create(n); + for (size_t i = 0; i < n; ++i) { + buffer->data()[i] = static_cast(x + i); + } + return buffer; +} + +// Returns an `EncodedFrame` with data values [x, x+1, ... x+(n-1)]. 
+EncodedFrame CreateEncodedImageOfSizeN(size_t n, uint8_t x) { + EncodedFrame image; + image.SetEncodedData(CreateEncodedImageBufferOfSizeN(n, x)); + image.SetRtpTimestamp(kRtpTimestamp); + return image; +} + TEST(FrameHasBadRenderTimingTest, LargePositiveFrameDelayIsBad) { Timestamp render_time = Timestamp::Seconds(12); Timestamp now = Timestamp::Seconds(0); @@ -30,5 +66,54 @@ TEST(FrameHasBadRenderTimingTest, LargeNegativeFrameDelayIsBad) { EXPECT_TRUE(FrameHasBadRenderTiming(render_time, now)); } +TEST(FrameInstrumentationDataTest, + CombinedFrameHasSameDataAsHighestSpatialLayer) { + // Assume L2T1 scalability mode. + EncodedFrame spatial_layer_1 = CreateEncodedImageOfSizeN(/*n=*/10, /*x=*/1); + const FrameInstrumentationData frame_ins_data_1 = { + .sequence_index = 100, + .communicate_upper_bits = false, + .std_dev = 0.5, + .luma_error_threshold = 5, + .chroma_error_threshold = 4, + .sample_values = {0.2, 0.7, 1.9}}; + spatial_layer_1.SetFrameInstrumentationData(frame_ins_data_1); + + EncodedFrame spatial_layer_2 = CreateEncodedImageOfSizeN(/*n=*/10, /*x=*/11); + FrameInstrumentationData frame_ins_data_2 = { + .sequence_index = 10, + .communicate_upper_bits = false, + .std_dev = 1.0, + .luma_error_threshold = 3, + .chroma_error_threshold = 4, + .sample_values = {0.1, 0.3, 2.1}}; + spatial_layer_2.SetFrameInstrumentationData(frame_ins_data_2); + + absl::InlinedVector, 4> frames; + frames.push_back(std::make_unique(spatial_layer_1)); + frames.push_back(std::make_unique(spatial_layer_2)); + + std::optional< + std::variant> + data = CombineAndDeleteFrames(std::move(frames)) + ->CodecSpecific() + ->frame_instrumentation_data; + + ASSERT_TRUE(data.has_value()); + ASSERT_TRUE(std::holds_alternative(*data)); + FrameInstrumentationData frame_instrumentation_data = + std::get(*data); + + // Expect to have the same frame_instrumentation_data as the highest spatial + // layer. 
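// Illustrative note, not part of the original test: the expectations just
// below line up with the frame_helpers.cc hunk earlier in this patch, where
// CombineAndDeleteFrames() copies the corruption-detection metadata from the
// last (highest) spatial layer into the combined frame, roughly:
//   first_frame->SetFrameInstrumentationData(
//       last_frame.CodecSpecific()->frame_instrumentation_data);
// which is why the combined frame is expected to carry frame_ins_data_2
// (sequence_index 10, std_dev 1.0) rather than frame_ins_data_1.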
+ EXPECT_EQ(frame_instrumentation_data.sequence_index, 10); + EXPECT_FALSE(frame_instrumentation_data.communicate_upper_bits); + EXPECT_EQ(frame_instrumentation_data.std_dev, 1.0); + EXPECT_EQ(frame_instrumentation_data.luma_error_threshold, 3); + EXPECT_EQ(frame_instrumentation_data.chroma_error_threshold, 4); + EXPECT_THAT(frame_instrumentation_data.sample_values, + ElementsAre(0.1, 0.3, 2.1)); +} + } // namespace } // namespace webrtc diff --git a/modules/video_coding/generic_decoder.cc b/modules/video_coding/generic_decoder.cc index a13fe8e8c4..b3d6e51553 100644 --- a/modules/video_coding/generic_decoder.cc +++ b/modules/video_coding/generic_decoder.cc @@ -13,19 +13,34 @@ #include #include -#include +#include #include +#include +#include #include +#include #include "absl/algorithm/container.h" -#include "absl/types/optional.h" +#include "api/field_trials_view.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/encoded_frame.h" +#include "api/video/encoded_image.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" #include "api/video/video_timing.h" #include "api/video_codecs/video_decoder.h" +#include "common_video/frame_instrumentation_data.h" +#include "common_video/include/corruption_score_calculator.h" #include "modules/include/module_common_types_public.h" +#include "modules/video_coding/encoded_frame.h" +#include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/timing/timing.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/string_encode.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" #include "system_wrappers/include/metrics.h" @@ -41,8 +56,11 @@ constexpr size_t kDecoderFrameMemoryLength = 10; VCMDecodedFrameCallback::VCMDecodedFrameCallback( VCMTiming* timing, Clock* clock, - const FieldTrialsView& field_trials) - : _clock(clock), _timing(timing) { + const FieldTrialsView& /* field_trials */, + CorruptionScoreCalculator* corruption_score_calculator) + : _clock(clock), + _timing(timing), + corruption_score_calculator_(corruption_score_calculator) { ntp_offset_ = _clock->CurrentNtpInMilliseconds() - _clock->TimeInMilliseconds(); } @@ -73,15 +91,15 @@ int32_t VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage) { int32_t VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, int64_t decode_time_ms) { Decoded(decodedImage, - decode_time_ms >= 0 ? absl::optional(decode_time_ms) - : absl::nullopt, - absl::nullopt); + decode_time_ms >= 0 ? 
std::optional(decode_time_ms) + : std::nullopt, + std::nullopt); return WEBRTC_VIDEO_CODEC_OK; } -std::pair, size_t> +std::pair, size_t> VCMDecodedFrameCallback::FindFrameInfo(uint32_t rtp_timestamp) { - absl::optional frame_info; + std::optional frame_info; auto it = absl::c_find_if(frame_infos_, [rtp_timestamp](const auto& entry) { return entry.rtp_timestamp == rtp_timestamp || @@ -100,20 +118,21 @@ VCMDecodedFrameCallback::FindFrameInfo(uint32_t rtp_timestamp) { } void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, - absl::optional decode_time_ms, - absl::optional qp) { + std::optional decode_time_ms, + std::optional qp) { RTC_DCHECK(_receiveCallback) << "Callback must not be null at this point"; - TRACE_EVENT_INSTANT1("webrtc", "VCMDecodedFrameCallback::Decoded", - "timestamp", decodedImage.timestamp()); + TRACE_EVENT( + "webrtc", "VCMDecodedFrameCallback::Decoded", + perfetto::TerminatingFlow::ProcessScoped(decodedImage.rtp_timestamp())); // TODO(holmer): We should improve this so that we can handle multiple // callbacks from one call to Decode(). - absl::optional frame_info; + std::optional frame_info; int timestamp_map_size = 0; int dropped_frames = 0; { MutexLock lock(&lock_); std::tie(frame_info, dropped_frames) = - FindFrameInfo(decodedImage.timestamp()); + FindFrameInfo(decodedImage.rtp_timestamp()); timestamp_map_size = frame_infos_.size(); } if (dropped_frames > 0) { @@ -123,10 +142,21 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, if (!frame_info) { RTC_LOG(LS_WARNING) << "Too many frames backed up in the decoder, dropping " "frame with timestamp " - << decodedImage.timestamp(); + << decodedImage.rtp_timestamp(); return; } + std::optional corruption_score; + if (corruption_score_calculator_ && + frame_info->frame_instrumentation_data.has_value()) { + if (const FrameInstrumentationData* data = + std::get_if( + &*frame_info->frame_instrumentation_data)) { + corruption_score = corruption_score_calculator_->CalculateCorruptionScore( + decodedImage, *data); + } + } + decodedImage.set_ntp_time_ms(frame_info->ntp_time_ms); decodedImage.set_packet_infos(frame_info->packet_infos); decodedImage.set_rotation(frame_info->rotation); @@ -203,7 +233,7 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, timing_frame_info.decode_finish_ms = now.ms(); timing_frame_info.render_time_ms = frame_info->render_time ? frame_info->render_time->ms() : -1; - timing_frame_info.rtp_timestamp = decodedImage.timestamp(); + timing_frame_info.rtp_timestamp = decodedImage.rtp_timestamp(); timing_frame_info.receive_start_ms = frame_info->timing.receive_start_ms; timing_frame_info.receive_finish_ms = frame_info->timing.receive_finish_ms; RTC_HISTOGRAM_COUNTS_1000( @@ -219,9 +249,12 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage, decodedImage.set_timestamp_us( frame_info->render_time ? 
frame_info->render_time->us() : -1); - _receiveCallback->FrameToRender(decodedImage, qp, decode_time, - frame_info->content_type, - frame_info->frame_type); + _receiveCallback->OnFrameToRender({.video_frame = decodedImage, + .qp = qp, + .decode_time = decode_time, + .content_type = frame_info->content_type, + .frame_type = frame_info->frame_type, + .corruption_score = corruption_score}); } void VCMDecodedFrameCallback::OnDecoderInfoChanged( @@ -282,29 +315,36 @@ bool VCMGenericDecoder::Configure(const VideoDecoder::Settings& settings) { } int32_t VCMGenericDecoder::Decode(const EncodedFrame& frame, Timestamp now) { - return Decode(frame, now, frame.RenderTimeMs()); + return Decode(frame, now, frame.RenderTimeMs(), + frame.CodecSpecific()->frame_instrumentation_data); } int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, Timestamp now) { - return Decode(frame, now, frame.RenderTimeMs()); + return Decode(frame, now, frame.RenderTimeMs(), + frame.CodecSpecific()->frame_instrumentation_data); } -int32_t VCMGenericDecoder::Decode(const EncodedImage& frame, - Timestamp now, - int64_t render_time_ms) { - TRACE_EVENT1("webrtc", "VCMGenericDecoder::Decode", "timestamp", - frame.Timestamp()); +int32_t VCMGenericDecoder::Decode( + const EncodedImage& frame, + Timestamp now, + int64_t render_time_ms, + const std::optional< + std::variant>& + frame_instrumentation_data) { + TRACE_EVENT("webrtc", "VCMGenericDecoder::Decode", + perfetto::Flow::ProcessScoped(frame.RtpTimestamp())); FrameInfo frame_info; - frame_info.rtp_timestamp = frame.Timestamp(); + frame_info.rtp_timestamp = frame.RtpTimestamp(); frame_info.decode_start = now; frame_info.render_time = render_time_ms >= 0 - ? absl::make_optional(Timestamp::Millis(render_time_ms)) - : absl::nullopt; + ? std::make_optional(Timestamp::Millis(render_time_ms)) + : std::nullopt; frame_info.rotation = frame.rotation(); frame_info.timing = frame.video_timing(); frame_info.ntp_time_ms = frame.ntp_time_ms_; frame_info.packet_infos = frame.PacketInfos(); + frame_info.frame_instrumentation_data = frame_instrumentation_data; // Set correctly only for key frames. Thus, use latest key frame // content type. If the corresponding key frame was lost, decode will fail @@ -329,18 +369,7 @@ int32_t VCMGenericDecoder::Decode(const EncodedImage& frame, } _callback->OnDecoderInfoChanged(std::move(decoder_info)); } - if (ret < WEBRTC_VIDEO_CODEC_OK) { - const absl::optional ssrc = - !frame_info.packet_infos.empty() - ? absl::make_optional(frame_info.packet_infos[0].ssrc()) - : absl::nullopt; - RTC_LOG(LS_WARNING) << "Failed to decode frame with timestamp " - << frame.Timestamp() << ", ssrc " - << (ssrc ? rtc::ToString(*ssrc) : "") - << ", error code: " << ret; - _callback->ClearTimestampMap(); - } else if (ret == WEBRTC_VIDEO_CODEC_NO_OUTPUT) { - // No output. 
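// A minimal sketch of a VCMReceiveCallback written against the new
// aggregate-style OnFrameToRender() call above. It assumes FrameToRender is
// declared in modules/video_coding/include/video_coding_defines.h with the
// fields this patch initializes (video_frame, qp, decode_time, content_type,
// frame_type, corruption_score) and that the remaining VCMReceiveCallback
// methods keep their default implementations; the class name is illustrative.
class CorruptionLoggingReceiveCallback : public VCMReceiveCallback {
 public:
  int32_t OnFrameToRender(const FrameToRender& arguments) override {
    // corruption_score is only populated when a CorruptionScoreCalculator was
    // injected into VCMDecodedFrameCallback and the decoded frame carried
    // FrameInstrumentationData.
    if (arguments.corruption_score.has_value()) {
      RTC_LOG(LS_INFO) << "Corruption score for frame "
                       << arguments.video_frame.rtp_timestamp() << ": "
                       << *arguments.corruption_score;
    }
    return 0;
  }
};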
+ if (ret < WEBRTC_VIDEO_CODEC_OK || ret == WEBRTC_VIDEO_CODEC_NO_OUTPUT) { _callback->ClearTimestampMap(); } return ret; diff --git a/modules/video_coding/generic_decoder.h b/modules/video_coding/generic_decoder.h index b1fb1f39f4..160576c3f4 100644 --- a/modules/video_coding/generic_decoder.h +++ b/modules/video_coding/generic_decoder.h @@ -11,18 +11,31 @@ #ifndef MODULES_VIDEO_CODING_GENERIC_DECODER_H_ #define MODULES_VIDEO_CODING_GENERIC_DECODER_H_ +#include #include #include -#include +#include #include +#include #include "api/field_trials_view.h" +#include "api/rtp_packet_infos.h" #include "api/sequence_checker.h" +#include "api/units/timestamp.h" #include "api/video/encoded_frame.h" +#include "api/video/encoded_image.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" #include "api/video_codecs/video_decoder.h" +#include "common_video/frame_instrumentation_data.h" +#include "common_video/include/corruption_score_calculator.h" #include "modules/video_coding/encoded_frame.h" #include "modules/video_coding/timing/timing.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" +#include "system_wrappers/include/clock.h" namespace webrtc { @@ -39,8 +52,8 @@ struct FrameInfo { // This is likely not optional, but some inputs seem to sometimes be negative. // TODO(bugs.webrtc.org/13756): See if this can be replaced with Timestamp // once all inputs to this field use Timestamp instead of an integer. - absl::optional render_time; - absl::optional decode_start; + std::optional render_time; + std::optional decode_start; VideoRotation rotation; VideoContentType content_type; EncodedImage::Timing timing; @@ -48,13 +61,18 @@ struct FrameInfo { RtpPacketInfos packet_infos; // ColorSpace is not stored here, as it might be modified by decoders. 
VideoFrameType frame_type; + std::optional< + std::variant> + frame_instrumentation_data; }; class VCMDecodedFrameCallback : public DecodedImageCallback { public: - VCMDecodedFrameCallback(VCMTiming* timing, - Clock* clock, - const FieldTrialsView& field_trials); + VCMDecodedFrameCallback( + VCMTiming* timing, + Clock* clock, + const FieldTrialsView& field_trials, + CorruptionScoreCalculator* corruption_score_calculator); ~VCMDecodedFrameCallback() override; void SetUserReceiveCallback(VCMReceiveCallback* receiveCallback); VCMReceiveCallback* UserReceiveCallback(); @@ -62,8 +80,8 @@ class VCMDecodedFrameCallback : public DecodedImageCallback { int32_t Decoded(VideoFrame& decodedImage) override; int32_t Decoded(VideoFrame& decodedImage, int64_t decode_time_ms) override; void Decoded(VideoFrame& decodedImage, - absl::optional decode_time_ms, - absl::optional qp) override; + std::optional decode_time_ms, + std::optional qp) override; void OnDecoderInfoChanged(const VideoDecoder::DecoderInfo& decoder_info); @@ -71,7 +89,7 @@ class VCMDecodedFrameCallback : public DecodedImageCallback { void ClearTimestampMap(); private: - std::pair, size_t> FindFrameInfo( + std::pair, size_t> FindFrameInfo( uint32_t rtp_timestamp) RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); SequenceChecker construction_thread_; @@ -86,6 +104,7 @@ class VCMDecodedFrameCallback : public DecodedImageCallback { Mutex lock_; std::deque frame_infos_ RTC_GUARDED_BY(lock_); int64_t ntp_offset_; + CorruptionScoreCalculator* const corruption_score_calculator_; }; class VCMGenericDecoder { @@ -120,7 +139,10 @@ class VCMGenericDecoder { private: int32_t Decode(const EncodedImage& frame, Timestamp now, - int64_t render_time_ms); + int64_t render_time_ms, + const std::optional>& + frame_instrumentation_data); VCMDecodedFrameCallback* _callback = nullptr; VideoDecoder* const decoder_; VideoContentType _last_keyframe_content_type; diff --git a/modules/video_coding/generic_decoder_unittest.cc b/modules/video_coding/generic_decoder_unittest.cc index e8d1dad09f..52fc057956 100644 --- a/modules/video_coding/generic_decoder_unittest.cc +++ b/modules/video_coding/generic_decoder_unittest.cc @@ -11,14 +11,27 @@ #include "modules/video_coding/generic_decoder.h" #include -#include +#include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/rtp_packet_infos.h" +#include "api/scoped_refptr.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/encoded_frame.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_timing.h" #include "api/video_codecs/video_decoder.h" +#include "common_video/frame_instrumentation_data.h" +#include "common_video/include/corruption_score_calculator.h" #include "common_video/test/utilities.h" +#include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/timing/timing.h" #include "system_wrappers/include/clock.h" #include "test/fake_decoder.h" @@ -27,29 +40,37 @@ #include "test/scoped_key_value_config.h" #include "test/time_controller/simulated_time_controller.h" +using ::testing::Return; + namespace webrtc { namespace video_coding { +class MockCorruptionScoreCalculator : public CorruptionScoreCalculator { + public: + MOCK_METHOD(std::optional, + CalculateCorruptionScore, + (const VideoFrame& frame, + const FrameInstrumentationData& 
frame_instrumentation_data), + (override)); +}; + class ReceiveCallback : public VCMReceiveCallback { public: - int32_t FrameToRender(VideoFrame& frame, - absl::optional qp, - TimeDelta decode_time, - VideoContentType content_type, - VideoFrameType frame_type) override { - frames_.push_back(frame); + int32_t OnFrameToRender(const FrameToRender& arguments) override { + frames_.push_back(arguments.video_frame); + last_corruption_score_ = arguments.corruption_score; return 0; } - absl::optional PopLastFrame() { + std::optional PopLastFrame() { if (frames_.empty()) - return absl::nullopt; + return std::nullopt; auto ret = frames_.front(); frames_.pop_back(); return ret; } - rtc::ArrayView GetAllFrames() const { return frames_; } + ArrayView GetAllFrames() const { return frames_; } void OnDroppedFrames(uint32_t frames_dropped) { frames_dropped_ += frames_dropped; @@ -57,9 +78,14 @@ class ReceiveCallback : public VCMReceiveCallback { uint32_t frames_dropped() const { return frames_dropped_; } + std::optional last_corruption_score() const { + return last_corruption_score_; + } + private: std::vector frames_; uint32_t frames_dropped_ = 0; + std::optional last_corruption_score_; }; class GenericDecoderTest : public ::testing::Test { @@ -69,7 +95,10 @@ class GenericDecoderTest : public ::testing::Test { clock_(time_controller_.GetClock()), timing_(time_controller_.GetClock(), field_trials_), decoder_(time_controller_.GetTaskQueueFactory()), - vcm_callback_(&timing_, time_controller_.GetClock(), field_trials_), + vcm_callback_(&timing_, + time_controller_.GetClock(), + field_trials_, + &corruption_score_calculator_), generic_decoder_(&decoder_) {} void SetUp() override { @@ -90,6 +119,7 @@ class GenericDecoderTest : public ::testing::Test { VCMDecodedFrameCallback vcm_callback_; VCMGenericDecoder generic_decoder_; ReceiveCallback user_callback_; + MockCorruptionScoreCalculator corruption_score_calculator_; }; TEST_F(GenericDecoderTest, PassesPacketInfos) { @@ -98,7 +128,7 @@ TEST_F(GenericDecoderTest, PassesPacketInfos) { encoded_frame.SetPacketInfos(packet_infos); generic_decoder_.Decode(encoded_frame, clock_->CurrentTime()); time_controller_.AdvanceTime(TimeDelta::Millis(10)); - absl::optional decoded_frame = user_callback_.PopLastFrame(); + std::optional decoded_frame = user_callback_.PopLastFrame(); ASSERT_TRUE(decoded_frame.has_value()); EXPECT_EQ(decoded_frame->packet_infos().size(), 3U); } @@ -108,7 +138,7 @@ TEST_F(GenericDecoderTest, FrameDroppedIfTooManyFramesInFlight) { decoder_.SetDelayedDecoding(10); for (int i = 0; i < kMaxFramesInFlight + 1; ++i) { EncodedFrame encoded_frame; - encoded_frame.SetTimestamp(90000 * i); + encoded_frame.SetRtpTimestamp(90000 * i); generic_decoder_.Decode(encoded_frame, clock_->CurrentTime()); } @@ -118,7 +148,7 @@ TEST_F(GenericDecoderTest, FrameDroppedIfTooManyFramesInFlight) { ASSERT_EQ(10U, frames.size()); // Expect that the first frame was dropped since all decodes released at the // same time and the oldest frame info is the first one dropped. 
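// A minimal sketch of a concrete CorruptionScoreCalculator matching the
// interface mocked above (CalculateCorruptionScore is assumed to be its only
// pure-virtual member); the class name and the constant score are
// illustrative. VCMDecodedFrameCallback invokes the calculator from Decoded()
// whenever the frame's FrameInfo carries FrameInstrumentationData, and the
// result is forwarded to OnFrameToRender() as FrameToRender::corruption_score.
class FixedScoreCorruptionCalculator : public CorruptionScoreCalculator {
 public:
  std::optional<double> CalculateCorruptionScore(
      const VideoFrame& /*frame*/,
      const FrameInstrumentationData& data) override {
    // A real implementation would filter the decoded frame and compare it
    // against data.sample_values using the luma/chroma error thresholds.
    if (data.sample_values.empty()) {
      return std::nullopt;
    }
    return 0.0;
  }
};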
- EXPECT_EQ(frames[0].timestamp(), 90000u); + EXPECT_EQ(frames[0].rtp_timestamp(), 90000u); EXPECT_EQ(1u, user_callback_.frames_dropped()); } @@ -134,7 +164,7 @@ TEST_F(GenericDecoderTest, PassesPacketInfosForDelayedDecoders) { } time_controller_.AdvanceTime(TimeDelta::Millis(200)); - absl::optional decoded_frame = user_callback_.PopLastFrame(); + std::optional decoded_frame = user_callback_.PopLastFrame(); ASSERT_TRUE(decoded_frame.has_value()); EXPECT_EQ(decoded_frame->packet_infos().size(), 3U); } @@ -143,11 +173,11 @@ TEST_F(GenericDecoderTest, MaxCompositionDelayNotSetByDefault) { EncodedFrame encoded_frame; generic_decoder_.Decode(encoded_frame, clock_->CurrentTime()); time_controller_.AdvanceTime(TimeDelta::Millis(10)); - absl::optional decoded_frame = user_callback_.PopLastFrame(); + std::optional decoded_frame = user_callback_.PopLastFrame(); ASSERT_TRUE(decoded_frame.has_value()); EXPECT_THAT( decoded_frame->render_parameters().max_composition_delay_in_frames, - testing::Eq(absl::nullopt)); + testing::Eq(std::nullopt)); } TEST_F(GenericDecoderTest, MaxCompositionDelayActivatedByPlayoutDelay) { @@ -156,10 +186,10 @@ TEST_F(GenericDecoderTest, MaxCompositionDelayActivatedByPlayoutDelay) { // is specified as X,Y, where X=0, Y>0. constexpr int kMaxCompositionDelayInFrames = 3; // ~50 ms at 60 fps. timing_.SetMaxCompositionDelayInFrames( - absl::make_optional(kMaxCompositionDelayInFrames)); + std::make_optional(kMaxCompositionDelayInFrames)); generic_decoder_.Decode(encoded_frame, clock_->CurrentTime()); time_controller_.AdvanceTime(TimeDelta::Millis(10)); - absl::optional decoded_frame = user_callback_.PopLastFrame(); + std::optional decoded_frame = user_callback_.PopLastFrame(); ASSERT_TRUE(decoded_frame.has_value()); EXPECT_THAT( decoded_frame->render_parameters().max_composition_delay_in_frames, @@ -170,7 +200,7 @@ TEST_F(GenericDecoderTest, IsLowLatencyStreamFalseByDefault) { EncodedFrame encoded_frame; generic_decoder_.Decode(encoded_frame, clock_->CurrentTime()); time_controller_.AdvanceTime(TimeDelta::Millis(10)); - absl::optional decoded_frame = user_callback_.PopLastFrame(); + std::optional decoded_frame = user_callback_.PopLastFrame(); ASSERT_TRUE(decoded_frame.has_value()); EXPECT_FALSE(decoded_frame->render_parameters().use_low_latency_rendering); } @@ -179,14 +209,37 @@ TEST_F(GenericDecoderTest, IsLowLatencyStreamActivatedByPlayoutDelay) { EncodedFrame encoded_frame; const VideoPlayoutDelay kPlayoutDelay(TimeDelta::Zero(), TimeDelta::Millis(50)); - timing_.set_min_playout_delay(kPlayoutDelay.min()); - timing_.set_max_playout_delay(kPlayoutDelay.max()); + timing_.set_playout_delay(kPlayoutDelay); generic_decoder_.Decode(encoded_frame, clock_->CurrentTime()); time_controller_.AdvanceTime(TimeDelta::Millis(10)); - absl::optional decoded_frame = user_callback_.PopLastFrame(); + std::optional decoded_frame = user_callback_.PopLastFrame(); ASSERT_TRUE(decoded_frame.has_value()); EXPECT_TRUE(decoded_frame->render_parameters().use_low_latency_rendering); } +TEST_F(GenericDecoderTest, CallCalculateCorruptionScoreInDecoded) { + constexpr double kCorruptionScore = 0.76; + + EXPECT_CALL(corruption_score_calculator_, CalculateCorruptionScore) + .WillOnce(Return(kCorruptionScore)); + + constexpr uint32_t kRtpTimestamp = 1; + FrameInfo frame_info; + frame_info.frame_instrumentation_data = FrameInstrumentationData{}; + frame_info.rtp_timestamp = kRtpTimestamp; + frame_info.decode_start = Timestamp::Zero(); + frame_info.content_type = VideoContentType::UNSPECIFIED; + 
frame_info.frame_type = VideoFrameType::kVideoFrameDelta; + VideoFrame video_frame = VideoFrame::Builder() + .set_video_frame_buffer(I420Buffer::Create(5, 5)) + .set_rtp_timestamp(kRtpTimestamp) + .build(); + vcm_callback_.Map(std::move(frame_info)); + + vcm_callback_.Decoded(video_frame); + + EXPECT_EQ(user_callback_.last_corruption_score(), kCorruptionScore); +} + } // namespace video_coding } // namespace webrtc diff --git a/modules/video_coding/h264_packet_buffer.cc b/modules/video_coding/h264_packet_buffer.cc deleted file mode 100644 index 6096665bda..0000000000 --- a/modules/video_coding/h264_packet_buffer.cc +++ /dev/null @@ -1,287 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/video_coding/h264_packet_buffer.h" - -#include -#include -#include -#include - -#include "api/array_view.h" -#include "api/rtp_packet_info.h" -#include "api/video/video_frame_type.h" -#include "common_video/h264/h264_common.h" -#include "modules/rtp_rtcp/source/rtp_header_extensions.h" -#include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "modules/rtp_rtcp/source/rtp_video_header.h" -#include "modules/video_coding/codecs/h264/include/h264_globals.h" -#include "rtc_base/checks.h" -#include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/logging.h" -#include "rtc_base/numerics/sequence_number_util.h" - -namespace webrtc { -namespace { -int64_t EuclideanMod(int64_t n, int64_t div) { - RTC_DCHECK_GT(div, 0); - return (n %= div) < 0 ? n + div : n; -} - -rtc::ArrayView GetNaluInfos( - const RTPVideoHeaderH264& h264_header) { - if (h264_header.nalus_length > kMaxNalusPerPacket) { - return {}; - } - - return rtc::MakeArrayView(h264_header.nalus, h264_header.nalus_length); -} - -bool IsFirstPacketOfFragment(const RTPVideoHeaderH264& h264_header) { - return h264_header.nalus_length > 0; -} - -bool BeginningOfIdr(const H264PacketBuffer::Packet& packet) { - const auto& h264_header = - absl::get(packet.video_header.video_type_header); - const bool contains_idr_nalu = - absl::c_any_of(GetNaluInfos(h264_header), [](const auto& nalu_info) { - return nalu_info.type == H264::NaluType::kIdr; - }); - switch (h264_header.packetization_type) { - case kH264StapA: - case kH264SingleNalu: { - return contains_idr_nalu; - } - case kH264FuA: { - return contains_idr_nalu && IsFirstPacketOfFragment(h264_header); - } - } -} - -bool HasSps(const H264PacketBuffer::Packet& packet) { - auto& h264_header = - absl::get(packet.video_header.video_type_header); - return absl::c_any_of(GetNaluInfos(h264_header), [](const auto& nalu_info) { - return nalu_info.type == H264::NaluType::kSps; - }); -} - -// TODO(bugs.webrtc.org/13157): Update the H264 depacketizer so we don't have to -// fiddle with the payload at this point. 
-rtc::CopyOnWriteBuffer FixVideoPayload(rtc::ArrayView payload, - const RTPVideoHeader& video_header) { - constexpr uint8_t kStartCode[] = {0, 0, 0, 1}; - - const auto& h264_header = - absl::get(video_header.video_type_header); - - rtc::CopyOnWriteBuffer result; - switch (h264_header.packetization_type) { - case kH264StapA: { - const uint8_t* payload_end = payload.data() + payload.size(); - const uint8_t* nalu_ptr = payload.data() + 1; - while (nalu_ptr < payload_end - 1) { - // The first two bytes describe the length of the segment, where a - // segment is the nalu type plus nalu payload. - uint16_t segment_length = nalu_ptr[0] << 8 | nalu_ptr[1]; - nalu_ptr += 2; - - if (nalu_ptr + segment_length <= payload_end) { - result.AppendData(kStartCode); - result.AppendData(nalu_ptr, segment_length); - } - nalu_ptr += segment_length; - } - return result; - } - - case kH264FuA: { - if (IsFirstPacketOfFragment(h264_header)) { - result.AppendData(kStartCode); - } - result.AppendData(payload); - return result; - } - - case kH264SingleNalu: { - result.AppendData(kStartCode); - result.AppendData(payload); - return result; - } - } - - RTC_DCHECK_NOTREACHED(); - return result; -} - -} // namespace - -H264PacketBuffer::H264PacketBuffer(bool idr_only_keyframes_allowed) - : idr_only_keyframes_allowed_(idr_only_keyframes_allowed) {} - -H264PacketBuffer::InsertResult H264PacketBuffer::InsertPacket( - std::unique_ptr packet) { - RTC_DCHECK(packet->video_header.codec == kVideoCodecH264); - - InsertResult result; - if (!absl::holds_alternative( - packet->video_header.video_type_header)) { - return result; - } - - int64_t unwrapped_seq_num = seq_num_unwrapper_.Unwrap(packet->seq_num); - auto& packet_slot = GetPacket(unwrapped_seq_num); - if (packet_slot != nullptr && - AheadOrAt(packet_slot->timestamp, packet->timestamp)) { - // The incoming `packet` is old or a duplicate. - return result; - } else { - packet_slot = std::move(packet); - } - - result.packets = FindFrames(unwrapped_seq_num); - return result; -} - -std::unique_ptr& H264PacketBuffer::GetPacket( - int64_t unwrapped_seq_num) { - return buffer_[EuclideanMod(unwrapped_seq_num, kBufferSize)]; -} - -bool H264PacketBuffer::BeginningOfStream( - const H264PacketBuffer::Packet& packet) const { - return HasSps(packet) || - (idr_only_keyframes_allowed_ && BeginningOfIdr(packet)); -} - -std::vector> -H264PacketBuffer::FindFrames(int64_t unwrapped_seq_num) { - std::vector> found_frames; - - Packet* packet = GetPacket(unwrapped_seq_num).get(); - RTC_CHECK(packet != nullptr); - - // Check if the packet is continuous or the beginning of a new coded video - // sequence. - if (unwrapped_seq_num - 1 != last_continuous_unwrapped_seq_num_) { - if (unwrapped_seq_num <= last_continuous_unwrapped_seq_num_ || - !BeginningOfStream(*packet)) { - return found_frames; - } - - last_continuous_unwrapped_seq_num_ = unwrapped_seq_num; - } - - for (int64_t seq_num = unwrapped_seq_num; - seq_num < unwrapped_seq_num + kBufferSize;) { - RTC_DCHECK_GE(seq_num, *last_continuous_unwrapped_seq_num_); - - // Packets that were never assembled into a completed frame will stay in - // the 'buffer_'. Check that the `packet` sequence number match the expected - // unwrapped sequence number. - if (static_cast(seq_num) != packet->seq_num) { - return found_frames; - } - - last_continuous_unwrapped_seq_num_ = seq_num; - // Last packet of the frame, try to assemble the frame. 
- if (packet->marker_bit) { - uint32_t rtp_timestamp = packet->timestamp; - - // Iterate backwards to find where the frame starts. - for (int64_t seq_num_start = seq_num; - seq_num_start > seq_num - kBufferSize; --seq_num_start) { - auto& prev_packet = GetPacket(seq_num_start - 1); - - if (prev_packet == nullptr || prev_packet->timestamp != rtp_timestamp) { - if (MaybeAssembleFrame(seq_num_start, seq_num, found_frames)) { - // Frame was assembled, continue to look for more frames. - break; - } else { - // Frame was not assembled, no subsequent frame will be continuous. - return found_frames; - } - } - } - } - - seq_num++; - packet = GetPacket(seq_num).get(); - if (packet == nullptr) { - return found_frames; - } - } - - return found_frames; -} - -bool H264PacketBuffer::MaybeAssembleFrame( - int64_t start_seq_num_unwrapped, - int64_t end_sequence_number_unwrapped, - std::vector>& frames) { - bool has_sps = false; - bool has_pps = false; - bool has_idr = false; - - int width = -1; - int height = -1; - - for (int64_t seq_num = start_seq_num_unwrapped; - seq_num <= end_sequence_number_unwrapped; ++seq_num) { - const auto& packet = GetPacket(seq_num); - const auto& h264_header = - absl::get(packet->video_header.video_type_header); - for (const auto& nalu : GetNaluInfos(h264_header)) { - has_idr |= nalu.type == H264::NaluType::kIdr; - has_sps |= nalu.type == H264::NaluType::kSps; - has_pps |= nalu.type == H264::NaluType::kPps; - } - - width = std::max(packet->video_header.width, width); - height = std::max(packet->video_header.height, height); - } - - if (has_idr) { - if (!idr_only_keyframes_allowed_ && (!has_sps || !has_pps)) { - return false; - } - } - - for (int64_t seq_num = start_seq_num_unwrapped; - seq_num <= end_sequence_number_unwrapped; ++seq_num) { - auto& packet = GetPacket(seq_num); - - packet->video_header.is_first_packet_in_frame = - (seq_num == start_seq_num_unwrapped); - packet->video_header.is_last_packet_in_frame = - (seq_num == end_sequence_number_unwrapped); - - if (packet->video_header.is_first_packet_in_frame) { - if (width > 0 && height > 0) { - packet->video_header.width = width; - packet->video_header.height = height; - } - - packet->video_header.frame_type = has_idr - ? VideoFrameType::kVideoFrameKey - : VideoFrameType::kVideoFrameDelta; - } - - packet->video_payload = - FixVideoPayload(packet->video_payload, packet->video_header); - - frames.push_back(std::move(packet)); - } - - return true; -} - -} // namespace webrtc diff --git a/modules/video_coding/h264_packet_buffer.h b/modules/video_coding/h264_packet_buffer.h deleted file mode 100644 index a72c240e82..0000000000 --- a/modules/video_coding/h264_packet_buffer.h +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_VIDEO_CODING_H264_PACKET_BUFFER_H_ -#define MODULES_VIDEO_CODING_H264_PACKET_BUFFER_H_ - -#include -#include -#include - -#include "absl/base/attributes.h" -#include "absl/types/optional.h" -#include "modules/video_coding/packet_buffer.h" -#include "rtc_base/numerics/sequence_number_unwrapper.h" - -namespace webrtc { - -class H264PacketBuffer { - public: - // The H264PacketBuffer does the same job as the PacketBuffer but for H264 - // only. To make it fit in with surronding code the PacketBuffer input/output - // classes are used. - using Packet = video_coding::PacketBuffer::Packet; - using InsertResult = video_coding::PacketBuffer::InsertResult; - - explicit H264PacketBuffer(bool idr_only_keyframes_allowed); - - ABSL_MUST_USE_RESULT InsertResult - InsertPacket(std::unique_ptr packet); - - private: - static constexpr int kBufferSize = 2048; - - std::unique_ptr& GetPacket(int64_t unwrapped_seq_num); - bool BeginningOfStream(const Packet& packet) const; - std::vector> FindFrames(int64_t unwrapped_seq_num); - bool MaybeAssembleFrame(int64_t start_seq_num_unwrapped, - int64_t end_sequence_number_unwrapped, - std::vector>& packets); - - const bool idr_only_keyframes_allowed_; - std::array, kBufferSize> buffer_; - absl::optional last_continuous_unwrapped_seq_num_; - SeqNumUnwrapper seq_num_unwrapper_; -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_H264_PACKET_BUFFER_H_ diff --git a/modules/video_coding/h264_packet_buffer_unittest.cc b/modules/video_coding/h264_packet_buffer_unittest.cc deleted file mode 100644 index 4f2331da28..0000000000 --- a/modules/video_coding/h264_packet_buffer_unittest.cc +++ /dev/null @@ -1,778 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/h264_packet_buffer.h" - -#include -#include -#include -#include -#include - -#include "api/array_view.h" -#include "api/video/render_resolution.h" -#include "common_video/h264/h264_common.h" -#include "rtc_base/system/unused.h" -#include "test/gmock.h" -#include "test/gtest.h" - -namespace webrtc { -namespace { - -using ::testing::ElementsAreArray; -using ::testing::Eq; -using ::testing::IsEmpty; -using ::testing::SizeIs; - -using H264::NaluType::kAud; -using H264::NaluType::kFuA; -using H264::NaluType::kIdr; -using H264::NaluType::kPps; -using H264::NaluType::kSlice; -using H264::NaluType::kSps; -using H264::NaluType::kStapA; - -constexpr int kBufferSize = 2048; - -std::vector StartCode() { - return {0, 0, 0, 1}; -} - -NaluInfo MakeNaluInfo(uint8_t type) { - NaluInfo res; - res.type = type; - res.sps_id = -1; - res.pps_id = -1; - return res; -} - -class Packet { - public: - explicit Packet(H264PacketizationTypes type); - - Packet& Idr(std::vector payload = {9, 9, 9}); - Packet& Slice(std::vector payload = {9, 9, 9}); - Packet& Sps(std::vector payload = {9, 9, 9}); - Packet& SpsWithResolution(RenderResolution resolution, - std::vector payload = {9, 9, 9}); - Packet& Pps(std::vector payload = {9, 9, 9}); - Packet& Aud(); - Packet& Marker(); - Packet& AsFirstFragment(); - Packet& Time(uint32_t rtp_timestamp); - Packet& SeqNum(uint16_t rtp_seq_num); - - std::unique_ptr Build(); - - private: - rtc::CopyOnWriteBuffer BuildFuaPayload() const; - rtc::CopyOnWriteBuffer BuildSingleNaluPayload() const; - rtc::CopyOnWriteBuffer BuildStapAPayload() const; - - RTPVideoHeaderH264& H264Header() { - return absl::get(video_header_.video_type_header); - } - const RTPVideoHeaderH264& H264Header() const { - return absl::get(video_header_.video_type_header); - } - - H264PacketizationTypes type_; - RTPVideoHeader video_header_; - bool first_fragment_ = false; - bool marker_bit_ = false; - uint32_t rtp_timestamp_ = 0; - uint16_t rtp_seq_num_ = 0; - std::vector> nalu_payloads_; -}; - -Packet::Packet(H264PacketizationTypes type) : type_(type) { - video_header_.video_type_header.emplace(); -} - -Packet& Packet::Idr(std::vector payload) { - auto& h264_header = H264Header(); - h264_header.nalus[h264_header.nalus_length++] = MakeNaluInfo(kIdr); - nalu_payloads_.push_back(std::move(payload)); - return *this; -} - -Packet& Packet::Slice(std::vector payload) { - auto& h264_header = H264Header(); - h264_header.nalus[h264_header.nalus_length++] = MakeNaluInfo(kSlice); - nalu_payloads_.push_back(std::move(payload)); - return *this; -} - -Packet& Packet::Sps(std::vector payload) { - auto& h264_header = H264Header(); - h264_header.nalus[h264_header.nalus_length++] = MakeNaluInfo(kSps); - nalu_payloads_.push_back(std::move(payload)); - return *this; -} - -Packet& Packet::SpsWithResolution(RenderResolution resolution, - std::vector payload) { - auto& h264_header = H264Header(); - h264_header.nalus[h264_header.nalus_length++] = MakeNaluInfo(kSps); - video_header_.width = resolution.Width(); - video_header_.height = resolution.Height(); - nalu_payloads_.push_back(std::move(payload)); - return *this; -} - -Packet& Packet::Pps(std::vector payload) { - auto& h264_header = H264Header(); - h264_header.nalus[h264_header.nalus_length++] = MakeNaluInfo(kPps); - nalu_payloads_.push_back(std::move(payload)); - return *this; -} - -Packet& Packet::Aud() { - auto& h264_header = H264Header(); - h264_header.nalus[h264_header.nalus_length++] = MakeNaluInfo(kAud); - nalu_payloads_.push_back({}); - return 
*this; -} - -Packet& Packet::Marker() { - marker_bit_ = true; - return *this; -} - -Packet& Packet::AsFirstFragment() { - first_fragment_ = true; - return *this; -} - -Packet& Packet::Time(uint32_t rtp_timestamp) { - rtp_timestamp_ = rtp_timestamp; - return *this; -} - -Packet& Packet::SeqNum(uint16_t rtp_seq_num) { - rtp_seq_num_ = rtp_seq_num; - return *this; -} - -std::unique_ptr Packet::Build() { - auto res = std::make_unique(); - - auto& h264_header = H264Header(); - switch (type_) { - case kH264FuA: { - RTC_CHECK_EQ(h264_header.nalus_length, 1); - res->video_payload = BuildFuaPayload(); - break; - } - case kH264SingleNalu: { - RTC_CHECK_EQ(h264_header.nalus_length, 1); - res->video_payload = BuildSingleNaluPayload(); - break; - } - case kH264StapA: { - RTC_CHECK_GT(h264_header.nalus_length, 1); - RTC_CHECK_LE(h264_header.nalus_length, kMaxNalusPerPacket); - res->video_payload = BuildStapAPayload(); - break; - } - } - - if (type_ == kH264FuA && !first_fragment_) { - h264_header.nalus_length = 0; - } - - h264_header.packetization_type = type_; - res->marker_bit = marker_bit_; - res->video_header = video_header_; - res->timestamp = rtp_timestamp_; - res->seq_num = rtp_seq_num_; - res->video_header.codec = kVideoCodecH264; - - return res; -} - -rtc::CopyOnWriteBuffer Packet::BuildFuaPayload() const { - return rtc::CopyOnWriteBuffer(nalu_payloads_[0]); -} - -rtc::CopyOnWriteBuffer Packet::BuildSingleNaluPayload() const { - rtc::CopyOnWriteBuffer res; - auto& h264_header = H264Header(); - res.AppendData(&h264_header.nalus[0].type, 1); - res.AppendData(nalu_payloads_[0]); - return res; -} - -rtc::CopyOnWriteBuffer Packet::BuildStapAPayload() const { - rtc::CopyOnWriteBuffer res; - - const uint8_t indicator = H264::NaluType::kStapA; - res.AppendData(&indicator, 1); - - auto& h264_header = H264Header(); - for (size_t i = 0; i < h264_header.nalus_length; ++i) { - // The two first bytes indicates the nalu segment size. 
- uint8_t length_as_array[2] = { - 0, static_cast(nalu_payloads_[i].size() + 1)}; - res.AppendData(length_as_array); - - res.AppendData(&h264_header.nalus[i].type, 1); - res.AppendData(nalu_payloads_[i]); - } - return res; -} - -rtc::ArrayView PacketPayload( - const std::unique_ptr& packet) { - return packet->video_payload; -} - -std::vector FlatVector( - const std::vector>& elems) { - std::vector res; - for (const auto& elem : elems) { - res.insert(res.end(), elem.begin(), elem.end()); - } - return res; -} - -TEST(H264PacketBufferTest, IdrIsKeyframe) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/true); - - EXPECT_THAT( - packet_buffer.InsertPacket(Packet(kH264SingleNalu).Idr().Marker().Build()) - .packets, - SizeIs(1)); -} - -TEST(H264PacketBufferTest, IdrIsNotKeyframe) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - EXPECT_THAT( - packet_buffer.InsertPacket(Packet(kH264SingleNalu).Idr().Marker().Build()) - .packets, - IsEmpty()); -} - -TEST(H264PacketBufferTest, IdrIsKeyframeFuaRequiresFirstFragmet) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/true); - - // Not marked as the first fragment - EXPECT_THAT( - packet_buffer - .InsertPacket(Packet(kH264FuA).Idr().SeqNum(0).Time(0).Build()) - .packets, - IsEmpty()); - - EXPECT_THAT(packet_buffer - .InsertPacket( - Packet(kH264FuA).Idr().SeqNum(1).Time(0).Marker().Build()) - .packets, - IsEmpty()); - - // Marked as first fragment - EXPECT_THAT(packet_buffer - .InsertPacket(Packet(kH264FuA) - .Idr() - .SeqNum(2) - .Time(1) - .AsFirstFragment() - .Build()) - .packets, - IsEmpty()); - - EXPECT_THAT(packet_buffer - .InsertPacket( - Packet(kH264FuA).Idr().SeqNum(3).Time(1).Marker().Build()) - .packets, - SizeIs(2)); -} - -TEST(H264PacketBufferTest, SpsPpsIdrIsKeyframeSingleNalus) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - RTC_UNUSED(packet_buffer.InsertPacket( - Packet(kH264SingleNalu).Sps().SeqNum(0).Time(0).Build())); - RTC_UNUSED(packet_buffer.InsertPacket( - Packet(kH264SingleNalu).Pps().SeqNum(1).Time(0).Build())); - EXPECT_THAT( - packet_buffer - .InsertPacket( - Packet(kH264SingleNalu).Idr().SeqNum(2).Time(0).Marker().Build()) - .packets, - SizeIs(3)); -} - -TEST(H264PacketBufferTest, PpsIdrIsNotKeyframeSingleNalus) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - RTC_UNUSED(packet_buffer.InsertPacket( - Packet(kH264SingleNalu).Pps().SeqNum(0).Time(0).Build())); - EXPECT_THAT( - packet_buffer - .InsertPacket( - Packet(kH264SingleNalu).Idr().SeqNum(1).Time(0).Marker().Build()) - .packets, - IsEmpty()); -} - -TEST(H264PacketBufferTest, SpsIdrIsNotKeyframeSingleNalus) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - RTC_UNUSED(packet_buffer.InsertPacket( - Packet(kH264SingleNalu).Sps().SeqNum(0).Time(0).Build())); - EXPECT_THAT( - packet_buffer - .InsertPacket( - Packet(kH264SingleNalu).Idr().SeqNum(1).Time(0).Marker().Build()) - .packets, - IsEmpty()); -} - -TEST(H264PacketBufferTest, SpsPpsIdrIsKeyframeStapA) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - EXPECT_THAT(packet_buffer - .InsertPacket(Packet(kH264StapA) - .Sps() - .Pps() - .Idr() - .SeqNum(0) - .Time(0) - .Marker() - .Build()) - .packets, - SizeIs(1)); -} - -TEST(H264PacketBufferTest, PpsIdrIsNotKeyframeStapA) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - EXPECT_THAT( - packet_buffer - .InsertPacket( - Packet(kH264StapA).Pps().Idr().SeqNum(0).Time(0).Marker().Build()) - 
.packets, - IsEmpty()); -} - -TEST(H264PacketBufferTest, SpsIdrIsNotKeyframeStapA) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - EXPECT_THAT( - packet_buffer - .InsertPacket( - Packet(kH264StapA).Sps().Idr().SeqNum(2).Time(2).Marker().Build()) - .packets, - IsEmpty()); - - EXPECT_THAT(packet_buffer - .InsertPacket(Packet(kH264StapA) - .Sps() - .Pps() - .Idr() - .SeqNum(3) - .Time(3) - .Marker() - .Build()) - .packets, - SizeIs(1)); -} - -TEST(H264PacketBufferTest, InsertingSpsPpsLastCompletesKeyframe) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - RTC_UNUSED(packet_buffer.InsertPacket( - Packet(kH264SingleNalu).Idr().SeqNum(2).Time(1).Marker().Build())); - - EXPECT_THAT(packet_buffer - .InsertPacket( - Packet(kH264StapA).Sps().Pps().SeqNum(1).Time(1).Build()) - .packets, - SizeIs(2)); -} - -TEST(H264PacketBufferTest, InsertingMidFuaCompletesFrame) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - EXPECT_THAT(packet_buffer - .InsertPacket(Packet(kH264StapA) - .Sps() - .Pps() - .Idr() - .SeqNum(0) - .Time(0) - .Marker() - .Build()) - .packets, - SizeIs(1)); - - RTC_UNUSED(packet_buffer.InsertPacket( - Packet(kH264FuA).Slice().SeqNum(1).Time(1).AsFirstFragment().Build())); - RTC_UNUSED(packet_buffer.InsertPacket( - Packet(kH264FuA).Slice().SeqNum(3).Time(1).Marker().Build())); - EXPECT_THAT( - packet_buffer - .InsertPacket(Packet(kH264FuA).Slice().SeqNum(2).Time(1).Build()) - .packets, - SizeIs(3)); -} - -TEST(H264PacketBufferTest, SeqNumJumpDoesNotCompleteFrame) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - EXPECT_THAT(packet_buffer - .InsertPacket(Packet(kH264StapA) - .Sps() - .Pps() - .Idr() - .SeqNum(0) - .Time(0) - .Marker() - .Build()) - .packets, - SizeIs(1)); - - EXPECT_THAT( - packet_buffer - .InsertPacket(Packet(kH264FuA).Slice().SeqNum(1).Time(1).Build()) - .packets, - IsEmpty()); - - // Add `kBufferSize` to make the index of the sequence number wrap and end up - // where the packet with sequence number 2 would have ended up. - EXPECT_THAT(packet_buffer - .InsertPacket(Packet(kH264FuA) - .Slice() - .SeqNum(2 + kBufferSize) - .Time(3) - .Marker() - .Build()) - .packets, - IsEmpty()); -} - -TEST(H264PacketBufferTest, OldFramesAreNotCompletedAfterBufferWrap) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - EXPECT_THAT(packet_buffer - .InsertPacket(Packet(kH264SingleNalu) - .Slice() - .SeqNum(1) - .Time(1) - .Marker() - .Build()) - .packets, - IsEmpty()); - - // New keyframe, preceedes packet with sequence number 1 in the buffer. 
- EXPECT_THAT(packet_buffer - .InsertPacket(Packet(kH264StapA) - .Sps() - .Pps() - .Idr() - .SeqNum(kBufferSize) - .Time(kBufferSize) - .Marker() - .Build()) - .packets, - SizeIs(1)); -} - -TEST(H264PacketBufferTest, OldPacketsDontBlockNewPackets) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - EXPECT_THAT(packet_buffer - .InsertPacket(Packet(kH264StapA) - .Sps() - .Pps() - .Idr() - .SeqNum(kBufferSize) - .Time(kBufferSize) - .Marker() - .Build()) - .packets, - SizeIs(1)); - - RTC_UNUSED(packet_buffer.InsertPacket(Packet(kH264FuA) - .Slice() - .SeqNum(kBufferSize + 1) - .Time(kBufferSize + 1) - .AsFirstFragment() - .Build())); - - RTC_UNUSED(packet_buffer.InsertPacket(Packet(kH264FuA) - .Slice() - .SeqNum(kBufferSize + 3) - .Time(kBufferSize + 1) - .Marker() - .Build())); - EXPECT_THAT( - packet_buffer - .InsertPacket(Packet(kH264FuA).Slice().SeqNum(2).Time(2).Build()) - .packets, - IsEmpty()); - - EXPECT_THAT(packet_buffer - .InsertPacket(Packet(kH264FuA) - .Slice() - .SeqNum(kBufferSize + 2) - .Time(kBufferSize + 1) - .Build()) - .packets, - SizeIs(3)); -} - -TEST(H264PacketBufferTest, OldPacketDoesntCompleteFrame) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - EXPECT_THAT(packet_buffer - .InsertPacket(Packet(kH264StapA) - .Sps() - .Pps() - .Idr() - .SeqNum(kBufferSize) - .Time(kBufferSize) - .Marker() - .Build()) - .packets, - SizeIs(1)); - - EXPECT_THAT(packet_buffer - .InsertPacket(Packet(kH264FuA) - .Slice() - .SeqNum(kBufferSize + 3) - .Time(kBufferSize + 1) - .Marker() - .Build()) - .packets, - IsEmpty()); - - EXPECT_THAT( - packet_buffer - .InsertPacket( - Packet(kH264FuA).Slice().SeqNum(2).Time(2).Marker().Build()) - .packets, - IsEmpty()); - - EXPECT_THAT(packet_buffer - .InsertPacket(Packet(kH264FuA) - .Slice() - .SeqNum(kBufferSize + 1) - .Time(kBufferSize + 1) - .AsFirstFragment() - .Build()) - .packets, - IsEmpty()); -} - -TEST(H264PacketBufferTest, FrameBoundariesAreSet) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - auto key = packet_buffer.InsertPacket( - Packet(kH264StapA).Sps().Pps().Idr().SeqNum(1).Time(1).Marker().Build()); - - ASSERT_THAT(key.packets, SizeIs(1)); - EXPECT_TRUE(key.packets[0]->video_header.is_first_packet_in_frame); - EXPECT_TRUE(key.packets[0]->video_header.is_last_packet_in_frame); - - RTC_UNUSED(packet_buffer.InsertPacket( - Packet(kH264FuA).Slice().SeqNum(2).Time(2).Build())); - RTC_UNUSED(packet_buffer.InsertPacket( - Packet(kH264FuA).Slice().SeqNum(3).Time(2).Build())); - auto delta = packet_buffer.InsertPacket( - Packet(kH264FuA).Slice().SeqNum(4).Time(2).Marker().Build()); - - ASSERT_THAT(delta.packets, SizeIs(3)); - EXPECT_TRUE(delta.packets[0]->video_header.is_first_packet_in_frame); - EXPECT_FALSE(delta.packets[0]->video_header.is_last_packet_in_frame); - - EXPECT_FALSE(delta.packets[1]->video_header.is_first_packet_in_frame); - EXPECT_FALSE(delta.packets[1]->video_header.is_last_packet_in_frame); - - EXPECT_FALSE(delta.packets[2]->video_header.is_first_packet_in_frame); - EXPECT_TRUE(delta.packets[2]->video_header.is_last_packet_in_frame); -} - -TEST(H264PacketBufferTest, ResolutionSetOnFirstPacket) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - RTC_UNUSED(packet_buffer.InsertPacket( - Packet(kH264SingleNalu).Aud().SeqNum(1).Time(1).Build())); - auto res = packet_buffer.InsertPacket(Packet(kH264StapA) - .SpsWithResolution({320, 240}) - .Pps() - .Idr() - .SeqNum(2) - .Time(1) - .Marker() - .Build()); - - ASSERT_THAT(res.packets, 
SizeIs(2)); - EXPECT_THAT(res.packets[0]->video_header.width, Eq(320)); - EXPECT_THAT(res.packets[0]->video_header.height, Eq(240)); -} - -TEST(H264PacketBufferTest, KeyframeAndDeltaFrameSetOnFirstPacket) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - RTC_UNUSED(packet_buffer.InsertPacket( - Packet(kH264SingleNalu).Aud().SeqNum(1).Time(1).Build())); - auto key = packet_buffer.InsertPacket( - Packet(kH264StapA).Sps().Pps().Idr().SeqNum(2).Time(1).Marker().Build()); - - auto delta = packet_buffer.InsertPacket( - Packet(kH264SingleNalu).Slice().SeqNum(3).Time(2).Marker().Build()); - - ASSERT_THAT(key.packets, SizeIs(2)); - EXPECT_THAT(key.packets[0]->video_header.frame_type, - Eq(VideoFrameType::kVideoFrameKey)); - ASSERT_THAT(delta.packets, SizeIs(1)); - EXPECT_THAT(delta.packets[0]->video_header.frame_type, - Eq(VideoFrameType::kVideoFrameDelta)); -} - -TEST(H264PacketBufferTest, RtpSeqNumWrap) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - RTC_UNUSED(packet_buffer.InsertPacket( - Packet(kH264StapA).Sps().Pps().SeqNum(0xffff).Time(0).Build())); - - RTC_UNUSED(packet_buffer.InsertPacket( - Packet(kH264FuA).Idr().SeqNum(0).Time(0).Build())); - EXPECT_THAT(packet_buffer - .InsertPacket( - Packet(kH264FuA).Idr().SeqNum(1).Time(0).Marker().Build()) - .packets, - SizeIs(3)); -} - -TEST(H264PacketBufferTest, StapAFixedBitstream) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - auto packets = packet_buffer - .InsertPacket(Packet(kH264StapA) - .Sps({1, 2, 3}) - .Pps({4, 5, 6}) - .Idr({7, 8, 9}) - .SeqNum(0) - .Time(0) - .Marker() - .Build()) - .packets; - - ASSERT_THAT(packets, SizeIs(1)); - EXPECT_THAT(PacketPayload(packets[0]), - ElementsAreArray(FlatVector({StartCode(), - {kSps, 1, 2, 3}, - StartCode(), - {kPps, 4, 5, 6}, - StartCode(), - {kIdr, 7, 8, 9}}))); -} - -TEST(H264PacketBufferTest, SingleNaluFixedBitstream) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - RTC_UNUSED(packet_buffer.InsertPacket( - Packet(kH264SingleNalu).Sps({1, 2, 3}).SeqNum(0).Time(0).Build())); - RTC_UNUSED(packet_buffer.InsertPacket( - Packet(kH264SingleNalu).Pps({4, 5, 6}).SeqNum(1).Time(0).Build())); - auto packets = packet_buffer - .InsertPacket(Packet(kH264SingleNalu) - .Idr({7, 8, 9}) - .SeqNum(2) - .Time(0) - .Marker() - .Build()) - .packets; - - ASSERT_THAT(packets, SizeIs(3)); - EXPECT_THAT(PacketPayload(packets[0]), - ElementsAreArray(FlatVector({StartCode(), {kSps, 1, 2, 3}}))); - EXPECT_THAT(PacketPayload(packets[1]), - ElementsAreArray(FlatVector({StartCode(), {kPps, 4, 5, 6}}))); - EXPECT_THAT(PacketPayload(packets[2]), - ElementsAreArray(FlatVector({StartCode(), {kIdr, 7, 8, 9}}))); -} - -TEST(H264PacketBufferTest, StapaAndFuaFixedBitstream) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - RTC_UNUSED(packet_buffer.InsertPacket(Packet(kH264StapA) - .Sps({1, 2, 3}) - .Pps({4, 5, 6}) - .SeqNum(0) - .Time(0) - .Build())); - RTC_UNUSED(packet_buffer.InsertPacket(Packet(kH264FuA) - .Idr({8, 8, 8}) - .SeqNum(1) - .Time(0) - .AsFirstFragment() - .Build())); - auto packets = packet_buffer - .InsertPacket(Packet(kH264FuA) - .Idr({9, 9, 9}) - .SeqNum(2) - .Time(0) - .Marker() - .Build()) - .packets; - - ASSERT_THAT(packets, SizeIs(3)); - EXPECT_THAT( - PacketPayload(packets[0]), - ElementsAreArray(FlatVector( - {StartCode(), {kSps, 1, 2, 3}, StartCode(), {kPps, 4, 5, 6}}))); - EXPECT_THAT(PacketPayload(packets[1]), - ElementsAreArray(FlatVector({StartCode(), {8, 8, 8}}))); - // 
Third is a continuation of second, so only the payload is expected. - EXPECT_THAT(PacketPayload(packets[2]), - ElementsAreArray(FlatVector({{9, 9, 9}}))); -} - -TEST(H264PacketBufferTest, FullPacketBufferDoesNotBlockKeyframe) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - for (int i = 0; i < kBufferSize; ++i) { - EXPECT_THAT( - packet_buffer - .InsertPacket( - Packet(kH264SingleNalu).Slice().SeqNum(i).Time(0).Build()) - .packets, - IsEmpty()); - } - - EXPECT_THAT(packet_buffer - .InsertPacket(Packet(kH264StapA) - .Sps() - .Pps() - .Idr() - .SeqNum(kBufferSize) - .Time(1) - .Marker() - .Build()) - .packets, - SizeIs(1)); -} - -TEST(H264PacketBufferTest, TooManyNalusInPacket) { - H264PacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); - - std::unique_ptr packet( - Packet(kH264StapA).Sps().Pps().Idr().SeqNum(1).Time(1).Marker().Build()); - auto& h264_header = - absl::get(packet->video_header.video_type_header); - h264_header.nalus_length = kMaxNalusPerPacket + 1; - - EXPECT_THAT(packet_buffer.InsertPacket(std::move(packet)).packets, IsEmpty()); -} - -} // namespace -} // namespace webrtc diff --git a/modules/video_coding/h264_sprop_parameter_sets.cc b/modules/video_coding/h264_sprop_parameter_sets.cc index a64f8885da..4d398c7d81 100644 --- a/modules/video_coding/h264_sprop_parameter_sets.cc +++ b/modules/video_coding/h264_sprop_parameter_sets.cc @@ -13,21 +13,27 @@ #include #include +#include #include #include +#include "rtc_base/base64.h" #include "rtc_base/logging.h" -#include "rtc_base/third_party/base64/base64.h" + +namespace webrtc { namespace { bool DecodeAndConvert(const std::string& base64, std::vector* binary) { - return rtc::Base64::DecodeFromArray(base64.data(), base64.size(), - rtc::Base64::DO_STRICT, binary, nullptr); + std::optional decoded = Base64Decode(base64); + if (!decoded.has_value()) { + return false; + } + binary->assign(decoded->begin(), decoded->end()); + return true; } -} // namespace -namespace webrtc { +} // namespace bool H264SpropParameterSets::DecodeSprop(const std::string& sprop) { size_t separator_pos = sprop.find(','); diff --git a/modules/video_coding/h264_sprop_parameter_sets_unittest.cc b/modules/video_coding/h264_sprop_parameter_sets_unittest.cc index ae263131a7..24f6bb7410 100644 --- a/modules/video_coding/h264_sprop_parameter_sets_unittest.cc +++ b/modules/video_coding/h264_sprop_parameter_sets_unittest.cc @@ -10,6 +10,7 @@ #include "modules/video_coding/h264_sprop_parameter_sets.h" +#include #include #include "test/gtest.h" diff --git a/modules/video_coding/h264_sps_pps_tracker.cc b/modules/video_coding/h264_sps_pps_tracker.cc index 5a7eae7b42..c00ec6813d 100644 --- a/modules/video_coding/h264_sps_pps_tracker.cc +++ b/modules/video_coding/h264_sps_pps_tracker.cc @@ -10,15 +10,20 @@ #include "modules/video_coding/h264_sps_pps_tracker.h" -#include -#include +#include +#include +#include #include +#include -#include "absl/types/variant.h" +#include "api/array_view.h" +#include "api/video/video_codec_type.h" #include "common_video/h264/h264_common.h" #include "common_video/h264/pps_parser.h" #include "common_video/h264/sps_parser.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "rtc_base/byte_buffer.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -29,37 +34,21 @@ namespace { const uint8_t start_code_h264[] = {0, 0, 0, 1}; } // namespace -H264SpsPpsTracker::H264SpsPpsTracker() = default; 
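// The out-of-line defaulted special members removed here (and just below)
// become unnecessary because PpsInfo and SpsInfo now keep their payload in a
// Buffer instead of a std::unique_ptr<uint8_t[]> plus a separate size field,
// so the compiler-generated constructors, assignments and destructors suffice.
// A sketch of the new shape, mirroring the updated h264_sps_pps_tracker.h
// further down in this patch:
struct PpsInfo {
  int sps_id = -1;
  Buffer data;  // Raw PPS NALU bytes, filled in by InsertSpsPpsNalus().
};
struct SpsInfo {
  int width = -1;
  int height = -1;
  Buffer data;  // Raw SPS NALU bytes.
};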
-H264SpsPpsTracker::~H264SpsPpsTracker() = default; - -H264SpsPpsTracker::PpsInfo::PpsInfo() = default; -H264SpsPpsTracker::PpsInfo::PpsInfo(PpsInfo&& rhs) = default; -H264SpsPpsTracker::PpsInfo& H264SpsPpsTracker::PpsInfo::operator=( - PpsInfo&& rhs) = default; -H264SpsPpsTracker::PpsInfo::~PpsInfo() = default; - -H264SpsPpsTracker::SpsInfo::SpsInfo() = default; -H264SpsPpsTracker::SpsInfo::SpsInfo(SpsInfo&& rhs) = default; -H264SpsPpsTracker::SpsInfo& H264SpsPpsTracker::SpsInfo::operator=( - SpsInfo&& rhs) = default; -H264SpsPpsTracker::SpsInfo::~SpsInfo() = default; - H264SpsPpsTracker::FixedBitstream H264SpsPpsTracker::CopyAndFixBitstream( - rtc::ArrayView bitstream, + ArrayView bitstream, RTPVideoHeader* video_header) { RTC_DCHECK(video_header); RTC_DCHECK(video_header->codec == kVideoCodecH264); RTC_DCHECK_GT(bitstream.size(), 0); auto& h264_header = - absl::get(video_header->video_type_header); + std::get(video_header->video_type_header); bool append_sps_pps = false; auto sps = sps_data_.end(); auto pps = pps_data_.end(); - for (size_t i = 0; i < h264_header.nalus_length; ++i) { - const NaluInfo& nalu = h264_header.nalus[i]; + for (const NaluInfo& nalu : h264_header.nalus) { switch (nalu.type) { case H264::NaluType::kSps: { SpsInfo& sps_info = sps_data_[nalu.sps_id]; @@ -103,9 +92,7 @@ H264SpsPpsTracker::FixedBitstream H264SpsPpsTracker::CopyAndFixBitstream( // If the SPS/PPS was supplied out of band then we will have saved // the actual bitstream in `data`. - if (sps->second.data && pps->second.data) { - RTC_DCHECK_GT(sps->second.size, 0); - RTC_DCHECK_GT(pps->second.size, 0); + if (!sps->second.data.empty() && !pps->second.data.empty()) { append_sps_pps = true; } } @@ -123,24 +110,27 @@ H264SpsPpsTracker::FixedBitstream H264SpsPpsTracker::CopyAndFixBitstream( size_t required_size = 0; if (append_sps_pps) { - required_size += sps->second.size + sizeof(start_code_h264); - required_size += pps->second.size + sizeof(start_code_h264); + required_size += sps->second.data.size() + sizeof(start_code_h264); + required_size += pps->second.data.size() + sizeof(start_code_h264); } if (h264_header.packetization_type == kH264StapA) { - const uint8_t* nalu_ptr = bitstream.data() + 1; - while (nalu_ptr < bitstream.data() + bitstream.size() - 1) { + ByteBufferReader nalu(bitstream.subview(1)); + while (nalu.Length() > 0) { required_size += sizeof(start_code_h264); // The first two bytes describe the length of a segment. - uint16_t segment_length = nalu_ptr[0] << 8 | nalu_ptr[1]; - nalu_ptr += 2; - + uint16_t segment_length; + if (!nalu.ReadUInt16(&segment_length)) + return {kDrop}; + if (segment_length == 0 || segment_length > nalu.Length()) { + return {kDrop}; + } required_size += segment_length; - nalu_ptr += segment_length; + nalu.Consume(segment_length); } } else { - if (h264_header.nalus_length > 0) { + if (!h264_header.nalus.empty()) { required_size += sizeof(start_code_h264); } required_size += bitstream.size(); @@ -153,50 +143,38 @@ H264SpsPpsTracker::FixedBitstream H264SpsPpsTracker::CopyAndFixBitstream( if (append_sps_pps) { // Insert SPS. fixed.bitstream.AppendData(start_code_h264); - fixed.bitstream.AppendData(sps->second.data.get(), sps->second.size); + fixed.bitstream.AppendData(sps->second.data); // Insert PPS. fixed.bitstream.AppendData(start_code_h264); - fixed.bitstream.AppendData(pps->second.data.get(), pps->second.size); + fixed.bitstream.AppendData(pps->second.data); // Update codec header to reflect the newly added SPS and PPS. 
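// The STAP-A payload parsed above (to compute the required size) and again
// below (to do the actual copy) is, after the one-byte STAP-A indicator, a
// sequence of [2-byte big-endian length][NALU payload] entries. A stand-alone
// sketch of that walk with the same bounds checks this patch introduces; the
// helper name is illustrative, and the size computation mirrors the loop
// above:
std::optional<size_t> StapARequiredSize(ArrayView<const uint8_t> bitstream) {
  size_t required_size = 0;
  ByteBufferReader nalu(bitstream.subview(1));  // Skip the STAP-A indicator.
  while (nalu.Length() > 0) {
    uint16_t segment_length;
    if (!nalu.ReadUInt16(&segment_length)) {
      return std::nullopt;  // Truncated length field.
    }
    if (segment_length == 0 || segment_length > nalu.Length()) {
      return std::nullopt;  // Segment would run past the end of the payload.
    }
    required_size += sizeof(start_code_h264) + segment_length;
    nalu.Consume(segment_length);
  }
  return required_size;
}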
- NaluInfo sps_info; - sps_info.type = H264::NaluType::kSps; - sps_info.sps_id = sps->first; - sps_info.pps_id = -1; - NaluInfo pps_info; - pps_info.type = H264::NaluType::kPps; - pps_info.sps_id = sps->first; - pps_info.pps_id = pps->first; - if (h264_header.nalus_length + 2 <= kMaxNalusPerPacket) { - h264_header.nalus[h264_header.nalus_length++] = sps_info; - h264_header.nalus[h264_header.nalus_length++] = pps_info; - } else { - RTC_LOG(LS_WARNING) << "Not enough space in H.264 codec header to insert " - "SPS/PPS provided out-of-band."; - } + h264_header.nalus.push_back( + {.type = H264::NaluType::kSps, .sps_id = sps->first, .pps_id = -1}); + h264_header.nalus.push_back({.type = H264::NaluType::kPps, + .sps_id = sps->first, + .pps_id = pps->first}); } // Copy the rest of the bitstream and insert start codes. if (h264_header.packetization_type == kH264StapA) { - const uint8_t* nalu_ptr = bitstream.data() + 1; - while (nalu_ptr < bitstream.data() + bitstream.size() - 1) { + ByteBufferReader nalu(bitstream.subview(1)); + while (nalu.Length() > 0) { fixed.bitstream.AppendData(start_code_h264); // The first two bytes describe the length of a segment. - uint16_t segment_length = nalu_ptr[0] << 8 | nalu_ptr[1]; - nalu_ptr += 2; - - size_t copy_end = nalu_ptr - bitstream.data() + segment_length; - if (copy_end > bitstream.size()) { + uint16_t segment_length; + if (!nalu.ReadUInt16(&segment_length)) + return {kDrop}; + if (segment_length == 0 || segment_length > nalu.Length()) { return {kDrop}; } - - fixed.bitstream.AppendData(nalu_ptr, segment_length); - nalu_ptr += segment_length; + fixed.bitstream.AppendData(nalu.Data(), segment_length); + nalu.Consume(segment_length); } } else { - if (h264_header.nalus_length > 0) { + if (!h264_header.nalus.empty()) { fixed.bitstream.AppendData(start_code_h264); } fixed.bitstream.AppendData(bitstream.data(), bitstream.size()); @@ -227,10 +205,10 @@ void H264SpsPpsTracker::InsertSpsPpsNalus(const std::vector& sps, RTC_LOG(LS_WARNING) << "SPS Nalu header missing"; return; } - absl::optional parsed_sps = SpsParser::ParseSps( - sps.data() + kNaluHeaderOffset, sps.size() - kNaluHeaderOffset); - absl::optional parsed_pps = PpsParser::ParsePps( - pps.data() + kNaluHeaderOffset, pps.size() - kNaluHeaderOffset); + std::optional parsed_sps = SpsParser::ParseSps( + ArrayView(sps).subview(kNaluHeaderOffset)); + std::optional parsed_pps = PpsParser::ParsePps( + ArrayView(pps).subview(kNaluHeaderOffset)); if (!parsed_sps) { RTC_LOG(LS_WARNING) << "Failed to parse SPS."; @@ -245,20 +223,14 @@ void H264SpsPpsTracker::InsertSpsPpsNalus(const std::vector& sps, } SpsInfo sps_info; - sps_info.size = sps.size(); sps_info.width = parsed_sps->width; sps_info.height = parsed_sps->height; - uint8_t* sps_data = new uint8_t[sps_info.size]; - memcpy(sps_data, sps.data(), sps_info.size); - sps_info.data.reset(sps_data); + sps_info.data.SetData(sps); sps_data_[parsed_sps->id] = std::move(sps_info); PpsInfo pps_info; - pps_info.size = pps.size(); pps_info.sps_id = parsed_pps->sps_id; - uint8_t* pps_data = new uint8_t[pps_info.size]; - memcpy(pps_data, pps.data(), pps_info.size); - pps_info.data.reset(pps_data); + pps_info.data.SetData(pps); pps_data_[parsed_pps->id] = std::move(pps_info); RTC_LOG(LS_INFO) << "Inserted SPS id " << parsed_sps->id << " and PPS id " diff --git a/modules/video_coding/h264_sps_pps_tracker.h b/modules/video_coding/h264_sps_pps_tracker.h index 600e2ee397..132aaa0568 100644 --- a/modules/video_coding/h264_sps_pps_tracker.h +++ 
b/modules/video_coding/h264_sps_pps_tracker.h @@ -14,11 +14,11 @@ #include #include #include -#include #include #include "api/array_view.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "rtc_base/buffer.h" #include "rtc_base/copy_on_write_buffer.h" namespace webrtc { @@ -29,14 +29,16 @@ class H264SpsPpsTracker { enum PacketAction { kInsert, kDrop, kRequestKeyframe }; struct FixedBitstream { PacketAction action; - rtc::CopyOnWriteBuffer bitstream; + CopyOnWriteBuffer bitstream; }; - H264SpsPpsTracker(); - ~H264SpsPpsTracker(); + H264SpsPpsTracker() = default; + H264SpsPpsTracker(const H264SpsPpsTracker& other) = default; + H264SpsPpsTracker& operator=(const H264SpsPpsTracker& other) = default; + ~H264SpsPpsTracker() = default; // Returns fixed bitstream and modifies `video_header`. - FixedBitstream CopyAndFixBitstream(rtc::ArrayView bitstream, + FixedBitstream CopyAndFixBitstream(ArrayView bitstream, RTPVideoHeader* video_header); void InsertSpsPpsNalus(const std::vector& sps, @@ -44,30 +46,18 @@ class H264SpsPpsTracker { private: struct PpsInfo { - PpsInfo(); - PpsInfo(PpsInfo&& rhs); - PpsInfo& operator=(PpsInfo&& rhs); - ~PpsInfo(); - int sps_id = -1; - size_t size = 0; - std::unique_ptr data; + Buffer data; }; struct SpsInfo { - SpsInfo(); - SpsInfo(SpsInfo&& rhs); - SpsInfo& operator=(SpsInfo&& rhs); - ~SpsInfo(); - - size_t size = 0; int width = -1; int height = -1; - std::unique_ptr data; + Buffer data; }; - std::map pps_data_; - std::map sps_data_; + std::map pps_data_; + std::map sps_data_; }; } // namespace video_coding diff --git a/modules/video_coding/h264_sps_pps_tracker_unittest.cc b/modules/video_coding/h264_sps_pps_tracker_unittest.cc index 76591e9f5c..079049d59e 100644 --- a/modules/video_coding/h264_sps_pps_tracker_unittest.cc +++ b/modules/video_coding/h264_sps_pps_tracker_unittest.cc @@ -10,11 +10,11 @@ #include "modules/video_coding/h264_sps_pps_tracker.h" -#include - +#include #include -#include "absl/types/variant.h" +#include "api/array_view.h" +#include "api/video/video_codec_type.h" #include "common_video/h264/h264_common.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" @@ -26,10 +26,11 @@ namespace video_coding { namespace { using ::testing::ElementsAreArray; +using ::testing::SizeIs; const uint8_t start_code[] = {0, 0, 0, 1}; -rtc::ArrayView Bitstream( +ArrayView Bitstream( const H264SpsPpsTracker::FixedBitstream& fixed) { return fixed.bitstream; } @@ -64,12 +65,11 @@ class H264VideoHeader : public RTPVideoHeader { codec = kVideoCodecH264; is_first_packet_in_frame = false; auto& h264_header = video_type_header.emplace(); - h264_header.nalus_length = 0; h264_header.packetization_type = kH264SingleNalu; } RTPVideoHeaderH264& h264() { - return absl::get(video_type_header); + return std::get(video_type_header); } }; @@ -87,7 +87,7 @@ class TestH264SpsPpsTracker : public ::testing::Test { data->push_back(H264::NaluType::kSps); data->push_back(sps_id); // The sps data, just a single byte. - header->h264().nalus[header->h264().nalus_length++] = info; + header->h264().nalus.push_back(info); } void AddPps(H264VideoHeader* header, @@ -101,7 +101,7 @@ class TestH264SpsPpsTracker : public ::testing::Test { data->push_back(H264::NaluType::kPps); data->push_back(pps_id); // The pps data, just a single byte. 
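A minimal caller-side sketch of the tracker API declared in this header: CopyAndFixBitstream() returns a FixedBitstream whose action tells the caller whether to forward the (possibly rewritten) payload, drop the packet, or ask for a new keyframe. ForwardFixedPayload() and RequestKeyFrame() are hypothetical stand-ins for the receiver's real plumbing; only the H264SpsPpsTracker calls are actual API.

#include <cstdint>
#include <utility>

#include "api/array_view.h"
#include "modules/rtp_rtcp/source/rtp_video_header.h"
#include "modules/video_coding/h264_sps_pps_tracker.h"
#include "rtc_base/copy_on_write_buffer.h"

namespace webrtc {
namespace video_coding {

// Hypothetical downstream hooks, assumed to exist elsewhere.
void ForwardFixedPayload(CopyOnWriteBuffer bitstream,
                         const RTPVideoHeader& video_header);
void RequestKeyFrame();

void HandleDepacketizedH264(H264SpsPpsTracker& tracker,
                            ArrayView<const uint8_t> payload,
                            RTPVideoHeader& video_header) {
  H264SpsPpsTracker::FixedBitstream fixed =
      tracker.CopyAndFixBitstream(payload, &video_header);
  switch (fixed.action) {
    case H264SpsPpsTracker::kInsert:
      // Start codes (and possibly out-of-band SPS/PPS) have been prepended.
      ForwardFixedPayload(std::move(fixed.bitstream), video_header);
      break;
    case H264SpsPpsTracker::kDrop:
      break;  // Malformed or incomplete packet; nothing usable to forward.
    case H264SpsPpsTracker::kRequestKeyframe:
      RequestKeyFrame();  // E.g. an IDR referencing an unknown PPS.
      break;
  }
}

}  // namespace video_coding
}  // namespace webrtc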
- header->h264().nalus[header->h264().nalus_length++] = info; + header->h264().nalus.push_back(info); } void AddIdr(H264VideoHeader* header, int pps_id) { @@ -110,7 +110,7 @@ class TestH264SpsPpsTracker : public ::testing::Test { info.sps_id = -1; info.pps_id = pps_id; - header->h264().nalus[header->h264().nalus_length++] = info; + header->h264().nalus.push_back(info); } protected: @@ -133,7 +133,7 @@ TEST_F(TestH264SpsPpsTracker, FuAFirstPacket) { uint8_t data[] = {1, 2, 3}; H264VideoHeader header; header.h264().packetization_type = kH264FuA; - header.h264().nalus_length = 1; + header.h264().nalus.resize(1); header.is_first_packet_in_frame = true; H264SpsPpsTracker::FixedBitstream fixed = @@ -201,7 +201,7 @@ TEST_F(TestH264SpsPpsTracker, ConsecutiveStapA) { TEST_F(TestH264SpsPpsTracker, SingleNaluInsertStartCode) { uint8_t data[] = {1, 2, 3}; H264VideoHeader header; - header.h264().nalus_length = 1; + header.h264().nalus.resize(1); H264SpsPpsTracker::FixedBitstream fixed = tracker_.CopyAndFixBitstream(data, &header); @@ -217,8 +217,8 @@ TEST_F(TestH264SpsPpsTracker, NoStartCodeInsertedForSubsequentFuAPacket) { std::vector data = {1, 2, 3}; H264VideoHeader header; header.h264().packetization_type = kH264FuA; - // Since no NALU begin in this packet the nalus_length is zero. - header.h264().nalus_length = 0; + // Since no NALU begin in this packet the nalus are empty. + header.h264().nalus.clear(); H264SpsPpsTracker::FixedBitstream fixed = tracker_.CopyAndFixBitstream(data, &header); @@ -330,12 +330,12 @@ TEST_F(TestH264SpsPpsTracker, SpsPpsOutOfBand) { H264VideoHeader idr_header; idr_header.is_first_packet_in_frame = true; AddIdr(&idr_header, 0); - EXPECT_EQ(idr_header.h264().nalus_length, 1u); + EXPECT_THAT(idr_header.h264().nalus, SizeIs(1)); H264SpsPpsTracker::FixedBitstream fixed = tracker_.CopyAndFixBitstream(kData, &idr_header); - EXPECT_EQ(idr_header.h264().nalus_length, 3u); + EXPECT_THAT(idr_header.h264().nalus, SizeIs(3)); EXPECT_EQ(idr_header.width, 320u); EXPECT_EQ(idr_header.height, 240u); ExpectSpsPpsIdr(idr_header.h264(), 0, 0); diff --git a/modules/video_coding/h26x_packet_buffer.cc b/modules/video_coding/h26x_packet_buffer.cc new file mode 100644 index 0000000000..dc141e5a83 --- /dev/null +++ b/modules/video_coding/h26x_packet_buffer.cc @@ -0,0 +1,542 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/h26x_packet_buffer.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "api/array_view.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" +#include "common_video/h264/h264_common.h" +#include "common_video/h264/pps_parser.h" +#include "common_video/h264/sps_parser.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "modules/video_coding/h264_sprop_parameter_sets.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/sequence_number_util.h" +#ifdef RTC_ENABLE_H265 +#include "common_video/h265/h265_common.h" +#endif + +namespace webrtc { +namespace { + +int64_t EuclideanMod(int64_t n, int64_t div) { + RTC_DCHECK_GT(div, 0); + return (n %= div) < 0 ? n + div : n; +} + +bool IsFirstPacketOfFragment(const RTPVideoHeaderH264& h264_header) { + return !h264_header.nalus.empty(); +} + +bool BeginningOfIdr(const H26xPacketBuffer::Packet& packet) { + const auto& h264_header = + std::get(packet.video_header.video_type_header); + const bool contains_idr_nalu = + absl::c_any_of(h264_header.nalus, [](const auto& nalu_info) { + return nalu_info.type == H264::NaluType::kIdr; + }); + switch (h264_header.packetization_type) { + case kH264StapA: + case kH264SingleNalu: { + return contains_idr_nalu; + } + case kH264FuA: { + return contains_idr_nalu && IsFirstPacketOfFragment(h264_header); + } + } +} + +bool HasSps(const H26xPacketBuffer::Packet& packet) { + auto& h264_header = + std::get(packet.video_header.video_type_header); + return absl::c_any_of(h264_header.nalus, [](const auto& nalu_info) { + return nalu_info.type == H264::NaluType::kSps; + }); +} + +int64_t* GetContinuousSequence(ArrayView last_continuous, + int64_t unwrapped_seq_num) { + for (int64_t& last : last_continuous) { + if (unwrapped_seq_num - 1 == last) { + return &last; + } + } + return nullptr; +} + +#ifdef RTC_ENABLE_H265 +bool HasVps(const H26xPacketBuffer::Packet& packet) { + std::vector nalu_indices = + H265::FindNaluIndices(packet.video_payload); + return absl::c_any_of((nalu_indices), [&packet]( + const H265::NaluIndex& nalu_index) { + return H265::ParseNaluType( + packet.video_payload.cdata()[nalu_index.payload_start_offset]) == + H265::NaluType::kVps; + }); +} +#endif + +} // namespace + +H26xPacketBuffer::H26xPacketBuffer(bool h264_idr_only_keyframes_allowed) + : h264_idr_only_keyframes_allowed_(h264_idr_only_keyframes_allowed) { + last_continuous_in_sequence_.fill(std::numeric_limits::min()); +} + +H26xPacketBuffer::InsertResult H26xPacketBuffer::InsertPadding( + uint16_t unwrapped_seq_num) { + int64_t* last_continuous_unwrapped_seq_num = + GetContinuousSequence(last_continuous_in_sequence_, unwrapped_seq_num); + if (last_continuous_unwrapped_seq_num == nullptr) { + last_continuous_in_sequence_[last_continuous_in_sequence_index_] = + unwrapped_seq_num; + last_continuous_unwrapped_seq_num = + &last_continuous_in_sequence_[last_continuous_in_sequence_index_]; + last_continuous_in_sequence_index_ = + (last_continuous_in_sequence_index_ + 1) % + last_continuous_in_sequence_.size(); + } else { + *last_continuous_unwrapped_seq_num = unwrapped_seq_num; + } + return {}; +} + +H26xPacketBuffer::InsertResult H26xPacketBuffer::InsertPacket( + std::unique_ptr packet) { + 
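EuclideanMod() above keeps the ring-buffer index in [0, div) even when the unwrapped sequence number is negative, which the built-in % operator (truncating toward zero) would not. A few worked values, assuming the kBufferSize of 2048 declared in the header; the helper below just restates the same arithmetic so the values can be checked at compile time.

#include <cstdint>

// Equivalent arithmetic to EuclideanMod() above, restated for illustration.
constexpr int64_t EuclideanModSketch(int64_t n, int64_t div) {
  return ((n % div) + div) % div;
}

static_assert(EuclideanModSketch(5, 2048) == 5);
static_assert(EuclideanModSketch(2049, 2048) == 1);    // wraps forward
static_assert(EuclideanModSketch(-1, 2048) == 2047);   // stays non-negative
// By contrast, -1 % 2048 evaluates to -1 in C++, which would be an invalid
// index into buffer_.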
RTC_DCHECK(packet->video_header.codec == kVideoCodecH264 || + packet->video_header.codec == kVideoCodecH265); + + InsertResult result; + + int64_t unwrapped_seq_num = packet->sequence_number; + auto& packet_slot = GetPacket(unwrapped_seq_num); + if (packet_slot != nullptr && + AheadOrAt(packet_slot->timestamp, packet->timestamp)) { + // The incoming `packet` is old or a duplicate. + return result; + } else { + packet_slot = std::move(packet); + } + + return FindFrames(unwrapped_seq_num); +} + +std::unique_ptr& H26xPacketBuffer::GetPacket( + int64_t unwrapped_seq_num) { + return buffer_[EuclideanMod(unwrapped_seq_num, kBufferSize)]; +} + +bool H26xPacketBuffer::BeginningOfStream( + const H26xPacketBuffer::Packet& packet) const { + if (packet.codec() == kVideoCodecH264) { + return HasSps(packet) || + (h264_idr_only_keyframes_allowed_ && BeginningOfIdr(packet)); +#ifdef RTC_ENABLE_H265 + } else if (packet.codec() == kVideoCodecH265) { + return HasVps(packet); +#endif + } + RTC_DCHECK_NOTREACHED(); + return false; +} + +H26xPacketBuffer::InsertResult H26xPacketBuffer::FindFrames( + int64_t unwrapped_seq_num) { + InsertResult result; + + Packet* packet = GetPacket(unwrapped_seq_num).get(); + RTC_CHECK(packet != nullptr); + + // Check if the packet is continuous or the beginning of a new coded video + // sequence. + int64_t* last_continuous_unwrapped_seq_num = + GetContinuousSequence(last_continuous_in_sequence_, unwrapped_seq_num); + if (last_continuous_unwrapped_seq_num == nullptr) { + if (!BeginningOfStream(*packet)) { + return result; + } + + last_continuous_in_sequence_[last_continuous_in_sequence_index_] = + unwrapped_seq_num; + last_continuous_unwrapped_seq_num = + &last_continuous_in_sequence_[last_continuous_in_sequence_index_]; + last_continuous_in_sequence_index_ = + (last_continuous_in_sequence_index_ + 1) % + last_continuous_in_sequence_.size(); + } + + for (int64_t seq_num = unwrapped_seq_num; + seq_num < unwrapped_seq_num + kBufferSize;) { + RTC_DCHECK_GE(seq_num, *last_continuous_unwrapped_seq_num); + + // Packets that were never assembled into a completed frame will stay in + // the 'buffer_'. Check that the `packet` sequence number match the expected + // unwrapped sequence number. + if (seq_num != packet->sequence_number) { + return result; + } + + *last_continuous_unwrapped_seq_num = seq_num; + // Last packet of the frame, try to assemble the frame. + if (packet->marker_bit) { + uint32_t rtp_timestamp = packet->timestamp; + + // Iterate backwards to find where the frame starts. + for (int64_t seq_num_start = seq_num; + seq_num_start > seq_num - kBufferSize; --seq_num_start) { + auto& prev_packet = GetPacket(seq_num_start - 1); + + if (prev_packet == nullptr || prev_packet->timestamp != rtp_timestamp) { + if (MaybeAssembleFrame(seq_num_start, seq_num, result)) { + // Frame was assembled, continue to look for more frames. + break; + } else { + // Frame was not assembled, no subsequent frame will be continuous. + return result; + } + } + } + } + + seq_num++; + packet = GetPacket(seq_num).get(); + if (packet == nullptr) { + return result; + } + } + + return result; +} + +bool H26xPacketBuffer::MaybeAssembleFrame(int64_t start_seq_num_unwrapped, + int64_t end_sequence_number_unwrapped, + InsertResult& result) { +#ifdef RTC_ENABLE_H265 + bool has_vps = false; +#endif + bool has_sps = false; + bool has_pps = false; + // Includes IDR, CRA and BLA for HEVC. 
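In the H.265 branch below, the keyframe check accepts any IRAP picture. Assuming common_video/h265 follows the spec numbering, the kBlaWLp..kRsvIrapVcl23 range test corresponds to NAL unit types 16 through 23 (all BLA, IDR and CRA variants plus the two reserved IRAP types); a spelled-out sketch against the raw spec values:

#include <cstdint>

// IRAP (intra random access point) NAL unit types in H.265, spec numbering:
// BLA_W_LP(16) .. RSV_IRAP_VCL23(23), covering BLA, IDR and CRA pictures.
constexpr bool IsH265IrapNaluType(uint8_t nalu_type) {
  return nalu_type >= 16 && nalu_type <= 23;
}

static_assert(IsH265IrapNaluType(19));   // IDR_W_RADL
static_assert(IsH265IrapNaluType(21));   // CRA_NUT
static_assert(!IsH265IrapNaluType(32));  // VPS_NUT is a parameter set, not a picture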
+ bool has_idr = false; + + int width = -1; + int height = -1; + + for (int64_t seq_num = start_seq_num_unwrapped; + seq_num <= end_sequence_number_unwrapped; ++seq_num) { + const auto& packet = GetPacket(seq_num); + if (packet->codec() == kVideoCodecH264) { + const auto& h264_header = + std::get(packet->video_header.video_type_header); + for (const auto& nalu : h264_header.nalus) { + has_idr |= nalu.type == H264::NaluType::kIdr; + has_sps |= nalu.type == H264::NaluType::kSps; + has_pps |= nalu.type == H264::NaluType::kPps; + } + if (has_idr) { + if (!h264_idr_only_keyframes_allowed_ && (!has_sps || !has_pps)) { + return false; + } + } +#ifdef RTC_ENABLE_H265 + } else if (packet->codec() == kVideoCodecH265) { + std::vector nalu_indices = + H265::FindNaluIndices(packet->video_payload); + for (const auto& nalu_index : nalu_indices) { + uint8_t nalu_type = H265::ParseNaluType( + packet->video_payload.cdata()[nalu_index.payload_start_offset]); + has_idr |= (nalu_type >= H265::NaluType::kBlaWLp && + nalu_type <= H265::NaluType::kRsvIrapVcl23); + has_vps |= nalu_type == H265::NaluType::kVps; + has_sps |= nalu_type == H265::NaluType::kSps; + has_pps |= nalu_type == H265::NaluType::kPps; + } + if (has_idr) { + if (!has_vps || !has_sps || !has_pps) { + return false; + } + } +#endif // RTC_ENABLE_H265 + } + + width = std::max(packet->video_header.width, width); + height = std::max(packet->video_header.height, height); + } + + for (int64_t seq_num = start_seq_num_unwrapped; + seq_num <= end_sequence_number_unwrapped; ++seq_num) { + auto& packet = GetPacket(seq_num); + + packet->video_header.is_first_packet_in_frame = + (seq_num == start_seq_num_unwrapped); + packet->video_header.is_last_packet_in_frame = + (seq_num == end_sequence_number_unwrapped); + + if (packet->video_header.is_first_packet_in_frame) { + if (width > 0 && height > 0) { + packet->video_header.width = width; + packet->video_header.height = height; + } + + packet->video_header.frame_type = has_idr + ? VideoFrameType::kVideoFrameKey + : VideoFrameType::kVideoFrameDelta; + } + + // Only applies to H.264 because start code is inserted by depacktizer for + // H.265 and out-of-band parameter sets is not supported by H.265. + if (packet->codec() == kVideoCodecH264) { + if (!FixH264Packet(*packet)) { + // The buffer is not cleared actually, but a key frame request is + // needed. 
+ result.buffer_cleared = true; + return false; + } + } + + result.packets.push_back(std::move(packet)); + } + + return true; +} + +void H26xPacketBuffer::SetSpropParameterSets( + const std::string& sprop_parameter_sets) { + if (!h264_idr_only_keyframes_allowed_) { + RTC_LOG(LS_WARNING) << "Ignore sprop parameter sets because IDR only " + "keyframe is not allowed."; + return; + } + H264SpropParameterSets sprop_decoder; + if (!sprop_decoder.DecodeSprop(sprop_parameter_sets)) { + return; + } + InsertSpsPpsNalus(sprop_decoder.sps_nalu(), sprop_decoder.pps_nalu()); +} + +void H26xPacketBuffer::InsertSpsPpsNalus(const std::vector& sps, + const std::vector& pps) { + RTC_CHECK(h264_idr_only_keyframes_allowed_); + constexpr size_t kNaluHeaderOffset = 1; + if (sps.size() < kNaluHeaderOffset) { + RTC_LOG(LS_WARNING) << "SPS size " << sps.size() << " is smaller than " + << kNaluHeaderOffset; + return; + } + if ((sps[0] & 0x1f) != H264::NaluType::kSps) { + RTC_LOG(LS_WARNING) << "SPS Nalu header missing"; + return; + } + if (pps.size() < kNaluHeaderOffset) { + RTC_LOG(LS_WARNING) << "PPS size " << pps.size() << " is smaller than " + << kNaluHeaderOffset; + return; + } + if ((pps[0] & 0x1f) != H264::NaluType::kPps) { + RTC_LOG(LS_WARNING) << "SPS Nalu header missing"; + return; + } + std::optional parsed_sps = SpsParser::ParseSps( + ArrayView(sps).subview(kNaluHeaderOffset)); + std::optional parsed_pps = PpsParser::ParsePps( + ArrayView(pps).subview(kNaluHeaderOffset)); + + if (!parsed_sps) { + RTC_LOG(LS_WARNING) << "Failed to parse SPS."; + } + + if (!parsed_pps) { + RTC_LOG(LS_WARNING) << "Failed to parse PPS."; + } + + if (!parsed_pps || !parsed_sps) { + return; + } + + SpsInfo sps_info; + sps_info.size = sps.size(); + sps_info.width = parsed_sps->width; + sps_info.height = parsed_sps->height; + uint8_t* sps_data = new uint8_t[sps_info.size]; + memcpy(sps_data, sps.data(), sps_info.size); + sps_info.payload.reset(sps_data); + sps_data_[parsed_sps->id] = std::move(sps_info); + + PpsInfo pps_info; + pps_info.size = pps.size(); + pps_info.sps_id = parsed_pps->sps_id; + uint8_t* pps_data = new uint8_t[pps_info.size]; + memcpy(pps_data, pps.data(), pps_info.size); + pps_info.payload.reset(pps_data); + pps_data_[parsed_pps->id] = std::move(pps_info); + + RTC_LOG(LS_INFO) << "Inserted SPS id " << parsed_sps->id << " and PPS id " + << parsed_pps->id << " (referencing SPS " + << parsed_pps->sps_id << ")"; +} + +// TODO(bugs.webrtc.org/13157): Update the H264 depacketizer so we don't have to +// fiddle with the payload at this point. +bool H26xPacketBuffer::FixH264Packet(Packet& packet) { + constexpr uint8_t kStartCode[] = {0, 0, 0, 1}; + + RTPVideoHeader& video_header = packet.video_header; + RTPVideoHeaderH264& h264_header = + std::get(video_header.video_type_header); + + CopyOnWriteBuffer result; + + if (h264_idr_only_keyframes_allowed_) { + // Check if sps and pps insertion is needed. 
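SetSpropParameterSets() above is the H.264-only hook for supplying SPS/PPS out of band when a stream sends IDR-only keyframes; it runs the string through H264SpropParameterSets and stores the result via InsertSpsPpsNalus(). A minimal wiring sketch; ConfigureOutOfBandParameterSets is hypothetical, and fetching the fmtp attribute from the SDP is assumed to happen elsewhere. For reference, the RFC 3984 example value used by the unit tests below, "Z0IACpZTBYmI,aMljiA==", base64-decodes to the SPS bytes 67 42 00 0A 96 53 05 89 88 and PPS bytes 68 C9 63 88 that the tests hard-code as kExampleSpropRawSps and kExampleSpropRawPps.

#include <string>

#include "modules/video_coding/h26x_packet_buffer.h"

namespace webrtc {

// Forward the SDP fmtp attribute "sprop-parameter-sets" to the packet buffer.
// This is effectively a no-op unless the buffer was constructed with
// h264_idr_only_keyframes_allowed == true (see the early return above).
void ConfigureOutOfBandParameterSets(H26xPacketBuffer& packet_buffer,
                                     const std::string& sprop_fmtp_value) {
  packet_buffer.SetSpropParameterSets(sprop_fmtp_value);
}

}  // namespace webrtc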
+ bool prepend_sps_pps = false; + auto sps = sps_data_.end(); + auto pps = pps_data_.end(); + + for (const NaluInfo& nalu : h264_header.nalus) { + switch (nalu.type) { + case H264::NaluType::kSps: { + SpsInfo& sps_info = sps_data_[nalu.sps_id]; + sps_info.width = video_header.width; + sps_info.height = video_header.height; + break; + } + case H264::NaluType::kPps: { + pps_data_[nalu.pps_id].sps_id = nalu.sps_id; + break; + } + case H264::NaluType::kIdr: { + // If this is the first packet of an IDR, make sure we have the + // required SPS/PPS and also calculate how much extra space we need + // in the buffer to prepend the SPS/PPS to the bitstream with start + // codes. + if (video_header.is_first_packet_in_frame) { + if (nalu.pps_id == -1) { + RTC_LOG(LS_WARNING) << "No PPS id in IDR nalu."; + return false; + } + + pps = pps_data_.find(nalu.pps_id); + if (pps == pps_data_.end()) { + RTC_LOG(LS_WARNING) + << "No PPS with id << " << nalu.pps_id << " received"; + return false; + } + + sps = sps_data_.find(pps->second.sps_id); + if (sps == sps_data_.end()) { + RTC_LOG(LS_WARNING) + << "No SPS with id << " << pps->second.sps_id << " received"; + return false; + } + + // Since the first packet of every keyframe should have its width + // and height set we set it here in the case of it being supplied + // out of band. + video_header.width = sps->second.width; + video_header.height = sps->second.height; + + // If the SPS/PPS was supplied out of band then we will have saved + // the actual bitstream in `data`. + if (sps->second.payload && pps->second.payload) { + RTC_DCHECK_GT(sps->second.size, 0); + RTC_DCHECK_GT(pps->second.size, 0); + prepend_sps_pps = true; + } + } + break; + } + default: + break; + } + } + + RTC_CHECK(!prepend_sps_pps || + (sps != sps_data_.end() && pps != pps_data_.end())); + + // Insert SPS and PPS if they are missing. + if (prepend_sps_pps) { + // Insert SPS. + result.AppendData(kStartCode); + result.AppendData(sps->second.payload.get(), sps->second.size); + + // Insert PPS. + result.AppendData(kStartCode); + result.AppendData(pps->second.payload.get(), pps->second.size); + + // Update codec header to reflect the newly added SPS and PPS. + h264_header.nalus.push_back( + {.type = H264::NaluType::kSps, .sps_id = sps->first, .pps_id = -1}); + h264_header.nalus.push_back({.type = H264::NaluType::kPps, + .sps_id = sps->first, + .pps_id = pps->first}); + } + } + + // Insert start code. + switch (h264_header.packetization_type) { + case kH264StapA: { + const uint8_t* payload_end = + packet.video_payload.data() + packet.video_payload.size(); + const uint8_t* nalu_ptr = packet.video_payload.data() + 1; + while (nalu_ptr < payload_end - 1) { + // The first two bytes describe the length of the segment, where a + // segment is the nalu type plus nalu payload. 
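As in the tracker code earlier, the NAL units aggregated inside a STAP-A are prefixed by a 16-bit big-endian length; a short worked check of the shift-or read used just below, restated as an illustrative helper:

#include <cstdint>

// Big-endian 16-bit read, as used for STAP-A segment lengths below.
constexpr uint16_t ReadBigEndian16(uint8_t hi, uint8_t lo) {
  return static_cast<uint16_t>(hi << 8 | lo);
}

static_assert(ReadBigEndian16(0x00, 0x1A) == 26);
static_assert(ReadBigEndian16(0x01, 0x00) == 256);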
+ uint16_t segment_length = nalu_ptr[0] << 8 | nalu_ptr[1]; + nalu_ptr += 2; + + if (nalu_ptr + segment_length <= payload_end) { + result.AppendData(kStartCode); + result.AppendData(nalu_ptr, segment_length); + } + nalu_ptr += segment_length; + } + packet.video_payload = result; + return true; + } + + case kH264FuA: { + if (IsFirstPacketOfFragment(h264_header)) { + result.AppendData(kStartCode); + } + result.AppendData(packet.video_payload); + packet.video_payload = result; + return true; + } + + case kH264SingleNalu: { + result.AppendData(kStartCode); + result.AppendData(packet.video_payload); + packet.video_payload = result; + return true; + } + } + + RTC_DCHECK_NOTREACHED(); + return false; +} + +} // namespace webrtc diff --git a/modules/video_coding/h26x_packet_buffer.h b/modules/video_coding/h26x_packet_buffer.h new file mode 100644 index 0000000000..7d3b959580 --- /dev/null +++ b/modules/video_coding/h26x_packet_buffer.h @@ -0,0 +1,109 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_H26X_PACKET_BUFFER_H_ +#define MODULES_VIDEO_CODING_H26X_PACKET_BUFFER_H_ + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/base/attributes.h" +#include "modules/video_coding/packet_buffer.h" + +namespace webrtc { + +class H26xPacketBuffer { + public: + // The H26xPacketBuffer does the same job as the PacketBuffer but for H264 and + // H265 only. To make it fit in with surronding code the PacketBuffer + // input/output classes are used. + using Packet = video_coding::PacketBuffer::Packet; + using InsertResult = video_coding::PacketBuffer::InsertResult; + + // |h264_idr_only_keyframes_allowed| is ignored if H.265 is used. + explicit H26xPacketBuffer(bool h264_idr_only_keyframes_allowed); + + ABSL_MUST_USE_RESULT InsertResult + InsertPacket(std::unique_ptr packet); + ABSL_MUST_USE_RESULT InsertResult InsertPadding(uint16_t unwrapped_seq_num); + + // Out of band supplied codec parameters for H.264. + void SetSpropParameterSets(const std::string& sprop_parameter_sets); + + private: + // Stores PPS payload and the active SPS ID. + struct PpsInfo { + PpsInfo() = default; + PpsInfo(PpsInfo&& rhs) = default; + PpsInfo& operator=(PpsInfo&& rhs) = default; + ~PpsInfo() = default; + + // The value of sps_seq_parameter_set_id for the active SPS. + uint32_t sps_id = 0; + // Payload size. + size_t size = 0; + std::unique_ptr payload; + }; + + // Stores SPS payload and picture size. + struct SpsInfo { + SpsInfo() = default; + SpsInfo(SpsInfo&& rhs) = default; + SpsInfo& operator=(SpsInfo&& rhs) = default; + ~SpsInfo() = default; + + // The width and height of decoded pictures. + int width = -1; + int height = -1; + // Payload size. 
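Putting the pieces together, a hedged caller sketch for the class declared in this header: feed depacketized packets in as they arrive, hand any returned packets (complete frames, in decode order) downstream, and treat buffer_cleared as a keyframe-request signal. OnAssembledPackets() and RequestKeyFrame() are hypothetical stand-ins for the receiver's real plumbing.

#include <memory>
#include <utility>
#include <vector>

#include "modules/video_coding/h26x_packet_buffer.h"

namespace webrtc {

// Hypothetical downstream hooks, assumed to exist elsewhere.
void OnAssembledPackets(
    std::vector<std::unique_ptr<H26xPacketBuffer::Packet>> packets);
void RequestKeyFrame();

void OnDepacketizedPacket(H26xPacketBuffer& buffer,
                          std::unique_ptr<H26xPacketBuffer::Packet> packet) {
  H26xPacketBuffer::InsertResult result =
      buffer.InsertPacket(std::move(packet));
  if (!result.packets.empty()) {
    // Packets of one or more fully assembled frames, ready for the frame
    // assembly / decode side of the pipeline.
    OnAssembledPackets(std::move(result.packets));
  }
  if (result.buffer_cleared) {
    // Set when a keyframe cannot be fixed up (e.g. an IDR without usable
    // SPS/PPS); the usual reaction is to ask the sender for a new keyframe.
    RequestKeyFrame();
  }
}

}  // namespace webrtc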
+ size_t size = 0; + std::unique_ptr payload; + }; + + static constexpr int kBufferSize = 2048; + static constexpr int kNumTrackedSequences = 5; + + std::unique_ptr& GetPacket(int64_t unwrapped_seq_num); + bool BeginningOfStream(const Packet& packet) const; + InsertResult FindFrames(int64_t unwrapped_seq_num); + bool MaybeAssembleFrame(int64_t start_seq_num_unwrapped, + int64_t end_sequence_number_unwrapped, + InsertResult& result); + // Store SPS and PPS nalus. They will be used later when an IDR frame is + // received without SPS/PPS. + void InsertSpsPpsNalus(const std::vector& sps, + const std::vector& pps); + // Insert start code and paramter sets for H.264 payload, also update header + // if parameter sets are inserted. Return false if required SPS or PPS is not + // found. + bool FixH264Packet(Packet& packet); + + // Indicates whether IDR frames without SPS and PPS are allowed. + const bool h264_idr_only_keyframes_allowed_; + std::array, kBufferSize> buffer_; + std::array last_continuous_in_sequence_; + int64_t last_continuous_in_sequence_index_ = 0; + + // Map from pps_pic_parameter_set_id to the PPS payload associated with this + // ID. + std::map pps_data_; + // Map from sps_video_parameter_set_id to the SPS payload associated with this + // ID. + std::map sps_data_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_H26X_PACKET_BUFFER_H_ diff --git a/modules/video_coding/h26x_packet_buffer_unittest.cc b/modules/video_coding/h26x_packet_buffer_unittest.cc new file mode 100644 index 0000000000..c07de1f0a1 --- /dev/null +++ b/modules/video_coding/h26x_packet_buffer_unittest.cc @@ -0,0 +1,1410 @@ +/* + * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/h26x_packet_buffer.h" + +#include +#include +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/video/render_resolution.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" +#include "common_video/h264/h264_common.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/system/unused.h" +#include "test/gmock.h" +#include "test/gtest.h" +#ifdef RTC_ENABLE_H265 +#include "common_video/h265/h265_common.h" +#endif + +namespace webrtc { +namespace { + +using ::testing::ElementsAreArray; +using ::testing::Eq; +using ::testing::IsEmpty; +using ::testing::SizeIs; + +using H264::NaluType::kAud; +using H264::NaluType::kFuA; +using H264::NaluType::kIdr; +using H264::NaluType::kPps; +using H264::NaluType::kSlice; +using H264::NaluType::kSps; +using H264::NaluType::kStapA; + +constexpr int kBufferSize = 2048; +// Example sprop string from https://tools.ietf.org/html/rfc3984. 
+const char kExampleSpropString[] = "Z0IACpZTBYmI,aMljiA=="; +static const std::vector kExampleSpropRawSps{ + 0x67, 0x42, 0x00, 0x0A, 0x96, 0x53, 0x05, 0x89, 0x88}; +static const std::vector kExampleSpropRawPps{0x68, 0xC9, 0x63, 0x88}; + +std::vector StartCode() { + return {0, 0, 0, 1}; +} + +NaluInfo MakeNaluInfo(uint8_t type) { + NaluInfo res; + res.type = type; + res.sps_id = -1; + res.pps_id = -1; + return res; +} + +class H264Packet { + public: + explicit H264Packet(H264PacketizationTypes type); + + H264Packet& Idr(std::vector payload = {9, 9, 9}, int pps_id = -1); + H264Packet& Slice(std::vector payload = {9, 9, 9}); + H264Packet& Sps(std::vector payload = {9, 9, 9}, int sps_id = -1); + H264Packet& SpsWithResolution(RenderResolution resolution, + std::vector payload = {9, 9, 9}); + H264Packet& Pps(std::vector payload = {9, 9, 9}, + int pps_id = -1, + int sps_id = -1); + H264Packet& Aud(); + H264Packet& Marker(); + H264Packet& AsFirstFragment(); + H264Packet& AsFirstPacket(); + H264Packet& Time(uint32_t rtp_timestamp); + H264Packet& SeqNum(int64_t rtp_seq_num); + + std::unique_ptr Build(); + + private: + CopyOnWriteBuffer BuildFuaPayload() const; + CopyOnWriteBuffer BuildSingleNaluPayload() const; + CopyOnWriteBuffer BuildStapAPayload() const; + + RTPVideoHeaderH264& H264Header() { + return std::get(video_header_.video_type_header); + } + const RTPVideoHeaderH264& H264Header() const { + return std::get(video_header_.video_type_header); + } + + H264PacketizationTypes type_; + RTPVideoHeader video_header_; + bool first_fragment_ = false; + bool first_packet_ = false; + bool marker_bit_ = false; + uint32_t rtp_timestamp_ = 0; + int64_t rtp_seq_num_ = 0; + std::vector> nalu_payloads_; +}; + +H264Packet::H264Packet(H264PacketizationTypes type) : type_(type) { + video_header_.video_type_header.emplace(); +} + +H264Packet& H264Packet::Idr(std::vector payload, int pps_id) { + auto& h264_header = H264Header(); + auto nalu_info = MakeNaluInfo(kIdr); + nalu_info.pps_id = pps_id; + h264_header.nalus.push_back(nalu_info); + nalu_payloads_.push_back(std::move(payload)); + return *this; +} + +H264Packet& H264Packet::Slice(std::vector payload) { + auto& h264_header = H264Header(); + h264_header.nalus.push_back(MakeNaluInfo(kSlice)); + nalu_payloads_.push_back(std::move(payload)); + return *this; +} + +H264Packet& H264Packet::Sps(std::vector payload, int sps_id) { + auto& h264_header = H264Header(); + auto nalu_info = MakeNaluInfo(kSps); + nalu_info.pps_id = sps_id; + h264_header.nalus.push_back(nalu_info); + nalu_payloads_.push_back(std::move(payload)); + return *this; +} + +H264Packet& H264Packet::SpsWithResolution(RenderResolution resolution, + std::vector payload) { + auto& h264_header = H264Header(); + h264_header.nalus.push_back(MakeNaluInfo(kSps)); + video_header_.width = resolution.Width(); + video_header_.height = resolution.Height(); + nalu_payloads_.push_back(std::move(payload)); + return *this; +} + +H264Packet& H264Packet::Pps(std::vector payload, + int pps_id, + int sps_id) { + auto& h264_header = H264Header(); + auto nalu_info = MakeNaluInfo(kPps); + nalu_info.pps_id = pps_id; + nalu_info.sps_id = sps_id; + h264_header.nalus.push_back(nalu_info); + nalu_payloads_.push_back(std::move(payload)); + return *this; +} + +H264Packet& H264Packet::Aud() { + auto& h264_header = H264Header(); + h264_header.nalus.push_back(MakeNaluInfo(kAud)); + nalu_payloads_.push_back({}); + return *this; +} + +H264Packet& H264Packet::Marker() { + marker_bit_ = true; + return *this; +} + +H264Packet& 
H264Packet::AsFirstFragment() { + first_fragment_ = true; + return *this; +} + +H264Packet& H264Packet::AsFirstPacket() { + first_packet_ = true; + return *this; +} + +H264Packet& H264Packet::Time(uint32_t rtp_timestamp) { + rtp_timestamp_ = rtp_timestamp; + return *this; +} + +H264Packet& H264Packet::SeqNum(int64_t rtp_seq_num) { + rtp_seq_num_ = rtp_seq_num; + return *this; +} + +std::unique_ptr H264Packet::Build() { + auto res = std::make_unique(); + + auto& h264_header = H264Header(); + switch (type_) { + case kH264FuA: { + RTC_CHECK_EQ(h264_header.nalus.size(), 1); + res->video_payload = BuildFuaPayload(); + break; + } + case kH264SingleNalu: { + RTC_CHECK_EQ(h264_header.nalus.size(), 1); + res->video_payload = BuildSingleNaluPayload(); + break; + } + case kH264StapA: { + RTC_CHECK_GT(h264_header.nalus.size(), 1); + res->video_payload = BuildStapAPayload(); + break; + } + } + + if (type_ == kH264FuA && !first_fragment_) { + h264_header.nalus.clear(); + } + + h264_header.packetization_type = type_; + res->marker_bit = marker_bit_; + res->video_header = video_header_; + res->timestamp = rtp_timestamp_; + res->sequence_number = rtp_seq_num_; + res->video_header.codec = kVideoCodecH264; + res->video_header.is_first_packet_in_frame = first_packet_; + + return res; +} + +CopyOnWriteBuffer H264Packet::BuildFuaPayload() const { + return CopyOnWriteBuffer(nalu_payloads_[0]); +} + +CopyOnWriteBuffer H264Packet::BuildSingleNaluPayload() const { + CopyOnWriteBuffer res; + auto& h264_header = H264Header(); + res.AppendData(&h264_header.nalus[0].type, 1); + res.AppendData(nalu_payloads_[0]); + return res; +} + +CopyOnWriteBuffer H264Packet::BuildStapAPayload() const { + CopyOnWriteBuffer res; + + const uint8_t indicator = H264::NaluType::kStapA; + res.AppendData(&indicator, 1); + + auto& h264_header = H264Header(); + for (size_t i = 0; i < h264_header.nalus.size(); ++i) { + // The two first bytes indicates the nalu segment size. + uint8_t length_as_array[2] = { + 0, static_cast(nalu_payloads_[i].size() + 1)}; + res.AppendData(length_as_array); + + res.AppendData(&h264_header.nalus[i].type, 1); + res.AppendData(nalu_payloads_[i]); + } + return res; +} + +#ifdef RTC_ENABLE_H265 +class H265Packet { + public: + H265Packet() = default; + + H265Packet& Idr(std::vector payload = {9, 9, 9}); + H265Packet& Slice(H265::NaluType type, + std::vector payload = {9, 9, 9}); + H265Packet& Vps(std::vector payload = {9, 9, 9}); + H265Packet& Sps(std::vector payload = {9, 9, 9}); + H265Packet& SpsWithResolution(RenderResolution resolution, + std::vector payload = {9, 9, 9}); + H265Packet& Pps(std::vector payload = {9, 9, 9}); + H265Packet& Aud(); + H265Packet& Marker(); + H265Packet& AsFirstFragment(); + H265Packet& AsFirstPacket(); + H265Packet& Time(uint32_t rtp_timestamp); + H265Packet& SeqNum(int64_t rtp_seq_num); + + std::unique_ptr Build(); + + private: + H265Packet& StartCode(); + + RTPVideoHeader video_header_; + bool first_fragment_ = false; + bool first_packet_ = false; + bool marker_bit_ = false; + uint32_t rtp_timestamp_ = 0; + uint16_t rtp_seq_num_ = 0; + std::vector> nalu_payloads_; +}; + +H265Packet& H265Packet::Idr(std::vector payload) { + return Slice(H265::NaluType::kIdrNLp, std::move(payload)); +} + +H265Packet& H265Packet::Slice(H265::NaluType type, + std::vector payload) { + StartCode(); + // Nalu header. Assume layer ID is 0 and TID is 2. 
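The two bytes built below follow the H.265 NAL unit header layout: forbidden_zero_bit(1), nal_unit_type(6), nuh_layer_id(6), nuh_temporal_id_plus1(3). With nuh_layer_id 0 the first byte is just the type shifted left by one and the second byte carries nuh_temporal_id_plus1 (0x02 below). A spelled-out sketch under that layer-id-0 assumption; the value 20 is the spec number for IDR_N_LP, matching the kIdrNLp name used in these tests.

#include <array>
#include <cstdint>

// Builds the 2-byte H.265 NAL unit header for nuh_layer_id == 0.
constexpr std::array<uint8_t, 2> MakeH265NaluHeader(
    uint8_t nal_unit_type, uint8_t nuh_temporal_id_plus1) {
  // forbidden_zero_bit(1) nal_unit_type(6) nuh_layer_id(6) temporal_id_plus1(3)
  return {static_cast<uint8_t>(nal_unit_type << 1), nuh_temporal_id_plus1};
}

static_assert(MakeH265NaluHeader(/*IDR_N_LP=*/20, 2)[0] == 0x28);
static_assert(MakeH265NaluHeader(/*IDR_N_LP=*/20, 2)[1] == 0x02);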
+ nalu_payloads_.push_back({static_cast(type << 1), 0x02}); + nalu_payloads_.push_back(std::move(payload)); + return *this; +} + +H265Packet& H265Packet::Vps(std::vector payload) { + return Slice(H265::NaluType::kVps, std::move(payload)); +} + +H265Packet& H265Packet::Sps(std::vector payload) { + return Slice(H265::NaluType::kSps, std::move(payload)); +} + +H265Packet& H265Packet::SpsWithResolution(RenderResolution resolution, + std::vector payload) { + video_header_.width = resolution.Width(); + video_header_.height = resolution.Height(); + return Sps(std::move(payload)); +} + +H265Packet& H265Packet::Pps(std::vector payload) { + return Slice(H265::NaluType::kPps, std::move(payload)); +} + +H265Packet& H265Packet::Aud() { + return Slice(H265::NaluType::kAud, {}); +} + +H265Packet& H265Packet::Marker() { + marker_bit_ = true; + return *this; +} + +H265Packet& H265Packet::StartCode() { + nalu_payloads_.push_back({0x00, 0x00, 0x00, 0x01}); + return *this; +} + +std::unique_ptr H265Packet::Build() { + auto res = std::make_unique(); + res->marker_bit = marker_bit_; + res->video_header = video_header_; + res->timestamp = rtp_timestamp_; + res->sequence_number = rtp_seq_num_; + res->video_header.codec = kVideoCodecH265; + res->video_payload = CopyOnWriteBuffer(); + res->video_header.is_first_packet_in_frame = first_packet_; + for (const auto& payload : nalu_payloads_) { + res->video_payload.AppendData(payload); + } + + return res; +} + +H265Packet& H265Packet::AsFirstFragment() { + first_fragment_ = true; + return *this; +} + +H265Packet& H265Packet::AsFirstPacket() { + first_packet_ = true; + return *this; +} + +H265Packet& H265Packet::Time(uint32_t rtp_timestamp) { + rtp_timestamp_ = rtp_timestamp; + return *this; +} + +H265Packet& H265Packet::SeqNum(int64_t rtp_seq_num) { + rtp_seq_num_ = rtp_seq_num; + return *this; +} +#endif + +ArrayView PacketPayload( + const std::unique_ptr& packet) { + return packet->video_payload; +} + +std::vector FlatVector( + const std::vector>& elems) { + std::vector res; + for (const auto& elem : elems) { + res.insert(res.end(), elem.begin(), elem.end()); + } + return res; +} + +TEST(H26xPacketBufferTest, IdrOnlyKeyframeWithSprop) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/true); + packet_buffer.SetSpropParameterSets(kExampleSpropString); + + auto packets = packet_buffer + .InsertPacket(H264Packet(kH264SingleNalu) + .Idr({1, 2, 3}, 0) + .AsFirstPacket() + .Marker() + .Build()) + .packets; + EXPECT_THAT(packets, SizeIs(1)); + EXPECT_THAT(PacketPayload(packets[0]), + ElementsAreArray(FlatVector({StartCode(), + kExampleSpropRawSps, + StartCode(), + kExampleSpropRawPps, + StartCode(), + {kIdr, 1, 2, 3}}))); +} + +TEST(H26xPacketBufferTest, IdrOnlyKeyframeWithoutSprop) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/true); + + // Cannot fix biststream by prepending SPS and PPS because no sprop string is + // available. Request a key frame. + EXPECT_TRUE(packet_buffer + .InsertPacket(H264Packet(kH264SingleNalu) + .Idr({9, 9, 9}, 0) + .AsFirstPacket() + .Marker() + .Build()) + .buffer_cleared); +} + +TEST(H26xPacketBufferTest, IdrOnlyKeyframeWithSpropAndUnknownPpsId) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/true); + packet_buffer.SetSpropParameterSets(kExampleSpropString); + + // Cannot fix biststream because sprop string doesn't contain a PPS with given + // ID. Request a key frame. 
+ EXPECT_TRUE(packet_buffer + .InsertPacket(H264Packet(kH264SingleNalu) + .Idr({9, 9, 9}, 1) + .AsFirstPacket() + .Marker() + .Build()) + .buffer_cleared); +} + +TEST(H26xPacketBufferTest, IdrOnlyKeyframeInTheMiddle) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/true); + packet_buffer.SetSpropParameterSets(kExampleSpropString); + + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264SingleNalu) + .Sps({1, 2, 3}, 1) + .SeqNum(0) + .Time(0) + .AsFirstPacket() + .Build())); + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264SingleNalu) + .Pps({4, 5, 6}, 1, 1) + .SeqNum(1) + .Time(0) + .AsFirstPacket() + .Build())); + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264SingleNalu) + .Idr({7, 8, 9}, 1) + .SeqNum(2) + .Time(0) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + SizeIs(3)); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264SingleNalu) + .Slice() + .SeqNum(3) + .Time(1) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + SizeIs(1)); + + auto packets = packet_buffer + .InsertPacket(H264Packet(kH264SingleNalu) + .Idr({10, 11, 12}, 0) + .SeqNum(4) + .Time(2) + .AsFirstPacket() + .Marker() + .Build()) + .packets; + EXPECT_THAT(packets, SizeIs(1)); + EXPECT_THAT(PacketPayload(packets[0]), + ElementsAreArray(FlatVector({StartCode(), + kExampleSpropRawSps, + StartCode(), + kExampleSpropRawPps, + StartCode(), + {kIdr, 10, 11, 12}}))); +} + +TEST(H26xPacketBufferTest, IdrIsNotKeyframe) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264SingleNalu) + .Idr() + .AsFirstPacket() + .Marker() + .Build()) + .packets, + IsEmpty()); +} + +TEST(H26xPacketBufferTest, IdrIsKeyframeFuaRequiresFirstFragmet) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/true); + packet_buffer.SetSpropParameterSets(kExampleSpropString); + + // Not marked as the first fragment + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264FuA) + .Idr() + .SeqNum(0) + .Time(0) + .AsFirstPacket() + .Build()) + .packets, + IsEmpty()); + + EXPECT_THAT( + packet_buffer + .InsertPacket( + H264Packet(kH264FuA).Idr().SeqNum(1).Time(0).Marker().Build()) + .packets, + IsEmpty()); + + // Marked as first fragment + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264FuA) + .Idr({9, 9, 9}, 0) + .SeqNum(2) + .Time(1) + .AsFirstFragment() + .AsFirstPacket() + .Build()) + .packets, + IsEmpty()); + + EXPECT_THAT( + packet_buffer + .InsertPacket( + H264Packet(kH264FuA).Idr().SeqNum(3).Time(1).Marker().Build()) + .packets, + SizeIs(2)); +} + +TEST(H26xPacketBufferTest, SpsPpsIdrIsKeyframeSingleNalus) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264SingleNalu) + .Sps() + .SeqNum(0) + .Time(0) + .AsFirstPacket() + .Build())); + RTC_UNUSED(packet_buffer.InsertPacket( + H264Packet(kH264SingleNalu).Pps().SeqNum(1).Time(0).Build())); + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264SingleNalu) + .Idr() + .SeqNum(2) + .Time(0) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + SizeIs(3)); +} + +TEST(H26xPacketBufferTest, SpsPpsIdrIsKeyframeIgnoresSprop) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + // When h264_allow_idr_only_keyframes is false, sprop string should be + // ignored. Use in band parameter sets. 
+ packet_buffer.SetSpropParameterSets(kExampleSpropString); + + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264SingleNalu) + .Sps({1, 2, 3}, 0) + .SeqNum(0) + .Time(0) + .AsFirstPacket() + .Build())); + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264SingleNalu) + .Pps({4, 5, 6}, 0, 0) + .SeqNum(1) + .Time(0) + .AsFirstPacket() + .Build())); + auto packets = packet_buffer + .InsertPacket(H264Packet(kH264SingleNalu) + .Idr({7, 8, 9}, 0) + .SeqNum(2) + .Time(0) + .AsFirstPacket() + .Marker() + .Build()) + .packets; + EXPECT_THAT(packets, SizeIs(3)); + EXPECT_THAT(PacketPayload(packets[0]), + ElementsAreArray(FlatVector({StartCode(), {kSps, 1, 2, 3}}))); + EXPECT_THAT(PacketPayload(packets[1]), + ElementsAreArray(FlatVector({StartCode(), {kPps, 4, 5, 6}}))); + EXPECT_THAT(PacketPayload(packets[2]), + ElementsAreArray(FlatVector({StartCode(), {kIdr, 7, 8, 9}}))); +} + +TEST(H26xPacketBufferTest, PpsIdrIsNotKeyframeSingleNalus) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264SingleNalu) + .Pps() + .SeqNum(0) + .Time(0) + .AsFirstPacket() + .Build())); + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264SingleNalu) + .Idr() + .SeqNum(1) + .Time(0) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + IsEmpty()); +} + +TEST(H26xPacketBufferTest, SpsIdrIsNotKeyframeSingleNalus) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264SingleNalu) + .Sps() + .SeqNum(0) + .Time(0) + .AsFirstPacket() + .Build())); + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264SingleNalu) + .Idr() + .SeqNum(1) + .Time(0) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + IsEmpty()); +} + +TEST(H26xPacketBufferTest, SpsPpsIdrIsKeyframeStapA) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264StapA) + .Sps() + .Pps() + .Idr() + .SeqNum(0) + .Time(0) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + SizeIs(1)); +} + +TEST(H26xPacketBufferTest, PpsIdrIsNotKeyframeStapA) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264StapA) + .Pps() + .Idr() + .SeqNum(0) + .Time(0) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + IsEmpty()); +} + +TEST(H26xPacketBufferTest, SpsIdrIsNotKeyframeStapA) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264StapA) + .Sps() + .Idr() + .SeqNum(2) + .Time(2) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + IsEmpty()); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264StapA) + .Sps() + .Pps() + .Idr() + .SeqNum(3) + .Time(3) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + SizeIs(1)); +} + +TEST(H26xPacketBufferTest, InsertingSpsPpsLastCompletesKeyframe) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264SingleNalu) + .Idr() + .SeqNum(2) + .Time(1) + .AsFirstPacket() + .Marker() + .Build())); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264StapA) + .Sps() + .Pps() + .SeqNum(1) + .Time(1) + .AsFirstPacket() + .Build()) + .packets, + SizeIs(2)); +} + +TEST(H26xPacketBufferTest, InsertingMidFuaCompletesFrame) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + 
EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264StapA) + .Sps() + .Pps() + .Idr() + .SeqNum(0) + .Time(0) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + SizeIs(1)); + + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264FuA) + .Slice() + .SeqNum(1) + .Time(1) + .AsFirstFragment() + .AsFirstPacket() + .Build())); + RTC_UNUSED(packet_buffer.InsertPacket( + H264Packet(kH264FuA).Slice().SeqNum(3).Time(1).Marker().Build())); + EXPECT_THAT( + packet_buffer + .InsertPacket(H264Packet(kH264FuA).Slice().SeqNum(2).Time(1).Build()) + .packets, + SizeIs(3)); +} + +TEST(H26xPacketBufferTest, SeqNumJumpDoesNotCompleteFrame) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264StapA) + .Sps() + .Pps() + .Idr() + .SeqNum(0) + .Time(0) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + SizeIs(1)); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264FuA) + .Slice() + .SeqNum(1) + .Time(1) + .AsFirstPacket() + .Build()) + .packets, + IsEmpty()); + + // Add `kBufferSize` to make the index of the sequence number wrap and end up + // where the packet with sequence number 2 would have ended up. + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264FuA) + .Slice() + .SeqNum(2 + kBufferSize) + .Time(3) + .Marker() + .Build()) + .packets, + IsEmpty()); +} + +TEST(H26xPacketBufferTest, OldFramesAreNotCompletedAfterBufferWrap) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264SingleNalu) + .Slice() + .SeqNum(1) + .Time(1) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + IsEmpty()); + + // New keyframe, preceedes packet with sequence number 1 in the buffer. 
+ EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264StapA) + .Sps() + .Pps() + .Idr() + .SeqNum(kBufferSize) + .Time(kBufferSize) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + SizeIs(1)); +} + +TEST(H26xPacketBufferTest, OldPacketsDontBlockNewPackets) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264StapA) + .Sps() + .Pps() + .Idr() + .SeqNum(kBufferSize) + .Time(kBufferSize) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + SizeIs(1)); + + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264FuA) + .Slice() + .SeqNum(kBufferSize + 1) + .Time(kBufferSize + 1) + .AsFirstFragment() + .AsFirstPacket() + .Build())); + + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264FuA) + .Slice() + .SeqNum(kBufferSize + 3) + .Time(kBufferSize + 1) + .Marker() + .Build())); + EXPECT_THAT( + packet_buffer + .InsertPacket(H264Packet(kH264FuA).Slice().SeqNum(2).Time(2).Build()) + .packets, + IsEmpty()); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264FuA) + .Slice() + .SeqNum(kBufferSize + 2) + .Time(kBufferSize + 1) + .Build()) + .packets, + SizeIs(3)); +} + +TEST(H26xPacketBufferTest, OldPacketDoesntCompleteFrame) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264StapA) + .Sps() + .Pps() + .Idr() + .SeqNum(kBufferSize) + .Time(kBufferSize) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + SizeIs(1)); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264FuA) + .Slice() + .SeqNum(kBufferSize + 3) + .Time(kBufferSize + 1) + .Marker() + .Build()) + .packets, + IsEmpty()); + + EXPECT_THAT( + packet_buffer + .InsertPacket( + H264Packet(kH264FuA).Slice().SeqNum(2).Time(2).Marker().Build()) + .packets, + IsEmpty()); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264FuA) + .Slice() + .SeqNum(kBufferSize + 1) + .Time(kBufferSize + 1) + .AsFirstFragment() + .AsFirstPacket() + .Build()) + .packets, + IsEmpty()); +} + +TEST(H26xPacketBufferTest, FrameBoundariesAreSet) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + auto key = packet_buffer.InsertPacket(H264Packet(kH264StapA) + .Sps() + .Pps() + .Idr() + .SeqNum(1) + .Time(1) + .Marker() + .Build()); + + ASSERT_THAT(key.packets, SizeIs(1)); + EXPECT_TRUE(key.packets[0]->video_header.is_first_packet_in_frame); + EXPECT_TRUE(key.packets[0]->video_header.is_last_packet_in_frame); + + RTC_UNUSED(packet_buffer.InsertPacket( + H264Packet(kH264FuA).Slice().SeqNum(2).Time(2).Build())); + RTC_UNUSED(packet_buffer.InsertPacket( + H264Packet(kH264FuA).Slice().SeqNum(3).Time(2).Build())); + auto delta = packet_buffer.InsertPacket( + H264Packet(kH264FuA).Slice().SeqNum(4).Time(2).Marker().Build()); + + ASSERT_THAT(delta.packets, SizeIs(3)); + EXPECT_TRUE(delta.packets[0]->video_header.is_first_packet_in_frame); + EXPECT_FALSE(delta.packets[0]->video_header.is_last_packet_in_frame); + + EXPECT_FALSE(delta.packets[1]->video_header.is_first_packet_in_frame); + EXPECT_FALSE(delta.packets[1]->video_header.is_last_packet_in_frame); + + EXPECT_FALSE(delta.packets[2]->video_header.is_first_packet_in_frame); + EXPECT_TRUE(delta.packets[2]->video_header.is_last_packet_in_frame); +} + +TEST(H26xPacketBufferTest, ResolutionSetOnFirstPacket) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264SingleNalu) + .Aud() + .SeqNum(1) + 
.AsFirstPacket() + .Time(1) + .Build())); + auto res = packet_buffer.InsertPacket(H264Packet(kH264StapA) + .SpsWithResolution({320, 240}) + .Pps() + .Idr() + .SeqNum(2) + .AsFirstPacket() + .Time(1) + .Marker() + .Build()); + + ASSERT_THAT(res.packets, SizeIs(2)); + EXPECT_THAT(res.packets[0]->video_header.width, Eq(320)); + EXPECT_THAT(res.packets[0]->video_header.height, Eq(240)); +} + +TEST(H26xPacketBufferTest, KeyframeAndDeltaFrameSetOnFirstPacket) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264SingleNalu) + .Aud() + .SeqNum(1) + .AsFirstPacket() + .Time(1) + .Build())); + auto key = packet_buffer.InsertPacket(H264Packet(kH264StapA) + .Sps() + .Pps() + .Idr() + .SeqNum(2) + .AsFirstPacket() + .Time(1) + .Marker() + .Build()); + + auto delta = packet_buffer.InsertPacket(H264Packet(kH264SingleNalu) + .Slice() + .SeqNum(3) + .Time(2) + .AsFirstPacket() + .Marker() + .Build()); + + ASSERT_THAT(key.packets, SizeIs(2)); + EXPECT_THAT(key.packets[0]->video_header.frame_type, + Eq(VideoFrameType::kVideoFrameKey)); + ASSERT_THAT(delta.packets, SizeIs(1)); + EXPECT_THAT(delta.packets[0]->video_header.frame_type, + Eq(VideoFrameType::kVideoFrameDelta)); +} + +TEST(H26xPacketBufferTest, RtpSeqNumWrap) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264StapA) + .Sps() + .Pps() + .SeqNum(0xffff) + .Time(0) + .AsFirstPacket() + .Build())); + + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264FuA) + .Idr() + .SeqNum(0x1'0000) + .Time(0) + .AsFirstPacket() + .Build())); + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264FuA) + .Idr() + .SeqNum(0x1'0001) + .Time(0) + .Marker() + .Build()) + .packets, + SizeIs(3)); +} + +TEST(H26xPacketBufferTest, StapAFixedBitstream) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + auto packets = packet_buffer + .InsertPacket(H264Packet(kH264StapA) + .Sps({1, 2, 3}) + .Pps({4, 5, 6}) + .Idr({7, 8, 9}) + .SeqNum(0) + .Time(0) + .AsFirstPacket() + .Marker() + .Build()) + .packets; + + ASSERT_THAT(packets, SizeIs(1)); + EXPECT_THAT(PacketPayload(packets[0]), + ElementsAreArray(FlatVector({StartCode(), + {kSps, 1, 2, 3}, + StartCode(), + {kPps, 4, 5, 6}, + StartCode(), + {kIdr, 7, 8, 9}}))); +} + +TEST(H26xPacketBufferTest, SingleNaluFixedBitstream) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264SingleNalu) + .Sps({1, 2, 3}) + .SeqNum(0) + .Time(0) + .AsFirstPacket() + .Build())); + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264SingleNalu) + .Pps({4, 5, 6}) + .SeqNum(1) + .Time(0) + .AsFirstPacket() + .Build())); + auto packets = packet_buffer + .InsertPacket(H264Packet(kH264SingleNalu) + .Idr({7, 8, 9}) + .SeqNum(2) + .Time(0) + .AsFirstPacket() + .Marker() + .Build()) + .packets; + + ASSERT_THAT(packets, SizeIs(3)); + EXPECT_THAT(PacketPayload(packets[0]), + ElementsAreArray(FlatVector({StartCode(), {kSps, 1, 2, 3}}))); + EXPECT_THAT(PacketPayload(packets[1]), + ElementsAreArray(FlatVector({StartCode(), {kPps, 4, 5, 6}}))); + EXPECT_THAT(PacketPayload(packets[2]), + ElementsAreArray(FlatVector({StartCode(), {kIdr, 7, 8, 9}}))); +} + +TEST(H26xPacketBufferTest, StapaAndFuaFixedBitstream) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264StapA) + .Sps({1, 2, 3}) + .Pps({4, 5, 6}) + 
.SeqNum(0) + .Time(0) + .AsFirstPacket() + .Build())); + RTC_UNUSED(packet_buffer.InsertPacket(H264Packet(kH264FuA) + .Idr({8, 8, 8}) + .SeqNum(1) + .Time(0) + .AsFirstFragment() + .AsFirstPacket() + .Build())); + auto packets = packet_buffer + .InsertPacket(H264Packet(kH264FuA) + .Idr({9, 9, 9}) + .SeqNum(2) + .Time(0) + .Marker() + .Build()) + .packets; + + ASSERT_THAT(packets, SizeIs(3)); + EXPECT_THAT( + PacketPayload(packets[0]), + ElementsAreArray(FlatVector( + {StartCode(), {kSps, 1, 2, 3}, StartCode(), {kPps, 4, 5, 6}}))); + EXPECT_THAT(PacketPayload(packets[1]), + ElementsAreArray(FlatVector({StartCode(), {8, 8, 8}}))); + // Third is a continuation of second, so only the payload is expected. + EXPECT_THAT(PacketPayload(packets[2]), + ElementsAreArray(FlatVector({{9, 9, 9}}))); +} + +TEST(H26xPacketBufferTest, FullPacketBufferDoesNotBlockKeyframe) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + for (int i = 0; i < kBufferSize; ++i) { + EXPECT_THAT( + packet_buffer + .InsertPacket( + H264Packet(kH264SingleNalu).Slice().SeqNum(i).Time(0).Build()) + .packets, + IsEmpty()); + } + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264StapA) + .Sps() + .Pps() + .Idr() + .SeqNum(kBufferSize) + .Time(1) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + SizeIs(1)); +} + +TEST(H26xPacketBufferTest, AssembleFrameAfterReordering) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264StapA) + .Sps() + .Pps() + .Idr() + .SeqNum(2) + .Time(2) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + SizeIs(1)); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264SingleNalu) + .Slice() + .SeqNum(1) + .Time(1) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + IsEmpty()); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264StapA) + .Sps() + .Pps() + .Idr() + .SeqNum(0) + .Time(0) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + SizeIs(2)); +} + +TEST(H26xPacketBufferTest, AssembleFrameAfterLoss) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264StapA) + .Sps() + .Pps() + .Idr() + .SeqNum(0) + .Time(0) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + SizeIs(1)); + + EXPECT_THAT(packet_buffer + .InsertPacket(H264Packet(kH264StapA) + .Sps() + .Pps() + .Idr() + .SeqNum(2) + .Time(2) + .AsFirstPacket() + .Marker() + .Build()) + .packets, + SizeIs(1)); +} + +#ifdef RTC_ENABLE_H265 +TEST(H26xPacketBufferTest, H265VpsSpsPpsIdrIsKeyframe) { + H26xPacketBuffer packet_buffer(/*allow_idr_only_keyframes=*/false); + + EXPECT_THAT(packet_buffer + .InsertPacket(H265Packet() + .Vps() + .Sps() + .Pps() + .Idr() + .AsFirstPacket() + .Marker() + .Build()) + .packets, + SizeIs(1)); +} + +TEST(H26xPacketBufferTest, H265IrapIsNotKeyframe) { + std::vector irap_types = { + H265::NaluType::kBlaWLp, H265::NaluType::kBlaWRadl, + H265::NaluType::kBlaNLp, H265::NaluType::kIdrWRadl, + H265::NaluType::kIdrNLp, H265::NaluType::kCra, + H265::NaluType::kRsvIrapVcl23}; + for (const H265::NaluType type : irap_types) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT( + packet_buffer + .InsertPacket( + H265Packet().Slice(type).AsFirstPacket().Marker().Build()) + .packets, + IsEmpty()); + } +} + +TEST(H26xPacketBufferTest, H265IdrIsNotKeyFrame) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT( + 
packet_buffer + .InsertPacket(H265Packet().Idr().AsFirstPacket().Marker().Build()) + .packets, + IsEmpty()); +} + +TEST(H26xPacketBufferTest, H265IdrIsNotKeyFrameEvenWithSprop) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/true); + packet_buffer.SetSpropParameterSets(kExampleSpropString); + + EXPECT_THAT( + packet_buffer + .InsertPacket(H265Packet().Idr().AsFirstPacket().Marker().Build()) + .packets, + IsEmpty()); +} + +TEST(H26xPacketBufferTest, H265SpsPpsIdrIsNotKeyFrame) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT( + packet_buffer + .InsertPacket( + H265Packet().Sps().Pps().Idr().AsFirstPacket().Marker().Build()) + .packets, + IsEmpty()); +} + +TEST(H26xPacketBufferTest, H265VpsPpsIdrIsNotKeyFrame) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT( + packet_buffer + .InsertPacket( + H265Packet().Vps().Pps().Idr().AsFirstPacket().Marker().Build()) + .packets, + IsEmpty()); +} + +TEST(H26xPacketBufferTest, H265VpsSpsIdrIsNotKeyFrame) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT( + packet_buffer + .InsertPacket( + H265Packet().Vps().Sps().Idr().AsFirstPacket().Marker().Build()) + .packets, + IsEmpty()); +} + +TEST(H26xPacketBufferTest, H265VpsIdrIsNotKeyFrame) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT(packet_buffer + .InsertPacket( + H265Packet().Vps().Idr().AsFirstPacket().Marker().Build()) + .packets, + IsEmpty()); +} + +TEST(H26xPacketBufferTest, H265SpsIdrIsNotKeyFrame) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT(packet_buffer + .InsertPacket( + H265Packet().Sps().Idr().AsFirstPacket().Marker().Build()) + .packets, + IsEmpty()); +} + +TEST(H26xPacketBufferTest, H265PpsIdrIsNotKeyFrame) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + EXPECT_THAT(packet_buffer + .InsertPacket( + H265Packet().Pps().Idr().AsFirstPacket().Marker().Build()) + .packets, + IsEmpty()); +} + +TEST(H26xPacketBufferTest, H265ResolutionSetOnSpsPacket) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + RTC_UNUSED(packet_buffer.InsertPacket( + H265Packet().Aud().SeqNum(1).Time(1).AsFirstPacket().Build())); + auto res = packet_buffer.InsertPacket(H265Packet() + .Vps() + .SpsWithResolution({320, 240}) + .Pps() + .Idr() + .SeqNum(2) + .Time(1) + .AsFirstPacket() + .Marker() + .Build()); + + ASSERT_THAT(res.packets, SizeIs(2)); + EXPECT_THAT(res.packets[0]->video_header.width, Eq(320)); + EXPECT_THAT(res.packets[0]->video_header.height, Eq(240)); +} + +TEST(H26xPacketBufferTest, H265InsertingVpsSpsPpsLastCompletesKeyframe) { + H26xPacketBuffer packet_buffer(/*h264_allow_idr_only_keyframes=*/false); + + RTC_UNUSED(packet_buffer.InsertPacket( + H265Packet().Idr().SeqNum(2).Time(1).Marker().Build())); + + EXPECT_THAT(packet_buffer + .InsertPacket(H265Packet() + .Vps() + .Sps() + .Pps() + .SeqNum(1) + .Time(1) + .AsFirstPacket() + .Build()) + .packets, + SizeIs(2)); +} +#endif // RTC_ENABLE_H265 + +} // namespace +} // namespace webrtc diff --git a/modules/video_coding/histogram.cc b/modules/video_coding/histogram.cc index 4e90b19eec..d9e3412de5 100644 --- a/modules/video_coding/histogram.cc +++ b/modules/video_coding/histogram.cc @@ -11,6 +11,7 @@ #include "modules/video_coding/histogram.h" #include +#include #include "rtc_base/checks.h" diff --git a/modules/video_coding/include/video_codec_initializer.h 
b/modules/video_coding/include/video_codec_initializer.h index 2d10ee45a8..d040001e6f 100644 --- a/modules/video_coding/include/video_codec_initializer.h +++ b/modules/video_coding/include/video_codec_initializer.h @@ -11,28 +11,21 @@ #ifndef MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODEC_INITIALIZER_H_ #define MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODEC_INITIALIZER_H_ -#include -#include #include +#include "api/field_trials_view.h" +#include "api/video_codecs/video_codec.h" #include "video/config/video_encoder_config.h" namespace webrtc { -class VideoCodec; - class VideoCodecInitializer { public: // Takes a VideoEncoderConfig and the VideoStream configuration and // translates them into the old school VideoCodec type. - static bool SetupCodec(const VideoEncoderConfig& config, - const std::vector& streams, - VideoCodec* codec); - - private: - static VideoCodec VideoEncoderConfigToVideoCodec( - const VideoEncoderConfig& config, - const std::vector& streams); + static VideoCodec SetupCodec(const FieldTrialsView& field_trials, + const VideoEncoderConfig& config, + const std::vector& streams); }; } // namespace webrtc diff --git a/modules/video_coding/include/video_codec_interface.cc b/modules/video_coding/include/video_codec_interface.cc index bd033b6c57..aa94dcfcc9 100644 --- a/modules/video_coding/include/video_codec_interface.cc +++ b/modules/video_coding/include/video_codec_interface.cc @@ -9,6 +9,10 @@ */ #include "modules/video_coding/include/video_codec_interface.h" +#include + +#include "api/video/video_codec_type.h" + namespace webrtc { CodecSpecificInfo::CodecSpecificInfo() : codecType(kVideoCodecGeneric) { memset(&codecSpecific, 0, sizeof(codecSpecific)); diff --git a/modules/video_coding/include/video_codec_interface.h b/modules/video_coding/include/video_codec_interface.h index c6522fcc6b..e1c6cd1514 100644 --- a/modules/video_coding/include/video_codec_interface.h +++ b/modules/video_coding/include/video_codec_interface.h @@ -11,17 +11,20 @@ #ifndef MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODEC_INTERFACE_H_ #define MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODEC_INTERFACE_H_ -#include - -#include "absl/types/optional.h" -#include "api/video/video_frame.h" +#include +#include +#include +#include +#include + +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_codec_type.h" #include "api/video_codecs/scalability_mode.h" -#include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_encoder.h" +#include "common_video/frame_instrumentation_data.h" #include "common_video/generic_frame_descriptor/generic_frame_info.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" -#include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -50,7 +53,9 @@ struct CodecSpecificInfoVP8 { size_t updatedBuffers[kBuffersCount]; size_t updatedBuffersCount; }; -static_assert(std::is_pod::value, ""); +static_assert(std::is_trivial_v && + std::is_standard_layout_v, + ""); // Hack alert - the code assumes that thisstruct is memset when constructed. struct CodecSpecificInfoVP9 { @@ -79,7 +84,9 @@ struct CodecSpecificInfoVP9 { uint8_t num_ref_pics; uint8_t p_diff[kMaxVp9RefPics]; }; -static_assert(std::is_pod::value, ""); +static_assert(std::is_trivial_v && + std::is_standard_layout_v, + ""); // Hack alert - the code assumes that thisstruct is memset when constructed. 
struct CodecSpecificInfoH264 { @@ -88,14 +95,18 @@ struct CodecSpecificInfoH264 { bool base_layer_sync; bool idr_frame; }; -static_assert(std::is_pod::value, ""); +static_assert(std::is_trivial_v && + std::is_standard_layout_v, + ""); union CodecSpecificInfoUnion { CodecSpecificInfoVP8 VP8; CodecSpecificInfoVP9 VP9; CodecSpecificInfoH264 H264; }; -static_assert(std::is_pod::value, ""); +static_assert(std::is_trivial_v && + std::is_standard_layout_v, + ""); // Note: if any pointers are added to this struct or its sub-structs, it // must be fitted with a copy-constructor. This is because it is copied @@ -108,9 +119,14 @@ struct RTC_EXPORT CodecSpecificInfo { VideoCodecType codecType; CodecSpecificInfoUnion codecSpecific; bool end_of_picture = true; - absl::optional generic_frame_info; - absl::optional template_structure; - absl::optional scalability_mode; + std::optional generic_frame_info; + std::optional template_structure; + std::optional scalability_mode; + + // Required for automatic corruption detection. + std::optional< + std::variant> + frame_instrumentation_data; }; } // namespace webrtc diff --git a/modules/video_coding/include/video_coding.h b/modules/video_coding/include/video_coding.h index ee9326d9fc..76fe2c3ad8 100644 --- a/modules/video_coding/include/video_coding.h +++ b/modules/video_coding/include/video_coding.h @@ -11,26 +11,22 @@ #ifndef MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODING_H_ #define MODULES_VIDEO_CODING_INCLUDE_VIDEO_CODING_H_ -#include "api/field_trials_view.h" -#include "api/video/video_frame.h" +#include +#include +#include + +#include "api/environment/environment.h" +#include "api/rtp_headers.h" #include "api/video_codecs/video_decoder.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/include/video_coding_defines.h" namespace webrtc { -class Clock; -class EncodedImageCallback; -class VideoDecoder; -class VideoEncoder; -struct CodecSpecificInfo; - class VideoCodingModule { public: - // DEPRECATED. - static VideoCodingModule* Create( - Clock* clock, - const FieldTrialsView* field_trials = nullptr); + [[deprecated]] static std::unique_ptr CreateDeprecated( + const Environment& env); virtual ~VideoCodingModule() = default; diff --git a/modules/video_coding/include/video_coding_defines.h b/modules/video_coding/include/video_coding_defines.h index cbd420732c..1a81d29bea 100644 --- a/modules/video_coding/include/video_coding_defines.h +++ b/modules/video_coding/include/video_coding_defines.h @@ -14,10 +14,12 @@ #include #include -#include "absl/types/optional.h" +#include + +#include "api/units/time_delta.h" #include "api/video/video_content_type.h" #include "api/video/video_frame.h" -#include "api/video/video_timing.h" +#include "api/video/video_frame_type.h" #include "api/video_codecs/video_decoder.h" namespace webrtc { @@ -50,11 +52,16 @@ enum VCMVideoProtection { // rendered. 
class VCMReceiveCallback { public: - virtual int32_t FrameToRender(VideoFrame& videoFrame, // NOLINT - absl::optional qp, - TimeDelta decode_time, - VideoContentType content_type, - VideoFrameType frame_type) = 0; + struct FrameToRender { + VideoFrame& video_frame; + std::optional qp; + TimeDelta decode_time; + VideoContentType content_type; + VideoFrameType frame_type; + std::optional corruption_score; + }; + + virtual int32_t OnFrameToRender(const FrameToRender& arguments) = 0; virtual void OnDroppedFrames(uint32_t frames_dropped); diff --git a/modules/video_coding/include/video_error_codes.h b/modules/video_coding/include/video_error_codes.h index 17146ce205..d7d54f3989 100644 --- a/modules/video_coding/include/video_error_codes.h +++ b/modules/video_coding/include/video_error_codes.h @@ -11,10 +11,6 @@ #ifndef MODULES_VIDEO_CODING_INCLUDE_VIDEO_ERROR_CODES_H_ #define MODULES_VIDEO_CODING_INCLUDE_VIDEO_ERROR_CODES_H_ -// NOTE: in sync with video_coding_module_defines.h - -// Define return values - #define WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT 5 #define WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME 4 #define WEBRTC_VIDEO_CODEC_NO_OUTPUT 1 diff --git a/modules/video_coding/include/video_error_codes_utils.cc b/modules/video_coding/include/video_error_codes_utils.cc new file mode 100644 index 0000000000..af1e69c4ac --- /dev/null +++ b/modules/video_coding/include/video_error_codes_utils.cc @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/include/video_error_codes_utils.h" + +#include + +#include "modules/video_coding/include/video_error_codes.h" + +namespace webrtc { + +const char* WebRtcVideoCodecErrorToString(int32_t error_code) { + switch (error_code) { + case WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT: + return "WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT"; + case WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME: + return "WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME"; + case WEBRTC_VIDEO_CODEC_NO_OUTPUT: + return "WEBRTC_VIDEO_CODEC_NO_OUTPUT"; + case WEBRTC_VIDEO_CODEC_ERROR: + return "WEBRTC_VIDEO_CODEC_ERROR"; + case WEBRTC_VIDEO_CODEC_MEMORY: + return "WEBRTC_VIDEO_CODEC_MEMORY"; + case WEBRTC_VIDEO_CODEC_ERR_PARAMETER: + return "WEBRTC_VIDEO_CODEC_ERR_PARAMETER"; + case WEBRTC_VIDEO_CODEC_TIMEOUT: + return "WEBRTC_VIDEO_CODEC_TIMEOUT"; + case WEBRTC_VIDEO_CODEC_UNINITIALIZED: + return "WEBRTC_VIDEO_CODEC_UNINITIALIZED"; + case WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE: + return "WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE"; + case WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED: + return "WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED"; + case WEBRTC_VIDEO_CODEC_ENCODER_FAILURE: + return "WEBRTC_VIDEO_CODEC_ENCODER_FAILURE"; + default: + return "WEBRTC_VIDEO_CODEC_UNKNOWN"; + } +} + +} // namespace webrtc diff --git a/modules/video_coding/include/video_error_codes_utils.h b/modules/video_coding/include/video_error_codes_utils.h new file mode 100644 index 0000000000..ae17e29636 --- /dev/null +++ b/modules/video_coding/include/video_error_codes_utils.h @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_INCLUDE_VIDEO_ERROR_CODES_UTILS_H_ +#define MODULES_VIDEO_CODING_INCLUDE_VIDEO_ERROR_CODES_UTILS_H_ + +#include + +namespace webrtc { + +const char* WebRtcVideoCodecErrorToString(int32_t error_code); + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_INCLUDE_VIDEO_ERROR_CODES_UTILS_H_ diff --git a/modules/video_coding/loss_notification_controller.cc b/modules/video_coding/loss_notification_controller.cc index 3377ab5a76..43d4b19f72 100644 --- a/modules/video_coding/loss_notification_controller.cc +++ b/modules/video_coding/loss_notification_controller.cc @@ -12,7 +12,11 @@ #include +#include + #include "api/array_view.h" +#include "api/sequence_checker.h" +#include "modules/include/module_common_types.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/sequence_number_util.h" @@ -110,7 +114,7 @@ void LossNotificationController::OnAssembledFrame( uint16_t first_seq_num, int64_t frame_id, bool discardable, - rtc::ArrayView frame_dependencies) { + ArrayView frame_dependencies) { RTC_DCHECK_RUN_ON(&sequence_checker_); DiscardOldInformation(); // Prevent memory overconsumption. @@ -136,7 +140,7 @@ void LossNotificationController::DiscardOldInformation() { } bool LossNotificationController::AllDependenciesDecodable( - rtc::ArrayView frame_dependencies) const { + ArrayView frame_dependencies) const { RTC_DCHECK_RUN_ON(&sequence_checker_); // Due to packet reordering, frame buffering and asynchronous decoders, it is diff --git a/modules/video_coding/loss_notification_controller.h b/modules/video_coding/loss_notification_controller.h index ecba41267b..f6b99925e2 100644 --- a/modules/video_coding/loss_notification_controller.h +++ b/modules/video_coding/loss_notification_controller.h @@ -13,13 +13,14 @@ #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/sequence_checker.h" #include "modules/include/module_common_types.h" #include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -28,7 +29,7 @@ class LossNotificationController { struct FrameDetails { bool is_keyframe; int64_t frame_id; - rtc::ArrayView frame_dependencies; + ArrayView frame_dependencies; }; LossNotificationController(KeyFrameRequestSender* key_frame_request_sender, @@ -44,13 +45,13 @@ class LossNotificationController { void OnAssembledFrame(uint16_t first_seq_num, int64_t frame_id, bool discardable, - rtc::ArrayView frame_dependencies); + ArrayView frame_dependencies); private: void DiscardOldInformation(); bool AllDependenciesDecodable( - rtc::ArrayView frame_dependencies) const; + ArrayView frame_dependencies) const; // When the loss of a packet or the non-decodability of a frame is detected, // produces a key frame request or a loss notification. @@ -75,11 +76,11 @@ class LossNotificationController { RTC_GUARDED_BY(sequence_checker_); // Tracked to avoid processing repeated frames (buggy/malicious remote). - absl::optional last_received_frame_id_ + std::optional last_received_frame_id_ RTC_GUARDED_BY(sequence_checker_); // Tracked to avoid processing repeated packets. 
- absl::optional last_received_seq_num_ + std::optional last_received_seq_num_ RTC_GUARDED_BY(sequence_checker_); // Tracked in order to correctly report the potential-decodability of @@ -95,7 +96,7 @@ class LossNotificationController { explicit FrameInfo(uint16_t first_seq_num) : first_seq_num(first_seq_num) {} uint16_t first_seq_num; }; - absl::optional last_decodable_non_discardable_ + std::optional last_decodable_non_discardable_ RTC_GUARDED_BY(sequence_checker_); // Track which frames are decodable. Later frames are also decodable if diff --git a/modules/video_coding/loss_notification_controller_unittest.cc b/modules/video_coding/loss_notification_controller_unittest.cc index 9c4e715b4f..aba0933682 100644 --- a/modules/video_coding/loss_notification_controller_unittest.cc +++ b/modules/video_coding/loss_notification_controller_unittest.cc @@ -12,13 +12,16 @@ #include +#include #include +#include #include #include #include #include -#include "absl/types/optional.h" +#include "modules/include/module_common_types.h" +#include "rtc_base/checks.h" #include "test/gtest.h" namespace webrtc { @@ -35,7 +38,7 @@ struct Packet { Packet CreatePacket( bool first_in_frame, - bool last_in_frame, + bool /* last_in_frame */, uint16_t seq_num, uint16_t frame_id, bool is_key_frame, @@ -138,7 +141,7 @@ class LossNotificationControllerBaseTest : public ::testing::Test, } void ExpectKeyFrameRequest() { - EXPECT_EQ(LastLossNotification(), absl::nullopt); + EXPECT_EQ(LastLossNotification(), std::nullopt); EXPECT_TRUE(LastKeyFrameRequest()); } @@ -188,9 +191,9 @@ class LossNotificationControllerBaseTest : public ::testing::Test, return result; } - absl::optional LastLossNotification() { - const absl::optional result = last_loss_notification_; - last_loss_notification_ = absl::nullopt; + std::optional LastLossNotification() { + const std::optional result = last_loss_notification_; + last_loss_notification_ = std::nullopt; return result; } @@ -198,13 +201,13 @@ class LossNotificationControllerBaseTest : public ::testing::Test, bool key_frame_requested_; - absl::optional last_loss_notification_; + std::optional last_loss_notification_; // First packet of last frame. (Note that if a test skips the first packet // of a subsequent frame, OnAssembledFrame is not called, and so this is // note read. Therefore, it's not a problem if it is not cleared when // the frame changes.) 
- absl::optional previous_first_packet_in_frame_; + std::optional previous_first_packet_in_frame_; }; class LossNotificationControllerTest diff --git a/modules/video_coding/media_opt_util.cc b/modules/video_coding/media_opt_util.cc index 7580c95fc7..b0fb8f835a 100644 --- a/modules/video_coding/media_opt_util.cc +++ b/modules/video_coding/media_opt_util.cc @@ -13,13 +13,19 @@ #include #include +#include +#include +#include +#include "api/environment/environment.h" +#include "api/field_trials_view.h" #include "modules/video_coding/fec_rate_table.h" #include "modules/video_coding/internal_defines.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/numerics/safe_conversions.h" +#include "system_wrappers/include/clock.h" namespace webrtc { // Max value of loss rates in off-line model @@ -80,9 +86,10 @@ int VCMProtectionMethod::MaxFramesFec() const { return 1; } -VCMNackFecMethod::VCMNackFecMethod(int64_t lowRttNackThresholdMs, +VCMNackFecMethod::VCMNackFecMethod(const FieldTrialsView& field_trials, + int64_t lowRttNackThresholdMs, int64_t highRttNackThresholdMs) - : VCMFecMethod(), + : VCMFecMethod(field_trials), _lowRttNackMs(lowRttNackThresholdMs), _highRttNackMs(highRttNackThresholdMs), _maxFramesFec(1) { @@ -125,8 +132,8 @@ bool VCMNackFecMethod::ProtectionFactor( // Adjust FEC with NACK on (for delta frame only) // table depends on RTT relative to rttMax (NACK Threshold) - _protectionFactorD = rtc::saturated_cast( - adjustRtt * rtc::saturated_cast(_protectionFactorD)); + _protectionFactorD = saturated_cast( + adjustRtt * saturated_cast(_protectionFactorD)); // update FEC rates after applying adjustment VCMFecMethod::UpdateProtectionFactorD(_protectionFactorD); } @@ -148,9 +155,9 @@ int VCMNackFecMethod::ComputeMaxFramesFec( // RTP module based on the actual number of packets and the protection factor. float base_layer_framerate = parameters->frameRate / - rtc::saturated_cast(1 << (parameters->numLayers - 1)); + saturated_cast(1 << (parameters->numLayers - 1)); int max_frames_fec = std::max( - rtc::saturated_cast( + saturated_cast( 2.0f * base_layer_framerate * parameters->rtt / 1000.0f + 0.5f), 1); // `kUpperLimitFramesFec` is the upper limit on how many frames we @@ -229,7 +236,7 @@ VCMNackMethod::~VCMNackMethod() { } bool VCMNackMethod::EffectivePacketLoss( - const VCMProtectionParameters* parameter) { + const VCMProtectionParameters* /* parameter */) { // Effective Packet Loss, NA in current version. 
_effectivePacketLoss = 0; return true; @@ -244,9 +251,8 @@ bool VCMNackMethod::UpdateParameters( return true; } -VCMFecMethod::VCMFecMethod() - : VCMProtectionMethod(), - rate_control_settings_(RateControlSettings::ParseFromFieldTrials()) { +VCMFecMethod::VCMFecMethod(const FieldTrialsView& field_trials) + : rate_control_settings_(field_trials) { _type = kFec; } @@ -267,9 +273,9 @@ uint8_t VCMFecMethod::BoostCodeRateKey(uint8_t packetFrameDelta, } uint8_t VCMFecMethod::ConvertFECRate(uint8_t codeRateRTP) const { - return rtc::saturated_cast( - VCM_MIN(255, (0.5 + 255.0 * codeRateRTP / - rtc::saturated_cast(255 - codeRateRTP)))); + return saturated_cast(VCM_MIN( + 255, + (0.5 + 255.0 * codeRateRTP / saturated_cast(255 - codeRateRTP)))); } // Update FEC with protectionFactorD @@ -286,7 +292,7 @@ bool VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters) { // FEC PROTECTION SETTINGS: varies with packet loss and bitrate // No protection if (filtered) packetLoss is 0 - uint8_t packetLoss = rtc::saturated_cast(255 * parameters->lossPr); + uint8_t packetLoss = saturated_cast(255 * parameters->lossPr); if (packetLoss == 0) { _protectionFactorK = 0; _protectionFactorD = 0; @@ -297,7 +303,7 @@ bool VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters) { // first partition size, thresholds, table pars, spatial resoln fac. // First partition protection: ~ 20% - uint8_t firstPartitionProt = rtc::saturated_cast(255 * 0.20); + uint8_t firstPartitionProt = saturated_cast(255 * 0.20); // Minimum protection level needed to generate one FEC packet for one // source packet/frame (in RTP sender) @@ -313,9 +319,9 @@ bool VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters) { const uint8_t ratePar2 = 49; // Spatial resolution size, relative to a reference size. - float spatialSizeToRef = rtc::saturated_cast(parameters->codecWidth * - parameters->codecHeight) / - (rtc::saturated_cast(704 * 576)); + float spatialSizeToRef = + saturated_cast(parameters->codecWidth * parameters->codecHeight) / + (saturated_cast(704 * 576)); // resolnFac: This parameter will generally increase/decrease the FEC rate // (for fixed bitRate and packetLoss) based on system size. // Use a smaller exponent (< 1) to control/soften system size effect. 
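// Illustrative sketch (not part of the patch): the hunk above scales the FEC
// protection by frame area relative to the 704x576 reference before the
// rate-table lookup. A minimal standalone equivalent, with the exponent left
// as a placeholder since the source only states that it is < 1 (to soften the
// system-size effect):
#include <cmath>

float ResolutionFactor(int width, int height) {
  constexpr float kRefPixels = 704.0f * 576.0f;  // reference resolution used above
  constexpr float kSizeExponent = 0.5f;          // placeholder; source only says "< 1"
  float size_to_ref = static_cast<float>(width * height) / kRefPixels;
  // Larger frames raise the effective rate fed into the FEC table lookup,
  // smaller frames lower it, for the same bitrate and packet loss.
  return std::pow(size_to_ref, kSizeExponent);
}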
@@ -324,9 +330,9 @@ bool VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters) { const int bitRatePerFrame = BitsPerFrame(parameters); // Average number of packets per frame (source and fec): - const uint8_t avgTotPackets = rtc::saturated_cast( - 1.5f + rtc::saturated_cast(bitRatePerFrame) * 1000.0f / - rtc::saturated_cast(8.0 * _maxPayloadSize)); + const uint8_t avgTotPackets = saturated_cast( + 1.5f + saturated_cast(bitRatePerFrame) * 1000.0f / + saturated_cast(8.0 * _maxPayloadSize)); // FEC rate parameters: for P and I frame uint8_t codeRateDelta = 0; @@ -336,8 +342,8 @@ bool VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters) { // The range on the rate index corresponds to rates (bps) // from ~200k to ~8000k, for 30fps const uint16_t effRateFecTable = - rtc::saturated_cast(resolnFac * bitRatePerFrame); - uint8_t rateIndexTable = rtc::saturated_cast( + saturated_cast(resolnFac * bitRatePerFrame); + uint8_t rateIndexTable = saturated_cast( VCM_MAX(VCM_MIN((effRateFecTable - ratePar1) / ratePar1, ratePar2), 0)); // Restrict packet loss range to 50: @@ -370,12 +376,12 @@ bool VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters) { // The boost factor may depend on several factors: ratio of packet // number of I to P frames, how much protection placed on P frames, etc. const uint8_t packetFrameDelta = - rtc::saturated_cast(0.5 + parameters->packetsPerFrame); + saturated_cast(0.5 + parameters->packetsPerFrame); const uint8_t packetFrameKey = - rtc::saturated_cast(0.5 + parameters->packetsPerFrameKey); + saturated_cast(0.5 + parameters->packetsPerFrameKey); const uint8_t boostKey = BoostCodeRateKey(packetFrameDelta, packetFrameKey); - rateIndexTable = rtc::saturated_cast(VCM_MAX( + rateIndexTable = saturated_cast(VCM_MAX( VCM_MIN(1 + (boostKey * effRateFecTable - ratePar1) / ratePar1, ratePar2), 0)); uint16_t indexTableKey = rateIndexTable * kPacketLossMax + packetLoss; @@ -396,7 +402,7 @@ bool VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters) { // Make sure I frame protection is at least larger than P frame protection, // and at least as high as filtered packet loss. - codeRateKey = rtc::saturated_cast( + codeRateKey = saturated_cast( VCM_MAX(packetLoss, VCM_MAX(boostKeyProt, codeRateKey))); // Check limit on amount of protection for I frame: 50% is max. @@ -417,13 +423,12 @@ bool VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters) { // for cases of low rates (small #packets) and low protection levels. float numPacketsFl = - 1.0f + (rtc::saturated_cast(bitRatePerFrame) * 1000.0 / - rtc::saturated_cast(8.0 * _maxPayloadSize) + + 1.0f + (saturated_cast(bitRatePerFrame) * 1000.0 / + saturated_cast(8.0 * _maxPayloadSize) + 0.5); const float estNumFecGen = - 0.5f + - rtc::saturated_cast(_protectionFactorD * numPacketsFl / 255.0f); + 0.5f + saturated_cast(_protectionFactorD * numPacketsFl / 255.0f); // We reduce cost factor (which will reduce overhead for FEC and // hybrid method) and not the protectionFactor. 
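// Illustrative sketch (not part of the patch): the protection factors above
// are read out of fec_rate_table.h using a clamped rate index and the filtered
// packet loss. A minimal equivalent of the VCM_MAX(VCM_MIN(...)) clamping and
// row-major lookup, with the table geometry passed in as parameters because
// the constants (ratePar1, kPacketLossMax, ...) are defined elsewhere in this
// module:
#include <algorithm>
#include <cstddef>
#include <cstdint>

size_t FecTableIndex(int eff_rate, uint8_t packet_loss,
                     int rate_par1, int rate_par2, int loss_columns) {
  // Mirrors VCM_MAX(VCM_MIN((effRateFecTable - ratePar1) / ratePar1, ratePar2), 0).
  int rate_index = std::clamp((eff_rate - rate_par1) / rate_par1, 0, rate_par2);
  // Mirrors indexTable = rateIndexTable * kPacketLossMax + packetLoss.
  return static_cast<size_t>(rate_index) * loss_columns + packet_loss;
}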
@@ -456,11 +461,11 @@ int VCMFecMethod::BitsPerFrame(const VCMProtectionParameters* parameters) { if (frameRate < 1.0f) frameRate = 1.0f; // Average bits per frame (units of kbits) - return rtc::saturated_cast<int>(adjustmentFactor * bitRate / frameRate); + return saturated_cast<int>(adjustmentFactor * bitRate / frameRate); } bool VCMFecMethod::EffectivePacketLoss( - const VCMProtectionParameters* parameters) { + const VCMProtectionParameters* /* parameters */) { // Effective packet loss to encoder is based on RPL (residual packet loss) // this is a soft setting based on degree of FEC protection // RPL = received/input packet loss - average_FEC_recovery @@ -489,8 +494,9 @@ bool VCMFecMethod::UpdateParameters(const VCMProtectionParameters* parameters) { return true; } -VCMLossProtectionLogic::VCMLossProtectionLogic(int64_t nowMs) - : _currentParameters(), +VCMLossProtectionLogic::VCMLossProtectionLogic(const Environment& env) + : env_(env), + _currentParameters(), _rtt(0), _lossPr(0.0f), _bitRate(0.0f), @@ -507,7 +513,7 @@ VCMLossProtectionLogic::VCMLossProtectionLogic(int64_t nowMs) _codecWidth(704), _codecHeight(576), _numLayers(1) { - Reset(nowMs); + Reset(env_.clock().CurrentTime().ms()); } VCMLossProtectionLogic::~VCMLossProtectionLogic() { @@ -524,10 +530,11 @@ void VCMLossProtectionLogic::SetMethod( _selectedMethod.reset(new VCMNackMethod()); break; case kFec: - _selectedMethod.reset(new VCMFecMethod()); + _selectedMethod = std::make_unique<VCMFecMethod>(env_.field_trials()); break; case kNackFec: - _selectedMethod.reset(new VCMNackFecMethod(kLowRttNackMs, -1)); + _selectedMethod = std::make_unique<VCMNackFecMethod>(env_.field_trials(), + kLowRttNackMs, -1); break; case kNone: _selectedMethod.reset(); @@ -598,8 +605,8 @@ uint8_t VCMLossProtectionLogic::FilteredLoss(int64_t nowMs, UpdateMaxLossHistory(lossPr255, nowMs); // Update the recursive average filter. - _lossPr255.Apply(rtc::saturated_cast<float>(nowMs - _lastPrUpdateT), - rtc::saturated_cast<float>(lossPr255)); + _lossPr255.Apply(saturated_cast<float>(nowMs - _lastPrUpdateT), + saturated_cast<float>(lossPr255)); _lastPrUpdateT = nowMs; // Filtered loss: default is received loss (no filtering).
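// Illustrative sketch (not part of the patch): _lossPr255 above is a recursive
// (exponentially weighted) average of the reported loss, updated with the time
// elapsed since the previous report. A simplified stand-in assuming the usual
// smoothing form y = a^dt * y + (1 - a^dt) * x; the real filter is the
// ExpFilter from rtc_base/numerics/exp_filter.h, used via Apply()/filtered():
#include <cmath>

class SimpleExpFilter {
 public:
  explicit SimpleExpFilter(float alpha) : alpha_(alpha) {}
  // `elapsed` plays the role of (nowMs - _lastPrUpdateT) in the hunk above.
  void Apply(float elapsed, float sample) {
    const float weight = std::pow(alpha_, elapsed);
    filtered_ = weight * filtered_ + (1.0f - weight) * sample;
  }
  float filtered() const { return filtered_; }

 private:
  const float alpha_;  // per-millisecond decay factor in (0, 1)
  float filtered_ = 0.0f;
};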
@@ -609,7 +616,7 @@ uint8_t VCMLossProtectionLogic::FilteredLoss(int64_t nowMs, case kNoFilter: break; case kAvgFilter: - filtered_loss = rtc::saturated_cast(_lossPr255.filtered() + 0.5); + filtered_loss = saturated_cast(_lossPr255.filtered() + 0.5); break; case kMaxFilter: filtered_loss = MaxFilteredLossPr(nowMs); @@ -620,7 +627,7 @@ uint8_t VCMLossProtectionLogic::FilteredLoss(int64_t nowMs, } void VCMLossProtectionLogic::UpdateFilteredLossPr(uint8_t packetLossEnc) { - _lossPr = rtc::saturated_cast(packetLossEnc) / 255.0; + _lossPr = saturated_cast(packetLossEnc) / 255.0; } void VCMLossProtectionLogic::UpdateBitRate(float bitRate) { @@ -630,15 +637,14 @@ void VCMLossProtectionLogic::UpdateBitRate(float bitRate) { void VCMLossProtectionLogic::UpdatePacketsPerFrame(float nPackets, int64_t nowMs) { _packetsPerFrame.Apply( - rtc::saturated_cast(nowMs - _lastPacketPerFrameUpdateT), nPackets); + saturated_cast(nowMs - _lastPacketPerFrameUpdateT), nPackets); _lastPacketPerFrameUpdateT = nowMs; } void VCMLossProtectionLogic::UpdatePacketsPerFrameKey(float nPackets, int64_t nowMs) { _packetsPerFrameKey.Apply( - rtc::saturated_cast(nowMs - _lastPacketPerFrameUpdateTKey), - nPackets); + saturated_cast(nowMs - _lastPacketPerFrameUpdateTKey), nPackets); _lastPacketPerFrameUpdateTKey = nowMs; } diff --git a/modules/video_coding/media_opt_util.h b/modules/video_coding/media_opt_util.h index a74d1af6cb..ba01ae1e6d 100644 --- a/modules/video_coding/media_opt_util.h +++ b/modules/video_coding/media_opt_util.h @@ -14,9 +14,11 @@ #include #include +#include #include -#include "modules/video_coding/internal_defines.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/numerics/exp_filter.h" @@ -43,10 +45,6 @@ enum FilterPacketLossMode { // common to media optimization and the jitter buffer. constexpr int64_t kLowRttNackMs = 20; -// If the RTT is higher than this an extra RTT wont be added to to the jitter -// buffer delay. -constexpr int kMaxRttDelayThreshold = 500; - struct VCMProtectionParameters { VCMProtectionParameters(); @@ -153,7 +151,7 @@ class VCMNackMethod : public VCMProtectionMethod { class VCMFecMethod : public VCMProtectionMethod { public: - VCMFecMethod(); + explicit VCMFecMethod(const FieldTrialsView& field_trials); ~VCMFecMethod() override; bool UpdateParameters(const VCMProtectionParameters* parameters) override; // Get the effective packet loss for ER @@ -190,7 +188,8 @@ class VCMFecMethod : public VCMProtectionMethod { class VCMNackFecMethod : public VCMFecMethod { public: - VCMNackFecMethod(int64_t lowRttNackThresholdMs, + VCMNackFecMethod(const FieldTrialsView& field_trials, + int64_t lowRttNackThresholdMs, int64_t highRttNackThresholdMs); ~VCMNackFecMethod() override; bool UpdateParameters(const VCMProtectionParameters* parameters) override; @@ -213,7 +212,7 @@ class VCMNackFecMethod : public VCMFecMethod { class VCMLossProtectionLogic { public: - explicit VCMLossProtectionLogic(int64_t nowMs); + explicit VCMLossProtectionLogic(const Environment& env); ~VCMLossProtectionLogic(); // Set the protection method to be used @@ -322,6 +321,8 @@ class VCMLossProtectionLogic { // Sets the available loss protection methods. 
void UpdateMaxLossHistory(uint8_t lossPr255, int64_t now); uint8_t MaxFilteredLossPr(int64_t nowMs) const; + + const Environment env_; std::unique_ptr _selectedMethod; VCMProtectionParameters _currentParameters; int64_t _rtt; @@ -334,11 +335,11 @@ class VCMLossProtectionLogic { int64_t _lastPrUpdateT; int64_t _lastPacketPerFrameUpdateT; int64_t _lastPacketPerFrameUpdateTKey; - rtc::ExpFilter _lossPr255; + ExpFilter _lossPr255; VCMLossProbabilitySample _lossPrHistory[kLossPrHistorySize]; uint8_t _shortMaxLossPr255; - rtc::ExpFilter _packetsPerFrame; - rtc::ExpFilter _packetsPerFrameKey; + ExpFilter _packetsPerFrame; + ExpFilter _packetsPerFrameKey; size_t _codecWidth; size_t _codecHeight; int _numLayers; diff --git a/modules/video_coding/nack_requester.cc b/modules/video_coding/nack_requester.cc index 008420f4da..af2adf34c7 100644 --- a/modules/video_coding/nack_requester.cc +++ b/modules/video_coding/nack_requester.cc @@ -11,13 +11,22 @@ #include "modules/video_coding/nack_requester.h" #include -#include +#include +#include +#include +#include "api/field_trials_view.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "modules/include/module_common_types.h" #include "rtc_base/checks.h" -#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/mod_ops.h" +#include "rtc_base/numerics/sequence_number_util.h" +#include "rtc_base/task_utils/repeating_task.h" +#include "system_wrappers/include/clock.h" namespace webrtc { @@ -25,7 +34,9 @@ namespace { constexpr int kMaxPacketAge = 10'000; constexpr int kMaxNackPackets = 1000; constexpr TimeDelta kDefaultRtt = TimeDelta::Millis(100); -constexpr int kMaxNackRetries = 10; +// Number of times a packet can be nacked before giving up. Nack is sent at most +// every RTT. +constexpr int kMaxNackRetries = 100; constexpr int kMaxReorderedPackets = 128; constexpr int kNumReorderingBuckets = 10; constexpr TimeDelta kDefaultSendNackDelay = TimeDelta::Zero(); @@ -141,14 +152,12 @@ void NackRequester::ProcessNacks() { } } -int NackRequester::OnReceivedPacket(uint16_t seq_num, bool is_keyframe) { +int NackRequester::OnReceivedPacket(uint16_t seq_num) { RTC_DCHECK_RUN_ON(worker_thread_); - return OnReceivedPacket(seq_num, is_keyframe, false); + return OnReceivedPacket(seq_num, false); } -int NackRequester::OnReceivedPacket(uint16_t seq_num, - bool is_keyframe, - bool is_recovered) { +int NackRequester::OnReceivedPacket(uint16_t seq_num, bool is_recovered) { RTC_DCHECK_RUN_ON(worker_thread_); // TODO(philipel): When the packet includes information whether it is // retransmitted or not, use that value instead. For @@ -158,8 +167,6 @@ int NackRequester::OnReceivedPacket(uint16_t seq_num, if (!initialized_) { newest_seq_num_ = seq_num; - if (is_keyframe) - keyframe_list_.insert(seq_num); initialized_ = true; return 0; } @@ -182,15 +189,6 @@ int NackRequester::OnReceivedPacket(uint16_t seq_num, return nacks_sent_for_packet; } - // Keep track of new keyframes. - if (is_keyframe) - keyframe_list_.insert(seq_num); - - // And remove old ones so we don't accumulate keyframes. - auto it = keyframe_list_.lower_bound(seq_num - kMaxPacketAge); - if (it != keyframe_list_.begin()) - keyframe_list_.erase(keyframe_list_.begin(), it); - if (is_recovered) { recovered_list_.insert(seq_num); @@ -225,8 +223,6 @@ void NackRequester::ClearUpTo(uint16_t seq_num) { // thread. 
RTC_DCHECK_RUN_ON(worker_thread_); nack_list_.erase(nack_list_.begin(), nack_list_.lower_bound(seq_num)); - keyframe_list_.erase(keyframe_list_.begin(), - keyframe_list_.lower_bound(seq_num)); recovered_list_.erase(recovered_list_.begin(), recovered_list_.lower_bound(seq_num)); } @@ -236,25 +232,6 @@ void NackRequester::UpdateRtt(int64_t rtt_ms) { rtt_ = TimeDelta::Millis(rtt_ms); } -bool NackRequester::RemovePacketsUntilKeyFrame() { - // Called on worker_thread_. - while (!keyframe_list_.empty()) { - auto it = nack_list_.lower_bound(*keyframe_list_.begin()); - - if (it != nack_list_.begin()) { - // We have found a keyframe that actually is newer than at least one - // packet in the nack list. - nack_list_.erase(nack_list_.begin(), it); - return true; - } - - // If this keyframe is so old it does not remove any packets from the list, - // remove it from the list of keyframes and try the next keyframe. - keyframe_list_.erase(keyframe_list_.begin()); - } - return false; -} - void NackRequester::AddPacketsToNack(uint16_t seq_num_start, uint16_t seq_num_end) { // Called on worker_thread_. @@ -262,22 +239,13 @@ void NackRequester::AddPacketsToNack(uint16_t seq_num_start, auto it = nack_list_.lower_bound(seq_num_end - kMaxPacketAge); nack_list_.erase(nack_list_.begin(), it); - // If the nack list is too large, remove packets from the nack list until - // the latest first packet of a keyframe. If the list is still too large, - // clear it and request a keyframe. uint16_t num_new_nacks = ForwardDiff(seq_num_start, seq_num_end); if (nack_list_.size() + num_new_nacks > kMaxNackPackets) { - while (RemovePacketsUntilKeyFrame() && - nack_list_.size() + num_new_nacks > kMaxNackPackets) { - } - - if (nack_list_.size() + num_new_nacks > kMaxNackPackets) { - nack_list_.clear(); - RTC_LOG(LS_WARNING) << "NACK list full, clearing NACK" - " list and requesting keyframe."; - keyframe_request_sender_->RequestKeyFrame(); - return; - } + nack_list_.clear(); + RTC_LOG(LS_WARNING) << "NACK list full, clearing NACK" + " list and requesting keyframe."; + keyframe_request_sender_->RequestKeyFrame(); + return; } for (uint16_t seq_num = seq_num_start; seq_num != seq_num_end; ++seq_num) { diff --git a/modules/video_coding/nack_requester.h b/modules/video_coding/nack_requester.h index c860787dcf..54526279a7 100644 --- a/modules/video_coding/nack_requester.h +++ b/modules/video_coding/nack_requester.h @@ -26,6 +26,7 @@ #include "modules/include/module_common_types.h" #include "modules/video_coding/histogram.h" #include "rtc_base/numerics/sequence_number_util.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" @@ -78,8 +79,8 @@ class NackRequester final : public NackRequesterBase { void ProcessNacks() override; - int OnReceivedPacket(uint16_t seq_num, bool is_keyframe); - int OnReceivedPacket(uint16_t seq_num, bool is_keyframe, bool is_recovered); + int OnReceivedPacket(uint16_t seq_num); + int OnReceivedPacket(uint16_t seq_num, bool is_recovered); void ClearUpTo(uint16_t seq_num); void UpdateRtt(int64_t rtt_ms); @@ -108,10 +109,6 @@ class NackRequester final : public NackRequesterBase { void AddPacketsToNack(uint16_t seq_num_start, uint16_t seq_num_end) RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); - // Removes packets from the nack list until the next keyframe. Returns true - // if packets were removed. 
- bool RemovePacketsUntilKeyFrame() - RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); std::vector GetNackBatch(NackFilterOptions options) RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); @@ -134,8 +131,6 @@ class NackRequester final : public NackRequesterBase { // synchronized access. std::map> nack_list_ RTC_GUARDED_BY(worker_thread_); - std::set> keyframe_list_ - RTC_GUARDED_BY(worker_thread_); std::set> recovered_list_ RTC_GUARDED_BY(worker_thread_); video_coding::Histogram reordering_histogram_ RTC_GUARDED_BY(worker_thread_); diff --git a/modules/video_coding/nack_requester_unittest.cc b/modules/video_coding/nack_requester_unittest.cc index 6f11cb6e91..5432f3cb4b 100644 --- a/modules/video_coding/nack_requester_unittest.cc +++ b/modules/video_coding/nack_requester_unittest.cc @@ -14,7 +14,13 @@ #include #include #include +#include +#include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" +#include "modules/include/module_common_types.h" +#include "rtc_base/checks.h" +#include "rtc_base/thread.h" #include "system_wrappers/include/clock.h" #include "test/gtest.h" #include "test/run_loop.h" @@ -34,7 +40,7 @@ class TestNackRequester : public ::testing::Test, void SetUp() override {} void SendNack(const std::vector& sequence_numbers, - bool buffering_allowed) override { + bool /* buffering_allowed */) override { sent_nacks_.insert(sent_nacks_.end(), sequence_numbers.begin(), sequence_numbers.end()); if (waiting_for_send_nack_) { @@ -89,7 +95,7 @@ class TestNackRequester : public ::testing::Test, } static constexpr int64_t kDefaultRttMs = 20; - rtc::AutoThread main_thread_; + AutoThread main_thread_; test::RunLoop loop_; std::unique_ptr clock_; std::unique_ptr nack_periodic_processor_; @@ -102,90 +108,25 @@ class TestNackRequester : public ::testing::Test, TEST_F(TestNackRequester, NackOnePacket) { NackRequester& nack_module = CreateNackModule(); - nack_module.OnReceivedPacket(1, false, false); - nack_module.OnReceivedPacket(3, false, false); + nack_module.OnReceivedPacket(1); + nack_module.OnReceivedPacket(3); ASSERT_EQ(1u, sent_nacks_.size()); EXPECT_EQ(2, sent_nacks_[0]); } TEST_F(TestNackRequester, WrappingSeqNum) { NackRequester& nack_module = CreateNackModule(); - nack_module.OnReceivedPacket(0xfffe, false, false); - nack_module.OnReceivedPacket(1, false, false); + nack_module.OnReceivedPacket(0xfffe); + nack_module.OnReceivedPacket(1); ASSERT_EQ(2u, sent_nacks_.size()); EXPECT_EQ(0xffff, sent_nacks_[0]); EXPECT_EQ(0, sent_nacks_[1]); } -TEST_F(TestNackRequester, WrappingSeqNumClearToKeyframe) { - NackRequester& nack_module = CreateNackModule(TimeDelta::Millis(10)); - nack_module.OnReceivedPacket(0xfffe, false, false); - nack_module.OnReceivedPacket(1, false, false); - ASSERT_EQ(2u, sent_nacks_.size()); - EXPECT_EQ(0xffff, sent_nacks_[0]); - EXPECT_EQ(0, sent_nacks_[1]); - - sent_nacks_.clear(); - nack_module.OnReceivedPacket(2, true, false); - ASSERT_EQ(0u, sent_nacks_.size()); - - nack_module.OnReceivedPacket(501, true, false); - ASSERT_EQ(498u, sent_nacks_.size()); - for (int seq_num = 3; seq_num < 501; ++seq_num) - EXPECT_EQ(seq_num, sent_nacks_[seq_num - 3]); - - sent_nacks_.clear(); - nack_module.OnReceivedPacket(1001, false, false); - EXPECT_EQ(499u, sent_nacks_.size()); - for (int seq_num = 502; seq_num < 1001; ++seq_num) - EXPECT_EQ(seq_num, sent_nacks_[seq_num - 502]); - - sent_nacks_.clear(); - clock_->AdvanceTimeMilliseconds(100); - ASSERT_TRUE(WaitForSendNack()); - ASSERT_EQ(999u, sent_nacks_.size()); - EXPECT_EQ(0xffff, sent_nacks_[0]); - EXPECT_EQ(0, 
sent_nacks_[1]); - for (int seq_num = 3; seq_num < 501; ++seq_num) - EXPECT_EQ(seq_num, sent_nacks_[seq_num - 1]); - for (int seq_num = 502; seq_num < 1001; ++seq_num) - EXPECT_EQ(seq_num, sent_nacks_[seq_num - 2]); - - // Adding packet 1004 will cause the nack list to reach it's max limit. - // It will then clear all nacks up to the next keyframe (seq num 2), - // thus removing 0xffff and 0 from the nack list. - sent_nacks_.clear(); - nack_module.OnReceivedPacket(1004, false, false); - ASSERT_EQ(2u, sent_nacks_.size()); - EXPECT_EQ(1002, sent_nacks_[0]); - EXPECT_EQ(1003, sent_nacks_[1]); - - sent_nacks_.clear(); - clock_->AdvanceTimeMilliseconds(100); - ASSERT_TRUE(WaitForSendNack()); - ASSERT_EQ(999u, sent_nacks_.size()); - for (int seq_num = 3; seq_num < 501; ++seq_num) - EXPECT_EQ(seq_num, sent_nacks_[seq_num - 3]); - for (int seq_num = 502; seq_num < 1001; ++seq_num) - EXPECT_EQ(seq_num, sent_nacks_[seq_num - 4]); - - // Adding packet 1007 will cause the nack module to overflow again, thus - // clearing everything up to 501 which is the next keyframe. - nack_module.OnReceivedPacket(1007, false, false); - sent_nacks_.clear(); - clock_->AdvanceTimeMilliseconds(100); - ASSERT_TRUE(WaitForSendNack()); - ASSERT_EQ(503u, sent_nacks_.size()); - for (int seq_num = 502; seq_num < 1001; ++seq_num) - EXPECT_EQ(seq_num, sent_nacks_[seq_num - 502]); - EXPECT_EQ(1005, sent_nacks_[501]); - EXPECT_EQ(1006, sent_nacks_[502]); -} - TEST_F(TestNackRequester, ResendNack) { NackRequester& nack_module = CreateNackModule(TimeDelta::Millis(1)); - nack_module.OnReceivedPacket(1, false, false); - nack_module.OnReceivedPacket(3, false, false); + nack_module.OnReceivedPacket(1); + nack_module.OnReceivedPacket(3); size_t expected_nacks_sent = 1; ASSERT_EQ(expected_nacks_sent, sent_nacks_.size()); EXPECT_EQ(2, sent_nacks_[0]); @@ -225,8 +166,8 @@ TEST_F(TestNackRequester, ResendNack) { TEST_F(TestNackRequester, ResendPacketMaxRetries) { NackRequester& nack_module = CreateNackModule(TimeDelta::Millis(1)); - nack_module.OnReceivedPacket(1, false, false); - nack_module.OnReceivedPacket(3, false, false); + nack_module.OnReceivedPacket(1); + nack_module.OnReceivedPacket(3); ASSERT_EQ(1u, sent_nacks_.size()); EXPECT_EQ(2, sent_nacks_[0]); @@ -246,37 +187,22 @@ TEST_F(TestNackRequester, ResendPacketMaxRetries) { TEST_F(TestNackRequester, TooLargeNackList) { NackRequester& nack_module = CreateNackModule(); - nack_module.OnReceivedPacket(0, false, false); - nack_module.OnReceivedPacket(1001, false, false); + nack_module.OnReceivedPacket(0); + nack_module.OnReceivedPacket(1001); EXPECT_EQ(1000u, sent_nacks_.size()); EXPECT_EQ(0, keyframes_requested_); - nack_module.OnReceivedPacket(1003, false, false); + nack_module.OnReceivedPacket(1003); EXPECT_EQ(1000u, sent_nacks_.size()); EXPECT_EQ(1, keyframes_requested_); - nack_module.OnReceivedPacket(1004, false, false); - EXPECT_EQ(1000u, sent_nacks_.size()); - EXPECT_EQ(1, keyframes_requested_); -} - -TEST_F(TestNackRequester, TooLargeNackListWithKeyFrame) { - NackRequester& nack_module = CreateNackModule(); - nack_module.OnReceivedPacket(0, false, false); - nack_module.OnReceivedPacket(1, true, false); - nack_module.OnReceivedPacket(1001, false, false); - EXPECT_EQ(999u, sent_nacks_.size()); - EXPECT_EQ(0, keyframes_requested_); - nack_module.OnReceivedPacket(1003, false, false); - EXPECT_EQ(1000u, sent_nacks_.size()); - EXPECT_EQ(0, keyframes_requested_); - nack_module.OnReceivedPacket(1005, false, false); + nack_module.OnReceivedPacket(1004); EXPECT_EQ(1000u, 
sent_nacks_.size()); EXPECT_EQ(1, keyframes_requested_); } TEST_F(TestNackRequester, ClearUpTo) { NackRequester& nack_module = CreateNackModule(TimeDelta::Millis(1)); - nack_module.OnReceivedPacket(0, false, false); - nack_module.OnReceivedPacket(100, false, false); + nack_module.OnReceivedPacket(0); + nack_module.OnReceivedPacket(100); EXPECT_EQ(99u, sent_nacks_.size()); sent_nacks_.clear(); @@ -289,8 +215,8 @@ TEST_F(TestNackRequester, ClearUpTo) { TEST_F(TestNackRequester, ClearUpToWrap) { NackRequester& nack_module = CreateNackModule(); - nack_module.OnReceivedPacket(0xfff0, false, false); - nack_module.OnReceivedPacket(0xf, false, false); + nack_module.OnReceivedPacket(0xfff0); + nack_module.OnReceivedPacket(0xf); EXPECT_EQ(30u, sent_nacks_.size()); sent_nacks_.clear(); @@ -303,13 +229,13 @@ TEST_F(TestNackRequester, ClearUpToWrap) { TEST_F(TestNackRequester, PacketNackCount) { NackRequester& nack_module = CreateNackModule(TimeDelta::Millis(1)); - EXPECT_EQ(0, nack_module.OnReceivedPacket(0, false, false)); - EXPECT_EQ(0, nack_module.OnReceivedPacket(2, false, false)); - EXPECT_EQ(1, nack_module.OnReceivedPacket(1, false, false)); + EXPECT_EQ(0, nack_module.OnReceivedPacket(0)); + EXPECT_EQ(0, nack_module.OnReceivedPacket(2)); + EXPECT_EQ(1, nack_module.OnReceivedPacket(1)); sent_nacks_.clear(); nack_module.UpdateRtt(100); - EXPECT_EQ(0, nack_module.OnReceivedPacket(5, false, false)); + EXPECT_EQ(0, nack_module.OnReceivedPacket(5)); clock_->AdvanceTimeMilliseconds(100); WaitForSendNack(); EXPECT_EQ(4u, sent_nacks_.size()); @@ -319,40 +245,24 @@ TEST_F(TestNackRequester, PacketNackCount) { EXPECT_EQ(6u, sent_nacks_.size()); - EXPECT_EQ(3, nack_module.OnReceivedPacket(3, false, false)); - EXPECT_EQ(3, nack_module.OnReceivedPacket(4, false, false)); - EXPECT_EQ(0, nack_module.OnReceivedPacket(4, false, false)); -} - -TEST_F(TestNackRequester, NackListFullAndNoOverlapWithKeyframes) { - NackRequester& nack_module = CreateNackModule(); - const int kMaxNackPackets = 1000; - const unsigned int kFirstGap = kMaxNackPackets - 20; - const unsigned int kSecondGap = 200; - uint16_t seq_num = 0; - nack_module.OnReceivedPacket(seq_num++, true, false); - seq_num += kFirstGap; - nack_module.OnReceivedPacket(seq_num++, true, false); - EXPECT_EQ(kFirstGap, sent_nacks_.size()); - sent_nacks_.clear(); - seq_num += kSecondGap; - nack_module.OnReceivedPacket(seq_num, true, false); - EXPECT_EQ(kSecondGap, sent_nacks_.size()); + EXPECT_EQ(3, nack_module.OnReceivedPacket(3)); + EXPECT_EQ(3, nack_module.OnReceivedPacket(4)); + EXPECT_EQ(0, nack_module.OnReceivedPacket(4)); } TEST_F(TestNackRequester, HandleFecRecoveredPacket) { NackRequester& nack_module = CreateNackModule(); - nack_module.OnReceivedPacket(1, false, false); - nack_module.OnReceivedPacket(4, false, true); + nack_module.OnReceivedPacket(1); + nack_module.OnReceivedPacket(4, /*is_recovered=*/true); EXPECT_EQ(0u, sent_nacks_.size()); - nack_module.OnReceivedPacket(5, false, false); + nack_module.OnReceivedPacket(5); EXPECT_EQ(2u, sent_nacks_.size()); } TEST_F(TestNackRequester, SendNackWithoutDelay) { NackRequester& nack_module = CreateNackModule(); - nack_module.OnReceivedPacket(0, false, false); - nack_module.OnReceivedPacket(100, false, false); + nack_module.OnReceivedPacket(0); + nack_module.OnReceivedPacket(100); EXPECT_EQ(99u, sent_nacks_.size()); } @@ -372,7 +282,7 @@ class TestNackRequesterWithFieldTrial : public ::testing::Test, keyframes_requested_(0) {} void SendNack(const std::vector& sequence_numbers, - bool buffering_allowed) override 
{ + bool /* buffering_allowed */) override { sent_nacks_.insert(sent_nacks_.end(), sequence_numbers.begin(), sequence_numbers.end()); } @@ -380,7 +290,7 @@ class TestNackRequesterWithFieldTrial : public ::testing::Test, void RequestKeyFrame() override { ++keyframes_requested_; } test::ScopedKeyValueConfig nack_delay_field_trial_; - rtc::AutoThread main_thread_; + AutoThread main_thread_; std::unique_ptr clock_; NackPeriodicProcessor nack_periodic_processor_; NackRequester nack_module_; @@ -389,14 +299,14 @@ class TestNackRequesterWithFieldTrial : public ::testing::Test, }; TEST_F(TestNackRequesterWithFieldTrial, SendNackWithDelay) { - nack_module_.OnReceivedPacket(0, false, false); - nack_module_.OnReceivedPacket(100, false, false); + nack_module_.OnReceivedPacket(0); + nack_module_.OnReceivedPacket(100); EXPECT_EQ(0u, sent_nacks_.size()); clock_->AdvanceTimeMilliseconds(10); - nack_module_.OnReceivedPacket(106, false, false); + nack_module_.OnReceivedPacket(106); EXPECT_EQ(99u, sent_nacks_.size()); clock_->AdvanceTimeMilliseconds(10); - nack_module_.OnReceivedPacket(109, false, false); + nack_module_.OnReceivedPacket(109); EXPECT_EQ(104u, sent_nacks_.size()); } } // namespace webrtc diff --git a/modules/video_coding/packet_buffer.cc b/modules/video_coding/packet_buffer.cc index 52ef5c2d85..cfc16d5854 100644 --- a/modules/video_coding/packet_buffer.cc +++ b/modules/video_coding/packet_buffer.cc @@ -14,34 +14,38 @@ #include #include -#include +#include #include +#include #include -#include "absl/types/variant.h" -#include "api/array_view.h" -#include "api/rtp_packet_info.h" +#include "api/video/video_codec_type.h" #include "api/video/video_frame_type.h" #include "common_video/h264/h264_common.h" -#include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/mod_ops.h" +#include "rtc_base/numerics/sequence_number_util.h" namespace webrtc { namespace video_coding { PacketBuffer::Packet::Packet(const RtpPacketReceived& rtp_packet, + int64_t sequence_number, const RTPVideoHeader& video_header) : marker_bit(rtp_packet.Marker()), payload_type(rtp_packet.PayloadType()), - seq_num(rtp_packet.SequenceNumber()), + sequence_number(sequence_number), timestamp(rtp_packet.Timestamp()), times_nacked(-1), - video_header(video_header) {} + video_header(video_header) { + // Unwrapped sequence number should match the original wrapped one. + RTC_DCHECK_EQ(static_cast(sequence_number), + rtp_packet.SequenceNumber()); +} PacketBuffer::PacketBuffer(size_t start_buffer_size, size_t max_buffer_size) : max_size_(max_buffer_size), @@ -64,7 +68,7 @@ PacketBuffer::InsertResult PacketBuffer::InsertPacket( std::unique_ptr packet) { PacketBuffer::InsertResult result; - uint16_t seq_num = packet->seq_num; + uint16_t seq_num = packet->seq_num(); size_t index = seq_num % buffer_.size(); if (!first_packet_received_) { @@ -77,12 +81,20 @@ PacketBuffer::InsertResult PacketBuffer::InsertPacket( return result; } + if (ForwardDiff(first_seq_num_, seq_num) >= max_size_ && + ForwardDiff(seq_num, first_seq_num_) >= max_size_ / 2) { + // Large negative jump in rtp sequence number: clear the buffer and treat + // latest packet as the new first packet. 
+ Clear(); + first_packet_received_ = true; + } + first_seq_num_ = seq_num; } if (buffer_[index] != nullptr) { // Duplicate packet, just delete the payload. - if (buffer_[index]->seq_num == packet->seq_num) { + if (buffer_[index]->seq_num() == packet->seq_num()) { return result; } @@ -117,8 +129,7 @@ PacketBuffer::InsertResult PacketBuffer::InsertPacket( void PacketBuffer::ClearTo(uint16_t seq_num) { // We have already cleared past this sequence number, no need to do anything. - if (is_cleared_to_first_seq_num_ && - AheadOf(first_seq_num_, seq_num)) { + if (AheadOf(first_seq_num_, seq_num)) { return; } @@ -133,7 +144,7 @@ void PacketBuffer::ClearTo(uint16_t seq_num) { size_t iterations = std::min(diff, buffer_.size()); for (size_t i = 0; i < iterations; ++i) { auto& stored = buffer_[first_seq_num_ % buffer_.size()]; - if (stored != nullptr && AheadOf(seq_num, stored->seq_num)) { + if (stored != nullptr && AheadOf(seq_num, stored->seq_num())) { stored = nullptr; } ++first_seq_num_; @@ -194,7 +205,7 @@ bool PacketBuffer::ExpandBufferSize() { std::vector> new_buffer(new_size); for (std::unique_ptr& entry : buffer_) { if (entry != nullptr) { - new_buffer[entry->seq_num % new_size] = std::move(entry); + new_buffer[entry->seq_num() % new_size] = std::move(entry); } } buffer_ = std::move(new_buffer); @@ -210,13 +221,13 @@ bool PacketBuffer::PotentialNewFrame(uint16_t seq_num) const { if (entry == nullptr) return false; - if (entry->seq_num != seq_num) + if (entry->seq_num() != seq_num) return false; if (entry->is_first_packet_in_frame()) return true; if (prev_entry == nullptr) return false; - if (prev_entry->seq_num != static_cast(entry->seq_num - 1)) + if (prev_entry->seq_num() != static_cast(entry->seq_num() - 1)) return false; if (prev_entry->timestamp != entry->timestamp) return false; @@ -267,6 +278,14 @@ std::vector> PacketBuffer::FindFrames( int idr_height = -1; bool full_frame_found = false; while (true) { + // GFD is only attached to first packet of frame, so update check on + // every packet. + if (buffer_[start_index] != nullptr) { + is_generic = buffer_[start_index]->video_header.generic.has_value(); + if (is_generic) { + is_h264_descriptor = false; + } + } ++tested_packets; if (!is_h264_descriptor) { @@ -278,17 +297,17 @@ std::vector> PacketBuffer::FindFrames( } if (is_h264_descriptor) { - const auto* h264_header = absl::get_if( + const auto* h264_header = std::get_if( &buffer_[start_index]->video_header.video_type_header); - if (!h264_header || h264_header->nalus_length >= kMaxNalusPerPacket) + if (!h264_header) return found_frames; - for (size_t j = 0; j < h264_header->nalus_length; ++j) { - if (h264_header->nalus[j].type == H264::NaluType::kSps) { + for (const NaluInfo& nalu : h264_header->nalus) { + if (nalu.type == H264::NaluType::kSps) { has_h264_sps = true; - } else if (h264_header->nalus[j].type == H264::NaluType::kPps) { + } else if (nalu.type == H264::NaluType::kPps) { has_h264_pps = true; - } else if (h264_header->nalus[j].type == H264::NaluType::kIdr) { + } else if (nalu.type == H264::NaluType::kIdr) { has_h264_idr = true; } } @@ -371,13 +390,13 @@ std::vector> PacketBuffer::FindFrames( // Use uint16_t type to handle sequence number wrap around case. 
uint16_t num_packets = end_seq_num - start_seq_num; found_frames.reserve(found_frames.size() + num_packets); - for (uint16_t i = start_seq_num; i != end_seq_num; ++i) { - std::unique_ptr& packet = buffer_[i % buffer_.size()]; + for (uint16_t j = start_seq_num; j != end_seq_num; ++j) { + std::unique_ptr& packet = buffer_[j % buffer_.size()]; RTC_DCHECK(packet); - RTC_DCHECK_EQ(i, packet->seq_num); + RTC_DCHECK_EQ(j, packet->seq_num()); // Ensure frame boundary flags are properly set. - packet->video_header.is_first_packet_in_frame = (i == start_seq_num); - packet->video_header.is_last_packet_in_frame = (i == seq_num); + packet->video_header.is_first_packet_in_frame = (j == start_seq_num); + packet->video_header.is_last_packet_in_frame = (j == seq_num); found_frames.push_back(std::move(packet)); } diff --git a/modules/video_coding/packet_buffer.h b/modules/video_coding/packet_buffer.h index 53e08c95a1..f17b00efb1 100644 --- a/modules/video_coding/packet_buffer.h +++ b/modules/video_coding/packet_buffer.h @@ -11,20 +11,19 @@ #ifndef MODULES_VIDEO_CODING_PACKET_BUFFER_H_ #define MODULES_VIDEO_CODING_PACKET_BUFFER_H_ +#include +#include #include -#include +#include #include #include #include "absl/base/attributes.h" -#include "api/rtp_packet_info.h" -#include "api/units/timestamp.h" -#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/numerics/sequence_number_util.h" -#include "rtc_base/thread_annotations.h" namespace webrtc { namespace video_coding { @@ -34,6 +33,7 @@ class PacketBuffer { struct Packet { Packet() = default; Packet(const RtpPacketReceived& rtp_packet, + int64_t sequence_number, const RTPVideoHeader& video_header); Packet(const Packet&) = delete; Packet(Packet&&) = delete; @@ -51,17 +51,18 @@ class PacketBuffer { bool is_last_packet_in_frame() const { return video_header.is_last_packet_in_frame; } + uint16_t seq_num() const { return static_cast(sequence_number); } // If all its previous packets have been inserted into the packet buffer. // Set and used internally by the PacketBuffer. bool continuous = false; bool marker_bit = false; uint8_t payload_type = 0; - uint16_t seq_num = 0; + int64_t sequence_number = 0; uint32_t timestamp = 0; int times_nacked = -1; - rtc::CopyOnWriteBuffer video_payload; + CopyOnWriteBuffer video_payload; RTPVideoHeader video_header; }; struct InsertResult { @@ -115,7 +116,7 @@ class PacketBuffer { // determine continuity between them. 
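
The Packet struct above now stores an unwrapped 64-bit sequence number and derives the wrapped 16-bit RTP value through seq_num(), which is also why tests below express a wrap as 0x1'0000 rather than 0x0. A minimal self-contained sketch of that pattern follows; PacketSketch is illustrative only, not the WebRTC type.

#include <cassert>
#include <cstdint>

struct PacketSketch {
  // Unwrapped, monotonically increasing id produced by a sequence-number
  // unwrapper upstream of the packet buffer.
  int64_t sequence_number = 0;
  // Wrapped value as it appears in the RTP header.
  uint16_t seq_num() const { return static_cast<uint16_t>(sequence_number); }
};

int main() {
  PacketSketch before_wrap;
  before_wrap.sequence_number = 0xFFFF;
  assert(before_wrap.seq_num() == 0xFFFF);

  PacketSketch after_wrap;
  after_wrap.sequence_number = 0x1'0000;  // one step past the 16-bit range
  assert(after_wrap.seq_num() == 0);      // same wrapped value the wire carries
}
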
std::vector> buffer_; - absl::optional newest_inserted_seq_num_; + std::optional newest_inserted_seq_num_; std::set> missing_packets_; std::set> received_padding_; diff --git a/modules/video_coding/packet_buffer_unittest.cc b/modules/video_coding/packet_buffer_unittest.cc index 901f44b188..17a0d75c3b 100644 --- a/modules/video_coding/packet_buffer_unittest.cc +++ b/modules/video_coding/packet_buffer_unittest.cc @@ -9,18 +9,23 @@ */ #include "modules/video_coding/packet_buffer.h" +#include #include #include +#include #include -#include #include +#include #include "api/array_view.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "common_video/h264/h264_common.h" -#include "modules/rtp_rtcp/source/frame_object.h" -#include "rtc_base/numerics/sequence_number_unwrapper.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/random.h" -#include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" @@ -43,13 +48,13 @@ void IgnoreResult(PacketBuffer::InsertResult /*result*/) {} // Validates frame boundaries are valid and returns first sequence_number for // each frame. std::vector StartSeqNums( - rtc::ArrayView> packets) { + ArrayView> packets) { std::vector result; bool frame_boundary = true; for (const auto& packet : packets) { EXPECT_EQ(frame_boundary, packet->is_first_packet_in_frame()); if (packet->is_first_packet_in_frame()) { - result.push_back(packet->seq_num); + result.push_back(packet->seq_num()); } frame_boundary = packet->is_last_packet_in_frame(); } @@ -85,11 +90,11 @@ void PrintTo(const PacketBufferInsertResult& result, std::ostream* os) { for (const auto& packet : result.packets) { if (packet->is_first_packet_in_frame() && packet->is_last_packet_in_frame()) { - *os << "{sn: " << packet->seq_num << " }"; + *os << "{sn: " << packet->seq_num() << " }"; } else if (packet->is_first_packet_in_frame()) { - *os << "{sn: [" << packet->seq_num << "-"; + *os << "{sn: [" << packet->seq_num() << "-"; } else if (packet->is_last_packet_in_frame()) { - *os << packet->seq_num << "] }, "; + *os << packet->seq_num() << "] }, "; } } *os << " }"; @@ -108,16 +113,16 @@ class PacketBufferTest : public ::testing::Test { enum IsFirst { kFirst, kNotFirst }; enum IsLast { kLast, kNotLast }; - PacketBufferInsertResult Insert(uint16_t seq_num, // packet sequence number + PacketBufferInsertResult Insert(int64_t seq_num, // packet sequence number IsKeyFrame keyframe, // is keyframe IsFirst first, // is first packet of frame IsLast last, // is last packet of frame - rtc::ArrayView data = {}, + ArrayView data = {}, uint32_t timestamp = 123u) { // rtp timestamp auto packet = std::make_unique(); packet->video_header.codec = kVideoCodecGeneric; packet->timestamp = timestamp; - packet->seq_num = seq_num; + packet->sequence_number = seq_num; packet->video_header.frame_type = keyframe == kKeyFrame ? 
VideoFrameType::kVideoFrameKey : VideoFrameType::kVideoFrameDelta; @@ -134,12 +139,12 @@ class PacketBufferTest : public ::testing::Test { }; TEST_F(PacketBufferTest, InsertOnePacket) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kLast).packets, SizeIs(1)); } TEST_F(PacketBufferTest, InsertMultiplePackets) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kLast).packets, SizeIs(1)); EXPECT_THAT(Insert(seq_num + 1, kKeyFrame, kFirst, kLast).packets, SizeIs(1)); EXPECT_THAT(Insert(seq_num + 2, kKeyFrame, kFirst, kLast).packets, SizeIs(1)); @@ -147,7 +152,7 @@ TEST_F(PacketBufferTest, InsertMultiplePackets) { } TEST_F(PacketBufferTest, InsertDuplicatePacket) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kNotLast).packets, IsEmpty()); EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kNotLast).packets, IsEmpty()); EXPECT_THAT(Insert(seq_num + 1, kKeyFrame, kNotFirst, kLast).packets, @@ -156,14 +161,14 @@ TEST_F(PacketBufferTest, InsertDuplicatePacket) { TEST_F(PacketBufferTest, SeqNumWrapOneFrame) { Insert(0xFFFF, kKeyFrame, kFirst, kNotLast); - EXPECT_THAT(Insert(0x0, kKeyFrame, kNotFirst, kLast), + EXPECT_THAT(Insert(0x1'0000, kKeyFrame, kNotFirst, kLast), StartSeqNumsAre(0xFFFF)); } TEST_F(PacketBufferTest, SeqNumWrapTwoFrames) { EXPECT_THAT(Insert(0xFFFF, kKeyFrame, kFirst, kLast), StartSeqNumsAre(0xFFFF)); - EXPECT_THAT(Insert(0x0, kKeyFrame, kFirst, kLast), StartSeqNumsAre(0x0)); + EXPECT_THAT(Insert(0x1'0000, kKeyFrame, kFirst, kLast), StartSeqNumsAre(0x0)); } TEST_F(PacketBufferTest, InsertOldPackets) { @@ -181,7 +186,7 @@ TEST_F(PacketBufferTest, InsertOldPackets) { } TEST_F(PacketBufferTest, FrameSize) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); uint8_t data1[5] = {}; uint8_t data2[5] = {}; uint8_t data3[5] = {}; @@ -198,7 +203,7 @@ TEST_F(PacketBufferTest, FrameSize) { } TEST_F(PacketBufferTest, ExpandBuffer) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); Insert(seq_num, kKeyFrame, kFirst, kNotLast); for (int i = 1; i < kStartSize; ++i) @@ -212,7 +217,7 @@ TEST_F(PacketBufferTest, ExpandBuffer) { } TEST_F(PacketBufferTest, SingleFrameExpandsBuffer) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); Insert(seq_num, kKeyFrame, kFirst, kNotLast); for (int i = 1; i < kStartSize; ++i) @@ -222,7 +227,7 @@ TEST_F(PacketBufferTest, SingleFrameExpandsBuffer) { } TEST_F(PacketBufferTest, ExpandBufferOverflow) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); EXPECT_FALSE(Insert(seq_num, kKeyFrame, kFirst, kNotLast).buffer_cleared); for (int i = 1; i < kMaxSize; ++i) @@ -236,13 +241,13 @@ TEST_F(PacketBufferTest, ExpandBufferOverflow) { } TEST_F(PacketBufferTest, OnePacketOneFrame) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kLast), StartSeqNumsAre(seq_num)); } TEST_F(PacketBufferTest, TwoPacketsTwoFrames) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kLast), StartSeqNumsAre(seq_num)); @@ -251,7 +256,7 @@ TEST_F(PacketBufferTest, TwoPacketsTwoFrames) { } TEST_F(PacketBufferTest, TwoPacketsOneFrames) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kNotLast).packets, IsEmpty()); 
EXPECT_THAT(Insert(seq_num + 1, kKeyFrame, kNotFirst, kLast), @@ -259,7 +264,7 @@ TEST_F(PacketBufferTest, TwoPacketsOneFrames) { } TEST_F(PacketBufferTest, ThreePacketReorderingOneFrame) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kNotLast).packets, IsEmpty()); EXPECT_THAT(Insert(seq_num + 2, kKeyFrame, kNotFirst, kLast).packets, @@ -269,7 +274,7 @@ TEST_F(PacketBufferTest, ThreePacketReorderingOneFrame) { } TEST_F(PacketBufferTest, Frames) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); EXPECT_THAT(Insert(seq_num, kKeyFrame, kFirst, kLast), StartSeqNumsAre(seq_num)); @@ -282,7 +287,7 @@ TEST_F(PacketBufferTest, Frames) { } TEST_F(PacketBufferTest, ClearSinglePacket) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); for (int i = 0; i < kMaxSize; ++i) Insert(seq_num + i, kDeltaFrame, kFirst, kLast); @@ -322,7 +327,7 @@ TEST_F(PacketBufferTest, DontClearNewerPacket) { } TEST_F(PacketBufferTest, OneIncompleteFrame) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); EXPECT_THAT(Insert(seq_num, kDeltaFrame, kFirst, kNotLast).packets, IsEmpty()); @@ -333,7 +338,7 @@ TEST_F(PacketBufferTest, OneIncompleteFrame) { } TEST_F(PacketBufferTest, TwoIncompleteFramesFullBuffer) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); for (int i = 1; i < kMaxSize - 1; ++i) Insert(seq_num + i, kDeltaFrame, kNotFirst, kNotLast); @@ -344,7 +349,7 @@ TEST_F(PacketBufferTest, TwoIncompleteFramesFullBuffer) { } TEST_F(PacketBufferTest, FramesReordered) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); EXPECT_THAT(Insert(seq_num + 1, kDeltaFrame, kFirst, kLast), StartSeqNumsAre(seq_num + 1)); @@ -356,15 +361,27 @@ TEST_F(PacketBufferTest, FramesReordered) { StartSeqNumsAre(seq_num + 2)); } +TEST_F(PacketBufferTest, FramesReorderedReconstruction) { + Insert(100, kKeyFrame, kFirst, kNotLast, {}, 2); + + Insert(98, kKeyFrame, kFirst, kNotLast, {}, 1); + EXPECT_THAT(Insert(99, kDeltaFrame, kNotFirst, kLast, {}, 1), + StartSeqNumsAre(98)); + + // Ideally frame with timestamp 2, seq No 100 should be + // reconstructed here from the first Insert() call in the test + EXPECT_THAT(Insert(101, kDeltaFrame, kNotFirst, kLast, {}, 2), + StartSeqNumsAre(100)); +} + TEST_F(PacketBufferTest, InsertPacketAfterSequenceNumberWrapAround) { - uint16_t kFirstSeqNum = 0; + int64_t kFirstSeqNum = 0; uint32_t kTimestampDelta = 100; uint32_t timestamp = 10000; - uint16_t seq_num = kFirstSeqNum; + int64_t seq_num = kFirstSeqNum; // Loop until seq_num wraps around. 
- SeqNumUnwrapper unwrapper; - while (unwrapper.Unwrap(seq_num) < std::numeric_limits::max()) { + while (seq_num < std::numeric_limits::max()) { Insert(seq_num++, kKeyFrame, kFirst, kNotLast, {}, timestamp); for (int i = 0; i < 5; ++i) { Insert(seq_num++, kKeyFrame, kNotFirst, kNotLast, {}, timestamp); @@ -399,12 +416,12 @@ class PacketBufferH264Test : public PacketBufferTest { } PacketBufferInsertResult InsertH264( - uint16_t seq_num, // packet sequence number + int64_t seq_num, // packet sequence number IsKeyFrame keyframe, // is keyframe IsFirst first, // is first packet of frame IsLast last, // is last packet of frame uint32_t timestamp, // rtp timestamp - rtc::ArrayView data = {}, + ArrayView data = {}, uint32_t width = 0, // width of frame (SPS/IDR) uint32_t height = 0, // height of frame (SPS/IDR) bool generic = false) { // has generic descriptor @@ -412,17 +429,15 @@ class PacketBufferH264Test : public PacketBufferTest { packet->video_header.codec = kVideoCodecH264; auto& h264_header = packet->video_header.video_type_header.emplace(); - packet->seq_num = seq_num; + packet->sequence_number = seq_num; packet->timestamp = timestamp; if (keyframe == kKeyFrame) { if (sps_pps_idr_is_keyframe_) { - h264_header.nalus[0].type = H264::NaluType::kSps; - h264_header.nalus[1].type = H264::NaluType::kPps; - h264_header.nalus[2].type = H264::NaluType::kIdr; - h264_header.nalus_length = 3; + h264_header.nalus = {{H264::NaluType::kSps}, + {H264::NaluType::kPps}, + {H264::NaluType::kIdr}}; } else { - h264_header.nalus[0].type = H264::NaluType::kIdr; - h264_header.nalus_length = 1; + h264_header.nalus = {{H264::NaluType::kIdr}}; } } packet->video_header.width = width; @@ -439,27 +454,26 @@ class PacketBufferH264Test : public PacketBufferTest { } PacketBufferInsertResult InsertH264KeyFrameWithAud( - uint16_t seq_num, // packet sequence number + int64_t seq_num, // packet sequence number IsKeyFrame keyframe, // is keyframe IsFirst first, // is first packet of frame IsLast last, // is last packet of frame uint32_t timestamp, // rtp timestamp - rtc::ArrayView data = {}, + ArrayView data = {}, uint32_t width = 0, // width of frame (SPS/IDR) uint32_t height = 0) { // height of frame (SPS/IDR) auto packet = std::make_unique(); packet->video_header.codec = kVideoCodecH264; auto& h264_header = packet->video_header.video_type_header.emplace(); - packet->seq_num = seq_num; + packet->sequence_number = seq_num; packet->timestamp = timestamp; // this should be the start of frame. RTC_CHECK(first == kFirst); // Insert a AUD NALU / packet without width/height. 
- h264_header.nalus[0].type = H264::NaluType::kAud; - h264_header.nalus_length = 1; + h264_header.nalus = {{H264::NaluType::kAud}}; packet->video_header.is_first_packet_in_frame = true; packet->video_header.is_last_packet_in_frame = false; IgnoreResult(packet_buffer_.InsertPacket(std::move(packet))); @@ -518,16 +532,15 @@ TEST_P(PacketBufferH264ParameterizedTest, GetBitstreamOneFrameFullBuffer) { } TEST_P(PacketBufferH264ParameterizedTest, GetBitstreamBufferPadding) { - uint16_t seq_num = Rand(); - rtc::CopyOnWriteBuffer data = "some plain old data"; + int64_t seq_num = Rand(); + CopyOnWriteBuffer data = "some plain old data"; auto packet = std::make_unique(); auto& h264_header = packet->video_header.video_type_header.emplace(); - h264_header.nalus_length = 1; - h264_header.nalus[0].type = H264::NaluType::kIdr; + h264_header.nalus = {{H264::NaluType::kIdr}}; h264_header.packetization_type = kH264SingleNalu; - packet->seq_num = seq_num; + packet->sequence_number = seq_num; packet->video_header.codec = kVideoCodecH264; packet->video_payload = data; packet->video_header.is_first_packet_in_frame = true; @@ -535,12 +548,12 @@ TEST_P(PacketBufferH264ParameterizedTest, GetBitstreamBufferPadding) { auto frames = packet_buffer_.InsertPacket(std::move(packet)).packets; ASSERT_THAT(frames, SizeIs(1)); - EXPECT_EQ(frames[0]->seq_num, seq_num); + EXPECT_EQ(frames[0]->sequence_number, seq_num); EXPECT_EQ(frames[0]->video_payload, data); } TEST_P(PacketBufferH264ParameterizedTest, FrameResolution) { - uint16_t seq_num = 100; + int64_t seq_num = 100; uint8_t data[] = "some plain old data"; uint32_t width = 640; uint32_t height = 360; @@ -556,7 +569,7 @@ TEST_P(PacketBufferH264ParameterizedTest, FrameResolution) { } TEST_P(PacketBufferH264ParameterizedTest, FrameResolutionNaluBeforeSPS) { - uint16_t seq_num = 100; + int64_t seq_num = 100; uint8_t data[] = "some plain old data"; uint32_t width = 640; uint32_t height = 360; @@ -572,7 +585,7 @@ TEST_P(PacketBufferH264ParameterizedTest, FrameResolutionNaluBeforeSPS) { } TEST_F(PacketBufferTest, FreeSlotsOnFrameCreation) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); Insert(seq_num, kKeyFrame, kFirst, kNotLast); Insert(seq_num + 1, kDeltaFrame, kNotFirst, kNotLast); @@ -588,7 +601,7 @@ TEST_F(PacketBufferTest, FreeSlotsOnFrameCreation) { } TEST_F(PacketBufferTest, Clear) { - const uint16_t seq_num = Rand(); + const int64_t seq_num = Rand(); Insert(seq_num, kKeyFrame, kFirst, kNotLast); Insert(seq_num + 1, kDeltaFrame, kNotFirst, kNotLast); @@ -630,7 +643,7 @@ TEST_F(PacketBufferTest, IncomingCodecChange) { packet->video_header.codec = kVideoCodecVP8; packet->video_header.video_type_header.emplace(); packet->timestamp = 1; - packet->seq_num = 1; + packet->sequence_number = 1; packet->video_header.frame_type = VideoFrameType::kVideoFrameKey; EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, SizeIs(1)); @@ -641,9 +654,9 @@ TEST_F(PacketBufferTest, IncomingCodecChange) { packet->video_header.codec = kVideoCodecH264; auto& h264_header = packet->video_header.video_type_header.emplace(); - h264_header.nalus_length = 1; + h264_header.nalus.resize(1); packet->timestamp = 3; - packet->seq_num = 3; + packet->sequence_number = 3; packet->video_header.frame_type = VideoFrameType::kVideoFrameKey; EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, IsEmpty()); @@ -654,27 +667,12 @@ TEST_F(PacketBufferTest, IncomingCodecChange) { packet->video_header.codec = kVideoCodecVP8; 
packet->video_header.video_type_header.emplace(); packet->timestamp = 2; - packet->seq_num = 2; + packet->sequence_number = 2; packet->video_header.frame_type = VideoFrameType::kVideoFrameDelta; EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, SizeIs(2)); } -TEST_F(PacketBufferTest, TooManyNalusInPacket) { - auto packet = std::make_unique(); - packet->video_header.codec = kVideoCodecH264; - packet->timestamp = 1; - packet->seq_num = 1; - packet->video_header.frame_type = VideoFrameType::kVideoFrameKey; - packet->video_header.is_first_packet_in_frame = true; - packet->video_header.is_last_packet_in_frame = true; - auto& h264_header = - packet->video_header.video_type_header.emplace(); - h264_header.nalus_length = kMaxNalusPerPacket; - EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, - IsEmpty()); -} - TEST_P(PacketBufferH264ParameterizedTest, OneFrameFillBuffer) { InsertH264(0, kKeyFrame, kFirst, kNotLast, 1000); for (int i = 1; i < kStartSize - 1; ++i) @@ -705,6 +703,12 @@ TEST_P(PacketBufferH264ParameterizedTest, OneFrameMaxSeqNum) { StartSeqNumsAre(65534)); } +TEST_P(PacketBufferH264ParameterizedTest, InsertTooOldPackets) { + InsertH264(4660, kKeyFrame, kFirst, kNotLast, 1000); + InsertH264(37429, kDeltaFrame, kFirst, kNotLast, 1000); + InsertH264(4662, kKeyFrame, kFirst, kLast, 1000); +} + TEST_P(PacketBufferH264ParameterizedTest, ClearMissingPacketsOnKeyframe) { InsertH264(0, kKeyFrame, kFirst, kLast, 1000); InsertH264(2, kKeyFrame, kFirst, kLast, 3000); @@ -738,7 +742,7 @@ TEST_P(PacketBufferH264ParameterizedTest, FindFramesOnReorderedPadding) { class PacketBufferH264XIsKeyframeTest : public PacketBufferH264Test { protected: - const uint16_t kSeqNum = 5; + const int64_t kSeqNum = 5; explicit PacketBufferH264XIsKeyframeTest(bool sps_pps_idr_is_keyframe) : PacketBufferH264Test(sps_pps_idr_is_keyframe) {} @@ -746,7 +750,7 @@ class PacketBufferH264XIsKeyframeTest : public PacketBufferH264Test { std::unique_ptr CreatePacket() { auto packet = std::make_unique(); packet->video_header.codec = kVideoCodecH264; - packet->seq_num = kSeqNum; + packet->sequence_number = kSeqNum; packet->video_header.is_first_packet_in_frame = true; packet->video_header.is_last_packet_in_frame = true; @@ -765,8 +769,7 @@ TEST_F(PacketBufferH264IdrIsKeyframeTest, IdrIsKeyframe) { auto packet = CreatePacket(); auto& h264_header = packet->video_header.video_type_header.emplace(); - h264_header.nalus[0].type = H264::NaluType::kIdr; - h264_header.nalus_length = 1; + h264_header.nalus = {{H264::NaluType::kIdr}}; EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, ElementsAre(KeyFrame())); } @@ -775,10 +778,8 @@ TEST_F(PacketBufferH264IdrIsKeyframeTest, SpsPpsIdrIsKeyframe) { auto packet = CreatePacket(); auto& h264_header = packet->video_header.video_type_header.emplace(); - h264_header.nalus[0].type = H264::NaluType::kSps; - h264_header.nalus[1].type = H264::NaluType::kPps; - h264_header.nalus[2].type = H264::NaluType::kIdr; - h264_header.nalus_length = 3; + h264_header.nalus = { + {H264::NaluType::kSps}, {H264::NaluType::kPps}, {H264::NaluType::kIdr}}; EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, ElementsAre(KeyFrame())); @@ -795,8 +796,7 @@ TEST_F(PacketBufferH264SpsPpsIdrIsKeyframeTest, IdrIsNotKeyframe) { auto packet = CreatePacket(); auto& h264_header = packet->video_header.video_type_header.emplace(); - h264_header.nalus[0].type = H264::NaluType::kIdr; - h264_header.nalus_length = 1; + h264_header.nalus = {{H264::NaluType::kIdr}}; 
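
The H264 test updates above follow the header change from a fixed nalus array plus an explicit nalus_length to a resizable, brace-initializable container (which is also why the TooManyNalusInPacket test disappears). A rough before/after sketch, using std::vector as a stand-in for whatever container the real RTPVideoHeaderH264 uses; all type names here are illustrative.

#include <cstddef>
#include <cstdint>
#include <vector>

enum class NaluType : uint8_t { kSps, kPps, kIdr, kAud };

struct NaluInfoSketch {
  NaluType type;
};

// Old shape (illustrative): fixed array + explicit length, bounded by a
// kMaxNalusPerPacket constant, so oversized packets had to be rejected.
struct OldH264Header {
  static constexpr size_t kMaxNalusPerPacket = 10;
  NaluInfoSketch nalus[kMaxNalusPerPacket];
  size_t nalus_length = 0;
};

// New shape (illustrative): a growable container, brace-initializable.
struct NewH264Header {
  std::vector<NaluInfoSketch> nalus;
};

int main() {
  OldH264Header old_header;
  old_header.nalus[0].type = NaluType::kSps;
  old_header.nalus[1].type = NaluType::kPps;
  old_header.nalus[2].type = NaluType::kIdr;
  old_header.nalus_length = 3;

  NewH264Header new_header;
  new_header.nalus = {{NaluType::kSps}, {NaluType::kPps}, {NaluType::kIdr}};
}
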
EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, ElementsAre(DeltaFrame())); @@ -806,9 +806,7 @@ TEST_F(PacketBufferH264SpsPpsIdrIsKeyframeTest, SpsPpsIsNotKeyframe) { auto packet = CreatePacket(); auto& h264_header = packet->video_header.video_type_header.emplace(); - h264_header.nalus[0].type = H264::NaluType::kSps; - h264_header.nalus[1].type = H264::NaluType::kPps; - h264_header.nalus_length = 2; + h264_header.nalus = {{H264::NaluType::kSps}, {H264::NaluType::kPps}}; EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, ElementsAre(DeltaFrame())); @@ -818,10 +816,8 @@ TEST_F(PacketBufferH264SpsPpsIdrIsKeyframeTest, SpsPpsIdrIsKeyframe) { auto packet = CreatePacket(); auto& h264_header = packet->video_header.video_type_header.emplace(); - h264_header.nalus[0].type = H264::NaluType::kSps; - h264_header.nalus[1].type = H264::NaluType::kPps; - h264_header.nalus[2].type = H264::NaluType::kIdr; - h264_header.nalus_length = 3; + h264_header.nalus = { + {H264::NaluType::kSps}, {H264::NaluType::kPps}, {H264::NaluType::kIdr}}; EXPECT_THAT(packet_buffer_.InsertPacket(std::move(packet)).packets, ElementsAre(KeyFrame())); @@ -849,6 +845,25 @@ TEST_F(PacketBufferH264FrameGap, DisallowFrameGapForH264NoGeneric) { IsEmpty()); } +TEST_F(PacketBufferH264FrameGap, + AllowFrameGapForH264WithGenericOnFirstPacketOnly) { + bool generic = true; + InsertH264(1, kKeyFrame, kFirst, kLast, 1001, {}, 0, 0, generic); + InsertH264(3, kDeltaFrame, kFirst, kNotLast, 1003, {}, 0, 0, generic); + // Second packet is not generic, but we can still output frame with 2 packets. + EXPECT_THAT( + InsertH264(4, kDeltaFrame, kNotFirst, kLast, 1003, {}, 0, 0, !generic) + .packets, + SizeIs(2)); +} + +TEST_F(PacketBufferH264FrameGap, DoesntCrashWhenTryToClearBefore1stPacket) { + // Test scenario copied from the https://issues.chromium.org/370689424 + InsertH264(41087, kKeyFrame, kNotFirst, kNotLast, 123, 0, 0, false); + packet_buffer_.ClearTo(30896); + InsertH264(32896, kKeyFrame, kFirst, kLast, 123, 0, 0, false); +} + } // namespace } // namespace video_coding } // namespace webrtc diff --git a/modules/video_coding/rtp_frame_id_only_ref_finder.cc b/modules/video_coding/rtp_frame_id_only_ref_finder.cc index 9f3d5bb296..714571461d 100644 --- a/modules/video_coding/rtp_frame_id_only_ref_finder.cc +++ b/modules/video_coding/rtp_frame_id_only_ref_finder.cc @@ -10,9 +10,12 @@ #include "modules/video_coding/rtp_frame_id_only_ref_finder.h" +#include #include -#include "rtc_base/logging.h" +#include "api/video/video_frame_type.h" +#include "modules/rtp_rtcp/source/frame_object.h" +#include "modules/video_coding/rtp_frame_reference_finder.h" namespace webrtc { diff --git a/modules/video_coding/rtp_frame_id_only_ref_finder.h b/modules/video_coding/rtp_frame_id_only_ref_finder.h index c842fb5011..9c16d3004b 100644 --- a/modules/video_coding/rtp_frame_id_only_ref_finder.h +++ b/modules/video_coding/rtp_frame_id_only_ref_finder.h @@ -11,9 +11,9 @@ #ifndef MODULES_VIDEO_CODING_RTP_FRAME_ID_ONLY_REF_FINDER_H_ #define MODULES_VIDEO_CODING_RTP_FRAME_ID_ONLY_REF_FINDER_H_ +#include #include -#include "absl/container/inlined_vector.h" #include "modules/rtp_rtcp/source/frame_object.h" #include "modules/video_coding/rtp_frame_reference_finder.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" diff --git a/modules/video_coding/rtp_frame_reference_finder.cc b/modules/video_coding/rtp_frame_reference_finder.cc index b73fcfe9d5..40c7080159 100644 --- a/modules/video_coding/rtp_frame_reference_finder.cc +++ 
b/modules/video_coding/rtp_frame_reference_finder.cc @@ -10,15 +10,24 @@ #include "modules/video_coding/rtp_frame_reference_finder.h" +#include +#include +#include #include +#include -#include "absl/types/variant.h" +#include "api/video/video_codec_type.h" #include "modules/rtp_rtcp/source/frame_object.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/rtp_frame_id_only_ref_finder.h" #include "modules/video_coding/rtp_generic_ref_finder.h" #include "modules/video_coding/rtp_seq_num_only_ref_finder.h" #include "modules/video_coding/rtp_vp8_ref_finder.h" #include "modules/video_coding/rtp_vp9_ref_finder.h" +#include "rtc_base/numerics/sequence_number_util.h" namespace webrtc { namespace internal { @@ -32,12 +41,12 @@ class RtpFrameReferenceFinderImpl { void ClearTo(uint16_t seq_num); private: - using RefFinder = absl::variant; + using RefFinder = std::variant; template T& GetRefFinderAs(); @@ -56,7 +65,7 @@ RtpFrameReferenceFinder::ReturnVector RtpFrameReferenceFinderImpl::ManageFrame( switch (frame->codec_type()) { case kVideoCodecVP8: { const RTPVideoHeaderVP8& vp8_header = - absl::get(video_header.video_type_header); + std::get(video_header.video_type_header); if (vp8_header.temporalIdx == kNoTemporalIdx || vp8_header.tl0PicIdx == kNoTl0PicIdx) { @@ -73,7 +82,7 @@ RtpFrameReferenceFinder::ReturnVector RtpFrameReferenceFinderImpl::ManageFrame( } case kVideoCodecVP9: { const RTPVideoHeaderVP9& vp9_header = - absl::get(video_header.video_type_header); + std::get(video_header.video_type_header); if (vp9_header.temporal_idx == kNoTemporalIdx) { if (vp9_header.picture_id == kNoPictureId) { @@ -88,7 +97,7 @@ RtpFrameReferenceFinder::ReturnVector RtpFrameReferenceFinderImpl::ManageFrame( return GetRefFinderAs().ManageFrame(std::move(frame)); } case kVideoCodecGeneric: { - if (auto* generic_header = absl::get_if( + if (auto* generic_header = std::get_if( &video_header.video_type_header)) { return GetRefFinderAs().ManageFrame( std::move(frame), generic_header->picture_id); @@ -106,7 +115,7 @@ RtpFrameReferenceFinder::ReturnVector RtpFrameReferenceFinderImpl::ManageFrame( RtpFrameReferenceFinder::ReturnVector RtpFrameReferenceFinderImpl::PaddingReceived(uint16_t seq_num) { - if (auto* ref_finder = absl::get_if(&ref_finder_)) { + if (auto* ref_finder = std::get_if(&ref_finder_)) { return ref_finder->PaddingReceived(seq_num); } return {}; @@ -114,9 +123,9 @@ RtpFrameReferenceFinderImpl::PaddingReceived(uint16_t seq_num) { void RtpFrameReferenceFinderImpl::ClearTo(uint16_t seq_num) { struct ClearToVisitor { - void operator()(absl::monostate& ref_finder) {} - void operator()(RtpGenericFrameRefFinder& ref_finder) {} - void operator()(RtpFrameIdOnlyRefFinder& ref_finder) {} + void operator()(std::monostate& /* ref_finder */) {} + void operator()(RtpGenericFrameRefFinder& /* ref_finder */) {} + void operator()(RtpFrameIdOnlyRefFinder& /* ref_finder */) {} void operator()(RtpSeqNumOnlyRefFinder& ref_finder) { ref_finder.ClearTo(seq_num); } @@ -129,12 +138,12 @@ void RtpFrameReferenceFinderImpl::ClearTo(uint16_t seq_num) { uint16_t seq_num; }; - absl::visit(ClearToVisitor{seq_num}, ref_finder_); + std::visit(ClearToVisitor{seq_num}, ref_finder_); } template T& RtpFrameReferenceFinderImpl::GetRefFinderAs() { - if (auto* ref_finder = absl::get_if(&ref_finder_)) { + if (auto* 
ref_finder = std::get_if(&ref_finder_)) { return *ref_finder; } return ref_finder_.emplace(); diff --git a/modules/video_coding/rtp_frame_reference_finder.h b/modules/video_coding/rtp_frame_reference_finder.h index 6050513179..04a3eea76b 100644 --- a/modules/video_coding/rtp_frame_reference_finder.h +++ b/modules/video_coding/rtp_frame_reference_finder.h @@ -11,8 +11,10 @@ #ifndef MODULES_VIDEO_CODING_RTP_FRAME_REFERENCE_FINDER_H_ #define MODULES_VIDEO_CODING_RTP_FRAME_REFERENCE_FINDER_H_ +#include #include +#include "absl/container/inlined_vector.h" #include "modules/rtp_rtcp/source/frame_object.h" namespace webrtc { diff --git a/modules/video_coding/rtp_frame_reference_finder_unittest.cc b/modules/video_coding/rtp_frame_reference_finder_unittest.cc index 0ca2cf0456..56c0cf31b4 100644 --- a/modules/video_coding/rtp_frame_reference_finder_unittest.cc +++ b/modules/video_coding/rtp_frame_reference_finder_unittest.cc @@ -10,18 +10,25 @@ #include "modules/video_coding/rtp_frame_reference_finder.h" +#include #include -#include #include +#include +#include #include #include -#include +#include "api/rtp_packet_infos.h" +#include "api/video/encoded_frame.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" +#include "api/video/video_timing.h" #include "modules/rtp_rtcp/source/frame_object.h" -#include "modules/video_coding/packet_buffer.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" #include "rtc_base/random.h" -#include "rtc_base/ref_count.h" -#include "system_wrappers/include/clock.h" #include "test/gtest.h" namespace webrtc { @@ -54,7 +61,8 @@ std::unique_ptr CreateFrame( kVideoRotation_0, VideoContentType::UNSPECIFIED, video_header, - /*color_space=*/absl::nullopt, + /*color_space=*/std::nullopt, + /*frame_instrumentation_data=*/std::nullopt, RtpPacketInfos(), EncodedImageBuffer::Create(/*size=*/0)); // clang-format on @@ -148,7 +156,7 @@ class TestRtpFrameReferenceFinder : public ::testing::Test { RefsToSet(m, refs...); } - void RefsToSet(std::set* m) const {} + void RefsToSet(std::set* /* m */) const {} Random rand_; std::unique_ptr reference_finder_; diff --git a/modules/video_coding/rtp_generic_ref_finder.cc b/modules/video_coding/rtp_generic_ref_finder.cc index fd5b8afda1..85dcaf0832 100644 --- a/modules/video_coding/rtp_generic_ref_finder.cc +++ b/modules/video_coding/rtp_generic_ref_finder.cc @@ -10,8 +10,16 @@ #include "modules/video_coding/rtp_generic_ref_finder.h" +#include +#include #include +#include "api/video/encoded_frame.h" +#include "api/video/video_codec_constants.h" +#include "modules/rtp_rtcp/source/frame_object.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/rtp_frame_reference_finder.h" #include "rtc_base/logging.h" namespace webrtc { @@ -19,6 +27,13 @@ namespace webrtc { RtpFrameReferenceFinder::ReturnVector RtpGenericFrameRefFinder::ManageFrame( std::unique_ptr frame, const RTPVideoHeader::GenericDescriptorInfo& descriptor) { + RtpFrameReferenceFinder::ReturnVector res; + if (descriptor.spatial_index >= kMaxSpatialLayers) { + RTC_LOG(LS_WARNING) << "Spatial index " << descriptor.spatial_index + << " is unsupported."; + return res; + } + // Frame IDs are unwrapped in the RtpVideoStreamReceiver, no need to unwrap // them here. 
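
The reference-finder changes above are largely a mechanical absl::variant to std::variant migration (get_if, visit, and a monostate-aware visitor). A compact standalone illustration of that dispatch pattern follows; SeqNumFinder, FrameIdFinder, and the ClearTo helper are made-up names, not the WebRTC classes.

#include <cstdint>
#include <iostream>
#include <variant>

struct SeqNumFinder {
  void ClearTo(uint16_t seq_num) { std::cout << "clear to " << seq_num << "\n"; }
};
struct FrameIdFinder {};  // has no notion of RTP sequence numbers

using RefFinder = std::variant<std::monostate, SeqNumFinder, FrameIdFinder>;

void ClearTo(RefFinder& finder, uint16_t seq_num) {
  // Only the alternative that tracks sequence numbers reacts; the others no-op.
  struct Visitor {
    void operator()(std::monostate&) {}
    void operator()(SeqNumFinder& f) { f.ClearTo(seq_num); }
    void operator()(FrameIdFinder&) {}
    uint16_t seq_num;
  };
  std::visit(Visitor{seq_num}, finder);
}

int main() {
  RefFinder finder = SeqNumFinder{};
  ClearTo(finder, 1234);  // dispatches to SeqNumFinder
  if (auto* f = std::get_if<SeqNumFinder>(&finder)) {
    f->ClearTo(42);       // direct access, analogous to GetRefFinderAs
  }
}
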
frame->SetId(descriptor.frame_id); @@ -26,7 +41,6 @@ RtpFrameReferenceFinder::ReturnVector RtpGenericFrameRefFinder::ManageFrame( if (descriptor.temporal_index != kNoTemporalIdx) frame->SetTemporalIndex(descriptor.temporal_index); - RtpFrameReferenceFinder::ReturnVector res; if (EncodedFrame::kMaxFrameReferences < descriptor.dependencies.size()) { RTC_LOG(LS_WARNING) << "Too many dependencies in generic descriptor."; return res; diff --git a/modules/video_coding/rtp_generic_ref_finder.h b/modules/video_coding/rtp_generic_ref_finder.h index 021e31a0cc..3a759a1ed2 100644 --- a/modules/video_coding/rtp_generic_ref_finder.h +++ b/modules/video_coding/rtp_generic_ref_finder.h @@ -14,6 +14,7 @@ #include #include "modules/rtp_rtcp/source/frame_object.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/rtp_frame_reference_finder.h" namespace webrtc { diff --git a/modules/video_coding/rtp_seq_num_only_ref_finder.cc b/modules/video_coding/rtp_seq_num_only_ref_finder.cc index 59b027e2ce..ab09b012c3 100644 --- a/modules/video_coding/rtp_seq_num_only_ref_finder.cc +++ b/modules/video_coding/rtp_seq_num_only_ref_finder.cc @@ -10,9 +10,17 @@ #include "modules/video_coding/rtp_seq_num_only_ref_finder.h" +#include +#include #include +#include "api/video/video_frame_type.h" +#include "modules/rtp_rtcp/source/frame_object.h" +#include "modules/video_coding/rtp_frame_reference_finder.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/mod_ops.h" +#include "rtc_base/numerics/sequence_number_util.h" namespace webrtc { diff --git a/modules/video_coding/rtp_seq_num_only_ref_finder.h b/modules/video_coding/rtp_seq_num_only_ref_finder.h index 166b6a7f0c..88aa6dce96 100644 --- a/modules/video_coding/rtp_seq_num_only_ref_finder.h +++ b/modules/video_coding/rtp_seq_num_only_ref_finder.h @@ -11,16 +11,17 @@ #ifndef MODULES_VIDEO_CODING_RTP_SEQ_NUM_ONLY_REF_FINDER_H_ #define MODULES_VIDEO_CODING_RTP_SEQ_NUM_ONLY_REF_FINDER_H_ +#include #include #include #include #include #include -#include "absl/container/inlined_vector.h" #include "modules/rtp_rtcp/source/frame_object.h" #include "modules/video_coding/rtp_frame_reference_finder.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" +#include "rtc_base/numerics/sequence_number_util.h" namespace webrtc { diff --git a/modules/video_coding/rtp_vp8_ref_finder.cc b/modules/video_coding/rtp_vp8_ref_finder.cc index 185756ce51..995ad86d96 100644 --- a/modules/video_coding/rtp_vp8_ref_finder.cc +++ b/modules/video_coding/rtp_vp8_ref_finder.cc @@ -10,16 +10,26 @@ #include "modules/video_coding/rtp_vp8_ref_finder.h" +#include +#include +#include #include +#include "api/video/video_frame_type.h" +#include "modules/rtp_rtcp/source/frame_object.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "modules/video_coding/rtp_frame_reference_finder.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/mod_ops.h" +#include "rtc_base/numerics/sequence_number_util.h" namespace webrtc { RtpFrameReferenceFinder::ReturnVector RtpVp8RefFinder::ManageFrame( std::unique_ptr frame) { - const RTPVideoHeaderVP8& codec_header = absl::get( - frame->GetRtpVideoHeader().video_type_header); + const RTPVideoHeaderVP8& codec_header = + std::get(frame->GetRtpVideoHeader().video_type_header); if (codec_header.temporalIdx != kNoTemporalIdx) frame->SetTemporalIndex(codec_header.temporalIdx); @@ -214,7 +224,7 @@ void 
RtpVp8RefFinder::RetryStashedFrames( do { complete_frame = false; for (auto it = stashed_frames_.begin(); it != stashed_frames_.end();) { - const RTPVideoHeaderVP8& codec_header = absl::get( + const RTPVideoHeaderVP8& codec_header = std::get( it->frame->GetRtpVideoHeader().video_type_header); FrameDecision decision = ManageFrameInternal(it->frame.get(), codec_header, it->unwrapped_tl0); diff --git a/modules/video_coding/rtp_vp8_ref_finder.h b/modules/video_coding/rtp_vp8_ref_finder.h index 82fdbd31a1..8f581ca90a 100644 --- a/modules/video_coding/rtp_vp8_ref_finder.h +++ b/modules/video_coding/rtp_vp8_ref_finder.h @@ -11,15 +11,18 @@ #ifndef MODULES_VIDEO_CODING_RTP_VP8_REF_FINDER_H_ #define MODULES_VIDEO_CODING_RTP_VP8_REF_FINDER_H_ +#include +#include #include #include #include #include -#include "absl/container/inlined_vector.h" #include "modules/rtp_rtcp/source/frame_object.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "modules/video_coding/rtp_frame_reference_finder.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" +#include "rtc_base/numerics/sequence_number_util.h" namespace webrtc { diff --git a/modules/video_coding/rtp_vp8_ref_finder_unittest.cc b/modules/video_coding/rtp_vp8_ref_finder_unittest.cc index a27085e584..0ed70b2e39 100644 --- a/modules/video_coding/rtp_vp8_ref_finder_unittest.cc +++ b/modules/video_coding/rtp_vp8_ref_finder_unittest.cc @@ -10,10 +10,24 @@ #include "modules/video_coding/rtp_vp8_ref_finder.h" +#include +#include +#include #include #include +#include "api/array_view.h" +#include "api/rtp_packet_infos.h" +#include "api/video/encoded_frame.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" +#include "api/video/video_timing.h" #include "modules/rtp_rtcp/source/frame_object.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "test/gmock.h" #include "test/gtest.h" @@ -30,7 +44,7 @@ namespace { MATCHER_P2(HasIdAndRefs, id, refs, "") { return Matches(Eq(id))(arg->Id()) && Matches(UnorderedElementsAreArray(refs))( - rtc::ArrayView(arg->references, arg->num_references)); + ArrayView(arg->references, arg->num_references)); } Matcher>&> @@ -92,7 +106,8 @@ class Frame { kVideoRotation_0, VideoContentType::UNSPECIFIED, video_header, - /*color_space=*/absl::nullopt, + /*color_space=*/std::nullopt, + /*frame_instrumentation_data=*/std::nullopt, RtpPacketInfos(), EncodedImageBuffer::Create(/*size=*/0)); // clang-format on @@ -100,9 +115,9 @@ class Frame { private: bool is_keyframe_ = false; - absl::optional picture_id_; - absl::optional temporal_id_; - absl::optional tl0_idx_; + std::optional picture_id_; + std::optional temporal_id_; + std::optional tl0_idx_; bool sync = false; }; diff --git a/modules/video_coding/rtp_vp9_ref_finder.cc b/modules/video_coding/rtp_vp9_ref_finder.cc index 175ed3464b..f9abf306d0 100644 --- a/modules/video_coding/rtp_vp9_ref_finder.cc +++ b/modules/video_coding/rtp_vp9_ref_finder.cc @@ -11,15 +11,28 @@ #include "modules/video_coding/rtp_vp9_ref_finder.h" #include +#include +#include +#include #include +#include "api/video/encoded_frame.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_frame_type.h" +#include "modules/rtp_rtcp/source/frame_object.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include 
"modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "modules/video_coding/rtp_frame_reference_finder.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/mod_ops.h" +#include "rtc_base/numerics/sequence_number_util.h" namespace webrtc { RtpFrameReferenceFinder::ReturnVector RtpVp9RefFinder::ManageFrame( std::unique_ptr frame) { - const RTPVideoHeaderVP9& codec_header = absl::get( - frame->GetRtpVideoHeader().video_type_header); + const RTPVideoHeaderVP9& codec_header = + std::get(frame->GetRtpVideoHeader().video_type_header); if (codec_header.temporal_idx != kNoTemporalIdx) frame->SetTemporalIndex(codec_header.temporal_idx); @@ -132,24 +145,20 @@ RtpVp9RefFinder::FrameDecision RtpVp9RefFinder::ManageFrameGof( FlattenFrameIdAndRefs(frame, codec_header.inter_layer_predicted); return kHandOff; } - } else if (frame->frame_type() == VideoFrameType::kVideoFrameKey) { - if (frame->SpatialIndex() == 0) { + } else { + if (frame->frame_type() == VideoFrameType::kVideoFrameKey) { RTC_LOG(LS_WARNING) << "Received keyframe without scalability structure"; return kDrop; } - const auto gof_info_it = gof_info_.find(unwrapped_tl0); - if (gof_info_it == gof_info_.end()) - return kStash; - info = &gof_info_it->second; - - frame->num_references = 0; - FrameReceivedVp9(frame->Id(), info); - FlattenFrameIdAndRefs(frame, codec_header.inter_layer_predicted); - return kHandOff; - } else { - auto gof_info_it = gof_info_.find( - (codec_header.temporal_idx == 0) ? unwrapped_tl0 - 1 : unwrapped_tl0); + // tl0_idx is incremented on temporal_idx=0 frames of the lowest spatial + // layer (which spatial_idx is not necessarily zero). Upper spatial layer + // frames with inter-layer prediction use GOF info of their base spatial + // layer frames. + const bool use_prev_gof = + codec_header.temporal_idx == 0 && !codec_header.inter_layer_predicted; + auto gof_info_it = + gof_info_.find(use_prev_gof ? unwrapped_tl0 - 1 : unwrapped_tl0); // Gof info for this frame is not available yet, stash this frame. if (gof_info_it == gof_info_.end()) @@ -185,29 +194,29 @@ RtpVp9RefFinder::FrameDecision RtpVp9RefFinder::ManageFrameGof( auto up_switch_erase_to = up_switch_.lower_bound(old_picture_id); up_switch_.erase(up_switch_.begin(), up_switch_erase_to); - size_t diff = - ForwardDiff(info->gof->pid_start, frame->Id()); - size_t gof_idx = diff % info->gof->num_frames_in_gof; - - if (info->gof->num_ref_pics[gof_idx] > EncodedFrame::kMaxFrameReferences) { - return kDrop; - } - // Populate references according to the scalability structure. - frame->num_references = info->gof->num_ref_pics[gof_idx]; - for (size_t i = 0; i < frame->num_references; ++i) { - frame->references[i] = - Subtract(frame->Id(), info->gof->pid_diff[gof_idx][i]); + if (codec_header.inter_pic_predicted) { + size_t diff = ForwardDiff(info->gof->pid_start, + frame->Id()); + size_t gof_idx = diff % info->gof->num_frames_in_gof; - // If this is a reference to a frame earlier than the last up switch point, - // then ignore this reference. - if (UpSwitchInIntervalVp9(frame->Id(), codec_header.temporal_idx, - frame->references[i])) { - --frame->num_references; + if (info->gof->num_ref_pics[gof_idx] > EncodedFrame::kMaxFrameReferences) { + return kDrop; } - } - // Override GOF references. - if (!codec_header.inter_pic_predicted) { + // Populate references according to the scalability structure. 
+ frame->num_references = info->gof->num_ref_pics[gof_idx]; + for (size_t i = 0; i < frame->num_references; ++i) { + frame->references[i] = Subtract( + frame->Id(), info->gof->pid_diff[gof_idx][i]); + + // If this is a reference to a frame earlier than the last up switch + // point, then ignore this reference. + if (UpSwitchInIntervalVp9(frame->Id(), codec_header.temporal_idx, + frame->references[i])) { + --frame->num_references; + } + } + } else { frame->num_references = 0; } @@ -315,7 +324,7 @@ void RtpVp9RefFinder::RetryStashedFrames( do { complete_frame = false; for (auto it = stashed_frames_.begin(); it != stashed_frames_.end();) { - const RTPVideoHeaderVP9& codec_header = absl::get( + const RTPVideoHeaderVP9& codec_header = std::get( it->frame->GetRtpVideoHeader().video_type_header); RTC_DCHECK(!codec_header.flexible_mode); FrameDecision decision = diff --git a/modules/video_coding/rtp_vp9_ref_finder.h b/modules/video_coding/rtp_vp9_ref_finder.h index fb14417ac3..d28a78737b 100644 --- a/modules/video_coding/rtp_vp9_ref_finder.h +++ b/modules/video_coding/rtp_vp9_ref_finder.h @@ -11,15 +11,18 @@ #ifndef MODULES_VIDEO_CODING_RTP_VP9_REF_FINDER_H_ #define MODULES_VIDEO_CODING_RTP_VP9_REF_FINDER_H_ +#include +#include #include #include #include #include -#include "absl/container/inlined_vector.h" #include "modules/rtp_rtcp/source/frame_object.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/rtp_frame_reference_finder.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" +#include "rtc_base/numerics/sequence_number_util.h" namespace webrtc { diff --git a/modules/video_coding/rtp_vp9_ref_finder_unittest.cc b/modules/video_coding/rtp_vp9_ref_finder_unittest.cc index a3cb31ade5..822f11472f 100644 --- a/modules/video_coding/rtp_vp9_ref_finder_unittest.cc +++ b/modules/video_coding/rtp_vp9_ref_finder_unittest.cc @@ -10,10 +10,28 @@ #include "modules/video_coding/rtp_vp9_ref_finder.h" +#include +#include +#include +#include +#include +#include #include #include +#include "api/array_view.h" +#include "api/rtp_packet_infos.h" +#include "api/video/encoded_frame.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" +#include "api/video/video_timing.h" #include "modules/rtp_rtcp/source/frame_object.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "rtc_base/checks.h" #include "test/gmock.h" #include "test/gtest.h" @@ -128,7 +146,8 @@ class Frame { kVideoRotation_0, VideoContentType::UNSPECIFIED, video_header, - /*color_space=*/absl::nullopt, + /*color_space=*/std::nullopt, + /*frame_instrumentation_data=*/std::nullopt, RtpPacketInfos(), EncodedImageBuffer::Create(/*size=*/0)); // clang-format on @@ -138,10 +157,10 @@ class Frame { uint16_t seq_num_start = 0; uint16_t seq_num_end = 0; bool keyframe = false; - absl::optional picture_id; - absl::optional spatial_id; - absl::optional temporal_id; - absl::optional tl0_idx; + std::optional picture_id; + std::optional spatial_id; + std::optional temporal_id; + std::optional tl0_idx; bool up_switch = false; bool inter_layer = false; bool inter_pic = true; @@ -172,8 +191,7 @@ class HasFrameMatcher : public MatcherInterface { return false; } - rtc::ArrayView actual_refs((*it)->references, - (*it)->num_references); + ArrayView actual_refs((*it)->references, 
(*it)->num_references); if (!Matches(UnorderedElementsAreArray(expected_refs_))(actual_refs)) { if (result_listener->IsInterested()) { *result_listener << "Frame with frame_id:" << frame_id_ << " and " @@ -360,6 +378,18 @@ TEST_F(RtpVp9RefFinderTest, GofSkipFramesTemporalLayers_0212) { EXPECT_THAT(frames_, HasFrameWithIdAndRefs(35, {30})); } +TEST_F(RtpVp9RefFinderTest, GofInterLayerPredS0KeyS1Delta) { + GofInfoVP9 ss; + ss.SetGofInfoVP9(kTemporalStructureMode1); + + Insert(Frame().Pid(1).SidAndTid(0, 0).Tl0(0).AsKeyFrame().Gof(&ss)); + Insert(Frame().Pid(1).SidAndTid(1, 0).Tl0(0).AsInterLayer().NotAsInterPic()); + + ASSERT_EQ(2UL, frames_.size()); + EXPECT_THAT(frames_, HasFrameWithIdAndRefs(5, {})); + EXPECT_THAT(frames_, HasFrameWithIdAndRefs(6, {5})); +} + TEST_F(RtpVp9RefFinderTest, GofTemporalLayers_01) { GofInfoVP9 ss; ss.SetGofInfoVP9(kTemporalStructureMode2); // 0101 pattern diff --git a/modules/video_coding/svc/BUILD.gn b/modules/video_coding/svc/BUILD.gn index b8ce91d99a..9ee0223154 100644 --- a/modules/video_coding/svc/BUILD.gn +++ b/modules/video_coding/svc/BUILD.gn @@ -17,10 +17,9 @@ rtc_source_set("scalability_mode_util") { "../../../api/video_codecs:scalability_mode", "../../../api/video_codecs:video_codecs_api", "../../../rtc_base:checks", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "../../../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -33,12 +32,10 @@ rtc_source_set("scalable_video_controller") { deps = [ "../../../api/transport/rtp:dependency_descriptor", "../../../api/video:video_bitrate_allocation", + "../../../api/video:video_codec_constants", "../../../common_video/generic_frame_descriptor", "../../../rtc_base:checks", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -63,11 +60,9 @@ rtc_source_set("scalability_structures") { "../../../common_video/generic_frame_descriptor", "../../../rtc_base:checks", "../../../rtc_base:logging", - ] - absl_deps = [ + "../../../rtc_base/system:rtc_export", "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -78,14 +73,40 @@ rtc_source_set("svc_rate_allocator") { ] deps = [ ":scalability_structures", + ":scalable_video_controller", + "../../../api:field_trials_view", + "../../../api/units:data_rate", "../../../api/video:video_bitrate_allocation", "../../../api/video:video_bitrate_allocator", "../../../api/video:video_codec_constants", + "../../../api/video:video_frame", + "../../../api/video_codecs:scalability_mode", "../../../api/video_codecs:video_codecs_api", "../../../rtc_base:checks", "../../../rtc_base/experiments:stable_target_rate_experiment", + "//third_party/abseil-cpp/absl/container:inlined_vector", + ] +} + +rtc_source_set("simulcast_to_svc_converter") { + sources = [ + "simulcast_to_svc_converter.cc", + "simulcast_to_svc_converter.h", + ] + deps = [ + ":scalability_mode_util", + ":scalability_structures", + ":scalable_video_controller", + "..:codec_globals_headers", + "../../../api/video:encoded_image", + "../../../api/video:video_bitrate_allocation", + "../../../api/video_codecs:scalability_mode", + "../../../api/video_codecs:video_codecs_api", + "../../../modules/video_coding:video_codec_interface", + 
"../../../modules/video_coding:video_coding_utility", + "../../../rtc_base:checks", + "../../../rtc_base/system:rtc_export", ] - absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector" ] } if (rtc_include_tests) { @@ -104,6 +125,7 @@ if (rtc_include_tests) { ":scalability_mode_util", ":scalability_structures", ":scalable_video_controller", + ":simulcast_to_svc_converter", "..:chain_diff_calculator", "..:frame_dependencies_calculator", "../../../api:array_view", @@ -112,12 +134,10 @@ if (rtc_include_tests) { "../../../api/video:video_frame_type", "../../../api/video_codecs:scalability_mode", "../../../common_video/generic_frame_descriptor", + "../../../rtc_base:checks", "../../../rtc_base:stringutils", "../../../test:test_support", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -127,8 +147,35 @@ if (rtc_include_tests) { deps = [ ":svc_rate_allocator", "..:webrtc_vp9_helpers", + "../../../api/units:data_rate", + "../../../api/video:video_bitrate_allocation", + "../../../api/video:video_bitrate_allocator", + "../../../api/video:video_codec_constants", + "../../../api/video:video_frame", + "../../../api/video_codecs:scalability_mode", + "../../../api/video_codecs:video_codecs_api", + "../../../rtc_base:checks", + "../../../test:explicit_key_value_config", + "../../../test:test_support", + "../codecs/av1:av1_svc_config", + "//third_party/abseil-cpp/absl/container:inlined_vector", + ] + } + + rtc_source_set("simulcast_to_svc_converter_tests") { + testonly = true + sources = [ "simulcast_to_svc_converter_unittest.cc" ] + deps = [ + ":scalability_structures", + ":scalable_video_controller", + ":simulcast_to_svc_converter", + "..:video_codec_interface", + "../../../api/video:encoded_image", + "../../../api/video:video_bitrate_allocation", + "../../../api/video:video_frame", + "../../../api/video_codecs:scalability_mode", + "../../../api/video_codecs:video_codecs_api", "../../../rtc_base:checks", - "../../../test:field_trial", "../../../test:test_support", ] } diff --git a/modules/video_coding/svc/create_scalability_structure.cc b/modules/video_coding/svc/create_scalability_structure.cc index fbcd27b139..ca22da71fc 100644 --- a/modules/video_coding/svc/create_scalability_structure.cc +++ b/modules/video_coding/svc/create_scalability_structure.cc @@ -10,6 +10,7 @@ #include "modules/video_coding/svc/create_scalability_structure.h" #include +#include #include "api/video_codecs/scalability_mode.h" #include "modules/video_coding/svc/scalability_structure_full_svc.h" @@ -18,7 +19,6 @@ #include "modules/video_coding/svc/scalability_structure_simulcast.h" #include "modules/video_coding/svc/scalable_video_controller.h" #include "modules/video_coding/svc/scalable_video_controller_no_layering.h" -#include "rtc_base/checks.h" namespace webrtc { namespace { @@ -282,14 +282,14 @@ std::unique_ptr CreateScalabilityStructure( return nullptr; } -absl::optional +std::optional ScalabilityStructureConfig(ScalabilityMode name) { for (const auto& entry : kFactories) { if (entry.name == name) { return entry.config; } } - return absl::nullopt; + return std::nullopt; } } // namespace webrtc diff --git a/modules/video_coding/svc/create_scalability_structure.h b/modules/video_coding/svc/create_scalability_structure.h index 3b67443693..6328a8536d 100644 --- a/modules/video_coding/svc/create_scalability_structure.h +++ b/modules/video_coding/svc/create_scalability_structure.h @@ -11,23 
+11,23 @@ #define MODULES_VIDEO_CODING_SVC_CREATE_SCALABILITY_STRUCTURE_H_ #include -#include +#include -#include "absl/types/optional.h" #include "api/video_codecs/scalability_mode.h" #include "modules/video_coding/svc/scalable_video_controller.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { // Creates a structure by name according to // https://w3c.github.io/webrtc-svc/#scalabilitymodes* // Returns nullptr for unknown name. -std::unique_ptr CreateScalabilityStructure( - ScalabilityMode name); +std::unique_ptr RTC_EXPORT +CreateScalabilityStructure(ScalabilityMode name); // Returns description of the scalability structure identified by 'name', // Return nullopt for unknown name. -absl::optional +std::optional ScalabilityStructureConfig(ScalabilityMode name); } // namespace webrtc diff --git a/modules/video_coding/svc/scalability_mode_util.cc b/modules/video_coding/svc/scalability_mode_util.cc index 35d66df203..9ee30f4d4d 100644 --- a/modules/video_coding/svc/scalability_mode_util.cc +++ b/modules/video_coding/svc/scalability_mode_util.cc @@ -10,286 +10,356 @@ #include "modules/video_coding/svc/scalability_mode_util.h" +#include +#include +#include +#include + +#include "absl/algorithm/container.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/video_codec.h" #include "rtc_base/checks.h" namespace webrtc { -absl::optional ScalabilityModeFromString( - absl::string_view mode_string) { - if (mode_string == "L1T1") - return ScalabilityMode::kL1T1; - if (mode_string == "L1T2") - return ScalabilityMode::kL1T2; - if (mode_string == "L1T3") - return ScalabilityMode::kL1T3; +namespace { + +struct ScalabilityModeParameters { + const ScalabilityMode scalability_mode; + const absl::string_view name; + const int num_spatial_layers; + const int num_temporal_layers; + const InterLayerPredMode inter_layer_pred; + const std::optional ratio = + ScalabilityModeResolutionRatio::kTwoToOne; + const bool shift = false; +}; - if (mode_string == "L2T1") - return ScalabilityMode::kL2T1; - if (mode_string == "L2T1h") - return ScalabilityMode::kL2T1h; - if (mode_string == "L2T1_KEY") - return ScalabilityMode::kL2T1_KEY; +constexpr size_t kNumScalabilityModes = + static_cast(ScalabilityMode::kS3T3h) + 1; - if (mode_string == "L2T2") - return ScalabilityMode::kL2T2; - if (mode_string == "L2T2h") - return ScalabilityMode::kL2T2h; - if (mode_string == "L2T2_KEY") - return ScalabilityMode::kL2T2_KEY; - if (mode_string == "L2T2_KEY_SHIFT") - return ScalabilityMode::kL2T2_KEY_SHIFT; - if (mode_string == "L2T3") - return ScalabilityMode::kL2T3; - if (mode_string == "L2T3h") - return ScalabilityMode::kL2T3h; - if (mode_string == "L2T3_KEY") - return ScalabilityMode::kL2T3_KEY; +constexpr ScalabilityModeParameters kScalabilityModeParams[] = { + ScalabilityModeParameters{.scalability_mode = ScalabilityMode::kL1T1, + .name = "L1T1", + .num_spatial_layers = 1, + .num_temporal_layers = 1, + .inter_layer_pred = InterLayerPredMode::kOff, + .ratio = std::nullopt}, + ScalabilityModeParameters{.scalability_mode = ScalabilityMode::kL1T2, + .name = "L1T2", + .num_spatial_layers = 1, + .num_temporal_layers = 2, + .inter_layer_pred = InterLayerPredMode::kOff, + .ratio = std::nullopt}, + ScalabilityModeParameters{.scalability_mode = ScalabilityMode::kL1T3, + .name = "L1T3", + .num_spatial_layers = 1, + .num_temporal_layers = 3, + .inter_layer_pred = InterLayerPredMode::kOff, + .ratio = std::nullopt}, + 
ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL2T1, + .name = "L2T1", + .num_spatial_layers = 2, + .num_temporal_layers = 1, + .inter_layer_pred = InterLayerPredMode::kOn, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL2T1h, + .name = "L2T1h", + .num_spatial_layers = 2, + .num_temporal_layers = 1, + .inter_layer_pred = InterLayerPredMode::kOn, + .ratio = ScalabilityModeResolutionRatio::kThreeToTwo, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL2T1_KEY, + .name = "L2T1_KEY", + .num_spatial_layers = 2, + .num_temporal_layers = 1, + .inter_layer_pred = InterLayerPredMode::kOnKeyPic, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL2T2, + .name = "L2T2", + .num_spatial_layers = 2, + .num_temporal_layers = 2, + .inter_layer_pred = InterLayerPredMode::kOn, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL2T2h, + .name = "L2T2h", + .num_spatial_layers = 2, + .num_temporal_layers = 2, + .inter_layer_pred = InterLayerPredMode::kOn, + .ratio = ScalabilityModeResolutionRatio::kThreeToTwo, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL2T2_KEY, + .name = "L2T2_KEY", + .num_spatial_layers = 2, + .num_temporal_layers = 2, + .inter_layer_pred = InterLayerPredMode::kOnKeyPic, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL2T2_KEY_SHIFT, + .name = "L2T2_KEY_SHIFT", + .num_spatial_layers = 2, + .num_temporal_layers = 2, + .inter_layer_pred = InterLayerPredMode::kOnKeyPic, + .shift = true}, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL2T3, + .name = "L2T3", + .num_spatial_layers = 2, + .num_temporal_layers = 3, + .inter_layer_pred = InterLayerPredMode::kOn, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL2T3h, + .name = "L2T3h", + .num_spatial_layers = 2, + .num_temporal_layers = 3, + .inter_layer_pred = InterLayerPredMode::kOn, + .ratio = ScalabilityModeResolutionRatio::kThreeToTwo, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL2T3_KEY, + .name = "L2T3_KEY", + .num_spatial_layers = 2, + .num_temporal_layers = 3, + .inter_layer_pred = InterLayerPredMode::kOnKeyPic, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL3T1, + .name = "L3T1", + .num_spatial_layers = 3, + .num_temporal_layers = 1, + .inter_layer_pred = InterLayerPredMode::kOn, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL3T1h, + .name = "L3T1h", + .num_spatial_layers = 3, + .num_temporal_layers = 1, + .inter_layer_pred = InterLayerPredMode::kOn, + .ratio = ScalabilityModeResolutionRatio::kThreeToTwo, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL3T1_KEY, + .name = "L3T1_KEY", + .num_spatial_layers = 3, + .num_temporal_layers = 1, + .inter_layer_pred = InterLayerPredMode::kOnKeyPic, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL3T2, + .name = "L3T2", + .num_spatial_layers = 3, + .num_temporal_layers = 2, + .inter_layer_pred = InterLayerPredMode::kOn, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL3T2h, + .name = "L3T2h", + .num_spatial_layers = 3, + .num_temporal_layers = 2, + .inter_layer_pred = InterLayerPredMode::kOn, + .ratio = ScalabilityModeResolutionRatio::kThreeToTwo, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL3T2_KEY, + .name = "L3T2_KEY", + .num_spatial_layers = 3, + .num_temporal_layers = 2, + 
.inter_layer_pred = InterLayerPredMode::kOnKeyPic, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL3T3, + .name = "L3T3", + .num_spatial_layers = 3, + .num_temporal_layers = 3, + .inter_layer_pred = InterLayerPredMode::kOn, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL3T3h, + .name = "L3T3h", + .num_spatial_layers = 3, + .num_temporal_layers = 3, + .inter_layer_pred = InterLayerPredMode::kOn, + .ratio = ScalabilityModeResolutionRatio::kThreeToTwo, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kL3T3_KEY, + .name = "L3T3_KEY", + .num_spatial_layers = 3, + .num_temporal_layers = 3, + .inter_layer_pred = InterLayerPredMode::kOnKeyPic, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kS2T1, + .name = "S2T1", + .num_spatial_layers = 2, + .num_temporal_layers = 1, + .inter_layer_pred = InterLayerPredMode::kOff, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kS2T1h, + .name = "S2T1h", + .num_spatial_layers = 2, + .num_temporal_layers = 1, + .inter_layer_pred = InterLayerPredMode::kOff, + .ratio = ScalabilityModeResolutionRatio::kThreeToTwo, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kS2T2, + .name = "S2T2", + .num_spatial_layers = 2, + .num_temporal_layers = 2, + .inter_layer_pred = InterLayerPredMode::kOff, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kS2T2h, + .name = "S2T2h", + .num_spatial_layers = 2, + .num_temporal_layers = 2, + .inter_layer_pred = InterLayerPredMode::kOff, + .ratio = ScalabilityModeResolutionRatio::kThreeToTwo, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kS2T3, + .name = "S2T3", + .num_spatial_layers = 2, + .num_temporal_layers = 3, + .inter_layer_pred = InterLayerPredMode::kOff, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kS2T3h, + .name = "S2T3h", + .num_spatial_layers = 2, + .num_temporal_layers = 3, + .inter_layer_pred = InterLayerPredMode::kOff, + .ratio = ScalabilityModeResolutionRatio::kThreeToTwo, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kS3T1, + .name = "S3T1", + .num_spatial_layers = 3, + .num_temporal_layers = 1, + .inter_layer_pred = InterLayerPredMode::kOff, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kS3T1h, + .name = "S3T1h", + .num_spatial_layers = 3, + .num_temporal_layers = 1, + .inter_layer_pred = InterLayerPredMode::kOff, + .ratio = ScalabilityModeResolutionRatio::kThreeToTwo, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kS3T2, + .name = "S3T2", + .num_spatial_layers = 3, + .num_temporal_layers = 2, + .inter_layer_pred = InterLayerPredMode::kOff, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kS3T2h, + .name = "S3T2h", + .num_spatial_layers = 3, + .num_temporal_layers = 2, + .inter_layer_pred = InterLayerPredMode::kOff, + .ratio = ScalabilityModeResolutionRatio::kThreeToTwo, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kS3T3, + .name = "S3T3", + .num_spatial_layers = 3, + .num_temporal_layers = 3, + .inter_layer_pred = InterLayerPredMode::kOff, + }, + ScalabilityModeParameters{ + .scalability_mode = ScalabilityMode::kS3T3h, + .name = "S3T3h", + .num_spatial_layers = 3, + .num_temporal_layers = 3, + .inter_layer_pred = InterLayerPredMode::kOff, + .ratio = ScalabilityModeResolutionRatio::kThreeToTwo, + }, +}; - if (mode_string == "L3T1") - return ScalabilityMode::kL3T1; 
- if (mode_string == "L3T1h") - return ScalabilityMode::kL3T1h; - if (mode_string == "L3T1_KEY") - return ScalabilityMode::kL3T1_KEY; +// This could be replaced with std::all_of in c++20. +constexpr bool CheckScalabilityModeParams() { + static_assert(std::size(kScalabilityModeParams) == kNumScalabilityModes); + for (size_t s = 0; s < kNumScalabilityModes; ++s) { + if (kScalabilityModeParams[s].scalability_mode != + static_cast(s)) { + return false; + } + } + return true; +} - if (mode_string == "L3T2") - return ScalabilityMode::kL3T2; - if (mode_string == "L3T2h") - return ScalabilityMode::kL3T2h; - if (mode_string == "L3T2_KEY") - return ScalabilityMode::kL3T2_KEY; +static_assert(CheckScalabilityModeParams(), + "There is a scalability mode mismatch in the array!"); - if (mode_string == "L3T3") - return ScalabilityMode::kL3T3; - if (mode_string == "L3T3h") - return ScalabilityMode::kL3T3h; - if (mode_string == "L3T3_KEY") - return ScalabilityMode::kL3T3_KEY; +constexpr auto Idx(ScalabilityMode s) { + const auto index = static_cast>(s); + RTC_CHECK_LT(index, kNumScalabilityModes); + return index; +} - if (mode_string == "S2T1") - return ScalabilityMode::kS2T1; - if (mode_string == "S2T1h") - return ScalabilityMode::kS2T1h; - if (mode_string == "S2T2") - return ScalabilityMode::kS2T2; - if (mode_string == "S2T2h") - return ScalabilityMode::kS2T2h; - if (mode_string == "S2T3") - return ScalabilityMode::kS2T3; - if (mode_string == "S2T3h") - return ScalabilityMode::kS2T3h; - if (mode_string == "S3T1") - return ScalabilityMode::kS3T1; - if (mode_string == "S3T1h") - return ScalabilityMode::kS3T1h; - if (mode_string == "S3T2") - return ScalabilityMode::kS3T2; - if (mode_string == "S3T2h") - return ScalabilityMode::kS3T2h; - if (mode_string == "S3T3") - return ScalabilityMode::kS3T3; - if (mode_string == "S3T3h") - return ScalabilityMode::kS3T3h; +} // namespace - return absl::nullopt; +std::optional MakeScalabilityMode( + int num_spatial_layers, + int num_temporal_layers, + InterLayerPredMode inter_layer_pred, + std::optional ratio, + bool shift) { + for (const auto& candidate_mode : kScalabilityModeParams) { + if (candidate_mode.num_spatial_layers == num_spatial_layers && + candidate_mode.num_temporal_layers == num_temporal_layers) { + if (num_spatial_layers == 1 || + (candidate_mode.inter_layer_pred == inter_layer_pred && + candidate_mode.ratio == ratio && candidate_mode.shift == shift)) { + return candidate_mode.scalability_mode; + } + } + } + return std::nullopt; +} + +std::optional ScalabilityModeFromString( + absl::string_view mode_string) { + const auto it = + absl::c_find_if(kScalabilityModeParams, + [&](const ScalabilityModeParameters& candidate_mode) { + return candidate_mode.name == mode_string; + }); + if (it != std::end(kScalabilityModeParams)) { + return it->scalability_mode; + } + return std::nullopt; } InterLayerPredMode ScalabilityModeToInterLayerPredMode( ScalabilityMode scalability_mode) { - switch (scalability_mode) { - case ScalabilityMode::kL1T1: - case ScalabilityMode::kL1T2: - case ScalabilityMode::kL1T3: - case ScalabilityMode::kL2T1: - case ScalabilityMode::kL2T1h: - return InterLayerPredMode::kOn; - case ScalabilityMode::kL2T1_KEY: - return InterLayerPredMode::kOnKeyPic; - case ScalabilityMode::kL2T2: - case ScalabilityMode::kL2T2h: - return InterLayerPredMode::kOn; - case ScalabilityMode::kL2T2_KEY: - case ScalabilityMode::kL2T2_KEY_SHIFT: - return InterLayerPredMode::kOnKeyPic; - case ScalabilityMode::kL2T3: - case ScalabilityMode::kL2T3h: - return 
InterLayerPredMode::kOn; - case ScalabilityMode::kL2T3_KEY: - return InterLayerPredMode::kOnKeyPic; - case ScalabilityMode::kL3T1: - case ScalabilityMode::kL3T1h: - return InterLayerPredMode::kOn; - case ScalabilityMode::kL3T1_KEY: - return InterLayerPredMode::kOnKeyPic; - case ScalabilityMode::kL3T2: - case ScalabilityMode::kL3T2h: - return InterLayerPredMode::kOn; - case ScalabilityMode::kL3T2_KEY: - return InterLayerPredMode::kOnKeyPic; - case ScalabilityMode::kL3T3: - case ScalabilityMode::kL3T3h: - return InterLayerPredMode::kOn; - case ScalabilityMode::kL3T3_KEY: - return InterLayerPredMode::kOnKeyPic; - case ScalabilityMode::kS2T1: - case ScalabilityMode::kS2T1h: - case ScalabilityMode::kS2T2: - case ScalabilityMode::kS2T2h: - case ScalabilityMode::kS2T3: - case ScalabilityMode::kS2T3h: - case ScalabilityMode::kS3T1: - case ScalabilityMode::kS3T1h: - case ScalabilityMode::kS3T2: - case ScalabilityMode::kS3T2h: - case ScalabilityMode::kS3T3: - case ScalabilityMode::kS3T3h: - return InterLayerPredMode::kOff; - } - RTC_CHECK_NOTREACHED(); + return kScalabilityModeParams[Idx(scalability_mode)].inter_layer_pred; } int ScalabilityModeToNumSpatialLayers(ScalabilityMode scalability_mode) { - switch (scalability_mode) { - case ScalabilityMode::kL1T1: - case ScalabilityMode::kL1T2: - case ScalabilityMode::kL1T3: - return 1; - case ScalabilityMode::kL2T1: - case ScalabilityMode::kL2T1h: - case ScalabilityMode::kL2T1_KEY: - case ScalabilityMode::kL2T2: - case ScalabilityMode::kL2T2h: - case ScalabilityMode::kL2T2_KEY: - case ScalabilityMode::kL2T2_KEY_SHIFT: - case ScalabilityMode::kL2T3: - case ScalabilityMode::kL2T3h: - case ScalabilityMode::kL2T3_KEY: - return 2; - case ScalabilityMode::kL3T1: - case ScalabilityMode::kL3T1h: - case ScalabilityMode::kL3T1_KEY: - case ScalabilityMode::kL3T2: - case ScalabilityMode::kL3T2h: - case ScalabilityMode::kL3T2_KEY: - case ScalabilityMode::kL3T3: - case ScalabilityMode::kL3T3h: - case ScalabilityMode::kL3T3_KEY: - return 3; - case ScalabilityMode::kS2T1: - case ScalabilityMode::kS2T1h: - case ScalabilityMode::kS2T2: - case ScalabilityMode::kS2T2h: - case ScalabilityMode::kS2T3: - case ScalabilityMode::kS2T3h: - return 2; - case ScalabilityMode::kS3T1: - case ScalabilityMode::kS3T1h: - case ScalabilityMode::kS3T2: - case ScalabilityMode::kS3T2h: - case ScalabilityMode::kS3T3: - case ScalabilityMode::kS3T3h: - return 3; - } - RTC_CHECK_NOTREACHED(); + return kScalabilityModeParams[Idx(scalability_mode)].num_spatial_layers; } int ScalabilityModeToNumTemporalLayers(ScalabilityMode scalability_mode) { - switch (scalability_mode) { - case ScalabilityMode::kL1T1: - return 1; - case ScalabilityMode::kL1T2: - return 2; - case ScalabilityMode::kL1T3: - return 3; - case ScalabilityMode::kL2T1: - case ScalabilityMode::kL2T1h: - case ScalabilityMode::kL2T1_KEY: - return 1; - case ScalabilityMode::kL2T2: - case ScalabilityMode::kL2T2h: - case ScalabilityMode::kL2T2_KEY: - case ScalabilityMode::kL2T2_KEY_SHIFT: - return 2; - case ScalabilityMode::kL2T3: - case ScalabilityMode::kL2T3h: - case ScalabilityMode::kL2T3_KEY: - return 3; - case ScalabilityMode::kL3T1: - case ScalabilityMode::kL3T1h: - case ScalabilityMode::kL3T1_KEY: - return 1; - case ScalabilityMode::kL3T2: - case ScalabilityMode::kL3T2h: - case ScalabilityMode::kL3T2_KEY: - return 2; - case ScalabilityMode::kL3T3: - case ScalabilityMode::kL3T3h: - case ScalabilityMode::kL3T3_KEY: - return 3; - case ScalabilityMode::kS2T1: - case ScalabilityMode::kS2T1h: - case ScalabilityMode::kS3T1: - case 
ScalabilityMode::kS3T1h: - return 1; - case ScalabilityMode::kS2T2: - case ScalabilityMode::kS2T2h: - case ScalabilityMode::kS3T2: - case ScalabilityMode::kS3T2h: - return 2; - case ScalabilityMode::kS2T3: - case ScalabilityMode::kS2T3h: - case ScalabilityMode::kS3T3: - case ScalabilityMode::kS3T3h: - return 3; - } - RTC_CHECK_NOTREACHED(); + return kScalabilityModeParams[Idx(scalability_mode)].num_temporal_layers; } -absl::optional ScalabilityModeToResolutionRatio( +std::optional ScalabilityModeToResolutionRatio( ScalabilityMode scalability_mode) { - switch (scalability_mode) { - case ScalabilityMode::kL1T1: - case ScalabilityMode::kL1T2: - case ScalabilityMode::kL1T3: - return absl::nullopt; - case ScalabilityMode::kL2T1: - case ScalabilityMode::kL2T1_KEY: - case ScalabilityMode::kL2T2: - case ScalabilityMode::kL2T2_KEY: - case ScalabilityMode::kL2T2_KEY_SHIFT: - case ScalabilityMode::kL2T3: - case ScalabilityMode::kL2T3_KEY: - case ScalabilityMode::kL3T1: - case ScalabilityMode::kL3T1_KEY: - case ScalabilityMode::kL3T2: - case ScalabilityMode::kL3T2_KEY: - case ScalabilityMode::kL3T3: - case ScalabilityMode::kL3T3_KEY: - case ScalabilityMode::kS2T1: - case ScalabilityMode::kS2T2: - case ScalabilityMode::kS2T3: - case ScalabilityMode::kS3T1: - case ScalabilityMode::kS3T2: - case ScalabilityMode::kS3T3: - return ScalabilityModeResolutionRatio::kTwoToOne; - case ScalabilityMode::kL2T1h: - case ScalabilityMode::kL2T2h: - case ScalabilityMode::kL2T3h: - case ScalabilityMode::kL3T1h: - case ScalabilityMode::kL3T2h: - case ScalabilityMode::kL3T3h: - case ScalabilityMode::kS2T1h: - case ScalabilityMode::kS2T2h: - case ScalabilityMode::kS2T3h: - case ScalabilityMode::kS3T1h: - case ScalabilityMode::kS3T2h: - case ScalabilityMode::kS3T3h: - return ScalabilityModeResolutionRatio::kThreeToTwo; - } - RTC_CHECK_NOTREACHED(); + return kScalabilityModeParams[Idx(scalability_mode)].ratio; } ScalabilityMode LimitNumSpatialLayers(ScalabilityMode scalability_mode, @@ -387,4 +457,8 @@ ScalabilityMode LimitNumSpatialLayers(ScalabilityMode scalability_mode, RTC_CHECK_NOTREACHED(); } +bool ScalabilityModeIsShiftMode(ScalabilityMode scalability_mode) { + return kScalabilityModeParams[Idx(scalability_mode)].shift; +} + } // namespace webrtc diff --git a/modules/video_coding/svc/scalability_mode_util.h b/modules/video_coding/svc/scalability_mode_util.h index 9c8193e037..24e5960f74 100644 --- a/modules/video_coding/svc/scalability_mode_util.h +++ b/modules/video_coding/svc/scalability_mode_util.h @@ -11,10 +11,12 @@ #ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_MODE_UTIL_H_ #define MODULES_VIDEO_CODING_SVC_SCALABILITY_MODE_UTIL_H_ +#include + #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_codec.h" +#include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -25,7 +27,18 @@ enum class ScalabilityModeResolutionRatio { static constexpr char kDefaultScalabilityModeStr[] = "L1T2"; -absl::optional ScalabilityModeFromString( +// Scalability mode to be used if falling back to default scalability mode is +// unsupported. 
+static constexpr char kNoLayeringScalabilityModeStr[] = "L1T1"; + +RTC_EXPORT std::optional MakeScalabilityMode( + int num_spatial_layers, + int num_temporal_layers, + InterLayerPredMode inter_layer_pred, + std::optional ratio, + bool shift); + +RTC_EXPORT std::optional ScalabilityModeFromString( absl::string_view scalability_mode_string); InterLayerPredMode ScalabilityModeToInterLayerPredMode( @@ -35,9 +48,11 @@ int ScalabilityModeToNumSpatialLayers(ScalabilityMode scalability_mode); int ScalabilityModeToNumTemporalLayers(ScalabilityMode scalability_mode); -absl::optional ScalabilityModeToResolutionRatio( +std::optional ScalabilityModeToResolutionRatio( ScalabilityMode scalability_mode); +bool ScalabilityModeIsShiftMode(ScalabilityMode scalability_mode); + ScalabilityMode LimitNumSpatialLayers(ScalabilityMode scalability_mode, int max_spatial_layers); diff --git a/modules/video_coding/svc/scalability_mode_util_unittest.cc b/modules/video_coding/svc/scalability_mode_util_unittest.cc index 448494ffcc..346944c113 100644 --- a/modules/video_coding/svc/scalability_mode_util_unittest.cc +++ b/modules/video_coding/svc/scalability_mode_util_unittest.cc @@ -10,16 +10,18 @@ #include "modules/video_coding/svc/scalability_mode_util.h" +#include #include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/video_codecs/scalability_mode.h" +#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { + namespace { TEST(ScalabilityModeUtil, ConvertsL1T2) { @@ -28,8 +30,28 @@ TEST(ScalabilityModeUtil, ConvertsL1T2) { } TEST(ScalabilityModeUtil, RejectsUnknownString) { - EXPECT_EQ(ScalabilityModeFromString(""), absl::nullopt); - EXPECT_EQ(ScalabilityModeFromString("not-a-mode"), absl::nullopt); + EXPECT_EQ(ScalabilityModeFromString(""), std::nullopt); + EXPECT_EQ(ScalabilityModeFromString("not-a-mode"), std::nullopt); +} + +TEST(ScalabilityModeUtil, MakeScalabilityModeRoundTrip) { + const ScalabilityMode kLastEnum = ScalabilityMode::kS3T3h; + for (int numerical_enum = 0; numerical_enum <= static_cast(kLastEnum); + numerical_enum++) { + ScalabilityMode scalability_mode = + static_cast(numerical_enum); + std::optional created_mode = MakeScalabilityMode( + ScalabilityModeToNumSpatialLayers(scalability_mode), + ScalabilityModeToNumTemporalLayers(scalability_mode), + ScalabilityModeToInterLayerPredMode(scalability_mode), + ScalabilityModeToResolutionRatio(scalability_mode), + ScalabilityModeIsShiftMode(scalability_mode)); + EXPECT_THAT(created_mode, ::testing::Optional(scalability_mode)) + << "Expected " + << (created_mode.has_value() ? ScalabilityModeToString(*created_mode) + : "(nullopt)") + << " to equal " << ScalabilityModeToString(scalability_mode); + } } // Check roundtrip conversion of all enum values. 
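For reference, a minimal usage sketch (not part of this change; the helper name is illustrative) of the table-driven API introduced above, assuming only headers this patch already touches:

#include <optional>

#include "api/video_codecs/scalability_mode.h"
#include "api/video_codecs/video_codec.h"
#include "modules/video_coding/svc/scalability_mode_util.h"

namespace {

// Three independently coded spatial layers (no inter-layer prediction),
// three temporal layers, 2:1 downscaling and no temporal shift map onto
// ScalabilityMode::kS3T3 via the kScalabilityModeParams table.
std::optional<webrtc::ScalabilityMode> PickSimulcastLikeMode() {
  return webrtc::MakeScalabilityMode(
      /*num_spatial_layers=*/3, /*num_temporal_layers=*/3,
      webrtc::InterLayerPredMode::kOff,
      webrtc::ScalabilityModeResolutionRatio::kTwoToOne,
      /*shift=*/false);
}

}  // namespace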
diff --git a/modules/video_coding/svc/scalability_structure_full_svc.cc b/modules/video_coding/svc/scalability_structure_full_svc.cc index a262317597..4593e99a28 100644 --- a/modules/video_coding/svc/scalability_structure_full_svc.cc +++ b/modules/video_coding/svc/scalability_structure_full_svc.cc @@ -9,12 +9,15 @@ */ #include "modules/video_coding/svc/scalability_structure_full_svc.h" -#include +#include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalable_video_controller.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -142,7 +145,7 @@ ScalabilityStructureFullSvc::NextFrameConfig(bool restart) { } FramePattern current_pattern = NextPattern(); - absl::optional spatial_dependency_buffer_id; + std::optional spatial_dependency_buffer_id; switch (current_pattern) { case kDeltaT0: case kKey: diff --git a/modules/video_coding/svc/scalability_structure_full_svc.h b/modules/video_coding/svc/scalability_structure_full_svc.h index a4ede69342..b81a27c59f 100644 --- a/modules/video_coding/svc/scalability_structure_full_svc.h +++ b/modules/video_coding/svc/scalability_structure_full_svc.h @@ -13,7 +13,9 @@ #include #include +#include "absl/strings/string_view.h" #include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" #include "common_video/generic_frame_descriptor/generic_frame_info.h" #include "modules/video_coding/svc/scalable_video_controller.h" diff --git a/modules/video_coding/svc/scalability_structure_full_svc_unittest.cc b/modules/video_coding/svc/scalability_structure_full_svc_unittest.cc index 1c0a8be8f1..2c51cf1e8b 100644 --- a/modules/video_coding/svc/scalability_structure_full_svc_unittest.cc +++ b/modules/video_coding/svc/scalability_structure_full_svc_unittest.cc @@ -11,6 +11,7 @@ #include +#include "common_video/generic_frame_descriptor/generic_frame_info.h" #include "modules/video_coding/svc/scalability_structure_test_helpers.h" #include "test/gmock.h" #include "test/gtest.h" diff --git a/modules/video_coding/svc/scalability_structure_key_svc.cc b/modules/video_coding/svc/scalability_structure_key_svc.cc index 0e6fecfae9..b21639b113 100644 --- a/modules/video_coding/svc/scalability_structure_key_svc.cc +++ b/modules/video_coding/svc/scalability_structure_key_svc.cc @@ -10,16 +10,15 @@ #include "modules/video_coding/svc/scalability_structure_key_svc.h" #include -#include +#include +#include #include -#include "absl/types/optional.h" #include "api/transport/rtp/dependency_descriptor.h" #include "api/video/video_bitrate_allocation.h" #include "common_video/generic_frame_descriptor/generic_frame_info.h" #include "modules/video_coding/svc/scalable_video_controller.h" #include "rtc_base/checks.h" -#include "rtc_base/logging.h" namespace webrtc { @@ -90,7 +89,7 @@ std::vector ScalabilityStructureKeySvc::KeyframeConfig() { std::vector configs; configs.reserve(num_spatial_layers_); - absl::optional spatial_dependency_buffer_id; + std::optional spatial_dependency_buffer_id; spatial_id_is_enabled_.reset(); // Disallow temporal references cross T0 on higher temporal layers. 
can_reference_t1_frame_for_spatial_id_.reset(); diff --git a/modules/video_coding/svc/scalability_structure_key_svc_unittest.cc b/modules/video_coding/svc/scalability_structure_key_svc_unittest.cc index 5f923bb487..175b8fc277 100644 --- a/modules/video_coding/svc/scalability_structure_key_svc_unittest.cc +++ b/modules/video_coding/svc/scalability_structure_key_svc_unittest.cc @@ -12,7 +12,6 @@ #include #include "api/array_view.h" -#include "api/transport/rtp/dependency_descriptor.h" #include "common_video/generic_frame_descriptor/generic_frame_info.h" #include "modules/video_coding/svc/scalability_structure_test_helpers.h" #include "test/gmock.h" @@ -235,7 +234,7 @@ TEST(ScalabilityStructureL3T3KeyTest, ReenablingSpatialLayerTriggersKeyFrame) { EXPECT_EQ(frames[13].temporal_id, 0); EXPECT_EQ(frames[14].temporal_id, 0); EXPECT_EQ(frames[15].temporal_id, 0); - auto all_frames = rtc::MakeArrayView(frames.data(), frames.size()); + auto all_frames = MakeArrayView(frames.data(), frames.size()); EXPECT_TRUE(wrapper.FrameReferencesAreValid(all_frames.subview(0, 13))); // Frames starting from the frame#13 should not reference any earlier frames. EXPECT_TRUE(wrapper.FrameReferencesAreValid(all_frames.subview(13))); diff --git a/modules/video_coding/svc/scalability_structure_l2t2_key_shift.cc b/modules/video_coding/svc/scalability_structure_l2t2_key_shift.cc index 4d15942d3e..b39b953767 100644 --- a/modules/video_coding/svc/scalability_structure_l2t2_key_shift.cc +++ b/modules/video_coding/svc/scalability_structure_l2t2_key_shift.cc @@ -9,13 +9,13 @@ */ #include "modules/video_coding/svc/scalability_structure_l2t2_key_shift.h" -#include #include -#include "absl/base/macros.h" #include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalable_video_controller.h" #include "rtc_base/checks.h" -#include "rtc_base/logging.h" namespace webrtc { namespace { diff --git a/modules/video_coding/svc/scalability_structure_l2t2_key_shift.h b/modules/video_coding/svc/scalability_structure_l2t2_key_shift.h index 26d1afcb29..1fb6e8d127 100644 --- a/modules/video_coding/svc/scalability_structure_l2t2_key_shift.h +++ b/modules/video_coding/svc/scalability_structure_l2t2_key_shift.h @@ -10,6 +10,7 @@ #ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_ #define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_L2T2_KEY_SHIFT_H_ +#include #include #include "api/transport/rtp/dependency_descriptor.h" diff --git a/modules/video_coding/svc/scalability_structure_l2t2_key_shift_unittest.cc b/modules/video_coding/svc/scalability_structure_l2t2_key_shift_unittest.cc index 40fecf1812..153294014d 100644 --- a/modules/video_coding/svc/scalability_structure_l2t2_key_shift_unittest.cc +++ b/modules/video_coding/svc/scalability_structure_l2t2_key_shift_unittest.cc @@ -12,7 +12,6 @@ #include #include "api/array_view.h" -#include "api/transport/rtp/dependency_descriptor.h" #include "common_video/generic_frame_descriptor/generic_frame_info.h" #include "modules/video_coding/svc/scalability_structure_test_helpers.h" #include "test/gmock.h" @@ -237,8 +236,8 @@ TEST(ScalabilityStructureL2T2KeyShiftTest, ReenableS1TriggersKeyFrame) { EXPECT_THAT(frames[4].temporal_id, 1); // Expect frame[5] to be a key frame. 
- EXPECT_TRUE(wrapper.FrameReferencesAreValid( - rtc::MakeArrayView(frames.data() + 5, 4))); + EXPECT_TRUE( + wrapper.FrameReferencesAreValid(MakeArrayView(frames.data() + 5, 4))); EXPECT_THAT(frames[5].spatial_id, 0); EXPECT_THAT(frames[6].spatial_id, 1); diff --git a/modules/video_coding/svc/scalability_structure_simulcast.cc b/modules/video_coding/svc/scalability_structure_simulcast.cc index 54e27fda5c..71eae32794 100644 --- a/modules/video_coding/svc/scalability_structure_simulcast.cc +++ b/modules/video_coding/svc/scalability_structure_simulcast.cc @@ -9,13 +9,14 @@ */ #include "modules/video_coding/svc/scalability_structure_simulcast.h" -#include +#include #include -#include "absl/base/macros.h" #include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalable_video_controller.h" #include "rtc_base/checks.h" -#include "rtc_base/logging.h" namespace webrtc { namespace { diff --git a/modules/video_coding/svc/scalability_structure_simulcast.h b/modules/video_coding/svc/scalability_structure_simulcast.h index 99be9f0d58..7435724b42 100644 --- a/modules/video_coding/svc/scalability_structure_simulcast.h +++ b/modules/video_coding/svc/scalability_structure_simulcast.h @@ -10,6 +10,7 @@ #ifndef MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_SIMULCAST_H_ #define MODULES_VIDEO_CODING_SVC_SCALABILITY_STRUCTURE_SIMULCAST_H_ +#include #include #include "api/transport/rtp/dependency_descriptor.h" diff --git a/modules/video_coding/svc/scalability_structure_test_helpers.cc b/modules/video_coding/svc/scalability_structure_test_helpers.cc index aeb4d88f1a..e5cb1ef1d2 100644 --- a/modules/video_coding/svc/scalability_structure_test_helpers.cc +++ b/modules/video_coding/svc/scalability_structure_test_helpers.cc @@ -11,12 +11,14 @@ #include +#include +#include #include #include #include "api/array_view.h" -#include "api/transport/rtp/dependency_descriptor.h" #include "api/video/video_bitrate_allocation.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" #include "modules/video_coding/chain_diff_calculator.h" #include "modules/video_coding/frame_dependencies_calculator.h" #include "modules/video_coding/svc/scalable_video_controller.h" @@ -65,7 +67,7 @@ void ScalabilityStructureWrapper::GenerateFrames( } bool ScalabilityStructureWrapper::FrameReferencesAreValid( - rtc::ArrayView frames) const { + ArrayView frames) const { bool valid = true; // VP9 and AV1 supports up to 8 buffers. Expect no more buffers are not used. std::bitset<8> buffer_contains_frame; diff --git a/modules/video_coding/svc/scalability_structure_test_helpers.h b/modules/video_coding/svc/scalability_structure_test_helpers.h index d183be4766..42257b59a1 100644 --- a/modules/video_coding/svc/scalability_structure_test_helpers.h +++ b/modules/video_coding/svc/scalability_structure_test_helpers.h @@ -15,7 +15,6 @@ #include #include "api/array_view.h" -#include "api/transport/rtp/dependency_descriptor.h" #include "api/video/video_bitrate_allocation.h" #include "common_video/generic_frame_descriptor/generic_frame_info.h" #include "modules/video_coding/chain_diff_calculator.h" @@ -44,8 +43,7 @@ class ScalabilityStructureWrapper { // Returns false and ADD_FAILUREs for frames with invalid references. // In particular validates no frame frame reference to frame before frames[0]. // In error messages frames are indexed starting with 0. 
- bool FrameReferencesAreValid( - rtc::ArrayView frames) const; + bool FrameReferencesAreValid(ArrayView frames) const; private: ScalableVideoController& structure_controller_; diff --git a/modules/video_coding/svc/scalability_structure_unittest.cc b/modules/video_coding/svc/scalability_structure_unittest.cc index 2d517c5825..9a66df3a10 100644 --- a/modules/video_coding/svc/scalability_structure_unittest.cc +++ b/modules/video_coding/svc/scalability_structure_unittest.cc @@ -12,16 +12,23 @@ #include #include +#include #include +#include #include +#include +#include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video_codecs/scalability_mode.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" #include "modules/video_coding/svc/create_scalability_structure.h" #include "modules/video_coding/svc/scalability_mode_util.h" #include "modules/video_coding/svc/scalability_structure_test_helpers.h" #include "modules/video_coding/svc/scalable_video_controller.h" +#include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" #include "test/gmock.h" #include "test/gtest.h" @@ -45,7 +52,7 @@ using ::testing::TestWithParam; using ::testing::Values; std::string FrameDependencyTemplateToString(const FrameDependencyTemplate& t) { - rtc::StringBuilder sb; + StringBuilder sb; sb << "S" << t.spatial_id << "T" << t.temporal_id; sb << ": dtis = "; for (const auto dtis : t.decode_target_indications) { @@ -85,7 +92,7 @@ struct SvcTestParam { } ScalabilityMode GetScalabilityMode() const { - absl::optional scalability_mode = + std::optional scalability_mode = ScalabilityModeFromString(name); RTC_CHECK(scalability_mode.has_value()); return *scalability_mode; @@ -101,20 +108,20 @@ TEST_P(ScalabilityStructureTest, StaticConfigMatchesConfigReturnedByController) { std::unique_ptr controller = CreateScalabilityStructure(GetParam().GetScalabilityMode()); - absl::optional static_config = + std::optional static_config = ScalabilityStructureConfig(GetParam().GetScalabilityMode()); ASSERT_THAT(controller, NotNull()); - ASSERT_NE(static_config, absl::nullopt); + ASSERT_NE(static_config, std::nullopt); ScalableVideoController::StreamLayersConfig config = controller->StreamConfig(); EXPECT_EQ(config.num_spatial_layers, static_config->num_spatial_layers); EXPECT_EQ(config.num_temporal_layers, static_config->num_temporal_layers); EXPECT_THAT( - rtc::MakeArrayView(config.scaling_factor_num, config.num_spatial_layers), + MakeArrayView(config.scaling_factor_num, config.num_spatial_layers), ElementsAreArray(static_config->scaling_factor_num, static_config->num_spatial_layers)); EXPECT_THAT( - rtc::MakeArrayView(config.scaling_factor_den, config.num_spatial_layers), + MakeArrayView(config.scaling_factor_den, config.num_spatial_layers), ElementsAreArray(static_config->scaling_factor_den, static_config->num_spatial_layers)); } diff --git a/modules/video_coding/svc/scalable_video_controller.h b/modules/video_coding/svc/scalable_video_controller.h index c7362657ec..ca21998302 100644 --- a/modules/video_coding/svc/scalable_video_controller.h +++ b/modules/video_coding/svc/scalable_video_controller.h @@ -15,6 +15,7 @@ #include "absl/container/inlined_vector.h" #include "api/transport/rtp/dependency_descriptor.h" #include "api/video/video_bitrate_allocation.h" +#include "api/video/video_codec_constants.h" #include "common_video/generic_frame_descriptor/generic_frame_info.h" 
namespace webrtc { diff --git a/modules/video_coding/svc/scalable_video_controller_no_layering.cc b/modules/video_coding/svc/scalable_video_controller_no_layering.cc index a9d530dd9d..95b9c9cc41 100644 --- a/modules/video_coding/svc/scalable_video_controller_no_layering.cc +++ b/modules/video_coding/svc/scalable_video_controller_no_layering.cc @@ -9,10 +9,12 @@ */ #include "modules/video_coding/svc/scalable_video_controller_no_layering.h" -#include #include #include "api/transport/rtp/dependency_descriptor.h" +#include "api/video/video_bitrate_allocation.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/svc/scalable_video_controller.h" #include "rtc_base/checks.h" namespace webrtc { diff --git a/modules/video_coding/svc/simulcast_to_svc_converter.cc b/modules/video_coding/svc/simulcast_to_svc_converter.cc new file mode 100644 index 0000000000..f1bec7a515 --- /dev/null +++ b/modules/video_coding/svc/simulcast_to_svc_converter.cc @@ -0,0 +1,186 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/svc/simulcast_to_svc_converter.h" + +#include +#include +#include + +#include "api/video/encoded_image.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/video_codec.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/svc/create_scalability_structure.h" +#include "modules/video_coding/svc/scalability_mode_util.h" +#include "modules/video_coding/utility/simulcast_utility.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +SimulcastToSvcConverter::SimulcastToSvcConverter(const VideoCodec& codec) { + config_ = codec; + int num_temporal_layers = + config_.simulcastStream[0].GetNumberOfTemporalLayers(); + int num_spatial_layers = config_.numberOfSimulcastStreams; + ScalabilityMode scalability_mode; + switch (num_temporal_layers) { + case 1: + scalability_mode = ScalabilityMode::kL1T1; + break; + case 2: + scalability_mode = ScalabilityMode::kL1T2; + break; + case 3: + scalability_mode = ScalabilityMode::kL1T3; + break; + default: + RTC_DCHECK_NOTREACHED(); + } + + for (int i = 0; i < num_spatial_layers; ++i) { + config_.spatialLayers[i] = config_.simulcastStream[i]; + } + config_.simulcastStream[0] = + config_.simulcastStream[config_.numberOfSimulcastStreams - 1]; + config_.VP9()->numberOfSpatialLayers = config_.numberOfSimulcastStreams; + config_.VP9()->numberOfTemporalLayers = + config_.spatialLayers[0].numberOfTemporalLayers; + config_.VP9()->interLayerPred = InterLayerPredMode::kOff; + config_.numberOfSimulcastStreams = 1; + config_.UnsetScalabilityMode(); + + for (int i = 0; i < num_spatial_layers; ++i) { + layers_.emplace_back(scalability_mode, num_temporal_layers); + } +} + +VideoCodec SimulcastToSvcConverter::GetConfig() const { + return config_; +} + +void SimulcastToSvcConverter::EncodeStarted(bool force_keyframe) { + // Check if at least one layer was encoded successfully. 
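+  // Each simulcast layer is tracked by its own single-layer
+  // ScalableVideoController. A layer whose `awaiting_frame` flag is still set
+  // here produced no converted frame since the previous EncodeStarted() call,
+  // i.e. the encoder dropped it.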
+ bool some_layers_has_completed = false; + for (size_t i = 0; i < layers_.size(); ++i) { + some_layers_has_completed |= !layers_[i].awaiting_frame; + } + for (size_t i = 0; i < layers_.size(); ++i) { + if (layers_[i].awaiting_frame && some_layers_has_completed) { + // Simulcast SVC controller updates pattern on all layers, even + // if some layers has dropped the frame. + // Simulate that behavior for all controllers, not updated + // while rewriting frame descriptors. + layers_[i].video_controller->OnEncodeDone(layers_[i].layer_config); + } + layers_[i].awaiting_frame = true; + auto configs = layers_[i].video_controller->NextFrameConfig(force_keyframe); + RTC_CHECK_EQ(configs.size(), 1u); + layers_[i].layer_config = configs[0]; + } +} + +bool SimulcastToSvcConverter::ConvertFrame(EncodedImage& encoded_image, + CodecSpecificInfo& codec_specific) { + int sid = encoded_image.SpatialIndex().value_or(0); + encoded_image.SetSimulcastIndex(sid); + encoded_image.SetSpatialIndex(std::nullopt); + codec_specific.end_of_picture = true; + if (codec_specific.scalability_mode) { + int num_temporal_layers = + ScalabilityModeToNumTemporalLayers(*codec_specific.scalability_mode); + RTC_DCHECK_LE(num_temporal_layers, 3); + if (num_temporal_layers == 1) { + codec_specific.scalability_mode = ScalabilityMode::kL1T1; + } else if (num_temporal_layers == 2) { + codec_specific.scalability_mode = ScalabilityMode::kL1T2; + } else if (num_temporal_layers == 3) { + codec_specific.scalability_mode = ScalabilityMode::kL1T3; + } + } + CodecSpecificInfoVP9& vp9_info = codec_specific.codecSpecific.VP9; + vp9_info.num_spatial_layers = 1; + vp9_info.first_active_layer = 0; + vp9_info.first_frame_in_picture = true; + if (vp9_info.ss_data_available) { + vp9_info.width[0] = vp9_info.width[sid]; + vp9_info.height[0] = vp9_info.height[sid]; + } + + auto& video_controller = *layers_[sid].video_controller; + if (codec_specific.generic_frame_info) { + layers_[sid].awaiting_frame = false; + uint8_t tid = encoded_image.TemporalIndex().value_or(0); + auto& frame_config = layers_[sid].layer_config; + RTC_DCHECK_EQ(frame_config.TemporalId(), tid == kNoTemporalIdx ? 0 : tid); + if (frame_config.TemporalId() != (tid == kNoTemporalIdx ? 0 : tid)) { + return false; + } + codec_specific.generic_frame_info = + video_controller.OnEncodeDone(frame_config); + } + if (codec_specific.template_structure) { + auto resolution = codec_specific.template_structure->resolutions[sid]; + codec_specific.template_structure = video_controller.DependencyStructure(); + codec_specific.template_structure->resolutions.resize(1); + codec_specific.template_structure->resolutions[0] = resolution; + } + return true; +} + +SimulcastToSvcConverter::LayerState::LayerState( + ScalabilityMode scalability_mode, + int num_temporal_layers) + : video_controller(CreateScalabilityStructure(scalability_mode)), + awaiting_frame(false) { + VideoBitrateAllocation dummy_bitrates; + for (int i = 0; i < num_temporal_layers; ++i) { + dummy_bitrates.SetBitrate(0, i, 10000); + } + video_controller->OnRatesUpdated(dummy_bitrates); +} + +// static +bool SimulcastToSvcConverter::IsConfigSupported(const VideoCodec& codec) { + if (codec.numberOfSimulcastStreams <= 1 || + !SimulcastUtility::ValidSimulcastParameters( + codec, codec.numberOfSimulcastStreams)) { + return false; + } + // Ensure there's 4:2:1 scaling. 
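+  // That is, each active stream must be exactly twice the width and height of
+  // the previous active stream; any other scaling factor is rejected by the
+  // converter.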
+ for (int i = 1; i < codec.numberOfSimulcastStreams; ++i) { + if (codec.simulcastStream[i].active && + codec.simulcastStream[i - 1].active && + (codec.simulcastStream[i].width != + codec.simulcastStream[i - 1].width * 2 || + codec.simulcastStream[i].height != + codec.simulcastStream[i - 1].height * 2)) { + return false; + } + } + int first_active_layer = -1; + int last_active_layer = -1; + int num_active_layers = 0; + for (int i = 0; i < codec.numberOfSimulcastStreams; ++i) { + if (codec.simulcastStream[i].active) { + if (first_active_layer < 0) + first_active_layer = i; + last_active_layer = i; + ++num_active_layers; + } + } + // Active layers must form a continuous segment. Can't have holes, because + // most SVC encoders can't process that. + return num_active_layers == last_active_layer - first_active_layer + 1; +} + +} // namespace webrtc diff --git a/modules/video_coding/svc/simulcast_to_svc_converter.h b/modules/video_coding/svc/simulcast_to_svc_converter.h new file mode 100644 index 0000000000..ea2435d768 --- /dev/null +++ b/modules/video_coding/svc/simulcast_to_svc_converter.h @@ -0,0 +1,66 @@ +/* Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_SVC_SIMULCAST_TO_SVC_CONVERTER_H_ +#define MODULES_VIDEO_CODING_SVC_SIMULCAST_TO_SVC_CONVERTER_H_ + +#include + +#include +#include + +#include "api/video/encoded_image.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/video_codec.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/svc/scalable_video_controller.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +class RTC_EXPORT SimulcastToSvcConverter { + public: + explicit SimulcastToSvcConverter(const VideoCodec&); + SimulcastToSvcConverter(SimulcastToSvcConverter&&) = default; + + SimulcastToSvcConverter(const SimulcastToSvcConverter&) = delete; + SimulcastToSvcConverter& operator=(const SimulcastToSvcConverter&) = delete; + SimulcastToSvcConverter& operator=(SimulcastToSvcConverter&&) = default; + + ~SimulcastToSvcConverter() = default; + + static bool IsConfigSupported(const VideoCodec& codec); + + VideoCodec GetConfig() const; + + void EncodeStarted(bool force_keyframe); + + bool ConvertFrame(EncodedImage& encoded_image, + CodecSpecificInfo& codec_specific); + + private: + struct LayerState { + LayerState(ScalabilityMode scalability_mode, int num_temporal_layers); + ~LayerState() = default; + LayerState(const LayerState&) = delete; + LayerState(LayerState&&) = default; + + std::unique_ptr video_controller; + ScalableVideoController::LayerFrameConfig layer_config; + bool awaiting_frame; + }; + + VideoCodec config_; + + std::vector layers_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_SVC_SIMULCAST_TO_SVC_CONVERTER_H_ diff --git a/modules/video_coding/svc/simulcast_to_svc_converter_unittest.cc b/modules/video_coding/svc/simulcast_to_svc_converter_unittest.cc new file mode 100644 index 0000000000..4466f18845 --- /dev/null +++ b/modules/video_coding/svc/simulcast_to_svc_converter_unittest.cc @@ -0,0 +1,393 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/video_coding/svc/simulcast_to_svc_converter.h" + +#include +#include +#include +#include + +#include "api/video/encoded_image.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/video_codec.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/svc/create_scalability_structure.h" +#include "modules/video_coding/svc/scalable_video_controller.h" +#include "test/gtest.h" + +namespace webrtc { + +TEST(SimulcastToSvc, ConvertsConfig) { + VideoCodec codec; + codec.codecType = kVideoCodecVP9; + codec.SetScalabilityMode(ScalabilityMode::kL1T3); + codec.width = 1280; + codec.height = 720; + codec.minBitrate = 10; + codec.maxBitrate = 2500; + codec.numberOfSimulcastStreams = 3; + codec.VP9()->numberOfSpatialLayers = 1; + codec.VP9()->interLayerPred = InterLayerPredMode::kOff; + codec.simulcastStream[0] = {.width = 320, + .height = 180, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 100, + .targetBitrate = 70, + .minBitrate = 50, + .qpMax = 150, + .active = true}; + codec.simulcastStream[1] = {.width = 640, + .height = 360, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 250, + .targetBitrate = 150, + .minBitrate = 100, + .qpMax = 150, + .active = true}; + codec.simulcastStream[2] = {.width = 12800, + .height = 720, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 1500, + .targetBitrate = 1200, + .minBitrate = 800, + .qpMax = 150, + .active = true}; + VideoCodec result = codec; + + SimulcastToSvcConverter converter(codec); + result = converter.GetConfig(); + + EXPECT_EQ(result.numberOfSimulcastStreams, 1); + EXPECT_EQ(result.spatialLayers[0], codec.simulcastStream[0]); + EXPECT_EQ(result.spatialLayers[1], codec.simulcastStream[1]); + EXPECT_EQ(result.spatialLayers[2], codec.simulcastStream[2]); + EXPECT_EQ(result.VP9()->numberOfTemporalLayers, 3); + EXPECT_EQ(result.VP9()->numberOfSpatialLayers, 3); + EXPECT_EQ(result.VP9()->interLayerPred, InterLayerPredMode::kOff); +} + +TEST(SimulcastToSvc, ConvertsEncodedImage) { + VideoCodec codec; + codec.codecType = kVideoCodecVP9; + codec.SetScalabilityMode(ScalabilityMode::kL1T3); + codec.width = 1280; + codec.height = 720; + codec.minBitrate = 10; + codec.maxBitrate = 2500; + codec.numberOfSimulcastStreams = 3; + codec.VP9()->numberOfSpatialLayers = 1; + codec.VP9()->interLayerPred = InterLayerPredMode::kOff; + codec.simulcastStream[0] = {.width = 320, + .height = 180, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 100, + .targetBitrate = 70, + .minBitrate = 50, + .qpMax = 150, + .active = true}; + codec.simulcastStream[1] = {.width = 640, + .height = 360, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 250, + .targetBitrate = 150, + .minBitrate = 100, + .qpMax = 150, + .active = true}; + codec.simulcastStream[2] = {.width = 1280, + .height = 720, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 1500, + .targetBitrate = 1200, + .minBitrate = 800, + .qpMax = 150, + .active = true}; + + SimulcastToSvcConverter 
converter(codec); + + EncodedImage image; + image.SetRtpTimestamp(123); + image.SetSpatialIndex(1); + image.SetTemporalIndex(0); + image._encodedWidth = 640; + image._encodedHeight = 360; + + CodecSpecificInfo codec_specific; + codec_specific.codecType = kVideoCodecVP9; + codec_specific.end_of_picture = false; + codec_specific.codecSpecific.VP9.num_spatial_layers = 3; + codec_specific.codecSpecific.VP9.first_active_layer = 0; + codec_specific.scalability_mode = ScalabilityMode::kS3T3; + + converter.EncodeStarted(/*force_keyframe =*/true); + converter.ConvertFrame(image, codec_specific); + + EXPECT_EQ(image.SpatialIndex(), std::nullopt); + EXPECT_EQ(image.SimulcastIndex(), 1); + EXPECT_EQ(image.TemporalIndex(), 0); + + EXPECT_EQ(codec_specific.end_of_picture, true); + EXPECT_EQ(codec_specific.scalability_mode, ScalabilityMode::kL1T3); +} + +// Checks that ScalableVideoController, which is actually used by the encoder +// in the forced S-mode, behaves as SimulcastToSvcConverter assumes. +TEST(SimulcastToSvc, PredictsInternalStateCorrectlyOnFrameDrops) { + VideoCodec codec; + codec.codecType = kVideoCodecVP9; + codec.SetScalabilityMode(ScalabilityMode::kL1T3); + codec.width = 1280; + codec.height = 720; + codec.minBitrate = 10; + codec.maxBitrate = 2500; + codec.numberOfSimulcastStreams = 3; + codec.VP9()->numberOfSpatialLayers = 1; + codec.VP9()->interLayerPred = InterLayerPredMode::kOff; + + codec.simulcastStream[0] = {.width = 320, + .height = 180, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 100, + .targetBitrate = 70, + .minBitrate = 50, + .qpMax = 150, + .active = true}; + codec.simulcastStream[1] = {.width = 640, + .height = 360, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 250, + .targetBitrate = 150, + .minBitrate = 100, + .qpMax = 150, + .active = true}; + codec.simulcastStream[2] = {.width = 1280, + .height = 720, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 1500, + .targetBitrate = 1200, + .minBitrate = 800, + .qpMax = 150, + .active = true}; + + std::unique_ptr svc_controller = + CreateScalabilityStructure(ScalabilityMode::kS3T3); + + VideoBitrateAllocation dummy_bitrates; + for (int sid = 0; sid < 3; ++sid) { + for (int tid = 0; tid < 3; ++tid) { + dummy_bitrates.SetBitrate(sid, tid, 10000); + } + } + svc_controller->OnRatesUpdated(dummy_bitrates); + + SimulcastToSvcConverter converter(codec); + + EncodedImage image; + + // Simulate complex dropping pattern.
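+  // Layer `sid` drops every kDropInterval[sid]-th frame and a key frame is
+  // forced every kKeyFrameInterval frames, so the per-layer controllers fall
+  // out of lockstep in different ways.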
+ const int kDropInterval[3] = {11, 7, 5}; + const int kKeyFrameInterval = 13; + for (int i = 0; i < 100; ++i) { + bool force_restart = ((i + 1) % kKeyFrameInterval == 0) || (i == 0); + auto layer_config = svc_controller->NextFrameConfig(force_restart); + converter.EncodeStarted(force_restart); + for (int sid = 0; sid < 3; ++sid) { + if ((i + 1) % kDropInterval[sid] == 0) { + continue; + } + image.SetRtpTimestamp(123 * i); + image.SetSpatialIndex(sid); + image.SetTemporalIndex(0); + image._encodedWidth = 1280 / (1 << sid); + image._encodedHeight = 720 / (1 << sid); + image.SetSpatialIndex(sid); + image.SetTemporalIndex(layer_config[sid].TemporalId()); + + CodecSpecificInfo codec_specific; + codec_specific.codecType = kVideoCodecVP9; + codec_specific.end_of_picture = false; + codec_specific.codecSpecific.VP9.num_spatial_layers = 3; + codec_specific.codecSpecific.VP9.first_active_layer = 0; + codec_specific.codecSpecific.VP9.temporal_idx = + layer_config[sid].TemporalId(); + codec_specific.generic_frame_info = + svc_controller->OnEncodeDone(layer_config[sid]); + + codec_specific.scalability_mode = ScalabilityMode::kS3T3; + + EXPECT_TRUE(converter.ConvertFrame(image, codec_specific)); + + EXPECT_EQ(image.SpatialIndex(), std::nullopt); + EXPECT_EQ(image.SimulcastIndex(), sid); + EXPECT_EQ(image.TemporalIndex(), layer_config[sid].TemporalId()); + + EXPECT_EQ(codec_specific.scalability_mode, ScalabilityMode::kL1T3); + } + } +} + +TEST(SimulcastToSvc, SupportsOnlyContinuousActiveStreams) { + VideoCodec codec; + codec.codecType = kVideoCodecVP9; + codec.SetScalabilityMode(ScalabilityMode::kL1T3); + codec.width = 1280; + codec.height = 720; + codec.minBitrate = 10; + codec.maxBitrate = 2500; + codec.numberOfSimulcastStreams = 3; + codec.VP9()->numberOfSpatialLayers = 1; + codec.VP9()->interLayerPred = InterLayerPredMode::kOff; + + codec.simulcastStream[0] = {.width = 320, + .height = 180, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 100, + .targetBitrate = 70, + .minBitrate = 50, + .qpMax = 150, + .active = true}; + codec.simulcastStream[1] = {.width = 640, + .height = 360, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 250, + .targetBitrate = 150, + .minBitrate = 100, + .qpMax = 150, + .active = true}; + codec.simulcastStream[2] = {.width = 1280, + .height = 720, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 1500, + .targetBitrate = 1200, + .minBitrate = 800, + .qpMax = 150, + .active = true}; + EXPECT_TRUE(SimulcastToSvcConverter::IsConfigSupported(codec)); + + codec.simulcastStream[0].active = false; + codec.simulcastStream[1].active = true; + codec.simulcastStream[2].active = true; + EXPECT_TRUE(SimulcastToSvcConverter::IsConfigSupported(codec)); + + codec.simulcastStream[0].active = true; + codec.simulcastStream[1].active = true; + codec.simulcastStream[2].active = false; + EXPECT_TRUE(SimulcastToSvcConverter::IsConfigSupported(codec)); + + codec.simulcastStream[0].active = true; + codec.simulcastStream[1].active = false; + codec.simulcastStream[2].active = true; + EXPECT_FALSE(SimulcastToSvcConverter::IsConfigSupported(codec)); +} + +TEST(SimulcastToSvc, SupportsOnlySameTemporalStructure) { + VideoCodec codec; + codec.codecType = kVideoCodecVP9; + codec.width = 1280; + codec.height = 720; + codec.minBitrate = 10; + codec.maxBitrate = 2500; + codec.numberOfSimulcastStreams = 3; + codec.VP9()->numberOfSpatialLayers = 1; + codec.VP9()->interLayerPred = InterLayerPredMode::kOff; + + codec.simulcastStream[0] = {.width = 320, + 
.height = 180, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 100, + .targetBitrate = 70, + .minBitrate = 50, + .qpMax = 150, + .active = true}; + codec.simulcastStream[1] = {.width = 640, + .height = 360, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 250, + .targetBitrate = 150, + .minBitrate = 100, + .qpMax = 150, + .active = true}; + codec.simulcastStream[2] = {.width = 1280, + .height = 720, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 1500, + .targetBitrate = 1200, + .minBitrate = 800, + .qpMax = 150, + .active = true}; + EXPECT_TRUE(SimulcastToSvcConverter::IsConfigSupported(codec)); + + codec.simulcastStream[0].numberOfTemporalLayers = 1; + EXPECT_FALSE(SimulcastToSvcConverter::IsConfigSupported(codec)); +} + +TEST(SimulcastToSvc, SupportsOnly421Scaling) { + VideoCodec codec; + codec.codecType = kVideoCodecVP9; + codec.width = 1280; + codec.height = 720; + codec.minBitrate = 10; + codec.maxBitrate = 2500; + codec.numberOfSimulcastStreams = 3; + codec.VP9()->numberOfSpatialLayers = 1; + codec.VP9()->interLayerPred = InterLayerPredMode::kOff; + + codec.simulcastStream[0] = {.width = 320, + .height = 180, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 100, + .targetBitrate = 70, + .minBitrate = 50, + .qpMax = 150, + .active = true}; + codec.simulcastStream[1] = {.width = 640, + .height = 360, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 250, + .targetBitrate = 150, + .minBitrate = 100, + .qpMax = 150, + .active = true}; + codec.simulcastStream[2] = {.width = 1280, + .height = 720, + .maxFramerate = 30, + .numberOfTemporalLayers = 3, + .maxBitrate = 1500, + .targetBitrate = 1200, + .minBitrate = 800, + .qpMax = 150, + .active = true}; + EXPECT_TRUE(SimulcastToSvcConverter::IsConfigSupported(codec)); + + codec.simulcastStream[0].width = 160; + codec.simulcastStream[0].height = 90; + EXPECT_FALSE(SimulcastToSvcConverter::IsConfigSupported(codec)); +} + +} // namespace webrtc diff --git a/modules/video_coding/svc/svc_rate_allocator.cc b/modules/video_coding/svc/svc_rate_allocator.cc index f3514a1a77..fbf5dcd9ea 100644 --- a/modules/video_coding/svc/svc_rate_allocator.cc +++ b/modules/video_coding/svc/svc_rate_allocator.cc @@ -14,10 +14,20 @@ #include #include #include +#include #include #include "absl/container/inlined_vector.h" +#include "api/field_trials_view.h" +#include "api/units/data_rate.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/video_codec.h" #include "modules/video_coding/svc/create_scalability_structure.h" +#include "modules/video_coding/svc/scalable_video_controller.h" #include "rtc_base/checks.h" namespace webrtc { @@ -117,6 +127,46 @@ static std::vector SplitBitrate(size_t num_layers, return bitrates; } +VideoBitrateAllocation DistributeAllocationToTemporalLayers( + std::vector spatial_layer_birates, + size_t first_active_layer, + size_t num_temporal_layers) { + // Distribute rate across temporal layers. Allocate more bits to lower + // layers since they are used for prediction of higher layers and their + // references are far apart. 
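+  // This helper is shared by the normal-video and screen-sharing paths:
+  // Allocate() first computes per-spatial-layer rates and then maps them onto
+  // temporal layers here.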
+ VideoBitrateAllocation bitrate_allocation; + for (size_t sl_idx = 0; sl_idx < spatial_layer_birates.size(); ++sl_idx) { + std::vector temporal_layer_bitrates = + SplitBitrate(num_temporal_layers, spatial_layer_birates[sl_idx], + kTemporalLayeringRateScalingFactor); + + if (num_temporal_layers == 1) { + bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 0, + temporal_layer_bitrates[0].bps()); + } else if (num_temporal_layers == 2) { + bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 0, + temporal_layer_bitrates[1].bps()); + bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 1, + temporal_layer_bitrates[0].bps()); + } else { + RTC_CHECK_EQ(num_temporal_layers, 3); + // In case of three temporal layers the high layer has two frames and the + // middle layer has one frame within GOP (in between two consecutive low + // layer frames). Thus high layer requires more bits (comparing pure + // bitrate of layer, excluding bitrate of base layers) to keep quality on + // par with lower layers. + bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 0, + temporal_layer_bitrates[2].bps()); + bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 1, + temporal_layer_bitrates[0].bps()); + bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 2, + temporal_layer_bitrates[1].bps()); + } + } + + return bitrate_allocation; +} + // Returns the minimum bitrate needed for `num_active_layers` spatial layers to // become active using the configuration specified by `codec`. DataRate FindLayerTogglingThreshold(const VideoCodec& codec, @@ -174,7 +224,7 @@ DataRate FindLayerTogglingThreshold(const VideoCodec& codec, SvcRateAllocator::NumLayers SvcRateAllocator::GetNumLayers( const VideoCodec& codec) { NumLayers layers; - if (absl::optional scalability_mode = + if (std::optional scalability_mode = codec.GetScalabilityMode(); scalability_mode.has_value()) { if (auto structure = CreateScalabilityStructure(*scalability_mode)) { @@ -195,10 +245,11 @@ SvcRateAllocator::NumLayers SvcRateAllocator::GetNumLayers( return layers; } -SvcRateAllocator::SvcRateAllocator(const VideoCodec& codec) +SvcRateAllocator::SvcRateAllocator(const VideoCodec& codec, + const FieldTrialsView& field_trials) : codec_(codec), num_layers_(GetNumLayers(codec)), - experiment_settings_(StableTargetRateExperiment::ParseFromFieldTrials()), + experiment_settings_(field_trials), cumulative_layer_start_bitrates_(GetLayerStartBitrates(codec)), last_active_layer_count_(0) { RTC_DCHECK_GT(num_layers_.spatial, 0); @@ -269,19 +320,24 @@ VideoBitrateAllocation SvcRateAllocator::Allocate( } last_active_layer_count_ = num_spatial_layers; - VideoBitrateAllocation allocation; + std::vector spatial_layer_bitrates; if (codec_.mode == VideoCodecMode::kRealtimeVideo) { - allocation = GetAllocationNormalVideo(total_bitrate, active_layers.first, - num_spatial_layers); + spatial_layer_bitrates = DistributeAllocationToSpatialLayersNormalVideo( + total_bitrate, active_layers.first, num_spatial_layers); } else { - allocation = GetAllocationScreenSharing(total_bitrate, active_layers.first, - num_spatial_layers); + spatial_layer_bitrates = DistributeAllocationToSpatialLayersScreenSharing( + total_bitrate, active_layers.first, num_spatial_layers); } + + VideoBitrateAllocation allocation = DistributeAllocationToTemporalLayers( + spatial_layer_bitrates, active_layers.first, num_layers_.temporal); + allocation.set_bw_limited(num_spatial_layers < active_layers.num); return allocation; } -VideoBitrateAllocation 
SvcRateAllocator::GetAllocationNormalVideo( +std::vector +SvcRateAllocator::DistributeAllocationToSpatialLayersNormalVideo( DataRate total_bitrate, size_t first_active_layer, size_t num_spatial_layers) const { @@ -291,67 +347,33 @@ VideoBitrateAllocation SvcRateAllocator::GetAllocationNormalVideo( // bitrate anyway. num_spatial_layers = 1; spatial_layer_rates.push_back(total_bitrate); - } else { - spatial_layer_rates = - AdjustAndVerify(codec_, first_active_layer, - SplitBitrate(num_spatial_layers, total_bitrate, - kSpatialLayeringRateScalingFactor)); - RTC_DCHECK_EQ(spatial_layer_rates.size(), num_spatial_layers); + return spatial_layer_rates; } - VideoBitrateAllocation bitrate_allocation; - - for (size_t sl_idx = 0; sl_idx < num_spatial_layers; ++sl_idx) { - std::vector temporal_layer_rates = - SplitBitrate(num_layers_.temporal, spatial_layer_rates[sl_idx], - kTemporalLayeringRateScalingFactor); - - // Distribute rate across temporal layers. Allocate more bits to lower - // layers since they are used for prediction of higher layers and their - // references are far apart. - if (num_layers_.temporal == 1) { - bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 0, - temporal_layer_rates[0].bps()); - } else if (num_layers_.temporal == 2) { - bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 0, - temporal_layer_rates[1].bps()); - bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 1, - temporal_layer_rates[0].bps()); - } else { - RTC_CHECK_EQ(num_layers_.temporal, 3); - // In case of three temporal layers the high layer has two frames and the - // middle layer has one frame within GOP (in between two consecutive low - // layer frames). Thus high layer requires more bits (comparing pure - // bitrate of layer, excluding bitrate of base layers) to keep quality on - // par with lower layers. - bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 0, - temporal_layer_rates[2].bps()); - bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 1, - temporal_layer_rates[0].bps()); - bitrate_allocation.SetBitrate(sl_idx + first_active_layer, 2, - temporal_layer_rates[1].bps()); - } - } - - return bitrate_allocation; + spatial_layer_rates = + AdjustAndVerify(codec_, first_active_layer, + SplitBitrate(num_spatial_layers, total_bitrate, + kSpatialLayeringRateScalingFactor)); + RTC_DCHECK_EQ(spatial_layer_rates.size(), num_spatial_layers); + return spatial_layer_rates; } // Bit-rate is allocated in such a way, that the highest enabled layer will have // between min and max bitrate, and all others will have exactly target // bit-rate allocated. -VideoBitrateAllocation SvcRateAllocator::GetAllocationScreenSharing( +std::vector +SvcRateAllocator::DistributeAllocationToSpatialLayersScreenSharing( DataRate total_bitrate, size_t first_active_layer, size_t num_spatial_layers) const { - VideoBitrateAllocation bitrate_allocation; - + std::vector spatial_layer_rates; if (num_spatial_layers == 0 || total_bitrate < DataRate::KilobitsPerSec( codec_.spatialLayers[first_active_layer].minBitrate)) { // Always enable at least one layer. 
- bitrate_allocation.SetBitrate(first_active_layer, 0, total_bitrate.bps()); - return bitrate_allocation; + spatial_layer_rates.push_back(total_bitrate); + return spatial_layer_rates; } DataRate allocated_rate = DataRate::Zero(); @@ -370,7 +392,7 @@ VideoBitrateAllocation SvcRateAllocator::GetAllocationScreenSharing( } top_layer_rate = std::min(target_rate, total_bitrate - allocated_rate); - bitrate_allocation.SetBitrate(sl_idx, 0, top_layer_rate.bps()); + spatial_layer_rates.push_back(top_layer_rate); allocated_rate += top_layer_rate; } @@ -379,10 +401,10 @@ VideoBitrateAllocation SvcRateAllocator::GetAllocationScreenSharing( top_layer_rate = std::min( top_layer_rate + (total_bitrate - allocated_rate), DataRate::KilobitsPerSec(codec_.spatialLayers[sl_idx - 1].maxBitrate)); - bitrate_allocation.SetBitrate(sl_idx - 1, 0, top_layer_rate.bps()); + spatial_layer_rates.back() = top_layer_rate; } - return bitrate_allocation; + return spatial_layer_rates; } size_t SvcRateAllocator::FindNumEnabledLayers(DataRate target_rate) const { diff --git a/modules/video_coding/svc/svc_rate_allocator.h b/modules/video_coding/svc/svc_rate_allocator.h index bd75fca284..a8d05b6ec0 100644 --- a/modules/video_coding/svc/svc_rate_allocator.h +++ b/modules/video_coding/svc/svc_rate_allocator.h @@ -12,9 +12,12 @@ #define MODULES_VIDEO_CODING_SVC_SVC_RATE_ALLOCATOR_H_ #include -#include + +#include #include "absl/container/inlined_vector.h" +#include "api/field_trials_view.h" +#include "api/units/data_rate.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_bitrate_allocator.h" #include "api/video/video_codec_constants.h" @@ -25,7 +28,8 @@ namespace webrtc { class SvcRateAllocator : public VideoBitrateAllocator { public: - explicit SvcRateAllocator(const VideoCodec& codec); + SvcRateAllocator(const VideoCodec& codec, + const FieldTrialsView& field_trials); VideoBitrateAllocation Allocate( VideoBitrateAllocationParameters parameters) override; @@ -42,12 +46,12 @@ class SvcRateAllocator : public VideoBitrateAllocator { }; static NumLayers GetNumLayers(const VideoCodec& codec); - VideoBitrateAllocation GetAllocationNormalVideo( + std::vector DistributeAllocationToSpatialLayersNormalVideo( DataRate total_bitrate, size_t first_active_layer, size_t num_spatial_layers) const; - VideoBitrateAllocation GetAllocationScreenSharing( + std::vector DistributeAllocationToSpatialLayersScreenSharing( DataRate total_bitrate, size_t first_active_layer, size_t num_spatial_layers) const; diff --git a/modules/video_coding/svc/svc_rate_allocator_unittest.cc b/modules/video_coding/svc/svc_rate_allocator_unittest.cc index 44d1eae667..09a769a0ec 100644 --- a/modules/video_coding/svc/svc_rate_allocator_unittest.cc +++ b/modules/video_coding/svc/svc_rate_allocator_unittest.cc @@ -11,17 +11,33 @@ #include "modules/video_coding/svc/svc_rate_allocator.h" #include +#include +#include #include +#include "absl/container/inlined_vector.h" +#include "api/units/data_rate.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/spatial_layer.h" +#include "api/video_codecs/video_codec.h" +#include "modules/video_coding/codecs/av1/av1_svc_config.h" #include "modules/video_coding/codecs/vp9/svc_config.h" #include "rtc_base/checks.h" -#include "test/field_trial.h" +#include "test/explicit_key_value_config.h" #include "test/gtest.h" 
namespace webrtc { namespace test { namespace { -static VideoCodec Configure(size_t width, +using ::testing::Bool; +using ::testing::TestWithParam; + +static VideoCodec Configure(VideoCodecType codecType, + size_t width, size_t height, size_t num_spatial_layers, size_t num_temporal_layers, @@ -29,32 +45,49 @@ static VideoCodec Configure(size_t width, VideoCodec codec; codec.width = width; codec.height = height; - codec.codecType = kVideoCodecVP9; + codec.codecType = codecType; codec.mode = is_screen_sharing ? VideoCodecMode::kScreensharing : VideoCodecMode::kRealtimeVideo; - std::vector spatial_layers = - GetSvcConfig(width, height, 30, /*first_active_layer=*/0, - num_spatial_layers, num_temporal_layers, is_screen_sharing); - RTC_CHECK_LE(spatial_layers.size(), kMaxSpatialLayers); + std::vector spatial_layers; + if (codecType == kVideoCodecVP9) { + spatial_layers = GetSvcConfig(width, height, 30, /*first_active_layer=*/0, + num_spatial_layers, num_temporal_layers, + is_screen_sharing); + RTC_CHECK_LE(spatial_layers.size(), kMaxSpatialLayers); + + codec.VP9()->numberOfSpatialLayers = + std::min(num_spatial_layers, spatial_layers.size()); + codec.VP9()->numberOfTemporalLayers = std::min( + num_temporal_layers, spatial_layers.back().numberOfTemporalLayers); - codec.VP9()->numberOfSpatialLayers = - std::min(num_spatial_layers, spatial_layers.size()); - codec.VP9()->numberOfTemporalLayers = std::min( - num_temporal_layers, spatial_layers.back().numberOfTemporalLayers); + for (size_t sl_idx = 0; sl_idx < spatial_layers.size(); ++sl_idx) { + codec.spatialLayers[sl_idx] = spatial_layers[sl_idx]; + } - for (size_t sl_idx = 0; sl_idx < spatial_layers.size(); ++sl_idx) { - codec.spatialLayers[sl_idx] = spatial_layers[sl_idx]; + return codec; } + RTC_DCHECK_EQ(codecType, kVideoCodecAV1); + + if (num_spatial_layers == 1) { + // SetAv1SvcConfig expects bitrate limits for be set when single spatial + // layer is requested. 
+ codec.minBitrate = 30; + codec.maxBitrate = 5000; + } + + SetAv1SvcConfig(codec, num_temporal_layers, num_spatial_layers); + return codec; } } // namespace TEST(SvcRateAllocatorTest, SingleLayerFor320x180Input) { - VideoCodec codec = Configure(320, 180, 3, 3, false); - SvcRateAllocator allocator = SvcRateAllocator(codec); + VideoCodec codec = Configure(kVideoCodecVP9, 320, 180, 3, 3, false); + ExplicitKeyValueConfig field_trials(""); + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); VideoBitrateAllocation allocation = allocator.Allocate(VideoBitrateAllocationParameters(1000 * 1000, 30)); @@ -64,8 +97,9 @@ TEST(SvcRateAllocatorTest, SingleLayerFor320x180Input) { } TEST(SvcRateAllocatorTest, TwoLayersFor640x360Input) { - VideoCodec codec = Configure(640, 360, 3, 3, false); - SvcRateAllocator allocator = SvcRateAllocator(codec); + VideoCodec codec = Configure(kVideoCodecVP9, 640, 360, 3, 3, false); + ExplicitKeyValueConfig field_trials(""); + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); VideoBitrateAllocation allocation = allocator.Allocate(VideoBitrateAllocationParameters(1000 * 1000, 30)); @@ -76,8 +110,9 @@ TEST(SvcRateAllocatorTest, TwoLayersFor640x360Input) { } TEST(SvcRateAllocatorTest, ThreeLayersFor1280x720Input) { - VideoCodec codec = Configure(1280, 720, 3, 3, false); - SvcRateAllocator allocator = SvcRateAllocator(codec); + VideoCodec codec = Configure(kVideoCodecVP9, 1280, 720, 3, 3, false); + ExplicitKeyValueConfig field_trials(""); + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); VideoBitrateAllocation allocation = allocator.Allocate(VideoBitrateAllocationParameters(1000 * 1000, 30)); @@ -89,8 +124,9 @@ TEST(SvcRateAllocatorTest, ThreeLayersFor1280x720Input) { TEST(SvcRateAllocatorTest, BaseLayerNonZeroBitrateEvenIfTotalIfLessThanMinimum) { - VideoCodec codec = Configure(1280, 720, 3, 3, false); - SvcRateAllocator allocator = SvcRateAllocator(codec); + VideoCodec codec = Configure(kVideoCodecVP9, 1280, 720, 3, 3, false); + ExplicitKeyValueConfig field_trials(""); + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); const SpatialLayer* layers = codec.spatialLayers; @@ -103,8 +139,9 @@ TEST(SvcRateAllocatorTest, } TEST(SvcRateAllocatorTest, Disable640x360Layer) { - VideoCodec codec = Configure(1280, 720, 3, 3, false); - SvcRateAllocator allocator = SvcRateAllocator(codec); + VideoCodec codec = Configure(kVideoCodecVP9, 1280, 720, 3, 3, false); + ExplicitKeyValueConfig field_trials(""); + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); const SpatialLayer* layers = codec.spatialLayers; @@ -120,8 +157,9 @@ TEST(SvcRateAllocatorTest, Disable640x360Layer) { } TEST(SvcRateAllocatorTest, Disable1280x720Layer) { - VideoCodec codec = Configure(1280, 720, 3, 3, false); - SvcRateAllocator allocator = SvcRateAllocator(codec); + VideoCodec codec = Configure(kVideoCodecVP9, 1280, 720, 3, 3, false); + ExplicitKeyValueConfig field_trials(""); + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); const SpatialLayer* layers = codec.spatialLayers; @@ -138,8 +176,9 @@ TEST(SvcRateAllocatorTest, Disable1280x720Layer) { } TEST(SvcRateAllocatorTest, BitrateIsCapped) { - VideoCodec codec = Configure(1280, 720, 3, 3, false); - SvcRateAllocator allocator = SvcRateAllocator(codec); + VideoCodec codec = Configure(kVideoCodecVP9, 1280, 720, 3, 3, false); + ExplicitKeyValueConfig field_trials(""); + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); const SpatialLayer* layers 
= codec.spatialLayers; @@ -155,8 +194,9 @@ TEST(SvcRateAllocatorTest, BitrateIsCapped) { } TEST(SvcRateAllocatorTest, MinBitrateToGetQualityLayer) { - VideoCodec codec = Configure(1280, 720, 3, 1, true); - SvcRateAllocator allocator = SvcRateAllocator(codec); + VideoCodec codec = Configure(kVideoCodecVP9, 1280, 720, 3, 1, true); + ExplicitKeyValueConfig field_trials(""); + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); const SpatialLayer* layers = codec.spatialLayers; @@ -174,14 +214,15 @@ TEST(SvcRateAllocatorTest, MinBitrateToGetQualityLayer) { } TEST(SvcRateAllocatorTest, DeactivateHigherLayers) { + ExplicitKeyValueConfig field_trials(""); for (int deactivated_idx = 2; deactivated_idx >= 0; --deactivated_idx) { - VideoCodec codec = Configure(1280, 720, 3, 1, false); + VideoCodec codec = Configure(kVideoCodecVP9, 1280, 720, 3, 1, false); EXPECT_LE(codec.VP9()->numberOfSpatialLayers, 3U); for (int i = deactivated_idx; i < 3; ++i) codec.spatialLayers[i].active = false; - SvcRateAllocator allocator = SvcRateAllocator(codec); + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); VideoBitrateAllocation allocation = allocator.Allocate( VideoBitrateAllocationParameters(10 * 1000 * 1000, 30)); @@ -199,14 +240,15 @@ TEST(SvcRateAllocatorTest, DeactivateHigherLayers) { } TEST(SvcRateAllocatorTest, DeactivateLowerLayers) { + ExplicitKeyValueConfig field_trials(""); for (int deactivated_idx = 0; deactivated_idx < 3; ++deactivated_idx) { - VideoCodec codec = Configure(1280, 720, 3, 1, false); + VideoCodec codec = Configure(kVideoCodecVP9, 1280, 720, 3, 1, false); EXPECT_LE(codec.VP9()->numberOfSpatialLayers, 3U); for (int i = deactivated_idx; i >= 0; --i) codec.spatialLayers[i].active = false; - SvcRateAllocator allocator = SvcRateAllocator(codec); + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); VideoBitrateAllocation allocation = allocator.Allocate( VideoBitrateAllocationParameters(10 * 1000 * 1000, 30)); @@ -225,8 +267,9 @@ TEST(SvcRateAllocatorTest, DeactivateLowerLayers) { } TEST(SvcRateAllocatorTest, SignalsBwLimited) { - VideoCodec codec = Configure(1280, 720, 3, 1, false); - SvcRateAllocator allocator = SvcRateAllocator(codec); + VideoCodec codec = Configure(kVideoCodecVP9, 1280, 720, 3, 1, false); + ExplicitKeyValueConfig field_trials(""); + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); // Rough estimate calculated by hand. uint32_t min_to_enable_all = 900000; @@ -243,7 +286,7 @@ TEST(SvcRateAllocatorTest, SignalsBwLimited) { } TEST(SvcRateAllocatorTest, NoPaddingIfAllLayersAreDeactivated) { - VideoCodec codec = Configure(1280, 720, 3, 1, false); + VideoCodec codec = Configure(kVideoCodecVP9, 1280, 720, 3, 1, false); EXPECT_EQ(codec.VP9()->numberOfSpatialLayers, 3U); // Deactivation of base layer deactivates all layers. 
codec.spatialLayers[0].active = false; @@ -262,7 +305,7 @@ TEST(SvcRateAllocatorTest, FindLayerTogglingThreshold) { const DataRate kTwoLayerMinRate = DataRate::BitsPerSec(299150); const DataRate kThreeLayerMinRate = DataRate::BitsPerSec(891052); - VideoCodec codec = Configure(1280, 720, 3, 1, false); + VideoCodec codec = Configure(kVideoCodecVP9, 1280, 720, 3, 1, false); absl::InlinedVector layer_start_bitrates = SvcRateAllocator::GetLayerStartBitrates(codec); ASSERT_EQ(layer_start_bitrates.size(), 3u); @@ -288,8 +331,9 @@ TEST(SvcRateAllocatorTest, SupportsAv1) { codec.spatialLayers[2].minBitrate = 193; codec.spatialLayers[2].targetBitrate = 305; codec.spatialLayers[2].maxBitrate = 418; + ExplicitKeyValueConfig field_trials(""); - SvcRateAllocator allocator(codec); + SvcRateAllocator allocator(codec, field_trials); VideoBitrateAllocation allocation = allocator.Allocate(VideoBitrateAllocationParameters(1'000'000, 30)); @@ -317,8 +361,9 @@ TEST(SvcRateAllocatorTest, SupportsAv1WithSkippedLayer) { codec.spatialLayers[2].minBitrate = 193; codec.spatialLayers[2].targetBitrate = 305; codec.spatialLayers[2].maxBitrate = 418; + ExplicitKeyValueConfig field_trials(""); - SvcRateAllocator allocator(codec); + SvcRateAllocator allocator(codec, field_trials); VideoBitrateAllocation allocation = allocator.Allocate(VideoBitrateAllocationParameters(1'000'000, 30)); @@ -346,8 +391,9 @@ TEST(SvcRateAllocatorTest, UsesScalabilityModeToGetNumberOfLayers) { codec.spatialLayers[2].minBitrate = 193; codec.spatialLayers[2].targetBitrate = 305; codec.spatialLayers[2].maxBitrate = 418; + ExplicitKeyValueConfig field_trials(""); - SvcRateAllocator allocator(codec); + SvcRateAllocator allocator(codec, field_trials); VideoBitrateAllocation allocation = allocator.Allocate(VideoBitrateAllocationParameters(1'000'000, 30)); @@ -362,9 +408,11 @@ TEST(SvcRateAllocatorTest, UsesScalabilityModeToGetNumberOfLayers) { } TEST(SvcRateAllocatorTest, CapsAllocationToMaxBitrate) { - VideoCodec codec = Configure(1280, 720, 3, 3, false); + VideoCodec codec = Configure(kVideoCodecVP9, 1280, 720, 3, 3, false); codec.maxBitrate = 70; // Cap the overall max bitrate to 70kbps. - SvcRateAllocator allocator = SvcRateAllocator(codec); + ExplicitKeyValueConfig field_trials(""); + + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); // Allocate 3Mbps which should be enough for all layers. 
VideoBitrateAllocation allocation = @@ -376,9 +424,7 @@ TEST(SvcRateAllocatorTest, CapsAllocationToMaxBitrate) { EXPECT_EQ(allocation.GetSpatialLayerSum(2), 0u); } -class SvcRateAllocatorTestParametrizedContentType - : public ::testing::Test, - public ::testing::WithParamInterface { +class SvcRateAllocatorTestParametrizedContentType : public TestWithParam { public: SvcRateAllocatorTestParametrizedContentType() : is_screen_sharing_(GetParam()) {} @@ -387,7 +433,8 @@ class SvcRateAllocatorTestParametrizedContentType }; TEST_P(SvcRateAllocatorTestParametrizedContentType, MaxBitrate) { - VideoCodec codec = Configure(1280, 720, 3, 1, is_screen_sharing_); + VideoCodec codec = + Configure(kVideoCodecVP9, 1280, 720, 3, 1, is_screen_sharing_); EXPECT_EQ(SvcRateAllocator::GetMaxBitrate(codec), DataRate::KilobitsPerSec(codec.spatialLayers[0].maxBitrate + codec.spatialLayers[1].maxBitrate + @@ -400,8 +447,10 @@ TEST_P(SvcRateAllocatorTestParametrizedContentType, MaxBitrate) { } TEST_P(SvcRateAllocatorTestParametrizedContentType, PaddingBitrate) { - VideoCodec codec = Configure(1280, 720, 3, 1, is_screen_sharing_); - SvcRateAllocator allocator = SvcRateAllocator(codec); + VideoCodec codec = + Configure(kVideoCodecVP9, 1280, 720, 3, 1, is_screen_sharing_); + ExplicitKeyValueConfig field_trials(""); + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); DataRate padding_bitrate = SvcRateAllocator::GetPaddingBitrate(codec); @@ -445,11 +494,12 @@ TEST_P(SvcRateAllocatorTestParametrizedContentType, PaddingBitrate) { } TEST_P(SvcRateAllocatorTestParametrizedContentType, StableBitrate) { - ScopedFieldTrials field_trial( + ExplicitKeyValueConfig field_trials( "WebRTC-StableTargetRate/enabled:true,video_hysteresis_factor:1.0," "screenshare_hysteresis_factor:1.0/"); - const VideoCodec codec = Configure(1280, 720, 3, 1, is_screen_sharing_); + const VideoCodec codec = + Configure(kVideoCodecVP9, 1280, 720, 3, 1, is_screen_sharing_); const auto start_rates = SvcRateAllocator::GetLayerStartBitrates(codec); const DataRate min_rate_two_layers = start_rates[1]; const DataRate min_rate_three_layers = start_rates[2]; @@ -463,7 +513,7 @@ TEST_P(SvcRateAllocatorTestParametrizedContentType, StableBitrate) { : DataRate::KilobitsPerSec(codec.spatialLayers[0].maxBitrate + codec.spatialLayers[1].maxBitrate); - SvcRateAllocator allocator = SvcRateAllocator(codec); + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); // Two layers, stable and target equal. auto allocation = allocator.Allocate(VideoBitrateAllocationParameters( @@ -502,16 +552,17 @@ TEST_P(SvcRateAllocatorTestParametrizedContentType, StableBitrate) { TEST_P(SvcRateAllocatorTestParametrizedContentType, StableBitrateWithHysteresis) { - const VideoCodec codec = Configure(1280, 720, 3, 1, is_screen_sharing_); + const VideoCodec codec = + Configure(kVideoCodecVP9, 1280, 720, 3, 1, is_screen_sharing_); const auto start_rates = SvcRateAllocator::GetLayerStartBitrates(codec); const DataRate min_rate_single_layer = start_rates[0]; const DataRate min_rate_two_layers = start_rates[1]; const DataRate min_rate_three_layers = start_rates[2]; - ScopedFieldTrials field_trial( + ExplicitKeyValueConfig field_trials( "WebRTC-StableTargetRate/enabled:true,video_hysteresis_factor:1.1," "screenshare_hysteresis_factor:1.1/"); - SvcRateAllocator allocator = SvcRateAllocator(codec); + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); // Always use max bitrate as target, verify only stable is used for layer // count selection. 
const DataRate max_bitrate = allocator.GetMaxBitrate(codec); @@ -591,9 +642,41 @@ TEST_P(SvcRateAllocatorTestParametrizedContentType, EXPECT_FALSE(allocation.IsSpatialLayerUsed(2)); } +TEST_P(SvcRateAllocatorTestParametrizedContentType, TwoTemporalLayersAv1) { + VideoCodec codec = + Configure(kVideoCodecAV1, 1280, 720, 1, 2, is_screen_sharing_); + ExplicitKeyValueConfig field_trials(""); + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); + VideoBitrateAllocation allocation = + allocator.Allocate(VideoBitrateAllocationParameters( + /*total_bitrate_bps=*/1024'000, /*framerate=*/30)); + + EXPECT_EQ(allocation.GetBitrate(/*spatial_index=*/0, /*temporal_index=*/0), + 660645u); + EXPECT_EQ(allocation.GetBitrate(/*spatial_index=*/0, /*temporal_index=*/1), + 363355u); +} + +TEST_P(SvcRateAllocatorTestParametrizedContentType, ThreeTemporalLayersAv1) { + VideoCodec codec = + Configure(kVideoCodecAV1, 1280, 720, 1, 3, is_screen_sharing_); + ExplicitKeyValueConfig field_trials(""); + SvcRateAllocator allocator = SvcRateAllocator(codec, field_trials); + VideoBitrateAllocation allocation = + allocator.Allocate(VideoBitrateAllocationParameters( + /*total_bitrate_bps=*/1024'000, /*framerate=*/30)); + + EXPECT_EQ(allocation.GetBitrate(/*spatial_index=*/0, /*temporal_index=*/0), + 552766u); + EXPECT_EQ(allocation.GetBitrate(/*spatial_index=*/0, /*temporal_index=*/1), + 167212u); + EXPECT_EQ(allocation.GetBitrate(/*spatial_index=*/0, /*temporal_index=*/2), + 304022u); +} + INSTANTIATE_TEST_SUITE_P(_, SvcRateAllocatorTestParametrizedContentType, - ::testing::Bool()); + Bool()); } // namespace test } // namespace webrtc diff --git a/modules/video_coding/timing/BUILD.gn b/modules/video_coding/timing/BUILD.gn index 9802b05a46..a24f570c3e 100644 --- a/modules/video_coding/timing/BUILD.gn +++ b/modules/video_coding/timing/BUILD.gn @@ -28,7 +28,6 @@ rtc_library("inter_frame_delay_variation_calculator") { "../../../api/units:timestamp", "../../../rtc_base:rtc_numerics", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("frame_delay_variation_kalman_filter") { @@ -39,6 +38,7 @@ rtc_library("frame_delay_variation_kalman_filter") { deps = [ "../../../api/units:data_size", "../../../api/units:time_delta", + "../../../rtc_base:checks", ] visibility = [ ":jitter_estimator", @@ -66,10 +66,7 @@ rtc_library("jitter_estimator") { "../../../rtc_base:safe_conversions", "../../../rtc_base/experiments:field_trial_parser", "../../../system_wrappers", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -78,8 +75,9 @@ rtc_library("rtt_filter") { "rtt_filter.cc", "rtt_filter.h", ] - deps = [ "../../../api/units:time_delta" ] - absl_deps = [ + deps = [ + "../../../api/units:time_delta", + "../../../rtc_base:checks", "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/container:inlined_vector", ] @@ -91,11 +89,12 @@ rtc_library("timestamp_extrapolator") { "timestamp_extrapolator.h", ] deps = [ + "../../../api/units:time_delta", "../../../api/units:timestamp", "../../../modules:module_api_public", "../../../rtc_base:rtc_numerics", + "../../../system_wrappers:metrics", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("timing_module") { @@ -108,8 +107,10 @@ rtc_library("timing_module") { ":timestamp_extrapolator", "../../../api:field_trials_view", "../../../api/units:time_delta", + 
"../../../api/units:timestamp", "../../../api/video:video_frame", "../../../api/video:video_rtp_headers", + "../../../rtc_base:checks", "../../../rtc_base:logging", "../../../rtc_base:macromagic", "../../../rtc_base:rtc_numerics", @@ -117,7 +118,6 @@ rtc_library("timing_module") { "../../../rtc_base/synchronization:mutex", "../../../system_wrappers", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("timing_unittests") { @@ -145,9 +145,9 @@ rtc_library("timing_unittests") { "../../../api/units:timestamp", "../../../rtc_base:histogram_percentile_counter", "../../../rtc_base:timeutils", + "../../../system_wrappers:metrics", "../../../system_wrappers:system_wrappers", "../../../test:scoped_key_value_config", "../../../test:test_support", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } diff --git a/modules/video_coding/timing/decode_time_percentile_filter.h b/modules/video_coding/timing/decode_time_percentile_filter.h index 9a6aa5e6a4..c2c653d1a0 100644 --- a/modules/video_coding/timing/decode_time_percentile_filter.h +++ b/modules/video_coding/timing/decode_time_percentile_filter.h @@ -11,6 +11,7 @@ #ifndef MODULES_VIDEO_CODING_TIMING_DECODE_TIME_PERCENTILE_FILTER_H_ #define MODULES_VIDEO_CODING_TIMING_DECODE_TIME_PERCENTILE_FILTER_H_ +#include #include #include "rtc_base/numerics/percentile_filter.h" diff --git a/modules/video_coding/timing/frame_delay_variation_kalman_filter.cc b/modules/video_coding/timing/frame_delay_variation_kalman_filter.cc index ec6aa3445a..f968667aba 100644 --- a/modules/video_coding/timing/frame_delay_variation_kalman_filter.cc +++ b/modules/video_coding/timing/frame_delay_variation_kalman_filter.cc @@ -10,8 +10,9 @@ #include "modules/video_coding/timing/frame_delay_variation_kalman_filter.h" -#include "api/units/data_size.h" -#include "api/units/time_delta.h" +#include + +#include "rtc_base/checks.h" namespace webrtc { diff --git a/modules/video_coding/timing/frame_delay_variation_kalman_filter.h b/modules/video_coding/timing/frame_delay_variation_kalman_filter.h index a65ceefa10..e57f18c2b6 100644 --- a/modules/video_coding/timing/frame_delay_variation_kalman_filter.h +++ b/modules/video_coding/timing/frame_delay_variation_kalman_filter.h @@ -11,8 +11,6 @@ #ifndef MODULES_VIDEO_CODING_TIMING_FRAME_DELAY_VARIATION_KALMAN_FILTER_H_ #define MODULES_VIDEO_CODING_TIMING_FRAME_DELAY_VARIATION_KALMAN_FILTER_H_ -#include "api/units/data_size.h" -#include "api/units/time_delta.h" namespace webrtc { diff --git a/modules/video_coding/timing/inter_frame_delay_variation_calculator.cc b/modules/video_coding/timing/inter_frame_delay_variation_calculator.cc index 69bc916199..8a1441905c 100644 --- a/modules/video_coding/timing/inter_frame_delay_variation_calculator.cc +++ b/modules/video_coding/timing/inter_frame_delay_variation_calculator.cc @@ -10,9 +10,12 @@ #include "modules/video_coding/timing/inter_frame_delay_variation_calculator.h" -#include "absl/types/optional.h" +#include +#include + #include "api/units/frequency.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "modules/include/module_common_types_public.h" namespace webrtc { @@ -26,11 +29,11 @@ InterFrameDelayVariationCalculator::InterFrameDelayVariationCalculator() { } void InterFrameDelayVariationCalculator::Reset() { - prev_wall_clock_ = absl::nullopt; + prev_wall_clock_ = std::nullopt; prev_rtp_timestamp_unwrapped_ = 0; } -absl::optional InterFrameDelayVariationCalculator::Calculate( +std::optional 
InterFrameDelayVariationCalculator::Calculate( uint32_t rtp_timestamp, Timestamp now) { int64_t rtp_timestamp_unwrapped = unwrapper_.Unwrap(rtp_timestamp); @@ -39,7 +42,7 @@ absl::optional InterFrameDelayVariationCalculator::Calculate( prev_wall_clock_ = now; prev_rtp_timestamp_unwrapped_ = rtp_timestamp_unwrapped; // Inter-frame delay variation is undefined for a single frame. - // TODO(brandtr): Should this return absl::nullopt instead? + // TODO(brandtr): Should this return std::nullopt instead? return TimeDelta::Zero(); } @@ -49,7 +52,7 @@ absl::optional InterFrameDelayVariationCalculator::Calculate( uint32_t cropped_prev = static_cast(prev_rtp_timestamp_unwrapped_); if (rtp_timestamp_unwrapped < prev_rtp_timestamp_unwrapped_ || !IsNewerTimestamp(rtp_timestamp, cropped_prev)) { - return absl::nullopt; + return std::nullopt; } // Compute the compensated timestamp difference. diff --git a/modules/video_coding/timing/inter_frame_delay_variation_calculator.h b/modules/video_coding/timing/inter_frame_delay_variation_calculator.h index c1d3d205ad..d8a8c83840 100644 --- a/modules/video_coding/timing/inter_frame_delay_variation_calculator.h +++ b/modules/video_coding/timing/inter_frame_delay_variation_calculator.h @@ -13,7 +13,8 @@ #include -#include "absl/types/optional.h" +#include + #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" @@ -32,11 +33,11 @@ class InterFrameDelayVariationCalculator { // Calculates the inter-frame delay variation of a frame with the given // RTP timestamp. This method is called when the frame is complete. - absl::optional Calculate(uint32_t rtp_timestamp, Timestamp now); + std::optional Calculate(uint32_t rtp_timestamp, Timestamp now); private: // The previous wall clock timestamp used in the calculation. - absl::optional prev_wall_clock_; + std::optional prev_wall_clock_; // The previous RTP timestamp used in the calculation. int64_t prev_rtp_timestamp_unwrapped_; diff --git a/modules/video_coding/timing/inter_frame_delay_variation_calculator_unittest.cc b/modules/video_coding/timing/inter_frame_delay_variation_calculator_unittest.cc index ea719b64b5..024c65903b 100644 --- a/modules/video_coding/timing/inter_frame_delay_variation_calculator_unittest.cc +++ b/modules/video_coding/timing/inter_frame_delay_variation_calculator_unittest.cc @@ -10,9 +10,10 @@ #include "modules/video_coding/timing/inter_frame_delay_variation_calculator.h" +#include #include +#include -#include "absl/types/optional.h" #include "api/units/frequency.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -40,7 +41,7 @@ TEST(InterFrameDelayVariationCalculatorTest, OldRtpTimestamp) { InterFrameDelayVariationCalculator ifdv_calculator; EXPECT_THAT(ifdv_calculator.Calculate(180000, kStartTime), Optional(TimeDelta::Zero())); - EXPECT_THAT(ifdv_calculator.Calculate(90000, kStartTime), Eq(absl::nullopt)); + EXPECT_THAT(ifdv_calculator.Calculate(90000, kStartTime), Eq(std::nullopt)); } TEST(InterFrameDelayVariationCalculatorTest, @@ -51,7 +52,7 @@ TEST(InterFrameDelayVariationCalculatorTest, Optional(TimeDelta::Zero())); // RTP has wrapped around backwards. 
rtp -= 3000; - EXPECT_THAT(ifdv_calculator.Calculate(rtp, kStartTime), Eq(absl::nullopt)); + EXPECT_THAT(ifdv_calculator.Calculate(rtp, kStartTime), Eq(std::nullopt)); } TEST(InterFrameDelayVariationCalculatorTest, CorrectDelayForFrames) { @@ -184,7 +185,7 @@ TEST(InterFrameDelayVariationCalculatorTest, // Frame delay should be as normal, in this case simulated as 1ms late. clock.AdvanceTime(kFrameDelay); EXPECT_THAT(ifdv_calculator.Calculate(rtp, clock.CurrentTime()), - Eq(absl::nullopt)); + Eq(std::nullopt)); } } // namespace webrtc diff --git a/modules/video_coding/timing/jitter_estimator.cc b/modules/video_coding/timing/jitter_estimator.cc index 62757787a1..b2e433f8bc 100644 --- a/modules/video_coding/timing/jitter_estimator.cc +++ b/modules/video_coding/timing/jitter_estimator.cc @@ -14,18 +14,18 @@ #include #include -#include +#include -#include "absl/types/optional.h" +#include "absl/strings/string_view.h" #include "api/field_trials_view.h" #include "api/units/data_size.h" #include "api/units/frequency.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "modules/video_coding/timing/frame_delay_variation_kalman_filter.h" #include "modules/video_coding/timing/rtt_filter.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_conversions.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -155,9 +155,9 @@ void JitterEstimator::Reset() { var_frame_size_bytes2_ = 100; avg_frame_size_median_bytes_.Reset(); max_frame_size_bytes_percentile_.Reset(); - last_update_time_ = absl::nullopt; - prev_estimate_ = absl::nullopt; - prev_frame_size_ = absl::nullopt; + last_update_time_ = std::nullopt; + prev_estimate_ = std::nullopt; + prev_frame_size_ = std::nullopt; avg_noise_ms_ = 0.0; var_noise_ms2_ = 4.0; alpha_count_ = 1; @@ -424,7 +424,7 @@ void JitterEstimator::PostProcessEstimate() { // otherwise tries to calculate an estimate. TimeDelta JitterEstimator::GetJitterEstimate( double rtt_multiplier, - absl::optional rtt_mult_add_cap) { + std::optional rtt_mult_add_cap) { TimeDelta jitter = CalculateEstimate() + OPERATING_SYSTEM_JITTER; Timestamp now = clock_->CurrentTime(); diff --git a/modules/video_coding/timing/jitter_estimator.h b/modules/video_coding/timing/jitter_estimator.h index 89dc64934b..08f16aac89 100644 --- a/modules/video_coding/timing/jitter_estimator.h +++ b/modules/video_coding/timing/jitter_estimator.h @@ -11,12 +11,12 @@ #ifndef MODULES_VIDEO_CODING_TIMING_JITTER_ESTIMATOR_H_ #define MODULES_VIDEO_CODING_TIMING_JITTER_ESTIMATOR_H_ -#include +#include +#include #include -#include +#include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/field_trials_view.h" #include "api/units/data_size.h" #include "api/units/frequency.h" @@ -66,37 +66,37 @@ class JitterEstimator { // If set, the "max" frame size is calculated as this percentile over a // window of recent frame sizes. - absl::optional max_frame_size_percentile = absl::nullopt; + std::optional max_frame_size_percentile = std::nullopt; // The length of the percentile filters' window, in number of frames. - absl::optional frame_size_window = absl::nullopt; + std::optional frame_size_window = std::nullopt; // The incoming frame delay variation samples are clamped to be at most // this number of standard deviations away from zero. // // Increasing this value clamps fewer samples. 
- absl::optional num_stddev_delay_clamp = absl::nullopt; + std::optional num_stddev_delay_clamp = std::nullopt; // A (relative) frame delay variation sample is an outlier if its absolute // deviation from the Kalman filter model falls outside this number of // sample standard deviations. // // Increasing this value rejects fewer samples. - absl::optional num_stddev_delay_outlier = absl::nullopt; + std::optional num_stddev_delay_outlier = std::nullopt; // An (absolute) frame size sample is an outlier if its positive deviation // from the estimated average frame size falls outside this number of sample // standard deviations. // // Increasing this value rejects fewer samples. - absl::optional num_stddev_size_outlier = absl::nullopt; + std::optional num_stddev_size_outlier = std::nullopt; // A (relative) frame size variation sample is deemed "congested", and is // thus rejected, if its value is less than this factor times the estimated // max frame size. // // Decreasing this value rejects fewer samples. - absl::optional congestion_rejection_factor = absl::nullopt; + std::optional congestion_rejection_factor = std::nullopt; // If true, the noise estimate will be updated for congestion rejected // frames. This is currently enabled by default, but that may not be optimal @@ -128,7 +128,7 @@ class JitterEstimator { // // Return value : Jitter estimate. TimeDelta GetJitterEstimate(double rtt_multiplier, - absl::optional rtt_mult_add_cap); + std::optional rtt_mult_add_cap); // Updates the nack counter. void FrameNacked(); @@ -187,11 +187,11 @@ class JitterEstimator { double startup_frame_size_sum_bytes_; size_t startup_frame_size_count_; - absl::optional last_update_time_; + std::optional last_update_time_; // The previously returned jitter estimate - absl::optional prev_estimate_; + std::optional prev_estimate_; // Frame size of the previous frame - absl::optional prev_frame_size_; + std::optional prev_frame_size_; // Average of the random jitter. Unit is milliseconds. double avg_noise_ms_; // Variance of the time-deviation from the line. Unit is milliseconds^2. @@ -209,7 +209,7 @@ class JitterEstimator { RttFilter rtt_filter_; // Tracks frame rates in microseconds. - rtc::RollingAccumulator fps_counter_; + RollingAccumulator fps_counter_; Clock* clock_; }; diff --git a/modules/video_coding/timing/jitter_estimator_unittest.cc b/modules/video_coding/timing/jitter_estimator_unittest.cc index 8e0c01587f..3b3449ee6d 100644 --- a/modules/video_coding/timing/jitter_estimator_unittest.cc +++ b/modules/video_coding/timing/jitter_estimator_unittest.cc @@ -12,18 +12,16 @@ #include #include +#include #include #include #include -#include "absl/types/optional.h" -#include "api/array_view.h" #include "api/field_trials.h" #include "api/units/data_size.h" #include "api/units/frequency.h" #include "api/units/time_delta.h" #include "rtc_base/numerics/histogram_percentile_counter.h" -#include "rtc_base/time_utils.h" #include "system_wrappers/include/clock.h" #include "test/gtest.h" @@ -79,7 +77,7 @@ class JitterEstimatorTest : public ::testing::Test { TEST_F(JitterEstimatorTest, SteadyStateConvergence) { ValueGenerator gen(10); Run(/*duration_s=*/60, /*framerate_fps=*/30, gen); - EXPECT_EQ(estimator_.GetJitterEstimate(0, absl::nullopt).ms(), 54); + EXPECT_EQ(estimator_.GetJitterEstimate(0, std::nullopt).ms(), 54); } TEST_F(JitterEstimatorTest, @@ -88,12 +86,11 @@ TEST_F(JitterEstimatorTest, // Steady state. 
Run(/*duration_s=*/60, /*framerate_fps=*/30, gen); - TimeDelta steady_state_jitter = - estimator_.GetJitterEstimate(0, absl::nullopt); + TimeDelta steady_state_jitter = estimator_.GetJitterEstimate(0, std::nullopt); // A single outlier frame size... estimator_.UpdateEstimate(gen.Delay(), 10 * gen.FrameSize()); - TimeDelta outlier_jitter = estimator_.GetJitterEstimate(0, absl::nullopt); + TimeDelta outlier_jitter = estimator_.GetJitterEstimate(0, std::nullopt); // ...changes the estimate. EXPECT_GT(outlier_jitter.ms(), 1.25 * steady_state_jitter.ms()); @@ -107,14 +104,14 @@ TEST_F(JitterEstimatorTest, LowFramerateDisablesJitterEstimator) { estimator_.UpdateEstimate(gen.Delay(), gen.FrameSize()); fake_clock_.AdvanceTime(time_delta); if (i > 2) - EXPECT_EQ(estimator_.GetJitterEstimate(0, absl::nullopt), + EXPECT_EQ(estimator_.GetJitterEstimate(0, std::nullopt), TimeDelta::Zero()); gen.Advance(); } } TEST_F(JitterEstimatorTest, RttMultAddCap) { - std::vector> + std::vector> jitter_by_rtt_mult_cap; jitter_by_rtt_mult_cap.emplace_back( /*rtt_mult_add_cap=*/TimeDelta::Millis(10), /*long_tail_boundary=*/1000); @@ -150,12 +147,11 @@ TEST_F(JitterEstimatorTest, Single2xFrameSizeImpactsJitterEstimate) { // Steady state. Run(/*duration_s=*/60, /*framerate_fps=*/30, gen); - TimeDelta steady_state_jitter = - estimator_.GetJitterEstimate(0, absl::nullopt); + TimeDelta steady_state_jitter = estimator_.GetJitterEstimate(0, std::nullopt); // A single outlier frame size... estimator_.UpdateEstimate(gen.Delay(), 2 * gen.FrameSize()); - TimeDelta outlier_jitter = estimator_.GetJitterEstimate(0, absl::nullopt); + TimeDelta outlier_jitter = estimator_.GetJitterEstimate(0, std::nullopt); // ...impacts the estimate. EXPECT_GT(outlier_jitter.ms(), steady_state_jitter.ms()); @@ -168,12 +164,11 @@ TEST_F(JitterEstimatorTest, CongestedFrameImpactsJitterEstimate) { // Steady state. Run(/*duration_s=*/10, /*framerate_fps=*/30, gen); - TimeDelta steady_state_jitter = - estimator_.GetJitterEstimate(0, absl::nullopt); + TimeDelta steady_state_jitter = estimator_.GetJitterEstimate(0, std::nullopt); // Congested frame... estimator_.UpdateEstimate(-10 * gen.Delay(), 0.1 * gen.FrameSize()); - TimeDelta outlier_jitter = estimator_.GetJitterEstimate(0, absl::nullopt); + TimeDelta outlier_jitter = estimator_.GetJitterEstimate(0, std::nullopt); // ...impacts the estimate. EXPECT_GT(outlier_jitter.ms(), steady_state_jitter.ms()); @@ -225,12 +220,11 @@ TEST_F(FieldTrialsOverriddenJitterEstimatorTest, // Steady state. Run(/*duration_s=*/60, /*framerate_fps=*/30, gen); - TimeDelta steady_state_jitter = - estimator_.GetJitterEstimate(0, absl::nullopt); + TimeDelta steady_state_jitter = estimator_.GetJitterEstimate(0, std::nullopt); // A single outlier frame size... estimator_.UpdateEstimate(10 * gen.Delay(), gen.FrameSize()); - TimeDelta outlier_jitter = estimator_.GetJitterEstimate(0, absl::nullopt); + TimeDelta outlier_jitter = estimator_.GetJitterEstimate(0, std::nullopt); // ...does not change the estimate. EXPECT_EQ(outlier_jitter.ms(), steady_state_jitter.ms()); @@ -244,19 +238,18 @@ TEST_F(FieldTrialsOverriddenJitterEstimatorTest, // Steady state. Run(/*duration_s=*/60, /*framerate_fps=*/30, gen); - TimeDelta steady_state_jitter = - estimator_.GetJitterEstimate(0, absl::nullopt); + TimeDelta steady_state_jitter = estimator_.GetJitterEstimate(0, std::nullopt); // Three outlier frames do not impact the jitter estimate. 
for (int i = 0; i < 3; ++i) { estimator_.UpdateEstimate(gen.Delay(), 2 * gen.FrameSize()); } - TimeDelta outlier_jitter_3x = estimator_.GetJitterEstimate(0, absl::nullopt); + TimeDelta outlier_jitter_3x = estimator_.GetJitterEstimate(0, std::nullopt); EXPECT_EQ(outlier_jitter_3x.ms(), steady_state_jitter.ms()); // Four outlier frames do impact the jitter estimate. estimator_.UpdateEstimate(gen.Delay(), 2 * gen.FrameSize()); - TimeDelta outlier_jitter_4x = estimator_.GetJitterEstimate(0, absl::nullopt); + TimeDelta outlier_jitter_4x = estimator_.GetJitterEstimate(0, std::nullopt); EXPECT_GT(outlier_jitter_4x.ms(), outlier_jitter_3x.ms()); } @@ -268,12 +261,11 @@ TEST_F(FieldTrialsOverriddenJitterEstimatorTest, // Steady state. Run(/*duration_s=*/10, /*framerate_fps=*/30, gen); - TimeDelta steady_state_jitter = - estimator_.GetJitterEstimate(0, absl::nullopt); + TimeDelta steady_state_jitter = estimator_.GetJitterEstimate(0, std::nullopt); // Congested frame... estimator_.UpdateEstimate(-10 * gen.Delay(), 0.1 * gen.FrameSize()); - TimeDelta outlier_jitter = estimator_.GetJitterEstimate(0, absl::nullopt); + TimeDelta outlier_jitter = estimator_.GetJitterEstimate(0, std::nullopt); // ...does not impact the estimate. EXPECT_EQ(outlier_jitter.ms(), steady_state_jitter.ms()); diff --git a/modules/video_coding/timing/rtt_filter.cc b/modules/video_coding/timing/rtt_filter.cc index 6962224d61..6dd42af7e6 100644 --- a/modules/video_coding/timing/rtt_filter.cc +++ b/modules/video_coding/timing/rtt_filter.cc @@ -12,13 +12,13 @@ #include #include -#include #include +#include #include "absl/algorithm/container.h" -#include "absl/container/inlined_vector.h" #include "api/units/time_delta.h" +#include "rtc_base/checks.h" namespace webrtc { diff --git a/modules/video_coding/timing/rtt_filter_unittest.cc b/modules/video_coding/timing/rtt_filter_unittest.cc index 05502e6f5b..e089c4f67c 100644 --- a/modules/video_coding/timing/rtt_filter_unittest.cc +++ b/modules/video_coding/timing/rtt_filter_unittest.cc @@ -11,7 +11,6 @@ #include "modules/video_coding/timing/rtt_filter.h" #include "api/units/time_delta.h" -#include "test/gmock.h" #include "test/gtest.h" namespace webrtc { diff --git a/modules/video_coding/timing/timestamp_extrapolator.cc b/modules/video_coding/timing/timestamp_extrapolator.cc index a90df8bf7f..404ea60c05 100644 --- a/modules/video_coding/timing/timestamp_extrapolator.cc +++ b/modules/video_coding/timing/timestamp_extrapolator.cc @@ -11,16 +11,23 @@ #include "modules/video_coding/timing/timestamp_extrapolator.h" #include +#include +#include +#include -#include "absl/types/optional.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" +#include "system_wrappers/include/metrics.h" namespace webrtc { namespace { +constexpr int kMinimumSamplesToLogEstimatedClockDrift = + 3000; // 100 seconds at 30 fps. constexpr double kLambda = 1; -constexpr uint32_t kStartUpFilterDelayInPackets = 2; +constexpr int kStartUpFilterDelayInPackets = 2; constexpr double kAlarmThreshold = 60e3; // in timestamp ticks, i.e. 15 ms constexpr double kAccDrift = 6600; @@ -38,10 +45,20 @@ TimestampExtrapolator::TimestampExtrapolator(Timestamp start) Reset(start); } +TimestampExtrapolator::~TimestampExtrapolator() { + if (packet_count_ >= kMinimumSamplesToLogEstimatedClockDrift) { + // Relative clock drift per million (ppm). 
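// [Worked example, for illustration only; the numbers match the "slow clock"
// case in timestamp_extrapolator_unittest.cc further down in this patch.]
// w_[0] estimates RTP ticks per local millisecond, nominally 90.0 for the
// 90 kHz RTP clock. If RTP advances 3600 ticks per frame (25 fps spacing)
// while frames actually arrive every 1/24 s (~41.67 ms), w_[0] converges
// towards 3600 / 41.67 ~= 86.4, and the expression below gives
// 1e6 * (86.4 - 90.0) / 90.0 = -40000, i.e. |drift| = 40000 ppm
// = |24/25 - 1| * 1e6.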
+ double clock_drift_ppm = 1e6 * (w_[0] - 90.0) / 90.0; + RTC_HISTOGRAM_COUNTS_100000("WebRTC.Video.EstimatedClockDrift_ppm", + static_cast(std::abs(clock_drift_ppm))); + } +} + void TimestampExtrapolator::Reset(Timestamp start) { start_ = start; prev_ = start_; - first_unwrapped_timestamp_ = absl::nullopt; + first_unwrapped_timestamp_ = std::nullopt; + prev_unwrapped_timestamp_ = std::nullopt; w_[0] = 90.0; w_[1] = 0; p_[0][0] = 1; @@ -116,41 +133,45 @@ void TimestampExtrapolator::Update(Timestamp now, uint32_t ts90khz) { 1 / kLambda * (p_[1][1] - (K[1] * t_ms * p_[0][1] + K[1] * p_[1][1])); p_[0][0] = p00; p_[0][1] = p01; + prev_unwrapped_timestamp_ = unwrapped_ts90khz; - if (packet_count_ < kStartUpFilterDelayInPackets) { + if (packet_count_ < kStartUpFilterDelayInPackets || + packet_count_ < kMinimumSamplesToLogEstimatedClockDrift) { packet_count_++; } } -absl::optional TimestampExtrapolator::ExtrapolateLocalTime( +std::optional TimestampExtrapolator::ExtrapolateLocalTime( uint32_t timestamp90khz) const { int64_t unwrapped_ts90khz = unwrapper_.PeekUnwrap(timestamp90khz); if (!first_unwrapped_timestamp_) { - return absl::nullopt; - } else if (packet_count_ < kStartUpFilterDelayInPackets) { + return std::nullopt; + } + if (packet_count_ < kStartUpFilterDelayInPackets) { constexpr double kRtpTicksPerMs = 90; TimeDelta diff = TimeDelta::Millis( (unwrapped_ts90khz - *prev_unwrapped_timestamp_) / kRtpTicksPerMs); if (prev_.us() + diff.us() < 0) { // Prevent the construction of a negative Timestamp. // This scenario can occur when the RTP timestamp wraps around. - return absl::nullopt; + return std::nullopt; } return prev_ + diff; - } else if (w_[0] < 1e-3) { + } + if (w_[0] < 1e-3) { return start_; - } else { - double timestampDiff = unwrapped_ts90khz - *first_unwrapped_timestamp_; - TimeDelta diff = TimeDelta::Millis( - static_cast((timestampDiff - w_[1]) / w_[0] + 0.5)); - if (start_.us() + diff.us() < 0) { - // Prevent the construction of a negative Timestamp. - // This scenario can occur when the RTP timestamp wraps around. - return absl::nullopt; - } - return start_ + diff; } + double timestamp_diff = + static_cast(unwrapped_ts90khz - *first_unwrapped_timestamp_); + TimeDelta diff = TimeDelta::Millis( + static_cast((timestamp_diff - w_[1]) / w_[0] + 0.5)); + if (start_.us() + diff.us() < 0) { + // Prevent the construction of a negative Timestamp. + // This scenario can occur when the RTP timestamp wraps around. 
+ return std::nullopt; + } + return start_ + diff; } bool TimestampExtrapolator::DelayChangeDetection(double error) { diff --git a/modules/video_coding/timing/timestamp_extrapolator.h b/modules/video_coding/timing/timestamp_extrapolator.h index 6a9763943e..b1cd0b641d 100644 --- a/modules/video_coding/timing/timestamp_extrapolator.h +++ b/modules/video_coding/timing/timestamp_extrapolator.h @@ -13,7 +13,8 @@ #include -#include "absl/types/optional.h" +#include + #include "api/units/timestamp.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" @@ -23,8 +24,9 @@ namespace webrtc { class TimestampExtrapolator { public: explicit TimestampExtrapolator(Timestamp start); + ~TimestampExtrapolator(); void Update(Timestamp now, uint32_t ts90khz); - absl::optional ExtrapolateLocalTime(uint32_t timestamp90khz) const; + std::optional ExtrapolateLocalTime(uint32_t timestamp90khz) const; void Reset(Timestamp start); private: @@ -35,10 +37,10 @@ class TimestampExtrapolator { double p_[2][2]; Timestamp start_; Timestamp prev_; - absl::optional first_unwrapped_timestamp_; + std::optional first_unwrapped_timestamp_; RtpTimestampUnwrapper unwrapper_; - absl::optional prev_unwrapped_timestamp_; - uint32_t packet_count_; + std::optional prev_unwrapped_timestamp_; + int packet_count_; double detector_accumulator_pos_; double detector_accumulator_neg_; }; diff --git a/modules/video_coding/timing/timestamp_extrapolator_unittest.cc b/modules/video_coding/timing/timestamp_extrapolator_unittest.cc index d6c8fa9de1..e0367a3026 100644 --- a/modules/video_coding/timing/timestamp_extrapolator_unittest.cc +++ b/modules/video_coding/timing/timestamp_extrapolator_unittest.cc @@ -12,13 +12,16 @@ #include +#include #include +#include +#include -#include "absl/types/optional.h" #include "api/units/frequency.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "system_wrappers/include/clock.h" +#include "system_wrappers/include/metrics.h" #include "test/gmock.h" #include "test/gtest.h" @@ -40,7 +43,7 @@ TEST(TimestampExtrapolatorTest, ExtrapolationOccursAfter2Packets) { TimestampExtrapolator ts_extrapolator(clock.CurrentTime()); // No packets so no timestamp. - EXPECT_THAT(ts_extrapolator.ExtrapolateLocalTime(90000), Eq(absl::nullopt)); + EXPECT_THAT(ts_extrapolator.ExtrapolateLocalTime(90000), Eq(std::nullopt)); uint32_t rtp = 90000; clock.AdvanceTime(k25FpsDelay); @@ -143,7 +146,7 @@ TEST(TimestampExtrapolatorTest, NegativeRtpTimestampWrapAroundSecondScenario) { // Go backwards! Static cast to avoid undefined behaviour with -=. rtp -= static_cast(kRtpHz * TimeDelta::Seconds(10)); ts_extrapolator.Update(clock.CurrentTime(), rtp); - EXPECT_THAT(ts_extrapolator.ExtrapolateLocalTime(rtp), absl::nullopt); + EXPECT_THAT(ts_extrapolator.ExtrapolateLocalTime(rtp), std::nullopt); } TEST(TimestampExtrapolatorTest, Slow90KHzClock) { @@ -154,14 +157,13 @@ TEST(TimestampExtrapolatorTest, Slow90KHzClock) { constexpr TimeDelta k24FpsDelay = 1 / Frequency::Hertz(24); uint32_t rtp = 90000; - ts_extrapolator.Update(clock.CurrentTime(), rtp); // Slow camera will increment RTP at 25 FPS rate even though its producing at // 24 FPS. After 25 frames the extrapolator should settle at this rate. 
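// [Illustrative arithmetic for the comment above, not a statement of the
// test's exact expectations.] With RTP advancing 90000 / 25 = 3600 ticks per
// frame but frames arriving every 1/24 s (~41.67 ms), the extrapolator's
// slope w_[0] settles near 3600 / 41.67 ~= 86.4 ticks/ms instead of the
// nominal 90, so 90000 RTP ticks map to roughly 90000 / 86.4 ~= 1042 ms of
// local time (25 frames at 24 fps) rather than 1000 ms.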
for (int i = 0; i < 25; ++i) { + ts_extrapolator.Update(clock.CurrentTime(), rtp); rtp += kRtpHz / k25Fps; clock.AdvanceTime(k24FpsDelay); - ts_extrapolator.Update(clock.CurrentTime(), rtp); } // The camera would normally produce 25 frames in 90K ticks, but is slow @@ -181,14 +183,13 @@ TEST(TimestampExtrapolatorTest, Fast90KHzClock) { constexpr TimeDelta k26FpsDelay = 1 / Frequency::Hertz(26); uint32_t rtp = 90000; - ts_extrapolator.Update(clock.CurrentTime(), rtp); // Fast camera will increment RTP at 25 FPS rate even though its producing at // 26 FPS. After 25 frames the extrapolator should settle at this rate. for (int i = 0; i < 25; ++i) { + ts_extrapolator.Update(clock.CurrentTime(), rtp); rtp += kRtpHz / k25Fps; clock.AdvanceTime(k26FpsDelay); - ts_extrapolator.Update(clock.CurrentTime(), rtp); } // The camera would normally produce 25 frames in 90K ticks, but is slow @@ -231,4 +232,92 @@ TEST(TimestampExtrapolatorTest, TimestampJump) { Optional(clock.CurrentTime())); } +TEST(TimestampExtrapolatorTest, GapInReceivedFrames) { + SimulatedClock clock( + Timestamp::Seconds(std::numeric_limits::max() / 90000 - 31)); + TimestampExtrapolator ts_extrapolator(clock.CurrentTime()); + + uint32_t rtp = std::numeric_limits::max(); + clock.AdvanceTime(k25FpsDelay); + ts_extrapolator.Update(clock.CurrentTime(), rtp); + + rtp += 30 * 90000; + clock.AdvanceTime(TimeDelta::Seconds(30)); + ts_extrapolator.Update(clock.CurrentTime(), rtp); + EXPECT_THAT(ts_extrapolator.ExtrapolateLocalTime(rtp), + Optional(clock.CurrentTime())); +} + +TEST(TimestampExtrapolatorTest, EstimatedClockDriftHistogram) { + const std::string kHistogramName = "WebRTC.Video.EstimatedClockDrift_ppm"; + constexpr int kPpmTolerance = 50; + constexpr int kToPpmFactor = 1e6; + constexpr int kMinimumSamples = 3000; + constexpr Frequency k24Fps = Frequency::Hertz(24); + constexpr TimeDelta k24FpsDelay = 1 / k24Fps; + + // This simulates a remote clock without drift with frames produced at 25 fps. + // Local scope to trigger the destructor of TimestampExtrapolator. + { + // Clear all histogram data. + metrics::Reset(); + SimulatedClock clock(Timestamp::Millis(1337)); + TimestampExtrapolator ts_extrapolator(clock.CurrentTime()); + + uint32_t rtp = 90000; + for (int i = 0; i < kMinimumSamples; ++i) { + ts_extrapolator.Update(clock.CurrentTime(), rtp); + rtp += kRtpHz / k25Fps; + clock.AdvanceTime(k25FpsDelay); + } + } + EXPECT_EQ(metrics::NumSamples(kHistogramName), 1); + const int kExpectedIdealClockDriftPpm = 0; + EXPECT_NEAR(kExpectedIdealClockDriftPpm, metrics::MinSample(kHistogramName), + kPpmTolerance); + + // This simulates a slow remote clock, where the RTP timestamps are + // incremented as if the camera was 25 fps even though frames arrive at 24 + // fps. Local scope to trigger the destructor of TimestampExtrapolator. + { + // Clear all histogram data. 
+ metrics::Reset(); + SimulatedClock clock(Timestamp::Millis(1337)); + TimestampExtrapolator ts_extrapolator(clock.CurrentTime()); + + uint32_t rtp = 90000; + for (int i = 0; i < kMinimumSamples; ++i) { + ts_extrapolator.Update(clock.CurrentTime(), rtp); + rtp += kRtpHz / k25Fps; + clock.AdvanceTime(k24FpsDelay); + } + } + EXPECT_EQ(metrics::NumSamples(kHistogramName), 1); + const int kExpectedSlowClockDriftPpm = + std::abs(k24Fps / k25Fps - 1.0) * kToPpmFactor; + EXPECT_NEAR(kExpectedSlowClockDriftPpm, metrics::MinSample(kHistogramName), + kPpmTolerance); + + // This simulates a fast remote clock, where the RTP timestamps are + // incremented as if the camera was 24 fps even though frames arrive at 25 + // fps. Local scope to trigger the destructor of TimestampExtrapolator. + { + // Clear all histogram data. + metrics::Reset(); + SimulatedClock clock(Timestamp::Millis(1337)); + TimestampExtrapolator ts_extrapolator(clock.CurrentTime()); + + uint32_t rtp = 90000; + for (int i = 0; i < kMinimumSamples; ++i) { + ts_extrapolator.Update(clock.CurrentTime(), rtp); + rtp += kRtpHz / k24Fps; + clock.AdvanceTime(k25FpsDelay); + } + } + EXPECT_EQ(metrics::NumSamples(kHistogramName), 1); + const int kExpectedFastClockDriftPpm = (k25Fps / k24Fps - 1.0) * kToPpmFactor; + EXPECT_NEAR(kExpectedFastClockDriftPpm, metrics::MinSample(kHistogramName), + kPpmTolerance); +} + } // namespace webrtc diff --git a/modules/video_coding/timing/timing.cc b/modules/video_coding/timing/timing.cc index 735f6328d8..302cc7ffa5 100644 --- a/modules/video_coding/timing/timing.cc +++ b/modules/video_coding/timing/timing.cc @@ -11,12 +11,21 @@ #include "modules/video_coding/timing/timing.h" #include +#include +#include +#include +#include "api/field_trials_view.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/video_frame.h" +#include "api/video/video_timing.h" #include "modules/video_coding/timing/decode_time_percentile_filter.h" #include "modules/video_coding/timing/timestamp_extrapolator.h" +#include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -92,12 +101,12 @@ void VCMTiming::set_min_playout_delay(TimeDelta min_playout_delay) { } } -void VCMTiming::set_max_playout_delay(TimeDelta max_playout_delay) { +void VCMTiming::set_playout_delay(const VideoPlayoutDelay& playout_delay) { MutexLock lock(&mutex_); - if (max_playout_delay_ != max_playout_delay) { - CheckDelaysValid(min_playout_delay_, max_playout_delay); - max_playout_delay_ = max_playout_delay; - } + // No need to call `CheckDelaysValid` as the same invariant (min <= max) + // is guaranteed by the `VideoPlayoutDelay` type. + min_playout_delay_ = playout_delay.min(); + max_playout_delay_ = playout_delay.max(); } void VCMTiming::SetJitterDelay(TimeDelta jitter_delay) { @@ -201,8 +210,12 @@ Timestamp VCMTiming::RenderTimeInternal(uint32_t frame_timestamp, } // Note that TimestampExtrapolator::ExtrapolateLocalTime is not a const // method; it mutates the object's wraparound state. 
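// The EstimatedClockDrift_ppm expectations in the tests above reduce to one
// ratio: the relative error between the rate frames actually arrive at and the
// rate implied by their RTP timestamp increments, scaled to parts per million.
// Standalone sketch of that arithmetic (illustrative names, not part of the
// patch):
#include <cmath>
#include <cstdio>

// Drift in ppm between the real frame rate and the RTP-implied frame rate.
double DriftPpm(double actual_fps, double rtp_implied_fps) {
  return std::abs(actual_fps / rtp_implied_fps - 1.0) * 1e6;
}

int main() {
  // Frames delivered at 24 fps but stamped as 25 fps: |24/25 - 1| * 1e6.
  std::printf("slow camera: %.0f ppm\n", DriftPpm(24.0, 25.0));  // 40000
  // Frames delivered at 25 fps but stamped as 24 fps: |25/24 - 1| * 1e6.
  std::printf("fast camera: %.0f ppm\n", DriftPpm(25.0, 24.0));  // ~41667
  return 0;
}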
- Timestamp estimated_complete_time = - ts_extrapolator_->ExtrapolateLocalTime(frame_timestamp).value_or(now); + std::optional local_time = + ts_extrapolator_->ExtrapolateLocalTime(frame_timestamp); + if (!local_time.has_value()) { + return now; + } + Timestamp estimated_complete_time = *local_time; // Make sure the actual delay stays in the range of `min_playout_delay_` // and `max_playout_delay_`. @@ -292,13 +305,13 @@ void VCMTiming::SetTimingFrameInfo(const TimingFrameInfo& info) { timing_frame_info_.emplace(info); } -absl::optional VCMTiming::GetTimingFrameInfo() { +std::optional VCMTiming::GetTimingFrameInfo() { MutexLock lock(&mutex_); return timing_frame_info_; } void VCMTiming::SetMaxCompositionDelayInFrames( - absl::optional max_composition_delay_in_frames) { + std::optional max_composition_delay_in_frames) { MutexLock lock(&mutex_); max_composition_delay_in_frames_ = max_composition_delay_in_frames; } diff --git a/modules/video_coding/timing/timing.h b/modules/video_coding/timing/timing.h index 9e7fb874c7..fe35e8aaf7 100644 --- a/modules/video_coding/timing/timing.h +++ b/modules/video_coding/timing/timing.h @@ -11,11 +11,14 @@ #ifndef MODULES_VIDEO_CODING_TIMING_TIMING_H_ #define MODULES_VIDEO_CODING_TIMING_TIMING_H_ +#include +#include #include +#include -#include "absl/types/optional.h" #include "api/field_trials_view.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/video_frame.h" #include "api/video/video_timing.h" #include "modules/video_coding/timing/decode_time_percentile_filter.h" @@ -73,8 +76,8 @@ class VCMTiming { TimeDelta min_playout_delay() const; void set_min_playout_delay(TimeDelta min_playout_delay); - // Set/get the maximum playout delay from capture to render in ms. - void set_max_playout_delay(TimeDelta max_playout_delay); + // Set the minimum and maximum playout delay from capture to render. + void set_playout_delay(const VideoPlayoutDelay& playout_delay); // Increases or decreases the current delay to get closer to the target delay. // Calculates how long it has been since the previous call to this function, @@ -120,10 +123,10 @@ class VCMTiming { VideoDelayTimings GetTimings() const; void SetTimingFrameInfo(const TimingFrameInfo& info); - absl::optional GetTimingFrameInfo(); + std::optional GetTimingFrameInfo(); void SetMaxCompositionDelayInFrames( - absl::optional max_composition_delay_in_frames); + std::optional max_composition_delay_in_frames); VideoFrame::RenderParameters RenderParameters() const; @@ -156,9 +159,9 @@ class VCMTiming { TimeDelta jitter_delay_ RTC_GUARDED_BY(mutex_); TimeDelta current_delay_ RTC_GUARDED_BY(mutex_); uint32_t prev_frame_timestamp_ RTC_GUARDED_BY(mutex_); - absl::optional timing_frame_info_ RTC_GUARDED_BY(mutex_); + std::optional timing_frame_info_ RTC_GUARDED_BY(mutex_); size_t num_decoded_frames_ RTC_GUARDED_BY(mutex_); - absl::optional max_composition_delay_in_frames_ RTC_GUARDED_BY(mutex_); + std::optional max_composition_delay_in_frames_ RTC_GUARDED_BY(mutex_); // Set by the field trial WebRTC-ZeroPlayoutDelay. The parameter min_pacing // determines the minimum delay between frames scheduled for decoding that is // used when min playout delay=0 and max playout delay>=0. 
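// set_playout_delay() above drops the CheckDelaysValid() call because the
// min <= max invariant is owned by the VideoPlayoutDelay type itself. A toy
// illustration of that invariant-by-construction idea (this is not the real
// webrtc::VideoPlayoutDelay, only a sketch under that assumption):
#include <algorithm>
#include <cassert>
#include <cstdint>

class PlayoutDelaySketch {
 public:
  // Plain milliseconds to keep the sketch free of WebRTC types.
  PlayoutDelaySketch(int64_t min_ms, int64_t max_ms)
      : min_ms_(min_ms), max_ms_(std::max(min_ms, max_ms)) {}

  int64_t min_ms() const { return min_ms_; }
  int64_t max_ms() const { return max_ms_; }  // Always >= min_ms().

 private:
  int64_t min_ms_;
  int64_t max_ms_;
};

int main() {
  // A caller can no longer hand the timing code an inverted min/max pair.
  PlayoutDelaySketch delay(/*min_ms=*/40, /*max_ms=*/10);
  assert(delay.min_ms() <= delay.max_ms());
  return 0;
}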
diff --git a/modules/video_coding/timing/timing_unittest.cc b/modules/video_coding/timing/timing_unittest.cc index 4ba8c4dcd2..61ec9bfb9e 100644 --- a/modules/video_coding/timing/timing_unittest.cc +++ b/modules/video_coding/timing/timing_unittest.cc @@ -10,8 +10,11 @@ #include "modules/video_coding/timing/timing.h" +#include + #include "api/units/frequency.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "system_wrappers/include/clock.h" #include "test/gmock.h" #include "test/gtest.h" @@ -38,30 +41,37 @@ MATCHER(HasConsistentVideoDelayTimings, "") { // Delays should be internally consistent. bool m1 = arg.minimum_delay <= arg.target_delay; if (!m1) { - *result_listener << "\nminimum_delay: " << arg.minimum_delay << ", " - << "target_delay: " << arg.target_delay << "\n"; + *result_listener << "\nminimum_delay: " << ToString(arg.minimum_delay) + << ", " << "target_delay: " << ToString(arg.target_delay) + << "\n"; } bool m2 = arg.minimum_delay <= arg.current_delay; if (!m2) { - *result_listener << "\nminimum_delay: " << arg.minimum_delay << ", " - << "current_delay: " << arg.current_delay; + *result_listener << "\nminimum_delay: " << ToString(arg.minimum_delay) + << ", " + << "current_delay: " << ToString(arg.current_delay); } bool m3 = arg.target_delay >= arg.min_playout_delay; if (!m3) { - *result_listener << "\ntarget_delay: " << arg.target_delay << ", " - << "min_playout_delay: " << arg.min_playout_delay << "\n"; + *result_listener << "\ntarget_delay: " << ToString(arg.target_delay) << ", " + << "min_playout_delay: " << ToString(arg.min_playout_delay) + << "\n"; } // TODO(crbug.com/webrtc/15197): Uncomment when this is guaranteed. // bool m4 = arg.target_delay <= arg.max_playout_delay; bool m5 = arg.current_delay >= arg.min_playout_delay; if (!m5) { - *result_listener << "\ncurrent_delay: " << arg.current_delay << ", " - << "min_playout_delay: " << arg.min_playout_delay << "\n"; + *result_listener << "\ncurrent_delay: " << ToString(arg.current_delay) + << ", " + << "min_playout_delay: " << ToString(arg.min_playout_delay) + << "\n"; } bool m6 = arg.current_delay <= arg.max_playout_delay; if (!m6) { - *result_listener << "\ncurrent_delay: " << arg.current_delay << ", " - << "max_playout_delay: " << arg.max_playout_delay << "\n"; + *result_listener << "\ncurrent_delay: " << ToString(arg.current_delay) + << ", " + << "max_playout_delay: " << ToString(arg.max_playout_delay) + << "\n"; } bool m = m1 && m2 && m3 && m5 && m6; @@ -196,20 +206,19 @@ TEST(VCMTimingTest, UseLowLatencyRenderer) { // Default is false. EXPECT_FALSE(timing.RenderParameters().use_low_latency_rendering); // False if min playout delay > 0. - timing.set_min_playout_delay(TimeDelta::Millis(10)); - timing.set_max_playout_delay(TimeDelta::Millis(20)); + timing.set_playout_delay({TimeDelta::Millis(10), TimeDelta::Millis(20)}); EXPECT_FALSE(timing.RenderParameters().use_low_latency_rendering); // True if min==0, max > 0. - timing.set_min_playout_delay(TimeDelta::Zero()); + timing.set_playout_delay({TimeDelta::Zero(), TimeDelta::Millis(20)}); EXPECT_TRUE(timing.RenderParameters().use_low_latency_rendering); // True if min==max==0. - timing.set_max_playout_delay(TimeDelta::Zero()); + timing.set_playout_delay({TimeDelta::Zero(), TimeDelta::Zero()}); EXPECT_TRUE(timing.RenderParameters().use_low_latency_rendering); // True also for max playout delay==500 ms. 
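// The checks in UseLowLatencyRenderer around this point pin down when
// low-latency rendering is enabled: the minimum playout delay must be zero and
// the maximum at most 500 ms. Standalone sketch of that predicate as these
// checks exercise it (illustrative name, not the VCMTiming implementation):
#include <cstdio>

bool UseLowLatencyRenderingSketch(int min_playout_delay_ms,
                                  int max_playout_delay_ms) {
  return min_playout_delay_ms == 0 && max_playout_delay_ms <= 500;
}

int main() {
  std::printf("%d %d %d %d\n",
              UseLowLatencyRenderingSketch(10, 20),   // 0: min > 0.
              UseLowLatencyRenderingSketch(0, 0),     // 1: min == max == 0.
              UseLowLatencyRenderingSketch(0, 500),   // 1: max == 500 ms.
              UseLowLatencyRenderingSketch(0, 501));  // 0: max > 500 ms.
  return 0;
}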
- timing.set_max_playout_delay(TimeDelta::Millis(500)); + timing.set_playout_delay({TimeDelta::Zero(), TimeDelta::Millis(500)}); EXPECT_TRUE(timing.RenderParameters().use_low_latency_rendering); // False if max playout delay > 500 ms. - timing.set_max_playout_delay(TimeDelta::Millis(501)); + timing.set_playout_delay({TimeDelta::Zero(), TimeDelta::Millis(501)}); EXPECT_FALSE(timing.RenderParameters().use_low_latency_rendering); EXPECT_THAT(timing.GetTimings(), HasConsistentVideoDelayTimings()); @@ -225,7 +234,7 @@ TEST(VCMTimingTest, MaxWaitingTimeIsZeroForZeroRenderTime) { test::ScopedKeyValueConfig field_trials; VCMTiming timing(&clock, field_trials); timing.Reset(); - timing.set_max_playout_delay(TimeDelta::Zero()); + timing.set_playout_delay({TimeDelta::Zero(), TimeDelta::Zero()}); for (int i = 0; i < 10; ++i) { clock.AdvanceTime(kTimeDelta); Timestamp now = clock.CurrentTime(); @@ -409,9 +418,8 @@ TEST(VCMTimingTest, GetTimings) { TimeDelta render_delay = TimeDelta::Millis(11); timing.set_render_delay(render_delay); TimeDelta min_playout_delay = TimeDelta::Millis(50); - timing.set_min_playout_delay(min_playout_delay); TimeDelta max_playout_delay = TimeDelta::Millis(500); - timing.set_max_playout_delay(max_playout_delay); + timing.set_playout_delay({min_playout_delay, max_playout_delay}); // On complete. timing.IncomingTimestamp(3000, clock.CurrentTime()); @@ -442,4 +450,45 @@ TEST(VCMTimingTest, GetTimings) { EXPECT_THAT(timings, HasConsistentVideoDelayTimings()); } +TEST(VCMTimingTest, GetTimingsBeforeAndAfterValidRtpTimestamp) { + SimulatedClock clock(33); + test::ScopedKeyValueConfig field_trials; + VCMTiming timing(&clock, field_trials); + + // Setup. + TimeDelta min_playout_delay = TimeDelta::Millis(50); + timing.set_playout_delay({min_playout_delay, TimeDelta::Millis(500)}); + + // On decodable frames before valid rtp timestamp. + constexpr int decodeable_frame_cnt = 10; + constexpr uint32_t any_time_elapsed = 17; + constexpr uint32_t rtp_ts_base = 3000; + constexpr uint32_t rtp_ts_delta_10fps = 9000; + constexpr uint32_t frame_ts_delta_10fps = 100; + uint32_t rtp_ts = rtp_ts_base; + + for (int i = 0; i < decodeable_frame_cnt; i++) { + clock.AdvanceTimeMilliseconds(any_time_elapsed); + rtp_ts += rtp_ts_delta_10fps; + + Timestamp render_time = timing.RenderTime(rtp_ts, clock.CurrentTime()); + // Render time should be CurrentTime, because timing.IncomingTimestamp has + // not been called yet. + EXPECT_EQ(render_time, clock.CurrentTime()); + } + + // On frame complete, which one not 'metadata.delayed_by_retransmission' + Timestamp valid_frame_ts = clock.CurrentTime(); + timing.IncomingTimestamp(rtp_ts, valid_frame_ts); + + clock.AdvanceTimeMilliseconds(any_time_elapsed); + rtp_ts += rtp_ts_delta_10fps; + + Timestamp render_time = timing.RenderTime(rtp_ts, clock.CurrentTime()); + // Render time should be relative to the latest valid frame timestamp. 
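// The expectation that follows encodes the basic render-time arithmetic for
// this test: once IncomingTimestamp() has anchored an RTP timestamp to a
// wall-clock time, a later frame's render time is that anchor plus the RTP
// delta converted at 90 kHz, plus the minimum playout delay (jitter, decode
// and render delays are zero here). Standalone sketch (illustrative names, not
// the VCMTiming implementation):
#include <cstdint>
#include <cstdio>

int64_t RenderTimeMsSketch(int64_t anchor_wall_ms, uint32_t anchor_rtp,
                           uint32_t frame_rtp, int64_t min_playout_delay_ms) {
  // 90 kHz RTP clock: 90 ticks per millisecond.
  int64_t rtp_delta_ms = static_cast<int64_t>(frame_rtp - anchor_rtp) / 90;
  return anchor_wall_ms + rtp_delta_ms + min_playout_delay_ms;
}

int main() {
  // Mirrors the test: a 9000-tick step (100 ms at 10 fps) and a 50 ms minimum
  // playout delay on top of the anchored wall-clock time.
  std::printf("%d\n", static_cast<int>(RenderTimeMsSketch(
                          /*anchor_wall_ms=*/1000, /*anchor_rtp=*/93000,
                          /*frame_rtp=*/102000, /*min_playout_delay_ms=*/50)));
  // Prints 1150 = 1000 + 100 + 50.
  return 0;
}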
+ EXPECT_EQ(render_time, valid_frame_ts + + TimeDelta::Millis(frame_ts_delta_10fps) + + min_playout_delay); +} + } // namespace webrtc diff --git a/modules/video_coding/utility/bandwidth_quality_scaler.cc b/modules/video_coding/utility/bandwidth_quality_scaler.cc index 13502a142b..694a289139 100644 --- a/modules/video_coding/utility/bandwidth_quality_scaler.cc +++ b/modules/video_coding/utility/bandwidth_quality_scaler.cc @@ -10,18 +10,17 @@ #include "modules/video_coding/utility/bandwidth_quality_scaler.h" -#include -#include -#include +#include +#include #include -#include "api/video/video_adaptation_reason.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/video/video_codec_type.h" #include "api/video_codecs/video_encoder.h" #include "rtc_base/checks.h" -#include "rtc_base/experiments/bandwidth_quality_scaler_settings.h" -#include "rtc_base/logging.h" -#include "rtc_base/numerics/exp_filter.h" -#include "rtc_base/time_utils.h" +#include "rtc_base/experiments/encoder_info_settings.h" +#include "rtc_base/rate_statistics.h" #include "rtc_base/weak_ptr.h" namespace webrtc { @@ -31,16 +30,11 @@ namespace { constexpr int kDefaultMaxWindowSizeMs = 5000; constexpr float kHigherMaxBitrateTolerationFactor = 0.95; constexpr float kLowerMinBitrateTolerationFactor = 0.8; -constexpr int kDefaultBitrateStateUpdateIntervalSeconds = 5; } // namespace BandwidthQualityScaler::BandwidthQualityScaler( BandwidthQualityScalerUsageHandlerInterface* handler) - : kBitrateStateUpdateInterval(TimeDelta::Seconds( - BandwidthQualityScalerSettings::ParseFromFieldTrials() - .BitrateStateUpdateInterval() - .value_or(kDefaultBitrateStateUpdateIntervalSeconds))), - handler_(handler), + : handler_(handler), encoded_bitrate_(kDefaultMaxWindowSizeMs, RateStatistics::kBpsScale), weak_ptr_factory_(this) { RTC_DCHECK_RUN_ON(&task_checker_); @@ -98,10 +92,12 @@ void BandwidthQualityScaler::ReportEncodeInfo(int frame_size_bytes, void BandwidthQualityScaler::SetResolutionBitrateLimits( const std::vector& - resolution_bitrate_limits) { + resolution_bitrate_limits, + VideoCodecType codec_type) { if (resolution_bitrate_limits.empty()) { - resolution_bitrate_limits_ = EncoderInfoSettings:: - GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted(); + resolution_bitrate_limits_ = + EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted( + codec_type); } else { resolution_bitrate_limits_ = resolution_bitrate_limits; } @@ -115,13 +111,13 @@ BandwidthQualityScaler::CheckBitrate() { return BandwidthQualityScaler::CheckBitrateResult::kInsufficientSamples; } - absl::optional current_bitrate_bps = + std::optional current_bitrate_bps = encoded_bitrate_.Rate(last_time_sent_in_ms_.value()); if (!current_bitrate_bps.has_value()) { // We can't get a valid bitrate due to not enough data points. 
return BandwidthQualityScaler::CheckBitrateResult::kInsufficientSamples; } - absl::optional suitable_bitrate_limit = + std::optional suitable_bitrate_limit = EncoderInfoSettings:: GetSinglecastBitrateLimitForResolutionWhenQpIsUntrusted( last_frame_size_pixels_, resolution_bitrate_limits_); diff --git a/modules/video_coding/utility/bandwidth_quality_scaler.h b/modules/video_coding/utility/bandwidth_quality_scaler.h index 7cd1de0dd2..65b44ff038 100644 --- a/modules/video_coding/utility/bandwidth_quality_scaler.h +++ b/modules/video_coding/utility/bandwidth_quality_scaler.h @@ -14,19 +14,16 @@ #include #include -#include +#include #include -#include "absl/types/optional.h" -#include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/units/time_delta.h" +#include "api/video/video_codec_type.h" #include "api/video_codecs/video_encoder.h" -#include "rtc_base/experiments/encoder_info_settings.h" -#include "rtc_base/logging.h" -#include "rtc_base/numerics/exp_filter.h" #include "rtc_base/rate_statistics.h" -#include "rtc_base/ref_count.h" #include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/weak_ptr.h" namespace webrtc { @@ -46,6 +43,9 @@ class BandwidthQualityScalerUsageHandlerInterface { // stream down or up). class BandwidthQualityScaler { public: + static constexpr TimeDelta kBitrateStateUpdateInterval = + TimeDelta::Seconds(5); + explicit BandwidthQualityScaler( BandwidthQualityScalerUsageHandlerInterface* handler); virtual ~BandwidthQualityScaler(); @@ -55,14 +55,13 @@ class BandwidthQualityScaler { uint32_t encoded_width, uint32_t encoded_height); - // We prioritise to using the |resolution_bitrate_limits| provided by the - // current decoder. If not provided, we will use the default data by + // We prioritize the |resolution_bitrate_limits| provided by the + // current encoder. If not provided, we will use the default data by // GetDefaultResolutionBitrateLimits(). 
void SetResolutionBitrateLimits( const std::vector& - resolution_bitrate_limits); - - const TimeDelta kBitrateStateUpdateInterval; + resolution_bitrate_limits, + VideoCodecType codec_type); private: enum class CheckBitrateResult { @@ -81,10 +80,10 @@ class BandwidthQualityScaler { BandwidthQualityScalerUsageHandlerInterface* const handler_ RTC_GUARDED_BY(&task_checker_); - absl::optional last_time_sent_in_ms_ RTC_GUARDED_BY(&task_checker_); + std::optional last_time_sent_in_ms_ RTC_GUARDED_BY(&task_checker_); RateStatistics encoded_bitrate_ RTC_GUARDED_BY(&task_checker_); - absl::optional last_frame_size_pixels_ RTC_GUARDED_BY(&task_checker_); - rtc::WeakPtrFactory weak_ptr_factory_; + std::optional last_frame_size_pixels_ RTC_GUARDED_BY(&task_checker_); + WeakPtrFactory weak_ptr_factory_; std::vector resolution_bitrate_limits_; }; diff --git a/modules/video_coding/utility/bandwidth_quality_scaler_unittest.cc b/modules/video_coding/utility/bandwidth_quality_scaler_unittest.cc index 4e2c759707..e54b597d0b 100644 --- a/modules/video_coding/utility/bandwidth_quality_scaler_unittest.cc +++ b/modules/video_coding/utility/bandwidth_quality_scaler_unittest.cc @@ -11,23 +11,27 @@ #include "modules/video_coding/utility/bandwidth_quality_scaler.h" #include -#include +#include +#include +#include "api/task_queue/task_queue_factory.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/video_encoder.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/experiments/encoder_info_settings.h" #include "rtc_base/task_queue_for_test.h" -#include "rtc_base/time_utils.h" -#include "test/field_trial.h" #include "test/gtest.h" +#include "test/time_controller/simulated_time_controller.h" namespace webrtc { namespace { constexpr int kFramerateFps = 30; -constexpr TimeDelta kDefaultBitrateStateUpdateInterval = TimeDelta::Seconds(5); constexpr TimeDelta kDefaultEncodeTime = TimeDelta::Seconds(1) / kFramerateFps; +constexpr TimeDelta kWaitTime = TimeDelta::Millis(200); } // namespace @@ -45,25 +49,12 @@ class FakeBandwidthQualityScalerHandler event_.Set(); } - rtc::Event event_; + Event event_; int adapt_up_event_count_ = 0; int adapt_down_event_count_ = 0; }; -class BandwidthQualityScalerUnderTest : public BandwidthQualityScaler { - public: - explicit BandwidthQualityScalerUnderTest( - BandwidthQualityScalerUsageHandlerInterface* handler) - : BandwidthQualityScaler(handler) {} - - int GetBitrateStateUpdateIntervalMs() { - return this->kBitrateStateUpdateInterval.ms() + 200; - } -}; - -class BandwidthQualityScalerTest - : public ::testing::Test, - public ::testing::WithParamInterface { +class BandwidthQualityScalerTest : public ::testing::Test { protected: enum ScaleDirection { kKeepScaleNormalBandwidth, @@ -93,17 +84,19 @@ class BandwidthQualityScalerTest int actual_height; }; - BandwidthQualityScalerTest() - : scoped_field_trial_(GetParam()), - task_queue_("BandwidthQualityScalerTestQueue"), - handler_(std::make_unique()) { + explicit BandwidthQualityScalerTest(VideoCodecType codec_type) + : task_queue_(time_controller_.GetTaskQueueFactory()->CreateTaskQueue( + "BandwidthQualityScalerTestQueue", + TaskQueueFactory::Priority::NORMAL)), + handler_(std::make_unique()), + codec_type_(codec_type) { task_queue_.SendTask([this] { bandwidth_quality_scaler_ = - std::unique_ptr( - new BandwidthQualityScalerUnderTest(handler_.get())); + std::make_unique(handler_.get()); 
bandwidth_quality_scaler_->SetResolutionBitrateLimits( EncoderInfoSettings:: - GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted()); + GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted(codec_type_), + codec_type_); // Only for testing. Set first_timestamp_ in RateStatistics to 0. bandwidth_quality_scaler_->ReportEncodeInfo(0, 0, 0, 0); }); @@ -138,63 +131,71 @@ class BandwidthQualityScalerTest return -1; } - absl::optional + std::optional GetDefaultSuitableBitrateLimit(int frame_size_pixels) { return EncoderInfoSettings:: GetSinglecastBitrateLimitForResolutionWhenQpIsUntrusted( frame_size_pixels, EncoderInfoSettings:: - GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted()); + GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted( + codec_type_)); } void TriggerBandwidthQualityScalerTest( const std::vector& frame_configs) { - task_queue_.SendTask([frame_configs, this] { - RTC_CHECK(!frame_configs.empty()); + RTC_CHECK(!frame_configs.empty()); - int total_frame_nums = 0; - for (const FrameConfig& frame_config : frame_configs) { - total_frame_nums += frame_config.frame_num; - } + int total_frame_nums = 0; + for (const FrameConfig& frame_config : frame_configs) { + total_frame_nums += frame_config.frame_num; + } - EXPECT_EQ(kFramerateFps * kDefaultBitrateStateUpdateInterval.seconds(), - total_frame_nums); - - uint32_t time_send_to_scaler_ms_ = rtc::TimeMillis(); - for (size_t i = 0; i < frame_configs.size(); ++i) { - const FrameConfig& config = frame_configs[i]; - absl::optional suitable_bitrate = - GetDefaultSuitableBitrateLimit(config.actual_width * - config.actual_height); - EXPECT_TRUE(suitable_bitrate); - for (int j = 0; j <= config.frame_num; ++j) { - time_send_to_scaler_ms_ += kDefaultEncodeTime.ms(); - int frame_size_bytes = - GetFrameSizeBytes(config, suitable_bitrate.value()); - RTC_CHECK(frame_size_bytes > 0); - bandwidth_quality_scaler_->ReportEncodeInfo( - frame_size_bytes, time_send_to_scaler_ms_, config.actual_width, - config.actual_height); - } + EXPECT_EQ(kFramerateFps * + BandwidthQualityScaler::kBitrateStateUpdateInterval.seconds(), + total_frame_nums); + + TimeDelta delay = TimeDelta::Zero(); + int num_delayed_tasks = 0; + for (const FrameConfig& config : frame_configs) { + std::optional suitable_bitrate = + GetDefaultSuitableBitrateLimit(config.actual_width * + config.actual_height); + EXPECT_TRUE(suitable_bitrate); + for (int j = 0; j <= config.frame_num; ++j) { + delay += kDefaultEncodeTime; + int frame_size_bytes = GetFrameSizeBytes(config, *suitable_bitrate); + RTC_CHECK_GT(frame_size_bytes, 0); + ++num_delayed_tasks; + task_queue_.PostDelayedTask( + [frame_size_bytes, config, &num_delayed_tasks, this] { + bandwidth_quality_scaler_->ReportEncodeInfo( + frame_size_bytes, + time_controller_.GetClock()->CurrentTime().ms(), + config.actual_width, config.actual_height); + --num_delayed_tasks; + }, + delay); } - }); + } + time_controller_.AdvanceTime(delay); + ASSERT_TRUE(time_controller_.Wait([&] { return num_delayed_tasks == 0; })); } - test::ScopedFieldTrials scoped_field_trial_; + GlobalSimulatedTimeController time_controller_{Timestamp::Seconds(1234)}; TaskQueueForTest task_queue_; - std::unique_ptr bandwidth_quality_scaler_; + std::unique_ptr bandwidth_quality_scaler_; std::unique_ptr handler_; + VideoCodecType codec_type_; }; -INSTANTIATE_TEST_SUITE_P( - FieldTrials, - BandwidthQualityScalerTest, - ::testing::Values("WebRTC-Video-BandwidthQualityScalerSettings/" - "bitrate_state_update_interval_s_:1/", - "WebRTC-Video-BandwidthQualityScalerSettings/" - 
"bitrate_state_update_interval_s_:2/")); +class BandwidthQualityScalerTests + : public BandwidthQualityScalerTest, + public ::testing::WithParamInterface { + protected: + BandwidthQualityScalerTests() : BandwidthQualityScalerTest(GetParam()) {} +}; -TEST_P(BandwidthQualityScalerTest, AllNormalFrame_640x360) { +TEST_P(BandwidthQualityScalerTests, AllNormalFrame_640x360) { const std::vector frame_configs{ FrameConfig(150, FrameType::kNormalFrame, 640, 360)}; TriggerBandwidthQualityScalerTest(frame_configs); @@ -202,13 +203,12 @@ TEST_P(BandwidthQualityScalerTest, AllNormalFrame_640x360) { // When resolution is 640*360, experimental working bitrate range is // [500000,800000] bps. Encoded bitrate is 654253, so it falls in the range // without any operation(up/down). - EXPECT_FALSE(handler_->event_.Wait(TimeDelta::Millis( - bandwidth_quality_scaler_->GetBitrateStateUpdateIntervalMs()))); + EXPECT_FALSE(handler_->event_.Wait(kWaitTime)); EXPECT_EQ(0, handler_->adapt_down_event_count_); EXPECT_EQ(0, handler_->adapt_up_event_count_); } -TEST_P(BandwidthQualityScalerTest, AllNoramlFrame_AboveMaxBandwidth_640x360) { +TEST_P(BandwidthQualityScalerTests, AllNormalFrame_AboveMaxBandwidth_640x360) { const std::vector frame_configs{ FrameConfig(150, FrameType::kNormalFrame_Overuse, 640, 360)}; TriggerBandwidthQualityScalerTest(frame_configs); @@ -216,13 +216,12 @@ TEST_P(BandwidthQualityScalerTest, AllNoramlFrame_AboveMaxBandwidth_640x360) { // When resolution is 640*360, experimental working bitrate range is // [500000,800000] bps. Encoded bitrate is 1208000 > 800000 * 0.95, so it // triggers adapt_up_event_count_. - EXPECT_TRUE(handler_->event_.Wait(TimeDelta::Millis( - bandwidth_quality_scaler_->GetBitrateStateUpdateIntervalMs()))); + EXPECT_TRUE(handler_->event_.Wait(kWaitTime)); EXPECT_EQ(0, handler_->adapt_down_event_count_); EXPECT_EQ(1, handler_->adapt_up_event_count_); } -TEST_P(BandwidthQualityScalerTest, AllNormalFrame_Underuse_640x360) { +TEST_P(BandwidthQualityScalerTests, AllNormalFrame_Underuse_640x360) { const std::vector frame_configs{ FrameConfig(150, FrameType::kNormalFrame_Underuse, 640, 360)}; TriggerBandwidthQualityScalerTest(frame_configs); @@ -230,13 +229,12 @@ TEST_P(BandwidthQualityScalerTest, AllNormalFrame_Underuse_640x360) { // When resolution is 640*360, experimental working bitrate range is // [500000,800000] bps. Encoded bitrate is 377379 < 500000 * 0.8, so it // triggers adapt_down_event_count_. - EXPECT_TRUE(handler_->event_.Wait(TimeDelta::Millis( - bandwidth_quality_scaler_->GetBitrateStateUpdateIntervalMs()))); + EXPECT_TRUE(handler_->event_.Wait(kWaitTime)); EXPECT_EQ(1, handler_->adapt_down_event_count_); EXPECT_EQ(0, handler_->adapt_up_event_count_); } -TEST_P(BandwidthQualityScalerTest, FixedFrameTypeTest1_640x360) { +TEST_P(BandwidthQualityScalerTests, FixedFrameTypeTest1_640x360) { const std::vector frame_configs{ FrameConfig(5, FrameType::kNormalFrame_Underuse, 640, 360), FrameConfig(110, FrameType::kNormalFrame, 640, 360), @@ -248,13 +246,12 @@ TEST_P(BandwidthQualityScalerTest, FixedFrameTypeTest1_640x360) { // When resolution is 640*360, experimental working bitrate range is // [500000,800000] bps. Encoded bitrate is 1059462 > 800000 * 0.95, so it // triggers adapt_up_event_count_. 
- EXPECT_TRUE(handler_->event_.Wait(TimeDelta::Millis( - bandwidth_quality_scaler_->GetBitrateStateUpdateIntervalMs()))); + EXPECT_TRUE(handler_->event_.Wait(kWaitTime)); EXPECT_EQ(0, handler_->adapt_down_event_count_); EXPECT_EQ(1, handler_->adapt_up_event_count_); } -TEST_P(BandwidthQualityScalerTest, FixedFrameTypeTest2_640x360) { +TEST_P(BandwidthQualityScalerTests, FixedFrameTypeTest2_640x360) { const std::vector frame_configs{ FrameConfig(10, FrameType::kNormalFrame_Underuse, 640, 360), FrameConfig(50, FrameType::kNormalFrame, 640, 360), @@ -266,10 +263,13 @@ TEST_P(BandwidthQualityScalerTest, FixedFrameTypeTest2_640x360) { // When resolution is 640*360, experimental working bitrate range is // [500000,800000] bps. Encoded bitrate is 1059462 > 800000 * 0.95, so it // triggers adapt_up_event_count_. - EXPECT_TRUE(handler_->event_.Wait(TimeDelta::Millis( - bandwidth_quality_scaler_->GetBitrateStateUpdateIntervalMs()))); + EXPECT_TRUE(handler_->event_.Wait(kWaitTime)); EXPECT_EQ(0, handler_->adapt_down_event_count_); EXPECT_EQ(1, handler_->adapt_up_event_count_); } +INSTANTIATE_TEST_SUITE_P(AllCodecs, + BandwidthQualityScalerTests, + ::testing::Values(kVideoCodecH264, kVideoCodecH265)); + } // namespace webrtc diff --git a/modules/video_coding/utility/corruption_detection_settings_generator.cc b/modules/video_coding/utility/corruption_detection_settings_generator.cc new file mode 100644 index 0000000000..fedf268da9 --- /dev/null +++ b/modules/video_coding/utility/corruption_detection_settings_generator.cc @@ -0,0 +1,129 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/utility/corruption_detection_settings_generator.h" + +#include +#include +#include + +#include "api/video/corruption_detection_filter_settings.h" +#include "rtc_base/checks.h" + +namespace webrtc { +namespace { +void ValidateParameters( + const CorruptionDetectionSettingsGenerator::ErrorThresholds& + default_error_thresholds, + const CorruptionDetectionSettingsGenerator::TransientParameters& + transient_params) { + int offset = transient_params.keyframe_threshold_offset; + RTC_DCHECK_GE(offset, 0); + RTC_DCHECK_LE(offset, 15); + RTC_DCHECK_GE(default_error_thresholds.chroma, 0); + RTC_DCHECK_LE(default_error_thresholds.chroma + offset, 15); + RTC_DCHECK_GE(default_error_thresholds.luma, 0); + RTC_DCHECK_LE(default_error_thresholds.luma + offset, 15); + + RTC_DCHECK_GE(transient_params.max_qp, 0); + RTC_DCHECK_GE(transient_params.keyframe_stddev_offset, 0.0); + RTC_DCHECK_GE(transient_params.keyframe_offset_duration_frames, 0); + RTC_DCHECK_GE(transient_params.large_qp_change_threshold, 0); + RTC_DCHECK_LE(transient_params.large_qp_change_threshold, + transient_params.max_qp); + RTC_DCHECK_GE(transient_params.std_dev_lower_bound, 0.0); + RTC_DCHECK_LE(transient_params.std_dev_lower_bound, 40.0); +} +} // namespace + +CorruptionDetectionSettingsGenerator::CorruptionDetectionSettingsGenerator( + const RationalFunctionParameters& function_params, + const ErrorThresholds& default_error_thresholds, + const TransientParameters& transient_params) + : function_params_(function_params), + error_thresholds_(default_error_thresholds), + transient_params_(transient_params), + frames_since_keyframe_(0) { + ValidateParameters(default_error_thresholds, transient_params); +} + +CorruptionDetectionSettingsGenerator::CorruptionDetectionSettingsGenerator( + const ExponentialFunctionParameters& function_params, + const ErrorThresholds& default_error_thresholds, + const TransientParameters& transient_params) + : function_params_(function_params), + error_thresholds_(default_error_thresholds), + transient_params_(transient_params), + frames_since_keyframe_(0) { + ValidateParameters(default_error_thresholds, transient_params); +} + +CorruptionDetectionFilterSettings CorruptionDetectionSettingsGenerator::OnFrame( + bool is_keyframe, + int qp) { + double std_dev = CalculateStdDev(qp); + int y_err = error_thresholds_.luma; + int uv_err = error_thresholds_.chroma; + + if (is_keyframe || (transient_params_.large_qp_change_threshold > 0 && + std::abs(previous_qp_.value_or(qp) - qp) >= + transient_params_.large_qp_change_threshold)) { + frames_since_keyframe_ = 0; + } + previous_qp_ = qp; + + if (frames_since_keyframe_ <= + transient_params_.keyframe_offset_duration_frames) { + // The progress, from the start at the keyframe at 0.0 to completely back to + // normal at 1.0. + double progress = transient_params_.keyframe_offset_duration_frames == 0 + ? 
1.0 + : (static_cast(frames_since_keyframe_) / + transient_params_.keyframe_offset_duration_frames); + double adjusted_std_dev = + std::min(std_dev + transient_params_.keyframe_stddev_offset, 40.0); + double adjusted_y_err = + std::min(y_err + transient_params_.keyframe_threshold_offset, 15); + double adjusted_uv_err = + std::min(uv_err + transient_params_.keyframe_threshold_offset, 15); + + std_dev = ((1.0 - progress) * adjusted_std_dev) + (progress * std_dev); + y_err = static_cast(((1.0 - progress) * adjusted_y_err) + + (progress * y_err) + 0.5); + uv_err = static_cast(((1.0 - progress) * adjusted_uv_err) + + (progress * uv_err) + 0.5); + } + + ++frames_since_keyframe_; + + std_dev = std::max(std_dev, transient_params_.std_dev_lower_bound); + std_dev = std::min(std_dev, 40.0); + + return CorruptionDetectionFilterSettings{.std_dev = std_dev, + .luma_error_threshold = y_err, + .chroma_error_threshold = uv_err}; +} + +double CorruptionDetectionSettingsGenerator::CalculateStdDev(int qp) const { + if (std::holds_alternative(function_params_)) { + const auto& params = std::get(function_params_); + return (qp * params.numerator_factor) / (qp + params.denumerator_term) + + params.offset; + } + RTC_DCHECK( + std::holds_alternative(function_params_)); + + const auto& params = + std::get(function_params_); + return params.scale * + std::exp(params.exponent_factor * qp - params.exponent_offset); +} + +} // namespace webrtc diff --git a/modules/video_coding/utility/corruption_detection_settings_generator.h b/modules/video_coding/utility/corruption_detection_settings_generator.h new file mode 100644 index 0000000000..53ba16975b --- /dev/null +++ b/modules/video_coding/utility/corruption_detection_settings_generator.h @@ -0,0 +1,92 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_UTILITY_CORRUPTION_DETECTION_SETTINGS_GENERATOR_H_ +#define MODULES_VIDEO_CODING_UTILITY_CORRUPTION_DETECTION_SETTINGS_GENERATOR_H_ + +#include +#include + +#include "api/video/corruption_detection_filter_settings.h" + +namespace webrtc { + +class CorruptionDetectionSettingsGenerator { + public: + // A struct with the parameters for a ration function used to determine the + // standard deviation as function of the qp. It has the form f(qp) = + // (-numerator_factor * qp) / (denumerator_term + qp) + offset. + struct RationalFunctionParameters { + double numerator_factor = 0.0; + double denumerator_term = 0.0; + double offset = 0.0; + }; + + // A struct with the parameters for an exponential function used to determine + // the standard deviation as a function of the qp. It has the form f(qp) = + // scale * std::exp(exponent_factor * qp - exponent_offset). + struct ExponentialFunctionParameters { + double scale = 0.0; + double exponent_factor = 0.0; + double exponent_offset = 0.0; + }; + + // Allowed error thresholds for luma (Y) and chroma (UV) channels. + struct ErrorThresholds { + int luma = 0; + int chroma = 0; + }; + + // Settings relating to transient events like key-frames. + struct TransientParameters { + // The max qp for the codec in use (e.g. 255 for AV1). 
+ int max_qp = 0; + + // Temporary increase to error thresholds on keyframes. + int keyframe_threshold_offset = 0; + // Temporary increase to std dev on keyframes. + double keyframe_stddev_offset = 0.0; + // Fade-out time (in frames) for temporary keyframe offsets. + int keyframe_offset_duration_frames = 0; + + // How many QP points count as a "large change", or 0 to disable. + // A large change will trigger the same compensation as a keyframe. + int large_qp_change_threshold = 0; + + // Don't use a filter kernel smaller than this. + double std_dev_lower_bound = 0.0; + }; + + CorruptionDetectionSettingsGenerator( + const RationalFunctionParameters& function_params, + const ErrorThresholds& default_error_thresholds, + const TransientParameters& transient_params); + CorruptionDetectionSettingsGenerator( + const ExponentialFunctionParameters& function_params, + const ErrorThresholds& default_error_thresholds, + const TransientParameters& transient_params); + + CorruptionDetectionFilterSettings OnFrame(bool is_keyframe, int qp); + + private: + double CalculateStdDev(int qp) const; + + const std::variant + function_params_; + const ErrorThresholds error_thresholds_; + const TransientParameters transient_params_; + + int frames_since_keyframe_; + std::optional previous_qp_; +}; + +} // namespace webrtc + +#endif // MODULES_VIDEO_CODING_UTILITY_CORRUPTION_DETECTION_SETTINGS_GENERATOR_H_ diff --git a/modules/video_coding/utility/corruption_detection_settings_generator_unittest.cc b/modules/video_coding/utility/corruption_detection_settings_generator_unittest.cc new file mode 100644 index 0000000000..7e74b32afc --- /dev/null +++ b/modules/video_coding/utility/corruption_detection_settings_generator_unittest.cc @@ -0,0 +1,230 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/video_coding/utility/corruption_detection_settings_generator.h" + +#include "api/video/corruption_detection_filter_settings.h" +#include "test/gmock.h" +#include "test/gtest.h" + +using ::testing::AllOf; +using ::testing::DoubleEq; +using ::testing::DoubleNear; +using ::testing::Eq; +using ::testing::Field; + +namespace webrtc { + +TEST(CorruptionDetectionSettingsGenerator, ExponentialFunctionStdDev) { + CorruptionDetectionSettingsGenerator settings_generator( + CorruptionDetectionSettingsGenerator::ExponentialFunctionParameters{ + .scale = 0.006, + .exponent_factor = 0.01857465, + .exponent_offset = -4.26470513}, + CorruptionDetectionSettingsGenerator::ErrorThresholds{}, + webrtc::CorruptionDetectionSettingsGenerator::TransientParameters{}); + + // 0.006 * e^(0.01857465 * 20 + 4.26470513) ~= 0.612 + CorruptionDetectionFilterSettings settings = + settings_generator.OnFrame(/*is_keyframe=*/true, /*qp=*/20); + EXPECT_THAT(settings.std_dev, DoubleNear(0.612, 0.01)); + + // 0.006 * e^(0.01857465 * 20 + 4.26470513) ~= 1.886 + settings = settings_generator.OnFrame(/*is_keyframe=*/true, /*qp=*/80); + EXPECT_THAT(settings.std_dev, DoubleNear(1.886, 0.01)); +} + +TEST(CorruptionDetectionSettingsGenerator, ExponentialFunctionThresholds) { + CorruptionDetectionSettingsGenerator settings_generator( + CorruptionDetectionSettingsGenerator::ExponentialFunctionParameters{ + .scale = 0.006, + .exponent_factor = 0.01857465, + .exponent_offset = -4.26470513}, + CorruptionDetectionSettingsGenerator::ErrorThresholds{.luma = 5, + .chroma = 6}, + webrtc::CorruptionDetectionSettingsGenerator::TransientParameters{}); + + CorruptionDetectionFilterSettings settings = + settings_generator.OnFrame(/*is_keyframe=*/true, /*qp=*/20); + EXPECT_EQ(settings.chroma_error_threshold, 6); + EXPECT_EQ(settings.luma_error_threshold, 5); +} + +TEST(CorruptionDetectionSettingsGenerator, RationalFunctionStdDev) { + CorruptionDetectionSettingsGenerator settings_generator( + CorruptionDetectionSettingsGenerator::RationalFunctionParameters{ + .numerator_factor = -5.5, .denumerator_term = -97, .offset = -1}, + CorruptionDetectionSettingsGenerator::ErrorThresholds{}, + webrtc::CorruptionDetectionSettingsGenerator::TransientParameters{}); + + // (20 * -5.5) / (20 - 97) - 1 ~= 0.429 + CorruptionDetectionFilterSettings settings = + settings_generator.OnFrame(/*is_keyframe=*/true, /*qp=*/20); + EXPECT_THAT(settings.std_dev, DoubleNear(0.429, 0.01)); + + // (40 * -5.5) / (40 - 97) - 1 ~= 2.860 + settings = settings_generator.OnFrame(/*is_keyframe=*/true, /*qp=*/40); + EXPECT_THAT(settings.std_dev, DoubleNear(2.860, 0.01)); +} + +TEST(CorruptionDetectionSettingsGenerator, RationalFunctionThresholds) { + CorruptionDetectionSettingsGenerator settings_generator( + CorruptionDetectionSettingsGenerator::RationalFunctionParameters{ + .numerator_factor = -5.5, .denumerator_term = -97, .offset = -1}, + CorruptionDetectionSettingsGenerator::ErrorThresholds{.luma = 5, + .chroma = 6}, + webrtc::CorruptionDetectionSettingsGenerator::TransientParameters{}); + + CorruptionDetectionFilterSettings settings = + settings_generator.OnFrame(/*is_keyframe=*/true, /*qp=*/20); + EXPECT_EQ(settings.chroma_error_threshold, 6); + EXPECT_EQ(settings.luma_error_threshold, 5); +} + +TEST(CorruptionDetectionSettingsGenerator, TransientStdDevOffset) { + CorruptionDetectionSettingsGenerator settings_generator( + // (1 * qp) / (qp - 0) + 1 = 2, for all values of qp. 
+ CorruptionDetectionSettingsGenerator::RationalFunctionParameters{ + .numerator_factor = 1, .denumerator_term = 0, .offset = 1}, + CorruptionDetectionSettingsGenerator::ErrorThresholds{}, + // Two frames with adjusted settings, including the keyframe. + // Adjust the keyframe std_dev by 2. + webrtc::CorruptionDetectionSettingsGenerator::TransientParameters{ + .keyframe_stddev_offset = 2.0, + .keyframe_offset_duration_frames = 2, + }); + + EXPECT_THAT(settings_generator.OnFrame(/*is_keyframe=*/true, /*qp=*/1), + Field(&CorruptionDetectionFilterSettings::std_dev, + DoubleNear(4.0, 0.001))); + + // Second frame has std_dev ofset interpolated halfway between keyframe + // (2.0 + 2.0) and default (2.0) => 3.0 + EXPECT_THAT(settings_generator.OnFrame(/*is_keyframe=*/false, /*qp=*/1), + Field(&CorruptionDetectionFilterSettings::std_dev, + DoubleNear(3.0, 0.001))); + + EXPECT_THAT(settings_generator.OnFrame(/*is_keyframe=*/false, /*qp=*/1), + Field(&CorruptionDetectionFilterSettings::std_dev, + DoubleNear(2.0, 0.001))); + + EXPECT_THAT(settings_generator.OnFrame(/*is_keyframe=*/false, /*qp=*/1), + Field(&CorruptionDetectionFilterSettings::std_dev, + DoubleNear(2.0, 0.001))); +} + +TEST(CorruptionDetectionSettingsGenerator, TransientThresholdOffsets) { + CorruptionDetectionSettingsGenerator settings_generator( + CorruptionDetectionSettingsGenerator::RationalFunctionParameters{ + .numerator_factor = 1, .denumerator_term = 0, .offset = 1}, + CorruptionDetectionSettingsGenerator::ErrorThresholds{.luma = 2, + .chroma = 3}, + // Two frames with adjusted settings, including the keyframe. + // Adjust the error thresholds by 2. + webrtc::CorruptionDetectionSettingsGenerator::TransientParameters{ + .keyframe_threshold_offset = 2, + .keyframe_offset_duration_frames = 2, + }); + + EXPECT_THAT( + settings_generator.OnFrame(/*is_keyframe=*/true, /*qp=*/1), + AllOf(Field(&CorruptionDetectionFilterSettings::chroma_error_threshold, + Eq(5)), + Field(&CorruptionDetectionFilterSettings::luma_error_threshold, + Eq(4)))); + + // Second frame has offset interpolated halfway between keyframe and default. + EXPECT_THAT( + settings_generator.OnFrame(/*is_keyframe=*/false, /*qp=*/1), + AllOf(Field(&CorruptionDetectionFilterSettings::chroma_error_threshold, + Eq(4)), + Field(&CorruptionDetectionFilterSettings::luma_error_threshold, + Eq(3)))); + + EXPECT_THAT( + settings_generator.OnFrame(/*is_keyframe=*/false, /*qp=*/1), + AllOf(Field(&CorruptionDetectionFilterSettings::chroma_error_threshold, + Eq(3)), + Field(&CorruptionDetectionFilterSettings::luma_error_threshold, + Eq(2)))); + + EXPECT_THAT( + settings_generator.OnFrame(/*is_keyframe=*/false, /*qp=*/1), + AllOf(Field(&CorruptionDetectionFilterSettings::chroma_error_threshold, + Eq(3)), + Field(&CorruptionDetectionFilterSettings::luma_error_threshold, + Eq(2)))); +} + +TEST(CorruptionDetectionSettingsGenerator, StdDevUpperBound) { + CorruptionDetectionSettingsGenerator settings_generator( + // (1 * qp) / (qp - 0) + 41 = 42, for all values of qp. + CorruptionDetectionSettingsGenerator::RationalFunctionParameters{ + .numerator_factor = 1, .denumerator_term = 0, .offset = 41}, + CorruptionDetectionSettingsGenerator::ErrorThresholds{}, + webrtc::CorruptionDetectionSettingsGenerator::TransientParameters{}); + + // `std_dev` capped at max 40.0, which is the limit for the protocol. 
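// The std_dev handed out by the generator comes from one of the two qp ->
// std_dev curves implemented in CalculateStdDev() above and is then clamped to
// [std_dev_lower_bound, 40.0], 40.0 being the protocol limit the comment above
// mentions. Standalone sketch using the parameter values from these tests
// (illustrative names, not the generator's API):
#include <algorithm>
#include <cmath>
#include <cstdio>

double RationalStdDev(int qp, double numerator_factor, double denumerator_term,
                      double offset) {
  return (qp * numerator_factor) / (qp + denumerator_term) + offset;
}

double ExponentialStdDev(int qp, double scale, double exponent_factor,
                         double exponent_offset) {
  return scale * std::exp(exponent_factor * qp - exponent_offset);
}

int main() {
  // Rational curve from the tests: (20 * -5.5) / (20 - 97) - 1 ~= 0.429.
  double rational = RationalStdDev(20, -5.5, -97.0, -1.0);
  // Exponential curve from the tests, evaluated at qp = 80:
  // 0.006 * e^(0.01857465 * 80 + 4.26470513) ~= 1.886.
  double exponential = ExponentialStdDev(80, 0.006, 0.01857465, -4.26470513);
  // The generator finally clamps to the protocol range; lower bound 0 here.
  double clamped = std::clamp(rational, 0.0, 40.0);
  std::printf("%.3f %.3f %.3f\n", rational, exponential, clamped);
  return 0;
}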
+ EXPECT_THAT( + settings_generator.OnFrame(/*is_keyframe=*/true, /*qp=*/1), + Field(&CorruptionDetectionFilterSettings::std_dev, DoubleEq(40.0))); +} + +TEST(CorruptionDetectionSettingsGenerator, StdDevLowerBound) { + CorruptionDetectionSettingsGenerator settings_generator( + // (1 * qp) / (qp - 0) + 1 = 2, for all values of qp. + CorruptionDetectionSettingsGenerator::RationalFunctionParameters{ + .numerator_factor = 1, .denumerator_term = 0, .offset = 1}, + CorruptionDetectionSettingsGenerator::ErrorThresholds{}, + webrtc::CorruptionDetectionSettingsGenerator::TransientParameters{ + .std_dev_lower_bound = 5.0}); + + // `std_dev` capped at lower bound of 5.0. + EXPECT_THAT( + settings_generator.OnFrame(/*is_keyframe=*/true, /*qp=*/1), + Field(&CorruptionDetectionFilterSettings::std_dev, DoubleEq(5.0))); +} + +TEST(CorruptionDetectionSettingsGenerator, TreatsLargeQpChangeAsKeyFrame) { + CorruptionDetectionSettingsGenerator settings_generator( + CorruptionDetectionSettingsGenerator::RationalFunctionParameters{ + .numerator_factor = 1, .denumerator_term = 0, .offset = 1}, + CorruptionDetectionSettingsGenerator::ErrorThresholds{.luma = 2, + .chroma = 3}, + // Two frames with adjusted settings, including the keyframe. + // Adjust the error thresholds by 2. + webrtc::CorruptionDetectionSettingsGenerator::TransientParameters{ + .max_qp = 100, + .keyframe_threshold_offset = 2, + .keyframe_offset_duration_frames = 1, + .large_qp_change_threshold = 20}); + + // +2 offset due to keyframe. + EXPECT_THAT( + settings_generator.OnFrame(/*is_keyframe=*/true, /*qp=*/10), + Field(&CorruptionDetectionFilterSettings::luma_error_threshold, Eq(4))); + + // Back to normal. + EXPECT_THAT( + settings_generator.OnFrame(/*is_keyframe=*/false, /*qp=*/10), + Field(&CorruptionDetectionFilterSettings::luma_error_threshold, Eq(2))); + + // Large change in qp, treat as keyframe => add +2 offset. + EXPECT_THAT( + settings_generator.OnFrame(/*is_keyframe=*/false, /*qp=*/30), + Field(&CorruptionDetectionFilterSettings::luma_error_threshold, Eq(4))); + + // Back to normal. 
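// The transient handling exercised by the tests around this point is a linear
// fade: on a keyframe, or on a qp jump of at least large_qp_change_threshold,
// the error thresholds and std_dev get a temporary offset that decays to zero
// over keyframe_offset_duration_frames frames (the expectation just below
// shows the threshold back at its baseline). Standalone sketch of the fade
// only, not the full OnFrame() logic:
#include <cstdio>

double FadedOffset(int frames_since_keyframe, int duration_frames,
                   double keyframe_offset) {
  if (duration_frames == 0 || frames_since_keyframe > duration_frames) {
    return 0.0;
  }
  // progress runs from 0.0 at the keyframe to 1.0 once fully faded out.
  double progress =
      static_cast<double>(frames_since_keyframe) / duration_frames;
  return (1.0 - progress) * keyframe_offset;
}

int main() {
  // keyframe_stddev_offset = 2.0 and keyframe_offset_duration_frames = 2, as
  // in TransientStdDevOffset above: offsets 2.0, 1.0, 0.0, 0.0 for frames
  // 0, 1, 2, 3 after the keyframe.
  for (int frame = 0; frame <= 3; ++frame) {
    std::printf("frame %d: +%.1f\n", frame, FadedOffset(frame, 2, 2.0));
  }
  return 0;
}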
+ EXPECT_THAT( + settings_generator.OnFrame(/*is_keyframe=*/false, /*qp=*/30), + Field(&CorruptionDetectionFilterSettings::luma_error_threshold, Eq(2))); +} + +} // namespace webrtc diff --git a/modules/video_coding/utility/decoded_frames_history.cc b/modules/video_coding/utility/decoded_frames_history.cc index 1138aa8448..6bfcf2b56b 100644 --- a/modules/video_coding/utility/decoded_frames_history.cc +++ b/modules/video_coding/utility/decoded_frames_history.cc @@ -11,6 +11,9 @@ #include "modules/video_coding/utility/decoded_frames_history.h" #include +#include +#include +#include #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -74,11 +77,11 @@ void DecodedFramesHistory::Clear() { last_frame_id_.reset(); } -absl::optional DecodedFramesHistory::GetLastDecodedFrameId() const { +std::optional DecodedFramesHistory::GetLastDecodedFrameId() const { return last_decoded_frame_; } -absl::optional DecodedFramesHistory::GetLastDecodedFrameTimestamp() +std::optional DecodedFramesHistory::GetLastDecodedFrameTimestamp() const { return last_decoded_frame_timestamp_; } diff --git a/modules/video_coding/utility/decoded_frames_history.h b/modules/video_coding/utility/decoded_frames_history.h index 9b8bf65821..f401d68bfc 100644 --- a/modules/video_coding/utility/decoded_frames_history.h +++ b/modules/video_coding/utility/decoded_frames_history.h @@ -13,11 +13,10 @@ #include -#include +#include +#include #include -#include "absl/types/optional.h" -#include "api/video/encoded_frame.h" namespace webrtc { namespace video_coding { @@ -35,16 +34,16 @@ class DecodedFramesHistory { void Clear(); - absl::optional GetLastDecodedFrameId() const; - absl::optional GetLastDecodedFrameTimestamp() const; + std::optional GetLastDecodedFrameId() const; + std::optional GetLastDecodedFrameTimestamp() const; private: int FrameIdToIndex(int64_t frame_id) const; std::vector buffer_; - absl::optional last_frame_id_; - absl::optional last_decoded_frame_; - absl::optional last_decoded_frame_timestamp_; + std::optional last_frame_id_; + std::optional last_decoded_frame_; + std::optional last_decoded_frame_timestamp_; }; } // namespace video_coding diff --git a/modules/video_coding/utility/decoded_frames_history_unittest.cc b/modules/video_coding/utility/decoded_frames_history_unittest.cc index ac09a42053..77bf902489 100644 --- a/modules/video_coding/utility/decoded_frames_history_unittest.cc +++ b/modules/video_coding/utility/decoded_frames_history_unittest.cc @@ -10,6 +10,8 @@ #include "modules/video_coding/utility/decoded_frames_history.h" +#include + #include "test/gtest.h" namespace webrtc { @@ -48,8 +50,8 @@ TEST(DecodedFramesHistory, ClearsHistory) { history.InsertDecoded(1234, 0); history.Clear(); EXPECT_EQ(history.WasDecoded(1234), false); - EXPECT_EQ(history.GetLastDecodedFrameId(), absl::nullopt); - EXPECT_EQ(history.GetLastDecodedFrameTimestamp(), absl::nullopt); + EXPECT_EQ(history.GetLastDecodedFrameId(), std::nullopt); + EXPECT_EQ(history.GetLastDecodedFrameTimestamp(), std::nullopt); } TEST(DecodedFramesHistory, HandlesBigJumpInPictureId) { @@ -74,7 +76,7 @@ TEST(DecodedFramesHistory, ForgetsTooOldHistory) { TEST(DecodedFramesHistory, ReturnsLastDecodedFrameId) { DecodedFramesHistory history(kHistorySize); - EXPECT_EQ(history.GetLastDecodedFrameId(), absl::nullopt); + EXPECT_EQ(history.GetLastDecodedFrameId(), std::nullopt); history.InsertDecoded(1234, 0); EXPECT_EQ(history.GetLastDecodedFrameId(), 1234); history.InsertDecoded(1235, 0); @@ -83,7 +85,7 @@ TEST(DecodedFramesHistory, ReturnsLastDecodedFrameId) 
{ TEST(DecodedFramesHistory, ReturnsLastDecodedFrameTimestamp) { DecodedFramesHistory history(kHistorySize); - EXPECT_EQ(history.GetLastDecodedFrameTimestamp(), absl::nullopt); + EXPECT_EQ(history.GetLastDecodedFrameTimestamp(), std::nullopt); history.InsertDecoded(1234, 12345); EXPECT_EQ(history.GetLastDecodedFrameTimestamp(), 12345u); history.InsertDecoded(1235, 12366); diff --git a/modules/video_coding/utility/frame_dropper.cc b/modules/video_coding/utility/frame_dropper.cc index 8ea8a8e268..4392fdc644 100644 --- a/modules/video_coding/utility/frame_dropper.cc +++ b/modules/video_coding/utility/frame_dropper.cc @@ -11,6 +11,8 @@ #include "modules/video_coding/utility/frame_dropper.h" #include +#include +#include namespace webrtc { diff --git a/modules/video_coding/utility/frame_dropper.h b/modules/video_coding/utility/frame_dropper.h index b45b7fe27f..e80168398f 100644 --- a/modules/video_coding/utility/frame_dropper.h +++ b/modules/video_coding/utility/frame_dropper.h @@ -59,8 +59,8 @@ class FrameDropper { void UpdateRatio(); void CapAccumulator(); - rtc::ExpFilter key_frame_ratio_; - rtc::ExpFilter delta_frame_size_avg_kbits_; + ExpFilter key_frame_ratio_; + ExpFilter delta_frame_size_avg_kbits_; // Key frames and large delta frames are not immediately accumulated in the // bucket since they can immediately overflow the bucket leading to large @@ -81,7 +81,7 @@ class FrameDropper { float accumulator_max_; float target_bitrate_; bool drop_next_; - rtc::ExpFilter drop_ratio_; + ExpFilter drop_ratio_; int drop_count_; float incoming_frame_rate_; bool was_below_max_; diff --git a/modules/video_coding/utility/frame_dropper_unittest.cc b/modules/video_coding/utility/frame_dropper_unittest.cc index 066103a788..ac3fade782 100644 --- a/modules/video_coding/utility/frame_dropper_unittest.cc +++ b/modules/video_coding/utility/frame_dropper_unittest.cc @@ -10,6 +10,8 @@ #include "modules/video_coding/utility/frame_dropper.h" +#include + #include "test/gtest.h" namespace webrtc { diff --git a/modules/video_coding/utility/framerate_controller_deprecated.cc b/modules/video_coding/utility/framerate_controller_deprecated.cc index 5978adc3c4..56fd631fa5 100644 --- a/modules/video_coding/utility/framerate_controller_deprecated.cc +++ b/modules/video_coding/utility/framerate_controller_deprecated.cc @@ -13,6 +13,7 @@ #include #include +#include namespace webrtc { @@ -77,7 +78,7 @@ void FramerateControllerDeprecated::AddFrame(uint32_t timestamp_ms) { last_timestamp_ms_ = timestamp_ms; } -absl::optional FramerateControllerDeprecated::Rate( +std::optional FramerateControllerDeprecated::Rate( uint32_t timestamp_ms) const { return framerate_estimator_.Rate(timestamp_ms); } diff --git a/modules/video_coding/utility/framerate_controller_deprecated.h b/modules/video_coding/utility/framerate_controller_deprecated.h index ca0cbea053..6fcda3ddc9 100644 --- a/modules/video_coding/utility/framerate_controller_deprecated.h +++ b/modules/video_coding/utility/framerate_controller_deprecated.h @@ -13,7 +13,8 @@ #include -#include "absl/types/optional.h" +#include + #include "rtc_base/rate_statistics.h" namespace webrtc { @@ -34,10 +35,10 @@ class FramerateControllerDeprecated { void Reset(); private: - absl::optional Rate(uint32_t timestamp_ms) const; + std::optional Rate(uint32_t timestamp_ms) const; - absl::optional target_framerate_fps_; - absl::optional last_timestamp_ms_; + std::optional target_framerate_fps_; + std::optional last_timestamp_ms_; uint32_t min_frame_interval_ms_; RateStatistics framerate_estimator_; 
}; diff --git a/modules/video_coding/utility/framerate_controller_deprecated_unittest.cc b/modules/video_coding/utility/framerate_controller_deprecated_unittest.cc index eabf0529db..dc4d45b9f2 100644 --- a/modules/video_coding/utility/framerate_controller_deprecated_unittest.cc +++ b/modules/video_coding/utility/framerate_controller_deprecated_unittest.cc @@ -12,6 +12,8 @@ #include +#include + #include "test/gtest.h" namespace webrtc { diff --git a/modules/video_coding/utility/ivf_file_reader.cc b/modules/video_coding/utility/ivf_file_reader.cc index 13092b5e24..c8ef46c0fb 100644 --- a/modules/video_coding/utility/ivf_file_reader.cc +++ b/modules/video_coding/utility/ivf_file_reader.cc @@ -10,13 +10,25 @@ #include "modules/video_coding/utility/ivf_file_reader.h" +#include +#include +#include +#include +#include #include +#include #include +#include "api/scoped_refptr.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "api/video_codecs/video_codec.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/video_coding/utility/ivf_defines.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/system/file_wrapper.h" namespace webrtc { namespace { @@ -29,6 +41,7 @@ constexpr uint8_t kVp8Header[kCodecTypeBytesCount] = {'V', 'P', '8', '0'}; constexpr uint8_t kVp9Header[kCodecTypeBytesCount] = {'V', 'P', '9', '0'}; constexpr uint8_t kAv1Header[kCodecTypeBytesCount] = {'A', 'V', '0', '1'}; constexpr uint8_t kH264Header[kCodecTypeBytesCount] = {'H', '2', '6', '4'}; +constexpr uint8_t kH265Header[kCodecTypeBytesCount] = {'H', '2', '6', '5'}; // RTP standard required 90kHz clock rate. constexpr int32_t kRtpClockRateHz = 90000; @@ -67,7 +80,7 @@ bool IvfFileReader::Reset() { return false; } - absl::optional codec_type = ParseCodecType(ivf_header, 8); + std::optional codec_type = ParseCodecType(ivf_header, 8); if (!codec_type) { return false; } @@ -110,12 +123,12 @@ bool IvfFileReader::Reset() { return true; } -absl::optional IvfFileReader::NextFrame() { +std::optional IvfFileReader::NextFrame() { if (has_error_ || !HasMoreFrames()) { - return absl::nullopt; + return std::nullopt; } - rtc::scoped_refptr payload = EncodedImageBuffer::Create(); + scoped_refptr payload = EncodedImageBuffer::Create(); std::vector layer_sizes; // next_frame_header_ have to be presented by the way how it was loaded. If it // is missing it means there is a bug in error handling. 
@@ -138,7 +151,7 @@ absl::optional IvfFileReader::NextFrame() { RTC_LOG(LS_ERROR) << "Frame #" << num_read_frames_ << ": failed to read frame payload"; has_error_ = true; - return absl::nullopt; + return std::nullopt; } num_read_frames_++; @@ -150,13 +163,13 @@ absl::optional IvfFileReader::NextFrame() { if (!has_error_ && num_read_frames_ != num_frames_) { RTC_LOG(LS_ERROR) << "Unexpected EOF"; has_error_ = true; - return absl::nullopt; + return std::nullopt; } } EncodedImage image; image.capture_time_ms_ = current_timestamp; - image.SetTimestamp( + image.SetRtpTimestamp( static_cast(current_timestamp * kRtpClockRateHz / time_scale_)); image.SetEncodedData(payload); image.SetSpatialIndex(static_cast(layer_sizes.size()) - 1); @@ -178,8 +191,8 @@ bool IvfFileReader::Close() { return true; } -absl::optional IvfFileReader::ParseCodecType(uint8_t* buffer, - size_t start_pos) { +std::optional IvfFileReader::ParseCodecType(uint8_t* buffer, + size_t start_pos) { if (memcmp(&buffer[start_pos], kVp8Header, kCodecTypeBytesCount) == 0) { return VideoCodecType::kVideoCodecVP8; } @@ -192,16 +205,18 @@ absl::optional IvfFileReader::ParseCodecType(uint8_t* buffer, if (memcmp(&buffer[start_pos], kH264Header, kCodecTypeBytesCount) == 0) { return VideoCodecType::kVideoCodecH264; } + if (memcmp(&buffer[start_pos], kH265Header, kCodecTypeBytesCount) == 0) { + return VideoCodecType::kVideoCodecH265; + } has_error_ = true; RTC_LOG(LS_ERROR) << "Unknown codec type: " << std::string( reinterpret_cast(&buffer[start_pos]), kCodecTypeBytesCount); - return absl::nullopt; + return std::nullopt; } -absl::optional -IvfFileReader::ReadNextFrameHeader() { +std::optional IvfFileReader::ReadNextFrameHeader() { uint8_t ivf_frame_header[kIvfFrameHeaderSize] = {0}; size_t read = file_.Read(&ivf_frame_header, kIvfFrameHeaderSize); if (read != kIvfFrameHeaderSize) { @@ -210,7 +225,7 @@ IvfFileReader::ReadNextFrameHeader() { RTC_LOG(LS_ERROR) << "Frame #" << num_read_frames_ << ": failed to read IVF frame header"; } - return absl::nullopt; + return std::nullopt; } FrameHeader header; header.frame_size = static_cast( @@ -222,14 +237,14 @@ IvfFileReader::ReadNextFrameHeader() { has_error_ = true; RTC_LOG(LS_ERROR) << "Frame #" << num_read_frames_ << ": invalid frame size"; - return absl::nullopt; + return std::nullopt; } if (header.timestamp < 0) { has_error_ = true; RTC_LOG(LS_ERROR) << "Frame #" << num_read_frames_ << ": negative timestamp"; - return absl::nullopt; + return std::nullopt; } return header; diff --git a/modules/video_coding/utility/ivf_file_reader.h b/modules/video_coding/utility/ivf_file_reader.h index db4fc25575..a7752364c6 100644 --- a/modules/video_coding/utility/ivf_file_reader.h +++ b/modules/video_coding/utility/ivf_file_reader.h @@ -11,12 +11,14 @@ #ifndef MODULES_VIDEO_CODING_UTILITY_IVF_FILE_READER_H_ #define MODULES_VIDEO_CODING_UTILITY_IVF_FILE_READER_H_ +#include +#include #include +#include #include -#include "absl/types/optional.h" #include "api/video/encoded_image.h" -#include "api/video_codecs/video_codec.h" +#include "api/video/video_codec_type.h" #include "rtc_base/system/file_wrapper.h" namespace webrtc { @@ -39,9 +41,9 @@ class IvfFileReader { // Returns count of frames in this file. size_t GetFramesCount() const { return num_frames_; } - // Returns next frame or absl::nullopt if any error acquired. Always returns - // absl::nullopt after first error was spotted. - absl::optional NextFrame(); + // Returns next frame or std::nullopt if any error acquired. 
Always returns + // std::nullopt after first error was spotted. + std::optional NextFrame(); bool HasMoreFrames() const { return num_read_frames_ < num_frames_; } bool HasError() const { return has_error_; } @@ -61,9 +63,9 @@ class IvfFileReader { // Parses codec type from specified position of the buffer. Codec type // contains kCodecTypeBytesCount bytes and caller has to ensure that buffer // won't overflow. - absl::optional ParseCodecType(uint8_t* buffer, - size_t start_pos); - absl::optional ReadNextFrameHeader(); + std::optional ParseCodecType(uint8_t* buffer, + size_t start_pos); + std::optional ReadNextFrameHeader(); VideoCodecType codec_type_; size_t num_frames_; @@ -73,7 +75,7 @@ class IvfFileReader { uint32_t time_scale_; FileWrapper file_; - absl::optional next_frame_header_; + std::optional next_frame_header_; bool has_error_; }; diff --git a/modules/video_coding/utility/ivf_file_reader_unittest.cc b/modules/video_coding/utility/ivf_file_reader_unittest.cc index 0e20b7f77c..bfd2b6140f 100644 --- a/modules/video_coding/utility/ivf_file_reader_unittest.cc +++ b/modules/video_coding/utility/ivf_file_reader_unittest.cc @@ -10,10 +10,18 @@ #include "modules/video_coding/utility/ivf_file_reader.h" +#include +#include +#include #include +#include #include +#include "api/scoped_refptr.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" #include "modules/video_coding/utility/ivf_file_writer.h" +#include "rtc_base/system/file_wrapper.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" @@ -44,7 +52,7 @@ class IvfFileReaderTest : public ::testing::Test { int spatial_layers_count) { EncodedImage frame; frame.SetSpatialIndex(spatial_layers_count); - rtc::scoped_refptr payload = EncodedImageBuffer::Create( + scoped_refptr payload = EncodedImageBuffer::Create( sizeof(kDummyPayload) * spatial_layers_count); for (int i = 0; i < spatial_layers_count; ++i) { memcpy(&payload->data()[i * sizeof(kDummyPayload)], kDummyPayload, @@ -58,7 +66,7 @@ class IvfFileReaderTest : public ::testing::Test { if (use_capture_tims_ms) { frame.capture_time_ms_ = i; } else { - frame.SetTimestamp(i); + frame.SetRtpTimestamp(i); } if (!file_writer->WriteFrame(frame, codec_type)) return false; @@ -78,7 +86,7 @@ class IvfFileReaderTest : public ::testing::Test { ASSERT_TRUE(file_writer->Close()); } - void ValidateFrame(absl::optional frame, + void ValidateFrame(std::optional frame, int frame_index, bool use_capture_tims_ms, int spatial_layers_count) { @@ -86,9 +94,9 @@ class IvfFileReaderTest : public ::testing::Test { EXPECT_EQ(frame->SpatialIndex(), spatial_layers_count - 1); if (use_capture_tims_ms) { EXPECT_EQ(frame->capture_time_ms_, static_cast(frame_index)); - EXPECT_EQ(frame->Timestamp(), static_cast(90 * frame_index)); + EXPECT_EQ(frame->RtpTimestamp(), static_cast(90 * frame_index)); } else { - EXPECT_EQ(frame->Timestamp(), static_cast(frame_index)); + EXPECT_EQ(frame->RtpTimestamp(), static_cast(frame_index)); } ASSERT_EQ(frame->size(), sizeof(kDummyPayload) * spatial_layers_count); for (int i = 0; i < spatial_layers_count; ++i) { diff --git a/modules/video_coding/utility/ivf_file_writer.cc b/modules/video_coding/utility/ivf_file_writer.cc index 01025986d6..422e4585a8 100644 --- a/modules/video_coding/utility/ivf_file_writer.cc +++ b/modules/video_coding/utility/ivf_file_writer.cc @@ -10,13 +10,20 @@ #include "modules/video_coding/utility/ivf_file_writer.h" +#include +#include +#include #include +#include "absl/strings/string_view.h" +#include 
"api/video/encoded_image.h" +#include "api/video/video_codec_type.h" #include "api/video_codecs/video_codec.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/video_coding/utility/ivf_defines.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/system/file_wrapper.h" // TODO(palmkvist): make logging more informative in the absence of a file name // (or get one) @@ -53,6 +60,12 @@ std::unique_ptr IvfFileWriter::Wrap(FileWrapper file, new IvfFileWriter(std::move(file), byte_limit)); } +std::unique_ptr IvfFileWriter::Wrap(absl::string_view filename, + size_t byte_limit) { + return std::unique_ptr( + new IvfFileWriter(FileWrapper::OpenWriteOnly(filename), byte_limit)); +} + bool IvfFileWriter::WriteHeader() { if (!file_.Rewind()) { RTC_LOG(LS_WARNING) << "Unable to rewind ivf output file."; @@ -92,6 +105,12 @@ bool IvfFileWriter::WriteHeader() { ivf_header[10] = '6'; ivf_header[11] = '4'; break; + case kVideoCodecH265: + ivf_header[8] = 'H'; + ivf_header[9] = '2'; + ivf_header[10] = '6'; + ivf_header[11] = '5'; + break; default: // For unknown codec type use **** code. You can specify actual payload // format when playing the video with ffplay: ffplay -f H263 file.ivf @@ -135,7 +154,7 @@ bool IvfFileWriter::InitFromFirstFrame(const EncodedImage& encoded_image, height_ = encoded_image._encodedHeight; } - using_capture_timestamps_ = encoded_image.Timestamp() == 0; + using_capture_timestamps_ = encoded_image.RtpTimestamp() == 0; codec_type_ = codec_type; @@ -162,7 +181,7 @@ bool IvfFileWriter::WriteFrame(const EncodedImage& encoded_image, int64_t timestamp = using_capture_timestamps_ ? encoded_image.capture_time_ms_ - : wrap_handler_.Unwrap(encoded_image.Timestamp()); + : wrap_handler_.Unwrap(encoded_image.RtpTimestamp()); if (last_timestamp_ != -1 && timestamp < last_timestamp_) { RTC_LOG(LS_WARNING) << "Timestamp not increasing: " << last_timestamp_ << " -> " << timestamp; diff --git a/modules/video_coding/utility/ivf_file_writer.h b/modules/video_coding/utility/ivf_file_writer.h index ec8a7bf9e1..c1c088690b 100644 --- a/modules/video_coding/utility/ivf_file_writer.h +++ b/modules/video_coding/utility/ivf_file_writer.h @@ -16,6 +16,7 @@ #include +#include "absl/strings/string_view.h" #include "api/video/encoded_image.h" #include "api/video/video_codec_type.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" @@ -31,6 +32,8 @@ class IvfFileWriter { // will fail. A `byte_limit` of 0 is equivalent to no limit. 
static std::unique_ptr Wrap(FileWrapper file, size_t byte_limit); + static std::unique_ptr Wrap(absl::string_view filename, + size_t byte_limit); ~IvfFileWriter(); IvfFileWriter(const IvfFileWriter&) = delete; diff --git a/modules/video_coding/utility/ivf_file_writer_unittest.cc b/modules/video_coding/utility/ivf_file_writer_unittest.cc index c5d30a1286..6be8a6c228 100644 --- a/modules/video_coding/utility/ivf_file_writer_unittest.cc +++ b/modules/video_coding/utility/ivf_file_writer_unittest.cc @@ -12,10 +12,14 @@ #include +#include #include #include +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" #include "modules/rtp_rtcp/source/byte_io.h" +#include "rtc_base/system/file_wrapper.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" @@ -54,7 +58,7 @@ class IvfFileWriterTest : public ::testing::Test { if (use_capture_tims_ms) { frame.capture_time_ms_ = i; } else { - frame.SetTimestamp(i); + frame.SetRtpTimestamp(i); } if (!file_writer_->WriteFrame(frame, codec_type)) return false; diff --git a/modules/video_coding/utility/qp_parser.cc b/modules/video_coding/utility/qp_parser.cc index 18f225447d..491bae8e56 100644 --- a/modules/video_coding/utility/qp_parser.cc +++ b/modules/video_coding/utility/qp_parser.cc @@ -10,18 +10,26 @@ #include "modules/video_coding/utility/qp_parser.h" +#include +#include +#include + +#include "api/array_view.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" #include "modules/video_coding/utility/vp8_header_parser.h" #include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { -absl::optional QpParser::Parse(VideoCodecType codec_type, - size_t spatial_idx, - const uint8_t* frame_data, - size_t frame_size) { +std::optional QpParser::Parse(VideoCodecType codec_type, + size_t spatial_idx, + const uint8_t* frame_data, + size_t frame_size) { if (frame_data == nullptr || frame_size == 0 || spatial_idx >= kMaxSimulcastStreams) { - return absl::nullopt; + return std::nullopt; } if (codec_type == kVideoCodecVP8) { @@ -36,18 +44,32 @@ absl::optional QpParser::Parse(VideoCodecType codec_type, } } else if (codec_type == kVideoCodecH264) { return h264_parsers_[spatial_idx].Parse(frame_data, frame_size); + } else if (codec_type == kVideoCodecH265) { + // H.265 bitstream parser is conditionally built. 
+#ifdef RTC_ENABLE_H265 + return h265_parsers_[spatial_idx].Parse(frame_data, frame_size); +#endif } - return absl::nullopt; + return std::nullopt; +} + +std::optional QpParser::H264QpParser::Parse(const uint8_t* frame_data, + size_t frame_size) { + MutexLock lock(&mutex_); + bitstream_parser_.ParseBitstream( + ArrayView(frame_data, frame_size)); + return bitstream_parser_.GetLastSliceQp(); } -absl::optional QpParser::H264QpParser::Parse( - const uint8_t* frame_data, - size_t frame_size) { +#ifdef RTC_ENABLE_H265 +std::optional QpParser::H265QpParser::Parse(const uint8_t* frame_data, + size_t frame_size) { MutexLock lock(&mutex_); bitstream_parser_.ParseBitstream( - rtc::ArrayView(frame_data, frame_size)); + ArrayView(frame_data, frame_size)); return bitstream_parser_.GetLastSliceQp(); } +#endif } // namespace webrtc diff --git a/modules/video_coding/utility/qp_parser.h b/modules/video_coding/utility/qp_parser.h index f132ff9337..48e19e988b 100644 --- a/modules/video_coding/utility/qp_parser.h +++ b/modules/video_coding/utility/qp_parser.h @@ -11,26 +11,33 @@ #ifndef MODULES_VIDEO_CODING_UTILITY_QP_PARSER_H_ #define MODULES_VIDEO_CODING_UTILITY_QP_PARSER_H_ -#include "absl/types/optional.h" +#include +#include +#include + #include "api/video/video_codec_constants.h" #include "api/video/video_codec_type.h" #include "common_video/h264/h264_bitstream_parser.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" + +#ifdef RTC_ENABLE_H265 +#include "common_video/h265/h265_bitstream_parser.h" +#endif namespace webrtc { class QpParser { public: - absl::optional Parse(VideoCodecType codec_type, - size_t spatial_idx, - const uint8_t* frame_data, - size_t frame_size); + std::optional Parse(VideoCodecType codec_type, + size_t spatial_idx, + const uint8_t* frame_data, + size_t frame_size); private: // A thread safe wrapper for H264 bitstream parser. class H264QpParser { public: - absl::optional Parse(const uint8_t* frame_data, - size_t frame_size); + std::optional Parse(const uint8_t* frame_data, size_t frame_size); private: Mutex mutex_; @@ -38,6 +45,20 @@ class QpParser { }; H264QpParser h264_parsers_[kMaxSimulcastStreams]; + +#ifdef RTC_ENABLE_H265 + // A thread safe wrapper for H.265 bitstream parser. 
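With the hunks above, QpParser::Parse() returns std::optional and gains an H.265 branch that is only compiled when RTC_ENABLE_H265 is defined. A minimal caller sketch; `bitstream` and `size` stand in for a real encoded frame:

#include <cstddef>
#include <cstdint>
#include <optional>

#include "api/video/video_codec_type.h"
#include "modules/video_coding/utility/qp_parser.h"

namespace webrtc {

std::optional<uint32_t> ParseH265Qp(const uint8_t* bitstream, size_t size) {
  QpParser parser;
  // Returns std::nullopt for null/empty input, for spatial_idx >=
  // kMaxSimulcastStreams, and in builds without RTC_ENABLE_H265.
  return parser.Parse(kVideoCodecH265, /*spatial_idx=*/0, bitstream, size);
}

}  // namespace webrtc
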
+ class H265QpParser { + public: + std::optional Parse(const uint8_t* frame_data, size_t frame_size); + + private: + Mutex mutex_; + H265BitstreamParser bitstream_parser_ RTC_GUARDED_BY(mutex_); + }; + + H265QpParser h265_parsers_[kMaxSimulcastStreams]; +#endif }; } // namespace webrtc diff --git a/modules/video_coding/utility/qp_parser_unittest.cc b/modules/video_coding/utility/qp_parser_unittest.cc index 1131288f26..a7fe387015 100644 --- a/modules/video_coding/utility/qp_parser_unittest.cc +++ b/modules/video_coding/utility/qp_parser_unittest.cc @@ -12,6 +12,11 @@ #include +#include +#include + +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" #include "test/gtest.h" namespace webrtc { @@ -55,21 +60,21 @@ const uint8_t kCodedFrameH264InterSliceQpDelta0[] = {0x00, 0x00, 0x00, 0x01, TEST(QpParserTest, ParseQpVp8) { QpParser parser; - absl::optional qp = parser.Parse( + std::optional qp = parser.Parse( kVideoCodecVP8, 0, kCodedFrameVp8Qp25, sizeof(kCodedFrameVp8Qp25)); EXPECT_EQ(qp, 25u); } TEST(QpParserTest, ParseQpVp9) { QpParser parser; - absl::optional qp = parser.Parse( + std::optional qp = parser.Parse( kVideoCodecVP9, 0, kCodedFrameVp9Qp96, sizeof(kCodedFrameVp9Qp96)); EXPECT_EQ(qp, 96u); } TEST(QpParserTest, ParseQpH264) { QpParser parser; - absl::optional qp = parser.Parse( + std::optional qp = parser.Parse( VideoCodecType::kVideoCodecH264, 0, kCodedFrameH264SpsPpsIdrQp38, sizeof(kCodedFrameH264SpsPpsIdrQp38)); EXPECT_EQ(qp, 38u); @@ -89,27 +94,27 @@ TEST(QpParserTest, ParseQpH264) { TEST(QpParserTest, ParseQpUnsupportedCodecType) { QpParser parser; - absl::optional qp = parser.Parse( + std::optional qp = parser.Parse( kVideoCodecGeneric, 0, kCodedFrameVp8Qp25, sizeof(kCodedFrameVp8Qp25)); EXPECT_FALSE(qp.has_value()); } TEST(QpParserTest, ParseQpNullData) { QpParser parser; - absl::optional qp = parser.Parse(kVideoCodecVP8, 0, nullptr, 100); + std::optional qp = parser.Parse(kVideoCodecVP8, 0, nullptr, 100); EXPECT_FALSE(qp.has_value()); } TEST(QpParserTest, ParseQpEmptyData) { QpParser parser; - absl::optional qp = + std::optional qp = parser.Parse(kVideoCodecVP8, 0, kCodedFrameVp8Qp25, 0); EXPECT_FALSE(qp.has_value()); } TEST(QpParserTest, ParseQpSpatialIdxExceedsMax) { QpParser parser; - absl::optional qp = + std::optional qp = parser.Parse(kVideoCodecVP8, kMaxSimulcastStreams, kCodedFrameVp8Qp25, sizeof(kCodedFrameVp8Qp25)); EXPECT_FALSE(qp.has_value()); diff --git a/modules/video_coding/utility/quality_scaler.cc b/modules/video_coding/utility/quality_scaler.cc index 9fb41a0ad7..a5fc7041a9 100644 --- a/modules/video_coding/utility/quality_scaler.cc +++ b/modules/video_coding/utility/quality_scaler.cc @@ -10,13 +10,19 @@ #include "modules/video_coding/utility/quality_scaler.h" +#include +#include #include -#include +#include +#include "api/field_trials_view.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" -#include "api/video/video_adaptation_reason.h" +#include "api/video_codecs/video_encoder.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/quality_scaler_settings.h" +#include "rtc_base/experiments/quality_scaling_experiment.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/exp_filter.h" #include "rtc_base/weak_ptr.h" @@ -41,10 +47,10 @@ class QualityScaler::QpSmoother { last_sample_ms_(0), smoother_(alpha) {} - absl::optional GetAvg() const { + std::optional GetAvg() const { float value = smoother_.filtered(); - if (value == 
rtc::ExpFilter::kValueUndefined) { - return absl::nullopt; + if (value == ExpFilter::kValueUndefined) { + return std::nullopt; } return static_cast(value); } @@ -60,7 +66,7 @@ class QualityScaler::QpSmoother { private: const float alpha_; int64_t last_sample_ms_; - rtc::ExpFilter smoother_; + ExpFilter smoother_; }; // The QualityScaler checks for QP periodically by queuing CheckQpTasks. The @@ -171,41 +177,42 @@ class QualityScaler::CheckQpTask { const Result previous_task_result_; Result result_; - rtc::WeakPtrFactory weak_ptr_factory_; + WeakPtrFactory weak_ptr_factory_; }; QualityScaler::QualityScaler(QualityScalerQpUsageHandlerInterface* handler, - VideoEncoder::QpThresholds thresholds) - : QualityScaler(handler, thresholds, kMeasureMs) {} + VideoEncoder::QpThresholds thresholds, + const FieldTrialsView& field_trials) + : QualityScaler(handler, thresholds, field_trials, kMeasureMs) {} // Protected ctor, should not be called directly. QualityScaler::QualityScaler(QualityScalerQpUsageHandlerInterface* handler, VideoEncoder::QpThresholds thresholds, + const FieldTrialsView& field_trials, int64_t default_sampling_period_ms) : handler_(handler), thresholds_(thresholds), - sampling_period_ms_(QualityScalerSettings::ParseFromFieldTrials() + sampling_period_ms_(QualityScalerSettings(field_trials) .SamplingPeriodMs() .value_or(default_sampling_period_ms)), fast_rampup_(true), // Arbitrarily choose size based on 30 fps for 5 seconds. - average_qp_(QualityScalerSettings::ParseFromFieldTrials() + average_qp_(QualityScalerSettings(field_trials) .AverageQpWindow() .value_or(5 * 30)), framedrop_percent_media_opt_(5 * 30), framedrop_percent_all_(5 * 30), - experiment_enabled_(QualityScalingExperiment::Enabled()), - min_frames_needed_( - QualityScalerSettings::ParseFromFieldTrials().MinFrames().value_or( - kMinFramesNeededToScale)), - initial_scale_factor_(QualityScalerSettings::ParseFromFieldTrials() + experiment_enabled_(QualityScalingExperiment::Enabled(field_trials)), + min_frames_needed_(QualityScalerSettings(field_trials) + .MinFrames() + .value_or(kMinFramesNeededToScale)), + initial_scale_factor_(QualityScalerSettings(field_trials) .InitialScaleFactor() .value_or(kSamplePeriodScaleFactor)), - scale_factor_( - QualityScalerSettings::ParseFromFieldTrials().ScaleFactor()) { + scale_factor_(QualityScalerSettings(field_trials).ScaleFactor()) { RTC_DCHECK_RUN_ON(&task_checker_); if (experiment_enabled_) { - config_ = QualityScalingExperiment::GetConfig(); + config_ = QualityScalingExperiment::GetConfig(field_trials); qp_smoother_high_.reset(new QpSmoother(config_.alpha_high)); qp_smoother_low_.reset(new QpSmoother(config_.alpha_low)); } @@ -258,21 +265,6 @@ void QualityScaler::ReportQp(int qp, int64_t time_sent_us) { qp_smoother_low_->Add(qp, time_sent_us); } -bool QualityScaler::QpFastFilterLow() const { - RTC_DCHECK_RUN_ON(&task_checker_); - size_t num_frames = config_.use_all_drop_reasons - ? framedrop_percent_all_.Size() - : framedrop_percent_media_opt_.Size(); - const size_t kMinNumFrames = 10; - if (num_frames < kMinNumFrames) { - return false; // Wait for more frames before making a decision. - } - absl::optional avg_qp_high = qp_smoother_high_ - ? qp_smoother_high_->GetAvg() - : average_qp_.GetAverageRoundedDown(); - return (avg_qp_high) ? (avg_qp_high.value() <= thresholds_.low) : false; -} - QualityScaler::CheckQpResult QualityScaler::CheckQp() const { RTC_DCHECK_RUN_ON(&task_checker_); // Should be set through InitEncode -> Should be set by now. 
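After the constructor change above, QualityScaler callers must supply a FieldTrialsView; the updated unit test does so with test::ScopedKeyValueConfig. A construction sketch, assuming `handler` implements QualityScalerQpUsageHandlerInterface (its virtual methods are not shown in this hunk) and that the code runs on a task queue, since the scaler posts its periodic QP checks from the constructor:

#include "api/field_trials_view.h"
#include "api/video_codecs/video_encoder.h"
#include "modules/video_coding/utility/quality_scaler.h"

namespace webrtc {

void RunScalerOnCurrentTaskQueue(QualityScalerQpUsageHandlerInterface* handler,
                                 const FieldTrialsView& field_trials) {
  // Thresholds 24/37 are arbitrary example values.
  QualityScaler scaler(handler,
                       VideoEncoder::QpThresholds(/*low=*/24, /*high=*/37),
                       field_trials);
  scaler.ReportQp(/*qp=*/30, /*time_sent_us=*/0);  // feed one QP sample
}

}  // namespace webrtc
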
@@ -288,7 +280,7 @@ QualityScaler::CheckQpResult QualityScaler::CheckQp() const { } // Check if we should scale down due to high frame drop. - const absl::optional drop_rate = + const std::optional drop_rate = config_.use_all_drop_reasons ? framedrop_percent_all_.GetAverageRoundedDown() : framedrop_percent_media_opt_.GetAverageRoundedDown(); @@ -298,10 +290,10 @@ QualityScaler::CheckQpResult QualityScaler::CheckQp() const { } // Check if we should scale up or down based on QP. - const absl::optional avg_qp_high = + const std::optional avg_qp_high = qp_smoother_high_ ? qp_smoother_high_->GetAvg() : average_qp_.GetAverageRoundedDown(); - const absl::optional avg_qp_low = + const std::optional avg_qp_low = qp_smoother_low_ ? qp_smoother_low_->GetAvg() : average_qp_.GetAverageRoundedDown(); if (avg_qp_high && avg_qp_low) { diff --git a/modules/video_coding/utility/quality_scaler.h b/modules/video_coding/utility/quality_scaler.h index 93014e36a7..2d0e7f9231 100644 --- a/modules/video_coding/utility/quality_scaler.h +++ b/modules/video_coding/utility/quality_scaler.h @@ -15,15 +15,15 @@ #include #include +#include -#include "absl/types/optional.h" -#include "api/scoped_refptr.h" +#include "api/field_trials_view.h" #include "api/sequence_checker.h" #include "api/video_codecs/video_encoder.h" #include "rtc_base/experiments/quality_scaling_experiment.h" #include "rtc_base/numerics/moving_average.h" -#include "rtc_base/ref_count.h" #include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -40,7 +40,8 @@ class QualityScaler { // This starts the quality scaler periodically checking what the average QP // has been recently. QualityScaler(QualityScalerQpUsageHandlerInterface* handler, - VideoEncoder::QpThresholds thresholds); + VideoEncoder::QpThresholds thresholds, + const FieldTrialsView& field_trials); virtual ~QualityScaler(); // Should be called each time a frame is dropped at encoding. void ReportDroppedFrameByMediaOpt(); @@ -49,12 +50,12 @@ class QualityScaler { void ReportQp(int qp, int64_t time_sent_us); void SetQpThresholds(VideoEncoder::QpThresholds thresholds); - bool QpFastFilterLow() const; // The following members declared protected for testing purposes. protected: QualityScaler(QualityScalerQpUsageHandlerInterface* handler, VideoEncoder::QpThresholds thresholds, + const FieldTrialsView& field_trials, int64_t sampling_period_ms); private: @@ -86,10 +87,9 @@ class QualityScaler { VideoEncoder::QpThresholds thresholds_ RTC_GUARDED_BY(&task_checker_); const int64_t sampling_period_ms_; bool fast_rampup_ RTC_GUARDED_BY(&task_checker_); - rtc::MovingAverage average_qp_ RTC_GUARDED_BY(&task_checker_); - rtc::MovingAverage framedrop_percent_media_opt_ - RTC_GUARDED_BY(&task_checker_); - rtc::MovingAverage framedrop_percent_all_ RTC_GUARDED_BY(&task_checker_); + MovingAverage average_qp_ RTC_GUARDED_BY(&task_checker_); + MovingAverage framedrop_percent_media_opt_ RTC_GUARDED_BY(&task_checker_); + MovingAverage framedrop_percent_all_ RTC_GUARDED_BY(&task_checker_); // Used by QualityScalingExperiment. const bool experiment_enabled_; @@ -99,7 +99,7 @@ class QualityScaler { const size_t min_frames_needed_; const double initial_scale_factor_; - const absl::optional scale_factor_; + const std::optional scale_factor_; }; // Reacts to QP being too high or too low. 
For best quality, when QP is high it diff --git a/modules/video_coding/utility/quality_scaler_unittest.cc b/modules/video_coding/utility/quality_scaler_unittest.cc index 50410dd25b..6febf0d93d 100644 --- a/modules/video_coding/utility/quality_scaler_unittest.cc +++ b/modules/video_coding/utility/quality_scaler_unittest.cc @@ -13,12 +13,13 @@ #include #include +#include "api/field_trials_view.h" #include "api/units/time_delta.h" -#include "rtc_base/checks.h" +#include "api/video_codecs/video_encoder.h" #include "rtc_base/event.h" #include "rtc_base/task_queue_for_test.h" -#include "test/field_trial.h" #include "test/gtest.h" +#include "test/scoped_key_value_config.h" namespace webrtc { namespace { @@ -44,7 +45,7 @@ class FakeQpUsageHandler : public QualityScalerQpUsageHandlerInterface { event.Set(); } - rtc::Event event; + Event event; int adapt_up_events_ = 0; int adapt_down_events_ = 0; }; @@ -53,8 +54,9 @@ class FakeQpUsageHandler : public QualityScalerQpUsageHandlerInterface { class QualityScalerUnderTest : public QualityScaler { public: explicit QualityScalerUnderTest(QualityScalerQpUsageHandlerInterface* handler, - VideoEncoder::QpThresholds thresholds) - : QualityScaler(handler, thresholds, 5) {} + VideoEncoder::QpThresholds thresholds, + const FieldTrialsView& field_trials) + : QualityScaler(handler, thresholds, field_trials, 5) {} }; class QualityScalerTest : public ::testing::Test, @@ -74,7 +76,8 @@ class QualityScalerTest : public ::testing::Test, handler_(std::make_unique()) { task_queue_.SendTask([this] { qs_ = std::unique_ptr(new QualityScalerUnderTest( - handler_.get(), VideoEncoder::QpThresholds(kLowQp, kHighQp))); + handler_.get(), VideoEncoder::QpThresholds(kLowQp, kHighQp), + scoped_field_trial_)); }); } @@ -104,7 +107,7 @@ class QualityScalerTest : public ::testing::Test, } } - test::ScopedFieldTrials scoped_field_trial_; + test::ScopedKeyValueConfig scoped_field_trial_; TaskQueueForTest task_queue_; std::unique_ptr qs_; std::unique_ptr handler_; diff --git a/modules/video_coding/utility/simulcast_rate_allocator.cc b/modules/video_coding/utility/simulcast_rate_allocator.cc index 1496934e1c..1157d1a76f 100644 --- a/modules/video_coding/utility/simulcast_rate_allocator.cc +++ b/modules/video_coding/utility/simulcast_rate_allocator.cc @@ -13,16 +13,21 @@ #include #include -#include #include #include -#include #include #include +#include "api/environment/environment.h" +#include "api/units/data_rate.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/simulcast_stream.h" +#include "api/video_codecs/video_codec.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/rate_control_settings.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { @@ -58,10 +63,11 @@ float SimulcastRateAllocator::GetTemporalRateAllocation( return kLayerRateAllocation[num_layers - 1][temporal_id]; } -SimulcastRateAllocator::SimulcastRateAllocator(const VideoCodec& codec) +SimulcastRateAllocator::SimulcastRateAllocator(const Environment& env, + const VideoCodec& codec) : codec_(codec), - stable_rate_settings_(StableTargetRateExperiment::ParseFromFieldTrials()), - rate_control_settings_(RateControlSettings::ParseFromFieldTrials()), + stable_rate_settings_(env.field_trials()), + rate_control_settings_(env.field_trials()), legacy_conference_mode_(false) {} SimulcastRateAllocator::~SimulcastRateAllocator() = 
default; @@ -277,7 +283,7 @@ void SimulcastRateAllocator::DistributeAllocationToTemporalLayers( std::vector SimulcastRateAllocator::DefaultTemporalLayerAllocation( int bitrate_kbps, - int max_bitrate_kbps, + int /* max_bitrate_kbps */, int simulcast_id) const { const size_t num_temporal_layers = NumTemporalStreams(simulcast_id); std::vector bitrates; diff --git a/modules/video_coding/utility/simulcast_rate_allocator.h b/modules/video_coding/utility/simulcast_rate_allocator.h index 6f93dbde74..34da59ef31 100644 --- a/modules/video_coding/utility/simulcast_rate_allocator.h +++ b/modules/video_coding/utility/simulcast_rate_allocator.h @@ -16,6 +16,8 @@ #include +#include "api/environment/environment.h" +#include "api/units/data_rate.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_bitrate_allocator.h" #include "api/video_codecs/video_codec.h" @@ -26,7 +28,7 @@ namespace webrtc { class SimulcastRateAllocator : public VideoBitrateAllocator { public: - explicit SimulcastRateAllocator(const VideoCodec& codec); + SimulcastRateAllocator(const Environment& env, const VideoCodec& codec); ~SimulcastRateAllocator() override; SimulcastRateAllocator(const SimulcastRateAllocator&) = delete; diff --git a/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc b/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc index 24d7c58bcd..dc15e7d6a2 100644 --- a/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc +++ b/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc @@ -10,21 +10,29 @@ #include "modules/video_coding/utility/simulcast_rate_allocator.h" +#include +#include #include #include -#include #include +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/units/data_rate.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/video_codec.h" #include "api/video_codecs/vp8_frame_buffer_controller.h" #include "api/video_codecs/vp8_frame_config.h" -#include "api/video_codecs/vp8_temporal_layers.h" #include "rtc_base/checks.h" -#include "test/field_trial.h" +#include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" namespace webrtc { namespace { +using test::ExplicitKeyValueConfig; using ::testing::_; constexpr uint32_t kFramerateFps = 5; @@ -90,9 +98,8 @@ class SimulcastRateAllocatorTest : public ::testing::TestWithParam { EXPECT_EQ(sum, actual.get_sum_bps()); } - void CreateAllocator(bool legacy_conference_mode = false) { - allocator_.reset(new SimulcastRateAllocator(codec_)); - allocator_->SetLegacyConferenceMode(legacy_conference_mode); + void CreateAllocator(Environment env = CreateEnvironment()) { + allocator_ = std::make_unique(env, codec_); } void SetupCodec3SL3TL(const std::vector& active_streams) { @@ -298,11 +305,11 @@ TEST_F(SimulcastRateAllocatorTest, Regular3TLTemporalRateAllocation) { } TEST_F(SimulcastRateAllocatorTest, BaseHeavy3TLTemporalRateAllocation) { - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-UseBaseHeavyVP8TL3RateAllocation/Enabled/"); SetupCodec3SL3TL({true, true, true}); - CreateAllocator(); + CreateAllocator(CreateEnvironment(&field_trials)); const VideoBitrateAllocation alloc = GetAllocation(kMinBitrateKbps); // 60/20/20. 
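SimulcastRateAllocator now takes an Environment, and field trials reach it through CreateEnvironment(&field_trials) exactly as in the updated tests above. A sketch, assuming `codec` is an already-configured simulcast VideoCodec; the two-argument VideoBitrateAllocationParameters(total_bitrate_bps, framerate) constructor is my reading of the pre-existing API, not something this patch changes:

#include <memory>

#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"
#include "api/video/video_bitrate_allocation.h"
#include "api/video/video_bitrate_allocator.h"
#include "api/video_codecs/video_codec.h"
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "test/explicit_key_value_config.h"

namespace webrtc {

VideoBitrateAllocation AllocateWithStableRate(const VideoCodec& codec) {
  test::ExplicitKeyValueConfig field_trials(
      "WebRTC-StableTargetRate/enabled:true,video_hysteresis_factor:1.1/");
  Environment env = CreateEnvironment(&field_trials);
  auto allocator = std::make_unique<SimulcastRateAllocator>(env, codec);
  // 1.5 Mbps at 30 fps; assumed parameter order (total bitrate bps, framerate).
  return allocator->Allocate(VideoBitrateAllocationParameters(1500000, 30));
}

}  // namespace webrtc
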
@@ -583,13 +590,13 @@ TEST_F(SimulcastRateAllocatorTest, NonConferenceModeScreenshare) { } TEST_F(SimulcastRateAllocatorTest, StableRate) { - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-StableTargetRate/" "enabled:true," "video_hysteresis_factor:1.1/"); SetupCodec3SL3TL({true, true, true}); - CreateAllocator(); + CreateAllocator(CreateEnvironment(&field_trials)); // Let the volatile rate always be be enough for all streams, in this test we // are only interested in how the stable rate affects enablement. @@ -685,7 +692,8 @@ INSTANTIATE_TEST_SUITE_P(ScreenshareTest, TEST_P(ScreenshareRateAllocationTest, ConferenceBitrateBelowTl0) { SetupConferenceScreenshare(GetParam()); - CreateAllocator(true); + CreateAllocator(); + allocator_->SetLegacyConferenceMode(true); VideoBitrateAllocation allocation = allocator_->Allocate(VideoBitrateAllocationParameters( @@ -700,7 +708,8 @@ TEST_P(ScreenshareRateAllocationTest, ConferenceBitrateBelowTl0) { TEST_P(ScreenshareRateAllocationTest, ConferenceBitrateAboveTl0) { SetupConferenceScreenshare(GetParam()); - CreateAllocator(true); + CreateAllocator(); + allocator_->SetLegacyConferenceMode(true); uint32_t target_bitrate_kbps = (kLegacyScreenshareTargetBitrateKbps + kLegacyScreenshareMaxBitrateKbps) / @@ -721,7 +730,8 @@ TEST_P(ScreenshareRateAllocationTest, ConferenceBitrateAboveTl0) { TEST_F(ScreenshareRateAllocationTest, ConferenceBitrateAboveTl1) { // This test is only for the non-simulcast case. SetupConferenceScreenshare(false); - CreateAllocator(true); + CreateAllocator(); + allocator_->SetLegacyConferenceMode(true); VideoBitrateAllocation allocation = allocator_->Allocate(VideoBitrateAllocationParameters( diff --git a/modules/video_coding/utility/simulcast_test_fixture_impl.cc b/modules/video_coding/utility/simulcast_test_fixture_impl.cc index 338835ebb9..a8a3e80fdd 100644 --- a/modules/video_coding/utility/simulcast_test_fixture_impl.cc +++ b/modules/video_coding/utility/simulcast_test_fixture_impl.cc @@ -11,17 +11,37 @@ #include "modules/video_coding/utility/simulcast_test_fixture_impl.h" #include -#include +#include +#include +#include #include +#include #include +#include "api/environment/environment_factory.h" +#include "api/scoped_refptr.h" +#include "api/test/mock_video_decoder.h" +#include "api/test/mock_video_encoder.h" #include "api/video/encoded_image.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_bitrate_allocator.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/simulcast_stream.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_decoder.h" +#include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder.h" -#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "api/video_codecs/video_encoder_factory.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_coding_defines.h" +#include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "rtc_base/checks.h" +#include "test/gmock.h" #include "test/gtest.h" using ::testing::_; @@ -101,7 +121,7 @@ class SimulcastTestFixtureImpl::TestEncodedImageCallback temporal_layer_[encoded_image.SimulcastIndex().value_or(0)] = codec_specific_info->codecSpecific.H264.temporal_idx; } 
- return Result(Result::OK, encoded_image.Timestamp()); + return Result(Result::OK, encoded_image.RtpTimestamp()); } // This method only makes sense for VP8. void GetLastEncodedFrameInfo(int* temporal_layer, @@ -129,7 +149,7 @@ class SimulcastTestFixtureImpl::TestDecodedImageCallback public: TestDecodedImageCallback() : decoded_frames_(0) {} int32_t Decoded(VideoFrame& decoded_image) override { - rtc::scoped_refptr i420_buffer = + scoped_refptr i420_buffer = decoded_image.video_frame_buffer()->ToI420(); for (int i = 0; i < decoded_image.width(); ++i) { EXPECT_NEAR(kColorY, i420_buffer->DataY()[i], 1); @@ -143,13 +163,14 @@ class SimulcastTestFixtureImpl::TestDecodedImageCallback decoded_frames_++; return 0; } - int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override { + int32_t Decoded(VideoFrame& /* decoded_image */, + int64_t /* decode_time_ms */) override { RTC_DCHECK_NOTREACHED(); return -1; } void Decoded(VideoFrame& decoded_image, - absl::optional decode_time_ms, - absl::optional qp) override { + std::optional /* decode_time_ms */, + std::optional /* qp */) override { Decoded(decoded_image); } int DecodedFrames() { return decoded_frames_; } @@ -171,7 +192,7 @@ void SetPlane(uint8_t* data, uint8_t value, int width, int height, int stride) { } // Fills in an I420Buffer from `plane_colors`. -void CreateImage(const rtc::scoped_refptr& buffer, +void CreateImage(const scoped_refptr& buffer, int plane_colors[kNumOfPlanes]) { SetPlane(buffer->MutableDataY(), plane_colors[0], buffer->width(), buffer->height(), buffer->StrideY()); @@ -244,12 +265,19 @@ void SimulcastTestFixtureImpl::DefaultSettings( &settings->simulcastStream[layer_order[2]], temporal_layer_profile[2]); settings->SetFrameDropEnabled(true); - if (codec_type == kVideoCodecVP8) { - settings->VP8()->denoisingOn = true; - settings->VP8()->automaticResizeOn = false; - settings->VP8()->keyFrameInterval = 3000; - } else { - settings->H264()->keyFrameInterval = 3000; + switch (codec_type) { + case kVideoCodecVP8: + settings->VP8()->denoisingOn = true; + settings->VP8()->automaticResizeOn = false; + settings->VP8()->keyFrameInterval = 3000; + break; + case kVideoCodecH264: + settings->H264()->keyFrameInterval = 3000; + break; + case kVideoCodecVP9: + break; + default: + RTC_CHECK_NOTREACHED(); } } @@ -257,9 +285,10 @@ SimulcastTestFixtureImpl::SimulcastTestFixtureImpl( std::unique_ptr encoder_factory, std::unique_ptr decoder_factory, SdpVideoFormat video_format) - : codec_type_(PayloadStringToCodecType(video_format.name)) { - encoder_ = encoder_factory->CreateVideoEncoder(video_format); - decoder_ = decoder_factory->CreateVideoDecoder(video_format); + : env_(CreateEnvironment()), + codec_type_(PayloadStringToCodecType(video_format.name)) { + encoder_ = encoder_factory->Create(env_, video_format); + decoder_ = decoder_factory->Create(env_, video_format); SetUpCodec((codec_type_ == kVideoCodecVP8 || codec_type_ == kVideoCodecH264) ? 
kDefaultTemporalLayerProfile : kNoTemporalLayerProfile); @@ -291,7 +320,7 @@ void SimulcastTestFixtureImpl::SetUpCodec(const int* temporal_layer_profile) { } void SimulcastTestFixtureImpl::SetUpRateAllocator() { - rate_allocator_.reset(new SimulcastRateAllocator(settings_)); + rate_allocator_ = std::make_unique(env_, settings_); } void SimulcastTestFixtureImpl::SetRates(uint32_t bitrate_kbps, uint32_t fps) { @@ -311,11 +340,11 @@ void SimulcastTestFixtureImpl::RunActiveStreamsTest( SetRates(kMaxBitrates[0] + kMaxBitrates[1] + kMaxBitrates[2], 30); ExpectStreams(VideoFrameType::kVideoFrameKey, active_streams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, active_streams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -396,32 +425,32 @@ void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnAllStreams() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, kNumberOfSimulcastStreams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); frame_types[0] = VideoFrameType::kVideoFrameKey; ExpectStreams(VideoFrameType::kVideoFrameKey, kNumberOfSimulcastStreams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); std::fill(frame_types.begin(), frame_types.end(), VideoFrameType::kVideoFrameDelta); frame_types[1] = VideoFrameType::kVideoFrameKey; ExpectStreams(VideoFrameType::kVideoFrameKey, kNumberOfSimulcastStreams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); std::fill(frame_types.begin(), frame_types.end(), VideoFrameType::kVideoFrameDelta); frame_types[2] = VideoFrameType::kVideoFrameKey; ExpectStreams(VideoFrameType::kVideoFrameKey, kNumberOfSimulcastStreams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); std::fill(frame_types.begin(), frame_types.end(), VideoFrameType::kVideoFrameDelta); ExpectStreams(VideoFrameType::kVideoFrameDelta, kNumberOfSimulcastStreams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -435,14 +464,14 @@ void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnSpecificStreams() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, kNumberOfSimulcastStreams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); frame_types[0] = VideoFrameType::kVideoFrameKey; ExpectStream(VideoFrameType::kVideoFrameKey, 
kScaleResolutionDownBy[0]); ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[1]); ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[2]); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); std::fill(frame_types.begin(), frame_types.end(), @@ -451,7 +480,7 @@ void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnSpecificStreams() { ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[0]); ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[1]); ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[2]); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); std::fill(frame_types.begin(), frame_types.end(), @@ -460,7 +489,7 @@ void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnSpecificStreams() { ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[0]); ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[1]); ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[2]); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); std::fill(frame_types.begin(), frame_types.end(), @@ -470,7 +499,7 @@ void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnSpecificStreams() { ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[0]); ExpectStream(VideoFrameType::kVideoFrameDelta, kScaleResolutionDownBy[1]); ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[2]); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); std::fill(frame_types.begin(), frame_types.end(), @@ -478,13 +507,13 @@ void SimulcastTestFixtureImpl::TestKeyFrameRequestsOnSpecificStreams() { ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[0]); ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[1]); ExpectStream(VideoFrameType::kVideoFrameKey, kScaleResolutionDownBy[2]); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); std::fill(frame_types.begin(), frame_types.end(), VideoFrameType::kVideoFrameDelta); ExpectStreams(VideoFrameType::kVideoFrameDelta, kNumberOfSimulcastStreams); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -497,7 +526,7 @@ void SimulcastTestFixtureImpl::TestPaddingAllStreams() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, 1); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -510,7 +539,7 @@ void SimulcastTestFixtureImpl::TestPaddingTwoStreams() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); 
ExpectStreams(VideoFrameType::kVideoFrameDelta, 1); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -524,7 +553,7 @@ void SimulcastTestFixtureImpl::TestPaddingTwoStreamsOneMaxedOut() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, 1); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -537,7 +566,7 @@ void SimulcastTestFixtureImpl::TestPaddingOneStream() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, 2); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -551,7 +580,7 @@ void SimulcastTestFixtureImpl::TestPaddingOneStreamTwoMaxedOut() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, 2); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -564,7 +593,7 @@ void SimulcastTestFixtureImpl::TestSendAllStreams() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, 3); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -577,40 +606,40 @@ void SimulcastTestFixtureImpl::TestDisablingStreams() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); ExpectStreams(VideoFrameType::kVideoFrameDelta, 3); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); // We should only get two streams and padding for one. SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30); ExpectStreams(VideoFrameType::kVideoFrameDelta, 2); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); // We should only get the first stream and padding for two. SetRates(kTargetBitrates[0] + kMinBitrates[1] / 2, 30); ExpectStreams(VideoFrameType::kVideoFrameDelta, 1); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); // We don't have enough bitrate for the thumbnail stream, but we should get // it anyway with current configuration. SetRates(kTargetBitrates[0] - 1, 30); ExpectStreams(VideoFrameType::kVideoFrameDelta, 1); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); // We should only get two streams and padding for one. 
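The fixture constructor above moves from CreateVideoEncoder()/CreateVideoDecoder() to the Environment-taking Create() factory methods. A minimal sketch, assuming `encoder_factory` and `decoder_factory` are existing VideoEncoderFactory/VideoDecoderFactory implementations:

#include <memory>

#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_decoder.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/video_encoder_factory.h"

namespace webrtc {

void CreateCodecPair(VideoEncoderFactory& encoder_factory,
                     VideoDecoderFactory& decoder_factory) {
  const Environment env = CreateEnvironment();
  // Previously: encoder_factory.CreateVideoEncoder(format) and
  //             decoder_factory.CreateVideoDecoder(format).
  std::unique_ptr<VideoEncoder> encoder =
      encoder_factory.Create(env, SdpVideoFormat("VP8"));
  std::unique_ptr<VideoDecoder> decoder =
      decoder_factory.Create(env, SdpVideoFormat("VP8"));
}

}  // namespace webrtc
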
SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30); // We get a key frame because a new stream is being enabled. ExpectStreams(VideoFrameType::kVideoFrameKey, 2); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); // We should get all three streams. SetRates(kTargetBitrates[0] + kTargetBitrates[1] + kTargetBitrates[2], 30); // We get a key frame because a new stream is being enabled. ExpectStreams(VideoFrameType::kVideoFrameKey, 3); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, &frame_types)); } @@ -744,7 +773,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers333PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #1. - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(2, 2, 2, expected_temporal_idx); SetExpectedValues3(true, true, true, expected_layer_sync); @@ -752,7 +781,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers333PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #2. - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(1, 1, 1, expected_temporal_idx); SetExpectedValues3(true, true, true, expected_layer_sync); @@ -760,7 +789,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers333PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #3. - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(2, 2, 2, expected_temporal_idx); SetExpectedValues3(false, false, false, expected_layer_sync); @@ -768,7 +797,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers333PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #4. - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(0, 0, 0, expected_temporal_idx); SetExpectedValues3(false, false, false, expected_layer_sync); @@ -776,7 +805,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers333PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #5. - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(2, 2, 2, expected_temporal_idx); SetExpectedValues3(is_h264, is_h264, is_h264, expected_layer_sync); @@ -814,7 +843,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers321PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #1. 
- input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(2, 1, 255, expected_temporal_idx); SetExpectedValues3(true, true, false, expected_layer_sync); @@ -822,7 +851,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers321PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #2. - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(1, 0, 255, expected_temporal_idx); SetExpectedValues3(true, false, false, expected_layer_sync); @@ -830,7 +859,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers321PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #3. - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(2, 1, 255, expected_temporal_idx); SetExpectedValues3(false, false, false, expected_layer_sync); @@ -838,7 +867,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers321PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #4. - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(0, 0, 255, expected_temporal_idx); SetExpectedValues3(false, false, false, expected_layer_sync); @@ -846,7 +875,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers321PatternEncoder() { &encoder_callback, expected_temporal_idx, expected_layer_sync, 3); // Next frame: #5. 
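Most of the remaining churn in this fixture is the RTP-timestamp rename: VideoFrame::timestamp()/set_timestamp() become rtp_timestamp()/set_rtp_timestamp(), and EncodedImage::Timestamp()/SetTimestamp() become RtpTimestamp()/SetRtpTimestamp(); the values stay in 90 kHz RTP units. A tiny sketch with a default-constructed EncodedImage:

#include <cstdint>

#include "api/video/encoded_image.h"

namespace webrtc {

uint32_t AdvanceOneSecond(EncodedImage& image) {
  // Post-patch spellings; the pre-patch equivalents were image.SetTimestamp()
  // and image.Timestamp().
  image.SetRtpTimestamp(image.RtpTimestamp() + 90000);  // +1 s at 90 kHz
  return image.RtpTimestamp();
}

}  // namespace webrtc
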
- input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); SetExpectedValues3(2, 1, 255, expected_temporal_idx); SetExpectedValues3(false, true, false, expected_layer_sync); @@ -888,7 +917,7 @@ void SimulcastTestFixtureImpl::TestStrideEncodeDecode() { plane_offset[kUPlane] += 1; plane_offset[kVPlane] += 1; CreateImage(input_buffer_, plane_offset); - input_frame_->set_timestamp(input_frame_->timestamp() + 3000); + input_frame_->set_rtp_timestamp(input_frame_->rtp_timestamp() + 3000); EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); EncodedImage encoded_frame; @@ -911,9 +940,9 @@ void SimulcastTestFixtureImpl::TestDecodeWidthHeightSet() { EXPECT_CALL(encoder_callback, OnEncodedImage(_, _)) .Times(3) - .WillRepeatedly( - ::testing::Invoke([&](const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info) { + .WillRepeatedly(::testing::Invoke( + [&](const EncodedImage& encoded_image, + const CodecSpecificInfo* /* codec_specific_info */) { EXPECT_EQ(encoded_image._frameType, VideoFrameType::kVideoFrameKey); size_t index = encoded_image.SimulcastIndex().value_or(0); @@ -926,30 +955,33 @@ void SimulcastTestFixtureImpl::TestDecodeWidthHeightSet() { EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL)); EXPECT_CALL(decoder_callback, Decoded(_, _, _)) - .WillOnce(::testing::Invoke([](VideoFrame& decodedImage, - absl::optional decode_time_ms, - absl::optional qp) { - EXPECT_EQ(decodedImage.width(), kDefaultWidth / 4); - EXPECT_EQ(decodedImage.height(), kDefaultHeight / 4); - })); + .WillOnce( + ::testing::Invoke([](VideoFrame& decodedImage, + std::optional /* decode_time_ms */, + std::optional /* qp */) { + EXPECT_EQ(decodedImage.width(), kDefaultWidth / 4); + EXPECT_EQ(decodedImage.height(), kDefaultHeight / 4); + })); EXPECT_EQ(0, decoder_->Decode(encoded_frame[0], 0)); EXPECT_CALL(decoder_callback, Decoded(_, _, _)) - .WillOnce(::testing::Invoke([](VideoFrame& decodedImage, - absl::optional decode_time_ms, - absl::optional qp) { - EXPECT_EQ(decodedImage.width(), kDefaultWidth / 2); - EXPECT_EQ(decodedImage.height(), kDefaultHeight / 2); - })); + .WillOnce( + ::testing::Invoke([](VideoFrame& decodedImage, + std::optional /* decode_time_ms */, + std::optional /* qp */) { + EXPECT_EQ(decodedImage.width(), kDefaultWidth / 2); + EXPECT_EQ(decodedImage.height(), kDefaultHeight / 2); + })); EXPECT_EQ(0, decoder_->Decode(encoded_frame[1], 0)); EXPECT_CALL(decoder_callback, Decoded(_, _, _)) - .WillOnce(::testing::Invoke([](VideoFrame& decodedImage, - absl::optional decode_time_ms, - absl::optional qp) { - EXPECT_EQ(decodedImage.width(), kDefaultWidth); - EXPECT_EQ(decodedImage.height(), kDefaultHeight); - })); + .WillOnce( + ::testing::Invoke([](VideoFrame& decodedImage, + std::optional /* decode_time_ms */, + std::optional /* qp */) { + EXPECT_EQ(decodedImage.width(), kDefaultWidth); + EXPECT_EQ(decodedImage.height(), kDefaultHeight); + })); EXPECT_EQ(0, decoder_->Decode(encoded_frame[2], 0)); } diff --git a/modules/video_coding/utility/simulcast_test_fixture_impl.h b/modules/video_coding/utility/simulcast_test_fixture_impl.h index f142ab4813..79d891e812 100644 --- a/modules/video_coding/utility/simulcast_test_fixture_impl.h +++ b/modules/video_coding/utility/simulcast_test_fixture_impl.h @@ -11,15 +11,24 @@ #ifndef MODULES_VIDEO_CODING_UTILITY_SIMULCAST_TEST_FIXTURE_IMPL_H_ #define MODULES_VIDEO_CODING_UTILITY_SIMULCAST_TEST_FIXTURE_IMPL_H_ +#include 
#include #include +#include "api/environment/environment.h" +#include "api/scoped_refptr.h" #include "api/test/mock_video_decoder.h" #include "api/test/mock_video_encoder.h" #include "api/test/simulcast_test_fixture.h" #include "api/video/i420_buffer.h" +#include "api/video/video_codec_type.h" #include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" @@ -78,12 +87,13 @@ class SimulcastTestFixtureImpl final : public SimulcastTestFixture { int num_spatial_layers); void SwitchingToOneStream(int width, int height); + const Environment env_; std::unique_ptr encoder_; MockEncodedImageCallback encoder_callback_; std::unique_ptr decoder_; MockDecodedImageCallback decoder_callback_; VideoCodec settings_; - rtc::scoped_refptr input_buffer_; + scoped_refptr input_buffer_; std::unique_ptr input_frame_; std::unique_ptr rate_allocator_; VideoCodecType codec_type_; diff --git a/modules/video_coding/utility/simulcast_utility.cc b/modules/video_coding/utility/simulcast_utility.cc index 95e9488b01..ec1ad067cb 100644 --- a/modules/video_coding/utility/simulcast_utility.cc +++ b/modules/video_coding/utility/simulcast_utility.cc @@ -12,9 +12,13 @@ #include #include +#include +#include "api/video/video_codec_type.h" +#include "api/video_codecs/video_codec.h" #include "modules/video_coding/svc/scalability_mode_util.h" #include "rtc_base/checks.h" +#include "video/config/video_encoder_config.h" namespace webrtc { @@ -75,7 +79,18 @@ bool SimulcastUtility::ValidSimulcastParameters(const VideoCodec& codec, bool SimulcastUtility::IsConferenceModeScreenshare(const VideoCodec& codec) { return codec.mode == VideoCodecMode::kScreensharing && - codec.legacy_conference_mode; + codec.legacy_conference_mode && + (codec.codecType == kVideoCodecVP8 || + codec.codecType == kVideoCodecH264); +} + +bool SimulcastUtility::IsConferenceModeScreenshare( + const VideoEncoderConfig& encoder_config) { + return encoder_config.content_type == + VideoEncoderConfig::ContentType::kScreen && + encoder_config.legacy_conference_mode && + (encoder_config.codec_type == webrtc::VideoCodecType::kVideoCodecVP8 || + encoder_config.codec_type == webrtc::VideoCodecType::kVideoCodecH264); } int SimulcastUtility::NumberOfTemporalLayers(const VideoCodec& codec, @@ -94,7 +109,11 @@ int SimulcastUtility::NumberOfTemporalLayers(const VideoCodec& codec, case kVideoCodecH264: num_temporal_layers = codec.H264().numberOfTemporalLayers; break; - default: + // For AV1 and H.265 we get temporal layer count from scalability mode, + // instead of from codec-specifics.
+ case kVideoCodecAV1: + case kVideoCodecH265: + case kVideoCodecGeneric: break; } } diff --git a/modules/video_coding/utility/simulcast_utility.h b/modules/video_coding/utility/simulcast_utility.h index e25a594360..8fda339c09 100644 --- a/modules/video_coding/utility/simulcast_utility.h +++ b/modules/video_coding/utility/simulcast_utility.h @@ -14,10 +14,12 @@ #include #include "api/video_codecs/video_codec.h" +#include "rtc_base/system/rtc_export.h" +#include "video/config/video_encoder_config.h" namespace webrtc { -class SimulcastUtility { +class RTC_EXPORT SimulcastUtility { public: static uint32_t SumStreamMaxBitrate(int streams, const VideoCodec& codec); static int NumberOfSimulcastStreams(const VideoCodec& codec); @@ -26,6 +28,8 @@ class SimulcastUtility { static int NumberOfTemporalLayers(const VideoCodec& codec, int spatial_id); // TODO(sprang): Remove this hack when ScreenshareLayers is gone. static bool IsConferenceModeScreenshare(const VideoCodec& codec); + static bool IsConferenceModeScreenshare( + const VideoEncoderConfig& encoder_config); }; } // namespace webrtc diff --git a/modules/video_coding/utility/vp8_constants.h b/modules/video_coding/utility/vp8_constants.h index 9321864dbc..6b2498e98d 100644 --- a/modules/video_coding/utility/vp8_constants.h +++ b/modules/video_coding/utility/vp8_constants.h @@ -14,7 +14,6 @@ #include #include -#include namespace webrtc { diff --git a/modules/video_coding/utility/vp8_header_parser.cc b/modules/video_coding/utility/vp8_header_parser.cc index 80026f9a0f..c446fe1c54 100644 --- a/modules/video_coding/utility/vp8_header_parser.cc +++ b/modules/video_coding/utility/vp8_header_parser.cc @@ -9,8 +9,10 @@ */ #include "modules/video_coding/utility/vp8_header_parser.h" +#include +#include + #include "rtc_base/logging.h" -#include "rtc_base/system/arch.h" namespace webrtc { diff --git a/modules/video_coding/utility/vp9_constants.h b/modules/video_coding/utility/vp9_constants.h index af2c701b82..97c8eafb36 100644 --- a/modules/video_coding/utility/vp9_constants.h +++ b/modules/video_coding/utility/vp9_constants.h @@ -14,7 +14,6 @@ #include #include -#include namespace webrtc { diff --git a/modules/video_coding/utility/vp9_uncompressed_header_parser.cc b/modules/video_coding/utility/vp9_uncompressed_header_parser.cc index bf9d51f692..ca3c8595c4 100644 --- a/modules/video_coding/utility/vp9_uncompressed_header_parser.cc +++ b/modules/video_coding/utility/vp9_uncompressed_header_parser.cc @@ -9,8 +9,14 @@ */ #include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" -#include "absl/numeric/bits.h" +#include +#include +#include +#include + #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "modules/video_coding/utility/vp9_constants.h" #include "rtc_base/bitstream_reader.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" @@ -252,10 +258,9 @@ const Vp9InterpolationFilter kLiteralToType[4] = { std::string Vp9UncompressedHeader::ToString() const { char buf[1024]; - rtc::SimpleStringBuilder oss(buf); + SimpleStringBuilder oss(buf); - oss << "Vp9UncompressedHeader { " - << "profile = " << profile; + oss << "Vp9UncompressedHeader { " << "profile = " << profile; if (show_existing_frame) { oss << ", show_existing_frame = " << *show_existing_frame << " }"; @@ -488,6 +493,8 @@ void Parse(BitstreamReader& br, // Frame context index. 
frame_info->frame_context_idx = br.ReadBits(2); + frame_info->loop_filter_params_offset_bits = + total_buffer_size_bits - br.RemainingBitCount(); Vp9ReadLoopfilter(br); // Read base QP. @@ -505,21 +512,21 @@ void Parse(BitstreamReader& br, (total_buffer_size_bits / 8) - (br.RemainingBitCount() / 8); } -absl::optional ParseUncompressedVp9Header( - rtc::ArrayView buf) { +std::optional ParseUncompressedVp9Header( + ArrayView buf) { BitstreamReader reader(buf); Vp9UncompressedHeader frame_info; Parse(reader, &frame_info, /*qp_only=*/false); if (reader.Ok() && frame_info.frame_width > 0) { return frame_info; } - return absl::nullopt; + return std::nullopt; } namespace vp9 { bool GetQp(const uint8_t* buf, size_t length, int* qp) { - BitstreamReader reader(rtc::MakeArrayView(buf, length)); + BitstreamReader reader(MakeArrayView(buf, length)); Vp9UncompressedHeader frame_info; Parse(reader, &frame_info, /*qp_only=*/true); if (!reader.Ok()) { diff --git a/modules/video_coding/utility/vp9_uncompressed_header_parser.h b/modules/video_coding/utility/vp9_uncompressed_header_parser.h index 8d1b88c3d3..0153a3b579 100644 --- a/modules/video_coding/utility/vp9_uncompressed_header_parser.h +++ b/modules/video_coding/utility/vp9_uncompressed_header_parser.h @@ -16,9 +16,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "modules/video_coding/utility/vp9_constants.h" @@ -90,14 +90,14 @@ enum class Vp9InterpolationFilter : uint8_t { struct Vp9UncompressedHeader { int profile = 0; // Profiles 0-3 are valid. - absl::optional show_existing_frame; + std::optional show_existing_frame; bool is_keyframe = false; bool show_frame = false; bool error_resilient = false; Vp9BitDept bit_detph = Vp9BitDept::k8Bit; - absl::optional color_space; - absl::optional color_range; - absl::optional sub_sampling; + std::optional color_space; + std::optional color_range; + std::optional sub_sampling; int frame_width = 0; int frame_height = 0; int render_width = 0; @@ -105,7 +105,10 @@ struct Vp9UncompressedHeader { // Width/height of the tiles used (in units of 8x8 blocks). size_t tile_cols_log2 = 0; // tile_cols = 1 << tile_cols_log2 size_t tile_rows_log2 = 0; // tile_rows = 1 << tile_rows_log2 - absl::optional render_size_offset_bits; + std::optional render_size_offset_bits; + // Number of bits from the start of the frame header to where the loop filter + // parameters are located. + std::optional loop_filter_params_offset_bits; Vp9InterpolationFilter interpolation_filter = Vp9InterpolationFilter::kEightTap; bool allow_high_precision_mv = false; @@ -114,10 +117,10 @@ struct Vp9UncompressedHeader { uint8_t frame_context_idx = 0; bool segmentation_enabled = false; - absl::optional> segmentation_tree_probs; - absl::optional> segmentation_pred_prob; + std::optional> segmentation_tree_probs; + std::optional> segmentation_pred_prob; bool segmentation_is_delta = false; - std::array, kVp9SegLvlMax>, kVp9MaxSegments> + std::array, kVp9SegLvlMax>, kVp9MaxSegments> segmentation_features; // Which of the 8 reference buffers may be used as references for this frame. @@ -129,7 +132,7 @@ struct Vp9UncompressedHeader { std::bitset reference_buffers_sign_bias = 0; // Indicates which reference buffer [0,7] to infer the frame size from. - absl::optional infer_size_from_reference; + std::optional infer_size_from_reference; // Which of the 8 reference buffers are updated by this frame. 
std::bitset updated_buffers = 0; @@ -147,8 +150,8 @@ struct Vp9UncompressedHeader { // Parses the uncompressed header and populates (most) values in a // UncompressedHeader struct. Returns nullopt on failure. -absl::optional ParseUncompressedVp9Header( - rtc::ArrayView buf); +std::optional ParseUncompressedVp9Header( + ArrayView buf); } // namespace webrtc diff --git a/modules/video_coding/utility/vp9_uncompressed_header_parser_unittest.cc b/modules/video_coding/utility/vp9_uncompressed_header_parser_unittest.cc index d8cc738e07..1d93c35189 100644 --- a/modules/video_coding/utility/vp9_uncompressed_header_parser_unittest.cc +++ b/modules/video_coding/utility/vp9_uncompressed_header_parser_unittest.cc @@ -10,6 +10,10 @@ #include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" +#include +#include + +#include "modules/video_coding/utility/vp9_constants.h" #include "test/gmock.h" #include "test/gtest.h" @@ -31,7 +35,7 @@ TEST(Vp9UncompressedHeaderParserTest, FrameWithSegmentation) { 0x2e, 0x73, 0xb7, 0xee, 0x22, 0x06, 0x81, 0x82, 0xd4, 0xef, 0xc3, 0x58, 0x1f, 0x12, 0xd2, 0x7b, 0x28, 0x1f, 0x80, 0xfc, 0x07, 0xe0, 0x00, 0x00}; - absl::optional frame_info = + std::optional frame_info = ParseUncompressedVp9Header(kHeader); ASSERT_TRUE(frame_info.has_value()); @@ -56,6 +60,7 @@ TEST(Vp9UncompressedHeaderParserTest, FrameWithSegmentation) { EXPECT_EQ(frame_info->tile_cols_log2, 0u); EXPECT_EQ(frame_info->tile_rows_log2, 0u); EXPECT_EQ(frame_info->render_size_offset_bits, 64u); + EXPECT_EQ(frame_info->loop_filter_params_offset_bits, 100u); EXPECT_EQ(frame_info->compressed_header_size, 23u); EXPECT_EQ(frame_info->uncompressed_header_size, 37u); @@ -73,7 +78,7 @@ TEST(Vp9UncompressedHeaderParserTest, SegmentationWithDefaultPredProbs) { const uint8_t kHeader[] = {0x90, 0x49, 0x83, 0x42, 0x80, 0x2e, 0x30, 0x0, 0xb0, 0x0, 0x37, 0xff, 0x06, 0x80, 0x0, 0x0, 0x0, 0x0}; - absl::optional frame_info = + std::optional frame_info = ParseUncompressedVp9Header(kHeader); ASSERT_TRUE(frame_info.has_value()); EXPECT_THAT(frame_info->segmentation_pred_prob, @@ -84,7 +89,7 @@ TEST(Vp9UncompressedHeaderParserTest, SegmentationWithSkipLevel) { const uint8_t kHeader[] = {0x90, 0x49, 0x83, 0x42, 0x80, 0x2e, 0x30, 0x00, 0xb0, 0x00, 0x37, 0xff, 0x06, 0x80, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}; - absl::optional frame_info = + std::optional frame_info = ParseUncompressedVp9Header(kHeader); ASSERT_TRUE(frame_info.has_value()); EXPECT_THAT(frame_info->segmentation_features[0][kVp9SegLvlSkip], Eq(1)); diff --git a/modules/video_coding/video_codec_initializer.cc b/modules/video_coding/video_codec_initializer.cc index 43fab8c432..4ad2060f35 100644 --- a/modules/video_coding/video_codec_initializer.cc +++ b/modules/video_coding/video_codec_initializer.cc @@ -14,10 +14,18 @@ #include #include +#include +#include -#include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/scoped_refptr.h" #include "api/units/data_rate.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/simulcast_stream.h" +#include "api/video_codecs/spatial_layer.h" +#include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" #include "modules/video_coding/codecs/av1/av1_svc_config.h" #include "modules/video_coding/codecs/vp8/vp8_scalability.h" @@ -28,29 +36,28 @@ #include "rtc_base/experiments/min_video_bitrate_experiment.h" #include "rtc_base/logging.h" #include 
"rtc_base/numerics/safe_conversions.h" +#include "video/config/video_encoder_config.h" namespace webrtc { +namespace { -bool VideoCodecInitializer::SetupCodec(const VideoEncoderConfig& config, - const std::vector& streams, - VideoCodec* codec) { - if (config.codec_type == kVideoCodecMultiplex) { - VideoEncoderConfig associated_config = config.Copy(); - associated_config.codec_type = kVideoCodecVP9; - if (!SetupCodec(associated_config, streams, codec)) { - RTC_LOG(LS_ERROR) << "Failed to create stereo encoder configuration."; - return false; +constexpr ScalabilityMode kH265SupportedScalabilityModes[] = { + ScalabilityMode::kL1T1, ScalabilityMode::kL1T2, ScalabilityMode::kL1T3}; + +bool H265SupportsScalabilityMode(ScalabilityMode scalability_mode) { + for (const auto& entry : kH265SupportedScalabilityModes) { + if (entry == scalability_mode) { + return true; } - codec->codecType = kVideoCodecMultiplex; - return true; } - - *codec = VideoEncoderConfigToVideoCodec(config, streams); - return true; + return false; } +} // namespace + // TODO(sprang): Split this up and separate the codec specific parts. -VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( +VideoCodec VideoCodecInitializer::SetupCodec( + const FieldTrialsView& field_trials, const VideoEncoderConfig& config, const std::vector& streams) { static const int kEncoderMinBitrateKbps = 30; @@ -96,8 +103,7 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( int max_framerate = 0; - absl::optional scalability_mode = - streams[0].scalability_mode; + std::optional scalability_mode = streams[0].scalability_mode; for (size_t i = 0; i < streams.size(); ++i) { SimulcastStream* sim_stream = &video_codec.simulcastStream[i]; RTC_DCHECK_GT(streams[i].width, 0); @@ -236,18 +242,9 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( if (!config.spatial_layers.empty()) { // Layering is set explicitly. spatial_layers = config.spatial_layers; - } else if (scalability_mode.has_value()) { + } else if (video_codec.GetScalabilityMode().has_value()) { // Layering is set via scalability mode. spatial_layers = GetVp9SvcConfig(video_codec); - if (spatial_layers.empty()) - break; - // Use codec bitrate limits if spatial layering is not requested. - if (video_codec.numberOfSimulcastStreams <= 1 && - ScalabilityModeToNumSpatialLayers(*scalability_mode) == 1) { - spatial_layers.back().minBitrate = video_codec.minBitrate; - spatial_layers.back().targetBitrate = video_codec.maxBitrate; - spatial_layers.back().maxBitrate = video_codec.maxBitrate; - } } else { size_t first_active_layer = 0; for (size_t spatial_idx = 0; @@ -325,6 +322,14 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( streams.back().num_temporal_layers.value_or(1), /*num_spatial_layers=*/ std::max(config.spatial_layers.size(), 1))) { + // If min bitrate is set via RtpEncodingParameters, use this value on + // lowest spatial layer. 
+ if (!config.simulcast_layers.empty() && + config.simulcast_layers[0].min_bitrate_bps > 0) { + video_codec.spatialLayers[0].minBitrate = std::min( + config.simulcast_layers[0].min_bitrate_bps / 1000, + static_cast(video_codec.spatialLayers[0].targetBitrate)); + } for (size_t i = 0; i < config.spatial_layers.size(); ++i) { video_codec.spatialLayers[i].active = config.spatial_layers[i].active; } @@ -344,6 +349,28 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( kMaxTemporalStreams); break; } + case kVideoCodecH265: + RTC_DCHECK(!config.encoder_specific_settings) << "No encoder-specific " + "settings for H.265."; + + // Validate specified scalability modes. If some layer has an unsupported + // mode, store it as the top-level scalability mode, which will make + // InitEncode fail with an appropriate error. + for (const auto& stream : streams) { + if (stream.scalability_mode.has_value() && + !H265SupportsScalabilityMode(*stream.scalability_mode)) { + RTC_LOG(LS_WARNING) + << "Invalid scalability mode for H.265: " + << ScalabilityModeToString(*stream.scalability_mode); + video_codec.SetScalabilityMode(*stream.scalability_mode); + break; + } + } + video_codec.spatialLayers[0].minBitrate = video_codec.minBitrate; + video_codec.spatialLayers[0].targetBitrate = video_codec.maxBitrate; + video_codec.spatialLayers[0].maxBitrate = video_codec.maxBitrate; + video_codec.spatialLayers[0].active = codec_active; + break; default: // TODO(pbos): Support encoder_settings codec-agnostically. RTC_DCHECK(!config.encoder_specific_settings) @@ -351,14 +378,18 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec( break; } - const absl::optional experimental_min_bitrate = - GetExperimentalMinVideoBitrate(video_codec.codecType); + const std::optional experimental_min_bitrate = + GetExperimentalMinVideoBitrate(field_trials, video_codec.codecType); if (experimental_min_bitrate) { const int experimental_min_bitrate_kbps = - rtc::saturated_cast(experimental_min_bitrate->kbps()); + saturated_cast(experimental_min_bitrate->kbps()); video_codec.minBitrate = experimental_min_bitrate_kbps; video_codec.simulcastStream[0].minBitrate = experimental_min_bitrate_kbps; - if (video_codec.codecType == kVideoCodecVP9) { + if (video_codec.codecType == kVideoCodecVP9 || +#ifdef RTC_ENABLE_H265 + video_codec.codecType == kVideoCodecH265 || +#endif + video_codec.codecType == kVideoCodecAV1) { video_codec.spatialLayers[0].minBitrate = experimental_min_bitrate_kbps; } } diff --git a/modules/video_coding/video_codec_initializer_unittest.cc b/modules/video_coding/video_codec_initializer_unittest.cc index b0edab6004..849c780aca 100644 --- a/modules/video_coding/video_codec_initializer_unittest.cc +++ b/modules/video_coding/video_codec_initializer_unittest.cc @@ -14,21 +14,28 @@ #include #include +#include +#include -#include "absl/types/optional.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/make_ref_counted.h" #include "api/scoped_refptr.h" #include "api/test/mock_fec_controller_override.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_bitrate_allocator.h" #include "api/video/video_bitrate_allocator_factory.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/vp8_temporal_layers.h" +#include 
"api/video_codecs/vp8_frame_buffer_controller.h" #include "api/video_codecs/vp8_temporal_layers_factory.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "rtc_base/checks.h" -#include "test/gmock.h" #include "test/gtest.h" +#include "video/config/video_encoder_config.h" namespace webrtc { @@ -58,8 +65,8 @@ class VideoCodecInitializerTest : public ::testing::Test { protected: void SetUpFor(VideoCodecType type, - absl::optional num_simulcast_streams, - absl::optional num_spatial_streams, + std::optional num_simulcast_streams, + std::optional num_spatial_streams, int num_temporal_streams, bool screenshare) { config_ = VideoEncoderConfig(); @@ -77,31 +84,25 @@ class VideoCodecInitializerTest : public ::testing::Test { ASSERT_FALSE(num_spatial_streams.has_value()); VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings(); vp8_settings.numberOfTemporalLayers = num_temporal_streams; - config_.encoder_specific_settings = rtc::make_ref_counted< + config_.encoder_specific_settings = make_ref_counted< webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); } else if (type == VideoCodecType::kVideoCodecVP9) { ASSERT_TRUE(num_spatial_streams.has_value()); VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); vp9_settings.numberOfSpatialLayers = num_spatial_streams.value(); vp9_settings.numberOfTemporalLayers = num_temporal_streams; - config_.encoder_specific_settings = rtc::make_ref_counted< + config_.encoder_specific_settings = make_ref_counted< webrtc::VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); - } else if (type != VideoCodecType::kVideoCodecMultiplex) { - ADD_FAILURE() << "Unexpected codec type: " << type; } } - bool InitializeCodec() { - codec_out_ = VideoCodec(); + void InitializeCodec() { frame_buffer_controller_.reset(); - if (!VideoCodecInitializer::SetupCodec(config_, streams_, &codec_out_)) { - return false; - } - bitrate_allocator_ = CreateBuiltinVideoBitrateAllocatorFactory() - ->CreateVideoBitrateAllocator(codec_out_); + codec_out_ = VideoCodecInitializer::SetupCodec(env_.field_trials(), config_, + streams_); + bitrate_allocator_ = + CreateBuiltinVideoBitrateAllocatorFactory()->Create(env_, codec_out_); RTC_CHECK(bitrate_allocator_); - if (codec_out_.codecType == VideoCodecType::kVideoCodecMultiplex) - return true; // Make sure temporal layers instances have been created. if (codec_out_.codecType == VideoCodecType::kVideoCodecVP8) { @@ -111,13 +112,12 @@ class VideoCodecInitializerTest : public ::testing::Test { frame_buffer_controller_ = factory.Create(codec_out_, settings, &fec_controller_override_); } - return true; } VideoStream DefaultStream( int width = kDefaultWidth, int height = kDefaultHeight, - absl::optional scalability_mode = absl::nullopt) { + std::optional scalability_mode = std::nullopt) { VideoStream stream; stream.width = width; stream.height = height; @@ -143,6 +143,7 @@ class VideoCodecInitializerTest : public ::testing::Test { return stream; } + const Environment env_ = CreateEnvironment(); MockFecControllerOverride fec_controller_override_; // Input settings. 
@@ -156,9 +157,9 @@ class VideoCodecInitializerTest : public ::testing::Test { }; TEST_F(VideoCodecInitializerTest, SingleStreamVp8Screenshare) { - SetUpFor(VideoCodecType::kVideoCodecVP8, 1, absl::nullopt, 1, true); + SetUpFor(VideoCodecType::kVideoCodecVP8, 1, std::nullopt, 1, true); streams_.push_back(DefaultStream()); - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); VideoBitrateAllocation bitrate_allocation = bitrate_allocator_->Allocate(VideoBitrateAllocationParameters( @@ -169,11 +170,11 @@ TEST_F(VideoCodecInitializerTest, SingleStreamVp8Screenshare) { } TEST_F(VideoCodecInitializerTest, SingleStreamVp8ScreenshareInactive) { - SetUpFor(VideoCodecType::kVideoCodecVP8, 1, absl::nullopt, 1, true); + SetUpFor(VideoCodecType::kVideoCodecVP8, 1, std::nullopt, 1, true); VideoStream inactive_stream = DefaultStream(); inactive_stream.active = false; streams_.push_back(inactive_stream); - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); VideoBitrateAllocation bitrate_allocation = bitrate_allocator_->Allocate(VideoBitrateAllocationParameters( @@ -184,9 +185,9 @@ TEST_F(VideoCodecInitializerTest, SingleStreamVp8ScreenshareInactive) { } TEST_F(VideoCodecInitializerTest, TemporalLayeredVp8ScreenshareConference) { - SetUpFor(VideoCodecType::kVideoCodecVP8, 1, absl::nullopt, 2, true); + SetUpFor(VideoCodecType::kVideoCodecVP8, 1, std::nullopt, 2, true); streams_.push_back(DefaultScreenshareStream()); - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); bitrate_allocator_->SetLegacyConferenceMode(true); EXPECT_EQ(1u, codec_out_.numberOfSimulcastStreams); @@ -201,9 +202,9 @@ TEST_F(VideoCodecInitializerTest, TemporalLayeredVp8ScreenshareConference) { } TEST_F(VideoCodecInitializerTest, TemporalLayeredVp8Screenshare) { - SetUpFor(VideoCodecType::kVideoCodecVP8, 1, absl::nullopt, 2, true); + SetUpFor(VideoCodecType::kVideoCodecVP8, 1, std::nullopt, 2, true); streams_.push_back(DefaultScreenshareStream()); - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(1u, codec_out_.numberOfSimulcastStreams); EXPECT_EQ(2u, codec_out_.VP8()->numberOfTemporalLayers); @@ -216,12 +217,12 @@ TEST_F(VideoCodecInitializerTest, TemporalLayeredVp8Screenshare) { } TEST_F(VideoCodecInitializerTest, SimulcastVp8Screenshare) { - SetUpFor(VideoCodecType::kVideoCodecVP8, 2, absl::nullopt, 1, true); + SetUpFor(VideoCodecType::kVideoCodecVP8, 2, std::nullopt, 1, true); streams_.push_back(DefaultScreenshareStream()); VideoStream video_stream = DefaultStream(); video_stream.max_framerate = kScreenshareDefaultFramerate; streams_.push_back(video_stream); - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(2u, codec_out_.numberOfSimulcastStreams); EXPECT_EQ(1u, codec_out_.VP8()->numberOfTemporalLayers); @@ -240,13 +241,13 @@ TEST_F(VideoCodecInitializerTest, SimulcastVp8Screenshare) { // Tests that when a video stream is inactive, then the bitrate allocation will // be 0 for that stream. 
TEST_F(VideoCodecInitializerTest, SimulcastVp8ScreenshareInactive) { - SetUpFor(VideoCodecType::kVideoCodecVP8, 2, absl::nullopt, 1, true); + SetUpFor(VideoCodecType::kVideoCodecVP8, 2, std::nullopt, 1, true); streams_.push_back(DefaultScreenshareStream()); VideoStream inactive_video_stream = DefaultStream(); inactive_video_stream.active = false; inactive_video_stream.max_framerate = kScreenshareDefaultFramerate; streams_.push_back(inactive_video_stream); - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(2u, codec_out_.numberOfSimulcastStreams); EXPECT_EQ(1u, codec_out_.VP8()->numberOfTemporalLayers); @@ -265,12 +266,12 @@ TEST_F(VideoCodecInitializerTest, SimulcastVp8ScreenshareInactive) { TEST_F(VideoCodecInitializerTest, HighFpsSimulcastVp8Screenshare) { // Two simulcast streams, the lower one using legacy settings (two temporal // streams, 5fps), the higher one using 3 temporal streams and 30fps. - SetUpFor(VideoCodecType::kVideoCodecVP8, 2, absl::nullopt, 3, true); + SetUpFor(VideoCodecType::kVideoCodecVP8, 2, std::nullopt, 3, true); streams_.push_back(DefaultScreenshareStream()); VideoStream video_stream = DefaultStream(); video_stream.num_temporal_layers = 3; streams_.push_back(video_stream); - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(2u, codec_out_.numberOfSimulcastStreams); EXPECT_EQ(3u, codec_out_.VP8()->numberOfTemporalLayers); @@ -288,25 +289,19 @@ TEST_F(VideoCodecInitializerTest, HighFpsSimulcastVp8Screenshare) { bitrate_allocation.GetBitrate(1, 1)); } -TEST_F(VideoCodecInitializerTest, SingleStreamMultiplexCodec) { - SetUpFor(VideoCodecType::kVideoCodecMultiplex, absl::nullopt, 1, 1, true); - streams_.push_back(DefaultStream()); - EXPECT_TRUE(InitializeCodec()); -} - TEST_F(VideoCodecInitializerTest, Vp9SvcDefaultLayering) { - SetUpFor(VideoCodecType::kVideoCodecVP9, absl::nullopt, 3, 3, false); + SetUpFor(VideoCodecType::kVideoCodecVP9, std::nullopt, 3, 3, false); VideoStream stream = DefaultStream(); stream.num_temporal_layers = 3; streams_.push_back(stream); - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(codec_out_.VP9()->numberOfSpatialLayers, 3u); EXPECT_EQ(codec_out_.VP9()->numberOfTemporalLayers, 3u); } TEST_F(VideoCodecInitializerTest, Vp9SvcAdjustedLayering) { - SetUpFor(VideoCodecType::kVideoCodecVP9, absl::nullopt, 3, 3, false); + SetUpFor(VideoCodecType::kVideoCodecVP9, std::nullopt, 3, 3, false); VideoStream stream = DefaultStream(); stream.num_temporal_layers = 3; // Set resolution which is only enough to produce 2 spatial layers. 
@@ -315,18 +310,18 @@ TEST_F(VideoCodecInitializerTest, Vp9SvcAdjustedLayering) { streams_.push_back(stream); - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(codec_out_.VP9()->numberOfSpatialLayers, 2u); } TEST_F(VideoCodecInitializerTest, Vp9SingleSpatialLayerMaxBitrateIsEqualToCodecMaxBitrate) { - SetUpFor(VideoCodecType::kVideoCodecVP9, absl::nullopt, 1, 3, false); + SetUpFor(VideoCodecType::kVideoCodecVP9, std::nullopt, 1, 3, false); VideoStream stream = DefaultStream(); stream.num_temporal_layers = 3; streams_.push_back(stream); - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(codec_out_.spatialLayers[0].maxBitrate, kDefaultMaxBitrateBps / 1000); } @@ -339,7 +334,7 @@ TEST_F(VideoCodecInitializerTest, stream.scalability_mode = ScalabilityMode::kL1T1; streams_.push_back(stream); - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(1u, codec_out_.VP9()->numberOfSpatialLayers); EXPECT_EQ(codec_out_.spatialLayers[0].minBitrate, kDefaultMinBitrateBps / 1000); @@ -349,12 +344,12 @@ TEST_F(VideoCodecInitializerTest, TEST_F(VideoCodecInitializerTest, Vp9SingleSpatialLayerTargetBitrateIsEqualToCodecMaxBitrate) { - SetUpFor(VideoCodecType::kVideoCodecVP9, absl::nullopt, 1, 1, true); + SetUpFor(VideoCodecType::kVideoCodecVP9, std::nullopt, 1, 1, true); VideoStream stream = DefaultStream(); stream.num_temporal_layers = 1; streams_.push_back(stream); - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(codec_out_.spatialLayers[0].targetBitrate, kDefaultMaxBitrateBps / 1000); } @@ -364,14 +359,14 @@ TEST_F(VideoCodecInitializerTest, // Request 3 spatial layers for 320x180 input. Actual number of layers will be // reduced to 1 due to low input resolution but SVC bitrate limits should be // applied. - SetUpFor(VideoCodecType::kVideoCodecVP9, absl::nullopt, 3, 3, false); + SetUpFor(VideoCodecType::kVideoCodecVP9, std::nullopt, 3, 3, false); VideoStream stream = DefaultStream(); stream.width = 320; stream.height = 180; stream.num_temporal_layers = 3; streams_.push_back(stream); - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_LT(codec_out_.spatialLayers[0].maxBitrate, kDefaultMaxBitrateBps / 1000); } @@ -389,7 +384,7 @@ TEST_F(VideoCodecInitializerTest, stream.scalability_mode = ScalabilityMode::kL3T1; streams_.push_back(stream); - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(1u, codec_out_.VP9()->numberOfSpatialLayers); EXPECT_LT(codec_out_.spatialLayers[0].minBitrate, kDefaultMinBitrateBps / 1000); @@ -398,7 +393,7 @@ TEST_F(VideoCodecInitializerTest, } TEST_F(VideoCodecInitializerTest, Vp9DeactivateLayers) { - SetUpFor(VideoCodecType::kVideoCodecVP9, absl::nullopt, 3, 1, false); + SetUpFor(VideoCodecType::kVideoCodecVP9, std::nullopt, 3, 1, false); VideoStream stream = DefaultStream(); streams_.push_back(stream); @@ -408,7 +403,7 @@ TEST_F(VideoCodecInitializerTest, Vp9DeactivateLayers) { config_.simulcast_layers[0].active = true; config_.simulcast_layers[1].active = true; config_.simulcast_layers[2].active = true; - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(codec_out_.VP9()->numberOfSpatialLayers, 3); EXPECT_TRUE(codec_out_.spatialLayers[0].active); EXPECT_TRUE(codec_out_.spatialLayers[1].active); @@ -418,7 +413,7 @@ TEST_F(VideoCodecInitializerTest, Vp9DeactivateLayers) { config_.simulcast_layers[0].active = true; config_.simulcast_layers[1].active = true; config_.simulcast_layers[2].active = false; - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); 
EXPECT_EQ(codec_out_.VP9()->numberOfSpatialLayers, 3); EXPECT_TRUE(codec_out_.spatialLayers[0].active); EXPECT_TRUE(codec_out_.spatialLayers[1].active); @@ -428,7 +423,7 @@ TEST_F(VideoCodecInitializerTest, Vp9DeactivateLayers) { config_.simulcast_layers[0].active = true; config_.simulcast_layers[1].active = false; config_.simulcast_layers[2].active = true; - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(codec_out_.VP9()->numberOfSpatialLayers, 3); EXPECT_TRUE(codec_out_.spatialLayers[0].active); EXPECT_FALSE(codec_out_.spatialLayers[1].active); @@ -438,7 +433,7 @@ TEST_F(VideoCodecInitializerTest, Vp9DeactivateLayers) { config_.simulcast_layers[0].active = false; config_.simulcast_layers[1].active = true; config_.simulcast_layers[2].active = true; - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(codec_out_.VP9()->numberOfSpatialLayers, 2); EXPECT_TRUE(codec_out_.spatialLayers[0].active); EXPECT_TRUE(codec_out_.spatialLayers[1].active); @@ -447,7 +442,7 @@ TEST_F(VideoCodecInitializerTest, Vp9DeactivateLayers) { config_.simulcast_layers[0].active = false; config_.simulcast_layers[1].active = false; config_.simulcast_layers[2].active = true; - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(codec_out_.VP9()->numberOfSpatialLayers, 1); EXPECT_TRUE(codec_out_.spatialLayers[0].active); @@ -455,7 +450,7 @@ TEST_F(VideoCodecInitializerTest, Vp9DeactivateLayers) { config_.simulcast_layers[0].active = false; config_.simulcast_layers[1].active = true; config_.simulcast_layers[2].active = false; - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(codec_out_.VP9()->numberOfSpatialLayers, 2); EXPECT_TRUE(codec_out_.spatialLayers[0].active); EXPECT_FALSE(codec_out_.spatialLayers[1].active); @@ -464,7 +459,7 @@ TEST_F(VideoCodecInitializerTest, Vp9DeactivateLayers) { config_.simulcast_layers[0].active = true; config_.simulcast_layers[1].active = false; config_.simulcast_layers[2].active = false; - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(codec_out_.VP9()->numberOfSpatialLayers, 3); EXPECT_TRUE(codec_out_.spatialLayers[0].active); EXPECT_FALSE(codec_out_.spatialLayers[1].active); @@ -472,14 +467,14 @@ TEST_F(VideoCodecInitializerTest, Vp9DeactivateLayers) { } TEST_F(VideoCodecInitializerTest, Vp9SvcResolutionAlignment) { - SetUpFor(VideoCodecType::kVideoCodecVP9, absl::nullopt, 3, 3, false); + SetUpFor(VideoCodecType::kVideoCodecVP9, std::nullopt, 3, 3, false); VideoStream stream = DefaultStream(); stream.width = 1281; stream.height = 721; stream.num_temporal_layers = 3; streams_.push_back(stream); - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); EXPECT_EQ(codec_out_.width, 1280); EXPECT_EQ(codec_out_.height, 720); EXPECT_EQ(codec_out_.numberOfSimulcastStreams, 1); @@ -495,7 +490,7 @@ TEST_F(VideoCodecInitializerTest, Vp9SimulcastResolutions) { DefaultStream(640, 360, ScalabilityMode::kL1T3), DefaultStream(1280, 720, ScalabilityMode::kL1T3)}; - EXPECT_TRUE(InitializeCodec()); + InitializeCodec(); // This is expected to be the largest layer. 
EXPECT_EQ(codec_out_.width, 1280); EXPECT_EQ(codec_out_.height, 720); @@ -515,8 +510,8 @@ TEST_F(VideoCodecInitializerTest, Av1SingleSpatialLayerBitratesAreConsistent) { std::vector streams = {DefaultStream()}; streams[0].scalability_mode = ScalabilityMode::kL1T2; - VideoCodec codec; - EXPECT_TRUE(VideoCodecInitializer::SetupCodec(config, streams, &codec)); + VideoCodec codec = + VideoCodecInitializer::SetupCodec(env_.field_trials(), config, streams); EXPECT_GE(codec.spatialLayers[0].targetBitrate, codec.spatialLayers[0].minBitrate); @@ -530,8 +525,8 @@ TEST_F(VideoCodecInitializerTest, Av1TwoSpatialLayersBitratesAreConsistent) { std::vector streams = {DefaultStream()}; streams[0].scalability_mode = ScalabilityMode::kL2T2; - VideoCodec codec; - EXPECT_TRUE(VideoCodecInitializer::SetupCodec(config, streams, &codec)); + VideoCodec codec = + VideoCodecInitializer::SetupCodec(env_.field_trials(), config, streams); EXPECT_GE(codec.spatialLayers[0].targetBitrate, codec.spatialLayers[0].minBitrate); @@ -544,6 +539,51 @@ TEST_F(VideoCodecInitializerTest, Av1TwoSpatialLayersBitratesAreConsistent) { codec.spatialLayers[1].maxBitrate); } +TEST_F(VideoCodecInitializerTest, Av1ConfiguredMinBitrateApplied) { + VideoEncoderConfig config; + config.simulcast_layers.resize(1); + config.simulcast_layers[0].min_bitrate_bps = 28000; + config.codec_type = VideoCodecType::kVideoCodecAV1; + std::vector streams = {DefaultStream()}; + streams[0].scalability_mode = ScalabilityMode::kL3T2; + + VideoCodec codec = + VideoCodecInitializer::SetupCodec(env_.field_trials(), config, streams); + + EXPECT_EQ(codec.spatialLayers[0].minBitrate, 28u); + EXPECT_GE(codec.spatialLayers[0].targetBitrate, + codec.spatialLayers[0].minBitrate); +} + +TEST_F(VideoCodecInitializerTest, + Av1ConfiguredMinBitrateLimitedByDefaultTargetBitrate) { + VideoEncoderConfig config; + config.simulcast_layers.resize(1); + config.simulcast_layers[0].min_bitrate_bps = 2228000; + config.codec_type = VideoCodecType::kVideoCodecAV1; + std::vector streams = {DefaultStream()}; + streams[0].scalability_mode = ScalabilityMode::kL3T2; + + VideoCodec codec = + VideoCodecInitializer::SetupCodec(env_.field_trials(), config, streams); + + EXPECT_GE(codec.spatialLayers[0].targetBitrate, + codec.spatialLayers[0].minBitrate); +} + +TEST_F(VideoCodecInitializerTest, Av1ConfiguredMinBitrateNotAppliedIfUnset) { + VideoEncoderConfig config; + config.simulcast_layers.resize(1); + config.codec_type = VideoCodecType::kVideoCodecAV1; + std::vector streams = {DefaultStream()}; + streams[0].scalability_mode = ScalabilityMode::kL3T2; + + VideoCodec codec = + VideoCodecInitializer::SetupCodec(env_.field_trials(), config, streams); + + EXPECT_GT(codec.spatialLayers[0].minBitrate, 0u); +} + TEST_F(VideoCodecInitializerTest, Av1TwoSpatialLayersActiveByDefault) { VideoEncoderConfig config; config.codec_type = VideoCodecType::kVideoCodecAV1; @@ -551,8 +591,8 @@ TEST_F(VideoCodecInitializerTest, Av1TwoSpatialLayersActiveByDefault) { streams[0].scalability_mode = ScalabilityMode::kL2T2; config.spatial_layers = {}; - VideoCodec codec; - EXPECT_TRUE(VideoCodecInitializer::SetupCodec(config, streams, &codec)); + VideoCodec codec = + VideoCodecInitializer::SetupCodec(env_.field_trials(), config, streams); EXPECT_TRUE(codec.spatialLayers[0].active); EXPECT_TRUE(codec.spatialLayers[1].active); @@ -567,8 +607,8 @@ TEST_F(VideoCodecInitializerTest, Av1TwoSpatialLayersOneDeactivated) { config.spatial_layers[0].active = true; config.spatial_layers[1].active = false; - VideoCodec codec; - 
EXPECT_TRUE(VideoCodecInitializer::SetupCodec(config, streams, &codec)); + VideoCodec codec = + VideoCodecInitializer::SetupCodec(env_.field_trials(), config, streams); EXPECT_TRUE(codec.spatialLayers[0].active); EXPECT_FALSE(codec.spatialLayers[1].active); @@ -585,8 +625,8 @@ TEST_F(VideoCodecInitializerTest, Vp9SingleSpatialLayerBitratesAreConsistent) { std::vector streams = {DefaultStream()}; streams[0].scalability_mode = ScalabilityMode::kL1T2; - VideoCodec codec; - EXPECT_TRUE(VideoCodecInitializer::SetupCodec(config, streams, &codec)); + VideoCodec codec = + VideoCodecInitializer::SetupCodec(env_.field_trials(), config, streams); EXPECT_EQ(1u, codec.VP9()->numberOfSpatialLayers); // Target is consistent with min and max (min <= target <= max). @@ -613,8 +653,8 @@ TEST_F(VideoCodecInitializerTest, Vp9TwoSpatialLayersBitratesAreConsistent) { std::vector streams = {DefaultStream()}; streams[0].scalability_mode = ScalabilityMode::kL2T2; - VideoCodec codec; - EXPECT_TRUE(VideoCodecInitializer::SetupCodec(config, streams, &codec)); + VideoCodec codec = + VideoCodecInitializer::SetupCodec(env_.field_trials(), config, streams); EXPECT_EQ(2u, codec.VP9()->numberOfSpatialLayers); EXPECT_GE(codec.spatialLayers[0].targetBitrate, @@ -631,4 +671,88 @@ TEST_F(VideoCodecInitializerTest, Vp9TwoSpatialLayersBitratesAreConsistent) { codec.spatialLayers[0].maxBitrate); } +TEST_F(VideoCodecInitializerTest, UpdatesVp9SpecificFieldsWithScalabilityMode) { + VideoEncoderConfig config; + config.codec_type = VideoCodecType::kVideoCodecVP9; + std::vector streams = {DefaultStream()}; + streams[0].scalability_mode = ScalabilityMode::kL2T3_KEY; + + VideoCodec codec = + VideoCodecInitializer::SetupCodec(env_.field_trials(), config, streams); + + EXPECT_EQ(codec.VP9()->numberOfSpatialLayers, 2u); + EXPECT_EQ(codec.VP9()->numberOfTemporalLayers, 3u); + EXPECT_EQ(codec.VP9()->interLayerPred, InterLayerPredMode::kOnKeyPic); + + streams[0].scalability_mode = ScalabilityMode::kS3T1; + codec = + VideoCodecInitializer::SetupCodec(env_.field_trials(), config, streams); + + EXPECT_EQ(codec.VP9()->numberOfSpatialLayers, 3u); + EXPECT_EQ(codec.VP9()->numberOfTemporalLayers, 1u); + EXPECT_EQ(codec.VP9()->interLayerPred, InterLayerPredMode::kOff); +} + +#ifdef RTC_ENABLE_H265 +TEST_F(VideoCodecInitializerTest, H265SingleSpatialLayerBitratesAreConsistent) { + VideoEncoderConfig config; + config.codec_type = VideoCodecType::kVideoCodecH265; + std::vector streams = {DefaultStream()}; + streams[0].scalability_mode = ScalabilityMode::kL1T2; + + VideoCodec codec = + VideoCodecInitializer::SetupCodec(env_.field_trials(), config, streams); + + EXPECT_GE(codec.spatialLayers[0].targetBitrate, + codec.spatialLayers[0].minBitrate); + EXPECT_LE(codec.spatialLayers[0].targetBitrate, + codec.spatialLayers[0].maxBitrate); +} + +// Test that the H.265 codec initializer carries over invalid simulcast layer +// scalability mode to top level scalability mode setting. +TEST_F(VideoCodecInitializerTest, + H265ScalabilityModeConfiguredToTopLevelWhenNotAllowed) { + VideoEncoderConfig config; + config.codec_type = VideoCodecType::kVideoCodecH265; + + std::vector streams = {DefaultStream()}; + streams[0].scalability_mode = ScalabilityMode::kL3T3; + + VideoCodec codec = + VideoCodecInitializer::SetupCodec(env_.field_trials(), config, streams); + + // Check that an unsupported scalability mode will cause top-level scalability + // to be set to the same unsupported mode. 
+ EXPECT_EQ(codec.GetScalabilityMode(), ScalabilityMode::kL3T3); + EXPECT_EQ(codec.spatialLayers[0].numberOfTemporalLayers, 3); + EXPECT_EQ(codec.simulcastStream[0].numberOfTemporalLayers, 3); +} + +// Test that inconsistent scalability mode settings in simulcast streams will +// clear top level scalability mode setting. +TEST_F(VideoCodecInitializerTest, + H265InconsistentScalabilityModesWillClearTopLevelScalability) { + VideoEncoderConfig config; + config.simulcast_layers.resize(2); + config.simulcast_layers[0].active = true; + config.simulcast_layers[1].active = true; + config.codec_type = VideoCodecType::kVideoCodecH265; + + std::vector streams = {DefaultStream(), DefaultStream()}; + streams[0].scalability_mode = ScalabilityMode::kL1T3; + streams[1].scalability_mode = ScalabilityMode::kL1T1; + + VideoCodec codec = + VideoCodecInitializer::SetupCodec(env_.field_trials(), config, streams); + + // Top level scalability mode should be cleared if the simulcast streams have + // different per-stream temporal layer settings. + EXPECT_EQ(codec.GetScalabilityMode(), std::nullopt); + EXPECT_EQ(codec.spatialLayers[0].numberOfTemporalLayers, 3); + EXPECT_EQ(codec.simulcastStream[0].numberOfTemporalLayers, 3); + EXPECT_EQ(codec.simulcastStream[1].numberOfTemporalLayers, 1); +} +#endif + } // namespace webrtc diff --git a/modules/video_coding/video_coding_defines.cc b/modules/video_coding/video_coding_defines.cc index 436b1a6490..1f134fc7b0 100644 --- a/modules/video_coding/video_coding_defines.cc +++ b/modules/video_coding/video_coding_defines.cc @@ -10,10 +10,14 @@ #include "modules/video_coding/include/video_coding_defines.h" +#include + +#include "api/video_codecs/video_decoder.h" + namespace webrtc { -void VCMReceiveCallback::OnDroppedFrames(uint32_t frames_dropped) {} -void VCMReceiveCallback::OnIncomingPayloadType(int payload_type) {} +void VCMReceiveCallback::OnDroppedFrames(uint32_t /* frames_dropped */) {} +void VCMReceiveCallback::OnIncomingPayloadType(int /* payload_type */) {} void VCMReceiveCallback::OnDecoderInfoChanged( const VideoDecoder::DecoderInfo&) {} diff --git a/modules/video_coding/video_coding_impl.cc b/modules/video_coding/video_coding_impl.cc index 2eaecd5011..9d261f3d77 100644 --- a/modules/video_coding/video_coding_impl.cc +++ b/modules/video_coding/video_coding_impl.cc @@ -11,16 +11,25 @@ #include "modules/video_coding/video_coding_impl.h" #include +#include +#include #include +#include -#include "api/field_trials_view.h" +#include "api/environment/environment.h" +#include "api/rtp_headers.h" #include "api/sequence_checker.h" -#include "api/transport/field_trial_based_config.h" #include "api/video/encoded_image.h" -#include "modules/video_coding/include/video_codec_interface.h" +#include "api/video/render_resolution.h" +#include "api/video_codecs/video_decoder.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/encoded_frame.h" +#include "modules/video_coding/generic_decoder.h" +#include "modules/video_coding/include/video_coding.h" +#include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/timing/timing.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/memory/always_valid_pointer.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -57,7 +66,7 @@ VideoDecoder* DEPRECATED_VCMDecoderDataBase::DeregisterExternalDecoder( // frame after RegisterReceiveCodec).
if (current_decoder_ && current_decoder_->IsSameDecoder(it->second)) { // Release it if it was registered and in use. - current_decoder_ = absl::nullopt; + current_decoder_ = std::nullopt; } VideoDecoder* ret = it->second; decoders_.erase(it); @@ -87,7 +96,7 @@ void DEPRECATED_VCMDecoderDataBase::RegisterReceiveCodec( const VideoDecoder::Settings& settings) { // If payload value already exists, erase old and insert new. if (payload_type == current_payload_type_) { - current_payload_type_ = absl::nullopt; + current_payload_type_ = std::nullopt; } decoder_settings_[payload_type] = settings; } @@ -99,7 +108,7 @@ bool DEPRECATED_VCMDecoderDataBase::DeregisterReceiveCodec( } if (payload_type == current_payload_type_) { // This codec is currently in use. - current_payload_type_ = absl::nullopt; + current_payload_type_ = std::nullopt; } return true; } @@ -115,12 +124,12 @@ VCMGenericDecoder* DEPRECATED_VCMDecoderDataBase::GetDecoder( } // If decoder exists - delete. if (current_decoder_.has_value()) { - current_decoder_ = absl::nullopt; - current_payload_type_ = absl::nullopt; + current_decoder_ = std::nullopt; + current_payload_type_ = std::nullopt; } CreateAndInitDecoder(frame); - if (current_decoder_ == absl::nullopt) { + if (current_decoder_ == std::nullopt) { return nullptr; } @@ -128,7 +137,7 @@ VCMGenericDecoder* DEPRECATED_VCMDecoderDataBase::GetDecoder( callback->OnIncomingPayloadType(payload_type); if (current_decoder_->RegisterDecodeCompleteCallback(decoded_frame_callback) < 0) { - current_decoder_ = absl::nullopt; + current_decoder_ = std::nullopt; return nullptr; } @@ -164,7 +173,7 @@ void DEPRECATED_VCMDecoderDataBase::CreateAndInitDecoder( decoder_item->second.set_max_render_resolution(frame_resolution); } if (!current_decoder_->Configure(decoder_item->second)) { - current_decoder_ = absl::nullopt; + current_decoder_ = std::nullopt; RTC_LOG(LS_ERROR) << "Failed to initialize decoder."; } } @@ -175,12 +184,10 @@ namespace { class VideoCodingModuleImpl : public VideoCodingModule { public: - explicit VideoCodingModuleImpl(Clock* clock, - const FieldTrialsView* field_trials) - : VideoCodingModule(), - field_trials_(field_trials), - timing_(new VCMTiming(clock, *field_trials_)), - receiver_(clock, timing_.get(), *field_trials_) {} + explicit VideoCodingModuleImpl(const Environment& env) + : env_(env), + timing_(&env_.clock(), env_.field_trials()), + receiver_(&env_.clock(), &timing_, env_.field_trials()) {} ~VideoCodingModuleImpl() override = default; @@ -234,21 +241,18 @@ class VideoCodingModuleImpl : public VideoCodingModule { } private: - AlwaysValidPointer - field_trials_; + const Environment env_; SequenceChecker construction_thread_; - const std::unique_ptr timing_; + VCMTiming timing_; vcm::VideoReceiver receiver_; }; } // namespace // DEPRECATED. Create method for current interface, will be removed when the // new jitter buffer is in place. 
-VideoCodingModule* VideoCodingModule::Create( - Clock* clock, - const FieldTrialsView* field_trials) { - RTC_DCHECK(clock); - return new VideoCodingModuleImpl(clock, field_trials); +std::unique_ptr VideoCodingModule::CreateDeprecated( + const Environment& env) { + return std::make_unique(env); } } // namespace webrtc diff --git a/modules/video_coding/video_coding_impl.h b/modules/video_coding/video_coding_impl.h index b715d589ad..fa44e4c5b4 100644 --- a/modules/video_coding/video_coding_impl.h +++ b/modules/video_coding/video_coding_impl.h @@ -11,19 +11,20 @@ #ifndef MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_ #define MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_ +#include +#include #include -#include -#include -#include +#include -#include "absl/types/optional.h" #include "api/field_trials_view.h" +#include "api/rtp_headers.h" #include "api/sequence_checker.h" -#include "modules/video_coding/deprecated/frame_buffer.h" +#include "api/video_codecs/video_decoder.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/deprecated/jitter_buffer.h" #include "modules/video_coding/deprecated/receiver.h" #include "modules/video_coding/generic_decoder.h" -#include "modules/video_coding/include/video_coding.h" +#include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/timing/timing.h" #include "rtc_base/one_time_event.h" #include "rtc_base/synchronization/mutex.h" @@ -89,8 +90,8 @@ class DEPRECATED_VCMDecoderDataBase { SequenceChecker decoder_sequence_checker_; - absl::optional current_payload_type_; - absl::optional current_decoder_ + std::optional current_payload_type_; + std::optional current_decoder_ RTC_GUARDED_BY(decoder_sequence_checker_); // Initialization paramaters for decoders keyed by payload type. 
std::map decoder_settings_; diff --git a/modules/video_coding/video_receiver.cc b/modules/video_coding/video_receiver.cc index 0c54cb178a..dce294e8c4 100644 --- a/modules/video_coding/video_receiver.cc +++ b/modules/video_coding/video_receiver.cc @@ -13,25 +13,26 @@ #include #include +#include "api/field_trials_view.h" #include "api/rtp_headers.h" #include "api/sequence_checker.h" -#include "api/video_codecs/video_codec.h" +#include "api/units/timestamp.h" +#include "api/video/video_frame_type.h" #include "api/video_codecs/video_decoder.h" -#include "modules/video_coding/decoder_database.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/deprecated/jitter_buffer.h" #include "modules/video_coding/deprecated/packet.h" #include "modules/video_coding/deprecated/receiver.h" #include "modules/video_coding/encoded_frame.h" #include "modules/video_coding/generic_decoder.h" -#include "modules/video_coding/include/video_coding.h" #include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/internal_defines.h" -#include "modules/video_coding/media_opt_util.h" #include "modules/video_coding/timing/timing.h" #include "modules/video_coding/video_coding_impl.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/one_time_event.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" @@ -44,7 +45,10 @@ VideoReceiver::VideoReceiver(Clock* clock, : clock_(clock), _timing(timing), _receiver(_timing, clock_, field_trials), - _decodedFrameCallback(_timing, clock_, field_trials), + _decodedFrameCallback(_timing, + clock_, + field_trials, + /*corruption_score_calculator=*/nullptr), _frameTypeCallback(nullptr), _packetRequestCallback(nullptr), _scheduleKeyRequest(false), diff --git a/modules/video_coding/video_receiver2.cc b/modules/video_coding/video_receiver2.cc index 72f366dbe7..75b5b1daae 100644 --- a/modules/video_coding/video_receiver2.cc +++ b/modules/video_coding/video_receiver2.cc @@ -13,12 +13,14 @@ #include #include +#include #include -#include -#include "absl/algorithm/container.h" -#include "api/video_codecs/video_codec.h" +#include "api/field_trials_view.h" +#include "api/sequence_checker.h" +#include "api/video/encoded_frame.h" #include "api/video_codecs/video_decoder.h" +#include "common_video/include/corruption_score_calculator.h" #include "modules/video_coding/decoder_database.h" #include "modules/video_coding/generic_decoder.h" #include "modules/video_coding/include/video_coding_defines.h" @@ -29,11 +31,16 @@ namespace webrtc { -VideoReceiver2::VideoReceiver2(Clock* clock, - VCMTiming* timing, - const FieldTrialsView& field_trials) +VideoReceiver2::VideoReceiver2( + Clock* clock, + VCMTiming* timing, + const FieldTrialsView& field_trials, + CorruptionScoreCalculator* corruption_score_calculator) : clock_(clock), - decoded_frame_callback_(timing, clock_, field_trials), + decoded_frame_callback_(timing, + clock_, + field_trials, + corruption_score_calculator), codec_database_() { decoder_sequence_checker_.Detach(); } diff --git a/modules/video_coding/video_receiver2.h b/modules/video_coding/video_receiver2.h index 0d3bdb464f..83a2d6d248 100644 --- a/modules/video_coding/video_receiver2.h +++ b/modules/video_coding/video_receiver2.h @@ -11,13 +11,14 @@ #ifndef MODULES_VIDEO_CODING_VIDEO_RECEIVER2_H_ #define MODULES_VIDEO_CODING_VIDEO_RECEIVER2_H_ +#include #include -#include #include "api/field_trials_view.h" #include 
"api/sequence_checker.h" #include "api/video/encoded_frame.h" #include "api/video_codecs/video_decoder.h" +#include "common_video/include/corruption_score_calculator.h" #include "modules/video_coding/decoder_database.h" #include "modules/video_coding/generic_decoder.h" #include "modules/video_coding/timing/timing.h" @@ -35,7 +36,8 @@ class VideoReceiver2 { public: VideoReceiver2(Clock* clock, VCMTiming* timing, - const FieldTrialsView& field_trials); + const FieldTrialsView& field_trials, + CorruptionScoreCalculator* corruption_score_calculator); ~VideoReceiver2(); void RegisterReceiveCodec(uint8_t payload_type, diff --git a/modules/video_coding/video_receiver2_unittest.cc b/modules/video_coding/video_receiver2_unittest.cc index 88a19dff0f..f7460a5310 100644 --- a/modules/video_coding/video_receiver2_unittest.cc +++ b/modules/video_coding/video_receiver2_unittest.cc @@ -10,14 +10,18 @@ #include "modules/video_coding/video_receiver2.h" +#include #include #include #include "api/test/mock_video_decoder.h" #include "api/units/timestamp.h" #include "api/video/encoded_frame.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/video_decoder.h" #include "common_video/test/utilities.h" -#include "modules/video_coding/decoder_database.h" +#include "modules/video_coding/include/video_coding_defines.h" +#include "modules/video_coding/include/video_error_codes.h" #include "modules/video_coding/timing/timing.h" #include "system_wrappers/include/clock.h" #include "test/gmock.h" @@ -35,14 +39,7 @@ class MockVCMReceiveCallback : public VCMReceiveCallback { public: MockVCMReceiveCallback() = default; - MOCK_METHOD(int32_t, - FrameToRender, - (VideoFrame&, - absl::optional, - TimeDelta, - VideoContentType, - VideoFrameType), - (override)); + MOCK_METHOD(int32_t, OnFrameToRender, (const FrameToRender&), (override)); MOCK_METHOD(void, OnIncomingPayloadType, (int), (override)); MOCK_METHOD(void, OnDecoderInfoChanged, @@ -89,7 +86,8 @@ class VideoReceiver2Test : public ::testing::Test { SimulatedClock clock_{Timestamp::Millis(1337)}; VCMTiming timing_{&clock_, field_trials_}; NiceMock receive_callback_; - VideoReceiver2 receiver_{&clock_, &timing_, field_trials_}; + VideoReceiver2 receiver_{&clock_, &timing_, field_trials_, + /*corruption_score_calculator=*/nullptr}; }; TEST_F(VideoReceiver2Test, RegisterExternalDecoder) { diff --git a/modules/video_coding/video_receiver_unittest.cc b/modules/video_coding/video_receiver_unittest.cc index 20636536cd..0e29146aef 100644 --- a/modules/video_coding/video_receiver_unittest.cc +++ b/modules/video_coding/video_receiver_unittest.cc @@ -8,12 +8,21 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include +#include + +#include "api/rtp_headers.h" #include "api/test/mock_video_decoder.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" #include "api/video_codecs/video_decoder.h" -#include "modules/video_coding/include/video_coding.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/timing/timing.h" #include "modules/video_coding/video_coding_impl.h" #include "system_wrappers/include/clock.h" +#include "test/gmock.h" #include "test/gtest.h" #include "test/scoped_key_value_config.h" @@ -38,14 +47,7 @@ class MockVCMReceiveCallback : public VCMReceiveCallback { MockVCMReceiveCallback() {} virtual ~MockVCMReceiveCallback() {} - MOCK_METHOD(int32_t, - FrameToRender, - (VideoFrame&, - absl::optional, - TimeDelta, - VideoContentType, - VideoFrameType), - (override)); + MOCK_METHOD(int32_t, OnFrameToRender, (const FrameToRender&), (override)); MOCK_METHOD(void, OnIncomingPayloadType, (int), (override)); MOCK_METHOD(void, OnDecoderInfoChanged, diff --git a/native-api.md b/native-api.md index edd68e2f75..66445203cb 100644 --- a/native-api.md +++ b/native-api.md @@ -108,10 +108,3 @@ macro for you. [metrics_h]: https://webrtc.googlesource.com/src/+/main/system_wrappers/include/metrics.h -## `WEBRTC_EXCLUDE_TRANSIENT_SUPPRESSOR` -The transient suppressor functionality in the audio processing module is not -always used. If you wish to exclude it from the build in order to preserve -binary size, then define the preprocessor macro -`WEBRTC_EXCLUDE_TRANSIENT_SUPPRESSOR`. If you use GN, you can just set the GN -argument `rtc_exclude_transient_suppressor` to true and GN will define the macro -for you. diff --git a/net/dcsctp/common/BUILD.gn b/net/dcsctp/common/BUILD.gn index 78fa0d307e..d496c64a56 100644 --- a/net/dcsctp/common/BUILD.gn +++ b/net/dcsctp/common/BUILD.gn @@ -29,12 +29,6 @@ rtc_source_set("sequence_numbers") { sources = [ "sequence_numbers.h" ] } -rtc_source_set("str_join") { - deps = [ "../../../rtc_base:stringutils" ] - sources = [ "str_join.h" ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] -} - if (rtc_include_tests) { rtc_library("dcsctp_common_unittests") { testonly = true @@ -43,7 +37,6 @@ if (rtc_include_tests) { deps = [ ":math", ":sequence_numbers", - ":str_join", "../../../api:array_view", "../../../rtc_base:checks", "../../../rtc_base:gunit_helpers", @@ -52,7 +45,6 @@ if (rtc_include_tests) { sources = [ "math_test.cc", "sequence_numbers_test.cc", - "str_join_test.cc", ] } } diff --git a/net/dcsctp/common/internal_types.h b/net/dcsctp/common/internal_types.h index 2354b92cc4..4f3b1935a2 100644 --- a/net/dcsctp/common/internal_types.h +++ b/net/dcsctp/common/internal_types.h @@ -40,5 +40,10 @@ using VerificationTag = webrtc::StrongAlias; // Tie Tag, used as a nonce when connecting. using TieTag = webrtc::StrongAlias; +// An ID for every outgoing message, to correlate outgoing data chunks with the +// message it was carved from. 
+using OutgoingMessageId = + webrtc::StrongAlias; + } // namespace dcsctp #endif // NET_DCSCTP_COMMON_INTERNAL_TYPES_H_ diff --git a/net/dcsctp/fuzzers/dcsctp_fuzzers.cc b/net/dcsctp/fuzzers/dcsctp_fuzzers.cc index e8fcacffa0..91f258fd2a 100644 --- a/net/dcsctp/fuzzers/dcsctp_fuzzers.cc +++ b/net/dcsctp/fuzzers/dcsctp_fuzzers.cc @@ -60,7 +60,7 @@ enum class StartingState : int { // State about the current fuzzing iteration class FuzzState { public: - explicit FuzzState(rtc::ArrayView data) : data_(data) {} + explicit FuzzState(webrtc::ArrayView data) : data_(data) {} uint8_t GetByte() { uint8_t value = 0; @@ -79,7 +79,7 @@ class FuzzState { private: uint32_t tsn_ = kRandomValue; uint32_t mid_ = 0; - rtc::ArrayView data_; + webrtc::ArrayView data_; size_t offset_ = 0; }; @@ -397,7 +397,7 @@ std::vector GeneratePacket(FuzzState& state) { void FuzzSocket(DcSctpSocketInterface& socket, FuzzerCallbacks& cb, - rtc::ArrayView data) { + webrtc::ArrayView data) { if (data.size() < kMinInputLength || data.size() > kMaxInputLength) { return; } @@ -434,7 +434,7 @@ void FuzzSocket(DcSctpSocketInterface& socket, SendOptions options; options.unordered = IsUnordered(flags & 0x01); options.max_retransmissions = - (flags & 0x02) != 0 ? absl::make_optional(0) : absl::nullopt; + (flags & 0x02) != 0 ? std::make_optional(0) : std::nullopt; options.lifecycle_id = LifecycleId(42); size_t payload_exponent = (flags >> 2) % 16; size_t payload_size = static_cast(1) << payload_exponent; @@ -446,7 +446,7 @@ void FuzzSocket(DcSctpSocketInterface& socket, case 7: { // Expire an active timeout/timer. uint8_t timeout_idx = state.GetByte(); - absl::optional timeout_id = cb.ExpireTimeout(timeout_idx); + std::optional timeout_id = cb.ExpireTimeout(timeout_idx); if (timeout_id.has_value()) { socket.HandleTimeout(*timeout_id); } diff --git a/net/dcsctp/fuzzers/dcsctp_fuzzers.h b/net/dcsctp/fuzzers/dcsctp_fuzzers.h index 90cfa35099..756eaf4b89 100644 --- a/net/dcsctp/fuzzers/dcsctp_fuzzers.h +++ b/net/dcsctp/fuzzers/dcsctp_fuzzers.h @@ -28,7 +28,7 @@ class FuzzerTimeout : public Timeout { explicit FuzzerTimeout(std::set& active_timeouts) : active_timeouts_(active_timeouts) {} - void Start(DurationMs duration_ms, TimeoutID timeout_id) override { + void Start(DurationMs /* duration_ms */, TimeoutID timeout_id) override { // Start is only allowed to be called on stopped or expired timeouts. if (timeout_id_.has_value()) { // It has been started before, but maybe it expired. Ensure that it's not @@ -44,42 +44,45 @@ class FuzzerTimeout : public Timeout { // expired. RTC_DCHECK(timeout_id_.has_value()); RTC_DCHECK(active_timeouts_.erase(*timeout_id_) == 1); - timeout_id_ = absl::nullopt; + timeout_id_ = std::nullopt; } // A set of all active timeouts, managed by `FuzzerCallbacks`. std::set& active_timeouts_; // If present, the timout is active and will expire reported as `timeout_id`. - absl::optional timeout_id_; + std::optional timeout_id_; }; class FuzzerCallbacks : public DcSctpSocketCallbacks { public: static constexpr int kRandomValue = 42; - void SendPacket(rtc::ArrayView data) override { + void SendPacket(webrtc::ArrayView data) override { sent_packets_.emplace_back(std::vector(data.begin(), data.end())); } std::unique_ptr CreateTimeout( - webrtc::TaskQueueBase::DelayPrecision precision) override { + webrtc::TaskQueueBase::DelayPrecision /* precision */) override { // The fuzzer timeouts don't implement |precision|. 
return std::make_unique(active_timeouts_); } - TimeMs TimeMillis() override { return TimeMs(42); } - uint32_t GetRandomInt(uint32_t low, uint32_t high) override { + webrtc::Timestamp Now() override { return webrtc::Timestamp::Millis(42); } + uint32_t GetRandomInt(uint32_t /* low */, uint32_t /* high */) override { return kRandomValue; } - void OnMessageReceived(DcSctpMessage message) override {} - void OnError(ErrorKind error, absl::string_view message) override {} - void OnAborted(ErrorKind error, absl::string_view message) override {} + void OnMessageReceived(DcSctpMessage /* message */) override {} + void OnError(ErrorKind /* error */, + absl::string_view /* message */) override {} + void OnAborted(ErrorKind /* error */, + absl::string_view /* message */) override {} void OnConnected() override {} void OnClosed() override {} void OnConnectionRestarted() override {} - void OnStreamsResetFailed(rtc::ArrayView outgoing_streams, - absl::string_view reason) override {} + void OnStreamsResetFailed( + webrtc::ArrayView /* outgoing_streams */, + absl::string_view /* reason */) override {} void OnStreamsResetPerformed( - rtc::ArrayView outgoing_streams) override {} + webrtc::ArrayView outgoing_streams) override {} void OnIncomingStreamsReset( - rtc::ArrayView incoming_streams) override {} + webrtc::ArrayView incoming_streams) override {} std::vector ConsumeSentPacket() { if (sent_packets_.empty()) { @@ -91,7 +94,7 @@ class FuzzerCallbacks : public DcSctpSocketCallbacks { } // Given an index among the active timeouts, will expire that one. - absl::optional ExpireTimeout(size_t index) { + std::optional ExpireTimeout(size_t index) { if (index < active_timeouts_.size()) { auto it = active_timeouts_.begin(); std::advance(it, index); @@ -99,7 +102,7 @@ class FuzzerCallbacks : public DcSctpSocketCallbacks { active_timeouts_.erase(it); return timeout_id; } - return absl::nullopt; + return std::nullopt; } private: @@ -112,7 +115,7 @@ class FuzzerCallbacks : public DcSctpSocketCallbacks { // API methods. 
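// (Illustrative, not part of this change: ArrayView is a cheap, non-owning view
// over contiguous bytes, so a caller can hand FuzzSocket any byte buffer without
// copying; the helper name below is hypothetical, e.g.
//   void FuzzWithBuffer(DcSctpSocketInterface& socket, FuzzerCallbacks& cb,
//                       const std::vector<uint8_t>& buf) {
//     FuzzSocket(socket, cb, buf);  // implicit conversion to the ArrayView
//   }
// Only the namespace spelling of the view type changes in this declaration.)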
void FuzzSocket(DcSctpSocketInterface& socket, FuzzerCallbacks& cb, - rtc::ArrayView data); + webrtc::ArrayView data); } // namespace dcsctp_fuzzers } // namespace dcsctp diff --git a/net/dcsctp/packet/BUILD.gn b/net/dcsctp/packet/BUILD.gn index 7abccc004b..dcdff0856c 100644 --- a/net/dcsctp/packet/BUILD.gn +++ b/net/dcsctp/packet/BUILD.gn @@ -30,10 +30,6 @@ rtc_library("tlv_trait") { "../../../rtc_base:checks", "../../../rtc_base:logging", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", - ] sources = [ "tlv_trait.cc", "tlv_trait.h", @@ -72,8 +68,10 @@ rtc_library("parameter") { "../../../rtc_base:stringutils", "../common:internal_types", "../common:math", - "../common:str_join", "../public:types", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "parameter/add_incoming_streams_request_parameter.cc", @@ -101,12 +99,6 @@ rtc_library("parameter") { "parameter/zero_checksum_acceptable_chunk_parameter.cc", "parameter/zero_checksum_acceptable_chunk_parameter.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("error_cause") { @@ -120,9 +112,10 @@ rtc_library("error_cause") { "../../../rtc_base:stringutils", "../common:internal_types", "../common:math", - "../common:str_join", "../packet:bounded_io", "../public:types", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "error_cause/cookie_received_while_shutting_down_cause.cc", @@ -154,11 +147,6 @@ rtc_library("error_cause") { "error_cause/user_initiated_abort_cause.cc", "error_cause/user_initiated_abort_cause.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("chunk") { @@ -172,8 +160,9 @@ rtc_library("chunk") { "../../../rtc_base:logging", "../../../rtc_base:stringutils", "../common:math", - "../common:str_join", "../packet:bounded_io", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "chunk/abort_chunk.cc", @@ -215,11 +204,6 @@ rtc_library("chunk") { "chunk/shutdown_complete_chunk.cc", "chunk/shutdown_complete_chunk.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("chunk_validators") { @@ -246,15 +230,12 @@ rtc_library("sctp_packet") { "../common:internal_types", "../common:math", "../public:types", + "//third_party/abseil-cpp/absl/memory", ] sources = [ "sctp_packet.cc", "sctp_packet.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/memory:memory", - "//third_party/abseil-cpp/absl/types:optional", - ] } if (rtc_include_tests) { @@ -329,6 +310,5 @@ if (rtc_include_tests) { "sctp_packet_test.cc", "tlv_trait_test.cc", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } } diff --git a/net/dcsctp/packet/bounded_byte_reader.h b/net/dcsctp/packet/bounded_byte_reader.h index 603ed6ac33..3ce6d6d0b5 100644 --- a/net/dcsctp/packet/bounded_byte_reader.h +++ b/net/dcsctp/packet/bounded_byte_reader.h @@ -51,7 +51,8 @@ inline uint32_t LoadBigEndian32(const 
uint8_t* data) { template class BoundedByteReader { public: - explicit BoundedByteReader(rtc::ArrayView data) : data_(data) { + explicit BoundedByteReader(webrtc::ArrayView data) + : data_(data) { RTC_CHECK(data.size() >= FixedSize); } @@ -79,19 +80,19 @@ class BoundedByteReader { BoundedByteReader sub_reader(size_t variable_offset) const { RTC_CHECK(FixedSize + variable_offset + SubSize <= data_.size()); - rtc::ArrayView sub_span = + webrtc::ArrayView sub_span = data_.subview(FixedSize + variable_offset, SubSize); return BoundedByteReader(sub_span); } size_t variable_data_size() const { return data_.size() - FixedSize; } - rtc::ArrayView variable_data() const { + webrtc::ArrayView variable_data() const { return data_.subview(FixedSize, data_.size() - FixedSize); } private: - const rtc::ArrayView data_; + const webrtc::ArrayView data_; }; } // namespace dcsctp diff --git a/net/dcsctp/packet/bounded_byte_writer.h b/net/dcsctp/packet/bounded_byte_writer.h index d754549e4f..08dda4f5e7 100644 --- a/net/dcsctp/packet/bounded_byte_writer.h +++ b/net/dcsctp/packet/bounded_byte_writer.h @@ -55,7 +55,7 @@ inline void StoreBigEndian32(uint8_t* data, uint32_t val) { template class BoundedByteWriter { public: - explicit BoundedByteWriter(rtc::ArrayView data) : data_(data) { + explicit BoundedByteWriter(webrtc::ArrayView data) : data_(data) { RTC_CHECK(data.size() >= FixedSize); } @@ -87,7 +87,7 @@ class BoundedByteWriter { data_.subview(FixedSize + variable_offset, SubSize)); } - void CopyToVariableData(rtc::ArrayView source) { + void CopyToVariableData(webrtc::ArrayView source) { size_t copy_size = std::min(source.size(), data_.size() - FixedSize); if (source.data() == nullptr || copy_size == 0) { return; @@ -96,7 +96,7 @@ class BoundedByteWriter { } private: - rtc::ArrayView data_; + webrtc::ArrayView data_; }; } // namespace dcsctp diff --git a/net/dcsctp/packet/chunk/abort_chunk.cc b/net/dcsctp/packet/chunk/abort_chunk.cc index 8348eb96a9..d7190a7d5e 100644 --- a/net/dcsctp/packet/chunk/abort_chunk.cc +++ b/net/dcsctp/packet/chunk/abort_chunk.cc @@ -11,10 +11,10 @@ #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -36,16 +36,16 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int AbortChunk::kType; -absl::optional AbortChunk::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional AbortChunk::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } - absl::optional error_causes = + std::optional error_causes = Parameters::Parse(reader->variable_data()); if (!error_causes.has_value()) { - return absl::nullopt; + return std::nullopt; } uint8_t flags = reader->Load8<1>(); bool filled_in_verification_tag = (flags & (1 << kFlagsBitT)) == 0; @@ -53,7 +53,7 @@ absl::optional AbortChunk::Parse( } void AbortChunk::SerializeTo(std::vector& out) const { - rtc::ArrayView error_causes = error_causes_.data(); + webrtc::ArrayView error_causes = error_causes_.data(); BoundedByteWriter writer = AllocateTLV(out, error_causes.size()); writer.Store8<1>(filled_in_verification_tag_ ? 
0 : (1 << kFlagsBitT)); writer.CopyToVariableData(error_causes); diff --git a/net/dcsctp/packet/chunk/abort_chunk.h b/net/dcsctp/packet/chunk/abort_chunk.h index 1408a75e80..771bbe9b38 100644 --- a/net/dcsctp/packet/chunk/abort_chunk.h +++ b/net/dcsctp/packet/chunk/abort_chunk.h @@ -42,7 +42,7 @@ class AbortChunk : public Chunk, public TLVTrait { AbortChunk(AbortChunk&& other) = default; AbortChunk& operator=(AbortChunk&& other) = default; - static absl::optional Parse(rtc::ArrayView data); + static std::optional Parse(webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/chunk/chunk.cc b/net/dcsctp/packet/chunk/chunk.cc index 832ab82288..73d675593c 100644 --- a/net/dcsctp/packet/chunk/chunk.cc +++ b/net/dcsctp/packet/chunk/chunk.cc @@ -11,9 +11,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/common/math.h" #include "net/dcsctp/packet/chunk/abort_chunk.h" @@ -39,10 +39,10 @@ namespace dcsctp { template bool ParseAndPrint(uint8_t chunk_type, - rtc::ArrayView data, - rtc::StringBuilder& sb) { + webrtc::ArrayView data, + webrtc::StringBuilder& sb) { if (chunk_type == Chunk::kType) { - absl::optional c = Chunk::Parse(data); + std::optional c = Chunk::Parse(data); if (c.has_value()) { sb << c->ToString(); } else { @@ -53,8 +53,8 @@ bool ParseAndPrint(uint8_t chunk_type, return false; } -std::string DebugConvertChunkToString(rtc::ArrayView data) { - rtc::StringBuilder sb; +std::string DebugConvertChunkToString(webrtc::ArrayView data) { + webrtc::StringBuilder sb; if (data.empty()) { sb << "Failed to parse chunk due to empty data"; diff --git a/net/dcsctp/packet/chunk/chunk.h b/net/dcsctp/packet/chunk/chunk.h index 687aa1daa1..810cd92790 100644 --- a/net/dcsctp/packet/chunk/chunk.h +++ b/net/dcsctp/packet/chunk/chunk.h @@ -16,13 +16,13 @@ #include #include #include +#include #include #include #include #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/data.h" #include "net/dcsctp/packet/error_cause/error_cause.h" @@ -52,7 +52,7 @@ class Chunk { // Introspects the chunk in `data` and returns a human readable textual // representation of it, to be used in debugging. 
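// (Illustrative usage, not part of this change; the logging statement is an
// assumption, e.g.
//   RTC_DLOG(LS_VERBOSE) << "Handling chunk: "
//                        << DebugConvertChunkToString(chunk_data);
// where `chunk_data` is a view over one complete chunk, including its TLV
// header.)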
-std::string DebugConvertChunkToString(rtc::ArrayView data); +std::string DebugConvertChunkToString(webrtc::ArrayView data); struct ChunkConfig { static constexpr int kTypeSizeInBytes = 1; diff --git a/net/dcsctp/packet/chunk/cookie_ack_chunk.cc b/net/dcsctp/packet/chunk/cookie_ack_chunk.cc index 4839969ccf..c48462067a 100644 --- a/net/dcsctp/packet/chunk/cookie_ack_chunk.cc +++ b/net/dcsctp/packet/chunk/cookie_ack_chunk.cc @@ -11,9 +11,9 @@ #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" namespace dcsctp { @@ -27,10 +27,10 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int CookieAckChunk::kType; -absl::optional CookieAckChunk::Parse( - rtc::ArrayView data) { +std::optional CookieAckChunk::Parse( + webrtc::ArrayView data) { if (!ParseTLV(data).has_value()) { - return absl::nullopt; + return std::nullopt; } return CookieAckChunk(); } diff --git a/net/dcsctp/packet/chunk/cookie_ack_chunk.h b/net/dcsctp/packet/chunk/cookie_ack_chunk.h index f7d4a33f7d..0af7bfa5a2 100644 --- a/net/dcsctp/packet/chunk/cookie_ack_chunk.h +++ b/net/dcsctp/packet/chunk/cookie_ack_chunk.h @@ -35,8 +35,8 @@ class CookieAckChunk : public Chunk, public TLVTrait { CookieAckChunk() {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/chunk/cookie_echo_chunk.cc b/net/dcsctp/packet/chunk/cookie_echo_chunk.cc index a01d0b13c4..daa85622ee 100644 --- a/net/dcsctp/packet/chunk/cookie_echo_chunk.cc +++ b/net/dcsctp/packet/chunk/cookie_echo_chunk.cc @@ -11,10 +11,10 @@ #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -34,11 +34,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int CookieEchoChunk::kType; -absl::optional CookieEchoChunk::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional CookieEchoChunk::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } return CookieEchoChunk(reader->variable_data()); } diff --git a/net/dcsctp/packet/chunk/cookie_echo_chunk.h b/net/dcsctp/packet/chunk/cookie_echo_chunk.h index 8cb80527f8..58d730bbd4 100644 --- a/net/dcsctp/packet/chunk/cookie_echo_chunk.h +++ b/net/dcsctp/packet/chunk/cookie_echo_chunk.h @@ -33,16 +33,16 @@ class CookieEchoChunk : public Chunk, public TLVTrait { public: static constexpr int kType = CookieEchoChunkConfig::kType; - explicit CookieEchoChunk(rtc::ArrayView cookie) + explicit CookieEchoChunk(webrtc::ArrayView cookie) : cookie_(cookie.begin(), cookie.end()) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; - rtc::ArrayView cookie() const { return cookie_; } + webrtc::ArrayView cookie() const { return cookie_; } private: std::vector cookie_; diff --git a/net/dcsctp/packet/chunk/data_chunk.cc b/net/dcsctp/packet/chunk/data_chunk.cc index cf866b7b2f..229ba0cc36 100644 --- a/net/dcsctp/packet/chunk/data_chunk.cc +++ b/net/dcsctp/packet/chunk/data_chunk.cc @@ -11,11 +11,11 @@ #include +#include #include 
#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -43,10 +43,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int DataChunk::kType; -absl::optional DataChunk::Parse(rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional DataChunk::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } uint8_t flags = reader->Load8<1>(); @@ -86,7 +87,7 @@ void DataChunk::SerializeTo(std::vector& out) const { } std::string DataChunk::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "DATA, type=" << (options().is_unordered ? "unordered" : "ordered") << "::" << (*options().is_beginning && *options().is_end ? "complete" diff --git a/net/dcsctp/packet/chunk/data_chunk.h b/net/dcsctp/packet/chunk/data_chunk.h index 12bb05f2c4..cdd037a671 100644 --- a/net/dcsctp/packet/chunk/data_chunk.h +++ b/net/dcsctp/packet/chunk/data_chunk.h @@ -59,7 +59,7 @@ class DataChunk : public AnyDataChunk, public TLVTrait { DataChunk(TSN tsn, Data&& data, bool immediate_ack) : AnyDataChunk(tsn, std::move(data), immediate_ack) {} - static absl::optional Parse(rtc::ArrayView data); + static std::optional Parse(webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/chunk/data_common.h b/net/dcsctp/packet/chunk/data_common.h index b67efeee1e..7f85c82b23 100644 --- a/net/dcsctp/packet/chunk/data_common.h +++ b/net/dcsctp/packet/chunk/data_common.h @@ -48,10 +48,10 @@ class AnyDataChunk : public Chunk { StreamID stream_id() const { return data_.stream_id; } SSN ssn() const { return data_.ssn; } - MID message_id() const { return data_.message_id; } + MID mid() const { return data_.mid; } FSN fsn() const { return data_.fsn; } PPID ppid() const { return data_.ppid; } - rtc::ArrayView payload() const { return data_.payload; } + webrtc::ArrayView payload() const { return data_.payload; } // Extracts the Data from the chunk, as a destructive action. 
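// (Illustrative, not part of this change: the && ref-qualifier below means the
// chunk must be given up explicitly; a hypothetical caller would write e.g.
//   Data TakePayload(DataChunk chunk) { return std::move(chunk).extract(); }
// leaving the chunk in a moved-from state.)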
Data extract() && { return std::move(data_); } @@ -59,7 +59,7 @@ class AnyDataChunk : public Chunk { AnyDataChunk(TSN tsn, StreamID stream_id, SSN ssn, - MID message_id, + MID mid, FSN fsn, PPID ppid, std::vector payload, @@ -67,7 +67,7 @@ class AnyDataChunk : public Chunk { : tsn_(tsn), data_(stream_id, ssn, - message_id, + mid, fsn, ppid, std::move(payload), diff --git a/net/dcsctp/packet/chunk/error_chunk.cc b/net/dcsctp/packet/chunk/error_chunk.cc index baac0c5588..67a439ee7b 100644 --- a/net/dcsctp/packet/chunk/error_chunk.cc +++ b/net/dcsctp/packet/chunk/error_chunk.cc @@ -11,10 +11,10 @@ #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -36,22 +36,22 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int ErrorChunk::kType; -absl::optional ErrorChunk::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional ErrorChunk::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } - absl::optional error_causes = + std::optional error_causes = Parameters::Parse(reader->variable_data()); if (!error_causes.has_value()) { - return absl::nullopt; + return std::nullopt; } return ErrorChunk(*std::move(error_causes)); } void ErrorChunk::SerializeTo(std::vector& out) const { - rtc::ArrayView error_causes = error_causes_.data(); + webrtc::ArrayView error_causes = error_causes_.data(); BoundedByteWriter writer = AllocateTLV(out, error_causes.size()); writer.CopyToVariableData(error_causes); } diff --git a/net/dcsctp/packet/chunk/error_chunk.h b/net/dcsctp/packet/chunk/error_chunk.h index 96122cff6a..b8bac5348b 100644 --- a/net/dcsctp/packet/chunk/error_chunk.h +++ b/net/dcsctp/packet/chunk/error_chunk.h @@ -41,7 +41,7 @@ class ErrorChunk : public Chunk, public TLVTrait { ErrorChunk(ErrorChunk&& other) = default; ErrorChunk& operator=(ErrorChunk&& other) = default; - static absl::optional Parse(rtc::ArrayView data); + static std::optional Parse(webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/chunk/forward_tsn_chunk.cc b/net/dcsctp/packet/chunk/forward_tsn_chunk.cc index e432114c50..f9cf67e117 100644 --- a/net/dcsctp/packet/chunk/forward_tsn_chunk.cc +++ b/net/dcsctp/packet/chunk/forward_tsn_chunk.cc @@ -12,11 +12,11 @@ #include #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -44,11 +44,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int ForwardTsnChunk::kType; -absl::optional ForwardTsnChunk::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional ForwardTsnChunk::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } TSN new_cumulative_tsn(reader->Load32<4>()); @@ -70,7 +70,7 @@ absl::optional ForwardTsnChunk::Parse( } void ForwardTsnChunk::SerializeTo(std::vector& out) const { - rtc::ArrayView skipped = skipped_streams(); + webrtc::ArrayView skipped = skipped_streams(); size_t variable_size = skipped.size() * 
kSkippedStreamBufferSize; BoundedByteWriter writer = AllocateTLV(out, variable_size); @@ -85,11 +85,11 @@ void ForwardTsnChunk::SerializeTo(std::vector& out) const { } std::string ForwardTsnChunk::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "FORWARD-TSN, new_cumulative_tsn=" << *new_cumulative_tsn(); for (const auto& skipped : skipped_streams()) { sb << ", skip " << skipped.stream_id.value() << ":" << *skipped.ssn; } - return sb.str(); + return sb.Release(); } } // namespace dcsctp diff --git a/net/dcsctp/packet/chunk/forward_tsn_chunk.h b/net/dcsctp/packet/chunk/forward_tsn_chunk.h index b9ef666f41..ae4250f524 100644 --- a/net/dcsctp/packet/chunk/forward_tsn_chunk.h +++ b/net/dcsctp/packet/chunk/forward_tsn_chunk.h @@ -40,8 +40,8 @@ class ForwardTsnChunk : public AnyForwardTsnChunk, std::vector skipped_streams) : AnyForwardTsnChunk(new_cumulative_tsn, std::move(skipped_streams)) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/chunk/forward_tsn_common.h b/net/dcsctp/packet/chunk/forward_tsn_common.h index 37bd2aafff..ede879c599 100644 --- a/net/dcsctp/packet/chunk/forward_tsn_common.h +++ b/net/dcsctp/packet/chunk/forward_tsn_common.h @@ -24,12 +24,9 @@ class AnyForwardTsnChunk : public Chunk { public: struct SkippedStream { SkippedStream(StreamID stream_id, SSN ssn) - : stream_id(stream_id), ssn(ssn), unordered(false), message_id(0) {} - SkippedStream(IsUnordered unordered, StreamID stream_id, MID message_id) - : stream_id(stream_id), - ssn(0), - unordered(unordered), - message_id(message_id) {} + : stream_id(stream_id), ssn(ssn), unordered(false), mid(0) {} + SkippedStream(IsUnordered unordered, StreamID stream_id, MID mid) + : stream_id(stream_id), ssn(0), unordered(unordered), mid(mid) {} StreamID stream_id; @@ -38,11 +35,11 @@ class AnyForwardTsnChunk : public Chunk { // Set for I-FORWARD_TSN IsUnordered unordered; - MID message_id; + MID mid; bool operator==(const SkippedStream& other) const { return stream_id == other.stream_id && ssn == other.ssn && - unordered == other.unordered && message_id == other.message_id; + unordered == other.unordered && mid == other.mid; } }; @@ -53,7 +50,7 @@ class AnyForwardTsnChunk : public Chunk { TSN new_cumulative_tsn() const { return new_cumulative_tsn_; } - rtc::ArrayView skipped_streams() const { + webrtc::ArrayView skipped_streams() const { return skipped_streams_; } diff --git a/net/dcsctp/packet/chunk/heartbeat_ack_chunk.cc b/net/dcsctp/packet/chunk/heartbeat_ack_chunk.cc index 3cbcd09c75..68855bc52a 100644 --- a/net/dcsctp/packet/chunk/heartbeat_ack_chunk.cc +++ b/net/dcsctp/packet/chunk/heartbeat_ack_chunk.cc @@ -11,10 +11,10 @@ #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -36,23 +36,23 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int HeartbeatAckChunk::kType; -absl::optional HeartbeatAckChunk::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional HeartbeatAckChunk::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } - absl::optional parameters = + std::optional parameters = 
Parameters::Parse(reader->variable_data()); if (!parameters.has_value()) { - return absl::nullopt; + return std::nullopt; } return HeartbeatAckChunk(*std::move(parameters)); } void HeartbeatAckChunk::SerializeTo(std::vector& out) const { - rtc::ArrayView parameters = parameters_.data(); + webrtc::ArrayView parameters = parameters_.data(); BoundedByteWriter writer = AllocateTLV(out, parameters.size()); writer.CopyToVariableData(parameters); } diff --git a/net/dcsctp/packet/chunk/heartbeat_ack_chunk.h b/net/dcsctp/packet/chunk/heartbeat_ack_chunk.h index a6479f78b0..ed7b77f8bc 100644 --- a/net/dcsctp/packet/chunk/heartbeat_ack_chunk.h +++ b/net/dcsctp/packet/chunk/heartbeat_ack_chunk.h @@ -43,15 +43,15 @@ class HeartbeatAckChunk : public Chunk, HeartbeatAckChunk(HeartbeatAckChunk&& other) = default; HeartbeatAckChunk& operator=(HeartbeatAckChunk&& other) = default; - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; const Parameters& parameters() const { return parameters_; } - absl::optional info() const { + std::optional info() const { return parameters_.get(); } diff --git a/net/dcsctp/packet/chunk/heartbeat_request_chunk.cc b/net/dcsctp/packet/chunk/heartbeat_request_chunk.cc index d759d6b16d..dd8f3e8c8b 100644 --- a/net/dcsctp/packet/chunk/heartbeat_request_chunk.cc +++ b/net/dcsctp/packet/chunk/heartbeat_request_chunk.cc @@ -11,10 +11,10 @@ #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -36,23 +36,23 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int HeartbeatRequestChunk::kType; -absl::optional HeartbeatRequestChunk::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional HeartbeatRequestChunk::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } - absl::optional parameters = + std::optional parameters = Parameters::Parse(reader->variable_data()); if (!parameters.has_value()) { - return absl::nullopt; + return std::nullopt; } return HeartbeatRequestChunk(*std::move(parameters)); } void HeartbeatRequestChunk::SerializeTo(std::vector& out) const { - rtc::ArrayView parameters = parameters_.data(); + webrtc::ArrayView parameters = parameters_.data(); BoundedByteWriter writer = AllocateTLV(out, parameters.size()); writer.CopyToVariableData(parameters); } diff --git a/net/dcsctp/packet/chunk/heartbeat_request_chunk.h b/net/dcsctp/packet/chunk/heartbeat_request_chunk.h index fe2ce19504..3b940a410b 100644 --- a/net/dcsctp/packet/chunk/heartbeat_request_chunk.h +++ b/net/dcsctp/packet/chunk/heartbeat_request_chunk.h @@ -42,15 +42,15 @@ class HeartbeatRequestChunk : public Chunk, HeartbeatRequestChunk(HeartbeatRequestChunk&& other) = default; HeartbeatRequestChunk& operator=(HeartbeatRequestChunk&& other) = default; - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; const Parameters& parameters() const { return parameters_; } Parameters extract_parameters() && { return std::move(parameters_); } - absl::optional info() const { + std::optional info() const { 
return parameters_.get(); } diff --git a/net/dcsctp/packet/chunk/idata_chunk.cc b/net/dcsctp/packet/chunk/idata_chunk.cc index 9f19c7f053..e084dc332f 100644 --- a/net/dcsctp/packet/chunk/idata_chunk.cc +++ b/net/dcsctp/packet/chunk/idata_chunk.cc @@ -11,11 +11,11 @@ #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -45,16 +45,16 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int IDataChunk::kType; -absl::optional IDataChunk::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional IDataChunk::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } uint8_t flags = reader->Load8<1>(); TSN tsn(reader->Load32<4>()); StreamID stream_identifier(reader->Load16<8>()); - MID message_id(reader->Load32<12>()); + MID mid(reader->Load32<12>()); uint32_t ppid_or_fsn = reader->Load32<16>(); Options options; @@ -65,7 +65,7 @@ absl::optional IDataChunk::Parse( options.immediate_ack = ImmediateAckFlag((flags & (1 << kFlagsBitImmediateAck)) != 0); - return IDataChunk(tsn, stream_identifier, message_id, + return IDataChunk(tsn, stream_identifier, mid, PPID(options.is_beginning ? ppid_or_fsn : 0), FSN(options.is_beginning ? 0 : ppid_or_fsn), std::vector(reader->variable_data().begin(), @@ -83,13 +83,13 @@ void IDataChunk::SerializeTo(std::vector& out) const { (*options().immediate_ack ? (1 << kFlagsBitImmediateAck) : 0)); writer.Store32<4>(*tsn()); writer.Store16<8>(*stream_id()); - writer.Store32<12>(*message_id()); + writer.Store32<12>(*mid()); writer.Store32<16>(options().is_beginning ? *ppid() : *fsn()); writer.CopyToVariableData(payload()); } std::string IDataChunk::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "I-DATA, type=" << (options().is_unordered ? "unordered" : "ordered") << "::" << (*options().is_beginning && *options().is_end ? "complete" @@ -97,7 +97,7 @@ std::string IDataChunk::ToString() const { : *options().is_end ? 
"last" : "middle") << ", tsn=" << *tsn() << ", stream_id=" << *stream_id() - << ", message_id=" << *message_id(); + << ", mid=" << *mid(); if (*options().is_beginning) { sb << ", ppid=" << *ppid(); diff --git a/net/dcsctp/packet/chunk/idata_chunk.h b/net/dcsctp/packet/chunk/idata_chunk.h index 8cdf2a1fc4..c2298d02b8 100644 --- a/net/dcsctp/packet/chunk/idata_chunk.h +++ b/net/dcsctp/packet/chunk/idata_chunk.h @@ -42,7 +42,7 @@ class IDataChunk : public AnyDataChunk, public TLVTrait { static constexpr size_t kHeaderSize = IDataChunkConfig::kHeaderSize; IDataChunk(TSN tsn, StreamID stream_id, - MID message_id, + MID mid, PPID ppid, FSN fsn, std::vector payload, @@ -50,7 +50,7 @@ class IDataChunk : public AnyDataChunk, public TLVTrait { : AnyDataChunk(tsn, stream_id, SSN(0), - message_id, + mid, fsn, ppid, std::move(payload), @@ -59,7 +59,7 @@ class IDataChunk : public AnyDataChunk, public TLVTrait { explicit IDataChunk(TSN tsn, Data&& data, bool immediate_ack) : AnyDataChunk(tsn, std::move(data), immediate_ack) {} - static absl::optional Parse(rtc::ArrayView data); + static std::optional Parse(webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/chunk/idata_chunk_test.cc b/net/dcsctp/packet/chunk/idata_chunk_test.cc index fea492d71e..d1cc0d8f90 100644 --- a/net/dcsctp/packet/chunk/idata_chunk_test.cc +++ b/net/dcsctp/packet/chunk/idata_chunk_test.cc @@ -44,7 +44,7 @@ TEST(IDataChunkTest, AtBeginningFromCapture) { ASSERT_HAS_VALUE_AND_ASSIGN(IDataChunk chunk, IDataChunk::Parse(data)); EXPECT_EQ(*chunk.tsn(), 2487901653); EXPECT_EQ(*chunk.stream_id(), 1); - EXPECT_EQ(*chunk.message_id(), 0u); + EXPECT_EQ(*chunk.mid(), 0u); EXPECT_EQ(*chunk.ppid(), 53u); EXPECT_EQ(*chunk.fsn(), 0u); // Not provided (so set to zero) } @@ -62,13 +62,13 @@ TEST(IDataChunkTest, AtBeginningSerializeAndDeserialize) { IDataChunk::Parse(serialized)); EXPECT_EQ(*deserialized.tsn(), 123u); EXPECT_EQ(*deserialized.stream_id(), 456u); - EXPECT_EQ(*deserialized.message_id(), 789u); + EXPECT_EQ(*deserialized.mid(), 789u); EXPECT_EQ(*deserialized.ppid(), 53u); EXPECT_EQ(*deserialized.fsn(), 0u); EXPECT_EQ(deserialized.ToString(), "I-DATA, type=ordered::first, tsn=123, stream_id=456, " - "message_id=789, ppid=53, length=1"); + "mid=789, ppid=53, length=1"); } TEST(IDataChunkTest, InMiddleFromCapture) { @@ -93,7 +93,7 @@ TEST(IDataChunkTest, InMiddleFromCapture) { ASSERT_HAS_VALUE_AND_ASSIGN(IDataChunk chunk, IDataChunk::Parse(data)); EXPECT_EQ(*chunk.tsn(), 2487901706); EXPECT_EQ(*chunk.stream_id(), 3u); - EXPECT_EQ(*chunk.message_id(), 1u); + EXPECT_EQ(*chunk.mid(), 1u); EXPECT_EQ(*chunk.ppid(), 0u); // Not provided (so set to zero) EXPECT_EQ(*chunk.fsn(), 8u); } @@ -109,14 +109,14 @@ TEST(IDataChunkTest, InMiddleSerializeAndDeserialize) { IDataChunk::Parse(serialized)); EXPECT_EQ(*deserialized.tsn(), 123u); EXPECT_EQ(*deserialized.stream_id(), 456u); - EXPECT_EQ(*deserialized.message_id(), 789u); + EXPECT_EQ(*deserialized.mid(), 789u); EXPECT_EQ(*deserialized.ppid(), 0u); EXPECT_EQ(*deserialized.fsn(), 101112u); EXPECT_THAT(deserialized.payload(), ElementsAre(1, 2, 3)); EXPECT_EQ(deserialized.ToString(), "I-DATA, type=ordered::middle, tsn=123, stream_id=456, " - "message_id=789, fsn=101112, length=3"); + "mid=789, fsn=101112, length=3"); } } // namespace diff --git a/net/dcsctp/packet/chunk/iforward_tsn_chunk.cc b/net/dcsctp/packet/chunk/iforward_tsn_chunk.cc index a647a8bf8a..19f5780951 100644 --- 
a/net/dcsctp/packet/chunk/iforward_tsn_chunk.cc +++ b/net/dcsctp/packet/chunk/iforward_tsn_chunk.cc @@ -12,11 +12,11 @@ #include #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -48,11 +48,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int IForwardTsnChunk::kType; -absl::optional IForwardTsnChunk::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional IForwardTsnChunk::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } TSN new_cumulative_tsn(reader->Load32<4>()); @@ -68,8 +68,8 @@ absl::optional IForwardTsnChunk::Parse( StreamID stream_id(sub_reader.Load16<0>()); IsUnordered unordered(sub_reader.Load8<3>() & 0x01); - MID message_id(sub_reader.Load32<4>()); - skipped_streams.emplace_back(unordered, stream_id, message_id); + MID mid(sub_reader.Load32<4>()); + skipped_streams.emplace_back(unordered, stream_id, mid); offset += kSkippedStreamBufferSize; } RTC_DCHECK(offset == reader->variable_data_size()); @@ -77,7 +77,7 @@ absl::optional IForwardTsnChunk::Parse( } void IForwardTsnChunk::SerializeTo(std::vector& out) const { - rtc::ArrayView skipped = skipped_streams(); + webrtc::ArrayView skipped = skipped_streams(); size_t variable_size = skipped.size() * kSkippedStreamBufferSize; BoundedByteWriter writer = AllocateTLV(out, variable_size); @@ -89,14 +89,14 @@ void IForwardTsnChunk::SerializeTo(std::vector& out) const { sub_writer.Store16<0>(*skipped[i].stream_id); sub_writer.Store8<3>(skipped[i].unordered ? 
1 : 0); - sub_writer.Store32<4>(*skipped[i].message_id); + sub_writer.Store32<4>(*skipped[i].mid); offset += kSkippedStreamBufferSize; } RTC_DCHECK(offset == variable_size); } std::string IForwardTsnChunk::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "I-FORWARD-TSN, new_cumulative_tsn=" << *new_cumulative_tsn(); return sb.Release(); } diff --git a/net/dcsctp/packet/chunk/iforward_tsn_chunk.h b/net/dcsctp/packet/chunk/iforward_tsn_chunk.h index 54d23f7a83..43ef648d6e 100644 --- a/net/dcsctp/packet/chunk/iforward_tsn_chunk.h +++ b/net/dcsctp/packet/chunk/iforward_tsn_chunk.h @@ -40,8 +40,8 @@ class IForwardTsnChunk : public AnyForwardTsnChunk, std::vector skipped_streams) : AnyForwardTsnChunk(new_cumulative_tsn, std::move(skipped_streams)) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/chunk/init_ack_chunk.cc b/net/dcsctp/packet/chunk/init_ack_chunk.cc index c7ef9da1f1..5b6a03c3b5 100644 --- a/net/dcsctp/packet/chunk/init_ack_chunk.cc +++ b/net/dcsctp/packet/chunk/init_ack_chunk.cc @@ -11,11 +11,11 @@ #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -46,11 +46,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int InitAckChunk::kType; -absl::optional InitAckChunk::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional InitAckChunk::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } VerificationTag initiate_tag(reader->Load32<4>()); @@ -58,17 +58,17 @@ absl::optional InitAckChunk::Parse( uint16_t nbr_outbound_streams = reader->Load16<12>(); uint16_t nbr_inbound_streams = reader->Load16<14>(); TSN initial_tsn(reader->Load32<16>()); - absl::optional parameters = + std::optional parameters = Parameters::Parse(reader->variable_data()); if (!parameters.has_value()) { - return absl::nullopt; + return std::nullopt; } return InitAckChunk(initiate_tag, a_rwnd, nbr_outbound_streams, nbr_inbound_streams, initial_tsn, *std::move(parameters)); } void InitAckChunk::SerializeTo(std::vector& out) const { - rtc::ArrayView parameters = parameters_.data(); + webrtc::ArrayView parameters = parameters_.data(); BoundedByteWriter writer = AllocateTLV(out, parameters.size()); writer.Store32<4>(*initiate_tag_); @@ -80,7 +80,7 @@ void InitAckChunk::SerializeTo(std::vector& out) const { } std::string InitAckChunk::ToString() const { - return rtc::StringFormat("INIT_ACK, initiate_tag=0x%0x, initial_tsn=%u", - *initiate_tag(), *initial_tsn()); + return webrtc::StringFormat("INIT_ACK, initiate_tag=0x%0x, initial_tsn=%u", + *initiate_tag(), *initial_tsn()); } } // namespace dcsctp diff --git a/net/dcsctp/packet/chunk/init_ack_chunk.h b/net/dcsctp/packet/chunk/init_ack_chunk.h index 6fcf64b2eb..0b11007a98 100644 --- a/net/dcsctp/packet/chunk/init_ack_chunk.h +++ b/net/dcsctp/packet/chunk/init_ack_chunk.h @@ -51,7 +51,8 @@ class InitAckChunk : public Chunk, public TLVTrait { InitAckChunk(InitAckChunk&& other) = default; InitAckChunk& operator=(InitAckChunk&& other) = default; - static absl::optional Parse(rtc::ArrayView data); + static std::optional Parse( + 
webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/chunk/init_chunk.cc b/net/dcsctp/packet/chunk/init_chunk.cc index 8030107072..c9ca4ae711 100644 --- a/net/dcsctp/packet/chunk/init_chunk.cc +++ b/net/dcsctp/packet/chunk/init_chunk.cc @@ -11,11 +11,11 @@ #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -46,10 +46,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int InitChunk::kType; -absl::optional InitChunk::Parse(rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional InitChunk::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } VerificationTag initiate_tag(reader->Load32<4>()); @@ -58,17 +59,17 @@ absl::optional InitChunk::Parse(rtc::ArrayView data) { uint16_t nbr_inbound_streams = reader->Load16<14>(); TSN initial_tsn(reader->Load32<16>()); - absl::optional parameters = + std::optional parameters = Parameters::Parse(reader->variable_data()); if (!parameters.has_value()) { - return absl::nullopt; + return std::nullopt; } return InitChunk(initiate_tag, a_rwnd, nbr_outbound_streams, nbr_inbound_streams, initial_tsn, *std::move(parameters)); } void InitChunk::SerializeTo(std::vector& out) const { - rtc::ArrayView parameters = parameters_.data(); + webrtc::ArrayView parameters = parameters_.data(); BoundedByteWriter writer = AllocateTLV(out, parameters.size()); writer.Store32<4>(*initiate_tag_); @@ -81,8 +82,8 @@ void InitChunk::SerializeTo(std::vector& out) const { } std::string InitChunk::ToString() const { - return rtc::StringFormat("INIT, initiate_tag=0x%0x, initial_tsn=%u", - *initiate_tag(), *initial_tsn()); + return webrtc::StringFormat("INIT, initiate_tag=0x%0x, initial_tsn=%u", + *initiate_tag(), *initial_tsn()); } } // namespace dcsctp diff --git a/net/dcsctp/packet/chunk/init_chunk.h b/net/dcsctp/packet/chunk/init_chunk.h index 38f9994caa..fba4504102 100644 --- a/net/dcsctp/packet/chunk/init_chunk.h +++ b/net/dcsctp/packet/chunk/init_chunk.h @@ -51,7 +51,7 @@ class InitChunk : public Chunk, public TLVTrait { InitChunk(InitChunk&& other) = default; InitChunk& operator=(InitChunk&& other) = default; - static absl::optional Parse(rtc::ArrayView data); + static std::optional Parse(webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/chunk/reconfig_chunk.cc b/net/dcsctp/packet/chunk/reconfig_chunk.cc index f39f3b619f..6cbbd17ef0 100644 --- a/net/dcsctp/packet/chunk/reconfig_chunk.cc +++ b/net/dcsctp/packet/chunk/reconfig_chunk.cc @@ -11,10 +11,10 @@ #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -40,24 +40,24 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int ReConfigChunk::kType; -absl::optional ReConfigChunk::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional ReConfigChunk::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return 
std::nullopt; } - absl::optional parameters = + std::optional parameters = Parameters::Parse(reader->variable_data()); if (!parameters.has_value()) { - return absl::nullopt; + return std::nullopt; } return ReConfigChunk(*std::move(parameters)); } void ReConfigChunk::SerializeTo(std::vector& out) const { - rtc::ArrayView parameters = parameters_.data(); + webrtc::ArrayView parameters = parameters_.data(); BoundedByteWriter writer = AllocateTLV(out, parameters.size()); writer.CopyToVariableData(parameters); } diff --git a/net/dcsctp/packet/chunk/reconfig_chunk.h b/net/dcsctp/packet/chunk/reconfig_chunk.h index 9d2539a515..d9c85da6b7 100644 --- a/net/dcsctp/packet/chunk/reconfig_chunk.h +++ b/net/dcsctp/packet/chunk/reconfig_chunk.h @@ -38,8 +38,8 @@ class ReConfigChunk : public Chunk, public TLVTrait { explicit ReConfigChunk(Parameters parameters) : parameters_(std::move(parameters)) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/chunk/sack_chunk.cc b/net/dcsctp/packet/chunk/sack_chunk.cc index d80e430082..108c5cab0a 100644 --- a/net/dcsctp/packet/chunk/sack_chunk.cc +++ b/net/dcsctp/packet/chunk/sack_chunk.cc @@ -12,17 +12,17 @@ #include #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" -#include "net/dcsctp/common/str_join.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" #include "net/dcsctp/packet/tlv_trait.h" #include "rtc_base/logging.h" +#include "rtc_base/strings/str_join.h" #include "rtc_base/strings/string_builder.h" namespace dcsctp { @@ -58,10 +58,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int SackChunk::kType; -absl::optional SackChunk::Parse(rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional SackChunk::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } TSN tsn_ack(reader->Load32<4>()); @@ -72,7 +73,7 @@ absl::optional SackChunk::Parse(rtc::ArrayView data) { if (reader->variable_data_size() != nbr_of_gap_blocks * kGapAckBlockSize + nbr_of_dup_tsns * kDupTsnBlockSize) { RTC_DLOG(LS_WARNING) << "Invalid number of gap blocks or duplicate TSNs"; - return absl::nullopt; + return std::nullopt; } std::vector gap_ack_blocks; @@ -135,7 +136,7 @@ void SackChunk::SerializeTo(std::vector& out) const { } std::string SackChunk::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "SACK, cum_ack_tsn=" << *cumulative_tsn_ack() << ", a_rwnd=" << a_rwnd(); for (const GapAckBlock& gap : gap_ack_blocks_) { @@ -145,8 +146,9 @@ std::string SackChunk::ToString() const { } if (!duplicate_tsns_.empty()) { sb << ", dup_tsns=" - << StrJoin(duplicate_tsns(), ",", - [](rtc::StringBuilder& sb, TSN tsn) { sb << *tsn; }); + << webrtc::StrJoin( + duplicate_tsns(), ",", + [](webrtc::StringBuilder& sb, TSN tsn) { sb << *tsn; }); } return sb.Release(); diff --git a/net/dcsctp/packet/chunk/sack_chunk.h b/net/dcsctp/packet/chunk/sack_chunk.h index e6758fa332..59347f4dc4 100644 --- a/net/dcsctp/packet/chunk/sack_chunk.h +++ b/net/dcsctp/packet/chunk/sack_chunk.h @@ -54,14 +54,14 @@ class SackChunk : public Chunk, public TLVTrait { a_rwnd_(a_rwnd), gap_ack_blocks_(std::move(gap_ack_blocks)), 
duplicate_tsns_(std::move(duplicate_tsns)) {} - static absl::optional Parse(rtc::ArrayView data); + static std::optional Parse(webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; TSN cumulative_tsn_ack() const { return cumulative_tsn_ack_; } uint32_t a_rwnd() const { return a_rwnd_; } - rtc::ArrayView gap_ack_blocks() const { + webrtc::ArrayView gap_ack_blocks() const { return gap_ack_blocks_; } const std::set& duplicate_tsns() const { return duplicate_tsns_; } diff --git a/net/dcsctp/packet/chunk/shutdown_ack_chunk.cc b/net/dcsctp/packet/chunk/shutdown_ack_chunk.cc index d42aceead4..31ba417f70 100644 --- a/net/dcsctp/packet/chunk/shutdown_ack_chunk.cc +++ b/net/dcsctp/packet/chunk/shutdown_ack_chunk.cc @@ -11,9 +11,9 @@ #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" namespace dcsctp { @@ -27,10 +27,10 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int ShutdownAckChunk::kType; -absl::optional ShutdownAckChunk::Parse( - rtc::ArrayView data) { +std::optional ShutdownAckChunk::Parse( + webrtc::ArrayView data) { if (!ParseTLV(data).has_value()) { - return absl::nullopt; + return std::nullopt; } return ShutdownAckChunk(); } diff --git a/net/dcsctp/packet/chunk/shutdown_ack_chunk.h b/net/dcsctp/packet/chunk/shutdown_ack_chunk.h index 29c1a98be6..e7ae03ff66 100644 --- a/net/dcsctp/packet/chunk/shutdown_ack_chunk.h +++ b/net/dcsctp/packet/chunk/shutdown_ack_chunk.h @@ -35,8 +35,8 @@ class ShutdownAckChunk : public Chunk, public TLVTrait { ShutdownAckChunk() {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/chunk/shutdown_chunk.cc b/net/dcsctp/packet/chunk/shutdown_chunk.cc index 59f806f7f7..8038458724 100644 --- a/net/dcsctp/packet/chunk/shutdown_chunk.cc +++ b/net/dcsctp/packet/chunk/shutdown_chunk.cc @@ -11,10 +11,10 @@ #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -33,11 +33,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int ShutdownChunk::kType; -absl::optional ShutdownChunk::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional ShutdownChunk::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } TSN cumulative_tsn_ack(reader->Load32<4>()); diff --git a/net/dcsctp/packet/chunk/shutdown_chunk.h b/net/dcsctp/packet/chunk/shutdown_chunk.h index 8148cca286..bdb412e03d 100644 --- a/net/dcsctp/packet/chunk/shutdown_chunk.h +++ b/net/dcsctp/packet/chunk/shutdown_chunk.h @@ -36,8 +36,8 @@ class ShutdownChunk : public Chunk, public TLVTrait { explicit ShutdownChunk(TSN cumulative_tsn_ack) : cumulative_tsn_ack_(cumulative_tsn_ack) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/chunk/shutdown_complete_chunk.cc b/net/dcsctp/packet/chunk/shutdown_complete_chunk.cc index 3f54857437..0733f2164b 100644 --- 
a/net/dcsctp/packet/chunk/shutdown_complete_chunk.cc +++ b/net/dcsctp/packet/chunk/shutdown_complete_chunk.cc @@ -11,10 +11,10 @@ #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -31,11 +31,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int ShutdownCompleteChunk::kType; -absl::optional ShutdownCompleteChunk::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional ShutdownCompleteChunk::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } uint8_t flags = reader->Load8<1>(); bool tag_reflected = (flags & (1 << kFlagsBitT)) != 0; diff --git a/net/dcsctp/packet/chunk/shutdown_complete_chunk.h b/net/dcsctp/packet/chunk/shutdown_complete_chunk.h index 46d28e88dc..73a6d92645 100644 --- a/net/dcsctp/packet/chunk/shutdown_complete_chunk.h +++ b/net/dcsctp/packet/chunk/shutdown_complete_chunk.h @@ -37,8 +37,8 @@ class ShutdownCompleteChunk : public Chunk, explicit ShutdownCompleteChunk(bool tag_reflected) : tag_reflected_(tag_reflected) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/crc32c.cc b/net/dcsctp/packet/crc32c.cc index e3f0dc1d19..adcb4aff17 100644 --- a/net/dcsctp/packet/crc32c.cc +++ b/net/dcsctp/packet/crc32c.cc @@ -15,7 +15,7 @@ namespace dcsctp { -uint32_t GenerateCrc32C(rtc::ArrayView data) { +uint32_t GenerateCrc32C(webrtc::ArrayView data) { uint32_t crc32c = crc32c_value(data.data(), data.size()); // Byte swapping for little endian byte order: diff --git a/net/dcsctp/packet/crc32c.h b/net/dcsctp/packet/crc32c.h index a969e1b26b..71ef16ceeb 100644 --- a/net/dcsctp/packet/crc32c.h +++ b/net/dcsctp/packet/crc32c.h @@ -17,7 +17,7 @@ namespace dcsctp { // Generates the CRC32C checksum of `data`. -uint32_t GenerateCrc32C(rtc::ArrayView data); +uint32_t GenerateCrc32C(webrtc::ArrayView data); } // namespace dcsctp diff --git a/net/dcsctp/packet/data.h b/net/dcsctp/packet/data.h index c1754ed59a..08174dc052 100644 --- a/net/dcsctp/packet/data.h +++ b/net/dcsctp/packet/data.h @@ -42,7 +42,7 @@ struct Data { Data(StreamID stream_id, SSN ssn, - MID message_id, + MID mid, FSN fsn, PPID ppid, std::vector payload, @@ -51,7 +51,7 @@ struct Data { IsUnordered is_unordered) : stream_id(stream_id), ssn(ssn), - message_id(message_id), + mid(mid), fsn(fsn), ppid(ppid), payload(std::move(payload)), @@ -65,8 +65,8 @@ struct Data { // Creates a copy of this `Data` object. Data Clone() const { - return Data(stream_id, ssn, message_id, fsn, ppid, payload, is_beginning, - is_end, is_unordered); + return Data(stream_id, ssn, mid, fsn, ppid, payload, is_beginning, is_end, + is_unordered); } // The size of this data, which translates to the size of its payload. @@ -82,7 +82,7 @@ struct Data { // Message Identifier (MID) per stream and ordered/unordered. Defined by // RFC8260, and used together with options.is_unordered and stream_id to // uniquely identify a message. Used only in I-DATA chunks (not DATA). - MID message_id; + MID mid; // Fragment Sequence Number (FSN) per stream and ordered/unordered, as above. 
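// (Illustrative, not part of this change: for I-DATA, fragments of one message
// share the same (stream_id, is_unordered, mid) and are ordered by fsn, so a
// hypothetical reassembly structure could be keyed as
//   std::map<std::tuple<StreamID, IsUnordered, MID>, std::map<FSN, Data>>;
// for plain DATA chunks, ssn and the chunk TSNs play the corresponding role.)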
FSN fsn; diff --git a/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.cc b/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.cc index ef67c2a49f..61dc953f11 100644 --- a/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.cc +++ b/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.cc @@ -11,9 +11,9 @@ #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" namespace dcsctp { @@ -25,11 +25,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int CookieReceivedWhileShuttingDownCause::kType; -absl::optional +std::optional CookieReceivedWhileShuttingDownCause::Parse( - rtc::ArrayView data) { + webrtc::ArrayView data) { if (!ParseTLV(data).has_value()) { - return absl::nullopt; + return std::nullopt; } return CookieReceivedWhileShuttingDownCause(); } diff --git a/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.h b/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.h index 362f181fba..cf5c2f3392 100644 --- a/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.h +++ b/net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.h @@ -38,8 +38,8 @@ class CookieReceivedWhileShuttingDownCause CookieReceivedWhileShuttingDownCause() {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/error_cause/error_cause.cc b/net/dcsctp/packet/error_cause/error_cause.cc index dcd07472ed..291cc51776 100644 --- a/net/dcsctp/packet/error_cause/error_cause.cc +++ b/net/dcsctp/packet/error_cause/error_cause.cc @@ -13,11 +13,11 @@ #include #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/common/math.h" #include "net/dcsctp/packet/error_cause/cookie_received_while_shutting_down_cause.h" @@ -38,9 +38,9 @@ namespace dcsctp { template -bool ParseAndPrint(ParameterDescriptor descriptor, rtc::StringBuilder& sb) { +bool ParseAndPrint(ParameterDescriptor descriptor, webrtc::StringBuilder& sb) { if (descriptor.type == ErrorCause::kType) { - absl::optional p = ErrorCause::Parse(descriptor.data); + std::optional p = ErrorCause::Parse(descriptor.data); if (p.has_value()) { sb << p->ToString(); } else { @@ -52,7 +52,7 @@ bool ParseAndPrint(ParameterDescriptor descriptor, rtc::StringBuilder& sb) { } std::string ErrorCausesToString(const Parameters& parameters) { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; std::vector descriptors = parameters.descriptors(); for (size_t i = 0; i < descriptors.size(); ++i) { diff --git a/net/dcsctp/packet/error_cause/error_cause.h b/net/dcsctp/packet/error_cause/error_cause.h index fa2bf81478..a4247cbff6 100644 --- a/net/dcsctp/packet/error_cause/error_cause.h +++ b/net/dcsctp/packet/error_cause/error_cause.h @@ -15,6 +15,7 @@ #include #include #include +#include #include #include #include @@ -22,7 +23,6 @@ #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/parameter/parameter.h" #include "net/dcsctp/packet/tlv_trait.h" diff --git a/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.cc b/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.cc index 
0187544226..8ddd4fb495 100644 --- a/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.cc +++ b/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.cc @@ -11,9 +11,9 @@ #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" namespace dcsctp { @@ -25,10 +25,10 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int InvalidMandatoryParameterCause::kType; -absl::optional -InvalidMandatoryParameterCause::Parse(rtc::ArrayView data) { +std::optional +InvalidMandatoryParameterCause::Parse(webrtc::ArrayView data) { if (!ParseTLV(data).has_value()) { - return absl::nullopt; + return std::nullopt; } return InvalidMandatoryParameterCause(); } diff --git a/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.h b/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.h index e192b5a42f..cdb2a06370 100644 --- a/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.h +++ b/net/dcsctp/packet/error_cause/invalid_mandatory_parameter_cause.h @@ -37,8 +37,8 @@ class InvalidMandatoryParameterCause InvalidMandatoryParameterCause() {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.cc b/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.cc index b2ddd6f4ef..225ed71e38 100644 --- a/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.cc +++ b/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.cc @@ -11,11 +11,11 @@ #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -33,11 +33,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int InvalidStreamIdentifierCause::kType; -absl::optional -InvalidStreamIdentifierCause::Parse(rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional InvalidStreamIdentifierCause::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } StreamID stream_id(reader->Load16<4>()); @@ -52,7 +52,7 @@ void InvalidStreamIdentifierCause::SerializeTo( } std::string InvalidStreamIdentifierCause::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "Invalid Stream Identifier, stream_id=" << *stream_id_; return sb.Release(); } diff --git a/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.h b/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.h index b7dfe177b8..795f138de5 100644 --- a/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.h +++ b/net/dcsctp/packet/error_cause/invalid_stream_identifier_cause.h @@ -39,8 +39,8 @@ class InvalidStreamIdentifierCause explicit InvalidStreamIdentifierCause(StreamID stream_id) : stream_id_(stream_id) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc b/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc index b89f86e43e..378d83d5b2 100644 --- 
a/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc +++ b/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.cc @@ -12,17 +12,17 @@ #include #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" -#include "net/dcsctp/common/str_join.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" #include "net/dcsctp/packet/tlv_trait.h" #include "rtc_base/logging.h" +#include "rtc_base/strings/str_join.h" #include "rtc_base/strings/string_builder.h" namespace dcsctp { @@ -40,17 +40,17 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int MissingMandatoryParameterCause::kType; -absl::optional -MissingMandatoryParameterCause::Parse(rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional +MissingMandatoryParameterCause::Parse(webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } uint32_t count = reader->Load32<4>(); if (reader->variable_data_size() / kMissingParameterSize != count) { RTC_DLOG(LS_WARNING) << "Invalid number of missing parameters"; - return absl::nullopt; + return std::nullopt; } std::vector missing_parameter_types; @@ -81,9 +81,9 @@ void MissingMandatoryParameterCause::SerializeTo( } std::string MissingMandatoryParameterCause::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "Missing Mandatory Parameter, missing_parameter_types=" - << StrJoin(missing_parameter_types_, ","); + << webrtc::StrJoin(missing_parameter_types_, ","); return sb.Release(); } diff --git a/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.h b/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.h index 4435424295..a777c5f189 100644 --- a/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.h +++ b/net/dcsctp/packet/error_cause/missing_mandatory_parameter_cause.h @@ -36,17 +36,17 @@ class MissingMandatoryParameterCause static constexpr int kType = MissingMandatoryParameterCauseConfig::kType; explicit MissingMandatoryParameterCause( - rtc::ArrayView missing_parameter_types) + webrtc::ArrayView missing_parameter_types) : missing_parameter_types_(missing_parameter_types.begin(), missing_parameter_types.end()) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; - rtc::ArrayView missing_parameter_types() const { + webrtc::ArrayView missing_parameter_types() const { return missing_parameter_types_; } diff --git a/net/dcsctp/packet/error_cause/no_user_data_cause.cc b/net/dcsctp/packet/error_cause/no_user_data_cause.cc index 2853915b0c..45cef7e9ec 100644 --- a/net/dcsctp/packet/error_cause/no_user_data_cause.cc +++ b/net/dcsctp/packet/error_cause/no_user_data_cause.cc @@ -11,11 +11,11 @@ #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -34,11 +34,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int NoUserDataCause::kType; -absl::optional NoUserDataCause::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional NoUserDataCause::Parse( + webrtc::ArrayView data) { + 
std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } TSN tsn(reader->Load32<4>()); return NoUserDataCause(tsn); @@ -50,7 +50,7 @@ void NoUserDataCause::SerializeTo(std::vector& out) const { } std::string NoUserDataCause::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "No User Data, tsn=" << *tsn_; return sb.Release(); } diff --git a/net/dcsctp/packet/error_cause/no_user_data_cause.h b/net/dcsctp/packet/error_cause/no_user_data_cause.h index 1087dcc97c..a462a698cf 100644 --- a/net/dcsctp/packet/error_cause/no_user_data_cause.h +++ b/net/dcsctp/packet/error_cause/no_user_data_cause.h @@ -36,8 +36,8 @@ class NoUserDataCause : public Parameter, explicit NoUserDataCause(TSN tsn) : tsn_(tsn) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/error_cause/out_of_resource_error_cause.cc b/net/dcsctp/packet/error_cause/out_of_resource_error_cause.cc index e5c7c0e787..ffa3c16967 100644 --- a/net/dcsctp/packet/error_cause/out_of_resource_error_cause.cc +++ b/net/dcsctp/packet/error_cause/out_of_resource_error_cause.cc @@ -11,9 +11,9 @@ #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" namespace dcsctp { @@ -25,10 +25,10 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int OutOfResourceErrorCause::kType; -absl::optional OutOfResourceErrorCause::Parse( - rtc::ArrayView data) { +std::optional OutOfResourceErrorCause::Parse( + webrtc::ArrayView data) { if (!ParseTLV(data).has_value()) { - return absl::nullopt; + return std::nullopt; } return OutOfResourceErrorCause(); } diff --git a/net/dcsctp/packet/error_cause/out_of_resource_error_cause.h b/net/dcsctp/packet/error_cause/out_of_resource_error_cause.h index fc798ca4ac..b8b511ef2a 100644 --- a/net/dcsctp/packet/error_cause/out_of_resource_error_cause.h +++ b/net/dcsctp/packet/error_cause/out_of_resource_error_cause.h @@ -36,8 +36,8 @@ class OutOfResourceErrorCause : public Parameter, OutOfResourceErrorCause() {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/error_cause/protocol_violation_cause.cc b/net/dcsctp/packet/error_cause/protocol_violation_cause.cc index 1b8d423afb..d7bc582050 100644 --- a/net/dcsctp/packet/error_cause/protocol_violation_cause.cc +++ b/net/dcsctp/packet/error_cause/protocol_violation_cause.cc @@ -11,11 +11,11 @@ #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -36,11 +36,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int ProtocolViolationCause::kType; -absl::optional ProtocolViolationCause::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional ProtocolViolationCause::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } return ProtocolViolationCause( std::string(reinterpret_cast(reader->variable_data().data()), @@ -50,13 +50,13 @@ 
absl::optional ProtocolViolationCause::Parse( void ProtocolViolationCause::SerializeTo(std::vector& out) const { BoundedByteWriter writer = AllocateTLV(out, additional_information_.size()); - writer.CopyToVariableData(rtc::MakeArrayView( + writer.CopyToVariableData(webrtc::MakeArrayView( reinterpret_cast(additional_information_.data()), additional_information_.size())); } std::string ProtocolViolationCause::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "Protocol Violation, additional_information=" << additional_information_; return sb.Release(); diff --git a/net/dcsctp/packet/error_cause/protocol_violation_cause.h b/net/dcsctp/packet/error_cause/protocol_violation_cause.h index 3081e1f28c..bf7f1fb87e 100644 --- a/net/dcsctp/packet/error_cause/protocol_violation_cause.h +++ b/net/dcsctp/packet/error_cause/protocol_violation_cause.h @@ -37,8 +37,8 @@ class ProtocolViolationCause : public Parameter, explicit ProtocolViolationCause(absl::string_view additional_information) : additional_information_(additional_information) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.cc b/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.cc index abe5de6211..fd3dfa88b7 100644 --- a/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.cc +++ b/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.cc @@ -11,10 +11,10 @@ #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -34,12 +34,12 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int RestartOfAnAssociationWithNewAddressesCause::kType; -absl::optional +std::optional RestartOfAnAssociationWithNewAddressesCause::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } return RestartOfAnAssociationWithNewAddressesCause(reader->variable_data()); } diff --git a/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.h b/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.h index a1cccdc8a1..d7729e28f5 100644 --- a/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.h +++ b/net/dcsctp/packet/error_cause/restart_of_an_association_with_new_address_cause.h @@ -38,16 +38,16 @@ class RestartOfAnAssociationWithNewAddressesCause RestartOfAnAssociationWithNewAddressesCauseConfig::kType; explicit RestartOfAnAssociationWithNewAddressesCause( - rtc::ArrayView new_address_tlvs) + webrtc::ArrayView new_address_tlvs) : new_address_tlvs_(new_address_tlvs.begin(), new_address_tlvs.end()) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; - rtc::ArrayView new_address_tlvs() const { + webrtc::ArrayView new_address_tlvs() const { return new_address_tlvs_; } diff --git a/net/dcsctp/packet/error_cause/stale_cookie_error_cause.cc 
b/net/dcsctp/packet/error_cause/stale_cookie_error_cause.cc index d77d8488f1..122c28f4a9 100644 --- a/net/dcsctp/packet/error_cause/stale_cookie_error_cause.cc +++ b/net/dcsctp/packet/error_cause/stale_cookie_error_cause.cc @@ -11,11 +11,11 @@ #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -33,11 +33,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int StaleCookieErrorCause::kType; -absl::optional StaleCookieErrorCause::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional StaleCookieErrorCause::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } uint32_t staleness_us = reader->Load32<4>(); return StaleCookieErrorCause(staleness_us); @@ -49,7 +49,7 @@ void StaleCookieErrorCause::SerializeTo(std::vector& out) const { } std::string StaleCookieErrorCause::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "Stale Cookie Error, staleness_us=" << staleness_us_; return sb.Release(); } diff --git a/net/dcsctp/packet/error_cause/stale_cookie_error_cause.h b/net/dcsctp/packet/error_cause/stale_cookie_error_cause.h index d8b7b5b5bd..d44f3db976 100644 --- a/net/dcsctp/packet/error_cause/stale_cookie_error_cause.h +++ b/net/dcsctp/packet/error_cause/stale_cookie_error_cause.h @@ -37,8 +37,8 @@ class StaleCookieErrorCause : public Parameter, explicit StaleCookieErrorCause(uint32_t staleness_us) : staleness_us_(staleness_us) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.cc b/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.cc index 04b960d992..4745142daf 100644 --- a/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.cc +++ b/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.cc @@ -10,11 +10,11 @@ #include "net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.h" #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -33,11 +33,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int UnrecognizedChunkTypeCause::kType; -absl::optional UnrecognizedChunkTypeCause::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional UnrecognizedChunkTypeCause::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } std::vector unrecognized_chunk(reader->variable_data().begin(), reader->variable_data().end()); @@ -51,7 +51,7 @@ void UnrecognizedChunkTypeCause::SerializeTo(std::vector& out) const { } std::string UnrecognizedChunkTypeCause::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "Unrecognized Chunk Type, chunk_type="; if (!unrecognized_chunk_.empty()) { sb << static_cast(unrecognized_chunk_[0]); diff --git a/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.h 
b/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.h index 26d3d3b8f9..2467c56d4f 100644 --- a/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.h +++ b/net/dcsctp/packet/error_cause/unrecognized_chunk_type_cause.h @@ -40,13 +40,13 @@ class UnrecognizedChunkTypeCause explicit UnrecognizedChunkTypeCause(std::vector unrecognized_chunk) : unrecognized_chunk_(std::move(unrecognized_chunk)) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; - rtc::ArrayView unrecognized_chunk() const { + webrtc::ArrayView unrecognized_chunk() const { return unrecognized_chunk_; } diff --git a/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.cc b/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.cc index 80001a9eae..708111ed4c 100644 --- a/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.cc +++ b/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.cc @@ -11,10 +11,10 @@ #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -32,11 +32,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int UnrecognizedParametersCause::kType; -absl::optional UnrecognizedParametersCause::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional UnrecognizedParametersCause::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } return UnrecognizedParametersCause(reader->variable_data()); } diff --git a/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.h b/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.h index ebec5ed4c3..8c0df7f618 100644 --- a/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.h +++ b/net/dcsctp/packet/error_cause/unrecognized_parameter_cause.h @@ -37,17 +37,17 @@ class UnrecognizedParametersCause static constexpr int kType = UnrecognizedParametersCauseConfig::kType; explicit UnrecognizedParametersCause( - rtc::ArrayView unrecognized_parameters) + webrtc::ArrayView unrecognized_parameters) : unrecognized_parameters_(unrecognized_parameters.begin(), unrecognized_parameters.end()) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; - rtc::ArrayView unrecognized_parameters() const { + webrtc::ArrayView unrecognized_parameters() const { return unrecognized_parameters_; } diff --git a/net/dcsctp/packet/error_cause/unresolvable_address_cause.cc b/net/dcsctp/packet/error_cause/unresolvable_address_cause.cc index 8108d31aa7..b7b9ffbec7 100644 --- a/net/dcsctp/packet/error_cause/unresolvable_address_cause.cc +++ b/net/dcsctp/packet/error_cause/unresolvable_address_cause.cc @@ -11,10 +11,10 @@ #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -32,11 +32,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int UnresolvableAddressCause::kType; -absl::optional UnresolvableAddressCause::Parse( - rtc::ArrayView data) 
{ - absl::optional> reader = ParseTLV(data); +std::optional UnresolvableAddressCause::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } return UnresolvableAddressCause(reader->variable_data()); } diff --git a/net/dcsctp/packet/error_cause/unresolvable_address_cause.h b/net/dcsctp/packet/error_cause/unresolvable_address_cause.h index c63b3779ef..e42d3f2060 100644 --- a/net/dcsctp/packet/error_cause/unresolvable_address_cause.h +++ b/net/dcsctp/packet/error_cause/unresolvable_address_cause.h @@ -37,17 +37,17 @@ class UnresolvableAddressCause static constexpr int kType = UnresolvableAddressCauseConfig::kType; explicit UnresolvableAddressCause( - rtc::ArrayView unresolvable_address) + webrtc::ArrayView unresolvable_address) : unresolvable_address_(unresolvable_address.begin(), unresolvable_address.end()) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; - rtc::ArrayView unresolvable_address() const { + webrtc::ArrayView unresolvable_address() const { return unresolvable_address_; } diff --git a/net/dcsctp/packet/error_cause/user_initiated_abort_cause.cc b/net/dcsctp/packet/error_cause/user_initiated_abort_cause.cc index da99aacbfa..de86983fcb 100644 --- a/net/dcsctp/packet/error_cause/user_initiated_abort_cause.cc +++ b/net/dcsctp/packet/error_cause/user_initiated_abort_cause.cc @@ -11,11 +11,11 @@ #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -36,11 +36,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int UserInitiatedAbortCause::kType; -absl::optional UserInitiatedAbortCause::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional UserInitiatedAbortCause::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } if (reader->variable_data().empty()) { return UserInitiatedAbortCause(""); @@ -53,13 +53,13 @@ absl::optional UserInitiatedAbortCause::Parse( void UserInitiatedAbortCause::SerializeTo(std::vector& out) const { BoundedByteWriter writer = AllocateTLV(out, upper_layer_abort_reason_.size()); - writer.CopyToVariableData(rtc::MakeArrayView( + writer.CopyToVariableData(webrtc::MakeArrayView( reinterpret_cast(upper_layer_abort_reason_.data()), upper_layer_abort_reason_.size())); } std::string UserInitiatedAbortCause::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "User-Initiated Abort, reason=" << upper_layer_abort_reason_; return sb.Release(); } diff --git a/net/dcsctp/packet/error_cause/user_initiated_abort_cause.h b/net/dcsctp/packet/error_cause/user_initiated_abort_cause.h index 9eb16657b4..4dc452252a 100644 --- a/net/dcsctp/packet/error_cause/user_initiated_abort_cause.h +++ b/net/dcsctp/packet/error_cause/user_initiated_abort_cause.h @@ -37,8 +37,8 @@ class UserInitiatedAbortCause : public Parameter, explicit UserInitiatedAbortCause(absl::string_view upper_layer_abort_reason) : upper_layer_abort_reason_(upper_layer_abort_reason) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void 
SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.cc b/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.cc index c33e3e11f6..55b189bf2c 100644 --- a/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.cc +++ b/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.cc @@ -11,11 +11,11 @@ #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/common/internal_types.h" #include "net/dcsctp/packet/bounded_byte_reader.h" @@ -38,11 +38,12 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int AddIncomingStreamsRequestParameter::kType; -absl::optional -AddIncomingStreamsRequestParameter::Parse(rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional +AddIncomingStreamsRequestParameter::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } ReconfigRequestSN request_sequence_number(reader->Load32<4>()); uint16_t nbr_of_new_streams = reader->Load16<8>(); @@ -59,7 +60,7 @@ void AddIncomingStreamsRequestParameter::SerializeTo( } std::string AddIncomingStreamsRequestParameter::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "Add Incoming Streams Request, req_seq_nbr=" << *request_sequence_number(); return sb.Release(); diff --git a/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.h b/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.h index 3859eb3f7e..2eebd51d5d 100644 --- a/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.h +++ b/net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.h @@ -42,8 +42,8 @@ class AddIncomingStreamsRequestParameter : request_sequence_number_(request_sequence_number), nbr_of_new_streams_(nbr_of_new_streams) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.cc b/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.cc index 4787ee9718..3f9e9c377f 100644 --- a/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.cc +++ b/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.cc @@ -11,11 +11,11 @@ #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -37,11 +37,12 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int AddOutgoingStreamsRequestParameter::kType; -absl::optional -AddOutgoingStreamsRequestParameter::Parse(rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional +AddOutgoingStreamsRequestParameter::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } ReconfigRequestSN request_sequence_number(reader->Load32<4>()); uint16_t nbr_of_new_streams = reader->Load16<8>(); @@ -58,7 +59,7 @@ void AddOutgoingStreamsRequestParameter::SerializeTo( } std::string 
AddOutgoingStreamsRequestParameter::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "Add Outgoing Streams Request, req_seq_nbr=" << *request_sequence_number(); return sb.Release(); diff --git a/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.h b/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.h index 01e8f91cfa..e7f9fe6354 100644 --- a/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.h +++ b/net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.h @@ -42,8 +42,8 @@ class AddOutgoingStreamsRequestParameter : request_sequence_number_(request_sequence_number), nbr_of_new_streams_(nbr_of_new_streams) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.cc b/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.cc index 7dd8e1923f..fed399ff0b 100644 --- a/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.cc +++ b/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.cc @@ -11,9 +11,9 @@ #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" namespace dcsctp { @@ -26,10 +26,10 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int ForwardTsnSupportedParameter::kType; -absl::optional -ForwardTsnSupportedParameter::Parse(rtc::ArrayView data) { +std::optional ForwardTsnSupportedParameter::Parse( + webrtc::ArrayView data) { if (!ParseTLV(data).has_value()) { - return absl::nullopt; + return std::nullopt; } return ForwardTsnSupportedParameter(); } diff --git a/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.h b/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.h index d4cff4ac21..649b2dee05 100644 --- a/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.h +++ b/net/dcsctp/packet/parameter/forward_tsn_supported_parameter.h @@ -37,8 +37,8 @@ class ForwardTsnSupportedParameter ForwardTsnSupportedParameter() {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/parameter/heartbeat_info_parameter.cc b/net/dcsctp/packet/parameter/heartbeat_info_parameter.cc index 918976d305..93d283d545 100644 --- a/net/dcsctp/packet/parameter/heartbeat_info_parameter.cc +++ b/net/dcsctp/packet/parameter/heartbeat_info_parameter.cc @@ -11,11 +11,11 @@ #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -37,11 +37,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int HeartbeatInfoParameter::kType; -absl::optional HeartbeatInfoParameter::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional HeartbeatInfoParameter::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } return HeartbeatInfoParameter(reader->variable_data()); } @@ -52,7 +52,7 @@ void HeartbeatInfoParameter::SerializeTo(std::vector& out) const { } std::string 
HeartbeatInfoParameter::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "Heartbeat Info parameter (info_length=" << info_.size() << ")"; return sb.Release(); } diff --git a/net/dcsctp/packet/parameter/heartbeat_info_parameter.h b/net/dcsctp/packet/parameter/heartbeat_info_parameter.h index ec503a94b2..b07da8eeac 100644 --- a/net/dcsctp/packet/parameter/heartbeat_info_parameter.h +++ b/net/dcsctp/packet/parameter/heartbeat_info_parameter.h @@ -34,16 +34,16 @@ class HeartbeatInfoParameter : public Parameter, public: static constexpr int kType = HeartbeatInfoParameterConfig::kType; - explicit HeartbeatInfoParameter(rtc::ArrayView info) + explicit HeartbeatInfoParameter(webrtc::ArrayView info) : info_(info.begin(), info.end()) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; - rtc::ArrayView info() const { return info_; } + webrtc::ArrayView info() const { return info_; } private: std::vector info_; diff --git a/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.cc b/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.cc index 6191adfe9d..138c2fd086 100644 --- a/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.cc +++ b/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.cc @@ -12,11 +12,11 @@ #include #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -42,11 +42,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int IncomingSSNResetRequestParameter::kType; -absl::optional -IncomingSSNResetRequestParameter::Parse(rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional +IncomingSSNResetRequestParameter::Parse(webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } ReconfigRequestSN request_sequence_number(reader->Load32<4>()); @@ -80,7 +80,7 @@ void IncomingSSNResetRequestParameter::SerializeTo( } std::string IncomingSSNResetRequestParameter::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "Incoming SSN Reset Request, req_seq_nbr=" << *request_sequence_number(); return sb.Release(); diff --git a/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.h b/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.h index 18963efafc..1ad9dfc179 100644 --- a/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.h +++ b/net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.h @@ -43,8 +43,8 @@ class IncomingSSNResetRequestParameter : request_sequence_number_(request_sequence_number), stream_ids_(std::move(stream_ids)) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; @@ -52,7 +52,7 @@ class IncomingSSNResetRequestParameter ReconfigRequestSN request_sequence_number() const { return request_sequence_number_; } - rtc::ArrayView stream_ids() const { return stream_ids_; } + webrtc::ArrayView stream_ids() const { return stream_ids_; } private: static constexpr size_t kStreamIdSize = sizeof(uint16_t); diff --git 
a/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.cc b/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.cc index c25a2426be..56ec00ecbd 100644 --- a/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.cc +++ b/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.cc @@ -12,11 +12,11 @@ #include #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/common/internal_types.h" #include "net/dcsctp/packet/bounded_byte_reader.h" @@ -48,11 +48,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int OutgoingSSNResetRequestParameter::kType; -absl::optional -OutgoingSSNResetRequestParameter::Parse(rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional +OutgoingSSNResetRequestParameter::Parse(webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } ReconfigRequestSN request_sequence_number(reader->Load32<4>()); @@ -91,7 +91,7 @@ void OutgoingSSNResetRequestParameter::SerializeTo( } std::string OutgoingSSNResetRequestParameter::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "Outgoing SSN Reset Request, req_seq_nbr=" << *request_sequence_number() << ", resp_seq_nbr=" << *response_sequence_number() << ", sender_last_asg_tsn=" << *sender_last_assigned_tsn(); diff --git a/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h b/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h index 6eb44e079f..109f742a15 100644 --- a/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h +++ b/net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h @@ -49,8 +49,8 @@ class OutgoingSSNResetRequestParameter sender_last_assigned_tsn_(sender_last_assigned_tsn), stream_ids_(std::move(stream_ids)) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; @@ -62,7 +62,7 @@ class OutgoingSSNResetRequestParameter return response_sequence_number_; } TSN sender_last_assigned_tsn() const { return sender_last_assigned_tsn_; } - rtc::ArrayView stream_ids() const { return stream_ids_; } + webrtc::ArrayView stream_ids() const { return stream_ids_; } private: static constexpr size_t kStreamIdSize = sizeof(uint16_t); diff --git a/net/dcsctp/packet/parameter/parameter.cc b/net/dcsctp/packet/parameter/parameter.cc index b3b2bffef7..e6765c197c 100644 --- a/net/dcsctp/packet/parameter/parameter.cc +++ b/net/dcsctp/packet/parameter/parameter.cc @@ -13,12 +13,12 @@ #include #include +#include #include #include #include #include "absl/memory/memory.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/common/math.h" #include "net/dcsctp/packet/bounded_byte_reader.h" @@ -54,7 +54,7 @@ Parameters::Builder& Parameters::Builder::Add(const Parameter& p) { } std::vector Parameters::descriptors() const { - rtc::ArrayView span(data_); + webrtc::ArrayView span(data_); std::vector result; while (!span.empty()) { BoundedByteReader header(span); @@ -70,20 +70,20 @@ std::vector Parameters::descriptors() const { return result; } -absl::optional Parameters::Parse( - rtc::ArrayView data) { +std::optional Parameters::Parse( + webrtc::ArrayView data) { // Validate the parameter descriptors - rtc::ArrayView 
span(data); + webrtc::ArrayView span(data); while (!span.empty()) { if (span.size() < kParameterHeaderSize) { RTC_DLOG(LS_WARNING) << "Insufficient parameter length"; - return absl::nullopt; + return std::nullopt; } BoundedByteReader header(span); uint16_t length = header.Load16<2>(); if (length < kParameterHeaderSize || length > span.size()) { RTC_DLOG(LS_WARNING) << "Invalid parameter length field"; - return absl::nullopt; + return std::nullopt; } size_t length_with_padding = RoundUpTo4(length); if (length_with_padding > span.size()) { diff --git a/net/dcsctp/packet/parameter/parameter.h b/net/dcsctp/packet/parameter/parameter.h index e8fa67c8f7..5042f16e1b 100644 --- a/net/dcsctp/packet/parameter/parameter.h +++ b/net/dcsctp/packet/parameter/parameter.h @@ -16,6 +16,7 @@ #include #include #include +#include #include #include #include @@ -23,7 +24,6 @@ #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/tlv_trait.h" #include "rtc_base/strings/string_builder.h" @@ -43,10 +43,10 @@ class Parameter { }; struct ParameterDescriptor { - ParameterDescriptor(uint16_t type, rtc::ArrayView data) + ParameterDescriptor(uint16_t type, webrtc::ArrayView data) : type(type), data(data) {} uint16_t type; - rtc::ArrayView data; + webrtc::ArrayView data; }; class Parameters { @@ -61,17 +61,17 @@ class Parameters { std::vector data_; }; - static absl::optional Parse(rtc::ArrayView data); + static std::optional Parse(webrtc::ArrayView data); Parameters() {} Parameters(Parameters&& other) = default; Parameters& operator=(Parameters&& other) = default; - rtc::ArrayView data() const { return data_; } + webrtc::ArrayView data() const { return data_; } std::vector descriptors() const; template - absl::optional

<P> get() const { +  std::optional<P>
get() const { static_assert(std::is_base_of::value, "Template parameter not derived from Parameter"); for (const auto& p : descriptors()) { @@ -79,7 +79,7 @@ class Parameters { return P::Parse(p.data); } } - return absl::nullopt; + return std::nullopt; } private: diff --git a/net/dcsctp/packet/parameter/parameter_test.cc b/net/dcsctp/packet/parameter/parameter_test.cc index 467e324592..2ff49f320f 100644 --- a/net/dcsctp/packet/parameter/parameter_test.cc +++ b/net/dcsctp/packet/parameter/parameter_test.cc @@ -33,7 +33,7 @@ TEST(ParameterTest, SerializeDeserializeParameter) { TSN(789), {StreamID(42)})) .Build(); - rtc::ArrayView serialized = parameters.data(); + webrtc::ArrayView serialized = parameters.data(); ASSERT_HAS_VALUE_AND_ASSIGN(Parameters parsed, Parameters::Parse(serialized)); auto descriptors = parsed.descriptors(); diff --git a/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc b/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc index fafb204acc..b8debe8bf9 100644 --- a/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc +++ b/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc @@ -12,12 +12,12 @@ #include #include +#include #include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -64,11 +64,11 @@ absl::string_view ToString(ReconfigurationResponseParameter::Result result) { } } -absl::optional -ReconfigurationResponseParameter::Parse(rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional +ReconfigurationResponseParameter::Parse(webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } ReconfigRequestSN response_sequence_number(reader->Load32<4>()); @@ -101,14 +101,14 @@ ReconfigurationResponseParameter::Parse(rtc::ArrayView data) { default: RTC_DLOG(LS_WARNING) << "Invalid reconfig response result: " << result_nbr; - return absl::nullopt; + return std::nullopt; } if (reader->variable_data().empty()) { return ReconfigurationResponseParameter(response_sequence_number, result); } else if (reader->variable_data_size() != kNextTsnHeaderSize) { RTC_DLOG(LS_WARNING) << "Invalid parameter size"; - return absl::nullopt; + return std::nullopt; } BoundedByteReader sub_reader = @@ -144,7 +144,7 @@ void ReconfigurationResponseParameter::SerializeTo( } std::string ReconfigurationResponseParameter::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "Re-configuration Response, resp_seq_nbr=" << *response_sequence_number(); return sb.Release(); diff --git a/net/dcsctp/packet/parameter/reconfiguration_response_parameter.h b/net/dcsctp/packet/parameter/reconfiguration_response_parameter.h index c5a68acb33..37bc7452a5 100644 --- a/net/dcsctp/packet/parameter/reconfiguration_response_parameter.h +++ b/net/dcsctp/packet/parameter/reconfiguration_response_parameter.h @@ -12,11 +12,11 @@ #include #include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/common/internal_types.h" #include "net/dcsctp/packet/parameter/parameter.h" @@ -51,8 +51,8 @@ class ReconfigurationResponseParameter Result result) : response_sequence_number_(response_sequence_number), result_(result), - sender_next_tsn_(absl::nullopt), - 
receiver_next_tsn_(absl::nullopt) {} + sender_next_tsn_(std::nullopt), + receiver_next_tsn_(std::nullopt) {} explicit ReconfigurationResponseParameter( ReconfigRequestSN response_sequence_number, @@ -64,8 +64,8 @@ class ReconfigurationResponseParameter sender_next_tsn_(sender_next_tsn), receiver_next_tsn_(receiver_next_tsn) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; @@ -74,15 +74,15 @@ class ReconfigurationResponseParameter return response_sequence_number_; } Result result() const { return result_; } - absl::optional sender_next_tsn() const { return sender_next_tsn_; } - absl::optional receiver_next_tsn() const { return receiver_next_tsn_; } + std::optional sender_next_tsn() const { return sender_next_tsn_; } + std::optional receiver_next_tsn() const { return receiver_next_tsn_; } private: static constexpr size_t kNextTsnHeaderSize = 8; ReconfigRequestSN response_sequence_number_; Result result_; - absl::optional sender_next_tsn_; - absl::optional receiver_next_tsn_; + std::optional sender_next_tsn_; + std::optional receiver_next_tsn_; }; absl::string_view ToString(ReconfigurationResponseParameter::Result result); diff --git a/net/dcsctp/packet/parameter/reconfiguration_response_parameter_test.cc b/net/dcsctp/packet/parameter/reconfiguration_response_parameter_test.cc index 8125d93cd0..b7845452dd 100644 --- a/net/dcsctp/packet/parameter/reconfiguration_response_parameter_test.cc +++ b/net/dcsctp/packet/parameter/reconfiguration_response_parameter_test.cc @@ -11,10 +11,10 @@ #include +#include #include #include -#include "absl/types/optional.h" #include "net/dcsctp/testing/testing_macros.h" #include "rtc_base/gunit.h" #include "test/gmock.h" @@ -37,8 +37,8 @@ TEST(ReconfigurationResponseParameterTest, SerializeAndDeserializeFirstForm) { EXPECT_EQ(*deserialized.response_sequence_number(), 1u); EXPECT_EQ(deserialized.result(), ReconfigurationResponseParameter::Result::kSuccessPerformed); - EXPECT_EQ(deserialized.sender_next_tsn(), absl::nullopt); - EXPECT_EQ(deserialized.receiver_next_tsn(), absl::nullopt); + EXPECT_EQ(deserialized.sender_next_tsn(), std::nullopt); + EXPECT_EQ(deserialized.receiver_next_tsn(), std::nullopt); } TEST(ReconfigurationResponseParameterTest, diff --git a/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.cc b/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.cc index d656e0db8f..93d5f133f4 100644 --- a/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.cc +++ b/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.cc @@ -11,11 +11,11 @@ #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -35,11 +35,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int SSNTSNResetRequestParameter::kType; -absl::optional SSNTSNResetRequestParameter::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional SSNTSNResetRequestParameter::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } ReconfigRequestSN request_sequence_number(reader->Load32<4>()); @@ -52,7 +52,7 @@ void SSNTSNResetRequestParameter::SerializeTo(std::vector& out) const { } std::string 
SSNTSNResetRequestParameter::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "SSN/TSN Reset Request, req_seq_nbr=" << *request_sequence_number(); return sb.Release(); } diff --git a/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.h b/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.h index e31d7ebe8f..4d83044483 100644 --- a/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.h +++ b/net/dcsctp/packet/parameter/ssn_tsn_reset_request_parameter.h @@ -40,8 +40,8 @@ class SSNTSNResetRequestParameter ReconfigRequestSN request_sequence_number) : request_sequence_number_(request_sequence_number) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; diff --git a/net/dcsctp/packet/parameter/state_cookie_parameter.cc b/net/dcsctp/packet/parameter/state_cookie_parameter.cc index 9777aa6667..0c4d068fa0 100644 --- a/net/dcsctp/packet/parameter/state_cookie_parameter.cc +++ b/net/dcsctp/packet/parameter/state_cookie_parameter.cc @@ -11,11 +11,11 @@ #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -28,11 +28,11 @@ namespace dcsctp { constexpr int StateCookieParameter::kType; -absl::optional StateCookieParameter::Parse( - rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional StateCookieParameter::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } return StateCookieParameter(reader->variable_data()); } @@ -43,7 +43,7 @@ void StateCookieParameter::SerializeTo(std::vector& out) const { } std::string StateCookieParameter::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "State Cookie parameter (cookie_length=" << data_.size() << ")"; return sb.Release(); } diff --git a/net/dcsctp/packet/parameter/state_cookie_parameter.h b/net/dcsctp/packet/parameter/state_cookie_parameter.h index f4355495e2..d11ce8e22d 100644 --- a/net/dcsctp/packet/parameter/state_cookie_parameter.h +++ b/net/dcsctp/packet/parameter/state_cookie_parameter.h @@ -35,16 +35,16 @@ class StateCookieParameter : public Parameter, public: static constexpr int kType = StateCookieParameterConfig::kType; - explicit StateCookieParameter(rtc::ArrayView data) + explicit StateCookieParameter(webrtc::ArrayView data) : data_(data.begin(), data.end()) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; - rtc::ArrayView data() const { return data_; } + webrtc::ArrayView data() const { return data_; } private: std::vector data_; diff --git a/net/dcsctp/packet/parameter/supported_extensions_parameter.cc b/net/dcsctp/packet/parameter/supported_extensions_parameter.cc index 6a8fb214de..4163016757 100644 --- a/net/dcsctp/packet/parameter/supported_extensions_parameter.cc +++ b/net/dcsctp/packet/parameter/supported_extensions_parameter.cc @@ -10,16 +10,16 @@ #include "net/dcsctp/packet/parameter/supported_extensions_parameter.h" #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" -#include "net/dcsctp/common/str_join.h" #include 
"net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" #include "net/dcsctp/packet/tlv_trait.h" +#include "rtc_base/strings/str_join.h" #include "rtc_base/strings/string_builder.h" namespace dcsctp { @@ -39,11 +39,11 @@ namespace dcsctp { // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int SupportedExtensionsParameter::kType; -absl::optional -SupportedExtensionsParameter::Parse(rtc::ArrayView data) { - absl::optional> reader = ParseTLV(data); +std::optional SupportedExtensionsParameter::Parse( + webrtc::ArrayView data) { + std::optional> reader = ParseTLV(data); if (!reader.has_value()) { - return absl::nullopt; + return std::nullopt; } std::vector chunk_types(reader->variable_data().begin(), @@ -58,8 +58,8 @@ void SupportedExtensionsParameter::SerializeTo( } std::string SupportedExtensionsParameter::ToString() const { - rtc::StringBuilder sb; - sb << "Supported Extensions (" << StrJoin(chunk_types_, ", ") << ")"; + webrtc::StringBuilder sb; + sb << "Supported Extensions (" << webrtc::StrJoin(chunk_types_, ", ") << ")"; return sb.Release(); } } // namespace dcsctp diff --git a/net/dcsctp/packet/parameter/supported_extensions_parameter.h b/net/dcsctp/packet/parameter/supported_extensions_parameter.h index 5689fd8035..c7fdbcb1ab 100644 --- a/net/dcsctp/packet/parameter/supported_extensions_parameter.h +++ b/net/dcsctp/packet/parameter/supported_extensions_parameter.h @@ -41,8 +41,8 @@ class SupportedExtensionsParameter explicit SupportedExtensionsParameter(std::vector chunk_types) : chunk_types_(std::move(chunk_types)) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; @@ -52,7 +52,7 @@ class SupportedExtensionsParameter chunk_types_.end(); } - rtc::ArrayView chunk_types() const { return chunk_types_; } + webrtc::ArrayView chunk_types() const { return chunk_types_; } private: std::vector chunk_types_; diff --git a/net/dcsctp/packet/parameter/zero_checksum_acceptable_chunk_parameter.cc b/net/dcsctp/packet/parameter/zero_checksum_acceptable_chunk_parameter.cc index 75f7d3c487..4fd46c96ec 100644 --- a/net/dcsctp/packet/parameter/zero_checksum_acceptable_chunk_parameter.cc +++ b/net/dcsctp/packet/parameter/zero_checksum_acceptable_chunk_parameter.cc @@ -11,36 +11,48 @@ #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" +#include "rtc_base/strings/string_builder.h" namespace dcsctp { // https://www.ietf.org/archive/id/draft-tuexen-tsvwg-sctp-zero-checksum-00.html#section-3 -// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ -// | Type = 0x8001 | Length = 4 | -// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Type = 0x8001 (suggested) | Length = 8 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | Error Detection Method Identifier (EDMID) | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ constexpr int ZeroChecksumAcceptableChunkParameter::kType; -absl::optional +std::optional ZeroChecksumAcceptableChunkParameter::Parse( - rtc::ArrayView data) { - if (!ParseTLV(data).has_value()) { - return absl::nullopt; + webrtc::ArrayView data) { + std::optional> 
reader = ParseTLV(data); + if (!reader.has_value()) { + return std::nullopt; } - return ZeroChecksumAcceptableChunkParameter(); + + ZeroChecksumAlternateErrorDetectionMethod method(reader->Load32<4>()); + if (method == ZeroChecksumAlternateErrorDetectionMethod::None()) { + return std::nullopt; + } + return ZeroChecksumAcceptableChunkParameter(method); } void ZeroChecksumAcceptableChunkParameter::SerializeTo( std::vector& out) const { - AllocateTLV(out); + BoundedByteWriter writer = AllocateTLV(out); + writer.Store32<4>(*error_detection_method_); } std::string ZeroChecksumAcceptableChunkParameter::ToString() const { - return "Zero Checksum Acceptable"; + webrtc::StringBuilder sb; + sb << "Zero Checksum Acceptable (" << *error_detection_method_ << ")"; + return sb.Release(); } } // namespace dcsctp diff --git a/net/dcsctp/packet/parameter/zero_checksum_acceptable_chunk_parameter.h b/net/dcsctp/packet/parameter/zero_checksum_acceptable_chunk_parameter.h index 9ae2ec8280..8b0132d2cf 100644 --- a/net/dcsctp/packet/parameter/zero_checksum_acceptable_chunk_parameter.h +++ b/net/dcsctp/packet/parameter/zero_checksum_acceptable_chunk_parameter.h @@ -19,13 +19,14 @@ #include "api/array_view.h" #include "net/dcsctp/packet/parameter/parameter.h" #include "net/dcsctp/packet/tlv_trait.h" +#include "net/dcsctp/public/types.h" namespace dcsctp { -// https://datatracker.ietf.org/doc/draft-tuexen-tsvwg-sctp-zero-checksum/ +// https://datatracker.ietf.org/doc/draft-ietf-tsvwg-sctp-zero-checksum/ struct ZeroChecksumAcceptableChunkParameterConfig : ParameterConfig { static constexpr int kType = 0x8001; - static constexpr size_t kHeaderSize = 4; + static constexpr size_t kHeaderSize = 8; static constexpr size_t kVariableLengthAlignment = 0; }; @@ -36,13 +37,22 @@ class ZeroChecksumAcceptableChunkParameter static constexpr int kType = ZeroChecksumAcceptableChunkParameterConfig::kType; - ZeroChecksumAcceptableChunkParameter() {} + explicit ZeroChecksumAcceptableChunkParameter( + ZeroChecksumAlternateErrorDetectionMethod error_detection_method) + : error_detection_method_(error_detection_method) {} - static absl::optional Parse( - rtc::ArrayView data); + static std::optional Parse( + webrtc::ArrayView data); void SerializeTo(std::vector& out) const override; std::string ToString() const override; + + ZeroChecksumAlternateErrorDetectionMethod error_detection_method() const { + return error_detection_method_; + } + + private: + ZeroChecksumAlternateErrorDetectionMethod error_detection_method_; }; } // namespace dcsctp diff --git a/net/dcsctp/packet/parameter/zero_checksum_acceptable_chunk_parameter_test.cc b/net/dcsctp/packet/parameter/zero_checksum_acceptable_chunk_parameter_test.cc index 8a004e1788..861fa4d785 100644 --- a/net/dcsctp/packet/parameter/zero_checksum_acceptable_chunk_parameter_test.cc +++ b/net/dcsctp/packet/parameter/zero_checksum_acceptable_chunk_parameter_test.cc @@ -24,18 +24,28 @@ namespace { using ::testing::ElementsAre; TEST(ZeroChecksumAcceptableChunkParameterTest, SerializeAndDeserialize) { - ZeroChecksumAcceptableChunkParameter parameter; + ZeroChecksumAcceptableChunkParameter parameter( + ZeroChecksumAlternateErrorDetectionMethod::LowerLayerDtls()); std::vector serialized; parameter.SerializeTo(serialized); - EXPECT_THAT(serialized, ElementsAre(0x80, 0x01, 0x00, 0x04)); + EXPECT_THAT(serialized, + ElementsAre(0x80, 0x01, 0x00, 0x08, 0x00, 0x00, 0x00, 0x01)); ASSERT_HAS_VALUE_AND_ASSIGN( ZeroChecksumAcceptableChunkParameter deserialized, 
ZeroChecksumAcceptableChunkParameter::Parse(serialized)); } +TEST(ZeroChecksumAcceptableChunkParameterTest, FailToDeserializePrevVersion) { + // This is how the draft described the chunk as, in version 00. + std::vector invalid = {0x80, 0x01, 0x00, 0x04}; + + EXPECT_FALSE( + ZeroChecksumAcceptableChunkParameter::Parse(invalid).has_value()); +} + TEST(ZeroChecksumAcceptableChunkParameterTest, FailToDeserialize) { std::vector invalid = {0x00, 0x00, 0x00, 0x00}; @@ -44,9 +54,10 @@ TEST(ZeroChecksumAcceptableChunkParameterTest, FailToDeserialize) { } TEST(ZeroChecksumAcceptableChunkParameterTest, HasToString) { - ZeroChecksumAcceptableChunkParameter parameter; + ZeroChecksumAcceptableChunkParameter parameter( + ZeroChecksumAlternateErrorDetectionMethod::LowerLayerDtls()); - EXPECT_EQ(parameter.ToString(), "Zero Checksum Acceptable"); + EXPECT_EQ(parameter.ToString(), "Zero Checksum Acceptable (1)"); } } // namespace diff --git a/net/dcsctp/packet/sctp_packet.cc b/net/dcsctp/packet/sctp_packet.cc index de407bb4c7..391d52151a 100644 --- a/net/dcsctp/packet/sctp_packet.cc +++ b/net/dcsctp/packet/sctp_packet.cc @@ -12,12 +12,12 @@ #include #include +#include #include #include #include #include "absl/memory/memory.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/common/math.h" #include "net/dcsctp/packet/bounded_byte_reader.h" @@ -105,12 +105,13 @@ std::vector SctpPacket::Builder::Build(bool write_checksum) { return out; } -absl::optional SctpPacket::Parse(rtc::ArrayView data, - const DcSctpOptions& options) { +std::optional SctpPacket::Parse( + webrtc::ArrayView data, + const DcSctpOptions& options) { if (data.size() < kHeaderSize + kChunkTlvHeaderSize || data.size() > kMaxUdpPacketSize) { RTC_DLOG(LS_WARNING) << "Invalid packet size"; - return absl::nullopt; + return std::nullopt; } BoundedByteReader reader(data); @@ -126,7 +127,9 @@ absl::optional SctpPacket::Parse(rtc::ArrayView data, std::vector(data.begin(), data.end()); if (options.disable_checksum_verification || - (options.enable_zero_checksum && common_header.checksum == 0u)) { + (options.zero_checksum_alternate_error_detection_method != + ZeroChecksumAlternateErrorDetectionMethod::None() && + common_header.checksum == 0u)) { // https://www.ietf.org/archive/id/draft-tuexen-tsvwg-sctp-zero-checksum-01.html#section-4.3: // If an end point has sent the Zero Checksum Acceptable Chunk Parameter in // an INIT or INIT ACK chunk, it MUST accept SCTP packets using an incorrect @@ -137,11 +140,11 @@ absl::optional SctpPacket::Parse(rtc::ArrayView data, BoundedByteWriter(data_copy).Store32<8>(0); uint32_t calculated_checksum = GenerateCrc32C(data_copy); if (calculated_checksum != common_header.checksum) { - RTC_DLOG(LS_WARNING) << rtc::StringFormat( + RTC_DLOG(LS_WARNING) << webrtc::StringFormat( "Invalid packet checksum, packet_checksum=0x%08x, " "calculated_checksum=0x%08x", common_header.checksum, calculated_checksum); - return absl::nullopt; + return std::nullopt; } // Restore the checksum in the header. 
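An aside on the checksum handling above: the acceptance rule that SctpPacket::Parse now implements can be condensed into a small predicate. The sketch below is illustrative only (the free function is hypothetical); the option and method names are the ones introduced by this patch.

bool IsChecksumAcceptable(uint32_t packet_checksum,
                          uint32_t calculated_checksum,
                          const DcSctpOptions& options) {
  if (options.disable_checksum_verification) {
    return true;  // Fuzzing only, see dcsctp_options.h.
  }
  if (packet_checksum == 0u &&
      options.zero_checksum_alternate_error_detection_method !=
          ZeroChecksumAlternateErrorDetectionMethod::None()) {
    // The zero checksum option is in use and a lower layer (DTLS in WebRTC)
    // provides error detection, so an all-zero checksum is accepted as-is.
    return true;
  }
  return packet_checksum == calculated_checksum;
}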
BoundedByteWriter(data_copy).Store32<8>( @@ -159,12 +162,12 @@ absl::optional SctpPacket::Parse(rtc::ArrayView data, std::vector descriptors; descriptors.reserve(kExpectedDescriptorCount); - rtc::ArrayView descriptor_data = - rtc::ArrayView(data_copy).subview(kHeaderSize); + webrtc::ArrayView descriptor_data = + webrtc::ArrayView(data_copy).subview(kHeaderSize); while (!descriptor_data.empty()) { if (descriptor_data.size() < kChunkTlvHeaderSize) { RTC_DLOG(LS_WARNING) << "Too small chunk"; - return absl::nullopt; + return std::nullopt; } BoundedByteReader chunk_header(descriptor_data); uint8_t type = chunk_header.Load8<0>(); @@ -174,10 +177,10 @@ absl::optional SctpPacket::Parse(rtc::ArrayView data, if (padded_length > descriptor_data.size()) { RTC_DLOG(LS_WARNING) << "Too large chunk. length=" << length << ", remaining=" << descriptor_data.size(); - return absl::nullopt; + return std::nullopt; } else if (padded_length < kChunkTlvHeaderSize) { RTC_DLOG(LS_WARNING) << "Too small chunk. length=" << length; - return absl::nullopt; + return std::nullopt; } descriptors.emplace_back(type, flags, descriptor_data.subview(0, padded_length)); diff --git a/net/dcsctp/packet/sctp_packet.h b/net/dcsctp/packet/sctp_packet.h index 0d348b448f..3a5ba973db 100644 --- a/net/dcsctp/packet/sctp_packet.h +++ b/net/dcsctp/packet/sctp_packet.h @@ -42,11 +42,11 @@ class SctpPacket { struct ChunkDescriptor { ChunkDescriptor(uint8_t type, uint8_t flags, - rtc::ArrayView data) + webrtc::ArrayView data) : type(type), flags(flags), data(data) {} uint8_t type; uint8_t flags; - rtc::ArrayView data; + webrtc::ArrayView data; }; SctpPacket(SctpPacket&& other) = default; @@ -89,14 +89,14 @@ class SctpPacket { }; // Parses `data` as an SCTP packet and returns it if it validates. - static absl::optional Parse(rtc::ArrayView data, - const DcSctpOptions& options); + static std::optional Parse(webrtc::ArrayView data, + const DcSctpOptions& options); // Returns the SCTP common header. const CommonHeader& common_header() const { return common_header_; } // Returns the chunks (types and offsets) within the packet. 
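For context on the descriptors() accessor changed just below, here is a hypothetical receive-path sketch showing how a parsed packet is typically consumed; `payload` and `options` are placeholders and the per-chunk dispatch is elided.

void DispatchChunksSketch(webrtc::ArrayView<const uint8_t> payload,
                          const DcSctpOptions& options) {
  std::optional<SctpPacket> packet = SctpPacket::Parse(payload, options);
  if (!packet.has_value()) {
    return;  // Too small, bad checksum, or malformed chunk TLVs.
  }
  for (const SctpPacket::ChunkDescriptor& descriptor : packet->descriptors()) {
    // descriptor.type, descriptor.flags and descriptor.data (the whole chunk
    // TLV, header included) feed the chunk-specific parsers (elided here).
  }
}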
- rtc::ArrayView descriptors() const { + webrtc::ArrayView descriptors() const { return descriptors_; } diff --git a/net/dcsctp/packet/sctp_packet_test.cc b/net/dcsctp/packet/sctp_packet_test.cc index fcdd161c0d..aab01df19c 100644 --- a/net/dcsctp/packet/sctp_packet_test.cc +++ b/net/dcsctp/packet/sctp_packet_test.cc @@ -38,7 +38,8 @@ using ::testing::SizeIs; constexpr VerificationTag kVerificationTag = VerificationTag(0x12345678); constexpr DcSctpOptions kVerifyChecksumOptions = DcSctpOptions{.disable_checksum_verification = false, - .enable_zero_checksum = false}; + .zero_checksum_alternate_error_detection_method = + ZeroChecksumAlternateErrorDetectionMethod::None()}; TEST(SctpPacketTest, DeserializeSimplePacketFromCapture) { /* @@ -208,8 +209,10 @@ TEST(SctpPacketTest, DeserializePacketDontValidateChecksum) { ASSERT_HAS_VALUE_AND_ASSIGN( SctpPacket packet, - SctpPacket::Parse(data, {.disable_checksum_verification = true, - .enable_zero_checksum = false})); + SctpPacket::Parse( + data, {.disable_checksum_verification = true, + .zero_checksum_alternate_error_detection_method = + ZeroChecksumAlternateErrorDetectionMethod::None()})); EXPECT_EQ(packet.common_header().source_port, 5000); EXPECT_EQ(packet.common_header().destination_port, 5000); EXPECT_EQ(packet.common_header().verification_tag, @@ -375,8 +378,11 @@ TEST(SctpPacketTest, AcceptsZeroSetZeroChecksum) { ASSERT_HAS_VALUE_AND_ASSIGN( SctpPacket packet, - SctpPacket::Parse(data, {.disable_checksum_verification = false, - .enable_zero_checksum = true})); + SctpPacket::Parse( + data, + {.disable_checksum_verification = false, + .zero_checksum_alternate_error_detection_method = + ZeroChecksumAlternateErrorDetectionMethod::LowerLayerDtls()})); EXPECT_EQ(packet.common_header().source_port, 5000); EXPECT_EQ(packet.common_header().destination_port, 5000); EXPECT_EQ(packet.common_header().verification_tag, @@ -409,9 +415,13 @@ TEST(SctpPacketTest, RejectsNonZeroIncorrectChecksumWhenZeroChecksumIsActive) { 0x00, 0x00, 0x03, 0x00, 0x00, 0x10, 0x55, 0x08, 0x36, 0x40, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}; - EXPECT_FALSE(SctpPacket::Parse(data, {.disable_checksum_verification = false, - .enable_zero_checksum = true}) - .has_value()); + EXPECT_FALSE( + SctpPacket::Parse( + data, + {.disable_checksum_verification = false, + .zero_checksum_alternate_error_detection_method = + ZeroChecksumAlternateErrorDetectionMethod::LowerLayerDtls()}) + .has_value()); } TEST(SctpPacketTest, WritePacketWithCalculatedChecksum) { diff --git a/net/dcsctp/packet/tlv_trait.h b/net/dcsctp/packet/tlv_trait.h index a3c728efd7..fd463479b3 100644 --- a/net/dcsctp/packet/tlv_trait.h +++ b/net/dcsctp/packet/tlv_trait.h @@ -16,10 +16,10 @@ #include #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -80,11 +80,11 @@ class TLVTrait { // Validates the data with regards to size, alignment and type. // If valid, returns a bounded buffer. 
- static absl::optional> ParseTLV( - rtc::ArrayView data) { + static std::optional> ParseTLV( + webrtc::ArrayView data) { if (data.size() < Config::kHeaderSize) { tlv_trait_impl::ReportInvalidSize(data.size(), Config::kHeaderSize); - return absl::nullopt; + return std::nullopt; } BoundedByteReader tlv_header(data); @@ -94,7 +94,7 @@ class TLVTrait { if (type != Config::kType) { tlv_trait_impl::ReportInvalidType(type, Config::kType); - return absl::nullopt; + return std::nullopt; } const uint16_t length = tlv_header.template Load16<2>(); if (Config::kVariableLengthAlignment == 0) { @@ -102,25 +102,25 @@ class TLVTrait { if (length != Config::kHeaderSize || data.size() != Config::kHeaderSize) { tlv_trait_impl::ReportInvalidFixedLengthField(length, Config::kHeaderSize); - return absl::nullopt; + return std::nullopt; } } else { // Expect variable length data - verify its size alignment. if (length > data.size() || length < Config::kHeaderSize) { tlv_trait_impl::ReportInvalidVariableLengthField(length, data.size()); - return absl::nullopt; + return std::nullopt; } const size_t padding = data.size() - length; if (padding > 3) { // https://tools.ietf.org/html/rfc4960#section-3.2 // "This padding MUST NOT be more than 3 bytes in total" tlv_trait_impl::ReportInvalidPadding(padding); - return absl::nullopt; + return std::nullopt; } if (!ValidateLengthAlignment(length, Config::kVariableLengthAlignment)) { tlv_trait_impl::ReportInvalidLengthMultiple( length, Config::kVariableLengthAlignment); - return absl::nullopt; + return std::nullopt; } } return BoundedByteReader(data.subview(0, length)); @@ -137,7 +137,7 @@ class TLVTrait { out.resize(offset + size); BoundedByteWriter tlv_header( - rtc::ArrayView(out.data() + offset, kTlvHeaderSize)); + webrtc::ArrayView(out.data() + offset, kTlvHeaderSize)); if (Config::kTypeSizeInBytes == 1) { tlv_header.template Store8<0>(static_cast(Config::kType)); } else { @@ -146,7 +146,7 @@ class TLVTrait { tlv_header.template Store16<2>(size); return BoundedByteWriter( - rtc::ArrayView(out.data() + offset, size)); + webrtc::ArrayView(out.data() + offset, size)); } private: diff --git a/net/dcsctp/packet/tlv_trait_test.cc b/net/dcsctp/packet/tlv_trait_test.cc index a0dd1a1136..737534bddf 100644 --- a/net/dcsctp/packet/tlv_trait_test.cc +++ b/net/dcsctp/packet/tlv_trait_test.cc @@ -41,11 +41,11 @@ class OneByteChunk : public TLVTrait { writer.Store16<10>(0x0708); uint8_t variable_data[kVariableSize] = {0xDE, 0xAD, 0xBE, 0xEF}; - writer.CopyToVariableData(rtc::ArrayView(variable_data)); + writer.CopyToVariableData(webrtc::ArrayView(variable_data)); } - static absl::optional> - Parse(rtc::ArrayView data) { + static std::optional> Parse( + webrtc::ArrayView data) { return ParseTLV(data); } }; @@ -64,7 +64,7 @@ TEST(TlvDataTest, CanReadOneByteTypeTlvs) { uint8_t data[] = {0x49, 0x00, 0x00, 0x10, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0xDE, 0xAD, 0xBE, 0xEF}; - absl::optional> reader = + std::optional> reader = OneByteChunk::Parse(data); ASSERT_TRUE(reader.has_value()); EXPECT_EQ(reader->Load32<4>(), 0x01020304U); @@ -90,11 +90,11 @@ class TwoByteChunk : public TLVTrait { writer.Store32<4>(0x01020304U); uint8_t variable_data[] = {0x05, 0x06, 0x07, 0x08, 0xDE, 0xAD, 0xBE, 0xEF}; - writer.CopyToVariableData(rtc::ArrayView(variable_data)); + writer.CopyToVariableData(webrtc::ArrayView(variable_data)); } - static absl::optional> - Parse(rtc::ArrayView data) { + static std::optional> Parse( + webrtc::ArrayView data) { return ParseTLV(data); } }; @@ -114,7 +114,7 @@ 
TEST(TlvDataTest, CanReadTwoByteTypeTlvs) { uint8_t data[] = {0x7A, 0x69, 0x00, 0x10, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0xDE, 0xAD, 0xBE, 0xEF}; - absl::optional> reader = + std::optional> reader = TwoByteChunk::Parse(data); EXPECT_TRUE(reader.has_value()); EXPECT_EQ(reader->Load32<4>(), 0x01020304U); diff --git a/net/dcsctp/public/BUILD.gn b/net/dcsctp/public/BUILD.gn index 6cb289bf5b..97f7651f15 100644 --- a/net/dcsctp/public/BUILD.gn +++ b/net/dcsctp/public/BUILD.gn @@ -11,6 +11,7 @@ import("../../../webrtc.gni") rtc_source_set("types") { deps = [ "../../../api:array_view", + "../../../api/units:time_delta", "../../../rtc_base:strong_alias", ] sources = [ @@ -18,7 +19,6 @@ rtc_source_set("types") { "dcsctp_options.h", "types.h", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("socket") { @@ -26,8 +26,10 @@ rtc_source_set("socket") { ":types", "../../../api:array_view", "../../../api/task_queue:task_queue", + "../../../api/units:timestamp", "../../../rtc_base:checks", "../../../rtc_base:strong_alias", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "dcsctp_handover_state.cc", @@ -36,10 +38,6 @@ rtc_source_set("socket") { "packet_observer.h", "timeout.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_source_set("factory") { @@ -47,12 +45,12 @@ rtc_source_set("factory") { ":socket", ":types", "../socket:dcsctp_socket", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "dcsctp_socket_factory.cc", "dcsctp_socket_factory.h", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_source_set("mocks") { @@ -76,12 +74,12 @@ rtc_source_set("utils") { "../../../rtc_base:logging", "../../../rtc_base:stringutils", "../socket:dcsctp_socket", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "text_pcap_packet_observer.cc", "text_pcap_packet_observer.h", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } if (rtc_include_tests) { diff --git a/net/dcsctp/public/dcsctp_message.h b/net/dcsctp/public/dcsctp_message.h index 38e6763916..bcf4486f2d 100644 --- a/net/dcsctp/public/dcsctp_message.h +++ b/net/dcsctp/public/dcsctp_message.h @@ -39,7 +39,7 @@ class DcSctpMessage { PPID ppid() const { return ppid_; } // The payload of the message. - rtc::ArrayView payload() const { return payload_; } + webrtc::ArrayView payload() const { return payload_; } // When destructing the message, extracts the payload. std::vector ReleasePayload() && { return std::move(payload_); } diff --git a/net/dcsctp/public/dcsctp_options.h b/net/dcsctp/public/dcsctp_options.h index d19798054c..6e1181f7e2 100644 --- a/net/dcsctp/public/dcsctp_options.h +++ b/net/dcsctp/public/dcsctp_options.h @@ -13,7 +13,8 @@ #include #include -#include "absl/types/optional.h" +#include + #include "net/dcsctp/public/types.h" namespace dcsctp { @@ -85,10 +86,14 @@ struct DcSctpOptions { // buffer is fully utilized. size_t max_receiver_window_buffer_size = 5 * 1024 * 1024; - // Maximum send buffer size. It will not be possible to queue more data than - // this before sending it. + // Send queue total size limit. It will not be possible to queue more data if + // the queue size is larger than this number. size_t max_send_buffer_size = 2'000'000; + // Per stream send queue size limit. Similar to `max_send_buffer_size`, but + // limiting the size of individual streams. 
+ size_t per_stream_send_queue_limit = 2'000'000; + // A threshold that, when the amount of data in the send buffer goes below // this value, will trigger `DcSctpCallbacks::OnTotalBufferedAmountLow`. size_t total_buffered_amount_low_threshold = 1'800'000; @@ -124,7 +129,7 @@ struct DcSctpOptions { // transient network issues. Setting this value may require changing // `max_retransmissions` and `max_init_retransmits` to ensure that the // connection is not closed too quickly. - absl::optional max_timer_backoff_duration = absl::nullopt; + std::optional max_timer_backoff_duration = std::nullopt; // Hearbeat interval (on idle connections only). Set to zero to disable. DurationMs heartbeat_interval = DurationMs(30000); @@ -144,8 +149,11 @@ struct DcSctpOptions { // processing time of received packets and the clock granularity when setting // the delayed ack timer on the peer. // - // This is described for TCP in + // This is defined as "G" in the algorithm for TCP in // https://datatracker.ietf.org/doc/html/rfc6298#section-4. + // + // Note that this value will be further adjusted by scaling factors, so if you + // intend to change this, do it incrementally and measure the results. DurationMs min_rtt_variance = DurationMs(220); // The initial congestion window size, in number of MTUs. @@ -168,6 +176,16 @@ struct DcSctpOptions { // creating small fragmented packets. size_t avoid_fragmentation_cwnd_mtus = 6; + // When the congestion window is below this number of MTUs, sent data chunks + // will have the "I" (Immediate SACK - RFC7053) bit set. That will prevent the + // receiver from delaying the SACK, which result in shorter time until the + // sender can send the next packet as its driven by SACKs. This can reduce + // latency for low utilized and lossy connections. + // + // Default value set to be same as initial congestion window. Set to zero to + // disable. + size_t immediate_sack_under_cwnd_mtus = 10; + // The number of packets that may be sent at once. This is limited to avoid // bursts that too quickly fill the send buffer. Typically in a a socket in // its "slow start" phase (when it sends as much as it can), it will send @@ -176,13 +194,13 @@ struct DcSctpOptions { // retransmission scenarios. int max_burst = 4; - // Maximum Data Retransmit Attempts (per DATA chunk). Set to absl::nullopt for + // Maximum Data Retransmit Attempts (per DATA chunk). Set to std::nullopt for // no limit. - absl::optional max_retransmissions = 10; + std::optional max_retransmissions = 10; // Max.Init.Retransmits (https://tools.ietf.org/html/rfc4960#section-15). Set - // to absl::nullopt for no limit. - absl::optional max_init_retransmits = 8; + // to std::nullopt for no limit. + std::optional max_init_retransmits = 8; // RFC3758 Partial Reliability Extension bool enable_partial_reliability = true; @@ -196,14 +214,13 @@ struct DcSctpOptions { // Disables SCTP packet crc32 verification. For fuzzers only! bool disable_checksum_verification = false; - // Controls the acceptance of zero checksum, as defined in - // https://datatracker.ietf.org/doc/draft-tuexen-tsvwg-sctp-zero-checksum/ - // This should only be enabled if the packet integrity can be ensured by lower - // layers, which DTLS will do in WebRTC, as defined by RFC8261. - // - // This will also enable sending packets without a checksum value (set to 0) - // once both peers have negotiated this feature. 
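Regarding the `immediate_sack_under_cwnd_mtus` option introduced above: the decision it controls can be pictured as a predicate over the current congestion window. This is a hypothetical standalone helper for illustration, not the sender code itself (which is outside this patch).

bool ShouldSetImmediateSackBit(size_t cwnd_bytes,
                               size_t mtu_bytes,
                               const DcSctpOptions& options) {
  if (options.immediate_sack_under_cwnd_mtus == 0) {
    return false;  // Feature disabled.
  }
  // Below the threshold, DATA chunks carry the "I" bit (RFC 7053) so the peer
  // SACKs immediately instead of delaying, shortening the send/ack round trip.
  return cwnd_bytes < options.immediate_sack_under_cwnd_mtus * mtu_bytes;
}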
- bool enable_zero_checksum = false; + // Controls the "zero checksum option" feature, as defined in + // https://www.ietf.org/archive/id/draft-ietf-tsvwg-sctp-zero-checksum-06.html. + // To have this feature enabled, both peers must be configured to use the + // same (defined, not "none") alternate error detection method. + ZeroChecksumAlternateErrorDetectionMethod + zero_checksum_alternate_error_detection_method = + ZeroChecksumAlternateErrorDetectionMethod::None(); }; } // namespace dcsctp diff --git a/net/dcsctp/public/dcsctp_socket.h b/net/dcsctp/public/dcsctp_socket.h index 9fda56a3ad..bce886fbbf 100644 --- a/net/dcsctp/public/dcsctp_socket.h +++ b/net/dcsctp/public/dcsctp_socket.h @@ -12,12 +12,14 @@ #include #include +#include #include +#include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/task_queue/task_queue_base.h" +#include "api/units/timestamp.h" #include "net/dcsctp/public/dcsctp_handover_state.h" #include "net/dcsctp/public/dcsctp_message.h" #include "net/dcsctp/public/dcsctp_options.h" @@ -49,11 +51,11 @@ struct SendOptions { // If set, will discard messages that haven't been correctly sent and // received before the lifetime has expired. This is only available if the // peer supports Partial Reliability Extension (RFC3758). - absl::optional lifetime = absl::nullopt; + std::optional lifetime = std::nullopt; // If set, limits the number of retransmissions. This is only available // if the peer supports Partial Reliability Extension (RFC3758). - absl::optional max_retransmissions = absl::nullopt; + std::optional max_retransmissions = std::nullopt; // If set, will generate lifecycle events for this message. See e.g. // `DcSctpSocketCallbacks::OnLifecycleMessageFullySent`. This value is decided @@ -255,6 +257,10 @@ struct Metrics { // peers. bool uses_message_interleaving = false; + // Indicates if draft-tuexen-tsvwg-sctp-zero-checksum-00 has been negotiated + // by both peers. + bool uses_zero_checksum = false; + // The number of negotiated incoming and outgoing streams, which is configured // locally as `DcSctpOptions::announced_maximum_incoming_streams` and // `DcSctpOptions::announced_maximum_outgoing_streams`, and which will be @@ -281,14 +287,14 @@ class DcSctpSocketCallbacks { // // Note that it's NOT ALLOWED to call into this library from within this // callback. - virtual void SendPacket(rtc::ArrayView data) {} + virtual void SendPacket(webrtc::ArrayView /* data */) {} // Called when the library wants the packet serialized as `data` to be sent. // // Note that it's NOT ALLOWED to call into this library from within this // callback. virtual SendPacketStatus SendPacketWithStatus( - rtc::ArrayView data) { + webrtc::ArrayView data) { SendPacket(data); return SendPacketStatus::kSuccess; } @@ -306,7 +312,7 @@ class DcSctpSocketCallbacks { // Note that it's NOT ALLOWED to call into this library from within this // callback. virtual std::unique_ptr CreateTimeout( - webrtc::TaskQueueBase::DelayPrecision precision) { + webrtc::TaskQueueBase::DelayPrecision /* precision */) { // TODO(hbos): When dependencies have migrated to this new signature, make // this pure virtual and delete the other version. return CreateTimeout(); @@ -319,9 +325,21 @@ class DcSctpSocketCallbacks { // Returns the current time in milliseconds (from any epoch). // + // TODO(bugs.webrtc.org/15593): This method is deprecated, see `Now`. + // + // Note that it's NOT ALLOWED to call into this library from within this + // callback. 
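A minimal configuration sketch for the option that replaces `enable_zero_checksum` above, assuming the association runs on top of DTLS as it does in WebRTC (RFC 8261); the helper name is illustrative.

DcSctpOptions MakeOptionsForDtlsTransport() {
  DcSctpOptions options;
  // The feature is only negotiated if both peers announce the same
  // (non-None) alternate error detection method.
  options.zero_checksum_alternate_error_detection_method =
      ZeroChecksumAlternateErrorDetectionMethod::LowerLayerDtls();
  return options;
}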
+ virtual TimeMs TimeMillis() { return TimeMs(0); } + + // Returns the current time (from any epoch). + // + // This callback will eventually replace `TimeMillis()`. + // // Note that it's NOT ALLOWED to call into this library from within this // callback. - virtual TimeMs TimeMillis() = 0; + virtual webrtc::Timestamp Now() { + return webrtc::Timestamp::Millis(*TimeMillis()); + } // Called when the library needs a random number uniformly distributed between // `low` (inclusive) and `high` (exclusive). The random numbers used by the @@ -385,27 +403,27 @@ class DcSctpSocketCallbacks { // // It is allowed to call into this library from within this callback. virtual void OnStreamsResetFailed( - rtc::ArrayView outgoing_streams, + webrtc::ArrayView outgoing_streams, absl::string_view reason) = 0; // Indicates that a stream reset request has been performed. // // It is allowed to call into this library from within this callback. virtual void OnStreamsResetPerformed( - rtc::ArrayView outgoing_streams) = 0; + webrtc::ArrayView outgoing_streams) = 0; // When a peer has reset some of its outgoing streams, this will be called. An // empty list indicates that all streams have been reset. // // It is allowed to call into this library from within this callback. virtual void OnIncomingStreamsReset( - rtc::ArrayView incoming_streams) = 0; + webrtc::ArrayView incoming_streams) = 0; // Will be called when the amount of data buffered to be sent falls to or // below the threshold set when calling `SetBufferedAmountLowThreshold`. // // It is allowed to call into this library from within this callback. - virtual void OnBufferedAmountLow(StreamID stream_id) {} + virtual void OnBufferedAmountLow(StreamID /* stream_id */) {} // Will be called when the total amount of data buffered (in the entire send // buffer, for all streams) falls to or below the threshold specified in @@ -438,7 +456,7 @@ class DcSctpSocketCallbacks { // // Note that it's NOT ALLOWED to call into this library from within this // callback. - virtual void OnLifecycleMessageFullySent(LifecycleId lifecycle_id) {} + virtual void OnLifecycleMessageFullySent(LifecycleId /* lifecycle_id */) {} // OnLifecycleMessageExpired will be called when a message has expired. If it // was expired with data remaining in the send queue that had not been sent @@ -456,8 +474,8 @@ class DcSctpSocketCallbacks { // // Note that it's NOT ALLOWED to call into this library from within this // callback. - virtual void OnLifecycleMessageExpired(LifecycleId lifecycle_id, - bool maybe_delivered) {} + virtual void OnLifecycleMessageExpired(LifecycleId /* lifecycle_id */, + bool /* maybe_delivered */) {} // OnLifecycleMessageDelivered will be called when a non-expired message has // been acknowledged by the peer as delivered. @@ -475,7 +493,7 @@ class DcSctpSocketCallbacks { // // Note that it's NOT ALLOWED to call into this library from within this // callback. - virtual void OnLifecycleMessageDelivered(LifecycleId lifecycle_id) {} + virtual void OnLifecycleMessageDelivered(LifecycleId /* lifecycle_id */) {} // OnLifecycleEnd will be called when a lifecycle event has reached its end. // It will be called when processing of a message is complete, no matter how @@ -495,7 +513,7 @@ class DcSctpSocketCallbacks { // // Note that it's NOT ALLOWED to call into this library from within this // callback. 
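On the Now()/TimeMillis() transition earlier in this header: since the default Now() delegates to the deprecated TimeMillis(), existing callback implementations keep working, and a migrated implementation only needs to override Now(). A sketch, assuming rtc::TimeMillis() from rtc_base/time_utils.h as the clock source (any millisecond clock with a fixed epoch would do); the remaining pure-virtual callbacks are elided.

class MigratedCallbacks : public DcSctpSocketCallbacks {
 public:
  webrtc::Timestamp Now() override {
    // New-style clock callback; TimeMillis() no longer needs to be overridden.
    return webrtc::Timestamp::Millis(rtc::TimeMillis());
  }
  // ... other DcSctpSocketCallbacks overrides elided from this sketch.
};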
- virtual void OnLifecycleEnd(LifecycleId lifecycle_id) {} + virtual void OnLifecycleEnd(LifecycleId /* lifecycle_id */) {} }; // The DcSctpSocket implementation implements the following interface. @@ -505,7 +523,7 @@ class DcSctpSocketInterface { virtual ~DcSctpSocketInterface() = default; // To be called when an incoming SCTP packet is to be processed. - virtual void ReceivePacket(rtc::ArrayView data) = 0; + virtual void ReceivePacket(webrtc::ArrayView data) = 0; // To be called when a timeout has expired. The `timeout_id` is provided // when the timeout was initiated. @@ -560,6 +578,16 @@ class DcSctpSocketInterface { virtual SendStatus Send(DcSctpMessage message, const SendOptions& send_options) = 0; + // Sends the messages `messages` using the provided send options. + // Sending a message is an asynchronous operation, and the `OnError` callback + // may be invoked to indicate any errors in sending the message. + // + // This has identical semantics to Send, except that it may coalesce many + // messages into a single SCTP packet if they would fit. + virtual std::vector SendMany( + webrtc::ArrayView messages, + const SendOptions& send_options) = 0; + // Resetting streams is an asynchronous operation and the results will // be notified using `DcSctpSocketCallbacks::OnStreamsResetDone()` on success // and `DcSctpSocketCallbacks::OnStreamsResetFailed()` on failure. Note that @@ -576,7 +604,7 @@ class DcSctpSocketInterface { // supports stream resetting. Calling this method on e.g. a closed association // or streams that don't support resetting will not perform any operation. virtual ResetStreamsStatus ResetStreams( - rtc::ArrayView outgoing_streams) = 0; + webrtc::ArrayView outgoing_streams) = 0; // Returns the number of bytes of data currently queued to be sent on a given // stream. @@ -593,10 +621,10 @@ class DcSctpSocketInterface { size_t bytes) = 0; // Retrieves the latest metrics. If the socket is not fully connected, - // `absl::nullopt` will be returned. Note that metrics are not guaranteed to + // `std::nullopt` will be returned. Note that metrics are not guaranteed to // be carried over if this socket is handed over by calling // `GetHandoverStateAndClose`. - virtual absl::optional GetMetrics() const = 0; + virtual std::optional GetMetrics() const = 0; // Returns empty bitmask if the socket is in the state in which a snapshot of // the state can be made by `GetHandoverStateAndClose()`. Return value is @@ -609,7 +637,7 @@ class DcSctpSocketInterface { // The method fails if the socket is not in a state ready for handover. // nullopt indicates the failure. `DcSctpSocketCallbacks::OnClosed` will be // called on success. - virtual absl::optional + virtual std::optional GetHandoverStateAndClose() = 0; // Returns the detected SCTP implementation of the peer. 
As this is not diff --git a/net/dcsctp/public/mock_dcsctp_socket.h b/net/dcsctp/public/mock_dcsctp_socket.h index 0fd572bd94..c1c59fea32 100644 --- a/net/dcsctp/public/mock_dcsctp_socket.h +++ b/net/dcsctp/public/mock_dcsctp_socket.h @@ -10,6 +10,8 @@ #ifndef NET_DCSCTP_PUBLIC_MOCK_DCSCTP_SOCKET_H_ #define NET_DCSCTP_PUBLIC_MOCK_DCSCTP_SOCKET_H_ +#include + #include "net/dcsctp/public/dcsctp_socket.h" #include "test/gmock.h" @@ -19,7 +21,7 @@ class MockDcSctpSocket : public DcSctpSocketInterface { public: MOCK_METHOD(void, ReceivePacket, - (rtc::ArrayView data), + (webrtc::ArrayView data), (override)); MOCK_METHOD(void, HandleTimeout, (TimeoutID timeout_id), (override)); @@ -56,9 +58,15 @@ class MockDcSctpSocket : public DcSctpSocketInterface { (DcSctpMessage message, const SendOptions& send_options), (override)); + MOCK_METHOD(std::vector, + SendMany, + (webrtc::ArrayView messages, + const SendOptions& send_options), + (override)); + MOCK_METHOD(ResetStreamsStatus, ResetStreams, - (rtc::ArrayView outgoing_streams), + (webrtc::ArrayView outgoing_streams), (override)); MOCK_METHOD(size_t, buffered_amount, (StreamID stream_id), (const, override)); @@ -73,13 +81,13 @@ class MockDcSctpSocket : public DcSctpSocketInterface { (StreamID stream_id, size_t bytes), (override)); - MOCK_METHOD(absl::optional, GetMetrics, (), (const, override)); + MOCK_METHOD(std::optional, GetMetrics, (), (const, override)); MOCK_METHOD(HandoverReadinessStatus, GetHandoverReadiness, (), (const, override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, GetHandoverStateAndClose, (), (override)); diff --git a/net/dcsctp/public/packet_observer.h b/net/dcsctp/public/packet_observer.h index fe7567824f..0eb1d4f997 100644 --- a/net/dcsctp/public/packet_observer.h +++ b/net/dcsctp/public/packet_observer.h @@ -25,12 +25,12 @@ class PacketObserver { // Called when a packet is sent, with the current time (in milliseconds) as // `now`, and the packet payload as `payload`. virtual void OnSentPacket(TimeMs now, - rtc::ArrayView payload) = 0; + webrtc::ArrayView payload) = 0; // Called when a packet is received, with the current time (in milliseconds) // as `now`, and the packet payload as `payload`. 
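Returning to the SendMany() method added to DcSctpSocketInterface (and mocked above): a short usage sketch in which two small messages may be coalesced into one SCTP packet. `socket` and `ppid` are placeholders.

void SendTwoSmallMessagesSketch(DcSctpSocketInterface& socket, PPID ppid) {
  std::vector<DcSctpMessage> messages;
  messages.emplace_back(StreamID(1), ppid, std::vector<uint8_t>{1, 2, 3});
  messages.emplace_back(StreamID(1), ppid, std::vector<uint8_t>{4, 5, 6});
  // One SendStatus per message, in order; packing into a single packet is an
  // internal optimization and does not change the per-message semantics.
  std::vector<SendStatus> statuses = socket.SendMany(messages, SendOptions());
}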
virtual void OnReceivedPacket(TimeMs now, - rtc::ArrayView payload) = 0; + webrtc::ArrayView payload) = 0; }; } // namespace dcsctp diff --git a/net/dcsctp/public/text_pcap_packet_observer.cc b/net/dcsctp/public/text_pcap_packet_observer.cc index 2b13060190..e43585223b 100644 --- a/net/dcsctp/public/text_pcap_packet_observer.cc +++ b/net/dcsctp/public/text_pcap_packet_observer.cc @@ -18,13 +18,13 @@ namespace dcsctp { void TextPcapPacketObserver::OnSentPacket( dcsctp::TimeMs now, - rtc::ArrayView payload) { + webrtc::ArrayView payload) { PrintPacket("O ", name_, now, payload); } void TextPcapPacketObserver::OnReceivedPacket( dcsctp::TimeMs now, - rtc::ArrayView payload) { + webrtc::ArrayView payload) { PrintPacket("I ", name_, now, payload); } @@ -32,8 +32,8 @@ void TextPcapPacketObserver::PrintPacket( absl::string_view prefix, absl::string_view socket_name, dcsctp::TimeMs now, - rtc::ArrayView payload) { - rtc::StringBuilder s; + webrtc::ArrayView payload) { + webrtc::StringBuilder s; s << "\n" << prefix; int64_t remaining = *now % (24 * 60 * 60 * 1000); int hours = remaining / (60 * 60 * 1000); diff --git a/net/dcsctp/public/text_pcap_packet_observer.h b/net/dcsctp/public/text_pcap_packet_observer.h index 0685771ccf..2434511dd3 100644 --- a/net/dcsctp/public/text_pcap_packet_observer.h +++ b/net/dcsctp/public/text_pcap_packet_observer.h @@ -26,17 +26,17 @@ class TextPcapPacketObserver : public dcsctp::PacketObserver { // Implementation of `dcsctp::PacketObserver`. void OnSentPacket(dcsctp::TimeMs now, - rtc::ArrayView payload) override; + webrtc::ArrayView payload) override; void OnReceivedPacket(dcsctp::TimeMs now, - rtc::ArrayView payload) override; + webrtc::ArrayView payload) override; // Prints a packet to the log. Exposed to allow it to be used in compatibility // tests suites that don't use PacketObserver. static void PrintPacket(absl::string_view prefix, absl::string_view socket_name, dcsctp::TimeMs now, - rtc::ArrayView payload); + webrtc::ArrayView payload); private: const std::string name_; diff --git a/net/dcsctp/public/types.h b/net/dcsctp/public/types.h index d0725620d8..1565cd7b58 100644 --- a/net/dcsctp/public/types.h +++ b/net/dcsctp/public/types.h @@ -14,6 +14,7 @@ #include #include +#include "api/units/time_delta.h" #include "rtc_base/strong_alias.h" namespace dcsctp { @@ -41,6 +42,13 @@ class DurationMs : public webrtc::StrongAlias { constexpr explicit DurationMs(const UnderlyingType& v) : webrtc::StrongAlias(v) {} + constexpr explicit DurationMs(webrtc::TimeDelta v) + : webrtc::StrongAlias( + v.IsInfinite() ? InfiniteDuration() : DurationMs(v.ms())) {} + + static constexpr DurationMs InfiniteDuration() { + return DurationMs(std::numeric_limits::max()); + } // Convenience methods for working with time. constexpr DurationMs& operator+=(DurationMs d) { value_ += d.value_; @@ -55,6 +63,11 @@ class DurationMs : public webrtc::StrongAlias { value_ *= factor; return *this; } + constexpr webrtc::TimeDelta ToTimeDelta() const { + return *this == DurationMs::InfiniteDuration() + ? webrtc::TimeDelta::PlusInfinity() + : webrtc::TimeDelta::Millis(value_); + } }; constexpr inline DurationMs operator+(DurationMs lhs, DurationMs rhs) { @@ -138,6 +151,27 @@ class LifecycleId : public webrtc::StrongAlias { static constexpr LifecycleId NotSet() { return LifecycleId(0); } }; + +// To enable zero checksum feature, both peers must agree on which alternate +// error detection method that is used. See +// https://www.ietf.org/archive/id/draft-ietf-tsvwg-sctp-zero-checksum-06.html. 
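Regarding the DurationMs additions earlier in types.h: the new constructor and ToTimeDelta() form a bridge to webrtc::TimeDelta in which the two infinite values map onto each other. A small sketch of the round trip:

void DurationConversionSketch() {
  DurationMs rto(200);
  webrtc::TimeDelta rto_delta = rto.ToTimeDelta();  // 200 ms.

  // DurationMs::InfiniteDuration() and TimeDelta::PlusInfinity() round-trip.
  DurationMs no_limit(webrtc::TimeDelta::PlusInfinity());
  webrtc::TimeDelta still_infinite = no_limit.ToTimeDelta();
}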
+class ZeroChecksumAlternateErrorDetectionMethod + : public webrtc::StrongAlias< + class ZeroChecksumAlternateErrorDetectionMethodTag, + uint32_t> { + public: + constexpr explicit ZeroChecksumAlternateErrorDetectionMethod( + const UnderlyingType& v) + : webrtc::StrongAlias(v) {} + + static constexpr ZeroChecksumAlternateErrorDetectionMethod None() { + return ZeroChecksumAlternateErrorDetectionMethod(0); + } + static constexpr ZeroChecksumAlternateErrorDetectionMethod LowerLayerDtls() { + return ZeroChecksumAlternateErrorDetectionMethod(1); + } +}; } // namespace dcsctp #endif // NET_DCSCTP_PUBLIC_TYPES_H_ diff --git a/net/dcsctp/rx/BUILD.gn b/net/dcsctp/rx/BUILD.gn index d66fd6ba72..2046a27a80 100644 --- a/net/dcsctp/rx/BUILD.gn +++ b/net/dcsctp/rx/BUILD.gn @@ -19,16 +19,13 @@ rtc_library("data_tracker") { "../packet:data", "../public:socket", "../timer", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "data_tracker.cc", "data_tracker.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_source_set("reassembly_streams") { @@ -39,9 +36,9 @@ rtc_source_set("reassembly_streams") { "../packet:data", "../public:socket", "../public:types", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "reassembly_streams.h" ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("interleaved_reassembly_streams") { @@ -54,16 +51,13 @@ rtc_library("interleaved_reassembly_streams") { "../packet:chunk", "../packet:data", "../public:types", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "interleaved_reassembly_streams.cc", "interleaved_reassembly_streams.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("traditional_reassembly_streams") { deps = [ @@ -75,16 +69,13 @@ rtc_library("traditional_reassembly_streams") { "../packet:chunk", "../packet:data", "../public:types", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "traditional_reassembly_streams.cc", "traditional_reassembly_streams.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("reassembly_queue") { @@ -95,23 +86,22 @@ rtc_library("reassembly_queue") { "../../../api:array_view", "../../../rtc_base:checks", "../../../rtc_base:logging", + "../../../rtc_base:stringutils", + "../../../rtc_base/containers:flat_set", "../common:internal_types", "../common:sequence_numbers", - "../common:str_join", "../packet:chunk", "../packet:data", "../packet:parameter", "../public:socket", "../public:types", + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "reassembly_queue.cc", "reassembly_queue.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } if (rtc_include_tests) { @@ -137,7 +127,6 @@ if (rtc_include_tests) { "../testing:data_generator", "../timer", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] sources = [ "data_tracker_test.cc", 
"interleaved_reassembly_streams_test.cc", diff --git a/net/dcsctp/rx/data_tracker.cc b/net/dcsctp/rx/data_tracker.cc index 1f2e43f7f5..627478a2b2 100644 --- a/net/dcsctp/rx/data_tracker.cc +++ b/net/dcsctp/rx/data_tracker.cc @@ -12,6 +12,7 @@ #include #include #include +#include #include #include #include @@ -19,7 +20,6 @@ #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "net/dcsctp/common/sequence_numbers.h" #include "net/dcsctp/packet/chunk/sack_chunk.h" #include "net/dcsctp/timer/timer.h" @@ -214,7 +214,7 @@ bool DataTracker::Observe(TSN tsn, return !is_duplicate; } -void DataTracker::HandleForwardTsn(TSN new_cumulative_ack) { +bool DataTracker::HandleForwardTsn(TSN new_cumulative_ack) { // ForwardTSN is sent to make the receiver (this socket) "forget" about partly // received (or not received at all) data, up until `new_cumulative_ack`. @@ -232,7 +232,7 @@ void DataTracker::HandleForwardTsn(TSN new_cumulative_ack) { // indicate the previous SACK was lost in the network." UpdateAckState(AckState::kImmediate, "FORWARD_TSN new_cumulative_tsn was behind"); - return; + return false; } // https://tools.ietf.org/html/rfc3758#section-3.6 @@ -271,6 +271,7 @@ void DataTracker::HandleForwardTsn(TSN new_cumulative_ack) { UpdateAckState(AckState::kImmediate, "received FORWARD_TSN when already delayed"); } + return true; } SackChunk DataTracker::CreateSelectiveAck(size_t a_rwnd) { diff --git a/net/dcsctp/rx/data_tracker.h b/net/dcsctp/rx/data_tracker.h index e07e1e379d..9991ee6139 100644 --- a/net/dcsctp/rx/data_tracker.h +++ b/net/dcsctp/rx/data_tracker.h @@ -74,8 +74,9 @@ class DataTracker { // Called at the end of processing an SCTP packet. void ObservePacketEnd(); - // Called for incoming FORWARD-TSN/I-FORWARD-TSN chunks - void HandleForwardTsn(TSN new_cumulative_ack); + // Called for incoming FORWARD-TSN/I-FORWARD-TSN chunks. Indicates if the + // chunk had any effect. + bool HandleForwardTsn(TSN new_cumulative_ack); // Indicates if a SACK should be sent. There may be other reasons to send a // SACK, but if this function indicates so, it should be sent as soon as @@ -92,6 +93,10 @@ class DataTracker { return TSN(last_cumulative_acked_tsn_.Wrap()); } + bool IsLaterThanCumulativeAckedTsn(TSN tsn) const { + return tsn_unwrapper_.PeekUnwrap(tsn) > last_cumulative_acked_tsn_; + } + // Returns true if the received `tsn` would increase the cumulative ack TSN. 
bool will_increase_cum_ack_tsn(TSN tsn) const; diff --git a/net/dcsctp/rx/data_tracker_test.cc b/net/dcsctp/rx/data_tracker_test.cc index f74dd6eb0b..e0bf4cf739 100644 --- a/net/dcsctp/rx/data_tracker_test.cc +++ b/net/dcsctp/rx/data_tracker_test.cc @@ -12,8 +12,8 @@ #include #include #include +#include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/task_queue/task_queue_base.h" #include "net/dcsctp/common/handover_testing.h" @@ -29,6 +29,8 @@ using ::testing::ElementsAre; using ::testing::IsEmpty; using ::testing::SizeIs; using ::testing::UnorderedElementsAre; +using ::webrtc::TimeDelta; +using ::webrtc::Timestamp; constexpr size_t kArwnd = 10000; constexpr TSN kInitialTSN(11); @@ -42,8 +44,8 @@ class DataTrackerTest : public testing::Test { }), timer_(timer_manager_.CreateTimer( "test/delayed_ack", - []() { return absl::nullopt; }, - TimerOptions(DurationMs(0)))), + []() { return TimeDelta::Zero(); }, + TimerOptions(TimeDelta::Zero()))), tracker_( std::make_unique("log: ", timer_.get(), kInitialTSN)) { } @@ -71,7 +73,7 @@ class DataTrackerTest : public testing::Test { tracker_->RestoreFromState(state); } - TimeMs now_ = TimeMs(0); + Timestamp now_ = Timestamp::Zero(); FakeTimeoutManager timeout_manager_; TimerManager timer_manager_; std::unique_ptr timer_; @@ -735,5 +737,65 @@ TEST_F(DataTrackerTest, HandoverWhileSendingSackEveryPacketOnPacketLoss) { EXPECT_FALSE(tracker_->ShouldSendAck()); EXPECT_TRUE(timer_->is_running()); } + +TEST_F(DataTrackerTest, DoesNotAcceptDataBeforeForwardTsn) { + Observer({12, 13, 14, 15, 17}); + tracker_->ObservePacketEnd(); + + tracker_->HandleForwardTsn(TSN(13)); + + EXPECT_FALSE(tracker_->Observe(TSN(11))); +} + +TEST_F(DataTrackerTest, DoesNotAcceptDataAtForwardTsn) { + Observer({12, 13, 14, 15, 17}); + tracker_->ObservePacketEnd(); + + tracker_->HandleForwardTsn(TSN(16)); + + EXPECT_FALSE(tracker_->Observe(TSN(16))); +} + +TEST_F(DataTrackerTest, DoesNotAcceptDataBeforeCumAckTsn) { + EXPECT_EQ(kInitialTSN, TSN(11)); + EXPECT_FALSE(tracker_->Observe(TSN(10))); +} + +TEST_F(DataTrackerTest, DoesNotAcceptContiguousDuplicateData) { + EXPECT_EQ(kInitialTSN, TSN(11)); + EXPECT_TRUE(tracker_->Observe(TSN(11))); + EXPECT_FALSE(tracker_->Observe(TSN(11))); + EXPECT_TRUE(tracker_->Observe(TSN(12))); + EXPECT_FALSE(tracker_->Observe(TSN(12))); + EXPECT_FALSE(tracker_->Observe(TSN(11))); + EXPECT_FALSE(tracker_->Observe(TSN(10))); +} + +TEST_F(DataTrackerTest, DoesNotAcceptGapsWithDuplicateData) { + EXPECT_EQ(kInitialTSN, TSN(11)); + EXPECT_TRUE(tracker_->Observe(TSN(11))); + EXPECT_FALSE(tracker_->Observe(TSN(11))); + + EXPECT_TRUE(tracker_->Observe(TSN(14))); + EXPECT_FALSE(tracker_->Observe(TSN(14))); + + EXPECT_TRUE(tracker_->Observe(TSN(13))); + EXPECT_FALSE(tracker_->Observe(TSN(13))); + + EXPECT_TRUE(tracker_->Observe(TSN(12))); + EXPECT_FALSE(tracker_->Observe(TSN(12))); +} + +TEST_F(DataTrackerTest, NotReadyForHandoverWhenHavingTsnGaps) { + tracker_->Observe(TSN(10)); + tracker_->Observe(TSN(12)); + EXPECT_EQ(tracker_->GetHandoverReadiness(), + HandoverReadinessStatus().Add( + HandoverUnreadinessReason::kDataTrackerTsnBlocksPending)); + + tracker_->Observe(TSN(11)); + EXPECT_EQ(tracker_->GetHandoverReadiness(), HandoverReadinessStatus()); +} + } // namespace } // namespace dcsctp diff --git a/net/dcsctp/rx/interleaved_reassembly_streams.cc b/net/dcsctp/rx/interleaved_reassembly_streams.cc index 8b316de676..c34c042281 100644 --- a/net/dcsctp/rx/interleaved_reassembly_streams.cc +++ 
b/net/dcsctp/rx/interleaved_reassembly_streams.cc @@ -37,14 +37,13 @@ InterleavedReassemblyStreams::InterleavedReassemblyStreams( size_t InterleavedReassemblyStreams::Stream::TryToAssembleMessage( UnwrappedMID mid) { - std::map::const_iterator it = - chunks_by_mid_.find(mid); + std::map::iterator it = chunks_by_mid_.find(mid); if (it == chunks_by_mid_.end()) { RTC_DLOG(LS_VERBOSE) << parent_.log_prefix_ << "TryToAssembleMessage " << *mid.Wrap() << " - no chunks"; return 0; } - const ChunkMap& chunks = it->second; + ChunkMap& chunks = it->second; if (!chunks.begin()->second.second.is_beginning || !chunks.rbegin()->second.second.is_end) { RTC_DLOG(LS_VERBOSE) << parent_.log_prefix_ << "TryToAssembleMessage " @@ -69,17 +68,22 @@ size_t InterleavedReassemblyStreams::Stream::TryToAssembleMessage( return removed_bytes; } +size_t InterleavedReassemblyStreams::Stream::AssembleMessage(UnwrappedTSN tsn, + Data data) { + size_t payload_size = data.size(); + UnwrappedTSN tsns[1] = {tsn}; + DcSctpMessage message(data.stream_id, data.ppid, std::move(data.payload)); + parent_.on_assembled_message_(tsns, std::move(message)); + return payload_size; +} + size_t InterleavedReassemblyStreams::Stream::AssembleMessage( - const ChunkMap& tsn_chunks) { + ChunkMap& tsn_chunks) { size_t count = tsn_chunks.size(); if (count == 1) { // Fast path - zero-copy - const Data& data = tsn_chunks.begin()->second.second; - size_t payload_size = data.size(); - UnwrappedTSN tsns[1] = {tsn_chunks.begin()->second.first}; - DcSctpMessage message(data.stream_id, data.ppid, std::move(data.payload)); - parent_.on_assembled_message_(tsns, std::move(message)); - return payload_size; + return AssembleMessage(tsn_chunks.begin()->second.first, + std::move(tsn_chunks.begin()->second.second)); } // Slow path - will need to concatenate the payload. @@ -106,8 +110,8 @@ size_t InterleavedReassemblyStreams::Stream::AssembleMessage( return payload_size; } -size_t InterleavedReassemblyStreams::Stream::EraseTo(MID message_id) { - UnwrappedMID unwrapped_mid = mid_unwrapper_.Unwrap(message_id); +size_t InterleavedReassemblyStreams::Stream::EraseTo(MID mid) { + UnwrappedMID unwrapped_mid = mid_unwrapper_.Unwrap(mid); size_t removed_bytes = 0; auto it = chunks_by_mid_.begin(); @@ -135,8 +139,23 @@ int InterleavedReassemblyStreams::Stream::Add(UnwrappedTSN tsn, Data data) { RTC_DCHECK_EQ(*data.is_unordered, *stream_id_.unordered); RTC_DCHECK_EQ(*data.stream_id, *stream_id_.stream_id); int queued_bytes = data.size(); - UnwrappedMID mid = mid_unwrapper_.Unwrap(data.message_id); + UnwrappedMID mid = mid_unwrapper_.Unwrap(data.mid); FSN fsn = data.fsn; + + // Avoid inserting it into any map if it can be delivered directly. + if (stream_id_.unordered && data.is_beginning && data.is_end) { + AssembleMessage(tsn, std::move(data)); + return 0; + + } else if (!stream_id_.unordered && mid == next_mid_ && data.is_beginning && + data.is_end) { + AssembleMessage(tsn, std::move(data)); + next_mid_.Increment(); + // This might unblock assembling more messages. + return -TryToAssembleMessages(); + } + + // Slow path. 
auto [unused, inserted] = chunks_by_mid_[mid].emplace(fsn, std::make_pair(tsn, std::move(data))); if (!inserted) { @@ -202,19 +221,20 @@ int InterleavedReassemblyStreams::Add(UnwrappedTSN tsn, Data data) { } size_t InterleavedReassemblyStreams::HandleForwardTsn( - UnwrappedTSN new_cumulative_ack_tsn, - rtc::ArrayView skipped_streams) { + UnwrappedTSN /* new_cumulative_ack_tsn */, + webrtc::ArrayView + skipped_streams) { size_t removed_bytes = 0; for (const auto& skipped : skipped_streams) { removed_bytes += GetOrCreateStream(FullStreamId(skipped.unordered, skipped.stream_id)) - .EraseTo(skipped.message_id); + .EraseTo(skipped.mid); } return removed_bytes; } void InterleavedReassemblyStreams::ResetStreams( - rtc::ArrayView stream_ids) { + webrtc::ArrayView stream_ids) { if (stream_ids.empty()) { for (auto& entry : streams_) { entry.second.Reset(); @@ -254,16 +274,16 @@ void InterleavedReassemblyStreams::RestoreFromState( // Validate that the component is in pristine state. RTC_DCHECK(streams_.empty()); - for (const DcSctpSocketHandoverState::OrderedStream& state : + for (const DcSctpSocketHandoverState::OrderedStream& stream_state : state.rx.ordered_streams) { - FullStreamId stream_id(IsUnordered(false), StreamID(state.id)); + FullStreamId stream_id(IsUnordered(false), StreamID(stream_state.id)); streams_.emplace( std::piecewise_construct, std::forward_as_tuple(stream_id), - std::forward_as_tuple(stream_id, this, MID(state.next_ssn))); + std::forward_as_tuple(stream_id, this, MID(stream_state.next_ssn))); } - for (const DcSctpSocketHandoverState::UnorderedStream& state : + for (const DcSctpSocketHandoverState::UnorderedStream& stream_state : state.rx.unordered_streams) { - FullStreamId stream_id(IsUnordered(true), StreamID(state.id)); + FullStreamId stream_id(IsUnordered(true), StreamID(stream_state.id)); streams_.emplace(std::piecewise_construct, std::forward_as_tuple(stream_id), std::forward_as_tuple(stream_id, this)); } diff --git a/net/dcsctp/rx/interleaved_reassembly_streams.h b/net/dcsctp/rx/interleaved_reassembly_streams.h index 605cf42b93..2213fbb8d0 100644 --- a/net/dcsctp/rx/interleaved_reassembly_streams.h +++ b/net/dcsctp/rx/interleaved_reassembly_streams.h @@ -35,10 +35,10 @@ class InterleavedReassemblyStreams : public ReassemblyStreams { size_t HandleForwardTsn( UnwrappedTSN new_cumulative_ack_tsn, - rtc::ArrayView skipped_streams) - override; + webrtc::ArrayView + skipped_streams) override; - void ResetStreams(rtc::ArrayView stream_ids) override; + void ResetStreams(webrtc::ArrayView stream_ids) override; HandoverReadinessStatus GetHandoverReadiness() const override; void AddHandoverState(DcSctpSocketHandoverState& state) override; @@ -67,7 +67,7 @@ class InterleavedReassemblyStreams : public ReassemblyStreams { parent_(*parent), next_mid_(mid_unwrapper_.Unwrap(next_mid)) {} int Add(UnwrappedTSN tsn, Data data); - size_t EraseTo(MID message_id); + size_t EraseTo(MID mid); void Reset() { mid_unwrapper_.Reset(); next_mid_ = mid_unwrapper_.Unwrap(MID(0)); @@ -81,7 +81,9 @@ class InterleavedReassemblyStreams : public ReassemblyStreams { // Try to assemble one message identified by `mid`. // Returns the number of bytes assembled if a message was assembled. size_t TryToAssembleMessage(UnwrappedMID mid); - size_t AssembleMessage(const ChunkMap& tsn_chunks); + size_t AssembleMessage(ChunkMap& tsn_chunks); + size_t AssembleMessage(UnwrappedTSN tsn, Data data); + // Try to assemble one or several messages in order from the stream. 
// Returns the number of bytes assembled if one or more messages were // assembled. diff --git a/net/dcsctp/rx/interleaved_reassembly_streams_test.cc b/net/dcsctp/rx/interleaved_reassembly_streams_test.cc index df4024ed60..480fbadda5 100644 --- a/net/dcsctp/rx/interleaved_reassembly_streams_test.cc +++ b/net/dcsctp/rx/interleaved_reassembly_streams_test.cc @@ -13,6 +13,7 @@ #include #include +#include "net/dcsctp/common/handover_testing.h" #include "net/dcsctp/common/sequence_numbers.h" #include "net/dcsctp/packet/chunk/forward_tsn_common.h" #include "net/dcsctp/packet/chunk/iforward_tsn_chunk.h" @@ -24,8 +25,11 @@ namespace dcsctp { namespace { +using ::testing::ElementsAre; +using ::testing::Field; using ::testing::MockFunction; using ::testing::NiceMock; +using ::testing::Property; class InterleavedReassemblyStreamsTest : public testing::Test { protected: @@ -150,5 +154,115 @@ TEST_F(InterleavedReassemblyStreamsTest, EXPECT_EQ(streams.HandleForwardTsn(tsn(4), skipped), 8u); } +TEST_F(InterleavedReassemblyStreamsTest, CanReassembleFastPathUnordered) { + NiceMock> on_assembled; + + { + testing::InSequence s; + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(1)), + Property(&DcSctpMessage::payload, ElementsAre(1)))); + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(3)), + Property(&DcSctpMessage::payload, ElementsAre(3)))); + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(2)), + Property(&DcSctpMessage::payload, ElementsAre(2)))); + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(4)), + Property(&DcSctpMessage::payload, ElementsAre(4)))); + } + + InterleavedReassemblyStreams streams("", on_assembled.AsStdFunction()); + + EXPECT_EQ(streams.Add(tsn(1), gen_.Unordered({1}, "BE")), 0); + EXPECT_EQ(streams.Add(tsn(3), gen_.Unordered({3}, "BE")), 0); + EXPECT_EQ(streams.Add(tsn(2), gen_.Unordered({2}, "BE")), 0); + EXPECT_EQ(streams.Add(tsn(4), gen_.Unordered({4}, "BE")), 0); +} + +TEST_F(InterleavedReassemblyStreamsTest, CanReassembleFastPathOrdered) { + NiceMock> on_assembled; + + { + testing::InSequence s; + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(1)), + Property(&DcSctpMessage::payload, ElementsAre(1)))); + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(2)), + Property(&DcSctpMessage::payload, ElementsAre(2)))); + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(3)), + Property(&DcSctpMessage::payload, ElementsAre(3)))); + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(4)), + Property(&DcSctpMessage::payload, ElementsAre(4)))); + } + + InterleavedReassemblyStreams streams("", on_assembled.AsStdFunction()); + + Data data1 = gen_.Ordered({1}, "BE"); + Data data2 = gen_.Ordered({2}, "BE"); + Data data3 = gen_.Ordered({3}, "BE"); + Data data4 = gen_.Ordered({4}, "BE"); + EXPECT_EQ(streams.Add(tsn(1), std::move(data1)), 0); + EXPECT_EQ(streams.Add(tsn(3), std::move(data3)), 1); + EXPECT_EQ(streams.Add(tsn(2), std::move(data2)), -1); + EXPECT_EQ(streams.Add(tsn(4), std::move(data4)), 0); +} + +TEST_F(InterleavedReassemblyStreamsTest, CanHandoverOrderedStreams) { + InterleavedReassemblyStreams streams1("", [](auto...) 
{}); + + EXPECT_EQ(streams1.Add(tsn(1), gen_.Ordered({1}, "B")), 1); + EXPECT_EQ(streams1.GetHandoverReadiness(), + HandoverReadinessStatus( + HandoverUnreadinessReason::kOrderedStreamHasUnassembledChunks)); + EXPECT_EQ(streams1.Add(tsn(2), gen_.Ordered({2, 3, 4}, "E")), -1); + EXPECT_TRUE(streams1.GetHandoverReadiness().IsReady()); + + DcSctpSocketHandoverState state; + streams1.AddHandoverState(state); + g_handover_state_transformer_for_test(&state); + + MockFunction on_assembled; + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(3)), + Property(&DcSctpMessage::payload, ElementsAre(5, 6, 7, 8)))); + + InterleavedReassemblyStreams streams2("", on_assembled.AsStdFunction()); + streams2.RestoreFromState(state); + Data data = gen_.Ordered({5, 6, 7, 8}, "BE"); + EXPECT_EQ(data.mid, MID(1)); + EXPECT_EQ(streams2.Add(tsn(3), std::move(data)), 0); +} + +TEST_F(InterleavedReassemblyStreamsTest, CanHandoverUnorderedStreams) { + InterleavedReassemblyStreams streams1("", [](auto...) {}); + + EXPECT_EQ(streams1.Add(tsn(1), gen_.Unordered({1}, "B")), 1); + EXPECT_EQ( + streams1.GetHandoverReadiness(), + HandoverReadinessStatus( + HandoverUnreadinessReason::kUnorderedStreamHasUnassembledChunks)); + EXPECT_EQ(streams1.Add(tsn(2), gen_.Unordered({2, 3, 4}, "E")), -1); + EXPECT_TRUE(streams1.GetHandoverReadiness().IsReady()); + + DcSctpSocketHandoverState state; + streams1.AddHandoverState(state); + g_handover_state_transformer_for_test(&state); + + MockFunction on_assembled; + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(3)), + Property(&DcSctpMessage::payload, ElementsAre(5, 6, 7, 8)))); + + InterleavedReassemblyStreams streams2("", on_assembled.AsStdFunction()); + streams2.RestoreFromState(state); + Data data = gen_.Unordered({5, 6, 7, 8}, "BE"); + EXPECT_EQ(data.mid, MID(1)); + EXPECT_EQ(streams2.Add(tsn(3), std::move(data)), 0); +} } // namespace } // namespace dcsctp diff --git a/net/dcsctp/rx/reassembly_queue.cc b/net/dcsctp/rx/reassembly_queue.cc index 2cc90a6202..439863a6cb 100644 --- a/net/dcsctp/rx/reassembly_queue.cc +++ b/net/dcsctp/rx/reassembly_queue.cc @@ -14,25 +14,27 @@ #include #include #include +#include #include #include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/common/sequence_numbers.h" -#include "net/dcsctp/common/str_join.h" #include "net/dcsctp/packet/chunk/forward_tsn_common.h" #include "net/dcsctp/packet/data.h" #include "net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h" #include "net/dcsctp/packet/parameter/reconfiguration_response_parameter.h" +#include "net/dcsctp/public/dcsctp_handover_state.h" #include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/public/types.h" #include "net/dcsctp/rx/interleaved_reassembly_streams.h" #include "net/dcsctp/rx/reassembly_streams.h" #include "net/dcsctp/rx/traditional_reassembly_streams.h" #include "rtc_base/logging.h" +#include "rtc_base/strings/str_join.h" namespace dcsctp { namespace { @@ -50,28 +52,23 @@ std::unique_ptr CreateStreams( } // namespace ReassemblyQueue::ReassemblyQueue(absl::string_view log_prefix, - TSN peer_initial_tsn, size_t max_size_bytes, bool use_message_interleaving) : log_prefix_(log_prefix), max_size_bytes_(max_size_bytes), watermark_bytes_(max_size_bytes * kHighWatermarkLimit), - last_assembled_tsn_watermark_( - tsn_unwrapper_.Unwrap(TSN(*peer_initial_tsn - 1))), - last_completed_reset_req_seq_nbr_(ReconfigRequestSN(0)), streams_(CreateStreams( log_prefix_, - [this](rtc::ArrayView 
tsns, + [this](webrtc::ArrayView tsns, DcSctpMessage message) { AddReassembledMessage(tsns, std::move(message)); }, use_message_interleaving)) {} void ReassemblyQueue::Add(TSN tsn, Data data) { - RTC_DCHECK(IsConsistent()); RTC_DLOG(LS_VERBOSE) << log_prefix_ << "added tsn=" << *tsn - << ", stream=" << *data.stream_id << ":" - << *data.message_id << ":" << *data.fsn << ", type=" + << ", stream=" << *data.stream_id << ":" << *data.mid + << ":" << *data.fsn << ", type=" << (data.is_beginning && data.is_end ? "complete" : data.is_beginning ? "first" : data.is_end ? "last" @@ -79,32 +76,27 @@ void ReassemblyQueue::Add(TSN tsn, Data data) { UnwrappedTSN unwrapped_tsn = tsn_unwrapper_.Unwrap(tsn); - if (unwrapped_tsn <= last_assembled_tsn_watermark_ || - delivered_tsns_.find(unwrapped_tsn) != delivered_tsns_.end()) { - RTC_DLOG(LS_VERBOSE) << log_prefix_ - << "Chunk has already been delivered - skipping"; - return; - } - // If a stream reset has been received with a "sender's last assigned tsn" in // the future, the socket is in "deferred reset processing" mode and must // buffer chunks until it's exited. if (deferred_reset_streams_.has_value() && - unwrapped_tsn > - tsn_unwrapper_.Unwrap( - deferred_reset_streams_->req.sender_last_assigned_tsn())) { + unwrapped_tsn > deferred_reset_streams_->sender_last_assigned_tsn && + deferred_reset_streams_->streams.contains(data.stream_id)) { RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Deferring chunk with tsn=" << *tsn - << " until cum_ack_tsn=" - << *deferred_reset_streams_->req.sender_last_assigned_tsn(); + << ", sid=" << *data.stream_id << " until tsn=" + << *deferred_reset_streams_->sender_last_assigned_tsn.Wrap(); // https://tools.ietf.org/html/rfc6525#section-5.2.2 // "In this mode, any data arriving with a TSN larger than the // Sender's Last Assigned TSN for the affected stream(s) MUST be queued // locally and held until the cumulative acknowledgment point reaches the // Sender's Last Assigned TSN." queued_bytes_ += data.size(); - deferred_reset_streams_->deferred_chunks.emplace_back( - std::make_pair(tsn, std::move(data))); + deferred_reset_streams_->deferred_actions.push_back( + [this, tsn, data = std::move(data)]() mutable { + queued_bytes_ -= data.size(); + Add(tsn, std::move(data)); + }); } else { queued_bytes_ += streams_->Add(unwrapped_tsn, std::move(data)); } @@ -120,83 +112,50 @@ void ReassemblyQueue::Add(TSN tsn, Data data) { RTC_DCHECK(IsConsistent()); } -ReconfigurationResponseParameter::Result ReassemblyQueue::ResetStreams( - const OutgoingSSNResetRequestParameter& req, - TSN cum_tsn_ack) { - RTC_DCHECK(IsConsistent()); - if (deferred_reset_streams_.has_value()) { - // In deferred mode already. - return ReconfigurationResponseParameter::Result::kInProgress; - } else if (req.request_sequence_number() <= - last_completed_reset_req_seq_nbr_) { - // Already performed at some time previously. - return ReconfigurationResponseParameter::Result::kSuccessPerformed; - } - - UnwrappedTSN sla_tsn = tsn_unwrapper_.Unwrap(req.sender_last_assigned_tsn()); - UnwrappedTSN unwrapped_cum_tsn_ack = tsn_unwrapper_.Unwrap(cum_tsn_ack); - - // https://tools.ietf.org/html/rfc6525#section-5.2.2 - // "If the Sender's Last Assigned TSN is greater than the - // cumulative acknowledgment point, then the endpoint MUST enter "deferred - // reset processing"." 
- if (sla_tsn > unwrapped_cum_tsn_ack) { - RTC_DLOG(LS_VERBOSE) - << log_prefix_ - << "Entering deferred reset processing mode until cum_tsn_ack=" - << *req.sender_last_assigned_tsn(); - deferred_reset_streams_ = absl::make_optional(req); - return ReconfigurationResponseParameter::Result::kInProgress; - } +void ReassemblyQueue::ResetStreamsAndLeaveDeferredReset( + webrtc::ArrayView stream_ids) { + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Resetting streams: [" + << webrtc::StrJoin(stream_ids, ",", + [](webrtc::StringBuilder& sb, + StreamID sid) { sb << *sid; }) + << "]"; // https://tools.ietf.org/html/rfc6525#section-5.2.2 // "... streams MUST be reset to 0 as the next expected SSN." - streams_->ResetStreams(req.stream_ids()); - last_completed_reset_req_seq_nbr_ = req.request_sequence_number(); - RTC_DCHECK(IsConsistent()); - return ReconfigurationResponseParameter::Result::kSuccessPerformed; -} + streams_->ResetStreams(stream_ids); -bool ReassemblyQueue::MaybeResetStreamsDeferred(TSN cum_ack_tsn) { - RTC_DCHECK(IsConsistent()); if (deferred_reset_streams_.has_value()) { - UnwrappedTSN unwrapped_cum_ack_tsn = tsn_unwrapper_.Unwrap(cum_ack_tsn); - UnwrappedTSN unwrapped_sla_tsn = tsn_unwrapper_.Unwrap( - deferred_reset_streams_->req.sender_last_assigned_tsn()); - if (unwrapped_cum_ack_tsn >= unwrapped_sla_tsn) { - RTC_DLOG(LS_VERBOSE) << log_prefix_ - << "Leaving deferred reset processing with tsn=" - << *cum_ack_tsn << ", feeding back " - << deferred_reset_streams_->deferred_chunks.size() - << " chunks"; - // https://tools.ietf.org/html/rfc6525#section-5.2.2 - // "... streams MUST be reset to 0 as the next expected SSN." - streams_->ResetStreams(deferred_reset_streams_->req.stream_ids()); - std::vector> deferred_chunks = - std::move(deferred_reset_streams_->deferred_chunks); - // The response will not be sent now, but as a reply to the retried - // request, which will come as "in progress" has been sent prior. - last_completed_reset_req_seq_nbr_ = - deferred_reset_streams_->req.request_sequence_number(); - deferred_reset_streams_ = absl::nullopt; - - // https://tools.ietf.org/html/rfc6525#section-5.2.2 - // "Any queued TSNs (queued at step E2) MUST now be released and processed - // normally." - for (auto& [tsn, data] : deferred_chunks) { - queued_bytes_ -= data.size(); - Add(tsn, std::move(data)); - } - - RTC_DCHECK(IsConsistent()); - return true; - } else { - RTC_DLOG(LS_VERBOSE) << "Staying in deferred reset processing. tsn=" - << *cum_ack_tsn; + RTC_DLOG(LS_VERBOSE) << log_prefix_ + << "Leaving deferred reset processing, feeding back " + << deferred_reset_streams_->deferred_actions.size() + << " actions"; + // https://tools.ietf.org/html/rfc6525#section-5.2.2 + // "Any queued TSNs (queued at step E2) MUST now be released and processed + // normally." 
+ auto deferred_actions = + std::move(deferred_reset_streams_->deferred_actions); + deferred_reset_streams_ = std::nullopt; + + for (auto& action : deferred_actions) { + action(); } } - return false; + RTC_DCHECK(IsConsistent()); +} + +void ReassemblyQueue::EnterDeferredReset( + TSN sender_last_assigned_tsn, + webrtc::ArrayView streams) { + if (!deferred_reset_streams_.has_value()) { + RTC_DLOG(LS_VERBOSE) << log_prefix_ + << "Entering deferred reset; sender_last_assigned_tsn=" + << *sender_last_assigned_tsn; + deferred_reset_streams_ = std::make_optional( + tsn_unwrapper_.Unwrap(sender_last_assigned_tsn), + webrtc::flat_set(streams.begin(), streams.end())); + } + RTC_DCHECK(IsConsistent()); } std::vector ReassemblyQueue::FlushMessages() { @@ -206,86 +165,56 @@ std::vector ReassemblyQueue::FlushMessages() { } void ReassemblyQueue::AddReassembledMessage( - rtc::ArrayView tsns, + webrtc::ArrayView tsns, DcSctpMessage message) { RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Assembled message from TSN=[" - << StrJoin(tsns, ",", - [](rtc::StringBuilder& sb, UnwrappedTSN tsn) { - sb << *tsn.Wrap(); - }) + << webrtc::StrJoin( + tsns, ",", + [](webrtc::StringBuilder& sb, UnwrappedTSN tsn) { + sb << *tsn.Wrap(); + }) << "], message; stream_id=" << *message.stream_id() << ", ppid=" << *message.ppid() << ", payload=" << message.payload().size() << " bytes"; - for (const UnwrappedTSN tsn : tsns) { - if (tsn <= last_assembled_tsn_watermark_) { - // This can be provoked by a misbehaving peer by sending FORWARD-TSN with - // invalid SSNs, allowing ordered messages to stay in the queue that - // should've been discarded. - RTC_DLOG(LS_VERBOSE) - << log_prefix_ - << "Message is built from fragments already seen - skipping"; - return; - } else if (tsn == last_assembled_tsn_watermark_.next_value()) { - // Update watermark, or insert into delivered_tsns_ - last_assembled_tsn_watermark_.Increment(); - } else { - delivered_tsns_.insert(tsn); - } - } - - // With new TSNs in delivered_tsns, gaps might be filled. - MaybeMoveLastAssembledWatermarkFurther(); - reassembled_messages_.emplace_back(std::move(message)); } -void ReassemblyQueue::MaybeMoveLastAssembledWatermarkFurther() { - // `delivered_tsns_` contain TSNS when there is a gap between ranges of - // assembled TSNs. `last_assembled_tsn_watermark_` should not be adjacent to - // that list, because if so, it can be moved. 
- while (!delivered_tsns_.empty() && - *delivered_tsns_.begin() == - last_assembled_tsn_watermark_.next_value()) { - last_assembled_tsn_watermark_.Increment(); - delivered_tsns_.erase(delivered_tsns_.begin()); - } -} +void ReassemblyQueue::HandleForwardTsn( + TSN new_cumulative_tsn, + webrtc::ArrayView<const AnyForwardTsnChunk::SkippedStream> + skipped_streams) { + UnwrappedTSN tsn = tsn_unwrapper_.Unwrap(new_cumulative_tsn); -void ReassemblyQueue::Handle(const AnyForwardTsnChunk& forward_tsn) { - RTC_DCHECK(IsConsistent()); - UnwrappedTSN tsn = tsn_unwrapper_.Unwrap(forward_tsn.new_cumulative_tsn()); - - last_assembled_tsn_watermark_ = std::max(last_assembled_tsn_watermark_, tsn); - delivered_tsns_.erase(delivered_tsns_.begin(), - delivered_tsns_.upper_bound(tsn)); - - MaybeMoveLastAssembledWatermarkFurther(); + if (deferred_reset_streams_.has_value() && + tsn > deferred_reset_streams_->sender_last_assigned_tsn) { + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "ForwardTSN to " << *tsn.Wrap() + << " - deferring."; + deferred_reset_streams_->deferred_actions.emplace_back( + [this, new_cumulative_tsn, + streams = std::vector<AnyForwardTsnChunk::SkippedStream>( + skipped_streams.begin(), skipped_streams.end())] { + HandleForwardTsn(new_cumulative_tsn, streams); + }); + RTC_DCHECK(IsConsistent()); + return; + } - queued_bytes_ -= - streams_->HandleForwardTsn(tsn, forward_tsn.skipped_streams()); + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "ForwardTSN to " << *tsn.Wrap() + << " - performing."; + queued_bytes_ -= streams_->HandleForwardTsn(tsn, skipped_streams); RTC_DCHECK(IsConsistent()); } bool ReassemblyQueue::IsConsistent() const { - // `delivered_tsns_` and `last_assembled_tsn_watermark_` mustn't overlap or be - // adjacent. - if (!delivered_tsns_.empty() && - last_assembled_tsn_watermark_.next_value() >= *delivered_tsns_.begin()) { - return false; - } - // Allow queued_bytes_ to be larger than max_size_bytes, as it's not actively - // enforced in this class. This comparison will still trigger if queued_bytes_ - became "negative". - return (queued_bytes_ >= 0 && queued_bytes_ <= 2 * max_size_bytes_); + // enforced in this class. But if it were to wrap around (go "negative", + // which for an unsigned value means becoming very large), this would trigger. + return (queued_bytes_ <= 2 * max_size_bytes_); } HandoverReadinessStatus ReassemblyQueue::GetHandoverReadiness() const { HandoverReadinessStatus status = streams_->GetHandoverReadiness(); - if (!delivered_tsns_.empty()) { - status.Add(HandoverUnreadinessReason::kReassemblyQueueDeliveredTSNsGap); - } if (deferred_reset_streams_.has_value()) { status.Add(HandoverUnreadinessReason::kStreamResetDeferred); } @@ -293,20 +222,10 @@ HandoverReadinessStatus ReassemblyQueue::GetHandoverReadiness() const { } void ReassemblyQueue::AddHandoverState(DcSctpSocketHandoverState& state) { - state.rx.last_assembled_tsn = last_assembled_tsn_watermark_.Wrap().value(); - state.rx.last_completed_deferred_reset_req_sn = - last_completed_reset_req_seq_nbr_.value(); streams_->AddHandoverState(state); } void ReassemblyQueue::RestoreFromState(const DcSctpSocketHandoverState& state) { - // Validate that the component is in pristine state.
- RTC_DCHECK(last_completed_reset_req_seq_nbr_ == ReconfigRequestSN(0)); - - last_assembled_tsn_watermark_ = - tsn_unwrapper_.Unwrap(TSN(state.rx.last_assembled_tsn)); - last_completed_reset_req_seq_nbr_ = - ReconfigRequestSN(state.rx.last_completed_deferred_reset_req_sn); streams_->RestoreFromState(state); } } // namespace dcsctp diff --git a/net/dcsctp/rx/reassembly_queue.h b/net/dcsctp/rx/reassembly_queue.h index e1f231e2a3..7308fdd227 100644 --- a/net/dcsctp/rx/reassembly_queue.h +++ b/net/dcsctp/rx/reassembly_queue.h @@ -19,6 +19,7 @@ #include #include +#include "absl/functional/any_invocable.h" #include "absl/strings/string_view.h" #include "api/array_view.h" #include "net/dcsctp/common/internal_types.h" @@ -27,9 +28,9 @@ #include "net/dcsctp/packet/data.h" #include "net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h" #include "net/dcsctp/packet/parameter/reconfiguration_response_parameter.h" -#include "net/dcsctp/public/dcsctp_handover_state.h" #include "net/dcsctp/public/dcsctp_message.h" #include "net/dcsctp/rx/reassembly_streams.h" +#include "rtc_base/containers/flat_set.h" namespace dcsctp { @@ -70,7 +71,6 @@ class ReassemblyQueue { static constexpr float kHighWatermarkLimit = 0.9; ReassemblyQueue(absl::string_view log_prefix, - TSN peer_initial_tsn, size_t max_size_bytes, bool use_message_interleaving = false); @@ -88,18 +88,19 @@ class ReassemblyQueue { // Handle a ForwardTSN chunk, when the sender has indicated that the received // (this class) should forget about some chunks. This is used to implement // partial reliability. - void Handle(const AnyForwardTsnChunk& forward_tsn); + void HandleForwardTsn( + TSN new_cumulative_tsn, + webrtc::ArrayView + skipped_streams); - // Given the reset stream request and the current cum_tsn_ack, might either - // reset the streams directly (returns kSuccessPerformed), or at a later time, - // by entering the "deferred reset processing" mode (returns kInProgress). - ReconfigurationResponseParameter::Result ResetStreams( - const OutgoingSSNResetRequestParameter& req, - TSN cum_tsn_ack); + // Resets the provided streams and leaves deferred reset processing, if + // enabled. + void ResetStreamsAndLeaveDeferredReset( + webrtc::ArrayView stream_ids); - // Given the current (updated) cum_tsn_ack, might leave "defererred reset - // processing" mode and reset streams. Returns true if so. - bool MaybeResetStreamsDeferred(TSN cum_ack_tsn); + // Enters deferred reset processing. + void EnterDeferredReset(TSN sender_last_assigned_tsn, + webrtc::ArrayView streams); // The number of payload bytes that have been queued. 
Note that the actual // memory usage is higher due to additional overhead of tracking received @@ -126,39 +127,32 @@ class ReassemblyQueue { void RestoreFromState(const DcSctpSocketHandoverState& state); private: - bool IsConsistent() const; - void AddReassembledMessage(rtc::ArrayView tsns, - DcSctpMessage message); - void MaybeMoveLastAssembledWatermarkFurther(); - struct DeferredResetStreams { - explicit DeferredResetStreams(OutgoingSSNResetRequestParameter req) - : req(std::move(req)) {} - OutgoingSSNResetRequestParameter req; - std::vector> deferred_chunks; + DeferredResetStreams(UnwrappedTSN sender_last_assigned_tsn, + webrtc::flat_set streams) + : sender_last_assigned_tsn(sender_last_assigned_tsn), + streams(std::move(streams)) {} + + UnwrappedTSN sender_last_assigned_tsn; + webrtc::flat_set streams; + std::vector> deferred_actions; }; + bool IsConsistent() const; + void AddReassembledMessage(webrtc::ArrayView tsns, + DcSctpMessage message); + const absl::string_view log_prefix_; const size_t max_size_bytes_; const size_t watermark_bytes_; UnwrappedTSN::Unwrapper tsn_unwrapper_; - // Whenever a message has been assembled, either increase - // `last_assembled_tsn_watermark_` or - if there are gaps - add the message's - // TSNs into delivered_tsns_ so that messages are not re-delivered on - // duplicate chunks. - UnwrappedTSN last_assembled_tsn_watermark_; - std::set delivered_tsns_; // Messages that have been reassembled, and will be returned by // `FlushMessages`. std::vector reassembled_messages_; // If present, "deferred reset processing" mode is active. - absl::optional deferred_reset_streams_; - - // Contains the last request sequence number of the - // OutgoingSSNResetRequestParameter that was performed. - ReconfigRequestSN last_completed_reset_req_seq_nbr_; + std::optional deferred_reset_streams_; // The number of "payload bytes" that are in this queue, in total. size_t queued_bytes_ = 0; diff --git a/net/dcsctp/rx/reassembly_queue_test.cc b/net/dcsctp/rx/reassembly_queue_test.cc index 549bc6fce1..dc15aceb08 100644 --- a/net/dcsctp/rx/reassembly_queue_test.cc +++ b/net/dcsctp/rx/reassembly_queue_test.cc @@ -34,6 +34,7 @@ namespace { using ::testing::ElementsAre; using ::testing::SizeIs; using ::testing::UnorderedElementsAre; +using SkippedStream = AnyForwardTsnChunk::SkippedStream; // The default maximum size of the Reassembly Queue. 
static constexpr size_t kBufferSize = 10000; @@ -80,13 +81,13 @@ class ReassemblyQueueTest : public testing::Test { }; TEST_F(ReassemblyQueueTest, EmptyQueue) { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize); + ReassemblyQueue reasm("log: ", kBufferSize); EXPECT_FALSE(reasm.HasMessages()); EXPECT_EQ(reasm.queued_bytes(), 0u); } TEST_F(ReassemblyQueueTest, SingleUnorderedChunkMessage) { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize); + ReassemblyQueue reasm("log: ", kBufferSize); reasm.Add(TSN(10), gen_.Unordered({1, 2, 3, 4}, "BE")); EXPECT_TRUE(reasm.HasMessages()); EXPECT_THAT(reasm.FlushMessages(), @@ -96,9 +97,9 @@ TEST_F(ReassemblyQueueTest, SingleUnorderedChunkMessage) { TEST_F(ReassemblyQueueTest, LargeUnorderedChunkAllPermutations) { std::vector tsns = {10, 11, 12, 13}; - rtc::ArrayView payload(kLongPayload); + webrtc::ArrayView payload(kLongPayload); do { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize); + ReassemblyQueue reasm("log: ", kBufferSize); for (size_t i = 0; i < tsns.size(); i++) { auto span = payload.subview((tsns[i] - 10) * 4, 4); @@ -122,7 +123,7 @@ TEST_F(ReassemblyQueueTest, LargeUnorderedChunkAllPermutations) { } TEST_F(ReassemblyQueueTest, SingleOrderedChunkMessage) { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize); + ReassemblyQueue reasm("log: ", kBufferSize); reasm.Add(TSN(10), gen_.Ordered({1, 2, 3, 4}, "BE")); EXPECT_EQ(reasm.queued_bytes(), 0u); EXPECT_TRUE(reasm.HasMessages()); @@ -132,9 +133,9 @@ TEST_F(ReassemblyQueueTest, SingleOrderedChunkMessage) { TEST_F(ReassemblyQueueTest, ManySmallOrderedMessages) { std::vector tsns = {10, 11, 12, 13}; - rtc::ArrayView payload(kLongPayload); + webrtc::ArrayView payload(kLongPayload); do { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize); + ReassemblyQueue reasm("log: ", kBufferSize); for (size_t i = 0; i < tsns.size(); i++) { auto span = payload.subview((tsns[i] - 10) * 4, 4); Data::IsBeginning is_beginning(true); @@ -157,7 +158,7 @@ TEST_F(ReassemblyQueueTest, ManySmallOrderedMessages) { } TEST_F(ReassemblyQueueTest, RetransmissionInLargeOrdered) { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize); + ReassemblyQueue reasm("log: ", kBufferSize); reasm.Add(TSN(10), gen_.Ordered({1}, "B")); reasm.Add(TSN(12), gen_.Ordered({3})); reasm.Add(TSN(13), gen_.Ordered({4})); @@ -182,7 +183,7 @@ TEST_F(ReassemblyQueueTest, RetransmissionInLargeOrdered) { } TEST_F(ReassemblyQueueTest, ForwardTSNRemoveUnordered) { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize); + ReassemblyQueue reasm("log: ", kBufferSize); reasm.Add(TSN(10), gen_.Unordered({1}, "B")); reasm.Add(TSN(12), gen_.Unordered({3})); reasm.Add(TSN(13), gen_.Unordered({4}, "E")); @@ -194,12 +195,7 @@ TEST_F(ReassemblyQueueTest, ForwardTSNRemoveUnordered) { EXPECT_FALSE(reasm.HasMessages()); - reasm.Handle(ForwardTsnChunk(TSN(13), {})); - EXPECT_EQ(reasm.queued_bytes(), 3u); - - // The lost chunk comes, but too late. - reasm.Add(TSN(11), gen_.Unordered({2})); - EXPECT_FALSE(reasm.HasMessages()); + reasm.HandleForwardTsn(TSN(13), std::vector()); EXPECT_EQ(reasm.queued_bytes(), 3u); // The second lost chunk comes, message is assembled. 
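// Editorial sketch (not part of the patch): the test hunks above migrate from
// Handle(ForwardTsnChunk(...)) to the new HandleForwardTsn(TSN,
// ArrayView<const SkippedStream>) entry point. A minimal illustration of the
// post-patch API in the style of this test file; the test name and the
// TSN/payload values are illustrative only, and the Data constructor
// arguments mirror the ones used elsewhere in these tests.
TEST_F(ReassemblyQueueTest, SketchOfForwardTsnUsage) {
  ReassemblyQueue reasm("log: ", kBufferSize);
  // A lone "beginning" fragment arrives; the rest of the message is lost.
  reasm.Add(TSN(10),
            Data(kStreamID, SSN(0), MID(0), FSN(0), kPPID,
                 std::vector<uint8_t>(10), Data::IsBeginning(true),
                 Data::IsEnd(false), IsUnordered(true)));
  EXPECT_FALSE(reasm.HasMessages());
  // The sender abandons the message (partial reliability) and advances the
  // cumulative TSN past the lost fragments with a FORWARD-TSN.
  reasm.HandleForwardTsn(TSN(12),
                         std::vector<AnyForwardTsnChunk::SkippedStream>());
  // The buffered fragment is discarded, so nothing can be delivered later.
  EXPECT_FALSE(reasm.HasMessages());
}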
@@ -209,7 +205,7 @@ TEST_F(ReassemblyQueueTest, ForwardTSNRemoveUnordered) { } TEST_F(ReassemblyQueueTest, ForwardTSNRemoveOrdered) { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize); + ReassemblyQueue reasm("log: ", kBufferSize); reasm.Add(TSN(10), gen_.Ordered({1}, "B")); reasm.Add(TSN(12), gen_.Ordered({3})); reasm.Add(TSN(13), gen_.Ordered({4}, "E")); @@ -222,8 +218,8 @@ TEST_F(ReassemblyQueueTest, ForwardTSNRemoveOrdered) { EXPECT_FALSE(reasm.HasMessages()); - reasm.Handle(ForwardTsnChunk( - TSN(13), {ForwardTsnChunk::SkippedStream(kStreamID, kSSN)})); + reasm.HandleForwardTsn( + TSN(13), std::vector({SkippedStream(kStreamID, kSSN)})); EXPECT_EQ(reasm.queued_bytes(), 0u); // The lost chunk comes, but too late. @@ -233,7 +229,7 @@ TEST_F(ReassemblyQueueTest, ForwardTSNRemoveOrdered) { } TEST_F(ReassemblyQueueTest, ForwardTSNRemoveALotOrdered) { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize); + ReassemblyQueue reasm("log: ", kBufferSize); reasm.Add(TSN(10), gen_.Ordered({1}, "B")); reasm.Add(TSN(12), gen_.Ordered({3})); reasm.Add(TSN(13), gen_.Ordered({4}, "E")); @@ -246,8 +242,8 @@ TEST_F(ReassemblyQueueTest, ForwardTSNRemoveALotOrdered) { EXPECT_FALSE(reasm.HasMessages()); - reasm.Handle(ForwardTsnChunk( - TSN(13), {ForwardTsnChunk::SkippedStream(kStreamID, kSSN)})); + reasm.HandleForwardTsn( + TSN(13), std::vector({SkippedStream(kStreamID, kSSN)})); EXPECT_EQ(reasm.queued_bytes(), 0u); // The lost chunk comes, but too late. @@ -256,126 +252,31 @@ TEST_F(ReassemblyQueueTest, ForwardTSNRemoveALotOrdered) { ElementsAre(SctpMessageIs(kStreamID, kPPID, kMessage2Payload))); } -TEST_F(ReassemblyQueueTest, ShouldntDeliverMessagesBeforeInitialTsn) { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize); - reasm.Add(TSN(5), gen_.Unordered({1, 2, 3, 4}, "BE")); - EXPECT_EQ(reasm.queued_bytes(), 0u); - EXPECT_FALSE(reasm.HasMessages()); -} - -TEST_F(ReassemblyQueueTest, ShouldntRedeliverUnorderedMessages) { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize); - reasm.Add(TSN(10), gen_.Unordered({1, 2, 3, 4}, "BE")); - EXPECT_EQ(reasm.queued_bytes(), 0u); - EXPECT_TRUE(reasm.HasMessages()); - EXPECT_THAT(reasm.FlushMessages(), - ElementsAre(SctpMessageIs(kStreamID, kPPID, kShortPayload))); - reasm.Add(TSN(10), gen_.Unordered({1, 2, 3, 4}, "BE")); - EXPECT_EQ(reasm.queued_bytes(), 0u); - EXPECT_FALSE(reasm.HasMessages()); -} - -TEST_F(ReassemblyQueueTest, ShouldntRedeliverUnorderedMessagesReallyUnordered) { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize); - reasm.Add(TSN(10), gen_.Unordered({1, 2, 3, 4}, "B")); - EXPECT_EQ(reasm.queued_bytes(), 4u); - - EXPECT_FALSE(reasm.HasMessages()); - - reasm.Add(TSN(12), gen_.Unordered({1, 2, 3, 4}, "BE")); - EXPECT_EQ(reasm.queued_bytes(), 4u); - EXPECT_TRUE(reasm.HasMessages()); - - EXPECT_THAT(reasm.FlushMessages(), - ElementsAre(SctpMessageIs(kStreamID, kPPID, kShortPayload))); - reasm.Add(TSN(12), gen_.Unordered({1, 2, 3, 4}, "BE")); - EXPECT_EQ(reasm.queued_bytes(), 4u); - EXPECT_FALSE(reasm.HasMessages()); -} - -TEST_F(ReassemblyQueueTest, ShouldntDeliverBeforeForwardedTsn) { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize); - reasm.Handle(ForwardTsnChunk(TSN(12), {})); - - reasm.Add(TSN(12), gen_.Unordered({1, 2, 3, 4}, "BE")); - EXPECT_EQ(reasm.queued_bytes(), 0u); - EXPECT_FALSE(reasm.HasMessages()); -} - -TEST_F(ReassemblyQueueTest, NotReadyForHandoverWhenDeliveredTsnsHaveGap) { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize); - reasm.Add(TSN(10), gen_.Unordered({1, 2, 3, 4}, "B")); - 
EXPECT_FALSE(reasm.HasMessages()); - - reasm.Add(TSN(12), gen_.Unordered({1, 2, 3, 4}, "BE")); - EXPECT_TRUE(reasm.HasMessages()); - EXPECT_EQ( - reasm.GetHandoverReadiness(), - HandoverReadinessStatus() - .Add(HandoverUnreadinessReason::kReassemblyQueueDeliveredTSNsGap) - .Add( - HandoverUnreadinessReason::kUnorderedStreamHasUnassembledChunks)); - - EXPECT_THAT(reasm.FlushMessages(), - ElementsAre(SctpMessageIs(kStreamID, kPPID, kShortPayload))); - EXPECT_EQ( - reasm.GetHandoverReadiness(), - HandoverReadinessStatus() - .Add(HandoverUnreadinessReason::kReassemblyQueueDeliveredTSNsGap) - .Add( - HandoverUnreadinessReason::kUnorderedStreamHasUnassembledChunks)); - - reasm.Handle(ForwardTsnChunk(TSN(13), {})); - EXPECT_EQ(reasm.GetHandoverReadiness(), HandoverReadinessStatus()); -} - TEST_F(ReassemblyQueueTest, NotReadyForHandoverWhenResetStreamIsDeferred) { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize); - DataGeneratorOptions opts; - opts.message_id = MID(0); - reasm.Add(TSN(10), gen_.Ordered({1, 2, 3, 4}, "BE", opts)); - opts.message_id = MID(1); - reasm.Add(TSN(11), gen_.Ordered({1, 2, 3, 4}, "BE", opts)); + ReassemblyQueue reasm("log: ", kBufferSize); + reasm.Add(TSN(10), gen_.Ordered({1, 2, 3, 4}, "BE", {.mid = MID(0)})); + reasm.Add(TSN(11), gen_.Ordered({1, 2, 3, 4}, "BE", {.mid = MID(1)})); EXPECT_THAT(reasm.FlushMessages(), SizeIs(2)); - reasm.ResetStreams( - OutgoingSSNResetRequestParameter( - ReconfigRequestSN(10), ReconfigRequestSN(3), TSN(13), {StreamID(1)}), - TSN(11)); + reasm.EnterDeferredReset(TSN(12), std::vector({StreamID(1)})); EXPECT_EQ(reasm.GetHandoverReadiness(), HandoverReadinessStatus().Add( HandoverUnreadinessReason::kStreamResetDeferred)); - opts.message_id = MID(3); - opts.ppid = PPID(3); - reasm.Add(TSN(13), gen_.Ordered({1, 2, 3, 4}, "BE", opts)); - reasm.MaybeResetStreamsDeferred(TSN(11)); - - opts.message_id = MID(2); - opts.ppid = PPID(2); - reasm.Add(TSN(13), gen_.Ordered({1, 2, 3, 4}, "BE", opts)); - reasm.MaybeResetStreamsDeferred(TSN(15)); - EXPECT_EQ(reasm.GetHandoverReadiness(), - HandoverReadinessStatus().Add( - HandoverUnreadinessReason::kReassemblyQueueDeliveredTSNsGap)); - - EXPECT_THAT(reasm.FlushMessages(), SizeIs(2)); - EXPECT_EQ(reasm.GetHandoverReadiness(), - HandoverReadinessStatus().Add( - HandoverUnreadinessReason::kReassemblyQueueDeliveredTSNsGap)); + reasm.Add(TSN(12), gen_.Ordered({1, 2, 3, 4}, "BE", {.mid = MID(2)})); - reasm.Handle(ForwardTsnChunk(TSN(15), {})); + reasm.ResetStreamsAndLeaveDeferredReset(std::vector({StreamID(1)})); EXPECT_EQ(reasm.GetHandoverReadiness(), HandoverReadinessStatus()); } TEST_F(ReassemblyQueueTest, HandoverInInitialState) { - ReassemblyQueue reasm1("log: ", TSN(10), kBufferSize); + ReassemblyQueue reasm1("log: ", kBufferSize); EXPECT_EQ(reasm1.GetHandoverReadiness(), HandoverReadinessStatus()); DcSctpSocketHandoverState state; reasm1.AddHandoverState(state); g_handover_state_transformer_for_test(&state); - ReassemblyQueue reasm2("log: ", TSN(100), kBufferSize, + ReassemblyQueue reasm2("log: ", kBufferSize, /*use_message_interleaving=*/false); reasm2.RestoreFromState(state); @@ -384,7 +285,7 @@ TEST_F(ReassemblyQueueTest, HandoverInInitialState) { } TEST_F(ReassemblyQueueTest, HandoverAfterHavingAssembedOneMessage) { - ReassemblyQueue reasm1("log: ", TSN(10), kBufferSize); + ReassemblyQueue reasm1("log: ", kBufferSize); reasm1.Add(TSN(10), gen_.Ordered({1, 2, 3, 4}, "BE")); EXPECT_THAT(reasm1.FlushMessages(), SizeIs(1)); @@ -392,7 +293,7 @@ TEST_F(ReassemblyQueueTest, 
HandoverAfterHavingAssembedOneMessage) { DcSctpSocketHandoverState state; reasm1.AddHandoverState(state); g_handover_state_transformer_for_test(&state); - ReassemblyQueue reasm2("log: ", TSN(100), kBufferSize, + ReassemblyQueue reasm2("log: ", kBufferSize, /*use_message_interleaving=*/false); reasm2.RestoreFromState(state); @@ -400,24 +301,8 @@ TEST_F(ReassemblyQueueTest, HandoverAfterHavingAssembedOneMessage) { EXPECT_THAT(reasm2.FlushMessages(), SizeIs(1)); } -TEST_F(ReassemblyQueueTest, HandleInconsistentForwardTSN) { - // Found when fuzzing. - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize); - // Add TSN=43, SSN=7. Can't be reassembled as previous SSNs aren't known. - reasm.Add(TSN(43), Data(kStreamID, SSN(7), MID(0), FSN(0), kPPID, - std::vector(10), Data::IsBeginning(true), - Data::IsEnd(true), IsUnordered(false))); - - // Invalid, as TSN=44 have to have SSN>=7, but peer says 6. - reasm.Handle(ForwardTsnChunk( - TSN(44), {ForwardTsnChunk::SkippedStream(kStreamID, SSN(6))})); - - // Don't assemble SSN=7, as that TSN is skipped. - EXPECT_FALSE(reasm.HasMessages()); -} - TEST_F(ReassemblyQueueTest, SingleUnorderedChunkMessageInRfc8260) { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize, + ReassemblyQueue reasm("log: ", kBufferSize, /*use_message_interleaving=*/true); reasm.Add(TSN(10), Data(StreamID(1), SSN(0), MID(0), FSN(0), kPPID, {1, 2, 3, 4}, Data::IsBeginning(true), @@ -429,7 +314,7 @@ TEST_F(ReassemblyQueueTest, SingleUnorderedChunkMessageInRfc8260) { } TEST_F(ReassemblyQueueTest, TwoInterleavedChunks) { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize, + ReassemblyQueue reasm("log: ", kBufferSize, /*use_message_interleaving=*/true); reasm.Add(TSN(10), Data(StreamID(1), SSN(0), MID(0), FSN(0), kPPID, {1, 2, 3, 4}, Data::IsBeginning(true), @@ -458,9 +343,9 @@ TEST_F(ReassemblyQueueTest, UnorderedInterleavedMessagesAllPermutations) { StreamID stream_ids[] = {StreamID(1), StreamID(2), StreamID(1), StreamID(1), StreamID(2), StreamID(2)}; FSN fsns[] = {FSN(0), FSN(0), FSN(1), FSN(2), FSN(1), FSN(2)}; - rtc::ArrayView payload(kSixBytePayload); + webrtc::ArrayView payload(kSixBytePayload); do { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize, + ReassemblyQueue reasm("log: ", kBufferSize, /*use_message_interleaving=*/true); for (int i : indexes) { auto span = payload.subview(*fsns[i] * 2, 2); @@ -480,7 +365,7 @@ TEST_F(ReassemblyQueueTest, UnorderedInterleavedMessagesAllPermutations) { } TEST_F(ReassemblyQueueTest, IForwardTSNRemoveALotOrdered) { - ReassemblyQueue reasm("log: ", TSN(10), kBufferSize, + ReassemblyQueue reasm("log: ", kBufferSize, /*use_message_interleaving=*/true); reasm.Add(TSN(10), gen_.Ordered({1}, "B")); gen_.Ordered({2}, ""); @@ -494,9 +379,8 @@ TEST_F(ReassemblyQueueTest, IForwardTSNRemoveALotOrdered) { ASSERT_FALSE(reasm.HasMessages()); EXPECT_EQ(reasm.queued_bytes(), 7u); - reasm.Handle( - IForwardTsnChunk(TSN(13), {IForwardTsnChunk::SkippedStream( - IsUnordered(false), kStreamID, MID(0))})); + reasm.HandleForwardTsn(TSN(13), std::vector({SkippedStream( + IsUnordered(false), kStreamID, MID(0))})); EXPECT_EQ(reasm.queued_bytes(), 0u); // The lost chunk comes, but too late. diff --git a/net/dcsctp/rx/reassembly_streams.cc b/net/dcsctp/rx/reassembly_streams.cc deleted file mode 100644 index 9fd52fb15d..0000000000 --- a/net/dcsctp/rx/reassembly_streams.cc +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "net/dcsctp/rx/reassembly_streams.h" - -#include -#include -#include - -namespace dcsctp { - -ReassembledMessage AssembleMessage(std::map::iterator start, - std::map::iterator end) { - size_t count = std::distance(start, end); - - if (count == 1) { - // Fast path - zero-copy - Data& data = start->second; - - return ReassembledMessage{ - .tsns = {start->first}, - .message = DcSctpMessage(data.stream_id, data.ppid, - std::move(start->second.payload)), - }; - } - - // Slow path - will need to concatenate the payload. - std::vector tsns; - std::vector payload; - - size_t payload_size = std::accumulate( - start, end, 0, - [](size_t v, const auto& p) { return v + p.second.size(); }); - - tsns.reserve(count); - payload.reserve(payload_size); - for (auto it = start; it != end; ++it) { - Data& data = it->second; - tsns.push_back(it->first); - payload.insert(payload.end(), data.payload.begin(), data.payload.end()); - } - - return ReassembledMessage{ - .tsns = std::move(tsns), - .message = DcSctpMessage(start->second.stream_id, start->second.ppid, - std::move(payload)), - }; -} -} // namespace dcsctp diff --git a/net/dcsctp/rx/reassembly_streams.h b/net/dcsctp/rx/reassembly_streams.h index 0ecfac0c0a..fab596645e 100644 --- a/net/dcsctp/rx/reassembly_streams.h +++ b/net/dcsctp/rx/reassembly_streams.h @@ -46,7 +46,7 @@ class ReassemblyStreams { // message has been assembled as well as indicating from which TSNs this // message was assembled from. using OnAssembledMessage = - std::function tsns, + std::function tsns, DcSctpMessage message)>; virtual ~ReassemblyStreams() = default; @@ -71,13 +71,13 @@ class ReassemblyStreams { // this operation. virtual size_t HandleForwardTsn( UnwrappedTSN new_cumulative_ack_tsn, - rtc::ArrayView + webrtc::ArrayView skipped_streams) = 0; // Called for incoming (possibly deferred) RE_CONFIG chunks asking for // either a few streams, or all streams (when the list is empty) to be // reset - to have their next SSN or Message ID to be zero. - virtual void ResetStreams(rtc::ArrayView stream_ids) = 0; + virtual void ResetStreams(webrtc::ArrayView stream_ids) = 0; virtual HandoverReadinessStatus GetHandoverReadiness() const = 0; virtual void AddHandoverState(DcSctpSocketHandoverState& state) = 0; diff --git a/net/dcsctp/rx/traditional_reassembly_streams.cc b/net/dcsctp/rx/traditional_reassembly_streams.cc index dce6c90131..aca565d668 100644 --- a/net/dcsctp/rx/traditional_reassembly_streams.cc +++ b/net/dcsctp/rx/traditional_reassembly_streams.cc @@ -16,11 +16,11 @@ #include #include #include +#include #include #include #include "absl/algorithm/container.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/common/sequence_numbers.h" #include "net/dcsctp/packet/chunk/forward_tsn_common.h" @@ -34,8 +34,8 @@ namespace { // Given a map (`chunks`) and an iterator to within that map (`iter`), this // function will return an iterator to the first chunk in that message, which // has the `is_beginning` flag set. If there are any gaps, or if the beginning -// can't be found, `absl::nullopt` is returned. -absl::optional::iterator> FindBeginning( +// can't be found, `std::nullopt` is returned. 
+std::optional::iterator> FindBeginning( const std::map& chunks, std::map::iterator iter) { UnwrappedTSN prev_tsn = iter->first; @@ -44,11 +44,11 @@ absl::optional::iterator> FindBeginning( return iter; } if (iter == chunks.begin()) { - return absl::nullopt; + return std::nullopt; } --iter; if (iter->first.next_value() != prev_tsn) { - return absl::nullopt; + return std::nullopt; } prev_tsn = iter->first; } @@ -57,8 +57,8 @@ absl::optional::iterator> FindBeginning( // Given a map (`chunks`) and an iterator to within that map (`iter`), this // function will return an iterator to the chunk after the last chunk in that // message, which has the `is_end` flag set. If there are any gaps, or if the -// end can't be found, `absl::nullopt` is returned. -absl::optional::iterator> FindEnd( +// end can't be found, `std::nullopt` is returned. +std::optional::iterator> FindEnd( std::map& chunks, std::map::iterator iter) { UnwrappedTSN prev_tsn = iter->first; @@ -68,10 +68,10 @@ absl::optional::iterator> FindEnd( } ++iter; if (iter == chunks.end()) { - return absl::nullopt; + return std::nullopt; } if (iter->first != prev_tsn.next_value()) { - return absl::nullopt; + return std::nullopt; } prev_tsn = iter->first; } @@ -86,6 +86,11 @@ TraditionalReassemblyStreams::TraditionalReassemblyStreams( int TraditionalReassemblyStreams::UnorderedStream::Add(UnwrappedTSN tsn, Data data) { + if (data.is_beginning && data.is_end) { + // Fastpath for already assembled chunks. + AssembleMessage(tsn, std::move(data)); + return 0; + } int queued_bytes = data.size(); auto [it, inserted] = chunks_.emplace(tsn, std::move(data)); if (!inserted) { @@ -103,11 +108,11 @@ size_t TraditionalReassemblyStreams::UnorderedStream::TryToAssembleMessage( // message, which can be inefficient for very large values of N. This could be // optimized by e.g. only trying to assemble a message once _any_ beginning // and _any_ end has been found. - absl::optional start = FindBeginning(chunks_, iter); + std::optional start = FindBeginning(chunks_, iter); if (!start.has_value()) { return 0; } - absl::optional end = FindEnd(chunks_, iter); + std::optional end = FindEnd(chunks_, iter); if (!end.has_value()) { return 0; } @@ -124,12 +129,7 @@ size_t TraditionalReassemblyStreams::StreamBase::AssembleMessage( if (count == 1) { // Fast path - zero-copy - const Data& data = start->second; - size_t payload_size = start->second.size(); - UnwrappedTSN tsns[1] = {start->first}; - DcSctpMessage message(data.stream_id, data.ppid, std::move(data.payload)); - parent_.on_assembled_message_(tsns, std::move(message)); - return payload_size; + return AssembleMessage(start->first, std::move(start->second)); } // Slow path - will need to concatenate the payload. 
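// Editorial sketch (not part of the patch): the hunks above add a fast path to
// UnorderedStream::Add - a chunk carrying both the "beginning" and "end" flags
// is a complete message and is assembled at once, without ever entering the
// chunk map. A simplified, self-contained restatement of that control flow;
// `assemble` and `try_to_assemble` stand in for the real AssembleMessage /
// TryToAssembleMessage members, and the tail mirrors the pre-existing slow
// path that is only partly visible in the hunk above.
template <typename AssembleFn, typename TryToAssembleFn>
int AddChunkSketch(std::map<UnwrappedTSN, Data>& chunks,
                   UnwrappedTSN tsn,
                   Data data,
                   AssembleFn assemble,
                   TryToAssembleFn try_to_assemble) {
  if (data.is_beginning && data.is_end) {
    // Fast path: deliver the single-chunk message right away; nothing queued.
    assemble(tsn, std::move(data));
    return 0;
  }
  int queued_bytes = data.size();
  auto [it, inserted] = chunks.emplace(tsn, std::move(data));
  if (!inserted) {
    return 0;  // Duplicate TSN - nothing new was queued.
  }
  // The new fragment may have completed a B..E run; subtract what assembled.
  queued_bytes -= try_to_assemble(it);
  return queued_bytes;
}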
@@ -155,6 +155,17 @@ size_t TraditionalReassemblyStreams::StreamBase::AssembleMessage( return payload_size; } +size_t TraditionalReassemblyStreams::StreamBase::AssembleMessage( + UnwrappedTSN tsn, + Data data) { + // Fast path - zero-copy + size_t payload_size = data.size(); + UnwrappedTSN tsns[1] = {tsn}; + DcSctpMessage message(data.stream_id, data.ppid, std::move(data.payload)); + parent_.on_assembled_message_(tsns, std::move(message)); + return payload_size; +} + size_t TraditionalReassemblyStreams::UnorderedStream::EraseTo( UnwrappedTSN tsn) { auto end_iter = chunks_.upper_bound(tsn); @@ -202,20 +213,40 @@ size_t TraditionalReassemblyStreams::OrderedStream::TryToAssembleMessages() { return assembled_bytes; } +size_t +TraditionalReassemblyStreams::OrderedStream::TryToAssembleMessagesFastpath( + UnwrappedSSN ssn, + UnwrappedTSN tsn, + Data data) { + RTC_DCHECK(ssn == next_ssn_); + size_t assembled_bytes = 0; + if (data.is_beginning && data.is_end) { + assembled_bytes += AssembleMessage(tsn, std::move(data)); + next_ssn_.Increment(); + } else { + size_t queued_bytes = data.size(); + auto [iter, inserted] = chunks_by_ssn_[ssn].emplace(tsn, std::move(data)); + if (!inserted) { + // Not actually assembled, but deduplicated meaning queued size doesn't + // include this message. + return queued_bytes; + } + } + return assembled_bytes + TryToAssembleMessages(); +} + int TraditionalReassemblyStreams::OrderedStream::Add(UnwrappedTSN tsn, Data data) { int queued_bytes = data.size(); - UnwrappedSSN ssn = ssn_unwrapper_.Unwrap(data.ssn); - auto [unused, inserted] = chunks_by_ssn_[ssn].emplace(tsn, std::move(data)); + if (ssn == next_ssn_) { + return queued_bytes - + TryToAssembleMessagesFastpath(ssn, tsn, std::move(data)); + } + auto [iter, inserted] = chunks_by_ssn_[ssn].emplace(tsn, std::move(data)); if (!inserted) { return 0; } - - if (ssn == next_ssn_) { - queued_bytes -= TryToAssembleMessages(); - } - return queued_bytes; } @@ -253,7 +284,8 @@ int TraditionalReassemblyStreams::Add(UnwrappedTSN tsn, Data data) { size_t TraditionalReassemblyStreams::HandleForwardTsn( UnwrappedTSN new_cumulative_ack_tsn, - rtc::ArrayView skipped_streams) { + webrtc::ArrayView + skipped_streams) { size_t bytes_removed = 0; // The `skipped_streams` only cover ordered messages - need to // iterate all unordered streams manually to remove those chunks. 
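// Editorial sketch (not part of the patch): as the comment above notes, a
// FORWARD-TSN only enumerates the ordered streams being skipped, while
// unordered fragments are pruned purely by TSN. Callers now build the
// skipped-stream list themselves; the two SkippedStream constructors
// exercised by the updated tests in this patch are shown below, with
// illustrative stream/SSN/MID values.
void SkippedStreamExamples() {
  // FORWARD-TSN (traditional reassembly): an ordered stream is identified by
  // (stream id, SSN).
  AnyForwardTsnChunk::SkippedStream ordered(StreamID(1), SSN(0));
  // I-FORWARD-TSN (RFC 8260, interleaved reassembly): a stream is identified
  // by (unordered flag, stream id, MID).
  AnyForwardTsnChunk::SkippedStream interleaved(IsUnordered(false), StreamID(1),
                                                MID(0));
  (void)ordered;
  (void)interleaved;
}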
@@ -271,7 +303,7 @@ size_t TraditionalReassemblyStreams::HandleForwardTsn( } void TraditionalReassemblyStreams::ResetStreams( - rtc::ArrayView<const StreamID> stream_ids) { + webrtc::ArrayView<const StreamID> stream_ids) { if (stream_ids.empty()) { for (auto& [stream_id, stream] : ordered_streams_) { RTC_DLOG(LS_VERBOSE) << log_prefix_ diff --git a/net/dcsctp/rx/traditional_reassembly_streams.h b/net/dcsctp/rx/traditional_reassembly_streams.h index d355c599ae..f755c5f0bd 100644 --- a/net/dcsctp/rx/traditional_reassembly_streams.h +++ b/net/dcsctp/rx/traditional_reassembly_streams.h @@ -36,10 +36,10 @@ class TraditionalReassemblyStreams : public ReassemblyStreams { size_t HandleForwardTsn( UnwrappedTSN new_cumulative_ack_tsn, - rtc::ArrayView<const AnyForwardTsnChunk::SkippedStream> skipped_streams) - override; + webrtc::ArrayView<const AnyForwardTsnChunk::SkippedStream> + skipped_streams) override; - void ResetStreams(rtc::ArrayView<const StreamID> stream_ids) override; + void ResetStreams(webrtc::ArrayView<const StreamID> stream_ids) override; HandoverReadinessStatus GetHandoverReadiness() const override; void AddHandoverState(DcSctpSocketHandoverState& state) override; @@ -55,6 +55,7 @@ class TraditionalReassemblyStreams : public ReassemblyStreams { : parent_(*parent) {} size_t AssembleMessage(ChunkMap::iterator start, ChunkMap::iterator end); + size_t AssembleMessage(UnwrappedTSN tsn, Data data); TraditionalReassemblyStreams& parent_; }; @@ -101,6 +102,11 @@ class TraditionalReassemblyStreams : public ReassemblyStreams { // Returns the number of bytes assembled if a message was assembled. size_t TryToAssembleMessage(); size_t TryToAssembleMessages(); + // Same as above, but avoids inserting into the map when the chunk for the + // next expected SSN is already a complete message. + size_t TryToAssembleMessagesFastpath(UnwrappedSSN ssn, + UnwrappedTSN tsn, + Data data); // This must be an ordered container to be able to iterate in SSN order.
std::map chunks_by_ssn_; UnwrappedSSN::Unwrapper ssn_unwrapper_; diff --git a/net/dcsctp/rx/traditional_reassembly_streams_test.cc b/net/dcsctp/rx/traditional_reassembly_streams_test.cc index 341870442d..9aa0cec40b 100644 --- a/net/dcsctp/rx/traditional_reassembly_streams_test.cc +++ b/net/dcsctp/rx/traditional_reassembly_streams_test.cc @@ -253,5 +253,62 @@ TEST_F(TraditionalReassemblyStreamsTest, CanDeleteFirstOrderedMessage) { EXPECT_EQ(streams.Add(tsn(2), gen_.Ordered({2, 3, 4}, "BE")), 0); } +TEST_F(TraditionalReassemblyStreamsTest, CanReassembleFastPathUnordered) { + NiceMock> on_assembled; + + { + testing::InSequence s; + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(1)), + Property(&DcSctpMessage::payload, ElementsAre(1)))); + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(3)), + Property(&DcSctpMessage::payload, ElementsAre(3)))); + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(2)), + Property(&DcSctpMessage::payload, ElementsAre(2)))); + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(4)), + Property(&DcSctpMessage::payload, ElementsAre(4)))); + } + + TraditionalReassemblyStreams streams("", on_assembled.AsStdFunction()); + + EXPECT_EQ(streams.Add(tsn(1), gen_.Unordered({1}, "BE")), 0); + EXPECT_EQ(streams.Add(tsn(3), gen_.Unordered({3}, "BE")), 0); + EXPECT_EQ(streams.Add(tsn(2), gen_.Unordered({2}, "BE")), 0); + EXPECT_EQ(streams.Add(tsn(4), gen_.Unordered({4}, "BE")), 0); +} + +TEST_F(TraditionalReassemblyStreamsTest, CanReassembleFastPathOrdered) { + NiceMock> on_assembled; + + { + testing::InSequence s; + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(1)), + Property(&DcSctpMessage::payload, ElementsAre(1)))); + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(2)), + Property(&DcSctpMessage::payload, ElementsAre(2)))); + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(3)), + Property(&DcSctpMessage::payload, ElementsAre(3)))); + EXPECT_CALL(on_assembled, + Call(ElementsAre(tsn(4)), + Property(&DcSctpMessage::payload, ElementsAre(4)))); + } + + TraditionalReassemblyStreams streams("", on_assembled.AsStdFunction()); + + Data data1 = gen_.Ordered({1}, "BE"); + Data data2 = gen_.Ordered({2}, "BE"); + Data data3 = gen_.Ordered({3}, "BE"); + Data data4 = gen_.Ordered({4}, "BE"); + EXPECT_EQ(streams.Add(tsn(1), std::move(data1)), 0); + EXPECT_EQ(streams.Add(tsn(3), std::move(data3)), 1); + EXPECT_EQ(streams.Add(tsn(2), std::move(data2)), -1); + EXPECT_EQ(streams.Add(tsn(4), std::move(data4)), 0); +} } // namespace } // namespace dcsctp diff --git a/net/dcsctp/socket/BUILD.gn b/net/dcsctp/socket/BUILD.gn index 681ddd47e9..f6fbe7f3e3 100644 --- a/net/dcsctp/socket/BUILD.gn +++ b/net/dcsctp/socket/BUILD.gn @@ -11,18 +11,20 @@ import("../../../webrtc.gni") rtc_source_set("context") { sources = [ "context.h" ] deps = [ + "../../../api/units:time_delta", "../common:internal_types", "../packet:sctp_packet", "../public:socket", "../public:types", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("heartbeat_handler") { deps = [ ":context", "../../../api:array_view", + "../../../api/units:time_delta", "../../../rtc_base:checks", "../../../rtc_base:logging", "../packet:bounded_io", @@ -32,27 +34,25 @@ rtc_library("heartbeat_handler") { "../public:socket", "../public:types", "../timer", + "//third_party/abseil-cpp/absl/functional:bind_front", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "heartbeat_handler.cc", "heartbeat_handler.h", ] - absl_deps = [ - 
"//third_party/abseil-cpp/absl/functional:bind_front", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("stream_reset_handler") { deps = [ ":context", "../../../api:array_view", + "../../../api/units:time_delta", "../../../rtc_base:checks", "../../../rtc_base:logging", + "../../../rtc_base:stringutils", "../../../rtc_base/containers:flat_set", "../common:internal_types", - "../common:str_join", "../packet:chunk", "../packet:parameter", "../packet:sctp_packet", @@ -63,16 +63,13 @@ rtc_library("stream_reset_handler") { "../rx:reassembly_queue", "../timer", "../tx:retransmission_queue", + "//third_party/abseil-cpp/absl/functional:bind_front", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "stream_reset_handler.cc", "stream_reset_handler.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/functional:bind_front", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("packet_sender") { @@ -86,7 +83,6 @@ rtc_library("packet_sender") { "packet_sender.cc", "packet_sender.h", ] - absl_deps = [] } rtc_library("transmission_control_block") { @@ -97,6 +93,7 @@ rtc_library("transmission_control_block") { ":stream_reset_handler", "../../../api:array_view", "../../../api/task_queue:task_queue", + "../../../api/units:time_delta", "../../../rtc_base:checks", "../../../rtc_base:logging", "../../../rtc_base:stringutils", @@ -112,17 +109,14 @@ rtc_library("transmission_control_block") { "../tx:retransmission_queue", "../tx:retransmission_timeout", "../tx:send_queue", + "//third_party/abseil-cpp/absl/functional:bind_front", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "capabilities.h", "transmission_control_block.cc", "transmission_control_block.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/functional:bind_front", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("dcsctp_socket") { @@ -136,7 +130,6 @@ rtc_library("dcsctp_socket") { "../../../api:make_ref_counted", "../../../api:refcountedbase", "../../../api:scoped_refptr", - "../../../api:sequence_checker", "../../../api/task_queue:task_queue", "../../../rtc_base:checks", "../../../rtc_base:logging", @@ -160,6 +153,9 @@ rtc_library("dcsctp_socket") { "../tx:retransmission_timeout", "../tx:rr_send_queue", "../tx:send_queue", + "//third_party/abseil-cpp/absl/functional:bind_front", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "callback_deferrer.cc", @@ -169,12 +165,6 @@ rtc_library("dcsctp_socket") { "state_cookie.cc", "state_cookie.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/functional:bind_front", - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } if (rtc_include_tests) { @@ -190,10 +180,7 @@ if (rtc_include_tests) { "../public:socket", "../public:types", "../timer", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -208,10 +195,7 @@ if (rtc_include_tests) { "../packet:sctp_packet", "../public:socket", "../public:types", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -231,12 +215,13 @@ if (rtc_include_tests) { 
"../../../api:network_emulation_manager_api", "../../../api/task_queue", "../../../api/task_queue:pending_task_safety_flag", + "../../../api/units:data_rate", "../../../api/units:time_delta", - "../../../call:simulated_network", "../../../rtc_base:checks", "../../../rtc_base:copy_on_write_buffer", "../../../rtc_base:gunit_helpers", "../../../rtc_base:logging", + "../../../rtc_base:random", "../../../rtc_base:rtc_base_tests_utils", "../../../rtc_base:socket_address", "../../../rtc_base:stringutils", @@ -261,12 +246,9 @@ if (rtc_include_tests) { "../timer:task_queue_timeout", "../tx:mock_send_queue", "../tx:retransmission_queue", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "dcsctp_socket_network_test.cc", diff --git a/net/dcsctp/socket/callback_deferrer.cc b/net/dcsctp/socket/callback_deferrer.cc index 123526e782..a17ba75d9f 100644 --- a/net/dcsctp/socket/callback_deferrer.cc +++ b/net/dcsctp/socket/callback_deferrer.cc @@ -12,31 +12,6 @@ #include "api/make_ref_counted.h" namespace dcsctp { -namespace { -// A wrapper around the move-only DcSctpMessage, to let it be captured in a -// lambda. -class MessageDeliverer { - public: - explicit MessageDeliverer(DcSctpMessage&& message) - : state_(rtc::make_ref_counted(std::move(message))) {} - - void Deliver(DcSctpSocketCallbacks& c) { - // Really ensure that it's only called once. - RTC_DCHECK(!state_->has_delivered); - state_->has_delivered = true; - c.OnMessageReceived(std::move(state_->message)); - } - - private: - struct State : public rtc::RefCountInterface { - explicit State(DcSctpMessage&& m) - : has_delivered(false), message(std::move(m)) {} - bool has_delivered; - DcSctpMessage message; - }; - rtc::scoped_refptr state_; -}; -} // namespace void CallbackDeferrer::Prepare() { RTC_DCHECK(!prepared_); @@ -48,17 +23,21 @@ void CallbackDeferrer::TriggerDeferred() { // callback, and that might result in adding new callbacks to this instance, // and the vector can't be modified while iterated on. RTC_DCHECK(prepared_); - std::vector> deferred; - deferred.swap(deferred_); prepared_ = false; - - for (auto& cb : deferred) { - cb(underlying_); + if (deferred_.empty()) { + return; + } + std::vector> deferred; + // Reserve a small buffer to prevent too much reallocation on growth. + deferred.reserve(8); + deferred.swap(deferred_); + for (auto& [cb, data] : deferred) { + cb(std::move(data), underlying_); } } SendPacketStatus CallbackDeferrer::SendPacketWithStatus( - rtc::ArrayView data) { + webrtc::ArrayView data) { // Will not be deferred - call directly. return underlying_.SendPacketWithStatus(data); } @@ -70,6 +49,8 @@ std::unique_ptr CallbackDeferrer::CreateTimeout( } TimeMs CallbackDeferrer::TimeMillis() { + // This should not be called by the library - it's migrated to `Now()`. + RTC_DCHECK(false); // Will not be deferred - call directly. 
return underlying_.TimeMillis(); } @@ -82,83 +63,111 @@ uint32_t CallbackDeferrer::GetRandomInt(uint32_t low, uint32_t high) { void CallbackDeferrer::OnMessageReceived(DcSctpMessage message) { RTC_DCHECK(prepared_); deferred_.emplace_back( - [deliverer = MessageDeliverer(std::move(message))]( - DcSctpSocketCallbacks& cb) mutable { deliverer.Deliver(cb); }); + +[](CallbackData data, DcSctpSocketCallbacks& cb) { + return cb.OnMessageReceived(std::get(std::move(data))); + }, + std::move(message)); } void CallbackDeferrer::OnError(ErrorKind error, absl::string_view message) { RTC_DCHECK(prepared_); deferred_.emplace_back( - [error, message = std::string(message)](DcSctpSocketCallbacks& cb) { - cb.OnError(error, message); - }); + +[](CallbackData data, DcSctpSocketCallbacks& cb) { + Error error = std::get(std::move(data)); + return cb.OnError(error.error, error.message); + }, + Error{error, std::string(message)}); } void CallbackDeferrer::OnAborted(ErrorKind error, absl::string_view message) { RTC_DCHECK(prepared_); deferred_.emplace_back( - [error, message = std::string(message)](DcSctpSocketCallbacks& cb) { - cb.OnAborted(error, message); - }); + +[](CallbackData data, DcSctpSocketCallbacks& cb) { + Error error = std::get(std::move(data)); + return cb.OnAborted(error.error, error.message); + }, + Error{error, std::string(message)}); } void CallbackDeferrer::OnConnected() { RTC_DCHECK(prepared_); - deferred_.emplace_back([](DcSctpSocketCallbacks& cb) { cb.OnConnected(); }); + deferred_.emplace_back( + +[](CallbackData /* data */, DcSctpSocketCallbacks& cb) { + return cb.OnConnected(); + }, + std::monostate{}); } void CallbackDeferrer::OnClosed() { RTC_DCHECK(prepared_); - deferred_.emplace_back([](DcSctpSocketCallbacks& cb) { cb.OnClosed(); }); + deferred_.emplace_back( + +[](CallbackData /* data */, DcSctpSocketCallbacks& cb) { + return cb.OnClosed(); + }, + std::monostate{}); } void CallbackDeferrer::OnConnectionRestarted() { RTC_DCHECK(prepared_); deferred_.emplace_back( - [](DcSctpSocketCallbacks& cb) { cb.OnConnectionRestarted(); }); + +[](CallbackData /* data */, DcSctpSocketCallbacks& cb) { + return cb.OnConnectionRestarted(); + }, + std::monostate{}); } void CallbackDeferrer::OnStreamsResetFailed( - rtc::ArrayView outgoing_streams, + webrtc::ArrayView outgoing_streams, absl::string_view reason) { RTC_DCHECK(prepared_); deferred_.emplace_back( - [streams = std::vector(outgoing_streams.begin(), - outgoing_streams.end()), - reason = std::string(reason)](DcSctpSocketCallbacks& cb) { - cb.OnStreamsResetFailed(streams, reason); - }); + +[](CallbackData data, DcSctpSocketCallbacks& cb) { + StreamReset stream_reset = std::get(std::move(data)); + return cb.OnStreamsResetFailed(stream_reset.streams, + stream_reset.message); + }, + StreamReset{{outgoing_streams.begin(), outgoing_streams.end()}, + std::string(reason)}); } void CallbackDeferrer::OnStreamsResetPerformed( - rtc::ArrayView outgoing_streams) { + webrtc::ArrayView outgoing_streams) { RTC_DCHECK(prepared_); deferred_.emplace_back( - [streams = std::vector(outgoing_streams.begin(), - outgoing_streams.end())]( - DcSctpSocketCallbacks& cb) { cb.OnStreamsResetPerformed(streams); }); + +[](CallbackData data, DcSctpSocketCallbacks& cb) { + StreamReset stream_reset = std::get(std::move(data)); + return cb.OnStreamsResetPerformed(stream_reset.streams); + }, + StreamReset{{outgoing_streams.begin(), outgoing_streams.end()}}); } void CallbackDeferrer::OnIncomingStreamsReset( - rtc::ArrayView incoming_streams) { + webrtc::ArrayView incoming_streams) 
{ RTC_DCHECK(prepared_); deferred_.emplace_back( - [streams = std::vector(incoming_streams.begin(), - incoming_streams.end())]( - DcSctpSocketCallbacks& cb) { cb.OnIncomingStreamsReset(streams); }); + +[](CallbackData data, DcSctpSocketCallbacks& cb) { + StreamReset stream_reset = std::get(std::move(data)); + return cb.OnIncomingStreamsReset(stream_reset.streams); + }, + StreamReset{{incoming_streams.begin(), incoming_streams.end()}}); } void CallbackDeferrer::OnBufferedAmountLow(StreamID stream_id) { RTC_DCHECK(prepared_); - deferred_.emplace_back([stream_id](DcSctpSocketCallbacks& cb) { - cb.OnBufferedAmountLow(stream_id); - }); + deferred_.emplace_back( + +[](CallbackData data, DcSctpSocketCallbacks& cb) { + return cb.OnBufferedAmountLow(std::get(std::move(data))); + }, + stream_id); } void CallbackDeferrer::OnTotalBufferedAmountLow() { RTC_DCHECK(prepared_); deferred_.emplace_back( - [](DcSctpSocketCallbacks& cb) { cb.OnTotalBufferedAmountLow(); }); + +[](CallbackData /* data */, DcSctpSocketCallbacks& cb) { + return cb.OnTotalBufferedAmountLow(); + }, + std::monostate{}); } void CallbackDeferrer::OnLifecycleMessageExpired(LifecycleId lifecycle_id, diff --git a/net/dcsctp/socket/callback_deferrer.h b/net/dcsctp/socket/callback_deferrer.h index 1c35dda6cf..d7cb0db2a3 100644 --- a/net/dcsctp/socket/callback_deferrer.h +++ b/net/dcsctp/socket/callback_deferrer.h @@ -15,6 +15,7 @@ #include #include #include +#include #include #include "absl/strings/string_view.h" @@ -61,10 +62,11 @@ class CallbackDeferrer : public DcSctpSocketCallbacks { // Implementation of DcSctpSocketCallbacks SendPacketStatus SendPacketWithStatus( - rtc::ArrayView data) override; + webrtc::ArrayView data) override; std::unique_ptr CreateTimeout( webrtc::TaskQueueBase::DelayPrecision precision) override; TimeMs TimeMillis() override; + webrtc::Timestamp Now() override { return underlying_.Now(); } uint32_t GetRandomInt(uint32_t low, uint32_t high) override; void OnMessageReceived(DcSctpMessage message) override; void OnError(ErrorKind error, absl::string_view message) override; @@ -72,12 +74,12 @@ class CallbackDeferrer : public DcSctpSocketCallbacks { void OnConnected() override; void OnClosed() override; void OnConnectionRestarted() override; - void OnStreamsResetFailed(rtc::ArrayView outgoing_streams, + void OnStreamsResetFailed(webrtc::ArrayView outgoing_streams, absl::string_view reason) override; void OnStreamsResetPerformed( - rtc::ArrayView outgoing_streams) override; + webrtc::ArrayView outgoing_streams) override; void OnIncomingStreamsReset( - rtc::ArrayView incoming_streams) override; + webrtc::ArrayView incoming_streams) override; void OnBufferedAmountLow(StreamID stream_id) override; void OnTotalBufferedAmountLow() override; @@ -88,12 +90,26 @@ class CallbackDeferrer : public DcSctpSocketCallbacks { void OnLifecycleEnd(LifecycleId lifecycle_id) override; private: + struct Error { + ErrorKind error; + std::string message; + }; + struct StreamReset { + std::vector streams; + std::string message; + }; + // Use a pre-sized variant for storage to avoid double heap allocation. This + // variant can hold all cases of stored data. 
+ using CallbackData = + std::variant<std::monostate, DcSctpMessage, Error, StreamReset, StreamID>; + using Callback = void (*)(CallbackData, DcSctpSocketCallbacks&); + void Prepare(); void TriggerDeferred(); DcSctpSocketCallbacks& underlying_; bool prepared_ = false; - std::vector<std::function<void(DcSctpSocketCallbacks&)>> deferred_; + std::vector<std::pair<Callback, CallbackData>> deferred_; }; } // namespace dcsctp diff --git a/net/dcsctp/socket/capabilities.h b/net/dcsctp/socket/capabilities.h index fa3be37d12..9b1bff0a90 100644 --- a/net/dcsctp/socket/capabilities.h +++ b/net/dcsctp/socket/capabilities.h @@ -21,6 +21,8 @@ struct Capabilities { bool message_interleaving = false; // RFC6525 Stream Reconfiguration bool reconfig = false; + // https://datatracker.ietf.org/doc/draft-ietf-tsvwg-sctp-zero-checksum/ + bool zero_checksum = false; // Negotiated maximum incoming and outgoing stream count. uint16_t negotiated_maximum_incoming_streams = 0; uint16_t negotiated_maximum_outgoing_streams = 0; diff --git a/net/dcsctp/socket/context.h b/net/dcsctp/socket/context.h index eca5b9e4fb..8e970e8c8e 100644 --- a/net/dcsctp/socket/context.h +++ b/net/dcsctp/socket/context.h @@ -13,6 +13,7 @@ #include #include "absl/strings/string_view.h" +#include "api/units/time_delta.h" #include "net/dcsctp/common/internal_types.h" #include "net/dcsctp/packet/sctp_packet.h" #include "net/dcsctp/public/dcsctp_socket.h" @@ -39,11 +40,11 @@ class Context { // Returns the socket callbacks. virtual DcSctpSocketCallbacks& callbacks() const = 0; - // Observes a measured RTT value, in milliseconds. - virtual void ObserveRTT(DurationMs rtt_ms) = 0; + // Observes a measured RTT value. + virtual void ObserveRTT(webrtc::TimeDelta rtt_ms) = 0; // Returns the current Retransmission Timeout (rto) value, in milliseconds. - virtual DurationMs current_rto() const = 0; + virtual webrtc::TimeDelta current_rto() const = 0; // Increments the transmission error counter, given a human readable reason.
virtual bool IncrementTxErrorCounter(absl::string_view reason) = 0; diff --git a/net/dcsctp/socket/dcsctp_socket.cc b/net/dcsctp/socket/dcsctp_socket.cc index 712fceaa66..6c889cb482 100644 --- a/net/dcsctp/socket/dcsctp_socket.cc +++ b/net/dcsctp/socket/dcsctp_socket.cc @@ -13,6 +13,7 @@ #include #include #include +#include #include #include #include @@ -20,9 +21,7 @@ #include "absl/functional/bind_front.h" #include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" -#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/packet/chunk/abort_chunk.h" #include "net/dcsctp/packet/chunk/chunk.h" #include "net/dcsctp/packet/chunk/cookie_ack_chunk.h" @@ -56,6 +55,7 @@ #include "net/dcsctp/packet/parameter/parameter.h" #include "net/dcsctp/packet/parameter/state_cookie_parameter.h" #include "net/dcsctp/packet/parameter/supported_extensions_parameter.h" +#include "net/dcsctp/packet/parameter/zero_checksum_acceptable_chunk_parameter.h" #include "net/dcsctp/packet/sctp_packet.h" #include "net/dcsctp/packet/tlv_trait.h" #include "net/dcsctp/public/dcsctp_message.h" @@ -81,6 +81,8 @@ namespace dcsctp { namespace { +using ::webrtc::TimeDelta; +using ::webrtc::Timestamp; // https://tools.ietf.org/html/rfc4960#section-5.1 constexpr uint32_t kMinVerificationTag = 1; @@ -95,7 +97,7 @@ Capabilities ComputeCapabilities(const DcSctpOptions& options, uint16_t peer_nbr_inbound_streams, const Parameters& parameters) { Capabilities capabilities; - absl::optional supported_extensions = + std::optional supported_extensions = parameters.get(); if (options.enable_partial_reliability) { @@ -117,6 +119,15 @@ Capabilities ComputeCapabilities(const DcSctpOptions& options, capabilities.reconfig = true; } + if (options.zero_checksum_alternate_error_detection_method != + ZeroChecksumAlternateErrorDetectionMethod::None() && + parameters.get().has_value() && + parameters.get() + ->error_detection_method() == + options.zero_checksum_alternate_error_detection_method) { + capabilities.zero_checksum = true; + } + capabilities.negotiated_maximum_incoming_streams = std::min( options.announced_maximum_incoming_streams, peer_nbr_outbound_streams); capabilities.negotiated_maximum_outgoing_streams = std::min( @@ -126,6 +137,7 @@ Capabilities ComputeCapabilities(const DcSctpOptions& options, } void AddCapabilityParameters(const DcSctpOptions& options, + bool support_zero_checksum, Parameters::Builder& builder) { std::vector chunk_types = {ReConfigChunk::kType}; @@ -137,6 +149,12 @@ void AddCapabilityParameters(const DcSctpOptions& options, chunk_types.push_back(IDataChunk::kType); chunk_types.push_back(IForwardTsnChunk::kType); } + if (support_zero_checksum) { + RTC_DCHECK(options.zero_checksum_alternate_error_detection_method != + ZeroChecksumAlternateErrorDetectionMethod::None()); + builder.Add(ZeroChecksumAcceptableChunkParameter( + options.zero_checksum_alternate_error_detection_method)); + } builder.Add(SupportedExtensionsParameter(std::move(chunk_types))); } @@ -150,7 +168,7 @@ TieTag MakeTieTag(DcSctpSocketCallbacks& cb) { } SctpImplementation DeterminePeerImplementation( - rtc::ArrayView cookie) { + webrtc::ArrayView cookie) { if (cookie.size() > 8) { absl::string_view magic(reinterpret_cast(cookie.data()), 8); if (magic == "dcSCTP00") { @@ -178,26 +196,25 @@ DcSctpSocket::DcSctpSocket(absl::string_view log_prefix, t1_init_(timer_manager_.CreateTimer( "t1-init", absl::bind_front(&DcSctpSocket::OnInitTimerExpiry, this), - 
TimerOptions(options.t1_init_timeout, + TimerOptions(options.t1_init_timeout.ToTimeDelta(), TimerBackoffAlgorithm::kExponential, options.max_init_retransmits))), t1_cookie_(timer_manager_.CreateTimer( "t1-cookie", absl::bind_front(&DcSctpSocket::OnCookieTimerExpiry, this), - TimerOptions(options.t1_cookie_timeout, + TimerOptions(options.t1_cookie_timeout.ToTimeDelta(), TimerBackoffAlgorithm::kExponential, options.max_init_retransmits))), t2_shutdown_(timer_manager_.CreateTimer( "t2-shutdown", absl::bind_front(&DcSctpSocket::OnShutdownTimerExpiry, this), - TimerOptions(options.t2_shutdown_timeout, + TimerOptions(options.t2_shutdown_timeout.ToTimeDelta(), TimerBackoffAlgorithm::kExponential, options.max_retransmissions))), packet_sender_(callbacks_, absl::bind_front(&DcSctpSocket::OnSentPacket, this)), send_queue_(log_prefix_, &callbacks_, - options_.max_send_buffer_size, options_.mtu, options_.default_stream_priority, options_.total_buffered_amount_low_threshold) {} @@ -271,7 +288,11 @@ void DcSctpSocket::SetState(State state, absl::string_view reason) { void DcSctpSocket::SendInit() { Parameters::Builder params_builder; - AddCapabilityParameters(options_, params_builder); + AddCapabilityParameters( + options_, /*support_zero_checksum=*/ + options_.zero_checksum_alternate_error_detection_method != + ZeroChecksumAlternateErrorDetectionMethod::None(), + params_builder); InitChunk init(/*initiate_tag=*/connect_params_.verification_tag, /*a_rwnd=*/options_.max_receiver_window_buffer_size, options_.announced_maximum_outgoing_streams, @@ -279,26 +300,23 @@ void DcSctpSocket::SendInit() { connect_params_.initial_tsn, params_builder.Build()); SctpPacket::Builder b(VerificationTag(0), options_); b.Add(init); - packet_sender_.Send(b); -} - -void DcSctpSocket::MakeConnectionParameters() { - VerificationTag new_verification_tag( - callbacks_.GetRandomInt(kMinVerificationTag, kMaxVerificationTag)); - TSN initial_tsn(callbacks_.GetRandomInt(kMinInitialTsn, kMaxInitialTsn)); - connect_params_.initial_tsn = initial_tsn; - connect_params_.verification_tag = new_verification_tag; + // https://www.ietf.org/archive/id/draft-tuexen-tsvwg-sctp-zero-checksum-01.html#section-4.2 + // "When an end point sends a packet containing an INIT chunk, it MUST include + // a correct CRC32c checksum in the packet containing the INIT chunk." + packet_sender_.Send(b, /*write_checksum=*/true); } void DcSctpSocket::Connect() { - RTC_DCHECK_RUN_ON(&thread_checker_); CallbackDeferrer::ScopedDeferrer deferrer(callbacks_); if (state_ == State::kClosed) { - MakeConnectionParameters(); + connect_params_.initial_tsn = + TSN(callbacks_.GetRandomInt(kMinInitialTsn, kMaxInitialTsn)); + connect_params_.verification_tag = VerificationTag( + callbacks_.GetRandomInt(kMinVerificationTag, kMaxVerificationTag)); RTC_DLOG(LS_INFO) << log_prefix() - << rtc::StringFormat( + << webrtc::StringFormat( "Connecting. 
my_verification_tag=%08x, my_initial_tsn=%u", *connect_params_.verification_tag, *connect_params_.initial_tsn); SendInit(); @@ -320,6 +338,7 @@ void DcSctpSocket::CreateTransmissionControlBlock( size_t a_rwnd, TieTag tie_tag) { metrics_.uses_message_interleaving = capabilities.message_interleaving; + metrics_.uses_zero_checksum = capabilities.zero_checksum; metrics_.negotiated_maximum_incoming_streams = capabilities.negotiated_maximum_incoming_streams; metrics_.negotiated_maximum_outgoing_streams = @@ -333,7 +352,6 @@ void DcSctpSocket::CreateTransmissionControlBlock( } void DcSctpSocket::RestoreFromState(const DcSctpSocketHandoverState& state) { - RTC_DCHECK_RUN_ON(&thread_checker_); CallbackDeferrer::ScopedDeferrer deferrer(callbacks_); if (state_ != State::kClosed) { @@ -351,6 +369,7 @@ void DcSctpSocket::RestoreFromState(const DcSctpSocketHandoverState& state) { capabilities.message_interleaving = state.capabilities.message_interleaving; capabilities.reconfig = state.capabilities.reconfig; + capabilities.zero_checksum = state.capabilities.zero_checksum; capabilities.negotiated_maximum_incoming_streams = state.capabilities.negotiated_maximum_incoming_streams; capabilities.negotiated_maximum_outgoing_streams = @@ -375,7 +394,6 @@ void DcSctpSocket::RestoreFromState(const DcSctpSocketHandoverState& state) { } void DcSctpSocket::Shutdown() { - RTC_DCHECK_RUN_ON(&thread_checker_); CallbackDeferrer::ScopedDeferrer deferrer(callbacks_); if (tcb_ != nullptr) { @@ -404,7 +422,6 @@ void DcSctpSocket::Shutdown() { } void DcSctpSocket::Close() { - RTC_DCHECK_RUN_ON(&thread_checker_); CallbackDeferrer::ScopedDeferrer deferrer(callbacks_); if (state_ != State::kClosed) { @@ -452,20 +469,51 @@ void DcSctpSocket::InternalClose(ErrorKind error, absl::string_view message) { void DcSctpSocket::SetStreamPriority(StreamID stream_id, StreamPriority priority) { - RTC_DCHECK_RUN_ON(&thread_checker_); send_queue_.SetStreamPriority(stream_id, priority); } StreamPriority DcSctpSocket::GetStreamPriority(StreamID stream_id) const { - RTC_DCHECK_RUN_ON(&thread_checker_); return send_queue_.GetStreamPriority(stream_id); } SendStatus DcSctpSocket::Send(DcSctpMessage message, const SendOptions& send_options) { - RTC_DCHECK_RUN_ON(&thread_checker_); CallbackDeferrer::ScopedDeferrer deferrer(callbacks_); - LifecycleId lifecycle_id = send_options.lifecycle_id; + SendStatus send_status = InternalSend(message, send_options); + if (send_status != SendStatus::kSuccess) + return send_status; + Timestamp now = callbacks_.Now(); + ++metrics_.tx_messages_count; + send_queue_.Add(now, std::move(message), send_options); + if (tcb_ != nullptr) + tcb_->SendBufferedPackets(now); + RTC_DCHECK(IsConsistent()); + return SendStatus::kSuccess; +} + +std::vector DcSctpSocket::SendMany( + webrtc::ArrayView messages, + const SendOptions& send_options) { + CallbackDeferrer::ScopedDeferrer deferrer(callbacks_); + Timestamp now = callbacks_.Now(); + std::vector send_statuses; + send_statuses.reserve(messages.size()); + for (DcSctpMessage& message : messages) { + SendStatus send_status = InternalSend(message, send_options); + send_statuses.push_back(send_status); + if (send_status != SendStatus::kSuccess) + continue; + ++metrics_.tx_messages_count; + send_queue_.Add(now, std::move(message), send_options); + } + if (tcb_ != nullptr) + tcb_->SendBufferedPackets(now); + RTC_DCHECK(IsConsistent()); + return send_statuses; +} +SendStatus DcSctpSocket::InternalSend(const DcSctpMessage& message, + const SendOptions& send_options) { + LifecycleId 
lifecycle_id = send_options.lifecycle_id; if (message.payload().empty()) { if (lifecycle_id.IsSet()) { callbacks_.OnLifecycleEnd(lifecycle_id); @@ -495,7 +543,9 @@ SendStatus DcSctpSocket::Send(DcSctpMessage message, "Unable to send message as the socket is shutting down"); return SendStatus::kErrorShuttingDown; } - if (send_queue_.IsFull()) { + if (send_queue_.total_buffered_amount() >= options_.max_send_buffer_size || + send_queue_.buffered_amount(message.stream_id()) >= + options_.per_stream_send_queue_limit) { if (lifecycle_id.IsSet()) { callbacks_.OnLifecycleEnd(lifecycle_id); } @@ -503,21 +553,11 @@ SendStatus DcSctpSocket::Send(DcSctpMessage message, "Unable to send message as the send queue is full"); return SendStatus::kErrorResourceExhaustion; } - - TimeMs now = callbacks_.TimeMillis(); - ++metrics_.tx_messages_count; - send_queue_.Add(now, std::move(message), send_options); - if (tcb_ != nullptr) { - tcb_->SendBufferedPackets(now); - } - - RTC_DCHECK(IsConsistent()); return SendStatus::kSuccess; } ResetStreamsStatus DcSctpSocket::ResetStreams( - rtc::ArrayView outgoing_streams) { - RTC_DCHECK_RUN_ON(&thread_checker_); + webrtc::ArrayView outgoing_streams) { CallbackDeferrer::ScopedDeferrer deferrer(callbacks_); if (tcb_ == nullptr) { @@ -539,7 +579,6 @@ ResetStreamsStatus DcSctpSocket::ResetStreams( } SocketState DcSctpSocket::state() const { - RTC_DCHECK_RUN_ON(&thread_checker_); switch (state_) { case State::kClosed: return SocketState::kClosed; @@ -557,40 +596,34 @@ SocketState DcSctpSocket::state() const { } void DcSctpSocket::SetMaxMessageSize(size_t max_message_size) { - RTC_DCHECK_RUN_ON(&thread_checker_); options_.max_message_size = max_message_size; } size_t DcSctpSocket::buffered_amount(StreamID stream_id) const { - RTC_DCHECK_RUN_ON(&thread_checker_); return send_queue_.buffered_amount(stream_id); } size_t DcSctpSocket::buffered_amount_low_threshold(StreamID stream_id) const { - RTC_DCHECK_RUN_ON(&thread_checker_); return send_queue_.buffered_amount_low_threshold(stream_id); } void DcSctpSocket::SetBufferedAmountLowThreshold(StreamID stream_id, size_t bytes) { - RTC_DCHECK_RUN_ON(&thread_checker_); send_queue_.SetBufferedAmountLowThreshold(stream_id, bytes); } -absl::optional DcSctpSocket::GetMetrics() const { - RTC_DCHECK_RUN_ON(&thread_checker_); - +std::optional DcSctpSocket::GetMetrics() const { if (tcb_ == nullptr) { - return absl::nullopt; + return std::nullopt; } Metrics metrics = metrics_; metrics.cwnd_bytes = tcb_->cwnd(); - metrics.srtt_ms = tcb_->current_srtt().value(); + metrics.srtt_ms = tcb_->current_srtt().ms(); size_t packet_payload_size = options_.mtu - SctpPacket::kHeaderSize - DataChunk::kHeaderSize; metrics.unack_data_count = - tcb_->retransmission_queue().outstanding_items() + + tcb_->retransmission_queue().unacked_items() + (send_queue_.total_buffered_amount() + packet_payload_size - 1) / packet_payload_size; metrics.peer_rwnd_bytes = tcb_->retransmission_queue().rwnd(); @@ -624,7 +657,7 @@ void DcSctpSocket::MaybeSendShutdownOnPacketReceived(const SctpPacket& packet) { } void DcSctpSocket::MaybeSendResetStreamsRequest() { - absl::optional reconfig = + std::optional reconfig = tcb_->stream_reset_handler().MakeStreamResetRequest(); if (reconfig.has_value()) { SctpPacket::Builder builder = tcb_->PacketBuilder(); @@ -682,7 +715,7 @@ bool DcSctpSocket::ValidatePacket(const SctpPacket& packet) { } callbacks_.OnError( ErrorKind::kParseFailed, - rtc::StringFormat( + webrtc::StringFormat( "Packet has invalid verification tag: %08x, expected %08x", 
*header.verification_tag, *connect_params_.verification_tag)); return false; @@ -727,14 +760,13 @@ bool DcSctpSocket::ValidatePacket(const SctpPacket& packet) { callbacks_.OnError( ErrorKind::kParseFailed, - rtc::StringFormat( + webrtc::StringFormat( "Packet has invalid verification tag: %08x, expected %08x", *header.verification_tag, *my_verification_tag)); return false; } void DcSctpSocket::HandleTimeout(TimeoutID timeout_id) { - RTC_DCHECK_RUN_ON(&thread_checker_); CallbackDeferrer::ScopedDeferrer deferrer(callbacks_); timer_manager_.HandleTimeout(timeout_id); @@ -747,17 +779,16 @@ void DcSctpSocket::HandleTimeout(TimeoutID timeout_id) { RTC_DCHECK(IsConsistent()); } -void DcSctpSocket::ReceivePacket(rtc::ArrayView data) { - RTC_DCHECK_RUN_ON(&thread_checker_); +void DcSctpSocket::ReceivePacket(webrtc::ArrayView data) { CallbackDeferrer::ScopedDeferrer deferrer(callbacks_); ++metrics_.rx_packets_count; if (packet_observer_ != nullptr) { - packet_observer_->OnReceivedPacket(callbacks_.TimeMillis(), data); + packet_observer_->OnReceivedPacket(TimeMs(callbacks_.Now().ms()), data); } - absl::optional packet = SctpPacket::Parse(data, options_); + std::optional packet = SctpPacket::Parse(data, options_); if (!packet.has_value()) { // https://tools.ietf.org/html/rfc4960#section-6.8 // "The default procedure for handling invalid SCTP packets is to @@ -798,7 +829,8 @@ void DcSctpSocket::ReceivePacket(rtc::ArrayView data) { RTC_DCHECK(IsConsistent()); } -void DcSctpSocket::DebugPrintOutgoing(rtc::ArrayView payload) { +void DcSctpSocket::DebugPrintOutgoing( + webrtc::ArrayView payload) { auto packet = SctpPacket::Parse(payload, options_); RTC_DCHECK(packet.has_value()); @@ -875,7 +907,7 @@ bool DcSctpSocket::HandleUnrecognizedChunk( RTC_DLOG(LS_VERBOSE) << log_prefix() << "Received unknown chunk: " << static_cast(descriptor.type); if (report_as_error) { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "Received unknown chunk of type: " << static_cast(descriptor.type) << " with report-error bit set"; callbacks_.OnError(ErrorKind::kParseFailed, sb.str()); @@ -907,7 +939,7 @@ bool DcSctpSocket::HandleUnrecognizedChunk( return continue_processing; } -absl::optional DcSctpSocket::OnInitTimerExpiry() { +TimeDelta DcSctpSocket::OnInitTimerExpiry() { RTC_DLOG(LS_VERBOSE) << log_prefix() << "Timer " << t1_init_->name() << " has expired: " << t1_init_->expiration_count() << "/" << t1_init_->options().max_restarts.value_or(-1); @@ -919,10 +951,10 @@ absl::optional DcSctpSocket::OnInitTimerExpiry() { InternalClose(ErrorKind::kTooManyRetries, "No INIT_ACK received"); } RTC_DCHECK(IsConsistent()); - return absl::nullopt; + return TimeDelta::Zero(); } -absl::optional DcSctpSocket::OnCookieTimerExpiry() { +TimeDelta DcSctpSocket::OnCookieTimerExpiry() { // https://tools.ietf.org/html/rfc4960#section-4 // "If the T1-cookie timer expires, the endpoint MUST retransmit COOKIE // ECHO and restart the T1-cookie timer without changing state. 
This MUST @@ -937,16 +969,16 @@ absl::optional DcSctpSocket::OnCookieTimerExpiry() { RTC_DCHECK(state_ == State::kCookieEchoed); if (t1_cookie_->is_running()) { - tcb_->SendBufferedPackets(callbacks_.TimeMillis()); + tcb_->SendBufferedPackets(callbacks_.Now()); } else { InternalClose(ErrorKind::kTooManyRetries, "No COOKIE_ACK received"); } RTC_DCHECK(IsConsistent()); - return absl::nullopt; + return TimeDelta::Zero(); } -absl::optional DcSctpSocket::OnShutdownTimerExpiry() { +TimeDelta DcSctpSocket::OnShutdownTimerExpiry() { RTC_DLOG(LS_VERBOSE) << log_prefix() << "Timer " << t2_shutdown_->name() << " has expired: " << t2_shutdown_->expiration_count() << "/" @@ -966,7 +998,7 @@ absl::optional DcSctpSocket::OnShutdownTimerExpiry() { InternalClose(ErrorKind::kTooManyRetries, "No SHUTDOWN_ACK received"); RTC_DCHECK(IsConsistent()); - return absl::nullopt; + return TimeDelta::Zero(); } // https://tools.ietf.org/html/rfc4960#section-9.2 @@ -977,12 +1009,12 @@ absl::optional DcSctpSocket::OnShutdownTimerExpiry() { return tcb_->current_rto(); } -void DcSctpSocket::OnSentPacket(rtc::ArrayView packet, +void DcSctpSocket::OnSentPacket(webrtc::ArrayView packet, SendPacketStatus status) { // The packet observer is invoked even if the packet was failed to be sent, to // indicate an attempt was made. if (packet_observer_ != nullptr) { - packet_observer_->OnSentPacket(callbacks_.TimeMillis(), packet); + packet_observer_->OnSentPacket(TimeMs(callbacks_.Now().ms()), packet); } if (status == SendPacketStatus::kSuccess) { @@ -990,12 +1022,6 @@ void DcSctpSocket::OnSentPacket(rtc::ArrayView packet, DebugPrintOutgoing(packet); } - // The heartbeat interval timer is restarted for every sent packet, to - // fire when the outgoing channel is inactive. - if (tcb_ != nullptr) { - tcb_->heartbeat_handler().RestartTimer(); - } - ++metrics_.tx_packets_count; } } @@ -1012,22 +1038,22 @@ bool DcSctpSocket::ValidateHasTCB() { } void DcSctpSocket::ReportFailedToParseChunk(int chunk_type) { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << "Failed to parse chunk of type: " << chunk_type; callbacks_.OnError(ErrorKind::kParseFailed, sb.str()); } -void DcSctpSocket::HandleData(const CommonHeader& header, +void DcSctpSocket::HandleData(const CommonHeader& /* header */, const SctpPacket::ChunkDescriptor& descriptor) { - absl::optional chunk = DataChunk::Parse(descriptor.data); + std::optional chunk = DataChunk::Parse(descriptor.data); if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { HandleDataCommon(*chunk); } } -void DcSctpSocket::HandleIData(const CommonHeader& header, +void DcSctpSocket::HandleIData(const CommonHeader& /* header */, const SctpPacket::ChunkDescriptor& descriptor) { - absl::optional chunk = IDataChunk::Parse(descriptor.data); + std::optional chunk = IDataChunk::Parse(descriptor.data); if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { HandleDataCommon(*chunk); } @@ -1088,15 +1114,13 @@ void DcSctpSocket::HandleDataCommon(AnyDataChunk& chunk) { if (tcb_->data_tracker().Observe(tsn, immediate_ack)) { tcb_->reassembly_queue().Add(tsn, std::move(data)); - tcb_->reassembly_queue().MaybeResetStreamsDeferred( - tcb_->data_tracker().last_cumulative_acked_tsn()); - DeliverReassembledMessages(); + MaybeDeliverMessages(); } } -void DcSctpSocket::HandleInit(const CommonHeader& header, +void DcSctpSocket::HandleInit(const CommonHeader& /* header */, const SctpPacket::ChunkDescriptor& descriptor) { - absl::optional chunk = InitChunk::Parse(descriptor.data); + std::optional chunk = 
InitChunk::Parse(descriptor.data); if (!ValidateParseSuccess(chunk)) { return; } @@ -1139,11 +1163,16 @@ void DcSctpSocket::HandleInit(const CommonHeader& header, } TieTag tie_tag(0); + VerificationTag my_verification_tag; + TSN my_initial_tsn; if (state_ == State::kClosed) { RTC_DLOG(LS_VERBOSE) << log_prefix() << "Received Init in closed state (normal)"; - MakeConnectionParameters(); + my_verification_tag = VerificationTag( + callbacks_.GetRandomInt(kMinVerificationTag, kMaxVerificationTag)); + my_initial_tsn = + TSN(callbacks_.GetRandomInt(kMinInitialTsn, kMaxInitialTsn)); } else if (state_ == State::kCookieWait || state_ == State::kCookieEchoed) { // https://tools.ietf.org/html/rfc4960#section-5.2.1 // "This usually indicates an initialization collision, i.e., each @@ -1156,6 +1185,8 @@ void DcSctpSocket::HandleInit(const CommonHeader& header, // endpoint) was sent." RTC_DLOG(LS_VERBOSE) << log_prefix() << "Received Init indicating simultaneous connections"; + my_verification_tag = connect_params_.verification_tag; + my_initial_tsn = connect_params_.initial_tsn; } else { RTC_DCHECK(tcb_ != nullptr); // https://tools.ietf.org/html/rfc4960#section-5.2.2 @@ -1170,28 +1201,27 @@ void DcSctpSocket::HandleInit(const CommonHeader& header, << "Received Init indicating restarted connection"; // Create a new verification tag - different from the previous one. for (int tries = 0; tries < 10; ++tries) { - connect_params_.verification_tag = VerificationTag( + my_verification_tag = VerificationTag( callbacks_.GetRandomInt(kMinVerificationTag, kMaxVerificationTag)); - if (connect_params_.verification_tag != tcb_->my_verification_tag()) { + if (my_verification_tag != tcb_->my_verification_tag()) { break; } } // Make the initial TSN make a large jump, so that there is no overlap // with the old and new association. - connect_params_.initial_tsn = - TSN(*tcb_->retransmission_queue().next_tsn() + 1000000); + my_initial_tsn = TSN(*tcb_->retransmission_queue().next_tsn() + 1000000); tie_tag = tcb_->tie_tag(); } RTC_DLOG(LS_VERBOSE) << log_prefix() - << rtc::StringFormat( + << webrtc::StringFormat( "Proceeding with connection. 
my_verification_tag=%08x, " "my_initial_tsn=%u, peer_verification_tag=%08x, " "peer_initial_tsn=%u", - *connect_params_.verification_tag, *connect_params_.initial_tsn, - *chunk->initiate_tag(), *chunk->initial_tsn()); + *my_verification_tag, *my_initial_tsn, *chunk->initiate_tag(), + *chunk->initial_tsn()); Capabilities capabilities = ComputeCapabilities(options_, chunk->nbr_outbound_streams(), @@ -1200,24 +1230,27 @@ void DcSctpSocket::HandleInit(const CommonHeader& header, SctpPacket::Builder b(chunk->initiate_tag(), options_); Parameters::Builder params_builder = Parameters::Builder().Add(StateCookieParameter( - StateCookie(chunk->initiate_tag(), chunk->initial_tsn(), - chunk->a_rwnd(), tie_tag, capabilities) + StateCookie(chunk->initiate_tag(), my_verification_tag, + chunk->initial_tsn(), my_initial_tsn, chunk->a_rwnd(), + tie_tag, capabilities) .Serialize())); - AddCapabilityParameters(options_, params_builder); + AddCapabilityParameters(options_, capabilities.zero_checksum, params_builder); - InitAckChunk init_ack(/*initiate_tag=*/connect_params_.verification_tag, + InitAckChunk init_ack(/*initiate_tag=*/my_verification_tag, options_.max_receiver_window_buffer_size, options_.announced_maximum_outgoing_streams, options_.announced_maximum_incoming_streams, - connect_params_.initial_tsn, params_builder.Build()); + my_initial_tsn, params_builder.Build()); b.Add(init_ack); - packet_sender_.Send(b); + // If the peer has signaled that it supports zero checksum, INIT-ACK can then + // have its checksum as zero. + packet_sender_.Send(b, /*write_checksum=*/!capabilities.zero_checksum); } void DcSctpSocket::HandleInitAck( - const CommonHeader& header, + const CommonHeader& /* header */, const SctpPacket::ChunkDescriptor& descriptor) { - absl::optional chunk = InitAckChunk::Parse(descriptor.data); + std::optional chunk = InitAckChunk::Parse(descriptor.data); if (!ValidateParseSuccess(chunk)) { return; } @@ -1268,21 +1301,20 @@ void DcSctpSocket::HandleInitAck( // The connection isn't fully established just yet. tcb_->SetCookieEchoChunk(CookieEchoChunk(cookie->data())); - tcb_->SendBufferedPackets(callbacks_.TimeMillis()); + tcb_->SendBufferedPackets(callbacks_.Now()); t1_cookie_->Start(); } void DcSctpSocket::HandleCookieEcho( const CommonHeader& header, const SctpPacket::ChunkDescriptor& descriptor) { - absl::optional chunk = + std::optional chunk = CookieEchoChunk::Parse(descriptor.data); if (!ValidateParseSuccess(chunk)) { return; } - absl::optional cookie = - StateCookie::Deserialize(chunk->cookie()); + std::optional cookie = StateCookie::Deserialize(chunk->cookie()); if (!cookie.has_value()) { callbacks_.OnError(ErrorKind::kParseFailed, "Failed to parse state cookie"); return; @@ -1293,13 +1325,13 @@ void DcSctpSocket::HandleCookieEcho( return; } } else { - if (header.verification_tag != connect_params_.verification_tag) { + if (header.verification_tag != cookie->my_tag()) { callbacks_.OnError( ErrorKind::kParseFailed, - rtc::StringFormat( + webrtc::StringFormat( "Received CookieEcho with invalid verification tag: %08x, " "expected %08x", - *header.verification_tag, *connect_params_.verification_tag)); + *header.verification_tag, *cookie->my_tag())); return; } } @@ -1324,10 +1356,10 @@ void DcSctpSocket::HandleCookieEcho( // send queue is already re-configured, and shouldn't be reset. 
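The INIT/INIT-ACK handling above implements the zero-checksum negotiation added in this patch: a packet containing an INIT chunk always carries a real CRC32c, while INIT-ACK (and later traffic) may use a zero checksum once the peer has advertised a matching alternate error detection method. A small sketch of that rule; the helper name is illustrative and not part of the patch:

#include "net/dcsctp/socket/capabilities.h"

namespace dcsctp {
// Illustrative only: the decision behind the write_checksum arguments passed
// to packet_sender_.Send() in this file.
inline bool MustWriteCrc32c(const Capabilities& capabilities,
                            bool packet_contains_init) {
  // draft-ietf-tsvwg-sctp-zero-checksum: a packet containing an INIT chunk
  // MUST include a correct CRC32c checksum.
  if (packet_contains_init) {
    return true;
  }
  // Anything else may use a zero checksum once support was negotiated.
  return !capabilities.zero_checksum;
}
}  // namespace dcsctp
// Mirrors the patch:
//   packet_sender_.Send(b, /*write_checksum=*/true);                         // INIT
//   packet_sender_.Send(b, /*write_checksum=*/!capabilities.zero_checksum);  // INIT ACK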
send_queue_.Reset(); - CreateTransmissionControlBlock( - cookie->capabilities(), connect_params_.verification_tag, - connect_params_.initial_tsn, cookie->initiate_tag(), - cookie->initial_tsn(), cookie->a_rwnd(), MakeTieTag(callbacks_)); + CreateTransmissionControlBlock(cookie->capabilities(), cookie->my_tag(), + cookie->my_initial_tsn(), cookie->peer_tag(), + cookie->peer_initial_tsn(), cookie->a_rwnd(), + MakeTieTag(callbacks_)); } SctpPacket::Builder b = tcb_->PacketBuilder(); @@ -1337,7 +1369,7 @@ void DcSctpSocket::HandleCookieEcho( // "A COOKIE ACK chunk may be bundled with any pending DATA chunks (and/or // SACK chunks), but the COOKIE ACK chunk MUST be the first chunk in the // packet." - tcb_->SendBufferedPackets(b, callbacks_.TimeMillis()); + tcb_->SendBufferedPackets(b, callbacks_.Now()); } bool DcSctpSocket::HandleCookieEchoWithTCB(const CommonHeader& header, @@ -1347,13 +1379,13 @@ bool DcSctpSocket::HandleCookieEchoWithTCB(const CommonHeader& header, << *tcb_->my_verification_tag() << ", peer_tag=" << *header.verification_tag << ", tcb_tag=" << *tcb_->peer_verification_tag() - << ", cookie_tag=" << *cookie.initiate_tag() + << ", peer_tag=" << *cookie.peer_tag() << ", local_tie_tag=" << *tcb_->tie_tag() << ", peer_tie_tag=" << *cookie.tie_tag(); // https://tools.ietf.org/html/rfc4960#section-5.2.4 // "Handle a COOKIE ECHO when a TCB Exists" if (header.verification_tag != tcb_->my_verification_tag() && - tcb_->peer_verification_tag() != cookie.initiate_tag() && + tcb_->peer_verification_tag() != cookie.peer_tag() && cookie.tie_tag() == tcb_->tie_tag()) { // "A) In this case, the peer may have restarted." if (state_ == State::kShutdownAckSent) { @@ -1361,7 +1393,7 @@ bool DcSctpSocket::HandleCookieEchoWithTCB(const CommonHeader& header, // that the peer has restarted ... it MUST NOT set up a new association // but instead resend the SHUTDOWN ACK and send an ERROR chunk with a // "Cookie Received While Shutting Down" error cause to its peer." - SctpPacket::Builder b(cookie.initiate_tag(), options_); + SctpPacket::Builder b(cookie.peer_tag(), options_); b.Add(ShutdownAckChunk()); b.Add(ErrorChunk(Parameters::Builder() .Add(CookieReceivedWhileShuttingDownCause()) @@ -1378,7 +1410,7 @@ bool DcSctpSocket::HandleCookieEchoWithTCB(const CommonHeader& header, tcb_ = nullptr; callbacks_.OnConnectionRestarted(); } else if (header.verification_tag == tcb_->my_verification_tag() && - tcb_->peer_verification_tag() != cookie.initiate_tag()) { + tcb_->peer_verification_tag() != cookie.peer_tag()) { // TODO(boivie): Handle the peer_tag == 0? // "B) In this case, both sides may be attempting to start an // association at about the same time, but the peer endpoint started its @@ -1388,7 +1420,7 @@ bool DcSctpSocket::HandleCookieEchoWithTCB(const CommonHeader& header, << "Received COOKIE-ECHO indicating simultaneous connections"; tcb_ = nullptr; } else if (header.verification_tag != tcb_->my_verification_tag() && - tcb_->peer_verification_tag() == cookie.initiate_tag() && + tcb_->peer_verification_tag() == cookie.peer_tag() && cookie.tie_tag() == TieTag(0)) { // "C) In this case, the local endpoint's cookie has arrived late. // Before it arrived, the local endpoint sent an INIT and received an @@ -1401,7 +1433,7 @@ bool DcSctpSocket::HandleCookieEchoWithTCB(const CommonHeader& header, << "Received COOKIE-ECHO indicating a late COOKIE-ECHO. 
Discarding"; return false; } else if (header.verification_tag == tcb_->my_verification_tag() && - tcb_->peer_verification_tag() == cookie.initiate_tag()) { + tcb_->peer_verification_tag() == cookie.peer_tag()) { // "D) When both local and remote tags match, the endpoint should enter // the ESTABLISHED state, if it is in the COOKIE-ECHOED state. It // should stop any cookie timer that may be running and send a COOKIE @@ -1415,9 +1447,9 @@ bool DcSctpSocket::HandleCookieEchoWithTCB(const CommonHeader& header, } void DcSctpSocket::HandleCookieAck( - const CommonHeader& header, + const CommonHeader& /* header */, const SctpPacket::ChunkDescriptor& descriptor) { - absl::optional chunk = CookieAckChunk::Parse(descriptor.data); + std::optional chunk = CookieAckChunk::Parse(descriptor.data); if (!ValidateParseSuccess(chunk)) { return; } @@ -1435,25 +1467,23 @@ void DcSctpSocket::HandleCookieAck( t1_cookie_->Stop(); tcb_->ClearCookieEchoChunk(); SetState(State::kEstablished, "COOKIE_ACK received"); - tcb_->SendBufferedPackets(callbacks_.TimeMillis()); + tcb_->SendBufferedPackets(callbacks_.Now()); callbacks_.OnConnected(); } -void DcSctpSocket::DeliverReassembledMessages() { - if (tcb_->reassembly_queue().HasMessages()) { - for (auto& message : tcb_->reassembly_queue().FlushMessages()) { - ++metrics_.rx_messages_count; - callbacks_.OnMessageReceived(std::move(message)); - } +void DcSctpSocket::MaybeDeliverMessages() { + for (auto& message : tcb_->reassembly_queue().FlushMessages()) { + ++metrics_.rx_messages_count; + callbacks_.OnMessageReceived(std::move(message)); } } -void DcSctpSocket::HandleSack(const CommonHeader& header, +void DcSctpSocket::HandleSack(const CommonHeader& /* header */, const SctpPacket::ChunkDescriptor& descriptor) { - absl::optional chunk = SackChunk::Parse(descriptor.data); + std::optional chunk = SackChunk::Parse(descriptor.data); if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { - TimeMs now = callbacks_.TimeMillis(); + Timestamp now = callbacks_.Now(); SackChunk sack = ChunkValidators::Clean(*std::move(chunk)); if (tcb_->retransmission_queue().HandleSack(now, sack)) { @@ -1481,9 +1511,9 @@ void DcSctpSocket::HandleSack(const CommonHeader& header, } void DcSctpSocket::HandleHeartbeatRequest( - const CommonHeader& header, + const CommonHeader& /* header */, const SctpPacket::ChunkDescriptor& descriptor) { - absl::optional chunk = + std::optional chunk = HeartbeatRequestChunk::Parse(descriptor.data); if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { @@ -1492,9 +1522,9 @@ void DcSctpSocket::HandleHeartbeatRequest( } void DcSctpSocket::HandleHeartbeatAck( - const CommonHeader& header, + const CommonHeader& /* header */, const SctpPacket::ChunkDescriptor& descriptor) { - absl::optional chunk = + std::optional chunk = HeartbeatAckChunk::Parse(descriptor.data); if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { @@ -1502,9 +1532,9 @@ void DcSctpSocket::HandleHeartbeatAck( } } -void DcSctpSocket::HandleAbort(const CommonHeader& header, +void DcSctpSocket::HandleAbort(const CommonHeader& /* header */, const SctpPacket::ChunkDescriptor& descriptor) { - absl::optional chunk = AbortChunk::Parse(descriptor.data); + std::optional chunk = AbortChunk::Parse(descriptor.data); if (ValidateParseSuccess(chunk)) { std::string error_string = ErrorCausesToString(chunk->error_causes()); if (tcb_ == nullptr) { @@ -1522,9 +1552,9 @@ void DcSctpSocket::HandleAbort(const CommonHeader& header, } } -void DcSctpSocket::HandleError(const CommonHeader& header, +void 
DcSctpSocket::HandleError(const CommonHeader& /* header */, const SctpPacket::ChunkDescriptor& descriptor) { - absl::optional chunk = ErrorChunk::Parse(descriptor.data); + std::optional chunk = ErrorChunk::Parse(descriptor.data); if (ValidateParseSuccess(chunk)) { std::string error_string = ErrorCausesToString(chunk->error_causes()); if (tcb_ == nullptr) { @@ -1540,10 +1570,10 @@ void DcSctpSocket::HandleError(const CommonHeader& header, } void DcSctpSocket::HandleReconfig( - const CommonHeader& header, + const CommonHeader& /* header */, const SctpPacket::ChunkDescriptor& descriptor) { - TimeMs now = callbacks_.TimeMillis(); - absl::optional chunk = ReConfigChunk::Parse(descriptor.data); + Timestamp now = callbacks_.Now(); + std::optional chunk = ReConfigChunk::Parse(descriptor.data); if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { tcb_->stream_reset_handler().HandleReConfig(*std::move(chunk)); // Handling this response may result in outgoing stream resets finishing @@ -1554,6 +1584,10 @@ void DcSctpSocket::HandleReconfig( // If a response was processed, pending to-be-reset streams may now have // become unpaused. Try to send more DATA chunks. tcb_->SendBufferedPackets(now); + + // If it leaves "deferred reset processing", there may be chunks to deliver + // that were queued while waiting for the stream to reset. + MaybeDeliverMessages(); } } @@ -1660,7 +1694,7 @@ void DcSctpSocket::HandleShutdownComplete( void DcSctpSocket::HandleForwardTsn( const CommonHeader& header, const SctpPacket::ChunkDescriptor& descriptor) { - absl::optional chunk = + std::optional chunk = ForwardTsnChunk::Parse(descriptor.data); if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { HandleForwardTsnCommon(*chunk); @@ -1670,7 +1704,7 @@ void DcSctpSocket::HandleForwardTsn( void DcSctpSocket::HandleIForwardTsn( const CommonHeader& header, const SctpPacket::ChunkDescriptor& descriptor) { - absl::optional chunk = + std::optional chunk = IForwardTsnChunk::Parse(descriptor.data); if (ValidateParseSuccess(chunk) && ValidateHasTCB()) { HandleForwardTsnCommon(*chunk); @@ -1692,18 +1726,17 @@ void DcSctpSocket::HandleForwardTsnCommon(const AnyForwardTsnChunk& chunk) { "Received a FORWARD_TSN without announced peer support"); return; } - tcb_->data_tracker().HandleForwardTsn(chunk.new_cumulative_tsn()); - tcb_->reassembly_queue().Handle(chunk); - // A forward TSN - for ordered streams - may allow messages to be - // delivered. - DeliverReassembledMessages(); + if (tcb_->data_tracker().HandleForwardTsn(chunk.new_cumulative_tsn())) { + tcb_->reassembly_queue().HandleForwardTsn(chunk.new_cumulative_tsn(), + chunk.skipped_streams()); + } - // Processing a FORWARD_TSN might result in sending a SACK. - tcb_->MaybeSendSack(); + // A forward TSN - for ordered streams - may allow messages to be delivered. 
+ MaybeDeliverMessages(); } void DcSctpSocket::MaybeSendShutdownOrAck() { - if (tcb_->retransmission_queue().outstanding_bytes() != 0) { + if (tcb_->retransmission_queue().unacked_items() != 0) { return; } @@ -1744,7 +1777,6 @@ void DcSctpSocket::SendShutdownAck() { } HandoverReadinessStatus DcSctpSocket::GetHandoverReadiness() const { - RTC_DCHECK_RUN_ON(&thread_checker_); HandoverReadinessStatus status; if (state_ != State::kClosed && state_ != State::kEstablished) { status.Add(HandoverUnreadinessReason::kWrongConnectionState); @@ -1756,13 +1788,12 @@ HandoverReadinessStatus DcSctpSocket::GetHandoverReadiness() const { return status; } -absl::optional +std::optional DcSctpSocket::GetHandoverStateAndClose() { - RTC_DCHECK_RUN_ON(&thread_checker_); CallbackDeferrer::ScopedDeferrer deferrer(callbacks_); if (!GetHandoverReadiness().IsReady()) { - return absl::nullopt; + return std::nullopt; } DcSctpSocketHandoverState state; diff --git a/net/dcsctp/socket/dcsctp_socket.h b/net/dcsctp/socket/dcsctp_socket.h index 157c515d65..d68711ea1f 100644 --- a/net/dcsctp/socket/dcsctp_socket.h +++ b/net/dcsctp/socket/dcsctp_socket.h @@ -14,10 +14,10 @@ #include #include #include +#include #include "absl/strings/string_view.h" #include "api/array_view.h" -#include "api/sequence_checker.h" #include "net/dcsctp/packet/chunk/abort_chunk.h" #include "net/dcsctp/packet/chunk/chunk.h" #include "net/dcsctp/packet/chunk/cookie_ack_chunk.h" @@ -83,7 +83,7 @@ class DcSctpSocket : public DcSctpSocketInterface { DcSctpSocket& operator=(const DcSctpSocket&) = delete; // Implementation of `DcSctpSocketInterface`. - void ReceivePacket(rtc::ArrayView data) override; + void ReceivePacket(webrtc::ArrayView data) override; void HandleTimeout(TimeoutID timeout_id) override; void Connect() override; void RestoreFromState(const DcSctpSocketHandoverState& state) override; @@ -91,8 +91,10 @@ class DcSctpSocket : public DcSctpSocketInterface { void Close() override; SendStatus Send(DcSctpMessage message, const SendOptions& send_options) override; + std::vector SendMany(webrtc::ArrayView messages, + const SendOptions& send_options) override; ResetStreamsStatus ResetStreams( - rtc::ArrayView outgoing_streams) override; + webrtc::ArrayView outgoing_streams) override; SocketState state() const override; const DcSctpOptions& options() const override { return options_; } void SetMaxMessageSize(size_t max_message_size) override; @@ -101,9 +103,9 @@ class DcSctpSocket : public DcSctpSocketInterface { size_t buffered_amount(StreamID stream_id) const override; size_t buffered_amount_low_threshold(StreamID stream_id) const override; void SetBufferedAmountLowThreshold(StreamID stream_id, size_t bytes) override; - absl::optional GetMetrics() const override; + std::optional GetMetrics() const override; HandoverReadinessStatus GetHandoverReadiness() const override; - absl::optional GetHandoverStateAndClose() override; + std::optional GetHandoverStateAndClose() override; SctpImplementation peer_implementation() const override { return metrics_.peer_implementation; } @@ -148,17 +150,15 @@ class DcSctpSocket : public DcSctpSocketInterface { // Changes the socket state, given a `reason` (for debugging/logging). void SetState(State state, absl::string_view reason); - // Fills in `connect_params` with random verification tag and initial TSN. - void MakeConnectionParameters(); // Closes the association. Note that the TCB will not be valid past this call. 
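SendMany(), declared above and implemented earlier in dcsctp_socket.cc, queues a whole batch of messages and flushes buffered packets once at the end rather than once per Send() call. A hedged usage sketch against the public interface; the function and values below are illustrative only:

#include <vector>

#include "net/dcsctp/public/dcsctp_message.h"
#include "net/dcsctp/public/dcsctp_socket.h"
#include "net/dcsctp/public/types.h"

namespace dcsctp {
// Illustrative only.
inline void SendTwoMessages(DcSctpSocketInterface& socket) {
  std::vector<DcSctpMessage> messages;
  messages.push_back(DcSctpMessage(StreamID(1), PPID(53), {1, 2, 3}));
  messages.push_back(DcSctpMessage(StreamID(1), PPID(53), {4, 5, 6}));
  // The socket moves the payloads out of `messages`; one failed message does
  // not stop later messages in the batch from being attempted.
  std::vector<SendStatus> statuses = socket.SendMany(messages, SendOptions());
  (void)statuses;
}
}  // namespace dcsctp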
void InternalClose(ErrorKind error, absl::string_view message); // Closes the association, because of too many retransmission errors. void CloseConnectionBecauseOfTooManyTransmissionErrors(); // Timer expiration handlers - absl::optional OnInitTimerExpiry(); - absl::optional OnCookieTimerExpiry(); - absl::optional OnShutdownTimerExpiry(); - void OnSentPacket(rtc::ArrayView packet, + webrtc::TimeDelta OnInitTimerExpiry(); + webrtc::TimeDelta OnCookieTimerExpiry(); + webrtc::TimeDelta OnShutdownTimerExpiry(); + void OnSentPacket(webrtc::ArrayView packet, SendPacketStatus status); // Sends SHUTDOWN or SHUTDOWN-ACK if the socket is shutting down and if all // outstanding data has been acknowledged. @@ -167,6 +167,9 @@ class DcSctpSocket : public DcSctpSocketInterface { void MaybeSendShutdownOnPacketReceived(const SctpPacket& packet); // If there are streams pending to be reset, send a request to reset them. void MaybeSendResetStreamsRequest(); + // Performs internal processing shared between Send and SendMany. + SendStatus InternalSend(const DcSctpMessage& message, + const SendOptions& send_options); // Sends a INIT chunk. void SendInit(); // Sends a SHUTDOWN chunk. @@ -178,16 +181,17 @@ class DcSctpSocket : public DcSctpSocketInterface { bool ValidatePacket(const SctpPacket& packet); // Parses `payload`, which is a serialized packet that is just going to be // sent and prints all chunks. - void DebugPrintOutgoing(rtc::ArrayView payload); - // Called whenever there may be reassembled messages, and delivers those. - void DeliverReassembledMessages(); + void DebugPrintOutgoing(webrtc::ArrayView payload); + // Called whenever data has been received, or the cumulative acknowledgment + // TSN has moved, that may result in delivering messages. + void MaybeDeliverMessages(); // Returns true if there is a TCB, and false otherwise (and reports an error). bool ValidateHasTCB(); // Returns true if the parsing of a chunk of type `T` succeeded. If it didn't, // it reports an error and returns false. 
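The timer expiry handlers above switch from absl::optional<DurationMs> to webrtc::TimeDelta. Judging by the changed bodies, TimeDelta::Zero() takes the role of absl::nullopt (keep the duration and backoff configured in TimerOptions), while a positive value, as in OnShutdownTimerExpiry returning current_rto(), reschedules the timer with that delay. A tiny sketch under that assumption; the function is illustrative only:

#include "api/units/time_delta.h"

namespace dcsctp {
// Illustrative only: the two return patterns used after the migration.
inline webrtc::TimeDelta ExampleTimerExpiry(bool override_next_delay,
                                            webrtc::TimeDelta current_rto) {
  if (override_next_delay) {
    // Equivalent of returning a DurationMs before: restart after current_rto.
    return current_rto;
  }
  // Equivalent of the old `return absl::nullopt;`: keep the TimerOptions
  // duration and backoff.
  return webrtc::TimeDelta::Zero();
}
}  // namespace dcsctp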
template - bool ValidateParseSuccess(const absl::optional& c) { + bool ValidateParseSuccess(const std::optional& c) { if (c.has_value()) { return true; } @@ -266,7 +270,6 @@ class DcSctpSocket : public DcSctpSocketInterface { const std::string log_prefix_; const std::unique_ptr packet_observer_; - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker thread_checker_; Metrics metrics_; DcSctpOptions options_; diff --git a/net/dcsctp/socket/dcsctp_socket_network_test.cc b/net/dcsctp/socket/dcsctp_socket_network_test.cc index f097bfa095..09a8bbb6c2 100644 --- a/net/dcsctp/socket/dcsctp_socket_network_test.cc +++ b/net/dcsctp/socket/dcsctp_socket_network_test.cc @@ -10,20 +10,20 @@ #include #include #include +#include #include #include #include #include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "api/test/create_network_emulation_manager.h" #include "api/test/network_emulation_manager.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" -#include "call/simulated_network.h" #include "net/dcsctp/public/dcsctp_options.h" #include "net/dcsctp/public/dcsctp_socket.h" #include "net/dcsctp/public/types.h" @@ -33,6 +33,7 @@ #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/gunit.h" #include "rtc_base/logging.h" +#include "rtc_base/random.h" #include "rtc_base/socket_address.h" #include "rtc_base/strings/string_format.h" #include "rtc_base/time_utils.h" @@ -55,6 +56,9 @@ using ::testing::AllOf; using ::testing::Ge; using ::testing::Le; using ::testing::SizeIs; +using ::webrtc::DataRate; +using ::webrtc::TimeDelta; +using ::webrtc::Timestamp; constexpr StreamID kStreamId(1); constexpr PPID kPpid(53); @@ -103,20 +107,20 @@ class BoundSocket : public webrtc::EmulatedNetworkReceiverInterface { endpoint_ = endpoint; uint16_t port = endpoint->BindReceiver(0, this).value(); source_address_ = - rtc::SocketAddress(endpoint_->GetPeerLocalAddress(), port); + webrtc::SocketAddress(endpoint_->GetPeerLocalAddress(), port); } void SetDestination(const BoundSocket& socket) { dest_address_ = socket.source_address_; } - void SetReceiver(std::function receiver) { + void SetReceiver(std::function receiver) { receiver_ = std::move(receiver); } - void SendPacket(rtc::ArrayView data) { + void SendPacket(webrtc::ArrayView data) { endpoint_->SendPacket(source_address_, dest_address_, - rtc::CopyOnWriteBuffer(data.data(), data.size())); + webrtc::CopyOnWriteBuffer(data.data(), data.size())); } private: @@ -125,10 +129,10 @@ class BoundSocket : public webrtc::EmulatedNetworkReceiverInterface { receiver_(std::move(packet.data)); } - std::function receiver_; + std::function receiver_; webrtc::EmulatedEndpoint* endpoint_ = nullptr; - rtc::SocketAddress source_address_; - rtc::SocketAddress dest_address_; + webrtc::SocketAddress source_address_; + webrtc::SocketAddress dest_address_; }; // Sends at a constant rate but with random packet sizes. 
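The test scaffolding above, together with the public interface changes earlier in this patch, completes the rtc:: to webrtc:: rename for ArrayView, SocketAddress, CopyOnWriteBuffer and related types. A minimal sketch of feeding a received datagram into the socket through the renamed view type; the helper is illustrative only:

#include <cstddef>
#include <cstdint>

#include "api/array_view.h"
#include "net/dcsctp/public/dcsctp_socket.h"

namespace dcsctp {
// Illustrative only: wrapping a raw receive buffer for ReceivePacket().
inline void DeliverDatagram(DcSctpSocketInterface& socket,
                            const uint8_t* data,
                            size_t size) {
  socket.ReceivePacket(webrtc::ArrayView<const uint8_t>(data, size));
}
}  // namespace dcsctp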
@@ -138,18 +142,18 @@ class SctpActor : public DcSctpSocketCallbacks { BoundSocket& emulated_socket, const DcSctpOptions& sctp_options) : log_prefix_(std::string(name) + ": "), - thread_(rtc::Thread::Current()), + thread_(webrtc::Thread::Current()), emulated_socket_(emulated_socket), timeout_factory_( *thread_, - [this]() { return TimeMillis(); }, + [this]() { return TimeMs(Now().ms()); }, [this](dcsctp::TimeoutID timeout_id) { sctp_socket_.HandleTimeout(timeout_id); }), random_(GetUniqueSeed()), sctp_socket_(name, *this, nullptr, sctp_options), - last_bandwidth_printout_(TimeMs(TimeMillis())) { - emulated_socket.SetReceiver([this](rtc::CopyOnWriteBuffer buf) { + last_bandwidth_printout_(Now()) { + emulated_socket.SetReceiver([this](webrtc::CopyOnWriteBuffer buf) { // The receiver will be executed on the NetworkEmulation task queue, but // the dcSCTP socket is owned by `thread_` and is not thread-safe. thread_->PostTask([this, buf] { this->sctp_socket_.ReceivePacket(buf); }); @@ -157,13 +161,14 @@ class SctpActor : public DcSctpSocketCallbacks { } void PrintBandwidth() { - TimeMs now = TimeMillis(); - DurationMs duration = now - last_bandwidth_printout_; + Timestamp now = Now(); + TimeDelta duration = now - last_bandwidth_printout_; double bitrate_mbps = - static_cast(received_bytes_ * 8) / *duration / 1000; + static_cast(received_bytes_ * 8) / duration.ms() / 1000; RTC_LOG(LS_INFO) << log_prefix() - << rtc::StringFormat("Received %0.2f Mbps", bitrate_mbps); + << webrtc::StringFormat("Received %0.2f Mbps", + bitrate_mbps); received_bitrate_mbps_.push_back(bitrate_mbps); received_bytes_ = 0; @@ -176,7 +181,7 @@ class SctpActor : public DcSctpSocketCallbacks { } } - void SendPacket(rtc::ArrayView data) override { + void SendPacket(webrtc::ArrayView data) override { emulated_socket_.SendPacket(data); } @@ -185,7 +190,7 @@ class SctpActor : public DcSctpSocketCallbacks { return timeout_factory_.CreateTimeout(precision); } - TimeMs TimeMillis() override { return TimeMs(rtc::TimeMillis()); } + Timestamp Now() override { return Timestamp::Millis(webrtc::TimeMillis()); } uint32_t GetRandomInt(uint32_t low, uint32_t high) override { return random_.Rand(low, high); @@ -212,18 +217,19 @@ class SctpActor : public DcSctpSocketCallbacks { void OnConnectionRestarted() override {} - void OnStreamsResetFailed(rtc::ArrayView outgoing_streams, - absl::string_view reason) override {} + void OnStreamsResetFailed( + webrtc::ArrayView /* outgoing_streams */, + absl::string_view /* reason */) override {} void OnStreamsResetPerformed( - rtc::ArrayView outgoing_streams) override {} + webrtc::ArrayView /* outgoing_streams */) override {} void OnIncomingStreamsReset( - rtc::ArrayView incoming_streams) override {} + webrtc::ArrayView /* incoming_streams */) override {} void NotifyOutgoingMessageBufferEmpty() override {} - void OnBufferedAmountLow(StreamID stream_id) override { + void OnBufferedAmountLow(StreamID /* stream_id */) override { if (mode_ == ActorMode::kThroughputSender) { std::vector payload(kHugePayloadSize); sctp_socket_.Send(DcSctpMessage(kStreamId, kPpid, std::move(payload)), @@ -239,7 +245,7 @@ class SctpActor : public DcSctpSocketCallbacks { std::vector(kLargePayloadSize)), send_options); - send_options.max_retransmissions = absl::nullopt; + send_options.max_retransmissions = std::nullopt; sctp_socket_.Send( DcSctpMessage(kStreamId, kPpid, std::vector(kSmallPayloadSize)), @@ -248,12 +254,12 @@ class SctpActor : public DcSctpSocketCallbacks { } } - absl::optional ConsumeReceivedMessage() { + 
std::optional ConsumeReceivedMessage() { if (!last_received_message_.has_value()) { - return absl::nullopt; + return std::nullopt; } DcSctpMessage ret = *std::move(last_received_message_); - last_received_message_ = absl::nullopt; + last_received_message_ = std::nullopt; return ret; } @@ -298,23 +304,23 @@ class SctpActor : public DcSctpSocketCallbacks { private: std::string log_prefix() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb << log_prefix_; - sb << rtc::TimeMillis(); + sb << webrtc::TimeMillis(); sb << ": "; return sb.Release(); } ActorMode mode_ = ActorMode::kAtRest; const std::string log_prefix_; - rtc::Thread* thread_; + webrtc::Thread* thread_; BoundSocket& emulated_socket_; TaskQueueTimeoutFactory timeout_factory_; webrtc::Random random_; DcSctpSocket sctp_socket_; size_t received_bytes_ = 0; - absl::optional last_received_message_; - TimeMs last_bandwidth_printout_; + std::optional last_received_message_; + Timestamp last_bandwidth_printout_; // Per-second received bitrates, in Mbps std::vector received_bitrate_mbps_; webrtc::ScopedTaskSafety safety_; @@ -325,7 +331,7 @@ class DcSctpSocketNetworkTest : public testing::Test { DcSctpSocketNetworkTest() : options_(MakeOptionsForTest()), emulation_(webrtc::CreateNetworkEmulationManager( - webrtc::TimeMode::kSimulated)) {} + {.time_mode = webrtc::TimeMode::kSimulated})) {} void MakeNetwork(const webrtc::BuiltInNetworkBehaviorConfig& config) { webrtc::EmulatedEndpoint* endpoint_a = @@ -405,7 +411,7 @@ TEST_F(DcSctpSocketNetworkTest, CanSendLargeMessage) { TEST_F(DcSctpSocketNetworkTest, CanSendMessagesReliablyWithLowBandwidth) { webrtc::BuiltInNetworkBehaviorConfig pipe_config; pipe_config.queue_delay_ms = 30; - pipe_config.link_capacity_kbps = 1000; + pipe_config.link_capacity = DataRate::KilobitsPerSec(1000); MakeNetwork(pipe_config); SctpActor sender("A", emulated_socket_a_, options_); @@ -434,7 +440,7 @@ TEST_F(DcSctpSocketNetworkTest, DCSCTP_NDEBUG_TEST(CanSendMessagesReliablyWithMediumBandwidth)) { webrtc::BuiltInNetworkBehaviorConfig pipe_config; pipe_config.queue_delay_ms = 30; - pipe_config.link_capacity_kbps = 18000; + pipe_config.link_capacity = DataRate::KilobitsPerSec(18000); MakeNetwork(pipe_config); SctpActor sender("A", emulated_socket_a_, options_); diff --git a/net/dcsctp/socket/dcsctp_socket_test.cc b/net/dcsctp/socket/dcsctp_socket_test.cc index c31c048582..d9c1c5768a 100644 --- a/net/dcsctp/socket/dcsctp_socket_test.cc +++ b/net/dcsctp/socket/dcsctp_socket_test.cc @@ -9,9 +9,11 @@ */ #include "net/dcsctp/socket/dcsctp_socket.h" +#include #include #include #include +#include #include #include #include @@ -19,19 +21,23 @@ #include "absl/flags/flag.h" #include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/common/handover_testing.h" #include "net/dcsctp/common/math.h" +#include "net/dcsctp/packet/chunk/abort_chunk.h" #include "net/dcsctp/packet/chunk/chunk.h" +#include "net/dcsctp/packet/chunk/cookie_ack_chunk.h" #include "net/dcsctp/packet/chunk/cookie_echo_chunk.h" #include "net/dcsctp/packet/chunk/data_chunk.h" #include "net/dcsctp/packet/chunk/data_common.h" #include "net/dcsctp/packet/chunk/error_chunk.h" +#include "net/dcsctp/packet/chunk/forward_tsn_chunk.h" #include "net/dcsctp/packet/chunk/heartbeat_ack_chunk.h" #include "net/dcsctp/packet/chunk/heartbeat_request_chunk.h" #include "net/dcsctp/packet/chunk/idata_chunk.h" +#include "net/dcsctp/packet/chunk/init_ack_chunk.h" #include 
"net/dcsctp/packet/chunk/init_chunk.h" +#include "net/dcsctp/packet/chunk/reconfig_chunk.h" #include "net/dcsctp/packet/chunk/sack_chunk.h" #include "net/dcsctp/packet/chunk/shutdown_chunk.h" #include "net/dcsctp/packet/error_cause/error_cause.h" @@ -39,6 +45,7 @@ #include "net/dcsctp/packet/parameter/heartbeat_info_parameter.h" #include "net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h" #include "net/dcsctp/packet/parameter/parameter.h" +#include "net/dcsctp/packet/parameter/reconfiguration_response_parameter.h" #include "net/dcsctp/packet/sctp_packet.h" #include "net/dcsctp/packet/tlv_trait.h" #include "net/dcsctp/public/dcsctp_message.h" @@ -59,10 +66,17 @@ namespace { using ::testing::_; using ::testing::AllOf; using ::testing::ElementsAre; +using ::testing::ElementsAreArray; +using ::testing::Eq; +using ::testing::Field; using ::testing::HasSubstr; using ::testing::IsEmpty; +using ::testing::Not; +using ::testing::Property; using ::testing::SizeIs; using ::testing::UnorderedElementsAre; +using ::webrtc::TimeDelta; +using ::webrtc::Timestamp; constexpr SendOptions kSendOptions; constexpr size_t kLargeMessageSize = DcSctpOptions::kMaxSafeMTUSize * 20; @@ -70,243 +84,135 @@ constexpr size_t kSmallMessageSize = 10; constexpr int kMaxBurstPackets = 4; constexpr DcSctpOptions kDefaultOptions; -MATCHER_P(HasDataChunkWithStreamId, stream_id, "") { - absl::optional packet = SctpPacket::Parse(arg, kDefaultOptions); +MATCHER_P(HasChunks, chunks, "") { + std::optional packet = SctpPacket::Parse(arg, kDefaultOptions); if (!packet.has_value()) { *result_listener << "data didn't parse as an SctpPacket"; return false; } - if (packet->descriptors()[0].type != DataChunk::kType) { - *result_listener << "the first chunk in the packet is not a data chunk"; - return false; - } - - absl::optional dc = - DataChunk::Parse(packet->descriptors()[0].data); - if (!dc.has_value()) { - *result_listener << "The first chunk didn't parse as a data chunk"; - return false; - } - - if (dc->stream_id() != stream_id) { - *result_listener << "the stream_id is " << *dc->stream_id(); - return false; - } - - return true; + return ExplainMatchResult(chunks, packet->descriptors(), result_listener); } -MATCHER_P(HasDataChunkWithPPID, ppid, "") { - absl::optional packet = SctpPacket::Parse(arg, kDefaultOptions); - if (!packet.has_value()) { - *result_listener << "data didn't parse as an SctpPacket"; - return false; - } - - if (packet->descriptors()[0].type != DataChunk::kType) { - *result_listener << "the first chunk in the packet is not a data chunk"; - return false; - } - - absl::optional dc = - DataChunk::Parse(packet->descriptors()[0].data); - if (!dc.has_value()) { - *result_listener << "The first chunk didn't parse as a data chunk"; - return false; - } - - if (dc->ppid() != ppid) { - *result_listener << "the ppid is " << *dc->ppid(); - return false; - } - - return true; +MATCHER_P(IsChunkType, chunk_type, "") { + return ExplainMatchResult(chunk_type, arg.type, result_listener); } -MATCHER_P(HasDataChunkWithSsn, ssn, "") { - absl::optional packet = SctpPacket::Parse(arg, kDefaultOptions); - if (!packet.has_value()) { - *result_listener << "data didn't parse as an SctpPacket"; - return false; - } - - if (packet->descriptors()[0].type != DataChunk::kType) { - *result_listener << "the first chunk in the packet is not a data chunk"; +MATCHER_P(IsDataChunk, properties, "") { + if (arg.type != DataChunk::kType) { + *result_listener << "the chunk is not a data chunk"; return false; } - absl::optional dc = - 
DataChunk::Parse(packet->descriptors()[0].data); - if (!dc.has_value()) { - *result_listener << "The first chunk didn't parse as a data chunk"; + std::optional chunk = DataChunk::Parse(arg.data); + if (!chunk.has_value()) { + *result_listener << "The chunk didn't parse as a data chunk"; return false; } - if (dc->ssn() != ssn) { - *result_listener << "the ssn is " << *dc->ssn(); - return false; - } - - return true; + return ExplainMatchResult(properties, *chunk, result_listener); } -MATCHER_P(HasDataChunkWithMid, mid, "") { - absl::optional packet = SctpPacket::Parse(arg, kDefaultOptions); - if (!packet.has_value()) { - *result_listener << "data didn't parse as an SctpPacket"; - return false; - } - - if (packet->descriptors()[0].type != IDataChunk::kType) { - *result_listener << "the first chunk in the packet is not an i-data chunk"; - return false; - } - - absl::optional dc = - IDataChunk::Parse(packet->descriptors()[0].data); - if (!dc.has_value()) { - *result_listener << "The first chunk didn't parse as an i-data chunk"; +MATCHER_P(IsSack, properties, "") { + if (arg.type != SackChunk::kType) { + *result_listener << "the chunk is not a sack chunk"; return false; } - if (dc->message_id() != mid) { - *result_listener << "the mid is " << *dc->message_id(); + std::optional chunk = SackChunk::Parse(arg.data); + if (!chunk.has_value()) { + *result_listener << "The chunk didn't parse as a sack chunk"; return false; } - return true; + return ExplainMatchResult(properties, *chunk, result_listener); } -MATCHER_P(HasSackWithCumAckTsn, tsn, "") { - absl::optional packet = SctpPacket::Parse(arg, kDefaultOptions); - if (!packet.has_value()) { - *result_listener << "data didn't parse as an SctpPacket"; - return false; - } - - if (packet->descriptors()[0].type != SackChunk::kType) { - *result_listener << "the first chunk in the packet is not a data chunk"; - return false; - } - - absl::optional sc = - SackChunk::Parse(packet->descriptors()[0].data); - if (!sc.has_value()) { - *result_listener << "The first chunk didn't parse as a data chunk"; +MATCHER_P(IsReConfig, properties, "") { + if (arg.type != ReConfigChunk::kType) { + *result_listener << "the chunk is not a re-config chunk"; return false; } - if (sc->cumulative_tsn_ack() != tsn) { - *result_listener << "the cum_ack_tsn is " << *sc->cumulative_tsn_ack(); + std::optional chunk = ReConfigChunk::Parse(arg.data); + if (!chunk.has_value()) { + *result_listener << "The chunk didn't parse as a re-config chunk"; return false; } - return true; + return ExplainMatchResult(properties, *chunk, result_listener); } -MATCHER(HasSackWithNoGapAckBlocks, "") { - absl::optional packet = SctpPacket::Parse(arg, kDefaultOptions); - if (!packet.has_value()) { - *result_listener << "data didn't parse as an SctpPacket"; - return false; - } - - if (packet->descriptors()[0].type != SackChunk::kType) { - *result_listener << "the first chunk in the packet is not a data chunk"; - return false; - } - - absl::optional sc = - SackChunk::Parse(packet->descriptors()[0].data); - if (!sc.has_value()) { - *result_listener << "The first chunk didn't parse as a data chunk"; +MATCHER_P(IsHeartbeatAck, properties, "") { + if (arg.type != HeartbeatAckChunk::kType) { + *result_listener << "the chunk is not a HeartbeatAckChunk"; return false; } - if (!sc->gap_ack_blocks().empty()) { - *result_listener << "there are gap ack blocks"; + std::optional chunk = HeartbeatAckChunk::Parse(arg.data); + if (!chunk.has_value()) { + *result_listener << "The chunk didn't parse as a HeartbeatAckChunk"; 
return false; } - return true; + return ExplainMatchResult(properties, *chunk, result_listener); } -MATCHER_P(HasReconfigWithStreams, streams_matcher, "") { - absl::optional packet = SctpPacket::Parse(arg, kDefaultOptions); - if (!packet.has_value()) { - *result_listener << "data didn't parse as an SctpPacket"; +MATCHER_P(IsHeartbeatRequest, properties, "") { + if (arg.type != HeartbeatRequestChunk::kType) { + *result_listener << "the chunk is not a HeartbeatRequestChunk"; return false; } - if (packet->descriptors()[0].type != ReConfigChunk::kType) { - *result_listener << "the first chunk in the packet is not a data chunk"; + std::optional chunk = + HeartbeatRequestChunk::Parse(arg.data); + if (!chunk.has_value()) { + *result_listener << "The chunk didn't parse as a HeartbeatRequestChunk"; return false; } - absl::optional reconfig = - ReConfigChunk::Parse(packet->descriptors()[0].data); - if (!reconfig.has_value()) { - *result_listener << "The first chunk didn't parse as a data chunk"; - return false; - } - - const Parameters& parameters = reconfig->parameters(); - if (parameters.descriptors().size() != 1 || - parameters.descriptors()[0].type != - OutgoingSSNResetRequestParameter::kType) { - *result_listener << "Expected the reconfig chunk to have an outgoing SSN " - "reset request parameter"; - return false; - } - - absl::optional p = - OutgoingSSNResetRequestParameter::Parse(parameters.descriptors()[0].data); - testing::Matcher> matcher = streams_matcher; - if (!matcher.MatchAndExplain(p->stream_ids(), result_listener)) { - return false; - } + return ExplainMatchResult(properties, *chunk, result_listener); +} - return true; +MATCHER_P(HasParameters, parameters, "") { + return ExplainMatchResult(parameters, arg.parameters().descriptors(), + result_listener); } -MATCHER_P(HasReconfigWithResponse, result, "") { - absl::optional packet = SctpPacket::Parse(arg, kDefaultOptions); - if (!packet.has_value()) { - *result_listener << "data didn't parse as an SctpPacket"; +MATCHER_P(IsOutgoingResetRequest, properties, "") { + if (arg.type != OutgoingSSNResetRequestParameter::kType) { + *result_listener + << "the parameter is not an OutgoingSSNResetRequestParameter"; return false; } - if (packet->descriptors()[0].type != ReConfigChunk::kType) { - *result_listener << "the first chunk in the packet is not a reconfig chunk"; + std::optional parameter = + OutgoingSSNResetRequestParameter::Parse(arg.data); + if (!parameter.has_value()) { + *result_listener + << "The parameter didn't parse as an OutgoingSSNResetRequestParameter"; return false; } - absl::optional reconfig = - ReConfigChunk::Parse(packet->descriptors()[0].data); - if (!reconfig.has_value()) { - *result_listener << "The first chunk didn't parse as a reconfig chunk"; - return false; - } + return ExplainMatchResult(properties, *parameter, result_listener); +} - const Parameters& parameters = reconfig->parameters(); - if (parameters.descriptors().size() != 1 || - parameters.descriptors()[0].type != - ReconfigurationResponseParameter::kType) { - *result_listener << "Expected the reconfig chunk to have a " - "ReconfigurationResponse Parameter"; +MATCHER_P(IsReconfigurationResponse, properties, "") { + if (arg.type != ReconfigurationResponseParameter::kType) { + *result_listener + << "the parameter is not an ReconfigurationResponseParameter"; return false; } - absl::optional p = - ReconfigurationResponseParameter::Parse(parameters.descriptors()[0].data); - if (p->result() != result) { - *result_listener << "ReconfigurationResponse Parameter 
doesn't contain the " - "expected result"; + std::optional parameter = + ReconfigurationResponseParameter::Parse(arg.data); + if (!parameter.has_value()) { + *result_listener + << "The parameter didn't parse as an ReconfigurationResponseParameter"; return false; } - return true; + return ExplainMatchResult(properties, *parameter, result_listener); } TSN AddTo(TSN tsn, int delta) { @@ -359,7 +265,7 @@ void ExchangeMessages(SocketUnderTest& a, SocketUnderTest& z) { void RunTimers(SocketUnderTest& s) { for (;;) { - absl::optional timeout_id = s.cb.GetNextExpiredTimeout(); + std::optional timeout_id = s.cb.GetNextExpiredTimeout(); if (!timeout_id.has_value()) { break; } @@ -367,7 +273,7 @@ void RunTimers(SocketUnderTest& s) { } } -void AdvanceTime(SocketUnderTest& a, SocketUnderTest& z, DurationMs duration) { +void AdvanceTime(SocketUnderTest& a, SocketUnderTest& z, TimeDelta duration) { a.cb.AdvanceTime(duration); z.cb.AdvanceTime(duration); @@ -375,6 +281,26 @@ void AdvanceTime(SocketUnderTest& a, SocketUnderTest& z, DurationMs duration) { RunTimers(z); } +// Exchanges messages between `a` and `z`, advancing time until there are no +// more pending timers, or until `max_timeout` is reached. +void ExchangeMessagesAndAdvanceTime( + SocketUnderTest& a, + SocketUnderTest& z, + TimeDelta max_timeout = TimeDelta::Seconds(10)) { + Timestamp time_started = a.cb.Now(); + while (a.cb.Now() - time_started < max_timeout) { + ExchangeMessages(a, z); + + TimeDelta time_to_next_timeout = + std::min(a.cb.GetTimeToNextTimeout(), z.cb.GetTimeToNextTimeout()); + if (time_to_next_timeout.IsPlusInfinity()) { + // No more pending timer. + return; + } + AdvanceTime(a, z, time_to_next_timeout); + } +} + // Calls Connect() on `sock_a_` and make the connection established. void ConnectSockets(SocketUnderTest& a, SocketUnderTest& z) { EXPECT_CALL(a.cb, OnConnected).Times(1); @@ -399,7 +325,7 @@ std::unique_ptr HandoverSocket( if (!is_closed) { EXPECT_CALL(sut->cb, OnClosed).Times(1); } - absl::optional handover_state = + std::optional handover_state = sut->socket.GetHandoverStateAndClose(); EXPECT_TRUE(handover_state.has_value()); g_handover_state_transformer_for_test(&*handover_state); @@ -415,7 +341,7 @@ std::unique_ptr HandoverSocket( std::vector GetReceivedMessagePpids(SocketUnderTest& z) { std::vector ppids; for (;;) { - absl::optional msg = z.cb.ConsumeReceivedMessage(); + std::optional msg = z.cb.ConsumeReceivedMessage(); if (!msg.has_value()) { break; } @@ -457,7 +383,7 @@ class DcSctpSocketParametrizedTest a.socket.Send(DcSctpMessage(StreamID(1), PPID(53), {1, 2}), kSendOptions); ExchangeMessages(a, *z); - absl::optional msg = z->cb.ConsumeReceivedMessage(); + std::optional msg = z->cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg.has_value()); EXPECT_EQ(msg->stream_id(), StreamID(1)); } @@ -613,7 +539,7 @@ TEST(DcSctpSocketTest, EstablishConnectionLostCookieAck) { EXPECT_EQ(z.socket.state(), SocketState::kConnected); // This will make A re-send the COOKIE_ECHO - AdvanceTime(a, z, DurationMs(a.options.t1_cookie_timeout)); + AdvanceTime(a, z, a.options.t1_cookie_timeout.ToTimeDelta()); // Z reads COOKIE_ECHO, produces COOKIE_ACK z.socket.ReceivePacket(a.cb.ConsumeSentPacket()); @@ -630,12 +556,10 @@ TEST(DcSctpSocketTest, ResendInitAndEstablishConnection) { a.socket.Connect(); // INIT is never received by Z. 
- ASSERT_HAS_VALUE_AND_ASSIGN( - SctpPacket init_packet, - SctpPacket::Parse(a.cb.ConsumeSentPacket(), z.options)); - EXPECT_EQ(init_packet.descriptors()[0].type, InitChunk::kType); + EXPECT_THAT(a.cb.ConsumeSentPacket(), + HasChunks(ElementsAre(IsChunkType(InitChunk::kType)))); - AdvanceTime(a, z, a.options.t1_init_timeout); + AdvanceTime(a, z, a.options.t1_init_timeout.ToTimeDelta()); // Z reads INIT, produces INIT_ACK z.socket.ReceivePacket(a.cb.ConsumeSentPacket()); @@ -657,25 +581,22 @@ TEST(DcSctpSocketTest, ResendingInitTooManyTimesAborts) { a.socket.Connect(); // INIT is never received by Z. - ASSERT_HAS_VALUE_AND_ASSIGN( - SctpPacket init_packet, - SctpPacket::Parse(a.cb.ConsumeSentPacket(), z.options)); - EXPECT_EQ(init_packet.descriptors()[0].type, InitChunk::kType); + EXPECT_THAT(a.cb.ConsumeSentPacket(), + HasChunks(ElementsAre(IsChunkType(InitChunk::kType)))); for (int i = 0; i < *a.options.max_init_retransmits; ++i) { - AdvanceTime(a, z, a.options.t1_init_timeout * (1 << i)); + AdvanceTime(a, z, a.options.t1_init_timeout.ToTimeDelta() * (1 << i)); // INIT is resent - ASSERT_HAS_VALUE_AND_ASSIGN( - SctpPacket resent_init_packet, - SctpPacket::Parse(a.cb.ConsumeSentPacket(), z.options)); - EXPECT_EQ(resent_init_packet.descriptors()[0].type, InitChunk::kType); + EXPECT_THAT(a.cb.ConsumeSentPacket(), + HasChunks(ElementsAre(IsChunkType(InitChunk::kType)))); } // Another timeout, after the max init retransmits. EXPECT_CALL(a.cb, OnAborted).Times(1); - AdvanceTime( - a, z, a.options.t1_init_timeout * (1 << *a.options.max_init_retransmits)); + AdvanceTime(a, z, + a.options.t1_init_timeout.ToTimeDelta() * + (1 << *a.options.max_init_retransmits)); EXPECT_EQ(a.socket.state(), SocketState::kClosed); } @@ -692,12 +613,10 @@ TEST(DcSctpSocketTest, ResendCookieEchoAndEstablishConnection) { a.socket.ReceivePacket(z.cb.ConsumeSentPacket()); // COOKIE_ECHO is never received by Z. - ASSERT_HAS_VALUE_AND_ASSIGN( - SctpPacket init_packet, - SctpPacket::Parse(a.cb.ConsumeSentPacket(), z.options)); - EXPECT_EQ(init_packet.descriptors()[0].type, CookieEchoChunk::kType); + EXPECT_THAT(a.cb.ConsumeSentPacket(), + HasChunks(ElementsAre(IsChunkType(CookieEchoChunk::kType)))); - AdvanceTime(a, z, a.options.t1_init_timeout); + AdvanceTime(a, z, a.options.t1_init_timeout.ToTimeDelta()); // Z reads COOKIE_ECHO, produces COOKIE_ACK z.socket.ReceivePacket(a.cb.ConsumeSentPacket()); @@ -720,26 +639,22 @@ TEST(DcSctpSocketTest, ResendingCookieEchoTooManyTimesAborts) { a.socket.ReceivePacket(z.cb.ConsumeSentPacket()); // COOKIE_ECHO is never received by Z. - ASSERT_HAS_VALUE_AND_ASSIGN( - SctpPacket init_packet, - SctpPacket::Parse(a.cb.ConsumeSentPacket(), z.options)); - EXPECT_EQ(init_packet.descriptors()[0].type, CookieEchoChunk::kType); + EXPECT_THAT(a.cb.ConsumeSentPacket(), + HasChunks(ElementsAre(IsChunkType(CookieEchoChunk::kType)))); for (int i = 0; i < *a.options.max_init_retransmits; ++i) { - AdvanceTime(a, z, a.options.t1_cookie_timeout * (1 << i)); + AdvanceTime(a, z, a.options.t1_cookie_timeout.ToTimeDelta() * (1 << i)); // COOKIE_ECHO is resent - ASSERT_HAS_VALUE_AND_ASSIGN( - SctpPacket resent_init_packet, - SctpPacket::Parse(a.cb.ConsumeSentPacket(), z.options)); - EXPECT_EQ(resent_init_packet.descriptors()[0].type, CookieEchoChunk::kType); + EXPECT_THAT(a.cb.ConsumeSentPacket(), + HasChunks(ElementsAre(IsChunkType(CookieEchoChunk::kType)))); } // Another timeout, after the max init retransmits. 
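// The loops above double the wait on every attempt: retransmission i
// (zero-based) fires t1_init_timeout * 2^i after the previous send, and the
// expiry that follows the last allowed retransmit aborts the socket. A
// minimal sketch of that schedule, assuming an illustrative initial timeout
// of 1000 ms rather than the library's actual default, and <cstdint> for
// int64_t:
//
//   constexpr int64_t BackoffMs(int64_t initial_ms, int attempt) {
//     return initial_ms * (int64_t{1} << attempt);
//   }
//   static_assert(BackoffMs(1000, 0) == 1000, "first re-send");
//   static_assert(BackoffMs(1000, 2) == 4000, "third re-send");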
EXPECT_CALL(a.cb, OnAborted).Times(1); - AdvanceTime( - a, z, - a.options.t1_cookie_timeout * (1 << *a.options.max_init_retransmits)); + AdvanceTime(a, z, + a.options.t1_cookie_timeout.ToTimeDelta() * + (1 << *a.options.max_init_retransmits)); EXPECT_EQ(a.socket.state(), SocketState::kClosed); } @@ -759,12 +674,9 @@ TEST(DcSctpSocketTest, DoesntSendMorePacketsUntilCookieAckHasBeenReceived) { a.socket.ReceivePacket(z.cb.ConsumeSentPacket()); // COOKIE_ECHO is never received by Z. - ASSERT_HAS_VALUE_AND_ASSIGN( - SctpPacket cookie_echo_packet1, - SctpPacket::Parse(a.cb.ConsumeSentPacket(), z.options)); - EXPECT_THAT(cookie_echo_packet1.descriptors(), SizeIs(2)); - EXPECT_EQ(cookie_echo_packet1.descriptors()[0].type, CookieEchoChunk::kType); - EXPECT_EQ(cookie_echo_packet1.descriptors()[1].type, DataChunk::kType); + EXPECT_THAT(a.cb.ConsumeSentPacket(), + HasChunks(ElementsAre(IsChunkType(CookieEchoChunk::kType), + IsDataChunk(_)))); EXPECT_THAT(a.cb.ConsumeSentPacket(), IsEmpty()); @@ -773,24 +685,23 @@ TEST(DcSctpSocketTest, DoesntSendMorePacketsUntilCookieAckHasBeenReceived) { // will be T1-COOKIE that drives retransmissions, so when the T3-RTX expires, // nothing should be retransmitted. ASSERT_TRUE(a.options.rto_initial < a.options.t1_cookie_timeout); - AdvanceTime(a, z, a.options.rto_initial); + AdvanceTime(a, z, a.options.rto_initial.ToTimeDelta()); EXPECT_THAT(a.cb.ConsumeSentPacket(), IsEmpty()); // When T1-COOKIE expires, both the COOKIE-ECHO and DATA should be present. - AdvanceTime(a, z, a.options.t1_cookie_timeout - a.options.rto_initial); + AdvanceTime(a, z, + a.options.t1_cookie_timeout.ToTimeDelta() - + a.options.rto_initial.ToTimeDelta()); // And this COOKIE-ECHO and DATA is also lost - never received by Z. - ASSERT_HAS_VALUE_AND_ASSIGN( - SctpPacket cookie_echo_packet2, - SctpPacket::Parse(a.cb.ConsumeSentPacket(), z.options)); - EXPECT_THAT(cookie_echo_packet2.descriptors(), SizeIs(2)); - EXPECT_EQ(cookie_echo_packet2.descriptors()[0].type, CookieEchoChunk::kType); - EXPECT_EQ(cookie_echo_packet2.descriptors()[1].type, DataChunk::kType); + EXPECT_THAT(a.cb.ConsumeSentPacket(), + HasChunks(ElementsAre(IsChunkType(CookieEchoChunk::kType), + IsDataChunk(_)))); EXPECT_THAT(a.cb.ConsumeSentPacket(), IsEmpty()); // COOKIE_ECHO has exponential backoff. - AdvanceTime(a, z, a.options.t1_cookie_timeout * 2); + AdvanceTime(a, z, a.options.t1_cookie_timeout.ToTimeDelta() * 2); // Z reads COOKIE_ECHO, produces COOKIE_ACK z.socket.ReceivePacket(a.cb.ConsumeSentPacket()); @@ -843,25 +754,22 @@ TEST(DcSctpSocketTest, ShutdownTimerExpiresTooManyTimeClosesConnection) { EXPECT_EQ(a.socket.state(), SocketState::kShuttingDown); for (int i = 0; i < *a.options.max_retransmissions; ++i) { - AdvanceTime(a, z, DurationMs(a.options.rto_initial * (1 << i))); + AdvanceTime(a, z, a.options.rto_initial.ToTimeDelta() * (1 << i)); // Dropping every shutdown chunk. - ASSERT_HAS_VALUE_AND_ASSIGN( - SctpPacket packet, - SctpPacket::Parse(a.cb.ConsumeSentPacket(), z.options)); - EXPECT_EQ(packet.descriptors()[0].type, ShutdownChunk::kType); + EXPECT_THAT(a.cb.ConsumeSentPacket(), + HasChunks(ElementsAre(IsChunkType(ShutdownChunk::kType)))); EXPECT_TRUE(a.cb.ConsumeSentPacket().empty()); } // The last expiry, makes it abort the connection. 
EXPECT_CALL(a.cb, OnAborted).Times(1); AdvanceTime(a, z, - a.options.rto_initial * (1 << *a.options.max_retransmissions)); + a.options.rto_initial.ToTimeDelta() * + (1 << *a.options.max_retransmissions)); EXPECT_EQ(a.socket.state(), SocketState::kClosed); - ASSERT_HAS_VALUE_AND_ASSIGN( - SctpPacket packet, - SctpPacket::Parse(a.cb.ConsumeSentPacket(), z.options)); - EXPECT_EQ(packet.descriptors()[0].type, AbortChunk::kType); + EXPECT_THAT(a.cb.ConsumeSentPacket(), + HasChunks(ElementsAre(IsChunkType(AbortChunk::kType)))); EXPECT_TRUE(a.cb.ConsumeSentPacket().empty()); } @@ -885,7 +793,7 @@ TEST(DcSctpSocketTest, EstablishConnectionWhileSendingData) { EXPECT_EQ(a.socket.state(), SocketState::kConnected); EXPECT_EQ(z.socket.state(), SocketState::kConnected); - absl::optional msg = z.cb.ConsumeReceivedMessage(); + std::optional msg = z.cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg.has_value()); EXPECT_EQ(msg->stream_id(), StreamID(1)); } @@ -899,7 +807,7 @@ TEST(DcSctpSocketTest, SendMessageAfterEstablished) { a.socket.Send(DcSctpMessage(StreamID(1), PPID(53), {1, 2}), kSendOptions); z.socket.ReceivePacket(a.cb.ConsumeSentPacket()); - absl::optional msg = z.cb.ConsumeReceivedMessage(); + std::optional msg = z.cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg.has_value()); EXPECT_EQ(msg->stream_id(), StreamID(1)); } @@ -915,11 +823,11 @@ TEST_P(DcSctpSocketParametrizedTest, TimeoutResendsPacket) { a.cb.ConsumeSentPacket(); RTC_LOG(LS_INFO) << "Advancing time"; - AdvanceTime(a, *z, a.options.rto_initial); + AdvanceTime(a, *z, a.options.rto_initial.ToTimeDelta()); z->socket.ReceivePacket(a.cb.ConsumeSentPacket()); - absl::optional msg = z->cb.ConsumeReceivedMessage(); + std::optional msg = z->cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg.has_value()); EXPECT_EQ(msg->stream_id(), StreamID(1)); @@ -944,7 +852,7 @@ TEST_P(DcSctpSocketParametrizedTest, SendALotOfBytesMissedSecondPacket) { // Retransmit and handle the rest ExchangeMessages(a, *z); - absl::optional msg = z->cb.ConsumeReceivedMessage(); + std::optional msg = z->cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg.has_value()); EXPECT_EQ(msg->stream_id(), StreamID(1)); EXPECT_THAT(msg->payload(), testing::ElementsAreArray(payload)); @@ -968,15 +876,11 @@ TEST_P(DcSctpSocketParametrizedTest, SendingHeartbeatAnswersWithAck) { a.socket.ReceivePacket(b.Build()); // HEARTBEAT_ACK is sent as a reply. Capture it. 
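// The rewritten expectation that follows uses gmock's testing::Optional to
// reach through an optional-returning accessor (HeartbeatAckChunk::info). A
// minimal, self-contained sketch of that composition, with a hypothetical
// Ack type standing in for the real chunk:
//
//   #include <optional>
//   #include "gmock/gmock.h"
//   #include "gtest/gtest.h"
//
//   class Ack {
//    public:
//     explicit Ack(std::optional<int> info) : info_(info) {}
//     std::optional<int> info() const { return info_; }
//
//    private:
//     std::optional<int> info_;
//   };
//
//   TEST(OptionalMatcherSketch, MatchesThroughOptional) {
//     Ack ack(42);
//     EXPECT_THAT(ack, testing::Property(
//                          &Ack::info, testing::Optional(testing::Gt(40))));
//   }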
- ASSERT_HAS_VALUE_AND_ASSIGN( - SctpPacket ack_packet, - SctpPacket::Parse(a.cb.ConsumeSentPacket(), z->options)); - ASSERT_THAT(ack_packet.descriptors(), SizeIs(1)); - ASSERT_HAS_VALUE_AND_ASSIGN( - HeartbeatAckChunk ack, - HeartbeatAckChunk::Parse(ack_packet.descriptors()[0].data)); - ASSERT_HAS_VALUE_AND_ASSIGN(HeartbeatInfoParameter info_param, ack.info()); - EXPECT_THAT(info_param.info(), ElementsAre(1, 2, 3, 4)); + EXPECT_THAT(a.cb.ConsumeSentPacket(), + HasChunks(ElementsAre(IsHeartbeatAck( + Property(&HeartbeatAckChunk::info, + Optional(Property(&HeartbeatInfoParameter::info, + ElementsAre(1, 2, 3, 4)))))))); MaybeHandoverSocketAndSendMessage(a, std::move(z)); } @@ -990,27 +894,54 @@ TEST_P(DcSctpSocketParametrizedTest, ExpectHeartbeatToBeSent) { EXPECT_THAT(a.cb.ConsumeSentPacket(), IsEmpty()); - AdvanceTime(a, *z, a.options.heartbeat_interval); - - std::vector hb_packet_raw = a.cb.ConsumeSentPacket(); - ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket hb_packet, - SctpPacket::Parse(hb_packet_raw, z->options)); - ASSERT_THAT(hb_packet.descriptors(), SizeIs(1)); - ASSERT_HAS_VALUE_AND_ASSIGN( - HeartbeatRequestChunk hb, - HeartbeatRequestChunk::Parse(hb_packet.descriptors()[0].data)); - ASSERT_HAS_VALUE_AND_ASSIGN(HeartbeatInfoParameter info_param, hb.info()); + AdvanceTime(a, *z, a.options.heartbeat_interval.ToTimeDelta()); + std::vector packet = a.cb.ConsumeSentPacket(); // The info is a single 64-bit number. - EXPECT_THAT(hb.info()->info(), SizeIs(8)); + EXPECT_THAT( + packet, + HasChunks(ElementsAre(IsHeartbeatRequest(Property( + &HeartbeatRequestChunk::info, + Optional(Property(&HeartbeatInfoParameter::info, SizeIs(8)))))))); // Feed it to Sock-z and expect a HEARTBEAT_ACK that will be propagated back. - z->socket.ReceivePacket(hb_packet_raw); + z->socket.ReceivePacket(packet); a.socket.ReceivePacket(z->cb.ConsumeSentPacket()); MaybeHandoverSocketAndSendMessage(a, std::move(z)); } +TEST(DcSctpSocketParametrizedTest, BothSidesSendHeartbeats) { + // On an idle connection, both sides send heartbeats, and both sides acks. + + // Make them have slightly different heartbeat intervals, to validate that + // sending an ack by Z doesn't restart its heartbeat timer. + DcSctpOptions options_a = {.heartbeat_interval = DurationMs(1000)}; + SocketUnderTest a("A", options_a); + + DcSctpOptions options_z = {.heartbeat_interval = DurationMs(1100)}; + SocketUnderTest z("Z", options_z); + + ConnectSockets(a, z); + + AdvanceTime(a, z, TimeDelta::Millis(1000)); + + std::vector packet_a = a.cb.ConsumeSentPacket(); + EXPECT_THAT(packet_a, HasChunks(ElementsAre(IsHeartbeatRequest(_)))); + // Z receives heartbeat, sends ACK. + z.socket.ReceivePacket(packet_a); + a.socket.ReceivePacket(z.cb.ConsumeSentPacket()); + + // A little while later, Z should send heartbeats to A. + AdvanceTime(a, z, TimeDelta::Millis(100)); + + std::vector packet_z = z.cb.ConsumeSentPacket(); + EXPECT_THAT(packet_z, HasChunks(ElementsAre(IsHeartbeatRequest(_)))); + // A receives heartbeat, sends ACK. + a.socket.ReceivePacket(packet_z); + z.socket.ReceivePacket(a.cb.ConsumeSentPacket()); +} + TEST_P(DcSctpSocketParametrizedTest, CloseConnectionAfterTooManyLostHeartbeats) { SocketUnderTest a("A"); @@ -1028,7 +959,7 @@ TEST_P(DcSctpSocketParametrizedTest, for (int i = 0; i < *a.options.max_retransmissions; ++i) { RTC_LOG(LS_INFO) << "Letting HEARTBEAT interval timer expire - sending..."; - AdvanceTime(a, *z, time_to_next_hearbeat); + AdvanceTime(a, *z, time_to_next_hearbeat.ToTimeDelta()); // Dropping every heartbeat. 
ASSERT_HAS_VALUE_AND_ASSIGN( @@ -1037,20 +968,20 @@ TEST_P(DcSctpSocketParametrizedTest, EXPECT_EQ(hb_packet.descriptors()[0].type, HeartbeatRequestChunk::kType); RTC_LOG(LS_INFO) << "Letting the heartbeat expire."; - AdvanceTime(a, *z, DurationMs(1000)); + AdvanceTime(a, *z, TimeDelta::Millis(1000)); time_to_next_hearbeat = a.options.heartbeat_interval - DurationMs(1000); } RTC_LOG(LS_INFO) << "Letting HEARTBEAT interval timer expire - sending..."; - AdvanceTime(a, *z, time_to_next_hearbeat); + AdvanceTime(a, *z, time_to_next_hearbeat.ToTimeDelta()); // Last heartbeat EXPECT_THAT(a.cb.ConsumeSentPacket(), Not(IsEmpty())); EXPECT_CALL(a.cb, OnAborted).Times(1); // Should suffice as exceeding RTO - AdvanceTime(a, *z, DurationMs(1000)); + AdvanceTime(a, *z, TimeDelta::Millis(1000)); z = MaybeHandoverSocket(std::move(z)); } @@ -1067,7 +998,7 @@ TEST_P(DcSctpSocketParametrizedTest, RecoversAfterASuccessfulAck) { // Force-close socket Z so that it doesn't interfere from now on. z->socket.Close(); - DurationMs time_to_next_hearbeat = a.options.heartbeat_interval; + TimeDelta time_to_next_hearbeat = a.options.heartbeat_interval.ToTimeDelta(); for (int i = 0; i < *a.options.max_retransmissions; ++i) { AdvanceTime(a, *z, time_to_next_hearbeat); @@ -1076,9 +1007,10 @@ TEST_P(DcSctpSocketParametrizedTest, RecoversAfterASuccessfulAck) { a.cb.ConsumeSentPacket(); RTC_LOG(LS_INFO) << "Letting the heartbeat expire."; - AdvanceTime(a, *z, DurationMs(1000)); + AdvanceTime(a, *z, TimeDelta::Seconds(1)); - time_to_next_hearbeat = a.options.heartbeat_interval - DurationMs(1000); + time_to_next_hearbeat = + a.options.heartbeat_interval.ToTimeDelta() - TimeDelta::Seconds(1); } RTC_LOG(LS_INFO) << "Getting the last heartbeat - and acking it"; @@ -1098,7 +1030,7 @@ TEST_P(DcSctpSocketParametrizedTest, RecoversAfterASuccessfulAck) { // Should suffice as exceeding RTO - which will not fire. EXPECT_CALL(a.cb, OnAborted).Times(0); - AdvanceTime(a, *z, DurationMs(1000)); + AdvanceTime(a, *z, TimeDelta::Seconds(1)); EXPECT_THAT(a.cb.ConsumeSentPacket(), IsEmpty()); @@ -1106,10 +1038,8 @@ TEST_P(DcSctpSocketParametrizedTest, RecoversAfterASuccessfulAck) { RTC_LOG(LS_INFO) << "Expecting a new heartbeat"; AdvanceTime(a, *z, time_to_next_hearbeat); - ASSERT_HAS_VALUE_AND_ASSIGN( - SctpPacket another_packet, - SctpPacket::Parse(a.cb.ConsumeSentPacket(), z->options)); - EXPECT_EQ(another_packet.descriptors()[0].type, HeartbeatRequestChunk::kType); + EXPECT_THAT(a.cb.ConsumeSentPacket(), + HasChunks(ElementsAre(IsHeartbeatRequest(_)))); } TEST_P(DcSctpSocketParametrizedTest, ResetStream) { @@ -1122,7 +1052,7 @@ TEST_P(DcSctpSocketParametrizedTest, ResetStream) { a.socket.Send(DcSctpMessage(StreamID(1), PPID(53), {1, 2}), {}); z->socket.ReceivePacket(a.cb.ConsumeSentPacket()); - absl::optional msg = z->cb.ConsumeReceivedMessage(); + std::optional msg = z->cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg.has_value()); EXPECT_EQ(msg->stream_id(), StreamID(1)); @@ -1144,6 +1074,38 @@ TEST_P(DcSctpSocketParametrizedTest, ResetStream) { MaybeHandoverSocketAndSendMessage(a, std::move(z)); } +TEST(DcSctpSocketTest, SendReconfigWhenStreamsReady) { + DcSctpOptions options = {.cwnd_mtus_initial = 1}; + SocketUnderTest a("A", options); + SocketUnderTest z("Z", options); + + ConnectSockets(a, z); + + // Send a message so large so that it will not be sent in full, and still + // remaining in the send queue. 
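// With cwnd_mtus_initial set to 1 above, only part of the three-MTU message
// can be put in flight immediately, so the stream still has queued data when
// ResetStreams() is called below. The RECONFIG is therefore deferred until
// the stream is ready, and ExchangeMessagesAndAdvanceTime() is what drives
// the remaining timers so that both messages eventually arrive.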
+ a.socket.Send(DcSctpMessage(StreamID(1), PPID(51), + std::vector(options.mtu * 3)), + {.unordered = IsUnordered(false)}); + + // Reset the outgoing stream. RECONFIG can't be sent immediately as the stream + // is pending (not paused). + a.socket.ResetStreams(std::vector({StreamID(1)})); + + // This message sent directly after should be received eventually. + a.socket.Send(DcSctpMessage(StreamID(1), PPID(53), std::vector(100)), + {.unordered = IsUnordered(false)}); + + ExchangeMessagesAndAdvanceTime(a, z); + + std::optional msg1 = z.cb.ConsumeReceivedMessage(); + ASSERT_TRUE(msg1.has_value()); + EXPECT_EQ(msg1->ppid(), PPID(51)); + + std::optional msg2 = z.cb.ConsumeReceivedMessage(); + ASSERT_TRUE(msg2.has_value()); + EXPECT_EQ(msg2->ppid(), PPID(53)); +} + TEST_P(DcSctpSocketParametrizedTest, ResetStreamWillMakeChunksStartAtZeroSsn) { SocketUnderTest a("A"); auto z = std::make_unique("Z"); @@ -1157,21 +1119,25 @@ TEST_P(DcSctpSocketParametrizedTest, ResetStreamWillMakeChunksStartAtZeroSsn) { a.socket.Send(DcSctpMessage(StreamID(1), PPID(53), payload), {}); auto packet1 = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet1, HasDataChunkWithSsn(SSN(0))); + EXPECT_THAT( + packet1, + HasChunks(ElementsAre(IsDataChunk(Property(&DataChunk::ssn, SSN(0)))))); z->socket.ReceivePacket(packet1); auto packet2 = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet2, HasDataChunkWithSsn(SSN(1))); + EXPECT_THAT( + packet2, + HasChunks(ElementsAre(IsDataChunk(Property(&DataChunk::ssn, SSN(1)))))); z->socket.ReceivePacket(packet2); // Handle SACK a.socket.ReceivePacket(z->cb.ConsumeSentPacket()); - absl::optional msg1 = z->cb.ConsumeReceivedMessage(); + std::optional msg1 = z->cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg1.has_value()); EXPECT_EQ(msg1->stream_id(), StreamID(1)); - absl::optional msg2 = z->cb.ConsumeReceivedMessage(); + std::optional msg2 = z->cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg2.has_value()); EXPECT_EQ(msg2->stream_id(), StreamID(1)); @@ -1187,11 +1153,15 @@ TEST_P(DcSctpSocketParametrizedTest, ResetStreamWillMakeChunksStartAtZeroSsn) { a.socket.Send(DcSctpMessage(StreamID(1), PPID(53), payload), {}); auto packet3 = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet3, HasDataChunkWithSsn(SSN(0))); + EXPECT_THAT( + packet3, + HasChunks(ElementsAre(IsDataChunk(Property(&DataChunk::ssn, SSN(0)))))); z->socket.ReceivePacket(packet3); auto packet4 = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet4, HasDataChunkWithSsn(SSN(1))); + EXPECT_THAT( + packet4, + HasChunks(ElementsAre(IsDataChunk(Property(&DataChunk::ssn, SSN(1)))))); z->socket.ReceivePacket(packet4); // Handle SACK @@ -1215,13 +1185,15 @@ TEST_P(DcSctpSocketParametrizedTest, a.socket.Send(DcSctpMessage(StreamID(1), PPID(53), payload), {}); auto packet1 = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet1, HasDataChunkWithStreamId(StreamID(1))); - EXPECT_THAT(packet1, HasDataChunkWithSsn(SSN(0))); + EXPECT_THAT(packet1, HasChunks(ElementsAre(IsDataChunk( + AllOf(Property(&DataChunk::stream_id, StreamID(1)), + Property(&DataChunk::ssn, SSN(0))))))); z->socket.ReceivePacket(packet1); auto packet2 = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet1, HasDataChunkWithStreamId(StreamID(1))); - EXPECT_THAT(packet2, HasDataChunkWithSsn(SSN(1))); + EXPECT_THAT(packet2, HasChunks(ElementsAre(IsDataChunk( + AllOf(Property(&DataChunk::stream_id, StreamID(1)), + Property(&DataChunk::ssn, SSN(1))))))); z->socket.ReceivePacket(packet2); // Handle SACK @@ -1231,29 +1203,31 @@ TEST_P(DcSctpSocketParametrizedTest, a.socket.Send(DcSctpMessage(StreamID(3), 
PPID(53), payload), {}); a.socket.Send(DcSctpMessage(StreamID(3), PPID(53), payload), {}); auto packet3 = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet3, HasDataChunkWithStreamId(StreamID(3))); - EXPECT_THAT(packet3, HasDataChunkWithSsn(SSN(0))); + EXPECT_THAT(packet3, HasChunks(ElementsAre(IsDataChunk( + AllOf(Property(&DataChunk::stream_id, StreamID(3)), + Property(&DataChunk::ssn, SSN(0))))))); z->socket.ReceivePacket(packet3); auto packet4 = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet4, HasDataChunkWithStreamId(StreamID(3))); - EXPECT_THAT(packet4, HasDataChunkWithSsn(SSN(1))); + EXPECT_THAT(packet4, HasChunks(ElementsAre(IsDataChunk( + AllOf(Property(&DataChunk::stream_id, StreamID(3)), + Property(&DataChunk::ssn, SSN(1))))))); z->socket.ReceivePacket(packet4); a.socket.ReceivePacket(z->cb.ConsumeSentPacket()); // Receive all messages. - absl::optional msg1 = z->cb.ConsumeReceivedMessage(); + std::optional msg1 = z->cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg1.has_value()); EXPECT_EQ(msg1->stream_id(), StreamID(1)); - absl::optional msg2 = z->cb.ConsumeReceivedMessage(); + std::optional msg2 = z->cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg2.has_value()); EXPECT_EQ(msg2->stream_id(), StreamID(1)); - absl::optional msg3 = z->cb.ConsumeReceivedMessage(); + std::optional msg3 = z->cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg3.has_value()); EXPECT_EQ(msg3->stream_id(), StreamID(3)); - absl::optional msg4 = z->cb.ConsumeReceivedMessage(); + std::optional msg4 = z->cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg4.has_value()); EXPECT_EQ(msg4->stream_id(), StreamID(3)); @@ -1270,13 +1244,16 @@ TEST_P(DcSctpSocketParametrizedTest, a.socket.Send(DcSctpMessage(StreamID(3), PPID(53), payload), {}); auto packet5 = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet5, HasDataChunkWithStreamId(StreamID(1))); - EXPECT_THAT(packet5, HasDataChunkWithSsn(SSN(2))); // Unchanged. + EXPECT_THAT(packet5, + HasChunks(ElementsAre(IsDataChunk( + AllOf(Property(&DataChunk::stream_id, StreamID(1)), + Property(&DataChunk::ssn, SSN(2))))))); // Unchanged. z->socket.ReceivePacket(packet5); auto packet6 = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet6, HasDataChunkWithStreamId(StreamID(3))); - EXPECT_THAT(packet6, HasDataChunkWithSsn(SSN(0))); // Reset. + EXPECT_THAT(packet6, HasChunks(ElementsAre(IsDataChunk(AllOf( + Property(&DataChunk::stream_id, StreamID(3)), + Property(&DataChunk::ssn, SSN(0))))))); // Reset z->socket.ReceivePacket(packet6); // Handle SACK @@ -1309,7 +1286,7 @@ TEST_P(DcSctpSocketParametrizedTest, OnePeerReconnects) { // have the wrong verification tag, those will yield errors. ExchangeMessages(a, z2); - absl::optional msg = z2.cb.ConsumeReceivedMessage(); + std::optional msg = z2.cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg.has_value()); EXPECT_EQ(msg->stream_id(), StreamID(1)); EXPECT_THAT(msg->payload(), testing::ElementsAreArray(payload)); @@ -1340,7 +1317,7 @@ TEST_P(DcSctpSocketParametrizedTest, SendMessageWithLimitedRtx) { a.socket.ReceivePacket(z->cb.ConsumeSentPacket()); // Handle delayed SACK for third DATA - AdvanceTime(a, *z, a.options.delayed_ack_max_timeout); + AdvanceTime(a, *z, a.options.delayed_ack_max_timeout.ToTimeDelta()); // Handle SACK for second DATA a.socket.ReceivePacket(z->cb.ConsumeSentPacket()); @@ -1349,7 +1326,7 @@ TEST_P(DcSctpSocketParametrizedTest, SendMessageWithLimitedRtx) { // in-flight and the reported gap could be due to out-of-order delivery. So // the RetransmissionQueue will not mark it as "to be retransmitted" until // after the t3-rtx timer has expired. 
- AdvanceTime(a, *z, a.options.rto_initial); + AdvanceTime(a, *z, a.options.rto_initial.ToTimeDelta()); // The chunk will be marked as retransmitted, and then as abandoned, which // will trigger a FORWARD-TSN to be sent. @@ -1360,15 +1337,15 @@ TEST_P(DcSctpSocketParametrizedTest, SendMessageWithLimitedRtx) { // Which will trigger a SACK a.socket.ReceivePacket(z->cb.ConsumeSentPacket()); - absl::optional msg1 = z->cb.ConsumeReceivedMessage(); + std::optional msg1 = z->cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg1.has_value()); EXPECT_EQ(msg1->ppid(), PPID(51)); - absl::optional msg2 = z->cb.ConsumeReceivedMessage(); + std::optional msg2 = z->cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg2.has_value()); EXPECT_EQ(msg2->ppid(), PPID(53)); - absl::optional msg3 = z->cb.ConsumeReceivedMessage(); + std::optional msg3 = z->cb.ConsumeReceivedMessage(); EXPECT_FALSE(msg3.has_value()); MaybeHandoverSocketAndSendMessage(a, std::move(z)); @@ -1396,54 +1373,62 @@ TEST_P(DcSctpSocketParametrizedTest, SendManyFragmentedMessagesWithLimitedRtx) { // First DATA, first fragment std::vector packet = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet, HasDataChunkWithPPID(PPID(51))); + EXPECT_THAT(packet, HasChunks(ElementsAre( + IsDataChunk(Property(&DataChunk::ppid, PPID(51)))))); z->socket.ReceivePacket(std::move(packet)); // First DATA, second fragment (lost) packet = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet, HasDataChunkWithPPID(PPID(51))); + EXPECT_THAT(packet, HasChunks(ElementsAre( + IsDataChunk(Property(&DataChunk::ppid, PPID(51)))))); // Second DATA, first fragment packet = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet, HasDataChunkWithPPID(PPID(52))); + EXPECT_THAT(packet, HasChunks(ElementsAre( + IsDataChunk(Property(&DataChunk::ppid, PPID(52)))))); z->socket.ReceivePacket(std::move(packet)); // Second DATA, second fragment (lost) packet = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet, HasDataChunkWithPPID(PPID(52))); - EXPECT_THAT(packet, HasDataChunkWithSsn(SSN(0))); + EXPECT_THAT(packet, HasChunks(ElementsAre(IsDataChunk( + AllOf(Property(&DataChunk::ppid, PPID(52)), + Property(&DataChunk::ssn, SSN(0))))))); // Third DATA, first fragment packet = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet, HasDataChunkWithPPID(PPID(53))); - EXPECT_THAT(packet, HasDataChunkWithSsn(SSN(0))); + EXPECT_THAT(packet, HasChunks(ElementsAre(IsDataChunk( + AllOf(Property(&DataChunk::ppid, PPID(53)), + Property(&DataChunk::ssn, SSN(0))))))); z->socket.ReceivePacket(std::move(packet)); // Third DATA, second fragment (lost) packet = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet, HasDataChunkWithPPID(PPID(53))); - EXPECT_THAT(packet, HasDataChunkWithSsn(SSN(0))); + EXPECT_THAT(packet, HasChunks(ElementsAre(IsDataChunk( + AllOf(Property(&DataChunk::ppid, PPID(53)), + Property(&DataChunk::ssn, SSN(0))))))); // Fourth DATA, first fragment packet = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet, HasDataChunkWithPPID(PPID(54))); - EXPECT_THAT(packet, HasDataChunkWithSsn(SSN(0))); + EXPECT_THAT(packet, HasChunks(ElementsAre(IsDataChunk( + AllOf(Property(&DataChunk::ppid, PPID(54)), + Property(&DataChunk::ssn, SSN(0))))))); z->socket.ReceivePacket(std::move(packet)); // Fourth DATA, second fragment packet = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet, HasDataChunkWithPPID(PPID(54))); - EXPECT_THAT(packet, HasDataChunkWithSsn(SSN(0))); + EXPECT_THAT(packet, HasChunks(ElementsAre(IsDataChunk( + AllOf(Property(&DataChunk::ppid, PPID(54)), + Property(&DataChunk::ssn, SSN(0))))))); z->socket.ReceivePacket(std::move(packet)); 
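// The expectations above rely on the composable matchers used throughout
// this file (HasChunks, IsDataChunk, IsSack, ...), which parse the chunk and
// delegate to ExplainMatchResult so that per-field checks can be written
// inline with Property/AllOf instead of one ad-hoc matcher per field. A
// minimal, self-contained sketch of the pattern, with a hypothetical
// FakeChunk type standing in for the real dcSCTP chunks:
//
//   #include <vector>
//   #include "gmock/gmock.h"
//   #include "gtest/gtest.h"
//
//   class FakeChunk {
//    public:
//     FakeChunk(int type, int ssn) : type_(type), ssn_(ssn) {}
//     int type() const { return type_; }
//     int ssn() const { return ssn_; }
//
//    private:
//     int type_;
//     int ssn_;
//   };
//
//   // Checks the chunk type, then forwards the inner matcher to the chunk.
//   MATCHER_P(IsFakeDataChunk, properties, "") {
//     if (arg.type() != 0) {
//       *result_listener << "the chunk is not a data chunk";
//       return false;
//     }
//     return testing::ExplainMatchResult(properties, arg, result_listener);
//   }
//
//   TEST(FakeChunkMatcherSketch, ComposesWithPropertyAndAllOf) {
//     std::vector<FakeChunk> packet = {FakeChunk(/*type=*/0, /*ssn=*/7)};
//     EXPECT_THAT(packet,
//                 testing::ElementsAre(IsFakeDataChunk(testing::AllOf(
//                     testing::Property(&FakeChunk::type, 0),
//                     testing::Property(&FakeChunk::ssn, 7)))));
//   }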
ExchangeMessages(a, *z); // Let the RTX timer expire, and exchange FORWARD-TSN/SACKs - AdvanceTime(a, *z, a.options.rto_initial); + AdvanceTime(a, *z, a.options.rto_initial.ToTimeDelta()); ExchangeMessages(a, *z); - absl::optional msg1 = z->cb.ConsumeReceivedMessage(); + std::optional msg1 = z->cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg1.has_value()); EXPECT_EQ(msg1->ppid(), PPID(54)); @@ -1558,7 +1543,9 @@ TEST(DcSctpSocketTest, PassingHighWatermarkWillOnlyAcceptCumAckTsn) { // First DATA will always trigger a SACK. It's not interesting. EXPECT_THAT(z.cb.ConsumeSentPacket(), - AllOf(HasSackWithCumAckTsn(tsn), HasSackWithNoGapAckBlocks())); + HasChunks(ElementsAre(IsSack( + AllOf(Property(&SackChunk::cumulative_tsn_ack, tsn), + Property(&SackChunk::gap_ack_blocks, IsEmpty())))))); // This DATA should be accepted - it's advancing cum ack tsn. z.socket.ReceivePacket( @@ -1569,11 +1556,12 @@ TEST(DcSctpSocketTest, PassingHighWatermarkWillOnlyAcceptCumAckTsn) { .Build()); // The receiver might have moved into delayed ack mode. - AdvanceTime(a, z, z.options.rto_initial); + AdvanceTime(a, z, z.options.rto_initial.ToTimeDelta()); - EXPECT_THAT( - z.cb.ConsumeSentPacket(), - AllOf(HasSackWithCumAckTsn(AddTo(tsn, 1)), HasSackWithNoGapAckBlocks())); + EXPECT_THAT(z.cb.ConsumeSentPacket(), + HasChunks(ElementsAre(IsSack( + AllOf(Property(&SackChunk::cumulative_tsn_ack, AddTo(tsn, 1)), + Property(&SackChunk::gap_ack_blocks, IsEmpty())))))); // This DATA will not be accepted - it's not advancing cum ack tsn. z.socket.ReceivePacket( @@ -1584,9 +1572,10 @@ TEST(DcSctpSocketTest, PassingHighWatermarkWillOnlyAcceptCumAckTsn) { .Build()); // Sack will be sent in IMMEDIATE mode when this is happening. - EXPECT_THAT( - z.cb.ConsumeSentPacket(), - AllOf(HasSackWithCumAckTsn(AddTo(tsn, 1)), HasSackWithNoGapAckBlocks())); + EXPECT_THAT(z.cb.ConsumeSentPacket(), + HasChunks(ElementsAre(IsSack( + AllOf(Property(&SackChunk::cumulative_tsn_ack, AddTo(tsn, 1)), + Property(&SackChunk::gap_ack_blocks, IsEmpty())))))); // This DATA will not be accepted either. z.socket.ReceivePacket( @@ -1597,9 +1586,10 @@ TEST(DcSctpSocketTest, PassingHighWatermarkWillOnlyAcceptCumAckTsn) { .Build()); // Sack will be sent in IMMEDIATE mode when this is happening. - EXPECT_THAT( - z.cb.ConsumeSentPacket(), - AllOf(HasSackWithCumAckTsn(AddTo(tsn, 1)), HasSackWithNoGapAckBlocks())); + EXPECT_THAT(z.cb.ConsumeSentPacket(), + HasChunks(ElementsAre(IsSack( + AllOf(Property(&SackChunk::cumulative_tsn_ack, AddTo(tsn, 1)), + Property(&SackChunk::gap_ack_blocks, IsEmpty())))))); // This DATA should be accepted, and it fills the reassembly queue. z.socket.ReceivePacket( @@ -1610,11 +1600,12 @@ TEST(DcSctpSocketTest, PassingHighWatermarkWillOnlyAcceptCumAckTsn) { .Build()); // The receiver might have moved into delayed ack mode. 
- AdvanceTime(a, z, z.options.rto_initial); + AdvanceTime(a, z, z.options.rto_initial.ToTimeDelta()); - EXPECT_THAT( - z.cb.ConsumeSentPacket(), - AllOf(HasSackWithCumAckTsn(AddTo(tsn, 2)), HasSackWithNoGapAckBlocks())); + EXPECT_THAT(z.cb.ConsumeSentPacket(), + HasChunks(ElementsAre(IsSack( + AllOf(Property(&SackChunk::cumulative_tsn_ack, AddTo(tsn, 2)), + Property(&SackChunk::gap_ack_blocks, IsEmpty())))))); EXPECT_CALL(z.cb, OnAborted(ErrorKind::kResourceExhaustion, _)); EXPECT_CALL(z.cb, OnClosed).Times(0); @@ -1635,6 +1626,33 @@ TEST(DcSctpSocketTest, SetMaxMessageSize) { EXPECT_EQ(a.socket.options().max_message_size, 42u); } +TEST_P(DcSctpSocketParametrizedTest, SendManyMessages) { + SocketUnderTest a("A"); + auto z = std::make_unique("Z"); + + ConnectSockets(a, *z); + z = MaybeHandoverSocket(std::move(z)); + + static constexpr int kIterations = 100; + std::vector messages; + std::vector statuses; + for (int i = 0; i < kIterations; ++i) { + messages.push_back(DcSctpMessage(StreamID(1), PPID(53), {1, 2})); + statuses.push_back(SendStatus::kSuccess); + } + EXPECT_THAT(a.socket.SendMany(messages, {}), ElementsAreArray(statuses)); + + ExchangeMessages(a, *z); + + for (int i = 0; i < kIterations; ++i) { + EXPECT_TRUE(z->cb.ConsumeReceivedMessage().has_value()); + } + + EXPECT_FALSE(z->cb.ConsumeReceivedMessage().has_value()); + + MaybeHandoverSocketAndSendMessage(a, std::move(z)); +} + TEST_P(DcSctpSocketParametrizedTest, SendsMessagesWithLowLifetime) { SocketUnderTest a("A"); auto z = std::make_unique("Z"); @@ -1643,13 +1661,13 @@ TEST_P(DcSctpSocketParametrizedTest, SendsMessagesWithLowLifetime) { z = MaybeHandoverSocket(std::move(z)); // Mock that the time always goes forward. - TimeMs now(0); - EXPECT_CALL(a.cb, TimeMillis).WillRepeatedly([&]() { - now += DurationMs(3); + Timestamp now = Timestamp::Zero(); + EXPECT_CALL(a.cb, Now).WillRepeatedly([&]() { + now += TimeDelta::Millis(3); return now; }); - EXPECT_CALL(z->cb, TimeMillis).WillRepeatedly([&]() { - now += DurationMs(3); + EXPECT_CALL(z->cb, Now).WillRepeatedly([&]() { + now += TimeDelta::Millis(3); return now; }); @@ -1673,7 +1691,7 @@ TEST_P(DcSctpSocketParametrizedTest, SendsMessagesWithLowLifetime) { EXPECT_FALSE(z->cb.ConsumeReceivedMessage().has_value()); // Validate that the sockets really make the time move forward. - EXPECT_GE(*now, kIterations * 2); + EXPECT_GE(now.ms(), kIterations * 2); MaybeHandoverSocketAndSendMessage(a, std::move(z)); } @@ -1695,13 +1713,13 @@ TEST_P(DcSctpSocketParametrizedTest, lifetime_1.lifetime = DurationMs(1); // Mock that the time always goes forward. 
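// As in the previous test, both callbacks share one fake Timestamp and bump
// it by 3 ms on every Now() call, so time advances deterministically without
// real sleeps and the 1 ms lifetime configured above is evaluated purely
// against this simulated clock.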
- TimeMs now(0); - EXPECT_CALL(a.cb, TimeMillis).WillRepeatedly([&]() { - now += DurationMs(3); + Timestamp now = Timestamp::Zero(); + EXPECT_CALL(a.cb, Now).WillRepeatedly([&]() { + now += TimeDelta::Millis(3); return now; }); - EXPECT_CALL(z->cb, TimeMillis).WillRepeatedly([&]() { - now += DurationMs(3); + EXPECT_CALL(z->cb, Now).WillRepeatedly([&]() { + now += TimeDelta::Millis(3); return now; }); @@ -1740,6 +1758,37 @@ TEST_P(DcSctpSocketParametrizedTest, MaybeHandoverSocketAndSendMessage(a, std::move(z)); } +TEST(DcSctpSocketTest, RespectsPerStreamQueueLimit) { + DcSctpOptions options = {.max_send_buffer_size = 4000, + .per_stream_send_queue_limit = 1000}; + SocketUnderTest a("A", options); + EXPECT_EQ(a.socket.Send( + DcSctpMessage(StreamID(1), PPID(53), std::vector(600)), + kSendOptions), + SendStatus::kSuccess); + EXPECT_EQ(a.socket.Send( + DcSctpMessage(StreamID(1), PPID(53), std::vector(600)), + kSendOptions), + SendStatus::kSuccess); + EXPECT_EQ(a.socket.Send( + DcSctpMessage(StreamID(1), PPID(53), std::vector(600)), + kSendOptions), + SendStatus::kErrorResourceExhaustion); + // The per-stream limit for SID=1 is reached, but not SID=2. + EXPECT_EQ(a.socket.Send( + DcSctpMessage(StreamID(2), PPID(53), std::vector(600)), + kSendOptions), + SendStatus::kSuccess); + EXPECT_EQ(a.socket.Send( + DcSctpMessage(StreamID(2), PPID(53), std::vector(600)), + kSendOptions), + SendStatus::kSuccess); + EXPECT_EQ(a.socket.Send( + DcSctpMessage(StreamID(2), PPID(53), std::vector(600)), + kSendOptions), + SendStatus::kErrorResourceExhaustion); +} + TEST_P(DcSctpSocketParametrizedTest, HasReasonableBufferedAmountValues) { SocketUnderTest a("A"); auto z = std::make_unique("Z"); @@ -2025,7 +2074,7 @@ TEST(DcSctpSocketTest, RxAndTxPacketMetricsIncrease) { EXPECT_EQ(z.socket.GetMetrics()->rx_messages_count, 2u); // Delayed sack - AdvanceTime(a, z, a.options.delayed_ack_max_timeout); + AdvanceTime(a, z, a.options.delayed_ack_max_timeout.ToTimeDelta()); a.socket.ReceivePacket(z.cb.ConsumeSentPacket()); // SACK EXPECT_EQ(a.socket.GetMetrics()->unack_data_count, 0u); @@ -2062,7 +2111,7 @@ TEST(DcSctpSocketTest, RetransmissionMetricsAreSetForNormalRetransmit) { a.socket.Send(DcSctpMessage(StreamID(1), PPID(53), payload), kSendOptions); a.cb.ConsumeSentPacket(); - AdvanceTime(a, z, a.options.rto_initial); + AdvanceTime(a, z, a.options.rto_initial.ToTimeDelta()); ExchangeMessages(a, z); EXPECT_EQ(a.socket.GetMetrics()->rtx_packets_count, 1u); @@ -2125,55 +2174,6 @@ TEST_P(DcSctpSocketParametrizedTest, DoesntSendMoreThanMaxBurstPackets) { MaybeHandoverSocketAndSendMessage(a, std::move(z)); } -TEST_P(DcSctpSocketParametrizedTest, SendsOnlyLargePackets) { - SocketUnderTest a("A"); - auto z = std::make_unique("Z"); - - ConnectSockets(a, *z); - z = MaybeHandoverSocket(std::move(z)); - - // A really large message, to ensure that the congestion window is often full. 
- constexpr size_t kMessageSize = 100000; - a.socket.Send( - DcSctpMessage(StreamID(1), PPID(53), std::vector(kMessageSize)), - kSendOptions); - - bool delivered_packet = false; - std::vector data_packet_sizes; - do { - delivered_packet = false; - std::vector packet_from_a = a.cb.ConsumeSentPacket(); - if (!packet_from_a.empty()) { - data_packet_sizes.push_back(packet_from_a.size()); - delivered_packet = true; - z->socket.ReceivePacket(std::move(packet_from_a)); - } - std::vector packet_from_z = z->cb.ConsumeSentPacket(); - if (!packet_from_z.empty()) { - delivered_packet = true; - a.socket.ReceivePacket(std::move(packet_from_z)); - } - } while (delivered_packet); - - size_t packet_payload_bytes = - a.options.mtu - SctpPacket::kHeaderSize - DataChunk::kHeaderSize; - // +1 accounts for padding, and rounding up. - size_t expected_packets = - (kMessageSize + packet_payload_bytes - 1) / packet_payload_bytes + 1; - EXPECT_THAT(data_packet_sizes, SizeIs(expected_packets)); - - // Remove the last size - it will be the remainder. But all other sizes should - // be large. - data_packet_sizes.pop_back(); - - for (size_t size : data_packet_sizes) { - // The 4 is for padding/alignment. - EXPECT_GE(size, a.options.mtu - 4); - } - - MaybeHandoverSocketAndSendMessage(a, std::move(z)); -} - TEST(DcSctpSocketTest, SendMessagesAfterHandover) { SocketUnderTest a("A"); auto z = std::make_unique("Z"); @@ -2187,7 +2187,7 @@ TEST(DcSctpSocketTest, SendMessagesAfterHandover) { z = HandoverSocket(std::move(z)); - absl::optional msg; + std::optional msg; RTC_LOG(LS_INFO) << "Sending A #1"; @@ -2266,7 +2266,7 @@ TEST_P(DcSctpSocketParametrizedTest, CanLoseFirstOrderedMessage) { // First DATA is lost, and retransmission timer will delete it. a.cb.ConsumeSentPacket(); - AdvanceTime(a, *z, a.options.rto_initial); + AdvanceTime(a, *z, a.options.rto_initial.ToTimeDelta()); ExchangeMessages(a, *z); // Send a second message (SID=0, SSN=1). @@ -2274,7 +2274,7 @@ TEST_P(DcSctpSocketParametrizedTest, CanLoseFirstOrderedMessage) { ExchangeMessages(a, *z); // The Z socket should receive the second message, but not the first. - absl::optional msg = z->cb.ConsumeReceivedMessage(); + std::optional msg = z->cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg.has_value()); EXPECT_EQ(msg->ppid(), PPID(52)); @@ -2413,13 +2413,13 @@ TEST(DcSctpSocketTest, CloseStreamsWithPendingRequest) { ExchangeMessages(a, z); // Receive these messages - absl::optional msg1 = z.cb.ConsumeReceivedMessage(); + std::optional msg1 = z.cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg1.has_value()); EXPECT_EQ(msg1->stream_id(), StreamID(1)); - absl::optional msg2 = z.cb.ConsumeReceivedMessage(); + std::optional msg2 = z.cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg2.has_value()); EXPECT_EQ(msg2->stream_id(), StreamID(2)); - absl::optional msg3 = z.cb.ConsumeReceivedMessage(); + std::optional msg3 = z.cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg3.has_value()); EXPECT_EQ(msg3->stream_id(), StreamID(3)); @@ -2427,7 +2427,10 @@ TEST(DcSctpSocketTest, CloseStreamsWithPendingRequest) { a.socket.ResetStreams(std::vector({StreamID(1)})); std::vector packet = a.cb.ConsumeSentPacket(); - EXPECT_THAT(packet, HasReconfigWithStreams(ElementsAre(StreamID(1)))); + EXPECT_THAT(packet, HasChunks(ElementsAre(IsReConfig(HasParameters( + ElementsAre(IsOutgoingResetRequest(Property( + &OutgoingSSNResetRequestParameter::stream_ids, + ElementsAre(StreamID(1)))))))))); z.socket.ReceivePacket(std::move(packet)); // Sending more reset requests while this one is ongoing. 
@@ -2445,13 +2448,13 @@ TEST(DcSctpSocketTest, CloseStreamsWithPendingRequest) { ExchangeMessages(a, z); // Receive these messages - absl::optional msg4 = z.cb.ConsumeReceivedMessage(); + std::optional msg4 = z.cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg4.has_value()); EXPECT_EQ(msg4->stream_id(), StreamID(1)); - absl::optional msg5 = z.cb.ConsumeReceivedMessage(); + std::optional msg5 = z.cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg5.has_value()); EXPECT_EQ(msg5->stream_id(), StreamID(2)); - absl::optional msg6 = z.cb.ConsumeReceivedMessage(); + std::optional msg6 = z.cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg6.has_value()); EXPECT_EQ(msg6->stream_id(), StreamID(3)); } @@ -2557,7 +2560,7 @@ TEST(DcSctpSocketTest, SmallSentMessagesWithPrioWillArriveInSpecificOrder) { std::vector received_ppids; for (;;) { - absl::optional msg = z.cb.ConsumeReceivedMessage(); + std::optional msg = z.cb.ConsumeReceivedMessage(); if (!msg.has_value()) { break; } @@ -2652,7 +2655,7 @@ TEST(DcSctpSocketTest, LifecycleEventsAreGeneratedForAckedMessages) { EXPECT_CALL(a.cb, OnLifecycleEnd(LifecycleId(42))); ExchangeMessages(a, z); // In case of delayed ack. - AdvanceTime(a, z, a.options.delayed_ack_max_timeout); + AdvanceTime(a, z, a.options.delayed_ack_max_timeout.ToTimeDelta()); ExchangeMessages(a, z); EXPECT_THAT(GetReceivedMessagePpids(z), ElementsAre(101, 102, 103)); @@ -2695,15 +2698,15 @@ TEST(DcSctpSocketTest, LifecycleEventsForFailMaxRetransmissions) { ExchangeMessages(a, z); // Handle delayed SACK. - AdvanceTime(a, z, a.options.delayed_ack_max_timeout); + AdvanceTime(a, z, a.options.delayed_ack_max_timeout.ToTimeDelta()); ExchangeMessages(a, z); // The chunk is now NACKed. Let the RTO expire, to discard the message. - AdvanceTime(a, z, a.options.rto_initial); + AdvanceTime(a, z, a.options.rto_initial.ToTimeDelta()); ExchangeMessages(a, z); // Handle delayed SACK. - AdvanceTime(a, z, a.options.delayed_ack_max_timeout); + AdvanceTime(a, z, a.options.delayed_ack_max_timeout.ToTimeDelta()); ExchangeMessages(a, z); EXPECT_THAT(GetReceivedMessagePpids(z), ElementsAre(51, 53)); @@ -2750,7 +2753,7 @@ TEST(DcSctpSocketTest, LifecycleEventsForExpiredMessageWithLifetimeLimit) { .lifecycle_id = LifecycleId(1), }); - AdvanceTime(a, z, DurationMs(200)); + AdvanceTime(a, z, TimeDelta::Millis(200)); EXPECT_CALL(a.cb, OnLifecycleMessageExpired(LifecycleId(1), /*maybe_delivered=*/false)); @@ -2809,10 +2812,19 @@ TEST(DcSctpSocketTest, ResetStreamsDeferred) { auto data3 = a.cb.ConsumeSentPacket(); auto reconfig = a.cb.ConsumeSentPacket(); - EXPECT_THAT(data1, HasDataChunkWithSsn(SSN(0))); - EXPECT_THAT(data2, HasDataChunkWithSsn(SSN(0))); - EXPECT_THAT(data3, HasDataChunkWithSsn(SSN(1))); - EXPECT_THAT(reconfig, HasReconfigWithStreams(ElementsAre(StreamID(1)))); + EXPECT_THAT( + data1, + HasChunks(ElementsAre(IsDataChunk(Property(&DataChunk::ssn, SSN(0)))))); + EXPECT_THAT( + data2, + HasChunks(ElementsAre(IsDataChunk(Property(&DataChunk::ssn, SSN(0)))))); + EXPECT_THAT( + data3, + HasChunks(ElementsAre(IsDataChunk(Property(&DataChunk::ssn, SSN(1)))))); + EXPECT_THAT(reconfig, HasChunks(ElementsAre(IsReConfig(HasParameters( + ElementsAre(IsOutgoingResetRequest(Property( + &OutgoingSSNResetRequestParameter::stream_ids, + ElementsAre(StreamID(1)))))))))); // Receive them slightly out of order to make stream resetting deferred. 
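// Because Z sees the RECONFIG before the DATA chunks it covers, it answers
// "in progress"; A keeps the request outstanding, retries it after RTO, and
// only the retried request is acknowledged with kSuccessPerformed, as the
// expectations below spell out.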
z.socket.ReceivePacket(reconfig); @@ -2821,13 +2833,13 @@ TEST(DcSctpSocketTest, ResetStreamsDeferred) { z.socket.ReceivePacket(data2); z.socket.ReceivePacket(data3); - absl::optional msg1 = z.cb.ConsumeReceivedMessage(); + std::optional msg1 = z.cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg1.has_value()); EXPECT_EQ(msg1->stream_id(), StreamID(1)); EXPECT_EQ(msg1->ppid(), PPID(53)); EXPECT_EQ(msg1->payload().size(), kTwoFragmentsSize); - absl::optional msg2 = z.cb.ConsumeReceivedMessage(); + std::optional msg2 = z.cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg2.has_value()); EXPECT_EQ(msg2->stream_id(), StreamID(1)); EXPECT_EQ(msg2->ppid(), PPID(54)); @@ -2838,23 +2850,37 @@ TEST(DcSctpSocketTest, ResetStreamsDeferred) { // Z sent "in progress", which will make A buffer packets until it's sure // that the reconfiguration has been applied. A will retry - wait for that. - AdvanceTime(a, z, a.options.rto_initial); + AdvanceTime(a, z, a.options.rto_initial.ToTimeDelta()); auto reconfig2 = a.cb.ConsumeSentPacket(); - EXPECT_THAT(reconfig2, HasReconfigWithStreams(ElementsAre(StreamID(1)))); + EXPECT_THAT(reconfig2, HasChunks(ElementsAre(IsReConfig(HasParameters( + ElementsAre(IsOutgoingResetRequest(Property( + &OutgoingSSNResetRequestParameter::stream_ids, + ElementsAre(StreamID(1)))))))))); EXPECT_CALL(z.cb, OnIncomingStreamsReset(ElementsAre(StreamID(1)))); z.socket.ReceivePacket(reconfig2); auto reconfig3 = z.cb.ConsumeSentPacket(); - EXPECT_THAT(reconfig3, - HasReconfigWithResponse( - ReconfigurationResponseParameter::Result::kSuccessPerformed)); + EXPECT_THAT(reconfig3, HasChunks(ElementsAre(IsReConfig(HasParameters( + ElementsAre(IsReconfigurationResponse(Property( + &ReconfigurationResponseParameter::result, + ReconfigurationResponseParameter::Result:: + kSuccessPerformed)))))))); a.socket.ReceivePacket(reconfig3); - EXPECT_THAT(data1, HasDataChunkWithSsn(SSN(0))); - EXPECT_THAT(data2, HasDataChunkWithSsn(SSN(0))); - EXPECT_THAT(data3, HasDataChunkWithSsn(SSN(1))); - EXPECT_THAT(reconfig, HasReconfigWithStreams(ElementsAre(StreamID(1)))); + EXPECT_THAT( + data1, + HasChunks(ElementsAre(IsDataChunk(Property(&DataChunk::ssn, SSN(0)))))); + EXPECT_THAT( + data2, + HasChunks(ElementsAre(IsDataChunk(Property(&DataChunk::ssn, SSN(0)))))); + EXPECT_THAT( + data3, + HasChunks(ElementsAre(IsDataChunk(Property(&DataChunk::ssn, SSN(1)))))); + EXPECT_THAT(reconfig, HasChunks(ElementsAre(IsReConfig(HasParameters( + ElementsAre(IsOutgoingResetRequest(Property( + &OutgoingSSNResetRequestParameter::stream_ids, + ElementsAre(StreamID(1)))))))))); // Send a new message after the stream has been reset. 
a.socket.Send(DcSctpMessage(StreamID(1), PPID(55), @@ -2862,7 +2888,7 @@ TEST(DcSctpSocketTest, ResetStreamsDeferred) { {}); ExchangeMessages(a, z); - absl::optional msg3 = z.cb.ConsumeReceivedMessage(); + std::optional msg3 = z.cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg3.has_value()); EXPECT_EQ(msg3->stream_id(), StreamID(1)); EXPECT_EQ(msg3->ppid(), PPID(55)); @@ -2890,18 +2916,475 @@ TEST(DcSctpSocketTest, ResetStreamsWithPausedSenderResumesWhenPerformed) { EXPECT_CALL(z.cb, OnIncomingStreamsReset(ElementsAre(StreamID(1)))); ExchangeMessages(a, z); - absl::optional msg1 = z.cb.ConsumeReceivedMessage(); + std::optional msg1 = z.cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg1.has_value()); EXPECT_EQ(msg1->stream_id(), StreamID(1)); EXPECT_EQ(msg1->ppid(), PPID(51)); EXPECT_EQ(msg1->payload().size(), kSmallMessageSize); - absl::optional msg2 = z.cb.ConsumeReceivedMessage(); + std::optional msg2 = z.cb.ConsumeReceivedMessage(); ASSERT_TRUE(msg2.has_value()); EXPECT_EQ(msg2->stream_id(), StreamID(1)); EXPECT_EQ(msg2->ppid(), PPID(52)); EXPECT_EQ(msg2->payload().size(), kSmallMessageSize); } +TEST_P(DcSctpSocketParametrizedTest, ZeroChecksumMetricsAreSet) { + std::vector> combinations = { + {false, false}, {false, true}, {true, false}, {true, true}}; + for (const auto& [a_enable, z_enable] : combinations) { + DcSctpOptions a_options = { + .zero_checksum_alternate_error_detection_method = + a_enable + ? ZeroChecksumAlternateErrorDetectionMethod::LowerLayerDtls() + : ZeroChecksumAlternateErrorDetectionMethod::None()}; + DcSctpOptions z_options = { + .zero_checksum_alternate_error_detection_method = + z_enable + ? ZeroChecksumAlternateErrorDetectionMethod::LowerLayerDtls() + : ZeroChecksumAlternateErrorDetectionMethod::None()}; + + SocketUnderTest a("A", a_options); + auto z = std::make_unique("Z", z_options); + + ConnectSockets(a, *z); + z = MaybeHandoverSocket(std::move(z)); + + EXPECT_EQ(a.socket.GetMetrics()->uses_zero_checksum, a_enable && z_enable); + EXPECT_EQ(z->socket.GetMetrics()->uses_zero_checksum, a_enable && z_enable); + } +} + +TEST(DcSctpSocketTest, AlwaysSendsInitWithNonZeroChecksum) { + DcSctpOptions options = { + .zero_checksum_alternate_error_detection_method = + ZeroChecksumAlternateErrorDetectionMethod::LowerLayerDtls()}; + SocketUnderTest a("A", options); + + a.socket.Connect(); + std::vector data = a.cb.ConsumeSentPacket(); + ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket packet, + SctpPacket::Parse(data, options)); + EXPECT_THAT(packet.descriptors(), + ElementsAre(testing::Field(&SctpPacket::ChunkDescriptor::type, + InitChunk::kType))); + EXPECT_THAT(packet.common_header().checksum, Not(Eq(0u))); +} + +TEST(DcSctpSocketTest, MaySendInitAckWithZeroChecksum) { + DcSctpOptions options = { + .zero_checksum_alternate_error_detection_method = + ZeroChecksumAlternateErrorDetectionMethod::LowerLayerDtls()}; + SocketUnderTest a("A", options); + SocketUnderTest z("Z", options); + + a.socket.Connect(); + z.socket.ReceivePacket(a.cb.ConsumeSentPacket()); // INIT + + std::vector data = z.cb.ConsumeSentPacket(); + ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket packet, + SctpPacket::Parse(data, options)); + EXPECT_THAT(packet.descriptors(), + ElementsAre(testing::Field(&SctpPacket::ChunkDescriptor::type, + InitAckChunk::kType))); + EXPECT_THAT(packet.common_header().checksum, 0u); +} + +TEST(DcSctpSocketTest, AlwaysSendsCookieEchoWithNonZeroChecksum) { + DcSctpOptions options = { + .zero_checksum_alternate_error_detection_method = + ZeroChecksumAlternateErrorDetectionMethod::LowerLayerDtls()}; + 
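// Taken together, the checksum tests in this group pin down the negotiation
// rules: INIT and COOKIE_ECHO are always sent with a non-zero checksum,
// while INIT_ACK, COOKIE_ACK and DATA may be sent with a zero checksum once
// both sides have enabled the LowerLayerDtls error-detection method (see
// ZeroChecksumMetricsAreSet above, which requires both ends to opt in).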
SocketUnderTest a("A", options); + SocketUnderTest z("Z", options); + + a.socket.Connect(); + z.socket.ReceivePacket(a.cb.ConsumeSentPacket()); // INIT + a.socket.ReceivePacket(z.cb.ConsumeSentPacket()); // INIT-ACK + + std::vector data = a.cb.ConsumeSentPacket(); + ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket packet, + SctpPacket::Parse(data, options)); + EXPECT_THAT(packet.descriptors(), + ElementsAre(testing::Field(&SctpPacket::ChunkDescriptor::type, + CookieEchoChunk::kType))); + EXPECT_THAT(packet.common_header().checksum, Not(Eq(0u))); +} + +TEST(DcSctpSocketTest, SendsCookieAckWithZeroChecksum) { + DcSctpOptions options = { + .zero_checksum_alternate_error_detection_method = + ZeroChecksumAlternateErrorDetectionMethod::LowerLayerDtls()}; + SocketUnderTest a("A", options); + SocketUnderTest z("Z", options); + + a.socket.Connect(); + z.socket.ReceivePacket(a.cb.ConsumeSentPacket()); // INIT + a.socket.ReceivePacket(z.cb.ConsumeSentPacket()); // INIT-ACK + z.socket.ReceivePacket(a.cb.ConsumeSentPacket()); // COOKIE-ECHO + + std::vector data = z.cb.ConsumeSentPacket(); + ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket packet, + SctpPacket::Parse(data, options)); + EXPECT_THAT(packet.descriptors(), + ElementsAre(testing::Field(&SctpPacket::ChunkDescriptor::type, + CookieAckChunk::kType))); + EXPECT_THAT(packet.common_header().checksum, 0u); +} + +TEST_P(DcSctpSocketParametrizedTest, SendsDataWithZeroChecksum) { + DcSctpOptions options = { + .zero_checksum_alternate_error_detection_method = + ZeroChecksumAlternateErrorDetectionMethod::LowerLayerDtls()}; + SocketUnderTest a("A", options); + auto z = std::make_unique("Z", options); + + ConnectSockets(a, *z); + z = MaybeHandoverSocket(std::move(z)); + + std::vector payload(a.options.mtu - 100); + a.socket.Send(DcSctpMessage(StreamID(1), PPID(53), payload), {}); + + std::vector data = a.cb.ConsumeSentPacket(); + z->socket.ReceivePacket(data); + ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket packet, + SctpPacket::Parse(data, options)); + EXPECT_THAT(packet.descriptors(), + ElementsAre(testing::Field(&SctpPacket::ChunkDescriptor::type, + DataChunk::kType))); + EXPECT_THAT(packet.common_header().checksum, 0u); + + MaybeHandoverSocketAndSendMessage(a, std::move(z)); +} + +TEST_P(DcSctpSocketParametrizedTest, AllPacketsAfterConnectHaveZeroChecksum) { + DcSctpOptions options = { + .zero_checksum_alternate_error_detection_method = + ZeroChecksumAlternateErrorDetectionMethod::LowerLayerDtls()}; + SocketUnderTest a("A", options); + auto z = std::make_unique("Z", options); + + ConnectSockets(a, *z); + z = MaybeHandoverSocket(std::move(z)); + + // Send large messages in both directions, and verify that they arrive and + // that every packet has zero checksum. 
+ std::vector payload(kLargeMessageSize); + a.socket.Send(DcSctpMessage(StreamID(1), PPID(53), payload), kSendOptions); + z->socket.Send(DcSctpMessage(StreamID(1), PPID(53), payload), kSendOptions); + + for (;;) { + std::vector data; + if (data = a.cb.ConsumeSentPacket(); !data.empty()) { + ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket packet, + SctpPacket::Parse(data, options)); + EXPECT_THAT(packet.common_header().checksum, 0u); + z->socket.ReceivePacket(std::move(data)); + + } else if (data = z->cb.ConsumeSentPacket(); !data.empty()) { + ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket packet, + SctpPacket::Parse(data, options)); + EXPECT_THAT(packet.common_header().checksum, 0u); + a.socket.ReceivePacket(std::move(data)); + + } else { + break; + } + } + + std::optional msg1 = z->cb.ConsumeReceivedMessage(); + ASSERT_TRUE(msg1.has_value()); + EXPECT_THAT(msg1->payload(), SizeIs(kLargeMessageSize)); + + std::optional msg2 = a.cb.ConsumeReceivedMessage(); + ASSERT_TRUE(msg2.has_value()); + EXPECT_THAT(msg2->payload(), SizeIs(kLargeMessageSize)); + + MaybeHandoverSocketAndSendMessage(a, std::move(z)); +} + +TEST(DcSctpSocketTest, HandlesForwardTsnOutOfOrderWithStreamResetting) { + // This test ensures that receiving FORWARD-TSN and RECONFIG out of order is + // handled correctly. + SocketUnderTest a("A", {.heartbeat_interval = DurationMs(0)}); + SocketUnderTest z("Z", {.heartbeat_interval = DurationMs(0)}); + + ConnectSockets(a, z); + std::vector payload(kSmallMessageSize); + a.socket.Send(DcSctpMessage(StreamID(1), PPID(51), payload), + { + .max_retransmissions = 0, + }); + + // Packet is lost. + EXPECT_THAT(a.cb.ConsumeSentPacket(), + HasChunks(ElementsAre( + IsDataChunk(AllOf(Property(&DataChunk::ssn, SSN(0)), + Property(&DataChunk::ppid, PPID(51))))))); + AdvanceTime(a, z, a.options.rto_initial.ToTimeDelta()); + + auto fwd_tsn_packet = a.cb.ConsumeSentPacket(); + EXPECT_THAT(fwd_tsn_packet, + HasChunks(ElementsAre(IsChunkType(ForwardTsnChunk::kType)))); + // Reset stream 1 + a.socket.ResetStreams(std::vector({StreamID(1)})); + auto reconfig_packet = a.cb.ConsumeSentPacket(); + EXPECT_THAT(reconfig_packet, + HasChunks(ElementsAre(IsChunkType(ReConfigChunk::kType)))); + + // These two packets are received in the wrong order. + z.socket.ReceivePacket(reconfig_packet); + z.socket.ReceivePacket(fwd_tsn_packet); + ExchangeMessagesAndAdvanceTime(a, z); + + a.socket.Send(DcSctpMessage(StreamID(1), PPID(52), payload), {}); + a.socket.Send(DcSctpMessage(StreamID(1), PPID(53), payload), {}); + + auto data_packet_2 = a.cb.ConsumeSentPacket(); + auto data_packet_3 = a.cb.ConsumeSentPacket(); + EXPECT_THAT(data_packet_2, HasChunks(ElementsAre(IsDataChunk(AllOf( + Property(&DataChunk::ssn, SSN(0)), + Property(&DataChunk::ppid, PPID(52))))))); + EXPECT_THAT(data_packet_3, HasChunks(ElementsAre(IsDataChunk(AllOf( + Property(&DataChunk::ssn, SSN(1)), + Property(&DataChunk::ppid, PPID(53))))))); + + z.socket.ReceivePacket(data_packet_2); + z.socket.ReceivePacket(data_packet_3); + ASSERT_THAT(z.cb.ConsumeReceivedMessage(), + testing::Optional(Property(&DcSctpMessage::ppid, PPID(52)))); + ASSERT_THAT(z.cb.ConsumeReceivedMessage(), + testing::Optional(Property(&DcSctpMessage::ppid, PPID(53)))); +} + +TEST(DcSctpSocketTest, ResentInitHasSameParameters) { + // If an INIT chunk has to be resent (due to INIT_ACK not received in time), + // the resent INIT must have the same properties as the original one. 
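// This pairs with ResentInitAckHasDifferentParameters below, which verifies
// that the peer generates fresh parameters for every INIT_ACK, and with the
// two ConnectionCanContinueFrom*InitAck tests, which verify that the
// handshake can complete with a COOKIE_ECHO built from either INIT_ACK.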
+ SocketUnderTest a("A"); + SocketUnderTest z("Z"); + + a.socket.Connect(); + auto packet_1 = a.cb.ConsumeSentPacket(); + + // Times out, INIT is re-sent. + AdvanceTime(a, z, a.options.t1_init_timeout.ToTimeDelta()); + auto packet_2 = a.cb.ConsumeSentPacket(); + + ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket init_packet_1, + SctpPacket::Parse(packet_1, z.options)); + ASSERT_HAS_VALUE_AND_ASSIGN( + InitChunk init_chunk_1, + InitChunk::Parse(init_packet_1.descriptors()[0].data)); + + ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket init_packet_2, + SctpPacket::Parse(packet_2, z.options)); + ASSERT_HAS_VALUE_AND_ASSIGN( + InitChunk init_chunk_2, + InitChunk::Parse(init_packet_2.descriptors()[0].data)); + + EXPECT_EQ(init_chunk_1.initial_tsn(), init_chunk_2.initial_tsn()); + EXPECT_EQ(init_chunk_1.initiate_tag(), init_chunk_2.initiate_tag()); +} + +TEST(DcSctpSocketTest, ResentInitAckHasDifferentParameters) { + // For every INIT, an INIT_ACK is produced. Verify that the socket doesn't + // maintain any state by ensuring that two created INIT_ACKs for the same + // received INIT are different. + SocketUnderTest a("A"); + SocketUnderTest z("Z"); + + a.socket.Connect(); + auto packet_1 = a.cb.ConsumeSentPacket(); + EXPECT_THAT(packet_1, HasChunks(ElementsAre(IsChunkType(InitChunk::kType)))); + + z.socket.ReceivePacket(packet_1); + auto packet_2 = z.cb.ConsumeSentPacket(); + z.socket.ReceivePacket(packet_1); + auto packet_3 = z.cb.ConsumeSentPacket(); + + EXPECT_THAT(packet_2, + HasChunks(ElementsAre(IsChunkType(InitAckChunk::kType)))); + EXPECT_THAT(packet_3, + HasChunks(ElementsAre(IsChunkType(InitAckChunk::kType)))); + + ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket init_ack_packet_1, + SctpPacket::Parse(packet_2, z.options)); + ASSERT_HAS_VALUE_AND_ASSIGN( + InitAckChunk init_ack_chunk_1, + InitAckChunk::Parse(init_ack_packet_1.descriptors()[0].data)); + + ASSERT_HAS_VALUE_AND_ASSIGN(SctpPacket init_ack_packet_2, + SctpPacket::Parse(packet_3, z.options)); + ASSERT_HAS_VALUE_AND_ASSIGN( + InitAckChunk init_ack_chunk_2, + InitAckChunk::Parse(init_ack_packet_2.descriptors()[0].data)); + + EXPECT_NE(init_ack_chunk_1.initiate_tag(), init_ack_chunk_2.initiate_tag()); + EXPECT_NE(init_ack_chunk_1.initial_tsn(), init_ack_chunk_2.initial_tsn()); +} + +TEST(DcSctpSocketTest, ConnectionCanContinueFromFirstInitAck) { + // If an INIT chunk has to be resent (due to INIT_ACK not received in time), + // another INIT will be sent, and if both INITs were actually received, both + // will be responded to by an INIT_ACK. While these two INIT_ACKs may have + // different parameters, the connection must be able to finish with the cookie + // (as replied to using COOKIE_ECHO) from either INIT_ACK. + SocketUnderTest a("A"); + SocketUnderTest z("Z"); + + a.socket.Send(DcSctpMessage(StreamID(1), PPID(53), + std::vector(kLargeMessageSize)), + kSendOptions); + a.socket.Connect(); + auto init_1 = a.cb.ConsumeSentPacket(); + + // Times out, INIT is re-sent. 
+ AdvanceTime(a, z, a.options.t1_init_timeout.ToTimeDelta()); + auto init_2 = a.cb.ConsumeSentPacket(); + + EXPECT_THAT(init_1, HasChunks(ElementsAre(IsChunkType(InitChunk::kType)))); + EXPECT_THAT(init_2, HasChunks(ElementsAre(IsChunkType(InitChunk::kType)))); + + z.socket.ReceivePacket(init_1); + z.socket.ReceivePacket(init_2); + auto init_ack_1 = z.cb.ConsumeSentPacket(); + auto init_ack_2 = z.cb.ConsumeSentPacket(); + EXPECT_THAT(init_ack_1, + HasChunks(ElementsAre(IsChunkType(InitAckChunk::kType)))); + EXPECT_THAT(init_ack_2, + HasChunks(ElementsAre(IsChunkType(InitAckChunk::kType)))); + + a.socket.ReceivePacket(init_ack_1); + // Then let the rest continue. + ExchangeMessages(a, z); + + std::optional msg = z.cb.ConsumeReceivedMessage(); + ASSERT_TRUE(msg.has_value()); + EXPECT_EQ(msg->stream_id(), StreamID(1)); + EXPECT_THAT(msg->payload(), SizeIs(kLargeMessageSize)); +} + +TEST(DcSctpSocketTest, ConnectionCanContinueFromSecondInitAck) { + // Just as above, but discarding the first INIT_ACK. + SocketUnderTest a("A"); + SocketUnderTest z("Z"); + + a.socket.Send(DcSctpMessage(StreamID(1), PPID(53), + std::vector(kLargeMessageSize)), + kSendOptions); + a.socket.Connect(); + auto init_1 = a.cb.ConsumeSentPacket(); + + // Times out, INIT is re-sent. + AdvanceTime(a, z, a.options.t1_init_timeout.ToTimeDelta()); + auto init_2 = a.cb.ConsumeSentPacket(); + + EXPECT_THAT(init_1, HasChunks(ElementsAre(IsChunkType(InitChunk::kType)))); + EXPECT_THAT(init_2, HasChunks(ElementsAre(IsChunkType(InitChunk::kType)))); + + z.socket.ReceivePacket(init_1); + z.socket.ReceivePacket(init_2); + auto init_ack_1 = z.cb.ConsumeSentPacket(); + auto init_ack_2 = z.cb.ConsumeSentPacket(); + EXPECT_THAT(init_ack_1, + HasChunks(ElementsAre(IsChunkType(InitAckChunk::kType)))); + EXPECT_THAT(init_ack_2, + HasChunks(ElementsAre(IsChunkType(InitAckChunk::kType)))); + + a.socket.ReceivePacket(init_ack_2); + // Then let the rest continue. + ExchangeMessages(a, z); + + std::optional msg = z.cb.ConsumeReceivedMessage(); + ASSERT_TRUE(msg.has_value()); + EXPECT_EQ(msg->stream_id(), StreamID(1)); + EXPECT_THAT(msg->payload(), SizeIs(kLargeMessageSize)); +} + +TEST_P(DcSctpSocketParametrizedTest, LowCongestionWindowSetsIsackBit) { + // This test verifies the option `immediate_sack_under_cwnd_mtus`. + DcSctpOptions options = {.cwnd_mtus_initial = 4, + .immediate_sack_under_cwnd_mtus = 2}; + SocketUnderTest a("A", options); + SocketUnderTest z("Z"); + + ConnectSockets(a, z); + + EXPECT_EQ(a.socket.GetMetrics()->cwnd_bytes, + options.cwnd_mtus_initial * options.mtu); + + a.socket.Send(DcSctpMessage(StreamID(1), PPID(51), std::vector(1)), + SendOptions()); + + // Drop the first packet, and let T3-rtx fire, which lowers cwnd. + auto packet1 = a.cb.ConsumeSentPacket(); + EXPECT_THAT(packet1, + HasChunks(ElementsAre(IsDataChunk(AllOf( + Property(&DataChunk::stream_id, StreamID(1)), + Property(&DataChunk::options, + Field(&AnyDataChunk::Options::immediate_ack, + AnyDataChunk::ImmediateAckFlag(false)))))))); + + AdvanceTime(a, z, a.options.rto_initial.ToTimeDelta()); + EXPECT_EQ(a.socket.GetMetrics()->cwnd_bytes, 1 * options.mtu); + + // Observe that the retransmission will have the I-SACK bit set. 
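// The `immediate_sack_under_cwnd_mtus` behaviour verified in this test reduces to
// a threshold on the congestion window. Roughly, and only as a sketch of the idea
// rather than the implementation in this patch:
bool ShouldSetImmediateAckBit(const DcSctpOptions& options, size_t cwnd_bytes) {
  // Set the I-bit on outgoing DATA while cwnd is at or below the configured
  // number of MTUs, so the receiver SACKs at once instead of delaying.
  return cwnd_bytes <= options.immediate_sack_under_cwnd_mtus * options.mtu;
}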
+ auto packet2 = a.cb.ConsumeSentPacket(); + z.socket.ReceivePacket(packet2); + EXPECT_THAT(packet2, + HasChunks(ElementsAre(IsDataChunk(AllOf( + Property(&DataChunk::stream_id, StreamID(1)), + Property(&DataChunk::options, + Field(&AnyDataChunk::Options::immediate_ack, + AnyDataChunk::ImmediateAckFlag(true)))))))); + + // The receiver immediately SACKS. It would even without this bit set. + auto packet3 = z.cb.ConsumeSentPacket(); + a.socket.ReceivePacket(packet3); + EXPECT_THAT(packet3, HasChunks(ElementsAre(IsChunkType(SackChunk::kType)))); + + // Next sent chunk will also have the i-sack set, as cwnd is low. + a.socket.Send(DcSctpMessage(StreamID(1), PPID(53), + std::vector(kLargeMessageSize)), + kSendOptions); + + a.socket.Send(DcSctpMessage(StreamID(1), PPID(51), std::vector(1)), + SendOptions()); + + // Observe that the retransmission will have the I-SACK bit set. + auto packet4 = a.cb.ConsumeSentPacket(); + z.socket.ReceivePacket(packet4); + EXPECT_THAT(packet4, + HasChunks(ElementsAre(IsDataChunk(AllOf( + Property(&DataChunk::stream_id, StreamID(1)), + Property(&DataChunk::options, + Field(&AnyDataChunk::Options::immediate_ack, + AnyDataChunk::ImmediateAckFlag(true)))))))); + + // The receiver would normally delay this sack, but now it's sent directly. + auto packet5 = z.cb.ConsumeSentPacket(); + a.socket.ReceivePacket(packet5); + EXPECT_THAT(packet5, HasChunks(ElementsAre(IsChunkType(SackChunk::kType)))); + + // Transfer the rest of the message. + ExchangeMessages(a, z); + + // This will grow the cwnd, as the message was large. + EXPECT_GT(a.socket.GetMetrics()->cwnd_bytes, + options.immediate_sack_under_cwnd_mtus * options.mtu); + + // Future chunks will then not have the I-SACK bit set. + a.socket.Send(DcSctpMessage(StreamID(1), PPID(51), std::vector(1)), + SendOptions()); + + // Drop the first packet, and let T3-rtx fire, which lowers cwnd. + auto packet6 = a.cb.ConsumeSentPacket(); + EXPECT_THAT(packet6, + HasChunks(ElementsAre(IsDataChunk(AllOf( + Property(&DataChunk::stream_id, StreamID(1)), + Property(&DataChunk::options, + Field(&AnyDataChunk::Options::immediate_ack, + AnyDataChunk::ImmediateAckFlag(false)))))))); +} + } // namespace } // namespace dcsctp diff --git a/net/dcsctp/socket/heartbeat_handler.cc b/net/dcsctp/socket/heartbeat_handler.cc index 902dff962f..5131ee05b2 100644 --- a/net/dcsctp/socket/heartbeat_handler.cc +++ b/net/dcsctp/socket/heartbeat_handler.cc @@ -13,14 +13,15 @@ #include #include +#include #include #include #include #include "absl/functional/bind_front.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/units/time_delta.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" #include "net/dcsctp/packet/chunk/heartbeat_ack_chunk.h" @@ -35,6 +36,8 @@ #include "rtc_base/logging.h" namespace dcsctp { +using ::webrtc::TimeDelta; +using ::webrtc::Timestamp; // This is stored (in serialized form) as HeartbeatInfoParameter sent in // HeartbeatRequestChunk and received back in HeartbeatAckChunk. 
It should be @@ -49,11 +52,11 @@ class HeartbeatInfo { static constexpr size_t kBufferSize = sizeof(uint64_t); static_assert(kBufferSize == 8, "Unexpected buffer size"); - explicit HeartbeatInfo(TimeMs created_at) : created_at_(created_at) {} + explicit HeartbeatInfo(Timestamp created_at) : created_at_(created_at) {} std::vector Serialize() { - uint32_t high_bits = static_cast(*created_at_ >> 32); - uint32_t low_bits = static_cast(*created_at_); + uint32_t high_bits = static_cast(created_at_.ms() >> 32); + uint32_t low_bits = static_cast(created_at_.ms()); std::vector data(kBufferSize); BoundedByteWriter writer(data); @@ -62,12 +65,12 @@ class HeartbeatInfo { return data; } - static absl::optional Deserialize( - rtc::ArrayView data) { + static std::optional Deserialize( + webrtc::ArrayView data) { if (data.size() != kBufferSize) { RTC_LOG(LS_WARNING) << "Invalid heartbeat info: " << data.size() << " bytes"; - return absl::nullopt; + return std::nullopt; } BoundedByteReader reader(data); @@ -75,13 +78,13 @@ class HeartbeatInfo { uint32_t low_bits = reader.Load32<4>(); uint64_t created_at = static_cast(high_bits) << 32 | low_bits; - return HeartbeatInfo(TimeMs(created_at)); + return HeartbeatInfo(Timestamp::Millis(created_at)); } - TimeMs created_at() const { return created_at_; } + Timestamp created_at() const { return created_at_; } private: - const TimeMs created_at_; + const Timestamp created_at_; }; HeartbeatHandler::HeartbeatHandler(absl::string_view log_prefix, @@ -91,7 +94,7 @@ HeartbeatHandler::HeartbeatHandler(absl::string_view log_prefix, : log_prefix_(log_prefix), ctx_(context), timer_manager_(timer_manager), - interval_duration_(options.heartbeat_interval), + interval_duration_(options.heartbeat_interval.ToTimeDelta()), interval_duration_should_include_rtt_( options.heartbeat_interval_include_rtt), interval_timer_(timer_manager_->CreateTimer( @@ -101,7 +104,7 @@ HeartbeatHandler::HeartbeatHandler(absl::string_view log_prefix, timeout_timer_(timer_manager_->CreateTimer( "heartbeat-timeout", absl::bind_front(&HeartbeatHandler::OnTimeoutTimerExpiry, this), - TimerOptions(options.rto_initial, + TimerOptions(options.rto_initial.ToTimeDelta(), TimerBackoffAlgorithm::kExponential, /*max_restarts=*/0))) { // The interval timer must always be running as long as the association is up. @@ -109,7 +112,7 @@ HeartbeatHandler::HeartbeatHandler(absl::string_view log_prefix, } void HeartbeatHandler::RestartTimer() { - if (interval_duration_ == DurationMs(0)) { + if (interval_duration_.IsZero()) { // Heartbeating has been disabled. 
return; } @@ -137,14 +140,14 @@ void HeartbeatHandler::HandleHeartbeatRequest(HeartbeatRequestChunk chunk) { void HeartbeatHandler::HandleHeartbeatAck(HeartbeatAckChunk chunk) { timeout_timer_->Stop(); - absl::optional info_param = chunk.info(); + std::optional info_param = chunk.info(); if (!info_param.has_value()) { ctx_->callbacks().OnError( ErrorKind::kParseFailed, "Failed to parse HEARTBEAT-ACK; No Heartbeat Info parameter"); return; } - absl::optional info = + std::optional info = HeartbeatInfo::Deserialize(info_param->info()); if (!info.has_value()) { ctx_->callbacks().OnError(ErrorKind::kParseFailed, @@ -153,8 +156,8 @@ void HeartbeatHandler::HandleHeartbeatAck(HeartbeatAckChunk chunk) { return; } - TimeMs now = ctx_->callbacks().TimeMillis(); - if (info->created_at() > TimeMs(0) && info->created_at() <= now) { + Timestamp now = ctx_->callbacks().Now(); + if (info->created_at() > Timestamp::Zero() && info->created_at() <= now) { ctx_->ObserveRTT(now - info->created_at()); } @@ -164,13 +167,13 @@ void HeartbeatHandler::HandleHeartbeatAck(HeartbeatAckChunk chunk) { ctx_->ClearTxErrorCounter(); } -absl::optional HeartbeatHandler::OnIntervalTimerExpiry() { +TimeDelta HeartbeatHandler::OnIntervalTimerExpiry() { if (ctx_->is_connection_established()) { - HeartbeatInfo info(ctx_->callbacks().TimeMillis()); + HeartbeatInfo info(ctx_->callbacks().Now()); timeout_timer_->set_duration(ctx_->current_rto()); timeout_timer_->Start(); RTC_DLOG(LS_INFO) << log_prefix_ << "Sending HEARTBEAT with timeout " - << *timeout_timer_->duration(); + << webrtc::ToString(timeout_timer_->duration()); Parameters parameters = Parameters::Builder() .Add(HeartbeatInfoParameter(info.Serialize())) @@ -183,14 +186,14 @@ absl::optional HeartbeatHandler::OnIntervalTimerExpiry() { << log_prefix_ << "Will not send HEARTBEAT when connection not established"; } - return absl::nullopt; + return TimeDelta::Zero(); } -absl::optional HeartbeatHandler::OnTimeoutTimerExpiry() { +TimeDelta HeartbeatHandler::OnTimeoutTimerExpiry() { // Note that the timeout timer is not restarted. It will be started again when // the interval timer expires. RTC_DCHECK(!timeout_timer_->is_running()); ctx_->IncrementTxErrorCounter("HEARTBEAT timeout"); - return absl::nullopt; + return TimeDelta::Zero(); } } // namespace dcsctp diff --git a/net/dcsctp/socket/heartbeat_handler.h b/net/dcsctp/socket/heartbeat_handler.h index 318b02955b..ac58b97a64 100644 --- a/net/dcsctp/socket/heartbeat_handler.h +++ b/net/dcsctp/socket/heartbeat_handler.h @@ -50,14 +50,14 @@ class HeartbeatHandler { void HandleHeartbeatAck(HeartbeatAckChunk chunk); private: - absl::optional OnIntervalTimerExpiry(); - absl::optional OnTimeoutTimerExpiry(); + webrtc::TimeDelta OnIntervalTimerExpiry(); + webrtc::TimeDelta OnTimeoutTimerExpiry(); const absl::string_view log_prefix_; Context* ctx_; TimerManager* timer_manager_; // The time for a connection to be idle before a heartbeat is sent. - const DurationMs interval_duration_; + const webrtc::TimeDelta interval_duration_; // Adding RTT to the duration will add some jitter, which is good in // production, but less good in unit tests, which is why it can be disabled. 
const bool interval_duration_should_include_rtt_; diff --git a/net/dcsctp/socket/heartbeat_handler_test.cc b/net/dcsctp/socket/heartbeat_handler_test.cc index d573192440..8f6af0ce31 100644 --- a/net/dcsctp/socket/heartbeat_handler_test.cc +++ b/net/dcsctp/socket/heartbeat_handler_test.cc @@ -30,6 +30,7 @@ using ::testing::IsEmpty; using ::testing::NiceMock; using ::testing::Return; using ::testing::SizeIs; +using ::webrtc::TimeDelta; constexpr DurationMs kHeartbeatInterval = DurationMs(30'000); @@ -37,7 +38,8 @@ DcSctpOptions MakeOptions(DurationMs heartbeat_interval) { DcSctpOptions options; options.heartbeat_interval_include_rtt = false; options.heartbeat_interval = heartbeat_interval; - options.enable_zero_checksum = false; + options.zero_checksum_alternate_error_detection_method = + ZeroChecksumAlternateErrorDetectionMethod::None(); return options; } @@ -51,10 +53,10 @@ class HeartbeatHandlerTestBase : public testing::Test { }), handler_("log: ", options_, &context_, &timer_manager_) {} - void AdvanceTime(DurationMs duration) { + void AdvanceTime(webrtc::TimeDelta duration) { callbacks_.AdvanceTime(duration); for (;;) { - absl::optional timeout_id = callbacks_.GetNextExpiredTimeout(); + std::optional timeout_id = callbacks_.GetNextExpiredTimeout(); if (!timeout_id.has_value()) { break; } @@ -80,7 +82,7 @@ class DisabledHeartbeatHandlerTest : public HeartbeatHandlerTestBase { }; TEST_F(HeartbeatHandlerTest, HasRunningHeartbeatIntervalTimer) { - AdvanceTime(options_.heartbeat_interval); + AdvanceTime(options_.heartbeat_interval.ToTimeDelta()); // Validate that a heartbeat request was sent. std::vector payload = callbacks_.ConsumeSentPacket(); @@ -119,7 +121,7 @@ TEST_F(HeartbeatHandlerTest, RepliesToHeartbeatRequests) { } TEST_F(HeartbeatHandlerTest, SendsHeartbeatRequestsOnIdleChannel) { - AdvanceTime(options_.heartbeat_interval); + AdvanceTime(options_.heartbeat_interval.ToTimeDelta()); // Grab the request, and make a response. std::vector payload = callbacks_.ConsumeSentPacket(); @@ -134,7 +136,7 @@ TEST_F(HeartbeatHandlerTest, SendsHeartbeatRequestsOnIdleChannel) { HeartbeatAckChunk ack(std::move(req).extract_parameters()); // Respond a while later. This RTT will be measured by the handler - constexpr DurationMs rtt(313); + constexpr TimeDelta rtt = TimeDelta::Millis(313); EXPECT_CALL(context_, ObserveRTT(rtt)).Times(1); @@ -143,7 +145,7 @@ TEST_F(HeartbeatHandlerTest, SendsHeartbeatRequestsOnIdleChannel) { } TEST_F(HeartbeatHandlerTest, DoesntObserveInvalidHeartbeats) { - AdvanceTime(options_.heartbeat_interval); + AdvanceTime(options_.heartbeat_interval.ToTimeDelta()); // Grab the request, and make a response. std::vector payload = callbacks_.ConsumeSentPacket(); @@ -161,15 +163,15 @@ TEST_F(HeartbeatHandlerTest, DoesntObserveInvalidHeartbeats) { // Go backwards in time - which make the HEARTBEAT-ACK have an invalid // timestamp in it, as it will be in the future. - callbacks_.AdvanceTime(DurationMs(-100)); + callbacks_.AdvanceTime(TimeDelta::Millis(-100)); handler_.HandleHeartbeatAck(std::move(ack)); } TEST_F(HeartbeatHandlerTest, IncreasesErrorIfNotAckedInTime) { - DurationMs rto(105); + TimeDelta rto = TimeDelta::Millis(105); EXPECT_CALL(context_, current_rto).WillOnce(Return(rto)); - AdvanceTime(options_.heartbeat_interval); + AdvanceTime(options_.heartbeat_interval.ToTimeDelta()); // Validate that a request was sent. 
EXPECT_THAT(callbacks_.ConsumeSentPacket(), Not(IsEmpty())); @@ -179,7 +181,7 @@ TEST_F(HeartbeatHandlerTest, IncreasesErrorIfNotAckedInTime) { } TEST_F(DisabledHeartbeatHandlerTest, IsReallyDisabled) { - AdvanceTime(options_.heartbeat_interval); + AdvanceTime(options_.heartbeat_interval.ToTimeDelta()); // Validate that a request was NOT sent. EXPECT_THAT(callbacks_.ConsumeSentPacket(), IsEmpty()); diff --git a/net/dcsctp/socket/mock_context.h b/net/dcsctp/socket/mock_context.h index 88e71d1b35..59801696d8 100644 --- a/net/dcsctp/socket/mock_context.h +++ b/net/dcsctp/socket/mock_context.h @@ -11,9 +11,9 @@ #define NET_DCSCTP_SOCKET_MOCK_CONTEXT_H_ #include +#include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "net/dcsctp/packet/sctp_packet.h" #include "net/dcsctp/public/dcsctp_options.h" #include "net/dcsctp/public/dcsctp_socket.h" @@ -40,7 +40,8 @@ class MockContext : public Context { ON_CALL(*this, peer_initial_tsn) .WillByDefault(testing::Return(PeerInitialTsn())); ON_CALL(*this, callbacks).WillByDefault(testing::ReturnRef(callbacks_)); - ON_CALL(*this, current_rto).WillByDefault(testing::Return(DurationMs(123))); + ON_CALL(*this, current_rto) + .WillByDefault(testing::Return(webrtc::TimeDelta::Millis(123))); ON_CALL(*this, Send).WillByDefault([this](SctpPacket::Builder& builder) { callbacks_.SendPacketWithStatus(builder.Build()); }); @@ -51,8 +52,8 @@ class MockContext : public Context { MOCK_METHOD(TSN, peer_initial_tsn, (), (const, override)); MOCK_METHOD(DcSctpSocketCallbacks&, callbacks, (), (const, override)); - MOCK_METHOD(void, ObserveRTT, (DurationMs rtt_ms), (override)); - MOCK_METHOD(DurationMs, current_rto, (), (const, override)); + MOCK_METHOD(void, ObserveRTT, (webrtc::TimeDelta rtt), (override)); + MOCK_METHOD(webrtc::TimeDelta, current_rto, (), (const, override)); MOCK_METHOD(bool, IncrementTxErrorCounter, (absl::string_view reason), diff --git a/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h b/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h index 8b2a772fa3..5dddf08d9c 100644 --- a/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h +++ b/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h @@ -13,12 +13,12 @@ #include #include #include +#include #include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/task_queue/task_queue_base.h" #include "net/dcsctp/public/dcsctp_message.h" @@ -58,7 +58,7 @@ class MockDcSctpSocketCallbacks : public DcSctpSocketCallbacks { random_(internal::GetUniqueSeed()), timeout_manager_([this]() { return now_; }) { ON_CALL(*this, SendPacketWithStatus) - .WillByDefault([this](rtc::ArrayView data) { + .WillByDefault([this](webrtc::ArrayView data) { sent_packets_.emplace_back( std::vector(data.begin(), data.end())); return SendPacketStatus::kSuccess; @@ -80,21 +80,21 @@ class MockDcSctpSocketCallbacks : public DcSctpSocketCallbacks { << log_prefix_ << "Socket abort: " << ToString(error) << "; " << message; }); - ON_CALL(*this, TimeMillis).WillByDefault([this]() { return now_; }); + ON_CALL(*this, Now).WillByDefault([this]() { return now_; }); } MOCK_METHOD(SendPacketStatus, SendPacketWithStatus, - (rtc::ArrayView data), + (webrtc::ArrayView data), (override)); std::unique_ptr CreateTimeout( - webrtc::TaskQueueBase::DelayPrecision precision) override { + webrtc::TaskQueueBase::DelayPrecision /* precision */) override { // The fake timeout manager does not implement |precision|. 
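// A note on the callback API migration visible throughout these files: the old
// `TimeMs TimeMillis()` returned a strong-typed millisecond count, while the new
// `webrtc::Timestamp Now()` uses the webrtc time units. Code straddling the two
// can convert with (a sketch):
//   webrtc::Timestamp now = webrtc::Timestamp::Millis(*legacy_time_ms);
//   dcsctp::TimeMs legacy = dcsctp::TimeMs(now.ms());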
return timeout_manager_.CreateTimeout(); } - MOCK_METHOD(TimeMs, TimeMillis, (), (override)); + MOCK_METHOD(webrtc::Timestamp, Now, (), (override)); uint32_t GetRandomInt(uint32_t low, uint32_t high) override { return random_.Rand(low, high); } @@ -113,16 +113,16 @@ class MockDcSctpSocketCallbacks : public DcSctpSocketCallbacks { MOCK_METHOD(void, OnConnectionRestarted, (), (override)); MOCK_METHOD(void, OnStreamsResetFailed, - (rtc::ArrayView outgoing_streams, + (webrtc::ArrayView outgoing_streams, absl::string_view reason), (override)); MOCK_METHOD(void, OnStreamsResetPerformed, - (rtc::ArrayView outgoing_streams), + (webrtc::ArrayView outgoing_streams), (override)); MOCK_METHOD(void, OnIncomingStreamsReset, - (rtc::ArrayView incoming_streams), + (webrtc::ArrayView incoming_streams), (override)); MOCK_METHOD(void, OnBufferedAmountLow, (StreamID stream_id), (override)); MOCK_METHOD(void, OnTotalBufferedAmountLow, (), (override)); @@ -150,25 +150,29 @@ class MockDcSctpSocketCallbacks : public DcSctpSocketCallbacks { sent_packets_.pop_front(); return ret; } - absl::optional ConsumeReceivedMessage() { + std::optional ConsumeReceivedMessage() { if (received_messages_.empty()) { - return absl::nullopt; + return std::nullopt; } DcSctpMessage ret = std::move(received_messages_.front()); received_messages_.pop_front(); return ret; } - void AdvanceTime(DurationMs duration_ms) { now_ = now_ + duration_ms; } - void SetTime(TimeMs now) { now_ = now; } + void AdvanceTime(webrtc::TimeDelta duration) { now_ = now_ + duration; } + void SetTime(webrtc::Timestamp now) { now_ = now; } - absl::optional GetNextExpiredTimeout() { + std::optional GetNextExpiredTimeout() { return timeout_manager_.GetNextExpiredTimeout(); } + webrtc::TimeDelta GetTimeToNextTimeout() const { + return timeout_manager_.GetTimeToNextTimeout(); + } + private: const std::string log_prefix_; - TimeMs now_ = TimeMs(0); + webrtc::Timestamp now_ = webrtc::Timestamp::Zero(); webrtc::Random random_; FakeTimeoutManager timeout_manager_; std::deque> sent_packets_; diff --git a/net/dcsctp/socket/packet_sender.cc b/net/dcsctp/socket/packet_sender.cc index 85392e205d..cdaf95c963 100644 --- a/net/dcsctp/socket/packet_sender.cc +++ b/net/dcsctp/socket/packet_sender.cc @@ -17,16 +17,16 @@ namespace dcsctp { PacketSender::PacketSender(DcSctpSocketCallbacks& callbacks, - std::function, + std::function, SendPacketStatus)> on_sent_packet) : callbacks_(callbacks), on_sent_packet_(std::move(on_sent_packet)) {} -bool PacketSender::Send(SctpPacket::Builder& builder) { +bool PacketSender::Send(SctpPacket::Builder& builder, bool write_checksum) { if (builder.empty()) { return false; } - std::vector payload = builder.Build(); + std::vector payload = builder.Build(write_checksum); SendPacketStatus status = callbacks_.SendPacketWithStatus(payload); on_sent_packet_(payload, status); diff --git a/net/dcsctp/socket/packet_sender.h b/net/dcsctp/socket/packet_sender.h index 7af4d3c47b..273e0948f0 100644 --- a/net/dcsctp/socket/packet_sender.h +++ b/net/dcsctp/socket/packet_sender.h @@ -21,18 +21,18 @@ namespace dcsctp { class PacketSender { public: PacketSender(DcSctpSocketCallbacks& callbacks, - std::function, + std::function, SendPacketStatus)> on_sent_packet); // Sends the packet, and returns true if it was sent successfully. 
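// The added `write_checksum` parameter is what allows zero-checksum packets to be
// emitted once the extension has been negotiated: callers keep writing a real
// CRC32c during the handshake and drop it afterwards. A hedged usage sketch (the
// actual call sites are outside this hunk; `capabilities.zero_checksum` is the
// flag introduced elsewhere in this patch):
//   packet_sender.Send(builder, /*write_checksum=*/!capabilities.zero_checksum);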
- bool Send(SctpPacket::Builder& builder); + bool Send(SctpPacket::Builder& builder, bool write_checksum = true); private: DcSctpSocketCallbacks& callbacks_; // Callback that will be triggered for every send attempt, indicating the // status of the operation. - std::function, SendPacketStatus)> + std::function, SendPacketStatus)> on_sent_packet_; }; } // namespace dcsctp diff --git a/net/dcsctp/socket/packet_sender_test.cc b/net/dcsctp/socket/packet_sender_test.cc index 079dc36a41..d10786a9cc 100644 --- a/net/dcsctp/socket/packet_sender_test.cc +++ b/net/dcsctp/socket/packet_sender_test.cc @@ -31,7 +31,8 @@ class PacketSenderTest : public testing::Test { DcSctpOptions options_; testing::NiceMock callbacks_; - testing::MockFunction, SendPacketStatus)> + testing::MockFunction, + SendPacketStatus)> on_send_fn_; PacketSender sender_; }; diff --git a/net/dcsctp/socket/state_cookie.cc b/net/dcsctp/socket/state_cookie.cc index 86be77aa34..d9493826e9 100644 --- a/net/dcsctp/socket/state_cookie.cc +++ b/net/dcsctp/socket/state_cookie.cc @@ -10,9 +10,9 @@ #include "net/dcsctp/socket/state_cookie.h" #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/bounded_byte_reader.h" #include "net/dcsctp/packet/bounded_byte_writer.h" @@ -32,25 +32,28 @@ std::vector StateCookie::Serialize() { BoundedByteWriter buffer(cookie); buffer.Store32<0>(kMagic1); buffer.Store32<4>(kMagic2); - buffer.Store32<8>(*initiate_tag_); - buffer.Store32<12>(*initial_tsn_); - buffer.Store32<16>(a_rwnd_); - buffer.Store32<20>(static_cast(*tie_tag_ >> 32)); - buffer.Store32<24>(static_cast(*tie_tag_)); - buffer.Store8<28>(capabilities_.partial_reliability); - buffer.Store8<29>(capabilities_.message_interleaving); - buffer.Store8<30>(capabilities_.reconfig); - buffer.Store16<32>(capabilities_.negotiated_maximum_incoming_streams); - buffer.Store16<34>(capabilities_.negotiated_maximum_outgoing_streams); + buffer.Store32<8>(*peer_tag_); + buffer.Store32<12>(*my_tag_); + buffer.Store32<16>(*peer_initial_tsn_); + buffer.Store32<20>(*my_initial_tsn_); + buffer.Store32<24>(a_rwnd_); + buffer.Store32<28>(static_cast(*tie_tag_ >> 32)); + buffer.Store32<32>(static_cast(*tie_tag_)); + buffer.Store8<36>(capabilities_.partial_reliability); + buffer.Store8<37>(capabilities_.message_interleaving); + buffer.Store8<38>(capabilities_.reconfig); + buffer.Store8<39>(capabilities_.zero_checksum); + buffer.Store16<40>(capabilities_.negotiated_maximum_incoming_streams); + buffer.Store16<42>(capabilities_.negotiated_maximum_outgoing_streams); return cookie; } -absl::optional StateCookie::Deserialize( - rtc::ArrayView cookie) { +std::optional StateCookie::Deserialize( + webrtc::ArrayView cookie) { if (cookie.size() != kCookieSize) { RTC_DLOG(LS_WARNING) << "Invalid state cookie: " << cookie.size() << " bytes"; - return absl::nullopt; + return std::nullopt; } BoundedByteReader buffer(cookie); @@ -58,25 +61,28 @@ absl::optional StateCookie::Deserialize( uint32_t magic2 = buffer.Load32<4>(); if (magic1 != kMagic1 || magic2 != kMagic2) { RTC_DLOG(LS_WARNING) << "Invalid state cookie; wrong magic"; - return absl::nullopt; + return std::nullopt; } - VerificationTag verification_tag(buffer.Load32<8>()); - TSN initial_tsn(buffer.Load32<12>()); - uint32_t a_rwnd = buffer.Load32<16>(); - uint32_t tie_tag_upper = buffer.Load32<20>(); - uint32_t tie_tag_lower = buffer.Load32<24>(); + VerificationTag peer_tag(buffer.Load32<8>()); + VerificationTag my_tag(buffer.Load32<12>()); + TSN 
peer_initial_tsn(buffer.Load32<16>()); + TSN my_initial_tsn(buffer.Load32<20>()); + uint32_t a_rwnd = buffer.Load32<24>(); + uint32_t tie_tag_upper = buffer.Load32<28>(); + uint32_t tie_tag_lower = buffer.Load32<32>(); TieTag tie_tag(static_cast(tie_tag_upper) << 32 | static_cast(tie_tag_lower)); Capabilities capabilities; - capabilities.partial_reliability = buffer.Load8<28>() != 0; - capabilities.message_interleaving = buffer.Load8<29>() != 0; - capabilities.reconfig = buffer.Load8<30>() != 0; - capabilities.negotiated_maximum_incoming_streams = buffer.Load16<32>(); - capabilities.negotiated_maximum_outgoing_streams = buffer.Load16<34>(); + capabilities.partial_reliability = buffer.Load8<36>() != 0; + capabilities.message_interleaving = buffer.Load8<37>() != 0; + capabilities.reconfig = buffer.Load8<38>() != 0; + capabilities.zero_checksum = buffer.Load8<39>() != 0; + capabilities.negotiated_maximum_incoming_streams = buffer.Load16<40>(); + capabilities.negotiated_maximum_outgoing_streams = buffer.Load16<42>(); - return StateCookie(verification_tag, initial_tsn, a_rwnd, tie_tag, - capabilities); + return StateCookie(peer_tag, my_tag, peer_initial_tsn, my_initial_tsn, a_rwnd, + tie_tag, capabilities); } } // namespace dcsctp diff --git a/net/dcsctp/socket/state_cookie.h b/net/dcsctp/socket/state_cookie.h index a26dbf86f7..387744a39f 100644 --- a/net/dcsctp/socket/state_cookie.h +++ b/net/dcsctp/socket/state_cookie.h @@ -11,9 +11,9 @@ #define NET_DCSCTP_SOCKET_STATE_COOKIE_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/common/internal_types.h" #include "net/dcsctp/socket/capabilities.h" @@ -27,15 +27,19 @@ namespace dcsctp { // Do not trust anything in it; no pointers or anything like that. class StateCookie { public: - static constexpr size_t kCookieSize = 36; + static constexpr size_t kCookieSize = 44; - StateCookie(VerificationTag initiate_tag, - TSN initial_tsn, + StateCookie(VerificationTag peer_tag, + VerificationTag my_tag, + TSN peer_initial_tsn, + TSN my_initial_tsn, uint32_t a_rwnd, TieTag tie_tag, Capabilities capabilities) - : initiate_tag_(initiate_tag), - initial_tsn_(initial_tsn), + : peer_tag_(peer_tag), + my_tag_(my_tag), + peer_initial_tsn_(peer_initial_tsn), + my_initial_tsn_(my_initial_tsn), a_rwnd_(a_rwnd), tie_tag_(tie_tag), capabilities_(capabilities) {} @@ -43,19 +47,25 @@ class StateCookie { // Returns a serialized version of this cookie. std::vector Serialize(); - // Deserializes the cookie, and returns absl::nullopt if that failed. - static absl::optional Deserialize( - rtc::ArrayView cookie); + // Deserializes the cookie, and returns std::nullopt if that failed. + static std::optional Deserialize( + webrtc::ArrayView cookie); - VerificationTag initiate_tag() const { return initiate_tag_; } - TSN initial_tsn() const { return initial_tsn_; } + VerificationTag peer_tag() const { return peer_tag_; } + VerificationTag my_tag() const { return my_tag_; } + TSN peer_initial_tsn() const { return peer_initial_tsn_; } + TSN my_initial_tsn() const { return my_initial_tsn_; } uint32_t a_rwnd() const { return a_rwnd_; } TieTag tie_tag() const { return tie_tag_; } const Capabilities& capabilities() const { return capabilities_; } private: - const VerificationTag initiate_tag_; - const TSN initial_tsn_; + // Also called "Tag_A" in RFC4960. + const VerificationTag peer_tag_; + // Also called "Tag_Z" in RFC4960. 
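// For reference, the byte layout written by StateCookie::Serialize() above, which
// is where the new kCookieSize of 44 comes from:
//    0: magic1                      4: magic2
//    8: peer_tag                   12: my_tag
//   16: peer_initial_tsn           20: my_initial_tsn
//   24: a_rwnd                     28..35: tie_tag (high then low 32 bits)
//   36: partial_reliability        37: message_interleaving
//   38: reconfig                   39: zero_checksum
//   40: negotiated_maximum_incoming_streams (16 bits)
//   42: negotiated_maximum_outgoing_streams (16 bits)   => 44 bytes total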
+ const VerificationTag my_tag_; + const TSN peer_initial_tsn_; + const TSN my_initial_tsn_; const uint32_t a_rwnd_; const TieTag tie_tag_; const Capabilities capabilities_; diff --git a/net/dcsctp/socket/state_cookie_test.cc b/net/dcsctp/socket/state_cookie_test.cc index 7d8e1339ee..806ea2024b 100644 --- a/net/dcsctp/socket/state_cookie_test.cc +++ b/net/dcsctp/socket/state_cookie_test.cc @@ -21,21 +21,27 @@ TEST(StateCookieTest, SerializeAndDeserialize) { Capabilities capabilities = {.partial_reliability = true, .message_interleaving = false, .reconfig = true, + .zero_checksum = true, .negotiated_maximum_incoming_streams = 123, .negotiated_maximum_outgoing_streams = 234}; - StateCookie cookie(VerificationTag(123), TSN(456), + StateCookie cookie(/*peer_tag=*/VerificationTag(123), + /*my_tag=*/VerificationTag(321), + /*peer_initial_tsn=*/TSN(456), /*my_initial_tsn=*/TSN(654), /*a_rwnd=*/789, TieTag(101112), capabilities); std::vector serialized = cookie.Serialize(); EXPECT_THAT(serialized, SizeIs(StateCookie::kCookieSize)); ASSERT_HAS_VALUE_AND_ASSIGN(StateCookie deserialized, StateCookie::Deserialize(serialized)); - EXPECT_EQ(deserialized.initiate_tag(), VerificationTag(123)); - EXPECT_EQ(deserialized.initial_tsn(), TSN(456)); + EXPECT_EQ(deserialized.peer_tag(), VerificationTag(123)); + EXPECT_EQ(deserialized.my_tag(), VerificationTag(321)); + EXPECT_EQ(deserialized.peer_initial_tsn(), TSN(456)); + EXPECT_EQ(deserialized.my_initial_tsn(), TSN(654)); EXPECT_EQ(deserialized.a_rwnd(), 789u); EXPECT_EQ(deserialized.tie_tag(), TieTag(101112)); EXPECT_TRUE(deserialized.capabilities().partial_reliability); EXPECT_FALSE(deserialized.capabilities().message_interleaving); EXPECT_TRUE(deserialized.capabilities().reconfig); + EXPECT_TRUE(deserialized.capabilities().zero_checksum); EXPECT_EQ(deserialized.capabilities().negotiated_maximum_incoming_streams, 123); EXPECT_EQ(deserialized.capabilities().negotiated_maximum_outgoing_streams, @@ -46,7 +52,9 @@ TEST(StateCookieTest, ValidateMagicValue) { Capabilities capabilities = {.partial_reliability = true, .message_interleaving = false, .reconfig = true}; - StateCookie cookie(VerificationTag(123), TSN(456), + StateCookie cookie(/*peer_tag=*/VerificationTag(123), + /*my_tag=*/VerificationTag(321), + /*peer_initial_tsn=*/TSN(456), /*my_initial_tsn=*/TSN(654), /*a_rwnd=*/789, TieTag(101112), capabilities); std::vector serialized = cookie.Serialize(); ASSERT_THAT(serialized, SizeIs(StateCookie::kCookieSize)); diff --git a/net/dcsctp/socket/stream_reset_handler.cc b/net/dcsctp/socket/stream_reset_handler.cc index c81b34b626..8d109ea071 100644 --- a/net/dcsctp/socket/stream_reset_handler.cc +++ b/net/dcsctp/socket/stream_reset_handler.cc @@ -11,13 +11,13 @@ #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/units/time_delta.h" #include "net/dcsctp/common/internal_types.h" -#include "net/dcsctp/common/str_join.h" #include "net/dcsctp/packet/chunk/reconfig_chunk.h" #include "net/dcsctp/packet/parameter/add_incoming_streams_request_parameter.h" #include "net/dcsctp/packet/parameter/add_outgoing_streams_request_parameter.h" @@ -35,9 +35,11 @@ #include "net/dcsctp/timer/timer.h" #include "net/dcsctp/tx/retransmission_queue.h" #include "rtc_base/logging.h" +#include "rtc_base/strings/str_join.h" namespace dcsctp { namespace { +using ::webrtc::TimeDelta; using ResponseResult = ReconfigurationResponseParameter::Result; bool DescriptorsAre(const std::vector& c, @@ -82,10 +84,10 @@ bool 
StreamResetHandler::Validate(const ReConfigChunk& chunk) { return false; } -absl::optional> +std::optional> StreamResetHandler::Process(const ReConfigChunk& chunk) { if (!Validate(chunk)) { - return absl::nullopt; + return std::nullopt; } std::vector responses; @@ -110,7 +112,7 @@ StreamResetHandler::Process(const ReConfigChunk& chunk) { } void StreamResetHandler::HandleReConfig(ReConfigChunk chunk) { - absl::optional> responses = + std::optional> responses = Process(chunk); if (!responses.has_value()) { @@ -131,7 +133,7 @@ void StreamResetHandler::HandleReConfig(ReConfigChunk chunk) { } bool StreamResetHandler::ValidateReqSeqNbr( - ReconfigRequestSN req_seq_nbr, + UnwrappedReconfigRequestSn req_seq_nbr, std::vector& responses) { if (req_seq_nbr == last_processed_req_seq_nbr_) { // https://www.rfc-editor.org/rfc/rfc6525.html#section-5.2.1 "If the @@ -143,11 +145,11 @@ bool StreamResetHandler::ValidateReqSeqNbr( << " already processed, returning result=" << ToString(last_processed_req_result_); responses.push_back(ReconfigurationResponseParameter( - req_seq_nbr, last_processed_req_result_)); + req_seq_nbr.Wrap(), last_processed_req_result_)); return false; } - if (req_seq_nbr != ReconfigRequestSN(*last_processed_req_seq_nbr_ + 1)) { + if (req_seq_nbr != last_processed_req_seq_nbr_.next_value()) { // Too old, too new, from wrong association etc. // This is expected to happen when handing over a RTCPeerConnection from one // server to another. The client will notice this and may decide to close @@ -156,7 +158,7 @@ bool StreamResetHandler::ValidateReqSeqNbr( RTC_DLOG(LS_VERBOSE) << log_prefix_ << "req=" << *req_seq_nbr << " bad seq_nbr"; responses.push_back(ReconfigurationResponseParameter( - req_seq_nbr, ResponseResult::kErrorBadSequenceNumber)); + req_seq_nbr.Wrap(), ResponseResult::kErrorBadSequenceNumber)); return false; } @@ -166,7 +168,7 @@ bool StreamResetHandler::ValidateReqSeqNbr( void StreamResetHandler::HandleResetOutgoing( const ParameterDescriptor& descriptor, std::vector& responses) { - absl::optional req = + std::optional req = OutgoingSSNResetRequestParameter::Parse(descriptor.data); if (!req.has_value()) { ctx_->callbacks().OnError(ErrorKind::kParseFailed, @@ -174,16 +176,43 @@ void StreamResetHandler::HandleResetOutgoing( return; } - if (ValidateReqSeqNbr(req->request_sequence_number(), responses)) { - RTC_DLOG(LS_VERBOSE) << log_prefix_ - << "Reset outgoing streams with req_seq_nbr=" - << *req->request_sequence_number(); - - last_processed_req_seq_nbr_ = req->request_sequence_number(); - last_processed_req_result_ = reassembly_queue_->ResetStreams( - *req, data_tracker_->last_cumulative_acked_tsn()); - if (last_processed_req_result_ == ResponseResult::kSuccessPerformed) { + UnwrappedReconfigRequestSn request_sn = + incoming_reconfig_request_sn_unwrapper_.Unwrap( + req->request_sequence_number()); + + if (ValidateReqSeqNbr(request_sn, responses)) { + last_processed_req_seq_nbr_ = request_sn; + if (data_tracker_->IsLaterThanCumulativeAckedTsn( + req->sender_last_assigned_tsn())) { + // https://datatracker.ietf.org/doc/html/rfc6525#section-5.2.2 + // E2) "If the Sender's Last Assigned TSN is greater than the cumulative + // acknowledgment point, then the endpoint MUST enter 'deferred reset + // processing'." 
+ reassembly_queue_->EnterDeferredReset(req->sender_last_assigned_tsn(), + req->stream_ids()); + // "If the endpoint enters 'deferred reset processing', it MUST put a + // Re-configuration Response Parameter into a RE-CONFIG chunk indicating + // 'In progress' and MUST send the RE-CONFIG chunk. + last_processed_req_result_ = ResponseResult::kInProgress; + RTC_DLOG(LS_VERBOSE) << log_prefix_ + << "Reset outgoing; Sender last_assigned=" + << *req->sender_last_assigned_tsn() + << " - not yet reached -> InProgress"; + } else { + // https://datatracker.ietf.org/doc/html/rfc6525#section-5.2.2 + // E3) If no stream numbers are listed in the parameter, then all incoming + // streams MUST be reset to 0 as the next expected SSN. If specific stream + // numbers are listed, then only these specific streams MUST be reset to + // 0, and all other non-listed SSNs remain unchanged. E4: Any queued TSNs + // (queued at step E2) MUST now be released and processed normally. + reassembly_queue_->ResetStreamsAndLeaveDeferredReset(req->stream_ids()); ctx_->callbacks().OnIncomingStreamsReset(req->stream_ids()); + last_processed_req_result_ = ResponseResult::kSuccessPerformed; + + RTC_DLOG(LS_VERBOSE) << log_prefix_ + << "Reset outgoing; Sender last_assigned=" + << *req->sender_last_assigned_tsn() + << " - reached -> SuccessPerformed"; } responses.push_back(ReconfigurationResponseParameter( req->request_sequence_number(), last_processed_req_result_)); @@ -193,22 +222,27 @@ void StreamResetHandler::HandleResetOutgoing( void StreamResetHandler::HandleResetIncoming( const ParameterDescriptor& descriptor, std::vector& responses) { - absl::optional req = + std::optional req = IncomingSSNResetRequestParameter::Parse(descriptor.data); if (!req.has_value()) { ctx_->callbacks().OnError(ErrorKind::kParseFailed, "Failed to parse Incoming Reset command"); return; } - if (ValidateReqSeqNbr(req->request_sequence_number(), responses)) { + + UnwrappedReconfigRequestSn request_sn = + incoming_reconfig_request_sn_unwrapper_.Unwrap( + req->request_sequence_number()); + + if (ValidateReqSeqNbr(request_sn, responses)) { responses.push_back(ReconfigurationResponseParameter( req->request_sequence_number(), ResponseResult::kSuccessNothingToDo)); - last_processed_req_seq_nbr_ = req->request_sequence_number(); + last_processed_req_seq_nbr_ = request_sn; } } void StreamResetHandler::HandleResponse(const ParameterDescriptor& descriptor) { - absl::optional resp = + std::optional resp = ReconfigurationResponseParameter::Parse(descriptor.data); if (!resp.has_value()) { ctx_->callbacks().OnError( @@ -227,22 +261,20 @@ void StreamResetHandler::HandleResponse(const ParameterDescriptor& descriptor) { RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Reset stream success, req_seq_nbr=" << *current_request_->req_seq_nbr() << ", streams=" - << StrJoin(current_request_->streams(), ",", - [](rtc::StringBuilder& sb, StreamID stream_id) { - sb << *stream_id; - }); + << webrtc::StrJoin(current_request_->streams(), ",", + [](webrtc::StringBuilder& sb, + StreamID stream_id) { sb << *stream_id; }); ctx_->callbacks().OnStreamsResetPerformed(current_request_->streams()); - current_request_ = absl::nullopt; + current_request_ = std::nullopt; retransmission_queue_->CommitResetStreams(); break; case ResponseResult::kInProgress: RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Reset stream still pending, req_seq_nbr=" << *current_request_->req_seq_nbr() << ", streams=" - << StrJoin(current_request_->streams(), ",", - [](rtc::StringBuilder& sb, StreamID stream_id) { - sb << 
*stream_id; - }); + << webrtc::StrJoin(current_request_->streams(), ",", + [](webrtc::StringBuilder& sb, + StreamID stream_id) { sb << *stream_id; }); // Force this request to be sent again, but with new req_seq_nbr. current_request_->PrepareRetransmission(); reconfig_timer_->set_duration(ctx_->current_rto()); @@ -256,30 +288,29 @@ void StreamResetHandler::HandleResponse(const ParameterDescriptor& descriptor) { << log_prefix_ << "Reset stream error=" << ToString(resp->result()) << ", req_seq_nbr=" << *current_request_->req_seq_nbr() << ", streams=" - << StrJoin(current_request_->streams(), ",", - [](rtc::StringBuilder& sb, StreamID stream_id) { - sb << *stream_id; - }); + << webrtc::StrJoin(current_request_->streams(), ",", + [](webrtc::StringBuilder& sb, + StreamID stream_id) { sb << *stream_id; }); ctx_->callbacks().OnStreamsResetFailed(current_request_->streams(), ToString(resp->result())); - current_request_ = absl::nullopt; + current_request_ = std::nullopt; retransmission_queue_->RollbackResetStreams(); break; } } } -absl::optional StreamResetHandler::MakeStreamResetRequest() { +std::optional StreamResetHandler::MakeStreamResetRequest() { // Only send stream resets if there are streams to reset, and no current // ongoing request (there can only be one at a time), and if the stream // can be reset. if (current_request_.has_value() || !retransmission_queue_->HasStreamsReadyToBeReset()) { - return absl::nullopt; + return std::nullopt; } - current_request_.emplace(TSN(*retransmission_queue_->next_tsn() - 1), - retransmission_queue_->GetStreamsReadyToBeReset()); + current_request_.emplace(retransmission_queue_->last_assigned_tsn(), + retransmission_queue_->BeginResetStreams()); reconfig_timer_->set_duration(ctx_->current_rto()); reconfig_timer_->Start(); return MakeReconfigChunk(); @@ -309,19 +340,19 @@ ReConfigChunk StreamResetHandler::MakeReconfigChunk() { } void StreamResetHandler::ResetStreams( - rtc::ArrayView outgoing_streams) { + webrtc::ArrayView outgoing_streams) { for (StreamID stream_id : outgoing_streams) { retransmission_queue_->PrepareResetStream(stream_id); } } -absl::optional StreamResetHandler::OnReconfigTimerExpiry() { +TimeDelta StreamResetHandler::OnReconfigTimerExpiry() { if (current_request_->has_been_sent()) { // There is an outstanding request, which timed out while waiting for a // response. if (!ctx_->IncrementTxErrorCounter("RECONFIG timeout")) { // Timed out. The connection will close after processing the timers. - return absl::nullopt; + return TimeDelta::Zero(); } } else { // There is no outstanding request, but there is a prepared one. 
This means @@ -345,7 +376,8 @@ HandoverReadinessStatus StreamResetHandler::GetHandoverReadiness() const { } void StreamResetHandler::AddHandoverState(DcSctpSocketHandoverState& state) { - state.rx.last_completed_reset_req_sn = last_processed_req_seq_nbr_.value(); + state.rx.last_completed_reset_req_sn = + last_processed_req_seq_nbr_.Wrap().value(); state.tx.next_reset_req_sn = next_outgoing_req_seq_nbr_.value(); } diff --git a/net/dcsctp/socket/stream_reset_handler.h b/net/dcsctp/socket/stream_reset_handler.h index 8140903c49..dba402e29e 100644 --- a/net/dcsctp/socket/stream_reset_handler.h +++ b/net/dcsctp/socket/stream_reset_handler.h @@ -12,14 +12,15 @@ #include #include +#include #include #include #include #include "absl/functional/bind_front.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/units/time_delta.h" #include "net/dcsctp/common/internal_types.h" #include "net/dcsctp/packet/chunk/reconfig_chunk.h" #include "net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.h" @@ -80,15 +81,17 @@ class StreamResetHandler { reconfig_timer_(timer_manager->CreateTimer( "re-config", absl::bind_front(&StreamResetHandler::OnReconfigTimerExpiry, this), - TimerOptions(DurationMs(0)))), + TimerOptions(webrtc::TimeDelta::Zero()))), next_outgoing_req_seq_nbr_( handover_state ? ReconfigRequestSN(handover_state->tx.next_reset_req_sn) : ReconfigRequestSN(*ctx_->my_initial_tsn())), last_processed_req_seq_nbr_( - handover_state ? ReconfigRequestSN( - handover_state->rx.last_completed_reset_req_sn) - : ReconfigRequestSN(*ctx_->peer_initial_tsn() - 1)), + incoming_reconfig_request_sn_unwrapper_.Unwrap( + handover_state + ? ReconfigRequestSN( + handover_state->rx.last_completed_reset_req_sn) + : ReconfigRequestSN(*ctx_->peer_initial_tsn() - 1))), last_processed_req_result_( ReconfigurationResponseParameter::Result::kSuccessNothingToDo) {} @@ -97,13 +100,13 @@ class StreamResetHandler { // time and also multiple times. It will enqueue requests that can't be // directly fulfilled, and will asynchronously process them when any ongoing // request has completed. - void ResetStreams(rtc::ArrayView outgoing_streams); + void ResetStreams(webrtc::ArrayView outgoing_streams); // Creates a Reset Streams request that must be sent if returned. Will start - // the reconfig timer. Will return absl::nullopt if there is no need to + // the reconfig timer. Will return std::nullopt if there is no need to // create a request (no streams to reset) or if there already is an ongoing // stream reset request that hasn't completed yet. - absl::optional MakeStreamResetRequest(); + std::optional MakeStreamResetRequest(); // Called when handling and incoming RE-CONFIG chunk. void HandleReConfig(ReConfigChunk chunk); @@ -113,6 +116,7 @@ class StreamResetHandler { void AddHandoverState(DcSctpSocketHandoverState& state); private: + using UnwrappedReconfigRequestSn = UnwrappedSequenceNumber; // Represents a stream request operation. 
There can only be one ongoing at // any time, and a sent request may either succeed, fail or result in the // receiver signaling that it can't process it right now, and then it will be @@ -120,7 +124,7 @@ class StreamResetHandler { class CurrentRequest { public: CurrentRequest(TSN sender_last_assigned_tsn, std::vector streams) - : req_seq_nbr_(absl::nullopt), + : req_seq_nbr_(std::nullopt), sender_last_assigned_tsn_(sender_last_assigned_tsn), streams_(std::move(streams)) {} @@ -148,7 +152,7 @@ class StreamResetHandler { // If the receiver can't apply the request yet (and answered "In Progress"), // this will be called to prepare the request to be retransmitted at a later // time. - void PrepareRetransmission() { req_seq_nbr_ = absl::nullopt; } + void PrepareRetransmission() { req_seq_nbr_ = std::nullopt; } // If the request hasn't been sent yet, this assigns it a request number. void PrepareToSend(ReconfigRequestSN new_req_seq_nbr) { @@ -160,7 +164,7 @@ class StreamResetHandler { // has been prepared, but has not yet been sent. This is typically used when // the peer responded "in progress" and the same request (but a different // request number) must be sent again. - absl::optional req_seq_nbr_; + std::optional req_seq_nbr_; // The sender's (that's us) last assigned TSN, from the retransmission // queue. TSN sender_last_assigned_tsn_; @@ -172,9 +176,9 @@ class StreamResetHandler { bool Validate(const ReConfigChunk& chunk); // Processes a stream stream reconfiguration chunk and may either return - // absl::nullopt (on protocol errors), or a list of responses - either 0, 1 + // std::nullopt (on protocol errors), or a list of responses - either 0, 1 // or 2. - absl::optional> Process( + std::optional> Process( const ReConfigChunk& chunk); // Creates the actual RE-CONFIG chunk. A request (which set `current_request`) @@ -185,7 +189,7 @@ class StreamResetHandler { // fails to validate, and returns false, it will also add a response to // `responses`. bool ValidateReqSeqNbr( - ReconfigRequestSN req_seq_nbr, + UnwrappedReconfigRequestSn req_seq_nbr, std::vector& responses); // Called when this socket receives an outgoing stream reset request. It might @@ -208,23 +212,24 @@ class StreamResetHandler { void HandleResponse(const ParameterDescriptor& descriptor); // Expiration handler for the Reconfig timer. - absl::optional OnReconfigTimerExpiry(); + webrtc::TimeDelta OnReconfigTimerExpiry(); const absl::string_view log_prefix_; Context* ctx_; DataTracker* data_tracker_; ReassemblyQueue* reassembly_queue_; RetransmissionQueue* retransmission_queue_; + UnwrappedReconfigRequestSn::Unwrapper incoming_reconfig_request_sn_unwrapper_; const std::unique_ptr reconfig_timer_; // The next sequence number for outgoing stream requests. ReconfigRequestSN next_outgoing_req_seq_nbr_; // The current stream request operation. - absl::optional current_request_; + std::optional current_request_; // For incoming requests - last processed request sequence number. 
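// Background for the type change just below: ReconfigRequestSN is a 32-bit
// sequence number that can wrap around, and ValidateReqSeqNbr() accepts a request
// only if it equals `last_processed + 1`. Keeping the last processed value in
// unwrapped form makes that comparison well defined across the wrap boundary.
// A sketch of the intended check (assuming the usual unwrapper semantics):
bool IsNextInSequence(UnwrappedReconfigRequestSn::Unwrapper& unwrapper,
                      UnwrappedReconfigRequestSn last_processed,
                      ReconfigRequestSN incoming) {
  return unwrapper.Unwrap(incoming) == last_processed.next_value();
}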
- ReconfigRequestSN last_processed_req_seq_nbr_; + UnwrappedReconfigRequestSn last_processed_req_seq_nbr_; // The result from last processed incoming request ReconfigurationResponseParameter::Result last_processed_req_result_; }; diff --git a/net/dcsctp/socket/stream_reset_handler_test.cc b/net/dcsctp/socket/stream_reset_handler_test.cc index 503cc89094..7b624a88d3 100644 --- a/net/dcsctp/socket/stream_reset_handler_test.cc +++ b/net/dcsctp/socket/stream_reset_handler_test.cc @@ -12,20 +12,22 @@ #include #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/task_queue/task_queue_base.h" #include "net/dcsctp/common/handover_testing.h" #include "net/dcsctp/common/internal_types.h" +#include "net/dcsctp/packet/chunk/forward_tsn_common.h" #include "net/dcsctp/packet/chunk/reconfig_chunk.h" #include "net/dcsctp/packet/parameter/incoming_ssn_reset_request_parameter.h" #include "net/dcsctp/packet/parameter/outgoing_ssn_reset_request_parameter.h" #include "net/dcsctp/packet/parameter/parameter.h" #include "net/dcsctp/packet/parameter/reconfiguration_response_parameter.h" #include "net/dcsctp/public/dcsctp_message.h" +#include "net/dcsctp/public/types.h" #include "net/dcsctp/rx/data_tracker.h" #include "net/dcsctp/rx/reassembly_queue.h" #include "net/dcsctp/socket/mock_context.h" @@ -42,10 +44,13 @@ namespace dcsctp { namespace { using ::testing::IsEmpty; using ::testing::NiceMock; +using ::testing::Property; using ::testing::Return; using ::testing::SizeIs; using ::testing::UnorderedElementsAre; +using ::webrtc::TimeDelta; using ResponseResult = ReconfigurationResponseParameter::Result; +using SkippedStream = AnyForwardTsnChunk::SkippedStream; constexpr TSN kMyInitialTsn = MockContext::MyInitialTsn(); constexpr ReconfigRequestSN kMyInitialReqSn = ReconfigRequestSN(*kMyInitialTsn); @@ -53,7 +58,7 @@ constexpr TSN kPeerInitialTsn = MockContext::PeerInitialTsn(); constexpr ReconfigRequestSN kPeerInitialReqSn = ReconfigRequestSN(*kPeerInitialTsn); constexpr uint32_t kArwnd = 131072; -constexpr DurationMs kRto = DurationMs(250); +constexpr TimeDelta kRto = TimeDelta::Millis(250); constexpr std::array kShortPayload = {1, 2, 3, 4}; @@ -93,25 +98,23 @@ class StreamResetHandlerTest : public testing::Test { }), delayed_ack_timer_(timer_manager_.CreateTimer( "test/delayed_ack", - []() { return absl::nullopt; }, - TimerOptions(DurationMs(0)))), + []() { return TimeDelta::Zero(); }, + TimerOptions(TimeDelta::Zero()))), t3_rtx_timer_(timer_manager_.CreateTimer( "test/t3_rtx", - []() { return absl::nullopt; }, - TimerOptions(DurationMs(0)))), + []() { return TimeDelta::Zero(); }, + TimerOptions(TimeDelta::Zero()))), data_tracker_(std::make_unique("log: ", delayed_ack_timer_.get(), kPeerInitialTsn)), - reasm_(std::make_unique("log: ", - kPeerInitialTsn, - kArwnd)), + reasm_(std::make_unique("log: ", kArwnd)), retransmission_queue_(std::make_unique( "", &callbacks_, kMyInitialTsn, kArwnd, producer_, - [](DurationMs rtt_ms) {}, + [](TimeDelta /* rtt */) {}, []() {}, *t3_rtx_timer_, DcSctpOptions())), @@ -125,10 +128,10 @@ class StreamResetHandlerTest : public testing::Test { EXPECT_CALL(ctx_, current_rto).WillRepeatedly(Return(kRto)); } - void AdvanceTime(DurationMs duration) { - callbacks_.AdvanceTime(kRto); + void AdvanceTime(TimeDelta duration) { + callbacks_.AdvanceTime(duration); for (;;) { - absl::optional timeout_id = callbacks_.GetNextExpiredTimeout(); + std::optional timeout_id = callbacks_.GetNextExpiredTimeout(); if 
(!timeout_id.has_value()) { break; } @@ -149,7 +152,7 @@ class StreamResetHandlerTest : public testing::Test { } std::vector responses; - absl::optional p = SctpPacket::Parse(payload, DcSctpOptions()); + std::optional p = SctpPacket::Parse(payload, DcSctpOptions()); if (!p.has_value()) { EXPECT_TRUE(false); return {}; @@ -158,7 +161,7 @@ class StreamResetHandlerTest : public testing::Test { EXPECT_TRUE(false); return {}; } - absl::optional response_chunk = + std::optional response_chunk = ReConfigChunk::Parse(p->descriptors()[0].data); if (!response_chunk.has_value()) { EXPECT_TRUE(false); @@ -166,7 +169,7 @@ class StreamResetHandlerTest : public testing::Test { } for (const auto& desc : response_chunk->parameters().descriptors()) { if (desc.type == ReconfigurationResponseParameter::kType) { - absl::optional response = + std::optional response = ReconfigurationResponseParameter::Parse(desc.data); if (!response.has_value()) { EXPECT_TRUE(false); @@ -196,12 +199,11 @@ class StreamResetHandlerTest : public testing::Test { data_tracker_ = std::make_unique( "log: ", delayed_ack_timer_.get(), kPeerInitialTsn); data_tracker_->RestoreFromState(state); - reasm_ = - std::make_unique("log: ", kPeerInitialTsn, kArwnd); + reasm_ = std::make_unique("log: ", kArwnd); reasm_->RestoreFromState(state); retransmission_queue_ = std::make_unique( "", &callbacks_, kMyInitialTsn, kArwnd, producer_, - [](DurationMs rtt_ms) {}, []() {}, *t3_rtx_timer_, DcSctpOptions(), + [](TimeDelta /* rtt */) {}, []() {}, *t3_rtx_timer_, DcSctpOptions(), /*supports_partial_reliability=*/true, /*use_message_interleaving=*/false); retransmission_queue_->RestoreFromState(state); @@ -289,61 +291,187 @@ TEST_F(StreamResetHandlerTest, ResetStreamsNotDeferred) { } TEST_F(StreamResetHandlerTest, ResetStreamsDeferred) { - DataGeneratorOptions opts; - opts.message_id = MID(0); - reasm_->Add(kPeerInitialTsn, gen_.Ordered({1, 2, 3, 4}, "BE", opts)); + constexpr StreamID kStreamId = StreamID(1); + data_tracker_->Observe(TSN(10)); + reasm_->Add(TSN(10), gen_.Ordered({1, 2, 3, 4}, "BE", {.mid = MID(0)})); - opts.message_id = MID(1); - reasm_->Add(AddTo(kPeerInitialTsn, 1), - gen_.Ordered({1, 2, 3, 4}, "BE", opts)); + data_tracker_->Observe(TSN(11)); + reasm_->Add(TSN(11), gen_.Ordered({1, 2, 3, 4}, "BE", {.mid = MID(1)})); - data_tracker_->Observe(kPeerInitialTsn); - data_tracker_->Observe(AddTo(kPeerInitialTsn, 1)); - EXPECT_THAT(reasm_->FlushMessages(), - UnorderedElementsAre( - SctpMessageIs(StreamID(1), PPID(53), kShortPayload), - SctpMessageIs(StreamID(1), PPID(53), kShortPayload))); + EXPECT_THAT( + reasm_->FlushMessages(), + UnorderedElementsAre(SctpMessageIs(kStreamId, PPID(53), kShortPayload), + SctpMessageIs(kStreamId, PPID(53), kShortPayload))); Parameters::Builder builder; builder.Add(OutgoingSSNResetRequestParameter( - kPeerInitialReqSn, ReconfigRequestSN(3), AddTo(kPeerInitialTsn, 3), - {StreamID(1)})); + ReconfigRequestSN(10), ReconfigRequestSN(3), TSN(13), {kStreamId})); + EXPECT_THAT(HandleAndCatchResponse(ReConfigChunk(builder.Build())), + ElementsAre(Property(&ReconfigurationResponseParameter::result, + ResponseResult::kInProgress))); - std::vector responses = - HandleAndCatchResponse(ReConfigChunk(builder.Build())); - EXPECT_THAT(responses, SizeIs(1)); - EXPECT_EQ(responses[0].result(), ResponseResult::kInProgress); + data_tracker_->Observe(TSN(15)); + reasm_->Add(TSN(15), gen_.Ordered({1, 2, 3, 4}, "BE", + {.mid = MID(1), .ppid = PPID(5)})); - opts.message_id = MID(1); - opts.ppid = PPID(5); - 
reasm_->Add(AddTo(kPeerInitialTsn, 5), - gen_.Ordered({1, 2, 3, 4}, "BE", opts)); - reasm_->MaybeResetStreamsDeferred(AddTo(kPeerInitialTsn, 1)); - - opts.message_id = MID(0); - opts.ppid = PPID(4); - reasm_->Add(AddTo(kPeerInitialTsn, 4), - gen_.Ordered({1, 2, 3, 4}, "BE", opts)); - reasm_->MaybeResetStreamsDeferred(AddTo(kPeerInitialTsn, 1)); - - opts.message_id = MID(3); - opts.ppid = PPID(3); - reasm_->Add(AddTo(kPeerInitialTsn, 3), - gen_.Ordered({1, 2, 3, 4}, "BE", opts)); - reasm_->MaybeResetStreamsDeferred(AddTo(kPeerInitialTsn, 1)); - - opts.message_id = MID(2); - opts.ppid = PPID(2); - reasm_->Add(AddTo(kPeerInitialTsn, 2), - gen_.Ordered({1, 2, 3, 4}, "BE", opts)); - reasm_->MaybeResetStreamsDeferred(AddTo(kPeerInitialTsn, 5)); + data_tracker_->Observe(TSN(14)); + reasm_->Add(TSN(14), gen_.Ordered({1, 2, 3, 4}, "BE", + {.mid = MID(0), .ppid = PPID(4)})); + + data_tracker_->Observe(TSN(13)); + reasm_->Add(TSN(13), gen_.Ordered({1, 2, 3, 4}, "BE", + {.mid = MID(3), .ppid = PPID(3)})); + + data_tracker_->Observe(TSN(12)); + reasm_->Add(TSN(12), gen_.Ordered({1, 2, 3, 4}, "BE", + {.mid = MID(2), .ppid = PPID(2)})); + + builder.Add(OutgoingSSNResetRequestParameter( + ReconfigRequestSN(11), ReconfigRequestSN(4), TSN(13), {kStreamId})); + EXPECT_THAT(HandleAndCatchResponse(ReConfigChunk(builder.Build())), + ElementsAre(Property(&ReconfigurationResponseParameter::result, + ResponseResult::kSuccessPerformed))); EXPECT_THAT( reasm_->FlushMessages(), - UnorderedElementsAre(SctpMessageIs(StreamID(1), PPID(2), kShortPayload), - SctpMessageIs(StreamID(1), PPID(3), kShortPayload), - SctpMessageIs(StreamID(1), PPID(4), kShortPayload), - SctpMessageIs(StreamID(1), PPID(5), kShortPayload))); + UnorderedElementsAre(SctpMessageIs(kStreamId, PPID(2), kShortPayload), + SctpMessageIs(kStreamId, PPID(3), kShortPayload), + SctpMessageIs(kStreamId, PPID(4), kShortPayload), + SctpMessageIs(kStreamId, PPID(5), kShortPayload))); +} + +TEST_F(StreamResetHandlerTest, ResetStreamsDeferredOnlySelectedStreams) { + // This test verifies the behavior when receiving messages on streams 1, 2 + // and 3 together with a reset request for streams 1 and 2, which causes + // deferred reset processing.
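The rule this test exercises can be stated compactly: while an incoming stream reset is pending, a received DATA chunk is deferred only if its TSN lies beyond the sender's "last assigned TSN" from the reset request and its stream is one of the streams being reset; everything else is delivered as usual. A minimal sketch of that predicate, using illustrative names (plain integers instead of the TSN/StreamID strong types, and ignoring TSN wrap-around); the actual handling inside ReassemblyQueue is not shown here:

#include <cstdint>
#include <set>

// Simplified stand-in for the state carried by a pending incoming reset.
struct PendingIncomingReset {
  uint32_t sender_last_assigned_tsn;    // From the OutgoingSSNResetRequest.
  std::set<uint16_t> streams_to_reset;  // Streams named in the request.
};

// Returns true if a received DATA chunk should be set aside until the reset
// has been performed, false if it can be delivered immediately.
bool ShouldDeferData(const PendingIncomingReset& reset,
                     uint32_t tsn,
                     uint16_t stream_id) {
  return tsn > reset.sender_last_assigned_tsn &&
         reset.streams_to_reset.count(stream_id) > 0;
}

With the values used in the test body below (last assigned TSN 12, streams 1 and 2), TSN 13 on stream 1 and TSN 14 on stream 2 are deferred, while TSN 15 on stream 3 is delivered immediately.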
+ + // Reset stream 1,2 with "last assigned TSN=12" + Parameters::Builder builder; + builder.Add(OutgoingSSNResetRequestParameter(ReconfigRequestSN(10), + ReconfigRequestSN(3), TSN(12), + {StreamID(1), StreamID(2)})); + EXPECT_THAT(HandleAndCatchResponse(ReConfigChunk(builder.Build())), + ElementsAre(Property(&ReconfigurationResponseParameter::result, + ResponseResult::kInProgress))); + + // TSN 10, SID 1 - before TSN 12 -> deliver + data_tracker_->Observe(TSN(10)); + reasm_->Add(TSN(10), gen_.Ordered({1, 2, 3, 4}, "BE", + {.stream_id = StreamID(1), + .mid = MID(0), + .ppid = PPID(1001)})); + + // TSN 11, SID 2 - before TSN 12 -> deliver + data_tracker_->Observe(TSN(11)); + reasm_->Add(TSN(11), gen_.Ordered({1, 2, 3, 4}, "BE", + {.stream_id = StreamID(2), + .mid = MID(0), + .ppid = PPID(1002)})); + + // TSN 12, SID 3 - at TSN 12 -> deliver + data_tracker_->Observe(TSN(12)); + reasm_->Add(TSN(12), gen_.Ordered({1, 2, 3, 4}, "BE", + {.stream_id = StreamID(3), + .mid = MID(0), + .ppid = PPID(1003)})); + + // TSN 13, SID 1 - after TSN 12 and SID=1 -> defer + data_tracker_->Observe(TSN(13)); + reasm_->Add(TSN(13), gen_.Ordered({1, 2, 3, 4}, "BE", + {.stream_id = StreamID(1), + .mid = MID(0), + .ppid = PPID(1004)})); + + // TSN 14, SID 2 - after TSN 12 and SID=2 -> defer + data_tracker_->Observe(TSN(14)); + reasm_->Add(TSN(14), gen_.Ordered({1, 2, 3, 4}, "BE", + {.stream_id = StreamID(2), + .mid = MID(0), + .ppid = PPID(1005)})); + + // TSN 15, SID 3 - after TSN 12, but SID 3 is not reset -> deliver + data_tracker_->Observe(TSN(15)); + reasm_->Add(TSN(15), gen_.Ordered({1, 2, 3, 4}, "BE", + {.stream_id = StreamID(3), + .mid = MID(1), + .ppid = PPID(1006)})); + + EXPECT_THAT(reasm_->FlushMessages(), + UnorderedElementsAre( + SctpMessageIs(StreamID(1), PPID(1001), kShortPayload), + SctpMessageIs(StreamID(2), PPID(1002), kShortPayload), + SctpMessageIs(StreamID(3), PPID(1003), kShortPayload), + SctpMessageIs(StreamID(3), PPID(1006), kShortPayload))); + + builder.Add(OutgoingSSNResetRequestParameter(ReconfigRequestSN(11), + ReconfigRequestSN(3), TSN(13), + {StreamID(1), StreamID(2)})); + EXPECT_THAT(HandleAndCatchResponse(ReConfigChunk(builder.Build())), + ElementsAre(Property(&ReconfigurationResponseParameter::result, + ResponseResult::kSuccessPerformed))); + + EXPECT_THAT(reasm_->FlushMessages(), + UnorderedElementsAre( + SctpMessageIs(StreamID(1), PPID(1004), kShortPayload), + SctpMessageIs(StreamID(2), PPID(1005), kShortPayload))); +} + +TEST_F(StreamResetHandlerTest, ResetStreamsDefersForwardTsn) { + // This test verifies that FORWARD-TSNs are deferred if they want to move + // the cumulative ack TSN point past sender's last assigned TSN. 
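The same deferral applies to FORWARD-TSN chunks, as the surrounding test goes on to show: a FORWARD-TSN that would advance the cumulative ack point past the sender's last assigned TSN is put aside until the reset has been performed. A minimal sketch under the same simplifications as above (plain integers, no wrap-around handling):

#include <cstdint>

// Returns true if a FORWARD-TSN must be deferred because its new cumulative
// TSN passes the sender's last assigned TSN of a pending incoming reset.
bool ShouldDeferForwardTsn(uint32_t new_cumulative_tsn,
                           uint32_t sender_last_assigned_tsn) {
  return new_cumulative_tsn > sender_last_assigned_tsn;
}

In the test body that follows, the FORWARD-TSN up to TSN 12 is processed immediately, while the one up to TSN 14 is deferred until the second reset request has been handled.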
+ static constexpr StreamID kStreamId = StreamID(42); + + // Simulate sender sends: + // * TSN 10 (SSN=0, BE, lost), + // * TSN 11 (SSN=1, BE, lost), + // * TSN 12 (SSN=2, BE, lost) + // * RESET THE STREAM + // * TSN 13 (SSN=0, B, received) + // * TSN 14 (SSN=0, E, lost), + // * TSN 15 (SSN=1, BE, received) + Parameters::Builder builder; + builder.Add(OutgoingSSNResetRequestParameter( + ReconfigRequestSN(10), ReconfigRequestSN(3), TSN(12), {kStreamId})); + EXPECT_THAT(HandleAndCatchResponse(ReConfigChunk(builder.Build())), + ElementsAre(Property(&ReconfigurationResponseParameter::result, + ResponseResult::kInProgress))); + + // TSN 13, B, after TSN=12 -> defer + data_tracker_->Observe(TSN(13)); + reasm_->Add(TSN(13), + gen_.Ordered( + {1, 2, 3, 4}, "B", + {.stream_id = kStreamId, .mid = MID(0), .ppid = PPID(1004)})); + + // TSN 15, BE, after TSN=12 -> defer + data_tracker_->Observe(TSN(15)); + reasm_->Add(TSN(15), + gen_.Ordered( + {1, 2, 3, 4}, "BE", + {.stream_id = kStreamId, .mid = MID(1), .ppid = PPID(1005)})); + + // Time passes, sender decides to send FORWARD-TSN up to the RESET. + data_tracker_->HandleForwardTsn(TSN(12)); + reasm_->HandleForwardTsn( + TSN(12), std::vector({SkippedStream(kStreamId, SSN(2))})); + + // The receiver sends a SACK in response to that. The stream hasn't been + // reset yet, but the sender now decides that TSN=13-14 is to be skipped. + // As this has a TSN 14, after TSN=12 -> defer it. + data_tracker_->HandleForwardTsn(TSN(14)); + reasm_->HandleForwardTsn( + TSN(14), std::vector({SkippedStream(kStreamId, SSN(0))})); + + // Reset the stream -> deferred TSNs should be re-added. + builder.Add(OutgoingSSNResetRequestParameter( + ReconfigRequestSN(11), ReconfigRequestSN(3), TSN(12), {kStreamId})); + EXPECT_THAT(HandleAndCatchResponse(ReConfigChunk(builder.Build())), + ElementsAre(Property(&ReconfigurationResponseParameter::result, + ResponseResult::kSuccessPerformed))); + + EXPECT_THAT(reasm_->FlushMessages(), + UnorderedElementsAre( + SctpMessageIs(kStreamId, PPID(1005), kShortPayload))); } TEST_F(StreamResetHandlerTest, SendOutgoingRequestDirectly) { @@ -354,7 +482,7 @@ TEST_F(StreamResetHandlerTest, SendOutgoingRequestDirectly) { EXPECT_CALL(producer_, GetStreamsReadyToBeReset()) .WillOnce(Return(std::vector({StreamID(42)}))); - absl::optional reconfig = handler_->MakeStreamResetRequest(); + std::optional reconfig = handler_->MakeStreamResetRequest(); ASSERT_TRUE(reconfig.has_value()); ASSERT_HAS_VALUE_AND_ASSIGN( OutgoingSSNResetRequestParameter req, @@ -382,7 +510,7 @@ TEST_F(StreamResetHandlerTest, ResetMultipleStreamsInOneRequest) { .WillOnce(Return( std::vector({StreamID(40), StreamID(41), StreamID(42), StreamID(43), StreamID(44)}))); - absl::optional reconfig = handler_->MakeStreamResetRequest(); + std::optional reconfig = handler_->MakeStreamResetRequest(); ASSERT_TRUE(reconfig.has_value()); ASSERT_HAS_VALUE_AND_ASSIGN( OutgoingSSNResetRequestParameter req, @@ -418,7 +546,7 @@ TEST_F(StreamResetHandlerTest, SendOutgoingResettingOnPositiveResponse) { EXPECT_CALL(producer_, GetStreamsReadyToBeReset()) .WillOnce(Return(std::vector({StreamID(42)}))); - absl::optional reconfig = handler_->MakeStreamResetRequest(); + std::optional reconfig = handler_->MakeStreamResetRequest(); ASSERT_TRUE(reconfig.has_value()); ASSERT_HAS_VALUE_AND_ASSIGN( OutgoingSSNResetRequestParameter req, @@ -446,7 +574,7 @@ TEST_F(StreamResetHandlerTest, SendOutgoingResetRollbackOnError) { EXPECT_CALL(producer_, GetStreamsReadyToBeReset()) 
.WillOnce(Return(std::vector({StreamID(42)}))); - absl::optional reconfig = handler_->MakeStreamResetRequest(); + std::optional reconfig = handler_->MakeStreamResetRequest(); ASSERT_TRUE(reconfig.has_value()); ASSERT_HAS_VALUE_AND_ASSIGN( OutgoingSSNResetRequestParameter req, @@ -476,7 +604,7 @@ TEST_F(StreamResetHandlerTest, SendOutgoingResetRetransmitOnInProgress) { EXPECT_CALL(producer_, GetStreamsReadyToBeReset()) .WillOnce(Return(std::vector({kStreamToReset}))); - absl::optional reconfig1 = handler_->MakeStreamResetRequest(); + std::optional reconfig1 = handler_->MakeStreamResetRequest(); ASSERT_TRUE(reconfig1.has_value()); ASSERT_HAS_VALUE_AND_ASSIGN( OutgoingSSNResetRequestParameter req1, @@ -528,7 +656,7 @@ TEST_F(StreamResetHandlerTest, ResetWhileRequestIsSentWillQueue) { EXPECT_CALL(producer_, GetStreamsReadyToBeReset()) .WillOnce(Return(std::vector({StreamID(42)}))); - absl::optional reconfig1 = handler_->MakeStreamResetRequest(); + std::optional reconfig1 = handler_->MakeStreamResetRequest(); ASSERT_TRUE(reconfig1.has_value()); ASSERT_HAS_VALUE_AND_ASSIGN( OutgoingSSNResetRequestParameter req1, @@ -543,7 +671,7 @@ TEST_F(StreamResetHandlerTest, ResetWhileRequestIsSentWillQueue) { EXPECT_CALL(producer_, PrepareResetStream(StreamID(43))); StreamID stream_ids[] = {StreamID(41), StreamID(43)}; handler_->ResetStreams(stream_ids); - EXPECT_EQ(handler_->MakeStreamResetRequest(), absl::nullopt); + EXPECT_EQ(handler_->MakeStreamResetRequest(), std::nullopt); Parameters::Builder builder; builder.Add(ReconfigurationResponseParameter( @@ -563,7 +691,7 @@ TEST_F(StreamResetHandlerTest, ResetWhileRequestIsSentWillQueue) { EXPECT_CALL(producer_, GetStreamsReadyToBeReset()) .WillOnce(Return(std::vector({StreamID(41), StreamID(43)}))); - absl::optional reconfig2 = handler_->MakeStreamResetRequest(); + std::optional reconfig2 = handler_->MakeStreamResetRequest(); ASSERT_TRUE(reconfig2.has_value()); ASSERT_HAS_VALUE_AND_ASSIGN( OutgoingSSNResetRequestParameter req2, @@ -683,7 +811,7 @@ TEST_F(StreamResetHandlerTest, HandoverInInitialState) { EXPECT_CALL(producer_, GetStreamsReadyToBeReset()) .WillOnce(Return(std::vector({StreamID(42)}))); - absl::optional reconfig = handler_->MakeStreamResetRequest(); + std::optional reconfig = handler_->MakeStreamResetRequest(); ASSERT_TRUE(reconfig.has_value()); ASSERT_HAS_VALUE_AND_ASSIGN( OutgoingSSNResetRequestParameter req, @@ -765,9 +893,8 @@ TEST_F(StreamResetHandlerTest, PerformCloseAfterOneFirstFailing) { // Let the socket receive the TSN. 
DataGeneratorOptions opts; - opts.message_id = MID(0); + opts.mid = MID(0); reasm_->Add(kPeerInitialTsn, gen_.Ordered({1, 2, 3, 4}, "BE", opts)); - reasm_->MaybeResetStreamsDeferred(kPeerInitialTsn); data_tracker_->Observe(kPeerInitialTsn); // And emulate that time has passed, and the peer retries the stream reset, diff --git a/net/dcsctp/socket/transmission_control_block.cc b/net/dcsctp/socket/transmission_control_block.cc index 1dcf394813..29f958be8d 100644 --- a/net/dcsctp/socket/transmission_control_block.cc +++ b/net/dcsctp/socket/transmission_control_block.cc @@ -12,11 +12,12 @@ #include #include #include +#include #include #include #include -#include "absl/types/optional.h" +#include "api/units/time_delta.h" #include "net/dcsctp/packet/chunk/data_chunk.h" #include "net/dcsctp/packet/chunk/forward_tsn_chunk.h" #include "net/dcsctp/packet/chunk/idata_chunk.h" @@ -25,6 +26,7 @@ #include "net/dcsctp/packet/chunk/sack_chunk.h" #include "net/dcsctp/packet/sctp_packet.h" #include "net/dcsctp/public/dcsctp_options.h" +#include "net/dcsctp/public/types.h" #include "net/dcsctp/rx/data_tracker.h" #include "net/dcsctp/rx/reassembly_queue.h" #include "net/dcsctp/socket/capabilities.h" @@ -36,6 +38,8 @@ #include "rtc_base/strings/string_builder.h" namespace dcsctp { +using ::webrtc::TimeDelta; +using ::webrtc::Timestamp; TransmissionControlBlock::TransmissionControlBlock( TimerManager& timer_manager, @@ -60,18 +64,20 @@ TransmissionControlBlock::TransmissionControlBlock( t3_rtx_(timer_manager_.CreateTimer( "t3-rtx", absl::bind_front(&TransmissionControlBlock::OnRtxTimerExpiry, this), - TimerOptions(options.rto_initial, + TimerOptions(options.rto_initial.ToTimeDelta(), TimerBackoffAlgorithm::kExponential, - /*max_restarts=*/absl::nullopt, - options.max_timer_backoff_duration))), + /*max_restarts=*/std::nullopt, + options.max_timer_backoff_duration.has_value() + ? 
options.max_timer_backoff_duration->ToTimeDelta() + : TimeDelta::PlusInfinity()))), delayed_ack_timer_(timer_manager_.CreateTimer( "delayed-ack", absl::bind_front(&TransmissionControlBlock::OnDelayedAckTimerExpiry, this), - TimerOptions(options.delayed_ack_max_timeout, + TimerOptions(options.delayed_ack_max_timeout.ToTimeDelta(), TimerBackoffAlgorithm::kExponential, /*max_restarts=*/0, - /*max_backoff_duration=*/absl::nullopt, + /*max_backoff_duration=*/TimeDelta::PlusInfinity(), webrtc::TaskQueueBase::DelayPrecision::kHigh))), my_verification_tag_(my_verification_tag), my_initial_tsn_(my_initial_tsn), @@ -84,7 +90,6 @@ TransmissionControlBlock::TransmissionControlBlock( tx_error_counter_(log_prefix, options), data_tracker_(log_prefix, delayed_ack_timer_.get(), peer_initial_tsn), reassembly_queue_(log_prefix, - peer_initial_tsn, options.max_receiver_window_buffer_size, capabilities.message_interleaving), retransmission_queue_( @@ -109,21 +114,22 @@ TransmissionControlBlock::TransmissionControlBlock( send_queue.EnableMessageInterleaving(capabilities.message_interleaving); } -void TransmissionControlBlock::ObserveRTT(DurationMs rtt) { - DurationMs prev_rto = rto_.rto(); +void TransmissionControlBlock::ObserveRTT(TimeDelta rtt) { + TimeDelta prev_rto = rto_.rto(); rto_.ObserveRTT(rtt); - RTC_DLOG(LS_VERBOSE) << log_prefix_ << "new rtt=" << *rtt - << ", srtt=" << *rto_.srtt() << ", rto=" << *rto_.rto() - << " (" << *prev_rto << ")"; + RTC_DLOG(LS_VERBOSE) << log_prefix_ << "new rtt=" << webrtc::ToString(rtt) + << ", srtt=" << webrtc::ToString(rto_.srtt()) + << ", rto=" << webrtc::ToString(rto_.rto()) << " (" + << webrtc::ToString(prev_rto) << ")"; t3_rtx_->set_duration(rto_.rto()); - DurationMs delayed_ack_tmo = - std::min(rto_.rto() * 0.5, options_.delayed_ack_max_timeout); + TimeDelta delayed_ack_tmo = std::min( + rto_.rto() * 0.5, options_.delayed_ack_max_timeout.ToTimeDelta()); delayed_ack_timer_->set_duration(delayed_ack_tmo); } -absl::optional TransmissionControlBlock::OnRtxTimerExpiry() { - TimeMs now = callbacks_.TimeMillis(); +TimeDelta TransmissionControlBlock::OnRtxTimerExpiry() { + Timestamp now = callbacks_.Now(); RTC_DLOG(LS_INFO) << log_prefix_ << "Timer " << t3_rtx_->name() << " has expired"; if (cookie_echo_chunk_.has_value()) { @@ -136,13 +142,13 @@ absl::optional TransmissionControlBlock::OnRtxTimerExpiry() { SendBufferedPackets(now); } } - return absl::nullopt; + return TimeDelta::Zero(); } -absl::optional TransmissionControlBlock::OnDelayedAckTimerExpiry() { +TimeDelta TransmissionControlBlock::OnDelayedAckTimerExpiry() { data_tracker_.HandleDelayedAckTimerExpiry(); MaybeSendSack(); - return absl::nullopt; + return TimeDelta::Zero(); } void TransmissionControlBlock::MaybeSendSack() { @@ -155,7 +161,7 @@ void TransmissionControlBlock::MaybeSendSack() { } void TransmissionControlBlock::MaybeSendForwardTsn(SctpPacket::Builder& builder, - TimeMs now) { + Timestamp now) { if (now >= limit_forward_tsn_until_ && retransmission_queue_.ShouldSendForwardTsn(now)) { if (capabilities_.message_interleaving) { @@ -163,14 +169,14 @@ void TransmissionControlBlock::MaybeSendForwardTsn(SctpPacket::Builder& builder, } else { builder.Add(retransmission_queue_.CreateForwardTsn()); } - packet_sender_.Send(builder); // https://datatracker.ietf.org/doc/html/rfc3758 // "IMPLEMENTATION NOTE: An implementation may wish to limit the number of // duplicate FORWARD TSN chunks it sends by ... waiting a full RTT before // sending a duplicate FORWARD TSN." 
// "Any delay applied to the sending of FORWARD TSN chunk SHOULD NOT exceed // 200ms and MUST NOT exceed 500ms". - limit_forward_tsn_until_ = now + std::min(DurationMs(200), rto_.srtt()); + limit_forward_tsn_until_ = + now + std::min(TimeDelta::Millis(200), rto_.srtt()); } } @@ -198,14 +204,12 @@ void TransmissionControlBlock::MaybeSendFastRetransmit() { builder.Add(DataChunk(tsn, std::move(data), false)); } } - packet_sender_.Send(builder); + Send(builder); } void TransmissionControlBlock::SendBufferedPackets(SctpPacket::Builder& builder, - TimeMs now) { - for (int packet_idx = 0; - packet_idx < options_.max_burst && retransmission_queue_.can_send_data(); - ++packet_idx) { + Timestamp now) { + for (int packet_idx = 0; packet_idx < options_.max_burst; ++packet_idx) { // Only add control chunks to the first packet that is sent, if sending // multiple packets in one go (as allowed by the congestion window). if (packet_idx == 0) { @@ -228,7 +232,7 @@ void TransmissionControlBlock::SendBufferedPackets(SctpPacket::Builder& builder, reassembly_queue_.remaining_bytes())); } MaybeSendForwardTsn(builder, now); - absl::optional reconfig = + std::optional reconfig = stream_reset_handler_.MakeStreamResetRequest(); if (reconfig.has_value()) { builder.Add(*reconfig); @@ -237,15 +241,30 @@ void TransmissionControlBlock::SendBufferedPackets(SctpPacket::Builder& builder, auto chunks = retransmission_queue_.GetChunksToSend(now, builder.bytes_remaining()); + + if (!chunks.empty()) { + // https://datatracker.ietf.org/doc/html/rfc9260#section-8.3 + // Sending DATA means that the path is not idle - restart heartbeat timer. + heartbeat_handler_.RestartTimer(); + } + + bool set_immediate_sack_bit = + cwnd() < (options_.immediate_sack_under_cwnd_mtus * options_.mtu); for (auto& [tsn, data] : chunks) { if (capabilities_.message_interleaving) { - builder.Add(IDataChunk(tsn, std::move(data), false)); + builder.Add(IDataChunk(tsn, std::move(data), set_immediate_sack_bit)); } else { - builder.Add(DataChunk(tsn, std::move(data), false)); + builder.Add(DataChunk(tsn, std::move(data), set_immediate_sack_bit)); } } - if (!packet_sender_.Send(builder)) { + // https://www.ietf.org/archive/id/draft-tuexen-tsvwg-sctp-zero-checksum-02.html#section-4.2 + // "When an end point sends a packet containing a COOKIE ECHO chunk, it MUST + // include a correct CRC32c checksum in the packet containing the COOKIE + // ECHO chunk." 
+ bool write_checksum = + !capabilities_.zero_checksum || cookie_echo_chunk_.has_value(); + if (!packet_sender_.Send(builder, write_checksum)) { break; } @@ -259,7 +278,7 @@ void TransmissionControlBlock::SendBufferedPackets(SctpPacket::Builder& builder, } std::string TransmissionControlBlock::ToString() const { - rtc::StringBuilder sb; + webrtc::StringBuilder sb; sb.AppendFormat( "verification_tag=%08x, last_cumulative_ack=%u, capabilities=", @@ -274,6 +293,9 @@ std::string TransmissionControlBlock::ToString() const { if (capabilities_.reconfig) { sb << "Reconfig,"; } + if (capabilities_.zero_checksum) { + sb << "ZeroChecksum,"; + } sb << " max_in=" << capabilities_.negotiated_maximum_incoming_streams; sb << " max_out=" << capabilities_.negotiated_maximum_outgoing_streams; @@ -294,6 +316,7 @@ void TransmissionControlBlock::AddHandoverState( state.capabilities.partial_reliability = capabilities_.partial_reliability; state.capabilities.message_interleaving = capabilities_.message_interleaving; state.capabilities.reconfig = capabilities_.reconfig; + state.capabilities.zero_checksum = capabilities_.zero_checksum; state.capabilities.negotiated_maximum_incoming_streams = capabilities_.negotiated_maximum_incoming_streams; state.capabilities.negotiated_maximum_outgoing_streams = diff --git a/net/dcsctp/socket/transmission_control_block.h b/net/dcsctp/socket/transmission_control_block.h index fc66fcc857..e4c58219fc 100644 --- a/net/dcsctp/socket/transmission_control_block.h +++ b/net/dcsctp/socket/transmission_control_block.h @@ -67,8 +67,8 @@ class TransmissionControlBlock : public Context { TSN my_initial_tsn() const override { return my_initial_tsn_; } TSN peer_initial_tsn() const override { return peer_initial_tsn_; } DcSctpSocketCallbacks& callbacks() const override { return callbacks_; } - void ObserveRTT(DurationMs rtt) override; - DurationMs current_rto() const override { return rto_.rto(); } + void ObserveRTT(webrtc::TimeDelta rtt) override; + webrtc::TimeDelta current_rto() const override { return rto_.rto(); } bool IncrementTxErrorCounter(absl::string_view reason) override { return tx_error_counter_.Increment(reason); } @@ -80,7 +80,8 @@ class TransmissionControlBlock : public Context { return tx_error_counter_.IsExhausted(); } void Send(SctpPacket::Builder& builder) override { - packet_sender_.Send(builder); + packet_sender_.Send(builder, + /*write_checksum=*/!capabilities_.zero_checksum); } // Other accessors @@ -90,7 +91,7 @@ class TransmissionControlBlock : public Context { StreamResetHandler& stream_reset_handler() { return stream_reset_handler_; } HeartbeatHandler& heartbeat_handler() { return heartbeat_handler_; } size_t cwnd() const { return retransmission_queue_.cwnd(); } - DurationMs current_srtt() const { return rto_.srtt(); } + webrtc::TimeDelta current_srtt() const { return rto_.srtt(); } // Returns this socket's verification tag, set in all packet headers. VerificationTag my_verification_tag() const { return my_verification_tag_; } @@ -107,7 +108,7 @@ class TransmissionControlBlock : public Context { void MaybeSendSack(); // Sends a FORWARD-TSN, if it is needed and allowed (rate-limited). - void MaybeSendForwardTsn(SctpPacket::Builder& builder, TimeMs now); + void MaybeSendForwardTsn(SctpPacket::Builder& builder, webrtc::Timestamp now); // Will be set while the socket is in kCookieEcho state. 
In this state, there // can only be a single packet outstanding, and it must contain the COOKIE @@ -119,7 +120,7 @@ class TransmissionControlBlock : public Context { // Called when the COOKIE ACK chunk has been received, to allow further // packets to be sent. - void ClearCookieEchoChunk() { cookie_echo_chunk_ = absl::nullopt; } + void ClearCookieEchoChunk() { cookie_echo_chunk_ = std::nullopt; } bool has_cookie_echo_chunk() const { return cookie_echo_chunk_.has_value(); } @@ -128,12 +129,12 @@ class TransmissionControlBlock : public Context { // Fills `builder` (which may already be filled with control chunks) with // other control and data chunks, and sends packets as much as can be // allowed by the congestion control algorithm. - void SendBufferedPackets(SctpPacket::Builder& builder, TimeMs now); + void SendBufferedPackets(SctpPacket::Builder& builder, webrtc::Timestamp now); // As above, but without passing in a builder. If `cookie_echo_chunk_` is // present, then only one packet will be sent, with this chunk as the first // chunk. - void SendBufferedPackets(TimeMs now) { + void SendBufferedPackets(webrtc::Timestamp now) { SctpPacket::Builder builder(peer_verification_tag_, options_); SendBufferedPackets(builder, now); } @@ -148,9 +149,9 @@ class TransmissionControlBlock : public Context { private: // Will be called when the retransmission timer (t3-rtx) expires. - absl::optional OnRtxTimerExpiry(); + webrtc::TimeDelta OnRtxTimerExpiry(); // Will be called when the delayed ack timer expires. - absl::optional OnDelayedAckTimerExpiry(); + webrtc::TimeDelta OnDelayedAckTimerExpiry(); const absl::string_view log_prefix_; const DcSctpOptions options_; @@ -171,7 +172,7 @@ class TransmissionControlBlock : public Context { const std::function is_connection_established_; PacketSender& packet_sender_; // Rate limiting of FORWARD-TSN. Next can be sent at or after this timestamp. - TimeMs limit_forward_tsn_until_ = TimeMs(0); + webrtc::Timestamp limit_forward_tsn_until_ = webrtc::Timestamp::Zero(); RetransmissionTimeout rto_; RetransmissionErrorCounter tx_error_counter_; @@ -186,7 +187,7 @@ class TransmissionControlBlock : public Context { // including a COOKIE ECHO). So if `cookie_echo_chunk_` is present, the // SendBufferedChunks will always only just send one packet, with this chunk // as the first chunk in the packet. 
- absl::optional cookie_echo_chunk_ = absl::nullopt; + std::optional cookie_echo_chunk_ = std::nullopt; }; } // namespace dcsctp diff --git a/net/dcsctp/socket/transmission_control_block_test.cc b/net/dcsctp/socket/transmission_control_block_test.cc index 40aea58d4b..60876b3932 100644 --- a/net/dcsctp/socket/transmission_control_block_test.cc +++ b/net/dcsctp/socket/transmission_control_block_test.cc @@ -12,10 +12,10 @@ #include #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/task_queue/task_queue_base.h" #include "net/dcsctp/common/handover_testing.h" @@ -63,7 +63,8 @@ class TransmissionControlBlockTest : public testing::Test { Capabilities capabilities_; StrictMock callbacks_; StrictMock send_queue_; - testing::MockFunction, SendPacketStatus)> + testing::MockFunction, + SendPacketStatus)> on_send_fn_; testing::MockFunction on_connection_established; PacketSender sender_; @@ -92,6 +93,7 @@ TEST_F(TransmissionControlBlockTest, LogsAllCapabilitiesInToSring) { capabilities_.negotiated_maximum_outgoing_streams = 2000; capabilities_.message_interleaving = true; capabilities_.partial_reliability = true; + capabilities_.zero_checksum = true; capabilities_.reconfig = true; TransmissionControlBlock tcb( @@ -99,9 +101,10 @@ TEST_F(TransmissionControlBlockTest, LogsAllCapabilitiesInToSring) { kMyVerificationTag, kMyInitialTsn, kPeerVerificationTag, kPeerInitialTsn, kArwnd, kTieTag, sender_, on_connection_established.AsStdFunction()); - EXPECT_EQ(tcb.ToString(), - "verification_tag=000001c8, last_cumulative_ack=999, " - "capabilities=PR,IL,Reconfig, max_in=1000 max_out=2000"); + EXPECT_EQ( + tcb.ToString(), + "verification_tag=000001c8, last_cumulative_ack=999, " + "capabilities=PR,IL,Reconfig,ZeroChecksum, max_in=1000 max_out=2000"); } TEST_F(TransmissionControlBlockTest, IsInitiallyHandoverReady) { diff --git a/net/dcsctp/testing/BUILD.gn b/net/dcsctp/testing/BUILD.gn index 7e005a1f0c..069d486557 100644 --- a/net/dcsctp/testing/BUILD.gn +++ b/net/dcsctp/testing/BUILD.gn @@ -21,13 +21,10 @@ rtc_library("data_generator") { "../common:internal_types", "../packet:data", "../public:types", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "data_generator.cc", "data_generator.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } diff --git a/net/dcsctp/testing/data_generator.cc b/net/dcsctp/testing/data_generator.cc index e4f9f91384..417695c9d0 100644 --- a/net/dcsctp/testing/data_generator.cc +++ b/net/dcsctp/testing/data_generator.cc @@ -32,13 +32,13 @@ Data DataGenerator::Ordered(std::vector payload, } else { fsn_ = FSN(*fsn_ + 1); } - MID message_id = opts.message_id.value_or(message_id_); - Data ret = Data(opts.stream_id, SSN(static_cast(*message_id)), - message_id, fsn_, opts.ppid, std::move(payload), is_beginning, - is_end, IsUnordered(false)); + MID mid = opts.mid.value_or(mid_); + Data ret = Data(opts.stream_id, SSN(static_cast(*mid)), mid, fsn_, + opts.ppid, std::move(payload), is_beginning, is_end, + IsUnordered(false)); if (is_end) { - message_id_ = MID(*message_id + 1); + mid_ = MID(*mid + 1); } return ret; } @@ -54,11 +54,11 @@ Data DataGenerator::Unordered(std::vector payload, } else { fsn_ = FSN(*fsn_ + 1); } - MID message_id = opts.message_id.value_or(message_id_); - Data ret = Data(opts.stream_id, SSN(0), message_id, fsn_, kPpid, - std::move(payload), is_beginning, is_end, IsUnordered(true)); + MID mid = opts.mid.value_or(mid_); 
+ Data ret = Data(opts.stream_id, SSN(0), mid, fsn_, kPpid, std::move(payload), + is_beginning, is_end, IsUnordered(true)); if (is_end) { - message_id_ = MID(*message_id + 1); + mid_ = MID(*mid + 1); } return ret; } diff --git a/net/dcsctp/testing/data_generator.h b/net/dcsctp/testing/data_generator.h index f917c740a7..f8853298f9 100644 --- a/net/dcsctp/testing/data_generator.h +++ b/net/dcsctp/testing/data_generator.h @@ -11,10 +11,10 @@ #define NET_DCSCTP_TESTING_DATA_GENERATOR_H_ #include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/common/internal_types.h" #include "net/dcsctp/packet/data.h" @@ -23,15 +23,14 @@ namespace dcsctp { struct DataGeneratorOptions { StreamID stream_id = StreamID(1); - absl::optional message_id = absl::nullopt; + std::optional mid = std::nullopt; PPID ppid = PPID(53); }; // Generates Data with correct sequence numbers, and used only in unit tests. class DataGenerator { public: - explicit DataGenerator(MID start_message_id = MID(0)) - : message_id_(start_message_id) {} + explicit DataGenerator(MID start_mid = MID(0)) : mid_(start_mid) {} // Generates ordered "data" with the provided `payload` and flags, which can // contain "B" for setting the "is_beginning" flag, and/or "E" for setting the @@ -48,10 +47,10 @@ class DataGenerator { DataGeneratorOptions opts = {}); // Resets the Message ID identifier - simulating a "stream reset". - void ResetStream() { message_id_ = MID(0); } + void ResetStream() { mid_ = MID(0); } private: - MID message_id_; + MID mid_; FSN fsn_ = FSN(0); }; } // namespace dcsctp diff --git a/net/dcsctp/testing/testing_macros.h b/net/dcsctp/testing/testing_macros.h index 5cbdfffdce..dafa76fc12 100644 --- a/net/dcsctp/testing/testing_macros.h +++ b/net/dcsctp/testing/testing_macros.h @@ -17,7 +17,7 @@ namespace dcsctp { #define DCSCTP_CONCAT_INNER_(x, y) x##y #define DCSCTP_CONCAT_(x, y) DCSCTP_CONCAT_INNER_(x, y) -// Similar to ASSERT_OK_AND_ASSIGN, this works with an absl::optional<> instead +// Similar to ASSERT_OK_AND_ASSIGN, this works with an std::optional<> instead // of an absl::StatusOr<>. 
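Usage of the macro is unchanged by the migration; only the optional type produced by `rexpr` differs. The macro itself is defined just below; here is a small illustrative example, assuming a hypothetical ParseNumber helper (not part of dcsctp) and the standard GoogleTest headers:

#include <optional>
#include <string>

#include <gtest/gtest.h>

#include "net/dcsctp/testing/testing_macros.h"

namespace {

// Hypothetical helper used only for illustration.
std::optional<int> ParseNumber(const std::string& s) {
  if (s.empty()) {
    return std::nullopt;
  }
  return std::stoi(s);
}

TEST(TestingMacrosExample, AssignsFromOptional) {
  // Fails the test if ParseNumber() returns std::nullopt; otherwise assigns
  // the contained value to `value`.
  ASSERT_HAS_VALUE_AND_ASSIGN(int value, ParseNumber("42"));
  EXPECT_EQ(value, 42);
}

}  // namespace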
#define ASSERT_HAS_VALUE_AND_ASSIGN(lhs, rexpr) \ auto DCSCTP_CONCAT_(tmp_opt_val__, __LINE__) = rexpr; \ diff --git a/net/dcsctp/timer/BUILD.gn b/net/dcsctp/timer/BUILD.gn index d3be1ec872..5b2eb1e620 100644 --- a/net/dcsctp/timer/BUILD.gn +++ b/net/dcsctp/timer/BUILD.gn @@ -12,23 +12,22 @@ rtc_library("timer") { deps = [ "../../../api:array_view", "../../../api/task_queue:task_queue", + "../../../api/units:time_delta", + "../../../api/units:timestamp", "../../../rtc_base:checks", "../../../rtc_base:strong_alias", "../../../rtc_base/containers:flat_map", "../../../rtc_base/containers:flat_set", "../public:socket", "../public:types", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "fake_timeout.h", "timer.cc", "timer.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("task_queue_timeout") { @@ -37,6 +36,7 @@ rtc_library("task_queue_timeout") { "../../../api/task_queue:pending_task_safety_flag", "../../../api/task_queue:task_queue", "../../../api/units:time_delta", + "../../../api/units:timestamp", "../../../rtc_base:checks", "../../../rtc_base:logging", "../public:socket", @@ -59,6 +59,7 @@ if (rtc_include_tests) { "../../../api:array_view", "../../../api/task_queue:task_queue", "../../../api/task_queue/test:mock_task_queue_base", + "../../../api/units:time_delta", "../../../rtc_base:checks", "../../../rtc_base:gunit_helpers", "../../../test:test_support", @@ -69,6 +70,5 @@ if (rtc_include_tests) { "task_queue_timeout_test.cc", "timer_test.cc", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } } diff --git a/net/dcsctp/timer/fake_timeout.h b/net/dcsctp/timer/fake_timeout.h index 74ffe5af29..52a1b13dc7 100644 --- a/net/dcsctp/timer/fake_timeout.h +++ b/net/dcsctp/timer/fake_timeout.h @@ -14,12 +14,14 @@ #include #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/task_queue/task_queue_base.h" +#include "api/units/timestamp.h" #include "net/dcsctp/public/timeout.h" +#include "net/dcsctp/public/types.h" #include "rtc_base/checks.h" #include "rtc_base/containers/flat_set.h" @@ -28,45 +30,46 @@ namespace dcsctp { // A timeout used in tests. 
class FakeTimeout : public Timeout { public: - FakeTimeout(std::function get_time, + FakeTimeout(std::function get_time, std::function on_delete) : get_time_(std::move(get_time)), on_delete_(std::move(on_delete)) {} ~FakeTimeout() override { on_delete_(this); } void Start(DurationMs duration_ms, TimeoutID timeout_id) override { - RTC_DCHECK(expiry_ == TimeMs::InfiniteFuture()); + RTC_DCHECK(expiry_.IsPlusInfinity()); timeout_id_ = timeout_id; - expiry_ = get_time_() + duration_ms; + expiry_ = get_time_() + duration_ms.ToTimeDelta(); } void Stop() override { - RTC_DCHECK(expiry_ != TimeMs::InfiniteFuture()); - expiry_ = TimeMs::InfiniteFuture(); + RTC_DCHECK(!expiry_.IsPlusInfinity()); + expiry_ = webrtc::Timestamp::PlusInfinity(); } - bool EvaluateHasExpired(TimeMs now) { + bool EvaluateHasExpired(webrtc::Timestamp now) { if (now >= expiry_) { - expiry_ = TimeMs::InfiniteFuture(); + expiry_ = webrtc::Timestamp::PlusInfinity(); return true; } return false; } TimeoutID timeout_id() const { return timeout_id_; } + webrtc::Timestamp expiry() const { return expiry_; } private: - const std::function get_time_; + const std::function get_time_; const std::function on_delete_; TimeoutID timeout_id_ = TimeoutID(0); - TimeMs expiry_ = TimeMs::InfiniteFuture(); + webrtc::Timestamp expiry_ = webrtc::Timestamp::PlusInfinity(); }; class FakeTimeoutManager { public: // The `get_time` function must return the current time, relative to any // epoch. - explicit FakeTimeoutManager(std::function get_time) + explicit FakeTimeoutManager(std::function get_time) : get_time_(std::move(get_time)) {} std::unique_ptr CreateTimeout() { @@ -76,7 +79,7 @@ class FakeTimeoutManager { return timer; } std::unique_ptr CreateTimeout( - webrtc::TaskQueueBase::DelayPrecision precision) { + webrtc::TaskQueueBase::DelayPrecision /* precision */) { // FakeTimeout does not implement |precision|. return CreateTimeout(); } @@ -86,19 +89,32 @@ class FakeTimeoutManager { // still believes it's running, and it needs to be updated to set // Timer::is_running_ to false before you operate on the Timer or Timeout // again. - absl::optional GetNextExpiredTimeout() { - TimeMs now = get_time_(); + std::optional GetNextExpiredTimeout() { + webrtc::Timestamp now = get_time_(); std::vector expired_timers; for (auto& timer : timers_) { if (timer->EvaluateHasExpired(now)) { return timer->timeout_id(); } } - return absl::nullopt; + return std::nullopt; + } + + webrtc::TimeDelta GetTimeToNextTimeout() const { + webrtc::Timestamp next_expiry = webrtc::Timestamp::PlusInfinity(); + for (const FakeTimeout* timer : timers_) { + if (timer->expiry() < next_expiry) { + next_expiry = timer->expiry(); + } + } + webrtc::Timestamp now = get_time_(); + return !next_expiry.IsPlusInfinity() && next_expiry >= now + ?
next_expiry - now + : webrtc::TimeDelta::PlusInfinity(); } private: - const std::function get_time_; + const std::function get_time_; webrtc::flat_set timers_; }; diff --git a/net/dcsctp/timer/task_queue_timeout.cc b/net/dcsctp/timer/task_queue_timeout.cc index 6c43640d39..ef891e186d 100644 --- a/net/dcsctp/timer/task_queue_timeout.cc +++ b/net/dcsctp/timer/task_queue_timeout.cc @@ -14,6 +14,8 @@ #include "rtc_base/logging.h" namespace dcsctp { +using ::webrtc::TimeDelta; +using ::webrtc::Timestamp; TaskQueueTimeoutFactory::TaskQueueTimeout::TaskQueueTimeout( TaskQueueTimeoutFactory& parent, @@ -30,8 +32,8 @@ TaskQueueTimeoutFactory::TaskQueueTimeout::~TaskQueueTimeout() { void TaskQueueTimeoutFactory::TaskQueueTimeout::Start(DurationMs duration_ms, TimeoutID timeout_id) { RTC_DCHECK_RUN_ON(&parent_.thread_checker_); - RTC_DCHECK(timeout_expiration_ == TimeMs::InfiniteFuture()); - timeout_expiration_ = parent_.get_time_() + duration_ms; + RTC_DCHECK(timeout_expiration_.IsPlusInfinity()); + timeout_expiration_ = parent_.Now() + duration_ms.ToTimeDelta(); timeout_id_ = timeout_id; if (timeout_expiration_ >= posted_task_expiration_) { @@ -43,7 +45,7 @@ void TaskQueueTimeoutFactory::TaskQueueTimeout::Start(DurationMs duration_ms, return; } - if (posted_task_expiration_ != TimeMs::InfiniteFuture()) { + if (!posted_task_expiration_.IsPlusInfinity()) { RTC_DLOG(LS_VERBOSE) << "New timeout duration is less than scheduled - " "ghosting old delayed task."; // There is already a scheduled delayed task, but its expiration time is @@ -58,34 +60,37 @@ void TaskQueueTimeoutFactory::TaskQueueTimeout::Start(DurationMs duration_ms, posted_task_expiration_ = timeout_expiration_; parent_.task_queue_.PostDelayedTaskWithPrecision( precision_, - webrtc::SafeTask( - pending_task_safety_flag_, - [timeout_id, this]() { - RTC_DLOG(LS_VERBOSE) << "Timout expired: " << timeout_id.value(); - RTC_DCHECK_RUN_ON(&parent_.thread_checker_); - RTC_DCHECK(posted_task_expiration_ != TimeMs::InfiniteFuture()); - posted_task_expiration_ = TimeMs::InfiniteFuture(); + webrtc::SafeTask(pending_task_safety_flag_, + [timeout_id, this]() { + RTC_DLOG(LS_VERBOSE) + << "Timout expired: " << timeout_id.value(); + RTC_DCHECK_RUN_ON(&parent_.thread_checker_); + RTC_DCHECK(!posted_task_expiration_.IsPlusInfinity()); + posted_task_expiration_ = Timestamp::PlusInfinity(); - if (timeout_expiration_ == TimeMs::InfiniteFuture()) { - // The timeout was stopped before it expired. Very common. - } else { - // Note that the timeout might have been restarted, which updated - // `timeout_expiration_` but left the scheduled task running. So - // if it's not quite time to trigger the timeout yet, schedule a - // new delayed task with what's remaining and retry at that point - // in time. - DurationMs remaining = timeout_expiration_ - parent_.get_time_(); - timeout_expiration_ = TimeMs::InfiniteFuture(); - if (*remaining > 0) { - Start(remaining, timeout_id_); - } else { - // It has actually triggered. - RTC_DLOG(LS_VERBOSE) - << "Timout triggered: " << timeout_id.value(); - parent_.on_expired_(timeout_id_); - } - } - }), + if (timeout_expiration_.IsPlusInfinity()) { + // The timeout was stopped before it expired. Very + // common. + } else { + // Note that the timeout might have been restarted, + // which updated `timeout_expiration_` but left the + // scheduled task running. So if it's not quite time + // to trigger the timeout yet, schedule a new delayed + // task with what's remaining and retry at that point + // in time. 
+ TimeDelta remaining = + timeout_expiration_ - parent_.Now(); + timeout_expiration_ = Timestamp::PlusInfinity(); + if (remaining > TimeDelta::Zero()) { + Start(DurationMs(remaining.ms()), timeout_id_); + } else { + // It has actually triggered. + RTC_DLOG(LS_VERBOSE) + << "Timout triggered: " << timeout_id.value(); + parent_.on_expired_(timeout_id_); + } + } + }), webrtc::TimeDelta::Millis(duration_ms.value())); } @@ -93,7 +98,7 @@ void TaskQueueTimeoutFactory::TaskQueueTimeout::Stop() { // As the TaskQueue doesn't support deleting a posted task, just mark the // timeout as not running. RTC_DCHECK_RUN_ON(&parent_.thread_checker_); - timeout_expiration_ = TimeMs::InfiniteFuture(); + timeout_expiration_ = Timestamp::PlusInfinity(); } } // namespace dcsctp diff --git a/net/dcsctp/timer/task_queue_timeout.h b/net/dcsctp/timer/task_queue_timeout.h index faae14464f..7dccd2adc2 100644 --- a/net/dcsctp/timer/task_queue_timeout.h +++ b/net/dcsctp/timer/task_queue_timeout.h @@ -15,6 +15,7 @@ #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" +#include "api/units/timestamp.h" #include "net/dcsctp/public/timeout.h" namespace dcsctp { @@ -71,17 +72,21 @@ class TaskQueueTimeoutFactory { // expiration time _further away_ than what is now the expected expiration // time. In this scenario, a new delayed task has to be posted with a // shorter duration and the old task has to be forgotten. - rtc::scoped_refptr pending_task_safety_flag_; + webrtc::scoped_refptr + pending_task_safety_flag_; // The time when the posted delayed task is set to expire. Will be set to // the infinite future if there is no such task running. - TimeMs posted_task_expiration_ = TimeMs::InfiniteFuture(); + webrtc::Timestamp posted_task_expiration_ = + webrtc::Timestamp::PlusInfinity(); // The time when the timeout expires. It will be set to the infinite future // if the timeout is not running/not started. - TimeMs timeout_expiration_ = TimeMs::InfiniteFuture(); + webrtc::Timestamp timeout_expiration_ = webrtc::Timestamp::PlusInfinity(); // The current timeout ID that will be reported when expired. 
TimeoutID timeout_id_ = TimeoutID(0); }; + webrtc::Timestamp Now() { return webrtc::Timestamp::Millis(*get_time_()); } + RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker thread_checker_; webrtc::TaskQueueBase& task_queue_; const std::function get_time_; diff --git a/net/dcsctp/timer/task_queue_timeout_test.cc b/net/dcsctp/timer/task_queue_timeout_test.cc index 8e392e38fb..07e1a592d7 100644 --- a/net/dcsctp/timer/task_queue_timeout_test.cc +++ b/net/dcsctp/timer/task_queue_timeout_test.cc @@ -43,7 +43,7 @@ class TaskQueueTimeoutTest : public testing::Test { MockFunction on_expired_; webrtc::GlobalSimulatedTimeController time_controller_; - rtc::Thread* task_queue_; + webrtc::Thread* task_queue_; TaskQueueTimeoutFactory factory_; }; @@ -129,7 +129,7 @@ TEST(TaskQueueTimeoutWithMockTaskQueueTest, CanSetTimeoutPrecisionToLow) { _)); TaskQueueTimeoutFactory factory( mock_task_queue, []() { return TimeMs(1337); }, - [](TimeoutID timeout_id) {}); + [](TimeoutID /* timeout_id */) {}); std::unique_ptr timeout = factory.CreateTimeout(webrtc::TaskQueueBase::DelayPrecision::kLow); timeout->Start(DurationMs(1), TimeoutID(1)); @@ -147,7 +147,7 @@ TEST(TaskQueueTimeoutWithMockTaskQueueTest, CanSetTimeoutPrecisionToHigh) { _)); TaskQueueTimeoutFactory factory( mock_task_queue, []() { return TimeMs(1337); }, - [](TimeoutID timeout_id) {}); + [](TimeoutID /* timeout_id */) {}); std::unique_ptr timeout = factory.CreateTimeout(webrtc::TaskQueueBase::DelayPrecision::kHigh); timeout->Start(DurationMs(1), TimeoutID(1)); @@ -165,7 +165,7 @@ TEST(TaskQueueTimeoutWithMockTaskQueueTest, TimeoutPrecisionIsLowByDefault) { _)); TaskQueueTimeoutFactory factory( mock_task_queue, []() { return TimeMs(1337); }, - [](TimeoutID timeout_id) {}); + [](TimeoutID /* timeout_id */) {}); std::unique_ptr timeout = factory.CreateTimeout(); timeout->Start(DurationMs(1), TimeoutID(1)); } diff --git a/net/dcsctp/timer/timer.cc b/net/dcsctp/timer/timer.cc index bde07638a5..07c9f3d786 100644 --- a/net/dcsctp/timer/timer.cc +++ b/net/dcsctp/timer/timer.cc @@ -22,36 +22,37 @@ namespace dcsctp { namespace { +using ::webrtc::TimeDelta; + TimeoutID MakeTimeoutId(TimerID timer_id, TimerGeneration generation) { return TimeoutID(static_cast(*timer_id) << 32 | *generation); } -DurationMs GetBackoffDuration(const TimerOptions& options, - DurationMs base_duration, - int expiration_count) { +TimeDelta GetBackoffDuration(const TimerOptions& options, + TimeDelta base_duration, + int expiration_count) { switch (options.backoff_algorithm) { case TimerBackoffAlgorithm::kFixed: return base_duration; case TimerBackoffAlgorithm::kExponential: { - int32_t duration_ms = *base_duration; + TimeDelta duration = base_duration; - while (expiration_count > 0 && duration_ms < *Timer::kMaxTimerDuration) { - duration_ms *= 2; + while (expiration_count > 0 && duration < Timer::kMaxTimerDuration) { + duration = duration * 2; --expiration_count; - if (options.max_backoff_duration.has_value() && - duration_ms > **options.max_backoff_duration) { - return *options.max_backoff_duration; + if (duration > options.max_backoff_duration) { + return options.max_backoff_duration; } } - return DurationMs(std::min(duration_ms, *Timer::kMaxTimerDuration)); + return TimeDelta(std::min(duration, Timer::kMaxTimerDuration)); } } } } // namespace -constexpr DurationMs Timer::kMaxTimerDuration; +constexpr TimeDelta Timer::kMaxTimerDuration; Timer::Timer(TimerID id, absl::string_view name, @@ -77,12 +78,12 @@ void Timer::Start() { if (!is_running()) { is_running_ = true; generation_ = 
TimerGeneration(*generation_ + 1); - timeout_->Start(duration_, MakeTimeoutId(id_, generation_)); + timeout_->Start(DurationMs(duration_), MakeTimeoutId(id_, generation_)); } else { // Timer was running - stop and restart it, to make it expire in `duration_` // from now. generation_ = TimerGeneration(*generation_ + 1); - timeout_->Restart(duration_, MakeTimeoutId(id_, generation_)); + timeout_->Restart(DurationMs(duration_), MakeTimeoutId(id_, generation_)); } } @@ -104,23 +105,24 @@ void Timer::Trigger(TimerGeneration generation) { // timer. Note that it might be very quickly restarted again, if the // `on_expired_` callback returns a new duration. is_running_ = true; - DurationMs duration = + TimeDelta duration = GetBackoffDuration(options_, duration_, expiration_count_); generation_ = TimerGeneration(*generation_ + 1); - timeout_->Start(duration, MakeTimeoutId(id_, generation_)); + timeout_->Start(DurationMs(duration), MakeTimeoutId(id_, generation_)); } - absl::optional new_duration = on_expired_(); - if (new_duration.has_value() && new_duration != duration_) { - duration_ = new_duration.value(); + TimeDelta new_duration = on_expired_(); + RTC_DCHECK(new_duration != TimeDelta::PlusInfinity()); + if (new_duration > TimeDelta::Zero() && new_duration != duration_) { + duration_ = new_duration; if (is_running_) { // Restart it with new duration. timeout_->Stop(); - DurationMs duration = + TimeDelta duration = GetBackoffDuration(options_, duration_, expiration_count_); generation_ = TimerGeneration(*generation_ + 1); - timeout_->Start(duration, MakeTimeoutId(id_, generation_)); + timeout_->Start(DurationMs(duration), MakeTimeoutId(id_, generation_)); } } } diff --git a/net/dcsctp/timer/timer.h b/net/dcsctp/timer/timer.h index 31b496dc81..cfacdda403 100644 --- a/net/dcsctp/timer/timer.h +++ b/net/dcsctp/timer/timer.h @@ -16,12 +16,13 @@ #include #include #include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" #include "net/dcsctp/public/timeout.h" #include "rtc_base/strong_alias.h" @@ -40,28 +41,31 @@ enum class TimerBackoffAlgorithm { }; struct TimerOptions { - explicit TimerOptions(DurationMs duration) + explicit TimerOptions(webrtc::TimeDelta duration) : TimerOptions(duration, TimerBackoffAlgorithm::kExponential) {} - TimerOptions(DurationMs duration, TimerBackoffAlgorithm backoff_algorithm) - : TimerOptions(duration, backoff_algorithm, absl::nullopt) {} - TimerOptions(DurationMs duration, + TimerOptions(webrtc::TimeDelta duration, + TimerBackoffAlgorithm backoff_algorithm) + : TimerOptions(duration, backoff_algorithm, std::nullopt) {} + TimerOptions(webrtc::TimeDelta duration, TimerBackoffAlgorithm backoff_algorithm, - absl::optional max_restarts) - : TimerOptions(duration, backoff_algorithm, max_restarts, absl::nullopt) { - } - TimerOptions(DurationMs duration, + std::optional max_restarts) + : TimerOptions(duration, + backoff_algorithm, + max_restarts, + webrtc::TimeDelta::PlusInfinity()) {} + TimerOptions(webrtc::TimeDelta duration, TimerBackoffAlgorithm backoff_algorithm, - absl::optional max_restarts, - absl::optional max_backoff_duration) + std::optional max_restarts, + webrtc::TimeDelta max_backoff_duration) : TimerOptions(duration, backoff_algorithm, max_restarts, max_backoff_duration, webrtc::TaskQueueBase::DelayPrecision::kLow) {} - TimerOptions(DurationMs duration, + TimerOptions(webrtc::TimeDelta duration, TimerBackoffAlgorithm backoff_algorithm, 
- absl::optional max_restarts, - absl::optional max_backoff_duration, + std::optional max_restarts, + webrtc::TimeDelta max_backoff_duration, webrtc::TaskQueueBase::DelayPrecision precision) : duration(duration), backoff_algorithm(backoff_algorithm), @@ -70,15 +74,15 @@ struct TimerOptions { precision(precision) {} // The initial timer duration. Can be overridden with `set_duration`. - const DurationMs duration; + const webrtc::TimeDelta duration; // If the duration should be increased (using exponential backoff) when it is // restarted. If not set, the same duration will be used. const TimerBackoffAlgorithm backoff_algorithm; // The maximum number of times that the timer will be automatically restarted, - // or absl::nullopt if there is no limit. - const absl::optional max_restarts; + // or std::nullopt if there is no limit. + const std::optional max_restarts; // The maximum timeout value for exponential backoff. - const absl::optional max_backoff_duration; + const webrtc::TimeDelta max_backoff_duration; // The precision of the webrtc::TaskQueueBase used for scheduling. const webrtc::TaskQueueBase::DelayPrecision precision; }; @@ -98,12 +102,14 @@ struct TimerOptions { class Timer { public: // The maximum timer duration - one day. - static constexpr DurationMs kMaxTimerDuration = DurationMs(24 * 3600 * 1000); + static constexpr webrtc::TimeDelta kMaxTimerDuration = + webrtc::TimeDelta::Seconds(24 * 3600); - // When expired, the timer handler can optionally return a new duration which - // will be set as `duration` and used as base duration when the timer is - // restarted and as input to the backoff algorithm. - using OnExpired = std::function()>; + // When expired, the timer handler can optionally return a new non-zero + // duration which will be set as `duration` and used as base duration when the + // timer is restarted and as input to the backoff algorithm. If zero is + // returned, the current duration is used. + using OnExpired = std::function; // TimerManager will have pointers to these instances, so they must not move. Timer(const Timer&) = delete; @@ -121,13 +127,13 @@ class Timer { // Sets the base duration. The actual timer duration may be larger depending // on the backoff algorithm. - void set_duration(DurationMs duration) { + void set_duration(webrtc::TimeDelta duration) { duration_ = std::min(duration, kMaxTimerDuration); } // Retrieves the base duration. The actual timer duration may be larger // depending on the backoff algorithm. - DurationMs duration() const { return duration_; } + webrtc::TimeDelta duration() const { return duration_; } // Returns the number of times the timer has expired. int expiration_count() const { return expiration_count_; } @@ -165,7 +171,7 @@ class Timer { const UnregisterHandler unregister_handler_; const std::unique_ptr timeout_; - DurationMs duration_; + webrtc::TimeDelta duration_; // Increased on each start, and is matched on Trigger, to avoid races. 
And by // race, meaning that a timeout - which may be evaluated/expired on a diff --git a/net/dcsctp/timer/timer_test.cc b/net/dcsctp/timer/timer_test.cc index 4aebe65b48..5d5091e1ca 100644 --- a/net/dcsctp/timer/timer_test.cc +++ b/net/dcsctp/timer/timer_test.cc @@ -10,9 +10,10 @@ #include "net/dcsctp/timer/timer.h" #include +#include -#include "absl/types/optional.h" #include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" #include "net/dcsctp/public/timeout.h" #include "net/dcsctp/timer/fake_timeout.h" #include "rtc_base/gunit.h" @@ -21,6 +22,8 @@ namespace dcsctp { namespace { using ::testing::Return; +using ::webrtc::TimeDelta; +using ::webrtc::Timestamp; class TimerTest : public testing::Test { protected: @@ -29,14 +32,14 @@ class TimerTest : public testing::Test { manager_([this](webrtc::TaskQueueBase::DelayPrecision precision) { return timeout_manager_.CreateTimeout(precision); }) { - ON_CALL(on_expired_, Call).WillByDefault(Return(absl::nullopt)); + ON_CALL(on_expired_, Call).WillByDefault(Return(TimeDelta::Zero())); } - void AdvanceTimeAndRunTimers(DurationMs duration) { + void AdvanceTimeAndRunTimers(TimeDelta duration) { now_ = now_ + duration; for (;;) { - absl::optional timeout_id = + std::optional timeout_id = timeout_manager_.GetNextExpiredTimeout(); if (!timeout_id.has_value()) { break; @@ -45,16 +48,16 @@ class TimerTest : public testing::Test { } } - TimeMs now_ = TimeMs(0); + Timestamp now_ = Timestamp::Zero(); FakeTimeoutManager timeout_manager_; TimerManager manager_; - testing::MockFunction()> on_expired_; + testing::MockFunction on_expired_; }; TEST_F(TimerTest, TimerIsInitiallyStopped) { std::unique_ptr t1 = manager_.CreateTimer( "t1", on_expired_.AsStdFunction(), - TimerOptions(DurationMs(5000), TimerBackoffAlgorithm::kFixed)); + TimerOptions(TimeDelta::Seconds(5), TimerBackoffAlgorithm::kFixed)); EXPECT_FALSE(t1->is_running()); } @@ -62,50 +65,50 @@ TEST_F(TimerTest, TimerIsInitiallyStopped) { TEST_F(TimerTest, TimerExpiresAtGivenTime) { std::unique_ptr t1 = manager_.CreateTimer( "t1", on_expired_.AsStdFunction(), - TimerOptions(DurationMs(5000), TimerBackoffAlgorithm::kFixed)); + TimerOptions(TimeDelta::Seconds(5), TimerBackoffAlgorithm::kFixed)); EXPECT_CALL(on_expired_, Call).Times(0); t1->Start(); EXPECT_TRUE(t1->is_running()); - AdvanceTimeAndRunTimers(DurationMs(4000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(4)); EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); } TEST_F(TimerTest, TimerReschedulesAfterExpiredWithFixedBackoff) { std::unique_ptr t1 = manager_.CreateTimer( "t1", on_expired_.AsStdFunction(), - TimerOptions(DurationMs(5000), TimerBackoffAlgorithm::kFixed)); + TimerOptions(TimeDelta::Seconds(5), TimerBackoffAlgorithm::kFixed)); EXPECT_CALL(on_expired_, Call).Times(0); t1->Start(); EXPECT_EQ(t1->expiration_count(), 0); - AdvanceTimeAndRunTimers(DurationMs(4000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(4)); // Fire first time EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); EXPECT_TRUE(t1->is_running()); EXPECT_EQ(t1->expiration_count(), 1); EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(4000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(4)); // Second time EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); 
EXPECT_TRUE(t1->is_running()); EXPECT_EQ(t1->expiration_count(), 2); EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(4000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(4)); // Third time EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); EXPECT_TRUE(t1->is_running()); EXPECT_EQ(t1->expiration_count(), 3); } @@ -113,151 +116,151 @@ TEST_F(TimerTest, TimerReschedulesAfterExpiredWithFixedBackoff) { TEST_F(TimerTest, TimerWithNoRestarts) { std::unique_ptr t1 = manager_.CreateTimer( "t1", on_expired_.AsStdFunction(), - TimerOptions(DurationMs(5000), TimerBackoffAlgorithm::kFixed, + TimerOptions(TimeDelta::Seconds(5), TimerBackoffAlgorithm::kFixed, /*max_restart=*/0)); EXPECT_CALL(on_expired_, Call).Times(0); t1->Start(); - AdvanceTimeAndRunTimers(DurationMs(4000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(4)); // Fire first time EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); EXPECT_FALSE(t1->is_running()); // Second time - shouldn't fire EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(5000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(5)); EXPECT_FALSE(t1->is_running()); } TEST_F(TimerTest, TimerWithOneRestart) { std::unique_ptr t1 = manager_.CreateTimer( "t1", on_expired_.AsStdFunction(), - TimerOptions(DurationMs(5000), TimerBackoffAlgorithm::kFixed, + TimerOptions(TimeDelta::Seconds(5), TimerBackoffAlgorithm::kFixed, /*max_restart=*/1)); EXPECT_CALL(on_expired_, Call).Times(0); t1->Start(); - AdvanceTimeAndRunTimers(DurationMs(4000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(4)); // Fire first time EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); EXPECT_TRUE(t1->is_running()); EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(4000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(4)); // Second time - max restart limit reached. EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); EXPECT_FALSE(t1->is_running()); // Third time - should not fire. 
EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(5000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(5)); EXPECT_FALSE(t1->is_running()); } TEST_F(TimerTest, TimerWithTwoRestart) { std::unique_ptr t1 = manager_.CreateTimer( "t1", on_expired_.AsStdFunction(), - TimerOptions(DurationMs(5000), TimerBackoffAlgorithm::kFixed, + TimerOptions(TimeDelta::Seconds(5), TimerBackoffAlgorithm::kFixed, /*max_restart=*/2)); EXPECT_CALL(on_expired_, Call).Times(0); t1->Start(); - AdvanceTimeAndRunTimers(DurationMs(4000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(4)); // Fire first time EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); EXPECT_TRUE(t1->is_running()); EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(4000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(4)); // Second time EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); EXPECT_TRUE(t1->is_running()); EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(4000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(4)); // Third time EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); EXPECT_FALSE(t1->is_running()); } TEST_F(TimerTest, TimerWithExponentialBackoff) { std::unique_ptr t1 = manager_.CreateTimer( "t1", on_expired_.AsStdFunction(), - TimerOptions(DurationMs(5000), TimerBackoffAlgorithm::kExponential)); + TimerOptions(TimeDelta::Seconds(5), TimerBackoffAlgorithm::kExponential)); t1->Start(); // Fire first time at 5 seconds EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(5000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(5)); // Second time at 5*2^1 = 10 seconds later. EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(9000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(9)); EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); // Third time at 5*2^2 = 20 seconds later. EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(19000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(19)); EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); // Fourth time at 5*2^3 = 40 seconds later. 
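A brief aside on the arithmetic these exponential-backoff expectations encode: with TimerBackoffAlgorithm::kExponential, the n-th expiry waits base * 2^n, optionally clamped by a maximum backoff duration (exercised by DurationStaysWithinMaxTimerBackOffDuration further down). The following standalone sketch is only an illustration of that schedule; BackoffDelayMs is a hypothetical helper, not dcsctp's Timer.

```cpp
#include <algorithm>
#include <cstdint>
#include <cstdio>

// Delay before expiry number `expiration_count` (0-based), in milliseconds.
// Pass max_backoff_ms == 0 for "no cap".
int64_t BackoffDelayMs(int64_t base_ms, int expiration_count,
                       int64_t max_backoff_ms) {
  int64_t delay = base_ms << expiration_count;  // base * 2^expiration_count
  if (max_backoff_ms > 0) {
    delay = std::min(delay, max_backoff_ms);
  }
  return delay;
}

int main() {
  // Matches TimerWithExponentialBackoff: 5000, 10000, 20000, 40000 ms.
  for (int i = 0; i < 4; ++i) {
    std::printf("uncapped expiry %d after %lld ms\n", i,
                static_cast<long long>(BackoffDelayMs(5000, i, 0)));
  }
  // Matches DurationStaysWithinMaxTimerBackOffDuration: 1000, 2000, 4000,
  // then a plateau at the 5000 ms cap.
  for (int i = 0; i < 5; ++i) {
    std::printf("capped expiry %d after %lld ms\n", i,
                static_cast<long long>(BackoffDelayMs(1000, i, 5000)));
  }
  return 0;
}
```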
EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(39000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(39)); EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); } TEST_F(TimerTest, StartTimerWillStopAndStart) { std::unique_ptr t1 = manager_.CreateTimer( "t1", on_expired_.AsStdFunction(), - TimerOptions(DurationMs(5000), TimerBackoffAlgorithm::kExponential)); + TimerOptions(TimeDelta::Seconds(5), TimerBackoffAlgorithm::kExponential)); t1->Start(); - AdvanceTimeAndRunTimers(DurationMs(3000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(3)); t1->Start(); EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(2000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(2)); EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(3000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(3)); } TEST_F(TimerTest, ExpirationCounterWillResetIfStopped) { std::unique_ptr t1 = manager_.CreateTimer( "t1", on_expired_.AsStdFunction(), - TimerOptions(DurationMs(5000), TimerBackoffAlgorithm::kExponential)); + TimerOptions(TimeDelta::Seconds(5), TimerBackoffAlgorithm::kExponential)); t1->Start(); // Fire first time at 5 seconds EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(5000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(5)); EXPECT_EQ(t1->expiration_count(), 1); // Second time at 5*2^1 = 10 seconds later. EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(9000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(9)); EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); EXPECT_EQ(t1->expiration_count(), 2); t1->Start(); @@ -265,79 +268,79 @@ TEST_F(TimerTest, ExpirationCounterWillResetIfStopped) { // Third time at 5*2^0 = 5 seconds later. 
EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(4000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(4)); EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); EXPECT_EQ(t1->expiration_count(), 1); } TEST_F(TimerTest, StopTimerWillMakeItNotExpire) { std::unique_ptr t1 = manager_.CreateTimer( "t1", on_expired_.AsStdFunction(), - TimerOptions(DurationMs(5000), TimerBackoffAlgorithm::kExponential)); + TimerOptions(TimeDelta::Seconds(5), TimerBackoffAlgorithm::kExponential)); t1->Start(); EXPECT_TRUE(t1->is_running()); EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(4000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(4)); t1->Stop(); EXPECT_FALSE(t1->is_running()); EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); } TEST_F(TimerTest, ReturningNewDurationWhenExpired) { std::unique_ptr t1 = manager_.CreateTimer( "t1", on_expired_.AsStdFunction(), - TimerOptions(DurationMs(5000), TimerBackoffAlgorithm::kFixed)); + TimerOptions(TimeDelta::Seconds(5), TimerBackoffAlgorithm::kFixed)); EXPECT_CALL(on_expired_, Call).Times(0); t1->Start(); - EXPECT_EQ(t1->duration(), DurationMs(5000)); + EXPECT_EQ(t1->duration(), TimeDelta::Seconds(5)); - AdvanceTimeAndRunTimers(DurationMs(4000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(4)); // Fire first time - EXPECT_CALL(on_expired_, Call).WillOnce(Return(DurationMs(2000))); - AdvanceTimeAndRunTimers(DurationMs(1000)); - EXPECT_EQ(t1->duration(), DurationMs(2000)); + EXPECT_CALL(on_expired_, Call).WillOnce(Return(TimeDelta::Seconds(2))); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); + EXPECT_EQ(t1->duration(), TimeDelta::Seconds(2)); EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); // Second time - EXPECT_CALL(on_expired_, Call).WillOnce(Return(DurationMs(10000))); - AdvanceTimeAndRunTimers(DurationMs(1000)); - EXPECT_EQ(t1->duration(), DurationMs(10000)); + EXPECT_CALL(on_expired_, Call).WillOnce(Return(TimeDelta::Seconds(10))); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); + EXPECT_EQ(t1->duration(), TimeDelta::Seconds(10)); EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(9000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(9)); EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); } TEST_F(TimerTest, TimersHaveMaximumDuration) { std::unique_ptr t1 = manager_.CreateTimer( "t1", on_expired_.AsStdFunction(), - TimerOptions(DurationMs(1000), TimerBackoffAlgorithm::kExponential)); + TimerOptions(TimeDelta::Seconds(1), TimerBackoffAlgorithm::kExponential)); - t1->set_duration(DurationMs(2 * *Timer::kMaxTimerDuration)); + t1->set_duration(2 * Timer::kMaxTimerDuration); EXPECT_EQ(t1->duration(), Timer::kMaxTimerDuration); } TEST_F(TimerTest, TimersHaveMaximumBackoffDuration) { std::unique_ptr t1 = manager_.CreateTimer( "t1", on_expired_.AsStdFunction(), - TimerOptions(DurationMs(1000), TimerBackoffAlgorithm::kExponential)); + TimerOptions(TimeDelta::Seconds(1), TimerBackoffAlgorithm::kExponential)); t1->Start(); - int max_exponent = static_cast(log2(*Timer::kMaxTimerDuration / 1000)); + int max_exponent = static_cast(log2(Timer::kMaxTimerDuration.seconds())); for (int i = 0; i < max_exponent; ++i) { EXPECT_CALL(on_expired_, 
Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000 * (1 << i))); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1 * (1 << i))); } // Reached the maximum duration. @@ -357,100 +360,100 @@ TEST_F(TimerTest, TimersHaveMaximumBackoffDuration) { TEST_F(TimerTest, TimerCanBeStartedFromWithinExpirationHandler) { std::unique_ptr t1 = manager_.CreateTimer( "t1", on_expired_.AsStdFunction(), - TimerOptions(DurationMs(1000), TimerBackoffAlgorithm::kFixed)); + TimerOptions(TimeDelta::Seconds(1), TimerBackoffAlgorithm::kFixed)); t1->Start(); // Start a timer, but don't return any new duration in callback. EXPECT_CALL(on_expired_, Call).WillOnce([&]() { EXPECT_TRUE(t1->is_running()); - t1->set_duration(DurationMs(5000)); + t1->set_duration(TimeDelta::Seconds(5)); t1->Start(); - return absl::nullopt; + return TimeDelta::Zero(); }); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(4999)); + AdvanceTimeAndRunTimers(TimeDelta::Millis(4999)); // Start a timer, and return any new duration in callback. EXPECT_CALL(on_expired_, Call).WillOnce([&]() { EXPECT_TRUE(t1->is_running()); - t1->set_duration(DurationMs(5000)); + t1->set_duration(TimeDelta::Seconds(5)); t1->Start(); - return absl::make_optional(DurationMs(8000)); + return TimeDelta::Seconds(8); }); - AdvanceTimeAndRunTimers(DurationMs(1)); + AdvanceTimeAndRunTimers(TimeDelta::Millis(1)); EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(7999)); + AdvanceTimeAndRunTimers(TimeDelta::Millis(7999)); EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1)); + AdvanceTimeAndRunTimers(TimeDelta::Millis(1)); } TEST_F(TimerTest, DurationStaysWithinMaxTimerBackOffDuration) { std::unique_ptr t1 = manager_.CreateTimer( "t1", on_expired_.AsStdFunction(), - TimerOptions(DurationMs(1000), TimerBackoffAlgorithm::kExponential, - /*max_restarts=*/absl::nullopt, DurationMs(5000))); + TimerOptions(TimeDelta::Seconds(1), TimerBackoffAlgorithm::kExponential, + /*max_restarts=*/std::nullopt, TimeDelta::Seconds(5))); t1->Start(); // Initial timeout, 1000 ms EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1000)); + AdvanceTimeAndRunTimers(TimeDelta::Seconds(1)); // Exponential backoff -> 2000 ms EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(1999)); + AdvanceTimeAndRunTimers(TimeDelta::Millis(1999)); EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1)); + AdvanceTimeAndRunTimers(TimeDelta::Millis(1)); // Exponential backoff -> 4000 ms EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(3999)); + AdvanceTimeAndRunTimers(TimeDelta::Millis(3999)); EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1)); + AdvanceTimeAndRunTimers(TimeDelta::Millis(1)); // Limited backoff -> 5000ms EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(4999)); + AdvanceTimeAndRunTimers(TimeDelta::Millis(4999)); EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1)); + AdvanceTimeAndRunTimers(TimeDelta::Millis(1)); // ... 
where it plateaus EXPECT_CALL(on_expired_, Call).Times(0); - AdvanceTimeAndRunTimers(DurationMs(4999)); + AdvanceTimeAndRunTimers(TimeDelta::Millis(4999)); EXPECT_CALL(on_expired_, Call).Times(1); - AdvanceTimeAndRunTimers(DurationMs(1)); + AdvanceTimeAndRunTimers(TimeDelta::Millis(1)); } TEST(TimerManagerTest, TimerManagerPassesPrecisionToCreateTimeoutMethod) { - FakeTimeoutManager timeout_manager([&]() { return TimeMs(0); }); - absl::optional create_timer_precison; + FakeTimeoutManager timeout_manager([&]() { return Timestamp::Zero(); }); + std::optional create_timer_precison; TimerManager manager([&](webrtc::TaskQueueBase::DelayPrecision precision) { create_timer_precison = precision; return timeout_manager.CreateTimeout(precision); }); // Default TimerOptions. manager.CreateTimer( - "test_timer", []() { return absl::optional(); }, - TimerOptions(DurationMs(123))); + "test_timer", []() { return TimeDelta::Zero(); }, + TimerOptions(TimeDelta::Millis(123))); EXPECT_EQ(create_timer_precison, webrtc::TaskQueueBase::DelayPrecision::kLow); // High precision TimerOptions. manager.CreateTimer( - "test_timer", []() { return absl::optional(); }, - TimerOptions(DurationMs(123), TimerBackoffAlgorithm::kExponential, - absl::nullopt, absl::nullopt, + "test_timer", []() { return TimeDelta::Zero(); }, + TimerOptions(TimeDelta::Millis(123), TimerBackoffAlgorithm::kExponential, + std::nullopt, TimeDelta::PlusInfinity(), webrtc::TaskQueueBase::DelayPrecision::kHigh)); EXPECT_EQ(create_timer_precison, webrtc::TaskQueueBase::DelayPrecision::kHigh); // Low precision TimerOptions. manager.CreateTimer( - "test_timer", []() { return absl::optional(); }, - TimerOptions(DurationMs(123), TimerBackoffAlgorithm::kExponential, - absl::nullopt, absl::nullopt, + "test_timer", []() { return TimeDelta::Zero(); }, + TimerOptions(TimeDelta::Millis(123), TimerBackoffAlgorithm::kExponential, + std::nullopt, TimeDelta::PlusInfinity(), webrtc::TaskQueueBase::DelayPrecision::kLow)); EXPECT_EQ(create_timer_precison, webrtc::TaskQueueBase::DelayPrecision::kLow); } diff --git a/net/dcsctp/tx/BUILD.gn b/net/dcsctp/tx/BUILD.gn index 43fd41639e..0e21d91cde 100644 --- a/net/dcsctp/tx/BUILD.gn +++ b/net/dcsctp/tx/BUILD.gn @@ -11,6 +11,7 @@ import("../../../webrtc.gni") rtc_source_set("send_queue") { deps = [ "../../../api:array_view", + "../../../api/units:timestamp", "../common:internal_types", "../packet:chunk", "../packet:data", @@ -18,7 +19,6 @@ rtc_source_set("send_queue") { "../public:types", ] sources = [ "send_queue.h" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("rr_send_queue") { @@ -28,22 +28,19 @@ rtc_library("rr_send_queue") { "../../../api:array_view", "../../../rtc_base:checks", "../../../rtc_base:logging", + "../../../rtc_base:stringutils", "../../../rtc_base/containers:flat_map", - "../common:str_join", + "../common:internal_types", "../packet:data", "../public:socket", "../public:types", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "rr_send_queue.cc", "rr_send_queue.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("stream_scheduler") { @@ -52,25 +49,22 @@ rtc_library("stream_scheduler") { "../../../api:array_view", "../../../rtc_base:checks", "../../../rtc_base:logging", + "../../../rtc_base:stringutils", 
"../../../rtc_base:strong_alias", "../../../rtc_base/containers:flat_set", - "../common:str_join", "../packet:chunk", "../packet:data", "../packet:sctp_packet", "../public:socket", "../public:types", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "stream_scheduler.cc", "stream_scheduler.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("retransmission_error_counter") { @@ -78,16 +72,17 @@ rtc_library("retransmission_error_counter") { "../../../rtc_base:checks", "../../../rtc_base:logging", "../public:types", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "retransmission_error_counter.cc", "retransmission_error_counter.h", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("retransmission_timeout") { deps = [ + "../../../api/units:time_delta", "../../../rtc_base:checks", "../public:types", ] @@ -102,11 +97,15 @@ rtc_library("outstanding_data") { ":retransmission_timeout", ":send_queue", "../../../api:array_view", + "../../../api/units:time_delta", + "../../../api/units:timestamp", "../../../rtc_base:checks", "../../../rtc_base:logging", + "../../../rtc_base:stringutils", + "../../../rtc_base/containers:flat_set", + "../common:internal_types", "../common:math", "../common:sequence_numbers", - "../common:str_join", "../packet:chunk", "../packet:data", "../public:socket", @@ -117,11 +116,6 @@ rtc_library("outstanding_data") { "outstanding_data.cc", "outstanding_data.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("retransmission_queue") { @@ -135,22 +129,18 @@ rtc_library("retransmission_queue") { "../../../rtc_base:stringutils", "../common:math", "../common:sequence_numbers", - "../common:str_join", "../packet:chunk", "../packet:data", "../public:socket", "../public:types", "../timer", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings:string_view", ] sources = [ "retransmission_queue.cc", "retransmission_queue.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } if (rtc_include_tests) { @@ -159,9 +149,9 @@ if (rtc_include_tests) { deps = [ ":send_queue", "../../../api:array_view", + "../../../api/units:timestamp", "../../../test:test_support", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] sources = [ "mock_send_queue.h" ] } @@ -183,6 +173,7 @@ if (rtc_include_tests) { "../../../rtc_base:gunit_helpers", "../../../test:test_support", "../common:handover_testing", + "../common:internal_types", "../common:math", "../common:sequence_numbers", "../packet:chunk", @@ -196,7 +187,6 @@ if (rtc_include_tests) { "../testing:testing_macros", "../timer", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] sources = [ "outstanding_data_test.cc", "retransmission_error_counter_test.cc", diff --git a/net/dcsctp/tx/mock_send_queue.h b/net/dcsctp/tx/mock_send_queue.h index 0c8f5d141d..ba412f7ba2 100644 --- a/net/dcsctp/tx/mock_send_queue.h +++ b/net/dcsctp/tx/mock_send_queue.h @@ -11,10 +11,11 @@ #define NET_DCSCTP_TX_MOCK_SEND_QUEUE_H_ 
#include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/units/timestamp.h" #include "net/dcsctp/tx/send_queue.h" #include "test/gmock.h" @@ -23,18 +24,19 @@ namespace dcsctp { class MockSendQueue : public SendQueue { public: MockSendQueue() { - ON_CALL(*this, Produce).WillByDefault([](TimeMs now, size_t max_size) { - return absl::nullopt; - }); + ON_CALL(*this, Produce) + .WillByDefault([](webrtc::Timestamp /* now */, size_t /* max_size */) { + return std::nullopt; + }); } - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, Produce, - (TimeMs now, size_t max_size), + (webrtc::Timestamp now, size_t max_size), (override)); MOCK_METHOD(bool, Discard, - (IsUnordered unordered, StreamID stream_id, MID message_id), + (StreamID stream_id, OutgoingMessageId message_id), (override)); MOCK_METHOD(void, PrepareResetStream, (StreamID stream_id), (override)); MOCK_METHOD(bool, HasStreamsReadyToBeReset, (), (const, override)); diff --git a/net/dcsctp/tx/outstanding_data.cc b/net/dcsctp/tx/outstanding_data.cc index 4f1e863056..a92fc6f638 100644 --- a/net/dcsctp/tx/outstanding_data.cc +++ b/net/dcsctp/tx/outstanding_data.cc @@ -14,12 +14,16 @@ #include #include +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "net/dcsctp/common/math.h" #include "net/dcsctp/common/sequence_numbers.h" #include "net/dcsctp/public/types.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace dcsctp { +using ::webrtc::Timestamp; // The number of times a packet must be NACKed before it's retransmitted. // See https://tools.ietf.org/html/rfc4960#section-7.2.4 @@ -63,16 +67,19 @@ void OutstandingData::Item::MarkAsRetransmitted() { } void OutstandingData::Item::Abandon() { + RTC_DCHECK(!expires_at_.IsPlusInfinity() || + max_retransmissions_ != MaxRetransmits::NoLimit()); lifecycle_ = Lifecycle::kAbandoned; } -bool OutstandingData::Item::has_expired(TimeMs now) const { +bool OutstandingData::Item::has_expired(Timestamp now) const { return expires_at_ <= now; } bool OutstandingData::IsConsistent() const { - size_t actual_outstanding_bytes = 0; - size_t actual_outstanding_items = 0; + size_t actual_unacked_payload_bytes = 0; + size_t actual_unacked_packet_bytes = 0; + size_t actual_unacked_items = 0; std::set combined_to_be_retransmitted; combined_to_be_retransmitted.insert(to_be_retransmitted_.begin(), @@ -81,10 +88,13 @@ bool OutstandingData::IsConsistent() const { to_be_fast_retransmitted_.end()); std::set actual_combined_to_be_retransmitted; - for (const auto& [tsn, item] : outstanding_data_) { + UnwrappedTSN tsn = last_cumulative_tsn_ack_; + for (const Item& item : outstanding_data_) { + tsn.Increment(); if (item.is_outstanding()) { - actual_outstanding_bytes += GetSerializedChunkSize(item.data()); - ++actual_outstanding_items; + actual_unacked_payload_bytes += item.data().size(); + actual_unacked_packet_bytes += GetSerializedChunkSize(item.data()); + ++actual_unacked_items; } if (item.should_be_retransmitted()) { @@ -92,39 +102,36 @@ bool OutstandingData::IsConsistent() const { } } - if (outstanding_data_.empty() && - next_tsn_ != last_cumulative_tsn_ack_.next_value()) { - return false; - } - - return actual_outstanding_bytes == outstanding_bytes_ && - actual_outstanding_items == outstanding_items_ && + return actual_unacked_payload_bytes == unacked_payload_bytes_ && + actual_unacked_packet_bytes == unacked_packet_bytes_ && + actual_unacked_items == unacked_items_ && actual_combined_to_be_retransmitted == 
combined_to_be_retransmitted; } void OutstandingData::AckChunk(AckInfo& ack_info, - std::map::iterator iter) { - if (!iter->second.is_acked()) { - size_t serialized_size = GetSerializedChunkSize(iter->second.data()); + UnwrappedTSN tsn, + Item& item) { + if (!item.is_acked()) { + size_t serialized_size = GetSerializedChunkSize(item.data()); ack_info.bytes_acked += serialized_size; - if (iter->second.is_outstanding()) { - outstanding_bytes_ -= serialized_size; - --outstanding_items_; + if (item.is_outstanding()) { + unacked_payload_bytes_ -= item.data().size(); + unacked_packet_bytes_ -= serialized_size; + --unacked_items_; } - if (iter->second.should_be_retransmitted()) { - RTC_DCHECK(to_be_fast_retransmitted_.find(iter->first) == + if (item.should_be_retransmitted()) { + RTC_DCHECK(to_be_fast_retransmitted_.find(tsn) == to_be_fast_retransmitted_.end()); - to_be_retransmitted_.erase(iter->first); + to_be_retransmitted_.erase(tsn); } - iter->second.Ack(); - ack_info.highest_tsn_acked = - std::max(ack_info.highest_tsn_acked, iter->first); + item.Ack(); + ack_info.highest_tsn_acked = std::max(ack_info.highest_tsn_acked, tsn); } } OutstandingData::AckInfo OutstandingData::HandleSack( UnwrappedTSN cumulative_tsn_ack, - rtc::ArrayView gap_ack_blocks, + webrtc::ArrayView gap_ack_blocks, bool is_in_fast_recovery) { OutstandingData::AckInfo ack_info(cumulative_tsn_ack); // Erase all items up to cumulative_tsn_ack. @@ -141,29 +148,51 @@ OutstandingData::AckInfo OutstandingData::HandleSack( return ack_info; } +OutstandingData::Item& OutstandingData::GetItem(UnwrappedTSN tsn) { + RTC_DCHECK(tsn > last_cumulative_tsn_ack_); + RTC_DCHECK(tsn < next_tsn()); + int index = UnwrappedTSN::Difference(tsn, last_cumulative_tsn_ack_) - 1; + RTC_DCHECK(index >= 0); + RTC_DCHECK(index < static_cast(outstanding_data_.size())); + return outstanding_data_[index]; +} + +const OutstandingData::Item& OutstandingData::GetItem(UnwrappedTSN tsn) const { + RTC_DCHECK(tsn > last_cumulative_tsn_ack_); + RTC_DCHECK(tsn < next_tsn()); + int index = UnwrappedTSN::Difference(tsn, last_cumulative_tsn_ack_) - 1; + RTC_DCHECK(index >= 0); + RTC_DCHECK(index < static_cast(outstanding_data_.size())); + return outstanding_data_[index]; +} + void OutstandingData::RemoveAcked(UnwrappedTSN cumulative_tsn_ack, AckInfo& ack_info) { - auto first_unacked = outstanding_data_.upper_bound(cumulative_tsn_ack); - - for (auto iter = outstanding_data_.begin(); iter != first_unacked; ++iter) { - AckChunk(ack_info, iter); - if (iter->second.lifecycle_id().IsSet()) { - RTC_DCHECK(iter->second.data().is_end); - if (iter->second.is_abandoned()) { - ack_info.abandoned_lifecycle_ids.push_back(iter->second.lifecycle_id()); + while (!outstanding_data_.empty() && + last_cumulative_tsn_ack_ < cumulative_tsn_ack) { + UnwrappedTSN tsn = last_cumulative_tsn_ack_.next_value(); + Item& item = outstanding_data_.front(); + AckChunk(ack_info, tsn, item); + if (item.lifecycle_id().IsSet()) { + RTC_DCHECK(item.data().is_end); + if (item.is_abandoned()) { + ack_info.abandoned_lifecycle_ids.push_back(item.lifecycle_id()); } else { - ack_info.acked_lifecycle_ids.push_back(iter->second.lifecycle_id()); + ack_info.acked_lifecycle_ids.push_back(item.lifecycle_id()); } } + outstanding_data_.pop_front(); + last_cumulative_tsn_ack_.Increment(); } - outstanding_data_.erase(outstanding_data_.begin(), first_unacked); - last_cumulative_tsn_ack_ = cumulative_tsn_ack; + stream_reset_breakpoint_tsns_.erase(stream_reset_breakpoint_tsns_.begin(), + 
stream_reset_breakpoint_tsns_.upper_bound( + cumulative_tsn_ack.next_value())); } void OutstandingData::AckGapBlocks( UnwrappedTSN cumulative_tsn_ack, - rtc::ArrayView gap_ack_blocks, + webrtc::ArrayView gap_ack_blocks, AckInfo& ack_info) { // Mark all non-gaps as ACKED (but they can't be removed) as (from RFC) // "SCTP considers the information carried in the Gap Ack Blocks in the @@ -171,19 +200,20 @@ void OutstandingData::AckGapBlocks( // handled differently. for (auto& block : gap_ack_blocks) { - auto start = outstanding_data_.lower_bound( - UnwrappedTSN::AddTo(cumulative_tsn_ack, block.start)); - auto end = outstanding_data_.upper_bound( - UnwrappedTSN::AddTo(cumulative_tsn_ack, block.end)); - for (auto iter = start; iter != end; ++iter) { - AckChunk(ack_info, iter); + UnwrappedTSN start = UnwrappedTSN::AddTo(cumulative_tsn_ack, block.start); + UnwrappedTSN end = UnwrappedTSN::AddTo(cumulative_tsn_ack, block.end); + for (UnwrappedTSN tsn = start; tsn <= end; tsn = tsn.next_value()) { + if (tsn > last_cumulative_tsn_ack_ && tsn < next_tsn()) { + Item& item = GetItem(tsn); + AckChunk(ack_info, tsn, item); + } } } } void OutstandingData::NackBetweenAckBlocks( UnwrappedTSN cumulative_tsn_ack, - rtc::ArrayView gap_ack_blocks, + webrtc::ArrayView gap_ack_blocks, bool is_in_fast_recovery, OutstandingData::AckInfo& ack_info) { // Mark everything between the blocks as NACKED/TO_BE_RETRANSMITTED. @@ -211,13 +241,12 @@ void OutstandingData::NackBetweenAckBlocks( for (auto& block : gap_ack_blocks) { UnwrappedTSN cur_block_first_acked = UnwrappedTSN::AddTo(cumulative_tsn_ack, block.start); - for (auto iter = outstanding_data_.upper_bound(prev_block_last_acked); - iter != outstanding_data_.lower_bound(cur_block_first_acked); ++iter) { - if (iter->first <= max_tsn_to_nack) { - ack_info.has_packet_loss |= - NackItem(iter->first, iter->second, /*retransmit_now=*/false, - /*do_fast_retransmit=*/!is_in_fast_recovery); - } + for (UnwrappedTSN tsn = prev_block_last_acked.next_value(); + tsn < cur_block_first_acked && tsn <= max_tsn_to_nack; + tsn = tsn.next_value()) { + ack_info.has_packet_loss |= + NackItem(tsn, /*retransmit_now=*/false, + /*do_fast_retransmit=*/!is_in_fast_recovery); } prev_block_last_acked = UnwrappedTSN::AddTo(cumulative_tsn_ack, block.end); } @@ -229,12 +258,13 @@ void OutstandingData::NackBetweenAckBlocks( } bool OutstandingData::NackItem(UnwrappedTSN tsn, - Item& item, bool retransmit_now, bool do_fast_retransmit) { + Item& item = GetItem(tsn); if (item.is_outstanding()) { - outstanding_bytes_ -= GetSerializedChunkSize(item.data()); - --outstanding_items_; + unacked_payload_bytes_ -= item.data().size(); + unacked_packet_bytes_ -= GetSerializedChunkSize(item.data()); + --unacked_items_; } switch (item.Nack(retransmit_now)) { @@ -249,6 +279,7 @@ bool OutstandingData::NackItem(UnwrappedTSN tsn, RTC_DLOG(LS_VERBOSE) << *tsn.Wrap() << " marked for retransmission"; break; case Item::NackAction::kAbandon: + RTC_DLOG(LS_VERBOSE) << *tsn.Wrap() << " Nacked, resulted in abandoning"; AbandonAllFor(item); break; } @@ -257,8 +288,7 @@ bool OutstandingData::NackItem(UnwrappedTSN tsn, void OutstandingData::AbandonAllFor(const Item& item) { // Erase all remaining chunks from the producer, if any. - if (discard_from_send_queue_(item.data().is_unordered, item.data().stream_id, - item.data().message_id)) { + if (discard_from_send_queue_(item.data().stream_id, item.message_id())) { // There were remaining chunks to be produced for this message. 
Since the // receiver may have already received all chunks (up till now) for this // message, we can't just FORWARD-TSN to the last fragment in this @@ -267,32 +297,28 @@ void OutstandingData::AbandonAllFor(const Item& item) { // skipped over). So create a new fragment, representing the end, that the // received will never see as it is abandoned immediately and used as cum // TSN in the sent FORWARD-TSN. - UnwrappedTSN tsn = next_tsn_; - next_tsn_.Increment(); - Data message_end(item.data().stream_id, item.data().ssn, - item.data().message_id, item.data().fsn, item.data().ppid, - std::vector(), Data::IsBeginning(false), - Data::IsEnd(true), item.data().is_unordered); - Item& added_item = - outstanding_data_ - .emplace(std::piecewise_construct, std::forward_as_tuple(tsn), - std::forward_as_tuple(std::move(message_end), TimeMs(0), - MaxRetransmits::NoLimit(), - TimeMs::InfiniteFuture(), - LifecycleId::NotSet())) - .first->second; - // The added chunk shouldn't be included in `outstanding_bytes`, so set it + Data message_end(item.data().stream_id, item.data().ssn, item.data().mid, + item.data().fsn, item.data().ppid, std::vector(), + Data::IsBeginning(false), Data::IsEnd(true), + item.data().is_unordered); + UnwrappedTSN tsn = next_tsn(); + Item& added_item = outstanding_data_.emplace_back( + item.message_id(), std::move(message_end), Timestamp::Zero(), + MaxRetransmits(0), Timestamp::PlusInfinity(), LifecycleId::NotSet()); + + // The added chunk shouldn't be included in `unacked_bytes`, so set it // as acked. added_item.Ack(); RTC_DLOG(LS_VERBOSE) << "Adding unsent end placeholder for message at tsn=" << *tsn.Wrap(); } - for (auto& [tsn, other] : outstanding_data_) { + UnwrappedTSN tsn = last_cumulative_tsn_ack_; + for (Item& other : outstanding_data_) { + tsn.Increment(); if (!other.is_abandoned() && other.data().stream_id == item.data().stream_id && - other.data().is_unordered == item.data().is_unordered && - other.data().message_id == item.data().message_id) { + other.message_id() == item.message_id()) { RTC_DLOG(LS_VERBOSE) << "Marking chunk " << *tsn.Wrap() << " as abandoned"; if (other.should_be_retransmitted()) { @@ -311,9 +337,7 @@ std::vector> OutstandingData::ExtractChunksThatCanFit( for (auto it = chunks.begin(); it != chunks.end();) { UnwrappedTSN tsn = *it; - auto elem = outstanding_data_.find(tsn); - RTC_DCHECK(elem != outstanding_data_.end()); - Item& item = elem->second; + Item& item = GetItem(tsn); RTC_DCHECK(item.should_be_retransmitted()); RTC_DCHECK(!item.is_outstanding()); RTC_DCHECK(!item.is_abandoned()); @@ -324,8 +348,9 @@ std::vector> OutstandingData::ExtractChunksThatCanFit( item.MarkAsRetransmitted(); result.emplace_back(tsn.Wrap(), item.data().Clone()); max_size -= serialized_size; - outstanding_bytes_ += serialized_size; - ++outstanding_items_; + unacked_payload_bytes_ += item.data().size(); + unacked_packet_bytes_ += serialized_size; + ++unacked_items_; it = chunks.erase(it); } else { ++it; @@ -366,8 +391,10 @@ std::vector> OutstandingData::GetChunksToBeRetransmitted( return ExtractChunksThatCanFit(to_be_retransmitted_, max_size); } -void OutstandingData::ExpireOutstandingChunks(TimeMs now) { - for (const auto& [tsn, item] : outstanding_data_) { +void OutstandingData::ExpireOutstandingChunks(Timestamp now) { + UnwrappedTSN tsn = last_cumulative_tsn_ack_; + for (const Item& item : outstanding_data_) { + tsn.Increment(); // Chunks that are nacked can be expired. 
Care should be taken not to expire // unacked (in-flight) chunks as they might have been received, but the SACK // is either delayed or in-flight and may be received later. @@ -375,7 +402,7 @@ void OutstandingData::ExpireOutstandingChunks(TimeMs now) { // Already abandoned. } else if (item.is_nacked() && item.has_expired(now)) { RTC_DLOG(LS_VERBOSE) << "Marking nacked chunk " << *tsn.Wrap() - << " and message " << *item.data().message_id + << " and message " << *item.data().mid << " as expired"; AbandonAllFor(item); } else { @@ -387,39 +414,36 @@ void OutstandingData::ExpireOutstandingChunks(TimeMs now) { } UnwrappedTSN OutstandingData::highest_outstanding_tsn() const { - return outstanding_data_.empty() ? last_cumulative_tsn_ack_ - : outstanding_data_.rbegin()->first; + return UnwrappedTSN::AddTo(last_cumulative_tsn_ack_, + outstanding_data_.size()); } -absl::optional OutstandingData::Insert( +std::optional OutstandingData::Insert( + OutgoingMessageId message_id, const Data& data, - TimeMs time_sent, + Timestamp time_sent, MaxRetransmits max_retransmissions, - TimeMs expires_at, + Timestamp expires_at, LifecycleId lifecycle_id) { - UnwrappedTSN tsn = next_tsn_; - next_tsn_.Increment(); - // All chunks are always padded to be even divisible by 4. size_t chunk_size = GetSerializedChunkSize(data); - outstanding_bytes_ += chunk_size; - ++outstanding_items_; - auto it = outstanding_data_ - .emplace(std::piecewise_construct, std::forward_as_tuple(tsn), - std::forward_as_tuple(data.Clone(), time_sent, - max_retransmissions, expires_at, - lifecycle_id)) - .first; - - if (it->second.has_expired(time_sent)) { + unacked_payload_bytes_ += data.size(); + unacked_packet_bytes_ += chunk_size; + ++unacked_items_; + UnwrappedTSN tsn = next_tsn(); + Item& item = outstanding_data_.emplace_back(message_id, data.Clone(), + time_sent, max_retransmissions, + expires_at, lifecycle_id); + + if (item.has_expired(time_sent)) { // No need to send it - it was expired when it was in the send // queue. - RTC_DLOG(LS_VERBOSE) << "Marking freshly produced chunk " - << *it->first.Wrap() << " and message " - << *it->second.data().message_id << " as expired"; - AbandonAllFor(it->second); + RTC_DLOG(LS_VERBOSE) << "Marking freshly produced chunk " << *tsn.Wrap() + << " and message " << *item.data().mid + << " as expired"; + AbandonAllFor(item); RTC_DCHECK(IsConsistent()); - return absl::nullopt; + return std::nullopt; } RTC_DCHECK(IsConsistent()); @@ -427,34 +451,47 @@ absl::optional OutstandingData::Insert( } void OutstandingData::NackAll() { - for (auto& [tsn, item] : outstanding_data_) { + UnwrappedTSN tsn = last_cumulative_tsn_ack_; + // A two-pass algorithm is needed, as NackItem will invalidate iterators. 
+ std::vector tsns_to_nack; + for (Item& item : outstanding_data_) { + tsn.Increment(); if (!item.is_acked()) { - NackItem(tsn, item, /*retransmit_now=*/true, - /*do_fast_retransmit=*/false); + tsns_to_nack.push_back(tsn); } } + + for (UnwrappedTSN tsn_to_nack : tsns_to_nack) { + NackItem(tsn_to_nack, /*retransmit_now=*/true, + /*do_fast_retransmit=*/false); + } + RTC_DCHECK(IsConsistent()); } -absl::optional OutstandingData::MeasureRTT(TimeMs now, - UnwrappedTSN tsn) const { - auto it = outstanding_data_.find(tsn); - if (it != outstanding_data_.end() && !it->second.has_been_retransmitted()) { - // https://tools.ietf.org/html/rfc4960#section-6.3.1 - // "Karn's algorithm: RTT measurements MUST NOT be made using - // packets that were retransmitted (and thus for which it is ambiguous - // whether the reply was for the first instance of the chunk or for a - // later instance)" - return now - it->second.time_sent(); +webrtc::TimeDelta OutstandingData::MeasureRTT(Timestamp now, + UnwrappedTSN tsn) const { + if (tsn > last_cumulative_tsn_ack_ && tsn < next_tsn()) { + const Item& item = GetItem(tsn); + if (!item.has_been_retransmitted()) { + // https://tools.ietf.org/html/rfc4960#section-6.3.1 + // "Karn's algorithm: RTT measurements MUST NOT be made using + // packets that were retransmitted (and thus for which it is ambiguous + // whether the reply was for the first instance of the chunk or for a + // later instance)" + return now - item.time_sent(); + } } - return absl::nullopt; + return webrtc::TimeDelta::PlusInfinity(); } std::vector> OutstandingData::GetChunkStatesForTesting() const { std::vector> states; states.emplace_back(last_cumulative_tsn_ack_.Wrap(), State::kAcked); - for (const auto& [tsn, item] : outstanding_data_) { + UnwrappedTSN tsn = last_cumulative_tsn_ack_; + for (const Item& item : outstanding_data_) { + tsn.Increment(); State state; if (item.is_abandoned()) { state = State::kAbandoned; @@ -475,9 +512,7 @@ OutstandingData::GetChunkStatesForTesting() const { bool OutstandingData::ShouldSendForwardTsn() const { if (!outstanding_data_.empty()) { - auto it = outstanding_data_.begin(); - return it->first == last_cumulative_tsn_ack_.next_value() && - it->second.is_abandoned(); + return outstanding_data_.front().is_abandoned(); } return false; } @@ -486,8 +521,11 @@ ForwardTsnChunk OutstandingData::CreateForwardTsn() const { std::map skipped_per_ordered_stream; UnwrappedTSN new_cumulative_ack = last_cumulative_tsn_ack_; - for (const auto& [tsn, item] : outstanding_data_) { - if ((tsn != new_cumulative_ack.next_value()) || !item.is_abandoned()) { + UnwrappedTSN tsn = last_cumulative_tsn_ack_; + for (const Item& item : outstanding_data_) { + tsn.Increment(); + if (stream_reset_breakpoint_tsns_.contains(tsn) || + (tsn != new_cumulative_ack.next_value()) || !item.is_abandoned()) { break; } new_cumulative_ack = tsn; @@ -509,35 +547,38 @@ IForwardTsnChunk OutstandingData::CreateIForwardTsn() const { std::map, MID> skipped_per_stream; UnwrappedTSN new_cumulative_ack = last_cumulative_tsn_ack_; - for (const auto& [tsn, item] : outstanding_data_) { - if ((tsn != new_cumulative_ack.next_value()) || !item.is_abandoned()) { + UnwrappedTSN tsn = last_cumulative_tsn_ack_; + for (const Item& item : outstanding_data_) { + tsn.Increment(); + if (stream_reset_breakpoint_tsns_.contains(tsn) || + (tsn != new_cumulative_ack.next_value()) || !item.is_abandoned()) { break; } new_cumulative_ack = tsn; std::pair stream_id = std::make_pair(item.data().is_unordered, item.data().stream_id); - if 
(item.data().message_id > skipped_per_stream[stream_id]) { - skipped_per_stream[stream_id] = item.data().message_id; + if (item.data().mid > skipped_per_stream[stream_id]) { + skipped_per_stream[stream_id] = item.data().mid; } } std::vector skipped_streams; skipped_streams.reserve(skipped_per_stream.size()); - for (const auto& [stream, message_id] : skipped_per_stream) { - skipped_streams.emplace_back(stream.first, stream.second, message_id); + for (const auto& [stream, mid] : skipped_per_stream) { + skipped_streams.emplace_back(stream.first, stream.second, mid); } return IForwardTsnChunk(new_cumulative_ack.Wrap(), std::move(skipped_streams)); } -void OutstandingData::ResetSequenceNumbers(UnwrappedTSN next_tsn, - UnwrappedTSN last_cumulative_tsn) { +void OutstandingData::ResetSequenceNumbers(UnwrappedTSN last_cumulative_tsn) { RTC_DCHECK(outstanding_data_.empty()); - RTC_DCHECK(next_tsn_ == last_cumulative_tsn_ack_.next_value()); - RTC_DCHECK(next_tsn == last_cumulative_tsn.next_value()); - next_tsn_ = next_tsn; last_cumulative_tsn_ack_ = last_cumulative_tsn; } + +void OutstandingData::BeginResetStreams() { + stream_reset_breakpoint_tsns_.insert(next_tsn()); +} } // namespace dcsctp diff --git a/net/dcsctp/tx/outstanding_data.h b/net/dcsctp/tx/outstanding_data.h index 6b4b7121fb..a25969734a 100644 --- a/net/dcsctp/tx/outstanding_data.h +++ b/net/dcsctp/tx/outstanding_data.h @@ -10,23 +10,30 @@ #ifndef NET_DCSCTP_TX_OUTSTANDING_DATA_H_ #define NET_DCSCTP_TX_OUTSTANDING_DATA_H_ +#include #include +#include #include #include #include -#include "absl/types/optional.h" +#include "api/units/timestamp.h" +#include "net/dcsctp/common/internal_types.h" #include "net/dcsctp/common/sequence_numbers.h" #include "net/dcsctp/packet/chunk/forward_tsn_chunk.h" #include "net/dcsctp/packet/chunk/iforward_tsn_chunk.h" #include "net/dcsctp/packet/chunk/sack_chunk.h" #include "net/dcsctp/packet/data.h" #include "net/dcsctp/public/types.h" +#include "rtc_base/containers/flat_set.h" namespace dcsctp { // This class keeps track of outstanding data chunks (sent, not yet acked) and // handles acking, nacking, rescheduling and abandoning. +// +// Items are added to this queue as they are sent and will be removed when the +// peer acks them using the cumulative TSN ack. class OutstandingData { public: // State for DATA chunks (message fragments) in the queue - used in tests. @@ -72,17 +79,15 @@ class OutstandingData { OutstandingData( size_t data_chunk_header_size, - UnwrappedTSN next_tsn, UnwrappedTSN last_cumulative_tsn_ack, - std::function discard_from_send_queue) + std::function discard_from_send_queue) : data_chunk_header_size_(data_chunk_header_size), - next_tsn_(next_tsn), last_cumulative_tsn_ack_(last_cumulative_tsn_ack), discard_from_send_queue_(std::move(discard_from_send_queue)) {} AckInfo HandleSack( UnwrappedTSN cumulative_tsn_ack, - rtc::ArrayView gap_ack_blocks, + webrtc::ArrayView gap_ack_blocks, bool is_in_fast_recovery); // Returns as many of the chunks that are eligible for fast retransmissions @@ -96,14 +101,18 @@ class OutstandingData { // it? std::vector> GetChunksToBeRetransmitted(size_t max_size); - size_t outstanding_bytes() const { return outstanding_bytes_; } + // How many inflight bytes there are, as sent on the wire as packets. + size_t unacked_packet_bytes() const { return unacked_packet_bytes_; } - // Returns the number of DATA chunks that are in-flight. - size_t outstanding_items() const { return outstanding_items_; } + // How many inflight bytes there are, counting only the payload. 
+ size_t unacked_payload_bytes() const { return unacked_payload_bytes_; } + + // Returns the number of DATA chunks that are in-flight (not acked or nacked). + size_t unacked_items() const { return unacked_items_; } // Given the current time `now_ms`, expire and abandon outstanding (sent at // least once) chunks that have a limited lifetime. - void ExpireOutstandingChunks(TimeMs now); + void ExpireOutstandingChunks(webrtc::Timestamp now); bool empty() const { return outstanding_data_.empty(); } @@ -119,18 +128,21 @@ class OutstandingData { return last_cumulative_tsn_ack_; } - UnwrappedTSN next_tsn() const { return next_tsn_; } + UnwrappedTSN next_tsn() const { + return highest_outstanding_tsn().next_value(); + } UnwrappedTSN highest_outstanding_tsn() const; // Schedules `data` to be sent, with the provided partial reliability // parameters. Returns the TSN if the item was actually added and scheduled to - // be sent, and absl::nullopt if it shouldn't be sent. - absl::optional Insert( + // be sent, and std::nullopt if it shouldn't be sent. + std::optional Insert( + OutgoingMessageId message_id, const Data& data, - TimeMs time_sent, + webrtc::Timestamp time_sent, MaxRetransmits max_retransmissions = MaxRetransmits::NoLimit(), - TimeMs expires_at = TimeMs::InfiniteFuture(), + webrtc::Timestamp expires_at = webrtc::Timestamp::PlusInfinity(), LifecycleId lifecycle_id = LifecycleId::NotSet()); // Nacks all outstanding data. @@ -144,8 +156,8 @@ class OutstandingData { // Given the current time and a TSN, it returns the measured RTT between when // the chunk was sent and now. It takes into acccount Karn's algorithm, so if - // the chunk has ever been retransmitted, it will return absl::nullopt. - absl::optional MeasureRTT(TimeMs now, UnwrappedTSN tsn) const; + // the chunk has ever been retransmitted, it will return `PlusInfinity()`. + webrtc::TimeDelta MeasureRTT(webrtc::Timestamp now, UnwrappedTSN tsn) const; // Returns the internal state of all queued chunks. This is only used in // unit-tests. @@ -156,8 +168,11 @@ class OutstandingData { bool ShouldSendForwardTsn() const; // Sets the next TSN to be used. This is used in handover. - void ResetSequenceNumbers(UnwrappedTSN next_tsn, - UnwrappedTSN last_cumulative_tsn); + void ResetSequenceNumbers(UnwrappedTSN last_cumulative_tsn); + + // Called when an outgoing stream reset is sent, marking the last assigned TSN + // as a breakpoint that a FORWARD-TSN shouldn't cross. 
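The breakpoint described in this comment (and declared as BeginResetStreams() just below) records the "next TSN to assign" when an outgoing stream reset starts, and CreateForwardTsn/CreateIForwardTsn stop advancing the new cumulative TSN once they reach it. A minimal model of that walk, using plain uint32_t TSNs instead of UnwrappedTSN and a hypothetical NewCumulativeAck helper:

```cpp
#include <cassert>
#include <cstdint>
#include <set>
#include <vector>

// is_abandoned[i] describes the chunk with TSN last_cumulative_tsn_ack + 1 + i.
// The walk stops at the first chunk that is not abandoned, or at a breakpoint
// TSN recorded when a stream reset began.
uint32_t NewCumulativeAck(uint32_t last_cumulative_tsn_ack,
                          const std::vector<bool>& is_abandoned,
                          const std::set<uint32_t>& breakpoints) {
  uint32_t new_ack = last_cumulative_tsn_ack;
  for (size_t i = 0; i < is_abandoned.size(); ++i) {
    uint32_t tsn = last_cumulative_tsn_ack + 1 + static_cast<uint32_t>(i);
    if (breakpoints.count(tsn) > 0 || !is_abandoned[i]) {
      break;  // Never let a FORWARD-TSN cross a reset boundary.
    }
    new_ack = tsn;
  }
  return new_ack;
}

int main() {
  std::set<uint32_t> breakpoints = {12};  // A reset began after TSN 11 was assigned.
  std::vector<bool> abandoned = {true, true, true, true};  // TSNs 10..13.
  // Even though 12 and 13 are abandoned, the advertised cum-TSN stops at 11.
  assert(NewCumulativeAck(9, abandoned, breakpoints) == 11);
  return 0;
}
```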
+ void BeginResetStreams(); private: // A fragmented message's DATA chunk while in the retransmission queue, and @@ -170,12 +185,14 @@ class OutstandingData { kAbandon, }; - Item(Data data, - TimeMs time_sent, + Item(OutgoingMessageId message_id, + Data data, + webrtc::Timestamp time_sent, MaxRetransmits max_retransmissions, - TimeMs expires_at, + webrtc::Timestamp expires_at, LifecycleId lifecycle_id) - : time_sent_(time_sent), + : message_id_(message_id), + time_sent_(time_sent), max_retransmissions_(max_retransmissions), expires_at_(expires_at), lifecycle_id_(lifecycle_id), @@ -184,7 +201,9 @@ class OutstandingData { Item(const Item&) = delete; Item& operator=(const Item&) = delete; - TimeMs time_sent() const { return time_sent_; } + OutgoingMessageId message_id() const { return message_id_; } + + webrtc::Timestamp time_sent() const { return time_sent_; } const Data& data() const { return data_; } @@ -218,7 +237,7 @@ class OutstandingData { // Given the current time, and the current state of this DATA chunk, it will // indicate if it has expired (SCTP Partial Reliability Extension). - bool has_expired(TimeMs now) const; + bool has_expired(webrtc::Timestamp now) const; LifecycleId lifecycle_id() const { return lifecycle_id_; } @@ -244,8 +263,10 @@ class OutstandingData { // NOTE: This data structure has been optimized for size, by ordering fields // to avoid unnecessary padding. + const OutgoingMessageId message_id_; + // When the packet was sent, and placed in this queue. - const TimeMs time_sent_; + const webrtc::Timestamp time_sent_; // If the message was sent with a maximum number of retransmissions, this is // set to that number. The value zero (0) means that it will never be // retransmitted. @@ -265,7 +286,7 @@ class OutstandingData { // At this exact millisecond, the item is considered expired. If the message // is not to be expired, this is set to the infinite future. - const TimeMs expires_at_; + const webrtc::Timestamp expires_at_; // An optional lifecycle id, which may only be set for the last fragment. const LifecycleId lifecycle_id_; @@ -277,6 +298,9 @@ class OutstandingData { // Returns how large a chunk will be, serialized, carrying the data size_t GetSerializedChunkSize(const Data& data) const; + Item& GetItem(UnwrappedTSN tsn); + const Item& GetItem(UnwrappedTSN tsn) const; + // Given a `cumulative_tsn_ack` from an incoming SACK, will remove those items // in the retransmission queue up until this value and will update `ack_info` // by setting `bytes_acked_by_cumulative_tsn_ack`. @@ -284,9 +308,10 @@ class OutstandingData { // Will mark the chunks covered by the `gap_ack_blocks` from an incoming SACK // as "acked" and update `ack_info` by adding new TSNs to `added_tsns`. - void AckGapBlocks(UnwrappedTSN cumulative_tsn_ack, - rtc::ArrayView gap_ack_blocks, - AckInfo& ack_info); + void AckGapBlocks( + UnwrappedTSN cumulative_tsn_ack, + webrtc::ArrayView gap_ack_blocks, + AckInfo& ack_info); // Mark chunks reported as "missing", as "nacked" or "to be retransmitted" // depending how many times this has happened. Only packets up until @@ -294,13 +319,13 @@ class OutstandingData { // nacked/retransmitted. The method will set `ack_info.has_packet_loss`. void NackBetweenAckBlocks( UnwrappedTSN cumulative_tsn_ack, - rtc::ArrayView gap_ack_blocks, + webrtc::ArrayView gap_ack_blocks, bool is_in_fast_recovery, OutstandingData::AckInfo& ack_info); // Process the acknowledgement of the chunk referenced by `iter` and updates // state in `ack_info` and the object's state. 
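Looping back to the MeasureRTT declaration above: the Karn's algorithm rule it cites (RFC 4960 section 6.3.1) says an RTT sample may only be taken from a chunk that was never retransmitted, because an ack for a retransmitted chunk is ambiguous. A minimal sketch of that rule, with a hypothetical SentChunkInfo struct and integer milliseconds standing in for webrtc::Timestamp (where the diff returns TimeDelta::PlusInfinity() for "no sample", this returns -1):

```cpp
#include <cassert>
#include <cstdint>

struct SentChunkInfo {
  int64_t time_sent_ms;
  bool has_been_retransmitted;
};

// Returns an RTT sample in milliseconds, or -1 if no valid sample can be made.
int64_t MeasureRttMs(const SentChunkInfo& chunk, int64_t now_ms) {
  if (chunk.has_been_retransmitted) {
    // Karn's algorithm: the ack could be for either transmission, so the
    // measurement would be ambiguous. Take no sample.
    return -1;
  }
  return now_ms - chunk.time_sent_ms;
}

int main() {
  assert(MeasureRttMs({/*time_sent_ms=*/42, /*retransmitted=*/false}, 165) == 123);
  assert(MeasureRttMs({/*time_sent_ms=*/42, /*retransmitted=*/true}, 165) == -1);
  return 0;
}
```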
- void AckChunk(AckInfo& ack_info, std::map::iterator iter); + void AckChunk(AckInfo& ack_info, UnwrappedTSN tsn, Item& item); // Helper method to process an incoming nack of an item and perform the // correct operations given the action indicated when nacking an item (e.g. @@ -310,10 +335,11 @@ class OutstandingData { // many times so that it should be retransmitted, this will schedule it to be // "fast retransmitted". This is only done just before going into fast // recovery. - bool NackItem(UnwrappedTSN tsn, - Item& item, - bool retransmit_now, - bool do_fast_retransmit); + // + // Note that since nacking an item may result in it becoming abandoned, which + // in turn could alter `outstanding_data_`, any iterators are invalidated + // after having called this method. + bool NackItem(UnwrappedTSN tsn, bool retransmit_now, bool do_fast_retransmit); // Given that a message fragment, `item` has been abandoned, abandon all other // fragments that share the same message - both never-before-sent fragments @@ -328,23 +354,30 @@ class OutstandingData { // The size of the data chunk (DATA/I-DATA) header that is used. const size_t data_chunk_header_size_; - // Next TSN to used. - UnwrappedTSN next_tsn_; // The last cumulative TSN ack number. UnwrappedTSN last_cumulative_tsn_ack_; // Callback when to discard items from the send queue. - std::function discard_from_send_queue_; - - std::map outstanding_data_; - // The number of bytes that are in-flight (sent but not yet acked or nacked). - size_t outstanding_bytes_ = 0; + std::function discard_from_send_queue_; + + // Outstanding items. If non-empty, the first element has + // `TSN=last_cumulative_tsn_ack_ + 1` and the following items are in strict + // increasing TSN order. The last item has `TSN=highest_outstanding_tsn()`. + std::deque outstanding_data_; + // The number of bytes that are in-flight, counting only the payload. + size_t unacked_payload_bytes_ = 0; + // The number of bytes that are in-flight, as sent on the wire (as packets). + size_t unacked_packet_bytes_ = 0; // The number of DATA chunks that are in-flight (sent but not yet acked or // nacked). - size_t outstanding_items_ = 0; + size_t unacked_items_ = 0; // Data chunks that are eligible for fast retransmission. std::set to_be_fast_retransmitted_; // Data chunks that are to be retransmitted. std::set to_be_retransmitted_; + // When a stream reset has begun, the "next TSN to assign" is added to this + // set, and removed when the cum-ack TSN reaches it. This is used to limit a + // FORWARD-TSN to reset streams past a "stream reset last assigned TSN".
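Stepping back to the outstanding_data_ deque introduced above: that layout is what makes the new GetItem() index arithmetic work, since element i always holds the chunk with TSN last_cumulative_tsn_ack_ + 1 + i, and popping the front on a cumulative ack preserves the invariant. A minimal sketch with plain uint32_t TSNs (no wrap-around handling) and a hypothetical OutstandingModel in place of the real Item storage:

```cpp
#include <cassert>
#include <cstdint>
#include <deque>
#include <string>

struct OutstandingModel {
  uint32_t last_cumulative_tsn_ack = 9;
  std::deque<std::string> items;  // items[i] has TSN last_cumulative_tsn_ack + 1 + i.

  uint32_t next_tsn() const {
    return last_cumulative_tsn_ack + static_cast<uint32_t>(items.size()) + 1;
  }
  std::string& GetItem(uint32_t tsn) {
    assert(tsn > last_cumulative_tsn_ack && tsn < next_tsn());
    return items[tsn - last_cumulative_tsn_ack - 1];
  }
  // Acking the cumulative TSN pops from the front and advances the base, so
  // every remaining element keeps its TSN-to-index mapping.
  void RemoveAckedUpTo(uint32_t cumulative_tsn_ack) {
    while (!items.empty() && last_cumulative_tsn_ack < cumulative_tsn_ack) {
      items.pop_front();
      ++last_cumulative_tsn_ack;
    }
  }
};

int main() {
  OutstandingModel m;
  m.items = {"tsn 10", "tsn 11", "tsn 12"};
  assert(m.next_tsn() == 13);
  assert(m.GetItem(11) == "tsn 11");
  m.RemoveAckedUpTo(10);
  assert(m.last_cumulative_tsn_ack == 10);
  assert(m.GetItem(11) == "tsn 11");  // Same TSN, now at index 0.
  return 0;
}
```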
+ webrtc::flat_set stream_reset_breakpoint_tsns_; }; } // namespace dcsctp #endif // NET_DCSCTP_TX_OUTSTANDING_DATA_H_ diff --git a/net/dcsctp/tx/outstanding_data_test.cc b/net/dcsctp/tx/outstanding_data_test.cc index cdca40cfef..e737035feb 100644 --- a/net/dcsctp/tx/outstanding_data_test.cc +++ b/net/dcsctp/tx/outstanding_data_test.cc @@ -9,13 +9,15 @@ */ #include "net/dcsctp/tx/outstanding_data.h" +#include #include -#include "absl/types/optional.h" +#include "net/dcsctp/common/internal_types.h" #include "net/dcsctp/common/math.h" #include "net/dcsctp/common/sequence_numbers.h" #include "net/dcsctp/packet/chunk/data_chunk.h" #include "net/dcsctp/packet/chunk/forward_tsn_chunk.h" +#include "net/dcsctp/public/dcsctp_socket.h" #include "net/dcsctp/public/types.h" #include "net/dcsctp/testing/data_generator.h" #include "net/dcsctp/testing/testing_macros.h" @@ -27,33 +29,39 @@ namespace { using ::testing::MockFunction; using State = ::dcsctp::OutstandingData::State; using ::testing::_; +using ::testing::AllOf; using ::testing::ElementsAre; using ::testing::IsEmpty; using ::testing::Pair; +using ::testing::Property; using ::testing::Return; using ::testing::StrictMock; +using ::testing::UnorderedElementsAre; +using ::webrtc::TimeDelta; +using ::webrtc::Timestamp; -constexpr TimeMs kNow(42); +constexpr Timestamp kNow = Timestamp::Millis(42); +constexpr OutgoingMessageId kMessageId = OutgoingMessageId(17); class OutstandingDataTest : public testing::Test { protected: OutstandingDataTest() : gen_(MID(42)), buf_(DataChunk::kHeaderSize, - unwrapper_.Unwrap(TSN(10)), unwrapper_.Unwrap(TSN(9)), on_discard_.AsStdFunction()) {} UnwrappedTSN::Unwrapper unwrapper_; DataGenerator gen_; - StrictMock> on_discard_; + StrictMock> on_discard_; OutstandingData buf_; }; TEST_F(OutstandingDataTest, HasInitialState) { EXPECT_TRUE(buf_.empty()); - EXPECT_EQ(buf_.outstanding_bytes(), 0u); - EXPECT_EQ(buf_.outstanding_items(), 0u); + EXPECT_EQ(buf_.unacked_payload_bytes(), 0u); + EXPECT_EQ(buf_.unacked_packet_bytes(), 0u); + EXPECT_EQ(buf_.unacked_items(), 0u); EXPECT_FALSE(buf_.has_data_to_be_retransmitted()); EXPECT_EQ(buf_.last_cumulative_tsn_ack().Wrap(), TSN(9)); EXPECT_EQ(buf_.next_tsn().Wrap(), TSN(10)); @@ -64,13 +72,13 @@ TEST_F(OutstandingDataTest, HasInitialState) { } TEST_F(OutstandingDataTest, InsertChunk) { - ASSERT_HAS_VALUE_AND_ASSIGN(UnwrappedTSN tsn, - buf_.Insert(gen_.Ordered({1}, "BE"), kNow)); + ASSERT_HAS_VALUE_AND_ASSIGN( + UnwrappedTSN tsn, buf_.Insert(kMessageId, gen_.Ordered({1}, "BE"), kNow)); EXPECT_EQ(tsn.Wrap(), TSN(10)); - EXPECT_EQ(buf_.outstanding_bytes(), DataChunk::kHeaderSize + RoundUpTo4(1)); - EXPECT_EQ(buf_.outstanding_items(), 1u); + EXPECT_EQ(buf_.unacked_payload_bytes(), 1u); + EXPECT_EQ(buf_.unacked_items(), 1u); EXPECT_FALSE(buf_.has_data_to_be_retransmitted()); EXPECT_EQ(buf_.last_cumulative_tsn_ack().Wrap(), TSN(9)); EXPECT_EQ(buf_.next_tsn().Wrap(), TSN(11)); @@ -81,7 +89,7 @@ TEST_F(OutstandingDataTest, InsertChunk) { } TEST_F(OutstandingDataTest, AcksSingleChunk) { - buf_.Insert(gen_.Ordered({1}, "BE"), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, "BE"), kNow); OutstandingData::AckInfo ack = buf_.HandleSack(unwrapper_.Unwrap(TSN(10)), {}, false); @@ -89,8 +97,8 @@ TEST_F(OutstandingDataTest, AcksSingleChunk) { EXPECT_EQ(ack.highest_tsn_acked.Wrap(), TSN(10)); EXPECT_FALSE(ack.has_packet_loss); - EXPECT_EQ(buf_.outstanding_bytes(), 0u); - EXPECT_EQ(buf_.outstanding_items(), 0u); + EXPECT_EQ(buf_.unacked_payload_bytes(), 0u); + EXPECT_EQ(buf_.unacked_items(), 0u); 
EXPECT_FALSE(buf_.has_data_to_be_retransmitted()); EXPECT_EQ(buf_.last_cumulative_tsn_ack().Wrap(), TSN(10)); EXPECT_EQ(buf_.next_tsn().Wrap(), TSN(11)); @@ -100,11 +108,11 @@ TEST_F(OutstandingDataTest, AcksSingleChunk) { } TEST_F(OutstandingDataTest, AcksPreviousChunkDoesntUpdate) { - buf_.Insert(gen_.Ordered({1}, "BE"), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, "BE"), kNow); buf_.HandleSack(unwrapper_.Unwrap(TSN(9)), {}, false); - EXPECT_EQ(buf_.outstanding_bytes(), DataChunk::kHeaderSize + RoundUpTo4(1)); - EXPECT_EQ(buf_.outstanding_items(), 1u); + EXPECT_EQ(buf_.unacked_payload_bytes(), 1u); + EXPECT_EQ(buf_.unacked_items(), 1u); EXPECT_FALSE(buf_.has_data_to_be_retransmitted()); EXPECT_EQ(buf_.last_cumulative_tsn_ack().Wrap(), TSN(9)); EXPECT_EQ(buf_.next_tsn().Wrap(), TSN(11)); @@ -115,8 +123,8 @@ TEST_F(OutstandingDataTest, AcksPreviousChunkDoesntUpdate) { } TEST_F(OutstandingDataTest, AcksAndNacksWithGapAckBlocks) { - buf_.Insert(gen_.Ordered({1}, "B"), kNow); - buf_.Insert(gen_.Ordered({1}, "E"), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, "B"), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, "E"), kNow); std::vector gab = {SackChunk::GapAckBlock(2, 2)}; OutstandingData::AckInfo ack = @@ -125,8 +133,8 @@ TEST_F(OutstandingDataTest, AcksAndNacksWithGapAckBlocks) { EXPECT_EQ(ack.highest_tsn_acked.Wrap(), TSN(11)); EXPECT_FALSE(ack.has_packet_loss); - EXPECT_EQ(buf_.outstanding_bytes(), 0u); - EXPECT_EQ(buf_.outstanding_items(), 0u); + EXPECT_EQ(buf_.unacked_payload_bytes(), 0u); + EXPECT_EQ(buf_.unacked_items(), 0u); EXPECT_FALSE(buf_.has_data_to_be_retransmitted()); EXPECT_EQ(buf_.last_cumulative_tsn_ack().Wrap(), TSN(9)); EXPECT_EQ(buf_.next_tsn().Wrap(), TSN(12)); @@ -138,8 +146,8 @@ TEST_F(OutstandingDataTest, AcksAndNacksWithGapAckBlocks) { } TEST_F(OutstandingDataTest, NacksThreeTimesWithSameTsnDoesntRetransmit) { - buf_.Insert(gen_.Ordered({1}, "B"), kNow); - buf_.Insert(gen_.Ordered({1}, "E"), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, "B"), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, "E"), kNow); std::vector gab1 = {SackChunk::GapAckBlock(2, 2)}; EXPECT_FALSE( @@ -161,10 +169,10 @@ TEST_F(OutstandingDataTest, NacksThreeTimesWithSameTsnDoesntRetransmit) { } TEST_F(OutstandingDataTest, NacksThreeTimesResultsInRetransmission) { - buf_.Insert(gen_.Ordered({1}, "B"), kNow); - buf_.Insert(gen_.Ordered({1}, ""), kNow); - buf_.Insert(gen_.Ordered({1}, ""), kNow); - buf_.Insert(gen_.Ordered({1}, "E"), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, "B"), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, "E"), kNow); std::vector gab1 = {SackChunk::GapAckBlock(2, 2)}; EXPECT_FALSE( @@ -199,10 +207,10 @@ TEST_F(OutstandingDataTest, NacksThreeTimesResultsInRetransmission) { TEST_F(OutstandingDataTest, NacksThreeTimesResultsInAbandoning) { static constexpr MaxRetransmits kMaxRetransmissions(0); - buf_.Insert(gen_.Ordered({1}, "B"), kNow, kMaxRetransmissions); - buf_.Insert(gen_.Ordered({1}, ""), kNow, kMaxRetransmissions); - buf_.Insert(gen_.Ordered({1}, ""), kNow, kMaxRetransmissions); - buf_.Insert(gen_.Ordered({1}, "E"), kNow, kMaxRetransmissions); + buf_.Insert(kMessageId, gen_.Ordered({1}, "B"), kNow, kMaxRetransmissions); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow, kMaxRetransmissions); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow, kMaxRetransmissions); + buf_.Insert(kMessageId, gen_.Ordered({1}, "E"), kNow, 
kMaxRetransmissions); std::vector gab1 = {SackChunk::GapAckBlock(2, 2)}; EXPECT_FALSE( @@ -214,7 +222,7 @@ TEST_F(OutstandingDataTest, NacksThreeTimesResultsInAbandoning) { buf_.HandleSack(unwrapper_.Unwrap(TSN(9)), gab2, false).has_packet_loss); EXPECT_FALSE(buf_.has_data_to_be_retransmitted()); - EXPECT_CALL(on_discard_, Call(IsUnordered(false), StreamID(1), MID(42))) + EXPECT_CALL(on_discard_, Call(StreamID(1), kMessageId)) .WillOnce(Return(false)); std::vector gab3 = {SackChunk::GapAckBlock(2, 4)}; OutstandingData::AckInfo ack = @@ -235,10 +243,10 @@ TEST_F(OutstandingDataTest, NacksThreeTimesResultsInAbandoning) { TEST_F(OutstandingDataTest, NacksThreeTimesResultsInAbandoningWithPlaceholder) { static constexpr MaxRetransmits kMaxRetransmissions(0); - buf_.Insert(gen_.Ordered({1}, "B"), kNow, kMaxRetransmissions); - buf_.Insert(gen_.Ordered({1}, ""), kNow, kMaxRetransmissions); - buf_.Insert(gen_.Ordered({1}, ""), kNow, kMaxRetransmissions); - buf_.Insert(gen_.Ordered({1}, ""), kNow, kMaxRetransmissions); + buf_.Insert(kMessageId, gen_.Ordered({1}, "B"), kNow, kMaxRetransmissions); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow, kMaxRetransmissions); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow, kMaxRetransmissions); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow, kMaxRetransmissions); std::vector gab1 = {SackChunk::GapAckBlock(2, 2)}; EXPECT_FALSE( @@ -250,7 +258,7 @@ TEST_F(OutstandingDataTest, NacksThreeTimesResultsInAbandoningWithPlaceholder) { buf_.HandleSack(unwrapper_.Unwrap(TSN(9)), gab2, false).has_packet_loss); EXPECT_FALSE(buf_.has_data_to_be_retransmitted()); - EXPECT_CALL(on_discard_, Call(IsUnordered(false), StreamID(1), MID(42))) + EXPECT_CALL(on_discard_, Call(StreamID(1), kMessageId)) .WillOnce(Return(true)); std::vector gab3 = {SackChunk::GapAckBlock(2, 4)}; OutstandingData::AckInfo ack = @@ -271,17 +279,19 @@ TEST_F(OutstandingDataTest, NacksThreeTimesResultsInAbandoningWithPlaceholder) { } TEST_F(OutstandingDataTest, ExpiresChunkBeforeItIsInserted) { - static constexpr TimeMs kExpiresAt = kNow + DurationMs(1); - EXPECT_TRUE(buf_.Insert(gen_.Ordered({1}, "B"), kNow, + static constexpr Timestamp kExpiresAt = kNow + TimeDelta::Millis(1); + EXPECT_TRUE(buf_.Insert(kMessageId, gen_.Ordered({1}, "B"), kNow, MaxRetransmits::NoLimit(), kExpiresAt) .has_value()); - EXPECT_TRUE(buf_.Insert(gen_.Ordered({1}, ""), kNow + DurationMs(0), + EXPECT_TRUE(buf_.Insert(kMessageId, gen_.Ordered({1}, ""), + kNow + TimeDelta::Millis(0), MaxRetransmits::NoLimit(), kExpiresAt) .has_value()); - EXPECT_CALL(on_discard_, Call(IsUnordered(false), StreamID(1), MID(42))) + EXPECT_CALL(on_discard_, Call(StreamID(1), kMessageId)) .WillOnce(Return(false)); - EXPECT_FALSE(buf_.Insert(gen_.Ordered({1}, "E"), kNow + DurationMs(1), + EXPECT_FALSE(buf_.Insert(kMessageId, gen_.Ordered({1}, "E"), + kNow + TimeDelta::Millis(1), MaxRetransmits::NoLimit(), kExpiresAt) .has_value()); @@ -298,11 +308,11 @@ TEST_F(OutstandingDataTest, ExpiresChunkBeforeItIsInserted) { TEST_F(OutstandingDataTest, CanGenerateForwardTsn) { static constexpr MaxRetransmits kMaxRetransmissions(0); - buf_.Insert(gen_.Ordered({1}, "B"), kNow, kMaxRetransmissions); - buf_.Insert(gen_.Ordered({1}, ""), kNow, kMaxRetransmissions); - buf_.Insert(gen_.Ordered({1}, "E"), kNow, kMaxRetransmissions); + buf_.Insert(kMessageId, gen_.Ordered({1}, "B"), kNow, kMaxRetransmissions); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow, kMaxRetransmissions); + buf_.Insert(kMessageId, gen_.Ordered({1}, "E"), kNow, 
kMaxRetransmissions); - EXPECT_CALL(on_discard_, Call(IsUnordered(false), StreamID(1), MID(42))) + EXPECT_CALL(on_discard_, Call(StreamID(1), kMessageId)) .WillOnce(Return(false)); buf_.NackAll(); @@ -319,14 +329,14 @@ TEST_F(OutstandingDataTest, CanGenerateForwardTsn) { } TEST_F(OutstandingDataTest, AckWithGapBlocksFromRFC4960Section334) { - buf_.Insert(gen_.Ordered({1}, "B"), kNow); - buf_.Insert(gen_.Ordered({1}, ""), kNow); - buf_.Insert(gen_.Ordered({1}, ""), kNow); - buf_.Insert(gen_.Ordered({1}, ""), kNow); - buf_.Insert(gen_.Ordered({1}, ""), kNow); - buf_.Insert(gen_.Ordered({1}, ""), kNow); - buf_.Insert(gen_.Ordered({1}, ""), kNow); - buf_.Insert(gen_.Ordered({1}, "E"), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, "B"), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, "E"), kNow); EXPECT_THAT(buf_.GetChunkStatesForTesting(), testing::ElementsAre(Pair(TSN(9), State::kAcked), // @@ -353,16 +363,15 @@ TEST_F(OutstandingDataTest, AckWithGapBlocksFromRFC4960Section334) { } TEST_F(OutstandingDataTest, MeasureRTT) { - buf_.Insert(gen_.Ordered({1}, "BE"), kNow); - buf_.Insert(gen_.Ordered({1}, "BE"), kNow + DurationMs(1)); - buf_.Insert(gen_.Ordered({1}, "BE"), kNow + DurationMs(2)); + buf_.Insert(kMessageId, gen_.Ordered({1}, "BE"), kNow); + buf_.Insert(kMessageId, gen_.Ordered({1}, "BE"), kNow + TimeDelta::Millis(1)); + buf_.Insert(kMessageId, gen_.Ordered({1}, "BE"), kNow + TimeDelta::Millis(2)); - static constexpr DurationMs kDuration(123); - ASSERT_HAS_VALUE_AND_ASSIGN( - DurationMs duration, - buf_.MeasureRTT(kNow + kDuration, unwrapper_.Unwrap(TSN(11)))); + static constexpr TimeDelta kDuration = TimeDelta::Millis(123); + TimeDelta duration = + buf_.MeasureRTT(kNow + kDuration, unwrapper_.Unwrap(TSN(11))); - EXPECT_EQ(duration, kDuration - DurationMs(1)); + EXPECT_EQ(duration, kDuration - TimeDelta::Millis(1)); } TEST_F(OutstandingDataTest, MustRetransmitBeforeGettingNackedAgain) { @@ -372,7 +381,8 @@ TEST_F(OutstandingDataTest, MustRetransmitBeforeGettingNackedAgain) { static constexpr MaxRetransmits kOneRetransmission(1); for (int tsn = 10; tsn <= 20; ++tsn) { - buf_.Insert(gen_.Ordered({1}, tsn == 10 ? "B" + buf_.Insert(kMessageId, + gen_.Ordered({1}, tsn == 10 ? "B" : tsn == 20 ? "E" : ""), kNow, kOneRetransmission); @@ -431,7 +441,7 @@ TEST_F(OutstandingDataTest, MustRetransmitBeforeGettingNackedAgain) { buf_.HandleSack(unwrapper_.Unwrap(TSN(9)), gab8, false).has_packet_loss); EXPECT_FALSE(buf_.has_data_to_be_retransmitted()); - EXPECT_CALL(on_discard_, Call(IsUnordered(false), StreamID(1), MID(42))) + EXPECT_CALL(on_discard_, Call(StreamID(1), kMessageId)) .WillOnce(Return(false)); std::vector gab9 = {SackChunk::GapAckBlock(2, 10)}; @@ -442,54 +452,16 @@ TEST_F(OutstandingDataTest, MustRetransmitBeforeGettingNackedAgain) { EXPECT_FALSE(buf_.has_data_to_be_retransmitted()); } -TEST_F(OutstandingDataTest, CanAbandonChunksMarkedForFastRetransmit) { - // This test is a bit convoluted, and can't really happen with a well behaving - // client, but this was found by fuzzers. This test will verify that a message - // that was both marked as "to be fast retransmitted" and "abandoned" at the - // same time doesn't cause any consistency issues. 
- - // Add chunks 10-14, but chunk 11 has zero retransmissions. When chunk 10 and - // 11 are NACKed three times, chunk 10 will be marked for retransmission, but - // chunk 11 will be abandoned, which also abandons chunk 10, as it's part of - // the same message. - buf_.Insert(gen_.Ordered({1}, "B"), kNow); // 10 - buf_.Insert(gen_.Ordered({1}, ""), kNow, MaxRetransmits(0)); // 11 - buf_.Insert(gen_.Ordered({1}, ""), kNow); // 12 - buf_.Insert(gen_.Ordered({1}, ""), kNow); // 13 - buf_.Insert(gen_.Ordered({1}, "E"), kNow); // 14 - - // ACK 9, 12 - std::vector gab1 = {SackChunk::GapAckBlock(3, 3)}; - EXPECT_FALSE( - buf_.HandleSack(unwrapper_.Unwrap(TSN(9)), gab1, false).has_packet_loss); - EXPECT_FALSE(buf_.has_data_to_be_retransmitted()); - - // ACK 9, 12, 13 - std::vector gab2 = {SackChunk::GapAckBlock(3, 4)}; - EXPECT_FALSE( - buf_.HandleSack(unwrapper_.Unwrap(TSN(9)), gab2, false).has_packet_loss); - EXPECT_FALSE(buf_.has_data_to_be_retransmitted()); - - EXPECT_CALL(on_discard_, Call(IsUnordered(false), StreamID(1), MID(42))) - .WillOnce(Return(false)); - - // ACK 9, 12, 13, 14 - std::vector gab3 = {SackChunk::GapAckBlock(3, 5)}; - OutstandingData::AckInfo ack = - buf_.HandleSack(unwrapper_.Unwrap(TSN(9)), gab3, false); - EXPECT_TRUE(ack.has_packet_loss); - EXPECT_FALSE(buf_.has_data_to_be_retransmitted()); - EXPECT_THAT(buf_.GetChunksToBeFastRetransmitted(1000), IsEmpty()); - EXPECT_THAT(buf_.GetChunksToBeRetransmitted(1000), IsEmpty()); -} - TEST_F(OutstandingDataTest, LifecyleReturnsAckedItemsInAckInfo) { - buf_.Insert(gen_.Ordered({1}, "BE"), kNow, MaxRetransmits::NoLimit(), - TimeMs::InfiniteFuture(), LifecycleId(42)); - buf_.Insert(gen_.Ordered({1}, "BE"), kNow, MaxRetransmits::NoLimit(), - TimeMs::InfiniteFuture(), LifecycleId(43)); - buf_.Insert(gen_.Ordered({1}, "BE"), kNow, MaxRetransmits::NoLimit(), - TimeMs::InfiniteFuture(), LifecycleId(44)); + buf_.Insert(OutgoingMessageId(1), gen_.Ordered({1}, "BE"), kNow, + MaxRetransmits::NoLimit(), Timestamp::PlusInfinity(), + LifecycleId(42)); + buf_.Insert(OutgoingMessageId(2), gen_.Ordered({1}, "BE"), kNow, + MaxRetransmits::NoLimit(), Timestamp::PlusInfinity(), + LifecycleId(43)); + buf_.Insert(OutgoingMessageId(3), gen_.Ordered({1}, "BE"), kNow, + MaxRetransmits::NoLimit(), Timestamp::PlusInfinity(), + LifecycleId(44)); OutstandingData::AckInfo ack1 = buf_.HandleSack(unwrapper_.Unwrap(TSN(11)), {}, false); @@ -504,11 +476,11 @@ TEST_F(OutstandingDataTest, LifecyleReturnsAckedItemsInAckInfo) { } TEST_F(OutstandingDataTest, LifecycleReturnsAbandonedNackedThreeTimes) { - buf_.Insert(gen_.Ordered({1}, "B"), kNow, MaxRetransmits(0)); - buf_.Insert(gen_.Ordered({1}, ""), kNow, MaxRetransmits(0)); - buf_.Insert(gen_.Ordered({1}, ""), kNow, MaxRetransmits(0)); - buf_.Insert(gen_.Ordered({1}, "E"), kNow, MaxRetransmits(0), - TimeMs::InfiniteFuture(), LifecycleId(42)); + buf_.Insert(kMessageId, gen_.Ordered({1}, "B"), kNow, MaxRetransmits(0)); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow, MaxRetransmits(0)); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow, MaxRetransmits(0)); + buf_.Insert(kMessageId, gen_.Ordered({1}, "E"), kNow, MaxRetransmits(0), + Timestamp::PlusInfinity(), LifecycleId(42)); std::vector gab1 = {SackChunk::GapAckBlock(2, 2)}; EXPECT_FALSE( @@ -521,7 +493,7 @@ TEST_F(OutstandingDataTest, LifecycleReturnsAbandonedNackedThreeTimes) { EXPECT_FALSE(buf_.has_data_to_be_retransmitted()); std::vector gab3 = {SackChunk::GapAckBlock(2, 4)}; - EXPECT_CALL(on_discard_, Call(IsUnordered(false), StreamID(1), MID(42))) 
+ EXPECT_CALL(on_discard_, Call(StreamID(1), kMessageId)) .WillOnce(Return(false)); OutstandingData::AckInfo ack1 = buf_.HandleSack(unwrapper_.Unwrap(TSN(9)), gab3, false); @@ -540,11 +512,11 @@ TEST_F(OutstandingDataTest, LifecycleReturnsAbandonedNackedThreeTimes) { } TEST_F(OutstandingDataTest, LifecycleReturnsAbandonedAfterT3rtxExpired) { - buf_.Insert(gen_.Ordered({1}, "B"), kNow, MaxRetransmits(0)); - buf_.Insert(gen_.Ordered({1}, ""), kNow, MaxRetransmits(0)); - buf_.Insert(gen_.Ordered({1}, ""), kNow, MaxRetransmits(0)); - buf_.Insert(gen_.Ordered({1}, "E"), kNow, MaxRetransmits(0), - TimeMs::InfiniteFuture(), LifecycleId(42)); + buf_.Insert(kMessageId, gen_.Ordered({1}, "B"), kNow, MaxRetransmits(0)); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow, MaxRetransmits(0)); + buf_.Insert(kMessageId, gen_.Ordered({1}, ""), kNow, MaxRetransmits(0)); + buf_.Insert(kMessageId, gen_.Ordered({1}, "E"), kNow, MaxRetransmits(0), + Timestamp::PlusInfinity(), LifecycleId(42)); EXPECT_THAT(buf_.GetChunkStatesForTesting(), testing::ElementsAre(Pair(TSN(9), State::kAcked), // @@ -566,7 +538,7 @@ TEST_F(OutstandingDataTest, LifecycleReturnsAbandonedAfterT3rtxExpired) { Pair(TSN(13), State::kAcked))); // T3-rtx triggered. - EXPECT_CALL(on_discard_, Call(IsUnordered(false), StreamID(1), MID(42))) + EXPECT_CALL(on_discard_, Call(StreamID(1), kMessageId)) .WillOnce(Return(false)); buf_.NackAll(); @@ -587,5 +559,119 @@ TEST_F(OutstandingDataTest, LifecycleReturnsAbandonedAfterT3rtxExpired) { EXPECT_FALSE(ack2.has_packet_loss); EXPECT_THAT(ack2.abandoned_lifecycle_ids, ElementsAre(LifecycleId(42))); } + +TEST_F(OutstandingDataTest, GeneratesForwardTsnUntilNextStreamResetTsn) { + // This test generates: + // * Stream 1: TSN 10, 11, 12 + // * Stream 2: TSN 13, 14 + // * Stream 3: TSN 15, 16 + // + // Then it expires chunk 12-15, and ensures that the generated FORWARD-TSN + // only includes up till TSN 12 until the cum ack TSN has reached 12, and then + // 13 and 14 are included, and then after the cum ack TSN has reached 14, then + // 15 is included. + // + // What it shouldn't do, is to generate a FORWARD-TSN directly at the start + // with new TSN=15, and setting [(sid=1, ssn=44), (sid=2, ssn=46), + // (sid=3, ssn=47)], because that will confuse the receiver at TSN=17, + // receiving SID=1, SSN=0 (it's reset!), expecting SSN to be 45. 
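
// Aside, a minimal sketch of the rule this test exercises (not
// OutstandingData's actual code): the advertised FORWARD-TSN may only advance
// across consecutively abandoned TSNs, and never across a stream-reset
// breakpoint that is still ahead of the cumulative ack. The breakpoint
// clearing condition is inferred from the expectations below; all names are
// hypothetical.
#include <cstdint>
#include <set>

uint32_t ForwardTsnUpperBound(uint32_t cum_ack_tsn,
                              const std::set<uint32_t>& abandoned_tsns,
                              const std::set<uint32_t>& reset_breakpoint_tsns) {
  uint32_t new_cumulative_tsn = cum_ack_tsn;
  while (abandoned_tsns.count(new_cumulative_tsn + 1) > 0) {
    uint32_t next = new_cumulative_tsn + 1;
    // A breakpoint stops the FORWARD-TSN here unless the cumulative ack has
    // already reached the TSN just before it.
    if (reset_breakpoint_tsns.count(next) > 0 && next > cum_ack_tsn + 1) {
      break;
    }
    new_cumulative_tsn = next;
  }
  return new_cumulative_tsn;
}
// With cum ack 11, abandoned TSNs 12..15 and breakpoints at 13 and 15, this
// returns 12, matching the first CreateForwardTsn() expectation below; after
// the cum ack reaches 12 it returns 14, and after 14 it returns 15.
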
+ constexpr DataGeneratorOptions kStream1 = {.stream_id = StreamID(1)}; + constexpr DataGeneratorOptions kStream2 = {.stream_id = StreamID(2)}; + constexpr DataGeneratorOptions kStream3 = {.stream_id = StreamID(3)}; + constexpr MaxRetransmits kNoRtx = MaxRetransmits(0); + EXPECT_CALL(on_discard_, Call).WillRepeatedly(Return(false)); + + // TSN 10-12 + buf_.Insert(OutgoingMessageId(0), gen_.Ordered({1}, "BE", kStream1), kNow, + kNoRtx); + buf_.Insert(OutgoingMessageId(1), gen_.Ordered({1}, "BE", kStream1), kNow, + kNoRtx); + buf_.Insert(OutgoingMessageId(2), gen_.Ordered({1}, "BE", kStream1), kNow, + kNoRtx); + + buf_.BeginResetStreams(); + + // TSN 13, 14 + buf_.Insert(OutgoingMessageId(3), gen_.Ordered({1}, "BE", kStream2), kNow, + kNoRtx); + buf_.Insert(OutgoingMessageId(4), gen_.Ordered({1}, "BE", kStream2), kNow, + kNoRtx); + + buf_.BeginResetStreams(); + + // TSN 15, 16 + buf_.Insert(OutgoingMessageId(5), gen_.Ordered({1}, "BE", kStream3), kNow, + kNoRtx); + buf_.Insert(OutgoingMessageId(6), gen_.Ordered({1}, "BE", kStream3), kNow); + + EXPECT_FALSE(buf_.ShouldSendForwardTsn()); + + buf_.HandleSack(unwrapper_.Unwrap(TSN(11)), {}, false); + buf_.NackAll(); + EXPECT_THAT(buf_.GetChunkStatesForTesting(), + ElementsAre(Pair(TSN(11), State::kAcked), // + Pair(TSN(12), State::kAbandoned), // + Pair(TSN(13), State::kAbandoned), // + Pair(TSN(14), State::kAbandoned), // + Pair(TSN(15), State::kAbandoned), // + Pair(TSN(16), State::kToBeRetransmitted))); + + EXPECT_TRUE(buf_.ShouldSendForwardTsn()); + EXPECT_THAT( + buf_.CreateForwardTsn(), + AllOf(Property(&ForwardTsnChunk::new_cumulative_tsn, TSN(12)), + Property(&ForwardTsnChunk::skipped_streams, + UnorderedElementsAre(ForwardTsnChunk::SkippedStream( + StreamID(1), SSN(44)))))); + + // Ack 12, allowing a FORWARD-TSN that spans to TSN=14 to be created. + buf_.HandleSack(unwrapper_.Unwrap(TSN(12)), {}, false); + EXPECT_TRUE(buf_.ShouldSendForwardTsn()); + EXPECT_THAT( + buf_.CreateForwardTsn(), + AllOf(Property(&ForwardTsnChunk::new_cumulative_tsn, TSN(14)), + Property(&ForwardTsnChunk::skipped_streams, + UnorderedElementsAre(ForwardTsnChunk::SkippedStream( + StreamID(2), SSN(46)))))); + + // Ack 13, allowing a FORWARD-TSN that spans to TSN=14 to be created. + buf_.HandleSack(unwrapper_.Unwrap(TSN(13)), {}, false); + EXPECT_TRUE(buf_.ShouldSendForwardTsn()); + EXPECT_THAT( + buf_.CreateForwardTsn(), + AllOf(Property(&ForwardTsnChunk::new_cumulative_tsn, TSN(14)), + Property(&ForwardTsnChunk::skipped_streams, + UnorderedElementsAre(ForwardTsnChunk::SkippedStream( + StreamID(2), SSN(46)))))); + + // Ack 14, allowing a FORWARD-TSN that spans to TSN=15 to be created. 
+ buf_.HandleSack(unwrapper_.Unwrap(TSN(14)), {}, false); + EXPECT_TRUE(buf_.ShouldSendForwardTsn()); + EXPECT_THAT( + buf_.CreateForwardTsn(), + AllOf(Property(&ForwardTsnChunk::new_cumulative_tsn, TSN(15)), + Property(&ForwardTsnChunk::skipped_streams, + UnorderedElementsAre(ForwardTsnChunk::SkippedStream( + StreamID(3), SSN(47)))))); + + buf_.HandleSack(unwrapper_.Unwrap(TSN(15)), {}, false); + EXPECT_FALSE(buf_.ShouldSendForwardTsn()); +} + +TEST_F(OutstandingDataTest, TreatsUnackedPayloadBytesDifferentFromPacketBytes) { + buf_.Insert(kMessageId, gen_.Ordered({1}, "BE"), kNow); + + EXPECT_EQ(buf_.unacked_payload_bytes(), 1u); + EXPECT_EQ(buf_.unacked_packet_bytes(), + DataChunk::kHeaderSize + RoundUpTo4(1)); + EXPECT_EQ(buf_.unacked_items(), 1u); + + buf_.Insert(kMessageId, gen_.Ordered({1}, "BE"), kNow); + EXPECT_EQ(buf_.unacked_payload_bytes(), 2u); + EXPECT_EQ(buf_.unacked_packet_bytes(), + 2 * (DataChunk::kHeaderSize + RoundUpTo4(1))); + EXPECT_EQ(buf_.unacked_items(), 2u); +} + } // namespace } // namespace dcsctp diff --git a/net/dcsctp/tx/retransmission_error_counter.h b/net/dcsctp/tx/retransmission_error_counter.h index 7078c78192..589e66b472 100644 --- a/net/dcsctp/tx/retransmission_error_counter.h +++ b/net/dcsctp/tx/retransmission_error_counter.h @@ -42,7 +42,7 @@ class RetransmissionErrorCounter { private: const absl::string_view log_prefix_; - const absl::optional limit_; + const std::optional limit_; int counter_ = 0; }; } // namespace dcsctp diff --git a/net/dcsctp/tx/retransmission_error_counter_test.cc b/net/dcsctp/tx/retransmission_error_counter_test.cc index 67bbc0bec5..dc53740f0f 100644 --- a/net/dcsctp/tx/retransmission_error_counter_test.cc +++ b/net/dcsctp/tx/retransmission_error_counter_test.cc @@ -74,7 +74,7 @@ TEST(RetransmissionErrorCounterTest, ClearingCounter) { TEST(RetransmissionErrorCounterTest, CanBeLimitless) { DcSctpOptions options; - options.max_retransmissions = absl::nullopt; + options.max_retransmissions = std::nullopt; RetransmissionErrorCounter counter("log: ", options); for (int i = 0; i < 100; ++i) { EXPECT_TRUE(counter.Increment("test")); diff --git a/net/dcsctp/tx/retransmission_queue.cc b/net/dcsctp/tx/retransmission_queue.cc index 93084cc27b..dae64a0207 100644 --- a/net/dcsctp/tx/retransmission_queue.cc +++ b/net/dcsctp/tx/retransmission_queue.cc @@ -14,6 +14,7 @@ #include #include #include +#include #include #include #include @@ -21,11 +22,9 @@ #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/common/math.h" #include "net/dcsctp/common/sequence_numbers.h" -#include "net/dcsctp/common/str_join.h" #include "net/dcsctp/packet/chunk/data_chunk.h" #include "net/dcsctp/packet/chunk/forward_tsn_chunk.h" #include "net/dcsctp/packet/chunk/forward_tsn_common.h" @@ -40,13 +39,13 @@ #include "net/dcsctp/tx/send_queue.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/strings/str_join.h" #include "rtc_base/strings/string_builder.h" namespace dcsctp { namespace { - -// Allow sending only slightly less than an MTU, to account for headers. 
-constexpr float kMinBytesRequiredToSendFactor = 0.9; +using ::webrtc::TimeDelta; +using ::webrtc::Timestamp; } // namespace RetransmissionQueue::RetransmissionQueue( @@ -55,7 +54,7 @@ RetransmissionQueue::RetransmissionQueue( TSN my_initial_tsn, size_t a_rwnd, SendQueue& send_queue, - std::function on_new_rtt, + std::function on_new_rtt, std::function on_clear_retransmission_counter, Timer& t3_rtx, const DcSctpOptions& options, @@ -63,7 +62,6 @@ RetransmissionQueue::RetransmissionQueue( bool use_message_interleaving) : callbacks_(*callbacks), options_(options), - min_bytes_required_to_send_(options.mtu * kMinBytesRequiredToSendFactor), partial_reliability_(supports_partial_reliability), log_prefix_(log_prefix), data_chunk_header_size_(use_message_interleaving @@ -84,10 +82,9 @@ RetransmissionQueue::RetransmissionQueue( send_queue_(send_queue), outstanding_data_( data_chunk_header_size_, - tsn_unwrapper_.Unwrap(my_initial_tsn), tsn_unwrapper_.Unwrap(TSN(*my_initial_tsn - 1)), - [this](IsUnordered unordered, StreamID stream_id, MID message_id) { - return send_queue_.Discard(unordered, stream_id, message_id); + [this](StreamID stream_id, OutgoingMessageId message_id) { + return send_queue_.Discard(stream_id, message_id); }) {} bool RetransmissionQueue::IsConsistent() const { @@ -109,17 +106,17 @@ void RetransmissionQueue::MaybeExitFastRecovery( RTC_DLOG(LS_VERBOSE) << log_prefix_ << "exit_point=" << *fast_recovery_exit_tsn_->Wrap() << " reached - exiting fast recovery"; - fast_recovery_exit_tsn_ = absl::nullopt; + fast_recovery_exit_tsn_ = std::nullopt; } } void RetransmissionQueue::HandleIncreasedCumulativeTsnAck( - size_t outstanding_bytes, + size_t unacked_packet_bytes, size_t total_bytes_acked) { // Allow some margin for classifying as fully utilized, due to e.g. that too // small packets (less than kMinimumFragmentedPayload) are not sent + // overhead. - bool is_fully_utilized = outstanding_bytes + options_.mtu >= cwnd_; + bool is_fully_utilized = unacked_packet_bytes + options_.mtu >= cwnd_; size_t old_cwnd = cwnd_; if (phase() == CongestionAlgorithmPhase::kSlowStart) { if (is_fully_utilized && !is_in_fast_recovery()) { @@ -167,7 +164,8 @@ void RetransmissionQueue::HandleIncreasedCumulativeTsnAck( } } -void RetransmissionQueue::HandlePacketLoss(UnwrappedTSN highest_tsn_acked) { +void RetransmissionQueue::HandlePacketLoss( + UnwrappedTSN /* highest_tsn_acked */) { if (!is_in_fast_recovery()) { // https://tools.ietf.org/html/rfc4960#section-7.2.4 // "If not in Fast Recovery, adjust the ssthresh and cwnd of the @@ -204,14 +202,14 @@ void RetransmissionQueue::HandlePacketLoss(UnwrappedTSN highest_tsn_acked) { } void RetransmissionQueue::UpdateReceiverWindow(uint32_t a_rwnd) { - rwnd_ = outstanding_data_.outstanding_bytes() >= a_rwnd + rwnd_ = outstanding_data_.unacked_payload_bytes() >= a_rwnd ? 0 - : a_rwnd - outstanding_data_.outstanding_bytes(); + : a_rwnd - outstanding_data_.unacked_payload_bytes(); } void RetransmissionQueue::StartT3RtxTimerIfOutstandingData() { - // Note: Can't use `outstanding_bytes()` as that one doesn't count chunks to - // be retransmitted. + // Note: Can't use `unacked_packet_bytes()` as that one doesn't count chunks + // to be retransmitted. 
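
// Aside, a minimal sketch of the window growth performed in
// HandleIncreasedCumulativeTsnAck above, following RFC 4960 sections 7.2.1
// and 7.2.2 with hypothetical plain types; the real method has additional
// conditions (fast recovery, logging) not shown here.
#include <algorithm>
#include <cstddef>

struct CwndState {
  size_t cwnd;
  size_t ssthresh;
  size_t partial_bytes_acked = 0;
};

void OnCumulativeAckAdvanced(CwndState& s, size_t bytes_acked, size_t mtu,
                             bool window_fully_utilized) {
  if (s.cwnd <= s.ssthresh) {
    // Slow start: grow by at most one MTU per SACK, and only while the
    // congestion window was actually being used.
    if (window_fully_utilized) {
      s.cwnd += std::min(bytes_acked, mtu);
    }
  } else {
    // Congestion avoidance: accumulate acked bytes and grow by one MTU once a
    // full window's worth has been acknowledged.
    s.partial_bytes_acked += bytes_acked;
    if (s.partial_bytes_acked >= s.cwnd && window_fully_utilized) {
      s.partial_bytes_acked -= s.cwnd;
      s.cwnd += mtu;
    }
  }
}
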
if (outstanding_data_.empty()) { // https://tools.ietf.org/html/rfc4960#section-6.3.2 // "Whenever all outstanding data sent to an address have been @@ -257,14 +255,14 @@ bool RetransmissionQueue::IsSackValid(const SackChunk& sack) const { return true; } -bool RetransmissionQueue::HandleSack(TimeMs now, const SackChunk& sack) { +bool RetransmissionQueue::HandleSack(Timestamp now, const SackChunk& sack) { if (!IsSackValid(sack)) { return false; } UnwrappedTSN old_last_cumulative_tsn_ack = outstanding_data_.last_cumulative_tsn_ack(); - size_t old_outstanding_bytes = outstanding_data_.outstanding_bytes(); + size_t old_unacked_packet_bytes = outstanding_data_.unacked_packet_bytes(); size_t old_rwnd = rwnd_; UnwrappedTSN cumulative_tsn_ack = tsn_unwrapper_.Unwrap(sack.cumulative_tsn_ack()); @@ -301,10 +299,10 @@ bool RetransmissionQueue::HandleSack(TimeMs now, const SackChunk& sack) { RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Received SACK, cum_tsn_ack=" << *cumulative_tsn_ack.Wrap() << " (" << *old_last_cumulative_tsn_ack.Wrap() - << "), outstanding_bytes=" - << outstanding_data_.outstanding_bytes() << " (" - << old_outstanding_bytes << "), rwnd=" << rwnd_ << " (" - << old_rwnd << ")"; + << "), unacked_packet_bytes=" + << outstanding_data_.unacked_packet_bytes() << " (" + << old_unacked_packet_bytes << "), rwnd=" << rwnd_ + << " (" << old_rwnd << ")"; if (cumulative_tsn_ack > old_last_cumulative_tsn_ack) { // https://tools.ietf.org/html/rfc4960#section-6.3.2 @@ -315,7 +313,7 @@ bool RetransmissionQueue::HandleSack(TimeMs now, const SackChunk& sack) { // Note: It may be started again in a bit further down. t3_rtx_.Stop(); - HandleIncreasedCumulativeTsnAck(old_outstanding_bytes, + HandleIncreasedCumulativeTsnAck(old_unacked_packet_bytes, ack_info.bytes_acked); } @@ -335,7 +333,7 @@ bool RetransmissionQueue::HandleSack(TimeMs now, const SackChunk& sack) { return true; } -void RetransmissionQueue::UpdateRTT(TimeMs now, +void RetransmissionQueue::UpdateRTT(Timestamp now, UnwrappedTSN cumulative_tsn_ack) { // RTT updating is flawed in SCTP, as explained in e.g. Pedersen J, Griwodz C, // Halvorsen P (2006) Considerations of SCTP retransmission delays for thin @@ -345,17 +343,16 @@ void RetransmissionQueue::UpdateRTT(TimeMs now, // TODO(boivie): Consider occasionally sending DATA chunks with I-bit set and // use only those packets for measurement. - absl::optional rtt = - outstanding_data_.MeasureRTT(now, cumulative_tsn_ack); + TimeDelta rtt = outstanding_data_.MeasureRTT(now, cumulative_tsn_ack); - if (rtt.has_value()) { - on_new_rtt_(*rtt); + if (rtt.IsFinite()) { + on_new_rtt_(rtt); } } void RetransmissionQueue::HandleT3RtxTimerExpiry() { size_t old_cwnd = cwnd_; - size_t old_outstanding_bytes = outstanding_bytes(); + size_t old_unacked_packet_bytes = unacked_packet_bytes(); // https://tools.ietf.org/html/rfc4960#section-6.3.3 // "For the destination address for which the timer expires, adjust // its ssthresh with rules defined in Section 7.2.3 and set the cwnd <- MTU." @@ -392,8 +389,8 @@ void RetransmissionQueue::HandleT3RtxTimerExpiry() { RTC_DLOG(LS_INFO) << log_prefix_ << "t3-rtx expired. 
new cwnd=" << cwnd_ << " (" << old_cwnd << "), ssthresh=" << ssthresh_ - << ", outstanding_bytes " << outstanding_bytes() << " (" - << old_outstanding_bytes << ")"; + << ", unacked_packet_bytes " << unacked_packet_bytes() + << " (" << old_unacked_packet_bytes << ")"; RTC_DCHECK(IsConsistent()); } @@ -402,7 +399,7 @@ RetransmissionQueue::GetChunksForFastRetransmit(size_t bytes_in_packet) { RTC_DCHECK(outstanding_data_.has_data_to_be_fast_retransmitted()); RTC_DCHECK(IsDivisibleBy4(bytes_in_packet)); std::vector> to_be_sent; - size_t old_outstanding_bytes = outstanding_bytes(); + size_t old_unacked_packet_bytes = unacked_packet_bytes(); to_be_sent = outstanding_data_.GetChunksToBeFastRetransmitted(bytes_in_packet); @@ -435,35 +432,47 @@ RetransmissionQueue::GetChunksForFastRetransmit(size_t bytes_in_packet) { rtx_bytes_count_ += bytes_retransmitted; RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Fast-retransmitting TSN " - << StrJoin(to_be_sent, ",", - [&](rtc::StringBuilder& sb, - const std::pair& c) { - sb << *c.first; - }) + << webrtc::StrJoin(to_be_sent, ",", + [&](webrtc::StringBuilder& sb, + const std::pair& c) { + sb << *c.first; + }) << " - " << bytes_retransmitted - << " bytes. outstanding_bytes=" << outstanding_bytes() - << " (" << old_outstanding_bytes << ")"; + << " bytes. unacked_packet_bytes=" + << unacked_packet_bytes() << " (" + << old_unacked_packet_bytes << ")"; RTC_DCHECK(IsConsistent()); return to_be_sent; } std::vector> RetransmissionQueue::GetChunksToSend( - TimeMs now, + Timestamp now, size_t bytes_remaining_in_packet) { // Chunks are always padded to even divisible by four. RTC_DCHECK(IsDivisibleBy4(bytes_remaining_in_packet)); std::vector> to_be_sent; - size_t old_outstanding_bytes = outstanding_bytes(); + size_t old_unacked_packet_bytes = unacked_packet_bytes(); size_t old_rwnd = rwnd_; - // Calculate the bandwidth budget (how many bytes that is - // allowed to be sent), and fill that up first with chunks that are - // scheduled to be retransmitted. If there is still budget, send new chunks - // (which will have their TSN assigned here.) + // Calculate the bandwidth budget (how many bytes that is allowed to be sent). + size_t max_packet_bytes_allowed_by_cwnd = + old_unacked_packet_bytes >= cwnd_ ? 0 : cwnd_ - old_unacked_packet_bytes; + size_t max_packet_bytes_allowed_by_rwnd = + RoundUpTo4(rwnd() + data_chunk_header_size_); + + if (outstanding_data_.unacked_items() == 0) { + // https://datatracker.ietf.org/doc/html/rfc4960#section-6.1 + // ... However, regardless of the value of rwnd (including if it is 0), the + // data sender can always have one DATA chunk in flight to the receiver if + // allowed by cwnd (see rule B, below). 
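
// Aside, a minimal sketch of the send-budget computation in GetChunksToSend
// (hypothetical free function, not the member code above): the bytes handed
// out are bounded by what cwnd allows on top of the bytes already in flight,
// by the peer's receiver window, and by the space left in the packet; when
// nothing is in flight, one chunk may be sent regardless of rwnd, per the
// RFC 4960 section 6.1 rule quoted above.
#include <algorithm>
#include <cstddef>

size_t MaxBytesToSend(size_t cwnd, size_t unacked_packet_bytes, size_t rwnd,
                      size_t bytes_remaining_in_packet, size_t mtu,
                      bool nothing_in_flight) {
  size_t allowed_by_cwnd =
      unacked_packet_bytes >= cwnd ? 0 : cwnd - unacked_packet_bytes;
  size_t allowed_by_rwnd = nothing_in_flight ? mtu : rwnd;
  return std::min(
      {allowed_by_cwnd, allowed_by_rwnd, bytes_remaining_in_packet});
}
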
+ max_packet_bytes_allowed_by_rwnd = options_.mtu; + } size_t max_bytes = - RoundDownTo4(std::min(max_bytes_to_send(), bytes_remaining_in_packet)); + RoundDownTo4(std::min(std::min(max_packet_bytes_allowed_by_cwnd, + max_packet_bytes_allowed_by_rwnd), + bytes_remaining_in_packet)); to_be_sent = outstanding_data_.GetChunksToBeRetransmitted(max_bytes); @@ -480,7 +489,7 @@ std::vector> RetransmissionQueue::GetChunksToSend( while (max_bytes > data_chunk_header_size_) { RTC_DCHECK(IsDivisibleBy4(max_bytes)); - absl::optional chunk_opt = + std::optional chunk_opt = send_queue_.Produce(now, max_bytes - data_chunk_header_size_); if (!chunk_opt.has_value()) { break; @@ -488,13 +497,14 @@ std::vector> RetransmissionQueue::GetChunksToSend( size_t chunk_size = GetSerializedChunkSize(chunk_opt->data); max_bytes -= chunk_size; - rwnd_ -= chunk_size; + rwnd_ -= chunk_opt->data.size(); - absl::optional tsn = outstanding_data_.Insert( - chunk_opt->data, now, + std::optional tsn = outstanding_data_.Insert( + chunk_opt->message_id, chunk_opt->data, now, partial_reliability_ ? chunk_opt->max_retransmissions : MaxRetransmits::NoLimit(), - partial_reliability_ ? chunk_opt->expires_at : TimeMs::InfiniteFuture(), + partial_reliability_ ? chunk_opt->expires_at + : Timestamp::PlusInfinity(), chunk_opt->lifecycle_id); if (tsn.has_value()) { @@ -515,31 +525,27 @@ std::vector> RetransmissionQueue::GetChunksToSend( t3_rtx_.Start(); } RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Sending TSN " - << StrJoin(to_be_sent, ",", - [&](rtc::StringBuilder& sb, - const std::pair& c) { - sb << *c.first; - }) + << webrtc::StrJoin(to_be_sent, ",", + [&](webrtc::StringBuilder& sb, + const std::pair& c) { + sb << *c.first; + }) << " - " << absl::c_accumulate( to_be_sent, 0, [&](size_t r, const std::pair& d) { return r + GetSerializedChunkSize(d.second); }) - << " bytes. outstanding_bytes=" << outstanding_bytes() - << " (" << old_outstanding_bytes << "), cwnd=" << cwnd_ + << " bytes. unacked_packet_bytes=" + << unacked_packet_bytes() << " (" + << old_unacked_packet_bytes << "), cwnd=" << cwnd_ << ", rwnd=" << rwnd_ << " (" << old_rwnd << ")"; } RTC_DCHECK(IsConsistent()); return to_be_sent; } -bool RetransmissionQueue::can_send_data() const { - return cwnd_ < options_.avoid_fragmentation_cwnd_mtus * options_.mtu || - max_bytes_to_send() >= min_bytes_required_to_send_; -} - -bool RetransmissionQueue::ShouldSendForwardTsn(TimeMs now) { +bool RetransmissionQueue::ShouldSendForwardTsn(Timestamp now) { if (!partial_reliability_) { return false; } @@ -549,20 +555,6 @@ bool RetransmissionQueue::ShouldSendForwardTsn(TimeMs now) { return ret; } -size_t RetransmissionQueue::max_bytes_to_send() const { - size_t left = outstanding_bytes() >= cwnd_ ? 0 : cwnd_ - outstanding_bytes(); - - if (outstanding_bytes() == 0) { - // https://datatracker.ietf.org/doc/html/rfc4960#section-6.1 - // ... However, regardless of the value of rwnd (including if it is 0), the - // data sender can always have one DATA chunk in flight to the receiver if - // allowed by cwnd (see rule B, below). - return left; - } - - return std::min(rwnd(), left); -} - void RetransmissionQueue::PrepareResetStream(StreamID stream_id) { // TODO(boivie): These calls are now only affecting the send queue. 
The // packet buffer can also change behavior - for example draining the chunk @@ -573,6 +565,10 @@ void RetransmissionQueue::PrepareResetStream(StreamID stream_id) { bool RetransmissionQueue::HasStreamsReadyToBeReset() const { return send_queue_.HasStreamsReadyToBeReset(); } +std::vector RetransmissionQueue::BeginResetStreams() { + outstanding_data_.BeginResetStreams(); + return send_queue_.GetStreamsReadyToBeReset(); +} void RetransmissionQueue::CommitResetStreams() { send_queue_.CommitResetStreams(); } @@ -615,7 +611,6 @@ void RetransmissionQueue::RestoreFromState( partial_bytes_acked_ = state.tx.partial_bytes_acked; outstanding_data_.ResetSequenceNumbers( - tsn_unwrapper_.Unwrap(TSN(state.tx.next_tsn)), tsn_unwrapper_.Unwrap(TSN(state.tx.next_tsn - 1))); } } // namespace dcsctp diff --git a/net/dcsctp/tx/retransmission_queue.h b/net/dcsctp/tx/retransmission_queue.h index 51e9c5b318..4d8766c148 100644 --- a/net/dcsctp/tx/retransmission_queue.h +++ b/net/dcsctp/tx/retransmission_queue.h @@ -13,13 +13,13 @@ #include #include #include +#include #include #include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/common/sequence_numbers.h" #include "net/dcsctp/packet/chunk/forward_tsn_chunk.h" @@ -60,7 +60,7 @@ class RetransmissionQueue { TSN my_initial_tsn, size_t a_rwnd, SendQueue& send_queue, - std::function on_new_rtt, + std::function on_new_rtt, std::function on_clear_retransmission_counter, Timer& t3_rtx, const DcSctpOptions& options, @@ -69,7 +69,7 @@ class RetransmissionQueue { // Handles a received SACK. Returns true if the `sack` was processed and // false if it was discarded due to received out-of-order and not relevant. - bool HandleSack(TimeMs now, const SackChunk& sack); + bool HandleSack(webrtc::Timestamp now, const SackChunk& sack); // Handles an expired retransmission timer. void HandleT3RtxTimerExpiry(); @@ -90,7 +90,7 @@ class RetransmissionQueue { // called prior to this method, to abandon expired chunks, as this method will // not expire any chunks. std::vector> GetChunksToSend( - TimeMs now, + webrtc::Timestamp now, size_t bytes_remaining_in_packet); // Returns the internal state of all queued chunks. This is only used in @@ -103,6 +103,10 @@ class RetransmissionQueue { // Returns the next TSN that will be allocated for sent DATA chunks. TSN next_tsn() const { return outstanding_data_.next_tsn().Wrap(); } + TSN last_assigned_tsn() const { + return UnwrappedTSN::AddTo(outstanding_data_.next_tsn(), -1).Wrap(); + } + // Returns the size of the congestion window, in bytes. This is the number of // bytes that may be in-flight. size_t cwnd() const { return cwnd_; } @@ -116,23 +120,18 @@ class RetransmissionQueue { size_t rtx_packets_count() const { return rtx_packets_count_; } uint64_t rtx_bytes_count() const { return rtx_bytes_count_; } - // Returns the number of bytes of packets that are in-flight. - size_t outstanding_bytes() const { - return outstanding_data_.outstanding_bytes(); + // How many inflight bytes there are, as sent on the wire as packets. + size_t unacked_packet_bytes() const { + return outstanding_data_.unacked_packet_bytes(); } // Returns the number of DATA chunks that are in-flight. - size_t outstanding_items() const { - return outstanding_data_.outstanding_items(); - } - - // Indicates if the congestion control algorithm allows data to be sent. 
- bool can_send_data() const; + size_t unacked_items() const { return outstanding_data_.unacked_items(); } // Given the current time `now`, it will evaluate if there are chunks that // have expired and that need to be discarded. It returns true if a // FORWARD-TSN should be sent. - bool ShouldSendForwardTsn(TimeMs now); + bool ShouldSendForwardTsn(webrtc::Timestamp now); // Creates a FORWARD-TSN chunk. ForwardTsnChunk CreateForwardTsn() const { @@ -148,9 +147,7 @@ class RetransmissionQueue { // to stream resetting. void PrepareResetStream(StreamID stream_id); bool HasStreamsReadyToBeReset() const; - std::vector GetStreamsReadyToBeReset() const { - return send_queue_.GetStreamsReadyToBeReset(); - } + std::vector BeginResetStreams(); void CommitResetStreams(); void RollbackResetStreams(); @@ -183,7 +180,7 @@ class RetransmissionQueue { // When a SACK chunk is received, this method will be called which _may_ call // into the `RetransmissionTimeout` to update the RTO. - void UpdateRTT(TimeMs now, UnwrappedTSN cumulative_tsn_ack); + void UpdateRTT(webrtc::Timestamp now, UnwrappedTSN cumulative_tsn_ack); // If the congestion control is in "fast recovery mode", this may be exited // now. @@ -195,7 +192,7 @@ class RetransmissionQueue { // Update the congestion control algorithm given as the cumulative ack TSN // value has increased, as reported in an incoming SACK chunk. - void HandleIncreasedCumulativeTsnAck(size_t outstanding_bytes, + void HandleIncreasedCumulativeTsnAck(size_t unacked_packet_bytes, size_t total_bytes_acked); // Update the congestion control algorithm, given as packet loss has been // detected, as reported in an incoming SACK chunk. @@ -213,22 +210,15 @@ class RetransmissionQueue { : CongestionAlgorithmPhase::kCongestionAvoidance; } - // Returns the number of bytes that may be sent in a single packet according - // to the congestion control algorithm. - size_t max_bytes_to_send() const; - DcSctpSocketCallbacks& callbacks_; const DcSctpOptions options_; - // The minimum bytes required to be available in the congestion window to - // allow packets to be sent - to avoid sending too small packets. - const size_t min_bytes_required_to_send_; // If the peer supports RFC3758 - SCTP Partial Reliability Extension. const bool partial_reliability_; const absl::string_view log_prefix_; // The size of the data chunk (DATA/I-DATA) header that is used. const size_t data_chunk_header_size_; // Called when a new RTT measurement has been done - const std::function on_new_rtt_; + const std::function on_new_rtt_; // Called when a SACK has been seen that cleared the retransmission counter. const std::function on_clear_retransmission_counter_; // The retransmission counter. @@ -251,7 +241,7 @@ class RetransmissionQueue { // If set, fast recovery is enabled until this TSN has been cumulative // acked. - absl::optional fast_recovery_exit_tsn_ = absl::nullopt; + std::optional fast_recovery_exit_tsn_ = std::nullopt; // The send queue. 
SendQueue& send_queue_; diff --git a/net/dcsctp/tx/retransmission_queue_test.cc b/net/dcsctp/tx/retransmission_queue_test.cc index e62c030bfa..2f2f3943c4 100644 --- a/net/dcsctp/tx/retransmission_queue_test.cc +++ b/net/dcsctp/tx/retransmission_queue_test.cc @@ -13,13 +13,14 @@ #include #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/task_queue/task_queue_base.h" #include "net/dcsctp/common/handover_testing.h" +#include "net/dcsctp/common/internal_types.h" #include "net/dcsctp/common/math.h" #include "net/dcsctp/packet/chunk/data_chunk.h" #include "net/dcsctp/packet/chunk/forward_tsn_chunk.h" @@ -44,15 +45,19 @@ using ::testing::MockFunction; using State = ::dcsctp::RetransmissionQueue::State; using ::testing::_; using ::testing::ElementsAre; +using ::testing::Field; using ::testing::IsEmpty; using ::testing::NiceMock; using ::testing::Pair; using ::testing::Return; using ::testing::SizeIs; using ::testing::UnorderedElementsAre; +using ::webrtc::TimeDelta; +using ::webrtc::Timestamp; constexpr uint32_t kArwnd = 100000; constexpr uint32_t kMaxMtu = 1191; +constexpr OutgoingMessageId kMessageId = OutgoingMessageId(42); DcSctpOptions MakeOptions() { DcSctpOptions options; @@ -71,12 +76,14 @@ class RetransmissionQueueTest : public testing::Test { }), timer_(timer_manager_.CreateTimer( "test/t3_rtx", - []() { return absl::nullopt; }, - TimerOptions(options_.rto_initial))) {} - - std::function CreateChunk() { - return [this](TimeMs now, size_t max_size) { - return SendQueue::DataToSend(gen_.Ordered({1, 2, 3, 4}, "BE")); + []() { return TimeDelta::Zero(); }, + TimerOptions(options_.rto_initial.ToTimeDelta()))) {} + + std::function CreateChunk( + OutgoingMessageId message_id) { + return [this, message_id](Timestamp /* now */, size_t /* max_size */) { + return SendQueue::DataToSend(message_id, + gen_.Ordered({1, 2, 3, 4}, "BE")); }; } @@ -122,10 +129,10 @@ class RetransmissionQueueTest : public testing::Test { MockDcSctpSocketCallbacks callbacks_; DcSctpOptions options_; DataGenerator gen_; - TimeMs now_ = TimeMs(0); + Timestamp now_ = Timestamp::Zero(); FakeTimeoutManager timeout_manager_; TimerManager timer_manager_; - NiceMock> on_rtt_; + NiceMock> on_rtt_; NiceMock> on_clear_retransmission_counter_; NiceMock producer_; std::unique_ptr timer_; @@ -140,8 +147,8 @@ TEST_F(RetransmissionQueueTest, InitialAckedPrevTsn) { TEST_F(RetransmissionQueueTest, SendOneChunk) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(0))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(10))); @@ -153,8 +160,8 @@ TEST_F(RetransmissionQueueTest, SendOneChunk) { TEST_F(RetransmissionQueueTest, SendOneChunkAndAck) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(0))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(10))); @@ -167,10 +174,10 @@ TEST_F(RetransmissionQueueTest, SendOneChunkAndAck) { TEST_F(RetransmissionQueueTest, SendThreeChunksAndAckTwo) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - 
.WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(0))) + .WillOnce(CreateChunk(OutgoingMessageId(1))) + .WillOnce(CreateChunk(OutgoingMessageId(2))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(10), TSN(11), TSN(12))); @@ -185,15 +192,15 @@ TEST_F(RetransmissionQueueTest, SendThreeChunksAndAckTwo) { TEST_F(RetransmissionQueueTest, AckWithGapBlocksFromRFC4960Section334) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(0))) + .WillOnce(CreateChunk(OutgoingMessageId(1))) + .WillOnce(CreateChunk(OutgoingMessageId(2))) + .WillOnce(CreateChunk(OutgoingMessageId(3))) + .WillOnce(CreateChunk(OutgoingMessageId(4))) + .WillOnce(CreateChunk(OutgoingMessageId(5))) + .WillOnce(CreateChunk(OutgoingMessageId(6))) + .WillOnce(CreateChunk(OutgoingMessageId(7))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(10), TSN(11), TSN(12), TSN(13), TSN(14), @@ -216,15 +223,15 @@ TEST_F(RetransmissionQueueTest, AckWithGapBlocksFromRFC4960Section334) { TEST_F(RetransmissionQueueTest, ResendPacketsWhenNackedThreeTimes) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(0))) + .WillOnce(CreateChunk(OutgoingMessageId(1))) + .WillOnce(CreateChunk(OutgoingMessageId(2))) + .WillOnce(CreateChunk(OutgoingMessageId(3))) + .WillOnce(CreateChunk(OutgoingMessageId(4))) + .WillOnce(CreateChunk(OutgoingMessageId(5))) + .WillOnce(CreateChunk(OutgoingMessageId(6))) + .WillOnce(CreateChunk(OutgoingMessageId(7))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(10), TSN(11), TSN(12), TSN(13), TSN(14), @@ -235,8 +242,8 @@ TEST_F(RetransmissionQueueTest, ResendPacketsWhenNackedThreeTimes) { // Send 18 EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(8))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(18))); // Ack 12, 14-15, 17-18 @@ -256,8 +263,8 @@ TEST_F(RetransmissionQueueTest, ResendPacketsWhenNackedThreeTimes) { // Send 19 EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(9))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(19))); // Ack 12, 14-15, 17-19 @@ -268,8 +275,8 @@ TEST_F(RetransmissionQueueTest, ResendPacketsWhenNackedThreeTimes) { // Send 20 EXPECT_CALL(producer_, Produce) - 
.WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(10))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(20))); // Ack 12, 14-15, 17-20 @@ -313,19 +320,19 @@ TEST_F(RetransmissionQueueTest, RestartsT3RtxOnRetransmitFirstOutstandingTSN) { // TSN, it will also restart T3-RTX. RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(0))) + .WillOnce(CreateChunk(OutgoingMessageId(1))) + .WillOnce(CreateChunk(OutgoingMessageId(2))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); - static constexpr TimeMs kStartTime(100000); + static constexpr Timestamp kStartTime = Timestamp::Seconds(100); now_ = kStartTime; EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(10), TSN(11), TSN(12))); // Ack 10, 12, after 100ms. - now_ += DurationMs(100); + now_ += TimeDelta::Millis(100); queue.HandleSack( now_, SackChunk(TSN(10), kArwnd, {SackChunk::GapAckBlock(2, 2)}, {})); @@ -336,23 +343,23 @@ TEST_F(RetransmissionQueueTest, RestartsT3RtxOnRetransmitFirstOutstandingTSN) { // Send 13 EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(3))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(13))); // Ack 10, 12-13, after 100ms. - now_ += DurationMs(100); + now_ += TimeDelta::Millis(100); queue.HandleSack( now_, SackChunk(TSN(10), kArwnd, {SackChunk::GapAckBlock(2, 3)}, {})); // Send 14 EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(4))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(14))); // Ack 10, 12-14, after 100 ms. - now_ += DurationMs(100); + now_ += TimeDelta::Millis(100); queue.HandleSack( now_, SackChunk(TSN(10), kArwnd, {SackChunk::GapAckBlock(2, 4)}, {})); @@ -378,11 +385,11 @@ TEST_F(RetransmissionQueueTest, RestartsT3RtxOnRetransmitFirstOutstandingTSN) { // Verify that the timer was really restarted when fast-retransmitting. The // timeout is `options_.rto_initial`, so advance the time just before that. - now_ += options_.rto_initial - DurationMs(1); + now_ += options_.rto_initial.ToTimeDelta() - TimeDelta::Millis(1); EXPECT_FALSE(timeout_manager_.GetNextExpiredTimeout().has_value()); // And ensure it really is running. - now_ += DurationMs(1); + now_ += TimeDelta::Millis(1); ASSERT_HAS_VALUE_AND_ASSIGN(TimeoutID timeout, timeout_manager_.GetNextExpiredTimeout()); // An expired timeout has to be handled (asserts validate this). 
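
// Aside, a minimal sketch of the timer rule the test above checks (not the
// Timer/RetransmissionQueue classes): per RFC 4960 section 6.3.2, T3-RTX is
// started when data is put in flight and restarted whenever the earliest
// outstanding TSN is (re)transmitted, e.g. by a fast retransmission. The
// types and names are hypothetical.
#include <cstdint>
#include <optional>

struct T3RtxState {
  std::optional<uint64_t> expiry_ms;  // Unset while the timer is stopped.
};

void OnChunkSent(T3RtxState& timer, uint64_t now_ms, uint64_t rto_ms,
                 uint32_t sent_tsn, uint32_t earliest_outstanding_tsn) {
  // Start the timer if it is not running, or restart it if the chunk being
  // sent is the earliest TSN still in flight.
  if (!timer.expiry_ms.has_value() || sent_tsn == earliest_outstanding_tsn) {
    timer.expiry_ms = now_ms + rto_ms;
  }
}
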
@@ -392,13 +399,15 @@ TEST_F(RetransmissionQueueTest, RestartsT3RtxOnRetransmitFirstOutstandingTSN) { TEST_F(RetransmissionQueueTest, CanOnlyProduceTwoPacketsButWantsToSendThree) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce([this](TimeMs, size_t) { - return SendQueue::DataToSend(gen_.Ordered({1, 2, 3, 4}, "BE")); + .WillOnce([this](Timestamp, size_t) { + return SendQueue::DataToSend(OutgoingMessageId(0), + gen_.Ordered({1, 2, 3, 4}, "BE")); }) - .WillOnce([this](TimeMs, size_t) { - return SendQueue::DataToSend(gen_.Ordered({1, 2, 3, 4}, "BE")); + .WillOnce([this](Timestamp, size_t) { + return SendQueue::DataToSend(OutgoingMessageId(1), + gen_.Ordered({1, 2, 3, 4}, "BE")); }) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); std::vector> chunks_to_send = queue.GetChunksToSend(now_, 1000); @@ -413,10 +422,11 @@ TEST_F(RetransmissionQueueTest, CanOnlyProduceTwoPacketsButWantsToSendThree) { TEST_F(RetransmissionQueueTest, RetransmitsOnT3Expiry) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce([this](TimeMs, size_t) { - return SendQueue::DataToSend(gen_.Ordered({1, 2, 3, 4}, "BE")); + .WillOnce([this](Timestamp, size_t) { + return SendQueue::DataToSend(OutgoingMessageId(0), + gen_.Ordered({1, 2, 3, 4}, "BE")); }) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_FALSE(queue.ShouldSendForwardTsn(now_)); std::vector> chunks_to_send = @@ -451,12 +461,12 @@ TEST_F(RetransmissionQueueTest, LimitedRetransmissionOnlyWithRfc3758Support) { RetransmissionQueue queue = CreateQueue(/*supports_partial_reliability=*/false); EXPECT_CALL(producer_, Produce) - .WillOnce([this](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "BE")); + .WillOnce([this](Timestamp, size_t) { + SendQueue::DataToSend dts(kMessageId, gen_.Ordered({1, 2, 3, 4}, "BE")); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_FALSE(queue.ShouldSendForwardTsn(now_)); std::vector> chunks_to_send = @@ -473,20 +483,19 @@ TEST_F(RetransmissionQueueTest, LimitedRetransmissionOnlyWithRfc3758Support) { ElementsAre(Pair(TSN(9), State::kAcked), // Pair(TSN(10), State::kToBeRetransmitted))); - EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42))) - .Times(0); + EXPECT_CALL(producer_, Discard(StreamID(1), kMessageId)).Times(0); EXPECT_FALSE(queue.ShouldSendForwardTsn(now_)); } // namespace dcsctp TEST_F(RetransmissionQueueTest, LimitsRetransmissionsAsUdp) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce([this](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "BE")); + .WillOnce([this](Timestamp, size_t) { + SendQueue::DataToSend dts(kMessageId, gen_.Ordered({1, 2, 3, 4}, "BE")); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_FALSE(queue.ShouldSendForwardTsn(now_)); std::vector> chunks_to_send = @@ -497,8 +506,7 @@ TEST_F(RetransmissionQueueTest, LimitsRetransmissionsAsUdp) { Pair(TSN(10), State::kInFlight))); // Will force chunks to be retransmitted - EXPECT_CALL(producer_, Discard(IsUnordered(false), 
StreamID(1), MID(42))) - .Times(1); + EXPECT_CALL(producer_, Discard(StreamID(1), kMessageId)).Times(1); queue.HandleT3RtxTimerExpiry(); @@ -523,12 +531,12 @@ TEST_F(RetransmissionQueueTest, LimitsRetransmissionsAsUdp) { TEST_F(RetransmissionQueueTest, LimitsRetransmissionsToThreeSends) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce([this](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "BE")); + .WillOnce([this](Timestamp, size_t) { + SendQueue::DataToSend dts(kMessageId, gen_.Ordered({1, 2, 3, 4}, "BE")); dts.max_retransmissions = MaxRetransmits(3); return dts; }) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_FALSE(queue.ShouldSendForwardTsn(now_)); std::vector> chunks_to_send = @@ -538,8 +546,7 @@ TEST_F(RetransmissionQueueTest, LimitsRetransmissionsToThreeSends) { ElementsAre(Pair(TSN(9), State::kAcked), // Pair(TSN(10), State::kInFlight))); - EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42))) - .Times(0); + EXPECT_CALL(producer_, Discard(StreamID(1), kMessageId)).Times(0); // Retransmission 1 queue.HandleT3RtxTimerExpiry(); @@ -557,8 +564,7 @@ TEST_F(RetransmissionQueueTest, LimitsRetransmissionsToThreeSends) { EXPECT_THAT(queue.GetChunksToSend(now_, 1000), SizeIs(1)); // Retransmission 4 - not allowed. - EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42))) - .Times(1); + EXPECT_CALL(producer_, Discard(StreamID(1), kMessageId)).Times(1); queue.HandleT3RtxTimerExpiry(); EXPECT_TRUE(queue.ShouldSendForwardTsn(now_)); EXPECT_THAT(queue.GetChunksToSend(now_, 1000), IsEmpty()); @@ -573,15 +579,16 @@ TEST_F(RetransmissionQueueTest, RetransmitsWhenSendBufferIsFullT3Expiry) { static constexpr size_t kCwnd = 1200; queue.set_cwnd(kCwnd); EXPECT_EQ(queue.cwnd(), kCwnd); - EXPECT_EQ(queue.outstanding_bytes(), 0u); - EXPECT_EQ(queue.outstanding_items(), 0u); + EXPECT_EQ(queue.unacked_packet_bytes(), 0u); + EXPECT_EQ(queue.unacked_items(), 0u); std::vector payload(1000); EXPECT_CALL(producer_, Produce) - .WillOnce([this, payload](TimeMs, size_t) { - return SendQueue::DataToSend(gen_.Ordered(payload, "BE")); + .WillOnce([this, payload](Timestamp, size_t) { + return SendQueue::DataToSend(OutgoingMessageId(0), + gen_.Ordered(payload, "BE")); }) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); std::vector> chunks_to_send = queue.GetChunksToSend(now_, 1500); @@ -589,8 +596,9 @@ TEST_F(RetransmissionQueueTest, RetransmitsWhenSendBufferIsFullT3Expiry) { EXPECT_THAT(queue.GetChunkStatesForTesting(), ElementsAre(Pair(TSN(9), State::kAcked), // Pair(TSN(10), State::kInFlight))); - EXPECT_EQ(queue.outstanding_bytes(), payload.size() + DataChunk::kHeaderSize); - EXPECT_EQ(queue.outstanding_items(), 1u); + EXPECT_EQ(queue.unacked_packet_bytes(), + payload.size() + DataChunk::kHeaderSize); + EXPECT_EQ(queue.unacked_items(), 1u); // Will force chunks to be retransmitted queue.HandleT3RtxTimerExpiry(); @@ -598,8 +606,8 @@ TEST_F(RetransmissionQueueTest, RetransmitsWhenSendBufferIsFullT3Expiry) { EXPECT_THAT(queue.GetChunkStatesForTesting(), ElementsAre(Pair(TSN(9), State::kAcked), // Pair(TSN(10), State::kToBeRetransmitted))); - EXPECT_EQ(queue.outstanding_bytes(), 0u); - EXPECT_EQ(queue.outstanding_items(), 0u); + EXPECT_EQ(queue.unacked_packet_bytes(), 0u); + EXPECT_EQ(queue.unacked_items(), 0u); std::vector> chunks_to_rtx = 
queue.GetChunksToSend(now_, 1500); @@ -607,29 +615,31 @@ TEST_F(RetransmissionQueueTest, RetransmitsWhenSendBufferIsFullT3Expiry) { EXPECT_THAT(queue.GetChunkStatesForTesting(), ElementsAre(Pair(TSN(9), State::kAcked), // Pair(TSN(10), State::kInFlight))); - EXPECT_EQ(queue.outstanding_bytes(), payload.size() + DataChunk::kHeaderSize); - EXPECT_EQ(queue.outstanding_items(), 1u); + EXPECT_EQ(queue.unacked_packet_bytes(), + payload.size() + DataChunk::kHeaderSize); + EXPECT_EQ(queue.unacked_items(), 1u); } TEST_F(RetransmissionQueueTest, ProducesValidForwardTsn) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce([this](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "B")); + .WillOnce([this](Timestamp, size_t) { + SendQueue::DataToSend dts(kMessageId, gen_.Ordered({1, 2, 3, 4}, "B")); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillOnce([this](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({5, 6, 7, 8}, "")); + .WillOnce([this](Timestamp, size_t) { + SendQueue::DataToSend dts(kMessageId, gen_.Ordered({5, 6, 7, 8}, "")); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillOnce([this](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({9, 10, 11, 12}, "")); + .WillOnce([this](Timestamp, size_t) { + SendQueue::DataToSend dts(kMessageId, + gen_.Ordered({9, 10, 11, 12}, "")); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); // Send and ack first chunk (TSN 10) std::vector> chunks_to_send = @@ -645,7 +655,7 @@ TEST_F(RetransmissionQueueTest, ProducesValidForwardTsn) { // Chunk 10 is acked, but the remaining are lost queue.HandleSack(now_, SackChunk(TSN(10), kArwnd, {}, {})); - EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42))) + EXPECT_CALL(producer_, Discard(StreamID(1), kMessageId)) .WillOnce(Return(true)); queue.HandleT3RtxTimerExpiry(); @@ -669,22 +679,23 @@ TEST_F(RetransmissionQueueTest, ProducesValidForwardTsn) { TEST_F(RetransmissionQueueTest, ProducesValidForwardTsnWhenFullySent) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce([this](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "B")); + .WillOnce([this](Timestamp, size_t) { + SendQueue::DataToSend dts(kMessageId, gen_.Ordered({1, 2, 3, 4}, "B")); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillOnce([this](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({5, 6, 7, 8}, "")); + .WillOnce([this](Timestamp, size_t) { + SendQueue::DataToSend dts(kMessageId, gen_.Ordered({5, 6, 7, 8}, "")); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillOnce([this](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({9, 10, 11, 12}, "E")); + .WillOnce([this](Timestamp, size_t) { + SendQueue::DataToSend dts(kMessageId, + gen_.Ordered({9, 10, 11, 12}, "E")); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); // Send and ack first chunk (TSN 10) std::vector> chunks_to_send = @@ -700,7 +711,7 @@ TEST_F(RetransmissionQueueTest, ProducesValidForwardTsnWhenFullySent) { // Chunk 10 is acked, but the remaining are lost queue.HandleSack(now_, SackChunk(TSN(10), kArwnd, {}, {})); - EXPECT_CALL(producer_, Discard(IsUnordered(false), 
StreamID(1), MID(42))) + EXPECT_CALL(producer_, Discard(StreamID(1), kMessageId)) .WillOnce(Return(false)); queue.HandleT3RtxTimerExpiry(); @@ -722,35 +733,39 @@ TEST_F(RetransmissionQueueTest, ProducesValidForwardTsnWhenFullySent) { TEST_F(RetransmissionQueueTest, ProducesValidIForwardTsn) { RetransmissionQueue queue = CreateQueue(/*use_message_interleaving=*/true); EXPECT_CALL(producer_, Produce) - .WillOnce([this](TimeMs, size_t) { + .WillOnce([this](Timestamp, size_t) { DataGeneratorOptions opts; opts.stream_id = StreamID(1); - SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "B", opts)); + SendQueue::DataToSend dts(OutgoingMessageId(42), + gen_.Ordered({1, 2, 3, 4}, "B", opts)); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillOnce([this](TimeMs, size_t) { + .WillOnce([this](Timestamp, size_t) { DataGeneratorOptions opts; opts.stream_id = StreamID(2); - SendQueue::DataToSend dts(gen_.Unordered({1, 2, 3, 4}, "B", opts)); + SendQueue::DataToSend dts(OutgoingMessageId(43), + gen_.Unordered({1, 2, 3, 4}, "B", opts)); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillOnce([this](TimeMs, size_t) { + .WillOnce([this](Timestamp, size_t) { DataGeneratorOptions opts; opts.stream_id = StreamID(3); - SendQueue::DataToSend dts(gen_.Ordered({9, 10, 11, 12}, "B", opts)); + SendQueue::DataToSend dts(OutgoingMessageId(44), + gen_.Ordered({9, 10, 11, 12}, "B", opts)); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillOnce([this](TimeMs, size_t) { + .WillOnce([this](Timestamp, size_t) { DataGeneratorOptions opts; opts.stream_id = StreamID(4); - SendQueue::DataToSend dts(gen_.Ordered({13, 14, 15, 16}, "B", opts)); + SendQueue::DataToSend dts(OutgoingMessageId(45), + gen_.Ordered({13, 14, 15, 16}, "B", opts)); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); std::vector> chunks_to_send = queue.GetChunksToSend(now_, 1000); @@ -773,11 +788,11 @@ TEST_F(RetransmissionQueueTest, ProducesValidIForwardTsn) { Pair(TSN(12), State::kNacked), // Pair(TSN(13), State::kAcked))); - EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42))) + EXPECT_CALL(producer_, Discard(StreamID(1), OutgoingMessageId(42))) .WillOnce(Return(true)); - EXPECT_CALL(producer_, Discard(IsUnordered(true), StreamID(2), MID(42))) + EXPECT_CALL(producer_, Discard(StreamID(2), OutgoingMessageId(43))) .WillOnce(Return(true)); - EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(3), MID(42))) + EXPECT_CALL(producer_, Discard(StreamID(3), OutgoingMessageId(44))) .WillOnce(Return(true)); queue.HandleT3RtxTimerExpiry(); @@ -839,20 +854,21 @@ TEST_F(RetransmissionQueueTest, ProducesValidIForwardTsn) { TEST_F(RetransmissionQueueTest, MeasureRTT) { RetransmissionQueue queue = CreateQueue(/*use_message_interleaving=*/true); EXPECT_CALL(producer_, Produce) - .WillOnce([this](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "B")); + .WillOnce([this](Timestamp, size_t) { + SendQueue::DataToSend dts(OutgoingMessageId(0), + gen_.Ordered({1, 2, 3, 4}, "B")); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); std::vector> chunks_to_send = queue.GetChunksToSend(now_, 1000); EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _))); - now_ = now_ + DurationMs(123); + now_ = now_ 
+ TimeDelta::Millis(123); - EXPECT_CALL(on_rtt_, Call(DurationMs(123))).Times(1); + EXPECT_CALL(on_rtt_, Call(TimeDelta::Millis(123))).Times(1); queue.HandleSack(now_, SackChunk(TSN(10), kArwnd, {}, {})); } @@ -868,15 +884,15 @@ TEST_F(RetransmissionQueueTest, ValidateCumTsnAckOnInflightData) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(0))) + .WillOnce(CreateChunk(OutgoingMessageId(1))) + .WillOnce(CreateChunk(OutgoingMessageId(2))) + .WillOnce(CreateChunk(OutgoingMessageId(3))) + .WillOnce(CreateChunk(OutgoingMessageId(4))) + .WillOnce(CreateChunk(OutgoingMessageId(5))) + .WillOnce(CreateChunk(OutgoingMessageId(6))) + .WillOnce(CreateChunk(OutgoingMessageId(7))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(10), TSN(11), TSN(12), TSN(13), TSN(14), @@ -898,15 +914,15 @@ TEST_F(RetransmissionQueueTest, ValidateCumTsnAckOnInflightData) { TEST_F(RetransmissionQueueTest, HandleGapAckBlocksMatchingNoInflightData) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(0))) + .WillOnce(CreateChunk(OutgoingMessageId(1))) + .WillOnce(CreateChunk(OutgoingMessageId(2))) + .WillOnce(CreateChunk(OutgoingMessageId(3))) + .WillOnce(CreateChunk(OutgoingMessageId(4))) + .WillOnce(CreateChunk(OutgoingMessageId(5))) + .WillOnce(CreateChunk(OutgoingMessageId(6))) + .WillOnce(CreateChunk(OutgoingMessageId(7))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(10), TSN(11), TSN(12), TSN(13), TSN(14), @@ -945,15 +961,15 @@ TEST_F(RetransmissionQueueTest, HandleInvalidGapAckBlocks) { TEST_F(RetransmissionQueueTest, GapAckBlocksDoNotMoveCumTsnAck) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(0))) + .WillOnce(CreateChunk(OutgoingMessageId(1))) + .WillOnce(CreateChunk(OutgoingMessageId(2))) + .WillOnce(CreateChunk(OutgoingMessageId(3))) + .WillOnce(CreateChunk(OutgoingMessageId(4))) + .WillOnce(CreateChunk(OutgoingMessageId(5))) + .WillOnce(CreateChunk(OutgoingMessageId(6))) + .WillOnce(CreateChunk(OutgoingMessageId(7))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), testing::ElementsAre(TSN(10), TSN(11), TSN(12), TSN(13), TSN(14), @@ -983,17 +999,19 @@ TEST_F(RetransmissionQueueTest, StaysWithinAvailableSize) { // See SctpPacketTest::ReturnsCorrectSpaceAvailableToStayWithinMTU for the // magic numbers in this test. 
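// (Worked numbers, assuming RFC 4960 framing: a DATA chunk carries a 16-byte
// header and is padded to a 4-byte boundary. The first fragment serializes to
// 183 + 16 = 199 bytes, padded to 200, so the space offered next drops from
// 1176 to 976; the second fragment, 957 + 16 = 973, pads to exactly 976.)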
EXPECT_CALL(producer_, Produce) - .WillOnce([this](TimeMs, size_t size) { + .WillOnce([this](Timestamp, size_t size) { EXPECT_EQ(size, 1176 - DataChunk::kHeaderSize); std::vector payload(183); - return SendQueue::DataToSend(gen_.Ordered(payload, "BE")); + return SendQueue::DataToSend(OutgoingMessageId(0), + gen_.Ordered(payload, "BE")); }) - .WillOnce([this](TimeMs, size_t size) { + .WillOnce([this](Timestamp, size_t size) { EXPECT_EQ(size, 976 - DataChunk::kHeaderSize); std::vector payload(957); - return SendQueue::DataToSend(gen_.Ordered(payload, "BE")); + return SendQueue::DataToSend(OutgoingMessageId(1), + gen_.Ordered(payload, "BE")); }); std::vector> chunks_to_send = @@ -1004,22 +1022,23 @@ TEST_F(RetransmissionQueueTest, StaysWithinAvailableSize) { TEST_F(RetransmissionQueueTest, AccountsNackedAbandonedChunksAsNotOutstanding) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce([this](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "B")); + .WillOnce([this](Timestamp, size_t) { + SendQueue::DataToSend dts(kMessageId, gen_.Ordered({1, 2, 3, 4}, "B")); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillOnce([this](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({5, 6, 7, 8}, "")); + .WillOnce([this](Timestamp, size_t) { + SendQueue::DataToSend dts(kMessageId, gen_.Ordered({5, 6, 7, 8}, "")); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillOnce([this](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({9, 10, 11, 12}, "")); + .WillOnce([this](Timestamp, size_t) { + SendQueue::DataToSend dts(kMessageId, + gen_.Ordered({9, 10, 11, 12}, "")); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); // Send and ack first chunk (TSN 10) std::vector> chunks_to_send = @@ -1031,12 +1050,11 @@ TEST_F(RetransmissionQueueTest, AccountsNackedAbandonedChunksAsNotOutstanding) { Pair(TSN(10), State::kInFlight), // Pair(TSN(11), State::kInFlight), // Pair(TSN(12), State::kInFlight))); - EXPECT_EQ(queue.outstanding_bytes(), (16 + 4) * 3u); - EXPECT_EQ(queue.outstanding_items(), 3u); + EXPECT_EQ(queue.unacked_packet_bytes(), (16 + 4) * 3u); + EXPECT_EQ(queue.unacked_items(), 3u); // Mark the message as lost. - EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42))) - .Times(1); + EXPECT_CALL(producer_, Discard(StreamID(1), kMessageId)).Times(1); queue.HandleT3RtxTimerExpiry(); EXPECT_TRUE(queue.ShouldSendForwardTsn(now_)); @@ -1046,49 +1064,51 @@ TEST_F(RetransmissionQueueTest, AccountsNackedAbandonedChunksAsNotOutstanding) { Pair(TSN(10), State::kAbandoned), // Pair(TSN(11), State::kAbandoned), // Pair(TSN(12), State::kAbandoned))); - EXPECT_EQ(queue.outstanding_bytes(), 0u); - EXPECT_EQ(queue.outstanding_items(), 0u); + EXPECT_EQ(queue.unacked_packet_bytes(), 0u); + EXPECT_EQ(queue.unacked_items(), 0u); // Now ACK those, one at a time. 
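// (For reference: the (16 + 4) * 3 expectation above is the 16-byte DATA
// chunk header plus the 4-byte payload of each of the three in-flight chunks.
// Once the message is abandoned, none of them count as outstanding anymore,
// so the SACKs below should leave unacked_packet_bytes() and unacked_items()
// at zero.)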
queue.HandleSack(now_, SackChunk(TSN(10), kArwnd, {}, {})); - EXPECT_EQ(queue.outstanding_bytes(), 0u); - EXPECT_EQ(queue.outstanding_items(), 0u); + EXPECT_EQ(queue.unacked_packet_bytes(), 0u); + EXPECT_EQ(queue.unacked_items(), 0u); queue.HandleSack(now_, SackChunk(TSN(11), kArwnd, {}, {})); - EXPECT_EQ(queue.outstanding_bytes(), 0u); - EXPECT_EQ(queue.outstanding_items(), 0u); + EXPECT_EQ(queue.unacked_packet_bytes(), 0u); + EXPECT_EQ(queue.unacked_items(), 0u); queue.HandleSack(now_, SackChunk(TSN(12), kArwnd, {}, {})); - EXPECT_EQ(queue.outstanding_bytes(), 0u); - EXPECT_EQ(queue.outstanding_items(), 0u); + EXPECT_EQ(queue.unacked_packet_bytes(), 0u); + EXPECT_EQ(queue.unacked_items(), 0u); } TEST_F(RetransmissionQueueTest, ExpireFromSendQueueWhenPartiallySent) { RetransmissionQueue queue = CreateQueue(); DataGeneratorOptions options; options.stream_id = StreamID(17); - options.message_id = MID(42); - TimeMs test_start = now_; + options.mid = MID(42); + Timestamp test_start = now_; EXPECT_CALL(producer_, Produce) - .WillOnce([&](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "B", options)); - dts.expires_at = TimeMs(test_start + DurationMs(10)); + .WillOnce([&](Timestamp, size_t) { + SendQueue::DataToSend dts(kMessageId, + gen_.Ordered({1, 2, 3, 4}, "B", options)); + dts.expires_at = Timestamp(test_start + TimeDelta::Millis(10)); return dts; }) - .WillOnce([&](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({5, 6, 7, 8}, "", options)); - dts.expires_at = TimeMs(test_start + DurationMs(10)); + .WillOnce([&](Timestamp, size_t) { + SendQueue::DataToSend dts(kMessageId, + gen_.Ordered({5, 6, 7, 8}, "", options)); + dts.expires_at = Timestamp(test_start + TimeDelta::Millis(10)); return dts; }) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); std::vector> chunks_to_send = queue.GetChunksToSend(now_, 24); EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _))); - EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(17), MID(42))) + EXPECT_CALL(producer_, Discard(StreamID(17), kMessageId)) .WillOnce(Return(true)); - now_ += DurationMs(100); + now_ += TimeDelta::Millis(100); EXPECT_THAT(queue.GetChunksToSend(now_, 24), IsEmpty()); @@ -1100,18 +1120,75 @@ TEST_F(RetransmissionQueueTest, ExpireFromSendQueueWhenPartiallySent) { Pair(TSN(12), State::kAbandoned))); // Placeholder end } +TEST_F(RetransmissionQueueTest, ExpireCorrectMessageFromSendQueue) { + RetransmissionQueue queue = CreateQueue(); + Timestamp test_start = now_; + EXPECT_CALL(producer_, Produce) + .WillOnce([&](Timestamp, size_t) { + SendQueue::DataToSend dts( + OutgoingMessageId(42), + gen_.Ordered({1, 2, 3, 4}, "BE", {.mid = MID(0)})); + dts.expires_at = Timestamp(test_start + TimeDelta::Millis(10)); + return dts; + }) + .WillOnce([&](Timestamp, size_t) { + SendQueue::DataToSend dts( + OutgoingMessageId(43), + gen_.Ordered({1, 2, 3, 4}, "BE", {.mid = MID(1)})); + dts.expires_at = Timestamp(test_start + TimeDelta::Millis(10)); + return dts; + }) + // Stream reset - MID reset to zero again. 
+ .WillOnce([&](Timestamp, size_t) { + SendQueue::DataToSend dts( + OutgoingMessageId(44), + gen_.Ordered({1, 2, 3, 4}, "B", {.mid = MID(0)})); + dts.expires_at = Timestamp(test_start + TimeDelta::Millis(10)); + return dts; + }) + .WillOnce([&](Timestamp, size_t) { + SendQueue::DataToSend dts( + OutgoingMessageId(44), + gen_.Ordered({5, 6, 7, 8}, "", {.mid = MID(0)})); + dts.expires_at = Timestamp(test_start + TimeDelta::Millis(10)); + return dts; + }) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); + EXPECT_CALL(producer_, Discard(StreamID(1), OutgoingMessageId(44))) + .WillOnce(Return(true)); + + EXPECT_THAT(queue.GetChunksToSend(now_, 24), + ElementsAre(Pair(TSN(10), Field(&Data::mid, MID(0))))); + EXPECT_THAT(queue.GetChunksToSend(now_, 24), + ElementsAre(Pair(TSN(11), Field(&Data::mid, MID(1))))); + EXPECT_THAT(queue.GetChunksToSend(now_, 24), + ElementsAre(Pair(TSN(12), Field(&Data::mid, MID(0))))); + + now_ += TimeDelta::Millis(100); + EXPECT_THAT(queue.GetChunksToSend(now_, 24), IsEmpty()); + + EXPECT_THAT( + queue.GetChunkStatesForTesting(), + ElementsAre(Pair(TSN(9), State::kAcked), // Initial TSN + Pair(TSN(10), State::kInFlight), // OutgoingMessageId=42, BE + Pair(TSN(11), State::kInFlight), // OutgoingMessageId=43, BE + Pair(TSN(12), State::kAbandoned), // OutgoingMessageId=44, B + Pair(TSN(13), State::kAbandoned), // Produced and expired + Pair(TSN(14), State::kAbandoned))); // Placeholder end +} + TEST_F(RetransmissionQueueTest, LimitsRetransmissionsOnlyWhenNackedThreeTimes) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce([this](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "BE")); + .WillOnce([this](Timestamp, size_t) { + SendQueue::DataToSend dts(kMessageId, gen_.Ordered({1, 2, 3, 4}, "BE")); dts.max_retransmissions = MaxRetransmits(0); return dts; }) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(0))) + .WillOnce(CreateChunk(OutgoingMessageId(1))) + .WillOnce(CreateChunk(OutgoingMessageId(2))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_FALSE(queue.ShouldSendForwardTsn(now_)); @@ -1128,8 +1205,7 @@ TEST_F(RetransmissionQueueTest, LimitsRetransmissionsOnlyWhenNackedThreeTimes) { EXPECT_FALSE(queue.ShouldSendForwardTsn(now_)); - EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42))) - .Times(0); + EXPECT_CALL(producer_, Discard(StreamID(1), kMessageId)).Times(0); queue.HandleSack( now_, SackChunk(TSN(9), kArwnd, {SackChunk::GapAckBlock(2, 2)}, {})); @@ -1155,7 +1231,7 @@ TEST_F(RetransmissionQueueTest, LimitsRetransmissionsOnlyWhenNackedThreeTimes) { EXPECT_FALSE(queue.ShouldSendForwardTsn(now_)); - EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42))) + EXPECT_CALL(producer_, Discard(StreamID(1), kMessageId)) .WillOnce(Return(false)); queue.HandleSack( now_, SackChunk(TSN(9), kArwnd, {SackChunk::GapAckBlock(2, 4)}, {})); @@ -1174,21 +1250,21 @@ TEST_F(RetransmissionQueueTest, AbandonsRtxLimit2WhenNackedNineTimes) { // This is a fairly long test. 
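// (Why nine: per RFC 4960 fast retransmit, a chunk is retransmitted after
// three miss indications, so with MaxRetransmits(2) the original transmission
// plus two retransmissions can each be nacked three times before the message
// is abandoned - nine nacks in total.)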
RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce([this](TimeMs, size_t) { - SendQueue::DataToSend dts(gen_.Ordered({1, 2, 3, 4}, "BE")); + .WillOnce([this](Timestamp, size_t) { + SendQueue::DataToSend dts(kMessageId, gen_.Ordered({1, 2, 3, 4}, "BE")); dts.max_retransmissions = MaxRetransmits(2); return dts; }) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(0))) + .WillOnce(CreateChunk(OutgoingMessageId(1))) + .WillOnce(CreateChunk(OutgoingMessageId(2))) + .WillOnce(CreateChunk(OutgoingMessageId(3))) + .WillOnce(CreateChunk(OutgoingMessageId(4))) + .WillOnce(CreateChunk(OutgoingMessageId(5))) + .WillOnce(CreateChunk(OutgoingMessageId(6))) + .WillOnce(CreateChunk(OutgoingMessageId(7))) + .WillOnce(CreateChunk(OutgoingMessageId(8))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_FALSE(queue.ShouldSendForwardTsn(now_)); @@ -1213,8 +1289,7 @@ TEST_F(RetransmissionQueueTest, AbandonsRtxLimit2WhenNackedNineTimes) { Pair(TSN(18), State::kInFlight), // Pair(TSN(19), State::kInFlight))); - EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42))) - .Times(0); + EXPECT_CALL(producer_, Discard(StreamID(1), OutgoingMessageId(8))).Times(0); // Ack TSN [11 to 13] - three nacks for TSN(10), which will retransmit it. for (int tsn = 11; tsn <= 13; ++tsn) { @@ -1284,7 +1359,7 @@ TEST_F(RetransmissionQueueTest, AbandonsRtxLimit2WhenNackedNineTimes) { EXPECT_FALSE(queue.ShouldSendForwardTsn(now_)); // Ack TSN 19 - three more nacks for TSN 10, no more retransmissions. - EXPECT_CALL(producer_, Discard(IsUnordered(false), StreamID(1), MID(42))) + EXPECT_CALL(producer_, Discard(StreamID(1), kMessageId)) .WillOnce(Return(false)); queue.HandleSack( now_, SackChunk(TSN(9), kArwnd, {SackChunk::GapAckBlock(2, 10)}, {})); @@ -1315,85 +1390,28 @@ TEST_F(RetransmissionQueueTest, CwndRecoversWhenAcking) { std::vector payload(1000); EXPECT_CALL(producer_, Produce) - .WillOnce([this, payload](TimeMs, size_t) { - return SendQueue::DataToSend(gen_.Ordered(payload, "BE")); + .WillOnce([this, payload](Timestamp, size_t) { + return SendQueue::DataToSend(OutgoingMessageId(0), + gen_.Ordered(payload, "BE")); }) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); std::vector> chunks_to_send = queue.GetChunksToSend(now_, 1500); EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _))); size_t serialized_size = payload.size() + DataChunk::kHeaderSize; - EXPECT_EQ(queue.outstanding_bytes(), serialized_size); + EXPECT_EQ(queue.unacked_packet_bytes(), serialized_size); queue.HandleSack(now_, SackChunk(TSN(10), kArwnd, {}, {})); EXPECT_EQ(queue.cwnd(), kCwnd + serialized_size); } -// Verifies that it doesn't produce tiny packets, when getting close to -// the full congestion window. -TEST_F(RetransmissionQueueTest, OnlySendsLargePacketsOnLargeCongestionWindow) { - RetransmissionQueue queue = CreateQueue(); - size_t intial_cwnd = options_.avoid_fragmentation_cwnd_mtus * options_.mtu; - queue.set_cwnd(intial_cwnd); - EXPECT_EQ(queue.cwnd(), intial_cwnd); - - // Fill the congestion window almost - leaving 500 bytes. 
- size_t chunk_size = intial_cwnd - 500; - EXPECT_CALL(producer_, Produce) - .WillOnce([chunk_size, this](TimeMs, size_t) { - return SendQueue::DataToSend( - gen_.Ordered(std::vector(chunk_size), "BE")); - }) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); - - EXPECT_TRUE(queue.can_send_data()); - std::vector> chunks_to_send = - queue.GetChunksToSend(now_, 10000); - EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _))); - - // To little space left - will not send more. - EXPECT_FALSE(queue.can_send_data()); - - // But when the first chunk is acked, it will continue. - queue.HandleSack(now_, SackChunk(TSN(10), kArwnd, {}, {})); - - EXPECT_TRUE(queue.can_send_data()); - EXPECT_EQ(queue.outstanding_bytes(), 0u); - EXPECT_EQ(queue.cwnd(), intial_cwnd + kMaxMtu); -} - -TEST_F(RetransmissionQueueTest, AllowsSmallFragmentsOnSmallCongestionWindow) { - RetransmissionQueue queue = CreateQueue(); - size_t intial_cwnd = - options_.avoid_fragmentation_cwnd_mtus * options_.mtu - 1; - queue.set_cwnd(intial_cwnd); - EXPECT_EQ(queue.cwnd(), intial_cwnd); - - // Fill the congestion window almost - leaving 500 bytes. - size_t chunk_size = intial_cwnd - 500; - EXPECT_CALL(producer_, Produce) - .WillOnce([chunk_size, this](TimeMs, size_t) { - return SendQueue::DataToSend( - gen_.Ordered(std::vector(chunk_size), "BE")); - }) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); - - EXPECT_TRUE(queue.can_send_data()); - std::vector> chunks_to_send = - queue.GetChunksToSend(now_, 10000); - EXPECT_THAT(chunks_to_send, ElementsAre(Pair(TSN(10), _))); - - // With congestion window under limit, allow small packets to be created. - EXPECT_TRUE(queue.can_send_data()); -} - TEST_F(RetransmissionQueueTest, ReadyForHandoverWhenHasNoOutstandingData) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(0))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), SizeIs(1)); EXPECT_EQ( @@ -1408,15 +1426,15 @@ TEST_F(RetransmissionQueueTest, ReadyForHandoverWhenHasNoOutstandingData) { TEST_F(RetransmissionQueueTest, ReadyForHandoverWhenNothingToRetransmit) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(0))) + .WillOnce(CreateChunk(OutgoingMessageId(1))) + .WillOnce(CreateChunk(OutgoingMessageId(2))) + .WillOnce(CreateChunk(OutgoingMessageId(3))) + .WillOnce(CreateChunk(OutgoingMessageId(4))) + .WillOnce(CreateChunk(OutgoingMessageId(5))) + .WillOnce(CreateChunk(OutgoingMessageId(6))) + .WillOnce(CreateChunk(OutgoingMessageId(7))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), SizeIs(8)); EXPECT_EQ( queue.GetHandoverReadiness(), @@ -1428,8 +1446,8 @@ TEST_F(RetransmissionQueueTest, ReadyForHandoverWhenNothingToRetransmit) { // Send 18 EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(8))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); 
EXPECT_THAT(GetSentPacketTSNs(queue), SizeIs(1)); // Ack 12, 14-15, 17-18 @@ -1440,8 +1458,8 @@ TEST_F(RetransmissionQueueTest, ReadyForHandoverWhenNothingToRetransmit) { // Send 19 EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(9))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), SizeIs(1)); // Ack 12, 14-15, 17-19 @@ -1452,8 +1470,8 @@ TEST_F(RetransmissionQueueTest, ReadyForHandoverWhenNothingToRetransmit) { // Send 20 EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(10))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), SizeIs(1)); // Ack 12, 14-15, 17-20 @@ -1487,9 +1505,9 @@ TEST_F(RetransmissionQueueTest, ReadyForHandoverWhenNothingToRetransmit) { TEST_F(RetransmissionQueueTest, HandoverTest) { RetransmissionQueue queue = CreateQueue(); EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(0))) + .WillOnce(CreateChunk(OutgoingMessageId(1))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(queue), SizeIs(2)); queue.HandleSack(now_, SackChunk(TSN(11), kArwnd, {}, {})); @@ -1497,10 +1515,10 @@ TEST_F(RetransmissionQueueTest, HandoverTest) { CreateQueueByHandover(queue); EXPECT_CALL(producer_, Produce) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillOnce(CreateChunk()) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + .WillOnce(CreateChunk(OutgoingMessageId(2))) + .WillOnce(CreateChunk(OutgoingMessageId(3))) + .WillOnce(CreateChunk(OutgoingMessageId(4))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); EXPECT_THAT(GetSentPacketTSNs(*handedover_queue), testing::ElementsAre(TSN(12), TSN(13), TSN(14))); @@ -1518,22 +1536,27 @@ TEST_F(RetransmissionQueueTest, CanAlwaysSendOnePacket) { std::vector payload(mtu - 100); EXPECT_CALL(producer_, Produce) - .WillOnce([this, payload](TimeMs, size_t) { - return SendQueue::DataToSend(gen_.Ordered(payload, "B")); + .WillOnce([this, payload](Timestamp, size_t) { + return SendQueue::DataToSend(OutgoingMessageId(0), + gen_.Ordered(payload, "B")); }) - .WillOnce([this, payload](TimeMs, size_t) { - return SendQueue::DataToSend(gen_.Ordered(payload, "")); + .WillOnce([this, payload](Timestamp, size_t) { + return SendQueue::DataToSend(OutgoingMessageId(0), + gen_.Ordered(payload, "")); }) - .WillOnce([this, payload](TimeMs, size_t) { - return SendQueue::DataToSend(gen_.Ordered(payload, "")); + .WillOnce([this, payload](Timestamp, size_t) { + return SendQueue::DataToSend(OutgoingMessageId(0), + gen_.Ordered(payload, "")); }) - .WillOnce([this, payload](TimeMs, size_t) { - return SendQueue::DataToSend(gen_.Ordered(payload, "")); + .WillOnce([this, payload](Timestamp, size_t) { + return SendQueue::DataToSend(OutgoingMessageId(0), + gen_.Ordered(payload, "")); }) - .WillOnce([this, payload](TimeMs, size_t) { - return SendQueue::DataToSend(gen_.Ordered(payload, "E")); + .WillOnce([this, payload](Timestamp, size_t) { + return SendQueue::DataToSend(OutgoingMessageId(0), + gen_.Ordered(payload, "E")); }) - .WillRepeatedly([](TimeMs, size_t) { return absl::nullopt; }); + 
.WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); // Produce all chunks and put them in the retransmission queue. std::vector> chunks_to_send = @@ -1589,5 +1612,45 @@ TEST_F(RetransmissionQueueTest, CanAlwaysSendOnePacket) { EXPECT_THAT(queue.GetChunksToSend(now_, mtu), IsEmpty()); } +TEST_F(RetransmissionQueueTest, UpdatesRwndFromSackAndUnackedPayloadBytes) { + RetransmissionQueue queue = CreateQueue(); + + EXPECT_EQ(queue.rwnd(), kArwnd); + + constexpr size_t kChunkSize = 4; + + EXPECT_CALL(producer_, Produce) + .WillOnce(CreateChunk(OutgoingMessageId(0))) + .WillOnce(CreateChunk(OutgoingMessageId(1))) + .WillOnce(CreateChunk(OutgoingMessageId(2))) + .WillRepeatedly([](Timestamp, size_t) { return std::nullopt; }); + + EXPECT_THAT(GetSentPacketTSNs(queue), + testing::ElementsAre(TSN(10), TSN(11), TSN(12))); + EXPECT_THAT(queue.GetChunkStatesForTesting(), + ElementsAre(Pair(TSN(9), State::kAcked), // + Pair(TSN(10), State::kInFlight), + Pair(TSN(11), State::kInFlight), + Pair(TSN(12), State::kInFlight))); + + EXPECT_EQ(queue.rwnd(), kArwnd - kChunkSize * 3); + + // Acknowledge TSN = 10. + queue.HandleSack(now_, SackChunk(TSN(10), 1000, {}, {})); + EXPECT_THAT(queue.GetChunkStatesForTesting(), + ElementsAre(Pair(TSN(10), State::kAcked), // + Pair(TSN(11), State::kInFlight), // + Pair(TSN(12), State::kInFlight))); + + EXPECT_EQ(queue.rwnd(), 1000 - kChunkSize * 2); + + // Acknowledge everything. + queue.HandleSack(now_, SackChunk(TSN(12), 2000, {}, {})); + EXPECT_THAT(queue.GetChunkStatesForTesting(), + ElementsAre(Pair(TSN(12), State::kAcked))); + + EXPECT_EQ(queue.rwnd(), 2000u); +} + } // namespace } // namespace dcsctp diff --git a/net/dcsctp/tx/retransmission_timeout.cc b/net/dcsctp/tx/retransmission_timeout.cc index 7d8fb9761c..3456ed2910 100644 --- a/net/dcsctp/tx/retransmission_timeout.cc +++ b/net/dcsctp/tx/retransmission_timeout.cc @@ -12,52 +12,55 @@ #include #include +#include "api/units/time_delta.h" #include "net/dcsctp/public/dcsctp_options.h" namespace dcsctp { -RetransmissionTimeout::RetransmissionTimeout(const DcSctpOptions& options) - : min_rto_(*options.rto_min), - max_rto_(*options.rto_max), - max_rtt_(*options.rtt_max), - min_rtt_variance_(*options.min_rtt_variance), - scaled_srtt_(*options.rto_initial << kRttShift), - rto_(*options.rto_initial) {} +// https://datatracker.ietf.org/doc/html/rfc4960#section-15. +constexpr double kRtoAlpha = 0.125; +constexpr double kRtoBeta = 0.25; + +// A factor that the `min_rtt_variance` configuration option will be divided by +// (before later multiplied with K, which is 4 according to RFC6298). When this +// value was introduced, it was unintentionally divided by 8 since that code +// worked with scaled numbers (to avoid floating point math). That behavior is +// kept as downstream users have measured good values for their use-cases. +constexpr double kHeuristicVarianceAdjustment = 8.0; -void RetransmissionTimeout::ObserveRTT(DurationMs measured_rtt) { - const int32_t rtt = *measured_rtt; +RetransmissionTimeout::RetransmissionTimeout(const DcSctpOptions& options) + : min_rto_(options.rto_min.ToTimeDelta()), + max_rto_(options.rto_max.ToTimeDelta()), + max_rtt_(options.rtt_max.ToTimeDelta()), + min_rtt_variance_(options.min_rtt_variance.ToTimeDelta() / + kHeuristicVarianceAdjustment), + srtt_(options.rto_initial.ToTimeDelta()), + rto_(options.rto_initial.ToTimeDelta()) {} +void RetransmissionTimeout::ObserveRTT(webrtc::TimeDelta rtt) { // Unrealistic values will be skipped. 
If a wrongly measured (or otherwise // corrupt) value was processed, it could change the state in a way that would // take a very long time to recover. - if (rtt < 0 || rtt > max_rtt_) { + if (rtt < webrtc::TimeDelta::Zero() || rtt > max_rtt_) { return; } - // From https://tools.ietf.org/html/rfc4960#section-6.3.1, but avoiding - // floating point math by implementing algorithm from "V. Jacobson: Congestion - // avoidance and control", but adapted for SCTP. + // https://tools.ietf.org/html/rfc4960#section-6.3.1. if (first_measurement_) { - scaled_srtt_ = rtt << kRttShift; - scaled_rtt_var_ = (rtt / 2) << kRttVarShift; + srtt_ = rtt; + rtt_var_ = rtt / 2; first_measurement_ = false; } else { - int32_t rtt_diff = rtt - (scaled_srtt_ >> kRttShift); - scaled_srtt_ += rtt_diff; - if (rtt_diff < 0) { - rtt_diff = -rtt_diff; - } - rtt_diff -= (scaled_rtt_var_ >> kRttVarShift); - scaled_rtt_var_ += rtt_diff; + webrtc::TimeDelta rtt_diff = (srtt_ - rtt).Abs(); + rtt_var_ = (1 - kRtoBeta) * rtt_var_ + kRtoBeta * rtt_diff; + srtt_ = (1 - kRtoAlpha) * srtt_ + kRtoAlpha * rtt; } - if (scaled_rtt_var_ < min_rtt_variance_) { - scaled_rtt_var_ = min_rtt_variance_; + if (rtt_var_ < min_rtt_variance_) { + rtt_var_ = min_rtt_variance_; } - rto_ = (scaled_srtt_ >> kRttShift) + scaled_rtt_var_; - - // Clamp RTO between min and max. - rto_ = std::min(std::max(rto_, min_rto_), max_rto_); + rto_ = srtt_ + 4 * rtt_var_; + rto_ = std::clamp(rto_, min_rto_, max_rto_); } } // namespace dcsctp diff --git a/net/dcsctp/tx/retransmission_timeout.h b/net/dcsctp/tx/retransmission_timeout.h index 01530cb3b5..b87501d79f 100644 --- a/net/dcsctp/tx/retransmission_timeout.h +++ b/net/dcsctp/tx/retransmission_timeout.h @@ -27,32 +27,30 @@ namespace dcsctp { // a lot, which is an indicator of a bad connection. class RetransmissionTimeout { public: - static constexpr int kRttShift = 3; - static constexpr int kRttVarShift = 2; explicit RetransmissionTimeout(const DcSctpOptions& options); // To be called when a RTT has been measured, to update the RTO value. - void ObserveRTT(DurationMs measured_rtt); + void ObserveRTT(webrtc::TimeDelta rtt); - // Returns the Retransmission Timeout (RTO) value, in milliseconds. - DurationMs rto() const { return DurationMs(rto_); } + // Returns the Retransmission Timeout (RTO) value. + webrtc::TimeDelta rto() const { return rto_; } - // Returns the smoothed RTT value, in milliseconds. - DurationMs srtt() const { return DurationMs(scaled_srtt_ >> kRttShift); } + // Returns the smoothed RTT value. + webrtc::TimeDelta srtt() const { return srtt_; } private: - const int32_t min_rto_; - const int32_t max_rto_; - const int32_t max_rtt_; - const int32_t min_rtt_variance_; + const webrtc::TimeDelta min_rto_; + const webrtc::TimeDelta max_rto_; + const webrtc::TimeDelta max_rtt_; + const webrtc::TimeDelta min_rtt_variance_; // If this is the first measurement bool first_measurement_ = true; - // Smoothed Round-Trip Time, shifted by kRttShift - int32_t scaled_srtt_; - // Round-Trip Time Variation, shifted by kRttVarShift - int32_t scaled_rtt_var_ = 0; + // Smoothed Round-Trip Time. + webrtc::TimeDelta srtt_; + // Round-Trip Time Variation. 
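// (Worked example of the floating-point computation above, using the option
// values from retransmission_timeout_test.cc: rto_initial = 200 ms,
// rto_min = 120 ms, rto_max = 800 ms, min_rtt_variance = 220 ms, so the
// variance floor is 220 / 8 = 27.5 ms after kHeuristicVarianceAdjustment.
// First ObserveRTT(124 ms): srtt = 124, rtt_var = 124 / 2 = 62 (above the
// floor), RTO = 124 + 4 * 62 = 372 ms, already within [120, 800].
// With a steady 124 ms RTT, rtt_var decays towards the 27.5 ms floor and RTO
// settles at 124 + 4 * 27.5 = 234 ms, matching the updated test expectations
// below.)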
+ webrtc::TimeDelta rtt_var_ = webrtc::TimeDelta::Zero(); // Retransmission Timeout - int32_t rto_; + webrtc::TimeDelta rto_; }; } // namespace dcsctp diff --git a/net/dcsctp/tx/retransmission_timeout_test.cc b/net/dcsctp/tx/retransmission_timeout_test.cc index b901995e97..8686fbe012 100644 --- a/net/dcsctp/tx/retransmission_timeout_test.cc +++ b/net/dcsctp/tx/retransmission_timeout_test.cc @@ -15,20 +15,21 @@ namespace dcsctp { namespace { +using ::webrtc::TimeDelta; -constexpr DurationMs kMaxRtt = DurationMs(8'000); -constexpr DurationMs kInitialRto = DurationMs(200); -constexpr DurationMs kMaxRto = DurationMs(800); -constexpr DurationMs kMinRto = DurationMs(120); -constexpr DurationMs kMinRttVariance = DurationMs(220); +constexpr TimeDelta kMaxRtt = TimeDelta::Millis(8'000); +constexpr TimeDelta kInitialRto = TimeDelta::Millis(200); +constexpr TimeDelta kMaxRto = TimeDelta::Millis(800); +constexpr TimeDelta kMinRto = TimeDelta::Millis(120); +constexpr TimeDelta kMinRttVariance = TimeDelta::Millis(220); DcSctpOptions MakeOptions() { DcSctpOptions options; - options.rtt_max = kMaxRtt; - options.rto_initial = kInitialRto; - options.rto_max = kMaxRto; - options.rto_min = kMinRto; - options.min_rtt_variance = kMinRttVariance; + options.rtt_max = DurationMs(kMaxRtt); + options.rto_initial = DurationMs(kInitialRto); + options.rto_max = DurationMs(kMaxRto); + options.rto_min = DurationMs(kMinRto); + options.min_rtt_variance = DurationMs(kMinRttVariance); return options; } @@ -45,31 +46,31 @@ TEST(RetransmissionTimeoutTest, HasValidInitialSrtt) { TEST(RetransmissionTimeoutTest, NegativeValuesDoNotAffectRTO) { RetransmissionTimeout rto_(MakeOptions()); // Initial negative value - rto_.ObserveRTT(DurationMs(-10)); + rto_.ObserveRTT(TimeDelta::Millis(-10)); EXPECT_EQ(rto_.rto(), kInitialRto); - rto_.ObserveRTT(DurationMs(124)); - EXPECT_EQ(*rto_.rto(), 372); + rto_.ObserveRTT(TimeDelta::Millis(124)); + EXPECT_EQ(rto_.rto().ms(), 372); // Subsequent negative value - rto_.ObserveRTT(DurationMs(-10)); - EXPECT_EQ(*rto_.rto(), 372); + rto_.ObserveRTT(TimeDelta::Millis(-10)); + EXPECT_EQ(rto_.rto().ms(), 372); } TEST(RetransmissionTimeoutTest, TooLargeValuesDoNotAffectRTO) { RetransmissionTimeout rto_(MakeOptions()); // Initial too large value - rto_.ObserveRTT(kMaxRtt + DurationMs(100)); + rto_.ObserveRTT(kMaxRtt + TimeDelta::Millis(100)); EXPECT_EQ(rto_.rto(), kInitialRto); - rto_.ObserveRTT(DurationMs(124)); - EXPECT_EQ(*rto_.rto(), 372); + rto_.ObserveRTT(TimeDelta::Millis(124)); + EXPECT_EQ(rto_.rto().ms(), 372); // Subsequent too large value - rto_.ObserveRTT(kMaxRtt + DurationMs(100)); - EXPECT_EQ(*rto_.rto(), 372); + rto_.ObserveRTT(kMaxRtt + TimeDelta::Millis(100)); + EXPECT_EQ(rto_.rto().ms(), 372); } TEST(RetransmissionTimeoutTest, WillNeverGoBelowMinimumRto) { RetransmissionTimeout rto_(MakeOptions()); for (int i = 0; i < 1000; ++i) { - rto_.ObserveRTT(DurationMs(1)); + rto_.ObserveRTT(TimeDelta::Millis(1)); } EXPECT_GE(rto_.rto(), kMinRto); } @@ -77,67 +78,67 @@ TEST(RetransmissionTimeoutTest, WillNeverGoBelowMinimumRto) { TEST(RetransmissionTimeoutTest, WillNeverGoAboveMaximumRto) { RetransmissionTimeout rto_(MakeOptions()); for (int i = 0; i < 1000; ++i) { - rto_.ObserveRTT(kMaxRtt - DurationMs(1)); + rto_.ObserveRTT(kMaxRtt - TimeDelta::Millis(1)); // Adding jitter, which would make it RTO be well above RTT. 
- rto_.ObserveRTT(kMaxRtt - DurationMs(100)); + rto_.ObserveRTT(kMaxRtt - TimeDelta::Millis(100)); } EXPECT_LE(rto_.rto(), kMaxRto); } TEST(RetransmissionTimeoutTest, CalculatesRtoForStableRtt) { RetransmissionTimeout rto_(MakeOptions()); - rto_.ObserveRTT(DurationMs(124)); - EXPECT_EQ(*rto_.rto(), 372); - rto_.ObserveRTT(DurationMs(128)); - EXPECT_EQ(*rto_.rto(), 344); - rto_.ObserveRTT(DurationMs(123)); - EXPECT_EQ(*rto_.rto(), 344); - rto_.ObserveRTT(DurationMs(125)); - EXPECT_EQ(*rto_.rto(), 344); - rto_.ObserveRTT(DurationMs(127)); - EXPECT_EQ(*rto_.rto(), 344); + rto_.ObserveRTT(TimeDelta::Millis(124)); + EXPECT_EQ(rto_.rto().ms(), 372); + rto_.ObserveRTT(TimeDelta::Millis(128)); + EXPECT_EQ(rto_.rto().ms(), 315); + rto_.ObserveRTT(TimeDelta::Millis(123)); + EXPECT_EQ(rto_.rto().ms(), 268); + rto_.ObserveRTT(TimeDelta::Millis(125)); + EXPECT_EQ(rto_.rto().ms(), 234); + rto_.ObserveRTT(TimeDelta::Millis(127)); + EXPECT_EQ(rto_.rto().ms(), 235); } TEST(RetransmissionTimeoutTest, CalculatesRtoForUnstableRtt) { RetransmissionTimeout rto_(MakeOptions()); - rto_.ObserveRTT(DurationMs(124)); - EXPECT_EQ(*rto_.rto(), 372); - rto_.ObserveRTT(DurationMs(402)); - EXPECT_EQ(*rto_.rto(), 622); - rto_.ObserveRTT(DurationMs(728)); - EXPECT_EQ(*rto_.rto(), 800); - rto_.ObserveRTT(DurationMs(89)); - EXPECT_EQ(*rto_.rto(), 800); - rto_.ObserveRTT(DurationMs(126)); - EXPECT_EQ(*rto_.rto(), 800); + rto_.ObserveRTT(TimeDelta::Millis(124)); + EXPECT_EQ(rto_.rto().ms(), 372); + rto_.ObserveRTT(TimeDelta::Millis(402)); + EXPECT_EQ(rto_.rto().ms(), 623); + rto_.ObserveRTT(TimeDelta::Millis(728)); + EXPECT_EQ(rto_.rto().ms(), 800); + rto_.ObserveRTT(TimeDelta::Millis(89)); + EXPECT_EQ(rto_.rto().ms(), 800); + rto_.ObserveRTT(TimeDelta::Millis(126)); + EXPECT_EQ(rto_.rto().ms(), 800); } TEST(RetransmissionTimeoutTest, WillStabilizeAfterAWhile) { RetransmissionTimeout rto_(MakeOptions()); - rto_.ObserveRTT(DurationMs(124)); - rto_.ObserveRTT(DurationMs(402)); - rto_.ObserveRTT(DurationMs(728)); - rto_.ObserveRTT(DurationMs(89)); - rto_.ObserveRTT(DurationMs(126)); - EXPECT_EQ(*rto_.rto(), 800); - rto_.ObserveRTT(DurationMs(124)); - EXPECT_EQ(*rto_.rto(), 800); - rto_.ObserveRTT(DurationMs(122)); - EXPECT_EQ(*rto_.rto(), 710); - rto_.ObserveRTT(DurationMs(123)); - EXPECT_EQ(*rto_.rto(), 631); - rto_.ObserveRTT(DurationMs(124)); - EXPECT_EQ(*rto_.rto(), 562); - rto_.ObserveRTT(DurationMs(122)); - EXPECT_EQ(*rto_.rto(), 505); - rto_.ObserveRTT(DurationMs(124)); - EXPECT_EQ(*rto_.rto(), 454); - rto_.ObserveRTT(DurationMs(124)); - EXPECT_EQ(*rto_.rto(), 410); - rto_.ObserveRTT(DurationMs(124)); - EXPECT_EQ(*rto_.rto(), 372); - rto_.ObserveRTT(DurationMs(124)); - EXPECT_EQ(*rto_.rto(), 367); + rto_.ObserveRTT(TimeDelta::Millis(124)); + rto_.ObserveRTT(TimeDelta::Millis(402)); + rto_.ObserveRTT(TimeDelta::Millis(728)); + rto_.ObserveRTT(TimeDelta::Millis(89)); + rto_.ObserveRTT(TimeDelta::Millis(126)); + EXPECT_EQ(rto_.rto().ms(), 800); + rto_.ObserveRTT(TimeDelta::Millis(124)); + EXPECT_EQ(rto_.rto().ms(), 800); + rto_.ObserveRTT(TimeDelta::Millis(122)); + EXPECT_EQ(rto_.rto().ms(), 709); + rto_.ObserveRTT(TimeDelta::Millis(123)); + EXPECT_EQ(rto_.rto().ms(), 630); + rto_.ObserveRTT(TimeDelta::Millis(124)); + EXPECT_EQ(rto_.rto().ms(), 562); + rto_.ObserveRTT(TimeDelta::Millis(122)); + EXPECT_EQ(rto_.rto().ms(), 505); + rto_.ObserveRTT(TimeDelta::Millis(124)); + EXPECT_EQ(rto_.rto().ms(), 454); + rto_.ObserveRTT(TimeDelta::Millis(124)); + EXPECT_EQ(rto_.rto().ms(), 410); + rto_.ObserveRTT(TimeDelta::Millis(124)); + 
EXPECT_EQ(rto_.rto().ms(), 372); + rto_.ObserveRTT(TimeDelta::Millis(124)); + EXPECT_EQ(rto_.rto().ms(), 340); } TEST(RetransmissionTimeoutTest, WillAlwaysStayAboveRTT) { @@ -149,31 +150,33 @@ TEST(RetransmissionTimeoutTest, WillAlwaysStayAboveRTT) { RetransmissionTimeout rto_(MakeOptions()); for (int i = 0; i < 1000; ++i) { - rto_.ObserveRTT(DurationMs(124)); + rto_.ObserveRTT(TimeDelta::Millis(124)); } - EXPECT_EQ(*rto_.rto(), 344); + EXPECT_EQ(rto_.rto().ms(), 234); } TEST(RetransmissionTimeoutTest, CanSpecifySmallerMinimumRttVariance) { DcSctpOptions options = MakeOptions(); - options.min_rtt_variance = kMinRttVariance - DurationMs(100); + options.min_rtt_variance = + DurationMs(kMinRttVariance - TimeDelta::Millis(100)); RetransmissionTimeout rto_(options); for (int i = 0; i < 1000; ++i) { - rto_.ObserveRTT(DurationMs(124)); + rto_.ObserveRTT(TimeDelta::Millis(124)); } - EXPECT_EQ(*rto_.rto(), 244); + EXPECT_EQ(rto_.rto().ms(), 184); } TEST(RetransmissionTimeoutTest, CanSpecifyLargerMinimumRttVariance) { DcSctpOptions options = MakeOptions(); - options.min_rtt_variance = kMinRttVariance + DurationMs(100); + options.min_rtt_variance = + DurationMs(kMinRttVariance + TimeDelta::Millis(100)); RetransmissionTimeout rto_(options); for (int i = 0; i < 1000; ++i) { - rto_.ObserveRTT(DurationMs(124)); + rto_.ObserveRTT(TimeDelta::Millis(124)); } - EXPECT_EQ(*rto_.rto(), 444); + EXPECT_EQ(rto_.rto().ms(), 284); } } // namespace diff --git a/net/dcsctp/tx/rr_send_queue.cc b/net/dcsctp/tx/rr_send_queue.cc index 4df253dddf..26d84363f1 100644 --- a/net/dcsctp/tx/rr_send_queue.cc +++ b/net/dcsctp/tx/rr_send_queue.cc @@ -13,32 +13,33 @@ #include #include #include +#include #include #include #include #include "absl/algorithm/container.h" -#include "absl/types/optional.h" #include "api/array_view.h" -#include "net/dcsctp/common/str_join.h" +#include "net/dcsctp/common/internal_types.h" #include "net/dcsctp/packet/data.h" #include "net/dcsctp/public/dcsctp_message.h" #include "net/dcsctp/public/dcsctp_socket.h" #include "net/dcsctp/public/types.h" #include "net/dcsctp/tx/send_queue.h" #include "rtc_base/logging.h" +#include "rtc_base/strings/str_join.h" namespace dcsctp { +using ::webrtc::TimeDelta; +using ::webrtc::Timestamp; RRSendQueue::RRSendQueue(absl::string_view log_prefix, DcSctpSocketCallbacks* callbacks, - size_t buffer_size, size_t mtu, StreamPriority default_priority, size_t total_buffered_amount_low_threshold) : log_prefix_(log_prefix), callbacks_(*callbacks), - buffer_size_(buffer_size), default_priority_(default_priority), scheduler_(log_prefix_, mtu), total_buffered_amount_( @@ -81,11 +82,12 @@ bool RRSendQueue::IsConsistent() const { } } if (expected_active_streams != actual_active_streams) { - auto fn = [&](rtc::StringBuilder& sb, const auto& p) { sb << *p; }; + auto fn = [&](webrtc::StringBuilder& sb, const auto& p) { sb << *p; }; RTC_DLOG(LS_ERROR) << "Active streams mismatch, is=[" - << StrJoin(actual_active_streams, ",", fn) + << webrtc::StrJoin(actual_active_streams, ",", fn) << "], expected=[" - << StrJoin(expected_active_streams, ",", fn) << "]"; + << webrtc::StrJoin(expected_active_streams, ",", fn) + << "]"; return false; } @@ -123,7 +125,10 @@ void RRSendQueue::OutgoingStream::Add(DcSctpMessage message, bool was_active = bytes_to_send_in_next_message() > 0; buffered_amount_.Increase(message.payload().size()); parent_.total_buffered_amount_.Increase(message.payload().size()); - items_.emplace_back(std::move(message), std::move(attributes)); + OutgoingMessageId message_id = 
parent_.current_message_id; + parent_.current_message_id = + OutgoingMessageId(*parent_.current_message_id + 1); + items_.emplace_back(message_id, std::move(message), std::move(attributes)); if (!was_active) { scheduler_stream_->MaybeMakeActive(); @@ -132,8 +137,8 @@ void RRSendQueue::OutgoingStream::Add(DcSctpMessage message, RTC_DCHECK(IsConsistent()); } -absl::optional RRSendQueue::OutgoingStream::Produce( - TimeMs now, +std::optional RRSendQueue::OutgoingStream::Produce( + Timestamp now, size_t max_size) { RTC_DCHECK(pause_state_ != PauseState::kPaused && pause_state_ != PauseState::kResetting); @@ -143,7 +148,7 @@ absl::optional RRSendQueue::OutgoingStream::Produce( DcSctpMessage& message = item.message; // Allocate Message ID and SSN when the first fragment is sent. - if (!item.message_id.has_value()) { + if (!item.mid.has_value()) { // Oops, this entire message has already expired. Try the next one. if (item.attributes.expires_at <= now) { HandleMessageExpired(item); @@ -153,7 +158,7 @@ absl::optional RRSendQueue::OutgoingStream::Produce( MID& mid = item.attributes.unordered ? next_unordered_mid_ : next_ordered_mid_; - item.message_id = mid; + item.mid = mid; mid = MID(*mid + 1); } if (!item.attributes.unordered && !item.ssn.has_value()) { @@ -162,9 +167,9 @@ absl::optional RRSendQueue::OutgoingStream::Produce( } // Grab the next `max_size` fragment from this message and calculate flags. - rtc::ArrayView chunk_payload = + webrtc::ArrayView chunk_payload = item.message.payload().subview(item.remaining_offset, max_size); - rtc::ArrayView message_payload = message.payload(); + webrtc::ArrayView message_payload = message.payload(); Data::IsBeginning is_beginning(chunk_payload.data() == message_payload.data()); Data::IsEnd is_end((chunk_payload.data() + chunk_payload.size()) == @@ -184,10 +189,10 @@ absl::optional RRSendQueue::OutgoingStream::Produce( buffered_amount_.Decrease(payload.size()); parent_.total_buffered_amount_.Decrease(payload.size()); - SendQueue::DataToSend chunk(Data(stream_id, item.ssn.value_or(SSN(0)), - item.message_id.value(), fsn, ppid, - std::move(payload), is_beginning, is_end, - item.attributes.unordered)); + SendQueue::DataToSend chunk( + item.message_id, Data(stream_id, item.ssn.value_or(SSN(0)), *item.mid, + fsn, ppid, std::move(payload), is_beginning, + is_end, item.attributes.unordered)); chunk.max_retransmissions = item.attributes.max_retransmissions; chunk.expires_at = item.attributes.expires_at; chunk.lifecycle_id = @@ -214,7 +219,7 @@ absl::optional RRSendQueue::OutgoingStream::Produce( return chunk; } RTC_DCHECK(IsConsistent()); - return absl::nullopt; + return std::nullopt; } void RRSendQueue::OutgoingStream::HandleMessageExpired( @@ -231,13 +236,11 @@ void RRSendQueue::OutgoingStream::HandleMessageExpired( } } -bool RRSendQueue::OutgoingStream::Discard(IsUnordered unordered, - MID message_id) { +bool RRSendQueue::OutgoingStream::Discard(OutgoingMessageId message_id) { bool result = false; if (!items_.empty()) { Item& item = items_.front(); - if (item.attributes.unordered == unordered && item.message_id.has_value() && - *item.message_id == message_id) { + if (item.message_id == message_id) { HandleMessageExpired(item); items_.pop_front(); @@ -329,8 +332,8 @@ void RRSendQueue::OutgoingStream::Reset() { item.remaining_size); item.remaining_offset = 0; item.remaining_size = item.message.payload().size(); - item.message_id = absl::nullopt; - item.ssn = absl::nullopt; + item.mid = std::nullopt; + item.ssn = std::nullopt; item.current_fsn = FSN(0); if 
(old_pause_state == PauseState::kPaused || old_pause_state == PauseState::kResetting) { @@ -344,10 +347,10 @@ bool RRSendQueue::OutgoingStream::has_partially_sent_message() const { if (items_.empty()) { return false; } - return items_.front().message_id.has_value(); + return items_.front().mid.has_value(); } -void RRSendQueue::Add(TimeMs now, +void RRSendQueue::Add(Timestamp now, DcSctpMessage message, const SendOptions& send_options) { RTC_DCHECK(!message.payload().empty()); @@ -364,33 +367,28 @@ void RRSendQueue::Add(TimeMs now, ? MaxRetransmits(send_options.max_retransmissions.value()) : MaxRetransmits::NoLimit(), .expires_at = send_options.lifetime.has_value() - ? now + *send_options.lifetime + DurationMs(1) - : TimeMs::InfiniteFuture(), + ? now + send_options.lifetime->ToTimeDelta() + + TimeDelta::Millis(1) + : Timestamp::PlusInfinity(), .lifecycle_id = send_options.lifecycle_id, }; - GetOrCreateStreamInfo(message.stream_id()) - .Add(std::move(message), std::move(attributes)); + StreamID stream_id = message.stream_id(); + GetOrCreateStreamInfo(stream_id).Add(std::move(message), + std::move(attributes)); RTC_DCHECK(IsConsistent()); } -bool RRSendQueue::IsFull() const { - return total_buffered_amount() >= buffer_size_; -} - bool RRSendQueue::IsEmpty() const { return total_buffered_amount() == 0; } -absl::optional RRSendQueue::Produce(TimeMs now, - size_t max_size) { +std::optional RRSendQueue::Produce(Timestamp now, + size_t max_size) { return scheduler_.Produce(now, max_size); } -bool RRSendQueue::Discard(IsUnordered unordered, - StreamID stream_id, - MID message_id) { - bool has_discarded = - GetOrCreateStreamInfo(stream_id).Discard(unordered, message_id); +bool RRSendQueue::Discard(StreamID stream_id, OutgoingMessageId message_id) { + bool has_discarded = GetOrCreateStreamInfo(stream_id).Discard(message_id); RTC_DCHECK(IsConsistent()); return has_discarded; diff --git a/net/dcsctp/tx/rr_send_queue.h b/net/dcsctp/tx/rr_send_queue.h index c4111ff717..19c30e19c9 100644 --- a/net/dcsctp/tx/rr_send_queue.h +++ b/net/dcsctp/tx/rr_send_queue.h @@ -14,14 +14,15 @@ #include #include #include +#include #include #include #include #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" +#include "net/dcsctp/common/internal_types.h" #include "net/dcsctp/public/dcsctp_message.h" #include "net/dcsctp/public/dcsctp_socket.h" #include "net/dcsctp/public/types.h" @@ -55,7 +56,6 @@ class RRSendQueue : public SendQueue { public: RRSendQueue(absl::string_view log_prefix, DcSctpSocketCallbacks* callbacks, - size_t buffer_size, size_t mtu, StreamPriority default_priority, size_t total_buffered_amount_low_threshold); @@ -70,15 +70,14 @@ class RRSendQueue : public SendQueue { // time should be in `now`. Note that it's the responsibility of the caller to // ensure that the buffer is not full (by calling `IsFull`) before adding // messages to it. - void Add(TimeMs now, + void Add(webrtc::Timestamp now, DcSctpMessage message, const SendOptions& send_options = {}); // Implementation of `SendQueue`. 
- absl::optional Produce(TimeMs now, size_t max_size) override; - bool Discard(IsUnordered unordered, - StreamID stream_id, - MID message_id) override; + std::optional Produce(webrtc::Timestamp now, + size_t max_size) override; + bool Discard(StreamID stream_id, OutgoingMessageId message_id) override; void PrepareResetStream(StreamID streams) override; bool HasStreamsReadyToBeReset() const override; std::vector GetStreamsReadyToBeReset() override; @@ -105,7 +104,7 @@ class RRSendQueue : public SendQueue { struct MessageAttributes { IsUnordered unordered; MaxRetransmits max_retransmissions; - TimeMs expires_at; + webrtc::Timestamp expires_at; LifecycleId lifecycle_id; }; @@ -155,15 +154,15 @@ class RRSendQueue : public SendQueue { void Add(DcSctpMessage message, MessageAttributes attributes); // Implementing `StreamScheduler::StreamProducer`. - absl::optional Produce(TimeMs now, - size_t max_size) override; + std::optional Produce(webrtc::Timestamp now, + size_t max_size) override; size_t bytes_to_send_in_next_message() const override; const ThresholdWatcher& buffered_amount() const { return buffered_amount_; } ThresholdWatcher& buffered_amount() { return buffered_amount_; } // Discards a partially sent message, see `SendQueue::Discard`. - bool Discard(IsUnordered unordered, MID message_id); + bool Discard(OutgoingMessageId message_id); // Pauses this stream, which is used before resetting it. void Pause(); @@ -219,11 +218,15 @@ class RRSendQueue : public SendQueue { // An enqueued message and metadata. struct Item { - explicit Item(DcSctpMessage msg, MessageAttributes attributes) - : message(std::move(msg)), + explicit Item(OutgoingMessageId message_id, + DcSctpMessage msg, + MessageAttributes attributes) + : message_id(message_id), + message(std::move(msg)), attributes(std::move(attributes)), remaining_offset(0), remaining_size(message.payload().size()) {} + OutgoingMessageId message_id; DcSctpMessage message; MessageAttributes attributes; // The remaining payload (offset and size) to be sent, when it has been @@ -232,8 +235,8 @@ class RRSendQueue : public SendQueue { size_t remaining_size; // If set, an allocated Message ID and SSN. Will be allocated when the // first fragment is sent. - absl::optional message_id = absl::nullopt; - absl::optional ssn = absl::nullopt; + std::optional mid = std::nullopt; + std::optional ssn = std::nullopt; // The current Fragment Sequence Number, incremented for each fragment. FSN current_fsn = FSN(0); }; @@ -260,15 +263,15 @@ class RRSendQueue : public SendQueue { bool IsConsistent() const; OutgoingStream& GetOrCreateStreamInfo(StreamID stream_id); - absl::optional Produce( + std::optional Produce( std::map::iterator it, - TimeMs now, + webrtc::Timestamp now, size_t max_size); const absl::string_view log_prefix_; DcSctpSocketCallbacks& callbacks_; - const size_t buffer_size_; const StreamPriority default_priority_; + OutgoingMessageId current_message_id = OutgoingMessageId(0); StreamScheduler scheduler_; // The total amount of buffer data, for all streams. 
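To make the Discard() key change above concrete, here is a hypothetical timeline (names and numbers are illustrative, mirroring the ExpireCorrectMessageFromSendQueue test earlier in this change) showing why the wire-level MID alone is not a safe key once streams can be reset:

  // Message A (OutgoingMessageId 44) on stream 1 is assigned MID 0 and is
  // only partially sent.
  // The stream is reset, so next_ordered_mid_ starts over at MID 0.
  // Message B (OutgoingMessageId 45) on stream 1 would also be assigned MID 0.
  // Discard(StreamID(1), MID(0)) could then match either message, whereas
  // Discard(StreamID(1), OutgoingMessageId(44)) only ever matches A, because
  // current_message_id never repeats.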
diff --git a/net/dcsctp/tx/rr_send_queue_test.cc b/net/dcsctp/tx/rr_send_queue_test.cc index 95416b193a..3af511d841 100644 --- a/net/dcsctp/tx/rr_send_queue_test.cc +++ b/net/dcsctp/tx/rr_send_queue_test.cc @@ -13,6 +13,7 @@ #include #include +#include "net/dcsctp/common/internal_types.h" #include "net/dcsctp/packet/data.h" #include "net/dcsctp/public/dcsctp_message.h" #include "net/dcsctp/public/dcsctp_options.h" @@ -28,90 +29,82 @@ namespace dcsctp { namespace { using ::testing::SizeIs; using ::testing::UnorderedElementsAre; +using ::webrtc::TimeDelta; +using ::webrtc::Timestamp; -constexpr TimeMs kNow = TimeMs(0); +constexpr Timestamp kNow = Timestamp::Zero(); constexpr StreamID kStreamID(1); constexpr PPID kPPID(53); -constexpr size_t kMaxQueueSize = 1000; constexpr StreamPriority kDefaultPriority(10); constexpr size_t kBufferedAmountLowThreshold = 500; constexpr size_t kOneFragmentPacketSize = 100; constexpr size_t kTwoFragmentPacketSize = 101; constexpr size_t kMtu = 1100; -class RRSendQueueTest : public testing::Test { - protected: - RRSendQueueTest() - : buf_("log: ", - &callbacks_, - kMaxQueueSize, - kMtu, - kDefaultPriority, - kBufferedAmountLowThreshold) {} - - testing::NiceMock callbacks_; - const DcSctpOptions options_; - RRSendQueue buf_; -}; - -TEST_F(RRSendQueueTest, EmptyBuffer) { - EXPECT_TRUE(buf_.IsEmpty()); - EXPECT_FALSE(buf_.Produce(kNow, kOneFragmentPacketSize).has_value()); - EXPECT_FALSE(buf_.IsFull()); -} - -TEST_F(RRSendQueueTest, AddAndGetSingleChunk) { - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, {1, 2, 4, 5, 6})); - - EXPECT_FALSE(buf_.IsEmpty()); - EXPECT_FALSE(buf_.IsFull()); - absl::optional chunk_opt = - buf_.Produce(kNow, kOneFragmentPacketSize); +TEST(RRSendQueueTest, EmptyBuffer) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + EXPECT_TRUE(q.IsEmpty()); + EXPECT_FALSE(q.Produce(kNow, kOneFragmentPacketSize).has_value()); +} + +TEST(RRSendQueueTest, AddAndGetSingleChunk) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, {1, 2, 4, 5, 6})); + + EXPECT_FALSE(q.IsEmpty()); + std::optional chunk_opt = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_opt.has_value()); EXPECT_TRUE(chunk_opt->data.is_beginning); EXPECT_TRUE(chunk_opt->data.is_end); } -TEST_F(RRSendQueueTest, CarveOutBeginningMiddleAndEnd) { +TEST(RRSendQueueTest, CarveOutBeginningMiddleAndEnd) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); std::vector payload(60); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - absl::optional chunk_beg = - buf_.Produce(kNow, /*max_size=*/20); + std::optional chunk_beg = + q.Produce(kNow, /*max_size=*/20); ASSERT_TRUE(chunk_beg.has_value()); EXPECT_TRUE(chunk_beg->data.is_beginning); EXPECT_FALSE(chunk_beg->data.is_end); - absl::optional chunk_mid = - buf_.Produce(kNow, /*max_size=*/20); + std::optional chunk_mid = + q.Produce(kNow, /*max_size=*/20); ASSERT_TRUE(chunk_mid.has_value()); EXPECT_FALSE(chunk_mid->data.is_beginning); EXPECT_FALSE(chunk_mid->data.is_end); - absl::optional chunk_end = - buf_.Produce(kNow, /*max_size=*/20); + std::optional chunk_end = + q.Produce(kNow, /*max_size=*/20); ASSERT_TRUE(chunk_end.has_value()); EXPECT_FALSE(chunk_end->data.is_beginning); EXPECT_TRUE(chunk_end->data.is_end); - EXPECT_FALSE(buf_.Produce(kNow, 
kOneFragmentPacketSize).has_value()); + EXPECT_FALSE(q.Produce(kNow, kOneFragmentPacketSize).has_value()); } -TEST_F(RRSendQueueTest, GetChunksFromTwoMessages) { +TEST(RRSendQueueTest, GetChunksFromTwoMessages) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); std::vector payload(60); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - buf_.Add(kNow, DcSctpMessage(StreamID(3), PPID(54), payload)); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + q.Add(kNow, DcSctpMessage(StreamID(3), PPID(54), payload)); - absl::optional chunk_one = - buf_.Produce(kNow, kOneFragmentPacketSize); + std::optional chunk_one = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_one.has_value()); EXPECT_EQ(chunk_one->data.stream_id, kStreamID); EXPECT_EQ(chunk_one->data.ppid, kPPID); EXPECT_TRUE(chunk_one->data.is_beginning); EXPECT_TRUE(chunk_one->data.is_end); - absl::optional chunk_two = - buf_.Produce(kNow, kOneFragmentPacketSize); + std::optional chunk_two = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_two.has_value()); EXPECT_EQ(chunk_two->data.stream_id, StreamID(3)); EXPECT_EQ(chunk_two->data.ppid, PPID(54)); @@ -119,748 +112,828 @@ TEST_F(RRSendQueueTest, GetChunksFromTwoMessages) { EXPECT_TRUE(chunk_two->data.is_end); } -TEST_F(RRSendQueueTest, BufferBecomesFullAndEmptied) { +TEST(RRSendQueueTest, BufferBecomesFullAndEmptied) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); std::vector payload(600); - EXPECT_FALSE(buf_.IsFull()); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - EXPECT_FALSE(buf_.IsFull()); - buf_.Add(kNow, DcSctpMessage(StreamID(3), PPID(54), payload)); - EXPECT_TRUE(buf_.IsFull()); + EXPECT_LT(q.total_buffered_amount(), 1000u); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + EXPECT_LT(q.total_buffered_amount(), 1000u); + q.Add(kNow, DcSctpMessage(StreamID(3), PPID(54), payload)); + EXPECT_GE(q.total_buffered_amount(), 1000u); // However, it's still possible to add messages. It's a soft limit, and it // might be necessary to forcefully add messages due to e.g. external // fragmentation. 
- buf_.Add(kNow, DcSctpMessage(StreamID(5), PPID(55), payload)); - EXPECT_TRUE(buf_.IsFull()); + q.Add(kNow, DcSctpMessage(StreamID(5), PPID(55), payload)); + EXPECT_GE(q.total_buffered_amount(), 1000u); - absl::optional chunk_one = buf_.Produce(kNow, 1000); + std::optional chunk_one = q.Produce(kNow, 1000); ASSERT_TRUE(chunk_one.has_value()); EXPECT_EQ(chunk_one->data.stream_id, kStreamID); EXPECT_EQ(chunk_one->data.ppid, kPPID); - EXPECT_TRUE(buf_.IsFull()); + EXPECT_GE(q.total_buffered_amount(), 1000u); - absl::optional chunk_two = buf_.Produce(kNow, 1000); + std::optional chunk_two = q.Produce(kNow, 1000); ASSERT_TRUE(chunk_two.has_value()); EXPECT_EQ(chunk_two->data.stream_id, StreamID(3)); EXPECT_EQ(chunk_two->data.ppid, PPID(54)); - EXPECT_FALSE(buf_.IsFull()); - EXPECT_FALSE(buf_.IsEmpty()); + EXPECT_LT(q.total_buffered_amount(), 1000u); + EXPECT_FALSE(q.IsEmpty()); - absl::optional chunk_three = buf_.Produce(kNow, 1000); + std::optional chunk_three = q.Produce(kNow, 1000); ASSERT_TRUE(chunk_three.has_value()); EXPECT_EQ(chunk_three->data.stream_id, StreamID(5)); EXPECT_EQ(chunk_three->data.ppid, PPID(55)); - EXPECT_FALSE(buf_.IsFull()); - EXPECT_TRUE(buf_.IsEmpty()); + EXPECT_LT(q.total_buffered_amount(), 1000u); + EXPECT_TRUE(q.IsEmpty()); } -TEST_F(RRSendQueueTest, DefaultsToOrderedSend) { +TEST(RRSendQueueTest, DefaultsToOrderedSend) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); std::vector payload(20); // Default is ordered - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - absl::optional chunk_one = - buf_.Produce(kNow, kOneFragmentPacketSize); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + std::optional chunk_one = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_one.has_value()); EXPECT_FALSE(chunk_one->data.is_unordered); // Explicitly unordered. 
SendOptions opts; opts.unordered = IsUnordered(true); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload), opts); - absl::optional chunk_two = - buf_.Produce(kNow, kOneFragmentPacketSize); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload), opts); + std::optional chunk_two = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_two.has_value()); EXPECT_TRUE(chunk_two->data.is_unordered); } -TEST_F(RRSendQueueTest, ProduceWithLifetimeExpiry) { +TEST(RRSendQueueTest, ProduceWithLifetimeExpiry) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); std::vector payload(20); // Default is no expiry - TimeMs now = kNow; - buf_.Add(now, DcSctpMessage(kStreamID, kPPID, payload)); - now += DurationMs(1000000); - ASSERT_TRUE(buf_.Produce(now, kOneFragmentPacketSize)); + Timestamp now = kNow; + q.Add(now, DcSctpMessage(kStreamID, kPPID, payload)); + now += TimeDelta::Seconds(1000); + ASSERT_TRUE(q.Produce(now, kOneFragmentPacketSize)); SendOptions expires_2_seconds; expires_2_seconds.lifetime = DurationMs(2000); // Add and consume within lifetime - buf_.Add(now, DcSctpMessage(kStreamID, kPPID, payload), expires_2_seconds); - now += DurationMs(2000); - ASSERT_TRUE(buf_.Produce(now, kOneFragmentPacketSize)); + q.Add(now, DcSctpMessage(kStreamID, kPPID, payload), expires_2_seconds); + now += TimeDelta::Millis(2000); + ASSERT_TRUE(q.Produce(now, kOneFragmentPacketSize)); // Add and consume just outside lifetime - buf_.Add(now, DcSctpMessage(kStreamID, kPPID, payload), expires_2_seconds); - now += DurationMs(2001); - ASSERT_FALSE(buf_.Produce(now, kOneFragmentPacketSize)); + q.Add(now, DcSctpMessage(kStreamID, kPPID, payload), expires_2_seconds); + now += TimeDelta::Millis(2001); + ASSERT_FALSE(q.Produce(now, kOneFragmentPacketSize)); // A long time after expiry - buf_.Add(now, DcSctpMessage(kStreamID, kPPID, payload), expires_2_seconds); - now += DurationMs(1000000); - ASSERT_FALSE(buf_.Produce(now, kOneFragmentPacketSize)); + q.Add(now, DcSctpMessage(kStreamID, kPPID, payload), expires_2_seconds); + now += TimeDelta::Seconds(1000); + ASSERT_FALSE(q.Produce(now, kOneFragmentPacketSize)); // Expire one message, but produce the second that is not expired. 
- buf_.Add(now, DcSctpMessage(kStreamID, kPPID, payload), expires_2_seconds); + q.Add(now, DcSctpMessage(kStreamID, kPPID, payload), expires_2_seconds); SendOptions expires_4_seconds; expires_4_seconds.lifetime = DurationMs(4000); - buf_.Add(now, DcSctpMessage(kStreamID, kPPID, payload), expires_4_seconds); - now += DurationMs(2001); + q.Add(now, DcSctpMessage(kStreamID, kPPID, payload), expires_4_seconds); + now += TimeDelta::Millis(2001); - ASSERT_TRUE(buf_.Produce(now, kOneFragmentPacketSize)); - ASSERT_FALSE(buf_.Produce(now, kOneFragmentPacketSize)); + ASSERT_TRUE(q.Produce(now, kOneFragmentPacketSize)); + ASSERT_FALSE(q.Produce(now, kOneFragmentPacketSize)); } -TEST_F(RRSendQueueTest, DiscardPartialPackets) { +TEST(RRSendQueueTest, DiscardPartialPackets) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); std::vector payload(120); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - buf_.Add(kNow, DcSctpMessage(StreamID(2), PPID(54), payload)); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + q.Add(kNow, DcSctpMessage(StreamID(2), PPID(54), payload)); - absl::optional chunk_one = - buf_.Produce(kNow, kOneFragmentPacketSize); + std::optional chunk_one = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_one.has_value()); EXPECT_FALSE(chunk_one->data.is_end); EXPECT_EQ(chunk_one->data.stream_id, kStreamID); - buf_.Discard(IsUnordered(false), chunk_one->data.stream_id, - chunk_one->data.message_id); + q.Discard(chunk_one->data.stream_id, chunk_one->message_id); - absl::optional chunk_two = - buf_.Produce(kNow, kOneFragmentPacketSize); + std::optional chunk_two = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_two.has_value()); EXPECT_FALSE(chunk_two->data.is_end); EXPECT_EQ(chunk_two->data.stream_id, StreamID(2)); - absl::optional chunk_three = - buf_.Produce(kNow, kOneFragmentPacketSize); + std::optional chunk_three = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_three.has_value()); EXPECT_TRUE(chunk_three->data.is_end); EXPECT_EQ(chunk_three->data.stream_id, StreamID(2)); - ASSERT_FALSE(buf_.Produce(kNow, kOneFragmentPacketSize)); + ASSERT_FALSE(q.Produce(kNow, kOneFragmentPacketSize)); // Calling it again shouldn't cause issues. 
- buf_.Discard(IsUnordered(false), chunk_one->data.stream_id, - chunk_one->data.message_id); - ASSERT_FALSE(buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Discard(chunk_one->data.stream_id, chunk_one->message_id); + ASSERT_FALSE(q.Produce(kNow, kOneFragmentPacketSize)); } -TEST_F(RRSendQueueTest, PrepareResetStreamsDiscardsStream) { - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, {1, 2, 3})); - buf_.Add(kNow, DcSctpMessage(StreamID(2), PPID(54), {1, 2, 3, 4, 5})); - EXPECT_EQ(buf_.total_buffered_amount(), 8u); +TEST(RRSendQueueTest, PrepareResetStreamsDiscardsStream) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, {1, 2, 3})); + q.Add(kNow, DcSctpMessage(StreamID(2), PPID(54), {1, 2, 3, 4, 5})); + EXPECT_EQ(q.total_buffered_amount(), 8u); - buf_.PrepareResetStream(StreamID(1)); - EXPECT_EQ(buf_.total_buffered_amount(), 5u); + q.PrepareResetStream(StreamID(1)); + EXPECT_EQ(q.total_buffered_amount(), 5u); - EXPECT_THAT(buf_.GetStreamsReadyToBeReset(), - UnorderedElementsAre(StreamID(1))); - buf_.CommitResetStreams(); - buf_.PrepareResetStream(StreamID(2)); - EXPECT_EQ(buf_.total_buffered_amount(), 0u); + EXPECT_THAT(q.GetStreamsReadyToBeReset(), UnorderedElementsAre(StreamID(1))); + q.CommitResetStreams(); + q.PrepareResetStream(StreamID(2)); + EXPECT_EQ(q.total_buffered_amount(), 0u); } -TEST_F(RRSendQueueTest, PrepareResetStreamsNotPartialPackets) { +TEST(RRSendQueueTest, PrepareResetStreamsNotPartialPackets) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); std::vector payload(120); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - absl::optional chunk_one = buf_.Produce(kNow, 50); + std::optional chunk_one = q.Produce(kNow, 50); ASSERT_TRUE(chunk_one.has_value()); EXPECT_EQ(chunk_one->data.stream_id, kStreamID); - EXPECT_EQ(buf_.total_buffered_amount(), 2 * payload.size() - 50); + EXPECT_EQ(q.total_buffered_amount(), 2 * payload.size() - 50); - buf_.PrepareResetStream(StreamID(1)); - EXPECT_EQ(buf_.total_buffered_amount(), payload.size() - 50); + q.PrepareResetStream(StreamID(1)); + EXPECT_EQ(q.total_buffered_amount(), payload.size() - 50); } -TEST_F(RRSendQueueTest, EnqueuedItemsArePausedDuringStreamReset) { +TEST(RRSendQueueTest, EnqueuedItemsArePausedDuringStreamReset) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); std::vector payload(50); - buf_.PrepareResetStream(StreamID(1)); - EXPECT_EQ(buf_.total_buffered_amount(), 0u); + q.PrepareResetStream(StreamID(1)); + EXPECT_EQ(q.total_buffered_amount(), 0u); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - EXPECT_EQ(buf_.total_buffered_amount(), payload.size()); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + EXPECT_EQ(q.total_buffered_amount(), payload.size()); - EXPECT_FALSE(buf_.Produce(kNow, kOneFragmentPacketSize).has_value()); + EXPECT_FALSE(q.Produce(kNow, kOneFragmentPacketSize).has_value()); - EXPECT_TRUE(buf_.HasStreamsReadyToBeReset()); - EXPECT_THAT(buf_.GetStreamsReadyToBeReset(), - UnorderedElementsAre(StreamID(1))); + EXPECT_TRUE(q.HasStreamsReadyToBeReset()); + EXPECT_THAT(q.GetStreamsReadyToBeReset(), UnorderedElementsAre(StreamID(1))); - EXPECT_FALSE(buf_.Produce(kNow, 
kOneFragmentPacketSize).has_value()); + EXPECT_FALSE(q.Produce(kNow, kOneFragmentPacketSize).has_value()); - buf_.CommitResetStreams(); - EXPECT_EQ(buf_.total_buffered_amount(), payload.size()); + q.CommitResetStreams(); + EXPECT_EQ(q.total_buffered_amount(), payload.size()); - absl::optional chunk_one = buf_.Produce(kNow, 50); + std::optional chunk_one = q.Produce(kNow, 50); ASSERT_TRUE(chunk_one.has_value()); EXPECT_EQ(chunk_one->data.stream_id, kStreamID); - EXPECT_EQ(buf_.total_buffered_amount(), 0u); + EXPECT_EQ(q.total_buffered_amount(), 0u); } -TEST_F(RRSendQueueTest, PausedStreamsStillSendPartialMessagesUntilEnd) { +TEST(RRSendQueueTest, PausedStreamsStillSendPartialMessagesUntilEnd) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); constexpr size_t kPayloadSize = 100; constexpr size_t kFragmentSize = 50; std::vector payload(kPayloadSize); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - absl::optional chunk_one = - buf_.Produce(kNow, kFragmentSize); + std::optional chunk_one = + q.Produce(kNow, kFragmentSize); ASSERT_TRUE(chunk_one.has_value()); EXPECT_EQ(chunk_one->data.stream_id, kStreamID); - EXPECT_EQ(buf_.total_buffered_amount(), 2 * kPayloadSize - kFragmentSize); + EXPECT_EQ(q.total_buffered_amount(), 2 * kPayloadSize - kFragmentSize); // This will stop the second message from being sent. - buf_.PrepareResetStream(StreamID(1)); - EXPECT_EQ(buf_.total_buffered_amount(), 1 * kPayloadSize - kFragmentSize); + q.PrepareResetStream(StreamID(1)); + EXPECT_EQ(q.total_buffered_amount(), 1 * kPayloadSize - kFragmentSize); // Should still produce fragments until end of message. - absl::optional chunk_two = - buf_.Produce(kNow, kFragmentSize); + std::optional chunk_two = + q.Produce(kNow, kFragmentSize); ASSERT_TRUE(chunk_two.has_value()); EXPECT_EQ(chunk_two->data.stream_id, kStreamID); - EXPECT_EQ(buf_.total_buffered_amount(), 0ul); + EXPECT_EQ(q.total_buffered_amount(), 0ul); // But shouldn't produce any more messages as the stream is paused. 
- EXPECT_FALSE(buf_.Produce(kNow, kFragmentSize).has_value()); + EXPECT_FALSE(q.Produce(kNow, kFragmentSize).has_value()); } -TEST_F(RRSendQueueTest, CommittingResetsSSN) { +TEST(RRSendQueueTest, CommittingResetsSSN) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); std::vector payload(50); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - absl::optional chunk_one = - buf_.Produce(kNow, kOneFragmentPacketSize); + std::optional chunk_one = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_one.has_value()); EXPECT_EQ(chunk_one->data.ssn, SSN(0)); - absl::optional chunk_two = - buf_.Produce(kNow, kOneFragmentPacketSize); + std::optional chunk_two = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_two.has_value()); EXPECT_EQ(chunk_two->data.ssn, SSN(1)); - buf_.PrepareResetStream(StreamID(1)); + q.PrepareResetStream(StreamID(1)); // Buffered - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - EXPECT_TRUE(buf_.HasStreamsReadyToBeReset()); - EXPECT_THAT(buf_.GetStreamsReadyToBeReset(), - UnorderedElementsAre(StreamID(1))); - buf_.CommitResetStreams(); + EXPECT_TRUE(q.HasStreamsReadyToBeReset()); + EXPECT_THAT(q.GetStreamsReadyToBeReset(), UnorderedElementsAre(StreamID(1))); + q.CommitResetStreams(); - absl::optional chunk_three = - buf_.Produce(kNow, kOneFragmentPacketSize); + std::optional chunk_three = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_three.has_value()); EXPECT_EQ(chunk_three->data.ssn, SSN(0)); } -TEST_F(RRSendQueueTest, CommittingResetsSSNForPausedStreamsOnly) { +TEST(RRSendQueueTest, CommittingDoesNotResetMessageId) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + std::vector payload(50); + + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1, + q.Produce(kNow, kOneFragmentPacketSize)); + EXPECT_EQ(chunk1.data.ssn, SSN(0)); + EXPECT_EQ(chunk1.message_id, OutgoingMessageId(0)); + + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2, + q.Produce(kNow, kOneFragmentPacketSize)); + EXPECT_EQ(chunk2.data.ssn, SSN(1)); + EXPECT_EQ(chunk2.message_id, OutgoingMessageId(1)); + + q.PrepareResetStream(kStreamID); + EXPECT_THAT(q.GetStreamsReadyToBeReset(), UnorderedElementsAre(kStreamID)); + q.CommitResetStreams(); + + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk3, + q.Produce(kNow, kOneFragmentPacketSize)); + EXPECT_EQ(chunk3.data.ssn, SSN(0)); + EXPECT_EQ(chunk3.message_id, OutgoingMessageId(2)); +} + +TEST(RRSendQueueTest, CommittingResetsSSNForPausedStreamsOnly) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); std::vector payload(50); - buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, payload)); - buf_.Add(kNow, DcSctpMessage(StreamID(3), kPPID, payload)); + q.Add(kNow, DcSctpMessage(StreamID(1), kPPID, payload)); + q.Add(kNow, DcSctpMessage(StreamID(3), kPPID, payload)); - absl::optional chunk_one = - buf_.Produce(kNow, kOneFragmentPacketSize); + std::optional chunk_one = + q.Produce(kNow, kOneFragmentPacketSize); 
ASSERT_TRUE(chunk_one.has_value()); EXPECT_EQ(chunk_one->data.stream_id, StreamID(1)); EXPECT_EQ(chunk_one->data.ssn, SSN(0)); - absl::optional chunk_two = - buf_.Produce(kNow, kOneFragmentPacketSize); + std::optional chunk_two = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_two.has_value()); EXPECT_EQ(chunk_two->data.stream_id, StreamID(3)); EXPECT_EQ(chunk_two->data.ssn, SSN(0)); - buf_.PrepareResetStream(StreamID(3)); + q.PrepareResetStream(StreamID(3)); // Send two more messages - SID 3 will buffer, SID 1 will send. - buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, payload)); - buf_.Add(kNow, DcSctpMessage(StreamID(3), kPPID, payload)); + q.Add(kNow, DcSctpMessage(StreamID(1), kPPID, payload)); + q.Add(kNow, DcSctpMessage(StreamID(3), kPPID, payload)); - EXPECT_TRUE(buf_.HasStreamsReadyToBeReset()); - EXPECT_THAT(buf_.GetStreamsReadyToBeReset(), - UnorderedElementsAre(StreamID(3))); + EXPECT_TRUE(q.HasStreamsReadyToBeReset()); + EXPECT_THAT(q.GetStreamsReadyToBeReset(), UnorderedElementsAre(StreamID(3))); - buf_.CommitResetStreams(); + q.CommitResetStreams(); - absl::optional chunk_three = - buf_.Produce(kNow, kOneFragmentPacketSize); + std::optional chunk_three = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_three.has_value()); EXPECT_EQ(chunk_three->data.stream_id, StreamID(1)); EXPECT_EQ(chunk_three->data.ssn, SSN(1)); - absl::optional chunk_four = - buf_.Produce(kNow, kOneFragmentPacketSize); + std::optional chunk_four = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_four.has_value()); EXPECT_EQ(chunk_four->data.stream_id, StreamID(3)); EXPECT_EQ(chunk_four->data.ssn, SSN(0)); } -TEST_F(RRSendQueueTest, RollBackResumesSSN) { +TEST(RRSendQueueTest, RollBackResumesSSN) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); std::vector payload(50); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - absl::optional chunk_one = - buf_.Produce(kNow, kOneFragmentPacketSize); + std::optional chunk_one = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_one.has_value()); EXPECT_EQ(chunk_one->data.ssn, SSN(0)); - absl::optional chunk_two = - buf_.Produce(kNow, kOneFragmentPacketSize); + std::optional chunk_two = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_two.has_value()); EXPECT_EQ(chunk_two->data.ssn, SSN(1)); - buf_.PrepareResetStream(StreamID(1)); + q.PrepareResetStream(StreamID(1)); // Buffered - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - EXPECT_TRUE(buf_.HasStreamsReadyToBeReset()); - EXPECT_THAT(buf_.GetStreamsReadyToBeReset(), - UnorderedElementsAre(StreamID(1))); - buf_.RollbackResetStreams(); + EXPECT_TRUE(q.HasStreamsReadyToBeReset()); + EXPECT_THAT(q.GetStreamsReadyToBeReset(), UnorderedElementsAre(StreamID(1))); + q.RollbackResetStreams(); - absl::optional chunk_three = - buf_.Produce(kNow, kOneFragmentPacketSize); + std::optional chunk_three = + q.Produce(kNow, kOneFragmentPacketSize); ASSERT_TRUE(chunk_three.has_value()); EXPECT_EQ(chunk_three->data.ssn, SSN(2)); } -TEST_F(RRSendQueueTest, ReturnsFragmentsForOneMessageBeforeMovingToNext) { +TEST(RRSendQueueTest, ReturnsFragmentsForOneMessageBeforeMovingToNext) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, 
kBufferedAmountLowThreshold); std::vector payload(200); - buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, payload)); - buf_.Add(kNow, DcSctpMessage(StreamID(2), kPPID, payload)); + q.Add(kNow, DcSctpMessage(StreamID(1), kPPID, payload)); + q.Add(kNow, DcSctpMessage(StreamID(2), kPPID, payload)); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk1.data.stream_id, StreamID(1)); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk2.data.stream_id, StreamID(1)); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk3, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk3.data.stream_id, StreamID(2)); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk4, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk4.data.stream_id, StreamID(2)); } -TEST_F(RRSendQueueTest, ReturnsAlsoSmallFragmentsBeforeMovingToNext) { +TEST(RRSendQueueTest, ReturnsAlsoSmallFragmentsBeforeMovingToNext) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); std::vector payload(kTwoFragmentPacketSize); - buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, payload)); - buf_.Add(kNow, DcSctpMessage(StreamID(2), kPPID, payload)); + q.Add(kNow, DcSctpMessage(StreamID(1), kPPID, payload)); + q.Add(kNow, DcSctpMessage(StreamID(2), kPPID, payload)); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk1.data.stream_id, StreamID(1)); EXPECT_THAT(chunk1.data.payload, SizeIs(kOneFragmentPacketSize)); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk2.data.stream_id, StreamID(1)); EXPECT_THAT(chunk2.data.payload, SizeIs(kTwoFragmentPacketSize - kOneFragmentPacketSize)); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk3, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk3.data.stream_id, StreamID(2)); EXPECT_THAT(chunk3.data.payload, SizeIs(kOneFragmentPacketSize)); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk4, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk4.data.stream_id, StreamID(2)); EXPECT_THAT(chunk4.data.payload, SizeIs(kTwoFragmentPacketSize - kOneFragmentPacketSize)); } -TEST_F(RRSendQueueTest, WillCycleInRoundRobinFashionBetweenStreams) { - buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(1))); - buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(2))); - buf_.Add(kNow, DcSctpMessage(StreamID(2), kPPID, std::vector(3))); - buf_.Add(kNow, DcSctpMessage(StreamID(2), kPPID, std::vector(4))); - buf_.Add(kNow, DcSctpMessage(StreamID(3), kPPID, std::vector(5))); - buf_.Add(kNow, DcSctpMessage(StreamID(3), kPPID, std::vector(6))); - buf_.Add(kNow, DcSctpMessage(StreamID(4), kPPID, std::vector(7))); - buf_.Add(kNow, DcSctpMessage(StreamID(4), kPPID, std::vector(8))); +TEST(RRSendQueueTest, WillCycleInRoundRobinFashionBetweenStreams) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + q.Add(kNow, DcSctpMessage(StreamID(1), 
kPPID, std::vector(1))); + q.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(2))); + q.Add(kNow, DcSctpMessage(StreamID(2), kPPID, std::vector(3))); + q.Add(kNow, DcSctpMessage(StreamID(2), kPPID, std::vector(4))); + q.Add(kNow, DcSctpMessage(StreamID(3), kPPID, std::vector(5))); + q.Add(kNow, DcSctpMessage(StreamID(3), kPPID, std::vector(6))); + q.Add(kNow, DcSctpMessage(StreamID(4), kPPID, std::vector(7))); + q.Add(kNow, DcSctpMessage(StreamID(4), kPPID, std::vector(8))); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk1.data.stream_id, StreamID(1)); EXPECT_THAT(chunk1.data.payload, SizeIs(1)); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk2.data.stream_id, StreamID(2)); EXPECT_THAT(chunk2.data.payload, SizeIs(3)); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk3, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk3.data.stream_id, StreamID(3)); EXPECT_THAT(chunk3.data.payload, SizeIs(5)); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk4, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk4.data.stream_id, StreamID(4)); EXPECT_THAT(chunk4.data.payload, SizeIs(7)); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk5, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk5.data.stream_id, StreamID(1)); EXPECT_THAT(chunk5.data.payload, SizeIs(2)); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk6, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk6.data.stream_id, StreamID(2)); EXPECT_THAT(chunk6.data.payload, SizeIs(4)); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk7, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk7.data.stream_id, StreamID(3)); EXPECT_THAT(chunk7.data.payload, SizeIs(6)); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk8, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk8.data.stream_id, StreamID(4)); EXPECT_THAT(chunk8.data.payload, SizeIs(8)); } -TEST_F(RRSendQueueTest, DoesntTriggerOnBufferedAmountLowWhenSetToZero) { - EXPECT_CALL(callbacks_, OnBufferedAmountLow).Times(0); - buf_.SetBufferedAmountLowThreshold(StreamID(1), 0u); +TEST(RRSendQueueTest, DoesntTriggerOnBufferedAmountLowWhenSetToZero) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + EXPECT_CALL(cb, OnBufferedAmountLow).Times(0); + q.SetBufferedAmountLowThreshold(StreamID(1), 0u); } -TEST_F(RRSendQueueTest, TriggersOnBufferedAmountAtZeroLowWhenSent) { - buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(1))); - EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 1u); +TEST(RRSendQueueTest, TriggersOnBufferedAmountAtZeroLowWhenSent) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + q.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(1))); + EXPECT_EQ(q.buffered_amount(StreamID(1)), 1u); - EXPECT_CALL(callbacks_, OnBufferedAmountLow(StreamID(1))); + EXPECT_CALL(cb, OnBufferedAmountLow(StreamID(1))); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1, - buf_.Produce(kNow, 
kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk1.data.stream_id, StreamID(1)); EXPECT_THAT(chunk1.data.payload, SizeIs(1)); - EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 0u); + EXPECT_EQ(q.buffered_amount(StreamID(1)), 0u); } -TEST_F(RRSendQueueTest, WillRetriggerOnBufferedAmountLowIfAddingMore) { - buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(1))); +TEST(RRSendQueueTest, WillRetriggerOnBufferedAmountLowIfAddingMore) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + q.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(1))); - EXPECT_CALL(callbacks_, OnBufferedAmountLow(StreamID(1))); + EXPECT_CALL(cb, OnBufferedAmountLow(StreamID(1))); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk1.data.stream_id, StreamID(1)); EXPECT_THAT(chunk1.data.payload, SizeIs(1)); - EXPECT_CALL(callbacks_, OnBufferedAmountLow).Times(0); + EXPECT_CALL(cb, OnBufferedAmountLow).Times(0); - buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(1))); - EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 1u); + q.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(1))); + EXPECT_EQ(q.buffered_amount(StreamID(1)), 1u); // Should now trigger again, as buffer_amount went above the threshold. - EXPECT_CALL(callbacks_, OnBufferedAmountLow(StreamID(1))); + EXPECT_CALL(cb, OnBufferedAmountLow(StreamID(1))); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk2.data.stream_id, StreamID(1)); EXPECT_THAT(chunk2.data.payload, SizeIs(1)); } -TEST_F(RRSendQueueTest, OnlyTriggersWhenTransitioningFromAboveToBelowOrEqual) { - buf_.SetBufferedAmountLowThreshold(StreamID(1), 1000); +TEST(RRSendQueueTest, OnlyTriggersWhenTransitioningFromAboveToBelowOrEqual) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + q.SetBufferedAmountLowThreshold(StreamID(1), 1000); - buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(10))); - EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 10u); + q.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(10))); + EXPECT_EQ(q.buffered_amount(StreamID(1)), 10u); - EXPECT_CALL(callbacks_, OnBufferedAmountLow).Times(0); + EXPECT_CALL(cb, OnBufferedAmountLow).Times(0); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk1.data.stream_id, StreamID(1)); EXPECT_THAT(chunk1.data.payload, SizeIs(10)); - EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 0u); + EXPECT_EQ(q.buffered_amount(StreamID(1)), 0u); - buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(20))); - EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 20u); + q.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(20))); + EXPECT_EQ(q.buffered_amount(StreamID(1)), 20u); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk2.data.stream_id, StreamID(1)); EXPECT_THAT(chunk2.data.payload, SizeIs(20)); - EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 0u); + EXPECT_EQ(q.buffered_amount(StreamID(1)), 0u); } -TEST_F(RRSendQueueTest, WillTriggerOnBufferedAmountLowSetAboveZero) { - EXPECT_CALL(callbacks_, OnBufferedAmountLow).Times(0); 
+TEST(RRSendQueueTest, WillTriggerOnBufferedAmountLowSetAboveZero) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + EXPECT_CALL(cb, OnBufferedAmountLow).Times(0); - buf_.SetBufferedAmountLowThreshold(StreamID(1), 700); + q.SetBufferedAmountLowThreshold(StreamID(1), 700); std::vector payload(1000); - buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, payload)); + q.Add(kNow, DcSctpMessage(StreamID(1), kPPID, payload)); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk1.data.stream_id, StreamID(1)); EXPECT_THAT(chunk1.data.payload, SizeIs(kOneFragmentPacketSize)); - EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 900u); + EXPECT_EQ(q.buffered_amount(StreamID(1)), 900u); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk2.data.stream_id, StreamID(1)); EXPECT_THAT(chunk2.data.payload, SizeIs(kOneFragmentPacketSize)); - EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 800u); + EXPECT_EQ(q.buffered_amount(StreamID(1)), 800u); - EXPECT_CALL(callbacks_, OnBufferedAmountLow(StreamID(1))); + EXPECT_CALL(cb, OnBufferedAmountLow(StreamID(1))); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk3, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk3.data.stream_id, StreamID(1)); EXPECT_THAT(chunk3.data.payload, SizeIs(kOneFragmentPacketSize)); - EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 700u); + EXPECT_EQ(q.buffered_amount(StreamID(1)), 700u); // Doesn't trigger when reducing even further. - EXPECT_CALL(callbacks_, OnBufferedAmountLow).Times(0); + EXPECT_CALL(cb, OnBufferedAmountLow).Times(0); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk4, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk3.data.stream_id, StreamID(1)); EXPECT_THAT(chunk3.data.payload, SizeIs(kOneFragmentPacketSize)); - EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 600u); + EXPECT_EQ(q.buffered_amount(StreamID(1)), 600u); } -TEST_F(RRSendQueueTest, WillRetriggerOnBufferedAmountLowSetAboveZero) { - EXPECT_CALL(callbacks_, OnBufferedAmountLow).Times(0); +TEST(RRSendQueueTest, WillRetriggerOnBufferedAmountLowSetAboveZero) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + EXPECT_CALL(cb, OnBufferedAmountLow).Times(0); - buf_.SetBufferedAmountLowThreshold(StreamID(1), 700); + q.SetBufferedAmountLowThreshold(StreamID(1), 700); - buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(1000))); + q.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(1000))); - EXPECT_CALL(callbacks_, OnBufferedAmountLow(StreamID(1))); + EXPECT_CALL(cb, OnBufferedAmountLow(StreamID(1))); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1, - buf_.Produce(kNow, 400)); + q.Produce(kNow, 400)); EXPECT_EQ(chunk1.data.stream_id, StreamID(1)); EXPECT_THAT(chunk1.data.payload, SizeIs(400)); - EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 600u); + EXPECT_EQ(q.buffered_amount(StreamID(1)), 600u); - EXPECT_CALL(callbacks_, OnBufferedAmountLow).Times(0); - buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(200))); - EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 800u); + EXPECT_CALL(cb, OnBufferedAmountLow).Times(0); + q.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(200))); + 
EXPECT_EQ(q.buffered_amount(StreamID(1)), 800u); // Will trigger again, as it went above the limit. - EXPECT_CALL(callbacks_, OnBufferedAmountLow(StreamID(1))); + EXPECT_CALL(cb, OnBufferedAmountLow(StreamID(1))); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2, - buf_.Produce(kNow, 200)); + q.Produce(kNow, 200)); EXPECT_EQ(chunk2.data.stream_id, StreamID(1)); EXPECT_THAT(chunk2.data.payload, SizeIs(200)); - EXPECT_EQ(buf_.buffered_amount(StreamID(1)), 600u); + EXPECT_EQ(q.buffered_amount(StreamID(1)), 600u); } -TEST_F(RRSendQueueTest, TriggersOnBufferedAmountLowOnThresholdChanged) { - EXPECT_CALL(callbacks_, OnBufferedAmountLow).Times(0); +TEST(RRSendQueueTest, TriggersOnBufferedAmountLowOnThresholdChanged) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + EXPECT_CALL(cb, OnBufferedAmountLow).Times(0); - buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(100))); + q.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(100))); // Modifying the threshold, still under buffered_amount, should not trigger. - buf_.SetBufferedAmountLowThreshold(StreamID(1), 50); - buf_.SetBufferedAmountLowThreshold(StreamID(1), 99); + q.SetBufferedAmountLowThreshold(StreamID(1), 50); + q.SetBufferedAmountLowThreshold(StreamID(1), 99); // When the threshold reaches buffered_amount, it will trigger. - EXPECT_CALL(callbacks_, OnBufferedAmountLow(StreamID(1))); - buf_.SetBufferedAmountLowThreshold(StreamID(1), 100); + EXPECT_CALL(cb, OnBufferedAmountLow(StreamID(1))); + q.SetBufferedAmountLowThreshold(StreamID(1), 100); // But not when it's set low again. - EXPECT_CALL(callbacks_, OnBufferedAmountLow).Times(0); - buf_.SetBufferedAmountLowThreshold(StreamID(1), 50); + EXPECT_CALL(cb, OnBufferedAmountLow).Times(0); + q.SetBufferedAmountLowThreshold(StreamID(1), 50); // But it will trigger when it overshoots. - EXPECT_CALL(callbacks_, OnBufferedAmountLow(StreamID(1))); - buf_.SetBufferedAmountLowThreshold(StreamID(1), 150); + EXPECT_CALL(cb, OnBufferedAmountLow(StreamID(1))); + q.SetBufferedAmountLowThreshold(StreamID(1), 150); // But not when it's set low again. - EXPECT_CALL(callbacks_, OnBufferedAmountLow).Times(0); - buf_.SetBufferedAmountLowThreshold(StreamID(1), 0); + EXPECT_CALL(cb, OnBufferedAmountLow).Times(0); + q.SetBufferedAmountLowThreshold(StreamID(1), 0); } -TEST_F(RRSendQueueTest, - OnTotalBufferedAmountLowDoesNotTriggerOnBufferFillingUp) { - EXPECT_CALL(callbacks_, OnTotalBufferedAmountLow).Times(0); +TEST(RRSendQueueTest, OnTotalBufferedAmountLowDoesNotTriggerOnBufferFillingUp) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + EXPECT_CALL(cb, OnTotalBufferedAmountLow).Times(0); std::vector payload(kBufferedAmountLowThreshold - 1); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - EXPECT_EQ(buf_.total_buffered_amount(), payload.size()); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + EXPECT_EQ(q.total_buffered_amount(), payload.size()); // Will not trigger if going above but never below. 
- buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, - std::vector(kOneFragmentPacketSize))); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, + std::vector(kOneFragmentPacketSize))); } -TEST_F(RRSendQueueTest, TriggersOnTotalBufferedAmountLowWhenCrossing) { - EXPECT_CALL(callbacks_, OnTotalBufferedAmountLow).Times(0); +TEST(RRSendQueueTest, TriggersOnTotalBufferedAmountLowWhenCrossing) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + EXPECT_CALL(cb, OnTotalBufferedAmountLow).Times(0); std::vector payload(kBufferedAmountLowThreshold); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); - EXPECT_EQ(buf_.total_buffered_amount(), payload.size()); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload)); + EXPECT_EQ(q.total_buffered_amount(), payload.size()); // Reaches it. - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, std::vector(1))); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, std::vector(1))); // Drain it a bit - will trigger. - EXPECT_CALL(callbacks_, OnTotalBufferedAmountLow).Times(1); - absl::optional chunk_two = - buf_.Produce(kNow, kOneFragmentPacketSize); + EXPECT_CALL(cb, OnTotalBufferedAmountLow).Times(1); + std::optional chunk_two = + q.Produce(kNow, kOneFragmentPacketSize); } -TEST_F(RRSendQueueTest, WillStayInAStreamAsLongAsThatMessageIsSending) { - buf_.Add(kNow, DcSctpMessage(StreamID(5), kPPID, std::vector(1))); +TEST(RRSendQueueTest, WillStayInAStreamAsLongAsThatMessageIsSending) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + q.Add(kNow, DcSctpMessage(StreamID(5), kPPID, std::vector(1))); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk1, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk1.data.stream_id, StreamID(5)); EXPECT_THAT(chunk1.data.payload, SizeIs(1)); // Next, it should pick a different stream. - buf_.Add(kNow, - DcSctpMessage(StreamID(1), kPPID, - std::vector(kOneFragmentPacketSize * 2))); + q.Add(kNow, DcSctpMessage(StreamID(1), kPPID, + std::vector(kOneFragmentPacketSize * 2))); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk2, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk2.data.stream_id, StreamID(1)); EXPECT_THAT(chunk2.data.payload, SizeIs(kOneFragmentPacketSize)); // It should still stay on the Stream1 now, even if might be tempted to switch // to this stream, as it's the stream following 5. - buf_.Add(kNow, DcSctpMessage(StreamID(6), kPPID, std::vector(1))); + q.Add(kNow, DcSctpMessage(StreamID(6), kPPID, std::vector(1))); ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk3, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk3.data.stream_id, StreamID(1)); EXPECT_THAT(chunk3.data.payload, SizeIs(kOneFragmentPacketSize)); // After stream id 1 is complete, it's time to do stream 6. 
ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk4, - buf_.Produce(kNow, kOneFragmentPacketSize)); + q.Produce(kNow, kOneFragmentPacketSize)); EXPECT_EQ(chunk4.data.stream_id, StreamID(6)); EXPECT_THAT(chunk4.data.payload, SizeIs(1)); - EXPECT_FALSE(buf_.Produce(kNow, kOneFragmentPacketSize).has_value()); + EXPECT_FALSE(q.Produce(kNow, kOneFragmentPacketSize).has_value()); } -TEST_F(RRSendQueueTest, StreamsHaveInitialPriority) { - EXPECT_EQ(buf_.GetStreamPriority(StreamID(1)), kDefaultPriority); +TEST(RRSendQueueTest, StreamsHaveInitialPriority) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + EXPECT_EQ(q.GetStreamPriority(StreamID(1)), kDefaultPriority); - buf_.Add(kNow, DcSctpMessage(StreamID(2), kPPID, std::vector(40))); - EXPECT_EQ(buf_.GetStreamPriority(StreamID(2)), kDefaultPriority); + q.Add(kNow, DcSctpMessage(StreamID(2), kPPID, std::vector(40))); + EXPECT_EQ(q.GetStreamPriority(StreamID(2)), kDefaultPriority); } -TEST_F(RRSendQueueTest, CanChangeStreamPriority) { - buf_.SetStreamPriority(StreamID(1), StreamPriority(42)); - EXPECT_EQ(buf_.GetStreamPriority(StreamID(1)), StreamPriority(42)); +TEST(RRSendQueueTest, CanChangeStreamPriority) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + q.SetStreamPriority(StreamID(1), StreamPriority(42)); + EXPECT_EQ(q.GetStreamPriority(StreamID(1)), StreamPriority(42)); - buf_.Add(kNow, DcSctpMessage(StreamID(2), kPPID, std::vector(40))); - buf_.SetStreamPriority(StreamID(2), StreamPriority(42)); - EXPECT_EQ(buf_.GetStreamPriority(StreamID(2)), StreamPriority(42)); + q.Add(kNow, DcSctpMessage(StreamID(2), kPPID, std::vector(40))); + q.SetStreamPriority(StreamID(2), StreamPriority(42)); + EXPECT_EQ(q.GetStreamPriority(StreamID(2)), StreamPriority(42)); } -TEST_F(RRSendQueueTest, WillHandoverPriority) { - buf_.SetStreamPriority(StreamID(1), StreamPriority(42)); +TEST(RRSendQueueTest, WillHandoverPriority) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + q.SetStreamPriority(StreamID(1), StreamPriority(42)); - buf_.Add(kNow, DcSctpMessage(StreamID(2), kPPID, std::vector(40))); - buf_.SetStreamPriority(StreamID(2), StreamPriority(42)); + q.Add(kNow, DcSctpMessage(StreamID(2), kPPID, std::vector(40))); + q.SetStreamPriority(StreamID(2), StreamPriority(42)); DcSctpSocketHandoverState state; - buf_.AddHandoverState(state); + q.AddHandoverState(state); - RRSendQueue q2("log: ", &callbacks_, kMaxQueueSize, kMtu, kDefaultPriority, + RRSendQueue q2("log: ", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); q2.RestoreFromState(state); EXPECT_EQ(q2.GetStreamPriority(StreamID(1)), StreamPriority(42)); EXPECT_EQ(q2.GetStreamPriority(StreamID(2)), StreamPriority(42)); } -TEST_F(RRSendQueueTest, WillSendMessagesByPrio) { - buf_.EnableMessageInterleaving(true); - buf_.SetStreamPriority(StreamID(1), StreamPriority(10)); - buf_.SetStreamPriority(StreamID(2), StreamPriority(20)); - buf_.SetStreamPriority(StreamID(3), StreamPriority(30)); +TEST(RRSendQueueTest, WillSendMessagesByPrio) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); + q.EnableMessageInterleaving(true); + q.SetStreamPriority(StreamID(1), StreamPriority(10)); + q.SetStreamPriority(StreamID(2), StreamPriority(20)); + q.SetStreamPriority(StreamID(3), StreamPriority(30)); - buf_.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(40))); - buf_.Add(kNow, 
DcSctpMessage(StreamID(2), kPPID, std::vector(20))); - buf_.Add(kNow, DcSctpMessage(StreamID(3), kPPID, std::vector(10))); + q.Add(kNow, DcSctpMessage(StreamID(1), kPPID, std::vector(40))); + q.Add(kNow, DcSctpMessage(StreamID(2), kPPID, std::vector(20))); + q.Add(kNow, DcSctpMessage(StreamID(3), kPPID, std::vector(10))); std::vector expected_streams = {3, 2, 2, 1, 1, 1, 1}; for (uint16_t stream_num : expected_streams) { ASSERT_HAS_VALUE_AND_ASSIGN(SendQueue::DataToSend chunk, - buf_.Produce(kNow, 10)); + q.Produce(kNow, 10)); EXPECT_EQ(chunk.data.stream_id, StreamID(stream_num)); } - EXPECT_FALSE(buf_.Produce(kNow, 1).has_value()); + EXPECT_FALSE(q.Produce(kNow, 1).has_value()); } -TEST_F(RRSendQueueTest, WillSendLifecycleExpireWhenExpiredInSendQueue) { +TEST(RRSendQueueTest, WillSendLifecycleExpireWhenExpiredInSendQueue) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); std::vector payload(kOneFragmentPacketSize); - buf_.Add(kNow, DcSctpMessage(StreamID(2), kPPID, payload), - SendOptions{.lifetime = DurationMs(1000), - .lifecycle_id = LifecycleId(1)}); - - EXPECT_CALL(callbacks_, OnLifecycleMessageExpired(LifecycleId(1), - /*maybe_delivered=*/false)); - EXPECT_CALL(callbacks_, OnLifecycleEnd(LifecycleId(1))); - EXPECT_FALSE(buf_.Produce(kNow + DurationMs(1001), kOneFragmentPacketSize) + q.Add(kNow, DcSctpMessage(StreamID(2), kPPID, payload), + SendOptions{.lifetime = DurationMs(1000), + .lifecycle_id = LifecycleId(1)}); + + EXPECT_CALL(cb, OnLifecycleMessageExpired(LifecycleId(1), + /*maybe_delivered=*/false)); + EXPECT_CALL(cb, OnLifecycleEnd(LifecycleId(1))); + EXPECT_FALSE(q.Produce(kNow + TimeDelta::Millis(1001), kOneFragmentPacketSize) .has_value()); } -TEST_F(RRSendQueueTest, WillSendLifecycleExpireWhenDiscardingDuringPause) { +TEST(RRSendQueueTest, WillSendLifecycleExpireWhenDiscardingDuringPause) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); std::vector payload(120); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload), - SendOptions{.lifecycle_id = LifecycleId(1)}); - buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload), - SendOptions{.lifecycle_id = LifecycleId(2)}); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload), + SendOptions{.lifecycle_id = LifecycleId(1)}); + q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload), + SendOptions{.lifecycle_id = LifecycleId(2)}); - absl::optional chunk_one = buf_.Produce(kNow, 50); + std::optional chunk_one = q.Produce(kNow, 50); ASSERT_TRUE(chunk_one.has_value()); EXPECT_EQ(chunk_one->data.stream_id, kStreamID); - EXPECT_EQ(buf_.total_buffered_amount(), 2 * payload.size() - 50); + EXPECT_EQ(q.total_buffered_amount(), 2 * payload.size() - 50); - EXPECT_CALL(callbacks_, OnLifecycleMessageExpired(LifecycleId(2), - /*maybe_delivered=*/false)); - EXPECT_CALL(callbacks_, OnLifecycleEnd(LifecycleId(2))); - buf_.PrepareResetStream(StreamID(1)); - EXPECT_EQ(buf_.total_buffered_amount(), payload.size() - 50); + EXPECT_CALL(cb, OnLifecycleMessageExpired(LifecycleId(2), + /*maybe_delivered=*/false)); + EXPECT_CALL(cb, OnLifecycleEnd(LifecycleId(2))); + q.PrepareResetStream(StreamID(1)); + EXPECT_EQ(q.total_buffered_amount(), payload.size() - 50); } -TEST_F(RRSendQueueTest, WillSendLifecycleExpireWhenDiscardingExplicitly) { +TEST(RRSendQueueTest, WillSendLifecycleExpireWhenDiscardingExplicitly) { + testing::NiceMock cb; + RRSendQueue q("", &cb, kMtu, kDefaultPriority, kBufferedAmountLowThreshold); std::vector 
payload(kOneFragmentPacketSize + 20);
-  buf_.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload),
-           SendOptions{.lifecycle_id = LifecycleId(1)});
+  q.Add(kNow, DcSctpMessage(kStreamID, kPPID, payload),
+        SendOptions{.lifecycle_id = LifecycleId(1)});

-  absl::optional<SendQueue::DataToSend> chunk_one =
-      buf_.Produce(kNow, kOneFragmentPacketSize);
+  std::optional<SendQueue::DataToSend> chunk_one =
+      q.Produce(kNow, kOneFragmentPacketSize);
   ASSERT_TRUE(chunk_one.has_value());
   EXPECT_FALSE(chunk_one->data.is_end);
   EXPECT_EQ(chunk_one->data.stream_id, kStreamID);

-  EXPECT_CALL(callbacks_, OnLifecycleMessageExpired(LifecycleId(1),
-                                                    /*maybe_delivered=*/false));
-  EXPECT_CALL(callbacks_, OnLifecycleEnd(LifecycleId(1)));
-  buf_.Discard(IsUnordered(false), chunk_one->data.stream_id,
-               chunk_one->data.message_id);
+  EXPECT_CALL(cb, OnLifecycleMessageExpired(LifecycleId(1),
+                                            /*maybe_delivered=*/false));
+  EXPECT_CALL(cb, OnLifecycleEnd(LifecycleId(1)));
+  q.Discard(chunk_one->data.stream_id, chunk_one->message_id);
 }
 }  // namespace
 }  // namespace dcsctp
diff --git a/net/dcsctp/tx/send_queue.h b/net/dcsctp/tx/send_queue.h
index 0b96e9041a..5f7ca2a758 100644
--- a/net/dcsctp/tx/send_queue.h
+++ b/net/dcsctp/tx/send_queue.h
@@ -12,11 +12,12 @@
 #include
 #include
+#include <optional>
 #include
 #include

-#include "absl/types/optional.h"
 #include "api/array_view.h"
+#include "api/units/timestamp.h"
 #include "net/dcsctp/common/internal_types.h"
 #include "net/dcsctp/packet/data.h"
 #include "net/dcsctp/public/types.h"
@@ -27,13 +28,17 @@ class SendQueue {
  public:
   // Container for a data chunk that is produced by the SendQueue
   struct DataToSend {
-    explicit DataToSend(Data data) : data(std::move(data)) {}
+    DataToSend(OutgoingMessageId message_id, Data data)
+        : message_id(message_id), data(std::move(data)) {}
+
+    OutgoingMessageId message_id;
+
     // The data to send, including all parameters.
     Data data;

     // Partial reliability - RFC3758
     MaxRetransmits max_retransmissions = MaxRetransmits::NoLimit();
-    TimeMs expires_at = TimeMs::InfiniteFuture();
+    webrtc::Timestamp expires_at = webrtc::Timestamp::PlusInfinity();

     // Lifecycle - set for the last fragment, and `LifecycleId::NotSet()` for
     // all other fragments.
@@ -51,9 +56,10 @@ class SendQueue {
   //
   // `max_size` refers to how many payload bytes that may be produced, not
   // including any headers.
-  virtual absl::optional<DataToSend> Produce(TimeMs now, size_t max_size) = 0;
+  virtual std::optional<DataToSend> Produce(webrtc::Timestamp now,
+                                            size_t max_size) = 0;

-  // Discards a partially sent message identified by the parameters `unordered`,
+  // Discards a partially sent message identified by the parameters
   // `stream_id` and `message_id`. The `message_id` comes from the returned
   // information when having called `Produce`. A partially sent message means
   // that it has had at least one fragment of it returned when `Produce` was
@@ -67,9 +73,7 @@ class SendQueue {
   //
   // This function returns true if this message had unsent fragments still in
   // the queue that were discarded, and false if there were no such fragments.
-  virtual bool Discard(IsUnordered unordered,
-                       StreamID stream_id,
-                       MID message_id) = 0;
+  virtual bool Discard(StreamID stream_id, OutgoingMessageId message_id) = 0;

   // Prepares the stream to be reset. This is used to close a WebRTC data
   // channel and will be signaled to the other side.
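Illustration (not part of the patch; the stream, message ids, PPID and payload below are placeholders): every DataToSend is now constructed with its OutgoingMessageId, and the expiry field uses webrtc::Timestamp rather than TimeMs. A rough sketch of how an implementation of Produce() might build its return value:

    // A single, self-contained chunk for one short message on stream 1.
    SendQueue::DataToSend chunk(
        OutgoingMessageId(0),
        Data(StreamID(1), SSN(0), MID(0), FSN(0), PPID(53),
             std::vector<uint8_t>(4), Data::IsBeginning(true),
             Data::IsEnd(true), IsUnordered(false)));
    // Field defaults shown in the struct above; override per message as needed.
    chunk.max_retransmissions = MaxRetransmits::NoLimit();
    chunk.expires_at = webrtc::Timestamp::PlusInfinity();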
diff --git a/net/dcsctp/tx/stream_scheduler.cc b/net/dcsctp/tx/stream_scheduler.cc index c1d220aaa2..ef3c52c560 100644 --- a/net/dcsctp/tx/stream_scheduler.cc +++ b/net/dcsctp/tx/stream_scheduler.cc @@ -10,11 +10,10 @@ #include "net/dcsctp/tx/stream_scheduler.h" #include +#include #include "absl/algorithm/container.h" -#include "absl/types/optional.h" #include "api/array_view.h" -#include "net/dcsctp/common/str_join.h" #include "net/dcsctp/packet/data.h" #include "net/dcsctp/public/dcsctp_message.h" #include "net/dcsctp/public/dcsctp_socket.h" @@ -22,6 +21,7 @@ #include "net/dcsctp/tx/send_queue.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/strings/str_join.h" namespace dcsctp { @@ -30,8 +30,8 @@ void StreamScheduler::Stream::SetPriority(StreamPriority priority) { inverse_weight_ = InverseWeight(priority); } -absl::optional StreamScheduler::Produce( - TimeMs now, +std::optional StreamScheduler::Produce( + webrtc::Timestamp now, size_t max_size) { // For non-interleaved streams, avoid rescheduling while still sending a // message as it needs to be sent in full. For interleaved messaging, @@ -42,15 +42,16 @@ absl::optional StreamScheduler::Produce( RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Producing data, rescheduling=" << rescheduling << ", active=" - << StrJoin(active_streams_, ", ", - [&](rtc::StringBuilder& sb, const auto& p) { - sb << *p->stream_id() << "@" - << *p->next_finish_time(); - }); + << webrtc::StrJoin( + active_streams_, ", ", + [&](webrtc::StringBuilder& sb, const auto& p) { + sb << *p->stream_id() << "@" + << *p->next_finish_time(); + }); RTC_DCHECK(rescheduling || current_stream_ != nullptr); - absl::optional data; + std::optional data; while (!data.has_value() && !active_streams_.empty()) { if (rescheduling) { auto it = active_streams_.begin(); @@ -77,7 +78,7 @@ absl::optional StreamScheduler::Produce( << "There is no stream with data; Can't produce any data."; RTC_DCHECK(IsConsistent()); - return absl::nullopt; + return std::nullopt; } RTC_DCHECK(data->data.stream_id == current_stream_->stream_id()); @@ -126,10 +127,10 @@ StreamScheduler::VirtualTime StreamScheduler::Stream::CalculateFinishTime( return VirtualTime(*current_virtual_time_ + 1); } -absl::optional StreamScheduler::Stream::Produce( - TimeMs now, +std::optional StreamScheduler::Stream::Produce( + webrtc::Timestamp now, size_t max_size) { - absl::optional data = producer_.Produce(now, max_size); + std::optional data = producer_.Produce(now, max_size); if (data.has_value()) { VirtualTime new_current = CalculateFinishTime(data->data.payload.size()); diff --git a/net/dcsctp/tx/stream_scheduler.h b/net/dcsctp/tx/stream_scheduler.h index ce836a5826..f5377fa7ed 100644 --- a/net/dcsctp/tx/stream_scheduler.h +++ b/net/dcsctp/tx/stream_scheduler.h @@ -15,6 +15,7 @@ #include #include #include +#include #include #include #include @@ -23,7 +24,6 @@ #include "absl/algorithm/container.h" #include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "net/dcsctp/packet/chunk/idata_chunk.h" #include "net/dcsctp/packet/sctp_packet.h" @@ -86,9 +86,9 @@ class StreamScheduler { // as `now` and should be used to skip chunks with expired limited lifetime. // The parameter `max_size` specifies the maximum amount of actual payload // that may be returned. If these constraints prevents the stream from - // sending some data, `absl::nullopt` should be returned. 
-    virtual absl::optional<SendQueue::DataToSend> Produce(TimeMs now,
-                                                          size_t max_size) = 0;
+    // sending some data, `std::nullopt` should be returned.
+    virtual std::optional<SendQueue::DataToSend> Produce(webrtc::Timestamp now,
+                                                         size_t max_size) = 0;

     // Returns the number of payload bytes that is scheduled to be sent in the
     // next enqueued message, or zero if there are no enqueued messages or if
@@ -132,7 +132,8 @@ class StreamScheduler {
     // Produces a message from this stream. This will only be called on streams
     // that have data.
-    absl::optional<SendQueue::DataToSend> Produce(TimeMs now, size_t max_size);
+    std::optional<SendQueue::DataToSend> Produce(webrtc::Timestamp now,
+                                                 size_t max_size);

     void MakeActive(size_t bytes_to_send_next);
     void ForceMarkInactive();
@@ -179,8 +180,9 @@ class StreamScheduler {
  // Produces a fragment of data to send. The current wall time is specified as
  // `now` and will be used to skip chunks with expired limited lifetime. The
  // parameter `max_size` specifies the maximum amount of actual payload that
-  // may be returned. If no data can be produced, `absl::nullopt` is returned.
-  absl::optional<SendQueue::DataToSend> Produce(TimeMs now, size_t max_size);
+  // may be returned. If no data can be produced, `std::nullopt` is returned.
+  std::optional<SendQueue::DataToSend> Produce(webrtc::Timestamp now,
+                                               size_t max_size);

  std::set<StreamID> ActiveStreamsForTesting() const;
diff --git a/net/dcsctp/tx/stream_scheduler_test.cc b/net/dcsctp/tx/stream_scheduler_test.cc
index 20833371c1..024e7430c4 100644
--- a/net/dcsctp/tx/stream_scheduler_test.cc
+++ b/net/dcsctp/tx/stream_scheduler_test.cc
@@ -19,9 +19,11 @@ namespace dcsctp {
 namespace {
 using ::testing::Return;
 using ::testing::StrictMock;
+using ::webrtc::Timestamp;

 constexpr size_t kMtu = 1000;
 constexpr size_t kPayloadSize = 4;
+constexpr Timestamp kNow = Timestamp::Zero();

 MATCHER_P(HasDataWithMid, mid, "") {
   if (!arg.has_value()) {
@@ -29,8 +31,8 @@ MATCHER_P(HasDataWithMid, mid, "") {
     return false;
   }

-  if (arg->data.message_id != mid) {
-    *result_listener << "the produced data had mid " << *arg->data.message_id
+  if (arg->data.mid != mid) {
+    *result_listener << "the produced data had mid " << *arg->data.mid
                      << " and not the expected " << *mid;
     return false;
   }
@@ -38,12 +40,18 @@
   return true;
 }

-std::function<absl::optional<SendQueue::DataToSend>(TimeMs, size_t)>
-CreateChunk(StreamID sid, MID mid, size_t payload_size = kPayloadSize) {
-  return [sid, mid, payload_size](TimeMs now, size_t max_size) {
-    return SendQueue::DataToSend(Data(
-        sid, SSN(0), mid, FSN(0), PPID(42), std::vector<uint8_t>(payload_size),
-        Data::IsBeginning(true), Data::IsEnd(true), IsUnordered(true)));
+std::function<std::optional<SendQueue::DataToSend>(Timestamp, size_t)>
+CreateChunk(OutgoingMessageId message_id,
+            StreamID sid,
+            MID mid,
+            size_t payload_size = kPayloadSize) {
+  return [sid, mid, payload_size, message_id](Timestamp /* now */,
+                                              size_t /* max_size */) {
+    return SendQueue::DataToSend(
+        message_id,
+        Data(sid, SSN(0), mid, FSN(0), PPID(42),
+             std::vector<uint8_t>(payload_size), Data::IsBeginning(true),
+             Data::IsEnd(true), IsUnordered(true)));
   };
 }

@@ -51,8 +59,7 @@
 std::map<StreamID, size_t> GetPacketCounts(StreamScheduler& scheduler,
                                            size_t packets_to_generate) {
   std::map<StreamID, size_t> packet_counts;
   for (size_t i = 0; i < packets_to_generate; ++i) {
-    absl::optional<SendQueue::DataToSend> data =
-        scheduler.Produce(TimeMs(0), kMtu);
+    std::optional<SendQueue::DataToSend> data = scheduler.Produce(kNow, kMtu);
     if (data.has_value()) {
       ++packet_counts[data->data.stream_id];
     }
@@ -62,9 +69,9 @@
 class MockStreamProducer : public StreamScheduler::StreamProducer {
  public:
-  MOCK_METHOD(absl::optional<SendQueue::DataToSend>,
+  MOCK_METHOD(std::optional<SendQueue::DataToSend>,
               Produce,
- (TimeMs, size_t), + (Timestamp, size_t), (override)); MOCK_METHOD(size_t, bytes_to_send_in_next_message, (), (const, override)); }; @@ -76,7 +83,8 @@ class TestStream { StreamPriority priority, size_t packet_size = kPayloadSize) { EXPECT_CALL(producer_, Produce) - .WillRepeatedly(CreateChunk(stream_id, MID(0), packet_size)); + .WillRepeatedly( + CreateChunk(OutgoingMessageId(0), stream_id, MID(0), packet_size)); EXPECT_CALL(producer_, bytes_to_send_in_next_message) .WillRepeatedly(Return(packet_size)); stream_ = scheduler.CreateStream(&producer_, stream_id, priority); @@ -94,7 +102,7 @@ class TestStream { TEST(StreamSchedulerTest, HasNoActiveStreams) { StreamScheduler scheduler("", kMtu); - EXPECT_EQ(scheduler.Produce(TimeMs(0), kMtu), absl::nullopt); + EXPECT_EQ(scheduler.Produce(kNow, kMtu), std::nullopt); } // Stream properties can be set and retrieved @@ -117,7 +125,8 @@ TEST(StreamSchedulerTest, CanProduceFromSingleStream) { StreamScheduler scheduler("", kMtu); StrictMock<MockStreamProducer> producer; - EXPECT_CALL(producer, Produce).WillOnce(CreateChunk(StreamID(1), MID(0))); + EXPECT_CALL(producer, Produce) + .WillOnce(CreateChunk(OutgoingMessageId(0), StreamID(1), MID(0))); EXPECT_CALL(producer, bytes_to_send_in_next_message) .WillOnce(Return(kPayloadSize)) // When making active .WillOnce(Return(0)); @@ -125,8 +134,8 @@ scheduler.CreateStream(&producer, StreamID(1), StreamPriority(2)); stream->MaybeMakeActive(); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(0))); - EXPECT_EQ(scheduler.Produce(TimeMs(0), kMtu), absl::nullopt); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(0))); + EXPECT_EQ(scheduler.Produce(kNow, kMtu), std::nullopt); } // Switches between two streams after every packet.
@@ -135,9 +144,9 @@ TEST(StreamSchedulerTest, WillRoundRobinBetweenStreams) { StrictMock<MockStreamProducer> producer1; EXPECT_CALL(producer1, Produce) - .WillOnce(CreateChunk(StreamID(1), MID(100))) - .WillOnce(CreateChunk(StreamID(1), MID(101))) - .WillOnce(CreateChunk(StreamID(1), MID(102))); + .WillOnce(CreateChunk(OutgoingMessageId(0), StreamID(1), MID(100))) + .WillOnce(CreateChunk(OutgoingMessageId(1), StreamID(1), MID(101))) + .WillOnce(CreateChunk(OutgoingMessageId(2), StreamID(1), MID(102))); EXPECT_CALL(producer1, bytes_to_send_in_next_message) .WillOnce(Return(kPayloadSize)) // When making active .WillOnce(Return(kPayloadSize)) @@ -149,9 +158,9 @@ TEST(StreamSchedulerTest, WillRoundRobinBetweenStreams) { StrictMock<MockStreamProducer> producer2; EXPECT_CALL(producer2, Produce) - .WillOnce(CreateChunk(StreamID(2), MID(200))) - .WillOnce(CreateChunk(StreamID(2), MID(201))) - .WillOnce(CreateChunk(StreamID(2), MID(202))); + .WillOnce(CreateChunk(OutgoingMessageId(4), StreamID(2), MID(200))) + .WillOnce(CreateChunk(OutgoingMessageId(5), StreamID(2), MID(201))) + .WillOnce(CreateChunk(OutgoingMessageId(6), StreamID(2), MID(202))); EXPECT_CALL(producer2, bytes_to_send_in_next_message) .WillOnce(Return(kPayloadSize)) // When making active .WillOnce(Return(kPayloadSize)) @@ -161,13 +170,13 @@ scheduler.CreateStream(&producer2, StreamID(2), StreamPriority(2)); stream2->MaybeMakeActive(); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(100))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(200))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(101))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(201))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(102))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(202))); - EXPECT_EQ(scheduler.Produce(TimeMs(0), kMtu), absl::nullopt); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(100))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(200))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(101))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(201))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(102))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(202))); + EXPECT_EQ(scheduler.Produce(kNow, kMtu), std::nullopt); } // Switches between two streams after every packet, but keeps producing from the @@ -177,26 +186,29 @@ TEST(StreamSchedulerTest, WillRoundRobinOnlyWhenFinishedProducingChunk) { StrictMock<MockStreamProducer> producer1; EXPECT_CALL(producer1, Produce) - .WillOnce(CreateChunk(StreamID(1), MID(100))) + .WillOnce(CreateChunk(OutgoingMessageId(0), StreamID(1), MID(100))) .WillOnce([](...) { return SendQueue::DataToSend( + OutgoingMessageId(1), Data(StreamID(1), SSN(0), MID(101), FSN(0), PPID(42), std::vector<uint8_t>(4), Data::IsBeginning(true), Data::IsEnd(false), IsUnordered(true))); }) .WillOnce([](...) { return SendQueue::DataToSend( + OutgoingMessageId(1), Data(StreamID(1), SSN(0), MID(101), FSN(0), PPID(42), std::vector<uint8_t>(4), Data::IsBeginning(false), Data::IsEnd(false), IsUnordered(true))); }) .WillOnce([](...)
{ return SendQueue::DataToSend( + OutgoingMessageId(1), Data(StreamID(1), SSN(0), MID(101), FSN(0), PPID(42), std::vector<uint8_t>(4), Data::IsBeginning(false), Data::IsEnd(true), IsUnordered(true))); }) - .WillOnce(CreateChunk(StreamID(1), MID(102))); + .WillOnce(CreateChunk(OutgoingMessageId(2), StreamID(1), MID(102))); EXPECT_CALL(producer1, bytes_to_send_in_next_message) .WillOnce(Return(kPayloadSize)) // When making active .WillOnce(Return(kPayloadSize)) @@ -210,9 +222,9 @@ TEST(StreamSchedulerTest, WillRoundRobinOnlyWhenFinishedProducingChunk) { StrictMock<MockStreamProducer> producer2; EXPECT_CALL(producer2, Produce) - .WillOnce(CreateChunk(StreamID(2), MID(200))) - .WillOnce(CreateChunk(StreamID(2), MID(201))) - .WillOnce(CreateChunk(StreamID(2), MID(202))); + .WillOnce(CreateChunk(OutgoingMessageId(3), StreamID(2), MID(200))) + .WillOnce(CreateChunk(OutgoingMessageId(4), StreamID(2), MID(201))) + .WillOnce(CreateChunk(OutgoingMessageId(5), StreamID(2), MID(202))); EXPECT_CALL(producer2, bytes_to_send_in_next_message) .WillOnce(Return(kPayloadSize)) // When making active .WillOnce(Return(kPayloadSize)) @@ -222,15 +234,15 @@ scheduler.CreateStream(&producer2, StreamID(2), StreamPriority(2)); stream2->MaybeMakeActive(); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(100))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(200))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(101))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(101))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(101))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(201))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(102))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(202))); - EXPECT_EQ(scheduler.Produce(TimeMs(0), kMtu), absl::nullopt); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(100))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(200))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(101))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(101))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(101))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(201))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(102))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(202))); + EXPECT_EQ(scheduler.Produce(kNow, kMtu), std::nullopt); } // Deactivates a stream before it has finished producing all packets.
@@ -239,8 +251,8 @@ TEST(StreamSchedulerTest, StreamsCanBeMadeInactive) { StrictMock producer1; EXPECT_CALL(producer1, Produce) - .WillOnce(CreateChunk(StreamID(1), MID(100))) - .WillOnce(CreateChunk(StreamID(1), MID(101))); + .WillOnce(CreateChunk(OutgoingMessageId(0), StreamID(1), MID(100))) + .WillOnce(CreateChunk(OutgoingMessageId(1), StreamID(1), MID(101))); EXPECT_CALL(producer1, bytes_to_send_in_next_message) .WillOnce(Return(kPayloadSize)) // When making active .WillOnce(Return(kPayloadSize)) @@ -249,12 +261,12 @@ TEST(StreamSchedulerTest, StreamsCanBeMadeInactive) { scheduler.CreateStream(&producer1, StreamID(1), StreamPriority(2)); stream1->MaybeMakeActive(); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(100))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(101))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(100))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(101))); // ... but the stream is made inactive before it can be produced. stream1->MakeInactive(); - EXPECT_EQ(scheduler.Produce(TimeMs(0), kMtu), absl::nullopt); + EXPECT_EQ(scheduler.Produce(kNow, kMtu), std::nullopt); } // Resumes a paused stream - makes a stream active after inactivating it. @@ -264,9 +276,9 @@ TEST(StreamSchedulerTest, SingleStreamCanBeResumed) { StrictMock producer1; // Callbacks are setup so that they hint that there is a MID(2) coming... EXPECT_CALL(producer1, Produce) - .WillOnce(CreateChunk(StreamID(1), MID(100))) - .WillOnce(CreateChunk(StreamID(1), MID(101))) - .WillOnce(CreateChunk(StreamID(1), MID(102))); + .WillOnce(CreateChunk(OutgoingMessageId(0), StreamID(1), MID(100))) + .WillOnce(CreateChunk(OutgoingMessageId(1), StreamID(1), MID(101))) + .WillOnce(CreateChunk(OutgoingMessageId(2), StreamID(1), MID(102))); EXPECT_CALL(producer1, bytes_to_send_in_next_message) .WillOnce(Return(kPayloadSize)) // When making active .WillOnce(Return(kPayloadSize)) @@ -277,14 +289,14 @@ TEST(StreamSchedulerTest, SingleStreamCanBeResumed) { scheduler.CreateStream(&producer1, StreamID(1), StreamPriority(2)); stream1->MaybeMakeActive(); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(100))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(101))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(100))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(101))); stream1->MakeInactive(); - EXPECT_EQ(scheduler.Produce(TimeMs(0), kMtu), absl::nullopt); + EXPECT_EQ(scheduler.Produce(kNow, kMtu), std::nullopt); stream1->MaybeMakeActive(); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(102))); - EXPECT_EQ(scheduler.Produce(TimeMs(0), kMtu), absl::nullopt); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(102))); + EXPECT_EQ(scheduler.Produce(kNow, kMtu), std::nullopt); } // Iterates between streams, where one is suddenly paused and later resumed. 
@@ -293,9 +305,9 @@ TEST(StreamSchedulerTest, WillRoundRobinWithPausedStream) { StrictMock producer1; EXPECT_CALL(producer1, Produce) - .WillOnce(CreateChunk(StreamID(1), MID(100))) - .WillOnce(CreateChunk(StreamID(1), MID(101))) - .WillOnce(CreateChunk(StreamID(1), MID(102))); + .WillOnce(CreateChunk(OutgoingMessageId(0), StreamID(1), MID(100))) + .WillOnce(CreateChunk(OutgoingMessageId(1), StreamID(1), MID(101))) + .WillOnce(CreateChunk(OutgoingMessageId(2), StreamID(1), MID(102))); EXPECT_CALL(producer1, bytes_to_send_in_next_message) .WillOnce(Return(kPayloadSize)) // When making active .WillOnce(Return(kPayloadSize)) @@ -308,9 +320,9 @@ TEST(StreamSchedulerTest, WillRoundRobinWithPausedStream) { StrictMock producer2; EXPECT_CALL(producer2, Produce) - .WillOnce(CreateChunk(StreamID(2), MID(200))) - .WillOnce(CreateChunk(StreamID(2), MID(201))) - .WillOnce(CreateChunk(StreamID(2), MID(202))); + .WillOnce(CreateChunk(OutgoingMessageId(3), StreamID(2), MID(200))) + .WillOnce(CreateChunk(OutgoingMessageId(4), StreamID(2), MID(201))) + .WillOnce(CreateChunk(OutgoingMessageId(5), StreamID(2), MID(202))); EXPECT_CALL(producer2, bytes_to_send_in_next_message) .WillOnce(Return(kPayloadSize)) // When making active .WillOnce(Return(kPayloadSize)) @@ -320,15 +332,15 @@ TEST(StreamSchedulerTest, WillRoundRobinWithPausedStream) { scheduler.CreateStream(&producer2, StreamID(2), StreamPriority(2)); stream2->MaybeMakeActive(); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(100))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(200))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(100))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(200))); stream1->MakeInactive(); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(201))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(202))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(201))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(202))); stream1->MaybeMakeActive(); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(101))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(102))); - EXPECT_EQ(scheduler.Produce(TimeMs(0), kMtu), absl::nullopt); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(101))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(102))); + EXPECT_EQ(scheduler.Produce(kNow, kMtu), std::nullopt); } // Verifies that packet counts are evenly distributed in round robin scheduling. 
@@ -384,9 +396,12 @@ TEST(StreamSchedulerTest, WillDoFairQueuingWithSamePriority) { StrictMock callback1; EXPECT_CALL(callback1, Produce) - .WillOnce(CreateChunk(StreamID(1), MID(100), kSmallPacket)) - .WillOnce(CreateChunk(StreamID(1), MID(101), kSmallPacket)) - .WillOnce(CreateChunk(StreamID(1), MID(102), kSmallPacket)); + .WillOnce(CreateChunk(OutgoingMessageId(0), StreamID(1), MID(100), + kSmallPacket)) + .WillOnce(CreateChunk(OutgoingMessageId(1), StreamID(1), MID(101), + kSmallPacket)) + .WillOnce(CreateChunk(OutgoingMessageId(2), StreamID(1), MID(102), + kSmallPacket)); EXPECT_CALL(callback1, bytes_to_send_in_next_message) .WillOnce(Return(kSmallPacket)) // When making active .WillOnce(Return(kSmallPacket)) @@ -398,9 +413,12 @@ TEST(StreamSchedulerTest, WillDoFairQueuingWithSamePriority) { StrictMock callback2; EXPECT_CALL(callback2, Produce) - .WillOnce(CreateChunk(StreamID(2), MID(200), kLargePacket)) - .WillOnce(CreateChunk(StreamID(2), MID(201), kLargePacket)) - .WillOnce(CreateChunk(StreamID(2), MID(202), kLargePacket)); + .WillOnce(CreateChunk(OutgoingMessageId(3), StreamID(2), MID(200), + kLargePacket)) + .WillOnce(CreateChunk(OutgoingMessageId(4), StreamID(2), MID(201), + kLargePacket)) + .WillOnce(CreateChunk(OutgoingMessageId(5), StreamID(2), MID(202), + kLargePacket)); EXPECT_CALL(callback2, bytes_to_send_in_next_message) .WillOnce(Return(kLargePacket)) // When making active .WillOnce(Return(kLargePacket)) @@ -411,18 +429,18 @@ TEST(StreamSchedulerTest, WillDoFairQueuingWithSamePriority) { stream2->MaybeMakeActive(); // t = 30 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(100))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(100))); // t = 60 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(101))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(101))); // t = 70 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(200))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(200))); // t = 90 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(102))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(102))); // t = 140 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(201))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(201))); // t = 210 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(202))); - EXPECT_EQ(scheduler.Produce(TimeMs(0), kMtu), absl::nullopt); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(202))); + EXPECT_EQ(scheduler.Produce(kNow, kMtu), std::nullopt); } // Will do weighted fair queuing with three streams having different priority. 
@@ -432,9 +450,9 @@ TEST(StreamSchedulerTest, WillDoWeightedFairQueuingSameSizeDifferentPriority) { StrictMock callback1; EXPECT_CALL(callback1, Produce) - .WillOnce(CreateChunk(StreamID(1), MID(100))) - .WillOnce(CreateChunk(StreamID(1), MID(101))) - .WillOnce(CreateChunk(StreamID(1), MID(102))); + .WillOnce(CreateChunk(OutgoingMessageId(0), StreamID(1), MID(100))) + .WillOnce(CreateChunk(OutgoingMessageId(1), StreamID(1), MID(101))) + .WillOnce(CreateChunk(OutgoingMessageId(2), StreamID(1), MID(102))); EXPECT_CALL(callback1, bytes_to_send_in_next_message) .WillOnce(Return(kPayloadSize)) // When making active .WillOnce(Return(kPayloadSize)) @@ -447,9 +465,9 @@ TEST(StreamSchedulerTest, WillDoWeightedFairQueuingSameSizeDifferentPriority) { StrictMock callback2; EXPECT_CALL(callback2, Produce) - .WillOnce(CreateChunk(StreamID(2), MID(200))) - .WillOnce(CreateChunk(StreamID(2), MID(201))) - .WillOnce(CreateChunk(StreamID(2), MID(202))); + .WillOnce(CreateChunk(OutgoingMessageId(3), StreamID(2), MID(200))) + .WillOnce(CreateChunk(OutgoingMessageId(4), StreamID(2), MID(201))) + .WillOnce(CreateChunk(OutgoingMessageId(5), StreamID(2), MID(202))); EXPECT_CALL(callback2, bytes_to_send_in_next_message) .WillOnce(Return(kPayloadSize)) // When making active .WillOnce(Return(kPayloadSize)) @@ -462,9 +480,9 @@ TEST(StreamSchedulerTest, WillDoWeightedFairQueuingSameSizeDifferentPriority) { StrictMock callback3; EXPECT_CALL(callback3, Produce) - .WillOnce(CreateChunk(StreamID(3), MID(300))) - .WillOnce(CreateChunk(StreamID(3), MID(301))) - .WillOnce(CreateChunk(StreamID(3), MID(302))); + .WillOnce(CreateChunk(OutgoingMessageId(6), StreamID(3), MID(300))) + .WillOnce(CreateChunk(OutgoingMessageId(7), StreamID(3), MID(301))) + .WillOnce(CreateChunk(OutgoingMessageId(8), StreamID(3), MID(302))); EXPECT_CALL(callback3, bytes_to_send_in_next_message) .WillOnce(Return(kPayloadSize)) // When making active .WillOnce(Return(kPayloadSize)) @@ -476,24 +494,24 @@ TEST(StreamSchedulerTest, WillDoWeightedFairQueuingSameSizeDifferentPriority) { stream3->MaybeMakeActive(); // t ~= 20 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(300))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(300))); // t ~= 40 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(301))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(301))); // t ~= 50 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(200))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(200))); // t ~= 60 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(302))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(302))); // t ~= 80 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(100))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(100))); // t ~= 100 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(201))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(201))); // t ~= 150 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(202))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(202))); // t ~= 160 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(101))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(101))); // t ~= 240 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(102))); - EXPECT_EQ(scheduler.Produce(TimeMs(0), kMtu), absl::nullopt); + EXPECT_THAT(scheduler.Produce(kNow, 
kMtu), HasDataWithMid(MID(102))); + EXPECT_EQ(scheduler.Produce(kNow, kMtu), std::nullopt); } // Will do weighted fair queuing with three streams having different priority @@ -510,11 +528,14 @@ TEST(StreamSchedulerTest, WillDoWeightedFairQueuingDifferentSizeAndPriority) { StrictMock callback1; EXPECT_CALL(callback1, Produce) // virtual finish time ~ 0 + 50 * 80 = 4000 - .WillOnce(CreateChunk(StreamID(1), MID(100), kMediumPacket)) + .WillOnce(CreateChunk(OutgoingMessageId(0), StreamID(1), MID(100), + kMediumPacket)) // virtual finish time ~ 4000 + 20 * 80 = 5600 - .WillOnce(CreateChunk(StreamID(1), MID(101), kSmallPacket)) + .WillOnce(CreateChunk(OutgoingMessageId(1), StreamID(1), MID(101), + kSmallPacket)) // virtual finish time ~ 5600 + 70 * 80 = 11200 - .WillOnce(CreateChunk(StreamID(1), MID(102), kLargePacket)); + .WillOnce(CreateChunk(OutgoingMessageId(2), StreamID(1), MID(102), + kLargePacket)); EXPECT_CALL(callback1, bytes_to_send_in_next_message) .WillOnce(Return(kMediumPacket)) // When making active .WillOnce(Return(kSmallPacket)) @@ -528,11 +549,14 @@ TEST(StreamSchedulerTest, WillDoWeightedFairQueuingDifferentSizeAndPriority) { StrictMock callback2; EXPECT_CALL(callback2, Produce) // virtual finish time ~ 0 + 50 * 50 = 2500 - .WillOnce(CreateChunk(StreamID(2), MID(200), kMediumPacket)) + .WillOnce(CreateChunk(OutgoingMessageId(3), StreamID(2), MID(200), + kMediumPacket)) // virtual finish time ~ 2500 + 70 * 50 = 6000 - .WillOnce(CreateChunk(StreamID(2), MID(201), kLargePacket)) + .WillOnce(CreateChunk(OutgoingMessageId(4), StreamID(2), MID(201), + kLargePacket)) // virtual finish time ~ 6000 + 20 * 50 = 7000 - .WillOnce(CreateChunk(StreamID(2), MID(202), kSmallPacket)); + .WillOnce(CreateChunk(OutgoingMessageId(5), StreamID(2), MID(202), + kSmallPacket)); EXPECT_CALL(callback2, bytes_to_send_in_next_message) .WillOnce(Return(kMediumPacket)) // When making active .WillOnce(Return(kLargePacket)) @@ -546,11 +570,14 @@ TEST(StreamSchedulerTest, WillDoWeightedFairQueuingDifferentSizeAndPriority) { StrictMock callback3; EXPECT_CALL(callback3, Produce) // virtual finish time ~ 0 + 20 * 20 = 400 - .WillOnce(CreateChunk(StreamID(3), MID(300), kSmallPacket)) + .WillOnce(CreateChunk(OutgoingMessageId(6), StreamID(3), MID(300), + kSmallPacket)) // virtual finish time ~ 400 + 50 * 20 = 1400 - .WillOnce(CreateChunk(StreamID(3), MID(301), kMediumPacket)) + .WillOnce(CreateChunk(OutgoingMessageId(7), StreamID(3), MID(301), + kMediumPacket)) // virtual finish time ~ 1400 + 70 * 20 = 2800 - .WillOnce(CreateChunk(StreamID(3), MID(302), kLargePacket)); + .WillOnce(CreateChunk(OutgoingMessageId(8), StreamID(3), MID(302), + kLargePacket)); EXPECT_CALL(callback3, bytes_to_send_in_next_message) .WillOnce(Return(kSmallPacket)) // When making active .WillOnce(Return(kMediumPacket)) @@ -561,24 +588,24 @@ TEST(StreamSchedulerTest, WillDoWeightedFairQueuingDifferentSizeAndPriority) { stream3->MaybeMakeActive(); // t ~= 400 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(300))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(300))); // t ~= 1400 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(301))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(301))); // t ~= 2500 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(200))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(200))); // t ~= 2800 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(302))); + 
EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(302))); // t ~= 4000 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(100))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(100))); // t ~= 5600 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(101))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(101))); // t ~= 6000 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(201))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(201))); // t ~= 7000 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(202))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(202))); // t ~= 11200 - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(102))); - EXPECT_EQ(scheduler.Produce(TimeMs(0), kMtu), absl::nullopt); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(102))); + EXPECT_EQ(scheduler.Produce(kNow, kMtu), std::nullopt); } TEST(StreamSchedulerTest, WillDistributeWFQPacketsInTwoStreamsByPriority) { // A simple test with two streams of different priority, but sending packets @@ -677,8 +704,8 @@ TEST(StreamSchedulerTest, SendLargeMessageWithSmallMtu) { StrictMock producer1; EXPECT_CALL(producer1, Produce) - .WillOnce(CreateChunk(StreamID(1), MID(0), 100)) - .WillOnce(CreateChunk(StreamID(1), MID(0), 100)); + .WillOnce(CreateChunk(OutgoingMessageId(0), StreamID(1), MID(0), 100)) + .WillOnce(CreateChunk(OutgoingMessageId(1), StreamID(1), MID(0), 100)); EXPECT_CALL(producer1, bytes_to_send_in_next_message) .WillOnce(Return(200)) // When making active .WillOnce(Return(100)) @@ -689,8 +716,8 @@ TEST(StreamSchedulerTest, SendLargeMessageWithSmallMtu) { StrictMock producer2; EXPECT_CALL(producer2, Produce) - .WillOnce(CreateChunk(StreamID(2), MID(1), 100)) - .WillOnce(CreateChunk(StreamID(2), MID(1), 50)); + .WillOnce(CreateChunk(OutgoingMessageId(2), StreamID(2), MID(1), 100)) + .WillOnce(CreateChunk(OutgoingMessageId(3), StreamID(2), MID(1), 50)); EXPECT_CALL(producer2, bytes_to_send_in_next_message) .WillOnce(Return(150)) // When making active .WillOnce(Return(50)) @@ -698,11 +725,11 @@ TEST(StreamSchedulerTest, SendLargeMessageWithSmallMtu) { auto stream2 = scheduler.CreateStream(&producer2, StreamID(2), StreamPriority(1)); stream2->MaybeMakeActive(); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(0))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(1))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(1))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(0))); - EXPECT_EQ(scheduler.Produce(TimeMs(0), kMtu), absl::nullopt); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(0))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(1))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(1))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(0))); + EXPECT_EQ(scheduler.Produce(kNow, kMtu), std::nullopt); } // Sending large messages with large MTU will not fragment messages and will @@ -714,7 +741,7 @@ TEST(StreamSchedulerTest, SendLargeMessageWithLargeMtu) { StrictMock producer1; EXPECT_CALL(producer1, Produce) - .WillOnce(CreateChunk(StreamID(1), MID(0), 200)); + .WillOnce(CreateChunk(OutgoingMessageId(0), StreamID(1), MID(0), 200)); EXPECT_CALL(producer1, bytes_to_send_in_next_message) .WillOnce(Return(200)) // When making active .WillOnce(Return(0)); @@ -724,16 +751,16 @@ 
TEST(StreamSchedulerTest, SendLargeMessageWithLargeMtu) { StrictMock producer2; EXPECT_CALL(producer2, Produce) - .WillOnce(CreateChunk(StreamID(2), MID(1), 150)); + .WillOnce(CreateChunk(OutgoingMessageId(1), StreamID(2), MID(1), 150)); EXPECT_CALL(producer2, bytes_to_send_in_next_message) .WillOnce(Return(150)) // When making active .WillOnce(Return(0)); auto stream2 = scheduler.CreateStream(&producer2, StreamID(2), StreamPriority(1)); stream2->MaybeMakeActive(); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(1))); - EXPECT_THAT(scheduler.Produce(TimeMs(0), kMtu), HasDataWithMid(MID(0))); - EXPECT_EQ(scheduler.Produce(TimeMs(0), kMtu), absl::nullopt); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(1))); + EXPECT_THAT(scheduler.Produce(kNow, kMtu), HasDataWithMid(MID(0))); + EXPECT_EQ(scheduler.Produce(kNow, kMtu), std::nullopt); } } // namespace diff --git a/p2p/BUILD.gn b/p2p/BUILD.gn index b8dc0b03ad..a34e893ef0 100644 --- a/p2p/BUILD.gn +++ b/p2p/BUILD.gn @@ -8,172 +8,979 @@ import("../webrtc.gni") -group("p2p") { +rtc_source_set("active_ice_controller_factory_interface") { + sources = [ "base/active_ice_controller_factory_interface.h" ] deps = [ - ":libstunprober", - ":rtc_p2p", + ":active_ice_controller_interface", + ":ice_agent_interface", + ":ice_controller_factory_interface", ] } -rtc_library("rtc_p2p") { - visibility = [ "*" ] +rtc_source_set("active_ice_controller_interface") { + sources = [ "base/active_ice_controller_interface.h" ] + deps = [ + ":connection", + ":ice_switch_reason", + ":ice_transport_internal", + ":transport_description", + "../api:array_view", + ] +} + +rtc_library("basic_async_resolver_factory") { sources = [ - "base/active_ice_controller_factory_interface.h", - "base/active_ice_controller_interface.h", - "base/async_stun_tcp_socket.cc", - "base/async_stun_tcp_socket.h", "base/basic_async_resolver_factory.cc", "base/basic_async_resolver_factory.h", + ] + deps = [ + "../api:async_dns_resolver", + "../rtc_base:async_dns_resolver", + "../rtc_base:logging", + "../rtc_base:socket_address", + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/memory", + ] +} + +rtc_library("async_stun_tcp_socket") { + sources = [ + "base/async_stun_tcp_socket.cc", + "base/async_stun_tcp_socket.h", + ] + deps = [ + "../api:array_view", + "../api/transport:stun_types", + "../api/units:timestamp", + "../rtc_base:async_packet_socket", + "../rtc_base:async_tcp_socket", + "../rtc_base:byte_order", + "../rtc_base:checks", + "../rtc_base:socket", + "../rtc_base:socket_address", + "../rtc_base:timeutils", + "../rtc_base/network:received_packet", + "../rtc_base/network:sent_packet", + ] +} + +rtc_library("basic_ice_controller") { + sources = [ "base/basic_ice_controller.cc", "base/basic_ice_controller.h", + ] + deps = [ + ":connection", + ":connection_info", + ":ice_controller_factory_interface", + ":ice_controller_interface", + ":ice_switch_reason", + ":ice_transport_internal", + ":p2p_constants", + ":transport_description", + "../api:array_view", + "../api:candidate", + "../api/transport:enums", + "../rtc_base:checks", + "../rtc_base:ip_address", + "../rtc_base:logging", + "../rtc_base:net_helper", + "../rtc_base:network", + "../rtc_base:network_constants", + "../rtc_base:timeutils", + "//third_party/abseil-cpp/absl/algorithm:container", + ] +} + +rtc_library("basic_packet_socket_factory") { + sources = [ "base/basic_packet_socket_factory.cc", "base/basic_packet_socket_factory.h", - 
"base/candidate_pair_interface.h", + ] + deps = [ + ":async_stun_tcp_socket", + "../api:async_dns_resolver", + "../api:packet_socket_factory", + "../rtc_base:async_dns_resolver", + "../rtc_base:async_packet_socket", + "../rtc_base:async_tcp_socket", + "../rtc_base:async_udp_socket", + "../rtc_base:checks", + "../rtc_base:logging", + "../rtc_base:socket", + "../rtc_base:socket_adapters", + "../rtc_base:socket_address", + "../rtc_base:socket_factory", + "../rtc_base:ssl", + "../rtc_base:ssl_adapter", + "../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/memory", + ] +} + +rtc_library("basic_port_allocator") { + sources = [ + "client/basic_port_allocator.cc", + "client/basic_port_allocator.h", + ] + deps = [ + ":port", + ":port_allocator", + ":port_interface", + ":relay_port_factory_interface", + ":stun_port", + ":tcp_port", + ":turn_port", + ":turn_port_factory", + "../api:candidate", + "../api:field_trials_view", + "../api:packet_socket_factory", + "../api:sequence_checker", + "../api:turn_customizer", + "../api/environment", + "../api/task_queue:pending_task_safety_flag", + "../api/transport:enums", + "../api/units:time_delta", + "../rtc_base:async_packet_socket", + "../rtc_base:checks", + "../rtc_base:crypto_random", + "../rtc_base:event_tracer", + "../rtc_base:ip_address", + "../rtc_base:logging", + "../rtc_base:macromagic", + "../rtc_base:net_helper", + "../rtc_base:net_helpers", + "../rtc_base:network", + "../rtc_base:network_constants", + "../rtc_base:socket_address", + "../rtc_base:stringutils", + "../rtc_base:threading", + "../rtc_base/memory:always_valid_pointer", + "../rtc_base/network:received_packet", + "../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/base:nullability", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_source_set("candidate_pair_interface") { + sources = [ "base/candidate_pair_interface.h" ] + deps = [ "../api:candidate" ] +} + +rtc_library("connection") { + sources = [ "base/connection.cc", "base/connection.h", + ] + deps = [ + ":candidate_pair_interface", + ":connection_info", + ":dtls_stun_piggyback_controller", + ":ice_credentials_iterator", + ":p2p_constants", + ":p2p_transport_channel_ice_field_trials", + ":port_interface", + ":stun_request", + ":transport_description", + "../api:array_view", + "../api:candidate", + "../api:field_trials_view", + "../api:packet_socket_factory", + "../api:rtc_error", + "../api:sequence_checker", + "../api/task_queue", + "../api/transport:enums", + "../api/transport:field_trial_based_config", + "../api/transport:stun_types", + "../api/units:time_delta", + "../api/units:timestamp", + "../logging:ice_log", + "../rtc_base:async_packet_socket", + "../rtc_base:base64", + "../rtc_base:byte_buffer", + "../rtc_base:callback_list", + "../rtc_base:checks", + "../rtc_base:copy_on_write_buffer", + "../rtc_base:crc32", + "../rtc_base:crypto_random", + "../rtc_base:dscp", + "../rtc_base:event_tracer", + "../rtc_base:ip_address", + "../rtc_base:logging", + "../rtc_base:macromagic", + "../rtc_base:mdns_responder_interface", + "../rtc_base:net_helper", + "../rtc_base:net_helpers", + "../rtc_base:network", + "../rtc_base:network_constants", + "../rtc_base:rate_tracker", + "../rtc_base:rtc_numerics", + "../rtc_base:safe_minmax", + "../rtc_base:socket", + "../rtc_base:socket_address", + "../rtc_base:stringutils", + "../rtc_base:threading", + 
"../rtc_base:timeutils", + "../rtc_base:weak_ptr", + "../rtc_base/memory:always_valid_pointer", + "../rtc_base/network:received_packet", + "../rtc_base/network:sent_packet", + "../rtc_base/system:rtc_export", + "../rtc_base/third_party/sigslot", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("connection_info") { + sources = [ "base/connection_info.cc", "base/connection_info.h", + ] + deps = [ + "../api:candidate", + "../api/units:timestamp", + ] +} + +rtc_library("default_ice_transport_factory") { + sources = [ "base/default_ice_transport_factory.cc", "base/default_ice_transport_factory.h", - "base/dtls_transport.cc", - "base/dtls_transport.h", - "base/dtls_transport_factory.h", - "base/dtls_transport_internal.cc", - "base/dtls_transport_internal.h", - "base/ice_agent_interface.h", - "base/ice_controller_factory_interface.h", + ] + deps = [ + ":basic_ice_controller", + ":ice_controller_factory_interface", + ":ice_controller_interface", + ":p2p_transport_channel", + "../api:ice_transport_interface", + "../api:make_ref_counted", + "../api:scoped_refptr", + "../api:sequence_checker", + "../rtc_base:macromagic", + "../rtc_base:threading", + ] +} + +rtc_library("dtls_transport") { + sources = [ + "dtls/dtls_transport.cc", + "dtls/dtls_transport.h", + ] + deps = [ + ":dtls_stun_piggyback_controller", + ":dtls_transport_internal", + ":dtls_utils", + ":ice_transport_internal", + ":packet_transport_internal", + "../api:array_view", + "../api:dtls_transport_interface", + "../api:rtc_error", + "../api:scoped_refptr", + "../api:sequence_checker", + "../api/crypto:options", + "../api/rtc_event_log", + "../api/task_queue:pending_task_safety_flag", + "../api/transport:ecn_marking", + "../api/transport:stun_types", + "../api/units:time_delta", + "../api/units:timestamp", + "../logging:ice_log", + "../rtc_base:async_packet_socket", + "../rtc_base:buffer", + "../rtc_base:buffer_queue", + "../rtc_base:checks", + "../rtc_base:dscp", + "../rtc_base:logging", + "../rtc_base:macromagic", + "../rtc_base:network_route", + "../rtc_base:socket", + "../rtc_base:socket_address", + "../rtc_base:ssl", + "../rtc_base:ssl_adapter", + "../rtc_base:stream", + "../rtc_base:stringutils", + "../rtc_base:threading", + "../rtc_base:timeutils", + "../rtc_base/network:ecn_marking", + "../rtc_base/network:received_packet", + "../rtc_base/network:sent_packet", + "../rtc_base/system:no_unique_address", + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_source_set("dtls_transport_factory") { + sources = [ "dtls/dtls_transport_factory.h" ] + deps = [ + ":dtls_transport_internal", + ":ice_transport_internal", + "../api/crypto:options", + "../rtc_base:ssl_adapter", + ] +} + +rtc_library("dtls_transport_internal") { + sources = [ + "dtls/dtls_transport_internal.cc", + "dtls/dtls_transport_internal.h", + ] + deps = [ + ":ice_transport_internal", + ":packet_transport_internal", + "../api:dtls_transport_interface", + "../api:rtc_error", + "../api:scoped_refptr", + "../rtc_base:buffer", + "../rtc_base:callback_list", + "../rtc_base:ssl", + "../rtc_base:ssl_adapter", + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_source_set("ice_agent_interface") { + sources 
= [ "base/ice_agent_interface.h" ] + deps = [ + ":connection", + ":ice_switch_reason", + ":transport_description", + "../api:array_view", + ] +} + +rtc_library("ice_controller_interface") { + sources = [ "base/ice_controller_interface.cc", "base/ice_controller_interface.h", + ] + deps = [ + ":connection", + ":ice_switch_reason", + ":ice_transport_internal", + ":p2p_transport_channel_ice_field_trials", + ":transport_description", + "../api:array_view", + "../rtc_base:checks", + "../rtc_base/system:rtc_export", + ] +} + +rtc_source_set("ice_controller_factory_interface") { + sources = [ "base/ice_controller_factory_interface.h" ] + deps = [ + ":connection", + ":ice_controller_interface", + ":ice_transport_internal", + ":transport_description", + ] +} + +rtc_library("ice_credentials_iterator") { + sources = [ "base/ice_credentials_iterator.cc", "base/ice_credentials_iterator.h", + ] + deps = [ + ":p2p_constants", + ":transport_description", + "../rtc_base:crypto_random", + ] +} + +rtc_library("ice_switch_reason") { + sources = [ "base/ice_switch_reason.cc", "base/ice_switch_reason.h", + ] + deps = [ "../rtc_base/system:rtc_export" ] +} + +rtc_library("ice_transport_internal") { + sources = [ "base/ice_transport_internal.cc", "base/ice_transport_internal.h", + ] + deps = [ + ":candidate_pair_interface", + ":connection", + ":connection_info", + ":dtls_stun_piggyback_controller", + ":p2p_constants", + ":packet_transport_internal", + ":port", + ":stun_dictionary", + ":transport_description", + "../api:array_view", + "../api:candidate", + "../api:field_trials_view", + "../api:libjingle_peerconnection_api", + "../api:rtc_error", + "../api/transport:enums", + "../api/transport:stun_types", + "../rtc_base:callback_list", + "../rtc_base:checks", + "../rtc_base:net_helper", + "../rtc_base:network_constants", + "../rtc_base:timeutils", + "../rtc_base/network:received_packet", + "../rtc_base/system:rtc_export", + "../rtc_base/third_party/sigslot", + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("p2p_constants") { + sources = [ "base/p2p_constants.cc", "base/p2p_constants.h", + ] + deps = [ "../rtc_base/system:rtc_export" ] +} + +rtc_library("p2p_transport_channel") { + sources = [ "base/p2p_transport_channel.cc", "base/p2p_transport_channel.h", - "base/p2p_transport_channel_ice_field_trials.h", + ] + deps = [ + ":active_ice_controller_factory_interface", + ":active_ice_controller_interface", + ":basic_ice_controller", + ":candidate_pair_interface", + ":connection", + ":connection_info", + ":dtls_stun_piggyback_controller", + ":ice_agent_interface", + ":ice_controller_factory_interface", + ":ice_controller_interface", + ":ice_switch_reason", + ":ice_transport_internal", + ":p2p_constants", + ":p2p_transport_channel_ice_field_trials", + ":port", + ":port_allocator", + ":port_interface", + ":regathering_controller", + ":stun_dictionary", + ":transport_description", + ":wrapping_active_ice_controller", + "../api:array_view", + "../api:async_dns_resolver", + "../api:candidate", + "../api:field_trials_view", + "../api:ice_transport_interface", + "../api:rtc_error", + "../api:sequence_checker", + "../api/task_queue:pending_task_safety_flag", + "../api/transport:enums", + "../api/transport:stun_types", + "../api/units:time_delta", + "../logging:ice_log", + "../rtc_base:async_packet_socket", + "../rtc_base:checks", + "../rtc_base:copy_on_write_buffer", + "../rtc_base:dscp", + "../rtc_base:event_tracer", + 
"../rtc_base:ip_address", + "../rtc_base:logging", + "../rtc_base:macromagic", + "../rtc_base:net_helper", + "../rtc_base:net_helpers", + "../rtc_base:network", + "../rtc_base:network_constants", + "../rtc_base:network_route", + "../rtc_base:socket", + "../rtc_base:socket_address", + "../rtc_base:stringutils", + "../rtc_base:threading", + "../rtc_base:timeutils", + "../rtc_base/experiments:field_trial_parser", + "../rtc_base/network:received_packet", + "../rtc_base/network:sent_packet", + "../rtc_base/system:rtc_export", + "../system_wrappers:metrics", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_source_set("p2p_transport_channel_ice_field_trials") { + sources = [ "base/p2p_transport_channel_ice_field_trials.h" ] +} + +rtc_library("packet_transport_internal") { + sources = [ "base/packet_transport_internal.cc", "base/packet_transport_internal.h", + ] + deps = [ + ":connection", + ":port", + "../api:sequence_checker", + "../rtc_base:async_packet_socket", + "../rtc_base:callback_list", + "../rtc_base:checks", + "../rtc_base:macromagic", + "../rtc_base:network_route", + "../rtc_base:socket", + "../rtc_base/network:received_packet", + "../rtc_base/system:rtc_export", + "../rtc_base/third_party/sigslot", + "//third_party/abseil-cpp/absl/functional:any_invocable", + ] +} + +rtc_library("port") { + sources = [ "base/port.cc", "base/port.h", + ] + deps = [ + ":candidate_pair_interface", + ":connection", + ":p2p_constants", + ":port_interface", + ":stun_request", + ":transport_description", + "../api:array_view", + "../api:candidate", + "../api:field_trials_view", + "../api:packet_socket_factory", + "../api:rtc_error", + "../api:sequence_checker", + "../api/environment", + "../api/task_queue", + "../api/transport:stun_types", + "../api/units:time_delta", + "../rtc_base:async_packet_socket", + "../rtc_base:byte_buffer", + "../rtc_base:callback_list", + "../rtc_base:checks", + "../rtc_base:crypto_random", + "../rtc_base:dscp", + "../rtc_base:event_tracer", + "../rtc_base:ip_address", + "../rtc_base:logging", + "../rtc_base:macromagic", + "../rtc_base:mdns_responder_interface", + "../rtc_base:net_helper", + "../rtc_base:net_helpers", + "../rtc_base:network", + "../rtc_base:socket_address", + "../rtc_base:stringutils", + "../rtc_base:timeutils", + "../rtc_base:weak_ptr", + "../rtc_base/network:received_packet", + "../rtc_base/network:sent_packet", + "../rtc_base/system:rtc_export", + "../rtc_base/third_party/sigslot", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("port_allocator") { + sources = [ "base/port_allocator.cc", "base/port_allocator.h", + ] + deps = [ + ":connection", + ":ice_credentials_iterator", + ":port", + ":port_interface", + ":transport_description", + "../api:candidate", + "../api:sequence_checker", + "../api/transport:enums", + "../rtc_base:checks", + "../rtc_base:crypto_random", + "../rtc_base:network", + "../rtc_base:socket_address", + "../rtc_base:ssl", + "../rtc_base:threading", + "../rtc_base/system:rtc_export", + "../rtc_base/third_party/sigslot", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("port_interface") { + sources = [ "base/port_interface.cc", 
"base/port_interface.h", + ] + deps = [ + ":transport_description", + "../api:candidate", + "../api:field_trials_view", + "../api:packet_socket_factory", + "../api/task_queue:task_queue", + "../rtc_base:async_packet_socket", + "../rtc_base:callback_list", + "../rtc_base:dscp", + "../rtc_base:network", + "../rtc_base:network", + "../rtc_base:socket", + "../rtc_base:socket_address", + "../rtc_base/network:sent_packet", + "../rtc_base/third_party/sigslot", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("pseudo_tcp") { + sources = [ "base/pseudo_tcp.cc", "base/pseudo_tcp.h", + ] + deps = [ + "../api:array_view", + "../rtc_base:byte_buffer", + "../rtc_base:byte_order", + "../rtc_base:checks", + "../rtc_base:logging", + "../rtc_base:macromagic", + "../rtc_base:safe_minmax", + "../rtc_base:socket", + "../rtc_base:timeutils", + "../rtc_base/synchronization:mutex", + "../rtc_base/system:rtc_export", + ] +} + +rtc_library("regathering_controller") { + sources = [ "base/regathering_controller.cc", "base/regathering_controller.h", + ] + deps = [ + ":connection", + ":ice_transport_internal", + ":p2p_constants", + ":packet_transport_internal", + ":port_allocator", + "../api:sequence_checker", + "../api/task_queue:pending_task_safety_flag", + "../api/units:time_delta", + "../rtc_base:checks", + "../rtc_base:network_route", + "../rtc_base:threading", + "../rtc_base/third_party/sigslot", + ] +} + +rtc_library("stun_dictionary") { + sources = [ "base/stun_dictionary.cc", "base/stun_dictionary.h", + ] + deps = [ + "../api:rtc_error", + "../api/transport:stun_types", + "../rtc_base:byte_buffer", + "../rtc_base:logging", + ] +} + +rtc_library("dtls_utils") { + sources = [ + "dtls/dtls_utils.cc", + "dtls/dtls_utils.h", + ] + deps = [ + "../api:array_view", + "../rtc_base:buffer", + "../rtc_base:byte_buffer", + "../rtc_base:checks", + "../rtc_base:crc32", + "//third_party/abseil-cpp/absl/container:flat_hash_set", + ] +} + +rtc_library("dtls_stun_piggyback_controller") { + sources = [ + "dtls/dtls_stun_piggyback_callbacks.h", + "dtls/dtls_stun_piggyback_controller.cc", + "dtls/dtls_stun_piggyback_controller.h", + ] + deps = [ + ":dtls_utils", + "../api:array_view", + "../api:sequence_checker", + "../api/transport:stun_types", + "../api/transport:stun_types", + "../rtc_base:buffer", + "../rtc_base:byte_buffer", + "../rtc_base:checks", + "../rtc_base:logging", + "../rtc_base:logging", + "../rtc_base:macromagic", + "../rtc_base:stringutils", + "../rtc_base/system:no_unique_address", + "//third_party/abseil-cpp/absl/container:flat_hash_set", + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:str_format", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("stun_port") { + sources = [ "base/stun_port.cc", "base/stun_port.h", + ] + deps = [ + ":connection", + ":p2p_constants", + ":port", + ":port_interface", + ":stun_request", + "../api:async_dns_resolver", + "../api:candidate", + "../api:field_trials_view", + "../api:packet_socket_factory", + "../api/transport:stun_types", + "../rtc_base:async_packet_socket", + "../rtc_base:checks", + "../rtc_base:dscp", + "../rtc_base:ip_address", + "../rtc_base:logging", + "../rtc_base:net_helper", + "../rtc_base:network", + "../rtc_base:network_constants", + "../rtc_base:socket", + "../rtc_base:socket_address", + "../rtc_base:stringutils", + "../rtc_base:timeutils", + "../rtc_base/network:received_packet", + 
"../rtc_base/network:sent_packet", + "../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("stun_request") { + sources = [ "base/stun_request.cc", "base/stun_request.h", + ] + deps = [ + "../api:array_view", + "../api:sequence_checker", + "../api/task_queue", + "../api/task_queue:pending_task_safety_flag", + "../api/transport:stun_types", + "../api/units:time_delta", + "../rtc_base:byte_buffer", + "../rtc_base:checks", + "../rtc_base:crypto_random", + "../rtc_base:logging", + "../rtc_base:macromagic", + "../rtc_base:stringutils", + "../rtc_base:timeutils", + "//third_party/abseil-cpp/absl/memory", + ] +} + +rtc_library("tcp_port") { + sources = [ "base/tcp_port.cc", "base/tcp_port.h", + ] + deps = [ + ":connection", + ":connection_info", + ":p2p_constants", + ":port", + ":port_interface", + ":stun_request", + "../api:candidate", + "../api:packet_socket_factory", + "../api:sequence_checker", + "../api/task_queue:pending_task_safety_flag", + "../api/transport:stun_types", + "../api/units:time_delta", + "../rtc_base:async_packet_socket", + "../rtc_base:checks", + "../rtc_base:ip_address", + "../rtc_base:logging", + "../rtc_base:net_helper", + "../rtc_base:rate_tracker", + "../rtc_base:socket", + "../rtc_base:socket_address", + "../rtc_base:timeutils", + "../rtc_base:weak_ptr", + "../rtc_base/containers:flat_map", + "../rtc_base/network:received_packet", + "../rtc_base/network:sent_packet", + "../rtc_base/third_party/sigslot", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("transport_description") { + sources = [ "base/transport_description.cc", "base/transport_description.h", + ] + deps = [ + ":p2p_constants", + "../api:rtc_error", + "../rtc_base:logging", + "../rtc_base:macromagic", + "../rtc_base:ssl", + "../rtc_base:stringutils", + "../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("transport_description_factory") { + sources = [ "base/transport_description_factory.cc", "base/transport_description_factory.h", - "base/transport_info.h", + ] + deps = [ + ":ice_credentials_iterator", + ":transport_description", + "../api:field_trials_view", + "../api:scoped_refptr", + "../rtc_base:checks", + "../rtc_base:logging", + "../rtc_base:ssl", + ] +} + +rtc_source_set("transport_info") { + sources = [ "base/transport_info.h" ] + deps = [ + ":p2p_constants", + ":transport_description", + "../api:candidate", + "../rtc_base:ssl", + ] +} + +rtc_library("turn_port") { + sources = [ "base/turn_port.cc", "base/turn_port.h", - "base/udp_port.h", - "base/wrapping_active_ice_controller.cc", - "base/wrapping_active_ice_controller.h", - "client/basic_port_allocator.cc", - "client/basic_port_allocator.h", - "client/relay_port_factory_interface.h", - "client/turn_port_factory.cc", - "client/turn_port_factory.h", ] - deps = [ + ":connection", + ":p2p_constants", + ":port", + ":port_allocator", + ":port_interface", + ":relay_port_factory_interface", + ":stun_request", "../api:array_view", "../api:async_dns_resolver", "../api:candidate", - "../api:dtls_transport_interface", - "../api:field_trials_view", - "../api:ice_transport_interface", - "../api:make_ref_counted", "../api:packet_socket_factory", - "../api:rtc_error", "../api:scoped_refptr", 
- "../api:sequence_checker", "../api:turn_customizer", - "../api:wrapping_async_dns_resolver", - "../api/crypto:options", - "../api/rtc_event_log", "../api/task_queue", - "../api/transport:enums", - "../api/transport:field_trial_based_config", + "../api/task_queue:pending_task_safety_flag", "../api/transport:stun_types", "../api/units:time_delta", - "../api/units:timestamp", - "../logging:ice_log", - "../rtc_base:async_dns_resolver", "../rtc_base:async_packet_socket", - "../rtc_base:async_resolver_interface", - "../rtc_base:async_tcp_socket", - "../rtc_base:async_udp_socket", - "../rtc_base:buffer", - "../rtc_base:buffer_queue", "../rtc_base:byte_buffer", "../rtc_base:byte_order", "../rtc_base:callback_list", "../rtc_base:checks", - "../rtc_base:crc32", "../rtc_base:dscp", - "../rtc_base:event_tracer", "../rtc_base:ip_address", "../rtc_base:logging", - "../rtc_base:macromagic", - "../rtc_base:mdns_responder_interface", "../rtc_base:net_helper", - "../rtc_base:net_helpers", "../rtc_base:network", - "../rtc_base:network_constants", - "../rtc_base:network_route", - "../rtc_base:proxy_info", - "../rtc_base:rate_tracker", - "../rtc_base:refcount", - "../rtc_base:rtc_numerics", "../rtc_base:socket", - "../rtc_base:socket_adapters", "../rtc_base:socket_address", - "../rtc_base:socket_factory", - "../rtc_base:socket_server", "../rtc_base:ssl", - "../rtc_base:stream", "../rtc_base:stringutils", - "../rtc_base:threading", - "../rtc_base:timeutils", - "../rtc_base/containers:flat_map", - "../rtc_base/experiments:field_trial_parser", - "../rtc_base/memory:always_valid_pointer", - "../rtc_base/system:no_unique_address", - - # Needed by pseudo_tcp, which should move to a separate target. - "../api/task_queue:pending_task_safety_flag", - "../rtc_base:safe_minmax", - "../rtc_base:weak_ptr", + "../rtc_base/network:received_packet", "../rtc_base/network:sent_packet", - "../rtc_base/synchronization:mutex", - "../rtc_base/system:rtc_export", - "../rtc_base/third_party/base64", "../rtc_base/third_party/sigslot", - "../system_wrappers:metrics", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("turn_port_factory") { + sources = [ + "client/turn_port_factory.cc", + "client/turn_port_factory.h", + ] + deps = [ + ":port", + ":port_allocator", + ":relay_port_factory_interface", + ":turn_port", + "../rtc_base:async_packet_socket", + ] +} + +rtc_source_set("relay_port_factory_interface") { + sources = [ "client/relay_port_factory_interface.h" ] + deps = [ + ":port", + ":port_allocator", + "../api:packet_socket_factory", + "../api/environment", + "../rtc_base:async_packet_socket", + "../rtc_base:network", + "../rtc_base:threading", + ] +} + +rtc_library("wrapping_active_ice_controller") { + sources = [ + "base/wrapping_active_ice_controller.cc", + "base/wrapping_active_ice_controller.h", + ] + deps = [ + ":active_ice_controller_interface", + ":basic_ice_controller", + ":connection", + ":ice_agent_interface", + ":ice_controller_factory_interface", + ":ice_controller_interface", + ":ice_switch_reason", + ":ice_transport_internal", + ":transport_description", + "../api:sequence_checker", + "../api/task_queue:pending_task_safety_flag", + "../api/units:time_delta", + "../rtc_base:checks", + "../rtc_base:logging", + "../rtc_base:macromagic", + 
"../rtc_base:threading", + "../rtc_base:timeutils", ] } @@ -181,84 +988,165 @@ if (rtc_include_tests) { rtc_library("fake_ice_transport") { testonly = true visibility = [ "*" ] - sources = [ "base/fake_ice_transport.h" ] + sources = [ + "base/fake_ice_transport.h", + "test/fake_ice_transport.h", + ] deps = [ - ":rtc_p2p", + ":candidate_pair_interface", + ":connection", + ":connection_info", + ":dtls_stun_piggyback_controller", + ":ice_transport_internal", + ":port", + ":transport_description", + "../api:array_view", + "../api:candidate", "../api:ice_transport_interface", "../api:libjingle_peerconnection_api", + "../api:sequence_checker", "../api/task_queue:pending_task_safety_flag", + "../api/transport:enums", + "../api/transport:stun_types", "../api/units:time_delta", + "../rtc_base:async_packet_socket", + "../rtc_base:byte_buffer", + "../rtc_base:checks", "../rtc_base:copy_on_write_buffer", + "../rtc_base:logging", + "../rtc_base:macromagic", + "../rtc_base:network_route", + "../rtc_base:socket", "../rtc_base:task_queue_for_test", - ] - absl_deps = [ + "../rtc_base:threading", + "../rtc_base:timeutils", + "../rtc_base/network:received_packet", + "../rtc_base/network:sent_packet", + "../test:explicit_key_value_config", "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/strings:string_view", ] } rtc_library("fake_port_allocator") { testonly = true visibility = [ "*" ] - sources = [ "base/fake_port_allocator.h" ] + sources = [ + "base/fake_port_allocator.h", + "test/fake_port_allocator.h", + ] deps = [ - ":rtc_p2p", - "../rtc_base:net_helpers", + ":basic_packet_socket_factory", + ":port", + ":port_allocator", + ":port_interface", + ":stun_port", + "../api:candidate", + "../api:packet_socket_factory", + "../api/environment", + "../api/task_queue", + "../rtc_base:async_packet_socket", + "../rtc_base:checks", + "../rtc_base:ip_address", "../rtc_base:net_test_helpers", + "../rtc_base:network", + "../rtc_base:socket_factory", "../rtc_base:task_queue_for_test", - "../rtc_base:threading", - "../rtc_base/memory:always_valid_pointer", + "//third_party/abseil-cpp/absl/base:nullability", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("p2p_test_utils") { testonly = true sources = [ - "base/fake_dtls_transport.h", - "base/fake_packet_transport.h", - "base/mock_active_ice_controller.h", - "base/mock_async_resolver.h", - "base/mock_dns_resolving_packet_socket_factory.h", "base/mock_ice_agent.h", "base/mock_ice_controller.h", "base/mock_ice_transport.h", - "base/test_stun_server.cc", - "base/test_stun_server.h", - "base/test_turn_customizer.h", - "base/test_turn_server.h", + "dtls/fake_dtls_transport.h", + "test/fake_packet_transport.h", + "test/mock_active_ice_controller.h", + "test/mock_dns_resolving_packet_socket_factory.h", + "test/mock_ice_agent.h", + "test/mock_ice_controller.h", + "test/mock_ice_transport.h", + "test/nat_server.cc", + "test/nat_server.h", + "test/nat_socket_factory.cc", + "test/nat_socket_factory.h", + "test/nat_types.cc", + "test/nat_types.h", + "test/test_stun_server.cc", + "test/test_stun_server.h", + "test/test_turn_customizer.h", + "test/test_turn_server.h", ] deps = [ + ":active_ice_controller_factory_interface", + ":active_ice_controller_interface", + 
":basic_packet_socket_factory", + ":candidate_pair_interface", + ":connection", + ":dtls_transport_internal", ":fake_ice_transport", ":fake_port_allocator", + ":ice_agent_interface", + ":ice_controller_factory_interface", + ":ice_controller_interface", + ":ice_switch_reason", + ":ice_transport_internal", ":p2p_server_utils", - ":rtc_p2p", + ":packet_transport_internal", + ":port_interface", + ":transport_description", + "../api:array_view", + "../api:async_dns_resolver", + "../api:candidate", "../api:dtls_transport_interface", "../api:libjingle_peerconnection_api", "../api:mock_async_dns_resolver", "../api:packet_socket_factory", + "../api:rtc_error", + "../api:scoped_refptr", "../api:sequence_checker", "../api:turn_customizer", "../api/crypto:options", + "../api/transport:ecn_marking", + "../api/transport:enums", "../api/transport:stun_types", - "../rtc_base:async_resolver_interface", + "../api/units:time_delta", + "../api/units:timestamp", + "../rtc_base:async_packet_socket", + "../rtc_base:async_udp_socket", "../rtc_base:async_udp_socket", + "../rtc_base:buffer", + "../rtc_base:byte_order", + "../rtc_base:checks", "../rtc_base:copy_on_write_buffer", "../rtc_base:gunit_helpers", + "../rtc_base:ip_address", + "../rtc_base:logging", + "../rtc_base:macromagic", + "../rtc_base:net_helpers", + "../rtc_base:network_route", "../rtc_base:rtc_base_tests_utils", "../rtc_base:socket", + "../rtc_base:socket_adapters", "../rtc_base:socket_address", + "../rtc_base:socket_address_pair", + "../rtc_base:socket_factory", "../rtc_base:socket_server", "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:threading", + "../rtc_base:timeutils", + "../rtc_base/network:received_packet", + "../rtc_base/synchronization:mutex", "../rtc_base/third_party/sigslot", "../test:test_support", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -267,10 +1155,9 @@ if (rtc_include_tests) { sources = [ "base/async_stun_tcp_socket_unittest.cc", - "base/basic_async_resolver_factory_unittest.cc", - "base/dtls_transport_unittest.cc", "base/ice_credentials_iterator_unittest.cc", "base/p2p_transport_channel_unittest.cc", + "base/packet_transport_internal_unittest.cc", "base/port_allocator_unittest.cc", "base/port_unittest.cc", "base/pseudo_tcp_unittest.cc", @@ -278,37 +1165,93 @@ if (rtc_include_tests) { "base/stun_dictionary_unittest.cc", "base/stun_port_unittest.cc", "base/stun_request_unittest.cc", - "base/stun_server_unittest.cc", "base/tcp_port_unittest.cc", "base/transport_description_factory_unittest.cc", "base/transport_description_unittest.cc", "base/turn_port_unittest.cc", - "base/turn_server_unittest.cc", "base/wrapping_active_ice_controller_unittest.cc", "client/basic_port_allocator_unittest.cc", + "dtls/dtls_ice_integrationtest.cc", + "dtls/dtls_stun_piggyback_controller_unittest.cc", + "dtls/dtls_transport_unittest.cc", + "dtls/dtls_utils_unittest.cc", + "test/nat_unittest.cc", + "test/stun_server_unittest.cc", + "test/turn_server_unittest.cc", ] deps = [ + ":async_stun_tcp_socket", + ":basic_ice_controller", + ":basic_packet_socket_factory", + ":basic_port_allocator", + ":candidate_pair_interface", + ":connection", + ":connection_info", + ":dtls_stun_piggyback_controller", + ":dtls_transport", + ":dtls_transport_internal", + ":dtls_utils", ":fake_ice_transport", ":fake_port_allocator", + ":ice_controller_factory_interface", + 
":ice_controller_interface", + ":ice_credentials_iterator", + ":ice_switch_reason", + ":ice_transport_internal", + ":p2p_constants", ":p2p_server_utils", ":p2p_test_utils", - ":rtc_p2p", + ":p2p_transport_channel", + ":p2p_transport_channel_ice_field_trials", + ":packet_transport_internal", + ":port", + ":port_allocator", + ":port_interface", + ":pseudo_tcp", + ":regathering_controller", + ":relay_port_factory_interface", + ":stun_dictionary", + ":stun_port", + ":stun_request", + ":tcp_port", + ":transport_description", + ":transport_description_factory", + ":turn_port", + ":wrapping_active_ice_controller", + "../api:array_view", + "../api:async_dns_resolver", "../api:candidate", + "../api:create_network_emulation_manager", "../api:dtls_transport_interface", + "../api:field_trials", "../api:field_trials_view", - "../api:libjingle_peerconnection_api", + "../api:ice_transport_interface", "../api:mock_async_dns_resolver", + "../api:network_emulation_manager_api", "../api:packet_socket_factory", + "../api:rtc_error", + "../api:rtc_error_matchers", "../api:scoped_refptr", + "../api:simulated_network_api", + "../api/crypto:options", + "../api/environment", + "../api/environment:environment_factory", "../api/task_queue", "../api/task_queue:pending_task_safety_flag", + "../api/test/network_emulation", + "../api/transport:enums", "../api/transport:stun_types", + "../api/units:data_rate", "../api/units:time_delta", "../rtc_base:async_packet_socket", + "../rtc_base:async_tcp_socket", + "../rtc_base:async_udp_socket", "../rtc_base:buffer", "../rtc_base:byte_buffer", + "../rtc_base:byte_order", "../rtc_base:checks", "../rtc_base:copy_on_write_buffer", + "../rtc_base:crypto_random", "../rtc_base:dscp", "../rtc_base:gunit_helpers", "../rtc_base:ip_address", @@ -320,30 +1263,35 @@ if (rtc_include_tests) { "../rtc_base:net_test_helpers", "../rtc_base:network", "../rtc_base:network_constants", - "../rtc_base:proxy_info", + "../rtc_base:network_route", "../rtc_base:rtc_base_tests_utils", + "../rtc_base:rtc_event", "../rtc_base:socket", "../rtc_base:socket_adapters", "../rtc_base:socket_address", - "../rtc_base:socket_address_pair", + "../rtc_base:socket_factory", + "../rtc_base:socket_server", "../rtc_base:ssl", - "../rtc_base:stringutils", + "../rtc_base:ssl_adapter", + "../rtc_base:stream", "../rtc_base:testclient", "../rtc_base:threading", "../rtc_base:timeutils", + "../rtc_base/network:ecn_marking", + "../rtc_base/network:received_packet", "../rtc_base/network:sent_packet", "../rtc_base/third_party/sigslot", "../system_wrappers:metrics", - "../test:rtc_expect_death", + "../test:explicit_key_value_config", "../test:scoped_key_value_config", "../test:test_support", - "//testing/gtest", - ] - absl_deps = [ + "../test:wait_until", "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/container:flat_hash_set", + "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } } @@ -351,13 +1299,14 @@ if (rtc_include_tests) { rtc_library("p2p_server_utils") { testonly = true sources = [ - "base/stun_server.cc", - "base/stun_server.h", - "base/turn_server.cc", - "base/turn_server.h", + "test/stun_server.cc", + "test/stun_server.h", + "test/turn_server.cc", + "test/turn_server.h", ] deps = [ - ":rtc_p2p", + ":async_stun_tcp_socket", + ":port_interface", "../api:array_view", 
"../api:packet_socket_factory", "../api:sequence_checker", @@ -368,67 +1317,25 @@ rtc_library("p2p_server_utils") { "../rtc_base:async_packet_socket", "../rtc_base:async_udp_socket", "../rtc_base:byte_buffer", + "../rtc_base:byte_order", "../rtc_base:checks", + "../rtc_base:crypto_random", + "../rtc_base:digest", + "../rtc_base:ip_address", "../rtc_base:logging", + "../rtc_base:macromagic", "../rtc_base:rtc_base_tests_utils", + "../rtc_base:socket", "../rtc_base:socket_adapters", "../rtc_base:socket_address", "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:stringutils", + "../rtc_base:timeutils", + "../rtc_base/network:received_packet", "../rtc_base/third_party/sigslot", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - ] -} - -rtc_library("libstunprober") { - visibility = [ "*" ] - sources = [ - "stunprober/stun_prober.cc", - "stunprober/stun_prober.h", - ] - - deps = [ - ":rtc_p2p", - "../api:packet_socket_factory", - "../api:sequence_checker", - "../api/task_queue:pending_task_safety_flag", - "../api/transport:stun_types", - "../api/units:time_delta", - "../rtc_base:async_packet_socket", - "../rtc_base:async_resolver_interface", - "../rtc_base:byte_buffer", - "../rtc_base:checks", - "../rtc_base:ip_address", - "../rtc_base:logging", - "../rtc_base:network", - "../rtc_base:socket_address", - "../rtc_base:ssl", - "../rtc_base:threading", - "../rtc_base:timeutils", - "../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/strings:string_view", ] } - -if (rtc_include_tests) { - rtc_library("libstunprober_unittests") { - testonly = true - - sources = [ "stunprober/stun_prober_unittest.cc" ] - deps = [ - ":libstunprober", - ":p2p_test_utils", - ":rtc_p2p", - "../rtc_base:checks", - "../rtc_base:gunit_helpers", - "../rtc_base:ip_address", - "../rtc_base:rtc_base_tests_utils", - "../rtc_base:ssl", - "../test:test_support", - "//testing/gtest", - ] - } -} diff --git a/p2p/DEPS b/p2p/DEPS index 8243179d40..291d05983a 100644 --- a/p2p/DEPS +++ b/p2p/DEPS @@ -2,4 +2,5 @@ include_rules = [ "+logging", "+net", "+system_wrappers", + "+absl/functional/any_invocable.h", ] diff --git a/p2p/base/active_ice_controller_factory_interface.h b/p2p/base/active_ice_controller_factory_interface.h index 6a47f2253f..8b8246f487 100644 --- a/p2p/base/active_ice_controller_factory_interface.h +++ b/p2p/base/active_ice_controller_factory_interface.h @@ -17,7 +17,7 @@ #include "p2p/base/ice_agent_interface.h" #include "p2p/base/ice_controller_factory_interface.h" -namespace cricket { +namespace webrtc { // An active ICE controller may be constructed with the same arguments as a // legacy ICE controller. Additionally, an ICE agent must be provided for the @@ -34,6 +34,15 @@ class ActiveIceControllerFactoryInterface { const ActiveIceControllerFactoryArgs&) = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::ActiveIceControllerFactoryArgs; +using ::webrtc::ActiveIceControllerFactoryInterface; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_ACTIVE_ICE_CONTROLLER_FACTORY_INTERFACE_H_ diff --git a/p2p/base/active_ice_controller_interface.h b/p2p/base/active_ice_controller_interface.h index e54838ee64..252cdc4642 100644 --- a/p2p/base/active_ice_controller_interface.h +++ b/p2p/base/active_ice_controller_interface.h @@ -11,14 +11,13 @@ #ifndef P2P_BASE_ACTIVE_ICE_CONTROLLER_INTERFACE_H_ #define P2P_BASE_ACTIVE_ICE_CONTROLLER_INTERFACE_H_ -#include "absl/types/optional.h" -#include "api/array_view.h" + #include "p2p/base/connection.h" #include "p2p/base/ice_switch_reason.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/transport_description.h" -namespace cricket { +namespace webrtc { // ActiveIceControllerInterface defines the methods for a module that actively // manages the connection used by an ICE transport. @@ -79,6 +78,14 @@ class ActiveIceControllerInterface { virtual const Connection* FindNextPingableConnection() = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::ActiveIceControllerInterface; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_ACTIVE_ICE_CONTROLLER_INTERFACE_H_ diff --git a/p2p/base/async_stun_tcp_socket.cc b/p2p/base/async_stun_tcp_socket.cc index 4a35903dfe..8eb8c3c1cf 100644 --- a/p2p/base/async_stun_tcp_socket.cc +++ b/p2p/base/async_stun_tcp_socket.cc @@ -14,13 +14,23 @@ #include #include +#include +#include + +#include "api/array_view.h" #include "api/transport/stun.h" +#include "api/units/timestamp.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/async_tcp_socket.h" #include "rtc_base/byte_order.h" #include "rtc_base/checks.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/network/sent_packet.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" #include "rtc_base/time_utils.h" -namespace cricket { +namespace webrtc { static const size_t kMaxPacketSize = 64 * 1024; @@ -40,19 +50,19 @@ inline bool IsStunMessage(uint16_t msg_type) { // it. Takes ownership of `socket`. Returns NULL if bind() or // connect() fail (`socket` is destroyed in that case). 
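The header changes above keep old cricket:: spellings compiling by re-exporting the webrtc-namespace definitions behind a compatibility macro. A minimal, self-contained sketch of the same aliasing pattern (ExampleIceHelper is a hypothetical stand-in, and in the tree the macro is supplied by the build rather than a local #define):

// Self-contained illustration: the definition lives in webrtc::, and
// cricket:: only re-exports it while the compatibility macro is defined.
#include <cstdio>

namespace webrtc {
class ExampleIceHelper {  // hypothetical stand-in for a migrated class
 public:
  int Answer() const { return 42; }
};
}  // namespace webrtc

#define WEBRTC_ALLOW_DEPRECATED_NAMESPACES 1  // normally set by the build
#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES
namespace cricket {
using ::webrtc::ExampleIceHelper;  // an alias, not a second definition
}  // namespace cricket
#endif

int main() {
  cricket::ExampleIceHelper legacy;  // resolves to webrtc::ExampleIceHelper
  webrtc::ExampleIceHelper current;
  std::printf("%d %d\n", legacy.Answer(), current.Answer());
  return 0;
}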
AsyncStunTCPSocket* AsyncStunTCPSocket::Create( - rtc::Socket* socket, - const rtc::SocketAddress& bind_address, - const rtc::SocketAddress& remote_address) { + Socket* socket, + const SocketAddress& bind_address, + const SocketAddress& remote_address) { return new AsyncStunTCPSocket( AsyncTCPSocketBase::ConnectSocket(socket, bind_address, remote_address)); } -AsyncStunTCPSocket::AsyncStunTCPSocket(rtc::Socket* socket) - : rtc::AsyncTCPSocketBase(socket, kBufSize) {} +AsyncStunTCPSocket::AsyncStunTCPSocket(Socket* socket) + : AsyncTCPSocketBase(socket, kBufSize) {} int AsyncStunTCPSocket::Send(const void* pv, size_t cb, - const rtc::PacketOptions& options) { + const AsyncSocketPacketOptions& options) { if (cb > kBufSize || cb < kPacketLenSize + kPacketLenOffset) { SetError(EMSGSIZE); return -1; @@ -82,15 +92,15 @@ int AsyncStunTCPSocket::Send(const void* pv, return res; } - rtc::SentPacket sent_packet(options.packet_id, rtc::TimeMillis()); + SentPacketInfo sent_packet(options.packet_id, TimeMillis()); SignalSentPacket(this, sent_packet); // We claim to have sent the whole thing, even if we only sent partial return static_cast(cb); } -void AsyncStunTCPSocket::ProcessInput(char* data, size_t* len) { - rtc::SocketAddress remote_addr(GetRemoteAddress()); +size_t AsyncStunTCPSocket::ProcessInput(ArrayView data) { + SocketAddress remote_addr(GetRemoteAddress()); // STUN packet - First 4 bytes. Total header size is 20 bytes. // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ // |0 0| STUN Message Type | Message Length | @@ -101,37 +111,38 @@ void AsyncStunTCPSocket::ProcessInput(char* data, size_t* len) { // | Channel Number | Length | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + size_t processed_bytes = 0; while (true) { + size_t bytes_left = data.size() - processed_bytes; // We need at least 4 bytes to read the STUN or ChannelData packet length. - if (*len < kPacketLenOffset + kPacketLenSize) - return; + if (bytes_left < kPacketLenOffset + kPacketLenSize) + return processed_bytes; int pad_bytes; - size_t expected_pkt_len = GetExpectedLength(data, *len, &pad_bytes); + size_t expected_pkt_len = GetExpectedLength(data.data() + processed_bytes, + bytes_left, &pad_bytes); size_t actual_length = expected_pkt_len + pad_bytes; - if (*len < actual_length) { - return; + if (bytes_left < actual_length) { + return processed_bytes; } - SignalReadPacket(this, data, expected_pkt_len, remote_addr, - rtc::TimeMicros()); - - *len -= actual_length; - if (*len > 0) { - memmove(data, data + actual_length, *len); - } + ReceivedIpPacket received_packet( + data.subview(processed_bytes, expected_pkt_len), remote_addr, + Timestamp::Micros(TimeMicros())); + NotifyPacketReceived(received_packet); + processed_bytes += actual_length; } } size_t AsyncStunTCPSocket::GetExpectedLength(const void* data, - size_t len, + size_t /* len */, int* pad_bytes) { *pad_bytes = 0; PacketLength pkt_len = - rtc::GetBE16(static_cast(data) + kPacketLenOffset); + webrtc::GetBE16(static_cast(data) + kPacketLenOffset); size_t expected_pkt_len; - uint16_t msg_type = rtc::GetBE16(data); + uint16_t msg_type = webrtc::GetBE16(data); if (IsStunMessage(msg_type)) { // STUN message. 
expected_pkt_len = kStunHeaderSize + pkt_len; @@ -152,4 +163,4 @@ size_t AsyncStunTCPSocket::GetExpectedLength(const void* data, return expected_pkt_len; } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/async_stun_tcp_socket.h b/p2p/base/async_stun_tcp_socket.h index f0df42b52a..d4146d6e54 100644 --- a/p2p/base/async_stun_tcp_socket.h +++ b/p2p/base/async_stun_tcp_socket.h @@ -13,31 +13,34 @@ #include +#include + +#include "api/array_view.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/async_tcp_socket.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" -namespace cricket { +namespace webrtc { -class AsyncStunTCPSocket : public rtc::AsyncTCPSocketBase { +class AsyncStunTCPSocket : public AsyncTCPSocketBase { public: // Binds and connects `socket` and creates AsyncTCPSocket for // it. Takes ownership of `socket`. Returns NULL if bind() or // connect() fail (`socket` is destroyed in that case). - static AsyncStunTCPSocket* Create(rtc::Socket* socket, - const rtc::SocketAddress& bind_address, - const rtc::SocketAddress& remote_address); + static AsyncStunTCPSocket* Create(Socket* socket, + const SocketAddress& bind_address, + const SocketAddress& remote_address); - explicit AsyncStunTCPSocket(rtc::Socket* socket); + explicit AsyncStunTCPSocket(Socket* socket); AsyncStunTCPSocket(const AsyncStunTCPSocket&) = delete; AsyncStunTCPSocket& operator=(const AsyncStunTCPSocket&) = delete; int Send(const void* pv, size_t cb, - const rtc::PacketOptions& options) override; - void ProcessInput(char* data, size_t* len) override; + const AsyncSocketPacketOptions& options) override; + size_t ProcessInput(ArrayView data) override; private: // This method returns the message hdr + length written in the header. @@ -46,6 +49,14 @@ class AsyncStunTCPSocket : public rtc::AsyncTCPSocketBase { size_t GetExpectedLength(const void* data, size_t len, int* pad_bytes); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
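The rewritten GetExpectedLength() above distinguishes STUN messages (20-byte header, attribute length at offset 2, no extra padding) from TURN ChannelData frames (4-byte header, payload length at offset 2, padded to a multiple of four on TCP). A standalone sketch of that length math, assuming big-endian length fields as in the code above; ExpectedFrameLength folds the pad bytes into one total, whereas the real code reports them separately:

// STUN:        20-byte header, 16-bit attribute length at offset 2.
// ChannelData: 4-byte header, 16-bit payload length at offset 2; on TCP the
//              payload is padded up to a multiple of 4 bytes.
#include <cstddef>
#include <cstdint>
#include <cstdio>

namespace {

constexpr size_t kStunHeaderSize = 20;
constexpr size_t kChannelDataHeaderSize = 4;

uint16_t ReadBigEndian16(const uint8_t* p) {
  return static_cast<uint16_t>((p[0] << 8) | p[1]);
}

// The two most significant bits are 00 for STUN; ChannelData channel numbers
// occupy 0x4000-0x7FFF.
bool LooksLikeStun(uint16_t msg_type) { return (msg_type & 0xC000) == 0; }

// Returns header + payload (+ ChannelData padding): how many bytes must be
// buffered before one complete framed message can be delivered.
size_t ExpectedFrameLength(const uint8_t* data) {
  const uint16_t msg_type = ReadBigEndian16(data);
  const uint16_t payload_len = ReadBigEndian16(data + 2);
  if (LooksLikeStun(msg_type)) {
    return kStunHeaderSize + payload_len;
  }
  const size_t frame = kChannelDataHeaderSize + payload_len;
  const size_t remainder = frame % 4;
  return remainder == 0 ? frame : frame + (4 - remainder);
}

}  // namespace

int main() {
  const uint8_t stun_binding[4] = {0x00, 0x01, 0x00, 0x08};  // 20 + 8
  const uint8_t channel_data[4] = {0x40, 0x00, 0x00, 0x05};  // 4 + 5, pad to 12
  std::printf("%zu %zu\n", ExpectedFrameLength(stun_binding),
              ExpectedFrameLength(channel_data));  // prints "28 12"
  return 0;
}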
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::AsyncStunTCPSocket; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_ASYNC_STUN_TCP_SOCKET_H_ diff --git a/p2p/base/async_stun_tcp_socket_unittest.cc b/p2p/base/async_stun_tcp_socket_unittest.cc index 72d6a7fde0..2730dcd1f7 100644 --- a/p2p/base/async_stun_tcp_socket_unittest.cc +++ b/p2p/base/async_stun_tcp_socket_unittest.cc @@ -19,14 +19,20 @@ #include #include "absl/memory/memory.h" +#include "api/array_view.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/async_tcp_socket.h" +#include "rtc_base/buffer.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/virtual_socket_server.h" #include "test/gtest.h" -namespace cricket { +namespace webrtc { static unsigned char kStunMessageWithZeroLength[] = { 0x00, 0x01, 0x00, 0x00, // length of 0 (last 2 bytes) @@ -57,14 +63,14 @@ static unsigned char kTurnChannelDataMessageWithOddLength[] = { 0x40, 0x00, 0x00, 0x05, 0x21, 0x12, 0xA4, 0x42, '0', }; -static const rtc::SocketAddress kClientAddr("11.11.11.11", 0); -static const rtc::SocketAddress kServerAddr("22.22.22.22", 0); +static const SocketAddress kClientAddr("11.11.11.11", 0); +static const SocketAddress kServerAddr("22.22.22.22", 0); -class AsyncStunServerTCPSocket : public rtc::AsyncTcpListenSocket { +class AsyncStunServerTCPSocket : public AsyncTcpListenSocket { public: - explicit AsyncStunServerTCPSocket(std::unique_ptr socket) + explicit AsyncStunServerTCPSocket(std::unique_ptr socket) : AsyncTcpListenSocket(std::move(socket)) {} - void HandleIncomingConnection(rtc::Socket* socket) override { + void HandleIncomingConnection(Socket* socket) override { SignalNewConnection(this, new AsyncStunTCPSocket(socket)); } }; @@ -73,12 +79,12 @@ class AsyncStunTCPSocketTest : public ::testing::Test, public sigslot::has_slots<> { protected: AsyncStunTCPSocketTest() - : vss_(new rtc::VirtualSocketServer()), thread_(vss_.get()) {} + : vss_(new VirtualSocketServer()), thread_(vss_.get()) {} virtual void SetUp() { CreateSockets(); } void CreateSockets() { - std::unique_ptr server = + std::unique_ptr server = absl::WrapUnique(vss_->CreateSocket(kServerAddr.family(), SOCK_STREAM)); server->Bind(kServerAddr); listen_socket_ = @@ -86,7 +92,7 @@ class AsyncStunTCPSocketTest : public ::testing::Test, listen_socket_->SignalNewConnection.connect( this, &AsyncStunTCPSocketTest::OnNewConnection); - rtc::Socket* client = vss_->CreateSocket(kClientAddr.family(), SOCK_STREAM); + Socket* client = vss_->CreateSocket(kClientAddr.family(), SOCK_STREAM); send_socket_.reset(AsyncStunTCPSocket::Create( client, kClientAddr, listen_socket_->GetLocalAddress())); send_socket_->SignalSentPacket.connect( @@ -95,28 +101,29 @@ class AsyncStunTCPSocketTest : public ::testing::Test, vss_->ProcessMessagesUntilIdle(); } - void OnReadPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t len, - const rtc::SocketAddress& remote_addr, - const int64_t& /* packet_time_us */) { - recv_packets_.push_back(std::string(data, len)); + void OnReadPacket(AsyncPacketSocket* /* socket */, + const ReceivedIpPacket& packet) { + recv_packets_.push_back( + std::string(reinterpret_cast(packet.payload().data()), + packet.payload().size())); } - void OnSentPacket(rtc::AsyncPacketSocket* socket, - const 
rtc::SentPacket& packet) { + void OnSentPacket(AsyncPacketSocket* /* socket */, + const SentPacketInfo& /* packet */) { ++sent_packets_; } - void OnNewConnection(rtc::AsyncListenSocket* /*server*/, - rtc::AsyncPacketSocket* new_socket) { + void OnNewConnection(AsyncListenSocket* /*server*/, + AsyncPacketSocket* new_socket) { recv_socket_ = absl::WrapUnique(new_socket); - new_socket->SignalReadPacket.connect(this, - &AsyncStunTCPSocketTest::OnReadPacket); + new_socket->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + OnReadPacket(socket, packet); + }); } bool Send(const void* data, size_t len) { - rtc::PacketOptions options; + AsyncSocketPacketOptions options; int ret = send_socket_->Send(reinterpret_cast(data), len, options); vss_->ProcessMessagesUntilIdle(); @@ -133,11 +140,11 @@ class AsyncStunTCPSocketTest : public ::testing::Test, return ret; } - std::unique_ptr vss_; - rtc::AutoSocketServerThread thread_; + std::unique_ptr vss_; + AutoSocketServerThread thread_; std::unique_ptr send_socket_; - std::unique_ptr listen_socket_; - std::unique_ptr recv_socket_; + std::unique_ptr listen_socket_; + std::unique_ptr recv_socket_; std::list recv_packets_; int sent_packets_ = 0; }; @@ -164,6 +171,30 @@ TEST_F(AsyncStunTCPSocketTest, TestMultipleStunPackets) { EXPECT_EQ(4u, recv_packets_.size()); } +TEST_F(AsyncStunTCPSocketTest, ProcessInputHandlesMultiplePackets) { + send_socket_->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* /* socket */, const ReceivedIpPacket& packet) { + recv_packets_.push_back( + std::string(reinterpret_cast(packet.payload().data()), + packet.payload().size())); + }); + Buffer buffer; + buffer.AppendData(kStunMessageWithZeroLength, + sizeof(kStunMessageWithZeroLength)); + // ChannelData message MUST be padded to + // a multiple of four bytes. + const unsigned char kTurnChannelData[] = { + 0x40, 0x00, 0x00, 0x04, 0x21, 0x12, 0xA4, 0x42, + }; + buffer.AppendData(kTurnChannelData, sizeof(kTurnChannelData)); + + send_socket_->ProcessInput(buffer); + EXPECT_EQ(2u, recv_packets_.size()); + EXPECT_TRUE(CheckData(kStunMessageWithZeroLength, + sizeof(kStunMessageWithZeroLength))); + EXPECT_TRUE(CheckData(kTurnChannelData, sizeof(kTurnChannelData))); +} + // Verifying TURN channel data message with zero length. 
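The new ProcessInputHandlesMultiplePackets test above exercises the ArrayView-based ProcessInput(), which consumes every complete frame in the buffer and reports the bytes it used instead of memmoving leftovers. A usage-style sketch of the same flow (not a full test; it assumes an already-constructed AsyncStunTCPSocket with no packet callback registered yet, and namespace spellings follow the migrated code above):

#include <cstdint>
#include <string>
#include <vector>

#include "p2p/base/async_stun_tcp_socket.h"
#include "rtc_base/async_packet_socket.h"
#include "rtc_base/buffer.h"
#include "rtc_base/network/received_packet.h"

std::vector<std::string> DemultiplexTwoFrames(webrtc::AsyncStunTCPSocket& socket) {
  std::vector<std::string> received;
  // Replaces the old SignalReadPacket sigslot: one callback per framed packet.
  socket.RegisterReceivedPacketCallback(
      [&](webrtc::AsyncPacketSocket* /* socket */,
          const webrtc::ReceivedIpPacket& packet) {
        received.emplace_back(
            reinterpret_cast<const char*>(packet.payload().data()),
            packet.payload().size());
      });

  webrtc::Buffer wire;
  const uint8_t stun_zero_length[] = {0x00, 0x01, 0x00, 0x00,   // zero length
                                      0x21, 0x12, 0xA4, 0x42,   // magic cookie
                                      0x00, 0x00, 0x00, 0x00,   // 12-byte
                                      0x00, 0x00, 0x00, 0x00,   // transaction
                                      0x00, 0x00, 0x00, 0x00};  // id
  const uint8_t channel_data[] = {0x40, 0x00, 0x00, 0x04,
                                  0x21, 0x12, 0xA4, 0x42};  // 4-byte payload
  wire.AppendData(stun_zero_length, sizeof(stun_zero_length));
  wire.AppendData(channel_data, sizeof(channel_data));

  // Consumes as many complete frames as the buffer holds and returns the
  // number of bytes used; the callback fires once per frame.
  socket.ProcessInput(wire);

  socket.DeregisterReceivedPacketCallback();  // unhook before `received` dies
  return received;  // expected: two entries, 20 and 8 bytes
}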
TEST_F(AsyncStunTCPSocketTest, TestTurnChannelDataWithZeroLength) { EXPECT_TRUE(Send(kTurnChannelDataMessageWithZeroLength, @@ -285,4 +316,4 @@ TEST_F(AsyncStunTCPSocketTest, SignalSentPacketNotFiredWhenPacketNotSent) { EXPECT_EQ(0, sent_packets_); } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/basic_async_resolver_factory.cc b/p2p/base/basic_async_resolver_factory.cc index 67c81670d2..e66e8fb6e9 100644 --- a/p2p/base/basic_async_resolver_factory.cc +++ b/p2p/base/basic_async_resolver_factory.cc @@ -13,19 +13,13 @@ #include #include -#include "absl/memory/memory.h" +#include "absl/functional/any_invocable.h" #include "api/async_dns_resolver.h" -#include "api/wrapping_async_dns_resolver.h" #include "rtc_base/async_dns_resolver.h" -#include "rtc_base/async_resolver.h" -#include "rtc_base/logging.h" +#include "rtc_base/socket_address.h" namespace webrtc { -rtc::AsyncResolverInterface* BasicAsyncResolverFactory::Create() { - return new rtc::AsyncResolver(); -} - std::unique_ptr BasicAsyncDnsResolverFactory::Create() { return std::make_unique(); @@ -33,7 +27,7 @@ BasicAsyncDnsResolverFactory::Create() { std::unique_ptr BasicAsyncDnsResolverFactory::CreateAndResolve( - const rtc::SocketAddress& addr, + const SocketAddress& addr, absl::AnyInvocable callback) { std::unique_ptr resolver = Create(); resolver->Start(addr, std::move(callback)); @@ -42,31 +36,7 @@ BasicAsyncDnsResolverFactory::CreateAndResolve( std::unique_ptr BasicAsyncDnsResolverFactory::CreateAndResolve( - const rtc::SocketAddress& addr, - int family, - absl::AnyInvocable callback) { - std::unique_ptr resolver = Create(); - resolver->Start(addr, family, std::move(callback)); - return resolver; -} - -std::unique_ptr -WrappingAsyncDnsResolverFactory::Create() { - return std::make_unique(wrapped_factory_->Create()); -} - -std::unique_ptr -WrappingAsyncDnsResolverFactory::CreateAndResolve( - const rtc::SocketAddress& addr, - absl::AnyInvocable callback) { - std::unique_ptr resolver = Create(); - resolver->Start(addr, std::move(callback)); - return resolver; -} - -std::unique_ptr -WrappingAsyncDnsResolverFactory::CreateAndResolve( - const rtc::SocketAddress& addr, + const SocketAddress& addr, int family, absl::AnyInvocable callback) { std::unique_ptr resolver = Create(); diff --git a/p2p/base/basic_async_resolver_factory.h b/p2p/base/basic_async_resolver_factory.h index 9c1af6a1e1..a553f90e66 100644 --- a/p2p/base/basic_async_resolver_factory.h +++ b/p2p/base/basic_async_resolver_factory.h @@ -11,21 +11,14 @@ #ifndef P2P_BASE_BASIC_ASYNC_RESOLVER_FACTORY_H_ #define P2P_BASE_BASIC_ASYNC_RESOLVER_FACTORY_H_ -#include #include -#include +#include "absl/functional/any_invocable.h" #include "api/async_dns_resolver.h" -#include "api/async_resolver_factory.h" -#include "rtc_base/async_resolver_interface.h" +#include "rtc_base/socket_address.h" namespace webrtc { -class BasicAsyncResolverFactory final : public AsyncResolverFactory { - public: - rtc::AsyncResolverInterface* Create() override; -}; - // A factory that vends AsyncDnsResolver instances. 
class BasicAsyncDnsResolverFactory final : public AsyncDnsResolverFactoryInterface { @@ -33,46 +26,15 @@ class BasicAsyncDnsResolverFactory final BasicAsyncDnsResolverFactory() = default; std::unique_ptr CreateAndResolve( - const rtc::SocketAddress& addr, - absl::AnyInvocable callback) override; - - std::unique_ptr CreateAndResolve( - const rtc::SocketAddress& addr, - int family, - absl::AnyInvocable callback) override; - - std::unique_ptr Create() override; -}; - -// This class wraps a factory using the older webrtc::AsyncResolverFactory API, -// and produces webrtc::AsyncDnsResolver objects that contain an -// rtc::AsyncResolver object. -class WrappingAsyncDnsResolverFactory final - : public AsyncDnsResolverFactoryInterface { - public: - explicit WrappingAsyncDnsResolverFactory( - std::unique_ptr wrapped_factory) - : owned_factory_(std::move(wrapped_factory)), - wrapped_factory_(owned_factory_.get()) {} - - explicit WrappingAsyncDnsResolverFactory( - AsyncResolverFactory* non_owned_factory) - : wrapped_factory_(non_owned_factory) {} - - std::unique_ptr CreateAndResolve( - const rtc::SocketAddress& addr, + const SocketAddress& addr, absl::AnyInvocable callback) override; std::unique_ptr CreateAndResolve( - const rtc::SocketAddress& addr, + const SocketAddress& addr, int family, absl::AnyInvocable callback) override; std::unique_ptr Create() override; - - private: - const std::unique_ptr owned_factory_; - AsyncResolverFactory* const wrapped_factory_; }; } // namespace webrtc diff --git a/p2p/base/basic_async_resolver_factory_unittest.cc b/p2p/base/basic_async_resolver_factory_unittest.cc deleted file mode 100644 index 77b97e75e6..0000000000 --- a/p2p/base/basic_async_resolver_factory_unittest.cc +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "p2p/base/basic_async_resolver_factory.h" - -#include "api/test/mock_async_dns_resolver.h" -#include "p2p/base/mock_async_resolver.h" -#include "rtc_base/async_resolver.h" -#include "rtc_base/gunit.h" -#include "rtc_base/socket_address.h" -#include "rtc_base/third_party/sigslot/sigslot.h" -#include "test/gmock.h" -#include "test/gtest.h" -#include "test/testsupport/rtc_expect_death.h" - -namespace webrtc { - -class BasicAsyncResolverFactoryTest : public ::testing::Test, - public sigslot::has_slots<> { - public: - void TestCreate() { - BasicAsyncResolverFactory factory; - rtc::AsyncResolverInterface* resolver = factory.Create(); - ASSERT_TRUE(resolver); - resolver->SignalDone.connect( - this, &BasicAsyncResolverFactoryTest::SetAddressResolved); - - rtc::SocketAddress address("", 0); - resolver->Start(address); - ASSERT_TRUE_WAIT(address_resolved_, 10000 /*ms*/); - resolver->Destroy(false); - } - - void SetAddressResolved(rtc::AsyncResolverInterface* resolver) { - address_resolved_ = true; - } - - private: - bool address_resolved_ = false; -}; - -// This test is primarily intended to let tools check that the created resolver -// doesn't leak. 
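With the wrapping layer gone, BasicAsyncDnsResolverFactory::CreateAndResolve() is the one-call path: it creates a resolver and immediately starts the lookup, invoking an absl::AnyInvocable<void()> callback on completion (hence the new any_invocable entry in p2p/DEPS). A small usage sketch; the host name is hypothetical and error handling is omitted:

#include <memory>

#include "api/async_dns_resolver.h"
#include "p2p/base/basic_async_resolver_factory.h"
#include "rtc_base/logging.h"
#include "rtc_base/socket_address.h"

std::unique_ptr<webrtc::AsyncDnsResolverInterface> ResolveStunHost() {
  webrtc::BasicAsyncDnsResolverFactory factory;
  webrtc::SocketAddress address("stun.example.org", 3478);  // hypothetical host
  // The lookup starts immediately; the callback runs once resolution
  // finishes (or fails). The returned resolver must outlive the lookup.
  return factory.CreateAndResolve(address, [] {
    RTC_LOG(LS_INFO) << "DNS resolution attempt finished.";
  });
}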
-TEST_F(BasicAsyncResolverFactoryTest, TestCreate) { - rtc::AutoThread main_thread; - TestCreate(); -} - -TEST(WrappingAsyncDnsResolverFactoryTest, TestCreateAndResolve) { - rtc::AutoThread main_thread; - WrappingAsyncDnsResolverFactory factory( - std::make_unique()); - - std::unique_ptr resolver(factory.Create()); - ASSERT_TRUE(resolver); - - bool address_resolved = false; - rtc::SocketAddress address("", 0); - resolver->Start(address, [&address_resolved]() { address_resolved = true; }); - ASSERT_TRUE_WAIT(address_resolved, 10000 /*ms*/); - resolver.reset(); -} - -TEST(WrappingAsyncDnsResolverFactoryTest, WrapOtherResolver) { - rtc::AutoThread main_thread; - BasicAsyncResolverFactory non_owned_factory; - WrappingAsyncDnsResolverFactory factory(&non_owned_factory); - std::unique_ptr resolver(factory.Create()); - ASSERT_TRUE(resolver); - - bool address_resolved = false; - rtc::SocketAddress address("", 0); - resolver->Start(address, [&address_resolved]() { address_resolved = true; }); - ASSERT_TRUE_WAIT(address_resolved, 10000 /*ms*/); - resolver.reset(); -} - -#if GTEST_HAS_DEATH_TEST && defined(WEBRTC_LINUX) -// Tests that the prohibition against deleting the resolver from the callback -// is enforced. This is required by the use of sigslot in the wrapped resolver. -// Checking the error message fails on a number of platforms, so run this -// test only on the platforms where it works. -void CallResolver(WrappingAsyncDnsResolverFactory& factory) { - rtc::SocketAddress address("", 0); - std::unique_ptr resolver(factory.Create()); - resolver->Start(address, [&resolver]() { resolver.reset(); }); - WAIT(!resolver.get(), 10000 /*ms*/); -} - -TEST(WrappingAsyncDnsResolverFactoryDeathTest, DestroyResolverInCallback) { - rtc::AutoThread main_thread; - // TODO(bugs.webrtc.org/12652): Rewrite as death test in loop style when it - // works. - WrappingAsyncDnsResolverFactory factory( - std::make_unique()); - - // Since EXPECT_DEATH is thread sensitive, and the resolver creates a thread, - // we wrap the whole creation section in EXPECT_DEATH. - RTC_EXPECT_DEATH(CallResolver(factory), - "Check failed: !within_resolve_result_"); -} -#endif - -} // namespace webrtc diff --git a/p2p/base/basic_ice_controller.cc b/p2p/base/basic_ice_controller.cc index a0ff4cf144..713a154c99 100644 --- a/p2p/base/basic_ice_controller.cc +++ b/p2p/base/basic_ice_controller.cc @@ -10,18 +10,46 @@ #include "p2p/base/basic_ice_controller.h" +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "api/candidate.h" +#include "api/transport/enums.h" +#include "p2p/base/connection.h" +#include "p2p/base/connection_info.h" +#include "p2p/base/ice_controller_factory_interface.h" +#include "p2p/base/ice_controller_interface.h" +#include "p2p/base/ice_switch_reason.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/p2p_constants.h" +#include "p2p/base/transport_description.h" +#include "rtc_base/checks.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/logging.h" +#include "rtc_base/net_helper.h" +#include "rtc_base/network.h" +#include "rtc_base/network_constants.h" +#include "rtc_base/time_utils.h" + namespace { // The minimum improvement in RTT that justifies a switch. 
const int kMinImprovement = 10; -bool IsRelayRelay(const cricket::Connection* conn) { - return conn->local_candidate().type() == cricket::RELAY_PORT_TYPE && - conn->remote_candidate().type() == cricket::RELAY_PORT_TYPE; +bool IsRelayRelay(const webrtc::Connection* conn) { + return conn->local_candidate().is_relay() && + conn->remote_candidate().is_relay(); } -bool IsUdp(const cricket::Connection* conn) { - return conn->local_candidate().relay_protocol() == cricket::UDP_PROTOCOL_NAME; +bool IsUdp(const webrtc::Connection* conn) { + return conn->local_candidate().relay_protocol() == webrtc::UDP_PROTOCOL_NAME; } // TODO(qingsi) Use an enum to replace the following constants for all @@ -31,16 +59,16 @@ static constexpr int b_is_better = -1; static constexpr int a_and_b_equal = 0; bool LocalCandidateUsesPreferredNetwork( - const cricket::Connection* conn, - absl::optional network_preference) { - rtc::AdapterType network_type = conn->network()->type(); + const webrtc::Connection* conn, + std::optional network_preference) { + webrtc::AdapterType network_type = conn->network()->type(); return network_preference.has_value() && (network_type == network_preference); } int CompareCandidatePairsByNetworkPreference( - const cricket::Connection* a, - const cricket::Connection* b, - absl::optional network_preference) { + const webrtc::Connection* a, + const webrtc::Connection* b, + std::optional network_preference) { bool a_uses_preferred_network = LocalCandidateUsesPreferredNetwork(a, network_preference); bool b_uses_preferred_network = @@ -55,7 +83,7 @@ int CompareCandidatePairsByNetworkPreference( } // namespace -namespace cricket { +namespace webrtc { BasicIceController::BasicIceController(const IceControllerFactoryArgs& args) : ice_transport_state_func_(args.ice_transport_state_func), @@ -88,7 +116,7 @@ void BasicIceController::OnConnectionDestroyed(const Connection* connection) { } bool BasicIceController::HasPingableConnection() const { - int64_t now = rtc::TimeMillis(); + int64_t now = webrtc::TimeMillis(); return absl::c_any_of(connections_, [this, now](const Connection* c) { return IsPingable(c, now); }); @@ -100,16 +128,16 @@ IceControllerInterface::PingResult BasicIceController::SelectConnectionToPing( // active connection has not been pinged enough times, use the weak ping // interval. bool need_more_pings_at_weak_interval = - absl::c_any_of(connections_, [](const Connection* conn) { + absl::c_any_of(connections_, [](const webrtc::Connection* conn) { return conn->active() && - conn->num_pings_sent() < MIN_PINGS_AT_WEAK_PING_INTERVAL; + conn->num_pings_sent() < webrtc::MIN_PINGS_AT_WEAK_PING_INTERVAL; }); int ping_interval = (weak() || need_more_pings_at_weak_interval) ? weak_ping_interval() : strong_ping_interval(); const Connection* conn = nullptr; - if (rtc::TimeMillis() >= last_ping_sent_ms + ping_interval) { + if (webrtc::TimeMillis() >= last_ping_sent_ms + ping_interval) { conn = FindNextPingableConnection(); } PingResult res(conn, std::min(ping_interval, check_receiving_interval())); @@ -124,7 +152,7 @@ void BasicIceController::MarkConnectionPinged(const Connection* conn) { // Returns the next pingable connection to ping. const Connection* BasicIceController::FindNextPingableConnection() { - int64_t now = rtc::TimeMillis(); + int64_t now = webrtc::TimeMillis(); // Rule 1: Selected connection takes priority over non-selected ones. 
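basic_ice_controller.cc above now classifies candidates with the typed predicates (is_relay(), is_prflx(), and so on) rather than comparing type strings such as RELAY_PORT_TYPE. A short sketch in the same style; the helper name is illustrative and the include set follows the file above:

#include "api/candidate.h"
#include "p2p/base/connection.h"
#include "rtc_base/net_helper.h"

namespace {

// True when both sides of the pair are TURN-relayed and the local relay leg
// runs over UDP, in the style of IsRelayRelay()/IsUdp() above.
bool IsUdpRelayPair(const webrtc::Connection& conn) {
  const webrtc::Candidate& local = conn.local_candidate();
  const webrtc::Candidate& remote = conn.remote_candidate();
  return local.is_relay() && remote.is_relay() &&
         local.relay_protocol() == webrtc::UDP_PROTOCOL_NAME;
}

}  // namespace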
if (selected_connection_ && selected_connection_->connected() && @@ -152,7 +180,7 @@ const Connection* BasicIceController::FindNextPingableConnection() { }); auto iter = absl::c_min_element( pingable_selectable_connections, - [](const Connection* conn1, const Connection* conn2) { + [](const webrtc::Connection* conn1, const webrtc::Connection* conn2) { return conn1->last_ping_sent() < conn2->last_ping_sent(); }); if (iter != pingable_selectable_connections.end()) { @@ -246,14 +274,15 @@ int BasicIceController::CalculateActiveWritablePingInterval( int64_t now) const { // Ping each connection at a higher rate at least // MIN_PINGS_AT_WEAK_PING_INTERVAL times. - if (conn->num_pings_sent() < MIN_PINGS_AT_WEAK_PING_INTERVAL) { + if (conn->num_pings_sent() < webrtc::MIN_PINGS_AT_WEAK_PING_INTERVAL) { return weak_ping_interval(); } int stable_interval = config_.stable_writable_connection_ping_interval_or_default(); - int weak_or_stablizing_interval = std::min( - stable_interval, WEAK_OR_STABILIZING_WRITABLE_CONNECTION_PING_INTERVAL); + int weak_or_stablizing_interval = + std::min(stable_interval, + webrtc::WEAK_OR_STABILIZING_WRITABLE_CONNECTION_PING_INTERVAL); // If the channel is weak or the connection is not stable yet, use the // weak_or_stablizing_interval. return (!weak() && conn->stable(now)) ? stable_interval @@ -320,7 +349,8 @@ bool BasicIceController::IsPingable(const Connection* conn, int64_t now) const { // A connection is considered a backup connection if the channel state // is completed, the connection is not the selected connection and it is active. bool BasicIceController::IsBackupConnection(const Connection* conn) const { - return ice_transport_state_func_() == IceTransportState::STATE_COMPLETED && + return ice_transport_state_func_() == + IceTransportStateInternal::STATE_COMPLETED && conn != selected_connection_ && conn->active(); } @@ -382,19 +412,19 @@ const Connection* BasicIceController::LeastRecentlyPinged( return nullptr; } -std::map +std::map BasicIceController::GetBestConnectionByNetwork() const { // `connections_` has been sorted, so the first one in the list on a given // network is the best connection on the network, except that the selected // connection is always the best connection on the network. - std::map best_connection_by_network; + std::map best_connection_by_network; if (selected_connection_) { best_connection_by_network[selected_connection_->network()] = selected_connection_; } // TODO(honghaiz): Need to update this if `connections_` are not sorted. for (const Connection* conn : connections_) { - const rtc::Network* network = conn->network(); + const Network* network = conn->network(); // This only inserts when the network does not exist in the map. best_connection_by_network.insert(std::make_pair(network, conn)); } @@ -415,15 +445,15 @@ BasicIceController::GetBestWritableConnectionPerNetwork() const { IceControllerInterface::SwitchResult BasicIceController::HandleInitialSelectDampening( - IceSwitchReason reason, + IceSwitchReason /* reason */, const Connection* new_connection) { if (!field_trials_->initial_select_dampening.has_value() && !field_trials_->initial_select_dampening_ping_received.has_value()) { // experiment not enabled => select connection. 
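GetBestConnectionByNetwork() above leans on two facts: connections_ is kept sorted best-first, and std::map::insert() is a no-op when the key already exists, so the first connection seen per network wins while the selected connection is seeded ahead of the loop. A standalone sketch of that insert-keeps-first idiom with simple stand-in types:

#include <cstdio>
#include <map>
#include <string>
#include <utility>
#include <vector>

int main() {
  // Stand-ins: network name -> connection id, already sorted best-first.
  std::vector<std::pair<std::string, int>> sorted_connections = {
      {"wifi", 7}, {"wifi", 3}, {"cell", 9}, {"cell", 2}};

  std::map<std::string, int> best_by_network;
  // A pre-selected connection always wins for its network (operator[]
  // overwrites), mirroring the selected_connection_ special case.
  best_by_network["cell"] = 2;

  for (const auto& [network, id] : sorted_connections) {
    // insert() does nothing if the network is already present, so only the
    // best (first) connection per network survives.
    best_by_network.insert({network, id});
  }

  for (const auto& [network, id] : best_by_network) {
    std::printf("%s -> %d\n", network.c_str(), id);  // cell -> 2, wifi -> 7
  }
  return 0;
}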
- return {new_connection, absl::nullopt}; + return {new_connection, std::nullopt}; } - int64_t now = rtc::TimeMillis(); + int64_t now = webrtc::TimeMillis(); int64_t max_delay = 0; if (new_connection->last_ping_received() > 0 && field_trials_->initial_select_dampening_ping_received.has_value()) { @@ -441,7 +471,7 @@ BasicIceController::HandleInitialSelectDampening( << initial_select_timestamp_ms_ << " selection delayed by: " << (now - start_wait) << "ms"; initial_select_timestamp_ms_ = 0; - return {new_connection, absl::nullopt}; + return {new_connection, std::nullopt}; } // We are not yet ready to select first connection... @@ -464,7 +494,7 @@ BasicIceController::HandleInitialSelectDampening( } RTC_LOG(LS_INFO) << "delay initial selection up to " << min_delay << "ms"; - return {.connection = absl::nullopt, + return {.connection = std::nullopt, .recheck_event = IceRecheckEvent( IceSwitchReason::ICE_CONTROLLER_RECHECK, min_delay)}; } @@ -473,7 +503,7 @@ IceControllerInterface::SwitchResult BasicIceController::ShouldSwitchConnection( IceSwitchReason reason, const Connection* new_connection) { if (!ReadyToSend(new_connection) || selected_connection_ == new_connection) { - return {absl::nullopt, absl::nullopt}; + return {std::nullopt, std::nullopt}; } if (selected_connection_ == nullptr) { @@ -486,17 +516,17 @@ IceControllerInterface::SwitchResult BasicIceController::ShouldSwitchConnection( int compare_a_b_by_networks = CompareCandidatePairNetworks( new_connection, selected_connection_, config_.network_preference); if (compare_a_b_by_networks == b_is_better && !new_connection->receiving()) { - return {absl::nullopt, absl::nullopt}; + return {std::nullopt, std::nullopt}; } bool missed_receiving_unchanged_threshold = false; - absl::optional receiving_unchanged_threshold( - rtc::TimeMillis() - config_.receiving_switching_delay_or_default()); + std::optional receiving_unchanged_threshold( + webrtc::TimeMillis() - config_.receiving_switching_delay_or_default()); int cmp = CompareConnections(selected_connection_, new_connection, receiving_unchanged_threshold, &missed_receiving_unchanged_threshold); - absl::optional recheck_event; + std::optional recheck_event; if (missed_receiving_unchanged_threshold && config_.receiving_switching_delay_or_default()) { // If we do not switch to the connection because it missed the receiving @@ -508,18 +538,18 @@ IceControllerInterface::SwitchResult BasicIceController::ShouldSwitchConnection( } if (cmp < 0) { - return {new_connection, absl::nullopt}; + return {new_connection, std::nullopt}; } else if (cmp > 0) { - return {absl::nullopt, recheck_event}; + return {std::nullopt, recheck_event}; } // If everything else is the same, switch only if rtt has improved by // a margin. if (new_connection->rtt() <= selected_connection_->rtt() - kMinImprovement) { - return {new_connection, absl::nullopt}; + return {new_connection, std::nullopt}; } - return {absl::nullopt, recheck_event}; + return {std::nullopt, recheck_event}; } IceControllerInterface::SwitchResult @@ -531,7 +561,7 @@ BasicIceController::SortAndSwitchConnection(IceSwitchReason reason) { // TODO(honghaiz): Don't sort; Just use std::max_element in the right places. 
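The absl::optional to std::optional migration above keeps the SwitchResult contract: return {connection, std::nullopt} to switch immediately, or {std::nullopt, recheck_event} to keep the current pick and re-evaluate after a delay. A standalone sketch of that result shape with simplified stand-in types (not the real IceControllerInterface definitions):

#include <cstdio>
#include <optional>

struct FakeConnection {
  int rtt_ms = 0;
};

struct RecheckEvent {
  int recheck_delay_ms = 0;
};

// Same shape of outcome: a switch target, a deferred recheck, or neither.
struct SwitchResult {
  std::optional<const FakeConnection*> connection;
  std::optional<RecheckEvent> recheck_event;
};

SwitchResult ShouldSwitch(const FakeConnection* selected,
                          const FakeConnection* candidate) {
  constexpr int kMinImprovementMs = 10;  // same margin idea as kMinImprovement
  if (selected == nullptr) {
    return {candidate, std::nullopt};  // nothing selected yet: take it
  }
  if (candidate->rtt_ms <= selected->rtt_ms - kMinImprovementMs) {
    return {candidate, std::nullopt};  // clearly better: switch now
  }
  // Not convincingly better: keep the current pick, ask to recheck later.
  return {std::nullopt, RecheckEvent{2000}};
}

int main() {
  FakeConnection selected{50};
  FakeConnection candidate{30};
  SwitchResult result = ShouldSwitch(&selected, &candidate);
  std::printf("switch=%d recheck=%d\n", result.connection.has_value(),
              result.recheck_event.has_value());  // switch=1 recheck=0
  return 0;
}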
absl::c_stable_sort( connections_, [this](const Connection* a, const Connection* b) { - int cmp = CompareConnections(a, b, absl::nullopt, nullptr); + int cmp = CompareConnections(a, b, std::nullopt, nullptr); if (cmp != 0) { return cmp > 0; } @@ -541,7 +571,7 @@ BasicIceController::SortAndSwitchConnection(IceSwitchReason reason) { RTC_LOG(LS_VERBOSE) << "Sorting " << connections_.size() << " available connections due to: " - << IceSwitchReasonToString(reason); + << webrtc::IceSwitchReasonToString(reason); for (size_t i = 0; i < connections_.size(); ++i) { RTC_LOG(LS_VERBOSE) << connections_[i]->ToString(); } @@ -565,9 +595,9 @@ bool BasicIceController::ReadyToSend(const Connection* connection) const { bool BasicIceController::PresumedWritable(const Connection* conn) const { return (conn->write_state() == Connection::STATE_WRITE_INIT && config_.presume_writable_when_fully_relayed && - conn->local_candidate().type() == RELAY_PORT_TYPE && - (conn->remote_candidate().type() == RELAY_PORT_TYPE || - conn->remote_candidate().type() == PRFLX_PORT_TYPE)); + conn->local_candidate().is_relay() && + (conn->remote_candidate().is_relay() || + conn->remote_candidate().is_prflx())); } // Compare two connections based on their writing, receiving, and connected @@ -575,7 +605,7 @@ bool BasicIceController::PresumedWritable(const Connection* conn) const { int BasicIceController::CompareConnectionStates( const Connection* a, const Connection* b, - absl::optional receiving_unchanged_threshold, + std::optional receiving_unchanged_threshold, bool* missed_receiving_unchanged_threshold) const { // First, prefer a connection that's writable or presumed writable over // one that's not writable. @@ -693,7 +723,7 @@ int BasicIceController::CompareConnectionCandidates(const Connection* a, int BasicIceController::CompareConnections( const Connection* a, const Connection* b, - absl::optional receiving_unchanged_threshold, + std::optional receiving_unchanged_threshold, bool* missed_receiving_unchanged_threshold) const { RTC_CHECK(a != nullptr); RTC_CHECK(b != nullptr); @@ -707,7 +737,7 @@ int BasicIceController::CompareConnections( return state_cmp; } - if (ice_role_func_() == ICEROLE_CONTROLLED) { + if (ice_role_func_() == webrtc::ICEROLE_CONTROLLED) { // Compare the connections based on the nomination states and the last data // received time if this is on the controlled side. 
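SortAndSwitchConnection() above adapts a three-way comparison (positive means the first argument is preferable) into the boolean predicate a stable sort needs, so equally ranked connections keep their relative order. A standalone sketch of that adaptation with stand-in connection objects:

#include <algorithm>
#include <cstdio>
#include <vector>

struct FakeConnection {
  int writable = 0;  // 1 beats 0
  int priority = 0;  // higher beats lower
  char name = '?';
};

// Positive => a preferable, negative => b preferable, 0 => tie.
int Compare(const FakeConnection& a, const FakeConnection& b) {
  if (a.writable != b.writable) return a.writable - b.writable;
  return a.priority - b.priority;
}

int main() {
  std::vector<FakeConnection> conns = {
      {1, 10, 'a'}, {0, 99, 'b'}, {1, 10, 'c'}, {1, 20, 'd'}};
  std::stable_sort(conns.begin(), conns.end(),
                   [](const FakeConnection& a, const FakeConnection& b) {
                     return Compare(a, b) > 0;  // best connections first
                   });
  for (const FakeConnection& c : conns) std::printf("%c", c.name);
  std::printf("\n");  // "dacb": d best, the a/c tie keeps its order, b last
  return 0;
}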
if (a->remote_nomination() > b->remote_nomination()) { @@ -732,7 +762,7 @@ int BasicIceController::CompareConnections( int BasicIceController::CompareCandidatePairNetworks( const Connection* a, const Connection* b, - absl::optional network_preference) const { + std::optional /* network_preference */) const { int compare_a_b_by_network_preference = CompareCandidatePairsByNetworkPreference(a, b, config_.network_preference); @@ -744,18 +774,18 @@ int BasicIceController::CompareCandidatePairNetworks( bool a_vpn = a->network()->IsVpn(); bool b_vpn = b->network()->IsVpn(); switch (config_.vpn_preference) { - case webrtc::VpnPreference::kDefault: + case VpnPreference::kDefault: break; - case webrtc::VpnPreference::kOnlyUseVpn: - case webrtc::VpnPreference::kPreferVpn: + case VpnPreference::kOnlyUseVpn: + case VpnPreference::kPreferVpn: if (a_vpn && !b_vpn) { return a_is_better; } else if (!a_vpn && b_vpn) { return b_is_better; } break; - case webrtc::VpnPreference::kNeverUseVpn: - case webrtc::VpnPreference::kAvoidVpn: + case VpnPreference::kNeverUseVpn: + case VpnPreference::kAvoidVpn: if (a_vpn && !b_vpn) { return b_is_better; } else if (!a_vpn && b_vpn) { @@ -797,7 +827,7 @@ std::vector BasicIceController::PruneConnections() { auto best_connection_by_network = GetBestConnectionByNetwork(); for (const Connection* conn : connections_) { const Connection* best_conn = selected_connection_; - if (!rtc::IPIsAny(conn->network()->GetBestIP())) { + if (!webrtc::IPIsAny(conn->network()->GetBestIP())) { // If the connection is bound to a specific network interface (not an // "any address" network), compare it against the best connection for // that network interface rather than the best connection overall. This @@ -823,7 +853,7 @@ bool BasicIceController::GetUseCandidateAttr(const Connection* conn, // TODO(honghaiz): Implement regular nomination. return false; case NominationMode::AGGRESSIVE: - if (remote_ice_mode == ICEMODE_LITE) { + if (remote_ice_mode == webrtc::ICEMODE_LITE) { return GetUseCandidateAttr(conn, NominationMode::REGULAR, remote_ice_mode); } @@ -839,7 +869,7 @@ bool BasicIceController::GetUseCandidateAttr(const Connection* conn, // b.1) `conn` is the selected_connection AND // b.2) `conn` is writable. 
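The VpnPreference handling above only matters when exactly one of the two candidate pairs runs over a VPN: kPreferVpn/kOnlyUseVpn favor the VPN pair, kAvoidVpn/kNeverUseVpn favor the other, and kDefault defers to later criteria. A standalone sketch with stand-in enums, using the same better/equal return convention as the constants above (a_is_better assumed to be +1, matching the -1/0 shown):

#include <cstdio>

enum class VpnPreference {
  kDefault, kOnlyUseVpn, kPreferVpn, kAvoidVpn, kNeverUseVpn
};

constexpr int a_is_better = 1;  // assumed counterpart of b_is_better above
constexpr int b_is_better = -1;
constexpr int a_and_b_equal = 0;

int CompareByVpn(bool a_vpn, bool b_vpn, VpnPreference pref) {
  if (a_vpn == b_vpn) return a_and_b_equal;  // preference cannot separate them
  switch (pref) {
    case VpnPreference::kOnlyUseVpn:
    case VpnPreference::kPreferVpn:
      return a_vpn ? a_is_better : b_is_better;
    case VpnPreference::kNeverUseVpn:
    case VpnPreference::kAvoidVpn:
      return a_vpn ? b_is_better : a_is_better;
    case VpnPreference::kDefault:
      break;
  }
  return a_and_b_equal;
}

int main() {
  std::printf("%d %d\n",
              CompareByVpn(true, false, VpnPreference::kPreferVpn),   // 1
              CompareByVpn(true, false, VpnPreference::kAvoidVpn));   // -1
  return 0;
}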
bool selected = conn == selected_connection_; - if (remote_ice_mode == ICEMODE_LITE) { + if (remote_ice_mode == webrtc::ICEMODE_LITE) { return selected && conn->writable(); } bool better_than_selected = @@ -853,4 +883,4 @@ bool BasicIceController::GetUseCandidateAttr(const Connection* conn, } } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/basic_ice_controller.h b/p2p/base/basic_ice_controller.h index b941a0dd7e..70ebafceb0 100644 --- a/p2p/base/basic_ice_controller.h +++ b/p2p/base/basic_ice_controller.h @@ -12,16 +12,25 @@ #define P2P_BASE_BASIC_ICE_CONTROLLER_H_ #include +#include +#include #include +#include #include -#include #include +#include "api/array_view.h" +#include "p2p/base/connection.h" #include "p2p/base/ice_controller_factory_interface.h" #include "p2p/base/ice_controller_interface.h" -#include "p2p/base/p2p_transport_channel.h" +#include "p2p/base/ice_switch_reason.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/p2p_constants.h" +#include "p2p/base/transport_description.h" +#include "rtc_base/network.h" +#include "rtc_base/network_constants.h" -namespace cricket { +namespace webrtc { class BasicIceController : public IceControllerInterface { public: @@ -32,8 +41,11 @@ class BasicIceController : public IceControllerInterface { void SetSelectedConnection(const Connection* selected_connection) override; void AddConnection(const Connection* connection) override; void OnConnectionDestroyed(const Connection* connection) override; - rtc::ArrayView connections() const override { - return rtc::ArrayView( + ArrayView GetConnections() const override { + return connections_; + } + ArrayView connections() const override { + return ArrayView( const_cast(connections_.data()), connections_.size()); } @@ -74,7 +86,7 @@ class BasicIceController : public IceControllerInterface { } int check_receiving_interval() const { - return std::max(MIN_CHECK_RECEIVING_INTERVAL, + return std::max(webrtc::MIN_CHECK_RECEIVING_INTERVAL, config_.receiving_timeout_or_default() / 10); } @@ -100,7 +112,7 @@ class BasicIceController : public IceControllerInterface { int CalculateActiveWritablePingInterval(const Connection* conn, int64_t now) const; - std::map GetBestConnectionByNetwork() + std::map GetBestConnectionByNetwork() const; std::vector GetBestWritableConnectionPerNetwork() const; @@ -110,7 +122,7 @@ class BasicIceController : public IceControllerInterface { int CompareCandidatePairNetworks( const Connection* a, const Connection* b, - absl::optional network_preference) const; + std::optional network_preference) const; // The methods below return a positive value if `a` is preferable to `b`, // a negative value if `b` is preferable, and 0 if they're equally preferable. @@ -122,7 +134,7 @@ class BasicIceController : public IceControllerInterface { int CompareConnectionStates( const Connection* a, const Connection* b, - absl::optional receiving_unchanged_threshold, + std::optional receiving_unchanged_threshold, bool* missed_receiving_unchanged_threshold) const; int CompareConnectionCandidates(const Connection* a, const Connection* b) const; @@ -133,13 +145,13 @@ class BasicIceController : public IceControllerInterface { // Returns a positive value if `a` is better than `b`. 
int CompareConnections(const Connection* a, const Connection* b, - absl::optional receiving_unchanged_threshold, + std::optional receiving_unchanged_threshold, bool* missed_receiving_unchanged_threshold) const; SwitchResult HandleInitialSelectDampening(IceSwitchReason reason, const Connection* new_connection); - std::function ice_transport_state_func_; + std::function ice_transport_state_func_; std::function ice_role_func_; std::function is_connection_pruned_func_; @@ -160,6 +172,14 @@ class BasicIceController : public IceControllerInterface { int64_t initial_select_timestamp_ms_ = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::BasicIceController; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_BASIC_ICE_CONTROLLER_H_ diff --git a/p2p/base/basic_packet_socket_factory.cc b/p2p/base/basic_packet_socket_factory.cc index 5bc02dd0f0..4604570ceb 100644 --- a/p2p/base/basic_packet_socket_factory.cc +++ b/p2p/base/basic_packet_socket_factory.cc @@ -12,21 +12,27 @@ #include +#include +#include #include #include "absl/memory/memory.h" #include "api/async_dns_resolver.h" -#include "api/wrapping_async_dns_resolver.h" +#include "api/packet_socket_factory.h" #include "p2p/base/async_stun_tcp_socket.h" +#include "rtc_base/async_dns_resolver.h" +#include "rtc_base/async_packet_socket.h" #include "rtc_base/async_tcp_socket.h" #include "rtc_base/async_udp_socket.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/socket.h" #include "rtc_base/socket_adapters.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/socket_factory.h" #include "rtc_base/ssl_adapter.h" -namespace rtc { +namespace webrtc { BasicPacketSocketFactory::BasicPacketSocketFactory( SocketFactory* socket_factory) @@ -86,8 +92,6 @@ AsyncListenSocket* BasicPacketSocketFactory::CreateServerTcpSocket( AsyncPacketSocket* BasicPacketSocketFactory::CreateClientTcpSocket( const SocketAddress& local_address, const SocketAddress& remote_address, - const ProxyInfo& proxy_info, - const std::string& user_agent, const PacketSocketTcpOptions& tcp_options) { Socket* socket = socket_factory_->CreateSocket(local_address.family(), SOCK_STREAM); @@ -119,16 +123,6 @@ AsyncPacketSocket* BasicPacketSocketFactory::CreateClientTcpSocket( << socket->GetError(); } - // If using a proxy, wrap the socket in a proxy socket. - if (proxy_info.type == PROXY_SOCKS5) { - socket = new AsyncSocksProxySocket( - socket, proxy_info.address, proxy_info.username, proxy_info.password); - } else if (proxy_info.type == PROXY_HTTPS) { - socket = - new AsyncHttpsProxySocket(socket, user_agent, proxy_info.address, - proxy_info.username, proxy_info.password); - } - // Assert that at most one TLS option is used. int tlsOpts = tcp_options.opts & (PacketSocketFactory::OPT_TLS | PacketSocketFactory::OPT_TLS_FAKE | @@ -172,7 +166,7 @@ AsyncPacketSocket* BasicPacketSocketFactory::CreateClientTcpSocket( // Finally, wrap that socket in a TCP or STUN TCP packet socket. 
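CreateClientTcpSocket() above has dropped the ProxyInfo and user-agent parameters; callers now pass only the two addresses plus PacketSocketTcpOptions, where OPT_STUN selects the STUN/TURN framing wrapper (AsyncStunTCPSocket) over a plain AsyncTCPSocket. A caller-side sketch; the function name and ownership handling are illustrative, and the factory may return null if the connect fails:

#include <memory>

#include "api/packet_socket_factory.h"
#include "p2p/base/basic_packet_socket_factory.h"
#include "rtc_base/async_packet_socket.h"
#include "rtc_base/socket_address.h"
#include "rtc_base/socket_factory.h"

std::unique_ptr<webrtc::AsyncPacketSocket> ConnectStunTcp(
    webrtc::SocketFactory* socket_factory,
    const webrtc::SocketAddress& local_address,
    const webrtc::SocketAddress& turn_server_address) {
  webrtc::BasicPacketSocketFactory factory(socket_factory);
  webrtc::PacketSocketTcpOptions tcp_options;
  tcp_options.opts = webrtc::PacketSocketFactory::OPT_STUN;  // STUN framing
  // No ProxyInfo / user-agent arguments anymore; returns null on failure.
  return std::unique_ptr<webrtc::AsyncPacketSocket>(
      factory.CreateClientTcpSocket(local_address, turn_server_address,
                                    tcp_options));
}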
AsyncPacketSocket* tcp_socket; if (tcp_options.opts & PacketSocketFactory::OPT_STUN) { - tcp_socket = new cricket::AsyncStunTCPSocket(socket); + tcp_socket = new AsyncStunTCPSocket(socket); } else { tcp_socket = new AsyncTCPSocket(socket); } @@ -180,14 +174,9 @@ AsyncPacketSocket* BasicPacketSocketFactory::CreateClientTcpSocket( return tcp_socket; } -AsyncResolverInterface* BasicPacketSocketFactory::CreateAsyncResolver() { - return new AsyncResolver(); -} - -std::unique_ptr +std::unique_ptr BasicPacketSocketFactory::CreateAsyncDnsResolver() { - return std::make_unique( - new AsyncResolver()); + return std::make_unique(); } int BasicPacketSocketFactory::BindSocket(Socket* socket, @@ -207,4 +196,4 @@ int BasicPacketSocketFactory::BindSocket(Socket* socket, return ret; } -} // namespace rtc +} // namespace webrtc diff --git a/p2p/base/basic_packet_socket_factory.h b/p2p/base/basic_packet_socket_factory.h index 396a8ba4eb..276e30956d 100644 --- a/p2p/base/basic_packet_socket_factory.h +++ b/p2p/base/basic_packet_socket_factory.h @@ -14,20 +14,16 @@ #include #include -#include #include "api/async_dns_resolver.h" #include "api/packet_socket_factory.h" #include "rtc_base/async_packet_socket.h" -#include "rtc_base/proxy_info.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" #include "rtc_base/socket_factory.h" #include "rtc_base/system/rtc_export.h" -namespace rtc { - -class SocketFactory; +namespace webrtc { class RTC_EXPORT BasicPacketSocketFactory : public PacketSocketFactory { public: @@ -44,16 +40,9 @@ class RTC_EXPORT BasicPacketSocketFactory : public PacketSocketFactory { AsyncPacketSocket* CreateClientTcpSocket( const SocketAddress& local_address, const SocketAddress& remote_address, - const ProxyInfo& proxy_info, - const std::string& user_agent, const PacketSocketTcpOptions& tcp_options) override; - // TODO(bugs.webrtc.org/12598) Remove when downstream stops using it. - ABSL_DEPRECATED("Use CreateAsyncDnsResolver") - AsyncResolverInterface* CreateAsyncResolver() override; - - std::unique_ptr CreateAsyncDnsResolver() - override; + std::unique_ptr CreateAsyncDnsResolver() override; private: int BindSocket(Socket* socket, @@ -64,6 +53,14 @@ class RTC_EXPORT BasicPacketSocketFactory : public PacketSocketFactory { SocketFactory* socket_factory_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::BasicPacketSocketFactory; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_BASIC_PACKET_SOCKET_FACTORY_H_ diff --git a/p2p/base/candidate_pair_interface.h b/p2p/base/candidate_pair_interface.h index 2b68fd7ea9..65ab7883f2 100644 --- a/p2p/base/candidate_pair_interface.h +++ b/p2p/base/candidate_pair_interface.h @@ -11,9 +11,9 @@ #ifndef P2P_BASE_CANDIDATE_PAIR_INTERFACE_H_ #define P2P_BASE_CANDIDATE_PAIR_INTERFACE_H_ -namespace cricket { +#include "api/candidate.h" -class Candidate; +namespace webrtc { class CandidatePairInterface { public: @@ -23,6 +23,27 @@ class CandidatePairInterface { virtual const Candidate& remote_candidate() const = 0; }; +// Specific implementation of the interface, suitable for being a +// data member of other structs. 
+struct CandidatePair final : public CandidatePairInterface { + ~CandidatePair() override = default; + + const Candidate& local_candidate() const override { return local; } + const Candidate& remote_candidate() const override { return remote; } + + Candidate local; + Candidate remote; +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::CandidatePair; +using ::webrtc::CandidatePairInterface; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_CANDIDATE_PAIR_INTERFACE_H_ diff --git a/p2p/base/connection.cc b/p2p/base/connection.cc index 1ef42cc76f..e2ccfaed53 100644 --- a/p2p/base/connection.cc +++ b/p2p/base/connection.cc @@ -13,31 +13,59 @@ #include #include +#include +#include #include +#include +#include #include #include #include "absl/algorithm/container.h" -#include "absl/strings/escaping.h" -#include "absl/strings/match.h" +#include "absl/functional/any_invocable.h" #include "absl/strings/string_view.h" -#include "p2p/base/port_allocator.h" +#include "api/array_view.h" +#include "api/candidate.h" +#include "api/rtc_error.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/transport/stun.h" +#include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" +#include "logging/rtc_event_log/ice_logger.h" +#include "p2p/base/connection_info.h" +#include "p2p/base/p2p_constants.h" +#include "p2p/base/p2p_transport_channel_ice_field_trials.h" +#include "p2p/base/port_interface.h" +#include "p2p/base/stun_request.h" +#include "p2p/base/transport_description.h" +#include "p2p/dtls/dtls_stun_piggyback_callbacks.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/byte_buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/crc32.h" -#include "rtc_base/helpers.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/logging.h" -#include "rtc_base/mdns_responder_interface.h" -#include "rtc_base/message_digest.h" +#include "rtc_base/net_helper.h" +#include "rtc_base/net_helpers.h" #include "rtc_base/network.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_constants.h" #include "rtc_base/numerics/safe_minmax.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" #include "rtc_base/string_encode.h" #include "rtc_base/string_utils.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/third_party/base64/base64.h" +#include "rtc_base/time_utils.h" +#include "rtc_base/weak_ptr.h" -namespace cricket { +namespace webrtc { namespace { +using webrtc::IceCandidateType; + // Determines whether we have seen at least the given maximum number of // pings fail to have a response. inline bool TooManyFailures( @@ -70,61 +98,58 @@ inline bool TooLongWithoutResponse( // Helper methods for converting string values of log description fields to // enum. 
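Note: the `WEBRTC_ALLOW_DEPRECATED_NAMESPACES` re-export block above (repeated for most headers in this patch) keeps unmigrated code compiling while the types move to `webrtc::`. A small illustration of what the alias buys a downstream target built with that define; purely a sketch.

    // Old spelling still resolves via the using-declaration:
    cricket::CandidatePair pair;
    pair.local.set_component(1);
    // Same type under both spellings (needs <type_traits>):
    static_assert(
        std::is_same_v<cricket::CandidatePair, webrtc::CandidatePair>);
    const webrtc::CandidatePairInterface& iface = pair;  // new spelling
    // iface.local_candidate() and pair.local refer to the same Candidate.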
-webrtc::IceCandidateType GetCandidateTypeByString(absl::string_view type) { - if (type == LOCAL_PORT_TYPE) { - return webrtc::IceCandidateType::kLocal; - } else if (type == STUN_PORT_TYPE) { - return webrtc::IceCandidateType::kStun; - } else if (type == PRFLX_PORT_TYPE) { - return webrtc::IceCandidateType::kPrflx; - } else if (type == RELAY_PORT_TYPE) { - return webrtc::IceCandidateType::kRelay; - } - return webrtc::IceCandidateType::kUnknown; -} - -webrtc::IceCandidatePairProtocol GetProtocolByString( - absl::string_view protocol) { - if (protocol == UDP_PROTOCOL_NAME) { - return webrtc::IceCandidatePairProtocol::kUdp; - } else if (protocol == TCP_PROTOCOL_NAME) { - return webrtc::IceCandidatePairProtocol::kTcp; - } else if (protocol == SSLTCP_PROTOCOL_NAME) { - return webrtc::IceCandidatePairProtocol::kSsltcp; - } else if (protocol == TLS_PROTOCOL_NAME) { - return webrtc::IceCandidatePairProtocol::kTls; - } - return webrtc::IceCandidatePairProtocol::kUnknown; -} - -webrtc::IceCandidatePairAddressFamily GetAddressFamilyByInt( - int address_family) { +IceCandidateType GetRtcEventLogCandidateType(const Candidate& c) { + if (c.is_local()) { + return IceCandidateType::kHost; + } else if (c.is_stun()) { + return IceCandidateType::kSrflx; + } else if (c.is_prflx()) { + return IceCandidateType::kPrflx; + } + RTC_DCHECK(c.is_relay()); + return IceCandidateType::kRelay; +} + +IceCandidatePairProtocol GetProtocolByString(absl::string_view protocol) { + if (protocol == webrtc::UDP_PROTOCOL_NAME) { + return IceCandidatePairProtocol::kUdp; + } else if (protocol == webrtc::TCP_PROTOCOL_NAME) { + return IceCandidatePairProtocol::kTcp; + } else if (protocol == webrtc::SSLTCP_PROTOCOL_NAME) { + return IceCandidatePairProtocol::kSsltcp; + } else if (protocol == webrtc::TLS_PROTOCOL_NAME) { + return IceCandidatePairProtocol::kTls; + } + return IceCandidatePairProtocol::kUnknown; +} + +IceCandidatePairAddressFamily GetAddressFamilyByInt(int address_family) { if (address_family == AF_INET) { - return webrtc::IceCandidatePairAddressFamily::kIpv4; + return IceCandidatePairAddressFamily::kIpv4; } else if (address_family == AF_INET6) { - return webrtc::IceCandidatePairAddressFamily::kIpv6; + return IceCandidatePairAddressFamily::kIpv6; } - return webrtc::IceCandidatePairAddressFamily::kUnknown; + return IceCandidatePairAddressFamily::kUnknown; } -webrtc::IceCandidateNetworkType ConvertNetworkType(rtc::AdapterType type) { +IceCandidateNetworkType ConvertNetworkType(AdapterType type) { switch (type) { - case rtc::ADAPTER_TYPE_ETHERNET: - return webrtc::IceCandidateNetworkType::kEthernet; - case rtc::ADAPTER_TYPE_LOOPBACK: - return webrtc::IceCandidateNetworkType::kLoopback; - case rtc::ADAPTER_TYPE_WIFI: - return webrtc::IceCandidateNetworkType::kWifi; - case rtc::ADAPTER_TYPE_VPN: - return webrtc::IceCandidateNetworkType::kVpn; - case rtc::ADAPTER_TYPE_CELLULAR: - case rtc::ADAPTER_TYPE_CELLULAR_2G: - case rtc::ADAPTER_TYPE_CELLULAR_3G: - case rtc::ADAPTER_TYPE_CELLULAR_4G: - case rtc::ADAPTER_TYPE_CELLULAR_5G: - return webrtc::IceCandidateNetworkType::kCellular; + case webrtc::ADAPTER_TYPE_ETHERNET: + return IceCandidateNetworkType::kEthernet; + case webrtc::ADAPTER_TYPE_LOOPBACK: + return IceCandidateNetworkType::kLoopback; + case webrtc::ADAPTER_TYPE_WIFI: + return IceCandidateNetworkType::kWifi; + case webrtc::ADAPTER_TYPE_VPN: + return IceCandidateNetworkType::kVpn; + case webrtc::ADAPTER_TYPE_CELLULAR: + case webrtc::ADAPTER_TYPE_CELLULAR_2G: + case webrtc::ADAPTER_TYPE_CELLULAR_3G: + case 
webrtc::ADAPTER_TYPE_CELLULAR_4G: + case webrtc::ADAPTER_TYPE_CELLULAR_5G: + return IceCandidateNetworkType::kCellular; default: - return webrtc::IceCandidateNetworkType::kUnknown; + return IceCandidateNetworkType::kUnknown; } } @@ -138,12 +163,6 @@ const int MINIMUM_RTT = 100; // 0.1 seconds const int MAXIMUM_RTT = 60000; // 60 seconds const int DEFAULT_RTT_ESTIMATE_HALF_TIME_MS = 500; - -// Computes our estimate of the RTT given the current estimate. -inline int ConservativeRTTEstimate(int rtt) { - return rtc::SafeClamp(2 * rtt, MINIMUM_RTT, MAXIMUM_RTT); -} - // Weighting of the old rtt value to new data. const int RTT_RATIO = 3; // 3 : 1 @@ -206,14 +225,14 @@ void Connection::ConnectionRequest::OnSent() { } int Connection::ConnectionRequest::resend_delay() { - return CONNECTION_RESPONSE_TIMEOUT; + return webrtc::CONNECTION_RESPONSE_TIMEOUT; } -Connection::Connection(rtc::WeakPtr port, +Connection::Connection(WeakPtr port, size_t index, const Candidate& remote_candidate) : network_thread_(port->thread()), - id_(rtc::CreateRandomId()), + id_(webrtc::CreateRandomId()), port_(std::move(port)), local_candidate_(port_->Candidates()[index]), remote_candidate_(remote_candidate), @@ -234,8 +253,9 @@ Connection::Connection(rtc::WeakPtr port, last_data_received_(0), last_ping_response_received_(0), state_(IceCandidatePairState::WAITING), - time_created_ms_(rtc::TimeMillis()), - delta_internal_unix_epoch_ms_(rtc::TimeUTCMillis() - rtc::TimeMillis()), + time_created_ms_(webrtc::TimeMillis()), + delta_internal_unix_epoch_ms_(webrtc::TimeUTCMillis() - + webrtc::TimeMillis()), field_trials_(&kDefaultFieldTrials), rtt_estimate_(DEFAULT_RTT_ESTIMATE_HALF_TIME_MS) { RTC_DCHECK_RUN_ON(network_thread_); @@ -246,9 +266,10 @@ Connection::Connection(rtc::WeakPtr port, Connection::~Connection() { RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(!port_); + RTC_DCHECK(!received_packet_callback_); } -webrtc::TaskQueueBase* Connection::network_thread() const { +TaskQueueBase* Connection::network_thread() const { return network_thread_; } @@ -261,15 +282,18 @@ const Candidate& Connection::remote_candidate() const { return remote_candidate_; } -const rtc::Network* Connection::network() const { +const Network* Connection::network() const { + RTC_DCHECK(port_) << ToDebugId() << ": port_ null in network()"; return port()->Network(); } int Connection::generation() const { + RTC_DCHECK(port_) << ToDebugId() << ": port_ null in generation()"; return port()->generation(); } uint64_t Connection::priority() const { + RTC_DCHECK(port_) << ToDebugId() << ": port_ null in priority()"; if (!port_) return 0; @@ -280,10 +304,10 @@ uint64_t Connection::priority() const { // controlled agent. 
// pair priority = 2^32*MIN(G,D) + 2*MAX(G,D) + (G>D?1:0) IceRole role = port_->GetIceRole(); - if (role != ICEROLE_UNKNOWN) { + if (role != webrtc::ICEROLE_UNKNOWN) { uint32_t g = 0; uint32_t d = 0; - if (role == ICEROLE_CONTROLLING) { + if (role == webrtc::ICEROLE_CONTROLLING) { g = local_candidate().priority(); d = remote_candidate_.priority(); } else { @@ -380,41 +404,42 @@ bool Connection::nominated() const { int Connection::unwritable_timeout() const { RTC_DCHECK_RUN_ON(network_thread_); - return unwritable_timeout_.value_or(CONNECTION_WRITE_CONNECT_TIMEOUT); + return unwritable_timeout_.value_or(webrtc::CONNECTION_WRITE_CONNECT_TIMEOUT); } -void Connection::set_unwritable_timeout(const absl::optional& value_ms) { +void Connection::set_unwritable_timeout(const std::optional& value_ms) { RTC_DCHECK_RUN_ON(network_thread_); unwritable_timeout_ = value_ms; } int Connection::unwritable_min_checks() const { RTC_DCHECK_RUN_ON(network_thread_); - return unwritable_min_checks_.value_or(CONNECTION_WRITE_CONNECT_FAILURES); + return unwritable_min_checks_.value_or( + webrtc::CONNECTION_WRITE_CONNECT_FAILURES); } -void Connection::set_unwritable_min_checks(const absl::optional& value) { +void Connection::set_unwritable_min_checks(const std::optional& value) { RTC_DCHECK_RUN_ON(network_thread_); unwritable_min_checks_ = value; } int Connection::inactive_timeout() const { RTC_DCHECK_RUN_ON(network_thread_); - return inactive_timeout_.value_or(CONNECTION_WRITE_TIMEOUT); + return inactive_timeout_.value_or(webrtc::CONNECTION_WRITE_TIMEOUT); } -void Connection::set_inactive_timeout(const absl::optional& value) { +void Connection::set_inactive_timeout(const std::optional& value) { RTC_DCHECK_RUN_ON(network_thread_); inactive_timeout_ = value; } int Connection::receiving_timeout() const { RTC_DCHECK_RUN_ON(network_thread_); - return receiving_timeout_.value_or(WEAK_CONNECTION_RECEIVE_TIMEOUT); + return receiving_timeout_.value_or(webrtc::WEAK_CONNECTION_RECEIVE_TIMEOUT); } void Connection::set_receiving_timeout( - absl::optional receiving_timeout_ms) { + std::optional receiving_timeout_ms) { RTC_DCHECK_RUN_ON(network_thread_); receiving_timeout_ = receiving_timeout_ms; } @@ -429,35 +454,55 @@ void Connection::OnSendStunPacket(const void* data, size_t size, StunRequest* req) { RTC_DCHECK_RUN_ON(network_thread_); - rtc::PacketOptions options(port_->StunDscpValue()); + AsyncSocketPacketOptions options(port_->StunDscpValue()); options.info_signaled_after_sent.packet_type = - rtc::PacketType::kIceConnectivityCheck; + PacketType::kIceConnectivityCheck; auto err = port_->SendTo(data, size, remote_candidate_.address(), options, false); if (err < 0) { RTC_LOG(LS_WARNING) << ToString() << ": Failed to send STUN ping " " err=" - << err << " id=" << rtc::hex_encode(req->id()); + << err << " id=" << hex_encode(req->id()); } } +void Connection::RegisterReceivedPacketCallback( + absl::AnyInvocable + received_packet_callback) { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_CHECK(!received_packet_callback_); + received_packet_callback_ = std::move(received_packet_callback); +} + +void Connection::DeregisterReceivedPacketCallback() { + RTC_DCHECK_RUN_ON(network_thread_); + received_packet_callback_ = nullptr; +} + void Connection::OnReadPacket(const char* data, size_t size, int64_t packet_time_us) { + OnReadPacket(ReceivedIpPacket::CreateFromLegacy(data, size, packet_time_us)); +} +void Connection::OnReadPacket(const ReceivedIpPacket& packet) { RTC_DCHECK_RUN_ON(network_thread_); std::unique_ptr msg; std::string 
remote_ufrag; - const rtc::SocketAddress& addr(remote_candidate_.address()); - if (!port_->GetStunMessage(data, size, addr, &msg, &remote_ufrag)) { + const SocketAddress& addr(remote_candidate_.address()); + if (!port_->GetStunMessage( + reinterpret_cast(packet.payload().data()), + packet.payload().size(), addr, &msg, &remote_ufrag)) { // The packet did not parse as a valid STUN message // This is a data packet, pass it along. - last_data_received_ = rtc::TimeMillis(); + last_data_received_ = webrtc::TimeMillis(); UpdateReceiving(last_data_received_); - recv_rate_tracker_.AddSamples(size); + recv_rate_tracker_.AddSamples(packet.payload().size()); stats_.packets_received++; - SignalReadPacket(this, data, size, packet_time_us); - + if (received_packet_callback_) { + received_packet_callback_(this, packet); + } // If timed out sending writability checks, start up again if (!pruned_ && (write_state_ == STATE_WRITE_TIMEOUT)) { RTC_LOG(LS_WARNING) @@ -485,7 +530,7 @@ void Connection::OnReadPacket(const char* data, // "silently" discard the request. RTC_LOG(LS_VERBOSE) << ToString() << ": Discarding " << StunMethodToString(msg->type()) - << ", id=" << rtc::hex_encode(msg->transaction_id()) + << ", id=" << hex_encode(msg->transaction_id()) << " with invalid message integrity: " << static_cast(msg->integrity()); return; @@ -499,7 +544,7 @@ void Connection::OnReadPacket(const char* data, // "silently" discard the response. RTC_LOG(LS_VERBOSE) << ToString() << ": Discarding " << StunMethodToString(msg->type()) - << ", id=" << rtc::hex_encode(msg->transaction_id()) + << ", id=" << hex_encode(msg->transaction_id()) << " with invalid message integrity: " << static_cast(msg->integrity()); return; @@ -509,12 +554,12 @@ void Connection::OnReadPacket(const char* data, // No message integrity. } - rtc::LoggingSeverity sev = (!writable() ? rtc::LS_INFO : rtc::LS_VERBOSE); + LoggingSeverity sev = (!writable() ? LS_INFO : LS_VERBOSE); switch (msg->type()) { case STUN_BINDING_REQUEST: RTC_LOG_V(sev) << ToString() << ": Received " << StunMethodToString(msg->type()) - << ", id=" << rtc::hex_encode(msg->transaction_id()); + << ", id=" << hex_encode(msg->transaction_id()); if (remote_ufrag == remote_candidate_.username()) { HandleStunBindingOrGoogPingRequest(msg.get()); } else { @@ -557,16 +602,47 @@ void Connection::OnReadPacket(const char* data, } } +void Connection::MaybeAddDtlsPiggybackingAttributes(StunMessage* msg) { + if (dtls_stun_piggyback_callbacks_.empty()) { + return; + } + + const auto& [attr, ack] = dtls_stun_piggyback_callbacks_.send_data( + static_cast(msg->type())); + + if (ack) { + size_t msg_length = msg->length(); + size_t need_length = ack->length() + kStunAttributeHeaderSize; + if (msg_length + need_length <= kMaxStunBindingLength) { + msg->AddAttribute(std::make_unique( + STUN_ATTR_META_DTLS_IN_STUN_ACK, *ack)); + } else if (msg_length + kStunAttributeHeaderSize <= kMaxStunBindingLength) { + // Add en empty ACK. + std::string empty; + msg->AddAttribute(std::make_unique( + STUN_ATTR_META_DTLS_IN_STUN_ACK, empty)); + } else { + return; + } + } + + if (attr) { + size_t need_length = attr->length() + kStunAttributeHeaderSize; + if (msg->length() + need_length <= kMaxStunBindingLength) { + msg->AddAttribute(std::make_unique( + STUN_ATTR_META_DTLS_IN_STUN, *attr)); + } + } +} + void Connection::HandleStunBindingOrGoogPingRequest(IceMessage* msg) { RTC_DCHECK_RUN_ON(network_thread_); // This connection should now be receiving. 
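Note: `MaybeAddDtlsPiggybackingAttributes()` above only attaches the DTLS-in-STUN attribute (and its ACK) while the whole binding message stays under `kMaxStunBindingLength`; an oversized ACK is downgraded to an empty one and an oversized DTLS fragment is simply left out. Rough budget math, assuming the standard 4-byte STUN attribute header (2 bytes type, 2 bytes length); the 120-byte request size is illustrative.

    // kMaxStunBindingLength = 1200 - 24 - 8 = 1168 bytes (see connection.h
    // later in this patch). For a binding request that is already
    // msg->length() bytes long, a piggybacked DTLS attribute fits while
    //   msg->length() + kStunAttributeHeaderSize + attr->length() <= 1168
    // e.g. a 120-byte request leaves 1168 - 120 - 4 = 1044 bytes of payload.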
ReceivedPing(msg->transaction_id()); if (field_trials_->extra_ice_ping && last_ping_response_received_ == 0) { - if (local_candidate().type() == RELAY_PORT_TYPE || - local_candidate().type() == PRFLX_PORT_TYPE || - remote_candidate().type() == RELAY_PORT_TYPE || - remote_candidate().type() == PRFLX_PORT_TYPE) { - const int64_t now = rtc::TimeMillis(); + if (local_candidate().is_relay() || local_candidate().is_prflx() || + remote_candidate().is_relay() || remote_candidate().is_prflx()) { + const int64_t now = webrtc::TimeMillis(); if (last_ping_sent_ + kMinExtraPingDelayMs <= now) { RTC_LOG(LS_INFO) << ToString() << "WebRTC-ExtraICEPing/Sending extra ping" @@ -584,7 +660,7 @@ void Connection::HandleStunBindingOrGoogPingRequest(IceMessage* msg) { } } - const rtc::SocketAddress& remote_addr = remote_candidate_.address(); + const SocketAddress& remote_addr = remote_candidate_.address(); if (msg->type() == STUN_BINDING_REQUEST) { // Check for role conflicts. const std::string& remote_ufrag = remote_candidate_.username(); @@ -596,11 +672,19 @@ void Connection::HandleStunBindingOrGoogPingRequest(IceMessage* msg) { } stats_.recv_ping_requests++; - LogCandidatePairEvent(webrtc::IceCandidatePairEventType::kCheckReceived, + LogCandidatePairEvent(IceCandidatePairEventType::kCheckReceived, msg->reduced_transaction_id()); // This is a validated stun request from remote peer. if (msg->type() == STUN_BINDING_REQUEST) { + if (!dtls_stun_piggyback_callbacks_.empty()) { + const StunByteStringAttribute* dtls_piggyback_attribute = + msg->GetByteString(STUN_ATTR_META_DTLS_IN_STUN); + const StunByteStringAttribute* dtls_piggyback_ack = + msg->GetByteString(STUN_ATTR_META_DTLS_IN_STUN_ACK); + dtls_stun_piggyback_callbacks_.recv_data(dtls_piggyback_attribute, + dtls_piggyback_ack); + } SendStunBindingResponse(msg); } else { RTC_DCHECK(msg->type() == GOOG_PING_REQUEST); @@ -612,7 +696,7 @@ void Connection::HandleStunBindingOrGoogPingRequest(IceMessage* msg) { set_write_state(STATE_WRITE_INIT); } - if (port_->GetIceRole() == ICEROLE_CONTROLLED) { + if (port_->GetIceRole() == webrtc::ICEROLE_CONTROLLED) { const StunUInt32Attribute* nomination_attr = msg->GetUInt32(STUN_ATTR_NOMINATION); uint32_t nomination = 0; @@ -678,7 +762,7 @@ void Connection::SendStunBindingResponse(const StunMessage* message) { response.AddAttribute(std::make_unique( STUN_ATTR_RETRANSMIT_COUNT, retransmit_attr->value())); - if (retransmit_attr->value() > CONNECTION_WRITE_CONNECT_FAILURES) { + if (retransmit_attr->value() > webrtc::CONNECTION_WRITE_CONNECT_FAILURES) { RTC_LOG(LS_INFO) << ToString() << ": Received a remote ping with high retransmit count: " @@ -717,8 +801,7 @@ void Connection::SendStunBindingResponse(const StunMessage* message) { RTC_LOG(LS_ERROR) << "GOOG_DELTA consumer did not return ack!"; } } else { - RTC_LOG(LS_WARNING) << "Ignore GOOG_DELTA" - << " len: " << delta->length() + RTC_LOG(LS_WARNING) << "Ignore GOOG_DELTA" << " len: " << delta->length() << " answer_goog_delta = " << field_trials_->answer_goog_delta << " goog_delta_consumer_ = " @@ -726,6 +809,8 @@ void Connection::SendStunBindingResponse(const StunMessage* message) { } } + MaybeAddDtlsPiggybackingAttributes(&response); + response.AddMessageIntegrity(local_candidate().password()); response.AddFingerprint(); @@ -745,31 +830,31 @@ void Connection::SendGoogPingResponse(const StunMessage* message) { void Connection::SendResponseMessage(const StunMessage& response) { RTC_DCHECK_RUN_ON(network_thread_); // Where I send the response. 
- const rtc::SocketAddress& addr = remote_candidate_.address(); + const SocketAddress& addr = remote_candidate_.address(); // Send the response. - rtc::ByteBufferWriter buf; + ByteBufferWriter buf; response.Write(&buf); - rtc::PacketOptions options(port_->StunDscpValue()); + AsyncSocketPacketOptions options(port_->StunDscpValue()); options.info_signaled_after_sent.packet_type = - rtc::PacketType::kIceConnectivityCheckResponse; + PacketType::kIceConnectivityCheckResponse; auto err = port_->SendTo(buf.Data(), buf.Length(), addr, options, false); if (err < 0) { RTC_LOG(LS_ERROR) << ToString() << ": Failed to send " << StunMethodToString(response.type()) << ", to=" << addr.ToSensitiveString() << ", err=" << err - << ", id=" << rtc::hex_encode(response.transaction_id()); + << ", id=" << hex_encode(response.transaction_id()); } else { // Log at LS_INFO if we send a stun ping response on an unwritable // connection. - rtc::LoggingSeverity sev = (!writable()) ? rtc::LS_INFO : rtc::LS_VERBOSE; + LoggingSeverity sev = (!writable()) ? LS_INFO : LS_VERBOSE; RTC_LOG_V(sev) << ToString() << ": Sent " << StunMethodToString(response.type()) << ", to=" << addr.ToSensitiveString() - << ", id=" << rtc::hex_encode(response.transaction_id()); + << ", id=" << hex_encode(response.transaction_id()); stats_.sent_ping_responses++; - LogCandidatePairEvent(webrtc::IceCandidatePairEventType::kCheckResponseSent, + LogCandidatePairEvent(IceCandidatePairEventType::kCheckResponseSent, response.reduced_transaction_id()); } } @@ -806,13 +891,14 @@ void Connection::Prune() { void Connection::Destroy() { RTC_DCHECK_RUN_ON(network_thread_); - RTC_DCHECK(port_) << "Calling Destroy() twice?"; + RTC_DCHECK(port_) << ToDebugId() << ": port_ null in Destroy()"; if (port_) port_->DestroyConnection(this); } bool Connection::Shutdown() { RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(port_) << ToDebugId() << ": Calling Shutdown() twice?"; if (!port_) return false; // already shut down. @@ -826,12 +912,15 @@ bool Connection::Shutdown() { SignalDestroyed.disconnect_all(); destroyed_signals(this); - LogCandidatePairConfig(webrtc::IceCandidatePairConfigType::kDestroyed); + LogCandidatePairConfig(IceCandidatePairConfigType::kDestroyed); // Reset the `port_` after logging and firing the destroyed signal since // information required for logging needs access to `port_`. port_.reset(); + // Clear any pending requests (or responses). + requests_.Clear(); + return true; } @@ -846,6 +935,7 @@ void Connection::FailAndPrune() { // will be nulled. // In such a case, there's a chance that the Port object gets // deleted before the Connection object ends up being deleted. + RTC_DCHECK(port_) << ToDebugId() << ": port_ null in FailAndPrune()"; if (!port_) return; @@ -855,19 +945,19 @@ void Connection::FailAndPrune() { void Connection::PrintPingsSinceLastResponse(std::string* s, size_t max) { RTC_DCHECK_RUN_ON(network_thread_); - rtc::StringBuilder oss; + StringBuilder oss; if (pings_since_last_response_.size() > max) { for (size_t i = 0; i < max; i++) { const SentPing& ping = pings_since_last_response_[i]; - oss << rtc::hex_encode(ping.id) << " "; + oss << hex_encode(ping.id) << " "; } oss << "... 
" << (pings_since_last_response_.size() - max) << " more"; } else { for (const SentPing& ping : pings_since_last_response_) { - oss << rtc::hex_encode(ping.id) << " "; + oss << hex_encode(ping.id) << " "; } } - *s = oss.str(); + *s = oss.Release(); } bool Connection::selected() const { @@ -882,10 +972,12 @@ void Connection::set_selected(bool selected) { void Connection::UpdateState(int64_t now) { RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(port_) << ToDebugId() << ": port_ null in UpdateState()"; if (!port_) return; - int rtt = ConservativeRTTEstimate(rtt_); + // Computes our estimate of the RTT given the current estimate. + int rtt = webrtc::SafeClamp(2 * rtt_, MINIMUM_RTT, MAXIMUM_RTT); if (RTC_LOG_CHECK_LEVEL(LS_VERBOSE)) { std::string pings; @@ -958,6 +1050,7 @@ int64_t Connection::last_ping_sent() const { void Connection::Ping(int64_t now, std::unique_ptr delta) { RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(port_) << ToDebugId() << ": port_ null in Ping()"; if (!port_) return; @@ -982,8 +1075,8 @@ void Connection::Ping(int64_t now, } pings_since_last_response_.push_back(SentPing(req->id(), now, nomination)); - RTC_LOG(LS_VERBOSE) << ToString() << ": Sending STUN ping, id=" - << rtc::hex_encode(req->id()) + RTC_LOG(LS_VERBOSE) << ToString() + << ": Sending STUN ping, id=" << hex_encode(req->id()) << ", nomination=" << nomination_; requests_.Send(req.release()); state_ = IceCandidatePairState::IN_PROGRESS; @@ -1011,13 +1104,14 @@ std::unique_ptr Connection::BuildPingRequest( // Adding ICE_CONTROLLED or ICE_CONTROLLING attribute based on the role. IceRole ice_role = port_->GetIceRole(); - RTC_DCHECK(ice_role == ICEROLE_CONTROLLING || ice_role == ICEROLE_CONTROLLED); + RTC_DCHECK(ice_role == webrtc::ICEROLE_CONTROLLING || + ice_role == webrtc::ICEROLE_CONTROLLED); message->AddAttribute(std::make_unique( - ice_role == ICEROLE_CONTROLLING ? STUN_ATTR_ICE_CONTROLLING - : STUN_ATTR_ICE_CONTROLLED, + ice_role == webrtc::ICEROLE_CONTROLLING ? STUN_ATTR_ICE_CONTROLLING + : STUN_ATTR_ICE_CONTROLLED, port_->IceTiebreaker())); - if (ice_role == ICEROLE_CONTROLLING) { + if (ice_role == webrtc::ICEROLE_CONTROLLING) { // We should have either USE_CANDIDATE attribute or ICE_NOMINATION // attribute but not both. That was enforced in p2ptransportchannel. 
if (use_candidate_attr()) { @@ -1054,6 +1148,8 @@ std::unique_ptr Connection::BuildPingRequest( message->AddAttribute(std::move(delta)); } + MaybeAddDtlsPiggybackingAttributes(message.get()); + message->AddMessageIntegrity(remote_candidate_.password()); message->AddFingerprint(); @@ -1065,7 +1161,7 @@ int64_t Connection::last_ping_response_received() const { return last_ping_response_received_; } -const absl::optional& Connection::last_ping_id_received() const { +const std::optional& Connection::last_ping_id_received() const { RTC_DCHECK_RUN_ON(network_thread_); return last_ping_id_received_; } @@ -1083,9 +1179,9 @@ int64_t Connection::last_ping_received() const { return last_ping_received_; } -void Connection::ReceivedPing(const absl::optional& request_id) { +void Connection::ReceivedPing(const std::optional& request_id) { RTC_DCHECK_RUN_ON(network_thread_); - last_ping_received_ = rtc::TimeMillis(); + last_ping_received_ = webrtc::TimeMillis(); last_ping_id_received_ = request_id; UpdateReceiving(last_ping_received_); } @@ -1103,11 +1199,11 @@ void Connection::HandlePiggybackCheckAcknowledgementIfAny(StunMessage* msg) { pings_since_last_response_, [&request_id](const SentPing& ping) { return ping.id == request_id; }); if (iter != pings_since_last_response_.end()) { - rtc::LoggingSeverity sev = !writable() ? rtc::LS_INFO : rtc::LS_VERBOSE; + LoggingSeverity sev = !writable() ? LS_INFO : LS_VERBOSE; RTC_LOG_V(sev) << ToString() << ": Received piggyback STUN ping response, id=" - << rtc::hex_encode(request_id); - const int64_t rtt = rtc::TimeMillis() - iter->sent_time; + << hex_encode(request_id); + const int64_t rtt = webrtc::TimeMillis() - iter->sent_time; ReceivedPingResponse(rtt, request_id, iter->nomination); } } @@ -1125,8 +1221,8 @@ int64_t Connection::last_data_received() const { void Connection::ReceivedPingResponse( int rtt, - absl::string_view request_id, - const absl::optional& nomination) { + absl::string_view /* request_id */, + const std::optional& nomination) { RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK_GE(rtt, 0); // We've already validated that this is a STUN binding response with @@ -1138,7 +1234,7 @@ void Connection::ReceivedPingResponse( acked_nomination_ = nomination.value(); } - int64_t now = rtc::TimeMillis(); + int64_t now = webrtc::TimeMillis(); total_round_trip_time_ms_ += rtt; current_round_trip_time_ms_ = static_cast(rtt); rtt_estimate_.AddSample(now, rtt); @@ -1148,8 +1244,10 @@ void Connection::ReceivedPingResponse( UpdateReceiving(last_ping_response_received_); set_write_state(STATE_WRITABLE); set_state(IceCandidatePairState::SUCCEEDED); + + // Smooth the RTT estimate using a moving average. if (rtt_samples_ > 0) { - rtt_ = rtc::GetNextMovingAverage(rtt_, rtt, RTT_RATIO); + rtt_ = (RTT_RATIO * rtt_ + rtt) / (RTT_RATIO + 1); } else { rtt_ = rtt; } @@ -1201,7 +1299,7 @@ bool Connection::dead(int64_t now) const { // locally inactive (pruned) connection. This also allows the local agent to // ping with longer interval than 30s as long as it shorter than // `dead_connection_timeout_ms`. - if (now <= (last_received() + DEAD_CONNECTION_RECEIVE_TIMEOUT)) { + if (now <= (last_received() + webrtc::DEAD_CONNECTION_RECEIVE_TIMEOUT)) { // Not dead since we have received the last 30s. return false; } @@ -1209,7 +1307,7 @@ bool Connection::dead(int64_t now) const { // Outstanding pings: let it live until the ping is unreplied for // DEAD_CONNECTION_RECEIVE_TIMEOUT. 
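Note: the timeout setters and ping bookkeeping above now take `std::optional` instead of `absl::optional`; for callers this is a mechanical swap. A tiny sketch, assuming `conn` is a `webrtc::Connection*` and that the timeout values are milliseconds as elsewhere in this file:

    conn->set_receiving_timeout(std::optional<int>(2500));  // ms, illustrative
    conn->set_unwritable_timeout(std::nullopt);  // fall back to the default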
return now > (pings_since_last_response_[0].sent_time + - DEAD_CONNECTION_RECEIVE_TIMEOUT); + webrtc::DEAD_CONNECTION_RECEIVE_TIMEOUT); } // No outstanding pings: let it live until @@ -1229,7 +1327,7 @@ bool Connection::dead(int64_t now) const { // keep it around for at least MIN_CONNECTION_LIFETIME to prevent connections // from being pruned too quickly during a network change event when two // networks would be up simultaneously but only for a brief period. - return now > (time_created_ms_ + MIN_CONNECTION_LIFETIME); + return now > (time_created_ms_ + webrtc::MIN_CONNECTION_LIFETIME); } int Connection::rtt() const { @@ -1246,16 +1344,18 @@ bool Connection::stable(int64_t now) const { } std::string Connection::ToDebugId() const { - return rtc::ToHex(reinterpret_cast(this)); + return webrtc::ToHex(reinterpret_cast(this)); } uint32_t Connection::ComputeNetworkCost() const { // TODO(honghaiz): Will add rtt as part of the network cost. + RTC_DCHECK(port_) << ToDebugId() << ": port_ null in ComputeNetworkCost()"; return port()->network_cost() + remote_candidate_.network_cost(); } std::string Connection::ToString() const { RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(port_) << ToDebugId() << ": port_ null in ToString()"; constexpr absl::string_view CONNECT_STATE_ABBREV[2] = { "-", // not connected (false) "C", // connected (true) @@ -1280,7 +1380,7 @@ std::string Connection::ToString() const { "-", // candidate pair not selected (false) "S", // selected (true) }; - rtc::StringBuilder ss; + StringBuilder ss; ss << "Conn[" << ToDebugId(); if (!port_) { @@ -1295,11 +1395,11 @@ std::string Connection::ToString() const { const Candidate& local = local_candidate(); const Candidate& remote = remote_candidate(); ss << local.id() << ":" << local.component() << ":" << local.generation() - << ":" << local.type() << ":" << local.protocol() << ":" + << ":" << local.type_name() << ":" << local.protocol() << ":" << local.address().ToSensitiveString() << "->" << remote.id() << ":" - << remote.component() << ":" << remote.priority() << ":" << remote.type() - << ":" << remote.protocol() << ":" << remote.address().ToSensitiveString() - << "|"; + << remote.component() << ":" << remote.priority() << ":" + << remote.type_name() << ":" << remote.protocol() << ":" + << remote.address().ToSensitiveString() << "|"; ss << CONNECT_STATE_ABBREV[connected_] << RECEIVE_STATE_ABBREV[receiving_] << WRITE_STATE_ABBREV[write_state_] << ICESTATE[static_cast(state_)] @@ -1322,24 +1422,21 @@ std::string Connection::ToSensitiveString() const { return ToString(); } -const webrtc::IceCandidatePairDescription& Connection::ToLogDescription() { +const IceCandidatePairDescription& Connection::ToLogDescription() { RTC_DCHECK_RUN_ON(network_thread_); if (log_description_.has_value()) { return log_description_.value(); } const Candidate& local = local_candidate(); const Candidate& remote = remote_candidate(); - const rtc::Network* network = port()->Network(); - log_description_ = webrtc::IceCandidatePairDescription(); - log_description_->local_candidate_type = - GetCandidateTypeByString(local.type()); + const Network* network = port()->Network(); + log_description_ = IceCandidatePairDescription( + GetRtcEventLogCandidateType(local), GetRtcEventLogCandidateType(remote)); log_description_->local_relay_protocol = GetProtocolByString(local.relay_protocol()); log_description_->local_network_type = ConvertNetworkType(network->type()); log_description_->local_address_family = GetAddressFamilyByInt(local.address().family()); - 
log_description_->remote_candidate_type = - GetCandidateTypeByString(remote.type()); log_description_->remote_address_family = GetAddressFamilyByInt(remote.address().family()); log_description_->candidate_pair_protocol = @@ -1347,13 +1444,12 @@ const webrtc::IceCandidatePairDescription& Connection::ToLogDescription() { return log_description_.value(); } -void Connection::set_ice_event_log(webrtc::IceEventLog* ice_event_log) { +void Connection::set_ice_event_log(IceEventLog* ice_event_log) { RTC_DCHECK_RUN_ON(network_thread_); ice_event_log_ = ice_event_log; } -void Connection::LogCandidatePairConfig( - webrtc::IceCandidatePairConfigType type) { +void Connection::LogCandidatePairConfig(IceCandidatePairConfigType type) { RTC_DCHECK_RUN_ON(network_thread_); if (ice_event_log_ == nullptr) { return; @@ -1361,7 +1457,7 @@ void Connection::LogCandidatePairConfig( ice_event_log_->LogCandidatePairConfig(type, id(), ToLogDescription()); } -void Connection::LogCandidatePairEvent(webrtc::IceCandidatePairEventType type, +void Connection::LogCandidatePairEvent(IceCandidatePairEventType type, uint32_t transaction_id) { RTC_DCHECK_RUN_ON(network_thread_); if (ice_event_log_ == nullptr) { @@ -1375,7 +1471,7 @@ void Connection::OnConnectionRequestResponse(StunRequest* request, RTC_DCHECK_RUN_ON(network_thread_); // Log at LS_INFO if we receive a ping response on an unwritable // connection. - rtc::LoggingSeverity sev = !writable() ? rtc::LS_INFO : rtc::LS_VERBOSE; + LoggingSeverity sev = !writable() ? LS_INFO : LS_VERBOSE; int rtt = request->Elapsed(); @@ -1384,12 +1480,12 @@ void Connection::OnConnectionRequestResponse(StunRequest* request, PrintPingsSinceLastResponse(&pings, 5); RTC_LOG_V(sev) << ToString() << ": Received " << StunMethodToString(response->type()) - << ", id=" << rtc::hex_encode(request->id()) + << ", id=" << hex_encode(request->id()) << ", code=0" // Makes logging easier to parse. ", rtt=" << rtt << ", pings_since_last_response=" << pings; } - absl::optional nomination; + std::optional nomination; const std::string request_id = request->id(); auto iter = absl::c_find_if( pings_since_last_response_, @@ -1400,9 +1496,8 @@ void Connection::OnConnectionRequestResponse(StunRequest* request, ReceivedPingResponse(rtt, request_id, nomination); stats_.recv_ping_responses++; - LogCandidatePairEvent( - webrtc::IceCandidatePairEventType::kCheckResponseReceived, - response->reduced_transaction_id()); + LogCandidatePairEvent(IceCandidatePairEventType::kCheckResponseReceived, + response->reduced_transaction_id()); if (request->msg()->type() == STUN_BINDING_REQUEST) { if (!remote_support_goog_ping_.has_value()) { @@ -1442,28 +1537,45 @@ void Connection::OnConnectionRequestResponse(StunRequest* request, // This means that remote does not support GOOG_DELTA RTC_LOG(LS_INFO) << "NO DELTA ACK => disable GOOG_DELTA"; (*goog_delta_ack_consumer_)( - webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_OPERATION)); + RTCError(RTCErrorType::UNSUPPORTED_OPERATION)); } else if (delta_ack) { // We did NOT send DELTA but got a DELTA_ACK. // That is internal error. 
RTC_LOG(LS_ERROR) << "DELTA ACK w/o DELTA => disable GOOG_DELTA"; - (*goog_delta_ack_consumer_)( - webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR)); + (*goog_delta_ack_consumer_)(RTCError(RTCErrorType::INTERNAL_ERROR)); } } else if (delta_ack) { RTC_LOG(LS_ERROR) << "Discard GOOG_DELTA_ACK, no consumer"; } + + if (!dtls_stun_piggyback_callbacks_.empty()) { + const bool sent_dtls_piggyback = + request->msg()->GetByteString(STUN_ATTR_META_DTLS_IN_STUN) != nullptr; + const bool sent_dtls_piggyback_ack = + request->msg()->GetByteString(STUN_ATTR_META_DTLS_IN_STUN_ACK) != + nullptr; + const StunByteStringAttribute* dtls_piggyback_attr = + response->GetByteString(STUN_ATTR_META_DTLS_IN_STUN); + const StunByteStringAttribute* dtls_piggyback_ack = + response->GetByteString(STUN_ATTR_META_DTLS_IN_STUN_ACK); + if (sent_dtls_piggyback || sent_dtls_piggyback_ack) { + dtls_stun_piggyback_callbacks_.recv_data(dtls_piggyback_attr, + dtls_piggyback_ack); + } + } } void Connection::OnConnectionRequestErrorResponse(ConnectionRequest* request, StunMessage* response) { + RTC_DCHECK(port_) << ToDebugId() + << ": port_ null in OnConnectionRequestErrorResponse"; if (!port_) return; int error_code = response->GetErrorCodeValue(); RTC_LOG(LS_WARNING) << ToString() << ": Received " << StunMethodToString(response->type()) - << " id=" << rtc::hex_encode(request->id()) + << " id=" << hex_encode(request->id()) << " code=" << error_code << " rtt=" << request->Elapsed(); @@ -1488,23 +1600,23 @@ void Connection::OnConnectionRequestErrorResponse(ConnectionRequest* request, void Connection::OnConnectionRequestTimeout(ConnectionRequest* request) { // Log at LS_INFO if we miss a ping on a writable connection. - rtc::LoggingSeverity sev = writable() ? rtc::LS_INFO : rtc::LS_VERBOSE; + LoggingSeverity sev = writable() ? LS_INFO : LS_VERBOSE; RTC_LOG_V(sev) << ToString() << ": Timing-out STUN ping " - << rtc::hex_encode(request->id()) << " after " - << request->Elapsed() << " ms"; + << hex_encode(request->id()) << " after " << request->Elapsed() + << " ms"; } void Connection::OnConnectionRequestSent(ConnectionRequest* request) { RTC_DCHECK_RUN_ON(network_thread_); // Log at LS_INFO if we send a ping on an unwritable connection. - rtc::LoggingSeverity sev = !writable() ? rtc::LS_INFO : rtc::LS_VERBOSE; + LoggingSeverity sev = !writable() ? LS_INFO : LS_VERBOSE; RTC_LOG_V(sev) << ToString() << ": Sent " << StunMethodToString(request->msg()->type()) - << ", id=" << rtc::hex_encode(request->id()) + << ", id=" << hex_encode(request->id()) << ", use_candidate=" << use_candidate_attr() << ", nomination=" << nomination_; stats_.sent_ping_requests_total++; - LogCandidatePairEvent(webrtc::IceCandidatePairEventType::kCheckSent, + LogCandidatePairEvent(IceCandidatePairEventType::kCheckSent, request->reduced_transaction_id()); if (stats_.recv_ping_responses == 0) { stats_.sent_ping_requests_before_first_response++; @@ -1530,7 +1642,7 @@ void Connection::MaybeSetRemoteIceParametersAndGeneration( } // TODO(deadbeef): A value of '0' for the generation is used for both // generation 0 and "generation unknown". It should be changed to an - // absl::optional to fix this. + // std::optional to fix this. 
if (remote_candidate_.username() == ice_params.ufrag && remote_candidate_.password() == ice_params.pwd && remote_candidate_.generation() == 0) { @@ -1540,8 +1652,7 @@ void Connection::MaybeSetRemoteIceParametersAndGeneration( void Connection::MaybeUpdatePeerReflexiveCandidate( const Candidate& new_candidate) { - if (remote_candidate_.type() == PRFLX_PORT_TYPE && - new_candidate.type() != PRFLX_PORT_TYPE && + if (remote_candidate_.is_prflx() && !new_candidate.is_prflx() && remote_candidate_.protocol() == new_candidate.protocol() && remote_candidate_.address() == new_candidate.address() && remote_candidate_.username() == new_candidate.username() && @@ -1571,9 +1682,9 @@ uint32_t Connection::prflx_priority() const { // (2^8)*(local preference) + // (2^0)*(256 - component ID) IcePriorityValue type_preference = - (local_candidate_.protocol() == TCP_PROTOCOL_NAME) - ? ICE_TYPE_PREFERENCE_PRFLX_TCP - : ICE_TYPE_PREFERENCE_PRFLX; + (local_candidate_.protocol() == webrtc::TCP_PROTOCOL_NAME) + ? webrtc::ICE_TYPE_PREFERENCE_PRFLX_TCP + : webrtc::ICE_TYPE_PREFERENCE_PRFLX; return type_preference << 24 | (local_candidate_.priority() & 0x00FFFFFF); } @@ -1598,18 +1709,20 @@ ConnectionInfo Connection::stats() { stats_.current_round_trip_time_ms = current_round_trip_time_ms_; stats_.remote_candidate = remote_candidate(); if (last_data_received_ > 0) { - stats_.last_data_received = webrtc::Timestamp::Millis( - last_data_received_ + delta_internal_unix_epoch_ms_); + stats_.last_data_received = + Timestamp::Millis(last_data_received_ + delta_internal_unix_epoch_ms_); } if (last_send_data_ > 0) { - stats_.last_data_sent = webrtc::Timestamp::Millis( - last_send_data_ + delta_internal_unix_epoch_ms_); + stats_.last_data_sent = + Timestamp::Millis(last_send_data_ + delta_internal_unix_epoch_ms_); } return stats_; } void Connection::MaybeUpdateLocalCandidate(StunRequest* request, StunMessage* response) { + RTC_DCHECK(port_) << ToDebugId() + << ": port_ null in MaybeUpdateLocalCandidate"; if (!port_) return; @@ -1654,17 +1767,15 @@ void Connection::MaybeUpdateLocalCandidate(StunRequest* request, return; } const uint32_t priority = priority_attr->value(); - std::string id = rtc::CreateRandomString(8); // Create a peer-reflexive candidate based on the local candidate. - local_candidate_.set_id(id); - local_candidate_.set_type(PRFLX_PORT_TYPE); + local_candidate_.generate_id(); + local_candidate_.set_type(IceCandidateType::kPrflx); // Set the related address and foundation attributes before changing the // address. 
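Note: `prflx_priority()` above keeps the low 24 bits (local preference and component) of the local candidate's priority and only swaps in the peer-reflexive type preference, following the RFC 5245 layout quoted in the comment. A worked example; the concrete numbers, including the type-preference value 110, are illustrative here:

    uint32_t old_priority = 0x7E0001FF;  // type pref 126, local pref 1, component 1
    uint32_t prflx_priority =
        (uint32_t{110} << 24) | (old_priority & 0x00FFFFFF);
    // -> 0x6E0001FF: same local preference and component, prflx type preference.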
local_candidate_.set_related_address(local_candidate_.address()); - local_candidate_.set_foundation(port()->ComputeFoundation( - PRFLX_PORT_TYPE, local_candidate_.protocol(), - local_candidate_.relay_protocol(), local_candidate_.address())); + local_candidate_.ComputeFoundation(local_candidate_.address(), + port_->IceTiebreaker()); local_candidate_.set_priority(priority); local_candidate_.set_address(addr->GetAddress()); @@ -1693,7 +1804,7 @@ bool Connection::missing_responses(int64_t now) const { } bool Connection::TooManyOutstandingPings( - const absl::optional& max_outstanding_pings) const { + const std::optional& max_outstanding_pings) const { RTC_DCHECK_RUN_ON(network_thread_); if (!max_outstanding_pings.has_value()) { return false; @@ -1746,21 +1857,22 @@ void Connection::ForgetLearnedState() { pings_since_last_response_.clear(); } -ProxyConnection::ProxyConnection(rtc::WeakPtr port, +ProxyConnection::ProxyConnection(WeakPtr port, size_t index, const Candidate& remote_candidate) : Connection(std::move(port), index, remote_candidate) {} int ProxyConnection::Send(const void* data, size_t size, - const rtc::PacketOptions& options) { + const AsyncSocketPacketOptions& options) { + RTC_DCHECK(port_) << ToDebugId() << ": port_ null in Send()"; if (!port_) return SOCKET_ERROR; stats_.sent_total_packets++; int sent = port_->SendTo(data, size, remote_candidate_.address(), options, true); - int64_t now = rtc::TimeMillis(); + int64_t now = webrtc::TimeMillis(); if (sent <= 0) { RTC_DCHECK(sent < 0); error_ = port_->GetError(); @@ -1777,4 +1889,4 @@ int ProxyConnection::GetError() { return error_; } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/connection.h b/p2p/base/connection.h index 8e439855fa..e1b0bb83ae 100644 --- a/p2p/base/connection.h +++ b/p2p/base/connection.h @@ -11,51 +11,54 @@ #ifndef P2P_BASE_CONNECTION_H_ #define P2P_BASE_CONNECTION_H_ +#include + +#include +#include #include +#include #include #include #include +#include "absl/functional/any_invocable.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/candidate.h" +#include "api/rtc_error.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "api/transport/stun.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" #include "logging/rtc_event_log/ice_logger.h" #include "p2p/base/candidate_pair_interface.h" #include "p2p/base/connection_info.h" #include "p2p/base/p2p_transport_channel_ice_field_trials.h" +#include "p2p/base/port_interface.h" #include "p2p/base/stun_request.h" #include "p2p/base/transport_description.h" +#include "p2p/dtls/dtls_stun_piggyback_callbacks.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/network.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/numerics/event_based_exponential_moving_average.h" #include "rtc_base/rate_tracker.h" #include "rtc_base/system/rtc_export.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/weak_ptr.h" -namespace cricket { +namespace webrtc { // Version number for GOOG_PING, this is added to have the option of // adding other flavors in the future. constexpr int kGoogPingVersion = 1; - -// Connection and Port has circular dependencies. -// So we use forward declaration rather than include. -class Port; +// 1200 is the "commonly used" MTU. Subtract M-I attribute (20+4) and FP (4+4). 
+constexpr int kMaxStunBindingLength = 1200 - 24 - 8; // Forward declaration so that a ConnectionRequest can contain a Connection. class Connection; -struct CandidatePair final : public CandidatePairInterface { - ~CandidatePair() override = default; - - const Candidate& local_candidate() const override { return local; } - const Candidate& remote_candidate() const override { return remote; } - - Candidate local; - Candidate remote; -}; - // Represents a communication link between a port on the local client and a // port on the remote client. class RTC_EXPORT Connection : public CandidatePairInterface { @@ -74,7 +77,7 @@ class RTC_EXPORT Connection : public CandidatePairInterface { // A unique ID assigned when the connection is created. uint32_t id() const { return id_; } - webrtc::TaskQueueBase* network_thread() const; + TaskQueueBase* network_thread() const; // Implementation of virtual methods in CandidatePairInterface. // Returns the description of the local port @@ -83,7 +86,7 @@ class RTC_EXPORT Connection : public CandidatePairInterface { const Candidate& remote_candidate() const override; // Return local network for this connection. - virtual const rtc::Network* network() const; + virtual const Network* network() const; // Return generation for this connection. virtual int generation() const; @@ -101,7 +104,7 @@ class RTC_EXPORT Connection : public CandidatePairInterface { bool writable() const; bool receiving() const; - const Port* port() const { + const PortInterface* port() const { RTC_DCHECK_RUN_ON(network_thread_); return port_.get(); } @@ -111,6 +114,7 @@ class RTC_EXPORT Connection : public CandidatePairInterface { bool connected() const; bool weak() const; bool active() const; + bool pending_delete() const { return !port_; } // A connection is dead if it can be safely deleted. bool dead(int64_t now) const; @@ -119,11 +123,11 @@ class RTC_EXPORT Connection : public CandidatePairInterface { int rtt() const; int unwritable_timeout() const; - void set_unwritable_timeout(const absl::optional& value_ms); + void set_unwritable_timeout(const std::optional& value_ms); int unwritable_min_checks() const; - void set_unwritable_min_checks(const absl::optional& value); + void set_unwritable_min_checks(const std::optional& value); int inactive_timeout() const; - void set_inactive_timeout(const absl::optional& value); + void set_inactive_timeout(const std::optional& value); // Gets the `ConnectionInfo` stats, where `best_connection` has not been // populated (default value false). @@ -140,17 +144,24 @@ class RTC_EXPORT Connection : public CandidatePairInterface { // covers. virtual int Send(const void* data, size_t size, - const rtc::PacketOptions& options) = 0; + const AsyncSocketPacketOptions& options) = 0; // Error if Send() returns < 0 virtual int GetError() = 0; - sigslot::signal4 SignalReadPacket; + // Register as a recipient of received packets. There can only be one. + void RegisterReceivedPacketCallback( + absl::AnyInvocable + received_packet_callback); + void DeregisterReceivedPacketCallback(); sigslot::signal1 SignalReadyToSend; // Called when a packet is received on this connection. - void OnReadPacket(const char* data, size_t size, int64_t packet_time_us); + void OnReadPacket(const ReceivedIpPacket& packet); + [[deprecated("Pass a webrtc::ReceivedIpPacket")]] void + OnReadPacket(const char* data, size_t size, int64_t packet_time_us); // Called when the socket is currently able to send. 
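Note: `RegisterReceivedPacketCallback()` above replaces the sigslot-based `SignalReadPacket`, and only one recipient may be registered at a time (enforced with `RTC_CHECK`). A minimal migration sketch, assuming `conn` is a `webrtc::Connection*`; the lambda body is illustrative.

    // Old: conn->SignalReadPacket.connect(this, &Observer::OnReadPacket);
    conn->RegisterReceivedPacketCallback(
        [](webrtc::Connection* connection,
           const webrtc::ReceivedIpPacket& packet) {
          // Handle packet.payload() for this connection (illustrative).
        });
    // And before the recipient goes away:
    conn->DeregisterReceivedPacketCallback();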
void OnReadyToSend(); @@ -178,7 +189,7 @@ class RTC_EXPORT Connection : public CandidatePairInterface { bool nominated() const; int receiving_timeout() const; - void set_receiving_timeout(absl::optional receiving_timeout_ms); + void set_receiving_timeout(std::optional receiving_timeout_ms); // Deletes a `Connection` instance is by calling the `DestroyConnection` // method in `Port`. @@ -211,13 +222,13 @@ class RTC_EXPORT Connection : public CandidatePairInterface { void ReceivedPingResponse( int rtt, absl::string_view request_id, - const absl::optional& nomination = absl::nullopt); + const std::optional& nomination = std::nullopt); std::unique_ptr BuildPingRequest( std::unique_ptr delta) RTC_RUN_ON(network_thread_); int64_t last_ping_response_received() const; - const absl::optional& last_ping_id_received() const; + const std::optional& last_ping_id_received() const; // Used to check if any STUN ping response has been received. int rtt_samples() const; @@ -227,7 +238,7 @@ class RTC_EXPORT Connection : public CandidatePairInterface { int64_t last_ping_received() const; void ReceivedPing( - const absl::optional& request_id = absl::nullopt); + const std::optional& request_id = std::nullopt); // Handles the binding request; sends a response if this is a valid request. void HandleStunBindingOrGoogPingRequest(IceMessage* msg); // Handles the piggyback acknowledgement of the lastest connectivity check @@ -243,8 +254,8 @@ class RTC_EXPORT Connection : public CandidatePairInterface { std::string ToString() const; std::string ToSensitiveString() const; // Structured description of this candidate pair. - const webrtc::IceCandidatePairDescription& ToLogDescription(); - void set_ice_event_log(webrtc::IceEventLog* ice_event_log); + const IceCandidatePairDescription& ToLogDescription(); + void set_ice_event_log(IceEventLog* ice_event_log); // Prints pings_since_last_response_ into a string. void PrintPingsSinceLastResponse(std::string* pings, size_t max); @@ -289,13 +300,13 @@ class RTC_EXPORT Connection : public CandidatePairInterface { bool stable(int64_t now) const; // Check if we sent `val` pings without receving a response. - bool TooManyOutstandingPings(const absl::optional& val) const; + bool TooManyOutstandingPings(const std::optional& val) const; // Called by Port when the network cost changes. void SetLocalCandidateNetworkCost(uint16_t cost); void SetIceFieldTrials(const IceFieldTrials* field_trials); - const rtc::EventBasedExponentialMovingAverage& GetRttEstimate() const { + const EventBasedExponentialMovingAverage& GetRttEstimate() const { return rtt_estimate_; } @@ -318,8 +329,8 @@ class RTC_EXPORT Connection : public CandidatePairInterface { void SendResponseMessage(const StunMessage& response); // An accessor for unit tests. 
- Port* PortForTest() { return port_.get(); } - const Port* PortForTest() const { return port_.get(); } + PortInterface* PortForTest() { return port_.get(); } + const PortInterface* PortForTest() const { return port_.get(); } std::unique_ptr BuildPingRequestForTest() { RTC_DCHECK_RUN_ON(network_thread_); @@ -339,24 +350,32 @@ class RTC_EXPORT Connection : public CandidatePairInterface { void SetStunDictConsumer( std::function( - const StunByteStringAttribute*)> goog_delta_consumer, - std::function)> + const webrtc::StunByteStringAttribute*)> goog_delta_consumer, + std::function)> goog_delta_ack_consumer) { goog_delta_consumer_ = std::move(goog_delta_consumer); goog_delta_ack_consumer_ = std::move(goog_delta_ack_consumer); } void ClearStunDictConsumer() { - goog_delta_consumer_ = absl::nullopt; - goog_delta_ack_consumer_ = absl::nullopt; + goog_delta_consumer_ = std::nullopt; + goog_delta_ack_consumer_ = std::nullopt; } + void RegisterDtlsPiggyback(DtlsStunPiggybackCallbacks&& callbacks) { + dtls_stun_piggyback_callbacks_ = std::move(callbacks); + } + + void DeregisterDtlsPiggyback() { dtls_stun_piggyback_callbacks_.reset(); } + protected: // A ConnectionRequest is a simple STUN ping used to determine writability. class ConnectionRequest; // Constructs a new connection to the given remote port. - Connection(rtc::WeakPtr port, size_t index, const Candidate& candidate); + Connection(WeakPtr port, + size_t index, + const Candidate& candidate); // Called back when StunRequestManager has a stun packet to send void OnSendStunPacket(const void* data, size_t size, StunRequest* req); @@ -385,22 +404,22 @@ class RTC_EXPORT Connection : public CandidatePairInterface { void set_connected(bool value); // The local port where this connection sends and receives packets. - Port* port() { return port_.get(); } + PortInterface* port() { return port_.get(); } // NOTE: A pointer to the network thread is held by `port_` so in theory we // shouldn't need to hold on to this pointer here, but rather defer to // port_->thread(). However, some tests delete the classes in the wrong order // so `port_` may be deleted before an instance of this class is deleted. // TODO(tommi): This ^^^ should be fixed. 
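Note: `RegisterDtlsPiggyback()` above is how the DTLS layer opts a connection into the STUN piggybacking used by `MaybeAddDtlsPiggybackingAttributes()` and the binding handlers earlier in this patch. A rough lifecycle sketch; `MakeDtlsPiggybackCallbacks()` is a hypothetical stand-in for however the DTLS transport builds its `DtlsStunPiggybackCallbacks`, which is not shown in this hunk.

    webrtc::DtlsStunPiggybackCallbacks callbacks =
        MakeDtlsPiggybackCallbacks();  // hypothetical helper, not in this patch
    conn->RegisterDtlsPiggyback(std::move(callbacks));
    // Outgoing pings and binding responses may now carry
    // STUN_ATTR_META_DTLS_IN_STUN / _ACK, and received attributes are handed
    // back through the callbacks' recv_data(). Once DTLS no longer needs it:
    conn->DeregisterDtlsPiggyback();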
- webrtc::TaskQueueBase* const network_thread_; + TaskQueueBase* const network_thread_; const uint32_t id_; - rtc::WeakPtr port_; + WeakPtr port_; Candidate local_candidate_ RTC_GUARDED_BY(network_thread_); Candidate remote_candidate_; ConnectionInfo stats_; - rtc::RateTracker recv_rate_tracker_; - rtc::RateTracker send_rate_tracker_; + RateTracker recv_rate_tracker_; + RateTracker send_rate_tracker_; int64_t last_send_data_ = 0; private: @@ -409,9 +428,9 @@ class RTC_EXPORT Connection : public CandidatePairInterface { void MaybeUpdateLocalCandidate(StunRequest* request, StunMessage* response) RTC_RUN_ON(network_thread_); - void LogCandidatePairConfig(webrtc::IceCandidatePairConfigType type) + void LogCandidatePairConfig(IceCandidatePairConfigType type) RTC_RUN_ON(network_thread_); - void LogCandidatePairEvent(webrtc::IceCandidatePairEventType type, + void LogCandidatePairEvent(IceCandidatePairEventType type, uint32_t transaction_id) RTC_RUN_ON(network_thread_); @@ -449,7 +468,7 @@ class RTC_EXPORT Connection : public CandidatePairInterface { // https://w3c.github.io/webrtc-stats/#dom-rtcicecandidatepairstats-totalroundtriptime uint64_t total_round_trip_time_ms_ RTC_GUARDED_BY(network_thread_) = 0; // https://w3c.github.io/webrtc-stats/#dom-rtcicecandidatepairstats-currentroundtriptime - absl::optional current_round_trip_time_ms_ + std::optional current_round_trip_time_ms_ RTC_GUARDED_BY(network_thread_); int64_t last_ping_sent_ RTC_GUARDED_BY( network_thread_); // last time we sent a ping to the other side @@ -463,61 +482,76 @@ class RTC_EXPORT Connection : public CandidatePairInterface { RTC_GUARDED_BY(network_thread_); // Transaction ID of the last connectivity check received. Null if having not // received a ping yet. - absl::optional last_ping_id_received_ + std::optional last_ping_id_received_ RTC_GUARDED_BY(network_thread_); - absl::optional unwritable_timeout_ RTC_GUARDED_BY(network_thread_); - absl::optional unwritable_min_checks_ RTC_GUARDED_BY(network_thread_); - absl::optional inactive_timeout_ RTC_GUARDED_BY(network_thread_); + std::optional unwritable_timeout_ RTC_GUARDED_BY(network_thread_); + std::optional unwritable_min_checks_ RTC_GUARDED_BY(network_thread_); + std::optional inactive_timeout_ RTC_GUARDED_BY(network_thread_); IceCandidatePairState state_ RTC_GUARDED_BY(network_thread_); // Time duration to switch from receiving to not receiving. - absl::optional receiving_timeout_ RTC_GUARDED_BY(network_thread_); + std::optional receiving_timeout_ RTC_GUARDED_BY(network_thread_); const int64_t time_created_ms_ RTC_GUARDED_BY(network_thread_); const int64_t delta_internal_unix_epoch_ms_ RTC_GUARDED_BY(network_thread_); int num_pings_sent_ RTC_GUARDED_BY(network_thread_) = 0; - absl::optional log_description_ + std::optional log_description_ RTC_GUARDED_BY(network_thread_); - webrtc::IceEventLog* ice_event_log_ RTC_GUARDED_BY(network_thread_) = nullptr; + IceEventLog* ice_event_log_ RTC_GUARDED_BY(network_thread_) = nullptr; // GOOG_PING_REQUEST is sent in place of STUN_BINDING_REQUEST // if configured via field trial, the remote peer supports it (signaled // in STUN_BINDING) and if the last STUN BINDING is identical to the one // that is about to be sent. 
- absl::optional remote_support_goog_ping_ - RTC_GUARDED_BY(network_thread_); + std::optional remote_support_goog_ping_ RTC_GUARDED_BY(network_thread_); std::unique_ptr cached_stun_binding_ RTC_GUARDED_BY(network_thread_); const IceFieldTrials* field_trials_; - rtc::EventBasedExponentialMovingAverage rtt_estimate_ + EventBasedExponentialMovingAverage rtt_estimate_ RTC_GUARDED_BY(network_thread_); - absl::optional( - const StunByteStringAttribute*)>> + std::optional( + const webrtc::StunByteStringAttribute*)>> goog_delta_consumer_; - absl::optional< - std::function)>> + std::optional< + std::function)>> goog_delta_ack_consumer_; + absl::AnyInvocable + received_packet_callback_; + + void MaybeAddDtlsPiggybackingAttributes(StunMessage* msg); + DtlsStunPiggybackCallbacks dtls_stun_piggyback_callbacks_; }; // ProxyConnection defers all the interesting work to the port. class ProxyConnection : public Connection { public: - ProxyConnection(rtc::WeakPtr port, + ProxyConnection(WeakPtr port, size_t index, const Candidate& remote_candidate); int Send(const void* data, size_t size, - const rtc::PacketOptions& options) override; + const AsyncSocketPacketOptions& options) override; int GetError() override; private: int error_ = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::Connection; +using ::webrtc::kGoogPingVersion; +using ::webrtc::kMaxStunBindingLength; +using ::webrtc::ProxyConnection; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_CONNECTION_H_ diff --git a/p2p/base/connection_info.cc b/p2p/base/connection_info.cc index 363d32954e..ca88aede52 100644 --- a/p2p/base/connection_info.cc +++ b/p2p/base/connection_info.cc @@ -10,7 +10,7 @@ #include "p2p/base/connection_info.h" -namespace cricket { +namespace webrtc { ConnectionInfo::ConnectionInfo() : best_connection(false), @@ -41,4 +41,4 @@ ConnectionInfo::ConnectionInfo(const ConnectionInfo&) = default; ConnectionInfo::~ConnectionInfo() = default; -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/connection_info.h b/p2p/base/connection_info.h index e7ed1b4921..03d0f96d85 100644 --- a/p2p/base/connection_info.h +++ b/p2p/base/connection_info.h @@ -11,13 +11,15 @@ #ifndef P2P_BASE_CONNECTION_INFO_H_ #define P2P_BASE_CONNECTION_INFO_H_ +#include +#include +#include #include -#include "absl/types/optional.h" #include "api/candidate.h" #include "api/units/timestamp.h" -namespace cricket { +namespace webrtc { // States are from RFC 5245. http://tools.ietf.org/html/rfc5245#section-5.7.4 enum class IceCandidatePairState { @@ -72,16 +74,26 @@ struct ConnectionInfo { // https://w3c.github.io/webrtc-stats/#dom-rtcicecandidatepairstats-totalroundtriptime uint64_t total_round_trip_time_ms; // https://w3c.github.io/webrtc-stats/#dom-rtcicecandidatepairstats-currentroundtriptime - absl::optional current_round_trip_time_ms; + std::optional current_round_trip_time_ms; // https://w3c.github.io/webrtc-stats/#dom-rtcicecandidatepairstats-lastpacketreceivedtimestamp - absl::optional last_data_received; - absl::optional last_data_sent; + std::optional last_data_received; + std::optional last_data_sent; }; // Information about all the candidate pairs of a channel. 
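Note: `ConnectionInfo` above keeps its RTT and last-data fields optional (now `std::optional`), so stats consumers must check for a value before reading. Illustrative use, assuming `infos` is a `webrtc::ConnectionInfos` obtained from the transport's stats:

    for (const webrtc::ConnectionInfo& info : infos) {
      if (info.current_round_trip_time_ms.has_value()) {
        RTC_LOG(LS_INFO) << "pair rtt=" << *info.current_round_trip_time_ms
                         << " ms";
      }
      if (info.last_data_received.has_value()) {
        // webrtc::Timestamp anchored to the unix epoch, see Connection::stats().
      }
    }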
typedef std::vector ConnectionInfos; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::ConnectionInfo; +using ::webrtc::ConnectionInfos; +using ::webrtc::IceCandidatePairState; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_CONNECTION_INFO_H_ diff --git a/p2p/base/default_ice_transport_factory.cc b/p2p/base/default_ice_transport_factory.cc index 313d608750..6c314fd827 100644 --- a/p2p/base/default_ice_transport_factory.cc +++ b/p2p/base/default_ice_transport_factory.cc @@ -10,20 +10,26 @@ #include "p2p/base/default_ice_transport_factory.h" +#include +#include #include +#include "api/ice_transport_interface.h" #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "p2p/base/basic_ice_controller.h" #include "p2p/base/ice_controller_factory_interface.h" +#include "p2p/base/ice_controller_interface.h" +#include "p2p/base/p2p_transport_channel.h" namespace { -class BasicIceControllerFactory - : public cricket::IceControllerFactoryInterface { +class BasicIceControllerFactory : public webrtc::IceControllerFactoryInterface { public: - std::unique_ptr Create( - const cricket::IceControllerFactoryArgs& args) override { - return std::make_unique(args); + std::unique_ptr Create( + const webrtc::IceControllerFactoryArgs& args) override { + return std::make_unique(args); } }; @@ -32,23 +38,22 @@ class BasicIceControllerFactory namespace webrtc { DefaultIceTransport::DefaultIceTransport( - std::unique_ptr internal) + std::unique_ptr internal) : internal_(std::move(internal)) {} DefaultIceTransport::~DefaultIceTransport() { RTC_DCHECK_RUN_ON(&thread_checker_); } -rtc::scoped_refptr +scoped_refptr DefaultIceTransportFactory::CreateIceTransport( const std::string& transport_name, int component, IceTransportInit init) { BasicIceControllerFactory factory; init.set_ice_controller_factory(&factory); - return rtc::make_ref_counted( - cricket::P2PTransportChannel::Create(transport_name, component, - std::move(init))); + return make_ref_counted( + P2PTransportChannel::Create(transport_name, component, std::move(init))); } } // namespace webrtc diff --git a/p2p/base/default_ice_transport_factory.h b/p2p/base/default_ice_transport_factory.h index e46680d480..adf726a187 100644 --- a/p2p/base/default_ice_transport_factory.h +++ b/p2p/base/default_ice_transport_factory.h @@ -15,8 +15,10 @@ #include #include "api/ice_transport_interface.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "p2p/base/p2p_transport_channel.h" -#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -26,18 +28,17 @@ namespace webrtc { // which the internal P2PTransportChannel lives. 
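// [Editor's sketch] The connection.h and connection_info.h hunks above move
// code from ::cricket into ::webrtc and keep the old spellings alive through
// using declarations guarded by WEBRTC_ALLOW_DEPRECATED_NAMESPACES. A minimal
// self-contained illustration of that migration pattern, with a hypothetical
// ExampleType standing in for the real classes:
namespace webrtc {
struct ExampleType {
  int value = 0;
};
}  // namespace webrtc

// Re-export under the legacy namespace so existing callers keep compiling
// until they are migrated.
#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES
namespace cricket {
using ::webrtc::ExampleType;
}  // namespace cricket
#endif  // WEBRTC_ALLOW_DEPRECATED_NAMESPACES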
class DefaultIceTransport : public IceTransportInterface { public: - explicit DefaultIceTransport( - std::unique_ptr internal); + explicit DefaultIceTransport(std::unique_ptr internal); ~DefaultIceTransport(); - cricket::IceTransportInternal* internal() override { + IceTransportInternal* internal() override { RTC_DCHECK_RUN_ON(&thread_checker_); return internal_.get(); } private: const SequenceChecker thread_checker_{}; - std::unique_ptr internal_ + std::unique_ptr internal_ RTC_GUARDED_BY(thread_checker_); }; @@ -47,7 +48,7 @@ class DefaultIceTransportFactory : public IceTransportFactory { ~DefaultIceTransportFactory() = default; // Must be called on the network thread and returns a DefaultIceTransport. - rtc::scoped_refptr CreateIceTransport( + scoped_refptr CreateIceTransport( const std::string& transport_name, int component, IceTransportInit init) override; diff --git a/p2p/base/dtls_transport.cc b/p2p/base/dtls_transport.cc deleted file mode 100644 index 3a61fd4029..0000000000 --- a/p2p/base/dtls_transport.cc +++ /dev/null @@ -1,870 +0,0 @@ -/* - * Copyright 2011 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "p2p/base/dtls_transport.h" - -#include -#include -#include - -#include "absl/memory/memory.h" -#include "absl/strings/string_view.h" -#include "api/array_view.h" -#include "api/dtls_transport_interface.h" -#include "api/rtc_event_log/rtc_event_log.h" -#include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" -#include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h" -#include "p2p/base/packet_transport_internal.h" -#include "rtc_base/buffer.h" -#include "rtc_base/checks.h" -#include "rtc_base/dscp.h" -#include "rtc_base/logging.h" -#include "rtc_base/rtc_certificate.h" -#include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/stream.h" -#include "rtc_base/thread.h" - -namespace cricket { - -// We don't pull the RTP constants from rtputils.h, to avoid a layer violation. -static const size_t kDtlsRecordHeaderLen = 13; -static const size_t kMaxDtlsPacketLen = 2048; -static const size_t kMinRtpPacketLen = 12; - -// Maximum number of pending packets in the queue. Packets are read immediately -// after they have been written, so a capacity of "1" is sufficient. -// -// However, this bug seems to indicate that's not the case: crbug.com/1063834 -// So, temporarily increasing it to 2 to see if that makes a difference. -static const size_t kMaxPendingPackets = 2; - -// Minimum and maximum values for the initial DTLS handshake timeout. We'll pick -// an initial timeout based on ICE RTT estimates, but clamp it to this range. 
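// [Editor's sketch] The comment above (and ConfigureHandshakeTimeout() later
// in this deleted file) pick the initial DTLS retransmission timeout as twice
// the ICE RTT estimate, clamped to the [kMinHandshakeTimeout,
// kMaxHandshakeTimeout] range. A standalone version of that rule, assuming a
// 1000 ms fallback when no RTT estimate exists:
#include <algorithm>
#include <optional>

int InitialDtlsHandshakeTimeoutMs(std::optional<int> ice_rtt_ms) {
  constexpr int kMinTimeoutMs = 50;    // Mirrors kMinHandshakeTimeout.
  constexpr int kMaxTimeoutMs = 3000;  // Mirrors kMaxHandshakeTimeout.
  if (!ice_rtt_ms.has_value()) {
    return 1000;  // Assumed library default when no ICE RTT is available.
  }
  return std::clamp(2 * *ice_rtt_ms, kMinTimeoutMs, kMaxTimeoutMs);
}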
-static const int kMinHandshakeTimeout = 50; -static const int kMaxHandshakeTimeout = 3000; - -static bool IsDtlsPacket(const char* data, size_t len) { - const uint8_t* u = reinterpret_cast(data); - return (len >= kDtlsRecordHeaderLen && (u[0] > 19 && u[0] < 64)); -} -static bool IsDtlsClientHelloPacket(const char* data, size_t len) { - if (!IsDtlsPacket(data, len)) { - return false; - } - const uint8_t* u = reinterpret_cast(data); - return len > 17 && u[0] == 22 && u[13] == 1; -} -static bool IsRtpPacket(const char* data, size_t len) { - const uint8_t* u = reinterpret_cast(data); - return (len >= kMinRtpPacketLen && (u[0] & 0xC0) == 0x80); -} - -StreamInterfaceChannel::StreamInterfaceChannel( - IceTransportInternal* ice_transport) - : ice_transport_(ice_transport), - state_(rtc::SS_OPEN), - packets_(kMaxPendingPackets, kMaxDtlsPacketLen) {} - -rtc::StreamResult StreamInterfaceChannel::Read(rtc::ArrayView buffer, - size_t& read, - int& error) { - RTC_DCHECK_RUN_ON(&sequence_checker_); - - if (state_ == rtc::SS_CLOSED) - return rtc::SR_EOS; - if (state_ == rtc::SS_OPENING) - return rtc::SR_BLOCK; - - if (!packets_.ReadFront(buffer.data(), buffer.size(), &read)) { - return rtc::SR_BLOCK; - } - - return rtc::SR_SUCCESS; -} - -rtc::StreamResult StreamInterfaceChannel::Write( - rtc::ArrayView data, - size_t& written, - int& error) { - RTC_DCHECK_RUN_ON(&sequence_checker_); - // Always succeeds, since this is an unreliable transport anyway. - // TODO(zhihuang): Should this block if ice_transport_'s temporarily - // unwritable? - rtc::PacketOptions packet_options; - ice_transport_->SendPacket(reinterpret_cast(data.data()), - data.size(), packet_options); - written = data.size(); - return rtc::SR_SUCCESS; -} - -bool StreamInterfaceChannel::OnPacketReceived(const char* data, size_t size) { - RTC_DCHECK_RUN_ON(&sequence_checker_); - if (packets_.size() > 0) { - RTC_LOG(LS_WARNING) << "Packet already in queue."; - } - bool ret = packets_.WriteBack(data, size, NULL); - if (!ret) { - // Somehow we received another packet before the SSLStreamAdapter read the - // previous one out of our temporary buffer. In this case, we'll log an - // error and still signal the read event, hoping that it will read the - // packet currently in packets_. 
- RTC_LOG(LS_ERROR) << "Failed to write packet to queue."; - } - SignalEvent(this, rtc::SE_READ, 0); - return ret; -} - -rtc::StreamState StreamInterfaceChannel::GetState() const { - RTC_DCHECK_RUN_ON(&sequence_checker_); - return state_; -} - -void StreamInterfaceChannel::Close() { - RTC_DCHECK_RUN_ON(&sequence_checker_); - packets_.Clear(); - state_ = rtc::SS_CLOSED; -} - -DtlsTransport::DtlsTransport(IceTransportInternal* ice_transport, - const webrtc::CryptoOptions& crypto_options, - webrtc::RtcEventLog* event_log, - rtc::SSLProtocolVersion max_version) - : component_(ice_transport->component()), - ice_transport_(ice_transport), - downward_(NULL), - srtp_ciphers_(crypto_options.GetSupportedDtlsSrtpCryptoSuites()), - ssl_max_version_(max_version), - event_log_(event_log) { - RTC_DCHECK(ice_transport_); - ConnectToIceTransport(); -} - -DtlsTransport::~DtlsTransport() = default; - -webrtc::DtlsTransportState DtlsTransport::dtls_state() const { - return dtls_state_; -} - -const std::string& DtlsTransport::transport_name() const { - return ice_transport_->transport_name(); -} - -int DtlsTransport::component() const { - return component_; -} - -bool DtlsTransport::IsDtlsActive() const { - return dtls_active_; -} - -bool DtlsTransport::SetLocalCertificate( - const rtc::scoped_refptr& certificate) { - if (dtls_active_) { - if (certificate == local_certificate_) { - // This may happen during renegotiation. - RTC_LOG(LS_INFO) << ToString() << ": Ignoring identical DTLS identity"; - return true; - } else { - RTC_LOG(LS_ERROR) << ToString() - << ": Can't change DTLS local identity in this state"; - return false; - } - } - - if (certificate) { - local_certificate_ = certificate; - dtls_active_ = true; - } else { - RTC_LOG(LS_INFO) << ToString() - << ": NULL DTLS identity supplied. Not doing DTLS"; - } - - return true; -} - -rtc::scoped_refptr DtlsTransport::GetLocalCertificate() - const { - return local_certificate_; -} - -bool DtlsTransport::SetDtlsRole(rtc::SSLRole role) { - if (dtls_) { - RTC_DCHECK(dtls_role_); - if (*dtls_role_ != role) { - RTC_LOG(LS_ERROR) - << "SSL Role can't be reversed after the session is setup."; - return false; - } - return true; - } - - dtls_role_ = role; - return true; -} - -bool DtlsTransport::GetDtlsRole(rtc::SSLRole* role) const { - if (!dtls_role_) { - return false; - } - *role = *dtls_role_; - return true; -} - -bool DtlsTransport::GetSslCipherSuite(int* cipher) { - if (dtls_state() != webrtc::DtlsTransportState::kConnected) { - return false; - } - - return dtls_->GetSslCipherSuite(cipher); -} - -webrtc::RTCError DtlsTransport::SetRemoteParameters( - absl::string_view digest_alg, - const uint8_t* digest, - size_t digest_len, - absl::optional role) { - rtc::Buffer remote_fingerprint_value(digest, digest_len); - bool is_dtls_restart = - dtls_active_ && remote_fingerprint_value_ != remote_fingerprint_value; - // Set SSL role. Role must be set before fingerprint is applied, which - // initiates DTLS setup. - if (role) { - if (is_dtls_restart) { - dtls_role_ = *role; - } else { - if (!SetDtlsRole(*role)) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to set SSL role for the transport."); - } - } - } - // Apply remote fingerprint. 
- if (!SetRemoteFingerprint(digest_alg, digest, digest_len)) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to apply remote fingerprint."); - } - return webrtc::RTCError::OK(); -} - -bool DtlsTransport::SetRemoteFingerprint(absl::string_view digest_alg, - const uint8_t* digest, - size_t digest_len) { - rtc::Buffer remote_fingerprint_value(digest, digest_len); - - // Once we have the local certificate, the same remote fingerprint can be set - // multiple times. - if (dtls_active_ && remote_fingerprint_value_ == remote_fingerprint_value && - !digest_alg.empty()) { - // This may happen during renegotiation. - RTC_LOG(LS_INFO) << ToString() - << ": Ignoring identical remote DTLS fingerprint"; - return true; - } - - // If the other side doesn't support DTLS, turn off `dtls_active_`. - // TODO(deadbeef): Remove this. It's dangerous, because it relies on higher - // level code to ensure DTLS is actually used, but there are tests that - // depend on it, for the case where an m= section is rejected. In that case - // SetRemoteFingerprint shouldn't even be called though. - if (digest_alg.empty()) { - RTC_DCHECK(!digest_len); - RTC_LOG(LS_INFO) << ToString() << ": Other side didn't support DTLS."; - dtls_active_ = false; - return true; - } - - // Otherwise, we must have a local certificate before setting remote - // fingerprint. - if (!dtls_active_) { - RTC_LOG(LS_ERROR) << ToString() - << ": Can't set DTLS remote settings in this state."; - return false; - } - - // At this point we know we are doing DTLS - bool fingerprint_changing = remote_fingerprint_value_.size() > 0u; - remote_fingerprint_value_ = std::move(remote_fingerprint_value); - remote_fingerprint_algorithm_ = std::string(digest_alg); - - if (dtls_ && !fingerprint_changing) { - // This can occur if DTLS is set up before a remote fingerprint is - // received. For instance, if we set up DTLS due to receiving an early - // ClientHello. - rtc::SSLPeerCertificateDigestError err; - if (!dtls_->SetPeerCertificateDigest( - remote_fingerprint_algorithm_, - reinterpret_cast(remote_fingerprint_value_.data()), - remote_fingerprint_value_.size(), &err)) { - RTC_LOG(LS_ERROR) << ToString() - << ": Couldn't set DTLS certificate digest."; - set_dtls_state(webrtc::DtlsTransportState::kFailed); - // If the error is "verification failed", don't return false, because - // this means the fingerprint was formatted correctly but didn't match - // the certificate from the DTLS handshake. Thus the DTLS state should go - // to "failed", but SetRemoteDescription shouldn't fail. - return err == rtc::SSLPeerCertificateDigestError::VERIFICATION_FAILED; - } - return true; - } - - // If the fingerprint is changing, we'll tear down the DTLS association and - // create a new one, resetting our state. - if (dtls_ && fingerprint_changing) { - dtls_.reset(nullptr); - set_dtls_state(webrtc::DtlsTransportState::kNew); - set_writable(false); - } - - if (!SetupDtls()) { - set_dtls_state(webrtc::DtlsTransportState::kFailed); - return false; - } - - return true; -} - -std::unique_ptr DtlsTransport::GetRemoteSSLCertChain() - const { - if (!dtls_) { - return nullptr; - } - - return dtls_->GetPeerSSLCertChain(); -} - -bool DtlsTransport::ExportKeyingMaterial(absl::string_view label, - const uint8_t* context, - size_t context_len, - bool use_context, - uint8_t* result, - size_t result_len) { - return (dtls_.get()) - ? 
dtls_->ExportKeyingMaterial(label, context, context_len, - use_context, result, result_len) - : false; -} - -bool DtlsTransport::SetupDtls() { - RTC_DCHECK(dtls_role_); - { - auto downward = std::make_unique(ice_transport_); - StreamInterfaceChannel* downward_ptr = downward.get(); - - dtls_ = rtc::SSLStreamAdapter::Create( - std::move(downward), - [this](rtc::SSLHandshakeError error) { OnDtlsHandshakeError(error); }); - if (!dtls_) { - RTC_LOG(LS_ERROR) << ToString() << ": Failed to create DTLS adapter."; - return false; - } - downward_ = downward_ptr; - } - - dtls_->SetIdentity(local_certificate_->identity()->Clone()); - dtls_->SetMode(rtc::SSL_MODE_DTLS); - dtls_->SetMaxProtocolVersion(ssl_max_version_); - dtls_->SetServerRole(*dtls_role_); - dtls_->SignalEvent.connect(this, &DtlsTransport::OnDtlsEvent); - if (remote_fingerprint_value_.size() && - !dtls_->SetPeerCertificateDigest( - remote_fingerprint_algorithm_, - reinterpret_cast(remote_fingerprint_value_.data()), - remote_fingerprint_value_.size())) { - RTC_LOG(LS_ERROR) << ToString() - << ": Couldn't set DTLS certificate digest."; - return false; - } - - // Set up DTLS-SRTP, if it's been enabled. - if (!srtp_ciphers_.empty()) { - if (!dtls_->SetDtlsSrtpCryptoSuites(srtp_ciphers_)) { - RTC_LOG(LS_ERROR) << ToString() << ": Couldn't set DTLS-SRTP ciphers."; - return false; - } - } else { - RTC_LOG(LS_INFO) << ToString() << ": Not using DTLS-SRTP."; - } - - RTC_LOG(LS_INFO) << ToString() << ": DTLS setup complete."; - - // If the underlying ice_transport is already writable at this point, we may - // be able to start DTLS right away. - MaybeStartDtls(); - return true; -} - -bool DtlsTransport::GetSrtpCryptoSuite(int* cipher) { - if (dtls_state() != webrtc::DtlsTransportState::kConnected) { - return false; - } - - return dtls_->GetDtlsSrtpCryptoSuite(cipher); -} - -bool DtlsTransport::GetSslVersionBytes(int* version) const { - if (dtls_state() != webrtc::DtlsTransportState::kConnected) { - return false; - } - - return dtls_->GetSslVersionBytes(version); -} - -// Called from upper layers to send a media packet. -int DtlsTransport::SendPacket(const char* data, - size_t size, - const rtc::PacketOptions& options, - int flags) { - if (!dtls_active_) { - // Not doing DTLS. - return ice_transport_->SendPacket(data, size, options); - } - - switch (dtls_state()) { - case webrtc::DtlsTransportState::kNew: - // Can't send data until the connection is active. - // TODO(ekr@rtfm.com): assert here if dtls_ is NULL? - return -1; - case webrtc::DtlsTransportState::kConnecting: - // Can't send data until the connection is active. - return -1; - case webrtc::DtlsTransportState::kConnected: - if (flags & PF_SRTP_BYPASS) { - RTC_DCHECK(!srtp_ciphers_.empty()); - if (!IsRtpPacket(data, size)) { - return -1; - } - - return ice_transport_->SendPacket(data, size, options); - } else { - size_t written; - int error; - return (dtls_->WriteAll( - rtc::MakeArrayView(reinterpret_cast(data), - size), - written, error) == rtc::SR_SUCCESS) - ? static_cast(size) - : -1; - } - case webrtc::DtlsTransportState::kFailed: - // Can't send anything when we're failed. - RTC_LOG(LS_ERROR) << ToString() - << ": Couldn't send packet due to " - "webrtc::DtlsTransportState::kFailed."; - return -1; - case webrtc::DtlsTransportState::kClosed: - // Can't send anything when we're closed. 
- RTC_LOG(LS_ERROR) << ToString() - << ": Couldn't send packet due to " - "webrtc::DtlsTransportState::kClosed."; - return -1; - default: - RTC_DCHECK_NOTREACHED(); - return -1; - } -} - -IceTransportInternal* DtlsTransport::ice_transport() { - return ice_transport_; -} - -bool DtlsTransport::IsDtlsConnected() { - return dtls_ && dtls_->IsTlsConnected(); -} - -bool DtlsTransport::receiving() const { - return receiving_; -} - -bool DtlsTransport::writable() const { - return writable_; -} - -int DtlsTransport::GetError() { - return ice_transport_->GetError(); -} - -absl::optional DtlsTransport::network_route() const { - return ice_transport_->network_route(); -} - -bool DtlsTransport::GetOption(rtc::Socket::Option opt, int* value) { - return ice_transport_->GetOption(opt, value); -} - -int DtlsTransport::SetOption(rtc::Socket::Option opt, int value) { - return ice_transport_->SetOption(opt, value); -} - -void DtlsTransport::ConnectToIceTransport() { - RTC_DCHECK(ice_transport_); - ice_transport_->SignalWritableState.connect(this, - &DtlsTransport::OnWritableState); - ice_transport_->SignalReadPacket.connect(this, &DtlsTransport::OnReadPacket); - ice_transport_->SignalSentPacket.connect(this, &DtlsTransport::OnSentPacket); - ice_transport_->SignalReadyToSend.connect(this, - &DtlsTransport::OnReadyToSend); - ice_transport_->SignalReceivingState.connect( - this, &DtlsTransport::OnReceivingState); - ice_transport_->SignalNetworkRouteChanged.connect( - this, &DtlsTransport::OnNetworkRouteChanged); -} - -// The state transition logic here is as follows: -// (1) If we're not doing DTLS-SRTP, then the state is just the -// state of the underlying impl() -// (2) If we're doing DTLS-SRTP: -// - Prior to the DTLS handshake, the state is neither receiving nor -// writable -// - When the impl goes writable for the first time we -// start the DTLS handshake -// - Once the DTLS handshake completes, the state is that of the -// impl again -void DtlsTransport::OnWritableState(rtc::PacketTransportInternal* transport) { - RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_DCHECK(transport == ice_transport_); - RTC_LOG(LS_VERBOSE) << ToString() - << ": ice_transport writable state changed to " - << ice_transport_->writable(); - - if (!dtls_active_) { - // Not doing DTLS. - // Note: SignalWritableState fired by set_writable. - set_writable(ice_transport_->writable()); - return; - } - - switch (dtls_state()) { - case webrtc::DtlsTransportState::kNew: - MaybeStartDtls(); - break; - case webrtc::DtlsTransportState::kConnected: - // Note: SignalWritableState fired by set_writable. - set_writable(ice_transport_->writable()); - break; - case webrtc::DtlsTransportState::kConnecting: - // Do nothing. - break; - case webrtc::DtlsTransportState::kFailed: - // Should not happen. Do nothing. - RTC_LOG(LS_ERROR) << ToString() - << ": OnWritableState() called in state " - "webrtc::DtlsTransportState::kFailed."; - break; - case webrtc::DtlsTransportState::kClosed: - // Should not happen. Do nothing. 
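// [Editor's sketch] The "state transition logic" comment above boils down to a
// simple writability rule; here it is as a free function with hypothetical
// names (the real code applies the rule through set_writable()):
enum class SketchDtlsState { kNew, kConnecting, kConnected, kFailed, kClosed };

bool SketchComputeWritable(bool dtls_active,
                           SketchDtlsState dtls_state,
                           bool ice_writable) {
  if (!dtls_active) {
    return ice_writable;  // Passthrough mode: mirror the ICE transport.
  }
  // With DTLS, the transport is writable only after the handshake completes
  // and while the underlying ICE transport is itself writable.
  return dtls_state == SketchDtlsState::kConnected && ice_writable;
}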
- RTC_LOG(LS_ERROR) << ToString() - << ": OnWritableState() called in state " - "webrtc::DtlsTransportState::kClosed."; - break; - case webrtc::DtlsTransportState::kNumValues: - RTC_DCHECK_NOTREACHED(); - break; - } -} - -void DtlsTransport::OnReceivingState(rtc::PacketTransportInternal* transport) { - RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_DCHECK(transport == ice_transport_); - RTC_LOG(LS_VERBOSE) << ToString() - << ": ice_transport " - "receiving state changed to " - << ice_transport_->receiving(); - if (!dtls_active_ || dtls_state() == webrtc::DtlsTransportState::kConnected) { - // Note: SignalReceivingState fired by set_receiving. - set_receiving(ice_transport_->receiving()); - } -} - -void DtlsTransport::OnReadPacket(rtc::PacketTransportInternal* transport, - const char* data, - size_t size, - const int64_t& packet_time_us, - int flags) { - RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_DCHECK(transport == ice_transport_); - RTC_DCHECK(flags == 0); - - if (!dtls_active_) { - // Not doing DTLS. - SignalReadPacket(this, data, size, packet_time_us, 0); - return; - } - - switch (dtls_state()) { - case webrtc::DtlsTransportState::kNew: - if (dtls_) { - RTC_LOG(LS_INFO) << ToString() - << ": Packet received before DTLS started."; - } else { - RTC_LOG(LS_WARNING) << ToString() - << ": Packet received before we know if we are " - "doing DTLS or not."; - } - // Cache a client hello packet received before DTLS has actually started. - if (IsDtlsClientHelloPacket(data, size)) { - RTC_LOG(LS_INFO) << ToString() - << ": Caching DTLS ClientHello packet until DTLS is " - "started."; - cached_client_hello_.SetData(data, size); - // If we haven't started setting up DTLS yet (because we don't have a - // remote fingerprint/role), we can use the client hello as a clue that - // the peer has chosen the client role, and proceed with the handshake. - // The fingerprint will be verified when it's set. - if (!dtls_ && local_certificate_) { - SetDtlsRole(rtc::SSL_SERVER); - SetupDtls(); - } - } else { - RTC_LOG(LS_INFO) << ToString() - << ": Not a DTLS ClientHello packet; dropping."; - } - break; - - case webrtc::DtlsTransportState::kConnecting: - case webrtc::DtlsTransportState::kConnected: - // We should only get DTLS or SRTP packets; STUN's already been demuxed. - // Is this potentially a DTLS packet? - if (IsDtlsPacket(data, size)) { - if (!HandleDtlsPacket(data, size)) { - RTC_LOG(LS_ERROR) << ToString() << ": Failed to handle DTLS packet."; - return; - } - } else { - // Not a DTLS packet; our handshake should be complete by now. - if (dtls_state() != webrtc::DtlsTransportState::kConnected) { - RTC_LOG(LS_ERROR) << ToString() - << ": Received non-DTLS packet before DTLS " - "complete."; - return; - } - - // And it had better be a SRTP packet. - if (!IsRtpPacket(data, size)) { - RTC_LOG(LS_ERROR) - << ToString() << ": Received unexpected non-DTLS packet."; - return; - } - - // Sanity check. - RTC_DCHECK(!srtp_ciphers_.empty()); - - // Signal this upwards as a bypass packet. - SignalReadPacket(this, data, size, packet_time_us, PF_SRTP_BYPASS); - } - break; - case webrtc::DtlsTransportState::kFailed: - case webrtc::DtlsTransportState::kClosed: - case webrtc::DtlsTransportState::kNumValues: - // This shouldn't be happening. Drop the packet. 
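// [Editor's sketch] OnReadPacket() above demultiplexes incoming packets by
// their first byte: DTLS records use content types 20..63, RTP/SRTP packets
// start with version bits 0b10, and STUN has already been demuxed by the ICE
// layer. The same rule, extracted from the deleted IsDtlsPacket() and
// IsRtpPacket() helpers, as a standalone function:
#include <cstddef>
#include <cstdint>

enum class WirePacketKind { kDtls, kRtpOrSrtp, kUnknown };

WirePacketKind ClassifyFirstByte(const uint8_t* data, size_t len) {
  constexpr size_t kDtlsRecordHeaderLen = 13;
  constexpr size_t kMinRtpPacketLen = 12;
  if (len >= kDtlsRecordHeaderLen && data[0] > 19 && data[0] < 64) {
    return WirePacketKind::kDtls;
  }
  if (len >= kMinRtpPacketLen && (data[0] & 0xC0) == 0x80) {
    return WirePacketKind::kRtpOrSrtp;
  }
  return WirePacketKind::kUnknown;
}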
- break; - } -} - -void DtlsTransport::OnSentPacket(rtc::PacketTransportInternal* transport, - const rtc::SentPacket& sent_packet) { - RTC_DCHECK_RUN_ON(&thread_checker_); - SignalSentPacket(this, sent_packet); -} - -void DtlsTransport::OnReadyToSend(rtc::PacketTransportInternal* transport) { - RTC_DCHECK_RUN_ON(&thread_checker_); - if (writable()) { - SignalReadyToSend(this); - } -} - -void DtlsTransport::OnDtlsEvent(rtc::StreamInterface* dtls, int sig, int err) { - RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_DCHECK(dtls == dtls_.get()); - if (sig & rtc::SE_OPEN) { - // This is the first time. - RTC_LOG(LS_INFO) << ToString() << ": DTLS handshake complete."; - if (dtls_->GetState() == rtc::SS_OPEN) { - // The check for OPEN shouldn't be necessary but let's make - // sure we don't accidentally frob the state if it's closed. - set_dtls_state(webrtc::DtlsTransportState::kConnected); - set_writable(true); - } - } - if (sig & rtc::SE_READ) { - uint8_t buf[kMaxDtlsPacketLen]; - size_t read; - int read_error; - rtc::StreamResult ret; - // The underlying DTLS stream may have received multiple DTLS records in - // one packet, so read all of them. - do { - ret = dtls_->Read(buf, read, read_error); - if (ret == rtc::SR_SUCCESS) { - SignalReadPacket(this, reinterpret_cast(buf), read, - rtc::TimeMicros(), 0); - } else if (ret == rtc::SR_EOS) { - // Remote peer shut down the association with no error. - RTC_LOG(LS_INFO) << ToString() << ": DTLS transport closed by remote"; - set_writable(false); - set_dtls_state(webrtc::DtlsTransportState::kClosed); - SignalClosed(this); - } else if (ret == rtc::SR_ERROR) { - // Remote peer shut down the association with an error. - RTC_LOG(LS_INFO) - << ToString() - << ": Closed by remote with DTLS transport error, code=" - << read_error; - set_writable(false); - set_dtls_state(webrtc::DtlsTransportState::kFailed); - SignalClosed(this); - } - } while (ret == rtc::SR_SUCCESS); - } - if (sig & rtc::SE_CLOSE) { - RTC_DCHECK(sig == rtc::SE_CLOSE); // SE_CLOSE should be by itself. - set_writable(false); - if (!err) { - RTC_LOG(LS_INFO) << ToString() << ": DTLS transport closed"; - set_dtls_state(webrtc::DtlsTransportState::kClosed); - } else { - RTC_LOG(LS_INFO) << ToString() << ": DTLS transport error, code=" << err; - set_dtls_state(webrtc::DtlsTransportState::kFailed); - } - } -} - -void DtlsTransport::OnNetworkRouteChanged( - absl::optional network_route) { - RTC_DCHECK_RUN_ON(&thread_checker_); - SignalNetworkRouteChanged(network_route); -} - -void DtlsTransport::MaybeStartDtls() { - if (dtls_ && ice_transport_->writable()) { - ConfigureHandshakeTimeout(); - - if (dtls_->StartSSL()) { - // This should never fail: - // Because we are operating in a nonblocking mode and all - // incoming packets come in via OnReadPacket(), which rejects - // packets in this state, the incoming queue must be empty. We - // ignore write errors, thus any errors must be because of - // configuration and therefore are our fault. - RTC_DCHECK_NOTREACHED() << "StartSSL failed."; - RTC_LOG(LS_ERROR) << ToString() << ": Couldn't start DTLS handshake"; - set_dtls_state(webrtc::DtlsTransportState::kFailed); - return; - } - RTC_LOG(LS_INFO) << ToString() - << ": DtlsTransport: Started DTLS handshake active=" - << IsDtlsActive(); - set_dtls_state(webrtc::DtlsTransportState::kConnecting); - // Now that the handshake has started, we can process a cached ClientHello - // (if one exists). 
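// [Editor's sketch] HandleDtlsPacket() below sanity-checks that a buffer is a
// sequence of complete DTLS records before handing it to the SSL stream: each
// record carries a 13-byte header with a big-endian body length at bytes
// 11..12. The same walk as a standalone function:
#include <cstddef>
#include <cstdint>

bool IsCompleteDtlsRecordSequence(const uint8_t* data, size_t size) {
  constexpr size_t kDtlsRecordHeaderLen = 13;
  while (size > 0) {
    if (size < kDtlsRecordHeaderLen) {
      return false;  // Too short to hold another record header.
    }
    const size_t record_len = (static_cast<size_t>(data[11]) << 8) | data[12];
    if (record_len + kDtlsRecordHeaderLen > size) {
      return false;  // Declared body runs past the end of the buffer.
    }
    data += record_len + kDtlsRecordHeaderLen;
    size -= record_len + kDtlsRecordHeaderLen;
  }
  return true;
}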
- if (cached_client_hello_.size()) { - if (*dtls_role_ == rtc::SSL_SERVER) { - RTC_LOG(LS_INFO) << ToString() - << ": Handling cached DTLS ClientHello packet."; - if (!HandleDtlsPacket(cached_client_hello_.data(), - cached_client_hello_.size())) { - RTC_LOG(LS_ERROR) << ToString() << ": Failed to handle DTLS packet."; - } - } else { - RTC_LOG(LS_WARNING) << ToString() - << ": Discarding cached DTLS ClientHello packet " - "because we don't have the server role."; - } - cached_client_hello_.Clear(); - } - } -} - -// Called from OnReadPacket when a DTLS packet is received. -bool DtlsTransport::HandleDtlsPacket(const char* data, size_t size) { - // Sanity check we're not passing junk that - // just looks like DTLS. - const uint8_t* tmp_data = reinterpret_cast(data); - size_t tmp_size = size; - while (tmp_size > 0) { - if (tmp_size < kDtlsRecordHeaderLen) - return false; // Too short for the header - - size_t record_len = (tmp_data[11] << 8) | (tmp_data[12]); - if ((record_len + kDtlsRecordHeaderLen) > tmp_size) - return false; // Body too short - - tmp_data += record_len + kDtlsRecordHeaderLen; - tmp_size -= record_len + kDtlsRecordHeaderLen; - } - - // Looks good. Pass to the SIC which ends up being passed to - // the DTLS stack. - return downward_->OnPacketReceived(data, size); -} - -void DtlsTransport::set_receiving(bool receiving) { - if (receiving_ == receiving) { - return; - } - receiving_ = receiving; - SignalReceivingState(this); -} - -void DtlsTransport::set_writable(bool writable) { - if (writable_ == writable) { - return; - } - if (event_log_) { - event_log_->Log( - std::make_unique(writable)); - } - RTC_LOG(LS_VERBOSE) << ToString() << ": set_writable to: " << writable; - writable_ = writable; - if (writable_) { - SignalReadyToSend(this); - } - SignalWritableState(this); -} - -void DtlsTransport::set_dtls_state(webrtc::DtlsTransportState state) { - if (dtls_state_ == state) { - return; - } - if (event_log_) { - event_log_->Log( - std::make_unique(state)); - } - RTC_LOG(LS_VERBOSE) << ToString() << ": set_dtls_state from:" - << static_cast(dtls_state_) << " to " - << static_cast(state); - dtls_state_ = state; - SendDtlsState(this, state); -} - -void DtlsTransport::OnDtlsHandshakeError(rtc::SSLHandshakeError error) { - SendDtlsHandshakeError(error); -} - -void DtlsTransport::ConfigureHandshakeTimeout() { - RTC_DCHECK(dtls_); - absl::optional rtt = ice_transport_->GetRttEstimate(); - if (rtt) { - // Limit the timeout to a reasonable range in case the ICE RTT takes - // extreme values. - int initial_timeout = std::max(kMinHandshakeTimeout, - std::min(kMaxHandshakeTimeout, 2 * (*rtt))); - RTC_LOG(LS_INFO) << ToString() << ": configuring DTLS handshake timeout " - << initial_timeout << " based on ICE RTT " << *rtt; - - dtls_->SetInitialRetransmissionTimeout(initial_timeout); - } else { - RTC_LOG(LS_INFO) - << ToString() - << ": no RTT estimate - using default DTLS handshake timeout"; - } -} - -} // namespace cricket diff --git a/p2p/base/dtls_transport.h b/p2p/base/dtls_transport.h index 4e21410b76..7cee7a4b14 100644 --- a/p2p/base/dtls_transport.h +++ b/p2p/base/dtls_transport.h @@ -1,5 +1,5 @@ /* - * Copyright 2011 The WebRTC Project Authors. All rights reserved. + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -11,254 +11,8 @@ #ifndef P2P_BASE_DTLS_TRANSPORT_H_ #define P2P_BASE_DTLS_TRANSPORT_H_ -#include -#include -#include - -#include "absl/strings/string_view.h" -#include "api/crypto/crypto_options.h" -#include "api/dtls_transport_interface.h" -#include "api/sequence_checker.h" -#include "p2p/base/dtls_transport_internal.h" -#include "p2p/base/ice_transport_internal.h" -#include "rtc_base/buffer.h" -#include "rtc_base/buffer_queue.h" -#include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/stream.h" -#include "rtc_base/strings/string_builder.h" -#include "rtc_base/system/no_unique_address.h" - -namespace rtc { -class PacketTransportInternal; -} - -namespace cricket { - -// A bridge between a packet-oriented/transport-type interface on -// the bottom and a StreamInterface on the top. -class StreamInterfaceChannel : public rtc::StreamInterface { - public: - explicit StreamInterfaceChannel(IceTransportInternal* ice_transport); - - StreamInterfaceChannel(const StreamInterfaceChannel&) = delete; - StreamInterfaceChannel& operator=(const StreamInterfaceChannel&) = delete; - - // Push in a packet; this gets pulled out from Read(). - bool OnPacketReceived(const char* data, size_t size); - - // Implementations of StreamInterface - rtc::StreamState GetState() const override; - void Close() override; - rtc::StreamResult Read(rtc::ArrayView buffer, - size_t& read, - int& error) override; - rtc::StreamResult Write(rtc::ArrayView data, - size_t& written, - int& error) override; - - private: - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker sequence_checker_; - IceTransportInternal* const ice_transport_; // owned by DtlsTransport - rtc::StreamState state_ RTC_GUARDED_BY(sequence_checker_); - rtc::BufferQueue packets_ RTC_GUARDED_BY(sequence_checker_); -}; - -// This class provides a DTLS SSLStreamAdapter inside a TransportChannel-style -// packet-based interface, wrapping an existing TransportChannel instance -// (e.g a P2PTransportChannel) -// Here's the way this works: -// -// DtlsTransport { -// SSLStreamAdapter* dtls_ { -// StreamInterfaceChannel downward_ { -// IceTransportInternal* ice_transport_; -// } -// } -// } -// -// - Data which comes into DtlsTransport from the underlying -// ice_transport_ via OnReadPacket() is checked for whether it is DTLS -// or not, and if it is, is passed to DtlsTransport::HandleDtlsPacket, -// which pushes it into to downward_. dtls_ is listening for events on -// downward_, so it immediately calls downward_->Read(). -// -// - Data written to DtlsTransport is passed either to downward_ or directly -// to ice_transport_, depending on whether DTLS is negotiated and whether -// the flags include PF_SRTP_BYPASS -// -// - The SSLStreamAdapter writes to downward_->Write() which translates it -// into packet writes on ice_transport_. -// -// This class is not thread safe; all methods must be called on the same thread -// as the constructor. -class DtlsTransport : public DtlsTransportInternal { - public: - // `ice_transport` is the ICE transport this DTLS transport is wrapping. It - // must outlive this DTLS transport. - // - // `crypto_options` are the options used for the DTLS handshake. This affects - // whether GCM crypto suites are negotiated. - // - // `event_log` is an optional RtcEventLog for logging state changes. It should - // outlive the DtlsTransport. 
- DtlsTransport( - IceTransportInternal* ice_transport, - const webrtc::CryptoOptions& crypto_options, - webrtc::RtcEventLog* event_log, - rtc::SSLProtocolVersion max_version = rtc::SSL_PROTOCOL_DTLS_12); - - ~DtlsTransport() override; - - DtlsTransport(const DtlsTransport&) = delete; - DtlsTransport& operator=(const DtlsTransport&) = delete; - - webrtc::DtlsTransportState dtls_state() const override; - const std::string& transport_name() const override; - int component() const override; - - // DTLS is active if a local certificate was set. Otherwise this acts in a - // "passthrough" mode, sending packets directly through the underlying ICE - // transport. - // TODO(deadbeef): Remove this weirdness, and handle it in the upper layers. - bool IsDtlsActive() const override; - - // SetLocalCertificate is what makes DTLS active. It must be called before - // SetRemoteFinterprint. - // TODO(deadbeef): Once DtlsTransport no longer has the concept of being - // "active" or not (acting as a passthrough if not active), just require this - // certificate on construction or "Start". - bool SetLocalCertificate( - const rtc::scoped_refptr& certificate) override; - rtc::scoped_refptr GetLocalCertificate() const override; - - // SetRemoteFingerprint must be called after SetLocalCertificate, and any - // other methods like SetDtlsRole. It's what triggers the actual DTLS setup. - // TODO(deadbeef): Rename to "Start" like in ORTC? - bool SetRemoteFingerprint(absl::string_view digest_alg, - const uint8_t* digest, - size_t digest_len) override; - - // SetRemoteParameters must be called after SetLocalCertificate. - webrtc::RTCError SetRemoteParameters( - absl::string_view digest_alg, - const uint8_t* digest, - size_t digest_len, - absl::optional role) override; - - // Called to send a packet (via DTLS, if turned on). - int SendPacket(const char* data, - size_t size, - const rtc::PacketOptions& options, - int flags) override; - - bool GetOption(rtc::Socket::Option opt, int* value) override; - - // Find out which TLS version was negotiated - bool GetSslVersionBytes(int* version) const override; - // Find out which DTLS-SRTP cipher was negotiated - bool GetSrtpCryptoSuite(int* cipher) override; - - bool GetDtlsRole(rtc::SSLRole* role) const override; - bool SetDtlsRole(rtc::SSLRole role) override; - - // Find out which DTLS cipher was negotiated - bool GetSslCipherSuite(int* cipher) override; - - // Once DTLS has been established, this method retrieves the certificate - // chain in use by the remote peer, for use in external identity - // verification. - std::unique_ptr GetRemoteSSLCertChain() const override; - - // Once DTLS has established (i.e., this ice_transport is writable), this - // method extracts the keys negotiated during the DTLS handshake, for use in - // external encryption. DTLS-SRTP uses this to extract the needed SRTP keys. - // See the SSLStreamAdapter documentation for info on the specific parameters. - bool ExportKeyingMaterial(absl::string_view label, - const uint8_t* context, - size_t context_len, - bool use_context, - uint8_t* result, - size_t result_len) override; - - IceTransportInternal* ice_transport() override; - - // For informational purposes. Tells if the DTLS handshake has finished. - // This may be true even if writable() is false, if the remote fingerprint - // has not yet been verified. 
- bool IsDtlsConnected(); - - bool receiving() const override; - bool writable() const override; - - int GetError() override; - - absl::optional network_route() const override; - - int SetOption(rtc::Socket::Option opt, int value) override; - - std::string ToString() const { - const absl::string_view RECEIVING_ABBREV[2] = {"_", "R"}; - const absl::string_view WRITABLE_ABBREV[2] = {"_", "W"}; - rtc::StringBuilder sb; - sb << "DtlsTransport[" << transport_name() << "|" << component_ << "|" - << RECEIVING_ABBREV[receiving()] << WRITABLE_ABBREV[writable()] << "]"; - return sb.Release(); - } - - private: - void ConnectToIceTransport(); - - void OnWritableState(rtc::PacketTransportInternal* transport); - void OnReadPacket(rtc::PacketTransportInternal* transport, - const char* data, - size_t size, - const int64_t& packet_time_us, - int flags); - void OnSentPacket(rtc::PacketTransportInternal* transport, - const rtc::SentPacket& sent_packet); - void OnReadyToSend(rtc::PacketTransportInternal* transport); - void OnReceivingState(rtc::PacketTransportInternal* transport); - void OnDtlsEvent(rtc::StreamInterface* stream_, int sig, int err); - void OnNetworkRouteChanged(absl::optional network_route); - bool SetupDtls(); - void MaybeStartDtls(); - bool HandleDtlsPacket(const char* data, size_t size); - void OnDtlsHandshakeError(rtc::SSLHandshakeError error); - void ConfigureHandshakeTimeout(); - - void set_receiving(bool receiving); - void set_writable(bool writable); - // Sets the DTLS state, signaling if necessary. - void set_dtls_state(webrtc::DtlsTransportState state); - - webrtc::SequenceChecker thread_checker_; - - const int component_; - webrtc::DtlsTransportState dtls_state_ = webrtc::DtlsTransportState::kNew; - // Underlying ice_transport, not owned by this class. - IceTransportInternal* const ice_transport_; - std::unique_ptr dtls_; // The DTLS stream - StreamInterfaceChannel* - downward_; // Wrapper for ice_transport_, owned by dtls_. - const std::vector srtp_ciphers_; // SRTP ciphers to use with DTLS. - bool dtls_active_ = false; - rtc::scoped_refptr local_certificate_; - absl::optional dtls_role_; - const rtc::SSLProtocolVersion ssl_max_version_; - rtc::Buffer remote_fingerprint_value_; - std::string remote_fingerprint_algorithm_; - - // Cached DTLS ClientHello packet that was received before we started the - // DTLS handshake. This could happen if the hello was received before the - // ice transport became writable, or before a remote fingerprint was received. - rtc::Buffer cached_client_hello_; - - bool receiving_ = false; - bool writable_ = false; - - webrtc::RtcEventLog* const event_log_; -}; - -} // namespace cricket +// This is a transitional header forwarding to the new version in the p2p/dtls/ +// folder. +#include "p2p/dtls/dtls_transport.h" #endif // P2P_BASE_DTLS_TRANSPORT_H_ diff --git a/p2p/base/dtls_transport_internal.h b/p2p/base/dtls_transport_internal.h index 3d20d1bfd6..0283e50072 100644 --- a/p2p/base/dtls_transport_internal.h +++ b/p2p/base/dtls_transport_internal.h @@ -1,5 +1,5 @@ /* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -11,147 +11,8 @@ #ifndef P2P_BASE_DTLS_TRANSPORT_INTERNAL_H_ #define P2P_BASE_DTLS_TRANSPORT_INTERNAL_H_ -#include -#include - -#include -#include -#include - -#include "absl/base/attributes.h" -#include "absl/strings/string_view.h" -#include "api/crypto/crypto_options.h" -#include "api/dtls_transport_interface.h" -#include "api/scoped_refptr.h" -#include "p2p/base/ice_transport_internal.h" -#include "p2p/base/packet_transport_internal.h" -#include "rtc_base/callback_list.h" -#include "rtc_base/ssl_certificate.h" -#include "rtc_base/ssl_fingerprint.h" -#include "rtc_base/ssl_stream_adapter.h" - -namespace cricket { - -enum PacketFlags { - PF_NORMAL = 0x00, // A normal packet. - PF_SRTP_BYPASS = 0x01, // An encrypted SRTP packet; bypass any additional - // crypto provided by the transport (e.g. DTLS) -}; - -// DtlsTransportInternal is an internal interface that does DTLS, also -// negotiating SRTP crypto suites so that it may be used for DTLS-SRTP. -// -// Once the public interface is supported, -// (https://www.w3.org/TR/webrtc/#rtcdtlstransport-interface) -// the DtlsTransportInterface will be split from this class. -class DtlsTransportInternal : public rtc::PacketTransportInternal { - public: - ~DtlsTransportInternal() override; - - DtlsTransportInternal(const DtlsTransportInternal&) = delete; - DtlsTransportInternal& operator=(const DtlsTransportInternal&) = delete; - - virtual webrtc::DtlsTransportState dtls_state() const = 0; - - virtual int component() const = 0; - - virtual bool IsDtlsActive() const = 0; - - virtual bool GetDtlsRole(rtc::SSLRole* role) const = 0; - - virtual bool SetDtlsRole(rtc::SSLRole role) = 0; - - // Finds out which TLS/DTLS version is running. - virtual bool GetSslVersionBytes(int* version) const = 0; - // Finds out which DTLS-SRTP cipher was negotiated. - // TODO(zhihuang): Remove this once all dependencies implement this. - virtual bool GetSrtpCryptoSuite(int* cipher) = 0; - - // Finds out which DTLS cipher was negotiated. - // TODO(zhihuang): Remove this once all dependencies implement this. - virtual bool GetSslCipherSuite(int* cipher) = 0; - - // Gets the local RTCCertificate used for DTLS. - virtual rtc::scoped_refptr GetLocalCertificate() - const = 0; - - virtual bool SetLocalCertificate( - const rtc::scoped_refptr& certificate) = 0; - - // Gets a copy of the remote side's SSL certificate chain. - virtual std::unique_ptr GetRemoteSSLCertChain() const = 0; - - // Allows key material to be extracted for external encryption. - virtual bool ExportKeyingMaterial(absl::string_view label, - const uint8_t* context, - size_t context_len, - bool use_context, - uint8_t* result, - size_t result_len) = 0; - - // Set DTLS remote fingerprint. Must be after local identity set. - ABSL_DEPRECATED("Use SetRemoteParameters instead.") - virtual bool SetRemoteFingerprint(absl::string_view digest_alg, - const uint8_t* digest, - size_t digest_len) = 0; - - // Set DTLS remote fingerprint and role. Must be after local identity set. - virtual webrtc::RTCError SetRemoteParameters( - absl::string_view digest_alg, - const uint8_t* digest, - size_t digest_len, - absl::optional role) = 0; - - ABSL_DEPRECATED("Set the max version via construction.") - bool SetSslMaxProtocolVersion(rtc::SSLProtocolVersion version) { - return true; - } - - // Expose the underneath IceTransport. 
- virtual IceTransportInternal* ice_transport() = 0; - - // F: void(DtlsTransportInternal*, const webrtc::DtlsTransportState) - template - void SubscribeDtlsTransportState(F&& callback) { - dtls_transport_state_callback_list_.AddReceiver(std::forward(callback)); - } - - template - void SubscribeDtlsTransportState(const void* id, F&& callback) { - dtls_transport_state_callback_list_.AddReceiver(id, - std::forward(callback)); - } - // Unsubscribe the subscription with given id. - void UnsubscribeDtlsTransportState(const void* id) { - dtls_transport_state_callback_list_.RemoveReceivers(id); - } - - void SendDtlsState(DtlsTransportInternal* transport, - webrtc::DtlsTransportState state) { - dtls_transport_state_callback_list_.Send(transport, state); - } - - // Emitted whenever the Dtls handshake failed on some transport channel. - // F: void(rtc::SSLHandshakeError) - template - void SubscribeDtlsHandshakeError(F&& callback) { - dtls_handshake_error_callback_list_.AddReceiver(std::forward(callback)); - } - - void SendDtlsHandshakeError(rtc::SSLHandshakeError error) { - dtls_handshake_error_callback_list_.Send(error); - } - - protected: - DtlsTransportInternal(); - - private: - webrtc::CallbackList - dtls_handshake_error_callback_list_; - webrtc::CallbackList - dtls_transport_state_callback_list_; -}; - -} // namespace cricket +// This is a transitional header forwarding to the new version in the p2p/dtls/ +// folder. +#include "p2p/dtls/dtls_transport_internal.h" #endif // P2P_BASE_DTLS_TRANSPORT_INTERNAL_H_ diff --git a/p2p/base/dtls_transport_unittest.cc b/p2p/base/dtls_transport_unittest.cc deleted file mode 100644 index e338ab6a49..0000000000 --- a/p2p/base/dtls_transport_unittest.cc +++ /dev/null @@ -1,748 +0,0 @@ -/* - * Copyright 2011 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "p2p/base/dtls_transport.h" - -#include -#include -#include -#include - -#include "absl/strings/string_view.h" -#include "api/dtls_transport_interface.h" -#include "p2p/base/fake_ice_transport.h" -#include "p2p/base/packet_transport_internal.h" -#include "rtc_base/checks.h" -#include "rtc_base/dscp.h" -#include "rtc_base/gunit.h" -#include "rtc_base/helpers.h" -#include "rtc_base/rtc_certificate.h" -#include "rtc_base/ssl_adapter.h" -#include "rtc_base/ssl_identity.h" -#include "rtc_base/ssl_stream_adapter.h" - -#define MAYBE_SKIP_TEST(feature) \ - if (!(rtc::SSLStreamAdapter::feature())) { \ - RTC_LOG(LS_INFO) << #feature " feature disabled... skipping"; \ - return; \ - } - -namespace cricket { - -static const size_t kPacketNumOffset = 8; -static const size_t kPacketHeaderLen = 12; -static const int kFakePacketId = 0x1234; -static const int kTimeout = 10000; - -static bool IsRtpLeadByte(uint8_t b) { - return ((b & 0xC0) == 0x80); -} - -// `modify_digest` is used to set modified fingerprints that are meant to fail -// validation. 
-void SetRemoteFingerprintFromCert( - DtlsTransport* transport, - const rtc::scoped_refptr& cert, - bool modify_digest = false) { - std::unique_ptr fingerprint = - rtc::SSLFingerprint::CreateFromCertificate(*cert); - if (modify_digest) { - ++fingerprint->digest.MutableData()[0]; - } - - // Even if digest is verified to be incorrect, should fail asynchronously. - EXPECT_TRUE( - transport - ->SetRemoteParameters( - fingerprint->algorithm, - reinterpret_cast(fingerprint->digest.data()), - fingerprint->digest.size(), absl::nullopt) - .ok()); -} - -class DtlsTestClient : public sigslot::has_slots<> { - public: - explicit DtlsTestClient(absl::string_view name) : name_(name) {} - void CreateCertificate(rtc::KeyType key_type) { - certificate_ = - rtc::RTCCertificate::Create(rtc::SSLIdentity::Create(name_, key_type)); - } - const rtc::scoped_refptr& certificate() { - return certificate_; - } - void SetupMaxProtocolVersion(rtc::SSLProtocolVersion version) { - ssl_max_version_ = version; - } - // Set up fake ICE transport and real DTLS transport under test. - void SetupTransports(IceRole role, int async_delay_ms = 0) { - fake_ice_transport_.reset(new FakeIceTransport("fake", 0)); - fake_ice_transport_->SetAsync(true); - fake_ice_transport_->SetAsyncDelay(async_delay_ms); - fake_ice_transport_->SetIceRole(role); - fake_ice_transport_->SetIceTiebreaker((role == ICEROLE_CONTROLLING) ? 1 - : 2); - // Hook the raw packets so that we can verify they are encrypted. - fake_ice_transport_->SignalReadPacket.connect( - this, &DtlsTestClient::OnFakeIceTransportReadPacket); - - dtls_transport_ = std::make_unique( - fake_ice_transport_.get(), webrtc::CryptoOptions(), - /*event_log=*/nullptr, ssl_max_version_); - // Note: Certificate may be null here if testing passthrough. - dtls_transport_->SetLocalCertificate(certificate_); - dtls_transport_->SignalWritableState.connect( - this, &DtlsTestClient::OnTransportWritableState); - dtls_transport_->SignalReadPacket.connect( - this, &DtlsTestClient::OnTransportReadPacket); - dtls_transport_->SignalSentPacket.connect( - this, &DtlsTestClient::OnTransportSentPacket); - } - - FakeIceTransport* fake_ice_transport() { - return static_cast(dtls_transport_->ice_transport()); - } - - DtlsTransport* dtls_transport() { return dtls_transport_.get(); } - - // Simulate fake ICE transports connecting. 
- bool Connect(DtlsTestClient* peer, bool asymmetric) { - fake_ice_transport()->SetDestination(peer->fake_ice_transport(), - asymmetric); - return true; - } - - int received_dtls_client_hellos() const { - return received_dtls_client_hellos_; - } - - int received_dtls_server_hellos() const { - return received_dtls_server_hellos_; - } - - void CheckRole(rtc::SSLRole role) { - if (role == rtc::SSL_CLIENT) { - ASSERT_EQ(0, received_dtls_client_hellos_); - ASSERT_GT(received_dtls_server_hellos_, 0); - } else { - ASSERT_GT(received_dtls_client_hellos_, 0); - ASSERT_EQ(0, received_dtls_server_hellos_); - } - } - - void CheckSrtp(int expected_crypto_suite) { - int crypto_suite; - bool rv = dtls_transport_->GetSrtpCryptoSuite(&crypto_suite); - if (dtls_transport_->IsDtlsActive() && expected_crypto_suite) { - ASSERT_TRUE(rv); - ASSERT_EQ(crypto_suite, expected_crypto_suite); - } else { - ASSERT_FALSE(rv); - } - } - - void CheckSsl() { - int cipher; - bool rv = dtls_transport_->GetSslCipherSuite(&cipher); - if (dtls_transport_->IsDtlsActive()) { - ASSERT_TRUE(rv); - EXPECT_TRUE( - rtc::SSLStreamAdapter::IsAcceptableCipher(cipher, rtc::KT_DEFAULT)); - } else { - ASSERT_FALSE(rv); - } - } - - void SendPackets(size_t size, size_t count, bool srtp) { - std::unique_ptr packet(new char[size]); - size_t sent = 0; - do { - // Fill the packet with a known value and a sequence number to check - // against, and make sure that it doesn't look like DTLS. - memset(packet.get(), sent & 0xff, size); - packet[0] = (srtp) ? 0x80 : 0x00; - rtc::SetBE32(packet.get() + kPacketNumOffset, - static_cast(sent)); - - // Only set the bypass flag if we've activated DTLS. - int flags = (certificate_ && srtp) ? PF_SRTP_BYPASS : 0; - rtc::PacketOptions packet_options; - packet_options.packet_id = kFakePacketId; - int rv = dtls_transport_->SendPacket(packet.get(), size, packet_options, - flags); - ASSERT_GT(rv, 0); - ASSERT_EQ(size, static_cast(rv)); - ++sent; - } while (sent < count); - } - - int SendInvalidSrtpPacket(size_t size) { - std::unique_ptr packet(new char[size]); - // Fill the packet with 0 to form an invalid SRTP packet. - memset(packet.get(), 0, size); - - rtc::PacketOptions packet_options; - return dtls_transport_->SendPacket(packet.get(), size, packet_options, - PF_SRTP_BYPASS); - } - - void ExpectPackets(size_t size) { - packet_size_ = size; - received_.clear(); - } - - size_t NumPacketsReceived() { return received_.size(); } - - // Inverse of SendPackets. - bool VerifyPacket(const char* data, size_t size, uint32_t* out_num) { - if (size != packet_size_ || - (data[0] != 0 && static_cast(data[0]) != 0x80)) { - return false; - } - uint32_t packet_num = rtc::GetBE32(data + kPacketNumOffset); - for (size_t i = kPacketHeaderLen; i < size; ++i) { - if (static_cast(data[i]) != (packet_num & 0xff)) { - return false; - } - } - if (out_num) { - *out_num = packet_num; - } - return true; - } - bool VerifyEncryptedPacket(const char* data, size_t size) { - // This is an encrypted data packet; let's make sure it's mostly random; - // less than 10% of the bytes should be equal to the cleartext packet. 
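// [Editor's sketch] The comment above describes the heuristic used by
// VerifyEncryptedPacket() here: ciphertext must be longer than the cleartext
// and should look random, so only a small fraction of bytes may coincide with
// the known cleartext content. A simplified standalone version (the real test
// compares against the packet sequence number rather than a fixed fill byte):
#include <cstddef>
#include <cstdint>

bool LooksEncrypted(const uint8_t* data,
                    size_t size,
                    size_t cleartext_size,
                    uint8_t cleartext_fill) {
  if (size <= cleartext_size) {
    return false;  // DTLS record framing should make the packet longer.
  }
  size_t matches = 0;
  for (size_t i = 0; i < size; ++i) {
    if (data[i] == cleartext_fill) {
      ++matches;
    }
  }
  return matches < size / 10;  // Well under 10% coincidental matches.
}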
- if (size <= packet_size_) { - return false; - } - uint32_t packet_num = rtc::GetBE32(data + kPacketNumOffset); - int num_matches = 0; - for (size_t i = kPacketNumOffset; i < size; ++i) { - if (static_cast(data[i]) == (packet_num & 0xff)) { - ++num_matches; - } - } - return (num_matches < ((static_cast(size) - 5) / 10)); - } - - // Transport callbacks - void OnTransportWritableState(rtc::PacketTransportInternal* transport) { - RTC_LOG(LS_INFO) << name_ << ": Transport '" << transport->transport_name() - << "' is writable"; - } - - void OnTransportReadPacket(rtc::PacketTransportInternal* transport, - const char* data, - size_t size, - const int64_t& /* packet_time_us */, - int flags) { - uint32_t packet_num = 0; - ASSERT_TRUE(VerifyPacket(data, size, &packet_num)); - received_.insert(packet_num); - // Only DTLS-SRTP packets should have the bypass flag set. - int expected_flags = - (certificate_ && IsRtpLeadByte(data[0])) ? PF_SRTP_BYPASS : 0; - ASSERT_EQ(expected_flags, flags); - } - - void OnTransportSentPacket(rtc::PacketTransportInternal* transport, - const rtc::SentPacket& sent_packet) { - sent_packet_ = sent_packet; - } - - rtc::SentPacket sent_packet() const { return sent_packet_; } - - // Hook into the raw packet stream to make sure DTLS packets are encrypted. - void OnFakeIceTransportReadPacket(rtc::PacketTransportInternal* transport, - const char* data, - size_t size, - const int64_t& /* packet_time_us */, - int flags) { - // Flags shouldn't be set on the underlying Transport packets. - ASSERT_EQ(0, flags); - - // Look at the handshake packets to see what role we played. - // Check that non-handshake packets are DTLS data or SRTP bypass. - if (data[0] == 22 && size > 17) { - if (data[13] == 1) { - ++received_dtls_client_hellos_; - } else if (data[13] == 2) { - ++received_dtls_server_hellos_; - } - } else if (dtls_transport_->IsDtlsActive() && - !(data[0] >= 20 && data[0] <= 22)) { - ASSERT_TRUE(data[0] == 23 || IsRtpLeadByte(data[0])); - if (data[0] == 23) { - ASSERT_TRUE(VerifyEncryptedPacket(data, size)); - } else if (IsRtpLeadByte(data[0])) { - ASSERT_TRUE(VerifyPacket(data, size, NULL)); - } - } - } - - private: - std::string name_; - rtc::scoped_refptr certificate_; - std::unique_ptr fake_ice_transport_; - std::unique_ptr dtls_transport_; - size_t packet_size_ = 0u; - std::set received_; - rtc::SSLProtocolVersion ssl_max_version_ = rtc::SSL_PROTOCOL_DTLS_12; - int received_dtls_client_hellos_ = 0; - int received_dtls_server_hellos_ = 0; - rtc::SentPacket sent_packet_; -}; - -// Base class for DtlsTransportTest and DtlsEventOrderingTest, which -// inherit from different variants of ::testing::Test. -// -// Note that this test always uses a FakeClock, due to the `fake_clock_` member -// variable. -class DtlsTransportTestBase { - public: - DtlsTransportTestBase() : client1_("P1"), client2_("P2"), use_dtls_(false) {} - - void SetMaxProtocolVersions(rtc::SSLProtocolVersion c1, - rtc::SSLProtocolVersion c2) { - client1_.SetupMaxProtocolVersion(c1); - client2_.SetupMaxProtocolVersion(c2); - } - // If not called, DtlsTransport will be used in SRTP bypass mode. - void PrepareDtls(rtc::KeyType key_type) { - client1_.CreateCertificate(key_type); - client2_.CreateCertificate(key_type); - use_dtls_ = true; - } - - // This test negotiates DTLS parameters before the underlying transports are - // writable. DtlsEventOrderingTest is responsible for exercising differerent - // orderings. 
- bool Connect(bool client1_server = true) { - Negotiate(client1_server); - EXPECT_TRUE(client1_.Connect(&client2_, false)); - - EXPECT_TRUE_SIMULATED_WAIT(client1_.dtls_transport()->writable() && - client2_.dtls_transport()->writable(), - kTimeout, fake_clock_); - if (!client1_.dtls_transport()->writable() || - !client2_.dtls_transport()->writable()) - return false; - - // Check that we used the right roles. - if (use_dtls_) { - client1_.CheckRole(client1_server ? rtc::SSL_SERVER : rtc::SSL_CLIENT); - client2_.CheckRole(client1_server ? rtc::SSL_CLIENT : rtc::SSL_SERVER); - } - - if (use_dtls_) { - // Check that we negotiated the right ciphers. Since GCM ciphers are not - // negotiated by default, we should end up with kSrtpAes128CmSha1_80. - client1_.CheckSrtp(rtc::kSrtpAes128CmSha1_80); - client2_.CheckSrtp(rtc::kSrtpAes128CmSha1_80); - } else { - // If DTLS isn't actually being used, GetSrtpCryptoSuite should return - // false. - client1_.CheckSrtp(rtc::kSrtpInvalidCryptoSuite); - client2_.CheckSrtp(rtc::kSrtpInvalidCryptoSuite); - } - - client1_.CheckSsl(); - client2_.CheckSsl(); - - return true; - } - - void Negotiate(bool client1_server = true) { - client1_.SetupTransports(ICEROLE_CONTROLLING); - client2_.SetupTransports(ICEROLE_CONTROLLED); - client1_.dtls_transport()->SetDtlsRole(client1_server ? rtc::SSL_SERVER - : rtc::SSL_CLIENT); - client2_.dtls_transport()->SetDtlsRole(client1_server ? rtc::SSL_CLIENT - : rtc::SSL_SERVER); - if (client2_.certificate()) { - SetRemoteFingerprintFromCert(client1_.dtls_transport(), - client2_.certificate()); - } - if (client1_.certificate()) { - SetRemoteFingerprintFromCert(client2_.dtls_transport(), - client1_.certificate()); - } - } - - void TestTransfer(size_t size, size_t count, bool srtp) { - RTC_LOG(LS_INFO) << "Expect packets, size=" << size; - client2_.ExpectPackets(size); - client1_.SendPackets(size, count, srtp); - EXPECT_EQ_SIMULATED_WAIT(count, client2_.NumPacketsReceived(), kTimeout, - fake_clock_); - } - - protected: - rtc::AutoThread main_thread_; - rtc::ScopedFakeClock fake_clock_; - DtlsTestClient client1_; - DtlsTestClient client2_; - bool use_dtls_; - rtc::SSLProtocolVersion ssl_expected_version_; -}; - -class DtlsTransportTest : public DtlsTransportTestBase, - public ::testing::Test {}; - -// Connect without DTLS, and transfer RTP data. -TEST_F(DtlsTransportTest, TestTransferRtp) { - ASSERT_TRUE(Connect()); - TestTransfer(1000, 100, /*srtp=*/false); -} - -// Test that the SignalSentPacket signal is wired up. -TEST_F(DtlsTransportTest, TestSignalSentPacket) { - ASSERT_TRUE(Connect()); - // Sanity check default value (-1). - ASSERT_EQ(client1_.sent_packet().send_time_ms, -1); - TestTransfer(1000, 100, false); - // Check that we get the expected fake packet ID, and a time of 0 from the - // fake clock. - EXPECT_EQ(kFakePacketId, client1_.sent_packet().packet_id); - EXPECT_GE(client1_.sent_packet().send_time_ms, 0); -} - -// Connect without DTLS, and transfer SRTP data. -TEST_F(DtlsTransportTest, TestTransferSrtp) { - ASSERT_TRUE(Connect()); - TestTransfer(1000, 100, /*srtp=*/true); -} - -// Connect with DTLS, and transfer data over DTLS. -TEST_F(DtlsTransportTest, TestTransferDtls) { - PrepareDtls(rtc::KT_DEFAULT); - ASSERT_TRUE(Connect()); - TestTransfer(1000, 100, /*srtp=*/false); -} - -// Connect with DTLS, combine multiple DTLS records into one packet. -// Our DTLS implementation doesn't do this, but other implementations may; -// see https://tools.ietf.org/html/rfc6347#section-4.1.1. 
-// This has caused interoperability problems with ORTCLib in the past. -TEST_F(DtlsTransportTest, TestTransferDtlsCombineRecords) { - PrepareDtls(rtc::KT_DEFAULT); - ASSERT_TRUE(Connect()); - // Our DTLS implementation always sends one record per packet, so to simulate - // an endpoint that sends multiple records per packet, we configure the fake - // ICE transport to combine every two consecutive packets into a single - // packet. - FakeIceTransport* transport = client1_.fake_ice_transport(); - transport->combine_outgoing_packets(true); - TestTransfer(500, 100, /*srtp=*/false); -} - -class DtlsTransportVersionTest - : public DtlsTransportTestBase, - public ::testing::TestWithParam< - ::testing::tuple> { -}; - -// Test that an acceptable cipher suite is negotiated when different versions -// of DTLS are supported. Note that it's IsAcceptableCipher that does the actual -// work. -TEST_P(DtlsTransportVersionTest, TestCipherSuiteNegotiation) { - PrepareDtls(rtc::KT_DEFAULT); - SetMaxProtocolVersions(::testing::get<0>(GetParam()), - ::testing::get<1>(GetParam())); - ASSERT_TRUE(Connect()); -} - -// Will test every combination of 1.0/1.2 on the client and server. -INSTANTIATE_TEST_SUITE_P( - TestCipherSuiteNegotiation, - DtlsTransportVersionTest, - ::testing::Combine(::testing::Values(rtc::SSL_PROTOCOL_DTLS_10, - rtc::SSL_PROTOCOL_DTLS_12), - ::testing::Values(rtc::SSL_PROTOCOL_DTLS_10, - rtc::SSL_PROTOCOL_DTLS_12))); - -// Connect with DTLS, negotiating DTLS-SRTP, and transfer SRTP using bypass. -TEST_F(DtlsTransportTest, TestTransferDtlsSrtp) { - PrepareDtls(rtc::KT_DEFAULT); - ASSERT_TRUE(Connect()); - TestTransfer(1000, 100, /*srtp=*/true); -} - -// Connect with DTLS-SRTP, transfer an invalid SRTP packet, and expects -1 -// returned. -TEST_F(DtlsTransportTest, TestTransferDtlsInvalidSrtpPacket) { - PrepareDtls(rtc::KT_DEFAULT); - ASSERT_TRUE(Connect()); - EXPECT_EQ(-1, client1_.SendInvalidSrtpPacket(100)); -} - -// Create a single transport with DTLS, and send normal data and SRTP data on -// it. -TEST_F(DtlsTransportTest, TestTransferDtlsSrtpDemux) { - PrepareDtls(rtc::KT_DEFAULT); - ASSERT_TRUE(Connect()); - TestTransfer(1000, 100, /*srtp=*/false); - TestTransfer(1000, 100, /*srtp=*/true); -} - -// Test transferring when the "answerer" has the server role. -TEST_F(DtlsTransportTest, TestTransferDtlsSrtpAnswererIsPassive) { - PrepareDtls(rtc::KT_DEFAULT); - ASSERT_TRUE(Connect(/*client1_server=*/false)); - TestTransfer(1000, 100, /*srtp=*/true); -} - -// Test that renegotiation (setting same role and fingerprint again) can be -// started before the clients become connected in the first negotiation. -TEST_F(DtlsTransportTest, TestRenegotiateBeforeConnect) { - PrepareDtls(rtc::KT_DEFAULT); - // Note: This is doing the same thing Connect normally does, minus some - // additional checks not relevant for this test. - Negotiate(); - Negotiate(); - EXPECT_TRUE(client1_.Connect(&client2_, false)); - EXPECT_TRUE_SIMULATED_WAIT(client1_.dtls_transport()->writable() && - client2_.dtls_transport()->writable(), - kTimeout, fake_clock_); - TestTransfer(1000, 100, true); -} - -// Test Certificates state after negotiation but before connection. -TEST_F(DtlsTransportTest, TestCertificatesBeforeConnect) { - PrepareDtls(rtc::KT_DEFAULT); - Negotiate(); - - // After negotiation, each side has a distinct local certificate, but still no - // remote certificate, because connection has not yet occurred. 
- auto certificate1 = client1_.dtls_transport()->GetLocalCertificate(); - auto certificate2 = client2_.dtls_transport()->GetLocalCertificate(); - ASSERT_NE(certificate1->GetSSLCertificate().ToPEMString(), - certificate2->GetSSLCertificate().ToPEMString()); - ASSERT_FALSE(client1_.dtls_transport()->GetRemoteSSLCertChain()); - ASSERT_FALSE(client2_.dtls_transport()->GetRemoteSSLCertChain()); -} - -// Test Certificates state after connection. -TEST_F(DtlsTransportTest, TestCertificatesAfterConnect) { - PrepareDtls(rtc::KT_DEFAULT); - ASSERT_TRUE(Connect()); - - // After connection, each side has a distinct local certificate. - auto certificate1 = client1_.dtls_transport()->GetLocalCertificate(); - auto certificate2 = client2_.dtls_transport()->GetLocalCertificate(); - ASSERT_NE(certificate1->GetSSLCertificate().ToPEMString(), - certificate2->GetSSLCertificate().ToPEMString()); - - // Each side's remote certificate is the other side's local certificate. - std::unique_ptr remote_cert1 = - client1_.dtls_transport()->GetRemoteSSLCertChain(); - ASSERT_TRUE(remote_cert1); - ASSERT_EQ(1u, remote_cert1->GetSize()); - ASSERT_EQ(remote_cert1->Get(0).ToPEMString(), - certificate2->GetSSLCertificate().ToPEMString()); - std::unique_ptr remote_cert2 = - client2_.dtls_transport()->GetRemoteSSLCertChain(); - ASSERT_TRUE(remote_cert2); - ASSERT_EQ(1u, remote_cert2->GetSize()); - ASSERT_EQ(remote_cert2->Get(0).ToPEMString(), - certificate1->GetSSLCertificate().ToPEMString()); -} - -// Test that packets are retransmitted according to the expected schedule. -// Each time a timeout occurs, the retransmission timer should be doubled up to -// 60 seconds. The timer defaults to 1 second, but for WebRTC we should be -// initializing it to 50ms. -TEST_F(DtlsTransportTest, TestRetransmissionSchedule) { - // We can only change the retransmission schedule with a recently-added - // BoringSSL API. Skip the test if not built with BoringSSL. - MAYBE_SKIP_TEST(IsBoringSsl); - - PrepareDtls(rtc::KT_DEFAULT); - // Exchange fingerprints and set SSL roles. - Negotiate(); - - // Make client2_ writable, but not client1_. - // This means client1_ will send DTLS client hellos but get no response. - EXPECT_TRUE(client2_.Connect(&client1_, true)); - EXPECT_TRUE_SIMULATED_WAIT(client2_.fake_ice_transport()->writable(), - kTimeout, fake_clock_); - - // Wait for the first client hello to be sent. - EXPECT_EQ_WAIT(1, client1_.received_dtls_client_hellos(), kTimeout); - EXPECT_FALSE(client1_.fake_ice_transport()->writable()); - - static int timeout_schedule_ms[] = {50, 100, 200, 400, 800, 1600, - 3200, 6400, 12800, 25600, 51200, 60000}; - - int expected_hellos = 1; - for (size_t i = 0; - i < (sizeof(timeout_schedule_ms) / sizeof(timeout_schedule_ms[0])); - ++i) { - // For each expected retransmission time, advance the fake clock a - // millisecond before the expected time and verify that no unexpected - // retransmissions were sent. Then advance it the final millisecond and - // verify that the expected retransmission was sent. - fake_clock_.AdvanceTime( - webrtc::TimeDelta::Millis(timeout_schedule_ms[i] - 1)); - EXPECT_EQ(expected_hellos, client1_.received_dtls_client_hellos()); - fake_clock_.AdvanceTime(webrtc::TimeDelta::Millis(1)); - EXPECT_EQ(++expected_hellos, client1_.received_dtls_client_hellos()); - } -} - -// The following events can occur in many different orders: -// 1. Caller receives remote fingerprint. -// 2. Caller is writable. -// 3. Caller receives ClientHello. -// 4. DTLS handshake finishes. 
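// Standalone sketch (standard library only, renamed enum) of what "causally
// consistent" means for the four events listed above: of the 4! = 24 possible
// orderings, only those in which the caller is writable and has received the
// ClientHello before the handshake finishes can occur, which leaves exactly
// the eight orderings instantiated for DtlsEventOrderingTest further below.
#include <algorithm>
#include <vector>

enum Event { RECEIVES_FINGERPRINT, WRITABLE, RECEIVES_CLIENTHELLO, FINISHES };

std::vector<std::vector<Event>> CausallyConsistentOrderings() {
  std::vector<Event> events = {RECEIVES_FINGERPRINT, WRITABLE,
                               RECEIVES_CLIENTHELLO, FINISHES};
  std::sort(events.begin(), events.end());
  std::vector<std::vector<Event>> orderings;
  do {
    auto pos = [&](Event e) {
      return std::find(events.begin(), events.end(), e) - events.begin();
    };
    // The handshake cannot finish before the caller is writable and has seen
    // the ClientHello; the fingerprint may arrive at any point, even last.
    if (pos(FINISHES) > pos(WRITABLE) &&
        pos(FINISHES) > pos(RECEIVES_CLIENTHELLO)) {
      orderings.push_back(events);
    }
  } while (std::next_permutation(events.begin(), events.end()));
  return orderings;  // Eight orderings, matching the INSTANTIATE list below.
}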
-// -// The tests below cover all causally consistent permutations of these events; -// the caller must be writable and receive a ClientHello before the handshake -// finishes, but otherwise any ordering is possible. -// -// For each permutation, the test verifies that a connection is established and -// fingerprint verified without any DTLS packet needing to be retransmitted. -// -// Each permutation is also tested with valid and invalid fingerprints, -// ensuring that the handshake fails with an invalid fingerprint. -enum DtlsTransportEvent { - CALLER_RECEIVES_FINGERPRINT, - CALLER_WRITABLE, - CALLER_RECEIVES_CLIENTHELLO, - HANDSHAKE_FINISHES -}; - -class DtlsEventOrderingTest - : public DtlsTransportTestBase, - public ::testing::TestWithParam< - ::testing::tuple, bool>> { - protected: - // If `valid_fingerprint` is false, the caller will receive a fingerprint - // that doesn't match the callee's certificate, so the handshake should fail. - void TestEventOrdering(const std::vector& events, - bool valid_fingerprint) { - // Pre-setup: Set local certificate on both caller and callee, and - // remote fingerprint on callee, but neither is writable and the caller - // doesn't have the callee's fingerprint. - PrepareDtls(rtc::KT_DEFAULT); - // Simulate packets being sent and arriving asynchronously. - // Otherwise the entire DTLS handshake would occur in one clock tick, and - // we couldn't inject method calls in the middle of it. - int simulated_delay_ms = 10; - client1_.SetupTransports(ICEROLE_CONTROLLING, simulated_delay_ms); - client2_.SetupTransports(ICEROLE_CONTROLLED, simulated_delay_ms); - // Similar to how NegotiateOrdering works. - client1_.dtls_transport()->SetDtlsRole(rtc::SSL_SERVER); - client2_.dtls_transport()->SetDtlsRole(rtc::SSL_CLIENT); - SetRemoteFingerprintFromCert(client2_.dtls_transport(), - client1_.certificate()); - - for (DtlsTransportEvent e : events) { - switch (e) { - case CALLER_RECEIVES_FINGERPRINT: - if (valid_fingerprint) { - SetRemoteFingerprintFromCert(client1_.dtls_transport(), - client2_.certificate()); - } else { - SetRemoteFingerprintFromCert(client1_.dtls_transport(), - client2_.certificate(), - true /*modify_digest*/); - } - break; - case CALLER_WRITABLE: - EXPECT_TRUE(client1_.Connect(&client2_, true)); - EXPECT_TRUE_SIMULATED_WAIT(client1_.fake_ice_transport()->writable(), - kTimeout, fake_clock_); - break; - case CALLER_RECEIVES_CLIENTHELLO: - // Sanity check that a ClientHello hasn't already been received. - EXPECT_EQ(0, client1_.received_dtls_client_hellos()); - // Making client2_ writable will cause it to send the ClientHello. - EXPECT_TRUE(client2_.Connect(&client1_, true)); - EXPECT_TRUE_SIMULATED_WAIT(client2_.fake_ice_transport()->writable(), - kTimeout, fake_clock_); - EXPECT_EQ_SIMULATED_WAIT(1, client1_.received_dtls_client_hellos(), - kTimeout, fake_clock_); - break; - case HANDSHAKE_FINISHES: - // Sanity check that the handshake hasn't already finished. - EXPECT_FALSE(client1_.dtls_transport()->IsDtlsConnected() || - client1_.dtls_transport()->dtls_state() == - webrtc::DtlsTransportState::kFailed); - EXPECT_TRUE_SIMULATED_WAIT( - client1_.dtls_transport()->IsDtlsConnected() || - client1_.dtls_transport()->dtls_state() == - webrtc::DtlsTransportState::kFailed, - kTimeout, fake_clock_); - break; - } - } - - webrtc::DtlsTransportState expected_final_state = - valid_fingerprint ? 
webrtc::DtlsTransportState::kConnected - : webrtc::DtlsTransportState::kFailed; - EXPECT_EQ_SIMULATED_WAIT(expected_final_state, - client1_.dtls_transport()->dtls_state(), kTimeout, - fake_clock_); - EXPECT_EQ_SIMULATED_WAIT(expected_final_state, - client2_.dtls_transport()->dtls_state(), kTimeout, - fake_clock_); - - // Transports should be writable iff there was a valid fingerprint. - EXPECT_EQ(valid_fingerprint, client1_.dtls_transport()->writable()); - EXPECT_EQ(valid_fingerprint, client2_.dtls_transport()->writable()); - - // Check that no hello needed to be retransmitted. - EXPECT_EQ(1, client1_.received_dtls_client_hellos()); - EXPECT_EQ(1, client2_.received_dtls_server_hellos()); - - if (valid_fingerprint) { - TestTransfer(1000, 100, false); - } - } -}; - -TEST_P(DtlsEventOrderingTest, TestEventOrdering) { - TestEventOrdering(::testing::get<0>(GetParam()), - ::testing::get<1>(GetParam())); -} - -INSTANTIATE_TEST_SUITE_P( - TestEventOrdering, - DtlsEventOrderingTest, - ::testing::Combine( - ::testing::Values( - std::vector{ - CALLER_RECEIVES_FINGERPRINT, CALLER_WRITABLE, - CALLER_RECEIVES_CLIENTHELLO, HANDSHAKE_FINISHES}, - std::vector{ - CALLER_WRITABLE, CALLER_RECEIVES_FINGERPRINT, - CALLER_RECEIVES_CLIENTHELLO, HANDSHAKE_FINISHES}, - std::vector{ - CALLER_WRITABLE, CALLER_RECEIVES_CLIENTHELLO, - CALLER_RECEIVES_FINGERPRINT, HANDSHAKE_FINISHES}, - std::vector{ - CALLER_WRITABLE, CALLER_RECEIVES_CLIENTHELLO, - HANDSHAKE_FINISHES, CALLER_RECEIVES_FINGERPRINT}, - std::vector{ - CALLER_RECEIVES_FINGERPRINT, CALLER_RECEIVES_CLIENTHELLO, - CALLER_WRITABLE, HANDSHAKE_FINISHES}, - std::vector{ - CALLER_RECEIVES_CLIENTHELLO, CALLER_RECEIVES_FINGERPRINT, - CALLER_WRITABLE, HANDSHAKE_FINISHES}, - std::vector{ - CALLER_RECEIVES_CLIENTHELLO, CALLER_WRITABLE, - CALLER_RECEIVES_FINGERPRINT, HANDSHAKE_FINISHES}, - std::vector{CALLER_RECEIVES_CLIENTHELLO, - CALLER_WRITABLE, HANDSHAKE_FINISHES, - CALLER_RECEIVES_FINGERPRINT}), - ::testing::Bool())); - -} // namespace cricket diff --git a/p2p/base/fake_ice_transport.h b/p2p/base/fake_ice_transport.h index ae7bf8947e..8527da6d21 100644 --- a/p2p/base/fake_ice_transport.h +++ b/p2p/base/fake_ice_transport.h @@ -1,5 +1,5 @@ /* - * Copyright 2017 The WebRTC Project Authors. All rights reserved. + * Copyright 2025 The WebRTC Project Authors. All rights reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -11,436 +11,7 @@ #ifndef P2P_BASE_FAKE_ICE_TRANSPORT_H_ #define P2P_BASE_FAKE_ICE_TRANSPORT_H_ -#include -#include -#include -#include - -#include "absl/algorithm/container.h" -#include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/ice_transport_interface.h" -#include "api/task_queue/pending_task_safety_flag.h" -#include "api/units/time_delta.h" -#include "p2p/base/ice_transport_internal.h" -#include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/task_queue_for_test.h" - -namespace cricket { -using ::webrtc::SafeTask; -using ::webrtc::TimeDelta; - -// All methods must be called on the network thread (which is either the thread -// calling the constructor, or the separate thread explicitly passed to the -// constructor). -class FakeIceTransport : public IceTransportInternal { - public: - explicit FakeIceTransport(absl::string_view name, - int component, - rtc::Thread* network_thread = nullptr) - : name_(name), - component_(component), - network_thread_(network_thread ? 
network_thread - : rtc::Thread::Current()) { - RTC_DCHECK(network_thread_); - } - // Must be called either on the network thread, or after the network thread - // has been shut down. - ~FakeIceTransport() override { - if (dest_ && dest_->dest_ == this) { - dest_->dest_ = nullptr; - } - } - - // If async, will send packets by "Post"-ing to message queue instead of - // synchronously "Send"-ing. - void SetAsync(bool async) { - RTC_DCHECK_RUN_ON(network_thread_); - async_ = async; - } - void SetAsyncDelay(int delay_ms) { - RTC_DCHECK_RUN_ON(network_thread_); - async_delay_ms_ = delay_ms; - } - - // SetWritable, SetReceiving and SetDestination are the main methods that can - // be used for testing, to simulate connectivity or lack thereof. - void SetWritable(bool writable) { - RTC_DCHECK_RUN_ON(network_thread_); - set_writable(writable); - } - void SetReceiving(bool receiving) { - RTC_DCHECK_RUN_ON(network_thread_); - set_receiving(receiving); - } - - // Simulates the two transports connecting to each other. - // If `asymmetric` is true this method only affects this FakeIceTransport. - // If false, it affects `dest` as well. - void SetDestination(FakeIceTransport* dest, bool asymmetric = false) { - RTC_DCHECK_RUN_ON(network_thread_); - if (dest == dest_) { - return; - } - RTC_DCHECK(!dest || !dest_) - << "Changing fake destination from one to another is not supported."; - if (dest) { - // This simulates the delivery of candidates. - dest_ = dest; - set_writable(true); - if (!asymmetric) { - dest->SetDestination(this, true); - } - } else { - // Simulates loss of connectivity, by asymmetrically forgetting dest_. - dest_ = nullptr; - set_writable(false); - } - } - - void SetTransportState(webrtc::IceTransportState state, - IceTransportState legacy_state) { - RTC_DCHECK_RUN_ON(network_thread_); - transport_state_ = state; - legacy_transport_state_ = legacy_state; - SignalIceTransportStateChanged(this); - } - - void SetConnectionCount(size_t connection_count) { - RTC_DCHECK_RUN_ON(network_thread_); - size_t old_connection_count = connection_count_; - connection_count_ = connection_count; - if (connection_count) { - had_connection_ = true; - } - // In this fake transport channel, `connection_count_` determines the - // transport state. - if (connection_count_ < old_connection_count) { - SignalStateChanged(this); - } - } - - void SetCandidatesGatheringComplete() { - RTC_DCHECK_RUN_ON(network_thread_); - if (gathering_state_ != kIceGatheringComplete) { - gathering_state_ = kIceGatheringComplete; - SignalGatheringState(this); - } - } - - // Convenience functions for accessing ICE config and other things. - int receiving_timeout() const { - RTC_DCHECK_RUN_ON(network_thread_); - return ice_config_.receiving_timeout_or_default(); - } - bool gather_continually() const { - RTC_DCHECK_RUN_ON(network_thread_); - return ice_config_.gather_continually(); - } - const Candidates& remote_candidates() const { - RTC_DCHECK_RUN_ON(network_thread_); - return remote_candidates_; - } - - // Fake IceTransportInternal implementation. 
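// Standalone sketch (standard library only, hypothetical FakeLink name) of the
// pairing logic in FakeIceTransport::SetDestination above: connecting to a
// destination marks this side writable, and unless `asymmetric` is set the
// destination is connected back, so both directions come up together; passing
// nullptr simulates loss of connectivity on this side only.
#include <cassert>

class FakeLink {
 public:
  void SetDestination(FakeLink* dest, bool asymmetric = false) {
    if (dest == dest_) {
      return;  // Nothing changes.
    }
    // Switching directly from one destination to another is not supported.
    assert(!dest || !dest_);
    if (dest) {
      dest_ = dest;
      writable_ = true;  // Candidates "delivered": this side can now send.
      if (!asymmetric) {
        dest->SetDestination(this, /*asymmetric=*/true);
      }
    } else {
      dest_ = nullptr;  // Simulated loss of connectivity.
      writable_ = false;
    }
  }
  bool writable() const { return writable_; }

 private:
  FakeLink* dest_ = nullptr;
  bool writable_ = false;
};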
- const std::string& transport_name() const override { return name_; } - int component() const override { return component_; } - uint64_t IceTiebreaker() const { - RTC_DCHECK_RUN_ON(network_thread_); - return tiebreaker_; - } - IceMode remote_ice_mode() const { - RTC_DCHECK_RUN_ON(network_thread_); - return remote_ice_mode_; - } - const std::string& ice_ufrag() const { return ice_parameters_.ufrag; } - const std::string& ice_pwd() const { return ice_parameters_.pwd; } - const std::string& remote_ice_ufrag() const { - return remote_ice_parameters_.ufrag; - } - const std::string& remote_ice_pwd() const { - return remote_ice_parameters_.pwd; - } - const IceParameters& ice_parameters() const { return ice_parameters_; } - const IceParameters& remote_ice_parameters() const { - return remote_ice_parameters_; - } - - IceTransportState GetState() const override { - RTC_DCHECK_RUN_ON(network_thread_); - if (legacy_transport_state_) { - return *legacy_transport_state_; - } - - if (connection_count_ == 0) { - return had_connection_ ? IceTransportState::STATE_FAILED - : IceTransportState::STATE_INIT; - } - - if (connection_count_ == 1) { - return IceTransportState::STATE_COMPLETED; - } - - return IceTransportState::STATE_CONNECTING; - } - - webrtc::IceTransportState GetIceTransportState() const override { - RTC_DCHECK_RUN_ON(network_thread_); - if (transport_state_) { - return *transport_state_; - } - - if (connection_count_ == 0) { - return had_connection_ ? webrtc::IceTransportState::kFailed - : webrtc::IceTransportState::kNew; - } - - if (connection_count_ == 1) { - return webrtc::IceTransportState::kCompleted; - } - - return webrtc::IceTransportState::kConnected; - } - - void SetIceRole(IceRole role) override { - RTC_DCHECK_RUN_ON(network_thread_); - role_ = role; - } - IceRole GetIceRole() const override { - RTC_DCHECK_RUN_ON(network_thread_); - return role_; - } - void SetIceTiebreaker(uint64_t tiebreaker) override { - RTC_DCHECK_RUN_ON(network_thread_); - tiebreaker_ = tiebreaker; - } - void SetIceParameters(const IceParameters& ice_params) override { - RTC_DCHECK_RUN_ON(network_thread_); - ice_parameters_ = ice_params; - } - void SetRemoteIceParameters(const IceParameters& params) override { - RTC_DCHECK_RUN_ON(network_thread_); - remote_ice_parameters_ = params; - } - - void SetRemoteIceMode(IceMode mode) override { - RTC_DCHECK_RUN_ON(network_thread_); - remote_ice_mode_ = mode; - } - - void MaybeStartGathering() override { - RTC_DCHECK_RUN_ON(network_thread_); - if (gathering_state_ == kIceGatheringNew) { - gathering_state_ = kIceGatheringGathering; - SignalGatheringState(this); - } - } - - IceGatheringState gathering_state() const override { - RTC_DCHECK_RUN_ON(network_thread_); - return gathering_state_; - } - - void SetIceConfig(const IceConfig& config) override { - RTC_DCHECK_RUN_ON(network_thread_); - ice_config_ = config; - } - - void AddRemoteCandidate(const Candidate& candidate) override { - RTC_DCHECK_RUN_ON(network_thread_); - remote_candidates_.push_back(candidate); - } - void RemoveRemoteCandidate(const Candidate& candidate) override { - RTC_DCHECK_RUN_ON(network_thread_); - auto it = absl::c_find(remote_candidates_, candidate); - if (it == remote_candidates_.end()) { - RTC_LOG(LS_INFO) << "Trying to remove a candidate which doesn't exist."; - return; - } - - remote_candidates_.erase(it); - } - - void RemoveAllRemoteCandidates() override { - RTC_DCHECK_RUN_ON(network_thread_); - remote_candidates_.clear(); - } - - bool GetStats(IceTransportStats* ice_transport_stats) override { - 
CandidateStats candidate_stats; - ConnectionInfo candidate_pair_stats; - ice_transport_stats->candidate_stats_list.clear(); - ice_transport_stats->candidate_stats_list.push_back(candidate_stats); - ice_transport_stats->connection_infos.clear(); - ice_transport_stats->connection_infos.push_back(candidate_pair_stats); - return true; - } - - absl::optional GetRttEstimate() override { return absl::nullopt; } - - const Connection* selected_connection() const override { return nullptr; } - absl::optional GetSelectedCandidatePair() - const override { - return absl::nullopt; - } - - // Fake PacketTransportInternal implementation. - bool writable() const override { - RTC_DCHECK_RUN_ON(network_thread_); - return writable_; - } - bool receiving() const override { - RTC_DCHECK_RUN_ON(network_thread_); - return receiving_; - } - // If combine is enabled, every two consecutive packets to be sent with - // "SendPacket" will be combined into one outgoing packet. - void combine_outgoing_packets(bool combine) { - RTC_DCHECK_RUN_ON(network_thread_); - combine_outgoing_packets_ = combine; - } - int SendPacket(const char* data, - size_t len, - const rtc::PacketOptions& options, - int flags) override { - RTC_DCHECK_RUN_ON(network_thread_); - if (!dest_) { - return -1; - } - - send_packet_.AppendData(data, len); - if (!combine_outgoing_packets_ || send_packet_.size() > len) { - rtc::CopyOnWriteBuffer packet(std::move(send_packet_)); - if (async_) { - network_thread_->PostDelayedTask( - SafeTask(task_safety_.flag(), - [this, packet] { - RTC_DCHECK_RUN_ON(network_thread_); - FakeIceTransport::SendPacketInternal(packet); - }), - TimeDelta::Millis(async_delay_ms_)); - } else { - SendPacketInternal(packet); - } - } - rtc::SentPacket sent_packet(options.packet_id, rtc::TimeMillis()); - SignalSentPacket(this, sent_packet); - return static_cast(len); - } - - int SetOption(rtc::Socket::Option opt, int value) override { - RTC_DCHECK_RUN_ON(network_thread_); - socket_options_[opt] = value; - return true; - } - bool GetOption(rtc::Socket::Option opt, int* value) override { - RTC_DCHECK_RUN_ON(network_thread_); - auto it = socket_options_.find(opt); - if (it != socket_options_.end()) { - *value = it->second; - return true; - } else { - return false; - } - } - - int GetError() override { return 0; } - - rtc::CopyOnWriteBuffer last_sent_packet() { - RTC_DCHECK_RUN_ON(network_thread_); - return last_sent_packet_; - } - - absl::optional network_route() const override { - RTC_DCHECK_RUN_ON(network_thread_); - return network_route_; - } - void SetNetworkRoute(absl::optional network_route) { - RTC_DCHECK_RUN_ON(network_thread_); - network_route_ = network_route; - SendTask(network_thread_, [this] { - RTC_DCHECK_RUN_ON(network_thread_); - SignalNetworkRouteChanged(network_route_); - }); - } - - private: - void set_writable(bool writable) - RTC_EXCLUSIVE_LOCKS_REQUIRED(network_thread_) { - if (writable_ == writable) { - return; - } - RTC_LOG(LS_INFO) << "Change writable_ to " << writable; - writable_ = writable; - if (writable_) { - SignalReadyToSend(this); - } - SignalWritableState(this); - } - - void set_receiving(bool receiving) - RTC_EXCLUSIVE_LOCKS_REQUIRED(network_thread_) { - if (receiving_ == receiving) { - return; - } - receiving_ = receiving; - SignalReceivingState(this); - } - - void SendPacketInternal(const rtc::CopyOnWriteBuffer& packet) - RTC_EXCLUSIVE_LOCKS_REQUIRED(network_thread_) { - if (dest_) { - last_sent_packet_ = packet; - dest_->SignalReadPacket(dest_, packet.data(), packet.size(), - rtc::TimeMicros(), 0); - } 
- } - - const std::string name_; - const int component_; - FakeIceTransport* dest_ RTC_GUARDED_BY(network_thread_) = nullptr; - bool async_ RTC_GUARDED_BY(network_thread_) = false; - int async_delay_ms_ RTC_GUARDED_BY(network_thread_) = 0; - Candidates remote_candidates_ RTC_GUARDED_BY(network_thread_); - IceConfig ice_config_ RTC_GUARDED_BY(network_thread_); - IceRole role_ RTC_GUARDED_BY(network_thread_) = ICEROLE_UNKNOWN; - uint64_t tiebreaker_ RTC_GUARDED_BY(network_thread_) = 0; - IceParameters ice_parameters_ RTC_GUARDED_BY(network_thread_); - IceParameters remote_ice_parameters_ RTC_GUARDED_BY(network_thread_); - IceMode remote_ice_mode_ RTC_GUARDED_BY(network_thread_) = ICEMODE_FULL; - size_t connection_count_ RTC_GUARDED_BY(network_thread_) = 0; - absl::optional transport_state_ - RTC_GUARDED_BY(network_thread_); - absl::optional legacy_transport_state_ - RTC_GUARDED_BY(network_thread_); - IceGatheringState gathering_state_ RTC_GUARDED_BY(network_thread_) = - kIceGatheringNew; - bool had_connection_ RTC_GUARDED_BY(network_thread_) = false; - bool writable_ RTC_GUARDED_BY(network_thread_) = false; - bool receiving_ RTC_GUARDED_BY(network_thread_) = false; - bool combine_outgoing_packets_ RTC_GUARDED_BY(network_thread_) = false; - rtc::CopyOnWriteBuffer send_packet_ RTC_GUARDED_BY(network_thread_); - absl::optional network_route_ - RTC_GUARDED_BY(network_thread_); - std::map socket_options_ - RTC_GUARDED_BY(network_thread_); - rtc::CopyOnWriteBuffer last_sent_packet_ RTC_GUARDED_BY(network_thread_); - rtc::Thread* const network_thread_; - webrtc::ScopedTaskSafetyDetached task_safety_; -}; - -class FakeIceTransportWrapper : public webrtc::IceTransportInterface { - public: - explicit FakeIceTransportWrapper( - std::unique_ptr internal) - : internal_(std::move(internal)) {} - - cricket::IceTransportInternal* internal() override { return internal_.get(); } - - private: - std::unique_ptr internal_; -}; - -} // namespace cricket +// TODO(jonaso): Remove! +#include "p2p/test/fake_ice_transport.h" #endif // P2P_BASE_FAKE_ICE_TRANSPORT_H_ diff --git a/p2p/base/fake_port_allocator.h b/p2p/base/fake_port_allocator.h index 20f6780b58..6a9a54033c 100644 --- a/p2p/base/fake_port_allocator.h +++ b/p2p/base/fake_port_allocator.h @@ -1,5 +1,5 @@ /* - * Copyright 2010 The WebRTC Project Authors. All rights reserved. + * Copyright 2025 The WebRTC Project Authors. All rights reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -11,270 +11,7 @@ #ifndef P2P_BASE_FAKE_PORT_ALLOCATOR_H_ #define P2P_BASE_FAKE_PORT_ALLOCATOR_H_ -#include -#include -#include -#include - -#include "absl/strings/string_view.h" -#include "p2p/base/basic_packet_socket_factory.h" -#include "p2p/base/port_allocator.h" -#include "p2p/base/udp_port.h" -#include "rtc_base/memory/always_valid_pointer.h" -#include "rtc_base/net_helpers.h" -#include "rtc_base/net_test_helpers.h" -#include "rtc_base/task_queue_for_test.h" -#include "rtc_base/thread.h" - -namespace rtc { -class SocketFactory; -} - -namespace cricket { - -class TestUDPPort : public UDPPort { - public: - static TestUDPPort* Create(rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, - uint16_t min_port, - uint16_t max_port, - absl::string_view username, - absl::string_view password, - bool emit_localhost_for_anyaddress, - const webrtc::FieldTrialsView* field_trials) { - TestUDPPort* port = - new TestUDPPort(thread, factory, network, min_port, max_port, username, - password, emit_localhost_for_anyaddress, field_trials); - if (!port->Init()) { - delete port; - port = nullptr; - } - return port; - } - - protected: - TestUDPPort(rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, - uint16_t min_port, - uint16_t max_port, - absl::string_view username, - absl::string_view password, - bool emit_localhost_for_anyaddress, - const webrtc::FieldTrialsView* field_trials) - : UDPPort(thread, - factory, - network, - min_port, - max_port, - username, - password, - emit_localhost_for_anyaddress, - field_trials) {} -}; - -// A FakePortAllocatorSession can be used with either a real or fake socket -// factory. It gathers a single loopback port, using IPv6 if available and -// not disabled. -class FakePortAllocatorSession : public PortAllocatorSession { - public: - FakePortAllocatorSession(PortAllocator* allocator, - rtc::Thread* network_thread, - rtc::PacketSocketFactory* factory, - absl::string_view content_name, - int component, - absl::string_view ice_ufrag, - absl::string_view ice_pwd, - const webrtc::FieldTrialsView* field_trials) - : PortAllocatorSession(content_name, - component, - ice_ufrag, - ice_pwd, - allocator->flags()), - network_thread_(network_thread), - factory_(factory), - ipv4_network_("network", - "unittest", - rtc::IPAddress(INADDR_LOOPBACK), - 32), - ipv6_network_("network", - "unittest", - rtc::IPAddress(in6addr_loopback), - 64), - port_(), - port_config_count_(0), - stun_servers_(allocator->stun_servers()), - turn_servers_(allocator->turn_servers()), - field_trials_(field_trials) { - ipv4_network_.AddIP(rtc::IPAddress(INADDR_LOOPBACK)); - ipv6_network_.AddIP(rtc::IPAddress(in6addr_loopback)); - set_ice_tiebreaker(/*kTiebreakerDefault = */ 44444); - } - - void SetCandidateFilter(uint32_t filter) override { - candidate_filter_ = filter; - } - - void StartGettingPorts() override { - if (!port_) { - rtc::Network& network = - (rtc::HasIPv6Enabled() && (flags() & PORTALLOCATOR_ENABLE_IPV6)) - ? 
ipv6_network_ - : ipv4_network_; - port_.reset(TestUDPPort::Create(network_thread_, factory_, &network, 0, 0, - username(), password(), false, - field_trials_)); - RTC_DCHECK(port_); - port_->SetIceTiebreaker(ice_tiebreaker()); - port_->SubscribePortDestroyed( - [this](PortInterface* port) { OnPortDestroyed(port); }); - AddPort(port_.get()); - } - ++port_config_count_; - running_ = true; - } - - void StopGettingPorts() override { running_ = false; } - bool IsGettingPorts() override { return running_; } - void ClearGettingPorts() override { is_cleared = true; } - bool IsCleared() const override { return is_cleared; } - - void RegatherOnFailedNetworks() override { - SignalIceRegathering(this, IceRegatheringReason::NETWORK_FAILURE); - } - - std::vector ReadyPorts() const override { - return ready_ports_; - } - std::vector ReadyCandidates() const override { - return candidates_; - } - void PruneAllPorts() override { port_->Prune(); } - bool CandidatesAllocationDone() const override { return allocation_done_; } - - int port_config_count() { return port_config_count_; } - - const ServerAddresses& stun_servers() const { return stun_servers_; } - - const std::vector& turn_servers() const { - return turn_servers_; - } - - uint32_t candidate_filter() const { return candidate_filter_; } - - int transport_info_update_count() const { - return transport_info_update_count_; - } - - protected: - void UpdateIceParametersInternal() override { - // Since this class is a fake and this method only is overridden for tests, - // we don't need to actually update the transport info. - ++transport_info_update_count_; - } - - private: - void AddPort(cricket::Port* port) { - port->set_component(component()); - port->set_generation(generation()); - port->SignalPortComplete.connect(this, - &FakePortAllocatorSession::OnPortComplete); - port->PrepareAddress(); - ready_ports_.push_back(port); - SignalPortReady(this, port); - port->KeepAliveUntilPruned(); - } - void OnPortComplete(cricket::Port* port) { - const std::vector& candidates = port->Candidates(); - candidates_.insert(candidates_.end(), candidates.begin(), candidates.end()); - SignalCandidatesReady(this, candidates); - - allocation_done_ = true; - SignalCandidatesAllocationDone(this); - } - void OnPortDestroyed(cricket::PortInterface* port) { - // Don't want to double-delete port if it deletes itself. 
- port_.release(); - } - - rtc::Thread* network_thread_; - rtc::PacketSocketFactory* factory_; - rtc::Network ipv4_network_; - rtc::Network ipv6_network_; - std::unique_ptr port_; - int port_config_count_; - std::vector candidates_; - std::vector ready_ports_; - bool allocation_done_ = false; - bool is_cleared = false; - ServerAddresses stun_servers_; - std::vector turn_servers_; - uint32_t candidate_filter_ = CF_ALL; - int transport_info_update_count_ = 0; - bool running_ = false; - const webrtc::FieldTrialsView* field_trials_; -}; - -class FakePortAllocator : public cricket::PortAllocator { - public: - FakePortAllocator(rtc::Thread* network_thread, - rtc::PacketSocketFactory* factory, - webrtc::FieldTrialsView* field_trials) - : FakePortAllocator(network_thread, factory, nullptr, field_trials) {} - - FakePortAllocator(rtc::Thread* network_thread, - std::unique_ptr factory, - webrtc::FieldTrialsView* field_trials) - : FakePortAllocator(network_thread, - nullptr, - std::move(factory), - field_trials) {} - - void SetNetworkIgnoreMask(int network_ignore_mask) override {} - - cricket::PortAllocatorSession* CreateSessionInternal( - absl::string_view content_name, - int component, - absl::string_view ice_ufrag, - absl::string_view ice_pwd) override { - return new FakePortAllocatorSession( - this, network_thread_, factory_.get(), std::string(content_name), - component, std::string(ice_ufrag), std::string(ice_pwd), field_trials_); - } - - bool initialized() const { return initialized_; } - - // For testing: Manipulate MdnsObfuscationEnabled() - bool MdnsObfuscationEnabled() const override { - return mdns_obfuscation_enabled_; - } - void SetMdnsObfuscationEnabledForTesting(bool enabled) { - mdns_obfuscation_enabled_ = enabled; - } - - private: - FakePortAllocator(rtc::Thread* network_thread, - rtc::PacketSocketFactory* factory, - std::unique_ptr owned_factory, - webrtc::FieldTrialsView* field_trials) - : network_thread_(network_thread), - factory_(std::move(owned_factory), factory), - field_trials_(field_trials) { - if (network_thread_ == nullptr) { - network_thread_ = rtc::Thread::Current(); - Initialize(); - return; - } - SendTask(network_thread_, [this] { Initialize(); }); - } - - rtc::Thread* network_thread_; - const webrtc::AlwaysValidPointerNoDefault factory_; - const webrtc::FieldTrialsView* field_trials_; - bool mdns_obfuscation_enabled_ = false; -}; - -} // namespace cricket +// TODO(jonaso): Remove! +#include "p2p/test/fake_port_allocator.h" #endif // P2P_BASE_FAKE_PORT_ALLOCATOR_H_ diff --git a/p2p/base/ice_agent_interface.h b/p2p/base/ice_agent_interface.h index 30b6ade6e6..faf28d76a7 100644 --- a/p2p/base/ice_agent_interface.h +++ b/p2p/base/ice_agent_interface.h @@ -11,11 +11,14 @@ #ifndef P2P_BASE_ICE_AGENT_INTERFACE_H_ #define P2P_BASE_ICE_AGENT_INTERFACE_H_ +#include + #include "api/array_view.h" #include "p2p/base/connection.h" #include "p2p/base/ice_switch_reason.h" +#include "p2p/base/transport_description.h" -namespace cricket { +namespace webrtc { // IceAgentInterface provides methods that allow an ICE controller to manipulate // the connections available to a transport, and used by the transport to @@ -60,7 +63,7 @@ class IceAgentInterface { // // SignalStateChange will not be triggered. virtual void ForgetLearnedStateForConnections( - rtc::ArrayView connections) = 0; + ArrayView connections) = 0; // Send a STUN ping request for the given connection. 
virtual void SendPingRequest(const Connection* connection) = 0; @@ -72,9 +75,17 @@ class IceAgentInterface { // Prune away the given connections. Returns true if pruning is permitted and // successfully performed. virtual bool PruneConnections( - rtc::ArrayView connections) = 0; + ArrayView connections) = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::IceAgentInterface; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_ICE_AGENT_INTERFACE_H_ diff --git a/p2p/base/ice_controller_factory_interface.h b/p2p/base/ice_controller_factory_interface.h index bae8b8f19d..958d3223da 100644 --- a/p2p/base/ice_controller_factory_interface.h +++ b/p2p/base/ice_controller_factory_interface.h @@ -11,19 +11,22 @@ #ifndef P2P_BASE_ICE_CONTROLLER_FACTORY_INTERFACE_H_ #define P2P_BASE_ICE_CONTROLLER_FACTORY_INTERFACE_H_ +#include #include #include +#include "p2p/base/connection.h" #include "p2p/base/ice_controller_interface.h" #include "p2p/base/ice_transport_internal.h" +#include "p2p/base/transport_description.h" -namespace cricket { +namespace webrtc { // struct with arguments to IceControllerFactoryInterface::Create struct IceControllerFactoryArgs { - std::function ice_transport_state_func; + std::function ice_transport_state_func; std::function ice_role_func; - std::function is_connection_pruned_func; + std::function is_connection_pruned_func; const IceFieldTrials* ice_field_trials; std::string ice_controller_field_trials; }; @@ -35,6 +38,15 @@ class IceControllerFactoryInterface { const IceControllerFactoryArgs& args) = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
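// Standalone sketch (hypothetical FooTransport name) of the migration pattern
// this patch applies to the headers below: the type now lives in ::webrtc and
// a macro-guarded using-declaration keeps the legacy ::cricket spelling
// compiling until downstream call sites have been updated.
namespace webrtc {
class FooTransport {};
}  // namespace webrtc

#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES
namespace cricket {
using ::webrtc::FooTransport;  // Old code still compiles as cricket::FooTransport.
}  // namespace cricket
#endif  // WEBRTC_ALLOW_DEPRECATED_NAMESPACES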
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::IceControllerFactoryArgs; +using ::webrtc::IceControllerFactoryInterface; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_ICE_CONTROLLER_FACTORY_INTERFACE_H_ diff --git a/p2p/base/ice_controller_interface.cc b/p2p/base/ice_controller_interface.cc index 9fb3b055f9..9c8ad0bfbd 100644 --- a/p2p/base/ice_controller_interface.cc +++ b/p2p/base/ice_controller_interface.cc @@ -14,14 +14,14 @@ #include "p2p/base/ice_switch_reason.h" -namespace cricket { +namespace webrtc { std::string IceRecheckEvent::ToString() const { - std::string str = IceSwitchReasonToString(reason); + std::string str = webrtc::IceSwitchReasonToString(reason); if (recheck_delay_ms) { str += " (after delay: " + std::to_string(recheck_delay_ms) + ")"; } return str; } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/ice_controller_interface.h b/p2p/base/ice_controller_interface.h index 8b63ed3fc3..a082d22606 100644 --- a/p2p/base/ice_controller_interface.h +++ b/p2p/base/ice_controller_interface.h @@ -11,19 +11,23 @@ #ifndef P2P_BASE_ICE_CONTROLLER_INTERFACE_H_ #define P2P_BASE_ICE_CONTROLLER_INTERFACE_H_ +#include +#include #include -#include #include -#include "absl/types/optional.h" +#include "api/array_view.h" #include "p2p/base/connection.h" #include "p2p/base/ice_switch_reason.h" #include "p2p/base/ice_transport_internal.h" +#include "p2p/base/p2p_transport_channel_ice_field_trials.h" +#include "p2p/base/transport_description.h" +#include "rtc_base/checks.h" #include "rtc_base/system/rtc_export.h" -namespace cricket { +namespace webrtc { -struct IceFieldTrials; // Forward declaration to avoid circular dependency. +// Forward declaration to avoid circular dependency. struct RTC_EXPORT IceRecheckEvent { IceRecheckEvent(IceSwitchReason _reason, int _recheck_delay_ms) @@ -53,7 +57,7 @@ struct RTC_EXPORT IceRecheckEvent { // Connection::ForgetLearnedState - return in SwitchResult // // The IceController shall keep track of all connections added -// (and not destroyed) and give them back using the connections()-function- +// (and not destroyed) and give them back using the GetConnections() function. // // When a Connection gets destroyed // - signals on Connection::SignalDestroyed @@ -63,10 +67,10 @@ class IceControllerInterface { // This represents the result of a switch call. struct SwitchResult { // Connection that we should (optionally) switch to. - absl::optional connection; + std::optional connection; // An optional recheck event for when a Switch() should be attempted again. - absl::optional recheck_event; + std::optional recheck_event; // A vector with connection to run ForgetLearnedState on. std::vector connections_to_forget_state_on; @@ -75,12 +79,12 @@ class IceControllerInterface { // This represents the result of a call to SelectConnectionToPing. struct PingResult { PingResult(const Connection* conn, int _recheck_delay_ms) - : connection(conn ? absl::optional(conn) - : absl::nullopt), + : connection(conn ? std::optional(conn) + : std::nullopt), recheck_delay_ms(_recheck_delay_ms) {} // Connection that we should (optionally) ping. - const absl::optional connection; + const std::optional connection; // The delay before P2PTransportChannel shall call SelectConnectionToPing() // again. @@ -101,7 +105,17 @@ class IceControllerInterface { virtual void OnConnectionDestroyed(const Connection* connection) = 0; // These are all connections that has been added and not destroyed. 
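// Standalone sketch (hypothetical names; assert() stands in for
// RTC_CHECK_NOTREACHED) of the rename strategy used for the accessor below:
// both the new GetConnections()-style name and the legacy name get a non-pure
// default that aborts if ever called, so concrete implementations can migrate
// from the old spelling to the new one without a lockstep update.
#include <cassert>
#include <vector>

struct Item {};

class ControllerBase {
 public:
  virtual ~ControllerBase() = default;
  virtual std::vector<const Item*> GetItems() const {  // New name.
    assert(false && "implementations should override GetItems()");
    return {};
  }
  virtual std::vector<const Item*> items() const {  // Legacy name, to be removed.
    assert(false && "implementations should override GetItems()");
    return {};
  }
};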
- virtual rtc::ArrayView connections() const = 0; + virtual ArrayView GetConnections() const { + // Stub implementation to simplify downstream roll. + RTC_CHECK_NOTREACHED(); + return {}; + } + // TODO(bugs.webrtc.org/15702): Remove this after downstream is cleaned up. + virtual ArrayView connections() const { + // Stub implementation to simplify downstream removal. + RTC_CHECK_NOTREACHED(); + return {}; + } // Is there a pingable connection ? // This function is used to boot-strap pinging, after this returns true @@ -134,6 +148,15 @@ class IceControllerInterface { virtual std::vector PruneConnections() = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::IceControllerInterface; +using ::webrtc::IceRecheckEvent; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_ICE_CONTROLLER_INTERFACE_H_ diff --git a/p2p/base/ice_credentials_iterator.cc b/p2p/base/ice_credentials_iterator.cc index 373c8510ac..1dc7d41762 100644 --- a/p2p/base/ice_credentials_iterator.cc +++ b/p2p/base/ice_credentials_iterator.cc @@ -10,10 +10,13 @@ #include "p2p/base/ice_credentials_iterator.h" +#include + #include "p2p/base/p2p_constants.h" -#include "rtc_base/helpers.h" +#include "p2p/base/transport_description.h" +#include "rtc_base/crypto_random.h" -namespace cricket { +namespace webrtc { IceCredentialsIterator::IceCredentialsIterator( const std::vector& pooled_credentials) @@ -22,8 +25,9 @@ IceCredentialsIterator::IceCredentialsIterator( IceCredentialsIterator::~IceCredentialsIterator() = default; IceParameters IceCredentialsIterator::CreateRandomIceCredentials() { - return IceParameters(rtc::CreateRandomString(ICE_UFRAG_LENGTH), - rtc::CreateRandomString(ICE_PWD_LENGTH), false); + return IceParameters(webrtc::CreateRandomString(webrtc::ICE_UFRAG_LENGTH), + webrtc::CreateRandomString(webrtc::ICE_PWD_LENGTH), + false); } IceParameters IceCredentialsIterator::GetIceCredentials() { @@ -35,4 +39,4 @@ IceParameters IceCredentialsIterator::GetIceCredentials() { return credentials; } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/ice_credentials_iterator.h b/p2p/base/ice_credentials_iterator.h index fa331cc6eb..5bab55fc62 100644 --- a/p2p/base/ice_credentials_iterator.h +++ b/p2p/base/ice_credentials_iterator.h @@ -15,7 +15,7 @@ #include "p2p/base/transport_description.h" -namespace cricket { +namespace webrtc { class IceCredentialsIterator { public: @@ -32,6 +32,14 @@ class IceCredentialsIterator { std::vector pooled_ice_credentials_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::IceCredentialsIterator; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_ICE_CREDENTIALS_ITERATOR_H_ diff --git a/p2p/base/ice_credentials_iterator_unittest.cc b/p2p/base/ice_credentials_iterator_unittest.cc index 470efe3e45..2f09b47a30 100644 --- a/p2p/base/ice_credentials_iterator_unittest.cc +++ b/p2p/base/ice_credentials_iterator_unittest.cc @@ -12,10 +12,11 @@ #include +#include "p2p/base/transport_description.h" #include "test/gtest.h" -using cricket::IceCredentialsIterator; -using cricket::IceParameters; +using ::webrtc::IceCredentialsIterator; +using ::webrtc::IceParameters; TEST(IceCredentialsIteratorTest, GetEmpty) { std::vector empty; diff --git a/p2p/base/ice_switch_reason.cc b/p2p/base/ice_switch_reason.cc index 67fe335c4f..3f641f17f5 100644 --- a/p2p/base/ice_switch_reason.cc +++ b/p2p/base/ice_switch_reason.cc @@ -12,7 +12,7 @@ #include -namespace cricket { +namespace webrtc { std::string IceSwitchReasonToString(IceSwitchReason reason) { switch (reason) { @@ -44,4 +44,4 @@ std::string IceSwitchReasonToString(IceSwitchReason reason) { } } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/ice_switch_reason.h b/p2p/base/ice_switch_reason.h index 669e5dc9c3..fec3c99f84 100644 --- a/p2p/base/ice_switch_reason.h +++ b/p2p/base/ice_switch_reason.h @@ -15,7 +15,7 @@ #include "rtc_base/system/rtc_export.h" -namespace cricket { +namespace webrtc { enum class IceSwitchReason { UNKNOWN, @@ -38,6 +38,15 @@ enum class IceSwitchReason { RTC_EXPORT std::string IceSwitchReasonToString(IceSwitchReason reason); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::IceSwitchReason; +using ::webrtc::IceSwitchReasonToString; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_ICE_SWITCH_REASON_H_ diff --git a/p2p/base/ice_transport_internal.cc b/p2p/base/ice_transport_internal.cc index fab6f2037a..601da19971 100644 --- a/p2p/base/ice_transport_internal.cc +++ b/p2p/base/ice_transport_internal.cc @@ -10,10 +10,49 @@ #include "p2p/base/ice_transport_internal.h" +#include +#include +#include + +#include "absl/functional/any_invocable.h" #include "absl/strings/string_view.h" +#include "api/candidate.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" #include "p2p/base/p2p_constants.h" +#include "p2p/base/port.h" +#include "p2p/base/transport_description.h" +#include "rtc_base/checks.h" +#include "rtc_base/net_helper.h" + +namespace webrtc { +namespace { + +// RTCConfiguration uses kUndefined (-1) to indicate unset optional parameters. 
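// Standalone sketch (standard library only, hypothetical names and default
// value) of the convention introduced below and consumed by the
// *_or_default() getters: a -1 sentinel in the source struct becomes
// std::nullopt, and readers recover a default with value_or() instead of
// comparing against -1 at every call site.
#include <optional>

constexpr int kUndefinedSentinel = -1;   // Stand-in for RTCConfiguration::kUndefined.
constexpr int kDefaultTimeoutMs = 2500;  // Illustrative default only.

std::optional<int> FromSentinel(int value) {
  return value == kUndefinedSentinel ? std::nullopt : std::optional<int>(value);
}

int TimeoutOrDefault(std::optional<int> timeout_ms) {
  return timeout_ms.value_or(kDefaultTimeoutMs);
}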
+std::optional RTCConfigurationToIceConfigOptionalInt( + int rtc_configuration_parameter) { + if (rtc_configuration_parameter == + PeerConnectionInterface::RTCConfiguration::kUndefined) { + return std::nullopt; + } + return rtc_configuration_parameter; +} + +ContinualGatheringPolicy GetContinualGatheringPolicy( + const PeerConnectionInterface::RTCConfiguration& config) { + switch (config.continual_gathering_policy) { + case PeerConnectionInterface::GATHER_ONCE: + return GATHER_ONCE; + case PeerConnectionInterface::GATHER_CONTINUALLY: + return GATHER_CONTINUALLY; + default: + break; + } + RTC_DCHECK_NOTREACHED(); + return GATHER_ONCE; +} -namespace cricket { +} // namespace using webrtc::RTCError; using webrtc::RTCErrorType; @@ -27,8 +66,8 @@ RTCError VerifyCandidate(const Candidate& cand) { // Disallow all ports below 1024, except for 80 and 443 on public addresses. int port = cand.address().port(); - if (cand.protocol() == cricket::TCP_PROTOCOL_NAME && - (cand.tcptype() == cricket::TCPTYPE_ACTIVE_STR || port == 0)) { + if (cand.protocol() == TCP_PROTOCOL_NAME && + (cand.tcptype() == TCPTYPE_ACTIVE_STR || port == 0)) { // Expected for active-only candidates per // http://tools.ietf.org/html/rfc6544#section-4.5 so no error. // Libjingle clients emit port 0, in "active" mode. @@ -81,6 +120,31 @@ IceConfig::IceConfig(int receiving_timeout_ms, regather_on_failed_networks_interval_ms), receiving_switching_delay(receiving_switching_delay_ms) {} +IceConfig::IceConfig(const PeerConnectionInterface::RTCConfiguration& config) + : receiving_timeout(RTCConfigurationToIceConfigOptionalInt( + config.ice_connection_receiving_timeout)), + backup_connection_ping_interval(RTCConfigurationToIceConfigOptionalInt( + config.ice_backup_candidate_pair_ping_interval)), + continual_gathering_policy(GetContinualGatheringPolicy(config)), + prioritize_most_likely_candidate_pairs( + config.prioritize_most_likely_ice_candidate_pairs), + stable_writable_connection_ping_interval( + config.stable_writable_connection_ping_interval_ms), + presume_writable_when_fully_relayed( + config.presume_writable_when_fully_relayed), + surface_ice_candidates_on_ice_transport_type_changed( + config.surface_ice_candidates_on_ice_transport_type_changed), + ice_check_interval_strong_connectivity( + config.ice_check_interval_strong_connectivity), + ice_check_interval_weak_connectivity( + config.ice_check_interval_weak_connectivity), + ice_check_min_interval(config.ice_check_min_interval), + ice_unwritable_timeout(config.ice_unwritable_timeout), + ice_unwritable_min_checks(config.ice_unwritable_min_checks), + ice_inactive_timeout(config.ice_inactive_timeout), + stun_keepalive_interval(config.stun_candidate_keepalive_interval), + network_preference(config.network_preference) {} + IceConfig::~IceConfig() = default; int IceConfig::receiving_timeout_or_default() const { @@ -123,7 +187,49 @@ int IceConfig::stun_keepalive_interval_or_default() const { return stun_keepalive_interval.value_or(STUN_KEEPALIVE_INTERVAL); } -IceTransportInternal::IceTransportInternal() = default; +RTCError IceConfig::IsValid() const { + if (ice_check_interval_strong_connectivity_or_default() < + ice_check_interval_weak_connectivity.value_or(WEAK_PING_INTERVAL)) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Ping interval of candidate pairs is shorter when ICE is " + "strongly connected than that when ICE is weakly " + "connected"); + } + + if (receiving_timeout_or_default() < + std::max(ice_check_interval_strong_connectivity_or_default(), + 
ice_check_min_interval_or_default())) { + return RTCError( + RTCErrorType::INVALID_PARAMETER, + "Receiving timeout is shorter than the minimal ping interval."); + } + + if (backup_connection_ping_interval_or_default() < + ice_check_interval_strong_connectivity_or_default()) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Ping interval of backup candidate pairs is shorter than " + "that of general candidate pairs when ICE is strongly " + "connected"); + } + + if (stable_writable_connection_ping_interval_or_default() < + ice_check_interval_strong_connectivity_or_default()) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Ping interval of stable and writable candidate pairs is " + "shorter than that of general candidate pairs when ICE is " + "strongly connected"); + } + + if (ice_unwritable_timeout_or_default() > ice_inactive_timeout_or_default()) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "The timeout period for the writability state to become " + "UNRELIABLE is longer than that to become TIMEOUT."); + } + + return RTCError::OK(); +} + +IceTransportInternal::IceTransportInternal() {} IceTransportInternal::~IceTransportInternal() = default; @@ -137,4 +243,14 @@ void IceTransportInternal::SetRemoteIceCredentials(absl::string_view ice_ufrag, SetRemoteIceParameters(IceParameters(ice_ufrag, ice_pwd, false)); } -} // namespace cricket +void IceTransportInternal::AddGatheringStateCallback( + const void* removal_tag, + absl::AnyInvocable callback) { + gathering_state_callback_list_.AddReceiver(removal_tag, std::move(callback)); +} +void IceTransportInternal::RemoveGatheringStateCallback( + const void* removal_tag) { + gathering_state_callback_list_.RemoveReceivers(removal_tag); +} + +} // namespace webrtc diff --git a/p2p/base/ice_transport_internal.h b/p2p/base/ice_transport_internal.h index 98deb492b0..507660519e 100644 --- a/p2p/base/ice_transport_internal.h +++ b/p2p/base/ice_transport_internal.h @@ -13,26 +13,52 @@ #include +#include +#include #include #include #include +#include "absl/functional/any_invocable.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/array_view.h" #include "api/candidate.h" +#include "api/field_trials_view.h" +#include "api/peer_connection_interface.h" #include "api/rtc_error.h" #include "api/transport/enums.h" +#include "p2p/base/candidate_pair_interface.h" #include "p2p/base/connection.h" +#include "p2p/base/connection_info.h" #include "p2p/base/packet_transport_internal.h" #include "p2p/base/port.h" #include "p2p/base/stun_dictionary.h" #include "p2p/base/transport_description.h" +#include "p2p/dtls/dtls_stun_piggyback_callbacks.h" +#include "rtc_base/callback_list.h" +#include "rtc_base/checks.h" #include "rtc_base/network_constants.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/time_utils.h" -namespace cricket { +namespace webrtc { + +// TODO(zhihuang): Replace this with +// PeerConnectionInterface::IceConnectionState. 
+enum class IceTransportStateInternal { + STATE_INIT, + STATE_CONNECTING, // Will enter this state once a connection is created + STATE_COMPLETED, + STATE_FAILED +}; + +enum IceConnectionState { + kIceConnectionConnecting = 0, + kIceConnectionFailed, + kIceConnectionConnected, // Writable, but still checking one or more + // connections + kIceConnectionCompleted, +}; struct IceTransportStats { CandidateStatsList candidate_stats_list; @@ -53,19 +79,11 @@ struct IceTransportStats { IceRole ice_role = ICEROLE_UNKNOWN; std::string ice_local_username_fragment; - webrtc::IceTransportState ice_state = webrtc::IceTransportState::kNew; + IceTransportState ice_state = IceTransportState::kNew; }; typedef std::vector Candidates; -enum IceConnectionState { - kIceConnectionConnecting = 0, - kIceConnectionFailed, - kIceConnectionConnected, // Writable, but still checking one or more - // connections - kIceConnectionCompleted, -}; - // TODO(deadbeef): Unify with PeerConnectionInterface::IceConnectionState // once /talk/ and /webrtc/ are combined, and also switch to ENUM_NAME naming // style. @@ -95,22 +113,21 @@ enum class NominationMode { // and contain valid values. If conditions are not met, an RTCError with the // appropriated error number and description is returned. If the configuration // is valid RTCError::OK() is returned. -webrtc::RTCError VerifyCandidate(const Candidate& cand); +RTCError VerifyCandidate(const Candidate& cand); -// Runs through a list of cricket::Candidate instances and calls VerifyCandidate +// Runs through a list of webrtc::Candidate instances and calls VerifyCandidate // for each one, stopping on the first error encounted and returning that error // value if so. On success returns RTCError::OK(). -webrtc::RTCError VerifyCandidates(const Candidates& candidates); +RTCError VerifyCandidates(const Candidates& candidates); // Information about ICE configuration. -// TODO(deadbeef): Use absl::optional to represent unset values, instead of -// -1. -struct IceConfig { +// TODO(bugs.webrtc.org/15609): Define a public API for this. +struct RTC_EXPORT IceConfig { // The ICE connection receiving timeout value in milliseconds. - absl::optional receiving_timeout; + std::optional receiving_timeout; // Time interval in milliseconds to ping a backup connection when the ICE // channel is strongly connected. - absl::optional backup_connection_ping_interval; + std::optional backup_connection_ping_interval; ContinualGatheringPolicy continual_gathering_policy = GATHER_ONCE; @@ -123,7 +140,7 @@ struct IceConfig { bool prioritize_most_likely_candidate_pairs = false; // Writable connections are pinged at a slower rate once stablized. - absl::optional stable_writable_connection_ping_interval; + std::optional stable_writable_connection_ping_interval; // If set to true, this means the ICE transport should presume TURN-to-TURN // candidate pairs will succeed, even before a binding response is received. @@ -138,12 +155,12 @@ struct IceConfig { // Interval to check on all networks and to perform ICE regathering on any // active network having no connection on it. - absl::optional regather_on_failed_networks_interval; + std::optional regather_on_failed_networks_interval; // The time period in which we will not switch the selected connection // when a new connection becomes receiving but the selected connection is not // in case that the selected connection may become receiving soon. 
- absl::optional receiving_switching_delay; + std::optional receiving_switching_delay; // TODO(honghaiz): Change the default to regular nomination. // Default nomination mode if the remote does not support renomination. @@ -153,12 +170,12 @@ struct IceConfig { // for a candidate pair when it is both writable and receiving (strong // connectivity). This parameter overrides the default value given by // `STRONG_PING_INTERVAL` in p2ptransport.h if set. - absl::optional ice_check_interval_strong_connectivity; + std::optional ice_check_interval_strong_connectivity; // The interval in milliseconds at which ICE checks (STUN pings) will be sent // for a candidate pair when it is either not writable or not receiving (weak // connectivity). This parameter overrides the default value given by // `WEAK_PING_INTERVAL` in p2ptransport.h if set. - absl::optional ice_check_interval_weak_connectivity; + std::optional ice_check_interval_weak_connectivity; // ICE checks (STUN pings) will not be sent at higher rate (lower interval) // than this, no matter what other settings there are. // Measure in milliseconds. @@ -166,32 +183,35 @@ struct IceConfig { // Note that this parameter overrides both the above check intervals for // candidate pairs with strong or weak connectivity, if either of the above // interval is shorter than the min interval. - absl::optional ice_check_min_interval; + std::optional ice_check_min_interval; // The min time period for which a candidate pair must wait for response to // connectivity checks before it becomes unwritable. This parameter // overrides the default value given by `CONNECTION_WRITE_CONNECT_TIMEOUT` // in port.h if set, when determining the writability of a candidate pair. - absl::optional ice_unwritable_timeout; + std::optional ice_unwritable_timeout; // The min number of connectivity checks that a candidate pair must sent // without receiving response before it becomes unwritable. This parameter // overrides the default value given by `CONNECTION_WRITE_CONNECT_FAILURES` in // port.h if set, when determining the writability of a candidate pair. - absl::optional ice_unwritable_min_checks; + std::optional ice_unwritable_min_checks; // The min time period for which a candidate pair must wait for response to // connectivity checks it becomes inactive. This parameter overrides the // default value given by `CONNECTION_WRITE_TIMEOUT` in port.h if set, when // determining the writability of a candidate pair. - absl::optional ice_inactive_timeout; + std::optional ice_inactive_timeout; // The interval in milliseconds at which STUN candidates will resend STUN // binding requests to keep NAT bindings open. - absl::optional stun_keepalive_interval; + std::optional stun_keepalive_interval; + + std::optional network_preference; - absl::optional network_preference; + VpnPreference vpn_preference = VpnPreference::kDefault; - webrtc::VpnPreference vpn_preference = webrtc::VpnPreference::kDefault; + // Experimental feature to transport the DTLS handshake in STUN packets. + bool dtls_handshake_in_stun = false; IceConfig(); IceConfig(int receiving_timeout_ms, @@ -202,11 +222,18 @@ struct IceConfig { bool presume_writable_when_fully_relayed, int regather_on_failed_networks_interval_ms, int receiving_switching_delay_ms); + // Construct an IceConfig object from an RTCConfiguration object. + // This will check the `config` settings and set the associated IceConfig + // member properties. 
+ explicit IceConfig(const PeerConnectionInterface::RTCConfiguration& config); ~IceConfig(); + // Checks if the current configuration values are consistent. + RTCError IsValid() const; + // Helper getters for parameters with implementation-specific default value. // By convention, parameters with default value are represented by - // absl::optional and setting a parameter to null restores its default value. + // std::optional and setting a parameter to null restores its default value. int receiving_timeout_or_default() const; int backup_connection_ping_interval_or_default() const; int stable_writable_connection_ping_interval_or_default() const; @@ -221,28 +248,21 @@ struct IceConfig { int stun_keepalive_interval_or_default() const; }; -// TODO(zhihuang): Replace this with -// PeerConnectionInterface::IceConnectionState. -enum class IceTransportState { - STATE_INIT, - STATE_CONNECTING, // Will enter this state once a connection is created - STATE_COMPLETED, - STATE_FAILED -}; - // IceTransportInternal is an internal abstract class that does ICE. // Once the public interface is supported, // (https://www.w3.org/TR/webrtc/#rtcicetransport) // the IceTransportInterface will be split from this class. -class RTC_EXPORT IceTransportInternal : public rtc::PacketTransportInternal { +// +// TODO(bugs.webrtc.org/15609): Define a public API for this. +class RTC_EXPORT IceTransportInternal : public PacketTransportInternal { public: IceTransportInternal(); ~IceTransportInternal() override; // TODO(bugs.webrtc.org/9308): Remove GetState once all uses have been // migrated to GetIceTransportState. - virtual IceTransportState GetState() const = 0; - virtual webrtc::IceTransportState GetIceTransportState() const = 0; + virtual IceTransportStateInternal GetState() const = 0; + virtual IceTransportState GetIceTransportState() const = 0; virtual int component() const = 0; @@ -250,7 +270,12 @@ class RTC_EXPORT IceTransportInternal : public rtc::PacketTransportInternal { virtual void SetIceRole(IceRole role) = 0; - virtual void SetIceTiebreaker(uint64_t tiebreaker) = 0; + // Default implementation in order to allow downstream usage deletion. + // TODO: bugs.webrtc.org/42224914 - Remove when all downstream overrides are + // gone. + virtual void SetIceTiebreaker(uint64_t /* tiebreaker */) { + RTC_CHECK_NOTREACHED(); + } virtual void SetIceCredentials(absl::string_view ice_ufrag, absl::string_view ice_pwd); @@ -258,6 +283,18 @@ class RTC_EXPORT IceTransportInternal : public rtc::PacketTransportInternal { virtual void SetRemoteIceCredentials(absl::string_view ice_ufrag, absl::string_view ice_pwd); + // TODO: bugs.webrtc.org/367395350 - Make virtual when all downstream + // overrides are gone. + // Returns the current local ICE parameters. + virtual const IceParameters* local_ice_parameters() const { + RTC_CHECK_NOTREACHED(); + } + // Returns the latest remote ICE parameters or nullptr if there are no remote + // ICE parameters yet. + virtual const IceParameters* remote_ice_parameters() const { + RTC_CHECK_NOTREACHED(); + } + // The ufrag and pwd in `ice_params` must be set // before candidate gathering can start. virtual void SetIceParameters(const IceParameters& ice_params) = 0; @@ -267,6 +304,10 @@ class RTC_EXPORT IceTransportInternal : public rtc::PacketTransportInternal { virtual void SetRemoteIceMode(IceMode mode) = 0; virtual void SetIceConfig(const IceConfig& config) = 0; + // Default implementation in order to allow downstream usage deletion. 
+ // TODO: bugs.webrtc.org/367395350 - Make virtual when all downstream + // overrides are gone. + virtual const IceConfig& config() const { RTC_CHECK_NOTREACHED(); } // Start gathering candidates if not already started, or if an ICE restart // occurred. @@ -284,33 +325,45 @@ class RTC_EXPORT IceTransportInternal : public rtc::PacketTransportInternal { virtual bool GetStats(IceTransportStats* ice_transport_stats) = 0; // Returns RTT estimate over the currently active connection, or an empty - // absl::optional if there is none. - virtual absl::optional GetRttEstimate() = 0; + // std::optional if there is none. + virtual std::optional GetRttEstimate() = 0; // TODO(qingsi): Remove this method once Chrome does not depend on it anymore. virtual const Connection* selected_connection() const = 0; - // Returns the selected candidate pair, or an empty absl::optional if there is + // Returns the selected candidate pair, or an empty std::optional if there is // none. - virtual absl::optional GetSelectedCandidatePair() + virtual std::optional GetSelectedCandidatePair() const = 0; - virtual absl::optional> + virtual std::optional> GetDictionaryWriter() { - return absl::nullopt; + return std::nullopt; } - sigslot::signal1 SignalGatheringState; + void AddGatheringStateCallback( + const void* removal_tag, + absl::AnyInvocable callback); + void RemoveGatheringStateCallback(const void* removal_tag); // Handles sending and receiving of candidates. sigslot::signal2 SignalCandidateGathered; - sigslot::signal2 - SignalCandidateError; + void SetCandidateErrorCallback( + absl::AnyInvocable + callback) { + RTC_DCHECK(!candidate_error_callback_); + candidate_error_callback_ = std::move(callback); + } - sigslot::signal2 - SignalCandidatesRemoved; + void SetCandidatesRemovedCallback( + absl::AnyInvocable + callback) { + RTC_DCHECK(!candidates_removed_callback_); + candidates_removed_callback_ = std::move(callback); + } // Deprecated by PacketTransportInternal::SignalNetworkRouteChanged. // This signal occurs when there is a change in the way that packets are @@ -320,8 +373,12 @@ class RTC_EXPORT IceTransportInternal : public rtc::PacketTransportInternal { // SignalNetworkRouteChanged. sigslot::signal2 SignalRouteChange; - sigslot::signal1 - SignalCandidatePairChanged; + void SetCandidatePairChangeCallback( + absl::AnyInvocable + callback) { + RTC_DCHECK(!candidate_pair_change_callback_); + candidate_pair_change_callback_ = std::move(callback); + } // Invoked when there is conflict in the ICE role between local and remote // agents. @@ -361,15 +418,63 @@ class RTC_EXPORT IceTransportInternal : public rtc::PacketTransportInternal { dictionary_writer_synced_callback_list_.RemoveReceivers(tag); } + virtual const FieldTrialsView* field_trials() const { return nullptr; } + + virtual void ResetDtlsStunPiggybackCallbacks() {} + virtual void SetDtlsStunPiggybackCallbacks( + DtlsStunPiggybackCallbacks&& callbacks) {} + protected: - webrtc::CallbackList> + void SendGatheringStateEvent() { gathering_state_callback_list_.Send(this); } + + CallbackList> dictionary_view_updated_callback_list_; - webrtc::CallbackList + CallbackList dictionary_writer_synced_callback_list_; + + CallbackList gathering_state_callback_list_; + + absl::AnyInvocable + candidate_error_callback_; + + absl::AnyInvocable + candidates_removed_callback_; + + absl::AnyInvocable + candidate_pair_change_callback_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. 
+// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::Candidates; +using ::webrtc::ContinualGatheringPolicy; +using ::webrtc::GATHER_CONTINUALLY; +using ::webrtc::GATHER_ONCE; +using ::webrtc::IceConfig; +using ::webrtc::IceConnectionState; +using ::webrtc::IceGatheringState; +using ::webrtc::IceTransportInternal; +using ::webrtc::IceTransportStats; +using ::webrtc::kIceConnectionCompleted; +using ::webrtc::kIceConnectionConnected; +using ::webrtc::kIceConnectionConnecting; +using ::webrtc::kIceConnectionFailed; +using ::webrtc::kIceGatheringComplete; +using ::webrtc::kIceGatheringGathering; +using ::webrtc::kIceGatheringNew; +using ::webrtc::NominationMode; +using ::webrtc::VerifyCandidate; +using ::webrtc::VerifyCandidates; + +using IceTransportState = ::webrtc::IceTransportStateInternal; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_ICE_TRANSPORT_INTERNAL_H_ diff --git a/p2p/base/mock_async_resolver.h b/p2p/base/mock_async_resolver.h deleted file mode 100644 index 44164716b2..0000000000 --- a/p2p/base/mock_async_resolver.h +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef P2P_BASE_MOCK_ASYNC_RESOLVER_H_ -#define P2P_BASE_MOCK_ASYNC_RESOLVER_H_ - -#include "api/async_resolver_factory.h" -#include "rtc_base/async_resolver_interface.h" -#include "test/gmock.h" - -namespace rtc { - -using ::testing::_; -using ::testing::InvokeWithoutArgs; - -class MockAsyncResolver : public AsyncResolverInterface { - public: - MockAsyncResolver() { - ON_CALL(*this, Start(_)).WillByDefault(InvokeWithoutArgs([this] { - SignalDone(this); - })); - } - ~MockAsyncResolver() = default; - - MOCK_METHOD(void, Start, (const rtc::SocketAddress&), (override)); - MOCK_METHOD(void, Start, (const rtc::SocketAddress&, int family), (override)); - MOCK_METHOD(bool, - GetResolvedAddress, - (int family, SocketAddress* addr), - (const, override)); - MOCK_METHOD(int, GetError, (), (const, override)); - - // Note that this won't delete the object like AsyncResolverInterface says in - // order to avoid sanitizer failures caused by this being a synchronous - // implementation. The test code should delete the object instead. - MOCK_METHOD(void, Destroy, (bool), (override)); -}; - -} // namespace rtc - -namespace webrtc { - -class MockAsyncResolverFactory : public AsyncResolverFactory { - public: - MOCK_METHOD(rtc::AsyncResolverInterface*, Create, (), (override)); -}; - -} // namespace webrtc - -#endif // P2P_BASE_MOCK_ASYNC_RESOLVER_H_ diff --git a/p2p/base/mock_ice_agent.h b/p2p/base/mock_ice_agent.h index a1c0ebffbf..83cd42898c 100644 --- a/p2p/base/mock_ice_agent.h +++ b/p2p/base/mock_ice_agent.h @@ -1,5 +1,5 @@ /* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * Copyright 2025 The WebRTC Project Authors. All rights reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -11,40 +11,6 @@ #ifndef P2P_BASE_MOCK_ICE_AGENT_H_ #define P2P_BASE_MOCK_ICE_AGENT_H_ -#include - -#include "p2p/base/connection.h" -#include "p2p/base/ice_agent_interface.h" -#include "p2p/base/ice_switch_reason.h" -#include "p2p/base/transport_description.h" -#include "test/gmock.h" - -namespace cricket { - -class MockIceAgent : public IceAgentInterface { - public: - ~MockIceAgent() override = default; - - MOCK_METHOD(int64_t, GetLastPingSentMs, (), (override, const)); - MOCK_METHOD(IceRole, GetIceRole, (), (override, const)); - MOCK_METHOD(void, OnStartedPinging, (), (override)); - MOCK_METHOD(void, UpdateConnectionStates, (), (override)); - MOCK_METHOD(void, UpdateState, (), (override)); - MOCK_METHOD(void, - ForgetLearnedStateForConnections, - (rtc::ArrayView), - (override)); - MOCK_METHOD(void, SendPingRequest, (const Connection*), (override)); - MOCK_METHOD(void, - SwitchSelectedConnection, - (const Connection*, IceSwitchReason), - (override)); - MOCK_METHOD(bool, - PruneConnections, - (rtc::ArrayView), - (override)); -}; - -} // namespace cricket +#include "p2p/test/mock_ice_agent.h" #endif // P2P_BASE_MOCK_ICE_AGENT_H_ diff --git a/p2p/base/mock_ice_controller.h b/p2p/base/mock_ice_controller.h index bde9254e7d..a0b815bb5c 100644 --- a/p2p/base/mock_ice_controller.h +++ b/p2p/base/mock_ice_controller.h @@ -1,5 +1,5 @@ /* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * Copyright 2025 The WebRTC Project Authors. All rights reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -11,80 +11,6 @@ #ifndef P2P_BASE_MOCK_ICE_CONTROLLER_H_ #define P2P_BASE_MOCK_ICE_CONTROLLER_H_ -#include -#include - -#include "p2p/base/ice_controller_factory_interface.h" -#include "p2p/base/ice_controller_interface.h" -#include "test/gmock.h" - -namespace cricket { - -class MockIceController : public cricket::IceControllerInterface { - public: - explicit MockIceController(const cricket::IceControllerFactoryArgs& args) {} - ~MockIceController() override = default; - - MOCK_METHOD(void, SetIceConfig, (const cricket::IceConfig&), (override)); - MOCK_METHOD(void, - SetSelectedConnection, - (const cricket::Connection*), - (override)); - MOCK_METHOD(void, AddConnection, (const cricket::Connection*), (override)); - MOCK_METHOD(void, - OnConnectionDestroyed, - (const cricket::Connection*), - (override)); - MOCK_METHOD(rtc::ArrayView, - connections, - (), - (const, override)); - MOCK_METHOD(bool, HasPingableConnection, (), (const, override)); - MOCK_METHOD(cricket::IceControllerInterface::PingResult, - SelectConnectionToPing, - (int64_t), - (override)); - MOCK_METHOD(bool, - GetUseCandidateAttr, - (const cricket::Connection*, - cricket::NominationMode, - cricket::IceMode), - (const, override)); - MOCK_METHOD(const cricket::Connection*, - FindNextPingableConnection, - (), - (override)); - MOCK_METHOD(void, - MarkConnectionPinged, - (const cricket::Connection*), - (override)); - MOCK_METHOD(cricket::IceControllerInterface::SwitchResult, - ShouldSwitchConnection, - (cricket::IceSwitchReason, const cricket::Connection*), - (override)); - MOCK_METHOD(cricket::IceControllerInterface::SwitchResult, - SortAndSwitchConnection, - (cricket::IceSwitchReason), - (override)); - MOCK_METHOD(std::vector, - PruneConnections, - (), - (override)); -}; - -class MockIceControllerFactory : public 
cricket::IceControllerFactoryInterface { - public: - ~MockIceControllerFactory() override = default; - - std::unique_ptr Create( - const cricket::IceControllerFactoryArgs& args) override { - RecordIceControllerCreated(); - return std::make_unique(args); - } - - MOCK_METHOD(void, RecordIceControllerCreated, ()); -}; - -} // namespace cricket +#include "p2p/test/mock_ice_controller.h" #endif // P2P_BASE_MOCK_ICE_CONTROLLER_H_ diff --git a/p2p/base/mock_ice_transport.h b/p2p/base/mock_ice_transport.h index ef6bdce3c0..fa26844bb3 100644 --- a/p2p/base/mock_ice_transport.h +++ b/p2p/base/mock_ice_transport.h @@ -1,5 +1,5 @@ /* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * Copyright 2025 The WebRTC Project Authors. All rights reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -11,80 +11,6 @@ #ifndef P2P_BASE_MOCK_ICE_TRANSPORT_H_ #define P2P_BASE_MOCK_ICE_TRANSPORT_H_ -#include -#include -#include - -#include "p2p/base/ice_transport_internal.h" -#include "rtc_base/gunit.h" -#include "test/gmock.h" - -using ::testing::_; -using ::testing::Return; - -namespace cricket { - -// Used in Chromium/remoting/protocol/channel_socket_adapter_unittest.cc -class MockIceTransport : public IceTransportInternal { - public: - MockIceTransport() { - SignalReadyToSend(this); - SignalWritableState(this); - } - - MOCK_METHOD(int, - SendPacket, - (const char* data, - size_t len, - const rtc::PacketOptions& options, - int flags), - (override)); - MOCK_METHOD(int, SetOption, (rtc::Socket::Option opt, int value), (override)); - MOCK_METHOD(int, GetError, (), (override)); - MOCK_METHOD(cricket::IceRole, GetIceRole, (), (const, override)); - MOCK_METHOD(bool, - GetStats, - (cricket::IceTransportStats * ice_transport_stats), - (override)); - - IceTransportState GetState() const override { - return IceTransportState::STATE_INIT; - } - webrtc::IceTransportState GetIceTransportState() const override { - return webrtc::IceTransportState::kNew; - } - - const std::string& transport_name() const override { return transport_name_; } - int component() const override { return 0; } - void SetIceRole(IceRole role) override {} - void SetIceTiebreaker(uint64_t tiebreaker) override {} - // The ufrag and pwd in `ice_params` must be set - // before candidate gathering can start. 
- void SetIceParameters(const IceParameters& ice_params) override {} - void SetRemoteIceParameters(const IceParameters& ice_params) override {} - void SetRemoteIceMode(IceMode mode) override {} - void SetIceConfig(const IceConfig& config) override {} - absl::optional GetRttEstimate() override { return absl::nullopt; } - const Connection* selected_connection() const override { return nullptr; } - absl::optional GetSelectedCandidatePair() - const override { - return absl::nullopt; - } - void MaybeStartGathering() override {} - void AddRemoteCandidate(const Candidate& candidate) override {} - void RemoveRemoteCandidate(const Candidate& candidate) override {} - void RemoveAllRemoteCandidates() override {} - IceGatheringState gathering_state() const override { - return IceGatheringState::kIceGatheringComplete; - } - - bool receiving() const override { return true; } - bool writable() const override { return true; } - - private: - std::string transport_name_; -}; - -} // namespace cricket +#include "p2p/test/mock_ice_transport.h" #endif // P2P_BASE_MOCK_ICE_TRANSPORT_H_ diff --git a/p2p/base/p2p_constants.cc b/p2p/base/p2p_constants.cc index 3414939a6f..11ff992227 100644 --- a/p2p/base/p2p_constants.cc +++ b/p2p/base/p2p_constants.cc @@ -10,7 +10,10 @@ #include "p2p/base/p2p_constants.h" -namespace cricket { +#include +#include + +namespace webrtc { const char CN_AUDIO[] = "audio"; const char CN_VIDEO[] = "video"; @@ -72,4 +75,4 @@ const int CONNECTION_WRITE_TIMEOUT = 15 * 1000; // 15 seconds // of increased memory, but in some networks (2G), we observe up to 60s RTTs. const int CONNECTION_RESPONSE_TIMEOUT = 60 * 1000; // 60 seconds -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/p2p_constants.h b/p2p/base/p2p_constants.h index d51ee17a07..fae92bfb27 100644 --- a/p2p/base/p2p_constants.h +++ b/p2p/base/p2p_constants.h @@ -16,7 +16,7 @@ #include "rtc_base/system/rtc_export.h" -namespace cricket { +namespace webrtc { // CN_ == "content name". When we initiate a session, we choose the // name, and when we receive a Gingle session, we provide default @@ -95,6 +95,8 @@ extern const int CONNECTION_WRITE_TIMEOUT; // Default value of IceConfig.stun_keepalive_interval; extern const int STUN_KEEPALIVE_INTERVAL; +static const int MIN_PINGS_AT_WEAK_PING_INTERVAL = 3; + // The following constants are used at the candidate pair level to determine the // state of a candidate pair. // @@ -109,6 +111,72 @@ extern const int CONNECTION_RESPONSE_TIMEOUT; // it. extern const int MIN_CONNECTION_LIFETIME; +// The type preference MUST be an integer from 0 to 126 inclusive. +// https://datatracker.ietf.org/doc/html/rfc5245#section-4.1.2.1 +enum IcePriorityValue : uint8_t { + ICE_TYPE_PREFERENCE_RELAY_TLS = 0, + ICE_TYPE_PREFERENCE_RELAY_TCP = 1, + ICE_TYPE_PREFERENCE_RELAY_UDP = 2, + ICE_TYPE_PREFERENCE_PRFLX_TCP = 80, + ICE_TYPE_PREFERENCE_HOST_TCP = 90, + ICE_TYPE_PREFERENCE_SRFLX = 100, + ICE_TYPE_PREFERENCE_PRFLX = 110, + ICE_TYPE_PREFERENCE_HOST = 126 +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::BACKUP_CONNECTION_PING_INTERVAL; +using ::webrtc::CN_AUDIO; +using ::webrtc::CN_DATA; +using ::webrtc::CN_OTHER; +using ::webrtc::CN_VIDEO; +using ::webrtc::CONNECTION_RESPONSE_TIMEOUT; +using ::webrtc::CONNECTION_WRITE_CONNECT_FAILURES; +using ::webrtc::CONNECTION_WRITE_CONNECT_TIMEOUT; +using ::webrtc::CONNECTION_WRITE_TIMEOUT; +using ::webrtc::CONNECTIONROLE_ACTIVE_STR; +using ::webrtc::CONNECTIONROLE_ACTPASS_STR; +using ::webrtc::CONNECTIONROLE_HOLDCONN_STR; +using ::webrtc::CONNECTIONROLE_PASSIVE_STR; +using ::webrtc::DEAD_CONNECTION_RECEIVE_TIMEOUT; +using ::webrtc::GROUP_TYPE_BUNDLE; +using ::webrtc::ICE_CANDIDATE_COMPONENT_DEFAULT; +using ::webrtc::ICE_CANDIDATE_COMPONENT_RTCP; +using ::webrtc::ICE_CANDIDATE_COMPONENT_RTP; +using ::webrtc::ICE_PWD_LENGTH; +using ::webrtc::ICE_PWD_MAX_LENGTH; +using ::webrtc::ICE_PWD_MIN_LENGTH; +using ::webrtc::ICE_TYPE_PREFERENCE_HOST; +using ::webrtc::ICE_TYPE_PREFERENCE_HOST_TCP; +using ::webrtc::ICE_TYPE_PREFERENCE_PRFLX; +using ::webrtc::ICE_TYPE_PREFERENCE_PRFLX_TCP; +using ::webrtc::ICE_TYPE_PREFERENCE_RELAY_TCP; +using ::webrtc::ICE_TYPE_PREFERENCE_RELAY_TLS; +using ::webrtc::ICE_TYPE_PREFERENCE_RELAY_UDP; +using ::webrtc::ICE_TYPE_PREFERENCE_SRFLX; +using ::webrtc::ICE_UFRAG_LENGTH; +using ::webrtc::ICE_UFRAG_MAX_LENGTH; +using ::webrtc::ICE_UFRAG_MIN_LENGTH; +using ::webrtc::IcePriorityValue; +using ::webrtc::LOCAL_TLD; +using ::webrtc::MIN_CHECK_RECEIVING_INTERVAL; +using ::webrtc::MIN_CONNECTION_LIFETIME; +using ::webrtc::MIN_PINGS_AT_WEAK_PING_INTERVAL; +using ::webrtc::RECEIVING_SWITCHING_DELAY; +using ::webrtc::RECEIVING_TIMEOUT; +using ::webrtc::REGATHER_ON_FAILED_NETWORKS_INTERVAL; +using ::webrtc::STRONG_AND_STABLE_WRITABLE_CONNECTION_PING_INTERVAL; +using ::webrtc::STRONG_PING_INTERVAL; +using ::webrtc::STUN_KEEPALIVE_INTERVAL; +using ::webrtc::WEAK_CONNECTION_RECEIVE_TIMEOUT; +using ::webrtc::WEAK_OR_STABILIZING_WRITABLE_CONNECTION_PING_INTERVAL; +using ::webrtc::WEAK_PING_INTERVAL; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_P2P_CONSTANTS_H_ diff --git a/p2p/base/p2p_transport_channel.cc b/p2p/base/p2p_transport_channel.cc index fe11920da9..4b1b5e7094 100644 --- a/p2p/base/p2p_transport_channel.cc +++ b/p2p/base/p2p_transport_channel.cc @@ -14,54 +14,81 @@ #include #include +#include +#include #include #include +#include #include +#include +#include #include +#include #include "absl/algorithm/container.h" +#include "absl/functional/any_invocable.h" #include "absl/memory/memory.h" #include "absl/strings/match.h" #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/async_dns_resolver.h" #include "api/candidate.h" #include "api/field_trials_view.h" -#include "api/units/time_delta.h" +#include "api/ice_transport_interface.h" +#include "api/rtc_error.h" +#include "api/sequence_checker.h" +#include "api/transport/enums.h" +#include "api/transport/stun.h" +#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" #include "logging/rtc_event_log/ice_logger.h" -#include "p2p/base/basic_async_resolver_factory.h" -#include "p2p/base/basic_ice_controller.h" +#include "p2p/base/active_ice_controller_factory_interface.h" +#include "p2p/base/candidate_pair_interface.h" #include "p2p/base/connection.h" #include "p2p/base/connection_info.h" +#include "p2p/base/ice_controller_factory_interface.h" +#include "p2p/base/ice_switch_reason.h" +#include 
"p2p/base/ice_transport_internal.h" +#include "p2p/base/p2p_constants.h" #include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" +#include "p2p/base/port_interface.h" +#include "p2p/base/regathering_controller.h" +#include "p2p/base/transport_description.h" #include "p2p/base/wrapping_active_ice_controller.h" +#include "p2p/dtls/dtls_stun_piggyback_callbacks.h" +#include "rtc_base/async_packet_socket.h" #include "rtc_base/checks.h" -#include "rtc_base/crc32.h" +#include "rtc_base/dscp.h" #include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/net_helper.h" +#include "rtc_base/net_helpers.h" #include "rtc_base/network.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network/sent_packet.h" #include "rtc_base/network_constants.h" -#include "rtc_base/string_encode.h" +#include "rtc_base/network_route.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/thread.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" -#include "system_wrappers/include/metrics.h" +namespace webrtc { namespace { -cricket::PortInterface::CandidateOrigin GetOrigin( - cricket::PortInterface* port, - cricket::PortInterface* origin_port) { +PortInterface::CandidateOrigin GetOrigin(PortInterface* port, + PortInterface* origin_port) { if (!origin_port) - return cricket::PortInterface::ORIGIN_MESSAGE; + return PortInterface::ORIGIN_MESSAGE; else if (port == origin_port) - return cricket::PortInterface::ORIGIN_THIS_PORT; + return PortInterface::ORIGIN_THIS_PORT; else - return cricket::PortInterface::ORIGIN_OTHER_PORT; + return PortInterface::ORIGIN_OTHER_PORT; } -uint32_t GetWeakPingIntervalInFieldTrial( - const webrtc::FieldTrialsView* field_trials) { +uint32_t GetWeakPingIntervalInFieldTrial(const FieldTrialsView* field_trials) { if (field_trials != nullptr) { uint32_t weak_ping_interval = ::strtoul(field_trials->Lookup("WebRTC-StunInterPacketDelay").c_str(), @@ -70,18 +97,17 @@ uint32_t GetWeakPingIntervalInFieldTrial( return static_cast(weak_ping_interval); } } - return cricket::WEAK_PING_INTERVAL; + return WEAK_PING_INTERVAL; } -rtc::RouteEndpoint CreateRouteEndpointFromCandidate( - bool local, - const cricket::Candidate& candidate, - bool uses_turn) { +RouteEndpoint CreateRouteEndpointFromCandidate(bool local, + const Candidate& candidate, + bool uses_turn) { auto adapter_type = candidate.network_type(); - if (!local && adapter_type == rtc::ADAPTER_TYPE_UNKNOWN) { + if (!local && adapter_type == ADAPTER_TYPE_UNKNOWN) { bool vpn; std::tie(adapter_type, vpn) = - rtc::Network::GuessAdapterFromNetworkCost(candidate.network_cost()); + Network::GuessAdapterFromNetworkCost(candidate.network_cost()); } // TODO(bugs.webrtc.org/9446) : Rewrite if information about remote network @@ -89,19 +115,12 @@ rtc::RouteEndpoint CreateRouteEndpointFromCandidate( // we will only ever report 1 adapter per type. In practice this is probably // fine, since the endpoint also contains network-id. 
uint16_t adapter_id = static_cast(adapter_type); - return rtc::RouteEndpoint(adapter_type, adapter_id, candidate.network_id(), - uses_turn); + return RouteEndpoint(adapter_type, adapter_id, candidate.network_id(), + uses_turn); } -using ::webrtc::RTCError; -using ::webrtc::RTCErrorType; -using ::webrtc::SafeTask; -using ::webrtc::TimeDelta; - } // unnamed namespace -namespace cricket { - bool IceCredentialsChanged(absl::string_view old_ufrag, absl::string_view old_pwd, absl::string_view new_ufrag, @@ -116,33 +135,18 @@ bool IceCredentialsChanged(absl::string_view old_ufrag, std::unique_ptr P2PTransportChannel::Create( absl::string_view transport_name, int component, - webrtc::IceTransportInit init) { - // TODO(bugs.webrtc.org/12598): Remove pragma and fallback once - // async_resolver_factory is gone -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wdeprecated-declarations" - if (init.async_resolver_factory()) { - return absl::WrapUnique(new P2PTransportChannel( - transport_name, component, init.port_allocator(), nullptr, - std::make_unique( - init.async_resolver_factory()), - init.event_log(), init.ice_controller_factory(), - init.active_ice_controller_factory(), init.field_trials())); -#pragma clang diagnostic pop - } else { - return absl::WrapUnique(new P2PTransportChannel( - transport_name, component, init.port_allocator(), - init.async_dns_resolver_factory(), nullptr, init.event_log(), - init.ice_controller_factory(), init.active_ice_controller_factory(), - init.field_trials())); - } -} - -P2PTransportChannel::P2PTransportChannel( - absl::string_view transport_name, - int component, - PortAllocator* allocator, - const webrtc::FieldTrialsView* field_trials) + IceTransportInit init) { + return absl::WrapUnique(new P2PTransportChannel( + transport_name, component, init.port_allocator(), + init.async_dns_resolver_factory(), nullptr, init.event_log(), + init.ice_controller_factory(), init.active_ice_controller_factory(), + init.field_trials())); +} + +P2PTransportChannel::P2PTransportChannel(absl::string_view transport_name, + int component, + PortAllocator* allocator, + const FieldTrialsView* field_trials) : P2PTransportChannel(transport_name, component, allocator, @@ -158,13 +162,13 @@ P2PTransportChannel::P2PTransportChannel( absl::string_view transport_name, int component, PortAllocator* allocator, - webrtc::AsyncDnsResolverFactoryInterface* async_dns_resolver_factory, - std::unique_ptr + AsyncDnsResolverFactoryInterface* async_dns_resolver_factory, + std::unique_ptr owned_dns_resolver_factory, - webrtc::RtcEventLog* event_log, + RtcEventLog* event_log, IceControllerFactoryInterface* ice_controller_factory, ActiveIceControllerFactoryInterface* active_ice_controller_factory, - const webrtc::FieldTrialsView* field_trials) + const FieldTrialsView* field_trials) : transport_name_(transport_name), component_(component), allocator_(allocator), @@ -174,33 +178,32 @@ P2PTransportChannel::P2PTransportChannel( ? 
owned_dns_resolver_factory.get() : async_dns_resolver_factory), owned_dns_resolver_factory_(std::move(owned_dns_resolver_factory)), - network_thread_(rtc::Thread::Current()), + network_thread_(Thread::Current()), incoming_only_(false), error_(0), - remote_ice_mode_(ICEMODE_FULL), - ice_role_(ICEROLE_UNKNOWN), - tiebreaker_(0), - gathering_state_(kIceGatheringNew), + remote_ice_mode_(webrtc::ICEMODE_FULL), + ice_role_(webrtc::ICEROLE_UNKNOWN), + gathering_state_(webrtc::kIceGatheringNew), weak_ping_interval_(GetWeakPingIntervalInFieldTrial(field_trials)), config_(RECEIVING_TIMEOUT, BACKUP_CONNECTION_PING_INTERVAL, - GATHER_ONCE /* continual_gathering_policy */, + webrtc::GATHER_ONCE /* continual_gathering_policy */, false /* prioritize_most_likely_candidate_pairs */, STRONG_AND_STABLE_WRITABLE_CONNECTION_PING_INTERVAL, true /* presume_writable_when_fully_relayed */, REGATHER_ON_FAILED_NETWORKS_INTERVAL, - RECEIVING_SWITCHING_DELAY) { + RECEIVING_SWITCHING_DELAY), + field_trials_(field_trials) { TRACE_EVENT0("webrtc", "P2PTransportChannel::P2PTransportChannel"); RTC_DCHECK(allocator_ != nullptr); // Validate IceConfig even for mostly built-in constant default values in case // we change them. - RTC_DCHECK(ValidateIceConfig(config_).ok()); - webrtc::BasicRegatheringController::Config regathering_config; + RTC_DCHECK(config_.IsValid().ok()); + BasicRegatheringController::Config regathering_config; regathering_config.regather_on_failed_networks_interval = config_.regather_on_failed_networks_interval_or_default(); - regathering_controller_ = - std::make_unique( - regathering_config, this, network_thread_); + regathering_controller_ = std::make_unique( + regathering_config, this, network_thread_); // We populate the change in the candidate filter to the session taken by // the transport. 
allocator_->SignalCandidateFilterChanged.connect( @@ -276,8 +279,10 @@ void P2PTransportChannel::AddConnection(Connection* connection) { connection->set_unwritable_timeout(config_.ice_unwritable_timeout); connection->set_unwritable_min_checks(config_.ice_unwritable_min_checks); connection->set_inactive_timeout(config_.ice_inactive_timeout); - connection->SignalReadPacket.connect(this, - &P2PTransportChannel::OnReadPacket); + connection->RegisterReceivedPacketCallback( + [&](Connection* connection, const ReceivedIpPacket& packet) { + OnReadPacket(connection, packet); + }); connection->SignalReadyToSend.connect(this, &P2PTransportChannel::OnReadyToSend); connection->SignalStateChange.connect( @@ -297,15 +302,24 @@ void P2PTransportChannel::AddConnection(Connection* connection) { [this](webrtc::RTCErrorOr delta_ack) { GoogDeltaAckReceived(std::move(delta_ack)); }); - LogCandidatePairConfig(connection, - webrtc::IceCandidatePairConfigType::kAdded); + if (!dtls_stun_piggyback_callbacks_.empty()) { + connection->RegisterDtlsPiggyback(DtlsStunPiggybackCallbacks( + [&](auto request) { + return dtls_stun_piggyback_callbacks_.send_data(request); + }, + [&](auto data, auto ack) { + dtls_stun_piggyback_callbacks_.recv_data(data, ack); + })); + } + + LogCandidatePairConfig(connection, IceCandidatePairConfigType::kAdded); connections_.push_back(connection); ice_controller_->OnConnectionAdded(connection); } void P2PTransportChannel::ForgetLearnedStateForConnections( - rtc::ArrayView connections) { + ArrayView connections) { for (const Connection* con : connections) { FromIceController(con)->ForgetLearnedState(); } @@ -331,23 +345,12 @@ IceRole P2PTransportChannel::GetIceRole() const { return ice_role_; } -void P2PTransportChannel::SetIceTiebreaker(uint64_t tiebreaker) { - RTC_DCHECK_RUN_ON(network_thread_); - if (!ports_.empty() || !pruned_ports_.empty()) { - RTC_LOG(LS_ERROR) - << "Attempt to change tiebreaker after Port has been allocated."; - return; - } - - tiebreaker_ = tiebreaker; -} - -IceTransportState P2PTransportChannel::GetState() const { +IceTransportStateInternal P2PTransportChannel::GetState() const { RTC_DCHECK_RUN_ON(network_thread_); return state_; } -webrtc::IceTransportState P2PTransportChannel::GetIceTransportState() const { +IceTransportState P2PTransportChannel::GetIceTransportState() const { RTC_DCHECK_RUN_ON(network_thread_); return standardized_state_; } @@ -377,21 +380,21 @@ IceGatheringState P2PTransportChannel::gathering_state() const { return gathering_state_; } -absl::optional P2PTransportChannel::GetRttEstimate() { +std::optional P2PTransportChannel::GetRttEstimate() { RTC_DCHECK_RUN_ON(network_thread_); if (selected_connection_ != nullptr && selected_connection_->rtt_samples() > 0) { return selected_connection_->rtt(); } else { - return absl::nullopt; + return std::nullopt; } } -absl::optional +std::optional P2PTransportChannel::GetSelectedCandidatePair() const { RTC_DCHECK_RUN_ON(network_thread_); if (selected_connection_ == nullptr) { - return absl::nullopt; + return std::nullopt; } CandidatePair pair; @@ -403,10 +406,10 @@ P2PTransportChannel::GetSelectedCandidatePair() const { // A channel is considered ICE completed once there is at most one active // connection per network and at least one active connection. 
-IceTransportState P2PTransportChannel::ComputeState() const { +IceTransportStateInternal P2PTransportChannel::ComputeState() const { RTC_DCHECK_RUN_ON(network_thread_); if (!had_connection_) { - return IceTransportState::STATE_INIT; + return IceTransportStateInternal::STATE_INIT; } std::vector active_connections; @@ -416,12 +419,12 @@ IceTransportState P2PTransportChannel::ComputeState() const { } } if (active_connections.empty()) { - return IceTransportState::STATE_FAILED; + return IceTransportStateInternal::STATE_FAILED; } - std::set networks; + std::set networks; for (Connection* connection : active_connections) { - const rtc::Network* network = connection->network(); + const Network* network = connection->network(); if (networks.find(network) == networks.end()) { networks.insert(network); } else { @@ -429,20 +432,19 @@ IceTransportState P2PTransportChannel::ComputeState() const { << ": Ice not completed yet for this channel as " << network->ToString() << " has more than 1 connection."; - return IceTransportState::STATE_CONNECTING; + return IceTransportStateInternal::STATE_CONNECTING; } } ice_event_log_.DumpCandidatePairDescriptionToMemoryAsConfigEvents(); - return IceTransportState::STATE_COMPLETED; + return IceTransportStateInternal::STATE_COMPLETED; } // Compute the current RTCIceTransportState as described in // https://www.w3.org/TR/webrtc/#dom-rtcicetransportstate // TODO(bugs.webrtc.org/9218): Start signaling kCompleted once we have // implemented end-of-candidates signalling. -webrtc::IceTransportState P2PTransportChannel::ComputeIceTransportState() - const { +IceTransportState P2PTransportChannel::ComputeIceTransportState() const { RTC_DCHECK_RUN_ON(network_thread_); bool has_connection = false; for (Connection* connection : connections_) { @@ -453,24 +455,24 @@ webrtc::IceTransportState P2PTransportChannel::ComputeIceTransportState() } if (had_connection_ && !has_connection) { - return webrtc::IceTransportState::kFailed; + return IceTransportState::kFailed; } if (!writable() && has_been_writable_) { - return webrtc::IceTransportState::kDisconnected; + return IceTransportState::kDisconnected; } if (!had_connection_ && !has_connection) { - return webrtc::IceTransportState::kNew; + return IceTransportState::kNew; } if (has_connection && !writable()) { // A candidate pair has been formed by adding a remote candidate // and gathering a local candidate. - return webrtc::IceTransportState::kChecking; + return IceTransportState::kChecking; } - return webrtc::IceTransportState::kConnected; + return IceTransportState::kConnected; } void P2PTransportChannel::SetIceParameters(const IceParameters& ice_params) { @@ -489,7 +491,7 @@ void P2PTransportChannel::SetRemoteIceParameters( RTC_LOG(LS_INFO) << "Received remote ICE parameters: ufrag=" << ice_params.ufrag << ", renomination " << (ice_params.renomination ? "enabled" : "disabled"); - IceParameters* current_ice = remote_ice(); + const IceParameters* current_ice = remote_ice_parameters(); if (!current_ice || *current_ice != ice_params) { // Keep the ICE credentials so that newer connections // are prioritized over the older ones. @@ -519,9 +521,9 @@ void P2PTransportChannel::SetRemoteIceMode(IceMode mode) { remote_ice_mode_ = mode; } -// TODO(qingsi): We apply the convention that setting a absl::optional parameter +// TODO(qingsi): We apply the convention that setting a std::optional parameter // to null restores its default value in the implementation. 
However, some -// absl::optional parameters are only processed below if non-null, e.g., +// std::optional parameters are only processed below if non-null, e.g., // regather_on_failed_networks_interval, and thus there is no way to restore the // defaults. Fix this issue later for consistency. void P2PTransportChannel::SetIceConfig(const IceConfig& config) { @@ -585,7 +587,7 @@ void P2PTransportChannel::SetIceConfig(const IceConfig& config) { config_.surface_ice_candidates_on_ice_transport_type_changed = config.surface_ice_candidates_on_ice_transport_type_changed; if (config_.surface_ice_candidates_on_ice_transport_type_changed && - config_.continual_gathering_policy != GATHER_CONTINUALLY) { + config_.continual_gathering_policy != webrtc::GATHER_CONTINUALLY) { RTC_LOG(LS_WARNING) << "surface_ice_candidates_on_ice_transport_type_changed is " "ineffective since we do not gather continually."; @@ -684,7 +686,7 @@ void P2PTransportChannel::SetIceConfig(const IceConfig& config) { << config.stun_keepalive_interval_or_default(); } - webrtc::BasicRegatheringController::Config regathering_config; + BasicRegatheringController::Config regathering_config; regathering_config.regather_on_failed_networks_interval = config_.regather_on_failed_networks_interval_or_default(); regathering_controller_->SetConfig(regathering_config); @@ -693,12 +695,17 @@ void P2PTransportChannel::SetIceConfig(const IceConfig& config) { allocator_->SetVpnPreference(config_.vpn_preference); ice_controller_->SetIceConfig(config_); + if (config_.dtls_handshake_in_stun != config.dtls_handshake_in_stun) { + config_.dtls_handshake_in_stun = config.dtls_handshake_in_stun; + RTC_LOG(LS_INFO) << "Set DTLS handshake in STUN to " + << config.dtls_handshake_in_stun; + } - RTC_DCHECK(ValidateIceConfig(config_).ok()); + RTC_DCHECK(config_.IsValid().ok()); } void P2PTransportChannel::ParseFieldTrials( - const webrtc::FieldTrialsView* field_trials) { + const FieldTrialsView* field_trials) { if (field_trials == nullptr) { return; } @@ -707,7 +714,7 @@ void P2PTransportChannel::ParseFieldTrials( RTC_LOG(LS_INFO) << "Set WebRTC-ExtraICEPing: Enabled"; } - webrtc::StructParametersParser::Create( + StructParametersParser::Create( // go/skylift-light "skip_relay_to_non_relay_connections", &ice_field_trials_.skip_relay_to_non_relay_connections, @@ -773,12 +780,12 @@ void P2PTransportChannel::ParseFieldTrials( // DSCP override, allow user to specify (any) int value // that will be used for tagging all packets. 
- webrtc::StructParametersParser::Create("override_dscp", - &ice_field_trials_.override_dscp) + StructParametersParser::Create("override_dscp", + &ice_field_trials_.override_dscp) ->Parse(field_trials->Lookup("WebRTC-DscpFieldTrial")); if (ice_field_trials_.override_dscp) { - SetOption(rtc::Socket::OPT_DSCP, *ice_field_trials_.override_dscp); + SetOption(Socket::OPT_DSCP, *ice_field_trials_.override_dscp); } std::string field_trial_string = @@ -788,7 +795,7 @@ void P2PTransportChannel::ParseFieldTrials( if (receive_buffer_size_kb > 0) { RTC_LOG(LS_INFO) << "Set WebRTC-SetSocketReceiveBuffer: Enabled and set to " << receive_buffer_size_kb << "kb"; - SetOption(rtc::Socket::OPT_RCVBUF, receive_buffer_size_kb * 1024); + SetOption(Socket::OPT_RCVBUF, receive_buffer_size_kb * 1024); } ice_field_trials_.piggyback_ice_check_acknowledgement = @@ -800,6 +807,13 @@ void P2PTransportChannel::ParseFieldTrials( if (!ice_field_trials_.enable_goog_delta) { stun_dict_writer_.Disable(); } + + if (field_trials->IsEnabled("WebRTC-RFC8888CongestionControlFeedback")) { + int desired_recv_esn = 1; + RTC_LOG(LS_INFO) << "Set WebRTC-RFC8888CongestionControlFeedback: Enable " + "and set ECN recving mode"; + SetOption(Socket::OPT_RECV_ECN, desired_recv_esn); + } } const IceConfig& P2PTransportChannel::config() const { @@ -807,53 +821,6 @@ const IceConfig& P2PTransportChannel::config() const { return config_; } -// TODO(qingsi): Add tests for the config validation starting from -// PeerConnection::SetConfiguration. -// Static -RTCError P2PTransportChannel::ValidateIceConfig(const IceConfig& config) { - if (config.ice_check_interval_strong_connectivity_or_default() < - config.ice_check_interval_weak_connectivity.value_or( - GetWeakPingIntervalInFieldTrial(nullptr))) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Ping interval of candidate pairs is shorter when ICE is " - "strongly connected than that when ICE is weakly " - "connected"); - } - - if (config.receiving_timeout_or_default() < - std::max(config.ice_check_interval_strong_connectivity_or_default(), - config.ice_check_min_interval_or_default())) { - return RTCError( - RTCErrorType::INVALID_PARAMETER, - "Receiving timeout is shorter than the minimal ping interval."); - } - - if (config.backup_connection_ping_interval_or_default() < - config.ice_check_interval_strong_connectivity_or_default()) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Ping interval of backup candidate pairs is shorter than " - "that of general candidate pairs when ICE is strongly " - "connected"); - } - - if (config.stable_writable_connection_ping_interval_or_default() < - config.ice_check_interval_strong_connectivity_or_default()) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Ping interval of stable and writable candidate pairs is " - "shorter than that of general candidate pairs when ICE is " - "strongly connected"); - } - - if (config.ice_unwritable_timeout_or_default() > - config.ice_inactive_timeout_or_default()) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "The timeout period for the writability state to become " - "UNRELIABLE is longer than that to become TIMEOUT."); - } - - return RTCError::OK(); -} - const Connection* P2PTransportChannel::selected_connection() const { RTC_DCHECK_RUN_ON(network_thread_); return selected_connection_; @@ -880,23 +847,9 @@ void P2PTransportChannel::MaybeStartGathering() { IceCredentialsChanged(allocator_sessions_.back()->ice_ufrag(), allocator_sessions_.back()->ice_pwd(), ice_parameters_.ufrag, 
ice_parameters_.pwd)) { - if (gathering_state_ != kIceGatheringGathering) { - gathering_state_ = kIceGatheringGathering; - SignalGatheringState(this); - } - - if (!allocator_sessions_.empty()) { - IceRestartState state; - if (writable()) { - state = IceRestartState::CONNECTED; - } else if (IsGettingPorts()) { - state = IceRestartState::CONNECTING; - } else { - state = IceRestartState::DISCONNECTED; - } - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.IceRestartState", - static_cast(state), - static_cast(IceRestartState::MAX_VALUE)); + if (gathering_state_ != webrtc::kIceGatheringGathering) { + gathering_state_ = webrtc::kIceGatheringGathering; + SendGatheringStateEvent(); } for (const auto& session : allocator_sessions_) { @@ -912,7 +865,6 @@ void P2PTransportChannel::MaybeStartGathering() { ice_parameters_.ufrag, ice_parameters_.pwd); if (pooled_session) { - pooled_session->set_ice_tiebreaker(tiebreaker_); AddAllocatorSession(std::move(pooled_session)); PortAllocatorSession* raw_pooled_session = allocator_sessions_.back().get(); @@ -929,14 +881,13 @@ void P2PTransportChannel::MaybeStartGathering() { AddAllocatorSession(allocator_->CreateSession( transport_name(), component(), ice_parameters_.ufrag, ice_parameters_.pwd)); - allocator_sessions_.back()->set_ice_tiebreaker(tiebreaker_); allocator_sessions_.back()->StartGettingPorts(); } } } // A new port is available, attempt to make connections for it -void P2PTransportChannel::OnPortReady(PortAllocatorSession* session, +void P2PTransportChannel::OnPortReady(PortAllocatorSession* /* session */, PortInterface* port) { RTC_DCHECK_RUN_ON(network_thread_); @@ -957,7 +908,7 @@ void P2PTransportChannel::OnPortReady(PortAllocatorSession* session, // if one is pending. port->SetIceRole(ice_role_); - port->SetIceTiebreaker(tiebreaker_); + port->SetIceTiebreaker(allocator_->ice_tiebreaker()); ports_.push_back(port); port->SignalUnknownAddress.connect(this, &P2PTransportChannel::OnUnknownAddress); @@ -982,7 +933,7 @@ void P2PTransportChannel::OnPortReady(PortAllocatorSession* session, // A new candidate is available, let listeners know void P2PTransportChannel::OnCandidatesReady( - PortAllocatorSession* session, + PortAllocatorSession* /* session */, const std::vector& candidates) { RTC_DCHECK_RUN_ON(network_thread_); for (size_t i = 0; i < candidates.size(); ++i) { @@ -991,14 +942,16 @@ void P2PTransportChannel::OnCandidatesReady( } void P2PTransportChannel::OnCandidateError( - PortAllocatorSession* session, + PortAllocatorSession* /* session */, const IceCandidateErrorEvent& event) { - RTC_DCHECK(network_thread_ == rtc::Thread::Current()); - SignalCandidateError(this, event); + RTC_DCHECK(network_thread_ == Thread::Current()); + if (candidate_error_callback_) { + candidate_error_callback_(this, event); + } } void P2PTransportChannel::OnCandidatesAllocationDone( - PortAllocatorSession* session) { + PortAllocatorSession* /* session */) { RTC_DCHECK_RUN_ON(network_thread_); if (config_.gather_continually()) { RTC_LOG(LS_INFO) << "P2PTransportChannel: " << transport_name() @@ -1007,20 +960,21 @@ void P2PTransportChannel::OnCandidatesAllocationDone( "gathering so not changing gathering state."; return; } - gathering_state_ = kIceGatheringComplete; + gathering_state_ = webrtc::kIceGatheringComplete; RTC_LOG(LS_INFO) << "P2PTransportChannel: " << transport_name() << ", component " << component() << " gathering complete"; - SignalGatheringState(this); + SendGatheringStateEvent(); } // Handle stun packets void 
P2PTransportChannel::OnUnknownAddress(PortInterface* port, - const rtc::SocketAddress& address, + const SocketAddress& address, ProtocolType proto, IceMessage* stun_msg, const std::string& remote_username, bool port_muxed) { RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(stun_msg); // Port has received a valid stun packet from an address that no Connection // is currently available for. See if we already have a candidate with the @@ -1046,7 +1000,7 @@ void P2PTransportChannel::OnUnknownAddress(PortInterface* port, const Candidate* candidate = nullptr; for (const Candidate& c : remote_candidates_) { if (c.username() == remote_username && c.address() == address && - c.protocol() == ProtoToString(proto)) { + c.protocol() == webrtc::ProtoToString(proto)) { candidate = &c; break; } @@ -1098,19 +1052,19 @@ void P2PTransportChannel::OnUnknownAddress(PortInterface* port, // If the source transport address of the request does not match any // existing remote candidates, it represents a new peer reflexive remote // candidate. - remote_candidate = Candidate( - component(), ProtoToString(proto), address, remote_candidate_priority, - remote_username, remote_password, PRFLX_PORT_TYPE, remote_generation, - "", network_id, network_cost); - if (proto == PROTO_TCP) { - remote_candidate.set_tcptype(TCPTYPE_ACTIVE_STR); + remote_candidate = + Candidate(component(), webrtc::ProtoToString(proto), address, + remote_candidate_priority, remote_username, remote_password, + IceCandidateType::kPrflx, remote_generation, "", network_id, + network_cost); + if (proto == webrtc::PROTO_TCP) { + remote_candidate.set_tcptype(webrtc::TCPTYPE_ACTIVE_STR); } // From RFC 5245, section-7.2.1.3: // The foundation of the candidate is set to an arbitrary value, different // from the foundation for all other remote candidates. - remote_candidate.set_foundation( - rtc::ToString(rtc::ComputeCrc32(remote_candidate.id()))); + remote_candidate.ComputePrflxFoundation(); } // RFC5245, the agent constructs a pair whose local candidate is equal to @@ -1171,14 +1125,14 @@ void P2PTransportChannel::OnCandidateFilterChanged(uint32_t prev_filter, } } -void P2PTransportChannel::OnRoleConflict(PortInterface* port) { +void P2PTransportChannel::OnRoleConflict(PortInterface* /* port */) { SignalRoleConflict(this); // STUN ping will be sent when SetRole is called // from Transport. } const IceParameters* P2PTransportChannel::FindRemoteIceFromUfrag( absl::string_view ufrag, - uint32_t* generation) { + uint32_t* generation) const { RTC_DCHECK_RUN_ON(network_thread_); const auto& params = remote_ice_parameters_; auto it = std::find_if( @@ -1194,7 +1148,7 @@ const IceParameters* P2PTransportChannel::FindRemoteIceFromUfrag( void P2PTransportChannel::OnNominated(Connection* conn) { RTC_DCHECK_RUN_ON(network_thread_); - RTC_DCHECK(ice_role_ == ICEROLE_CONTROLLED); + RTC_DCHECK(ice_role_ == webrtc::ICEROLE_CONTROLLED); if (selected_connection_ == conn) { return; @@ -1255,13 +1209,13 @@ void P2PTransportChannel::AddRemoteCandidate(const Candidate& candidate) { // the code below this (specifically, ConnectionRequest::Prepare in // port.cc) uses the remote candidates's username. So, we set it // here. 
- if (remote_ice()) { + if (remote_ice_parameters()) { if (candidate.username().empty()) { - new_remote_candidate.set_username(remote_ice()->ufrag); + new_remote_candidate.set_username(remote_ice_parameters()->ufrag); } - if (new_remote_candidate.username() == remote_ice()->ufrag) { + if (new_remote_candidate.username() == remote_ice_parameters()->ufrag) { if (candidate.password().empty()) { - new_remote_candidate.set_password(remote_ice()->pwd); + new_remote_candidate.set_password(remote_ice_parameters()->pwd); } } else { // The candidate belongs to the next generation. Its pwd will be set @@ -1274,8 +1228,10 @@ void P2PTransportChannel::AddRemoteCandidate(const Candidate& candidate) { if (new_remote_candidate.address().IsUnresolvedIP()) { // Don't do DNS lookups if the IceTransportPolicy is "none" or "relay". - bool sharing_host = ((allocator_->candidate_filter() & CF_HOST) != 0); - bool sharing_stun = ((allocator_->candidate_filter() & CF_REFLEXIVE) != 0); + bool sharing_host = + ((allocator_->candidate_filter() & webrtc::CF_HOST) != 0); + bool sharing_stun = + ((allocator_->candidate_filter() & webrtc::CF_REFLEXIVE) != 0); if (sharing_host || sharing_stun) { ResolveHostnameCandidate(new_remote_candidate); } @@ -1287,13 +1243,13 @@ void P2PTransportChannel::AddRemoteCandidate(const Candidate& candidate) { P2PTransportChannel::CandidateAndResolver::CandidateAndResolver( const Candidate& candidate, - std::unique_ptr&& resolver) + std::unique_ptr&& resolver) : candidate_(candidate), resolver_(std::move(resolver)) {} P2PTransportChannel::CandidateAndResolver::~CandidateAndResolver() {} void P2PTransportChannel::OnCandidateResolved( - webrtc::AsyncDnsResolverInterface* resolver) { + AsyncDnsResolverInterface* resolver) { RTC_DCHECK_RUN_ON(network_thread_); auto p = absl::c_find_if(resolvers_, [resolver](const CandidateAndResolver& cr) { @@ -1309,7 +1265,7 @@ void P2PTransportChannel::OnCandidateResolved( // Now we can delete the resolver. // TODO(bugs.webrtc.org/12651): Replace the stuff below with // resolvers_.erase(p); - std::unique_ptr to_delete = + std::unique_ptr to_delete = std::move(p->resolver_); // Delay the actual deletion of the resolver until the lambda executes. network_thread_->PostTask([to_delete = std::move(to_delete)] {}); @@ -1318,7 +1274,7 @@ void P2PTransportChannel::OnCandidateResolved( void P2PTransportChannel::AddRemoteCandidateWithResult( Candidate candidate, - const webrtc::AsyncDnsResolverResult& result) { + const AsyncDnsResolverResult& result) { RTC_DCHECK_RUN_ON(network_thread_); if (result.GetError()) { RTC_LOG(LS_WARNING) << "Failed to resolve ICE candidate hostname " @@ -1327,7 +1283,7 @@ void P2PTransportChannel::AddRemoteCandidateWithResult( return; } - rtc::SocketAddress resolved_address; + SocketAddress resolved_address; // Prefer IPv6 to IPv4 if we have it (see RFC 5245 Section 15.1). // TODO(zstein): This won't work if we only have IPv4 locally but receive an // AAAA DNS record. 
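The ice_transport_internal.h hunks earlier in this patch replace most of the sigslot signals on IceTransportInternal (SignalGatheringState, SignalCandidateError, SignalCandidatesRemoved, SignalCandidatePairChanged) with setter-registered absl::AnyInvocable callbacks plus a tag-keyed gathering-state callback list, and the p2p_transport_channel.cc hunks above wire those callbacks into OnCandidateError() and the gathering-state transitions; SignalCandidateGathered itself still uses sigslot. A minimal consumer-side sketch of the migration follows; the observer type is hypothetical, and the callback parameter lists are inferred from the invocation sites in this patch (candidate_error_callback_(this, event), gathering_state_callback_list_.Send(this)) rather than copied from a published header.

// Sketch only: `IceEventObserver` is a hypothetical caller. It assumes the
// transport outlives the registration and that each Set*Callback is installed
// at most once (the setters RTC_DCHECK that no callback is already present).
#include "p2p/base/ice_transport_internal.h"

class IceEventObserver {
 public:
  explicit IceEventObserver(webrtc::IceTransportInternal* transport)
      : transport_(transport) {
    // Was: transport->SignalGatheringState.connect(this, ...);
    transport_->AddGatheringStateCallback(
        /*removal_tag=*/this,
        [this](webrtc::IceTransportInternal* t) { OnGathering(t); });
    // Was: transport->SignalCandidateError.connect(this, ...);
    transport_->SetCandidateErrorCallback(
        [this](webrtc::IceTransportInternal* t,
               const webrtc::IceCandidateErrorEvent& event) {
          OnCandidateError(t, event);
        });
  }
  ~IceEventObserver() {
    // Only the gathering-state callback is removable; it is keyed by the
    // removal tag passed at registration time.
    transport_->RemoveGatheringStateCallback(/*removal_tag=*/this);
  }

 private:
  void OnGathering(webrtc::IceTransportInternal* /*transport*/) {}
  void OnCandidateError(webrtc::IceTransportInternal* /*transport*/,
                        const webrtc::IceCandidateErrorEvent& /*event*/) {}

  webrtc::IceTransportInternal* const transport_;
};

Because the error, removal and pair-change callbacks have no unregistration path in this patch, anything captured by those lambdas must stay valid for as long as the transport can invoke them.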
@@ -1440,11 +1396,13 @@ bool P2PTransportChannel::CreateConnection(PortInterface* port, } if (ice_field_trials_.skip_relay_to_non_relay_connections) { - if ((port->Type() != remote_candidate.type()) && - (port->Type() == RELAY_PORT_TYPE || - remote_candidate.type() == RELAY_PORT_TYPE)) { + IceCandidateType port_type = port->Type(); + if ((port_type != remote_candidate.type()) && + (port_type == IceCandidateType::kRelay || + remote_candidate.is_relay())) { RTC_LOG(LS_INFO) << ToString() << ": skip creating connection " - << port->Type() << " to " << remote_candidate.type(); + << webrtc::IceCandidateTypeToString(port_type) << " to " + << remote_candidate.type_name(); return false; } } @@ -1461,11 +1419,12 @@ bool P2PTransportChannel::CreateConnection(PortInterface* port, if (origin == PortInterface::ORIGIN_MESSAGE && incoming_only_) { return false; } - Connection* connection = port->CreateConnection(remote_candidate, origin); - if (!connection) { + Connection* new_connection = + port->CreateConnection(remote_candidate, origin); + if (!new_connection) { return false; } - AddConnection(connection); + AddConnection(new_connection); RTC_LOG(LS_INFO) << ToString() << ": Created connection with origin: " << origin << ", total: " << connections_.size(); @@ -1553,9 +1512,9 @@ void P2PTransportChannel::RememberRemoteCandidate( // Set options on ourselves is simply setting options on all of our available // port objects. -int P2PTransportChannel::SetOption(rtc::Socket::Option opt, int value) { +int P2PTransportChannel::SetOption(Socket::Option opt, int value) { RTC_DCHECK_RUN_ON(network_thread_); - if (ice_field_trials_.override_dscp && opt == rtc::Socket::OPT_DSCP) { + if (ice_field_trials_.override_dscp && opt == Socket::OPT_DSCP) { value = *ice_field_trials_.override_dscp; } @@ -1580,7 +1539,7 @@ int P2PTransportChannel::SetOption(rtc::Socket::Option opt, int value) { return 0; } -bool P2PTransportChannel::GetOption(rtc::Socket::Option opt, int* value) { +bool P2PTransportChannel::GetOption(Socket::Option opt, int* value) { RTC_DCHECK_RUN_ON(network_thread_); const auto& found = options_.find(opt); @@ -1599,13 +1558,14 @@ int P2PTransportChannel::GetError() { // Send data to the other side, using our selected connection. int P2PTransportChannel::SendPacket(const char* data, size_t len, - const rtc::PacketOptions& options, + const AsyncSocketPacketOptions& options, int flags) { RTC_DCHECK_RUN_ON(network_thread_); if (flags != 0) { error_ = EINVAL; return -1; } + // If we don't think the connection is working yet, return ENOTCONN // instead of sending a packet that will probably be dropped. 
if (!ReadyToSend(selected_connection_)) { @@ -1615,9 +1575,8 @@ int P2PTransportChannel::SendPacket(const char* data, packets_sent_++; last_sent_packet_id_ = options.packet_id; - rtc::PacketOptions modified_options(options); - modified_options.info_signaled_after_sent.packet_type = - rtc::PacketType::kData; + AsyncSocketPacketOptions modified_options(options); + modified_options.info_signaled_after_sent.packet_type = PacketType::kData; int sent = selected_connection_->Send(data, len, modified_options); if (sent <= 0) { RTC_DCHECK(sent < 0); @@ -1664,24 +1623,23 @@ bool P2PTransportChannel::GetStats(IceTransportStats* ice_transport_stats) { return true; } -absl::optional P2PTransportChannel::network_route() const { +std::optional P2PTransportChannel::network_route() const { RTC_DCHECK_RUN_ON(network_thread_); return network_route_; } -rtc::DiffServCodePoint P2PTransportChannel::DefaultDscpValue() const { +DiffServCodePoint P2PTransportChannel::DefaultDscpValue() const { RTC_DCHECK_RUN_ON(network_thread_); - OptionMap::const_iterator it = options_.find(rtc::Socket::OPT_DSCP); + OptionMap::const_iterator it = options_.find(Socket::OPT_DSCP); if (it == options_.end()) { - return rtc::DSCP_NO_CHANGE; + return webrtc::DSCP_NO_CHANGE; } - return static_cast(it->second); + return static_cast(it->second); } -rtc::ArrayView P2PTransportChannel::connections() const { +ArrayView P2PTransportChannel::connections() const { RTC_DCHECK_RUN_ON(network_thread_); - return rtc::ArrayView(connections_.data(), - connections_.size()); + return ArrayView(connections_.data(), connections_.size()); } void P2PTransportChannel::RemoveConnectionForTest(Connection* connection) { @@ -1698,7 +1656,7 @@ void P2PTransportChannel::RemoveConnectionForTest(Connection* connection) { // Monitor connection states. void P2PTransportChannel::UpdateConnectionStates() { RTC_DCHECK_RUN_ON(network_thread_); - int64_t now = rtc::TimeMillis(); + int64_t now = webrtc::TimeMillis(); // We need to copy the list of connections since some may delete themselves // when we call UpdateState. 
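In the SendPacket() hunk above the options parameter becomes AsyncSocketPacketOptions and the outgoing packet is tagged as PacketType::kData inside the channel, while the existing error behaviour is kept: a non-zero flags value fails with EINVAL, and a channel with no ready selected connection fails with ENOTCONN. A short caller-side sketch follows, assuming `transport` is a live webrtc::IceTransportInternal* (for example a P2PTransportChannel) used on its network thread; the error handling mirrors the code above rather than a documented contract.

// Sketch only: the caller owns `transport` and invokes this on the transport's
// network thread (P2PTransportChannel RTC_DCHECKs the thread in SendPacket).
#include "p2p/base/ice_transport_internal.h"
#include "rtc_base/async_packet_socket.h"

int SendApplicationData(webrtc::IceTransportInternal* transport) {
  static const char kPayload[] = "application data";
  webrtc::AsyncSocketPacketOptions options;
  options.packet_id = 1;  // Surfaces later as last_sent_packet_id in the route.
  // flags must stay 0; any other value makes SendPacket() fail with EINVAL.
  int sent = transport->SendPacket(kPayload, sizeof(kPayload) - 1, options,
                                   /*flags=*/0);
  if (sent < 0) {
    // ENOTCONN is reported while no selected connection is ready to send.
    return transport->GetError();
  }
  return 0;  // `sent` bytes were handed to the selected connection.
}

Callers do not need to populate info_signaled_after_sent themselves; SendPacket() copies the options and sets the packet type to PacketType::kData before forwarding to the selected connection.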
@@ -1719,7 +1677,7 @@ void P2PTransportChannel::OnStartedPinging() { regathering_controller_->Start(); } -bool P2PTransportChannel::IsPortPruned(const Port* port) const { +bool P2PTransportChannel::IsPortPruned(const PortInterface* port) const { RTC_DCHECK_RUN_ON(network_thread_); return !absl::c_linear_search(ports_, port); } @@ -1733,9 +1691,9 @@ bool P2PTransportChannel::PresumedWritable(const Connection* conn) const { RTC_DCHECK_RUN_ON(network_thread_); return (conn->write_state() == Connection::STATE_WRITE_INIT && config_.presume_writable_when_fully_relayed && - conn->local_candidate().type() == RELAY_PORT_TYPE && - (conn->remote_candidate().type() == RELAY_PORT_TYPE || - conn->remote_candidate().type() == PRFLX_PORT_TYPE)); + conn->local_candidate().is_relay() && + (conn->remote_candidate().is_relay() || + conn->remote_candidate().is_prflx())); } void P2PTransportChannel::UpdateState() { @@ -1762,12 +1720,12 @@ void P2PTransportChannel::UpdateState() { bool P2PTransportChannel::AllowedToPruneConnections() const { RTC_DCHECK_RUN_ON(network_thread_); - return ice_role_ == ICEROLE_CONTROLLING || + return ice_role_ == webrtc::ICEROLE_CONTROLLING || (selected_connection_ && selected_connection_->nominated()); } bool P2PTransportChannel::PruneConnections( - rtc::ArrayView connections) { + ArrayView connections) { RTC_DCHECK_RUN_ON(network_thread_); if (!AllowedToPruneConnections()) { RTC_LOG(LS_WARNING) << "Not allowed to prune connections"; @@ -1779,22 +1737,21 @@ bool P2PTransportChannel::PruneConnections( return true; } -rtc::NetworkRoute P2PTransportChannel::ConfigureNetworkRoute( +NetworkRoute P2PTransportChannel::ConfigureNetworkRoute( const Connection* conn) { RTC_DCHECK_RUN_ON(network_thread_); - return { - .connected = ReadyToSend(conn), - .local = CreateRouteEndpointFromCandidate( - /* local= */ true, conn->local_candidate(), - /* uses_turn= */ - conn->port()->Type() == RELAY_PORT_TYPE), - .remote = CreateRouteEndpointFromCandidate( - /* local= */ false, conn->remote_candidate(), - /* uses_turn= */ conn->remote_candidate().type() == RELAY_PORT_TYPE), - .last_sent_packet_id = last_sent_packet_id_, - .packet_overhead = - conn->local_candidate().address().ipaddr().overhead() + - GetProtocolOverhead(conn->local_candidate().protocol())}; + return {.connected = ReadyToSend(conn), + .local = CreateRouteEndpointFromCandidate( + /* local= */ true, conn->local_candidate(), + /* uses_turn= */ + conn->port()->Type() == IceCandidateType::kRelay), + .remote = CreateRouteEndpointFromCandidate( + /* local= */ false, conn->remote_candidate(), + /* uses_turn= */ conn->remote_candidate().is_relay()), + .last_sent_packet_id = last_sent_packet_id_, + .packet_overhead = + conn->local_candidate().address().ipaddr().overhead() + + webrtc::GetProtocolOverhead(conn->local_candidate().protocol())}; } void P2PTransportChannel::SwitchSelectedConnection( @@ -1813,7 +1770,7 @@ void P2PTransportChannel::SwitchSelectedConnectionInternal( // destroyed, so don't use it. 
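// ConfigureNetworkRoute() above now builds the route with C++20 designated
// initializers and asks the candidates directly whether TURN is involved
// (is_relay()). A standalone sketch of the same construction, assuming
// webrtc::NetworkRoute keeps the field order used in the patch and that the
// route endpoints may be left value-initialized:

#include "rtc_base/network_route.h"

webrtc::NetworkRoute MakeMinimalRoute(bool connected,
                                      int last_sent_packet_id,
                                      int packet_overhead) {
  // Fields not listed (local/remote endpoints) are default-initialized.
  return {.connected = connected,
          .last_sent_packet_id = last_sent_packet_id,
          .packet_overhead = packet_overhead};
}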
Connection* old_selected_connection = selected_connection_; selected_connection_ = conn; - LogCandidatePairConfig(conn, webrtc::IceCandidatePairConfigType::kSelected); + LogCandidatePairConfig(conn, IceCandidatePairConfigType::kSelected); network_route_.reset(); if (old_selected_connection) { old_selected_connection->set_selected(false); @@ -1843,7 +1800,7 @@ void P2PTransportChannel::SwitchSelectedConnectionInternal( RTC_LOG(LS_INFO) << ToString() << ": No selected connection"; } - if (conn != nullptr && ice_role_ == ICEROLE_CONTROLLING && + if (conn != nullptr && ice_role_ == webrtc::ICEROLE_CONTROLLING && ((ice_field_trials_.send_ping_on_switch_ice_controlling && old_selected_connection != nullptr) || ice_field_trials_.send_ping_on_selected_ice_controlling)) { @@ -1862,13 +1819,14 @@ void P2PTransportChannel::SwitchSelectedConnectionInternal( if (old_selected_connection) { pair_change.estimated_disconnected_time_ms = - ComputeEstimatedDisconnectedTimeMs(rtc::TimeMillis(), + ComputeEstimatedDisconnectedTimeMs(webrtc::TimeMillis(), old_selected_connection); } else { pair_change.estimated_disconnected_time_ms = 0; } - - SignalCandidatePairChanged(pair_change); + if (candidate_pair_change_callback_) { + candidate_pair_change_callback_(pair_change); + } } ++selected_candidate_pair_changes_; @@ -1912,9 +1870,8 @@ void P2PTransportChannel::UpdateTransportState() { } SetReceiving(receiving); - IceTransportState state = ComputeState(); - webrtc::IceTransportState current_standardized_state = - ComputeIceTransportState(); + IceTransportStateInternal state = ComputeState(); + IceTransportState current_standardized_state = ComputeIceTransportState(); if (state_ != state) { RTC_LOG(LS_INFO) << ToString() << ": Transport channel state changed from " @@ -1925,29 +1882,29 @@ void P2PTransportChannel::UpdateTransportState() { // ICE states from the standard; the difference is covered by // TransportController and PeerConnection. switch (state_) { - case IceTransportState::STATE_INIT: + case IceTransportStateInternal::STATE_INIT: // TODO(deadbeef): Once we implement end-of-candidates signaling, // we shouldn't go from INIT to COMPLETED. - RTC_DCHECK(state == IceTransportState::STATE_CONNECTING || - state == IceTransportState::STATE_COMPLETED || - state == IceTransportState::STATE_FAILED); + RTC_DCHECK(state == IceTransportStateInternal::STATE_CONNECTING || + state == IceTransportStateInternal::STATE_COMPLETED || + state == IceTransportStateInternal::STATE_FAILED); break; - case IceTransportState::STATE_CONNECTING: - RTC_DCHECK(state == IceTransportState::STATE_COMPLETED || - state == IceTransportState::STATE_FAILED); + case IceTransportStateInternal::STATE_CONNECTING: + RTC_DCHECK(state == IceTransportStateInternal::STATE_COMPLETED || + state == IceTransportStateInternal::STATE_FAILED); break; - case IceTransportState::STATE_COMPLETED: + case IceTransportStateInternal::STATE_COMPLETED: // TODO(deadbeef): Once we implement end-of-candidates signaling, // we shouldn't go from COMPLETED to CONNECTING. // Though we *can* go from COMPlETED to FAILED, if consent expires. - RTC_DCHECK(state == IceTransportState::STATE_CONNECTING || - state == IceTransportState::STATE_FAILED); + RTC_DCHECK(state == IceTransportStateInternal::STATE_CONNECTING || + state == IceTransportStateInternal::STATE_FAILED); break; - case IceTransportState::STATE_FAILED: + case IceTransportStateInternal::STATE_FAILED: // TODO(deadbeef): Once we implement end-of-candidates signaling, // we shouldn't go from FAILED to CONNECTING or COMPLETED. 
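// The notification above changes from the sigslot signal
// SignalCandidatePairChanged(pair_change) to a stored callback that is only
// invoked when a listener registered one. A caller-side sketch; the setter
// name SetCandidatePairChangeCallback and the webrtc:: spelling of
// CandidatePairChangeEvent are assumptions, since the registration side is
// not part of this hunk:

#include "p2p/base/ice_transport_internal.h"

void ObservePairChanges(webrtc::IceTransportInternal& transport) {
  transport.SetCandidatePairChangeCallback(
      [](const webrtc::CandidatePairChangeEvent& event) {
        // estimated_disconnected_time_ms is filled in as computed above;
        // 0 means there was no previously selected connection.
      });
}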
- RTC_DCHECK(state == IceTransportState::STATE_CONNECTING || - state == IceTransportState::STATE_COMPLETED); + RTC_DCHECK(state == IceTransportStateInternal::STATE_CONNECTING || + state == IceTransportStateInternal::STATE_COMPLETED); break; default: RTC_DCHECK_NOTREACHED(); @@ -2063,7 +2020,7 @@ void P2PTransportChannel::PingConnection(Connection* conn) { RTC_DCHECK_RUN_ON(network_thread_); bool use_candidate_attr = false; uint32_t nomination = 0; - if (ice_role_ == ICEROLE_CONTROLLING) { + if (ice_role_ == webrtc::ICEROLE_CONTROLLING) { bool renomination_supported = ice_parameters_.renomination && !remote_ice_parameters_.empty() && remote_ice_parameters_.back().renomination; @@ -2075,7 +2032,7 @@ void P2PTransportChannel::PingConnection(Connection* conn) { } conn->set_nomination(nomination); conn->set_use_candidate_attr(use_candidate_attr); - last_ping_sent_ms_ = rtc::TimeMillis(); + last_ping_sent_ms_ = webrtc::TimeMillis(); conn->Ping(last_ping_sent_ms_, stun_dict_writer_.CreateDelta()); } @@ -2151,8 +2108,10 @@ void P2PTransportChannel::RemoveConnection(Connection* connection) { RTC_DCHECK_RUN_ON(network_thread_); auto it = absl::c_find(connections_, connection); RTC_DCHECK(it != connections_.end()); + connection->DeregisterReceivedPacketCallback(); connections_.erase(it); connection->ClearStunDictConsumer(); + connection->DeregisterDtlsPiggyback(); ice_controller_->OnConnectionDestroyed(connection); } @@ -2170,7 +2129,7 @@ void P2PTransportChannel::OnPortDestroyed(PortInterface* port) { } void P2PTransportChannel::OnPortsPruned( - PortAllocatorSession* session, + PortAllocatorSession* /* session */, const std::vector& ports) { RTC_DCHECK_RUN_ON(network_thread_); for (PortInterface* port : ports) { @@ -2197,7 +2156,9 @@ void P2PTransportChannel::OnCandidatesRemoved( candidate.set_transport_name(transport_name()); candidates_to_remove.push_back(candidate); } - SignalCandidatesRemoved(this, candidates_to_remove); + if (candidates_removed_callback_) { + candidates_removed_callback_(this, candidates_to_remove); + } } void P2PTransportChannel::PruneAllPorts() { @@ -2221,45 +2182,34 @@ bool P2PTransportChannel::PrunePort(PortInterface* port) { // We data is available, let listeners know void P2PTransportChannel::OnReadPacket(Connection* connection, - const char* data, - size_t len, - int64_t packet_time_us) { - RTC_DCHECK_RUN_ON(network_thread_); - - if (connection == selected_connection_) { - // Let the client know of an incoming packet - packets_received_++; - bytes_received_ += len; - RTC_DCHECK(connection->last_data_received() >= last_data_received_ms_); - last_data_received_ms_ = - std::max(last_data_received_ms_, connection->last_data_received()); - SignalReadPacket(this, data, len, packet_time_us, 0); + const ReceivedIpPacket& packet) { + RTC_DCHECK_RUN_ON(network_thread_); + if (connection != selected_connection_ && !FindConnection(connection)) { + // Do not deliver, if packet doesn't belong to the correct transport + // channel. + RTC_DCHECK_NOTREACHED(); return; } - // Do not deliver, if packet doesn't belong to the correct transport - // channel. 
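// OnReadPacket() above now receives a ReceivedIpPacket instead of a raw
// (data, len, packet_time_us) triple: the byte count comes from
// payload().size() and delivery goes through NotifyPacketReceived(). A tiny
// handler sketch over the same type (the counting helper is hypothetical):

#include <cstddef>

#include "rtc_base/network/received_packet.h"

size_t CountPayloadBytes(const webrtc::ReceivedIpPacket& packet) {
  // payload() is a view over the packet bytes, so there is no separate `len`
  // to keep in sync with a data pointer any more.
  return packet.payload().size();
}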
- if (!FindConnection(connection)) - return; - + // Let the client know of an incoming packet packets_received_++; - bytes_received_ += len; + bytes_received_ += packet.payload().size(); RTC_DCHECK(connection->last_data_received() >= last_data_received_ms_); last_data_received_ms_ = std::max(last_data_received_ms_, connection->last_data_received()); - // Let the client know of an incoming packet - SignalReadPacket(this, data, len, packet_time_us, 0); + NotifyPacketReceived(packet); // May need to switch the sending connection based on the receiving media // path if this is the controlled side. - if (ice_role_ == ICEROLE_CONTROLLED) { + if (ice_role_ == webrtc::ICEROLE_CONTROLLED && + connection != selected_connection_) { ice_controller_->OnImmediateSwitchRequest(IceSwitchReason::DATA_RECEIVED, connection); } } -void P2PTransportChannel::OnSentPacket(const rtc::SentPacket& sent_packet) { +void P2PTransportChannel::OnSentPacket(const SentPacketInfo& sent_packet) { RTC_DCHECK_RUN_ON(network_thread_); SignalSentPacket(this, sent_packet); @@ -2310,14 +2260,20 @@ Candidate P2PTransportChannel::SanitizeRemoteCandidate( bool use_hostname_address = absl::EndsWith(c.address().hostname(), LOCAL_TLD); // Remove the address for prflx remote candidates. See // https://w3c.github.io/webrtc-stats/#dom-rtcicecandidatestats. - use_hostname_address |= c.type() == PRFLX_PORT_TYPE; + use_hostname_address |= c.is_prflx(); + // Filter remote ufrag of peer-reflexive candidates before any ICE parameters + // are known. + uint32_t remote_generation = 0; + bool filter_ufrag = + c.is_prflx() && + FindRemoteIceFromUfrag(c.username(), &remote_generation) == nullptr; return c.ToSanitizedCopy(use_hostname_address, - false /* filter_related_address */); + false /* filter_related_address */, filter_ufrag); } void P2PTransportChannel::LogCandidatePairConfig( Connection* conn, - webrtc::IceCandidatePairConfigType type) { + IceCandidatePairConfigType type) { RTC_DCHECK_RUN_ON(network_thread_); if (conn == nullptr) { return; @@ -2343,7 +2299,7 @@ std::unique_ptr P2PTransportChannel::GoogDeltaReceived( } void P2PTransportChannel::GoogDeltaAckReceived( - webrtc::RTCErrorOr error_or_ack) { + RTCErrorOr error_or_ack) { if (error_or_ack.ok()) { RTC_LOG(LS_ERROR) << "Applied GOOG_DELTA_ACK"; auto ack = error_or_ack.value(); @@ -2356,4 +2312,20 @@ void P2PTransportChannel::GoogDeltaAckReceived( } } -} // namespace cricket +void P2PTransportChannel::SetDtlsStunPiggybackCallbacks( + DtlsStunPiggybackCallbacks&& callbacks) { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(connections_.empty()); + RTC_DCHECK(!callbacks.empty()); + dtls_stun_piggyback_callbacks_ = std::move(callbacks); +} + +void P2PTransportChannel::ResetDtlsStunPiggybackCallbacks() { + RTC_DCHECK_RUN_ON(network_thread_); + dtls_stun_piggyback_callbacks_.reset(); + for (auto& connection : connections_) { + connection->DeregisterDtlsPiggyback(); + } +} + +} // namespace webrtc diff --git a/p2p/base/p2p_transport_channel.h b/p2p/base/p2p_transport_channel.h index 1e0d1e339a..a42d2916c5 100644 --- a/p2p/base/p2p_transport_channel.h +++ b/p2p/base/p2p_transport_channel.h @@ -23,36 +23,30 @@ #include #include -#include +#include #include #include -#include +#include #include -#include #include -#include "absl/base/attributes.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/async_dns_resolver.h" -#include "api/async_resolver_factory.h" #include "api/candidate.h" #include "api/ice_transport_interface.h" 
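// The new SetDtlsStunPiggybackCallbacks()/ResetDtlsStunPiggybackCallbacks()
// definitions above gate DTLS-in-STUN piggybacking: per the RTC_DCHECKs, the
// callbacks must be non-empty and installed before any Connection exists,
// and resetting deregisters the piggyback hook on every live connection. A
// usage-order sketch; how a DtlsStunPiggybackCallbacks instance is built is
// outside this patch and left to the caller:

#include <utility>

#include "p2p/base/p2p_transport_channel.h"
#include "p2p/dtls/dtls_stun_piggyback_callbacks.h"

void ConfigurePiggybacking(webrtc::P2PTransportChannel& channel,
                           webrtc::DtlsStunPiggybackCallbacks callbacks) {
  // 1. Install before gathering/remote candidates create any connections;
  //    otherwise the RTC_DCHECK(connections_.empty()) above fires.
  channel.SetDtlsStunPiggybackCallbacks(std::move(callbacks));
  // ... DTLS handshake data piggybacks on STUN from here on ...
  // 2. Tear down once DTLS no longer needs the STUN path.
  channel.ResetDtlsStunPiggybackCallbacks();
}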
#include "api/rtc_error.h" #include "api/sequence_checker.h" -#include "api/task_queue/pending_task_safety_flag.h" #include "api/transport/enums.h" #include "api/transport/stun.h" #include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" #include "logging/rtc_event_log/ice_logger.h" #include "p2p/base/active_ice_controller_factory_interface.h" -#include "p2p/base/basic_async_resolver_factory.h" +#include "p2p/base/active_ice_controller_interface.h" #include "p2p/base/candidate_pair_interface.h" #include "p2p/base/connection.h" #include "p2p/base/ice_agent_interface.h" #include "p2p/base/ice_controller_factory_interface.h" -#include "p2p/base/ice_controller_interface.h" #include "p2p/base/ice_switch_reason.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" @@ -63,9 +57,11 @@ #include "p2p/base/regathering_controller.h" #include "p2p/base/stun_dictionary.h" #include "p2p/base/transport_description.h" +#include "p2p/dtls/dtls_stun_piggyback_callbacks.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/checks.h" #include "rtc_base/dscp.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" #include "rtc_base/socket.h" @@ -79,13 +75,7 @@ namespace webrtc { class RtcEventLog; } // namespace webrtc -namespace cricket { - -// Enum for UMA metrics, used to record whether the channel is -// connected/connecting/disconnected when ICE restart happens. -enum class IceRestartState { CONNECTING, CONNECTED, DISCONNECTED, MAX_VALUE }; - -static const int MIN_PINGS_AT_WEAK_PING_INTERVAL = 3; +namespace webrtc { bool IceCredentialsChanged(absl::string_view old_ufrag, absl::string_view old_pwd, @@ -112,14 +102,14 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, static std::unique_ptr Create( absl::string_view transport_name, int component, - webrtc::IceTransportInit init); + IceTransportInit init); // For testing only. // TODO(zstein): Remove once AsyncDnsResolverFactory is required. P2PTransportChannel(absl::string_view transport_name, int component, PortAllocator* allocator, - const webrtc::FieldTrialsView* field_trials = nullptr); + const FieldTrialsView* field_trials = nullptr); ~P2PTransportChannel() override; @@ -127,8 +117,8 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, P2PTransportChannel& operator=(const P2PTransportChannel&) = delete; // From TransportChannelImpl: - IceTransportState GetState() const override; - webrtc::IceTransportState GetIceTransportState() const override; + IceTransportStateInternal GetState() const override; + IceTransportState GetIceTransportState() const override; const std::string& transport_name() const override; int component() const override; @@ -136,7 +126,6 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, bool receiving() const override; void SetIceRole(IceRole role) override; IceRole GetIceRole() const override; - void SetIceTiebreaker(uint64_t tiebreaker) override; void SetIceParameters(const IceParameters& ice_params) override; void SetRemoteIceParameters(const IceParameters& ice_params) override; void SetRemoteIceMode(IceMode mode) override; @@ -153,23 +142,22 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, // only update the parameter if it is considered set in `config`. For example, // a negative value of receiving_timeout will be considered "not set" and we // will not use it to update the respective parameter in `config_`. 
- // TODO(deadbeef): Use absl::optional instead of negative values. + // TODO(deadbeef): Use std::optional instead of negative values. void SetIceConfig(const IceConfig& config) override; - const IceConfig& config() const; - static webrtc::RTCError ValidateIceConfig(const IceConfig& config); + const IceConfig& config() const override; // From TransportChannel: int SendPacket(const char* data, size_t len, - const rtc::PacketOptions& options, + const AsyncSocketPacketOptions& options, int flags) override; - int SetOption(rtc::Socket::Option opt, int value) override; - bool GetOption(rtc::Socket::Option opt, int* value) override; + int SetOption(Socket::Option opt, int value) override; + bool GetOption(Socket::Option opt, int* value) override; int GetError() override; bool GetStats(IceTransportStats* ice_transport_stats) override; - absl::optional GetRttEstimate() override; + std::optional GetRttEstimate() override; const Connection* selected_connection() const override; - absl::optional GetSelectedCandidatePair() const override; + std::optional GetSelectedCandidatePair() const override; // From IceAgentInterface void OnStartedPinging() override; @@ -180,9 +168,9 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, void SwitchSelectedConnection(const Connection* connection, IceSwitchReason reason) override; void ForgetLearnedStateForConnections( - rtc::ArrayView connections) override; + ArrayView connections) override; bool PruneConnections( - rtc::ArrayView connections) override; + ArrayView connections) override; // TODO(honghaiz): Remove this method once the reference of it in // Chromoting is removed. @@ -214,19 +202,19 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, void PruneAllPorts(); int check_receiving_interval() const; - absl::optional network_route() const override; + std::optional network_route() const override; void RemoveConnection(Connection* connection); // Helper method used only in unittest. - rtc::DiffServCodePoint DefaultDscpValue() const; + DiffServCodePoint DefaultDscpValue() const; // Public for unit tests. Connection* FindNextPingableConnection(); void MarkConnectionPinged(Connection* conn); // Public for unit tests. - rtc::ArrayView connections() const; + ArrayView connections() const; void RemoveConnectionForTest(Connection* connection); // Public for unit tests. @@ -248,32 +236,51 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, RTC_DCHECK_RUN_ON(network_thread_); const std::string RECEIVING_ABBREV[2] = {"_", "R"}; const std::string WRITABLE_ABBREV[2] = {"_", "W"}; - rtc::StringBuilder ss; + StringBuilder ss; ss << "Channel[" << transport_name_ << "|" << component_ << "|" << RECEIVING_ABBREV[receiving_] << WRITABLE_ABBREV[writable_] << "]"; return ss.Release(); } - absl::optional> + std::optional> GetDictionaryWriter() override { return stun_dict_writer_; } + const FieldTrialsView* field_trials() const override { return field_trials_; } + + void ResetDtlsStunPiggybackCallbacks() override; + void SetDtlsStunPiggybackCallbacks( + DtlsStunPiggybackCallbacks&& callbacks) override; + + // Returns the local ICE parameters. + const IceParameters* local_ice_parameters() const override { + RTC_DCHECK_RUN_ON(network_thread_); + return &ice_parameters_; + } + // Returns the latest remote ICE parameters or nullptr if there are no remote + // ICE parameters yet. + const IceParameters* remote_ice_parameters() const override { + RTC_DCHECK_RUN_ON(network_thread_); + return remote_ice_parameters_.empty() ? 
nullptr + : &remote_ice_parameters_.back(); + } + private: P2PTransportChannel( absl::string_view transport_name, int component, PortAllocator* allocator, // DNS resolver factory - webrtc::AsyncDnsResolverFactoryInterface* async_dns_resolver_factory, + AsyncDnsResolverFactoryInterface* async_dns_resolver_factory, // If the P2PTransportChannel has to delete the DNS resolver factory // on release, this pointer is set. - std::unique_ptr + std::unique_ptr owned_dns_resolver_factory, - webrtc::RtcEventLog* event_log, + RtcEventLog* event_log, IceControllerFactoryInterface* ice_controller_factory, ActiveIceControllerFactoryInterface* active_ice_controller_factory, - const webrtc::FieldTrialsView* field_trials); + const FieldTrialsView* field_trials); bool IsGettingPorts() { RTC_DCHECK_RUN_ON(network_thread_); @@ -285,7 +292,7 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, bool PresumedWritable(const Connection* conn) const; void SendPingRequestInternal(Connection* connection); - rtc::NetworkRoute ConfigureNetworkRoute(const Connection* conn); + NetworkRoute ConfigureNetworkRoute(const Connection* conn); void SwitchSelectedConnectionInternal(Connection* conn, IceSwitchReason reason); void UpdateTransportState(); @@ -297,8 +304,8 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, // https://w3c.github.io/webrtc-pc/#dom-rtcicetransportstate. ComputeState // computes the value we currently export as RTCIceTransportState. // TODO(bugs.webrtc.org/9308): Remove ComputeState once it's no longer used. - IceTransportState ComputeState() const; - webrtc::IceTransportState ComputeIceTransportState() const; + IceTransportStateInternal ComputeState() const; + IceTransportState ComputeIceTransportState() const; bool CreateConnections(const Candidate& remote_candidate, PortInterface* origin_port); @@ -326,7 +333,7 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, const std::vector& candidates); void OnCandidatesAllocationDone(PortAllocatorSession* session); void OnUnknownAddress(PortInterface* port, - const rtc::SocketAddress& addr, + const SocketAddress& addr, ProtocolType proto, IceMessage* stun_msg, const std::string& remote_username, @@ -342,35 +349,25 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, void OnRoleConflict(PortInterface* port); void OnConnectionStateChange(Connection* connection); - void OnReadPacket(Connection* connection, - const char* data, - size_t len, - int64_t packet_time_us); - void OnSentPacket(const rtc::SentPacket& sent_packet); + void OnReadPacket(Connection* connection, const ReceivedIpPacket& packet); + void OnSentPacket(const SentPacketInfo& sent_packet); void OnReadyToSend(Connection* connection); void OnConnectionDestroyed(Connection* connection); void OnNominated(Connection* conn); void LogCandidatePairConfig(Connection* conn, - webrtc::IceCandidatePairConfigType type); + IceCandidatePairConfigType type); uint32_t GetNominationAttr(Connection* conn) const; bool GetUseCandidateAttr(Connection* conn) const; bool AllowedToPruneConnections() const; - // Returns the latest remote ICE parameters or nullptr if there are no remote - // ICE parameters yet. - IceParameters* remote_ice() { - RTC_DCHECK_RUN_ON(network_thread_); - return remote_ice_parameters_.empty() ? nullptr - : &remote_ice_parameters_.back(); - } // Returns the remote IceParameters and generation that match `ufrag` // if found, and returns nullptr otherwise. 
const IceParameters* FindRemoteIceFromUfrag(absl::string_view ufrag, - uint32_t* generation); + uint32_t* generation) const; // Returns the index of the latest remote ICE parameters, or 0 if no remote // ICE parameters have been received. uint32_t remote_ice_generation() { @@ -381,7 +378,7 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, } // Indicates if the given local port has been pruned. - bool IsPortPruned(const Port* port) const; + bool IsPortPruned(const PortInterface* port) const; // Indicates if the given remote candidate has been pruned. bool IsRemoteCandidatePruned(const Candidate& cand) const; @@ -413,16 +410,16 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, int64_t ComputeEstimatedDisconnectedTimeMs(int64_t now, Connection* old_connection); - void ParseFieldTrials(const webrtc::FieldTrialsView* field_trials); + void ParseFieldTrials(const FieldTrialsView* field_trials); std::string transport_name_ RTC_GUARDED_BY(network_thread_); int component_ RTC_GUARDED_BY(network_thread_); PortAllocator* allocator_ RTC_GUARDED_BY(network_thread_); - webrtc::AsyncDnsResolverFactoryInterface* const async_dns_resolver_factory_ + AsyncDnsResolverFactoryInterface* const async_dns_resolver_factory_ RTC_GUARDED_BY(network_thread_); - const std::unique_ptr + const std::unique_ptr owned_dns_resolver_factory_; - rtc::Thread* const network_thread_; + Thread* const network_thread_; bool incoming_only_ RTC_GUARDED_BY(network_thread_); int error_ RTC_GUARDED_BY(network_thread_); std::vector> allocator_sessions_ @@ -443,25 +440,24 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, RTC_GUARDED_BY(network_thread_); bool had_connection_ RTC_GUARDED_BY(network_thread_) = false; // if connections_ has ever been nonempty - typedef std::map OptionMap; + typedef std::map OptionMap; OptionMap options_ RTC_GUARDED_BY(network_thread_); IceParameters ice_parameters_ RTC_GUARDED_BY(network_thread_); std::vector remote_ice_parameters_ RTC_GUARDED_BY(network_thread_); IceMode remote_ice_mode_ RTC_GUARDED_BY(network_thread_); IceRole ice_role_ RTC_GUARDED_BY(network_thread_); - uint64_t tiebreaker_ RTC_GUARDED_BY(network_thread_); IceGatheringState gathering_state_ RTC_GUARDED_BY(network_thread_); - std::unique_ptr regathering_controller_ + std::unique_ptr regathering_controller_ RTC_GUARDED_BY(network_thread_); int64_t last_ping_sent_ms_ RTC_GUARDED_BY(network_thread_) = 0; int weak_ping_interval_ RTC_GUARDED_BY(network_thread_) = WEAK_PING_INTERVAL; // TODO(jonasolsson): Remove state_ and rename standardized_state_ once state_ // is no longer used to compute the ICE connection state. - IceTransportState state_ RTC_GUARDED_BY(network_thread_) = - IceTransportState::STATE_INIT; - webrtc::IceTransportState standardized_state_ - RTC_GUARDED_BY(network_thread_) = webrtc::IceTransportState::kNew; + IceTransportStateInternal state_ RTC_GUARDED_BY(network_thread_) = + IceTransportStateInternal::STATE_INIT; + IceTransportState standardized_state_ RTC_GUARDED_BY(network_thread_) = + IceTransportState::kNew; IceConfig config_ RTC_GUARDED_BY(network_thread_); int last_sent_packet_id_ RTC_GUARDED_BY(network_thread_) = -1; // -1 indicates no packet was sent before. 
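// options_ above is a std::map keyed on Socket::Option (the OptionMap
// typedef), now spelled without the rtc:: prefix. A caller-side sketch of
// the SetOption()/GetOption() round trip from the earlier .cc hunks, using
// the DSCP option they special-case; DSCP_AF41 is just an example value:

#include "p2p/base/ice_transport_internal.h"
#include "rtc_base/dscp.h"
#include "rtc_base/socket.h"

void TagMediaTraffic(webrtc::IceTransportInternal& transport) {
  // Stored in options_ and applied to every underlying port.
  transport.SetOption(webrtc::Socket::OPT_DSCP, webrtc::DSCP_AF41);
  int applied = 0;
  if (transport.GetOption(webrtc::Socket::OPT_DSCP, &applied)) {
    // With the override_dscp field trial, `applied` reflects the overridden
    // value, since SetOption() above rewrites it before storing.
  }
}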
@@ -473,35 +469,32 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, bool has_been_writable_ RTC_GUARDED_BY(network_thread_) = false; // if writable_ has ever been true - absl::optional network_route_ - RTC_GUARDED_BY(network_thread_); - webrtc::IceEventLog ice_event_log_ RTC_GUARDED_BY(network_thread_); + std::optional network_route_ RTC_GUARDED_BY(network_thread_); + IceEventLog ice_event_log_ RTC_GUARDED_BY(network_thread_); std::unique_ptr ice_controller_ RTC_GUARDED_BY(network_thread_); struct CandidateAndResolver final { - CandidateAndResolver( - const Candidate& candidate, - std::unique_ptr&& resolver); + CandidateAndResolver(const Candidate& candidate, + std::unique_ptr&& resolver); ~CandidateAndResolver(); // Moveable, but not copyable. CandidateAndResolver(CandidateAndResolver&&) = default; CandidateAndResolver& operator=(CandidateAndResolver&&) = default; Candidate candidate_; - std::unique_ptr resolver_; + std::unique_ptr resolver_; }; std::vector resolvers_ RTC_GUARDED_BY(network_thread_); void FinishAddingRemoteCandidate(const Candidate& new_remote_candidate); - void OnCandidateResolved(webrtc::AsyncDnsResolverInterface* resolver); - void AddRemoteCandidateWithResult( - Candidate candidate, - const webrtc::AsyncDnsResolverResult& result); + void OnCandidateResolved(AsyncDnsResolverInterface* resolver); + void AddRemoteCandidateWithResult(Candidate candidate, + const AsyncDnsResolverResult& result); std::unique_ptr GoogDeltaReceived( const StunByteStringAttribute*); - void GoogDeltaAckReceived(webrtc::RTCErrorOr); + void GoogDeltaAckReceived(RTCErrorOr); // Bytes/packets sent/received on this channel. uint64_t bytes_sent_ = 0; @@ -513,19 +506,34 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal, uint32_t selected_candidate_pair_changes_ = 0; // When was last data received on a existing connection, - // from connection->last_data_received() that uses rtc::TimeMillis(). + // from connection->last_data_received() that uses webrtc::TimeMillis(). int64_t last_data_received_ms_ = 0; // Parsed field trials. IceFieldTrials ice_field_trials_; + // Unparsed field trials. + const FieldTrialsView* field_trials_; // A dictionary of attributes that will be reflected to peer. StunDictionaryWriter stun_dict_writer_; // A dictionary that tracks attributes from peer. StunDictionaryView stun_dict_view_; + + // DTLS-STUN piggybacking callbacks. + DtlsStunPiggybackCallbacks dtls_stun_piggyback_callbacks_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::IceCredentialsChanged; +using ::webrtc::P2PTransportChannel; +using ::webrtc::RemoteCandidate; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_P2P_TRANSPORT_CHANNEL_H_ diff --git a/p2p/base/p2p_transport_channel_ice_field_trials.h b/p2p/base/p2p_transport_channel_ice_field_trials.h index 96a7756484..a176fad401 100644 --- a/p2p/base/p2p_transport_channel_ice_field_trials.h +++ b/p2p/base/p2p_transport_channel_ice_field_trials.h @@ -11,9 +11,9 @@ #ifndef P2P_BASE_P2P_TRANSPORT_CHANNEL_ICE_FIELD_TRIALS_H_ #define P2P_BASE_P2P_TRANSPORT_CHANNEL_ICE_FIELD_TRIALS_H_ -#include "absl/types/optional.h" +#include -namespace cricket { +namespace webrtc { // Field trials for P2PTransportChannel and friends, // put in separate file so that they can be shared e.g @@ -23,17 +23,17 @@ struct IceFieldTrials { // TODO(jonaso) : Consider how members of this struct can be made const. bool skip_relay_to_non_relay_connections = false; - absl::optional max_outstanding_pings; + std::optional max_outstanding_pings; // Wait X ms before selecting a connection when having none. // This will make media slower, but will give us chance to find // a better connection before starting. - absl::optional initial_select_dampening; + std::optional initial_select_dampening; // If the connection has recevied a ping-request, delay by // maximum this delay. This will make media slower, but will // give us chance to find a better connection before starting. - absl::optional initial_select_dampening_ping_received; + std::optional initial_select_dampening_ping_received; // Announce GOOG_PING support in STUN_BINDING_RESPONSE if requested // by peer. @@ -66,7 +66,7 @@ struct IceFieldTrials { bool stop_gather_on_strongly_connected = true; // DSCP taging. - absl::optional override_dscp; + std::optional override_dscp; bool piggyback_ice_check_acknowledgement = false; bool extra_ice_ping = false; @@ -76,6 +76,14 @@ struct IceFieldTrials { bool answer_goog_delta = true; // answer GOOG DELTA }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
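// The re-export blocks above and below keep the old cricket:: spellings
// compiling via using-declarations guarded by
// WEBRTC_ALLOW_DEPRECATED_NAMESPACES, while the definitions move to
// webrtc::. A sketch of what that buys downstream code while the guard is
// defined (and only then):

#include <type_traits>

#include "p2p/base/p2p_transport_channel.h"

#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES
// Both spellings name the same type; the cricket:: alias is slated for
// removal under bugs.webrtc.org/4222596.
static_assert(std::is_same_v<cricket::P2PTransportChannel,
                             webrtc::P2PTransportChannel>,
              "cricket::P2PTransportChannel is only an alias");
#endif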
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::IceFieldTrials; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_P2P_TRANSPORT_CHANNEL_ICE_FIELD_TRIALS_H_ diff --git a/p2p/base/p2p_transport_channel_unittest.cc b/p2p/base/p2p_transport_channel_unittest.cc index e414e3f558..b042942f92 100644 --- a/p2p/base/p2p_transport_channel_unittest.cc +++ b/p2p/base/p2p_transport_channel_unittest.cc @@ -10,28 +10,63 @@ #include "p2p/base/p2p_transport_channel.h" +#include +#include +#include #include +#include #include +#include #include -#include #include +#include #include "absl/algorithm/container.h" +#include "absl/functional/any_invocable.h" #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/async_dns_resolver.h" +#include "api/candidate.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/field_trials.h" +#include "api/ice_transport_interface.h" +#include "api/packet_socket_factory.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/test/mock_async_dns_resolver.h" -#include "p2p/base/active_ice_controller_factory_interface.h" -#include "p2p/base/active_ice_controller_interface.h" +#include "api/test/rtc_error_matchers.h" +#include "api/transport/enums.h" +#include "api/transport/stun.h" +#include "api/units/time_delta.h" #include "p2p/base/basic_ice_controller.h" +#include "p2p/base/basic_packet_socket_factory.h" +#include "p2p/base/candidate_pair_interface.h" #include "p2p/base/connection.h" -#include "p2p/base/fake_port_allocator.h" +#include "p2p/base/connection_info.h" +#include "p2p/base/ice_controller_factory_interface.h" +#include "p2p/base/ice_controller_interface.h" +#include "p2p/base/ice_switch_reason.h" #include "p2p/base/ice_transport_internal.h" -#include "p2p/base/mock_active_ice_controller.h" -#include "p2p/base/mock_async_resolver.h" -#include "p2p/base/mock_ice_controller.h" +#include "p2p/base/p2p_constants.h" #include "p2p/base/packet_transport_internal.h" -#include "p2p/base/test_stun_server.h" -#include "p2p/base/test_turn_server.h" +#include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" +#include "p2p/base/port_interface.h" +#include "p2p/base/stun_dictionary.h" +#include "p2p/base/transport_description.h" #include "p2p/client/basic_port_allocator.h" +#include "p2p/dtls/dtls_stun_piggyback_callbacks.h" +#include "p2p/test/fake_port_allocator.h" +#include "p2p/test/mock_active_ice_controller.h" +#include "p2p/test/mock_ice_controller.h" +#include "p2p/test/nat_socket_factory.h" +#include "p2p/test/nat_types.h" +#include "p2p/test/stun_server.h" +#include "p2p/test/test_stun_server.h" +#include "p2p/test/test_turn_server.h" +#include "rtc_base/buffer.h" +#include "rtc_base/byte_buffer.h" #include "rtc_base/checks.h" #include "rtc_base/dscp.h" #include "rtc_base/fake_clock.h" @@ -39,41 +74,54 @@ #include "rtc_base/fake_network.h" #include "rtc_base/firewall_socket_server.h" #include "rtc_base/gunit.h" -#include "rtc_base/helpers.h" #include "rtc_base/internal/default_socket_server.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/mdns_responder_interface.h" -#include "rtc_base/nat_server.h" -#include "rtc_base/nat_socket_factory.h" -#include "rtc_base/proxy_server.h" +#include "rtc_base/net_helper.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/network.h" +#include "rtc_base/network/received_packet.h" +#include 
"rtc_base/network/sent_packet.h" +#include "rtc_base/network_constants.h" +#include "rtc_base/network_route.h" +#include "rtc_base/socket.h" #include "rtc_base/socket_address.h" -#include "rtc_base/ssl_adapter.h" -#include "rtc_base/strings/string_builder.h" +#include "rtc_base/socket_server.h" +#include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" +#include "rtc_base/time_utils.h" #include "rtc_base/virtual_socket_server.h" #include "system_wrappers/include/metrics.h" -#include "test/scoped_key_value_config.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/wait_until.h" namespace { -using rtc::SocketAddress; using ::testing::_; using ::testing::Assign; -using ::testing::Combine; using ::testing::Contains; using ::testing::DoAll; -using ::testing::InSequence; -using ::testing::InvokeWithoutArgs; +using ::testing::Eq; +using ::testing::Gt; +using ::testing::IsFalse; +using ::testing::IsTrue; using ::testing::MockFunction; +using ::testing::Ne; using ::testing::Return; using ::testing::ReturnRef; -using ::testing::SaveArg; using ::testing::SetArgPointee; using ::testing::SizeIs; using ::testing::Values; using ::testing::WithParamInterface; +using ::webrtc::CreateEnvironment; +using ::webrtc::Environment; +using ::webrtc::FieldTrials; +using ::webrtc::IceCandidateType; using ::webrtc::PendingTaskSafetyFlag; using ::webrtc::SafeTask; +using ::webrtc::SocketAddress; // Default timeout for tests in this file. // Should be large enough for slow buildbots to run the tests reliably. @@ -81,9 +129,9 @@ static const int kDefaultTimeout = 10000; static const int kMediumTimeout = 3000; static const int kShortTimeout = 1000; -static const int kOnlyLocalPorts = cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_DISABLE_RELAY | - cricket::PORTALLOCATOR_DISABLE_TCP; +static const int kOnlyLocalPorts = webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_DISABLE_TCP; static const int LOW_RTT = 20; // Addresses on the public internet. static const SocketAddress kPublicAddrs[2] = {SocketAddress("11.11.11.11", 0), @@ -98,12 +146,6 @@ static const SocketAddress kAlternateAddrs[2] = { static const SocketAddress kIPv6AlternateAddrs[2] = { SocketAddress("2401:4030:1:2c00:be30:abcd:efab:cdef", 0), SocketAddress("2601:0:1000:1b03:2e41:38ff:fea6:f2a4", 0)}; -// Addresses for HTTP proxy servers. -static const SocketAddress kHttpsProxyAddrs[2] = { - SocketAddress("11.11.11.1", 443), SocketAddress("22.22.22.1", 443)}; -// Addresses for SOCKS proxy servers. -static const SocketAddress kSocksProxyAddrs[2] = { - SocketAddress("11.11.11.1", 1080), SocketAddress("22.22.22.1", 1080)}; // Internal addresses for NAT boxes. static const SocketAddress kNatAddrs[2] = {SocketAddress("192.168.1.1", 0), SocketAddress("192.168.2.1", 0)}; @@ -117,14 +159,14 @@ static const SocketAddress kCascadedNatAddrs[2] = { static const SocketAddress kCascadedPrivateAddrs[2] = { SocketAddress("192.168.10.11", 0), SocketAddress("192.168.20.22", 0)}; // The address of the public STUN server. -static const SocketAddress kStunAddr("99.99.99.1", cricket::STUN_SERVER_PORT); +static const SocketAddress kStunAddr("99.99.99.1", webrtc::STUN_SERVER_PORT); // The addresses for the public turn server. 
static const SocketAddress kTurnUdpIntAddr("99.99.99.3", - cricket::STUN_SERVER_PORT); + webrtc::STUN_SERVER_PORT); static const SocketAddress kTurnTcpIntAddr("99.99.99.4", - cricket::STUN_SERVER_PORT + 1); + webrtc::STUN_SERVER_PORT + 1); static const SocketAddress kTurnUdpExtAddr("99.99.99.5", 0); -static const cricket::RelayCredentials kRelayCredentials("test", "test"); +static const webrtc::RelayCredentials kRelayCredentials("test", "test"); // Based on ICE_UFRAG_LENGTH const char* kIceUfrag[4] = {"UF00", "UF01", "UF02", "UF03"}; @@ -132,66 +174,61 @@ const char* kIceUfrag[4] = {"UF00", "UF01", "UF02", "UF03"}; const char* kIcePwd[4] = { "TESTICEPWD00000000000000", "TESTICEPWD00000000000001", "TESTICEPWD00000000000002", "TESTICEPWD00000000000003"}; -const cricket::IceParameters kIceParams[4] = { - {kIceUfrag[0], kIcePwd[0], false}, - {kIceUfrag[1], kIcePwd[1], false}, - {kIceUfrag[2], kIcePwd[2], false}, - {kIceUfrag[3], kIcePwd[3], false}}; +const webrtc::IceParameters kIceParams[4] = {{kIceUfrag[0], kIcePwd[0], false}, + {kIceUfrag[1], kIcePwd[1], false}, + {kIceUfrag[2], kIcePwd[2], false}, + {kIceUfrag[3], kIcePwd[3], false}}; -const uint64_t kLowTiebreaker = 11111; -const uint64_t kHighTiebreaker = 22222; -const uint64_t kTiebreakerDefault = 44444; - -cricket::IceConfig CreateIceConfig( +webrtc::IceConfig CreateIceConfig( int receiving_timeout, - cricket::ContinualGatheringPolicy continual_gathering_policy, - absl::optional backup_ping_interval = absl::nullopt) { - cricket::IceConfig config; + webrtc::ContinualGatheringPolicy continual_gathering_policy, + std::optional backup_ping_interval = std::nullopt) { + webrtc::IceConfig config; config.receiving_timeout = receiving_timeout; config.continual_gathering_policy = continual_gathering_policy; config.backup_connection_ping_interval = backup_ping_interval; return config; } -cricket::Candidate CreateUdpCandidate(absl::string_view type, - absl::string_view ip, - int port, - int priority, - absl::string_view ufrag = "") { - cricket::Candidate c; - c.set_address(rtc::SocketAddress(ip, port)); - c.set_component(cricket::ICE_CANDIDATE_COMPONENT_DEFAULT); - c.set_protocol(cricket::UDP_PROTOCOL_NAME); +webrtc::Candidate CreateUdpCandidate(IceCandidateType type, + absl::string_view ip, + int port, + int priority, + absl::string_view ufrag = "") { + webrtc::Candidate c; + c.set_address(webrtc::SocketAddress(ip, port)); + c.set_component(webrtc::ICE_CANDIDATE_COMPONENT_DEFAULT); + c.set_protocol(webrtc::UDP_PROTOCOL_NAME); c.set_priority(priority); c.set_username(ufrag); c.set_type(type); return c; } -cricket::BasicPortAllocator* CreateBasicPortAllocator( - rtc::NetworkManager* network_manager, - rtc::PacketSocketFactory* socket_factory, - const cricket::ServerAddresses& stun_servers, - const rtc::SocketAddress& turn_server_udp, - const rtc::SocketAddress& turn_server_tcp) { - cricket::RelayServerConfig turn_server; +std::unique_ptr CreateBasicPortAllocator( + const Environment& env, + webrtc::NetworkManager* network_manager, + webrtc::PacketSocketFactory* socket_factory, + const webrtc::ServerAddresses& stun_servers, + const webrtc::SocketAddress& turn_server_udp, + const webrtc::SocketAddress& turn_server_tcp) { + webrtc::RelayServerConfig turn_server; turn_server.credentials = kRelayCredentials; if (!turn_server_udp.IsNil()) { turn_server.ports.push_back( - cricket::ProtocolAddress(turn_server_udp, cricket::PROTO_UDP)); + webrtc::ProtocolAddress(turn_server_udp, webrtc::PROTO_UDP)); } if (!turn_server_tcp.IsNil()) { 
turn_server.ports.push_back( - cricket::ProtocolAddress(turn_server_tcp, cricket::PROTO_TCP)); + webrtc::ProtocolAddress(turn_server_tcp, webrtc::PROTO_TCP)); } - std::vector turn_servers(1, turn_server); + std::vector turn_servers(1, turn_server); - std::unique_ptr allocator = - std::make_unique(network_manager, - socket_factory); + auto allocator = std::make_unique( + env, network_manager, socket_factory); allocator->Initialize(); allocator->SetConfiguration(stun_servers, turn_servers, 0, webrtc::NO_PRUNE); - return allocator.release(); + return allocator; } // An one-shot resolver factory with default return arguments. @@ -202,7 +239,7 @@ class ResolverFactoryFixture : public webrtc::MockAsyncDnsResolverFactory { mock_async_dns_resolver_ = std::make_unique(); EXPECT_CALL(*mock_async_dns_resolver_, Start(_, _)) .WillRepeatedly( - [](const rtc::SocketAddress& addr, + [](const webrtc::SocketAddress& /* addr */, absl::AnyInvocable callback) { callback(); }); EXPECT_CALL(*mock_async_dns_resolver_, result()) .WillOnce(ReturnRef(mock_async_dns_resolver_result_)); @@ -219,7 +256,7 @@ class ResolverFactoryFixture : public webrtc::MockAsyncDnsResolverFactory { }); } - void SetAddressToReturn(rtc::SocketAddress address_to_return) { + void SetAddressToReturn(webrtc::SocketAddress address_to_return) { EXPECT_CALL(mock_async_dns_resolver_result_, GetResolvedAddress(_, _)) .WillOnce(DoAll(SetArgPointee<1>(address_to_return), Return(true))); } @@ -227,7 +264,7 @@ class ResolverFactoryFixture : public webrtc::MockAsyncDnsResolverFactory { // This function must be called before Create(). ASSERT_TRUE(!!mock_async_dns_resolver_); EXPECT_CALL(*mock_async_dns_resolver_, Start(_, _)) - .WillOnce([this](const rtc::SocketAddress& addr, + .WillOnce([this](const webrtc::SocketAddress& addr, absl::AnyInvocable callback) { saved_callback_ = std::move(callback); }); @@ -244,19 +281,20 @@ class ResolverFactoryFixture : public webrtc::MockAsyncDnsResolverFactory { absl::AnyInvocable saved_callback_; }; -bool HasLocalAddress(const cricket::CandidatePairInterface* pair, +bool HasLocalAddress(const webrtc::CandidatePairInterface* pair, const SocketAddress& address) { return pair->local_candidate().address().EqualIPs(address); } -bool HasRemoteAddress(const cricket::CandidatePairInterface* pair, +bool HasRemoteAddress(const webrtc::CandidatePairInterface* pair, const SocketAddress& address) { return pair->remote_candidate().address().EqualIPs(address); } } // namespace -namespace cricket { +namespace webrtc { +using ::testing::NotNull; // This test simulates 2 P2P endpoints that want to establish connectivity // with each other over various network topologies and conditions, which can be @@ -276,45 +314,30 @@ namespace cricket { class P2PTransportChannelTestBase : public ::testing::Test, public sigslot::has_slots<> { public: - explicit P2PTransportChannelTestBase(absl::string_view field_trials) - : field_trials_(field_trials), - vss_(new rtc::VirtualSocketServer()), - nss_(new rtc::NATSocketServer(vss_.get())), - ss_(new rtc::FirewallSocketServer(nss_.get())), - socket_factory_(new rtc::BasicPacketSocketFactory(ss_.get())), + P2PTransportChannelTestBase() + : vss_(new VirtualSocketServer()), + nss_(new NATSocketServer(vss_.get())), + ss_(new FirewallSocketServer(nss_.get())), + socket_factory_(new BasicPacketSocketFactory(ss_.get())), main_(ss_.get()), - stun_server_(TestStunServer::Create(ss_.get(), kStunAddr)), + stun_server_(TestStunServer::Create(ss_.get(), kStunAddr, main_)), turn_server_(&main_, ss_.get(), 
kTurnUdpIntAddr, kTurnUdpExtAddr), - socks_server1_(ss_.get(), - kSocksProxyAddrs[0], - ss_.get(), - kSocksProxyAddrs[0]), - socks_server2_(ss_.get(), - kSocksProxyAddrs[1], - ss_.get(), - kSocksProxyAddrs[1]), force_relay_(false) { - ep1_.role_ = ICEROLE_CONTROLLING; - ep2_.role_ = ICEROLE_CONTROLLED; - - ServerAddresses stun_servers; - stun_servers.insert(kStunAddr); - ep1_.allocator_.reset(CreateBasicPortAllocator( - &ep1_.network_manager_, socket_factory_.get(), stun_servers, - kTurnUdpIntAddr, rtc::SocketAddress())); - ep2_.allocator_.reset(CreateBasicPortAllocator( - &ep2_.network_manager_, socket_factory_.get(), stun_servers, - kTurnUdpIntAddr, rtc::SocketAddress())); - - ep1_.SetIceTiebreaker(kTiebreakerDefault); - ep1_.allocator_->SetIceTiebreaker(kTiebreakerDefault); - ep2_.SetIceTiebreaker(kTiebreakerDefault); - ep2_.allocator_->SetIceTiebreaker(kTiebreakerDefault); + ep1_.role_ = webrtc::ICEROLE_CONTROLLING; + ep2_.role_ = webrtc::ICEROLE_CONTROLLED; + webrtc::metrics::Reset(); } - P2PTransportChannelTestBase() - : P2PTransportChannelTestBase(absl::string_view()) {} + void CreatePortAllocators(const Environment& env) { + ServerAddresses stun_servers = {kStunAddr}; + ep1_.allocator_ = CreateBasicPortAllocator( + env, &ep1_.network_manager_, socket_factory_.get(), stun_servers, + kTurnUdpIntAddr, SocketAddress()); + ep2_.allocator_ = CreateBasicPortAllocator( + env, &ep2_.network_manager_, socket_factory_.get(), stun_servers, + kTurnUdpIntAddr, SocketAddress()); + } protected: enum Config { @@ -328,15 +351,13 @@ class P2PTransportChannelTestBase : public ::testing::Test, BLOCK_UDP, // Firewall, UDP in/out blocked BLOCK_UDP_AND_INCOMING_TCP, // Firewall, UDP in/out and TCP in blocked BLOCK_ALL_BUT_OUTGOING_HTTP, // Firewall, only TCP out on 80/443 - PROXY_HTTPS, // All traffic through HTTPS proxy - PROXY_SOCKS, // All traffic through SOCKS proxy NUM_CONFIGS }; struct Result { - Result(absl::string_view controlling_type, + Result(IceCandidateType controlling_type, absl::string_view controlling_protocol, - absl::string_view controlled_type, + IceCandidateType controlled_type, absl::string_view controlled_protocol, int wait) : controlling_type(controlling_type), @@ -346,10 +367,10 @@ class P2PTransportChannelTestBase : public ::testing::Test, connect_wait(wait) {} // The expected candidate type and protocol of the controlling ICE agent. - std::string controlling_type; + IceCandidateType controlling_type; std::string controlling_protocol; // The expected candidate type and protocol of the controlled ICE agent. - std::string controlled_type; + IceCandidateType controlled_type; std::string controlled_protocol; // How long to wait before the correct candidate pair is selected. 
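// CreateBasicPortAllocator() above now takes an Environment and returns a
// std::unique_ptr instead of a raw pointer, making ownership explicit for
// Endpoint::allocator_. A condensed sketch of the new call-site shape used
// by CreatePortAllocators() (the wrapper function itself is hypothetical):

#include <memory>

#include "api/environment/environment.h"
#include "api/packet_socket_factory.h"
#include "p2p/client/basic_port_allocator.h"
#include "rtc_base/network.h"

std::unique_ptr<webrtc::BasicPortAllocator> MakeAllocator(
    const webrtc::Environment& env,
    webrtc::NetworkManager* network_manager,
    webrtc::PacketSocketFactory* socket_factory) {
  auto allocator = std::make_unique<webrtc::BasicPortAllocator>(
      env, network_manager, socket_factory);
  allocator->Initialize();
  return allocator;  // Caller (the test Endpoint) owns it.
}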
int connect_wait; @@ -378,14 +399,14 @@ class P2PTransportChannelTestBase : public ::testing::Test, struct Endpoint : public sigslot::has_slots<> { Endpoint() - : role_(ICEROLE_UNKNOWN), + : role_(webrtc::ICEROLE_UNKNOWN), tiebreaker_(0), role_conflict_(false), save_candidates_(false) {} - bool HasTransport(const rtc::PacketTransportInternal* transport) { + bool HasTransport(const PacketTransportInternal* transport) { return (transport == cd1_.ch_.get() || transport == cd2_.ch_.get()); } - ChannelData* GetChannelData(rtc::PacketTransportInternal* transport) { + ChannelData* GetChannelData(PacketTransportInternal* transport) { if (!HasTransport(transport)) return NULL; if (cd1_.ch_.get() == transport) @@ -396,8 +417,6 @@ class P2PTransportChannelTestBase : public ::testing::Test, void SetIceRole(IceRole role) { role_ = role; } IceRole ice_role() { return role_; } - void SetIceTiebreaker(uint64_t tiebreaker) { tiebreaker_ = tiebreaker; } - uint64_t GetIceTiebreaker() { return tiebreaker_; } void OnRoleConflict(bool role_conflict) { role_conflict_ = role_conflict; } bool role_conflict() { return role_conflict_; } void SetAllocationStepDelay(uint32_t delay) { @@ -415,10 +434,9 @@ class P2PTransportChannelTestBase : public ::testing::Test, return ice_regathering_counter_[reason]; } - rtc::FakeNetworkManager network_manager_; + FakeNetworkManager network_manager_; std::unique_ptr allocator_; - webrtc::AsyncDnsResolverFactoryInterface* async_dns_resolver_factory_ = - nullptr; + AsyncDnsResolverFactoryInterface* async_dns_resolver_factory_ = nullptr; ChannelData cd1_; ChannelData cd2_; IceRole role_; @@ -430,7 +448,7 @@ class P2PTransportChannelTestBase : public ::testing::Test, std::map ice_regathering_counter_; }; - ChannelData* GetChannelData(rtc::PacketTransportInternal* transport) { + ChannelData* GetChannelData(PacketTransportInternal* transport) { if (ep1_.HasTransport(transport)) return ep1_.GetChannelData(transport); else @@ -444,16 +462,17 @@ class P2PTransportChannelTestBase : public ::testing::Test, return new_ice; } - void CreateChannels(const IceConfig& ep1_config, + void CreateChannels(const Environment& env, + const IceConfig& ep1_config, const IceConfig& ep2_config, bool renomination = false) { IceParameters ice_ep1_cd1_ch = IceParamsWithRenomination(kIceParams[0], renomination); IceParameters ice_ep2_cd1_ch = IceParamsWithRenomination(kIceParams[1], renomination); - ep1_.cd1_.ch_ = CreateChannel(0, ICE_CANDIDATE_COMPONENT_DEFAULT, + ep1_.cd1_.ch_ = CreateChannel(env, 0, ICE_CANDIDATE_COMPONENT_DEFAULT, ice_ep1_cd1_ch, ice_ep2_cd1_ch); - ep2_.cd1_.ch_ = CreateChannel(1, ICE_CANDIDATE_COMPONENT_DEFAULT, + ep2_.cd1_.ch_ = CreateChannel(env, 1, ICE_CANDIDATE_COMPONENT_DEFAULT, ice_ep2_cd1_ch, ice_ep1_cd1_ch); ep1_.cd1_.ch_->SetIceConfig(ep1_config); ep2_.cd1_.ch_->SetIceConfig(ep2_config); @@ -465,31 +484,37 @@ class P2PTransportChannelTestBase : public ::testing::Test, &ep2_, &Endpoint::OnIceRegathering); } - void CreateChannels() { + void CreateChannels(const Environment& env) { IceConfig default_config; - CreateChannels(default_config, default_config, false); + CreateChannels(env, default_config, default_config, false); } std::unique_ptr CreateChannel( + const Environment& env, int endpoint, int component, const IceParameters& local_ice, const IceParameters& remote_ice) { - webrtc::IceTransportInit init; + IceTransportInit init; init.set_port_allocator(GetAllocator(endpoint)); init.set_async_dns_resolver_factory( GetEndpoint(endpoint)->async_dns_resolver_factory_); - 
init.set_field_trials(&field_trials_); + init.set_field_trials(&env.field_trials()); auto channel = P2PTransportChannel::Create("test content name", component, std::move(init)); channel->SignalReadyToSend.connect( this, &P2PTransportChannelTestBase::OnReadyToSend); channel->SignalCandidateGathered.connect( this, &P2PTransportChannelTestBase::OnCandidateGathered); - channel->SignalCandidatesRemoved.connect( - this, &P2PTransportChannelTestBase::OnCandidatesRemoved); - channel->SignalReadPacket.connect( - this, &P2PTransportChannelTestBase::OnReadPacket); + channel->SetCandidatesRemovedCallback( + [this](IceTransportInternal* transport, const Candidates& candidates) { + OnCandidatesRemoved(transport, candidates); + }); + channel->RegisterReceivedPacketCallback( + this, [&](PacketTransportInternal* transport, + const ReceivedIpPacket& packet) { + OnReadPacket(transport, packet); + }); channel->SignalRoleConflict.connect( this, &P2PTransportChannelTestBase::OnRoleConflict); channel->SignalNetworkRouteChanged.connect( @@ -501,7 +526,6 @@ class P2PTransportChannelTestBase : public ::testing::Test, channel->SetRemoteIceParameters(remote_ice); } channel->SetIceRole(GetEndpoint(endpoint)->ice_role()); - channel->SetIceTiebreaker(GetEndpoint(endpoint)->GetIceTiebreaker()); return channel; } @@ -513,7 +537,7 @@ class P2PTransportChannelTestBase : public ::testing::Test, ep2_.cd2_.ch_.reset(); // Process pending tasks that need to run for cleanup purposes such as // pending deletion of Connection objects (see Connection::Destroy). - rtc::Thread::Current()->ProcessMessages(0); + Thread::Current()->ProcessMessages(0); } P2PTransportChannel* ep1_ch1() { return ep1_.cd1_.ch_.get(); } P2PTransportChannel* ep1_ch2() { return ep1_.cd2_.ch_.get(); } @@ -521,7 +545,7 @@ class P2PTransportChannelTestBase : public ::testing::Test, P2PTransportChannel* ep2_ch2() { return ep2_.cd2_.ch_.get(); } TestTurnServer* test_turn_server() { return &turn_server_; } - rtc::VirtualSocketServer* virtual_socket_server() { return vss_.get(); } + VirtualSocketServer* virtual_socket_server() { return vss_.get(); } // Common results. 
static const Result kLocalUdpToLocalUdp; @@ -539,8 +563,8 @@ class P2PTransportChannelTestBase : public ::testing::Test, static const Result kLocalTcpToPrflxTcp; static const Result kPrflxTcpToLocalTcp; - rtc::NATSocketServer* nat() { return nss_.get(); } - rtc::FirewallSocketServer* fw() { return ss_.get(); } + NATSocketServer* nat() { return nss_.get(); } + FirewallSocketServer* fw() { return ss_.get(); } Endpoint* GetEndpoint(int endpoint) { if (endpoint == 0) { @@ -557,25 +581,18 @@ class P2PTransportChannelTestBase : public ::testing::Test, void AddAddress(int endpoint, const SocketAddress& addr) { GetEndpoint(endpoint)->network_manager_.AddInterface(addr); } - void AddAddress(int endpoint, - const SocketAddress& addr, - absl::string_view ifname, - rtc::AdapterType adapter_type, - absl::optional underlying_vpn_adapter_type = - absl::nullopt) { + void AddAddress( + int endpoint, + const SocketAddress& addr, + absl::string_view ifname, + AdapterType adapter_type, + std::optional underlying_vpn_adapter_type = std::nullopt) { GetEndpoint(endpoint)->network_manager_.AddInterface( addr, ifname, adapter_type, underlying_vpn_adapter_type); } void RemoveAddress(int endpoint, const SocketAddress& addr) { GetEndpoint(endpoint)->network_manager_.RemoveInterface(addr); - fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, addr); - } - void SetProxy(int endpoint, rtc::ProxyType type) { - rtc::ProxyInfo info; - info.type = type; - info.address = (type == rtc::PROXY_HTTPS) ? kHttpsProxyAddrs[endpoint] - : kSocksProxyAddrs[endpoint]; - GetAllocator(endpoint)->set_proxy("unittest/1.0", info); + fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_ANY, addr); } void SetAllocatorFlags(int endpoint, int flags) { GetAllocator(endpoint)->set_flags(flags); @@ -583,9 +600,6 @@ class P2PTransportChannelTestBase : public ::testing::Test, void SetIceRole(int endpoint, IceRole role) { GetEndpoint(endpoint)->SetIceRole(role); } - void SetIceTiebreaker(int endpoint, uint64_t tiebreaker) { - GetEndpoint(endpoint)->SetIceTiebreaker(tiebreaker); - } bool GetRoleConflict(int endpoint) { return GetEndpoint(endpoint)->role_conflict(); } @@ -596,13 +610,13 @@ class P2PTransportChannelTestBase : public ::testing::Test, return GetEndpoint(endpoint)->SetAllowTcpListen(allow_tcp_listen); } - // Return true if the approprite parts of the expected Result, based + // Return true if the appropriate parts of the expected Result, based // on the local and remote candidate of ep1_ch1, match. This can be // used in an EXPECT_TRUE_WAIT. bool CheckCandidate1(const Result& expected) { - const std::string& local_type = LocalCandidate(ep1_ch1())->type(); + auto local_type = LocalCandidate(ep1_ch1())->type(); const std::string& local_protocol = LocalCandidate(ep1_ch1())->protocol(); - const std::string& remote_type = RemoteCandidate(ep1_ch1())->type(); + auto remote_type = RemoteCandidate(ep1_ch1())->type(); const std::string& remote_protocol = RemoteCandidate(ep1_ch1())->protocol(); return (local_protocol == expected.controlling_protocol && remote_protocol == expected.controlled_protocol && @@ -610,7 +624,7 @@ class P2PTransportChannelTestBase : public ::testing::Test, remote_type == expected.controlled_type); } - // EXPECT_EQ on the approprite parts of the expected Result, based + // EXPECT_EQ on the appropriate parts of the expected Result, based // on the local and remote candidate of ep1_ch1. This is like // CheckCandidate1, except that it will provide more detail about // what didn't match. 
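// CreateChannel() in the fixture above swaps the old sigslot hookups for
// SetCandidatesRemovedCallback() and RegisterReceivedPacketCallback(). A
// condensed sketch of that wiring outside the fixture; the webrtc::
// spellings of Candidates and ReceivedIpPacket follow the namespace
// migration in this patch:

#include "p2p/base/p2p_transport_channel.h"
#include "rtc_base/network/received_packet.h"

void WireUpObservers(webrtc::P2PTransportChannel& channel, void* listener) {
  channel.SetCandidatesRemovedCallback(
      [](webrtc::IceTransportInternal* /*transport*/,
         const webrtc::Candidates& /*removed*/) {
        // Replaces the old SignalCandidatesRemoved slot.
      });
  channel.RegisterReceivedPacketCallback(
      listener, [](webrtc::PacketTransportInternal* /*transport*/,
                   const webrtc::ReceivedIpPacket& /*packet*/) {
        // Replaces the old SignalReadPacket slot.
      });
}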
@@ -619,9 +633,9 @@ class P2PTransportChannelTestBase : public ::testing::Test, return; } - const std::string& local_type = LocalCandidate(ep1_ch1())->type(); + auto local_type = LocalCandidate(ep1_ch1())->type(); const std::string& local_protocol = LocalCandidate(ep1_ch1())->protocol(); - const std::string& remote_type = RemoteCandidate(ep1_ch1())->type(); + auto remote_type = RemoteCandidate(ep1_ch1())->type(); const std::string& remote_protocol = RemoteCandidate(ep1_ch1())->protocol(); EXPECT_EQ(expected.controlling_type, local_type); EXPECT_EQ(expected.controlled_type, remote_type); @@ -629,13 +643,13 @@ class P2PTransportChannelTestBase : public ::testing::Test, EXPECT_EQ(expected.controlled_protocol, remote_protocol); } - // Return true if the approprite parts of the expected Result, based + // Return true if the appropriate parts of the expected Result, based // on the local and remote candidate of ep2_ch1, match. This can be // used in an EXPECT_TRUE_WAIT. bool CheckCandidate2(const Result& expected) { - const std::string& local_type = LocalCandidate(ep2_ch1())->type(); + auto local_type = LocalCandidate(ep2_ch1())->type(); const std::string& local_protocol = LocalCandidate(ep2_ch1())->protocol(); - const std::string& remote_type = RemoteCandidate(ep2_ch1())->type(); + auto remote_type = RemoteCandidate(ep2_ch1())->type(); const std::string& remote_protocol = RemoteCandidate(ep2_ch1())->protocol(); return (local_protocol == expected.controlled_protocol && remote_protocol == expected.controlling_protocol && @@ -643,7 +657,7 @@ class P2PTransportChannelTestBase : public ::testing::Test, remote_type == expected.controlling_type); } - // EXPECT_EQ on the approprite parts of the expected Result, based + // EXPECT_EQ on the appropriate parts of the expected Result, based // on the local and remote candidate of ep2_ch1. This is like // CheckCandidate2, except that it will provide more detail about // what didn't match. @@ -652,9 +666,9 @@ class P2PTransportChannelTestBase : public ::testing::Test, return; } - const std::string& local_type = LocalCandidate(ep2_ch1())->type(); + auto local_type = LocalCandidate(ep2_ch1())->type(); const std::string& local_protocol = LocalCandidate(ep2_ch1())->protocol(); - const std::string& remote_type = RemoteCandidate(ep2_ch1())->type(); + auto remote_type = RemoteCandidate(ep2_ch1())->type(); const std::string& remote_protocol = RemoteCandidate(ep2_ch1())->protocol(); EXPECT_EQ(expected.controlled_type, local_type); EXPECT_EQ(expected.controlling_type, remote_type); @@ -693,16 +707,21 @@ class P2PTransportChannelTestBase : public ::testing::Test, return CheckConnected(ch1, ch2) && CheckCandidatePair(ch1, ch2, from, to); } - virtual void Test(const Result& expected) { - rtc::ScopedFakeClock clock; - int64_t connect_start = rtc::TimeMillis(); + void Test(const Environment& env, const Result& expected) { + ScopedFakeClock clock; + int64_t connect_start = webrtc::TimeMillis(); int64_t connect_time; // Create the channels and wait for them to connect. 
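Note: Test() above now takes an Environment, and the hunks that follow convert the EXPECT_*_SIMULATED_WAIT macros to webrtc::WaitUntil with explicit gMock matchers and a fake clock. The general shape of the converted assertions, as used throughout this patch (sketch only):

  ScopedFakeClock clock;
  // Boolean condition: wait until the predicate satisfies IsTrue().
  EXPECT_THAT(
      webrtc::WaitUntil(
          [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(),
          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
      webrtc::IsRtcOk());
  // Value condition: the lambda returns a value and the matcher (Eq, Ne, ...)
  // decides when the wait is satisfied.
  EXPECT_THAT(
      webrtc::WaitUntil(
          [&] { return SendData(ep1_ch1(), data, len); }, Eq(len),
          {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}),
      webrtc::IsRtcOk());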
- CreateChannels(); - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - expected.connect_wait + kShortTimeout, clock); - connect_time = rtc::TimeMillis() - connect_start; + CreateChannels(env); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = + TimeDelta::Millis(expected.connect_wait + kShortTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); + connect_time = webrtc::TimeMillis() - connect_start; if (connect_time < expected.connect_wait) { RTC_LOG(LS_INFO) << "Connect time: " << connect_time << " ms"; } else { @@ -713,20 +732,25 @@ class P2PTransportChannelTestBase : public ::testing::Test, // Allow a few turns of the crank for the selected connections to emerge. // This may take up to 2 seconds. if (ep1_ch1()->selected_connection() && ep2_ch1()->selected_connection()) { - int64_t converge_start = rtc::TimeMillis(); + int64_t converge_start = webrtc::TimeMillis(); int64_t converge_time; // Verifying local and remote channel selected connection information. // This is done only for the RFC 5245 as controlled agent will use // USE-CANDIDATE from controlling (ep1) agent. We can easily predict from // EP1 result matrix. - EXPECT_TRUE_SIMULATED_WAIT( - CheckCandidate1(expected) && CheckCandidate2(expected), - kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckCandidate1(expected) && CheckCandidate2(expected); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Also do EXPECT_EQ on each part so that failures are more verbose. ExpectCandidate1(expected); ExpectCandidate2(expected); - converge_time = rtc::TimeMillis() - converge_start; + converge_time = webrtc::TimeMillis() - converge_start; int64_t converge_wait = 2000; if (converge_time < converge_wait) { RTC_LOG(LS_INFO) << "Converge time: " << converge_time << " ms"; @@ -742,19 +766,33 @@ class P2PTransportChannelTestBase : public ::testing::Test, DestroyChannels(); } - void TestSendRecv(rtc::ThreadProcessingFakeClock* clock) { + void TestSendRecv(ThreadProcessingFakeClock* clock) { for (int i = 0; i < 10; ++i) { const char* data = "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"; int len = static_cast(strlen(data)); // local_channel1 <==> remote_channel1 - EXPECT_EQ_SIMULATED_WAIT(len, SendData(ep1_ch1(), data, len), - kMediumTimeout, *clock); - EXPECT_TRUE_SIMULATED_WAIT(CheckDataOnChannel(ep2_ch1(), data, len), - kMediumTimeout, *clock); - EXPECT_EQ_SIMULATED_WAIT(len, SendData(ep2_ch1(), data, len), - kMediumTimeout, *clock); - EXPECT_TRUE_SIMULATED_WAIT(CheckDataOnChannel(ep1_ch1(), data, len), - kMediumTimeout, *clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return SendData(ep1_ch1(), data, len); }, Eq(len), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &*clock}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckDataOnChannel(ep2_ch1(), data, len); }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &*clock}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return SendData(ep2_ch1(), data, len); }, Eq(len), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &*clock}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckDataOnChannel(ep1_ch1(), data, len); }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &*clock}), + webrtc::IsRtcOk()); } } @@ -764,11 +802,14 @@ class P2PTransportChannelTestBase : public ::testing::Test, // new 
connection using the newly generated ice candidates. // Before calling this function the end points must be configured. void TestHandleIceUfragPasswordChanged() { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; ep1_ch1()->SetRemoteIceParameters(kIceParams[1]); ep2_ch1()->SetRemoteIceParameters(kIceParams[0]); - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); const Candidate* old_local_candidate1 = LocalCandidate(ep1_ch1()); const Candidate* old_local_candidate2 = LocalCandidate(ep2_ch1()); @@ -783,60 +824,47 @@ class P2PTransportChannelTestBase : public ::testing::Test, ep2_ch1()->SetRemoteIceParameters(kIceParams[2]); ep2_ch1()->MaybeStartGathering(); - EXPECT_TRUE_SIMULATED_WAIT(LocalCandidate(ep1_ch1())->generation() != - old_local_candidate1->generation(), - kMediumTimeout, clock); - EXPECT_TRUE_SIMULATED_WAIT(LocalCandidate(ep2_ch1())->generation() != - old_local_candidate2->generation(), - kMediumTimeout, clock); - EXPECT_TRUE_SIMULATED_WAIT(RemoteCandidate(ep1_ch1())->generation() != - old_remote_candidate1->generation(), - kMediumTimeout, clock); - EXPECT_TRUE_SIMULATED_WAIT(RemoteCandidate(ep2_ch1())->generation() != - old_remote_candidate2->generation(), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return LocalCandidate(ep1_ch1())->generation(); }, + Ne(old_local_candidate1->generation()), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return LocalCandidate(ep2_ch1())->generation(); }, + Ne(old_local_candidate2->generation()), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return RemoteCandidate(ep1_ch1())->generation(); }, + Ne(old_remote_candidate1->generation()), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return RemoteCandidate(ep2_ch1())->generation(); }, + Ne(old_remote_candidate2->generation()), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); EXPECT_EQ(1u, RemoteCandidate(ep2_ch1())->generation()); EXPECT_EQ(1u, RemoteCandidate(ep1_ch1())->generation()); } - void TestSignalRoleConflict() { - rtc::ScopedFakeClock clock; - // Default EP1 is in controlling state. - SetIceTiebreaker(0, kLowTiebreaker); - - SetIceRole(1, ICEROLE_CONTROLLING); - SetIceTiebreaker(1, kHighTiebreaker); - - // Creating channels with both channels role set to CONTROLLING. - CreateChannels(); - // Since both the channels initiated with controlling state and channel2 - // has higher tiebreaker value, channel1 should receive SignalRoleConflict. 
- EXPECT_TRUE_SIMULATED_WAIT(GetRoleConflict(0), kShortTimeout, clock); - EXPECT_FALSE(GetRoleConflict(1)); - - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kShortTimeout, clock); - - EXPECT_TRUE(ep1_ch1()->selected_connection() && - ep2_ch1()->selected_connection()); - - TestSendRecv(&clock); - DestroyChannels(); - } - - void TestPacketInfoIsSet(rtc::PacketInfo info) { - EXPECT_NE(info.packet_type, rtc::PacketType::kUnknown); - EXPECT_NE(info.protocol, rtc::PacketInfoProtocolType::kUnknown); + void TestPacketInfoIsSet(PacketInfo info) { + EXPECT_NE(info.packet_type, PacketType::kUnknown); + EXPECT_NE(info.protocol, PacketInfoProtocolType::kUnknown); EXPECT_TRUE(info.network_id.has_value()); } - void OnReadyToSend(rtc::PacketTransportInternal* transport) { + void OnReadyToSend(PacketTransportInternal* transport) { GetEndpoint(transport)->ready_to_send_ = true; } // We pass the candidates directly to the other side. void OnCandidateGathered(IceTransportInternal* ch, const Candidate& c) { - if (force_relay_ && c.type() != RELAY_PORT_TYPE) + if (force_relay_ && !c.is_relay()) return; if (GetEndpoint(ch)->save_candidates_) { @@ -848,7 +876,7 @@ class P2PTransportChannelTestBase : public ::testing::Test, } } - void OnNetworkRouteChanged(absl::optional network_route) { + void OnNetworkRouteChanged(std::optional network_route) { // If the `network_route` is unset, don't count. This is used in the case // when the network on remote side is down, the signal will be fired with an // unset network route and it shouldn't trigger a connection switch. @@ -885,12 +913,12 @@ class P2PTransportChannelTestBase : public ::testing::Test, // Tcp candidate verification has to be done when they are generated. void VerifySavedTcpCandidates(int endpoint, absl::string_view tcptype) { for (auto& data : GetEndpoint(endpoint)->saved_candidates_) { - EXPECT_EQ(data.candidate.protocol(), TCP_PROTOCOL_NAME); + EXPECT_EQ(data.candidate.protocol(), webrtc::TCP_PROTOCOL_NAME); EXPECT_EQ(data.candidate.tcptype(), tcptype); - if (data.candidate.tcptype() == TCPTYPE_ACTIVE_STR) { - EXPECT_EQ(data.candidate.address().port(), DISCARD_PORT); - } else if (data.candidate.tcptype() == TCPTYPE_PASSIVE_STR) { - EXPECT_NE(data.candidate.address().port(), DISCARD_PORT); + if (data.candidate.tcptype() == webrtc::TCPTYPE_ACTIVE_STR) { + EXPECT_EQ(data.candidate.address().port(), webrtc::DISCARD_PORT); + } else if (data.candidate.tcptype() == webrtc::TCPTYPE_PASSIVE_STR) { + EXPECT_NE(data.candidate.address().port(), webrtc::DISCARD_PORT); } else { FAIL() << "Unknown tcptype: " << data.candidate.tcptype(); } @@ -926,30 +954,30 @@ class P2PTransportChannelTestBase : public ::testing::Test, rch->AddRemoteCandidate(candidate); } - void OnReadPacket(rtc::PacketTransportInternal* transport, - const char* data, - size_t len, - const int64_t& /* packet_time_us */, - int flags) { + void OnReadPacket(PacketTransportInternal* transport, + const ReceivedIpPacket& packet) { std::list& packets = GetPacketList(transport); - packets.push_front(std::string(data, len)); + packets.push_front( + std::string(reinterpret_cast(packet.payload().data()), + packet.payload().size())); } void OnRoleConflict(IceTransportInternal* channel) { GetEndpoint(channel)->OnRoleConflict(true); - IceRole new_role = GetEndpoint(channel)->ice_role() == ICEROLE_CONTROLLING - ? ICEROLE_CONTROLLED - : ICEROLE_CONTROLLING; + IceRole new_role = + GetEndpoint(channel)->ice_role() == webrtc::ICEROLE_CONTROLLING + ? 
webrtc::ICEROLE_CONTROLLED + : webrtc::ICEROLE_CONTROLLING; channel->SetIceRole(new_role); } - void OnSentPacket(rtc::PacketTransportInternal* transport, - const rtc::SentPacket& packet) { + void OnSentPacket(PacketTransportInternal* transport, + const SentPacketInfo& packet) { TestPacketInfoIsSet(packet.info); } int SendData(IceTransportInternal* channel, const char* data, size_t len) { - rtc::PacketOptions options; + AsyncSocketPacketOptions options; return channel->SendPacket(data, len, options, 0); } bool CheckDataOnChannel(IceTransportInternal* channel, @@ -967,7 +995,7 @@ class P2PTransportChannelTestBase : public ::testing::Test, ? &ch->selected_connection()->remote_candidate() : NULL; } - Endpoint* GetEndpoint(rtc::PacketTransportInternal* transport) { + Endpoint* GetEndpoint(PacketTransportInternal* transport) { if (ep1_.HasTransport(transport)) { return &ep1_; } else if (ep2_.HasTransport(transport)) { @@ -988,8 +1016,7 @@ class P2PTransportChannelTestBase : public ::testing::Test, else return NULL; } - std::list& GetPacketList( - rtc::PacketTransportInternal* transport) { + std::list& GetPacketList(PacketTransportInternal* transport) { return GetChannelData(transport)->ch_packets_; } @@ -1012,21 +1039,17 @@ class P2PTransportChannelTestBase : public ::testing::Test, void OnNominated(Connection* conn) { nominated_ = true; } bool nominated() { return nominated_; } - webrtc::test::ScopedKeyValueConfig field_trials_; - private: - std::unique_ptr vss_; - std::unique_ptr nss_; - std::unique_ptr ss_; - std::unique_ptr socket_factory_; + std::unique_ptr vss_; + std::unique_ptr nss_; + std::unique_ptr ss_; + std::unique_ptr socket_factory_; - rtc::AutoSocketServerThread main_; - rtc::scoped_refptr safety_ = + AutoSocketServerThread main_; + scoped_refptr safety_ = PendingTaskSafetyFlag::Create(); - std::unique_ptr stun_server_; + TestStunServer::StunServerPtr stun_server_; TestTurnServer turn_server_; - rtc::SocksProxyServer socks_server1_; - rtc::SocksProxyServer socks_server2_; Endpoint ep1_; Endpoint ep2_; RemoteIceParameterSource remote_ice_parameter_source_ = FROM_CANDIDATE; @@ -1038,109 +1061,106 @@ class P2PTransportChannelTestBase : public ::testing::Test, // The tests have only a few outcomes, which we predefine. 
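Note: in the Result definitions below, the candidate-type strings ("local", "stun", "prflx", "relay") become IceCandidateType values, and elsewhere in the patch string comparisons against *_PORT_TYPE constants become is_local()/is_srflx()/is_prflx()/is_relay() checks. A sketch of the equivalent assertion, with `connection` standing in for any selected connection (illustration only):

  // Old style: compare against a type string constant.
  //   EXPECT_EQ(PRFLX_PORT_TYPE, connection->remote_candidate().type());
  // New style: type() yields an IceCandidateType, with boolean helpers.
  const Candidate& remote = connection->remote_candidate();
  EXPECT_EQ(IceCandidateType::kPrflx, remote.type());
  EXPECT_TRUE(remote.is_prflx());
  // The predefined Result rows below are built the same way, e.g.
  //   Result(IceCandidateType::kHost, "udp", IceCandidateType::kSrflx, "udp", 1000);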
const P2PTransportChannelTestBase::Result - P2PTransportChannelTestBase::kLocalUdpToLocalUdp("local", + P2PTransportChannelTestBase::kLocalUdpToLocalUdp(IceCandidateType::kHost, "udp", - "local", + IceCandidateType::kHost, "udp", 1000); const P2PTransportChannelTestBase::Result - P2PTransportChannelTestBase::kLocalUdpToStunUdp("local", + P2PTransportChannelTestBase::kLocalUdpToStunUdp(IceCandidateType::kHost, "udp", - "stun", + IceCandidateType::kSrflx, "udp", 1000); const P2PTransportChannelTestBase::Result - P2PTransportChannelTestBase::kLocalUdpToPrflxUdp("local", + P2PTransportChannelTestBase::kLocalUdpToPrflxUdp(IceCandidateType::kHost, "udp", - "prflx", + IceCandidateType::kPrflx, "udp", 1000); const P2PTransportChannelTestBase::Result - P2PTransportChannelTestBase::kPrflxUdpToLocalUdp("prflx", + P2PTransportChannelTestBase::kPrflxUdpToLocalUdp(IceCandidateType::kPrflx, "udp", - "local", + IceCandidateType::kHost, "udp", 1000); const P2PTransportChannelTestBase::Result - P2PTransportChannelTestBase::kStunUdpToLocalUdp("stun", + P2PTransportChannelTestBase::kStunUdpToLocalUdp(IceCandidateType::kSrflx, "udp", - "local", + IceCandidateType::kHost, "udp", 1000); const P2PTransportChannelTestBase::Result - P2PTransportChannelTestBase::kStunUdpToStunUdp("stun", + P2PTransportChannelTestBase::kStunUdpToStunUdp(IceCandidateType::kSrflx, "udp", - "stun", + IceCandidateType::kSrflx, "udp", 1000); const P2PTransportChannelTestBase::Result - P2PTransportChannelTestBase::kStunUdpToPrflxUdp("stun", + P2PTransportChannelTestBase::kStunUdpToPrflxUdp(IceCandidateType::kSrflx, "udp", - "prflx", + IceCandidateType::kPrflx, "udp", 1000); const P2PTransportChannelTestBase::Result - P2PTransportChannelTestBase::kPrflxUdpToStunUdp("prflx", + P2PTransportChannelTestBase::kPrflxUdpToStunUdp(IceCandidateType::kPrflx, "udp", - "stun", + IceCandidateType::kSrflx, "udp", 1000); const P2PTransportChannelTestBase::Result - P2PTransportChannelTestBase::kLocalUdpToRelayUdp("local", + P2PTransportChannelTestBase::kLocalUdpToRelayUdp(IceCandidateType::kHost, "udp", - "relay", + IceCandidateType::kRelay, "udp", 2000); const P2PTransportChannelTestBase::Result - P2PTransportChannelTestBase::kPrflxUdpToRelayUdp("prflx", + P2PTransportChannelTestBase::kPrflxUdpToRelayUdp(IceCandidateType::kPrflx, "udp", - "relay", + IceCandidateType::kRelay, "udp", 2000); const P2PTransportChannelTestBase::Result - P2PTransportChannelTestBase::kRelayUdpToPrflxUdp("relay", + P2PTransportChannelTestBase::kRelayUdpToPrflxUdp(IceCandidateType::kRelay, "udp", - "prflx", + IceCandidateType::kPrflx, "udp", 2000); const P2PTransportChannelTestBase::Result - P2PTransportChannelTestBase::kLocalTcpToLocalTcp("local", + P2PTransportChannelTestBase::kLocalTcpToLocalTcp(IceCandidateType::kHost, "tcp", - "local", + IceCandidateType::kHost, "tcp", 3000); const P2PTransportChannelTestBase::Result - P2PTransportChannelTestBase::kLocalTcpToPrflxTcp("local", + P2PTransportChannelTestBase::kLocalTcpToPrflxTcp(IceCandidateType::kHost, "tcp", - "prflx", + IceCandidateType::kPrflx, "tcp", 3000); const P2PTransportChannelTestBase::Result - P2PTransportChannelTestBase::kPrflxTcpToLocalTcp("prflx", + P2PTransportChannelTestBase::kPrflxTcpToLocalTcp(IceCandidateType::kPrflx, "tcp", - "local", + IceCandidateType::kHost, "tcp", 3000); // Test the matrix of all the connectivity types we expect to see in the wild. // Just test every combination of the configs in the Config enum. 
class P2PTransportChannelTest : public P2PTransportChannelTestBase { - public: - P2PTransportChannelTest() : P2PTransportChannelTestBase() {} - explicit P2PTransportChannelTest(absl::string_view field_trials) - : P2PTransportChannelTestBase(field_trials) {} - protected: - void ConfigureEndpoints(Config config1, + void ConfigureEndpoints(const Environment& env, + Config config1, Config config2, int allocator_flags1, int allocator_flags2) { + CreatePortAllocators(env); ConfigureEndpoint(0, config1); SetAllocatorFlags(0, allocator_flags1); - SetAllocationStepDelay(0, kMinimumStepDelay); + SetAllocationStepDelay(0, webrtc::kMinimumStepDelay); ConfigureEndpoint(1, config2); SetAllocatorFlags(1, allocator_flags2); - SetAllocationStepDelay(1, kMinimumStepDelay); + SetAllocationStepDelay(1, webrtc::kMinimumStepDelay); set_remote_ice_parameter_source(FROM_SETICEPARAMETERS); } @@ -1157,7 +1177,7 @@ class P2PTransportChannelTest : public P2PTransportChannelTestBase { // Add a single NAT of the desired type nat() ->AddTranslator(kPublicAddrs[endpoint], kNatAddrs[endpoint], - static_cast(config - NAT_FULL_CONE)) + static_cast(config - NAT_FULL_CONE)) ->AddClient(kPrivateAddrs[endpoint]); break; case NAT_DOUBLE_CONE: @@ -1166,46 +1186,31 @@ class P2PTransportChannelTest : public P2PTransportChannelTestBase { // Add a two cascaded NATs of the desired types nat() ->AddTranslator(kPublicAddrs[endpoint], kNatAddrs[endpoint], - (config == NAT_DOUBLE_CONE) ? rtc::NAT_OPEN_CONE - : rtc::NAT_SYMMETRIC) + (config == NAT_DOUBLE_CONE) ? webrtc::NAT_OPEN_CONE + : webrtc::NAT_SYMMETRIC) ->AddTranslator(kPrivateAddrs[endpoint], - kCascadedNatAddrs[endpoint], rtc::NAT_OPEN_CONE) + kCascadedNatAddrs[endpoint], webrtc::NAT_OPEN_CONE) ->AddClient(kCascadedPrivateAddrs[endpoint]); break; case BLOCK_UDP: case BLOCK_UDP_AND_INCOMING_TCP: case BLOCK_ALL_BUT_OUTGOING_HTTP: - case PROXY_HTTPS: - case PROXY_SOCKS: AddAddress(endpoint, kPublicAddrs[endpoint]); // Block all UDP - fw()->AddRule(false, rtc::FP_UDP, rtc::FD_ANY, kPublicAddrs[endpoint]); + fw()->AddRule(false, webrtc::FP_UDP, webrtc::FD_ANY, + kPublicAddrs[endpoint]); if (config == BLOCK_UDP_AND_INCOMING_TCP) { // Block TCP inbound to the endpoint - fw()->AddRule(false, rtc::FP_TCP, SocketAddress(), + fw()->AddRule(false, webrtc::FP_TCP, SocketAddress(), kPublicAddrs[endpoint]); } else if (config == BLOCK_ALL_BUT_OUTGOING_HTTP) { // Block all TCP to/from the endpoint except 80/443 out - fw()->AddRule(true, rtc::FP_TCP, kPublicAddrs[endpoint], - SocketAddress(rtc::IPAddress(INADDR_ANY), 80)); - fw()->AddRule(true, rtc::FP_TCP, kPublicAddrs[endpoint], - SocketAddress(rtc::IPAddress(INADDR_ANY), 443)); - fw()->AddRule(false, rtc::FP_TCP, rtc::FD_ANY, - kPublicAddrs[endpoint]); - } else if (config == PROXY_HTTPS) { - // Block all TCP to/from the endpoint except to the proxy server - fw()->AddRule(true, rtc::FP_TCP, kPublicAddrs[endpoint], - kHttpsProxyAddrs[endpoint]); - fw()->AddRule(false, rtc::FP_TCP, rtc::FD_ANY, - kPublicAddrs[endpoint]); - SetProxy(endpoint, rtc::PROXY_HTTPS); - } else if (config == PROXY_SOCKS) { - // Block all TCP to/from the endpoint except to the proxy server - fw()->AddRule(true, rtc::FP_TCP, kPublicAddrs[endpoint], - kSocksProxyAddrs[endpoint]); - fw()->AddRule(false, rtc::FP_TCP, rtc::FD_ANY, + fw()->AddRule(true, webrtc::FP_TCP, kPublicAddrs[endpoint], + SocketAddress(IPAddress(INADDR_ANY), 80)); + fw()->AddRule(true, webrtc::FP_TCP, kPublicAddrs[endpoint], + SocketAddress(IPAddress(INADDR_ANY), 443)); + fw()->AddRule(false, webrtc::FP_TCP, 
webrtc::FD_ANY, kPublicAddrs[endpoint]); - SetProxy(endpoint, rtc::PROXY_SOCKS5); } break; default: @@ -1218,8 +1223,6 @@ class P2PTransportChannelTest : public P2PTransportChannelTestBase { class P2PTransportChannelMatrixTest : public P2PTransportChannelTest, public WithParamInterface { protected: - P2PTransportChannelMatrixTest() : P2PTransportChannelTest(GetParam()) {} - static const Result* kMatrix[NUM_CONFIGS][NUM_CONFIGS]; }; @@ -1245,76 +1248,59 @@ class P2PTransportChannelMatrixTest : public P2PTransportChannelTest, // Test matrix. Originator behavior defined by rows, receiever by columns. // TODO(?): Fix NULLs caused by lack of TCP support in NATSocket. -// TODO(?): Fix NULLs caused by no HTTP proxy support. // TODO(?): Rearrange rows/columns from best to worst. const P2PTransportChannelMatrixTest::Result* P2PTransportChannelMatrixTest::kMatrix[NUM_CONFIGS][NUM_CONFIGS] = { - // OPEN CONE ADDR PORT SYMM 2CON SCON !UDP !TCP HTTP PRXH - // PRXS - /*OP*/ {LULU, LUSU, LUSU, LUSU, LUPU, LUSU, LUPU, LTPT, LTPT, LSRS, - NULL, LTPT}, + // OPEN CONE ADDR PORT SYMM 2CON SCON !UDP !TCP HTTP + /*OP*/ + {LULU, LUSU, LUSU, LUSU, LUPU, LUSU, LUPU, LTPT, LTPT, LSRS}, /*CO*/ - {SULU, SUSU, SUSU, SUSU, SUPU, SUSU, SUPU, NULL, NULL, LSRS, NULL, - LTRT}, + {SULU, SUSU, SUSU, SUSU, SUPU, SUSU, SUPU, NULL, NULL, LSRS}, /*AD*/ - {SULU, SUSU, SUSU, SUSU, SUPU, SUSU, SUPU, NULL, NULL, LSRS, NULL, - LTRT}, + {SULU, SUSU, SUSU, SUSU, SUPU, SUSU, SUPU, NULL, NULL, LSRS}, /*PO*/ - {SULU, SUSU, SUSU, SUSU, RUPU, SUSU, RUPU, NULL, NULL, LSRS, NULL, - LTRT}, + {SULU, SUSU, SUSU, SUSU, RUPU, SUSU, RUPU, NULL, NULL, LSRS}, /*SY*/ - {PULU, PUSU, PUSU, PURU, PURU, PUSU, PURU, NULL, NULL, LSRS, NULL, - LTRT}, + {PULU, PUSU, PUSU, PURU, PURU, PUSU, PURU, NULL, NULL, LSRS}, /*2C*/ - {SULU, SUSU, SUSU, SUSU, SUPU, SUSU, SUPU, NULL, NULL, LSRS, NULL, - LTRT}, + {SULU, SUSU, SUSU, SUSU, SUPU, SUSU, SUPU, NULL, NULL, LSRS}, /*SC*/ - {PULU, PUSU, PUSU, PURU, PURU, PUSU, PURU, NULL, NULL, LSRS, NULL, - LTRT}, + {PULU, PUSU, PUSU, PURU, PURU, PUSU, PURU, NULL, NULL, LSRS}, /*!U*/ - {LTPT, NULL, NULL, NULL, NULL, NULL, NULL, LTPT, LTPT, LSRS, NULL, - LTRT}, + {LTPT, NULL, NULL, NULL, NULL, NULL, NULL, LTPT, LTPT, LSRS}, /*!T*/ - {PTLT, NULL, NULL, NULL, NULL, NULL, NULL, PTLT, LTRT, LSRS, NULL, - LTRT}, + {PTLT, NULL, NULL, NULL, NULL, NULL, NULL, PTLT, LTRT, LSRS}, /*HT*/ - {LSRS, LSRS, LSRS, LSRS, LSRS, LSRS, LSRS, LSRS, LSRS, LSRS, NULL, - LSRS}, - /*PR*/ - {NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, - NULL}, - /*PR*/ - {LTRT, LTRT, LTRT, LTRT, LTRT, LTRT, LTRT, LTRT, LTRT, LSRS, NULL, - LTRT}, + {LSRS, LSRS, LSRS, LSRS, LSRS, LSRS, LSRS, LSRS, LSRS, LSRS}, }; // The actual tests that exercise all the various configurations. 
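Note: the matrix above drops the two proxy columns along with the PROXY_* configs, and the P2P_TEST_DECLARATION macro below now builds a per-test Environment from the parameterized field-trial string. Each generated test expands to roughly the following (sketch of one macro expansion, not part of the patch):

  TEST_P(P2PTransportChannelMatrixTest, TestOPENToNAT_FULL_CONE) {
    // GetParam() is the field-trial string supplied by INSTANTIATE_TEST_SUITE_P.
    const Environment env =
        CreateEnvironment(FieldTrials::CreateNoGlobal(GetParam()));
    ConfigureEndpoints(env, OPEN, NAT_FULL_CONE,
                       PORTALLOCATOR_ENABLE_SHARED_SOCKET,
                       PORTALLOCATOR_ENABLE_SHARED_SOCKET);
    if (kMatrix[OPEN][NAT_FULL_CONE] != NULL)
      Test(env, *kMatrix[OPEN][NAT_FULL_CONE]);
    else
      RTC_LOG(LS_WARNING) << "Not yet implemented";
  }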
// Test names are of the form P2PTransportChannelTest_TestOPENToNAT_FULL_CONE -#define P2P_TEST_DECLARATION(x, y, z) \ - TEST_P(P2PTransportChannelMatrixTest, z##Test##x##To##y) { \ - ConfigureEndpoints(x, y, PORTALLOCATOR_ENABLE_SHARED_SOCKET, \ - PORTALLOCATOR_ENABLE_SHARED_SOCKET); \ - if (kMatrix[x][y] != NULL) \ - Test(*kMatrix[x][y]); \ - else \ - RTC_LOG(LS_WARNING) << "Not yet implemented"; \ +#define P2P_TEST_DECLARATION(x, y, z) \ + TEST_P(P2PTransportChannelMatrixTest, z##Test##x##To##y) { \ + const Environment env = \ + CreateEnvironment(FieldTrials::CreateNoGlobal(GetParam())); \ + ConfigureEndpoints(env, x, y, PORTALLOCATOR_ENABLE_SHARED_SOCKET, \ + PORTALLOCATOR_ENABLE_SHARED_SOCKET); \ + if (kMatrix[x][y] != NULL) \ + Test(env, *kMatrix[x][y]); \ + else \ + RTC_LOG(LS_WARNING) << "Not yet implemented"; \ } #define P2P_TEST(x, y) P2P_TEST_DECLARATION(x, y, /* empty argument */) -#define P2P_TEST_SET(x) \ - P2P_TEST(x, OPEN) \ - P2P_TEST(x, NAT_FULL_CONE) \ - P2P_TEST(x, NAT_ADDR_RESTRICTED) \ - P2P_TEST(x, NAT_PORT_RESTRICTED) \ - P2P_TEST(x, NAT_SYMMETRIC) \ - P2P_TEST(x, NAT_DOUBLE_CONE) \ - P2P_TEST(x, NAT_SYMMETRIC_THEN_CONE) \ - P2P_TEST(x, BLOCK_UDP) \ - P2P_TEST(x, BLOCK_UDP_AND_INCOMING_TCP) \ - P2P_TEST(x, BLOCK_ALL_BUT_OUTGOING_HTTP) \ - P2P_TEST(x, PROXY_HTTPS) \ - P2P_TEST(x, PROXY_SOCKS) +#define P2P_TEST_SET(x) \ + P2P_TEST(x, OPEN) \ + P2P_TEST(x, NAT_FULL_CONE) \ + P2P_TEST(x, NAT_ADDR_RESTRICTED) \ + P2P_TEST(x, NAT_PORT_RESTRICTED) \ + P2P_TEST(x, NAT_SYMMETRIC) \ + P2P_TEST(x, NAT_DOUBLE_CONE) \ + P2P_TEST(x, NAT_SYMMETRIC_THEN_CONE) \ + P2P_TEST(x, BLOCK_UDP) \ + P2P_TEST(x, BLOCK_UDP_AND_INCOMING_TCP) \ + P2P_TEST(x, BLOCK_ALL_BUT_OUTGOING_HTTP) P2P_TEST_SET(OPEN) P2P_TEST_SET(NAT_FULL_CONE) @@ -1326,8 +1312,6 @@ P2P_TEST_SET(NAT_SYMMETRIC_THEN_CONE) P2P_TEST_SET(BLOCK_UDP) P2P_TEST_SET(BLOCK_UDP_AND_INCOMING_TCP) P2P_TEST_SET(BLOCK_ALL_BUT_OUTGOING_HTTP) -P2P_TEST_SET(PROXY_HTTPS) -P2P_TEST_SET(PROXY_SOCKS) INSTANTIATE_TEST_SUITE_P( All, @@ -1338,9 +1322,10 @@ INSTANTIATE_TEST_SUITE_P( // Test that we restart candidate allocation when local ufrag&pwd changed. // Standard Ice protocol is used. TEST_F(P2PTransportChannelTest, HandleUfragPwdChange) { - ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); - CreateChannels(); + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); + CreateChannels(env); TestHandleIceUfragPasswordChanged(); DestroyChannels(); } @@ -1348,23 +1333,31 @@ TEST_F(P2PTransportChannelTest, HandleUfragPwdChange) { // Same as above test, but with a symmetric NAT. // We should end up with relay<->prflx candidate pairs, with generation "1". TEST_F(P2PTransportChannelTest, HandleUfragPwdChangeSymmetricNat) { - ConfigureEndpoints(NAT_SYMMETRIC, NAT_SYMMETRIC, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); - CreateChannels(); + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, NAT_SYMMETRIC, NAT_SYMMETRIC, + webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); + CreateChannels(env); TestHandleIceUfragPasswordChanged(); DestroyChannels(); } // Test the operation of GetStats. 
TEST_F(P2PTransportChannelTest, GetStats) { - rtc::ScopedFakeClock clock; - ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); - CreateChannels(); - EXPECT_TRUE_SIMULATED_WAIT(ep1_ch1()->receiving() && ep1_ch1()->writable() && - ep2_ch1()->receiving() && - ep2_ch1()->writable(), - kMediumTimeout, clock); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); + CreateChannels(env); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return ep1_ch1()->receiving() && ep1_ch1()->writable() && + ep2_ch1()->receiving() && ep2_ch1()->writable(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Sends and receives 10 packets. TestSendRecv(&clock); @@ -1406,20 +1399,26 @@ TEST_F(P2PTransportChannelTest, GetStats) { } TEST_F(P2PTransportChannelTest, GetStatsSwitchConnection) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); IceConfig continual_gathering_config = - CreateIceConfig(1000, GATHER_CONTINUALLY); - - ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); - - AddAddress(0, kAlternateAddrs[1], "rmnet0", rtc::ADAPTER_TYPE_CELLULAR); - - CreateChannels(continual_gathering_config, continual_gathering_config); - EXPECT_TRUE_SIMULATED_WAIT(ep1_ch1()->receiving() && ep1_ch1()->writable() && - ep2_ch1()->receiving() && - ep2_ch1()->writable(), - kMediumTimeout, clock); + CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY); + + ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); + + AddAddress(0, kAlternateAddrs[1], "rmnet0", ADAPTER_TYPE_CELLULAR); + + CreateChannels(env, continual_gathering_config, continual_gathering_config); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return ep1_ch1()->receiving() && ep1_ch1()->writable() && + ep2_ch1()->receiving() && ep2_ch1()->writable(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Sends and receives 10 packets. TestSendRecv(&clock); @@ -1450,8 +1449,11 @@ TEST_F(P2PTransportChannelTest, GetStatsSwitchConnection) { ep1_ch1()->RemoveConnectionForTest( const_cast(old_selected_connection)); - EXPECT_TRUE_SIMULATED_WAIT(ep1_ch1()->selected_connection() != nullptr, - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ep1_ch1()->selected_connection(); }, Ne(nullptr), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Sends and receives 10 packets. TestSendRecv(&clock); @@ -1475,114 +1477,37 @@ TEST_F(P2PTransportChannelTest, GetStatsSwitchConnection) { DestroyChannels(); } -// Tests that UMAs are recorded when ICE restarts while the channel -// is disconnected. -TEST_F(P2PTransportChannelTest, TestUMAIceRestartWhileDisconnected) { - rtc::ScopedFakeClock clock; - ConfigureEndpoints(OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts); - - CreateChannels(); - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kDefaultTimeout, clock); - - // Drop all packets so that both channels become not writable. 
- fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, kPublicAddrs[0]); - const int kWriteTimeoutDelay = 8000; - EXPECT_TRUE_SIMULATED_WAIT(!ep1_ch1()->writable() && !ep2_ch1()->writable(), - kWriteTimeoutDelay, clock); - - ep1_ch1()->SetIceParameters(kIceParams[2]); - ep1_ch1()->SetRemoteIceParameters(kIceParams[3]); - ep1_ch1()->MaybeStartGathering(); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.IceRestartState", - static_cast(IceRestartState::DISCONNECTED))); - - ep2_ch1()->SetIceParameters(kIceParams[3]); - ep2_ch1()->SetRemoteIceParameters(kIceParams[2]); - ep2_ch1()->MaybeStartGathering(); - EXPECT_METRIC_EQ(2, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.IceRestartState", - static_cast(IceRestartState::DISCONNECTED))); - - DestroyChannels(); -} - -// Tests that UMAs are recorded when ICE restarts while the channel -// is connected. -TEST_F(P2PTransportChannelTest, TestUMAIceRestartWhileConnected) { - rtc::ScopedFakeClock clock; - ConfigureEndpoints(OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts); - - CreateChannels(); - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kDefaultTimeout, clock); - - ep1_ch1()->SetIceParameters(kIceParams[2]); - ep1_ch1()->SetRemoteIceParameters(kIceParams[3]); - ep1_ch1()->MaybeStartGathering(); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.IceRestartState", - static_cast(IceRestartState::CONNECTED))); - - ep2_ch1()->SetIceParameters(kIceParams[3]); - ep2_ch1()->SetRemoteIceParameters(kIceParams[2]); - ep2_ch1()->MaybeStartGathering(); - EXPECT_METRIC_EQ(2, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.IceRestartState", - static_cast(IceRestartState::CONNECTED))); - - DestroyChannels(); -} - -// Tests that UMAs are recorded when ICE restarts while the channel -// is connecting. -TEST_F(P2PTransportChannelTest, TestUMAIceRestartWhileConnecting) { - rtc::ScopedFakeClock clock; - ConfigureEndpoints(OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts); - - // Create the channels without waiting for them to become connected. - CreateChannels(); - - ep1_ch1()->SetIceParameters(kIceParams[2]); - ep1_ch1()->SetRemoteIceParameters(kIceParams[3]); - ep1_ch1()->MaybeStartGathering(); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.IceRestartState", - static_cast(IceRestartState::CONNECTING))); - - ep2_ch1()->SetIceParameters(kIceParams[3]); - ep2_ch1()->SetRemoteIceParameters(kIceParams[2]); - ep2_ch1()->MaybeStartGathering(); - EXPECT_METRIC_EQ(2, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.IceRestartState", - static_cast(IceRestartState::CONNECTING))); - - DestroyChannels(); -} - -// Tests that a UMA on ICE regathering is recorded when there is a network +// Tests that an ICE regathering reason is recorded when there is a network // change if and only if continual gathering is enabled. TEST_F(P2PTransportChannelTest, TestIceRegatheringReasonContinualGatheringByNetworkChange) { - rtc::ScopedFakeClock clock; - ConfigureEndpoints(OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts); // ep1 gathers continually but ep2 does not. 
IceConfig continual_gathering_config = - CreateIceConfig(1000, GATHER_CONTINUALLY); + CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY); IceConfig default_config; - CreateChannels(continual_gathering_config, default_config); + CreateChannels(env, continual_gathering_config, default_config); - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Adding address in ep1 will trigger continual gathering. AddAddress(0, kAlternateAddrs[0]); - EXPECT_EQ_SIMULATED_WAIT(1, - GetEndpoint(0)->GetIceRegatheringCountForReason( - IceRegatheringReason::NETWORK_CHANGE), - kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return GetEndpoint(0)->GetIceRegatheringCountForReason( + IceRegatheringReason::NETWORK_CHANGE); + }, + Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); ep2_ch1()->SetIceParameters(kIceParams[3]); ep2_ch1()->SetRemoteIceParameters(kIceParams[2]); @@ -1597,33 +1522,33 @@ TEST_F(P2PTransportChannelTest, DestroyChannels(); } -// Tests that a UMA on ICE regathering is recorded when there is a network +// Tests that an ICE regathering reason is recorded when there is a network // failure if and only if continual gathering is enabled. TEST_F(P2PTransportChannelTest, TestIceRegatheringReasonContinualGatheringByNetworkFailure) { - rtc::ScopedFakeClock clock; - ConfigureEndpoints(OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts); // ep1 gathers continually but ep2 does not. - IceConfig config1 = CreateIceConfig(1000, GATHER_CONTINUALLY); + IceConfig config1 = CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY); config1.regather_on_failed_networks_interval = 2000; IceConfig config2; config2.regather_on_failed_networks_interval = 2000; - CreateChannels(config1, config2); + CreateChannels(env, config1, config2); - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); - fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, kPublicAddrs[0]); + fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_ANY, kPublicAddrs[0]); // Timeout value such that all connections are deleted. const int kNetworkFailureTimeout = 35000; SIMULATED_WAIT(false, kNetworkFailureTimeout, clock); EXPECT_LE(1, GetEndpoint(0)->GetIceRegatheringCountForReason( IceRegatheringReason::NETWORK_FAILURE)); - EXPECT_METRIC_LE( - 1, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.IceRegatheringReason", - static_cast(IceRegatheringReason::NETWORK_FAILURE))); EXPECT_EQ(0, GetEndpoint(1)->GetIceRegatheringCountForReason( IceRegatheringReason::NETWORK_FAILURE)); @@ -1633,11 +1558,12 @@ TEST_F(P2PTransportChannelTest, // Test that we properly create a connection on a STUN ping from unknown address // when the signaling is slow. 
TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignaling) { - ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); // Emulate no remote parameters coming in. set_remote_ice_parameter_source(FROM_CANDIDATE); - CreateChannels(); + CreateChannels(env); // Only have remote parameters come in for ep2, not ep1. ep2_ch1()->SetRemoteIceParameters(kIceParams[0]); @@ -1647,7 +1573,10 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignaling) { // Wait until the callee becomes writable to make sure that a ping request is // received by the caller before their remote ICE credentials are set. - ASSERT_TRUE_WAIT(ep2_ch1()->selected_connection() != nullptr, kMediumTimeout); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return ep2_ch1()->selected_connection(); }, Ne(nullptr), + {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); // Add two sets of remote ICE credentials, so that the ones used by the // candidate will be generation 1 instead of 0. ep1_ch1()->SetRemoteIceParameters(kIceParams[3]); @@ -1655,19 +1584,29 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignaling) { // The caller should have the selected connection connected to the peer // reflexive candidate. const Connection* selected_connection = nullptr; - ASSERT_TRUE_WAIT( - (selected_connection = ep1_ch1()->selected_connection()) != nullptr, - kMediumTimeout); - EXPECT_EQ(PRFLX_PORT_TYPE, selected_connection->remote_candidate().type()); + ASSERT_THAT(webrtc::WaitUntil( + [&] { + return selected_connection = + ep1_ch1()->selected_connection(); + }, + Ne(nullptr), {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); + EXPECT_TRUE(selected_connection->remote_candidate().is_prflx()); EXPECT_EQ(kIceUfrag[1], selected_connection->remote_candidate().username()); EXPECT_EQ(kIcePwd[1], selected_connection->remote_candidate().password()); EXPECT_EQ(1u, selected_connection->remote_candidate().generation()); ResumeCandidates(1); // Verify ep1's selected connection is updated to use the 'local' candidate. - EXPECT_EQ_WAIT(LOCAL_PORT_TYPE, - ep1_ch1()->selected_connection()->remote_candidate().type(), - kMediumTimeout); + EXPECT_THAT(webrtc::WaitUntil( + [&] { + return ep1_ch1() + ->selected_connection() + ->remote_candidate() + .is_local(); + }, + IsTrue(), {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); EXPECT_EQ(selected_connection, ep1_ch1()->selected_connection()); DestroyChannels(); } @@ -1677,10 +1616,11 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignaling) { // 2. the candidate pair stats // until we learn the same address from signaling. TEST_F(P2PTransportChannelTest, PeerReflexiveRemoteCandidateIsSanitized) { - ConfigureEndpoints(OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts); + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts); // Emulate no remote parameters coming in. set_remote_ice_parameter_source(FROM_CANDIDATE); - CreateChannels(); + CreateChannels(env); // Only have remote parameters come in for ep2, not ep1. ep2_ch1()->SetRemoteIceParameters(kIceParams[0]); @@ -1688,22 +1628,28 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveRemoteCandidateIsSanitized) { // candidate. 
PauseCandidates(1); - ASSERT_TRUE_WAIT(ep2_ch1()->selected_connection() != nullptr, kMediumTimeout); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return ep2_ch1()->selected_connection(); }, Ne(nullptr), + {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); ep1_ch1()->SetRemoteIceParameters(kIceParams[1]); - ASSERT_TRUE_WAIT(ep1_ch1()->selected_connection() != nullptr, kMediumTimeout); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return ep1_ch1()->selected_connection(); }, Ne(nullptr), + {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); // Check the selected candidate pair. auto pair_ep1 = ep1_ch1()->GetSelectedCandidatePair(); ASSERT_TRUE(pair_ep1.has_value()); - EXPECT_EQ(PRFLX_PORT_TYPE, pair_ep1->remote_candidate().type()); + EXPECT_TRUE(pair_ep1->remote_candidate().is_prflx()); EXPECT_TRUE(pair_ep1->remote_candidate().address().ipaddr().IsNil()); IceTransportStats ice_transport_stats; ep1_ch1()->GetStats(&ice_transport_stats); // Check the candidate pair stats. ASSERT_EQ(1u, ice_transport_stats.connection_infos.size()); - EXPECT_EQ(PRFLX_PORT_TYPE, - ice_transport_stats.connection_infos[0].remote_candidate.type()); + EXPECT_TRUE( + ice_transport_stats.connection_infos[0].remote_candidate.is_prflx()); EXPECT_TRUE(ice_transport_stats.connection_infos[0] .remote_candidate.address() .ipaddr() @@ -1711,11 +1657,16 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveRemoteCandidateIsSanitized) { // Let ep1 receive the remote candidate to update its type from prflx to host. ResumeCandidates(1); - ASSERT_TRUE_WAIT( - ep1_ch1()->selected_connection() != nullptr && - ep1_ch1()->selected_connection()->remote_candidate().type() == - LOCAL_PORT_TYPE, - kMediumTimeout); + ASSERT_THAT(webrtc::WaitUntil( + [&] { + return ep1_ch1()->selected_connection() != nullptr && + ep1_ch1() + ->selected_connection() + ->remote_candidate() + .is_local(); + }, + IsTrue(), {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); // We should be able to reveal the address after it is learnt via // AddIceCandidate. @@ -1723,14 +1674,14 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveRemoteCandidateIsSanitized) { // Check the selected candidate pair. auto updated_pair_ep1 = ep1_ch1()->GetSelectedCandidatePair(); ASSERT_TRUE(updated_pair_ep1.has_value()); - EXPECT_EQ(LOCAL_PORT_TYPE, updated_pair_ep1->remote_candidate().type()); + EXPECT_TRUE(updated_pair_ep1->remote_candidate().is_local()); EXPECT_TRUE(HasRemoteAddress(&updated_pair_ep1.value(), kPublicAddrs[1])); ep1_ch1()->GetStats(&ice_transport_stats); // Check the candidate pair stats. ASSERT_EQ(1u, ice_transport_stats.connection_infos.size()); - EXPECT_EQ(LOCAL_PORT_TYPE, - ice_transport_stats.connection_infos[0].remote_candidate.type()); + EXPECT_TRUE( + ice_transport_stats.connection_infos[0].remote_candidate.is_local()); EXPECT_TRUE(ice_transport_stats.connection_infos[0] .remote_candidate.address() .EqualIPs(kPublicAddrs[1])); @@ -1741,11 +1692,13 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveRemoteCandidateIsSanitized) { // Test that we properly create a connection on a STUN ping from unknown address // when the signaling is slow and the end points are behind NAT. 
TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignalingWithNAT) { - ConfigureEndpoints(OPEN, NAT_SYMMETRIC, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, NAT_SYMMETRIC, + webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); // Emulate no remote parameters coming in. set_remote_ice_parameter_source(FROM_CANDIDATE); - CreateChannels(); + CreateChannels(env); // Only have remote parameters come in for ep2, not ep1. ep2_ch1()->SetRemoteIceParameters(kIceParams[0]); // Pause sending ep2's candidates to ep1 until ep1 receives the peer reflexive @@ -1754,7 +1707,10 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignalingWithNAT) { // Wait until the callee becomes writable to make sure that a ping request is // received by the caller before their remote ICE credentials are set. - ASSERT_TRUE_WAIT(ep2_ch1()->selected_connection() != nullptr, kMediumTimeout); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return ep2_ch1()->selected_connection(); }, Ne(nullptr), + {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); // Add two sets of remote ICE credentials, so that the ones used by the // candidate will be generation 1 instead of 0. ep1_ch1()->SetRemoteIceParameters(kIceParams[3]); @@ -1763,19 +1719,29 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignalingWithNAT) { // The caller's selected connection should be connected to the peer reflexive // candidate. const Connection* selected_connection = nullptr; - ASSERT_TRUE_WAIT( - (selected_connection = ep1_ch1()->selected_connection()) != nullptr, - kMediumTimeout); - EXPECT_EQ(PRFLX_PORT_TYPE, selected_connection->remote_candidate().type()); + ASSERT_THAT(webrtc::WaitUntil( + [&] { + return selected_connection = + ep1_ch1()->selected_connection(); + }, + Ne(nullptr), {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); + EXPECT_TRUE(selected_connection->remote_candidate().is_prflx()); EXPECT_EQ(kIceUfrag[1], selected_connection->remote_candidate().username()); EXPECT_EQ(kIcePwd[1], selected_connection->remote_candidate().password()); EXPECT_EQ(1u, selected_connection->remote_candidate().generation()); ResumeCandidates(1); - EXPECT_EQ_WAIT(PRFLX_PORT_TYPE, - ep1_ch1()->selected_connection()->remote_candidate().type(), - kMediumTimeout); + EXPECT_THAT(webrtc::WaitUntil( + [&] { + return ep1_ch1() + ->selected_connection() + ->remote_candidate() + .is_prflx(); + }, + IsTrue(), {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); EXPECT_EQ(selected_connection, ep1_ch1()->selected_connection()); DestroyChannels(); } @@ -1791,19 +1757,23 @@ TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignalingWithNAT) { // prioritized above new-generation candidate pairs. TEST_F(P2PTransportChannelTest, PeerReflexiveCandidateBeforeSignalingWithIceRestart) { - ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); // Only gather relay candidates, so that when the prflx candidate arrives // it's prioritized above the current candidate pair. 
- GetEndpoint(0)->allocator_->SetCandidateFilter(CF_RELAY); - GetEndpoint(1)->allocator_->SetCandidateFilter(CF_RELAY); + GetEndpoint(0)->allocator_->SetCandidateFilter(webrtc::CF_RELAY); + GetEndpoint(1)->allocator_->SetCandidateFilter(webrtc::CF_RELAY); // Setting this allows us to control when SetRemoteIceParameters is called. set_remote_ice_parameter_source(FROM_CANDIDATE); - CreateChannels(); + CreateChannels(env); // Wait for the initial connection to be made. ep1_ch1()->SetRemoteIceParameters(kIceParams[1]); ep2_ch1()->SetRemoteIceParameters(kIceParams[0]); - EXPECT_TRUE_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), kDefaultTimeout); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, + IsTrue(), {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Simulate an ICE restart on ep2, but don't signal the candidate or new // ICE parameters until after a prflx connection has been made. @@ -1815,9 +1785,15 @@ TEST_F(P2PTransportChannelTest, // The caller should have the selected connection connected to the peer // reflexive candidate. - EXPECT_EQ_WAIT(PRFLX_PORT_TYPE, - ep1_ch1()->selected_connection()->remote_candidate().type(), - kDefaultTimeout); + EXPECT_THAT(webrtc::WaitUntil( + [&] { + return ep1_ch1() + ->selected_connection() + ->remote_candidate() + .is_prflx(); + }, + IsTrue(), {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); const Connection* prflx_selected_connection = ep1_ch1()->selected_connection(); @@ -1831,25 +1807,37 @@ TEST_F(P2PTransportChannelTest, // their information to update the peer reflexive candidate. ResumeCandidates(1); - EXPECT_EQ_WAIT(RELAY_PORT_TYPE, - ep1_ch1()->selected_connection()->remote_candidate().type(), - kDefaultTimeout); + EXPECT_THAT(webrtc::WaitUntil( + [&] { + return ep1_ch1() + ->selected_connection() + ->remote_candidate() + .is_relay(); + }, + IsTrue(), {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_EQ(prflx_selected_connection, ep1_ch1()->selected_connection()); DestroyChannels(); } // Test that if remote candidates don't have ufrag and pwd, we still work. TEST_F(P2PTransportChannelTest, RemoteCandidatesWithoutUfragPwd) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); set_remote_ice_parameter_source(FROM_SETICEPARAMETERS); - ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); - CreateChannels(); + ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); + CreateChannels(env); const Connection* selected_connection = NULL; // Wait until the callee's connections are created. - EXPECT_TRUE_SIMULATED_WAIT( - (selected_connection = ep2_ch1()->selected_connection()) != NULL, - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return selected_connection = ep2_ch1()->selected_connection(); + }, + NotNull(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Wait to make sure the selected connection is not changed. SIMULATED_WAIT(ep2_ch1()->selected_connection() != selected_connection, kShortTimeout, clock); @@ -1860,12 +1848,14 @@ TEST_F(P2PTransportChannelTest, RemoteCandidatesWithoutUfragPwd) { // Test that a host behind NAT cannot be reached when incoming_only // is set to true. 
TEST_F(P2PTransportChannelTest, IncomingOnlyBlocked) { - rtc::ScopedFakeClock clock; - ConfigureEndpoints(NAT_FULL_CONE, OPEN, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, NAT_FULL_CONE, OPEN, + webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); SetAllocatorFlags(0, kOnlyLocalPorts); - CreateChannels(); + CreateChannels(env); ep1_ch1()->set_incoming_only(true); // Pump for 1 second and verify that the channels are not connected. @@ -1882,16 +1872,21 @@ TEST_F(P2PTransportChannelTest, IncomingOnlyBlocked) { // Test that a peer behind NAT can connect to a peer that has // incoming_only flag set. TEST_F(P2PTransportChannelTest, IncomingOnlyOpen) { - rtc::ScopedFakeClock clock; - ConfigureEndpoints(OPEN, NAT_FULL_CONE, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, NAT_FULL_CONE, + webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); SetAllocatorFlags(0, kOnlyLocalPorts); - CreateChannels(); + CreateChannels(env); ep1_ch1()->set_incoming_only(true); - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); DestroyChannels(); } @@ -1900,39 +1895,42 @@ TEST_F(P2PTransportChannelTest, IncomingOnlyOpen) { // connections. This has been observed in some scenarios involving // VPNs/firewalls. TEST_F(P2PTransportChannelTest, CanOnlyMakeOutgoingTcpConnections) { + const Environment env = CreateEnvironment(); // The PORTALLOCATOR_ENABLE_ANY_ADDRESS_PORTS flag is required if the // application needs this use case to work, since the application must accept // the tradeoff that more candidates need to be allocated. // // TODO(deadbeef): Later, make this flag the default, and do more elegant // things to ensure extra candidates don't waste resources? - ConfigureEndpoints( - OPEN, OPEN, - kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_ANY_ADDRESS_PORTS, - kDefaultPortAllocatorFlags); + ConfigureEndpoints(env, OPEN, OPEN, + webrtc::kDefaultPortAllocatorFlags | + webrtc::PORTALLOCATOR_ENABLE_ANY_ADDRESS_PORTS, + webrtc::kDefaultPortAllocatorFlags); // In order to simulate nothing working but outgoing TCP connections, prevent // the endpoint from binding to its interface's address as well as the // "any" addresses. It can then only make a connection by using "Connect()". - fw()->SetUnbindableIps({rtc::GetAnyIP(AF_INET), rtc::GetAnyIP(AF_INET6), + fw()->SetUnbindableIps({webrtc::GetAnyIP(AF_INET), webrtc::GetAnyIP(AF_INET6), kPublicAddrs[0].ipaddr()}); - CreateChannels(); - // Expect a "prflx" candidate on the side that can only make outgoing - // connections, endpoint 0. - Test(kPrflxTcpToLocalTcp); + CreateChannels(env); + // Expect a IceCandidateType::kPrflx candidate on the side that can only make + // outgoing connections, endpoint 0. 
+ Test(env, kPrflxTcpToLocalTcp); DestroyChannels(); } TEST_F(P2PTransportChannelTest, TestTcpConnectionsFromActiveToPassive) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); AddAddress(0, kPublicAddrs[0]); AddAddress(1, kPublicAddrs[1]); - SetAllocationStepDelay(0, kMinimumStepDelay); - SetAllocationStepDelay(1, kMinimumStepDelay); + SetAllocationStepDelay(0, webrtc::kMinimumStepDelay); + SetAllocationStepDelay(1, webrtc::kMinimumStepDelay); - int kOnlyLocalTcpPorts = PORTALLOCATOR_DISABLE_UDP | - PORTALLOCATOR_DISABLE_STUN | - PORTALLOCATOR_DISABLE_RELAY; + int kOnlyLocalTcpPorts = webrtc::PORTALLOCATOR_DISABLE_UDP | + webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_RELAY; // Disable all protocols except TCP. SetAllocatorFlags(0, kOnlyLocalTcpPorts); SetAllocatorFlags(1, kOnlyLocalTcpPorts); @@ -1948,20 +1946,25 @@ TEST_F(P2PTransportChannelTest, TestTcpConnectionsFromActiveToPassive) { // Pause candidate so we could verify the candidate properties. PauseCandidates(0); PauseCandidates(1); - CreateChannels(); + CreateChannels(env); // Verify tcp candidates. - VerifySavedTcpCandidates(0, TCPTYPE_PASSIVE_STR); - VerifySavedTcpCandidates(1, TCPTYPE_ACTIVE_STR); + VerifySavedTcpCandidates(0, webrtc::TCPTYPE_PASSIVE_STR); + VerifySavedTcpCandidates(1, webrtc::TCPTYPE_ACTIVE_STR); // Resume candidates. ResumeCandidates(0); ResumeCandidates(1); - EXPECT_TRUE_SIMULATED_WAIT( - CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), kPublicAddrs[0], - kPublicAddrs[1]), - kShortTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckCandidatePairAndConnected( + ep1_ch1(), ep2_ch1(), kPublicAddrs[0], kPublicAddrs[1]); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kShortTimeout), .clock = &clock}), + webrtc::IsRtcOk()); TestSendRecv(&clock); DestroyChannels(); @@ -1969,17 +1972,21 @@ TEST_F(P2PTransportChannelTest, TestTcpConnectionsFromActiveToPassive) { // Test that tcptype is set on all candidates for a connection running over TCP. TEST_F(P2PTransportChannelTest, TestTcpConnectionTcptypeSet) { - rtc::ScopedFakeClock clock; - ConfigureEndpoints(BLOCK_UDP_AND_INCOMING_TCP, OPEN, - PORTALLOCATOR_ENABLE_SHARED_SOCKET, - PORTALLOCATOR_ENABLE_SHARED_SOCKET); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, BLOCK_UDP_AND_INCOMING_TCP, OPEN, + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET, + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET); SetAllowTcpListen(0, false); // active. SetAllowTcpListen(1, true); // actpass. - CreateChannels(); + CreateChannels(env); - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); SIMULATED_WAIT(false, kDefaultTimeout, clock); EXPECT_EQ(RemoteCandidate(ep1_ch1())->tcptype(), "passive"); @@ -1991,48 +1998,83 @@ TEST_F(P2PTransportChannelTest, TestTcpConnectionTcptypeSet) { } TEST_F(P2PTransportChannelTest, TestIceRoleConflict) { + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); AddAddress(0, kPublicAddrs[0]); AddAddress(1, kPublicAddrs[1]); - TestSignalRoleConflict(); + + // Creating channels with both channels role set to CONTROLLING. 
+ SetIceRole(0, webrtc::ICEROLE_CONTROLLING); + SetIceRole(1, webrtc::ICEROLE_CONTROLLING); + + CreateChannels(env); + bool first_endpoint_has_lower_tiebreaker = + GetEndpoint(0)->allocator_->ice_tiebreaker() < + GetEndpoint(1)->allocator_->ice_tiebreaker(); + // Since both the channels initiated with controlling state, the channel with + // the lower tiebreaker should receive SignalRoleConflict. + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return GetRoleConflict(first_endpoint_has_lower_tiebreaker ? 0 : 1); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kShortTimeout), .clock = &clock}), + webrtc::IsRtcOk()); + EXPECT_FALSE(GetRoleConflict(first_endpoint_has_lower_tiebreaker ? 1 : 0)); + + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kShortTimeout), .clock = &clock}), + webrtc::IsRtcOk()); + + EXPECT_TRUE(ep1_ch1()->selected_connection() && + ep2_ch1()->selected_connection()); + + TestSendRecv(&clock); + DestroyChannels(); } -// Tests that the ice configs (protocol, tiebreaker and role) can be passed -// down to ports. +// Tests that the ice configs (protocol and role) can be passed down to ports. TEST_F(P2PTransportChannelTest, TestIceConfigWillPassDownToPort) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); AddAddress(0, kPublicAddrs[0]); AddAddress(1, kPublicAddrs[1]); // Give the first connection the higher tiebreaker so its role won't // change unless we tell it to. - SetIceRole(0, ICEROLE_CONTROLLING); - SetIceTiebreaker(0, kHighTiebreaker); - SetIceRole(1, ICEROLE_CONTROLLING); - SetIceTiebreaker(1, kLowTiebreaker); + SetIceRole(0, webrtc::ICEROLE_CONTROLLING); + SetIceRole(1, webrtc::ICEROLE_CONTROLLING); - CreateChannels(); + CreateChannels(env); - EXPECT_EQ_SIMULATED_WAIT(2u, ep1_ch1()->ports().size(), kShortTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ep1_ch1()->ports().size(); }, Eq(2u), + {.timeout = TimeDelta::Millis(kShortTimeout), .clock = &clock}), + webrtc::IsRtcOk()); const std::vector ports_before = ep1_ch1()->ports(); for (size_t i = 0; i < ports_before.size(); ++i) { - EXPECT_EQ(ICEROLE_CONTROLLING, ports_before[i]->GetIceRole()); - EXPECT_EQ(kHighTiebreaker, ports_before[i]->IceTiebreaker()); + EXPECT_EQ(webrtc::ICEROLE_CONTROLLING, ports_before[i]->GetIceRole()); } - ep1_ch1()->SetIceRole(ICEROLE_CONTROLLED); - ep1_ch1()->SetIceTiebreaker(kLowTiebreaker); + ep1_ch1()->SetIceRole(webrtc::ICEROLE_CONTROLLED); const std::vector ports_after = ep1_ch1()->ports(); for (size_t i = 0; i < ports_after.size(); ++i) { - EXPECT_EQ(ICEROLE_CONTROLLED, ports_before[i]->GetIceRole()); - // SetIceTiebreaker after ports have been created will fail. So expect the - // original value. - EXPECT_EQ(kHighTiebreaker, ports_before[i]->IceTiebreaker()); + EXPECT_EQ(webrtc::ICEROLE_CONTROLLED, ports_before[i]->GetIceRole()); } - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kShortTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kShortTimeout), .clock = &clock}), + webrtc::IsRtcOk()); EXPECT_TRUE(ep1_ch1()->selected_connection() && ep2_ch1()->selected_connection()); @@ -2043,46 +2085,55 @@ TEST_F(P2PTransportChannelTest, TestIceConfigWillPassDownToPort) { // Verify that we can set DSCP value and retrieve properly from P2PTC. 
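// A sketch of the recurring rewrite in these hunks, using only names already
// present in this file: the simulated-clock wait macros become
// webrtc::WaitUntil() plus a gMock matcher checked through EXPECT_THAT. The
// helper below is illustrative only and is not part of the original change;
// EXPECT_EQ_SIMULATED_WAIT follows the same shape with Eq(expected) in place
// of IsTrue(), as in TestIceConfigWillPassDownToPort above.
template <typename Condition>
void ExpectTrueSimulatedWaitSketch(Condition condition,
                                   int timeout_ms,
                                   ScopedFakeClock& clock) {
  // Rough equivalent of EXPECT_TRUE_SIMULATED_WAIT(condition(), timeout_ms,
  // clock), expressed with the WaitUntil helper adopted throughout this diff.
  EXPECT_THAT(webrtc::WaitUntil(condition, ::testing::IsTrue(),
                                {.timeout = TimeDelta::Millis(timeout_ms),
                                 .clock = &clock}),
              webrtc::IsRtcOk());
}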
TEST_F(P2PTransportChannelTest, TestDefaultDscpValue) { + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); AddAddress(0, kPublicAddrs[0]); AddAddress(1, kPublicAddrs[1]); - CreateChannels(); - EXPECT_EQ(rtc::DSCP_NO_CHANGE, GetEndpoint(0)->cd1_.ch_->DefaultDscpValue()); - EXPECT_EQ(rtc::DSCP_NO_CHANGE, GetEndpoint(1)->cd1_.ch_->DefaultDscpValue()); - GetEndpoint(0)->cd1_.ch_->SetOption(rtc::Socket::OPT_DSCP, rtc::DSCP_CS6); - GetEndpoint(1)->cd1_.ch_->SetOption(rtc::Socket::OPT_DSCP, rtc::DSCP_CS6); - EXPECT_EQ(rtc::DSCP_CS6, GetEndpoint(0)->cd1_.ch_->DefaultDscpValue()); - EXPECT_EQ(rtc::DSCP_CS6, GetEndpoint(1)->cd1_.ch_->DefaultDscpValue()); - GetEndpoint(0)->cd1_.ch_->SetOption(rtc::Socket::OPT_DSCP, rtc::DSCP_AF41); - GetEndpoint(1)->cd1_.ch_->SetOption(rtc::Socket::OPT_DSCP, rtc::DSCP_AF41); - EXPECT_EQ(rtc::DSCP_AF41, GetEndpoint(0)->cd1_.ch_->DefaultDscpValue()); - EXPECT_EQ(rtc::DSCP_AF41, GetEndpoint(1)->cd1_.ch_->DefaultDscpValue()); + CreateChannels(env); + EXPECT_EQ(DSCP_NO_CHANGE, GetEndpoint(0)->cd1_.ch_->DefaultDscpValue()); + EXPECT_EQ(DSCP_NO_CHANGE, GetEndpoint(1)->cd1_.ch_->DefaultDscpValue()); + GetEndpoint(0)->cd1_.ch_->SetOption(Socket::OPT_DSCP, DSCP_CS6); + GetEndpoint(1)->cd1_.ch_->SetOption(Socket::OPT_DSCP, DSCP_CS6); + EXPECT_EQ(DSCP_CS6, GetEndpoint(0)->cd1_.ch_->DefaultDscpValue()); + EXPECT_EQ(DSCP_CS6, GetEndpoint(1)->cd1_.ch_->DefaultDscpValue()); + GetEndpoint(0)->cd1_.ch_->SetOption(Socket::OPT_DSCP, DSCP_AF41); + GetEndpoint(1)->cd1_.ch_->SetOption(Socket::OPT_DSCP, DSCP_AF41); + EXPECT_EQ(DSCP_AF41, GetEndpoint(0)->cd1_.ch_->DefaultDscpValue()); + EXPECT_EQ(DSCP_AF41, GetEndpoint(1)->cd1_.ch_->DefaultDscpValue()); DestroyChannels(); } // Verify IPv6 connection is preferred over IPv4. TEST_F(P2PTransportChannelTest, TestIPv6Connections) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); AddAddress(0, kIPv6PublicAddrs[0]); AddAddress(0, kPublicAddrs[0]); AddAddress(1, kIPv6PublicAddrs[1]); AddAddress(1, kPublicAddrs[1]); - SetAllocationStepDelay(0, kMinimumStepDelay); - SetAllocationStepDelay(1, kMinimumStepDelay); + SetAllocationStepDelay(0, webrtc::kMinimumStepDelay); + SetAllocationStepDelay(1, webrtc::kMinimumStepDelay); // Enable IPv6 - SetAllocatorFlags( - 0, PORTALLOCATOR_ENABLE_IPV6 | PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); - SetAllocatorFlags( - 1, PORTALLOCATOR_ENABLE_IPV6 | PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); - - CreateChannels(); - - EXPECT_TRUE_SIMULATED_WAIT( - CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), kIPv6PublicAddrs[0], - kIPv6PublicAddrs[1]), - kShortTimeout, clock); + SetAllocatorFlags(0, webrtc::PORTALLOCATOR_ENABLE_IPV6 | + webrtc::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); + SetAllocatorFlags(1, webrtc::PORTALLOCATOR_ENABLE_IPV6 | + webrtc::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); + + CreateChannels(env); + + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckCandidatePairAndConnected( + ep1_ch1(), ep2_ch1(), kIPv6PublicAddrs[0], kIPv6PublicAddrs[1]); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kShortTimeout), .clock = &clock}), + webrtc::IsRtcOk()); TestSendRecv(&clock); DestroyChannels(); @@ -2090,28 +2141,33 @@ TEST_F(P2PTransportChannelTest, TestIPv6Connections) { // Testing forceful TURN connections. 
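// A second recurring mapping (sketch, based only on names already used in
// this file): candidate-type string constants give way to the is_*() helpers
// and IceCandidateType, e.g.
//   EXPECT_EQ(RELAY_PORT_TYPE, LocalCandidate(ep1_ch1())->type());
// becomes
//   EXPECT_TRUE(LocalCandidate(ep1_ch1())->is_relay());
// (likewise is_prflx() for PRFLX_PORT_TYPE), and CreateUdpCandidate() now
// takes IceCandidateType::kRelay / IceCandidateType::kHost rather than
// RELAY_PORT_TYPE / LOCAL_PORT_TYPE in the TURN tests further down. The next
// test, TestForceTurn, shows the is_relay() form.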
TEST_F(P2PTransportChannelTest, TestForceTurn) { - rtc::ScopedFakeClock clock; - ConfigureEndpoints( - NAT_PORT_RESTRICTED, NAT_SYMMETRIC, - kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET, - kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, NAT_PORT_RESTRICTED, NAT_SYMMETRIC, + webrtc::kDefaultPortAllocatorFlags | + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET, + webrtc::kDefaultPortAllocatorFlags | + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET); set_force_relay(true); - SetAllocationStepDelay(0, kMinimumStepDelay); - SetAllocationStepDelay(1, kMinimumStepDelay); + SetAllocationStepDelay(0, webrtc::kMinimumStepDelay); + SetAllocationStepDelay(1, webrtc::kMinimumStepDelay); - CreateChannels(); + CreateChannels(env); - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); EXPECT_TRUE(ep1_ch1()->selected_connection() && ep2_ch1()->selected_connection()); - EXPECT_EQ(RELAY_PORT_TYPE, RemoteCandidate(ep1_ch1())->type()); - EXPECT_EQ(RELAY_PORT_TYPE, LocalCandidate(ep1_ch1())->type()); - EXPECT_EQ(RELAY_PORT_TYPE, RemoteCandidate(ep2_ch1())->type()); - EXPECT_EQ(RELAY_PORT_TYPE, LocalCandidate(ep2_ch1())->type()); + EXPECT_TRUE(RemoteCandidate(ep1_ch1())->is_relay()); + EXPECT_TRUE(LocalCandidate(ep1_ch1())->is_relay()); + EXPECT_TRUE(RemoteCandidate(ep2_ch1())->is_relay()); + EXPECT_TRUE(LocalCandidate(ep2_ch1())->is_relay()); TestSendRecv(&clock); DestroyChannels(); @@ -2120,19 +2176,23 @@ TEST_F(P2PTransportChannelTest, TestForceTurn) { // Test that if continual gathering is set to true, ICE gathering state will // not change to "Complete", and vice versa. TEST_F(P2PTransportChannelTest, TestContinualGathering) { - rtc::ScopedFakeClock clock; - ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); - SetAllocationStepDelay(0, kDefaultStepDelay); - SetAllocationStepDelay(1, kDefaultStepDelay); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); + SetAllocationStepDelay(0, webrtc::kDefaultStepDelay); + SetAllocationStepDelay(1, webrtc::kDefaultStepDelay); IceConfig continual_gathering_config = - CreateIceConfig(1000, GATHER_CONTINUALLY); + CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY); // By default, ep2 does not gather continually. IceConfig default_config; - CreateChannels(continual_gathering_config, default_config); + CreateChannels(env, continual_gathering_config, default_config); - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); SIMULATED_WAIT( IceGatheringState::kIceGatheringComplete == ep1_ch1()->gathering_state(), kShortTimeout, clock); @@ -2148,9 +2208,10 @@ TEST_F(P2PTransportChannelTest, TestContinualGathering) { // Test that a connection succeeds when the P2PTransportChannel uses a pooled // PortAllocatorSession that has not yet finished gathering candidates. 
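// Sketch of the per-test environment plumbing these hunks introduce,
// assembled from calls already shown in this diff: each test builds an
// Environment up front and threads it through setup, roughly
//   const Environment env = CreateEnvironment();
//   ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags,
//                      webrtc::kDefaultPortAllocatorFlags);
//   CreateChannels(env);
// Tests that add addresses manually call CreatePortAllocators(env) before
// AddAddress(), and field trials now arrive via
// CreateEnvironment(FieldTrials::CreateNoGlobal("...")) instead of
// ScopedKeyValueConfig.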
TEST_F(P2PTransportChannelTest, TestUsingPooledSessionBeforeDoneGathering) { - rtc::ScopedFakeClock clock; - ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); // First create a pooled session for each endpoint. auto& allocator_1 = GetEndpoint(0)->allocator_; auto& allocator_2 = GetEndpoint(1)->allocator_; @@ -2173,9 +2234,12 @@ TEST_F(P2PTransportChannelTest, TestUsingPooledSessionBeforeDoneGathering) { EXPECT_TRUE(pooled_session_2->ReadyPorts().empty()); EXPECT_TRUE(pooled_session_2->ReadyCandidates().empty()); // Now let the endpoints connect and try exchanging some data. - CreateChannels(); - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kMediumTimeout, clock); + CreateChannels(env); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); TestSendRecv(&clock); // Make sure the P2PTransportChannels are actually using ports from the // pooled sessions. @@ -2191,9 +2255,10 @@ TEST_F(P2PTransportChannelTest, TestUsingPooledSessionBeforeDoneGathering) { // Test that a connection succeeds when the P2PTransportChannel uses a pooled // PortAllocatorSession that already finished gathering candidates. TEST_F(P2PTransportChannelTest, TestUsingPooledSessionAfterDoneGathering) { - rtc::ScopedFakeClock clock; - ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); // First create a pooled session for each endpoint. auto& allocator_1 = GetEndpoint(0)->allocator_; auto& allocator_2 = GetEndpoint(1)->allocator_; @@ -2212,13 +2277,22 @@ TEST_F(P2PTransportChannelTest, TestUsingPooledSessionAfterDoneGathering) { ASSERT_NE(nullptr, pooled_session_2); // Wait for the pooled sessions to finish gathering before the // P2PTransportChannels try to use them. - EXPECT_TRUE_SIMULATED_WAIT(pooled_session_1->CandidatesAllocationDone() && - pooled_session_2->CandidatesAllocationDone(), - kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return pooled_session_1->CandidatesAllocationDone() && + pooled_session_2->CandidatesAllocationDone(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Now let the endpoints connect and try exchanging some data. - CreateChannels(); - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kMediumTimeout, clock); + CreateChannels(env); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); TestSendRecv(&clock); // Make sure the P2PTransportChannels are actually using ports from the // pooled sessions. @@ -2238,29 +2312,36 @@ TEST_F(P2PTransportChannelTest, TestUsingPooledSessionAfterDoneGathering) { // class that operates on a single P2PTransportChannel, once an appropriate one // (which supports TURN servers and TURN candidate gathering) is available. 
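// The "presumed writable" tests below share one setup idiom (sketch copied
// from the hunks that follow): a single channel is created and configured to
// treat a fully relayed pair as writable before any STUN response arrives,
//   IceConfig config;
//   config.presume_writable_when_fully_relayed = true;
//   ep1_ch1()->SetIceConfig(config);
//   ep1_ch1()->MaybeStartGathering();
// after which remote candidates are injected with AddRemoteCandidate() and
// the selected pair is checked via is_relay()/is_prflx().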
TEST_F(P2PTransportChannelTest, TurnToTurnPresumedWritable) { - ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); // Only configure one channel so we can control when the remote candidate // is added. - GetEndpoint(0)->cd1_.ch_ = CreateChannel(0, ICE_CANDIDATE_COMPONENT_DEFAULT, - kIceParams[0], kIceParams[1]); + GetEndpoint(0)->cd1_.ch_ = CreateChannel( + env, 0, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1]); IceConfig config; config.presume_writable_when_fully_relayed = true; ep1_ch1()->SetIceConfig(config); ep1_ch1()->MaybeStartGathering(); - EXPECT_EQ_WAIT(IceGatheringState::kIceGatheringComplete, - ep1_ch1()->gathering_state(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ep1_ch1()->gathering_state(); }, + Eq(IceGatheringState::kIceGatheringComplete), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Add two remote candidates; a host candidate (with higher priority) // and TURN candidate. ep1_ch1()->AddRemoteCandidate( - CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 100)); + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 100)); ep1_ch1()->AddRemoteCandidate( - CreateUdpCandidate(RELAY_PORT_TYPE, "2.2.2.2", 2, 0)); + CreateUdpCandidate(IceCandidateType::kRelay, "2.2.2.2", 2, 0)); // Expect that the TURN-TURN candidate pair will be prioritized since it's // "probably writable". - EXPECT_TRUE_WAIT(ep1_ch1()->selected_connection() != nullptr, kShortTimeout); - EXPECT_EQ(RELAY_PORT_TYPE, LocalCandidate(ep1_ch1())->type()); - EXPECT_EQ(RELAY_PORT_TYPE, RemoteCandidate(ep1_ch1())->type()); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return ep1_ch1()->selected_connection(); }, Ne(nullptr), + {.timeout = TimeDelta::Millis(kShortTimeout)}), + webrtc::IsRtcOk()); + EXPECT_TRUE(LocalCandidate(ep1_ch1())->is_relay()); + EXPECT_TRUE(RemoteCandidate(ep1_ch1())->is_relay()); // Also expect that the channel instantly indicates that it's writable since // it has a TURN-TURN pair. EXPECT_TRUE(ep1_ch1()->writable()); @@ -2275,7 +2356,8 @@ TEST_F(P2PTransportChannelTest, TurnToTurnPresumedWritable) { // Test that a TURN/peer reflexive candidate pair is also presumed writable. TEST_F(P2PTransportChannelTest, TurnToPrflxPresumedWritable) { - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; + const Environment env = CreateEnvironment(); // We need to add artificial network delay to verify that the connection // is presumed writable before it's actually writable. Without this delay @@ -2283,18 +2365,19 @@ TEST_F(P2PTransportChannelTest, TurnToPrflxPresumedWritable) { virtual_socket_server()->set_delay_mean(50); virtual_socket_server()->UpdateDelayDistribution(); - ConfigureEndpoints(NAT_SYMMETRIC, NAT_SYMMETRIC, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); + ConfigureEndpoints(env, NAT_SYMMETRIC, NAT_SYMMETRIC, + webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); // We want the remote TURN candidate to show up as prflx. To do this we need // to configure the server to accept packets from an address we haven't // explicitly installed permission for. 
test_turn_server()->set_enable_permission_checks(false); IceConfig config; config.presume_writable_when_fully_relayed = true; - GetEndpoint(0)->cd1_.ch_ = CreateChannel(0, ICE_CANDIDATE_COMPONENT_DEFAULT, - kIceParams[0], kIceParams[1]); - GetEndpoint(1)->cd1_.ch_ = CreateChannel(1, ICE_CANDIDATE_COMPONENT_DEFAULT, - kIceParams[1], kIceParams[0]); + GetEndpoint(0)->cd1_.ch_ = CreateChannel( + env, 0, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1]); + GetEndpoint(1)->cd1_.ch_ = CreateChannel( + env, 1, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[1], kIceParams[0]); ep1_ch1()->SetIceConfig(config); ep2_ch1()->SetIceConfig(config); // Don't signal candidates from channel 2, so that channel 1 sees the TURN @@ -2304,18 +2387,26 @@ TEST_F(P2PTransportChannelTest, TurnToPrflxPresumedWritable) { ep2_ch1()->MaybeStartGathering(); // Wait for the TURN<->prflx connection. - EXPECT_TRUE_SIMULATED_WAIT(ep1_ch1()->receiving() && ep1_ch1()->writable(), - kShortTimeout, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ep1_ch1()->receiving() && ep1_ch1()->writable(); }, + IsTrue(), + {.timeout = TimeDelta::Millis(kShortTimeout), .clock = &fake_clock}), + webrtc::IsRtcOk()); ASSERT_NE(nullptr, ep1_ch1()->selected_connection()); - EXPECT_EQ(RELAY_PORT_TYPE, LocalCandidate(ep1_ch1())->type()); - EXPECT_EQ(PRFLX_PORT_TYPE, RemoteCandidate(ep1_ch1())->type()); + EXPECT_TRUE(LocalCandidate(ep1_ch1())->is_relay()); + EXPECT_TRUE(RemoteCandidate(ep1_ch1())->is_prflx()); // Make sure that at this point the connection is only presumed writable, // not fully writable. EXPECT_FALSE(ep1_ch1()->selected_connection()->writable()); // Now wait for it to actually become writable. - EXPECT_TRUE_SIMULATED_WAIT(ep1_ch1()->selected_connection()->writable(), - kShortTimeout, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ep1_ch1()->selected_connection()->writable(); }, + IsTrue(), + {.timeout = TimeDelta::Millis(kShortTimeout), .clock = &fake_clock}), + webrtc::IsRtcOk()); // Explitly destroy channels, before fake clock is destroyed. DestroyChannels(); @@ -2324,36 +2415,44 @@ TEST_F(P2PTransportChannelTest, TurnToPrflxPresumedWritable) { // Test that a presumed-writable TURN<->TURN connection is preferred above an // unreliable connection (one that has failed to be pinged for some time). TEST_F(P2PTransportChannelTest, PresumedWritablePreferredOverUnreliable) { - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; + const Environment env = CreateEnvironment(); - ConfigureEndpoints(NAT_SYMMETRIC, NAT_SYMMETRIC, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); + ConfigureEndpoints(env, NAT_SYMMETRIC, NAT_SYMMETRIC, + webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); IceConfig config; config.presume_writable_when_fully_relayed = true; - GetEndpoint(0)->cd1_.ch_ = CreateChannel(0, ICE_CANDIDATE_COMPONENT_DEFAULT, - kIceParams[0], kIceParams[1]); - GetEndpoint(1)->cd1_.ch_ = CreateChannel(1, ICE_CANDIDATE_COMPONENT_DEFAULT, - kIceParams[1], kIceParams[0]); + GetEndpoint(0)->cd1_.ch_ = CreateChannel( + env, 0, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1]); + GetEndpoint(1)->cd1_.ch_ = CreateChannel( + env, 1, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[1], kIceParams[0]); ep1_ch1()->SetIceConfig(config); ep2_ch1()->SetIceConfig(config); ep1_ch1()->MaybeStartGathering(); ep2_ch1()->MaybeStartGathering(); // Wait for initial connection as usual. 
- EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kShortTimeout, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kShortTimeout), .clock = &fake_clock}), + webrtc::IsRtcOk()); const Connection* old_selected_connection = ep1_ch1()->selected_connection(); // Destroy the second channel and wait for the current connection on the // first channel to become "unreliable", making it no longer writable. GetEndpoint(1)->cd1_.ch_.reset(); - EXPECT_TRUE_SIMULATED_WAIT(!ep1_ch1()->writable(), kDefaultTimeout, - fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return !ep1_ch1()->writable(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_NE(nullptr, ep1_ch1()->selected_connection()); // Add a remote TURN candidate. The first channel should still have a TURN // port available to make a TURN<->TURN pair that's presumed writable. ep1_ch1()->AddRemoteCandidate( - CreateUdpCandidate(RELAY_PORT_TYPE, "2.2.2.2", 2, 0)); - EXPECT_EQ(RELAY_PORT_TYPE, LocalCandidate(ep1_ch1())->type()); - EXPECT_EQ(RELAY_PORT_TYPE, RemoteCandidate(ep1_ch1())->type()); + CreateUdpCandidate(IceCandidateType::kRelay, "2.2.2.2", 2, 0)); + EXPECT_TRUE(LocalCandidate(ep1_ch1())->is_relay()); + EXPECT_TRUE(RemoteCandidate(ep1_ch1())->is_relay()); EXPECT_TRUE(ep1_ch1()->writable()); EXPECT_TRUE(GetEndpoint(0)->ready_to_send_); EXPECT_NE(old_selected_connection, ep1_ch1()->selected_connection()); @@ -2364,24 +2463,31 @@ TEST_F(P2PTransportChannelTest, PresumedWritablePreferredOverUnreliable) { // Ensure that "SignalReadyToSend" is fired as expected with a "presumed // writable" connection. Previously this did not work. TEST_F(P2PTransportChannelTest, SignalReadyToSendWithPresumedWritable) { - ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); // Only test one endpoint, so we can ensure the connection doesn't receive a // binding response and advance beyond being "presumed" writable. - GetEndpoint(0)->cd1_.ch_ = CreateChannel(0, ICE_CANDIDATE_COMPONENT_DEFAULT, - kIceParams[0], kIceParams[1]); + GetEndpoint(0)->cd1_.ch_ = CreateChannel( + env, 0, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1]); IceConfig config; config.presume_writable_when_fully_relayed = true; ep1_ch1()->SetIceConfig(config); ep1_ch1()->MaybeStartGathering(); - EXPECT_EQ_WAIT(IceGatheringState::kIceGatheringComplete, - ep1_ch1()->gathering_state(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ep1_ch1()->gathering_state(); }, + Eq(IceGatheringState::kIceGatheringComplete), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); ep1_ch1()->AddRemoteCandidate( - CreateUdpCandidate(RELAY_PORT_TYPE, "1.1.1.1", 1, 0)); + CreateUdpCandidate(IceCandidateType::kRelay, "1.1.1.1", 1, 0)); // Sanity checking the type of the connection. 
- EXPECT_TRUE_WAIT(ep1_ch1()->selected_connection() != nullptr, kShortTimeout); - EXPECT_EQ(RELAY_PORT_TYPE, LocalCandidate(ep1_ch1())->type()); - EXPECT_EQ(RELAY_PORT_TYPE, RemoteCandidate(ep1_ch1())->type()); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return ep1_ch1()->selected_connection(); }, Ne(nullptr), + {.timeout = TimeDelta::Millis(kShortTimeout)}), + webrtc::IsRtcOk()); + EXPECT_TRUE(LocalCandidate(ep1_ch1())->is_relay()); + EXPECT_TRUE(RemoteCandidate(ep1_ch1())->is_relay()); // Tell the socket server to block packets (returning EWOULDBLOCK). virtual_socket_server()->SetSendingBlocked(true); @@ -2403,41 +2509,46 @@ TEST_F(P2PTransportChannelTest, SignalReadyToSendWithPresumedWritable) { // crbug.com/webrtc/9034. TEST_F(P2PTransportChannelTest, TurnToPrflxSelectedAfterResolvingIceControllingRoleConflict) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); // Gather only relay candidates. - ConfigureEndpoints(NAT_SYMMETRIC, NAT_SYMMETRIC, - kDefaultPortAllocatorFlags | PORTALLOCATOR_DISABLE_UDP | - PORTALLOCATOR_DISABLE_STUN | PORTALLOCATOR_DISABLE_TCP, - kDefaultPortAllocatorFlags | PORTALLOCATOR_DISABLE_UDP | - PORTALLOCATOR_DISABLE_STUN | - PORTALLOCATOR_DISABLE_TCP); + ConfigureEndpoints( + env, NAT_SYMMETRIC, NAT_SYMMETRIC, + webrtc::kDefaultPortAllocatorFlags | webrtc::PORTALLOCATOR_DISABLE_UDP | + webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_TCP, + webrtc::kDefaultPortAllocatorFlags | webrtc::PORTALLOCATOR_DISABLE_UDP | + webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_TCP); // With conflicting ICE roles, endpoint 1 has the higher tie breaker and will // send a binding error response. - SetIceRole(0, ICEROLE_CONTROLLING); - SetIceTiebreaker(0, kHighTiebreaker); - SetIceRole(1, ICEROLE_CONTROLLING); - SetIceTiebreaker(1, kLowTiebreaker); + SetIceRole(0, webrtc::ICEROLE_CONTROLLING); + SetIceRole(1, webrtc::ICEROLE_CONTROLLING); // We want the remote TURN candidate to show up as prflx. To do this we need // to configure the server to accept packets from an address we haven't // explicitly installed permission for. test_turn_server()->set_enable_permission_checks(false); - GetEndpoint(0)->cd1_.ch_ = CreateChannel(0, ICE_CANDIDATE_COMPONENT_DEFAULT, - kIceParams[0], kIceParams[1]); - GetEndpoint(1)->cd1_.ch_ = CreateChannel(1, ICE_CANDIDATE_COMPONENT_DEFAULT, - kIceParams[1], kIceParams[0]); + GetEndpoint(0)->cd1_.ch_ = CreateChannel( + env, 0, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1]); + GetEndpoint(1)->cd1_.ch_ = CreateChannel( + env, 1, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[1], kIceParams[0]); // Don't signal candidates from channel 2, so that channel 1 sees the TURN // candidate as peer reflexive. 
PauseCandidates(1); ep1_ch1()->MaybeStartGathering(); ep2_ch1()->MaybeStartGathering(); - EXPECT_TRUE_SIMULATED_WAIT(ep1_ch1()->receiving() && ep1_ch1()->writable(), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ep1_ch1()->receiving() && ep1_ch1()->writable(); }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); ASSERT_NE(nullptr, ep1_ch1()->selected_connection()); - EXPECT_EQ(RELAY_PORT_TYPE, LocalCandidate(ep1_ch1())->type()); - EXPECT_EQ(PRFLX_PORT_TYPE, RemoteCandidate(ep1_ch1())->type()); + EXPECT_TRUE(LocalCandidate(ep1_ch1())->is_relay()); + EXPECT_TRUE(RemoteCandidate(ep1_ch1())->is_prflx()); DestroyChannels(); } @@ -2446,12 +2557,12 @@ TEST_F(P2PTransportChannelTest, // acknowledgement in the connectivity check from the remote peer. TEST_F(P2PTransportChannelTest, CanConnectWithPiggybackCheckAcknowledgementWhenCheckResponseBlocked) { - webrtc::test::ScopedKeyValueConfig field_trials( - field_trials_, "WebRTC-PiggybackIceCheckAcknowledgement/Enabled/"); - rtc::ScopedFakeClock clock; - ConfigureEndpoints(OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(FieldTrials::CreateNoGlobal( + "WebRTC-PiggybackIceCheckAcknowledgement/Enabled/")); + ConfigureEndpoints(env, OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts); IceConfig ep1_config; - IceConfig ep2_config = CreateIceConfig(1000, GATHER_CONTINUALLY); + IceConfig ep2_config = CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY); // Let ep2 be tolerable of the loss of connectivity checks, so that it keeps // sending pings even after ep1 becomes unwritable as we configure the // firewall below. @@ -2460,20 +2571,27 @@ TEST_F(P2PTransportChannelTest, ep2_config.ice_unwritable_min_checks = 30; ep2_config.ice_inactive_timeout = 60 * 1000; - CreateChannels(ep1_config, ep2_config); + CreateChannels(env, ep1_config, ep2_config); // Wait until both sides become writable for the first time. - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Block the ingress traffic to ep1 so that there is no check response from // ep2. ASSERT_NE(nullptr, LocalCandidate(ep1_ch1())); - fw()->AddRule(false, rtc::FP_ANY, rtc::FD_IN, + fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_IN, LocalCandidate(ep1_ch1())->address()); // Wait until ep1 becomes unwritable. At the same time ep2 should be still // fine so that it will keep sending pings. - EXPECT_TRUE_SIMULATED_WAIT(ep1_ch1() != nullptr && !ep1_ch1()->writable(), - kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ep1_ch1() != nullptr && !ep1_ch1()->writable(); }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); EXPECT_TRUE(ep2_ch1() != nullptr && ep2_ch1()->writable()); // Now let the pings from ep2 to flow but block any pings from ep1, so that // ep1 can only become writable again after receiving an incoming ping from @@ -2481,10 +2599,14 @@ TEST_F(P2PTransportChannelTest, // though that ep1 should have stopped sending pings after becoming unwritable // in the current design. 
fw()->ClearRules(); - fw()->AddRule(false, rtc::FP_ANY, rtc::FD_OUT, + fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_OUT, LocalCandidate(ep1_ch1())->address()); - EXPECT_TRUE_SIMULATED_WAIT(ep1_ch1() != nullptr && ep1_ch1()->writable(), - kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ep1_ch1() != nullptr && ep1_ch1()->writable(); }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); DestroyChannels(); } @@ -2493,17 +2615,21 @@ TEST_F(P2PTransportChannelTest, // address of the outermost NAT. class P2PTransportChannelSameNatTest : public P2PTransportChannelTestBase { protected: - void ConfigureEndpoints(Config nat_type, Config config1, Config config2) { + void ConfigureEndpoints(const Environment& env, + Config nat_type, + Config config1, + Config config2) { RTC_CHECK_GE(nat_type, NAT_FULL_CONE); RTC_CHECK_LE(nat_type, NAT_SYMMETRIC); - rtc::NATSocketServer::Translator* outer_nat = nat()->AddTranslator( - kPublicAddrs[0], kNatAddrs[0], - static_cast(nat_type - NAT_FULL_CONE)); + CreatePortAllocators(env); + NATSocketServer::Translator* outer_nat = + nat()->AddTranslator(kPublicAddrs[0], kNatAddrs[0], + static_cast(nat_type - NAT_FULL_CONE)); ConfigureEndpoint(outer_nat, 0, config1); ConfigureEndpoint(outer_nat, 1, config2); set_remote_ice_parameter_source(FROM_SETICEPARAMETERS); } - void ConfigureEndpoint(rtc::NATSocketServer::Translator* nat, + void ConfigureEndpoint(NATSocketServer::Translator* nat, int endpoint, Config config) { RTC_CHECK(config <= NAT_SYMMETRIC); @@ -2513,16 +2639,18 @@ class P2PTransportChannelSameNatTest : public P2PTransportChannelTestBase { } else { AddAddress(endpoint, kCascadedPrivateAddrs[endpoint]); nat->AddTranslator(kPrivateAddrs[endpoint], kCascadedNatAddrs[endpoint], - static_cast(config - NAT_FULL_CONE)) + static_cast(config - NAT_FULL_CONE)) ->AddClient(kCascadedPrivateAddrs[endpoint]); } } }; TEST_F(P2PTransportChannelSameNatTest, TestConesBehindSameCone) { - ConfigureEndpoints(NAT_FULL_CONE, NAT_FULL_CONE, NAT_FULL_CONE); - Test( - P2PTransportChannelTestBase::Result("prflx", "udp", "stun", "udp", 1000)); + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, NAT_FULL_CONE, NAT_FULL_CONE, NAT_FULL_CONE); + Test(env, P2PTransportChannelTestBase::Result(IceCandidateType::kPrflx, "udp", + IceCandidateType::kSrflx, "udp", + 1000)); } // Test what happens when we have multiple available pathways. @@ -2563,7 +2691,7 @@ class P2PTransportChannelMultihomedTest : public P2PTransportChannelTest { } Connection* GetBestConnection(P2PTransportChannel* channel) { - rtc::ArrayView connections = channel->connections(); + ArrayView connections = channel->connections(); auto it = absl::c_find(connections, channel->selected_connection()); if (it == connections.end()) { return nullptr; @@ -2572,7 +2700,7 @@ class P2PTransportChannelMultihomedTest : public P2PTransportChannelTest { } Connection* GetBackupConnection(P2PTransportChannel* channel) { - rtc::ArrayView connections = channel->connections(); + ArrayView connections = channel->connections(); auto it = absl::c_find_if_not(connections, [channel](Connection* conn) { return conn == channel->selected_connection(); }); @@ -2585,7 +2713,7 @@ class P2PTransportChannelMultihomedTest : public P2PTransportChannelTest { void DestroyAllButBestConnection(P2PTransportChannel* channel) { const Connection* selected_connection = channel->selected_connection(); // Copy the list of connections since the original will be modified. 
- rtc::ArrayView view = channel->connections(); + ArrayView view = channel->connections(); std::vector connections(view.begin(), view.end()); for (Connection* conn : connections) { if (conn != selected_connection) @@ -2596,50 +2724,68 @@ class P2PTransportChannelMultihomedTest : public P2PTransportChannelTest { // Test that we can establish connectivity when both peers are multihomed. TEST_F(P2PTransportChannelMultihomedTest, TestBasic) { + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); AddAddress(0, kPublicAddrs[0]); AddAddress(0, kAlternateAddrs[0]); AddAddress(1, kPublicAddrs[1]); AddAddress(1, kAlternateAddrs[1]); - Test(kLocalUdpToLocalUdp); + Test(env, kLocalUdpToLocalUdp); } // Test that we can quickly switch links if an interface goes down. // The controlled side has two interfaces and one will die. TEST_F(P2PTransportChannelMultihomedTest, TestFailoverControlledSide) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); AddAddress(0, kPublicAddrs[0]); // Simulate failing over from Wi-Fi to cell interface. - AddAddress(1, kPublicAddrs[1], "eth0", rtc::ADAPTER_TYPE_WIFI); - AddAddress(1, kAlternateAddrs[1], "wlan0", rtc::ADAPTER_TYPE_CELLULAR); + AddAddress(1, kPublicAddrs[1], "eth0", ADAPTER_TYPE_WIFI); + AddAddress(1, kAlternateAddrs[1], "wlan0", ADAPTER_TYPE_CELLULAR); // Use only local ports for simplicity. SetAllocatorFlags(0, kOnlyLocalPorts); SetAllocatorFlags(1, kOnlyLocalPorts); // Make the receiving timeout shorter for testing. - IceConfig config = CreateIceConfig(1000, GATHER_ONCE); + IceConfig config = CreateIceConfig(1000, webrtc::GATHER_ONCE); // Create channels and let them go writable, as usual. - CreateChannels(config, config); - - EXPECT_TRUE_SIMULATED_WAIT( - CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), kPublicAddrs[0], - kPublicAddrs[1]), - kMediumTimeout, clock); + CreateChannels(env, config, config); + + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckCandidatePairAndConnected( + ep1_ch1(), ep2_ch1(), kPublicAddrs[0], kPublicAddrs[1]); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Blackhole any traffic to or from the public addrs. RTC_LOG(LS_INFO) << "Failing over..."; - fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, kPublicAddrs[1]); + fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_ANY, kPublicAddrs[1]); // The selected connections may switch, so keep references to them. const Connection* selected_connection1 = ep1_ch1()->selected_connection(); // We should detect loss of receiving within 1 second or so. - EXPECT_TRUE_SIMULATED_WAIT(!selected_connection1->receiving(), kMediumTimeout, - clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return !selected_connection1->receiving(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // We should switch over to use the alternate addr on both sides // when we are not receiving. 
- EXPECT_TRUE_SIMULATED_WAIT(ep1_ch1()->selected_connection()->receiving() && - ep2_ch1()->selected_connection()->receiving(), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return ep1_ch1()->selected_connection()->receiving() && + ep2_ch1()->selected_connection()->receiving(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); EXPECT_TRUE(LocalCandidate(ep1_ch1())->address().EqualIPs(kPublicAddrs[0])); EXPECT_TRUE( RemoteCandidate(ep1_ch1())->address().EqualIPs(kAlternateAddrs[1])); @@ -2652,10 +2798,12 @@ TEST_F(P2PTransportChannelMultihomedTest, TestFailoverControlledSide) { // Test that we can quickly switch links if an interface goes down. // The controlling side has two interfaces and one will die. TEST_F(P2PTransportChannelMultihomedTest, TestFailoverControllingSide) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); // Simulate failing over from Wi-Fi to cell interface. - AddAddress(0, kPublicAddrs[0], "eth0", rtc::ADAPTER_TYPE_WIFI); - AddAddress(0, kAlternateAddrs[0], "wlan0", rtc::ADAPTER_TYPE_CELLULAR); + AddAddress(0, kPublicAddrs[0], "eth0", ADAPTER_TYPE_WIFI); + AddAddress(0, kAlternateAddrs[0], "wlan0", ADAPTER_TYPE_CELLULAR); AddAddress(1, kPublicAddrs[1]); // Use only local ports for simplicity. @@ -2663,25 +2811,35 @@ TEST_F(P2PTransportChannelMultihomedTest, TestFailoverControllingSide) { SetAllocatorFlags(1, kOnlyLocalPorts); // Make the receiving timeout shorter for testing. - IceConfig config = CreateIceConfig(1000, GATHER_ONCE); + IceConfig config = CreateIceConfig(1000, webrtc::GATHER_ONCE); // Create channels and let them go writable, as usual. - CreateChannels(config, config); - EXPECT_TRUE_SIMULATED_WAIT( - CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), kPublicAddrs[0], - kPublicAddrs[1]), - kMediumTimeout, clock); + CreateChannels(env, config, config); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckCandidatePairAndConnected( + ep1_ch1(), ep2_ch1(), kPublicAddrs[0], kPublicAddrs[1]); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Blackhole any traffic to or from the public addrs. RTC_LOG(LS_INFO) << "Failing over..."; - fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, kPublicAddrs[0]); + fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_ANY, kPublicAddrs[0]); // We should detect loss of receiving within 1 second or so. // We should switch over to use the alternate addr on both sides // when we are not receiving. - EXPECT_TRUE_SIMULATED_WAIT( - CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), kAlternateAddrs[0], - kPublicAddrs[1]), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckCandidatePairAndConnected( + ep1_ch1(), ep2_ch1(), kAlternateAddrs[0], kPublicAddrs[1]); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); DestroyChannels(); } @@ -2689,33 +2847,36 @@ TEST_F(P2PTransportChannelMultihomedTest, TestFailoverControllingSide) { // Tests that we can quickly switch links if an interface goes down when // there are many connections. 
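// The failover tests in this area follow one recipe (sketch, using only
// calls visible in these hunks): give an endpoint two interfaces tagged with
// adapter types, e.g.
//   AddAddress(1, kPublicAddrs[1], "eth0", ADAPTER_TYPE_WIFI);
//   AddAddress(1, kAlternateAddrs[1], "wlan0", ADAPTER_TYPE_CELLULAR);
// then blackhole the primary path through the fake firewall,
//   fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_ANY, kPublicAddrs[1]);
// and use WaitUntil with the fake clock to observe the switch to the
// alternate candidate pair.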
TEST_F(P2PTransportChannelMultihomedTest, TestFailoverWithManyConnections) { - rtc::ScopedFakeClock clock; - test_turn_server()->AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); + test_turn_server()->AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); RelayServerConfig turn_server; turn_server.credentials = kRelayCredentials; - turn_server.ports.push_back(ProtocolAddress(kTurnTcpIntAddr, PROTO_TCP)); + turn_server.ports.push_back( + ProtocolAddress(kTurnTcpIntAddr, webrtc::PROTO_TCP)); GetAllocator(0)->AddTurnServerForTesting(turn_server); GetAllocator(1)->AddTurnServerForTesting(turn_server); // Enable IPv6 - SetAllocatorFlags( - 0, PORTALLOCATOR_ENABLE_IPV6 | PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); - SetAllocatorFlags( - 1, PORTALLOCATOR_ENABLE_IPV6 | PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); - SetAllocationStepDelay(0, kMinimumStepDelay); - SetAllocationStepDelay(1, kMinimumStepDelay); + SetAllocatorFlags(0, webrtc::PORTALLOCATOR_ENABLE_IPV6 | + webrtc::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); + SetAllocatorFlags(1, webrtc::PORTALLOCATOR_ENABLE_IPV6 | + webrtc::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); + SetAllocationStepDelay(0, webrtc::kMinimumStepDelay); + SetAllocationStepDelay(1, webrtc::kMinimumStepDelay); auto& wifi = kPublicAddrs; auto& cellular = kAlternateAddrs; auto& wifiIpv6 = kIPv6PublicAddrs; auto& cellularIpv6 = kIPv6AlternateAddrs; - AddAddress(0, wifi[0], "wifi0", rtc::ADAPTER_TYPE_WIFI); - AddAddress(0, wifiIpv6[0], "wifi0", rtc::ADAPTER_TYPE_WIFI); - AddAddress(0, cellular[0], "cellular0", rtc::ADAPTER_TYPE_CELLULAR); - AddAddress(0, cellularIpv6[0], "cellular0", rtc::ADAPTER_TYPE_CELLULAR); - AddAddress(1, wifi[1], "wifi1", rtc::ADAPTER_TYPE_WIFI); - AddAddress(1, wifiIpv6[1], "wifi1", rtc::ADAPTER_TYPE_WIFI); - AddAddress(1, cellular[1], "cellular1", rtc::ADAPTER_TYPE_CELLULAR); - AddAddress(1, cellularIpv6[1], "cellular1", rtc::ADAPTER_TYPE_CELLULAR); + AddAddress(0, wifi[0], "wifi0", ADAPTER_TYPE_WIFI); + AddAddress(0, wifiIpv6[0], "wifi0", ADAPTER_TYPE_WIFI); + AddAddress(0, cellular[0], "cellular0", ADAPTER_TYPE_CELLULAR); + AddAddress(0, cellularIpv6[0], "cellular0", ADAPTER_TYPE_CELLULAR); + AddAddress(1, wifi[1], "wifi1", ADAPTER_TYPE_WIFI); + AddAddress(1, wifiIpv6[1], "wifi1", ADAPTER_TYPE_WIFI); + AddAddress(1, cellular[1], "cellular1", ADAPTER_TYPE_CELLULAR); + AddAddress(1, cellularIpv6[1], "cellular1", ADAPTER_TYPE_CELLULAR); // Set smaller delay on the TCP TURN server so that TCP TURN candidates // will be created in time. @@ -2725,25 +2886,36 @@ TEST_F(P2PTransportChannelMultihomedTest, TestFailoverWithManyConnections) { virtual_socket_server()->UpdateDelayDistribution(); // Make the receiving timeout shorter for testing. - IceConfig config = CreateIceConfig(1000, GATHER_CONTINUALLY); + IceConfig config = CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY); // Create channels and let them go writable, as usual. - CreateChannels(config, config, true /* ice_renomination */); - EXPECT_TRUE_SIMULATED_WAIT( - CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), wifiIpv6[0], - wifiIpv6[1]), - kMediumTimeout, clock); + CreateChannels(env, config, config, true /* ice_renomination */); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), + wifiIpv6[0], wifiIpv6[1]); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Blackhole any traffic to or from the wifi on endpoint 1. 
RTC_LOG(LS_INFO) << "Failing over..."; - fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, wifi[0]); - fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, wifiIpv6[0]); + fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_ANY, wifi[0]); + fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_ANY, wifiIpv6[0]); // The selected connections may switch, so keep references to them. const Connection* selected_connection1 = ep1_ch1()->selected_connection(); const Connection* selected_connection2 = ep2_ch1()->selected_connection(); - EXPECT_TRUE_SIMULATED_WAIT( - !selected_connection1->receiving() && !selected_connection2->receiving(), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return !selected_connection1->receiving() && + !selected_connection2->receiving(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Per-network best connections will be pinged at relatively higher rate when // the selected connection becomes not receiving. @@ -2752,9 +2924,12 @@ TEST_F(P2PTransportChannelMultihomedTest, TestFailoverWithManyConnections) { ASSERT_NE(nullptr, per_network_best_connection1); int64_t last_ping_sent1 = per_network_best_connection1->last_ping_sent(); int num_pings_sent1 = per_network_best_connection1->num_pings_sent(); - EXPECT_TRUE_SIMULATED_WAIT( - num_pings_sent1 < per_network_best_connection1->num_pings_sent(), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return per_network_best_connection1->num_pings_sent(); }, + Gt(num_pings_sent1), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); ASSERT_GT(per_network_best_connection1->num_pings_sent() - num_pings_sent1, 0); int64_t ping_interval1 = @@ -2767,10 +2942,15 @@ TEST_F(P2PTransportChannelMultihomedTest, TestFailoverWithManyConnections) { // It should switch over to use the cellular IPv6 addr on endpoint 1 before // it timed out on writing. - EXPECT_TRUE_SIMULATED_WAIT( - CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), cellularIpv6[0], - wifiIpv6[1]), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), + cellularIpv6[0], wifiIpv6[1]); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); DestroyChannels(); } @@ -2779,10 +2959,12 @@ TEST_F(P2PTransportChannelMultihomedTest, TestFailoverWithManyConnections) { // the nomination of the selected connection on the controlled side will // increase. TEST_F(P2PTransportChannelMultihomedTest, TestIceRenomination) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); // Simulate failing over from Wi-Fi to cell interface. - AddAddress(0, kPublicAddrs[0], "eth0", rtc::ADAPTER_TYPE_WIFI); - AddAddress(0, kAlternateAddrs[0], "wlan0", rtc::ADAPTER_TYPE_CELLULAR); + AddAddress(0, kPublicAddrs[0], "eth0", ADAPTER_TYPE_WIFI); + AddAddress(0, kAlternateAddrs[0], "wlan0", ADAPTER_TYPE_CELLULAR); AddAddress(1, kPublicAddrs[1]); // Use only local ports for simplicity. @@ -2792,15 +2974,23 @@ TEST_F(P2PTransportChannelMultihomedTest, TestIceRenomination) { // We want it to set the remote ICE parameters when creating channels. set_remote_ice_parameter_source(FROM_SETICEPARAMETERS); // Make the receiving timeout shorter for testing. 
- IceConfig config = CreateIceConfig(1000, GATHER_ONCE); + IceConfig config = CreateIceConfig(1000, webrtc::GATHER_ONCE); // Create channels with ICE renomination and let them go writable as usual. - CreateChannels(config, config, true); - ASSERT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kMediumTimeout, clock); - EXPECT_TRUE_SIMULATED_WAIT( - ep2_ch1()->selected_connection()->remote_nomination() > 0 && - ep1_ch1()->selected_connection()->acked_nomination() > 0, - kDefaultTimeout, clock); + CreateChannels(env, config, config, true); + ASSERT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return ep2_ch1()->selected_connection()->remote_nomination() > 0 && + ep1_ch1()->selected_connection()->acked_nomination() > 0; + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); const Connection* selected_connection1 = ep1_ch1()->selected_connection(); Connection* selected_connection2 = const_cast(ep2_ch1()->selected_connection()); @@ -2813,18 +3003,23 @@ TEST_F(P2PTransportChannelMultihomedTest, TestIceRenomination) { // Blackhole any traffic to or from the public addrs. RTC_LOG(LS_INFO) << "Failing over..."; - fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, kPublicAddrs[0]); + fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_ANY, kPublicAddrs[0]); // The selected connection on the controlling side should switch. - EXPECT_TRUE_SIMULATED_WAIT( - ep1_ch1()->selected_connection() != selected_connection1, kMediumTimeout, - clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ep1_ch1()->selected_connection(); }, + Ne(selected_connection1), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // The connection on the controlled side should be nominated again // and have an increased nomination. - EXPECT_TRUE_SIMULATED_WAIT( - ep2_ch1()->selected_connection()->remote_nomination() > - remote_nomination2, - kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ep2_ch1()->selected_connection()->remote_nomination(); }, + Gt(remote_nomination2), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); DestroyChannels(); } @@ -2836,48 +3031,64 @@ TEST_F(P2PTransportChannelMultihomedTest, TestIceRenomination) { // TestFailoverControlledSide and TestFailoverControllingSide. TEST_F(P2PTransportChannelMultihomedTest, TestConnectionSwitchDampeningControlledSide) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); AddAddress(0, kPublicAddrs[0]); // Simulate failing over from Wi-Fi to cell interface. - AddAddress(1, kPublicAddrs[1], "eth0", rtc::ADAPTER_TYPE_WIFI); - AddAddress(1, kAlternateAddrs[1], "wlan0", rtc::ADAPTER_TYPE_CELLULAR); + AddAddress(1, kPublicAddrs[1], "eth0", ADAPTER_TYPE_WIFI); + AddAddress(1, kAlternateAddrs[1], "wlan0", ADAPTER_TYPE_CELLULAR); // Use only local ports for simplicity. SetAllocatorFlags(0, kOnlyLocalPorts); SetAllocatorFlags(1, kOnlyLocalPorts); // Create channels and let them go writable, as usual. 
- CreateChannels(); - - EXPECT_TRUE_SIMULATED_WAIT( - CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), kPublicAddrs[0], - kPublicAddrs[1]), - kMediumTimeout, clock); + CreateChannels(env); + + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckCandidatePairAndConnected( + ep1_ch1(), ep2_ch1(), kPublicAddrs[0], kPublicAddrs[1]); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Make the receiving timeout shorter for testing. - IceConfig config = CreateIceConfig(1000, GATHER_ONCE); + IceConfig config = CreateIceConfig(1000, webrtc::GATHER_ONCE); ep1_ch1()->SetIceConfig(config); ep2_ch1()->SetIceConfig(config); reset_selected_candidate_pair_switches(); // Blackhole any traffic to or from the public addrs. RTC_LOG(LS_INFO) << "Failing over..."; - fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, kPublicAddrs[1]); + fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_ANY, kPublicAddrs[1]); // The selected connections may switch, so keep references to them. const Connection* selected_connection1 = ep1_ch1()->selected_connection(); // We should detect loss of receiving within 1 second or so. - EXPECT_TRUE_SIMULATED_WAIT(!selected_connection1->receiving(), kMediumTimeout, - clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return !selected_connection1->receiving(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // After a short while, the link recovers itself. SIMULATED_WAIT(false, 10, clock); fw()->ClearRules(); // We should remain on the public address on both sides and no connection // switches should have happened. - EXPECT_TRUE_SIMULATED_WAIT(ep1_ch1()->selected_connection()->receiving() && - ep2_ch1()->selected_connection()->receiving(), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return ep1_ch1()->selected_connection()->receiving() && + ep2_ch1()->selected_connection()->receiving(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); EXPECT_TRUE(RemoteCandidate(ep1_ch1())->address().EqualIPs(kPublicAddrs[1])); EXPECT_TRUE(LocalCandidate(ep2_ch1())->address().EqualIPs(kPublicAddrs[1])); EXPECT_EQ(0, reset_selected_candidate_pair_switches()); @@ -2889,10 +3100,12 @@ TEST_F(P2PTransportChannelMultihomedTest, // the selected connection will not switch. TEST_F(P2PTransportChannelMultihomedTest, TestConnectionSwitchDampeningControllingSide) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); // Simulate failing over from Wi-Fi to cell interface. - AddAddress(0, kPublicAddrs[0], "eth0", rtc::ADAPTER_TYPE_WIFI); - AddAddress(0, kAlternateAddrs[0], "wlan0", rtc::ADAPTER_TYPE_CELLULAR); + AddAddress(0, kPublicAddrs[0], "eth0", ADAPTER_TYPE_WIFI); + AddAddress(0, kAlternateAddrs[0], "wlan0", ADAPTER_TYPE_CELLULAR); AddAddress(1, kPublicAddrs[1]); // Use only local ports for simplicity. @@ -2900,36 +3113,49 @@ TEST_F(P2PTransportChannelMultihomedTest, SetAllocatorFlags(1, kOnlyLocalPorts); // Create channels and let them go writable, as usual. 
- CreateChannels(); - EXPECT_TRUE_SIMULATED_WAIT( - CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), kPublicAddrs[0], - kPublicAddrs[1]), - kMediumTimeout, clock); + CreateChannels(env); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckCandidatePairAndConnected( + ep1_ch1(), ep2_ch1(), kPublicAddrs[0], kPublicAddrs[1]); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Make the receiving timeout shorter for testing. - IceConfig config = CreateIceConfig(1000, GATHER_ONCE); + IceConfig config = CreateIceConfig(1000, webrtc::GATHER_ONCE); ep1_ch1()->SetIceConfig(config); ep2_ch1()->SetIceConfig(config); reset_selected_candidate_pair_switches(); // Blackhole any traffic to or from the public addrs. RTC_LOG(LS_INFO) << "Failing over..."; - fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, kPublicAddrs[0]); + fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_ANY, kPublicAddrs[0]); // The selected connections may switch, so keep references to them. const Connection* selected_connection1 = ep1_ch1()->selected_connection(); // We should detect loss of receiving within 1 second or so. - EXPECT_TRUE_SIMULATED_WAIT(!selected_connection1->receiving(), kMediumTimeout, - clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return !selected_connection1->receiving(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // The link recovers after a short while. SIMULATED_WAIT(false, 10, clock); fw()->ClearRules(); // We should not switch to the alternate addr on both sides because of the // dampening. - EXPECT_TRUE_SIMULATED_WAIT( - CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), kPublicAddrs[0], - kPublicAddrs[1]), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckCandidatePairAndConnected( + ep1_ch1(), ep2_ch1(), kPublicAddrs[0], kPublicAddrs[1]); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); EXPECT_EQ(0, reset_selected_candidate_pair_switches()); DestroyChannels(); } @@ -2937,43 +3163,54 @@ TEST_F(P2PTransportChannelMultihomedTest, // Tests that if the remote side's network failed, it won't cause the local // side to switch connections and networks. TEST_F(P2PTransportChannelMultihomedTest, TestRemoteFailover) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); // The interface names are chosen so that `cellular` would have higher // candidate priority and higher cost. auto& wifi = kPublicAddrs; auto& cellular = kAlternateAddrs; - AddAddress(0, wifi[0], "wifi0", rtc::ADAPTER_TYPE_WIFI); - AddAddress(0, cellular[0], "cellular0", rtc::ADAPTER_TYPE_CELLULAR); - AddAddress(1, wifi[1], "wifi0", rtc::ADAPTER_TYPE_WIFI); + AddAddress(0, wifi[0], "wifi0", ADAPTER_TYPE_WIFI); + AddAddress(0, cellular[0], "cellular0", ADAPTER_TYPE_CELLULAR); + AddAddress(1, wifi[1], "wifi0", ADAPTER_TYPE_WIFI); // Use only local ports for simplicity. SetAllocatorFlags(0, kOnlyLocalPorts); SetAllocatorFlags(1, kOnlyLocalPorts); // Create channels and let them go writable, as usual. - CreateChannels(); + CreateChannels(env); // Make the receiving timeout shorter for testing. // Set the backup connection ping interval to 25s. - IceConfig config = CreateIceConfig(1000, GATHER_ONCE, 25000); + IceConfig config = CreateIceConfig(1000, webrtc::GATHER_ONCE, 25000); // Ping the best connection more frequently since we don't have traffic. 
config.stable_writable_connection_ping_interval = 900; ep1_ch1()->SetIceConfig(config); ep2_ch1()->SetIceConfig(config); // Need to wait to make sure the connections on both networks are writable. - EXPECT_TRUE_SIMULATED_WAIT( - CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), wifi[0], wifi[1]), - kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), wifi[0], + wifi[1]); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); Connection* backup_conn = GetConnectionWithLocalAddress(ep1_ch1(), cellular[0]); ASSERT_NE(nullptr, backup_conn); // After a short while, the backup connection will be writable but not // receiving because backup connection is pinged at a slower rate. - EXPECT_TRUE_SIMULATED_WAIT( - backup_conn->writable() && !backup_conn->receiving(), kDefaultTimeout, - clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return backup_conn->writable() && !backup_conn->receiving(); }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); reset_selected_candidate_pair_switches(); // Blackhole any traffic to or from the remote WiFi networks. RTC_LOG(LS_INFO) << "Failing over..."; - fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, wifi[1]); + fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_ANY, wifi[1]); int num_switches = 0; SIMULATED_WAIT((num_switches = reset_selected_candidate_pair_switches()) > 0, @@ -2984,57 +3221,74 @@ TEST_F(P2PTransportChannelMultihomedTest, TestRemoteFailover) { // Tests that a Wifi-Wifi connection has the highest precedence. TEST_F(P2PTransportChannelMultihomedTest, TestPreferWifiToWifiConnection) { + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); // The interface names are chosen so that `cellular` would have higher // candidate priority if it is not for the network type. auto& wifi = kAlternateAddrs; auto& cellular = kPublicAddrs; - AddAddress(0, wifi[0], "test0", rtc::ADAPTER_TYPE_WIFI); - AddAddress(0, cellular[0], "test1", rtc::ADAPTER_TYPE_CELLULAR); - AddAddress(1, wifi[1], "test0", rtc::ADAPTER_TYPE_WIFI); - AddAddress(1, cellular[1], "test1", rtc::ADAPTER_TYPE_CELLULAR); + AddAddress(0, wifi[0], "test0", ADAPTER_TYPE_WIFI); + AddAddress(0, cellular[0], "test1", ADAPTER_TYPE_CELLULAR); + AddAddress(1, wifi[1], "test0", ADAPTER_TYPE_WIFI); + AddAddress(1, cellular[1], "test1", ADAPTER_TYPE_CELLULAR); // Use only local ports for simplicity. SetAllocatorFlags(0, kOnlyLocalPorts); SetAllocatorFlags(1, kOnlyLocalPorts); // Create channels and let them go writable, as usual. - CreateChannels(); + CreateChannels(env); - EXPECT_TRUE_WAIT_MARGIN(CheckConnected(ep1_ch1(), ep2_ch1()), 1000, 1000); + EXPECT_THAT( + webrtc::WaitUntil([&]() { return CheckConnected(ep1_ch1(), ep2_ch1()); }, + IsTrue()), + webrtc::IsRtcOk()); // Need to wait to make sure the connections on both networks are writable. - EXPECT_TRUE_WAIT( - CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), wifi[0], wifi[1]), - 1000); + EXPECT_THAT(webrtc::WaitUntil( + [&] { + return CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), + wifi[0], wifi[1]); + }, + IsTrue()), + webrtc::IsRtcOk()); DestroyChannels(); } // Tests that a Wifi-Cellular connection has higher precedence than // a Cellular-Cellular connection. 
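// One more mapping worth noting (sketch, names from this file only): tests
// that run on the real clock drop the clock and timeout settings entirely,
// so
//   EXPECT_TRUE_WAIT_MARGIN(CheckConnected(ep1_ch1(), ep2_ch1()), 1000, 1000);
// becomes
//   EXPECT_THAT(webrtc::WaitUntil(
//                   [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); },
//                   IsTrue()),
//               webrtc::IsRtcOk());
// relying on WaitUntil's default timeout, while EXPECT_TRUE_WAIT keeps an
// explicit {.timeout = TimeDelta::Millis(...)} as in the surrounding hunks.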
TEST_F(P2PTransportChannelMultihomedTest, TestPreferWifiOverCellularNetwork) { + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); // The interface names are chosen so that `cellular` would have higher // candidate priority if it is not for the network type. auto& wifi = kAlternateAddrs; auto& cellular = kPublicAddrs; - AddAddress(0, cellular[0], "test1", rtc::ADAPTER_TYPE_CELLULAR); - AddAddress(1, wifi[1], "test0", rtc::ADAPTER_TYPE_WIFI); - AddAddress(1, cellular[1], "test1", rtc::ADAPTER_TYPE_CELLULAR); + AddAddress(0, cellular[0], "test1", ADAPTER_TYPE_CELLULAR); + AddAddress(1, wifi[1], "test0", ADAPTER_TYPE_WIFI); + AddAddress(1, cellular[1], "test1", ADAPTER_TYPE_CELLULAR); // Use only local ports for simplicity. SetAllocatorFlags(0, kOnlyLocalPorts); SetAllocatorFlags(1, kOnlyLocalPorts); // Create channels and let them go writable, as usual. - CreateChannels(); - - EXPECT_TRUE_WAIT_MARGIN(CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), - cellular[0], wifi[1]), - 1000, 1000); + CreateChannels(env); + + EXPECT_THAT(webrtc::WaitUntil( + [&]() { + return CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), + cellular[0], wifi[1]); + }, + IsTrue()), + webrtc::IsRtcOk()); DestroyChannels(); } // Test that the backup connection is pinged at a rate no faster than // what was configured. TEST_F(P2PTransportChannelMultihomedTest, TestPingBackupConnectionRate) { + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); AddAddress(0, kPublicAddrs[0]); // Adding alternate address will make sure `kPublicAddrs` has the higher // priority than others. This is due to FakeNetwork::AddInterface method. @@ -3046,23 +3300,32 @@ TEST_F(P2PTransportChannelMultihomedTest, TestPingBackupConnectionRate) { SetAllocatorFlags(1, kOnlyLocalPorts); // Create channels and let them go writable, as usual. - CreateChannels(); - EXPECT_TRUE_WAIT_MARGIN(CheckConnected(ep1_ch1(), ep2_ch1()), 1000, 1000); + CreateChannels(env); + EXPECT_THAT( + webrtc::WaitUntil([&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, + IsTrue()), + webrtc::IsRtcOk()); int backup_ping_interval = 2000; ep2_ch1()->SetIceConfig( - CreateIceConfig(2000, GATHER_ONCE, backup_ping_interval)); + CreateIceConfig(2000, webrtc::GATHER_ONCE, backup_ping_interval)); // After the state becomes COMPLETED, the backup connection will be pinged // once every `backup_ping_interval` milliseconds. 
- ASSERT_TRUE_WAIT(ep2_ch1()->GetState() == IceTransportState::STATE_COMPLETED, - 1000); + ASSERT_THAT(webrtc::WaitUntil([&] { return ep2_ch1()->GetState(); }, + Eq(IceTransportStateInternal::STATE_COMPLETED)), + webrtc::IsRtcOk()); auto connections = ep2_ch1()->connections(); ASSERT_EQ(2U, connections.size()); Connection* backup_conn = GetBackupConnection(ep2_ch1()); - EXPECT_TRUE_WAIT(backup_conn->writable(), kMediumTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return backup_conn->writable(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); int64_t last_ping_response_ms = backup_conn->last_ping_response_received(); - EXPECT_TRUE_WAIT( - last_ping_response_ms < backup_conn->last_ping_response_received(), - kDefaultTimeout); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return backup_conn->last_ping_response_received(); }, + Gt(last_ping_response_ms), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); int time_elapsed = backup_conn->last_ping_response_received() - last_ping_response_ms; RTC_LOG(LS_INFO) << "Time elapsed: " << time_elapsed; @@ -3074,6 +3337,8 @@ TEST_F(P2PTransportChannelMultihomedTest, TestPingBackupConnectionRate) { // Test that the connection is pinged at a rate no faster than // what was configured when stable and writable. TEST_F(P2PTransportChannelMultihomedTest, TestStableWritableRate) { + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); AddAddress(0, kPublicAddrs[0]); // Adding alternate address will make sure `kPublicAddrs` has the higher // priority than others. This is due to FakeNetwork::AddInterface method. @@ -3085,29 +3350,37 @@ TEST_F(P2PTransportChannelMultihomedTest, TestStableWritableRate) { SetAllocatorFlags(1, kOnlyLocalPorts); // Create channels and let them go writable, as usual. - CreateChannels(); - EXPECT_TRUE_WAIT_MARGIN(CheckConnected(ep1_ch1(), ep2_ch1()), 1000, 1000); + CreateChannels(env); + EXPECT_THAT( + webrtc::WaitUntil([&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, + IsTrue()), + webrtc::IsRtcOk()); // Set a value larger than the default value of 2500 ms int ping_interval_ms = 3456; - IceConfig config = CreateIceConfig(2 * ping_interval_ms, GATHER_ONCE); + IceConfig config = CreateIceConfig(2 * ping_interval_ms, webrtc::GATHER_ONCE); config.stable_writable_connection_ping_interval = ping_interval_ms; ep2_ch1()->SetIceConfig(config); // After the state becomes COMPLETED and is stable and writable, the // connection will be pinged once every `ping_interval_ms` milliseconds. - ASSERT_TRUE_WAIT(ep2_ch1()->GetState() == IceTransportState::STATE_COMPLETED, - 1000); + ASSERT_THAT(webrtc::WaitUntil([&] { return ep2_ch1()->GetState(); }, + Eq(IceTransportStateInternal::STATE_COMPLETED)), + webrtc::IsRtcOk()); auto connections = ep2_ch1()->connections(); ASSERT_EQ(2U, connections.size()); Connection* conn = GetBestConnection(ep2_ch1()); - EXPECT_TRUE_WAIT(conn->writable(), kMediumTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return conn->writable(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); int64_t last_ping_response_ms; // Burn through some pings so the connection is stable. 
for (int i = 0; i < 5; i++) { last_ping_response_ms = conn->last_ping_response_received(); - EXPECT_TRUE_WAIT( - last_ping_response_ms < conn->last_ping_response_received(), - kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return conn->last_ping_response_received(); }, + Gt(last_ping_response_ms), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); } EXPECT_TRUE(conn->stable(last_ping_response_ms)) << "Connection not stable"; int time_elapsed = @@ -3119,18 +3392,26 @@ TEST_F(P2PTransportChannelMultihomedTest, TestStableWritableRate) { } TEST_F(P2PTransportChannelMultihomedTest, TestGetState) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); AddAddress(0, kAlternateAddrs[0]); AddAddress(0, kPublicAddrs[0]); AddAddress(1, kPublicAddrs[1]); // Create channels and let them go writable, as usual. - CreateChannels(); + CreateChannels(env); // Both transport channels will reach STATE_COMPLETED quickly. - EXPECT_EQ_SIMULATED_WAIT(IceTransportState::STATE_COMPLETED, - ep1_ch1()->GetState(), kShortTimeout, clock); - EXPECT_EQ_SIMULATED_WAIT(IceTransportState::STATE_COMPLETED, - ep2_ch1()->GetState(), kShortTimeout, clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return ep1_ch1()->GetState(); }, + Eq(IceTransportStateInternal::STATE_COMPLETED), + {.timeout = TimeDelta::Millis(kShortTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); + EXPECT_THAT(webrtc::WaitUntil([&] { return ep2_ch1()->GetState(); }, + Eq(IceTransportStateInternal::STATE_COMPLETED), + {.timeout = TimeDelta::Millis(kShortTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); DestroyChannels(); } @@ -3139,18 +3420,23 @@ TEST_F(P2PTransportChannelMultihomedTest, TestGetState) { // will be removed from the port list of the channel, and the respective // remote candidates on the other participant will be removed eventually. TEST_F(P2PTransportChannelMultihomedTest, TestNetworkBecomesInactive) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); AddAddress(0, kPublicAddrs[0]); AddAddress(1, kPublicAddrs[1]); // Create channels and let them go writable, as usual. - IceConfig ep1_config = CreateIceConfig(2000, GATHER_CONTINUALLY); - IceConfig ep2_config = CreateIceConfig(2000, GATHER_ONCE); - CreateChannels(ep1_config, ep2_config); + IceConfig ep1_config = CreateIceConfig(2000, webrtc::GATHER_CONTINUALLY); + IceConfig ep2_config = CreateIceConfig(2000, webrtc::GATHER_ONCE); + CreateChannels(env, ep1_config, ep2_config); SetAllocatorFlags(0, kOnlyLocalPorts); SetAllocatorFlags(1, kOnlyLocalPorts); - ASSERT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kDefaultTimeout, clock); + ASSERT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // More than one port has been created. EXPECT_LE(1U, ep1_ch1()->ports().size()); // Endpoint 1 enabled continual gathering; the port will be removed @@ -3158,8 +3444,10 @@ TEST_F(P2PTransportChannelMultihomedTest, TestNetworkBecomesInactive) { RemoveAddress(0, kPublicAddrs[0]); EXPECT_TRUE(ep1_ch1()->ports().empty()); // The remote candidates will be removed eventually. 
- EXPECT_TRUE_SIMULATED_WAIT(ep2_ch1()->remote_candidates().empty(), 1000, - clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ep2_ch1()->remote_candidates().empty(); }, + IsTrue(), {.clock = &clock}), + webrtc::IsRtcOk()); size_t num_ports = ep2_ch1()->ports().size(); EXPECT_LE(1U, num_ports); @@ -3169,9 +3457,12 @@ TEST_F(P2PTransportChannelMultihomedTest, TestNetworkBecomesInactive) { // other participant will not be removed. RemoveAddress(1, kPublicAddrs[1]); - EXPECT_EQ_SIMULATED_WAIT(0U, ep2_ch1()->ports().size(), kDefaultTimeout, - clock); - SIMULATED_WAIT(0U == ep1_ch1()->remote_candidates().size(), 500, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ep2_ch1()->ports().size(); }, Eq(0U), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); + SIMULATED_WAIT(ep1_ch1()->remote_candidates().empty(), 500, clock); EXPECT_EQ(num_remote_candidates, ep1_ch1()->remote_candidates().size()); DestroyChannels(); @@ -3181,53 +3472,79 @@ TEST_F(P2PTransportChannelMultihomedTest, TestNetworkBecomesInactive) { // interface is added. TEST_F(P2PTransportChannelMultihomedTest, TestContinualGatheringOnNewInterface) { + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); auto& wifi = kAlternateAddrs; auto& cellular = kPublicAddrs; - AddAddress(0, wifi[0], "test_wifi0", rtc::ADAPTER_TYPE_WIFI); - AddAddress(1, cellular[1], "test_cell1", rtc::ADAPTER_TYPE_CELLULAR); + AddAddress(0, wifi[0], "test_wifi0", ADAPTER_TYPE_WIFI); + AddAddress(1, cellular[1], "test_cell1", ADAPTER_TYPE_CELLULAR); // Set continual gathering policy. IceConfig continual_gathering_config = - CreateIceConfig(1000, GATHER_CONTINUALLY); - CreateChannels(continual_gathering_config, continual_gathering_config); + CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY); + CreateChannels(env, continual_gathering_config, continual_gathering_config); SetAllocatorFlags(0, kOnlyLocalPorts); SetAllocatorFlags(1, kOnlyLocalPorts); - EXPECT_TRUE_WAIT_MARGIN(CheckConnected(ep1_ch1(), ep2_ch1()), kDefaultTimeout, - kDefaultTimeout); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, + IsTrue(), {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Add a new wifi interface on end point 2. We should expect a new connection // to be created and the new one will be the best connection. - AddAddress(1, wifi[1], "test_wifi1", rtc::ADAPTER_TYPE_WIFI); + AddAddress(1, wifi[1], "test_wifi1", ADAPTER_TYPE_WIFI); const Connection* conn; - EXPECT_TRUE_WAIT((conn = ep1_ch1()->selected_connection()) != nullptr && - HasRemoteAddress(conn, wifi[1]), - kDefaultTimeout); - EXPECT_TRUE_WAIT((conn = ep2_ch1()->selected_connection()) != nullptr && - HasLocalAddress(conn, wifi[1]), - kDefaultTimeout); + EXPECT_THAT(webrtc::WaitUntil( + [&] { + return (conn = ep1_ch1()->selected_connection()) != + nullptr && + HasRemoteAddress(conn, wifi[1]); + }, + IsTrue(), {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + EXPECT_THAT(webrtc::WaitUntil( + [&] { + return (conn = ep2_ch1()->selected_connection()) != + nullptr && + HasLocalAddress(conn, wifi[1]); + }, + IsTrue(), {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Add a new cellular interface on end point 1, we should expect a new // backup connection created using this new interface. 
- AddAddress(0, cellular[0], "test_cellular0", rtc::ADAPTER_TYPE_CELLULAR); - EXPECT_TRUE_WAIT( - ep1_ch1()->GetState() == IceTransportState::STATE_COMPLETED && - absl::c_any_of(ep1_ch1()->connections(), - [channel = ep1_ch1(), - address = cellular[0]](const Connection* conn) { - return HasLocalAddress(conn, address) && - conn != channel->selected_connection() && - conn->writable(); - }), - kDefaultTimeout); - EXPECT_TRUE_WAIT( - ep2_ch1()->GetState() == IceTransportState::STATE_COMPLETED && - absl::c_any_of(ep2_ch1()->connections(), - [channel = ep2_ch1(), - address = cellular[0]](const Connection* conn) { - return HasRemoteAddress(conn, address) && - conn != channel->selected_connection() && - conn->receiving(); - }), - kDefaultTimeout); + AddAddress(0, cellular[0], "test_cellular0", ADAPTER_TYPE_CELLULAR); + EXPECT_THAT(webrtc::WaitUntil( + [&] { + return ep1_ch1()->GetState() == + IceTransportStateInternal::STATE_COMPLETED && + absl::c_any_of( + ep1_ch1()->connections(), + [channel = ep1_ch1(), + address = cellular[0]](const Connection* conn) { + return HasLocalAddress(conn, address) && + conn != + channel->selected_connection() && + conn->writable(); + }); + }, + IsTrue(), {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + EXPECT_THAT(webrtc::WaitUntil( + [&] { + return ep2_ch1()->GetState() == + IceTransportStateInternal::STATE_COMPLETED && + absl::c_any_of( + ep2_ch1()->connections(), + [channel = ep2_ch1(), + address = cellular[0]](const Connection* conn) { + return HasRemoteAddress(conn, address) && + conn != + channel->selected_connection() && + conn->receiving(); + }); + }, + IsTrue(), {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); DestroyChannels(); } @@ -3235,7 +3552,9 @@ TEST_F(P2PTransportChannelMultihomedTest, // Tests that we can switch links via continual gathering. TEST_F(P2PTransportChannelMultihomedTest, TestSwitchLinksViaContinualGathering) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); AddAddress(0, kPublicAddrs[0]); AddAddress(1, kPublicAddrs[1]); // Use only local ports for simplicity. @@ -3244,198 +3563,282 @@ TEST_F(P2PTransportChannelMultihomedTest, // Set continual gathering policy. IceConfig continual_gathering_config = - CreateIceConfig(1000, GATHER_CONTINUALLY); + CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY); // Create channels and let them go writable, as usual. - CreateChannels(continual_gathering_config, continual_gathering_config); - EXPECT_TRUE_SIMULATED_WAIT( - CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), kPublicAddrs[0], - kPublicAddrs[1]), - kMediumTimeout, clock); + CreateChannels(env, continual_gathering_config, continual_gathering_config); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckCandidatePairAndConnected( + ep1_ch1(), ep2_ch1(), kPublicAddrs[0], kPublicAddrs[1]); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Add the new address first and then remove the other one. RTC_LOG(LS_INFO) << "Draining..."; AddAddress(1, kAlternateAddrs[1]); RemoveAddress(1, kPublicAddrs[1]); // We should switch to use the alternate address after an exchange of pings. 
- EXPECT_TRUE_SIMULATED_WAIT( - CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), kPublicAddrs[0], - kAlternateAddrs[1]), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckCandidatePairAndConnected( + ep1_ch1(), ep2_ch1(), kPublicAddrs[0], kAlternateAddrs[1]); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Remove one address first and then add another address. RTC_LOG(LS_INFO) << "Draining again..."; RemoveAddress(1, kAlternateAddrs[1]); AddAddress(1, kAlternateAddrs[0]); - EXPECT_TRUE_SIMULATED_WAIT( - CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), kPublicAddrs[0], - kAlternateAddrs[0]), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckCandidatePairAndConnected( + ep1_ch1(), ep2_ch1(), kPublicAddrs[0], kAlternateAddrs[0]); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); DestroyChannels(); } // Tests that the backup connection will be restored after it is destroyed. TEST_F(P2PTransportChannelMultihomedTest, TestRestoreBackupConnection) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); auto& wifi = kAlternateAddrs; auto& cellular = kPublicAddrs; - AddAddress(0, wifi[0], "test_wifi0", rtc::ADAPTER_TYPE_WIFI); - AddAddress(0, cellular[0], "test_cell0", rtc::ADAPTER_TYPE_CELLULAR); - AddAddress(1, wifi[1], "test_wifi1", rtc::ADAPTER_TYPE_WIFI); - AddAddress(1, cellular[1], "test_cell1", rtc::ADAPTER_TYPE_CELLULAR); + AddAddress(0, wifi[0], "test_wifi0", ADAPTER_TYPE_WIFI); + AddAddress(0, cellular[0], "test_cell0", ADAPTER_TYPE_CELLULAR); + AddAddress(1, wifi[1], "test_wifi1", ADAPTER_TYPE_WIFI); + AddAddress(1, cellular[1], "test_cell1", ADAPTER_TYPE_CELLULAR); // Use only local ports for simplicity. SetAllocatorFlags(0, kOnlyLocalPorts); SetAllocatorFlags(1, kOnlyLocalPorts); // Create channels and let them go writable, as usual. - IceConfig config = CreateIceConfig(1000, GATHER_CONTINUALLY); + IceConfig config = CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY); config.regather_on_failed_networks_interval = 2000; - CreateChannels(config, config); - EXPECT_TRUE_SIMULATED_WAIT( - CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), wifi[0], wifi[1]), - kMediumTimeout, clock); + CreateChannels(env, config, config); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckCandidatePairAndConnected(ep1_ch1(), ep2_ch1(), wifi[0], + wifi[1]); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Destroy all backup connections. DestroyAllButBestConnection(ep1_ch1()); // Ensure the backup connection is removed first. 
- EXPECT_TRUE_SIMULATED_WAIT( - GetConnectionWithLocalAddress(ep1_ch1(), cellular[0]) == nullptr, - kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return GetConnectionWithLocalAddress(ep1_ch1(), cellular[0]); }, + Eq(nullptr), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); const Connection* conn; - EXPECT_TRUE_SIMULATED_WAIT( - (conn = GetConnectionWithLocalAddress(ep1_ch1(), cellular[0])) != - nullptr && - conn != ep1_ch1()->selected_connection() && conn->writable(), - kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return (conn = GetConnectionWithLocalAddress( + ep1_ch1(), cellular[0])) != nullptr && + conn != ep1_ch1()->selected_connection() && conn->writable(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); DestroyChannels(); } TEST_F(P2PTransportChannelMultihomedTest, TestVpnDefault) { - rtc::ScopedFakeClock clock; - AddAddress(0, kPublicAddrs[0], "eth0", rtc::ADAPTER_TYPE_ETHERNET); - AddAddress(0, kAlternateAddrs[0], "vpn0", rtc::ADAPTER_TYPE_VPN); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); + AddAddress(0, kPublicAddrs[0], "eth0", ADAPTER_TYPE_ETHERNET); + AddAddress(0, kAlternateAddrs[0], "vpn0", ADAPTER_TYPE_VPN); AddAddress(1, kPublicAddrs[1]); IceConfig config; - CreateChannels(config, config, false); - EXPECT_TRUE_SIMULATED_WAIT( - CheckConnected(ep1_ch1(), ep2_ch1()) && - !ep1_ch1()->selected_connection()->network()->IsVpn(), - kDefaultTimeout, clock); + CreateChannels(env, config, config, false); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckConnected(ep1_ch1(), ep2_ch1()) && + !ep1_ch1()->selected_connection()->network()->IsVpn(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); } TEST_F(P2PTransportChannelMultihomedTest, TestVpnPreferVpn) { - rtc::ScopedFakeClock clock; - AddAddress(0, kPublicAddrs[0], "eth0", rtc::ADAPTER_TYPE_ETHERNET); - AddAddress(0, kAlternateAddrs[0], "vpn0", rtc::ADAPTER_TYPE_VPN, - rtc::ADAPTER_TYPE_CELLULAR); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); + AddAddress(0, kPublicAddrs[0], "eth0", ADAPTER_TYPE_ETHERNET); + AddAddress(0, kAlternateAddrs[0], "vpn0", ADAPTER_TYPE_VPN, + ADAPTER_TYPE_CELLULAR); AddAddress(1, kPublicAddrs[1]); IceConfig config; - config.vpn_preference = webrtc::VpnPreference::kPreferVpn; + config.vpn_preference = VpnPreference::kPreferVpn; RTC_LOG(LS_INFO) << "KESO: config.vpn_preference: " << config.vpn_preference; - CreateChannels(config, config, false); - EXPECT_TRUE_SIMULATED_WAIT( - CheckConnected(ep1_ch1(), ep2_ch1()) && - ep1_ch1()->selected_connection()->network()->IsVpn(), - kDefaultTimeout, clock); + CreateChannels(env, config, config, false); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckConnected(ep1_ch1(), ep2_ch1()) && + ep1_ch1()->selected_connection()->network()->IsVpn(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Block VPN. 
- fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, kAlternateAddrs[0]); + fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_ANY, kAlternateAddrs[0]); // Check that it switches to non-VPN - EXPECT_TRUE_SIMULATED_WAIT( - CheckConnected(ep1_ch1(), ep2_ch1()) && - !ep1_ch1()->selected_connection()->network()->IsVpn(), - kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckConnected(ep1_ch1(), ep2_ch1()) && + !ep1_ch1()->selected_connection()->network()->IsVpn(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); } TEST_F(P2PTransportChannelMultihomedTest, TestVpnAvoidVpn) { - rtc::ScopedFakeClock clock; - AddAddress(0, kPublicAddrs[0], "eth0", rtc::ADAPTER_TYPE_CELLULAR); - AddAddress(0, kAlternateAddrs[0], "vpn0", rtc::ADAPTER_TYPE_VPN, - rtc::ADAPTER_TYPE_ETHERNET); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); + AddAddress(0, kPublicAddrs[0], "eth0", ADAPTER_TYPE_CELLULAR); + AddAddress(0, kAlternateAddrs[0], "vpn0", ADAPTER_TYPE_VPN, + ADAPTER_TYPE_ETHERNET); AddAddress(1, kPublicAddrs[1]); IceConfig config; - config.vpn_preference = webrtc::VpnPreference::kAvoidVpn; - CreateChannels(config, config, false); - EXPECT_TRUE_SIMULATED_WAIT( - CheckConnected(ep1_ch1(), ep2_ch1()) && - !ep1_ch1()->selected_connection()->network()->IsVpn(), - kDefaultTimeout, clock); + config.vpn_preference = VpnPreference::kAvoidVpn; + CreateChannels(env, config, config, false); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckConnected(ep1_ch1(), ep2_ch1()) && + !ep1_ch1()->selected_connection()->network()->IsVpn(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Block non-VPN. 
- fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, kPublicAddrs[0]); + fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_ANY, kPublicAddrs[0]); // Check that it switches to VPN - EXPECT_TRUE_SIMULATED_WAIT( - CheckConnected(ep1_ch1(), ep2_ch1()) && - ep1_ch1()->selected_connection()->network()->IsVpn(), - kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckConnected(ep1_ch1(), ep2_ch1()) && + ep1_ch1()->selected_connection()->network()->IsVpn(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); } TEST_F(P2PTransportChannelMultihomedTest, TestVpnNeverVpn) { - rtc::ScopedFakeClock clock; - AddAddress(0, kPublicAddrs[0], "eth0", rtc::ADAPTER_TYPE_CELLULAR); - AddAddress(0, kAlternateAddrs[0], "vpn0", rtc::ADAPTER_TYPE_VPN, - rtc::ADAPTER_TYPE_ETHERNET); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); + AddAddress(0, kPublicAddrs[0], "eth0", ADAPTER_TYPE_CELLULAR); + AddAddress(0, kAlternateAddrs[0], "vpn0", ADAPTER_TYPE_VPN, + ADAPTER_TYPE_ETHERNET); AddAddress(1, kPublicAddrs[1]); IceConfig config; - config.vpn_preference = webrtc::VpnPreference::kNeverUseVpn; - CreateChannels(config, config, false); - EXPECT_TRUE_SIMULATED_WAIT( - CheckConnected(ep1_ch1(), ep2_ch1()) && - !ep1_ch1()->selected_connection()->network()->IsVpn(), - kDefaultTimeout, clock); + config.vpn_preference = VpnPreference::kNeverUseVpn; + CreateChannels(env, config, config, false); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckConnected(ep1_ch1(), ep2_ch1()) && + !ep1_ch1()->selected_connection()->network()->IsVpn(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Block non-VPN. 
- fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, kPublicAddrs[0]); + fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_ANY, kPublicAddrs[0]); // Check that it does not switches to VPN - clock.AdvanceTime(webrtc::TimeDelta::Millis(kDefaultTimeout)); - EXPECT_TRUE_SIMULATED_WAIT(!CheckConnected(ep1_ch1(), ep2_ch1()), - kDefaultTimeout, clock); + clock.AdvanceTime(TimeDelta::Millis(kDefaultTimeout)); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return !CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); } TEST_F(P2PTransportChannelMultihomedTest, TestVpnOnlyVpn) { - rtc::ScopedFakeClock clock; - AddAddress(0, kPublicAddrs[0], "eth0", rtc::ADAPTER_TYPE_CELLULAR); - AddAddress(0, kAlternateAddrs[0], "vpn0", rtc::ADAPTER_TYPE_VPN, - rtc::ADAPTER_TYPE_ETHERNET); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); + AddAddress(0, kPublicAddrs[0], "eth0", ADAPTER_TYPE_CELLULAR); + AddAddress(0, kAlternateAddrs[0], "vpn0", ADAPTER_TYPE_VPN, + ADAPTER_TYPE_ETHERNET); AddAddress(1, kPublicAddrs[1]); IceConfig config; - config.vpn_preference = webrtc::VpnPreference::kOnlyUseVpn; - CreateChannels(config, config, false); - EXPECT_TRUE_SIMULATED_WAIT( - CheckConnected(ep1_ch1(), ep2_ch1()) && - ep1_ch1()->selected_connection()->network()->IsVpn(), - kDefaultTimeout, clock); + config.vpn_preference = VpnPreference::kOnlyUseVpn; + CreateChannels(env, config, config, false); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return CheckConnected(ep1_ch1(), ep2_ch1()) && + ep1_ch1()->selected_connection()->network()->IsVpn(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Block VPN. - fw()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, kAlternateAddrs[0]); + fw()->AddRule(false, webrtc::FP_ANY, webrtc::FD_ANY, kAlternateAddrs[0]); // Check that it does not switch to non-VPN - clock.AdvanceTime(webrtc::TimeDelta::Millis(kDefaultTimeout)); - EXPECT_TRUE_SIMULATED_WAIT(!CheckConnected(ep1_ch1(), ep2_ch1()), - kDefaultTimeout, clock); + clock.AdvanceTime(TimeDelta::Millis(kDefaultTimeout)); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return !CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); } TEST_F(P2PTransportChannelMultihomedTest, StunDictionaryPerformsSync) { - rtc::ScopedFakeClock clock; - AddAddress(0, kPublicAddrs[0], "eth0", rtc::ADAPTER_TYPE_CELLULAR); - AddAddress(0, kAlternateAddrs[0], "vpn0", rtc::ADAPTER_TYPE_VPN, - rtc::ADAPTER_TYPE_ETHERNET); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); + AddAddress(0, kPublicAddrs[0], "eth0", ADAPTER_TYPE_CELLULAR); + AddAddress(0, kAlternateAddrs[0], "vpn0", ADAPTER_TYPE_VPN, + ADAPTER_TYPE_ETHERNET); AddAddress(1, kPublicAddrs[1]); // Create channels and let them go writable, as usual. 
- CreateChannels(); + CreateChannels(env); MockFunction)> + webrtc::ArrayView)> view_updated_func; ep2_ch1()->AddDictionaryViewUpdatedCallback( "tag", view_updated_func.AsStdFunction()); @@ -3452,8 +3855,11 @@ TEST_F(P2PTransportChannelMultihomedTest, StunDictionaryPerformsSync) { EXPECT_EQ(view.GetByteString(12)->string_view(), "keso"); }); EXPECT_CALL(writer_synced_func, Call).Times(1); - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kMediumTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}), + webrtc::IsRtcOk()); } // A collection of tests which tests a single P2PTransportChannel by sending @@ -3462,15 +3868,14 @@ class P2PTransportChannelPingTest : public ::testing::Test, public sigslot::has_slots<> { public: P2PTransportChannelPingTest() - : vss_(std::make_unique()), + : vss_(std::make_unique()), packet_socket_factory_( - std::make_unique(vss_.get())), + std::make_unique(vss_.get())), thread_(vss_.get()) {} protected: void PrepareChannel(P2PTransportChannel* ch) { - ch->SetIceRole(ICEROLE_CONTROLLING); - ch->SetIceTiebreaker(kTiebreakerDefault); + ch->SetIceRole(webrtc::ICEROLE_CONTROLLING); ch->SetIceParameters(kIceParams[0]); ch->SetRemoteIceParameters(kIceParams[1]); ch->SignalNetworkRouteChanged.connect( @@ -3479,21 +3884,28 @@ class P2PTransportChannelPingTest : public ::testing::Test, &P2PTransportChannelPingTest::OnReadyToSend); ch->SignalStateChanged.connect( this, &P2PTransportChannelPingTest::OnChannelStateChanged); - ch->SignalCandidatePairChanged.connect( - this, &P2PTransportChannelPingTest::OnCandidatePairChanged); + ch->SetCandidatePairChangeCallback( + [this](const CandidatePairChangeEvent& event) { + OnCandidatePairChanged(event); + }); } - Connection* WaitForConnectionTo( - P2PTransportChannel* ch, - absl::string_view ip, - int port_num, - rtc::ThreadProcessingFakeClock* clock = nullptr) { + Connection* WaitForConnectionTo(P2PTransportChannel* ch, + absl::string_view ip, + int port_num, + ThreadProcessingFakeClock* clock = nullptr) { if (clock == nullptr) { - EXPECT_TRUE_WAIT(GetConnectionTo(ch, ip, port_num) != nullptr, - kMediumTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return GetConnectionTo(ch, ip, port_num); }, + Ne(nullptr), + {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); } else { - EXPECT_TRUE_SIMULATED_WAIT(GetConnectionTo(ch, ip, port_num) != nullptr, - kMediumTimeout, *clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return GetConnectionTo(ch, ip, port_num); }, Ne(nullptr), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &*clock}), + webrtc::IsRtcOk()); } return GetConnectionTo(ch, ip, port_num); } @@ -3519,7 +3931,7 @@ class P2PTransportChannelPingTest : public ::testing::Test, if (!port) { return nullptr; } - return port->GetConnection(rtc::SocketAddress(ip, port_num)); + return port->GetConnection(SocketAddress(ip, port_num)); } Connection* FindNextPingableConnectionAndPingIt(P2PTransportChannel* ch) { @@ -3534,22 +3946,25 @@ class P2PTransportChannelPingTest : public ::testing::Test, const char* data, size_t len, int packet_id) { - rtc::PacketOptions options; + AsyncSocketPacketOptions options; options.packet_id = packet_id; return channel->SendPacket(data, len, options, 0); } Connection* CreateConnectionWithCandidate(P2PTransportChannel* channel, - rtc::ScopedFakeClock* clock, + ScopedFakeClock* clock, absl::string_view ip_addr, int port, int priority, 
bool writable) { channel->AddRemoteCandidate( - CreateUdpCandidate(LOCAL_PORT_TYPE, ip_addr, port, priority)); - EXPECT_TRUE_SIMULATED_WAIT( - GetConnectionTo(channel, ip_addr, port) != nullptr, kMediumTimeout, - *clock); + CreateUdpCandidate(IceCandidateType::kHost, ip_addr, port, priority)); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return GetConnectionTo(channel, ip_addr, port); }, + Ne(nullptr), + {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &*clock}), + webrtc::IsRtcOk()); Connection* conn = GetConnectionTo(channel, ip_addr, port); if (conn && writable) { @@ -3563,7 +3978,7 @@ class P2PTransportChannelPingTest : public ::testing::Test, conn->SignalNominated(conn); } - void OnNetworkRouteChanged(absl::optional network_route) { + void OnNetworkRouteChanged(std::optional network_route) { last_network_route_ = network_route; if (last_network_route_) { last_sent_packet_id_ = last_network_route_->last_sent_packet_id; @@ -3576,7 +3991,7 @@ class P2PTransportChannelPingTest : public ::testing::Test, absl::string_view remote_ufrag, int priority, uint32_t nomination, - const absl::optional& piggyback_ping_id) { + const std::optional& piggyback_ping_id) { IceMessage msg(STUN_BINDING_REQUEST); msg.AddAttribute(std::make_unique( STUN_ATTR_USERNAME, @@ -3593,9 +4008,11 @@ class P2PTransportChannelPingTest : public ::testing::Test, } msg.AddMessageIntegrity(conn->local_candidate().password()); msg.AddFingerprint(); - rtc::ByteBufferWriter buf; + ByteBufferWriter buf; msg.Write(&buf); - conn->OnReadPacket(buf.Data(), buf.Length(), rtc::TimeMicros()); + conn->OnReadPacket(ReceivedIpPacket::CreateFromLegacy( + reinterpret_cast(buf.Data()), buf.Length(), + webrtc::TimeMicros())); } void ReceivePingOnConnection(Connection* conn, @@ -3603,10 +4020,10 @@ class P2PTransportChannelPingTest : public ::testing::Test, int priority, uint32_t nomination = 0) { ReceivePingOnConnection(conn, remote_ufrag, priority, nomination, - absl::nullopt); + std::nullopt); } - void OnReadyToSend(rtc::PacketTransportInternal* transport) { + void OnReadyToSend(PacketTransportInternal* transport) { channel_ready_to_send_ = true; } void OnChannelStateChanged(IceTransportInternal* channel) { @@ -3619,7 +4036,7 @@ class P2PTransportChannelPingTest : public ::testing::Test, int last_sent_packet_id() { return last_sent_packet_id_; } bool channel_ready_to_send() { return channel_ready_to_send_; } void reset_channel_ready_to_send() { channel_ready_to_send_ = false; } - IceTransportState channel_state() { return channel_state_; } + IceTransportStateInternal channel_state() { return channel_state_; } int reset_selected_candidate_pair_switches() { int switches = selected_candidate_pair_switches_; selected_candidate_pair_switches_ = 0; @@ -3663,34 +4080,35 @@ class P2PTransportChannelPingTest : public ::testing::Test, } } - rtc::SocketServer* ss() const { return vss_.get(); } + SocketServer* ss() const { return vss_.get(); } - rtc::PacketSocketFactory* packet_socket_factory() const { + PacketSocketFactory* packet_socket_factory() const { return packet_socket_factory_.get(); } - webrtc::test::ScopedKeyValueConfig field_trials_; - private: - std::unique_ptr vss_; - std::unique_ptr packet_socket_factory_; - rtc::AutoSocketServerThread thread_; + std::unique_ptr vss_; + std::unique_ptr packet_socket_factory_; + AutoSocketServerThread thread_; int selected_candidate_pair_switches_ = 0; int last_sent_packet_id_ = -1; bool channel_ready_to_send_ = false; - absl::optional last_candidate_change_event_; - IceTransportState channel_state_ 
= IceTransportState::STATE_INIT; - absl::optional last_network_route_; + std::optional last_candidate_change_event_; + IceTransportStateInternal channel_state_ = + IceTransportStateInternal::STATE_INIT; + std::optional last_network_route_; }; TEST_F(P2PTransportChannelPingTest, TestTriggeredChecks) { - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("trigger checks", 1, &pa, &field_trials_); + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("trigger checks", 1, &pa, &env.field_trials()); PrepareChannel(&ch); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 2)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 2)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2); @@ -3709,13 +4127,15 @@ TEST_F(P2PTransportChannelPingTest, TestTriggeredChecks) { } TEST_F(P2PTransportChannelPingTest, TestAllConnectionsPingedSufficiently) { - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("ping sufficiently", 1, &pa, &field_trials_); + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("ping sufficiently", 1, &pa, &env.field_trials()); PrepareChannel(&ch); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 2)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 2)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2); @@ -3725,25 +4145,30 @@ TEST_F(P2PTransportChannelPingTest, TestAllConnectionsPingedSufficiently) { // Low-priority connection becomes writable so that the other connection // is not pruned. conn1->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_TRUE_WAIT( - conn1->num_pings_sent() >= MIN_PINGS_AT_WEAK_PING_INTERVAL && - conn2->num_pings_sent() >= MIN_PINGS_AT_WEAK_PING_INTERVAL, - kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return conn1->num_pings_sent() >= MIN_PINGS_AT_WEAK_PING_INTERVAL && + conn2->num_pings_sent() >= MIN_PINGS_AT_WEAK_PING_INTERVAL; + }, + IsTrue(), {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); } // Verify that the connections are pinged at the right time. 
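Aside (illustrative, not part of the patch): in the conversions above the polled lambda does not have to return a bool. `GetState()`, `ports().size()`, and `last_ping_response_received()` are returned directly and checked with `Eq()`, `Gt()`, or `Ne()` matchers, which is what replaces the old `EXPECT_EQ_WAIT` / `ASSERT_TRUE_WAIT` comparisons. A condensed sketch, assuming the same `ep2_ch1()` accessor and `backup_conn` pointer used in the tests above:

```cpp
// Illustrative only: value-returning lambdas paired with gMock matchers.
ASSERT_THAT(webrtc::WaitUntil([&] { return ep2_ch1()->GetState(); },
                              Eq(IceTransportStateInternal::STATE_COMPLETED)),
            webrtc::IsRtcOk());

int64_t last_ping_response_ms = backup_conn->last_ping_response_received();
EXPECT_THAT(webrtc::WaitUntil(
                [&] { return backup_conn->last_ping_response_received(); },
                Gt(last_ping_response_ms),
                {.timeout = webrtc::TimeDelta::Millis(kDefaultTimeout)}),
            webrtc::IsRtcOk());
```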
TEST_F(P2PTransportChannelPingTest, TestStunPingIntervals) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); int RTT_RATIO = 4; int SCHEDULING_RANGE = 200; int RTT_RANGE = 10; - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("TestChannel", 1, &pa, &field_trials_); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("TestChannel", 1, &pa, &env.field_trials()); PrepareChannel(&ch); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); Connection* conn = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn != nullptr); @@ -3755,7 +4180,7 @@ TEST_F(P2PTransportChannelPingTest, TestStunPingIntervals) { SIMULATED_WAIT(conn->num_pings_sent() >= MIN_PINGS_AT_WEAK_PING_INTERVAL, kDefaultTimeout, clock); int64_t ping_interval_ms = (clock.TimeNanos() - start) / - rtc::kNumNanosecsPerMillisec / + webrtc::kNumNanosecsPerMillisec / (MIN_PINGS_AT_WEAK_PING_INTERVAL - 1); EXPECT_EQ(ping_interval_ms, WEAK_PING_INTERVAL); @@ -3768,7 +4193,8 @@ TEST_F(P2PTransportChannelPingTest, TestStunPingIntervals) { // to converge the RTT. SIMULATED_WAIT(conn->num_pings_sent() == ping_sent_before + 1, kMediumTimeout, clock); - ping_interval_ms = (clock.TimeNanos() - start) / rtc::kNumNanosecsPerMillisec; + ping_interval_ms = + (clock.TimeNanos() - start) / webrtc::kNumNanosecsPerMillisec; EXPECT_GE(ping_interval_ms, WEAK_OR_STABILIZING_WRITABLE_CONNECTION_PING_INTERVAL); EXPECT_LE( @@ -3786,7 +4212,8 @@ TEST_F(P2PTransportChannelPingTest, TestStunPingIntervals) { start = clock.TimeNanos(); SIMULATED_WAIT(conn->num_pings_sent() == ping_sent_before + 1, kMediumTimeout, clock); - ping_interval_ms = (clock.TimeNanos() - start) / rtc::kNumNanosecsPerMillisec; + ping_interval_ms = + (clock.TimeNanos() - start) / webrtc::kNumNanosecsPerMillisec; EXPECT_GE(ping_interval_ms, STRONG_AND_STABLE_WRITABLE_CONNECTION_PING_INTERVAL); EXPECT_LE( @@ -3797,14 +4224,14 @@ TEST_F(P2PTransportChannelPingTest, TestStunPingIntervals) { conn->ReceivedPingResponse(LOW_RTT, "id"); // Create a in-flight ping. - conn->Ping(clock.TimeNanos() / rtc::kNumNanosecsPerMillisec); + conn->Ping(clock.TimeNanos() / webrtc::kNumNanosecsPerMillisec); start = clock.TimeNanos(); // In-flight ping timeout and the connection will be unstable. SIMULATED_WAIT( - !conn->stable(clock.TimeNanos() / rtc::kNumNanosecsPerMillisec), + !conn->stable(clock.TimeNanos() / webrtc::kNumNanosecsPerMillisec), kMediumTimeout, clock); int64_t duration_ms = - (clock.TimeNanos() - start) / rtc::kNumNanosecsPerMillisec; + (clock.TimeNanos() - start) / webrtc::kNumNanosecsPerMillisec; EXPECT_GE(duration_ms, 2 * conn->rtt() - RTT_RANGE); EXPECT_LE(duration_ms, 2 * conn->rtt() + RTT_RANGE); // The connection become unstable due to not receiving ping responses. 
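Aside (illustrative, not part of the patch): not every wait macro is converted. `SIMULATED_WAIT`, which only advances the `ScopedFakeClock` while polling and makes no assertion, stays as-is in TestStunPingIntervals above; only the asserting `EXPECT_*_WAIT` / `ASSERT_*_WAIT` macros move to `WaitUntil`. The interval arithmetic also switches from `rtc::kNumNanosecsPerMillisec` to `webrtc::kNumNanosecsPerMillisec`. A small sketch of the retained pattern, using the same `conn` and `clock` as that test:

```cpp
// Illustrative only: SIMULATED_WAIT is kept for time-advancing polls that do
// not assert; the elapsed interval is derived from the fake clock.
int ping_sent_before = conn->num_pings_sent();
int64_t start = clock.TimeNanos();
SIMULATED_WAIT(conn->num_pings_sent() == ping_sent_before + 1, kMediumTimeout,
               clock);
int64_t ping_interval_ms =
    (clock.TimeNanos() - start) / webrtc::kNumNanosecsPerMillisec;
EXPECT_GE(ping_interval_ms,
          WEAK_OR_STABILIZING_WRITABLE_CONNECTION_PING_INTERVAL);
```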
@@ -3817,7 +4244,8 @@ TEST_F(P2PTransportChannelPingTest, TestStunPingIntervals) { ping_sent_before = conn->num_pings_sent(); SIMULATED_WAIT(conn->num_pings_sent() == ping_sent_before + 1, kMediumTimeout, clock); - ping_interval_ms = (clock.TimeNanos() - start) / rtc::kNumNanosecsPerMillisec; + ping_interval_ms = + (clock.TimeNanos() - start) / webrtc::kNumNanosecsPerMillisec; EXPECT_GE(ping_interval_ms, WEAK_OR_STABILIZING_WRITABLE_CONNECTION_PING_INTERVAL); EXPECT_LE( @@ -3828,17 +4256,19 @@ TEST_F(P2PTransportChannelPingTest, TestStunPingIntervals) { // Test that we start pinging as soon as we have a connection and remote ICE // parameters. TEST_F(P2PTransportChannelPingTest, PingingStartedAsSoonAsPossible) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("TestChannel", 1, &pa, &field_trials_); - ch.SetIceRole(ICEROLE_CONTROLLING); - ch.SetIceTiebreaker(kTiebreakerDefault); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("TestChannel", 1, &pa, &env.field_trials()); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLING); ch.SetIceParameters(kIceParams[0]); ch.MaybeStartGathering(); - EXPECT_EQ_WAIT(IceGatheringState::kIceGatheringComplete, ch.gathering_state(), - kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.gathering_state(); }, + Eq(IceGatheringState::kIceGatheringComplete), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Simulate a binding request being received, creating a peer reflexive // candidate pair while we still don't have remote ICE parameters. @@ -3850,8 +4280,8 @@ TEST_F(P2PTransportChannelPingTest, PingingStartedAsSoonAsPossible) { prflx_priority)); Port* port = GetPort(&ch); ASSERT_NE(nullptr, port); - port->SignalUnknownAddress(port, rtc::SocketAddress("1.1.1.1", 1), PROTO_UDP, - &request, kIceUfrag[1], false); + port->SignalUnknownAddress(port, SocketAddress("1.1.1.1", 1), + webrtc::PROTO_UDP, &request, kIceUfrag[1], false); Connection* conn = GetConnectionTo(&ch, "1.1.1.1", 1); ASSERT_NE(nullptr, conn); @@ -3864,17 +4294,21 @@ TEST_F(P2PTransportChannelPingTest, PingingStartedAsSoonAsPossible) { // the first ping is sent as soon as possible, within one simulated clock // tick. 
ch.SetRemoteIceParameters(kIceParams[1]); - EXPECT_TRUE_SIMULATED_WAIT(conn->num_pings_sent() > 0, 1, clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return conn->num_pings_sent(); }, Gt(0), + {.clock = &clock}), + webrtc::IsRtcOk()); } TEST_F(P2PTransportChannelPingTest, TestNoTriggeredChecksWhenWritable) { - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("trigger checks", 1, &pa, &field_trials_); + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("trigger checks", 1, &pa, &env.field_trials()); PrepareChannel(&ch); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 2)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 2)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2); @@ -3894,13 +4328,14 @@ TEST_F(P2PTransportChannelPingTest, TestNoTriggeredChecksWhenWritable) { } TEST_F(P2PTransportChannelPingTest, TestFailedConnectionNotPingable) { - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); P2PTransportChannel ch("Do not ping failed connections", 1, &pa, - &field_trials_); + &env.field_trials()); PrepareChannel(&ch); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn1 != nullptr); @@ -3913,19 +4348,23 @@ TEST_F(P2PTransportChannelPingTest, TestFailedConnectionNotPingable) { } TEST_F(P2PTransportChannelPingTest, TestSignalStateChanged) { - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("state change", 1, &pa, &field_trials_); + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("state change", 1, &pa, &env.field_trials()); PrepareChannel(&ch); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn1 != nullptr); // Pruning the connection reduces the set of active connections and changes // the channel state. conn1->Prune(); - EXPECT_EQ_WAIT(IceTransportState::STATE_FAILED, channel_state(), - kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return channel_state(); }, + Eq(IceTransportStateInternal::STATE_FAILED), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); } // Test adding remote candidates with different ufrags. If a remote candidate @@ -3935,14 +4374,14 @@ TEST_F(P2PTransportChannelPingTest, TestSignalStateChanged) { // parameters arrive. If a remote candidate is added with the current ICE // ufrag, its pwd and generation will be set properly. 
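Aside (illustrative, not part of the patch): the P2PTransportChannelPingTest hunks above also change how each test is plumbed. A per-test `Environment` now supplies the field trials, `FakePortAllocator` takes that Environment plus the fixture's socket server instead of the current thread and a packet socket factory, the explicit `SetIceTiebreaker()` call is dropped, and remote candidates are created with `IceCandidateType::kHost` rather than `LOCAL_PORT_TYPE`. A condensed sketch of the new setup; the channel name "example" is a placeholder:

```cpp
// Illustrative sketch of the new per-test plumbing (not part of the patch).
const Environment env = CreateEnvironment();
FakePortAllocator pa(env, ss());
P2PTransportChannel ch("example", 1, &pa, &env.field_trials());
ch.SetIceRole(webrtc::ICEROLE_CONTROLLING);  // No SetIceTiebreaker() anymore.
ch.SetIceParameters(kIceParams[0]);
ch.SetRemoteIceParameters(kIceParams[1]);
ch.MaybeStartGathering();
ch.AddRemoteCandidate(
    CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1));
```

The test that follows applies exactly this setup before exercising the ufrag handling described above.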
TEST_F(P2PTransportChannelPingTest, TestAddRemoteCandidateWithVariousUfrags) { - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("add candidate", 1, &pa, &field_trials_); + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("add candidate", 1, &pa, &env.field_trials()); PrepareChannel(&ch); ch.MaybeStartGathering(); // Add a candidate with a future ufrag. - ch.AddRemoteCandidate( - CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1, kIceUfrag[2])); + ch.AddRemoteCandidate(CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", + 1, 1, kIceUfrag[2])); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn1 != nullptr); const Candidate& candidate = conn1->remote_candidate(); @@ -3959,61 +4398,71 @@ TEST_F(P2PTransportChannelPingTest, TestAddRemoteCandidateWithVariousUfrags) { EXPECT_EQ(conn1, FindNextPingableConnectionAndPingIt(&ch)); // Add a candidate with an old ufrag. No connection will be created. - ch.AddRemoteCandidate( - CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 2, kIceUfrag[1])); - rtc::Thread::Current()->ProcessMessages(500); + ch.AddRemoteCandidate(CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", + 2, 2, kIceUfrag[1])); + Thread::Current()->ProcessMessages(500); EXPECT_TRUE(GetConnectionTo(&ch, "2.2.2.2", 2) == nullptr); // Add a candidate with the current ufrag, its pwd and generation will be // assigned, even if the generation is not set. - ch.AddRemoteCandidate( - CreateUdpCandidate(LOCAL_PORT_TYPE, "3.3.3.3", 3, 0, kIceUfrag[2])); + ch.AddRemoteCandidate(CreateUdpCandidate(IceCandidateType::kHost, "3.3.3.3", + 3, 0, kIceUfrag[2])); Connection* conn3 = nullptr; - ASSERT_TRUE_WAIT((conn3 = GetConnectionTo(&ch, "3.3.3.3", 3)) != nullptr, - kMediumTimeout); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return conn3 = GetConnectionTo(&ch, "3.3.3.3", 3); }, + Ne(nullptr), {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); const Candidate& new_candidate = conn3->remote_candidate(); EXPECT_EQ(kIcePwd[2], new_candidate.password()); EXPECT_EQ(1U, new_candidate.generation()); // Check that the pwd of all remote candidates are properly assigned. - for (const RemoteCandidate& candidate : ch.remote_candidates()) { - EXPECT_TRUE(candidate.username() == kIceUfrag[1] || - candidate.username() == kIceUfrag[2]); - if (candidate.username() == kIceUfrag[1]) { - EXPECT_EQ(kIcePwd[1], candidate.password()); - } else if (candidate.username() == kIceUfrag[2]) { - EXPECT_EQ(kIcePwd[2], candidate.password()); + for (const RemoteCandidate& remote_candidate : ch.remote_candidates()) { + EXPECT_TRUE(remote_candidate.username() == kIceUfrag[1] || + remote_candidate.username() == kIceUfrag[2]); + if (remote_candidate.username() == kIceUfrag[1]) { + EXPECT_EQ(kIcePwd[1], remote_candidate.password()); + } else if (remote_candidate.username() == kIceUfrag[2]) { + EXPECT_EQ(kIcePwd[2], remote_candidate.password()); } } } TEST_F(P2PTransportChannelPingTest, ConnectionResurrection) { - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("connection resurrection", 1, &pa, &field_trials_); + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("connection resurrection", 1, &pa, + &env.field_trials()); PrepareChannel(&ch); ch.MaybeStartGathering(); // Create conn1 and keep track of original candidate priority. 
- ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn1 != nullptr); uint32_t remote_priority = conn1->remote_candidate().priority(); // Create a higher priority candidate and make the connection // receiving/writable. This will prune conn1. - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 2)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 2)); Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2); ASSERT_TRUE(conn2 != nullptr); conn2->ReceivedPing(); conn2->ReceivedPingResponse(LOW_RTT, "id"); // Wait for conn2 to be selected. - EXPECT_EQ_WAIT(conn2, ch.selected_connection(), kMediumTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.selected_connection(); }, Eq(conn2), + {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); // Destroy the connection to test SignalUnknownAddress. ch.RemoveConnectionForTest(conn1); - EXPECT_TRUE_WAIT(GetConnectionTo(&ch, "1.1.1.1", 1) == nullptr, - kMediumTimeout); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return GetConnectionTo(&ch, "1.1.1.1", 1); }, + Eq(nullptr), {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); // Create a minimal STUN message with prflx priority. IceMessage request(STUN_BINDING_REQUEST); @@ -4026,43 +4475,52 @@ TEST_F(P2PTransportChannelPingTest, ConnectionResurrection) { Port* port = GetPort(&ch); // conn1 should be resurrected with original priority. - port->SignalUnknownAddress(port, rtc::SocketAddress("1.1.1.1", 1), PROTO_UDP, - &request, kIceUfrag[1], false); + port->SignalUnknownAddress(port, SocketAddress("1.1.1.1", 1), + webrtc::PROTO_UDP, &request, kIceUfrag[1], false); conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn1 != nullptr); EXPECT_EQ(conn1->remote_candidate().priority(), remote_priority); // conn3, a real prflx connection, should have prflx priority. - port->SignalUnknownAddress(port, rtc::SocketAddress("3.3.3.3", 1), PROTO_UDP, - &request, kIceUfrag[1], false); + port->SignalUnknownAddress(port, SocketAddress("3.3.3.3", 1), + webrtc::PROTO_UDP, &request, kIceUfrag[1], false); Connection* conn3 = WaitForConnectionTo(&ch, "3.3.3.3", 1); ASSERT_TRUE(conn3 != nullptr); EXPECT_EQ(conn3->remote_candidate().priority(), prflx_priority); } TEST_F(P2PTransportChannelPingTest, TestReceivingStateChange) { - rtc::ScopedFakeClock clock; - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("receiving state change", 1, &pa, &field_trials_); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("receiving state change", 1, &pa, &env.field_trials()); PrepareChannel(&ch); // Default receiving timeout and checking receiving interval should not be too // small. 
EXPECT_LE(1000, ch.config().receiving_timeout_or_default()); EXPECT_LE(200, ch.check_receiving_interval()); - ch.SetIceConfig(CreateIceConfig(500, GATHER_ONCE)); + ch.SetIceConfig(CreateIceConfig(500, webrtc::GATHER_ONCE)); EXPECT_EQ(500, ch.config().receiving_timeout_or_default()); EXPECT_EQ(50, ch.check_receiving_interval()); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1, &clock); ASSERT_TRUE(conn1 != nullptr); - clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); + clock.AdvanceTime(TimeDelta::Seconds(1)); conn1->ReceivedPing(); - conn1->OnReadPacket("ABC", 3, rtc::TimeMicros()); - EXPECT_TRUE_SIMULATED_WAIT(ch.receiving(), kShortTimeout, clock); - EXPECT_TRUE_SIMULATED_WAIT(!ch.receiving(), kShortTimeout, clock); + conn1->OnReadPacket( + ReceivedIpPacket::CreateFromLegacy("ABC", 3, webrtc::TimeMicros())); + + EXPECT_THAT(webrtc::WaitUntil([&] { return ch.receiving(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kShortTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); + EXPECT_THAT(webrtc::WaitUntil([&] { return !ch.receiving(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kShortTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); } // The controlled side will select a connection as the "selected connection" @@ -4072,13 +4530,14 @@ TEST_F(P2PTransportChannelPingTest, TestReceivingStateChange) { // selected connection changes and SignalReadyToSend will be fired if the new // selected connection is writable. TEST_F(P2PTransportChannelPingTest, TestSelectConnectionBeforeNomination) { - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("receiving state change", 1, &pa, &field_trials_); + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("receiving state change", 1, &pa, &env.field_trials()); PrepareChannel(&ch); - ch.SetIceRole(ICEROLE_CONTROLLED); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLED); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn1 != nullptr); // Channel is not ready to send because it is not writable. @@ -4091,7 +4550,10 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionBeforeNomination) { // A connection needs to be writable before it is selected for transmission. conn1->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_EQ_WAIT(conn1, ch.selected_connection(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.selected_connection(); }, Eq(conn1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn1)); EXPECT_TRUE(ConnectionMatchesChangeEvent( conn1, "remote candidate generation maybe changed")); @@ -4099,11 +4561,15 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionBeforeNomination) { // When a higher priority candidate comes in, the new connection is chosen // as the selected connection. 
- ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 10)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 10)); Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2); ASSERT_TRUE(conn2 != nullptr); conn2->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_EQ_WAIT(conn2, ch.selected_connection(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.selected_connection(); }, Eq(conn2), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn2)); EXPECT_TRUE( ConnectionMatchesChangeEvent(conn2, "candidate pair state changed")); @@ -4114,7 +4580,8 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionBeforeNomination) { // connection will be set as the selected connection, even though // its priority is lower. EXPECT_EQ(len, SendData(&ch, data, len, ++last_packet_id)); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "3.3.3.3", 3, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "3.3.3.3", 3, 1)); Connection* conn3 = WaitForConnectionTo(&ch, "3.3.3.3", 3); ASSERT_TRUE(conn3 != nullptr); // Because it has a lower priority, the selected connection is still conn2. @@ -4135,7 +4602,8 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionBeforeNomination) { // the selected connection because the selected connection is nominated by // the controlling side. EXPECT_EQ(len, SendData(&ch, data, len, ++last_packet_id)); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "4.4.4.4", 4, 100)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "4.4.4.4", 4, 100)); Connection* conn4 = WaitForConnectionTo(&ch, "4.4.4.4", 4); ASSERT_TRUE(conn4 != nullptr); EXPECT_EQ(conn3, ch.selected_connection()); @@ -4147,7 +4615,10 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionBeforeNomination) { reset_channel_ready_to_send(); // The selected connection switches after conn4 becomes writable. conn4->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_EQ_WAIT(conn4, ch.selected_connection(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.selected_connection(); }, Eq(conn4), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn4)); EXPECT_TRUE( ConnectionMatchesChangeEvent(conn4, "candidate pair state changed")); @@ -4160,32 +4631,38 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionBeforeNomination) { // that sends a ping directly when a connection has been nominated // i.e on the ICE_CONTROLLED-side. 
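Aside (illustrative, not part of the patch): the nomination and switch tests that follow also move their field-trial configuration into the Environment. Instead of layering a `webrtc::test::ScopedKeyValueConfig` on top of the fixture's field trials, the trial string is passed to `CreateEnvironment()` via `FieldTrials::CreateNoGlobal()`. A minimal sketch of that pattern, taken from the shape used in the next test:

```cpp
// Illustrative only: enabling an ICE field trial through the Environment.
const Environment env = CreateEnvironment(FieldTrials::CreateNoGlobal(
    "WebRTC-IceFieldTrials/send_ping_on_nomination_ice_controlled:true/"));
FakePortAllocator pa(env, ss());
P2PTransportChannel ch("receiving state change", 1, &pa, &env.field_trials());
```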
TEST_F(P2PTransportChannelPingTest, TestPingOnNomination) { - webrtc::test::ScopedKeyValueConfig field_trials( - field_trials_, - "WebRTC-IceFieldTrials/send_ping_on_nomination_ice_controlled:true/"); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("receiving state change", 1, &pa, &field_trials); + const Environment env = CreateEnvironment(FieldTrials::CreateNoGlobal( + "WebRTC-IceFieldTrials/send_ping_on_nomination_ice_controlled:true/")); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("receiving state change", 1, &pa, &env.field_trials()); PrepareChannel(&ch); ch.SetIceConfig(ch.config()); - ch.SetIceRole(ICEROLE_CONTROLLED); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLED); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn1 != nullptr); // A connection needs to be writable before it is selected for transmission. conn1->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_EQ_WAIT(conn1, ch.selected_connection(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.selected_connection(); }, Eq(conn1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn1)); // When a higher priority candidate comes in, the new connection is chosen // as the selected connection. - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 10)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 10)); Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2); ASSERT_TRUE(conn2 != nullptr); conn2->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_EQ_WAIT(conn2, ch.selected_connection(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.selected_connection(); }, Eq(conn2), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn2)); // Now nominate conn1 (low prio), it shall be chosen. @@ -4202,35 +4679,41 @@ TEST_F(P2PTransportChannelPingTest, TestPingOnNomination) { // that sends a ping directly when switching to a new connection // on the ICE_CONTROLLING-side. TEST_F(P2PTransportChannelPingTest, TestPingOnSwitch) { - webrtc::test::ScopedKeyValueConfig field_trials( - field_trials_, - "WebRTC-IceFieldTrials/send_ping_on_switch_ice_controlling:true/"); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("receiving state change", 1, &pa, &field_trials); + const Environment env = CreateEnvironment(FieldTrials::CreateNoGlobal( + "WebRTC-IceFieldTrials/send_ping_on_switch_ice_controlling:true/")); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("receiving state change", 1, &pa, &env.field_trials()); PrepareChannel(&ch); ch.SetIceConfig(ch.config()); - ch.SetIceRole(ICEROLE_CONTROLLING); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLING); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn1 != nullptr); // A connection needs to be writable before it is selected for transmission.
conn1->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_EQ_WAIT(conn1, ch.selected_connection(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.selected_connection(); }, Eq(conn1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn1)); // When a higher priority candidate comes in, the new connection is chosen // as the selected connection. - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 10)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 10)); Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2); ASSERT_TRUE(conn2 != nullptr); const int before = conn2->num_pings_sent(); conn2->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_EQ_WAIT(conn2, ch.selected_connection(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.selected_connection(); }, Eq(conn2), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn2)); // And the additional ping should have been sent directly. @@ -4241,17 +4724,16 @@ TEST_F(P2PTransportChannelPingTest, TestPingOnSwitch) { // that sends a ping directly when selecting a new connection // on the ICE_CONTROLLING-side (i.e. also initial selection). TEST_F(P2PTransportChannelPingTest, TestPingOnSelected) { - webrtc::test::ScopedKeyValueConfig field_trials( - field_trials_, - "WebRTC-IceFieldTrials/send_ping_on_selected_ice_controlling:true/"); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("receiving state change", 1, &pa, &field_trials); + const Environment env = CreateEnvironment(FieldTrials::CreateNoGlobal( + "WebRTC-IceFieldTrials/send_ping_on_selected_ice_controlling:true/")); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("receiving state change", 1, &pa, &env.field_trials()); PrepareChannel(&ch); ch.SetIceConfig(ch.config()); - ch.SetIceRole(ICEROLE_CONTROLLING); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLING); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn1 != nullptr); @@ -4259,7 +4741,10 @@ TEST_F(P2PTransportChannelPingTest, TestPingOnSelected) { // A connection needs to be writable before it is selected for transmission. conn1->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_EQ_WAIT(conn1, ch.selected_connection(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.selected_connection(); }, Eq(conn1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn1)); // And the additional ping should have been sent directly. @@ -4273,11 +4758,11 @@ // also sends back a ping response and sets the ICE pwd in the remote candidate // appropriately.
TEST_F(P2PTransportChannelPingTest, TestSelectConnectionFromUnknownAddress) { - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("receiving state change", 1, &pa, &field_trials_); + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("receiving state change", 1, &pa, &env.field_trials()); PrepareChannel(&ch); - ch.SetIceRole(ICEROLE_CONTROLLED); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLED); ch.MaybeStartGathering(); // A minimal STUN message with prflx priority. IceMessage request(STUN_BINDING_REQUEST); @@ -4287,17 +4772,21 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionFromUnknownAddress) { request.AddAttribute(std::make_unique(STUN_ATTR_PRIORITY, prflx_priority)); TestUDPPort* port = static_cast(GetPort(&ch)); - port->SignalUnknownAddress(port, rtc::SocketAddress("1.1.1.1", 1), PROTO_UDP, - &request, kIceUfrag[1], false); + port->SignalUnknownAddress(port, SocketAddress("1.1.1.1", 1), + webrtc::PROTO_UDP, &request, kIceUfrag[1], false); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn1 != nullptr); EXPECT_EQ(conn1->stats().sent_ping_responses, 1u); EXPECT_NE(conn1, ch.selected_connection()); conn1->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_EQ_WAIT(conn1, ch.selected_connection(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.selected_connection(); }, Eq(conn1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Another connection is nominated via use_candidate. - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 1)); Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2); ASSERT_TRUE(conn2 != nullptr); // Because it has a lower priority, the selected connection is still conn1. @@ -4311,8 +4800,8 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionFromUnknownAddress) { // Another request with unknown address, it will not be set as the selected // connection because the selected connection was nominated by the controlling // side. - port->SignalUnknownAddress(port, rtc::SocketAddress("3.3.3.3", 3), PROTO_UDP, - &request, kIceUfrag[1], false); + port->SignalUnknownAddress(port, SocketAddress("3.3.3.3", 3), + webrtc::PROTO_UDP, &request, kIceUfrag[1], false); Connection* conn3 = WaitForConnectionTo(&ch, "3.3.3.3", 3); ASSERT_TRUE(conn3 != nullptr); EXPECT_EQ(conn3->stats().sent_ping_responses, 1u); @@ -4323,23 +4812,26 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionFromUnknownAddress) { // selected as the selected connection. request.AddAttribute( std::make_unique(STUN_ATTR_USE_CANDIDATE)); - port->SignalUnknownAddress(port, rtc::SocketAddress("4.4.4.4", 4), PROTO_UDP, - &request, kIceUfrag[1], false); + port->SignalUnknownAddress(port, SocketAddress("4.4.4.4", 4), + webrtc::PROTO_UDP, &request, kIceUfrag[1], false); Connection* conn4 = WaitForConnectionTo(&ch, "4.4.4.4", 4); ASSERT_TRUE(conn4 != nullptr); EXPECT_EQ(conn4->stats().sent_ping_responses, 1u); // conn4 is not the selected connection yet because it is not writable. EXPECT_EQ(conn2, ch.selected_connection()); conn4->ReceivedPingResponse(LOW_RTT, "id"); // Become writable. 
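The recurring edit in these hunks swaps the legacy polling macros (EXPECT_EQ_WAIT, EXPECT_TRUE_SIMULATED_WAIT and their variants) for the matcher-based webrtc::WaitUntil helper. A minimal sketch of the pattern, reusing fixture names that already appear in the surrounding tests (ch, conn1, kDefaultTimeout, clock):

    // Old: the macro polled the expression until it equaled the expected value.
    // EXPECT_EQ_WAIT(conn1, ch.selected_connection(), kDefaultTimeout);
    //
    // New: WaitUntil re-evaluates the lambda until the matcher is satisfied or
    // the timeout expires, and returns a result that IsRtcOk() verifies.
    EXPECT_THAT(
        webrtc::WaitUntil([&] { return ch.selected_connection(); }, Eq(conn1),
                          {.timeout = TimeDelta::Millis(kDefaultTimeout)}),
        webrtc::IsRtcOk());
    // The *_SIMULATED_WAIT variants pass the fake clock in the same options
    // struct: {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}.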
- EXPECT_EQ_WAIT(conn4, ch.selected_connection(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.selected_connection(); }, Eq(conn4), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Test that the request from an unknown address contains a ufrag from an old // generation. // port->set_sent_binding_response(false); ch.SetRemoteIceParameters(kIceParams[2]); ch.SetRemoteIceParameters(kIceParams[3]); - port->SignalUnknownAddress(port, rtc::SocketAddress("5.5.5.5", 5), PROTO_UDP, - &request, kIceUfrag[2], false); + port->SignalUnknownAddress(port, SocketAddress("5.5.5.5", 5), + webrtc::PROTO_UDP, &request, kIceUfrag[2], false); Connection* conn5 = WaitForConnectionTo(&ch, "5.5.5.5", 5); ASSERT_TRUE(conn5 != nullptr); EXPECT_EQ(conn5->stats().sent_ping_responses, 1u); @@ -4351,26 +4843,32 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionFromUnknownAddress) { // at which point the controlled side will select that connection as // the "selected connection". TEST_F(P2PTransportChannelPingTest, TestSelectConnectionBasedOnMediaReceived) { - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("receiving state change", 1, &pa, &field_trials_); + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("receiving state change", 1, &pa, &env.field_trials()); PrepareChannel(&ch); - ch.SetIceRole(ICEROLE_CONTROLLED); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLED); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 10)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 10)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn1 != nullptr); conn1->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_EQ_WAIT(conn1, ch.selected_connection(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.selected_connection(); }, Eq(conn1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // If a data packet is received on conn2, the selected connection should // switch to conn2 because the controlled side must mirror the media path // chosen by the controlling side. - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 1)); Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2); ASSERT_TRUE(conn2 != nullptr); conn2->ReceivedPingResponse(LOW_RTT, "id"); // Become writable and receiving. - conn2->OnReadPacket("ABC", 3, rtc::TimeMicros()); + conn2->OnReadPacket( + ReceivedIpPacket::CreateFromLegacy("ABC", 3, webrtc::TimeMicros())); EXPECT_EQ(conn2, ch.selected_connection()); conn2->ReceivedPingResponse(LOW_RTT, "id"); // Become writable. @@ -4385,31 +4883,39 @@ TEST_F(P2PTransportChannelPingTest, TestSelectConnectionBasedOnMediaReceived) { request.AddAttribute( std::make_unique(STUN_ATTR_USE_CANDIDATE)); Port* port = GetPort(&ch); - port->SignalUnknownAddress(port, rtc::SocketAddress("3.3.3.3", 3), PROTO_UDP, - &request, kIceUfrag[1], false); + port->SignalUnknownAddress(port, SocketAddress("3.3.3.3", 3), + webrtc::PROTO_UDP, &request, kIceUfrag[1], false); Connection* conn3 = WaitForConnectionTo(&ch, "3.3.3.3", 3); ASSERT_TRUE(conn3 != nullptr); EXPECT_NE(conn3, ch.selected_connection()); // Not writable yet. conn3->ReceivedPingResponse(LOW_RTT, "id"); // Become writable. 
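The hunks around this point also show the new Connection::OnReadPacket signature: instead of a raw (data, length, timestamp) triple, the packet is wrapped in a ReceivedIpPacket built with the CreateFromLegacy helper. A short sketch, with conn2 standing in for any test connection:

    // Old: raw buffer, length, and a capture time in microseconds.
    // conn2->OnReadPacket("ABC", 3, rtc::TimeMicros());
    //
    // New: the same arguments are packaged into a ReceivedIpPacket first.
    conn2->OnReadPacket(
        ReceivedIpPacket::CreateFromLegacy("ABC", 3, webrtc::TimeMicros()));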
- EXPECT_EQ_WAIT(conn3, ch.selected_connection(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.selected_connection(); }, Eq(conn3), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Now another data packet will not switch the selected connection because the // selected connection was nominated by the controlling side. conn2->ReceivedPing(); conn2->ReceivedPingResponse(LOW_RTT, "id"); - conn2->OnReadPacket("XYZ", 3, rtc::TimeMicros()); - EXPECT_EQ_WAIT(conn3, ch.selected_connection(), kDefaultTimeout); + conn2->OnReadPacket( + ReceivedIpPacket::CreateFromLegacy("XYZ", 3, webrtc::TimeMicros())); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.selected_connection(); }, Eq(conn3), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); } TEST_F(P2PTransportChannelPingTest, TestControlledAgentDataReceivingTakesHigherPrecedenceThanPriority) { - rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("SwitchSelectedConnection", 1, &pa, &field_trials_); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + clock.AdvanceTime(TimeDelta::Seconds(1)); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("SwitchSelectedConnection", 1, &pa, + &env.field_trials()); PrepareChannel(&ch); - ch.SetIceRole(ICEROLE_CONTROLLED); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLED); ch.MaybeStartGathering(); // The connections have decreasing priority. Connection* conn1 = @@ -4427,12 +4933,15 @@ TEST_F(P2PTransportChannelPingTest, // Advance the clock by 1ms so that the last data receiving timestamp of // conn2 is larger. SIMULATED_WAIT(false, 1, clock); - conn2->OnReadPacket("XYZ", 3, rtc::TimeMicros()); + + conn2->OnReadPacket( + ReceivedIpPacket::CreateFromLegacy("XYZ", 3, webrtc::TimeMicros())); EXPECT_EQ(1, reset_selected_candidate_pair_switches()); EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn2)); // conn1 also receives data; it becomes selected due to priority again. - conn1->OnReadPacket("XYZ", 3, rtc::TimeMicros()); + conn1->OnReadPacket( + ReceivedIpPacket::CreateFromLegacy("ABC", 3, webrtc::TimeMicros())); EXPECT_EQ(1, reset_selected_candidate_pair_switches()); EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn2)); @@ -4441,7 +4950,8 @@ TEST_F(P2PTransportChannelPingTest, SIMULATED_WAIT(false, 1, clock); // Need to become writable again because it was pruned. 
conn2->ReceivedPingResponse(LOW_RTT, "id"); - conn2->OnReadPacket("XYZ", 3, rtc::TimeMicros()); + conn2->OnReadPacket( + ReceivedIpPacket::CreateFromLegacy("ABC", 3, webrtc::TimeMicros())); EXPECT_EQ(1, reset_selected_candidate_pair_switches()); EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn2)); @@ -4452,14 +4962,15 @@ TEST_F(P2PTransportChannelPingTest, TEST_F(P2PTransportChannelPingTest, TestControlledAgentNominationTakesHigherPrecedenceThanDataReceiving) { - rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); + ScopedFakeClock clock; + clock.AdvanceTime(TimeDelta::Seconds(1)); + const Environment env = CreateEnvironment(); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("SwitchSelectedConnection", 1, &pa, &field_trials_); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("SwitchSelectedConnection", 1, &pa, + &env.field_trials()); PrepareChannel(&ch); - ch.SetIceRole(ICEROLE_CONTROLLED); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLED); ch.MaybeStartGathering(); // The connections have decreasing priority. Connection* conn1 = @@ -4472,7 +4983,9 @@ TEST_F(P2PTransportChannelPingTest, // conn1 received data; it is the selected connection. // Advance the clock to have a non-zero last-data-receiving time. SIMULATED_WAIT(false, 1, clock); - conn1->OnReadPacket("XYZ", 3, rtc::TimeMicros()); + + conn1->OnReadPacket( + ReceivedIpPacket::CreateFromLegacy("XYZ", 3, webrtc::TimeMicros())); EXPECT_EQ(1, reset_selected_candidate_pair_switches()); EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn1)); @@ -4493,14 +5006,14 @@ TEST_F(P2PTransportChannelPingTest, TEST_F(P2PTransportChannelPingTest, TestControlledAgentSelectsConnectionWithHigherNomination) { - rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); + ScopedFakeClock clock; + clock.AdvanceTime(TimeDelta::Seconds(1)); + const Environment env = CreateEnvironment(); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("test", 1, &pa, &field_trials_); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("test", 1, &pa, &env.field_trials()); PrepareChannel(&ch); - ch.SetIceRole(ICEROLE_CONTROLLED); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLED); ch.MaybeStartGathering(); // The connections have decreasing priority. 
Connection* conn1 = @@ -4511,8 +5024,11 @@ TEST_F(P2PTransportChannelPingTest, ASSERT_TRUE(conn2 != nullptr); // conn1 is the selected connection because it has a higher priority, - EXPECT_EQ_SIMULATED_WAIT(conn1, ch.selected_connection(), kDefaultTimeout, - clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ch.selected_connection(); }, Eq(conn1), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn1)); reset_selected_candidate_pair_switches(); @@ -4540,14 +5056,14 @@ TEST_F(P2PTransportChannelPingTest, } TEST_F(P2PTransportChannelPingTest, TestEstimatedDisconnectedTime) { - rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); + ScopedFakeClock clock; + clock.AdvanceTime(TimeDelta::Seconds(1)); + const Environment env = CreateEnvironment(); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("test", 1, &pa, &field_trials_); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("test", 1, &pa, &env.field_trials()); PrepareChannel(&ch); - ch.SetIceRole(ICEROLE_CONTROLLED); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLED); ch.MaybeStartGathering(); // The connections have decreasing priority. Connection* conn1 = @@ -4560,8 +5076,11 @@ TEST_F(P2PTransportChannelPingTest, TestEstimatedDisconnectedTime) { ASSERT_TRUE(conn2 != nullptr); // conn1 is the selected connection because it has a higher priority, - EXPECT_EQ_SIMULATED_WAIT(conn1, ch.selected_connection(), kDefaultTimeout, - clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ch.selected_connection(); }, Eq(conn1), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn1)); // No estimated disconnect time at first connect <=> value is 0. EXPECT_EQ(LastEstimatedDisconnectedTimeMs(), 0); @@ -4570,10 +5089,11 @@ TEST_F(P2PTransportChannelPingTest, TestEstimatedDisconnectedTime) { int nomination = 1; { - clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); + clock.AdvanceTime(TimeDelta::Seconds(1)); // This will not parse as STUN, and is considered data - conn1->OnReadPacket("XYZ", 3, rtc::TimeMicros()); - clock.AdvanceTime(webrtc::TimeDelta::Seconds(2)); + conn1->OnReadPacket( + ReceivedIpPacket::CreateFromLegacy("XYZ", 3, webrtc::TimeMicros())); + clock.AdvanceTime(TimeDelta::Seconds(2)); // conn2 is nominated; it becomes selected.
NominateConnection(conn2, nomination++); @@ -4583,13 +5103,13 @@ TEST_F(P2PTransportChannelPingTest, TestEstimatedDisconnectedTime) { } { - clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); - conn2->OnReadPacket("XYZ", 3, rtc::TimeMicros()); - - clock.AdvanceTime(webrtc::TimeDelta::Seconds(2)); + clock.AdvanceTime(TimeDelta::Seconds(1)); + conn2->OnReadPacket( + ReceivedIpPacket::CreateFromLegacy("XYZ", 3, webrtc::TimeMicros())); + clock.AdvanceTime(TimeDelta::Seconds(2)); ReceivePingOnConnection(conn2, kIceUfrag[1], 1, nomination++); - clock.AdvanceTime(webrtc::TimeDelta::Millis(500)); + clock.AdvanceTime(TimeDelta::Millis(500)); ReceivePingOnConnection(conn1, kIceUfrag[1], 1, nomination++); EXPECT_EQ(conn1, ch.selected_connection()); @@ -4600,14 +5120,14 @@ TEST_F(P2PTransportChannelPingTest, TestEstimatedDisconnectedTime) { TEST_F(P2PTransportChannelPingTest, TestControlledAgentIgnoresSmallerNomination) { - rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); + ScopedFakeClock clock; + clock.AdvanceTime(TimeDelta::Seconds(1)); + const Environment env = CreateEnvironment(); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("test", 1, &pa, &field_trials_); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("test", 1, &pa, &env.field_trials()); PrepareChannel(&ch); - ch.SetIceRole(ICEROLE_CONTROLLED); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLED); ch.MaybeStartGathering(); Connection* conn = CreateConnectionWithCandidate(&ch, &clock, "1.1.1.1", 1, 10, false); @@ -4620,13 +5140,14 @@ TEST_F(P2PTransportChannelPingTest, TEST_F(P2PTransportChannelPingTest, TestControlledAgentWriteStateTakesHigherPrecedenceThanNomination) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("SwitchSelectedConnection", 1, &pa, &field_trials_); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("SwitchSelectedConnection", 1, &pa, + &env.field_trials()); PrepareChannel(&ch); - ch.SetIceRole(ICEROLE_CONTROLLED); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLED); ch.MaybeStartGathering(); // The connections have decreasing priority. Connection* conn1 = @@ -4642,18 +5163,30 @@ TEST_F(P2PTransportChannelPingTest, // conn2 becomes writable; it is selected even though it is not nominated. conn2->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_EQ_SIMULATED_WAIT(1, reset_selected_candidate_pair_switches(), - kDefaultTimeout, clock); - EXPECT_EQ_SIMULATED_WAIT(conn2, ch.selected_connection(), kDefaultTimeout, - clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return reset_selected_candidate_pair_switches(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ch.selected_connection(); }, Eq(conn2), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn2)); // If conn1 is also writable, it will become selected. 
conn1->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_EQ_SIMULATED_WAIT(1, reset_selected_candidate_pair_switches(), - kDefaultTimeout, clock); - EXPECT_EQ_SIMULATED_WAIT(conn1, ch.selected_connection(), kDefaultTimeout, - clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return reset_selected_candidate_pair_switches(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ch.selected_connection(); }, Eq(conn1), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); EXPECT_TRUE(CandidatePairMatchesNetworkRoute(conn1)); // Make sure sorting won't reselect candidate pair. @@ -4664,17 +5197,17 @@ TEST_F(P2PTransportChannelPingTest, // Test that if a new remote candidate has the same address and port with // an old one, it will be used to create a new connection. TEST_F(P2PTransportChannelPingTest, TestAddRemoteCandidateWithAddressReuse) { - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("candidate reuse", 1, &pa, &field_trials_); + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("candidate reuse", 1, &pa, &env.field_trials()); PrepareChannel(&ch); ch.MaybeStartGathering(); const std::string host_address = "1.1.1.1"; const int port_num = 1; // kIceUfrag[1] is the current generation ufrag. - Candidate candidate = CreateUdpCandidate(LOCAL_PORT_TYPE, host_address, - port_num, 1, kIceUfrag[1]); + Candidate candidate = CreateUdpCandidate( + IceCandidateType::kHost, host_address, port_num, 1, kIceUfrag[1]); ch.AddRemoteCandidate(candidate); Connection* conn1 = WaitForConnectionTo(&ch, host_address, port_num); ASSERT_TRUE(conn1 != nullptr); @@ -4703,15 +5236,16 @@ TEST_F(P2PTransportChannelPingTest, TestAddRemoteCandidateWithAddressReuse) { // When the current selected connection is strong, lower-priority connections // will be pruned. Otherwise, lower-priority connections are kept. TEST_F(P2PTransportChannelPingTest, TestDontPruneWhenWeak) { - rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("test channel", 1, &pa, &field_trials_); + ScopedFakeClock clock; + clock.AdvanceTime(TimeDelta::Seconds(1)); + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("test channel", 1, &pa, &env.field_trials()); PrepareChannel(&ch); - ch.SetIceRole(ICEROLE_CONTROLLED); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLED); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn1 != nullptr); EXPECT_EQ(nullptr, ch.selected_connection()); @@ -4719,18 +5253,26 @@ TEST_F(P2PTransportChannelPingTest, TestDontPruneWhenWeak) { // When a higher-priority, nominated candidate comes in, the connections with // lower-priority are pruned. 
- ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 10)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 10)); Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2, &clock); ASSERT_TRUE(conn2 != nullptr); conn2->ReceivedPingResponse(LOW_RTT, "id"); // Becomes writable and receiving NominateConnection(conn2); - EXPECT_TRUE_SIMULATED_WAIT(conn1->pruned(), kMediumTimeout, clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return conn1->pruned(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); - ch.SetIceConfig(CreateIceConfig(500, GATHER_ONCE)); + ch.SetIceConfig(CreateIceConfig(500, webrtc::GATHER_ONCE)); // Wait until conn2 becomes not receiving. - EXPECT_TRUE_SIMULATED_WAIT(!conn2->receiving(), kMediumTimeout, clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return !conn2->receiving(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "3.3.3.3", 3, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "3.3.3.3", 3, 1)); Connection* conn3 = WaitForConnectionTo(&ch, "3.3.3.3", 3, &clock); ASSERT_TRUE(conn3 != nullptr); // The selected connection should still be conn2. Even though conn3 has lower @@ -4741,12 +5283,12 @@ TEST_F(P2PTransportChannelPingTest, TestDontPruneWhenWeak) { } TEST_F(P2PTransportChannelPingTest, TestDontPruneHighPriorityConnections) { - rtc::ScopedFakeClock clock; - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("test channel", 1, &pa, &field_trials_); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("test channel", 1, &pa, &env.field_trials()); PrepareChannel(&ch); - ch.SetIceRole(ICEROLE_CONTROLLED); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLED); ch.MaybeStartGathering(); Connection* conn1 = CreateConnectionWithCandidate(&ch, &clock, "1.1.1.1", 1, 100, true); @@ -4758,134 +5300,177 @@ TEST_F(P2PTransportChannelPingTest, TestDontPruneHighPriorityConnections) { // conn2. NominateConnection(conn1); SIMULATED_WAIT(false, 1, clock); - conn1->OnReadPacket("XYZ", 3, rtc::TimeMicros()); + conn1->OnReadPacket( + ReceivedIpPacket::CreateFromLegacy("XYZ", 3, webrtc::TimeMicros())); SIMULATED_WAIT(conn2->pruned(), 100, clock); EXPECT_FALSE(conn2->pruned()); } // Test that GetState returns the state correctly. TEST_F(P2PTransportChannelPingTest, TestGetState) { - rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("test channel", 1, &pa, &field_trials_); - EXPECT_EQ(webrtc::IceTransportState::kNew, ch.GetIceTransportState()); + ScopedFakeClock clock; + clock.AdvanceTime(TimeDelta::Seconds(1)); + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("test channel", 1, &pa, &env.field_trials()); + EXPECT_EQ(IceTransportState::kNew, ch.GetIceTransportState()); PrepareChannel(&ch); ch.MaybeStartGathering(); // After gathering we are still in the kNew state because we aren't checking // any connections yet.
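The GetState checks in the next hunk illustrate the state-enum split used through the rest of this file: the internal enum with STATE_* values is now spelled IceTransportStateInternal, while the kNew/kChecking/kConnected/kFailed enum is written as IceTransportState without the webrtc:: qualifier. A two-line sketch of the distinction, with ch as in the surrounding tests:

    // Internal ICE state machine exposed by GetState().
    EXPECT_EQ(IceTransportStateInternal::STATE_INIT, ch.GetState());
    // Standards-style transport state exposed by GetIceTransportState().
    EXPECT_EQ(IceTransportState::kNew, ch.GetIceTransportState());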
- EXPECT_EQ(webrtc::IceTransportState::kNew, ch.GetIceTransportState()); - EXPECT_EQ(IceTransportState::STATE_INIT, ch.GetState()); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 100)); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 1)); + EXPECT_EQ(IceTransportState::kNew, ch.GetIceTransportState()); + EXPECT_EQ(IceTransportStateInternal::STATE_INIT, ch.GetState()); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 100)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 1)); // Checking candidates that have been added with gathered candidates. ASSERT_GT(ch.connections().size(), 0u); - EXPECT_EQ(webrtc::IceTransportState::kChecking, ch.GetIceTransportState()); + EXPECT_EQ(IceTransportState::kChecking, ch.GetIceTransportState()); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1, &clock); Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2, &clock); ASSERT_TRUE(conn1 != nullptr); ASSERT_TRUE(conn2 != nullptr); // Now there are two connections, so the transport channel is connecting. - EXPECT_EQ(IceTransportState::STATE_CONNECTING, ch.GetState()); + EXPECT_EQ(IceTransportStateInternal::STATE_CONNECTING, ch.GetState()); // No connections are writable yet, so we should still be in the kChecking // state. - EXPECT_EQ(webrtc::IceTransportState::kChecking, ch.GetIceTransportState()); + EXPECT_EQ(IceTransportState::kChecking, ch.GetIceTransportState()); // `conn1` becomes writable and receiving; it then should prune `conn2`. conn1->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_TRUE_SIMULATED_WAIT(conn2->pruned(), kShortTimeout, clock); - EXPECT_EQ(IceTransportState::STATE_COMPLETED, ch.GetState()); - EXPECT_EQ(webrtc::IceTransportState::kConnected, ch.GetIceTransportState()); + EXPECT_THAT(webrtc::WaitUntil([&] { return conn2->pruned(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kShortTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); + EXPECT_EQ(IceTransportStateInternal::STATE_COMPLETED, ch.GetState()); + EXPECT_EQ(IceTransportState::kConnected, ch.GetIceTransportState()); conn1->Prune(); // All connections are pruned. // Need to wait until the channel state is updated. - EXPECT_EQ_SIMULATED_WAIT(IceTransportState::STATE_FAILED, ch.GetState(), - kShortTimeout, clock); - EXPECT_EQ(webrtc::IceTransportState::kFailed, ch.GetIceTransportState()); + EXPECT_THAT(webrtc::WaitUntil([&] { return ch.GetState(); }, + Eq(IceTransportStateInternal::STATE_FAILED), + {.timeout = TimeDelta::Millis(kShortTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); + EXPECT_EQ(IceTransportState::kFailed, ch.GetIceTransportState()); } // Test that when a low-priority connection is pruned, it is not deleted // right away, and it can become active and be pruned again. 
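Each test in this section also migrates its setup from thread- and field-trial-based construction to an Environment, as the next hunk shows for TestConnectionPrunedAgain. A condensed sketch of the new boilerplate, with ss() supplied by the test fixture and a trial string only needed when a test overrides a field trial:

    // Old:
    // FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(),
    //                      &field_trials_);
    // P2PTransportChannel ch("test channel", 1, &pa, &field_trials_);
    //
    // New: the Environment owns the field trials and the allocator takes the
    // fixture's socket server.
    const Environment env = CreateEnvironment();
    FakePortAllocator pa(env, ss());
    P2PTransportChannel ch("test channel", 1, &pa, &env.field_trials());
    // Tests that override a trial build the Environment from a trial string:
    //   CreateEnvironment(FieldTrials::CreateNoGlobal(
    //       "WebRTC-IceFieldTrials/max_outstanding_pings:3/"));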
TEST_F(P2PTransportChannelPingTest, TestConnectionPrunedAgain) { - rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); + ScopedFakeClock clock; + clock.AdvanceTime(TimeDelta::Seconds(1)); + const Environment env = CreateEnvironment(); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("test channel", 1, &pa, &field_trials_); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("test channel", 1, &pa, &env.field_trials()); PrepareChannel(&ch); - IceConfig config = CreateIceConfig(1000, GATHER_ONCE); + IceConfig config = CreateIceConfig(1000, webrtc::GATHER_ONCE); config.receiving_switching_delay = 800; ch.SetIceConfig(config); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 100)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 100)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1, &clock); ASSERT_TRUE(conn1 != nullptr); EXPECT_EQ(nullptr, ch.selected_connection()); conn1->ReceivedPingResponse(LOW_RTT, "id"); // Becomes writable and receiving - EXPECT_EQ_SIMULATED_WAIT(conn1, ch.selected_connection(), kDefaultTimeout, - clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ch.selected_connection(); }, Eq(conn1), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Add a low-priority connection `conn2`, which will be pruned, but it will // not be deleted right away. Once the current selected connection becomes not // receiving, `conn2` will start to ping and upon receiving the ping response, // it will become the selected connection. - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 1)); Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2, &clock); ASSERT_TRUE(conn2 != nullptr); - EXPECT_TRUE_SIMULATED_WAIT(!conn2->active(), kDefaultTimeout, clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return !conn2->active(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); // `conn2` should not send a ping yet. EXPECT_EQ(IceCandidatePairState::WAITING, conn2->state()); - EXPECT_EQ(IceTransportState::STATE_COMPLETED, ch.GetState()); + EXPECT_EQ(IceTransportStateInternal::STATE_COMPLETED, ch.GetState()); // Wait for `conn1` becoming not receiving. - EXPECT_TRUE_SIMULATED_WAIT(!conn1->receiving(), kMediumTimeout, clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return !conn1->receiving(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kMediumTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); // Make sure conn2 is not deleted. 
conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2, &clock); ASSERT_TRUE(conn2 != nullptr); - EXPECT_EQ_SIMULATED_WAIT(IceCandidatePairState::IN_PROGRESS, conn2->state(), - kDefaultTimeout, clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return conn2->state(); }, + Eq(IceCandidatePairState::IN_PROGRESS), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); conn2->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_EQ_SIMULATED_WAIT(conn2, ch.selected_connection(), kDefaultTimeout, - clock); - EXPECT_EQ(IceTransportState::STATE_CONNECTING, ch.GetState()); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ch.selected_connection(); }, Eq(conn2), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); + EXPECT_EQ(IceTransportStateInternal::STATE_CONNECTING, ch.GetState()); // When `conn1` comes back again, `conn2` will be pruned again. conn1->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_EQ_SIMULATED_WAIT(conn1, ch.selected_connection(), kDefaultTimeout, - clock); - EXPECT_TRUE_SIMULATED_WAIT(!conn2->active(), kDefaultTimeout, clock); - EXPECT_EQ(IceTransportState::STATE_COMPLETED, ch.GetState()); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ch.selected_connection(); }, Eq(conn1), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); + EXPECT_THAT(webrtc::WaitUntil([&] { return !conn2->active(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); + EXPECT_EQ(IceTransportStateInternal::STATE_COMPLETED, ch.GetState()); } // Test that if all connections in a channel have timed out on writing, they // will all be deleted. We use Prune to simulate write_time_out. TEST_F(P2PTransportChannelPingTest, TestDeleteConnectionsIfAllWriteTimedout) { - rtc::ScopedFakeClock clock; - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("test channel", 1, &pa, &field_trials_); + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("test channel", 1, &pa, &env.field_trials()); PrepareChannel(&ch); ch.MaybeStartGathering(); // Have one connection only, but it later becomes write-time-out. - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 100)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 100)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1, &clock); ASSERT_TRUE(conn1 != nullptr); conn1->ReceivedPing(); // Becomes receiving conn1->Prune(); - EXPECT_TRUE_SIMULATED_WAIT(ch.connections().empty(), kShortTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ch.connections().empty(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kShortTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // Have two connections but both become write-time-out later.
- ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 1)); Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2, &clock); ASSERT_TRUE(conn2 != nullptr); conn2->ReceivedPing(); // Becomes receiving - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "3.3.3.3", 3, 2)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "3.3.3.3", 3, 2)); Connection* conn3 = WaitForConnectionTo(&ch, "3.3.3.3", 3, &clock); ASSERT_TRUE(conn3 != nullptr); conn3->ReceivedPing(); // Becomes receiving // Now prune both conn2 and conn3; they will be deleted soon. conn2->Prune(); conn3->Prune(); - EXPECT_TRUE_SIMULATED_WAIT(ch.connections().empty(), kShortTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ch.connections().empty(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kShortTimeout), .clock = &clock}), + webrtc::IsRtcOk()); } // Tests that after a port allocator session is started, it will be stopped @@ -4893,13 +5478,14 @@ TEST_F(P2PTransportChannelPingTest, TestDeleteConnectionsIfAllWriteTimedout) { // connection belonging to an old session becomes writable, it won't stop // the current port allocator session. TEST_F(P2PTransportChannelPingTest, TestStopPortAllocatorSessions) { - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("test channel", 1, &pa, &field_trials_); + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("test channel", 1, &pa, &env.field_trials()); PrepareChannel(&ch); - ch.SetIceConfig(CreateIceConfig(2000, GATHER_ONCE)); + ch.SetIceConfig(CreateIceConfig(2000, webrtc::GATHER_ONCE)); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 100)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 100)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn1 != nullptr); conn1->ReceivedPingResponse(LOW_RTT, "id"); // Becomes writable and receiving @@ -4916,7 +5502,8 @@ TEST_F(P2PTransportChannelPingTest, TestStopPortAllocatorSessions) { // But if a new connection created from the new session becomes writable, // it will stop the current session. - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 100)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 100)); Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2); ASSERT_TRUE(conn2 != nullptr); conn2->ReceivedPingResponse(LOW_RTT, "id"); // Becomes writable and receiving @@ -4927,16 +5514,17 @@ TEST_F(P2PTransportChannelPingTest, TestStopPortAllocatorSessions) { // These ports may still have connections that need a correct role, in case that // the connections on it may still receive stun pings. TEST_F(P2PTransportChannelPingTest, TestIceRoleUpdatedOnRemovedPort) { - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); P2PTransportChannel ch("test channel", ICE_CANDIDATE_COMPONENT_DEFAULT, &pa, - &field_trials_); + &env.field_trials()); // Starts with ICEROLE_CONTROLLING. 
PrepareChannel(&ch); - IceConfig config = CreateIceConfig(1000, GATHER_CONTINUALLY); + IceConfig config = CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY); ch.SetIceConfig(config); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); Connection* conn = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn != nullptr); @@ -4945,8 +5533,8 @@ TEST_F(P2PTransportChannelPingTest, TestIceRoleUpdatedOnRemovedPort) { // change the ICE role and expect it to be updated. std::vector ports(1, conn->PortForTest()); ch.allocator_session()->SignalPortsPruned(ch.allocator_session(), ports); - ch.SetIceRole(ICEROLE_CONTROLLED); - EXPECT_EQ(ICEROLE_CONTROLLED, conn->PortForTest()->GetIceRole()); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLED); + EXPECT_EQ(webrtc::ICEROLE_CONTROLLED, conn->PortForTest()->GetIceRole()); } // Test that the ICE role is updated even on ports with inactive networks. @@ -4954,14 +5542,15 @@ TEST_F(P2PTransportChannelPingTest, TestIceRoleUpdatedOnRemovedPort) { // pings sent by those connections until they're replaced by newer-generation // connections. TEST_F(P2PTransportChannelPingTest, TestIceRoleUpdatedOnPortAfterIceRestart) { - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); + const Environment env = CreateEnvironment(); + FakePortAllocator pa(env, ss()); P2PTransportChannel ch("test channel", ICE_CANDIDATE_COMPONENT_DEFAULT, &pa, - &field_trials_); + &env.field_trials()); // Starts with ICEROLE_CONTROLLING. PrepareChannel(&ch); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); Connection* conn = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn != nullptr); @@ -4970,24 +5559,25 @@ TEST_F(P2PTransportChannelPingTest, TestIceRoleUpdatedOnPortAfterIceRestart) { // role updated. ch.SetIceParameters(kIceParams[1]); ch.MaybeStartGathering(); - ch.SetIceRole(ICEROLE_CONTROLLED); - EXPECT_EQ(ICEROLE_CONTROLLED, conn->PortForTest()->GetIceRole()); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLED); + EXPECT_EQ(webrtc::ICEROLE_CONTROLLED, conn->PortForTest()->GetIceRole()); } // Test that after some amount of time without receiving data, the connection // will be destroyed. The port will only be destroyed after it is marked as // "pruned." TEST_F(P2PTransportChannelPingTest, TestPortDestroyedAfterTimeoutAndPruned) { - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; + const Environment env = CreateEnvironment(); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); + FakePortAllocator pa(env, ss()); P2PTransportChannel ch("test channel", ICE_CANDIDATE_COMPONENT_DEFAULT, &pa, - &field_trials_); + &env.field_trials()); PrepareChannel(&ch); - ch.SetIceRole(ICEROLE_CONTROLLED); + ch.SetIceRole(webrtc::ICEROLE_CONTROLLED); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); Connection* conn = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn != nullptr); @@ -4995,7 +5585,7 @@ TEST_F(P2PTransportChannelPingTest, TestPortDestroyedAfterTimeoutAndPruned) { // Simulate 2 minutes going by. This should be enough time for the port to // time out. 
for (int second = 0; second < 120; ++second) { - fake_clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); + fake_clock.AdvanceTime(TimeDelta::Seconds(1)); } EXPECT_EQ(nullptr, GetConnectionTo(&ch, "1.1.1.1", 1)); // Port will not be removed because it is not pruned yet. @@ -5004,29 +5594,39 @@ TEST_F(P2PTransportChannelPingTest, TestPortDestroyedAfterTimeoutAndPruned) { // If the session prunes all ports, the port will be destroyed. ch.allocator_session()->PruneAllPorts(); - EXPECT_EQ_SIMULATED_WAIT(nullptr, GetPort(&ch), 1, fake_clock); - EXPECT_EQ_SIMULATED_WAIT(nullptr, GetPrunedPort(&ch), 1, fake_clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return GetPort(&ch); }, Eq(nullptr), + {.clock = &fake_clock}), + webrtc::IsRtcOk()); + EXPECT_THAT(webrtc::WaitUntil([&] { return GetPrunedPort(&ch); }, Eq(nullptr), + {.clock = &fake_clock}), + webrtc::IsRtcOk()); } TEST_F(P2PTransportChannelPingTest, TestMaxOutstandingPingsFieldTrial) { - webrtc::test::ScopedKeyValueConfig field_trials( - field_trials_, "WebRTC-IceFieldTrials/max_outstanding_pings:3/"); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("max", 1, &pa, &field_trials); + const Environment env = CreateEnvironment(FieldTrials::CreateNoGlobal( + "WebRTC-IceFieldTrials/max_outstanding_pings:3/")); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("max", 1, &pa, &env.field_trials()); ch.SetIceConfig(ch.config()); PrepareChannel(&ch); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 2)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 2)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1); Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2); ASSERT_TRUE(conn1 != nullptr); ASSERT_TRUE(conn2 != nullptr); - EXPECT_TRUE_WAIT(conn1->num_pings_sent() == 3 && conn2->num_pings_sent() == 3, - kDefaultTimeout); + EXPECT_THAT(webrtc::WaitUntil( + [&] { + return conn1->num_pings_sent() == 3 && + conn2->num_pings_sent() == 3; + }, + IsTrue(), {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Check that these connections don't send any more pings. 
EXPECT_EQ(nullptr, ch.FindNextPingableConnection()); @@ -5036,25 +5636,30 @@ class P2PTransportChannelMostLikelyToWorkFirstTest : public P2PTransportChannelPingTest { public: P2PTransportChannelMostLikelyToWorkFirstTest() - : turn_server_(rtc::Thread::Current(), + : turn_server_(Thread::Current(), ss(), kTurnUdpIntAddr, kTurnUdpExtAddr) { network_manager_.AddInterface(kPublicAddrs[0]); - allocator_.reset(CreateBasicPortAllocator( - &network_manager_, packet_socket_factory(), ServerAddresses(), - kTurnUdpIntAddr, rtc::SocketAddress())); - allocator_->set_flags(allocator_->flags() | PORTALLOCATOR_DISABLE_STUN | - PORTALLOCATOR_DISABLE_TCP); - allocator_->set_step_delay(kMinimumStepDelay); + } + + BasicPortAllocator& CreatePortAllocator(const Environment& env) { + port_allocator_ = CreateBasicPortAllocator( + env, &network_manager_, packet_socket_factory(), ServerAddresses(), + kTurnUdpIntAddr, SocketAddress()); + port_allocator_->set_flags(port_allocator_->flags() | + webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_TCP); + port_allocator_->set_step_delay(webrtc::kMinimumStepDelay); + return *port_allocator_; } P2PTransportChannel& StartTransportChannel( + const Environment& env, bool prioritize_most_likely_to_work, - int stable_writable_connection_ping_interval, - const webrtc::FieldTrialsView* field_trials = nullptr) { - channel_.reset( - new P2PTransportChannel("checks", 1, allocator(), field_trials)); + int stable_writable_connection_ping_interval) { + channel_ = std::make_unique( + "checks", 1, port_allocator_.get(), &env.field_trials()); IceConfig config = channel_->config(); config.prioritize_most_likely_candidate_pairs = prioritize_most_likely_to_work; @@ -5063,31 +5668,30 @@ class P2PTransportChannelMostLikelyToWorkFirstTest channel_->SetIceConfig(config); PrepareChannel(channel_.get()); channel_->MaybeStartGathering(); - return *channel_.get(); + return *channel_; } - BasicPortAllocator* allocator() { return allocator_.get(); } TestTurnServer* turn_server() { return &turn_server_; } // This verifies the next pingable connection has the expected candidates' // types and, for relay local candidate, the expected relay protocol and ping // it. void VerifyNextPingableConnection( - absl::string_view local_candidate_type, - absl::string_view remote_candidate_type, - absl::string_view relay_protocol_type = UDP_PROTOCOL_NAME) { + IceCandidateType local_candidate_type, + IceCandidateType remote_candidate_type, + absl::string_view relay_protocol_type = webrtc::UDP_PROTOCOL_NAME) { Connection* conn = FindNextPingableConnectionAndPingIt(channel_.get()); ASSERT_TRUE(conn != nullptr); EXPECT_EQ(conn->local_candidate().type(), local_candidate_type); - if (conn->local_candidate().type() == RELAY_PORT_TYPE) { + if (conn->local_candidate().is_relay()) { EXPECT_EQ(conn->local_candidate().relay_protocol(), relay_protocol_type); } EXPECT_EQ(conn->remote_candidate().type(), remote_candidate_type); } private: - std::unique_ptr allocator_; - rtc::FakeNetworkManager network_manager_; + std::unique_ptr port_allocator_; + FakeNetworkManager network_manager_; TestTurnServer turn_server_; std::unique_ptr channel_; }; @@ -5097,37 +5701,47 @@ class P2PTransportChannelMostLikelyToWorkFirstTest // we have a selected connection. 
TEST_F(P2PTransportChannelMostLikelyToWorkFirstTest, TestRelayRelayFirstWhenNothingPingedYet) { + const Environment env = CreateEnvironment(); const int max_strong_interval = 500; + CreatePortAllocator(env); P2PTransportChannel& ch = - StartTransportChannel(true, max_strong_interval, &field_trials_); - EXPECT_TRUE_WAIT(ch.ports().size() == 2, kDefaultTimeout); - EXPECT_EQ(ch.ports()[0]->Type(), LOCAL_PORT_TYPE); - EXPECT_EQ(ch.ports()[1]->Type(), RELAY_PORT_TYPE); + StartTransportChannel(env, true, max_strong_interval); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.ports().size(); }, Eq(2), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + EXPECT_EQ(ch.ports()[0]->Type(), IceCandidateType::kHost); + EXPECT_EQ(ch.ports()[1]->Type(), IceCandidateType::kRelay); - ch.AddRemoteCandidate(CreateUdpCandidate(RELAY_PORT_TYPE, "1.1.1.1", 1, 1)); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 2)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kRelay, "1.1.1.1", 1, 1)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 2)); - EXPECT_TRUE_WAIT(ch.connections().size() == 4, kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.connections().size(); }, Eq(4), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Relay/Relay should be the first pingable connection. Connection* conn = FindNextPingableConnectionAndPingIt(&ch); ASSERT_TRUE(conn != nullptr); - EXPECT_EQ(conn->local_candidate().type(), RELAY_PORT_TYPE); - EXPECT_EQ(conn->remote_candidate().type(), RELAY_PORT_TYPE); + EXPECT_TRUE(conn->local_candidate().is_relay()); + EXPECT_TRUE(conn->remote_candidate().is_relay()); // Unless that we have a trigger check waiting to be pinged. Connection* conn2 = WaitForConnectionTo(&ch, "2.2.2.2", 2); ASSERT_TRUE(conn2 != nullptr); - EXPECT_EQ(conn2->local_candidate().type(), LOCAL_PORT_TYPE); - EXPECT_EQ(conn2->remote_candidate().type(), LOCAL_PORT_TYPE); + EXPECT_TRUE(conn2->local_candidate().is_local()); + EXPECT_TRUE(conn2->remote_candidate().is_local()); conn2->ReceivedPing(); EXPECT_EQ(conn2, FindNextPingableConnectionAndPingIt(&ch)); // Make conn3 the selected connection. Connection* conn3 = WaitForConnectionTo(&ch, "1.1.1.1", 1); ASSERT_TRUE(conn3 != nullptr); - EXPECT_EQ(conn3->local_candidate().type(), LOCAL_PORT_TYPE); - EXPECT_EQ(conn3->remote_candidate().type(), RELAY_PORT_TYPE); + EXPECT_TRUE(conn3->local_candidate().is_local()); + EXPECT_TRUE(conn3->remote_candidate().is_relay()); conn3->ReceivedPingResponse(LOW_RTT, "id"); ASSERT_TRUE(conn3->writable()); conn3->ReceivedPing(); @@ -5158,141 +5772,200 @@ TEST_F(P2PTransportChannelMostLikelyToWorkFirstTest, // in the first round. 
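The relay tests that follow also show the candidate-type cleanup: the LOCAL_PORT_TYPE/RELAY_PORT_TYPE string constants give way to the IceCandidateType enum, and string comparisons on Candidate::type() become the is_local()/is_relay() helpers. A brief sketch in the style of those tests, reusing ch and conn from the fixtures:

    // Old: types compared as strings.
    // EXPECT_EQ(conn->local_candidate().type(), RELAY_PORT_TYPE);
    //
    // New: enum values for ports and candidates, plus boolean helpers.
    ch.AddRemoteCandidate(
        CreateUdpCandidate(IceCandidateType::kRelay, "1.1.1.1", 1, 1));
    EXPECT_EQ(ch.ports()[1]->Type(), IceCandidateType::kRelay);
    EXPECT_TRUE(conn->local_candidate().is_relay());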
TEST_F(P2PTransportChannelMostLikelyToWorkFirstTest, TestRelayRelayFirstWhenEverythingPinged) { - P2PTransportChannel& ch = StartTransportChannel(true, 500, &field_trials_); - EXPECT_TRUE_WAIT(ch.ports().size() == 2, kDefaultTimeout); - EXPECT_EQ(ch.ports()[0]->Type(), LOCAL_PORT_TYPE); - EXPECT_EQ(ch.ports()[1]->Type(), RELAY_PORT_TYPE); + const Environment env = CreateEnvironment(); + CreatePortAllocator(env); + P2PTransportChannel& ch = StartTransportChannel(env, true, 500); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.ports().size(); }, Eq(2), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + EXPECT_EQ(ch.ports()[0]->Type(), IceCandidateType::kHost); + EXPECT_EQ(ch.ports()[1]->Type(), IceCandidateType::kRelay); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); - EXPECT_TRUE_WAIT(ch.connections().size() == 2, kDefaultTimeout); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.connections().size(); }, Eq(2), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Initially, only have Local/Local and Local/Relay. - VerifyNextPingableConnection(LOCAL_PORT_TYPE, LOCAL_PORT_TYPE); - VerifyNextPingableConnection(RELAY_PORT_TYPE, LOCAL_PORT_TYPE); + VerifyNextPingableConnection(IceCandidateType::kHost, + IceCandidateType::kHost); + VerifyNextPingableConnection(IceCandidateType::kRelay, + IceCandidateType::kHost); // Remote Relay candidate arrives. - ch.AddRemoteCandidate(CreateUdpCandidate(RELAY_PORT_TYPE, "2.2.2.2", 2, 2)); - EXPECT_TRUE_WAIT(ch.connections().size() == 4, kDefaultTimeout); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kRelay, "2.2.2.2", 2, 2)); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch.connections().size(); }, Eq(4), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Relay/Relay should be the first since it hasn't been pinged before. - VerifyNextPingableConnection(RELAY_PORT_TYPE, RELAY_PORT_TYPE); + VerifyNextPingableConnection(IceCandidateType::kRelay, + IceCandidateType::kRelay); // Local/Relay is the final one. - VerifyNextPingableConnection(LOCAL_PORT_TYPE, RELAY_PORT_TYPE); + VerifyNextPingableConnection(IceCandidateType::kHost, + IceCandidateType::kRelay); // Now, every connection has been pinged once. The next one should be // Relay/Relay. - VerifyNextPingableConnection(RELAY_PORT_TYPE, RELAY_PORT_TYPE); + VerifyNextPingableConnection(IceCandidateType::kRelay, + IceCandidateType::kRelay); } // Test that when we receive a new remote candidate, they will be tried first // before we re-ping Relay/Relay connections again. 
 TEST_F(P2PTransportChannelMostLikelyToWorkFirstTest,
        TestNoStarvationOnNonRelayConnection) {
-  P2PTransportChannel& ch = StartTransportChannel(true, 500, &field_trials_);
-  EXPECT_TRUE_WAIT(ch.ports().size() == 2, kDefaultTimeout);
-  EXPECT_EQ(ch.ports()[0]->Type(), LOCAL_PORT_TYPE);
-  EXPECT_EQ(ch.ports()[1]->Type(), RELAY_PORT_TYPE);
+  const Environment env = CreateEnvironment();
+  CreatePortAllocator(env);
+  P2PTransportChannel& ch = StartTransportChannel(env, true, 500);
+  EXPECT_THAT(
+      webrtc::WaitUntil([&] { return ch.ports().size(); }, Eq(2),
+                        {.timeout = TimeDelta::Millis(kDefaultTimeout)}),
+      webrtc::IsRtcOk());
+  EXPECT_EQ(ch.ports()[0]->Type(), IceCandidateType::kHost);
+  EXPECT_EQ(ch.ports()[1]->Type(), IceCandidateType::kRelay);
-  ch.AddRemoteCandidate(CreateUdpCandidate(RELAY_PORT_TYPE, "1.1.1.1", 1, 1));
-  EXPECT_TRUE_WAIT(ch.connections().size() == 2, kDefaultTimeout);
+  ch.AddRemoteCandidate(
+      CreateUdpCandidate(IceCandidateType::kRelay, "1.1.1.1", 1, 1));
+  EXPECT_THAT(
+      webrtc::WaitUntil([&] { return ch.connections().size(); }, Eq(2),
+                        {.timeout = TimeDelta::Millis(kDefaultTimeout)}),
+      webrtc::IsRtcOk());
   // Initially, only have Relay/Relay and Local/Relay. Ping Relay/Relay first.
-  VerifyNextPingableConnection(RELAY_PORT_TYPE, RELAY_PORT_TYPE);
+  VerifyNextPingableConnection(IceCandidateType::kRelay,
+                               IceCandidateType::kRelay);
   // Next, ping Local/Relay.
-  VerifyNextPingableConnection(LOCAL_PORT_TYPE, RELAY_PORT_TYPE);
+  VerifyNextPingableConnection(IceCandidateType::kHost,
+                               IceCandidateType::kRelay);
   // Remote Local candidate arrives.
-  ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 2));
-  EXPECT_TRUE_WAIT(ch.connections().size() == 4, kDefaultTimeout);
+  ch.AddRemoteCandidate(
+      CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 2));
+  EXPECT_THAT(
+      webrtc::WaitUntil([&] { return ch.connections().size(); }, Eq(4),
+                        {.timeout = TimeDelta::Millis(kDefaultTimeout)}),
+      webrtc::IsRtcOk());
   // Local/Local should be the first since it hasn't been pinged before.
-  VerifyNextPingableConnection(LOCAL_PORT_TYPE, LOCAL_PORT_TYPE);
+  VerifyNextPingableConnection(IceCandidateType::kHost,
+                               IceCandidateType::kHost);
   // Relay/Local is the final one.
-  VerifyNextPingableConnection(RELAY_PORT_TYPE, LOCAL_PORT_TYPE);
+  VerifyNextPingableConnection(IceCandidateType::kRelay,
+                               IceCandidateType::kHost);
   // Now, every connection has been pinged once. The next one should be
   // Relay/Relay.
-  VerifyNextPingableConnection(RELAY_PORT_TYPE, RELAY_PORT_TYPE);
+  VerifyNextPingableConnection(IceCandidateType::kRelay,
+                               IceCandidateType::kRelay);
 }
 
 // Test skip_relay_to_non_relay_connections field-trial.
 // I.e that we never create connection between relay and non-relay.
 TEST_F(P2PTransportChannelMostLikelyToWorkFirstTest,
        TestSkipRelayToNonRelayConnectionsFieldTrial) {
-  webrtc::test::ScopedKeyValueConfig field_trials(
-      field_trials_,
-      "WebRTC-IceFieldTrials/skip_relay_to_non_relay_connections:true/");
-  P2PTransportChannel& ch = StartTransportChannel(true, 500, &field_trials);
-  EXPECT_TRUE_WAIT(ch.ports().size() == 2, kDefaultTimeout);
-  EXPECT_EQ(ch.ports()[0]->Type(), LOCAL_PORT_TYPE);
-  EXPECT_EQ(ch.ports()[1]->Type(), RELAY_PORT_TYPE);
+  const Environment env = CreateEnvironment(FieldTrials::CreateNoGlobal(
+      "WebRTC-IceFieldTrials/skip_relay_to_non_relay_connections:true/"));
+  CreatePortAllocator(env);
+  P2PTransportChannel& ch = StartTransportChannel(env, true, 500);
+  EXPECT_THAT(
+      webrtc::WaitUntil([&] { return ch.ports().size(); }, Eq(2),
+                        {.timeout = TimeDelta::Millis(kDefaultTimeout)}),
+      webrtc::IsRtcOk());
+  EXPECT_EQ(ch.ports()[0]->Type(), IceCandidateType::kHost);
+  EXPECT_EQ(ch.ports()[1]->Type(), IceCandidateType::kRelay);
   // Remote Relay candidate arrives.
-  ch.AddRemoteCandidate(CreateUdpCandidate(RELAY_PORT_TYPE, "1.1.1.1", 1, 1));
-  EXPECT_TRUE_WAIT(ch.connections().size() == 1, kDefaultTimeout);
+  ch.AddRemoteCandidate(
+      CreateUdpCandidate(IceCandidateType::kRelay, "1.1.1.1", 1, 1));
+  EXPECT_THAT(
+      webrtc::WaitUntil([&] { return ch.connections().size(); }, Eq(1),
+                        {.timeout = TimeDelta::Millis(kDefaultTimeout)}),
+      webrtc::IsRtcOk());
   // Remote Local candidate arrives.
-  ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 2));
-  EXPECT_TRUE_WAIT(ch.connections().size() == 2, kDefaultTimeout);
+  ch.AddRemoteCandidate(
+      CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 2));
+  EXPECT_THAT(
+      webrtc::WaitUntil([&] { return ch.connections().size(); }, Eq(2),
+                        {.timeout = TimeDelta::Millis(kDefaultTimeout)}),
+      webrtc::IsRtcOk());
 }
 
 // Test the ping sequence is UDP Relay/Relay followed by TCP Relay/Relay,
 // followed by the rest.
 TEST_F(P2PTransportChannelMostLikelyToWorkFirstTest, TestTcpTurn) {
+  const Environment env = CreateEnvironment();
   // Add a Tcp Turn server.
-  turn_server()->AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP);
+  turn_server()->AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP);
   RelayServerConfig config;
   config.credentials = kRelayCredentials;
-  config.ports.push_back(ProtocolAddress(kTurnTcpIntAddr, PROTO_TCP));
-  allocator()->AddTurnServerForTesting(config);
-
-  P2PTransportChannel& ch = StartTransportChannel(true, 500, &field_trials_);
-  EXPECT_TRUE_WAIT(ch.ports().size() == 3, kDefaultTimeout);
-  EXPECT_EQ(ch.ports()[0]->Type(), LOCAL_PORT_TYPE);
-  EXPECT_EQ(ch.ports()[1]->Type(), RELAY_PORT_TYPE);
-  EXPECT_EQ(ch.ports()[2]->Type(), RELAY_PORT_TYPE);
+  config.ports.push_back(ProtocolAddress(kTurnTcpIntAddr, webrtc::PROTO_TCP));
+  CreatePortAllocator(env).AddTurnServerForTesting(config);
+
+  P2PTransportChannel& ch = StartTransportChannel(env, true, 500);
+  EXPECT_THAT(
+      webrtc::WaitUntil([&] { return ch.ports().size(); }, Eq(3),
+                        {.timeout = TimeDelta::Millis(kDefaultTimeout)}),
+      webrtc::IsRtcOk());
+  EXPECT_EQ(ch.ports()[0]->Type(), IceCandidateType::kHost);
+  EXPECT_EQ(ch.ports()[1]->Type(), IceCandidateType::kRelay);
+  EXPECT_EQ(ch.ports()[2]->Type(), IceCandidateType::kRelay);
 
   // Remote Relay candidate arrives.
-  ch.AddRemoteCandidate(CreateUdpCandidate(RELAY_PORT_TYPE, "1.1.1.1", 1, 1));
-  EXPECT_TRUE_WAIT(ch.connections().size() == 3, kDefaultTimeout);
+  ch.AddRemoteCandidate(
+      CreateUdpCandidate(IceCandidateType::kRelay, "1.1.1.1", 1, 1));
+  EXPECT_THAT(
+      webrtc::WaitUntil([&] { return ch.connections().size(); }, Eq(3),
+                        {.timeout = TimeDelta::Millis(kDefaultTimeout)}),
+      webrtc::IsRtcOk());
   // UDP Relay/Relay should be pinged first.
-  VerifyNextPingableConnection(RELAY_PORT_TYPE, RELAY_PORT_TYPE);
+  VerifyNextPingableConnection(IceCandidateType::kRelay,
+                               IceCandidateType::kRelay);
   // TCP Relay/Relay is the next.
-  VerifyNextPingableConnection(RELAY_PORT_TYPE, RELAY_PORT_TYPE,
-                               TCP_PROTOCOL_NAME);
+  VerifyNextPingableConnection(IceCandidateType::kRelay,
+                               IceCandidateType::kRelay,
+                               webrtc::TCP_PROTOCOL_NAME);
   // Finally, Local/Relay will be pinged.
-  VerifyNextPingableConnection(LOCAL_PORT_TYPE, RELAY_PORT_TYPE);
+  VerifyNextPingableConnection(IceCandidateType::kHost,
+                               IceCandidateType::kRelay);
 }
 
 // Test that a resolver is created, asked for a result, and destroyed
 // when the address is a hostname. The destruction should happen even
 // if the channel is not destroyed.
 TEST(P2PTransportChannelResolverTest, HostnameCandidateIsResolved) {
-  webrtc::test::ScopedKeyValueConfig field_trials;
+  const Environment env = CreateEnvironment();
   ResolverFactoryFixture resolver_fixture;
-  std::unique_ptr socket_server =
-      rtc::CreateDefaultSocketServer();
-  rtc::AutoSocketServerThread main_thread(socket_server.get());
-  rtc::BasicPacketSocketFactory packet_socket_factory(socket_server.get());
-  FakePortAllocator allocator(rtc::Thread::Current(), &packet_socket_factory,
-                              &field_trials);
-  webrtc::IceTransportInit init;
+  std::unique_ptr socket_server =
+      webrtc::CreateDefaultSocketServer();
+  AutoSocketServerThread main_thread(socket_server.get());
+  FakePortAllocator allocator(env, socket_server.get());
+  IceTransportInit init;
   init.set_port_allocator(&allocator);
   init.set_async_dns_resolver_factory(&resolver_fixture);
-  init.set_field_trials(&field_trials);
+  init.set_field_trials(&env.field_trials());
   auto channel = P2PTransportChannel::Create("tn", 0, std::move(init));
   Candidate hostname_candidate;
   SocketAddress hostname_address("fake.test", 1000);
   hostname_candidate.set_address(hostname_address);
   channel->AddRemoteCandidate(hostname_candidate);
-  ASSERT_EQ_WAIT(1u, channel->remote_candidates().size(), kDefaultTimeout);
+  ASSERT_THAT(webrtc::WaitUntil(
+                  [&] { return channel->remote_candidates().size(); }, Eq(1u),
+                  {.timeout = TimeDelta::Millis(kDefaultTimeout)}),
+              webrtc::IsRtcOk());
   const RemoteCandidate& candidate = channel->remote_candidates()[0];
   EXPECT_FALSE(candidate.address().IsUnresolvedIP());
 }
@@ -5303,50 +5976,67 @@ TEST(P2PTransportChannelResolverTest, HostnameCandidateIsResolved) {
 // done.
 TEST_F(P2PTransportChannelTest,
        PeerReflexiveCandidateBeforeSignalingWithMdnsName) {
+  const Environment env = CreateEnvironment();
   // ep1 and ep2 will only gather host candidates with addresses
   // kPublicAddrs[0] and kPublicAddrs[1], respectively.
-  ConfigureEndpoints(OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts);
+  ConfigureEndpoints(env, OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts);
   // ICE parameter will be set up when creating the channels.
   set_remote_ice_parameter_source(FROM_SETICEPARAMETERS);
   GetEndpoint(0)->network_manager_.set_mdns_responder(
-      std::make_unique(rtc::Thread::Current()));
+      std::make_unique(Thread::Current()));
   ResolverFactoryFixture resolver_fixture;
   GetEndpoint(1)->async_dns_resolver_factory_ = &resolver_fixture;
-  CreateChannels();
+  CreateChannels(env);
   // Pause sending candidates from both endpoints until we find out what port
   // number is assgined to ep1's host candidate.
   PauseCandidates(0);
   PauseCandidates(1);
-  ASSERT_EQ_WAIT(1u, GetEndpoint(0)->saved_candidates_.size(), kMediumTimeout);
+  ASSERT_THAT(webrtc::WaitUntil(
                  [&] { return GetEndpoint(0)->saved_candidates_.size(); },
+                  Eq(1u), {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+              webrtc::IsRtcOk());
   const auto& local_candidate = GetEndpoint(0)->saved_candidates_[0].candidate;
   // The IP address of ep1's host candidate should be obfuscated.
   EXPECT_TRUE(local_candidate.address().IsUnresolvedIP());
   // This is the underlying private IP address of the same candidate at ep1.
-  const auto local_address = rtc::SocketAddress(
-      kPublicAddrs[0].ipaddr(), local_candidate.address().port());
+  const auto local_address =
+      SocketAddress(kPublicAddrs[0].ipaddr(), local_candidate.address().port());
   // Let ep2 signal its candidate to ep1. ep1 should form a candidate
   // pair and start to ping. After receiving the ping, ep2 discovers a prflx
   // remote candidate and form a candidate pair as well.
   ResumeCandidates(1);
-  ASSERT_TRUE_WAIT(ep1_ch1()->selected_connection() != nullptr, kMediumTimeout);
+  ASSERT_THAT(webrtc::WaitUntil(
                  [&] { return ep1_ch1()->selected_connection(); }, Ne(nullptr),
+                  {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+              webrtc::IsRtcOk());
   // ep2 should have the selected connection connected to the prflx remote
   // candidate.
   const Connection* selected_connection = nullptr;
-  ASSERT_TRUE_WAIT(
-      (selected_connection = ep2_ch1()->selected_connection()) != nullptr,
-      kMediumTimeout);
-  EXPECT_EQ(PRFLX_PORT_TYPE, selected_connection->remote_candidate().type());
+  ASSERT_THAT(webrtc::WaitUntil(
                  [&] {
                    return selected_connection =
                               ep2_ch1()->selected_connection();
                  },
+                  Ne(nullptr), {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+              webrtc::IsRtcOk());
+  EXPECT_TRUE(selected_connection->remote_candidate().is_prflx());
   EXPECT_EQ(kIceUfrag[0], selected_connection->remote_candidate().username());
   EXPECT_EQ(kIcePwd[0], selected_connection->remote_candidate().password());
   // Set expectation before ep1 signals a hostname candidate.
   resolver_fixture.SetAddressToReturn(local_address);
   ResumeCandidates(0);
   // Verify ep2's selected connection is updated to use the 'local' candidate.
-  EXPECT_EQ_WAIT(LOCAL_PORT_TYPE,
-                 ep2_ch1()->selected_connection()->remote_candidate().type(),
-                 kMediumTimeout);
+  EXPECT_THAT(webrtc::WaitUntil(
                  [&] {
                    return ep2_ch1()
                        ->selected_connection()
                        ->remote_candidate()
                        .is_local();
                  },
+                  IsTrue(), {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+              webrtc::IsRtcOk());
   EXPECT_EQ(selected_connection, ep2_ch1()->selected_connection());
   DestroyChannels();
@@ -5358,38 +6048,45 @@ TEST_F(P2PTransportChannelTest,
 // address after the resolution completes.
 TEST_F(P2PTransportChannelTest,
        PeerReflexiveCandidateDuringResolvingHostCandidateWithMdnsName) {
+  const Environment env = CreateEnvironment();
   ResolverFactoryFixture resolver_fixture;
   // Prevent resolution until triggered by FireDelayedResolution.
   resolver_fixture.DelayResolution();
   // ep1 and ep2 will only gather host candidates with addresses
   // kPublicAddrs[0] and kPublicAddrs[1], respectively.
-  ConfigureEndpoints(OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts);
+  ConfigureEndpoints(env, OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts);
   // ICE parameter will be set up when creating the channels.
   set_remote_ice_parameter_source(FROM_SETICEPARAMETERS);
   GetEndpoint(0)->network_manager_.set_mdns_responder(
-      std::make_unique(rtc::Thread::Current()));
+      std::make_unique(Thread::Current()));
   GetEndpoint(1)->async_dns_resolver_factory_ = &resolver_fixture;
-  CreateChannels();
+  CreateChannels(env);
   // Pause sending candidates from both endpoints until we find out what port
   // number is assgined to ep1's host candidate.
   PauseCandidates(0);
   PauseCandidates(1);
-  ASSERT_EQ_WAIT(1u, GetEndpoint(0)->saved_candidates_.size(), kMediumTimeout);
+  ASSERT_THAT(webrtc::WaitUntil(
                  [&] { return GetEndpoint(0)->saved_candidates_.size(); },
+                  Eq(1u), {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+              webrtc::IsRtcOk());
   const auto& local_candidate = GetEndpoint(0)->saved_candidates_[0].candidate;
   // The IP address of ep1's host candidate should be obfuscated.
   ASSERT_TRUE(local_candidate.address().IsUnresolvedIP());
   // This is the underlying private IP address of the same candidate at ep1.
-  const auto local_address = rtc::SocketAddress(
-      kPublicAddrs[0].ipaddr(), local_candidate.address().port());
+  const auto local_address =
+      SocketAddress(kPublicAddrs[0].ipaddr(), local_candidate.address().port());
   // Let ep1 signal its hostname candidate to ep2.
   ResumeCandidates(0);
   // Now that ep2 is in the process of resolving the hostname candidate signaled
   // by ep1. Let ep2 signal its host candidate with an IP address to ep1, so
   // that ep1 can form a candidate pair, select it and start to ping ep2.
   ResumeCandidates(1);
-  ASSERT_TRUE_WAIT(ep1_ch1()->selected_connection() != nullptr, kMediumTimeout);
+  ASSERT_THAT(webrtc::WaitUntil(
                  [&] { return ep1_ch1()->selected_connection(); }, Ne(nullptr),
+                  {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+              webrtc::IsRtcOk());
   // Let the mock resolver of ep2 receives the correct resolution.
   resolver_fixture.SetAddressToReturn(local_address);
@@ -5398,17 +6095,25 @@ TEST_F(P2PTransportChannelTest,
   //
   // There is a caveat in our implementation associated with this expectation.
   // See the big comment in P2PTransportChannel::OnUnknownAddress.
-  ASSERT_TRUE_WAIT(ep2_ch1()->selected_connection() != nullptr, kMediumTimeout);
-  EXPECT_EQ(PRFLX_PORT_TYPE,
-            ep2_ch1()->selected_connection()->remote_candidate().type());
+  ASSERT_THAT(webrtc::WaitUntil(
                  [&] { return ep2_ch1()->selected_connection(); }, Ne(nullptr),
+                  {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+              webrtc::IsRtcOk());
+  EXPECT_TRUE(ep2_ch1()->selected_connection()->remote_candidate().is_prflx());
   // ep2 should also be able resolve the hostname candidate. The resolved remote
   // host candidate should be merged with the prflx remote candidate.
   resolver_fixture.FireDelayedResolution();
-  EXPECT_EQ_WAIT(LOCAL_PORT_TYPE,
-                 ep2_ch1()->selected_connection()->remote_candidate().type(),
-                 kMediumTimeout);
+  EXPECT_THAT(webrtc::WaitUntil(
                  [&] {
                    return ep2_ch1()
                        ->selected_connection()
                        ->remote_candidate()
                        .is_local();
                  },
+                  IsTrue(), {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+              webrtc::IsRtcOk());
   EXPECT_EQ(1u, ep2_ch1()->remote_candidates().size());
   DestroyChannels();
@@ -5418,29 +6123,33 @@ TEST_F(P2PTransportChannelTest,
 // which is obfuscated by an mDNS name, and if the peer can complete the name
 // resolution with the correct IP address, we can have a p2p connection.
 TEST_F(P2PTransportChannelTest, CanConnectWithHostCandidateWithMdnsName) {
+  const Environment env = CreateEnvironment();
   ResolverFactoryFixture resolver_fixture;
   // ep1 and ep2 will only gather host candidates with addresses
   // kPublicAddrs[0] and kPublicAddrs[1], respectively.
-  ConfigureEndpoints(OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts);
+  ConfigureEndpoints(env, OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts);
   // ICE parameter will be set up when creating the channels.
   set_remote_ice_parameter_source(FROM_SETICEPARAMETERS);
   GetEndpoint(0)->network_manager_.set_mdns_responder(
-      std::make_unique(rtc::Thread::Current()));
+      std::make_unique(Thread::Current()));
   GetEndpoint(1)->async_dns_resolver_factory_ = &resolver_fixture;
-  CreateChannels();
+  CreateChannels(env);
   // Pause sending candidates from both endpoints until we find out what port
   // number is assgined to ep1's host candidate.
   PauseCandidates(0);
   PauseCandidates(1);
-  ASSERT_EQ_WAIT(1u, GetEndpoint(0)->saved_candidates_.size(), kMediumTimeout);
+  ASSERT_THAT(webrtc::WaitUntil(
                  [&] { return GetEndpoint(0)->saved_candidates_.size(); },
+                  Eq(1u), {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+              webrtc::IsRtcOk());
   const auto& local_candidate_ep1 =
       GetEndpoint(0)->saved_candidates_[0].candidate;
   // The IP address of ep1's host candidate should be obfuscated.
   EXPECT_TRUE(local_candidate_ep1.address().IsUnresolvedIP());
   // This is the underlying private IP address of the same candidate at ep1,
   // and let the mock resolver of ep2 receive the correct resolution.
-  rtc::SocketAddress resolved_address_ep1(local_candidate_ep1.address());
+  SocketAddress resolved_address_ep1(local_candidate_ep1.address());
   resolved_address_ep1.SetResolvedIP(kPublicAddrs[0].ipaddr());
   resolver_fixture.SetAddressToReturn(resolved_address_ep1);
@@ -5449,12 +6158,12 @@ TEST_F(P2PTransportChannelTest, CanConnectWithHostCandidateWithMdnsName) {
   // We should be able to receive a ping from ep2 and establish a connection
   // with a peer reflexive candidate from ep2.
-  ASSERT_TRUE_WAIT((ep1_ch1()->selected_connection()) != nullptr,
-                   kMediumTimeout);
-  EXPECT_EQ(LOCAL_PORT_TYPE,
-            ep1_ch1()->selected_connection()->local_candidate().type());
-  EXPECT_EQ(PRFLX_PORT_TYPE,
-            ep1_ch1()->selected_connection()->remote_candidate().type());
+  ASSERT_THAT(webrtc::WaitUntil(
                  [&] { return ep1_ch1()->selected_connection(); }, Ne(nullptr),
+                  {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+              webrtc::IsRtcOk());
+  EXPECT_TRUE(ep1_ch1()->selected_connection()->local_candidate().is_local());
+  EXPECT_TRUE(ep1_ch1()->selected_connection()->remote_candidate().is_prflx());
   DestroyChannels();
 }
@@ -5467,34 +6176,42 @@ TEST_F(P2PTransportChannelTest, CanConnectWithHostCandidateWithMdnsName) {
 // this remote host candidate in stats.
 TEST_F(P2PTransportChannelTest,
        CandidatesSanitizedInStatsWhenMdnsObfuscationEnabled) {
+  const Environment env = CreateEnvironment();
   ResolverFactoryFixture resolver_fixture;
   // ep1 and ep2 will gather host candidates with addresses
   // kPublicAddrs[0] and kPublicAddrs[1], respectively. ep1 also gathers a srflx
   // and a relay candidates.
-  ConfigureEndpoints(OPEN, OPEN,
-                     kDefaultPortAllocatorFlags | PORTALLOCATOR_DISABLE_TCP,
-                     kOnlyLocalPorts);
+  ConfigureEndpoints(
+      env, OPEN, OPEN,
+      webrtc::kDefaultPortAllocatorFlags | webrtc::PORTALLOCATOR_DISABLE_TCP,
+      kOnlyLocalPorts);
   // ICE parameter will be set up when creating the channels.
   set_remote_ice_parameter_source(FROM_SETICEPARAMETERS);
   GetEndpoint(0)->network_manager_.set_mdns_responder(
-      std::make_unique(rtc::Thread::Current()));
+      std::make_unique(Thread::Current()));
   GetEndpoint(1)->async_dns_resolver_factory_ = &resolver_fixture;
-  CreateChannels();
+  CreateChannels(env);
   // Pause sending candidates from both endpoints until we find out what port
   // number is assigned to ep1's host candidate.
   PauseCandidates(0);
   PauseCandidates(1);
   // Ep1 has a UDP host, a srflx and a relay candidates.
-  ASSERT_EQ_WAIT(3u, GetEndpoint(0)->saved_candidates_.size(), kMediumTimeout);
-  ASSERT_EQ_WAIT(1u, GetEndpoint(1)->saved_candidates_.size(), kMediumTimeout);
+  ASSERT_THAT(webrtc::WaitUntil(
                  [&] { return GetEndpoint(0)->saved_candidates_.size(); },
+                  Eq(3u), {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+              webrtc::IsRtcOk());
+  ASSERT_THAT(webrtc::WaitUntil(
                  [&] { return GetEndpoint(1)->saved_candidates_.size(); },
+                  Eq(1u), {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+              webrtc::IsRtcOk());
   for (const auto& candidates_data : GetEndpoint(0)->saved_candidates_) {
     const auto& local_candidate_ep1 = candidates_data.candidate;
-    if (local_candidate_ep1.type() == LOCAL_PORT_TYPE) {
+    if (local_candidate_ep1.is_local()) {
       // This is the underlying private IP address of the same candidate at ep1,
       // and let the mock resolver of ep2 receive the correct resolution.
-      rtc::SocketAddress resolved_address_ep1(local_candidate_ep1.address());
+      SocketAddress resolved_address_ep1(local_candidate_ep1.address());
       resolved_address_ep1.SetResolvedIP(kPublicAddrs[0].ipaddr());
       resolver_fixture.SetAddressToReturn(resolved_address_ep1);
       break;
@@ -5503,12 +6220,20 @@ TEST_F(P2PTransportChannelTest,
   ResumeCandidates(0);
   ResumeCandidates(1);
-  ASSERT_EQ_WAIT(kIceGatheringComplete, ep1_ch1()->gathering_state(),
-                 kMediumTimeout);
+  ASSERT_THAT(webrtc::WaitUntil([&] { return ep1_ch1()->gathering_state(); },
+                                Eq(webrtc::kIceGatheringComplete),
+                                {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+              webrtc::IsRtcOk());
   // We should have the following candidate pairs on both endpoints:
   // ep1_host <-> ep2_host, ep1_srflx <-> ep2_host, ep1_relay <-> ep2_host
-  ASSERT_EQ_WAIT(3u, ep1_ch1()->connections().size(), kMediumTimeout);
-  ASSERT_EQ_WAIT(3u, ep2_ch1()->connections().size(), kMediumTimeout);
+  ASSERT_THAT(
+      webrtc::WaitUntil([&] { return ep1_ch1()->connections().size(); }, Eq(3u),
+                        {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+      webrtc::IsRtcOk());
+  ASSERT_THAT(
+      webrtc::WaitUntil([&] { return ep2_ch1()->connections().size(); }, Eq(3u),
+                        {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+      webrtc::IsRtcOk());
 
   IceTransportStats ice_transport_stats1;
   IceTransportStats ice_transport_stats2;
@@ -5520,11 +6245,11 @@ TEST_F(P2PTransportChannelTest,
   // Check the stats of ep1 seen by ep1.
   for (const auto& connection_info : ice_transport_stats1.connection_infos) {
     const auto& local_candidate = connection_info.local_candidate;
-    if (local_candidate.type() == LOCAL_PORT_TYPE) {
+    if (local_candidate.is_local()) {
       EXPECT_TRUE(local_candidate.address().IsUnresolvedIP());
-    } else if (local_candidate.type() == STUN_PORT_TYPE) {
+    } else if (local_candidate.is_stun()) {
       EXPECT_TRUE(local_candidate.related_address().IsAnyIP());
-    } else if (local_candidate.type() == RELAY_PORT_TYPE) {
+    } else if (local_candidate.is_relay()) {
       // The related address of the relay candidate should be equal to the
       // srflx address. Note that NAT is not configured, hence the following
       // expectation.
@@ -5537,11 +6262,11 @@ TEST_F(P2PTransportChannelTest,
   // Check the stats of ep1 seen by ep2.
   for (const auto& connection_info : ice_transport_stats2.connection_infos) {
     const auto& remote_candidate = connection_info.remote_candidate;
-    if (remote_candidate.type() == LOCAL_PORT_TYPE) {
+    if (remote_candidate.is_local()) {
       EXPECT_TRUE(remote_candidate.address().IsUnresolvedIP());
-    } else if (remote_candidate.type() == STUN_PORT_TYPE) {
+    } else if (remote_candidate.is_stun()) {
       EXPECT_TRUE(remote_candidate.related_address().IsAnyIP());
-    } else if (remote_candidate.type() == RELAY_PORT_TYPE) {
+    } else if (remote_candidate.is_relay()) {
       EXPECT_EQ(kPublicAddrs[0].ipaddr(),
                 remote_candidate.related_address().ipaddr());
     } else {
@@ -5553,18 +6278,22 @@ TEST_F(P2PTransportChannelTest,
 
 TEST_F(P2PTransportChannelTest,
        ConnectingIncreasesSelectedCandidatePairChanges) {
-  rtc::ScopedFakeClock clock;
-  ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags,
-                     kDefaultPortAllocatorFlags);
-  CreateChannels();
+  ScopedFakeClock clock;
+  const Environment env = CreateEnvironment();
+  ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags,
+                     webrtc::kDefaultPortAllocatorFlags);
+  CreateChannels(env);
   IceTransportStats ice_transport_stats;
   ASSERT_TRUE(ep1_ch1()->GetStats(&ice_transport_stats));
   EXPECT_EQ(0u, ice_transport_stats.selected_candidate_pair_changes);
 
   // Let the channels connect.
-  EXPECT_TRUE_SIMULATED_WAIT(ep1_ch1()->selected_connection() != nullptr,
-                             kMediumTimeout, clock);
+  EXPECT_THAT(
+      webrtc::WaitUntil(
+          [&] { return ep1_ch1()->selected_connection(); }, Ne(nullptr),
+          {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
   ASSERT_TRUE(ep1_ch1()->GetStats(&ice_transport_stats));
   EXPECT_EQ(1u, ice_transport_stats.selected_candidate_pair_changes);
@@ -5574,18 +6303,22 @@ TEST_F(P2PTransportChannelTest,
 
 TEST_F(P2PTransportChannelTest,
        DisconnectedIncreasesSelectedCandidatePairChanges) {
-  rtc::ScopedFakeClock clock;
-  ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags,
-                     kDefaultPortAllocatorFlags);
-  CreateChannels();
+  ScopedFakeClock clock;
+  const Environment env = CreateEnvironment();
+  ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags,
+                     webrtc::kDefaultPortAllocatorFlags);
+  CreateChannels(env);
   IceTransportStats ice_transport_stats;
   ASSERT_TRUE(ep1_ch1()->GetStats(&ice_transport_stats));
   EXPECT_EQ(0u, ice_transport_stats.selected_candidate_pair_changes);
 
   // Let the channels connect.
-  EXPECT_TRUE_SIMULATED_WAIT(ep1_ch1()->selected_connection() != nullptr,
-                             kMediumTimeout, clock);
+  EXPECT_THAT(
+      webrtc::WaitUntil(
+          [&] { return ep1_ch1()->selected_connection(); }, Ne(nullptr),
+          {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
   ASSERT_TRUE(ep1_ch1()->GetStats(&ice_transport_stats));
   EXPECT_EQ(1u, ice_transport_stats.selected_candidate_pair_changes);
@@ -5594,8 +6327,11 @@ TEST_F(P2PTransportChannelTest,
   for (Connection* con : ep1_ch1()->connections()) {
     con->Prune();
   }
-  EXPECT_TRUE_SIMULATED_WAIT(ep1_ch1()->selected_connection() == nullptr,
-                             kMediumTimeout, clock);
+  EXPECT_THAT(
+      webrtc::WaitUntil(
+          [&] { return ep1_ch1()->selected_connection(); }, Eq(nullptr),
+          {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
   ASSERT_TRUE(ep1_ch1()->GetStats(&ice_transport_stats));
   EXPECT_EQ(2u, ice_transport_stats.selected_candidate_pair_changes);
@@ -5605,18 +6341,22 @@ TEST_F(P2PTransportChannelTest,
 
 TEST_F(P2PTransportChannelTest,
        NewSelectionIncreasesSelectedCandidatePairChanges) {
-  rtc::ScopedFakeClock clock;
-  ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags,
-                     kDefaultPortAllocatorFlags);
-  CreateChannels();
+  ScopedFakeClock clock;
+  const Environment env = CreateEnvironment();
+  ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags,
+                     webrtc::kDefaultPortAllocatorFlags);
+  CreateChannels(env);
   IceTransportStats ice_transport_stats;
   ASSERT_TRUE(ep1_ch1()->GetStats(&ice_transport_stats));
   EXPECT_EQ(0u, ice_transport_stats.selected_candidate_pair_changes);
 
   // Let the channels connect.
-  EXPECT_TRUE_SIMULATED_WAIT(ep1_ch1()->selected_connection() != nullptr,
-                             kMediumTimeout, clock);
+  EXPECT_THAT(
+      webrtc::WaitUntil(
+          [&] { return ep1_ch1()->selected_connection(); }, Ne(nullptr),
+          {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
   ASSERT_TRUE(ep1_ch1()->GetStats(&ice_transport_stats));
   EXPECT_EQ(1u, ice_transport_stats.selected_candidate_pair_changes);
@@ -5629,11 +6369,16 @@ TEST_F(P2PTransportChannelTest,
       con->Prune();
     }
   }
-  EXPECT_TRUE_SIMULATED_WAIT(
-      ep1_ch1()->selected_connection() != nullptr &&
-          (ep1_ch1()->GetStats(&ice_transport_stats),
-           ice_transport_stats.selected_candidate_pair_changes >= 2u),
-      kMediumTimeout, clock);
+  EXPECT_THAT(
+      webrtc::WaitUntil(
+          [&] {
+            return ep1_ch1()->selected_connection() != nullptr &&
+                   (ep1_ch1()->GetStats(&ice_transport_stats),
+                    ice_transport_stats.selected_candidate_pair_changes >= 2u);
+          },
+          IsTrue(),
+          {.timeout = TimeDelta::Millis(kMediumTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
   ASSERT_TRUE(ep1_ch1()->GetStats(&ice_transport_stats));
   EXPECT_GE(ice_transport_stats.selected_candidate_pair_changes, 2u);
@@ -5645,46 +6390,54 @@ TEST_F(P2PTransportChannelTest,
 // when it is queried via GetSelectedCandidatePair.
 TEST_F(P2PTransportChannelTest,
        SelectedCandidatePairSanitizedWhenMdnsObfuscationEnabled) {
+  const Environment env = CreateEnvironment();
   ResolverFactoryFixture resolver_fixture;
   // ep1 and ep2 will gather host candidates with addresses
   // kPublicAddrs[0] and kPublicAddrs[1], respectively.
-  ConfigureEndpoints(OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts);
+  ConfigureEndpoints(env, OPEN, OPEN, kOnlyLocalPorts, kOnlyLocalPorts);
   // ICE parameter will be set up when creating the channels.
   set_remote_ice_parameter_source(FROM_SETICEPARAMETERS);
   GetEndpoint(0)->network_manager_.set_mdns_responder(
-      std::make_unique(rtc::Thread::Current()));
+      std::make_unique(Thread::Current()));
   GetEndpoint(1)->async_dns_resolver_factory_ = &resolver_fixture;
-  CreateChannels();
+  CreateChannels(env);
   // Pause sending candidates from both endpoints until we find out what port
   // number is assigned to ep1's host candidate.
   PauseCandidates(0);
   PauseCandidates(1);
-  ASSERT_EQ_WAIT(1u, GetEndpoint(0)->saved_candidates_.size(), kMediumTimeout);
+  ASSERT_THAT(webrtc::WaitUntil(
                  [&] { return GetEndpoint(0)->saved_candidates_.size(); },
+                  Eq(1u), {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+              webrtc::IsRtcOk());
   const auto& candidates_data = GetEndpoint(0)->saved_candidates_[0];
   const auto& local_candidate_ep1 = candidates_data.candidate;
-  ASSERT_TRUE(local_candidate_ep1.type() == LOCAL_PORT_TYPE);
+  ASSERT_TRUE(local_candidate_ep1.is_local());
   // This is the underlying private IP address of the same candidate at ep1,
   // and let the mock resolver of ep2 receive the correct resolution.
-  rtc::SocketAddress resolved_address_ep1(local_candidate_ep1.address());
+  SocketAddress resolved_address_ep1(local_candidate_ep1.address());
   resolved_address_ep1.SetResolvedIP(kPublicAddrs[0].ipaddr());
   resolver_fixture.SetAddressToReturn(resolved_address_ep1);
   ResumeCandidates(0);
   ResumeCandidates(1);
-  ASSERT_TRUE_WAIT(ep1_ch1()->selected_connection() != nullptr &&
-                       ep2_ch1()->selected_connection() != nullptr,
-                   kMediumTimeout);
+  ASSERT_THAT(webrtc::WaitUntil(
                  [&] {
                    return ep1_ch1()->selected_connection() != nullptr &&
                          ep2_ch1()->selected_connection() != nullptr;
                  },
+                  IsTrue(), {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+              webrtc::IsRtcOk());
 
   const auto pair_ep1 = ep1_ch1()->GetSelectedCandidatePair();
   ASSERT_TRUE(pair_ep1.has_value());
-  EXPECT_EQ(LOCAL_PORT_TYPE, pair_ep1->local_candidate().type());
+  EXPECT_TRUE(pair_ep1->local_candidate().is_local());
   EXPECT_TRUE(pair_ep1->local_candidate().address().IsUnresolvedIP());
 
   const auto pair_ep2 = ep2_ch1()->GetSelectedCandidatePair();
   ASSERT_TRUE(pair_ep2.has_value());
-  EXPECT_EQ(LOCAL_PORT_TYPE, pair_ep2->remote_candidate().type());
+  EXPECT_TRUE(pair_ep2->remote_candidate().is_local());
   EXPECT_TRUE(pair_ep2->remote_candidate().address().IsUnresolvedIP());
 
   DestroyChannels();
@@ -5692,40 +6445,48 @@ TEST_F(P2PTransportChannelTest,
 
 TEST_F(P2PTransportChannelTest,
        NoPairOfLocalRelayCandidateWithRemoteMdnsCandidate) {
-  const int kOnlyRelayPorts = cricket::PORTALLOCATOR_DISABLE_UDP |
-                              cricket::PORTALLOCATOR_DISABLE_STUN |
-                              cricket::PORTALLOCATOR_DISABLE_TCP;
+  const Environment env = CreateEnvironment();
+  const int kOnlyRelayPorts = webrtc::PORTALLOCATOR_DISABLE_UDP |
+                              webrtc::PORTALLOCATOR_DISABLE_STUN |
+                              webrtc::PORTALLOCATOR_DISABLE_TCP;
   // We use one endpoint to test the behavior of adding remote candidates, and
   // this endpoint only gathers relay candidates.
-  ConfigureEndpoints(OPEN, OPEN, kOnlyRelayPorts, kDefaultPortAllocatorFlags);
-  GetEndpoint(0)->cd1_.ch_ = CreateChannel(0, ICE_CANDIDATE_COMPONENT_DEFAULT,
-                                           kIceParams[0], kIceParams[1]);
+  ConfigureEndpoints(env, OPEN, OPEN, kOnlyRelayPorts,
+                     webrtc::kDefaultPortAllocatorFlags);
+  GetEndpoint(0)->cd1_.ch_ = CreateChannel(
+      env, 0, ICE_CANDIDATE_COMPONENT_DEFAULT, kIceParams[0], kIceParams[1]);
   IceConfig config;
   // Start gathering and we should have only a single relay port.
   ep1_ch1()->SetIceConfig(config);
   ep1_ch1()->MaybeStartGathering();
-  EXPECT_EQ_WAIT(IceGatheringState::kIceGatheringComplete,
-                 ep1_ch1()->gathering_state(), kDefaultTimeout);
+  EXPECT_THAT(
+      webrtc::WaitUntil([&] { return ep1_ch1()->gathering_state(); },
+                        Eq(IceGatheringState::kIceGatheringComplete),
+                        {.timeout = TimeDelta::Millis(kDefaultTimeout)}),
+      webrtc::IsRtcOk());
   EXPECT_EQ(1u, ep1_ch1()->ports().size());
   // Add a plain remote host candidate and three remote mDNS candidates with the
   // host, srflx and relay types. Note that the candidates differ in their
   // ports.
-  cricket::Candidate host_candidate = CreateUdpCandidate(
-      LOCAL_PORT_TYPE, "1.1.1.1", 1 /* port */, 0 /* priority */);
+  Candidate host_candidate = CreateUdpCandidate(
      IceCandidateType::kHost, "1.1.1.1", 1 /* port */, 0 /* priority */);
   ep1_ch1()->AddRemoteCandidate(host_candidate);
 
-  std::vector mdns_candidates;
-  mdns_candidates.push_back(CreateUdpCandidate(LOCAL_PORT_TYPE, "example.local",
-                                               2 /* port */, 0 /* priority */));
-  mdns_candidates.push_back(CreateUdpCandidate(STUN_PORT_TYPE, "example.local",
-                                               3 /* port */, 0 /* priority */));
-  mdns_candidates.push_back(CreateUdpCandidate(RELAY_PORT_TYPE, "example.local",
-                                               4 /* port */, 0 /* priority */));
+  std::vector mdns_candidates;
+  mdns_candidates.push_back(CreateUdpCandidate(IceCandidateType::kHost,
+                                               "example.local", 2 /* port */,
+                                               0 /* priority */));
+  mdns_candidates.push_back(CreateUdpCandidate(IceCandidateType::kSrflx,
+                                               "example.local", 3 /* port */,
+                                               0 /* priority */));
+  mdns_candidates.push_back(CreateUdpCandidate(IceCandidateType::kRelay,
+                                               "example.local", 4 /* port */,
+                                               0 /* priority */));
   // We just resolve the hostname to 1.1.1.1, and add the candidates with this
   // address directly to simulate the process of adding remote candidates with
   // the name resolution.
   for (auto& mdns_candidate : mdns_candidates) {
-    rtc::SocketAddress resolved_address(mdns_candidate.address());
+    SocketAddress resolved_address(mdns_candidate.address());
     resolved_address.SetResolvedIP(0x1111);  // 1.1.1.1
    mdns_candidate.set_address(resolved_address);
    EXPECT_FALSE(mdns_candidate.address().IsUnresolvedIP());
@@ -5744,29 +6505,29 @@ TEST_F(P2PTransportChannelTest,
   DestroyChannels();
 }
 
-class MockMdnsResponder : public webrtc::MdnsResponderInterface {
+class MockMdnsResponder : public MdnsResponderInterface {
  public:
   MOCK_METHOD(void,
               CreateNameForAddress,
-              (const rtc::IPAddress&, NameCreatedCallback),
+              (const IPAddress&, NameCreatedCallback),
               (override));
   MOCK_METHOD(void,
               RemoveNameForAddress,
-              (const rtc::IPAddress&, NameRemovedCallback),
+              (const IPAddress&, NameRemovedCallback),
              (override));
 };
 
 TEST_F(P2PTransportChannelTest,
        SrflxCandidateCanBeGatheredBeforeMdnsCandidateToCreateConnection) {
+  const Environment env = CreateEnvironment();
   // ep1 and ep2 will only gather host and srflx candidates with base addresses
   // kPublicAddrs[0] and kPublicAddrs[1], respectively, and we use a shared
   // socket in gathering.
   const auto kOnlyLocalAndStunPorts =
-      cricket::PORTALLOCATOR_DISABLE_RELAY |
-      cricket::PORTALLOCATOR_DISABLE_TCP |
-      cricket::PORTALLOCATOR_ENABLE_SHARED_SOCKET;
+      webrtc::PORTALLOCATOR_DISABLE_RELAY | webrtc::PORTALLOCATOR_DISABLE_TCP |
+      webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET;
   // ep1 is configured with a NAT so that we do gather a srflx candidate.
-  ConfigureEndpoints(NAT_FULL_CONE, OPEN, kOnlyLocalAndStunPorts,
+  ConfigureEndpoints(env, NAT_FULL_CONE, OPEN, kOnlyLocalAndStunPorts,
                     kOnlyLocalAndStunPorts);
   // ICE parameter will be set up when creating the channels.
   set_remote_ice_parameter_source(FROM_SETICEPARAMETERS);
@@ -5779,15 +6540,15 @@ TEST_F(P2PTransportChannelTest,
   GetEndpoint(0)->network_manager_.set_mdns_responder(
       std::move(mock_mdns_responder));
 
-  CreateChannels();
+  CreateChannels(env);
 
   // We should be able to form a srflx-host connection to ep2.
-  ASSERT_TRUE_WAIT((ep1_ch1()->selected_connection()) != nullptr,
-                   kMediumTimeout);
-  EXPECT_EQ(STUN_PORT_TYPE,
-            ep1_ch1()->selected_connection()->local_candidate().type());
-  EXPECT_EQ(LOCAL_PORT_TYPE,
-            ep1_ch1()->selected_connection()->remote_candidate().type());
+  ASSERT_THAT(webrtc::WaitUntil(
                  [&] { return ep1_ch1()->selected_connection(); }, Ne(nullptr),
+                  {.timeout = TimeDelta::Millis(kMediumTimeout)}),
+              webrtc::IsRtcOk());
+  EXPECT_TRUE(ep1_ch1()->selected_connection()->local_candidate().is_stun());
+  EXPECT_TRUE(ep1_ch1()->selected_connection()->remote_candidate().is_local());
 
   DestroyChannels();
 }
@@ -5799,64 +6560,88 @@ TEST_F(P2PTransportChannelTest,
 // removed and are still usable for necessary route switching.
 TEST_F(P2PTransportChannelTest,
        SurfaceHostCandidateOnCandidateFilterChangeFromRelayToAll) {
-  rtc::ScopedFakeClock clock;
-
-  ConfigureEndpoints(
-      OPEN, OPEN,
-      kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET,
-      kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET);
+  ScopedFakeClock clock;
+  const Environment env = CreateEnvironment();
+
+  ConfigureEndpoints(env, OPEN, OPEN,
+                     webrtc::kDefaultPortAllocatorFlags |
+                         webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET,
+                     webrtc::kDefaultPortAllocatorFlags |
+                         webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET);
   auto* ep1 = GetEndpoint(0);
   auto* ep2 = GetEndpoint(1);
-  ep1->allocator_->SetCandidateFilter(CF_RELAY);
-  ep2->allocator_->SetCandidateFilter(CF_RELAY);
+  ep1->allocator_->SetCandidateFilter(webrtc::CF_RELAY);
+  ep2->allocator_->SetCandidateFilter(webrtc::CF_RELAY);
   // Enable continual gathering and also resurfacing gathered candidates upon
   // the candidate filter changed in the ICE configuration.
-  IceConfig ice_config = CreateIceConfig(1000, GATHER_CONTINUALLY);
+  IceConfig ice_config = CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY);
   ice_config.surface_ice_candidates_on_ice_transport_type_changed = true;
-  CreateChannels(ice_config, ice_config);
-  ASSERT_TRUE_SIMULATED_WAIT(ep1_ch1()->selected_connection() != nullptr,
-                             kDefaultTimeout, clock);
-  ASSERT_TRUE_SIMULATED_WAIT(ep2_ch1()->selected_connection() != nullptr,
-                             kDefaultTimeout, clock);
-  EXPECT_EQ(RELAY_PORT_TYPE,
-            ep1_ch1()->selected_connection()->local_candidate().type());
-  EXPECT_EQ(RELAY_PORT_TYPE,
-            ep2_ch1()->selected_connection()->local_candidate().type());
+  CreateChannels(env, ice_config, ice_config);
+  ASSERT_THAT(
+      webrtc::WaitUntil(
+          [&] { return ep1_ch1()->selected_connection(); }, Ne(nullptr),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
+  ASSERT_THAT(
+      webrtc::WaitUntil(
+          [&] { return ep2_ch1()->selected_connection(); }, Ne(nullptr),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
+  EXPECT_TRUE(ep1_ch1()->selected_connection()->local_candidate().is_relay());
+  EXPECT_TRUE(ep2_ch1()->selected_connection()->local_candidate().is_relay());

   // Loosen the candidate filter at ep1.
-  ep1->allocator_->SetCandidateFilter(CF_ALL);
-  EXPECT_TRUE_SIMULATED_WAIT(
-      ep1_ch1()->selected_connection() != nullptr &&
-          ep1_ch1()->selected_connection()->local_candidate().type() ==
-              LOCAL_PORT_TYPE,
-      kDefaultTimeout, clock);
-  EXPECT_EQ(RELAY_PORT_TYPE,
-            ep1_ch1()->selected_connection()->remote_candidate().type());
+  ep1->allocator_->SetCandidateFilter(webrtc::CF_ALL);
+  EXPECT_THAT(
+      webrtc::WaitUntil(
+          [&] {
+            return ep1_ch1()->selected_connection() != nullptr &&
+                   ep1_ch1()
                       ->selected_connection()
                       ->local_candidate()
                       .is_local();
+          },
+          IsTrue(),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
+  EXPECT_TRUE(ep1_ch1()->selected_connection()->remote_candidate().is_relay());

   // Loosen the candidate filter at ep2.
-  ep2->allocator_->SetCandidateFilter(CF_ALL);
-  EXPECT_TRUE_SIMULATED_WAIT(
-      ep2_ch1()->selected_connection() != nullptr &&
-          ep2_ch1()->selected_connection()->local_candidate().type() ==
-              LOCAL_PORT_TYPE,
-      kDefaultTimeout, clock);
+  ep2->allocator_->SetCandidateFilter(webrtc::CF_ALL);
+  EXPECT_THAT(
+      webrtc::WaitUntil(
+          [&] {
+            return ep2_ch1()->selected_connection() != nullptr &&
+                   ep2_ch1()
                       ->selected_connection()
                       ->local_candidate()
                       .is_local();
+          },
+          IsTrue(),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
   // We have migrated to a host-host candidate pair.
-  EXPECT_EQ(LOCAL_PORT_TYPE,
-            ep2_ch1()->selected_connection()->remote_candidate().type());
+  EXPECT_TRUE(ep2_ch1()->selected_connection()->remote_candidate().is_local());

   // Block the traffic over non-relay-to-relay routes and expect a route change.
-  fw()->AddRule(false, rtc::FP_ANY, kPublicAddrs[0], kPublicAddrs[1]);
-  fw()->AddRule(false, rtc::FP_ANY, kPublicAddrs[1], kPublicAddrs[0]);
-  fw()->AddRule(false, rtc::FP_ANY, kPublicAddrs[0], kTurnUdpExtAddr);
-  fw()->AddRule(false, rtc::FP_ANY, kPublicAddrs[1], kTurnUdpExtAddr);
+  fw()->AddRule(false, webrtc::FP_ANY, kPublicAddrs[0], kPublicAddrs[1]);
+  fw()->AddRule(false, webrtc::FP_ANY, kPublicAddrs[1], kPublicAddrs[0]);
+  fw()->AddRule(false, webrtc::FP_ANY, kPublicAddrs[0], kTurnUdpExtAddr);
+  fw()->AddRule(false, webrtc::FP_ANY, kPublicAddrs[1], kTurnUdpExtAddr);

   // We should be able to reuse the previously gathered relay candidates.
-  EXPECT_EQ_SIMULATED_WAIT(
-      RELAY_PORT_TYPE,
-      ep1_ch1()->selected_connection()->local_candidate().type(),
-      kDefaultTimeout, clock);
-  EXPECT_EQ(RELAY_PORT_TYPE,
-            ep1_ch1()->selected_connection()->remote_candidate().type());
+  EXPECT_THAT(
+      webrtc::WaitUntil(
+          [&] {
+            return ep1_ch1()
                ->selected_connection()
                ->local_candidate()
                .is_relay();
+          },
+          IsTrue(),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
+  EXPECT_TRUE(ep1_ch1()->selected_connection()->remote_candidate().is_relay());
   DestroyChannels();
 }
@@ -5865,7 +6650,8 @@ TEST_F(P2PTransportChannelTest,
 // changing the candidate filter.
 TEST_F(P2PTransportChannelTest,
        SurfaceSrflxCandidateOnCandidateFilterChangeFromRelayToNoHost) {
-  rtc::ScopedFakeClock clock;
+  ScopedFakeClock clock;
+  const Environment env = CreateEnvironment();
   // We need an actual NAT so that the host candidate is not equivalent to the
   // srflx candidate; otherwise, the host candidate would still surface even
   // though we disable it via the candidate filter below. This is a result of
   // the following:
   // 1. We don't generate the srflx candidate when we have public IP.
   // 2. We keep the host candidate in this case in CheckCandidateFilter even
   // though we intend to filter them.
-  ConfigureEndpoints(
-      NAT_FULL_CONE, NAT_FULL_CONE,
-      kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET,
-      kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET);
+  ConfigureEndpoints(env, NAT_FULL_CONE, NAT_FULL_CONE,
+                     webrtc::kDefaultPortAllocatorFlags |
+                         webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET,
+                     webrtc::kDefaultPortAllocatorFlags |
+                         webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET);
   auto* ep1 = GetEndpoint(0);
   auto* ep2 = GetEndpoint(1);
-  ep1->allocator_->SetCandidateFilter(CF_RELAY);
-  ep2->allocator_->SetCandidateFilter(CF_RELAY);
+  ep1->allocator_->SetCandidateFilter(webrtc::CF_RELAY);
+  ep2->allocator_->SetCandidateFilter(webrtc::CF_RELAY);
   // Enable continual gathering and also resurfacing gathered candidates upon
   // the candidate filter changed in the ICE configuration.
-  IceConfig ice_config = CreateIceConfig(1000, GATHER_CONTINUALLY);
+  IceConfig ice_config = CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY);
   ice_config.surface_ice_candidates_on_ice_transport_type_changed = true;
-  CreateChannels(ice_config, ice_config);
-  ASSERT_TRUE_SIMULATED_WAIT(ep1_ch1()->selected_connection() != nullptr,
-                             kDefaultTimeout, clock);
-  ASSERT_TRUE_SIMULATED_WAIT(ep2_ch1()->selected_connection() != nullptr,
-                             kDefaultTimeout, clock);
-  const uint32_t kCandidateFilterNoHost = CF_ALL & ~CF_HOST;
+  CreateChannels(env, ice_config, ice_config);
+  ASSERT_THAT(
+      webrtc::WaitUntil(
+          [&] { return ep1_ch1()->selected_connection(); }, Ne(nullptr),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
+  ASSERT_THAT(
+      webrtc::WaitUntil(
+          [&] { return ep2_ch1()->selected_connection(); }, Ne(nullptr),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
+  const uint32_t kCandidateFilterNoHost = webrtc::CF_ALL & ~webrtc::CF_HOST;
   // Loosen the candidate filter at ep1.
   ep1->allocator_->SetCandidateFilter(kCandidateFilterNoHost);
-  EXPECT_TRUE_SIMULATED_WAIT(
-      ep1_ch1()->selected_connection() != nullptr &&
-          ep1_ch1()->selected_connection()->local_candidate().type() ==
-              STUN_PORT_TYPE,
-      kDefaultTimeout, clock);
-  EXPECT_EQ(RELAY_PORT_TYPE,
-            ep1_ch1()->selected_connection()->remote_candidate().type());
+  EXPECT_THAT(
+      webrtc::WaitUntil(
+          [&] {
+            return ep1_ch1()->selected_connection() != nullptr &&
+                   ep1_ch1()
                       ->selected_connection()
                       ->local_candidate()
                       .is_stun();
+          },
+          IsTrue(),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
+  EXPECT_TRUE(ep1_ch1()->selected_connection()->remote_candidate().is_relay());

   // Loosen the candidate filter at ep2.
   ep2->allocator_->SetCandidateFilter(kCandidateFilterNoHost);
-  EXPECT_TRUE_SIMULATED_WAIT(
-      ep2_ch1()->selected_connection() != nullptr &&
-          ep2_ch1()->selected_connection()->local_candidate().type() ==
-              STUN_PORT_TYPE,
-      kDefaultTimeout, clock);
+  EXPECT_THAT(
+      webrtc::WaitUntil(
+          [&] {
+            return ep2_ch1()->selected_connection() != nullptr &&
+                   ep2_ch1()
                       ->selected_connection()
                       ->local_candidate()
                       .is_stun();
+          },
+          IsTrue(),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
   // We have migrated to a srflx-srflx candidate pair.
-  EXPECT_EQ(STUN_PORT_TYPE,
-            ep2_ch1()->selected_connection()->remote_candidate().type());
+  EXPECT_TRUE(ep2_ch1()->selected_connection()->remote_candidate().is_stun());

   // Block the traffic over non-relay-to-relay routes and expect a route change.
-  fw()->AddRule(false, rtc::FP_ANY, kPrivateAddrs[0], kPublicAddrs[1]);
-  fw()->AddRule(false, rtc::FP_ANY, kPrivateAddrs[1], kPublicAddrs[0]);
-  fw()->AddRule(false, rtc::FP_ANY, kPrivateAddrs[0], kTurnUdpExtAddr);
-  fw()->AddRule(false, rtc::FP_ANY, kPrivateAddrs[1], kTurnUdpExtAddr);
+  fw()->AddRule(false, webrtc::FP_ANY, kPrivateAddrs[0], kPublicAddrs[1]);
+  fw()->AddRule(false, webrtc::FP_ANY, kPrivateAddrs[1], kPublicAddrs[0]);
+  fw()->AddRule(false, webrtc::FP_ANY, kPrivateAddrs[0], kTurnUdpExtAddr);
+  fw()->AddRule(false, webrtc::FP_ANY, kPrivateAddrs[1], kTurnUdpExtAddr);

   // We should be able to reuse the previously gathered relay candidates.
-  EXPECT_EQ_SIMULATED_WAIT(
-      RELAY_PORT_TYPE,
-      ep1_ch1()->selected_connection()->local_candidate().type(),
-      kDefaultTimeout, clock);
-  EXPECT_EQ(RELAY_PORT_TYPE,
-            ep1_ch1()->selected_connection()->remote_candidate().type());
+  EXPECT_THAT(
+      webrtc::WaitUntil(
+          [&] {
+            return ep1_ch1()
                ->selected_connection()
                ->local_candidate()
                .is_relay();
+          },
+          IsTrue(),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
+  EXPECT_TRUE(ep1_ch1()->selected_connection()->remote_candidate().is_relay());
   DestroyChannels();
 }
@@ -5934,35 +6745,41 @@ TEST_F(P2PTransportChannelTest,
 // gathering stopped.
 TEST_F(P2PTransportChannelTest,
        CannotSurfaceTheNewlyAllowedOnFilterChangeIfNotGatheringContinually) {
-  rtc::ScopedFakeClock clock;
-
-  ConfigureEndpoints(
-      OPEN, OPEN,
-      kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET,
-      kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET);
+  ScopedFakeClock clock;
+  const Environment env = CreateEnvironment();
+
+  ConfigureEndpoints(env, OPEN, OPEN,
+                     webrtc::kDefaultPortAllocatorFlags |
+                         webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET,
+                     webrtc::kDefaultPortAllocatorFlags |
+                         webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET);
   auto* ep1 = GetEndpoint(0);
   auto* ep2 = GetEndpoint(1);
-  ep1->allocator_->SetCandidateFilter(CF_RELAY);
-  ep2->allocator_->SetCandidateFilter(CF_RELAY);
+  ep1->allocator_->SetCandidateFilter(webrtc::CF_RELAY);
+  ep2->allocator_->SetCandidateFilter(webrtc::CF_RELAY);
   // Only gather once.
-  IceConfig ice_config = CreateIceConfig(1000, GATHER_ONCE);
+  IceConfig ice_config = CreateIceConfig(1000, webrtc::GATHER_ONCE);
   ice_config.surface_ice_candidates_on_ice_transport_type_changed = true;
-  CreateChannels(ice_config, ice_config);
-  ASSERT_TRUE_SIMULATED_WAIT(ep1_ch1()->selected_connection() != nullptr,
-                             kDefaultTimeout, clock);
-  ASSERT_TRUE_SIMULATED_WAIT(ep2_ch1()->selected_connection() != nullptr,
-                             kDefaultTimeout, clock);
+  CreateChannels(env, ice_config, ice_config);
+  ASSERT_THAT(
+      webrtc::WaitUntil(
+          [&] { return ep1_ch1()->selected_connection(); }, Ne(nullptr),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
+  ASSERT_THAT(
+      webrtc::WaitUntil(
+          [&] { return ep2_ch1()->selected_connection(); }, Ne(nullptr),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());

   // Loosen the candidate filter at ep1.
-  ep1->allocator_->SetCandidateFilter(CF_ALL);
+  ep1->allocator_->SetCandidateFilter(webrtc::CF_ALL);
   // Wait for a period for any potential surfacing of new candidates.
   SIMULATED_WAIT(false, kDefaultTimeout, clock);
-  EXPECT_EQ(RELAY_PORT_TYPE,
-            ep1_ch1()->selected_connection()->local_candidate().type());
+  EXPECT_TRUE(ep1_ch1()->selected_connection()->local_candidate().is_relay());

   // Loosen the candidate filter at ep2.
-  ep2->allocator_->SetCandidateFilter(CF_ALL);
-  EXPECT_EQ(RELAY_PORT_TYPE,
-            ep2_ch1()->selected_connection()->local_candidate().type());
+  ep2->allocator_->SetCandidateFilter(webrtc::CF_ALL);
+  EXPECT_TRUE(ep2_ch1()->selected_connection()->local_candidate().is_relay());
   DestroyChannels();
 }
@@ -5971,64 +6788,80 @@ TEST_F(P2PTransportChannelTest,
 // match the filter, are not removed.
 TEST_F(P2PTransportChannelTest,
        RestrictingCandidateFilterDoesNotRemoveRegatheredCandidates) {
-  rtc::ScopedFakeClock clock;
-
-  ConfigureEndpoints(
-      OPEN, OPEN,
-      kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET,
-      kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET);
+  ScopedFakeClock clock;
+  const Environment env = CreateEnvironment();
+
+  ConfigureEndpoints(env, OPEN, OPEN,
+                     webrtc::kDefaultPortAllocatorFlags |
+                         webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET,
+                     webrtc::kDefaultPortAllocatorFlags |
+                         webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET);
   auto* ep1 = GetEndpoint(0);
   auto* ep2 = GetEndpoint(1);
-  ep1->allocator_->SetCandidateFilter(CF_ALL);
-  ep2->allocator_->SetCandidateFilter(CF_ALL);
+  ep1->allocator_->SetCandidateFilter(webrtc::CF_ALL);
+  ep2->allocator_->SetCandidateFilter(webrtc::CF_ALL);
   // Enable continual gathering and also resurfacing gathered candidates upon
   // the candidate filter changed in the ICE configuration.
-  IceConfig ice_config = CreateIceConfig(1000, GATHER_CONTINUALLY);
+  IceConfig ice_config = CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY);
   ice_config.surface_ice_candidates_on_ice_transport_type_changed = true;
   // Pause candidates so we can gather all types of candidates. See
   // P2PTransportChannel::OnConnectionStateChange, where we would stop the
   // gathering when we have a strongly connected candidate pair.
   PauseCandidates(0);
   PauseCandidates(1);
-  CreateChannels(ice_config, ice_config);
+  CreateChannels(env, ice_config, ice_config);

   // We have gathered host, srflx and relay candidates.
-  EXPECT_TRUE_SIMULATED_WAIT(ep1->saved_candidates_.size() == 3u,
-                             kDefaultTimeout, clock);
+  EXPECT_THAT(
+      webrtc::WaitUntil(
+          [&] { return ep1->saved_candidates_.size(); }, Eq(3u),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());

   ResumeCandidates(0);
   ResumeCandidates(1);

-  ASSERT_TRUE_SIMULATED_WAIT(
-      ep1_ch1()->selected_connection() != nullptr &&
-          LOCAL_PORT_TYPE ==
-              ep1_ch1()->selected_connection()->local_candidate().type() &&
-          ep2_ch1()->selected_connection() != nullptr &&
-          LOCAL_PORT_TYPE ==
-              ep1_ch1()->selected_connection()->remote_candidate().type(),
-      kDefaultTimeout, clock);
-  ASSERT_TRUE_SIMULATED_WAIT(ep2_ch1()->selected_connection() != nullptr,
-                             kDefaultTimeout, clock);
+  ASSERT_THAT(
+      webrtc::WaitUntil(
+          [&] {
+            return ep1_ch1()->selected_connection() != nullptr &&
+                   ep1_ch1()
                       ->selected_connection()
                       ->local_candidate()
                       .is_local() &&
                   ep2_ch1()->selected_connection() != nullptr &&
                   ep1_ch1()
                       ->selected_connection()
                       ->remote_candidate()
                       .is_local();
+          },
+          IsTrue(),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
+  ASSERT_THAT(
+      webrtc::WaitUntil(
+          [&] { return ep2_ch1()->selected_connection(); }, Ne(nullptr),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());

   // Test that we have a host-host candidate pair selected and the number of
   // candidates signaled to the remote peer stays the same.
   auto test_invariants = [this]() {
-    EXPECT_EQ(LOCAL_PORT_TYPE,
-              ep1_ch1()->selected_connection()->local_candidate().type());
-    EXPECT_EQ(LOCAL_PORT_TYPE,
-              ep1_ch1()->selected_connection()->remote_candidate().type());
+    EXPECT_TRUE(ep1_ch1()->selected_connection()->local_candidate().is_local());
+    EXPECT_TRUE(
+        ep1_ch1()->selected_connection()->remote_candidate().is_local());
     EXPECT_THAT(ep2_ch1()->remote_candidates(), SizeIs(3));
   };

   test_invariants();

   // Set a more restrictive candidate filter at ep1.
-  ep1->allocator_->SetCandidateFilter(CF_HOST | CF_REFLEXIVE);
+  ep1->allocator_->SetCandidateFilter(webrtc::CF_HOST | webrtc::CF_REFLEXIVE);
   SIMULATED_WAIT(false, kDefaultTimeout, clock);
   test_invariants();

-  ep1->allocator_->SetCandidateFilter(CF_HOST);
+  ep1->allocator_->SetCandidateFilter(webrtc::CF_HOST);
   SIMULATED_WAIT(false, kDefaultTimeout, clock);
   test_invariants();

-  ep1->allocator_->SetCandidateFilter(CF_NONE);
+  ep1->allocator_->SetCandidateFilter(webrtc::CF_NONE);
   SIMULATED_WAIT(false, kDefaultTimeout, clock);
   test_invariants();
   DestroyChannels();
@@ -6041,137 +6874,155 @@ TEST_F(P2PTransportChannelTest,
 // i.e surface_ice_candidates_on_ice_transport_type_changed requires
 // coordination outside of webrtc to function properly.
 TEST_F(P2PTransportChannelTest, SurfaceRequiresCoordination) {
-  webrtc::test::ScopedKeyValueConfig field_trials(
-      field_trials_,
-      "WebRTC-IceFieldTrials/skip_relay_to_non_relay_connections:true/");
-  rtc::ScopedFakeClock clock;
-
-  ConfigureEndpoints(
-      OPEN, OPEN,
-      kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET,
-      kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET);
+  ScopedFakeClock clock;
+  const Environment env = CreateEnvironment(FieldTrials::CreateNoGlobal(
+      "WebRTC-IceFieldTrials/skip_relay_to_non_relay_connections:true/"));
+
+  ConfigureEndpoints(env, OPEN, OPEN,
+                     webrtc::kDefaultPortAllocatorFlags |
+                         webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET,
+                     webrtc::kDefaultPortAllocatorFlags |
+                         webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET);
   auto* ep1 = GetEndpoint(0);
   auto* ep2 = GetEndpoint(1);
-  ep1->allocator_->SetCandidateFilter(CF_RELAY);
-  ep2->allocator_->SetCandidateFilter(CF_ALL);
+  ep1->allocator_->SetCandidateFilter(webrtc::CF_RELAY);
+  ep2->allocator_->SetCandidateFilter(webrtc::CF_ALL);
   // Enable continual gathering and also resurfacing gathered candidates upon
   // the candidate filter changed in the ICE configuration.
-  IceConfig ice_config = CreateIceConfig(1000, GATHER_CONTINUALLY);
+  IceConfig ice_config = CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY);
   ice_config.surface_ice_candidates_on_ice_transport_type_changed = true;
   // Pause candidates gathering so we can gather all types of candidates. See
   // P2PTransportChannel::OnConnectionStateChange, where we would stop the
   // gathering when we have a strongly connected candidate pair.
   PauseCandidates(0);
   PauseCandidates(1);
-  CreateChannels(ice_config, ice_config);
+  CreateChannels(env, ice_config, ice_config);

   // On the caller we only have relay,
   // on the callee we have host, srflx and relay.
-  EXPECT_TRUE_SIMULATED_WAIT(ep1->saved_candidates_.size() == 1u,
-                             kDefaultTimeout, clock);
-  EXPECT_TRUE_SIMULATED_WAIT(ep2->saved_candidates_.size() == 3u,
-                             kDefaultTimeout, clock);
+  EXPECT_THAT(
+      webrtc::WaitUntil(
+          [&] { return ep1->saved_candidates_.size(); }, Eq(1u),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
+  EXPECT_THAT(
+      webrtc::WaitUntil(
+          [&] { return ep2->saved_candidates_.size(); }, Eq(3u),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());

   ResumeCandidates(0);
   ResumeCandidates(1);

-  ASSERT_TRUE_SIMULATED_WAIT(
-      ep1_ch1()->selected_connection() != nullptr &&
-          RELAY_PORT_TYPE ==
-              ep1_ch1()->selected_connection()->local_candidate().type() &&
-          ep2_ch1()->selected_connection() != nullptr &&
-          RELAY_PORT_TYPE ==
-              ep1_ch1()->selected_connection()->remote_candidate().type(),
-      kDefaultTimeout, clock);
-  ASSERT_TRUE_SIMULATED_WAIT(ep2_ch1()->selected_connection() != nullptr,
-                             kDefaultTimeout, clock);
+  ASSERT_THAT(
+      webrtc::WaitUntil(
+          [&] {
+            return ep1_ch1()->selected_connection() != nullptr &&
+                   ep1_ch1()
                       ->selected_connection()
                       ->local_candidate()
                       .is_relay() &&
                   ep2_ch1()->selected_connection() != nullptr &&
                   ep1_ch1()
                       ->selected_connection()
                       ->remote_candidate()
                       .is_relay();
+          },
+          IsTrue(),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());
+  ASSERT_THAT(
+      webrtc::WaitUntil(
+          [&] { return ep2_ch1()->selected_connection(); }, Ne(nullptr),
+          {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}),
+      webrtc::IsRtcOk());

   // Wait until the callee discards it's candidates
   // since they don't manage to connect.
SIMULATED_WAIT(false, 300000, clock); // And then loosen caller candidate filter. - ep1->allocator_->SetCandidateFilter(CF_ALL); + ep1->allocator_->SetCandidateFilter(webrtc::CF_ALL); SIMULATED_WAIT(false, kDefaultTimeout, clock); // No p2p connection will be made, it will remain on relay. EXPECT_TRUE(ep1_ch1()->selected_connection() != nullptr && - RELAY_PORT_TYPE == - ep1_ch1()->selected_connection()->local_candidate().type() && + ep1_ch1()->selected_connection()->local_candidate().is_relay() && ep2_ch1()->selected_connection() != nullptr && - RELAY_PORT_TYPE == - ep1_ch1()->selected_connection()->remote_candidate().type()); + ep1_ch1()->selected_connection()->remote_candidate().is_relay()); DestroyChannels(); } TEST_F(P2PTransportChannelPingTest, TestInitialSelectDampening0) { - webrtc::test::ScopedKeyValueConfig field_trials( - field_trials_, "WebRTC-IceFieldTrials/initial_select_dampening:0/"); - constexpr int kMargin = 10; - rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); + ScopedFakeClock clock; + clock.AdvanceTime(TimeDelta::Seconds(1)); + const Environment env = CreateEnvironment(FieldTrials::CreateNoGlobal( + "WebRTC-IceFieldTrials/initial_select_dampening:0/")); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("test channel", 1, &pa, &field_trials); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("test channel", 1, &pa, &env.field_trials()); PrepareChannel(&ch); ch.SetIceConfig(ch.config()); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 100)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 100)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1, &clock); ASSERT_TRUE(conn1 != nullptr); EXPECT_EQ(nullptr, ch.selected_connection()); conn1->ReceivedPingResponse(LOW_RTT, "id"); // Becomes writable and receiving // It shall not be selected until 0ms has passed....i.e it should be connected // directly. 
- EXPECT_EQ_SIMULATED_WAIT(conn1, ch.selected_connection(), kMargin, clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return ch.selected_connection(); }, Eq(conn1), + {.timeout = TimeDelta::Millis(kMargin), .clock = &clock}), + webrtc::IsRtcOk()); } TEST_F(P2PTransportChannelPingTest, TestInitialSelectDampening) { - webrtc::test::ScopedKeyValueConfig field_trials( - field_trials_, "WebRTC-IceFieldTrials/initial_select_dampening:100/"); - constexpr int kMargin = 10; - rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); + ScopedFakeClock clock; + clock.AdvanceTime(TimeDelta::Seconds(1)); + const Environment env = CreateEnvironment(FieldTrials::CreateNoGlobal( + "WebRTC-IceFieldTrials/initial_select_dampening:100/")); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("test channel", 1, &pa, &field_trials); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("test channel", 1, &pa, &env.field_trials()); PrepareChannel(&ch); ch.SetIceConfig(ch.config()); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 100)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 100)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1, &clock); ASSERT_TRUE(conn1 != nullptr); EXPECT_EQ(nullptr, ch.selected_connection()); conn1->ReceivedPingResponse(LOW_RTT, "id"); // Becomes writable and receiving // It shall not be selected until 100ms has passed. SIMULATED_WAIT(conn1 == ch.selected_connection(), 100 - kMargin, clock); - EXPECT_EQ_SIMULATED_WAIT(conn1, ch.selected_connection(), 2 * kMargin, clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return ch.selected_connection(); }, Eq(conn1), + {.timeout = TimeDelta::Millis(2 * kMargin), .clock = &clock}), + webrtc::IsRtcOk()); } TEST_F(P2PTransportChannelPingTest, TestInitialSelectDampeningPingReceived) { - webrtc::test::ScopedKeyValueConfig field_trials( - field_trials_, - "WebRTC-IceFieldTrials/initial_select_dampening_ping_received:100/"); - constexpr int kMargin = 10; - rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); + ScopedFakeClock clock; + clock.AdvanceTime(TimeDelta::Seconds(1)); + const Environment env = CreateEnvironment(FieldTrials::CreateNoGlobal( + "WebRTC-IceFieldTrials/initial_select_dampening_ping_received:100/")); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("test channel", 1, &pa, &field_trials); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("test channel", 1, &pa, &env.field_trials()); PrepareChannel(&ch); ch.SetIceConfig(ch.config()); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 100)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 100)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1, &clock); ASSERT_TRUE(conn1 != nullptr); EXPECT_EQ(nullptr, ch.selected_connection()); @@ -6179,28 +7030,29 @@ TEST_F(P2PTransportChannelPingTest, TestInitialSelectDampeningPingReceived) { conn1->ReceivedPing("id1"); // // It shall not be selected until 100ms has passed. 
SIMULATED_WAIT(conn1 == ch.selected_connection(), 100 - kMargin, clock); - EXPECT_EQ_SIMULATED_WAIT(conn1, ch.selected_connection(), 2 * kMargin, clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return ch.selected_connection(); }, Eq(conn1), + {.timeout = TimeDelta::Millis(2 * kMargin), .clock = &clock}), + webrtc::IsRtcOk()); } TEST_F(P2PTransportChannelPingTest, TestInitialSelectDampeningBoth) { - webrtc::test::ScopedKeyValueConfig field_trials( - field_trials_, + constexpr int kMargin = 10; + ScopedFakeClock clock; + clock.AdvanceTime(TimeDelta::Seconds(1)); + const Environment env = CreateEnvironment(FieldTrials::CreateNoGlobal( "WebRTC-IceFieldTrials/" "initial_select_dampening:100,initial_select_dampening_ping_received:" - "50/"); - - constexpr int kMargin = 10; - rtc::ScopedFakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::Seconds(1)); + "50/")); - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - P2PTransportChannel ch("test channel", 1, &pa, &field_trials); + FakePortAllocator pa(env, ss()); + P2PTransportChannel ch("test channel", 1, &pa, &env.field_trials()); PrepareChannel(&ch); ch.SetIceConfig(ch.config()); ch.MaybeStartGathering(); - ch.AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 100)); + ch.AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 100)); Connection* conn1 = WaitForConnectionTo(&ch, "1.1.1.1", 1, &clock); ASSERT_TRUE(conn1 != nullptr); EXPECT_EQ(nullptr, ch.selected_connection()); @@ -6209,55 +7061,53 @@ TEST_F(P2PTransportChannelPingTest, TestInitialSelectDampeningBoth) { SIMULATED_WAIT(conn1 == ch.selected_connection(), 50 - kMargin, clock); // Now receiving ping and new timeout should kick in. conn1->ReceivedPing("id1"); // - EXPECT_EQ_SIMULATED_WAIT(conn1, ch.selected_connection(), 2 * kMargin, clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return ch.selected_connection(); }, Eq(conn1), + {.timeout = TimeDelta::Millis(2 * kMargin), .clock = &clock}), + webrtc::IsRtcOk()); } TEST(P2PTransportChannelIceControllerTest, InjectIceController) { - webrtc::test::ScopedKeyValueConfig field_trials; - std::unique_ptr socket_server = - rtc::CreateDefaultSocketServer(); - rtc::AutoSocketServerThread main_thread(socket_server.get()); - rtc::BasicPacketSocketFactory packet_socket_factory(socket_server.get()); + const Environment env = CreateEnvironment(); + std::unique_ptr socket_server = + webrtc::CreateDefaultSocketServer(); + AutoSocketServerThread main_thread(socket_server.get()); MockIceControllerFactory factory; - FakePortAllocator pa(rtc::Thread::Current(), &packet_socket_factory, - &field_trials); + FakePortAllocator pa(env, socket_server.get()); EXPECT_CALL(factory, RecordIceControllerCreated()).Times(1); - webrtc::IceTransportInit init; + IceTransportInit init; init.set_port_allocator(&pa); init.set_ice_controller_factory(&factory); - init.set_field_trials(&field_trials); + init.set_field_trials(&env.field_trials()); auto dummy = P2PTransportChannel::Create("transport_name", /* component= */ 77, std::move(init)); } TEST(P2PTransportChannel, InjectActiveIceController) { - webrtc::test::ScopedKeyValueConfig field_trials; - std::unique_ptr socket_server = - rtc::CreateDefaultSocketServer(); - rtc::AutoSocketServerThread main_thread(socket_server.get()); - rtc::BasicPacketSocketFactory packet_socket_factory(socket_server.get()); + const Environment env = CreateEnvironment(); + std::unique_ptr socket_server = + webrtc::CreateDefaultSocketServer(); + 
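These dampening tests now build a per-test Environment from FieldTrials::CreateNoGlobal instead of installing a ScopedKeyValueConfig, and hand &env.field_trials() to the objects under test. A minimal sketch of that setup; the include paths are assumptions, the calls themselves mirror the tests above.

    #include <memory>

    #include "api/environment/environment.h"
    #include "api/environment/environment_factory.h"
    #include "api/field_trials.h"

    // Creates an Environment whose field trials are scoped to the test instead
    // of being registered globally.
    webrtc::Environment MakeDampeningEnv() {
      return webrtc::CreateEnvironment(webrtc::FieldTrials::CreateNoGlobal(
          "WebRTC-IceFieldTrials/initial_select_dampening:100/"));
    }

Anything that previously took a FieldTrialsView pointer, such as the P2PTransportChannel constructor in these tests, now receives &env.field_trials().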
AutoSocketServerThread main_thread(socket_server.get()); MockActiveIceControllerFactory factory; - FakePortAllocator pa(rtc::Thread::Current(), &packet_socket_factory, - &field_trials); + FakePortAllocator pa(env, socket_server.get()); EXPECT_CALL(factory, RecordActiveIceControllerCreated()).Times(1); - webrtc::IceTransportInit init; + IceTransportInit init; init.set_port_allocator(&pa); init.set_active_ice_controller_factory(&factory); - init.set_field_trials(&field_trials); + init.set_field_trials(&env.field_trials()); auto dummy = P2PTransportChannel::Create("transport_name", /* component= */ 77, std::move(init)); } -class ForgetLearnedStateController : public cricket::BasicIceController { +class ForgetLearnedStateController : public BasicIceController { public: - explicit ForgetLearnedStateController( - const cricket::IceControllerFactoryArgs& args) - : cricket::BasicIceController(args) {} + explicit ForgetLearnedStateController(const IceControllerFactoryArgs& args) + : BasicIceController(args) {} SwitchResult SortAndSwitchConnection(IceSwitchReason reason) override { - auto result = cricket::BasicIceController::SortAndSwitchConnection(reason); + auto result = BasicIceController::SortAndSwitchConnection(reason); if (forget_connnection_) { result.connections_to_forget_state_on.push_back(forget_connnection_); forget_connnection_ = nullptr; @@ -6276,10 +7126,10 @@ class ForgetLearnedStateController : public cricket::BasicIceController { }; class ForgetLearnedStateControllerFactory - : public cricket::IceControllerFactoryInterface { + : public IceControllerFactoryInterface { public: - std::unique_ptr Create( - const cricket::IceControllerFactoryArgs& args) override { + std::unique_ptr Create( + const IceControllerFactoryArgs& args) override { auto controller = std::make_unique(args); // Keep a pointer to allow modifying calls. // Must not be used after the p2ptransportchannel has been destructed. @@ -6292,20 +7142,22 @@ class ForgetLearnedStateControllerFactory }; TEST_F(P2PTransportChannelPingTest, TestForgetLearnedState) { + const Environment env = CreateEnvironment(); ForgetLearnedStateControllerFactory factory; - FakePortAllocator pa(rtc::Thread::Current(), packet_socket_factory(), - &field_trials_); - webrtc::IceTransportInit init; + FakePortAllocator pa(env, ss()); + IceTransportInit init; init.set_port_allocator(&pa); init.set_ice_controller_factory(&factory); - init.set_field_trials(&field_trials_); + init.set_field_trials(&env.field_trials()); auto ch = P2PTransportChannel::Create("ping sufficiently", 1, std::move(init)); PrepareChannel(ch.get()); ch->MaybeStartGathering(); - ch->AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "1.1.1.1", 1, 1)); - ch->AddRemoteCandidate(CreateUdpCandidate(LOCAL_PORT_TYPE, "2.2.2.2", 2, 2)); + ch->AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "1.1.1.1", 1, 1)); + ch->AddRemoteCandidate( + CreateUdpCandidate(IceCandidateType::kHost, "2.2.2.2", 2, 2)); Connection* conn1 = WaitForConnectionTo(ch.get(), "1.1.1.1", 1); Connection* conn2 = WaitForConnectionTo(ch.get(), "2.2.2.2", 2); @@ -6314,7 +7166,10 @@ TEST_F(P2PTransportChannelPingTest, TestForgetLearnedState) { // Wait for conn1 to be selected. 
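The two injection tests above thread a custom controller factory into the channel through IceTransportInit rather than subclassing the channel. The sketch below condenses that wiring; the include paths and the FakePortAllocator helper are assumptions based on the tests in this file, and `controller_factory` stands in for any IceControllerFactoryInterface implementation such as ForgetLearnedStateControllerFactory.

    #include <memory>
    #include <utility>

    #include "api/environment/environment.h"
    #include "api/ice_transport_interface.h"
    #include "p2p/base/ice_controller_factory_interface.h"
    #include "p2p/base/p2p_transport_channel.h"
    #include "p2p/test/fake_port_allocator.h"

    // Builds a channel whose connection sorting is delegated to the injected
    // IceControllerFactoryInterface instead of the default BasicIceController.
    std::unique_ptr<webrtc::P2PTransportChannel> CreateChannelWithController(
        webrtc::IceControllerFactoryInterface* controller_factory,
        webrtc::FakePortAllocator* allocator,
        const webrtc::Environment& env) {
      webrtc::IceTransportInit init;
      init.set_port_allocator(allocator);
      init.set_ice_controller_factory(controller_factory);
      init.set_field_trials(&env.field_trials());
      return webrtc::P2PTransportChannel::Create("transport_name",
                                                 /*component=*/1,
                                                 std::move(init));
    }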
conn1->ReceivedPingResponse(LOW_RTT, "id"); - EXPECT_EQ_WAIT(conn1, ch->selected_connection(), kMediumTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch->selected_connection(); }, Eq(conn1), + {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); conn2->ReceivedPingResponse(LOW_RTT, "id"); EXPECT_TRUE(conn2->writable()); @@ -6326,84 +7181,89 @@ TEST_F(P2PTransportChannelPingTest, TestForgetLearnedState) { // We don't have a mock Connection, so verify this by checking that it // is no longer writable. - EXPECT_EQ_WAIT(false, conn2->writable(), kMediumTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return conn2->writable(); }, IsFalse(), + {.timeout = TimeDelta::Millis(kMediumTimeout)}), + webrtc::IsRtcOk()); } TEST_F(P2PTransportChannelTest, DisableDnsLookupsWithTransportPolicyRelay) { - ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); auto* ep1 = GetEndpoint(0); - ep1->allocator_->SetCandidateFilter(CF_RELAY); + ep1->allocator_->SetCandidateFilter(webrtc::CF_RELAY); - std::unique_ptr mock_async_resolver = - std::make_unique(); + std::unique_ptr mock_async_resolver = + std::make_unique(); // This test expects resolution to not be started. EXPECT_CALL(*mock_async_resolver, Start(_, _)).Times(0); - webrtc::MockAsyncDnsResolverFactory mock_async_resolver_factory; + MockAsyncDnsResolverFactory mock_async_resolver_factory; ON_CALL(mock_async_resolver_factory, Create()) .WillByDefault( [&mock_async_resolver]() { return std::move(mock_async_resolver); }); ep1->async_dns_resolver_factory_ = &mock_async_resolver_factory; - CreateChannels(); + CreateChannels(env); ep1_ch1()->AddRemoteCandidate( - CreateUdpCandidate(LOCAL_PORT_TYPE, "hostname.test", 1, 100)); + CreateUdpCandidate(IceCandidateType::kHost, "hostname.test", 1, 100)); DestroyChannels(); } TEST_F(P2PTransportChannelTest, DisableDnsLookupsWithTransportPolicyNone) { - ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); auto* ep1 = GetEndpoint(0); - ep1->allocator_->SetCandidateFilter(CF_NONE); + ep1->allocator_->SetCandidateFilter(webrtc::CF_NONE); - std::unique_ptr mock_async_resolver = - std::make_unique(); + std::unique_ptr mock_async_resolver = + std::make_unique(); // This test expects resolution to not be started. 
EXPECT_CALL(*mock_async_resolver, Start(_, _)).Times(0); - webrtc::MockAsyncDnsResolverFactory mock_async_resolver_factory; + MockAsyncDnsResolverFactory mock_async_resolver_factory; ON_CALL(mock_async_resolver_factory, Create()) .WillByDefault( [&mock_async_resolver]() { return std::move(mock_async_resolver); }); ep1->async_dns_resolver_factory_ = &mock_async_resolver_factory; - CreateChannels(); + CreateChannels(env); ep1_ch1()->AddRemoteCandidate( - CreateUdpCandidate(LOCAL_PORT_TYPE, "hostname.test", 1, 100)); + CreateUdpCandidate(IceCandidateType::kHost, "hostname.test", 1, 100)); DestroyChannels(); } TEST_F(P2PTransportChannelTest, EnableDnsLookupsWithTransportPolicyNoHost) { - ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); + const Environment env = CreateEnvironment(); + ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); auto* ep1 = GetEndpoint(0); - ep1->allocator_->SetCandidateFilter(CF_ALL & ~CF_HOST); + ep1->allocator_->SetCandidateFilter(webrtc::CF_ALL & ~webrtc::CF_HOST); - std::unique_ptr mock_async_resolver = - std::make_unique(); + std::unique_ptr mock_async_resolver = + std::make_unique(); bool lookup_started = false; EXPECT_CALL(*mock_async_resolver, Start(_, _)) .WillOnce(Assign(&lookup_started, true)); - webrtc::MockAsyncDnsResolverFactory mock_async_resolver_factory; + MockAsyncDnsResolverFactory mock_async_resolver_factory; EXPECT_CALL(mock_async_resolver_factory, Create()) .WillOnce( [&mock_async_resolver]() { return std::move(mock_async_resolver); }); ep1->async_dns_resolver_factory_ = &mock_async_resolver_factory; - CreateChannels(); + CreateChannels(env); ep1_ch1()->AddRemoteCandidate( - CreateUdpCandidate(LOCAL_PORT_TYPE, "hostname.test", 1, 100)); + CreateUdpCandidate(IceCandidateType::kHost, "hostname.test", 1, 100)); EXPECT_TRUE(lookup_started); @@ -6420,34 +7280,42 @@ TEST_P(GatherAfterConnectedTest, GatherAfterConnected) { const std::string field_trial = std::string("WebRTC-IceFieldTrials/stop_gather_on_strongly_connected:") + (stop_gather_on_strongly_connected ? "true/" : "false/"); - webrtc::test::ScopedKeyValueConfig field_trials(field_trials_, field_trial); - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = + CreateEnvironment(FieldTrials::CreateNoGlobal(field_trial)); // Use local + relay - constexpr uint32_t flags = - kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET | - PORTALLOCATOR_DISABLE_STUN | PORTALLOCATOR_DISABLE_TCP; - ConfigureEndpoints(OPEN, OPEN, flags, flags); + constexpr uint32_t flags = webrtc::kDefaultPortAllocatorFlags | + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET | + webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_TCP; + ConfigureEndpoints(env, OPEN, OPEN, flags, flags); auto* ep1 = GetEndpoint(0); auto* ep2 = GetEndpoint(1); - ep1->allocator_->SetCandidateFilter(CF_ALL); - ep2->allocator_->SetCandidateFilter(CF_ALL); + ep1->allocator_->SetCandidateFilter(webrtc::CF_ALL); + ep2->allocator_->SetCandidateFilter(webrtc::CF_ALL); // Use step delay 3s which is long enough for // connection to be established before managing to gather relay candidates. 
int delay = 3000; SetAllocationStepDelay(0, delay); SetAllocationStepDelay(1, delay); - IceConfig ice_config = CreateIceConfig(1000, GATHER_CONTINUALLY); - CreateChannels(ice_config, ice_config); + IceConfig ice_config = CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY); + CreateChannels(env, ice_config, ice_config); PauseCandidates(0); PauseCandidates(1); // We have gathered host candidates but not relay. - ASSERT_TRUE_SIMULATED_WAIT(ep1->saved_candidates_.size() == 1u && - ep2->saved_candidates_.size() == 1u, - kDefaultTimeout, clock); + ASSERT_THAT( + webrtc::WaitUntil( + [&] { + return ep1->saved_candidates_.size() == 1u && + ep2->saved_candidates_.size() == 1u; + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); ResumeCandidates(0); ResumeCandidates(1); @@ -6455,15 +7323,27 @@ TEST_P(GatherAfterConnectedTest, GatherAfterConnected) { PauseCandidates(0); PauseCandidates(1); - ASSERT_TRUE_SIMULATED_WAIT(ep1_ch1()->remote_candidates().size() == 1 && - ep2_ch1()->remote_candidates().size() == 1, - kDefaultTimeout, clock); - - ASSERT_TRUE_SIMULATED_WAIT( - ep1_ch1()->selected_connection() && ep2_ch1()->selected_connection(), - kDefaultTimeout, clock); - - clock.AdvanceTime(webrtc::TimeDelta::Millis(10 * delay)); + ASSERT_THAT( + webrtc::WaitUntil( + [&] { + return ep1_ch1()->remote_candidates().size() == 1 && + ep2_ch1()->remote_candidates().size() == 1; + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); + + ASSERT_THAT( + webrtc::WaitUntil( + [&] { + return ep1_ch1()->selected_connection() && + ep2_ch1()->selected_connection(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); + + clock.AdvanceTime(TimeDelta::Millis(10 * delay)); if (stop_gather_on_strongly_connected) { // The relay candidates gathered has not been propagated to channel. @@ -6481,35 +7361,44 @@ TEST_P(GatherAfterConnectedTest, GatherAfterConnectedMultiHomed) { const std::string field_trial = std::string("WebRTC-IceFieldTrials/stop_gather_on_strongly_connected:") + (stop_gather_on_strongly_connected ? "true/" : "false/"); - webrtc::test::ScopedKeyValueConfig field_trials(field_trials_, field_trial); - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = + CreateEnvironment(FieldTrials::CreateNoGlobal(field_trial)); + // Use local + relay - constexpr uint32_t flags = - kDefaultPortAllocatorFlags | PORTALLOCATOR_ENABLE_SHARED_SOCKET | - PORTALLOCATOR_DISABLE_STUN | PORTALLOCATOR_DISABLE_TCP; + constexpr uint32_t flags = webrtc::kDefaultPortAllocatorFlags | + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET | + webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_TCP; AddAddress(0, kAlternateAddrs[0]); - ConfigureEndpoints(OPEN, OPEN, flags, flags); + ConfigureEndpoints(env, OPEN, OPEN, flags, flags); auto* ep1 = GetEndpoint(0); auto* ep2 = GetEndpoint(1); - ep1->allocator_->SetCandidateFilter(CF_ALL); - ep2->allocator_->SetCandidateFilter(CF_ALL); + ep1->allocator_->SetCandidateFilter(webrtc::CF_ALL); + ep2->allocator_->SetCandidateFilter(webrtc::CF_ALL); // Use step delay 3s which is long enough for // connection to be established before managing to gather relay candidates. 
int delay = 3000; SetAllocationStepDelay(0, delay); SetAllocationStepDelay(1, delay); - IceConfig ice_config = CreateIceConfig(1000, GATHER_CONTINUALLY); - CreateChannels(ice_config, ice_config); + IceConfig ice_config = CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY); + CreateChannels(env, ice_config, ice_config); PauseCandidates(0); PauseCandidates(1); // We have gathered host candidates but not relay. - ASSERT_TRUE_SIMULATED_WAIT(ep1->saved_candidates_.size() == 2u && - ep2->saved_candidates_.size() == 1u, - kDefaultTimeout, clock); + ASSERT_THAT( + webrtc::WaitUntil( + [&] { + return ep1->saved_candidates_.size() == 2u && + ep2->saved_candidates_.size() == 1u; + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); ResumeCandidates(0); ResumeCandidates(1); @@ -6517,15 +7406,27 @@ TEST_P(GatherAfterConnectedTest, GatherAfterConnectedMultiHomed) { PauseCandidates(0); PauseCandidates(1); - ASSERT_TRUE_SIMULATED_WAIT(ep1_ch1()->remote_candidates().size() == 1 && - ep2_ch1()->remote_candidates().size() == 2, - kDefaultTimeout, clock); - - ASSERT_TRUE_SIMULATED_WAIT( - ep1_ch1()->selected_connection() && ep2_ch1()->selected_connection(), - kDefaultTimeout, clock); - - clock.AdvanceTime(webrtc::TimeDelta::Millis(10 * delay)); + ASSERT_THAT( + webrtc::WaitUntil( + [&] { + return ep1_ch1()->remote_candidates().size() == 1 && + ep2_ch1()->remote_candidates().size() == 2; + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); + + ASSERT_THAT( + webrtc::WaitUntil( + [&] { + return ep1_ch1()->selected_connection() && + ep2_ch1()->selected_connection(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); + + clock.AdvanceTime(TimeDelta::Millis(10 * delay)); if (stop_gather_on_strongly_connected) { // The relay candidates gathered has not been propagated to channel. @@ -6541,25 +7442,32 @@ TEST_P(GatherAfterConnectedTest, GatherAfterConnectedMultiHomed) { // Tests no candidates are generated with old ice ufrag/passwd after an ice // restart even if continual gathering is enabled. TEST_F(P2PTransportChannelTest, TestIceNoOldCandidatesAfterIceRestart) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; + const Environment env = CreateEnvironment(); AddAddress(0, kAlternateAddrs[0]); - ConfigureEndpoints(OPEN, OPEN, kDefaultPortAllocatorFlags, - kDefaultPortAllocatorFlags); + ConfigureEndpoints(env, OPEN, OPEN, webrtc::kDefaultPortAllocatorFlags, + webrtc::kDefaultPortAllocatorFlags); // gathers continually. 
- IceConfig config = CreateIceConfig(1000, GATHER_CONTINUALLY); - CreateChannels(config, config); + IceConfig config = CreateIceConfig(1000, webrtc::GATHER_CONTINUALLY); + CreateChannels(env, config, config); - EXPECT_TRUE_SIMULATED_WAIT(CheckConnected(ep1_ch1(), ep2_ch1()), - kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); PauseCandidates(0); ep1_ch1()->SetIceParameters(kIceParams[3]); ep1_ch1()->MaybeStartGathering(); - EXPECT_TRUE_SIMULATED_WAIT(GetEndpoint(0)->saved_candidates_.size() > 0, - kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return GetEndpoint(0)->saved_candidates_.size(); }, Gt(0), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); for (const auto& cd : GetEndpoint(0)->saved_candidates_) { EXPECT_EQ(cd.candidate.username(), kIceUfrag[3]); @@ -6568,4 +7476,71 @@ TEST_F(P2PTransportChannelTest, TestIceNoOldCandidatesAfterIceRestart) { DestroyChannels(); } -} // namespace cricket +class P2PTransportChannelTestDtlsInStun : public P2PTransportChannelTestBase { + public: + P2PTransportChannelTestDtlsInStun() : P2PTransportChannelTestBase() { + // DTLS server hello done message as test data. + std::vector dtls_server_hello = { + 0x16, 0xfe, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x0c, 0x0e, 0x00, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + }; + pending_packet_.SetData(dtls_server_hello); + } + + protected: + void Run(bool ep1_support, bool ep2_support) { + const Environment env = CreateEnvironment(); + CreatePortAllocators(env); + IceConfig ep1_config; + ep1_config.dtls_handshake_in_stun = ep1_support; + IceConfig ep2_config; + ep2_config.dtls_handshake_in_stun = ep2_support; + CreateChannels(env, ep1_config, ep2_config); + if (ep1_support) { + ep1_ch1()->SetDtlsStunPiggybackCallbacks(DtlsStunPiggybackCallbacks( + [&](auto type) { return data_to_piggyback_func(type); }, + [&](auto data, auto ack) { piggyback_data_received(data, ack); })); + } + if (ep2_support) { + ep2_ch1()->SetDtlsStunPiggybackCallbacks(DtlsStunPiggybackCallbacks( + [&](auto type) { return data_to_piggyback_func(type); }, + [&](auto data, auto ack) { piggyback_data_received(data, ack); })); + } + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnected(ep1_ch1(), ep2_ch1()); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock_}), + webrtc::IsRtcOk()); + DestroyChannels(); + } + + std::pair, std::optional> + data_to_piggyback_func(StunMessageType type) { + return make_pair(absl::string_view(pending_packet_), std::nullopt); + } + + void piggyback_data_received(const StunByteStringAttribute* data, + const StunByteStringAttribute* ack) {} + + ScopedFakeClock clock_; + Buffer pending_packet_; +}; + +TEST_F(P2PTransportChannelTestDtlsInStun, NotSupportedByEither) { + Run(false, false); +} + +TEST_F(P2PTransportChannelTestDtlsInStun, SupportedByClient) { + Run(true, false); +} + +TEST_F(P2PTransportChannelTestDtlsInStun, SupportedByServer) { + Run(false, true); +} + +TEST_F(P2PTransportChannelTestDtlsInStun, SupportedByBoth) { + Run(true, true); +} + +} // namespace webrtc diff --git a/p2p/base/packet_transport_internal.cc b/p2p/base/packet_transport_internal.cc index 0904cb2d3e..5f8017d8ab 100644 --- a/p2p/base/packet_transport_internal.cc +++ b/p2p/base/packet_transport_internal.cc @@ -10,18 +10,63 @@ #include 
"p2p/base/packet_transport_internal.h" -namespace rtc { +#include +#include + +#include "absl/functional/any_invocable.h" +#include "api/sequence_checker.h" +#include "rtc_base/checks.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network_route.h" +#include "rtc_base/socket.h" + +namespace webrtc { PacketTransportInternal::PacketTransportInternal() = default; PacketTransportInternal::~PacketTransportInternal() = default; -bool PacketTransportInternal::GetOption(rtc::Socket::Option opt, int* value) { +bool PacketTransportInternal::GetOption(Socket::Option /* opt */, + int* /* value */) { return false; } -absl::optional PacketTransportInternal::network_route() const { - return absl::optional(); +std::optional PacketTransportInternal::network_route() const { + return std::optional(); +} + +void PacketTransportInternal::RegisterReceivedPacketCallback( + void* id, + absl::AnyInvocable callback) { + RTC_DCHECK_RUN_ON(&network_checker_); + received_packet_callback_list_.AddReceiver(id, std::move(callback)); +} + +void PacketTransportInternal::DeregisterReceivedPacketCallback(void* id) { + RTC_DCHECK_RUN_ON(&network_checker_); + received_packet_callback_list_.RemoveReceivers(id); +} + +void PacketTransportInternal::SetOnCloseCallback( + absl::AnyInvocable callback) { + RTC_DCHECK_RUN_ON(&network_checker_); + RTC_DCHECK(!on_close_ || !callback); + on_close_ = std::move(callback); +} + +void PacketTransportInternal::NotifyPacketReceived( + const ReceivedIpPacket& packet) { + RTC_DCHECK_RUN_ON(&network_checker_); + received_packet_callback_list_.Send(this, packet); +} + +void PacketTransportInternal::NotifyOnClose() { + RTC_DCHECK_RUN_ON(&network_checker_); + if (on_close_) { + std::move(on_close_)(); + on_close_ = nullptr; + } } -} // namespace rtc +} // namespace webrtc diff --git a/p2p/base/packet_transport_internal.h b/p2p/base/packet_transport_internal.h index 2ca47d533d..a326ce7e5c 100644 --- a/p2p/base/packet_transport_internal.h +++ b/p2p/base/packet_transport_internal.h @@ -11,20 +11,22 @@ #ifndef P2P_BASE_PACKET_TRANSPORT_INTERNAL_H_ #define P2P_BASE_PACKET_TRANSPORT_INTERNAL_H_ +#include +#include #include -#include -#include "absl/types/optional.h" -#include "p2p/base/port.h" +#include "absl/functional/any_invocable.h" +#include "api/sequence_checker.h" #include "rtc_base/async_packet_socket.h" +#include "rtc_base/callback_list.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/network_route.h" #include "rtc_base/socket.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread_annotations.h" -namespace rtc { -struct PacketOptions; -struct SentPacket; +namespace webrtc { class RTC_EXPORT PacketTransportInternal : public sigslot::has_slots<> { public: @@ -47,23 +49,23 @@ class RTC_EXPORT PacketTransportInternal : public sigslot::has_slots<> { // TODO(johan): Remove the default argument once channel code is updated. virtual int SendPacket(const char* data, size_t len, - const rtc::PacketOptions& options, + const AsyncSocketPacketOptions& options, int flags = 0) = 0; // Sets a socket option. Note that not all options are // supported by all transport types. - virtual int SetOption(rtc::Socket::Option opt, int value) = 0; + virtual int SetOption(Socket::Option opt, int value) = 0; // TODO(pthatcher): Once Chrome's MockPacketTransportInterface implements // this, remove the default implementation. 
- virtual bool GetOption(rtc::Socket::Option opt, int* value); + virtual bool GetOption(Socket::Option opt, int* value); // Returns the most recent error that occurred on this channel. virtual int GetError() = 0; // Returns the current network route with transport overhead. // TODO(zhihuang): Make it pure virtual once the Chrome/remoting is updated. - virtual absl::optional network_route() const; + virtual std::optional network_route() const; // Emitted when the writable state, represented by `writable()`, changes. sigslot::signal1 SignalWritableState; @@ -78,31 +80,47 @@ class RTC_EXPORT PacketTransportInternal : public sigslot::has_slots<> { // Emitted when receiving state changes to true. sigslot::signal1 SignalReceivingState; - // Signalled each time a packet is received on this channel. - sigslot::signal5 - SignalReadPacket; + // Callback is invoked each time a packet is received on this channel. + void RegisterReceivedPacketCallback( + void* id, + absl::AnyInvocable callback); + + void DeregisterReceivedPacketCallback(void* id); // Signalled each time a packet is sent on this channel. - sigslot::signal2 + sigslot::signal2 SignalSentPacket; // Signalled when the current network route has changed. - sigslot::signal1> SignalNetworkRouteChanged; + sigslot::signal1> SignalNetworkRouteChanged; // Signalled when the transport is closed. - sigslot::signal1 SignalClosed; + void SetOnCloseCallback(absl::AnyInvocable callback); protected: PacketTransportInternal(); ~PacketTransportInternal() override; + + void NotifyPacketReceived(const ReceivedIpPacket& packet); + void NotifyOnClose(); + + SequenceChecker network_checker_{SequenceChecker::kDetached}; + + private: + CallbackList + received_packet_callback_list_ RTC_GUARDED_BY(&network_checker_); + absl::AnyInvocable on_close_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::PacketTransportInternal; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_PACKET_TRANSPORT_INTERNAL_H_ diff --git a/p2p/base/packet_transport_internal_unittest.cc b/p2p/base/packet_transport_internal_unittest.cc new file mode 100644 index 0000000000..0ffb26b8bb --- /dev/null +++ b/p2p/base/packet_transport_internal_unittest.cc @@ -0,0 +1,59 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "p2p/base/packet_transport_internal.h" + +#include + +#include "p2p/test/fake_packet_transport.h" +#include "rtc_base/network/ecn_marking.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/socket_address.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace { + +using ::testing::MockFunction; + +TEST(PacketTransportInternal, + NotifyPacketReceivedPassthrougPacketToRegisteredListener) { + webrtc::FakePacketTransport packet_transport("test"); + MockFunction + receiver; + + packet_transport.RegisterReceivedPacketCallback(&receiver, + receiver.AsStdFunction()); + EXPECT_CALL(receiver, Call) + .WillOnce([](webrtc::PacketTransportInternal*, + const webrtc::ReceivedIpPacket& packet) { + EXPECT_EQ(packet.decryption_info(), + webrtc::ReceivedIpPacket::kDtlsDecrypted); + }); + packet_transport.NotifyPacketReceived(webrtc::ReceivedIpPacket( + {}, webrtc::SocketAddress(), std::nullopt, webrtc::EcnMarking::kNotEct, + webrtc::ReceivedIpPacket::kDtlsDecrypted)); + + packet_transport.DeregisterReceivedPacketCallback(&receiver); +} + +TEST(PacketTransportInternal, NotifiesOnceOnClose) { + webrtc::FakePacketTransport packet_transport("test"); + int call_count = 0; + packet_transport.SetOnCloseCallback([&]() { ++call_count; }); + ASSERT_EQ(call_count, 0); + packet_transport.NotifyOnClose(); + EXPECT_EQ(call_count, 1); + packet_transport.NotifyOnClose(); + EXPECT_EQ(call_count, 1); // Call count should not have increased. +} + +} // namespace diff --git a/p2p/base/port.cc b/p2p/base/port.cc index ca0a6c5b03..5c78a04092 100644 --- a/p2p/base/port.cc +++ b/p2p/base/port.cc @@ -10,85 +10,91 @@ #include "p2p/base/port.h" -#include - -#include +#include +#include +#include #include +#include +#include #include #include -#include "absl/algorithm/container.h" #include "absl/memory/memory.h" #include "absl/strings/match.h" #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/candidate.h" +#include "api/rtc_error.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/transport/stun.h" +#include "api/units/time_delta.h" #include "p2p/base/connection.h" -#include "p2p/base/port_allocator.h" +#include "p2p/base/p2p_constants.h" +#include "p2p/base/port_interface.h" +#include "p2p/base/stun_request.h" +#include "p2p/base/transport_description.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/byte_buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/crc32.h" -#include "rtc_base/helpers.h" +#include "rtc_base/crypto_random.h" +#include "rtc_base/dscp.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/mdns_responder_interface.h" -#include "rtc_base/message_digest.h" +#include "rtc_base/net_helper.h" +#include "rtc_base/net_helpers.h" #include "rtc_base/network.h" -#include "rtc_base/numerics/safe_minmax.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/socket_address.h" #include "rtc_base/string_encode.h" #include "rtc_base/string_utils.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/third_party/base64/base64.h" +#include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" +#include "rtc_base/weak_ptr.h" -namespace cricket { +namespace webrtc { namespace { -using ::webrtc::RTCError; -using ::webrtc::RTCErrorType; -using ::webrtc::TaskQueueBase; -using ::webrtc::TimeDelta; - -rtc::PacketInfoProtocolType ConvertProtocolTypeToPacketInfoProtocolType( - 
cricket::ProtocolType type) { +PacketInfoProtocolType ConvertProtocolTypeToPacketInfoProtocolType( + ProtocolType type) { switch (type) { - case cricket::ProtocolType::PROTO_UDP: - return rtc::PacketInfoProtocolType::kUdp; - case cricket::ProtocolType::PROTO_TCP: - return rtc::PacketInfoProtocolType::kTcp; - case cricket::ProtocolType::PROTO_SSLTCP: - return rtc::PacketInfoProtocolType::kSsltcp; - case cricket::ProtocolType::PROTO_TLS: - return rtc::PacketInfoProtocolType::kTls; + case ProtocolType::PROTO_UDP: + return PacketInfoProtocolType::kUdp; + case ProtocolType::PROTO_TCP: + return PacketInfoProtocolType::kTcp; + case ProtocolType::PROTO_SSLTCP: + return PacketInfoProtocolType::kSsltcp; + case ProtocolType::PROTO_TLS: + return PacketInfoProtocolType::kTls; default: - return rtc::PacketInfoProtocolType::kUnknown; + return PacketInfoProtocolType::kUnknown; } } // The delay before we begin checking if this port is useless. We set // it to a little higher than a total STUN timeout. -const int kPortTimeoutDelay = cricket::STUN_TOTAL_TIMEOUT + 5000; +const int kPortTimeoutDelay = webrtc::STUN_TOTAL_TIMEOUT + 5000; } // namespace -// TODO(ronghuawu): Use "local", "srflx", "prflx" and "relay". But this requires -// the signaling part be updated correspondingly as well. -const char LOCAL_PORT_TYPE[] = "local"; -const char STUN_PORT_TYPE[] = "stun"; -const char PRFLX_PORT_TYPE[] = "prflx"; -const char RELAY_PORT_TYPE[] = "relay"; - -static const char* const PROTO_NAMES[] = {UDP_PROTOCOL_NAME, TCP_PROTOCOL_NAME, - SSLTCP_PROTOCOL_NAME, - TLS_PROTOCOL_NAME}; +static const char* const PROTO_NAMES[] = { + webrtc::UDP_PROTOCOL_NAME, webrtc::TCP_PROTOCOL_NAME, + webrtc::SSLTCP_PROTOCOL_NAME, webrtc::TLS_PROTOCOL_NAME}; const char* ProtoToString(ProtocolType proto) { return PROTO_NAMES[proto]; } -absl::optional StringToProto(absl::string_view proto_name) { - for (size_t i = 0; i <= PROTO_LAST; ++i) { +std::optional StringToProto(absl::string_view proto_name) { + for (size_t i = 0; i <= webrtc::PROTO_LAST; ++i) { if (absl::EqualsIgnoreCase(PROTO_NAMES[i], proto_name)) { return static_cast(i); } } - return absl::nullopt; + return std::nullopt; } // RFC 6544, TCP candidate encoding rules. @@ -97,89 +103,44 @@ const char TCPTYPE_ACTIVE_STR[] = "active"; const char TCPTYPE_PASSIVE_STR[] = "passive"; const char TCPTYPE_SIMOPEN_STR[] = "so"; -std::string Port::ComputeFoundation(absl::string_view type, - absl::string_view protocol, - absl::string_view relay_protocol, - const rtc::SocketAddress& base_address) { - // TODO(bugs.webrtc.org/14605): ensure IceTiebreaker() is set. 
- rtc::StringBuilder sb; - sb << type << base_address.ipaddr().ToString() << protocol << relay_protocol - << rtc::ToString(IceTiebreaker()); - return rtc::ToString(rtc::ComputeCrc32(sb.Release())); -} - -Port::Port(TaskQueueBase* thread, - absl::string_view type, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, - absl::string_view username_fragment, - absl::string_view password, - const webrtc::FieldTrialsView* field_trials) - : thread_(thread), - factory_(factory), - type_(type), - send_retransmit_count_attribute_(false), - network_(network), - min_port_(0), - max_port_(0), - component_(ICE_CANDIDATE_COMPONENT_DEFAULT), - generation_(0), - ice_username_fragment_(username_fragment), - password_(password), - timeout_delay_(kPortTimeoutDelay), - enable_port_packets_(false), - ice_role_(ICEROLE_UNKNOWN), - tiebreaker_(0), - shared_socket_(true), - weak_factory_(this), - field_trials_(field_trials) { - RTC_DCHECK(factory_ != NULL); - Construct(); -} +Port::Port(const PortParametersRef& args, IceCandidateType type) + : Port(args, type, 0, 0, true) {} -Port::Port(TaskQueueBase* thread, - absl::string_view type, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, +Port::Port(const PortParametersRef& args, + IceCandidateType type, uint16_t min_port, uint16_t max_port, - absl::string_view username_fragment, - absl::string_view password, - const webrtc::FieldTrialsView* field_trials) - : thread_(thread), - factory_(factory), + bool shared_socket /*= false*/) + : env_(args.env), + thread_(args.network_thread), + factory_(args.socket_factory), type_(type), send_retransmit_count_attribute_(false), - network_(network), + network_(args.network), min_port_(min_port), max_port_(max_port), component_(ICE_CANDIDATE_COMPONENT_DEFAULT), generation_(0), - ice_username_fragment_(username_fragment), - password_(password), + ice_username_fragment_(args.ice_username_fragment), + password_(args.ice_password), timeout_delay_(kPortTimeoutDelay), enable_port_packets_(false), - ice_role_(ICEROLE_UNKNOWN), + ice_role_(webrtc::ICEROLE_UNKNOWN), tiebreaker_(0), - shared_socket_(false), - weak_factory_(this), - field_trials_(field_trials) { - RTC_DCHECK(factory_ != NULL); - Construct(); -} - -void Port::Construct() { + shared_socket_(shared_socket), + network_cost_(args.network->GetCost(env_.field_trials())), + weak_factory_(this) { RTC_DCHECK_RUN_ON(thread_); + RTC_DCHECK(factory_ != nullptr); // TODO(pthatcher): Remove this old behavior once we're sure no one // relies on it. If the username_fragment and password are empty, // we should just create one. 
if (ice_username_fragment_.empty()) { RTC_DCHECK(password_.empty()); - ice_username_fragment_ = rtc::CreateRandomString(ICE_UFRAG_LENGTH); - password_ = rtc::CreateRandomString(ICE_PWD_LENGTH); + ice_username_fragment_ = webrtc::CreateRandomString(ICE_UFRAG_LENGTH); + password_ = webrtc::CreateRandomString(ICE_PWD_LENGTH); } network_->SignalTypeChanged.connect(this, &Port::OnNetworkTypeChanged); - network_cost_ = network_->GetCost(field_trials()); PostDestroyIfDead(/*delayed=*/true); RTC_LOG(LS_INFO) << ToString() << ": Port created with network cost " @@ -188,14 +149,14 @@ void Port::Construct() { Port::~Port() { RTC_DCHECK_RUN_ON(thread_); - CancelPendingTasks(); DestroyAllConnections(); + CancelPendingTasks(); } -const std::string& Port::Type() const { +IceCandidateType Port::Type() const { return type_; } -const rtc::Network* Port::Network() const { +const Network* Port::Network() const { return network_; } @@ -243,7 +204,7 @@ const std::vector& Port::Candidates() const { return candidates_; } -Connection* Port::GetConnection(const rtc::SocketAddress& remote_addr) { +Connection* Port::GetConnection(const SocketAddress& remote_addr) { AddressMap::const_iterator iter = connections_.find(remote_addr); if (iter != connections_.end()) return iter->second; @@ -251,31 +212,38 @@ Connection* Port::GetConnection(const rtc::SocketAddress& remote_addr) { return NULL; } -void Port::AddAddress(const rtc::SocketAddress& address, - const rtc::SocketAddress& base_address, - const rtc::SocketAddress& related_address, +void Port::AddAddress(const SocketAddress& address, + const SocketAddress& base_address, + const SocketAddress& related_address, absl::string_view protocol, absl::string_view relay_protocol, absl::string_view tcptype, - absl::string_view type, + IceCandidateType type, uint32_t type_preference, uint32_t relay_preference, absl::string_view url, bool is_final) { RTC_DCHECK_RUN_ON(thread_); - if (protocol == TCP_PROTOCOL_NAME && type == LOCAL_PORT_TYPE) { - RTC_DCHECK(!tcptype.empty()); - } - std::string foundation = - ComputeFoundation(type, protocol, relay_protocol, base_address); + // TODO(tommi): Set relay_protocol and optionally provide the base address + // to automatically compute the foundation in the ctor? It would be a good + // thing for the Candidate class to know the base address and keep it const. Candidate c(component_, protocol, address, 0U, username_fragment(), password_, - type, generation_, foundation, network_->id(), network_cost_); + type, generation_, "", network_->id(), network_cost_); + // Set the relay protocol before computing the foundation field. c.set_relay_protocol(relay_protocol); + // TODO(bugs.webrtc.org/14605): ensure IceTiebreaker() is set. 
+ c.ComputeFoundation(base_address, tiebreaker_); + c.set_priority( c.GetPriority(type_preference, network_->preference(), relay_preference, - field_trials_->IsEnabled( + field_trials().IsEnabled( "WebRTC-IncreaseIceCandidatePriorityHostSrflx"))); +#if RTC_DCHECK_IS_ON + if (protocol == TCP_PROTOCOL_NAME && c.is_local()) { + RTC_DCHECK(!tcptype.empty()); + } +#endif c.set_tcptype(tcptype); c.set_network_name(network_->name()); c.set_network_type(network_->type()); @@ -283,38 +251,36 @@ void Port::AddAddress(const rtc::SocketAddress& address, c.set_url(url); c.set_related_address(related_address); - bool pending = MaybeObfuscateAddress(&c, type, is_final); + bool pending = MaybeObfuscateAddress(c, is_final); if (!pending) { FinishAddingAddress(c, is_final); } } -bool Port::MaybeObfuscateAddress(Candidate* c, - absl::string_view type, - bool is_final) { +bool Port::MaybeObfuscateAddress(const Candidate& c, bool is_final) { // TODO(bugs.webrtc.org/9723): Use a config to control the feature of IP // handling with mDNS. if (network_->GetMdnsResponder() == nullptr) { return false; } - if (type != LOCAL_PORT_TYPE) { + if (!c.is_local()) { return false; } - auto copy = *c; + auto copy = c; auto weak_ptr = weak_factory_.GetWeakPtr(); - auto callback = [weak_ptr, copy, is_final](const rtc::IPAddress& addr, + auto callback = [weak_ptr, copy, is_final](const IPAddress& addr, absl::string_view name) mutable { RTC_DCHECK(copy.address().ipaddr() == addr); - rtc::SocketAddress hostname_address(name, copy.address().port()); + SocketAddress hostname_address(name, copy.address().port()); // In Port and Connection, we need the IP address information to // correctly handle the update of candidate type to prflx. The removal // of IP address when signaling this candidate will take place in // BasicPortAllocatorSession::OnCandidateReady, via SanitizeCandidate. hostname_address.SetResolvedIP(addr); copy.set_address(hostname_address); - copy.set_related_address(rtc::SocketAddress()); + copy.set_related_address(SocketAddress()); if (weak_ptr != nullptr) { RTC_DCHECK_RUN_ON(weak_ptr->thread_); weak_ptr->set_mdns_name_registration_status( @@ -359,10 +325,10 @@ void Port::AddOrReplaceConnection(Connection* conn) { } } -void Port::OnReadPacket(const char* data, - size_t size, - const rtc::SocketAddress& addr, - ProtocolType proto) { +void Port::OnReadPacket(const ReceivedIpPacket& packet, ProtocolType proto) { + const char* data = reinterpret_cast(packet.payload().data()); + size_t size = packet.payload().size(); + const SocketAddress& addr = packet.source_address(); // If the user has enabled port packets, just hand this over. if (enable_port_packets_) { SignalReadPacket(this, data, size, addr); @@ -381,7 +347,7 @@ void Port::OnReadPacket(const char* data, // STUN message handled already } else if (msg->type() == STUN_BINDING_REQUEST) { RTC_LOG(LS_INFO) << "Received " << StunMethodToString(msg->type()) - << " id=" << rtc::hex_encode(msg->transaction_id()) + << " id=" << webrtc::hex_encode(msg->transaction_id()) << " from unknown address " << addr.ToSensitiveString(); // We need to signal an unknown address before we handle any role conflict // below. 
Otherwise there would be no candidate pair and TURN entry created @@ -428,7 +394,7 @@ void Port::AddPrflxCandidate(const Candidate& local) { bool Port::GetStunMessage(const char* data, size_t size, - const rtc::SocketAddress& addr, + const SocketAddress& addr, std::unique_ptr* out_msg, std::string* out_username) { RTC_DCHECK_RUN_ON(thread_); @@ -452,7 +418,8 @@ bool Port::GetStunMessage(const char* data, // Parse the request message. If the packet is not a complete and correct // STUN message, then ignore it. std::unique_ptr stun_msg(new IceMessage()); - rtc::ByteBufferReader buf(data, size); + ByteBufferReader buf( + MakeArrayView(reinterpret_cast(data), size)); if (!stun_msg->Read(&buf) || (buf.Length() > 0)) { return false; } @@ -593,24 +560,24 @@ bool Port::GetStunMessage(const char* data, return true; } -bool Port::IsCompatibleAddress(const rtc::SocketAddress& addr) { +bool Port::IsCompatibleAddress(const SocketAddress& addr) { // Get a representative IP for the Network this port is configured to use. - rtc::IPAddress ip = network_->GetBestIP(); + IPAddress ip = network_->GetBestIP(); // We use single-stack sockets, so families must match. if (addr.family() != ip.family()) { return false; } // Link-local IPv6 ports can only connect to other link-local IPv6 ports. if (ip.family() == AF_INET6 && - (IPIsLinkLocal(ip) != IPIsLinkLocal(addr.ipaddr()))) { + (webrtc::IPIsLinkLocal(ip) != webrtc::IPIsLinkLocal(addr.ipaddr()))) { return false; } return true; } -rtc::DiffServCodePoint Port::StunDscpValue() const { +DiffServCodePoint Port::StunDscpValue() const { // By default, inherit from whatever the MediaChannel sends. - return rtc::DSCP_NO_CHANGE; + return webrtc::DSCP_NO_CHANGE; } void Port::DestroyAllConnections() { @@ -657,18 +624,18 @@ bool Port::ParseStunUsername(const StunMessage* stun_msg, return true; } -bool Port::MaybeIceRoleConflict(const rtc::SocketAddress& addr, +bool Port::MaybeIceRoleConflict(const SocketAddress& addr, IceMessage* stun_msg, absl::string_view remote_ufrag) { RTC_DCHECK_RUN_ON(thread_); // Validate ICE_CONTROLLING or ICE_CONTROLLED attributes. bool ret = true; - IceRole remote_ice_role = ICEROLE_UNKNOWN; + IceRole remote_ice_role = webrtc::ICEROLE_UNKNOWN; uint64_t remote_tiebreaker = 0; const StunUInt64Attribute* stun_attr = stun_msg->GetUInt64(STUN_ATTR_ICE_CONTROLLING); if (stun_attr) { - remote_ice_role = ICEROLE_CONTROLLING; + remote_ice_role = webrtc::ICEROLE_CONTROLLING; remote_tiebreaker = stun_attr->value(); } @@ -676,7 +643,7 @@ bool Port::MaybeIceRoleConflict(const rtc::SocketAddress& addr, // tie breaker value received in the ping message matches port // tiebreaker value this must be a loopback call. // We will treat this as valid scenario. 
- if (remote_ice_role == ICEROLE_CONTROLLING && + if (remote_ice_role == webrtc::ICEROLE_CONTROLLING && username_fragment() == remote_ufrag && remote_tiebreaker == IceTiebreaker()) { return true; @@ -684,13 +651,13 @@ bool Port::MaybeIceRoleConflict(const rtc::SocketAddress& addr, stun_attr = stun_msg->GetUInt64(STUN_ATTR_ICE_CONTROLLED); if (stun_attr) { - remote_ice_role = ICEROLE_CONTROLLED; + remote_ice_role = webrtc::ICEROLE_CONTROLLED; remote_tiebreaker = stun_attr->value(); } switch (ice_role_) { - case ICEROLE_CONTROLLING: - if (ICEROLE_CONTROLLING == remote_ice_role) { + case webrtc::ICEROLE_CONTROLLING: + if (webrtc::ICEROLE_CONTROLLING == remote_ice_role) { if (remote_tiebreaker >= tiebreaker_) { SignalRoleConflict(this); } else { @@ -701,8 +668,8 @@ bool Port::MaybeIceRoleConflict(const rtc::SocketAddress& addr, } } break; - case ICEROLE_CONTROLLED: - if (ICEROLE_CONTROLLED == remote_ice_role) { + case webrtc::ICEROLE_CONTROLLED: + if (webrtc::ICEROLE_CONTROLLED == remote_ice_role) { if (remote_tiebreaker < tiebreaker_) { SignalRoleConflict(this); } else { @@ -724,21 +691,18 @@ std::string Port::CreateStunUsername(absl::string_view remote_username) const { return std::string(remote_username) + ":" + username_fragment(); } -bool Port::HandleIncomingPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - int64_t packet_time_us) { +bool Port::HandleIncomingPacket(AsyncPacketSocket* /* socket */, + const ReceivedIpPacket& /* packet */) { RTC_DCHECK_NOTREACHED(); return false; } -bool Port::CanHandleIncomingPacketsFrom(const rtc::SocketAddress&) const { +bool Port::CanHandleIncomingPacketsFrom(const SocketAddress&) const { return false; } void Port::SendBindingErrorResponse(StunMessage* message, - const rtc::SocketAddress& addr, + const SocketAddress& addr, int error_code, absl::string_view reason) { RTC_DCHECK_RUN_ON(thread_); @@ -752,7 +716,7 @@ void Port::SendBindingErrorResponse(StunMessage* message, message->transaction_id()); // When doing GICE, we need to write out the error code incorrectly to - // maintain backwards compatiblility. + // maintain backwards compatibility. auto error_attr = StunAttribute::CreateErrorCode(); error_attr->SetCode(error_code); error_attr->SetReason(std::string(reason)); @@ -775,11 +739,11 @@ void Port::SendBindingErrorResponse(StunMessage* message, } // Send the response message. - rtc::ByteBufferWriter buf; + ByteBufferWriter buf; response.Write(&buf); - rtc::PacketOptions options(StunDscpValue()); + AsyncSocketPacketOptions options(StunDscpValue()); options.info_signaled_after_sent.packet_type = - rtc::PacketType::kIceConnectivityCheckResponse; + PacketType::kIceConnectivityCheckResponse; SendTo(buf.Data(), buf.Length(), addr, options, false); RTC_LOG(LS_INFO) << ToString() << ": Sending STUN " << StunMethodToString(response.type()) @@ -789,7 +753,7 @@ void Port::SendBindingErrorResponse(StunMessage* message, void Port::SendUnknownAttributesErrorResponse( StunMessage* message, - const rtc::SocketAddress& addr, + const SocketAddress& addr, const std::vector& unknown_types) { RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(message->type() == STUN_BINDING_REQUEST); @@ -813,11 +777,11 @@ void Port::SendUnknownAttributesErrorResponse( response.AddFingerprint(); // Send the response message. 
- rtc::ByteBufferWriter buf; + ByteBufferWriter buf; response.Write(&buf); - rtc::PacketOptions options(StunDscpValue()); + AsyncSocketPacketOptions options(StunDscpValue()); options.info_signaled_after_sent.packet_type = - rtc::PacketType::kIceConnectivityCheckResponse; + PacketType::kIceConnectivityCheckResponse; SendTo(buf.Data(), buf.Length(), addr, options, false); RTC_LOG(LS_ERROR) << ToString() << ": Sending STUN binding error: reason=" << STUN_ERROR_UNKNOWN_ATTRIBUTE << " to " @@ -844,7 +808,7 @@ void Port::CancelPendingTasks() { } void Port::PostDestroyIfDead(bool delayed) { - rtc::WeakPtr weak_ptr = NewWeakPtr(); + WeakPtr weak_ptr = NewWeakPtr(); auto task = [weak_ptr = std::move(weak_ptr)] { if (weak_ptr) { weak_ptr->DestroyIfDead(); @@ -860,10 +824,10 @@ void Port::PostDestroyIfDead(bool delayed) { void Port::DestroyIfDead() { RTC_DCHECK_RUN_ON(thread_); - bool dead = - (state_ == State::INIT || state_ == State::PRUNED) && - connections_.empty() && - rtc::TimeMillis() - last_time_all_connections_removed_ >= timeout_delay_; + bool dead = (state_ == State::INIT || state_ == State::PRUNED) && + connections_.empty() && + webrtc::TimeMillis() - last_time_all_connections_removed_ >= + timeout_delay_; if (dead) { Destroy(); } @@ -877,17 +841,18 @@ void Port::SubscribePortDestroyed( void Port::SendPortDestroyed(Port* port) { port_destroyed_callback_list_.Send(port); } -void Port::OnNetworkTypeChanged(const rtc::Network* network) { +void Port::OnNetworkTypeChanged(const ::webrtc::Network* network) { RTC_DCHECK(network == network_); UpdateNetworkCost(); } std::string Port::ToString() const { - rtc::StringBuilder ss; - ss << "Port[" << rtc::ToHex(reinterpret_cast(this)) << ":" - << content_name_ << ":" << component_ << ":" << generation_ << ":" << type_ - << ":" << network_->ToString() << "]"; + StringBuilder ss; + ss << "Port[" << webrtc::ToHex(reinterpret_cast(this)) << ":" + << content_name_ << ":" << component_ << ":" << generation_ << ":" + << webrtc::IceCandidateTypeToString(type_) << ":" << network_->ToString() + << "]"; return ss.Release(); } @@ -904,7 +869,7 @@ void Port::UpdateNetworkCost() { << ". Number of connections created: " << connections_.size(); network_cost_ = new_cost; - for (cricket::Candidate& candidate : candidates_) + for (Candidate& candidate : candidates_) candidate.set_network_cost(network_cost_); for (auto& [unused, connection] : connections_) @@ -932,7 +897,7 @@ bool Port::OnConnectionDestroyed(Connection* conn) { // fails and is removed before kPortTimeoutDelay, then this message will // not cause the Port to be destroyed. 
if (connections_.empty()) { - last_time_all_connections_removed_ = rtc::TimeMillis(); + last_time_all_connections_removed_ = webrtc::TimeMillis(); PostDestroyIfDead(/*delayed=*/true); } @@ -970,9 +935,9 @@ const std::string& Port::username_fragment() const { return ice_username_fragment_; } -void Port::CopyPortInformationToPacketInfo(rtc::PacketInfo* info) const { +void Port::CopyPortInformationToPacketInfo(PacketInfo* info) const { info->protocol = ConvertProtocolTypeToPacketInfoProtocolType(GetProtocol()); info->network_id = Network()->id(); } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/port.h b/p2p/base/port.h index 2678df8e52..522fc9329e 100644 --- a/p2p/base/port.h +++ b/p2p/base/port.h @@ -11,50 +11,44 @@ #ifndef P2P_BASE_PORT_H_ #define P2P_BASE_PORT_H_ +#include +#include + +#include #include #include +#include #include #include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/candidate.h" +#include "api/environment/environment.h" #include "api/field_trials_view.h" #include "api/packet_socket_factory.h" -#include "api/rtc_error.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" -#include "api/transport/field_trial_based_config.h" #include "api/transport/stun.h" -#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" -#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" -#include "logging/rtc_event_log/ice_logger.h" #include "p2p/base/candidate_pair_interface.h" #include "p2p/base/connection.h" -#include "p2p/base/connection_info.h" -#include "p2p/base/p2p_constants.h" +#include "p2p/base/p2p_constants.h" // IWYU pragma: keep #include "p2p/base/port_interface.h" -#include "p2p/base/stun_request.h" +#include "p2p/base/transport_description.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/callback_list.h" -#include "rtc_base/checks.h" -#include "rtc_base/memory/always_valid_pointer.h" -#include "rtc_base/net_helper.h" +#include "rtc_base/dscp.h" #include "rtc_base/network.h" -#include "rtc_base/proxy_info.h" -#include "rtc_base/rate_tracker.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network/sent_packet.h" #include "rtc_base/socket_address.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/weak_ptr.h" -namespace cricket { - -RTC_EXPORT extern const char LOCAL_PORT_TYPE[]; -RTC_EXPORT extern const char STUN_PORT_TYPE[]; -RTC_EXPORT extern const char PRFLX_PORT_TYPE[]; -RTC_EXPORT extern const char RELAY_PORT_TYPE[]; +namespace webrtc { // RFC 6544, TCP candidate encoding rules. extern const int DISCARD_PORT; @@ -62,19 +56,6 @@ extern const char TCPTYPE_ACTIVE_STR[]; extern const char TCPTYPE_PASSIVE_STR[]; extern const char TCPTYPE_SIMOPEN_STR[]; -// The type preference MUST be an integer from 0 to 126 inclusive. -// https://datatracker.ietf.org/doc/html/rfc5245#section-4.1.2.1 -enum IcePriorityValue : uint8_t { - ICE_TYPE_PREFERENCE_RELAY_TLS = 0, - ICE_TYPE_PREFERENCE_RELAY_TCP = 1, - ICE_TYPE_PREFERENCE_RELAY_UDP = 2, - ICE_TYPE_PREFERENCE_PRFLX_TCP = 80, - ICE_TYPE_PREFERENCE_HOST_TCP = 90, - ICE_TYPE_PREFERENCE_SRFLX = 100, - ICE_TYPE_PREFERENCE_PRFLX = 110, - ICE_TYPE_PREFERENCE_HOST = 126 -}; - enum class MdnsNameRegistrationStatus { // IP concealment with mDNS is not enabled or the name registration process is // not started yet. 
@@ -109,7 +90,7 @@ class CandidateStats { CandidateStats(const CandidateStats&) = default; CandidateStats(CandidateStats&&) = default; CandidateStats(Candidate candidate, - absl::optional stats = absl::nullopt) + std::optional stats = std::nullopt) : candidate_(std::move(candidate)), stun_stats_(std::move(stats)) {} ~CandidateStats() = default; @@ -117,24 +98,24 @@ class CandidateStats { const Candidate& candidate() const { return candidate_; } - const absl::optional& stun_stats() const { return stun_stats_; } + const std::optional& stun_stats() const { return stun_stats_; } private: Candidate candidate_; // STUN port stats if this candidate is a STUN candidate. - absl::optional stun_stats_; + std::optional stun_stats_; }; typedef std::vector CandidateStatsList; const char* ProtoToString(ProtocolType proto); -absl::optional StringToProto(absl::string_view proto_name); +std::optional StringToProto(absl::string_view proto_name); struct ProtocolAddress { - rtc::SocketAddress address; + SocketAddress address; ProtocolType proto; - ProtocolAddress(const rtc::SocketAddress& a, ProtocolType p) + ProtocolAddress(const SocketAddress& a, ProtocolType p) : address(a), proto(p) {} bool operator==(const ProtocolAddress& o) const { @@ -171,44 +152,43 @@ struct CandidatePairChangeEvent { int64_t estimated_disconnected_time_ms; }; -typedef std::set ServerAddresses; +typedef std::set ServerAddresses; // Represents a local communication mechanism that can be used to create // connections to similar mechanisms of the other client. Subclasses of this // one add support for specific mechanisms like local UDP ports. class RTC_EXPORT Port : public PortInterface, public sigslot::has_slots<> { public: - // INIT: The state when a port is just created. - // KEEP_ALIVE_UNTIL_PRUNED: A port should not be destroyed even if no - // connection is using it. - // PRUNED: It will be destroyed if no connection is using it for a period of - // 30 seconds. - enum class State { INIT, KEEP_ALIVE_UNTIL_PRUNED, PRUNED }; - Port(webrtc::TaskQueueBase* thread, - absl::string_view type, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, - absl::string_view username_fragment, - absl::string_view password, - const webrtc::FieldTrialsView* field_trials = nullptr); - Port(webrtc::TaskQueueBase* thread, - absl::string_view type, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, + // A struct containing common arguments to creating a port. See also + // CreateRelayPortArgs. + struct PortParametersRef { + Environment env; + TaskQueueBase* network_thread; + PacketSocketFactory* socket_factory; + const ::webrtc::Network* network; + absl::string_view ice_username_fragment; + absl::string_view ice_password; + }; + + protected: + // Constructors for use only by via constructors in derived classes. + Port(const PortParametersRef& args, IceCandidateType type); + Port(const PortParametersRef& args, + IceCandidateType type, uint16_t min_port, uint16_t max_port, - absl::string_view username_fragment, - absl::string_view password, - const webrtc::FieldTrialsView* field_trials = nullptr); + bool shared_socket = false); + + public: ~Port() override; // Note that the port type does NOT uniquely identify different subclasses of // Port. Use the 2-tuple of the port type AND the protocol (GetProtocol()) to // uniquely identify subclasses. Whenever a new subclass of Port introduces a - // conflit in the value of the 2-tuple, make sure that the implementation that - // relies on this 2-tuple for RTTI is properly changed. 
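Reviewer note: the long public constructors are replaced by protected ones taking the PortParametersRef aggregate plus an IceCandidateType, so only subclasses can construct a Port. A hedged sketch of how a derived port would forward them (MyUdpPort is hypothetical and omits the pure-virtual overrides it would still need, so it is illustrative rather than compilable as-is):

// Sketch only: a hypothetical UDP host port forwarding the new aggregate.
class MyUdpPort : public webrtc::Port {
 public:
  MyUdpPort(const PortParametersRef& args,
            uint16_t min_port,
            uint16_t max_port)
      : Port(args,
             webrtc::IceCandidateType::kHost,
             min_port,
             max_port,
             /*shared_socket=*/false) {}

  // ... socket handling, PrepareAddress(), CreateConnection(), etc. ...
};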
- const std::string& Type() const override; - const rtc::Network* Network() const override; + // conflict in the value of the 2-tuple, make sure that the implementation + // that relies on this 2-tuple for RTTI is properly changed. + IceCandidateType Type() const override; + const ::webrtc::Network* Network() const override; // Methods to set/get ICE role and tiebreaker values. IceRole GetIceRole() const override; @@ -230,13 +210,13 @@ class RTC_EXPORT Port : public PortInterface, public sigslot::has_slots<> { void CancelPendingTasks(); // The thread on which this port performs its I/O. - webrtc::TaskQueueBase* thread() { return thread_; } + TaskQueueBase* thread() override { return thread_; } // The factory used to create the sockets of this port. - rtc::PacketSocketFactory* socket_factory() const { return factory_; } + PacketSocketFactory* socket_factory() const override { return factory_; } // For debugging purposes. - const std::string& content_name() const { return content_name_; } + const std::string& content_name() const override { return content_name_; } void set_content_name(absl::string_view content_name) { content_name_ = std::string(content_name); } @@ -244,7 +224,7 @@ class RTC_EXPORT Port : public PortInterface, public sigslot::has_slots<> { int component() const { return component_; } void set_component(int component) { component_ = component; } - bool send_retransmit_count_attribute() const { + bool send_retransmit_count_attribute() const override { return send_retransmit_count_attribute_; } void set_send_retransmit_count_attribute(bool enable) { @@ -252,8 +232,10 @@ class RTC_EXPORT Port : public PortInterface, public sigslot::has_slots<> { } // Identifies the generation that this port was created in. - uint32_t generation() const { return generation_; } - void set_generation(uint32_t generation) { generation_ = generation; } + uint32_t generation() const override { return generation_; } + void set_generation(uint32_t generation) override { + generation_ = generation; + } const std::string& username_fragment() const; const std::string& password() const { return password_; } @@ -288,22 +270,22 @@ class RTC_EXPORT Port : public PortInterface, public sigslot::has_slots<> { void SendPortDestroyed(Port* port); // Returns a map containing all of the connections of this port, keyed by the // remote address. - typedef std::map AddressMap; + typedef std::map AddressMap; const AddressMap& connections() { return connections_; } // Returns the connection to the given address or NULL if none exists. - Connection* GetConnection(const rtc::SocketAddress& remote_addr) override; + Connection* GetConnection(const SocketAddress& remote_addr) override; // Removes and deletes a connection object. `DestroyConnection` will // delete the connection object directly whereas `DestroyConnectionAsync` // defers the `delete` operation to when the call stack has been unwound. // Async may be needed when deleting a connection object from within a // callback. - void DestroyConnection(Connection* conn) { + void DestroyConnection(Connection* conn) override { DestroyConnectionInternal(conn, false); } - void DestroyConnectionAsync(Connection* conn) { + void DestroyConnectionAsync(Connection* conn) override { DestroyConnectionInternal(conn, true); } @@ -311,34 +293,24 @@ class RTC_EXPORT Port : public PortInterface, public sigslot::has_slots<> { // to accept the packet based on the `remote_addr`. Currently only UDP // port implemented this method. // TODO(mallinath) - Make it pure virtual. 
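Reviewer note: Type() now returns the IceCandidateType enum rather than a string, but the warning above still holds: only the (Type(), GetProtocol()) pair identifies the concrete Port subclass. A small illustrative helper built on that 2-tuple (assumes the kSrflx/kRelay enumerators from api/candidate.h alongside the kHost/kPrflx values used in the tests below; DescribePort itself is hypothetical):

#include <string>

#include "api/candidate.h"
#include "p2p/base/port_interface.h"

// Illustrative only: classify a port by the (candidate type, protocol) pair,
// the same 2-tuple the comment above says is needed to tell subclasses apart.
std::string DescribePort(const webrtc::PortInterface& port) {
  const bool udp = port.GetProtocol() == webrtc::PROTO_UDP;
  switch (port.Type()) {
    case webrtc::IceCandidateType::kHost:
      return udp ? "UDP host port" : "TCP host port";
    case webrtc::IceCandidateType::kSrflx:
      return "server-reflexive (STUN) port";
    case webrtc::IceCandidateType::kPrflx:
      return "peer-reflexive port";
    case webrtc::IceCandidateType::kRelay:
      return "relayed (TURN) port";
  }
  return "unknown";
}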
- virtual bool HandleIncomingPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - int64_t packet_time_us); + virtual bool HandleIncomingPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet); // Shall the port handle packet from this `remote_addr`. // This method is overridden by TurnPort. virtual bool CanHandleIncomingPacketsFrom( - const rtc::SocketAddress& remote_addr) const; + const SocketAddress& remote_addr) const; // Sends a response error to the given request. void SendBindingErrorResponse(StunMessage* message, - const rtc::SocketAddress& addr, + const SocketAddress& addr, int error_code, absl::string_view reason) override; void SendUnknownAttributesErrorResponse( StunMessage* message, - const rtc::SocketAddress& addr, + const SocketAddress& addr, const std::vector& unknown_types); - void set_proxy(absl::string_view user_agent, const rtc::ProxyInfo& proxy) { - user_agent_ = std::string(user_agent); - proxy_ = proxy; - } - const std::string& user_agent() { return user_agent_; } - const rtc::ProxyInfo& proxy() { return proxy_; } - void EnablePortPackets() override; // Called if the port has no connections and is no longer useful. @@ -352,59 +324,47 @@ class RTC_EXPORT Port : public PortInterface, public sigslot::has_slots<> { // Timeout shortening function to speed up unit tests. void set_timeout_delay(int delay); - // This method will return local and remote username fragements from the + // This method will return local and remote username fragments from the // stun username attribute if present. bool ParseStunUsername(const StunMessage* stun_msg, std::string* local_username, - std::string* remote_username) const; - std::string CreateStunUsername(absl::string_view remote_username) const; + std::string* remote_username) const override; + std::string CreateStunUsername( + absl::string_view remote_username) const override; - bool MaybeIceRoleConflict(const rtc::SocketAddress& addr, + bool MaybeIceRoleConflict(const SocketAddress& addr, IceMessage* stun_msg, - absl::string_view remote_ufrag); + absl::string_view remote_ufrag) override; // Called when a packet has been sent to the socket. // This is made pure virtual to notify subclasses of Port that they MUST // listen to AsyncPacketSocket::SignalSentPacket and then call // PortInterface::OnSentPacket. - virtual void OnSentPacket(rtc::AsyncPacketSocket* socket, - const rtc::SentPacket& sent_packet) = 0; + virtual void OnSentPacket(AsyncPacketSocket* socket, + const SentPacketInfo& sent_packet) = 0; // Called when the socket is currently able to send. void OnReadyToSend(); // Called when the Connection discovers a local peer reflexive candidate. - void AddPrflxCandidate(const Candidate& local); + void AddPrflxCandidate(const Candidate& local) override; - int16_t network_cost() const { return network_cost_; } + int16_t network_cost() const override { return network_cost_; } - void GetStunStats(absl::optional* stats) override {} - - // Foundation: An arbitrary string that is the same for two candidates - // that have the same type, base IP address, protocol (UDP, TCP, - // etc.), and STUN or TURN server. If any of these are different, - // then the foundation will be different. Two candidate pairs with - // the same foundation pairs are likely to have similar network - // characteristics. Foundations are used in the frozen algorithm. 
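Reviewer note: the deprecated OnReadPacket overload above keeps old call sites compiling by wrapping the raw (data, size, addr) triple exactly as shown. A hedged sketch, continuing the hypothetical MyUdpPort subclass from earlier, of how a port forwards a datagram using the new ReceivedIpPacket form:

// Continuing the hypothetical MyUdpPort sketch: forward a datagram received
// from its socket using the new packet type. packet_time_us = -1 means the
// capture timestamp is unknown, exactly as the deprecated shim above passes.
void MyUdpPort::HandleSocketRead(const char* data,
                                 size_t size,
                                 const webrtc::SocketAddress& remote_addr) {
  OnReadPacket(webrtc::ReceivedIpPacket::CreateFromLegacy(
                   data, size, /*packet_time_us=*/-1, remote_addr),
               webrtc::PROTO_UDP);
}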
- std::string ComputeFoundation(absl::string_view type, - absl::string_view protocol, - absl::string_view relay_protocol, - const rtc::SocketAddress& base_address); + void GetStunStats(std::optional* /* stats */) override {} protected: - virtual void UpdateNetworkCost(); - - void set_type(absl::string_view type) { type_ = std::string(type); } + void UpdateNetworkCost() override; - rtc::WeakPtr NewWeakPtr() { return weak_factory_.GetWeakPtr(); } + WeakPtr NewWeakPtr() { return weak_factory_.GetWeakPtr(); } - void AddAddress(const rtc::SocketAddress& address, - const rtc::SocketAddress& base_address, - const rtc::SocketAddress& related_address, + void AddAddress(const SocketAddress& address, + const SocketAddress& base_address, + const SocketAddress& related_address, absl::string_view protocol, absl::string_view relay_protocol, absl::string_view tcptype, - absl::string_view type, + IceCandidateType type, uint32_t type_preference, uint32_t relay_preference, absl::string_view url, @@ -423,10 +383,19 @@ class RTC_EXPORT Port : public PortInterface, public sigslot::has_slots<> { // Called when a packet is received from an unknown address that is not // currently a connection. If this is an authenticated STUN binding request, // then we will signal the client. - void OnReadPacket(const char* data, - size_t size, - const rtc::SocketAddress& addr, - ProtocolType proto); + void OnReadPacket(const ReceivedIpPacket& packet, ProtocolType proto); + + [[deprecated( + "Use OnReadPacket(const webrtc::ReceivedIpPacket& packet, ProtocolType " + "proto)")]] void + OnReadPacket(const char* data, + size_t size, + const SocketAddress& addr, + ProtocolType proto) { + OnReadPacket(ReceivedIpPacket::CreateFromLegacy( + data, size, /*packet_time_us = */ -1, addr), + proto); + } // If the given data comprises a complete and correct STUN message then the // return value is true, otherwise false. If the message username corresponds @@ -435,22 +404,22 @@ class RTC_EXPORT Port : public PortInterface, public sigslot::has_slots<> { // remote_username contains the remote fragment of the STUN username. bool GetStunMessage(const char* data, size_t size, - const rtc::SocketAddress& addr, + const SocketAddress& addr, std::unique_ptr* out_msg, - std::string* out_username); + std::string* out_username) override; // Checks if the address in addr is compatible with the port's ip. - bool IsCompatibleAddress(const rtc::SocketAddress& addr); + bool IsCompatibleAddress(const SocketAddress& addr); // Returns DSCP value packets generated by the port itself should use. - virtual rtc::DiffServCodePoint StunDscpValue() const; + DiffServCodePoint StunDscpValue() const override; // Extra work to be done in subclasses when a connection is destroyed. 
- virtual void HandleConnectionDestroyed(Connection* conn) {} + virtual void HandleConnectionDestroyed(Connection* /* conn */) {} void DestroyAllConnections(); - void CopyPortInformationToPacketInfo(rtc::PacketInfo* info) const; + void CopyPortInformationToPacketInfo(PacketInfo* info) const; MdnsNameRegistrationStatus mdns_name_registration_status() const { return mdns_name_registration_status_; @@ -459,10 +428,13 @@ class RTC_EXPORT Port : public PortInterface, public sigslot::has_slots<> { mdns_name_registration_status_ = status; } - const webrtc::FieldTrialsView& field_trials() const { return *field_trials_; } + const FieldTrialsView& field_trials() const { return env_.field_trials(); } + + IceCandidateType type() const { return type_; } private: - void Construct(); + bool MaybeObfuscateAddress(const Candidate& c, bool is_final) + RTC_RUN_ON(thread_); void PostDestroyIfDead(bool delayed); void DestroyIfDead(); @@ -480,13 +452,14 @@ class RTC_EXPORT Port : public PortInterface, public sigslot::has_slots<> { // distinct. void DestroyConnectionInternal(Connection* conn, bool async); - void OnNetworkTypeChanged(const rtc::Network* network); + void OnNetworkTypeChanged(const ::webrtc::Network* network); - webrtc::TaskQueueBase* const thread_; - rtc::PacketSocketFactory* const factory_; - std::string type_; + const Environment env_; + TaskQueueBase* const thread_; + PacketSocketFactory* const factory_; + const IceCandidateType type_; bool send_retransmit_count_attribute_; - const rtc::Network* network_; + const ::webrtc::Network* network_; uint16_t min_port_; uint16_t max_port_; std::string content_name_; @@ -505,32 +478,50 @@ class RTC_EXPORT Port : public PortInterface, public sigslot::has_slots<> { IceRole ice_role_; uint64_t tiebreaker_; bool shared_socket_; - // Information to use when going through a proxy. - std::string user_agent_; - rtc::ProxyInfo proxy_; // A virtual cost perceived by the user, usually based on the network type // (WiFi. vs. Cellular). It takes precedence over the priority when // comparing two connections. int16_t network_cost_; + // INIT: The state when a port is just created. + // KEEP_ALIVE_UNTIL_PRUNED: A port should not be destroyed even if no + // connection is using it. + // PRUNED: It will be destroyed if no connection is using it for a period of + // 30 seconds. + enum class State { INIT, KEEP_ALIVE_UNTIL_PRUNED, PRUNED }; State state_ = State::INIT; int64_t last_time_all_connections_removed_ = 0; MdnsNameRegistrationStatus mdns_name_registration_status_ = MdnsNameRegistrationStatus::kNotStarted; - rtc::WeakPtrFactory weak_factory_; - webrtc::AlwaysValidPointer - field_trials_; - - bool MaybeObfuscateAddress(Candidate* c, - absl::string_view type, - bool is_final) RTC_RUN_ON(thread_); + CallbackList port_destroyed_callback_list_; - friend class Connection; - webrtc::CallbackList port_destroyed_callback_list_; + // Keep as the last member variable. + WeakPtrFactory weak_factory_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
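Reviewer note: with Port now living in namespace webrtc, the re-export block that follows keeps legacy cricket:: spellings compiling when WEBRTC_ALLOW_DEPRECATED_NAMESPACES is defined. A hedged illustration of what that buys downstream code during the migration (the two functions are invented):

#include "p2p/base/port.h"

// New code spells the namespace out explicitly.
void UseNewName(webrtc::Port* port) {
  port->PrepareAddress();
}

#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES
// Legacy code that still says cricket::Port keeps compiling because of the
// using-declarations below; both names denote the same type, so no cast is
// needed at the boundary.
void UseOldName(cricket::Port* port) {
  UseNewName(port);
}
#endif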
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::CandidatePairChangeEvent; +using ::webrtc::CandidateStats; +using ::webrtc::CandidateStatsList; +using ::webrtc::DISCARD_PORT; +using ::webrtc::IceCandidateErrorEvent; +using ::webrtc::MdnsNameRegistrationStatus; +using ::webrtc::Port; +using ::webrtc::ProtocolAddress; +using ::webrtc::ProtoToString; +using ::webrtc::ServerAddresses; +using ::webrtc::StringToProto; +using ::webrtc::StunStats; +using ::webrtc::TCPTYPE_ACTIVE_STR; +using ::webrtc::TCPTYPE_PASSIVE_STR; +using ::webrtc::TCPTYPE_SIMOPEN_STR; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_PORT_H_ diff --git a/p2p/base/port_allocator.cc b/p2p/base/port_allocator.cc index d8ff04fe20..c7a40d9074 100644 --- a/p2p/base/port_allocator.cc +++ b/p2p/base/port_allocator.cc @@ -10,20 +10,30 @@ #include "p2p/base/port_allocator.h" +#include #include +#include +#include #include #include +#include #include "absl/strings/string_view.h" +#include "api/candidate.h" +#include "api/transport/enums.h" #include "p2p/base/ice_credentials_iterator.h" +#include "p2p/base/port.h" +#include "p2p/base/port_interface.h" +#include "p2p/base/transport_description.h" #include "rtc_base/checks.h" -#include "rtc_base/logging.h" +#include "rtc_base/crypto_random.h" +#include "rtc_base/socket_address.h" -namespace cricket { +namespace webrtc { RelayServerConfig::RelayServerConfig() {} -RelayServerConfig::RelayServerConfig(const rtc::SocketAddress& address, +RelayServerConfig::RelayServerConfig(const SocketAddress& address, absl::string_view username, absl::string_view password, ProtocolType proto) @@ -36,7 +46,7 @@ RelayServerConfig::RelayServerConfig(absl::string_view address, absl::string_view username, absl::string_view password, ProtocolType proto) - : RelayServerConfig(rtc::SocketAddress(address, port), + : RelayServerConfig(SocketAddress(address, port), username, password, proto) {} @@ -48,11 +58,12 @@ RelayServerConfig::RelayServerConfig(absl::string_view address, absl::string_view password, ProtocolType proto, bool secure) - : RelayServerConfig(address, - port, - username, - password, - (proto == PROTO_TCP && secure ? PROTO_TLS : proto)) {} + : RelayServerConfig( + address, + port, + username, + password, + (proto == webrtc::PROTO_TCP && secure ? webrtc::PROTO_TLS : proto)) {} RelayServerConfig::RelayServerConfig(const RelayServerConfig&) = default; @@ -68,8 +79,7 @@ PortAllocatorSession::PortAllocatorSession(absl::string_view content_name, content_name_(content_name), component_(component), ice_ufrag_(ice_ufrag), - ice_pwd_(ice_pwd), - tiebreaker_(0) { + ice_pwd_(ice_pwd) { // Pooled sessions are allowed to be created with empty content name, // component, ufrag and password. RTC_DCHECK(ice_ufrag.empty() == ice_pwd.empty()); @@ -101,7 +111,7 @@ PortAllocator::PortAllocator() step_delay_(kDefaultStepDelay), allow_tcp_listen_(true), candidate_filter_(CF_ALL), - tiebreaker_(0) { + tiebreaker_(CreateRandomId64()) { // The allocator will be attached to a thread in Initialize. 
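Reviewer note: instead of the removed SetIceTiebreaker() hook, the allocator now seeds tiebreaker_ once with CreateRandomId64() from rtc_base/crypto_random.h, in line with RFC 5245's random 64-bit tiebreaker. A standalone sketch of the idea using only the standard library (illustrative; not a drop-in for the crypto_random helper):

#include <cstdint>
#include <random>

// Illustrative stand-in for CreateRandomId64(): one uniformly distributed
// 64-bit value per allocator, later used to resolve ICE role conflicts.
uint64_t MakeTiebreaker() {
  std::random_device rd;
  std::mt19937_64 gen((static_cast<uint64_t>(rd()) << 32) ^ rd());
  return std::uniform_int_distribution<uint64_t>()(gen);
}

struct AllocatorSketch {
  // Mirrors the new PortAllocator behaviour: fixed at construction and
  // exposed read-only through ice_tiebreaker().
  const uint64_t tiebreaker_ = MakeTiebreaker();
  uint64_t ice_tiebreaker() const { return tiebreaker_; }
};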
thread_checker_.Detach(); } @@ -125,9 +135,9 @@ bool PortAllocator::SetConfiguration( const std::vector& turn_servers, int candidate_pool_size, bool prune_turn_ports, - webrtc::TurnCustomizer* turn_customizer, - const absl::optional& stun_candidate_keepalive_interval) { - webrtc::PortPrunePolicy turn_port_prune_policy = + TurnCustomizer* turn_customizer, + const std::optional& stun_candidate_keepalive_interval) { + PortPrunePolicy turn_port_prune_policy = prune_turn_ports ? webrtc::PRUNE_BASED_ON_PRIORITY : webrtc::NO_PRUNE; return SetConfiguration(stun_servers, turn_servers, candidate_pool_size, turn_port_prune_policy, turn_customizer, @@ -138,9 +148,11 @@ bool PortAllocator::SetConfiguration( const ServerAddresses& stun_servers, const std::vector& turn_servers, int candidate_pool_size, - webrtc::PortPrunePolicy turn_port_prune_policy, - webrtc::TurnCustomizer* turn_customizer, - const absl::optional& stun_candidate_keepalive_interval) { + PortPrunePolicy turn_port_prune_policy, + TurnCustomizer* turn_customizer, + const std::optional& stun_candidate_keepalive_interval) { + RTC_DCHECK_GE(candidate_pool_size, 0); + RTC_DCHECK_LE(candidate_pool_size, static_cast(UINT16_MAX)); CheckRunOnValidThreadIfInitialized(); // A positive candidate pool size would lead to the creation of a pooled // allocator session and starting getting ports, which we should only do on @@ -152,20 +164,6 @@ bool PortAllocator::SetConfiguration( turn_servers_ = turn_servers; turn_port_prune_policy_ = turn_port_prune_policy; - if (candidate_pool_frozen_) { - if (candidate_pool_size != candidate_pool_size_) { - RTC_LOG(LS_ERROR) - << "Trying to change candidate pool size after pool was frozen."; - return false; - } - return true; - } - - if (candidate_pool_size < 0) { - RTC_LOG(LS_ERROR) << "Can't set negative pool size."; - return false; - } - candidate_pool_size_ = candidate_pool_size; // If ICE servers changed, throw away any existing pooled sessions and create @@ -201,7 +199,6 @@ bool PortAllocator::SetConfiguration( PortAllocatorSession* pooled_session = CreateSessionInternal("", 0, iceCredentials.ufrag, iceCredentials.pwd); pooled_session->set_pooled(true); - pooled_session->set_ice_tiebreaker(tiebreaker_); pooled_session->StartGettingPorts(); pooled_sessions_.push_back( std::unique_ptr(pooled_session)); @@ -209,13 +206,6 @@ bool PortAllocator::SetConfiguration( return true; } -void PortAllocator::SetIceTiebreaker(uint64_t tiebreaker) { - tiebreaker_ = tiebreaker; - for (auto& pooled_session : pooled_sessions_) { - pooled_session->set_ice_tiebreaker(tiebreaker_); - } -} - std::unique_ptr PortAllocator::CreateSession( absl::string_view content_name, int component, @@ -225,7 +215,6 @@ std::unique_ptr PortAllocator::CreateSession( auto session = std::unique_ptr( CreateSessionInternal(content_name, component, ice_ufrag, ice_pwd)); session->SetCandidateFilter(candidate_filter()); - session->set_ice_tiebreaker(tiebreaker_); return session; } @@ -286,11 +275,6 @@ PortAllocator::FindPooledSession(const IceParameters* ice_credentials) const { return pooled_sessions_.end(); } -void PortAllocator::FreezeCandidatePool() { - CheckRunOnValidThreadAndInitialized(); - candidate_pool_frozen_ = true; -} - void PortAllocator::DiscardCandidatePool() { CheckRunOnValidThreadIfInitialized(); pooled_sessions_.clear(); @@ -329,8 +313,7 @@ Candidate PortAllocator::SanitizeCandidate(const Candidate& c) const { // For a local host candidate, we need to conceal its IP address candidate if // the mDNS obfuscation is enabled. 
bool use_hostname_address = - (c.type() == LOCAL_PORT_TYPE || c.type() == PRFLX_PORT_TYPE) && - MdnsObfuscationEnabled(); + (c.is_local() || c.is_prflx()) && MdnsObfuscationEnabled(); // If adapter enumeration is disabled or host candidates are disabled, // clear the raddr of STUN candidates to avoid local address leakage. bool filter_stun_related_address = @@ -343,10 +326,11 @@ Candidate PortAllocator::SanitizeCandidate(const Candidate& c) const { // Sanitize related_address when using MDNS. bool filter_prflx_related_address = MdnsObfuscationEnabled(); bool filter_related_address = - ((c.type() == STUN_PORT_TYPE && filter_stun_related_address) || - (c.type() == RELAY_PORT_TYPE && filter_turn_related_address) || - (c.type() == PRFLX_PORT_TYPE && filter_prflx_related_address)); - return c.ToSanitizedCopy(use_hostname_address, filter_related_address); + ((c.is_stun() && filter_stun_related_address) || + (c.is_relay() && filter_turn_related_address) || + (c.is_prflx() && filter_prflx_related_address)); + return c.ToSanitizedCopy(use_hostname_address, filter_related_address, + /*filter_ufrag=*/false); } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/port_allocator.h b/p2p/base/port_allocator.h index 3ca63cbd41..3b957720ef 100644 --- a/p2p/base/port_allocator.h +++ b/p2p/base/port_allocator.h @@ -11,28 +11,30 @@ #ifndef P2P_BASE_PORT_ALLOCATOR_H_ #define P2P_BASE_PORT_ALLOCATOR_H_ -#include +#include + #include +#include #include #include #include "absl/strings/string_view.h" +#include "api/candidate.h" #include "api/sequence_checker.h" #include "api/transport/enums.h" #include "p2p/base/port.h" #include "p2p/base/port_interface.h" -#include "rtc_base/helpers.h" -#include "rtc_base/proxy_info.h" +#include "p2p/base/transport_description.h" +#include "rtc_base/checks.h" +#include "rtc_base/network.h" +#include "rtc_base/socket_address.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/thread.h" namespace webrtc { -class TurnCustomizer; -} // namespace webrtc -namespace cricket { +class TurnCustomizer; // PortAllocator is responsible for allocating Port types for a given // P2PSocket. It also handles port freeing. @@ -151,7 +153,7 @@ typedef std::vector PortList; // TODO(deadbeef): Rename to TurnServerConfig. struct RTC_EXPORT RelayServerConfig { RelayServerConfig(); - RelayServerConfig(const rtc::SocketAddress& address, + RelayServerConfig(const SocketAddress& address, absl::string_view username, absl::string_view password, ProtocolType proto); @@ -180,7 +182,7 @@ struct RTC_EXPORT RelayServerConfig { TlsCertPolicy tls_cert_policy = TlsCertPolicy::TLS_CERT_POLICY_SECURE; std::vector tls_alpn_protocols; std::vector tls_elliptic_curves; - rtc::SSLCertificateVerifier* tls_cert_verifier = nullptr; + SSLCertificateVerifier* tls_cert_verifier = nullptr; std::string turn_logging_id; }; @@ -204,10 +206,6 @@ class RTC_EXPORT PortAllocatorSession : public sigslot::has_slots<> { const std::string& ice_pwd() const { return ice_pwd_; } bool pooled() const { return pooled_; } - // TODO(bugs.webrtc.org/14605): move this to the constructor - void set_ice_tiebreaker(uint64_t tiebreaker) { tiebreaker_ = tiebreaker; } - uint64_t ice_tiebreaker() const { return tiebreaker_; } - // Setting this filter should affect not only candidates gathered in the // future, but candidates already gathered and ports already "ready", // which would be returned by ReadyCandidates() and ReadyPorts(). 
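Reviewer note: SanitizeCandidate() now uses the Candidate::is_local()/is_prflx()/is_stun()/is_relay() predicates instead of comparing against the removed *_PORT_TYPE strings, and passes an explicit filter_ufrag=false to ToSanitizedCopy(). A condensed restatement of the two booleans it computes, for the common case where host candidates and adapter enumeration are allowed and TURN related addresses are kept (illustrative C++, not the full rule):

enum class CandidateKind { kHost, kSrflx, kPrflx, kRelay };

struct SanitizePlan {
  bool use_hostname_address;    // Replace the IP with the mDNS hostname.
  bool filter_related_address;  // Blank out the related (raddr) field.
};

// Condensed restatement of the booleans computed above, assuming STUN raddr
// filtering is off and TURN related addresses are not filtered.
SanitizePlan PlanSanitize(CandidateKind kind, bool mdns_obfuscation_enabled) {
  SanitizePlan plan{false, false};
  plan.use_hostname_address =
      mdns_obfuscation_enabled &&
      (kind == CandidateKind::kHost || kind == CandidateKind::kPrflx);
  plan.filter_related_address =
      mdns_obfuscation_enabled && kind == CandidateKind::kPrflx;
  return plan;
}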
@@ -246,13 +244,13 @@ class RTC_EXPORT PortAllocatorSession : public sigslot::has_slots<> { // Get candidate-level stats from all candidates on the ready ports and return // the stats to the given list. virtual void GetCandidateStatsFromReadyPorts( - CandidateStatsList* candidate_stats_list) const {} + CandidateStatsList* /* candidate_stats_list */) const {} // Set the interval at which STUN candidates will resend STUN binding requests // on the underlying ports to keep NAT bindings open. // The default value of the interval in implementation is restored if a null // optional value is passed. virtual void SetStunKeepaliveIntervalForReadyPorts( - const absl::optional& stun_keepalive_interval) {} + const std::optional& /* stun_keepalive_interval */) {} // Another way of getting the information provided by the signals below. // // Ports and candidates are not guaranteed to be in the same order as the @@ -324,9 +322,6 @@ class RTC_EXPORT PortAllocatorSession : public sigslot::has_slots<> { bool pooled_ = false; - // TODO(bugs.webrtc.org/14605): move this to the constructor - uint64_t tiebreaker_; - // SetIceParameters is an implementation detail which only PortAllocator // should be able to call. friend class PortAllocator; @@ -368,19 +363,16 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { const std::vector& turn_servers, int candidate_pool_size, bool prune_turn_ports, - webrtc::TurnCustomizer* turn_customizer = nullptr, - const absl::optional& - stun_candidate_keepalive_interval = absl::nullopt); + TurnCustomizer* turn_customizer = nullptr, + const std::optional& + stun_candidate_keepalive_interval = std::nullopt); bool SetConfiguration(const ServerAddresses& stun_servers, const std::vector& turn_servers, int candidate_pool_size, - webrtc::PortPrunePolicy turn_port_prune_policy, - webrtc::TurnCustomizer* turn_customizer = nullptr, - const absl::optional& - stun_candidate_keepalive_interval = absl::nullopt); - - void SetIceTiebreaker(uint64_t tiebreaker); - uint64_t IceTiebreaker() const { return tiebreaker_; } + PortPrunePolicy turn_port_prune_policy, + TurnCustomizer* turn_customizer = nullptr, + const std::optional& + stun_candidate_keepalive_interval = std::nullopt); const ServerAddresses& stun_servers() const { CheckRunOnValidThreadIfInitialized(); @@ -397,7 +389,7 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { return candidate_pool_size_; } - const absl::optional& stun_candidate_keepalive_interval() const { + const std::optional& stun_candidate_keepalive_interval() const { CheckRunOnValidThreadIfInitialized(); return stun_candidate_keepalive_interval_; } @@ -411,13 +403,13 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { // Set whether VPN connections should be preferred, avoided, mandated or // blocked. - virtual void SetVpnPreference(webrtc::VpnPreference preference) { + virtual void SetVpnPreference(VpnPreference preference) { vpn_preference_ = preference; } // Set list of that shall be categorized as VPN. // Implemented by BasicPortAllocator. 
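Reviewer note: SetConfiguration() now takes std::optional for the STUN keepalive interval and, per the new DCHECKs, expects a candidate pool size in [0, 65535] instead of rejecting negative sizes at runtime. A hedged usage sketch (the server hostnames and credentials are placeholders; the call mirrors the pattern used by the unit tests further below):

#include <optional>
#include <vector>

#include "p2p/base/port_allocator.h"

// Hedged usage sketch: server addresses and credentials are placeholders.
void ConfigureAllocator(webrtc::PortAllocator& allocator) {
  webrtc::ServerAddresses stun_servers = {
      webrtc::SocketAddress("stun.example.org", 3478)};
  std::vector<webrtc::RelayServerConfig> turn_servers = {
      webrtc::RelayServerConfig("turn.example.org", 3478, "user", "pass",
                                webrtc::PROTO_UDP, /*secure=*/false)};
  // One pooled session, no TURN pruning, default keepalive interval.
  bool ok = allocator.SetConfiguration(
      stun_servers, turn_servers, /*candidate_pool_size=*/1,
      webrtc::NO_PRUNE, /*turn_customizer=*/nullptr,
      /*stun_candidate_keepalive_interval=*/std::nullopt);
  (void)ok;  // The call reports whether the configuration was applied.
}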
- virtual void SetVpnList(const std::vector& vpn_list) {} + virtual void SetVpnList(const std::vector& /* vpn_list */) {} std::unique_ptr CreateSession( absl::string_view content_name, @@ -443,15 +435,6 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { const PortAllocatorSession* GetPooledSession( const IceParameters* ice_credentials = nullptr) const; - // After FreezeCandidatePool is called, changing the candidate pool size will - // no longer be allowed, and changing ICE servers will not cause pooled - // sessions to be recreated. - // - // Expected to be called when SetLocalDescription is called on a - // PeerConnection. Can be called safely on any thread as long as not - // simultaneously with SetConfiguration. - void FreezeCandidatePool(); - // Discard any remaining pooled sessions. void DiscardCandidatePool(); @@ -462,6 +445,8 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { // 3. mDNS concealment of private IPs is enabled. Candidate SanitizeCandidate(const Candidate& c) const; + uint64_t ice_tiebreaker() const { return tiebreaker_; } + uint32_t flags() const { CheckRunOnValidThreadIfInitialized(); return flags_; @@ -472,25 +457,6 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { flags_ = flags; } - // These three methods are deprecated. If connections need to go through a - // proxy, the application should create a BasicPortAllocator given a custom - // PacketSocketFactory that creates proxy sockets. - const std::string& user_agent() const { - CheckRunOnValidThreadIfInitialized(); - return agent_; - } - - const rtc::ProxyInfo& proxy() const { - CheckRunOnValidThreadIfInitialized(); - return proxy_; - } - - void set_proxy(absl::string_view agent, const rtc::ProxyInfo& proxy) { - CheckRunOnValidThreadIfInitialized(); - agent_ = std::string(agent); - proxy_ = proxy; - } - // Gets/Sets the port range to use when choosing client ports. int min_port() const { CheckRunOnValidThreadIfInitialized(); @@ -585,12 +551,12 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { return turn_port_prune_policy_ == webrtc::PRUNE_BASED_ON_PRIORITY; } - webrtc::PortPrunePolicy turn_port_prune_policy() const { + PortPrunePolicy turn_port_prune_policy() const { CheckRunOnValidThreadIfInitialized(); return turn_port_prune_policy_; } - webrtc::TurnCustomizer* turn_customizer() { + TurnCustomizer* turn_customizer() { CheckRunOnValidThreadIfInitialized(); return turn_customizer_; } @@ -638,8 +604,6 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { bool initialized_ = false; uint32_t flags_; - std::string agent_; - rtc::ProxyInfo proxy_; int min_port_; int max_port_; int max_ipv6_networks_; @@ -647,23 +611,22 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { bool allow_tcp_listen_; uint32_t candidate_filter_; std::string origin_; - webrtc::SequenceChecker thread_checker_; - webrtc::VpnPreference vpn_preference_ = webrtc::VpnPreference::kDefault; + SequenceChecker thread_checker_; + VpnPreference vpn_preference_ = VpnPreference::kDefault; private: ServerAddresses stun_servers_; std::vector turn_servers_; int candidate_pool_size_ = 0; // Last value passed into SetConfiguration. std::vector> pooled_sessions_; - bool candidate_pool_frozen_ = false; - webrtc::PortPrunePolicy turn_port_prune_policy_ = webrtc::NO_PRUNE; + PortPrunePolicy turn_port_prune_policy_ = webrtc::NO_PRUNE; // Customizer for TURN messages. // The instance is owned by application and will be shared among // all TurnPort(s) created. 
- webrtc::TurnCustomizer* turn_customizer_ = nullptr; + TurnCustomizer* turn_customizer_ = nullptr; - absl::optional stun_candidate_keepalive_interval_; + std::optional stun_candidate_keepalive_interval_; // If true, TakePooledSession() will only return sessions that has same ice // credentials as requested. @@ -678,6 +641,43 @@ class RTC_EXPORT PortAllocator : public sigslot::has_slots<> { uint64_t tiebreaker_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::CF_ALL; +using ::webrtc::CF_HOST; +using ::webrtc::CF_NONE; +using ::webrtc::CF_REFLEXIVE; +using ::webrtc::CF_RELAY; +using ::webrtc::IceRegatheringReason; +using ::webrtc::kDefaultMaxIPv6Networks; +using ::webrtc::kDefaultPortAllocatorFlags; +using ::webrtc::kDefaultStepDelay; +using ::webrtc::kMinimumStepDelay; +using ::webrtc::PortAllocator; +using ::webrtc::PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION; +using ::webrtc::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS; +using ::webrtc::PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE; +using ::webrtc::PORTALLOCATOR_DISABLE_LINK_LOCAL_NETWORKS; +using ::webrtc::PORTALLOCATOR_DISABLE_RELAY; +using ::webrtc::PORTALLOCATOR_DISABLE_STUN; +using ::webrtc::PORTALLOCATOR_DISABLE_TCP; +using ::webrtc::PORTALLOCATOR_DISABLE_UDP; +using ::webrtc::PORTALLOCATOR_DISABLE_UDP_RELAY; +using ::webrtc::PORTALLOCATOR_ENABLE_ANY_ADDRESS_PORTS; +using ::webrtc::PORTALLOCATOR_ENABLE_IPV6; +using ::webrtc::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI; +using ::webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET; +using ::webrtc::PORTALLOCATOR_ENABLE_STUN_RETRANSMIT_ATTRIBUTE; +using ::webrtc::PortAllocatorSession; +using ::webrtc::PortList; +using ::webrtc::RelayCredentials; +using ::webrtc::RelayServerConfig; +using ::webrtc::TlsCertPolicy; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_PORT_ALLOCATOR_H_ diff --git a/p2p/base/port_allocator_unittest.cc b/p2p/base/port_allocator_unittest.cc index 48d0bc8a6e..ec53ea30b4 100644 --- a/p2p/base/port_allocator_unittest.cc +++ b/p2p/base/port_allocator_unittest.cc @@ -11,13 +11,24 @@ #include "p2p/base/port_allocator.h" #include +#include #include "absl/strings/string_view.h" -#include "p2p/base/fake_port_allocator.h" +#include "api/candidate.h" +#include "api/environment/environment_factory.h" +#include "api/transport/enums.h" +#include "p2p/base/port.h" +#include "p2p/base/port_interface.h" +#include "p2p/test/fake_port_allocator.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/virtual_socket_server.h" #include "test/gtest.h" -#include "test/scoped_key_value_config.h" + +using ::webrtc::CreateEnvironment; +using ::webrtc::IceCandidateType; static const char kContentName[] = "test content"; // Based on ICE_UFRAG_LENGTH @@ -26,55 +37,49 @@ static const char kIceUfrag[] = "UF00"; static const char kIcePwd[] = "TESTICEPWD00000000000000"; static const char kTurnUsername[] = "test"; static const char kTurnPassword[] = "test"; -constexpr uint64_t kTiebreakerDefault = 44444; class PortAllocatorTest : public ::testing::Test, public sigslot::has_slots<> { public: PortAllocatorTest() - : vss_(std::make_unique()), + : vss_(std::make_unique()), main_(vss_.get()), - packet_socket_factory_( - std::make_unique(vss_.get())), - 
allocator_(std::make_unique( - rtc::Thread::Current(), - packet_socket_factory_.get(), - &field_trials_)) { - allocator_->SetIceTiebreaker(kTiebreakerDefault); - } + allocator_( + std::make_unique(CreateEnvironment(), + vss_.get())) {} protected: void SetConfigurationWithPoolSize(int candidate_pool_size) { EXPECT_TRUE(allocator_->SetConfiguration( - cricket::ServerAddresses(), std::vector(), + webrtc::ServerAddresses(), std::vector(), candidate_pool_size, webrtc::NO_PRUNE)); } void SetConfigurationWithPoolSizeExpectFailure(int candidate_pool_size) { EXPECT_FALSE(allocator_->SetConfiguration( - cricket::ServerAddresses(), std::vector(), + webrtc::ServerAddresses(), std::vector(), candidate_pool_size, webrtc::NO_PRUNE)); } - std::unique_ptr CreateSession( + std::unique_ptr CreateSession( absl::string_view content_name, int component, absl::string_view ice_ufrag, absl::string_view ice_pwd) { - return std::unique_ptr( - static_cast( + return std::unique_ptr( + static_cast( allocator_ ->CreateSession(content_name, component, ice_ufrag, ice_pwd) .release())); } - const cricket::FakePortAllocatorSession* GetPooledSession() const { - return static_cast( + const webrtc::FakePortAllocatorSession* GetPooledSession() const { + return static_cast( allocator_->GetPooledSession()); } - std::unique_ptr TakePooledSession() { - return std::unique_ptr( - static_cast( + std::unique_ptr TakePooledSession() { + return std::unique_ptr( + static_cast( allocator_->TakePooledSession(kContentName, 0, kIceUfrag, kIcePwd) .release())); } @@ -87,19 +92,17 @@ class PortAllocatorTest : public ::testing::Test, public sigslot::has_slots<> { return count; } - webrtc::test::ScopedKeyValueConfig field_trials_; - std::unique_ptr vss_; - rtc::AutoSocketServerThread main_; - std::unique_ptr packet_socket_factory_; - std::unique_ptr allocator_; - rtc::SocketAddress stun_server_1{"11.11.11.11", 3478}; - rtc::SocketAddress stun_server_2{"22.22.22.22", 3478}; - cricket::RelayServerConfig turn_server_1{"11.11.11.11", 3478, - kTurnUsername, kTurnPassword, - cricket::PROTO_UDP, false}; - cricket::RelayServerConfig turn_server_2{"22.22.22.22", 3478, - kTurnUsername, kTurnPassword, - cricket::PROTO_UDP, false}; + std::unique_ptr vss_; + webrtc::AutoSocketServerThread main_; + std::unique_ptr allocator_; + webrtc::SocketAddress stun_server_1{"11.11.11.11", 3478}; + webrtc::SocketAddress stun_server_2{"22.22.22.22", 3478}; + webrtc::RelayServerConfig turn_server_1{"11.11.11.11", 3478, + kTurnUsername, kTurnPassword, + webrtc::PROTO_UDP, false}; + webrtc::RelayServerConfig turn_server_2{"22.22.22.22", 3478, + kTurnUsername, kTurnPassword, + webrtc::PROTO_UDP, false}; }; TEST_F(PortAllocatorTest, TestDefaults) { @@ -112,10 +115,10 @@ TEST_F(PortAllocatorTest, TestDefaults) { // Call CreateSession and verify that the parameters passed in and the // candidate filter are applied as expected. 
TEST_F(PortAllocatorTest, CreateSession) { - allocator_->SetCandidateFilter(cricket::CF_RELAY); + allocator_->SetCandidateFilter(webrtc::CF_RELAY); auto session = CreateSession(kContentName, 1, kIceUfrag, kIcePwd); ASSERT_NE(nullptr, session); - EXPECT_EQ(cricket::CF_RELAY, session->candidate_filter()); + EXPECT_EQ(webrtc::CF_RELAY, session->candidate_filter()); EXPECT_EQ(kContentName, session->content_name()); EXPECT_EQ(1, session->component()); EXPECT_EQ(kIceUfrag, session->ice_ufrag()); @@ -123,16 +126,16 @@ TEST_F(PortAllocatorTest, CreateSession) { } TEST_F(PortAllocatorTest, SetConfigurationUpdatesIceServers) { - cricket::ServerAddresses stun_servers_1 = {stun_server_1}; - std::vector turn_servers_1 = {turn_server_1}; + webrtc::ServerAddresses stun_servers_1 = {stun_server_1}; + std::vector turn_servers_1 = {turn_server_1}; EXPECT_TRUE(allocator_->SetConfiguration(stun_servers_1, turn_servers_1, 0, webrtc::NO_PRUNE)); EXPECT_EQ(stun_servers_1, allocator_->stun_servers()); EXPECT_EQ(turn_servers_1, allocator_->turn_servers()); // Update with a different set of servers. - cricket::ServerAddresses stun_servers_2 = {stun_server_2}; - std::vector turn_servers_2 = {turn_server_2}; + webrtc::ServerAddresses stun_servers_2 = {stun_server_2}; + std::vector turn_servers_2 = {turn_server_2}; EXPECT_TRUE(allocator_->SetConfiguration(stun_servers_2, turn_servers_2, 0, webrtc::NO_PRUNE)); EXPECT_EQ(stun_servers_2, allocator_->stun_servers()); @@ -150,11 +153,6 @@ TEST_F(PortAllocatorTest, SetConfigurationUpdatesCandidatePoolSize) { EXPECT_EQ(4, allocator_->candidate_pool_size()); } -// A negative pool size should just be treated as zero. -TEST_F(PortAllocatorTest, SetConfigurationWithNegativePoolSizeFails) { - SetConfigurationWithPoolSizeExpectFailure(-1); -} - // Test that if the candidate pool size is nonzero, pooled sessions are // created, and StartGettingPorts is called on them. TEST_F(PortAllocatorTest, SetConfigurationCreatesPooledSessions) { @@ -188,16 +186,16 @@ TEST_F(PortAllocatorTest, SetConfigurationDestroysPooledSessions) { // ones created when the ICE servers change. TEST_F(PortAllocatorTest, SetConfigurationRecreatesPooledSessionsWhenIceServersChange) { - cricket::ServerAddresses stun_servers_1 = {stun_server_1}; - std::vector turn_servers_1 = {turn_server_1}; + webrtc::ServerAddresses stun_servers_1 = {stun_server_1}; + std::vector turn_servers_1 = {turn_server_1}; allocator_->SetConfiguration(stun_servers_1, turn_servers_1, 1, webrtc::NO_PRUNE); EXPECT_EQ(stun_servers_1, allocator_->stun_servers()); EXPECT_EQ(turn_servers_1, allocator_->turn_servers()); // Update with a different set of servers (and also change pool size). - cricket::ServerAddresses stun_servers_2 = {stun_server_2}; - std::vector turn_servers_2 = {turn_server_2}; + webrtc::ServerAddresses stun_servers_2 = {stun_server_2}; + std::vector turn_servers_2 = {turn_server_2}; allocator_->SetConfiguration(stun_servers_2, turn_servers_2, 2, webrtc::NO_PRUNE); EXPECT_EQ(stun_servers_2, allocator_->stun_servers()); @@ -213,33 +211,6 @@ TEST_F(PortAllocatorTest, EXPECT_EQ(0, GetAllPooledSessionsReturnCount()); } -// According to JSEP, after SetLocalDescription, setting different ICE servers -// will not cause the pool to be refilled. This is implemented by the -// PeerConnection calling FreezeCandidatePool when a local description is set. 
-TEST_F(PortAllocatorTest, - SetConfigurationDoesNotRecreatePooledSessionsAfterFreezeCandidatePool) { - cricket::ServerAddresses stun_servers_1 = {stun_server_1}; - std::vector turn_servers_1 = {turn_server_1}; - allocator_->SetConfiguration(stun_servers_1, turn_servers_1, 1, - webrtc::NO_PRUNE); - EXPECT_EQ(stun_servers_1, allocator_->stun_servers()); - EXPECT_EQ(turn_servers_1, allocator_->turn_servers()); - - // Update with a different set of servers, but first freeze the pool. - allocator_->FreezeCandidatePool(); - cricket::ServerAddresses stun_servers_2 = {stun_server_2}; - std::vector turn_servers_2 = {turn_server_2}; - allocator_->SetConfiguration(stun_servers_2, turn_servers_2, 2, - webrtc::NO_PRUNE); - EXPECT_EQ(stun_servers_2, allocator_->stun_servers()); - EXPECT_EQ(turn_servers_2, allocator_->turn_servers()); - auto session = TakePooledSession(); - ASSERT_NE(nullptr, session.get()); - EXPECT_EQ(stun_servers_1, session->stun_servers()); - EXPECT_EQ(turn_servers_1, session->turn_servers()); - EXPECT_EQ(0, GetAllPooledSessionsReturnCount()); -} - TEST_F(PortAllocatorTest, GetPooledSessionReturnsNextSession) { SetConfigurationWithPoolSize(2); auto peeked_session_1 = GetPooledSession(); @@ -258,8 +229,8 @@ TEST_F(PortAllocatorTest, TakePooledSessionUpdatesIceParameters) { auto peeked_session = GetPooledSession(); ASSERT_NE(nullptr, peeked_session); EXPECT_EQ(0, peeked_session->transport_info_update_count()); - std::unique_ptr session( - static_cast( + std::unique_ptr session( + static_cast( allocator_->TakePooledSession(kContentName, 1, kIceUfrag, kIcePwd) .release())); EXPECT_EQ(1, session->transport_info_update_count()); @@ -274,13 +245,13 @@ TEST_F(PortAllocatorTest, TakePooledSessionUpdatesIceParameters) { // session is taken. So a pooled session should gather candidates // unfiltered until it's returned by TakePooledSession. 
TEST_F(PortAllocatorTest, TakePooledSessionUpdatesCandidateFilter) { - allocator_->SetCandidateFilter(cricket::CF_RELAY); + allocator_->SetCandidateFilter(webrtc::CF_RELAY); SetConfigurationWithPoolSize(1); auto peeked_session = GetPooledSession(); ASSERT_NE(nullptr, peeked_session); - EXPECT_EQ(cricket::CF_ALL, peeked_session->candidate_filter()); + EXPECT_EQ(webrtc::CF_ALL, peeked_session->candidate_filter()); auto session = TakePooledSession(); - EXPECT_EQ(cricket::CF_RELAY, session->candidate_filter()); + EXPECT_EQ(webrtc::CF_RELAY, session->candidate_filter()); } // Verify that after DiscardCandidatePool, TakePooledSession doesn't return @@ -322,26 +293,26 @@ const char kIpv4Address[] = "12.34.56.78"; const char kIpv4AddressWithPort[] = "12.34.56.78:443"; TEST_F(PortAllocatorTest, SanitizeEmptyCandidateDefaultConfig) { - cricket::Candidate input; - cricket::Candidate output = allocator_->SanitizeCandidate(input); + webrtc::Candidate input; + webrtc::Candidate output = allocator_->SanitizeCandidate(input); EXPECT_EQ("", output.address().ipaddr().ToString()); } TEST_F(PortAllocatorTest, SanitizeIpv4CandidateDefaultConfig) { - cricket::Candidate input(1, "udp", rtc::SocketAddress(kIpv4Address, 443), 1, - "username", "password", cricket::LOCAL_PORT_TYPE, 1, - "foundation", 1, 1); - cricket::Candidate output = allocator_->SanitizeCandidate(input); + webrtc::Candidate input(1, "udp", webrtc::SocketAddress(kIpv4Address, 443), 1, + "username", "password", IceCandidateType::kHost, 1, + "foundation", 1, 1); + webrtc::Candidate output = allocator_->SanitizeCandidate(input); EXPECT_EQ(kIpv4AddressWithPort, output.address().ToString()); EXPECT_EQ(kIpv4Address, output.address().ipaddr().ToString()); } TEST_F(PortAllocatorTest, SanitizeIpv4CandidateMdnsObfuscationEnabled) { allocator_->SetMdnsObfuscationEnabledForTesting(true); - cricket::Candidate input(1, "udp", rtc::SocketAddress(kIpv4Address, 443), 1, - "username", "password", cricket::LOCAL_PORT_TYPE, 1, - "foundation", 1, 1); - cricket::Candidate output = allocator_->SanitizeCandidate(input); + webrtc::Candidate input(1, "udp", webrtc::SocketAddress(kIpv4Address, 443), 1, + "username", "password", IceCandidateType::kHost, 1, + "foundation", 1, 1); + webrtc::Candidate output = allocator_->SanitizeCandidate(input); EXPECT_NE(kIpv4AddressWithPort, output.address().ToString()); EXPECT_EQ("", output.address().ipaddr().ToString()); } @@ -349,10 +320,10 @@ TEST_F(PortAllocatorTest, SanitizeIpv4CandidateMdnsObfuscationEnabled) { TEST_F(PortAllocatorTest, SanitizePrflxCandidateMdnsObfuscationEnabled) { allocator_->SetMdnsObfuscationEnabledForTesting(true); // Create the candidate from an IP literal. This populates the hostname. - cricket::Candidate input(1, "udp", rtc::SocketAddress(kIpv4Address, 443), 1, - "username", "password", cricket::PRFLX_PORT_TYPE, 1, - "foundation", 1, 1); - cricket::Candidate output = allocator_->SanitizeCandidate(input); + webrtc::Candidate input(1, "udp", webrtc::SocketAddress(kIpv4Address, 443), 1, + "username", "password", IceCandidateType::kPrflx, 1, + "foundation", 1, 1); + webrtc::Candidate output = allocator_->SanitizeCandidate(input); EXPECT_NE(kIpv4AddressWithPort, output.address().ToString()); EXPECT_EQ("", output.address().ipaddr().ToString()); } @@ -361,11 +332,11 @@ TEST_F(PortAllocatorTest, SanitizePrflxCandidateMdnsObfuscationEnabledRelatedAddress) { allocator_->SetMdnsObfuscationEnabledForTesting(true); // Create the candidate from an IP literal. This populates the hostname. 
- cricket::Candidate input(1, "udp", rtc::SocketAddress(kIpv4Address, 443), 1, - "username", "password", cricket::PRFLX_PORT_TYPE, 1, - "foundation", 1, 1); + webrtc::Candidate input(1, "udp", webrtc::SocketAddress(kIpv4Address, 443), 1, + "username", "password", IceCandidateType::kPrflx, 1, + "foundation", 1, 1); - cricket::Candidate output = allocator_->SanitizeCandidate(input); + webrtc::Candidate output = allocator_->SanitizeCandidate(input); EXPECT_NE(kIpv4AddressWithPort, output.address().ToString()); EXPECT_EQ("", output.address().ipaddr().ToString()); EXPECT_NE(kIpv4AddressWithPort, output.related_address().ToString()); @@ -375,12 +346,12 @@ TEST_F(PortAllocatorTest, TEST_F(PortAllocatorTest, SanitizeIpv4NonLiteralMdnsObfuscationEnabled) { // Create the candidate with an empty hostname. allocator_->SetMdnsObfuscationEnabledForTesting(true); - rtc::IPAddress ip; - EXPECT_TRUE(IPFromString(kIpv4Address, &ip)); - cricket::Candidate input(1, "udp", rtc::SocketAddress(ip, 443), 1, "username", - "password", cricket::LOCAL_PORT_TYPE, 1, - "foundation", 1, 1); - cricket::Candidate output = allocator_->SanitizeCandidate(input); + webrtc::IPAddress ip; + EXPECT_TRUE(webrtc::IPFromString(kIpv4Address, &ip)); + webrtc::Candidate input(1, "udp", webrtc::SocketAddress(ip, 443), 1, + "username", "password", IceCandidateType::kHost, 1, + "foundation", 1, 1); + webrtc::Candidate output = allocator_->SanitizeCandidate(input); EXPECT_NE(kIpv4AddressWithPort, output.address().ToString()); EXPECT_EQ("", output.address().ipaddr().ToString()); } diff --git a/p2p/base/port_interface.cc b/p2p/base/port_interface.cc index b07cdf9ee6..b123fd2614 100644 --- a/p2p/base/port_interface.cc +++ b/p2p/base/port_interface.cc @@ -10,14 +10,12 @@ #include "p2p/base/port_interface.h" -#include -#include "absl/strings/string_view.h" -namespace cricket { +namespace webrtc { PortInterface::PortInterface() = default; PortInterface::~PortInterface() = default; -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/port_interface.h b/p2p/base/port_interface.h index 29c2741bab..93a3a252ee 100644 --- a/p2p/base/port_interface.h +++ b/p2p/base/port_interface.h @@ -11,24 +11,29 @@ #ifndef P2P_BASE_PORT_INTERFACE_H_ #define P2P_BASE_PORT_INTERFACE_H_ +#include +#include +#include +#include +#include #include -#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/candidate.h" +#include "api/packet_socket_factory.h" +#include "api/task_queue/task_queue_base.h" #include "p2p/base/transport_description.h" #include "rtc_base/async_packet_socket.h" -#include "rtc_base/callback_list.h" +#include "rtc_base/dscp.h" +#include "rtc_base/network.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/socket.h" #include "rtc_base/socket_address.h" +#include "rtc_base/third_party/sigslot/sigslot.h" -namespace rtc { -class Network; -struct PacketOptions; -} // namespace rtc +namespace webrtc { -namespace cricket { class Connection; class IceMessage; class StunMessage; @@ -49,8 +54,8 @@ class PortInterface { public: virtual ~PortInterface(); - virtual const std::string& Type() const = 0; - virtual const rtc::Network* Network() const = 0; + virtual IceCandidateType Type() const = 0; + virtual const Network* Network() const = 0; // Methods to set/get ICE role and tiebreaker values. virtual void SetIceRole(IceRole role) = 0; @@ -71,7 +76,7 @@ class PortInterface { virtual void PrepareAddress() = 0; // Returns the connection to the given address or NULL if none exists. 
- virtual Connection* GetConnection(const rtc::SocketAddress& remote_addr) = 0; + virtual Connection* GetConnection(const SocketAddress& remote_addr) = 0; // Creates a new connection to the given address. enum CandidateOrigin { ORIGIN_THIS_PORT, ORIGIN_OTHER_PORT, ORIGIN_MESSAGE }; @@ -79,8 +84,8 @@ class PortInterface { CandidateOrigin origin) = 0; // Functions on the underlying socket(s). - virtual int SetOption(rtc::Socket::Option opt, int value) = 0; - virtual int GetOption(rtc::Socket::Option opt, int* value) = 0; + virtual int SetOption(Socket::Option opt, int value) = 0; + virtual int GetOption(Socket::Option opt, int* value) = 0; virtual int GetError() = 0; virtual ProtocolType GetProtocol() const = 0; @@ -91,15 +96,15 @@ class PortInterface { // that of a connection or an address that has sent to us already. virtual int SendTo(const void* data, size_t size, - const rtc::SocketAddress& addr, - const rtc::PacketOptions& options, + const SocketAddress& addr, + const AsyncSocketPacketOptions& options, bool payload) = 0; // Indicates that we received a successful STUN binding request from an // address that doesn't correspond to any current connection. To turn this // into a real connection, call CreateConnection. sigslot::signal6 callback) = 0; + std::function callback) = 0; // Signaled when Port discovers ice role conflict with the peer. sigslot::signal1 SignalRoleConflict; @@ -126,21 +131,92 @@ class PortInterface { // through their respective connection and instead delivers every packet // through this port. virtual void EnablePortPackets() = 0; - sigslot:: - signal4 - SignalReadPacket; + sigslot::signal4 + SignalReadPacket; // Emitted each time a packet is sent on this port. - sigslot::signal1 SignalSentPacket; + sigslot::signal1 SignalSentPacket; virtual std::string ToString() const = 0; - virtual void GetStunStats(absl::optional* stats) = 0; + virtual void GetStunStats(std::optional* stats) = 0; + + // Removes and deletes a connection object. `DestroyConnection` will + // delete the connection object directly whereas `DestroyConnectionAsync` + // defers the `delete` operation to when the call stack has been unwound. + // Async may be needed when deleting a connection object from within a + // callback. + virtual void DestroyConnection(Connection* conn) = 0; + + virtual void DestroyConnectionAsync(Connection* conn) = 0; + + // The thread on which this port performs its I/O. + virtual TaskQueueBase* thread() = 0; + + // The factory used to create the sockets of this port. + virtual PacketSocketFactory* socket_factory() const = 0; + + // Identifies the generation that this port was created in. + virtual uint32_t generation() const = 0; + virtual void set_generation(uint32_t generation) = 0; + virtual bool send_retransmit_count_attribute() const = 0; + // For debugging purposes. + virtual const std::string& content_name() const = 0; + + // Called when the Connection discovers a local peer reflexive candidate. + virtual void AddPrflxCandidate(const Candidate& local) = 0; protected: PortInterface(); + virtual void UpdateNetworkCost() = 0; + + // Returns DSCP value packets generated by the port itself should use. + virtual DiffServCodePoint StunDscpValue() const = 0; + + // If the given data comprises a complete and correct STUN message then the + // return value is true, otherwise false. If the message username corresponds + // with this port's username fragment, msg will contain the parsed STUN + // message. Otherwise, the function may send a STUN response internally. 
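Reviewer note: DestroyConnection() and DestroyConnectionAsync() are promoted to PortInterface so callers holding only the interface can remove connections; the async variant exists for the case called out in the comment above, deleting a connection from inside one of its own callbacks. A hedged sketch of that usage (the free function is hypothetical):

#include "p2p/base/port_interface.h"

// If this function is reached from a callback fired by `conn` itself,
// deleting the connection synchronously would destroy an object whose member
// functions are still on the call stack, so the deferred variant is used.
void RemoveFailedConnection(webrtc::PortInterface* port,
                            webrtc::Connection* conn) {
  port->DestroyConnectionAsync(conn);  // `conn` is deleted after unwinding.
}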
+ // remote_username contains the remote fragment of the STUN username. + virtual bool GetStunMessage(const char* data, + size_t size, + const SocketAddress& addr, + std::unique_ptr* out_msg, + std::string* out_username) = 0; + + // This method will return local and remote username fragments from the + // stun username attribute if present. + virtual bool ParseStunUsername(const StunMessage* stun_msg, + std::string* local_username, + std::string* remote_username) const = 0; + virtual std::string CreateStunUsername( + absl::string_view remote_username) const = 0; + + virtual bool MaybeIceRoleConflict(const SocketAddress& addr, + IceMessage* stun_msg, + absl::string_view remote_ufrag) = 0; + + virtual int16_t network_cost() const = 0; + + // Connection and Port are entangled; functions exposed to Port only + // should not be public. + friend class Connection; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::PortInterface; +using ::webrtc::PROTO_LAST; +using ::webrtc::PROTO_SSLTCP; +using ::webrtc::PROTO_TCP; +using ::webrtc::PROTO_TLS; +using ::webrtc::PROTO_UDP; +using ::webrtc::ProtocolType; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_PORT_INTERFACE_H_ diff --git a/p2p/base/port_unittest.cc b/p2p/base/port_unittest.cc index b27afe2f39..900296a708 100644 --- a/p2p/base/port_unittest.cc +++ b/p2p/base/port_unittest.cc @@ -12,76 +12,93 @@ #include +#include #include #include #include #include +#include #include #include #include #include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/async_dns_resolver.h" #include "api/candidate.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/packet_socket_factory.h" +#include "api/rtc_error.h" +#include "api/test/rtc_error_matchers.h" #include "api/transport/stun.h" #include "api/units/time_delta.h" #include "p2p/base/basic_packet_socket_factory.h" +#include "p2p/base/connection.h" #include "p2p/base/p2p_constants.h" +#include "p2p/base/p2p_transport_channel_ice_field_trials.h" #include "p2p/base/port_allocator.h" #include "p2p/base/port_interface.h" #include "p2p/base/stun_port.h" -#include "p2p/base/stun_server.h" #include "p2p/base/tcp_port.h" -#include "p2p/base/test_stun_server.h" -#include "p2p/base/test_turn_server.h" #include "p2p/base/transport_description.h" #include "p2p/base/turn_port.h" -#include "p2p/base/turn_server.h" #include "p2p/client/relay_port_factory_interface.h" +#include "p2p/test/nat_server.h" +#include "p2p/test/nat_socket_factory.h" +#include "p2p/test/nat_types.h" +#include "p2p/test/stun_server.h" +#include "p2p/test/test_stun_server.h" +#include "p2p/test/test_turn_server.h" +#include "p2p/test/turn_server.h" #include "rtc_base/arraysize.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/buffer.h" #include "rtc_base/byte_buffer.h" #include "rtc_base/checks.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/dscp.h" #include "rtc_base/fake_clock.h" #include "rtc_base/gunit.h" -#include "rtc_base/helpers.h" #include "rtc_base/logging.h" -#include "rtc_base/nat_server.h" -#include "rtc_base/nat_socket_factory.h" -#include "rtc_base/nat_types.h" #include "rtc_base/net_helper.h" #include "rtc_base/network.h"
+#include "rtc_base/network/received_packet.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/network_constants.h" -#include "rtc_base/proxy_info.h" #include "rtc_base/socket.h" -#include "rtc_base/socket_adapters.h" #include "rtc_base/socket_address.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" #include "rtc_base/virtual_socket_server.h" +#include "test/gmock.h" #include "test/gtest.h" #include "test/scoped_key_value_config.h" - -using rtc::AsyncListenSocket; -using rtc::AsyncPacketSocket; -using rtc::ByteBufferReader; -using rtc::ByteBufferWriter; -using rtc::NAT_ADDR_RESTRICTED; -using rtc::NAT_OPEN_CONE; -using rtc::NAT_PORT_RESTRICTED; -using rtc::NAT_SYMMETRIC; -using rtc::NATType; -using rtc::PacketSocketFactory; -using rtc::Socket; -using rtc::SocketAddress; - -namespace cricket { +#include "test/wait_until.h" + +using ::testing::Eq; +using ::testing::IsNull; +using ::testing::IsTrue; +using ::testing::NotNull; +using ::webrtc::AsyncListenSocket; +using webrtc::AsyncPacketSocket; +using webrtc::ByteBufferReader; +using webrtc::ByteBufferWriter; +using ::webrtc::CreateEnvironment; +using ::webrtc::Environment; +using ::webrtc::IceCandidateType; +using ::webrtc::NAT_ADDR_RESTRICTED; +using ::webrtc::NAT_OPEN_CONE; +using ::webrtc::NAT_PORT_RESTRICTED; +using ::webrtc::NAT_SYMMETRIC; +using ::webrtc::NATType; +using webrtc::PacketSocketFactory; +using ::webrtc::Socket; +using ::webrtc::SocketAddress; + +namespace webrtc { namespace { constexpr int kDefaultTimeout = 3000; @@ -90,10 +107,10 @@ constexpr int kMaxExpectedSimulatedRtt = 200; const SocketAddress kLocalAddr1("192.168.1.2", 0); const SocketAddress kLocalAddr2("192.168.1.3", 0); const SocketAddress kLinkLocalIPv6Addr("fe80::aabb:ccff:fedd:eeff", 0); -const SocketAddress kNatAddr1("77.77.77.77", rtc::NAT_SERVER_UDP_PORT); -const SocketAddress kNatAddr2("88.88.88.88", rtc::NAT_SERVER_UDP_PORT); -const SocketAddress kStunAddr("99.99.99.1", STUN_SERVER_PORT); -const SocketAddress kTurnUdpIntAddr("99.99.99.4", STUN_SERVER_PORT); +const SocketAddress kNatAddr1("77.77.77.77", webrtc::NAT_SERVER_UDP_PORT); +const SocketAddress kNatAddr2("88.88.88.88", webrtc::NAT_SERVER_UDP_PORT); +const SocketAddress kStunAddr("99.99.99.1", webrtc::STUN_SERVER_PORT); +const SocketAddress kTurnUdpIntAddr("99.99.99.4", webrtc::STUN_SERVER_PORT); const SocketAddress kTurnTcpIntAddr("99.99.99.4", 5010); const SocketAddress kTurnUdpExtAddr("99.99.99.5", 0); const RelayCredentials kRelayCredentials("test", "test"); @@ -108,7 +125,7 @@ constexpr int kTiebreaker1 = 11111; constexpr int kTiebreaker2 = 22222; constexpr int kTiebreakerDefault = 44444; -const char* data = "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"; +const char* kTestData = "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"; Candidate GetCandidate(Port* port) { RTC_DCHECK_GE(port->Candidates().size(), 1); @@ -138,32 +155,19 @@ bool WriteStunMessage(const StunMessage& msg, ByteBufferWriter* buf) { // Stub port class for testing STUN generation and processing. 
class TestPort : public Port { public: - TestPort(rtc::Thread* thread, - absl::string_view type, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, - uint16_t min_port, - uint16_t max_port, - absl::string_view username_fragment, - absl::string_view password, - const webrtc::FieldTrialsView* field_trials = nullptr) - : Port(thread, - type, - factory, - network, - min_port, - max_port, - username_fragment, - password, - field_trials) {} + TestPort(const PortParametersRef& args, uint16_t min_port, uint16_t max_port) + : Port(args, IceCandidateType::kHost, min_port, max_port) {} ~TestPort() {} // Expose GetStunMessage so that we can test it. - using cricket::Port::GetStunMessage; + using Port::GetStunMessage; // The last StunMessage that was sent on this Port. - // TODO(?): Make these const; requires changes to SendXXXXResponse. - rtc::BufferT* last_stun_buf() { return last_stun_buf_.get(); } + ArrayView last_stun_buf() { + if (!last_stun_buf_) + return ArrayView(); + return *last_stun_buf_; + } IceMessage* last_stun_msg() { return last_stun_msg_.get(); } int last_stun_error_code() { int code = 0; @@ -179,33 +183,33 @@ class TestPort : public Port { virtual void PrepareAddress() { // Act as if the socket was bound to the best IP on the network, to the // first port in the allowed range. - rtc::SocketAddress addr(Network()->GetBestIP(), min_port()); - AddAddress(addr, addr, rtc::SocketAddress(), "udp", "", "", Type(), + SocketAddress addr(Network()->GetBestIP(), min_port()); + AddAddress(addr, addr, SocketAddress(), "udp", "", "", type(), ICE_TYPE_PREFERENCE_HOST, 0, "", true); } - virtual bool SupportsProtocol(absl::string_view protocol) const { + virtual bool SupportsProtocol(absl::string_view /* protocol */) const { return true; } - virtual ProtocolType GetProtocol() const { return PROTO_UDP; } + virtual ProtocolType GetProtocol() const { return webrtc::PROTO_UDP; } // Exposed for testing candidate building. 
- void AddCandidateAddress(const rtc::SocketAddress& addr) { - AddAddress(addr, addr, rtc::SocketAddress(), "udp", "", "", Type(), + void AddCandidateAddress(const SocketAddress& addr) { + AddAddress(addr, addr, SocketAddress(), "udp", "", "", type(), type_preference_, 0, "", false); } - void AddCandidateAddress(const rtc::SocketAddress& addr, - const rtc::SocketAddress& base_address, - absl::string_view type, + void AddCandidateAddress(const SocketAddress& addr, + const SocketAddress& base_address, + IceCandidateType type, int type_preference, bool final) { - AddAddress(addr, base_address, rtc::SocketAddress(), "udp", "", "", type, + AddAddress(addr, base_address, SocketAddress(), "udp", "", "", type, type_preference, 0, "", final); } virtual Connection* CreateConnection(const Candidate& remote_candidate, - CandidateOrigin origin) { + CandidateOrigin /* origin */) { Connection* conn = new ProxyConnection(NewWeakPtr(), 0, remote_candidate); AddOrReplaceConnection(conn); // Set use-candidate attribute flag as this will add USE-CANDIDATE attribute @@ -215,12 +219,12 @@ class TestPort : public Port { } virtual int SendTo(const void* data, size_t size, - const rtc::SocketAddress& addr, - const rtc::PacketOptions& options, + const SocketAddress& /* addr */, + const AsyncSocketPacketOptions& /* options */, bool payload) { if (!payload) { auto msg = std::make_unique(); - auto buf = std::make_unique>( + auto buf = std::make_unique>( static_cast(data), size); ByteBufferReader read_buf(*buf); if (!msg->Read(&read_buf)) { @@ -231,8 +235,8 @@ class TestPort : public Port { } return static_cast(size); } - virtual int SetOption(rtc::Socket::Option opt, int value) { return 0; } - virtual int GetOption(rtc::Socket::Option opt, int* value) { return -1; } + virtual int SetOption(Socket::Option /* opt */, int /* value */) { return 0; } + virtual int GetOption(Socket::Option opt, int* value) { return -1; } virtual int GetError() { return 0; } void Reset() { last_stun_buf_.reset(); @@ -243,31 +247,47 @@ class TestPort : public Port { } private: - void OnSentPacket(rtc::AsyncPacketSocket* socket, - const rtc::SentPacket& sent_packet) { + void OnSentPacket(AsyncPacketSocket* socket, + const SentPacketInfo& sent_packet) { PortInterface::SignalSentPacket(sent_packet); } - std::unique_ptr> last_stun_buf_; + std::unique_ptr> last_stun_buf_; std::unique_ptr last_stun_msg_; int type_preference_ = 0; }; +bool GetStunMessageFromBufferWriter(TestPort* port, + ByteBufferWriter* buf, + const SocketAddress& addr, + std::unique_ptr* out_msg, + std::string* out_username) { + return port->GetStunMessage(reinterpret_cast(buf->Data()), + buf->Length(), addr, out_msg, out_username); +} + static void SendPingAndReceiveResponse(Connection* lconn, TestPort* lport, Connection* rconn, TestPort* rport, - rtc::ScopedFakeClock* clock, + ScopedFakeClock* clock, int64_t ms) { - lconn->Ping(rtc::TimeMillis()); - ASSERT_TRUE_WAIT(lport->last_stun_msg(), kDefaultTimeout); - ASSERT_TRUE(lport->last_stun_buf()); - rconn->OnReadPacket(lport->last_stun_buf()->data(), - lport->last_stun_buf()->size(), /* packet_time_us */ -1); - clock->AdvanceTime(webrtc::TimeDelta::Millis(ms)); - ASSERT_TRUE_WAIT(rport->last_stun_msg(), kDefaultTimeout); - ASSERT_TRUE(rport->last_stun_buf()); - lconn->OnReadPacket(rport->last_stun_buf()->data(), - rport->last_stun_buf()->size(), /* packet_time_us */ -1); + lconn->Ping(webrtc::TimeMillis()); + ASSERT_THAT( + webrtc::WaitUntil([&] { return lport->last_stun_msg(); }, IsTrue(), + {.timeout = 
TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + ASSERT_GT(lport->last_stun_buf().size(), 0u); + rconn->OnReadPacket( + ReceivedIpPacket(lport->last_stun_buf(), SocketAddress(), std::nullopt)); + + clock->AdvanceTime(TimeDelta::Millis(ms)); + ASSERT_THAT( + webrtc::WaitUntil([&] { return rport->last_stun_msg(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + ASSERT_GT(rport->last_stun_buf().size(), 0u); + lconn->OnReadPacket( + ReceivedIpPacket(rport->last_stun_buf(), SocketAddress(), std::nullopt)); } class TestChannel : public sigslot::has_slots<> { @@ -285,15 +305,16 @@ class TestChannel : public sigslot::has_slots<> { int complete_count() { return complete_count_; } Connection* conn() { return conn_; } const SocketAddress& remote_address() { return remote_address_; } - const std::string remote_fragment() { return remote_frag_; } + std::string remote_fragment() { return remote_frag_; } void Start() { port_->PrepareAddress(); } void CreateConnection(const Candidate& remote_candidate) { RTC_DCHECK(!conn_); conn_ = port_->CreateConnection(remote_candidate, Port::ORIGIN_MESSAGE); - IceMode remote_ice_mode = - (ice_mode_ == ICEMODE_FULL) ? ICEMODE_LITE : ICEMODE_FULL; - conn_->set_use_candidate_attr(remote_ice_mode == ICEMODE_FULL); + IceMode remote_ice_mode = (ice_mode_ == webrtc::ICEMODE_FULL) + ? webrtc::ICEMODE_LITE + : webrtc::ICEMODE_FULL; + conn_->set_use_candidate_attr(remote_ice_mode == webrtc::ICEMODE_FULL); conn_->SignalStateChange.connect(this, &TestChannel::OnConnectionStateChange); conn_->SignalDestroyed.connect(this, &TestChannel::OnDestroyed); @@ -334,7 +355,7 @@ class TestChannel : public sigslot::has_slots<> { void SetIceMode(IceMode ice_mode) { ice_mode_ = ice_mode; } int SendData(const char* data, size_t len) { - rtc::PacketOptions options; + AsyncSocketPacketOptions options; return conn_->Send(data, len, options); } @@ -348,11 +369,11 @@ class TestChannel : public sigslot::has_slots<> { if (!remote_address_.IsNil()) { ASSERT_EQ(remote_address_, addr); } - const cricket::StunUInt32Attribute* priority_attr = + const StunUInt32Attribute* priority_attr = msg->GetUInt32(STUN_ATTR_PRIORITY); - const cricket::StunByteStringAttribute* mi_attr = + const StunByteStringAttribute* mi_attr = msg->GetByteString(STUN_ATTR_MESSAGE_INTEGRITY); - const cricket::StunUInt32Attribute* fingerprint_attr = + const StunUInt32Attribute* fingerprint_attr = msg->GetUInt32(STUN_ATTR_FINGERPRINT); EXPECT_TRUE(priority_attr != NULL); EXPECT_TRUE(mi_attr != NULL); @@ -393,7 +414,7 @@ class TestChannel : public sigslot::has_slots<> { connection_ready_to_send_ = true; } - IceMode ice_mode_ = ICEMODE_FULL; + IceMode ice_mode_ = webrtc::ICEMODE_FULL; std::unique_ptr port_; int complete_count_ = 0; @@ -408,17 +429,17 @@ class TestChannel : public sigslot::has_slots<> { class PortTest : public ::testing::Test, public sigslot::has_slots<> { public: PortTest() - : ss_(new rtc::VirtualSocketServer()), + : ss_(new VirtualSocketServer()), main_(ss_.get()), socket_factory_(ss_.get()), nat_factory1_(ss_.get(), kNatAddr1, SocketAddress()), nat_factory2_(ss_.get(), kNatAddr2, SocketAddress()), nat_socket_factory1_(&nat_factory1_), nat_socket_factory2_(&nat_factory2_), - stun_server_(TestStunServer::Create(ss_.get(), kStunAddr)), + stun_server_(TestStunServer::Create(ss_.get(), kStunAddr, main_)), turn_server_(&main_, ss_.get(), kTurnUdpIntAddr, kTurnUdpExtAddr), - username_(rtc::CreateRandomString(ICE_UFRAG_LENGTH)), - 
password_(rtc::CreateRandomString(ICE_PWD_LENGTH)), + username_(webrtc::CreateRandomString(ICE_UFRAG_LENGTH)), + password_(webrtc::CreateRandomString(ICE_PWD_LENGTH)), role_conflict_(false), ports_destroyed_(0) {} @@ -426,7 +447,7 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { // Workaround for tests that trigger async destruction of objects that we // need to give an opportunity here to run, before proceeding with other // teardown. - rtc::Thread::Current()->ProcessMessages(0); + Thread::Current()->ProcessMessages(0); } protected: @@ -434,96 +455,98 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { void TestLocalToLocal() { auto port1 = CreateUdpPort(kLocalAddr1); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); auto port2 = CreateUdpPort(kLocalAddr2); - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); TestConnectivity("udp", std::move(port1), "udp", std::move(port2), true, true, true, true); } void TestLocalToStun(NATType ntype) { auto port1 = CreateUdpPort(kLocalAddr1); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); nat_server2_ = CreateNatServer(kNatAddr2, ntype); auto port2 = CreateStunPort(kLocalAddr2, &nat_socket_factory2_); - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); TestConnectivity("udp", std::move(port1), StunName(ntype), std::move(port2), - ntype == NAT_OPEN_CONE, true, ntype != NAT_SYMMETRIC, - true); + ntype == webrtc::NAT_OPEN_CONE, true, + ntype != webrtc::NAT_SYMMETRIC, true); } void TestLocalToRelay(ProtocolType proto) { auto port1 = CreateUdpPort(kLocalAddr1); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); - auto port2 = CreateRelayPort(kLocalAddr2, proto, PROTO_UDP); - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); + auto port2 = CreateRelayPort(kLocalAddr2, proto, webrtc::PROTO_UDP); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); TestConnectivity("udp", std::move(port1), RelayName(proto), std::move(port2), false, true, true, true); } void TestStunToLocal(NATType ntype) { nat_server1_ = CreateNatServer(kNatAddr1, ntype); auto port1 = CreateStunPort(kLocalAddr1, &nat_socket_factory1_); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); auto port2 = CreateUdpPort(kLocalAddr2); - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); TestConnectivity(StunName(ntype), std::move(port1), "udp", std::move(port2), - true, ntype != NAT_SYMMETRIC, true, true); + true, ntype != webrtc::NAT_SYMMETRIC, true, true); } void TestStunToStun(NATType ntype1, NATType ntype2) { nat_server1_ = CreateNatServer(kNatAddr1, ntype1); auto port1 = CreateStunPort(kLocalAddr1, &nat_socket_factory1_); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); nat_server2_ = CreateNatServer(kNatAddr2, ntype2); auto port2 = CreateStunPort(kLocalAddr2, &nat_socket_factory2_); - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); TestConnectivity(StunName(ntype1), std::move(port1), StunName(ntype2), - std::move(port2), ntype2 == NAT_OPEN_CONE, - ntype1 != NAT_SYMMETRIC, ntype2 != NAT_SYMMETRIC, - ntype1 + ntype2 < (NAT_PORT_RESTRICTED + NAT_SYMMETRIC)); + std::move(port2), ntype2 == webrtc::NAT_OPEN_CONE, + ntype1 != 
webrtc::NAT_SYMMETRIC, + ntype2 != webrtc::NAT_SYMMETRIC, + ntype1 + ntype2 < + (webrtc::NAT_PORT_RESTRICTED + webrtc::NAT_SYMMETRIC)); } void TestStunToRelay(NATType ntype, ProtocolType proto) { nat_server1_ = CreateNatServer(kNatAddr1, ntype); auto port1 = CreateStunPort(kLocalAddr1, &nat_socket_factory1_); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); - auto port2 = CreateRelayPort(kLocalAddr2, proto, PROTO_UDP); - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); + auto port2 = CreateRelayPort(kLocalAddr2, proto, webrtc::PROTO_UDP); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); TestConnectivity(StunName(ntype), std::move(port1), RelayName(proto), - std::move(port2), false, ntype != NAT_SYMMETRIC, true, - true); + std::move(port2), false, ntype != webrtc::NAT_SYMMETRIC, + true, true); } void TestTcpToTcp() { auto port1 = CreateTcpPort(kLocalAddr1); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); auto port2 = CreateTcpPort(kLocalAddr2); - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); TestConnectivity("tcp", std::move(port1), "tcp", std::move(port2), true, false, true, true); } void TestTcpToRelay(ProtocolType proto) { auto port1 = CreateTcpPort(kLocalAddr1); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); - auto port2 = CreateRelayPort(kLocalAddr2, proto, PROTO_TCP); - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); + auto port2 = CreateRelayPort(kLocalAddr2, proto, webrtc::PROTO_TCP); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); TestConnectivity("tcp", std::move(port1), RelayName(proto), std::move(port2), false, false, true, true); } void TestSslTcpToRelay(ProtocolType proto) { auto port1 = CreateTcpPort(kLocalAddr1); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); - auto port2 = CreateRelayPort(kLocalAddr2, proto, PROTO_SSLTCP); - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); + auto port2 = CreateRelayPort(kLocalAddr2, proto, webrtc::PROTO_SSLTCP); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); TestConnectivity("ssltcp", std::move(port1), RelayName(proto), std::move(port2), false, false, true, true); } - rtc::Network* MakeNetwork(const SocketAddress& addr) { + Network* MakeNetwork(const SocketAddress& addr) { networks_.emplace_back("unittest", "unittest", addr.ipaddr(), 32); networks_.back().AddIP(addr.ipaddr()); return &networks_.back(); } - rtc::Network* MakeNetworkMultipleAddrs(const SocketAddress& global_addr, - const SocketAddress& link_local_addr) { + Network* MakeNetworkMultipleAddrs(const SocketAddress& global_addr, + const SocketAddress& link_local_addr) { networks_.emplace_back("unittest", "unittest", global_addr.ipaddr(), 32, - rtc::ADAPTER_TYPE_UNKNOWN); + webrtc::ADAPTER_TYPE_UNKNOWN); networks_.back().AddIP(link_local_addr.ipaddr()); networks_.back().AddIP(global_addr.ipaddr()); networks_.back().AddIP(link_local_addr.ipaddr()); @@ -536,9 +559,13 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { } std::unique_ptr CreateUdpPort(const SocketAddress& addr, PacketSocketFactory* socket_factory) { - auto port = UDPPort::Create(&main_, socket_factory, MakeNetwork(addr), 0, 0, - username_, password_, true, absl::nullopt, - &field_trials_); + auto port = UDPPort::Create({.env = env_, + .network_thread = &main_, + .socket_factory = socket_factory, + .network = MakeNetwork(addr), + 
.ice_username_fragment = username_, + .ice_password = password_}, + 0, 0, true, std::nullopt); port->SetIceTiebreaker(kTiebreakerDefault); return port; } @@ -548,9 +575,13 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { const SocketAddress& link_local_addr, PacketSocketFactory* socket_factory) { auto port = UDPPort::Create( - &main_, socket_factory, - MakeNetworkMultipleAddrs(global_addr, link_local_addr), 0, 0, username_, - password_, true, absl::nullopt, &field_trials_); + {.env = env_, + .network_thread = &main_, + .socket_factory = socket_factory, + .network = MakeNetworkMultipleAddrs(global_addr, link_local_addr), + .ice_username_fragment = username_, + .ice_password = password_}, + 0, 0, true, std::nullopt); port->SetIceTiebreaker(kTiebreakerDefault); return port; } @@ -559,18 +590,28 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { } std::unique_ptr CreateTcpPort(const SocketAddress& addr, PacketSocketFactory* socket_factory) { - auto port = TCPPort::Create(&main_, socket_factory, MakeNetwork(addr), 0, 0, - username_, password_, true, &field_trials_); + auto port = TCPPort::Create({.env = env_, + .network_thread = &main_, + .socket_factory = socket_factory, + .network = MakeNetwork(addr), + .ice_username_fragment = username_, + .ice_password = password_}, + 0, 0, true); port->SetIceTiebreaker(kTiebreakerDefault); return port; } - std::unique_ptr CreateStunPort(const SocketAddress& addr, - rtc::PacketSocketFactory* factory) { + std::unique_ptr CreateStunPort( + const SocketAddress& addr, + PacketSocketFactory* socket_factory) { ServerAddresses stun_servers; stun_servers.insert(kStunAddr); - auto port = StunPort::Create(&main_, factory, MakeNetwork(addr), 0, 0, - username_, password_, stun_servers, - absl::nullopt, &field_trials_); + auto port = StunPort::Create({.env = env_, + .network_thread = &main_, + .socket_factory = socket_factory, + .network = MakeNetwork(addr), + .ice_username_fragment = username_, + .ice_password = password_}, + 0, 0, stun_servers, std::nullopt); port->SetIceTiebreaker(kTiebreakerDefault); return port; } @@ -584,20 +625,19 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { ProtocolType int_proto, ProtocolType ext_proto) { SocketAddress server_addr = - int_proto == PROTO_TCP ? kTurnTcpIntAddr : kTurnUdpIntAddr; + int_proto == webrtc::PROTO_TCP ? 
kTurnTcpIntAddr : kTurnUdpIntAddr; return CreateTurnPort(addr, socket_factory, int_proto, ext_proto, server_addr); } - std::unique_ptr CreateTurnPort( - const SocketAddress& addr, - PacketSocketFactory* socket_factory, - ProtocolType int_proto, - ProtocolType ext_proto, - const rtc::SocketAddress& server_addr) { + std::unique_ptr CreateTurnPort(const SocketAddress& addr, + PacketSocketFactory* socket_factory, + ProtocolType int_proto, + ProtocolType ext_proto, + const SocketAddress& server_addr) { RelayServerConfig config; config.credentials = kRelayCredentials; ProtocolAddress server_address(server_addr, int_proto); - CreateRelayPortArgs args; + CreateRelayPortArgs args = {.env = env_}; args.network_thread = &main_; args.socket_factory = socket_factory; args.network = MakeNetwork(addr); @@ -605,27 +645,26 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { args.password = password_; args.server_address = &server_address; args.config = &config; - args.field_trials = &field_trials_; auto port = TurnPort::Create(args, 0, 0); port->SetIceTiebreaker(kTiebreakerDefault); return port; } - std::unique_ptr CreateNatServer(const SocketAddress& addr, - rtc::NATType type) { - return std::make_unique(type, ss_.get(), addr, addr, - ss_.get(), addr); + std::unique_ptr CreateNatServer(const SocketAddress& addr, + NATType type) { + return std::make_unique(type, main_, ss_.get(), addr, addr, + main_, ss_.get(), addr); } static const char* StunName(NATType type) { switch (type) { - case NAT_OPEN_CONE: + case webrtc::NAT_OPEN_CONE: return "stun(open cone)"; - case NAT_ADDR_RESTRICTED: + case webrtc::NAT_ADDR_RESTRICTED: return "stun(addr restricted)"; - case NAT_PORT_RESTRICTED: + case webrtc::NAT_PORT_RESTRICTED: return "stun(port restricted)"; - case NAT_SYMMETRIC: + case webrtc::NAT_SYMMETRIC: return "stun(symmetric)"; default: return "stun(?)"; @@ -633,13 +672,13 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { } static const char* RelayName(ProtocolType proto) { switch (proto) { - case PROTO_UDP: + case webrtc::PROTO_UDP: return "turn(udp)"; - case PROTO_TCP: + case webrtc::PROTO_TCP: return "turn(tcp)"; - case PROTO_SSLTCP: + case webrtc::PROTO_SSLTCP: return "turn(ssltcp)"; - case PROTO_TLS: + case webrtc::PROTO_TLS: return "turn(tls)"; default: return "turn(?)"; @@ -665,16 +704,21 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { // TCP reconnecting mechanism before entering this function. void ConnectStartedChannels(TestChannel* ch1, TestChannel* ch2) { ASSERT_TRUE(ch1->conn()); - EXPECT_TRUE_WAIT(ch1->conn()->connected(), - kDefaultTimeout); // for TCP connect + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch1->conn()->connected(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // for TCP connect ch1->Ping(); WAIT(!ch2->remote_address().IsNil(), kShortTimeout); // Send a ping from dst to src. ch2->AcceptConnection(GetCandidate(ch1->port())); ch2->Ping(); - EXPECT_EQ_WAIT(Connection::STATE_WRITABLE, ch2->conn()->write_state(), - kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch2->conn()->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); } // This connects and disconnects the provided channels in the same sequence as @@ -703,8 +747,14 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { tcp_conn2->socket()->GetLocalAddress())); // Wait for both OnClose are delivered. 
- EXPECT_TRUE_WAIT(!ch1->conn()->connected(), kDefaultTimeout); - EXPECT_TRUE_WAIT(!ch2->conn()->connected(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return !ch1->conn()->connected(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil([&] { return !ch2->conn()->connected(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Ensure redundant SignalClose events on TcpConnection won't break tcp // reconnection. Chromium will fire SignalClose for all outstanding IPC @@ -715,18 +765,21 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { // Speed up destroying ch2's connection such that the test is ready to // accept a new connection from ch1 before ch1's connection destroys itself. ch2->Stop(); - EXPECT_TRUE_WAIT(ch2->conn() == NULL, kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch2->conn(); }, IsNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); } void TestTcpReconnect(bool ping_after_disconnected, bool send_after_disconnected) { auto port1 = CreateTcpPort(kLocalAddr1); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); auto port2 = CreateTcpPort(kLocalAddr2); - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); - port1->set_component(cricket::ICE_CANDIDATE_COMPONENT_DEFAULT); - port2->set_component(cricket::ICE_CANDIDATE_COMPONENT_DEFAULT); + port1->set_component(ICE_CANDIDATE_COMPONENT_DEFAULT); + port2->set_component(ICE_CANDIDATE_COMPONENT_DEFAULT); // Set up channels and ensure both ports will be deleted. TestChannel ch1(std::move(port1)); @@ -736,8 +789,14 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { ch1.Start(); ch2.Start(); - ASSERT_EQ_WAIT(1, ch1.complete_count(), kDefaultTimeout); - ASSERT_EQ_WAIT(1, ch2.complete_count(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return ch1.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + ASSERT_THAT( + webrtc::WaitUntil([&] { return ch2.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Initial connecting the channel, create connection on channel1. ch1.CreateConnection(GetCandidate(ch2.port())); @@ -760,7 +819,8 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { if (send_after_disconnected) { // First SendData after disconnect should fail but will trigger // reconnect. - EXPECT_EQ(-1, ch1.SendData(data, static_cast(strlen(data)))); + EXPECT_EQ(-1, + ch1.SendData(kTestData, static_cast(strlen(kTestData)))); } if (ping_after_disconnected) { @@ -769,11 +829,17 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { } // Wait for channel's outgoing TCPConnection connected. - EXPECT_TRUE_WAIT(ch1.conn()->connected(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch1.conn()->connected(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Verify that we could still connect channels. 
ConnectStartedChannels(&ch1, &ch2); - EXPECT_TRUE_WAIT(ch1.connection_ready_to_send(), kTcpReconnectTimeout); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return ch1.connection_ready_to_send(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kTcpReconnectTimeout)}), + webrtc::IsRtcOk()); // Channel2 is the passive one so a new connection is created during // reconnect. This new connection should never have issued ENOTCONN // hence the connection_ready_to_send() should be false. @@ -782,15 +848,25 @@ EXPECT_EQ(ch1.conn()->write_state(), Connection::STATE_WRITABLE); // Since the reconnection never happens, the connections should have been // destroyed after the timeout. - EXPECT_TRUE_WAIT(!ch1.conn(), kTcpReconnectTimeout + kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return !ch1.conn(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kTcpReconnectTimeout + + kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_TRUE(!ch2.conn()); } // Tear down and ensure that goes smoothly. ch1.Stop(); ch2.Stop(); - EXPECT_TRUE_WAIT(ch1.conn() == NULL, kDefaultTimeout); - EXPECT_TRUE_WAIT(ch2.conn() == NULL, kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch1.conn(); }, IsNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch2.conn(); }, IsNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); } std::unique_ptr CreateStunMessage(StunMessageType type) { @@ -806,32 +882,41 @@ return msg; } std::unique_ptr CreateTestPort( - const rtc::SocketAddress& addr, + const SocketAddress& addr, absl::string_view username, absl::string_view password, - const webrtc::FieldTrialsView* field_trials = nullptr) { - auto port = std::make_unique(&main_, "test", &socket_factory_, - MakeNetwork(addr), 0, 0, username, - password, field_trials); + const FieldTrialsView* field_trials = nullptr) { + Port::PortParametersRef args = {.env = CreateEnvironment(field_trials), + .network_thread = &main_, + .socket_factory = &socket_factory_, + .network = MakeNetwork(addr), + .ice_username_fragment = username, + .ice_password = password}; + auto port = std::make_unique(args, 0, 0); port->SignalRoleConflict.connect(this, &PortTest::OnRoleConflict); return port; } - std::unique_ptr CreateTestPort(const rtc::SocketAddress& addr, + std::unique_ptr CreateTestPort(const SocketAddress& addr, absl::string_view username, absl::string_view password, - cricket::IceRole role, + IceRole role, int tiebreaker) { auto port = CreateTestPort(addr, username, password); port->SetIceRole(role); port->SetIceTiebreaker(tiebreaker); return port; } - // Overload to create a test port given an rtc::Network directly. - std::unique_ptr CreateTestPort(const rtc::Network* network, + // Overload to create a test port given a webrtc::Network directly.
+ std::unique_ptr CreateTestPort(const Network* network, absl::string_view username, absl::string_view password) { - auto port = std::make_unique(&main_, "test", &socket_factory_, - network, 0, 0, username, password); + Port::PortParametersRef args = {.env = env_, + .network_thread = &main_, + .socket_factory = &socket_factory_, + .network = network, + .ice_username_fragment = username, + .ice_password = password}; + auto port = std::make_unique(args, 0, 0); port->SignalRoleConflict.connect(this, &PortTest::OnRoleConflict); return port; } @@ -847,33 +932,33 @@ class PortTest : public ::testing::Test, public sigslot::has_slots<> { void OnDestroyed(PortInterface* port) { ++ports_destroyed_; } int ports_destroyed() const { return ports_destroyed_; } - rtc::BasicPacketSocketFactory* nat_socket_factory1() { + BasicPacketSocketFactory* nat_socket_factory1() { return &nat_socket_factory1_; } - rtc::VirtualSocketServer* vss() { return ss_.get(); } + VirtualSocketServer* vss() { return ss_.get(); } private: + const Environment env_ = CreateEnvironment(); // When a "create port" helper method is called with an IP, we create a // Network with that IP and add it to this list. Using a list instead of a // vector so that when it grows, pointers aren't invalidated. - std::list networks_; - std::unique_ptr ss_; - rtc::AutoSocketServerThread main_; - rtc::BasicPacketSocketFactory socket_factory_; - std::unique_ptr nat_server1_; - std::unique_ptr nat_server2_; - rtc::NATSocketFactory nat_factory1_; - rtc::NATSocketFactory nat_factory2_; - rtc::BasicPacketSocketFactory nat_socket_factory1_; - rtc::BasicPacketSocketFactory nat_socket_factory2_; - std::unique_ptr stun_server_; + std::list networks_; + std::unique_ptr ss_; + AutoSocketServerThread main_; + BasicPacketSocketFactory socket_factory_; + std::unique_ptr nat_server1_; + std::unique_ptr nat_server2_; + NATSocketFactory nat_factory1_; + NATSocketFactory nat_factory2_; + BasicPacketSocketFactory nat_socket_factory1_; + BasicPacketSocketFactory nat_socket_factory2_; + TestStunServer::StunServerPtr stun_server_; TestTurnServer turn_server_; std::string username_; std::string password_; bool role_conflict_; int ports_destroyed_; - webrtc::test::ScopedKeyValueConfig field_trials_; }; void PortTest::TestConnectivity(absl::string_view name1, @@ -884,10 +969,10 @@ void PortTest::TestConnectivity(absl::string_view name1, bool same_addr1, bool same_addr2, bool possible) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; RTC_LOG(LS_INFO) << "Test: " << name1 << " to " << name2 << ": "; - port1->set_component(cricket::ICE_CANDIDATE_COMPONENT_DEFAULT); - port2->set_component(cricket::ICE_CANDIDATE_COMPONENT_DEFAULT); + port1->set_component(ICE_CANDIDATE_COMPONENT_DEFAULT); + port2->set_component(ICE_CANDIDATE_COMPONENT_DEFAULT); // Set up channels and ensure both ports will be deleted. TestChannel ch1(std::move(port1)); @@ -898,14 +983,23 @@ void PortTest::TestConnectivity(absl::string_view name1, // Acquire addresses. 
ch1.Start(); ch2.Start(); - ASSERT_EQ_SIMULATED_WAIT(1, ch1.complete_count(), kDefaultTimeout, clock); - ASSERT_EQ_SIMULATED_WAIT(1, ch2.complete_count(), kDefaultTimeout, clock); + ASSERT_THAT(webrtc::WaitUntil([&] { return ch1.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); + ASSERT_THAT(webrtc::WaitUntil([&] { return ch2.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); // Send a ping from src to dst. This may or may not make it. ch1.CreateConnection(GetCandidate(ch2.port())); ASSERT_TRUE(ch1.conn() != NULL); - EXPECT_TRUE_SIMULATED_WAIT(ch1.conn()->connected(), kDefaultTimeout, - clock); // for TCP connect + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ch1.conn()->connected(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // for TCP connect ch1.Ping(); SIMULATED_WAIT(!ch2.remote_address().IsNil(), kShortTimeout, clock); @@ -925,8 +1019,12 @@ void PortTest::TestConnectivity(absl::string_view name1, ch2.AcceptConnection(GetCandidate(ch1.port())); ASSERT_TRUE(ch2.conn() != NULL); ch2.Ping(); - EXPECT_EQ_SIMULATED_WAIT(Connection::STATE_WRITABLE, - ch2.conn()->write_state(), kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ch2.conn()->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); } else { // We can't send a ping from src to dst, so flip it around. This will happen // when the destination NAT is addr/port restricted or symmetric. @@ -949,9 +1047,12 @@ void PortTest::TestConnectivity(absl::string_view name1, // through. So we will have to do another. if (ch1.conn()->write_state() == Connection::STATE_WRITE_INIT) { ch1.Ping(); - EXPECT_EQ_SIMULATED_WAIT(Connection::STATE_WRITABLE, - ch1.conn()->write_state(), kDefaultTimeout, - clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch1.conn()->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); } } else if (!same_addr1 && possible) { // The new ping went to the candidate address, but that address was bad. @@ -962,8 +1063,11 @@ void PortTest::TestConnectivity(absl::string_view name1, // However, since we have now sent a ping to the source IP, we should be // able to get a ping from it. This gives us the real source address. ch1.Ping(); - EXPECT_TRUE_SIMULATED_WAIT(!ch2.remote_address().IsNil(), kDefaultTimeout, - clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return !ch2.remote_address().IsNil(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); EXPECT_FALSE(ch2.conn()->receiving()); EXPECT_TRUE(ch1.remote_address().IsNil()); @@ -971,9 +1075,12 @@ void PortTest::TestConnectivity(absl::string_view name1, ch2.AcceptConnection(GetCandidate(ch1.port())); ASSERT_TRUE(ch2.conn() != NULL); ch2.Ping(); - EXPECT_EQ_SIMULATED_WAIT(Connection::STATE_WRITABLE, - ch2.conn()->write_state(), kDefaultTimeout, - clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ch2.conn()->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); } else if (!same_addr2 && possible) { // The new ping came in, but from an unexpected address. This will happen // when the destination NAT is symmetric. 
@@ -983,9 +1090,12 @@ void PortTest::TestConnectivity(absl::string_view name1, // Update our address and complete the connection. ch1.AcceptConnection(GetCandidate(ch2.port())); ch1.Ping(); - EXPECT_EQ_SIMULATED_WAIT(Connection::STATE_WRITABLE, - ch1.conn()->write_state(), kDefaultTimeout, - clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ch1.conn()->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); } else { // (!possible) // There should be s no way for the pings to reach each other. Check it. EXPECT_TRUE(ch1.remote_address().IsNil()); @@ -1015,11 +1125,17 @@ void PortTest::TestConnectivity(absl::string_view name1, // Tear down and ensure that goes smoothly. ch1.Stop(); ch2.Stop(); - EXPECT_TRUE_SIMULATED_WAIT(ch1.conn() == NULL, kDefaultTimeout, clock); - EXPECT_TRUE_SIMULATED_WAIT(ch2.conn() == NULL, kDefaultTimeout, clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return ch1.conn(); }, IsNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); + EXPECT_THAT(webrtc::WaitUntil([&] { return ch2.conn(); }, IsNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); } -class FakePacketSocketFactory : public rtc::PacketSocketFactory { +class FakePacketSocketFactory : public PacketSocketFactory { public: FakePacketSocketFactory() : next_udp_socket_(NULL), next_server_tcp_socket_(NULL) {} @@ -1047,9 +1163,7 @@ class FakePacketSocketFactory : public rtc::PacketSocketFactory { AsyncPacketSocket* CreateClientTcpSocket( const SocketAddress& local_address, const SocketAddress& remote_address, - const rtc::ProxyInfo& proxy_info, - const std::string& user_agent, - const rtc::PacketSocketTcpOptions& opts) override { + const PacketSocketTcpOptions& opts) override { EXPECT_TRUE(next_client_tcp_socket_.has_value()); AsyncPacketSocket* result = *next_client_tcp_socket_; next_client_tcp_socket_ = nullptr; @@ -1065,15 +1179,14 @@ class FakePacketSocketFactory : public rtc::PacketSocketFactory { void set_next_client_tcp_socket(AsyncPacketSocket* next_client_tcp_socket) { next_client_tcp_socket_ = next_client_tcp_socket; } - std::unique_ptr CreateAsyncDnsResolver() - override { + std::unique_ptr CreateAsyncDnsResolver() override { return nullptr; } private: AsyncPacketSocket* next_udp_socket_; AsyncListenSocket* next_server_tcp_socket_; - absl::optional next_client_tcp_socket_; + std::optional next_client_tcp_socket_; }; class FakeAsyncPacketSocket : public AsyncPacketSocket { @@ -1088,7 +1201,7 @@ class FakeAsyncPacketSocket : public AsyncPacketSocket { // Send a packet. 
virtual int Send(const void* pv, size_t cb, - const rtc::PacketOptions& options) { + const AsyncSocketPacketOptions& options) { if (error_ == 0) { return static_cast(cb); } else { @@ -1098,7 +1211,7 @@ class FakeAsyncPacketSocket : public AsyncPacketSocket { virtual int SendTo(const void* pv, size_t cb, const SocketAddress& addr, - const rtc::PacketOptions& options) { + const AsyncSocketPacketOptions& options) { if (error_ == 0) { return static_cast(cb); } else { @@ -1147,127 +1260,127 @@ TEST_F(PortTest, TestLocalToLocal) { } TEST_F(PortTest, TestLocalToConeNat) { - TestLocalToStun(NAT_OPEN_CONE); + TestLocalToStun(webrtc::NAT_OPEN_CONE); } TEST_F(PortTest, TestLocalToARNat) { - TestLocalToStun(NAT_ADDR_RESTRICTED); + TestLocalToStun(webrtc::NAT_ADDR_RESTRICTED); } TEST_F(PortTest, TestLocalToPRNat) { - TestLocalToStun(NAT_PORT_RESTRICTED); + TestLocalToStun(webrtc::NAT_PORT_RESTRICTED); } TEST_F(PortTest, TestLocalToSymNat) { - TestLocalToStun(NAT_SYMMETRIC); + TestLocalToStun(webrtc::NAT_SYMMETRIC); } // Flaky: https://code.google.com/p/webrtc/issues/detail?id=3316. TEST_F(PortTest, DISABLED_TestLocalToTurn) { - TestLocalToRelay(PROTO_UDP); + TestLocalToRelay(webrtc::PROTO_UDP); } // Cone NAT -> XXXX TEST_F(PortTest, TestConeNatToLocal) { - TestStunToLocal(NAT_OPEN_CONE); + TestStunToLocal(webrtc::NAT_OPEN_CONE); } TEST_F(PortTest, TestConeNatToConeNat) { - TestStunToStun(NAT_OPEN_CONE, NAT_OPEN_CONE); + TestStunToStun(webrtc::NAT_OPEN_CONE, webrtc::NAT_OPEN_CONE); } TEST_F(PortTest, TestConeNatToARNat) { - TestStunToStun(NAT_OPEN_CONE, NAT_ADDR_RESTRICTED); + TestStunToStun(webrtc::NAT_OPEN_CONE, webrtc::NAT_ADDR_RESTRICTED); } TEST_F(PortTest, TestConeNatToPRNat) { - TestStunToStun(NAT_OPEN_CONE, NAT_PORT_RESTRICTED); + TestStunToStun(webrtc::NAT_OPEN_CONE, webrtc::NAT_PORT_RESTRICTED); } TEST_F(PortTest, TestConeNatToSymNat) { - TestStunToStun(NAT_OPEN_CONE, NAT_SYMMETRIC); + TestStunToStun(webrtc::NAT_OPEN_CONE, webrtc::NAT_SYMMETRIC); } TEST_F(PortTest, TestConeNatToTurn) { - TestStunToRelay(NAT_OPEN_CONE, PROTO_UDP); + TestStunToRelay(webrtc::NAT_OPEN_CONE, webrtc::PROTO_UDP); } // Address-restricted NAT -> XXXX TEST_F(PortTest, TestARNatToLocal) { - TestStunToLocal(NAT_ADDR_RESTRICTED); + TestStunToLocal(webrtc::NAT_ADDR_RESTRICTED); } TEST_F(PortTest, TestARNatToConeNat) { - TestStunToStun(NAT_ADDR_RESTRICTED, NAT_OPEN_CONE); + TestStunToStun(webrtc::NAT_ADDR_RESTRICTED, webrtc::NAT_OPEN_CONE); } TEST_F(PortTest, TestARNatToARNat) { - TestStunToStun(NAT_ADDR_RESTRICTED, NAT_ADDR_RESTRICTED); + TestStunToStun(webrtc::NAT_ADDR_RESTRICTED, webrtc::NAT_ADDR_RESTRICTED); } TEST_F(PortTest, TestARNatToPRNat) { - TestStunToStun(NAT_ADDR_RESTRICTED, NAT_PORT_RESTRICTED); + TestStunToStun(webrtc::NAT_ADDR_RESTRICTED, webrtc::NAT_PORT_RESTRICTED); } TEST_F(PortTest, TestARNatToSymNat) { - TestStunToStun(NAT_ADDR_RESTRICTED, NAT_SYMMETRIC); + TestStunToStun(webrtc::NAT_ADDR_RESTRICTED, webrtc::NAT_SYMMETRIC); } TEST_F(PortTest, TestARNatToTurn) { - TestStunToRelay(NAT_ADDR_RESTRICTED, PROTO_UDP); + TestStunToRelay(webrtc::NAT_ADDR_RESTRICTED, webrtc::PROTO_UDP); } // Port-restricted NAT -> XXXX TEST_F(PortTest, TestPRNatToLocal) { - TestStunToLocal(NAT_PORT_RESTRICTED); + TestStunToLocal(webrtc::NAT_PORT_RESTRICTED); } TEST_F(PortTest, TestPRNatToConeNat) { - TestStunToStun(NAT_PORT_RESTRICTED, NAT_OPEN_CONE); + TestStunToStun(webrtc::NAT_PORT_RESTRICTED, webrtc::NAT_OPEN_CONE); } TEST_F(PortTest, TestPRNatToARNat) { - TestStunToStun(NAT_PORT_RESTRICTED, NAT_ADDR_RESTRICTED); + 
TestStunToStun(webrtc::NAT_PORT_RESTRICTED, webrtc::NAT_ADDR_RESTRICTED); } TEST_F(PortTest, TestPRNatToPRNat) { - TestStunToStun(NAT_PORT_RESTRICTED, NAT_PORT_RESTRICTED); + TestStunToStun(webrtc::NAT_PORT_RESTRICTED, webrtc::NAT_PORT_RESTRICTED); } TEST_F(PortTest, TestPRNatToSymNat) { // Will "fail" - TestStunToStun(NAT_PORT_RESTRICTED, NAT_SYMMETRIC); + TestStunToStun(webrtc::NAT_PORT_RESTRICTED, webrtc::NAT_SYMMETRIC); } TEST_F(PortTest, TestPRNatToTurn) { - TestStunToRelay(NAT_PORT_RESTRICTED, PROTO_UDP); + TestStunToRelay(webrtc::NAT_PORT_RESTRICTED, webrtc::PROTO_UDP); } // Symmetric NAT -> XXXX TEST_F(PortTest, TestSymNatToLocal) { - TestStunToLocal(NAT_SYMMETRIC); + TestStunToLocal(webrtc::NAT_SYMMETRIC); } TEST_F(PortTest, TestSymNatToConeNat) { - TestStunToStun(NAT_SYMMETRIC, NAT_OPEN_CONE); + TestStunToStun(webrtc::NAT_SYMMETRIC, webrtc::NAT_OPEN_CONE); } TEST_F(PortTest, TestSymNatToARNat) { - TestStunToStun(NAT_SYMMETRIC, NAT_ADDR_RESTRICTED); + TestStunToStun(webrtc::NAT_SYMMETRIC, webrtc::NAT_ADDR_RESTRICTED); } TEST_F(PortTest, TestSymNatToPRNat) { // Will "fail" - TestStunToStun(NAT_SYMMETRIC, NAT_PORT_RESTRICTED); + TestStunToStun(webrtc::NAT_SYMMETRIC, webrtc::NAT_PORT_RESTRICTED); } TEST_F(PortTest, TestSymNatToSymNat) { // Will "fail" - TestStunToStun(NAT_SYMMETRIC, NAT_SYMMETRIC); + TestStunToStun(webrtc::NAT_SYMMETRIC, webrtc::NAT_SYMMETRIC); } TEST_F(PortTest, TestSymNatToTurn) { - TestStunToRelay(NAT_SYMMETRIC, PROTO_UDP); + TestStunToRelay(webrtc::NAT_SYMMETRIC, webrtc::PROTO_UDP); } // Outbound TCP -> XXXX @@ -1291,17 +1404,20 @@ TEST_F(PortTest, TestTcpReconnectTimeout) { // destroy the connection. TEST_F(PortTest, TestTcpNeverConnect) { auto port1 = CreateTcpPort(kLocalAddr1); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); - port1->set_component(cricket::ICE_CANDIDATE_COMPONENT_DEFAULT); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); + port1->set_component(ICE_CANDIDATE_COMPONENT_DEFAULT); // Set up a channel and ensure the port will be deleted. TestChannel ch1(std::move(port1)); EXPECT_EQ(0, ch1.complete_count()); ch1.Start(); - ASSERT_EQ_WAIT(1, ch1.complete_count(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return ch1.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); - std::unique_ptr server( + std::unique_ptr server( vss()->CreateSocket(kLocalAddr2.family(), SOCK_STREAM)); // Bind but not listen. EXPECT_EQ(0, server->Bind(kLocalAddr2)); @@ -1311,7 +1427,10 @@ TEST_F(PortTest, TestTcpNeverConnect) { ch1.CreateConnection(c); EXPECT_TRUE(ch1.conn()); - EXPECT_TRUE_WAIT(!ch1.conn(), kDefaultTimeout); // for TCP connect + EXPECT_THAT( + webrtc::WaitUntil([&] { return !ch1.conn(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // for TCP connect } /* TODO(?): Enable these once testrelayserver can accept external TCP. @@ -1346,43 +1465,55 @@ TEST_F(PortTest, TestConnectionDead) { // Acquire address. ch1.Start(); ch2.Start(); - ASSERT_EQ_WAIT(1, ch1.complete_count(), kDefaultTimeout); - ASSERT_EQ_WAIT(1, ch2.complete_count(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return ch1.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + ASSERT_THAT( + webrtc::WaitUntil([&] { return ch2.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Test case that the connection has never received anything. 
- int64_t before_created = rtc::TimeMillis(); + int64_t before_created = webrtc::TimeMillis(); ch1.CreateConnection(GetCandidate(ch2.port())); - int64_t after_created = rtc::TimeMillis(); + int64_t after_created = webrtc::TimeMillis(); Connection* conn = ch1.conn(); ASSERT_NE(conn, nullptr); // It is not dead if it is after MIN_CONNECTION_LIFETIME but not pruned. conn->UpdateState(after_created + MIN_CONNECTION_LIFETIME + 1); - rtc::Thread::Current()->ProcessMessages(0); + Thread::Current()->ProcessMessages(0); EXPECT_TRUE(ch1.conn() != nullptr); // It is not dead if it is before MIN_CONNECTION_LIFETIME and pruned. conn->UpdateState(before_created + MIN_CONNECTION_LIFETIME - 1); conn->Prune(); - rtc::Thread::Current()->ProcessMessages(0); + Thread::Current()->ProcessMessages(0); EXPECT_TRUE(ch1.conn() != nullptr); // It will be dead after MIN_CONNECTION_LIFETIME and pruned. conn->UpdateState(after_created + MIN_CONNECTION_LIFETIME + 1); - EXPECT_TRUE_WAIT(ch1.conn() == nullptr, kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch1.conn(); }, Eq(nullptr), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Test case that the connection has received something. // Create a connection again and receive a ping. ch1.CreateConnection(GetCandidate(ch2.port())); conn = ch1.conn(); ASSERT_NE(conn, nullptr); - int64_t before_last_receiving = rtc::TimeMillis(); + int64_t before_last_receiving = webrtc::TimeMillis(); conn->ReceivedPing(); - int64_t after_last_receiving = rtc::TimeMillis(); + int64_t after_last_receiving = webrtc::TimeMillis(); // The connection will be dead after DEAD_CONNECTION_RECEIVE_TIMEOUT conn->UpdateState(before_last_receiving + DEAD_CONNECTION_RECEIVE_TIMEOUT - 1); - rtc::Thread::Current()->ProcessMessages(100); + Thread::Current()->ProcessMessages(100); EXPECT_TRUE(ch1.conn() != nullptr); conn->UpdateState(after_last_receiving + DEAD_CONNECTION_RECEIVE_TIMEOUT + 1); - EXPECT_TRUE_WAIT(ch1.conn() == nullptr, kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch1.conn(); }, Eq(nullptr), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); } TEST_F(PortTest, TestConnectionDeadWithDeadConnectionTimeout) { @@ -1391,8 +1522,14 @@ TEST_F(PortTest, TestConnectionDeadWithDeadConnectionTimeout) { // Acquire address. ch1.Start(); ch2.Start(); - ASSERT_EQ_WAIT(1, ch1.complete_count(), kDefaultTimeout); - ASSERT_EQ_WAIT(1, ch2.complete_count(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return ch1.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + ASSERT_THAT( + webrtc::WaitUntil([&] { return ch2.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Note: set field trials manually since they are parsed by // P2PTransportChannel but P2PTransportChannel is not used in this test. 
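The wait-macro conversions running through these port_unittest.cc hunks all follow one pattern: the legacy gunit macros (ASSERT_EQ_WAIT, EXPECT_TRUE_WAIT and their *_SIMULATED_WAIT variants) become webrtc::WaitUntil calls checked through gMock, polling a lambda against a matcher with an explicit TimeDelta timeout and, for the simulated-clock variants, a fake clock. A minimal sketch of the two shapes, using only the helpers this file already includes (test/wait_until.h, api/test/rtc_error_matchers.h, test/gmock.h); ch1, clock and kDefaultTimeout stand in for the fixture members used in the surrounding tests:

  // Old style: poll until the expression equals the expected value, with a
  // plain integer millisecond timeout.
  //   ASSERT_EQ_WAIT(1, ch1.complete_count(), kDefaultTimeout);
  //
  // New style: WaitUntil polls the lambda until the matcher is satisfied or
  // the timeout expires; the returned result is checked with IsRtcOk().
  ASSERT_THAT(
      webrtc::WaitUntil([&] { return ch1.complete_count(); }, ::testing::Eq(1),
                        {.timeout = webrtc::TimeDelta::Millis(kDefaultTimeout)}),
      webrtc::IsRtcOk());

  // Simulated-clock variant: passing the fake clock in the options struct
  // replaces the *_SIMULATED_WAIT macros and advances virtual time instead
  // of sleeping.
  ASSERT_THAT(
      webrtc::WaitUntil([&] { return ch1.complete_count(); }, ::testing::Eq(1),
                        {.timeout = webrtc::TimeDelta::Millis(kDefaultTimeout),
                         .clock = &clock}),
      webrtc::IsRtcOk());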
@@ -1405,23 +1542,26 @@ TEST_F(PortTest, TestConnectionDeadWithDeadConnectionTimeout) { conn->SetIceFieldTrials(&field_trials); ASSERT_NE(conn, nullptr); - int64_t before_last_receiving = rtc::TimeMillis(); + int64_t before_last_receiving = webrtc::TimeMillis(); conn->ReceivedPing(); - int64_t after_last_receiving = rtc::TimeMillis(); + int64_t after_last_receiving = webrtc::TimeMillis(); // The connection will be dead after 90s conn->UpdateState(before_last_receiving + 90000 - 1); - rtc::Thread::Current()->ProcessMessages(100); + Thread::Current()->ProcessMessages(100); EXPECT_TRUE(ch1.conn() != nullptr); conn->UpdateState(after_last_receiving + 90000 + 1); - EXPECT_TRUE_WAIT(ch1.conn() == nullptr, kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch1.conn(); }, Eq(nullptr), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); } TEST_F(PortTest, TestConnectionDeadOutstandingPing) { auto port1 = CreateUdpPort(kLocalAddr1); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); port1->SetIceTiebreaker(kTiebreaker1); auto port2 = CreateUdpPort(kLocalAddr2); - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); port2->SetIceTiebreaker(kTiebreaker2); TestChannel ch1(std::move(port1)); @@ -1429,8 +1569,14 @@ TEST_F(PortTest, TestConnectionDeadOutstandingPing) { // Acquire address. ch1.Start(); ch2.Start(); - ASSERT_EQ_WAIT(1, ch1.complete_count(), kDefaultTimeout); - ASSERT_EQ_WAIT(1, ch2.complete_count(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return ch1.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + ASSERT_THAT( + webrtc::WaitUntil([&] { return ch2.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Note: set field trials manually since they are parsed by // P2PTransportChannel but P2PTransportChannel is not used in this test. @@ -1445,15 +1591,18 @@ TEST_F(PortTest, TestConnectionDeadOutstandingPing) { ASSERT_NE(conn, nullptr); conn->ReceivedPing(); - int64_t send_ping_timestamp = rtc::TimeMillis(); + int64_t send_ping_timestamp = webrtc::TimeMillis(); conn->Ping(send_ping_timestamp); // The connection will be dead 30s after the ping was sent. conn->UpdateState(send_ping_timestamp + DEAD_CONNECTION_RECEIVE_TIMEOUT - 1); - rtc::Thread::Current()->ProcessMessages(100); + Thread::Current()->ProcessMessages(100); EXPECT_TRUE(ch1.conn() != nullptr); conn->UpdateState(send_ping_timestamp + DEAD_CONNECTION_RECEIVE_TIMEOUT + 1); - EXPECT_TRUE_WAIT(ch1.conn() == nullptr, kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch1.conn(); }, Eq(nullptr), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); } // This test case verifies standard ICE features in STUN messages. Currently it @@ -1461,10 +1610,10 @@ TEST_F(PortTest, TestConnectionDeadOutstandingPing) { // binding request will have colon (":") between remote and local username. TEST_F(PortTest, TestLocalToLocalStandard) { auto port1 = CreateUdpPort(kLocalAddr1); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); port1->SetIceTiebreaker(kTiebreaker1); auto port2 = CreateUdpPort(kLocalAddr2); - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); port2->SetIceTiebreaker(kTiebreaker2); // Same parameters as TestLocalToLocal above. 
TestConnectivity("udp", std::move(port1), "udp", std::move(port2), true, true, @@ -1477,7 +1626,7 @@ TEST_F(PortTest, TestLocalToLocalStandard) { // must be in controlling. TEST_F(PortTest, TestLoopbackCall) { auto lport = CreateTestPort(kLocalAddr1, "lfrag", "lpass"); - lport->SetIceRole(cricket::ICEROLE_CONTROLLING); + lport->SetIceRole(webrtc::ICEROLE_CONTROLLING); lport->SetIceTiebreaker(kTiebreaker1); lport->PrepareAddress(); ASSERT_FALSE(lport->Candidates().empty()); @@ -1485,12 +1634,18 @@ TEST_F(PortTest, TestLoopbackCall) { lport->CreateConnection(lport->Candidates()[0], Port::ORIGIN_MESSAGE); conn->Ping(0); - ASSERT_TRUE_WAIT(lport->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return lport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); IceMessage* msg = lport->last_stun_msg(); EXPECT_EQ(STUN_BINDING_REQUEST, msg->type()); - conn->OnReadPacket(lport->last_stun_buf()->data(), - lport->last_stun_buf()->size(), /* packet_time_us */ -1); - ASSERT_TRUE_WAIT(lport->last_stun_msg() != NULL, kDefaultTimeout); + conn->OnReadPacket( + ReceivedIpPacket(lport->last_stun_buf(), SocketAddress(), std::nullopt)); + ASSERT_THAT( + webrtc::WaitUntil([&] { return lport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); msg = lport->last_stun_msg(); EXPECT_EQ(STUN_BINDING_RESPONSE, msg->type()); @@ -1503,7 +1658,10 @@ TEST_F(PortTest, TestLoopbackCall) { lport->CreateConnection(lport->Candidates()[1], Port::ORIGIN_MESSAGE); conn1->Ping(0); - ASSERT_TRUE_WAIT(lport->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return lport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); msg = lport->last_stun_msg(); EXPECT_EQ(STUN_BINDING_REQUEST, msg->type()); std::unique_ptr modified_req( @@ -1522,8 +1680,13 @@ TEST_F(PortTest, TestLoopbackCall) { lport->Reset(); auto buf = std::make_unique(); WriteStunMessage(*modified_req, buf.get()); - conn1->OnReadPacket(buf->Data(), buf->Length(), /* packet_time_us */ -1); - ASSERT_TRUE_WAIT(lport->last_stun_msg() != NULL, kDefaultTimeout); + conn1->OnReadPacket(ReceivedIpPacket::CreateFromLegacy( + reinterpret_cast(buf->Data()), buf->Length(), + /*packet_time_us=*/-1)); + ASSERT_THAT( + webrtc::WaitUntil([&] { return lport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); msg = lport->last_stun_msg(); EXPECT_EQ(STUN_BINDING_ERROR_RESPONSE, msg->type()); } @@ -1535,10 +1698,10 @@ TEST_F(PortTest, TestLoopbackCall) { // send role conflict signal. 
TEST_F(PortTest, TestIceRoleConflict) { auto lport = CreateTestPort(kLocalAddr1, "lfrag", "lpass"); - lport->SetIceRole(cricket::ICEROLE_CONTROLLING); + lport->SetIceRole(webrtc::ICEROLE_CONTROLLING); lport->SetIceTiebreaker(kTiebreaker1); auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass"); - rport->SetIceRole(cricket::ICEROLE_CONTROLLING); + rport->SetIceRole(webrtc::ICEROLE_CONTROLLING); rport->SetIceTiebreaker(kTiebreaker2); lport->PrepareAddress(); @@ -1551,29 +1714,35 @@ TEST_F(PortTest, TestIceRoleConflict) { rport->CreateConnection(lport->Candidates()[0], Port::ORIGIN_MESSAGE); rconn->Ping(0); - ASSERT_TRUE_WAIT(rport->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return rport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); IceMessage* msg = rport->last_stun_msg(); EXPECT_EQ(STUN_BINDING_REQUEST, msg->type()); // Send rport binding request to lport. - lconn->OnReadPacket(rport->last_stun_buf()->data(), - rport->last_stun_buf()->size(), /* packet_time_us */ -1); + lconn->OnReadPacket( + ReceivedIpPacket(rport->last_stun_buf(), SocketAddress(), std::nullopt)); - ASSERT_TRUE_WAIT(lport->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return lport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_EQ(STUN_BINDING_RESPONSE, lport->last_stun_msg()->type()); EXPECT_TRUE(role_conflict()); } TEST_F(PortTest, TestTcpNoDelay) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; auto port1 = CreateTcpPort(kLocalAddr1); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); int option_value = -1; - int success = port1->GetOption(rtc::Socket::OPT_NODELAY, &option_value); + int success = port1->GetOption(Socket::OPT_NODELAY, &option_value); ASSERT_EQ(0, success); // GetOption() should complete successfully w/ 0 EXPECT_EQ(1, option_value); auto port2 = CreateTcpPort(kLocalAddr2); - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); // Set up a connection, and verify that option is set on connected sockets at // both ends. @@ -1582,13 +1751,22 @@ TEST_F(PortTest, TestTcpNoDelay) { // Acquire addresses. ch1.Start(); ch2.Start(); - ASSERT_EQ_SIMULATED_WAIT(1, ch1.complete_count(), kDefaultTimeout, clock); - ASSERT_EQ_SIMULATED_WAIT(1, ch2.complete_count(), kDefaultTimeout, clock); + ASSERT_THAT(webrtc::WaitUntil([&] { return ch1.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); + ASSERT_THAT(webrtc::WaitUntil([&] { return ch2.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); // Connect and send a ping from src to dst. 
ch1.CreateConnection(GetCandidate(ch2.port())); ASSERT_TRUE(ch1.conn() != NULL); - EXPECT_TRUE_SIMULATED_WAIT(ch1.conn()->connected(), kDefaultTimeout, - clock); // for TCP connect + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ch1.conn()->connected(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); // for TCP connect ch1.Ping(); SIMULATED_WAIT(!ch2.remote_address().IsNil(), kShortTimeout, clock); @@ -1599,14 +1777,14 @@ TEST_F(PortTest, TestTcpNoDelay) { option_value = -1; success = static_cast(ch1.conn()) ->socket() - ->GetOption(rtc::Socket::OPT_NODELAY, &option_value); + ->GetOption(Socket::OPT_NODELAY, &option_value); ASSERT_EQ(0, success); EXPECT_EQ(1, option_value); option_value = -1; success = static_cast(ch2.conn()) ->socket() - ->GetOption(rtc::Socket::OPT_NODELAY, &option_value); + ->GetOption(Socket::OPT_NODELAY, &option_value); ASSERT_EQ(0, success); EXPECT_EQ(1, option_value); } @@ -1641,9 +1819,9 @@ TEST_F(PortTest, TestDisableInterfaceOfTcpPort) { lsocket->Bind(kLocalAddr1); rsocket->Bind(kLocalAddr2); - lport->SetIceRole(cricket::ICEROLE_CONTROLLING); + lport->SetIceRole(webrtc::ICEROLE_CONTROLLING); lport->SetIceTiebreaker(kTiebreaker1); - rport->SetIceRole(cricket::ICEROLE_CONTROLLED); + rport->SetIceRole(webrtc::ICEROLE_CONTROLLED); rport->SetIceTiebreaker(kTiebreaker2); lport->PrepareAddress(); @@ -1811,38 +1989,38 @@ TEST_F(PortTest, TestUdpMultipleAddressesV6CrossTypePorts) { TEST_F(PortTest, TestDefaultDscpValue) { int dscp; auto udpport = CreateUdpPort(kLocalAddr1); - EXPECT_EQ(0, udpport->SetOption(rtc::Socket::OPT_DSCP, rtc::DSCP_CS6)); - EXPECT_EQ(0, udpport->GetOption(rtc::Socket::OPT_DSCP, &dscp)); + EXPECT_EQ(0, udpport->SetOption(Socket::OPT_DSCP, DSCP_CS6)); + EXPECT_EQ(0, udpport->GetOption(Socket::OPT_DSCP, &dscp)); auto tcpport = CreateTcpPort(kLocalAddr1); - EXPECT_EQ(0, tcpport->SetOption(rtc::Socket::OPT_DSCP, rtc::DSCP_AF31)); - EXPECT_EQ(0, tcpport->GetOption(rtc::Socket::OPT_DSCP, &dscp)); - EXPECT_EQ(rtc::DSCP_AF31, dscp); + EXPECT_EQ(0, tcpport->SetOption(Socket::OPT_DSCP, DSCP_AF31)); + EXPECT_EQ(0, tcpport->GetOption(Socket::OPT_DSCP, &dscp)); + EXPECT_EQ(DSCP_AF31, dscp); auto stunport = CreateStunPort(kLocalAddr1, nat_socket_factory1()); - EXPECT_EQ(0, stunport->SetOption(rtc::Socket::OPT_DSCP, rtc::DSCP_AF41)); - EXPECT_EQ(0, stunport->GetOption(rtc::Socket::OPT_DSCP, &dscp)); - EXPECT_EQ(rtc::DSCP_AF41, dscp); - auto turnport1 = - CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP); + EXPECT_EQ(0, stunport->SetOption(Socket::OPT_DSCP, DSCP_AF41)); + EXPECT_EQ(0, stunport->GetOption(Socket::OPT_DSCP, &dscp)); + EXPECT_EQ(DSCP_AF41, dscp); + auto turnport1 = CreateTurnPort(kLocalAddr1, nat_socket_factory1(), + webrtc::PROTO_UDP, webrtc::PROTO_UDP); // Socket is created in PrepareAddress. turnport1->PrepareAddress(); - EXPECT_EQ(0, turnport1->SetOption(rtc::Socket::OPT_DSCP, rtc::DSCP_CS7)); - EXPECT_EQ(0, turnport1->GetOption(rtc::Socket::OPT_DSCP, &dscp)); - EXPECT_EQ(rtc::DSCP_CS7, dscp); + EXPECT_EQ(0, turnport1->SetOption(Socket::OPT_DSCP, DSCP_CS7)); + EXPECT_EQ(0, turnport1->GetOption(Socket::OPT_DSCP, &dscp)); + EXPECT_EQ(DSCP_CS7, dscp); // This will verify correct value returned without the socket. 
- auto turnport2 = - CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP); - EXPECT_EQ(0, turnport2->SetOption(rtc::Socket::OPT_DSCP, rtc::DSCP_CS6)); - EXPECT_EQ(0, turnport2->GetOption(rtc::Socket::OPT_DSCP, &dscp)); - EXPECT_EQ(rtc::DSCP_CS6, dscp); + auto turnport2 = CreateTurnPort(kLocalAddr1, nat_socket_factory1(), + webrtc::PROTO_UDP, webrtc::PROTO_UDP); + EXPECT_EQ(0, turnport2->SetOption(Socket::OPT_DSCP, DSCP_CS6)); + EXPECT_EQ(0, turnport2->GetOption(Socket::OPT_DSCP, &dscp)); + EXPECT_EQ(DSCP_CS6, dscp); } // Test sending STUN messages. TEST_F(PortTest, TestSendStunMessage) { auto lport = CreateTestPort(kLocalAddr1, "lfrag", "lpass"); auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass"); - lport->SetIceRole(cricket::ICEROLE_CONTROLLING); + lport->SetIceRole(webrtc::ICEROLE_CONTROLLING); lport->SetIceTiebreaker(kTiebreaker1); - rport->SetIceRole(cricket::ICEROLE_CONTROLLED); + rport->SetIceRole(webrtc::ICEROLE_CONTROLLED); rport->SetIceTiebreaker(kTiebreaker2); // Send a fake ping from lport to rport. @@ -1856,7 +2034,10 @@ TEST_F(PortTest, TestSendStunMessage) { lconn->Ping(0); // Check that it's a proper BINDING-REQUEST. - ASSERT_TRUE_WAIT(lport->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return lport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); IceMessage* msg = lport->last_stun_msg(); EXPECT_EQ(STUN_BINDING_REQUEST, msg->type()); EXPECT_FALSE(msg->IsLegacy()); @@ -1878,7 +2059,8 @@ TEST_F(PortTest, TestSendStunMessage) { EXPECT_TRUE(msg->GetByteString(STUN_ATTR_USE_CANDIDATE) != NULL); EXPECT_TRUE(msg->GetUInt32(STUN_ATTR_FINGERPRINT) != NULL); EXPECT_TRUE(StunMessage::ValidateFingerprint( - lport->last_stun_buf()->data(), lport->last_stun_buf()->size())); + reinterpret_cast(lport->last_stun_buf().data()), + lport->last_stun_buf().size())); // Request should not include ping count. ASSERT_TRUE(msg->GetUInt32(STUN_ATTR_RETRANSMIT_COUNT) == NULL); @@ -1887,14 +2069,15 @@ TEST_F(PortTest, TestSendStunMessage) { std::unique_ptr request = CopyStunMessage(*msg); // Receive the BINDING-REQUEST and respond with BINDING-RESPONSE. - rconn->OnReadPacket(lport->last_stun_buf()->data(), - lport->last_stun_buf()->size(), /* packet_time_us */ -1); + rconn->OnReadPacket( + ReceivedIpPacket(lport->last_stun_buf(), SocketAddress(), std::nullopt)); msg = rport->last_stun_msg(); ASSERT_TRUE(msg != NULL); EXPECT_EQ(STUN_BINDING_RESPONSE, msg->type()); // Received a BINDING-RESPONSE. - lconn->OnReadPacket(rport->last_stun_buf()->data(), - rport->last_stun_buf()->size(), /* packet_time_us */ -1); + lconn->OnReadPacket( + ReceivedIpPacket(rport->last_stun_buf(), SocketAddress(), std::nullopt)); + // Verify the STUN Stats. EXPECT_EQ(1U, lconn->stats().sent_ping_requests_total); EXPECT_EQ(1U, lconn->stats().sent_ping_requests_before_first_response); @@ -1912,7 +2095,8 @@ TEST_F(PortTest, TestSendStunMessage) { msg->ValidateMessageIntegrity("rpass")); EXPECT_TRUE(msg->GetUInt32(STUN_ATTR_FINGERPRINT) != NULL); EXPECT_TRUE(StunMessage::ValidateFingerprint( - lport->last_stun_buf()->data(), lport->last_stun_buf()->size())); + reinterpret_cast(lport->last_stun_buf().data()), + lport->last_stun_buf().size())); // No USERNAME or PRIORITY in ICE responses. 
EXPECT_TRUE(msg->GetByteString(STUN_ATTR_USERNAME) == NULL); EXPECT_TRUE(msg->GetByteString(STUN_ATTR_PRIORITY) == NULL); @@ -1942,7 +2126,8 @@ TEST_F(PortTest, TestSendStunMessage) { msg->ValidateMessageIntegrity("rpass")); EXPECT_TRUE(msg->GetUInt32(STUN_ATTR_FINGERPRINT) != NULL); EXPECT_TRUE(StunMessage::ValidateFingerprint( - lport->last_stun_buf()->data(), lport->last_stun_buf()->size())); + reinterpret_cast(lport->last_stun_buf().data()), + lport->last_stun_buf().size())); // No USERNAME with ICE. EXPECT_TRUE(msg->GetByteString(STUN_ATTR_USERNAME) == NULL); EXPECT_TRUE(msg->GetByteString(STUN_ATTR_PRIORITY) == NULL); @@ -1954,7 +2139,10 @@ TEST_F(PortTest, TestSendStunMessage) { rconn->Ping(0); rconn->Ping(0); rconn->Ping(0); - ASSERT_TRUE_WAIT(rport->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return rport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); msg = rport->last_stun_msg(); EXPECT_EQ(STUN_BINDING_REQUEST, msg->type()); const StunUInt64Attribute* ice_controlled_attr = @@ -1971,12 +2159,12 @@ TEST_F(PortTest, TestSendStunMessage) { // Respond with a BINDING-RESPONSE. request = CopyStunMessage(*msg); - lconn->OnReadPacket(rport->last_stun_buf()->data(), - rport->last_stun_buf()->size(), /* packet_time_us */ -1); + lconn->OnReadPacket( + ReceivedIpPacket(rport->last_stun_buf(), SocketAddress(), std::nullopt)); msg = lport->last_stun_msg(); // Receive the BINDING-RESPONSE. - rconn->OnReadPacket(lport->last_stun_buf()->data(), - lport->last_stun_buf()->size(), /* packet_time_us */ -1); + rconn->OnReadPacket( + ReceivedIpPacket(lport->last_stun_buf(), SocketAddress(), std::nullopt)); // Verify the Stun ping stats. EXPECT_EQ(3U, rconn->stats().sent_ping_requests_total); @@ -1999,9 +2187,9 @@ TEST_F(PortTest, TestSendStunMessage) { TEST_F(PortTest, TestNomination) { auto lport = CreateTestPort(kLocalAddr1, "lfrag", "lpass"); auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass"); - lport->SetIceRole(cricket::ICEROLE_CONTROLLING); + lport->SetIceRole(webrtc::ICEROLE_CONTROLLING); lport->SetIceTiebreaker(kTiebreaker1); - rport->SetIceRole(cricket::ICEROLE_CONTROLLED); + rport->SetIceRole(webrtc::ICEROLE_CONTROLLED); rport->SetIceTiebreaker(kTiebreaker2); lport->PrepareAddress(); @@ -2025,10 +2213,14 @@ TEST_F(PortTest, TestNomination) { // Send ping (including the nomination value) from `lconn` to `rconn`. This // should set the remote nomination of `rconn`. lconn->Ping(0); - ASSERT_TRUE_WAIT(lport->last_stun_msg(), kDefaultTimeout); - ASSERT_TRUE(lport->last_stun_buf()); - rconn->OnReadPacket(lport->last_stun_buf()->data(), - lport->last_stun_buf()->size(), /* packet_time_us */ -1); + ASSERT_THAT( + webrtc::WaitUntil([&] { return lport->last_stun_msg(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + ASSERT_GT(lport->last_stun_buf().size(), 0u); + rconn->OnReadPacket( + ReceivedIpPacket(lport->last_stun_buf(), SocketAddress(), std::nullopt)); + EXPECT_EQ(nomination, rconn->remote_nomination()); EXPECT_FALSE(lconn->nominated()); EXPECT_TRUE(rconn->nominated()); @@ -2037,10 +2229,14 @@ TEST_F(PortTest, TestNomination) { // This should result in an acknowledgment sent back from `rconn` to `lconn`, // updating the acknowledged nomination of `lconn`. 
- ASSERT_TRUE_WAIT(rport->last_stun_msg(), kDefaultTimeout); - ASSERT_TRUE(rport->last_stun_buf()); - lconn->OnReadPacket(rport->last_stun_buf()->data(), - rport->last_stun_buf()->size(), /* packet_time_us */ -1); + ASSERT_THAT( + webrtc::WaitUntil([&] { return rport->last_stun_msg(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + ASSERT_GT(rport->last_stun_buf().size(), 0u); + lconn->OnReadPacket( + ReceivedIpPacket(rport->last_stun_buf(), SocketAddress(), std::nullopt)); + EXPECT_EQ(nomination, lconn->acked_nomination()); EXPECT_TRUE(lconn->nominated()); EXPECT_TRUE(rconn->nominated()); @@ -2049,13 +2245,13 @@ TEST_F(PortTest, TestNomination) { } TEST_F(PortTest, TestRoundTripTime) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; auto lport = CreateTestPort(kLocalAddr1, "lfrag", "lpass"); auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass"); - lport->SetIceRole(cricket::ICEROLE_CONTROLLING); + lport->SetIceRole(webrtc::ICEROLE_CONTROLLING); lport->SetIceTiebreaker(kTiebreaker1); - rport->SetIceRole(cricket::ICEROLE_CONTROLLED); + rport->SetIceRole(webrtc::ICEROLE_CONTROLLED); rport->SetIceTiebreaker(kTiebreaker2); lport->PrepareAddress(); @@ -2092,9 +2288,9 @@ TEST_F(PortTest, TestRoundTripTime) { TEST_F(PortTest, TestUseCandidateAttribute) { auto lport = CreateTestPort(kLocalAddr1, "lfrag", "lpass"); auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass"); - lport->SetIceRole(cricket::ICEROLE_CONTROLLING); + lport->SetIceRole(webrtc::ICEROLE_CONTROLLING); lport->SetIceTiebreaker(kTiebreaker1); - rport->SetIceRole(cricket::ICEROLE_CONTROLLED); + rport->SetIceRole(webrtc::ICEROLE_CONTROLLED); rport->SetIceTiebreaker(kTiebreaker2); // Send a fake ping from lport to rport. @@ -2104,7 +2300,10 @@ TEST_F(PortTest, TestUseCandidateAttribute) { Connection* lconn = lport->CreateConnection(rport->Candidates()[0], Port::ORIGIN_MESSAGE); lconn->Ping(0); - ASSERT_TRUE_WAIT(lport->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return lport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); IceMessage* msg = lport->last_stun_msg(); const StunUInt64Attribute* ice_controlling_attr = msg->GetUInt64(STUN_ATTR_ICE_CONTROLLING); @@ -2118,68 +2317,75 @@ TEST_F(PortTest, TestUseCandidateAttribute) { // change, the network cost of the local candidates will change. Also tests that // the remote network costs are updated with the stun binding requests. TEST_F(PortTest, TestNetworkCostChange) { - rtc::Network* test_network = MakeNetwork(kLocalAddr1); + Network* test_network = MakeNetwork(kLocalAddr1); auto lport = CreateTestPort(test_network, "lfrag", "lpass"); auto rport = CreateTestPort(test_network, "rfrag", "rpass"); - lport->SetIceRole(cricket::ICEROLE_CONTROLLING); + lport->SetIceRole(webrtc::ICEROLE_CONTROLLING); lport->SetIceTiebreaker(kTiebreaker1); - rport->SetIceRole(cricket::ICEROLE_CONTROLLED); + rport->SetIceRole(webrtc::ICEROLE_CONTROLLED); rport->SetIceTiebreaker(kTiebreaker2); lport->PrepareAddress(); rport->PrepareAddress(); - // Default local port cost is rtc::kNetworkCostUnknown. - EXPECT_EQ(rtc::kNetworkCostUnknown, lport->network_cost()); + // Default local port cost is webrtc::kNetworkCostUnknown. 
+ EXPECT_EQ(webrtc::kNetworkCostUnknown, lport->network_cost()); ASSERT_TRUE(!lport->Candidates().empty()); - for (const cricket::Candidate& candidate : lport->Candidates()) { - EXPECT_EQ(rtc::kNetworkCostUnknown, candidate.network_cost()); + for (const Candidate& candidate : lport->Candidates()) { + EXPECT_EQ(webrtc::kNetworkCostUnknown, candidate.network_cost()); } // Change the network type to wifi. - test_network->set_type(rtc::ADAPTER_TYPE_WIFI); - EXPECT_EQ(rtc::kNetworkCostLow, lport->network_cost()); - for (const cricket::Candidate& candidate : lport->Candidates()) { - EXPECT_EQ(rtc::kNetworkCostLow, candidate.network_cost()); + test_network->set_type(webrtc::ADAPTER_TYPE_WIFI); + EXPECT_EQ(webrtc::kNetworkCostLow, lport->network_cost()); + for (const Candidate& candidate : lport->Candidates()) { + EXPECT_EQ(webrtc::kNetworkCostLow, candidate.network_cost()); } // Add a connection and then change the network type. Connection* lconn = lport->CreateConnection(rport->Candidates()[0], Port::ORIGIN_MESSAGE); // Change the network type to cellular. - test_network->set_type(rtc::ADAPTER_TYPE_CELLULAR); - EXPECT_EQ(rtc::kNetworkCostHigh, lport->network_cost()); - for (const cricket::Candidate& candidate : lport->Candidates()) { - EXPECT_EQ(rtc::kNetworkCostHigh, candidate.network_cost()); + test_network->set_type(webrtc::ADAPTER_TYPE_CELLULAR); + EXPECT_EQ(webrtc::kNetworkCostHigh, lport->network_cost()); + for (const Candidate& candidate : lport->Candidates()) { + EXPECT_EQ(webrtc::kNetworkCostHigh, candidate.network_cost()); } - test_network->set_type(rtc::ADAPTER_TYPE_WIFI); + test_network->set_type(webrtc::ADAPTER_TYPE_WIFI); Connection* rconn = rport->CreateConnection(lport->Candidates()[0], Port::ORIGIN_MESSAGE); - test_network->set_type(rtc::ADAPTER_TYPE_CELLULAR); + test_network->set_type(webrtc::ADAPTER_TYPE_CELLULAR); lconn->Ping(0); - // The rconn's remote candidate cost is rtc::kNetworkCostLow, but the ping - // contains an attribute of network cost of rtc::kNetworkCostHigh. Once the + // The rconn's remote candidate cost is webrtc::kNetworkCostLow, but the ping + // contains an attribute of network cost of webrtc::kNetworkCostHigh. Once the // message is handled in rconn, The rconn's remote candidate will have cost - // rtc::kNetworkCostHigh; - EXPECT_EQ(rtc::kNetworkCostLow, rconn->remote_candidate().network_cost()); - ASSERT_TRUE_WAIT(lport->last_stun_msg() != NULL, kDefaultTimeout); + // webrtc::kNetworkCostHigh; + EXPECT_EQ(webrtc::kNetworkCostLow, rconn->remote_candidate().network_cost()); + ASSERT_THAT( + webrtc::WaitUntil([&] { return lport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); IceMessage* msg = lport->last_stun_msg(); EXPECT_EQ(STUN_BINDING_REQUEST, msg->type()); // Pass the binding request to rport. - rconn->OnReadPacket(lport->last_stun_buf()->data(), - lport->last_stun_buf()->size(), /* packet_time_us */ -1); + rconn->OnReadPacket( + ReceivedIpPacket(lport->last_stun_buf(), SocketAddress(), std::nullopt)); + // Wait until rport sends the response and then check the remote network cost. 
- ASSERT_TRUE_WAIT(rport->last_stun_msg() != NULL, kDefaultTimeout); - EXPECT_EQ(rtc::kNetworkCostHigh, rconn->remote_candidate().network_cost()); + ASSERT_THAT( + webrtc::WaitUntil([&] { return rport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + EXPECT_EQ(webrtc::kNetworkCostHigh, rconn->remote_candidate().network_cost()); } TEST_F(PortTest, TestNetworkInfoAttribute) { - rtc::Network* test_network = MakeNetwork(kLocalAddr1); + Network* test_network = MakeNetwork(kLocalAddr1); auto lport = CreateTestPort(test_network, "lfrag", "lpass"); auto rport = CreateTestPort(test_network, "rfrag", "rpass"); - lport->SetIceRole(cricket::ICEROLE_CONTROLLING); + lport->SetIceRole(webrtc::ICEROLE_CONTROLLING); lport->SetIceTiebreaker(kTiebreaker1); - rport->SetIceRole(cricket::ICEROLE_CONTROLLED); + rport->SetIceRole(webrtc::ICEROLE_CONTROLLED); rport->SetIceTiebreaker(kTiebreaker2); uint16_t lnetwork_id = 9; @@ -2190,7 +2396,10 @@ TEST_F(PortTest, TestNetworkInfoAttribute) { Connection* lconn = lport->CreateConnection(rport->Candidates()[0], Port::ORIGIN_MESSAGE); lconn->Ping(0); - ASSERT_TRUE_WAIT(lport->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return lport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); IceMessage* msg = lport->last_stun_msg(); const StunUInt32Attribute* network_info_attr = msg->GetUInt32(STUN_ATTR_GOOG_NETWORK_INFO); @@ -2198,23 +2407,26 @@ TEST_F(PortTest, TestNetworkInfoAttribute) { uint32_t network_info = network_info_attr->value(); EXPECT_EQ(lnetwork_id, network_info >> 16); // Default network has unknown type and cost kNetworkCostUnknown. - EXPECT_EQ(rtc::kNetworkCostUnknown, network_info & 0xFFFF); + EXPECT_EQ(webrtc::kNetworkCostUnknown, network_info & 0xFFFF); // Set the network type to be cellular so its cost will be kNetworkCostHigh. // Send a fake ping from rport to lport. - test_network->set_type(rtc::ADAPTER_TYPE_CELLULAR); + test_network->set_type(webrtc::ADAPTER_TYPE_CELLULAR); uint16_t rnetwork_id = 8; test_network->set_id(rnetwork_id); Connection* rconn = rport->CreateConnection(lport->Candidates()[0], Port::ORIGIN_MESSAGE); rconn->Ping(0); - ASSERT_TRUE_WAIT(rport->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return rport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); msg = rport->last_stun_msg(); network_info_attr = msg->GetUInt32(STUN_ATTR_GOOG_NETWORK_INFO); ASSERT_TRUE(network_info_attr != NULL); network_info = network_info_attr->value(); EXPECT_EQ(rnetwork_id, network_info >> 16); - EXPECT_EQ(rtc::kNetworkCostHigh, network_info & 0xFFFF); + EXPECT_EQ(webrtc::kNetworkCostHigh, network_info & 0xFFFF); } // Test handling STUN messages. 
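// A second recurring rewrite in these hunks: Connection::OnReadPacket no
// longer takes a raw (data, size, packet_time_us) triple but a
// ReceivedIpPacket. As a minimal sketch, assuming the same ReceivedIpPacket
// constructors already used in these hunks, a captured STUN buffer is fed
// back either directly,
rconn->OnReadPacket(
    ReceivedIpPacket(lport->last_stun_buf(), SocketAddress(), std::nullopt));
// or, when only a raw pointer/length pair is at hand, through the legacy
// adapter (some call sites in these hunks additionally reinterpret_cast the
// data pointer):
lconn->OnReadPacket(ReceivedIpPacket::CreateFromLegacy(
    buf.Data(), buf.Length(), /*packet_time_us=*/-1));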
@@ -2224,7 +2436,7 @@ TEST_F(PortTest, TestHandleStunMessage) { std::unique_ptr in_msg, out_msg; auto buf = std::make_unique(); - rtc::SocketAddress addr(kLocalAddr1); + SocketAddress addr(kLocalAddr1); std::string username; // BINDING-REQUEST from local to remote with valid ICE username, @@ -2233,8 +2445,8 @@ TEST_F(PortTest, TestHandleStunMessage) { in_msg->AddMessageIntegrity("rpass"); in_msg->AddFingerprint(); WriteStunMessage(*in_msg, buf.get()); - EXPECT_TRUE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_TRUE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_TRUE(out_msg.get() != NULL); EXPECT_EQ("lfrag", username); @@ -2245,8 +2457,8 @@ TEST_F(PortTest, TestHandleStunMessage) { in_msg->AddMessageIntegrity("rpass"); in_msg->AddFingerprint(); WriteStunMessage(*in_msg, buf.get()); - EXPECT_TRUE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_TRUE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_TRUE(out_msg.get() != NULL); EXPECT_EQ("", username); @@ -2257,8 +2469,8 @@ TEST_F(PortTest, TestHandleStunMessage) { STUN_ERROR_REASON_SERVER_ERROR)); in_msg->AddFingerprint(); WriteStunMessage(*in_msg, buf.get()); - EXPECT_TRUE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_TRUE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_TRUE(out_msg.get() != NULL); EXPECT_EQ("", username); ASSERT_TRUE(out_msg->GetErrorCode() != NULL); @@ -2273,7 +2485,7 @@ TEST_F(PortTest, TestHandleStunMessageBadUsername) { std::unique_ptr in_msg, out_msg; auto buf = std::make_unique(); - rtc::SocketAddress addr(kLocalAddr1); + SocketAddress addr(kLocalAddr1); std::string username; // BINDING-REQUEST with no username. 
@@ -2281,8 +2493,8 @@ TEST_F(PortTest, TestHandleStunMessageBadUsername) { in_msg->AddMessageIntegrity("rpass"); in_msg->AddFingerprint(); WriteStunMessage(*in_msg, buf.get()); - EXPECT_TRUE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_TRUE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_TRUE(out_msg.get() == NULL); EXPECT_EQ("", username); EXPECT_EQ(STUN_ERROR_BAD_REQUEST, port->last_stun_error_code()); @@ -2292,8 +2504,8 @@ TEST_F(PortTest, TestHandleStunMessageBadUsername) { in_msg->AddMessageIntegrity("rpass"); in_msg->AddFingerprint(); WriteStunMessage(*in_msg, buf.get()); - EXPECT_TRUE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_TRUE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_TRUE(out_msg.get() == NULL); EXPECT_EQ("", username); EXPECT_EQ(STUN_ERROR_UNAUTHORIZED, port->last_stun_error_code()); @@ -2303,8 +2515,8 @@ TEST_F(PortTest, TestHandleStunMessageBadUsername) { in_msg->AddMessageIntegrity("rpass"); in_msg->AddFingerprint(); WriteStunMessage(*in_msg, buf.get()); - EXPECT_TRUE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_TRUE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_TRUE(out_msg.get() == NULL); EXPECT_EQ("", username); EXPECT_EQ(STUN_ERROR_UNAUTHORIZED, port->last_stun_error_code()); @@ -2314,8 +2526,8 @@ TEST_F(PortTest, TestHandleStunMessageBadUsername) { in_msg->AddMessageIntegrity("rpass"); in_msg->AddFingerprint(); WriteStunMessage(*in_msg, buf.get()); - EXPECT_TRUE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_TRUE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_TRUE(out_msg.get() == NULL); EXPECT_EQ("", username); EXPECT_EQ(STUN_ERROR_UNAUTHORIZED, port->last_stun_error_code()); @@ -2325,8 +2537,8 @@ TEST_F(PortTest, TestHandleStunMessageBadUsername) { in_msg->AddMessageIntegrity("rpass"); in_msg->AddFingerprint(); WriteStunMessage(*in_msg, buf.get()); - EXPECT_TRUE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_TRUE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_TRUE(out_msg.get() == NULL); EXPECT_EQ("", username); EXPECT_EQ(STUN_ERROR_UNAUTHORIZED, port->last_stun_error_code()); @@ -2339,7 +2551,7 @@ TEST_F(PortTest, TestHandleStunMessageBadMessageIntegrity) { std::unique_ptr in_msg, out_msg; auto buf = std::make_unique(); - rtc::SocketAddress addr(kLocalAddr1); + SocketAddress addr(kLocalAddr1); std::string username; // BINDING-REQUEST from local to remote with valid ICE username and @@ -2347,8 +2559,8 @@ TEST_F(PortTest, TestHandleStunMessageBadMessageIntegrity) { in_msg = CreateStunMessageWithUsername(STUN_BINDING_REQUEST, "rfrag:lfrag"); in_msg->AddFingerprint(); WriteStunMessage(*in_msg, buf.get()); - EXPECT_TRUE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_TRUE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_TRUE(out_msg.get() == NULL); EXPECT_EQ("", username); EXPECT_EQ(STUN_ERROR_BAD_REQUEST, port->last_stun_error_code()); @@ -2359,8 +2571,8 @@ TEST_F(PortTest, TestHandleStunMessageBadMessageIntegrity) { in_msg->AddMessageIntegrity("invalid"); in_msg->AddFingerprint(); WriteStunMessage(*in_msg, buf.get()); - 
EXPECT_TRUE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_TRUE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_TRUE(out_msg.get() == NULL); EXPECT_EQ("", username); EXPECT_EQ(STUN_ERROR_UNAUTHORIZED, port->last_stun_error_code()); @@ -2377,7 +2589,7 @@ TEST_F(PortTest, TestHandleStunMessageBadFingerprint) { std::unique_ptr in_msg, out_msg; auto buf = std::make_unique(); - rtc::SocketAddress addr(kLocalAddr1); + SocketAddress addr(kLocalAddr1); std::string username; // BINDING-REQUEST from local to remote with valid ICE username and @@ -2385,16 +2597,16 @@ TEST_F(PortTest, TestHandleStunMessageBadFingerprint) { in_msg = CreateStunMessageWithUsername(STUN_BINDING_REQUEST, "rfrag:lfrag"); in_msg->AddMessageIntegrity("rpass"); WriteStunMessage(*in_msg, buf.get()); - EXPECT_FALSE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_FALSE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_EQ(0, port->last_stun_error_code()); // Now, add a fingerprint, but munge the message so it's not valid. in_msg->AddFingerprint(); in_msg->SetTransactionIdForTesting("TESTTESTBADD"); WriteStunMessage(*in_msg, buf.get()); - EXPECT_FALSE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_FALSE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_EQ(0, port->last_stun_error_code()); // Valid BINDING-RESPONSE, except no FINGERPRINT. @@ -2403,16 +2615,16 @@ TEST_F(PortTest, TestHandleStunMessageBadFingerprint) { STUN_ATTR_XOR_MAPPED_ADDRESS, kLocalAddr2)); in_msg->AddMessageIntegrity("rpass"); WriteStunMessage(*in_msg, buf.get()); - EXPECT_FALSE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_FALSE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_EQ(0, port->last_stun_error_code()); // Now, add a fingerprint, but munge the message so it's not valid. in_msg->AddFingerprint(); in_msg->SetTransactionIdForTesting("TESTTESTBADD"); WriteStunMessage(*in_msg, buf.get()); - EXPECT_FALSE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_FALSE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_EQ(0, port->last_stun_error_code()); // Valid BINDING-ERROR-RESPONSE, except no FINGERPRINT. @@ -2422,16 +2634,16 @@ TEST_F(PortTest, TestHandleStunMessageBadFingerprint) { STUN_ERROR_REASON_SERVER_ERROR)); in_msg->AddMessageIntegrity("rpass"); WriteStunMessage(*in_msg, buf.get()); - EXPECT_FALSE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_FALSE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_EQ(0, port->last_stun_error_code()); // Now, add a fingerprint, but munge the message so it's not valid. 
in_msg->AddFingerprint(); in_msg->SetTransactionIdForTesting("TESTTESTBADD"); WriteStunMessage(*in_msg, buf.get()); - EXPECT_FALSE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_FALSE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_EQ(0, port->last_stun_error_code()); } @@ -2445,7 +2657,7 @@ TEST_F(PortTest, std::unique_ptr in_msg, out_msg; auto buf = std::make_unique(); - rtc::SocketAddress addr(kLocalAddr1); + SocketAddress addr(kLocalAddr1); std::string username; // Build ordinary message with valid ufrag/pass. @@ -2458,8 +2670,8 @@ TEST_F(PortTest, in_msg->AddAttribute(StunAttribute::CreateUInt32(0xdead)); in_msg->AddFingerprint(); WriteStunMessage(*in_msg, buf.get()); - ASSERT_TRUE(port->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + ASSERT_TRUE(GetStunMessageFromBufferWriter(port.get(), buf.get(), addr, + &out_msg, &username)); IceMessage* error_response = port->last_stun_msg(); ASSERT_NE(nullptr, error_response); @@ -2481,9 +2693,9 @@ TEST_F(PortTest, TestHandleStunResponseWithUnknownComprehensionRequiredAttribute) { // Generic setup. auto lport = CreateTestPort(kLocalAddr1, "lfrag", "lpass", - cricket::ICEROLE_CONTROLLING, kTiebreakerDefault); + webrtc::ICEROLE_CONTROLLING, kTiebreakerDefault); auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass", - cricket::ICEROLE_CONTROLLED, kTiebreakerDefault); + webrtc::ICEROLE_CONTROLLED, kTiebreakerDefault); lport->PrepareAddress(); rport->PrepareAddress(); ASSERT_FALSE(lport->Candidates().empty()); @@ -2495,19 +2707,27 @@ TEST_F(PortTest, // Send request. lconn->Ping(0); - ASSERT_TRUE_WAIT(lport->last_stun_msg() != NULL, kDefaultTimeout); - rconn->OnReadPacket(lport->last_stun_buf()->data(), - lport->last_stun_buf()->size(), /* packet_time_us */ -1); + ASSERT_THAT( + webrtc::WaitUntil([&] { return lport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + rconn->OnReadPacket( + ReceivedIpPacket(lport->last_stun_buf(), SocketAddress(), std::nullopt)); // Intercept request and add comprehension required attribute. - ASSERT_TRUE_WAIT(rport->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return rport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); auto modified_response = rport->last_stun_msg()->Clone(); modified_response->AddAttribute(StunAttribute::CreateUInt32(0x7777)); modified_response->RemoveAttribute(STUN_ATTR_FINGERPRINT); modified_response->AddFingerprint(); ByteBufferWriter buf; WriteStunMessage(*modified_response, &buf); - lconn->OnReadPacket(buf.Data(), buf.Length(), /* packet_time_us */ -1); + lconn->OnReadPacket(ReceivedIpPacket::CreateFromLegacy( + reinterpret_cast(buf.Data()), buf.Length(), + /*packet_time_us=*/-1)); // Response should have been ignored, leaving us unwritable still. EXPECT_FALSE(lconn->writable()); } @@ -2518,9 +2738,9 @@ TEST_F(PortTest, TestHandleStunIndicationWithUnknownComprehensionRequiredAttribute) { // Generic set up. 
auto lport = CreateTestPort(kLocalAddr2, "lfrag", "lpass", - cricket::ICEROLE_CONTROLLING, kTiebreakerDefault); + webrtc::ICEROLE_CONTROLLING, kTiebreakerDefault); auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass", - cricket::ICEROLE_CONTROLLED, kTiebreakerDefault); + webrtc::ICEROLE_CONTROLLED, kTiebreakerDefault); lport->PrepareAddress(); rport->PrepareAddress(); ASSERT_FALSE(lport->Candidates().empty()); @@ -2535,7 +2755,8 @@ TEST_F(PortTest, in_msg->AddFingerprint(); ByteBufferWriter buf; WriteStunMessage(*in_msg, &buf); - lconn->OnReadPacket(buf.Data(), buf.Length(), /* packet_time_us */ -1); + lconn->OnReadPacket(ReceivedIpPacket::CreateFromLegacy( + buf.Data(), buf.Length(), /*packet_time_us=*/-1)); EXPECT_EQ(0u, lconn->last_ping_received()); } @@ -2543,19 +2764,19 @@ TEST_F(PortTest, // indications are allowed only to the connection which is in read mode. TEST_F(PortTest, TestHandleStunBindingIndication) { auto lport = CreateTestPort(kLocalAddr2, "lfrag", "lpass", - cricket::ICEROLE_CONTROLLING, kTiebreaker1); + webrtc::ICEROLE_CONTROLLING, kTiebreaker1); // Verifying encoding and decoding STUN indication message. std::unique_ptr in_msg, out_msg; std::unique_ptr buf(new ByteBufferWriter()); - rtc::SocketAddress addr(kLocalAddr1); + SocketAddress addr(kLocalAddr1); std::string username; in_msg = CreateStunMessage(STUN_BINDING_INDICATION); in_msg->AddFingerprint(); WriteStunMessage(*in_msg, buf.get()); - EXPECT_TRUE(lport->GetStunMessage(buf->Data(), buf->Length(), addr, &out_msg, - &username)); + EXPECT_TRUE(GetStunMessageFromBufferWriter(lport.get(), buf.get(), addr, + &out_msg, &username)); EXPECT_TRUE(out_msg.get() != NULL); EXPECT_EQ(out_msg->type(), STUN_BINDING_INDICATION); EXPECT_EQ("", username); @@ -2563,7 +2784,7 @@ TEST_F(PortTest, TestHandleStunBindingIndication) { // Verify connection can handle STUN indication and updates // last_ping_received. auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass"); - rport->SetIceRole(cricket::ICEROLE_CONTROLLED); + rport->SetIceRole(webrtc::ICEROLE_CONTROLLED); rport->SetIceTiebreaker(kTiebreaker2); lport->PrepareAddress(); @@ -2577,20 +2798,28 @@ TEST_F(PortTest, TestHandleStunBindingIndication) { rport->CreateConnection(lport->Candidates()[0], Port::ORIGIN_MESSAGE); rconn->Ping(0); - ASSERT_TRUE_WAIT(rport->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return rport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); IceMessage* msg = rport->last_stun_msg(); EXPECT_EQ(STUN_BINDING_REQUEST, msg->type()); // Send rport binding request to lport. - lconn->OnReadPacket(rport->last_stun_buf()->data(), - rport->last_stun_buf()->size(), /* packet_time_us */ -1); - ASSERT_TRUE_WAIT(lport->last_stun_msg() != NULL, kDefaultTimeout); + lconn->OnReadPacket( + ReceivedIpPacket(rport->last_stun_buf(), SocketAddress(), std::nullopt)); + + ASSERT_THAT( + webrtc::WaitUntil([&] { return lport->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_EQ(STUN_BINDING_RESPONSE, lport->last_stun_msg()->type()); int64_t last_ping_received1 = lconn->last_ping_received(); // Adding a delay of 100ms. - rtc::Thread::Current()->ProcessMessages(100); + Thread::Current()->ProcessMessages(100); // Pinging lconn using stun indication message. 
- lconn->OnReadPacket(buf->Data(), buf->Length(), /* packet_time_us */ -1); + lconn->OnReadPacket(ReceivedIpPacket::CreateFromLegacy( + buf->Data(), buf->Length(), /*packet_time_us=*/-1)); int64_t last_ping_received2 = lconn->last_ping_received(); EXPECT_GT(last_ping_received2, last_ping_received1); } @@ -2632,7 +2861,7 @@ TEST_F(PortTest, TestComputeCandidatePriority) { } TEST_F(PortTest, TestComputeCandidatePriorityWithPriorityAdjustment) { - webrtc::test::ScopedKeyValueConfig field_trials( + test::ScopedKeyValueConfig field_trials( "WebRTC-IncreaseIceCandidatePriorityHostSrflx/Enabled/"); auto port = CreateTestPort(kLocalAddr1, "name", "pass", &field_trials); port->SetIceTiebreaker(kTiebreakerDefault); @@ -2649,15 +2878,20 @@ TEST_F(PortTest, TestComputeCandidatePriorityWithPriorityAdjustment) { port->AddCandidateAddress(SocketAddress("3ffe::1234:5678", 1234)); // These should all be: // (90 << 24) | (([rfc3484 pref value] << 8) + kMaxTurnServers) | (256 - 177) - uint32_t expected_priority_v4 = 1509957199U + (kMaxTurnServers << 8); - uint32_t expected_priority_v6 = 1509959759U + (kMaxTurnServers << 8); - uint32_t expected_priority_ula = 1509962319U + (kMaxTurnServers << 8); + uint32_t expected_priority_v4 = 1509957199U + (webrtc::kMaxTurnServers << 8); + uint32_t expected_priority_v6 = 1509959759U + (webrtc::kMaxTurnServers << 8); + uint32_t expected_priority_ula = 1509962319U + (webrtc::kMaxTurnServers << 8); uint32_t expected_priority_v4mapped = expected_priority_v4; - uint32_t expected_priority_v4compat = 1509949775U + (kMaxTurnServers << 8); - uint32_t expected_priority_6to4 = 1509954639U + (kMaxTurnServers << 8); - uint32_t expected_priority_teredo = 1509952079U + (kMaxTurnServers << 8); - uint32_t expected_priority_sitelocal = 1509949775U + (kMaxTurnServers << 8); - uint32_t expected_priority_6bone = 1509949775U + (kMaxTurnServers << 8); + uint32_t expected_priority_v4compat = + 1509949775U + (webrtc::kMaxTurnServers << 8); + uint32_t expected_priority_6to4 = + 1509954639U + (webrtc::kMaxTurnServers << 8); + uint32_t expected_priority_teredo = + 1509952079U + (webrtc::kMaxTurnServers << 8); + uint32_t expected_priority_sitelocal = + 1509949775U + (webrtc::kMaxTurnServers << 8); + uint32_t expected_priority_6bone = + 1509949775U + (webrtc::kMaxTurnServers << 8); ASSERT_EQ(expected_priority_v4, port->Candidates()[0].priority()); ASSERT_EQ(expected_priority_v6, port->Candidates()[1].priority()); ASSERT_EQ(expected_priority_ula, port->Candidates()[2].priority()); @@ -2674,18 +2908,20 @@ TEST_F(PortTest, TestComputeCandidatePriorityWithPriorityAdjustment) { TEST_F(PortTest, TestFoundation) { auto testport = CreateTestPort(kLocalAddr1, "name", "pass"); testport->SetIceTiebreaker(kTiebreakerDefault); - testport->AddCandidateAddress(kLocalAddr1, kLocalAddr1, LOCAL_PORT_TYPE, - cricket::ICE_TYPE_PREFERENCE_HOST, false); - testport->AddCandidateAddress(kLocalAddr2, kLocalAddr1, STUN_PORT_TYPE, - cricket::ICE_TYPE_PREFERENCE_SRFLX, true); + testport->AddCandidateAddress(kLocalAddr1, kLocalAddr1, + IceCandidateType::kHost, + ICE_TYPE_PREFERENCE_HOST, false); + testport->AddCandidateAddress(kLocalAddr2, kLocalAddr1, + IceCandidateType::kSrflx, + ICE_TYPE_PREFERENCE_SRFLX, true); EXPECT_NE(testport->Candidates()[0].foundation(), testport->Candidates()[1].foundation()); } // This test verifies the foundation of different types of ICE candidates. 
TEST_F(PortTest, TestCandidateFoundation) { - std::unique_ptr nat_server( - CreateNatServer(kNatAddr1, NAT_OPEN_CONE)); + std::unique_ptr nat_server( + CreateNatServer(kNatAddr1, webrtc::NAT_OPEN_CONE)); auto udpport1 = CreateUdpPort(kLocalAddr1); udpport1->PrepareAddress(); auto udpport2 = CreateUdpPort(kLocalAddr1); @@ -2700,7 +2936,10 @@ TEST_F(PortTest, TestCandidateFoundation) { tcpport2->Candidates()[0].foundation()); auto stunport = CreateStunPort(kLocalAddr1, nat_socket_factory1()); stunport->PrepareAddress(); - ASSERT_EQ_WAIT(1U, stunport->Candidates().size(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return stunport->Candidates().size(); }, Eq(1U), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_NE(tcpport1->Candidates()[0].foundation(), stunport->Candidates()[0].foundation()); EXPECT_NE(tcpport2->Candidates()[0].foundation(), @@ -2710,43 +2949,56 @@ TEST_F(PortTest, TestCandidateFoundation) { EXPECT_NE(udpport2->Candidates()[0].foundation(), stunport->Candidates()[0].foundation()); // Verifying TURN candidate foundation. - auto turnport1 = - CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP); + auto turnport1 = CreateTurnPort(kLocalAddr1, nat_socket_factory1(), + webrtc::PROTO_UDP, webrtc::PROTO_UDP); turnport1->PrepareAddress(); - ASSERT_EQ_WAIT(1U, turnport1->Candidates().size(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return turnport1->Candidates().size(); }, Eq(1U), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_NE(udpport1->Candidates()[0].foundation(), turnport1->Candidates()[0].foundation()); EXPECT_NE(udpport2->Candidates()[0].foundation(), turnport1->Candidates()[0].foundation()); EXPECT_NE(stunport->Candidates()[0].foundation(), turnport1->Candidates()[0].foundation()); - auto turnport2 = - CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP); + auto turnport2 = CreateTurnPort(kLocalAddr1, nat_socket_factory1(), + webrtc::PROTO_UDP, webrtc::PROTO_UDP); turnport2->PrepareAddress(); - ASSERT_EQ_WAIT(1U, turnport2->Candidates().size(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return turnport2->Candidates().size(); }, Eq(1U), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_EQ(turnport1->Candidates()[0].foundation(), turnport2->Candidates()[0].foundation()); // Running a second turn server, to get different base IP address. 
- SocketAddress kTurnUdpIntAddr2("99.99.98.4", STUN_SERVER_PORT); + SocketAddress kTurnUdpIntAddr2("99.99.98.4", webrtc::STUN_SERVER_PORT); SocketAddress kTurnUdpExtAddr2("99.99.98.5", 0); - TestTurnServer turn_server2(rtc::Thread::Current(), vss(), kTurnUdpIntAddr2, + TestTurnServer turn_server2(Thread::Current(), vss(), kTurnUdpIntAddr2, kTurnUdpExtAddr2); - auto turnport3 = CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_UDP, - PROTO_UDP, kTurnUdpIntAddr2); + auto turnport3 = + CreateTurnPort(kLocalAddr1, nat_socket_factory1(), webrtc::PROTO_UDP, + webrtc::PROTO_UDP, kTurnUdpIntAddr2); turnport3->PrepareAddress(); - ASSERT_EQ_WAIT(1U, turnport3->Candidates().size(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return turnport3->Candidates().size(); }, Eq(1U), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_NE(turnport3->Candidates()[0].foundation(), turnport2->Candidates()[0].foundation()); // Start a TCP turn server, and check that two turn candidates have // different foundations if their relay protocols are different. - TestTurnServer turn_server3(rtc::Thread::Current(), vss(), kTurnTcpIntAddr, - kTurnUdpExtAddr, PROTO_TCP); - auto turnport4 = - CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_TCP, PROTO_UDP); + TestTurnServer turn_server3(Thread::Current(), vss(), kTurnTcpIntAddr, + kTurnUdpExtAddr, webrtc::PROTO_TCP); + auto turnport4 = CreateTurnPort(kLocalAddr1, nat_socket_factory1(), + webrtc::PROTO_TCP, webrtc::PROTO_UDP); turnport4->PrepareAddress(); - ASSERT_EQ_WAIT(1U, turnport4->Candidates().size(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return turnport4->Candidates().size(); }, Eq(1U), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_NE(turnport2->Candidates()[0].foundation(), turnport4->Candidates()[0].foundation()); } @@ -2754,7 +3006,7 @@ TEST_F(PortTest, TestCandidateFoundation) { // This test verifies the related addresses of different types of // ICE candidates. TEST_F(PortTest, TestCandidateRelatedAddress) { - auto nat_server = CreateNatServer(kNatAddr1, NAT_OPEN_CONE); + auto nat_server = CreateNatServer(kNatAddr1, webrtc::NAT_OPEN_CONE); auto udpport = CreateUdpPort(kLocalAddr1); udpport->PrepareAddress(); // For UDPPort, related address will be empty. @@ -2764,7 +3016,10 @@ TEST_F(PortTest, TestCandidateRelatedAddress) { // socket address. auto stunport = CreateStunPort(kLocalAddr1, nat_socket_factory1()); stunport->PrepareAddress(); - ASSERT_EQ_WAIT(1U, stunport->Candidates().size(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return stunport->Candidates().size(); }, Eq(1U), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Check STUN candidate address. EXPECT_EQ(stunport->Candidates()[0].address().ipaddr(), kNatAddr1.ipaddr()); // Check STUN candidate related address. @@ -2772,10 +3027,13 @@ TEST_F(PortTest, TestCandidateRelatedAddress) { stunport->GetLocalAddress()); // Verifying the related address for TURN candidate. // For TURN related address must be equal to the mapped address. 
- auto turnport = - CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP); + auto turnport = CreateTurnPort(kLocalAddr1, nat_socket_factory1(), + webrtc::PROTO_UDP, webrtc::PROTO_UDP); turnport->PrepareAddress(); - ASSERT_EQ_WAIT(1U, turnport->Candidates().size(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return turnport->Candidates().size(); }, Eq(1U), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); EXPECT_EQ(kTurnUdpExtAddr.ipaddr(), turnport->Candidates()[0].address().ipaddr()); EXPECT_EQ(kNatAddr1.ipaddr(), @@ -2784,9 +3042,9 @@ TEST_F(PortTest, TestCandidateRelatedAddress) { // Test priority value overflow handling when preference is set to 3. TEST_F(PortTest, TestCandidatePriority) { - cricket::Candidate cand1; + Candidate cand1; cand1.set_priority(3); - cricket::Candidate cand2; + Candidate cand2; cand2.set_priority(1); EXPECT_TRUE(cand1.priority() > cand2.priority()); } @@ -2795,11 +3053,11 @@ TEST_F(PortTest, TestCandidatePriority) { TEST_F(PortTest, TestConnectionPriority) { auto lport = CreateTestPort(kLocalAddr1, "lfrag", "lpass"); lport->SetIceTiebreaker(kTiebreakerDefault); - lport->set_type_preference(cricket::ICE_TYPE_PREFERENCE_HOST); + lport->set_type_preference(ICE_TYPE_PREFERENCE_HOST); auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass"); rport->SetIceTiebreaker(kTiebreakerDefault); - rport->set_type_preference(cricket::ICE_TYPE_PREFERENCE_RELAY_UDP); + rport->set_type_preference(ICE_TYPE_PREFERENCE_RELAY_UDP); lport->set_component(123); lport->AddCandidateAddress(SocketAddress("192.168.1.4", 1234)); rport->set_component(23); @@ -2810,8 +3068,8 @@ TEST_F(PortTest, TestConnectionPriority) { // RFC 5245 // pair priority = 2^32*MIN(G,D) + 2*MAX(G,D) + (G>D?1:0) - lport->SetIceRole(cricket::ICEROLE_CONTROLLING); - rport->SetIceRole(cricket::ICEROLE_CONTROLLED); + lport->SetIceRole(webrtc::ICEROLE_CONTROLLING); + rport->SetIceRole(webrtc::ICEROLE_CONTROLLED); Connection* lconn = lport->CreateConnection(rport->Candidates()[0], Port::ORIGIN_MESSAGE); #if defined(WEBRTC_WIN) @@ -2820,8 +3078,8 @@ TEST_F(PortTest, TestConnectionPriority) { EXPECT_EQ(0x2001EE9FC003D0BLLU, lconn->priority()); #endif - lport->SetIceRole(cricket::ICEROLE_CONTROLLED); - rport->SetIceRole(cricket::ICEROLE_CONTROLLING); + lport->SetIceRole(webrtc::ICEROLE_CONTROLLED); + rport->SetIceRole(webrtc::ICEROLE_CONTROLLING); Connection* rconn = rport->CreateConnection(lport->Candidates()[0], Port::ORIGIN_MESSAGE); #if defined(WEBRTC_WIN) @@ -2833,29 +3091,29 @@ TEST_F(PortTest, TestConnectionPriority) { // Test the Connection priority is calculated correctly. 
TEST_F(PortTest, TestConnectionPriorityWithPriorityAdjustment) { - webrtc::test::ScopedKeyValueConfig field_trials( + test::ScopedKeyValueConfig field_trials( "WebRTC-IncreaseIceCandidatePriorityHostSrflx/Enabled/"); auto lport = CreateTestPort(kLocalAddr1, "lfrag", "lpass", &field_trials); lport->SetIceTiebreaker(kTiebreakerDefault); - lport->set_type_preference(cricket::ICE_TYPE_PREFERENCE_HOST); + lport->set_type_preference(ICE_TYPE_PREFERENCE_HOST); auto rport = CreateTestPort(kLocalAddr2, "rfrag", "rpass", &field_trials); rport->SetIceTiebreaker(kTiebreakerDefault); - rport->set_type_preference(cricket::ICE_TYPE_PREFERENCE_RELAY_UDP); + rport->set_type_preference(ICE_TYPE_PREFERENCE_RELAY_UDP); lport->set_component(123); lport->AddCandidateAddress(SocketAddress("192.168.1.4", 1234)); rport->set_component(23); rport->AddCandidateAddress(SocketAddress("10.1.1.100", 1234)); - EXPECT_EQ(0x7E001E85U + (kMaxTurnServers << 8), + EXPECT_EQ(0x7E001E85U + (webrtc::kMaxTurnServers << 8), lport->Candidates()[0].priority()); - EXPECT_EQ(0x2001EE9U + (kMaxTurnServers << 8), + EXPECT_EQ(0x2001EE9U + (webrtc::kMaxTurnServers << 8), rport->Candidates()[0].priority()); // RFC 5245 // pair priority = 2^32*MIN(G,D) + 2*MAX(G,D) + (G>D?1:0) - lport->SetIceRole(cricket::ICEROLE_CONTROLLING); - rport->SetIceRole(cricket::ICEROLE_CONTROLLED); + lport->SetIceRole(webrtc::ICEROLE_CONTROLLING); + rport->SetIceRole(webrtc::ICEROLE_CONTROLLED); Connection* lconn = lport->CreateConnection(rport->Candidates()[0], Port::ORIGIN_MESSAGE); #if defined(WEBRTC_WIN) @@ -2864,8 +3122,8 @@ TEST_F(PortTest, TestConnectionPriorityWithPriorityAdjustment) { EXPECT_EQ(0x2003EE9FC007D0BLLU, lconn->priority()); #endif - lport->SetIceRole(cricket::ICEROLE_CONTROLLED); - rport->SetIceRole(cricket::ICEROLE_CONTROLLING); + lport->SetIceRole(webrtc::ICEROLE_CONTROLLED); + rport->SetIceRole(webrtc::ICEROLE_CONTROLLING); Connection* rconn = rport->CreateConnection(lport->Candidates()[0], Port::ORIGIN_MESSAGE); RTC_LOG(LS_ERROR) << "RCONN " << rconn->priority(); @@ -2883,11 +3141,11 @@ TEST_F(PortTest, TestConnectionPriorityWithPriorityAdjustment) { // the default setup where the RTT is deterministically one, which generates an // estimate given by `MINIMUM_RTT` = 100. TEST_F(PortTest, TestWritableState) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; auto port1 = CreateUdpPort(kLocalAddr1); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); auto port2 = CreateUdpPort(kLocalAddr2); - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); // Set up channels. TestChannel ch1(std::move(port1)); @@ -2896,29 +3154,42 @@ TEST_F(PortTest, TestWritableState) { // Acquire addresses. ch1.Start(); ch2.Start(); - ASSERT_EQ_SIMULATED_WAIT(1, ch1.complete_count(), kDefaultTimeout, clock); - ASSERT_EQ_SIMULATED_WAIT(1, ch2.complete_count(), kDefaultTimeout, clock); + ASSERT_THAT(webrtc::WaitUntil([&] { return ch1.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); + ASSERT_THAT(webrtc::WaitUntil([&] { return ch2.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); // Send a ping from src to dst. 
ch1.CreateConnection(GetCandidate(ch2.port())); ASSERT_TRUE(ch1.conn() != NULL); EXPECT_EQ(Connection::STATE_WRITE_INIT, ch1.conn()->write_state()); // for TCP connect - EXPECT_TRUE_SIMULATED_WAIT(ch1.conn()->connected(), kDefaultTimeout, clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return ch1.conn()->connected(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout), .clock = &clock}), + webrtc::IsRtcOk()); ch1.Ping(); SIMULATED_WAIT(!ch2.remote_address().IsNil(), kShortTimeout, clock); // Data should be sendable before the connection is accepted. char data[] = "abcd"; int data_size = arraysize(data); - rtc::PacketOptions options; + AsyncSocketPacketOptions options; EXPECT_EQ(data_size, ch1.conn()->Send(data, data_size, options)); // Accept the connection to return the binding response, transition to // writable, and allow data to be sent. ch2.AcceptConnection(GetCandidate(ch1.port())); - EXPECT_EQ_SIMULATED_WAIT(Connection::STATE_WRITABLE, - ch1.conn()->write_state(), kDefaultTimeout, clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return ch1.conn()->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); EXPECT_EQ(data_size, ch1.conn()->Send(data, data_size, options)); // Ask the connection to update state as if enough time has passed to lose @@ -2937,8 +3208,11 @@ TEST_F(PortTest, TestWritableState) { // And now allow the other side to process the pings and send binding // responses. - EXPECT_EQ_SIMULATED_WAIT(Connection::STATE_WRITABLE, - ch1.conn()->write_state(), kDefaultTimeout, clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return ch1.conn()->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); // Wait long enough for a full timeout (past however long we've already // waited). for (uint32_t i = 1; i <= CONNECTION_WRITE_CONNECT_FAILURES; ++i) { @@ -2960,11 +3234,11 @@ TEST_F(PortTest, TestWritableState) { // the default value given by `CONNECTION_WRITE_CONNECT_TIMEOUT` and // `CONNECTION_WRITE_CONNECT_FAILURES`. TEST_F(PortTest, TestWritableStateWithConfiguredThreshold) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; auto port1 = CreateUdpPort(kLocalAddr1); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); auto port2 = CreateUdpPort(kLocalAddr2); - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); // Set up channels. TestChannel ch1(std::move(port1)); @@ -2973,8 +3247,14 @@ TEST_F(PortTest, TestWritableStateWithConfiguredThreshold) { // Acquire addresses. ch1.Start(); ch2.Start(); - ASSERT_EQ_SIMULATED_WAIT(1, ch1.complete_count(), kDefaultTimeout, clock); - ASSERT_EQ_SIMULATED_WAIT(1, ch2.complete_count(), kDefaultTimeout, clock); + ASSERT_THAT(webrtc::WaitUntil([&] { return ch1.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); + ASSERT_THAT(webrtc::WaitUntil([&] { return ch2.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); // Send a ping from src to dst. ch1.CreateConnection(GetCandidate(ch2.port())); @@ -2985,8 +3265,11 @@ TEST_F(PortTest, TestWritableStateWithConfiguredThreshold) { // Accept the connection to return the binding response, transition to // writable, and allow data to be sent. 
ch2.AcceptConnection(GetCandidate(ch1.port())); - EXPECT_EQ_SIMULATED_WAIT(Connection::STATE_WRITABLE, - ch1.conn()->write_state(), kDefaultTimeout, clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return ch1.conn()->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kDefaultTimeout), + .clock = &clock}), + webrtc::IsRtcOk()); ch1.conn()->set_unwritable_timeout(1000); ch1.conn()->set_unwritable_min_checks(3); @@ -3016,9 +3299,9 @@ TEST_F(PortTest, TestWritableStateWithConfiguredThreshold) { TEST_F(PortTest, TestTimeoutForNeverWritable) { auto port1 = CreateUdpPort(kLocalAddr1); - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); auto port2 = CreateUdpPort(kLocalAddr2); - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); // Set up channels. TestChannel ch1(std::move(port1)); @@ -3045,22 +3328,24 @@ TEST_F(PortTest, TestTimeoutForNeverWritable) { // In this test `ch1` behaves like FULL mode client and we have created // port which responds to the ping message just like LITE client. TEST_F(PortTest, TestIceLiteConnectivity) { - auto ice_full_port = - CreateTestPort(kLocalAddr1, "lfrag", "lpass", - cricket::ICEROLE_CONTROLLING, kTiebreaker1); + auto ice_full_port = CreateTestPort( + kLocalAddr1, "lfrag", "lpass", webrtc::ICEROLE_CONTROLLING, kTiebreaker1); auto* ice_full_port_ptr = ice_full_port.get(); - auto ice_lite_port = CreateTestPort( - kLocalAddr2, "rfrag", "rpass", cricket::ICEROLE_CONTROLLED, kTiebreaker2); + auto ice_lite_port = CreateTestPort(kLocalAddr2, "rfrag", "rpass", + webrtc::ICEROLE_CONTROLLED, kTiebreaker2); // Setup TestChannel. This behaves like FULL mode client. TestChannel ch1(std::move(ice_full_port)); - ch1.SetIceMode(ICEMODE_FULL); + ch1.SetIceMode(webrtc::ICEMODE_FULL); // Start gathering candidates. ch1.Start(); ice_lite_port->PrepareAddress(); - ASSERT_EQ_WAIT(1, ch1.complete_count(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return ch1.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); ASSERT_FALSE(ice_lite_port->Candidates().empty()); ch1.CreateConnection(GetCandidate(ice_lite_port.get())); @@ -3073,7 +3358,10 @@ TEST_F(PortTest, TestIceLiteConnectivity) { // Verify stun ping is without USE_CANDIDATE_ATTR. Getting message directly // from port. - ASSERT_TRUE_WAIT(ice_full_port_ptr->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return ice_full_port_ptr->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); IceMessage* msg = ice_full_port_ptr->last_stun_msg(); EXPECT_TRUE(msg->GetByteString(STUN_ATTR_USE_CANDIDATE) == NULL); @@ -3082,25 +3370,34 @@ TEST_F(PortTest, TestIceLiteConnectivity) { // port, as it should be done only after receiving ping with USE_CANDIDATE. // But we need a connection to send a response message. auto* con = ice_lite_port->CreateConnection( - ice_full_port_ptr->Candidates()[0], cricket::Port::ORIGIN_MESSAGE); + ice_full_port_ptr->Candidates()[0], Port::ORIGIN_MESSAGE); std::unique_ptr request = CopyStunMessage(*msg); con->SendStunBindingResponse(request.get()); // Feeding the respone message from litemode to the full mode connection. 
- ch1.conn()->OnReadPacket(ice_lite_port->last_stun_buf()->data(), - ice_lite_port->last_stun_buf()->size(), - /* packet_time_us */ -1); + ch1.conn()->OnReadPacket(ReceivedIpPacket(ice_lite_port->last_stun_buf(), + SocketAddress(), std::nullopt)); + // Verifying full mode connection becomes writable from the response. - EXPECT_EQ_WAIT(Connection::STATE_WRITABLE, ch1.conn()->write_state(), - kDefaultTimeout); - EXPECT_TRUE_WAIT(ch1.nominated(), kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch1.conn()->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil([&] { return ch1.nominated(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // Clear existing stun messsages. Otherwise we will process old stun // message right after we send ping. ice_full_port_ptr->Reset(); // Send ping. This must have USE_CANDIDATE_ATTR. ch1.Ping(); - ASSERT_TRUE_WAIT(ice_full_port_ptr->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return ice_full_port_ptr->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); msg = ice_full_port_ptr->last_stun_msg(); EXPECT_TRUE(msg->GetByteString(STUN_ATTR_USE_CANDIDATE) != NULL); ch1.Stop(); @@ -3109,36 +3406,36 @@ TEST_F(PortTest, TestIceLiteConnectivity) { namespace { // Utility function for testing goog ping. -absl::optional GetSupportedGoogPingVersion(const StunMessage* msg) { +std::optional GetSupportedGoogPingVersion(const StunMessage* msg) { auto goog_misc = msg->GetUInt16List(STUN_ATTR_GOOG_MISC_INFO); if (goog_misc == nullptr) { - return absl::nullopt; + return std::nullopt; } if (msg->type() == STUN_BINDING_REQUEST) { if (goog_misc->Size() < - static_cast(cricket::IceGoogMiscInfoBindingRequestAttributeIndex:: + static_cast(IceGoogMiscInfoBindingRequestAttributeIndex:: SUPPORT_GOOG_PING_VERSION)) { - return absl::nullopt; + return std::nullopt; } return goog_misc->GetType( - static_cast(cricket::IceGoogMiscInfoBindingRequestAttributeIndex:: + static_cast(IceGoogMiscInfoBindingRequestAttributeIndex:: SUPPORT_GOOG_PING_VERSION)); } if (msg->type() == STUN_BINDING_RESPONSE) { if (goog_misc->Size() < - static_cast(cricket::IceGoogMiscInfoBindingResponseAttributeIndex:: + static_cast(IceGoogMiscInfoBindingResponseAttributeIndex:: SUPPORT_GOOG_PING_VERSION)) { - return absl::nullopt; + return std::nullopt; } return goog_misc->GetType( - static_cast(cricket::IceGoogMiscInfoBindingResponseAttributeIndex:: + static_cast(IceGoogMiscInfoBindingResponseAttributeIndex:: SUPPORT_GOOG_PING_VERSION)); } - return absl::nullopt; + return std::nullopt; } } // namespace @@ -3157,23 +3454,25 @@ TEST_P(GoogPingTest, TestGoogPingAnnounceEnable) { << trials.announce_goog_ping << " enable:" << trials.enable_goog_ping; - auto port1_unique = - CreateTestPort(kLocalAddr1, "lfrag", "lpass", - cricket::ICEROLE_CONTROLLING, kTiebreaker1); + auto port1_unique = CreateTestPort(kLocalAddr1, "lfrag", "lpass", + webrtc::ICEROLE_CONTROLLING, kTiebreaker1); auto* port1 = port1_unique.get(); auto port2 = CreateTestPort(kLocalAddr2, "rfrag", "rpass", - cricket::ICEROLE_CONTROLLED, kTiebreaker2); + webrtc::ICEROLE_CONTROLLED, kTiebreaker2); TestChannel ch1(std::move(port1_unique)); // Block usage of STUN_ATTR_USE_CANDIDATE so that // ch1.conn() will sent GOOG_PING_REQUEST directly. // This only makes test a bit shorter... 
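// A short sketch of the OnReadPacket migration used above and throughout this
// file: the old (data, size, packet_time_us) argument list is wrapped in a
// webrtc::ReceivedIpPacket. With `conn` and `payload` as placeholder names for
// a connection and a captured buffer:
//   conn->OnReadPacket(ReceivedIpPacket(payload, SocketAddress(), std::nullopt));
// and, where only a raw pointer, a length and the legacy timestamp are at hand:
//   conn->OnReadPacket(ReceivedIpPacket::CreateFromLegacy(
//       data, len, /*packet_time_us=*/-1));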
- ch1.SetIceMode(ICEMODE_LITE); + ch1.SetIceMode(webrtc::ICEMODE_LITE); // Start gathering candidates. ch1.Start(); port2->PrepareAddress(); - ASSERT_EQ_WAIT(1, ch1.complete_count(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return ch1.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); ASSERT_FALSE(port2->Candidates().empty()); ch1.CreateConnection(GetCandidate(port2.get())); @@ -3184,15 +3483,18 @@ TEST_P(GoogPingTest, TestGoogPingAnnounceEnable) { // Send ping. ch1.Ping(); - ASSERT_TRUE_WAIT(port1->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return port1->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); const IceMessage* request1 = port1->last_stun_msg(); ASSERT_EQ(trials.enable_goog_ping, GetSupportedGoogPingVersion(request1) && GetSupportedGoogPingVersion(request1) >= kGoogPingVersion); - auto* con = port2->CreateConnection(port1->Candidates()[0], - cricket::Port::ORIGIN_MESSAGE); + auto* con = + port2->CreateConnection(port1->Candidates()[0], Port::ORIGIN_MESSAGE); con->SetIceFieldTrials(&trials); con->SendStunBindingResponse(request1); @@ -3205,15 +3507,17 @@ TEST_P(GoogPingTest, TestGoogPingAnnounceEnable) { GetSupportedGoogPingVersion(response) >= kGoogPingVersion); // Feeding the respone message back. - ch1.conn()->OnReadPacket(port2->last_stun_buf()->data(), - port2->last_stun_buf()->size(), - /* packet_time_us */ -1); + ch1.conn()->OnReadPacket( + ReceivedIpPacket(port2->last_stun_buf(), SocketAddress(), std::nullopt)); port1->Reset(); port2->Reset(); ch1.Ping(); - ASSERT_TRUE_WAIT(port1->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return port1->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); const IceMessage* request2 = port1->last_stun_msg(); // It should be a GOOG_PING if both of these are TRUE @@ -3249,23 +3553,25 @@ TEST_F(PortTest, TestGoogPingUnsupportedVersionInStunBinding) { trials.announce_goog_ping = true; trials.enable_goog_ping = true; - auto port1_unique = - CreateTestPort(kLocalAddr1, "lfrag", "lpass", - cricket::ICEROLE_CONTROLLING, kTiebreaker1); + auto port1_unique = CreateTestPort(kLocalAddr1, "lfrag", "lpass", + webrtc::ICEROLE_CONTROLLING, kTiebreaker1); auto* port1 = port1_unique.get(); auto port2 = CreateTestPort(kLocalAddr2, "rfrag", "rpass", - cricket::ICEROLE_CONTROLLED, kTiebreaker2); + webrtc::ICEROLE_CONTROLLED, kTiebreaker2); TestChannel ch1(std::move(port1_unique)); // Block usage of STUN_ATTR_USE_CANDIDATE so that // ch1.conn() will sent GOOG_PING_REQUEST directly. // This only makes test a bit shorter... - ch1.SetIceMode(ICEMODE_LITE); + ch1.SetIceMode(webrtc::ICEMODE_LITE); // Start gathering candidates. ch1.Start(); port2->PrepareAddress(); - ASSERT_EQ_WAIT(1, ch1.complete_count(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return ch1.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); ASSERT_FALSE(port2->Candidates().empty()); ch1.CreateConnection(GetCandidate(port2.get())); @@ -3276,7 +3582,10 @@ TEST_F(PortTest, TestGoogPingUnsupportedVersionInStunBinding) { // Send ping. 
ch1.Ping(); - ASSERT_TRUE_WAIT(port1->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return port1->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); const IceMessage* request1 = port1->last_stun_msg(); ASSERT_TRUE(GetSupportedGoogPingVersion(request1) && @@ -3292,15 +3601,14 @@ TEST_F(PortTest, TestGoogPingUnsupportedVersionInStunBinding) { auto list = StunAttribute::CreateUInt16ListAttribute(STUN_ATTR_GOOG_MISC_INFO); list->AddTypeAtIndex( - static_cast( - cricket::IceGoogMiscInfoBindingRequestAttributeIndex:: - SUPPORT_GOOG_PING_VERSION), + static_cast(IceGoogMiscInfoBindingRequestAttributeIndex:: + SUPPORT_GOOG_PING_VERSION), /* version */ 0); modified_request1->AddAttribute(std::move(list)); modified_request1->AddMessageIntegrity("rpass"); } - auto* con = port2->CreateConnection(port1->Candidates()[0], - cricket::Port::ORIGIN_MESSAGE); + auto* con = + port2->CreateConnection(port1->Candidates()[0], Port::ORIGIN_MESSAGE); con->SetIceFieldTrials(&trials); con->SendStunBindingResponse(modified_request1.get()); @@ -3320,23 +3628,25 @@ TEST_F(PortTest, TestGoogPingUnsupportedVersionInStunBindingResponse) { trials.announce_goog_ping = true; trials.enable_goog_ping = true; - auto port1_unique = - CreateTestPort(kLocalAddr1, "lfrag", "lpass", - cricket::ICEROLE_CONTROLLING, kTiebreaker1); + auto port1_unique = CreateTestPort(kLocalAddr1, "lfrag", "lpass", + webrtc::ICEROLE_CONTROLLING, kTiebreaker1); auto* port1 = port1_unique.get(); auto port2 = CreateTestPort(kLocalAddr2, "rfrag", "rpass", - cricket::ICEROLE_CONTROLLED, kTiebreaker2); + webrtc::ICEROLE_CONTROLLED, kTiebreaker2); TestChannel ch1(std::move(port1_unique)); // Block usage of STUN_ATTR_USE_CANDIDATE so that // ch1.conn() will sent GOOG_PING_REQUEST directly. // This only makes test a bit shorter... - ch1.SetIceMode(ICEMODE_LITE); + ch1.SetIceMode(webrtc::ICEMODE_LITE); // Start gathering candidates. ch1.Start(); port2->PrepareAddress(); - ASSERT_EQ_WAIT(1, ch1.complete_count(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return ch1.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); ASSERT_FALSE(port2->Candidates().empty()); ch1.CreateConnection(GetCandidate(port2.get())); @@ -3347,14 +3657,17 @@ TEST_F(PortTest, TestGoogPingUnsupportedVersionInStunBindingResponse) { // Send ping. 
ch1.Ping(); - ASSERT_TRUE_WAIT(port1->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return port1->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); const IceMessage* request1 = port1->last_stun_msg(); ASSERT_TRUE(GetSupportedGoogPingVersion(request1) && GetSupportedGoogPingVersion(request1) >= kGoogPingVersion); - auto* con = port2->CreateConnection(port1->Candidates()[0], - cricket::Port::ORIGIN_MESSAGE); + auto* con = + port2->CreateConnection(port1->Candidates()[0], Port::ORIGIN_MESSAGE); con->SetIceFieldTrials(&trials); con->SendStunBindingResponse(request1); @@ -3377,26 +3690,29 @@ TEST_F(PortTest, TestGoogPingUnsupportedVersionInStunBindingResponse) { auto list = StunAttribute::CreateUInt16ListAttribute(STUN_ATTR_GOOG_MISC_INFO); list->AddTypeAtIndex( - static_cast( - cricket::IceGoogMiscInfoBindingResponseAttributeIndex:: - SUPPORT_GOOG_PING_VERSION), + static_cast(IceGoogMiscInfoBindingResponseAttributeIndex:: + SUPPORT_GOOG_PING_VERSION), /* version */ 0); modified_response->AddAttribute(std::move(list)); modified_response->AddMessageIntegrity("rpass"); modified_response->AddFingerprint(); } - rtc::ByteBufferWriter buf; + ByteBufferWriter buf; modified_response->Write(&buf); // Feeding the modified respone message back. - ch1.conn()->OnReadPacket(buf.Data(), buf.Length(), /* packet_time_us */ -1); + ch1.conn()->OnReadPacket(ReceivedIpPacket::CreateFromLegacy( + buf.Data(), buf.Length(), /*packet_time_us=*/-1)); port1->Reset(); port2->Reset(); ch1.Ping(); - ASSERT_TRUE_WAIT(port1->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return port1->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); // This should now be a STUN_BINDING...without a kGoogPingVersion const IceMessage* request2 = port1->last_stun_msg(); @@ -3420,23 +3736,25 @@ TEST_F(PortTest, TestChangeInAttributeMakesGoogPingFallsbackToStunBinding) { trials.announce_goog_ping = true; trials.enable_goog_ping = true; - auto port1_unique = - CreateTestPort(kLocalAddr1, "lfrag", "lpass", - cricket::ICEROLE_CONTROLLING, kTiebreaker1); + auto port1_unique = CreateTestPort(kLocalAddr1, "lfrag", "lpass", + webrtc::ICEROLE_CONTROLLING, kTiebreaker1); auto* port1 = port1_unique.get(); auto port2 = CreateTestPort(kLocalAddr2, "rfrag", "rpass", - cricket::ICEROLE_CONTROLLED, kTiebreaker2); + webrtc::ICEROLE_CONTROLLED, kTiebreaker2); TestChannel ch1(std::move(port1_unique)); // Block usage of STUN_ATTR_USE_CANDIDATE so that // ch1.conn() will sent GOOG_PING_REQUEST directly. // This only makes test a bit shorter... - ch1.SetIceMode(ICEMODE_LITE); + ch1.SetIceMode(webrtc::ICEMODE_LITE); // Start gathering candidates. ch1.Start(); port2->PrepareAddress(); - ASSERT_EQ_WAIT(1, ch1.complete_count(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return ch1.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); ASSERT_FALSE(port2->Candidates().empty()); ch1.CreateConnection(GetCandidate(port2.get())); @@ -3447,10 +3765,13 @@ TEST_F(PortTest, TestChangeInAttributeMakesGoogPingFallsbackToStunBinding) { // Send ping. 
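// Sketch of the version check that the GoogPing fallback tests above and below
// rely on, using the GetSupportedGoogPingVersion helper defined earlier in this
// file; `last_request` is a placeholder for the captured IceMessage:
//   auto version = GetSupportedGoogPingVersion(last_request);
//   bool use_goog_ping = trials.enable_goog_ping && version.has_value() &&
//                        *version >= kGoogPingVersion;
//   // A peer that advertises version 0, or omits GOOG_MISC_INFO entirely,
//   // keeps the connection on plain STUN_BINDING_REQUESTs.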
ch1.Ping(); - ASSERT_TRUE_WAIT(port1->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return port1->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); const IceMessage* msg = port1->last_stun_msg(); - auto* con = port2->CreateConnection(port1->Candidates()[0], - cricket::Port::ORIGIN_MESSAGE); + auto* con = + port2->CreateConnection(port1->Candidates()[0], Port::ORIGIN_MESSAGE); con->SetIceFieldTrials(&trials); // Feed the message into the connection. @@ -3462,15 +3783,17 @@ TEST_F(PortTest, TestChangeInAttributeMakesGoogPingFallsbackToStunBinding) { ASSERT_TRUE(GetSupportedGoogPingVersion(response) >= kGoogPingVersion); // Feeding the respone message back. - ch1.conn()->OnReadPacket(port2->last_stun_buf()->data(), - port2->last_stun_buf()->size(), - /* packet_time_us */ -1); + ch1.conn()->OnReadPacket( + ReceivedIpPacket(port2->last_stun_buf(), SocketAddress(), std::nullopt)); port1->Reset(); port2->Reset(); ch1.Ping(); - ASSERT_TRUE_WAIT(port1->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return port1->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); const IceMessage* msg2 = port1->last_stun_msg(); // It should be a GOOG_PING if both of these are TRUE @@ -3491,7 +3814,10 @@ TEST_F(PortTest, TestChangeInAttributeMakesGoogPingFallsbackToStunBinding) { ch1.conn()->set_use_candidate_attr(!ch1.conn()->use_candidate_attr()); ch1.Ping(); - ASSERT_TRUE_WAIT(port1->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return port1->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); const IceMessage* msg3 = port1->last_stun_msg(); // It should be a STUN_BINDING_REQUEST @@ -3506,23 +3832,25 @@ TEST_F(PortTest, TestErrorResponseMakesGoogPingFallBackToStunBinding) { trials.announce_goog_ping = true; trials.enable_goog_ping = true; - auto port1_unique = - CreateTestPort(kLocalAddr1, "lfrag", "lpass", - cricket::ICEROLE_CONTROLLING, kTiebreaker1); + auto port1_unique = CreateTestPort(kLocalAddr1, "lfrag", "lpass", + webrtc::ICEROLE_CONTROLLING, kTiebreaker1); auto* port1 = port1_unique.get(); auto port2 = CreateTestPort(kLocalAddr2, "rfrag", "rpass", - cricket::ICEROLE_CONTROLLED, kTiebreaker2); + webrtc::ICEROLE_CONTROLLED, kTiebreaker2); TestChannel ch1(std::move(port1_unique)); // Block usage of STUN_ATTR_USE_CANDIDATE so that // ch1.conn() will sent GOOG_PING_REQUEST directly. // This only makes test a bit shorter... - ch1.SetIceMode(ICEMODE_LITE); + ch1.SetIceMode(webrtc::ICEMODE_LITE); // Start gathering candidates. ch1.Start(); port2->PrepareAddress(); - ASSERT_EQ_WAIT(1, ch1.complete_count(), kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return ch1.complete_count(); }, Eq(1), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); ASSERT_FALSE(port2->Candidates().empty()); ch1.CreateConnection(GetCandidate(port2.get())); @@ -3533,10 +3861,13 @@ TEST_F(PortTest, TestErrorResponseMakesGoogPingFallBackToStunBinding) { // Send ping. 
ch1.Ping(); - ASSERT_TRUE_WAIT(port1->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return port1->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); const IceMessage* msg = port1->last_stun_msg(); - auto* con = port2->CreateConnection(port1->Candidates()[0], - cricket::Port::ORIGIN_MESSAGE); + auto* con = + port2->CreateConnection(port1->Candidates()[0], Port::ORIGIN_MESSAGE); con->SetIceFieldTrials(&trials); // Feed the message into the connection. @@ -3548,15 +3879,17 @@ TEST_F(PortTest, TestErrorResponseMakesGoogPingFallBackToStunBinding) { ASSERT_TRUE(GetSupportedGoogPingVersion(response) >= kGoogPingVersion); // Feeding the respone message back. - ch1.conn()->OnReadPacket(port2->last_stun_buf()->data(), - port2->last_stun_buf()->size(), - /* packet_time_us */ -1); + ch1.conn()->OnReadPacket( + ReceivedIpPacket(port2->last_stun_buf(), SocketAddress(), std::nullopt)); port1->Reset(); port2->Reset(); ch1.Ping(); - ASSERT_TRUE_WAIT(port1->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return port1->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); const IceMessage* msg2 = port1->last_stun_msg(); // It should be a GOOG_PING. @@ -3573,18 +3906,21 @@ TEST_F(PortTest, TestErrorResponseMakesGoogPingFallBackToStunBinding) { StunMessage error_response(GOOG_PING_ERROR_RESPONSE); error_response.SetTransactionIdForTesting(response2->transaction_id()); error_response.AddMessageIntegrity32("rpass"); - rtc::ByteBufferWriter buf; + ByteBufferWriter buf; error_response.Write(&buf); - ch1.conn()->OnReadPacket(buf.Data(), buf.Length(), - /* packet_time_us */ -1); + ch1.conn()->OnReadPacket(ReceivedIpPacket::CreateFromLegacy( + buf.Data(), buf.Length(), /*packet_time_us=*/-1)); // And now the third ping...this should be a binding. port1->Reset(); port2->Reset(); ch1.Ping(); - ASSERT_TRUE_WAIT(port1->last_stun_msg() != NULL, kDefaultTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return port1->last_stun_msg(); }, NotNull(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); const IceMessage* msg3 = port1->last_stun_msg(); // It should be a STUN_BINDING_REQUEST @@ -3597,18 +3933,18 @@ TEST_F(PortTest, TestErrorResponseMakesGoogPingFallBackToStunBinding) { // port will time out after connectivity is lost, if they are not marked as // "keep alive until pruned." TEST_F(PortTest, TestPortTimeoutIfNotKeptAlive) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; int timeout_delay = 100; auto port1 = CreateUdpPort(kLocalAddr1); ConnectToSignalDestroyed(port1.get()); port1->set_timeout_delay(timeout_delay); // milliseconds - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); port1->SetIceTiebreaker(kTiebreaker1); auto port2 = CreateUdpPort(kLocalAddr2); ConnectToSignalDestroyed(port2.get()); port2->set_timeout_delay(timeout_delay); // milliseconds - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); port2->SetIceTiebreaker(kTiebreaker2); // Set up channels and ensure both ports will be deleted. @@ -3619,26 +3955,28 @@ TEST_F(PortTest, TestPortTimeoutIfNotKeptAlive) { StartConnectAndStopChannels(&ch1, &ch2); // After the connection is destroyed, the port will be destroyed because // none of them is marked as "keep alive until pruned. 
- EXPECT_EQ_SIMULATED_WAIT(2, ports_destroyed(), 110, clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return ports_destroyed(); }, Eq(2), + {.clock = &clock}), + webrtc::IsRtcOk()); } // Test that if after all connection are destroyed, new connections are created // and destroyed again, ports won't be destroyed until a timeout period passes // after the last set of connections are all destroyed. TEST_F(PortTest, TestPortTimeoutAfterNewConnectionCreatedAndDestroyed) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; int timeout_delay = 100; auto port1 = CreateUdpPort(kLocalAddr1); ConnectToSignalDestroyed(port1.get()); port1->set_timeout_delay(timeout_delay); // milliseconds - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); port1->SetIceTiebreaker(kTiebreaker1); auto port2 = CreateUdpPort(kLocalAddr2); ConnectToSignalDestroyed(port2.get()); port2->set_timeout_delay(timeout_delay); // milliseconds - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); port2->SetIceTiebreaker(kTiebreaker2); // Set up channels and ensure both ports will be deleted. @@ -3660,31 +3998,33 @@ TEST_F(PortTest, TestPortTimeoutAfterNewConnectionCreatedAndDestroyed) { EXPECT_EQ(0, ports_destroyed()); // The ports on both sides should be destroyed after timeout. - EXPECT_TRUE_SIMULATED_WAIT(ports_destroyed() == 2, 30, clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return ports_destroyed(); }, Eq(2), + {.clock = &clock}), + webrtc::IsRtcOk()); } // This test case verifies that neither the controlling port nor the controlled // port will time out after connectivity is lost if they are marked as "keep // alive until pruned". They will time out after they are pruned. TEST_F(PortTest, TestPortNotTimeoutUntilPruned) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; int timeout_delay = 100; auto port1 = CreateUdpPort(kLocalAddr1); ConnectToSignalDestroyed(port1.get()); port1->set_timeout_delay(timeout_delay); // milliseconds - port1->SetIceRole(cricket::ICEROLE_CONTROLLING); + port1->SetIceRole(webrtc::ICEROLE_CONTROLLING); port1->SetIceTiebreaker(kTiebreaker1); auto port2 = CreateUdpPort(kLocalAddr2); ConnectToSignalDestroyed(port2.get()); port2->set_timeout_delay(timeout_delay); // milliseconds - port2->SetIceRole(cricket::ICEROLE_CONTROLLED); + port2->SetIceRole(webrtc::ICEROLE_CONTROLLED); port2->SetIceTiebreaker(kTiebreaker2); // The connection must not be destroyed before a connection is attempted. EXPECT_EQ(0, ports_destroyed()); - port1->set_component(cricket::ICE_CANDIDATE_COMPONENT_DEFAULT); - port2->set_component(cricket::ICE_CANDIDATE_COMPONENT_DEFAULT); + port1->set_component(ICE_CANDIDATE_COMPONENT_DEFAULT); + port2->set_component(ICE_CANDIDATE_COMPONENT_DEFAULT); // Set up channels and keep the port alive. TestChannel ch1(std::move(port1)); @@ -3701,27 +4041,29 @@ TEST_F(PortTest, TestPortNotTimeoutUntilPruned) { ch1.port()->Prune(); ch2.port()->Prune(); // The ports on both sides should be destroyed after timeout. 
- EXPECT_TRUE_SIMULATED_WAIT(ports_destroyed() == 2, 1, clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return ports_destroyed(); }, Eq(2), + {.clock = &clock}), + webrtc::IsRtcOk()); } TEST_F(PortTest, TestSupportsProtocol) { auto udp_port = CreateUdpPort(kLocalAddr1); - EXPECT_TRUE(udp_port->SupportsProtocol(UDP_PROTOCOL_NAME)); - EXPECT_FALSE(udp_port->SupportsProtocol(TCP_PROTOCOL_NAME)); + EXPECT_TRUE(udp_port->SupportsProtocol(webrtc::UDP_PROTOCOL_NAME)); + EXPECT_FALSE(udp_port->SupportsProtocol(webrtc::TCP_PROTOCOL_NAME)); auto stun_port = CreateStunPort(kLocalAddr1, nat_socket_factory1()); - EXPECT_TRUE(stun_port->SupportsProtocol(UDP_PROTOCOL_NAME)); - EXPECT_FALSE(stun_port->SupportsProtocol(TCP_PROTOCOL_NAME)); + EXPECT_TRUE(stun_port->SupportsProtocol(webrtc::UDP_PROTOCOL_NAME)); + EXPECT_FALSE(stun_port->SupportsProtocol(webrtc::TCP_PROTOCOL_NAME)); auto tcp_port = CreateTcpPort(kLocalAddr1); - EXPECT_TRUE(tcp_port->SupportsProtocol(TCP_PROTOCOL_NAME)); - EXPECT_TRUE(tcp_port->SupportsProtocol(SSLTCP_PROTOCOL_NAME)); - EXPECT_FALSE(tcp_port->SupportsProtocol(UDP_PROTOCOL_NAME)); - - auto turn_port = - CreateTurnPort(kLocalAddr1, nat_socket_factory1(), PROTO_UDP, PROTO_UDP); - EXPECT_TRUE(turn_port->SupportsProtocol(UDP_PROTOCOL_NAME)); - EXPECT_FALSE(turn_port->SupportsProtocol(TCP_PROTOCOL_NAME)); + EXPECT_TRUE(tcp_port->SupportsProtocol(webrtc::TCP_PROTOCOL_NAME)); + EXPECT_TRUE(tcp_port->SupportsProtocol(webrtc::SSLTCP_PROTOCOL_NAME)); + EXPECT_FALSE(tcp_port->SupportsProtocol(webrtc::UDP_PROTOCOL_NAME)); + + auto turn_port = CreateTurnPort(kLocalAddr1, nat_socket_factory1(), + webrtc::PROTO_UDP, webrtc::PROTO_UDP); + EXPECT_TRUE(turn_port->SupportsProtocol(webrtc::UDP_PROTOCOL_NAME)); + EXPECT_FALSE(turn_port->SupportsProtocol(webrtc::TCP_PROTOCOL_NAME)); } // Test that SetIceParameters updates the component, ufrag and password @@ -3746,26 +4088,25 @@ TEST_F(PortTest, TestAddConnectionWithSameAddress) { port->SetIceTiebreaker(kTiebreakerDefault); port->PrepareAddress(); EXPECT_EQ(1u, port->Candidates().size()); - rtc::SocketAddress address("1.1.1.1", 5000); - cricket::Candidate candidate(1, "udp", address, 0, "", "", "relay", 0, ""); - cricket::Connection* conn1 = - port->CreateConnection(candidate, Port::ORIGIN_MESSAGE); - cricket::Connection* conn_in_use = port->GetConnection(address); + SocketAddress address("1.1.1.1", 5000); + Candidate candidate(1, "udp", address, 0, "", "", IceCandidateType::kRelay, 0, + ""); + Connection* conn1 = port->CreateConnection(candidate, Port::ORIGIN_MESSAGE); + Connection* conn_in_use = port->GetConnection(address); EXPECT_EQ(conn1, conn_in_use); EXPECT_EQ(0u, conn_in_use->remote_candidate().generation()); // Creating with a candidate with the same address again will get us a // different connection with the new candidate. candidate.set_generation(2); - cricket::Connection* conn2 = - port->CreateConnection(candidate, Port::ORIGIN_MESSAGE); + Connection* conn2 = port->CreateConnection(candidate, Port::ORIGIN_MESSAGE); EXPECT_NE(conn1, conn2); conn_in_use = port->GetConnection(address); EXPECT_EQ(conn2, conn_in_use); EXPECT_EQ(2u, conn_in_use->remote_candidate().generation()); // Make sure the new connection was not deleted. 
- rtc::Thread::Current()->ProcessMessages(300); + Thread::Current()->ProcessMessages(300); EXPECT_TRUE(port->GetConnection(address) != nullptr); } @@ -3777,21 +4118,21 @@ class ConnectionTest : public PortTest { ConnectionTest() { lport_ = CreateTestPort(kLocalAddr1, "lfrag", "lpass"); rport_ = CreateTestPort(kLocalAddr2, "rfrag", "rpass"); - lport_->SetIceRole(cricket::ICEROLE_CONTROLLING); + lport_->SetIceRole(webrtc::ICEROLE_CONTROLLING); lport_->SetIceTiebreaker(kTiebreaker1); - rport_->SetIceRole(cricket::ICEROLE_CONTROLLED); + rport_->SetIceRole(webrtc::ICEROLE_CONTROLLED); rport_->SetIceTiebreaker(kTiebreaker2); lport_->PrepareAddress(); rport_->PrepareAddress(); } - rtc::ScopedFakeClock clock_; + ScopedFakeClock clock_; int num_state_changes_ = 0; Connection* CreateConnection(IceRole role) { Connection* conn; - if (role == cricket::ICEROLE_CONTROLLING) { + if (role == webrtc::ICEROLE_CONTROLLING) { conn = lport_->CreateConnection(rport_->Candidates()[0], Port::ORIGIN_MESSAGE); } else { @@ -3806,30 +4147,36 @@ class ConnectionTest : public PortTest { void SendPingAndCaptureReply(Connection* lconn, Connection* rconn, int64_t ms, - rtc::BufferT* reply) { + BufferT* reply) { TestPort* lport = lconn->PortForTest() == lport_.get() ? lport_.get() : rport_.get(); TestPort* rport = rconn->PortForTest() == rport_.get() ? rport_.get() : lport_.get(); - lconn->Ping(rtc::TimeMillis()); - ASSERT_TRUE_WAIT(lport->last_stun_msg(), kDefaultTimeout); - ASSERT_TRUE(lport->last_stun_buf()); - rconn->OnReadPacket(lport->last_stun_buf()->data(), - lport->last_stun_buf()->size(), - /* packet_time_us */ -1); - clock_.AdvanceTime(webrtc::TimeDelta::Millis(ms)); - ASSERT_TRUE_WAIT(rport->last_stun_msg(), kDefaultTimeout); - ASSERT_TRUE(rport->last_stun_buf()); - *reply = std::move(*rport->last_stun_buf()); + lconn->Ping(webrtc::TimeMillis()); + ASSERT_THAT( + webrtc::WaitUntil([&] { return lport->last_stun_msg(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + ASSERT_GT(lport->last_stun_buf().size(), 0u); + rconn->OnReadPacket(ReceivedIpPacket(lport->last_stun_buf(), + SocketAddress(), std::nullopt)); + + clock_.AdvanceTime(TimeDelta::Millis(ms)); + ASSERT_THAT( + webrtc::WaitUntil([&] { return rport->last_stun_msg(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + ASSERT_GT(rport->last_stun_buf().size(), 0u); + reply->SetData(rport->last_stun_buf()); } void SendPingAndReceiveResponse(Connection* lconn, Connection* rconn, int64_t ms) { - rtc::BufferT reply; + BufferT reply; SendPingAndCaptureReply(lconn, rconn, ms, &reply); - lconn->OnReadPacket(reply.data(), reply.size(), - /* packet_time_us */ -1); + + lconn->OnReadPacket(ReceivedIpPacket(reply, SocketAddress(), std::nullopt)); } void OnConnectionStateChange(Connection* connection) { num_state_changes_++; } @@ -3839,8 +4186,8 @@ class ConnectionTest : public PortTest { }; TEST_F(ConnectionTest, ConnectionForgetLearnedState) { - Connection* lconn = CreateConnection(ICEROLE_CONTROLLING); - Connection* rconn = CreateConnection(ICEROLE_CONTROLLED); + Connection* lconn = CreateConnection(webrtc::ICEROLE_CONTROLLING); + Connection* rconn = CreateConnection(webrtc::ICEROLE_CONTROLLED); EXPECT_FALSE(lconn->writable()); EXPECT_FALSE(lconn->receiving()); @@ -3874,15 +4221,15 @@ TEST_F(ConnectionTest, ConnectionForgetLearnedState) { } TEST_F(ConnectionTest, ConnectionForgetLearnedStateDiscardsPendingPings) { - Connection* lconn = CreateConnection(ICEROLE_CONTROLLING); - Connection* 
rconn = CreateConnection(ICEROLE_CONTROLLED); + Connection* lconn = CreateConnection(webrtc::ICEROLE_CONTROLLING); + Connection* rconn = CreateConnection(webrtc::ICEROLE_CONTROLLED); SendPingAndReceiveResponse(lconn, rconn, 10); EXPECT_TRUE(lconn->writable()); EXPECT_TRUE(lconn->receiving()); - rtc::BufferT reply; + BufferT reply; SendPingAndCaptureReply(lconn, rconn, 10, &reply); lconn->ForgetLearnedState(); @@ -3890,8 +4237,7 @@ TEST_F(ConnectionTest, ConnectionForgetLearnedStateDiscardsPendingPings) { EXPECT_FALSE(lconn->writable()); EXPECT_FALSE(lconn->receiving()); - lconn->OnReadPacket(reply.data(), reply.size(), - /* packet_time_us */ -1); + lconn->OnReadPacket(ReceivedIpPacket(reply, SocketAddress(), std::nullopt)); // That reply was discarded due to the ForgetLearnedState() while it was // outstanding. @@ -3905,8 +4251,8 @@ TEST_F(ConnectionTest, ConnectionForgetLearnedStateDiscardsPendingPings) { } TEST_F(ConnectionTest, ConnectionForgetLearnedStateDoesNotTriggerStateChange) { - Connection* lconn = CreateConnection(ICEROLE_CONTROLLING); - Connection* rconn = CreateConnection(ICEROLE_CONTROLLED); + Connection* lconn = CreateConnection(webrtc::ICEROLE_CONTROLLING); + Connection* rconn = CreateConnection(webrtc::ICEROLE_CONTROLLED); EXPECT_EQ(num_state_changes_, 0); SendPingAndReceiveResponse(lconn, rconn, 10); @@ -3926,8 +4272,8 @@ TEST_F(ConnectionTest, ConnectionForgetLearnedStateDoesNotTriggerStateChange) { // Sending a delta and getting a delta ack in response. TEST_F(ConnectionTest, SendReceiveGoogDelta) { constexpr int64_t ms = 10; - Connection* lconn = CreateConnection(ICEROLE_CONTROLLING); - Connection* rconn = CreateConnection(ICEROLE_CONTROLLED); + Connection* lconn = CreateConnection(webrtc::ICEROLE_CONTROLLING); + Connection* rconn = CreateConnection(webrtc::ICEROLE_CONTROLLED); std::unique_ptr delta = absl::WrapUnique(new StunByteStringAttribute(STUN_ATTR_GOOG_DELTA)); @@ -3941,7 +4287,7 @@ TEST_F(ConnectionTest, SendReceiveGoogDelta) { lconn->SetStunDictConsumer( // DeltaReceived [](const StunByteStringAttribute* delta) - -> std::unique_ptr { return nullptr; }, + -> std::unique_ptr { return nullptr; }, // DeltaAckReceived [&](webrtc::RTCErrorOr error_or_ack) { received_goog_delta_ack = true; @@ -3958,20 +4304,28 @@ TEST_F(ConnectionTest, SendReceiveGoogDelta) { return std::move(delta_ack); }, // DeltaAckReceived - [](webrtc::RTCErrorOr error_or__ack) {}); + [](webrtc::RTCErrorOr error_or__ack) { + }); - lconn->Ping(rtc::TimeMillis(), std::move(delta)); - ASSERT_TRUE_WAIT(lport_->last_stun_msg(), kDefaultTimeout); - ASSERT_TRUE(lport_->last_stun_buf()); - rconn->OnReadPacket(lport_->last_stun_buf()->data(), - lport_->last_stun_buf()->size(), /* packet_time_us */ -1); + lconn->Ping(webrtc::TimeMillis(), std::move(delta)); + ASSERT_THAT( + webrtc::WaitUntil([&] { return lport_->last_stun_msg(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + ASSERT_GT(lport_->last_stun_buf().size(), 0u); + rconn->OnReadPacket( + ReceivedIpPacket(lport_->last_stun_buf(), SocketAddress(), std::nullopt)); EXPECT_TRUE(received_goog_delta); - clock_.AdvanceTime(webrtc::TimeDelta::Millis(ms)); - ASSERT_TRUE_WAIT(rport_->last_stun_msg(), kDefaultTimeout); - ASSERT_TRUE(rport_->last_stun_buf()); - lconn->OnReadPacket(rport_->last_stun_buf()->data(), - rport_->last_stun_buf()->size(), /* packet_time_us */ -1); + clock_.AdvanceTime(TimeDelta::Millis(ms)); + ASSERT_THAT( + webrtc::WaitUntil([&] { return rport_->last_stun_msg(); }, IsTrue(), + {.timeout = 
TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + ASSERT_GT(rport_->last_stun_buf().size(), 0u); + lconn->OnReadPacket( + ReceivedIpPacket(rport_->last_stun_buf(), SocketAddress(), std::nullopt)); + EXPECT_TRUE(received_goog_delta_ack); } @@ -3979,8 +4333,8 @@ TEST_F(ConnectionTest, SendReceiveGoogDelta) { // a delta ack in reply gives an error callback. TEST_F(ConnectionTest, SendGoogDeltaNoReply) { constexpr int64_t ms = 10; - Connection* lconn = CreateConnection(ICEROLE_CONTROLLING); - Connection* rconn = CreateConnection(ICEROLE_CONTROLLED); + Connection* lconn = CreateConnection(webrtc::ICEROLE_CONTROLLING); + Connection* rconn = CreateConnection(webrtc::ICEROLE_CONTROLLED); std::unique_ptr delta = absl::WrapUnique(new StunByteStringAttribute(STUN_ATTR_GOOG_DELTA)); @@ -3990,25 +4344,31 @@ TEST_F(ConnectionTest, SendGoogDeltaNoReply) { lconn->SetStunDictConsumer( // DeltaReceived [](const StunByteStringAttribute* delta) - -> std::unique_ptr { return nullptr; }, + -> std::unique_ptr { return nullptr; }, // DeltaAckReceived [&](webrtc::RTCErrorOr error_or_ack) { received_goog_delta_ack_error = true; EXPECT_FALSE(error_or_ack.ok()); }); - lconn->Ping(rtc::TimeMillis(), std::move(delta)); - ASSERT_TRUE_WAIT(lport_->last_stun_msg(), kDefaultTimeout); - ASSERT_TRUE(lport_->last_stun_buf()); - rconn->OnReadPacket(lport_->last_stun_buf()->data(), - lport_->last_stun_buf()->size(), /* packet_time_us */ -1); - - clock_.AdvanceTime(webrtc::TimeDelta::Millis(ms)); - ASSERT_TRUE_WAIT(rport_->last_stun_msg(), kDefaultTimeout); - ASSERT_TRUE(rport_->last_stun_buf()); - lconn->OnReadPacket(rport_->last_stun_buf()->data(), - rport_->last_stun_buf()->size(), /* packet_time_us */ -1); + lconn->Ping(webrtc::TimeMillis(), std::move(delta)); + ASSERT_THAT( + webrtc::WaitUntil([&] { return lport_->last_stun_msg(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + ASSERT_GT(lport_->last_stun_buf().size(), 0u); + rconn->OnReadPacket( + ReceivedIpPacket(lport_->last_stun_buf(), SocketAddress(), std::nullopt)); + + clock_.AdvanceTime(TimeDelta::Millis(ms)); + ASSERT_THAT( + webrtc::WaitUntil([&] { return rport_->last_stun_msg(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultTimeout)}), + webrtc::IsRtcOk()); + ASSERT_GT(rport_->last_stun_buf().size(), 0u); + lconn->OnReadPacket( + ReceivedIpPacket(rport_->last_stun_buf(), SocketAddress(), std::nullopt)); EXPECT_TRUE(received_goog_delta_ack_error); } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/pseudo_tcp.cc b/p2p/base/pseudo_tcp.cc index eff86e849e..6e2e1e5eac 100644 --- a/p2p/base/pseudo_tcp.cc +++ b/p2p/base/pseudo_tcp.cc @@ -19,12 +19,14 @@ #include #include +#include "api/array_view.h" #include "rtc_base/byte_buffer.h" #include "rtc_base/byte_order.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_minmax.h" #include "rtc_base/socket.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" // The following logging is for detailed (packet-level) analysis only. 
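// The pseudo_tcp.cc hunks below move the byte-order and time helpers from the
// rtc:: to the webrtc:: namespace. For reference, the updated 32-bit helpers
// amount to the following (template arguments written out; uint32_t assumed,
// matching the original helpers):
//   inline void long_to_bytes(uint32_t val, void* buf) {
//     *static_cast<uint32_t*>(buf) = webrtc::HostToNetwork32(val);
//   }
//   inline uint32_t bytes_to_long(const void* buf) {
//     return webrtc::NetworkToHost32(*static_cast<const uint32_t*>(buf));
//   }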
@@ -33,7 +35,7 @@ #define _DBG_VERBOSE 2 #define _DEBUGMSG _DBG_NONE -namespace cricket { +namespace webrtc { ////////////////////////////////////////////////////////////////////// // Network Constants @@ -142,19 +144,19 @@ const uint32_t IDLE_TIMEOUT = 90 * 1000; // 90 seconds; ////////////////////////////////////////////////////////////////////// inline void long_to_bytes(uint32_t val, void* buf) { - *static_cast(buf) = rtc::HostToNetwork32(val); + *static_cast(buf) = webrtc::HostToNetwork32(val); } inline void short_to_bytes(uint16_t val, void* buf) { - *static_cast(buf) = rtc::HostToNetwork16(val); + *static_cast(buf) = webrtc::HostToNetwork16(val); } inline uint32_t bytes_to_long(const void* buf) { - return rtc::NetworkToHost32(*static_cast(buf)); + return webrtc::NetworkToHost32(*static_cast(buf)); } inline uint16_t bytes_to_short(const void* buf) { - return rtc::NetworkToHost16(*static_cast(buf)); + return webrtc::NetworkToHost16(*static_cast(buf)); } ////////////////////////////////////////////////////////////////////// @@ -201,9 +203,9 @@ void ReportStats() { uint32_t PseudoTcp::Now() { #if 0 // Use this to synchronize timers with logging timestamps (easier debug) - return static_cast(rtc::TimeSince(StartTime())); + return static_cast(webrtc::TimeSince(StartTime())); #else - return rtc::Time32(); + return webrtc::Time32(); #endif } @@ -286,7 +288,7 @@ void PseudoTcp::NotifyClock(uint32_t now) { return; // Check if it's time to retransmit a segment - if (m_rto_base && (rtc::TimeDiff32(m_rto_base + m_rx_rto, now) <= 0)) { + if (m_rto_base && (webrtc::TimeDiff32(m_rto_base + m_rx_rto, now) <= 0)) { if (m_slist.empty()) { RTC_DCHECK_NOTREACHED(); } else { @@ -317,8 +319,9 @@ void PseudoTcp::NotifyClock(uint32_t now) { } // Check if it's time to probe closed windows - if ((m_snd_wnd == 0) && (rtc::TimeDiff32(m_lastsend + m_rx_rto, now) <= 0)) { - if (rtc::TimeDiff32(now, m_lastrecv) >= 15000) { + if ((m_snd_wnd == 0) && + (webrtc::TimeDiff32(m_lastsend + m_rx_rto, now) <= 0)) { + if (webrtc::TimeDiff32(now, m_lastrecv) >= 15000) { closedown(ECONNABORTED); return; } @@ -332,7 +335,7 @@ void PseudoTcp::NotifyClock(uint32_t now) { } // Check if it's time to send delayed acks - if (m_t_ack && (rtc::TimeDiff32(m_t_ack + m_ack_delay, now) <= 0)) { + if (m_t_ack && (webrtc::TimeDiff32(m_t_ack + m_ack_delay, now) <= 0)) { packet(m_snd_nxt, 0, 0, 0); } @@ -609,22 +612,22 @@ bool PseudoTcp::clock_check(uint32_t now, long& nTimeout) { nTimeout = DEFAULT_TIMEOUT; if (m_t_ack) { - nTimeout = std::min(nTimeout, - rtc::TimeDiff32(m_t_ack + m_ack_delay, now)); + nTimeout = std::min( + nTimeout, webrtc::TimeDiff32(m_t_ack + m_ack_delay, now)); } if (m_rto_base) { - nTimeout = std::min(nTimeout, - rtc::TimeDiff32(m_rto_base + m_rx_rto, now)); + nTimeout = std::min( + nTimeout, webrtc::TimeDiff32(m_rto_base + m_rx_rto, now)); } if (m_snd_wnd == 0) { - nTimeout = std::min(nTimeout, - rtc::TimeDiff32(m_lastsend + m_rx_rto, now)); + nTimeout = std::min( + nTimeout, webrtc::TimeDiff32(m_lastsend + m_rx_rto, now)); } #if PSEUDO_KEEPALIVE if (m_state == TCP_ESTABLISHED) { nTimeout = std::min( nTimeout, - rtc::TimeDiff32( + webrtc::TimeDiff32( m_lasttraffic + (m_bOutgoing ? 
IDLE_PING * 3 / 2 : IDLE_PING), now)); } @@ -700,7 +703,7 @@ bool PseudoTcp::process(Segment& seg) { if ((seg.ack > m_snd_una) && (seg.ack <= m_snd_nxt)) { // Calculate round-trip time if (seg.tsecr) { - int32_t rtt = rtc::TimeDiff32(now, seg.tsecr); + int32_t rtt = webrtc::TimeDiff32(now, seg.tsecr); if (rtt >= 0) { if (m_rx_srtt == 0) { m_rx_srtt = rtt; @@ -713,8 +716,8 @@ bool PseudoTcp::process(Segment& seg) { m_rx_rttvar = (3 * m_rx_rttvar + abs_err) / 4; m_rx_srtt = (7 * m_rx_srtt + rtt) / 8; } - m_rx_rto = rtc::SafeClamp(m_rx_srtt + rtc::SafeMax(1, 4 * m_rx_rttvar), - MIN_RTO, MAX_RTO); + m_rx_rto = webrtc::SafeClamp( + m_rx_srtt + webrtc::SafeMax(1, 4 * m_rx_rttvar), MIN_RTO, MAX_RTO); #if _DEBUGMSG >= _DBG_VERBOSE RTC_LOG(LS_INFO) << "rtt: " << rtt << " srtt: " << m_rx_srtt << " rto: " << m_rx_rto; @@ -1046,7 +1049,7 @@ bool PseudoTcp::transmit(const SList::iterator& seg, uint32_t now) { void PseudoTcp::attemptSend(SendFlags sflags) { uint32_t now = Now(); - if (rtc::TimeDiff32(now, m_lastsend) > static_cast(m_rx_rto)) { + if (webrtc::TimeDiff32(now, m_lastsend) > static_cast(m_rx_rto)) { m_cwnd = m_mss; } @@ -1174,7 +1177,7 @@ void PseudoTcp::disableWindowScale() { } void PseudoTcp::queueConnectMessage() { - rtc::ByteBufferWriter buf; + ByteBufferWriter buf; buf.WriteUInt8(CTL_CONNECT); if (m_support_wnd_scale) { @@ -1183,7 +1186,8 @@ void PseudoTcp::queueConnectMessage() { buf.WriteUInt8(m_rwnd_scale); } m_snd_wnd = static_cast(buf.Length()); - queue(buf.Data(), static_cast(buf.Length()), true); + queue(reinterpret_cast(buf.Data()), + static_cast(buf.Length()), true); } void PseudoTcp::parseOptions(const char* data, uint32_t len) { @@ -1191,7 +1195,8 @@ void PseudoTcp::parseOptions(const char* data, uint32_t len) { // See http://www.freesoft.org/CIE/Course/Section4/8.htm for // parsing the options list. - rtc::ByteBufferReader buf(data, len); + ByteBufferReader buf( + MakeArrayView(reinterpret_cast(data), len)); while (buf.Length()) { uint8_t kind = TCP_OPT_EOL; buf.ReadUInt8(&kind); @@ -1211,7 +1216,7 @@ void PseudoTcp::parseOptions(const char* data, uint32_t len) { // Content of this option. 
if (opt_len <= buf.Length()) { - applyOption(kind, buf.Data(), opt_len); + applyOption(kind, reinterpret_cast(buf.Data()), opt_len); buf.Consume(opt_len); } else { RTC_LOG(LS_ERROR) << "Invalid option length received."; @@ -1293,12 +1298,12 @@ PseudoTcp::LockedFifoBuffer::LockedFifoBuffer(size_t size) PseudoTcp::LockedFifoBuffer::~LockedFifoBuffer() {} size_t PseudoTcp::LockedFifoBuffer::GetBuffered() const { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return data_length_; } bool PseudoTcp::LockedFifoBuffer::SetCapacity(size_t size) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); if (data_length_ > size) return false; @@ -1320,7 +1325,7 @@ bool PseudoTcp::LockedFifoBuffer::ReadOffset(void* buffer, size_t bytes, size_t offset, size_t* bytes_read) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return ReadOffsetLocked(buffer, bytes, offset, bytes_read); } @@ -1328,14 +1333,14 @@ bool PseudoTcp::LockedFifoBuffer::WriteOffset(const void* buffer, size_t bytes, size_t offset, size_t* bytes_written) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return WriteOffsetLocked(buffer, bytes, offset, bytes_written); } bool PseudoTcp::LockedFifoBuffer::Read(void* buffer, size_t bytes, size_t* bytes_read) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); size_t copy = 0; if (!ReadOffsetLocked(buffer, bytes, 0, ©)) return false; @@ -1353,7 +1358,7 @@ bool PseudoTcp::LockedFifoBuffer::Read(void* buffer, bool PseudoTcp::LockedFifoBuffer::Write(const void* buffer, size_t bytes, size_t* bytes_written) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); size_t copy = 0; if (!WriteOffsetLocked(buffer, bytes, 0, ©)) return false; @@ -1368,20 +1373,20 @@ bool PseudoTcp::LockedFifoBuffer::Write(const void* buffer, } void PseudoTcp::LockedFifoBuffer::ConsumeReadData(size_t size) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); RTC_DCHECK(size <= data_length_); read_position_ = (read_position_ + size) % buffer_length_; data_length_ -= size; } void PseudoTcp::LockedFifoBuffer::ConsumeWriteBuffer(size_t size) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); RTC_DCHECK(size <= buffer_length_ - data_length_); data_length_ += size; } bool PseudoTcp::LockedFifoBuffer::GetWriteRemaining(size_t* size) const { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); *size = buffer_length_ - data_length_; return true; } @@ -1429,4 +1434,4 @@ bool PseudoTcp::LockedFifoBuffer::WriteOffsetLocked(const void* buffer, return true; } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/pseudo_tcp.h b/p2p/base/pseudo_tcp.h index 65b54ba125..9ba5799b73 100644 --- a/p2p/base/pseudo_tcp.h +++ b/p2p/base/pseudo_tcp.h @@ -19,8 +19,9 @@ #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/rtc_export.h" +#include "rtc_base/thread_annotations.h" -namespace cricket { +namespace webrtc { ////////////////////////////////////////////////////////////////////// // IPseudoTcpNotify @@ -238,7 +239,7 @@ class RTC_EXPORT PseudoTcp { size_t data_length_ RTC_GUARDED_BY(mutex_); // offset to the readable data size_t read_position_ RTC_GUARDED_BY(mutex_); - mutable webrtc::Mutex mutex_; + mutable Mutex mutex_; }; IPseudoTcpNotify* m_notify; @@ -290,6 +291,15 @@ class RTC_EXPORT PseudoTcp { bool m_support_wnd_scale; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::IPseudoTcpNotify; +using ::webrtc::PseudoTcp; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_PSEUDO_TCP_H_ diff --git a/p2p/base/pseudo_tcp_unittest.cc b/p2p/base/pseudo_tcp_unittest.cc index 48df2b119a..1c3ae2118f 100644 --- a/p2p/base/pseudo_tcp_unittest.cc +++ b/p2p/base/pseudo_tcp_unittest.cc @@ -14,21 +14,28 @@ #include #include +#include #include #include #include +#include "api/array_view.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" +#include "api/test/rtc_error_matchers.h" #include "api/units/time_delta.h" -#include "rtc_base/gunit.h" -#include "rtc_base/helpers.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/logging.h" #include "rtc_base/memory_stream.h" +#include "rtc_base/stream.h" +#include "rtc_base/thread.h" #include "rtc_base/time_utils.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" -using ::cricket::PseudoTcp; +using ::testing::IsTrue; +using ::webrtc::PseudoTcp; using ::webrtc::ScopedTaskSafety; using ::webrtc::TaskQueueBase; using ::webrtc::TimeDelta; @@ -37,18 +44,20 @@ static const int kConnectTimeoutMs = 10000; // ~3 * default RTO of 3000ms static const int kTransferTimeoutMs = 15000; static const int kBlockSize = 4096; -class PseudoTcpForTest : public cricket::PseudoTcp { +class PseudoTcpForTest : public webrtc::PseudoTcp { public: - PseudoTcpForTest(cricket::IPseudoTcpNotify* notify, uint32_t conv) - : PseudoTcp(notify, conv) {} + PseudoTcpForTest(webrtc::IPseudoTcpNotify* notify, uint32_t conv) + : webrtc::PseudoTcp(notify, conv) {} - bool isReceiveBufferFull() const { return PseudoTcp::isReceiveBufferFull(); } + bool isReceiveBufferFull() const { + return webrtc::PseudoTcp::isReceiveBufferFull(); + } - void disableWindowScale() { PseudoTcp::disableWindowScale(); } + void disableWindowScale() { webrtc::PseudoTcp::disableWindowScale(); } }; class PseudoTcpTestBase : public ::testing::Test, - public cricket::IPseudoTcpNotify { + public webrtc::IPseudoTcpNotify { public: PseudoTcpTestBase() : local_(this, 1), @@ -61,11 +70,11 @@ class PseudoTcpTestBase : public ::testing::Test, loss_(0) { // Set use of the test RNG to get predictable loss patterns. Otherwise, // this test would occasionally get really unlucky loss and time out. - rtc::SetRandomTestMode(true); + webrtc::SetRandomTestMode(true); } ~PseudoTcpTestBase() { // Put it back for the next test. - rtc::SetRandomTestMode(false); + webrtc::SetRandomTestMode(false); } // If true, both endpoints will send the "connect" segment simultaneously, // rather than `local_` sending it followed by a response from `remote_`. @@ -158,7 +167,7 @@ class PseudoTcpTestBase : public ::testing::Test, return WR_SUCCESS; } // Randomly drop the desired percentage of packets. 
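// Note on the compatibility block added to pseudo_tcp.h above: while
// WEBRTC_ALLOW_DEPRECATED_NAMESPACES is defined, pre-migration call sites such
// as
//   cricket::PseudoTcp tcp(notify, /*conv=*/1);  // `notify` is a placeholder
// keep compiling, since cricket::PseudoTcp and cricket::IPseudoTcpNotify are
// now using-aliases for the webrtc:: types; new code should name
// webrtc::PseudoTcp directly.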
- if (rtc::CreateRandomId() % 100 < static_cast(loss_)) { + if (webrtc::CreateRandomId() % 100 < static_cast(loss_)) { RTC_LOG(LS_VERBOSE) << "Randomly dropping packet, size=" << len; return WR_SUCCESS; } @@ -205,13 +214,13 @@ class PseudoTcpTestBase : public ::testing::Test, TimeDelta::Millis(interval)); } - rtc::AutoThread main_thread_; + webrtc::AutoThread main_thread_; PseudoTcpForTest local_; PseudoTcpForTest remote_; ScopedTaskSafety local_timer_; ScopedTaskSafety remote_timer_; - rtc::MemoryStream send_stream_; - rtc::MemoryStream recv_stream_; + webrtc::MemoryStream send_stream_; + webrtc::MemoryStream recv_stream_; bool have_connected_; bool have_disconnected_; int local_mtu_; @@ -235,19 +244,25 @@ class PseudoTcpTest : public PseudoTcpTestBase { uint8_t ch = static_cast(i); size_t written; int error; - send_stream_.Write(rtc::MakeArrayView(&ch, 1), written, error); + send_stream_.Write(webrtc::MakeArrayView(&ch, 1), written, error); } send_stream_.Rewind(); // Prepare the receive stream. recv_stream_.ReserveSize(size); // Connect and wait until connected. - start = rtc::Time32(); + start = webrtc::Time32(); EXPECT_EQ(0, Connect()); - EXPECT_TRUE_WAIT(have_connected_, kConnectTimeoutMs); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return have_connected_; }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kConnectTimeoutMs)}), + webrtc::IsRtcOk()); // Sending will start from OnTcpWriteable and complete when all data has // been received. - EXPECT_TRUE_WAIT(have_disconnected_, kTransferTimeoutMs); - elapsed = rtc::Time32() - start; + EXPECT_THAT(webrtc::WaitUntil( + [&] { return have_disconnected_; }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTransferTimeoutMs)}), + webrtc::IsRtcOk()); + elapsed = webrtc::Time32() - start; recv_stream_.GetSize(&received); // Ensure we closed down OK and we got the right data. // TODO(?): Ensure the errors are cleared properly. @@ -301,7 +316,7 @@ class PseudoTcpTest : public PseudoTcpTestBase { size_t written; int error; recv_stream_.Write( - rtc::MakeArrayView(reinterpret_cast(block), received), + webrtc::MakeArrayView(reinterpret_cast(block), received), written, error); recv_stream_.GetPosition(&position); RTC_LOG(LS_VERBOSE) << "Received: " << position; @@ -315,9 +330,9 @@ class PseudoTcpTest : public PseudoTcpTestBase { do { send_stream_.GetPosition(&position); int error; - if (send_stream_.Read( - rtc::MakeArrayView(reinterpret_cast(block), kBlockSize), - tosend, error) != rtc::SR_EOS) { + if (send_stream_.Read(webrtc::MakeArrayView( + reinterpret_cast(block), kBlockSize), + tosend, error) != webrtc::SR_EOS) { sent = local_.Send(block, tosend); UpdateLocalClock(); if (sent != -1) { @@ -335,8 +350,8 @@ class PseudoTcpTest : public PseudoTcpTestBase { } private: - rtc::MemoryStream send_stream_; - rtc::MemoryStream recv_stream_; + webrtc::MemoryStream send_stream_; + webrtc::MemoryStream recv_stream_; }; class PseudoTcpTestPingPong : public PseudoTcpTestBase { @@ -358,19 +373,25 @@ class PseudoTcpTestPingPong : public PseudoTcpTestBase { uint8_t ch = static_cast(i); size_t written; int error; - send_stream_.Write(rtc::MakeArrayView(&ch, 1), written, error); + send_stream_.Write(webrtc::MakeArrayView(&ch, 1), written, error); } send_stream_.Rewind(); // Prepare the receive stream. recv_stream_.ReserveSize(size); // Connect and wait until connected. 
- start = rtc::Time32(); + start = webrtc::Time32(); EXPECT_EQ(0, Connect()); - EXPECT_TRUE_WAIT(have_connected_, kConnectTimeoutMs); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return have_connected_; }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kConnectTimeoutMs)}), + webrtc::IsRtcOk()); // Sending will start from OnTcpWriteable and stop when the required // number of iterations have completed. - EXPECT_TRUE_WAIT(have_disconnected_, kTransferTimeoutMs); - elapsed = rtc::TimeSince(start); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return have_disconnected_; }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTransferTimeoutMs)}), + webrtc::IsRtcOk()); + elapsed = webrtc::TimeSince(start); RTC_LOG(LS_INFO) << "Performed " << iterations << " pings in " << elapsed << " ms"; } @@ -424,8 +445,8 @@ class PseudoTcpTestPingPong : public PseudoTcpTestBase { size_t written; int error; recv_stream_.Write( - rtc::MakeArrayView(reinterpret_cast(block), - received), + webrtc::MakeArrayView(reinterpret_cast(block), + received), written, error); recv_stream_.GetPosition(&position); RTC_LOG(LS_VERBOSE) << "Received: " << position; @@ -441,8 +462,8 @@ class PseudoTcpTestPingPong : public PseudoTcpTestBase { tosend = bytes_per_send_ ? bytes_per_send_ : sizeof(block); int error; if (send_stream_.Read( - rtc::MakeArrayView(reinterpret_cast(block), tosend), - tosend, error) != rtc::SR_EOS) { + webrtc::MakeArrayView(reinterpret_cast(block), tosend), + tosend, error) != webrtc::SR_EOS) { sent = sender_->Send(block, tosend); UpdateLocalClock(); if (sent != -1) { @@ -479,7 +500,7 @@ class PseudoTcpTestReceiveWindow : public PseudoTcpTestBase { uint8_t ch = static_cast(i); size_t written; int error; - send_stream_.Write(rtc::MakeArrayView(&ch, 1), written, error); + send_stream_.Write(webrtc::MakeArrayView(&ch, 1), written, error); } send_stream_.Rewind(); @@ -488,10 +509,16 @@ class PseudoTcpTestReceiveWindow : public PseudoTcpTestBase { // Connect and wait until connected. 
EXPECT_EQ(0, Connect()); - EXPECT_TRUE_WAIT(have_connected_, kConnectTimeoutMs); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return have_connected_; }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kConnectTimeoutMs)}), + webrtc::IsRtcOk()); TaskQueueBase::Current()->PostTask([this] { WriteData(); }); - EXPECT_TRUE_WAIT(have_disconnected_, kTransferTimeoutMs); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return have_disconnected_; }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTransferTimeoutMs)}), + webrtc::IsRtcOk()); ASSERT_EQ(2u, send_position_.size()); ASSERT_EQ(2u, recv_position_.size()); @@ -518,9 +545,9 @@ class PseudoTcpTestReceiveWindow : public PseudoTcpTestBase { private: // IPseudoTcpNotify interface - virtual void OnTcpReadable(PseudoTcp* tcp) {} + virtual void OnTcpReadable(PseudoTcp* /* tcp */) {} - virtual void OnTcpWriteable(PseudoTcp* tcp) {} + virtual void OnTcpWriteable(PseudoTcp* /* tcp */) {} void ReadUntilIOPending() { char block[kBlockSize]; @@ -533,7 +560,7 @@ class PseudoTcpTestReceiveWindow : public PseudoTcpTestBase { size_t written; int error; recv_stream_.Write( - rtc::MakeArrayView(reinterpret_cast(block), received), + webrtc::MakeArrayView(reinterpret_cast(block), received), written, error); recv_stream_.GetPosition(&position); RTC_LOG(LS_VERBOSE) << "Received: " << position; @@ -560,9 +587,9 @@ class PseudoTcpTestReceiveWindow : public PseudoTcpTestBase { send_stream_.GetPosition(&position); int error; if (send_stream_.Read( - rtc::MakeArrayView(reinterpret_cast(block), - sizeof(block)), - tosend, error) != rtc::SR_EOS) { + webrtc::MakeArrayView(reinterpret_cast(block), + sizeof(block)), + tosend, error) != webrtc::SR_EOS) { sent = local_.Send(block, tosend); UpdateLocalClock(); if (sent != -1) { @@ -581,8 +608,8 @@ class PseudoTcpTestReceiveWindow : public PseudoTcpTestBase { if (packets_in_flight_ > 0) { // If there are packet tasks, attempt to continue sending after giving // those packets time to process, which should free up the send buffer. 
- rtc::Thread::Current()->PostDelayedTask([this] { WriteData(); }, - TimeDelta::Millis(10)); + webrtc::Thread::Current()->PostDelayedTask([this] { WriteData(); }, + TimeDelta::Millis(10)); } else { if (!remote_.isReceiveBufferFull()) { RTC_LOG(LS_ERROR) << "This shouldn't happen - the send buffer is full, " @@ -598,8 +625,8 @@ class PseudoTcpTestReceiveWindow : public PseudoTcpTestBase { } private: - rtc::MemoryStream send_stream_; - rtc::MemoryStream recv_stream_; + webrtc::MemoryStream send_stream_; + webrtc::MemoryStream recv_stream_; std::vector send_position_; std::vector recv_position_; diff --git a/p2p/base/regathering_controller.cc b/p2p/base/regathering_controller.cc index 572c2a616f..57575e2948 100644 --- a/p2p/base/regathering_controller.cc +++ b/p2p/base/regathering_controller.cc @@ -10,15 +10,19 @@ #include "p2p/base/regathering_controller.h" +#include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/units/time_delta.h" +#include "p2p/base/ice_transport_internal.h" +#include "rtc_base/checks.h" +#include "rtc_base/thread.h" namespace webrtc { BasicRegatheringController::BasicRegatheringController( const Config& config, - cricket::IceTransportInternal* ice_transport, - rtc::Thread* thread) + IceTransportInternal* ice_transport, + Thread* thread) : config_(config), ice_transport_(ice_transport), thread_(thread) { RTC_DCHECK(thread_); RTC_DCHECK_RUN_ON(thread_); diff --git a/p2p/base/regathering_controller.h b/p2p/base/regathering_controller.h index a0dfb8053d..8a138ecd45 100644 --- a/p2p/base/regathering_controller.h +++ b/p2p/base/regathering_controller.h @@ -12,10 +12,15 @@ #define P2P_BASE_REGATHERING_CONTROLLER_H_ #include +#include #include "api/task_queue/pending_task_safety_flag.h" #include "p2p/base/ice_transport_internal.h" +#include "p2p/base/p2p_constants.h" +#include "p2p/base/packet_transport_internal.h" #include "p2p/base/port_allocator.h" +#include "rtc_base/network_route.h" +#include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" namespace webrtc { @@ -45,19 +50,19 @@ class BasicRegatheringController : public sigslot::has_slots<> { public: struct Config { int regather_on_failed_networks_interval = - cricket::REGATHER_ON_FAILED_NETWORKS_INTERVAL; + REGATHER_ON_FAILED_NETWORKS_INTERVAL; }; BasicRegatheringController() = delete; BasicRegatheringController(const Config& config, - cricket::IceTransportInternal* ice_transport, - rtc::Thread* thread); + IceTransportInternal* ice_transport, + Thread* thread); ~BasicRegatheringController() override; // TODO(qingsi): Remove this method after implementing a new signal in // P2PTransportChannel and reacting to that signal for the initial schedules // of regathering. void Start(); - void set_allocator_session(cricket::PortAllocatorSession* allocator_session) { + void set_allocator_session(PortAllocatorSession* allocator_session) { allocator_session_ = allocator_session; } // Setting a different config of the regathering interval range on all @@ -71,10 +76,10 @@ class BasicRegatheringController : public sigslot::has_slots<> { // TODO(qingsi): Implement the following methods and use methods from the ICE // transport like GetStats to get additional information for the decision // making in regathering. 
- void OnIceTransportStateChanged(cricket::IceTransportInternal*) {} - void OnIceTransportWritableState(rtc::PacketTransportInternal*) {} - void OnIceTransportReceivingState(rtc::PacketTransportInternal*) {} - void OnIceTransportNetworkRouteChanged(absl::optional) {} + void OnIceTransportStateChanged(IceTransportInternal*) {} + void OnIceTransportWritableState(PacketTransportInternal*) {} + void OnIceTransportReceivingState(PacketTransportInternal*) {} + void OnIceTransportNetworkRouteChanged(std::optional) {} // Schedules delayed and repeated regathering of local candidates on failed // networks, where the delay in milliseconds is given by the config. Each // repetition is separated by the same delay. When scheduled, all previous @@ -87,9 +92,9 @@ class BasicRegatheringController : public sigslot::has_slots<> { // the object goes out of scope or the config changes. std::unique_ptr pending_regathering_; Config config_; - cricket::IceTransportInternal* ice_transport_; - cricket::PortAllocatorSession* allocator_session_ = nullptr; - rtc::Thread* const thread_; + IceTransportInternal* ice_transport_; + PortAllocatorSession* allocator_session_ = nullptr; + Thread* const thread_; }; } // namespace webrtc diff --git a/p2p/base/regathering_controller_unittest.cc b/p2p/base/regathering_controller_unittest.cc index 91b7270f77..9b1c216db4 100644 --- a/p2p/base/regathering_controller_unittest.cc +++ b/p2p/base/regathering_controller_unittest.cc @@ -12,35 +12,39 @@ #include #include -#include #include -#include "api/scoped_refptr.h" -#include "p2p/base/fake_port_allocator.h" -#include "p2p/base/mock_ice_transport.h" +#include "api/environment/environment_factory.h" +#include "api/transport/enums.h" +#include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/port.h" -#include "p2p/base/stun_server.h" +#include "p2p/base/port_allocator.h" +#include "p2p/base/port_interface.h" +#include "p2p/test/fake_port_allocator.h" +#include "p2p/test/mock_ice_transport.h" +#include "p2p/test/stun_server.h" +#include "rtc_base/fake_clock.h" #include "rtc_base/gunit.h" #include "rtc_base/socket_address.h" +#include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/virtual_socket_server.h" -#include "test/scoped_key_value_config.h" +#include "test/gtest.h" namespace { -const int kOnlyLocalPorts = cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_DISABLE_RELAY | - cricket::PORTALLOCATOR_DISABLE_TCP; +const int kOnlyLocalPorts = webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_DISABLE_TCP; // The address of the public STUN server. -const rtc::SocketAddress kStunAddr("99.99.99.1", cricket::STUN_SERVER_PORT); +const webrtc::SocketAddress kStunAddr("99.99.99.1", webrtc::STUN_SERVER_PORT); // The addresses for the public TURN server. 
-const rtc::SocketAddress kTurnUdpIntAddr("99.99.99.3", - cricket::STUN_SERVER_PORT); -const cricket::RelayCredentials kRelayCredentials("test", "test"); +const webrtc::SocketAddress kTurnUdpIntAddr("99.99.99.3", + webrtc::STUN_SERVER_PORT); +const webrtc::RelayCredentials kRelayCredentials("test", "test"); const char kIceUfrag[] = "UF00"; const char kIcePwd[] = "TESTICEPWD00000000000000"; -constexpr uint64_t kTiebreakerDefault = 44444; } // namespace @@ -50,36 +54,30 @@ class RegatheringControllerTest : public ::testing::Test, public sigslot::has_slots<> { public: RegatheringControllerTest() - : vss_(std::make_unique()), + : vss_(std::make_unique()), thread_(vss_.get()), - ice_transport_(std::make_unique()), - packet_socket_factory_( - std::make_unique(vss_.get())), - allocator_(std::make_unique( - rtc::Thread::Current(), - packet_socket_factory_.get(), - &field_trials_)) { - allocator_->SetIceTiebreaker(kTiebreakerDefault); + ice_transport_(std::make_unique()), + allocator_(std::make_unique(CreateEnvironment(), + vss_.get())) { BasicRegatheringController::Config regathering_config; regathering_config.regather_on_failed_networks_interval = 0; regathering_controller_.reset(new BasicRegatheringController( - regathering_config, ice_transport_.get(), rtc::Thread::Current())); + regathering_config, ice_transport_.get(), Thread::Current())); } // Initializes the allocator and gathers candidates once by StartGettingPorts. void InitializeAndGatherOnce() { - cricket::ServerAddresses stun_servers; + ServerAddresses stun_servers; stun_servers.insert(kStunAddr); - cricket::RelayServerConfig turn_server; + RelayServerConfig turn_server; turn_server.credentials = kRelayCredentials; - turn_server.ports.push_back( - cricket::ProtocolAddress(kTurnUdpIntAddr, cricket::PROTO_UDP)); - std::vector turn_servers(1, turn_server); + turn_server.ports.push_back(ProtocolAddress(kTurnUdpIntAddr, PROTO_UDP)); + std::vector turn_servers(1, turn_server); allocator_->set_flags(kOnlyLocalPorts); allocator_->SetConfiguration(stun_servers, turn_servers, 0 /* pool size */, webrtc::NO_PRUNE); allocator_session_ = allocator_->CreateSession( - "test", cricket::ICE_CANDIDATE_COMPONENT_RTP, kIceUfrag, kIcePwd); + "test", ICE_CANDIDATE_COMPONENT_RTP, kIceUfrag, kIcePwd); // The gathering will take place on the current thread and the following // call of StartGettingPorts is blocking. We will not ClearGettingPorts // prematurely. 
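// Sketch of how the controller is wired up after this change, mirroring the
// fixture above: the constructor now takes webrtc::IceTransportInternal* and
// webrtc::Thread* directly, with no cricket:: or rtc:: spellings. A zero
// interval, as in the test, makes regathering on failed networks fire
// immediately.
#include <memory>

#include "p2p/base/ice_transport_internal.h"
#include "p2p/base/regathering_controller.h"
#include "rtc_base/thread.h"

std::unique_ptr<webrtc::BasicRegatheringController> MakeController(
    webrtc::IceTransportInternal* ice_transport) {
  webrtc::BasicRegatheringController::Config config;
  config.regather_on_failed_networks_interval = 0;
  return std::make_unique<webrtc::BasicRegatheringController>(
      config, ice_transport, webrtc::Thread::Current());
  // The caller then hands it a PortAllocatorSession via
  // set_allocator_session() and calls Start(), as the tests below do.
}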
@@ -97,12 +95,12 @@ class RegatheringControllerTest : public ::testing::Test, allocator_session_->ClearGettingPorts(); } - void OnIceRegathering(cricket::PortAllocatorSession* allocator_session, - cricket::IceRegatheringReason reason) { + void OnIceRegathering(PortAllocatorSession* /* allocator_session */, + IceRegatheringReason reason) { ++count_[reason]; } - int GetRegatheringReasonCount(cricket::IceRegatheringReason reason) { + int GetRegatheringReasonCount(IceRegatheringReason reason) { return count_[reason]; } @@ -111,15 +109,13 @@ class RegatheringControllerTest : public ::testing::Test, } private: - webrtc::test::ScopedKeyValueConfig field_trials_; - std::unique_ptr vss_; - rtc::AutoSocketServerThread thread_; - std::unique_ptr ice_transport_; + std::unique_ptr vss_; + AutoSocketServerThread thread_; + std::unique_ptr ice_transport_; std::unique_ptr regathering_controller_; - std::unique_ptr packet_socket_factory_; - std::unique_ptr allocator_; - std::unique_ptr allocator_session_; - std::map count_; + std::unique_ptr allocator_; + std::unique_ptr allocator_session_; + std::map count_; }; // Tests that ICE regathering occurs only if the port allocator session is @@ -127,7 +123,7 @@ class RegatheringControllerTest : public ::testing::Test, // still in progress or the continual gathering is not enabled. TEST_F(RegatheringControllerTest, IceRegatheringDoesNotOccurIfSessionNotCleared) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; InitializeAndGatherOnce(); // Session not cleared. BasicRegatheringController::Config config; @@ -136,12 +132,12 @@ TEST_F(RegatheringControllerTest, regathering_controller()->Start(); SIMULATED_WAIT(false, 10000, clock); // Expect no regathering in the last 10s. - EXPECT_EQ(0, GetRegatheringReasonCount( - cricket::IceRegatheringReason::NETWORK_FAILURE)); + EXPECT_EQ(0, + GetRegatheringReasonCount(IceRegatheringReason::NETWORK_FAILURE)); } TEST_F(RegatheringControllerTest, IceRegatheringRepeatsAsScheduled) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; InitializeAndGatherOnceWithSessionCleared(); BasicRegatheringController::Config config; @@ -150,24 +146,24 @@ TEST_F(RegatheringControllerTest, IceRegatheringRepeatsAsScheduled) { regathering_controller()->Start(); SIMULATED_WAIT(false, 2000 - 1, clock); // Expect no regathering. - EXPECT_EQ(0, GetRegatheringReasonCount( - cricket::IceRegatheringReason::NETWORK_FAILURE)); + EXPECT_EQ(0, + GetRegatheringReasonCount(IceRegatheringReason::NETWORK_FAILURE)); SIMULATED_WAIT(false, 2, clock); // Expect regathering on all networks and on failed networks to happen once // respectively in that last 2s with 2s interval. - EXPECT_EQ(1, GetRegatheringReasonCount( - cricket::IceRegatheringReason::NETWORK_FAILURE)); + EXPECT_EQ(1, + GetRegatheringReasonCount(IceRegatheringReason::NETWORK_FAILURE)); SIMULATED_WAIT(false, 11000, clock); // Expect regathering to happen for another 5 times in 11s with 2s interval. - EXPECT_EQ(6, GetRegatheringReasonCount( - cricket::IceRegatheringReason::NETWORK_FAILURE)); + EXPECT_EQ(6, + GetRegatheringReasonCount(IceRegatheringReason::NETWORK_FAILURE)); } // Tests that the schedule of ICE regathering on failed networks can be canceled // and replaced by a new recurring schedule. 
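// The timing assertions above rely on simulated time: ScopedFakeClock swaps
// out the wall clock for the enclosing scope and SIMULATED_WAIT (from
// rtc_base/gunit.h) burns through the given interval of fake time, so a
// multi-second schedule is observed instantly. Schematic use with an
// arbitrary 2 s step, assuming a current test thread as in the fixtures:
#include "rtc_base/fake_clock.h"
#include "rtc_base/gunit.h"

void AdvanceTwoSimulatedSeconds() {
  webrtc::ScopedFakeClock clock;
  SIMULATED_WAIT(false, 2000, clock);  // Waits out 2000 ms of fake time.
}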
TEST_F(RegatheringControllerTest, ScheduleOfIceRegatheringOnFailedNetworksCanBeReplaced) { - rtc::ScopedFakeClock clock; + ScopedFakeClock clock; InitializeAndGatherOnceWithSessionCleared(); BasicRegatheringController::Config config; @@ -178,12 +174,12 @@ TEST_F(RegatheringControllerTest, regathering_controller()->SetConfig(config); SIMULATED_WAIT(false, 3000, clock); // Expect no regathering from the previous schedule. - EXPECT_EQ(0, GetRegatheringReasonCount( - cricket::IceRegatheringReason::NETWORK_FAILURE)); + EXPECT_EQ(0, + GetRegatheringReasonCount(IceRegatheringReason::NETWORK_FAILURE)); SIMULATED_WAIT(false, 11000 - 3000, clock); // Expect regathering to happen twice in the last 11s with 5s interval. - EXPECT_EQ(2, GetRegatheringReasonCount( - cricket::IceRegatheringReason::NETWORK_FAILURE)); + EXPECT_EQ(2, + GetRegatheringReasonCount(IceRegatheringReason::NETWORK_FAILURE)); } } // namespace webrtc diff --git a/p2p/base/stun_dictionary.cc b/p2p/base/stun_dictionary.cc index bf6a1e49c2..dce69ca493 100644 --- a/p2p/base/stun_dictionary.cc +++ b/p2p/base/stun_dictionary.cc @@ -11,12 +11,45 @@ #include "p2p/base/stun_dictionary.h" #include +#include +#include #include +#include +#include #include +#include +#include "api/rtc_error.h" +#include "api/transport/stun.h" +#include "rtc_base/byte_buffer.h" #include "rtc_base/logging.h" -namespace cricket { +namespace webrtc { + +namespace { + +StunAttributeValueType GetStunAttributeValueType(int value_type) { + switch (value_type) { + case STUN_VALUE_ADDRESS: + return STUN_VALUE_ADDRESS; + case STUN_VALUE_XOR_ADDRESS: + return STUN_VALUE_XOR_ADDRESS; + case STUN_VALUE_UINT32: + return STUN_VALUE_UINT32; + case STUN_VALUE_UINT64: + return STUN_VALUE_UINT64; + case STUN_VALUE_BYTE_STRING: + return STUN_VALUE_BYTE_STRING; + case STUN_VALUE_ERROR_CODE: + return STUN_VALUE_ERROR_CODE; + case STUN_VALUE_UINT16_LIST: + return STUN_VALUE_UINT16_LIST; + default: + return STUN_VALUE_UNKNOWN; + } +} + +} // namespace const StunAddressAttribute* StunDictionaryView::GetAddress(int key) const { const StunAttribute* attr = GetOrNull(key, STUN_VALUE_ADDRESS); @@ -62,7 +95,7 @@ const StunUInt16ListAttribute* StunDictionaryView::GetUInt16List( const StunAttribute* StunDictionaryView::GetOrNull( int key, - absl::optional type) const { + std::optional type) const { const auto it = attrs_.find(key); if (it == attrs_.end()) { return nullptr; @@ -77,29 +110,26 @@ const StunAttribute* StunDictionaryView::GetOrNull( return (*it).second.get(); } -webrtc::RTCErrorOr< - std::pair>>> +RTCErrorOr>>> StunDictionaryView::ParseDelta(const StunByteStringAttribute& delta) { - rtc::ByteBufferReader buf(delta.bytes(), delta.length()); + ByteBufferReader buf(delta.array_view()); uint16_t magic; if (!buf.ReadUInt16(&magic)) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to read magic number"); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Failed to read magic number"); } if (magic != kDeltaMagic) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Invalid magic number"); + return RTCError(RTCErrorType::INVALID_PARAMETER, "Invalid magic number"); } uint16_t delta_version; if (!buf.ReadUInt16(&delta_version)) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to read version"); + return RTCError(RTCErrorType::INVALID_PARAMETER, "Failed to read version"); } if (delta_version != kDeltaVersion) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Unsupported delta version"); + 
return RTCError(RTCErrorType::INVALID_PARAMETER, + "Unsupported delta version"); } // Now read all the attributes @@ -107,47 +137,45 @@ StunDictionaryView::ParseDelta(const StunByteStringAttribute& delta) { while (buf.Length()) { uint16_t key, length, value_type; if (!buf.ReadUInt16(&key)) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to read attribute key"); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Failed to read attribute key"); } if (!buf.ReadUInt16(&length)) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to read attribute length"); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Failed to read attribute length"); } if (!buf.ReadUInt16(&value_type)) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to read value type"); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Failed to read value type"); } StunAttributeValueType value_type_enum = - static_cast(value_type); + GetStunAttributeValueType(value_type); std::unique_ptr attr( StunAttribute::Create(value_type_enum, key, length, nullptr)); if (!attr) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to create attribute"); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Failed to create attribute"); } if (attr->length() != length) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Inconsistent attribute length"); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Inconsistent attribute length"); } if (!attr->Read(&buf)) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to read attribute content"); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Failed to read attribute content"); } attrs.push_back(std::move(attr)); } // The first attribute should be the version... if (attrs.empty()) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Empty delta!"); + return RTCError(RTCErrorType::INVALID_PARAMETER, "Empty delta!"); } if (attrs[0]->type() != kVersionKey || attrs[0]->value_type() != STUN_VALUE_UINT64) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Missing version!"); + return RTCError(RTCErrorType::INVALID_PARAMETER, "Missing version!"); } uint64_t version_in_delta = @@ -159,12 +187,12 @@ StunDictionaryView::ParseDelta(const StunByteStringAttribute& delta) { } // Apply a delta return an StunUInt64Attribute to ack the update. 
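// Sketch of the writer/view round trip these delta helpers implement, along
// the lines of the Sync() helper in stun_dictionary_unittest.cc. The key
// (100) and value are arbitrary, and ApplyDeltaAck is assumed to accept the
// ack attribute returned by ApplyDelta.
#include <memory>

#include "api/transport/stun.h"
#include "p2p/base/stun_dictionary.h"

void RoundTripOneUpdate() {
  webrtc::StunDictionaryWriter writer;
  webrtc::StunDictionaryView view;

  writer.SetUInt32(/*key=*/100)->SetValue(27);
  std::unique_ptr<webrtc::StunByteStringAttribute> delta = writer.CreateDelta();
  if (!delta) {
    return;  // Nothing pending to synchronize.
  }
  auto result = view.ApplyDelta(*delta);
  if (result.ok()) {
    // result.value() is {ack attribute, list of modified keys}.
    writer.ApplyDeltaAck(*result.value().first);
  }
}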
-webrtc::RTCErrorOr< +RTCErrorOr< std::pair, std::vector>> StunDictionaryView::ApplyDelta(const StunByteStringAttribute& delta) { auto parsed_delta = ParseDelta(delta); if (!parsed_delta.ok()) { - return webrtc::RTCError(parsed_delta.error()); + return RTCError(parsed_delta.error()); } uint64_t version_in_delta = parsed_delta.value().first; @@ -188,7 +216,7 @@ StunDictionaryView::ApplyDelta(const StunByteStringAttribute& delta) { << " new_length: " << new_length << " bytes_stored_: " << bytes_stored_ << " new_bytes_stored: " << new_bytes_stored; - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER); + return RTCError(RTCErrorType::INVALID_PARAMETER); } if (new_bytes_stored > max_bytes_stored_) { RTC_LOG(LS_INFO) << "attr: " << attr->type() @@ -202,7 +230,7 @@ StunDictionaryView::ApplyDelta(const StunByteStringAttribute& delta) { if (new_bytes_stored > max_bytes_stored_) { RTC_LOG(LS_INFO) << " bytes_stored_: " << bytes_stored_ << " new_bytes_stored: " << new_bytes_stored; - return webrtc::RTCError(webrtc::RTCErrorType::RESOURCE_EXHAUSTED); + return RTCError(RTCErrorType::RESOURCE_EXHAUSTED); } // Apply the update. @@ -214,7 +242,8 @@ StunDictionaryView::ApplyDelta(const StunByteStringAttribute& delta) { if (attr->value_type() == STUN_VALUE_BYTE_STRING && attr->length() == 0) { attrs_.erase(attr->type()); } else { - attrs_[attr->type()] = std::move(attr); + int attribute_type = attr->type(); + attrs_[attribute_type] = std::move(attr); } } } @@ -302,7 +331,7 @@ std::unique_ptr StunDictionaryWriter::CreateDelta() { return nullptr; } - rtc::ByteBufferWriter buf; + ByteBufferWriter buf; buf.WriteUInt16(StunDictionaryView::kDeltaMagic); // 0,1 buf.WriteUInt16(StunDictionaryView::kDeltaVersion); // 2,3 @@ -354,4 +383,4 @@ int StunDictionaryWriter::Pending() const { return pending_.size(); } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/stun_dictionary.h b/p2p/base/stun_dictionary.h index f93a1f151f..a16d7a9b26 100644 --- a/p2p/base/stun_dictionary.h +++ b/p2p/base/stun_dictionary.h @@ -11,16 +11,19 @@ #ifndef P2P_BASE_STUN_DICTIONARY_H_ #define P2P_BASE_STUN_DICTIONARY_H_ +#include +#include #include #include #include +#include #include #include #include "api/rtc_error.h" #include "api/transport/stun.h" -namespace cricket { +namespace webrtc { // A StunDictionaryView is a dictionary of StunAttributes. // - the StunAttributes can be read using the |Get|-methods. @@ -72,7 +75,7 @@ class StunDictionaryView { // a pair with // - StunUInt64Attribute to ack the |delta|. // - vector of keys that was modified. - webrtc::RTCErrorOr< + RTCErrorOr< std::pair, std::vector>> ApplyDelta(const StunByteStringAttribute& delta); @@ -81,9 +84,9 @@ class StunDictionaryView { const StunAttribute* GetOrNull( int key, - absl::optional = absl::nullopt) const; + std::optional = std::nullopt) const; size_t GetLength(int key) const; - static webrtc::RTCErrorOr< + static RTCErrorOr< std::pair>>> ParseDelta(const StunByteStringAttribute& delta); @@ -199,6 +202,15 @@ class StunDictionaryWriter { std::map> tombstones_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::StunDictionaryView; +using ::webrtc::StunDictionaryWriter; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_STUN_DICTIONARY_H_ diff --git a/p2p/base/stun_dictionary_unittest.cc b/p2p/base/stun_dictionary_unittest.cc index b6af420d78..0e292f85ea 100644 --- a/p2p/base/stun_dictionary_unittest.cc +++ b/p2p/base/stun_dictionary_unittest.cc @@ -10,16 +10,22 @@ #include "p2p/base/stun_dictionary.h" +#include +#include +#include +#include #include +#include -#include "rtc_base/gunit.h" +#include "api/transport/stun.h" #include "rtc_base/logging.h" +#include "rtc_base/socket_address.h" #include "test/gtest.h" namespace { -void Sync(cricket::StunDictionaryView& dictionary, - cricket::StunDictionaryWriter& writer) { +void Sync(webrtc::StunDictionaryView& dictionary, + webrtc::StunDictionaryWriter& writer) { int pending = writer.Pending(); auto delta = writer.CreateDelta(); if (delta == nullptr) { @@ -37,18 +43,18 @@ void Sync(cricket::StunDictionaryView& dictionary, } } -void XorToggle(cricket::StunByteStringAttribute& attr, size_t byte) { +void XorToggle(webrtc::StunByteStringAttribute& attr, size_t byte) { ASSERT_TRUE(attr.length() > byte); uint8_t val = attr.GetByte(byte); uint8_t new_val = val ^ (128 - (byte & 255)); attr.SetByte(byte, new_val); } -std::unique_ptr Crop( - const cricket::StunByteStringAttribute& attr, +std::unique_ptr Crop( + const webrtc::StunByteStringAttribute& attr, int new_length) { auto new_attr = - std::make_unique(attr.type()); + std::make_unique(attr.type()); std::string content = std::string(attr.string_view()); content.erase(new_length); new_attr->CopyBytes(content); @@ -57,7 +63,7 @@ std::unique_ptr Crop( } // namespace -namespace cricket { +namespace webrtc { constexpr int kKey1 = 100; @@ -279,7 +285,7 @@ TEST(StunDictionary, DataTypes) { StunDictionaryWriter writer; StunDictionaryView dictionary; - rtc::SocketAddress addr("127.0.0.1", 8080); + SocketAddress addr("127.0.0.1", 8080); writer.SetUInt32(kKey1)->SetValue(27); writer.SetUInt64(kKey1 + 1)->SetValue(28); @@ -299,7 +305,7 @@ TEST(StunDictionary, ParseError) { StunDictionaryWriter writer; StunDictionaryView dictionary; - rtc::SocketAddress addr("127.0.0.1", 8080); + SocketAddress addr("127.0.0.1", 8080); writer.SetUInt32(kKey1)->SetValue(27); writer.SetUInt64(kKey1 + 1)->SetValue(28); @@ -334,4 +340,4 @@ TEST(StunDictionary, ParseError) { } } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/stun_port.cc b/p2p/base/stun_port.cc index 82832d8062..63105bec4b 100644 --- a/p2p/base/stun_port.cc +++ b/p2p/base/stun_port.cc @@ -10,24 +10,41 @@ #include "p2p/base/stun_port.h" +#include +#include +#include +#include +#include #include #include #include "absl/memory/memory.h" #include "absl/strings/string_view.h" +#include "api/async_dns_resolver.h" +#include "api/candidate.h" +#include "api/field_trials_view.h" +#include "api/packet_socket_factory.h" #include "api/transport/stun.h" #include "p2p/base/connection.h" #include "p2p/base/p2p_constants.h" -#include "p2p/base/port_allocator.h" -#include "rtc_base/async_resolver_interface.h" +#include "p2p/base/port.h" +#include "p2p/base/port_interface.h" +#include "p2p/base/stun_request.h" +#include "rtc_base/async_packet_socket.h" #include "rtc_base/checks.h" -#include "rtc_base/experiments/field_trial_parser.h" -#include "rtc_base/helpers.h" +#include "rtc_base/dscp.h" #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" 
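// The WEBRTC_ALLOW_DEPRECATED_NAMESPACES block added to stun_dictionary.h
// above (and to stun_port.h further down) only aliases names into cricket::;
// it does not fork the types. A rough way to picture it, assuming the macro
// is defined for the target being built:
#include <type_traits>

#include "p2p/base/stun_dictionary.h"

#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES
static_assert(std::is_same_v<cricket::StunDictionaryWriter,
                             webrtc::StunDictionaryWriter>,
              "The cricket:: spelling is an alias for the webrtc:: type.");
#endif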
+#include "rtc_base/net_helper.h" +#include "rtc_base/network.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/time_utils.h" -namespace cricket { +namespace webrtc { // TODO(?): Move these to a common place (used in relayport too) const int RETRY_TIMEOUT = 50 * 1000; // 50 seconds @@ -40,15 +57,17 @@ const int kSendErrorLogLimit = 5; class StunBindingRequest : public StunRequest { public: StunBindingRequest(UDPPort* port, - const rtc::SocketAddress& addr, + const SocketAddress& addr, int64_t start_time) : StunRequest(port->request_manager(), std::make_unique(STUN_BINDING_REQUEST)), port_(port), server_addr_(addr), - start_time_(start_time) {} + start_time_(start_time) { + SetAuthenticationRequired(false); + } - const rtc::SocketAddress& server_addr() const { return server_addr_; } + const SocketAddress& server_addr() const { return server_addr_; } void OnResponse(StunMessage* response) override { const StunAddressAttribute* addr_attr = @@ -59,12 +78,12 @@ class StunBindingRequest : public StunRequest { addr_attr->family() != STUN_ADDRESS_IPV6) { RTC_LOG(LS_ERROR) << "Binding address has bad family"; } else { - rtc::SocketAddress addr(addr_attr->ipaddr(), addr_attr->port()); + SocketAddress addr(addr_attr->ipaddr(), addr_attr->port()); port_->OnStunBindingRequestSucceeded(this->Elapsed(), server_addr_, addr); } // The keep-alive requests will be stopped after its lifetime has passed. - if (WithinLifetime(rtc::TimeMillis())) { + if (WithinLifetime(webrtc::TimeMillis())) { port_->request_manager_.SendDelayed( new StunBindingRequest(port_, server_addr_, start_time_), port_->stun_keepalive_delay()); @@ -87,9 +106,9 @@ class StunBindingRequest : public StunRequest { attr ? attr->reason() : "STUN binding response with no error code attribute."); - int64_t now = rtc::TimeMillis(); + int64_t now = webrtc::TimeMillis(); if (WithinLifetime(now) && - rtc::TimeDiff(now, start_time_) < RETRY_TIMEOUT) { + webrtc::TimeDiff(now, start_time_) < RETRY_TIMEOUT) { port_->request_manager_.SendDelayed( new StunBindingRequest(port_, server_addr_, start_time_), port_->stun_keepalive_delay()); @@ -100,7 +119,7 @@ class StunBindingRequest : public StunRequest { << port_->GetLocalAddress().ToSensitiveString() << " (" << port_->Network()->name() << ")"; port_->OnStunBindingOrResolveRequestFailed( - server_addr_, SERVER_NOT_REACHABLE_ERROR, + server_addr_, STUN_ERROR_SERVER_NOT_REACHABLE, "STUN binding request timed out."); } @@ -109,32 +128,31 @@ class StunBindingRequest : public StunRequest { // lifetime means infinite). 
bool WithinLifetime(int64_t now) const { int lifetime = port_->stun_keepalive_lifetime(); - return lifetime < 0 || rtc::TimeDiff(now, start_time_) <= lifetime; + return lifetime < 0 || webrtc::TimeDiff(now, start_time_) <= lifetime; } UDPPort* port_; - const rtc::SocketAddress server_addr_; + const SocketAddress server_addr_; int64_t start_time_; }; UDPPort::AddressResolver::AddressResolver( - rtc::PacketSocketFactory* factory, - std::function done_callback) + PacketSocketFactory* factory, + std::function done_callback) : socket_factory_(factory), done_(std::move(done_callback)) {} void UDPPort::AddressResolver::Resolve( - const rtc::SocketAddress& address, + const SocketAddress& address, int family, - const webrtc::FieldTrialsView& field_trials) { + const FieldTrialsView& /* field_trials */) { if (resolvers_.find(address) != resolvers_.end()) return; auto resolver = socket_factory_->CreateAsyncDnsResolver(); auto resolver_ptr = resolver.get(); - std::pair> - pair = std::make_pair(address, std::move(resolver)); + std::pair> pair = + std::make_pair(address, std::move(resolver)); resolvers_.insert(std::move(pair)); auto callback = [this, address] { @@ -146,10 +164,9 @@ void UDPPort::AddressResolver::Resolve( resolver_ptr->Start(address, family, std::move(callback)); } -bool UDPPort::AddressResolver::GetResolvedAddress( - const rtc::SocketAddress& input, - int family, - rtc::SocketAddress* output) const { +bool UDPPort::AddressResolver::GetResolvedAddress(const SocketAddress& input, + int family, + SocketAddress* output) const { ResolverMap::const_iterator it = resolvers_.find(input); if (it == resolvers_.end()) return false; @@ -157,23 +174,13 @@ bool UDPPort::AddressResolver::GetResolvedAddress( return it->second->result().GetResolvedAddress(family, output); } -UDPPort::UDPPort(rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, - rtc::AsyncPacketSocket* socket, - absl::string_view username, - absl::string_view password, - bool emit_local_for_anyaddress, - const webrtc::FieldTrialsView* field_trials) - : Port(thread, - LOCAL_PORT_TYPE, - factory, - network, - username, - password, - field_trials), +UDPPort::UDPPort(const PortParametersRef& args, + IceCandidateType type, + AsyncPacketSocket* socket, + bool emit_local_for_anyaddress) + : Port(args, type), request_manager_( - thread, + args.network_thread, [this](const void* data, size_t size, StunRequest* request) { OnSendPacket(data, size, request); }), @@ -181,29 +188,17 @@ UDPPort::UDPPort(rtc::Thread* thread, error_(0), ready_(false), stun_keepalive_delay_(STUN_KEEPALIVE_INTERVAL), - dscp_(rtc::DSCP_NO_CHANGE), + dscp_(webrtc::DSCP_NO_CHANGE), emit_local_for_anyaddress_(emit_local_for_anyaddress) {} -UDPPort::UDPPort(rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, +UDPPort::UDPPort(const PortParametersRef& args, + IceCandidateType type, uint16_t min_port, uint16_t max_port, - absl::string_view username, - absl::string_view password, - bool emit_local_for_anyaddress, - const webrtc::FieldTrialsView* field_trials) - : Port(thread, - LOCAL_PORT_TYPE, - factory, - network, - min_port, - max_port, - username, - password, - field_trials), + bool emit_local_for_anyaddress) + : Port(args, type, min_port, max_port), request_manager_( - thread, + args.network_thread, [this](const void* data, size_t size, StunRequest* request) { OnSendPacket(data, size, request); }), @@ -211,7 +206,7 @@ UDPPort::UDPPort(rtc::Thread* thread, error_(0), ready_(false), 
stun_keepalive_delay_(STUN_KEEPALIVE_INTERVAL), - dscp_(rtc::DSCP_NO_CHANGE), + dscp_(webrtc::DSCP_NO_CHANGE), emit_local_for_anyaddress_(emit_local_for_anyaddress) {} bool UDPPort::Init() { @@ -219,12 +214,15 @@ bool UDPPort::Init() { if (!SharedSocket()) { RTC_DCHECK(socket_ == nullptr); socket_ = socket_factory()->CreateUdpSocket( - rtc::SocketAddress(Network()->GetBestIP(), 0), min_port(), max_port()); + SocketAddress(Network()->GetBestIP(), 0), min_port(), max_port()); if (!socket_) { RTC_LOG(LS_WARNING) << ToString() << ": UDP socket creation failed"; return false; } - socket_->SignalReadPacket.connect(this, &UDPPort::OnReadPacket); + socket_->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + OnReadPacket(socket, packet); + }); } socket_->SignalSentPacket.connect(this, &UDPPort::OnSentPacket); socket_->SignalReadyToSend.connect(this, &UDPPort::OnReadyToSend); @@ -239,7 +237,7 @@ UDPPort::~UDPPort() { void UDPPort::PrepareAddress() { RTC_DCHECK(request_manager_.empty()); - if (socket_->GetState() == rtc::AsyncPacketSocket::STATE_BOUND) { + if (socket_->GetState() == AsyncPacketSocket::STATE_BOUND) { OnLocalAddressReady(socket_, socket_->GetLocalAddress()); } } @@ -256,7 +254,7 @@ void UDPPort::MaybePrepareStunCandidate() { } Connection* UDPPort::CreateConnection(const Candidate& address, - CandidateOrigin origin) { + CandidateOrigin /* origin */) { if (!SupportsProtocol(address.protocol())) { return nullptr; } @@ -284,7 +282,7 @@ Connection* UDPPort::CreateConnection(const Candidate& address, // // See also the definition of MdnsNameRegistrationStatus::kNotStarted in // port.h. - RTC_DCHECK(!SharedSocket() || Candidates()[0].type() == LOCAL_PORT_TYPE || + RTC_DCHECK(!SharedSocket() || Candidates()[0].is_local() || mdns_name_registration_status() != MdnsNameRegistrationStatus::kNotStarted); @@ -295,10 +293,10 @@ Connection* UDPPort::CreateConnection(const Candidate& address, int UDPPort::SendTo(const void* data, size_t size, - const rtc::SocketAddress& addr, - const rtc::PacketOptions& options, - bool payload) { - rtc::PacketOptions modified_options(options); + const SocketAddress& addr, + const AsyncSocketPacketOptions& options, + bool /* payload */) { + AsyncSocketPacketOptions modified_options(options); CopyPortInformationToPacketInfo(&modified_options.info_signaled_after_sent); int sent = socket_->SendTo(data, size, addr, modified_options); if (sent < 0) { @@ -323,19 +321,19 @@ void UDPPort::UpdateNetworkCost() { stun_keepalive_lifetime_ = GetStunKeepaliveLifetime(); } -rtc::DiffServCodePoint UDPPort::StunDscpValue() const { +DiffServCodePoint UDPPort::StunDscpValue() const { return dscp_; } -int UDPPort::SetOption(rtc::Socket::Option opt, int value) { - if (opt == rtc::Socket::OPT_DSCP) { +int UDPPort::SetOption(Socket::Option opt, int value) { + if (opt == Socket::OPT_DSCP) { // Save value for future packets we instantiate. 
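// Sketch of the packet-callback shape used in Init() above: SignalReadPacket
// is gone, and the socket now invokes a callback with a single
// webrtc::ReceivedIpPacket bundling the payload and source address. The
// logging consumer here is made up for illustration.
#include "rtc_base/async_packet_socket.h"
#include "rtc_base/logging.h"
#include "rtc_base/network/received_packet.h"

void LogIncomingPackets(webrtc::AsyncPacketSocket& socket) {
  socket.RegisterReceivedPacketCallback(
      [](webrtc::AsyncPacketSocket* /* socket */,
         const webrtc::ReceivedIpPacket& packet) {
        RTC_LOG(LS_VERBOSE) << "Received " << packet.payload().size()
                            << " bytes from "
                            << packet.source_address().ToSensitiveString();
      });
}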
- dscp_ = static_cast(value); + dscp_ = static_cast(value); } return socket_->SetOption(opt, value); } -int UDPPort::GetOption(rtc::Socket::Option opt, int* value) { +int UDPPort::GetOption(Socket::Option opt, int* value) { return socket_->GetOption(opt, value); } @@ -343,83 +341,80 @@ int UDPPort::GetError() { return error_; } -bool UDPPort::HandleIncomingPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - int64_t packet_time_us) { +bool UDPPort::HandleIncomingPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet) { // All packets given to UDP port will be consumed. - OnReadPacket(socket, data, size, remote_addr, packet_time_us); + OnReadPacket(socket, packet); return true; } bool UDPPort::SupportsProtocol(absl::string_view protocol) const { - return protocol == UDP_PROTOCOL_NAME; + return protocol == webrtc::UDP_PROTOCOL_NAME; } ProtocolType UDPPort::GetProtocol() const { - return PROTO_UDP; + return webrtc::PROTO_UDP; } -void UDPPort::GetStunStats(absl::optional* stats) { +void UDPPort::GetStunStats(std::optional* stats) { *stats = stats_; } -void UDPPort::set_stun_keepalive_delay(const absl::optional& delay) { +void UDPPort::set_stun_keepalive_delay(const std::optional& delay) { stun_keepalive_delay_ = delay.value_or(STUN_KEEPALIVE_INTERVAL); } -void UDPPort::OnLocalAddressReady(rtc::AsyncPacketSocket* socket, - const rtc::SocketAddress& address) { +void UDPPort::OnLocalAddressReady(AsyncPacketSocket* /* socket */, + const SocketAddress& address) { // When adapter enumeration is disabled and binding to the any address, the // default local address will be issued as a candidate instead if // `emit_local_for_anyaddress` is true. This is to allow connectivity for // applications which absolutely requires a HOST candidate. - rtc::SocketAddress addr = address; + SocketAddress addr = address; // If MaybeSetDefaultLocalAddress fails, we keep the "any" IP so that at // least the port is listening. MaybeSetDefaultLocalAddress(&addr); - AddAddress(addr, addr, rtc::SocketAddress(), UDP_PROTOCOL_NAME, "", "", - LOCAL_PORT_TYPE, ICE_TYPE_PREFERENCE_HOST, 0, "", false); + AddAddress(addr, addr, SocketAddress(), webrtc::UDP_PROTOCOL_NAME, "", "", + IceCandidateType::kHost, ICE_TYPE_PREFERENCE_HOST, 0, "", false); MaybePrepareStunCandidate(); } -void UDPPort::PostAddAddress(bool is_final) { +void UDPPort::PostAddAddress(bool /* is_final */) { MaybeSetPortCompleteOrError(); } -void UDPPort::OnReadPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - const int64_t& packet_time_us) { +void UDPPort::OnReadPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet) { RTC_DCHECK(socket == socket_); - RTC_DCHECK(!remote_addr.IsUnresolvedIP()); + RTC_DCHECK(!packet.source_address().IsUnresolvedIP()); // Look for a response from the STUN server. // Even if the response doesn't match one of our outstanding requests, we // will eat it because it might be a response to a retransmitted packet, and // we already cleared the request when we got the first response. 
- if (server_addresses_.find(remote_addr) != server_addresses_.end()) { - request_manager_.CheckResponse(data, size); + if (server_addresses_.find(packet.source_address()) != + server_addresses_.end()) { + request_manager_.CheckResponse( + reinterpret_cast(packet.payload().data()), + packet.payload().size()); return; } - if (Connection* conn = GetConnection(remote_addr)) { - conn->OnReadPacket(data, size, packet_time_us); + if (Connection* conn = GetConnection(packet.source_address())) { + conn->OnReadPacket(packet); } else { - Port::OnReadPacket(data, size, remote_addr, PROTO_UDP); + Port::OnReadPacket(packet, webrtc::PROTO_UDP); } } -void UDPPort::OnSentPacket(rtc::AsyncPacketSocket* socket, - const rtc::SentPacket& sent_packet) { +void UDPPort::OnSentPacket(AsyncPacketSocket* /* socket */, + const SentPacketInfo& sent_packet) { PortInterface::SignalSentPacket(sent_packet); } -void UDPPort::OnReadyToSend(rtc::AsyncPacketSocket* socket) { +void UDPPort::OnReadyToSend(AsyncPacketSocket* /* socket */) { Port::OnReadyToSend(); } @@ -439,10 +434,10 @@ void UDPPort::SendStunBindingRequests() { } } -void UDPPort::ResolveStunAddress(const rtc::SocketAddress& stun_addr) { +void UDPPort::ResolveStunAddress(const SocketAddress& stun_addr) { if (!resolver_) { resolver_.reset(new AddressResolver( - socket_factory(), [&](const rtc::SocketAddress& input, int error) { + socket_factory(), [&](const SocketAddress& input, int error) { OnResolveResult(input, error); })); } @@ -452,16 +447,16 @@ void UDPPort::ResolveStunAddress(const rtc::SocketAddress& stun_addr) { resolver_->Resolve(stun_addr, Network()->family(), field_trials()); } -void UDPPort::OnResolveResult(const rtc::SocketAddress& input, int error) { +void UDPPort::OnResolveResult(const SocketAddress& input, int error) { RTC_DCHECK(resolver_.get() != nullptr); - rtc::SocketAddress resolved; + SocketAddress resolved; if (error != 0 || !resolver_->GetResolvedAddress( input, Network()->GetBestIP().family(), &resolved)) { RTC_LOG(LS_WARNING) << ToString() << ": StunPort: stun host lookup received error " << error; - OnStunBindingOrResolveRequestFailed(input, SERVER_NOT_REACHABLE_ERROR, + OnStunBindingOrResolveRequestFailed(input, STUN_ERROR_SERVER_NOT_REACHABLE, "STUN host lookup received error."); return; } @@ -474,32 +469,34 @@ void UDPPort::OnResolveResult(const rtc::SocketAddress& input, int error) { } } -void UDPPort::SendStunBindingRequest(const rtc::SocketAddress& stun_addr) { +void UDPPort::SendStunBindingRequest(const SocketAddress& stun_addr) { if (stun_addr.IsUnresolvedIP()) { ResolveStunAddress(stun_addr); - } else if (socket_->GetState() == rtc::AsyncPacketSocket::STATE_BOUND) { + } else if (socket_->GetState() == AsyncPacketSocket::STATE_BOUND) { // Check if `server_addr_` is compatible with the port's ip. if (IsCompatibleAddress(stun_addr)) { request_manager_.Send( - new StunBindingRequest(this, stun_addr, rtc::TimeMillis())); + new StunBindingRequest(this, stun_addr, webrtc::TimeMillis())); } else { // Since we can't send stun messages to the server, we should mark this - // port ready. - const char* reason = "STUN server address is incompatible."; - RTC_LOG(LS_WARNING) << reason; - OnStunBindingOrResolveRequestFailed(stun_addr, SERVER_NOT_REACHABLE_ERROR, - reason); + // port ready. This is not an error but similar to ignoring + // a mismatch of th address family when pairing candidates. 
+ RTC_LOG(LS_WARNING) << ToString() + << ": STUN server address is incompatible."; + OnStunBindingOrResolveRequestFailed( + stun_addr, STUN_ERROR_NOT_AN_ERROR, + "STUN server address is incompatible."); } } } -bool UDPPort::MaybeSetDefaultLocalAddress(rtc::SocketAddress* addr) const { +bool UDPPort::MaybeSetDefaultLocalAddress(SocketAddress* addr) const { if (!addr->IsAnyIP() || !emit_local_for_anyaddress_ || !Network()->default_local_address_provider()) { return true; } - rtc::IPAddress default_address; + IPAddress default_address; bool result = Network()->default_local_address_provider()->GetDefaultLocalAddress( addr->family(), &default_address); @@ -513,8 +510,8 @@ bool UDPPort::MaybeSetDefaultLocalAddress(rtc::SocketAddress* addr) const { void UDPPort::OnStunBindingRequestSucceeded( int rtt_ms, - const rtc::SocketAddress& stun_server_addr, - const rtc::SocketAddress& stun_reflected_addr) { + const SocketAddress& stun_server_addr, + const SocketAddress& stun_reflected_addr) { RTC_DCHECK(stats_.stun_binding_responses_received < stats_.stun_binding_requests_sent); stats_.stun_binding_responses_received++; @@ -532,33 +529,35 @@ void UDPPort::OnStunBindingRequestSucceeded( if ((!SharedSocket() || stun_reflected_addr != socket_->GetLocalAddress() || Network()->GetMdnsResponder() != nullptr) && !HasStunCandidateWithAddress(stun_reflected_addr)) { - rtc::SocketAddress related_address = socket_->GetLocalAddress(); + SocketAddress related_address = socket_->GetLocalAddress(); // If we can't stamp the related address correctly, empty it to avoid leak. if (!MaybeSetDefaultLocalAddress(&related_address)) { related_address = - rtc::EmptySocketAddressWithFamily(related_address.family()); + webrtc::EmptySocketAddressWithFamily(related_address.family()); } - rtc::StringBuilder url; + StringBuilder url; url << "stun:" << stun_server_addr.hostname() << ":" << stun_server_addr.port(); AddAddress(stun_reflected_addr, socket_->GetLocalAddress(), related_address, - UDP_PROTOCOL_NAME, "", "", STUN_PORT_TYPE, + webrtc::UDP_PROTOCOL_NAME, "", "", IceCandidateType::kSrflx, ICE_TYPE_PREFERENCE_SRFLX, 0, url.str(), false); } MaybeSetPortCompleteOrError(); } void UDPPort::OnStunBindingOrResolveRequestFailed( - const rtc::SocketAddress& stun_server_addr, + const SocketAddress& stun_server_addr, int error_code, absl::string_view reason) { - rtc::StringBuilder url; - url << "stun:" << stun_server_addr.ToString(); - SignalCandidateError( - this, IceCandidateErrorEvent(GetLocalAddress().HostAsSensitiveURIString(), - GetLocalAddress().port(), url.str(), - error_code, reason)); + if (error_code != STUN_ERROR_NOT_AN_ERROR) { + StringBuilder url; + url << "stun:" << stun_server_addr.ToString(); + SignalCandidateError( + this, IceCandidateErrorEvent( + GetLocalAddress().HostAsSensitiveURIString(), + GetLocalAddress().port(), url.str(), error_code, reason)); + } if (bind_request_failed_servers_.find(stun_server_addr) != bind_request_failed_servers_.end()) { return; @@ -601,8 +600,8 @@ void UDPPort::MaybeSetPortCompleteOrError() { // TODO(?): merge this with SendTo above. 
void UDPPort::OnSendPacket(const void* data, size_t size, StunRequest* req) { StunBindingRequest* sreq = static_cast(req); - rtc::PacketOptions options(StunDscpValue()); - options.info_signaled_after_sent.packet_type = rtc::PacketType::kStunMessage; + AsyncSocketPacketOptions options(StunDscpValue()); + options.info_signaled_after_sent.packet_type = PacketType::kStunMessage; CopyPortInformationToPacketInfo(&options.info_signaled_after_sent); if (socket_->SendTo(data, size, sreq->server_addr(), options) < 0) { RTC_LOG_ERR_EX(LS_ERROR, socket_->GetError()) @@ -613,32 +612,24 @@ void UDPPort::OnSendPacket(const void* data, size_t size, StunRequest* req) { stats_.stun_binding_requests_sent++; } -bool UDPPort::HasStunCandidateWithAddress( - const rtc::SocketAddress& addr) const { +bool UDPPort::HasStunCandidateWithAddress(const SocketAddress& addr) const { const std::vector& existing_candidates = Candidates(); std::vector::const_iterator it = existing_candidates.begin(); for (; it != existing_candidates.end(); ++it) { - if (it->type() == STUN_PORT_TYPE && it->address() == addr) + if (it->is_stun() && it->address() == addr) return true; } return false; } std::unique_ptr StunPort::Create( - rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, + const PortParametersRef& args, uint16_t min_port, uint16_t max_port, - absl::string_view username, - absl::string_view password, const ServerAddresses& servers, - absl::optional stun_keepalive_interval, - const webrtc::FieldTrialsView* field_trials) { + std::optional stun_keepalive_interval) { // Using `new` to access a non-public constructor. - auto port = absl::WrapUnique(new StunPort(thread, factory, network, min_port, - max_port, username, password, - servers, field_trials)); + auto port = absl::WrapUnique(new StunPort(args, min_port, max_port, servers)); port->set_stun_keepalive_delay(stun_keepalive_interval); if (!port->Init()) { return nullptr; @@ -646,26 +637,11 @@ std::unique_ptr StunPort::Create( return port; } -StunPort::StunPort(rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, +StunPort::StunPort(const PortParametersRef& args, uint16_t min_port, uint16_t max_port, - absl::string_view username, - absl::string_view password, - const ServerAddresses& servers, - const webrtc::FieldTrialsView* field_trials) - : UDPPort(thread, - factory, - network, - min_port, - max_port, - username, - password, - false, - field_trials) { - // UDPPort will set these to local udp, updating these to STUN. 
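// The options type above is the renamed rtc::PacketOptions. A condensed view
// of what OnSendPacket sets up before handing the request to SendTo, with the
// port's DSCP value passed in by the caller:
#include "rtc_base/async_packet_socket.h"
#include "rtc_base/dscp.h"
#include "rtc_base/network/sent_packet.h"

webrtc::AsyncSocketPacketOptions StunPacketOptions(
    webrtc::DiffServCodePoint dscp) {
  webrtc::AsyncSocketPacketOptions options(dscp);
  options.info_signaled_after_sent.packet_type =
      webrtc::PacketType::kStunMessage;
  return options;  // Passed to AsyncPacketSocket::SendTo().
}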
- set_type(STUN_PORT_TYPE); + const ServerAddresses& servers) + : UDPPort(args, IceCandidateType::kSrflx, min_port, max_port, false) { set_server_addresses(servers); } @@ -673,4 +649,4 @@ void StunPort::PrepareAddress() { SendStunBindingRequests(); } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/stun_port.h b/p2p/base/stun_port.h index 380dbecd2d..612ee0f8b3 100644 --- a/p2p/base/stun_port.h +++ b/p2p/base/stun_port.h @@ -11,20 +11,33 @@ #ifndef P2P_BASE_STUN_PORT_H_ #define P2P_BASE_STUN_PORT_H_ +#include +#include #include #include #include -#include +#include #include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "api/task_queue/pending_task_safety_flag.h" +#include "api/async_dns_resolver.h" +#include "api/candidate.h" +#include "api/field_trials_view.h" +#include "api/packet_socket_factory.h" +#include "p2p/base/connection.h" #include "p2p/base/port.h" +#include "p2p/base/port_interface.h" #include "p2p/base/stun_request.h" #include "rtc_base/async_packet_socket.h" +#include "rtc_base/dscp.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_constants.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" #include "rtc_base/system/rtc_export.h" -namespace cricket { +namespace webrtc { // Lifetime chosen for STUN ports on low-cost networks. static const int INFINITE_LIFETIME = -1; @@ -35,19 +48,13 @@ static const int HIGH_COST_PORT_KEEPALIVE_LIFETIME = 2 * 60 * 1000; class RTC_EXPORT UDPPort : public Port { public: static std::unique_ptr Create( - rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, - rtc::AsyncPacketSocket* socket, - absl::string_view username, - absl::string_view password, + const PortParametersRef& args, + AsyncPacketSocket* socket, bool emit_local_for_anyaddress, - absl::optional stun_keepalive_interval, - const webrtc::FieldTrialsView* field_trials = nullptr) { + std::optional stun_keepalive_interval) { // Using `new` to access a non-public constructor. - auto port = absl::WrapUnique( - new UDPPort(thread, factory, network, socket, username, password, - emit_local_for_anyaddress, field_trials)); + auto port = absl::WrapUnique(new UDPPort( + args, IceCandidateType::kHost, socket, emit_local_for_anyaddress)); port->set_stun_keepalive_delay(stun_keepalive_interval); if (!port->Init()) { return nullptr; @@ -56,20 +63,15 @@ class RTC_EXPORT UDPPort : public Port { } static std::unique_ptr Create( - rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, + const PortParametersRef& args, uint16_t min_port, uint16_t max_port, - absl::string_view username, - absl::string_view password, bool emit_local_for_anyaddress, - absl::optional stun_keepalive_interval, - const webrtc::FieldTrialsView* field_trials = nullptr) { + std::optional stun_keepalive_interval) { // Using `new` to access a non-public constructor. 
- auto port = absl::WrapUnique( - new UDPPort(thread, factory, network, min_port, max_port, username, - password, emit_local_for_anyaddress, field_trials)); + auto port = + absl::WrapUnique(new UDPPort(args, IceCandidateType::kHost, min_port, + max_port, emit_local_for_anyaddress)); port->set_stun_keepalive_delay(stun_keepalive_interval); if (!port->Init()) { return nullptr; @@ -79,9 +81,7 @@ class RTC_EXPORT UDPPort : public Port { ~UDPPort() override; - rtc::SocketAddress GetLocalAddress() const { - return socket_->GetLocalAddress(); - } + SocketAddress GetLocalAddress() const { return socket_->GetLocalAddress(); } const ServerAddresses& server_addresses() const { return server_addresses_; } void set_server_addresses(const ServerAddresses& addresses) { @@ -92,22 +92,19 @@ class RTC_EXPORT UDPPort : public Port { Connection* CreateConnection(const Candidate& address, CandidateOrigin origin) override; - int SetOption(rtc::Socket::Option opt, int value) override; - int GetOption(rtc::Socket::Option opt, int* value) override; + int SetOption(Socket::Option opt, int value) override; + int GetOption(Socket::Option opt, int* value) override; int GetError() override; - bool HandleIncomingPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - int64_t packet_time_us) override; + bool HandleIncomingPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet) override; bool SupportsProtocol(absl::string_view protocol) const override; ProtocolType GetProtocol() const override; - void GetStunStats(absl::optional* stats) override; + void GetStunStats(std::optional* stats) override; - void set_stun_keepalive_delay(const absl::optional& delay); + void set_stun_keepalive_delay(const std::optional& delay); int stun_keepalive_delay() const { return stun_keepalive_delay_; } // Visible for testing. 
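// Condensed sketch of the new factory signatures above: both ports are created
// from a single webrtc::PortParametersRef (assumed to be assembled by the
// caller, e.g. the allocator session) instead of the old thread/factory/
// network/username/password list. The 0/0 port range (any port) and the empty
// keepalive interval are placeholder values.
#include <memory>
#include <optional>

#include "p2p/base/stun_port.h"
#include "rtc_base/async_packet_socket.h"

std::unique_ptr<webrtc::UDPPort> MakeHostPort(
    const webrtc::PortParametersRef& args,
    webrtc::AsyncPacketSocket* shared_socket) {
  return webrtc::UDPPort::Create(args, shared_socket,
                                 /*emit_local_for_anyaddress=*/true,
                                 /*stun_keepalive_interval=*/std::nullopt);
}

std::unique_ptr<webrtc::StunPort> MakeSrflxPort(
    const webrtc::PortParametersRef& args,
    const webrtc::ServerAddresses& stun_servers) {
  return webrtc::StunPort::Create(args, /*min_port=*/0, /*max_port=*/0,
                                  stun_servers,
                                  /*stun_keepalive_interval=*/std::nullopt);
}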
@@ -119,52 +116,38 @@ class RTC_EXPORT UDPPort : public Port { StunRequestManager& request_manager() { return request_manager_; } protected: - UDPPort(rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, + UDPPort(const PortParametersRef& args, + IceCandidateType type, + AsyncPacketSocket* socket, + bool emit_local_for_anyaddress); + UDPPort(const PortParametersRef& args, + IceCandidateType type, uint16_t min_port, uint16_t max_port, - absl::string_view username, - absl::string_view password, - bool emit_local_for_anyaddress, - const webrtc::FieldTrialsView* field_trials); - - UDPPort(rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, - rtc::AsyncPacketSocket* socket, - absl::string_view username, - absl::string_view password, - bool emit_local_for_anyaddress, - const webrtc::FieldTrialsView* field_trials); - + bool emit_local_for_anyaddress); bool Init(); int SendTo(const void* data, size_t size, - const rtc::SocketAddress& addr, - const rtc::PacketOptions& options, + const SocketAddress& addr, + const AsyncSocketPacketOptions& options, bool payload) override; void UpdateNetworkCost() override; - rtc::DiffServCodePoint StunDscpValue() const override; + DiffServCodePoint StunDscpValue() const override; - void OnLocalAddressReady(rtc::AsyncPacketSocket* socket, - const rtc::SocketAddress& address); + void OnLocalAddressReady(AsyncPacketSocket* socket, + const SocketAddress& address); void PostAddAddress(bool is_final) override; - void OnReadPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - const int64_t& packet_time_us); + void OnReadPacket(AsyncPacketSocket* socket, const ReceivedIpPacket& packet); - void OnSentPacket(rtc::AsyncPacketSocket* socket, - const rtc::SentPacket& sent_packet) override; + void OnSentPacket(AsyncPacketSocket* socket, + const SentPacketInfo& sent_packet) override; - void OnReadyToSend(rtc::AsyncPacketSocket* socket); + void OnReadyToSend(AsyncPacketSocket* socket); // This method will send STUN binding request if STUN server address is set. void MaybePrepareStunCandidate(); @@ -175,73 +158,71 @@ class RTC_EXPORT UDPPort : public Port { // `addr` is the "any" address and `emit_local_for_anyaddress_` is true. When // returning false, it indicates that the operation has failed and the // address shouldn't be used by any candidate. - bool MaybeSetDefaultLocalAddress(rtc::SocketAddress* addr) const; + bool MaybeSetDefaultLocalAddress(SocketAddress* addr) const; private: // A helper class which can be called repeatedly to resolve multiple - // addresses, as opposed to rtc::AsyncDnsResolverInterface, which can only + // addresses, as opposed to webrtc::AsyncDnsResolverInterface, which can only // resolve one address per instance. 
class AddressResolver { public: explicit AddressResolver( - rtc::PacketSocketFactory* factory, - std::function done_callback); + PacketSocketFactory* factory, + std::function done_callback); - void Resolve(const rtc::SocketAddress& address, + void Resolve(const SocketAddress& address, int family, - const webrtc::FieldTrialsView& field_trials); - bool GetResolvedAddress(const rtc::SocketAddress& input, + const FieldTrialsView& field_trials); + bool GetResolvedAddress(const SocketAddress& input, int family, - rtc::SocketAddress* output) const; + SocketAddress* output) const; private: - typedef std::map> + typedef std::map> ResolverMap; - rtc::PacketSocketFactory* socket_factory_; + PacketSocketFactory* socket_factory_; // The function is called when resolving the specified address is finished. // The first argument is the input address, the second argument is the error // or 0 if it succeeded. - std::function done_; + std::function done_; // Resolver may fire callbacks that refer to done_, so ensure // that all resolvers are destroyed first. ResolverMap resolvers_; }; // DNS resolution of the STUN server. - void ResolveStunAddress(const rtc::SocketAddress& stun_addr); - void OnResolveResult(const rtc::SocketAddress& input, int error); + void ResolveStunAddress(const SocketAddress& stun_addr); + void OnResolveResult(const SocketAddress& input, int error); // Send a STUN binding request to the given address. Calling this method may // cause the set of known server addresses to be modified, eg. by replacing an // unresolved server address with a resolved address. - void SendStunBindingRequest(const rtc::SocketAddress& stun_addr); + void SendStunBindingRequest(const SocketAddress& stun_addr); // Below methods handles binding request responses. - void OnStunBindingRequestSucceeded( - int rtt_ms, - const rtc::SocketAddress& stun_server_addr, - const rtc::SocketAddress& stun_reflected_addr); + void OnStunBindingRequestSucceeded(int rtt_ms, + const SocketAddress& stun_server_addr, + const SocketAddress& stun_reflected_addr); void OnStunBindingOrResolveRequestFailed( - const rtc::SocketAddress& stun_server_addr, + const SocketAddress& stun_server_addr, int error_code, absl::string_view reason); // Sends STUN requests to the server. void OnSendPacket(const void* data, size_t size, StunRequest* req); - // TODO(mallinaht) - Move this up to cricket::Port when SignalAddressReady is + // TODO(mallinaht): Move this up to webrtc::Port when SignalAddressReady is // changed to SignalPortReady. void MaybeSetPortCompleteOrError(); - bool HasStunCandidateWithAddress(const rtc::SocketAddress& addr) const; + bool HasStunCandidateWithAddress(const SocketAddress& addr) const; // If this is a low-cost network, it will keep on sending STUN binding // requests indefinitely to keep the NAT binding alive. Otherwise, stop // sending STUN binding requests after HIGH_COST_PORT_KEEPALIVE_LIFETIME. int GetStunKeepaliveLifetime() { - return (network_cost() >= rtc::kNetworkCostHigh) + return (network_cost() >= webrtc::kNetworkCostHigh) ? 
HIGH_COST_PORT_KEEPALIVE_LIFETIME : INFINITE_LIFETIME; } @@ -250,14 +231,14 @@ class RTC_EXPORT UDPPort : public Port { ServerAddresses bind_request_succeeded_servers_; ServerAddresses bind_request_failed_servers_; StunRequestManager request_manager_; - rtc::AsyncPacketSocket* socket_; + AsyncPacketSocket* socket_; int error_; int send_error_count_ = 0; std::unique_ptr resolver_; bool ready_; int stun_keepalive_delay_; int stun_keepalive_lifetime_ = INFINITE_LIFETIME; - rtc::DiffServCodePoint dscp_; + DiffServCodePoint dscp_; StunStats stats_; @@ -271,31 +252,32 @@ class RTC_EXPORT UDPPort : public Port { class StunPort : public UDPPort { public: static std::unique_ptr Create( - rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, + const PortParametersRef& args, uint16_t min_port, uint16_t max_port, - absl::string_view username, - absl::string_view password, const ServerAddresses& servers, - absl::optional stun_keepalive_interval, - const webrtc::FieldTrialsView* field_trials); + std::optional stun_keepalive_interval); void PrepareAddress() override; protected: - StunPort(rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, + StunPort(const PortParametersRef& args, uint16_t min_port, uint16_t max_port, - absl::string_view username, - absl::string_view password, - const ServerAddresses& servers, - const webrtc::FieldTrialsView* field_trials); + const ServerAddresses& servers); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::HIGH_COST_PORT_KEEPALIVE_LIFETIME; +using ::webrtc::INFINITE_LIFETIME; +using ::webrtc::StunPort; +using ::webrtc::UDPPort; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_STUN_PORT_H_ diff --git a/p2p/base/stun_port_unittest.cc b/p2p/base/stun_port_unittest.cc index bf51151536..f0d08739fd 100644 --- a/p2p/base/stun_port_unittest.cc +++ b/p2p/base/stun_port_unittest.cc @@ -10,48 +10,97 @@ #include "p2p/base/stun_port.h" +#include +#include #include - +#include +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "api/candidate.h" +#include "api/environment/environment_factory.h" +#include "api/field_trials_view.h" +#include "api/packet_socket_factory.h" #include "api/test/mock_async_dns_resolver.h" +#include "api/test/rtc_error_matchers.h" +#include "api/transport/stun.h" +#include "api/units/time_delta.h" #include "p2p/base/basic_packet_socket_factory.h" -#include "p2p/base/mock_dns_resolving_packet_socket_factory.h" -#include "p2p/base/test_stun_server.h" +#include "p2p/base/port.h" +#include "p2p/base/stun_request.h" +#include "p2p/test/mock_dns_resolving_packet_socket_factory.h" +#include "p2p/test/nat_server.h" +#include "p2p/test/nat_socket_factory.h" +#include "p2p/test/nat_types.h" +#include "p2p/test/test_stun_server.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/checks.h" +#include "rtc_base/crypto_random.h" +#include "rtc_base/dscp.h" +#include "rtc_base/fake_clock.h" #include "rtc_base/gunit.h" -#include "rtc_base/helpers.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/mdns_responder_interface.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/network.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network_constants.h" +#include "rtc_base/socket.h" 
#include "rtc_base/socket_address.h" -#include "rtc_base/ssl_adapter.h" +#include "rtc_base/socket_factory.h" +#include "rtc_base/socket_server.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" #include "rtc_base/virtual_socket_server.h" #include "test/gmock.h" +#include "test/gtest.h" #include "test/scoped_key_value_config.h" +#include "test/wait_until.h" namespace { -using cricket::ServerAddresses; -using rtc::SocketAddress; using ::testing::_; using ::testing::DoAll; +using ::testing::Eq; +using ::testing::IsTrue; using ::testing::Return; using ::testing::ReturnPointee; using ::testing::SetArgPointee; +using ::webrtc::CreateEnvironment; +using ::webrtc::IceCandidateType; +using ::webrtc::ServerAddresses; +using ::webrtc::SocketAddress; + +static const SocketAddress kPrivateIP("192.168.1.12", 0); +static const SocketAddress kMsdnAddress("unittest-mdns-host-name.local", 0); +static const SocketAddress kPublicIP("212.116.91.133", 0); +static const SocketAddress kNatAddr(kPublicIP.ipaddr(), + webrtc::NAT_SERVER_UDP_PORT); +static const SocketAddress kStunServerAddr1("34.38.54.120", 5000); +static const SocketAddress kStunServerAddr2("34.38.54.120", 4000); + +static const SocketAddress kPrivateIPv6("2001:4860:4860::8844", 0); +static const SocketAddress kPublicIPv6("2002:4860:4860::8844", 5000); +static const SocketAddress kNatAddrIPv6(kPublicIPv6.ipaddr(), + webrtc::NAT_SERVER_UDP_PORT); +static const SocketAddress kStunServerAddrIPv6Addr("2003:4860:4860::8844", + 5000); -static const SocketAddress kLocalAddr("127.0.0.1", 0); -static const SocketAddress kIPv6LocalAddr("::1", 0); -static const SocketAddress kStunAddr1("127.0.0.1", 5000); -static const SocketAddress kStunAddr2("127.0.0.1", 4000); -static const SocketAddress kStunAddr3("127.0.0.1", 3000); -static const SocketAddress kIPv6StunAddr1("::1", 5000); static const SocketAddress kBadAddr("0.0.0.1", 5000); +static const SocketAddress kIPv6BadAddr("::ffff:0:1", 5000); static const SocketAddress kValidHostnameAddr("valid-hostname", 5000); static const SocketAddress kBadHostnameAddr("not-a-real-hostname", 5000); -// STUN timeout (with all retries) is cricket::STUN_TOTAL_TIMEOUT. +// STUN timeout (with all retries) is webrtc::STUN_TOTAL_TIMEOUT. // Add some margin of error for slow bots. 
-static const int kTimeoutMs = cricket::STUN_TOTAL_TIMEOUT; +static const int kTimeoutMs = webrtc::STUN_TOTAL_TIMEOUT; // stun prio = 100 (srflx) << 24 | 30 (IPv4) << 8 | 256 - 1 (component) static const uint32_t kStunCandidatePriority = (100 << 24) | (30 << 8) | (256 - 1); -// stun prio = 100 (srflx) << 24 | 60 (loopback IPv6) << 8 | 256 - 1 (component) +// stun prio = 100 (srflx) << 24 | 40 (IPv6) << 8 | 256 - 1 (component) static const uint32_t kIPv6StunCandidatePriority = - (100 << 24) | (60 << 8) | (256 - 1); + (100 << 24) | (40 << 8) | (256 - 1); static const int kInfiniteLifetime = -1; static const int kHighCostPortKeepaliveLifetimeMs = 2 * 60 * 1000; @@ -59,16 +108,16 @@ constexpr uint64_t kTiebreakerDefault = 44444; class FakeMdnsResponder : public webrtc::MdnsResponderInterface { public: - void CreateNameForAddress(const rtc::IPAddress& addr, + void CreateNameForAddress(const webrtc::IPAddress& addr, NameCreatedCallback callback) override { - callback(addr, std::string("unittest-mdns-host-name.local")); + callback(addr, kMsdnAddress.HostAsSensitiveURIString()); } - void RemoveNameForAddress(const rtc::IPAddress& addr, + void RemoveNameForAddress(const webrtc::IPAddress& addr, NameRemovedCallback callback) override {} }; -class FakeMdnsResponderProvider : public rtc::MdnsResponderProvider { +class FakeMdnsResponderProvider : public webrtc::MdnsResponderProvider { public: FakeMdnsResponderProvider() : mdns_responder_(new FakeMdnsResponder()) {} @@ -81,36 +130,49 @@ class FakeMdnsResponderProvider : public rtc::MdnsResponderProvider { }; // Base class for tests connecting a StunPort to a fake STUN server -// (cricket::StunServer). +// (webrtc::StunServer). class StunPortTestBase : public ::testing::Test, public sigslot::has_slots<> { public: StunPortTestBase() - : StunPortTestBase( - rtc::Network("unittest", "unittest", kLocalAddr.ipaddr(), 32), - kLocalAddr.ipaddr()) {} - - StunPortTestBase(rtc::Network network, const rtc::IPAddress address) - : ss_(new rtc::VirtualSocketServer()), + : StunPortTestBase(kPrivateIP.ipaddr(), + {kStunServerAddr1, kStunServerAddr2}, + kNatAddr) {} + + StunPortTestBase(const webrtc::IPAddress address, + const std::set& stun_server_addresses, + const webrtc::SocketAddress& nat_server_address) + : ss_(new webrtc::VirtualSocketServer()), thread_(ss_.get()), - network_(network), - socket_factory_(ss_.get()), - stun_server_1_(cricket::TestStunServer::Create(ss_.get(), kStunAddr1)), - stun_server_2_(cricket::TestStunServer::Create(ss_.get(), kStunAddr2)), + nat_factory_(ss_.get(), nat_server_address, nat_server_address), + nat_socket_factory_(&nat_factory_), mdns_responder_provider_(new FakeMdnsResponderProvider()), + nat_server_(CreateNatServer(nat_server_address, webrtc::NAT_OPEN_CONE)), done_(false), error_(false), stun_keepalive_delay_(1), stun_keepalive_lifetime_(-1) { - network_.AddIP(address); + network_ = MakeNetwork(address); + RTC_CHECK(address.family() == nat_server_address.family()); + for (const auto& addr : stun_server_addresses) { + RTC_CHECK(addr.family() == address.family()); + stun_servers_.push_back( + webrtc::TestStunServer::Create(ss_.get(), addr, thread_)); + } } - virtual rtc::PacketSocketFactory* socket_factory() { - return &socket_factory_; + std::unique_ptr CreateNatServer(const SocketAddress& addr, + webrtc::NATType type) { + return std::make_unique(type, thread_, ss_.get(), addr, + addr, thread_, ss_.get(), addr); + } + + virtual webrtc::PacketSocketFactory* socket_factory() { + return &nat_socket_factory_; } - 
rtc::VirtualSocketServer* ss() const { return ss_.get(); } - cricket::UDPPort* port() const { return stun_port_.get(); } - rtc::AsyncPacketSocket* socket() const { return socket_.get(); } + webrtc::SocketServer* ss() const { return ss_.get(); } + webrtc::UDPPort* port() const { return stun_port_.get(); } + webrtc::AsyncPacketSocket* socket() const { return socket_.get(); } bool done() const { return done_; } bool error() const { return error_; } @@ -118,11 +180,11 @@ class StunPortTestBase : public ::testing::Test, public sigslot::has_slots<> { return stun_port_->request_manager().HasRequestForTest(msg_type); } - void SetNetworkType(rtc::AdapterType adapter_type) { - network_.set_type(adapter_type); + void SetNetworkType(webrtc::AdapterType adapter_type) { + network_->set_type(adapter_type); } - void CreateStunPort(const rtc::SocketAddress& server_addr, + void CreateStunPort(const webrtc::SocketAddress& server_addr, const webrtc::FieldTrialsView* field_trials = nullptr) { ServerAddresses stun_servers; stun_servers.insert(server_addr); @@ -131,10 +193,14 @@ class StunPortTestBase : public ::testing::Test, public sigslot::has_slots<> { void CreateStunPort(const ServerAddresses& stun_servers, const webrtc::FieldTrialsView* field_trials = nullptr) { - stun_port_ = cricket::StunPort::Create( - rtc::Thread::Current(), socket_factory(), &network_, 0, 0, - rtc::CreateRandomString(16), rtc::CreateRandomString(22), stun_servers, - absl::nullopt, field_trials); + stun_port_ = webrtc::StunPort::Create( + {.env = CreateEnvironment(field_trials), + .network_thread = &thread_, + .socket_factory = socket_factory(), + .network = network_, + .ice_username_fragment = webrtc::CreateRandomString(16), + .ice_password = webrtc::CreateRandomString(22)}, + 0, 0, stun_servers, std::nullopt); stun_port_->SetIceTiebreaker(kTiebreakerDefault); stun_port_->set_stun_keepalive_delay(stun_keepalive_delay_); // If `stun_keepalive_lifetime_` is negative, let the stun port @@ -150,26 +216,34 @@ class StunPortTestBase : public ::testing::Test, public sigslot::has_slots<> { } void CreateSharedUdpPort( - const rtc::SocketAddress& server_addr, - rtc::AsyncPacketSocket* socket, + const webrtc::SocketAddress& server_addr, + webrtc::AsyncPacketSocket* socket, const webrtc::FieldTrialsView* field_trials = nullptr) { if (socket) { socket_.reset(socket); } else { socket_.reset(socket_factory()->CreateUdpSocket( - rtc::SocketAddress(kLocalAddr.ipaddr(), 0), 0, 0)); + webrtc::SocketAddress(kPrivateIP.ipaddr(), 0), 0, 0)); } ASSERT_TRUE(socket_ != NULL); - socket_->SignalReadPacket.connect(this, &StunPortTestBase::OnReadPacket); - stun_port_ = cricket::UDPPort::Create( - rtc::Thread::Current(), socket_factory(), &network_, socket_.get(), - rtc::CreateRandomString(16), rtc::CreateRandomString(22), false, - absl::nullopt, field_trials); - ASSERT_TRUE(stun_port_ != NULL); - stun_port_->SetIceTiebreaker(kTiebreakerDefault); + socket_->RegisterReceivedPacketCallback( + [&](webrtc::AsyncPacketSocket* socket, + const webrtc::ReceivedIpPacket& packet) { + OnReadPacket(socket, packet); + }); ServerAddresses stun_servers; stun_servers.insert(server_addr); + stun_port_ = webrtc::UDPPort::Create( + {.env = CreateEnvironment(field_trials), + .network_thread = &thread_, + .socket_factory = socket_factory(), + .network = network_, + .ice_username_fragment = webrtc::CreateRandomString(16), + .ice_password = webrtc::CreateRandomString(22)}, + socket_.get(), false, std::nullopt); stun_port_->set_server_addresses(stun_servers); + ASSERT_TRUE(stun_port_ != 
NULL); + stun_port_->SetIceTiebreaker(kTiebreakerDefault); stun_port_->SignalPortComplete.connect(this, &StunPortTestBase::OnPortComplete); stun_port_->SignalPortError.connect(this, &StunPortTestBase::OnPortError); @@ -177,42 +251,39 @@ class StunPortTestBase : public ::testing::Test, public sigslot::has_slots<> { void PrepareAddress() { stun_port_->PrepareAddress(); } - void OnReadPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - const int64_t& /* packet_time_us */) { - stun_port_->HandleIncomingPacket(socket, data, size, remote_addr, - /* packet_time_us */ -1); + void OnReadPacket(webrtc::AsyncPacketSocket* socket, + const webrtc::ReceivedIpPacket& packet) { + stun_port_->HandleIncomingPacket(socket, packet); } void SendData(const char* data, size_t len) { - stun_port_->HandleIncomingPacket(socket_.get(), data, len, - rtc::SocketAddress("22.22.22.22", 0), - /* packet_time_us */ -1); + stun_port_->HandleIncomingPacket( + socket_.get(), webrtc::ReceivedIpPacket::CreateFromLegacy( + data, len, /* packet_time_us */ -1, + webrtc::SocketAddress("22.22.22.22", 0))); } void EnableMdnsObfuscation() { - network_.set_mdns_responder_provider(mdns_responder_provider_.get()); + network_->set_mdns_responder_provider(mdns_responder_provider_.get()); } protected: static void SetUpTestSuite() { // Ensure the RNG is inited. - rtc::InitRandom(NULL, 0); + webrtc::InitRandom(NULL, 0); } - void OnPortComplete(cricket::Port* port) { + void OnPortComplete(webrtc::Port* /* port */) { ASSERT_FALSE(done_); done_ = true; error_ = false; } - void OnPortError(cricket::Port* port) { + void OnPortError(webrtc::Port* /* port */) { done_ = true; error_ = true; } - void OnCandidateError(cricket::Port* port, - const cricket::IceCandidateErrorEvent& event) { + void OnCandidateError(webrtc::Port* /* port */, + const webrtc::IceCandidateErrorEvent& event) { error_event_ = event; } void SetKeepaliveDelay(int delay) { stun_keepalive_delay_ = delay; } @@ -221,59 +292,75 @@ class StunPortTestBase : public ::testing::Test, public sigslot::has_slots<> { stun_keepalive_lifetime_ = lifetime; } - cricket::TestStunServer* stun_server_1() { return stun_server_1_.get(); } - cricket::TestStunServer* stun_server_2() { return stun_server_2_.get(); } + webrtc::Network* MakeNetwork(const webrtc::IPAddress& addr) { + networks_.emplace_back("unittest", "unittest", addr, 32); + networks_.back().AddIP(addr); + return &networks_.back(); + } + + webrtc::TestStunServer* stun_server_1() { return stun_servers_[0].get(); } + webrtc::TestStunServer* stun_server_2() { return stun_servers_[1].get(); } + + webrtc::AutoSocketServerThread& thread() { return thread_; } + webrtc::SocketFactory* nat_factory() { return &nat_factory_; } private: - std::unique_ptr ss_; - rtc::AutoSocketServerThread thread_; - rtc::Network network_; - rtc::BasicPacketSocketFactory socket_factory_; - std::unique_ptr stun_port_; - std::unique_ptr stun_server_1_; - std::unique_ptr stun_server_2_; - std::unique_ptr socket_; - std::unique_ptr mdns_responder_provider_; + std::vector networks_; + webrtc::Network* network_; + + std::unique_ptr ss_; + webrtc::AutoSocketServerThread thread_; + webrtc::NATSocketFactory nat_factory_; + webrtc::BasicPacketSocketFactory nat_socket_factory_; + std::unique_ptr stun_port_; + std::vector stun_servers_; + std::unique_ptr socket_; + std::unique_ptr mdns_responder_provider_; + std::unique_ptr nat_server_; bool done_; bool error_; int stun_keepalive_delay_; int stun_keepalive_lifetime_; 
protected: - cricket::IceCandidateErrorEvent error_event_; + webrtc::IceCandidateErrorEvent error_event_; }; class StunPortTestWithRealClock : public StunPortTestBase {}; class FakeClockBase { public: - rtc::ScopedFakeClock fake_clock; + webrtc::ScopedFakeClock fake_clock; }; class StunPortTest : public FakeClockBase, public StunPortTestBase {}; // Test that we can create a STUN port. TEST_F(StunPortTest, TestCreateStunPort) { - CreateStunPort(kStunAddr1); - EXPECT_EQ("stun", port()->Type()); + CreateStunPort(kStunServerAddr1); + EXPECT_EQ(IceCandidateType::kSrflx, port()->Type()); EXPECT_EQ(0U, port()->Candidates().size()); } // Test that we can create a UDP port. TEST_F(StunPortTest, TestCreateUdpPort) { - CreateSharedUdpPort(kStunAddr1, nullptr); - EXPECT_EQ("local", port()->Type()); + CreateSharedUdpPort(kStunServerAddr1, nullptr, nullptr); + EXPECT_EQ(IceCandidateType::kHost, port()->Type()); EXPECT_EQ(0U, port()->Candidates().size()); } // Test that we can get an address from a STUN server. TEST_F(StunPortTest, TestPrepareAddress) { - CreateStunPort(kStunAddr1); + CreateStunPort(kStunServerAddr1); PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); ASSERT_EQ(1U, port()->Candidates().size()); - EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[0].address())); - std::string expected_server_url = "stun:127.0.0.1:5000"; + EXPECT_TRUE(kPublicIP.EqualIPs(port()->Candidates()[0].address())); + std::string expected_server_url = "stun:" + kStunServerAddr1.ToString(); EXPECT_EQ(port()->Candidates()[0].url(), expected_server_url); } @@ -281,34 +368,57 @@ TEST_F(StunPortTest, TestPrepareAddress) { TEST_F(StunPortTest, TestPrepareAddressFail) { CreateStunPort(kBadAddr); PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_TRUE(error()); EXPECT_EQ(0U, port()->Candidates().size()); - EXPECT_EQ_SIMULATED_WAIT(error_event_.error_code, - cricket::SERVER_NOT_REACHABLE_ERROR, kTimeoutMs, - fake_clock); - ASSERT_NE(error_event_.error_text.find('.'), std::string::npos); - ASSERT_NE(error_event_.address.find(kLocalAddr.HostAsSensitiveURIString()), + EXPECT_THAT( + webrtc::WaitUntil([&] { return error_event_.error_code; }, + Eq(webrtc::STUN_ERROR_SERVER_NOT_REACHABLE), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); + EXPECT_NE(error_event_.error_text.find('.'), std::string::npos); + EXPECT_NE(error_event_.address.find(kPrivateIP.HostAsSensitiveURIString()), std::string::npos); std::string server_url = "stun:" + kBadAddr.ToString(); - ASSERT_EQ(error_event_.url, server_url); + EXPECT_EQ(error_event_.url, server_url); +} + +// Test that we fail without emitting an error if we try to get an address from +// a STUN server with a different address family. IPv4 local, IPv6 STUN. 
+TEST_F(StunPortTest, TestServerAddressFamilyMismatch) { + CreateStunPort(kStunServerAddrIPv6Addr); + PrepareAddress(); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); + EXPECT_TRUE(error()); + EXPECT_EQ(0U, port()->Candidates().size()); + EXPECT_EQ(0, error_event_.error_code); } class StunPortWithMockDnsResolverTest : public StunPortTest { public: - StunPortWithMockDnsResolverTest() : StunPortTest(), socket_factory_(ss()) {} + StunPortWithMockDnsResolverTest() + : StunPortTest(), socket_factory_(nat_factory()) {} - rtc::PacketSocketFactory* socket_factory() override { + webrtc::PacketSocketFactory* socket_factory() override { return &socket_factory_; } void SetDnsResolverExpectations( - rtc::MockDnsResolvingPacketSocketFactory::Expectations expectations) { + webrtc::MockDnsResolvingPacketSocketFactory::Expectations expectations) { socket_factory_.SetExpectations(expectations); } private: - rtc::MockDnsResolvingPacketSocketFactory socket_factory_; + webrtc::MockDnsResolvingPacketSocketFactory socket_factory_; }; // Test that we can get an address from a STUN server specified by a hostname. @@ -317,21 +427,25 @@ TEST_F(StunPortWithMockDnsResolverTest, TestPrepareAddressHostname) { [](webrtc::MockAsyncDnsResolver* resolver, webrtc::MockAsyncDnsResolverResult* resolver_result) { EXPECT_CALL(*resolver, Start(kValidHostnameAddr, /*family=*/AF_INET, _)) - .WillOnce([](const rtc::SocketAddress& addr, int family, + .WillOnce([](const webrtc::SocketAddress& /* addr */, + int /* family */, absl::AnyInvocable callback) { callback(); }); EXPECT_CALL(*resolver, result) .WillRepeatedly(ReturnPointee(resolver_result)); EXPECT_CALL(*resolver_result, GetError).WillOnce(Return(0)); EXPECT_CALL(*resolver_result, GetResolvedAddress(AF_INET, _)) - .WillOnce(DoAll(SetArgPointee<1>(SocketAddress("127.0.0.1", 5000)), - Return(true))); + .WillOnce(DoAll(SetArgPointee<1>(kStunServerAddr1), Return(true))); }); CreateStunPort(kValidHostnameAddr); PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); ASSERT_EQ(1U, port()->Candidates().size()); - EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[0].address())); + EXPECT_TRUE(kPublicIP.EqualIPs(port()->Candidates()[0].address())); EXPECT_EQ(kStunCandidatePriority, port()->Candidates()[0].priority()); } @@ -343,21 +457,25 @@ TEST_F(StunPortWithMockDnsResolverTest, [](webrtc::MockAsyncDnsResolver* resolver, webrtc::MockAsyncDnsResolverResult* resolver_result) { EXPECT_CALL(*resolver, Start(kValidHostnameAddr, /*family=*/AF_INET, _)) - .WillOnce([](const rtc::SocketAddress& addr, int family, + .WillOnce([](const webrtc::SocketAddress& /* addr */, + int /* family */, absl::AnyInvocable callback) { callback(); }); EXPECT_CALL(*resolver, result) .WillRepeatedly(ReturnPointee(resolver_result)); EXPECT_CALL(*resolver_result, GetError).WillOnce(Return(0)); EXPECT_CALL(*resolver_result, GetResolvedAddress(AF_INET, _)) - .WillOnce(DoAll(SetArgPointee<1>(SocketAddress("127.0.0.1", 5000)), - Return(true))); + .WillOnce(DoAll(SetArgPointee<1>(kStunServerAddr1), Return(true))); }); CreateStunPort(kValidHostnameAddr); PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + 
{.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); ASSERT_EQ(1U, port()->Candidates().size()); - EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[0].address())); - EXPECT_EQ(kStunCandidatePriority + (cricket::kMaxTurnServers << 8), + EXPECT_TRUE(kPublicIP.EqualIPs(port()->Candidates()[0].address())); + EXPECT_EQ(kStunCandidatePriority + (webrtc::kMaxTurnServers << 8), port()->Candidates()[0].priority()); } @@ -365,33 +483,50 @@ TEST_F(StunPortWithMockDnsResolverTest, TEST_F(StunPortTestWithRealClock, TestPrepareAddressHostnameFail) { CreateStunPort(kBadHostnameAddr); PrepareAddress(); - EXPECT_TRUE_WAIT(done(), kTimeoutMs); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs)}), + webrtc::IsRtcOk()); EXPECT_TRUE(error()); EXPECT_EQ(0U, port()->Candidates().size()); - EXPECT_EQ_WAIT(error_event_.error_code, cricket::SERVER_NOT_REACHABLE_ERROR, - kTimeoutMs); + EXPECT_THAT( + webrtc::WaitUntil([&] { return error_event_.error_code; }, + Eq(webrtc::STUN_ERROR_SERVER_NOT_REACHABLE), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs)}), + webrtc::IsRtcOk()); } // This test verifies keepalive response messages don't result in // additional candidate generation. TEST_F(StunPortTest, TestKeepAliveResponse) { SetKeepaliveDelay(500); // 500ms of keepalive delay. - CreateStunPort(kStunAddr1); + CreateStunPort(kStunServerAddr1); PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); ASSERT_EQ(1U, port()->Candidates().size()); - EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[0].address())); + EXPECT_TRUE(kPublicIP.EqualIPs(port()->Candidates()[0].address())); SIMULATED_WAIT(false, 1000, fake_clock); EXPECT_EQ(1U, port()->Candidates().size()); } // Test that a local candidate can be generated using a shared socket. TEST_F(StunPortTest, TestSharedSocketPrepareAddress) { - CreateSharedUdpPort(kStunAddr1, nullptr); + CreateSharedUdpPort(kStunServerAddr1, nullptr, nullptr); PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); - ASSERT_EQ(1U, port()->Candidates().size()); - EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[0].address())); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); + ASSERT_EQ(2U, port()->Candidates().size()); + EXPECT_EQ(port()->Candidates()[0].type(), IceCandidateType::kHost); + EXPECT_TRUE(kPrivateIP.EqualIPs(port()->Candidates()[0].address())); + EXPECT_EQ(port()->Candidates()[1].type(), IceCandidateType::kSrflx); + EXPECT_TRUE(kPublicIP.EqualIPs(port()->Candidates()[1].address())); } // Test that we still get a local candidate with invalid stun server hostname. 
@@ -401,49 +536,47 @@ TEST_F(StunPortTestWithRealClock, TestSharedSocketPrepareAddressInvalidHostname) { CreateSharedUdpPort(kBadHostnameAddr, nullptr); PrepareAddress(); - EXPECT_TRUE_WAIT(done(), kTimeoutMs); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs)}), + webrtc::IsRtcOk()); ASSERT_EQ(1U, port()->Candidates().size()); - EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[0].address())); + EXPECT_TRUE(kPrivateIP.EqualIPs(port()->Candidates()[0].address())); // Send data to port after it's ready. This is to make sure, UDP port can // handle data with unresolved stun server address. - std::string data = "some random data, sending to cricket::Port."; + std::string data = "some random data, sending to webrtc::Port."; SendData(data.c_str(), data.length()); // No crash is success. } -// Test that a stun candidate (srflx candidate) is discarded whose address is -// equal to that of a local candidate if mDNS obfuscation is not enabled. -TEST_F(StunPortTest, TestStunCandidateDiscardedWithMdnsObfuscationNotEnabled) { - CreateSharedUdpPort(kStunAddr1, nullptr); - PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); - ASSERT_EQ(1U, port()->Candidates().size()); - EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[0].address())); - EXPECT_EQ(port()->Candidates()[0].type(), cricket::LOCAL_PORT_TYPE); -} - // Test that a stun candidate (srflx candidate) is generated whose address is // equal to that of a local candidate if mDNS obfuscation is enabled. TEST_F(StunPortTest, TestStunCandidateGeneratedWithMdnsObfuscationEnabled) { EnableMdnsObfuscation(); - CreateSharedUdpPort(kStunAddr1, nullptr); + CreateSharedUdpPort(kStunServerAddr1, nullptr); PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); ASSERT_EQ(2U, port()->Candidates().size()); - // The addresses of the candidates are both equal to kLocalAddr. - EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[0].address())); - EXPECT_TRUE(kLocalAddr.EqualIPs(port()->Candidates()[1].address())); - // One of the generated candidates is a local candidate and the other is a // stun candidate. EXPECT_NE(port()->Candidates()[0].type(), port()->Candidates()[1].type()); - if (port()->Candidates()[0].type() == cricket::LOCAL_PORT_TYPE) { - EXPECT_EQ(port()->Candidates()[1].type(), cricket::STUN_PORT_TYPE); + if (port()->Candidates()[0].is_local()) { + EXPECT_EQ(kMsdnAddress.HostAsSensitiveURIString(), + port()->Candidates()[0].address().HostAsSensitiveURIString()); + EXPECT_TRUE(port()->Candidates()[1].is_stun()); + EXPECT_TRUE(kPublicIP.EqualIPs(port()->Candidates()[1].address())); } else { - EXPECT_EQ(port()->Candidates()[0].type(), cricket::STUN_PORT_TYPE); - EXPECT_EQ(port()->Candidates()[1].type(), cricket::LOCAL_PORT_TYPE); + EXPECT_TRUE(port()->Candidates()[0].is_stun()); + EXPECT_TRUE(kPublicIP.EqualIPs(port()->Candidates()[0].address())); + EXPECT_TRUE(port()->Candidates()[1].is_local()); + EXPECT_EQ(kMsdnAddress.HostAsSensitiveURIString(), + port()->Candidates()[1].address().HostAsSensitiveURIString()); } } @@ -451,12 +584,16 @@ TEST_F(StunPortTest, TestStunCandidateGeneratedWithMdnsObfuscationEnabled) { // use. 
TEST_F(StunPortTest, TestNoDuplicatedAddressWithTwoStunServers) { ServerAddresses stun_servers; - stun_servers.insert(kStunAddr1); - stun_servers.insert(kStunAddr2); + stun_servers.insert(kStunServerAddr1); + stun_servers.insert(kStunServerAddr2); CreateStunPort(stun_servers); - EXPECT_EQ("stun", port()->Type()); + EXPECT_EQ(IceCandidateType::kSrflx, port()->Type()); PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(1U, port()->Candidates().size()); EXPECT_EQ(port()->Candidates()[0].relay_protocol(), ""); } @@ -465,16 +602,23 @@ TEST_F(StunPortTest, TestNoDuplicatedAddressWithTwoStunServers) { // which is not reachable. TEST_F(StunPortTest, TestMultipleStunServersWithBadServer) { ServerAddresses stun_servers; - stun_servers.insert(kStunAddr1); + stun_servers.insert(kStunServerAddr1); stun_servers.insert(kBadAddr); CreateStunPort(stun_servers); - EXPECT_EQ("stun", port()->Type()); + EXPECT_EQ(IceCandidateType::kSrflx, port()->Type()); PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(1U, port()->Candidates().size()); std::string server_url = "stun:" + kBadAddr.ToString(); - ASSERT_EQ_SIMULATED_WAIT(error_event_.url, server_url, kTimeoutMs, - fake_clock); + ASSERT_THAT( + webrtc::WaitUntil([&] { return error_event_.url; }, Eq(server_url), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); } // Test that two candidates are allocated if the two STUN servers return @@ -486,12 +630,16 @@ TEST_F(StunPortTest, TestTwoCandidatesWithTwoStunServersAcrossNat) { stun_server_2()->set_fake_stun_addr(kStunMappedAddr2); ServerAddresses stun_servers; - stun_servers.insert(kStunAddr1); - stun_servers.insert(kStunAddr2); + stun_servers.insert(kStunServerAddr1); + stun_servers.insert(kStunServerAddr2); CreateStunPort(stun_servers); - EXPECT_EQ("stun", port()->Type()); + EXPECT_EQ(IceCandidateType::kSrflx, port()->Type()); PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(2U, port()->Candidates().size()); EXPECT_EQ(port()->Candidates()[0].relay_protocol(), ""); EXPECT_EQ(port()->Candidates()[1].relay_protocol(), ""); @@ -502,16 +650,16 @@ TEST_F(StunPortTest, TestTwoCandidatesWithTwoStunServersAcrossNat) { // changes. TEST_F(StunPortTest, TestStunPortGetStunKeepaliveLifetime) { // Lifetime for the default (unknown) network type is `kInfiniteLifetime`. - CreateStunPort(kStunAddr1); + CreateStunPort(kStunServerAddr1); EXPECT_EQ(kInfiniteLifetime, port()->stun_keepalive_lifetime()); // Lifetime for the cellular network is `kHighCostPortKeepaliveLifetimeMs` - SetNetworkType(rtc::ADAPTER_TYPE_CELLULAR); + SetNetworkType(webrtc::ADAPTER_TYPE_CELLULAR); EXPECT_EQ(kHighCostPortKeepaliveLifetimeMs, port()->stun_keepalive_lifetime()); // Lifetime for the wifi network is `kInfiniteLifetime`. 
- SetNetworkType(rtc::ADAPTER_TYPE_WIFI); - CreateStunPort(kStunAddr2); + SetNetworkType(webrtc::ADAPTER_TYPE_WIFI); + CreateStunPort(kStunServerAddr2); EXPECT_EQ(kInfiniteLifetime, port()->stun_keepalive_lifetime()); } @@ -520,16 +668,16 @@ TEST_F(StunPortTest, TestStunPortGetStunKeepaliveLifetime) { // if the network type changes. TEST_F(StunPortTest, TestUdpPortGetStunKeepaliveLifetime) { // Lifetime for the default (unknown) network type is `kInfiniteLifetime`. - CreateSharedUdpPort(kStunAddr1, nullptr); + CreateSharedUdpPort(kStunServerAddr1, nullptr); EXPECT_EQ(kInfiniteLifetime, port()->stun_keepalive_lifetime()); // Lifetime for the cellular network is `kHighCostPortKeepaliveLifetimeMs`. - SetNetworkType(rtc::ADAPTER_TYPE_CELLULAR); + SetNetworkType(webrtc::ADAPTER_TYPE_CELLULAR); EXPECT_EQ(kHighCostPortKeepaliveLifetimeMs, port()->stun_keepalive_lifetime()); // Lifetime for the wifi network type is `kInfiniteLifetime`. - SetNetworkType(rtc::ADAPTER_TYPE_WIFI); - CreateSharedUdpPort(kStunAddr2, nullptr); + SetNetworkType(webrtc::ADAPTER_TYPE_WIFI); + CreateSharedUdpPort(kStunServerAddr2, nullptr); EXPECT_EQ(kInfiniteLifetime, port()->stun_keepalive_lifetime()); } @@ -538,24 +686,38 @@ TEST_F(StunPortTest, TestUdpPortGetStunKeepaliveLifetime) { TEST_F(StunPortTest, TestStunBindingRequestShortLifetime) { SetKeepaliveDelay(101); SetKeepaliveLifetime(100); - CreateStunPort(kStunAddr1); + CreateStunPort(kStunServerAddr1); PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); - EXPECT_TRUE_SIMULATED_WAIT(!HasPendingRequest(cricket::STUN_BINDING_REQUEST), - 2000, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return !HasPendingRequest(webrtc::STUN_BINDING_REQUEST); }, + IsTrue(), {.clock = &fake_clock}), + webrtc::IsRtcOk()); } // Test that by default, the STUN binding requests will last for a long time. 
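// ("By default" here follows UDPPort::GetStunKeepaliveLifetime in stun_port.h
// above: with no explicit lifetime set, the port keeps resending STUN binding
// requests for INFINITE_LIFETIME; only when network_cost() >=
// webrtc::kNetworkCostHigh, e.g. on a cellular adapter, do keepalives stop
// after HIGH_COST_PORT_KEEPALIVE_LIFETIME, which these tests mirror as
// kHighCostPortKeepaliveLifetimeMs = 2 * 60 * 1000 ms.)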
TEST_F(StunPortTest, TestStunBindingRequestLongLifetime) { SetKeepaliveDelay(101); - CreateStunPort(kStunAddr1); + CreateStunPort(kStunServerAddr1); PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); - EXPECT_TRUE_SIMULATED_WAIT(HasPendingRequest(cricket::STUN_BINDING_REQUEST), - 1000, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return HasPendingRequest(webrtc::STUN_BINDING_REQUEST); }, + IsTrue(), {.clock = &fake_clock}), + webrtc::IsRtcOk()); } -class MockAsyncPacketSocket : public rtc::AsyncPacketSocket { +class MockAsyncPacketSocket : public webrtc::AsyncPacketSocket { public: ~MockAsyncPacketSocket() = default; @@ -563,7 +725,9 @@ class MockAsyncPacketSocket : public rtc::AsyncPacketSocket { MOCK_METHOD(SocketAddress, GetRemoteAddress, (), (const, override)); MOCK_METHOD(int, Send, - (const void* pv, size_t cb, const rtc::PacketOptions& options), + (const void* pv, + size_t cb, + const webrtc::AsyncSocketPacketOptions& options), (override)); MOCK_METHOD(int, @@ -571,15 +735,18 @@ class MockAsyncPacketSocket : public rtc::AsyncPacketSocket { (const void* pv, size_t cb, const SocketAddress& addr, - const rtc::PacketOptions& options), + const webrtc::AsyncSocketPacketOptions& options), (override)); MOCK_METHOD(int, Close, (), (override)); MOCK_METHOD(State, GetState, (), (const, override)); MOCK_METHOD(int, GetOption, - (rtc::Socket::Option opt, int* value), + (webrtc::Socket::Option opt, int* value), + (override)); + MOCK_METHOD(int, + SetOption, + (webrtc::Socket::Option opt, int value), (override)); - MOCK_METHOD(int, SetOption, (rtc::Socket::Option opt, int value), (override)); MOCK_METHOD(int, GetError, (), (const, override)); MOCK_METHOD(void, SetError, (int error), (override)); }; @@ -587,43 +754,40 @@ class MockAsyncPacketSocket : public rtc::AsyncPacketSocket { // Test that outbound packets inherit the dscp value assigned to the socket. TEST_F(StunPortTest, TestStunPacketsHaveDscpPacketOption) { MockAsyncPacketSocket* socket = new MockAsyncPacketSocket(); - CreateSharedUdpPort(kStunAddr1, socket); - EXPECT_CALL(*socket, GetLocalAddress()).WillRepeatedly(Return(kLocalAddr)); + CreateSharedUdpPort(kStunServerAddr1, socket); + EXPECT_CALL(*socket, GetLocalAddress()).WillRepeatedly(Return(kPrivateIP)); EXPECT_CALL(*socket, GetState()) - .WillRepeatedly(Return(rtc::AsyncPacketSocket::STATE_BOUND)); + .WillRepeatedly(Return(webrtc::AsyncPacketSocket::STATE_BOUND)); EXPECT_CALL(*socket, SetOption(_, _)).WillRepeatedly(Return(0)); // If DSCP is not set on the socket, stun packets should have no value. EXPECT_CALL(*socket, SendTo(_, _, _, - ::testing::Field(&rtc::PacketOptions::dscp, - ::testing::Eq(rtc::DSCP_NO_CHANGE)))) + ::testing::Field(&webrtc::AsyncSocketPacketOptions::dscp, + Eq(webrtc::DSCP_NO_CHANGE)))) .WillOnce(Return(100)); PrepareAddress(); // Once it is set transport wide, they should inherit that value. 
- port()->SetOption(rtc::Socket::OPT_DSCP, rtc::DSCP_AF41); - EXPECT_CALL(*socket, SendTo(_, _, _, - ::testing::Field(&rtc::PacketOptions::dscp, - ::testing::Eq(rtc::DSCP_AF41)))) + port()->SetOption(webrtc::Socket::OPT_DSCP, webrtc::DSCP_AF41); + EXPECT_CALL(*socket, + SendTo(_, _, _, + ::testing::Field(&webrtc::AsyncSocketPacketOptions::dscp, + Eq(webrtc::DSCP_AF41)))) .WillRepeatedly(Return(100)); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); } class StunIPv6PortTestBase : public StunPortTestBase { public: StunIPv6PortTestBase() - : StunPortTestBase(rtc::Network("unittestipv6", - "unittestipv6", - kIPv6LocalAddr.ipaddr(), - 128), - kIPv6LocalAddr.ipaddr()) { - stun_server_ipv6_1_.reset( - cricket::TestStunServer::Create(ss(), kIPv6StunAddr1)); - } - - protected: - std::unique_ptr stun_server_ipv6_1_; + : StunPortTestBase(kPrivateIPv6.ipaddr(), + {kStunServerAddrIPv6Addr}, + kNatAddrIPv6) {} }; class StunIPv6PortTestWithRealClock : public StunIPv6PortTestBase {}; @@ -632,42 +796,73 @@ class StunIPv6PortTest : public FakeClockBase, public StunIPv6PortTestBase {}; // Test that we can get an address from a STUN server. TEST_F(StunIPv6PortTest, TestPrepareAddress) { - CreateStunPort(kIPv6StunAddr1); + CreateStunPort(kStunServerAddrIPv6Addr); PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); ASSERT_EQ(1U, port()->Candidates().size()); - EXPECT_TRUE(kIPv6LocalAddr.EqualIPs(port()->Candidates()[0].address())); - std::string expected_server_url = "stun:::1:5000"; + EXPECT_TRUE(kPublicIPv6.EqualIPs(port()->Candidates()[0].address())); + std::string expected_server_url = "stun:2003:4860:4860::8844:5000"; EXPECT_EQ(port()->Candidates()[0].url(), expected_server_url); } // Test that we fail properly if we can't get an address. 
TEST_F(StunIPv6PortTest, TestPrepareAddressFail) { - CreateStunPort(kBadAddr); + CreateStunPort(kIPv6BadAddr); PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_TRUE(error()); EXPECT_EQ(0U, port()->Candidates().size()); - EXPECT_EQ_SIMULATED_WAIT(error_event_.error_code, - cricket::SERVER_NOT_REACHABLE_ERROR, kTimeoutMs, - fake_clock); - ASSERT_NE(error_event_.error_text.find('.'), std::string::npos); - ASSERT_NE( - error_event_.address.find(kIPv6LocalAddr.HostAsSensitiveURIString()), - std::string::npos); - std::string server_url = "stun:" + kBadAddr.ToString(); - ASSERT_EQ(error_event_.url, server_url); + EXPECT_THAT( + webrtc::WaitUntil([&] { return error_event_.error_code; }, + Eq(webrtc::STUN_ERROR_SERVER_NOT_REACHABLE), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); + EXPECT_NE(error_event_.error_text.find('.'), std::string::npos); + EXPECT_NE(error_event_.address.find(kPrivateIPv6.HostAsSensitiveURIString()), + std::string::npos); + std::string server_url = "stun:" + kIPv6BadAddr.ToString(); + EXPECT_EQ(error_event_.url, server_url); +} + +// Test that we fail without emitting an error if we try to get an address from +// a STUN server with a different address family. IPv6 local, IPv4 STUN. +TEST_F(StunIPv6PortTest, TestServerAddressFamilyMismatch) { + CreateStunPort(kStunServerAddr1); + PrepareAddress(); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); + EXPECT_TRUE(error()); + EXPECT_EQ(0U, port()->Candidates().size()); + EXPECT_EQ(0, error_event_.error_code); } // Test that we handle hostname lookup failures properly with a real clock. TEST_F(StunIPv6PortTestWithRealClock, TestPrepareAddressHostnameFail) { CreateStunPort(kBadHostnameAddr); PrepareAddress(); - EXPECT_TRUE_WAIT(done(), kTimeoutMs); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs)}), + webrtc::IsRtcOk()); EXPECT_TRUE(error()); EXPECT_EQ(0U, port()->Candidates().size()); - EXPECT_EQ_WAIT(error_event_.error_code, cricket::SERVER_NOT_REACHABLE_ERROR, - kTimeoutMs); + EXPECT_THAT( + webrtc::WaitUntil([&] { return error_event_.error_code; }, + Eq(webrtc::STUN_ERROR_SERVER_NOT_REACHABLE), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs)}), + webrtc::IsRtcOk()); } class StunIPv6PortTestWithMockDnsResolver : public StunIPv6PortTest { @@ -675,17 +870,17 @@ class StunIPv6PortTestWithMockDnsResolver : public StunIPv6PortTest { StunIPv6PortTestWithMockDnsResolver() : StunIPv6PortTest(), socket_factory_(ss()) {} - rtc::PacketSocketFactory* socket_factory() override { + webrtc::PacketSocketFactory* socket_factory() override { return &socket_factory_; } void SetDnsResolverExpectations( - rtc::MockDnsResolvingPacketSocketFactory::Expectations expectations) { + webrtc::MockDnsResolvingPacketSocketFactory::Expectations expectations) { socket_factory_.SetExpectations(expectations); } private: - rtc::MockDnsResolvingPacketSocketFactory socket_factory_; + webrtc::MockDnsResolvingPacketSocketFactory socket_factory_; }; // Test that we can get an address from a STUN server specified by a hostname. 
@@ -695,21 +890,25 @@ TEST_F(StunIPv6PortTestWithMockDnsResolver, TestPrepareAddressHostname) { webrtc::MockAsyncDnsResolverResult* resolver_result) { EXPECT_CALL(*resolver, Start(kValidHostnameAddr, /*family=*/AF_INET6, _)) - .WillOnce([](const rtc::SocketAddress& addr, int family, + .WillOnce([](const webrtc::SocketAddress& addr, int family, absl::AnyInvocable callback) { callback(); }); EXPECT_CALL(*resolver, result) .WillRepeatedly(ReturnPointee(resolver_result)); EXPECT_CALL(*resolver_result, GetError).WillOnce(Return(0)); EXPECT_CALL(*resolver_result, GetResolvedAddress(AF_INET6, _)) - .WillOnce(DoAll(SetArgPointee<1>(SocketAddress("::1", 5000)), - Return(true))); + .WillOnce( + DoAll(SetArgPointee<1>(kStunServerAddrIPv6Addr), Return(true))); }); CreateStunPort(kValidHostnameAddr); PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); ASSERT_EQ(1U, port()->Candidates().size()); - EXPECT_TRUE(kIPv6LocalAddr.EqualIPs(port()->Candidates()[0].address())); + EXPECT_TRUE(kPrivateIPv6.EqualIPs(port()->Candidates()[0].address())); EXPECT_EQ(kIPv6StunCandidatePriority, port()->Candidates()[0].priority()); } @@ -723,21 +922,25 @@ TEST_F(StunIPv6PortTestWithMockDnsResolver, webrtc::MockAsyncDnsResolverResult* resolver_result) { EXPECT_CALL(*resolver, Start(kValidHostnameAddr, /*family=*/AF_INET6, _)) - .WillOnce([](const rtc::SocketAddress& addr, int family, + .WillOnce([](const webrtc::SocketAddress& addr, int family, absl::AnyInvocable callback) { callback(); }); EXPECT_CALL(*resolver, result) .WillRepeatedly(ReturnPointee(resolver_result)); EXPECT_CALL(*resolver_result, GetError).WillOnce(Return(0)); EXPECT_CALL(*resolver_result, GetResolvedAddress(AF_INET6, _)) - .WillOnce(DoAll(SetArgPointee<1>(SocketAddress("::1", 5000)), - Return(true))); + .WillOnce( + DoAll(SetArgPointee<1>(kStunServerAddrIPv6Addr), Return(true))); }); CreateStunPort(kValidHostnameAddr, &field_trials); PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(done(), kTimeoutMs, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil([&] { return done(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeoutMs), + .clock = &fake_clock}), + webrtc::IsRtcOk()); ASSERT_EQ(1U, port()->Candidates().size()); - EXPECT_TRUE(kIPv6LocalAddr.EqualIPs(port()->Candidates()[0].address())); - EXPECT_EQ(kIPv6StunCandidatePriority + (cricket::kMaxTurnServers << 8), + EXPECT_TRUE(kPrivateIPv6.EqualIPs(port()->Candidates()[0].address())); + EXPECT_EQ(kIPv6StunCandidatePriority + (webrtc::kMaxTurnServers << 8), port()->Candidates()[0].priority()); } diff --git a/p2p/base/stun_request.cc b/p2p/base/stun_request.cc index 25d387cc3a..952590e471 100644 --- a/p2p/base/stun_request.cc +++ b/p2p/base/stun_request.cc @@ -11,19 +11,28 @@ #include "p2p/base/stun_request.h" #include +#include +#include +#include #include +#include #include #include #include "absl/memory/memory.h" +#include "api/array_view.h" +#include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "api/transport/stun.h" +#include "api/units/time_delta.h" +#include "rtc_base/byte_buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/helpers.h" #include "rtc_base/logging.h" #include "rtc_base/string_encode.h" #include "rtc_base/time_utils.h" // For TimeMillis -namespace cricket { +namespace webrtc { using 
::webrtc::SafeTask; // RFC 5389 says SHOULD be 500ms. @@ -44,8 +53,8 @@ const int STUN_MAX_RETRANSMISSIONS = 8; // Total sends: 9 const int STUN_MAX_RTO = 8000; // milliseconds, or 5 doublings StunRequestManager::StunRequestManager( - webrtc::TaskQueueBase* thread, - std::function send_packet) + TaskQueueBase* thread, + std::function send_packet) : thread_(thread), send_packet_(std::move(send_packet)) {} StunRequestManager::~StunRequestManager() = default; @@ -57,10 +66,14 @@ void StunRequestManager::Send(StunRequest* request) { void StunRequestManager::SendDelayed(StunRequest* request, int delay) { RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK_EQ(this, request->manager()); + RTC_DCHECK(!request->AuthenticationRequired() || + request->msg()->integrity() != + StunMessage::IntegrityStatus::kNotSet) + << "Sending request w/o integrity!"; auto [iter, was_inserted] = requests_.emplace(request->id(), absl::WrapUnique(request)); RTC_DCHECK(was_inserted); - request->Send(webrtc::TimeDelta::Millis(delay)); + request->Send(TimeDelta::Millis(delay)); } void StunRequestManager::FlushForTest(int msg_type) { @@ -74,7 +87,7 @@ void StunRequestManager::FlushForTest(int msg_type) { // of canceling any outstanding tasks and prepare a new flag for // operations related to this call to `Send`. request->ResetTasksForTest(); - request->Send(webrtc::TimeDelta::Zero()); + request->Send(TimeDelta::Zero()); } } } @@ -104,15 +117,23 @@ bool StunRequestManager::CheckResponse(StunMessage* msg) { StunRequest* request = iter->second.get(); // Now that we know the request, we can see if the response is - // integrity-protected or not. - // For some tests, the message integrity is not set in the request. - // Complain, and then don't check. + // integrity-protected or not. Some requests explicitly disable + // integrity checks using SetAuthenticationRequired. + // TODO(chromium:1177125): Remove below! + // And we suspect that for some tests, the message integrity is not set in the + // request. Complain, and then don't check. bool skip_integrity_checking = (request->msg()->integrity() == StunMessage::IntegrityStatus::kNotSet); - if (skip_integrity_checking) { + if (!request->AuthenticationRequired()) { + // This is a STUN_BINDING from stun_port.cc or + // the initial (unauthenticated) TURN_ALLOCATE_REQUEST. + } else if (skip_integrity_checking) { + // TODO(chromium:1177125): Remove below! // This indicates lazy test writing (not adding integrity attribute). // Complain, but only in debug mode (while developing). - RTC_DLOG(LS_ERROR) + RTC_LOG(LS_ERROR) + << "CheckResponse called on a passwordless request. Fix test!"; + RTC_DCHECK(false) << "CheckResponse called on a passwordless request. Fix test!"; } else { if (msg->integrity() == StunMessage::IntegrityStatus::kNotSet) { @@ -133,31 +154,39 @@ bool StunRequestManager::CheckResponse(StunMessage* msg) { } } - bool success = true; - if (!msg->GetNonComprehendedAttributes().empty()) { // If a response contains unknown comprehension-required attributes, it's // simply discarded and the transaction is considered failed. See RFC5389 // sections 7.3.3 and 7.3.4. RTC_LOG(LS_ERROR) << ": Discarding response due to unknown " "comprehension-required attribute."; - success = false; + requests_.erase(iter); + return false; } else if (msg->type() == GetStunSuccessResponseType(request->type())) { if (!msg->IntegrityOk() && !skip_integrity_checking) { return false; } - request->OnResponse(msg); + // Erase element from hash before calling callback. 
This ensures + // that the callback can modify the StunRequestManager any way it + // sees fit. + std::unique_ptr owned_request = std::move(iter->second); + requests_.erase(iter); + owned_request->OnResponse(msg); + return true; } else if (msg->type() == GetStunErrorResponseType(request->type())) { - request->OnErrorResponse(msg); + // Erase element from hash before calling callback. This ensures + // that the callback can modify the StunRequestManager any way it + // sees fit. + std::unique_ptr owned_request = std::move(iter->second); + requests_.erase(iter); + owned_request->OnErrorResponse(msg); + return true; } else { RTC_LOG(LS_ERROR) << "Received response with wrong type: " << msg->type() << " (expecting " << GetStunSuccessResponseType(request->type()) << ")"; return false; } - - requests_.erase(iter); - return success; } bool StunRequestManager::empty() const { @@ -182,11 +211,12 @@ bool StunRequestManager::CheckResponse(const char* data, size_t size) { // Parse the STUN message and continue processing as usual. - rtc::ByteBufferReader buf(data, size); + ByteBufferReader buf( + MakeArrayView(reinterpret_cast(data), size)); std::unique_ptr response(iter->second->msg_->CreateNew()); if (!response->Read(&buf)) { RTC_LOG(LS_WARNING) << "Failed to read STUN response " - << rtc::hex_encode(id); + << webrtc::hex_encode(id); return false; } @@ -238,7 +268,7 @@ const StunMessage* StunRequest::msg() const { int StunRequest::Elapsed() const { RTC_DCHECK_RUN_ON(network_thread()); - return static_cast(rtc::TimeMillis() - tstamp_); + return static_cast(webrtc::TimeMillis() - tstamp_); } void StunRequest::SendInternal() { @@ -249,22 +279,22 @@ void StunRequest::SendInternal() { return; } - tstamp_ = rtc::TimeMillis(); + tstamp_ = webrtc::TimeMillis(); - rtc::ByteBufferWriter buf; + ByteBufferWriter buf; msg_->Write(&buf); manager_.SendPacket(buf.Data(), buf.Length(), this); OnSent(); - SendDelayed(webrtc::TimeDelta::Millis(resend_delay())); + SendDelayed(TimeDelta::Millis(resend_delay())); } -void StunRequest::SendDelayed(webrtc::TimeDelta delay) { +void StunRequest::SendDelayed(TimeDelta delay) { network_thread()->PostDelayedTask( SafeTask(task_safety_.flag(), [this]() { SendInternal(); }), delay); } -void StunRequest::Send(webrtc::TimeDelta delay) { +void StunRequest::Send(TimeDelta delay) { RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK_GE(delay.ms(), 0); @@ -276,7 +306,7 @@ void StunRequest::Send(webrtc::TimeDelta delay) { void StunRequest::ResetTasksForTest() { RTC_DCHECK_RUN_ON(network_thread()); - task_safety_.reset(webrtc::PendingTaskSafetyFlag::CreateDetachedInactive()); + task_safety_.reset(PendingTaskSafetyFlag::CreateDetachedInactive()); count_ = 0; RTC_DCHECK(!timeout_); } @@ -307,4 +337,4 @@ void StunRequest::set_timed_out() { timeout_ = true; } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/stun_request.h b/p2p/base/stun_request.h index 6e83be3830..9c10e40a78 100644 --- a/p2p/base/stun_request.h +++ b/p2p/base/stun_request.h @@ -23,8 +23,9 @@ #include "api/task_queue/task_queue_base.h" #include "api/transport/stun.h" #include "api/units/time_delta.h" +#include "rtc_base/thread_annotations.h" -namespace cricket { +namespace webrtc { class StunRequest; @@ -40,8 +41,9 @@ const int STUN_TOTAL_TIMEOUT = 39750; // milliseconds class StunRequestManager { public: StunRequestManager( - webrtc::TaskQueueBase* thread, - std::function send_packet); + TaskQueueBase* thread, + std::function + send_packet); ~StunRequestManager(); // Starts sending the given request (perhaps 
after a delay). @@ -74,16 +76,17 @@ class StunRequestManager { bool empty() const; - webrtc::TaskQueueBase* network_thread() const { return thread_; } + TaskQueueBase* network_thread() const { return thread_; } void SendPacket(const void* data, size_t size, StunRequest* request); private: typedef std::map> RequestMap; - webrtc::TaskQueueBase* const thread_; + TaskQueueBase* const thread_; RequestMap requests_ RTC_GUARDED_BY(thread_); - const std::function send_packet_; + const std::function + send_packet_; }; // Represents an individual request to be sent. The STUN message can either be @@ -115,11 +118,17 @@ class StunRequest { // Time elapsed since last send (in ms) int Elapsed() const; + // Add method to explicitly allow requests w/o password. + // - STUN_BINDINGs from StunPort to a stun server + // - The initial TURN_ALLOCATE_REQUEST + void SetAuthenticationRequired(bool val) { authentication_required_ = val; } + bool AuthenticationRequired() const { return authentication_required_; } + protected: friend class StunRequestManager; // Called by StunRequestManager. - void Send(webrtc::TimeDelta delay); + void Send(TimeDelta delay); // Called from FlushForTest. // TODO(tommi): Remove when FlushForTest gets removed. @@ -128,17 +137,15 @@ class StunRequest { StunMessage* mutable_msg() { return msg_.get(); } // Called when the message receives a response or times out. - virtual void OnResponse(StunMessage* response) {} - virtual void OnErrorResponse(StunMessage* response) {} + virtual void OnResponse(StunMessage* /* response */) {} + virtual void OnErrorResponse(StunMessage* /* response */) {} virtual void OnTimeout() {} // Called when the message is sent. virtual void OnSent(); // Returns the next delay for resends in milliseconds. virtual int resend_delay(); - webrtc::TaskQueueBase* network_thread() const { - return manager_.network_thread(); - } + TaskQueueBase* network_thread() const { return manager_.network_thread(); } void set_timed_out(); @@ -146,17 +153,29 @@ class StunRequest { void SendInternal(); // Calls `PostDelayedTask` to queue up a call to SendInternal after the // specified timeout. - void SendDelayed(webrtc::TimeDelta delay); + void SendDelayed(TimeDelta delay); StunRequestManager& manager_; const std::unique_ptr msg_; int64_t tstamp_ RTC_GUARDED_BY(network_thread()); int count_ RTC_GUARDED_BY(network_thread()); bool timeout_ RTC_GUARDED_BY(network_thread()); - webrtc::ScopedTaskSafety task_safety_{ - webrtc::PendingTaskSafetyFlag::CreateDetachedInactive()}; + ScopedTaskSafety task_safety_{ + PendingTaskSafetyFlag::CreateDetachedInactive()}; + bool authentication_required_ = true; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::kAllRequestsForTest; +using ::webrtc::STUN_TOTAL_TIMEOUT; +using ::webrtc::StunRequest; +using ::webrtc::StunRequestManager; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_STUN_REQUEST_H_ diff --git a/p2p/base/stun_request_unittest.cc b/p2p/base/stun_request_unittest.cc index 6831d9ffa2..f1745d490a 100644 --- a/p2p/base/stun_request_unittest.cc +++ b/p2p/base/stun_request_unittest.cc @@ -10,18 +10,28 @@ #include "p2p/base/stun_request.h" -#include +#include +#include +#include #include +#include "api/test/rtc_error_matchers.h" +#include "api/transport/stun.h" +#include "api/units/time_delta.h" #include "rtc_base/fake_clock.h" #include "rtc_base/gunit.h" -#include "rtc_base/helpers.h" #include "rtc_base/logging.h" +#include "rtc_base/thread.h" #include "rtc_base/time_utils.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" -namespace cricket { +namespace webrtc { namespace { + +using ::testing::Ne; + std::unique_ptr CreateStunMessage( StunMessageType type, const StunMessage* req = nullptr) { @@ -40,7 +50,7 @@ int TotalDelay(int sends) { class StunRequestTest : public ::testing::Test { public: StunRequestTest() - : manager_(rtc::Thread::Current(), + : manager_(Thread::Current(), [this](const void* data, size_t size, StunRequest* request) { OnSendPacket(data, size, request); }), @@ -54,18 +64,18 @@ class StunRequestTest : public ::testing::Test { request_count_++; } - void OnResponse(StunMessage* res) { + virtual void OnResponse(StunMessage* res) { response_ = res; success_ = true; } - void OnErrorResponse(StunMessage* res) { + virtual void OnErrorResponse(StunMessage* res) { response_ = res; failure_ = true; } - void OnTimeout() { timeout_ = true; } + virtual void OnTimeout() { timeout_ = true; } protected: - rtc::AutoThread main_thread_; + AutoThread main_thread_; StunRequestManager manager_; int request_count_; StunMessage* response_; @@ -79,7 +89,9 @@ class StunRequestThunker : public StunRequest { public: StunRequestThunker(StunRequestManager& manager, StunRequestTest* test) : StunRequest(manager, CreateStunMessage(STUN_BINDING_REQUEST)), - test_(test) {} + test_(test) { + SetAuthenticationRequired(false); + } std::unique_ptr CreateResponseMessage(StunMessageType type) { return CreateStunMessage(type, msg()); @@ -139,17 +151,20 @@ TEST_F(StunRequestTest, TestUnexpected) { // Test that requests are sent at the right times. TEST_F(StunRequestTest, TestBackoff) { - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; auto* request = new StunRequestThunker(manager_, this); std::unique_ptr res = request->CreateResponseMessage(STUN_BINDING_RESPONSE); - int64_t start = rtc::TimeMillis(); + int64_t start = webrtc::TimeMillis(); manager_.Send(request); for (int i = 0; i < 9; ++i) { - EXPECT_TRUE_SIMULATED_WAIT(request_count_ != i, STUN_TOTAL_TIMEOUT, - fake_clock); - int64_t elapsed = rtc::TimeMillis() - start; + EXPECT_THAT( + webrtc::WaitUntil([&] { return request_count_; }, Ne(i), + {.timeout = TimeDelta::Millis(STUN_TOTAL_TIMEOUT), + .clock = &fake_clock}), + webrtc::IsRtcOk()); + int64_t elapsed = webrtc::TimeMillis() - start; RTC_DLOG(LS_INFO) << "STUN request #" << (i + 1) << " sent at " << elapsed << " ms"; EXPECT_EQ(TotalDelay(i), elapsed); @@ -164,13 +179,13 @@ TEST_F(StunRequestTest, TestBackoff) { // Test that we timeout properly if no response is received. 
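// (The timing that makes this work, assuming the initial RTO is 250 ms, which
// is what STUN_TOTAL_TIMEOUT = 39750 ms and the "5 doublings" note on
// STUN_MAX_RTO = 8000 ms imply: resend delays of 250, 500, 1000, 2000, 4000,
// 8000, 8000 and 8000 ms across the 9 sends add up to 31750 ms, and a final
// 8000 ms wait before OnTimeout fires brings the total budget to 39750 ms.)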
TEST_F(StunRequestTest, TestTimeout) { - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; auto* request = new StunRequestThunker(manager_, this); std::unique_ptr res = request->CreateResponseMessage(STUN_BINDING_RESPONSE); manager_.Send(request); - SIMULATED_WAIT(false, cricket::STUN_TOTAL_TIMEOUT, fake_clock); + SIMULATED_WAIT(false, STUN_TOTAL_TIMEOUT, fake_clock); EXPECT_FALSE(manager_.CheckResponse(res.get())); EXPECT_TRUE(response_ == NULL); @@ -216,4 +231,42 @@ TEST_F(StunRequestTest, TestUnrecognizedComprehensionRequiredAttribute) { EXPECT_FALSE(timeout_); } -} // namespace cricket +class StunRequestReentranceTest : public StunRequestTest { + public: + void OnResponse(StunMessage* res) override { + manager_.Clear(); + StunRequestTest::OnResponse(res); + } + void OnErrorResponse(StunMessage* res) override { + manager_.Clear(); + StunRequestTest::OnErrorResponse(res); + } +}; + +TEST_F(StunRequestReentranceTest, TestSuccess) { + auto* request = new StunRequestThunker(manager_, this); + std::unique_ptr res = + request->CreateResponseMessage(STUN_BINDING_RESPONSE); + manager_.Send(request); + EXPECT_TRUE(manager_.CheckResponse(res.get())); + + EXPECT_TRUE(response_ == res.get()); + EXPECT_TRUE(success_); + EXPECT_FALSE(failure_); + EXPECT_FALSE(timeout_); +} + +TEST_F(StunRequestReentranceTest, TestError) { + auto* request = new StunRequestThunker(manager_, this); + std::unique_ptr res = + request->CreateResponseMessage(STUN_BINDING_ERROR_RESPONSE); + manager_.Send(request); + EXPECT_TRUE(manager_.CheckResponse(res.get())); + + EXPECT_TRUE(response_ == res.get()); + EXPECT_FALSE(success_); + EXPECT_TRUE(failure_); + EXPECT_FALSE(timeout_); +} + +} // namespace webrtc diff --git a/p2p/base/tcp_port.cc b/p2p/base/tcp_port.cc index 5f25624d00..7b3b9fddbf 100644 --- a/p2p/base/tcp_port.cc +++ b/p2p/base/tcp_port.cc @@ -68,43 +68,49 @@ #include +#include +#include +#include #include -#include #include "absl/algorithm/container.h" #include "absl/memory/memory.h" #include "absl/strings/string_view.h" +#include "api/candidate.h" +#include "api/packet_socket_factory.h" +#include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" +#include "api/transport/stun.h" #include "api/units/time_delta.h" +#include "p2p/base/connection.h" +#include "p2p/base/connection_info.h" #include "p2p/base/p2p_constants.h" +#include "p2p/base/port.h" +#include "p2p/base/port_interface.h" +#include "p2p/base/stun_request.h" +#include "rtc_base/async_packet_socket.h" #include "rtc_base/checks.h" #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/net_helper.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network/sent_packet.h" #include "rtc_base/rate_tracker.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/time_utils.h" +#include "rtc_base/weak_ptr.h" -namespace cricket { +namespace webrtc { +using ::webrtc::IceCandidateType; using ::webrtc::SafeTask; using ::webrtc::TimeDelta; -TCPPort::TCPPort(rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, +TCPPort::TCPPort(const PortParametersRef& args, uint16_t min_port, uint16_t max_port, - absl::string_view username, - absl::string_view password, - bool allow_listen, - const webrtc::FieldTrialsView* field_trials) - : Port(thread, - LOCAL_PORT_TYPE, - factory, - network, - min_port, - max_port, - username, - password, - field_trials), + bool allow_listen) + : Port(args, IceCandidateType::kHost, min_port, 
max_port), allow_listen_(allow_listen), error_(0) { // TODO(mallinath) - Set preference value as per RFC 6544. @@ -115,7 +121,7 @@ TCPPort::TCPPort(rtc::Thread* thread, // Set TCP_NODELAY (via OPT_NODELAY) for improved performance; this causes // small media packets to be sent immediately rather than being buffered up, // reducing latency. - SetOption(rtc::Socket::OPT_NODELAY, 1); + SetOption(Socket::OPT_NODELAY, 1); } TCPPort::~TCPPort() { @@ -132,8 +138,7 @@ Connection* TCPPort::CreateConnection(const Candidate& address, return NULL; } - if ((address.tcptype() == TCPTYPE_ACTIVE_STR && - address.type() != PRFLX_PORT_TYPE) || + if ((address.tcptype() == TCPTYPE_ACTIVE_STR && !address.is_prflx()) || (address.tcptype().empty() && address.address().port() == 0)) { // It's active only candidate, we should not try to create connections // for these candidates. @@ -145,7 +150,7 @@ Connection* TCPPort::CreateConnection(const Candidate& address, return NULL; // We don't know how to act as an ssl server yet - if ((address.protocol() == SSLTCP_PROTOCOL_NAME) && + if ((address.protocol() == webrtc::SSLTCP_PROTOCOL_NAME) && (origin == ORIGIN_THIS_PORT)) { return NULL; } @@ -155,20 +160,15 @@ Connection* TCPPort::CreateConnection(const Candidate& address, } TCPConnection* conn = NULL; - if (rtc::AsyncPacketSocket* socket = GetIncoming(address.address(), true)) { + if (AsyncPacketSocket* socket = GetIncoming(address.address(), true)) { // Incoming connection; we already created a socket and connected signals, // so we need to hand off the "read packet" responsibility to // TCPConnection. - socket->SignalReadPacket.disconnect(this); + socket->DeregisterReceivedPacketCallback(); conn = new TCPConnection(NewWeakPtr(), address, socket); } else { - // Outgoing connection, which will create a new socket for which we still - // need to connect SignalReadyToSend and SignalSentPacket. + // Outgoing connection, which will create a new socket. conn = new TCPConnection(NewWeakPtr(), address); - if (conn->socket()) { - conn->socket()->SignalReadyToSend.connect(this, &TCPPort::OnReadyToSend); - conn->socket()->SignalSentPacket.connect(this, &TCPPort::OnSentPacket); - } } AddOrReplaceConnection(conn); return conn; @@ -180,10 +180,10 @@ void TCPPort::PrepareAddress() { // failed, we still want to add the socket address. RTC_LOG(LS_VERBOSE) << "Preparing TCP address, current state: " << static_cast(listen_socket_->GetState()); - AddAddress(listen_socket_->GetLocalAddress(), - listen_socket_->GetLocalAddress(), rtc::SocketAddress(), - TCP_PROTOCOL_NAME, "", TCPTYPE_PASSIVE_STR, LOCAL_PORT_TYPE, - ICE_TYPE_PREFERENCE_HOST_TCP, 0, "", true); + AddAddress( + listen_socket_->GetLocalAddress(), listen_socket_->GetLocalAddress(), + SocketAddress(), webrtc::TCP_PROTOCOL_NAME, "", TCPTYPE_PASSIVE_STR, + IceCandidateType::kHost, ICE_TYPE_PREFERENCE_HOST_TCP, 0, "", true); } else { RTC_LOG(LS_INFO) << ToString() << ": Not listening due to firewall restrictions."; @@ -195,19 +195,20 @@ void TCPPort::PrepareAddress() { // can do. // TODO(deadbeef): We could do something like create a dummy socket just to // see what IP we get. But that may be overkill. 
- AddAddress(rtc::SocketAddress(Network()->GetBestIP(), DISCARD_PORT), - rtc::SocketAddress(Network()->GetBestIP(), 0), - rtc::SocketAddress(), TCP_PROTOCOL_NAME, "", TCPTYPE_ACTIVE_STR, - LOCAL_PORT_TYPE, ICE_TYPE_PREFERENCE_HOST_TCP, 0, "", true); + AddAddress(SocketAddress(Network()->GetBestIP(), DISCARD_PORT), + SocketAddress(Network()->GetBestIP(), 0), SocketAddress(), + webrtc::TCP_PROTOCOL_NAME, "", TCPTYPE_ACTIVE_STR, + IceCandidateType::kHost, ICE_TYPE_PREFERENCE_HOST_TCP, 0, "", + true); } } int TCPPort::SendTo(const void* data, size_t size, - const rtc::SocketAddress& addr, - const rtc::PacketOptions& options, + const SocketAddress& addr, + const AsyncSocketPacketOptions& options, bool payload) { - rtc::AsyncPacketSocket* socket = NULL; + AsyncPacketSocket* socket = NULL; TCPConnection* conn = static_cast(GetConnection(addr)); // For Connection, this is the code path used by Ping() to establish @@ -238,7 +239,7 @@ int TCPPort::SendTo(const void* data, return SOCKET_ERROR; } } - rtc::PacketOptions modified_options(options); + AsyncSocketPacketOptions modified_options(options); CopyPortInformationToPacketInfo(&modified_options.info_signaled_after_sent); int sent = socket->Send(data, size, modified_options); if (sent < 0) { @@ -252,7 +253,7 @@ int TCPPort::SendTo(const void* data, return sent; } -int TCPPort::GetOption(rtc::Socket::Option opt, int* value) { +int TCPPort::GetOption(Socket::Option opt, int* value) { auto const& it = socket_options_.find(opt); if (it == socket_options_.end()) { return -1; @@ -261,7 +262,7 @@ int TCPPort::GetOption(rtc::Socket::Option opt, int* value) { return 0; } -int TCPPort::SetOption(rtc::Socket::Option opt, int value) { +int TCPPort::SetOption(Socket::Option opt, int value) { socket_options_[opt] = value; return 0; } @@ -271,15 +272,16 @@ int TCPPort::GetError() { } bool TCPPort::SupportsProtocol(absl::string_view protocol) const { - return protocol == TCP_PROTOCOL_NAME || protocol == SSLTCP_PROTOCOL_NAME; + return protocol == webrtc::TCP_PROTOCOL_NAME || + protocol == webrtc::SSLTCP_PROTOCOL_NAME; } ProtocolType TCPPort::GetProtocol() const { - return PROTO_TCP; + return webrtc::PROTO_TCP; } -void TCPPort::OnNewConnection(rtc::AsyncListenSocket* socket, - rtc::AsyncPacketSocket* new_socket) { +void TCPPort::OnNewConnection(AsyncListenSocket* socket, + AsyncPacketSocket* new_socket) { RTC_DCHECK_EQ(socket, listen_socket_.get()); for (const auto& option : socket_options_) { @@ -288,7 +290,10 @@ void TCPPort::OnNewConnection(rtc::AsyncListenSocket* socket, Incoming incoming; incoming.addr = new_socket->GetRemoteAddress(); incoming.socket = new_socket; - incoming.socket->SignalReadPacket.connect(this, &TCPPort::OnReadPacket); + incoming.socket->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + OnReadPacket(socket, packet); + }); incoming.socket->SignalReadyToSend.connect(this, &TCPPort::OnReadyToSend); incoming.socket->SignalSentPacket.connect(this, &TCPPort::OnSentPacket); @@ -299,7 +304,7 @@ void TCPPort::OnNewConnection(rtc::AsyncListenSocket* socket, void TCPPort::TryCreateServerSocket() { listen_socket_ = absl::WrapUnique(socket_factory()->CreateServerTcpSocket( - rtc::SocketAddress(Network()->GetBestIP(), 0), min_port(), max_port(), + SocketAddress(Network()->GetBestIP(), 0), min_port(), max_port(), false /* ssl */)); if (!listen_socket_) { RTC_LOG(LS_WARNING) @@ -310,9 +315,9 @@ void TCPPort::TryCreateServerSocket() { listen_socket_->SignalNewConnection.connect(this, 
&TCPPort::OnNewConnection); } -rtc::AsyncPacketSocket* TCPPort::GetIncoming(const rtc::SocketAddress& addr, - bool remove) { - rtc::AsyncPacketSocket* socket = NULL; +AsyncPacketSocket* TCPPort::GetIncoming(const SocketAddress& addr, + bool remove) { + AsyncPacketSocket* socket = NULL; for (std::list::iterator it = incoming_.begin(); it != incoming_.end(); ++it) { if (it->addr == addr) { @@ -325,20 +330,17 @@ rtc::AsyncPacketSocket* TCPPort::GetIncoming(const rtc::SocketAddress& addr, return socket; } -void TCPPort::OnReadPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - const int64_t& packet_time_us) { - Port::OnReadPacket(data, size, remote_addr, PROTO_TCP); +void TCPPort::OnReadPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet) { + Port::OnReadPacket(packet, webrtc::PROTO_TCP); } -void TCPPort::OnSentPacket(rtc::AsyncPacketSocket* socket, - const rtc::SentPacket& sent_packet) { +void TCPPort::OnSentPacket(AsyncPacketSocket* socket, + const SentPacketInfo& sent_packet) { PortInterface::SignalSentPacket(sent_packet); } -void TCPPort::OnReadyToSend(rtc::AsyncPacketSocket* socket) { +void TCPPort::OnReadyToSend(AsyncPacketSocket* socket) { Port::OnReadyToSend(); } @@ -346,18 +348,19 @@ void TCPPort::OnReadyToSend(rtc::AsyncPacketSocket* socket) { // `ice_unwritable_timeout` in IceConfig when determining the writability state. // Replace this constant with the config parameter assuming the default value if // we decide it is also applicable here. -TCPConnection::TCPConnection(rtc::WeakPtr tcp_port, +TCPConnection::TCPConnection(WeakPtr tcp_port, const Candidate& candidate, - rtc::AsyncPacketSocket* socket) + AsyncPacketSocket* socket) : Connection(std::move(tcp_port), 0, candidate), socket_(socket), error_(0), outgoing_(socket == NULL), connection_pending_(false), pretending_to_be_writable_(false), - reconnection_timeout_(cricket::CONNECTION_WRITE_CONNECT_TIMEOUT) { + reconnection_timeout_(CONNECTION_WRITE_CONNECT_TIMEOUT) { RTC_DCHECK_RUN_ON(network_thread_); - RTC_DCHECK_EQ(port()->GetProtocol(), PROTO_TCP); // Needs to be TCPPort. + RTC_DCHECK_EQ(port()->GetProtocol(), + webrtc::PROTO_TCP); // Needs to be TCPPort. 
SignalDestroyed.connect(this, &TCPConnection::OnDestroyed); @@ -370,7 +373,7 @@ TCPConnection::TCPConnection(rtc::WeakPtr tcp_port, << socket_->GetLocalAddress().ToSensitiveString() << ", port() Network:" << port()->Network()->ToString(); RTC_DCHECK(absl::c_any_of( - port_->Network()->GetIPs(), [this](const rtc::InterfaceAddress& addr) { + port_->Network()->GetIPs(), [this](const InterfaceAddress& addr) { return socket_->GetLocalAddress().ipaddr() == addr; })); ConnectSocketSignals(socket); @@ -383,7 +386,7 @@ TCPConnection::~TCPConnection() { int TCPConnection::Send(const void* data, size_t size, - const rtc::PacketOptions& options) { + const AsyncSocketPacketOptions& options) { if (!socket_) { error_ = ENOTCONN; return SOCKET_ERROR; @@ -406,11 +409,11 @@ int TCPConnection::Send(const void* data, return SOCKET_ERROR; } stats_.sent_total_packets++; - rtc::PacketOptions modified_options(options); + AsyncSocketPacketOptions modified_options(options); tcp_port()->CopyPortInformationToPacketInfo( &modified_options.info_signaled_after_sent); int sent = socket_->Send(data, size, modified_options); - int64_t now = rtc::TimeMillis(); + int64_t now = webrtc::TimeMillis(); if (sent < 0) { stats_.sent_discarded_packets++; error_ = socket_->GetError(); @@ -425,6 +428,14 @@ int TCPConnection::GetError() { return error_; } +void TCPConnection::OnSentPacket(AsyncPacketSocket* socket, + const SentPacketInfo& sent_packet) { + RTC_DCHECK_RUN_ON(network_thread()); + if (port()) { + port()->SignalSentPacket(sent_packet); + } +} + void TCPConnection::OnConnectionRequestResponse(StunRequest* req, StunMessage* response) { // Process the STUN response before we inform upper layer ready to send. @@ -440,7 +451,7 @@ void TCPConnection::OnConnectionRequestResponse(StunRequest* req, RTC_DCHECK(write_state() == STATE_WRITABLE); } -void TCPConnection::OnConnect(rtc::AsyncPacketSocket* socket) { +void TCPConnection::OnConnect(AsyncPacketSocket* socket) { RTC_DCHECK_EQ(socket, socket_.get()); if (!port_) { @@ -462,9 +473,9 @@ void TCPConnection::OnConnect(rtc::AsyncPacketSocket* socket) { // // Note that, aside from minor differences in log statements, this logic is // identical to that in TurnPort. - const rtc::SocketAddress& socket_address = socket->GetLocalAddress(); + const SocketAddress& socket_address = socket->GetLocalAddress(); if (absl::c_any_of(port_->Network()->GetIPs(), - [socket_address](const rtc::InterfaceAddress& addr) { + [socket_address](const InterfaceAddress& addr) { return socket_address.ipaddr() == addr; })) { RTC_LOG(LS_VERBOSE) << ToString() << ": Connection established to " @@ -476,7 +487,7 @@ void TCPConnection::OnConnect(rtc::AsyncPacketSocket* socket) { << ", rather than an address associated with network:" << port_->Network()->ToString() << ". 
Still allowing it since it's localhost."; - } else if (IPIsAny(port_->Network()->GetBestIP())) { + } else if (webrtc::IPIsAny(port_->Network()->GetBestIP())) { RTC_LOG(LS_WARNING) << "Socket is bound to the address:" << socket_address.ipaddr().ToSensitiveString() @@ -499,7 +510,7 @@ void TCPConnection::OnConnect(rtc::AsyncPacketSocket* socket) { connection_pending_ = false; } -void TCPConnection::OnClose(rtc::AsyncPacketSocket* socket, int error) { +void TCPConnection::OnClose(AsyncPacketSocket* socket, int error) { RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK_EQ(socket, socket_.get()); RTC_LOG(LS_INFO) << ToString() << ": Connection closed with error " << error; @@ -558,17 +569,14 @@ void TCPConnection::MaybeReconnect() { error_ = EPIPE; } -void TCPConnection::OnReadPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - const int64_t& packet_time_us) { +void TCPConnection::OnReadPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet) { RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK_EQ(socket, socket_.get()); - Connection::OnReadPacket(data, size, packet_time_us); + Connection::OnReadPacket(packet); } -void TCPConnection::OnReadyToSend(rtc::AsyncPacketSocket* socket) { +void TCPConnection::OnReadyToSend(AsyncPacketSocket* socket) { RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK_EQ(socket, socket_.get()); Connection::OnReadyToSend(); @@ -584,20 +592,19 @@ void TCPConnection::OnDestroyed(Connection* c) { void TCPConnection::CreateOutgoingTcpSocket() { RTC_DCHECK(outgoing_); - int opts = (remote_candidate().protocol() == SSLTCP_PROTOCOL_NAME) - ? rtc::PacketSocketFactory::OPT_TLS_FAKE + int opts = (remote_candidate().protocol() == webrtc::SSLTCP_PROTOCOL_NAME) + ? PacketSocketFactory::OPT_TLS_FAKE : 0; if (socket_) { DisconnectSocketSignals(socket_.get()); } - rtc::PacketSocketTcpOptions tcp_opts; + PacketSocketTcpOptions tcp_opts; tcp_opts.opts = opts; socket_.reset(port()->socket_factory()->CreateClientTcpSocket( - rtc::SocketAddress(port()->Network()->GetBestIP(), 0), - remote_candidate().address(), port()->proxy(), port()->user_agent(), - tcp_opts)); + SocketAddress(port()->Network()->GetBestIP(), 0), + remote_candidate().address(), tcp_opts)); if (socket_) { RTC_LOG(LS_VERBOSE) << ToString() << ": Connecting from " << socket_->GetLocalAddress().ToSensitiveString() @@ -619,26 +626,37 @@ void TCPConnection::CreateOutgoingTcpSocket() { } } -void TCPConnection::ConnectSocketSignals(rtc::AsyncPacketSocket* socket) { +void TCPConnection::ConnectSocketSignals(AsyncPacketSocket* socket) { + // Incoming connections register SignalSentPacket and SignalReadyToSend + // directly on the port in TCPPort::OnNewConnection. if (outgoing_) { socket->SignalConnect.connect(this, &TCPConnection::OnConnect); + socket->SignalSentPacket.connect(this, &TCPConnection::OnSentPacket); + socket->SignalReadyToSend.connect(this, &TCPConnection::OnReadyToSend); } - socket->SignalReadPacket.connect(this, &TCPConnection::OnReadPacket); - socket->SignalReadyToSend.connect(this, &TCPConnection::OnReadyToSend); + + // For incoming connections, this re-registers the ReceivedPacketCallback to the + // connection instead of the port. 
+ socket->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + OnReadPacket(socket, packet); + }); socket->SubscribeCloseEvent(this, [this, safety = network_safety_.flag()]( - rtc::AsyncPacketSocket* s, int err) { + AsyncPacketSocket* s, int err) { if (safety->alive()) OnClose(s, err); }); } -void TCPConnection::DisconnectSocketSignals(rtc::AsyncPacketSocket* socket) { +void TCPConnection::DisconnectSocketSignals(AsyncPacketSocket* socket) { if (outgoing_) { + // Incoming connections do not register these signals in TCPConnection. socket->SignalConnect.disconnect(this); + socket->SignalReadyToSend.disconnect(this); + socket->SignalSentPacket.disconnect(this); } - socket->SignalReadPacket.disconnect(this); - socket->SignalReadyToSend.disconnect(this); + socket->DeregisterReceivedPacketCallback(); socket->UnsubscribeCloseEvent(this); } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/tcp_port.h b/p2p/base/tcp_port.h index a1bbaa9f35..3f82c9d79d 100644 --- a/p2p/base/tcp_port.h +++ b/p2p/base/tcp_port.h @@ -11,19 +11,32 @@ #ifndef P2P_BASE_TCP_PORT_H_ #define P2P_BASE_TCP_PORT_H_ +#include +#include #include #include -#include #include "absl/memory/memory.h" #include "absl/strings/string_view.h" +#include "api/candidate.h" +#include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" +#include "api/transport/stun.h" #include "p2p/base/connection.h" #include "p2p/base/port.h" +#include "p2p/base/port_interface.h" +#include "p2p/base/stun_request.h" #include "rtc_base/async_packet_socket.h" +#include "rtc_base/checks.h" #include "rtc_base/containers/flat_map.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/weak_ptr.h" -namespace cricket { +namespace webrtc { class TCPConnection; @@ -35,21 +48,15 @@ class TCPConnection; // call this TCPPort::OnReadPacket (3 arg) to dispatch to a connection. class TCPPort : public Port { public: - static std::unique_ptr Create( - rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, - uint16_t min_port, - uint16_t max_port, - absl::string_view username, - absl::string_view password, - bool allow_listen, - const webrtc::FieldTrialsView* field_trials = nullptr) { + static std::unique_ptr Create(const PortParametersRef& args, + uint16_t min_port, + uint16_t max_port, + bool allow_listen) { // Using `new` to access a non-public constructor. - return absl::WrapUnique(new TCPPort(thread, factory, network, min_port, - max_port, username, password, - allow_listen, field_trials)); + return absl::WrapUnique( + new TCPPort(args, min_port, max_port, allow_listen)); } + ~TCPPort() override; Connection* CreateConnection(const Candidate& address, @@ -60,64 +67,55 @@ class TCPPort : public Port { // Options apply to accepted sockets. // TODO(bugs.webrtc.org/13065): Apply also to outgoing and existing // connections. 
- int GetOption(rtc::Socket::Option opt, int* value) override; - int SetOption(rtc::Socket::Option opt, int value) override; + int GetOption(Socket::Option opt, int* value) override; + int SetOption(Socket::Option opt, int value) override; int GetError() override; bool SupportsProtocol(absl::string_view protocol) const override; ProtocolType GetProtocol() const override; protected: - TCPPort(rtc::Thread* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, + TCPPort(const PortParametersRef& args, uint16_t min_port, uint16_t max_port, - absl::string_view username, - absl::string_view password, - bool allow_listen, - const webrtc::FieldTrialsView* field_trials); + bool allow_listen); // Handles sending using the local TCP socket. int SendTo(const void* data, size_t size, - const rtc::SocketAddress& addr, - const rtc::PacketOptions& options, + const SocketAddress& addr, + const AsyncSocketPacketOptions& options, bool payload) override; // Accepts incoming TCP connection. - void OnNewConnection(rtc::AsyncListenSocket* socket, - rtc::AsyncPacketSocket* new_socket); + void OnNewConnection(AsyncListenSocket* socket, + AsyncPacketSocket* new_socket); private: struct Incoming { - rtc::SocketAddress addr; - rtc::AsyncPacketSocket* socket; + SocketAddress addr; + AsyncPacketSocket* socket; }; void TryCreateServerSocket(); - rtc::AsyncPacketSocket* GetIncoming(const rtc::SocketAddress& addr, - bool remove = false); + AsyncPacketSocket* GetIncoming(const SocketAddress& addr, + bool remove = false); // Receives packet signal from the local TCP Socket. - void OnReadPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - const int64_t& packet_time_us); + void OnReadPacket(AsyncPacketSocket* socket, const ReceivedIpPacket& packet); - void OnSentPacket(rtc::AsyncPacketSocket* socket, - const rtc::SentPacket& sent_packet) override; + void OnSentPacket(AsyncPacketSocket* socket, + const SentPacketInfo& sent_packet) override; - void OnReadyToSend(rtc::AsyncPacketSocket* socket); + void OnReadyToSend(AsyncPacketSocket* socket); bool allow_listen_; - std::unique_ptr listen_socket_; + std::unique_ptr listen_socket_; // Options to be applied to accepted sockets. // TODO(bugs.webrtc:13065): Configure connect/accept in the same way, but // currently, setting OPT_NODELAY for client sockets is done (unconditionally) // by BasicPacketSocketFactory::CreateClientTcpSocket. - webrtc::flat_map socket_options_; + flat_map socket_options_; int error_; std::list incoming_; @@ -128,17 +126,17 @@ class TCPPort : public Port { class TCPConnection : public Connection, public sigslot::has_slots<> { public: // Connection is outgoing unless socket is specified - TCPConnection(rtc::WeakPtr tcp_port, + TCPConnection(WeakPtr tcp_port, const Candidate& candidate, - rtc::AsyncPacketSocket* socket = nullptr); + AsyncPacketSocket* socket = nullptr); ~TCPConnection() override; int Send(const void* data, size_t size, - const rtc::PacketOptions& options) override; + const AsyncSocketPacketOptions& options) override; int GetError() override; - rtc::AsyncPacketSocket* socket() { return socket_.get(); } + AsyncPacketSocket* socket() { return socket_.get(); } // Allow test cases to overwrite the default timeout period. 
int reconnection_timeout() const { return reconnection_timeout_; } @@ -161,28 +159,26 @@ class TCPConnection : public Connection, public sigslot::has_slots<> { void CreateOutgoingTcpSocket() RTC_RUN_ON(network_thread()); - void ConnectSocketSignals(rtc::AsyncPacketSocket* socket) + void ConnectSocketSignals(AsyncPacketSocket* socket) RTC_RUN_ON(network_thread()); - void DisconnectSocketSignals(rtc::AsyncPacketSocket* socket) + void DisconnectSocketSignals(AsyncPacketSocket* socket) RTC_RUN_ON(network_thread()); - void OnConnect(rtc::AsyncPacketSocket* socket); - void OnClose(rtc::AsyncPacketSocket* socket, int error); - void OnReadPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - const int64_t& packet_time_us); - void OnReadyToSend(rtc::AsyncPacketSocket* socket); + void OnConnect(AsyncPacketSocket* socket); + void OnClose(AsyncPacketSocket* socket, int error); + void OnSentPacket(AsyncPacketSocket* socket, + const SentPacketInfo& sent_packet); + void OnReadPacket(AsyncPacketSocket* socket, const ReceivedIpPacket& packet); + void OnReadyToSend(AsyncPacketSocket* socket); void OnDestroyed(Connection* c); TCPPort* tcp_port() { - RTC_DCHECK_EQ(port()->GetProtocol(), PROTO_TCP); + RTC_DCHECK_EQ(port()->GetProtocol(), webrtc::PROTO_TCP); return static_cast(port()); } - std::unique_ptr socket_; + std::unique_ptr socket_; int error_; const bool outgoing_; @@ -200,9 +196,18 @@ class TCPConnection : public Connection, public sigslot::has_slots<> { // Allow test case to overwrite the default timeout period. int reconnection_timeout_; - webrtc::ScopedTaskSafety network_safety_; + ScopedTaskSafety network_safety_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
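// [Editor's note, not part of the patch] Illustrative sketch of creating a
// TCPPort through the new PortParametersRef-based factory, as the updated
// unit tests in this patch do. The env/network_thread/socket_factory/network/
// ufrag/pwd variables are placeholders for objects the caller already owns,
// and Create() is assumed to return std::unique_ptr<TCPPort> as declared
// above:
//
//   std::unique_ptr<webrtc::TCPPort> port = webrtc::TCPPort::Create(
//       {.env = env,
//        .network_thread = network_thread,
//        .socket_factory = socket_factory,
//        .network = network,
//        .ice_username_fragment = ufrag,
//        .ice_password = pwd},
//       /*min_port=*/0, /*max_port=*/0, /*allow_listen=*/true);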
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::TCPConnection; +using ::webrtc::TCPPort; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_TCP_PORT_H_ diff --git a/p2p/base/tcp_port_unittest.cc b/p2p/base/tcp_port_unittest.cc index 1bb59811b8..a5e7982bbf 100644 --- a/p2p/base/tcp_port_unittest.cc +++ b/p2p/base/tcp_port_unittest.cc @@ -10,29 +10,48 @@ #include "p2p/base/tcp_port.h" +#include #include #include +#include #include +#include "api/candidate.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" #include "p2p/base/basic_packet_socket_factory.h" +#include "p2p/base/connection.h" #include "p2p/base/p2p_constants.h" +#include "p2p/base/port.h" #include "p2p/base/transport_description.h" -#include "rtc_base/gunit.h" -#include "rtc_base/helpers.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/checks.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/ip_address.h" +#include "rtc_base/network.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" #include "rtc_base/virtual_socket_server.h" +#include "test/gmock.h" #include "test/gtest.h" -#include "test/scoped_key_value_config.h" - -using cricket::Connection; -using cricket::ICE_PWD_LENGTH; -using cricket::ICE_UFRAG_LENGTH; -using cricket::Port; -using cricket::TCPPort; -using rtc::SocketAddress; +#include "test/wait_until.h" + +using ::testing::Eq; +using ::testing::IsTrue; +using ::webrtc::Connection; +using ::webrtc::CreateEnvironment; +using ::webrtc::Environment; +using ::webrtc::ICE_PWD_LENGTH; +using ::webrtc::ICE_UFRAG_LENGTH; +using ::webrtc::Port; +using ::webrtc::SocketAddress; +using ::webrtc::TCPPort; static int kTimeout = 1000; static const SocketAddress kLocalAddr("11.11.11.11", 0); @@ -70,45 +89,57 @@ class ConnectionObserver : public sigslot::has_slots<> { class TCPPortTest : public ::testing::Test, public sigslot::has_slots<> { public: TCPPortTest() - : ss_(new rtc::VirtualSocketServer()), + : ss_(new webrtc::VirtualSocketServer()), main_(ss_.get()), socket_factory_(ss_.get()), - username_(rtc::CreateRandomString(ICE_UFRAG_LENGTH)), - password_(rtc::CreateRandomString(ICE_PWD_LENGTH)) {} + username_(webrtc::CreateRandomString(webrtc::ICE_UFRAG_LENGTH)), + password_(webrtc::CreateRandomString(webrtc::ICE_PWD_LENGTH)) {} - rtc::Network* MakeNetwork(const SocketAddress& addr) { + webrtc::Network* MakeNetwork(const SocketAddress& addr) { networks_.emplace_back("unittest", "unittest", addr.ipaddr(), 32); networks_.back().AddIP(addr.ipaddr()); return &networks_.back(); } - std::unique_ptr CreateTCPPort(const SocketAddress& addr) { + std::unique_ptr CreateTCPPort(const SocketAddress& addr, + bool allow_listen = true, + int port_number = 0) { auto port = std::unique_ptr( - TCPPort::Create(&main_, &socket_factory_, MakeNetwork(addr), 0, 0, - username_, password_, true, &field_trials_)); + TCPPort::Create({.env = env_, + .network_thread = &main_, + .socket_factory = &socket_factory_, + .network = MakeNetwork(addr), + .ice_username_fragment = username_, + .ice_password = password_}, + port_number, port_number, allow_listen)); port->SetIceTiebreaker(kTiebreakerDefault); return port; } - std::unique_ptr CreateTCPPort(const rtc::Network* network) { + 
std::unique_ptr CreateTCPPort(const webrtc::Network* network) { auto port = std::unique_ptr( - TCPPort::Create(&main_, &socket_factory_, network, 0, 0, username_, - password_, true, &field_trials_)); + TCPPort::Create({.env = env_, + .network_thread = &main_, + .socket_factory = &socket_factory_, + .network = network, + .ice_username_fragment = username_, + .ice_password = password_}, + 0, 0, true)); port->SetIceTiebreaker(kTiebreakerDefault); return port; } protected: + const Environment env_ = CreateEnvironment(); // When a "create port" helper method is called with an IP, we create a // Network with that IP and add it to this list. Using a list instead of a // vector so that when it grows, pointers aren't invalidated. - std::list networks_; - std::unique_ptr ss_; - rtc::AutoSocketServerThread main_; - rtc::BasicPacketSocketFactory socket_factory_; + std::list networks_; + std::unique_ptr ss_; + webrtc::AutoSocketServerThread main_; + webrtc::BasicPacketSocketFactory socket_factory_; std::string username_; std::string password_; - webrtc::test::ScopedKeyValueConfig field_trials_; }; TEST_F(TCPPortTest, TestTCPPortWithLocalhostAddress) { @@ -122,7 +153,10 @@ TEST_F(TCPPortTest, TestTCPPortWithLocalhostAddress) { remote_port->PrepareAddress(); Connection* conn = local_port->CreateConnection(remote_port->Candidates()[0], Port::ORIGIN_MESSAGE); - EXPECT_TRUE_WAIT(conn->connected(), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return conn->connected(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); // Verify that the socket actually used localhost, otherwise this test isn't // doing what it meant to. ASSERT_EQ(local_address.ipaddr(), @@ -151,7 +185,10 @@ TEST_F(TCPPortTest, TCPPortDiscardedIfBoundAddressDoesNotMatchNetwork) { Connection* conn = local_port->CreateConnection(remote_port->Candidates()[0], Port::ORIGIN_MESSAGE); ConnectionObserver observer(conn); - EXPECT_TRUE_WAIT(observer.connection_destroyed(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return observer.connection_destroyed(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); } // A caveat for the above logic: if the socket ends up bound to one of the IPs @@ -163,7 +200,7 @@ TEST_F(TCPPortTest, TCPPortNotDiscardedIfNotBoundToBestIP) { // Set up a network with kLocalAddr1 as the "best" IP, and kAlternateLocalAddr // as an alternate. - rtc::Network* network = MakeNetwork(kLocalAddr); + webrtc::Network* network = MakeNetwork(kLocalAddr); network->AddIP(kAlternateLocalAddr.ipaddr()); ASSERT_EQ(kLocalAddr.ipaddr(), network->GetBestIP()); @@ -176,7 +213,10 @@ TEST_F(TCPPortTest, TCPPortNotDiscardedIfNotBoundToBestIP) { // Expect connection to succeed. Connection* conn = local_port->CreateConnection(remote_port->Candidates()[0], Port::ORIGIN_MESSAGE); - EXPECT_TRUE_WAIT(conn->connected(), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return conn->connected(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); // Verify that the socket actually used the alternate address, otherwise this // test isn't doing what it meant to. @@ -185,11 +225,11 @@ TEST_F(TCPPortTest, TCPPortNotDiscardedIfNotBoundToBestIP) { } // Regression test for crbug.com/webrtc/8972, caused by buggy comparison -// between rtc::IPAddress and rtc::InterfaceAddress. +// between webrtc::IPAddress and webrtc::InterfaceAddress. 
TEST_F(TCPPortTest, TCPPortNotDiscardedIfBoundToTemporaryIP) { networks_.emplace_back("unittest", "unittest", kLocalIPv6Addr.ipaddr(), 32); - networks_.back().AddIP(rtc::InterfaceAddress( - kLocalIPv6Addr.ipaddr(), rtc::IPV6_ADDRESS_FLAG_TEMPORARY)); + networks_.back().AddIP(webrtc::InterfaceAddress( + kLocalIPv6Addr.ipaddr(), webrtc::IPV6_ADDRESS_FLAG_TEMPORARY)); auto local_port = CreateTCPPort(&networks_.back()); auto remote_port = CreateTCPPort(kRemoteIPv6Addr); @@ -200,7 +240,10 @@ TEST_F(TCPPortTest, TCPPortNotDiscardedIfBoundToTemporaryIP) { Connection* conn = local_port->CreateConnection(remote_port->Candidates()[0], Port::ORIGIN_MESSAGE); ASSERT_NE(nullptr, conn); - EXPECT_TRUE_WAIT(conn->connected(), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return conn->connected(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); } class SentPacketCounter : public sigslot::has_slots<> { @@ -212,7 +255,7 @@ class SentPacketCounter : public sigslot::has_slots<> { int sent_packets() const { return sent_packets_; } private: - void OnSentPacket(const rtc::SentPacket&) { ++sent_packets_; } + void OnSentPacket(const webrtc::SentPacketInfo&) { ++sent_packets_; } int sent_packets_ = 0; }; @@ -222,38 +265,182 @@ class SentPacketCounter : public sigslot::has_slots<> { TEST_F(TCPPortTest, SignalSentPacket) { std::unique_ptr client(CreateTCPPort(kLocalAddr)); std::unique_ptr server(CreateTCPPort(kRemoteAddr)); - client->SetIceRole(cricket::ICEROLE_CONTROLLING); - server->SetIceRole(cricket::ICEROLE_CONTROLLED); + client->SetIceRole(webrtc::ICEROLE_CONTROLLING); + server->SetIceRole(webrtc::ICEROLE_CONTROLLED); client->PrepareAddress(); server->PrepareAddress(); Connection* client_conn = client->CreateConnection(server->Candidates()[0], Port::ORIGIN_MESSAGE); ASSERT_NE(nullptr, client_conn); - ASSERT_TRUE_WAIT(client_conn->connected(), kTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return client_conn->connected(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); // Need to get the port of the actual outgoing socket, not the server socket.. 
- cricket::Candidate client_candidate = client->Candidates()[0]; - client_candidate.set_address(static_cast(client_conn) + webrtc::Candidate client_candidate = client->Candidates()[0]; + client_candidate.set_address(static_cast(client_conn) ->socket() ->GetLocalAddress()); Connection* server_conn = server->CreateConnection(client_candidate, Port::ORIGIN_THIS_PORT); ASSERT_NE(nullptr, server_conn); - ASSERT_TRUE_WAIT(server_conn->connected(), kTimeout); - - client_conn->Ping(rtc::TimeMillis()); - server_conn->Ping(rtc::TimeMillis()); - ASSERT_TRUE_WAIT(client_conn->writable(), kTimeout); - ASSERT_TRUE_WAIT(server_conn->writable(), kTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return server_conn->connected(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); + + client_conn->Ping(webrtc::TimeMillis()); + server_conn->Ping(webrtc::TimeMillis()); + ASSERT_THAT( + webrtc::WaitUntil([&] { return client_conn->writable(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); + ASSERT_THAT( + webrtc::WaitUntil([&] { return server_conn->writable(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); SentPacketCounter client_counter(client.get()); SentPacketCounter server_counter(server.get()); static const char kData[] = "hello"; for (int i = 0; i < 10; ++i) { - client_conn->Send(&kData, sizeof(kData), rtc::PacketOptions()); - server_conn->Send(&kData, sizeof(kData), rtc::PacketOptions()); + client_conn->Send(&kData, sizeof(kData), + webrtc::AsyncSocketPacketOptions()); + server_conn->Send(&kData, sizeof(kData), + webrtc::AsyncSocketPacketOptions()); + } + EXPECT_THAT( + webrtc::WaitUntil([&] { return client_counter.sent_packets(); }, Eq(10), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil([&] { return server_counter.sent_packets(); }, Eq(10), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); +} + +// Test that SignalSentPacket is fired when a packet is successfully sent, even +// after a remote server has been restarted. +TEST_F(TCPPortTest, SignalSentPacketAfterReconnect) { + std::unique_ptr client( + CreateTCPPort(kLocalAddr, /*allow_listen=*/false)); + constexpr int kServerPort = 123; + std::unique_ptr server( + CreateTCPPort(kRemoteAddr, /*allow_listen=*/true, kServerPort)); + client->SetIceRole(webrtc::ICEROLE_CONTROLLING); + server->SetIceRole(webrtc::ICEROLE_CONTROLLED); + client->PrepareAddress(); + server->PrepareAddress(); + + Connection* client_conn = + client->CreateConnection(server->Candidates()[0], Port::ORIGIN_MESSAGE); + ASSERT_NE(nullptr, client_conn); + ASSERT_THAT( + webrtc::WaitUntil([&] { return client_conn->connected(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); + + // Need to get the port of the actual outgoing socket. 
+ webrtc::Candidate client_candidate = client->Candidates()[0]; + client_candidate.set_address(static_cast(client_conn) + ->socket() + ->GetLocalAddress()); + client_candidate.set_tcptype(""); + Connection* server_conn = + server->CreateConnection(client_candidate, Port::ORIGIN_THIS_PORT); + ASSERT_THAT( + webrtc::WaitUntil([&] { return server_conn->connected(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); + EXPECT_FALSE(client_conn->writable()); + client_conn->Ping(webrtc::TimeMillis()); + ASSERT_THAT( + webrtc::WaitUntil([&] { return client_conn->writable(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); + + SentPacketCounter client_counter(client.get()); + static const char kData[] = "hello"; + int result = client_conn->Send(&kData, sizeof(kData), + webrtc::AsyncSocketPacketOptions()); + EXPECT_EQ(result, 6); + + // Deleting the server port should break the current connection. + server = nullptr; + server_conn = nullptr; + ASSERT_THAT( + webrtc::WaitUntil([&] { return !client_conn->connected(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); + + // Recreate the server port with the same port number. + server = CreateTCPPort(kRemoteAddr, /*allow_listen=*/true, kServerPort); + server->SetIceRole(webrtc::ICEROLE_CONTROLLED); + server->PrepareAddress(); + + // Sending a packet from the client will trigger a reconnect attempt but the + // packet will be discarded. + result = client_conn->Send(&kData, sizeof(kData), + webrtc::AsyncSocketPacketOptions()); + EXPECT_EQ(result, SOCKET_ERROR); + ASSERT_THAT( + webrtc::WaitUntil([&] { return client_conn->connected(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); + // For unknown reasons, the connection is still supposed to be writable... + EXPECT_TRUE(client_conn->writable()); + for (int i = 0; i < 10; ++i) { + // All sent packets still fail to send. + EXPECT_EQ(client_conn->Send(&kData, sizeof(kData), + webrtc::AsyncSocketPacketOptions()), + SOCKET_ERROR); + } + // And are not reported as sent. + EXPECT_THAT( + webrtc::WaitUntil([&] { return client_counter.sent_packets(); }, Eq(1), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); + + // Create the server connection again so the server can reply to STUN pings. + // The client's outgoing socket port will have changed, since the client + // creates a new socket when it reconnects. + client_candidate = client->Candidates()[0]; + client_candidate.set_address(static_cast(client_conn) + ->socket() + ->GetLocalAddress()); + client_candidate.set_tcptype(""); + server_conn = + server->CreateConnection(client_candidate, Port::ORIGIN_THIS_PORT); + ASSERT_THAT( + webrtc::WaitUntil([&] { return server_conn->connected(); }, IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil([&] { return client_counter.sent_packets(); }, Eq(1), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); + + // Send a STUN Binding request. + client_conn->Ping(webrtc::TimeMillis()); + // The STUN Binding request is reported as sent. + EXPECT_THAT( + webrtc::WaitUntil([&] { return client_counter.sent_packets(); }, Eq(2), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); + // Wait a bit for the STUN response to be received. 
+ webrtc::Thread::Current()->ProcessMessages(100); + + // After the Stun Ping response has been received, packets can be sent again + // and SignalSentPacket should be invoked. + for (int i = 0; i < 5; ++i) { + EXPECT_EQ(client_conn->Send(&kData, sizeof(kData), + webrtc::AsyncSocketPacketOptions()), + 6); } - EXPECT_EQ_WAIT(10, client_counter.sent_packets(), kTimeout); - EXPECT_EQ_WAIT(10, server_counter.sent_packets(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return client_counter.sent_packets(); }, Eq(2 + 5), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + webrtc::IsRtcOk()); } diff --git a/p2p/base/test_stun_server.cc b/p2p/base/test_stun_server.cc deleted file mode 100644 index d4c3b2d851..0000000000 --- a/p2p/base/test_stun_server.cc +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2017 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "p2p/base/test_stun_server.h" - -#include "rtc_base/socket.h" -#include "rtc_base/socket_server.h" - -namespace cricket { - -TestStunServer* TestStunServer::Create(rtc::SocketServer* ss, - const rtc::SocketAddress& addr) { - rtc::Socket* socket = ss->CreateSocket(addr.family(), SOCK_DGRAM); - rtc::AsyncUDPSocket* udp_socket = rtc::AsyncUDPSocket::Create(socket, addr); - - return new TestStunServer(udp_socket); -} - -void TestStunServer::OnBindingRequest(StunMessage* msg, - const rtc::SocketAddress& remote_addr) { - if (fake_stun_addr_.IsNil()) { - StunServer::OnBindingRequest(msg, remote_addr); - } else { - StunMessage response(STUN_BINDING_RESPONSE, msg->transaction_id()); - GetStunBindResponse(msg, fake_stun_addr_, &response); - SendResponse(response, remote_addr); - } -} - -} // namespace cricket diff --git a/p2p/base/test_stun_server.h b/p2p/base/test_stun_server.h deleted file mode 100644 index 11ac620bb8..0000000000 --- a/p2p/base/test_stun_server.h +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2008 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef P2P_BASE_TEST_STUN_SERVER_H_ -#define P2P_BASE_TEST_STUN_SERVER_H_ - -#include "api/transport/stun.h" -#include "p2p/base/stun_server.h" -#include "rtc_base/async_udp_socket.h" -#include "rtc_base/socket_address.h" -#include "rtc_base/socket_server.h" - -namespace cricket { - -// A test STUN server. Useful for unit tests. -class TestStunServer : StunServer { - public: - static TestStunServer* Create(rtc::SocketServer* ss, - const rtc::SocketAddress& addr); - - // Set a fake STUN address to return to the client. 
- void set_fake_stun_addr(const rtc::SocketAddress& addr) { - fake_stun_addr_ = addr; - } - - private: - explicit TestStunServer(rtc::AsyncUDPSocket* socket) : StunServer(socket) {} - - void OnBindingRequest(StunMessage* msg, - const rtc::SocketAddress& remote_addr) override; - - private: - rtc::SocketAddress fake_stun_addr_; -}; - -} // namespace cricket - -#endif // P2P_BASE_TEST_STUN_SERVER_H_ diff --git a/p2p/base/transport_description.cc b/p2p/base/transport_description.cc index f3b1fbb6ea..0a5621a16d 100644 --- a/p2p/base/transport_description.cc +++ b/p2p/base/transport_description.cc @@ -10,19 +10,27 @@ #include "p2p/base/transport_description.h" +#include +#include +#include +#include + +#include "absl/algorithm/container.h" #include "absl/strings/ascii.h" #include "absl/strings/match.h" #include "absl/strings/string_view.h" +#include "api/rtc_error.h" #include "p2p/base/p2p_constants.h" #include "rtc_base/arraysize.h" #include "rtc_base/logging.h" +#include "rtc_base/ssl_fingerprint.h" #include "rtc_base/strings/string_builder.h" using webrtc::RTCError; using webrtc::RTCErrorOr; using webrtc::RTCErrorType; -namespace cricket { +namespace webrtc { namespace { bool IsIceChar(char c) { @@ -42,7 +50,7 @@ bool IsIceChar(char c) { RTCError ValidateIceUfrag(absl::string_view raw_ufrag) { if (!(ICE_UFRAG_MIN_LENGTH <= raw_ufrag.size() && raw_ufrag.size() <= ICE_UFRAG_MAX_LENGTH)) { - rtc::StringBuilder sb; + StringBuilder sb; sb << "ICE ufrag must be between " << ICE_UFRAG_MIN_LENGTH << " and " << ICE_UFRAG_MAX_LENGTH << " characters long."; return RTCError(RTCErrorType::SYNTAX_ERROR, sb.Release()); @@ -60,7 +68,7 @@ RTCError ValidateIceUfrag(absl::string_view raw_ufrag) { RTCError ValidateIcePwd(absl::string_view raw_pwd) { if (!(ICE_PWD_MIN_LENGTH <= raw_pwd.size() && raw_pwd.size() <= ICE_PWD_MAX_LENGTH)) { - rtc::StringBuilder sb; + StringBuilder sb; sb << "ICE pwd must be between " << ICE_PWD_MIN_LENGTH << " and " << ICE_PWD_MAX_LENGTH << " characters long."; return RTCError(RTCErrorType::SYNTAX_ERROR, sb.Release()); @@ -109,7 +117,7 @@ RTCError IceParameters::Validate() const { return RTCError::OK(); } -absl::optional StringToConnectionRole( +std::optional StringToConnectionRole( absl::string_view role_str) { const char* const roles[] = { CONNECTIONROLE_ACTIVE_STR, CONNECTIONROLE_PASSIVE_STR, @@ -120,22 +128,22 @@ absl::optional StringToConnectionRole( return static_cast(CONNECTIONROLE_ACTIVE + i); } } - return absl::nullopt; + return std::nullopt; } bool ConnectionRoleToString(const ConnectionRole& role, std::string* role_str) { switch (role) { - case cricket::CONNECTIONROLE_ACTIVE: - *role_str = cricket::CONNECTIONROLE_ACTIVE_STR; + case CONNECTIONROLE_ACTIVE: + *role_str = CONNECTIONROLE_ACTIVE_STR; break; - case cricket::CONNECTIONROLE_ACTPASS: - *role_str = cricket::CONNECTIONROLE_ACTPASS_STR; + case CONNECTIONROLE_ACTPASS: + *role_str = CONNECTIONROLE_ACTPASS_STR; break; - case cricket::CONNECTIONROLE_PASSIVE: - *role_str = cricket::CONNECTIONROLE_PASSIVE_STR; + case CONNECTIONROLE_PASSIVE: + *role_str = CONNECTIONROLE_PASSIVE_STR; break; - case cricket::CONNECTIONROLE_HOLDCONN: - *role_str = cricket::CONNECTIONROLE_HOLDCONN_STR; + case CONNECTIONROLE_HOLDCONN: + *role_str = CONNECTIONROLE_HOLDCONN_STR; break; default: return false; @@ -152,7 +160,7 @@ TransportDescription::TransportDescription( absl::string_view ice_pwd, IceMode ice_mode, ConnectionRole role, - const rtc::SSLFingerprint* identity_fingerprint) + const SSLFingerprint* identity_fingerprint) : 
transport_options(transport_options), ice_ufrag(ice_ufrag), ice_pwd(ice_pwd), @@ -193,4 +201,4 @@ TransportDescription& TransportDescription::operator=( return *this; } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/transport_description.h b/p2p/base/transport_description.h index 7d28ad52e9..7f113e35f3 100644 --- a/p2p/base/transport_description.h +++ b/p2p/base/transport_description.h @@ -11,30 +11,20 @@ #ifndef P2P_BASE_TRANSPORT_DESCRIPTION_H_ #define P2P_BASE_TRANSPORT_DESCRIPTION_H_ +#include #include +#include #include #include #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/rtc_error.h" -#include "p2p/base/p2p_constants.h" +#include "p2p/base/p2p_constants.h" // IWYU pragma: keep #include "rtc_base/ssl_fingerprint.h" #include "rtc_base/system/rtc_export.h" -namespace cricket { - -// SEC_ENABLED and SEC_REQUIRED should only be used if the session -// was negotiated over TLS, to protect the inline crypto material -// exchange. -// SEC_DISABLED: No crypto in outgoing offer, ignore any supplied crypto. -// SEC_ENABLED: Crypto in outgoing offer and answer (if supplied in offer). -// SEC_REQUIRED: Crypto in outgoing offer and answer. Fail any offer with absent -// or unsupported crypto. -// TODO(deadbeef): Remove this or rename it to something more appropriate, like -// SdesPolicy. -enum SecurePolicy { SEC_DISABLED, SEC_ENABLED, SEC_REQUIRED }; +namespace webrtc { // Whether our side of the call is driving the negotiation, or the other side. enum IceRole { ICEROLE_CONTROLLING = 0, ICEROLE_CONTROLLED, ICEROLE_UNKNOWN }; @@ -61,9 +51,8 @@ enum ConnectionRole { struct IceParameters { // Constructs an IceParameters from a user-provided ufrag/pwd combination. // Returns a SyntaxError if the ufrag or pwd are malformed. - static RTC_EXPORT webrtc::RTCErrorOr Parse( - absl::string_view raw_ufrag, - absl::string_view raw_pwd); + static RTC_EXPORT RTCErrorOr Parse(absl::string_view raw_ufrag, + absl::string_view raw_pwd); // TODO(honghaiz): Include ICE mode in this structure to match the ORTC // struct: @@ -87,18 +76,13 @@ struct IceParameters { // Validate IceParameters, returns a SyntaxError if the ufrag or pwd are // malformed. 
- webrtc::RTCError Validate() const; + RTCError Validate() const; }; -extern const char CONNECTIONROLE_ACTIVE_STR[]; -extern const char CONNECTIONROLE_PASSIVE_STR[]; -extern const char CONNECTIONROLE_ACTPASS_STR[]; -extern const char CONNECTIONROLE_HOLDCONN_STR[]; - constexpr auto* ICE_OPTION_TRICKLE = "trickle"; constexpr auto* ICE_OPTION_RENOMINATION = "renomination"; -absl::optional StringToConnectionRole( +std::optional StringToConnectionRole( absl::string_view role_str); bool ConnectionRoleToString(const ConnectionRole& role, std::string* role_str); @@ -109,7 +93,7 @@ struct TransportDescription { absl::string_view ice_pwd, IceMode ice_mode, ConnectionRole role, - const rtc::SSLFingerprint* identity_fingerprint); + const SSLFingerprint* identity_fingerprint); TransportDescription(absl::string_view ice_ufrag, absl::string_view ice_pwd); TransportDescription(const TransportDescription& from); ~TransportDescription(); @@ -130,11 +114,11 @@ struct TransportDescription { HasOption(ICE_OPTION_RENOMINATION)); } - static rtc::SSLFingerprint* CopyFingerprint(const rtc::SSLFingerprint* from) { + static SSLFingerprint* CopyFingerprint(const SSLFingerprint* from) { if (!from) return NULL; - return new rtc::SSLFingerprint(*from); + return new SSLFingerprint(*from); } // These are actually ICE options (appearing in the ice-options attribute in @@ -146,9 +130,35 @@ struct TransportDescription { IceMode ice_mode; ConnectionRole connection_role; - std::unique_ptr identity_fingerprint; + std::unique_ptr identity_fingerprint; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::ConnectionRole; +using ::webrtc::CONNECTIONROLE_ACTIVE; +using ::webrtc::CONNECTIONROLE_ACTPASS; +using ::webrtc::CONNECTIONROLE_HOLDCONN; +using ::webrtc::CONNECTIONROLE_NONE; +using ::webrtc::CONNECTIONROLE_PASSIVE; +using ::webrtc::ConnectionRoleToString; +using ::webrtc::ICE_OPTION_RENOMINATION; +using ::webrtc::ICE_OPTION_TRICKLE; +using ::webrtc::IceMode; +using ::webrtc::ICEMODE_FULL; +using ::webrtc::ICEMODE_LITE; +using ::webrtc::IceParameters; +using ::webrtc::IceRole; +using ::webrtc::ICEROLE_CONTROLLED; +using ::webrtc::ICEROLE_CONTROLLING; +using ::webrtc::ICEROLE_UNKNOWN; +using ::webrtc::StringToConnectionRole; +using ::webrtc::TransportDescription; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_TRANSPORT_DESCRIPTION_H_ diff --git a/p2p/base/transport_description_factory.cc b/p2p/base/transport_description_factory.cc index 7eb21da166..f016afa466 100644 --- a/p2p/base/transport_description_factory.cc +++ b/p2p/base/transport_description_factory.cc @@ -15,15 +15,18 @@ #include #include +#include "api/field_trials_view.h" +#include "p2p/base/ice_credentials_iterator.h" #include "p2p/base/transport_description.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/ssl_fingerprint.h" -namespace cricket { +namespace webrtc { TransportDescriptionFactory::TransportDescriptionFactory( - const webrtc::FieldTrialsView& field_trials) - : secure_(SEC_DISABLED), field_trials_(field_trials) {} + const FieldTrialsView& field_trials) + : field_trials_(field_trials) {} TransportDescriptionFactory::~TransportDescriptionFactory() = default; @@ -47,13 +50,15 @@ std::unique_ptr TransportDescriptionFactory::CreateOffer( desc->AddOption(ICE_OPTION_RENOMINATION); } - 
// If we are trying to establish a secure transport, add a fingerprint. - if (secure_ == SEC_ENABLED || secure_ == SEC_REQUIRED) { - // Fail if we can't create the fingerprint. - // If we are the initiator set role to "actpass". - if (!SetSecurityInfo(desc.get(), CONNECTIONROLE_ACTPASS)) { - return NULL; - } + // If we are not trying to establish a secure transport, don't add a + // fingerprint. + if (insecure_ && !certificate_) { + return desc; + } + // Fail if we can't create the fingerprint. + // If we are the initiator set role to "actpass". + if (!SetSecurityInfo(desc.get(), CONNECTIONROLE_ACTPASS)) { + return NULL; } return desc; @@ -87,43 +92,49 @@ std::unique_ptr TransportDescriptionFactory::CreateAnswer( if (options.enable_ice_renomination) { desc->AddOption(ICE_OPTION_RENOMINATION); } - - // Negotiate security params. - if (offer && offer->identity_fingerprint.get()) { - // The offer supports DTLS, so answer with DTLS, as long as we support it. - if (secure_ == SEC_ENABLED || secure_ == SEC_REQUIRED) { - ConnectionRole role = CONNECTIONROLE_NONE; - // If the offer does not constrain the role, go with preference. - if (offer->connection_role == CONNECTIONROLE_ACTPASS) { - role = (options.prefer_passive_role) ? CONNECTIONROLE_PASSIVE - : CONNECTIONROLE_ACTIVE; - } else if (offer->connection_role == CONNECTIONROLE_ACTIVE) { - role = CONNECTIONROLE_PASSIVE; - } else if (offer->connection_role == CONNECTIONROLE_PASSIVE) { - role = CONNECTIONROLE_ACTIVE; - } else if (offer->connection_role == CONNECTIONROLE_NONE) { - // This case may be reached if a=setup is not present in the SDP. - RTC_LOG(LS_WARNING) << "Remote offer connection role is NONE, which is " - "a protocol violation"; - role = (options.prefer_passive_role) ? CONNECTIONROLE_PASSIVE - : CONNECTIONROLE_ACTIVE; - } else { - RTC_LOG(LS_ERROR) << "Remote offer connection role is " << role - << " which is a protocol violation"; - RTC_DCHECK_NOTREACHED(); - } - - if (!SetSecurityInfo(desc.get(), role)) { - return NULL; - } + // Special affordance for testing: Answer without DTLS params + // if we are insecure without a certificate, or if we are + // insecure with a non-DTLS offer. + if ((!certificate_ || !offer->identity_fingerprint.get()) && insecure()) { + return desc; + } + if (!offer->identity_fingerprint.get()) { + if (require_transport_attributes) { + // We require DTLS, but the other side didn't offer it. Fail. + RTC_LOG(LS_WARNING) << "Failed to create TransportDescription answer " + "because of incompatible security settings"; + return NULL; } - } else if (require_transport_attributes && secure_ == SEC_REQUIRED) { - // We require DTLS, but the other side didn't offer it. Fail. - RTC_LOG(LS_WARNING) << "Failed to create TransportDescription answer " - "because of incompatible security settings"; + // This may be a bundled section, fingerprint may legitimately be missing. + return desc; + } + // Negotiate security params. + // The offer supports DTLS, so answer with DTLS. + RTC_CHECK(certificate_); + ConnectionRole role = CONNECTIONROLE_NONE; + // If the offer does not constrain the role, go with preference. + if (offer->connection_role == CONNECTIONROLE_ACTPASS) { + role = (options.prefer_passive_role) ? 
CONNECTIONROLE_PASSIVE + : CONNECTIONROLE_ACTIVE; + } else if (offer->connection_role == CONNECTIONROLE_ACTIVE) { + role = CONNECTIONROLE_PASSIVE; + } else if (offer->connection_role == CONNECTIONROLE_PASSIVE) { + role = CONNECTIONROLE_ACTIVE; + } else if (offer->connection_role == CONNECTIONROLE_NONE) { + // This case may be reached if a=setup is not present in the SDP. + RTC_LOG(LS_WARNING) << "Remote offer connection role is NONE, which is " + "a protocol violation"; + role = (options.prefer_passive_role) ? CONNECTIONROLE_PASSIVE + : CONNECTIONROLE_ACTIVE; + } else { + RTC_LOG(LS_ERROR) << "Remote offer connection role is " << role + << " which is a protocol violation"; + RTC_DCHECK_NOTREACHED(); + return NULL; + } + if (!SetSecurityInfo(desc.get(), role)) { return NULL; } - return desc; } @@ -138,7 +149,7 @@ bool TransportDescriptionFactory::SetSecurityInfo(TransportDescription* desc, // RFC 4572 Section 5 requires that those lines use the same hash function as // the certificate's signature, which is what CreateFromCertificate does. desc->identity_fingerprint = - rtc::SSLFingerprint::CreateFromCertificate(*certificate_); + SSLFingerprint::CreateFromCertificate(*certificate_); if (!desc->identity_fingerprint) { return false; } @@ -148,4 +159,4 @@ bool TransportDescriptionFactory::SetSecurityInfo(TransportDescription* desc, return true; } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/transport_description_factory.h b/p2p/base/transport_description_factory.h index 11352f88b4..b1a7cc956b 100644 --- a/p2p/base/transport_description_factory.h +++ b/p2p/base/transport_description_factory.h @@ -15,15 +15,13 @@ #include #include "api/field_trials_view.h" +#include "api/scoped_refptr.h" #include "p2p/base/ice_credentials_iterator.h" #include "p2p/base/transport_description.h" #include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_identity.h" -namespace rtc { -class SSLIdentity; -} - -namespace cricket { +namespace webrtc { struct TransportOptions { bool ice_restart = false; @@ -39,20 +37,16 @@ struct TransportOptions { class TransportDescriptionFactory { public: // Default ctor; use methods below to set configuration. - explicit TransportDescriptionFactory( - const webrtc::FieldTrialsView& field_trials); + explicit TransportDescriptionFactory(const FieldTrialsView& field_trials); ~TransportDescriptionFactory(); - SecurePolicy secure() const { return secure_; } // The certificate to use when setting up DTLS. - const rtc::scoped_refptr& certificate() const { + const scoped_refptr& certificate() const { return certificate_; } - // Specifies the transport security policy to use. - void set_secure(SecurePolicy s) { secure_ = s; } - // Specifies the certificate to use (only used when secure != SEC_DISABLED). - void set_certificate(rtc::scoped_refptr certificate) { + // Specifies the certificate to use + void set_certificate(scoped_refptr certificate) { certificate_ = std::move(certificate); } @@ -75,17 +69,32 @@ class TransportDescriptionFactory { const TransportDescription* current_description, IceCredentialsIterator* ice_credentials) const; - const webrtc::FieldTrialsView& trials() const { return field_trials_; } + const FieldTrialsView& trials() const { return field_trials_; } + // Functions for disabling encryption - test only! + // In insecure mode, the connection will accept a description without + // fingerprint, and will generate SDP even if certificate is not set. 
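
For reference, the a=setup role selection that CreateAnswer performs inline above can be summarized as a standalone sketch. The free-function form and the name NegotiateConnectionRole are illustrative only (the patch keeps this logic inside TransportDescriptionFactory::CreateAnswer); the mapping itself is the one visible in the hunk above.

    #include <optional>

    #include "p2p/base/transport_description.h"

    // Sketch: given the offerer's a=setup value, pick the answerer's role.
    std::optional<webrtc::ConnectionRole> NegotiateConnectionRole(
        webrtc::ConnectionRole offered_role,
        bool prefer_passive_role) {
      switch (offered_role) {
        case webrtc::CONNECTIONROLE_ACTPASS:
          // Offer leaves the choice to the answerer; follow the preference.
          return prefer_passive_role ? webrtc::CONNECTIONROLE_PASSIVE
                                     : webrtc::CONNECTIONROLE_ACTIVE;
        case webrtc::CONNECTIONROLE_ACTIVE:
          return webrtc::CONNECTIONROLE_PASSIVE;
        case webrtc::CONNECTIONROLE_PASSIVE:
          return webrtc::CONNECTIONROLE_ACTIVE;
        case webrtc::CONNECTIONROLE_NONE:
          // a=setup missing from the offer: a protocol violation, but
          // tolerated, as in the RTC_LOG(LS_WARNING) branch above.
          return prefer_passive_role ? webrtc::CONNECTIONROLE_PASSIVE
                                     : webrtc::CONNECTIONROLE_ACTIVE;
        default:
          // Any other value is rejected, mirroring RTC_DCHECK_NOTREACHED().
          return std::nullopt;
      }
    }
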
+ // If certificate is set, it will accept a description both with and + // without fingerprint, but will generate a description with fingerprint. + bool insecure() const { return insecure_; } + void SetInsecureForTesting() { insecure_ = true; } private: bool SetSecurityInfo(TransportDescription* description, ConnectionRole role) const; - - SecurePolicy secure_; - rtc::scoped_refptr certificate_; - const webrtc::FieldTrialsView& field_trials_; + bool insecure_ = false; + scoped_refptr certificate_; + const FieldTrialsView& field_trials_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::TransportDescriptionFactory; +using ::webrtc::TransportOptions; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_TRANSPORT_DESCRIPTION_FACTORY_H_ diff --git a/p2p/base/transport_description_factory_unittest.cc b/p2p/base/transport_description_factory_unittest.cc index 0da5b7c294..1324a4fc76 100644 --- a/p2p/base/transport_description_factory_unittest.cc +++ b/p2p/base/transport_description_factory_unittest.cc @@ -17,10 +17,13 @@ #include #include "absl/strings/string_view.h" +#include "api/scoped_refptr.h" +#include "p2p/base/ice_credentials_iterator.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/transport_description.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/fake_ssl_identity.h" +#include "rtc_base/rtc_certificate.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_fingerprint.h" #include "rtc_base/ssl_identity.h" @@ -28,11 +31,12 @@ #include "test/gtest.h" #include "test/scoped_key_value_config.h" -using cricket::TransportDescription; -using cricket::TransportDescriptionFactory; -using cricket::TransportOptions; using ::testing::Contains; using ::testing::Not; +using ::testing::NotNull; +using ::webrtc::TransportDescription; +using ::webrtc::TransportDescriptionFactory; +using ::webrtc::TransportOptions; class TransportDescriptionFactoryTest : public ::testing::Test { public: @@ -40,10 +44,16 @@ class TransportDescriptionFactoryTest : public ::testing::Test { : ice_credentials_({}), f1_(field_trials_), f2_(field_trials_), - cert1_(rtc::RTCCertificate::Create(std::unique_ptr( - new rtc::FakeSSLIdentity("User1")))), - cert2_(rtc::RTCCertificate::Create(std::unique_ptr( - new rtc::FakeSSLIdentity("User2")))) {} + cert1_( + webrtc::RTCCertificate::Create(std::unique_ptr( + new webrtc::FakeSSLIdentity("User1")))), + cert2_( + webrtc::RTCCertificate::Create(std::unique_ptr( + new webrtc::FakeSSLIdentity("User2")))) { + // By default, certificates are supplied. + f1_.set_certificate(cert1_); + f2_.set_certificate(cert2_); + } void CheckDesc(const TransportDescription* desc, absl::string_view opt, @@ -53,9 +63,9 @@ class TransportDescriptionFactoryTest : public ::testing::Test { ASSERT_TRUE(desc != NULL); EXPECT_EQ(!opt.empty(), desc->HasOption(opt)); if (ice_ufrag.empty() && ice_pwd.empty()) { - EXPECT_EQ(static_cast(cricket::ICE_UFRAG_LENGTH), + EXPECT_EQ(static_cast(webrtc::ICE_UFRAG_LENGTH), desc->ice_ufrag.size()); - EXPECT_EQ(static_cast(cricket::ICE_PWD_LENGTH), + EXPECT_EQ(static_cast(webrtc::ICE_PWD_LENGTH), desc->ice_pwd.size()); } else { EXPECT_EQ(ice_ufrag, desc->ice_ufrag); @@ -75,8 +85,13 @@ class TransportDescriptionFactoryTest : public ::testing::Test { // in the offer and answer is changed. 
// If `dtls` is true, the test verifies that the finger print is not changed. void TestIceRestart(bool dtls) { - SetDtls(dtls); - cricket::TransportOptions options; + if (dtls) { + f1_.set_certificate(cert1_); + f2_.set_certificate(cert2_); + } else { + SetInsecure(); + } + webrtc::TransportOptions options; // The initial offer / answer exchange. std::unique_ptr offer = f1_.CreateOffer(options, NULL, &ice_credentials_); @@ -102,11 +117,13 @@ class TransportDescriptionFactoryTest : public ::testing::Test { void VerifyUfragAndPasswordChanged(bool dtls, const TransportDescription* org_desc, const TransportDescription* restart_desc) { + ASSERT_THAT(org_desc, NotNull()); + ASSERT_THAT(restart_desc, NotNull()); EXPECT_NE(org_desc->ice_pwd, restart_desc->ice_pwd); EXPECT_NE(org_desc->ice_ufrag, restart_desc->ice_ufrag); - EXPECT_EQ(static_cast(cricket::ICE_UFRAG_LENGTH), + EXPECT_EQ(static_cast(webrtc::ICE_UFRAG_LENGTH), restart_desc->ice_ufrag.size()); - EXPECT_EQ(static_cast(cricket::ICE_PWD_LENGTH), + EXPECT_EQ(static_cast(webrtc::ICE_PWD_LENGTH), restart_desc->ice_pwd.size()); // If DTLS is enabled, make sure the finger print is unchanged. if (dtls) { @@ -118,9 +135,11 @@ class TransportDescriptionFactoryTest : public ::testing::Test { } void TestIceRenomination(bool dtls) { - SetDtls(dtls); + if (!dtls) { + SetInsecureNoDtls(); + } - cricket::TransportOptions options; + webrtc::TransportOptions options; // The initial offer / answer exchange. std::unique_ptr offer = f1_.CreateOffer(options, nullptr, &ice_credentials_); @@ -148,51 +167,39 @@ class TransportDescriptionFactoryTest : public ::testing::Test { } protected: - void SetDtls(bool dtls) { - if (dtls) { - f1_.set_secure(cricket::SEC_ENABLED); - f2_.set_secure(cricket::SEC_ENABLED); - f1_.set_certificate(cert1_); - f2_.set_certificate(cert2_); - } else { - f1_.set_secure(cricket::SEC_DISABLED); - f2_.set_secure(cricket::SEC_DISABLED); - } + // This will enable responding to non-DTLS requests. + void SetInsecure() { + f1_.SetInsecureForTesting(); + f2_.SetInsecureForTesting(); + } + // This will disable the ability to respond to DTLS requests. + void SetInsecureNoDtls() { + SetInsecure(); + f1_.set_certificate(nullptr); + f2_.set_certificate(nullptr); } webrtc::test::ScopedKeyValueConfig field_trials_; - cricket::IceCredentialsIterator ice_credentials_; + webrtc::IceCredentialsIterator ice_credentials_; TransportDescriptionFactory f1_; TransportDescriptionFactory f2_; - rtc::scoped_refptr cert1_; - rtc::scoped_refptr cert2_; + webrtc::scoped_refptr cert1_; + webrtc::scoped_refptr cert2_; }; -TEST_F(TransportDescriptionFactoryTest, TestOfferDefault) { - std::unique_ptr desc = - f1_.CreateOffer(TransportOptions(), NULL, &ice_credentials_); - CheckDesc(desc.get(), "", "", "", ""); -} - TEST_F(TransportDescriptionFactoryTest, TestOfferDtls) { - f1_.set_secure(cricket::SEC_ENABLED); - f1_.set_certificate(cert1_); std::string digest_alg; ASSERT_TRUE( cert1_->GetSSLCertificate().GetSignatureDigestAlgorithm(&digest_alg)); std::unique_ptr desc = f1_.CreateOffer(TransportOptions(), NULL, &ice_credentials_); CheckDesc(desc.get(), "", "", "", digest_alg); - // Ensure it also works with SEC_REQUIRED. - f1_.set_secure(cricket::SEC_REQUIRED); - desc = f1_.CreateOffer(TransportOptions(), NULL, &ice_credentials_); - CheckDesc(desc.get(), "", "", "", digest_alg); } // Test generating an offer with DTLS fails with no identity. 
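
As a usage illustration of the reworked factory surface (SecurePolicy removed, configuration driven by the certificate plus the new test-only insecure flag), the following sketch shows the two setups the updated fixture relies on. The wrapper function name ConfigureFactoryExamples is hypothetical; the types, headers, and calls are the ones already used in this test file.

    #include <memory>

    #include "p2p/base/ice_credentials_iterator.h"
    #include "p2p/base/transport_description.h"
    #include "p2p/base/transport_description_factory.h"
    #include "rtc_base/fake_ssl_identity.h"
    #include "rtc_base/rtc_certificate.h"
    #include "rtc_base/ssl_identity.h"
    #include "test/scoped_key_value_config.h"

    // Sketch: the two factory configurations exercised by these tests.
    void ConfigureFactoryExamples() {
      webrtc::test::ScopedKeyValueConfig field_trials;
      webrtc::IceCredentialsIterator ice_credentials({});
      webrtc::TransportDescriptionFactory factory(field_trials);

      // Default (DTLS) setup: a certificate is always supplied, so offers and
      // answers carry a fingerprint, and CreateOffer() fails without one.
      factory.set_certificate(webrtc::RTCCertificate::Create(
          std::unique_ptr<webrtc::SSLIdentity>(
              new webrtc::FakeSSLIdentity("user"))));
      std::unique_ptr<webrtc::TransportDescription> offer = factory.CreateOffer(
          webrtc::TransportOptions(), nullptr, &ice_credentials);

      // Test-only insecure setup: drop the certificate and opt in explicitly,
      // so fingerprint-free descriptions can be produced and accepted.
      factory.set_certificate(nullptr);
      factory.SetInsecureForTesting();
    }
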
TEST_F(TransportDescriptionFactoryTest, TestOfferDtlsWithNoIdentity) { - f1_.set_secure(cricket::SEC_ENABLED); + f1_.set_certificate(nullptr); std::unique_ptr desc = f1_.CreateOffer(TransportOptions(), NULL, &ice_credentials_); ASSERT_TRUE(desc.get() == NULL); @@ -201,8 +208,6 @@ TEST_F(TransportDescriptionFactoryTest, TestOfferDtlsWithNoIdentity) { // Test updating an offer with DTLS to pick ICE. // The ICE credentials should stay the same in the new offer. TEST_F(TransportDescriptionFactoryTest, TestOfferDtlsReofferDtls) { - f1_.set_secure(cricket::SEC_ENABLED); - f1_.set_certificate(cert1_); std::string digest_alg; ASSERT_TRUE( cert1_->GetSSLCertificate().GetSignatureDigestAlgorithm(&digest_alg)); @@ -215,19 +220,25 @@ TEST_F(TransportDescriptionFactoryTest, TestOfferDtlsReofferDtls) { } TEST_F(TransportDescriptionFactoryTest, TestAnswerDefault) { + std::string digest_alg; + ASSERT_TRUE( + cert1_->GetSSLCertificate().GetSignatureDigestAlgorithm(&digest_alg)); std::unique_ptr offer = f1_.CreateOffer(TransportOptions(), NULL, &ice_credentials_); ASSERT_TRUE(offer.get() != NULL); std::unique_ptr desc = f2_.CreateAnswer( offer.get(), TransportOptions(), true, NULL, &ice_credentials_); - CheckDesc(desc.get(), "", "", "", ""); + CheckDesc(desc.get(), "", "", "", digest_alg); desc = f2_.CreateAnswer(offer.get(), TransportOptions(), true, NULL, &ice_credentials_); - CheckDesc(desc.get(), "", "", "", ""); + CheckDesc(desc.get(), "", "", "", digest_alg); } // Test that we can update an answer properly; ICE credentials shouldn't change. TEST_F(TransportDescriptionFactoryTest, TestReanswer) { + std::string digest_alg; + ASSERT_TRUE( + cert1_->GetSSLCertificate().GetSignatureDigestAlgorithm(&digest_alg)); std::unique_ptr offer = f1_.CreateOffer(TransportOptions(), NULL, &ice_credentials_); ASSERT_TRUE(offer.get() != NULL); @@ -237,13 +248,13 @@ TEST_F(TransportDescriptionFactoryTest, TestReanswer) { std::unique_ptr desc = f2_.CreateAnswer( offer.get(), TransportOptions(), true, old_desc.get(), &ice_credentials_); ASSERT_TRUE(desc.get() != NULL); - CheckDesc(desc.get(), "", old_desc->ice_ufrag, old_desc->ice_pwd, ""); + CheckDesc(desc.get(), "", old_desc->ice_ufrag, old_desc->ice_pwd, digest_alg); } // Test that we handle answering an offer with DTLS with no DTLS. TEST_F(TransportDescriptionFactoryTest, TestAnswerDtlsToNoDtls) { - f1_.set_secure(cricket::SEC_ENABLED); - f1_.set_certificate(cert1_); + f2_.SetInsecureForTesting(); + f2_.set_certificate(nullptr); std::unique_ptr offer = f1_.CreateOffer(TransportOptions(), NULL, &ice_credentials_); ASSERT_TRUE(offer.get() != NULL); @@ -255,28 +266,25 @@ TEST_F(TransportDescriptionFactoryTest, TestAnswerDtlsToNoDtls) { // Test that we handle answering an offer without DTLS if we have DTLS enabled, // but fail if we require DTLS. TEST_F(TransportDescriptionFactoryTest, TestAnswerNoDtlsToDtls) { - f2_.set_secure(cricket::SEC_ENABLED); - f2_.set_certificate(cert2_); + f1_.SetInsecureForTesting(); + f1_.set_certificate(nullptr); std::unique_ptr offer = f1_.CreateOffer(TransportOptions(), NULL, &ice_credentials_); ASSERT_TRUE(offer.get() != NULL); + // Normal case. std::unique_ptr desc = f2_.CreateAnswer( offer.get(), TransportOptions(), true, NULL, &ice_credentials_); - CheckDesc(desc.get(), "", "", "", ""); - f2_.set_secure(cricket::SEC_REQUIRED); + ASSERT_TRUE(desc.get() == NULL); + // Insecure case. 
+ f2_.SetInsecureForTesting(); desc = f2_.CreateAnswer(offer.get(), TransportOptions(), true, NULL, &ice_credentials_); - ASSERT_TRUE(desc.get() == NULL); + CheckDesc(desc.get(), "", "", "", ""); } -// Test that we handle answering an DTLS offer with DTLS, both if we have -// DTLS enabled and required. +// Test that we handle answering an DTLS offer with DTLS, +// even if we don't require DTLS. TEST_F(TransportDescriptionFactoryTest, TestAnswerDtlsToDtls) { - f1_.set_secure(cricket::SEC_ENABLED); - f1_.set_certificate(cert1_); - - f2_.set_secure(cricket::SEC_ENABLED); - f2_.set_certificate(cert2_); // f2_ produces the answer that is being checked in this test, so the // answer must contain fingerprint lines with cert2_'s digest algorithm. std::string digest_alg2; @@ -289,7 +297,8 @@ TEST_F(TransportDescriptionFactoryTest, TestAnswerDtlsToDtls) { std::unique_ptr desc = f2_.CreateAnswer( offer.get(), TransportOptions(), true, NULL, &ice_credentials_); CheckDesc(desc.get(), "", "", "", digest_alg2); - f2_.set_secure(cricket::SEC_REQUIRED); + + f2_.SetInsecureForTesting(); desc = f2_.CreateAnswer(offer.get(), TransportOptions(), true, NULL, &ice_credentials_); CheckDesc(desc.get(), "", "", "", digest_alg2); @@ -322,9 +331,10 @@ TEST_F(TransportDescriptionFactoryTest, TestIceRenominationWithDtls) { // Test that offers and answers have ice-option:trickle. TEST_F(TransportDescriptionFactoryTest, AddsTrickleIceOption) { - cricket::TransportOptions options; + webrtc::TransportOptions options; std::unique_ptr offer = f1_.CreateOffer(options, nullptr, &ice_credentials_); + ASSERT_THAT(offer, NotNull()); EXPECT_TRUE(offer->HasOption("trickle")); std::unique_ptr answer = f2_.CreateAnswer(offer.get(), options, true, nullptr, &ice_credentials_); @@ -333,10 +343,10 @@ TEST_F(TransportDescriptionFactoryTest, AddsTrickleIceOption) { // Test CreateOffer with IceCredentialsIterator. TEST_F(TransportDescriptionFactoryTest, CreateOfferIceCredentialsIterator) { - std::vector credentials = { - cricket::IceParameters("kalle", "anka", false)}; - cricket::IceCredentialsIterator credentialsIterator(credentials); - cricket::TransportOptions options; + std::vector credentials = { + webrtc::IceParameters("kalle", "anka", false)}; + webrtc::IceCredentialsIterator credentialsIterator(credentials); + webrtc::TransportOptions options; std::unique_ptr offer = f1_.CreateOffer(options, nullptr, &credentialsIterator); EXPECT_EQ(offer->GetIceParameters().ufrag, credentials[0].ufrag); @@ -345,13 +355,13 @@ TEST_F(TransportDescriptionFactoryTest, CreateOfferIceCredentialsIterator) { // Test CreateAnswer with IceCredentialsIterator. 
TEST_F(TransportDescriptionFactoryTest, CreateAnswerIceCredentialsIterator) { - cricket::TransportOptions options; + webrtc::TransportOptions options; std::unique_ptr offer = f1_.CreateOffer(options, nullptr, &ice_credentials_); - std::vector credentials = { - cricket::IceParameters("kalle", "anka", false)}; - cricket::IceCredentialsIterator credentialsIterator(credentials); + std::vector credentials = { + webrtc::IceParameters("kalle", "anka", false)}; + webrtc::IceCredentialsIterator credentialsIterator(credentials); std::unique_ptr answer = f1_.CreateAnswer( offer.get(), options, false, nullptr, &credentialsIterator); EXPECT_EQ(answer->GetIceParameters().ufrag, credentials[0].ufrag); @@ -359,48 +369,33 @@ TEST_F(TransportDescriptionFactoryTest, CreateAnswerIceCredentialsIterator) { } TEST_F(TransportDescriptionFactoryTest, CreateAnswerToDtlsActpassOffer) { - f1_.set_secure(cricket::SEC_ENABLED); - f1_.set_certificate(cert1_); - - f2_.set_secure(cricket::SEC_ENABLED); - f2_.set_certificate(cert2_); - cricket::TransportOptions options; + webrtc::TransportOptions options; std::unique_ptr offer = f1_.CreateOffer(options, nullptr, &ice_credentials_); std::unique_ptr answer = f2_.CreateAnswer(offer.get(), options, false, nullptr, &ice_credentials_); - EXPECT_EQ(answer->connection_role, cricket::CONNECTIONROLE_ACTIVE); + EXPECT_EQ(answer->connection_role, webrtc::CONNECTIONROLE_ACTIVE); } TEST_F(TransportDescriptionFactoryTest, CreateAnswerToDtlsActiveOffer) { - f1_.set_secure(cricket::SEC_ENABLED); - f1_.set_certificate(cert1_); - - f2_.set_secure(cricket::SEC_ENABLED); - f2_.set_certificate(cert2_); - cricket::TransportOptions options; + webrtc::TransportOptions options; std::unique_ptr offer = f1_.CreateOffer(options, nullptr, &ice_credentials_); - offer->connection_role = cricket::CONNECTIONROLE_ACTIVE; + offer->connection_role = webrtc::CONNECTIONROLE_ACTIVE; std::unique_ptr answer = f2_.CreateAnswer(offer.get(), options, false, nullptr, &ice_credentials_); - EXPECT_EQ(answer->connection_role, cricket::CONNECTIONROLE_PASSIVE); + EXPECT_EQ(answer->connection_role, webrtc::CONNECTIONROLE_PASSIVE); } TEST_F(TransportDescriptionFactoryTest, CreateAnswerToDtlsPassiveOffer) { - f1_.set_secure(cricket::SEC_ENABLED); - f1_.set_certificate(cert1_); - - f2_.set_secure(cricket::SEC_ENABLED); - f2_.set_certificate(cert2_); - cricket::TransportOptions options; + webrtc::TransportOptions options; std::unique_ptr offer = f1_.CreateOffer(options, nullptr, &ice_credentials_); - offer->connection_role = cricket::CONNECTIONROLE_PASSIVE; + offer->connection_role = webrtc::CONNECTIONROLE_PASSIVE; std::unique_ptr answer = f2_.CreateAnswer(offer.get(), options, false, nullptr, &ice_credentials_); - EXPECT_EQ(answer->connection_role, cricket::CONNECTIONROLE_ACTIVE); + EXPECT_EQ(answer->connection_role, webrtc::CONNECTIONROLE_ACTIVE); } diff --git a/p2p/base/transport_description_unittest.cc b/p2p/base/transport_description_unittest.cc index c3746ba628..66e8e907a3 100644 --- a/p2p/base/transport_description_unittest.cc +++ b/p2p/base/transport_description_unittest.cc @@ -10,11 +10,14 @@ #include "p2p/base/transport_description.h" +#include + +#include "api/rtc_error.h" #include "test/gtest.h" using webrtc::RTCErrorType; -namespace cricket { +namespace webrtc { TEST(IceParameters, SuccessfulParse) { auto result = IceParameters::Parse("ufrag", "22+characters+long+pwd"); @@ -56,4 +59,4 @@ TEST(IceParameters, FailedParseBadPwdChar) { EXPECT_EQ(RTCErrorType::SYNTAX_ERROR, result.error().type()); } -} // namespace 
cricket +} // namespace webrtc diff --git a/p2p/base/transport_info.h b/p2p/base/transport_info.h index 1f60b64012..54689b5d1d 100644 --- a/p2p/base/transport_info.h +++ b/p2p/base/transport_info.h @@ -14,12 +14,9 @@ #include #include -#include "api/candidate.h" -#include "p2p/base/p2p_constants.h" #include "p2p/base/transport_description.h" -#include "rtc_base/helpers.h" -namespace cricket { +namespace webrtc { // A TransportInfo is NOT a transport-info message. It is comparable // to a "ContentInfo". A transport-infos message is basically just a @@ -37,6 +34,15 @@ struct TransportInfo { typedef std::vector TransportInfos; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::TransportInfo; +using ::webrtc::TransportInfos; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_TRANSPORT_INFO_H_ diff --git a/p2p/base/turn_port.cc b/p2p/base/turn_port.cc index 88c9a45c61..e0ef1c8384 100644 --- a/p2p/base/turn_port.cc +++ b/p2p/base/turn_port.cc @@ -10,33 +10,54 @@ #include "p2p/base/turn_port.h" +#include +#include +#include #include #include +#include +#include #include #include #include "absl/algorithm/container.h" #include "absl/strings/match.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/candidate.h" +#include "api/packet_socket_factory.h" +#include "api/scoped_refptr.h" #include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" #include "api/transport/stun.h" +#include "api/turn_customizer.h" +#include "api/units/time_delta.h" #include "p2p/base/connection.h" #include "p2p/base/p2p_constants.h" +#include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" +#include "p2p/base/port_interface.h" +#include "p2p/base/stun_request.h" #include "rtc_base/async_packet_socket.h" +#include "rtc_base/byte_buffer.h" #include "rtc_base/byte_order.h" +#include "rtc_base/callback_list.h" #include "rtc_base/checks.h" -#include "rtc_base/experiments/field_trial_parser.h" +#include "rtc_base/dscp.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" -#include "rtc_base/net_helpers.h" +#include "rtc_base/net_helper.h" +#include "rtc_base/network.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/socket.h" #include "rtc_base/socket_address.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/third_party/sigslot/sigslot.h" -namespace cricket { - -using ::webrtc::SafeTask; -using ::webrtc::TaskQueueBase; -using ::webrtc::TimeDelta; +namespace webrtc { // TODO(juberti): Move to stun.h when relay messages have been renamed. 
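
The same backwards-compatibility shim recurs in every header migrated by this patch. For clarity, the bare pattern in isolation (SomeType is a placeholder, not a real symbol):

    // Code moves into ::webrtc; the old ::cricket spelling stays available
    // only behind the macro, and only until callers are updated.
    namespace webrtc {
    struct SomeType {};  // placeholder for the migrated symbol
    }  // namespace webrtc

    #ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES
    namespace cricket {
    using ::webrtc::SomeType;  // old callers keep compiling, temporarily
    }  // namespace cricket
    #endif  // WEBRTC_ALLOW_DEPRECATED_NAMESPACES
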
static const int TURN_ALLOCATE_REQUEST = STUN_ALLOCATE_REQUEST; @@ -64,14 +85,14 @@ inline bool IsTurnChannelData(uint16_t msg_type) { return ((msg_type & 0xC000) == 0x4000); // MSB are 0b01 } -static int GetRelayPreference(cricket::ProtocolType proto) { +static int GetRelayPreference(ProtocolType proto) { switch (proto) { - case cricket::PROTO_TCP: + case webrtc::PROTO_TCP: return ICE_TYPE_PREFERENCE_RELAY_TCP; - case cricket::PROTO_TLS: + case webrtc::PROTO_TLS: return ICE_TYPE_PREFERENCE_RELAY_TLS; default: - RTC_DCHECK(proto == PROTO_UDP); + RTC_DCHECK(proto == webrtc::PROTO_UDP); return ICE_TYPE_PREFERENCE_RELAY_UDP; } } @@ -109,7 +130,7 @@ class TurnCreatePermissionRequest : public StunRequest { public: TurnCreatePermissionRequest(TurnPort* port, TurnEntry* entry, - const rtc::SocketAddress& ext_addr); + const SocketAddress& ext_addr); ~TurnCreatePermissionRequest() override; void OnSent() override; void OnResponse(StunMessage* response) override; @@ -119,15 +140,15 @@ class TurnCreatePermissionRequest : public StunRequest { private: TurnPort* port_; TurnEntry* entry_; - rtc::SocketAddress ext_addr_; + SocketAddress ext_addr_; }; class TurnChannelBindRequest : public StunRequest { public: TurnChannelBindRequest(TurnPort* port, TurnEntry* entry, - int channel_id, - const rtc::SocketAddress& ext_addr); + uint16_t channel_id, + const SocketAddress& ext_addr); ~TurnChannelBindRequest() override; void OnSent() override; void OnResponse(StunMessage* response) override; @@ -135,10 +156,10 @@ class TurnChannelBindRequest : public StunRequest { void OnTimeout() override; private: - TurnPort* port_; - TurnEntry* entry_; - int channel_id_; - rtc::SocketAddress ext_addr_; + TurnPort* const port_; + TurnEntry* entry_; // Could be WeakPtr. + const uint16_t channel_id_; + const SocketAddress ext_addr_; }; // Manages a "connection" to a remote destination. We will attempt to bring up @@ -151,11 +172,9 @@ class TurnEntry : public sigslot::has_slots<> { TurnPort* port() { return port_; } - int channel_id() const { return channel_id_; } - // For testing only. - void set_channel_id(int channel_id) { channel_id_ = channel_id; } + uint16_t channel_id() const { return channel_id_; } - const rtc::SocketAddress& address() const { return ext_addr_; } + const SocketAddress& address() const { return ext_addr_; } BindState state() const { return state_; } // Adds a new connection object to the list of connections that are associated @@ -170,8 +189,7 @@ class TurnEntry : public sigslot::has_slots<> { // timeout expires. If during this timeout `TrackConnection` is called, the // flag will be reset and pending tasks associated with it, cancelled. // * If `conn` was not the last connection, the return value will be nullptr. - rtc::scoped_refptr UntrackConnection( - Connection* conn); + scoped_refptr UntrackConnection(Connection* conn); // Helper methods to send permission and channel bind requests. void SendCreatePermissionRequest(int delay); @@ -181,7 +199,7 @@ class TurnEntry : public sigslot::has_slots<> { int Send(const void* data, size_t size, bool payload, - const rtc::PacketOptions& options); + const AsyncSocketPacketOptions& options); void OnCreatePermissionSuccess(); void OnCreatePermissionError(StunMessage* response, int code); @@ -190,51 +208,41 @@ class TurnEntry : public sigslot::has_slots<> { void OnChannelBindError(StunMessage* response, int code); void OnChannelBindTimeout(); // Signal sent when TurnEntry is destroyed. 
- webrtc::CallbackList destroyed_callback_list_; + CallbackList destroyed_callback_list_; private: - TurnPort* port_; - int channel_id_; - rtc::SocketAddress ext_addr_; + TurnPort* const port_; + const uint16_t channel_id_; + const SocketAddress ext_addr_; BindState state_; // List of associated connection instances to keep track of how many and // which connections are associated with this entry. Once this is empty, // the entry can be deleted. std::vector connections_; - webrtc::ScopedTaskSafety task_safety_; + ScopedTaskSafety task_safety_; }; -TurnPort::TurnPort(TaskQueueBase* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, - rtc::AsyncPacketSocket* socket, - absl::string_view username, - absl::string_view password, +TurnPort::TurnPort(const PortParametersRef& args, + AsyncPacketSocket* socket, const ProtocolAddress& server_address, const RelayCredentials& credentials, int server_priority, const std::vector& tls_alpn_protocols, const std::vector& tls_elliptic_curves, - webrtc::TurnCustomizer* customizer, - rtc::SSLCertificateVerifier* tls_cert_verifier, - const webrtc::FieldTrialsView* field_trials) - : Port(thread, - RELAY_PORT_TYPE, - factory, - network, - username, - password, - field_trials), + TurnCustomizer* customizer, + SSLCertificateVerifier* tls_cert_verifier) + : Port(args, IceCandidateType::kRelay), server_address_(server_address), + server_url_(ReconstructServerUrl()), tls_alpn_protocols_(tls_alpn_protocols), tls_elliptic_curves_(tls_elliptic_curves), tls_cert_verifier_(tls_cert_verifier), credentials_(credentials), socket_(socket), error_(0), - stun_dscp_value_(rtc::DSCP_NO_CHANGE), + stun_dscp_value_(webrtc::DSCP_NO_CHANGE), request_manager_( - thread, + args.network_thread, [this](const void* data, size_t size, StunRequest* request) { OnSendStunPacket(data, size, request); }), @@ -244,40 +252,28 @@ TurnPort::TurnPort(TaskQueueBase* thread, allocate_mismatch_retries_(0), turn_customizer_(customizer) {} -TurnPort::TurnPort(TaskQueueBase* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, +TurnPort::TurnPort(const PortParametersRef& args, uint16_t min_port, uint16_t max_port, - absl::string_view username, - absl::string_view password, const ProtocolAddress& server_address, const RelayCredentials& credentials, int server_priority, const std::vector& tls_alpn_protocols, const std::vector& tls_elliptic_curves, - webrtc::TurnCustomizer* customizer, - rtc::SSLCertificateVerifier* tls_cert_verifier, - const webrtc::FieldTrialsView* field_trials) - : Port(thread, - RELAY_PORT_TYPE, - factory, - network, - min_port, - max_port, - username, - password, - field_trials), + TurnCustomizer* customizer, + SSLCertificateVerifier* tls_cert_verifier) + : Port(args, IceCandidateType::kRelay, min_port, max_port), server_address_(server_address), + server_url_(ReconstructServerUrl()), tls_alpn_protocols_(tls_alpn_protocols), tls_elliptic_curves_(tls_elliptic_curves), tls_cert_verifier_(tls_cert_verifier), credentials_(credentials), socket_(nullptr), error_(0), - stun_dscp_value_(rtc::DSCP_NO_CHANGE), + stun_dscp_value_(webrtc::DSCP_NO_CHANGE), request_manager_( - thread, + args.network_thread, [this](const void* data, size_t size, StunRequest* request) { OnSendStunPacket(data, size, request); }), @@ -306,8 +302,22 @@ TurnPort::~TurnPort() { } } -rtc::SocketAddress TurnPort::GetLocalAddress() const { - return socket_ ? 
socket_->GetLocalAddress() : rtc::SocketAddress(); +void TurnPort::set_realm(absl::string_view realm) { + if (realm.empty()) { + // Fail silently since this reduces the entropy going into the hash but log + // a warning. + RTC_LOG(LS_WARNING) << "Setting realm to the empty string, " + << "this is not supported."; + return; + } + if (realm != realm_) { + realm_ = std::string(realm); + UpdateHash(); + } +} + +SocketAddress TurnPort::GetLocalAddress() const { + return socket_ ? socket_->GetLocalAddress() : SocketAddress(); } ProtocolType TurnPort::GetProtocol() const { @@ -336,7 +346,8 @@ std::vector TurnPort::GetTlsEllipticCurves() const { void TurnPort::PrepareAddress() { if (credentials_.username.empty() || credentials_.password.empty()) { - RTC_LOG(LS_ERROR) << "Allocation can't be started without setting the" + RTC_LOG(LS_ERROR) << ToString() + << ": Allocation can't be started without setting the" " TURN server credentials for the user."; OnAllocateError(STUN_ERROR_UNAUTHORIZED, "Missing TURN server credentials."); @@ -348,10 +359,11 @@ void TurnPort::PrepareAddress() { server_address_.address.SetPort(TURN_DEFAULT_PORT); } - if (!AllowedTurnPort(server_address_.address.port(), &field_trials())) { + if (!AllowedTurnPort(server_address_.address.port())) { // This can only happen after a 300 ALTERNATE SERVER, since the port can't // be created with a disallowed port number. - RTC_LOG(LS_ERROR) << "Attempt to start allocation with disallowed port# " + RTC_LOG(LS_ERROR) << ToString() + << ": Attempt to start allocation with disallowed port# " << server_address_.address.port(); OnAllocateError(STUN_ERROR_SERVER_ERROR, "Attempt to start allocation to a disallowed port"); @@ -362,11 +374,12 @@ void TurnPort::PrepareAddress() { } else { // If protocol family of server address doesn't match with local, return. if (!IsCompatibleAddress(server_address_.address)) { - RTC_LOG(LS_ERROR) << "IP address family does not match. server: " + RTC_LOG(LS_ERROR) << ToString() + << ": IP address family does not match. Server: " << server_address_.address.family() << " local: " << Network()->GetBestIP().family(); - OnAllocateError(STUN_ERROR_GLOBAL_FAILURE, - "IP address family does not match."); + OnAllocateError(STUN_ERROR_NOT_AN_ERROR, + "TURN server address is incompatible."); return; } @@ -378,12 +391,13 @@ void TurnPort::PrepareAddress() { << ProtoToString(server_address_.proto) << " @ " << server_address_.address.ToSensitiveNameAndAddressString(); if (!CreateTurnClientSocket()) { - RTC_LOG(LS_ERROR) << "Failed to create TURN client socket"; - OnAllocateError(SERVER_NOT_REACHABLE_ERROR, + RTC_LOG(LS_ERROR) << ToString() + << ": Failed to create TURN client socket"; + OnAllocateError(STUN_ERROR_SERVER_NOT_REACHABLE, "Failed to create TURN client socket."); return; } - if (server_address_.proto == PROTO_UDP) { + if (server_address_.proto == webrtc::PROTO_UDP) { // If its UDP, send AllocateRequest now. // For TCP and TLS AllcateRequest will be sent by OnSocketConnect. 
SendRequest(new TurnAllocateRequest(this), 0); @@ -394,32 +408,32 @@ void TurnPort::PrepareAddress() { bool TurnPort::CreateTurnClientSocket() { RTC_DCHECK(!socket_ || SharedSocket()); - if (server_address_.proto == PROTO_UDP && !SharedSocket()) { + if (server_address_.proto == webrtc::PROTO_UDP && !SharedSocket()) { socket_ = socket_factory()->CreateUdpSocket( - rtc::SocketAddress(Network()->GetBestIP(), 0), min_port(), max_port()); - } else if (server_address_.proto == PROTO_TCP || - server_address_.proto == PROTO_TLS) { + SocketAddress(Network()->GetBestIP(), 0), min_port(), max_port()); + } else if (server_address_.proto == webrtc::PROTO_TCP || + server_address_.proto == webrtc::PROTO_TLS) { RTC_DCHECK(!SharedSocket()); - int opts = rtc::PacketSocketFactory::OPT_STUN; + int opts = PacketSocketFactory::OPT_STUN; // Apply server address TLS and insecure bits to options. - if (server_address_.proto == PROTO_TLS) { + if (server_address_.proto == webrtc::PROTO_TLS) { if (tls_cert_policy_ == TlsCertPolicy::TLS_CERT_POLICY_INSECURE_NO_CHECK) { - opts |= rtc::PacketSocketFactory::OPT_TLS_INSECURE; + opts |= PacketSocketFactory::OPT_TLS_INSECURE; } else { - opts |= rtc::PacketSocketFactory::OPT_TLS; + opts |= PacketSocketFactory::OPT_TLS; } } - rtc::PacketSocketTcpOptions tcp_options; + PacketSocketTcpOptions tcp_options; tcp_options.opts = opts; tcp_options.tls_alpn_protocols = tls_alpn_protocols_; tcp_options.tls_elliptic_curves = tls_elliptic_curves_; tcp_options.tls_cert_verifier = tls_cert_verifier_; socket_ = socket_factory()->CreateClientTcpSocket( - rtc::SocketAddress(Network()->GetBestIP(), 0), server_address_.address, - proxy(), user_agent(), tcp_options); + SocketAddress(Network()->GetBestIP(), 0), server_address_.address, + tcp_options); } if (!socket_) { @@ -435,7 +449,10 @@ bool TurnPort::CreateTurnClientSocket() { if (!SharedSocket()) { // If socket is shared, AllocationSequence will receive the packet. - socket_->SignalReadPacket.connect(this, &TurnPort::OnReadPacket); + socket_->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + OnReadPacket(socket, packet); + }); } socket_->SignalReadyToSend.connect(this, &TurnPort::OnReadyToSend); @@ -444,23 +461,22 @@ bool TurnPort::CreateTurnClientSocket() { // TCP port is ready to send stun requests after the socket is connected, // while UDP port is ready to do so once the socket is created. - if (server_address_.proto == PROTO_TCP || - server_address_.proto == PROTO_TLS) { + if (server_address_.proto == webrtc::PROTO_TCP || + server_address_.proto == webrtc::PROTO_TLS) { socket_->SignalConnect.connect(this, &TurnPort::OnSocketConnect); socket_->SubscribeCloseEvent( - this, - [this](rtc::AsyncPacketSocket* s, int err) { OnSocketClose(s, err); }); + this, [this](AsyncPacketSocket* s, int err) { OnSocketClose(s, err); }); } else { state_ = STATE_CONNECTED; } return true; } -void TurnPort::OnSocketConnect(rtc::AsyncPacketSocket* socket) { +void TurnPort::OnSocketConnect(AsyncPacketSocket* socket) { // This slot should only be invoked if we're using a connection-oriented // protocol. - RTC_DCHECK(server_address_.proto == PROTO_TCP || - server_address_.proto == PROTO_TLS); + RTC_DCHECK(server_address_.proto == webrtc::PROTO_TCP || + server_address_.proto == webrtc::PROTO_TLS); // Do not use this port if the socket bound to an address not associated with // the desired network interface. 
This is seen in Chrome, where TCP sockets @@ -476,32 +492,32 @@ void TurnPort::OnSocketConnect(rtc::AsyncPacketSocket* socket) { // // Note that, aside from minor differences in log statements, this logic is // identical to that in TcpPort. - const rtc::SocketAddress& socket_address = socket->GetLocalAddress(); + const SocketAddress& socket_address = socket->GetLocalAddress(); if (absl::c_none_of(Network()->GetIPs(), - [socket_address](const rtc::InterfaceAddress& addr) { + [socket_address](const InterfaceAddress& addr) { return socket_address.ipaddr() == addr; })) { if (socket->GetLocalAddress().IsLoopbackIP()) { - RTC_LOG(LS_WARNING) << "Socket is bound to the address:" + RTC_LOG(LS_WARNING) << ToString() << ": Socket is bound to the address:" << socket_address.ToSensitiveNameAndAddressString() << ", rather than an address associated with network:" << Network()->ToString() << ". Still allowing it since it's localhost."; - } else if (IPIsAny(Network()->GetBestIP())) { + } else if (webrtc::IPIsAny(Network()->GetBestIP())) { RTC_LOG(LS_WARNING) - << "Socket is bound to the address:" + << ToString() << ": Socket is bound to the address:" << socket_address.ToSensitiveNameAndAddressString() << ", rather than an address associated with network:" << Network()->ToString() << ". Still allowing it since it's the 'any' address" ", possibly caused by multiple_routes being disabled."; } else { - RTC_LOG(LS_WARNING) << "Socket is bound to the address:" + RTC_LOG(LS_WARNING) << ToString() << ": Socket is bound to the address:" << socket_address.ToSensitiveNameAndAddressString() << ", rather than an address associated with network:" << Network()->ToString() << ". Discarding TURN port."; OnAllocateError( - STUN_ERROR_GLOBAL_FAILURE, + STUN_ERROR_SERVER_NOT_REACHABLE, "Address not associated with the desired network interface."); return; } @@ -518,7 +534,7 @@ void TurnPort::OnSocketConnect(rtc::AsyncPacketSocket* socket) { SendRequest(new TurnAllocateRequest(this), 0); } -void TurnPort::OnSocketClose(rtc::AsyncPacketSocket* socket, int error) { +void TurnPort::OnSocketClose(AsyncPacketSocket* socket, int error) { RTC_LOG(LS_WARNING) << ToString() << ": Connection with server failed with error: " << error; @@ -578,9 +594,8 @@ Connection* TurnPort::CreateConnection(const Candidate& remote_candidate, // and TURN candidate later. for (size_t index = 0; index < Candidates().size(); ++index) { const Candidate& local_candidate = Candidates()[index]; - if (local_candidate.type() == RELAY_PORT_TYPE && - local_candidate.address().family() == - remote_candidate.address().family()) { + if (local_candidate.is_relay() && local_candidate.address().family() == + remote_candidate.address().family()) { ProxyConnection* conn = new ProxyConnection(NewWeakPtr(), index, remote_candidate); // Create an entry, if needed, so we can get our permissions set up @@ -595,7 +610,7 @@ Connection* TurnPort::CreateConnection(const Candidate& remote_candidate, return nullptr; } -bool TurnPort::FailAndPruneConnection(const rtc::SocketAddress& address) { +bool TurnPort::FailAndPruneConnection(const SocketAddress& address) { Connection* conn = GetConnection(address); if (conn != nullptr) { conn->FailAndPrune(); @@ -604,10 +619,10 @@ bool TurnPort::FailAndPruneConnection(const rtc::SocketAddress& address) { return false; } -int TurnPort::SetOption(rtc::Socket::Option opt, int value) { +int TurnPort::SetOption(Socket::Option opt, int value) { // Remember the last requested DSCP value, for STUN traffic. 
- if (opt == rtc::Socket::OPT_DSCP) - stun_dscp_value_ = static_cast(value); + if (opt == Socket::OPT_DSCP) + stun_dscp_value_ = static_cast(value); if (!socket_) { // If socket is not created yet, these options will be applied during socket @@ -618,7 +633,7 @@ int TurnPort::SetOption(rtc::Socket::Option opt, int value) { return socket_->SetOption(opt, value); } -int TurnPort::GetOption(rtc::Socket::Option opt, int* value) { +int TurnPort::GetOption(Socket::Option opt, int* value) { if (!socket_) { SocketOptionsMap::const_iterator it = socket_options_.find(opt); if (it == socket_options_.end()) { @@ -637,8 +652,8 @@ int TurnPort::GetError() { int TurnPort::SendTo(const void* data, size_t size, - const rtc::SocketAddress& addr, - const rtc::PacketOptions& options, + const SocketAddress& addr, + const AsyncSocketPacketOptions& options, bool payload) { // Try to find an entry for this specific address; we should have one. TurnEntry* entry = FindEntry(addr); @@ -650,7 +665,7 @@ int TurnPort::SendTo(const void* data, } // Send the actual contents to the server using the usual mechanism. - rtc::PacketOptions modified_options(options); + AsyncSocketPacketOptions modified_options(options); CopyPortInformationToPacketInfo(&modified_options.info_signaled_after_sent); int sent = entry->Send(data, size, payload, modified_options); if (sent <= 0) { @@ -663,13 +678,12 @@ int TurnPort::SendTo(const void* data, return static_cast(size); } -bool TurnPort::CanHandleIncomingPacketsFrom( - const rtc::SocketAddress& addr) const { +bool TurnPort::CanHandleIncomingPacketsFrom(const SocketAddress& addr) const { return server_address_.address == addr; } void TurnPort::SendBindingErrorResponse(StunMessage* message, - const rtc::SocketAddress& addr, + const SocketAddress& addr, int error_code, absl::string_view reason) { if (!GetConnection(addr)) @@ -678,11 +692,8 @@ void TurnPort::SendBindingErrorResponse(StunMessage* message, Port::SendBindingErrorResponse(message, addr, error_code, reason); } -bool TurnPort::HandleIncomingPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - int64_t packet_time_us) { +bool TurnPort::HandleIncomingPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet) { if (socket != socket_) { // The packet was received on a shared socket after we've allocated a new // socket for this TURN port. @@ -692,16 +703,17 @@ bool TurnPort::HandleIncomingPacket(rtc::AsyncPacketSocket* socket, // This is to guard against a STUN response from previous server after // alternative server redirection. TODO(guoweis): add a unit test for this // race condition. - if (remote_addr != server_address_.address) { + if (packet.source_address() != server_address_.address) { RTC_LOG(LS_WARNING) << ToString() << ": Discarding TURN message from unknown address: " - << remote_addr.ToSensitiveNameAndAddressString() << " server_address_: " + << packet.source_address().ToSensitiveNameAndAddressString() + << " server_address_: " << server_address_.address.ToSensitiveNameAndAddressString(); return false; } // The message must be at least the size of a channel header. 
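
The packet-path changes above all follow from replacing the loose (data, size, remote_addr, packet_time_us) arguments with a single ReceivedIpPacket. A minimal sketch of reading the same information back out of the new type, assuming only the accessors that actually appear in this patch (payload(), source_address(), arrival_time()) and the header path included above; the function name InspectPacket is illustrative.

    #include <cstdint>

    #include "rtc_base/network/received_packet.h"
    #include "rtc_base/socket_address.h"

    // Sketch: recovering the legacy fields from a ReceivedIpPacket.
    void InspectPacket(const webrtc::ReceivedIpPacket& packet) {
      const uint8_t* data = packet.payload().data();
      size_t size = packet.payload().size();
      const webrtc::SocketAddress& remote_addr = packet.source_address();
      // arrival_time() is optional; -1 mirrors the "unknown" convention above.
      int64_t packet_time_us =
          packet.arrival_time() ? packet.arrival_time()->us() : -1;
      (void)data;
      (void)size;
      (void)remote_addr;
      (void)packet_time_us;
    }
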
- if (size < TURN_CHANNEL_HEADER_SIZE) { + if (packet.payload().size() < TURN_CHANNEL_HEADER_SIZE) { RTC_LOG(LS_WARNING) << ToString() << ": Received TURN message that was too short"; return false; @@ -714,10 +726,15 @@ bool TurnPort::HandleIncomingPacket(rtc::AsyncPacketSocket* socket, return false; } + const char* data = reinterpret_cast(packet.payload().data()); + int size = packet.payload().size(); + int64_t packet_time_us = + packet.arrival_time() ? packet.arrival_time()->us() : -1; + // Check the message type, to see if is a Channel Data message. // The message will either be channel data, a TURN data indication, or // a response to a previous request. - uint16_t msg_type = rtc::GetBE16(data); + uint16_t msg_type = webrtc::GetBE16(packet.payload().data()); if (IsTurnChannelData(msg_type)) { HandleChannelData(msg_type, data, size, packet_time_us); return true; @@ -741,20 +758,17 @@ bool TurnPort::HandleIncomingPacket(rtc::AsyncPacketSocket* socket, return true; } -void TurnPort::OnReadPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - const int64_t& packet_time_us) { - HandleIncomingPacket(socket, data, size, remote_addr, packet_time_us); +void TurnPort::OnReadPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet) { + HandleIncomingPacket(socket, packet); } -void TurnPort::OnSentPacket(rtc::AsyncPacketSocket* socket, - const rtc::SentPacket& sent_packet) { +void TurnPort::OnSentPacket(AsyncPacketSocket* socket, + const SentPacketInfo& sent_packet) { PortInterface::SignalSentPacket(sent_packet); } -void TurnPort::OnReadyToSend(rtc::AsyncPacketSocket* socket) { +void TurnPort::OnReadyToSend(AsyncPacketSocket* socket) { if (ready()) { Port::OnReadyToSend(); } @@ -762,11 +776,11 @@ void TurnPort::OnReadyToSend(rtc::AsyncPacketSocket* socket) { bool TurnPort::SupportsProtocol(absl::string_view protocol) const { // Turn port only connects to UDP candidates. - return protocol == UDP_PROTOCOL_NAME; + return protocol == webrtc::UDP_PROTOCOL_NAME; } // Update current server address port with the alternate server address port. -bool TurnPort::SetAlternateServer(const rtc::SocketAddress& address) { +bool TurnPort::SetAlternateServer(const SocketAddress& address) { // Check if we have seen this address before and reject if we did. AttemptedServerSet::iterator iter = attempted_server_addresses_.find(address); if (iter != attempted_server_addresses_.end()) { @@ -778,7 +792,8 @@ bool TurnPort::SetAlternateServer(const rtc::SocketAddress& address) { // If protocol family of server address doesn't match with local, return. if (!IsCompatibleAddress(address)) { - RTC_LOG(LS_WARNING) << "Server IP address family does not match with " + RTC_LOG(LS_WARNING) << ToString() + << ": Server IP address family does not match with " "local host address family type"; return false; } @@ -802,7 +817,7 @@ bool TurnPort::SetAlternateServer(const rtc::SocketAddress& address) { return true; } -void TurnPort::ResolveTurnAddress(const rtc::SocketAddress& address) { +void TurnPort::ResolveTurnAddress(const SocketAddress& address) { if (resolver_) return; @@ -816,10 +831,11 @@ void TurnPort::ResolveTurnAddress(const rtc::SocketAddress& address) { // assuming socket layer will resolve the hostname through a HTTP proxy (if // any). 
auto& result = resolver_->result(); - if (result.GetError() != 0 && (server_address_.proto == PROTO_TCP || - server_address_.proto == PROTO_TLS)) { + if (result.GetError() != 0 && + (server_address_.proto == webrtc::PROTO_TCP || + server_address_.proto == webrtc::PROTO_TLS)) { if (!CreateTurnClientSocket()) { - OnAllocateError(SERVER_NOT_REACHABLE_ERROR, + OnAllocateError(STUN_ERROR_SERVER_NOT_REACHABLE, "TURN host lookup received error."); } return; @@ -827,14 +843,14 @@ void TurnPort::ResolveTurnAddress(const rtc::SocketAddress& address) { // Copy the original server address in `resolved_address`. For TLS based // sockets we need hostname along with resolved address. - rtc::SocketAddress resolved_address = server_address_.address; + SocketAddress resolved_address = server_address_.address; if (result.GetError() != 0 || !result.GetResolvedAddress(Network()->GetBestIP().family(), &resolved_address)) { RTC_LOG(LS_WARNING) << ToString() << ": TURN host lookup received error " << result.GetError(); error_ = result.GetError(); - OnAllocateError(SERVER_NOT_REACHABLE_ERROR, + OnAllocateError(STUN_ERROR_SERVER_NOT_REACHABLE, "TURN host lookup received error."); return; } @@ -848,8 +864,8 @@ void TurnPort::OnSendStunPacket(const void* data, size_t size, StunRequest* request) { RTC_DCHECK(connected()); - rtc::PacketOptions options(StunDscpValue()); - options.info_signaled_after_sent.packet_type = rtc::PacketType::kTurnMessage; + AsyncSocketPacketOptions options(StunDscpValue()); + options.info_signaled_after_sent.packet_type = PacketType::kTurnMessage; CopyPortInformationToPacketInfo(&options.info_signaled_after_sent); if (Send(data, size, options) < 0) { RTC_LOG(LS_ERROR) << ToString() << ": Failed to send TURN message, error: " @@ -857,7 +873,7 @@ void TurnPort::OnSendStunPacket(const void* data, } } -void TurnPort::OnStunAddress(const rtc::SocketAddress& address) { +void TurnPort::OnStunAddress(const SocketAddress& address) { // STUN Port will discover STUN candidate, as it's supplied with first TURN // server address. // Why not using this address? - P2PTransportChannel will start creating @@ -867,21 +883,22 @@ void TurnPort::OnStunAddress(const rtc::SocketAddress& address) { // handle to UDPPort to pass back the address. } -void TurnPort::OnAllocateSuccess(const rtc::SocketAddress& address, - const rtc::SocketAddress& stun_address) { +void TurnPort::OnAllocateSuccess(const SocketAddress& address, + const SocketAddress& stun_address) { state_ = STATE_READY; - rtc::SocketAddress related_address = stun_address; + SocketAddress related_address = stun_address; // For relayed candidate, Base is the candidate itself. AddAddress(address, // Candidate address. address, // Base address. related_address, // Related address. - UDP_PROTOCOL_NAME, + webrtc::UDP_PROTOCOL_NAME, ProtoToString(server_address_.proto), // The first hop protocol. "", // TCP candidate type, empty for turn candidates. 
- RELAY_PORT_TYPE, GetRelayPreference(server_address_.proto), - server_priority_, ReconstructedServerUrl(), true); + IceCandidateType::kRelay, + GetRelayPreference(server_address_.proto), server_priority_, + server_url_, true); } void TurnPort::OnAllocateError(int error_code, absl::string_view reason) { @@ -892,14 +909,16 @@ void TurnPort::OnAllocateError(int error_code, absl::string_view reason) { SafeTask(task_safety_.flag(), [this] { SignalPortError(this); })); std::string address = GetLocalAddress().HostAsSensitiveURIString(); int port = GetLocalAddress().port(); - if (server_address_.proto == PROTO_TCP && + if (server_address_.proto == webrtc::PROTO_TCP && server_address_.address.IsPrivateIP()) { address.clear(); port = 0; } - SignalCandidateError( - this, IceCandidateErrorEvent(address, port, ReconstructedServerUrl(), - error_code, reason)); + if (error_code != STUN_ERROR_NOT_AN_ERROR) { + SignalCandidateError( + this, + IceCandidateErrorEvent(address, port, server_url_, error_code, reason)); + } } void TurnPort::OnRefreshError() { @@ -932,7 +951,10 @@ void TurnPort::Release() { void TurnPort::Close() { if (!ready()) { - OnAllocateError(SERVER_NOT_REACHABLE_ERROR, ""); + OnAllocateError(STUN_ERROR_SERVER_NOT_REACHABLE, + GetProtocol() != webrtc::PROTO_UDP + ? "Failed to establish connection" + : ""); } request_manager_.Clear(); // Stop the port from creating new connections. @@ -944,28 +966,22 @@ void TurnPort::Close() { } } -rtc::DiffServCodePoint TurnPort::StunDscpValue() const { +DiffServCodePoint TurnPort::StunDscpValue() const { return stun_dscp_value_; } // static -bool TurnPort::AllowedTurnPort(int port, - const webrtc::FieldTrialsView* field_trials) { +bool TurnPort::AllowedTurnPort(int port) { // Port 53, 80 and 443 are used for existing deployments. // Ports above 1024 are assumed to be OK to use. if (port == 53 || port == 80 || port == 443 || port >= 1024) { return true; } - // Allow any port if relevant field trial is set. This allows disabling the - // check. - if (field_trials && field_trials->IsEnabled("WebRTC-Turn-AllowSystemPorts")) { - return true; - } return false; } void TurnPort::TryAlternateServer() { - if (server_address().proto == PROTO_UDP) { + if (server_address().proto == webrtc::PROTO_UDP) { // Send another allocate request to alternate server, with the received // realm and nonce values. SendRequest(new TurnAllocateRequest(this), 0); @@ -973,8 +989,8 @@ void TurnPort::TryAlternateServer() { // Since it's TCP, we have to delete the connected socket and reconnect // with the alternate server. PrepareAddress will send stun binding once // the new socket is connected. - RTC_DCHECK(server_address().proto == PROTO_TCP || - server_address().proto == PROTO_TLS); + RTC_DCHECK(server_address().proto == webrtc::PROTO_TCP || + server_address().proto == webrtc::PROTO_TLS); RTC_DCHECK(!SharedSocket()); delete socket_; socket_ = nullptr; @@ -983,7 +999,7 @@ void TurnPort::TryAlternateServer() { } void TurnPort::OnAllocateRequestTimeout() { - OnAllocateError(SERVER_NOT_REACHABLE_ERROR, + OnAllocateError(STUN_ERROR_SERVER_NOT_REACHABLE, "TURN allocate request timed out."); } @@ -991,7 +1007,8 @@ void TurnPort::HandleDataIndication(const char* data, size_t size, int64_t packet_time_us) { // Read in the message, and process according to RFC5766, Section 10.4. 
- rtc::ByteBufferReader buf(data, size); + ByteBufferReader buf( + MakeArrayView(reinterpret_cast(data), size)); TurnMessage msg; if (!msg.Read(&buf)) { RTC_LOG(LS_WARNING) << ToString() @@ -1019,19 +1036,21 @@ void TurnPort::HandleDataIndication(const char* data, // Log a warning if the data didn't come from an address that we think we have // a permission for. - rtc::SocketAddress ext_addr(addr_attr->GetAddress()); + SocketAddress ext_addr(addr_attr->GetAddress()); if (!HasPermission(ext_addr.ipaddr())) { RTC_LOG(LS_WARNING) << ToString() << ": Received TURN data indication with unknown " "peer address, addr: " << ext_addr.ToSensitiveString(); } - - DispatchPacket(data_attr->bytes(), data_attr->length(), ext_addr, PROTO_UDP, + // TODO(bugs.webrtc.org/14870): rebuild DispatchPacket to take an + // ArrayView + DispatchPacket(reinterpret_cast(data_attr->array_view().data()), + data_attr->length(), ext_addr, webrtc::PROTO_UDP, packet_time_us); } -void TurnPort::HandleChannelData(int channel_id, +void TurnPort::HandleChannelData(uint16_t channel_id, const char* data, size_t size, int64_t packet_time_us) { @@ -1050,7 +1069,7 @@ void TurnPort::HandleChannelData(int channel_id, // +-------------------------------+ // Extract header fields from the message. - uint16_t len = rtc::GetBE16(data + 2); + uint16_t len = webrtc::GetBE16(data + 2); if (len > size - TURN_CHANNEL_HEADER_SIZE) { RTC_LOG(LS_WARNING) << ToString() << ": Received TURN channel data message with " @@ -1070,18 +1089,20 @@ void TurnPort::HandleChannelData(int channel_id, } DispatchPacket(data + TURN_CHANNEL_HEADER_SIZE, len, entry->address(), - PROTO_UDP, packet_time_us); + webrtc::PROTO_UDP, packet_time_us); } void TurnPort::DispatchPacket(const char* data, size_t size, - const rtc::SocketAddress& remote_addr, + const SocketAddress& remote_addr, ProtocolType proto, int64_t packet_time_us) { + ReceivedIpPacket packet = ReceivedIpPacket::CreateFromLegacy( + data, size, packet_time_us, remote_addr); if (Connection* conn = GetConnection(remote_addr)) { - conn->OnReadPacket(data, size, packet_time_us); + conn->OnReadPacket(packet); } else { - Port::OnReadPacket(data, size, remote_addr, proto); + Port::OnReadPacket(packet, proto); } } @@ -1137,7 +1158,7 @@ void TurnPort::AddRequestAuthInfo(StunMessage* msg) { int TurnPort::Send(const void* data, size_t len, - const rtc::PacketOptions& options) { + const AsyncSocketPacketOptions& options) { return socket_->SendTo(data, len, server_address_.address, options); } @@ -1154,7 +1175,8 @@ bool TurnPort::UpdateNonce(StunMessage* response) { const StunByteStringAttribute* realm_attr = response->GetByteString(STUN_ATTR_REALM); if (!realm_attr) { - RTC_LOG(LS_ERROR) << "Missing STUN_ATTR_REALM attribute in " + RTC_LOG(LS_ERROR) << ToString() + << ": Missing STUN_ATTR_REALM attribute in " "stale nonce error response."; return false; } @@ -1163,7 +1185,8 @@ bool TurnPort::UpdateNonce(StunMessage* response) { const StunByteStringAttribute* nonce_attr = response->GetByteString(STUN_ATTR_NONCE); if (!nonce_attr) { - RTC_LOG(LS_ERROR) << "Missing STUN_ATTR_NONCE attribute in " + RTC_LOG(LS_ERROR) << ToString() + << ": Missing STUN_ATTR_NONCE attribute in " "stale nonce error response."; return false; } @@ -1177,19 +1200,19 @@ void TurnPort::ResetNonce() { realm_.clear(); } -bool TurnPort::HasPermission(const rtc::IPAddress& ipaddr) const { +bool TurnPort::HasPermission(const IPAddress& ipaddr) const { return absl::c_any_of(entries_, [&ipaddr](const auto& e) { return e->address().ipaddr() == ipaddr; }); 
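
HandleChannelData above relies on the fixed four-byte ChannelData framing from RFC 5766, combined with the 0x4000 prefix check done by IsTurnChannelData earlier in this file. A small sketch of that header parse, using the same webrtc::GetBE16 helper; ParseChannelDataHeader and the ChannelDataHeader struct are illustrative names, not part of the patch.

    #include <cstddef>
    #include <cstdint>
    #include <optional>

    #include "rtc_base/byte_order.h"

    struct ChannelDataHeader {
      uint16_t channel_id;
      uint16_t payload_length;
    };

    // Sketch: decode the 4-byte TURN ChannelData header (RFC 5766, 11.4).
    std::optional<ChannelDataHeader> ParseChannelDataHeader(const char* data,
                                                            size_t size) {
      constexpr size_t kTurnChannelHeaderSize = 4;
      if (size < kTurnChannelHeaderSize) {
        return std::nullopt;  // Too short to carry even a header.
      }
      uint16_t channel_id = webrtc::GetBE16(data);
      // Channel numbers occupy 0x4000-0x7FFF; the top two bits being 0b01 is
      // exactly what IsTurnChannelData() tests for.
      if ((channel_id & 0xC000) != 0x4000) {
        return std::nullopt;
      }
      uint16_t length = webrtc::GetBE16(data + 2);
      if (length > size - kTurnChannelHeaderSize) {
        return std::nullopt;  // Truncated payload.
      }
      return ChannelDataHeader{channel_id, length};
    }
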
} -TurnEntry* TurnPort::FindEntry(const rtc::SocketAddress& addr) const { +TurnEntry* TurnPort::FindEntry(const SocketAddress& addr) const { auto it = absl::c_find_if( entries_, [&addr](const auto& e) { return e->address() == addr; }); return (it != entries_.end()) ? it->get() : nullptr; } -TurnEntry* TurnPort::FindEntry(int channel_id) const { +TurnEntry* TurnPort::FindEntry(uint16_t channel_id) const { auto it = absl::c_find_if(entries_, [&channel_id](const auto& e) { return e->channel_id() == channel_id; }); @@ -1214,11 +1237,10 @@ bool TurnPort::CreateOrRefreshEntry(Connection* conn, int channel_number) { void TurnPort::HandleConnectionDestroyed(Connection* conn) { // Schedule an event to destroy TurnEntry for the connection, which is // being destroyed. - const rtc::SocketAddress& remote_address = conn->remote_candidate().address(); + const SocketAddress& remote_address = conn->remote_candidate().address(); // We should always have an entry for this connection. TurnEntry* entry = FindEntry(remote_address); - rtc::scoped_refptr flag = - entry->UntrackConnection(conn); + scoped_refptr flag = entry->UntrackConnection(conn); if (flag) { // An assumption here is that the lifetime flag for the entry, is within // the lifetime scope of `task_safety_` and therefore use of `this` is safe. @@ -1241,40 +1263,28 @@ void TurnPort::SetCallbacksForTest(CallbacksForTest* callbacks) { callbacks_for_test_ = callbacks; } -bool TurnPort::SetEntryChannelId(const rtc::SocketAddress& address, - int channel_id) { - TurnEntry* entry = FindEntry(address); - if (!entry) { - return false; - } - entry->set_channel_id(channel_id); - return true; -} - -std::string TurnPort::ReconstructedServerUrl() { - // draft-petithuguenin-behave-turn-uris-01 - // turnURI = scheme ":" turn-host [ ":" turn-port ] +std::string TurnPort::ReconstructServerUrl() { + // https://www.rfc-editor.org/rfc/rfc7065#section-3.1 + // turnURI = scheme ":" host [ ":" port ] // [ "?transport=" transport ] // scheme = "turn" / "turns" // transport = "udp" / "tcp" / transport-ext // transport-ext = 1*unreserved - // turn-host = IP-literal / IPv4address / reg-name - // turn-port = *DIGIT std::string scheme = "turn"; std::string transport = "tcp"; switch (server_address_.proto) { - case PROTO_SSLTCP: - case PROTO_TLS: + case webrtc::PROTO_SSLTCP: + case webrtc::PROTO_TLS: scheme = "turns"; break; - case PROTO_UDP: + case webrtc::PROTO_UDP: transport = "udp"; break; - case PROTO_TCP: + case webrtc::PROTO_TCP: break; } - rtc::StringBuilder url; - url << scheme << ":" << server_address_.address.hostname() << ":" + StringBuilder url; + url << scheme << ":" << server_address_.address.HostAsURIString() << ":" << server_address_.address.port() << "?transport=" << transport; return url.Release(); } @@ -1318,6 +1328,8 @@ TurnAllocateRequest::TurnAllocateRequest(TurnPort* port) message->AddAttribute(std::move(transport_attr)); if (!port_->hash().empty()) { port_->AddRequestAuthInfo(message); + } else { + SetAuthenticationRequired(false); } port_->MaybeAddTurnLoggingId(message); port_->TurnCustomizerMaybeModifyOutgoingStunMessage(message); @@ -1325,14 +1337,14 @@ TurnAllocateRequest::TurnAllocateRequest(TurnPort* port) void TurnAllocateRequest::OnSent() { RTC_LOG(LS_INFO) << port_->ToString() << ": TURN allocate request sent, id=" - << rtc::hex_encode(id()); + << webrtc::hex_encode(id()); StunRequest::OnSent(); } void TurnAllocateRequest::OnResponse(StunMessage* response) { RTC_LOG(LS_INFO) << port_->ToString() << ": TURN allocate requested successfully, id=" - 
<< rtc::hex_encode(id()) + << webrtc::hex_encode(id()) << ", code=0" // Makes logging easier to parse. ", rtt=" << Elapsed(); @@ -1359,10 +1371,10 @@ void TurnAllocateRequest::OnResponse(StunMessage* response) { } const StunUInt32Attribute* lifetime_attr = - response->GetUInt32(STUN_ATTR_TURN_LIFETIME); + response->GetUInt32(STUN_ATTR_LIFETIME); if (!lifetime_attr) { RTC_LOG(LS_WARNING) << port_->ToString() - << ": Missing STUN_ATTR_TURN_LIFETIME attribute in " + << ": Missing STUN_ATTR_LIFETIME attribute in " "allocate success response"; return; } @@ -1378,7 +1390,7 @@ void TurnAllocateRequest::OnErrorResponse(StunMessage* response) { RTC_LOG(LS_INFO) << port_->ToString() << ": Received TURN allocate error response, id=" - << rtc::hex_encode(id()) << ", code=" << error_code + << webrtc::hex_encode(id()) << ", code=" << error_code << ", rtt=" << Elapsed(); switch (error_code) { @@ -1398,7 +1410,7 @@ void TurnAllocateRequest::OnErrorResponse(StunMessage* response) { default: RTC_LOG(LS_WARNING) << port_->ToString() << ": Received TURN allocate error response, id=" - << rtc::hex_encode(id()) << ", code=" << error_code + << webrtc::hex_encode(id()) << ", code=" << error_code << ", rtt=" << Elapsed(); const StunErrorCodeAttribute* attr = response->GetErrorCode(); port_->OnAllocateError(error_code, attr ? attr->reason() : ""); @@ -1407,7 +1419,7 @@ void TurnAllocateRequest::OnErrorResponse(StunMessage* response) { void TurnAllocateRequest::OnTimeout() { RTC_LOG(LS_WARNING) << port_->ToString() << ": TURN allocate request " - << rtc::hex_encode(id()) << " timeout"; + << webrtc::hex_encode(id()) << " timeout"; port_->OnAllocateRequestTimeout(); } @@ -1515,24 +1527,24 @@ TurnRefreshRequest::TurnRefreshRequest(TurnPort* port, int lifetime /*= -1*/) void TurnRefreshRequest::OnSent() { RTC_LOG(LS_INFO) << port_->ToString() << ": TURN refresh request sent, id=" - << rtc::hex_encode(id()); + << webrtc::hex_encode(id()); StunRequest::OnSent(); } void TurnRefreshRequest::OnResponse(StunMessage* response) { RTC_LOG(LS_INFO) << port_->ToString() << ": TURN refresh requested successfully, id=" - << rtc::hex_encode(id()) + << webrtc::hex_encode(id()) << ", code=0" // Makes logging easier to parse. ", rtt=" << Elapsed(); // Check mandatory attributes as indicated in RFC5766, Section 7.3. 
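The LIFETIME attribute read here feeds the allocation refresh schedule. As a hedged sketch (not the TurnPort implementation), one reasonable policy is to refresh roughly a minute before the allocation expires, falling back to half the lifetime for very short values:

#include <algorithm>
#include <cstdint>

// Given the LIFETIME value (seconds) from an allocate/refresh success
// response, choose a refresh delay that fires well before expiry. The exact
// margins here are illustrative.
int64_t RefreshDelayMs(uint32_t lifetime_seconds) {
  constexpr int64_t kEarlyRefreshSeconds = 60;
  const int64_t lifetime = static_cast<int64_t>(lifetime_seconds);
  const int64_t delay_seconds =
      std::max<int64_t>(lifetime - kEarlyRefreshSeconds, lifetime / 2);
  return delay_seconds * 1000;
}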
const StunUInt32Attribute* lifetime_attr = - response->GetUInt32(STUN_ATTR_TURN_LIFETIME); + response->GetUInt32(STUN_ATTR_LIFETIME); if (!lifetime_attr) { RTC_LOG(LS_WARNING) << port_->ToString() - << ": Missing STUN_ATTR_TURN_LIFETIME attribute in " + << ": Missing STUN_ATTR_LIFETIME attribute in " "refresh success response."; return; } @@ -1564,7 +1576,7 @@ void TurnRefreshRequest::OnErrorResponse(StunMessage* response) { } else { RTC_LOG(LS_WARNING) << port_->ToString() << ": Received TURN refresh error response, id=" - << rtc::hex_encode(id()) << ", code=" << error_code + << webrtc::hex_encode(id()) << ", code=" << error_code << ", rtt=" << Elapsed(); port_->OnRefreshError(); if (port_->callbacks_for_test_) { @@ -1575,14 +1587,14 @@ void TurnRefreshRequest::OnErrorResponse(StunMessage* response) { void TurnRefreshRequest::OnTimeout() { RTC_LOG(LS_WARNING) << port_->ToString() << ": TURN refresh timeout " - << rtc::hex_encode(id()); + << webrtc::hex_encode(id()); port_->OnRefreshError(); } TurnCreatePermissionRequest::TurnCreatePermissionRequest( TurnPort* port, TurnEntry* entry, - const rtc::SocketAddress& ext_addr) + const SocketAddress& ext_addr) : StunRequest( port->request_manager(), std::make_unique(TURN_CREATE_PERMISSION_REQUEST)), @@ -1612,14 +1624,14 @@ TurnCreatePermissionRequest::~TurnCreatePermissionRequest() { void TurnCreatePermissionRequest::OnSent() { RTC_LOG(LS_INFO) << port_->ToString() << ": TURN create permission request sent, id=" - << rtc::hex_encode(id()); + << webrtc::hex_encode(id()); StunRequest::OnSent(); } void TurnCreatePermissionRequest::OnResponse(StunMessage* response) { RTC_LOG(LS_INFO) << port_->ToString() << ": TURN permission requested successfully, id=" - << rtc::hex_encode(id()) + << webrtc::hex_encode(id()) << ", code=0" // Makes logging easier to parse. 
", rtt=" << Elapsed(); @@ -1633,7 +1645,7 @@ void TurnCreatePermissionRequest::OnErrorResponse(StunMessage* response) { int error_code = response->GetErrorCodeValue(); RTC_LOG(LS_WARNING) << port_->ToString() << ": Received TURN create permission error response, id=" - << rtc::hex_encode(id()) << ", code=" << error_code + << webrtc::hex_encode(id()) << ", code=" << error_code << ", rtt=" << Elapsed(); if (entry_) { entry_->OnCreatePermissionError(response, error_code); @@ -1643,17 +1655,16 @@ void TurnCreatePermissionRequest::OnErrorResponse(StunMessage* response) { void TurnCreatePermissionRequest::OnTimeout() { RTC_LOG(LS_WARNING) << port_->ToString() << ": TURN create permission timeout " - << rtc::hex_encode(id()); + << webrtc::hex_encode(id()); if (entry_) { entry_->OnCreatePermissionTimeout(); } } -TurnChannelBindRequest::TurnChannelBindRequest( - TurnPort* port, - TurnEntry* entry, - int channel_id, - const rtc::SocketAddress& ext_addr) +TurnChannelBindRequest::TurnChannelBindRequest(TurnPort* port, + TurnEntry* entry, + uint16_t channel_id, + const SocketAddress& ext_addr) : StunRequest(port->request_manager(), std::make_unique(TURN_CHANNEL_BIND_REQUEST)), port_(port), @@ -1685,14 +1696,14 @@ TurnChannelBindRequest::~TurnChannelBindRequest() { void TurnChannelBindRequest::OnSent() { RTC_LOG(LS_INFO) << port_->ToString() << ": TURN channel bind request sent, id=" - << rtc::hex_encode(id()); + << webrtc::hex_encode(id()); StunRequest::OnSent(); } void TurnChannelBindRequest::OnResponse(StunMessage* response) { RTC_LOG(LS_INFO) << port_->ToString() << ": TURN channel bind requested successfully, id=" - << rtc::hex_encode(id()) + << webrtc::hex_encode(id()) << ", code=0" // Makes logging easier to parse. ", rtt=" << Elapsed(); @@ -1714,7 +1725,7 @@ void TurnChannelBindRequest::OnErrorResponse(StunMessage* response) { int error_code = response->GetErrorCodeValue(); RTC_LOG(LS_WARNING) << port_->ToString() << ": Received TURN channel bind error response, id=" - << rtc::hex_encode(id()) << ", code=" << error_code + << webrtc::hex_encode(id()) << ", code=" << error_code << ", rtt=" << Elapsed(); if (entry_) { entry_->OnChannelBindError(response, error_code); @@ -1723,7 +1734,7 @@ void TurnChannelBindRequest::OnErrorResponse(StunMessage* response) { void TurnChannelBindRequest::OnTimeout() { RTC_LOG(LS_WARNING) << port_->ToString() << ": TURN channel bind timeout " - << rtc::hex_encode(id()); + << webrtc::hex_encode(id()); if (entry_) { entry_->OnChannelBindTimeout(); } @@ -1751,7 +1762,7 @@ void TurnEntry::TrackConnection(Connection* conn) { connections_.push_back(conn); } -rtc::scoped_refptr TurnEntry::UntrackConnection( +scoped_refptr TurnEntry::UntrackConnection( Connection* conn) { connections_.erase(absl::c_find(connections_, conn)); return connections_.empty() ? task_safety_.flag() : nullptr; @@ -1770,8 +1781,8 @@ void TurnEntry::SendChannelBindRequest(int delay) { int TurnEntry::Send(const void* data, size_t size, bool payload, - const rtc::PacketOptions& options) { - rtc::ByteBufferWriter buf; + const AsyncSocketPacketOptions& options) { + ByteBufferWriter buf; if (state_ != STATE_BOUND || !port_->TurnCustomizerAllowChannelData(data, size, payload)) { // If we haven't bound the channel yet, we have to use a Send Indication. @@ -1796,9 +1807,10 @@ int TurnEntry::Send(const void* data, // If the channel is bound, we can send the data as a Channel Message. 
buf.WriteUInt16(channel_id_); buf.WriteUInt16(static_cast(size)); - buf.WriteBytes(reinterpret_cast(data), size); + buf.Write( + ArrayView(reinterpret_cast(data), size)); } - rtc::PacketOptions modified_options(options); + AsyncSocketPacketOptions modified_options(options); modified_options.info_signaled_after_sent.turn_overhead_bytes = buf.Length() - size; return port_->Send(buf.Data(), buf.Length(), modified_options); @@ -1872,4 +1884,4 @@ void TurnEntry::OnChannelBindTimeout() { state_ = STATE_UNBOUND; port_->FailAndPruneConnection(ext_addr_); } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/turn_port.h b/p2p/base/turn_port.h index ac660d6599..44a4563942 100644 --- a/p2p/base/turn_port.h +++ b/p2p/base/turn_port.h @@ -13,6 +13,7 @@ #include +#include #include #include #include @@ -22,18 +23,28 @@ #include "absl/memory/memory.h" #include "absl/strings/string_view.h" #include "api/async_dns_resolver.h" +#include "api/candidate.h" #include "api/task_queue/pending_task_safety_flag.h" -#include "api/task_queue/task_queue_base.h" +#include "api/transport/stun.h" +#include "p2p/base/connection.h" #include "p2p/base/port.h" -#include "p2p/client/basic_port_allocator.h" +#include "p2p/base/port_allocator.h" +#include "p2p/base/port_interface.h" +#include "p2p/base/stun_request.h" +#include "p2p/client/relay_port_factory_interface.h" #include "rtc_base/async_packet_socket.h" +#include "rtc_base/dscp.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/logging.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" #include "rtc_base/ssl_certificate.h" namespace webrtc { -class TurnCustomizer; -} -namespace cricket { +class TurnCustomizer; const int kMaxTurnUsernameLength = 509; // RFC 8489 section 14.3 @@ -62,8 +73,7 @@ class TurnPort : public Port { return false; } // Do not connect to low-numbered ports. The default STUN port is 3478. - if (!AllowedTurnPort(args.server_address->address.port(), - args.field_trials)) { + if (!AllowedTurnPort(args.server_address->address.port())) { RTC_LOG(LS_ERROR) << "Attempt to use TURN to connect to port " << args.server_address->address.port(); return false; @@ -73,18 +83,22 @@ class TurnPort : public Port { // Create a TURN port using the shared UDP socket, `socket`. static std::unique_ptr Create(const CreateRelayPortArgs& args, - rtc::AsyncPacketSocket* socket) { + AsyncPacketSocket* socket) { if (!Validate(args)) { return nullptr; } // Using `new` to access a non-public constructor. return absl::WrapUnique( - new TurnPort(args.network_thread, args.socket_factory, args.network, - socket, args.username, args.password, *args.server_address, - args.config->credentials, args.relative_priority, - args.config->tls_alpn_protocols, + new TurnPort({.env = args.env, + .network_thread = args.network_thread, + .socket_factory = args.socket_factory, + .network = args.network, + .ice_username_fragment = args.username, + .ice_password = args.password}, + socket, *args.server_address, args.config->credentials, + args.relative_priority, args.config->tls_alpn_protocols, args.config->tls_elliptic_curves, args.turn_customizer, - args.config->tls_cert_verifier, args.field_trials)); + args.config->tls_cert_verifier)); } // Create a TURN port that will use a new socket, bound to `network` and @@ -96,20 +110,24 @@ class TurnPort : public Port { return nullptr; } // Using `new` to access a non-public constructor. 
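The bound-channel branch above prepends a 4-byte ChannelData header to the payload, which is exactly the turn_overhead_bytes accounted for in the packet options. A minimal stand-alone version of that framing (RFC 5766, section 11.4), using std::vector instead of ByteBufferWriter, is:

#include <cstddef>
#include <cstdint>
#include <vector>

// TURN ChannelData framing: 16-bit channel number (0x4000-0x7FFF) and 16-bit
// payload length, both big-endian, followed by the payload. `size` is assumed
// to fit in 16 bits.
std::vector<uint8_t> FrameChannelData(uint16_t channel_id,
                                      const uint8_t* data,
                                      size_t size) {
  std::vector<uint8_t> out;
  out.reserve(4 + size);
  out.push_back(static_cast<uint8_t>(channel_id >> 8));
  out.push_back(static_cast<uint8_t>(channel_id & 0xff));
  out.push_back(static_cast<uint8_t>((size >> 8) & 0xff));
  out.push_back(static_cast<uint8_t>(size & 0xff));
  out.insert(out.end(), data, data + size);
  return out;
}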
- return absl::WrapUnique( - new TurnPort(args.network_thread, args.socket_factory, args.network, - min_port, max_port, args.username, args.password, - *args.server_address, args.config->credentials, - args.relative_priority, args.config->tls_alpn_protocols, - args.config->tls_elliptic_curves, args.turn_customizer, - args.config->tls_cert_verifier, args.field_trials)); + return absl::WrapUnique(new TurnPort( + {.env = args.env, + .network_thread = args.network_thread, + .socket_factory = args.socket_factory, + .network = args.network, + .ice_username_fragment = args.username, + .ice_password = args.password}, + min_port, max_port, *args.server_address, args.config->credentials, + args.relative_priority, args.config->tls_alpn_protocols, + args.config->tls_elliptic_curves, args.turn_customizer, + args.config->tls_cert_verifier)); } ~TurnPort() override; const ProtocolAddress& server_address() const { return server_address_; } // Returns an empty address if the local address has not been assigned. - rtc::SocketAddress GetLocalAddress() const; + SocketAddress GetLocalAddress() const; bool ready() const { return state_ == STATE_READY; } bool connected() const { @@ -136,41 +154,34 @@ class TurnPort : public Port { PortInterface::CandidateOrigin origin) override; int SendTo(const void* data, size_t size, - const rtc::SocketAddress& addr, - const rtc::PacketOptions& options, + const SocketAddress& addr, + const AsyncSocketPacketOptions& options, bool payload) override; - int SetOption(rtc::Socket::Option opt, int value) override; - int GetOption(rtc::Socket::Option opt, int* value) override; + int SetOption(Socket::Option opt, int value) override; + int GetOption(Socket::Option opt, int* value) override; int GetError() override; - bool HandleIncomingPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - int64_t packet_time_us) override; - bool CanHandleIncomingPacketsFrom( - const rtc::SocketAddress& addr) const override; + bool HandleIncomingPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet) override; + bool CanHandleIncomingPacketsFrom(const SocketAddress& addr) const override; // Checks if a connection exists for `addr` before forwarding the call to // the base class. 
void SendBindingErrorResponse(StunMessage* message, - const rtc::SocketAddress& addr, + const SocketAddress& addr, int error_code, absl::string_view reason) override; - virtual void OnReadPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - const int64_t& packet_time_us); + virtual void OnReadPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet); - void OnSentPacket(rtc::AsyncPacketSocket* socket, - const rtc::SentPacket& sent_packet) override; - virtual void OnReadyToSend(rtc::AsyncPacketSocket* socket); + void OnSentPacket(AsyncPacketSocket* socket, + const SentPacketInfo& sent_packet) override; + virtual void OnReadyToSend(AsyncPacketSocket* socket); bool SupportsProtocol(absl::string_view protocol) const override; - void OnSocketConnect(rtc::AsyncPacketSocket* socket); - void OnSocketClose(rtc::AsyncPacketSocket* socket, int error); + void OnSocketConnect(AsyncPacketSocket* socket); + void OnSocketClose(AsyncPacketSocket* socket, int error); const std::string& hash() const { return hash_; } const std::string& nonce() const { return nonce_; } @@ -179,16 +190,13 @@ class TurnPort : public Port { void OnAllocateMismatch(); - rtc::AsyncPacketSocket* socket() const { return socket_; } + AsyncPacketSocket* socket() const { return socket_; } StunRequestManager& request_manager() { return request_manager_; } bool HasRequests() { return !request_manager_.empty(); } void set_credentials(const RelayCredentials& credentials) { credentials_ = credentials; } - // Finds the turn entry with `address` and sets its channel id. - // Returns true if the entry is found. - bool SetEntryChannelId(const rtc::SocketAddress& address, int channel_id); void HandleConnectionDestroyed(Connection* conn) override; @@ -205,110 +213,93 @@ class TurnPort : public Port { void SetCallbacksForTest(CallbacksForTest* callbacks); protected: - TurnPort(webrtc::TaskQueueBase* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, - rtc::AsyncPacketSocket* socket, - absl::string_view username, - absl::string_view password, + TurnPort(const PortParametersRef& args, + AsyncPacketSocket* socket, const ProtocolAddress& server_address, const RelayCredentials& credentials, int server_priority, const std::vector& tls_alpn_protocols, const std::vector& tls_elliptic_curves, - webrtc::TurnCustomizer* customizer, - rtc::SSLCertificateVerifier* tls_cert_verifier = nullptr, - const webrtc::FieldTrialsView* field_trials = nullptr); + TurnCustomizer* customizer, + SSLCertificateVerifier* tls_cert_verifier = nullptr); - TurnPort(webrtc::TaskQueueBase* thread, - rtc::PacketSocketFactory* factory, - const rtc::Network* network, + TurnPort(const PortParametersRef& args, uint16_t min_port, uint16_t max_port, - absl::string_view username, - absl::string_view password, const ProtocolAddress& server_address, const RelayCredentials& credentials, int server_priority, const std::vector& tls_alpn_protocols, const std::vector& tls_elliptic_curves, - webrtc::TurnCustomizer* customizer, - rtc::SSLCertificateVerifier* tls_cert_verifier = nullptr, - const webrtc::FieldTrialsView* field_trials = nullptr); + TurnCustomizer* customizer, + SSLCertificateVerifier* tls_cert_verifier = nullptr); // NOTE: This method needs to be accessible for StunPort // return true if entry was created (i.e channel_number consumed). 
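The packet-handling overrides above move from (data, size, remote_addr, timestamp) argument lists to a single ReceivedIpPacket. As a hedged sketch of adapting an old-style consumer, only payload().data()/payload().size() are taken from code in this patch; the handler name is hypothetical:

#include <cstddef>
#include <cstdint>

// Hypothetical legacy consumer that still wants a raw pointer and length.
inline void LegacyHandler(const char* /*data*/, size_t /*size*/) {}

// Adapter: any packet type exposing payload() as a byte view can be forwarded.
template <typename Packet>
void ForwardToLegacy(const Packet& packet) {
  LegacyHandler(reinterpret_cast<const char*>(packet.payload().data()),
                packet.payload().size());
}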
bool CreateOrRefreshEntry(Connection* conn, int channel_number); - rtc::DiffServCodePoint StunDscpValue() const override; + DiffServCodePoint StunDscpValue() const override; // Shuts down the turn port, frees requests and deletes connections. void Close(); private: - typedef std::map SocketOptionsMap; - typedef std::set AttemptedServerSet; + typedef std::map SocketOptionsMap; + typedef std::set AttemptedServerSet; - static bool AllowedTurnPort(int port, - const webrtc::FieldTrialsView* field_trials); + static bool AllowedTurnPort(int port); void TryAlternateServer(); bool CreateTurnClientSocket(); void set_nonce(absl::string_view nonce) { nonce_ = std::string(nonce); } - void set_realm(absl::string_view realm) { - if (realm != realm_) { - realm_ = std::string(realm); - UpdateHash(); - } - } + void set_realm(absl::string_view realm); void OnRefreshError(); void HandleRefreshError(); - bool SetAlternateServer(const rtc::SocketAddress& address); - void ResolveTurnAddress(const rtc::SocketAddress& address); - void OnResolveResult(rtc::AsyncResolverInterface* resolver); + bool SetAlternateServer(const SocketAddress& address); + void ResolveTurnAddress(const SocketAddress& address); + void OnResolveResult(const AsyncDnsResolverResult& result); void AddRequestAuthInfo(StunMessage* msg); void OnSendStunPacket(const void* data, size_t size, StunRequest* request); // Stun address from allocate success response. // Currently used only for testing. - void OnStunAddress(const rtc::SocketAddress& address); - void OnAllocateSuccess(const rtc::SocketAddress& address, - const rtc::SocketAddress& stun_address); + void OnStunAddress(const SocketAddress& address); + void OnAllocateSuccess(const SocketAddress& address, + const SocketAddress& stun_address); void OnAllocateError(int error_code, absl::string_view reason); void OnAllocateRequestTimeout(); void HandleDataIndication(const char* data, size_t size, int64_t packet_time_us); - void HandleChannelData(int channel_id, + void HandleChannelData(uint16_t channel_id, const char* data, size_t size, int64_t packet_time_us); void DispatchPacket(const char* data, size_t size, - const rtc::SocketAddress& remote_addr, + const SocketAddress& remote_addr, ProtocolType proto, int64_t packet_time_us); bool ScheduleRefresh(uint32_t lifetime); void SendRequest(StunRequest* request, int delay); - int Send(const void* data, size_t size, const rtc::PacketOptions& options); + int Send(const void* data, + size_t size, + const AsyncSocketPacketOptions& options); void UpdateHash(); bool UpdateNonce(StunMessage* response); void ResetNonce(); - bool HasPermission(const rtc::IPAddress& ipaddr) const; - TurnEntry* FindEntry(const rtc::SocketAddress& address) const; - TurnEntry* FindEntry(int channel_id) const; + bool HasPermission(const IPAddress& ipaddr) const; + TurnEntry* FindEntry(const SocketAddress& address) const; + TurnEntry* FindEntry(uint16_t channel_id) const; // Marks the connection with remote address `address` failed and // pruned (a.k.a. write-timed-out). Returns true if a connection is found. - bool FailAndPruneConnection(const rtc::SocketAddress& address); - - // Reconstruct the URL of the server which the candidate is gathered from. - std::string ReconstructedServerUrl(); + bool FailAndPruneConnection(const SocketAddress& address); void MaybeAddTurnLoggingId(StunMessage* message); @@ -318,18 +309,24 @@ class TurnPort : public Port { bool payload); ProtocolAddress server_address_; + // Reconstruct the URL of the server which the candidate is gathered from. 
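set_realm() forcing UpdateHash() reflects the STUN long-term credential scheme, where the cached key depends on the realm (RFC 5389, section 15.4: key = MD5(username ":" realm ":" SASLprep(password))). The helper below only assembles that input string and is not WebRTC code; the hashing step is omitted:

#include <string>

// A realm change invalidates the cached hash, which is why set_realm() must
// call UpdateHash() in the real code.
std::string LongTermCredentialKeyInput(const std::string& username,
                                       const std::string& realm,
                                       const std::string& password) {
  return username + ":" + realm + ":" + password;
}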
+ // A copy needs to be stored as server_address_ will resolve and clear its + // hostname field. + std::string ReconstructServerUrl(); + std::string server_url_; + TlsCertPolicy tls_cert_policy_ = TlsCertPolicy::TLS_CERT_POLICY_SECURE; std::vector tls_alpn_protocols_; std::vector tls_elliptic_curves_; - rtc::SSLCertificateVerifier* tls_cert_verifier_; + SSLCertificateVerifier* tls_cert_verifier_; RelayCredentials credentials_; AttemptedServerSet attempted_server_addresses_; - rtc::AsyncPacketSocket* socket_; + AsyncPacketSocket* socket_; SocketOptionsMap socket_options_; - std::unique_ptr resolver_; + std::unique_ptr resolver_; int error_; - rtc::DiffServCodePoint stun_dscp_value_; + DiffServCodePoint stun_dscp_value_; StunRequestManager request_manager_; std::string realm_; // From 401/438 response message. @@ -349,7 +346,7 @@ class TurnPort : public Port { // Optional TurnCustomizer that can modify outgoing messages. Once set, this // must outlive the TurnPort's lifetime. - webrtc::TurnCustomizer* turn_customizer_ = nullptr; + TurnCustomizer* turn_customizer_ = nullptr; // Optional TurnLoggingId. // An identifier set by application that is added to TURN_ALLOCATE_REQUEST @@ -359,7 +356,7 @@ class TurnPort : public Port { // to be more easy to work with. std::string turn_logging_id_; - webrtc::ScopedTaskSafety task_safety_; + ScopedTaskSafety task_safety_; CallbacksForTest* callbacks_for_test_ = nullptr; @@ -370,6 +367,14 @@ class TurnPort : public Port { friend class TurnChannelBindRequest; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::TurnPort; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_TURN_PORT_H_ diff --git a/p2p/base/turn_port_unittest.cc b/p2p/base/turn_port_unittest.cc index a570fc70a7..8f6403c966 100644 --- a/p2p/base/turn_port_unittest.cc +++ b/p2p/base/turn_port_unittest.cc @@ -7,6 +7,32 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
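The backwards-compatibility block at the end of turn_port.h re-exports the class into the old namespace behind a macro. A tiny self-contained model of that pattern (placeholder names, not WebRTC's) is:

// The type lives in the new namespace; the old namespace re-exports it behind
// an opt-in macro so legacy spellings keep compiling during the migration.
namespace newns {
struct Widget {};
}  // namespace newns

#define ALLOW_OLD_NAMESPACE 1
#if ALLOW_OLD_NAMESPACE
namespace oldns {
using ::newns::Widget;  // oldns::Widget and newns::Widget are the same type
}  // namespace oldns
#endif

inline oldns::Widget MakeWidgetViaOldName() {
  return {};
}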
*/ +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "api/array_view.h" +#include "api/candidate.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/packet_socket_factory.h" +#include "api/test/mock_async_dns_resolver.h" +#include "api/test/rtc_error_matchers.h" +#include "api/transport/stun.h" +#include "p2p/base/connection_info.h" +#include "p2p/base/port.h" +#include "p2p/base/port_interface.h" +#include "p2p/base/stun_request.h" +#include "p2p/client/relay_port_factory_interface.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/network.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "test/gmock.h" +#include "test/wait_until.h" #if defined(WEBRTC_POSIX) #include @@ -15,22 +41,22 @@ #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/units/time_delta.h" #include "p2p/base/basic_packet_socket_factory.h" #include "p2p/base/connection.h" -#include "p2p/base/mock_dns_resolving_packet_socket_factory.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/port_allocator.h" #include "p2p/base/stun_port.h" -#include "p2p/base/test_turn_customizer.h" -#include "p2p/base/test_turn_server.h" #include "p2p/base/transport_description.h" #include "p2p/base/turn_port.h" -#include "p2p/base/turn_server.h" +#include "p2p/test/mock_dns_resolving_packet_socket_factory.h" +#include "p2p/test/test_turn_customizer.h" +#include "p2p/test/test_turn_server.h" +#include "p2p/test/turn_server.h" #include "rtc_base/buffer.h" #include "rtc_base/byte_buffer.h" #include "rtc_base/checks.h" @@ -43,16 +69,20 @@ #include "rtc_base/time_utils.h" #include "rtc_base/virtual_socket_server.h" #include "test/gtest.h" -#include "test/scoped_key_value_config.h" namespace { -using rtc::SocketAddress; - using ::testing::_; using ::testing::DoAll; +using ::testing::Eq; +using ::testing::IsTrue; +using ::testing::Ne; using ::testing::Return; using ::testing::ReturnPointee; using ::testing::SetArgPointee; +using ::webrtc::CreateEnvironment; +using ::webrtc::Environment; +using ::webrtc::IceCandidateType; +using ::webrtc::SocketAddress; static const SocketAddress kLocalAddr1("11.11.11.11", 0); static const SocketAddress kLocalAddr2("22.22.22.22", 0); @@ -61,12 +91,12 @@ static const SocketAddress kLocalIPv6Addr("2401:fa00:4:1000:be30:5bff:fee5:c3", static const SocketAddress kLocalIPv6Addr2("2401:fa00:4:2000:be30:5bff:fee5:d4", 0); static const SocketAddress kTurnUdpIntAddr("99.99.99.3", - cricket::TURN_SERVER_PORT); + webrtc::TURN_SERVER_PORT); static const SocketAddress kTurnTcpIntAddr("99.99.99.4", - cricket::TURN_SERVER_PORT); + webrtc::TURN_SERVER_PORT); static const SocketAddress kTurnUdpExtAddr("99.99.99.5", 0); static const SocketAddress kTurnAlternateIntAddr("99.99.99.6", - cricket::TURN_SERVER_PORT); + webrtc::TURN_SERVER_PORT); // Port for redirecting to a TCP Web server. Should not work. static const SocketAddress kTurnDangerousAddr("99.99.99.7", 81); // Port 53 (the DNS port); should work. @@ -76,14 +106,13 @@ static const SocketAddress kTurnPort80Addr("99.99.99.7", 80); // Port 443 (the HTTPS port); should work. static const SocketAddress kTurnPort443Addr("99.99.99.7", 443); // The default TURN server port. 
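The dangerous/53/80/443 test addresses above exercise the low-port policy noted earlier in turn_port.h ("Do not connect to low-numbered ports"). A sketch of the rule those tests expect follows; the real check is TurnPort::AllowedTurnPort(), this is only an illustration:

// Ports below 1024 are refused to avoid abusing TURN for cross-protocol
// attacks, with exceptions for DNS (53), HTTP (80) and HTTPS (443); port 0
// means "any port". Port 81 above is the case that must fail.
bool IsPortPlausiblyAllowedForTurn(int port) {
  return port == 0 || port >= 1024 || port == 53 || port == 80 || port == 443;
}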
-static const SocketAddress kTurnIntAddr("99.99.99.7", - cricket::TURN_SERVER_PORT); +static const SocketAddress kTurnIntAddr("99.99.99.7", webrtc::TURN_SERVER_PORT); static const SocketAddress kTurnIPv6IntAddr( "2400:4030:2:2c00:be30:abcd:efab:cdef", - cricket::TURN_SERVER_PORT); + webrtc::TURN_SERVER_PORT); static const SocketAddress kTurnUdpIPv6IntAddr( "2400:4030:1:2c00:be30:abcd:efab:cdef", - cricket::TURN_SERVER_PORT); + webrtc::TURN_SERVER_PORT); static const SocketAddress kTurnInvalidAddr("www.google.invalid.", 3478); static const SocketAddress kTurnValidAddr("www.google.valid.", 3478); @@ -106,29 +135,28 @@ static constexpr unsigned int kResolverTimeout = 10000; constexpr uint64_t kTiebreakerDefault = 44444; -static const cricket::ProtocolAddress kTurnUdpProtoAddr(kTurnUdpIntAddr, - cricket::PROTO_UDP); -static const cricket::ProtocolAddress kTurnTcpProtoAddr(kTurnTcpIntAddr, - cricket::PROTO_TCP); -static const cricket::ProtocolAddress kTurnTlsProtoAddr(kTurnTcpIntAddr, - cricket::PROTO_TLS); -static const cricket::ProtocolAddress kTurnUdpIPv6ProtoAddr(kTurnUdpIPv6IntAddr, - cricket::PROTO_UDP); -static const cricket::ProtocolAddress kTurnDangerousProtoAddr( - kTurnDangerousAddr, - cricket::PROTO_TCP); -static const cricket::ProtocolAddress kTurnPort53ProtoAddr(kTurnPort53Addr, - cricket::PROTO_TCP); -static const cricket::ProtocolAddress kTurnPort80ProtoAddr(kTurnPort80Addr, - cricket::PROTO_TCP); -static const cricket::ProtocolAddress kTurnPort443ProtoAddr(kTurnPort443Addr, - cricket::PROTO_TCP); -static const cricket::ProtocolAddress kTurnPortInvalidHostnameProtoAddr( +static const webrtc::ProtocolAddress kTurnUdpProtoAddr(kTurnUdpIntAddr, + webrtc::PROTO_UDP); +static const webrtc::ProtocolAddress kTurnTcpProtoAddr(kTurnTcpIntAddr, + webrtc::PROTO_TCP); +static const webrtc::ProtocolAddress kTurnTlsProtoAddr(kTurnTcpIntAddr, + webrtc::PROTO_TLS); +static const webrtc::ProtocolAddress kTurnUdpIPv6ProtoAddr(kTurnUdpIPv6IntAddr, + webrtc::PROTO_UDP); +static const webrtc::ProtocolAddress kTurnDangerousProtoAddr(kTurnDangerousAddr, + webrtc::PROTO_TCP); +static const webrtc::ProtocolAddress kTurnPort53ProtoAddr(kTurnPort53Addr, + webrtc::PROTO_TCP); +static const webrtc::ProtocolAddress kTurnPort80ProtoAddr(kTurnPort80Addr, + webrtc::PROTO_TCP); +static const webrtc::ProtocolAddress kTurnPort443ProtoAddr(kTurnPort443Addr, + webrtc::PROTO_TCP); +static const webrtc::ProtocolAddress kTurnPortInvalidHostnameProtoAddr( kTurnInvalidAddr, - cricket::PROTO_UDP); -static const cricket::ProtocolAddress kTurnPortValidHostnameProtoAddr( + webrtc::PROTO_UDP); +static const webrtc::ProtocolAddress kTurnPortValidHostnameProtoAddr( kTurnValidAddr, - cricket::PROTO_UDP); + webrtc::PROTO_UDP); #if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID) static int GetFDCount() { @@ -147,9 +175,9 @@ static int GetFDCount() { } // unnamed namespace -namespace cricket { +namespace webrtc { -class TurnPortTestVirtualSocketServer : public rtc::VirtualSocketServer { +class TurnPortTestVirtualSocketServer : public VirtualSocketServer { public: TurnPortTestVirtualSocketServer() { // This configures the virtual socket server to always add a simulated @@ -158,7 +186,7 @@ class TurnPortTestVirtualSocketServer : public rtc::VirtualSocketServer { UpdateDelayDistribution(); } - using rtc::VirtualSocketServer::LookupBinding; + using VirtualSocketServer::LookupBinding; }; class TestConnectionWrapper : public sigslot::has_slots<> { @@ -199,13 +227,12 @@ class TurnPortTest : public ::testing::Test, // Some code uses 
"last received time == 0" to represent "nothing received // so far", so we need to start the fake clock at a nonzero time... // TODO(deadbeef): Fix this. - fake_clock_.AdvanceTime(webrtc::TimeDelta::Seconds(1)); + fake_clock_.AdvanceTime(TimeDelta::Seconds(1)); } void OnTurnPortComplete(Port* port) { turn_ready_ = true; } void OnTurnPortError(Port* port) { turn_error_ = true; } - void OnCandidateError(Port* port, - const cricket::IceCandidateErrorEvent& event) { + void OnCandidateError(Port* port, const IceCandidateErrorEvent& event) { error_event_ = event; } void OnTurnUnknownAddress(PortInterface* port, @@ -216,26 +243,10 @@ class TurnPortTest : public ::testing::Test, bool /*port_muxed*/) { turn_unknown_address_ = true; } - void OnTurnReadPacket(Connection* conn, - const char* data, - size_t size, - int64_t packet_time_us) { - turn_packets_.push_back(rtc::Buffer(data, size)); - } void OnUdpPortComplete(Port* port) { udp_ready_ = true; } - void OnUdpReadPacket(Connection* conn, - const char* data, - size_t size, - int64_t packet_time_us) { - udp_packets_.push_back(rtc::Buffer(data, size)); - } - void OnSocketReadPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - const int64_t& packet_time_us) { - turn_port_->HandleIncomingPacket(socket, data, size, remote_addr, - packet_time_us); + void OnSocketReadPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet) { + turn_port_->HandleIncomingPacket(socket, packet); } void OnTurnPortDestroyed(PortInterface* port) { turn_port_destroyed_ = true; } @@ -248,14 +259,18 @@ class TurnPortTest : public ::testing::Test, } void OnTurnPortClosed() override { turn_port_closed_ = true; } - rtc::Socket* CreateServerSocket(const SocketAddress addr) { - rtc::Socket* socket = ss_->CreateSocket(AF_INET, SOCK_STREAM); + void OnConnectionSignalDestroyed(Connection* connection) { + connection->DeregisterReceivedPacketCallback(); + } + + Socket* CreateServerSocket(const SocketAddress addr) { + Socket* socket = ss_->CreateSocket(AF_INET, SOCK_STREAM); EXPECT_GE(socket->Bind(addr), 0); EXPECT_GE(socket->Listen(5), 0); return socket; } - rtc::Network* MakeNetwork(const SocketAddress& addr) { + Network* MakeNetwork(const SocketAddress& addr) { networks_.emplace_back("unittest", "unittest", addr.ipaddr(), 32); networks_.back().AddIP(addr.ipaddr()); return &networks_.back(); @@ -267,7 +282,7 @@ class TurnPortTest : public ::testing::Test, return CreateTurnPortWithAllParams(MakeNetwork(kLocalAddr1), username, password, server_address); } - bool CreateTurnPort(const rtc::SocketAddress& local_address, + bool CreateTurnPort(const SocketAddress& local_address, absl::string_view username, absl::string_view password, const ProtocolAddress& server_address) { @@ -275,7 +290,7 @@ class TurnPortTest : public ::testing::Test, password, server_address); } - bool CreateTurnPortWithNetwork(const rtc::Network* network, + bool CreateTurnPortWithNetwork(const Network* network, absl::string_view username, absl::string_view password, const ProtocolAddress& server_address) { @@ -286,13 +301,13 @@ class TurnPortTest : public ::testing::Test, // Version of CreateTurnPort that takes all possible parameters; all other // helper methods call this, such that "SetIceRole" and "ConnectSignals" (and // possibly other things in the future) only happen in one place. 
- bool CreateTurnPortWithAllParams(const rtc::Network* network, + bool CreateTurnPortWithAllParams(const Network* network, absl::string_view username, absl::string_view password, const ProtocolAddress& server_address) { RelayServerConfig config; config.credentials = RelayCredentials(username, password); - CreateRelayPortArgs args; + CreateRelayPortArgs args = {.env = env_}; args.network_thread = &main_; args.socket_factory = socket_factory(); args.network = network; @@ -301,7 +316,6 @@ class TurnPortTest : public ::testing::Test, args.server_address = &server_address; args.config = &config; args.turn_customizer = turn_customizer_.get(); - args.field_trials = &field_trials_; turn_port_ = TurnPort::Create(args, 0, 0); if (!turn_port_) { @@ -312,7 +326,7 @@ class TurnPortTest : public ::testing::Test, turn_port_->SetIceTiebreaker(kTiebreakerDefault); ConnectSignals(); - if (server_address.proto == cricket::PROTO_TLS) { + if (server_address.proto == webrtc::PROTO_TLS) { // The test TURN server has a self-signed certificate so will not pass // the normal client validation. Instruct the client to ignore certificate // errors for testing only. @@ -325,19 +339,21 @@ class TurnPortTest : public ::testing::Test, void CreateSharedTurnPort(absl::string_view username, absl::string_view password, const ProtocolAddress& server_address) { - RTC_CHECK(server_address.proto == PROTO_UDP); + RTC_CHECK(server_address.proto == webrtc::PROTO_UDP); if (!socket_) { socket_.reset(socket_factory()->CreateUdpSocket( - rtc::SocketAddress(kLocalAddr1.ipaddr(), 0), 0, 0)); + SocketAddress(kLocalAddr1.ipaddr(), 0), 0, 0)); ASSERT_TRUE(socket_ != NULL); - socket_->SignalReadPacket.connect(this, - &TurnPortTest::OnSocketReadPacket); + socket_->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + OnSocketReadPacket(socket, packet); + }); } RelayServerConfig config; config.credentials = RelayCredentials(username, password); - CreateRelayPortArgs args; + CreateRelayPortArgs args = {.env = env_}; args.network_thread = &main_; args.socket_factory = socket_factory(); args.network = MakeNetwork(kLocalAddr1); @@ -346,7 +362,6 @@ class TurnPortTest : public ::testing::Test, args.server_address = &server_address; args.config = &config; args.turn_customizer = turn_customizer_.get(); - args.field_trials = &field_trials_; turn_port_ = TurnPort::Create(args, socket_.get()); // This TURN port will be the controlling. turn_port_->SetIceRole(ICEROLE_CONTROLLING); @@ -370,9 +385,13 @@ class TurnPortTest : public ::testing::Test, void CreateUdpPort() { CreateUdpPort(kLocalAddr2); } void CreateUdpPort(const SocketAddress& address) { - udp_port_ = UDPPort::Create(&main_, socket_factory(), MakeNetwork(address), - 0, 0, kIceUfrag2, kIcePwd2, false, - absl::nullopt, &field_trials_); + udp_port_ = UDPPort::Create({.env = env_, + .network_thread = &main_, + .socket_factory = socket_factory(), + .network = MakeNetwork(address), + .ice_username_fragment = kIceUfrag2, + .ice_password = kIcePwd2}, + 0, 0, false, std::nullopt); // UDP port will be controlled. udp_port_->SetIceRole(ICEROLE_CONTROLLED); udp_port_->SetIceTiebreaker(kTiebreakerDefault); @@ -384,27 +403,33 @@ class TurnPortTest : public ::testing::Test, // turn_port_ should have been created. 
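The `CreateRelayPortArgs args = {.env = env_}` construction above relies on designated initializers, so call sites set only the fields they care about and everything else keeps its default. A stand-in aggregate (not the real CreateRelayPortArgs/PortParametersRef) showing the same style:

#include <string>

struct ExampleArgs {
  const void* env = nullptr;
  std::string ice_username_fragment;
  std::string ice_password;
  int relative_priority = 0;
};

inline ExampleArgs MakeExampleArgs() {
  // Unnamed fields keep their default member initializers.
  return {.ice_username_fragment = "ufrag", .ice_password = "pwd"};
}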
ASSERT_TRUE(turn_port_ != nullptr); turn_port_->PrepareAddress(); - ASSERT_TRUE_SIMULATED_WAIT( - turn_ready_, TimeToGetTurnCandidate(protocol_type), fake_clock_); + ASSERT_THAT(webrtc::WaitUntil([&] { return turn_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis( + TimeToGetTurnCandidate(protocol_type)), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); CreateUdpPort(); udp_port_->PrepareAddress(); - ASSERT_TRUE_SIMULATED_WAIT(udp_ready_, kSimulatedRtt, fake_clock_); + ASSERT_THAT(webrtc::WaitUntil([&] { return udp_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); } // Returns the fake clock time to establish a connection over the given // protocol. int TimeToConnect(ProtocolType protocol_type) { switch (protocol_type) { - case PROTO_TCP: + case webrtc::PROTO_TCP: // The virtual socket server will delay by a fixed half a round trip // for a TCP connection. return kSimulatedRtt / 2; - case PROTO_TLS: + case webrtc::PROTO_TLS: // TLS operates over TCP and additionally has a round of HELLO for // negotiating ciphers and a round for exchanging certificates. - return 2 * kSimulatedRtt + TimeToConnect(PROTO_TCP); - case PROTO_UDP: + return 2 * kSimulatedRtt + TimeToConnect(webrtc::PROTO_TCP); + case webrtc::PROTO_UDP: default: // UDP requires no round trips to set up the connection. return 0; @@ -453,7 +478,10 @@ class TurnPortTest : public ::testing::Test, void TestTurnAllocateSucceeds(unsigned int timeout) { ASSERT_TRUE(turn_port_); turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, timeout, fake_clock_); + EXPECT_THAT(webrtc::WaitUntil([&] { return turn_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis(timeout), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); ASSERT_EQ(1U, turn_port_->Candidates().size()); EXPECT_EQ(kTurnUdpExtAddr.ipaddr(), turn_port_->Candidates()[0].address().ipaddr()); @@ -464,14 +492,17 @@ class TurnPortTest : public ::testing::Test, absl::string_view expected_url) { ASSERT_TRUE(turn_port_); turn_port_->PrepareAddress(); - ASSERT_TRUE_SIMULATED_WAIT( - turn_ready_, TimeToGetTurnCandidate(protocol_type), fake_clock_); + ASSERT_THAT(webrtc::WaitUntil([&] { return turn_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis( + TimeToGetTurnCandidate(protocol_type)), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); ASSERT_EQ(1U, turn_port_->Candidates().size()); EXPECT_EQ(turn_port_->Candidates()[0].url(), expected_url); } void TestTurnAlternateServer(ProtocolType protocol_type) { - std::vector redirect_addresses; + std::vector redirect_addresses; redirect_addresses.push_back(kTurnAlternateIntAddr); TestTurnRedirector redirector(redirect_addresses); @@ -486,9 +517,12 @@ class TurnPortTest : public ::testing::Test, const SocketAddress old_addr = turn_port_->server_address().address; turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, - TimeToGetAlternateTurnCandidate(protocol_type), - fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis( + TimeToGetAlternateTurnCandidate(protocol_type)), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Retrieve the address again, the turn port's address should be // changed. 
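The repeated conversions through the rest of the test file replace the *_SIMULATED_WAIT macros with webrtc::WaitUntil plus gMock matchers: a predicate is polled until a matcher passes or a timeout, driven by the simulated clock, expires. A stand-alone model of that polling (ignoring matchers and the fake clock, and not the test/wait_until.h implementation):

#include <chrono>
#include <functional>
#include <thread>

bool PollUntil(const std::function<bool()>& predicate,
               std::chrono::milliseconds timeout) {
  const auto deadline = std::chrono::steady_clock::now() + timeout;
  while (std::chrono::steady_clock::now() < deadline) {
    if (predicate()) {
      return true;
    }
    std::this_thread::sleep_for(std::chrono::milliseconds(1));
  }
  return predicate();
}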
const SocketAddress new_addr = turn_port_->server_address().address; @@ -500,7 +534,7 @@ class TurnPortTest : public ::testing::Test, } void TestTurnAlternateServerV4toV6(ProtocolType protocol_type) { - std::vector redirect_addresses; + std::vector redirect_addresses; redirect_addresses.push_back(kTurnIPv6IntAddr); TestTurnRedirector redirector(redirect_addresses); @@ -511,12 +545,16 @@ class TurnPortTest : public ::testing::Test, turn_port_->PrepareAddress(); // Need time to connect to TURN server, send Allocate request and receive // redirect notice. - EXPECT_TRUE_SIMULATED_WAIT( - turn_error_, kSimulatedRtt + TimeToConnect(protocol_type), fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_error_; }, IsTrue(), + {.timeout = TimeDelta::Millis( + kSimulatedRtt + TimeToConnect(protocol_type)), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); } void TestTurnAlternateServerPingPong(ProtocolType protocol_type) { - std::vector redirect_addresses; + std::vector redirect_addresses; redirect_addresses.push_back(kTurnAlternateIntAddr); redirect_addresses.push_back(kTurnIntAddr); @@ -529,18 +567,21 @@ class TurnPortTest : public ::testing::Test, ProtocolAddress(kTurnIntAddr, protocol_type)); turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_error_, - TimeToGetAlternateTurnCandidate(protocol_type), - fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_error_; }, IsTrue(), + {.timeout = TimeDelta::Millis( + TimeToGetAlternateTurnCandidate(protocol_type)), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); ASSERT_EQ(0U, turn_port_->Candidates().size()); - rtc::SocketAddress address; + SocketAddress address; // Verify that we have exhausted all alternate servers instead of // failure caused by other errors. EXPECT_FALSE(redirector.ShouldRedirect(address, &address)); } void TestTurnAlternateServerDetectRepetition(ProtocolType protocol_type) { - std::vector redirect_addresses; + std::vector redirect_addresses; redirect_addresses.push_back(kTurnAlternateIntAddr); redirect_addresses.push_back(kTurnAlternateIntAddr); @@ -553,9 +594,12 @@ class TurnPortTest : public ::testing::Test, ProtocolAddress(kTurnIntAddr, protocol_type)); turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_error_, - TimeToGetAlternateTurnCandidate(protocol_type), - fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_error_; }, IsTrue(), + {.timeout = TimeDelta::Millis( + TimeToGetAlternateTurnCandidate(protocol_type)), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); ASSERT_EQ(0U, turn_port_->Candidates().size()); } @@ -568,20 +612,21 @@ class TurnPortTest : public ::testing::Test, const SocketAddress& server_address = ipv6 ? kTurnIPv6IntAddr : kTurnIntAddr; - std::vector redirect_addresses; + std::vector redirect_addresses; // Pick an unusual address in the 127.0.0.0/8 range to make sure more than // 127.0.0.1 is covered. SocketAddress loopback_address(ipv6 ? "::1" : "127.1.2.3", - TURN_SERVER_PORT); + webrtc::TURN_SERVER_PORT); redirect_addresses.push_back(loopback_address); // Make a socket and bind it to the local port, to make extra sure no // packet is sent to this address. - std::unique_ptr loopback_socket(ss_->CreateSocket( - AF_INET, protocol_type == PROTO_UDP ? SOCK_DGRAM : SOCK_STREAM)); + std::unique_ptr loopback_socket(ss_->CreateSocket( + AF_INET, + protocol_type == webrtc::PROTO_UDP ? 
SOCK_DGRAM : SOCK_STREAM)); ASSERT_NE(nullptr, loopback_socket.get()); ASSERT_EQ(0, loopback_socket->Bind(loopback_address)); - if (protocol_type == PROTO_TCP) { + if (protocol_type == webrtc::PROTO_TCP) { ASSERT_EQ(0, loopback_socket->Listen(1)); } @@ -593,19 +638,21 @@ class TurnPortTest : public ::testing::Test, ProtocolAddress(server_address, protocol_type)); turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT( - turn_error_, TimeToGetTurnCandidate(protocol_type), fake_clock_); + EXPECT_THAT(webrtc::WaitUntil([&] { return turn_error_; }, IsTrue(), + {.timeout = TimeDelta::Millis( + TimeToGetTurnCandidate(protocol_type)), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Wait for some extra time, and make sure no packets were received on the // loopback port we created (or in the case of TCP, no connection attempt // occurred). SIMULATED_WAIT(false, kSimulatedRtt, fake_clock_); - if (protocol_type == PROTO_UDP) { + if (protocol_type == webrtc::PROTO_UDP) { char buf[1]; EXPECT_EQ(-1, loopback_socket->Recv(&buf, 1, nullptr)); } else { - std::unique_ptr accepted_socket( - loopback_socket->Accept(nullptr)); + std::unique_ptr accepted_socket(loopback_socket->Accept(nullptr)); EXPECT_EQ(nullptr, accepted_socket.get()); } } @@ -629,26 +676,37 @@ class TurnPortTest : public ::testing::Test, Connection* conn2 = turn_port_->CreateConnection(udp_port_->Candidates()[0], Port::ORIGIN_MESSAGE); ASSERT_TRUE(conn2 != NULL); - ASSERT_TRUE_SIMULATED_WAIT(turn_create_permission_success_, kSimulatedRtt, - fake_clock_); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return turn_create_permission_success_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); conn2->Ping(0); // Two hops from TURN port to UDP port through TURN server, thus two RTTs. - EXPECT_EQ_SIMULATED_WAIT(Connection::STATE_WRITABLE, conn2->write_state(), - kSimulatedRtt * 2, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return conn2->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 2), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); EXPECT_TRUE(conn1->receiving()); EXPECT_TRUE(conn2->receiving()); EXPECT_EQ(Connection::STATE_WRITE_INIT, conn1->write_state()); // Send another ping from UDP to TURN. conn1->Ping(0); - EXPECT_EQ_SIMULATED_WAIT(Connection::STATE_WRITABLE, conn1->write_state(), - kSimulatedRtt * 2, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return conn1->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 2), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); EXPECT_TRUE(conn2->receiving()); } void TestDestroyTurnConnection() { - PrepareTurnAndUdpPorts(PROTO_UDP); + PrepareTurnAndUdpPorts(webrtc::PROTO_UDP); // Create connections on both ends. Connection* conn1 = udp_port_->CreateConnection(turn_port_->Candidates()[0], @@ -661,12 +719,19 @@ class TurnPortTest : public ::testing::Test, turn_port_->set_timeout_delay(10 * 60 * 1000); ASSERT_TRUE(conn2 != NULL); - ASSERT_TRUE_SIMULATED_WAIT(turn_create_permission_success_, kSimulatedRtt, - fake_clock_); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return turn_create_permission_success_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Make sure turn connection can receive. 
conn1->Ping(0); - EXPECT_EQ_SIMULATED_WAIT(Connection::STATE_WRITABLE, conn1->write_state(), - kSimulatedRtt * 2, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return conn1->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 2), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); EXPECT_FALSE(turn_unknown_address_); // Destroy the connection on the TURN port. The TurnEntry still exists, so @@ -674,14 +739,17 @@ class TurnPortTest : public ::testing::Test, turn_port_->DestroyConnection(conn2); conn1->Ping(0); - EXPECT_TRUE_SIMULATED_WAIT(turn_unknown_address_, kSimulatedRtt, - fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_unknown_address_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Wait for TurnEntry to expire. Timeout is 5 minutes. // Expect that it still processes an incoming ping and signals the // unknown address. turn_unknown_address_ = false; - fake_clock_.AdvanceTime(webrtc::TimeDelta::Seconds(5 * 60)); + fake_clock_.AdvanceTime(TimeDelta::Seconds(5 * 60)); // TODO(chromium:1395625): When `TurnPort` doesn't find connection objects // for incoming packets, it forwards calls to the parent class, `Port`. This @@ -699,7 +767,7 @@ class TurnPortTest : public ::testing::Test, conn1->set_remote_password_for_test("bad"); auto msg = conn1->BuildPingRequestForTest(); - rtc::ByteBufferWriter buf; + ByteBufferWriter buf; msg->Write(&buf); conn1->Send(buf.Data(), buf.Length(), options); @@ -707,14 +775,20 @@ class TurnPortTest : public ::testing::Test, conn1->set_remote_password_for_test(pwd); conn1->Ping(0); - EXPECT_TRUE_SIMULATED_WAIT(turn_unknown_address_, kSimulatedRtt, - fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_unknown_address_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // If the connection is created again, it will start to receive pings. 
conn2 = turn_port_->CreateConnection(udp_port_->Candidates()[0], Port::ORIGIN_MESSAGE); conn1->Ping(0); - EXPECT_TRUE_SIMULATED_WAIT(conn2->receiving(), kSimulatedRtt, fake_clock_); + EXPECT_THAT(webrtc::WaitUntil([&] { return conn2->receiving(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); } void TestTurnSendData(ProtocolType protocol_type) { @@ -727,16 +801,34 @@ class TurnPortTest : public ::testing::Test, Port::ORIGIN_MESSAGE); ASSERT_TRUE(conn1 != NULL); ASSERT_TRUE(conn2 != NULL); - conn1->SignalReadPacket.connect(static_cast(this), - &TurnPortTest::OnTurnReadPacket); - conn2->SignalReadPacket.connect(static_cast(this), - &TurnPortTest::OnUdpReadPacket); + conn1->RegisterReceivedPacketCallback( + [&](Connection* connection, const ReceivedIpPacket& packet) { + turn_packets_.push_back( + Buffer(packet.payload().data(), packet.payload().size())); + }); + conn1->SignalDestroyed.connect(this, + &TurnPortTest::OnConnectionSignalDestroyed); + conn2->RegisterReceivedPacketCallback( + [&](Connection* connection, const ReceivedIpPacket& packet) { + udp_packets_.push_back( + Buffer(packet.payload().data(), packet.payload().size())); + }); + conn2->SignalDestroyed.connect(this, + &TurnPortTest::OnConnectionSignalDestroyed); conn1->Ping(0); - EXPECT_EQ_SIMULATED_WAIT(Connection::STATE_WRITABLE, conn1->write_state(), - kSimulatedRtt * 2, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return conn1->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 2), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); conn2->Ping(0); - EXPECT_EQ_SIMULATED_WAIT(Connection::STATE_WRITABLE, conn2->write_state(), - kSimulatedRtt * 2, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return conn2->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 2), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Send some data. 
size_t num_packets = 256; @@ -764,8 +856,12 @@ class TurnPortTest : public ::testing::Test, void TestTurnReleaseAllocation(ProtocolType protocol_type) { PrepareTurnAndUdpPorts(protocol_type); turn_port_.reset(); - EXPECT_EQ_SIMULATED_WAIT(0U, turn_server_.server()->allocations().size(), - kSimulatedRtt, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return turn_server_.server()->allocations().size(); }, Eq(0U), + {.timeout = TimeDelta::Millis(kSimulatedRtt), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); } // Test that the TURN allocation is released by sending a refresh request @@ -780,16 +876,35 @@ class TurnPortTest : public ::testing::Test, Port::ORIGIN_MESSAGE); ASSERT_TRUE(conn1 != NULL); ASSERT_TRUE(conn2 != NULL); - conn1->SignalReadPacket.connect(static_cast(this), - &TurnPortTest::OnTurnReadPacket); - conn2->SignalReadPacket.connect(static_cast(this), - &TurnPortTest::OnUdpReadPacket); + conn1->RegisterReceivedPacketCallback( + [&](Connection* connection, const ReceivedIpPacket& packet) { + turn_packets_.push_back( + Buffer(packet.payload().data(), packet.payload().size())); + }); + conn1->SignalDestroyed.connect(this, + &TurnPortTest::OnConnectionSignalDestroyed); + conn2->RegisterReceivedPacketCallback( + [&](Connection* connection, const ReceivedIpPacket& packet) { + udp_packets_.push_back( + Buffer(packet.payload().data(), packet.payload().size())); + }); + conn2->SignalDestroyed.connect(this, + &TurnPortTest::OnConnectionSignalDestroyed); + conn1->Ping(0); - EXPECT_EQ_SIMULATED_WAIT(Connection::STATE_WRITABLE, conn1->write_state(), - kSimulatedRtt * 2, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return conn1->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 2), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); conn2->Ping(0); - EXPECT_EQ_SIMULATED_WAIT(Connection::STATE_WRITABLE, conn2->write_state(), - kSimulatedRtt * 2, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return conn2->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 2), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Send some data from Udp to TurnPort. unsigned char buf[256] = {0}; @@ -800,7 +915,10 @@ class TurnPortTest : public ::testing::Test, turn_port_->Release(); // Wait for the TurnPort to signal closed. - ASSERT_TRUE_SIMULATED_WAIT(turn_port_closed_, kSimulatedRtt, fake_clock_); + ASSERT_THAT(webrtc::WaitUntil([&] { return turn_port_closed_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // But the data should have arrived first. ASSERT_EQ(1ul, turn_packets_.size()); @@ -811,19 +929,17 @@ class TurnPortTest : public ::testing::Test, } protected: - virtual rtc::PacketSocketFactory* socket_factory() { - return &socket_factory_; - } + virtual PacketSocketFactory* socket_factory() { return &socket_factory_; } - webrtc::test::ScopedKeyValueConfig field_trials_; - rtc::ScopedFakeClock fake_clock_; + ScopedFakeClock fake_clock_; + const Environment env_ = CreateEnvironment(); // When a "create port" helper method is called with an IP, we create a // Network with that IP and add it to this list. Using a list instead of a // vector so that when it grows, pointers aren't invalidated. 
- std::list networks_; + std::list networks_; std::unique_ptr ss_; - rtc::AutoSocketServerThread main_; - std::unique_ptr socket_; + AutoSocketServerThread main_; + std::unique_ptr socket_; TestTurnServer turn_server_; std::unique_ptr turn_port_; std::unique_ptr udp_port_; @@ -836,47 +952,51 @@ class TurnPortTest : public ::testing::Test, bool udp_ready_ = false; bool test_finish_ = false; bool turn_refresh_success_ = false; - std::vector turn_packets_; - std::vector udp_packets_; - rtc::PacketOptions options; - std::unique_ptr turn_customizer_; - cricket::IceCandidateErrorEvent error_event_; + std::vector turn_packets_; + std::vector udp_packets_; + AsyncSocketPacketOptions options; + std::unique_ptr turn_customizer_; + IceCandidateErrorEvent error_event_; private: - rtc::BasicPacketSocketFactory socket_factory_; + BasicPacketSocketFactory socket_factory_; }; TEST_F(TurnPortTest, TestTurnPortType) { CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); - EXPECT_EQ(cricket::RELAY_PORT_TYPE, turn_port_->Type()); + EXPECT_EQ(IceCandidateType::kRelay, turn_port_->Type()); } // Tests that the URL of the servers can be correctly reconstructed when // gathering the candidates. TEST_F(TurnPortTest, TestReconstructedServerUrlForUdpIPv4) { CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); - TestReconstructedServerUrl(PROTO_UDP, "turn:99.99.99.3:3478?transport=udp"); + TestReconstructedServerUrl(webrtc::PROTO_UDP, + "turn:99.99.99.3:3478?transport=udp"); } TEST_F(TurnPortTest, TestReconstructedServerUrlForUdpIPv6) { - turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, PROTO_UDP); + turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, webrtc::PROTO_UDP); CreateTurnPort(kLocalIPv6Addr, kTurnUsername, kTurnPassword, kTurnUdpIPv6ProtoAddr); + // Should add [] around the IPv6. TestReconstructedServerUrl( - PROTO_UDP, - "turn:2400:4030:1:2c00:be30:abcd:efab:cdef:3478?transport=udp"); + webrtc::PROTO_UDP, + "turn:[2400:4030:1:2c00:be30:abcd:efab:cdef]:3478?transport=udp"); } TEST_F(TurnPortTest, TestReconstructedServerUrlForTcp) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTcpProtoAddr); - TestReconstructedServerUrl(PROTO_TCP, "turn:99.99.99.4:3478?transport=tcp"); + TestReconstructedServerUrl(webrtc::PROTO_TCP, + "turn:99.99.99.4:3478?transport=tcp"); } TEST_F(TurnPortTest, TestReconstructedServerUrlForTls) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TLS); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TLS); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTlsProtoAddr); - TestReconstructedServerUrl(PROTO_TLS, "turns:99.99.99.4:3478?transport=tcp"); + TestReconstructedServerUrl(webrtc::PROTO_TLS, + "turns:99.99.99.4:3478?transport=tcp"); } TEST_F(TurnPortTest, TestReconstructedServerUrlForHostname) { @@ -886,7 +1006,10 @@ TEST_F(TurnPortTest, TestReconstructedServerUrlForHostname) { // As VSS doesn't provide DNS resolution, name resolve will fail, // the error will be set and contain the url. 
turn_port_->PrepareAddress(); - EXPECT_TRUE_WAIT(turn_error_, kResolverTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_error_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kResolverTimeout)}), + webrtc::IsRtcOk()); std::string server_url = "turn:" + kTurnInvalidAddr.ToString() + "?transport=udp"; ASSERT_EQ(error_event_.url, server_url); @@ -895,7 +1018,7 @@ TEST_F(TurnPortTest, TestReconstructedServerUrlForHostname) { // Do a normal TURN allocation. TEST_F(TurnPortTest, TestTurnAllocate) { CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); - EXPECT_EQ(0, turn_port_->SetOption(rtc::Socket::OPT_SNDBUF, 10 * 1024)); + EXPECT_EQ(0, turn_port_->SetOption(Socket::OPT_SNDBUF, 10 * 1024)); TestTurnAllocateSucceeds(kSimulatedRtt * 2); } @@ -905,9 +1028,9 @@ class TurnLoggingIdValidator : public StunMessageObserver { : expect_val_(expect_val) {} ~TurnLoggingIdValidator() {} void ReceivedMessage(const TurnMessage* msg) override { - if (msg->type() == cricket::STUN_ALLOCATE_REQUEST) { + if (msg->type() == STUN_ALLOCATE_REQUEST) { const StunByteStringAttribute* attr = - msg->GetByteString(cricket::STUN_ATTR_TURN_LOGGING_ID); + msg->GetByteString(STUN_ATTR_TURN_LOGGING_ID); if (expect_val_) { ASSERT_NE(nullptr, attr); ASSERT_EQ(expect_val_, attr->string_view()); @@ -916,7 +1039,7 @@ class TurnLoggingIdValidator : public StunMessageObserver { } } } - void ReceivedChannelData(const char* data, size_t size) override {} + void ReceivedChannelData(ArrayView packet) override {} private: const char* expect_val_; @@ -941,18 +1064,55 @@ TEST_F(TurnPortTest, TestTurnAllocateWithoutLoggingId) { TEST_F(TurnPortTest, TestTurnBadCredentials) { CreateTurnPort(kTurnUsername, "bad", kTurnUdpProtoAddr); turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_error_, kSimulatedRtt * 3, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_error_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 3), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); ASSERT_EQ(0U, turn_port_->Candidates().size()); - EXPECT_EQ_SIMULATED_WAIT(error_event_.error_code, STUN_ERROR_UNAUTHORIZED, - kSimulatedRtt * 3, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return error_event_.error_code; }, + Eq(STUN_ERROR_UNAUTHORIZED), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 3), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); EXPECT_EQ(error_event_.error_text, "Unauthorized"); } +// Test that we fail without emitting an error if we try to get an address from +// a TURN server with a different address family. IPv4 local, IPv6 TURN. +TEST_F(TurnPortTest, TestServerAddressFamilyMismatch) { + CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpIPv6ProtoAddr); + turn_port_->PrepareAddress(); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_error_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 3), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); + ASSERT_EQ(0U, turn_port_->Candidates().size()); + EXPECT_EQ(0, error_event_.error_code); +} + +// Test that we fail without emitting an error if we try to get an address from +// a TURN server with a different address family. IPv6 local, IPv4 TURN. 
+TEST_F(TurnPortTest, TestServerAddressFamilyMismatch6) { + CreateTurnPort(kLocalIPv6Addr, kTurnUsername, kTurnPassword, + kTurnUdpProtoAddr); + turn_port_->PrepareAddress(); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_error_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 3), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); + ASSERT_EQ(0U, turn_port_->Candidates().size()); + EXPECT_EQ(0, error_event_.error_code); +} + // Testing a normal UDP allocation using TCP connection. TEST_F(TurnPortTest, TestTurnTcpAllocate) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTcpProtoAddr); - EXPECT_EQ(0, turn_port_->SetOption(rtc::Socket::OPT_SNDBUF, 10 * 1024)); + EXPECT_EQ(0, turn_port_->SetOption(Socket::OPT_SNDBUF, 10 * 1024)); TestTurnAllocateSucceeds(kSimulatedRtt * 3); } @@ -965,9 +1125,9 @@ TEST_F(TurnPortTest, TestTurnTcpAllocationWhenProxyChangesAddressToLocalHost) { // kLocalAddr, it will end up using localhost instead. ss_->SetAlternativeLocalAddress(kLocalAddr1.ipaddr(), local_address.ipaddr()); - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); CreateTurnPort(kLocalAddr1, kTurnUsername, kTurnPassword, kTurnTcpProtoAddr); - EXPECT_EQ(0, turn_port_->SetOption(rtc::Socket::OPT_SNDBUF, 10 * 1024)); + EXPECT_EQ(0, turn_port_->SetOption(Socket::OPT_SNDBUF, 10 * 1024)); TestTurnAllocateSucceeds(kSimulatedRtt * 3); // Verify that the socket actually used localhost, otherwise this test isn't @@ -988,7 +1148,7 @@ TEST_F(TurnPortTest, ss_->SetAlternativeLocalAddress(kLocalAddr1.ipaddr(), kLocalAddr2.ipaddr()); // Set up TURN server to use TCP (this logic only exists for TCP). - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); // Create TURN port and tell it to start allocation. CreateTurnPort(kLocalAddr1, kTurnUsername, kTurnPassword, kTurnTcpProtoAddr); @@ -996,16 +1156,22 @@ TEST_F(TurnPortTest, // Shouldn't take more than 1 RTT to realize the bound address isn't the one // expected. 
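// The same WaitUntil helper takes a value matcher when the wait is for a
// specific result rather than a boolean; the lines below use it to wait on
// error_event_.error_code. Sketch, with the names and error constant taken
// from that hunk:
EXPECT_THAT(
    webrtc::WaitUntil([&] { return error_event_.error_code; },
                      ::testing::Eq(STUN_ERROR_SERVER_NOT_REACHABLE),
                      {.timeout = webrtc::TimeDelta::Millis(kSimulatedRtt),
                       .clock = &fake_clock_}),
    webrtc::IsRtcOk());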
- EXPECT_TRUE_SIMULATED_WAIT(turn_error_, kSimulatedRtt, fake_clock_); - EXPECT_EQ_SIMULATED_WAIT(error_event_.error_code, STUN_ERROR_GLOBAL_FAILURE, - kSimulatedRtt, fake_clock_); - ASSERT_NE(error_event_.error_text.find('.'), std::string::npos); - ASSERT_NE(error_event_.address.find(kLocalAddr2.HostAsSensitiveURIString()), + EXPECT_THAT(webrtc::WaitUntil([&] { return turn_error_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); + EXPECT_THAT(webrtc::WaitUntil([&] { return error_event_.error_code; }, + Eq(STUN_ERROR_SERVER_NOT_REACHABLE), + {.timeout = TimeDelta::Millis(kSimulatedRtt), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); + EXPECT_NE(error_event_.error_text.find('.'), std::string::npos); + EXPECT_NE(error_event_.address.find(kLocalAddr2.HostAsSensitiveURIString()), std::string::npos); - ASSERT_NE(error_event_.port, 0); + EXPECT_NE(error_event_.port, 0); std::string server_url = "turn:" + kTurnTcpIntAddr.ToString() + "?transport=tcp"; - ASSERT_EQ(error_event_.url, server_url); + EXPECT_EQ(error_event_.url, server_url); } // A caveat for the above logic: if the socket ends up bound to one of the IPs @@ -1016,12 +1182,12 @@ TEST_F(TurnPortTest, TurnTcpAllocationNotDiscardedIfNotBoundToBestIP) { // Set up a network with kLocalAddr1 as the "best" IP, and kLocalAddr2 as an // alternate. - rtc::Network* network = MakeNetwork(kLocalAddr1); + Network* network = MakeNetwork(kLocalAddr1); network->AddIP(kLocalAddr2.ipaddr()); ASSERT_EQ(kLocalAddr1.ipaddr(), network->GetBestIP()); // Set up TURN server to use TCP (this logic only exists for TCP). - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); // Create TURN port using our special Network, and tell it to start // allocation. @@ -1030,7 +1196,11 @@ TEST_F(TurnPortTest, TurnTcpAllocationNotDiscardedIfNotBoundToBestIP) { turn_port_->PrepareAddress(); // Candidate should be gathered as normally. - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 3, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 3), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); ASSERT_EQ(1U, turn_port_->Candidates().size()); // Verify that the socket actually used the alternate address, otherwise this @@ -1040,41 +1210,51 @@ TEST_F(TurnPortTest, TurnTcpAllocationNotDiscardedIfNotBoundToBestIP) { } // Regression test for crbug.com/webrtc/8972, caused by buggy comparison -// between rtc::IPAddress and rtc::InterfaceAddress. +// between webrtc::IPAddress and webrtc::InterfaceAddress. TEST_F(TurnPortTest, TCPPortNotDiscardedIfBoundToTemporaryIP) { networks_.emplace_back("unittest", "unittest", kLocalIPv6Addr.ipaddr(), 32); - networks_.back().AddIP(rtc::InterfaceAddress( - kLocalIPv6Addr.ipaddr(), rtc::IPV6_ADDRESS_FLAG_TEMPORARY)); + networks_.back().AddIP(InterfaceAddress(kLocalIPv6Addr.ipaddr(), + webrtc::IPV6_ADDRESS_FLAG_TEMPORARY)); // Set up TURN server to use TCP (this logic only exists for TCP). - turn_server_.AddInternalSocket(kTurnIPv6IntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnIPv6IntAddr, webrtc::PROTO_TCP); // Create TURN port using our special Network, and tell it to start // allocation. 
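// Sketch of the Network built a few lines above for the crbug.com/webrtc/8972
// regression test: a single temporary IPv6 address on an otherwise ordinary
// Network. Constructor arguments mirror the test; nothing here is new API.
Network ipv6_net("unittest", "unittest", kLocalIPv6Addr.ipaddr(), 32);
ipv6_net.AddIP(InterfaceAddress(kLocalIPv6Addr.ipaddr(),
                                webrtc::IPV6_ADDRESS_FLAG_TEMPORARY));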
CreateTurnPortWithNetwork( &networks_.back(), kTurnUsername, kTurnPassword, - cricket::ProtocolAddress(kTurnIPv6IntAddr, PROTO_TCP)); + ProtocolAddress(kTurnIPv6IntAddr, webrtc::PROTO_TCP)); turn_port_->PrepareAddress(); // Candidate should be gathered as normally. - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 3, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 3), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); ASSERT_EQ(1U, turn_port_->Candidates().size()); } // Testing turn port will attempt to create TCP socket on address resolution // failure. TEST_F(TurnPortTest, TestTurnTcpOnAddressResolveFailure) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); CreateTurnPort(kTurnUsername, kTurnPassword, - ProtocolAddress(kTurnInvalidAddr, PROTO_TCP)); + ProtocolAddress(kTurnInvalidAddr, webrtc::PROTO_TCP)); turn_port_->PrepareAddress(); - EXPECT_TRUE_WAIT(turn_error_, kResolverTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_error_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kResolverTimeout)}), + webrtc::IsRtcOk()); // As VSS doesn't provide DNS resolution, name resolve will fail. TurnPort // will proceed in creating a TCP socket which will fail as there is no // server on the above domain and error will be set to SOCKET_ERROR. EXPECT_EQ(SOCKET_ERROR, turn_port_->error()); - EXPECT_EQ_SIMULATED_WAIT(error_event_.error_code, SERVER_NOT_REACHABLE_ERROR, - kSimulatedRtt, fake_clock_); + EXPECT_THAT(webrtc::WaitUntil([&] { return error_event_.error_code; }, + Eq(STUN_ERROR_SERVER_NOT_REACHABLE), + {.timeout = TimeDelta::Millis(kSimulatedRtt), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); std::string server_url = "turn:" + kTurnInvalidAddr.ToString() + "?transport=tcp"; ASSERT_EQ(error_event_.url, server_url); @@ -1083,11 +1263,14 @@ TEST_F(TurnPortTest, TestTurnTcpOnAddressResolveFailure) { // Testing turn port will attempt to create TLS socket on address resolution // failure. TEST_F(TurnPortTest, TestTurnTlsOnAddressResolveFailure) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TLS); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TLS); CreateTurnPort(kTurnUsername, kTurnPassword, - ProtocolAddress(kTurnInvalidAddr, PROTO_TLS)); + ProtocolAddress(kTurnInvalidAddr, webrtc::PROTO_TLS)); turn_port_->PrepareAddress(); - EXPECT_TRUE_WAIT(turn_error_, kResolverTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_error_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kResolverTimeout)}), + webrtc::IsRtcOk()); EXPECT_EQ(SOCKET_ERROR, turn_port_->error()); } @@ -1095,9 +1278,12 @@ TEST_F(TurnPortTest, TestTurnTlsOnAddressResolveFailure) { // and return allocate failure. TEST_F(TurnPortTest, TestTurnUdpOnAddressResolveFailure) { CreateTurnPort(kTurnUsername, kTurnPassword, - ProtocolAddress(kTurnInvalidAddr, PROTO_UDP)); + ProtocolAddress(kTurnInvalidAddr, webrtc::PROTO_UDP)); turn_port_->PrepareAddress(); - EXPECT_TRUE_WAIT(turn_error_, kResolverTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_error_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kResolverTimeout)}), + webrtc::IsRtcOk()); // Error from turn port will not be socket error. 
EXPECT_NE(SOCKET_ERROR, turn_port_->error()); } @@ -1106,7 +1292,11 @@ TEST_F(TurnPortTest, TestTurnUdpOnAddressResolveFailure) { TEST_F(TurnPortTest, TestTurnAllocateBadPassword) { CreateTurnPort(kTurnUsername, "bad", kTurnUdpProtoAddr); turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_error_, kSimulatedRtt * 2, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_error_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 2), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); ASSERT_EQ(0U, turn_port_->Candidates().size()); } @@ -1116,8 +1306,12 @@ TEST_F(TurnPortTest, TestTurnAllocateNonceResetAfterAllocateMismatch) { // Do a normal allocation first. CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 2, fake_clock_); - rtc::SocketAddress first_addr(turn_port_->socket()->GetLocalAddress()); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 2), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); + SocketAddress first_addr(turn_port_->socket()->GetLocalAddress()); // Destroy the turnport while keeping the drop probability to 1 to // suppress the release of the allocation at the server. ss_->set_drop_probability(1.0); @@ -1134,7 +1328,7 @@ TEST_F(TurnPortTest, TestTurnAllocateNonceResetAfterAllocateMismatch) { // using timestamp `ts_before` but then get an allocate mismatch error and // receive an even newer nonce based on the system clock. `ts_before` is // chosen so that the two NONCEs generated by the server will be different. - int64_t ts_before = rtc::TimeMillis() - 1; + int64_t ts_before = webrtc::TimeMillis() - 1; std::string first_nonce = turn_server_.server()->SetTimestampForNextNonce(ts_before); turn_port_->PrepareAddress(); @@ -1142,7 +1336,11 @@ TEST_F(TurnPortTest, TestTurnAllocateNonceResetAfterAllocateMismatch) { // Four round trips; first we'll get "stale nonce", then // "allocate mismatch", then "stale nonce" again, then finally it will // succeed. - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 4, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 4), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); EXPECT_NE(first_nonce, turn_port_->nonce()); } @@ -1152,8 +1350,12 @@ TEST_F(TurnPortTest, TestTurnAllocateMismatch) { // Do a normal allocation first. CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 2, fake_clock_); - rtc::SocketAddress first_addr(turn_port_->socket()->GetLocalAddress()); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 2), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); + SocketAddress first_addr(turn_port_->socket()->GetLocalAddress()); // Clear connected_ flag on turnport to suppress the release of // the allocation. @@ -1172,7 +1374,11 @@ TEST_F(TurnPortTest, TestTurnAllocateMismatch) { // Four round trips; first we'll get "stale nonce", then // "allocate mismatch", then "stale nonce" again, then finally it will // succeed. 
- EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 4, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 4), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Verifies that the new port has a different address now. EXPECT_NE(first_addr, turn_port_->socket()->GetLocalAddress()); @@ -1180,8 +1386,10 @@ TEST_F(TurnPortTest, TestTurnAllocateMismatch) { // Verify that all packets received from the shared socket are ignored. std::string test_packet = "Test packet"; EXPECT_FALSE(turn_port_->HandleIncomingPacket( - socket_.get(), test_packet.data(), test_packet.size(), - rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0), rtc::TimeMicros())); + socket_.get(), + ReceivedIpPacket::CreateFromLegacy( + test_packet.data(), test_packet.size(), webrtc::TimeMicros(), + SocketAddress(kTurnUdpExtAddr.ipaddr(), 0)))); } // Tests that a shared-socket-TurnPort creates its own socket after @@ -1190,8 +1398,12 @@ TEST_F(TurnPortTest, TestSharedSocketAllocateMismatch) { // Do a normal allocation first. CreateSharedTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 2, fake_clock_); - rtc::SocketAddress first_addr(turn_port_->socket()->GetLocalAddress()); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 2), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); + SocketAddress first_addr(turn_port_->socket()->GetLocalAddress()); // Clear connected_ flag on turnport to suppress the release of // the allocation. @@ -1206,7 +1418,11 @@ TEST_F(TurnPortTest, TestSharedSocketAllocateMismatch) { turn_port_->PrepareAddress(); // Extra 2 round trips due to allocate mismatch. - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 4, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 4), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Verifies that the new port has a different address now. EXPECT_NE(first_addr, turn_port_->socket()->GetLocalAddress()); @@ -1214,13 +1430,17 @@ TEST_F(TurnPortTest, TestSharedSocketAllocateMismatch) { } TEST_F(TurnPortTest, TestTurnTcpAllocateMismatch) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTcpProtoAddr); // Do a normal allocation first. turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 3, fake_clock_); - rtc::SocketAddress first_addr(turn_port_->socket()->GetLocalAddress()); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 3), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); + SocketAddress first_addr(turn_port_->socket()->GetLocalAddress()); // Clear connected_ flag on turnport to suppress the release of // the allocation. @@ -1237,7 +1457,11 @@ TEST_F(TurnPortTest, TestTurnTcpAllocateMismatch) { EXPECT_EQ(first_addr, turn_port_->socket()->GetLocalAddress()); // Extra 2 round trips due to allocate mismatch. 
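// The shared-socket hunk above now wraps raw buffers in a ReceivedIpPacket
// before handing them to HandleIncomingPacket(). Sketch of the adapter call,
// with payload, timestamp and source address as used in that hunk (the packet
// is expected to be ignored after the allocate mismatch):
std::string test_packet = "Test packet";
EXPECT_FALSE(turn_port_->HandleIncomingPacket(
    socket_.get(),
    ReceivedIpPacket::CreateFromLegacy(
        test_packet.data(), test_packet.size(), webrtc::TimeMicros(),
        SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))));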
- EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 5, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 5), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Verifies that the new port has a different address now. EXPECT_NE(first_addr, turn_port_->socket()->GetLocalAddress()); @@ -1245,7 +1469,7 @@ TEST_F(TurnPortTest, TestTurnTcpAllocateMismatch) { TEST_F(TurnPortTest, TestRefreshRequestGetsErrorResponse) { CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); - PrepareTurnAndUdpPorts(PROTO_UDP); + PrepareTurnAndUdpPorts(webrtc::PROTO_UDP); turn_port_->CreateConnection(udp_port_->Candidates()[0], Port::ORIGIN_MESSAGE); // Set bad credentials. @@ -1256,11 +1480,17 @@ TEST_F(TurnPortTest, TestRefreshRequestGetsErrorResponse) { // When this succeeds, it will schedule a new RefreshRequest with the bad // credential. turn_port_->request_manager().FlushForTest(TURN_REFRESH_REQUEST); - EXPECT_TRUE_SIMULATED_WAIT(turn_refresh_success_, kSimulatedRtt, fake_clock_); + EXPECT_THAT(webrtc::WaitUntil([&] { return turn_refresh_success_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Flush it again, it will receive a bad response. turn_port_->request_manager().FlushForTest(TURN_REFRESH_REQUEST); - EXPECT_TRUE_SIMULATED_WAIT(!turn_refresh_success_, kSimulatedRtt, - fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return !turn_refresh_success_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), .clock = &fake_clock_}), + webrtc::IsRtcOk()); EXPECT_FALSE(turn_port_->connected()); EXPECT_TRUE(CheckAllConnectionsFailedAndPruned()); EXPECT_FALSE(turn_port_->HasRequests()); @@ -1270,7 +1500,7 @@ TEST_F(TurnPortTest, TestRefreshRequestGetsErrorResponse) { // closed. TEST_F(TurnPortTest, TestStopProcessingPacketsAfterClosed) { CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); - PrepareTurnAndUdpPorts(PROTO_UDP); + PrepareTurnAndUdpPorts(webrtc::PROTO_UDP); Connection* conn1 = turn_port_->CreateConnection(udp_port_->Candidates()[0], Port::ORIGIN_MESSAGE); Connection* conn2 = udp_port_->CreateConnection(turn_port_->Candidates()[0], @@ -1279,8 +1509,12 @@ TEST_F(TurnPortTest, TestStopProcessingPacketsAfterClosed) { ASSERT_TRUE(conn2 != NULL); // Make sure conn2 is writable. conn2->Ping(0); - EXPECT_EQ_SIMULATED_WAIT(Connection::STATE_WRITABLE, conn2->write_state(), - kSimulatedRtt * 2, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return conn2->write_state(); }, + Eq(Connection::STATE_WRITABLE), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 2), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); turn_port_->CloseForTest(); SIMULATED_WAIT(false, kSimulatedRtt, fake_clock_); @@ -1294,9 +1528,9 @@ TEST_F(TurnPortTest, TestStopProcessingPacketsAfterClosed) { // Test that CreateConnection will return null if port becomes disconnected. TEST_F(TurnPortTest, TestCreateConnectionWhenSocketClosed) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTcpProtoAddr); - PrepareTurnAndUdpPorts(PROTO_TCP); + PrepareTurnAndUdpPorts(webrtc::PROTO_TCP); // Create a connection. 
Connection* conn1 = turn_port_->CreateConnection(udp_port_->Candidates()[0], Port::ORIGIN_MESSAGE); @@ -1312,94 +1546,97 @@ TEST_F(TurnPortTest, TestCreateConnectionWhenSocketClosed) { // Tests that when a TCP socket is closed, the respective TURN connection will // be destroyed. TEST_F(TurnPortTest, TestSocketCloseWillDestroyConnection) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTcpProtoAddr); - PrepareTurnAndUdpPorts(PROTO_TCP); + PrepareTurnAndUdpPorts(webrtc::PROTO_TCP); Connection* conn = turn_port_->CreateConnection(udp_port_->Candidates()[0], Port::ORIGIN_MESSAGE); EXPECT_NE(nullptr, conn); EXPECT_TRUE(!turn_port_->connections().empty()); turn_port_->socket()->NotifyClosedForTest(1); - EXPECT_TRUE_SIMULATED_WAIT(turn_port_->connections().empty(), - kConnectionDestructionDelay, fake_clock_); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return turn_port_->connections().empty(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kConnectionDestructionDelay), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); } // Test try-alternate-server feature. TEST_F(TurnPortTest, TestTurnAlternateServerUDP) { - TestTurnAlternateServer(PROTO_UDP); + TestTurnAlternateServer(webrtc::PROTO_UDP); } TEST_F(TurnPortTest, TestTurnAlternateServerTCP) { - TestTurnAlternateServer(PROTO_TCP); + TestTurnAlternateServer(webrtc::PROTO_TCP); } TEST_F(TurnPortTest, TestTurnAlternateServerTLS) { - TestTurnAlternateServer(PROTO_TLS); + TestTurnAlternateServer(webrtc::PROTO_TLS); } // Test that we fail when we redirect to an address different from // current IP family. TEST_F(TurnPortTest, TestTurnAlternateServerV4toV6UDP) { - TestTurnAlternateServerV4toV6(PROTO_UDP); + TestTurnAlternateServerV4toV6(webrtc::PROTO_UDP); } TEST_F(TurnPortTest, TestTurnAlternateServerV4toV6TCP) { - TestTurnAlternateServerV4toV6(PROTO_TCP); + TestTurnAlternateServerV4toV6(webrtc::PROTO_TCP); } TEST_F(TurnPortTest, TestTurnAlternateServerV4toV6TLS) { - TestTurnAlternateServerV4toV6(PROTO_TLS); + TestTurnAlternateServerV4toV6(webrtc::PROTO_TLS); } // Test try-alternate-server catches the case of pingpong. TEST_F(TurnPortTest, TestTurnAlternateServerPingPongUDP) { - TestTurnAlternateServerPingPong(PROTO_UDP); + TestTurnAlternateServerPingPong(webrtc::PROTO_UDP); } TEST_F(TurnPortTest, TestTurnAlternateServerPingPongTCP) { - TestTurnAlternateServerPingPong(PROTO_TCP); + TestTurnAlternateServerPingPong(webrtc::PROTO_TCP); } TEST_F(TurnPortTest, TestTurnAlternateServerPingPongTLS) { - TestTurnAlternateServerPingPong(PROTO_TLS); + TestTurnAlternateServerPingPong(webrtc::PROTO_TLS); } // Test try-alternate-server catch the case of repeated server. TEST_F(TurnPortTest, TestTurnAlternateServerDetectRepetitionUDP) { - TestTurnAlternateServerDetectRepetition(PROTO_UDP); + TestTurnAlternateServerDetectRepetition(webrtc::PROTO_UDP); } TEST_F(TurnPortTest, TestTurnAlternateServerDetectRepetitionTCP) { - TestTurnAlternateServerDetectRepetition(PROTO_TCP); + TestTurnAlternateServerDetectRepetition(webrtc::PROTO_TCP); } TEST_F(TurnPortTest, TestTurnAlternateServerDetectRepetitionTLS) { - TestTurnAlternateServerDetectRepetition(PROTO_TCP); + TestTurnAlternateServerDetectRepetition(webrtc::PROTO_TCP); } // Test catching the case of a redirect to loopback. 
TEST_F(TurnPortTest, TestTurnAlternateServerLoopbackUdpIpv4) { - TestTurnAlternateServerLoopback(PROTO_UDP, false); + TestTurnAlternateServerLoopback(webrtc::PROTO_UDP, false); } TEST_F(TurnPortTest, TestTurnAlternateServerLoopbackUdpIpv6) { - TestTurnAlternateServerLoopback(PROTO_UDP, true); + TestTurnAlternateServerLoopback(webrtc::PROTO_UDP, true); } TEST_F(TurnPortTest, TestTurnAlternateServerLoopbackTcpIpv4) { - TestTurnAlternateServerLoopback(PROTO_TCP, false); + TestTurnAlternateServerLoopback(webrtc::PROTO_TCP, false); } TEST_F(TurnPortTest, TestTurnAlternateServerLoopbackTcpIpv6) { - TestTurnAlternateServerLoopback(PROTO_TCP, true); + TestTurnAlternateServerLoopback(webrtc::PROTO_TCP, true); } TEST_F(TurnPortTest, TestTurnAlternateServerLoopbackTlsIpv4) { - TestTurnAlternateServerLoopback(PROTO_TLS, false); + TestTurnAlternateServerLoopback(webrtc::PROTO_TLS, false); } TEST_F(TurnPortTest, TestTurnAlternateServerLoopbackTlsIpv6) { - TestTurnAlternateServerLoopback(PROTO_TLS, true); + TestTurnAlternateServerLoopback(webrtc::PROTO_TLS, true); } // Do a TURN allocation and try to send a packet to it from the outside. @@ -1408,27 +1645,27 @@ TEST_F(TurnPortTest, TestTurnAlternateServerLoopbackTlsIpv6) { // outside. It should now work as well. TEST_F(TurnPortTest, TestTurnConnection) { CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); - TestTurnConnection(PROTO_UDP); + TestTurnConnection(webrtc::PROTO_UDP); } // Similar to above, except that this test will use the shared socket. TEST_F(TurnPortTest, TestTurnConnectionUsingSharedSocket) { CreateSharedTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); - TestTurnConnection(PROTO_UDP); + TestTurnConnection(webrtc::PROTO_UDP); } // Test that we can establish a TCP connection with TURN server. TEST_F(TurnPortTest, TestTurnTcpConnection) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTcpProtoAddr); - TestTurnConnection(PROTO_TCP); + TestTurnConnection(webrtc::PROTO_TCP); } // Test that we can establish a TLS connection with TURN server. TEST_F(TurnPortTest, TestTurnTlsConnection) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TLS); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TLS); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTlsProtoAddr); - TestTurnConnection(PROTO_TLS); + TestTurnConnection(webrtc::PROTO_TLS); } // Test that if a connection on a TURN port is destroyed, the TURN port can @@ -1451,7 +1688,7 @@ TEST_F(TurnPortTest, TestDestroyTurnConnectionUsingSharedSocket) { TEST_F(TurnPortTest, TestTurnConnectionUsingOTUNonce) { turn_server_.set_enable_otu_nonce(true); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); - TestTurnConnection(PROTO_UDP); + TestTurnConnection(webrtc::PROTO_UDP); } // Test that CreatePermissionRequest will be scheduled after the success @@ -1459,13 +1696,16 @@ TEST_F(TurnPortTest, TestTurnConnectionUsingOTUNonce) { // ErrorResponse if the ufrag and pwd are incorrect. 
TEST_F(TurnPortTest, TestRefreshCreatePermissionRequest) { CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); - PrepareTurnAndUdpPorts(PROTO_UDP); + PrepareTurnAndUdpPorts(webrtc::PROTO_UDP); Connection* conn = turn_port_->CreateConnection(udp_port_->Candidates()[0], Port::ORIGIN_MESSAGE); ASSERT_TRUE(conn != NULL); - EXPECT_TRUE_SIMULATED_WAIT(turn_create_permission_success_, kSimulatedRtt, - fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return turn_create_permission_success_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), .clock = &fake_clock_}), + webrtc::IsRtcOk()); turn_create_permission_success_ = false; // A create-permission-request should be pending. // After the next create-permission-response is received, it will schedule @@ -1473,18 +1713,24 @@ TEST_F(TurnPortTest, TestRefreshCreatePermissionRequest) { RelayCredentials bad_credentials("bad_user", "bad_pwd"); turn_port_->set_credentials(bad_credentials); turn_port_->request_manager().FlushForTest(kAllRequestsForTest); - EXPECT_TRUE_SIMULATED_WAIT(turn_create_permission_success_, kSimulatedRtt, - fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return turn_create_permission_success_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Flush the requests again; the create-permission-request will fail. turn_port_->request_manager().FlushForTest(kAllRequestsForTest); - EXPECT_TRUE_SIMULATED_WAIT(!turn_create_permission_success_, kSimulatedRtt, - fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return !turn_create_permission_success_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), .clock = &fake_clock_}), + webrtc::IsRtcOk()); EXPECT_TRUE(CheckConnectionFailedAndPruned(conn)); } TEST_F(TurnPortTest, TestChannelBindGetErrorResponse) { CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); - PrepareTurnAndUdpPorts(PROTO_UDP); + PrepareTurnAndUdpPorts(webrtc::PROTO_UDP); Connection* conn1 = turn_port_->CreateConnection(udp_port_->Candidates()[0], Port::ORIGIN_MESSAGE); ASSERT_TRUE(conn1 != nullptr); @@ -1493,59 +1739,80 @@ TEST_F(TurnPortTest, TestChannelBindGetErrorResponse) { ASSERT_TRUE(conn2 != nullptr); conn1->Ping(0); - EXPECT_TRUE_SIMULATED_WAIT(conn1->writable(), kSimulatedRtt * 2, fake_clock_); - // TODO(deadbeef): SetEntryChannelId should not be a public method. - // Instead we should set an option on the fake TURN server to force it to - // send a channel bind errors. - ASSERT_TRUE( - turn_port_->SetEntryChannelId(udp_port_->Candidates()[0].address(), -1)); + EXPECT_THAT( + webrtc::WaitUntil([&] { return conn1->writable(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 2), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); + + // Tell the TURN server to reject all bind requests from now on. + turn_server_.server()->set_reject_bind_requests(true); std::string data = "ABC"; conn1->Send(data.data(), data.length(), options); - EXPECT_TRUE_SIMULATED_WAIT(CheckConnectionFailedAndPruned(conn1), - kSimulatedRtt, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnectionFailedAndPruned(conn1); }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Verify that packets are allowed to be sent after a bind request error. // They'll just use a send indication instead. 
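// The hunk below replaces the sigslot SignalReadPacket hookup with the
// RegisterReceivedPacketCallback / DeregisterReceivedPacketCallback pair.
// Sketch of the callback shape, reusing the fixture's udp_packets_ buffer:
conn2->RegisterReceivedPacketCallback(
    [&](Connection* connection, const ReceivedIpPacket& packet) {
      udp_packets_.push_back(
          Buffer(packet.payload().data(), packet.payload().size()));
    });
// ... send data through conn1 and wait for it to arrive ...
conn2->DeregisterReceivedPacketCallback();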
- conn2->SignalReadPacket.connect(static_cast(this), - &TurnPortTest::OnUdpReadPacket); + + conn2->RegisterReceivedPacketCallback( + [&](Connection* connection, const ReceivedIpPacket& packet) { + // TODO(bugs.webrtc.org/345518625): Verify that the packet was + // received unchanneled, not channeled. + udp_packets_.push_back( + Buffer(packet.payload().data(), packet.payload().size())); + }); conn1->Send(data.data(), data.length(), options); - EXPECT_TRUE_SIMULATED_WAIT(!udp_packets_.empty(), kSimulatedRtt, fake_clock_); + EXPECT_THAT(webrtc::WaitUntil([&] { return !udp_packets_.empty(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); + conn2->DeregisterReceivedPacketCallback(); } // Do a TURN allocation, establish a UDP connection, and send some data. TEST_F(TurnPortTest, TestTurnSendDataTurnUdpToUdp) { // Create ports and prepare addresses. CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); - TestTurnSendData(PROTO_UDP); - EXPECT_EQ(UDP_PROTOCOL_NAME, turn_port_->Candidates()[0].relay_protocol()); + TestTurnSendData(webrtc::PROTO_UDP); + EXPECT_EQ(webrtc::UDP_PROTOCOL_NAME, + turn_port_->Candidates()[0].relay_protocol()); } // Do a TURN allocation, establish a TCP connection, and send some data. TEST_F(TurnPortTest, TestTurnSendDataTurnTcpToUdp) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); // Create ports and prepare addresses. CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTcpProtoAddr); - TestTurnSendData(PROTO_TCP); - EXPECT_EQ(TCP_PROTOCOL_NAME, turn_port_->Candidates()[0].relay_protocol()); + TestTurnSendData(webrtc::PROTO_TCP); + EXPECT_EQ(webrtc::TCP_PROTOCOL_NAME, + turn_port_->Candidates()[0].relay_protocol()); } // Do a TURN allocation, establish a TLS connection, and send some data. TEST_F(TurnPortTest, TestTurnSendDataTurnTlsToUdp) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TLS); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TLS); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTlsProtoAddr); - TestTurnSendData(PROTO_TLS); - EXPECT_EQ(TLS_PROTOCOL_NAME, turn_port_->Candidates()[0].relay_protocol()); + TestTurnSendData(webrtc::PROTO_TLS); + EXPECT_EQ(webrtc::TLS_PROTOCOL_NAME, + turn_port_->Candidates()[0].relay_protocol()); } // Test TURN fails to make a connection from IPv6 address to a server which has // IPv4 address. TEST_F(TurnPortTest, TestTurnLocalIPv6AddressServerIPv4) { - turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, PROTO_UDP); + turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, webrtc::PROTO_UDP); CreateTurnPort(kLocalIPv6Addr, kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); turn_port_->PrepareAddress(); - ASSERT_TRUE_SIMULATED_WAIT(turn_error_, kSimulatedRtt, fake_clock_); + ASSERT_THAT(webrtc::WaitUntil([&] { return turn_error_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); EXPECT_TRUE(turn_port_->Candidates().empty()); } @@ -1553,7 +1820,7 @@ TEST_F(TurnPortTest, TestTurnLocalIPv6AddressServerIPv4) { // IPv6 intenal address. But in this test external address is a IPv4 address, // hence allocated address will be a IPv4 address. 
TEST_F(TurnPortTest, TestTurnLocalIPv6AddressServerIPv6ExtenalIPv4) { - turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, PROTO_UDP); + turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, webrtc::PROTO_UDP); CreateTurnPort(kLocalIPv6Addr, kTurnUsername, kTurnPassword, kTurnUdpIPv6ProtoAddr); TestTurnAllocateSucceeds(kSimulatedRtt * 2); @@ -1564,17 +1831,22 @@ TEST_F(TurnPortTest, TestTurnLocalIPv6AddressServerIPv6ExtenalIPv4) { // its local candidate will still be an IPv4 address and it can only create // connections with IPv4 remote candidates. TEST_F(TurnPortTest, TestCandidateAddressFamilyMatch) { - turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, PROTO_UDP); + turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, webrtc::PROTO_UDP); CreateTurnPort(kLocalIPv6Addr, kTurnUsername, kTurnPassword, kTurnUdpIPv6ProtoAddr); turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 2, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 2), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); ASSERT_EQ(1U, turn_port_->Candidates().size()); // Create an IPv4 candidate. It will match the TURN candidate. Candidate remote_candidate(ICE_CANDIDATE_COMPONENT_RTP, "udp", kLocalAddr2, 0, - "", "", "local", 0, kCandidateFoundation); + "", "", IceCandidateType::kHost, 0, + kCandidateFoundation); remote_candidate.set_address(kLocalAddr2); Connection* conn = turn_port_->CreateConnection(remote_candidate, Port::ORIGIN_MESSAGE); @@ -1590,15 +1862,22 @@ TEST_F(TurnPortTest, TestCandidateAddressFamilyMatch) { // Test that a CreatePermission failure will result in the connection being // pruned and failed. TEST_F(TurnPortTest, TestConnectionFailedAndPrunedOnCreatePermissionFailure) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); turn_server_.server()->set_reject_private_addresses(true); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTcpProtoAddr); turn_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(turn_ready_, kSimulatedRtt * 3, fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return turn_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt * 3), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); CreateUdpPort(SocketAddress("10.0.0.10", 0)); udp_port_->PrepareAddress(); - EXPECT_TRUE_SIMULATED_WAIT(udp_ready_, kSimulatedRtt, fake_clock_); + EXPECT_THAT(webrtc::WaitUntil([&] { return udp_ready_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Create a connection. TestConnectionWrapper conn(turn_port_->CreateConnection( udp_port_->Candidates()[0], Port::ORIGIN_MESSAGE)); @@ -1606,10 +1885,17 @@ TEST_F(TurnPortTest, TestConnectionFailedAndPrunedOnCreatePermissionFailure) { // Asynchronously, CreatePermission request should be sent and fail, which // will make the connection pruned and failed. 
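// Remote candidates are now constructed with the IceCandidateType enum rather
// than the old "local"/"stun"/"relay" type strings, as in the address-family
// test above. Sketch with the argument list copied from that test:
Candidate remote_candidate(ICE_CANDIDATE_COMPONENT_RTP, "udp", kLocalAddr2, 0,
                           "", "", IceCandidateType::kHost, 0,
                           kCandidateFoundation);
remote_candidate.set_address(kLocalAddr2);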
- EXPECT_TRUE_SIMULATED_WAIT(CheckConnectionFailedAndPruned(conn.connection()), - kSimulatedRtt, fake_clock_); - EXPECT_TRUE_SIMULATED_WAIT(!turn_create_permission_success_, kSimulatedRtt, - fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return CheckConnectionFailedAndPruned(conn.connection()); }, + IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), .clock = &fake_clock_}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return !turn_create_permission_success_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kSimulatedRtt), .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Check that the connection is not deleted asynchronously. SIMULATED_WAIT(conn.connection() == nullptr, kConnectionDestructionDelay, fake_clock_); @@ -1619,38 +1905,38 @@ TEST_F(TurnPortTest, TestConnectionFailedAndPrunedOnCreatePermissionFailure) { // Test that a TURN allocation is released when the port is closed. TEST_F(TurnPortTest, TestTurnReleaseAllocation) { CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); - TestTurnReleaseAllocation(PROTO_UDP); + TestTurnReleaseAllocation(webrtc::PROTO_UDP); } // Test that a TURN TCP allocation is released when the port is closed. TEST_F(TurnPortTest, TestTurnTCPReleaseAllocation) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTcpProtoAddr); - TestTurnReleaseAllocation(PROTO_TCP); + TestTurnReleaseAllocation(webrtc::PROTO_TCP); } TEST_F(TurnPortTest, TestTurnTLSReleaseAllocation) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TLS); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TLS); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTlsProtoAddr); - TestTurnReleaseAllocation(PROTO_TLS); + TestTurnReleaseAllocation(webrtc::PROTO_TLS); } TEST_F(TurnPortTest, TestTurnUDPGracefulReleaseAllocation) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_UDP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_UDP); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); - TestTurnGracefulReleaseAllocation(PROTO_UDP); + TestTurnGracefulReleaseAllocation(webrtc::PROTO_UDP); } TEST_F(TurnPortTest, TestTurnTCPGracefulReleaseAllocation) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTcpProtoAddr); - TestTurnGracefulReleaseAllocation(PROTO_TCP); + TestTurnGracefulReleaseAllocation(webrtc::PROTO_TCP); } TEST_F(TurnPortTest, TestTurnTLSGracefulReleaseAllocation) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TLS); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TLS); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTlsProtoAddr); - TestTurnGracefulReleaseAllocation(PROTO_TLS); + TestTurnGracefulReleaseAllocation(webrtc::PROTO_TLS); } // Test that nothing bad happens if we try to create a connection to the same @@ -1658,7 +1944,7 @@ TEST_F(TurnPortTest, TestTurnTLSGracefulReleaseAllocation) { // DCHECK. 
TEST_F(TurnPortTest, CanCreateTwoConnectionsToSameAddress) { CreateTurnPort(kTurnUsername, kTurnPassword, kTurnUdpProtoAddr); - PrepareTurnAndUdpPorts(PROTO_UDP); + PrepareTurnAndUdpPorts(webrtc::PROTO_UDP); Connection* conn1 = turn_port_->CreateConnection(udp_port_->Candidates()[0], Port::ORIGIN_MESSAGE); Connection* conn2 = turn_port_->CreateConnection(udp_port_->Candidates()[0], @@ -1671,18 +1957,23 @@ TEST_F(TurnPortTest, CanCreateTwoConnectionsToSameAddress) { #if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID) TEST_F(TurnPortTest, TestResolverShutdown) { - turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, PROTO_UDP); + turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, webrtc::PROTO_UDP); int last_fd_count = GetFDCount(); // Need to supply unresolved address to kick off resolver. CreateTurnPort(kLocalIPv6Addr, kTurnUsername, kTurnPassword, - ProtocolAddress(kTurnInvalidAddr, PROTO_UDP)); + ProtocolAddress(kTurnInvalidAddr, webrtc::PROTO_UDP)); turn_port_->PrepareAddress(); - ASSERT_TRUE_WAIT(turn_error_, kResolverTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return turn_error_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kResolverTimeout)}), + webrtc::IsRtcOk()); EXPECT_TRUE(turn_port_->Candidates().empty()); turn_port_.reset(); - rtc::Thread::Current()->PostTask([this] { test_finish_ = true; }); + Thread::Current()->PostTask([this] { test_finish_ = true; }); // Waiting for above message to be processed. - ASSERT_TRUE_SIMULATED_WAIT(test_finish_, 1, fake_clock_); + ASSERT_THAT(webrtc::WaitUntil([&] { return test_finish_; }, IsTrue(), + {.clock = &fake_clock_}), + webrtc::IsRtcOk()); EXPECT_EQ(last_fd_count, GetFDCount()); } #endif @@ -1704,14 +1995,14 @@ class MessageObserver : public StunMessageObserver { const StunByteStringAttribute* attr = msg->GetByteString(TestTurnCustomizer::STUN_ATTR_COUNTER); if (attr != nullptr && attr_counter_ != nullptr) { - rtc::ByteBufferReader buf(attr->bytes(), attr->length()); + ByteBufferReader buf(attr->array_view()); unsigned int val = ~0u; buf.ReadUInt32(&val); (*attr_counter_)++; } } - void ReceivedChannelData(const char* data, size_t size) override { + void ReceivedChannelData(ArrayView payload) override { if (channel_data_counter_ != nullptr) { (*channel_data_counter_)++; } @@ -1738,13 +2029,14 @@ TEST_F(TurnPortTest, TestTurnCustomizerCount) { &observer_message_counter, &observer_channel_data_counter, &observer_attr_counter)); - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TLS); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TLS); turn_customizer_.reset(customizer); turn_server_.server()->SetStunMessageObserver(std::move(validator)); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTlsProtoAddr); - TestTurnSendData(PROTO_TLS); - EXPECT_EQ(TLS_PROTOCOL_NAME, turn_port_->Candidates()[0].relay_protocol()); + TestTurnSendData(webrtc::PROTO_TLS); + EXPECT_EQ(webrtc::TLS_PROTOCOL_NAME, + turn_port_->Candidates()[0].relay_protocol()); // There should have been at least turn_packets_.size() calls to `customizer`. 
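// MessageObserver above now constructs ByteBufferReader from the attribute's
// array_view() instead of a (bytes, length) pair. Sketch of reading the
// counter attribute that the customizer tests below count on, assuming the
// observer context where `msg` is the received TurnMessage*:
const StunByteStringAttribute* attr =
    msg->GetByteString(TestTurnCustomizer::STUN_ATTR_COUNTER);
if (attr != nullptr) {
  ByteBufferReader buf(attr->array_view());
  uint32_t val = 0;
  buf.ReadUInt32(&val);
}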
EXPECT_GE(customizer->modify_cnt_ + customizer->allow_channel_data_cnt_, @@ -1768,13 +2060,14 @@ TEST_F(TurnPortTest, TestTurnCustomizerDisallowChannelData) { &observer_message_counter, &observer_channel_data_counter, &observer_attr_counter)); customizer->allow_channel_data_ = false; - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TLS); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TLS); turn_customizer_.reset(customizer); turn_server_.server()->SetStunMessageObserver(std::move(validator)); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTlsProtoAddr); - TestTurnSendData(PROTO_TLS); - EXPECT_EQ(TLS_PROTOCOL_NAME, turn_port_->Candidates()[0].relay_protocol()); + TestTurnSendData(webrtc::PROTO_TLS); + EXPECT_EQ(webrtc::TLS_PROTOCOL_NAME, + turn_port_->Candidates()[0].relay_protocol()); // There should have been at least turn_packets_.size() calls to `customizer`. EXPECT_GE(customizer->modify_cnt_, turn_packets_.size()); @@ -1798,13 +2091,14 @@ TEST_F(TurnPortTest, TestTurnCustomizerAddAttribute) { &observer_attr_counter)); customizer->allow_channel_data_ = false; customizer->add_counter_ = true; - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TLS); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TLS); turn_customizer_.reset(customizer); turn_server_.server()->SetStunMessageObserver(std::move(validator)); CreateTurnPort(kTurnUsername, kTurnPassword, kTurnTlsProtoAddr); - TestTurnSendData(PROTO_TLS); - EXPECT_EQ(TLS_PROTOCOL_NAME, turn_port_->Candidates()[0].relay_protocol()); + TestTurnSendData(webrtc::PROTO_TLS); + EXPECT_EQ(webrtc::TLS_PROTOCOL_NAME, + turn_port_->Candidates()[0].relay_protocol()); // There should have been at least turn_packets_.size() calls to `customizer`. EXPECT_GE(customizer->modify_cnt_, turn_packets_.size()); @@ -1854,8 +2148,8 @@ TEST_F(TurnPortTest, TestTurnDangerousServerPermits443) { } TEST_F(TurnPortTest, TestTurnDangerousAlternateServer) { - const ProtocolType protocol_type = PROTO_TCP; - std::vector redirect_addresses; + const ProtocolType protocol_type = webrtc::PROTO_TCP; + std::vector redirect_addresses; redirect_addresses.push_back(kTurnDangerousAddr); TestTurnRedirector redirector(redirect_addresses); @@ -1871,38 +2165,32 @@ TEST_F(TurnPortTest, TestTurnDangerousAlternateServer) { turn_port_->PrepareAddress(); // This should result in an error event. - EXPECT_TRUE_SIMULATED_WAIT(error_event_.error_code != 0, - TimeToGetAlternateTurnCandidate(protocol_type), - fake_clock_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return error_event_.error_code; }, Ne(0), + {.timeout = TimeDelta::Millis( + TimeToGetAlternateTurnCandidate(protocol_type)), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // but should NOT result in the port turning ready, and no candidates // should be gathered. 
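// The dangerous-alternate-server test above seeds a TestTurnRedirector with a
// single redirect target. Sketch; the vector's element type is assumed to be
// SocketAddress (the template argument is not visible in this hunk):
std::vector<SocketAddress> redirect_addresses;  // element type assumed
redirect_addresses.push_back(kTurnDangerousAddr);
TestTurnRedirector redirector(redirect_addresses);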
EXPECT_FALSE(turn_ready_); ASSERT_EQ(0U, turn_port_->Candidates().size()); } -TEST_F(TurnPortTest, TestTurnDangerousServerAllowedWithFieldTrial) { - webrtc::test::ScopedKeyValueConfig override_field_trials( - field_trials_, "WebRTC-Turn-AllowSystemPorts/Enabled/"); - CreateTurnPort(kTurnUsername, kTurnPassword, kTurnDangerousProtoAddr); - ASSERT_TRUE(turn_port_); -} - class TurnPortWithMockDnsResolverTest : public TurnPortTest { public: TurnPortWithMockDnsResolverTest() : TurnPortTest(), socket_factory_(ss_.get()) {} - rtc::PacketSocketFactory* socket_factory() override { - return &socket_factory_; - } + PacketSocketFactory* socket_factory() override { return &socket_factory_; } void SetDnsResolverExpectations( - rtc::MockDnsResolvingPacketSocketFactory::Expectations expectations) { + MockDnsResolvingPacketSocketFactory::Expectations expectations) { socket_factory_.SetExpectations(expectations); } private: - rtc::MockDnsResolvingPacketSocketFactory socket_factory_; + MockDnsResolvingPacketSocketFactory socket_factory_; }; // Test an allocation from a TURN server specified by a hostname. @@ -1912,7 +2200,7 @@ TEST_F(TurnPortWithMockDnsResolverTest, TestHostnameResolved) { [](webrtc::MockAsyncDnsResolver* resolver, webrtc::MockAsyncDnsResolverResult* resolver_result) { EXPECT_CALL(*resolver, Start(kTurnValidAddr, /*family=*/AF_INET, _)) - .WillOnce([](const rtc::SocketAddress& addr, int family, + .WillOnce([](const webrtc::SocketAddress& addr, int family, absl::AnyInvocable callback) { callback(); }); EXPECT_CALL(*resolver, result) .WillRepeatedly(ReturnPointee(resolver_result)); @@ -1926,14 +2214,14 @@ TEST_F(TurnPortWithMockDnsResolverTest, TestHostnameResolved) { // Test an allocation from a TURN server specified by a hostname on an IPv6 // network. 
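// Both hostname tests install gMock expectations on the injected resolver via
// SetDnsResolverExpectations(). Sketch of the shape used above and below; the
// AnyInvocable template argument (void()) is an assumption, the rest mirrors
// the tests:
SetDnsResolverExpectations(
    [](webrtc::MockAsyncDnsResolver* resolver,
       webrtc::MockAsyncDnsResolverResult* resolver_result) {
      EXPECT_CALL(*resolver, Start(kTurnValidAddr, /*family=*/AF_INET, _))
          .WillOnce([](const webrtc::SocketAddress& addr, int family,
                       absl::AnyInvocable<void()> callback) { callback(); });
      EXPECT_CALL(*resolver, result)
          .WillRepeatedly(ReturnPointee(resolver_result));
    });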
TEST_F(TurnPortWithMockDnsResolverTest, TestHostnameResolvedIPv6Network) { - turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, PROTO_UDP); + turn_server_.AddInternalSocket(kTurnUdpIPv6IntAddr, webrtc::PROTO_UDP); CreateTurnPort(kLocalIPv6Addr, kTurnUsername, kTurnPassword, kTurnPortValidHostnameProtoAddr); SetDnsResolverExpectations( [](webrtc::MockAsyncDnsResolver* resolver, webrtc::MockAsyncDnsResolverResult* resolver_result) { EXPECT_CALL(*resolver, Start(kTurnValidAddr, /*family=*/AF_INET6, _)) - .WillOnce([](const rtc::SocketAddress& addr, int family, + .WillOnce([](const webrtc::SocketAddress& addr, int family, absl::AnyInvocable callback) { callback(); }); EXPECT_CALL(*resolver, result) .WillRepeatedly(ReturnPointee(resolver_result)); @@ -1945,4 +2233,4 @@ TEST_F(TurnPortWithMockDnsResolverTest, TestHostnameResolvedIPv6Network) { TestTurnAllocateSucceeds(kSimulatedRtt * 2); } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/wrapping_active_ice_controller.cc b/p2p/base/wrapping_active_ice_controller.cc index c6659217fc..1b0ad0ca7c 100644 --- a/p2p/base/wrapping_active_ice_controller.cc +++ b/p2p/base/wrapping_active_ice_controller.cc @@ -20,25 +20,26 @@ #include "p2p/base/basic_ice_controller.h" #include "p2p/base/connection.h" #include "p2p/base/ice_agent_interface.h" +#include "p2p/base/ice_controller_factory_interface.h" #include "p2p/base/ice_controller_interface.h" #include "p2p/base/ice_switch_reason.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/transport_description.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/thread.h" -#include "rtc_base/time_utils.h" namespace { using ::webrtc::SafeTask; using ::webrtc::TimeDelta; } // unnamed namespace -namespace cricket { +namespace webrtc { WrappingActiveIceController::WrappingActiveIceController( IceAgentInterface* ice_agent, std::unique_ptr wrapped) - : network_thread_(rtc::Thread::Current()), + : network_thread_(Thread::Current()), wrapped_(std::move(wrapped)), agent_(*ice_agent) { RTC_DCHECK(ice_agent != nullptr); @@ -48,7 +49,7 @@ WrappingActiveIceController::WrappingActiveIceController( IceAgentInterface* ice_agent, IceControllerFactoryInterface* wrapped_factory, const IceControllerFactoryArgs& wrapped_factory_args) - : network_thread_(rtc::Thread::Current()), agent_(*ice_agent) { + : network_thread_(Thread::Current()), agent_(*ice_agent) { RTC_DCHECK(ice_agent != nullptr); if (wrapped_factory) { wrapped_ = wrapped_factory->Create(wrapped_factory_args); @@ -250,4 +251,4 @@ const Connection* WrappingActiveIceController::FindNextPingableConnection() { return wrapped_->FindNextPingableConnection(); } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/wrapping_active_ice_controller.h b/p2p/base/wrapping_active_ice_controller.h index 449c0f0ee1..63a5836f27 100644 --- a/p2p/base/wrapping_active_ice_controller.h +++ b/p2p/base/wrapping_active_ice_controller.h @@ -13,7 +13,6 @@ #include -#include "absl/types/optional.h" #include "api/task_queue/pending_task_safety_flag.h" #include "p2p/base/active_ice_controller_interface.h" #include "p2p/base/connection.h" @@ -26,7 +25,7 @@ #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" -namespace cricket { +namespace webrtc { // WrappingActiveIceController provides the functionality of a legacy passive // ICE controller but packaged as an active ICE Controller. 
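// The header hunk below keeps old cricket:: call sites compiling by
// re-exporting the migrated class behind a build flag. The idiom, copied from
// that hunk so other migrated types can follow the same pattern:
#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES
namespace cricket {
using ::webrtc::WrappingActiveIceController;
}  // namespace cricket
#endif  // WEBRTC_ALLOW_DEPRECATED_NAMESPACES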
@@ -79,8 +78,8 @@ class WrappingActiveIceController : public ActiveIceControllerInterface { void PruneConnections(); - rtc::Thread* const network_thread_; - webrtc::ScopedTaskSafety task_safety_; + Thread* const network_thread_; + ScopedTaskSafety task_safety_; bool started_pinging_ RTC_GUARDED_BY(network_thread_) = false; bool sort_pending_ RTC_GUARDED_BY(network_thread_) = false; @@ -92,6 +91,14 @@ class WrappingActiveIceController : public ActiveIceControllerInterface { IceAgentInterface& agent_ RTC_GUARDED_BY(network_thread_); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::WrappingActiveIceController; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_BASE_WRAPPING_ACTIVE_ICE_CONTROLLER_H_ diff --git a/p2p/base/wrapping_active_ice_controller_unittest.cc b/p2p/base/wrapping_active_ice_controller_unittest.cc index b4811bd297..ee9cc510f7 100644 --- a/p2p/base/wrapping_active_ice_controller_unittest.cc +++ b/p2p/base/wrapping_active_ice_controller_unittest.cc @@ -14,27 +14,35 @@ #include #include +#include "api/units/time_delta.h" #include "p2p/base/connection.h" -#include "p2p/base/mock_ice_agent.h" -#include "p2p/base/mock_ice_controller.h" +#include "p2p/base/ice_controller_factory_interface.h" +#include "p2p/base/ice_controller_interface.h" +#include "p2p/base/ice_switch_reason.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/transport_description.h" +#include "p2p/test/mock_ice_agent.h" +#include "p2p/test/mock_ice_controller.h" +#include "rtc_base/event.h" #include "rtc_base/fake_clock.h" -#include "rtc_base/gunit.h" #include "rtc_base/thread.h" +#include "test/gmock.h" +#include "test/gtest.h" namespace { -using ::cricket::Connection; -using ::cricket::IceConfig; -using ::cricket::IceControllerFactoryArgs; -using ::cricket::IceControllerInterface; -using ::cricket::IceMode; -using ::cricket::IceRecheckEvent; -using ::cricket::IceSwitchReason; -using ::cricket::MockIceAgent; -using ::cricket::MockIceController; -using ::cricket::MockIceControllerFactory; -using ::cricket::NominationMode; -using ::cricket::WrappingActiveIceController; +using ::webrtc::Connection; +using ::webrtc::IceConfig; +using ::webrtc::IceControllerFactoryArgs; +using ::webrtc::IceControllerInterface; +using ::webrtc::IceMode; +using ::webrtc::IceRecheckEvent; +using ::webrtc::IceSwitchReason; +using ::webrtc::MockIceAgent; +using ::webrtc::MockIceController; +using ::webrtc::MockIceControllerFactory; +using ::webrtc::NominationMode; +using ::webrtc::WrappingActiveIceController; using ::testing::_; using ::testing::ElementsAreArray; @@ -44,9 +52,9 @@ using ::testing::Ref; using ::testing::Return; using ::testing::Sequence; -using ::rtc::AutoThread; -using ::rtc::Event; -using ::rtc::ScopedFakeClock; +using ::webrtc::AutoThread; +using ::webrtc::Event; +using ::webrtc::ScopedFakeClock; using ::webrtc::TimeDelta; using NiceMockIceController = NiceMock; @@ -87,10 +95,10 @@ TEST(WrappingActiveIceControllerTest, PassthroughIceControllerInterface) { EXPECT_CALL(*wrapped, GetUseCandidateAttr(kConnection, NominationMode::AGGRESSIVE, - IceMode::ICEMODE_LITE)) + webrtc::ICEMODE_LITE)) .WillOnce(Return(true)); EXPECT_TRUE(controller.GetUseCandidateAttribute( - kConnection, NominationMode::AGGRESSIVE, IceMode::ICEMODE_LITE)); + kConnection, 
NominationMode::AGGRESSIVE, webrtc::ICEMODE_LITE)); EXPECT_CALL(*wrapped, AddConnection(kConnection)); controller.OnConnectionAdded(kConnection); diff --git a/p2p/client/basic_port_allocator.cc b/p2p/client/basic_port_allocator.cc index b6cbf1fff9..ed9db392c4 100644 --- a/p2p/client/basic_port_allocator.cc +++ b/p2p/client/basic_port_allocator.cc @@ -11,37 +11,54 @@ #include "p2p/client/basic_port_allocator.h" #include +#include +#include #include #include +#include #include #include #include #include #include "absl/algorithm/container.h" +#include "absl/base/nullability.h" #include "absl/memory/memory.h" #include "absl/strings/string_view.h" +#include "api/candidate.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" +#include "api/packet_socket_factory.h" +#include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" -#include "api/transport/field_trial_based_config.h" +#include "api/transport/enums.h" #include "api/units/time_delta.h" -#include "p2p/base/basic_packet_socket_factory.h" #include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" +#include "p2p/base/port_interface.h" #include "p2p/base/stun_port.h" #include "p2p/base/tcp_port.h" #include "p2p/base/turn_port.h" -#include "p2p/base/udp_port.h" +#include "p2p/client/relay_port_factory_interface.h" +#include "rtc_base/async_packet_socket.h" #include "rtc_base/checks.h" -#include "rtc_base/experiments/field_trial_parser.h" -#include "rtc_base/helpers.h" +#include "rtc_base/crypto_random.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" +#include "rtc_base/net_helper.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/network.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/network_constants.h" +#include "rtc_base/socket_address.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/thread.h" #include "rtc_base/trace_event.h" -#include "system_wrappers/include/metrics.h" -namespace cricket { +namespace webrtc { namespace { -using ::rtc::CreateRandomId; +using ::webrtc::CreateRandomId; +using ::webrtc::IceCandidateType; using ::webrtc::SafeTask; using ::webrtc::TimeDelta; @@ -52,14 +69,14 @@ const int PHASE_TCP = 2; const int kNumPhases = 3; // Gets protocol priority: UDP > TCP > SSLTCP == TLS. -int GetProtocolPriority(cricket::ProtocolType protocol) { +int GetProtocolPriority(ProtocolType protocol) { switch (protocol) { - case cricket::PROTO_UDP: + case webrtc::PROTO_UDP: return 2; - case cricket::PROTO_TCP: + case webrtc::PROTO_TCP: return 1; - case cricket::PROTO_SSLTCP: - case cricket::PROTO_TLS: + case webrtc::PROTO_SSLTCP: + case webrtc::PROTO_TLS: return 0; default: RTC_DCHECK_NOTREACHED(); @@ -80,7 +97,7 @@ int GetAddressFamilyPriority(int ip_family) { } // Returns positive if a is better, negative if b is better, and 0 otherwise. 
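// GetProtocolPriority() above ranks UDP(2) > TCP(1) > SSLTCP/TLS(0), and
// ComparePort() in the next hunk compares that first, falling back to
// GetAddressFamilyPriority() on a tie. Illustrative use inside the same
// translation unit (this wrapper itself is not part of the patch):
bool PortBetterThan(const webrtc::Port* a, const webrtc::Port* b) {
  return ComparePort(a, b) > 0;  // positive result means `a` is preferred.
}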
-int ComparePort(const cricket::Port* a, const cricket::Port* b) { +int ComparePort(const Port* a, const Port* b) { int a_protocol = GetProtocolPriority(a->GetProtocol()); int b_protocol = GetProtocolPriority(b->GetProtocol()); int cmp_protocol = a_protocol - b_protocol; @@ -94,16 +111,15 @@ int ComparePort(const cricket::Port* a, const cricket::Port* b) { } struct NetworkFilter { - using Predicate = std::function; + using Predicate = std::function; NetworkFilter(Predicate pred, absl::string_view description) - : predRemain( - [pred](const rtc::Network* network) { return !pred(network); }), + : predRemain([pred](const Network* network) { return !pred(network); }), description(description) {} Predicate predRemain; const std::string description; }; -void FilterNetworks(std::vector* networks, +void FilterNetworks(std::vector* networks, NetworkFilter filter) { auto start_to_remove = std::partition(networks->begin(), networks->end(), filter.predRemain); @@ -126,12 +142,16 @@ bool IsAllowedByCandidateFilter(const Candidate& c, uint32_t filter) { return false; } - if (c.type() == RELAY_PORT_TYPE) { - return ((filter & CF_RELAY) != 0); - } else if (c.type() == STUN_PORT_TYPE) { - return ((filter & CF_REFLEXIVE) != 0); - } else if (c.type() == LOCAL_PORT_TYPE) { - if ((filter & CF_REFLEXIVE) && !c.address().IsPrivateIP()) { + if (c.is_relay()) { + return ((filter & webrtc::CF_RELAY) != 0); + } + + if (c.is_stun()) { + return ((filter & webrtc::CF_REFLEXIVE) != 0); + } + + if (c.is_local()) { + if ((filter & webrtc::CF_REFLEXIVE) && !c.address().IsPrivateIP()) { // We allow host candidates if the filter allows server-reflexive // candidates and the candidate is a public IP. Because we don't generate // server-reflexive candidates if they have the same IP as the host @@ -141,13 +161,14 @@ bool IsAllowedByCandidateFilter(const Candidate& c, uint32_t filter) { return true; } - return ((filter & CF_HOST) != 0); + return ((filter & webrtc::CF_HOST) != 0); } + return false; } -std::string NetworksToString(const std::vector& networks) { - rtc::StringBuilder ost; +std::string NetworksToString(const std::vector& networks) { + StringBuilder ost; for (auto n : networks) { ost << n->name() << " "; } @@ -157,61 +178,23 @@ std::string NetworksToString(const std::vector& networks) { } // namespace const uint32_t DISABLE_ALL_PHASES = - PORTALLOCATOR_DISABLE_UDP | PORTALLOCATOR_DISABLE_TCP | - PORTALLOCATOR_DISABLE_STUN | PORTALLOCATOR_DISABLE_RELAY; + webrtc::PORTALLOCATOR_DISABLE_UDP | webrtc::PORTALLOCATOR_DISABLE_TCP | + webrtc::PORTALLOCATOR_DISABLE_STUN | webrtc::PORTALLOCATOR_DISABLE_RELAY; -// BasicPortAllocator BasicPortAllocator::BasicPortAllocator( - rtc::NetworkManager* network_manager, - rtc::PacketSocketFactory* socket_factory, - webrtc::TurnCustomizer* customizer, - RelayPortFactoryInterface* relay_port_factory, - const webrtc::FieldTrialsView* field_trials) - : field_trials_(field_trials), + const Environment& env, + NetworkManager* absl_nonnull network_manager, + PacketSocketFactory* absl_nonnull socket_factory, + TurnCustomizer* absl_nullable turn_customizer, + RelayPortFactoryInterface* absl_nullable relay_port_factory) + : env_(env), network_manager_(network_manager), socket_factory_(socket_factory), - default_relay_port_factory_(relay_port_factory ? nullptr - : new TurnPortFactory()), - relay_port_factory_(relay_port_factory - ? 
relay_port_factory - : default_relay_port_factory_.get()) { + relay_port_factory_(relay_port_factory) { RTC_CHECK(socket_factory_); - RTC_DCHECK(relay_port_factory_); RTC_DCHECK(network_manager_); SetConfiguration(ServerAddresses(), std::vector(), 0, - webrtc::NO_PRUNE, customizer); -} - -BasicPortAllocator::BasicPortAllocator( - rtc::NetworkManager* network_manager, - rtc::PacketSocketFactory* socket_factory, - const ServerAddresses& stun_servers, - const webrtc::FieldTrialsView* field_trials) - : field_trials_(field_trials), - network_manager_(network_manager), - socket_factory_(socket_factory), - default_relay_port_factory_(new TurnPortFactory()), - relay_port_factory_(default_relay_port_factory_.get()) { - RTC_CHECK(socket_factory_); - RTC_DCHECK(relay_port_factory_); - RTC_DCHECK(network_manager_); - SetConfiguration(stun_servers, std::vector(), 0, - webrtc::NO_PRUNE, nullptr); -} - -void BasicPortAllocator::OnIceRegathering(PortAllocatorSession* session, - IceRegatheringReason reason) { - // If the session has not been taken by an active channel, do not report the - // metric. - for (auto& allocator_session : pooled_sessions()) { - if (allocator_session.get() == session) { - return; - } - } - - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.IceRegatheringReason", - static_cast(reason), - static_cast(IceRegatheringReason::MAX_VALUE)); + webrtc::NO_PRUNE, turn_customizer); } BasicPortAllocator::~BasicPortAllocator() { @@ -233,11 +216,11 @@ int BasicPortAllocator::GetNetworkIgnoreMask() const { CheckRunOnValidThreadIfInitialized(); int mask = network_ignore_mask_; switch (vpn_preference_) { - case webrtc::VpnPreference::kOnlyUseVpn: - mask |= ~static_cast(rtc::ADAPTER_TYPE_VPN); + case VpnPreference::kOnlyUseVpn: + mask |= ~static_cast(webrtc::ADAPTER_TYPE_VPN); break; - case webrtc::VpnPreference::kNeverUseVpn: - mask |= static_cast(rtc::ADAPTER_TYPE_VPN); + case VpnPreference::kNeverUseVpn: + mask |= static_cast(webrtc::ADAPTER_TYPE_VPN); break; default: break; @@ -251,12 +234,9 @@ PortAllocatorSession* BasicPortAllocator::CreateSessionInternal( absl::string_view ice_ufrag, absl::string_view ice_pwd) { CheckRunOnValidThreadAndInitialized(); - PortAllocatorSession* session = new BasicPortAllocatorSession( - this, std::string(content_name), component, std::string(ice_ufrag), - std::string(ice_pwd)); - session->SignalIceRegathering.connect(this, - &BasicPortAllocator::OnIceRegathering); - return session; + return new BasicPortAllocatorSession(this, std::string(content_name), + component, std::string(ice_ufrag), + std::string(ice_pwd)); } void BasicPortAllocator::AddTurnServerForTesting( @@ -281,7 +261,7 @@ BasicPortAllocatorSession::BasicPortAllocatorSession( ice_pwd, allocator->flags()), allocator_(allocator), - network_thread_(rtc::Thread::Current()), + network_thread_(Thread::Current()), socket_factory_(allocator->socket_factory()), allocation_started_(false), network_manager_started_(false), @@ -335,7 +315,7 @@ void BasicPortAllocatorSession::SetCandidateFilter(uint32_t filter) { PortData::State cur_state = port_data.state(); bool found_signalable_candidate = false; bool found_pairable_candidate = false; - cricket::Port* port = port_data.port(); + Port* port = port_data.port(); for (const auto& c : port->Candidates()) { if (!IsStopped() && !IsAllowedByCandidateFilter(c, prev_filter) && IsAllowedByCandidateFilter(c, filter)) { @@ -426,11 +406,10 @@ bool BasicPortAllocatorSession::IsStopped() const { return state_ == SessionState::STOPPED; } -std::vector 
-BasicPortAllocatorSession::GetFailedNetworks() { +std::vector BasicPortAllocatorSession::GetFailedNetworks() { RTC_DCHECK_RUN_ON(network_thread_); - std::vector networks = GetNetworks(); + std::vector networks = GetNetworks(); // A network interface may have both IPv4 and IPv6 networks. Only if // neither of the networks has any connections, the network interface // is considered failed and need to be regathered on. @@ -444,7 +423,7 @@ BasicPortAllocatorSession::GetFailedNetworks() { networks.erase( std::remove_if(networks.begin(), networks.end(), - [networks_with_connection](const rtc::Network* network) { + [networks_with_connection](const Network* network) { // If a network does not have any connection, it is // considered failed. return networks_with_connection.find(network->name()) != @@ -458,7 +437,7 @@ void BasicPortAllocatorSession::RegatherOnFailedNetworks() { RTC_DCHECK_RUN_ON(network_thread_); // Find the list of networks that have no connection. - std::vector failed_networks = GetFailedNetworks(); + std::vector failed_networks = GetFailedNetworks(); if (failed_networks.empty()) { return; } @@ -481,7 +460,7 @@ void BasicPortAllocatorSession::RegatherOnFailedNetworks() { } void BasicPortAllocatorSession::Regather( - const std::vector& networks, + const std::vector& networks, bool disable_equivalent_phases, IceRegatheringReason reason) { RTC_DCHECK_RUN_ON(network_thread_); @@ -506,7 +485,7 @@ void BasicPortAllocatorSession::GetCandidateStatsFromReadyPorts( for (auto* port : ports) { auto candidates = port->Candidates(); for (const auto& candidate : candidates) { - absl::optional stun_stats; + std::optional stun_stats; port->GetStunStats(&stun_stats); CandidateStats candidate_stats(allocator_->SanitizeCandidate(candidate), std::move(stun_stats)); @@ -516,15 +495,16 @@ void BasicPortAllocatorSession::GetCandidateStatsFromReadyPorts( } void BasicPortAllocatorSession::SetStunKeepaliveIntervalForReadyPorts( - const absl::optional& stun_keepalive_interval) { + const std::optional& stun_keepalive_interval) { RTC_DCHECK_RUN_ON(network_thread_); auto ports = ReadyPorts(); for (PortInterface* port : ports) { // The port type and protocol can be used to identify different subclasses // of Port in the current implementation. Note that a TCPPort has the type - // LOCAL_PORT_TYPE but uses the protocol PROTO_TCP. - if (port->Type() == STUN_PORT_TYPE || - (port->Type() == LOCAL_PORT_TYPE && port->GetProtocol() == PROTO_UDP)) { + // IceCandidateType::kHost but uses the protocol PROTO_TCP. 
+ if (port->Type() == IceCandidateType::kSrflx || + (port->Type() == IceCandidateType::kHost && + port->GetProtocol() == webrtc::PROTO_UDP)) { static_cast(port)->set_stun_keepalive_delay( stun_keepalive_interval); } @@ -606,7 +586,7 @@ void BasicPortAllocatorSession::GetPortConfigurations() { auto config = std::make_unique( allocator_->stun_servers(), username(), password(), - allocator()->field_trials()); + &allocator()->env().field_trials()); for (const RelayServerConfig& turn_server : allocator_->turn_servers()) { config->AddRelay(turn_server); @@ -690,22 +670,22 @@ void BasicPortAllocatorSession::OnAllocate(int allocation_epoch) { allocation_started_ = true; } -std::vector BasicPortAllocatorSession::GetNetworks() { +std::vector BasicPortAllocatorSession::GetNetworks() { RTC_DCHECK_RUN_ON(network_thread_); - std::vector networks; - rtc::NetworkManager* network_manager = allocator_->network_manager(); + std::vector networks; + NetworkManager* network_manager = allocator_->network_manager(); RTC_DCHECK(network_manager != nullptr); // If the network permission state is BLOCKED, we just act as if the flag has // been passed in. if (network_manager->enumeration_permission() == - rtc::NetworkManager::ENUMERATION_BLOCKED) { - set_flags(flags() | PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION); + NetworkManager::ENUMERATION_BLOCKED) { + set_flags(flags() | webrtc::PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION); } // If the adapter enumeration is disabled, we'll just bind to any address // instead of specific NIC. This is to ensure the same routing for http // traffic by OS is also used here to avoid any local or public IP leakage // during stun process. - if (flags() & PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION) { + if (flags() & webrtc::PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION) { networks = network_manager->GetAnyAddressNetworks(); } else { networks = network_manager->GetNetworks(); @@ -714,22 +694,22 @@ std::vector BasicPortAllocatorSession::GetNetworks() { // the OS. Or, if the PORTALLOCATOR_ENABLE_ANY_ADDRESS_PORTS flag is // set, we'll use ANY address candidates either way. if (networks.empty() || - (flags() & PORTALLOCATOR_ENABLE_ANY_ADDRESS_PORTS)) { - std::vector any_address_networks = + (flags() & webrtc::PORTALLOCATOR_ENABLE_ANY_ADDRESS_PORTS)) { + std::vector any_address_networks = network_manager->GetAnyAddressNetworks(); networks.insert(networks.end(), any_address_networks.begin(), any_address_networks.end()); } RTC_LOG(LS_INFO) << "Count of networks: " << networks.size(); - for (const rtc::Network* network : networks) { + for (const Network* network : networks) { RTC_LOG(LS_INFO) << network->ToString(); } } // Filter out link-local networks if needed. - if (flags() & PORTALLOCATOR_DISABLE_LINK_LOCAL_NETWORKS) { + if (flags() & webrtc::PORTALLOCATOR_DISABLE_LINK_LOCAL_NETWORKS) { NetworkFilter link_local_filter( - [](const rtc::Network* network) { - return IPIsLinkLocal(network->prefix()); + [](const webrtc::Network* network) { + return webrtc::IPIsLinkLocal(network->prefix()); }, "link-local"); FilterNetworks(&networks, link_local_filter); @@ -737,28 +717,28 @@ std::vector BasicPortAllocatorSession::GetNetworks() { // Do some more filtering, depending on the network ignore mask and "disable // costly networks" flag. 
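
All of the GetNetworks() filters (the link-local one above, and the ignore-mask and costly-network filters that follow) go through FilterNetworks(), whose NetworkFilter predicate names the networks to drop: the helper partitions the vector so kept entries stay in front, logs the dropped tail, then erases it. A self-contained sketch of those mechanics, with strings standing in for webrtc::Network pointers:

#include <algorithm>
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// The predicate returns true for entries to drop, matching the NetworkFilter
// convention in this file. Strings are stand-ins for const Network*.
using DropPredicate = std::function<bool(const std::string&)>;

void FilterBy(std::vector<std::string>* items, DropPredicate drop,
              const std::string& description) {
  auto keep = [&](const std::string& item) { return !drop(item); };
  auto first_dropped = std::partition(items->begin(), items->end(), keep);
  for (auto it = first_dropped; it != items->end(); ++it) {
    std::cout << "Dropping " << description << " entry: " << *it << "\n";
  }
  items->erase(first_dropped, items->end());
}

int main() {
  std::vector<std::string> networks = {"eth0", "wlan0", "fe80::link-local"};
  // Analogous to the link-local filter above; the real code tests the prefix
  // with IPIsLinkLocal() instead of matching a name.
  FilterBy(&networks,
           [](const std::string& name) {
             return name.find("link-local") != std::string::npos;
           },
           "link-local");
  // networks now holds {"eth0", "wlan0"}.
  return 0;
}
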
NetworkFilter ignored_filter( - [this](const rtc::Network* network) { + [this](const Network* network) { return allocator_->GetNetworkIgnoreMask() & network->type(); }, "ignored"); FilterNetworks(&networks, ignored_filter); - if (flags() & PORTALLOCATOR_DISABLE_COSTLY_NETWORKS) { - uint16_t lowest_cost = rtc::kNetworkCostMax; - for (const rtc::Network* network : networks) { + if (flags() & webrtc::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS) { + uint16_t lowest_cost = webrtc::kNetworkCostMax; + for (const Network* network : networks) { // Don't determine the lowest cost from a link-local network. // On iOS, a device connected to the computer will get a link-local // network for communicating with the computer, however this network can't // be used to connect to a peer outside the network. - if (rtc::IPIsLinkLocal(network->GetBestIP())) { + if (webrtc::IPIsLinkLocal(network->GetBestIP())) { continue; } lowest_cost = std::min( - lowest_cost, network->GetCost(*allocator()->field_trials())); + lowest_cost, network->GetCost(allocator()->env().field_trials())); } NetworkFilter costly_filter( - [lowest_cost, this](const rtc::Network* network) { - return network->GetCost(*allocator()->field_trials()) > - lowest_cost + rtc::kNetworkCostLow; + [lowest_cost, this](const Network* network) { + return network->GetCost(allocator()->env().field_trials()) > + lowest_cost + webrtc::kNetworkCostLow; }, "costly"); FilterNetworks(&networks, costly_filter); @@ -767,7 +747,7 @@ std::vector BasicPortAllocatorSession::GetNetworks() { // Lastly, if we have a limit for the number of IPv6 network interfaces (by // default, it's 5), pick IPv6 networks from different interfaces in a // priority order and stick to the limit. - std::vector ipv6_networks; + std::vector ipv6_networks; for (auto it = networks.begin(); it != networks.end();) { if ((*it)->prefix().family() == AF_INET6) { ipv6_networks.push_back(*it); @@ -782,21 +762,21 @@ std::vector BasicPortAllocatorSession::GetNetworks() { return networks; } -std::vector BasicPortAllocatorSession::SelectIPv6Networks( - std::vector& all_ipv6_networks, +std::vector BasicPortAllocatorSession::SelectIPv6Networks( + std::vector& all_ipv6_networks, int max_ipv6_networks) { if (static_cast(all_ipv6_networks.size()) <= max_ipv6_networks) { return all_ipv6_networks; } // Adapter types are placed in priority order. Cellular type is an alias of // cellular, 2G..5G types. 
- std::vector adapter_types = { - rtc::ADAPTER_TYPE_ETHERNET, rtc::ADAPTER_TYPE_LOOPBACK, - rtc::ADAPTER_TYPE_WIFI, rtc::ADAPTER_TYPE_CELLULAR, - rtc::ADAPTER_TYPE_VPN, rtc::ADAPTER_TYPE_UNKNOWN, - rtc::ADAPTER_TYPE_ANY}; + std::vector adapter_types = { + webrtc::ADAPTER_TYPE_ETHERNET, webrtc::ADAPTER_TYPE_LOOPBACK, + webrtc::ADAPTER_TYPE_WIFI, webrtc::ADAPTER_TYPE_CELLULAR, + webrtc::ADAPTER_TYPE_VPN, webrtc::ADAPTER_TYPE_UNKNOWN, + webrtc::ADAPTER_TYPE_ANY}; int adapter_types_cnt = adapter_types.size(); - std::vector selected_networks; + std::vector selected_networks; int adapter_types_pos = 0; while (static_cast(selected_networks.size()) < max_ipv6_networks && @@ -806,7 +786,7 @@ std::vector BasicPortAllocatorSession::SelectIPv6Networks( if (adapter_types[adapter_types_pos % adapter_types_cnt] == all_ipv6_networks[network_pos]->type() || (adapter_types[adapter_types_pos % adapter_types_cnt] == - rtc::ADAPTER_TYPE_CELLULAR && + webrtc::ADAPTER_TYPE_CELLULAR && all_ipv6_networks[network_pos]->IsCellular())) { selected_networks.push_back(all_ipv6_networks[network_pos]); all_ipv6_networks.erase(all_ipv6_networks.begin() + network_pos); @@ -825,7 +805,7 @@ std::vector BasicPortAllocatorSession::SelectIPv6Networks( void BasicPortAllocatorSession::DoAllocate(bool disable_equivalent) { RTC_DCHECK_RUN_ON(network_thread_); bool done_signal_needed = false; - std::vector networks = GetNetworks(); + std::vector networks = GetNetworks(); if (networks.empty()) { RTC_LOG(LS_WARNING) << "Machine has no networks; no ports will be allocated"; @@ -845,18 +825,18 @@ void BasicPortAllocatorSession::DoAllocate(bool disable_equivalent) { if (!config || config->relays.empty()) { // No relay ports specified in this config. - sequence_flags |= PORTALLOCATOR_DISABLE_RELAY; + sequence_flags |= webrtc::PORTALLOCATOR_DISABLE_RELAY; } - if (!(sequence_flags & PORTALLOCATOR_ENABLE_IPV6) && + if (!(sequence_flags & webrtc::PORTALLOCATOR_ENABLE_IPV6) && networks[i]->GetBestIP().family() == AF_INET6) { // Skip IPv6 networks unless the flag's been set. continue; } - if (!(sequence_flags & PORTALLOCATOR_ENABLE_IPV6_ON_WIFI) && + if (!(sequence_flags & webrtc::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI) && networks[i]->GetBestIP().family() == AF_INET6 && - networks[i]->type() == rtc::ADAPTER_TYPE_WIFI) { + networks[i]->type() == webrtc::ADAPTER_TYPE_WIFI) { // Skip IPv6 Wi-Fi networks unless the flag's been set. continue; } @@ -893,8 +873,8 @@ void BasicPortAllocatorSession::DoAllocate(bool disable_equivalent) { void BasicPortAllocatorSession::OnNetworksChanged() { RTC_DCHECK_RUN_ON(network_thread_); - std::vector networks = GetNetworks(); - std::vector failed_networks; + std::vector networks = GetNetworks(); + std::vector failed_networks; for (AllocationSequence* sequence : sequences_) { // Mark the sequence as "network failed" if its network is not in // `networks`. 
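
SelectIPv6Networks() above enforces the IPv6 cap by cycling through the adapter-type priority list (Ethernet, loopback, Wi-Fi, cellular, VPN, unknown, any) and taking at most one matching network per step, until either the cap is reached or the passes run out. A stand-alone sketch of that selection loop, with simplified stand-in types:

#include <vector>

// Stand-ins for webrtc::AdapterType and Network, holding only what the
// selection loop needs.
enum class AdapterKind { kEthernet, kLoopback, kWifi, kCellular, kVpn, kUnknown, kAny };
struct NetSketch {
  int id;
  AdapterKind kind;
};

// Mirrors the shape of SelectIPv6Networks(): if the pool already fits, return
// it whole; otherwise walk the priority list round-robin.
std::vector<NetSketch> SelectIPv6Sketch(std::vector<NetSketch> pool, int max_networks) {
  if (static_cast<int>(pool.size()) <= max_networks) return pool;
  const std::vector<AdapterKind> priority = {
      AdapterKind::kEthernet, AdapterKind::kLoopback, AdapterKind::kWifi,
      AdapterKind::kCellular, AdapterKind::kVpn,      AdapterKind::kUnknown,
      AdapterKind::kAny};
  const int kinds = static_cast<int>(priority.size());
  std::vector<NetSketch> selected;
  int step = 0;
  while (static_cast<int>(selected.size()) < max_networks &&
         step < kinds * max_networks) {
    const AdapterKind wanted = priority[step % kinds];
    for (int i = 0; i < static_cast<int>(pool.size()); ++i) {
      if (pool[i].kind == wanted) {  // the real loop also folds 2G..5G into cellular
        selected.push_back(pool[i]);
        pool.erase(pool.begin() + i);  // take at most one network per step
        break;
      }
    }
    ++step;
  }
  return selected;
}
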
@@ -927,7 +907,7 @@ void BasicPortAllocatorSession::OnNetworksChanged() { } void BasicPortAllocatorSession::DisableEquivalentPhases( - const rtc::Network* network, + const Network* network, PortConfiguration* config, uint32_t* flags) { RTC_DCHECK_RUN_ON(network_thread_); @@ -948,10 +928,8 @@ void BasicPortAllocatorSession::AddAllocatedPort(Port* port, port->set_content_name(content_name()); port->set_component(component()); port->set_generation(generation()); - if (allocator_->proxy().type != rtc::PROXY_NONE) - port->set_proxy(allocator_->user_agent(), allocator_->proxy()); port->set_send_retransmit_count_attribute( - (flags() & PORTALLOCATOR_ENABLE_STUN_RETRANSMIT_ATTRIBUTE) != 0); + (flags() & webrtc::PORTALLOCATOR_ENABLE_STUN_RETRANSMIT_ATTRIBUTE) != 0); PortData data(port, seq); ports_.push_back(data); @@ -1006,7 +984,7 @@ void BasicPortAllocatorSession::OnCandidateReady(Port* port, if (CandidatePairable(c, port) && !data->has_pairable_candidate()) { data->set_has_pairable_candidate(true); - if (port->Type() == RELAY_PORT_TYPE) { + if (port->Type() == IceCandidateType::kRelay) { if (turn_port_prune_policy_ == webrtc::KEEP_FIRST_READY) { pruned = PruneNewlyPairableTurnPort(data); } else if (turn_port_prune_policy_ == webrtc::PRUNE_BASED_ON_PRIORITY) { @@ -1054,7 +1032,7 @@ Port* BasicPortAllocatorSession::GetBestTurnPortForNetwork( Port* best_turn_port = nullptr; for (const PortData& data : ports_) { if (data.port()->Network()->name() == network_name && - data.port()->Type() == RELAY_PORT_TYPE && data.ready() && + data.port()->Type() == IceCandidateType::kRelay && data.ready() && (!best_turn_port || ComparePort(data.port(), best_turn_port) > 0)) { best_turn_port = data.port(); } @@ -1065,7 +1043,8 @@ Port* BasicPortAllocatorSession::GetBestTurnPortForNetwork( bool BasicPortAllocatorSession::PruneNewlyPairableTurnPort( PortData* newly_pairable_port_data) { RTC_DCHECK_RUN_ON(network_thread_); - RTC_DCHECK(newly_pairable_port_data->port()->Type() == RELAY_PORT_TYPE); + RTC_DCHECK(newly_pairable_port_data->port()->Type() == + IceCandidateType::kRelay); // If an existing turn port is ready on the same network, prune the newly // pairable port. const std::string& network_name = @@ -1073,7 +1052,7 @@ bool BasicPortAllocatorSession::PruneNewlyPairableTurnPort( for (PortData& data : ports_) { if (data.port()->Network()->name() == network_name && - data.port()->Type() == RELAY_PORT_TYPE && data.ready() && + data.port()->Type() == IceCandidateType::kRelay && data.ready() && &data != newly_pairable_port_data) { RTC_LOG(LS_INFO) << "Port pruned: " << newly_pairable_port_data->port()->ToString(); @@ -1098,7 +1077,7 @@ bool BasicPortAllocatorSession::PruneTurnPorts(Port* newly_pairable_turn_port) { std::vector ports_to_prune; for (PortData& data : ports_) { if (data.port()->Network()->name() == network_name && - data.port()->Type() == RELAY_PORT_TYPE && !data.pruned() && + data.port()->Type() == IceCandidateType::kRelay && !data.pruned() && ComparePort(data.port(), best_turn_port) < 0) { pruned = true; if (data.port() != newly_pairable_turn_port) { @@ -1181,8 +1160,8 @@ bool BasicPortAllocatorSession::CandidatePairable(const Candidate& c, // both device enumeration and host candidates being disabled. 
bool network_enumeration_disabled = c.address().IsAnyIP(); bool can_ping_from_candidate = - (port->SharedSocket() || c.protocol() == TCP_PROTOCOL_NAME); - bool host_candidates_disabled = !(candidate_filter_ & CF_HOST); + (port->SharedSocket() || c.protocol() == webrtc::TCP_PROTOCOL_NAME); + bool host_candidates_disabled = !(candidate_filter_ & webrtc::CF_HOST); return candidate_signalable || (network_enumeration_disabled && can_ping_from_candidate && @@ -1240,7 +1219,7 @@ BasicPortAllocatorSession::PortData* BasicPortAllocatorSession::FindPort( std::vector BasicPortAllocatorSession::GetUnprunedPorts( - const std::vector& networks) { + const std::vector& networks) { RTC_DCHECK_RUN_ON(network_thread_); std::vector unpruned_ports; for (PortData& port : ports_) { @@ -1278,8 +1257,7 @@ void BasicPortAllocatorSession::PrunePortsAndRemoveCandidates( } } -void BasicPortAllocator::SetVpnList( - const std::vector& vpn_list) { +void BasicPortAllocator::SetVpnList(const std::vector& vpn_list) { network_manager_->set_vpn_list(vpn_list); } @@ -1287,7 +1265,7 @@ void BasicPortAllocator::SetVpnList( AllocationSequence::AllocationSequence( BasicPortAllocatorSession* session, - const rtc::Network* network, + const Network* network, PortConfiguration* config, uint32_t flags, std::function port_allocation_complete_callback) @@ -1303,13 +1281,15 @@ AllocationSequence::AllocationSequence( std::move(port_allocation_complete_callback)) {} void AllocationSequence::Init() { - if (IsFlagSet(PORTALLOCATOR_ENABLE_SHARED_SOCKET)) { + if (IsFlagSet(webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET)) { udp_socket_.reset(session_->socket_factory()->CreateUdpSocket( - rtc::SocketAddress(network_->GetBestIP(), 0), + SocketAddress(network_->GetBestIP(), 0), session_->allocator()->min_port(), session_->allocator()->max_port())); if (udp_socket_) { - udp_socket_->SignalReadPacket.connect(this, - &AllocationSequence::OnReadPacket); + udp_socket_->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + OnReadPacket(socket, packet); + }); } // Continuing if `udp_socket_` is NULL, as local TCP and RelayPort using TCP // are next available options to setup a communication channel. @@ -1329,7 +1309,7 @@ void AllocationSequence::OnNetworkFailed() { Stop(); } -void AllocationSequence::DisableEquivalentPhases(const rtc::Network* network, +void AllocationSequence::DisableEquivalentPhases(const Network* network, PortConfiguration* config, uint32_t* flags) { if (network_failed_) { @@ -1363,20 +1343,22 @@ void AllocationSequence::DisableEquivalentPhases(const rtc::Network* network, if (absl::c_any_of(session_->ports_, [this](const BasicPortAllocatorSession::PortData& p) { return !p.pruned() && p.port()->Network() == network_ && - p.port()->GetProtocol() == PROTO_UDP && - p.port()->Type() == LOCAL_PORT_TYPE && !p.error(); + p.port()->GetProtocol() == webrtc::PROTO_UDP && + p.port()->Type() == IceCandidateType::kHost && + !p.error(); })) { - *flags |= PORTALLOCATOR_DISABLE_UDP; + *flags |= webrtc::PORTALLOCATOR_DISABLE_UDP; } // Similarly we need to check both the protocol used by an existing Port and // its type. 
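
AllocationSequence::Init() above swaps the sigslot connection (SignalReadPacket.connect) for RegisterReceivedPacketCallback() with a forwarding lambda, which is what lets the class drop its sigslot::has_slots<> base in the header further down. The shape of that migration on a small stand-in socket/sequence pair, not the real AsyncPacketSocket API:

#include <functional>
#include <iostream>
#include <string>
#include <utility>

// Stand-ins for AsyncPacketSocket / ReceivedIpPacket, just enough to show the
// callback registration used in Init() above.
struct PacketSketch {
  std::string payload;
};

class SocketSketch {
 public:
  using Callback = std::function<void(SocketSketch*, const PacketSketch&)>;
  void RegisterReceivedPacketCallback(Callback callback) {
    callback_ = std::move(callback);
  }
  void SimulateReceive(const PacketSketch& packet) {
    if (callback_) callback_(this, packet);
  }

 private:
  Callback callback_;
};

class SequenceSketch {
 public:
  explicit SequenceSketch(SocketSketch* socket) {
    // Before: socket->SignalReadPacket.connect(this, &SequenceSketch::OnReadPacket);
    // After: a lambda forwards to the private handler, so no sigslot base class
    // is required.
    socket->RegisterReceivedPacketCallback(
        [this](SocketSketch* s, const PacketSketch& packet) { OnReadPacket(s, packet); });
  }

 private:
  void OnReadPacket(SocketSketch*, const PacketSketch& packet) {
    std::cout << "received " << packet.payload.size() << " bytes\n";
  }
};

int main() {
  SocketSketch socket;
  SequenceSketch sequence(&socket);
  socket.SimulateReceive({"hello"});
  return 0;
}
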
if (absl::c_any_of(session_->ports_, [this](const BasicPortAllocatorSession::PortData& p) { return !p.pruned() && p.port()->Network() == network_ && - p.port()->GetProtocol() == PROTO_TCP && - p.port()->Type() == LOCAL_PORT_TYPE && !p.error(); + p.port()->GetProtocol() == webrtc::PROTO_TCP && + p.port()->Type() == IceCandidateType::kHost && + !p.error(); })) { - *flags |= PORTALLOCATOR_DISABLE_TCP; + *flags |= webrtc::PORTALLOCATOR_DISABLE_TCP; } if (config_ && config) { @@ -1386,9 +1368,9 @@ void AllocationSequence::DisableEquivalentPhases(const rtc::Network* network, // 2. We will regather host candidates, hence possibly inducing new NAT // bindings. if (config_->StunServers() == config->StunServers() && - (*flags & PORTALLOCATOR_DISABLE_UDP)) { + (*flags & webrtc::PORTALLOCATOR_DISABLE_UDP)) { // Already got this STUN servers covered. - *flags |= PORTALLOCATOR_DISABLE_STUN; + *flags |= webrtc::PORTALLOCATOR_DISABLE_STUN; } if (!config_->relays.empty()) { // Already got relays covered. @@ -1396,7 +1378,7 @@ void AllocationSequence::DisableEquivalentPhases(const rtc::Network* network, // were to be given one, but that never happens in our codebase. Should // probably get rid of the list in PortConfiguration and just keep a // single relay server in each one. - *flags |= PORTALLOCATOR_DISABLE_RELAY; + *flags |= webrtc::PORTALLOCATOR_DISABLE_RELAY; } } } @@ -1421,7 +1403,7 @@ void AllocationSequence::Stop() { } void AllocationSequence::Process(int epoch) { - RTC_DCHECK(rtc::Thread::Current() == session_->network_thread()); + RTC_DCHECK(Thread::Current() == session_->network_thread()); const char* const PHASE_NAMES[kNumPhases] = {"Udp", "Relay", "Tcp"}; if (epoch != epoch_) @@ -1465,7 +1447,7 @@ void AllocationSequence::Process(int epoch) { } void AllocationSequence::CreateUDPPorts() { - if (IsFlagSet(PORTALLOCATOR_DISABLE_UDP)) { + if (IsFlagSet(webrtc::PORTALLOCATOR_DISABLE_UDP)) { RTC_LOG(LS_VERBOSE) << "AllocationSequence: UDP ports disabled, skipping."; return; } @@ -1474,35 +1456,41 @@ void AllocationSequence::CreateUDPPorts() { // is enabled completely. 
std::unique_ptr port; bool emit_local_candidate_for_anyaddress = - !IsFlagSet(PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE); - if (IsFlagSet(PORTALLOCATOR_ENABLE_SHARED_SOCKET) && udp_socket_) { + !IsFlagSet(webrtc::PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE); + if (IsFlagSet(webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET) && udp_socket_) { port = UDPPort::Create( - session_->network_thread(), session_->socket_factory(), network_, - udp_socket_.get(), session_->username(), session_->password(), - emit_local_candidate_for_anyaddress, - session_->allocator()->stun_candidate_keepalive_interval(), - session_->allocator()->field_trials()); + {.env = session_->allocator()->env(), + .network_thread = session_->network_thread(), + .socket_factory = session_->socket_factory(), + .network = network_, + .ice_username_fragment = session_->username(), + .ice_password = session_->password()}, + udp_socket_.get(), emit_local_candidate_for_anyaddress, + session_->allocator()->stun_candidate_keepalive_interval()); } else { port = UDPPort::Create( - session_->network_thread(), session_->socket_factory(), network_, + {.env = session_->allocator()->env(), + .network_thread = session_->network_thread(), + .socket_factory = session_->socket_factory(), + .network = network_, + .ice_username_fragment = session_->username(), + .ice_password = session_->password()}, session_->allocator()->min_port(), session_->allocator()->max_port(), - session_->username(), session_->password(), emit_local_candidate_for_anyaddress, - session_->allocator()->stun_candidate_keepalive_interval(), - session_->allocator()->field_trials()); + session_->allocator()->stun_candidate_keepalive_interval()); } if (port) { - port->SetIceTiebreaker(session_->ice_tiebreaker()); + port->SetIceTiebreaker(session_->allocator()->ice_tiebreaker()); // If shared socket is enabled, STUN candidate will be allocated by the // UDPPort. - if (IsFlagSet(PORTALLOCATOR_ENABLE_SHARED_SOCKET)) { + if (IsFlagSet(webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET)) { udp_port_ = port.get(); port->SubscribePortDestroyed( [this](PortInterface* port) { OnPortDestroyed(port); }); // If STUN is not disabled, setting stun server address to port. 
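
The UDPPort::Create() calls above (and the TCP/STUN/relay ones that follow) replace long positional parameter lists with a single args bundle built via designated initializers, in the style of the CreateRelayPortArgs struct used later in this file, so every value is named at the call site and optional fields can be left at their defaults. A generic sketch of that pattern with a made-up PortArgsSketch struct (C++20 designated initializers):

#include <iostream>
#include <string>

// Illustrative stand-in for an args bundle; not a real webrtc type.
struct PortArgsSketch {
  std::string network_name;
  std::string ice_username_fragment;
  std::string ice_password;
  int min_port = 0;
  int max_port = 0;
};

void CreatePortSketch(const PortArgsSketch& args) {
  std::cout << "port on " << args.network_name << ", range [" << args.min_port
            << ", " << args.max_port << "]\n";
}

int main() {
  // Each argument is named, and omitted fields keep their defaults, which is
  // what makes the old positional lists unnecessary.
  CreatePortSketch({.network_name = "eth0",
                    .ice_username_fragment = "ufrag",
                    .ice_password = "pwd",
                    .min_port = 50000,
                    .max_port = 50100});
  return 0;
}
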
- if (!IsFlagSet(PORTALLOCATOR_DISABLE_STUN)) { + if (!IsFlagSet(webrtc::PORTALLOCATOR_DISABLE_STUN)) { if (config_ && !config_->StunServers().empty()) { RTC_LOG(LS_INFO) << "AllocationSequence: UDPPort will be handling the " @@ -1517,19 +1505,22 @@ void AllocationSequence::CreateUDPPorts() { } void AllocationSequence::CreateTCPPorts() { - if (IsFlagSet(PORTALLOCATOR_DISABLE_TCP)) { + if (IsFlagSet(webrtc::PORTALLOCATOR_DISABLE_TCP)) { RTC_LOG(LS_VERBOSE) << "AllocationSequence: TCP ports disabled, skipping."; return; } std::unique_ptr port = TCPPort::Create( - session_->network_thread(), session_->socket_factory(), network_, + {.env = session_->allocator()->env(), + .network_thread = session_->network_thread(), + .socket_factory = session_->socket_factory(), + .network = network_, + .ice_username_fragment = session_->username(), + .ice_password = session_->password()}, session_->allocator()->min_port(), session_->allocator()->max_port(), - session_->username(), session_->password(), - session_->allocator()->allow_tcp_listen(), - session_->allocator()->field_trials()); + session_->allocator()->allow_tcp_listen()); if (port) { - port->SetIceTiebreaker(session_->ice_tiebreaker()); + port->SetIceTiebreaker(session_->allocator()->ice_tiebreaker()); session_->AddAllocatedPort(port.release(), this); // Since TCPPort is not created using shared socket, `port` will not be // added to the dequeue. @@ -1537,12 +1528,12 @@ void AllocationSequence::CreateTCPPorts() { } void AllocationSequence::CreateStunPorts() { - if (IsFlagSet(PORTALLOCATOR_DISABLE_STUN)) { + if (IsFlagSet(webrtc::PORTALLOCATOR_DISABLE_STUN)) { RTC_LOG(LS_VERBOSE) << "AllocationSequence: STUN ports disabled, skipping."; return; } - if (IsFlagSet(PORTALLOCATOR_ENABLE_SHARED_SOCKET)) { + if (IsFlagSet(webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET)) { return; } @@ -1553,13 +1544,17 @@ void AllocationSequence::CreateStunPorts() { } std::unique_ptr port = StunPort::Create( - session_->network_thread(), session_->socket_factory(), network_, + {.env = session_->allocator()->env(), + .network_thread = session_->network_thread(), + .socket_factory = session_->socket_factory(), + .network = network_, + .ice_username_fragment = session_->username(), + .ice_password = session_->password()}, session_->allocator()->min_port(), session_->allocator()->max_port(), - session_->username(), session_->password(), config_->StunServers(), - session_->allocator()->stun_candidate_keepalive_interval(), - session_->allocator()->field_trials()); + config_->StunServers(), + session_->allocator()->stun_candidate_keepalive_interval()); if (port) { - port->SetIceTiebreaker(session_->ice_tiebreaker()); + port->SetIceTiebreaker(session_->allocator()->ice_tiebreaker()); session_->AddAllocatedPort(port.release(), this); // Since StunPort is not created using shared socket, `port` will not be // added to the dequeue. @@ -1567,7 +1562,7 @@ void AllocationSequence::CreateStunPorts() { } void AllocationSequence::CreateRelayPorts() { - if (IsFlagSet(PORTALLOCATOR_DISABLE_RELAY)) { + if (IsFlagSet(webrtc::PORTALLOCATOR_DISABLE_RELAY)) { RTC_LOG(LS_VERBOSE) << "AllocationSequence: Relay ports disabled, skipping."; return; @@ -1598,8 +1593,8 @@ void AllocationSequence::CreateTurnPort(const RelayServerConfig& config, for (relay_port = config.ports.begin(); relay_port != config.ports.end(); ++relay_port) { // Skip UDP connections to relay servers if it's disallowed. 
- if (IsFlagSet(PORTALLOCATOR_DISABLE_UDP_RELAY) && - relay_port->proto == PROTO_UDP) { + if (IsFlagSet(webrtc::PORTALLOCATOR_DISABLE_UDP_RELAY) && + relay_port->proto == webrtc::PROTO_UDP) { continue; } @@ -1616,7 +1611,7 @@ void AllocationSequence::CreateTurnPort(const RelayServerConfig& config, continue; } - CreateRelayPortArgs args; + CreateRelayPortArgs args = {.env = session_->allocator()->env()}; args.network_thread = session_->network_thread(); args.socket_factory = session_->socket_factory(); args.network = network_; @@ -1625,16 +1620,15 @@ void AllocationSequence::CreateTurnPort(const RelayServerConfig& config, args.server_address = &(*relay_port); args.config = &config; args.turn_customizer = session_->allocator()->turn_customizer(); - args.field_trials = session_->allocator()->field_trials(); args.relative_priority = relative_priority; - std::unique_ptr port; + std::unique_ptr port; // Shared socket mode must be enabled only for UDP based ports. Hence // don't pass shared socket for ports which will create TCP sockets. // TODO(mallinath) - Enable shared socket mode for TURN ports. Disabled // due to webrtc bug https://code.google.com/p/webrtc/issues/detail?id=3537 - if (IsFlagSet(PORTALLOCATOR_ENABLE_SHARED_SOCKET) && - relay_port->proto == PROTO_UDP && udp_socket_) { + if (IsFlagSet(webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET) && + relay_port->proto == webrtc::PROTO_UDP && udp_socket_) { port = session_->allocator()->relay_port_factory()->Create( args, udp_socket_.get()); @@ -1662,16 +1656,13 @@ void AllocationSequence::CreateTurnPort(const RelayServerConfig& config, } } RTC_DCHECK(port != NULL); - port->SetIceTiebreaker(session_->ice_tiebreaker()); + port->SetIceTiebreaker(session_->allocator()->ice_tiebreaker()); session_->AddAllocatedPort(port.release(), this); } } -void AllocationSequence::OnReadPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - const int64_t& packet_time_us) { +void AllocationSequence::OnReadPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet) { RTC_DCHECK(socket == udp_socket_.get()); bool turn_port_found = false; @@ -1683,9 +1674,8 @@ void AllocationSequence::OnReadPacket(rtc::AsyncPacketSocket* socket, // the message type. The TurnPort will just ignore the message since it will // not find any request by transaction ID. for (auto* port : relay_ports_) { - if (port->CanHandleIncomingPacketsFrom(remote_addr)) { - if (port->HandleIncomingPacket(socket, data, size, remote_addr, - packet_time_us)) { + if (port->CanHandleIncomingPacketsFrom(packet.source_address())) { + if (port->HandleIncomingPacket(socket, packet)) { return; } turn_port_found = true; @@ -1698,10 +1688,9 @@ void AllocationSequence::OnReadPacket(rtc::AsyncPacketSocket* socket, // Pass the packet to the UdpPort if there is no matching TurnPort, or if // the TURN server is also a STUN server. 
if (!turn_port_found || - stun_servers.find(remote_addr) != stun_servers.end()) { + stun_servers.find(packet.source_address()) != stun_servers.end()) { RTC_DCHECK(udp_port_->SharedSocket()); - udp_port_->HandleIncomingPacket(socket, data, size, remote_addr, - packet_time_us); + udp_port_->HandleIncomingPacket(socket, packet); } } } @@ -1721,11 +1710,10 @@ void AllocationSequence::OnPortDestroyed(PortInterface* port) { } } -PortConfiguration::PortConfiguration( - const ServerAddresses& stun_servers, - absl::string_view username, - absl::string_view password, - const webrtc::FieldTrialsView* field_trials) +PortConfiguration::PortConfiguration(const ServerAddresses& stun_servers, + absl::string_view username, + absl::string_view password, + const FieldTrialsView* field_trials) : stun_servers(stun_servers), username(username), password(password) { if (!stun_servers.empty()) stun_address = *(stun_servers.begin()); @@ -1749,8 +1737,8 @@ ServerAddresses PortConfiguration::StunServers() { // Every UDP TURN server should also be used as a STUN server if // use_turn_server_as_stun_server is not disabled or the stun servers are // empty. - ServerAddresses turn_servers = GetRelayServerAddresses(PROTO_UDP); - for (const rtc::SocketAddress& turn_server : turn_servers) { + ServerAddresses turn_servers = GetRelayServerAddresses(webrtc::PROTO_UDP); + for (const SocketAddress& turn_server : turn_servers) { if (stun_servers.find(turn_server) == stun_servers.end()) { stun_servers.insert(turn_server); } @@ -1792,4 +1780,4 @@ ServerAddresses PortConfiguration::GetRelayServerAddresses( return servers; } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/client/basic_port_allocator.h b/p2p/client/basic_port_allocator.h index 95bbdb183e..3af4d1b194 100644 --- a/p2p/client/basic_port_allocator.h +++ b/p2p/client/basic_port_allocator.h @@ -11,54 +11,69 @@ #ifndef P2P_CLIENT_BASIC_PORT_ALLOCATOR_H_ #define P2P_CLIENT_BASIC_PORT_ALLOCATOR_H_ +#include +#include #include +#include #include #include +#include "absl/base/attributes.h" +#include "absl/base/nullability.h" #include "absl/strings/string_view.h" +#include "api/candidate.h" +#include "api/environment/environment.h" #include "api/field_trials_view.h" +#include "api/packet_socket_factory.h" #include "api/task_queue/pending_task_safety_flag.h" +#include "api/transport/enums.h" #include "api/turn_customizer.h" +#include "p2p/base/port.h" #include "p2p/base/port_allocator.h" +#include "p2p/base/port_interface.h" +#include "p2p/base/stun_port.h" +#include "p2p/base/turn_port.h" #include "p2p/client/relay_port_factory_interface.h" #include "p2p/client/turn_port_factory.h" +#include "rtc_base/async_packet_socket.h" #include "rtc_base/checks.h" +#include "rtc_base/ip_address.h" #include "rtc_base/memory/always_valid_pointer.h" #include "rtc_base/network.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/socket_address.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" -namespace cricket { +namespace webrtc { class RTC_EXPORT BasicPortAllocator : public PortAllocator { public: - // The NetworkManager is a mandatory argument. The other arguments are - // optional. All pointers are owned by caller and must have a life time - // that exceeds that of BasicPortAllocator. 
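
The header change that follows collapses the two public constructors (including the stun_servers convenience overload) into a single Environment-based constructor whose network manager and socket factory are annotated non-null; STUN and TURN servers now go through SetConfiguration(), which is how the updated unit tests later in this diff configure the allocator. A construction sketch along those lines; the network manager, socket factory and server set are placeholders for whatever the embedder provides:

#include "api/environment/environment_factory.h"
#include "p2p/client/basic_port_allocator.h"

// Minimal sketch mirroring the test-fixture setup later in this diff.
void ConfigureAllocatorSketch(webrtc::NetworkManager* network_manager,
                              webrtc::PacketSocketFactory* socket_factory,
                              const webrtc::ServerAddresses& stun_servers) {
  webrtc::Environment env = webrtc::CreateEnvironment();
  webrtc::BasicPortAllocator allocator(env, network_manager, socket_factory);
  // STUN servers are no longer a constructor argument; they are pushed through
  // SetConfiguration(), as the updated tests do, before Initialize().
  allocator.SetConfiguration(stun_servers, /*turn_servers=*/{},
                             /*candidate_pool_size=*/0, webrtc::NO_PRUNE,
                             /*turn_customizer=*/nullptr);
  allocator.Initialize();
}
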
- BasicPortAllocator(rtc::NetworkManager* network_manager, - rtc::PacketSocketFactory* socket_factory, - webrtc::TurnCustomizer* customizer = nullptr, - RelayPortFactoryInterface* relay_port_factory = nullptr, - const webrtc::FieldTrialsView* field_trials = nullptr); - BasicPortAllocator(rtc::NetworkManager* network_manager, - rtc::PacketSocketFactory* socket_factory, - const ServerAddresses& stun_servers, - const webrtc::FieldTrialsView* field_trials = nullptr); + BasicPortAllocator( + const Environment& env, + NetworkManager* absl_nonnull network_manager, + PacketSocketFactory* absl_nonnull socket_factory, + TurnCustomizer* absl_nullable turn_customizer = nullptr, + RelayPortFactoryInterface* absl_nullable relay_port_factory = nullptr); + + BasicPortAllocator(const BasicPortAllocator&) = delete; + BasicPortAllocator& operator=(const BasicPortAllocator&) = delete; + ~BasicPortAllocator() override; // Set to kDefaultNetworkIgnoreMask by default. void SetNetworkIgnoreMask(int network_ignore_mask) override; int GetNetworkIgnoreMask() const; - rtc::NetworkManager* network_manager() const { + NetworkManager* network_manager() const { CheckRunOnValidThreadIfInitialized(); return network_manager_; } // If socket_factory() is set to NULL each PortAllocatorSession // creates its own socket factory. - rtc::PacketSocketFactory* socket_factory() { + PacketSocketFactory* socket_factory() { CheckRunOnValidThreadIfInitialized(); return socket_factory_; } @@ -74,33 +89,24 @@ class RTC_EXPORT BasicPortAllocator : public PortAllocator { RelayPortFactoryInterface* relay_port_factory() { CheckRunOnValidThreadIfInitialized(); - return relay_port_factory_; + return relay_port_factory_.get(); } - void SetVpnList(const std::vector& vpn_list) override; + void SetVpnList(const std::vector& vpn_list) override; - const webrtc::FieldTrialsView* field_trials() const { - return field_trials_.get(); - } + const Environment& env() const { return env_; } private: - void OnIceRegathering(PortAllocatorSession* session, - IceRegatheringReason reason); - bool MdnsObfuscationEnabled() const override; - webrtc::AlwaysValidPointer - field_trials_; - rtc::NetworkManager* network_manager_; + const Environment env_; + NetworkManager* network_manager_; // Always externally-owned pointer to a socket factory. - rtc::PacketSocketFactory* const socket_factory_; - int network_ignore_mask_ = rtc::kDefaultNetworkIgnoreMask; + PacketSocketFactory* const socket_factory_; + int network_ignore_mask_ = webrtc::kDefaultNetworkIgnoreMask; - // This instance is created if caller does pass a factory. - const std::unique_ptr default_relay_port_factory_; - // This is the factory being used. 
- RelayPortFactoryInterface* const relay_port_factory_; + AlwaysValidPointer + relay_port_factory_; }; struct PortConfiguration; @@ -126,8 +132,8 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession { ~BasicPortAllocatorSession() override; virtual BasicPortAllocator* allocator(); - rtc::Thread* network_thread() { return network_thread_; } - rtc::PacketSocketFactory* socket_factory() { return socket_factory_; } + Thread* network_thread() { return network_thread_; } + PacketSocketFactory* socket_factory() { return socket_factory_; } // If the new filter allows new types of candidates compared to the previous // filter, gathered candidates that were discarded because of not matching the @@ -137,7 +143,7 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession { // the type of candidates to gather and the candidate filter only controls the // signaling of candidates. As a result, with the candidate filter changed // alone, all newly allowed candidates for signaling should already be - // gathered by the respective cricket::Port. + // gathered by the respective webrtc::Port. void SetCandidateFilter(uint32_t filter) override; void StartGettingPorts() override; void StopGettingPorts() override; @@ -145,7 +151,7 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession { bool IsGettingPorts() override; bool IsCleared() const override; bool IsStopped() const override; - // These will all be cricket::Ports. + // These will all be webrtc::Ports. std::vector ReadyPorts() const override; std::vector ReadyCandidates() const override; bool CandidatesAllocationDone() const override; @@ -153,10 +159,10 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession { void GetCandidateStatsFromReadyPorts( CandidateStatsList* candidate_stats_list) const override; void SetStunKeepaliveIntervalForReadyPorts( - const absl::optional& stun_keepalive_interval) override; + const std::optional& stun_keepalive_interval) override; void PruneAllPorts() override; - static std::vector SelectIPv6Networks( - std::vector& all_ipv6_networks, + static std::vector SelectIPv6Networks( + std::vector& all_ipv6_networks, int max_ipv6_networks); protected: @@ -233,7 +239,7 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession { void DoAllocate(bool disable_equivalent_phases); void OnNetworksChanged(); void OnAllocationSequenceObjectsCreated(); - void DisableEquivalentPhases(const rtc::Network* network, + void DisableEquivalentPhases(const Network* network, PortConfiguration* config, uint32_t* flags); void AddAllocatedPort(Port* port, AllocationSequence* seq); @@ -246,9 +252,9 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession { void MaybeSignalCandidatesAllocationDone(); void OnPortAllocationComplete(); PortData* FindPort(Port* port); - std::vector GetNetworks(); - std::vector GetFailedNetworks(); - void Regather(const std::vector& networks, + std::vector GetNetworks(); + std::vector GetFailedNetworks(); + void Regather(const std::vector& networks, bool disable_equivalent_phases, IceRegatheringReason reason); @@ -256,7 +262,7 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession { bool CandidatePairable(const Candidate& c, const Port* port) const; std::vector GetUnprunedPorts( - const std::vector& networks); + const std::vector& networks); // Prunes ports and signal the remote side to remove the candidates that // were previously signaled from these ports. 
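
The relay_port_factory_ member above becomes an AlwaysValidPointer, replacing the separate default_relay_port_factory_ plus raw pointer pair: if no factory is injected, an owned default (presumably TurnPortFactory, matching the removed code; the template arguments are not visible in this rendering) is created, so relay_port_factory() never returns null. A stand-in sketch of that idea, not the real rtc_base helper:

#include <memory>

// Illustrative stand-ins for RelayPortFactoryInterface and its default.
struct FactorySketch {
  virtual ~FactorySketch() = default;
};
struct DefaultFactorySketch : FactorySketch {};

// Keep the injected pointer if one was supplied, otherwise own a default
// instance, so get() never returns null.
class AlwaysValidFactoryPtr {
 public:
  explicit AlwaysValidFactoryPtr(FactorySketch* injected)
      : owned_default_(injected ? nullptr
                                : std::make_unique<DefaultFactorySketch>()),
        ptr_(injected ? injected : owned_default_.get()) {}

  FactorySketch* get() const { return ptr_; }  // never null

 private:
  const std::unique_ptr<FactorySketch> owned_default_;
  FactorySketch* const ptr_;
};

int main() {
  DefaultFactorySketch injected;
  AlwaysValidFactoryPtr with_injected(&injected);  // uses the caller's factory
  AlwaysValidFactoryPtr with_default(nullptr);     // falls back to an owned default
  return (with_injected.get() != nullptr && with_default.get() != nullptr) ? 0 : 1;
}
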
void PrunePortsAndRemoveCandidates( @@ -271,8 +277,8 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession { bool PruneNewlyPairableTurnPort(PortData* newly_pairable_turn_port); BasicPortAllocator* allocator_; - rtc::Thread* network_thread_; - rtc::PacketSocketFactory* socket_factory_; + Thread* network_thread_; + PacketSocketFactory* socket_factory_; bool allocation_started_; bool network_manager_started_; bool allocation_sequences_created_; @@ -280,12 +286,12 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession { std::vector sequences_; std::vector ports_; std::vector candidate_error_events_; - uint32_t candidate_filter_ = CF_ALL; + uint32_t candidate_filter_ = webrtc::CF_ALL; // Policy on how to prune turn ports, taken from the port allocator. - webrtc::PortPrunePolicy turn_port_prune_policy_; + PortPrunePolicy turn_port_prune_policy_; SessionState state_ = SessionState::CLEARED; int allocation_epoch_ RTC_GUARDED_BY(network_thread_) = 0; - webrtc::ScopedTaskSafety network_safety_; + ScopedTaskSafety network_safety_; friend class AllocationSequence; }; @@ -294,7 +300,7 @@ class RTC_EXPORT BasicPortAllocatorSession : public PortAllocatorSession { // TODO(deadbeef): Rename "relay" to "turn_server" in this struct. struct RTC_EXPORT PortConfiguration { // TODO(jiayl): remove `stun_address` when Chrome is updated. - rtc::SocketAddress stun_address; + SocketAddress stun_address; ServerAddresses stun_servers; std::string username; std::string password; @@ -306,7 +312,7 @@ struct RTC_EXPORT PortConfiguration { PortConfiguration(const ServerAddresses& stun_servers, absl::string_view username, absl::string_view password, - const webrtc::FieldTrialsView* field_trials = nullptr); + const FieldTrialsView* field_trials = nullptr); // Returns addresses of both the explicitly configured STUN servers, // and TURN servers that should be used as STUN servers. @@ -324,13 +330,10 @@ struct RTC_EXPORT PortConfiguration { ServerAddresses GetRelayServerAddresses(ProtocolType type) const; }; -class UDPPort; -class TurnPort; - // Performs the allocation of ports, in a sequenced (timed) manner, for a given // network and IP address. // This class is thread-compatible. -class AllocationSequence : public sigslot::has_slots<> { +class AllocationSequence { public: enum State { kInit, // Initial state. @@ -348,7 +351,7 @@ class AllocationSequence : public sigslot::has_slots<> { // event to trigger signal. This can also be achieved by starting a timer in // BPAS, but this is less deterministic. AllocationSequence(BasicPortAllocatorSession* session, - const rtc::Network* network, + const Network* network, PortConfiguration* config, uint32_t flags, std::function port_allocation_complete_callback); @@ -357,14 +360,14 @@ class AllocationSequence : public sigslot::has_slots<> { void OnNetworkFailed(); State state() const { return state_; } - const rtc::Network* network() const { return network_; } + const Network* network() const { return network_; } bool network_failed() const { return network_failed_; } void set_network_failed() { network_failed_ = true; } // Disables the phases for a new sequence that this one already covers for an // equivalent network setup. 
- void DisableEquivalentPhases(const rtc::Network* network, + void DisableEquivalentPhases(const Network* network, PortConfiguration* config, uint32_t* flags); @@ -385,24 +388,20 @@ class AllocationSequence : public sigslot::has_slots<> { void CreateStunPorts(); void CreateRelayPorts(); - void OnReadPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& remote_addr, - const int64_t& packet_time_us); + void OnReadPacket(AsyncPacketSocket* socket, const ReceivedIpPacket& packet); void OnPortDestroyed(PortInterface* port); BasicPortAllocatorSession* session_; bool network_failed_ = false; - const rtc::Network* network_; + const Network* network_; // Compared with the new best IP in DisableEquivalentPhases. - rtc::IPAddress previous_best_ip_; + IPAddress previous_best_ip_; PortConfiguration* config_; State state_; uint32_t flags_; ProtocolList protocols_; - std::unique_ptr udp_socket_; + std::unique_ptr udp_socket_; // There will be only one udp port per AllocationSequence. UDPPort* udp_port_; std::vector relay_ports_; @@ -412,9 +411,21 @@ class AllocationSequence : public sigslot::has_slots<> { // posted. If the sampled counter doesn't match `epoch_` on reception, the // posted task is ignored. int epoch_ = 0; - webrtc::ScopedTaskSafety safety_; + ScopedTaskSafety safety_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::AllocationSequence; +using ::webrtc::BasicPortAllocator; +using ::webrtc::BasicPortAllocatorSession; +using ::webrtc::PortConfiguration; +using ::webrtc::SessionState; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_CLIENT_BASIC_PORT_ALLOCATOR_H_ diff --git a/p2p/client/basic_port_allocator_unittest.cc b/p2p/client/basic_port_allocator_unittest.cc index 55222a1be2..5cf1b6e5f1 100644 --- a/p2p/client/basic_port_allocator_unittest.cc +++ b/p2p/client/basic_port_allocator_unittest.cc @@ -10,18 +10,35 @@ #include "p2p/client/basic_port_allocator.h" +#include +#include +#include #include -#include // no-presubmit-check TODO(webrtc:8982) +#include +#include +#include #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" +#include "api/candidate.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/test/rtc_error_matchers.h" +#include "api/transport/enums.h" +#include "api/units/time_delta.h" #include "p2p/base/basic_packet_socket_factory.h" #include "p2p/base/p2p_constants.h" +#include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" +#include "p2p/base/port_interface.h" #include "p2p/base/stun_port.h" #include "p2p/base/stun_request.h" -#include "p2p/base/stun_server.h" -#include "p2p/base/test_stun_server.h" -#include "p2p/base/test_turn_server.h" +#include "p2p/test/nat_server.h" +#include "p2p/test/nat_socket_factory.h" +#include "p2p/test/nat_types.h" +#include "p2p/test/stun_server.h" +#include "p2p/test/test_stun_server.h" +#include "p2p/test/test_turn_server.h" #include "rtc_base/fake_clock.h" #include "rtc_base/fake_mdns_responder.h" #include "rtc_base/fake_network.h" @@ -29,32 +46,33 @@ #include "rtc_base/gunit.h" #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" -#include "rtc_base/nat_server.h" -#include "rtc_base/nat_socket_factory.h" -#include "rtc_base/nat_types.h" #include 
"rtc_base/net_helper.h" -#include "rtc_base/net_helpers.h" #include "rtc_base/net_test_helpers.h" #include "rtc_base/network.h" #include "rtc_base/network_constants.h" -#include "rtc_base/network_monitor.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" -#include "rtc_base/socket_address_pair.h" +#include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/virtual_socket_server.h" #include "system_wrappers/include/metrics.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/scoped_key_value_config.h" +#include "test/wait_until.h" -using rtc::IPAddress; -using rtc::SocketAddress; using ::testing::Contains; +using ::testing::Eq; +using ::testing::IsTrue; using ::testing::Not; +using ::webrtc::CreateEnvironment; +using ::webrtc::Environment; +using ::webrtc::IceCandidateType; +using ::webrtc::IPAddress; +using ::webrtc::SocketAddress; #define MAYBE_SKIP_IPV4 \ - if (!rtc::HasIPv4Enabled()) { \ + if (!::webrtc::HasIPv4Enabled()) { \ RTC_LOG(LS_INFO) << "No IPv4... skipping"; \ return; \ } @@ -79,10 +97,12 @@ static const SocketAddress kClientIPv6Addr4( static const SocketAddress kClientIPv6Addr5( "2401:fa00:4:5000:be30:5bff:fee5:c3", 0); -static const SocketAddress kNatUdpAddr("77.77.77.77", rtc::NAT_SERVER_UDP_PORT); -static const SocketAddress kNatTcpAddr("77.77.77.77", rtc::NAT_SERVER_TCP_PORT); +static const SocketAddress kNatUdpAddr("77.77.77.77", + webrtc::NAT_SERVER_UDP_PORT); +static const SocketAddress kNatTcpAddr("77.77.77.77", + webrtc::NAT_SERVER_TCP_PORT); static const SocketAddress kRemoteClientAddr("22.22.22.22", 0); -static const SocketAddress kStunAddr("99.99.99.1", cricket::STUN_SERVER_PORT); +static const SocketAddress kStunAddr("99.99.99.1", webrtc::STUN_SERVER_PORT); static const SocketAddress kTurnUdpIntAddr("99.99.99.4", 3478); static const SocketAddress kTurnUdpIntIPv6Addr( "2402:fb00:4:1000:be30:5bff:fee5:c3", @@ -108,24 +128,22 @@ static const int kDefaultAllocationTimeout = 3000; static const char kTurnUsername[] = "test"; static const char kTurnPassword[] = "test"; -// STUN timeout (with all retries) is cricket::STUN_TOTAL_TIMEOUT. +// STUN timeout (with all retries) is webrtc::STUN_TOTAL_TIMEOUT. // Add some margin of error for slow bots. 
-static const int kStunTimeoutMs = cricket::STUN_TOTAL_TIMEOUT; - -constexpr uint64_t kTiebreakerDefault = 44444; +static const int kStunTimeoutMs = webrtc::STUN_TOTAL_TIMEOUT; namespace { void CheckStunKeepaliveIntervalOfAllReadyPorts( - const cricket::PortAllocatorSession* allocator_session, + const webrtc::PortAllocatorSession* allocator_session, int expected) { auto ready_ports = allocator_session->ReadyPorts(); for (const auto* port : ready_ports) { - if (port->Type() == cricket::STUN_PORT_TYPE || - (port->Type() == cricket::LOCAL_PORT_TYPE && - port->GetProtocol() == cricket::PROTO_UDP)) { + if (port->Type() == IceCandidateType::kSrflx || + (port->Type() == IceCandidateType::kHost && + port->GetProtocol() == webrtc::PROTO_UDP)) { EXPECT_EQ( - static_cast(port)->stun_keepalive_delay(), + static_cast(port)->stun_keepalive_delay(), expected); } } @@ -133,50 +151,31 @@ void CheckStunKeepaliveIntervalOfAllReadyPorts( } // namespace -namespace cricket { - -// Helper for dumping candidates -std::ostream& operator<<(std::ostream& os, - const std::vector& candidates) { - os << '['; - bool first = true; - for (const Candidate& c : candidates) { - if (!first) { - os << ", "; - } - os << c.ToString(); - first = false; - } - os << ']'; - return os; -} +namespace webrtc { class BasicPortAllocatorTestBase : public ::testing::Test, public sigslot::has_slots<> { public: BasicPortAllocatorTestBase() - : vss_(new rtc::VirtualSocketServer()), - fss_(new rtc::FirewallSocketServer(vss_.get())), + : vss_(new VirtualSocketServer()), + fss_(new FirewallSocketServer(vss_.get())), socket_factory_(fss_.get()), thread_(fss_.get()), // Note that the NAT is not used by default. ResetWithStunServerAndNat // must be called. nat_factory_(vss_.get(), kNatUdpAddr, kNatTcpAddr), - nat_socket_factory_(new rtc::BasicPacketSocketFactory(&nat_factory_)), - stun_server_(TestStunServer::Create(fss_.get(), kStunAddr)), - turn_server_(rtc::Thread::Current(), + nat_socket_factory_(new BasicPacketSocketFactory(&nat_factory_)), + stun_server_(TestStunServer::Create(fss_.get(), kStunAddr, thread_)), + turn_server_(Thread::Current(), fss_.get(), kTurnUdpIntAddr, kTurnUdpExtAddr), candidate_allocation_done_(false) { - ServerAddresses stun_servers; - stun_servers.insert(kStunAddr); + allocator_.emplace(env_, &network_manager_, &socket_factory_); + allocator_->SetConfiguration({kStunAddr}, {}, 0, webrtc::NO_PRUNE, nullptr); - allocator_ = std::make_unique( - &network_manager_, &socket_factory_, stun_servers, &field_trials_); allocator_->Initialize(); - allocator_->set_step_delay(kMinimumStepDelay); - allocator_->SetIceTiebreaker(kTiebreakerDefault); + allocator_->set_step_delay(webrtc::kMinimumStepDelay); webrtc::metrics::Reset(); } @@ -188,7 +187,7 @@ class BasicPortAllocatorTestBase : public ::testing::Test, } void AddInterface(const SocketAddress& addr, absl::string_view if_name, - rtc::AdapterType type) { + AdapterType type) { network_manager_.AddInterface(addr, if_name, type); } // The default source address is the public address that STUN server will @@ -211,44 +210,42 @@ class BasicPortAllocatorTestBase : public ::testing::Test, } // Endpoint is on the public network. No STUN or TURN. 
void ResetWithNoServersOrNat() { - allocator_.reset( - new BasicPortAllocator(&network_manager_, &socket_factory_)); + allocator_.emplace(env_, &network_manager_, &socket_factory_); allocator_->Initialize(); - allocator_->SetIceTiebreaker(kTiebreakerDefault); - allocator_->set_step_delay(kMinimumStepDelay); + allocator_->set_step_delay(webrtc::kMinimumStepDelay); } // Endpoint is behind a NAT, with STUN specified. - void ResetWithStunServerAndNat(const rtc::SocketAddress& stun_server) { + void ResetWithStunServerAndNat(const SocketAddress& stun_server) { ResetWithStunServer(stun_server, true); } // Endpoint is on the public network, with STUN specified. - void ResetWithStunServerNoNat(const rtc::SocketAddress& stun_server) { + void ResetWithStunServerNoNat(const SocketAddress& stun_server) { ResetWithStunServer(stun_server, false); } // Endpoint is on the public network, with TURN specified. - void ResetWithTurnServersNoNat(const rtc::SocketAddress& udp_turn, - const rtc::SocketAddress& tcp_turn) { + void ResetWithTurnServersNoNat(const SocketAddress& udp_turn, + const SocketAddress& tcp_turn) { ResetWithNoServersOrNat(); AddTurnServers(udp_turn, tcp_turn); } - RelayServerConfig CreateTurnServers(const rtc::SocketAddress& udp_turn, - const rtc::SocketAddress& tcp_turn) { + RelayServerConfig CreateTurnServers(const SocketAddress& udp_turn, + const SocketAddress& tcp_turn) { RelayServerConfig turn_server; RelayCredentials credentials(kTurnUsername, kTurnPassword); turn_server.credentials = credentials; if (!udp_turn.IsNil()) { - turn_server.ports.push_back(ProtocolAddress(udp_turn, PROTO_UDP)); + turn_server.ports.push_back(ProtocolAddress(udp_turn, webrtc::PROTO_UDP)); } if (!tcp_turn.IsNil()) { - turn_server.ports.push_back(ProtocolAddress(tcp_turn, PROTO_TCP)); + turn_server.ports.push_back(ProtocolAddress(tcp_turn, webrtc::PROTO_TCP)); } return turn_server; } - void AddTurnServers(const rtc::SocketAddress& udp_turn, - const rtc::SocketAddress& tcp_turn) { + void AddTurnServers(const SocketAddress& udp_turn, + const SocketAddress& tcp_turn) { RelayServerConfig turn_server = CreateTurnServers(udp_turn, tcp_turn); allocator_->AddTurnServerForTesting(turn_server); } @@ -299,7 +296,6 @@ class BasicPortAllocatorTestBase : public ::testing::Test, this, &BasicPortAllocatorTestBase::OnCandidatesRemoved); session->SignalCandidatesAllocationDone.connect( this, &BasicPortAllocatorTestBase::OnCandidatesAllocationDone); - session->set_ice_tiebreaker(kTiebreakerDefault); return session; } @@ -311,14 +307,14 @@ class BasicPortAllocatorTestBase : public ::testing::Test, const SocketAddress& pattern) { return address.ipaddr() == pattern.ipaddr() && ((pattern.port() == 0 && - (address.port() != 0 || IPIsAny(address.ipaddr()))) || + (address.port() != 0 || webrtc::IPIsAny(address.ipaddr()))) || (pattern.port() != 0 && address.port() == pattern.port())); } // Returns the number of ports that have matching type, protocol and // address. 
static int CountPorts(const std::vector& ports, - absl::string_view type, + IceCandidateType type, ProtocolType protocol, const SocketAddress& client_addr) { return absl::c_count_if( @@ -328,20 +324,9 @@ class BasicPortAllocatorTestBase : public ::testing::Test, }); } - static int CountCandidates(const std::vector& candidates, - absl::string_view type, - absl::string_view proto, - const SocketAddress& addr) { - return absl::c_count_if( - candidates, [type, proto, addr](const Candidate& c) { - return c.type() == type && c.protocol() == proto && - AddressMatch(c.address(), addr); - }); - } - // Find a candidate and return it. static bool FindCandidate(const std::vector& candidates, - absl::string_view type, + IceCandidateType type, absl::string_view proto, const SocketAddress& addr, Candidate* found) { @@ -358,7 +343,7 @@ class BasicPortAllocatorTestBase : public ::testing::Test, // Convenience method to call FindCandidate with no return. static bool HasCandidate(const std::vector& candidates, - absl::string_view type, + IceCandidateType type, absl::string_view proto, const SocketAddress& addr) { return FindCandidate(candidates, type, proto, addr, nullptr); @@ -367,7 +352,7 @@ class BasicPortAllocatorTestBase : public ::testing::Test, // Version of HasCandidate that also takes a related address. static bool HasCandidateWithRelatedAddr( const std::vector& candidates, - absl::string_view type, + IceCandidateType type, absl::string_view proto, const SocketAddress& addr, const SocketAddress& related_addr) { @@ -379,16 +364,14 @@ class BasicPortAllocatorTestBase : public ::testing::Test, }); } - static bool CheckPort(const rtc::SocketAddress& addr, - int min_port, - int max_port) { + static bool CheckPort(const SocketAddress& addr, int min_port, int max_port) { return (addr.port() >= min_port && addr.port() <= max_port); } - static bool HasNetwork(const std::vector& networks, - const rtc::Network& to_be_found) { + static bool HasNetwork(const std::vector& networks, + const Network& to_be_found) { auto it = - absl::c_find_if(networks, [to_be_found](const rtc::Network* network) { + absl::c_find_if(networks, [to_be_found](const Network* network) { return network->description() == to_be_found.description() && network->name() == to_be_found.name() && network->prefix() == to_be_found.prefix(); @@ -414,16 +397,15 @@ class BasicPortAllocatorTestBase : public ::testing::Test, int send_buffer_size; if (expected == -1) { EXPECT_EQ(SOCKET_ERROR, - (*it)->GetOption(rtc::Socket::OPT_SNDBUF, &send_buffer_size)); + (*it)->GetOption(Socket::OPT_SNDBUF, &send_buffer_size)); } else { - EXPECT_EQ(0, - (*it)->GetOption(rtc::Socket::OPT_SNDBUF, &send_buffer_size)); + EXPECT_EQ(0, (*it)->GetOption(Socket::OPT_SNDBUF, &send_buffer_size)); ASSERT_EQ(expected, send_buffer_size); } } } - rtc::VirtualSocketServer* virtual_socket_server() { return vss_.get(); } + VirtualSocketServer* virtual_socket_server() { return vss_.get(); } protected: BasicPortAllocator& allocator() { return *allocator_; } @@ -492,44 +474,44 @@ class BasicPortAllocatorTestBase : public ::testing::Test, return false; } - void ResetWithStunServer(const rtc::SocketAddress& stun_server, - bool with_nat) { + void ResetWithStunServer(const SocketAddress& stun_server, bool with_nat) { if (with_nat) { - nat_server_.reset(new rtc::NATServer( - rtc::NAT_OPEN_CONE, vss_.get(), kNatUdpAddr, kNatTcpAddr, vss_.get(), - rtc::SocketAddress(kNatUdpAddr.ipaddr(), 0))); + nat_server_.reset(new NATServer( + NAT_OPEN_CONE, thread_, vss_.get(), kNatUdpAddr, kNatTcpAddr, 
thread_, + vss_.get(), SocketAddress(kNatUdpAddr.ipaddr(), 0))); } else { nat_socket_factory_ = - std::make_unique(fss_.get()); + std::make_unique(fss_.get()); } ServerAddresses stun_servers; if (!stun_server.IsNil()) { stun_servers.insert(stun_server); } - allocator_.reset(new BasicPortAllocator(&network_manager_, - nat_socket_factory_.get(), - stun_servers, &field_trials_)); + allocator_.emplace(env_, &network_manager_, nat_socket_factory_.get()); + allocator_->SetConfiguration(stun_servers, {}, 0, webrtc::NO_PRUNE, + nullptr); + allocator_->Initialize(); - allocator_->set_step_delay(kMinimumStepDelay); + allocator_->set_step_delay(webrtc::kMinimumStepDelay); } - std::unique_ptr vss_; - std::unique_ptr fss_; - rtc::BasicPacketSocketFactory socket_factory_; - rtc::AutoSocketServerThread thread_; - std::unique_ptr nat_server_; - rtc::NATSocketFactory nat_factory_; - std::unique_ptr nat_socket_factory_; - std::unique_ptr stun_server_; + Environment env_ = CreateEnvironment(); + std::unique_ptr vss_; + std::unique_ptr fss_; + BasicPacketSocketFactory socket_factory_; + AutoSocketServerThread thread_; + std::unique_ptr nat_server_; + NATSocketFactory nat_factory_; + std::unique_ptr nat_socket_factory_; + TestStunServer::StunServerPtr stun_server_; TestTurnServer turn_server_; - rtc::FakeNetworkManager network_manager_; - std::unique_ptr allocator_; + FakeNetworkManager network_manager_; + std::optional allocator_; std::unique_ptr session_; std::vector ports_; std::vector candidates_; bool candidate_allocation_done_; - webrtc::test::ScopedKeyValueConfig field_trials_; }; class BasicPortAllocatorTestWithRealClock : public BasicPortAllocatorTestBase { @@ -537,7 +519,7 @@ class BasicPortAllocatorTestWithRealClock : public BasicPortAllocatorTestBase { class FakeClockBase { public: - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; }; class BasicPortAllocatorTest : public FakeClockBase, @@ -550,51 +532,54 @@ class BasicPortAllocatorTest : public FakeClockBase, // it should be ignore. 
void CheckDisableAdapterEnumeration( uint32_t total_ports, - const rtc::IPAddress& host_candidate_addr, - const rtc::IPAddress& stun_candidate_addr, - const rtc::IPAddress& relay_candidate_udp_transport_addr, - const rtc::IPAddress& relay_candidate_tcp_transport_addr) { + const IPAddress& host_candidate_addr, + const IPAddress& stun_candidate_addr, + const IPAddress& relay_candidate_udp_transport_addr, + const IPAddress& relay_candidate_tcp_transport_addr) { network_manager_.set_default_local_addresses(kPrivateAddr.ipaddr(), - rtc::IPAddress()); + IPAddress()); if (!session_) { ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); } session_->set_flags(session_->flags() | - PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION | - PORTALLOCATOR_ENABLE_SHARED_SOCKET); + webrtc::PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION | + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET); allocator().set_allow_tcp_listen(false); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); uint32_t total_candidates = 0; if (!host_candidate_addr.IsNil()) { - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", - rtc::SocketAddress(kPrivateAddr.ipaddr(), 0))); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", + SocketAddress(kPrivateAddr.ipaddr(), 0))); ++total_candidates; } if (!stun_candidate_addr.IsNil()) { - rtc::SocketAddress related_address(host_candidate_addr, 0); + SocketAddress related_address(host_candidate_addr, 0); if (host_candidate_addr.IsNil()) { - related_address.SetIP(rtc::GetAnyIP(stun_candidate_addr.family())); + related_address.SetIP(webrtc::GetAnyIP(stun_candidate_addr.family())); } EXPECT_TRUE(HasCandidateWithRelatedAddr( - candidates_, "stun", "udp", - rtc::SocketAddress(stun_candidate_addr, 0), related_address)); + candidates_, IceCandidateType::kSrflx, "udp", + SocketAddress(stun_candidate_addr, 0), related_address)); ++total_candidates; } if (!relay_candidate_udp_transport_addr.IsNil()) { EXPECT_TRUE(HasCandidateWithRelatedAddr( - candidates_, "relay", "udp", - rtc::SocketAddress(relay_candidate_udp_transport_addr, 0), - rtc::SocketAddress(stun_candidate_addr, 0))); + candidates_, IceCandidateType::kRelay, "udp", + SocketAddress(relay_candidate_udp_transport_addr, 0), + SocketAddress(stun_candidate_addr, 0))); ++total_candidates; } if (!relay_candidate_tcp_transport_addr.IsNil()) { EXPECT_TRUE(HasCandidateWithRelatedAddr( - candidates_, "relay", "udp", - rtc::SocketAddress(relay_candidate_tcp_transport_addr, 0), - rtc::SocketAddress(stun_candidate_addr, 0))); + candidates_, IceCandidateType::kRelay, "udp", + SocketAddress(relay_candidate_tcp_transport_addr, 0), + SocketAddress(stun_candidate_addr, 0))); ++total_candidates; } @@ -603,35 +588,41 @@ class BasicPortAllocatorTest : public FakeClockBase, } void TestIPv6TurnPortPrunesIPv4TurnPort() { - turn_server_.AddInternalSocket(kTurnUdpIntIPv6Addr, PROTO_UDP); + turn_server_.AddInternalSocket(kTurnUdpIntIPv6Addr, webrtc::PROTO_UDP); // Add two IP addresses on the same interface. 
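The `EXPECT_TRUE_SIMULATED_WAIT(cond, timeout, fake_clock)` macro calls above become `webrtc::WaitUntil(predicate, matcher, {.timeout = ..., .clock = &fake_clock})` checked with `webrtc::IsRtcOk()`. As a rough model of what such a helper does (a simplified stand-in, not the real WebRTC implementation), it repeatedly evaluates the predicate while advancing simulated time until the condition holds or the timeout of simulated time elapses:

#include <chrono>
#include <functional>

// Simplified simulated clock: time only moves when the test advances it.
struct SimulatedClock {
  std::chrono::milliseconds now{0};
  void AdvanceTime(std::chrono::milliseconds delta) { now += delta; }
};

// Simplified stand-in for a WaitUntil helper: polls `predicate`, advancing
// the simulated clock between polls, and reports whether the predicate
// became true before `timeout` of simulated time passed.
bool WaitUntil(const std::function<bool()>& predicate,
               std::chrono::milliseconds timeout,
               SimulatedClock& clock) {
  const auto deadline = clock.now + timeout;
  while (clock.now < deadline) {
    if (predicate()) {
      return true;
    }
    // In the real tests, advancing the fake clock is what lets queued tasks
    // and timers fire; here it only moves simulated time forward.
    clock.AdvanceTime(std::chrono::milliseconds(1));
  }
  return predicate();
}

Used in the spirit of the tests above: `WaitUntil([&] { return candidate_allocation_done_; }, std::chrono::milliseconds(1000), clock)`.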
AddInterface(kClientAddr, "net1"); AddInterface(kClientIPv6Addr, "net1"); - allocator_.reset( - new BasicPortAllocator(&network_manager_, &socket_factory_)); + allocator_.emplace(env_, &network_manager_, &socket_factory_); allocator_->Initialize(); allocator_->SetConfiguration(allocator_->stun_servers(), allocator_->turn_servers(), 0, webrtc::PRUNE_BASED_ON_PRIORITY); - AddTurnServers(kTurnUdpIntIPv6Addr, rtc::SocketAddress()); - AddTurnServers(kTurnUdpIntAddr, rtc::SocketAddress()); + AddTurnServers(kTurnUdpIntIPv6Addr, SocketAddress()); + AddTurnServers(kTurnUdpIntAddr, SocketAddress()); - allocator_->set_step_delay(kMinimumStepDelay); + allocator_->set_step_delay(webrtc::kMinimumStepDelay); allocator_->set_flags( - allocator().flags() | PORTALLOCATOR_ENABLE_SHARED_SOCKET | - PORTALLOCATOR_ENABLE_IPV6 | PORTALLOCATOR_DISABLE_TCP); + allocator().flags() | webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET | + webrtc::PORTALLOCATOR_ENABLE_IPV6 | webrtc::PORTALLOCATOR_DISABLE_TCP); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // Three ports (one IPv4 STUN, one IPv6 STUN and one TURN) will be ready. EXPECT_EQ(3U, session_->ReadyPorts().size()); EXPECT_EQ(3U, ports_.size()); - EXPECT_EQ(1, CountPorts(ports_, "local", PROTO_UDP, kClientAddr)); - EXPECT_EQ(1, CountPorts(ports_, "local", PROTO_UDP, kClientIPv6Addr)); - EXPECT_EQ(1, CountPorts(ports_, "relay", PROTO_UDP, kClientIPv6Addr)); - EXPECT_EQ(0, CountPorts(ports_, "relay", PROTO_UDP, kClientAddr)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kHost, webrtc::PROTO_UDP, + kClientAddr)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kHost, webrtc::PROTO_UDP, + kClientIPv6Addr)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kRelay, webrtc::PROTO_UDP, + kClientIPv6Addr)); + EXPECT_EQ(0, CountPorts(ports_, IceCandidateType::kRelay, webrtc::PROTO_UDP, + kClientAddr)); // Now that we remove candidates when a TURN port is pruned, there will be // exactly 3 candidates in both `candidates_` and `ready_candidates`. 
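The helpers above (`CountPorts`, `FindCandidate`, `HasCandidate`, ...) now take an `IceCandidateType` enum instead of the old string spellings: "local" maps to kHost, "stun" to kSrflx, and "relay" to kRelay. A small illustrative mapping with a hypothetical helper name (the real enum is defined in the WebRTC API, and the "prflx" spelling is an assumption since these hunks never use it):

#include <string_view>

// Mirrors the enum values the tests above switch to; kPrflx is listed for
// completeness even though it does not appear in these hunks.
enum class IceCandidateType { kHost, kSrflx, kPrflx, kRelay };

// Hypothetical helper mapping the legacy string spellings onto the enum.
constexpr std::string_view ToLegacyString(IceCandidateType type) {
  switch (type) {
    case IceCandidateType::kHost:
      return "local";
    case IceCandidateType::kSrflx:
      return "stun";
    case IceCandidateType::kPrflx:
      return "prflx";
    case IceCandidateType::kRelay:
      return "relay";
  }
  return "unknown";
}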
@@ -639,43 +630,46 @@ class BasicPortAllocatorTest : public FakeClockBase, const std::vector& ready_candidates = session_->ReadyCandidates(); EXPECT_EQ(3U, ready_candidates.size()); - EXPECT_TRUE(HasCandidate(ready_candidates, "local", "udp", kClientAddr)); - EXPECT_TRUE(HasCandidate(ready_candidates, "relay", "udp", - rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); + EXPECT_TRUE(HasCandidate(ready_candidates, IceCandidateType::kHost, "udp", + kClientAddr)); + EXPECT_TRUE(HasCandidate(ready_candidates, IceCandidateType::kRelay, "udp", + SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); } - void TestTurnPortPrunesWithUdpAndTcpPorts( - webrtc::PortPrunePolicy prune_policy, - bool tcp_pruned) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + void TestTurnPortPrunesWithUdpAndTcpPorts(PortPrunePolicy prune_policy, + bool tcp_pruned) { + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); AddInterface(kClientAddr); - allocator_.reset( - new BasicPortAllocator(&network_manager_, &socket_factory_)); + allocator_.emplace(env_, &network_manager_, &socket_factory_); allocator_->Initialize(); allocator_->SetConfiguration(allocator_->stun_servers(), allocator_->turn_servers(), 0, prune_policy); AddTurnServers(kTurnUdpIntAddr, kTurnTcpIntAddr); - allocator_->set_step_delay(kMinimumStepDelay); + allocator_->set_step_delay(webrtc::kMinimumStepDelay); allocator_->set_flags(allocator().flags() | - PORTALLOCATOR_ENABLE_SHARED_SOCKET | - PORTALLOCATOR_DISABLE_TCP); + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET | + webrtc::PORTALLOCATOR_DISABLE_TCP); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // Only 2 ports (one STUN and one TURN) are actually being used. EXPECT_EQ(2U, session_->ReadyPorts().size()); // We have verified that each port, when it is added to `ports_`, it is // found in `ready_ports`, and when it is pruned, it is not found in // `ready_ports`, so we only need to verify the content in one of them. EXPECT_EQ(2U, ports_.size()); - EXPECT_EQ(1, CountPorts(ports_, "local", PROTO_UDP, kClientAddr)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kHost, webrtc::PROTO_UDP, + kClientAddr)); int num_udp_ports = tcp_pruned ? 1 : 0; - EXPECT_EQ(num_udp_ports, - CountPorts(ports_, "relay", PROTO_UDP, kClientAddr)); - EXPECT_EQ(1 - num_udp_ports, - CountPorts(ports_, "relay", PROTO_TCP, kClientAddr)); + EXPECT_EQ(num_udp_ports, CountPorts(ports_, IceCandidateType::kRelay, + webrtc::PROTO_UDP, kClientAddr)); + EXPECT_EQ(1 - num_udp_ports, CountPorts(ports_, IceCandidateType::kRelay, + webrtc::PROTO_TCP, kClientAddr)); // Now that we remove candidates when a TURN port is pruned, `candidates_` // should only contains two candidates regardless whether the TCP TURN port @@ -686,24 +680,24 @@ class BasicPortAllocatorTest : public FakeClockBase, const std::vector& ready_candidates = session_->ReadyCandidates(); EXPECT_EQ(2U, ready_candidates.size()); - EXPECT_TRUE(HasCandidate(ready_candidates, "local", "udp", kClientAddr)); + EXPECT_TRUE(HasCandidate(ready_candidates, IceCandidateType::kHost, "udp", + kClientAddr)); // The external candidate is always udp. 
- EXPECT_TRUE(HasCandidate(ready_candidates, "relay", "udp", - rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); + EXPECT_TRUE(HasCandidate(ready_candidates, IceCandidateType::kRelay, "udp", + SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); } void TestEachInterfaceHasItsOwnTurnPorts() { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); - turn_server_.AddInternalSocket(kTurnUdpIntIPv6Addr, PROTO_UDP); - turn_server_.AddInternalSocket(kTurnTcpIntIPv6Addr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); + turn_server_.AddInternalSocket(kTurnUdpIntIPv6Addr, webrtc::PROTO_UDP); + turn_server_.AddInternalSocket(kTurnTcpIntIPv6Addr, webrtc::PROTO_TCP); // Add two interfaces both having IPv4 and IPv6 addresses. - AddInterface(kClientAddr, "net1", rtc::ADAPTER_TYPE_WIFI); - AddInterface(kClientIPv6Addr, "net1", rtc::ADAPTER_TYPE_WIFI); - AddInterface(kClientAddr2, "net2", rtc::ADAPTER_TYPE_CELLULAR); - AddInterface(kClientIPv6Addr2, "net2", rtc::ADAPTER_TYPE_CELLULAR); - allocator_.reset( - new BasicPortAllocator(&network_manager_, &socket_factory_)); + AddInterface(kClientAddr, "net1", ADAPTER_TYPE_WIFI); + AddInterface(kClientIPv6Addr, "net1", ADAPTER_TYPE_WIFI); + AddInterface(kClientAddr2, "net2", ADAPTER_TYPE_CELLULAR); + AddInterface(kClientIPv6Addr2, "net2", ADAPTER_TYPE_CELLULAR); + allocator_.emplace(env_, &network_manager_, &socket_factory_); allocator_->Initialize(); allocator_->SetConfiguration(allocator_->stun_servers(), allocator_->turn_servers(), 0, @@ -712,28 +706,42 @@ class BasicPortAllocatorTest : public FakeClockBase, AddTurnServers(kTurnUdpIntAddr, kTurnTcpIntAddr); AddTurnServers(kTurnUdpIntIPv6Addr, kTurnTcpIntIPv6Addr); - allocator_->set_step_delay(kMinimumStepDelay); - allocator_->set_flags( - allocator().flags() | PORTALLOCATOR_ENABLE_SHARED_SOCKET | - PORTALLOCATOR_ENABLE_IPV6 | PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); + allocator_->set_step_delay(webrtc::kMinimumStepDelay); + allocator_->set_flags(allocator().flags() | + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET | + webrtc::PORTALLOCATOR_ENABLE_IPV6 | + webrtc::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // 10 ports (4 STUN and 1 TURN ports on each interface) will be ready to // use. 
EXPECT_EQ(10U, session_->ReadyPorts().size()); EXPECT_EQ(10U, ports_.size()); - EXPECT_EQ(1, CountPorts(ports_, "local", PROTO_UDP, kClientAddr)); - EXPECT_EQ(1, CountPorts(ports_, "local", PROTO_UDP, kClientAddr2)); - EXPECT_EQ(1, CountPorts(ports_, "local", PROTO_UDP, kClientIPv6Addr)); - EXPECT_EQ(1, CountPorts(ports_, "local", PROTO_UDP, kClientIPv6Addr2)); - EXPECT_EQ(1, CountPorts(ports_, "local", PROTO_TCP, kClientAddr)); - EXPECT_EQ(1, CountPorts(ports_, "local", PROTO_TCP, kClientAddr2)); - EXPECT_EQ(1, CountPorts(ports_, "local", PROTO_TCP, kClientIPv6Addr)); - EXPECT_EQ(1, CountPorts(ports_, "local", PROTO_TCP, kClientIPv6Addr2)); - EXPECT_EQ(1, CountPorts(ports_, "relay", PROTO_UDP, kClientIPv6Addr)); - EXPECT_EQ(1, CountPorts(ports_, "relay", PROTO_UDP, kClientIPv6Addr2)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kHost, webrtc::PROTO_UDP, + kClientAddr)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kHost, webrtc::PROTO_UDP, + kClientAddr2)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kHost, webrtc::PROTO_UDP, + kClientIPv6Addr)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kHost, webrtc::PROTO_UDP, + kClientIPv6Addr2)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kHost, webrtc::PROTO_TCP, + kClientAddr)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kHost, webrtc::PROTO_TCP, + kClientAddr2)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kHost, webrtc::PROTO_TCP, + kClientIPv6Addr)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kHost, webrtc::PROTO_TCP, + kClientIPv6Addr2)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kRelay, webrtc::PROTO_UDP, + kClientIPv6Addr)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kRelay, webrtc::PROTO_UDP, + kClientIPv6Addr2)); // Now that we remove candidates when TURN ports are pruned, there will be // exactly 10 candidates in `candidates_`. 
@@ -741,20 +749,24 @@ class BasicPortAllocatorTest : public FakeClockBase, const std::vector& ready_candidates = session_->ReadyCandidates(); EXPECT_EQ(10U, ready_candidates.size()); - EXPECT_TRUE(HasCandidate(ready_candidates, "local", "udp", kClientAddr)); - EXPECT_TRUE(HasCandidate(ready_candidates, "local", "udp", kClientAddr2)); - EXPECT_TRUE( - HasCandidate(ready_candidates, "local", "udp", kClientIPv6Addr)); - EXPECT_TRUE( - HasCandidate(ready_candidates, "local", "udp", kClientIPv6Addr2)); - EXPECT_TRUE(HasCandidate(ready_candidates, "local", "tcp", kClientAddr)); - EXPECT_TRUE(HasCandidate(ready_candidates, "local", "tcp", kClientAddr2)); - EXPECT_TRUE( - HasCandidate(ready_candidates, "local", "tcp", kClientIPv6Addr)); - EXPECT_TRUE( - HasCandidate(ready_candidates, "local", "tcp", kClientIPv6Addr2)); - EXPECT_TRUE(HasCandidate(ready_candidates, "relay", "udp", - rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); + EXPECT_TRUE(HasCandidate(ready_candidates, IceCandidateType::kHost, "udp", + kClientAddr)); + EXPECT_TRUE(HasCandidate(ready_candidates, IceCandidateType::kHost, "udp", + kClientAddr2)); + EXPECT_TRUE(HasCandidate(ready_candidates, IceCandidateType::kHost, "udp", + kClientIPv6Addr)); + EXPECT_TRUE(HasCandidate(ready_candidates, IceCandidateType::kHost, "udp", + kClientIPv6Addr2)); + EXPECT_TRUE(HasCandidate(ready_candidates, IceCandidateType::kHost, "tcp", + kClientAddr)); + EXPECT_TRUE(HasCandidate(ready_candidates, IceCandidateType::kHost, "tcp", + kClientAddr2)); + EXPECT_TRUE(HasCandidate(ready_candidates, IceCandidateType::kHost, "tcp", + kClientIPv6Addr)); + EXPECT_TRUE(HasCandidate(ready_candidates, IceCandidateType::kHost, "tcp", + kClientIPv6Addr2)); + EXPECT_TRUE(HasCandidate(ready_candidates, IceCandidateType::kRelay, "udp", + SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); } }; @@ -771,21 +783,25 @@ TEST_F(BasicPortAllocatorTest, TestBasic) { // Tests that our network filtering works properly. 
TEST_F(BasicPortAllocatorTest, TestIgnoreOnlyLoopbackNetworkByDefault) { AddInterface(SocketAddress(IPAddress(0x12345600U), 0), "test_eth0", - rtc::ADAPTER_TYPE_ETHERNET); + ADAPTER_TYPE_ETHERNET); AddInterface(SocketAddress(IPAddress(0x12345601U), 0), "test_wlan0", - rtc::ADAPTER_TYPE_WIFI); + ADAPTER_TYPE_WIFI); AddInterface(SocketAddress(IPAddress(0x12345602U), 0), "test_cell0", - rtc::ADAPTER_TYPE_CELLULAR); + ADAPTER_TYPE_CELLULAR); AddInterface(SocketAddress(IPAddress(0x12345603U), 0), "test_vpn0", - rtc::ADAPTER_TYPE_VPN); + ADAPTER_TYPE_VPN); AddInterface(SocketAddress(IPAddress(0x12345604U), 0), "test_lo", - rtc::ADAPTER_TYPE_LOOPBACK); + ADAPTER_TYPE_LOOPBACK); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); - session_->set_flags(PORTALLOCATOR_DISABLE_STUN | PORTALLOCATOR_DISABLE_RELAY | - PORTALLOCATOR_DISABLE_TCP); + session_->set_flags(webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_DISABLE_TCP); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(4U, candidates_.size()); for (const Candidate& candidate : candidates_) { EXPECT_LT(candidate.address().ip(), 0x12345604U); @@ -794,20 +810,23 @@ TEST_F(BasicPortAllocatorTest, TestIgnoreOnlyLoopbackNetworkByDefault) { TEST_F(BasicPortAllocatorTest, TestIgnoreNetworksAccordingToIgnoreMask) { AddInterface(SocketAddress(IPAddress(0x12345600U), 0), "test_eth0", - rtc::ADAPTER_TYPE_ETHERNET); + ADAPTER_TYPE_ETHERNET); AddInterface(SocketAddress(IPAddress(0x12345601U), 0), "test_wlan0", - rtc::ADAPTER_TYPE_WIFI); + ADAPTER_TYPE_WIFI); AddInterface(SocketAddress(IPAddress(0x12345602U), 0), "test_cell0", - rtc::ADAPTER_TYPE_CELLULAR); - allocator_->SetNetworkIgnoreMask(rtc::ADAPTER_TYPE_ETHERNET | - rtc::ADAPTER_TYPE_LOOPBACK | - rtc::ADAPTER_TYPE_WIFI); + ADAPTER_TYPE_CELLULAR); + allocator_->SetNetworkIgnoreMask(ADAPTER_TYPE_ETHERNET | + ADAPTER_TYPE_LOOPBACK | ADAPTER_TYPE_WIFI); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); - session_->set_flags(PORTALLOCATOR_DISABLE_STUN | PORTALLOCATOR_DISABLE_RELAY | - PORTALLOCATOR_DISABLE_TCP); + session_->set_flags(webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_DISABLE_TCP); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(1U, candidates_.size()); EXPECT_EQ(0x12345602U, candidates_[0].address().ip()); } @@ -818,20 +837,23 @@ TEST_F(BasicPortAllocatorTest, WifiUsedInsteadOfCellWhenCostlyNetworksDisabled) { SocketAddress wifi(IPAddress(0x12345600U), 0); SocketAddress cell(IPAddress(0x12345601U), 0); - AddInterface(wifi, "test_wlan0", rtc::ADAPTER_TYPE_WIFI); - AddInterface(cell, "test_cell0", rtc::ADAPTER_TYPE_CELLULAR); + AddInterface(wifi, "test_wlan0", ADAPTER_TYPE_WIFI); + AddInterface(cell, "test_cell0", ADAPTER_TYPE_CELLULAR); // Disable all but UDP candidates to make the test simpler. 
- allocator().set_flags(cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_DISABLE_RELAY | - cricket::PORTALLOCATOR_DISABLE_TCP | - cricket::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); - ASSERT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP)); + allocator().set_flags(webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_DISABLE_TCP | + webrtc::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); + ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // Should only get one Wi-Fi candidate. EXPECT_EQ(1U, candidates_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", wifi)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", wifi)); } // Test that when the PORTALLOCATOR_DISABLE_COSTLY_NETWORKS flag is set and @@ -843,22 +865,27 @@ TEST_F(BasicPortAllocatorTest, SocketAddress cell(IPAddress(0x12345601U), 0); SocketAddress unknown1(IPAddress(0x12345602U), 0); SocketAddress unknown2(IPAddress(0x12345603U), 0); - AddInterface(cell, "test_cell0", rtc::ADAPTER_TYPE_CELLULAR); - AddInterface(unknown1, "test_unknown0", rtc::ADAPTER_TYPE_UNKNOWN); - AddInterface(unknown2, "test_unknown1", rtc::ADAPTER_TYPE_UNKNOWN); + AddInterface(cell, "test_cell0", ADAPTER_TYPE_CELLULAR); + AddInterface(unknown1, "test_unknown0", ADAPTER_TYPE_UNKNOWN); + AddInterface(unknown2, "test_unknown1", ADAPTER_TYPE_UNKNOWN); // Disable all but UDP candidates to make the test simpler. - allocator().set_flags(cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_DISABLE_RELAY | - cricket::PORTALLOCATOR_DISABLE_TCP | - cricket::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); - ASSERT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP)); + allocator().set_flags(webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_DISABLE_TCP | + webrtc::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); + ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // Should only get two candidates, none of which is cell. 
EXPECT_EQ(2U, candidates_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", unknown1)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", unknown2)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", unknown1)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", unknown2)); } // Test that when the PORTALLOCATOR_DISABLE_COSTLY_NETWORKS flag is set and @@ -870,22 +897,25 @@ TEST_F(BasicPortAllocatorTest, SocketAddress cellular(IPAddress(0x12345601U), 0); SocketAddress unknown1(IPAddress(0x12345602U), 0); SocketAddress unknown2(IPAddress(0x12345603U), 0); - AddInterface(wifi, "test_wlan0", rtc::ADAPTER_TYPE_WIFI); - AddInterface(cellular, "test_cell0", rtc::ADAPTER_TYPE_CELLULAR); - AddInterface(unknown1, "test_unknown0", rtc::ADAPTER_TYPE_UNKNOWN); - AddInterface(unknown2, "test_unknown1", rtc::ADAPTER_TYPE_UNKNOWN); + AddInterface(wifi, "test_wlan0", ADAPTER_TYPE_WIFI); + AddInterface(cellular, "test_cell0", ADAPTER_TYPE_CELLULAR); + AddInterface(unknown1, "test_unknown0", ADAPTER_TYPE_UNKNOWN); + AddInterface(unknown2, "test_unknown1", ADAPTER_TYPE_UNKNOWN); // Disable all but UDP candidates to make the test simpler. - allocator().set_flags(cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_DISABLE_RELAY | - cricket::PORTALLOCATOR_DISABLE_TCP | - cricket::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); - ASSERT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP)); + allocator().set_flags(webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_DISABLE_TCP | + webrtc::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); + ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // Should only get one Wi-Fi candidate. EXPECT_EQ(1U, candidates_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", wifi)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", wifi)); } // Test that if the PORTALLOCATOR_DISABLE_COSTLY_NETWORKS flag is set, but the @@ -894,19 +924,23 @@ TEST_F(BasicPortAllocatorTest, TEST_F(BasicPortAllocatorTest, CellUsedWhenCostlyNetworksDisabledButThereAreNoOtherInterfaces) { SocketAddress cellular(IPAddress(0x12345601U), 0); - AddInterface(cellular, "test_cell0", rtc::ADAPTER_TYPE_CELLULAR); + AddInterface(cellular, "test_cell0", ADAPTER_TYPE_CELLULAR); // Disable all but UDP candidates to make the test simpler. 
- allocator().set_flags(cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_DISABLE_RELAY | - cricket::PORTALLOCATOR_DISABLE_TCP | - cricket::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); - ASSERT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP)); + allocator().set_flags(webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_DISABLE_TCP | + webrtc::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); + ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // Make sure we got the cell candidate. EXPECT_EQ(1U, candidates_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", cellular)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", cellular)); } // Test that if both PORTALLOCATOR_DISABLE_COSTLY_NETWORKS is set, and there is @@ -916,21 +950,26 @@ TEST_F(BasicPortAllocatorTest, CellNotRemovedWhenCostlyNetworksDisabledAndWifiIsLinkLocal) { SocketAddress wifi_link_local("169.254.0.1", 0); SocketAddress cellular(IPAddress(0x12345601U), 0); - AddInterface(wifi_link_local, "test_wlan0", rtc::ADAPTER_TYPE_WIFI); - AddInterface(cellular, "test_cell0", rtc::ADAPTER_TYPE_CELLULAR); - - allocator().set_flags(cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_DISABLE_RELAY | - cricket::PORTALLOCATOR_DISABLE_TCP | - cricket::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); - ASSERT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP)); + AddInterface(wifi_link_local, "test_wlan0", ADAPTER_TYPE_WIFI); + AddInterface(cellular, "test_cell0", ADAPTER_TYPE_CELLULAR); + + allocator().set_flags(webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_DISABLE_TCP | + webrtc::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); + ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // Make sure we got both wifi and cell candidates. 
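The costly-network tests above exercise `PORTALLOCATOR_DISABLE_COSTLY_NETWORKS`: cellular candidates are dropped when a cheaper usable network exists (Wi-Fi, or an unknown adapter when there is no Wi-Fi), but kept when cellular is the only interface. A rough sketch of that "keep only the cheapest networks" rule, with assumed relative costs and the link-local Wi-Fi special case ignored (not the allocator's real code):

#include <algorithm>
#include <vector>

// Assumed relative costs for illustration: lower is cheaper.
enum class AdapterType { kEthernet, kWifi, kUnknown, kCellular };

int NetworkCost(AdapterType type) {
  switch (type) {
    case AdapterType::kEthernet:
    case AdapterType::kWifi:
      return 0;
    case AdapterType::kUnknown:
      return 1;
    case AdapterType::kCellular:
      return 2;
  }
  return 2;
}

// With the costly-networks flag set, only the cheapest available networks
// are gathered on: Wi-Fi beats unknown beats cellular, and a cellular-only
// device still keeps its cellular network.
std::vector<AdapterType> KeepCheapestNetworks(std::vector<AdapterType> nets) {
  if (nets.empty()) {
    return nets;
  }
  const int min_cost = NetworkCost(*std::min_element(
      nets.begin(), nets.end(), [](AdapterType a, AdapterType b) {
        return NetworkCost(a) < NetworkCost(b);
      }));
  nets.erase(std::remove_if(nets.begin(), nets.end(),
                            [min_cost](AdapterType t) {
                              return NetworkCost(t) > min_cost;
                            }),
             nets.end());
  return nets;
}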
EXPECT_EQ(2U, candidates_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", wifi_link_local)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", cellular)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", + wifi_link_local)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", cellular)); } // Test that if both PORTALLOCATOR_DISABLE_COSTLY_NETWORKS is set, and there is @@ -942,22 +981,26 @@ TEST_F(BasicPortAllocatorTest, SocketAddress wifi(IPAddress(0x12345600U), 0); SocketAddress wifi_link_local("169.254.0.1", 0); SocketAddress cellular(IPAddress(0x12345601U), 0); - AddInterface(wifi, "test_wlan0", rtc::ADAPTER_TYPE_WIFI); - AddInterface(wifi_link_local, "test_wlan1", rtc::ADAPTER_TYPE_WIFI); - AddInterface(cellular, "test_cell0", rtc::ADAPTER_TYPE_CELLULAR); - - allocator().set_flags(cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_DISABLE_RELAY | - cricket::PORTALLOCATOR_DISABLE_TCP | - cricket::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); - ASSERT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP)); + AddInterface(wifi, "test_wlan0", ADAPTER_TYPE_WIFI); + AddInterface(wifi_link_local, "test_wlan1", ADAPTER_TYPE_WIFI); + AddInterface(cellular, "test_cell0", ADAPTER_TYPE_CELLULAR); + + allocator().set_flags(webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_DISABLE_TCP | + webrtc::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); + ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // Make sure we got only wifi candidates. EXPECT_EQ(2U, candidates_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", wifi)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", wifi_link_local)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", wifi)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", + wifi_link_local)); } // Test that the adapter types of the Ethernet and the VPN can be correctly @@ -966,15 +1009,18 @@ TEST_F(BasicPortAllocatorTest, // set. TEST_F(BasicPortAllocatorTest, EthernetIsNotFilteredOutWhenCostlyNetworksDisabledAndVpnPresent) { - AddInterface(kClientAddr, "eth0", rtc::ADAPTER_TYPE_ETHERNET); - AddInterface(kClientAddr2, "tap0", rtc::ADAPTER_TYPE_VPN); - allocator().set_flags(PORTALLOCATOR_DISABLE_COSTLY_NETWORKS | - PORTALLOCATOR_DISABLE_RELAY | - PORTALLOCATOR_DISABLE_TCP); + AddInterface(kClientAddr, "eth0", ADAPTER_TYPE_ETHERNET); + AddInterface(kClientAddr2, "tap0", ADAPTER_TYPE_VPN); + allocator().set_flags(webrtc::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS | + webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_DISABLE_TCP); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // The VPN tap0 network should be filtered out as a costly network, and we // should have a UDP port and a STUN port from the Ethernet eth0. 
ASSERT_EQ(2U, ports_.size()); @@ -987,23 +1033,28 @@ TEST_F(BasicPortAllocatorTest, TEST_F(BasicPortAllocatorTest, MaxIpv6NetworksLimitEnforced) { // Add three IPv6 network interfaces, but tell the allocator to only use two. allocator().set_max_ipv6_networks(2); - AddInterface(kClientIPv6Addr, "eth0", rtc::ADAPTER_TYPE_ETHERNET); - AddInterface(kClientIPv6Addr2, "eth1", rtc::ADAPTER_TYPE_ETHERNET); - AddInterface(kClientIPv6Addr3, "eth2", rtc::ADAPTER_TYPE_ETHERNET); + AddInterface(kClientIPv6Addr, "eth0", ADAPTER_TYPE_ETHERNET); + AddInterface(kClientIPv6Addr2, "eth1", ADAPTER_TYPE_ETHERNET); + AddInterface(kClientIPv6Addr3, "eth2", ADAPTER_TYPE_ETHERNET); // To simplify the test, only gather UDP host candidates. - allocator().set_flags(PORTALLOCATOR_ENABLE_IPV6 | PORTALLOCATOR_DISABLE_TCP | - PORTALLOCATOR_DISABLE_STUN | - PORTALLOCATOR_DISABLE_RELAY); + allocator().set_flags( + webrtc::PORTALLOCATOR_ENABLE_IPV6 | webrtc::PORTALLOCATOR_DISABLE_TCP | + webrtc::PORTALLOCATOR_DISABLE_STUN | webrtc::PORTALLOCATOR_DISABLE_RELAY); - ASSERT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP)); + ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(2U, candidates_.size()); // Ensure the expected two interfaces (eth0 and eth1) were used. - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientIPv6Addr)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientIPv6Addr2)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", + kClientIPv6Addr)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", + kClientIPv6Addr2)); } // Ensure that allocator.max_ipv6_networks() doesn't prevent IPv4 networks from @@ -1011,38 +1062,48 @@ TEST_F(BasicPortAllocatorTest, MaxIpv6NetworksLimitEnforced) { TEST_F(BasicPortAllocatorTest, MaxIpv6NetworksLimitDoesNotImpactIpv4Networks) { // Set the "max IPv6" limit to 1, adding two IPv6 and two IPv4 networks. allocator().set_max_ipv6_networks(1); - AddInterface(kClientIPv6Addr, "eth0", rtc::ADAPTER_TYPE_ETHERNET); - AddInterface(kClientIPv6Addr2, "eth1", rtc::ADAPTER_TYPE_ETHERNET); - AddInterface(kClientAddr, "eth2", rtc::ADAPTER_TYPE_ETHERNET); - AddInterface(kClientAddr2, "eth3", rtc::ADAPTER_TYPE_ETHERNET); + AddInterface(kClientIPv6Addr, "eth0", ADAPTER_TYPE_ETHERNET); + AddInterface(kClientIPv6Addr2, "eth1", ADAPTER_TYPE_ETHERNET); + AddInterface(kClientAddr, "eth2", ADAPTER_TYPE_ETHERNET); + AddInterface(kClientAddr2, "eth3", ADAPTER_TYPE_ETHERNET); // To simplify the test, only gather UDP host candidates. 
- allocator().set_flags(PORTALLOCATOR_ENABLE_IPV6 | PORTALLOCATOR_DISABLE_TCP | - PORTALLOCATOR_DISABLE_STUN | - PORTALLOCATOR_DISABLE_RELAY); + allocator().set_flags( + webrtc::PORTALLOCATOR_ENABLE_IPV6 | webrtc::PORTALLOCATOR_DISABLE_TCP | + webrtc::PORTALLOCATOR_DISABLE_STUN | webrtc::PORTALLOCATOR_DISABLE_RELAY); - ASSERT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP)); + ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3U, candidates_.size()); // Ensure that only one IPv6 interface was used, but both IPv4 interfaces // were used. - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientIPv6Addr)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientAddr)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientAddr2)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", + kClientIPv6Addr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr2)); } // Test that we could use loopback interface as host candidate. TEST_F(BasicPortAllocatorTest, TestLoopbackNetworkInterface) { - AddInterface(kLoopbackAddr, "test_loopback", rtc::ADAPTER_TYPE_LOOPBACK); + AddInterface(kLoopbackAddr, "test_loopback", ADAPTER_TYPE_LOOPBACK); allocator_->SetNetworkIgnoreMask(0); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); - session_->set_flags(PORTALLOCATOR_DISABLE_STUN | PORTALLOCATOR_DISABLE_RELAY | - PORTALLOCATOR_DISABLE_TCP); + session_->set_flags(webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_DISABLE_TCP); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(1U, candidates_.size()); } @@ -1051,13 +1112,19 @@ TEST_F(BasicPortAllocatorTest, TestGetAllPortsWithMinimumStepDelay) { AddInterface(kClientAddr); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3U, candidates_.size()); EXPECT_EQ(3U, ports_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientAddr)); - EXPECT_TRUE(HasCandidate(candidates_, "stun", "udp", kClientAddr)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "tcp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kSrflx, "udp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "tcp", kClientAddr)); } // Test that when the same network interface is brought down and up, the @@ -1068,8 +1135,11 @@ TEST_F(BasicPortAllocatorTest, 
TestSameNetworkDownAndUpWhenSessionNotStopped) { AddInterface(kClientAddr, if_name); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3U, candidates_.size()); EXPECT_EQ(3U, ports_.size()); candidate_allocation_done_ = false; @@ -1092,8 +1162,11 @@ TEST_F(BasicPortAllocatorTest, TestSameNetworkDownAndUpWhenSessionNotStopped) { fss_->set_tcp_sockets_enabled(true); fss_->set_udp_sockets_enabled(true); AddInterface(kClientAddr, if_name); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3U, candidates_.size()); EXPECT_EQ(3U, ports_.size()); } @@ -1106,8 +1179,11 @@ TEST_F(BasicPortAllocatorTest, TestSameNetworkDownAndUpWhenSessionStopped) { AddInterface(kClientAddr, if_name); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3U, candidates_.size()); EXPECT_EQ(3U, ports_.size()); session_->StopGettingPorts(); @@ -1144,13 +1220,17 @@ TEST_F(BasicPortAllocatorTest, CandidatesRegatheredAfterBindingFails) { fss_->set_udp_sockets_enabled(false); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // Make sure we actually prevented candidates from being gathered (other than // a single TCP active candidate, since that doesn't require creating a // socket). ASSERT_EQ(1U, candidates_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "tcp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "tcp", kClientAddr)); candidate_allocation_done_ = false; // Now simulate the interface coming up, with the newfound ability to bind @@ -1158,30 +1238,42 @@ TEST_F(BasicPortAllocatorTest, CandidatesRegatheredAfterBindingFails) { fss_->set_tcp_sockets_enabled(true); fss_->set_udp_sockets_enabled(true); AddInterface(kClientAddr, if_name); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // Should get UDP and TCP candidate. 
ASSERT_EQ(2U, candidates_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr)); // TODO(deadbeef): This is actually the same active TCP candidate as before. // We should extend this test to also verify that a server candidate is // gathered. - EXPECT_TRUE(HasCandidate(candidates_, "local", "tcp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "tcp", kClientAddr)); } // Verify candidates with default step delay of 1sec. TEST_F(BasicPortAllocatorTest, TestGetAllPortsWithOneSecondStepDelay) { AddInterface(kClientAddr); - allocator_->set_step_delay(kDefaultStepDelay); + allocator_->set_step_delay(webrtc::kDefaultStepDelay); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_EQ_SIMULATED_WAIT(2U, candidates_.size(), 1000, fake_clock); + ASSERT_THAT(webrtc::WaitUntil([&] { return candidates_.size(); }, Eq(2U), + {.clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(2U, ports_.size()); - ASSERT_EQ_SIMULATED_WAIT(3U, candidates_.size(), 2000, fake_clock); + ASSERT_THAT(webrtc::WaitUntil([&] { return candidates_.size(); }, Eq(3U), + {.clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3U, ports_.size()); - ASSERT_EQ_SIMULATED_WAIT(3U, candidates_.size(), 1500, fake_clock); - EXPECT_TRUE(HasCandidate(candidates_, "local", "tcp", kClientAddr)); + ASSERT_THAT(webrtc::WaitUntil([&] { return candidates_.size(); }, Eq(3U), + {.clock = &fake_clock}), + webrtc::IsRtcOk()); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "tcp", kClientAddr)); EXPECT_EQ(3U, ports_.size()); EXPECT_TRUE(candidate_allocation_done_); // If we Stop gathering now, we shouldn't get a second "done" callback. @@ -1192,8 +1284,11 @@ TEST_F(BasicPortAllocatorTest, TestSetupVideoRtpPortsWithNormalSendBuffers) { AddInterface(kClientAddr); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP, CN_VIDEO)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3U, candidates_.size()); // If we Stop gathering now, we shouldn't get a second "done" callback. session_->StopGettingPorts(); @@ -1208,12 +1303,18 @@ TEST_F(BasicPortAllocatorTest, TestStopGetAllPorts) { AddInterface(kClientAddr); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_EQ_SIMULATED_WAIT(2U, candidates_.size(), kDefaultAllocationTimeout, - fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidates_.size(); }, Eq(2U), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(2U, ports_.size()); session_->StopGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); } // Test that we restrict client ports appropriately when a port range is set. 
@@ -1229,15 +1330,18 @@ TEST_F(BasicPortAllocatorTest, TestGetAllPortsPortRange) { EXPECT_TRUE(SetPortRange(kMinPort, kMaxPort)); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3U, candidates_.size()); EXPECT_EQ(3U, ports_.size()); int num_nonrelay_candidates = 0; for (const Candidate& candidate : candidates_) { // Check the port number for the UDP/STUN/TCP port objects. - if (candidate.type() != RELAY_PORT_TYPE) { + if (!candidate.is_relay()) { EXPECT_TRUE(CheckPort(candidate.address(), kMinPort, kMaxPort)); ++num_nonrelay_candidates; } @@ -1251,30 +1355,39 @@ TEST_F(BasicPortAllocatorTest, TestGetAllPortsNoAdapters) { // Default config uses GTURN and no NAT, so replace that with the // desired setup (NAT, STUN server, TURN server, UDP/TCP). ResetWithStunServerAndNat(kStunAddr); - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); AddTurnServers(kTurnUdpIntAddr, kTurnTcpIntAddr); AddTurnServers(kTurnUdpIntIPv6Addr, kTurnTcpIntIPv6Addr); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(4U, ports_.size()); - EXPECT_EQ(1, CountPorts(ports_, "stun", PROTO_UDP, kAnyAddr)); - EXPECT_EQ(1, CountPorts(ports_, "local", PROTO_TCP, kAnyAddr)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kSrflx, webrtc::PROTO_UDP, + kAnyAddr)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kHost, webrtc::PROTO_TCP, + kAnyAddr)); // Two TURN ports, using UDP/TCP for the first hop to the TURN server. - EXPECT_EQ(1, CountPorts(ports_, "relay", PROTO_UDP, kAnyAddr)); - EXPECT_EQ(1, CountPorts(ports_, "relay", PROTO_TCP, kAnyAddr)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kRelay, webrtc::PROTO_UDP, + kAnyAddr)); + EXPECT_EQ(1, CountPorts(ports_, IceCandidateType::kRelay, webrtc::PROTO_TCP, + kAnyAddr)); // The "any" address port should be in the signaled ready ports, but the host // candidate for it is useless and shouldn't be signaled. So we only have // STUN/TURN candidates. EXPECT_EQ(3U, candidates_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "stun", "udp", - rtc::SocketAddress(kNatUdpAddr.ipaddr(), 0))); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kSrflx, "udp", + SocketAddress(kNatUdpAddr.ipaddr(), 0))); // Again, two TURN candidates, using UDP/TCP for the first hop to the TURN // server. 
- EXPECT_EQ(2, - CountCandidates(candidates_, "relay", "udp", - rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); + SocketAddress addr(kTurnUdpExtAddr.ipaddr(), 0); + EXPECT_EQ(2, absl::c_count_if(candidates_, [&](const Candidate& c) { + return c.is_relay() && c.protocol() == "udp" && + AddressMatch(c.address(), addr); + })); } // Test that when enumeration is disabled, we should not have any ports when @@ -1282,10 +1395,10 @@ TEST_F(BasicPortAllocatorTest, TestGetAllPortsNoAdapters) { TEST_F(BasicPortAllocatorTest, TestDisableAdapterEnumerationWithoutNatRelayTransportOnly) { ResetWithStunServerNoNat(kStunAddr); - allocator().SetCandidateFilter(CF_RELAY); + allocator().SetCandidateFilter(webrtc::CF_RELAY); // Expect to see no ports and no candidates. - CheckDisableAdapterEnumeration(0U, rtc::IPAddress(), rtc::IPAddress(), - rtc::IPAddress(), rtc::IPAddress()); + CheckDisableAdapterEnumeration(0U, IPAddress(), IPAddress(), IPAddress(), + IPAddress()); } // Test that even with multiple interfaces, the result should still be a single @@ -1296,26 +1409,26 @@ TEST_F(BasicPortAllocatorTest, AddInterface(kPrivateAddr); AddInterface(kPrivateAddr2); ResetWithStunServerAndNat(kStunAddr); - AddTurnServers(kTurnUdpIntAddr, rtc::SocketAddress()); + AddTurnServers(kTurnUdpIntAddr, SocketAddress()); // Enable IPv6 here. Since the network_manager doesn't have IPv6 default // address set and we have no IPv6 STUN server, there should be no IPv6 // candidates. ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); - session_->set_flags(PORTALLOCATOR_ENABLE_IPV6); + session_->set_flags(webrtc::PORTALLOCATOR_ENABLE_IPV6); // Expect to see 3 ports for IPv4: HOST/STUN, TURN/UDP and TCP ports, 2 ports // for IPv6: HOST, and TCP. Only IPv4 candidates: a default private, STUN and // TURN/UDP candidates. CheckDisableAdapterEnumeration(5U, kPrivateAddr.ipaddr(), kNatUdpAddr.ipaddr(), kTurnUdpExtAddr.ipaddr(), - rtc::IPAddress()); + IPAddress()); } // Test that we should get a default private, STUN, TURN/UDP and TURN/TCP // candidates when both TURN/UDP and TURN/TCP servers are specified. TEST_F(BasicPortAllocatorTest, TestDisableAdapterEnumerationBehindNatWithTcp) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); AddInterface(kPrivateAddr); ResetWithStunServerAndNat(kStunAddr); AddTurnServers(kTurnUdpIntAddr, kTurnTcpIntAddr); @@ -1332,8 +1445,8 @@ TEST_F(BasicPortAllocatorTest, TestDisableAdapterEnumerationWithoutNatOrServers) { ResetWithNoServersOrNat(); // Expect to see 2 ports: STUN and TCP ports, one default private candidate. - CheckDisableAdapterEnumeration(2U, kPrivateAddr.ipaddr(), rtc::IPAddress(), - rtc::IPAddress(), rtc::IPAddress()); + CheckDisableAdapterEnumeration(2U, kPrivateAddr.ipaddr(), IPAddress(), + IPAddress(), IPAddress()); } // Test that when adapter enumeration is disabled, with @@ -1343,11 +1456,11 @@ TEST_F(BasicPortAllocatorTest, TestDisableAdapterEnumerationWithoutNatLocalhostCandidateDisabled) { ResetWithStunServerNoNat(kStunAddr); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); - session_->set_flags(PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE); + session_->set_flags(webrtc::PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE); // Expect to see 2 ports: STUN and TCP ports, localhost candidate and STUN // candidate. 
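With the `CountCandidates` helper removed earlier in this file, the call site above counts the relay/UDP candidates inline with `absl::c_count_if`. The same check expressed with the standard library only, using illustrative stand-in types rather than the WebRTC `Candidate` class:

#include <algorithm>
#include <string>
#include <vector>

// Illustrative stand-ins for the parts of Candidate the predicate touches.
struct Address {
  std::string ip;
};
struct Candidate {
  bool is_relay = false;
  std::string protocol;
  Address address;
};

int CountRelayUdpCandidates(const std::vector<Candidate>& candidates,
                            const Address& addr) {
  // absl::c_count_if(candidates, pred) is the range-adapted equivalent of
  // this std::count_if call over begin()/end().
  return static_cast<int>(std::count_if(
      candidates.begin(), candidates.end(), [&](const Candidate& c) {
        return c.is_relay && c.protocol == "udp" && c.address.ip == addr.ip;
      }));
}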
- CheckDisableAdapterEnumeration(2U, rtc::IPAddress(), rtc::IPAddress(), - rtc::IPAddress(), rtc::IPAddress()); + CheckDisableAdapterEnumeration(2U, IPAddress(), IPAddress(), IPAddress(), + IPAddress()); } // Test that when adapter enumeration is disabled, with @@ -1360,11 +1473,11 @@ TEST_F(BasicPortAllocatorTest, ResetWithStunServerNoNat(kStunAddr); AddInterfaceAsDefaultSourceAddresss(kClientAddr); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); - session_->set_flags(PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE); + session_->set_flags(webrtc::PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE); // Expect to see 2 ports: STUN and TCP ports, localhost candidate and STUN // candidate. - CheckDisableAdapterEnumeration(2U, rtc::IPAddress(), kClientAddr.ipaddr(), - rtc::IPAddress(), rtc::IPAddress()); + CheckDisableAdapterEnumeration(2U, IPAddress(), kClientAddr.ipaddr(), + IPAddress(), IPAddress()); } // Test that when adapter enumeration is disabled, with @@ -1374,38 +1487,43 @@ TEST_F(BasicPortAllocatorTest, TestDisableAdapterEnumerationWithNatLocalhostCandidateDisabled) { ResetWithStunServerAndNat(kStunAddr); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); - session_->set_flags(PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE); + session_->set_flags(webrtc::PORTALLOCATOR_DISABLE_DEFAULT_LOCAL_CANDIDATE); // Expect to see 2 ports: STUN and TCP ports, and single STUN candidate. - CheckDisableAdapterEnumeration(2U, rtc::IPAddress(), kNatUdpAddr.ipaddr(), - rtc::IPAddress(), rtc::IPAddress()); + CheckDisableAdapterEnumeration(2U, IPAddress(), kNatUdpAddr.ipaddr(), + IPAddress(), IPAddress()); } // Test that we disable relay over UDP, and only TCP is used when connecting to // the relay server. TEST_F(BasicPortAllocatorTest, TestDisableUdpTurn) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); AddInterface(kClientAddr); ResetWithStunServerAndNat(kStunAddr); AddTurnServers(kTurnUdpIntAddr, kTurnTcpIntAddr); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); - session_->set_flags(PORTALLOCATOR_DISABLE_UDP_RELAY | - PORTALLOCATOR_DISABLE_UDP | PORTALLOCATOR_DISABLE_STUN | - PORTALLOCATOR_ENABLE_SHARED_SOCKET); + session_->set_flags(webrtc::PORTALLOCATOR_DISABLE_UDP_RELAY | + webrtc::PORTALLOCATOR_DISABLE_UDP | + webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // Expect to see 2 ports and 2 candidates - TURN/TCP and TCP ports, TCP and // TURN/TCP candidates. EXPECT_EQ(2U, ports_.size()); EXPECT_EQ(2U, candidates_.size()); Candidate turn_candidate; - EXPECT_TRUE(FindCandidate(candidates_, "relay", "udp", kTurnUdpExtAddr, - &turn_candidate)); + EXPECT_TRUE(FindCandidate(candidates_, IceCandidateType::kRelay, "udp", + kTurnUdpExtAddr, &turn_candidate)); // The TURN candidate should use TCP to contact the TURN server. 
- EXPECT_EQ(TCP_PROTOCOL_NAME, turn_candidate.relay_protocol()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "tcp", kClientAddr)); + EXPECT_EQ(webrtc::TCP_PROTOCOL_NAME, turn_candidate.relay_protocol()); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "tcp", kClientAddr)); } // Test that we can get OnCandidatesAllocationDone callback when all the ports @@ -1413,10 +1531,13 @@ TEST_F(BasicPortAllocatorTest, TestDisableUdpTurn) { TEST_F(BasicPortAllocatorTest, TestDisableAllPorts) { AddInterface(kClientAddr); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); - session_->set_flags(PORTALLOCATOR_DISABLE_UDP | PORTALLOCATOR_DISABLE_STUN | - PORTALLOCATOR_DISABLE_RELAY | PORTALLOCATOR_DISABLE_TCP); + session_->set_flags( + webrtc::PORTALLOCATOR_DISABLE_UDP | webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_RELAY | webrtc::PORTALLOCATOR_DISABLE_TCP); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, 1000, fake_clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return candidate_allocation_done_; }, + IsTrue(), {.clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(0U, candidates_.size()); } @@ -1426,11 +1547,15 @@ TEST_F(BasicPortAllocatorTest, TestGetAllPortsNoUdpSockets) { fss_->set_udp_sockets_enabled(false); ASSERT_TRUE(CreateSession(1)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(1U, candidates_.size()); EXPECT_EQ(1U, ports_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "tcp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "tcp", kClientAddr)); } // Test that we don't crash or malfunction if we can't create UDP sockets or @@ -1442,11 +1567,15 @@ TEST_F(BasicPortAllocatorTest, TestGetAllPortsNoUdpSocketsNoTcpListen) { fss_->set_tcp_listen_enabled(false); ASSERT_TRUE(CreateSession(1)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(1U, candidates_.size()); EXPECT_EQ(1U, ports_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "tcp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "tcp", kClientAddr)); } // Test that we don't crash or malfunction if we can't create any sockets. @@ -1457,7 +1586,7 @@ TEST_F(BasicPortAllocatorTest, TestGetAllPortsNoSockets) { fss_->set_udp_sockets_enabled(false); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - SIMULATED_WAIT(candidates_.size() > 0, 2000, fake_clock); + SIMULATED_WAIT(!candidates_.empty(), 2000, fake_clock); // TODO(deadbeef): Check candidate_allocation_done signal. // In case of Relay, ports creation will succeed but sockets will fail. // There is no error reporting from RelayEntry to handle this failure. @@ -1465,19 +1594,27 @@ TEST_F(BasicPortAllocatorTest, TestGetAllPortsNoSockets) { // Testing STUN timeout. 
TEST_F(BasicPortAllocatorTest, TestGetAllPortsNoUdpAllowed) { - fss_->AddRule(false, rtc::FP_UDP, rtc::FD_ANY, kClientAddr); + fss_->AddRule(false, webrtc::FP_UDP, webrtc::FD_ANY, kClientAddr); AddInterface(kClientAddr); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_EQ_SIMULATED_WAIT(2U, candidates_.size(), kDefaultAllocationTimeout, - fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidates_.size(); }, Eq(2U), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(2U, ports_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientAddr)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "tcp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "tcp", kClientAddr)); // We wait at least for a full STUN timeout, which - // cricket::STUN_TOTAL_TIMEOUT seconds. - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - cricket::STUN_TOTAL_TIMEOUT, fake_clock); + // webrtc::STUN_TOTAL_TIMEOUT seconds. + EXPECT_THAT( + webrtc::WaitUntil([&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(STUN_TOTAL_TIMEOUT), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // No additional (STUN) candidates. EXPECT_EQ(2U, candidates_.size()); } @@ -1487,12 +1624,16 @@ TEST_F(BasicPortAllocatorTest, TestCandidatePriorityOfMultipleInterfaces) { AddInterface(kClientAddr2); // Allocating only host UDP ports. This is done purely for testing // convenience. - allocator().set_flags(PORTALLOCATOR_DISABLE_TCP | PORTALLOCATOR_DISABLE_STUN | - PORTALLOCATOR_DISABLE_RELAY); + allocator().set_flags(webrtc::PORTALLOCATOR_DISABLE_TCP | + webrtc::PORTALLOCATOR_DISABLE_STUN | + webrtc::PORTALLOCATOR_DISABLE_RELAY); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); ASSERT_EQ(2U, candidates_.size()); EXPECT_EQ(2U, ports_.size()); // Candidates priorities should be different. @@ -1504,8 +1645,11 @@ TEST_F(BasicPortAllocatorTest, TestGetAllPortsRestarts) { AddInterface(kClientAddr); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3U, candidates_.size()); EXPECT_EQ(3U, ports_.size()); // TODO(deadbeef): Extend this to verify ICE restart. @@ -1519,12 +1663,15 @@ TEST_F(BasicPortAllocatorTest, TestSessionUsesOwnCandidateFilter) { AddInterface(kClientAddr); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); // Set candidate filter *after* creating the session. Should have no effect. - allocator().SetCandidateFilter(CF_RELAY); + allocator().SetCandidateFilter(webrtc::CF_RELAY); session_->StartGettingPorts(); // 7 candidates and 4 ports is what we would normally get (see the // TestGetAllPorts* tests). 
- EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3U, candidates_.size()); EXPECT_EQ(3U, ports_.size()); } @@ -1537,35 +1684,41 @@ TEST_F(BasicPortAllocatorTest, TestSessionUsesOwnCandidateFilter) { TEST_F(BasicPortAllocatorTest, TestCandidateFilterWithRelayOnly) { AddInterface(kClientAddr); // GTURN is not configured here. - ResetWithTurnServersNoNat(kTurnUdpIntAddr, rtc::SocketAddress()); - allocator().SetCandidateFilter(CF_RELAY); + ResetWithTurnServersNoNat(kTurnUdpIntAddr, SocketAddress()); + allocator().SetCandidateFilter(webrtc::CF_RELAY); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); - EXPECT_TRUE(HasCandidate(candidates_, "relay", "udp", - rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kRelay, "udp", + SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); EXPECT_EQ(1U, candidates_.size()); EXPECT_EQ(1U, ports_.size()); // Only Relay port will be in ready state. - EXPECT_EQ(std::string(RELAY_PORT_TYPE), candidates_[0].type()); + EXPECT_TRUE(candidates_[0].is_relay()); EXPECT_EQ( candidates_[0].related_address(), - rtc::EmptySocketAddressWithFamily(candidates_[0].address().family())); + webrtc::EmptySocketAddressWithFamily(candidates_[0].address().family())); } TEST_F(BasicPortAllocatorTest, TestCandidateFilterWithHostOnly) { AddInterface(kClientAddr); - allocator().set_flags(PORTALLOCATOR_ENABLE_SHARED_SOCKET); - allocator().SetCandidateFilter(CF_HOST); + allocator().set_flags(webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET); + allocator().SetCandidateFilter(webrtc::CF_HOST); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(2U, candidates_.size()); // Host UDP/TCP candidates only. EXPECT_EQ(2U, ports_.size()); // UDP/TCP ports only. 
for (const Candidate& candidate : candidates_) { - EXPECT_EQ(std::string(LOCAL_PORT_TYPE), candidate.type()); + EXPECT_TRUE(candidate.is_local()); } } @@ -1574,36 +1727,42 @@ TEST_F(BasicPortAllocatorTest, TestCandidateFilterWithReflexiveOnly) { AddInterface(kPrivateAddr); ResetWithStunServerAndNat(kStunAddr); - allocator().set_flags(PORTALLOCATOR_ENABLE_SHARED_SOCKET); - allocator().SetCandidateFilter(CF_REFLEXIVE); + allocator().set_flags(webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET); + allocator().SetCandidateFilter(webrtc::CF_REFLEXIVE); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // Host is behind NAT, no private address will be exposed. Hence only UDP // port with STUN candidate will be sent outside. EXPECT_EQ(1U, candidates_.size()); // Only STUN candidate. EXPECT_EQ(1U, ports_.size()); // Only UDP port will be in ready state. - EXPECT_EQ(std::string(STUN_PORT_TYPE), candidates_[0].type()); + EXPECT_TRUE(candidates_[0].is_stun()); EXPECT_EQ( candidates_[0].related_address(), - rtc::EmptySocketAddressWithFamily(candidates_[0].address().family())); + webrtc::EmptySocketAddressWithFamily(candidates_[0].address().family())); } // Host is not behind the NAT. TEST_F(BasicPortAllocatorTest, TestCandidateFilterWithReflexiveOnlyAndNoNAT) { AddInterface(kClientAddr); - allocator().set_flags(PORTALLOCATOR_ENABLE_SHARED_SOCKET); - allocator().SetCandidateFilter(CF_REFLEXIVE); + allocator().set_flags(webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET); + allocator().SetCandidateFilter(webrtc::CF_REFLEXIVE); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // Host has a public address, both UDP and TCP candidates will be exposed. EXPECT_EQ(2U, candidates_.size()); // Local UDP + TCP candidate. EXPECT_EQ(2U, ports_.size()); // UDP and TCP ports will be in ready state. 
for (const Candidate& candidate : candidates_) { - EXPECT_EQ(std::string(LOCAL_PORT_TYPE), candidate.type()); + EXPECT_TRUE(candidate.is_local()); } } @@ -1612,12 +1771,18 @@ TEST_F(BasicPortAllocatorTest, TestEnableSharedUfrag) { AddInterface(kClientAddr); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3U, candidates_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientAddr)); - EXPECT_TRUE(HasCandidate(candidates_, "stun", "udp", kClientAddr)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "tcp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kSrflx, "udp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "tcp", kClientAddr)); EXPECT_EQ(3U, ports_.size()); for (const Candidate& candidate : candidates_) { EXPECT_EQ(kIceUfrag0, candidate.username()); @@ -1632,15 +1797,22 @@ TEST_F(BasicPortAllocatorTest, TestEnableSharedUfrag) { TEST_F(BasicPortAllocatorTest, TestSharedSocketWithoutNat) { AddInterface(kClientAddr); allocator_->set_flags(allocator().flags() | - PORTALLOCATOR_ENABLE_SHARED_SOCKET); + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_EQ_SIMULATED_WAIT(2U, candidates_.size(), kDefaultAllocationTimeout, - fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidates_.size(); }, Eq(2U), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(2U, ports_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientAddr)); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr)); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); } // Test that when PORTALLOCATOR_ENABLE_SHARED_SOCKET is enabled only one port @@ -1651,46 +1823,57 @@ TEST_F(BasicPortAllocatorTest, TestSharedSocketWithNat) { ResetWithStunServerAndNat(kStunAddr); allocator_->set_flags(allocator().flags() | - PORTALLOCATOR_ENABLE_SHARED_SOCKET); + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_EQ_SIMULATED_WAIT(3U, candidates_.size(), kDefaultAllocationTimeout, - fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidates_.size(); }, Eq(3U), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); ASSERT_EQ(2U, ports_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientAddr)); - EXPECT_TRUE(HasCandidate(candidates_, "stun", "udp", - rtc::SocketAddress(kNatUdpAddr.ipaddr(), 0))); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr)); + 
EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kSrflx, "udp", + SocketAddress(kNatUdpAddr.ipaddr(), 0))); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3U, candidates_.size()); } // Test TURN port in shared socket mode with UDP and TCP TURN server addresses. TEST_F(BasicPortAllocatorTest, TestSharedSocketWithoutNatUsingTurn) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); AddInterface(kClientAddr); - allocator_.reset(new BasicPortAllocator(&network_manager_, &socket_factory_)); + allocator_.emplace(env_, &network_manager_, &socket_factory_); allocator_->Initialize(); AddTurnServers(kTurnUdpIntAddr, kTurnTcpIntAddr); - allocator_->set_step_delay(kMinimumStepDelay); + allocator_->set_step_delay(webrtc::kMinimumStepDelay); allocator_->set_flags(allocator().flags() | - PORTALLOCATOR_ENABLE_SHARED_SOCKET | - PORTALLOCATOR_DISABLE_TCP); + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET | + webrtc::PORTALLOCATOR_DISABLE_TCP); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); ASSERT_EQ(3U, candidates_.size()); ASSERT_EQ(3U, ports_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientAddr)); - EXPECT_TRUE(HasCandidate(candidates_, "relay", "udp", - rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); - EXPECT_TRUE(HasCandidate(candidates_, "relay", "udp", - rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kRelay, "udp", + SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kRelay, "udp", + SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); } // Test that if the turn port prune policy is PRUNE_BASED_ON_PRIORITY, TCP TURN @@ -1801,27 +1984,30 @@ TEST_F(BasicPortAllocatorTestWithRealClock, // This test relies on a real query for "localhost", so it won't work on an // IPv6-only machine. 
MAYBE_SKIP_IPV4; - turn_server_.AddInternalSocket(rtc::SocketAddress("127.0.0.1", 3478), - PROTO_UDP); + turn_server_.AddInternalSocket(SocketAddress("127.0.0.1", 3478), + webrtc::PROTO_UDP); AddInterface(kClientAddr); - allocator_.reset(new BasicPortAllocator(&network_manager_, &socket_factory_)); + allocator_.emplace(env_, &network_manager_, &socket_factory_); allocator_->Initialize(); RelayServerConfig turn_server; RelayCredentials credentials(kTurnUsername, kTurnPassword); turn_server.credentials = credentials; turn_server.ports.push_back( - ProtocolAddress(rtc::SocketAddress("localhost", 3478), PROTO_UDP)); + ProtocolAddress(SocketAddress("localhost", 3478), webrtc::PROTO_UDP)); allocator_->AddTurnServerForTesting(turn_server); - allocator_->set_step_delay(kMinimumStepDelay); + allocator_->set_step_delay(webrtc::kMinimumStepDelay); allocator_->set_flags(allocator().flags() | - PORTALLOCATOR_ENABLE_SHARED_SOCKET | - PORTALLOCATOR_DISABLE_TCP); + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET | + webrtc::PORTALLOCATOR_DISABLE_TCP); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_EQ_WAIT(2U, ports_.size(), kDefaultAllocationTimeout); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return ports_.size(); }, Eq(2U), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout)}), + webrtc::IsRtcOk()); } // Test that when PORTALLOCATOR_ENABLE_SHARED_SOCKET is enabled only one port @@ -1831,26 +2017,33 @@ TEST_F(BasicPortAllocatorTest, TestSharedSocketWithNatUsingTurn) { AddInterface(kClientAddr); ResetWithStunServerAndNat(kStunAddr); - AddTurnServers(kTurnUdpIntAddr, rtc::SocketAddress()); + AddTurnServers(kTurnUdpIntAddr, SocketAddress()); allocator_->set_flags(allocator().flags() | - PORTALLOCATOR_ENABLE_SHARED_SOCKET | - PORTALLOCATOR_DISABLE_TCP); + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET | + webrtc::PORTALLOCATOR_DISABLE_TCP); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3U, candidates_.size()); ASSERT_EQ(2U, ports_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientAddr)); - EXPECT_TRUE(HasCandidate(candidates_, "stun", "udp", - rtc::SocketAddress(kNatUdpAddr.ipaddr(), 0))); - EXPECT_TRUE(HasCandidate(candidates_, "relay", "udp", - rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kSrflx, "udp", + SocketAddress(kNatUdpAddr.ipaddr(), 0))); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kRelay, "udp", + SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // Local port will be created first and then TURN port. // TODO(deadbeef): This isn't something the BasicPortAllocator API contract // guarantees... 
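[Editorial note, not part of the patch] The hunks in this file repeat two mechanical conversions: the EXPECT/ASSERT_*_SIMULATED_WAIT macros become gmock assertions over webrtc::WaitUntil, and string candidate types ("local"/"stun"/"relay") become IceCandidateType enumerators plus the is_local()/is_stun()/is_relay() helpers on Candidate. The sketch below condenses those patterns using only names that already appear in the surrounding hunks (the fixture members fake_clock, candidates_, candidate_allocation_done_, and the HasCandidate helper); it is a reading aid, not additional patch content.

// Old style: macro polls a flag against the simulated clock.
//   EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_,
//                              kDefaultAllocationTimeout, fake_clock);
// New style: WaitUntil polls a lambda until the gmock matcher is satisfied
// (or the timeout elapses); the returned result is checked with IsRtcOk().
EXPECT_THAT(webrtc::WaitUntil(
                [&] { return candidate_allocation_done_; }, IsTrue(),
                {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout),
                 .clock = &fake_clock}),
            webrtc::IsRtcOk());

// The *_EQ_SIMULATED_WAIT variants map onto the same shape, with Eq().
ASSERT_THAT(webrtc::WaitUntil(
                [&] { return candidates_.size(); }, Eq(3U),
                {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout),
                 .clock = &fake_clock}),
            webrtc::IsRtcOk());

// Candidate types: IceCandidateType enumerators replace the "local"/"stun"/
// "relay" strings passed to HasCandidate/FindCandidate, and per-candidate
// checks use the boolean predicates instead of comparing Candidate::type()
// with the *_PORT_TYPE string constants.
EXPECT_TRUE(
    HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr));
for (const Candidate& candidate : candidates_) {
  EXPECT_TRUE(candidate.is_local());
}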
@@ -1865,7 +2058,7 @@ TEST_F(BasicPortAllocatorTest, TestSharedSocketWithNatUsingTurnAsStun) { AddInterface(kClientAddr); // Use an empty SocketAddress to add a NAT without STUN server. ResetWithStunServerAndNat(SocketAddress()); - AddTurnServers(kTurnUdpIntAddr, rtc::SocketAddress()); + AddTurnServers(kTurnUdpIntAddr, SocketAddress()); // Must set the step delay to 0 to make sure the relay allocation phase is // started before the STUN candidates are obtained, so that the STUN binding @@ -1873,24 +2066,27 @@ TEST_F(BasicPortAllocatorTest, TestSharedSocketWithNatUsingTurnAsStun) { // webrtc issue 3537. allocator_->set_step_delay(0); allocator_->set_flags(allocator().flags() | - PORTALLOCATOR_ENABLE_SHARED_SOCKET | - PORTALLOCATOR_DISABLE_TCP); + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET | + webrtc::PORTALLOCATOR_DISABLE_TCP); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3U, candidates_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr)); Candidate stun_candidate; - EXPECT_TRUE(FindCandidate(candidates_, "stun", "udp", - rtc::SocketAddress(kNatUdpAddr.ipaddr(), 0), + EXPECT_TRUE(FindCandidate(candidates_, IceCandidateType::kSrflx, "udp", + SocketAddress(kNatUdpAddr.ipaddr(), 0), &stun_candidate)); EXPECT_TRUE(HasCandidateWithRelatedAddr( - candidates_, "relay", "udp", - rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0), - stun_candidate.address())); + candidates_, IceCandidateType::kRelay, "udp", + SocketAddress(kTurnUdpExtAddr.ipaddr(), 0), stun_candidate.address())); // Local port will be created first and then TURN port. // TODO(deadbeef): This isn't something the BasicPortAllocator API contract @@ -1903,25 +2099,29 @@ TEST_F(BasicPortAllocatorTest, TestSharedSocketWithNatUsingTurnAsStun) { // a UDP STUN server, as this could leak our IP address. Thus we should only // expect two ports, a UDPPort and TurnPort. 
TEST_F(BasicPortAllocatorTest, TestSharedSocketWithNatUsingTurnTcpOnly) { - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); AddInterface(kClientAddr); - ResetWithStunServerAndNat(rtc::SocketAddress()); - AddTurnServers(rtc::SocketAddress(), kTurnTcpIntAddr); + ResetWithStunServerAndNat(SocketAddress()); + AddTurnServers(SocketAddress(), kTurnTcpIntAddr); allocator_->set_flags(allocator().flags() | - PORTALLOCATOR_ENABLE_SHARED_SOCKET | - PORTALLOCATOR_DISABLE_TCP); + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET | + webrtc::PORTALLOCATOR_DISABLE_TCP); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(2U, candidates_.size()); ASSERT_EQ(2U, ports_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientAddr)); - EXPECT_TRUE(HasCandidate(candidates_, "relay", "udp", - rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kRelay, "udp", + SocketAddress(kTurnUdpExtAddr.ipaddr(), 0))); EXPECT_EQ(1U, ports_[0]->Candidates().size()); EXPECT_EQ(1U, ports_[1]->Candidates().size()); } @@ -1935,25 +2135,30 @@ TEST_F(BasicPortAllocatorTest, TestNonSharedSocketWithNatUsingTurnAsStun) { AddInterface(kClientAddr); // Use an empty SocketAddress to add a NAT without STUN server. ResetWithStunServerAndNat(SocketAddress()); - AddTurnServers(kTurnUdpIntAddr, rtc::SocketAddress()); + AddTurnServers(kTurnUdpIntAddr, SocketAddress()); - allocator_->set_flags(allocator().flags() | PORTALLOCATOR_DISABLE_TCP); + allocator_->set_flags(allocator().flags() | + webrtc::PORTALLOCATOR_DISABLE_TCP); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3U, candidates_.size()); ASSERT_EQ(3U, ports_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr)); Candidate stun_candidate; - EXPECT_TRUE(FindCandidate(candidates_, "stun", "udp", - rtc::SocketAddress(kNatUdpAddr.ipaddr(), 0), + EXPECT_TRUE(FindCandidate(candidates_, IceCandidateType::kSrflx, "udp", + SocketAddress(kNatUdpAddr.ipaddr(), 0), &stun_candidate)); Candidate turn_candidate; - EXPECT_TRUE(FindCandidate(candidates_, "relay", "udp", - rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0), + EXPECT_TRUE(FindCandidate(candidates_, IceCandidateType::kRelay, "udp", + SocketAddress(kTurnUdpExtAddr.ipaddr(), 0), &turn_candidate)); // Not using shared socket, so the STUN request's server reflexive address // should be different than the TURN request's server reflexive address. @@ -1972,26 +2177,29 @@ TEST_F(BasicPortAllocatorTest, TestSharedSocketWithNatUsingTurnAndStun) { // the TURN server actually being used as a STUN server. 
ResetWithStunServerAndNat(kStunAddr); stun_server_.reset(); - AddTurnServers(kTurnUdpIntAddr, rtc::SocketAddress()); + AddTurnServers(kTurnUdpIntAddr, SocketAddress()); allocator_->set_flags(allocator().flags() | - PORTALLOCATOR_ENABLE_SHARED_SOCKET | - PORTALLOCATOR_DISABLE_TCP); + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET | + webrtc::PORTALLOCATOR_DISABLE_TCP); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_EQ_SIMULATED_WAIT(3U, candidates_.size(), kDefaultAllocationTimeout, - fake_clock); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientAddr)); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidates_.size(); }, Eq(3U), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr)); Candidate stun_candidate; - EXPECT_TRUE(FindCandidate(candidates_, "stun", "udp", - rtc::SocketAddress(kNatUdpAddr.ipaddr(), 0), + EXPECT_TRUE(FindCandidate(candidates_, IceCandidateType::kSrflx, "udp", + SocketAddress(kNatUdpAddr.ipaddr(), 0), &stun_candidate)); EXPECT_TRUE(HasCandidateWithRelatedAddr( - candidates_, "relay", "udp", - rtc::SocketAddress(kTurnUdpExtAddr.ipaddr(), 0), - stun_candidate.address())); + candidates_, IceCandidateType::kRelay, "udp", + SocketAddress(kTurnUdpExtAddr.ipaddr(), 0), stun_candidate.address())); // Don't bother waiting for STUN timeout, since we already verified // that we got a STUN candidate from the TURN server. @@ -2001,20 +2209,28 @@ TEST_F(BasicPortAllocatorTest, TestSharedSocketWithNatUsingTurnAndStun) { // and fail to generate STUN candidate, local UDP candidate is generated // properly. TEST_F(BasicPortAllocatorTest, TestSharedSocketNoUdpAllowed) { - allocator().set_flags(allocator().flags() | PORTALLOCATOR_DISABLE_RELAY | - PORTALLOCATOR_DISABLE_TCP | - PORTALLOCATOR_ENABLE_SHARED_SOCKET); - fss_->AddRule(false, rtc::FP_UDP, rtc::FD_ANY, kClientAddr); + allocator().set_flags(allocator().flags() | + webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_DISABLE_TCP | + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET); + fss_->AddRule(false, webrtc::FP_UDP, webrtc::FD_ANY, kClientAddr); AddInterface(kClientAddr); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_EQ_SIMULATED_WAIT(1U, ports_.size(), kDefaultAllocationTimeout, - fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return ports_.size(); }, Eq(1U), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(1U, candidates_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientAddr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr)); // STUN timeout is 9.5sec. We need to wait to get candidate done signal. - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, kStunTimeoutMs, - fake_clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kStunTimeoutMs), .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(1U, candidates_.size()); } @@ -2023,57 +2239,76 @@ TEST_F(BasicPortAllocatorTest, TestSharedSocketNoUdpAllowed) { // automatically. 
TEST_F(BasicPortAllocatorTest, TestNetworkPermissionBlocked) { network_manager_.set_default_local_addresses(kPrivateAddr.ipaddr(), - rtc::IPAddress()); + IPAddress()); network_manager_.set_enumeration_permission( - rtc::NetworkManager::ENUMERATION_BLOCKED); - allocator().set_flags(allocator().flags() | PORTALLOCATOR_DISABLE_RELAY | - PORTALLOCATOR_DISABLE_TCP | - PORTALLOCATOR_ENABLE_SHARED_SOCKET); - EXPECT_EQ(0U, - allocator_->flags() & PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION); + NetworkManager::ENUMERATION_BLOCKED); + allocator().set_flags(allocator().flags() | + webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_DISABLE_TCP | + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET); + EXPECT_EQ(0U, allocator_->flags() & + webrtc::PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); - EXPECT_EQ(0U, session_->flags() & PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION); + EXPECT_EQ(0U, session_->flags() & + webrtc::PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION); session_->StartGettingPorts(); - EXPECT_EQ_SIMULATED_WAIT(1U, ports_.size(), kDefaultAllocationTimeout, - fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return ports_.size(); }, Eq(1U), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(1U, candidates_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kPrivateAddr)); - EXPECT_NE(0U, session_->flags() & PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kPrivateAddr)); + EXPECT_NE(0U, session_->flags() & + webrtc::PORTALLOCATOR_DISABLE_ADAPTER_ENUMERATION); } // This test verifies allocator can use IPv6 addresses along with IPv4. TEST_F(BasicPortAllocatorTest, TestEnableIPv6Addresses) { - allocator().set_flags(allocator().flags() | PORTALLOCATOR_DISABLE_RELAY | - PORTALLOCATOR_ENABLE_IPV6 | - PORTALLOCATOR_ENABLE_SHARED_SOCKET); + allocator().set_flags(allocator().flags() | + webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_ENABLE_IPV6 | + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET); AddInterface(kClientIPv6Addr); AddInterface(kClientAddr); - allocator_->set_step_delay(kMinimumStepDelay); + allocator_->set_step_delay(webrtc::kMinimumStepDelay); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(4U, ports_.size()); EXPECT_EQ(4U, candidates_.size()); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientIPv6Addr)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientAddr)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "tcp", kClientIPv6Addr)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "tcp", kClientAddr)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", + kClientIPv6Addr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "udp", kClientAddr)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "tcp", + kClientIPv6Addr)); + EXPECT_TRUE( + HasCandidate(candidates_, IceCandidateType::kHost, "tcp", kClientAddr)); } TEST_F(BasicPortAllocatorTest, TestStopGettingPorts) { AddInterface(kClientAddr); - allocator_->set_step_delay(kDefaultStepDelay); + 
allocator_->set_step_delay(webrtc::kDefaultStepDelay); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_EQ_SIMULATED_WAIT(2U, candidates_.size(), 1000, fake_clock); + ASSERT_THAT(webrtc::WaitUntil([&] { return candidates_.size(); }, Eq(2U), + {.clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(2U, ports_.size()); session_->StopGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, 1000, fake_clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return candidate_allocation_done_; }, + IsTrue(), {.clock = &fake_clock}), + webrtc::IsRtcOk()); // After stopping getting ports, adding a new interface will not start // getting ports again. - allocator_->set_step_delay(kMinimumStepDelay); + allocator_->set_step_delay(webrtc::kMinimumStepDelay); candidates_.clear(); ports_.clear(); candidate_allocation_done_ = false; @@ -2085,25 +2320,34 @@ TEST_F(BasicPortAllocatorTest, TestStopGettingPorts) { TEST_F(BasicPortAllocatorTest, TestClearGettingPorts) { AddInterface(kClientAddr); - allocator_->set_step_delay(kDefaultStepDelay); + allocator_->set_step_delay(webrtc::kDefaultStepDelay); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_EQ_SIMULATED_WAIT(2U, candidates_.size(), 1000, fake_clock); + ASSERT_THAT(webrtc::WaitUntil([&] { return candidates_.size(); }, Eq(2U), + {.clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(2U, ports_.size()); session_->ClearGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, 1000, fake_clock); + EXPECT_THAT(webrtc::WaitUntil([&] { return candidate_allocation_done_; }, + IsTrue(), {.clock = &fake_clock}), + webrtc::IsRtcOk()); // After clearing getting ports, adding a new interface will start getting // ports again. - allocator_->set_step_delay(kMinimumStepDelay); + allocator_->set_step_delay(webrtc::kMinimumStepDelay); candidates_.clear(); ports_.clear(); candidate_allocation_done_ = false; network_manager_.AddInterface(kClientAddr2); - ASSERT_EQ_SIMULATED_WAIT(2U, candidates_.size(), 1000, fake_clock); + ASSERT_THAT(webrtc::WaitUntil([&] { return candidates_.size(); }, Eq(2U), + {.clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(2U, ports_.size()); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); } // Test that the ports and candidates are updated with new ufrag/pwd/etc. 
when @@ -2116,8 +2360,12 @@ TEST_F(BasicPortAllocatorTest, TestTransportInformationUpdated) { webrtc::NO_PRUNE); const PortAllocatorSession* peeked_session = allocator_->GetPooledSession(); ASSERT_NE(nullptr, peeked_session); - EXPECT_EQ_SIMULATED_WAIT(true, peeked_session->CandidatesAllocationDone(), - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return peeked_session->CandidatesAllocationDone(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); // Expect that when TakePooledSession is called, // UpdateTransportInformationInternal will be called and the // BasicPortAllocatorSession will update the ufrag/pwd of ports and @@ -2153,11 +2401,15 @@ TEST_F(BasicPortAllocatorTest, TestSetCandidateFilterAfterCandidatesGathered) { webrtc::NO_PRUNE); const PortAllocatorSession* peeked_session = allocator_->GetPooledSession(); ASSERT_NE(nullptr, peeked_session); - EXPECT_EQ_SIMULATED_WAIT(true, peeked_session->CandidatesAllocationDone(), - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return peeked_session->CandidatesAllocationDone(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); size_t initial_candidates_size = peeked_session->ReadyCandidates().size(); size_t initial_ports_size = peeked_session->ReadyPorts().size(); - allocator_->SetCandidateFilter(CF_RELAY); + allocator_->SetCandidateFilter(webrtc::CF_RELAY); // Assume that when TakePooledSession is called, the candidate filter will be // applied to the pooled session. This is tested by PortAllocatorTest. session_ = @@ -2170,14 +2422,14 @@ TEST_F(BasicPortAllocatorTest, TestSetCandidateFilterAfterCandidatesGathered) { EXPECT_GT(initial_ports_size, ports.size()); for (const PortInterface* port : ports) { // Expect only relay ports. - EXPECT_EQ(RELAY_PORT_TYPE, port->Type()); + EXPECT_EQ(IceCandidateType::kRelay, port->Type()); } for (const Candidate& candidate : candidates) { // Expect only relay candidates now that the filter is applied. - EXPECT_EQ(std::string(RELAY_PORT_TYPE), candidate.type()); + EXPECT_TRUE(candidate.is_relay()); // Expect that the raddr is emptied due to the CF_RELAY filter. 
- EXPECT_EQ(candidate.related_address(), - rtc::EmptySocketAddressWithFamily(candidate.address().family())); + EXPECT_EQ(candidate.related_address(), webrtc::EmptySocketAddressWithFamily( + candidate.address().family())); } } @@ -2192,39 +2444,51 @@ TEST_F(BasicPortAllocatorTest, AddInterface(kPrivateAddr); ResetWithStunServerAndNat(kStunAddr); - AddTurnServers(kTurnUdpIntAddr, rtc::SocketAddress()); + AddTurnServers(kTurnUdpIntAddr, SocketAddress()); allocator_->set_flags(allocator().flags() | - PORTALLOCATOR_ENABLE_SHARED_SOCKET | - PORTALLOCATOR_DISABLE_TCP); + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET | + webrtc::PORTALLOCATOR_DISABLE_TCP); - allocator_->SetCandidateFilter(CF_NONE); + allocator_->SetCandidateFilter(webrtc::CF_NONE); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_TRUE(candidates_.empty()); EXPECT_TRUE(ports_.empty()); // Surface the relay candidate previously gathered but not signaled. - session_->SetCandidateFilter(CF_RELAY); - ASSERT_EQ_SIMULATED_WAIT(1u, candidates_.size(), kDefaultAllocationTimeout, - fake_clock); - EXPECT_EQ(RELAY_PORT_TYPE, candidates_.back().type()); + session_->SetCandidateFilter(webrtc::CF_RELAY); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidates_.size(); }, Eq(1u), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); + EXPECT_TRUE(candidates_.back().is_relay()); EXPECT_EQ(1u, ports_.size()); // Surface the srflx candidate previously gathered but not signaled. - session_->SetCandidateFilter(CF_RELAY | CF_REFLEXIVE); - ASSERT_EQ_SIMULATED_WAIT(2u, candidates_.size(), kDefaultAllocationTimeout, - fake_clock); - EXPECT_EQ(STUN_PORT_TYPE, candidates_.back().type()); + session_->SetCandidateFilter(webrtc::CF_RELAY | webrtc::CF_REFLEXIVE); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidates_.size(); }, Eq(2u), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); + EXPECT_TRUE(candidates_.back().is_stun()); EXPECT_EQ(2u, ports_.size()); // Surface the srflx candidate previously gathered but not signaled. 
- session_->SetCandidateFilter(CF_ALL); - ASSERT_EQ_SIMULATED_WAIT(3u, candidates_.size(), kDefaultAllocationTimeout, - fake_clock); - EXPECT_EQ(LOCAL_PORT_TYPE, candidates_.back().type()); + session_->SetCandidateFilter(webrtc::CF_ALL); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidates_.size(); }, Eq(3u), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); + EXPECT_TRUE(candidates_.back().is_local()); EXPECT_EQ(2u, ports_.size()); } @@ -2242,40 +2506,52 @@ TEST_F( AddInterface(kPrivateAddr); ResetWithStunServerAndNat(kStunAddr); - AddTurnServers(kTurnUdpIntAddr, rtc::SocketAddress()); + AddTurnServers(kTurnUdpIntAddr, SocketAddress()); allocator_->set_flags(allocator().flags() | - PORTALLOCATOR_ENABLE_SHARED_SOCKET | - PORTALLOCATOR_DISABLE_TCP); + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET | + webrtc::PORTALLOCATOR_DISABLE_TCP); - allocator_->SetCandidateFilter(CF_NONE); + allocator_->SetCandidateFilter(webrtc::CF_NONE); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_TRUE(candidates_.empty()); EXPECT_TRUE(ports_.empty()); // Surface the relay candidate previously gathered but not signaled. - session_->SetCandidateFilter(CF_RELAY); - EXPECT_EQ_SIMULATED_WAIT(1u, candidates_.size(), kDefaultAllocationTimeout, - fake_clock); - EXPECT_EQ(RELAY_PORT_TYPE, candidates_.back().type()); + session_->SetCandidateFilter(webrtc::CF_RELAY); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidates_.size(); }, Eq(1u), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); + EXPECT_TRUE(candidates_.back().is_relay()); EXPECT_EQ(1u, ports_.size()); // Surface the srflx candidate previously gathered but not signaled. - session_->SetCandidateFilter(CF_REFLEXIVE); - EXPECT_EQ_SIMULATED_WAIT(2u, candidates_.size(), kDefaultAllocationTimeout, - fake_clock); - EXPECT_EQ(STUN_PORT_TYPE, candidates_.back().type()); + session_->SetCandidateFilter(webrtc::CF_REFLEXIVE); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidates_.size(); }, Eq(2u), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); + EXPECT_TRUE(candidates_.back().is_stun()); EXPECT_EQ(2u, ports_.size()); // Surface the host candidate previously gathered but not signaled. - session_->SetCandidateFilter(CF_HOST); - EXPECT_EQ_SIMULATED_WAIT(3u, candidates_.size(), kDefaultAllocationTimeout, - fake_clock); - EXPECT_EQ(LOCAL_PORT_TYPE, candidates_.back().type()); - // We use a shared socket and cricket::UDPPort handles the srflx candidate. + session_->SetCandidateFilter(webrtc::CF_HOST); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidates_.size(); }, Eq(3u), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); + EXPECT_TRUE(candidates_.back().is_local()); + // We use a shared socket and webrtc::UDPPort handles the srflx candidate. 
EXPECT_EQ(2u, ports_.size()); } @@ -2287,17 +2563,20 @@ TEST_F(BasicPortAllocatorTest, AddInterface(kPrivateAddr); ResetWithStunServerAndNat(kStunAddr); - AddTurnServers(kTurnUdpIntAddr, rtc::SocketAddress()); + AddTurnServers(kTurnUdpIntAddr, SocketAddress()); allocator_->set_flags(allocator().flags() | - PORTALLOCATOR_ENABLE_SHARED_SOCKET | - PORTALLOCATOR_DISABLE_TCP); + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET | + webrtc::PORTALLOCATOR_DISABLE_TCP); - allocator_->SetCandidateFilter(CF_NONE); + allocator_->SetCandidateFilter(webrtc::CF_NONE); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); auto test_invariants = [this]() { EXPECT_TRUE(candidates_.empty()); EXPECT_TRUE(ports_.empty()); @@ -2307,15 +2586,15 @@ TEST_F(BasicPortAllocatorTest, session_->StopGettingPorts(); - session_->SetCandidateFilter(CF_RELAY); + session_->SetCandidateFilter(webrtc::CF_RELAY); SIMULATED_WAIT(false, kDefaultAllocationTimeout, fake_clock); test_invariants(); - session_->SetCandidateFilter(CF_RELAY | CF_REFLEXIVE); + session_->SetCandidateFilter(webrtc::CF_RELAY | webrtc::CF_REFLEXIVE); SIMULATED_WAIT(false, kDefaultAllocationTimeout, fake_clock); test_invariants(); - session_->SetCandidateFilter(CF_ALL); + session_->SetCandidateFilter(webrtc::CF_ALL); SIMULATED_WAIT(false, kDefaultAllocationTimeout, fake_clock); test_invariants(); } @@ -2329,8 +2608,12 @@ TEST_F(BasicPortAllocatorTest, SetStunKeepaliveIntervalForPorts) { webrtc::NO_PRUNE, nullptr, expected_stun_keepalive_interval); auto* pooled_session = allocator_->GetPooledSession(); ASSERT_NE(nullptr, pooled_session); - EXPECT_EQ_SIMULATED_WAIT(true, pooled_session->CandidatesAllocationDone(), - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return pooled_session->CandidatesAllocationDone(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); CheckStunKeepaliveIntervalOfAllReadyPorts(pooled_session, expected_stun_keepalive_interval); } @@ -2344,8 +2627,12 @@ TEST_F(BasicPortAllocatorTest, webrtc::NO_PRUNE, nullptr, 123 /* stun keepalive interval */); auto* pooled_session = allocator_->GetPooledSession(); ASSERT_NE(nullptr, pooled_session); - EXPECT_EQ_SIMULATED_WAIT(true, pooled_session->CandidatesAllocationDone(), - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return pooled_session->CandidatesAllocationDone(); }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); const int expected_stun_keepalive_interval = 321; allocator_->SetConfiguration( allocator_->stun_servers(), allocator_->turn_servers(), pool_size, @@ -2360,14 +2647,17 @@ TEST_F(BasicPortAllocatorTest, const int expected_stun_keepalive_interval = 123; AddInterface(kClientAddr); allocator_->set_flags(allocator().flags() | - PORTALLOCATOR_ENABLE_SHARED_SOCKET); + webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET); allocator_->SetConfiguration( allocator_->stun_servers(), allocator_->turn_servers(), pool_size, webrtc::NO_PRUNE, nullptr, expected_stun_keepalive_interval); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - 
EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); CheckStunKeepaliveIntervalOfAllReadyPorts(session_.get(), expected_stun_keepalive_interval); } @@ -2378,36 +2668,21 @@ TEST_F(BasicPortAllocatorTest, const int expected_stun_keepalive_interval = 123; AddInterface(kClientAddr); allocator_->set_flags(allocator().flags() & - ~(PORTALLOCATOR_ENABLE_SHARED_SOCKET)); + ~(webrtc::PORTALLOCATOR_ENABLE_SHARED_SOCKET)); allocator_->SetConfiguration( allocator_->stun_servers(), allocator_->turn_servers(), pool_size, webrtc::NO_PRUNE, nullptr, expected_stun_keepalive_interval); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); CheckStunKeepaliveIntervalOfAllReadyPorts(session_.get(), expected_stun_keepalive_interval); } -TEST_F(BasicPortAllocatorTest, IceRegatheringMetricsLoggedWhenNetworkChanges) { - // Only test local ports to simplify test. - ResetWithNoServersOrNat(); - AddInterface(kClientAddr, "test_net0"); - ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); - session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); - candidate_allocation_done_ = false; - AddInterface(kClientAddr2, "test_net1"); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); - EXPECT_METRIC_EQ(1, - webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.IceRegatheringReason", - static_cast(IceRegatheringReason::NETWORK_CHANGE))); -} - // Test that when an mDNS responder is present, the local address of a host // candidate is concealed by an mDNS hostname and the related address of a srflx // candidate is set to 0.0.0.0 or ::0. @@ -2415,18 +2690,21 @@ TEST_F(BasicPortAllocatorTest, HostCandidateAddressIsReplacedByHostname) { // Default config uses GTURN and no NAT, so replace that with the // desired setup (NAT, STUN server, TURN server, UDP/TCP). 
ResetWithStunServerAndNat(kStunAddr); - turn_server_.AddInternalSocket(kTurnTcpIntAddr, PROTO_TCP); + turn_server_.AddInternalSocket(kTurnTcpIntAddr, webrtc::PROTO_TCP); AddTurnServers(kTurnUdpIntAddr, kTurnTcpIntAddr); AddTurnServers(kTurnUdpIntIPv6Addr, kTurnTcpIntIPv6Addr); ASSERT_EQ(&network_manager_, allocator().network_manager()); network_manager_.set_mdns_responder( - std::make_unique(rtc::Thread::Current())); + std::make_unique(Thread::Current())); AddInterface(kClientAddr); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(5u, candidates_.size()); int num_host_udp_candidates = 0; int num_host_tcp_candidates = 0; @@ -2435,21 +2713,21 @@ TEST_F(BasicPortAllocatorTest, HostCandidateAddressIsReplacedByHostname) { for (const auto& candidate : candidates_) { const auto& raddr = candidate.related_address(); - if (candidate.type() == LOCAL_PORT_TYPE) { + if (candidate.is_local()) { EXPECT_FALSE(candidate.address().hostname().empty()); EXPECT_TRUE(raddr.IsNil()); - if (candidate.protocol() == UDP_PROTOCOL_NAME) { + if (candidate.protocol() == webrtc::UDP_PROTOCOL_NAME) { ++num_host_udp_candidates; } else { ++num_host_tcp_candidates; } - } else if (candidate.type() == STUN_PORT_TYPE) { + } else if (candidate.is_stun()) { // For a srflx candidate, the related address should be set to 0.0.0.0 or // ::0 - EXPECT_TRUE(IPIsAny(raddr.ipaddr())); + EXPECT_TRUE(webrtc::IPIsAny(raddr.ipaddr())); EXPECT_EQ(raddr.port(), 0); ++num_srflx_candidates; - } else if (candidate.type() == RELAY_PORT_TYPE) { + } else if (candidate.is_relay()) { EXPECT_EQ(kNatUdpAddr.ipaddr(), raddr.ipaddr()); EXPECT_EQ(kNatUdpAddr.family(), raddr.family()); ++num_relay_candidates; @@ -2476,7 +2754,7 @@ TEST_F(BasicPortAllocatorTest, TestUseTurnServerAsStunSever) { } TEST_F(BasicPortAllocatorTest, TestDoNotUseTurnServerAsStunSever) { - webrtc::test::ScopedKeyValueConfig field_trials( + test::ScopedKeyValueConfig field_trials( "WebRTC-UseTurnServerAsStunServer/Disabled/"); ServerAddresses stun_servers; stun_servers.insert(kStunAddr); @@ -2492,7 +2770,7 @@ TEST_F(BasicPortAllocatorTest, TestDoNotUseTurnServerAsStunSever) { // Test that candidates from different servers get assigned a unique local // preference (the middle 16 bits of the priority) TEST_F(BasicPortAllocatorTest, AssignsUniqueLocalPreferencetoRelayCandidates) { - allocator_->SetCandidateFilter(CF_RELAY); + allocator_->SetCandidateFilter(webrtc::CF_RELAY); allocator_->AddTurnServerForTesting( CreateTurnServers(kTurnUdpIntAddr, SocketAddress())); allocator_->AddTurnServerForTesting( @@ -2503,8 +2781,11 @@ TEST_F(BasicPortAllocatorTest, AssignsUniqueLocalPreferencetoRelayCandidates) { AddInterface(kClientAddr); ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - ASSERT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + ASSERT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3u, candidates_.size()); EXPECT_GT((candidates_[0].priority() >> 8) & 0xFFFF, (candidates_[1].priority() >> 8) & 0xFFFF); @@ -2515,13 
+2796,13 @@ TEST_F(BasicPortAllocatorTest, AssignsUniqueLocalPreferencetoRelayCandidates) { // Test that no more than allocator.max_ipv6_networks() IPv6 networks are used // to gather candidates. TEST_F(BasicPortAllocatorTest, TwoIPv6AreSelectedBecauseOfMaxIpv6Limit) { - rtc::Network wifi1("wifi1", "Test NetworkAdapter 1", kClientIPv6Addr.ipaddr(), - 64, rtc::ADAPTER_TYPE_WIFI); - rtc::Network ethe1("ethe1", "Test NetworkAdapter 2", - kClientIPv6Addr2.ipaddr(), 64, rtc::ADAPTER_TYPE_ETHERNET); - rtc::Network wifi2("wifi2", "Test NetworkAdapter 3", - kClientIPv6Addr3.ipaddr(), 64, rtc::ADAPTER_TYPE_WIFI); - std::vector networks = {&wifi1, ðe1, &wifi2}; + Network wifi1("wifi1", "Test NetworkAdapter 1", kClientIPv6Addr.ipaddr(), 64, + ADAPTER_TYPE_WIFI); + Network ethe1("ethe1", "Test NetworkAdapter 2", kClientIPv6Addr2.ipaddr(), 64, + ADAPTER_TYPE_ETHERNET); + Network wifi2("wifi2", "Test NetworkAdapter 3", kClientIPv6Addr3.ipaddr(), 64, + ADAPTER_TYPE_WIFI); + std::vector networks = {&wifi1, ðe1, &wifi2}; // Ensure that only 2 interfaces were selected. EXPECT_EQ(2U, BasicPortAllocatorSession::SelectIPv6Networks( @@ -2532,11 +2813,11 @@ TEST_F(BasicPortAllocatorTest, TwoIPv6AreSelectedBecauseOfMaxIpv6Limit) { // Test that if the number of available IPv6 networks is less than // allocator.max_ipv6_networks(), all IPv6 networks will be selected. TEST_F(BasicPortAllocatorTest, AllIPv6AreSelected) { - rtc::Network wifi1("wifi1", "Test NetworkAdapter 1", kClientIPv6Addr.ipaddr(), - 64, rtc::ADAPTER_TYPE_WIFI); - rtc::Network ethe1("ethe1", "Test NetworkAdapter 2", - kClientIPv6Addr2.ipaddr(), 64, rtc::ADAPTER_TYPE_ETHERNET); - std::vector networks = {&wifi1, ðe1}; + Network wifi1("wifi1", "Test NetworkAdapter 1", kClientIPv6Addr.ipaddr(), 64, + ADAPTER_TYPE_WIFI); + Network ethe1("ethe1", "Test NetworkAdapter 2", kClientIPv6Addr2.ipaddr(), 64, + ADAPTER_TYPE_ETHERNET); + std::vector networks = {&wifi1, ðe1}; // Ensure that all 2 interfaces were selected. EXPECT_EQ(2U, BasicPortAllocatorSession::SelectIPv6Networks( @@ -2547,20 +2828,18 @@ TEST_F(BasicPortAllocatorTest, AllIPv6AreSelected) { // If there are some IPv6 networks with different types, diversify IPv6 // networks. 
TEST_F(BasicPortAllocatorTest, TwoIPv6WifiAreSelectedIfThereAreTwo) { - rtc::Network wifi1("wifi1", "Test NetworkAdapter 1", kClientIPv6Addr.ipaddr(), - 64, rtc::ADAPTER_TYPE_WIFI); - rtc::Network ethe1("ethe1", "Test NetworkAdapter 2", - kClientIPv6Addr2.ipaddr(), 64, rtc::ADAPTER_TYPE_ETHERNET); - rtc::Network ethe2("ethe2", "Test NetworkAdapter 3", - kClientIPv6Addr3.ipaddr(), 64, rtc::ADAPTER_TYPE_ETHERNET); - rtc::Network unknown1("unknown1", "Test NetworkAdapter 4", - kClientIPv6Addr2.ipaddr(), 64, - rtc::ADAPTER_TYPE_UNKNOWN); - rtc::Network cell1("cell1", "Test NetworkAdapter 5", - kClientIPv6Addr3.ipaddr(), 64, - rtc::ADAPTER_TYPE_CELLULAR_4G); - std::vector networks = {&wifi1, ðe1, ðe2, - &unknown1, &cell1}; + Network wifi1("wifi1", "Test NetworkAdapter 1", kClientIPv6Addr.ipaddr(), 64, + ADAPTER_TYPE_WIFI); + Network ethe1("ethe1", "Test NetworkAdapter 2", kClientIPv6Addr2.ipaddr(), 64, + ADAPTER_TYPE_ETHERNET); + Network ethe2("ethe2", "Test NetworkAdapter 3", kClientIPv6Addr3.ipaddr(), 64, + ADAPTER_TYPE_ETHERNET); + Network unknown1("unknown1", "Test NetworkAdapter 4", + kClientIPv6Addr2.ipaddr(), 64, ADAPTER_TYPE_UNKNOWN); + Network cell1("cell1", "Test NetworkAdapter 5", kClientIPv6Addr3.ipaddr(), 64, + ADAPTER_TYPE_CELLULAR_4G); + std::vector networks = {&wifi1, ðe1, ðe2, &unknown1, + &cell1}; networks = BasicPortAllocatorSession::SelectIPv6Networks( networks, /*max_ipv6_networks=*/4); @@ -2578,22 +2857,18 @@ TEST_F(BasicPortAllocatorTest, TwoIPv6WifiAreSelectedIfThereAreTwo) { // is no other option. TEST_F(BasicPortAllocatorTest, IPv6WithSameTypeAreSelectedIfNoOtherOption) { // Add 5 cellular interfaces - rtc::Network cell1("cell1", "Test NetworkAdapter 1", kClientIPv6Addr.ipaddr(), - 64, rtc::ADAPTER_TYPE_CELLULAR_2G); - rtc::Network cell2("cell2", "Test NetworkAdapter 2", - kClientIPv6Addr2.ipaddr(), 64, - rtc::ADAPTER_TYPE_CELLULAR_3G); - rtc::Network cell3("cell3", "Test NetworkAdapter 3", - kClientIPv6Addr3.ipaddr(), 64, - rtc::ADAPTER_TYPE_CELLULAR_4G); - rtc::Network cell4("cell4", "Test NetworkAdapter 4", - kClientIPv6Addr2.ipaddr(), 64, - rtc::ADAPTER_TYPE_CELLULAR_5G); - rtc::Network cell5("cell5", "Test NetworkAdapter 5", - kClientIPv6Addr3.ipaddr(), 64, - rtc::ADAPTER_TYPE_CELLULAR_3G); - std::vector networks = {&cell1, &cell2, &cell3, &cell4, - &cell5}; + Network cell1("cell1", "Test NetworkAdapter 1", kClientIPv6Addr.ipaddr(), 64, + ADAPTER_TYPE_CELLULAR_2G); + Network cell2("cell2", "Test NetworkAdapter 2", kClientIPv6Addr2.ipaddr(), 64, + ADAPTER_TYPE_CELLULAR_3G); + Network cell3("cell3", "Test NetworkAdapter 3", kClientIPv6Addr3.ipaddr(), 64, + ADAPTER_TYPE_CELLULAR_4G); + Network cell4("cell4", "Test NetworkAdapter 4", kClientIPv6Addr2.ipaddr(), 64, + ADAPTER_TYPE_CELLULAR_5G); + Network cell5("cell5", "Test NetworkAdapter 5", kClientIPv6Addr3.ipaddr(), 64, + ADAPTER_TYPE_CELLULAR_3G); + std::vector networks = {&cell1, &cell2, &cell3, &cell4, + &cell5}; // Ensure that 4 interfaces were selected. 
EXPECT_EQ(4U, BasicPortAllocatorSession::SelectIPv6Networks( @@ -2602,13 +2877,13 @@ TEST_F(BasicPortAllocatorTest, IPv6WithSameTypeAreSelectedIfNoOtherOption) { } TEST_F(BasicPortAllocatorTest, IPv6EthernetHasHigherPriorityThanWifi) { - rtc::Network wifi1("wifi1", "Test NetworkAdapter 1", kClientIPv6Addr.ipaddr(), - 64, rtc::ADAPTER_TYPE_WIFI); - rtc::Network ethe1("ethe1", "Test NetworkAdapter 2", - kClientIPv6Addr2.ipaddr(), 64, rtc::ADAPTER_TYPE_ETHERNET); - rtc::Network wifi2("wifi2", "Test NetworkAdapter 3", - kClientIPv6Addr3.ipaddr(), 64, rtc::ADAPTER_TYPE_WIFI); - std::vector networks = {&wifi1, ðe1, &wifi2}; + Network wifi1("wifi1", "Test NetworkAdapter 1", kClientIPv6Addr.ipaddr(), 64, + ADAPTER_TYPE_WIFI); + Network ethe1("ethe1", "Test NetworkAdapter 2", kClientIPv6Addr2.ipaddr(), 64, + ADAPTER_TYPE_ETHERNET); + Network wifi2("wifi2", "Test NetworkAdapter 3", kClientIPv6Addr3.ipaddr(), 64, + ADAPTER_TYPE_WIFI); + std::vector networks = {&wifi1, ðe1, &wifi2}; networks = BasicPortAllocatorSession::SelectIPv6Networks( networks, /*max_ipv6_networks=*/1); @@ -2619,19 +2894,18 @@ TEST_F(BasicPortAllocatorTest, IPv6EthernetHasHigherPriorityThanWifi) { } TEST_F(BasicPortAllocatorTest, IPv6EtherAndWifiHaveHigherPriorityThanOthers) { - rtc::Network cell1("cell1", "Test NetworkAdapter 1", kClientIPv6Addr.ipaddr(), - 64, rtc::ADAPTER_TYPE_CELLULAR_3G); - rtc::Network ethe1("ethe1", "Test NetworkAdapter 2", - kClientIPv6Addr2.ipaddr(), 64, rtc::ADAPTER_TYPE_ETHERNET); - rtc::Network wifi1("wifi1", "Test NetworkAdapter 3", - kClientIPv6Addr3.ipaddr(), 64, rtc::ADAPTER_TYPE_WIFI); - rtc::Network unknown("unknown", "Test NetworkAdapter 4", - kClientIPv6Addr2.ipaddr(), 64, - rtc::ADAPTER_TYPE_UNKNOWN); - rtc::Network vpn1("vpn1", "Test NetworkAdapter 5", kClientIPv6Addr3.ipaddr(), - 64, rtc::ADAPTER_TYPE_VPN); - std::vector networks = {&cell1, ðe1, &wifi1, &unknown, - &vpn1}; + Network cell1("cell1", "Test NetworkAdapter 1", kClientIPv6Addr.ipaddr(), 64, + ADAPTER_TYPE_CELLULAR_3G); + Network ethe1("ethe1", "Test NetworkAdapter 2", kClientIPv6Addr2.ipaddr(), 64, + ADAPTER_TYPE_ETHERNET); + Network wifi1("wifi1", "Test NetworkAdapter 3", kClientIPv6Addr3.ipaddr(), 64, + ADAPTER_TYPE_WIFI); + Network unknown("unknown", "Test NetworkAdapter 4", kClientIPv6Addr2.ipaddr(), + 64, ADAPTER_TYPE_UNKNOWN); + Network vpn1("vpn1", "Test NetworkAdapter 5", kClientIPv6Addr3.ipaddr(), 64, + ADAPTER_TYPE_VPN); + std::vector networks = {&cell1, ðe1, &wifi1, &unknown, + &vpn1}; networks = BasicPortAllocatorSession::SelectIPv6Networks( networks, /*max_ipv6_networks=*/2); @@ -2644,80 +2918,98 @@ TEST_F(BasicPortAllocatorTest, IPv6EtherAndWifiHaveHigherPriorityThanOthers) { TEST_F(BasicPortAllocatorTest, Select2DifferentIntefaces) { allocator().set_max_ipv6_networks(2); - AddInterface(kClientIPv6Addr, "ethe1", rtc::ADAPTER_TYPE_ETHERNET); - AddInterface(kClientIPv6Addr2, "ethe2", rtc::ADAPTER_TYPE_ETHERNET); - AddInterface(kClientIPv6Addr3, "wifi1", rtc::ADAPTER_TYPE_WIFI); - AddInterface(kClientIPv6Addr4, "wifi2", rtc::ADAPTER_TYPE_WIFI); - AddInterface(kClientIPv6Addr5, "cell1", rtc::ADAPTER_TYPE_CELLULAR_3G); + AddInterface(kClientIPv6Addr, "ethe1", ADAPTER_TYPE_ETHERNET); + AddInterface(kClientIPv6Addr2, "ethe2", ADAPTER_TYPE_ETHERNET); + AddInterface(kClientIPv6Addr3, "wifi1", ADAPTER_TYPE_WIFI); + AddInterface(kClientIPv6Addr4, "wifi2", ADAPTER_TYPE_WIFI); + AddInterface(kClientIPv6Addr5, "cell1", ADAPTER_TYPE_CELLULAR_3G); // To simplify the test, only gather UDP host candidates. 
- allocator().set_flags(PORTALLOCATOR_ENABLE_IPV6 | PORTALLOCATOR_DISABLE_TCP | - PORTALLOCATOR_DISABLE_STUN | - PORTALLOCATOR_DISABLE_RELAY | - PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); + allocator().set_flags( + webrtc::PORTALLOCATOR_ENABLE_IPV6 | webrtc::PORTALLOCATOR_DISABLE_TCP | + webrtc::PORTALLOCATOR_DISABLE_STUN | webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); - ASSERT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP)); + ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(2U, candidates_.size()); // ethe1 and wifi1 were selected. - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientIPv6Addr)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientIPv6Addr3)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", + kClientIPv6Addr)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", + kClientIPv6Addr3)); } TEST_F(BasicPortAllocatorTest, Select3DifferentIntefaces) { allocator().set_max_ipv6_networks(3); - AddInterface(kClientIPv6Addr, "ethe1", rtc::ADAPTER_TYPE_ETHERNET); - AddInterface(kClientIPv6Addr2, "ethe2", rtc::ADAPTER_TYPE_ETHERNET); - AddInterface(kClientIPv6Addr3, "wifi1", rtc::ADAPTER_TYPE_WIFI); - AddInterface(kClientIPv6Addr4, "wifi2", rtc::ADAPTER_TYPE_WIFI); - AddInterface(kClientIPv6Addr5, "cell1", rtc::ADAPTER_TYPE_CELLULAR_3G); + AddInterface(kClientIPv6Addr, "ethe1", ADAPTER_TYPE_ETHERNET); + AddInterface(kClientIPv6Addr2, "ethe2", ADAPTER_TYPE_ETHERNET); + AddInterface(kClientIPv6Addr3, "wifi1", ADAPTER_TYPE_WIFI); + AddInterface(kClientIPv6Addr4, "wifi2", ADAPTER_TYPE_WIFI); + AddInterface(kClientIPv6Addr5, "cell1", ADAPTER_TYPE_CELLULAR_3G); // To simplify the test, only gather UDP host candidates. - allocator().set_flags(PORTALLOCATOR_ENABLE_IPV6 | PORTALLOCATOR_DISABLE_TCP | - PORTALLOCATOR_DISABLE_STUN | - PORTALLOCATOR_DISABLE_RELAY | - PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); + allocator().set_flags( + webrtc::PORTALLOCATOR_ENABLE_IPV6 | webrtc::PORTALLOCATOR_DISABLE_TCP | + webrtc::PORTALLOCATOR_DISABLE_STUN | webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); - ASSERT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP)); + ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(3U, candidates_.size()); // ethe1, wifi1, and cell1 were selected. 
- EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientIPv6Addr)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientIPv6Addr3)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientIPv6Addr5)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", + kClientIPv6Addr)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", + kClientIPv6Addr3)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", + kClientIPv6Addr5)); } TEST_F(BasicPortAllocatorTest, Select4DifferentIntefaces) { allocator().set_max_ipv6_networks(4); - AddInterface(kClientIPv6Addr, "ethe1", rtc::ADAPTER_TYPE_ETHERNET); - AddInterface(kClientIPv6Addr2, "ethe2", rtc::ADAPTER_TYPE_ETHERNET); - AddInterface(kClientIPv6Addr3, "wifi1", rtc::ADAPTER_TYPE_WIFI); - AddInterface(kClientIPv6Addr4, "wifi2", rtc::ADAPTER_TYPE_WIFI); - AddInterface(kClientIPv6Addr5, "cell1", rtc::ADAPTER_TYPE_CELLULAR_3G); + AddInterface(kClientIPv6Addr, "ethe1", ADAPTER_TYPE_ETHERNET); + AddInterface(kClientIPv6Addr2, "ethe2", ADAPTER_TYPE_ETHERNET); + AddInterface(kClientIPv6Addr3, "wifi1", ADAPTER_TYPE_WIFI); + AddInterface(kClientIPv6Addr4, "wifi2", ADAPTER_TYPE_WIFI); + AddInterface(kClientIPv6Addr5, "cell1", ADAPTER_TYPE_CELLULAR_3G); // To simplify the test, only gather UDP host candidates. - allocator().set_flags(PORTALLOCATOR_ENABLE_IPV6 | PORTALLOCATOR_DISABLE_TCP | - PORTALLOCATOR_DISABLE_STUN | - PORTALLOCATOR_DISABLE_RELAY | - PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); + allocator().set_flags( + webrtc::PORTALLOCATOR_ENABLE_IPV6 | webrtc::PORTALLOCATOR_DISABLE_TCP | + webrtc::PORTALLOCATOR_DISABLE_STUN | webrtc::PORTALLOCATOR_DISABLE_RELAY | + webrtc::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); - ASSERT_TRUE(CreateSession(cricket::ICE_CANDIDATE_COMPONENT_RTP)); + ASSERT_TRUE(CreateSession(ICE_CANDIDATE_COMPONENT_RTP)); session_->StartGettingPorts(); - EXPECT_TRUE_SIMULATED_WAIT(candidate_allocation_done_, - kDefaultAllocationTimeout, fake_clock); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return candidate_allocation_done_; }, IsTrue(), + {.timeout = TimeDelta::Millis(kDefaultAllocationTimeout), + .clock = &fake_clock}), + webrtc::IsRtcOk()); EXPECT_EQ(4U, candidates_.size()); // ethe1, ethe2, wifi1, and cell1 were selected. 
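Taken together, the SelectNDifferentIntefaces expectations above pin down the policy behind BasicPortAllocatorSession::SelectIPv6Networks: walk the adapter classes in preference order (Ethernet, then Wi-Fi, then the remaining classes) and take one interface per class before taking a second interface from any class, stopping at max_ipv6_networks. The following is only a rough, self-contained sketch that is consistent with these tests, not the actual implementation; the ToyNetwork and NetClass types are invented for the illustration.

#include <cstddef>
#include <map>
#include <vector>

enum class NetClass { kEthernet = 0, kWifi = 1, kCellular = 2, kOther = 3 };

struct ToyNetwork {
  const char* name;
  NetClass net_class;
};

// Take one interface per class in preference order before taking a second
// from any class; this is why max=2 keeps ethe1 + wifi1 rather than
// ethe1 + ethe2, while max=4 keeps ethe1, ethe2, wifi1 and cell1.
std::vector<const ToyNetwork*> SelectPreferred(
    const std::vector<const ToyNetwork*>& networks, size_t max_networks) {
  std::map<NetClass, std::vector<const ToyNetwork*>> buckets;
  for (const ToyNetwork* network : networks) {
    buckets[network->net_class].push_back(network);
  }
  std::vector<const ToyNetwork*> selected;
  for (size_t round = 0; selected.size() < max_networks; ++round) {
    bool took_any = false;
    for (auto& [net_class, bucket] : buckets) {
      if (round < bucket.size() && selected.size() < max_networks) {
        selected.push_back(bucket[round]);
        took_any = true;
      }
    }
    if (!took_any) {
      break;
    }
  }
  return selected;
}

The HasCandidate checks that follow assert exactly this outcome for the max_ipv6_networks=4 case.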
- EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientIPv6Addr)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientIPv6Addr2)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientIPv6Addr3)); - EXPECT_TRUE(HasCandidate(candidates_, "local", "udp", kClientIPv6Addr5)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", + kClientIPv6Addr)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", + kClientIPv6Addr2)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", + kClientIPv6Addr3)); + EXPECT_TRUE(HasCandidate(candidates_, IceCandidateType::kHost, "udp", + kClientIPv6Addr5)); } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/client/relay_port_factory_interface.h b/p2p/client/relay_port_factory_interface.h index edfca3697b..3d5106d2f2 100644 --- a/p2p/client/relay_port_factory_interface.h +++ b/p2p/client/relay_port_factory_interface.h @@ -14,37 +14,32 @@ #include #include -#include "p2p/base/port_interface.h" -#include "rtc_base/ref_count.h" - -namespace rtc { -class AsyncPacketSocket; -class Network; -class PacketSocketFactory; -class Thread; -} // namespace rtc +#include "api/environment/environment.h" +#include "api/packet_socket_factory.h" +#include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/network.h" +#include "rtc_base/thread.h" namespace webrtc { class TurnCustomizer; class FieldTrialsView; } // namespace webrtc -namespace cricket { -class Port; -struct ProtocolAddress; -struct RelayServerConfig; +namespace webrtc { // A struct containing arguments to RelayPortFactory::Create() struct CreateRelayPortArgs { - rtc::Thread* network_thread; - rtc::PacketSocketFactory* socket_factory; - const rtc::Network* network; + Environment env; + Thread* network_thread; + PacketSocketFactory* socket_factory; + const Network* network; const ProtocolAddress* server_address; const RelayServerConfig* config; std::string username; std::string password; - webrtc::TurnCustomizer* turn_customizer = nullptr; - const webrtc::FieldTrialsView* field_trials = nullptr; + TurnCustomizer* turn_customizer = nullptr; // Relative priority of candidates from this TURN server in relation // to the candidates from other servers. Required because ICE priorities // need to be unique. @@ -59,7 +54,7 @@ class RelayPortFactoryInterface { // This variant is used for UDP connection to the relay server // using a already existing shared socket. virtual std::unique_ptr Create(const CreateRelayPortArgs& args, - rtc::AsyncPacketSocket* udp_socket) = 0; + AsyncPacketSocket* udp_socket) = 0; // This variant is used for the other cases. virtual std::unique_ptr Create(const CreateRelayPortArgs& args, @@ -67,6 +62,15 @@ class RelayPortFactoryInterface { int max_port) = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::CreateRelayPortArgs; +using ::webrtc::RelayPortFactoryInterface; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_CLIENT_RELAY_PORT_FACTORY_INTERFACE_H_ diff --git a/p2p/client/turn_port_factory.cc b/p2p/client/turn_port_factory.cc index 555387dbbf..8cf3222cdb 100644 --- a/p2p/client/turn_port_factory.cc +++ b/p2p/client/turn_port_factory.cc @@ -13,16 +13,18 @@ #include #include +#include "p2p/base/port.h" #include "p2p/base/port_allocator.h" #include "p2p/base/turn_port.h" +#include "p2p/client/relay_port_factory_interface.h" +#include "rtc_base/async_packet_socket.h" -namespace cricket { +namespace webrtc { TurnPortFactory::~TurnPortFactory() {} -std::unique_ptr TurnPortFactory::Create( - const CreateRelayPortArgs& args, - rtc::AsyncPacketSocket* udp_socket) { +std::unique_ptr TurnPortFactory::Create(const CreateRelayPortArgs& args, + AsyncPacketSocket* udp_socket) { auto port = TurnPort::Create(args, udp_socket); if (!port) return nullptr; @@ -42,4 +44,4 @@ std::unique_ptr TurnPortFactory::Create(const CreateRelayPortArgs& args, return std::move(port); } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/client/turn_port_factory.h b/p2p/client/turn_port_factory.h index abb1f67fe9..e1c33c13e0 100644 --- a/p2p/client/turn_port_factory.h +++ b/p2p/client/turn_port_factory.h @@ -17,7 +17,7 @@ #include "p2p/client/relay_port_factory_interface.h" #include "rtc_base/async_packet_socket.h" -namespace cricket { +namespace webrtc { // This is a RelayPortFactory that produces TurnPorts. class TurnPortFactory : public RelayPortFactoryInterface { @@ -25,13 +25,21 @@ class TurnPortFactory : public RelayPortFactoryInterface { ~TurnPortFactory() override; std::unique_ptr Create(const CreateRelayPortArgs& args, - rtc::AsyncPacketSocket* udp_socket) override; + AsyncPacketSocket* udp_socket) override; std::unique_ptr Create(const CreateRelayPortArgs& args, int min_port, int max_port) override; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::TurnPortFactory; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // P2P_CLIENT_TURN_PORT_FACTORY_H_ diff --git a/p2p/dtls/dtls_ice_integrationtest.cc b/p2p/dtls/dtls_ice_integrationtest.cc new file mode 100644 index 0000000000..d45eeda2e0 --- /dev/null +++ b/p2p/dtls/dtls_ice_integrationtest.cc @@ -0,0 +1,529 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include +#include +#include +#include +#include +#include + +#include "absl/strings/str_cat.h" +#include "api/candidate.h" +#include "api/crypto/crypto_options.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/field_trials.h" +#include "api/scoped_refptr.h" +#include "api/test/create_network_emulation_manager.h" +#include "api/test/network_emulation_manager.h" +#include "api/test/rtc_error_matchers.h" +#include "api/test/simulated_network.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "p2p/base/basic_packet_socket_factory.h" +#include "p2p/base/connection_info.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/p2p_transport_channel.h" +#include "p2p/base/port_allocator.h" +#include "p2p/base/transport_description.h" +#include "p2p/client/basic_port_allocator.h" +#include "p2p/dtls/dtls_transport.h" +#include "rtc_base/checks.h" +#include "rtc_base/fake_clock.h" +#include "rtc_base/fake_network.h" +#include "rtc_base/logging.h" +#include "rtc_base/network.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/ssl_fingerprint.h" +#include "rtc_base/ssl_identity.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/virtual_socket_server.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/wait_until.h" + +namespace { +constexpr int kDefaultTimeout = 30000; +} // namespace + +namespace webrtc { + +using ::testing::IsTrue; + +class DtlsIceIntegrationTest : public ::testing::TestWithParam>, + public sigslot::has_slots<> { + public: + void CandidateC2S(webrtc::IceTransportInternal*, const webrtc::Candidate& c) { + server_thread()->PostTask( + [this, c = c]() { server_.ice->AddRemoteCandidate(c); }); + } + void CandidateS2C(webrtc::IceTransportInternal*, const webrtc::Candidate& c) { + client_thread()->PostTask( + [this, c = c]() { client_.ice->AddRemoteCandidate(c); }); + } + + private: + struct Endpoint { + explicit Endpoint(bool dtls_in_stun, bool pqc_) + : env(CreateEnvironment(FieldTrials::CreateNoGlobal(absl::StrCat( + (dtls_in_stun ? "WebRTC-IceHandshakeDtls/Enabled/" : ""), + (pqc_ ? "WebRTC-EnableDtlsPqc/Enabled/" : ""))))), + dtls_stun_piggyback(dtls_in_stun), + pqc(pqc_) {} + + webrtc::EmulatedNetworkManagerInterface* emulated_network_manager = nullptr; + std::unique_ptr network_manager; + std::unique_ptr packet_socket_factory; + std::unique_ptr allocator; + std::unique_ptr ice; + std::unique_ptr dtls; + + // SetRemoteFingerprintFromCert does not actually set the fingerprint, + // but only store it for setting later. 
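    // (The ClientLateCertificate test below sets this flag to model an answer
    // SDP whose fingerprint only arrives after ICE has already connected.)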
+ bool store_but_dont_set_remote_fingerprint = false; + std::unique_ptr remote_fingerprint; + + Environment env; + bool dtls_stun_piggyback; + bool pqc; + }; + + protected: + DtlsIceIntegrationTest() + : ss_(std::make_unique()), + socket_factory_( + std::make_unique(ss_.get())), + client_(std::get<0>(GetParam()), + std::get<2>(GetParam()) == webrtc::SSL_PROTOCOL_DTLS_13 && + std::get<4>(GetParam())), + server_(std::get<1>(GetParam()), + std::get<2>(GetParam()) == webrtc::SSL_PROTOCOL_DTLS_13 && + std::get<5>(GetParam())), + client_ice_parameters_("c_ufrag", + "c_icepwd_something_something", + false), + server_ice_parameters_("s_ufrag", + "s_icepwd_something_something", + false) {} + + void ConfigureEmulatedNetwork() { + network_emulation_manager_ = webrtc::CreateNetworkEmulationManager( + {.time_mode = webrtc::TimeMode::kSimulated}); + + BuiltInNetworkBehaviorConfig networkBehavior; + networkBehavior.link_capacity = webrtc::DataRate::KilobitsPerSec(200); + // TODO (webrtc:383141571) : Investigate why this testcase fails for + // DTLS 1.3 delay if networkBehavior.queue_delay_ms = 100ms. + // - unless both peers support dtls in stun, in which case it passes. + // - note: only for dtls1.3, it works for dtls1.2! + networkBehavior.queue_delay_ms = 50; + networkBehavior.queue_length_packets = 30; + networkBehavior.loss_percent = 50; + + auto pair = network_emulation_manager_->CreateEndpointPairWithTwoWayRoutes( + networkBehavior); + + client_.emulated_network_manager = pair.first; + server_.emulated_network_manager = pair.second; + } + + void SetupEndpoint( + Endpoint& ep, + bool client, + const scoped_refptr client_certificate, + const scoped_refptr server_certificate) { + thread(ep)->BlockingCall([&]() { + if (network_emulation_manager_ == nullptr) { + ep.allocator = std::make_unique( + ep.env, &network_manager_, socket_factory_.get()); + } else { + ep.network_manager = + ep.emulated_network_manager->ReleaseNetworkManager(); + ep.packet_socket_factory = + std::make_unique( + ep.emulated_network_manager->socket_factory()); + ep.allocator = std::make_unique( + ep.env, ep.network_manager.get(), ep.packet_socket_factory.get()); + } + ep.allocator->set_flags(ep.allocator->flags() | + webrtc::PORTALLOCATOR_DISABLE_TCP); + ep.ice = std::make_unique( + client ? "client_transport" : "server_transport", 0, + ep.allocator.get(), &ep.env.field_trials()); + ep.dtls = std::make_unique( + ep.ice.get(), webrtc::CryptoOptions(), + /*event_log=*/nullptr, std::get<2>(GetParam())); + + // Enable(or disable) the dtls_in_stun parameter before + // DTLS is negotiated. + webrtc::IceConfig config; + config.continual_gathering_policy = webrtc::GATHER_CONTINUALLY; + config.dtls_handshake_in_stun = ep.dtls_stun_piggyback; + ep.ice->SetIceConfig(config); + + // Setup ICE. + ep.ice->SetIceParameters(client ? client_ice_parameters_ + : server_ice_parameters_); + ep.ice->SetRemoteIceParameters(client ? server_ice_parameters_ + : client_ice_parameters_); + if (client) { + ep.ice->SetIceRole(std::get<3>(GetParam()) + ? webrtc::ICEROLE_CONTROLLED + : webrtc::ICEROLE_CONTROLLING); + } else { + ep.ice->SetIceRole(std::get<3>(GetParam()) + ? webrtc::ICEROLE_CONTROLLING + : webrtc::ICEROLE_CONTROLLED); + } + if (client) { + ep.ice->SignalCandidateGathered.connect( + this, &DtlsIceIntegrationTest::CandidateC2S); + } else { + ep.ice->SignalCandidateGathered.connect( + this, &DtlsIceIntegrationTest::CandidateS2C); + } + + // Setup DTLS. + ep.dtls->SetDtlsRole(client ? 
webrtc::SSL_SERVER : webrtc::SSL_CLIENT); + SetLocalCertificate(ep, client ? client_certificate : server_certificate); + SetRemoteFingerprintFromCert( + ep, client ? server_certificate : client_certificate); + }); + } + + void Prepare() { + auto client_certificate = webrtc::RTCCertificate::Create( + webrtc::SSLIdentity::Create("test", webrtc::KT_DEFAULT)); + auto server_certificate = webrtc::RTCCertificate::Create( + webrtc::SSLIdentity::Create("test", webrtc::KT_DEFAULT)); + + if (network_emulation_manager_ == nullptr) { + thread_ = std::make_unique(ss_.get()); + } + + client_thread()->BlockingCall([&]() { + SetupEndpoint(client_, /* client= */ true, client_certificate, + server_certificate); + }); + + server_thread()->BlockingCall([&]() { + SetupEndpoint(server_, /* client= */ false, client_certificate, + server_certificate); + }); + + // Setup the network. + if (network_emulation_manager_ == nullptr) { + network_manager_.AddInterface(webrtc::SocketAddress("192.168.1.1", 0)); + } + + client_thread()->BlockingCall([&]() { client_.allocator->Initialize(); }); + server_thread()->BlockingCall([&]() { server_.allocator->Initialize(); }); + } + + void TearDown() { + client_thread()->BlockingCall([&]() { + client_.dtls.reset(); + client_.ice.reset(); + client_.allocator.reset(); + }); + + server_thread()->BlockingCall([&]() { + server_.dtls.reset(); + server_.ice.reset(); + server_.allocator.reset(); + }); + } + + ~DtlsIceIntegrationTest() = default; + + static int CountConnectionsWithFilter( + webrtc::IceTransportInternal* ice, + std::function filter) { + webrtc::IceTransportStats stats; + ice->GetStats(&stats); + int count = 0; + for (const auto& con : stats.connection_infos) { + if (filter(con)) { + count++; + } + } + return count; + } + + static int CountConnections(webrtc::IceTransportInternal* ice) { + return CountConnectionsWithFilter(ice, [](auto con) { return true; }); + } + + static int CountWritableConnections(webrtc::IceTransportInternal* ice) { + return CountConnectionsWithFilter(ice, + [](auto con) { return con.writable; }); + } + + webrtc::WaitUntilSettings wait_until_settings() { + if (network_emulation_manager_ == nullptr) { + return { + .timeout = webrtc::TimeDelta::Millis(kDefaultTimeout), + .clock = &fake_clock_, + }; + } else { + return { + .timeout = webrtc::TimeDelta::Millis(kDefaultTimeout), + .clock = network_emulation_manager_->time_controller(), + }; + } + } + + webrtc::Thread* thread(Endpoint& ep) { + if (ep.emulated_network_manager == nullptr) { + return thread_.get(); + } else { + return ep.emulated_network_manager->network_thread(); + } + } + + webrtc::Thread* client_thread() { return thread(client_); } + + webrtc::Thread* server_thread() { return thread(server_); } + + void SetRemoteFingerprintFromCert(Endpoint& ep, + const scoped_refptr& cert) { + ep.remote_fingerprint = + webrtc::SSLFingerprint::CreateFromCertificate(*cert); + if (ep.store_but_dont_set_remote_fingerprint) { + return; + } + SetRemoteFingerprint(ep); + } + + void SetRemoteFingerprint(Endpoint& ep) { + RTC_CHECK(ep.remote_fingerprint); + RTC_LOG(LS_INFO) << ((&ep == &client_) ? "client" : "server") + << "::SetRemoteFingerprint"; + ep.dtls->SetRemoteParameters( + ep.remote_fingerprint->algorithm, + reinterpret_cast(ep.remote_fingerprint->digest.data()), + ep.remote_fingerprint->digest.size(), std::nullopt); + } + + void SetLocalCertificate(Endpoint& ep, + const scoped_refptr certificate) { + RTC_CHECK(certificate); + RTC_LOG(LS_INFO) << ((&ep == &client_) ? 
"client" : "server") + << "::SetLocalCertificate: "; + ep.dtls->SetLocalCertificate(certificate); + } + + webrtc::ScopedFakeClock fake_clock_; + webrtc::FakeNetworkManager network_manager_; + std::unique_ptr ss_; + std::unique_ptr socket_factory_; + std::unique_ptr network_emulation_manager_; + std::unique_ptr thread_; + + Endpoint client_; + Endpoint server_; + + webrtc::IceParameters client_ice_parameters_; + webrtc::IceParameters server_ice_parameters_; +}; + +TEST_P(DtlsIceIntegrationTest, SmokeTest) { + Prepare(); + client_.ice->MaybeStartGathering(); + server_.ice->MaybeStartGathering(); + + // Note: this only reaches the pending piggybacking state. + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return client_.dtls->writable() && server_.dtls->writable(); }, + IsTrue(), wait_until_settings()), + webrtc::IsRtcOk()); + EXPECT_EQ(client_.dtls->IsDtlsPiggybackSupportedByPeer(), + client_.dtls_stun_piggyback && server_.dtls_stun_piggyback); + EXPECT_EQ(server_.dtls->IsDtlsPiggybackSupportedByPeer(), + client_.dtls_stun_piggyback && server_.dtls_stun_piggyback); + EXPECT_EQ(client_.dtls->WasDtlsCompletedByPiggybacking(), + client_.dtls_stun_piggyback && server_.dtls_stun_piggyback); + EXPECT_EQ(server_.dtls->WasDtlsCompletedByPiggybacking(), + client_.dtls_stun_piggyback && server_.dtls_stun_piggyback); + + if (!(client_.pqc || server_.pqc) && client_.dtls_stun_piggyback && + server_.dtls_stun_piggyback) { + EXPECT_EQ(client_.dtls->GetStunDataCount(), 2); + EXPECT_EQ(server_.dtls->GetStunDataCount(), 1); + } else { + // TODO(webrtc:404763475) + } + + if ((client_.pqc || server_.pqc) && + !(client_.dtls_stun_piggyback && server_.dtls_stun_piggyback)) { + // TODO(webrtc:404763475) : The retransmissions is due to early + // client hello and the code only saves 1 packet. + } else { + EXPECT_EQ(client_.dtls->GetRetransmissionCount(), 0); + EXPECT_EQ(server_.dtls->GetRetransmissionCount(), 0); + } + + // Validate that we can add new Connections (that become writable). + network_manager_.AddInterface(webrtc::SocketAddress("192.168.2.1", 0)); + EXPECT_THAT(webrtc::WaitUntil( + [&] { + return CountWritableConnections(client_.ice.get()) > 1 && + CountWritableConnections(server_.ice.get()) > 1; + }, + IsTrue(), wait_until_settings()), + webrtc::IsRtcOk()); +} + +// Check that DtlsInStun still works even if SetRemoteFingerprint is called +// "late". This is what happens if the answer sdp comes strictly after ICE has +// connected. Before this patch, this would disable stun-piggy-backing. 
+TEST_P(DtlsIceIntegrationTest, ClientLateCertificate) { + client_.store_but_dont_set_remote_fingerprint = true; + Prepare(); + client_.ice->MaybeStartGathering(); + server_.ice->MaybeStartGathering(); + + ASSERT_THAT( + webrtc::WaitUntil( + [&] { return CountWritableConnections(client_.ice.get()) > 0; }, + IsTrue(), wait_until_settings()), + webrtc::IsRtcOk()); + SetRemoteFingerprint(client_); + + ASSERT_THAT( + webrtc::WaitUntil( + [&] { return client_.dtls->writable() && server_.dtls->writable(); }, + IsTrue(), wait_until_settings()), + webrtc::IsRtcOk()); + + EXPECT_EQ(client_.dtls->IsDtlsPiggybackSupportedByPeer(), + client_.dtls_stun_piggyback && server_.dtls_stun_piggyback); + + EXPECT_EQ(client_.dtls->WasDtlsCompletedByPiggybacking(), + client_.dtls_stun_piggyback && server_.dtls_stun_piggyback); + EXPECT_EQ(server_.dtls->WasDtlsCompletedByPiggybacking(), + client_.dtls_stun_piggyback && server_.dtls_stun_piggyback); + + if ((client_.pqc || server_.pqc) && + !(client_.dtls_stun_piggyback && server_.dtls_stun_piggyback)) { + // TODO(webrtc:404763475) : The retransmissions is due to early + // client hello and the code only saves 1 packet. + } else { + EXPECT_EQ(client_.dtls->GetRetransmissionCount(), 0); + EXPECT_EQ(server_.dtls->GetRetransmissionCount(), 0); + } +} + +TEST_P(DtlsIceIntegrationTest, TestWithPacketLoss) { + if (!SSLStreamAdapter::IsBoringSsl()) { + GTEST_SKIP() << "Needs boringssl."; + } + ConfigureEmulatedNetwork(); + Prepare(); + + client_thread()->PostTask([&]() { client_.ice->MaybeStartGathering(); }); + + server_thread()->PostTask([&]() { server_.ice->MaybeStartGathering(); }); + + EXPECT_THAT(webrtc::WaitUntil( + [&] { + return client_thread()->BlockingCall([&]() { + return client_.dtls->writable(); + }) && server_thread()->BlockingCall([&]() { + return server_.dtls->writable(); + }); + }, + IsTrue(), wait_until_settings()), + webrtc::IsRtcOk()); + + EXPECT_EQ(client_thread()->BlockingCall([&]() { + return client_.dtls->IsDtlsPiggybackSupportedByPeer(); + }), + client_.dtls_stun_piggyback && server_.dtls_stun_piggyback); + EXPECT_EQ(server_thread()->BlockingCall([&]() { + return server_.dtls->IsDtlsPiggybackSupportedByPeer(); + }), + client_.dtls_stun_piggyback && server_.dtls_stun_piggyback); +} + +// Verify that DtlsStunPiggybacking works even if one (or several) +// of the STUN_BINDING_REQUESTs are so full that dtls does not fit. +TEST_P(DtlsIceIntegrationTest, AlmostFullSTUN_BINDING) { + Prepare(); + + std::string a_long_string(500, 'a'); + client_.ice->GetDictionaryWriter()->get().SetByteString(77)->CopyBytes( + a_long_string); + server_.ice->GetDictionaryWriter()->get().SetByteString(78)->CopyBytes( + a_long_string); + + client_.ice->MaybeStartGathering(); + server_.ice->MaybeStartGathering(); + + // Note: this only reaches the pending piggybacking state. 
+ EXPECT_THAT( + webrtc::WaitUntil( + [&] { return client_.dtls->writable() && server_.dtls->writable(); }, + IsTrue(), wait_until_settings()), + webrtc::IsRtcOk()); + EXPECT_EQ(client_.dtls->IsDtlsPiggybackSupportedByPeer(), + client_.dtls_stun_piggyback && server_.dtls_stun_piggyback); + EXPECT_EQ(server_.dtls->IsDtlsPiggybackSupportedByPeer(), + client_.dtls_stun_piggyback && server_.dtls_stun_piggyback); + EXPECT_EQ(client_.dtls->WasDtlsCompletedByPiggybacking(), + client_.dtls_stun_piggyback && server_.dtls_stun_piggyback); + EXPECT_EQ(server_.dtls->WasDtlsCompletedByPiggybacking(), + client_.dtls_stun_piggyback && server_.dtls_stun_piggyback); + + if (!(client_.pqc || server_.pqc) && client_.dtls_stun_piggyback && + server_.dtls_stun_piggyback) { + EXPECT_EQ(client_.dtls->GetStunDataCount(), 2); + EXPECT_EQ(server_.dtls->GetStunDataCount(), 1); + } else { + // TODO(webrtc:404763475) + } + + if ((client_.pqc || server_.pqc) && + !(client_.dtls_stun_piggyback && server_.dtls_stun_piggyback)) { + // TODO(webrtc:404763475) : The retransmissions is due to early + // client hello and the code only saves 1 packet. + } else { + EXPECT_EQ(client_.dtls->GetRetransmissionCount(), 0); + EXPECT_EQ(server_.dtls->GetRetransmissionCount(), 0); + } +} + +// Test cases are parametrized by +// * client-piggybacking-enabled, +// * server-piggybacking-enabled, +// * maximum DTLS version to use. +INSTANTIATE_TEST_SUITE_P( + DtlsStunPiggybackingIntegrationTest, + DtlsIceIntegrationTest, + ::testing::Combine(testing::Bool(), + testing::Bool(), + testing::Values(webrtc::SSL_PROTOCOL_DTLS_12, + webrtc::SSL_PROTOCOL_DTLS_13), + testing::Bool(), + testing::Bool(), + testing::Bool())); + +} // namespace webrtc diff --git a/p2p/dtls/dtls_stun_piggyback_callbacks.h b/p2p/dtls/dtls_stun_piggyback_callbacks.h new file mode 100644 index 0000000000..4c8b226f91 --- /dev/null +++ b/p2p/dtls/dtls_stun_piggyback_callbacks.h @@ -0,0 +1,90 @@ +/* + * Copyright 2025 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef P2P_DTLS_DTLS_STUN_PIGGYBACK_CALLBACKS_H_ +#define P2P_DTLS_DTLS_STUN_PIGGYBACK_CALLBACKS_H_ + +#include +#include + +#include "absl/functional/any_invocable.h" +#include "absl/strings/string_view.h" +#include "api/transport/stun.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +class DtlsStunPiggybackCallbacks { + public: + DtlsStunPiggybackCallbacks() : send_data_(nullptr), recv_data_(nullptr) {} + + DtlsStunPiggybackCallbacks( + // Function invoked when sending a `request-type` (e.g. + // STUN_BINDING_REQUEST). Returns a pair of data that will be sent: + // - an optional DTLS_IN_STUN attribute + // - an optional DTLS_IN_STUN_ACK attribute + absl::AnyInvocable, + std::optional>( + /* request-type */ webrtc::StunMessageType)>&& send_data, + + // Function invoked when receiving a STUN_BINDING { REQUEST / RESPONSE } + // contains the (nullable) DTLS_IN_STUN and DTLS_IN_STUN_ACK attributes. 
+ absl::AnyInvocable< + void(const webrtc::StunByteStringAttribute* /* DTLS_IN_STUN */, + const webrtc::StunByteStringAttribute* /* DTLS_IN_STUN_ACK */)>&& + recv_data) + : send_data_(std::move(send_data)), recv_data_(std::move(recv_data)) { + RTC_DCHECK( + // either all set + (send_data_ != nullptr && recv_data_ != nullptr) || + // or all nullptr + (send_data_ == nullptr && recv_data_ == nullptr)); + } + + std::pair, std::optional> + send_data(StunMessageType request_type) { + RTC_DCHECK(send_data_); + return send_data_(request_type); + } + + void recv_data(const StunByteStringAttribute* data, + const StunByteStringAttribute* ack) { + RTC_DCHECK(recv_data_); + return recv_data_(data, ack); + } + + bool empty() const { return send_data_ == nullptr; } + void reset() { + send_data_ = nullptr; + recv_data_ = nullptr; + } + + private: + absl::AnyInvocable, + std::optional>( + /* request-type */ webrtc::StunMessageType)> + send_data_; + absl::AnyInvocable + recv_data_; +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::DtlsStunPiggybackCallbacks; +} // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // P2P_DTLS_DTLS_STUN_PIGGYBACK_CALLBACKS_H_ diff --git a/p2p/dtls/dtls_stun_piggyback_controller.cc b/p2p/dtls/dtls_stun_piggyback_controller.cc new file mode 100644 index 0000000000..447257a52e --- /dev/null +++ b/p2p/dtls/dtls_stun_piggyback_controller.cc @@ -0,0 +1,236 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "p2p/dtls/dtls_stun_piggyback_controller.h" + +#include +#include +#include +#include +#include + +#include "absl/container/flat_hash_set.h" +#include "absl/functional/any_invocable.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/sequence_checker.h" +#include "api/transport/stun.h" +#include "p2p/dtls/dtls_utils.h" +#include "rtc_base/byte_buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/str_join.h" + +namespace webrtc { + +DtlsStunPiggybackController::DtlsStunPiggybackController( + absl::AnyInvocable)> + dtls_data_callback) + : dtls_data_callback_(std::move(dtls_data_callback)) {} + +DtlsStunPiggybackController::~DtlsStunPiggybackController() {} + +void DtlsStunPiggybackController::SetDtlsHandshakeComplete(bool is_dtls_client, + bool is_dtls13) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + + // As DTLS 1.2 server we need to keep the last flight around until + // we receive the post-handshake acknowledgment. + // As DTLS 1.2 client we have nothing more to send at this point + // but will continue to send ACK attributes until receiving + // the last flight from the server. + // For DTLS 1.3 this is reversed since the handshake has one round trip less. + if ((is_dtls_client && !is_dtls13) || (!is_dtls_client && is_dtls13)) { + pending_packets_.clear(); + } + + // Peer does not support this so fallback to a normal DTLS handshake + // happened. 
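  // In that case state_ is already OFF and there is nothing left to track;
  // otherwise the completed handshake parks the controller in PENDING until
  // the final ACK round trip has been observed.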
+ if (state_ == State::OFF) { + return; + } + state_ = State::PENDING; +} + +void DtlsStunPiggybackController::CapturePacket(ArrayView data) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (!IsDtlsPacket(data)) { + return; + } + + // BoringSSL writes burst of packets...but the interface + // is made for 1-packet at a time. Use the writing_packets_ variable to keep + // track of a full batch. The writing_packets_ is reset in Flush. + if (!writing_packets_) { + pending_packets_.clear(); + writing_packets_ = true; + } + + pending_packets_.Add(data); +} + +void DtlsStunPiggybackController::ClearCachedPacketForTesting() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + pending_packets_.clear(); +} + +void DtlsStunPiggybackController::Flush() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + writing_packets_ = false; +} + +std::optional +DtlsStunPiggybackController::GetDataToPiggyback( + StunMessageType stun_message_type) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(stun_message_type == STUN_BINDING_REQUEST || + stun_message_type == STUN_BINDING_RESPONSE || + stun_message_type == STUN_BINDING_INDICATION); + + // No longer writing packets...since we're now about to send them. + RTC_DCHECK(!writing_packets_); + + if (state_ == State::COMPLETE) { + return std::nullopt; + } + + if (stun_message_type == STUN_BINDING_INDICATION) { + // TODO(jonaso, webrtc:367395350): Remove this branch that returns the + // pending packet even if state is OFF when we remove + // P2PTransportChannel::PeriodicRetransmitDtlsPacketUntilDtlsConnected. + } else if (state_ == State::OFF) { + return std::nullopt; + } + + if (pending_packets_.empty()) { + return std::nullopt; + } + + const auto packet = pending_packets_.GetNext(); + return absl::string_view(reinterpret_cast(packet.data()), + packet.size()); +} + +std::optional DtlsStunPiggybackController::GetAckToPiggyback( + StunMessageType stun_message_type) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + + if (state_ == State::OFF || state_ == State::COMPLETE) { + return std::nullopt; + } + return handshake_ack_writer_.DataAsStringView(); +} + +void DtlsStunPiggybackController::ReportDataPiggybacked( + const StunByteStringAttribute* data, + const StunByteStringAttribute* ack) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + + // Drop silently when receiving acked data when the peer previously did not + // support or we already moved to the complete state. + if (state_ == State::OFF || state_ == State::COMPLETE) { + return; + } + + // We sent dtls piggybacked but got nothing in return or + // we received a stun request with neither attribute set + // => peer does not support. + if (state_ == State::TENTATIVE && data == nullptr && ack == nullptr) { + RTC_LOG(LS_INFO) << "DTLS-STUN piggybacking not supported by peer."; + state_ = State::OFF; + return; + } + + // In PENDING state the peer may have stopped sending the ack + // when it moved to the COMPLETE state. Move to the same state. + if (state_ == State::PENDING && data == nullptr && ack == nullptr) { + RTC_LOG(LS_INFO) << "DTLS-STUN piggybacking complete."; + state_ = State::COMPLETE; + pending_packets_.clear(); + handshake_ack_writer_.Clear(); + handshake_messages_received_.clear(); + return; + } + + // We sent dtls piggybacked and got something in return => peer does support. 
+ if (state_ == State::TENTATIVE) { + state_ = State::CONFIRMED; + } + + if (ack != nullptr) { + if (!pending_packets_.empty()) { + // Unpack the ACK attribute (a list of uint32_t) + absl::flat_hash_set acked_packets; + { + ByteBufferReader ack_reader(ack->array_view()); + uint32_t packet_hash; + while (ack_reader.ReadUInt32(&packet_hash)) { + acked_packets.insert(packet_hash); + } + } + RTC_LOG(LS_VERBOSE) << "DTLS-STUN piggybacking ACK: " + << StrJoin(acked_packets, ","); + + // Remove all acked packets from pending_packets_. + pending_packets_.Prune(acked_packets); + } + } + + // The response to the final flight of the handshake will not contain + // the DTLS data but will contain an ack. + // Must not happen on the initial server to client packet which + // has no DTLS data yet. + if (data == nullptr && ack != nullptr && state_ == State::PENDING) { + RTC_LOG(LS_INFO) << "DTLS-STUN piggybacking complete."; + state_ = State::COMPLETE; + pending_packets_.clear(); + handshake_ack_writer_.Clear(); + handshake_messages_received_.clear(); + return; + } + + if (!data || data->length() == 0) { + return; + } + + // Drop non-DTLS packets. + if (!IsDtlsPacket(data->array_view())) { + RTC_LOG(LS_WARNING) << "Dropping non-DTLS data."; + return; + } + data_recv_count_++; + + // Extract the received message id of the handshake + // from the packet and prepare the ack to be sent. + uint32_t hash = ComputeDtlsPacketHash(data->array_view()); + + // Check if we already received this packet. + if (std::find(handshake_messages_received_.begin(), + handshake_messages_received_.end(), + hash) == handshake_messages_received_.end()) { + handshake_messages_received_.push_back(hash); + handshake_ack_writer_.WriteUInt32(hash); + + if (handshake_ack_writer_.Length() > kMaxAckSize) { + // If needed, limit size of ack attribute...by removing oldest ack. + handshake_messages_received_.erase(handshake_messages_received_.begin()); + handshake_ack_writer_.Clear(); + for (const auto& val : handshake_messages_received_) { + handshake_ack_writer_.WriteUInt32(val); + } + } + + RTC_DCHECK(handshake_ack_writer_.Length() <= kMaxAckSize); + } + + dtls_data_callback_(data->array_view()); +} + +} // namespace webrtc diff --git a/p2p/dtls/dtls_stun_piggyback_controller.h b/p2p/dtls/dtls_stun_piggyback_controller.h new file mode 100644 index 0000000000..13a422d72e --- /dev/null +++ b/p2p/dtls/dtls_stun_piggyback_controller.h @@ -0,0 +1,117 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef P2P_DTLS_DTLS_STUN_PIGGYBACK_CONTROLLER_H_ +#define P2P_DTLS_DTLS_STUN_PIGGYBACK_CONTROLLER_H_ + +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/sequence_checker.h" +#include "api/transport/stun.h" +#include "p2p/dtls/dtls_utils.h" +#include "rtc_base/byte_buffer.h" +#include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +// This class is not thread safe; all methods must be called on the same thread +// as the constructor. +class DtlsStunPiggybackController { + public: + // Never ack more than 4 packets. 
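  // kMaxAckSize is a byte count: each acked packet contributes one 32-bit
  // hash to the ACK attribute, so 16 bytes covers the four most recently
  // received packets (the oldest hash is dropped first when the cap is hit).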
+ static constexpr unsigned kMaxAckSize = 16; + + // dtls_data_callback will be called with any DTLS packets received + // piggybacked. + DtlsStunPiggybackController( + absl::AnyInvocable)> + dtls_data_callback); + ~DtlsStunPiggybackController(); + + enum class State { + // We don't know if peer support DTLS piggybacked in STUN. + // We will piggyback DTLS until we get a piggybacked response + // or a STUN response with piggyback support. + TENTATIVE = 0, + // The peer supports DTLS in STUN and we continue the handshake. + CONFIRMED = 1, + // We are waiting for the final ack. Semantic differs depending + // on DTLS role. + PENDING = 2, + // We successfully completed the DTLS handshake in STUN. + COMPLETE = 3, + // The peer does not support piggybacking DTLS in STUN. + OFF = 4, + }; + + State state() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return state_; + } + + // Called by DtlsTransport when handshake is complete. + void SetDtlsHandshakeComplete(bool is_dtls_client, bool is_dtls13); + + // Intercepts DTLS packets which should go into the STUN packets during the + // handshake. + void CapturePacket(ArrayView data); + void ClearCachedPacketForTesting(); + + // Inform piggybackcontroller that a flight is complete. + void Flush(); + + // Called by Connection, when sending a STUN BINDING { REQUEST / RESPONSE } + // to obtain optional DTLS data or ACKs. + std::optional GetDataToPiggyback( + StunMessageType stun_message_type); + std::optional GetAckToPiggyback( + StunMessageType stun_message_type); + + // Called by Connection when receiving a STUN BINDING { REQUEST / RESPONSE }. + void ReportDataPiggybacked(const StunByteStringAttribute* data, + const StunByteStringAttribute* ack); + + int GetCountOfReceivedData() const { return data_recv_count_; } + + private: + State state_ RTC_GUARDED_BY(sequence_checker_) = State::TENTATIVE; + bool writing_packets_ RTC_GUARDED_BY(sequence_checker_) = false; + PacketStash pending_packets_ RTC_GUARDED_BY(sequence_checker_); + absl::AnyInvocable)> dtls_data_callback_; + absl::AnyInvocable disable_piggybacking_callback_; + + std::vector handshake_messages_received_ + RTC_GUARDED_BY(sequence_checker_); + ByteBufferWriter handshake_ack_writer_ RTC_GUARDED_BY(sequence_checker_); + + // Count of data attributes received. + int data_recv_count_ = 0; + + // In practice this will be the network thread. + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::DtlsStunPiggybackController; +} // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // P2P_DTLS_DTLS_STUN_PIGGYBACK_CONTROLLER_H_ diff --git a/p2p/dtls/dtls_stun_piggyback_controller_unittest.cc b/p2p/dtls/dtls_stun_piggyback_controller_unittest.cc new file mode 100644 index 0000000000..75a44df698 --- /dev/null +++ b/p2p/dtls/dtls_stun_piggyback_controller_unittest.cc @@ -0,0 +1,429 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "p2p/dtls/dtls_stun_piggyback_controller.h" + +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/transport/stun.h" +#include "p2p/dtls/dtls_utils.h" +#include "rtc_base/byte_buffer.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace { +// Extracted from a stock DTLS call using Wireshark. +// Each packet (apart from the last) is truncated to +// the first fragment to keep things short. + +// Based on a "server hello done" but with different msg_seq. +const std::vector dtls_flight1 = { + 0x16, 0xfe, 0xfd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // + 0x00, 0x01, // seq=1 + 0x00, 0x0c, 0x0e, 0x00, 0x00, 0x00, 0x12, 0x34, 0x00, // msg_seq=0x1234 + 0x00, 0x00, 0x00, 0x00, 0x00}; + +const std::vector dtls_flight2 = { + 0x16, 0xfe, 0xfd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // + 0x00, 0x02, // seq=2 + 0x00, 0x0c, 0x0e, 0x00, 0x00, 0x00, 0x43, 0x21, 0x00, // msg_seq=0x4321 + 0x00, 0x00, 0x00, 0x00, 0x00}; + +const std::vector dtls_flight3 = { + 0x16, 0xfe, 0xfd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // + 0x00, 0x03, // seq=3 + 0x00, 0x0c, 0x0e, 0x00, 0x00, 0x00, 0x44, 0x44, 0x00, // msg_seq=0x4444 + 0x00, 0x00, 0x00, 0x00, 0x00}; + +const std::vector dtls_flight4 = { + 0x16, 0xfe, 0xfd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // + 0x00, 0x04, // seq=4 + 0x00, 0x0c, 0x0e, 0x00, 0x00, 0x00, 0x54, 0x86, 0x00, // msg_seq=0x5486 + 0x00, 0x00, 0x00, 0x00, 0x00}; + +const std::vector empty = {}; + +std::string AsAckAttribute(const std::vector& list) { + webrtc::ByteBufferWriter writer; + for (const auto& val : list) { + writer.WriteUInt32(val); + } + return std::string(writer.DataAsStringView()); +} + +std::vector FakeDtlsPacket(uint16_t packet_number) { + auto packet = dtls_flight1; + packet[17] = static_cast(packet_number >> 8); + packet[18] = static_cast(packet_number & 255); + return packet; +} + +} // namespace + +namespace webrtc { + +using ::testing::MockFunction; +using State = DtlsStunPiggybackController::State; + +class DtlsStunPiggybackControllerTest : public ::testing::Test { + protected: + DtlsStunPiggybackControllerTest() + : client_( + [this](ArrayView data) { ClientPacketSink(data); }), + server_([this](ArrayView data) { + ServerPacketSink(data); + }) {} + + void SendClientToServer(const std::vector packet, + StunMessageType type) { + if (!packet.empty()) { + client_.CapturePacket(packet); + client_.Flush(); + } else { + client_.ClearCachedPacketForTesting(); + } + std::unique_ptr attr_data; + if (auto data = client_.GetDataToPiggyback(type)) { + attr_data = WrapInStun(STUN_ATTR_META_DTLS_IN_STUN, *data); + } + std::unique_ptr attr_ack; + if (auto ack = client_.GetAckToPiggyback(type)) { + attr_ack = WrapInStun(STUN_ATTR_META_DTLS_IN_STUN_ACK, *ack); + } + server_.ReportDataPiggybacked(attr_data.get(), attr_ack.get()); + } + void SendServerToClient(const std::vector packet, + StunMessageType type) { + if (!packet.empty()) { + server_.CapturePacket(packet); + server_.Flush(); + } else { + server_.ClearCachedPacketForTesting(); + } + std::unique_ptr attr_data; + if (auto data = server_.GetDataToPiggyback(type)) { + attr_data = WrapInStun(STUN_ATTR_META_DTLS_IN_STUN, *data); + } + std::unique_ptr attr_ack; + if (auto ack = server_.GetAckToPiggyback(type)) { + attr_ack = WrapInStun(STUN_ATTR_META_DTLS_IN_STUN_ACK, *ack); + } + client_.ReportDataPiggybacked(attr_data.get(), attr_ack.get()); + if (packet == dtls_flight4) { + // After sending flight 4, the server handshake is complete. 
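      // Roughly, the DTLS 1.2 exchange these fixtures model is:
      //   flight 1: client -> server  (ClientHello)
      //   flight 2: server -> client  (ServerHello ... ServerHelloDone)
      //   flight 3: client -> server  (ClientKeyExchange ... Finished)
      //   flight 4: server -> client  (ChangeCipherSpec, Finished)
      // so the server is done once it has sent flight 4 and the client once
      // it has received it.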
+ server_.SetDtlsHandshakeComplete(/*is_client=*/false, + /*is_dtls13=*/false); + // When receiving flight 4, client handshake is complete. + client_.SetDtlsHandshakeComplete(/*is_client=*/true, /*is_dtls13=*/false); + } + } + + std::unique_ptr WrapInStun(IceAttributeType type, + absl::string_view data) { + return std::make_unique(type, data); + } + + std::unique_ptr WrapInStun( + IceAttributeType type, + const std::vector& data) { + return std::make_unique(type, data.data(), + data.size()); + } + + void DisableSupport(DtlsStunPiggybackController& client_or_server) { + ASSERT_EQ(client_or_server.state(), State::TENTATIVE); + client_or_server.ReportDataPiggybacked(nullptr, nullptr); + ASSERT_EQ(client_or_server.state(), State::OFF); + } + + DtlsStunPiggybackController client_; + DtlsStunPiggybackController server_; + + MOCK_METHOD(void, ClientPacketSink, (ArrayView)); + MOCK_METHOD(void, ServerPacketSink, (ArrayView)); +}; + +TEST_F(DtlsStunPiggybackControllerTest, BasicHandshake) { + // Flight 1+2 + SendClientToServer(dtls_flight1, STUN_BINDING_REQUEST); + EXPECT_EQ(server_.state(), State::CONFIRMED); + SendServerToClient(dtls_flight2, STUN_BINDING_RESPONSE); + EXPECT_EQ(client_.state(), State::CONFIRMED); + + // Flight 3+4 + SendClientToServer(dtls_flight3, STUN_BINDING_REQUEST); + SendServerToClient(dtls_flight4, STUN_BINDING_RESPONSE); + EXPECT_EQ(server_.state(), State::PENDING); + EXPECT_EQ(client_.state(), State::PENDING); + + // Post-handshake ACK + SendServerToClient(empty, STUN_BINDING_REQUEST); + SendClientToServer(empty, STUN_BINDING_RESPONSE); + EXPECT_EQ(server_.state(), State::COMPLETE); + EXPECT_EQ(client_.state(), State::COMPLETE); +} + +TEST_F(DtlsStunPiggybackControllerTest, FirstClientPacketLost) { + // Client to server got lost (or arrives late) + // Flight 1 + SendServerToClient(empty, STUN_BINDING_REQUEST); + SendClientToServer(dtls_flight1, STUN_BINDING_RESPONSE); + EXPECT_EQ(server_.state(), State::CONFIRMED); + EXPECT_EQ(client_.state(), State::CONFIRMED); + + // Flight 2+3 + SendServerToClient(dtls_flight2, STUN_BINDING_REQUEST); + SendClientToServer(dtls_flight3, STUN_BINDING_RESPONSE); + EXPECT_EQ(server_.state(), State::CONFIRMED); + EXPECT_EQ(client_.state(), State::CONFIRMED); + + // Flight 4 + SendServerToClient(dtls_flight4, STUN_BINDING_REQUEST); + SendClientToServer(empty, STUN_BINDING_RESPONSE); + EXPECT_EQ(server_.state(), State::COMPLETE); + EXPECT_EQ(client_.state(), State::PENDING); + + // Post-handshake ACK + SendServerToClient(empty, STUN_BINDING_REQUEST); + EXPECT_EQ(client_.state(), State::COMPLETE); +} + +TEST_F(DtlsStunPiggybackControllerTest, NotSupportedByServer) { + DisableSupport(server_); + + // Flight 1 + SendClientToServer(dtls_flight1, STUN_BINDING_REQUEST); + SendServerToClient(empty, STUN_BINDING_RESPONSE); + EXPECT_EQ(client_.state(), State::OFF); +} + +TEST_F(DtlsStunPiggybackControllerTest, NotSupportedByServerClientReceives) { + DisableSupport(server_); + + // Client to server got lost (or arrives late) + SendServerToClient(empty, STUN_BINDING_REQUEST); + EXPECT_EQ(client_.state(), State::OFF); +} + +TEST_F(DtlsStunPiggybackControllerTest, NotSupportedByClient) { + DisableSupport(client_); + + SendServerToClient(empty, STUN_BINDING_REQUEST); + SendClientToServer(empty, STUN_BINDING_RESPONSE); + EXPECT_EQ(server_.state(), State::OFF); +} + +TEST_F(DtlsStunPiggybackControllerTest, SomeRequestsDoNotGoThrough) { + // Client to server got lost (or arrives late) + // Flight 1 + SendServerToClient(empty, STUN_BINDING_REQUEST); + 
SendClientToServer(dtls_flight1, STUN_BINDING_RESPONSE); + EXPECT_EQ(server_.state(), State::CONFIRMED); + EXPECT_EQ(client_.state(), State::CONFIRMED); + + // Flight 1+2, server sent request got lost. + SendClientToServer(dtls_flight1, STUN_BINDING_REQUEST); + SendServerToClient(dtls_flight2, STUN_BINDING_RESPONSE); + EXPECT_EQ(server_.state(), State::CONFIRMED); + EXPECT_EQ(client_.state(), State::CONFIRMED); + + // Flight 3+4 + SendClientToServer(dtls_flight3, STUN_BINDING_REQUEST); + SendServerToClient(dtls_flight4, STUN_BINDING_RESPONSE); + EXPECT_EQ(server_.state(), State::PENDING); + EXPECT_EQ(client_.state(), State::PENDING); + + // Post-handshake ACK + SendClientToServer(empty, STUN_BINDING_REQUEST); + SendServerToClient(empty, STUN_BINDING_RESPONSE); + EXPECT_EQ(server_.state(), State::COMPLETE); + EXPECT_EQ(client_.state(), State::COMPLETE); +} + +TEST_F(DtlsStunPiggybackControllerTest, LossOnPostHandshakeAck) { + // Flight 1+2 + SendClientToServer(dtls_flight1, STUN_BINDING_REQUEST); + EXPECT_EQ(server_.state(), State::CONFIRMED); + SendServerToClient(dtls_flight2, STUN_BINDING_RESPONSE); + EXPECT_EQ(client_.state(), State::CONFIRMED); + + // Flight 3+4 + SendClientToServer(dtls_flight3, STUN_BINDING_REQUEST); + SendServerToClient(dtls_flight4, STUN_BINDING_RESPONSE); + EXPECT_EQ(server_.state(), State::PENDING); + EXPECT_EQ(client_.state(), State::PENDING); + + // Post-handshake ACK. Client to server gets lost + SendServerToClient(empty, STUN_BINDING_REQUEST); + SendClientToServer(empty, STUN_BINDING_RESPONSE); + EXPECT_EQ(server_.state(), State::COMPLETE); + EXPECT_EQ(client_.state(), State::COMPLETE); +} + +TEST_F(DtlsStunPiggybackControllerTest, + UnsupportedStateAfterFallbackHandshakeRemainsOff) { + DisableSupport(client_); + DisableSupport(server_); + + // Set DTLS complete after normal handshake. 
+ client_.SetDtlsHandshakeComplete(/*is_client=*/true, /*is_dtls13=*/false); + EXPECT_EQ(client_.state(), State::OFF); + server_.SetDtlsHandshakeComplete(/*is_client=*/false, /*is_dtls13=*/false); + EXPECT_EQ(server_.state(), State::OFF); +} + +TEST_F(DtlsStunPiggybackControllerTest, BasicHandshakeAckData) { + EXPECT_EQ(server_.GetAckToPiggyback(STUN_BINDING_RESPONSE), ""); + EXPECT_EQ(client_.GetAckToPiggyback(STUN_BINDING_REQUEST), ""); + + // Flight 1+2 + SendClientToServer(dtls_flight1, STUN_BINDING_REQUEST); + SendServerToClient(dtls_flight2, STUN_BINDING_RESPONSE); + EXPECT_EQ(server_.GetAckToPiggyback(STUN_BINDING_REQUEST), + AsAckAttribute({ComputeDtlsPacketHash(dtls_flight1)})); + EXPECT_EQ(client_.GetAckToPiggyback(STUN_BINDING_RESPONSE), + AsAckAttribute({ComputeDtlsPacketHash(dtls_flight2)})); + + // Flight 3+4 + SendClientToServer(dtls_flight3, STUN_BINDING_REQUEST); + SendServerToClient(dtls_flight4, STUN_BINDING_RESPONSE); + EXPECT_EQ(server_.GetAckToPiggyback(STUN_BINDING_RESPONSE), + AsAckAttribute({ + ComputeDtlsPacketHash(dtls_flight1), + ComputeDtlsPacketHash(dtls_flight3), + })); + EXPECT_EQ(client_.GetAckToPiggyback(STUN_BINDING_REQUEST), + AsAckAttribute({ + ComputeDtlsPacketHash(dtls_flight2), + ComputeDtlsPacketHash(dtls_flight4), + })); + + // Post-handshake ACK + SendServerToClient(empty, STUN_BINDING_REQUEST); + SendClientToServer(empty, STUN_BINDING_RESPONSE); + EXPECT_EQ(server_.state(), State::COMPLETE); + EXPECT_EQ(client_.state(), State::COMPLETE); + EXPECT_EQ(server_.GetAckToPiggyback(STUN_BINDING_RESPONSE), std::nullopt); + EXPECT_EQ(client_.GetAckToPiggyback(STUN_BINDING_REQUEST), std::nullopt); +} + +TEST_F(DtlsStunPiggybackControllerTest, AckDataNoDuplicates) { + // Flight 1+2 + SendClientToServer(dtls_flight1, STUN_BINDING_REQUEST); + EXPECT_EQ(server_.GetAckToPiggyback(STUN_BINDING_REQUEST), + AsAckAttribute({ComputeDtlsPacketHash(dtls_flight1)})); + SendClientToServer(dtls_flight3, STUN_BINDING_REQUEST); + EXPECT_EQ(server_.GetAckToPiggyback(STUN_BINDING_REQUEST), + AsAckAttribute({ + ComputeDtlsPacketHash(dtls_flight1), + ComputeDtlsPacketHash(dtls_flight3), + })); + + // Receive Flight 1 again, no change expected. + SendClientToServer(dtls_flight1, STUN_BINDING_REQUEST); + EXPECT_EQ(server_.GetAckToPiggyback(STUN_BINDING_REQUEST), + AsAckAttribute({ + ComputeDtlsPacketHash(dtls_flight1), + ComputeDtlsPacketHash(dtls_flight3), + })); +} + +TEST_F(DtlsStunPiggybackControllerTest, IgnoresNonDtlsData) { + std::vector ascii = {0x64, 0x72, 0x6f, 0x70, 0x6d, 0x65}; + + EXPECT_CALL(*this, ServerPacketSink).Times(0); + server_.ReportDataPiggybacked( + WrapInStun(STUN_ATTR_META_DTLS_IN_STUN, ascii).get(), nullptr); + EXPECT_EQ(0, server_.GetCountOfReceivedData()); +} + +TEST_F(DtlsStunPiggybackControllerTest, DontSendAckedPackets) { + server_.CapturePacket(dtls_flight1); + server_.Flush(); + EXPECT_TRUE(server_.GetDataToPiggyback(STUN_BINDING_REQUEST).has_value()); + server_.ReportDataPiggybacked( + nullptr, WrapInStun(STUN_ATTR_META_DTLS_IN_STUN_ACK, + AsAckAttribute({ComputeDtlsPacketHash(dtls_flight1)})) + .get()); + // No unacked packet exists. 
+ EXPECT_FALSE(server_.GetDataToPiggyback(STUN_BINDING_REQUEST).has_value()); +} + +TEST_F(DtlsStunPiggybackControllerTest, LimitAckSize) { + std::vector dtls_flight5 = FakeDtlsPacket(0x5487); + + server_.ReportDataPiggybacked( + WrapInStun(STUN_ATTR_META_DTLS_IN_STUN, dtls_flight1).get(), nullptr); + EXPECT_EQ(server_.GetAckToPiggyback(STUN_BINDING_REQUEST)->size(), 4u); + server_.ReportDataPiggybacked( + WrapInStun(STUN_ATTR_META_DTLS_IN_STUN, dtls_flight2).get(), nullptr); + EXPECT_EQ(server_.GetAckToPiggyback(STUN_BINDING_REQUEST)->size(), 8u); + server_.ReportDataPiggybacked( + WrapInStun(STUN_ATTR_META_DTLS_IN_STUN, dtls_flight3).get(), nullptr); + EXPECT_EQ(server_.GetAckToPiggyback(STUN_BINDING_REQUEST)->size(), 12u); + server_.ReportDataPiggybacked( + WrapInStun(STUN_ATTR_META_DTLS_IN_STUN, dtls_flight4).get(), nullptr); + EXPECT_EQ(server_.GetAckToPiggyback(STUN_BINDING_REQUEST)->size(), 16u); + + // Limit size of ack so that it does not grow unbounded. + server_.ReportDataPiggybacked( + WrapInStun(STUN_ATTR_META_DTLS_IN_STUN, dtls_flight5).get(), nullptr); + EXPECT_EQ(server_.GetAckToPiggyback(STUN_BINDING_REQUEST)->size(), + DtlsStunPiggybackController::kMaxAckSize); + EXPECT_EQ(server_.GetAckToPiggyback(STUN_BINDING_REQUEST), + AsAckAttribute({ + ComputeDtlsPacketHash(dtls_flight2), + ComputeDtlsPacketHash(dtls_flight3), + ComputeDtlsPacketHash(dtls_flight4), + ComputeDtlsPacketHash(dtls_flight5), + })); +} + +TEST_F(DtlsStunPiggybackControllerTest, MultiPacketRoundRobin) { + // Let's pretend that a flight is 3 packets... + server_.CapturePacket(dtls_flight1); + server_.CapturePacket(dtls_flight2); + server_.CapturePacket(dtls_flight3); + server_.Flush(); + EXPECT_EQ(server_.GetDataToPiggyback(STUN_BINDING_REQUEST), + std::string(dtls_flight1.begin(), dtls_flight1.end())); + EXPECT_EQ(server_.GetDataToPiggyback(STUN_BINDING_REQUEST), + std::string(dtls_flight2.begin(), dtls_flight2.end())); + EXPECT_EQ(server_.GetDataToPiggyback(STUN_BINDING_REQUEST), + std::string(dtls_flight3.begin(), dtls_flight3.end())); + + server_.ReportDataPiggybacked( + nullptr, WrapInStun(STUN_ATTR_META_DTLS_IN_STUN_ACK, + AsAckAttribute({ComputeDtlsPacketHash(dtls_flight1)})) + .get()); + + EXPECT_EQ(server_.GetDataToPiggyback(STUN_BINDING_REQUEST), + std::string(dtls_flight2.begin(), dtls_flight2.end())); + EXPECT_EQ(server_.GetDataToPiggyback(STUN_BINDING_REQUEST), + std::string(dtls_flight3.begin(), dtls_flight3.end())); + + server_.ReportDataPiggybacked( + nullptr, WrapInStun(STUN_ATTR_META_DTLS_IN_STUN_ACK, + AsAckAttribute({ComputeDtlsPacketHash(dtls_flight3)})) + .get()); + + EXPECT_EQ(server_.GetDataToPiggyback(STUN_BINDING_REQUEST), + std::string(dtls_flight2.begin(), dtls_flight2.end())); + EXPECT_EQ(server_.GetDataToPiggyback(STUN_BINDING_REQUEST), + std::string(dtls_flight2.begin(), dtls_flight2.end())); +} + +} // namespace webrtc diff --git a/p2p/dtls/dtls_transport.cc b/p2p/dtls/dtls_transport.cc new file mode 100644 index 0000000000..f8d118a8ac --- /dev/null +++ b/p2p/dtls/dtls_transport.cc @@ -0,0 +1,1152 @@ +/* + * Copyright 2011 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "p2p/dtls/dtls_transport.h" + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/crypto/crypto_options.h" +#include "api/dtls_transport_interface.h" +#include "api/rtc_error.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/transport/ecn_marking.h" +#include "api/transport/stun.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" +#include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/packet_transport_internal.h" +#include "p2p/dtls/dtls_stun_piggyback_callbacks.h" +#include "p2p/dtls/dtls_stun_piggyback_controller.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "p2p/dtls/dtls_utils.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_route.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/stream.h" +#include "rtc_base/thread.h" +#include "rtc_base/time_utils.h" + +namespace webrtc { + +template +void AbslStringify(Sink& sink, webrtc::DtlsTransportState state) { + switch (state) { + case webrtc::DtlsTransportState::kNew: + sink.Append("kNew"); + break; + case webrtc::DtlsTransportState::kConnecting: + sink.Append("kConnecting"); + break; + case webrtc::DtlsTransportState::kConnected: + sink.Append("kConnected"); + break; + case webrtc::DtlsTransportState::kClosed: + sink.Append("kClosed"); + break; + case webrtc::DtlsTransportState::kFailed: + sink.Append("kFailed"); + break; + case webrtc::DtlsTransportState::kNumValues: + sink.Append("kNumValues"); + break; + } +} + +// We don't pull the RTP constants from rtputils.h, to avoid a layer violation. +constexpr size_t kMinRtpPacketLen = 12; + +// Maximum number of pending packets in the queue. Packets are read immediately +// after they have been written, so a capacity of "1" is sufficient. +// +// However, this bug seems to indicate that's not the case: crbug.com/1063834 +// So, temporarily increasing it to 2 to see if that makes a difference. +constexpr size_t kMaxPendingPackets = 2; + +// Minimum and maximum values for the initial DTLS handshake timeout. We'll pick +// an initial timeout based on ICE RTT estimates, but clamp it to this range. +constexpr int kMinDtlsHandshakeTimeoutMs = 50; +constexpr int kMaxDtlsHandshakeTimeoutMs = 3000; +// This effectively disables the handshake timeout. 
+constexpr int kDisabledHandshakeTimeoutMs = 3600 * 1000 * 24; + +constexpr uint32_t kMaxCachedClientHello = 4; + +static bool IsRtpPacket(ArrayView payload) { + const uint8_t* u = payload.data(); + return (payload.size() >= kMinRtpPacketLen && (u[0] & 0xC0) == 0x80); +} + +StreamInterfaceChannel::StreamInterfaceChannel( + webrtc::IceTransportInternal* ice_transport) + : ice_transport_(ice_transport), + state_(webrtc::SS_OPEN), + packets_(kMaxPendingPackets, webrtc::kMaxDtlsPacketLen) {} + +void StreamInterfaceChannel::SetDtlsStunPiggybackController( + webrtc::DtlsStunPiggybackController* dtls_stun_piggyback_controller) { + dtls_stun_piggyback_controller_ = dtls_stun_piggyback_controller; +} + +StreamResult StreamInterfaceChannel::Read(ArrayView buffer, + size_t& read, + int& /* error */) { + RTC_DCHECK_RUN_ON(&callback_sequence_); + + if (state_ == webrtc::SS_CLOSED) + return webrtc::SR_EOS; + if (state_ == webrtc::SS_OPENING) + return webrtc::SR_BLOCK; + + if (!packets_.ReadFront(buffer.data(), buffer.size(), &read)) { + return webrtc::SR_BLOCK; + } + + return webrtc::SR_SUCCESS; +} + +StreamResult StreamInterfaceChannel::Write(ArrayView data, + size_t& written, + int& /* error */) { + RTC_DCHECK_RUN_ON(&callback_sequence_); + + // If we use DTLS-in-STUN, DTLS packets will be sent as part of STUN + // packets, they are captured by the controller. + if (dtls_stun_piggyback_controller_) { + dtls_stun_piggyback_controller_->CapturePacket(data); + } + + AsyncSocketPacketOptions packet_options; + ice_transport_->SendPacket(reinterpret_cast(data.data()), + data.size(), packet_options); + written = data.size(); + return webrtc::SR_SUCCESS; +} + +bool StreamInterfaceChannel::Flush() { + RTC_DCHECK_RUN_ON(&callback_sequence_); + + if (dtls_stun_piggyback_controller_) { + dtls_stun_piggyback_controller_->Flush(); + } + return false; +} + +bool StreamInterfaceChannel::OnPacketReceived(const char* data, size_t size) { + RTC_DCHECK_RUN_ON(&callback_sequence_); + if (packets_.size() > 0) { + RTC_LOG(LS_WARNING) << "Packet already in queue."; + } + bool ret = packets_.WriteBack(data, size, NULL); + if (!ret) { + // Somehow we received another packet before the SSLStreamAdapter read the + // previous one out of our temporary buffer. In this case, we'll log an + // error and still signal the read event, hoping that it will read the + // packet currently in packets_. 
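IsRtpPacket above is the usual first-byte demultiplexing check: with STUN already demuxed by the ICE layer, the remaining traffic is told apart by its leading byte, along the lines of RFC 7983, plus a 12-byte minimum for the RTP header (kMinRtpPacketLen). A minimal standalone sketch of those ranges; the real checks in this patch are IsRtpPacket here and webrtc::IsDtlsPacket from dtls_utils:

#include <cstddef>
#include <cstdint>

enum class Demux { kStun, kDtls, kRtp, kUnknown };

// First-byte ranges per RFC 7983: 0-3 STUN, 20-63 DTLS, 128-191 RTP/RTCP.
// (b & 0xC0) == 0x80 in IsRtpPacket is exactly the 128-191 range.
Demux ClassifyPacket(const uint8_t* data, size_t len) {
  if (len == 0) return Demux::kUnknown;
  const uint8_t b = data[0];
  if (b <= 3) return Demux::kStun;
  if (b >= 20 && b <= 63) return Demux::kDtls;
  if (b >= 128 && b <= 191 && len >= 12) return Demux::kRtp;
  return Demux::kUnknown;
}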
+ RTC_LOG(LS_ERROR) << "Failed to write packet to queue."; + } + FireEvent(webrtc::SE_READ, 0); + return ret; +} + +StreamState StreamInterfaceChannel::GetState() const { + RTC_DCHECK_RUN_ON(&callback_sequence_); + return state_; +} + +void StreamInterfaceChannel::Close() { + RTC_DCHECK_RUN_ON(&callback_sequence_); + packets_.Clear(); + state_ = webrtc::SS_CLOSED; +} + +DtlsTransportInternalImpl::DtlsTransportInternalImpl( + webrtc::IceTransportInternal* ice_transport, + const webrtc::CryptoOptions& crypto_options, + webrtc::RtcEventLog* event_log, + webrtc::SSLProtocolVersion max_version) + : component_(ice_transport->component()), + ice_transport_(ice_transport), + downward_(nullptr), + srtp_ciphers_(crypto_options.GetSupportedDtlsSrtpCryptoSuites()), + ssl_max_version_(max_version), + event_log_(event_log), + dtls_stun_piggyback_controller_( + [this](ArrayView piggybacked_dtls_packet) { + if (piggybacked_dtls_callback_ == nullptr) { + return; + } + piggybacked_dtls_callback_( + this, ReceivedIpPacket(piggybacked_dtls_packet, + webrtc::SocketAddress())); + }) { + RTC_DCHECK(ice_transport_); + ConnectToIceTransport(); + if (auto field_trials = ice_transport_->field_trials()) { + dtls_in_stun_ = field_trials->IsEnabled("WebRTC-IceHandshakeDtls"); + } else { + // TODO (BUG=webrtc:367395350): Fix upstream testcase(s). + RTC_DLOG(LS_ERROR) << "ice_transport_>field_trials() is NULL"; + dtls_in_stun_ = false; + } +} + +DtlsTransportInternalImpl::~DtlsTransportInternalImpl() { + if (ice_transport_) { + ice_transport_->ResetDtlsStunPiggybackCallbacks(); + ice_transport_->DeregisterReceivedPacketCallback(this); + } +} + +webrtc::DtlsTransportState DtlsTransportInternalImpl::dtls_state() const { + return dtls_state_; +} + +const std::string& DtlsTransportInternalImpl::transport_name() const { + return ice_transport_->transport_name(); +} + +int DtlsTransportInternalImpl::component() const { + return component_; +} + +bool DtlsTransportInternalImpl::IsDtlsActive() const { + return dtls_active_; +} + +bool DtlsTransportInternalImpl::SetLocalCertificate( + const scoped_refptr& certificate) { + if (dtls_active_) { + if (certificate == local_certificate_) { + // This may happen during renegotiation. + RTC_LOG(LS_INFO) << ToString() << ": Ignoring identical DTLS identity"; + return true; + } else { + RTC_LOG(LS_ERROR) << ToString() + << ": Can't change DTLS local identity in this state"; + return false; + } + } + + if (certificate) { + local_certificate_ = certificate; + dtls_active_ = true; + } else { + RTC_LOG(LS_INFO) << ToString() + << ": NULL DTLS identity supplied. 
Not doing DTLS"; + } + + return true; +} + +scoped_refptr +DtlsTransportInternalImpl::GetLocalCertificate() const { + return local_certificate_; +} + +bool DtlsTransportInternalImpl::SetDtlsRole(webrtc::SSLRole role) { + if (dtls_) { + RTC_DCHECK(dtls_role_); + if (*dtls_role_ != role) { + RTC_LOG(LS_ERROR) + << "SSL Role can't be reversed after the session is setup."; + return false; + } + return true; + } + + dtls_role_ = role; + return true; +} + +bool DtlsTransportInternalImpl::GetDtlsRole(webrtc::SSLRole* role) const { + if (!dtls_role_) { + return false; + } + *role = *dtls_role_; + return true; +} + +bool DtlsTransportInternalImpl::GetSslCipherSuite(int* cipher) const { + if (dtls_state() != webrtc::DtlsTransportState::kConnected) { + return false; + } + + return dtls_->GetSslCipherSuite(cipher); +} + +std::optional +DtlsTransportInternalImpl::GetTlsCipherSuiteName() const { + if (dtls_state() != webrtc::DtlsTransportState::kConnected) { + return std::nullopt; + } + return dtls_->GetTlsCipherSuiteName(); +} + +webrtc::RTCError DtlsTransportInternalImpl::SetRemoteParameters( + absl::string_view digest_alg, + const uint8_t* digest, + size_t digest_len, + std::optional role) { + Buffer remote_fingerprint_value(digest, digest_len); + bool is_dtls_restart = + dtls_active_ && remote_fingerprint_value_ != remote_fingerprint_value; + // Set SSL role. Role must be set before fingerprint is applied, which + // initiates DTLS setup. + if (role) { + if (is_dtls_restart) { + dtls_role_ = *role; + } else { + if (!SetDtlsRole(*role)) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Failed to set SSL role for the transport."); + } + } + } + // Apply remote fingerprint. + if (!SetRemoteFingerprint(digest_alg, digest, digest_len)) { + return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, + "Failed to apply remote fingerprint."); + } + return webrtc::RTCError::OK(); +} + +bool DtlsTransportInternalImpl::SetRemoteFingerprint( + absl::string_view digest_alg, + const uint8_t* digest, + size_t digest_len) { + Buffer remote_fingerprint_value(digest, digest_len); + + // Once we have the local certificate, the same remote fingerprint can be set + // multiple times. + if (dtls_active_ && remote_fingerprint_value_ == remote_fingerprint_value && + !digest_alg.empty()) { + // This may happen during renegotiation. + RTC_LOG(LS_INFO) << ToString() + << ": Ignoring identical remote DTLS fingerprint"; + return true; + } + + // If the other side doesn't support DTLS, turn off `dtls_active_`. + // TODO(deadbeef): Remove this. It's dangerous, because it relies on higher + // level code to ensure DTLS is actually used, but there are tests that + // depend on it, for the case where an m= section is rejected. In that case + // SetRemoteFingerprint shouldn't even be called though. + if (digest_alg.empty()) { + RTC_DCHECK(!digest_len); + RTC_LOG(LS_INFO) << ToString() << ": Other side didn't support DTLS."; + dtls_active_ = false; + return true; + } + + // Otherwise, we must have a local certificate before setting remote + // fingerprint. 
+ if (!dtls_active_) { + RTC_LOG(LS_ERROR) << ToString() + << ": Can't set DTLS remote settings in this state."; + return false; + } + + // At this point we know we are doing DTLS + bool fingerprint_changing = remote_fingerprint_value_.size() > 0u; + remote_fingerprint_value_ = std::move(remote_fingerprint_value); + remote_fingerprint_algorithm_ = std::string(digest_alg); + + if (dtls_ && !fingerprint_changing) { + // This can occur if DTLS is set up before a remote fingerprint is + // received. For instance, if we set up DTLS due to receiving an early + // ClientHello. + webrtc::SSLPeerCertificateDigestError err = dtls_->SetPeerCertificateDigest( + remote_fingerprint_algorithm_, remote_fingerprint_value_); + if (err != webrtc::SSLPeerCertificateDigestError::NONE) { + RTC_LOG(LS_ERROR) << ToString() + << ": Couldn't set DTLS certificate digest."; + set_dtls_state(webrtc::DtlsTransportState::kFailed); + // If the error is "verification failed", don't return false, because + // this means the fingerprint was formatted correctly but didn't match + // the certificate from the DTLS handshake. Thus the DTLS state should go + // to "failed", but SetRemoteDescription shouldn't fail. + return err == webrtc::SSLPeerCertificateDigestError::VERIFICATION_FAILED; + } + return true; + } + + // If the fingerprint is changing, we'll tear down the DTLS association and + // create a new one, resetting our state. + if (dtls_ && fingerprint_changing) { + dtls_.reset(nullptr); + set_dtls_state(webrtc::DtlsTransportState::kNew); + set_writable(false); + } + + if (!SetupDtls()) { + set_dtls_state(webrtc::DtlsTransportState::kFailed); + return false; + } + + return true; +} + +std::unique_ptr +DtlsTransportInternalImpl::GetRemoteSSLCertChain() const { + if (!dtls_) { + return nullptr; + } + + return dtls_->GetPeerSSLCertChain(); +} + +bool DtlsTransportInternalImpl::ExportSrtpKeyingMaterial( + ZeroOnFreeBuffer& keying_material) { + return dtls_ ? dtls_->ExportSrtpKeyingMaterial(keying_material) : false; +} + +bool DtlsTransportInternalImpl::SetupDtls() { + RTC_DCHECK(dtls_role_); + + dtls_in_stun_ = ice_transport_->config().dtls_handshake_in_stun; + { + auto downward = std::make_unique(ice_transport_); + StreamInterfaceChannel* downward_ptr = downward.get(); + + if (dtls_in_stun_) { + downward_ptr->SetDtlsStunPiggybackController( + &dtls_stun_piggyback_controller_); + } + dtls_ = webrtc::SSLStreamAdapter::Create( + std::move(downward), + [this](SSLHandshakeError error) { OnDtlsHandshakeError(error); }, + ice_transport_->field_trials()); + if (!dtls_) { + RTC_LOG(LS_ERROR) << ToString() << ": Failed to create DTLS adapter."; + return false; + } + downward_ = downward_ptr; + } + + // TODO(jonaso,webrtc:367395350): Add more clever handling of MTU + // (such as automatic packetization smoothing). + if (dtls_in_stun_) { + // - This is only needed when using PQC but we don't know that here. + // - 800 is sufficiently small so that dtls pqc handshake packets + // can get put into STUN attributes. 
+ const int kDtlsMtu = 800; + dtls_->SetMTU(kDtlsMtu); + } + + dtls_->SetIdentity(local_certificate_->identity()->Clone()); + dtls_->SetMaxProtocolVersion(ssl_max_version_); + dtls_->SetServerRole(*dtls_role_); + dtls_->SetEventCallback( + [this](int events, int err) { OnDtlsEvent(events, err); }); + if (remote_fingerprint_value_.size() && + dtls_->SetPeerCertificateDigest(remote_fingerprint_algorithm_, + remote_fingerprint_value_) != + webrtc::SSLPeerCertificateDigestError::NONE) { + RTC_LOG(LS_ERROR) << ToString() + << ": Couldn't set DTLS certificate digest."; + return false; + } + + // Set up DTLS-SRTP, if it's been enabled. + if (!srtp_ciphers_.empty()) { + if (!dtls_->SetDtlsSrtpCryptoSuites(srtp_ciphers_)) { + RTC_LOG(LS_ERROR) << ToString() << ": Couldn't set DTLS-SRTP ciphers."; + return false; + } + } else { + RTC_LOG(LS_INFO) << ToString() << ": Not using DTLS-SRTP."; + } + + RTC_LOG(LS_INFO) << ToString() + << ": DTLS setup complete, dtls_in_stun: " << dtls_in_stun_; + + // If the underlying ice_transport is already writable at this point, we may + // be able to start DTLS right away. + MaybeStartDtls(); + return true; +} + +bool DtlsTransportInternalImpl::GetSrtpCryptoSuite(int* cipher) const { + if (dtls_state() != webrtc::DtlsTransportState::kConnected) { + return false; + } + + return dtls_->GetDtlsSrtpCryptoSuite(cipher); +} + +bool DtlsTransportInternalImpl::GetSslVersionBytes(int* version) const { + if (dtls_state() != webrtc::DtlsTransportState::kConnected) { + return false; + } + + return dtls_->GetSslVersionBytes(version); +} + +uint16_t DtlsTransportInternalImpl::GetSslPeerSignatureAlgorithm() const { + if (dtls_state() != webrtc::DtlsTransportState::kConnected) { + return webrtc::kSslSignatureAlgorithmUnknown; // "not applicable" + } + return dtls_->GetPeerSignatureAlgorithm(); +} + +// Called from upper layers to send a media packet. +int DtlsTransportInternalImpl::SendPacket( + const char* data, + size_t size, + const AsyncSocketPacketOptions& options, + int flags) { + if (!dtls_active_) { + // Not doing DTLS. + return ice_transport_->SendPacket(data, size, options); + } + + switch (dtls_state()) { + case webrtc::DtlsTransportState::kNew: + // Can't send data until the connection is active. + // TODO(ekr@rtfm.com): assert here if dtls_ is NULL? + return -1; + case webrtc::DtlsTransportState::kConnecting: + // Can't send data until the connection is active. + return -1; + case webrtc::DtlsTransportState::kConnected: + if (flags & webrtc::PF_SRTP_BYPASS) { + RTC_DCHECK(!srtp_ciphers_.empty()); + if (!IsRtpPacket( + MakeArrayView(reinterpret_cast(data), size))) { + return -1; + } + + return ice_transport_->SendPacket(data, size, options); + } else { + size_t written; + int error; + return (dtls_->WriteAll( + MakeArrayView(reinterpret_cast(data), size), + written, error) == webrtc::SR_SUCCESS) + ? static_cast(size) + : -1; + } + case webrtc::DtlsTransportState::kFailed: + // Can't send anything when we're failed. + RTC_LOG(LS_ERROR) << ToString() + << ": Couldn't send packet due to " + "webrtc::DtlsTransportState::kFailed."; + return -1; + case webrtc::DtlsTransportState::kClosed: + // Can't send anything when we're closed. 
+ RTC_LOG(LS_ERROR) << ToString() + << ": Couldn't send packet due to " + "webrtc::DtlsTransportState::kClosed."; + return -1; + default: + RTC_DCHECK_NOTREACHED(); + return -1; + } +} + +webrtc::IceTransportInternal* DtlsTransportInternalImpl::ice_transport() { + return ice_transport_; +} + +bool DtlsTransportInternalImpl::IsDtlsConnected() { + return dtls_ && dtls_->IsTlsConnected(); +} + +bool DtlsTransportInternalImpl::receiving() const { + return receiving_; +} + +bool DtlsTransportInternalImpl::writable() const { + return writable_; +} + +int DtlsTransportInternalImpl::GetError() { + return ice_transport_->GetError(); +} + +std::optional DtlsTransportInternalImpl::network_route() + const { + return ice_transport_->network_route(); +} + +bool DtlsTransportInternalImpl::GetOption(webrtc::Socket::Option opt, + int* value) { + return ice_transport_->GetOption(opt, value); +} + +int DtlsTransportInternalImpl::SetOption(webrtc::Socket::Option opt, + int value) { + return ice_transport_->SetOption(opt, value); +} + +void DtlsTransportInternalImpl::ConnectToIceTransport() { + RTC_DCHECK(ice_transport_); + ice_transport_->SignalWritableState.connect( + this, &DtlsTransportInternalImpl::OnWritableState); + ice_transport_->RegisterReceivedPacketCallback( + this, + [&](PacketTransportInternal* transport, const ReceivedIpPacket& packet) { + OnReadPacket(transport, packet, /* piggybacked= */ false); + }); + + ice_transport_->SignalSentPacket.connect( + this, &DtlsTransportInternalImpl::OnSentPacket); + ice_transport_->SignalReadyToSend.connect( + this, &DtlsTransportInternalImpl::OnReadyToSend); + ice_transport_->SignalReceivingState.connect( + this, &DtlsTransportInternalImpl::OnReceivingState); + ice_transport_->SignalNetworkRouteChanged.connect( + this, &DtlsTransportInternalImpl::OnNetworkRouteChanged); + ice_transport_->SetDtlsStunPiggybackCallbacks( + webrtc::DtlsStunPiggybackCallbacks( + [&](auto stun_message_type) { + std::optional data; + std::optional ack; + if (dtls_in_stun_) { + data = dtls_stun_piggyback_controller_.GetDataToPiggyback( + stun_message_type); + ack = dtls_stun_piggyback_controller_.GetAckToPiggyback( + stun_message_type); + } + return std::make_pair(data, ack); + }, + [&](auto data, auto ack) { + if (!dtls_in_stun_) { + return; + } + dtls_stun_piggyback_controller_.ReportDataPiggybacked(data, ack); + })); + SetPiggybackDtlsDataCallback([this](PacketTransportInternal* transport, + const ReceivedIpPacket& packet) { + RTC_DCHECK(dtls_active_); + RTC_DCHECK(webrtc::IsDtlsPacket(packet.payload())); + if (!dtls_active_) { + // Not doing DTLS. 
+ return; + } + if (!webrtc::IsDtlsPacket(packet.payload())) { + return; + } + OnReadPacket(ice_transport_, packet, /* piggybacked= */ true); + }); +} + +// The state transition logic here is as follows: +// (1) If we're not doing DTLS-SRTP, then the state is just the +// state of the underlying impl() +// (2) If we're doing DTLS-SRTP: +// - Prior to the DTLS handshake, the state is neither receiving nor +// writable +// - When the impl goes writable for the first time we +// start the DTLS handshake +// - Once the DTLS handshake completes, the state is that of the +// impl again +void DtlsTransportInternalImpl::OnWritableState( + webrtc::PacketTransportInternal* transport) { + RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK(transport == ice_transport_); + RTC_LOG(LS_INFO) << ToString() << ": ice_transport writable state changed to " + << ice_transport_->writable() + << " dtls_state: " << dtls_state(); + + if (!ice_has_been_writable_) { + // Ice starts as not writable. The first time this method is called, it + // should be when ice change to writable = true. + RTC_DCHECK(ice_transport_->writable()); + } + bool first_ice_writable = !ice_has_been_writable_; + ice_has_been_writable_ = true; + + if (!dtls_active_) { + // Not doing DTLS. + // Note: SignalWritableState fired by set_writable. + set_writable(ice_transport_->writable()); + return; + } + + switch (dtls_state()) { + case webrtc::DtlsTransportState::kNew: + MaybeStartDtls(); + break; + case webrtc::DtlsTransportState::kConnected: + // Note: SignalWritableState fired by set_writable. + if (dtls_in_stun_ && dtls_ && first_ice_writable) { + // Dtls1.3 has one remaining packet after it has become kConnected (?), + // make sure that this packet is sent too. + ConfigureHandshakeTimeout(); + PeriodicRetransmitDtlsPacketUntilDtlsConnected(); + } + set_writable(ice_transport_->writable()); + break; + case webrtc::DtlsTransportState::kConnecting: + if (dtls_in_stun_ && dtls_) { + // If DTLS piggybacking is enabled, we set the timeout + // on the DTLS object (which is then different from the + // inital kDisabledHandshakeTimeoutMs) + ConfigureHandshakeTimeout(); + PeriodicRetransmitDtlsPacketUntilDtlsConnected(); + } + break; + case webrtc::DtlsTransportState::kFailed: + // Should not happen. Do nothing. + RTC_LOG(LS_ERROR) << ToString() + << ": OnWritableState() called in state " + "webrtc::DtlsTransportState::kFailed."; + break; + case webrtc::DtlsTransportState::kClosed: + // Should not happen. Do nothing. + RTC_LOG(LS_ERROR) << ToString() + << ": OnWritableState() called in state " + "webrtc::DtlsTransportState::kClosed."; + break; + case webrtc::DtlsTransportState::kNumValues: + RTC_DCHECK_NOTREACHED(); + break; + } +} + +void DtlsTransportInternalImpl::OnReceivingState( + webrtc::PacketTransportInternal* transport) { + RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK(transport == ice_transport_); + RTC_LOG(LS_VERBOSE) << ToString() + << ": ice_transport " + "receiving state changed to " + << ice_transport_->receiving(); + if (!dtls_active_ || dtls_state() == webrtc::DtlsTransportState::kConnected) { + // Note: SignalReceivingState fired by set_receiving. + set_receiving(ice_transport_->receiving()); + } +} + +void DtlsTransportInternalImpl::OnReadPacket( + webrtc::PacketTransportInternal* transport, + const ReceivedIpPacket& packet, + bool piggybacked) { + RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK(transport == ice_transport_); + + if (!dtls_active_) { + // Not doing DTLS. 
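The state-transition comment above OnWritableState() can be distilled into a small decision table: in passthrough mode writability simply mirrors ICE, otherwise it is gated on the DTLS handshake. This is an illustrative simplification only; the real code additionally defers writability until ICE has been writable at least once when DTLS-in-STUN is used (see set_writable below):

// Rough derivation of the transport's writable bit, for illustration.
bool DerivedWritable(bool dtls_active, bool dtls_handshake_complete,
                     bool ice_writable) {
  if (!dtls_active)
    return ice_writable;                          // passthrough mode
  return dtls_handshake_complete && ice_writable; // DTLS gates writability
}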
+ NotifyPacketReceived(packet); + return; + } + + switch (dtls_state()) { + case webrtc::DtlsTransportState::kNew: + if (dtls_) { + RTC_LOG(LS_INFO) << ToString() + << ": Packet received before DTLS started."; + } else { + RTC_LOG(LS_WARNING) << ToString() + << ": Packet received before we know if we are " + "doing DTLS or not."; + } + // Cache a client hello packet received before DTLS has actually started. + if (webrtc::IsDtlsClientHelloPacket(packet.payload())) { + RTC_LOG(LS_INFO) << ToString() + << ": Caching DTLS ClientHello packet until DTLS is " + "started."; + cached_client_hello_.AddIfUnique(packet.payload()); + cached_client_hello_.Prune(kMaxCachedClientHello); + // If we haven't started setting up DTLS yet (because we don't have a + // remote fingerprint/role), we can use the client hello as a clue that + // the peer has chosen the client role, and proceed with the handshake. + // The fingerprint will be verified when it's set. + if (!dtls_ && local_certificate_) { + SetDtlsRole(webrtc::SSL_SERVER); + SetupDtls(); + } + } else { + RTC_LOG(LS_INFO) << ToString() + << ": Not a DTLS ClientHello packet; dropping."; + } + break; + + case webrtc::DtlsTransportState::kConnecting: + case webrtc::DtlsTransportState::kConnected: + // We should only get DTLS or SRTP packets; STUN's already been demuxed. + // Is this potentially a DTLS packet? + if (webrtc::IsDtlsPacket(packet.payload())) { + if (!HandleDtlsPacket(packet.payload())) { + RTC_LOG(LS_ERROR) << ToString() << ": Failed to handle DTLS packet."; + return; + } + } else { + // Not a DTLS packet; our handshake should be complete by now. + if (dtls_state() != webrtc::DtlsTransportState::kConnected) { + RTC_LOG(LS_ERROR) << ToString() + << ": Received non-DTLS packet before DTLS " + "complete."; + return; + } + + // And it had better be a SRTP packet. + if (!IsRtpPacket(packet.payload())) { + RTC_LOG(LS_ERROR) + << ToString() << ": Received unexpected non-DTLS packet."; + return; + } + + // Sanity check. + RTC_DCHECK(!srtp_ciphers_.empty()); + + // Signal this upwards as a bypass packet. + NotifyPacketReceived( + packet.CopyAndSet(ReceivedIpPacket::kSrtpEncrypted)); + } + break; + case webrtc::DtlsTransportState::kFailed: + case webrtc::DtlsTransportState::kClosed: + case webrtc::DtlsTransportState::kNumValues: + // This shouldn't be happening. Drop the packet. + break; + } +} + +void DtlsTransportInternalImpl::OnSentPacket( + webrtc::PacketTransportInternal* /* transport */, + const SentPacketInfo& sent_packet) { + RTC_DCHECK_RUN_ON(&thread_checker_); + SignalSentPacket(this, sent_packet); +} + +void DtlsTransportInternalImpl::OnReadyToSend( + webrtc::PacketTransportInternal* /* transport */) { + RTC_DCHECK_RUN_ON(&thread_checker_); + if (writable()) { + SignalReadyToSend(this); + } +} + +void DtlsTransportInternalImpl::OnDtlsEvent(int sig, int err) { + RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK(dtls_); + + if (sig & webrtc::SE_OPEN) { + // This is the first time. + RTC_LOG(LS_INFO) << ToString() << ": DTLS handshake complete."; + // The check for OPEN shouldn't be necessary but let's make + // sure we don't accidentally frob the state if it's closed. 
+ if (dtls_->GetState() == webrtc::SS_OPEN) { + int ssl_version_bytes; + bool ret = dtls_->GetSslVersionBytes(&ssl_version_bytes); + RTC_DCHECK(ret); + dtls_stun_piggyback_controller_.SetDtlsHandshakeComplete( + dtls_role_ == webrtc::SSL_CLIENT, + ssl_version_bytes == webrtc::kDtls13VersionBytes); + downward_->SetDtlsStunPiggybackController(nullptr); + set_dtls_state(webrtc::DtlsTransportState::kConnected); + set_writable(true); + } + } + if (sig & webrtc::SE_READ) { + uint8_t buf[webrtc::kMaxDtlsPacketLen]; + size_t read; + int read_error; + StreamResult ret; + // The underlying DTLS stream may have received multiple DTLS records in + // one packet, so read all of them. + do { + ret = dtls_->Read(buf, read, read_error); + if (ret == webrtc::SR_SUCCESS) { + // TODO(bugs.webrtc.org/15368): It should be possible to use information + // from the original packet here to populate socket address and + // timestamp. + NotifyPacketReceived(ReceivedIpPacket( + MakeArrayView(buf, read), webrtc::SocketAddress(), + webrtc::Timestamp::Micros(webrtc::TimeMicros()), + EcnMarking::kNotEct, ReceivedIpPacket::kDtlsDecrypted)); + } else if (ret == webrtc::SR_EOS) { + // Remote peer shut down the association with no error. + RTC_LOG(LS_INFO) << ToString() << ": DTLS transport closed by remote"; + set_writable(false); + set_dtls_state(webrtc::DtlsTransportState::kClosed); + NotifyOnClose(); + } else if (ret == webrtc::SR_ERROR) { + // Remote peer shut down the association with an error. + RTC_LOG(LS_INFO) + << ToString() + << ": Closed by remote with DTLS transport error, code=" + << read_error; + set_writable(false); + set_dtls_state(webrtc::DtlsTransportState::kFailed); + NotifyOnClose(); + } + } while (ret == webrtc::SR_SUCCESS); + } + if (sig & webrtc::SE_CLOSE) { + RTC_DCHECK(sig == webrtc::SE_CLOSE); // SE_CLOSE should be by itself. + set_writable(false); + if (!err) { + RTC_LOG(LS_INFO) << ToString() << ": DTLS transport closed"; + set_dtls_state(webrtc::DtlsTransportState::kClosed); + } else { + RTC_LOG(LS_INFO) << ToString() << ": DTLS transport error, code=" << err; + set_dtls_state(webrtc::DtlsTransportState::kFailed); + } + } +} + +void DtlsTransportInternalImpl::OnNetworkRouteChanged( + std::optional network_route) { + RTC_DCHECK_RUN_ON(&thread_checker_); + SignalNetworkRouteChanged(network_route); +} + +void DtlsTransportInternalImpl::MaybeStartDtls() { + RTC_DCHECK(ice_transport_); + // When adding the DTLS handshake in STUN we want to call StartSSL even + // before the ICE transport is ready. + if (dtls_ && (ice_transport_->writable() || dtls_in_stun_)) { + ConfigureHandshakeTimeout(); + + if (dtls_->StartSSL()) { + // This should never fail: + // Because we are operating in a nonblocking mode and all + // incoming packets come in via OnReadPacket(), which rejects + // packets in this state, the incoming queue must be empty. We + // ignore write errors, thus any errors must be because of + // configuration and therefore are our fault. + RTC_DCHECK_NOTREACHED() << "StartSSL failed."; + RTC_LOG(LS_ERROR) << ToString() << ": Couldn't start DTLS handshake"; + set_dtls_state(webrtc::DtlsTransportState::kFailed); + return; + } + RTC_LOG(LS_INFO) + << ToString() + << ": DtlsTransportInternalImpl: Started DTLS handshake active=" + << IsDtlsActive() + << " role=" << (*dtls_role_ == SSL_SERVER ? "server" : "client"); + set_dtls_state(webrtc::DtlsTransportState::kConnecting); + // Now that the handshake has started, we can process a cached ClientHello + // (if one exists). 
+ if (!cached_client_hello_.empty()) { + if (*dtls_role_ == webrtc::SSL_SERVER) { + int size = cached_client_hello_.size(); + RTC_LOG(LS_INFO) << ToString() << ": Handling #" << size + << " cached DTLS ClientHello packet(s)."; + for (int i = 0; i < size; i++) { + if (!HandleDtlsPacket(cached_client_hello_.GetNext())) { + RTC_LOG(LS_ERROR) + << ToString() << ": Failed to handle DTLS packet."; + break; + } + } + } else { + RTC_LOG(LS_WARNING) << ToString() + << ": Discarding cached DTLS ClientHello packet " + "because we don't have the server role."; + } + cached_client_hello_.clear(); + } + } +} + +// Called from OnReadPacket when a DTLS packet is received. +bool DtlsTransportInternalImpl::HandleDtlsPacket( + ArrayView payload) { + // Pass to the StreamInterfaceChannel which ends up being passed to the DTLS + // stack. + return downward_->OnPacketReceived( + reinterpret_cast(payload.data()), payload.size()); +} + +void DtlsTransportInternalImpl::set_receiving(bool receiving) { + if (receiving_ == receiving) { + return; + } + receiving_ = receiving; + SignalReceivingState(this); +} + +void DtlsTransportInternalImpl::set_writable(bool writable) { + if (writable_ == writable) { + return; + } + if (writable && !ice_has_been_writable_) { + // Wait with reporting writable until ICE has become writable once, + // so as to not confuse other part of stack (such as sctp). + RTC_DCHECK(dtls_in_stun_); + RTC_LOG(LS_INFO) + << ToString() + << ": defer set_writable(true) until ICE has become writable once"; + return; + } + + if (event_log_) { + event_log_->Log( + std::make_unique(writable)); + } + RTC_LOG(LS_VERBOSE) << ToString() << ": set_writable to: " << writable; + writable_ = writable; + if (writable_) { + SignalReadyToSend(this); + } + SignalWritableState(this); +} + +void DtlsTransportInternalImpl::set_dtls_state( + webrtc::DtlsTransportState state) { + if (dtls_state_ == state) { + return; + } + if (event_log_) { + event_log_->Log( + std::make_unique(state)); + } + RTC_LOG(LS_VERBOSE) << ToString() << ": set_dtls_state from:" + << static_cast(dtls_state_) << " to " + << static_cast(state); + dtls_state_ = state; + SendDtlsState(this, state); +} + +void DtlsTransportInternalImpl::OnDtlsHandshakeError( + webrtc::SSLHandshakeError error) { + SendDtlsHandshakeError(error); +} + +int ComputeRetransmissionTimeout(int rtt_ms) { + return std::max(kMinDtlsHandshakeTimeoutMs, + std::min(kMaxDtlsHandshakeTimeoutMs, 2 * (rtt_ms))); +} + +void DtlsTransportInternalImpl::ConfigureHandshakeTimeout() { + RTC_DCHECK(dtls_); + std::optional rtt_ms = ice_transport_->GetRttEstimate(); + if (rtt_ms) { + // Limit the timeout to a reasonable range in case the ICE RTT takes + // extreme values. + int initial_timeout_ms = ComputeRetransmissionTimeout(*rtt_ms); + RTC_LOG(LS_INFO) << ToString() << ": configuring DTLS handshake timeout " + << initial_timeout_ms << "ms based on ICE RTT " << *rtt_ms; + dtls_->SetInitialRetransmissionTimeout(initial_timeout_ms); + } else if (dtls_in_stun_) { + // Configure a very high timeout to effectively disable the DTLS timeout + // and avoid fragmented resends. This is ok since DTLS-in-STUN caches + // the handshake pacets and resends them using the pacing of ICE. 
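ComputeRetransmissionTimeout above clamps twice the ICE RTT into [kMinDtlsHandshakeTimeoutMs, kMaxDtlsHandshakeTimeoutMs], i.e. [50, 3000] ms. A few worked values as a standalone check of that clamp:

#include <algorithm>
#include <cassert>

// Same clamp as ComputeRetransmissionTimeout: 2*RTT bounded to [50, 3000] ms.
int RetransmitTimeoutMs(int rtt_ms) {
  return std::max(50, std::min(3000, 2 * rtt_ms));
}

int main() {
  assert(RetransmitTimeoutMs(10) == 50);      // very low RTT -> floor
  assert(RetransmitTimeoutMs(100) == 200);    // typical RTT -> 2x RTT
  assert(RetransmitTimeoutMs(5000) == 3000);  // extreme RTT -> ceiling
}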
+ RTC_LOG(LS_INFO) << ToString() << ": configuring DTLS handshake timeout " + << kDisabledHandshakeTimeoutMs << "ms for DTLS-in-STUN"; + dtls_->SetInitialRetransmissionTimeout(kDisabledHandshakeTimeoutMs); + } else { + RTC_LOG(LS_INFO) + << ToString() + << ": no RTT estimate - using default DTLS handshake timeout"; + } +} + +void DtlsTransportInternalImpl::SetPiggybackDtlsDataCallback( + absl::AnyInvocable callback) { + RTC_DCHECK(callback == nullptr || !piggybacked_dtls_callback_); + piggybacked_dtls_callback_ = std::move(callback); +} + +bool DtlsTransportInternalImpl::IsDtlsPiggybackSupportedByPeer() { + RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK(ice_transport_); + return dtls_in_stun_ && (dtls_stun_piggyback_controller_.state() != + webrtc::DtlsStunPiggybackController::State::OFF); +} + +bool DtlsTransportInternalImpl::WasDtlsCompletedByPiggybacking() { + RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK(ice_transport_); + return dtls_in_stun_ && + (dtls_stun_piggyback_controller_.state() == + webrtc::DtlsStunPiggybackController::State::COMPLETE || + dtls_stun_piggyback_controller_.state() == + webrtc::DtlsStunPiggybackController::State::PENDING); +} + +// TODO (jonaso, webrtc:367395350): Switch to upcoming +// DTLSv1_set_timeout_duration. Remove once we can get DTLS to handle +// retransmission also when handshake is not complete but we become writable +// (e.g. by setting a good timeout). +void DtlsTransportInternalImpl:: + PeriodicRetransmitDtlsPacketUntilDtlsConnected() { + RTC_DCHECK_RUN_ON(&thread_checker_); + if (pending_periodic_retransmit_dtls_packet_ == true) { + // PeriodicRetransmitDtlsPacketUntilDtlsConnected is called in two places + // a) Either by PostTask, where pending_ping_until_dtls_connected_ is FALSE + // b) When Ice get connected, in which it is unknown if + // pending_periodic_retransmit_dtls_packet_. + return; + } + if (ice_transport_->writable() && dtls_in_stun_) { + auto data_to_send = dtls_stun_piggyback_controller_.GetDataToPiggyback( + webrtc::STUN_BINDING_INDICATION); + if (!data_to_send) { + // No data to send, we're done. + return; + } + AsyncSocketPacketOptions packet_options; + ice_transport_->SendPacket(data_to_send->data(), data_to_send->size(), + packet_options, /* flags= */ 0); + } + + const auto rtt_ms = ice_transport_->GetRttEstimate().value_or(100); + const int delay_ms = ComputeRetransmissionTimeout(rtt_ms); + + // Set pending before we post task. + pending_periodic_retransmit_dtls_packet_ = true; + webrtc::Thread::Current()->PostDelayedHighPrecisionTask( + webrtc::SafeTask(safety_flag_.flag(), + [this] { + RTC_DCHECK_RUN_ON(&thread_checker_); + // Clear pending then the PostTask runs. + pending_periodic_retransmit_dtls_packet_ = false; + PeriodicRetransmitDtlsPacketUntilDtlsConnected(); + }), + webrtc::TimeDelta::Millis(delay_ms)); + RTC_LOG(LS_INFO) << ToString() + << ": Scheduled retransmit of DTLS packet, delay_ms: " + << delay_ms; +} + +int DtlsTransportInternalImpl::GetRetransmissionCount() const { + if (!dtls_) { + return 0; + } + return dtls_->GetRetransmissionCount(); +} + +int DtlsTransportInternalImpl::GetStunDataCount() const { + if (!dtls_in_stun_) { + return 0; + } + return dtls_stun_piggyback_controller_.GetCountOfReceivedData(); +} + +} // namespace webrtc diff --git a/p2p/dtls/dtls_transport.h b/p2p/dtls/dtls_transport.h new file mode 100644 index 0000000000..1f099ee757 --- /dev/null +++ b/p2p/dtls/dtls_transport.h @@ -0,0 +1,337 @@ +/* + * Copyright 2011 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef P2P_DTLS_DTLS_TRANSPORT_H_ +#define P2P_DTLS_DTLS_TRANSPORT_H_ + +#include +#include +#include +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/crypto/crypto_options.h" +#include "api/dtls_transport_interface.h" +#include "api/rtc_error.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/packet_transport_internal.h" +#include "p2p/dtls/dtls_stun_piggyback_controller.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "p2p/dtls/dtls_utils.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/buffer.h" +#include "rtc_base/buffer_queue.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_route.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/socket.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/stream.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +// A bridge between a packet-oriented/transport-type interface on +// the bottom and a StreamInterface on the top. +class StreamInterfaceChannel : public StreamInterface { + public: + explicit StreamInterfaceChannel(webrtc::IceTransportInternal* ice_transport); + + void SetDtlsStunPiggybackController( + webrtc::DtlsStunPiggybackController* dtls_stun_piggyback_controller); + + StreamInterfaceChannel(const StreamInterfaceChannel&) = delete; + StreamInterfaceChannel& operator=(const StreamInterfaceChannel&) = delete; + + // Push in a packet; this gets pulled out from Read(). + bool OnPacketReceived(const char* data, size_t size); + + // Implementations of StreamInterface + StreamState GetState() const override; + void Close() override; + StreamResult Read(ArrayView buffer, + size_t& read, + int& error) override; + StreamResult Write(ArrayView data, + size_t& written, + int& error) override; + + bool Flush() override; + + private: + webrtc::IceTransportInternal* const ice_transport_; // owned by DtlsTransport + webrtc::DtlsStunPiggybackController* dtls_stun_piggyback_controller_ = + nullptr; // owned by DtlsTransport + StreamState state_ RTC_GUARDED_BY(callback_sequence_); + webrtc::BufferQueue packets_ RTC_GUARDED_BY(callback_sequence_); +}; + +// This class provides a DTLS SSLStreamAdapter inside a TransportChannel-style +// packet-based interface, wrapping an existing TransportChannel instance +// (e.g a P2PTransportChannel) +// Here's the way this works: +// +// DtlsTransport { +// SSLStreamAdapter* dtls_ { +// StreamInterfaceChannel downward_ { +// IceTransportInternal* ice_transport_; +// } +// } +// } +// +// - Data which comes into DtlsTransport from the underlying +// ice_transport_ via OnReadPacket() is checked for whether it is DTLS +// or not, and if it is, is passed to DtlsTransport::HandleDtlsPacket, +// which pushes it into to downward_. 
dtls_ is listening for events on +// downward_, so it immediately calls downward_->Read(). +// +// - Data written to DtlsTransport is passed either to downward_ or directly +// to ice_transport_, depending on whether DTLS is negotiated and whether +// the flags include PF_SRTP_BYPASS +// +// - The SSLStreamAdapter writes to downward_->Write() which translates it +// into packet writes on ice_transport_. +// +// This class is not thread safe; all methods must be called on the same thread +// as the constructor. +class DtlsTransportInternalImpl : public webrtc::DtlsTransportInternal { + public: + // `ice_transport` is the ICE transport this DTLS transport is wrapping. It + // must outlive this DTLS transport. + // + // `crypto_options` are the options used for the DTLS handshake. This affects + // whether GCM crypto suites are negotiated. + // + // `event_log` is an optional RtcEventLog for logging state changes. It should + // outlive the DtlsTransport. + DtlsTransportInternalImpl( + webrtc::IceTransportInternal* ice_transport, + const webrtc::CryptoOptions& crypto_options, + webrtc::RtcEventLog* event_log, + webrtc::SSLProtocolVersion max_version = webrtc::SSL_PROTOCOL_DTLS_12); + + ~DtlsTransportInternalImpl() override; + + DtlsTransportInternalImpl(const DtlsTransportInternalImpl&) = delete; + DtlsTransportInternalImpl& operator=(const DtlsTransportInternalImpl&) = + delete; + + webrtc::DtlsTransportState dtls_state() const override; + const std::string& transport_name() const override; + int component() const override; + + // DTLS is active if a local certificate was set. Otherwise this acts in a + // "passthrough" mode, sending packets directly through the underlying ICE + // transport. + // TODO(deadbeef): Remove this weirdness, and handle it in the upper layers. + bool IsDtlsActive() const override; + + // SetLocalCertificate is what makes DTLS active. It must be called before + // SetRemoteFinterprint. + // TODO(deadbeef): Once DtlsTransportInternalImpl no longer has the concept of + // being "active" or not (acting as a passthrough if not active), just require + // this certificate on construction or "Start". + bool SetLocalCertificate( + const scoped_refptr& certificate) override; + scoped_refptr GetLocalCertificate() const override; + + // SetRemoteFingerprint must be called after SetLocalCertificate, and any + // other methods like SetDtlsRole. It's what triggers the actual DTLS setup. + // TODO(deadbeef): Rename to "Start" like in ORTC? + bool SetRemoteFingerprint(absl::string_view digest_alg, + const uint8_t* digest, + size_t digest_len) override; + + // SetRemoteParameters must be called after SetLocalCertificate. + webrtc::RTCError SetRemoteParameters( + absl::string_view digest_alg, + const uint8_t* digest, + size_t digest_len, + std::optional role) override; + + // Called to send a packet (via DTLS, if turned on). + int SendPacket(const char* data, + size_t size, + const AsyncSocketPacketOptions& options, + int flags) override; + + bool GetOption(webrtc::Socket::Option opt, int* value) override; + + // Find out which TLS version was negotiated + bool GetSslVersionBytes(int* version) const override; + // Find out which DTLS-SRTP cipher was negotiated + bool GetSrtpCryptoSuite(int* cipher) const override; + + // Find out which signature algorithm was used by the peer. Returns values + // from + // https://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#tls-signaturescheme + // If not applicable, it returns zero. 
+ uint16_t GetSslPeerSignatureAlgorithm() const override; + + bool GetDtlsRole(webrtc::SSLRole* role) const override; + bool SetDtlsRole(webrtc::SSLRole role) override; + + // Find out which DTLS cipher was negotiated + bool GetSslCipherSuite(int* cipher) const override; + std::optional GetTlsCipherSuiteName() const override; + + // Once DTLS has been established, this method retrieves the certificate + // chain in use by the remote peer, for use in external identity + // verification. + std::unique_ptr GetRemoteSSLCertChain() const override; + + // Once DTLS has established (i.e., this ice_transport is writable), this + // method extracts the keys negotiated during the DTLS handshake, for use in + // external encryption. DTLS-SRTP uses this to extract the needed SRTP keys. + bool ExportSrtpKeyingMaterial( + ZeroOnFreeBuffer& keying_material) override; + + webrtc::IceTransportInternal* ice_transport() override; + + // For informational purposes. Tells if the DTLS handshake has finished. + // This may be true even if writable() is false, if the remote fingerprint + // has not yet been verified. + bool IsDtlsConnected(); + + bool receiving() const override; + bool writable() const override; + + int GetError() override; + + std::optional network_route() const override; + + int SetOption(webrtc::Socket::Option opt, int value) override; + + std::string ToString() const { + const absl::string_view RECEIVING_ABBREV[2] = {"_", "R"}; + const absl::string_view WRITABLE_ABBREV[2] = {"_", "W"}; + StringBuilder sb; + sb << "DtlsTransport[" << transport_name() << "|" << component_ << "|" + << RECEIVING_ABBREV[receiving()] << WRITABLE_ABBREV[writable()] << "]"; + return sb.Release(); + } + + // Number of times "DTLS retransmission" has been triggered. + // Currently used for testing but maybe put into stats in the future? + int GetRetransmissionCount() const; + + // Number of times data has been received from a STUN BINDING. + int GetStunDataCount() const; + + // Two methods for testing. + bool IsDtlsPiggybackSupportedByPeer(); + bool WasDtlsCompletedByPiggybacking(); + + private: + void ConnectToIceTransport(); + + void OnWritableState(webrtc::PacketTransportInternal* transport); + void OnReadPacket(webrtc::PacketTransportInternal* transport, + const ReceivedIpPacket& packet, + bool piggybacked); + void OnSentPacket(webrtc::PacketTransportInternal* transport, + const SentPacketInfo& sent_packet); + void OnReadyToSend(webrtc::PacketTransportInternal* transport); + void OnReceivingState(webrtc::PacketTransportInternal* transport); + void OnDtlsEvent(int sig, int err); + void OnNetworkRouteChanged(std::optional network_route); + bool SetupDtls(); + void MaybeStartDtls(); + bool HandleDtlsPacket(ArrayView payload); + void OnDtlsHandshakeError(webrtc::SSLHandshakeError error); + void ConfigureHandshakeTimeout(); + + void set_receiving(bool receiving); + void set_writable(bool writable); + // Sets the DTLS state, signaling if necessary. + void set_dtls_state(webrtc::DtlsTransportState state); + void SetPiggybackDtlsDataCallback( + absl::AnyInvocable + callback); + void PeriodicRetransmitDtlsPacketUntilDtlsConnected(); + + RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker thread_checker_; + + const int component_; + webrtc::DtlsTransportState dtls_state_ = webrtc::DtlsTransportState::kNew; + // Underlying ice_transport, not owned by this class. 
+ webrtc::IceTransportInternal* const ice_transport_; + std::unique_ptr dtls_; // The DTLS stream + StreamInterfaceChannel* + downward_; // Wrapper for ice_transport_, owned by dtls_. + const std::vector srtp_ciphers_; // SRTP ciphers to use with DTLS. + bool dtls_active_ = false; + scoped_refptr local_certificate_; + std::optional dtls_role_; + const webrtc::SSLProtocolVersion ssl_max_version_; + Buffer remote_fingerprint_value_; + std::string remote_fingerprint_algorithm_; + + // Cached DTLS ClientHello packet that was received before we started the + // DTLS handshake. This could happen if the hello was received before the + // ice transport became writable, or before a remote fingerprint was received. + PacketStash cached_client_hello_; + + bool receiving_ = false; + bool writable_ = false; + + // Keep track if ICE has ever been writable. + // This is used to prevent "spurious" Dtls::Writable with DTLS-in-STUN, + // where DTLS can become writable before ICE. This can confuse other parts + // of the stack. + bool ice_has_been_writable_ = false; + + webrtc::RtcEventLog* const event_log_; + + // Initialized in constructor based on WebRTC-IceHandshakeDtls, + // (so that we return PIGGYBACK_ACK to client if we get STUN_BINDING_REQUEST + // directly). Maybe disabled in SetupDtls has been called. + bool dtls_in_stun_ = false; + + // A controller for piggybacking DTLS in STUN. + webrtc::DtlsStunPiggybackController dtls_stun_piggyback_controller_; + + absl::AnyInvocable + piggybacked_dtls_callback_; + + // When ICE get writable during dtls piggybacked handshake + // there is currently no safe way of updating the timeout + // in boringssl (that is work in progress). Therefore + // DtlsTransportInternalImpl has a "hack" to periodically retransmit. + bool pending_periodic_retransmit_dtls_packet_ = false; + webrtc::ScopedTaskSafetyDetached safety_flag_; +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using DtlsTransport = ::webrtc::DtlsTransportInternalImpl; +using ::webrtc::StreamInterfaceChannel; +} // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // P2P_DTLS_DTLS_TRANSPORT_H_ diff --git a/p2p/base/dtls_transport_factory.h b/p2p/dtls/dtls_transport_factory.h similarity index 60% rename from p2p/base/dtls_transport_factory.h rename to p2p/dtls/dtls_transport_factory.h index 7c4a24adc8..62e9eeec34 100644 --- a/p2p/base/dtls_transport_factory.h +++ b/p2p/dtls/dtls_transport_factory.h @@ -8,16 +8,17 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef P2P_BASE_DTLS_TRANSPORT_FACTORY_H_ -#define P2P_BASE_DTLS_TRANSPORT_FACTORY_H_ +#ifndef P2P_DTLS_DTLS_TRANSPORT_FACTORY_H_ +#define P2P_DTLS_DTLS_TRANSPORT_FACTORY_H_ #include -#include -#include "p2p/base/dtls_transport_internal.h" +#include "api/crypto/crypto_options.h" #include "p2p/base/ice_transport_internal.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "rtc_base/ssl_stream_adapter.h" -namespace cricket { +namespace webrtc { // This interface is used to create DTLS transports. The external transports // can be injected into the JsepTransportController through it. 
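The hunk that follows migrates this factory interface to webrtc:: types. As a hedged sketch of how an injected factory could hand out the transport defined in this patch (the return type is assumed to be std::unique_ptr<webrtc::DtlsTransportInternal>, and CreateDtlsTransport is assumed to be the interface's only pure-virtual method):

#include <memory>

#include "api/crypto/crypto_options.h"
#include "p2p/base/ice_transport_internal.h"
#include "p2p/dtls/dtls_transport.h"
#include "p2p/dtls/dtls_transport_factory.h"

// Sketch of a factory that creates the default DtlsTransportInternalImpl.
class DefaultDtlsTransportFactory : public webrtc::DtlsTransportFactory {
 public:
  std::unique_ptr<webrtc::DtlsTransportInternal> CreateDtlsTransport(
      webrtc::IceTransportInternal* ice,
      const webrtc::CryptoOptions& crypto_options,
      webrtc::SSLProtocolVersion max_version) override {
    return std::make_unique<webrtc::DtlsTransportInternalImpl>(
        ice, crypto_options, /*event_log=*/nullptr, max_version);
  }
};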
@@ -31,10 +32,18 @@ class DtlsTransportFactory { virtual std::unique_ptr CreateDtlsTransport( IceTransportInternal* ice, - const webrtc::CryptoOptions& crypto_options, - rtc::SSLProtocolVersion max_version) = 0; + const CryptoOptions& crypto_options, + SSLProtocolVersion max_version) = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::DtlsTransportFactory; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES -#endif // P2P_BASE_DTLS_TRANSPORT_FACTORY_H_ +#endif // P2P_DTLS_DTLS_TRANSPORT_FACTORY_H_ diff --git a/p2p/base/dtls_transport_internal.cc b/p2p/dtls/dtls_transport_internal.cc similarity index 85% rename from p2p/base/dtls_transport_internal.cc rename to p2p/dtls/dtls_transport_internal.cc index 6997dbc702..3e3387fffc 100644 --- a/p2p/base/dtls_transport_internal.cc +++ b/p2p/dtls/dtls_transport_internal.cc @@ -8,12 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "p2p/base/dtls_transport_internal.h" +#include "p2p/dtls/dtls_transport_internal.h" -namespace cricket { +namespace webrtc { DtlsTransportInternal::DtlsTransportInternal() = default; DtlsTransportInternal::~DtlsTransportInternal() = default; -} // namespace cricket +} // namespace webrtc diff --git a/p2p/dtls/dtls_transport_internal.h b/p2p/dtls/dtls_transport_internal.h new file mode 100644 index 0000000000..df17ac600f --- /dev/null +++ b/p2p/dtls/dtls_transport_internal.h @@ -0,0 +1,169 @@ +/* + * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef P2P_DTLS_DTLS_TRANSPORT_INTERNAL_H_ +#define P2P_DTLS_DTLS_TRANSPORT_INTERNAL_H_ + +#include +#include + +#include +#include +#include + +#include "absl/base/attributes.h" +#include "absl/strings/string_view.h" +#include "api/dtls_transport_interface.h" +#include "api/rtc_error.h" +#include "api/scoped_refptr.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/packet_transport_internal.h" +#include "rtc_base/buffer.h" +#include "rtc_base/callback_list.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_stream_adapter.h" + +namespace webrtc { + +enum PacketFlags { + PF_NORMAL = 0x00, // A normal packet. + PF_SRTP_BYPASS = 0x01, // An encrypted SRTP packet; bypass any additional + // crypto provided by the transport (e.g. DTLS) +}; + +// DtlsTransportInternal is an internal interface that does DTLS, also +// negotiating SRTP crypto suites so that it may be used for DTLS-SRTP. +// +// Once the public interface is supported, +// (https://www.w3.org/TR/webrtc/#rtcdtlstransport-interface) +// the DtlsTransportInterface will be split from this class. 
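PF_SRTP_BYPASS above is the flag that SendPacket in dtls_transport.cc checks: packets sent with it must already be SRTP-protected and are forwarded to ICE untouched, while PF_NORMAL data is written through the DTLS stream. A hedged caller-side sketch (the transport pointer and buffers are assumed to be valid):

#include <cstddef>

#include "p2p/dtls/dtls_transport_internal.h"
#include "rtc_base/async_packet_socket.h"

// `dtls` is any webrtc::DtlsTransportInternal*.
void SendMedia(webrtc::DtlsTransportInternal* dtls,
               const char* srtp_packet, size_t srtp_len,
               const char* app_data, size_t app_len) {
  webrtc::AsyncSocketPacketOptions options;
  // Already SRTP-encrypted media: bypass DTLS, goes straight to ICE.
  dtls->SendPacket(srtp_packet, srtp_len, options, webrtc::PF_SRTP_BYPASS);
  // Anything else (e.g. SCTP for data channels) is encrypted by DTLS itself.
  dtls->SendPacket(app_data, app_len, options, webrtc::PF_NORMAL);
}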
+class DtlsTransportInternal : public PacketTransportInternal { + public: + ~DtlsTransportInternal() override; + + DtlsTransportInternal(const DtlsTransportInternal&) = delete; + DtlsTransportInternal& operator=(const DtlsTransportInternal&) = delete; + + virtual DtlsTransportState dtls_state() const = 0; + + virtual int component() const = 0; + + virtual bool IsDtlsActive() const = 0; + + virtual bool GetDtlsRole(SSLRole* role) const = 0; + + virtual bool SetDtlsRole(SSLRole role) = 0; + + // Finds out which TLS/DTLS version is running. + virtual bool GetSslVersionBytes(int* version) const = 0; + // Finds out which DTLS-SRTP cipher was negotiated. + // TODO(zhihuang): Remove this once all dependencies implement this. + virtual bool GetSrtpCryptoSuite(int* cipher) const = 0; + + // Finds out which DTLS cipher was negotiated. + // TODO(zhihuang): Remove this once all dependencies implement this. + virtual bool GetSslCipherSuite(int* cipher) const = 0; + virtual std::optional GetTlsCipherSuiteName() const = 0; + + // Find out which signature algorithm was used by the peer. Returns values + // from + // https://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#tls-signaturescheme + // If not applicable, it returns zero. + virtual uint16_t GetSslPeerSignatureAlgorithm() const = 0; + + // Gets the local RTCCertificate used for DTLS. + virtual scoped_refptr GetLocalCertificate() const = 0; + + virtual bool SetLocalCertificate( + const scoped_refptr& certificate) = 0; + + // Gets a copy of the remote side's SSL certificate chain. + virtual std::unique_ptr GetRemoteSSLCertChain() const = 0; + + // Allows key material to be extracted for external encryption. + virtual bool ExportSrtpKeyingMaterial( + ZeroOnFreeBuffer& keying_material) = 0; + + // Set DTLS remote fingerprint. Must be after local identity set. + ABSL_DEPRECATED("Use SetRemoteParameters instead.") + virtual bool SetRemoteFingerprint(absl::string_view digest_alg, + const uint8_t* digest, + size_t digest_len) = 0; + + // Set DTLS remote fingerprint and role. Must be after local identity set. + virtual RTCError SetRemoteParameters(absl::string_view digest_alg, + const uint8_t* digest, + size_t digest_len, + std::optional role) = 0; + + ABSL_DEPRECATED("Set the max version via construction.") + bool SetSslMaxProtocolVersion(SSLProtocolVersion /* version */) { + return true; + } + + // Expose the underneath IceTransport. + virtual IceTransportInternal* ice_transport() = 0; + + // F: void(DtlsTransportInternal*, const webrtc::DtlsTransportState) + template + void SubscribeDtlsTransportState(F&& callback) { + dtls_transport_state_callback_list_.AddReceiver(std::forward(callback)); + } + + template + void SubscribeDtlsTransportState(const void* id, F&& callback) { + dtls_transport_state_callback_list_.AddReceiver(id, + std::forward(callback)); + } + // Unsubscribe the subscription with given id. + void UnsubscribeDtlsTransportState(const void* id) { + dtls_transport_state_callback_list_.RemoveReceivers(id); + } + + void SendDtlsState(DtlsTransportInternal* transport, + DtlsTransportState state) { + dtls_transport_state_callback_list_.Send(transport, state); + } + + // Emitted whenever the Dtls handshake failed on some transport channel. 
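Both subscription hooks on this interface take plain lambdas: SubscribeDtlsTransportState above for state changes, and SubscribeDtlsHandshakeError below for handshake failures. A short usage sketch, assuming `dtls` is a DtlsTransportInternal* owned elsewhere:

#include "p2p/dtls/dtls_transport_internal.h"
#include "rtc_base/logging.h"

void WatchDtls(webrtc::DtlsTransportInternal* dtls) {
  dtls->SubscribeDtlsTransportState(
      [](webrtc::DtlsTransportInternal* transport,
         webrtc::DtlsTransportState state) {
        RTC_LOG(LS_INFO) << transport->transport_name()
                         << " DTLS state: " << static_cast<int>(state);
      });
  dtls->SubscribeDtlsHandshakeError([](webrtc::SSLHandshakeError error) {
    RTC_LOG(LS_ERROR) << "DTLS handshake error: " << static_cast<int>(error);
  });
}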
+ // F: void(webrtc::SSLHandshakeError) + template + void SubscribeDtlsHandshakeError(F&& callback) { + dtls_handshake_error_callback_list_.AddReceiver(std::forward(callback)); + } + + void SendDtlsHandshakeError(SSLHandshakeError error) { + dtls_handshake_error_callback_list_.Send(error); + } + + protected: + DtlsTransportInternal(); + + private: + CallbackList dtls_handshake_error_callback_list_; + CallbackList + dtls_transport_state_callback_list_; +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::DtlsTransportInternal; +using ::webrtc::PacketFlags; +using ::webrtc::PF_NORMAL; +using ::webrtc::PF_SRTP_BYPASS; +} // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // P2P_DTLS_DTLS_TRANSPORT_INTERNAL_H_ diff --git a/p2p/dtls/dtls_transport_unittest.cc b/p2p/dtls/dtls_transport_unittest.cc new file mode 100644 index 0000000000..0440516458 --- /dev/null +++ b/p2p/dtls/dtls_transport_unittest.cc @@ -0,0 +1,1899 @@ +/* + * Copyright 2011 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "p2p/dtls/dtls_transport.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/crypto/crypto_options.h" +#include "api/dtls_transport_interface.h" +#include "api/scoped_refptr.h" +#include "api/test/rtc_error_matchers.h" +#include "api/transport/stun.h" +#include "api/units/time_delta.h" +#include "p2p/base/packet_transport_internal.h" +#include "p2p/base/transport_description.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "p2p/dtls/dtls_utils.h" +#include "p2p/test/fake_ice_transport.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/buffer.h" +#include "rtc_base/byte_order.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/fake_clock.h" +#include "rtc_base/logging.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_fingerprint.h" +#include "rtc_base/ssl_identity.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/wait_until.h" + +namespace webrtc { + +using ::testing::Eq; +using ::testing::IsTrue; + +static const size_t kPacketNumOffset = 8; +static const size_t kPacketHeaderLen = 12; +static const int kFakePacketId = 0x1234; +static const int kTimeout = 10000; + +const uint8_t kRtpLeadByte = 0x80; + +static bool IsRtpLeadByte(uint8_t b) { + return b == kRtpLeadByte; +} + +// `modify_digest` is used to set modified fingerprints that are meant to fail +// validation. 
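+// (Incrementing the first digest byte below is enough to make the fingerprint
+// mismatch the peer's certificate; the invalid-fingerprint variants of the
+// event-ordering tests use this to drive the handshake into
+// DtlsTransportState::kFailed.)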
+void SetRemoteFingerprintFromCert(DtlsTransportInternalImpl* transport,
+                                  const scoped_refptr<RTCCertificate>& cert,
+                                  bool modify_digest = false) {
+  std::unique_ptr<SSLFingerprint> fingerprint =
+      SSLFingerprint::CreateFromCertificate(*cert);
+  if (modify_digest) {
+    ++fingerprint->digest.MutableData()[0];
+  }
+
+  // Even when the digest is known to be incorrect, SetRemoteParameters()
+  // should succeed here; the mismatch is only detected asynchronously during
+  // the DTLS handshake.
+  EXPECT_TRUE(
+      transport
+          ->SetRemoteParameters(
+              fingerprint->algorithm,
+              reinterpret_cast<const uint8_t*>(fingerprint->digest.data()),
+              fingerprint->digest.size(), std::nullopt)
+          .ok());
+}
+
+class DtlsTestClient : public sigslot::has_slots<> {
+ public:
+  explicit DtlsTestClient(absl::string_view name) : name_(name) {}
+  void CreateCertificate(KeyType key_type) {
+    certificate_ = RTCCertificate::Create(SSLIdentity::Create(name_, key_type));
+  }
+  const scoped_refptr<RTCCertificate>& certificate() { return certificate_; }
+  void SetupMaxProtocolVersion(SSLProtocolVersion version) {
+    ssl_max_version_ = version;
+  }
+  void SetPqc(bool value) { pqc_ = value; }
+  void set_async_delay(int async_delay_ms) { async_delay_ms_ = async_delay_ms; }
+
+  // Set up fake ICE transport and real DTLS transport under test.
+  void SetupTransports(IceRole role,
+                       bool rtt_estimate = true,
+                       absl::string_view field_trials_string = "") {
+    dtls_transport_ = nullptr;
+    fake_ice_transport_ = nullptr;
+
+    if (field_trials_string.empty() && pqc_) {
+      field_trials_string = "WebRTC-EnableDtlsPqc/Enabled/";
+    }
+
+    fake_ice_transport_.reset(new FakeIceTransport(
+        absl::StrCat("fake-", name_), 0,
+        /* network_thread= */ nullptr, field_trials_string));
+    if (rtt_estimate) {
+      fake_ice_transport_->set_rtt_estimate(
+          async_delay_ms_ ? std::optional<int>(async_delay_ms_) : std::nullopt,
+          /* async= */ true);
+    } else if (async_delay_ms_) {
+      fake_ice_transport_->SetAsync(async_delay_ms_);
+      fake_ice_transport_->SetAsyncDelay(async_delay_ms_);
+    }
+    fake_ice_transport_->SetIceRole(role);
+    // Hook the raw packets so that we can verify they are encrypted.
+    fake_ice_transport_->RegisterReceivedPacketCallback(
+        this, [&](PacketTransportInternal* transport,
+                  const ReceivedIpPacket& packet) {
+          OnFakeIceTransportReadPacket(transport, packet);
+        });
+
+    dtls_transport_ = std::make_unique<DtlsTransportInternalImpl>(
+        fake_ice_transport_.get(), CryptoOptions(),
+        /*event_log=*/nullptr, ssl_max_version_);
+    // Note: Certificate may be null here if testing passthrough.
+    dtls_transport_->SetLocalCertificate(certificate_);
+    dtls_transport_->SignalWritableState.connect(
+        this, &DtlsTestClient::OnTransportWritableState);
+    dtls_transport_->RegisterReceivedPacketCallback(
+        this, [&](PacketTransportInternal* transport,
+                  const ReceivedIpPacket& packet) {
+          OnTransportReadPacket(transport, packet);
+        });
+    dtls_transport_->SignalSentPacket.connect(
+        this, &DtlsTestClient::OnTransportSentPacket);
+  }
+
+  FakeIceTransport* fake_ice_transport() {
+    return static_cast<FakeIceTransport*>(dtls_transport_->ice_transport());
+  }
+
+  DtlsTransportInternalImpl* dtls_transport() { return dtls_transport_.get(); }
+
+  // Simulate fake ICE transports connecting.
+  bool Connect(DtlsTestClient* peer, bool asymmetric) {
+    fake_ice_transport()->SetDestination(peer->fake_ice_transport(),
+                                         asymmetric);
+    return true;
+  }
+
+  // Connect the fake ICE transports so that packets flow from one to the other.
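+  // Unlike Connect() above, the peer is added as a destination that is not
+  // yet writable and non-STUN packets are dropped until writability is
+  // established, so the DTLS handshake can only make progress once ICE pings
+  // and responses (SendIcePing / SendIcePingConf) have been exchanged.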
+ bool ConnectIceTransport(DtlsTestClient* peer) { + fake_ice_transport()->SetDestinationNotWritable(peer->fake_ice_transport()); + fake_ice_transport()->set_drop_non_stun_unless_writable(true); + return true; + } + + bool SendIcePing(int n = 1) { + for (int i = 0; i < n; i++) { + if (!fake_ice_transport_->SendIcePing()) { + return false; + } + } + return true; + } + + bool SendIcePingConf(int n = 1) { + for (int i = 0; i < n; i++) { + if (!fake_ice_transport_->SendIcePingConf()) { + return false; + } + } + return true; + } + + int received_dtls_client_hellos() const { + return received_dtls_client_hellos_; + } + + int received_dtls_server_hellos() const { + return received_dtls_server_hellos_; + } + + std::optional GetVersionBytes() { + int value; + if (dtls_transport_->GetSslVersionBytes(&value)) { + return value; + } + return std::nullopt; + } + + void CheckRole(SSLRole role) { + if (role == SSL_CLIENT) { + ASSERT_EQ(0, received_dtls_client_hellos_); + ASSERT_GT(received_dtls_server_hellos_, 0); + } else { + ASSERT_GT(received_dtls_client_hellos_, 0); + ASSERT_EQ(0, received_dtls_server_hellos_); + } + } + + void CheckSrtp(int expected_crypto_suite) { + int crypto_suite; + bool rv = dtls_transport_->GetSrtpCryptoSuite(&crypto_suite); + if (dtls_transport_->IsDtlsActive() && expected_crypto_suite) { + ASSERT_TRUE(rv); + ASSERT_EQ(crypto_suite, expected_crypto_suite); + } else { + ASSERT_FALSE(rv); + } + } + + void CheckSsl() { + int cipher; + bool rv = dtls_transport_->GetSslCipherSuite(&cipher); + if (dtls_transport_->IsDtlsActive()) { + ASSERT_TRUE(rv); + EXPECT_TRUE(SSLStreamAdapter::IsAcceptableCipher(cipher, KT_DEFAULT)); + } else { + ASSERT_FALSE(rv); + } + } + + void SendPackets(size_t size, size_t count, bool srtp) { + std::unique_ptr packet(new char[size]); + size_t sent = 0; + do { + // Fill the packet with a known value and a sequence number to check + // against, and make sure that it doesn't look like DTLS. + memset(packet.get(), sent & 0xff, size); + packet[0] = (srtp) ? kRtpLeadByte : 0x00; + SetBE32(packet.get() + kPacketNumOffset, static_cast(sent)); + + // Only set the bypass flag if we've activated DTLS. + int flags = (certificate_ && srtp) ? PF_SRTP_BYPASS : 0; + AsyncSocketPacketOptions packet_options; + packet_options.packet_id = kFakePacketId; + int rv = dtls_transport_->SendPacket(packet.get(), size, packet_options, + flags); + ASSERT_GT(rv, 0); + ASSERT_EQ(size, static_cast(rv)); + ++sent; + } while (sent < count); + } + + int SendInvalidSrtpPacket(size_t size) { + std::unique_ptr packet(new char[size]); + // Fill the packet with 0 to form an invalid SRTP packet. + memset(packet.get(), 0, size); + + AsyncSocketPacketOptions packet_options; + return dtls_transport_->SendPacket(packet.get(), size, packet_options, + PF_SRTP_BYPASS); + } + + void ExpectPackets(size_t size) { + packet_size_ = size; + received_.clear(); + } + + size_t NumPacketsReceived() { return received_.size(); } + + // Inverse of SendPackets. 
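+  // Packet layout assumed here (as produced by SendPackets() above): byte 0
+  // is either kRtpLeadByte (0x80) or 0x00, bytes 8..11 (kPacketNumOffset)
+  // hold the packet number in big-endian order, and every byte from
+  // kPacketHeaderLen (12) onward equals the packet number's low 8 bits.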
+ bool VerifyPacket(ArrayView payload, uint32_t* out_num) { + const uint8_t* data = payload.data(); + size_t size = payload.size(); + + if (size != packet_size_ || (data[0] != 0 && (data[0]) != 0x80)) { + return false; + } + uint32_t packet_num = GetBE32(data + kPacketNumOffset); + for (size_t i = kPacketHeaderLen; i < size; ++i) { + if (data[i] != (packet_num & 0xff)) { + return false; + } + } + if (out_num) { + *out_num = packet_num; + } + return true; + } + bool VerifyEncryptedPacket(const uint8_t* data, size_t size) { + // This is an encrypted data packet; let's make sure it's mostly random; + // less than 10% of the bytes should be equal to the cleartext packet. + if (size <= packet_size_) { + return false; + } + uint32_t packet_num = GetBE32(data + kPacketNumOffset); + int num_matches = 0; + for (size_t i = kPacketNumOffset; i < size; ++i) { + if (data[i] == (packet_num & 0xff)) { + ++num_matches; + } + } + return (num_matches < ((static_cast(size) - 5) / 10)); + } + + // Transport callbacks + void set_writable_callback(absl::AnyInvocable func) { + writable_func_ = std::move(func); + } + void OnTransportWritableState(PacketTransportInternal* transport) { + RTC_LOG(LS_INFO) << name_ << ": Transport '" << transport->transport_name() + << "' is writable"; + if (writable_func_) { + writable_func_(); + } + } + + void OnTransportReadPacket(PacketTransportInternal* /* transport */, + const ReceivedIpPacket& packet) { + uint32_t packet_num = 0; + ASSERT_TRUE(VerifyPacket(packet.payload(), &packet_num)); + received_.insert(packet_num); + switch (packet.decryption_info()) { + case ReceivedIpPacket::kSrtpEncrypted: + ASSERT_TRUE(certificate_ && IsRtpLeadByte(packet.payload()[0])); + break; + case ReceivedIpPacket::kDtlsDecrypted: + ASSERT_TRUE(certificate_ && !IsRtpLeadByte(packet.payload()[0])); + break; + case ReceivedIpPacket::kNotDecrypted: + ASSERT_FALSE(certificate_); + break; + } + } + + void OnTransportSentPacket(PacketTransportInternal* /* transport */, + const SentPacketInfo& sent_packet) { + sent_packet_ = sent_packet; + } + + SentPacketInfo sent_packet() const { return sent_packet_; } + + // Hook into the raw packet stream to make sure DTLS packets are encrypted. + void OnFakeIceTransportReadPacket(PacketTransportInternal* /* transport */, + const ReceivedIpPacket& packet) { + // Packets should not be decrypted on the underlying Transport packets. + ASSERT_EQ(packet.decryption_info(), ReceivedIpPacket::kNotDecrypted); + + // Look at the handshake packets to see what role we played. + // Check that non-handshake packets are DTLS data or SRTP bypass. 
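+    // The offsets below assume a plaintext DTLS record header of 13 bytes
+    // (content type, version, epoch, sequence number, length), so data[13]
+    // is the handshake message type and a value of 2 means server_hello; a
+    // first byte of 26 is the DTLS 1.3 ACK content type (RFC 9147).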
+ const uint8_t* data = packet.payload().data(); + if (IsDtlsHandshakePacket(packet.payload())) { + if (IsDtlsClientHelloPacket(packet.payload())) { + ++received_dtls_client_hellos_; + } else if (data[13] == 2) { + ++received_dtls_server_hellos_; + } + } else if (data[0] == 26) { + RTC_LOG(LS_INFO) << "Found DTLS ACK"; + } else if (dtls_transport_->IsDtlsActive()) { + if (IsRtpLeadByte(data[0])) { + ASSERT_TRUE(VerifyPacket(packet.payload(), NULL)); + } else if (packet_size_ && packet.payload().size() >= packet_size_) { + ASSERT_TRUE(VerifyEncryptedPacket(data, packet.payload().size())); + } + } + } + + absl::string_view name() { return name_; } + + private: + std::string name_; + scoped_refptr certificate_; + std::unique_ptr fake_ice_transport_; + std::unique_ptr dtls_transport_; + size_t packet_size_ = 0u; + std::set received_; + SSLProtocolVersion ssl_max_version_ = SSL_PROTOCOL_DTLS_12; + int received_dtls_client_hellos_ = 0; + int received_dtls_server_hellos_ = 0; + SentPacketInfo sent_packet_; + absl::AnyInvocable writable_func_; + int async_delay_ms_ = 100; + bool pqc_ = false; +}; + +// Base class for DtlsTransportInternalImplTest and DtlsEventOrderingTest, which +// inherit from different variants of ::testing::Test. +// +// Note that this test always uses a FakeClock, due to the `fake_clock_` member +// variable. +class DtlsTransportInternalImplTestBase { + public: + DtlsTransportInternalImplTestBase() + : client1_("P1"), client2_("P2"), use_dtls_(false) { + start_time_ns_ = fake_clock_.TimeNanos(); + } + + void SetPqc(bool value) { + client1_.SetPqc(value); + client2_.SetPqc(value); + } + + void SetMaxProtocolVersions(SSLProtocolVersion c1, SSLProtocolVersion c2) { + client1_.SetupMaxProtocolVersion(c1); + client2_.SetupMaxProtocolVersion(c2); + } + + // If not called, DtlsTransportInternalImpl will be used in SRTP bypass mode. + void PrepareDtls(KeyType key_type) { + client1_.CreateCertificate(key_type); + client2_.CreateCertificate(key_type); + use_dtls_ = true; + } + + // This test negotiates DTLS parameters before the underlying transports are + // writable. DtlsEventOrderingTest is responsible for exercising differerent + // orderings. + bool Connect(bool client1_server = true) { + Negotiate(client1_server); + EXPECT_TRUE(client1_.Connect(&client2_, false)); + + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return client1_.dtls_transport()->writable() && + client2_.dtls_transport()->writable(); + }, + IsTrue(), + {.timeout = TimeDelta::Millis(kTimeout), .clock = &fake_clock_}), + IsRtcOk()); + if (!client1_.dtls_transport()->writable() || + !client2_.dtls_transport()->writable()) + return false; + + // Check that we used the right roles. + if (use_dtls_) { + client1_.CheckRole(client1_server ? SSL_SERVER : SSL_CLIENT); + client2_.CheckRole(client1_server ? SSL_CLIENT : SSL_SERVER); + } + + if (use_dtls_) { + // Check that we negotiated the right ciphers. Since GCM ciphers are not + // negotiated by default, we should end up with kSrtpAes128CmSha1_80. + client1_.CheckSrtp(kSrtpAes128CmSha1_80); + client2_.CheckSrtp(kSrtpAes128CmSha1_80); + } else { + // If DTLS isn't actually being used, GetSrtpCryptoSuite should return + // false. 
+ client1_.CheckSrtp(kSrtpInvalidCryptoSuite); + client2_.CheckSrtp(kSrtpInvalidCryptoSuite); + } + + client1_.CheckSsl(); + client2_.CheckSsl(); + + return true; + } + + void Negotiate(bool client1_server = true) { + client1_.SetupTransports(ICEROLE_CONTROLLING); + client2_.SetupTransports(ICEROLE_CONTROLLED); + client1_.dtls_transport()->SetDtlsRole(client1_server ? SSL_SERVER + : SSL_CLIENT); + client2_.dtls_transport()->SetDtlsRole(client1_server ? SSL_CLIENT + : SSL_SERVER); + if (client2_.certificate()) { + SetRemoteFingerprintFromCert(client1_.dtls_transport(), + client2_.certificate()); + } + if (client1_.certificate()) { + SetRemoteFingerprintFromCert(client2_.dtls_transport(), + client1_.certificate()); + } + } + + void TestTransfer(size_t size, size_t count, bool srtp) { + RTC_LOG(LS_INFO) << "Expect packets, size=" << size; + client2_.ExpectPackets(size); + client1_.SendPackets(size, count, srtp); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return client2_.NumPacketsReceived(); }, Eq(count), + {.timeout = TimeDelta::Millis(kTimeout), .clock = &fake_clock_}), + IsRtcOk()); + } + + void AddPacketLogging() { + client1_.fake_ice_transport()->set_packet_recv_filter( + [&](auto packet, auto timestamp_us) { + return LogRecv(client1_.name(), packet); + }); + client2_.fake_ice_transport()->set_packet_recv_filter( + [&](auto packet, auto timestamp_us) { + return LogRecv(client2_.name(), packet); + }); + client1_.set_writable_callback([&]() {}); + client2_.set_writable_callback([&]() {}); + + client1_.fake_ice_transport()->set_packet_send_filter( + [&](auto data, auto len, auto options, auto flags) { + return LogSend(client1_.name(), /* drop=*/false, data, len); + }); + client2_.fake_ice_transport()->set_packet_send_filter( + [&](auto data, auto len, auto options, auto flags) { + return LogSend(client2_.name(), /* drop=*/false, data, len); + }); + } + + void ClearPacketFilters() { + client1_.fake_ice_transport()->set_packet_send_filter(nullptr); + client2_.fake_ice_transport()->set_packet_send_filter(nullptr); + client1_.fake_ice_transport()->set_packet_recv_filter(nullptr); + client2_.fake_ice_transport()->set_packet_recv_filter(nullptr); + } + + bool LogRecv(absl::string_view name, const CopyOnWriteBuffer& packet) { + auto timestamp_ms = (fake_clock_.TimeNanos() - start_time_ns_) / 1000000; + RTC_LOG(LS_INFO) << "time=" << timestamp_ms << " : " << name + << ": ReceivePacket packet len=" << packet.size() + << ", data[0]: " << static_cast(packet.data()[0]); + return false; + } + + bool LogSend(absl::string_view name, + bool drop, + const char* data, + size_t len) { + auto timestamp_ms = (fake_clock_.TimeNanos() - start_time_ns_) / 1000000; + if (drop) { + RTC_LOG(LS_INFO) << "time=" << timestamp_ms << " : " << name + << ": dropping packet len=" << len + << ", data[0]: " << static_cast(data[0]); + } else { + RTC_LOG(LS_INFO) << "time=" << timestamp_ms << " : " << name + << ": SendPacket, len=" << len + << ", data[0]: " << static_cast(data[0]); + } + return drop; + } + + template + bool WaitUntil(Fn func) { + return ::webrtc::WaitUntil( + func, IsTrue(), + {.timeout = TimeDelta::Millis(kTimeout), .clock = &fake_clock_}) + .ok(); + } + + protected: + AutoThread main_thread_; + ScopedFakeClock fake_clock_; + DtlsTestClient client1_; + DtlsTestClient client2_; + bool use_dtls_; + bool pqc_ = false; + uint64_t start_time_ns_; + SSLProtocolVersion ssl_expected_version_; +}; + +class DtlsTransportInternalImplTest : public DtlsTransportInternalImplTestBase, + public ::testing::Test {}; + +// 
Connect without DTLS, and transfer RTP data. +TEST_F(DtlsTransportInternalImplTest, TestTransferRtp) { + ASSERT_TRUE(Connect()); + TestTransfer(1000, 100, /*srtp=*/false); +} + +// Test that the SignalSentPacket signal is wired up. +TEST_F(DtlsTransportInternalImplTest, TestSignalSentPacket) { + ASSERT_TRUE(Connect()); + // Sanity check default value (-1). + ASSERT_EQ(client1_.sent_packet().send_time_ms, -1); + TestTransfer(1000, 100, false); + // Check that we get the expected fake packet ID, and a time of 0 from the + // fake clock. + EXPECT_EQ(kFakePacketId, client1_.sent_packet().packet_id); + EXPECT_GE(client1_.sent_packet().send_time_ms, 0); +} + +// Connect without DTLS, and transfer SRTP data. +TEST_F(DtlsTransportInternalImplTest, TestTransferSrtp) { + ASSERT_TRUE(Connect()); + TestTransfer(1000, 100, /*srtp=*/true); +} + +// Connect with DTLS, and transfer data over DTLS. +TEST_F(DtlsTransportInternalImplTest, TestTransferDtls) { + PrepareDtls(KT_DEFAULT); + ASSERT_TRUE(Connect()); + TestTransfer(1000, 100, /*srtp=*/false); +} + +// Connect with DTLS, combine multiple DTLS records into one packet. +// Our DTLS implementation doesn't do this, but other implementations may; +// see https://tools.ietf.org/html/rfc6347#section-4.1.1. +// This has caused interoperability problems with ORTCLib in the past. +TEST_F(DtlsTransportInternalImplTest, TestTransferDtlsCombineRecords) { + PrepareDtls(KT_DEFAULT); + ASSERT_TRUE(Connect()); + // Our DTLS implementation always sends one record per packet, so to simulate + // an endpoint that sends multiple records per packet, we configure the fake + // ICE transport to combine every two consecutive packets into a single + // packet. + FakeIceTransport* transport = client1_.fake_ice_transport(); + transport->combine_outgoing_packets(true); + TestTransfer(500, 100, /*srtp=*/false); +} + +TEST_F(DtlsTransportInternalImplTest, KeyingMaterialExporter) { + PrepareDtls(KT_DEFAULT); + ASSERT_TRUE(Connect()); + + int crypto_suite; + EXPECT_TRUE(client1_.dtls_transport()->GetSrtpCryptoSuite(&crypto_suite)); + int key_len; + int salt_len; + EXPECT_TRUE(GetSrtpKeyAndSaltLengths(crypto_suite, &key_len, &salt_len)); + ZeroOnFreeBuffer client1_out(2 * (key_len + salt_len)); + ZeroOnFreeBuffer client2_out(2 * (key_len + salt_len)); + EXPECT_TRUE(client1_.dtls_transport()->ExportSrtpKeyingMaterial(client1_out)); + EXPECT_TRUE(client2_.dtls_transport()->ExportSrtpKeyingMaterial(client2_out)); + EXPECT_EQ(client1_out, client2_out); +} + +enum HandshakeTestEvent { + EV_CLIENT_SEND = 0, + EV_SERVER_SEND = 1, + EV_CLIENT_RECV = 2, + EV_SERVER_RECV = 3, + EV_CLIENT_WRITABLE = 4, + EV_SERVER_WRITABLE = 5, + + EV_CLIENT_SEND_DROPPED = 6, + EV_SERVER_SEND_DROPPED = 7, +}; + +template +void AbslStringify(Sink& sink, HandshakeTestEvent event) { + switch (event) { + case EV_CLIENT_SEND: + sink.Append("C-SEND"); + return; + case EV_SERVER_SEND: + sink.Append("S-SEND"); + return; + case EV_CLIENT_RECV: + sink.Append("C-RECV"); + return; + case EV_SERVER_RECV: + sink.Append("S-RECV"); + return; + case EV_CLIENT_WRITABLE: + sink.Append("C-WRITABLE"); + return; + case EV_SERVER_WRITABLE: + sink.Append("S-WRITABLE"); + return; + case EV_CLIENT_SEND_DROPPED: + sink.Append("C-SEND-DROPPED"); + return; + case EV_SERVER_SEND_DROPPED: + sink.Append("S-SEND-DROPPED"); + return; + } +} + +static const std::vector dtls_12_handshake_events{ + // Flight 1 + EV_CLIENT_SEND, + EV_SERVER_RECV, + EV_SERVER_SEND, + EV_CLIENT_RECV, + + // Flight 2 + EV_CLIENT_SEND, + EV_SERVER_RECV, + 
EV_SERVER_SEND, + EV_SERVER_WRITABLE, + EV_CLIENT_RECV, + EV_CLIENT_WRITABLE, +}; + +static const std::vector dtls_13_handshake_events{ + // Flight 1 + EV_CLIENT_SEND, + EV_SERVER_RECV, + EV_SERVER_SEND, + EV_CLIENT_RECV, + + // Flight 2 + EV_CLIENT_SEND, + EV_CLIENT_WRITABLE, + EV_SERVER_RECV, + EV_SERVER_SEND, + EV_SERVER_WRITABLE, +}; + +static const std::vector dtls_pqc_handshake_events{ + // Flight 1 + EV_CLIENT_SEND, + EV_CLIENT_SEND, + EV_SERVER_RECV, + EV_SERVER_RECV, + EV_SERVER_SEND, + EV_SERVER_SEND, + EV_CLIENT_RECV, + EV_CLIENT_RECV, + + // Flight 2 + EV_CLIENT_SEND, + EV_CLIENT_WRITABLE, + EV_SERVER_RECV, + EV_SERVER_SEND, + EV_SERVER_WRITABLE, +}; + +static const struct { + int version_bytes; + const std::vector& events; +} kEventsPerVersion[] = { + {kDtls12VersionBytes, dtls_12_handshake_events}, + {kDtls13VersionBytes, dtls_13_handshake_events}, +}; + +struct EndpointConfig { + SSLProtocolVersion max_protocol_version; + bool dtls_in_stun = false; + std::optional ice_role; + std::optional ssl_role; + bool pqc = false; + + template + friend void AbslStringify(Sink& sink, const EndpointConfig& config) { + sink.Append("[ dtls: "); + sink.Append(config.ssl_role == SSL_SERVER ? "server/" : "client/"); + switch (config.max_protocol_version) { + case SSL_PROTOCOL_DTLS_10: + sink.Append("1.0"); + break; + case SSL_PROTOCOL_DTLS_12: + sink.Append("1.2"); + break; + case SSL_PROTOCOL_DTLS_13: + sink.Append("1.3"); + break; + default: + sink.Append(""); + break; + } + absl::Format(&sink, " dtls_in_stun: %u ice: ", config.dtls_in_stun); + sink.Append(config.ice_role == ICEROLE_CONTROLLED ? "controlled" + : "controlling"); + absl::Format(&sink, " pqc: %u", config.pqc); + sink.Append(" ]"); + } + + int GetFirstFlightPackets() const { + if (pqc) { + return 2; + } else { + return 1; + } + } +}; + +class DtlsTransportInternalImplVersionTest + : public DtlsTransportInternalImplTestBase, + public ::testing::TestWithParam< + std::tuple> { + public: + void Prepare(bool rtt_estimate = true) { + PrepareDtls(KT_DEFAULT); + const auto& config1 = std::get<0>(GetParam()); + const auto& config2 = std::get<1>(GetParam()); + SetMaxProtocolVersions(config1.max_protocol_version, + config2.max_protocol_version); + + client1_.set_async_delay(50); + client2_.set_async_delay(50); + + client1_.SetupTransports( + config1.ice_role.value_or(ICEROLE_CONTROLLING), rtt_estimate, + config1.pqc ? "WebRTC-EnableDtlsPqc/Enabled/" : ""); + client2_.SetupTransports( + config2.ice_role.value_or(ICEROLE_CONTROLLED), rtt_estimate, + config2.pqc ? "WebRTC-EnableDtlsPqc/Enabled/" : ""); + client1_.dtls_transport()->SetDtlsRole( + config1.ssl_role.value_or(SSL_CLIENT)); + client2_.dtls_transport()->SetDtlsRole( + config2.ssl_role.value_or(SSL_SERVER)); + + if (config1.dtls_in_stun) { + auto config = client1_.fake_ice_transport()->config(); + config.dtls_handshake_in_stun = true; + client1_.fake_ice_transport()->SetIceConfig(config); + } + if (config2.dtls_in_stun) { + auto config = client2_.fake_ice_transport()->config(); + config.dtls_handshake_in_stun = true; + client2_.fake_ice_transport()->SetIceConfig(config); + } + + SetRemoteFingerprintFromCert(client1_.dtls_transport(), + client2_.certificate()); + SetRemoteFingerprintFromCert(client2_.dtls_transport(), + client1_.certificate()); + } + + // Run DTLS handshake. 
+ // - store events in `events` + // - drop packets as specified in `packets_to_drop` + std::pair> + RunHandshake(std::set packets_to_drop) { + std::vector events; + client1_.fake_ice_transport()->set_packet_recv_filter( + [&](auto packet, auto timestamp_us) { + events.push_back(EV_CLIENT_RECV); + return LogRecv("client", packet); + }); + client2_.fake_ice_transport()->set_packet_recv_filter( + [&](auto packet, auto timestamp_us) { + events.push_back(EV_SERVER_RECV); + return LogRecv("server", packet); + }); + client1_.set_writable_callback( + [&]() { events.push_back(EV_CLIENT_WRITABLE); }); + client2_.set_writable_callback( + [&]() { events.push_back(EV_SERVER_WRITABLE); }); + + unsigned packet_num = 0; + client1_.fake_ice_transport()->set_packet_send_filter( + [&](auto data, auto len, auto options, auto flags) { + auto packet_type = options.info_signaled_after_sent.packet_type; + if (packet_type == PacketType::kIceConnectivityCheck || + packet_type == PacketType::kIceConnectivityCheckResponse) { + // Ignore stun pings for now. + return LogSend("client-stun", /* drop= */ false, data, len); + } + bool drop = packets_to_drop.find(packet_num) != packets_to_drop.end(); + packet_num++; + if (!drop) { + events.push_back(EV_CLIENT_SEND); + } else { + events.push_back(EV_CLIENT_SEND_DROPPED); + } + return LogSend("client", drop, data, len); + }); + client2_.fake_ice_transport()->set_packet_send_filter( + [&](auto data, auto len, auto options, auto flags) { + auto packet_type = options.info_signaled_after_sent.packet_type; + if (packet_type == PacketType::kIceConnectivityCheck || + packet_type == PacketType::kIceConnectivityCheckResponse) { + // Ignore stun pings for now. + return LogSend("server-stun", /* drop= */ false, data, len); + } + bool drop = packets_to_drop.find(packet_num) != packets_to_drop.end(); + packet_num++; + if (!drop) { + events.push_back(EV_SERVER_SEND); + } else { + events.push_back(EV_SERVER_SEND_DROPPED); + } + return LogSend("server", drop, data, len); + }); + + EXPECT_TRUE(client1_.ConnectIceTransport(&client2_)); + client1_.SendIcePing(std::get<0>(GetParam()).GetFirstFlightPackets()); + client2_.SendIcePingConf(std::get<0>(GetParam()).GetFirstFlightPackets()); + client2_.SendIcePing(); + client1_.SendIcePingConf(); + + EXPECT_TRUE(WaitUntil([&] { + return client1_.dtls_transport()->writable() && + client2_.dtls_transport()->writable(); + })); + + ClearPacketFilters(); + + auto dtls_version_bytes = client1_.GetVersionBytes(); + EXPECT_EQ(dtls_version_bytes, client2_.GetVersionBytes()); + return std::make_pair(dtls_version_bytes.value_or(0), std::move(events)); + } + + int GetExpectedDtlsVersionBytes() { + int version = std::min( + static_cast(std::get<0>(GetParam()).max_protocol_version), + static_cast(std::get<1>(GetParam()).max_protocol_version)); + if (version == SSL_PROTOCOL_DTLS_13) { + return kDtls13VersionBytes; + } else { + return kDtls12VersionBytes; + } + } + + std::vector GetExpectedEvents(int dtls_version_bytes, + bool pqc = false) { + if (pqc) { + return dtls_pqc_handshake_events; + } + for (const auto e : kEventsPerVersion) { + if (e.version_bytes == dtls_version_bytes) { + return e.events; + } + } + return {}; + } +}; + +static const EndpointConfig kEndpointVariants[] = { + { + .max_protocol_version = SSL_PROTOCOL_DTLS_10, + .dtls_in_stun = false, + }, + { + .max_protocol_version = SSL_PROTOCOL_DTLS_12, + .dtls_in_stun = false, + }, + { + .max_protocol_version = SSL_PROTOCOL_DTLS_13, + .dtls_in_stun = false, + }, + { + .max_protocol_version = 
SSL_PROTOCOL_DTLS_13, + .dtls_in_stun = false, + .pqc = true, + }, + { + .max_protocol_version = SSL_PROTOCOL_DTLS_10, + .dtls_in_stun = true, + }, + { + .max_protocol_version = SSL_PROTOCOL_DTLS_12, + .dtls_in_stun = true, + }, + { + .max_protocol_version = SSL_PROTOCOL_DTLS_13, + .dtls_in_stun = true, + }, + { + .max_protocol_version = SSL_PROTOCOL_DTLS_13, + .dtls_in_stun = true, + .pqc = true, + }, +}; + +// Will test every combination of 1.0/1.2/1.3 on the client and server. +// DTLS will negotiate an effective version (the min of client & sewrver). +INSTANTIATE_TEST_SUITE_P( + DtlsTransportInternalImplVersionTest, + DtlsTransportInternalImplVersionTest, + ::testing::Combine(testing::ValuesIn(kEndpointVariants), + testing::ValuesIn(kEndpointVariants))); + +// Test that an acceptable cipher suite is negotiated when different versions +// of DTLS are supported. Note that it's IsAcceptableCipher that does the actual +// work. +TEST_P(DtlsTransportInternalImplVersionTest, CipherSuiteNegotiation) { + Prepare(); + ASSERT_TRUE(Connect()); +} + +TEST_P(DtlsTransportInternalImplVersionTest, HandshakeFlights) { + if (!SSLStreamAdapter::IsBoringSsl()) { + GTEST_SKIP() << "Needs boringssl."; + } + if (std::get<0>(GetParam()).dtls_in_stun || + (std::get<0>(GetParam()).dtls_in_stun && + std::get<1>(GetParam()).dtls_in_stun)) { + GTEST_SKIP() << "This test does not support dtls in stun"; + } + if ((std::get<0>(GetParam()).GetFirstFlightPackets() > 1) != + (std::get<1>(GetParam()).GetFirstFlightPackets() > 1)) { + GTEST_SKIP() << "This test does not support one sided pqc"; + } + bool pqc = std::get<0>(GetParam()).GetFirstFlightPackets() > 1; + + if (pqc && std::get<1>(GetParam()).dtls_in_stun) { + // TODO(jonaso,webrtc:367395350): Remove once we have more clever MTU + // handling. + GTEST_SKIP() << "This test does not support pqc with dtls-in-stun."; + } + + Prepare(); + auto [dtls_version_bytes, events] = RunHandshake({}); + + RTC_LOG(LS_INFO) << "Verifying events with ssl version bytes= " + << dtls_version_bytes; + auto expect = GetExpectedEvents(dtls_version_bytes, pqc); + EXPECT_EQ(events, expect); +} + +TEST_P(DtlsTransportInternalImplVersionTest, HandshakeLoseFirstClientPacket) { + if (!SSLStreamAdapter::IsBoringSsl()) { + GTEST_SKIP() << "Needs boringssl."; + } + if (std::get<0>(GetParam()).dtls_in_stun || + (std::get<0>(GetParam()).dtls_in_stun && + std::get<1>(GetParam()).dtls_in_stun)) { + GTEST_SKIP() << "This test does not support dtls in stun"; + } + if (std::get<0>(GetParam()).GetFirstFlightPackets() > 1) { + GTEST_SKIP() << "This test does not support pqc"; + } + + Prepare(); + auto [dtls_version_bytes, events] = RunHandshake({/* packet_num= */ 0}); + + auto expect = GetExpectedEvents(dtls_version_bytes); + + // If first packet is lost...it is simply retransmitted by client, + // nothing else changes. 
+ expect.insert(expect.begin(), EV_CLIENT_SEND_DROPPED); + + EXPECT_EQ(events, expect); +} + +TEST_P(DtlsTransportInternalImplVersionTest, + PqcHandshakeLoseFirstClientPacket) { + if (!SSLStreamAdapter::IsBoringSsl()) { + GTEST_SKIP() << "Needs boringssl."; + } + if (std::get<0>(GetParam()).dtls_in_stun || + std::get<1>(GetParam()).dtls_in_stun) { + GTEST_SKIP() << "This test does not support dtls in stun"; + } + if (std::get<0>(GetParam()).GetFirstFlightPackets() == 1 || + std::get<1>(GetParam()).GetFirstFlightPackets() == 1) { + GTEST_SKIP() << "This test need not support pqc"; + } + + Prepare(); + auto [dtls_version_bytes, events] = RunHandshake({/* packet_num= */ 0}); + + const std::vector expect = { + EV_CLIENT_SEND_DROPPED, // p1 + EV_CLIENT_SEND, // p2 + EV_SERVER_RECV, // p2 + + EV_CLIENT_SEND, // p1 (retransmit) + EV_CLIENT_SEND, // p2 (retransmit) + + EV_SERVER_RECV, // p1 + EV_SERVER_SEND, EV_SERVER_SEND, + EV_SERVER_RECV, // p2 (retransmit) + EV_CLIENT_RECV, EV_CLIENT_RECV, + + // Flight 2 + EV_CLIENT_SEND, EV_CLIENT_WRITABLE, + + EV_SERVER_SEND, // unknown?? + + EV_SERVER_RECV, EV_SERVER_SEND, EV_SERVER_WRITABLE, + + EV_CLIENT_RECV, // unknown?? + }; + + EXPECT_EQ(events, expect); +} + +TEST_P(DtlsTransportInternalImplVersionTest, + PqcHandshakeLoseSecondClientPacket) { + if (!SSLStreamAdapter::IsBoringSsl()) { + GTEST_SKIP() << "Needs boringssl."; + } + if (std::get<0>(GetParam()).dtls_in_stun || + std::get<1>(GetParam()).dtls_in_stun) { + GTEST_SKIP() << "This test does not support dtls in stun"; + } + if (std::get<0>(GetParam()).GetFirstFlightPackets() == 1 || + std::get<1>(GetParam()).GetFirstFlightPackets() == 1) { + GTEST_SKIP() << "This test need not support pqc"; + } + + Prepare(); + auto [dtls_version_bytes, events] = RunHandshake({/* packet_num= */ 1}); + + const std::vector expect = { + EV_CLIENT_SEND, // p1 + EV_CLIENT_SEND_DROPPED, // p2 + EV_SERVER_RECV, // p1 + + EV_CLIENT_SEND, // p1 (retransmit) + EV_CLIENT_SEND, // p2 (retransmit) + + EV_SERVER_RECV, // p1 + EV_SERVER_RECV, // p2 + EV_SERVER_SEND, + EV_SERVER_SEND, + EV_CLIENT_RECV, + EV_CLIENT_RECV, + + // Flight 2 + EV_CLIENT_SEND, + EV_CLIENT_WRITABLE, + + EV_SERVER_RECV, + EV_SERVER_SEND, + EV_SERVER_WRITABLE, + }; + + EXPECT_EQ(events, expect); +} + +TEST_P(DtlsTransportInternalImplVersionTest, HandshakeLoseSecondClientPacket) { + if (!SSLStreamAdapter::IsBoringSsl()) { + GTEST_SKIP() << "Needs boringssl."; + } + if (std::get<0>(GetParam()).dtls_in_stun || + (std::get<0>(GetParam()).dtls_in_stun && + std::get<1>(GetParam()).dtls_in_stun)) { + GTEST_SKIP() << "This test does not support dtls in stun"; + } + if (std::get<0>(GetParam()).GetFirstFlightPackets() > 1) { + GTEST_SKIP() << "This test does not support pqc"; + } + + Prepare(); + auto [dtls_version_bytes, events] = RunHandshake({/* packet_num= */ 2}); + + std::vector expect; + + switch (dtls_version_bytes) { + case kDtls12VersionBytes: + expect = { + // Flight 1 + EV_CLIENT_SEND, + EV_SERVER_RECV, + EV_SERVER_SEND, + EV_CLIENT_RECV, + + // Flight 2 + EV_CLIENT_SEND_DROPPED, + + // Server retransmit. + EV_SERVER_SEND, + // Client retransmit. + EV_CLIENT_SEND, + // Client receive retransmit => Do nothing, has already retransmitted. + EV_CLIENT_RECV, + // Handshake resume. 
+ EV_SERVER_RECV, + EV_SERVER_SEND, + EV_SERVER_WRITABLE, + EV_CLIENT_RECV, + EV_CLIENT_WRITABLE, + }; + break; + case kDtls13VersionBytes: + expect = { + // Flight 1 + EV_CLIENT_SEND, + EV_SERVER_RECV, + EV_SERVER_SEND, + EV_CLIENT_RECV, + + // Flight 2 + EV_CLIENT_SEND_DROPPED, + // Client doesn't know packet it is dropped, so it becomes writable. + EV_CLIENT_WRITABLE, + + // Server retransmit. + EV_SERVER_SEND, + // Client retransmit. + EV_CLIENT_SEND, + + // Client receive retransmit => Do nothing, has already retransmitted. + EV_CLIENT_RECV, + // Handshake resume. + EV_SERVER_RECV, + EV_SERVER_SEND, + EV_SERVER_WRITABLE, + }; + break; + default: + FAIL() << "Unknown dtls version bytes: " << dtls_version_bytes; + } + EXPECT_EQ(events, expect); +} + +// Connect with DTLS, negotiating DTLS-SRTP, and transfer SRTP using bypass. +TEST_F(DtlsTransportInternalImplTest, TestTransferDtlsSrtp) { + PrepareDtls(KT_DEFAULT); + ASSERT_TRUE(Connect()); + TestTransfer(1000, 100, /*srtp=*/true); +} + +// Connect with DTLS-SRTP, transfer an invalid SRTP packet, and expects -1 +// returned. +TEST_F(DtlsTransportInternalImplTest, TestTransferDtlsInvalidSrtpPacket) { + PrepareDtls(KT_DEFAULT); + ASSERT_TRUE(Connect()); + EXPECT_EQ(-1, client1_.SendInvalidSrtpPacket(100)); +} + +// Create a single transport with DTLS, and send normal data and SRTP data on +// it. +TEST_F(DtlsTransportInternalImplTest, TestTransferDtlsSrtpDemux) { + PrepareDtls(KT_DEFAULT); + ASSERT_TRUE(Connect()); + TestTransfer(1000, 100, /*srtp=*/false); + TestTransfer(1000, 100, /*srtp=*/true); +} + +// Test transferring when the "answerer" has the server role. +TEST_F(DtlsTransportInternalImplTest, TestTransferDtlsSrtpAnswererIsPassive) { + PrepareDtls(KT_DEFAULT); + ASSERT_TRUE(Connect(/*client1_server=*/false)); + TestTransfer(1000, 100, /*srtp=*/true); +} + +// Test that renegotiation (setting same role and fingerprint again) can be +// started before the clients become connected in the first negotiation. +TEST_F(DtlsTransportInternalImplTest, TestRenegotiateBeforeConnect) { + PrepareDtls(KT_DEFAULT); + // Note: This is doing the same thing Connect normally does, minus some + // additional checks not relevant for this test. + Negotiate(); + Negotiate(); + EXPECT_TRUE(client1_.Connect(&client2_, false)); + EXPECT_TRUE(WaitUntil([&] { + return client1_.dtls_transport()->writable() && + client2_.dtls_transport()->writable(); + })); + TestTransfer(1000, 100, true); +} + +// Test Certificates state after negotiation but before connection. +TEST_F(DtlsTransportInternalImplTest, TestCertificatesBeforeConnect) { + PrepareDtls(KT_DEFAULT); + Negotiate(); + + // After negotiation, each side has a distinct local certificate, but still no + // remote certificate, because connection has not yet occurred. + auto certificate1 = client1_.dtls_transport()->GetLocalCertificate(); + auto certificate2 = client2_.dtls_transport()->GetLocalCertificate(); + ASSERT_NE(certificate1->GetSSLCertificate().ToPEMString(), + certificate2->GetSSLCertificate().ToPEMString()); + ASSERT_FALSE(client1_.dtls_transport()->GetRemoteSSLCertChain()); + ASSERT_FALSE(client2_.dtls_transport()->GetRemoteSSLCertChain()); +} + +// Test Certificates state after connection. +TEST_F(DtlsTransportInternalImplTest, TestCertificatesAfterConnect) { + PrepareDtls(KT_DEFAULT); + ASSERT_TRUE(Connect()); + + // After connection, each side has a distinct local certificate. 
+ auto certificate1 = client1_.dtls_transport()->GetLocalCertificate(); + auto certificate2 = client2_.dtls_transport()->GetLocalCertificate(); + ASSERT_NE(certificate1->GetSSLCertificate().ToPEMString(), + certificate2->GetSSLCertificate().ToPEMString()); + + // Each side's remote certificate is the other side's local certificate. + std::unique_ptr remote_cert1 = + client1_.dtls_transport()->GetRemoteSSLCertChain(); + ASSERT_TRUE(remote_cert1); + ASSERT_EQ(1u, remote_cert1->GetSize()); + ASSERT_EQ(remote_cert1->Get(0).ToPEMString(), + certificate2->GetSSLCertificate().ToPEMString()); + std::unique_ptr remote_cert2 = + client2_.dtls_transport()->GetRemoteSSLCertChain(); + ASSERT_TRUE(remote_cert2); + ASSERT_EQ(1u, remote_cert2->GetSize()); + ASSERT_EQ(remote_cert2->Get(0).ToPEMString(), + certificate1->GetSSLCertificate().ToPEMString()); +} + +// Test that packets are retransmitted according to the expected schedule. +// Each time a timeout occurs, the retransmission timer should be doubled up to +// 60 seconds. The timer defaults to 1 second, but for WebRTC we should be +// initializing it to 50ms. +TEST_F(DtlsTransportInternalImplTest, TestRetransmissionSchedule) { + if (!SSLStreamAdapter::IsBoringSsl()) { + // We can only change the retransmission schedule with a recently-added + // BoringSSL API. Skip the test if not built with BoringSSL. + GTEST_SKIP() << "Needs boringssl."; + } + PrepareDtls(KT_DEFAULT); + + // This test is written with assumption of 0 delay + // which affect the hard coded schedule below. + client1_.set_async_delay(0); + client2_.set_async_delay(0); + + // Exchange fingerprints and set SSL roles. + Negotiate(); + + // Make client2_ writable, but not client1_. + // This means client1_ will send DTLS client hellos but get no response. + EXPECT_TRUE(client2_.Connect(&client1_, true)); + EXPECT_TRUE( + WaitUntil([&] { return client2_.fake_ice_transport()->writable(); })); + + // Wait for the first client hello to be sent. + EXPECT_TRUE( + WaitUntil([&] { return client1_.received_dtls_client_hellos(); })); + EXPECT_FALSE(client1_.fake_ice_transport()->writable()); + + static int timeout_schedule_ms[] = {50, 100, 200, 400, 800, 1600, + 3200, 6400, 12800, 25600, 51200, 60000}; + + int expected_hellos = 1; + for (size_t i = 0; + i < (sizeof(timeout_schedule_ms) / sizeof(timeout_schedule_ms[0])); + ++i) { + // For each expected retransmission time, advance the fake clock a + // millisecond before the expected time and verify that no unexpected + // retransmissions were sent. Then advance it the final millisecond and + // verify that the expected retransmission was sent. + fake_clock_.AdvanceTime(TimeDelta::Millis(timeout_schedule_ms[i] - 1)); + EXPECT_EQ(expected_hellos, client1_.received_dtls_client_hellos()); + fake_clock_.AdvanceTime(TimeDelta::Millis(1)); + EXPECT_EQ(++expected_hellos, client1_.received_dtls_client_hellos()); + } +} + +// The following events can occur in many different orders: +// 1. Caller receives remote fingerprint. +// 2. Caller is writable. +// 3. Caller receives ClientHello. +// 4. DTLS handshake finishes. +// +// The tests below cover all causally consistent permutations of these events; +// the caller must be writable and receive a ClientHello before the handshake +// finishes, but otherwise any ordering is possible. +// +// For each permutation, the test verifies that a connection is established and +// fingerprint verified without any DTLS packet needing to be retransmitted. 
+// +// Each permutation is also tested with valid and invalid fingerprints, +// ensuring that the handshake fails with an invalid fingerprint. +enum DtlsTransportInternalImplEvent { + CALLER_RECEIVES_FINGERPRINT, + CALLER_WRITABLE, + CALLER_RECEIVES_CLIENTHELLO, + HANDSHAKE_FINISHES +}; + +class DtlsEventOrderingTest + : public DtlsTransportInternalImplTestBase, + public ::testing::TestWithParam< + ::testing::tuple, + bool /* valid_fingerprint */, + SSLProtocolVersion, + bool /* pqc */>> { + protected: + // If `valid_fingerprint` is false, the caller will receive a fingerprint + // that doesn't match the callee's certificate, so the handshake should fail. + void TestEventOrdering( + const std::vector& events, + bool valid_fingerprint) { + bool pqc = ::testing::get<3>(GetParam()); + if (pqc && ::testing::get<2>(GetParam()) != SSL_PROTOCOL_DTLS_13) { + GTEST_SKIP() << "PQC requires DTLS1.3"; + } + + SetPqc(::testing::get<3>(GetParam())); + SetMaxProtocolVersions(::testing::get<2>(GetParam()), + ::testing::get<2>(GetParam())); + + // Pre-setup: Set local certificate on both caller and callee, and + // remote fingerprint on callee, but neither is writable and the caller + // doesn't have the callee's fingerprint. + PrepareDtls(KT_DEFAULT); + client1_.SetupTransports(ICEROLE_CONTROLLING); + client2_.SetupTransports(ICEROLE_CONTROLLED); + // Similar to how NegotiateOrdering works. + client1_.dtls_transport()->SetDtlsRole(SSL_SERVER); + client2_.dtls_transport()->SetDtlsRole(SSL_CLIENT); + SetRemoteFingerprintFromCert(client2_.dtls_transport(), + client1_.certificate()); + + for (DtlsTransportInternalImplEvent e : events) { + switch (e) { + case CALLER_RECEIVES_FINGERPRINT: + if (valid_fingerprint) { + SetRemoteFingerprintFromCert(client1_.dtls_transport(), + client2_.certificate()); + } else { + SetRemoteFingerprintFromCert(client1_.dtls_transport(), + client2_.certificate(), + true /*modify_digest*/); + } + break; + case CALLER_WRITABLE: + EXPECT_TRUE(client1_.Connect(&client2_, true)); + EXPECT_TRUE(WaitUntil( + [&] { return client1_.fake_ice_transport()->writable(); })); + break; + case CALLER_RECEIVES_CLIENTHELLO: + // Sanity check that a ClientHello hasn't already been received. + EXPECT_EQ(0, client1_.received_dtls_client_hellos()); + // Making client2_ writable will cause it to send the ClientHello. + EXPECT_TRUE(client2_.Connect(&client1_, true)); + EXPECT_TRUE(WaitUntil( + [&] { return client2_.fake_ice_transport()->writable(); })); + EXPECT_TRUE(WaitUntil( + [&] { return client1_.received_dtls_client_hellos() >= 1; })); + break; + case HANDSHAKE_FINISHES: + // Sanity check that the handshake hasn't already finished. + EXPECT_FALSE(client1_.dtls_transport()->IsDtlsConnected() || + client1_.dtls_transport()->dtls_state() == + DtlsTransportState::kFailed); + EXPECT_TRUE(WaitUntil([&] { + return client1_.dtls_transport()->IsDtlsConnected() || + client1_.dtls_transport()->dtls_state() == + DtlsTransportState::kFailed; + })); + break; + } + } + + DtlsTransportState expected_final_state = + valid_fingerprint ? DtlsTransportState::kConnected + : DtlsTransportState::kFailed; + EXPECT_TRUE(WaitUntil([&] { + return client1_.dtls_transport()->dtls_state() == expected_final_state; + })); + EXPECT_TRUE(WaitUntil([&] { + return client2_.dtls_transport()->dtls_state() == expected_final_state || + // Unlike BoringSSL, OpenSSL can not send a fatal alert to the peer + // so the peer will be stuck in kConnecting. 
+ (!SSLStreamAdapter::IsBoringSsl() && + expected_final_state == DtlsTransportState::kFailed && + client2_.dtls_transport()->dtls_state() == + DtlsTransportState::kConnecting); + })); + + // Transports should be writable iff there was a valid fingerprint. + EXPECT_EQ(valid_fingerprint, client1_.dtls_transport()->writable()); + EXPECT_EQ(valid_fingerprint, client2_.dtls_transport()->writable()); + + int count = pqc ? 2 : 1; + // Check that no hello needed to be retransmitted. + EXPECT_EQ(count, client1_.received_dtls_client_hellos()); + EXPECT_EQ(1, client2_.received_dtls_server_hellos()); + + if (valid_fingerprint) { + TestTransfer(1000, 100, false); + } + } +}; + +TEST_P(DtlsEventOrderingTest, TestEventOrdering) { + TestEventOrdering(::testing::get<0>(GetParam()), + ::testing::get<1>(GetParam())); +} + +INSTANTIATE_TEST_SUITE_P( + TestEventOrdering, + DtlsEventOrderingTest, + ::testing::Combine( + ::testing::Values( + std::vector{ + CALLER_RECEIVES_FINGERPRINT, CALLER_WRITABLE, + CALLER_RECEIVES_CLIENTHELLO, HANDSHAKE_FINISHES}, + std::vector{ + CALLER_WRITABLE, CALLER_RECEIVES_FINGERPRINT, + CALLER_RECEIVES_CLIENTHELLO, HANDSHAKE_FINISHES}, + std::vector{ + CALLER_WRITABLE, CALLER_RECEIVES_CLIENTHELLO, + CALLER_RECEIVES_FINGERPRINT, HANDSHAKE_FINISHES}, + std::vector{ + CALLER_WRITABLE, CALLER_RECEIVES_CLIENTHELLO, + HANDSHAKE_FINISHES, CALLER_RECEIVES_FINGERPRINT}, + std::vector{ + CALLER_RECEIVES_FINGERPRINT, CALLER_RECEIVES_CLIENTHELLO, + CALLER_WRITABLE, HANDSHAKE_FINISHES}, + std::vector{ + CALLER_RECEIVES_CLIENTHELLO, CALLER_RECEIVES_FINGERPRINT, + CALLER_WRITABLE, HANDSHAKE_FINISHES}, + std::vector{ + CALLER_RECEIVES_CLIENTHELLO, CALLER_WRITABLE, + CALLER_RECEIVES_FINGERPRINT, HANDSHAKE_FINISHES}, + std::vector{ + CALLER_RECEIVES_CLIENTHELLO, CALLER_WRITABLE, + HANDSHAKE_FINISHES, CALLER_RECEIVES_FINGERPRINT}), + ::testing::Bool(), + ::testing::Values(SSL_PROTOCOL_DTLS_12, SSL_PROTOCOL_DTLS_13), + ::testing::Bool())); + +class DtlsTransportInternalImplDtlsInStunTest + : public DtlsTransportInternalImplVersionTest { + public: + DtlsTransportInternalImplDtlsInStunTest() {} +}; + +std::vector> AllEndpointVariants() { + std::vector> v; + for (auto ice_role : {ICEROLE_CONTROLLING, ICEROLE_CONTROLLED}) { + for (auto ssl_role : {SSL_CLIENT, SSL_SERVER}) { + for (auto version1 : { + SSL_PROTOCOL_DTLS_12, + SSL_PROTOCOL_DTLS_13, + }) { + for (auto version2 : { + SSL_PROTOCOL_DTLS_12, + SSL_PROTOCOL_DTLS_13, + }) { + for (auto dtls_in_stun1 : {false, true}) { + for (auto dtls_in_stun2 : {false, true}) { + v.push_back(std::make_tuple( + EndpointConfig{ + .max_protocol_version = version1, + .dtls_in_stun = dtls_in_stun1, + .ice_role = ice_role, + .ssl_role = ssl_role, + }, + EndpointConfig{ + .max_protocol_version = version2, + .dtls_in_stun = dtls_in_stun2, + .ice_role = ice_role == ICEROLE_CONTROLLING + ? ICEROLE_CONTROLLED + : ICEROLE_CONTROLLING, + .ssl_role = + ssl_role == SSL_CLIENT ? 
SSL_SERVER : SSL_CLIENT, + })); + } + } + } + } + } + } + return v; +} + +TEST_P(DtlsTransportInternalImplDtlsInStunTest, Handshake1) { + Prepare(/* rtt_estimate= */ false); + AddPacketLogging(); + + RTC_LOG(LS_INFO) << "client1: " << std::get<0>(GetParam()); + RTC_LOG(LS_INFO) << "client2: " << std::get<1>(GetParam()); + + ASSERT_TRUE(client1_.ConnectIceTransport(&client2_)); + + for (int i = 1; i < 3; i++) { + client1_.SendIcePing(); + ASSERT_TRUE(WaitUntil([&] { + return client2_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_REQUEST) == i; + })); + client2_.SendIcePingConf(); + ASSERT_TRUE(WaitUntil([&] { + return client1_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_RESPONSE) == i; + })); + client2_.SendIcePing(); + ASSERT_TRUE(WaitUntil([&] { + return client1_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_REQUEST) == i; + })); + client1_.SendIcePingConf(); + ASSERT_TRUE(WaitUntil([&] { + return client2_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_RESPONSE) == i; + })); + if (client1_.dtls_transport()->writable() && + client2_.dtls_transport()->writable()) { + break; + } + } + + EXPECT_TRUE(WaitUntil([&] { + return client1_.dtls_transport()->writable() && + client2_.dtls_transport()->writable(); + })); + + EXPECT_TRUE(client1_.dtls_transport()->writable()); + EXPECT_TRUE(client2_.dtls_transport()->writable()); + + EXPECT_EQ(client1_.dtls_transport()->GetRetransmissionCount(), 0); + EXPECT_EQ(client2_.dtls_transport()->GetRetransmissionCount(), 0); + + ClearPacketFilters(); +} + +TEST_P(DtlsTransportInternalImplDtlsInStunTest, Handshake2) { + Prepare(/* rtt_estimate= */ false); + AddPacketLogging(); + + RTC_LOG(LS_INFO) << "client1: " << std::get<0>(GetParam()); + RTC_LOG(LS_INFO) << "client2: " << std::get<1>(GetParam()); + + ASSERT_TRUE(client1_.ConnectIceTransport(&client2_)); + + for (int i = 1; i < 3; i++) { + client1_.SendIcePing(); + client2_.SendIcePing(); + ASSERT_TRUE(WaitUntil([&] { + return client1_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_REQUEST) == i; + })); + ASSERT_TRUE(WaitUntil([&] { + return client2_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_REQUEST) == i; + })); + client1_.SendIcePingConf(); + client2_.SendIcePingConf(); + + ASSERT_TRUE(WaitUntil([&] { + return client1_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_RESPONSE) == i; + })); + ASSERT_TRUE(WaitUntil([&] { + return client2_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_RESPONSE) == i; + })); + if (client1_.dtls_transport()->writable() && + client2_.dtls_transport()->writable()) { + break; + } + } + + EXPECT_TRUE(WaitUntil([&] { + return client1_.dtls_transport()->writable() && + client2_.dtls_transport()->writable(); + })); + + EXPECT_TRUE(client1_.dtls_transport()->writable()); + EXPECT_TRUE(client2_.dtls_transport()->writable()); + + EXPECT_EQ(client1_.dtls_transport()->GetRetransmissionCount(), 0); + EXPECT_EQ(client2_.dtls_transport()->GetRetransmissionCount(), 0); + + ClearPacketFilters(); +} + +// Test scenario where DTLS is partially transferred with +// STUN and the "rest" of the handshake is transported +// by DtlsTransportInternalImpl. 
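+// With only one STUN ping/response round trip driven below, the handshake
+// (whether or not any of it is piggybacked on STUN) is not expected to be
+// complete yet, which the EXPECT_FALSE on writability checks; the transports
+// then finish the remaining flights directly and become writable.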
+TEST_P(DtlsTransportInternalImplDtlsInStunTest, PartiallyPiggybacked) { + Prepare(/* rtt_estimate= */ false); + AddPacketLogging(); + + RTC_LOG(LS_INFO) << "client1: " << std::get<0>(GetParam()); + RTC_LOG(LS_INFO) << "client2: " << std::get<1>(GetParam()); + + ASSERT_TRUE(client1_.ConnectIceTransport(&client2_)); + + for (int i = 1; i < 2; i++) { + client1_.SendIcePing(); + client2_.SendIcePing(); + ASSERT_TRUE(WaitUntil([&] { + return client1_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_REQUEST) == i; + })); + ASSERT_TRUE(WaitUntil([&] { + return client2_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_REQUEST) == i; + })); + client1_.SendIcePingConf(); + client2_.SendIcePingConf(); + + ASSERT_TRUE(WaitUntil([&] { + return client1_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_RESPONSE) == i; + })); + ASSERT_TRUE(WaitUntil([&] { + return client2_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_RESPONSE) == i; + })); + if (client1_.dtls_transport()->writable() && + client2_.dtls_transport()->writable()) { + break; + } + } + + EXPECT_FALSE(client1_.dtls_transport()->writable() && + client2_.dtls_transport()->writable()); + + EXPECT_TRUE(WaitUntil([&] { + return client1_.dtls_transport()->writable() && + client2_.dtls_transport()->writable(); + })); + + EXPECT_TRUE(client1_.dtls_transport()->writable()); + EXPECT_TRUE(client2_.dtls_transport()->writable()); + + EXPECT_EQ(client1_.dtls_transport()->GetRetransmissionCount(), 0); + EXPECT_EQ(client2_.dtls_transport()->GetRetransmissionCount(), 0); + + ClearPacketFilters(); +} + +TEST_P(DtlsTransportInternalImplDtlsInStunTest, + DtlsDoesNotSignalWritableUnlessIceWritableOnce) { + Prepare(/* rtt_estimate= */ false); + AddPacketLogging(); + + RTC_LOG(LS_INFO) << "client1: " << std::get<0>(GetParam()); + RTC_LOG(LS_INFO) << "client2: " << std::get<1>(GetParam()); + + ASSERT_TRUE(client1_.ConnectIceTransport(&client2_)); + + client1_.SendIcePing(); + ASSERT_TRUE(WaitUntil([&] { + return client2_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_REQUEST) == 1; + })); + client2_.SendIcePingConf(); + ASSERT_TRUE(WaitUntil([&] { + return client1_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_RESPONSE) == 1; + })); + client1_.SendIcePing(); + ASSERT_TRUE(WaitUntil([&] { + return client2_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_REQUEST) == 2; + })); + client2_.SendIcePingConf(); + ASSERT_TRUE(WaitUntil([&] { + return client1_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_RESPONSE) == 2; + })); + + bool dtls_in_stun = std::get<0>(GetParam()).dtls_in_stun && + std::get<1>(GetParam()).dtls_in_stun; + if (dtls_in_stun) { + ASSERT_TRUE(client1_.dtls_transport()->writable()); + } + // Ice has never been writable on client2. 
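+  // Even when the piggybacked handshake has already completed (client1 is
+  // writable above in the dtls_in_stun case), a DTLS transport must not
+  // report writable before its own ICE transport has been writable at least
+  // once; the assertion below checks this for client2.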
+ ASSERT_FALSE(client2_.dtls_transport()->writable()); + + client2_.SendIcePing(); + ASSERT_TRUE(WaitUntil([&] { + return client1_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_REQUEST) == 1; + })); + client1_.SendIcePingConf(); + ASSERT_TRUE(WaitUntil([&] { + return client2_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_RESPONSE) == 1; + })); + + EXPECT_TRUE(WaitUntil([&] { + return client1_.dtls_transport()->writable() && + client2_.dtls_transport()->writable(); + })); + + EXPECT_TRUE(client1_.dtls_transport()->writable()); + EXPECT_TRUE(client2_.dtls_transport()->writable()); + + if (dtls_in_stun) { + EXPECT_EQ(client1_.dtls_transport()->GetRetransmissionCount(), 0); + EXPECT_EQ(client2_.dtls_transport()->GetRetransmissionCount(), 0); + } + + ClearPacketFilters(); +} + +INSTANTIATE_TEST_SUITE_P(DtlsTransportInternalImplDtlsInStunTest, + DtlsTransportInternalImplDtlsInStunTest, + testing::ValuesIn(AllEndpointVariants())); + +class DtlsInStunTest : public DtlsTransportInternalImplDtlsInStunTest {}; + +std::vector> Dtls13WithDtlsInStun() { + return { + std::make_tuple( + EndpointConfig{ + .max_protocol_version = SSL_PROTOCOL_DTLS_13, + .dtls_in_stun = true, + .ice_role = ICEROLE_CONTROLLING, + .ssl_role = SSL_CLIENT, + .pqc = false, + }, + EndpointConfig{ + .max_protocol_version = SSL_PROTOCOL_DTLS_13, + .dtls_in_stun = true, + .ice_role = ICEROLE_CONTROLLED, + .ssl_role = SSL_SERVER, + .pqc = false, + }), + std::make_tuple( + EndpointConfig{ + .max_protocol_version = SSL_PROTOCOL_DTLS_13, + .dtls_in_stun = true, + .ice_role = ICEROLE_CONTROLLING, + .ssl_role = SSL_CLIENT, + .pqc = true, + }, + EndpointConfig{ + .max_protocol_version = SSL_PROTOCOL_DTLS_13, + .dtls_in_stun = true, + .ice_role = ICEROLE_CONTROLLED, + .ssl_role = SSL_SERVER, + .pqc = false, + }), + std::make_tuple( + EndpointConfig{ + .max_protocol_version = SSL_PROTOCOL_DTLS_13, + .dtls_in_stun = true, + .ice_role = ICEROLE_CONTROLLING, + .ssl_role = SSL_CLIENT, + .pqc = false, + }, + EndpointConfig{ + .max_protocol_version = SSL_PROTOCOL_DTLS_13, + .dtls_in_stun = true, + .ice_role = ICEROLE_CONTROLLED, + .ssl_role = SSL_SERVER, + .pqc = true, + }), + std::make_tuple( + EndpointConfig{ + .max_protocol_version = SSL_PROTOCOL_DTLS_13, + .dtls_in_stun = true, + .ice_role = ICEROLE_CONTROLLING, + .ssl_role = SSL_CLIENT, + .pqc = true, + }, + EndpointConfig{ + .max_protocol_version = SSL_PROTOCOL_DTLS_13, + .dtls_in_stun = true, + .ice_role = ICEROLE_CONTROLLED, + .ssl_role = SSL_SERVER, + .pqc = true, + }), + }; +} + +INSTANTIATE_TEST_SUITE_P(DtlsInStunTest, + DtlsInStunTest, + testing::ValuesIn(Dtls13WithDtlsInStun())); + +TEST_P(DtlsInStunTest, OptimalDtls13Handshake) { + if (!SSLStreamAdapter::IsBoringSsl()) { + GTEST_SKIP() << "Needs boringssl."; + } + + RTC_LOG(LS_INFO) << "client1: " << std::get<0>(GetParam()); + RTC_LOG(LS_INFO) << "client2: " << std::get<1>(GetParam()); + + int client1_first_flight_packets = + std::get<0>(GetParam()).GetFirstFlightPackets(); + int client2_first_flight_packets = + std::get<1>(GetParam()).GetFirstFlightPackets(); + + Prepare(/* rtt_estimate= */ true); + AddPacketLogging(); + + ASSERT_TRUE(client1_.ConnectIceTransport(&client2_)); + + client1_.SendIcePing(client1_first_flight_packets); + client2_.SendIcePing(client2_first_flight_packets); + + ASSERT_TRUE(WaitUntil([&] { + return client1_.fake_ice_transport()->GetCountOfReceivedStunMessages( + STUN_BINDING_REQUEST) == client2_first_flight_packets; + })); + ASSERT_TRUE(WaitUntil([&] { + return 
client2_.fake_ice_transport()->GetCountOfReceivedStunMessages(
+               STUN_BINDING_REQUEST) == client1_first_flight_packets;
+  }));
+
+  client2_.SendIcePingConf(client1_first_flight_packets);
+  client1_.SendIcePingConf(client2_first_flight_packets);
+
+  ASSERT_TRUE(WaitUntil([&] {
+    return client1_.fake_ice_transport()->GetCountOfReceivedStunMessages(
+               STUN_BINDING_RESPONSE) == client1_first_flight_packets;
+  }));
+  EXPECT_TRUE(client1_.dtls_transport()->writable());
+  ASSERT_TRUE(WaitUntil([&] {
+    return client2_.fake_ice_transport()->GetCountOfReceivedStunMessages(
+               STUN_BINDING_RESPONSE) == client2_first_flight_packets;
+  }));
+  EXPECT_FALSE(client2_.dtls_transport()->writable());
+
+  // Client1 now sends one more packet, which should make client2 (the server)
+  // writable as well. Wait for that packet to arrive.
+  int expected_packets =
+      1 + client2_.fake_ice_transport()->GetCountOfReceivedPackets();
+
+  EXPECT_TRUE(WaitUntil([&] {
+    return client2_.fake_ice_transport()->GetCountOfReceivedPackets() ==
+           expected_packets;
+  }));
+  EXPECT_TRUE(client2_.dtls_transport()->writable());
+
+  EXPECT_EQ(client1_.dtls_transport()->GetRetransmissionCount(), 0);
+  EXPECT_EQ(client2_.dtls_transport()->GetRetransmissionCount(), 0);
+
+  ClearPacketFilters();
+}
+
+}  // namespace webrtc
diff --git a/p2p/dtls/dtls_utils.cc b/p2p/dtls/dtls_utils.cc
new file mode 100644
index 0000000000..c1b3a0ab10
--- /dev/null
+++ b/p2p/dtls/dtls_utils.cc
@@ -0,0 +1,211 @@
+/*
+ * Copyright 2024 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "p2p/dtls/dtls_utils.h"
+
+#include 
+#include 
+#include 
+#include 
+#include 
+
+#include "absl/container/flat_hash_set.h"
+#include "api/array_view.h"
+#include "rtc_base/buffer.h"
+#include "rtc_base/byte_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/crc32.h"
+
+namespace {
+// https://datatracker.ietf.org/doc/html/rfc5246#appendix-A.1
+const uint8_t kDtlsChangeCipherSpecRecord = 20;
+const uint8_t kDtlsHandshakeRecord = 22;
+
+// https://www.rfc-editor.org/rfc/rfc9147.html#section-4
+const uint8_t kFixedBitmask = 0b00100000;
+const uint8_t kConnectionBitmask = 0b00010000;
+const uint8_t kSequenceNumberBitmask = 0b00001000;
+const uint8_t kLengthPresentBitmask = 0b00000100;
+}  // namespace
+
+namespace webrtc {
+
+bool IsDtlsPacket(ArrayView<const uint8_t> payload) {
+  const uint8_t* u = payload.data();
+  return (payload.size() >= kDtlsRecordHeaderLen && (u[0] > 19 && u[0] < 64));
+}
+
+bool IsDtlsClientHelloPacket(ArrayView<const uint8_t> payload) {
+  if (!IsDtlsPacket(payload)) {
+    return false;
+  }
+  const uint8_t* u = payload.data();
+  return payload.size() > 17 && u[0] == kDtlsHandshakeRecord && u[13] == 1;
+}
+
+bool IsDtlsHandshakePacket(ArrayView<const uint8_t> payload) {
+  if (!IsDtlsPacket(payload)) {
+    return false;
+  }
+  // A change cipher spec record is not itself a handshake packet. Detecting it
+  // used to work because it was aggregated with the session ticket, which is
+  // no longer sent. It is followed by the encrypted handshake message, which
+  // starts with a handshake record (22) again.
+  return payload.size() > 17 && (payload[0] == kDtlsHandshakeRecord ||
+                                 payload[0] == kDtlsChangeCipherSpecRecord);
+}
+
+// Returns an (unsorted) list of the msg_seq values received as part of the
+// handshake.
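+// The parser below walks plaintext DTLS 1.2 records, whose 13-byte header is
+// content_type(1) | version(2) | epoch(2) | sequence_number(6) | length(2),
+// followed by `length` bytes of handshake fragments. DTLS 1.3 unified-header
+// ciphertext records (RFC 9147) are skipped, since their payload is encrypted.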
+std::optional<std::vector<uint16_t>> GetDtlsHandshakeAcks(
+    ArrayView<const uint8_t> dtls_packet) {
+  std::vector<uint16_t> acks;
+  ByteBufferReader record_buf(dtls_packet);
+  // https://datatracker.ietf.org/doc/html/rfc6347#section-4.1
+  while (record_buf.Length() >= kDtlsRecordHeaderLen) {
+    uint8_t content_type;
+    uint64_t epoch_and_seq;
+    uint16_t len;
+    // Read content_type(1).
+    if (!record_buf.ReadUInt8(&content_type)) {
+      return std::nullopt;
+    }
+
+    // DTLS 1.3 rules:
+    // https://www.rfc-editor.org/rfc/rfc9147.html#section-4.1
+    if ((content_type & kFixedBitmask) == kFixedBitmask) {
+      // Interpret as DTLSCipherText:
+      // https://www.rfc-editor.org/rfc/rfc9147.html#appendix-A.1
+      // We assume no connection id is used, so C must be 0.
+      if ((content_type & kConnectionBitmask) != 0) {
+        return std::nullopt;
+      }
+      // Skip sequence_number (1 or 2 bytes depending on the S bit).
+      if (!record_buf.Consume(
+              (content_type & kSequenceNumberBitmask) == kSequenceNumberBitmask
+                  ? 2
+                  : 1)) {
+        return std::nullopt;
+      }
+      // If the L bit is set, consume the 16 bit length field.
+      if ((content_type & kLengthPresentBitmask) == kLengthPresentBitmask) {
+        if (!(record_buf.ReadUInt16(&len) && record_buf.Consume(len))) {
+          return std::nullopt;
+        }
+      }
+      // DTLSCipherText is encrypted, so we cannot read it.
+      continue;
+    }
+    // Skip version(2), read epoch+seq(2+6), read len(2).
+    if (!(record_buf.Consume(2) && record_buf.ReadUInt64(&epoch_and_seq) &&
+          record_buf.ReadUInt16(&len) && record_buf.Length() >= len)) {
+      return std::nullopt;
+    }
+    if (content_type != kDtlsHandshakeRecord) {
+      record_buf.Consume(len);
+      continue;
+    }
+    // Epoch 1+ is encrypted, so we cannot parse it. The epoch is the top
+    // 16 bits of the combined epoch+sequence field read above.
+    if (epoch_and_seq >> 48 != 0) {
+      record_buf.Consume(len);
+      continue;
+    }
+
+    // https://www.rfc-editor.org/rfc/rfc6347.html#section-4.2.2
+    ByteBufferReader handshake_buf(record_buf.DataView().subview(0, len));
+    while (handshake_buf.Length() > 0) {
+      uint16_t msg_seq;
+      uint32_t fragment_len;
+      uint32_t fragment_offset;
+      // Skip msg_type(1) and length(3), read msg_seq(2), skip
+      // fragment_offset(3), read fragment_length(3) and consume it.
+      if (!(handshake_buf.Consume(1 + 3) &&
+            handshake_buf.ReadUInt16(&msg_seq) &&
+            handshake_buf.ReadUInt24(&fragment_offset) &&
+            handshake_buf.ReadUInt24(&fragment_len) &&
+            handshake_buf.Consume(fragment_len))) {
+        return std::nullopt;
+      }
+      acks.push_back(msg_seq);
+      // Advance the outer buffer past this handshake fragment
+      // (12 header bytes plus the fragment itself).
+      record_buf.Consume(12 + fragment_len);
+    }
+    RTC_DCHECK(handshake_buf.Length() == 0);
+  }
+
+  // Should have consumed everything.
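+  // A non-zero remainder here means the declared record/fragment lengths were
+  // inconsistent with the packet size, so no partial ack list is returned.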
+ if (record_buf.Length() != 0) { + return std::nullopt; + } + return acks; +} + +uint32_t ComputeDtlsPacketHash(ArrayView dtls_packet) { + return webrtc::ComputeCrc32(dtls_packet.data(), dtls_packet.size()); +} + +bool PacketStash::AddIfUnique(ArrayView packet) { + uint32_t h = ComputeDtlsPacketHash(packet); + for (const auto& [hash, p] : packets_) { + if (h == hash) { + return false; + } + } + packets_.push_back({.hash = h, + .buffer = std::make_unique( + packet.data(), packet.size())}); + return true; +} + +void PacketStash::Add(ArrayView packet) { + packets_.push_back({.hash = ComputeDtlsPacketHash(packet), + .buffer = std::make_unique( + packet.data(), packet.size())}); +} + +void PacketStash::Prune(const absl::flat_hash_set& hashes) { + if (hashes.empty()) { + return; + } + uint32_t before = packets_.size(); + packets_.erase(std::remove_if(packets_.begin(), packets_.end(), + [&](const auto& val) { + return hashes.contains(val.hash); + }), + packets_.end()); + uint32_t after = packets_.size(); + uint32_t removed = before - after; + if (pos_ >= removed) { + pos_ -= removed; + } +} + +void PacketStash::Prune(uint32_t max_size) { + auto size = packets_.size(); + if (size <= max_size) { + return; + } + auto removed = size - max_size; + packets_.erase(packets_.begin(), packets_.begin() + removed); + if (pos_ <= removed) { + pos_ = 0; + } else { + pos_ -= removed; + } +} + +ArrayView PacketStash::GetNext() { + RTC_DCHECK(!packets_.empty()); + auto pos = pos_; + pos_ = (pos + 1) % packets_.size(); + const auto& buffer = packets_[pos].buffer; + return ArrayView(buffer->data(), buffer->size()); +} + +} // namespace webrtc diff --git a/p2p/dtls/dtls_utils.h b/p2p/dtls/dtls_utils.h new file mode 100644 index 0000000000..88e6521dfd --- /dev/null +++ b/p2p/dtls/dtls_utils.h @@ -0,0 +1,87 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef P2P_DTLS_DTLS_UTILS_H_ +#define P2P_DTLS_DTLS_UTILS_H_ + +#include +#include +#include +#include +#include + +#include "absl/container/flat_hash_set.h" +#include "api/array_view.h" +#include "rtc_base/buffer.h" + +namespace webrtc { + +const size_t kDtlsRecordHeaderLen = 13; +const size_t kMaxDtlsPacketLen = 2048; + +bool IsDtlsPacket(ArrayView payload); +bool IsDtlsClientHelloPacket(ArrayView payload); +bool IsDtlsHandshakePacket(ArrayView payload); + +std::optional> GetDtlsHandshakeAcks( + ArrayView dtls_packet); + +uint32_t ComputeDtlsPacketHash(ArrayView dtls_packet); + +class PacketStash { + public: + PacketStash() {} + + void Add(ArrayView packet); + bool AddIfUnique(ArrayView packet); + void Prune(const absl::flat_hash_set& packet_hashes); + void Prune(uint32_t max_size); + ArrayView GetNext(); + + void clear() { + packets_.clear(); + pos_ = 0; + } + bool empty() const { return packets_.empty(); } + int size() const { return packets_.size(); } + + static uint32_t Hash(ArrayView packet) { + return ComputeDtlsPacketHash(packet); + } + + private: + struct StashedPacket { + uint32_t hash; + std::unique_ptr buffer; + }; + + // This vector will only contain very few items, + // so it is appropriate to use a vector rather than + // e.g. a hash map. 
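+  // As a consequence, AddIfUnique() and Prune() simply do linear scans over
+  // `packets_`, and GetNext() uses `pos_` as a round-robin cursor into it.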
+ uint32_t pos_ = 0; + std::vector packets_; +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::GetDtlsHandshakeAcks; +using ::webrtc::IsDtlsClientHelloPacket; +using ::webrtc::IsDtlsHandshakePacket; +using ::webrtc::IsDtlsPacket; +using ::webrtc::kDtlsRecordHeaderLen; +using ::webrtc::kMaxDtlsPacketLen; +} // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // P2P_DTLS_DTLS_UTILS_H_ diff --git a/p2p/dtls/dtls_utils_unittest.cc b/p2p/dtls/dtls_utils_unittest.cc new file mode 100644 index 0000000000..5dc15621c9 --- /dev/null +++ b/p2p/dtls/dtls_utils_unittest.cc @@ -0,0 +1,340 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "p2p/dtls/dtls_utils.h" + +#include +#include +#include + +#include "absl/container/flat_hash_set.h" +#include "api/array_view.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { + +TEST(DtlsUtils, GetDtlsHandshakeAcksRejectsTooShort) { + std::vector packet = { + 0x16, 0xfe, 0xff, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0xde, 0xad // Length given but bytes not present. + }; + EXPECT_FALSE(GetDtlsHandshakeAcks(packet)); +} + +TEST(DtlsUtils, GetDtlsHandshakeAcksRejectsInvalidContent) { + std::vector packet = {0x16, 0xfe, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, + // Correct length given but data is garbage. + 0x04, 0xde, 0xad, 0xbe, 0xef}; + EXPECT_FALSE(GetDtlsHandshakeAcks(packet)); +} + +TEST(DtlsUtils, GetDtlsHandshakeAcksRejectsTrailingData) { + std::vector packet = { + 0x16, 0xfe, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x0c, 0x0e, 0x00, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Server hello done. + 0xde, 0xad, 0xbe, 0xef // Trailing data. + }; + EXPECT_FALSE(GetDtlsHandshakeAcks(packet)); +} + +TEST(DtlsUtils, GetDtlsHandshakeAcksBasic) { + std::vector packet = { + 0x16, 0xfe, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x0c, 0x0e, 0x00, 0x00, 0x00, 0x00, + 0xac, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Server hello done. 
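+      // (A single plaintext handshake record whose one fragment carries
+      //  msg_seq 0x00ac, the only ack expected below.)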
+ }; + std::optional> acks = GetDtlsHandshakeAcks(packet); + ASSERT_TRUE(acks); + EXPECT_EQ(acks->size(), 1u); + EXPECT_THAT(*acks, ::testing::ElementsAreArray({0xac})); +} + +TEST(DtlsUtils, GetDtlsHandshakeAcksPackedRecords) { + // Flight two from server to client but with fragment packing per + // https://boringssl.googlesource.com/boringssl/+/5245371a08528f7fb21ab20bd7a479d8e395b61c + std::vector packet = { + 0x16, 0xfe, 0xfd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, + 0x43, 0x02, 0x00, 0x00, 0x60, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x60, 0xfe, 0xfd, 0x67, 0x2a, 0x87, 0x84, 0xf8, 0xe5, 0xbc, 0xe5, 0xd1, + 0x2b, 0xfe, 0x53, 0x20, 0xd2, 0xd4, 0x53, 0xa5, 0xbe, 0xd8, 0x38, 0x58, + 0x91, 0xdf, 0x76, 0x21, 0x81, 0x60, 0x7c, 0x6d, 0x8c, 0xdb, 0x93, 0x20, + 0x91, 0xc8, 0xf6, 0x9c, 0xaa, 0xbe, 0x79, 0xa3, 0x28, 0xa6, 0x84, 0xc9, + 0xfa, 0xee, 0x59, 0x22, 0x5d, 0xe2, 0x11, 0x28, 0xf4, 0x80, 0xd6, 0x1a, + 0x3a, 0xb5, 0x3d, 0xb6, 0x61, 0x74, 0xb6, 0x1d, 0xc0, 0x2b, 0x00, 0x00, + 0x18, 0x00, 0x17, 0x00, 0x00, 0xff, 0x01, 0x00, 0x01, 0x00, 0x00, 0x0b, + 0x00, 0x02, 0x01, 0x00, 0x00, 0x0e, 0x00, 0x05, 0x00, 0x02, 0x00, 0x01, + 0x00, 0x0b, 0x00, 0x01, 0x1f, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x01, + 0x1f, 0x00, 0x01, 0x1c, 0x00, 0x01, 0x19, 0x30, 0x82, 0x01, 0x15, 0x30, + 0x81, 0xbd, 0xa0, 0x03, 0x02, 0x01, 0x02, 0x02, 0x09, 0x00, 0x9d, 0x2a, + 0x69, 0x9b, 0x1d, 0x5a, 0x38, 0xe5, 0x30, 0x0a, 0x06, 0x08, 0x2a, 0x86, + 0x48, 0xce, 0x3d, 0x04, 0x03, 0x02, 0x30, 0x11, 0x31, 0x0f, 0x30, 0x0d, + 0x06, 0x03, 0x55, 0x04, 0x03, 0x0c, 0x06, 0x57, 0x65, 0x62, 0x52, 0x54, + 0x43, 0x30, 0x1e, 0x17, 0x0d, 0x32, 0x34, 0x31, 0x31, 0x30, 0x34, 0x32, + 0x31, 0x30, 0x30, 0x35, 0x31, 0x5a, 0x17, 0x0d, 0x32, 0x34, 0x31, 0x32, + 0x30, 0x35, 0x32, 0x31, 0x30, 0x30, 0x35, 0x31, 0x5a, 0x30, 0x11, 0x31, + 0x0f, 0x30, 0x0d, 0x06, 0x03, 0x55, 0x04, 0x03, 0x0c, 0x06, 0x57, 0x65, + 0x62, 0x52, 0x54, 0x43, 0x30, 0x59, 0x30, 0x13, 0x06, 0x07, 0x2a, 0x86, + 0x48, 0xce, 0x3d, 0x02, 0x01, 0x06, 0x08, 0x2a, 0x86, 0x48, 0xce, 0x3d, + 0x03, 0x01, 0x07, 0x03, 0x42, 0x00, 0x04, 0x1e, 0xd8, 0xad, 0x96, 0x82, + 0xd0, 0xfb, 0xc8, 0xaa, 0xff, 0x84, 0x40, 0x84, 0xfc, 0x1e, 0x4a, 0xfd, + 0x8b, 0xfc, 0x13, 0xbb, 0xee, 0x93, 0xea, 0x91, 0x55, 0x61, 0x7d, 0xc3, + 0x96, 0x66, 0x38, 0x6d, 0x51, 0x59, 0x57, 0xbd, 0xc3, 0xd2, 0x03, 0xf4, + 0xde, 0x48, 0x3f, 0x61, 0x5e, 0x59, 0x2b, 0xfa, 0xfe, 0x68, 0xc0, 0x98, + 0xa3, 0x33, 0xe7, 0xd6, 0xb4, 0x0e, 0xbe, 0x56, 0x48, 0x50, 0x4b, 0x30, + 0x0a, 0x06, 0x08, 0x2a, 0x86, 0x48, 0xce, 0x3d, 0x04, 0x03, 0x02, 0x03, + 0x47, 0x00, 0x30, 0x44, 0x02, 0x20, 0x4d, 0xff, 0x9f, 0xf3, 0xc4, 0x08, + 0x15, 0xe0, 0xdd, 0x76, 0x64, 0x0d, 0x50, 0x42, 0x30, 0xbb, 0xf7, 0xca, + 0x78, 0xff, 0xe7, 0x86, 0x05, 0x0f, 0x23, 0x6e, 0xd2, 0x69, 0xd3, 0xc5, + 0xbd, 0xaa, 0x02, 0x20, 0x43, 0x71, 0x52, 0x2f, 0x74, 0x25, 0x78, 0xcd, + 0x62, 0x62, 0x62, 0x0b, 0xbf, 0x76, 0x35, 0xe1, 0xfe, 0x8c, 0x03, 0x6b, + 0x56, 0xb8, 0x96, 0x1f, 0xb1, 0x3a, 0x9f, 0xd9, 0x78, 0x05, 0x66, 0xa7, + 0x0c, 0x00, 0x00, 0x6f, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x6f, + 0x03, 0x00, 0x1d, 0x20, 0xf5, 0x50, 0xad, 0x14, 0x55, 0xd1, 0xbc, 0x82, + 0xa8, 0xb0, 0x2b, 0x81, 0x3d, 0x18, 0xf4, 0xba, 0x11, 0x54, 0xbb, 0x24, + 0x8b, 0x07, 0xa7, 0x17, 0xf1, 0x33, 0xca, 0x45, 0xc0, 0x6a, 0x16, 0x0a, + 0x04, 0x03, 0x00, 0x47, 0x30, 0x45, 0x02, 0x21, 0x00, 0xad, 0xb6, 0x59, + 0x0c, 0xe0, 0x56, 0x42, 0xb8, 0x9f, 0x40, 0x43, 0xd3, 0x7f, 0x9f, 0xa0, + 0x1d, 0xbc, 0x78, 0xf5, 0xc3, 0x38, 0x99, 0x02, 0xde, 0x11, 0x85, 0x0f, + 0x50, 0xd6, 0x5b, 0x82, 0x7c, 
0x02, 0x20, 0x57, 0x2e, 0x0a, 0x82, 0xf7, + 0x14, 0xb6, 0xd6, 0xb2, 0x4b, 0xd7, 0x1a, 0xd6, 0x1b, 0xc2, 0xf6, 0xc2, + 0x4f, 0x3f, 0xe2, 0x8a, 0x06, 0x97, 0xf3, 0x84, 0xc8, 0x60, 0xf1, 0xab, + 0x2d, 0x29, 0xaf, 0x0d, 0x00, 0x00, 0x19, 0x00, 0x03, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x19, 0x02, 0x01, 0x40, 0x00, 0x12, 0x04, 0x03, 0x08, 0x04, + 0x04, 0x01, 0x05, 0x03, 0x08, 0x05, 0x05, 0x01, 0x08, 0x06, 0x06, 0x01, + 0x02, 0x01, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00}; + + std::optional> acks = GetDtlsHandshakeAcks(packet); + ASSERT_TRUE(acks); + EXPECT_EQ(acks->size(), 5u); + EXPECT_THAT(*acks, ::testing::ElementsAreArray({0, 1, 2, 3, 4})); + + std::vector packet2 = { + 0x16, 0xfe, 0xfd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x6c, 0x02, 0x00, 0x00, 0x60, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x60, 0xfe, 0xfd, 0x00, 0x00, 0x00, 0x00, 0xb2, 0xeb, 0x05, 0xc9, 0xba, + 0x39, 0xd0, 0xf6, 0x4b, 0xc9, 0x7e, 0xee, 0x57, 0xfc, 0x2b, 0x90, 0x93, + 0x09, 0xfd, 0x05, 0x80, 0xb3, 0xf5, 0xfc, 0x06, 0x68, 0x38, 0xa8, 0x20, + 0x42, 0x1d, 0x78, 0xe1, 0x97, 0x73, 0x55, 0x0a, 0x16, 0x2d, 0xc1, 0x3e, + 0x4f, 0x71, 0x55, 0xb4, 0x9f, 0xf8, 0x61, 0xe1, 0xbd, 0xe3, 0xf2, 0x2e, + 0x40, 0x29, 0x30, 0x58, 0x37, 0x26, 0x0d, 0xe8, 0xc0, 0x2b, 0x00, 0x00, + 0x18, 0x00, 0x17, 0x00, 0x00, 0xff, 0x01, 0x00, 0x01, 0x00, 0x00, 0x0b, + 0x00, 0x02, 0x01, 0x00, 0x00, 0x0e, 0x00, 0x05, 0x00, 0x02, 0x00, 0x01, + 0x00, // + 0x16, 0xfe, 0xfd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, + 0x28, 0x0b, 0x00, 0x01, 0x1c, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x01, + 0x1c, 0x00, 0x01, 0x19, 0x00, 0x01, 0x16, 0x30, 0x82, 0x01, 0x12, 0x30, + 0x81, 0xb8, 0xa0, 0x03, 0x02, 0x01, 0x02, 0x02, 0x08, 0x15, 0xc9, 0xcc, + 0xd0, 0x55, 0x57, 0xa2, 0x32, 0x30, 0x0a, 0x06, 0x08, 0x2a, 0x86, 0x48, + 0xce, 0x3d, 0x04, 0x03, 0x02, 0x30, 0x0f, 0x31, 0x0d, 0x30, 0x0b, 0x06, + 0x03, 0x55, 0x04, 0x03, 0x0c, 0x04, 0x74, 0x65, 0x73, 0x74, 0x30, 0x1e, + 0x17, 0x0d, 0x32, 0x34, 0x31, 0x31, 0x30, 0x35, 0x32, 0x30, 0x35, 0x33, + 0x34, 0x38, 0x5a, 0x17, 0x0d, 0x32, 0x34, 0x31, 0x32, 0x30, 0x36, 0x32, + 0x30, 0x35, 0x33, 0x34, 0x38, 0x5a, 0x30, 0x0f, 0x31, 0x0d, 0x30, 0x0b, + 0x06, 0x03, 0x55, 0x04, 0x03, 0x0c, 0x04, 0x74, 0x65, 0x73, 0x74, 0x30, + 0x59, 0x30, 0x13, 0x06, 0x07, 0x2a, 0x86, 0x48, 0xce, 0x3d, 0x02, 0x01, + 0x06, 0x08, 0x2a, 0x86, 0x48, 0xce, 0x3d, 0x03, 0x01, 0x07, 0x03, 0x42, + 0x00, 0x04, 0x8f, 0x43, 0xeb, 0x7b, 0x88, 0x73, 0x4f, 0xe7, 0x69, 0x06, + 0x81, 0xb6, 0xb9, 0xf3, 0xca, 0x73, 0x32, 0x69, 0xb2, 0xc5, 0xe6, 0x4e, + 0xf0, 0x8c, 0xf4, 0xdd, 0x4e, 0x5b, 0xea, 0x06, 0x52, 0x94, 0x9a, 0x12, + 0x77, 0x11, 0xde, 0xf9, 0x12, 0x9a, 0xeb, 0x3c, 0x7c, 0xe4, 0xcf, 0x58, + 0x4c, 0x74, 0x44, 0x84, 0x0a, 0x84, 0xeb, 0xe6, 0xa4, 0xd5, 0xd3, 0x06, + 0xca, 0x52, 0x15, 0x7e, 0xeb, 0x19, 0x30, 0x0a, 0x06, 0x08, 0x2a, 0x86, + 0x48, 0xce, 0x3d, 0x04, 0x03, 0x02, 0x03, 0x49, 0x00, 0x30, 0x46, 0x02, + 0x21, 0x00, 0xab, 0xc7, 0x06, 0x7e, 0x36, 0x9b, 0xad, 0xe0, 0x26, 0x61, + 0x6b, 0x59, 0xa0, 0x1c, 0x70, 0x0c, 0xa6, 0xd3, 0xff, 0x8a, 0xc7, 0xba, + 0xe4, 0x23, 0x0a, 0x8b, 0x22, 0x82, 0xcd, 0x5a, 0x5c, 0x56, 0x02, 0x21, + 0x00, 0xc7, 0xe8, 0x57, 0x04, 0xb5, 0x44, 0x69, 0x42, 0xa2, 0xa2, 0x1e, + 0xde, 0x7f, 0xc4, 0x44, 0x98, 0xa4, 0x5c, 0x84, 0x41, 0xa1, 0x31, 0x38, + 0x3c, 0xe5, 0x4f, 0xf5, 0xc0, 0xa9, 0xa8, 0xbc, 0x16, // + 0x16, 0xfe, 0xfd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, + 0x7a, 0x0c, 0x00, 0x00, 0x6e, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x6e, 0x03, 0x00, 0x1d, 
0x20, 0x89, 0xb9, 0xeb, 0x39, 0x29, 0xa0, 0x31, + 0x08, 0x9a, 0xbf, 0xc3, 0xc0, 0x20, 0x60, 0xbb, 0xea, 0x73, 0x19, 0xcf, + 0x63, 0xe4, 0x5a, 0xa8, 0xa9, 0x56, 0x77, 0xe8, 0x81, 0x48, 0xae, 0x9f, + 0x34, 0x04, 0x03, 0x00, 0x46, 0x30, 0x44, 0x02, 0x20, 0x23, 0x34, 0xc6, + 0x39, 0x94, 0x84, 0xcc, 0x67, 0xeb, 0x44, 0xf9, 0xc3, 0x5c, 0x52, 0xb3, + 0x99, 0x52, 0xf7, 0x4f, 0xff, 0x8b, 0xc5, 0xea, 0xb5, 0xd0, 0xf9, 0x36, + 0xb3, 0xe6, 0xfc, 0x37, 0x50, 0x02, 0x20, 0x4c, 0xe2, 0x29, 0xf5, 0x4a, + 0x4c, 0x7a, 0x01, 0x37, 0xce, 0xc1, 0xb0, 0x15, 0x23, 0xfd, 0xa5, 0xd9, + 0xac, 0x75, 0xcb, 0x55, 0x56, 0x99, 0x97, 0xe3, 0x13, 0xbd, 0x5b, 0xcc, + 0x5d, 0x0c, + 0xa8, // + 0x16, 0xfe, 0xfd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, + 0x25, 0x0d, 0x00, 0x00, 0x19, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x19, 0x02, 0x01, 0x40, 0x00, 0x12, 0x04, 0x03, 0x08, 0x04, 0x04, 0x01, + 0x05, 0x03, 0x08, 0x05, 0x05, 0x01, 0x08, 0x06, 0x06, 0x01, 0x02, 0x01, + 0x00, 0x00, // + 0x16, 0xfe, 0xfd, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, + 0x0c, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00}; + + std::optional> acks2 = GetDtlsHandshakeAcks(packet2); + ASSERT_TRUE(acks2); + EXPECT_EQ(acks2->size(), 5u); + EXPECT_THAT(*acks2, ::testing::ElementsAreArray({0, 1, 2, 3, 4})); +} + +TEST(DtlsUtils, GetDtls13HandshakeAcks) { + // DTLS 1.3 encrypted data, captured with Wireshark. This is a single + // encrypted record which can not be parsed and should be skipped. + std::vector packet = { + 0x2f, 0x5b, 0x4c, 0x00, 0x23, 0x47, 0xab, 0xe7, 0x90, 0x96, + 0xc0, 0xac, 0x2f, 0x25, 0x40, 0x35, 0x35, 0xa3, 0x81, 0x50, + 0x0c, 0x38, 0x0a, 0xf6, 0xd4, 0xd5, 0x7d, 0xbe, 0x9a, 0xa3, + 0xcb, 0xcb, 0x67, 0xb0, 0x77, 0x79, 0x8b, 0x48, 0x60, 0xf8, + }; + + std::optional> acks = GetDtlsHandshakeAcks(packet); + ASSERT_TRUE(acks); + EXPECT_EQ(acks->size(), 0u); +} + +std::vector ToVector(ArrayView array) { + return std::vector(array.begin(), array.end()); +} + +TEST(PacketStash, Add) { + PacketStash stash; + std::vector packet = { + 0x2f, 0x5b, 0x4c, 0x00, 0x23, 0x47, 0xab, 0xe7, 0x90, 0x96, + 0xc0, 0xac, 0x2f, 0x25, 0x40, 0x35, 0x35, 0xa3, 0x81, 0x50, + 0x0c, 0x38, 0x0a, 0xf6, 0xd4, 0xd5, 0x7d, 0xbe, 0x9a, 0xa3, + 0xcb, 0xcb, 0x67, 0xb0, 0x77, 0x79, 0x8b, 0x48, 0x60, 0xf8, + }; + + stash.Add(packet); + EXPECT_EQ(stash.size(), 1); + EXPECT_EQ(ToVector(stash.GetNext()), packet); + + stash.Add(packet); + EXPECT_EQ(stash.size(), 2); + EXPECT_EQ(ToVector(stash.GetNext()), packet); + EXPECT_EQ(ToVector(stash.GetNext()), packet); +} + +TEST(PacketStash, AddIfUnique) { + PacketStash stash; + std::vector packet1 = { + 0x2f, 0x5b, 0x4c, 0x00, 0x23, 0x47, 0xab, 0xe7, 0x90, 0x96, + 0xc0, 0xac, 0x2f, 0x25, 0x40, 0x35, 0x35, 0xa3, 0x81, 0x50, + 0x0c, 0x38, 0x0a, 0xf6, 0xd4, 0xd5, 0x7d, 0xbe, 0x9a, 0xa3, + 0xcb, 0xcb, 0x67, 0xb0, 0x77, 0x79, 0x8b, 0x48, 0x60, 0xf8, + }; + + std::vector packet2 = { + 0x16, 0xfe, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x0c, 0x0e, 0x00, 0x00, 0x00, 0x00, + 0xac, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + }; + + stash.AddIfUnique(packet1); + EXPECT_EQ(stash.size(), 1); + EXPECT_EQ(ToVector(stash.GetNext()), packet1); + + stash.AddIfUnique(packet1); + EXPECT_EQ(stash.size(), 1); + EXPECT_EQ(ToVector(stash.GetNext()), packet1); + + stash.AddIfUnique(packet2); + EXPECT_EQ(stash.size(), 2); + EXPECT_EQ(ToVector(stash.GetNext()), packet1); + EXPECT_EQ(ToVector(stash.GetNext()), packet2); + + stash.AddIfUnique(packet2); + EXPECT_EQ(stash.size(), 2); +} + 
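+// A small illustrative sketch (our addition, not part of the original change):
+// GetNext() walks the stash round-robin, so a retransmission timer that calls
+// it repeatedly spreads resends across all outstanding packets, while
+// Prune(max_size) bounds memory by dropping the oldest entries first. Only the
+// PacketStash API shown above is used; the test name is ours.
+TEST(PacketStash, RoundRobinAndPruneSketch) {
+  PacketStash stash;
+  std::vector<uint8_t> a = {0x0a};
+  std::vector<uint8_t> b = {0x0b};
+  stash.Add(a);
+  stash.Add(b);
+
+  // Two full round-robin cycles: a, b, a, b.
+  EXPECT_EQ(ToVector(stash.GetNext()), a);
+  EXPECT_EQ(ToVector(stash.GetNext()), b);
+  EXPECT_EQ(ToVector(stash.GetNext()), a);
+  EXPECT_EQ(ToVector(stash.GetNext()), b);
+
+  // Pruning to one entry drops the oldest packet and keeps the newest.
+  stash.Prune(/* max_size= */ 1);
+  EXPECT_EQ(stash.size(), 1);
+  EXPECT_EQ(ToVector(stash.GetNext()), b);
+}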
+TEST(PacketStash, Prune) { + PacketStash stash; + std::vector packet1 = { + 0x2f, 0x5b, 0x4c, 0x00, 0x23, 0x47, 0xab, 0xe7, 0x90, 0x96, + 0xc0, 0xac, 0x2f, 0x25, 0x40, 0x35, 0x35, 0xa3, 0x81, 0x50, + 0x0c, 0x38, 0x0a, 0xf6, 0xd4, 0xd5, 0x7d, 0xbe, 0x9a, 0xa3, + 0xcb, 0xcb, 0x67, 0xb0, 0x77, 0x79, 0x8b, 0x48, 0x60, 0xf8, + }; + + std::vector packet2 = { + 0x16, 0xfe, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x0c, 0x0e, 0x00, 0x00, 0x00, 0x00, + 0xac, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + }; + + stash.AddIfUnique(packet1); + stash.AddIfUnique(packet2); + EXPECT_EQ(stash.size(), 2); + EXPECT_EQ(ToVector(stash.GetNext()), packet1); + EXPECT_EQ(ToVector(stash.GetNext()), packet2); + + absl::flat_hash_set remove; + remove.insert(PacketStash::Hash(packet1)); + stash.Prune(remove); + + EXPECT_EQ(stash.size(), 1); + EXPECT_EQ(ToVector(stash.GetNext()), packet2); +} + +TEST(PacketStash, PruneSize) { + PacketStash stash; + std::vector packet1 = { + 0x2f, 0x5b, 0x4c, 0x00, 0x23, 0x47, 0xab, 0xe7, 0x90, 0x96, + 0xc0, 0xac, 0x2f, 0x25, 0x40, 0x35, 0x35, 0xa3, 0x81, 0x50, + 0x0c, 0x38, 0x0a, 0xf6, 0xd4, 0xd5, 0x7d, 0xbe, 0x9a, 0xa3, + 0xcb, 0xcb, 0x67, 0xb0, 0x77, 0x79, 0x8b, 0x48, 0x60, 0xf8, + }; + + std::vector packet2 = { + 0x16, 0xfe, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x0c, 0x0e, 0x00, 0x00, 0x00, 0x00, + 0xac, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + }; + + std::vector packet3 = {0x3}; + std::vector packet4 = {0x4}; + std::vector packet5 = {0x5}; + std::vector packet6 = {0x6}; + + stash.AddIfUnique(packet1); + stash.AddIfUnique(packet2); + stash.AddIfUnique(packet3); + stash.AddIfUnique(packet4); + stash.AddIfUnique(packet5); + stash.AddIfUnique(packet6); + EXPECT_EQ(stash.size(), 6); + EXPECT_EQ(ToVector(stash.GetNext()), packet1); + EXPECT_EQ(ToVector(stash.GetNext()), packet2); + EXPECT_EQ(ToVector(stash.GetNext()), packet3); + EXPECT_EQ(ToVector(stash.GetNext()), packet4); + EXPECT_EQ(ToVector(stash.GetNext()), packet5); + EXPECT_EQ(ToVector(stash.GetNext()), packet6); + + // Should be NOP. + stash.Prune(/* max_size= */ 6); + EXPECT_EQ(ToVector(stash.GetNext()), packet1); + EXPECT_EQ(ToVector(stash.GetNext()), packet2); + EXPECT_EQ(ToVector(stash.GetNext()), packet3); + EXPECT_EQ(ToVector(stash.GetNext()), packet4); + EXPECT_EQ(ToVector(stash.GetNext()), packet5); + EXPECT_EQ(ToVector(stash.GetNext()), packet6); + + // Move "cursor" forward. + EXPECT_EQ(ToVector(stash.GetNext()), packet1); + stash.Prune(/* max_size= */ 4); + EXPECT_EQ(stash.size(), 4); + EXPECT_EQ(ToVector(stash.GetNext()), packet3); + EXPECT_EQ(ToVector(stash.GetNext()), packet4); + EXPECT_EQ(ToVector(stash.GetNext()), packet5); + EXPECT_EQ(ToVector(stash.GetNext()), packet6); +} + +} // namespace webrtc diff --git a/p2p/base/fake_dtls_transport.h b/p2p/dtls/fake_dtls_transport.h similarity index 63% rename from p2p/base/fake_dtls_transport.h rename to p2p/dtls/fake_dtls_transport.h index 283488bc38..e75e5e6bca 100644 --- a/p2p/base/fake_dtls_transport.h +++ b/p2p/dtls/fake_dtls_transport.h @@ -8,23 +8,40 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef P2P_BASE_FAKE_DTLS_TRANSPORT_H_ -#define P2P_BASE_FAKE_DTLS_TRANSPORT_H_ +#ifndef P2P_DTLS_FAKE_DTLS_TRANSPORT_H_ +#define P2P_DTLS_FAKE_DTLS_TRANSPORT_H_ +#include +#include #include +#include #include #include -#include #include "absl/strings/string_view.h" -#include "api/crypto/crypto_options.h" +#include "api/array_view.h" #include "api/dtls_transport_interface.h" -#include "p2p/base/dtls_transport_internal.h" -#include "p2p/base/fake_ice_transport.h" +#include "api/rtc_error.h" +#include "api/scoped_refptr.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/packet_transport_internal.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "p2p/test/fake_ice_transport.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" #include "rtc_base/fake_ssl_identity.h" +#include "rtc_base/logging.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network_route.h" #include "rtc_base/rtc_certificate.h" +#include "rtc_base/socket.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_fingerprint.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/thread.h" -namespace cricket { +namespace webrtc { // Fake DTLS transport which is implemented by wrapping a fake ICE transport. // Doesn't interact directly with fake ICE transport for anything other than @@ -37,8 +54,11 @@ class FakeDtlsTransport : public DtlsTransportInternal { component_(ice_transport->component()), dtls_fingerprint_("", nullptr) { RTC_DCHECK(ice_transport_); - ice_transport_->SignalReadPacket.connect( - this, &FakeDtlsTransport::OnIceTransportReadPacket); + ice_transport_->RegisterReceivedPacketCallback( + this, [&](PacketTransportInternal* transport, + const ReceivedIpPacket& packet) { + OnIceTransportReadPacket(transport, packet); + }); ice_transport_->SignalNetworkRouteChanged.connect( this, &FakeDtlsTransport::OnNetworkRouteChanged); } @@ -47,10 +67,13 @@ class FakeDtlsTransport : public DtlsTransportInternal { : owned_ice_transport_(std::move(ice)), transport_name_(owned_ice_transport_->transport_name()), component_(owned_ice_transport_->component()), - dtls_fingerprint_("", rtc::ArrayView()) { + dtls_fingerprint_("", ArrayView()) { ice_transport_ = owned_ice_transport_.get(); - ice_transport_->SignalReadPacket.connect( - this, &FakeDtlsTransport::OnIceTransportReadPacket); + ice_transport_->RegisterReceivedPacketCallback( + this, [&](PacketTransportInternal* transport, + const ReceivedIpPacket& packet) { + OnIceTransportReadPacket(transport, packet); + }); ice_transport_->SignalNetworkRouteChanged.connect( this, &FakeDtlsTransport::OnNetworkRouteChanged); } @@ -62,7 +85,7 @@ class FakeDtlsTransport : public DtlsTransportInternal { } FakeDtlsTransport(const std::string& name, int component, - rtc::Thread* network_thread) + Thread* network_thread) : FakeDtlsTransport(std::make_unique(name, component, network_thread)) {} @@ -71,6 +94,7 @@ class FakeDtlsTransport : public DtlsTransportInternal { if (dest_ && dest_->dest_ == this) { dest_->dest_ = nullptr; } + ice_transport_->DeregisterReceivedPacketCallback(this); } // Get inner fake ICE transport. 
@@ -91,7 +115,7 @@ class FakeDtlsTransport : public DtlsTransportInternal { ice_transport_->SetReceiving(receiving); set_receiving(receiving); } - void SetDtlsState(webrtc::DtlsTransportState state) { + void SetDtlsState(DtlsTransportState state) { dtls_state_ = state; SendDtlsState(this, dtls_state_); } @@ -121,9 +145,9 @@ class FakeDtlsTransport : public DtlsTransportInternal { } // If the `dtls_role_` is unset, set it to SSL_CLIENT by default. if (!dtls_role_) { - dtls_role_ = std::move(rtc::SSL_CLIENT); + dtls_role_ = std::move(webrtc::SSL_CLIENT); } - SetDtlsState(webrtc::DtlsTransportState::kConnected); + SetDtlsState(DtlsTransportState::kConnected); ice_transport_->SetDestination( static_cast(dest->ice_transport()), asymmetric); } else { @@ -135,34 +159,31 @@ class FakeDtlsTransport : public DtlsTransportInternal { } // Fake DtlsTransportInternal implementation. - webrtc::DtlsTransportState dtls_state() const override { return dtls_state_; } + DtlsTransportState dtls_state() const override { return dtls_state_; } const std::string& transport_name() const override { return transport_name_; } int component() const override { return component_; } - const rtc::SSLFingerprint& dtls_fingerprint() const { - return dtls_fingerprint_; - } - webrtc::RTCError SetRemoteParameters(absl::string_view alg, - const uint8_t* digest, - size_t digest_len, - absl::optional role) { + const SSLFingerprint& dtls_fingerprint() const { return dtls_fingerprint_; } + RTCError SetRemoteParameters(absl::string_view alg, + const uint8_t* digest, + size_t digest_len, + std::optional role) { if (role) { SetDtlsRole(*role); } SetRemoteFingerprint(alg, digest, digest_len); - return webrtc::RTCError::OK(); + return RTCError::OK(); } bool SetRemoteFingerprint(absl::string_view alg, const uint8_t* digest, size_t digest_len) { - dtls_fingerprint_ = - rtc::SSLFingerprint(alg, rtc::MakeArrayView(digest, digest_len)); + dtls_fingerprint_ = SSLFingerprint(alg, MakeArrayView(digest, digest_len)); return true; } - bool SetDtlsRole(rtc::SSLRole role) override { + bool SetDtlsRole(SSLRole role) override { dtls_role_ = std::move(role); return true; } - bool GetDtlsRole(rtc::SSLRole* role) const override { + bool GetDtlsRole(SSLRole* role) const override { if (!dtls_role_) { return false; } @@ -170,12 +191,12 @@ class FakeDtlsTransport : public DtlsTransportInternal { return true; } bool SetLocalCertificate( - const rtc::scoped_refptr& certificate) override { + const scoped_refptr& certificate) override { do_dtls_ = true; local_cert_ = certificate; return true; } - void SetRemoteSSLCertificate(rtc::FakeSSLCertificate* cert) { + void SetRemoteSSLCertificate(FakeSSLCertificate* cert) { remote_cert_ = cert; } bool IsDtlsActive() const override { return do_dtls_; } @@ -186,7 +207,7 @@ class FakeDtlsTransport : public DtlsTransportInternal { *version = 0x0102; return true; } - bool GetSrtpCryptoSuite(int* crypto_suite) override { + bool GetSrtpCryptoSuite(int* crypto_suite) const override { if (!do_dtls_) { return false; } @@ -195,41 +216,41 @@ class FakeDtlsTransport : public DtlsTransportInternal { } void SetSrtpCryptoSuite(int crypto_suite) { crypto_suite_ = crypto_suite; } - bool GetSslCipherSuite(int* cipher_suite) override { + bool GetSslCipherSuite(int* cipher_suite) const override { if (ssl_cipher_suite_) { *cipher_suite = *ssl_cipher_suite_; return true; } return false; } - void SetSslCipherSuite(absl::optional cipher_suite) { + void SetSslCipherSuite(std::optional cipher_suite) { ssl_cipher_suite_ = cipher_suite; } - 
rtc::scoped_refptr GetLocalCertificate() const override { + + std::optional GetTlsCipherSuiteName() const override { + return "FakeTlsCipherSuite"; + } + uint16_t GetSslPeerSignatureAlgorithm() const override { return 0; } + scoped_refptr GetLocalCertificate() const override { return local_cert_; } - std::unique_ptr GetRemoteSSLCertChain() const override { + std::unique_ptr GetRemoteSSLCertChain() const override { if (!remote_cert_) { return nullptr; } - return std::make_unique(remote_cert_->Clone()); - } - bool ExportKeyingMaterial(absl::string_view label, - const uint8_t* context, - size_t context_len, - bool use_context, - uint8_t* result, - size_t result_len) override { - if (!do_dtls_) { - return false; + return std::make_unique(remote_cert_->Clone()); + } + bool ExportSrtpKeyingMaterial( + ZeroOnFreeBuffer& keying_material) override { + if (do_dtls_) { + std::memset(keying_material.data(), 0xff, keying_material.size()); } - memset(result, 0xff, result_len); - return true; + return do_dtls_; } - void set_ssl_max_protocol_version(rtc::SSLProtocolVersion version) { + void set_ssl_max_protocol_version(SSLProtocolVersion version) { ssl_max_version_ = version; } - rtc::SSLProtocolVersion ssl_max_protocol_version() const { + SSLProtocolVersion ssl_max_protocol_version() const { return ssl_max_version_; } @@ -241,7 +262,7 @@ class FakeDtlsTransport : public DtlsTransportInternal { bool receiving() const override { return receiving_; } int SendPacket(const char* data, size_t len, - const rtc::PacketOptions& options, + const AsyncSocketPacketOptions& options, int flags) override { // We expect only SRTP packets to be sent through this interface. if (flags != PF_SRTP_BYPASS && flags != 0) { @@ -249,25 +270,22 @@ class FakeDtlsTransport : public DtlsTransportInternal { } return ice_transport_->SendPacket(data, len, options, flags); } - int SetOption(rtc::Socket::Option opt, int value) override { + int SetOption(Socket::Option opt, int value) override { return ice_transport_->SetOption(opt, value); } - bool GetOption(rtc::Socket::Option opt, int* value) override { + bool GetOption(Socket::Option opt, int* value) override { return ice_transport_->GetOption(opt, value); } int GetError() override { return ice_transport_->GetError(); } - absl::optional network_route() const override { + std::optional network_route() const override { return ice_transport_->network_route(); } private: - void OnIceTransportReadPacket(PacketTransportInternal* ice_, - const char* data, - size_t len, - const int64_t& packet_time_us, - int flags) { - SignalReadPacket(this, data, len, packet_time_us, flags); + void OnIceTransportReadPacket(PacketTransportInternal* /* ice_ */, + const ReceivedIpPacket& packet) { + NotifyPacketReceived(packet); } void set_receiving(bool receiving) { @@ -289,7 +307,7 @@ class FakeDtlsTransport : public DtlsTransportInternal { SignalWritableState(this); } - void OnNetworkRouteChanged(absl::optional network_route) { + void OnNetworkRouteChanged(std::optional network_route) { SignalNetworkRouteChanged(network_route); } @@ -298,21 +316,29 @@ class FakeDtlsTransport : public DtlsTransportInternal { std::string transport_name_; int component_; FakeDtlsTransport* dest_ = nullptr; - rtc::scoped_refptr local_cert_; - rtc::FakeSSLCertificate* remote_cert_ = nullptr; + scoped_refptr local_cert_; + FakeSSLCertificate* remote_cert_ = nullptr; bool do_dtls_ = false; - rtc::SSLProtocolVersion ssl_max_version_ = rtc::SSL_PROTOCOL_DTLS_12; - rtc::SSLFingerprint dtls_fingerprint_; - absl::optional dtls_role_; - 
int crypto_suite_ = rtc::kSrtpAes128CmSha1_80; - absl::optional ssl_cipher_suite_; + SSLProtocolVersion ssl_max_version_ = webrtc::SSL_PROTOCOL_DTLS_12; + SSLFingerprint dtls_fingerprint_; + std::optional dtls_role_; + int crypto_suite_ = webrtc::kSrtpAes128CmSha1_80; + std::optional ssl_cipher_suite_; - webrtc::DtlsTransportState dtls_state_ = webrtc::DtlsTransportState::kNew; + DtlsTransportState dtls_state_ = DtlsTransportState::kNew; bool receiving_ = false; bool writable_ = false; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::FakeDtlsTransport; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES -#endif // P2P_BASE_FAKE_DTLS_TRANSPORT_H_ +#endif // P2P_DTLS_FAKE_DTLS_TRANSPORT_H_ diff --git a/p2p/g3doc/ice.md b/p2p/g3doc/ice.md index 81c9541b64..1b3300df40 100644 --- a/p2p/g3doc/ice.md +++ b/p2p/g3doc/ice.md @@ -12,47 +12,47 @@ server. This documentation provides an overview of how ICE is implemented, i.e how the following classes interact. -* [`cricket::IceTransportInternal`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/ice_transport_internal.h;l=225;drc=8cb97062880b0e0a78f9d578370a01aced81a13f) - +* [`webrtc::IceTransportInternal`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/ice_transport_internal.h;l=225;drc=8cb97062880b0e0a78f9d578370a01aced81a13f) - is the interface that does ICE (manage ports, candidates, connections to send/receive packets). The interface is implemented by - [`cricket::P2PTransportChannel`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/p2p_transport_channel.h;l=103;drc=0ccfbd2de7bc3b237a0f8c30f48666c97b9e5523). + [`webrtc::P2PTransportChannel`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/p2p_transport_channel.h;l=103;drc=0ccfbd2de7bc3b237a0f8c30f48666c97b9e5523). -* [`cricket::PortInterface`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/port_interface.h;l=47;drc=c3a486c41e682cce943f2b20fe987c9421d4b631) +* [`webrtc::PortInterface`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/port_interface.h;l=47;drc=c3a486c41e682cce943f2b20fe987c9421d4b631) Represents a local communication mechanism that can be used to create connections to similar mechanisms of the other client. 
There are 4 - implementations of `cricket::PortInterface` - [`cricket::UDPPort`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/stun_port.h;l=33;drc=a4d873786f10eedd72de25ad0d94ad7c53c1f68a), - [`cricket::StunPort`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/stun_port.h;l=265;drc=a4d873786f10eedd72de25ad0d94ad7c53c1f68a), - [`cricket::TcpPort`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/tcp_port.h;l=33;drc=7a284e1614a38286477ed2334ecbdde78e87b79c) + implementations of `webrtc::PortInterface` + [`webrtc::UDPPort`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/stun_port.h;l=33;drc=a4d873786f10eedd72de25ad0d94ad7c53c1f68a), + [`webrtc::StunPort`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/stun_port.h;l=265;drc=a4d873786f10eedd72de25ad0d94ad7c53c1f68a), + [`webrtc::TcpPort`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/tcp_port.h;l=33;drc=7a284e1614a38286477ed2334ecbdde78e87b79c) and - [`cricket::TurnPort`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/turn_port.h;l=44;drc=ffb7603b6025fbd6e79f360d293ab49092bded54). + [`webrtc::TurnPort`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/turn_port.h;l=44;drc=ffb7603b6025fbd6e79f360d293ab49092bded54). The ports share lots of functionality in a base class, - [`cricket::Port`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/port.h;l=187;drc=3ba7beba29c4e542c4a9bffcc5a47d5e911865be). + [`webrtc::Port`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/port.h;l=187;drc=3ba7beba29c4e542c4a9bffcc5a47d5e911865be). -* [`cricket::Candidate`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/candidate.h;l=30;drc=10542f21c8e4e2d60b136fab45338f2b1e132dde) - represents an address discovered by a `cricket::Port`. A candidate can be +* [`webrtc::Candidate`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/candidate.h;l=30;drc=10542f21c8e4e2d60b136fab45338f2b1e132dde) + represents an address discovered by a `webrtc::Port`. A candidate can be local (i.e discovered by a local port) or remote. Remote candidates are transported using signaling, i.e outside of webrtc. There are 4 types of candidates: `local`, `stun`, `prflx` or `relay` ([standard](https://developer.mozilla.org/en-US/docs/Web/API/RTCIceCandidateType)) -* [`cricket::Connection`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/connection.h) - provides the management of a `cricket::CandidatePair`, i.e for sending data +* [`webrtc::Connection`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/connection.h) + provides the management of a `webrtc::CandidatePair`, i.e for sending data between two candidates. It sends STUN Binding requests (aka STUN pings) to verify that packets can traverse back and forth and keep connections alive (both that NAT binding is kept, and that the remote peer still wants the connection to remain open). 
-* `cricket::P2PTransportChannel` uses an - [`cricket::PortAllocator`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/port_allocator.h;l=335;drc=9438fb3fff97c803d1ead34c0e4f223db168526f) - to create ports and discover local candidates. The `cricket::PortAllocator` +* `webrtc::P2PTransportChannel` uses an + [`webrtc::PortAllocator`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/port_allocator.h;l=335;drc=9438fb3fff97c803d1ead34c0e4f223db168526f) + to create ports and discover local candidates. The `webrtc::PortAllocator` is implemented by - [`cricket::BasicPortAllocator`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/client/basic_port_allocator.h;l=29;drc=e27f3dea8293884701283a54f90f8a429ea99505). + [`webrtc::BasicPortAllocator`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/client/basic_port_allocator.h;l=29;drc=e27f3dea8293884701283a54f90f8a429ea99505). -* `cricket::P2PTransportChannel` uses an - [`cricket::IceControllerInterface`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/ice_controller_interface.h;l=73;drc=9438fb3fff97c803d1ead34c0e4f223db168526f) - to manage a set of connections. The `cricket::IceControllerInterface` - decides which `cricket::Connection` to send data on. +* `webrtc::P2PTransportChannel` uses an + [`webrtc::IceControllerInterface`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/ice_controller_interface.h;l=73;drc=9438fb3fff97c803d1ead34c0e4f223db168526f) + to manage a set of connections. The `webrtc::IceControllerInterface` + decides which `webrtc::Connection` to send data on. ## Connection establishment @@ -65,14 +65,14 @@ All of these steps are invoked by interactions with `PeerConnection`. 1. [`P2PTransportChannel::MaybeStartGathering`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/p2p_transport_channel.cc;l=864;drc=0ccfbd2de7bc3b237a0f8c30f48666c97b9e5523) This function is invoked as part of `PeerConnection::SetLocalDescription`. - `P2PTransportChannel` will use the `cricket::PortAllocator` to create a - `cricket::PortAllocatorSession`. The `cricket::PortAllocatorSession` will + `P2PTransportChannel` will use the `webrtc::PortAllocator` to create a + `webrtc::PortAllocatorSession`. The `webrtc::PortAllocatorSession` will create local ports as configured, and the ports will start gathering candidates. 2. [`IceTransportInternal::SignalCandidateGathered`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/ice_transport_internal.h;l=293;drc=8cb97062880b0e0a78f9d578370a01aced81a13f) When a port finds a local candidate, it will be added to a list on - `cricket::P2PTransportChannel` and signaled to application using + `webrtc::P2PTransportChannel` and signaled to application using `IceTransportInternal::SignalCandidateGathered`. A p2p application can then send them to peer using favorite transport mechanism whereas a client-server application will do nothing. @@ -83,20 +83,20 @@ All of these steps are invoked by interactions with `PeerConnection`. `PeerConnection::SetRemoteDescription` has been called!), this will trickle down to `P2PTransportChannel::AddRemoteCandidate`. `P2PTransportChannel` will combine the remote candidate with all compatible local candidates to - form new `cricket::Connection`(s). Candidates are compatible if it is + form new `webrtc::Connection`(s). 
Candidates are compatible if it is possible to send/receive data (e.g ipv4 can only send to ipv4, tcp can only - connect to tcp etc...) The newly formed `cricket::Connection`(s) will be - added to the `cricket::IceController` that will decide which - `cricket::Connection` to send STUN ping on. + connect to tcp etc...) The newly formed `webrtc::Connection`(s) will be + added to the `webrtc::IceController` that will decide which + `webrtc::Connection` to send STUN ping on. 4. [`P2PTransportChannel::SignalCandidatePairChanged`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/ice_transport_internal.h;l=310;drc=8cb97062880b0e0a78f9d578370a01aced81a13f) - When a remote connection replies to a STUN ping, `cricket::IceController` + When a remote connection replies to a STUN ping, `webrtc::IceController` will instruct `P2PTransportChannel` to use the connection. This is signalled up the stack using `P2PTransportChannel::SignalCandidatePairChanged`. Note - that `cricket::IceController` will continue to send STUN pings on the + that `webrtc::IceController` will continue to send STUN pings on the selected connection, as well as other connections. 5. [`P2PTransportChannel::SignalIceTransportStateChanged`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/ice_transport_internal.h;l=323;drc=8cb97062880b0e0a78f9d578370a01aced81a13f) The initial selection of a connection makes `P2PTransportChannel` signal up - stack that state has changed, which may make [`cricket::DtlsTransportInternal`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/dtls_transport_internal.h;l=63;drc=653bab6790ac92c513b7cf4cd3ad59039c589a95) + stack that state has changed, which may make [`webrtc::DtlsTransportInternal`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/p2p/base/dtls_transport_internal.h;l=63;drc=653bab6790ac92c513b7cf4cd3ad59039c589a95) initiate a DTLS handshake (depending on the DTLS role). diff --git a/p2p/stunprober/stun_prober.cc b/p2p/stunprober/stun_prober.cc deleted file mode 100644 index 977ead4d72..0000000000 --- a/p2p/stunprober/stun_prober.cc +++ /dev/null @@ -1,610 +0,0 @@ -/* - * Copyright 2015 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "p2p/stunprober/stun_prober.h" - -#include -#include -#include -#include -#include - -#include "api/packet_socket_factory.h" -#include "api/task_queue/pending_task_safety_flag.h" -#include "api/transport/stun.h" -#include "api/units/time_delta.h" -#include "rtc_base/async_packet_socket.h" -#include "rtc_base/async_resolver_interface.h" -#include "rtc_base/checks.h" -#include "rtc_base/helpers.h" -#include "rtc_base/logging.h" -#include "rtc_base/thread.h" -#include "rtc_base/time_utils.h" - -namespace stunprober { - -namespace { -using ::webrtc::SafeTask; -using ::webrtc::TimeDelta; - -const int THREAD_WAKE_UP_INTERVAL_MS = 5; - -template -void IncrementCounterByAddress(std::map* counter_per_ip, const T& ip) { - counter_per_ip->insert(std::make_pair(ip, 0)).first->second++; -} - -} // namespace - -// A requester tracks the requests and responses from a single socket to many -// STUN servers -class StunProber::Requester : public sigslot::has_slots<> { - public: - // Each Request maps to a request and response. - struct Request { - // Actual time the STUN bind request was sent. - int64_t sent_time_ms = 0; - // Time the response was received. - int64_t received_time_ms = 0; - - // Server reflexive address from STUN response for this given request. - rtc::SocketAddress srflx_addr; - - rtc::IPAddress server_addr; - - int64_t rtt() { return received_time_ms - sent_time_ms; } - void ProcessResponse(const char* buf, size_t buf_len); - }; - - // StunProber provides `server_ips` for Requester to probe. For shared - // socket mode, it'll be all the resolved IP addresses. For non-shared mode, - // it'll just be a single address. - Requester(StunProber* prober, - rtc::AsyncPacketSocket* socket, - const std::vector& server_ips); - ~Requester() override; - - Requester(const Requester&) = delete; - Requester& operator=(const Requester&) = delete; - - // There is no callback for SendStunRequest as the underneath socket send is - // expected to be completed immediately. Otherwise, it'll skip this request - // and move to the next one. - void SendStunRequest(); - - void OnStunResponseReceived(rtc::AsyncPacketSocket* socket, - const char* buf, - size_t size, - const rtc::SocketAddress& addr, - const int64_t& packet_time_us); - - const std::vector& requests() { return requests_; } - - // Whether this Requester has completed all requests. - bool Done() { - return static_cast(num_request_sent_) == server_ips_.size(); - } - - private: - Request* GetRequestByAddress(const rtc::IPAddress& ip); - - StunProber* prober_; - - // The socket for this session. - std::unique_ptr socket_; - - // Temporary SocketAddress and buffer for RecvFrom. 
- rtc::SocketAddress addr_; - std::unique_ptr response_packet_; - - std::vector requests_; - std::vector server_ips_; - int16_t num_request_sent_ = 0; - int16_t num_response_received_ = 0; - - webrtc::SequenceChecker& thread_checker_; -}; - -StunProber::Requester::Requester( - StunProber* prober, - rtc::AsyncPacketSocket* socket, - const std::vector& server_ips) - : prober_(prober), - socket_(socket), - response_packet_(new rtc::ByteBufferWriter(nullptr, kMaxUdpBufferSize)), - server_ips_(server_ips), - thread_checker_(prober->thread_checker_) { - socket_->SignalReadPacket.connect( - this, &StunProber::Requester::OnStunResponseReceived); -} - -StunProber::Requester::~Requester() { - if (socket_) { - socket_->Close(); - } - for (auto* req : requests_) { - if (req) { - delete req; - } - } -} - -void StunProber::Requester::SendStunRequest() { - RTC_DCHECK(thread_checker_.IsCurrent()); - requests_.push_back(new Request()); - Request& request = *(requests_.back()); - // Random transaction ID, STUN_BINDING_REQUEST - cricket::StunMessage message(cricket::STUN_BINDING_REQUEST); - - std::unique_ptr request_packet( - new rtc::ByteBufferWriter(nullptr, kMaxUdpBufferSize)); - if (!message.Write(request_packet.get())) { - prober_->ReportOnFinished(WRITE_FAILED); - return; - } - - auto addr = server_ips_[num_request_sent_]; - request.server_addr = addr.ipaddr(); - - // The write must succeed immediately. Otherwise, the calculating of the STUN - // request timing could become too complicated. Callback is ignored by passing - // empty AsyncCallback. - rtc::PacketOptions options; - int rv = socket_->SendTo(const_cast(request_packet->Data()), - request_packet->Length(), addr, options); - if (rv < 0) { - prober_->ReportOnFinished(WRITE_FAILED); - return; - } - - request.sent_time_ms = rtc::TimeMillis(); - - num_request_sent_++; - RTC_DCHECK(static_cast(num_request_sent_) <= server_ips_.size()); -} - -void StunProber::Requester::Request::ProcessResponse(const char* buf, - size_t buf_len) { - int64_t now = rtc::TimeMillis(); - rtc::ByteBufferReader message(buf, buf_len); - cricket::StunMessage stun_response; - if (!stun_response.Read(&message)) { - // Invalid or incomplete STUN packet. - received_time_ms = 0; - return; - } - - // Get external address of the socket. - const cricket::StunAddressAttribute* addr_attr = - stun_response.GetAddress(cricket::STUN_ATTR_MAPPED_ADDRESS); - if (addr_attr == nullptr) { - // Addresses not available to detect whether or not behind a NAT. - return; - } - - if (addr_attr->family() != cricket::STUN_ADDRESS_IPV4 && - addr_attr->family() != cricket::STUN_ADDRESS_IPV6) { - return; - } - - received_time_ms = now; - - srflx_addr = addr_attr->GetAddress(); -} - -void StunProber::Requester::OnStunResponseReceived( - rtc::AsyncPacketSocket* socket, - const char* buf, - size_t size, - const rtc::SocketAddress& addr, - const int64_t& /* packet_time_us */) { - RTC_DCHECK(thread_checker_.IsCurrent()); - RTC_DCHECK(socket_); - Request* request = GetRequestByAddress(addr.ipaddr()); - if (!request) { - // Something is wrong, finish the test. 
- prober_->ReportOnFinished(GENERIC_FAILURE); - return; - } - - num_response_received_++; - request->ProcessResponse(buf, size); -} - -StunProber::Requester::Request* StunProber::Requester::GetRequestByAddress( - const rtc::IPAddress& ipaddr) { - RTC_DCHECK(thread_checker_.IsCurrent()); - for (auto* request : requests_) { - if (request->server_addr == ipaddr) { - return request; - } - } - - return nullptr; -} - -StunProber::Stats::Stats() = default; - -StunProber::Stats::~Stats() = default; - -StunProber::ObserverAdapter::ObserverAdapter() = default; - -StunProber::ObserverAdapter::~ObserverAdapter() = default; - -void StunProber::ObserverAdapter::OnPrepared(StunProber* stunprober, - Status status) { - if (status == SUCCESS) { - stunprober->Start(this); - } else { - callback_(stunprober, status); - } -} - -void StunProber::ObserverAdapter::OnFinished(StunProber* stunprober, - Status status) { - callback_(stunprober, status); -} - -StunProber::StunProber(rtc::PacketSocketFactory* socket_factory, - rtc::Thread* thread, - std::vector networks) - : interval_ms_(0), - socket_factory_(socket_factory), - thread_(thread), - networks_(std::move(networks)) {} - -StunProber::~StunProber() { - RTC_DCHECK(thread_checker_.IsCurrent()); - for (auto* req : requesters_) { - if (req) { - delete req; - } - } - for (auto* s : sockets_) { - if (s) { - delete s; - } - } -} - -bool StunProber::Start(const std::vector& servers, - bool shared_socket_mode, - int interval_ms, - int num_request_per_ip, - int timeout_ms, - const AsyncCallback callback) { - observer_adapter_.set_callback(callback); - return Prepare(servers, shared_socket_mode, interval_ms, num_request_per_ip, - timeout_ms, &observer_adapter_); -} - -bool StunProber::Prepare(const std::vector& servers, - bool shared_socket_mode, - int interval_ms, - int num_request_per_ip, - int timeout_ms, - StunProber::Observer* observer) { - RTC_DCHECK(thread_checker_.IsCurrent()); - interval_ms_ = interval_ms; - shared_socket_mode_ = shared_socket_mode; - - requests_per_ip_ = num_request_per_ip; - if (requests_per_ip_ == 0 || servers.size() == 0) { - return false; - } - - timeout_ms_ = timeout_ms; - servers_ = servers; - observer_ = observer; - // Remove addresses that are already resolved. 
- for (auto it = servers_.begin(); it != servers_.end();) { - if (it->ipaddr().family() != AF_UNSPEC) { - all_servers_addrs_.push_back(*it); - it = servers_.erase(it); - } else { - ++it; - } - } - if (servers_.empty()) { - CreateSockets(); - return true; - } - return ResolveServerName(servers_.back()); -} - -bool StunProber::Start(StunProber::Observer* observer) { - observer_ = observer; - if (total_ready_sockets_ != total_socket_required()) { - return false; - } - MaybeScheduleStunRequests(); - return true; -} - -bool StunProber::ResolveServerName(const rtc::SocketAddress& addr) { - rtc::AsyncResolverInterface* resolver = - socket_factory_->CreateAsyncResolver(); - if (!resolver) { - return false; - } - resolver->SignalDone.connect(this, &StunProber::OnServerResolved); - resolver->Start(addr); - return true; -} - -void StunProber::OnSocketReady(rtc::AsyncPacketSocket* socket, - const rtc::SocketAddress& addr) { - total_ready_sockets_++; - if (total_ready_sockets_ == total_socket_required()) { - ReportOnPrepared(SUCCESS); - } -} - -void StunProber::OnServerResolved(rtc::AsyncResolverInterface* resolver) { - RTC_DCHECK(thread_checker_.IsCurrent()); - - if (resolver->GetError() == 0) { - rtc::SocketAddress addr(resolver->address().ipaddr(), - resolver->address().port()); - all_servers_addrs_.push_back(addr); - } - - // Deletion of AsyncResolverInterface can't be done in OnResolveResult which - // handles SignalDone. - thread_->PostTask([resolver] { resolver->Destroy(false); }); - servers_.pop_back(); - - if (servers_.size()) { - if (!ResolveServerName(servers_.back())) { - ReportOnPrepared(RESOLVE_FAILED); - } - return; - } - - if (all_servers_addrs_.size() == 0) { - ReportOnPrepared(RESOLVE_FAILED); - return; - } - - CreateSockets(); -} - -void StunProber::CreateSockets() { - // Dedupe. - std::set addrs(all_servers_addrs_.begin(), - all_servers_addrs_.end()); - all_servers_addrs_.assign(addrs.begin(), addrs.end()); - - // Prepare all the sockets beforehand. All of them will bind to "any" address. - while (sockets_.size() < total_socket_required()) { - std::unique_ptr socket( - socket_factory_->CreateUdpSocket(rtc::SocketAddress(INADDR_ANY, 0), 0, - 0)); - if (!socket) { - ReportOnPrepared(GENERIC_FAILURE); - return; - } - // Chrome and WebRTC behave differently in terms of the state of a socket - // once returned from PacketSocketFactory::CreateUdpSocket. 
- if (socket->GetState() == rtc::AsyncPacketSocket::STATE_BINDING) { - socket->SignalAddressReady.connect(this, &StunProber::OnSocketReady); - } else { - OnSocketReady(socket.get(), rtc::SocketAddress(INADDR_ANY, 0)); - } - sockets_.push_back(socket.release()); - } -} - -StunProber::Requester* StunProber::CreateRequester() { - RTC_DCHECK(thread_checker_.IsCurrent()); - if (!sockets_.size()) { - return nullptr; - } - StunProber::Requester* requester; - if (shared_socket_mode_) { - requester = new Requester(this, sockets_.back(), all_servers_addrs_); - } else { - std::vector server_ip; - server_ip.push_back( - all_servers_addrs_[(num_request_sent_ % all_servers_addrs_.size())]); - requester = new Requester(this, sockets_.back(), server_ip); - } - - sockets_.pop_back(); - return requester; -} - -bool StunProber::SendNextRequest() { - if (!current_requester_ || current_requester_->Done()) { - current_requester_ = CreateRequester(); - requesters_.push_back(current_requester_); - } - if (!current_requester_) { - return false; - } - current_requester_->SendStunRequest(); - num_request_sent_++; - return true; -} - -bool StunProber::should_send_next_request(int64_t now) { - if (interval_ms_ < THREAD_WAKE_UP_INTERVAL_MS) { - return now >= next_request_time_ms_; - } else { - return (now + (THREAD_WAKE_UP_INTERVAL_MS / 2)) >= next_request_time_ms_; - } -} - -int StunProber::get_wake_up_interval_ms() { - if (interval_ms_ < THREAD_WAKE_UP_INTERVAL_MS) { - return 1; - } else { - return THREAD_WAKE_UP_INTERVAL_MS; - } -} - -void StunProber::MaybeScheduleStunRequests() { - RTC_DCHECK_RUN_ON(thread_); - int64_t now = rtc::TimeMillis(); - - if (Done()) { - thread_->PostDelayedTask( - SafeTask(task_safety_.flag(), [this] { ReportOnFinished(SUCCESS); }), - TimeDelta::Millis(timeout_ms_)); - return; - } - if (should_send_next_request(now)) { - if (!SendNextRequest()) { - ReportOnFinished(GENERIC_FAILURE); - return; - } - next_request_time_ms_ = now + interval_ms_; - } - thread_->PostDelayedTask( - SafeTask(task_safety_.flag(), [this] { MaybeScheduleStunRequests(); }), - TimeDelta::Millis(get_wake_up_interval_ms())); -} - -bool StunProber::GetStats(StunProber::Stats* prob_stats) const { - // No need to be on the same thread. - if (!prob_stats) { - return false; - } - - StunProber::Stats stats; - - int rtt_sum = 0; - int64_t first_sent_time = 0; - int64_t last_sent_time = 0; - NatType nat_type = NATTYPE_INVALID; - - // Track of how many srflx IP that we have seen. - std::set srflx_ips; - - // If we're not receiving any response on a given IP, all requests sent to - // that IP should be ignored as this could just be an DNS error. 
- std::map num_response_per_server; - std::map num_request_per_server; - - for (auto* requester : requesters_) { - std::map num_response_per_srflx_addr; - for (auto* request : requester->requests()) { - if (request->sent_time_ms <= 0) { - continue; - } - - ++stats.raw_num_request_sent; - IncrementCounterByAddress(&num_request_per_server, request->server_addr); - - if (!first_sent_time) { - first_sent_time = request->sent_time_ms; - } - last_sent_time = request->sent_time_ms; - - if (request->received_time_ms < request->sent_time_ms) { - continue; - } - - IncrementCounterByAddress(&num_response_per_server, request->server_addr); - IncrementCounterByAddress(&num_response_per_srflx_addr, - request->srflx_addr); - rtt_sum += request->rtt(); - stats.srflx_addrs.insert(request->srflx_addr.ToString()); - srflx_ips.insert(request->srflx_addr.ipaddr()); - } - - // If we're using shared mode and seeing >1 srflx addresses for a single - // requester, it's symmetric NAT. - if (shared_socket_mode_ && num_response_per_srflx_addr.size() > 1) { - nat_type = NATTYPE_SYMMETRIC; - } - } - - // We're probably not behind a regular NAT. We have more than 1 distinct - // server reflexive IPs. - if (srflx_ips.size() > 1) { - return false; - } - - int num_sent = 0; - int num_received = 0; - int num_server_ip_with_response = 0; - - for (const auto& kv : num_response_per_server) { - RTC_DCHECK_GT(kv.second, 0); - num_server_ip_with_response++; - num_received += kv.second; - num_sent += num_request_per_server[kv.first]; - } - - // Shared mode is only true if we use the shared socket and there are more - // than 1 responding servers. - stats.shared_socket_mode = - shared_socket_mode_ && (num_server_ip_with_response > 1); - - if (stats.shared_socket_mode && nat_type == NATTYPE_INVALID) { - nat_type = NATTYPE_NON_SYMMETRIC; - } - - // If we could find a local IP matching srflx, we're not behind a NAT. - rtc::SocketAddress srflx_addr; - if (stats.srflx_addrs.size() && - !srflx_addr.FromString(*(stats.srflx_addrs.begin()))) { - return false; - } - for (const auto* net : networks_) { - if (srflx_addr.ipaddr() == net->GetBestIP()) { - nat_type = stunprober::NATTYPE_NONE; - stats.host_ip = net->GetBestIP().ToString(); - break; - } - } - - // Finally, we know we're behind a NAT but can't determine which type it is. - if (nat_type == NATTYPE_INVALID) { - nat_type = NATTYPE_UNKNOWN; - } - - stats.nat_type = nat_type; - stats.num_request_sent = num_sent; - stats.num_response_received = num_received; - stats.target_request_interval_ns = interval_ms_ * 1000; - - if (num_sent) { - stats.success_percent = static_cast(100 * num_received / num_sent); - } - - if (stats.raw_num_request_sent > 1) { - stats.actual_request_interval_ns = - (1000 * (last_sent_time - first_sent_time)) / - (stats.raw_num_request_sent - 1); - } - - if (num_received) { - stats.average_rtt_ms = static_cast((rtt_sum / num_received)); - } - - *prob_stats = stats; - return true; -} - -void StunProber::ReportOnPrepared(StunProber::Status status) { - if (observer_) { - observer_->OnPrepared(this, status); - } -} - -void StunProber::ReportOnFinished(StunProber::Status status) { - if (observer_) { - observer_->OnFinished(this, status); - } -} - -} // namespace stunprober diff --git a/p2p/stunprober/stun_prober.h b/p2p/stunprober/stun_prober.h deleted file mode 100644 index 7d5094a3b9..0000000000 --- a/p2p/stunprober/stun_prober.h +++ /dev/null @@ -1,250 +0,0 @@ -/* - * Copyright 2015 The WebRTC Project Authors. All rights reserved. 
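// For context, a minimal sketch of how the StunProber implementation removed
// above was typically driven: Prepare() resolves the servers and binds the
// sockets, Start() is kicked off from Observer::OnPrepared(), and GetStats()
// is read once Observer::OnFinished() fires. The interval/timeout values and
// the logging are illustrative; the calls follow the deleted signatures, with
// container element types inferred from how the prober uses them.
#include <memory>
#include <vector>

#include "p2p/stunprober/stun_prober.h"
#include "rtc_base/logging.h"

class ProbeObserver : public stunprober::StunProber::Observer {
 public:
  void OnPrepared(stunprober::StunProber* prober,
                  stunprober::StunProber::Status status) override {
    if (status == stunprober::StunProber::SUCCESS) {
      // Sockets are bound and servers resolved; start sending binding requests.
      prober->Start(this);
    }
  }
  void OnFinished(stunprober::StunProber* prober,
                  stunprober::StunProber::Status status) override {
    stunprober::StunProber::Stats stats;
    if (status == stunprober::StunProber::SUCCESS && prober->GetStats(&stats)) {
      RTC_LOG(LS_INFO) << "NAT type " << stats.nat_type << ", average RTT "
                       << stats.average_rtt_ms << " ms";
    }
  }
};

std::unique_ptr<stunprober::StunProber> StartProbe(
    rtc::PacketSocketFactory* factory,
    rtc::Thread* network_thread,
    std::vector<const rtc::Network*> networks,
    const std::vector<rtc::SocketAddress>& servers,
    stunprober::StunProber::Observer* observer) {
  auto prober = std::make_unique<stunprober::StunProber>(
      factory, network_thread, std::move(networks));
  // 10 ms between requests, 3 requests per resolved IP, 1 s response timeout.
  prober->Prepare(servers, /*shared_socket_mode=*/true, /*interval_ms=*/10,
                  /*num_request_per_ip=*/3, /*timeout_ms=*/1000, observer);
  return prober;  // Caller keeps the prober alive until OnFinished().
}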
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef P2P_STUNPROBER_STUN_PROBER_H_ -#define P2P_STUNPROBER_STUN_PROBER_H_ - -#include -#include -#include - -#include "api/sequence_checker.h" -#include "api/task_queue/pending_task_safety_flag.h" -#include "rtc_base/byte_buffer.h" -#include "rtc_base/ip_address.h" -#include "rtc_base/network.h" -#include "rtc_base/socket_address.h" -#include "rtc_base/system/rtc_export.h" -#include "rtc_base/thread.h" - -namespace rtc { -class AsyncPacketSocket; -class PacketSocketFactory; -class Thread; -class NetworkManager; -class AsyncResolverInterface; -} // namespace rtc - -namespace stunprober { - -class StunProber; - -static const int kMaxUdpBufferSize = 1200; - -typedef std::function AsyncCallback; - -enum NatType { - NATTYPE_INVALID, - NATTYPE_NONE, // Not behind a NAT. - NATTYPE_UNKNOWN, // Behind a NAT but type can't be determine. - NATTYPE_SYMMETRIC, // Behind a symmetric NAT. - NATTYPE_NON_SYMMETRIC // Behind a non-symmetric NAT. -}; - -class RTC_EXPORT StunProber : public sigslot::has_slots<> { - public: - enum Status { // Used in UMA_HISTOGRAM_ENUMERATION. - SUCCESS, // Successfully received bytes from the server. - GENERIC_FAILURE, // Generic failure. - RESOLVE_FAILED, // Host resolution failed. - WRITE_FAILED, // Sending a message to the server failed. - READ_FAILED, // Reading the reply from the server failed. - }; - - class Observer { - public: - virtual ~Observer() = default; - virtual void OnPrepared(StunProber* prober, StunProber::Status status) = 0; - virtual void OnFinished(StunProber* prober, StunProber::Status status) = 0; - }; - - struct RTC_EXPORT Stats { - Stats(); - ~Stats(); - - // `raw_num_request_sent` is the total number of requests - // sent. `num_request_sent` is the count of requests against a server where - // we see at least one response. `num_request_sent` is designed to protect - // against DNS resolution failure or the STUN server is not responsive - // which could skew the result. - int raw_num_request_sent = 0; - int num_request_sent = 0; - - int num_response_received = 0; - NatType nat_type = NATTYPE_INVALID; - int average_rtt_ms = -1; - int success_percent = 0; - int target_request_interval_ns = 0; - int actual_request_interval_ns = 0; - - // Also report whether this trial can't be considered truly as shared - // mode. Share mode only makes sense when we have multiple IP resolved and - // successfully probed. - bool shared_socket_mode = false; - - std::string host_ip; - - // If the srflx_addrs has more than 1 element, the NAT is symmetric. - std::set srflx_addrs; - }; - - StunProber(rtc::PacketSocketFactory* socket_factory, - rtc::Thread* thread, - std::vector networks); - ~StunProber() override; - - StunProber(const StunProber&) = delete; - StunProber& operator=(const StunProber&) = delete; - - // Begin performing the probe test against the `servers`. If - // `shared_socket_mode` is false, each request will be done with a new socket. - // Otherwise, a unique socket will be used for a single round of requests - // against all resolved IPs. No single socket will be used against a given IP - // more than once. 
The interval of requests will be as close to the requested - // inter-probe interval `stun_ta_interval_ms` as possible. After sending out - // the last scheduled request, the probe will wait `timeout_ms` for request - // responses and then call `finish_callback`. `requests_per_ip` indicates how - // many requests should be tried for each resolved IP address. In shared mode, - // (the number of sockets to be created) equals to `requests_per_ip`. In - // non-shared mode, (the number of sockets) equals to requests_per_ip * (the - // number of resolved IP addresses). TODO(guoweis): Remove this once - // everything moved to Prepare() and Run(). - bool Start(const std::vector& servers, - bool shared_socket_mode, - int stun_ta_interval_ms, - int requests_per_ip, - int timeout_ms, - AsyncCallback finish_callback); - - // TODO(guoweis): The combination of Prepare() and Run() are equivalent to the - // Start() above. Remove Start() once everything is migrated. - bool Prepare(const std::vector& servers, - bool shared_socket_mode, - int stun_ta_interval_ms, - int requests_per_ip, - int timeout_ms, - StunProber::Observer* observer); - - // Start to send out the STUN probes. - bool Start(StunProber::Observer* observer); - - // Method to retrieve the Stats once `finish_callback` is invoked. Returning - // false when the result is inconclusive, for example, whether it's behind a - // NAT or not. - bool GetStats(Stats* stats) const; - - int estimated_execution_time() { - return static_cast(requests_per_ip_ * all_servers_addrs_.size() * - interval_ms_); - } - - private: - // A requester tracks the requests and responses from a single socket to many - // STUN servers. - class Requester; - - // TODO(guoweis): Remove this once all dependencies move away from - // AsyncCallback. - class ObserverAdapter : public Observer { - public: - ObserverAdapter(); - ~ObserverAdapter() override; - - void set_callback(AsyncCallback callback) { callback_ = callback; } - void OnPrepared(StunProber* stunprober, Status status) override; - void OnFinished(StunProber* stunprober, Status status) override; - - private: - AsyncCallback callback_; - }; - - bool ResolveServerName(const rtc::SocketAddress& addr); - void OnServerResolved(rtc::AsyncResolverInterface* resolver); - - void OnSocketReady(rtc::AsyncPacketSocket* socket, - const rtc::SocketAddress& addr); - - void CreateSockets(); - - bool Done() { - return num_request_sent_ >= requests_per_ip_ * all_servers_addrs_.size(); - } - - size_t total_socket_required() { - return (shared_socket_mode_ ? 1 : all_servers_addrs_.size()) * - requests_per_ip_; - } - - bool should_send_next_request(int64_t now); - int get_wake_up_interval_ms(); - - bool SendNextRequest(); - - // Will be invoked in 1ms intervals and schedule the next request from the - // `current_requester_` if the time has passed for another request. - void MaybeScheduleStunRequests(); - - void ReportOnPrepared(StunProber::Status status); - void ReportOnFinished(StunProber::Status status); - - Requester* CreateRequester(); - - Requester* current_requester_ = nullptr; - - // The time when the next request should go out. - int64_t next_request_time_ms_ = 0; - - // Total requests sent so far. - uint32_t num_request_sent_ = 0; - - bool shared_socket_mode_ = false; - - // How many requests should be done against each resolved IP. - uint32_t requests_per_ip_ = 0; - - // Milliseconds to pause between each STUN request. - int interval_ms_; - - // Timeout period after the last request is sent. 
- int timeout_ms_; - - // STUN server name to be resolved. - std::vector servers_; - - // Weak references. - rtc::PacketSocketFactory* socket_factory_; - rtc::Thread* thread_; - - // Accumulate all resolved addresses. - std::vector all_servers_addrs_; - - // The set of STUN probe sockets and their state. - std::vector requesters_; - - webrtc::SequenceChecker thread_checker_; - - // Temporary storage for created sockets. - std::vector sockets_; - // This tracks how many of the sockets are ready. - size_t total_ready_sockets_ = 0; - - Observer* observer_ = nullptr; - // TODO(guoweis): Remove this once all dependencies move away from - // AsyncCallback. - ObserverAdapter observer_adapter_; - - const std::vector networks_; - - webrtc::ScopedTaskSafety task_safety_; -}; - -} // namespace stunprober - -#endif // P2P_STUNPROBER_STUN_PROBER_H_ diff --git a/p2p/stunprober/stun_prober_unittest.cc b/p2p/stunprober/stun_prober_unittest.cc deleted file mode 100644 index b57f93b634..0000000000 --- a/p2p/stunprober/stun_prober_unittest.cc +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright 2015 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "p2p/stunprober/stun_prober.h" - -#include - -#include -#include - -#include "p2p/base/basic_packet_socket_factory.h" -#include "p2p/base/test_stun_server.h" -#include "rtc_base/gunit.h" -#include "rtc_base/ip_address.h" -#include "rtc_base/ssl_adapter.h" -#include "rtc_base/virtual_socket_server.h" -#include "test/gtest.h" - -using stunprober::AsyncCallback; -using stunprober::StunProber; - -namespace stunprober { - -namespace { - -const rtc::SocketAddress kLocalAddr("192.168.0.1", 0); -const rtc::SocketAddress kStunAddr1("1.1.1.1", 3478); -const rtc::SocketAddress kStunAddr2("1.1.1.2", 3478); -const rtc::SocketAddress kFailedStunAddr("1.1.1.3", 3478); -const rtc::SocketAddress kStunMappedAddr("77.77.77.77", 0); - -} // namespace - -class StunProberTest : public ::testing::Test { - public: - StunProberTest() - : ss_(std::make_unique()), - main_(ss_.get()), - result_(StunProber::SUCCESS), - stun_server_1_(cricket::TestStunServer::Create(ss_.get(), kStunAddr1)), - stun_server_2_(cricket::TestStunServer::Create(ss_.get(), kStunAddr2)) { - stun_server_1_->set_fake_stun_addr(kStunMappedAddr); - stun_server_2_->set_fake_stun_addr(kStunMappedAddr); - rtc::InitializeSSL(); - } - - void set_expected_result(int result) { result_ = result; } - - void StartProbing(rtc::PacketSocketFactory* socket_factory, - const std::vector& addrs, - std::vector networks, - bool shared_socket, - uint16_t interval, - uint16_t pings_per_ip) { - prober_ = std::make_unique( - socket_factory, rtc::Thread::Current(), std::move(networks)); - prober_->Start(addrs, shared_socket, interval, pings_per_ip, - 100 /* timeout_ms */, - [this](StunProber* prober, int result) { - StopCallback(prober, result); - }); - } - - void RunProber(bool shared_mode) { - const int pings_per_ip = 3; - std::vector addrs; - addrs.push_back(kStunAddr1); - addrs.push_back(kStunAddr2); - // Add a non-existing server. This shouldn't pollute the result. 
- addrs.push_back(kFailedStunAddr); - - rtc::Network ipv4_network1("test_eth0", "Test Network Adapter 1", - rtc::IPAddress(0x12345600U), 24); - ipv4_network1.AddIP(rtc::IPAddress(0x12345678)); - std::vector networks; - networks.push_back(&ipv4_network1); - - auto socket_factory = - std::make_unique(ss_.get()); - - // Set up the expected results for verification. - std::set srflx_addresses; - srflx_addresses.insert(kStunMappedAddr.ToString()); - const uint32_t total_pings_tried = - static_cast(pings_per_ip * addrs.size()); - - // The reported total_pings should not count for pings sent to the - // kFailedStunAddr. - const uint32_t total_pings_reported = total_pings_tried - pings_per_ip; - - StartProbing(socket_factory.get(), addrs, std::move(networks), shared_mode, - 3, pings_per_ip); - - WAIT(stopped_, 1000); - - StunProber::Stats stats; - EXPECT_TRUE(prober_->GetStats(&stats)); - EXPECT_EQ(stats.success_percent, 100); - EXPECT_TRUE(stats.nat_type > stunprober::NATTYPE_NONE); - EXPECT_EQ(stats.srflx_addrs, srflx_addresses); - EXPECT_EQ(static_cast(stats.num_request_sent), - total_pings_reported); - EXPECT_EQ(static_cast(stats.num_response_received), - total_pings_reported); - } - - private: - void StopCallback(StunProber* prober, int result) { - EXPECT_EQ(result, result_); - stopped_ = true; - } - - std::unique_ptr ss_; - rtc::AutoSocketServerThread main_; - std::unique_ptr prober_; - int result_ = 0; - bool stopped_ = false; - std::unique_ptr stun_server_1_; - std::unique_ptr stun_server_2_; -}; - -TEST_F(StunProberTest, NonSharedMode) { - RunProber(false); -} - -TEST_F(StunProberTest, SharedMode) { - RunProber(true); -} - -} // namespace stunprober diff --git a/p2p/test/fake_ice_transport.h b/p2p/test/fake_ice_transport.h new file mode 100644 index 0000000000..29b8a9e7b7 --- /dev/null +++ b/p2p/test/fake_ice_transport.h @@ -0,0 +1,687 @@ +/* + * Copyright 2017 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
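// For context, the accounting the deleted test above relies on: with three
// configured servers (one of which never responds) and pings_per_ip = 3, nine
// requests go out in total, but only the six sent to responding servers are
// reported through Stats::num_request_sent / num_response_received.
constexpr int kPingsPerIp = 3;
constexpr int kNumServers = 3;  // kStunAddr1, kStunAddr2, kFailedStunAddr
constexpr int kTotalPingsTried = kPingsPerIp * kNumServers;          // 9
constexpr int kTotalPingsReported = kTotalPingsTried - kPingsPerIp;  // 6
static_assert(kTotalPingsReported == 6,
              "requests to the unresponsive server are excluded from Stats");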
+ */ + +#ifndef P2P_TEST_FAKE_ICE_TRANSPORT_H_ +#define P2P_TEST_FAKE_ICE_TRANSPORT_H_ + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/functional/any_invocable.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/candidate.h" +#include "api/ice_transport_interface.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/transport/enums.h" +#include "api/transport/stun.h" +#include "api/units/time_delta.h" +#include "p2p/base/candidate_pair_interface.h" +#include "p2p/base/connection.h" +#include "p2p/base/connection_info.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/port.h" +#include "p2p/base/transport_description.h" +#include "p2p/dtls/dtls_stun_piggyback_callbacks.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/byte_buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/logging.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_route.h" +#include "rtc_base/socket.h" +#include "rtc_base/task_queue_for_test.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" +#include "rtc_base/time_utils.h" +#include "test/explicit_key_value_config.h" + +namespace webrtc { +using ::webrtc::SafeTask; +using ::webrtc::TimeDelta; + +// All methods must be called on the network thread (which is either the thread +// calling the constructor, or the separate thread explicitly passed to the +// constructor). +class FakeIceTransport : public IceTransportInternal { + public: + explicit FakeIceTransport(absl::string_view name, + int component, + Thread* network_thread = nullptr, + absl::string_view field_trials_string = "") + : name_(name), + component_(component), + network_thread_(network_thread ? network_thread : Thread::Current()), + field_trials_(field_trials_string) { + RTC_DCHECK(network_thread_); + } + + // Must be called either on the network thread, or after the network thread + // has been shut down. + ~FakeIceTransport() override { + if (dest_ && dest_->dest_ == this) { + dest_->dest_ = nullptr; + } + } + + // If async, will send packets by "Post"-ing to message queue instead of + // synchronously "Send"-ing. + void SetAsync(bool async) { + RTC_DCHECK_RUN_ON(network_thread_); + async_ = async; + } + void SetAsyncDelay(int delay_ms) { + RTC_DCHECK_RUN_ON(network_thread_); + async_delay_ms_ = delay_ms; + } + + // SetWritable, SetReceiving and SetDestination are the main methods that can + // be used for testing, to simulate connectivity or lack thereof. + void SetWritable(bool writable) { + RTC_DCHECK_RUN_ON(network_thread_); + set_writable(writable); + } + void SetReceiving(bool receiving) { + RTC_DCHECK_RUN_ON(network_thread_); + set_receiving(receiving); + } + + // Simulates the two transports connecting to each other. + // If `asymmetric` is true this method only affects this FakeIceTransport. + // If false, it affects `dest` as well. + void SetDestination(FakeIceTransport* dest, bool asymmetric = false) { + RTC_DCHECK_RUN_ON(network_thread_); + if (dest == dest_) { + return; + } + RTC_DCHECK(!dest || !dest_) + << "Changing fake destination from one to another is not supported."; + if (dest) { + // This simulates the delivery of candidates. 
+ dest_ = dest; + set_writable(true); + if (!asymmetric) { + dest->SetDestination(this, true); + } + } else { + // Simulates loss of connectivity, by asymmetrically forgetting dest_. + dest_ = nullptr; + set_writable(false); + } + } + + void SetDestinationNotWritable(FakeIceTransport* dest) { + RTC_DCHECK_RUN_ON(network_thread_); + if (dest == dest_) { + return; + } + RTC_DCHECK(!dest || !dest_) + << "Changing fake destination from one to another is not supported."; + + if (dest) { + RTC_DCHECK_RUN_ON(dest->network_thread_); + dest->dest_ = this; + } else if (dest_) { + RTC_DCHECK_RUN_ON(dest_->network_thread_); + dest_->dest_ = nullptr; + } + dest_ = dest; + } + + void SetTransportState(IceTransportState state, + IceTransportStateInternal legacy_state) { + RTC_DCHECK_RUN_ON(network_thread_); + transport_state_ = state; + legacy_transport_state_ = legacy_state; + SignalIceTransportStateChanged(this); + } + + void SetConnectionCount(size_t connection_count) { + RTC_DCHECK_RUN_ON(network_thread_); + size_t old_connection_count = connection_count_; + connection_count_ = connection_count; + if (connection_count) { + had_connection_ = true; + } + // In this fake transport channel, `connection_count_` determines the + // transport state. + if (connection_count_ < old_connection_count) { + SignalStateChanged(this); + } + } + + void SetCandidatesGatheringComplete() { + RTC_DCHECK_RUN_ON(network_thread_); + if (gathering_state_ != webrtc::kIceGatheringComplete) { + gathering_state_ = webrtc::kIceGatheringComplete; + SendGatheringStateEvent(); + } + } + + // Convenience functions for accessing ICE config and other things. + int receiving_timeout() const { + RTC_DCHECK_RUN_ON(network_thread_); + return ice_config_.receiving_timeout_or_default(); + } + bool gather_continually() const { + RTC_DCHECK_RUN_ON(network_thread_); + return ice_config_.gather_continually(); + } + const Candidates& remote_candidates() const { + RTC_DCHECK_RUN_ON(network_thread_); + return remote_candidates_; + } + + // Fake IceTransportInternal implementation. + const std::string& transport_name() const override { return name_; } + int component() const override { return component_; } + IceMode remote_ice_mode() const { + RTC_DCHECK_RUN_ON(network_thread_); + return remote_ice_mode_; + } + const IceParameters* local_ice_parameters() const override { + RTC_DCHECK_RUN_ON(network_thread_); + return &ice_parameters_; + } + const IceParameters* remote_ice_parameters() const override { + RTC_DCHECK_RUN_ON(network_thread_); + return &remote_ice_parameters_; + } + + IceTransportStateInternal GetState() const override { + RTC_DCHECK_RUN_ON(network_thread_); + if (legacy_transport_state_) { + return *legacy_transport_state_; + } + + if (connection_count_ == 0) { + return had_connection_ ? IceTransportStateInternal::STATE_FAILED + : IceTransportStateInternal::STATE_INIT; + } + + if (connection_count_ == 1) { + return IceTransportStateInternal::STATE_COMPLETED; + } + + return IceTransportStateInternal::STATE_CONNECTING; + } + + IceTransportState GetIceTransportState() const override { + RTC_DCHECK_RUN_ON(network_thread_); + if (transport_state_) { + return *transport_state_; + } + + if (connection_count_ == 0) { + return had_connection_ ? 
IceTransportState::kFailed + : IceTransportState::kNew; + } + + if (connection_count_ == 1) { + return IceTransportState::kCompleted; + } + + return IceTransportState::kConnected; + } + + void SetIceRole(IceRole role) override { + RTC_DCHECK_RUN_ON(network_thread_); + role_ = role; + } + IceRole GetIceRole() const override { + RTC_DCHECK_RUN_ON(network_thread_); + return role_; + } + void SetIceParameters(const IceParameters& ice_params) override { + RTC_DCHECK_RUN_ON(network_thread_); + ice_parameters_ = ice_params; + } + void SetRemoteIceParameters(const IceParameters& params) override { + RTC_DCHECK_RUN_ON(network_thread_); + remote_ice_parameters_ = params; + } + + void SetRemoteIceMode(IceMode mode) override { + RTC_DCHECK_RUN_ON(network_thread_); + remote_ice_mode_ = mode; + } + + void MaybeStartGathering() override { + RTC_DCHECK_RUN_ON(network_thread_); + if (gathering_state_ == webrtc::kIceGatheringNew) { + gathering_state_ = webrtc::kIceGatheringGathering; + SendGatheringStateEvent(); + } + } + + IceGatheringState gathering_state() const override { + RTC_DCHECK_RUN_ON(network_thread_); + return gathering_state_; + } + + void SetIceConfig(const IceConfig& config) override { + RTC_DCHECK_RUN_ON(network_thread_); + ice_config_ = config; + } + + const IceConfig& config() const override { return ice_config_; } + + void AddRemoteCandidate(const Candidate& candidate) override { + RTC_DCHECK_RUN_ON(network_thread_); + remote_candidates_.push_back(candidate); + } + void RemoveRemoteCandidate(const Candidate& candidate) override { + RTC_DCHECK_RUN_ON(network_thread_); + auto it = absl::c_find(remote_candidates_, candidate); + if (it == remote_candidates_.end()) { + RTC_LOG(LS_INFO) << "Trying to remove a candidate which doesn't exist."; + return; + } + + remote_candidates_.erase(it); + } + + void RemoveAllRemoteCandidates() override { + RTC_DCHECK_RUN_ON(network_thread_); + remote_candidates_.clear(); + } + + bool GetStats(IceTransportStats* ice_transport_stats) override { + CandidateStats candidate_stats; + ConnectionInfo candidate_pair_stats; + ice_transport_stats->candidate_stats_list.clear(); + ice_transport_stats->candidate_stats_list.push_back(candidate_stats); + ice_transport_stats->connection_infos.clear(); + ice_transport_stats->connection_infos.push_back(candidate_pair_stats); + return true; + } + + std::optional GetRttEstimate() override { return rtt_estimate_; } + + const Connection* selected_connection() const override { return nullptr; } + std::optional GetSelectedCandidatePair() const override { + return std::nullopt; + } + + // Fake PacketTransportInternal implementation. + bool writable() const override { + RTC_DCHECK_RUN_ON(network_thread_); + return writable_; + } + bool receiving() const override { + RTC_DCHECK_RUN_ON(network_thread_); + return receiving_; + } + // If combine is enabled, every two consecutive packets to be sent with + // "SendPacket" will be combined into one outgoing packet. 
+ void combine_outgoing_packets(bool combine) { + RTC_DCHECK_RUN_ON(network_thread_); + combine_outgoing_packets_ = combine; + } + int SendPacket(const char* data, + size_t len, + const AsyncSocketPacketOptions& options, + int flags) override { + RTC_DCHECK_RUN_ON(network_thread_); + if (!dest_) { + return -1; + } + + send_packet_.AppendData(data, len); + if (!combine_outgoing_packets_ || send_packet_.size() > len) { + CopyOnWriteBuffer packet(std::move(send_packet_)); + if (!SendPacketInternal(packet, options, flags)) { + return -1; + } + } + + SentPacketInfo sent_packet(options.packet_id, webrtc::TimeMillis()); + SignalSentPacket(this, sent_packet); + return static_cast(len); + } + + int SetOption(Socket::Option opt, int value) override { + RTC_DCHECK_RUN_ON(network_thread_); + socket_options_[opt] = value; + return true; + } + bool GetOption(Socket::Option opt, int* value) override { + RTC_DCHECK_RUN_ON(network_thread_); + auto it = socket_options_.find(opt); + if (it != socket_options_.end()) { + *value = it->second; + return true; + } else { + return false; + } + } + + int GetError() override { return 0; } + + CopyOnWriteBuffer last_sent_packet() { + RTC_DCHECK_RUN_ON(network_thread_); + return last_sent_packet_; + } + + std::optional network_route() const override { + RTC_DCHECK_RUN_ON(network_thread_); + return network_route_; + } + void SetNetworkRoute(std::optional network_route) { + RTC_DCHECK_RUN_ON(network_thread_); + network_route_ = network_route; + SendTask(network_thread_, [this] { + RTC_DCHECK_RUN_ON(network_thread_); + SignalNetworkRouteChanged(network_route_); + }); + } + + // If `func` return TRUE means that packet will be dropped. + void set_packet_send_filter( + absl::AnyInvocable func) { + RTC_DCHECK_RUN_ON(network_thread_); + packet_send_filter_func_ = std::move(func); + } + + // If `func` return TRUE means that packet will be dropped. 
+ void set_packet_recv_filter( + absl::AnyInvocable func) { + RTC_DCHECK_RUN_ON(network_thread_); + packet_recv_filter_func_ = std::move(func); + } + + void set_rtt_estimate(std::optional value, bool set_async = false) { + rtt_estimate_ = value; + if (value && set_async) { + SetAsync(true); + SetAsyncDelay(*value / 2); + } + } + + void ResetDtlsStunPiggybackCallbacks() override { + dtls_stun_piggyback_callbacks_.reset(); + } + void SetDtlsStunPiggybackCallbacks( + DtlsStunPiggybackCallbacks&& callbacks) override { + if (!callbacks.empty()) { + RTC_LOG(LS_INFO) << name_ << ": SetDtlsStunPiggybackCallbacks"; + } else if (!dtls_stun_piggyback_callbacks_.empty()) { + RTC_LOG(LS_INFO) << name_ << ": ResetDtlsStunPiggybackCallbacks"; + } + dtls_stun_piggyback_callbacks_ = std::move(callbacks); + } + + bool SendIcePing() { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DLOG(LS_INFO) << name_ << ": SendIcePing()"; + last_sent_ping_timestamp_ = webrtc::TimeMicros(); + auto msg = std::make_unique(STUN_BINDING_REQUEST); + MaybeAddDtlsPiggybackingAttributes(msg.get()); + msg->AddFingerprint(); + ByteBufferWriter buf; + msg->Write(&buf); + AsyncSocketPacketOptions options; + options.info_signaled_after_sent.packet_type = + PacketType::kIceConnectivityCheck; + SendPacketInternal(CopyOnWriteBuffer(buf.DataView()), options, 0); + return true; + } + + void MaybeAddDtlsPiggybackingAttributes(StunMessage* msg) { + if (dtls_stun_piggyback_callbacks_.empty()) { + return; + } + + const auto& [attr, ack] = dtls_stun_piggyback_callbacks_.send_data( + static_cast(msg->type())); + + RTC_DLOG(LS_INFO) << name_ << ": Adding attr: " << attr.has_value() + << " ack: " << ack.has_value() << " to stun message: " + << StunMethodToString(msg->type()); + + if (attr) { + msg->AddAttribute(std::make_unique( + STUN_ATTR_META_DTLS_IN_STUN, *attr)); + } + if (ack) { + msg->AddAttribute(std::make_unique( + STUN_ATTR_META_DTLS_IN_STUN_ACK, *ack)); + } + } + + bool SendIcePingConf() { + RTC_DCHECK_RUN_ON(network_thread_); + RTC_DLOG(LS_INFO) << name_ << ": SendIcePingConf()"; + auto msg = std::make_unique(STUN_BINDING_RESPONSE); + MaybeAddDtlsPiggybackingAttributes(msg.get()); + msg->AddFingerprint(); + ByteBufferWriter buf; + msg->Write(&buf); + AsyncSocketPacketOptions options; + options.info_signaled_after_sent.packet_type = + PacketType::kIceConnectivityCheckResponse; + SendPacketInternal(CopyOnWriteBuffer(buf.DataView()), options, 0); + return true; + } + + int GetCountOfReceivedStunMessages(int type) { + return received_stun_messages_per_type[type]; + } + + int GetCountOfReceivedPackets() { return received_packets_; } + + const FieldTrialsView* field_trials() const { return &field_trials_; } + + void set_drop_non_stun_unless_writable(bool value) { + drop_non_stun_unless_writable_ = value; + } + + private: + void set_writable(bool writable) + RTC_EXCLUSIVE_LOCKS_REQUIRED(network_thread_) { + if (writable_ == writable) { + return; + } + RTC_LOG(LS_INFO) << "Change writable_ to " << writable; + writable_ = writable; + if (writable_) { + SignalReadyToSend(this); + } + SignalWritableState(this); + } + + void set_receiving(bool receiving) + RTC_EXCLUSIVE_LOCKS_REQUIRED(network_thread_) { + if (receiving_ == receiving) { + return; + } + receiving_ = receiving; + SignalReceivingState(this); + } + + bool SendPacketInternal(const CopyOnWriteBuffer& packet, + const AsyncSocketPacketOptions& options, + int flags) + RTC_EXCLUSIVE_LOCKS_REQUIRED(network_thread_) { + last_sent_packet_ = packet; + bool is_stun = + 
StunMessage::ValidateFingerprint(packet.data(), packet.size()); + if (packet_send_filter_func_ && + packet_send_filter_func_(packet.data(), packet.size(), options, + flags)) { + RTC_LOG(LS_INFO) << name_ << ": dropping packet len=" << packet.size() + << ", data[0]: " + << static_cast(packet.data()[0]); + return false; + } + + if (drop_non_stun_unless_writable_ && !writable_ && !is_stun) { + RTC_LOG(LS_INFO) << name_ + << ": dropping non stun packet len=" << packet.size() + << ", data[0]: " + << static_cast(packet.data()[0]); + return false; + } + if (async_) { + network_thread_->PostDelayedTask( + SafeTask(task_safety_.flag(), + [this, packet] { + RTC_DCHECK_RUN_ON(network_thread_); + if (dest_) { + dest_->ReceivePacketInternal(packet); + } + }), + TimeDelta::Millis(async_delay_ms_)); + } else { + if (dest_) { + dest_->ReceivePacketInternal(packet); + } + } + return true; + } + + void ReceivePacketInternal(const CopyOnWriteBuffer& packet) { + RTC_DCHECK_RUN_ON(network_thread_); + auto now = webrtc::TimeMicros(); + if (auto msg = GetStunMessage(packet)) { + RTC_LOG(LS_INFO) << name_ << ": RECV STUN message: " + << ", data[0]: " + << static_cast(packet.data()[0]); + + const auto* dtls_piggyback_attr = + msg->GetByteString(STUN_ATTR_META_DTLS_IN_STUN); + const auto* dtls_piggyback_ack = + msg->GetByteString(STUN_ATTR_META_DTLS_IN_STUN_ACK); + RTC_DLOG(LS_INFO) << name_ << ": Got STUN message: " + << StunMethodToString(msg->type()) + << " attr: " << (dtls_piggyback_attr != nullptr) + << " ack: " << (dtls_piggyback_ack != nullptr); + if (!dtls_stun_piggyback_callbacks_.empty()) { + dtls_stun_piggyback_callbacks_.recv_data(dtls_piggyback_attr, + dtls_piggyback_ack); + } + + if (msg->type() == STUN_BINDING_RESPONSE) { + if (!rtt_estimate_ && last_sent_ping_timestamp_) { + rtt_estimate_ = (now - *last_sent_ping_timestamp_) / 1000; + } + set_writable(true); + } + + received_stun_messages_per_type[msg->type()]++; + return; + } + + if (packet_recv_filter_func_ && packet_recv_filter_func_(packet, now)) { + RTC_DLOG(LS_INFO) << name_ + << ": dropping packet at receiver len=" << packet.size() + << ", data[0]: " + << static_cast(packet.data()[0]); + } else { + received_packets_++; + NotifyPacketReceived(ReceivedIpPacket::CreateFromLegacy( + packet.data(), packet.size(), now)); + } + } + + std::unique_ptr GetStunMessage(const CopyOnWriteBuffer& packet) { + if (!StunMessage::ValidateFingerprint(packet.data(), packet.size())) { + return nullptr; + } + + std::unique_ptr stun_msg(new IceMessage()); + ByteBufferReader buf(MakeArrayView(packet.data(), packet.size())); + RTC_CHECK(stun_msg->Read(&buf)); + return stun_msg; + } + + const std::string name_; + const int component_; + FakeIceTransport* dest_ RTC_GUARDED_BY(network_thread_) = nullptr; + bool async_ RTC_GUARDED_BY(network_thread_) = false; + int async_delay_ms_ RTC_GUARDED_BY(network_thread_) = 0; + Candidates remote_candidates_ RTC_GUARDED_BY(network_thread_); + IceConfig ice_config_ RTC_GUARDED_BY(network_thread_); + IceRole role_ RTC_GUARDED_BY(network_thread_) = ICEROLE_UNKNOWN; + IceParameters ice_parameters_ RTC_GUARDED_BY(network_thread_); + IceParameters remote_ice_parameters_ RTC_GUARDED_BY(network_thread_); + IceMode remote_ice_mode_ RTC_GUARDED_BY(network_thread_) = ICEMODE_FULL; + size_t connection_count_ RTC_GUARDED_BY(network_thread_) = 0; + std::optional transport_state_ + RTC_GUARDED_BY(network_thread_); + std::optional legacy_transport_state_ + RTC_GUARDED_BY(network_thread_); + IceGatheringState gathering_state_ 
RTC_GUARDED_BY(network_thread_) = + webrtc::kIceGatheringNew; + bool had_connection_ RTC_GUARDED_BY(network_thread_) = false; + bool writable_ RTC_GUARDED_BY(network_thread_) = false; + bool receiving_ RTC_GUARDED_BY(network_thread_) = false; + bool combine_outgoing_packets_ RTC_GUARDED_BY(network_thread_) = false; + CopyOnWriteBuffer send_packet_ RTC_GUARDED_BY(network_thread_); + std::optional network_route_ RTC_GUARDED_BY(network_thread_); + std::map socket_options_ RTC_GUARDED_BY(network_thread_); + CopyOnWriteBuffer last_sent_packet_ RTC_GUARDED_BY(network_thread_); + Thread* const network_thread_; + ScopedTaskSafetyDetached task_safety_; + std::optional rtt_estimate_; + std::optional last_sent_ping_timestamp_; + + // If filter func return TRUE means that packet will be dropped. + absl::AnyInvocable< + // NOLINTNEXTLINE(readability/casting) - not a cast; false positive! + bool(const char*, size_t, const AsyncSocketPacketOptions&, int)> + packet_send_filter_func_ RTC_GUARDED_BY(network_thread_) = nullptr; + absl::AnyInvocable + packet_recv_filter_func_ RTC_GUARDED_BY(network_thread_) = nullptr; + DtlsStunPiggybackCallbacks dtls_stun_piggyback_callbacks_; + std::map received_stun_messages_per_type; + int received_packets_ = 0; + test::ExplicitKeyValueConfig field_trials_; + bool drop_non_stun_unless_writable_ = false; +}; + +class FakeIceTransportWrapper : public IceTransportInterface { + public: + explicit FakeIceTransportWrapper(std::unique_ptr internal) + : internal_(std::move(internal)) {} + + IceTransportInternal* internal() override { return internal_.get(); } + + private: + std::unique_ptr internal_; +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::FakeIceTransport; +using ::webrtc::FakeIceTransportWrapper; +using ::webrtc::SafeTask; +using ::webrtc::TimeDelta; +} // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // P2P_TEST_FAKE_ICE_TRANSPORT_H_ diff --git a/p2p/base/fake_packet_transport.h b/p2p/test/fake_packet_transport.h similarity index 68% rename from p2p/base/fake_packet_transport.h rename to p2p/test/fake_packet_transport.h index e80af0e008..d563ba5339 100644 --- a/p2p/base/fake_packet_transport.h +++ b/p2p/test/fake_packet_transport.h @@ -8,16 +8,25 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef P2P_BASE_FAKE_PACKET_TRANSPORT_H_ -#define P2P_BASE_FAKE_PACKET_TRANSPORT_H_ +#ifndef P2P_TEST_FAKE_PACKET_TRANSPORT_H_ +#define P2P_TEST_FAKE_PACKET_TRANSPORT_H_ +#include #include +#include #include +#include "api/transport/ecn_marking.h" +#include "api/units/timestamp.h" #include "p2p/base/packet_transport_internal.h" #include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network_route.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/time_utils.h" -namespace rtc { +namespace webrtc { // Used to simulate a packet-based transport. 
class FakePacketTransport : public PacketTransportInternal { @@ -58,15 +67,15 @@ class FakePacketTransport : public PacketTransportInternal { bool receiving() const override { return receiving_; } int SendPacket(const char* data, size_t len, - const PacketOptions& options, - int flags) override { - if (!dest_) { + const AsyncSocketPacketOptions& options, + int /* flags */) override { + if (!dest_ || error_ != 0) { return -1; } CopyOnWriteBuffer packet(data, len); - SendPacketInternal(packet); + SendPacketInternal(packet, options); - SentPacket sent_packet(options.packet_id, TimeMillis()); + SentPacketInfo sent_packet(options.packet_id, TimeMillis()); SignalSentPacket(this, sent_packet); return static_cast(len); } @@ -90,14 +99,17 @@ class FakePacketTransport : public PacketTransportInternal { const CopyOnWriteBuffer* last_sent_packet() { return &last_sent_packet_; } - absl::optional network_route() const override { + std::optional network_route() const override { return network_route_; } - void SetNetworkRoute(absl::optional network_route) { + void SetNetworkRoute(std::optional network_route) { network_route_ = network_route; SignalNetworkRouteChanged(network_route); } + using PacketTransportInternal::NotifyOnClose; + using PacketTransportInternal::NotifyPacketReceived; + private: void set_writable(bool writable) { if (writable_ == writable) { @@ -118,11 +130,13 @@ class FakePacketTransport : public PacketTransportInternal { SignalReceivingState(this); } - void SendPacketInternal(const CopyOnWriteBuffer& packet) { + void SendPacketInternal(const CopyOnWriteBuffer& packet, + const AsyncSocketPacketOptions& options) { last_sent_packet_ = packet; if (dest_) { - dest_->SignalReadPacket(dest_, packet.data(), packet.size(), - TimeMicros(), 0); + dest_->NotifyPacketReceived(ReceivedIpPacket( + packet, SocketAddress(), Timestamp::Micros(TimeMicros()), + options.ecn_1 ? EcnMarking::kEct1 : EcnMarking::kNotEct)); } } @@ -135,9 +149,17 @@ class FakePacketTransport : public PacketTransportInternal { std::map options_; int error_ = 0; - absl::optional network_route_; + std::optional network_route_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::FakePacketTransport; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES -#endif // P2P_BASE_FAKE_PACKET_TRANSPORT_H_ +#endif // P2P_TEST_FAKE_PACKET_TRANSPORT_H_ diff --git a/p2p/test/fake_port_allocator.h b/p2p/test/fake_port_allocator.h new file mode 100644 index 0000000000..bce083448b --- /dev/null +++ b/p2p/test/fake_port_allocator.h @@ -0,0 +1,280 @@ +/* + * Copyright 2010 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
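// For context, a short sketch of how the fake transports above are wired
// together in tests: SetDestination() connects two instances so that
// SendPacket() on one side is delivered to the other, synchronously by default
// or via a posted task when SetAsync(true) is used on FakeIceTransport.
// FakePacketTransport follows the same SetDestination()/SendPacket() pattern.
// The payload and helper function are illustrative; the calls come from the
// headers above.
#include "p2p/test/fake_ice_transport.h"
#include "rtc_base/checks.h"

void ConnectAndSend(webrtc::FakeIceTransport& a, webrtc::FakeIceTransport& b) {
  // Symmetric connect: simulates candidate exchange and makes both sides
  // writable.
  a.SetDestination(&b);
  webrtc::AsyncSocketPacketOptions options;
  const char payload[] = "abc";
  int sent = a.SendPacket(payload, sizeof(payload), options, /*flags=*/0);
  RTC_CHECK_EQ(sent, static_cast<int>(sizeof(payload)));
  // The non-STUN payload is counted as a received packet on the far side;
  // STUN-validated messages are tallied separately per message type.
  RTC_CHECK_EQ(b.GetCountOfReceivedPackets(), 1);
}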
+ */ + +#ifndef P2P_TEST_FAKE_PORT_ALLOCATOR_H_ +#define P2P_TEST_FAKE_PORT_ALLOCATOR_H_ + +#include +#include +#include + +#include "absl/base/nullability.h" +#include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "api/candidate.h" +#include "api/environment/environment.h" +#include "api/packet_socket_factory.h" +#include "api/task_queue/task_queue_base.h" +#include "p2p/base/basic_packet_socket_factory.h" +#include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" +#include "p2p/base/port_interface.h" +#include "p2p/base/stun_port.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/checks.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/net_test_helpers.h" +#include "rtc_base/network.h" +#include "rtc_base/socket_factory.h" +#include "rtc_base/task_queue_for_test.h" + +namespace webrtc { + +class TestUDPPort : public UDPPort { + public: + static TestUDPPort* Create(const PortParametersRef& args, + uint16_t min_port, + uint16_t max_port, + bool emit_localhost_for_anyaddress) { + TestUDPPort* port = new TestUDPPort(args, min_port, max_port, + emit_localhost_for_anyaddress); + if (!port->Init()) { + delete port; + port = nullptr; + } + return port; + } + + static std::unique_ptr Create( + const PortParametersRef& args, + AsyncPacketSocket* socket, + bool emit_localhost_for_anyaddress) { + auto port = absl::WrapUnique( + new TestUDPPort(args, socket, emit_localhost_for_anyaddress)); + if (!port->Init()) { + return nullptr; + } + return port; + } + + protected: + TestUDPPort(const PortParametersRef& args, + uint16_t min_port, + uint16_t max_port, + bool emit_localhost_for_anyaddress) + : UDPPort(args, + IceCandidateType::kHost, + min_port, + max_port, + emit_localhost_for_anyaddress) {} + + TestUDPPort(const PortParametersRef& args, + AsyncPacketSocket* socket, + bool emit_localhost_for_anyaddress) + : UDPPort(args, + IceCandidateType::kHost, + socket, + emit_localhost_for_anyaddress) {} +}; + +// A FakePortAllocatorSession can be used with either a real or fake socket +// factory. It gathers a single loopback port, using IPv6 if available and +// not disabled. +class FakePortAllocatorSession : public PortAllocatorSession { + public: + FakePortAllocatorSession(const Environment& env, + PortAllocator* allocator, + TaskQueueBase* network_thread, + PacketSocketFactory* factory, + absl::string_view content_name, + int component, + absl::string_view ice_ufrag, + absl::string_view ice_pwd) + : PortAllocatorSession(content_name, + component, + ice_ufrag, + ice_pwd, + allocator->flags()), + env_(env), + allocator_(allocator), + network_thread_(network_thread), + factory_(factory), + ipv4_network_("network", "unittest", IPAddress(INADDR_LOOPBACK), 32), + ipv6_network_("network", "unittest", IPAddress(in6addr_loopback), 64), + port_(), + port_config_count_(0), + stun_servers_(allocator->stun_servers()), + turn_servers_(allocator->turn_servers()) { + ipv4_network_.AddIP(IPAddress(INADDR_LOOPBACK)); + ipv6_network_.AddIP(IPAddress(in6addr_loopback)); + } + + void SetCandidateFilter(uint32_t filter) override { + candidate_filter_ = filter; + } + + void StartGettingPorts() override { + if (!port_) { + Network& network = (webrtc::HasIPv6Enabled() && + (flags() & webrtc::PORTALLOCATOR_ENABLE_IPV6)) + ? 
ipv6_network_ + : ipv4_network_; + port_.reset(TestUDPPort::Create({.env = env_, + .network_thread = network_thread_, + .socket_factory = factory_, + .network = &network, + .ice_username_fragment = username(), + .ice_password = password()}, + 0, 0, false)); + RTC_DCHECK(port_); + port_->SetIceTiebreaker(allocator_->ice_tiebreaker()); + port_->SubscribePortDestroyed( + [this](PortInterface* port) { OnPortDestroyed(port); }); + AddPort(port_.get()); + } + ++port_config_count_; + running_ = true; + } + + void StopGettingPorts() override { running_ = false; } + bool IsGettingPorts() override { return running_; } + void ClearGettingPorts() override { is_cleared = true; } + bool IsCleared() const override { return is_cleared; } + + void RegatherOnFailedNetworks() override { + SignalIceRegathering(this, IceRegatheringReason::NETWORK_FAILURE); + } + + std::vector ReadyPorts() const override { + return ready_ports_; + } + std::vector ReadyCandidates() const override { + return candidates_; + } + void PruneAllPorts() override { port_->Prune(); } + bool CandidatesAllocationDone() const override { return allocation_done_; } + + int port_config_count() { return port_config_count_; } + + const ServerAddresses& stun_servers() const { return stun_servers_; } + + const std::vector& turn_servers() const { + return turn_servers_; + } + + uint32_t candidate_filter() const { return candidate_filter_; } + + int transport_info_update_count() const { + return transport_info_update_count_; + } + + protected: + void UpdateIceParametersInternal() override { + // Since this class is a fake and this method only is overridden for tests, + // we don't need to actually update the transport info. + ++transport_info_update_count_; + } + + private: + void AddPort(Port* port) { + port->set_component(component()); + port->set_generation(generation()); + port->SignalPortComplete.connect(this, + &FakePortAllocatorSession::OnPortComplete); + port->PrepareAddress(); + ready_ports_.push_back(port); + SignalPortReady(this, port); + port->KeepAliveUntilPruned(); + } + void OnPortComplete(Port* port) { + const std::vector& candidates = port->Candidates(); + candidates_.insert(candidates_.end(), candidates.begin(), candidates.end()); + SignalCandidatesReady(this, candidates); + + allocation_done_ = true; + SignalCandidatesAllocationDone(this); + } + void OnPortDestroyed(PortInterface* /* port */) { + // Don't want to double-delete port if it deletes itself. 
+ port_.release(); + } + + const Environment env_; + PortAllocator* allocator_; + TaskQueueBase* network_thread_; + PacketSocketFactory* factory_; + Network ipv4_network_; + Network ipv6_network_; + std::unique_ptr port_; + int port_config_count_; + std::vector candidates_; + std::vector ready_ports_; + bool allocation_done_ = false; + bool is_cleared = false; + ServerAddresses stun_servers_; + std::vector turn_servers_; + uint32_t candidate_filter_ = webrtc::CF_ALL; + int transport_info_update_count_ = 0; + bool running_ = false; +}; + +class FakePortAllocator : public PortAllocator { + public: + FakePortAllocator( + const Environment& env, + SocketFactory* absl_nonnull socket_factory, + TaskQueueBase* absl_nonnull network_thread = TaskQueueBase::Current()) + : env_(env), network_thread_(network_thread), factory_(socket_factory) { + RTC_CHECK(network_thread); + SendTask(network_thread_, [this] { Initialize(); }); + } + + void SetNetworkIgnoreMask(int /* network_ignore_mask */) override {} + + PortAllocatorSession* CreateSessionInternal( + absl::string_view content_name, + int component, + absl::string_view ice_ufrag, + absl::string_view ice_pwd) override { + return new FakePortAllocatorSession(env_, this, network_thread_, &factory_, + content_name, component, ice_ufrag, + ice_pwd); + } + + bool initialized() const { return initialized_; } + + // For testing: Manipulate MdnsObfuscationEnabled() + bool MdnsObfuscationEnabled() const override { + return mdns_obfuscation_enabled_; + } + void SetMdnsObfuscationEnabledForTesting(bool enabled) { + mdns_obfuscation_enabled_ = enabled; + } + + private: + const Environment env_; + TaskQueueBase* absl_nonnull network_thread_; + BasicPacketSocketFactory factory_; + bool mdns_obfuscation_enabled_ = false; +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::FakePortAllocator; +using ::webrtc::FakePortAllocatorSession; +using ::webrtc::TestUDPPort; +} // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // P2P_TEST_FAKE_PORT_ALLOCATOR_H_ diff --git a/p2p/base/mock_active_ice_controller.h b/p2p/test/mock_active_ice_controller.h similarity index 50% rename from p2p/base/mock_active_ice_controller.h rename to p2p/test/mock_active_ice_controller.h index 908967bd1d..df18a444a8 100644 --- a/p2p/base/mock_active_ice_controller.h +++ b/p2p/test/mock_active_ice_controller.h @@ -8,75 +8,73 @@ * be found in the AUTHORS file in the root of the source tree. 
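// For context, a minimal sketch of standing up the new FakePortAllocator in a
// test. The VirtualSocketServer / CreateEnvironment() plumbing and the ICE
// credentials are assumptions about typical test setup; the allocator and
// session calls follow the header above, with CreateSession() being the public
// PortAllocator entry point that lands in CreateSessionInternal().
#include <memory>

#include "api/environment/environment_factory.h"
#include "p2p/test/fake_port_allocator.h"
#include "rtc_base/checks.h"
#include "rtc_base/virtual_socket_server.h"

void GatherLoopbackCandidates() {
  rtc::VirtualSocketServer vss;
  rtc::AutoSocketServerThread main_thread(&vss);
  const webrtc::Environment env = webrtc::CreateEnvironment();
  // The network thread defaults to the current task queue.
  webrtc::FakePortAllocator allocator(env, &vss);
  std::unique_ptr<webrtc::PortAllocatorSession> session = allocator.CreateSession(
      "audio", /*component=*/1, "ufrag0001", "password0000000000000001");
  session->StartGettingPorts();  // Gathers a single loopback UDP candidate.
  RTC_CHECK(session->IsGettingPorts());
}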
*/ -#ifndef P2P_BASE_MOCK_ACTIVE_ICE_CONTROLLER_H_ -#define P2P_BASE_MOCK_ACTIVE_ICE_CONTROLLER_H_ +#ifndef P2P_TEST_MOCK_ACTIVE_ICE_CONTROLLER_H_ +#define P2P_TEST_MOCK_ACTIVE_ICE_CONTROLLER_H_ #include #include "p2p/base/active_ice_controller_factory_interface.h" #include "p2p/base/active_ice_controller_interface.h" +#include "p2p/base/connection.h" +#include "p2p/base/ice_switch_reason.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/transport_description.h" #include "test/gmock.h" -namespace cricket { +namespace webrtc { -class MockActiveIceController : public cricket::ActiveIceControllerInterface { +class MockActiveIceController : public ActiveIceControllerInterface { public: explicit MockActiveIceController( - const cricket::ActiveIceControllerFactoryArgs& args) {} + const ActiveIceControllerFactoryArgs& /* args */) {} ~MockActiveIceController() override = default; - MOCK_METHOD(void, SetIceConfig, (const cricket::IceConfig&), (override)); - MOCK_METHOD(void, - OnConnectionAdded, - (const cricket::Connection*), - (override)); + MOCK_METHOD(void, SetIceConfig, (const webrtc::IceConfig&), (override)); + MOCK_METHOD(void, OnConnectionAdded, (const webrtc::Connection*), (override)); MOCK_METHOD(void, OnConnectionSwitched, - (const cricket::Connection*), + (const webrtc::Connection*), (override)); MOCK_METHOD(void, OnConnectionDestroyed, - (const cricket::Connection*), + (const webrtc::Connection*), (override)); MOCK_METHOD(void, OnConnectionPinged, - (const cricket::Connection*), + (const webrtc::Connection*), (override)); MOCK_METHOD(void, OnConnectionUpdated, - (const cricket::Connection*), + (const webrtc::Connection*), (override)); MOCK_METHOD(bool, GetUseCandidateAttribute, - (const cricket::Connection*, - cricket::NominationMode, - cricket::IceMode), + (const webrtc::Connection*, + webrtc::NominationMode, + webrtc::IceMode), (const, override)); MOCK_METHOD(void, OnSortAndSwitchRequest, - (cricket::IceSwitchReason), + (webrtc::IceSwitchReason), (override)); MOCK_METHOD(void, OnImmediateSortAndSwitchRequest, - (cricket::IceSwitchReason), + (webrtc::IceSwitchReason), (override)); MOCK_METHOD(bool, OnImmediateSwitchRequest, - (cricket::IceSwitchReason, const cricket::Connection*), - (override)); - MOCK_METHOD(const cricket::Connection*, - FindNextPingableConnection, - (), + (webrtc::IceSwitchReason, const webrtc::Connection*), (override)); + MOCK_METHOD(const Connection*, FindNextPingableConnection, (), (override)); }; class MockActiveIceControllerFactory - : public cricket::ActiveIceControllerFactoryInterface { + : public ActiveIceControllerFactoryInterface { public: ~MockActiveIceControllerFactory() override = default; - std::unique_ptr Create( - const cricket::ActiveIceControllerFactoryArgs& args) { + std::unique_ptr Create( + const ActiveIceControllerFactoryArgs& args) { RecordActiveIceControllerCreated(); return std::make_unique(args); } @@ -84,6 +82,15 @@ class MockActiveIceControllerFactory MOCK_METHOD(void, RecordActiveIceControllerCreated, ()); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::MockActiveIceController; +using ::webrtc::MockActiveIceControllerFactory; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES -#endif // P2P_BASE_MOCK_ACTIVE_ICE_CONTROLLER_H_ +#endif // P2P_TEST_MOCK_ACTIVE_ICE_CONTROLLER_H_ diff --git a/p2p/base/mock_dns_resolving_packet_socket_factory.h b/p2p/test/mock_dns_resolving_packet_socket_factory.h similarity index 56% rename from p2p/base/mock_dns_resolving_packet_socket_factory.h rename to p2p/test/mock_dns_resolving_packet_socket_factory.h index 8f18e9b0e1..f2e2ed7fd7 100644 --- a/p2p/base/mock_dns_resolving_packet_socket_factory.h +++ b/p2p/test/mock_dns_resolving_packet_socket_factory.h @@ -8,31 +8,32 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef P2P_BASE_MOCK_DNS_RESOLVING_PACKET_SOCKET_FACTORY_H_ -#define P2P_BASE_MOCK_DNS_RESOLVING_PACKET_SOCKET_FACTORY_H_ +#ifndef P2P_TEST_MOCK_DNS_RESOLVING_PACKET_SOCKET_FACTORY_H_ +#define P2P_TEST_MOCK_DNS_RESOLVING_PACKET_SOCKET_FACTORY_H_ #include #include +#include "api/async_dns_resolver.h" #include "api/test/mock_async_dns_resolver.h" #include "p2p/base/basic_packet_socket_factory.h" +#include "rtc_base/socket_factory.h" -namespace rtc { +namespace webrtc { // A PacketSocketFactory implementation for tests that uses a mock DnsResolver // and allows setting expectations on the resolver and results. class MockDnsResolvingPacketSocketFactory : public BasicPacketSocketFactory { public: - using Expectations = std::function; + using Expectations = + std::function; explicit MockDnsResolvingPacketSocketFactory(SocketFactory* socket_factory) : BasicPacketSocketFactory(socket_factory) {} - std::unique_ptr CreateAsyncDnsResolver() - override { - std::unique_ptr resolver = - std::make_unique(); + std::unique_ptr CreateAsyncDnsResolver() override { + std::unique_ptr resolver = + std::make_unique(); if (expectations_) { expectations_(resolver.get(), &resolver_result_); } @@ -44,10 +45,18 @@ class MockDnsResolvingPacketSocketFactory : public BasicPacketSocketFactory { } private: - webrtc::MockAsyncDnsResolverResult resolver_result_; + MockAsyncDnsResolverResult resolver_result_; Expectations expectations_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::MockDnsResolvingPacketSocketFactory; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES -#endif // P2P_BASE_MOCK_DNS_RESOLVING_PACKET_SOCKET_FACTORY_H_ +#endif // P2P_TEST_MOCK_DNS_RESOLVING_PACKET_SOCKET_FACTORY_H_ diff --git a/p2p/test/mock_ice_agent.h b/p2p/test/mock_ice_agent.h new file mode 100644 index 0000000000..113ddf4105 --- /dev/null +++ b/p2p/test/mock_ice_agent.h @@ -0,0 +1,59 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
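// For context, a sketch of how the relocated MockDnsResolvingPacketSocketFactory
// is used: a test installs an Expectations callback that programs the mock
// resolver/result pair before the code under test triggers a DNS lookup. This
// assumes the factory's existing SetExpectations() setter (not shown in the
// hunk above); the concrete EXPECT_CALLs depend on the mock interfaces in
// api/test/mock_async_dns_resolver.h and are left indicative here.
#include "api/test/mock_async_dns_resolver.h"
#include "p2p/test/mock_dns_resolving_packet_socket_factory.h"

void InstallResolverExpectations(
    webrtc::MockDnsResolvingPacketSocketFactory& factory) {
  factory.SetExpectations(
      [](webrtc::MockAsyncDnsResolver* resolver,
         webrtc::MockAsyncDnsResolverResult* result) {
        // e.g. make Start() complete synchronously and have `result` report a
        // canned resolved address with GetError() == 0.
        (void)resolver;
        (void)result;
      });
}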
+ */ + +#ifndef P2P_TEST_MOCK_ICE_AGENT_H_ +#define P2P_TEST_MOCK_ICE_AGENT_H_ + +#include + +#include "api/array_view.h" +#include "p2p/base/connection.h" +#include "p2p/base/ice_agent_interface.h" +#include "p2p/base/ice_switch_reason.h" +#include "p2p/base/transport_description.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockIceAgent : public IceAgentInterface { + public: + ~MockIceAgent() override = default; + + MOCK_METHOD(int64_t, GetLastPingSentMs, (), (override, const)); + MOCK_METHOD(IceRole, GetIceRole, (), (override, const)); + MOCK_METHOD(void, OnStartedPinging, (), (override)); + MOCK_METHOD(void, UpdateConnectionStates, (), (override)); + MOCK_METHOD(void, UpdateState, (), (override)); + MOCK_METHOD(void, + ForgetLearnedStateForConnections, + (webrtc::ArrayView), + (override)); + MOCK_METHOD(void, SendPingRequest, (const webrtc::Connection*), (override)); + MOCK_METHOD(void, + SwitchSelectedConnection, + (const webrtc::Connection*, webrtc::IceSwitchReason), + (override)); + MOCK_METHOD(bool, + PruneConnections, + (webrtc::ArrayView), + (override)); +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::MockIceAgent; +} // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // P2P_TEST_MOCK_ICE_AGENT_H_ diff --git a/p2p/test/mock_ice_controller.h b/p2p/test/mock_ice_controller.h new file mode 100644 index 0000000000..5b92dde753 --- /dev/null +++ b/p2p/test/mock_ice_controller.h @@ -0,0 +1,100 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef P2P_TEST_MOCK_ICE_CONTROLLER_H_ +#define P2P_TEST_MOCK_ICE_CONTROLLER_H_ + +#include +#include +#include + +#include "api/array_view.h" +#include "p2p/base/connection.h" +#include "p2p/base/ice_controller_factory_interface.h" +#include "p2p/base/ice_controller_interface.h" +#include "p2p/base/ice_switch_reason.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/transport_description.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockIceController : public IceControllerInterface { + public: + explicit MockIceController(const IceControllerFactoryArgs& /* args */) {} + ~MockIceController() override = default; + + MOCK_METHOD(void, SetIceConfig, (const webrtc::IceConfig&), (override)); + MOCK_METHOD(void, + SetSelectedConnection, + (const webrtc::Connection*), + (override)); + MOCK_METHOD(void, AddConnection, (const webrtc::Connection*), (override)); + MOCK_METHOD(void, + OnConnectionDestroyed, + (const webrtc::Connection*), + (override)); + MOCK_METHOD(ArrayView, + GetConnections, + (), + (const, override)); + MOCK_METHOD(ArrayView, connections, (), (const, override)); + MOCK_METHOD(bool, HasPingableConnection, (), (const, override)); + MOCK_METHOD(IceControllerInterface::PingResult, + SelectConnectionToPing, + (int64_t), + (override)); + MOCK_METHOD(bool, + GetUseCandidateAttr, + (const webrtc::Connection*, + webrtc::NominationMode, + webrtc::IceMode), + (const, override)); + MOCK_METHOD(const Connection*, FindNextPingableConnection, (), (override)); + MOCK_METHOD(void, + MarkConnectionPinged, + (const webrtc::Connection*), + (override)); + MOCK_METHOD(IceControllerInterface::SwitchResult, + ShouldSwitchConnection, + (webrtc::IceSwitchReason, const webrtc::Connection*), + (override)); + MOCK_METHOD(IceControllerInterface::SwitchResult, + SortAndSwitchConnection, + (webrtc::IceSwitchReason), + (override)); + MOCK_METHOD(std::vector, PruneConnections, (), (override)); +}; + +class MockIceControllerFactory : public IceControllerFactoryInterface { + public: + ~MockIceControllerFactory() override = default; + + std::unique_ptr Create( + const IceControllerFactoryArgs& args) override { + RecordIceControllerCreated(); + return std::make_unique(args); + } + + MOCK_METHOD(void, RecordIceControllerCreated, ()); +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::MockIceController; +using ::webrtc::MockIceControllerFactory; +} // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // P2P_TEST_MOCK_ICE_CONTROLLER_H_ diff --git a/p2p/test/mock_ice_transport.h b/p2p/test/mock_ice_transport.h new file mode 100644 index 0000000000..d886c5869e --- /dev/null +++ b/p2p/test/mock_ice_transport.h @@ -0,0 +1,93 @@ +/* + * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef P2P_TEST_MOCK_ICE_TRANSPORT_H_ +#define P2P_TEST_MOCK_ICE_TRANSPORT_H_ + +#include +#include +#include + +#include "api/candidate.h" +#include "api/transport/enums.h" +#include "p2p/base/candidate_pair_interface.h" +#include "p2p/base/connection.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/transport_description.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/socket.h" +#include "test/gmock.h" + +namespace webrtc { + +// Used in Chromium/remoting/protocol/channel_socket_adapter_unittest.cc +class MockIceTransport : public IceTransportInternal { + public: + MockIceTransport() { + SignalReadyToSend(this); + SignalWritableState(this); + } + + MOCK_METHOD(int, + SendPacket, + (const char* data, + size_t len, + const AsyncSocketPacketOptions& options, + int flags), + (override)); + MOCK_METHOD(int, SetOption, (Socket::Option opt, int value), (override)); + MOCK_METHOD(int, GetError, (), (override)); + MOCK_METHOD(IceRole, GetIceRole, (), (const, override)); + MOCK_METHOD(bool, + GetStats, + (IceTransportStats * ice_transport_stats), + (override)); + MOCK_METHOD(IceTransportStateInternal, GetState, (), (const override)); + MOCK_METHOD(IceTransportState, GetIceTransportState, (), (const override)); + + MOCK_METHOD(const std::string&, transport_name, (), (const override)); + MOCK_METHOD(int, component, (), (const override)); + MOCK_METHOD(void, SetIceRole, (IceRole), (override)); + // The ufrag and pwd in `ice_params` must be set + // before candidate gathering can start. + MOCK_METHOD(void, SetIceParameters, (const IceParameters&), (override)); + MOCK_METHOD(void, SetRemoteIceParameters, (const IceParameters&), (override)); + MOCK_METHOD(IceParameters*, local_ice_parameters, (), (const, override)); + MOCK_METHOD(IceParameters*, remote_ice_parameters, (), (const, override)); + MOCK_METHOD(void, SetRemoteIceMode, (IceMode), (override)); + MOCK_METHOD(void, SetIceConfig, (const IceConfig& config), (override)); + MOCK_METHOD(const IceConfig&, config, (), (const override)); + MOCK_METHOD(std::optional, GetRttEstimate, (), (override)); + MOCK_METHOD(const Connection*, selected_connection, (), (const, override)); + MOCK_METHOD(std::optional, + GetSelectedCandidatePair, + (), + (const, override)); + MOCK_METHOD(void, MaybeStartGathering, (), (override)); + MOCK_METHOD(void, AddRemoteCandidate, (const Candidate&), (override)); + MOCK_METHOD(void, RemoveRemoteCandidate, (const Candidate&), (override)); + MOCK_METHOD(void, RemoveAllRemoteCandidates, (), (override)); + MOCK_METHOD(IceGatheringState, gathering_state, (), (const override)); + + MOCK_METHOD(bool, receiving, (), (const override)); + MOCK_METHOD(bool, writable, (), (const override)); +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::MockIceTransport; +} // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // P2P_TEST_MOCK_ICE_TRANSPORT_H_ diff --git a/rtc_base/nat_server.cc b/p2p/test/nat_server.cc similarity index 61% rename from rtc_base/nat_server.cc rename to p2p/test/nat_server.cc index b818685efb..655357240a 100644 --- a/rtc_base/nat_server.cc +++ b/p2p/test/nat_server.cc @@ -8,16 +8,34 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "rtc_base/nat_server.h" +#include "p2p/test/nat_server.h" +#include +#include +#include #include +#include "api/array_view.h" +#include "p2p/test/nat_socket_factory.h" +#include "p2p/test/nat_types.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/async_udp_socket.h" #include "rtc_base/checks.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" -#include "rtc_base/nat_socket_factory.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/proxy_server.h" +#include "rtc_base/server_socket_adapters.h" +#include "rtc_base/socket.h" #include "rtc_base/socket_adapters.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/socket_address_pair.h" +#include "rtc_base/socket_factory.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread.h" -namespace rtc { +namespace webrtc { RouteCmp::RouteCmp(NAT* nat) : symmetric(nat->IsSymmetric()) {} @@ -47,7 +65,7 @@ AddrCmp::AddrCmp(NAT* nat) size_t AddrCmp::operator()(const SocketAddress& a) const { size_t h = 0; if (use_ip) - h ^= HashIP(a.ipaddr()); + h ^= webrtc::HashIP(a.ipaddr()); if (use_port) h ^= a.port() | (a.port() << 16); return h; @@ -70,8 +88,8 @@ bool AddrCmp::operator()(const SocketAddress& a1, // a TCP connection to the NAT server. class NATProxyServerSocket : public AsyncProxyServerSocket { public: - NATProxyServerSocket(Socket* socket) - : AsyncProxyServerSocket(socket, kNATEncodedIPv6AddressSize) { + explicit NATProxyServerSocket(Socket* socket) + : AsyncProxyServerSocket(socket, webrtc::kNATEncodedIPv6AddressSize) { BufferInput(true); } @@ -88,14 +106,15 @@ class NATProxyServerSocket : public AsyncProxyServerSocket { int family = data[1]; RTC_DCHECK(family == AF_INET || family == AF_INET6); - if ((family == AF_INET && *len < kNATEncodedIPv4AddressSize) || - (family == AF_INET6 && *len < kNATEncodedIPv6AddressSize)) { + if ((family == AF_INET && *len < webrtc::kNATEncodedIPv4AddressSize) || + (family == AF_INET6 && *len < webrtc::kNATEncodedIPv6AddressSize)) { return; } SocketAddress dest_addr; - size_t address_length = UnpackAddressFromNAT(data, *len, &dest_addr); - + size_t address_length = webrtc::UnpackAddressFromNAT( + MakeArrayView(reinterpret_cast(data), *len), + &dest_addr); *len -= address_length; if (*len > 0) { memmove(data, data + address_length, *len); @@ -125,17 +144,27 @@ class NATProxyServer : public ProxyServer { }; NATServer::NATServer(NATType type, + Thread& internal_socket_thread, SocketFactory* internal, const SocketAddress& internal_udp_addr, const SocketAddress& internal_tcp_addr, + Thread& external_socket_thread, SocketFactory* external, const SocketAddress& external_ip) - : external_(external), external_ip_(external_ip.ipaddr(), 0) { + : internal_socket_thread_(internal_socket_thread), + external_socket_thread_(external_socket_thread), + external_(external), + external_ip_(external_ip.ipaddr(), 0) { nat_ = NAT::Create(type); - udp_server_socket_ = AsyncUDPSocket::Create(internal, internal_udp_addr); - udp_server_socket_->SignalReadPacket.connect(this, - &NATServer::OnInternalUDPPacket); + internal_socket_thread_.BlockingCall([&] { + udp_server_socket_ = AsyncUDPSocket::Create(internal, internal_udp_addr); + udp_server_socket_->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + OnInternalUDPPacket(socket, packet); + }); + }); + tcp_proxy_server_ = new NATProxyServer(internal, internal_tcp_addr, external, external_ip); @@ -156,16 +185,14 @@ 
NATServer::~NATServer() { } void NATServer::OnInternalUDPPacket(AsyncPacketSocket* socket, - const char* buf, - size_t size, - const SocketAddress& addr, - const int64_t& /* packet_time_us */) { + const ReceivedIpPacket& packet) { + RTC_DCHECK(internal_socket_thread_.IsCurrent()); // Read the intended destination from the wire. SocketAddress dest_addr; - size_t length = UnpackAddressFromNAT(buf, size, &dest_addr); + size_t length = webrtc::UnpackAddressFromNAT(packet.payload(), &dest_addr); // Find the translation for these addresses (allocating one if necessary). - SocketAddressPair route(addr, dest_addr); + SocketAddressPair route(packet.source_address(), dest_addr); InternalMap::iterator iter = int_map_->find(route); if (iter == int_map_->end()) { Translate(route); @@ -177,15 +204,15 @@ void NATServer::OnInternalUDPPacket(AsyncPacketSocket* socket, iter->second->AllowlistInsert(dest_addr); // Send the packet to its intended destination. - rtc::PacketOptions options; + AsyncSocketPacketOptions options; + const char* buf = reinterpret_cast(packet.payload().data()); + size_t size = packet.payload().size(); iter->second->socket->SendTo(buf + length, size - length, dest_addr, options); } void NATServer::OnExternalUDPPacket(AsyncPacketSocket* socket, - const char* buf, - size_t size, - const SocketAddress& remote_addr, - const int64_t& /* packet_time_us */) { + const ReceivedIpPacket& packet) { + RTC_DCHECK(external_socket_thread_.IsCurrent()); SocketAddress local_addr = socket->GetLocalAddress(); // Find the translation for this addresses. @@ -193,36 +220,47 @@ void NATServer::OnExternalUDPPacket(AsyncPacketSocket* socket, RTC_DCHECK(iter != ext_map_->end()); // Allow the NAT to reject this packet. - if (ShouldFilterOut(iter->second, remote_addr)) { - RTC_LOG(LS_INFO) << "Packet from " << remote_addr.ToSensitiveString() + if (ShouldFilterOut(iter->second, packet.source_address())) { + RTC_LOG(LS_INFO) << "Packet from " + << packet.source_address().ToSensitiveString() << " was filtered out by the NAT."; return; } // Forward this packet to the internal address. // First prepend the address in a quasi-STUN format. - std::unique_ptr real_buf(new char[size + kNATEncodedIPv6AddressSize]); - size_t addrlength = PackAddressForNAT( - real_buf.get(), size + kNATEncodedIPv6AddressSize, remote_addr); + std::unique_ptr real_buf( + new char[packet.payload().size() + webrtc::kNATEncodedIPv6AddressSize]); + size_t addrlength = webrtc::PackAddressForNAT( + real_buf.get(), + packet.payload().size() + webrtc::kNATEncodedIPv6AddressSize, + packet.source_address()); // Copy the data part after the address. 
- rtc::PacketOptions options; - memcpy(real_buf.get() + addrlength, buf, size); - udp_server_socket_->SendTo(real_buf.get(), size + addrlength, + AsyncSocketPacketOptions options; + memcpy(real_buf.get() + addrlength, packet.payload().data(), + packet.payload().size()); + udp_server_socket_->SendTo(real_buf.get(), + packet.payload().size() + addrlength, iter->second->route.source(), options); } void NATServer::Translate(const SocketAddressPair& route) { - AsyncUDPSocket* socket = AsyncUDPSocket::Create(external_, external_ip_); + external_socket_thread_.BlockingCall([&] { + AsyncUDPSocket* socket = AsyncUDPSocket::Create(external_, external_ip_); - if (!socket) { - RTC_LOG(LS_ERROR) << "Couldn't find a free port!"; - return; - } + if (!socket) { + RTC_LOG(LS_ERROR) << "Couldn't find a free port!"; + return; + } - TransEntry* entry = new TransEntry(route, socket, nat_); - (*int_map_)[route] = entry; - (*ext_map_)[socket->GetLocalAddress()] = entry; - socket->SignalReadPacket.connect(this, &NATServer::OnExternalUDPPacket); + TransEntry* entry = new TransEntry(route, socket, nat_); + (*int_map_)[route] = entry; + (*ext_map_)[socket->GetLocalAddress()] = entry; + socket->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + OnExternalUDPPacket(socket, packet); + }); + }); } bool NATServer::ShouldFilterOut(TransEntry* entry, @@ -243,13 +281,13 @@ NATServer::TransEntry::~TransEntry() { } void NATServer::TransEntry::AllowlistInsert(const SocketAddress& addr) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); allowlist->insert(addr); } bool NATServer::TransEntry::AllowlistContains(const SocketAddress& ext_addr) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return allowlist->find(ext_addr) == allowlist->end(); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/nat_server.h b/p2p/test/nat_server.h similarity index 78% rename from rtc_base/nat_server.h rename to p2p/test/nat_server.h index acbd62a092..510d9ea36b 100644 --- a/rtc_base/nat_server.h +++ b/p2p/test/nat_server.h @@ -8,21 +8,25 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef RTC_BASE_NAT_SERVER_H_ -#define RTC_BASE_NAT_SERVER_H_ +#ifndef P2P_TEST_NAT_SERVER_H_ +#define P2P_TEST_NAT_SERVER_H_ +#include #include #include +#include "p2p/test/nat_types.h" +#include "rtc_base/async_packet_socket.h" #include "rtc_base/async_udp_socket.h" -#include "rtc_base/nat_types.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/proxy_server.h" +#include "rtc_base/socket_address.h" #include "rtc_base/socket_address_pair.h" #include "rtc_base/socket_factory.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread.h" -namespace rtc { +namespace webrtc { // Change how routes (socketaddress pairs) are compared based on the type of // NAT. 
The NAT server maintains a hashtable of the routes that it knows @@ -58,15 +62,17 @@ struct AddrCmp { const int NAT_SERVER_UDP_PORT = 4237; const int NAT_SERVER_TCP_PORT = 4238; -class NATServer : public sigslot::has_slots<> { +class NATServer { public: NATServer(NATType type, + Thread& internal_socket_thread, SocketFactory* internal, const SocketAddress& internal_udp_addr, const SocketAddress& internal_tcp_addr, + Thread& external_socket_thread, SocketFactory* external, const SocketAddress& external_ip); - ~NATServer() override; + ~NATServer(); NATServer(const NATServer&) = delete; NATServer& operator=(const NATServer&) = delete; @@ -81,15 +87,9 @@ class NATServer : public sigslot::has_slots<> { // Packets received on one of the networks. void OnInternalUDPPacket(AsyncPacketSocket* socket, - const char* buf, - size_t size, - const SocketAddress& addr, - const int64_t& packet_time_us); + const ReceivedIpPacket& packet); void OnExternalUDPPacket(AsyncPacketSocket* socket, - const char* buf, - size_t size, - const SocketAddress& remote_addr, - const int64_t& packet_time_us); + const ReceivedIpPacket& packet); private: typedef std::set AddressSet; @@ -105,7 +105,7 @@ class NATServer : public sigslot::has_slots<> { SocketAddressPair route; AsyncUDPSocket* socket; AddressSet* allowlist; - webrtc::Mutex mutex_; + Mutex mutex_; }; typedef std::map InternalMap; @@ -118,6 +118,8 @@ class NATServer : public sigslot::has_slots<> { bool ShouldFilterOut(TransEntry* entry, const SocketAddress& ext_addr); NAT* nat_; + Thread& internal_socket_thread_; + Thread& external_socket_thread_; SocketFactory* external_; SocketAddress external_ip_; AsyncUDPSocket* udp_server_socket_; @@ -126,6 +128,18 @@ class NATServer : public sigslot::has_slots<> { ExternalMap* ext_map_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::AddrCmp; +using ::webrtc::NAT_SERVER_TCP_PORT; +using ::webrtc::NAT_SERVER_UDP_PORT; +using ::webrtc::NATServer; +using ::webrtc::RouteCmp; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES -#endif // RTC_BASE_NAT_SERVER_H_ +#endif // P2P_TEST_NAT_SERVER_H_ diff --git a/rtc_base/nat_socket_factory.cc b/p2p/test/nat_socket_factory.cc similarity index 86% rename from rtc_base/nat_socket_factory.cc rename to p2p/test/nat_socket_factory.cc index fe021b95ff..f7fbf21784 100644 --- a/rtc_base/nat_socket_factory.cc +++ b/p2p/test/nat_socket_factory.cc @@ -8,15 +8,35 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "rtc_base/nat_socket_factory.h" - +#include "p2p/test/nat_socket_factory.h" + +#include +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "p2p/test/nat_server.h" +#include "p2p/test/nat_types.h" #include "rtc_base/arraysize.h" +#include "rtc_base/buffer.h" +#include "rtc_base/byte_order.h" #include "rtc_base/checks.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" -#include "rtc_base/nat_server.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/socket_factory.h" +#include "rtc_base/socket_server.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" #include "rtc_base/virtual_socket_server.h" -namespace rtc { +namespace webrtc { // Packs the given socketaddress into the buffer in buf, in the quasi-STUN // format that the natserver uses. @@ -29,7 +49,8 @@ size_t PackAddressForNAT(char* buf, buf[0] = 0; buf[1] = family; // Writes the port. - *(reinterpret_cast(&buf[2])) = HostToNetwork16(remote_addr.port()); + *(reinterpret_cast(&buf[2])) = + webrtc::HostToNetwork16(remote_addr.port()); if (family == AF_INET) { RTC_DCHECK(buf_size >= kNATEncodedIPv4AddressSize); in_addr v4addr = ip.ipv4_address(); @@ -47,21 +68,20 @@ size_t PackAddressForNAT(char* buf, // Decodes the remote address from a packet that has been encoded with the nat's // quasi-STUN format. Returns the length of the address (i.e., the offset into // data where the original packet starts). -size_t UnpackAddressFromNAT(const char* buf, - size_t buf_size, +size_t UnpackAddressFromNAT(ArrayView buf, SocketAddress* remote_addr) { - RTC_DCHECK(buf_size >= 8); - RTC_DCHECK(buf[0] == 0); + RTC_CHECK(buf.size() >= 8); + RTC_DCHECK(buf.data()[0] == 0); int family = buf[1]; - uint16_t port = - NetworkToHost16(*(reinterpret_cast(&buf[2]))); + uint16_t port = webrtc::NetworkToHost16( + *(reinterpret_cast(&buf.data()[2]))); if (family == AF_INET) { - const in_addr* v4addr = reinterpret_cast(&buf[4]); + const in_addr* v4addr = reinterpret_cast(&buf.data()[4]); *remote_addr = SocketAddress(IPAddress(*v4addr), port); return kNATEncodedIPv4AddressSize; } else if (family == AF_INET6) { - RTC_DCHECK(buf_size >= 20); - const in6_addr* v6addr = reinterpret_cast(&buf[4]); + RTC_DCHECK(buf.size() >= 20); + const in6_addr* v6addr = reinterpret_cast(&buf.data()[4]); *remote_addr = SocketAddress(IPAddress(*v6addr), port); return kNATEncodedIPv6AddressSize; } @@ -76,14 +96,9 @@ class NATSocket : public Socket, public sigslot::has_slots<> { family_(family), type_(type), connected_(false), - socket_(nullptr), - buf_(nullptr), - size_(0) {} + socket_(nullptr) {} - ~NATSocket() override { - delete socket_; - delete[] buf_; - } + ~NATSocket() override { delete socket_; } SocketAddress GetLocalAddress() const override { return (socket_) ? socket_->GetLocalAddress() : SocketAddress(); @@ -106,7 +121,7 @@ class NATSocket : public Socket, public sigslot::has_slots<> { // If we're not already bound (meaning `socket_` is null), bind to ANY // address. if (!socket_) { - result = BindInternal(SocketAddress(GetAnyIP(family_), 0)); + result = BindInternal(SocketAddress(webrtc::GetAnyIP(family_), 0)); if (result < 0) { return result; } @@ -165,23 +180,20 @@ class NATSocket : public Socket, public sigslot::has_slots<> { } // Make sure we have enough room to read the requested amount plus the // largest possible header address. 
- SocketAddress remote_addr; - Grow(size + kNATEncodedIPv6AddressSize); + buf_.EnsureCapacity(size + kNATEncodedIPv6AddressSize); // Read the packet from the socket. - int result = socket_->RecvFrom(buf_, size_, &remote_addr, timestamp); + Socket::ReceiveBuffer receive_buffer(buf_); + int result = socket_->RecvFrom(receive_buffer); if (result >= 0) { - RTC_DCHECK(remote_addr == server_addr_); - - // TODO: we need better framing so we know how many bytes we can - // return before we need to read the next address. For UDP, this will be - // fine as long as the reader always reads everything in the packet. - RTC_DCHECK((size_t)result < size_); + RTC_DCHECK(receive_buffer.source_address == server_addr_); + *timestamp = + receive_buffer.arrival_time.value_or(Timestamp::Micros(0)).us(); // Decode the wire packet into the actual results. SocketAddress real_remote_addr; - size_t addrlength = UnpackAddressFromNAT(buf_, result, &real_remote_addr); - memcpy(data, buf_ + addrlength, result - addrlength); + size_t addrlength = UnpackAddressFromNAT(buf_, &real_remote_addr); + memcpy(data, buf_.data() + addrlength, result - addrlength); // Make sure this packet should be delivered before returning it. if (!connected_ || (real_remote_addr == remote_addr_)) { @@ -285,15 +297,6 @@ class NATSocket : public Socket, public sigslot::has_slots<> { return result; } - // Makes sure the buffer is at least the given size. - void Grow(size_t new_size) { - if (size_ < new_size) { - delete[] buf_; - size_ = new_size; - buf_ = new char[size_]; - } - } - // Sends the destination address to the server to tell it to connect. void SendConnectRequest() { char buf[kNATEncodedIPv6AddressSize]; @@ -323,8 +326,7 @@ class NATSocket : public Socket, public sigslot::has_slots<> { Socket* socket_; // Need to hold error in case it occurs before the socket is created. 
int error_ = 0; - char* buf_; - size_t size_; + Buffer buf_; }; // NATSocketFactory @@ -368,7 +370,8 @@ NATSocketServer::Translator* NATSocketServer::AddTranslator( if (nats_.Get(ext_ip)) return nullptr; - return nats_.Add(ext_ip, new Translator(this, type, int_ip, server_, ext_ip)); + return nats_.Add( + ext_ip, new Translator(this, type, int_ip, *msg_queue_, server_, ext_ip)); } void NATSocketServer::RemoveTranslator(const SocketAddress& ext_ip) { @@ -384,8 +387,7 @@ void NATSocketServer::SetMessageQueue(Thread* queue) { server_->SetMessageQueue(queue); } -bool NATSocketServer::Wait(webrtc::TimeDelta max_wait_duration, - bool process_io) { +bool NATSocketServer::Wait(TimeDelta max_wait_duration, bool process_io) { return server_->Wait(max_wait_duration, process_io); } @@ -413,6 +415,7 @@ Socket* NATSocketServer::CreateInternalSocket(int family, NATSocketServer::Translator::Translator(NATSocketServer* server, NATType type, const SocketAddress& int_ip, + Thread& external_socket_thread, SocketFactory* ext_factory, const SocketAddress& ext_ip) : server_(server) { @@ -422,7 +425,8 @@ NATSocketServer::Translator::Translator(NATSocketServer* server, internal_server_ = std::make_unique(); internal_server_->SetMessageQueue(server_->queue()); nat_server_ = std::make_unique( - type, internal_server_.get(), int_ip, int_ip, ext_factory, ext_ip); + type, *server->queue(), internal_server_.get(), int_ip, int_ip, + external_socket_thread, ext_factory, ext_ip); } NATSocketServer::Translator::~Translator() { @@ -443,8 +447,8 @@ NATSocketServer::Translator* NATSocketServer::Translator::AddTranslator( return nullptr; AddClient(ext_ip); - return nats_.Add(ext_ip, - new Translator(server_, type, int_ip, server_, ext_ip)); + return nats_.Add(ext_ip, new Translator(server_, type, int_ip, + *server_->queue(), server_, ext_ip)); } void NATSocketServer::Translator::RemoveTranslator( const SocketAddress& ext_ip) { @@ -512,4 +516,4 @@ NATSocketServer::Translator* NATSocketServer::TranslatorMap::FindClient( return nat; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/nat_socket_factory.h b/p2p/test/nat_socket_factory.h similarity index 86% rename from rtc_base/nat_socket_factory.h rename to p2p/test/nat_socket_factory.h index 0b301b5844..d73cc99ea6 100644 --- a/rtc_base/nat_socket_factory.h +++ b/p2p/test/nat_socket_factory.h @@ -8,24 +8,27 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef RTC_BASE_NAT_SOCKET_FACTORY_H_ -#define RTC_BASE_NAT_SOCKET_FACTORY_H_ +#ifndef P2P_TEST_NAT_SOCKET_FACTORY_H_ +#define P2P_TEST_NAT_SOCKET_FACTORY_H_ #include +#include #include #include #include -#include "rtc_base/nat_server.h" -#include "rtc_base/nat_types.h" +#include "api/array_view.h" +#include "api/units/time_delta.h" +#include "p2p/test/nat_server.h" +#include "p2p/test/nat_types.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" #include "rtc_base/socket_factory.h" #include "rtc_base/socket_server.h" #include "rtc_base/thread.h" -namespace rtc { +namespace webrtc { const size_t kNATEncodedIPv4AddressSize = 8U; const size_t kNATEncodedIPv6AddressSize = 20U; @@ -102,6 +105,7 @@ class NATSocketServer : public SocketServer, public NATInternalSocketFactory { Translator(NATSocketServer* server, NATType type, const SocketAddress& int_addr, + Thread& external_socket_thread, SocketFactory* ext_factory, const SocketAddress& ext_addr); ~Translator(); @@ -152,7 +156,7 @@ class NATSocketServer : public SocketServer, public NATInternalSocketFactory { Socket* CreateSocket(int family, int type) override; void SetMessageQueue(Thread* queue) override; - bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) override; + bool Wait(TimeDelta max_wait_duration, bool process_io) override; void WakeUp() override; // NATInternalSocketFactory implementation @@ -171,9 +175,22 @@ class NATSocketServer : public SocketServer, public NATInternalSocketFactory { size_t PackAddressForNAT(char* buf, size_t buf_size, const SocketAddress& remote_addr); -size_t UnpackAddressFromNAT(const char* buf, - size_t buf_size, +size_t UnpackAddressFromNAT(ArrayView buf, SocketAddress* remote_addr); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::kNATEncodedIPv4AddressSize; +using ::webrtc::kNATEncodedIPv6AddressSize; +using ::webrtc::NATInternalSocketFactory; +using ::webrtc::NATSocketFactory; +using ::webrtc::NATSocketServer; +using ::webrtc::PackAddressForNAT; +using ::webrtc::UnpackAddressFromNAT; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES -#endif // RTC_BASE_NAT_SOCKET_FACTORY_H_ +#endif // P2P_TEST_NAT_SOCKET_FACTORY_H_ diff --git a/rtc_base/nat_types.cc b/p2p/test/nat_types.cc similarity index 95% rename from rtc_base/nat_types.cc rename to p2p/test/nat_types.cc index 9ca03608e8..6fbc973280 100644 --- a/rtc_base/nat_types.cc +++ b/p2p/test/nat_types.cc @@ -8,11 +8,11 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "rtc_base/nat_types.h" +#include "p2p/test/nat_types.h" #include "rtc_base/checks.h" -namespace rtc { +namespace webrtc { class SymmetricNAT : public NAT { public: @@ -58,4 +58,4 @@ NAT* NAT::Create(NATType type) { } } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/nat_types.h b/p2p/test/nat_types.h similarity index 68% rename from rtc_base/nat_types.h rename to p2p/test/nat_types.h index 60e7fbd4b4..9ef70026ef 100644 --- a/rtc_base/nat_types.h +++ b/p2p/test/nat_types.h @@ -8,10 +8,10 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef RTC_BASE_NAT_TYPES_H_ -#define RTC_BASE_NAT_TYPES_H_ +#ifndef P2P_TEST_NAT_TYPES_H_ +#define P2P_TEST_NAT_TYPES_H_ -namespace rtc { +namespace webrtc { /* Identifies each type of NAT that can be simulated. 
*/ enum NATType { @@ -42,6 +42,19 @@ class NAT { static NAT* Create(NATType type); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::NAT; +using ::webrtc::NAT_ADDR_RESTRICTED; +using ::webrtc::NAT_OPEN_CONE; +using ::webrtc::NAT_PORT_RESTRICTED; +using ::webrtc::NAT_SYMMETRIC; +using ::webrtc::NATType; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES -#endif // RTC_BASE_NAT_TYPES_H_ +#endif // P2P_TEST_NAT_TYPES_H_ diff --git a/p2p/test/nat_unittest.cc b/p2p/test/nat_unittest.cc new file mode 100644 index 0000000000..bb00fbacf5 --- /dev/null +++ b/p2p/test/nat_unittest.cc @@ -0,0 +1,442 @@ +/* + * Copyright 2004 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include + +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "api/environment/environment_factory.h" +#include "api/test/rtc_error_matchers.h" +#include "p2p/test/nat_server.h" +#include "p2p/test/nat_socket_factory.h" +#include "p2p/test/nat_types.h" +#include "rtc_base/async_tcp_socket.h" +#include "rtc_base/async_udp_socket.h" +#include "rtc_base/buffer.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/logging.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/net_test_helpers.h" +#include "rtc_base/network.h" +#include "rtc_base/physical_socket_server.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/socket_factory.h" +#include "rtc_base/socket_server.h" +#include "rtc_base/test_client.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/virtual_socket_server.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/wait_until.h" + +namespace webrtc { +namespace { + +bool CheckReceive(webrtc::TestClient* client, + bool should_receive, + const char* buf, + size_t size) { + return (should_receive) ? client->CheckNextPacket(buf, size, 0) + : client->CheckNoPacket(); +} + +webrtc::TestClient* CreateTestClient(webrtc::SocketFactory* factory, + const webrtc::SocketAddress& local_addr) { + return new webrtc::TestClient( + absl::WrapUnique(webrtc::AsyncUDPSocket::Create(factory, local_addr))); +} + +webrtc::TestClient* CreateTCPTestClient(webrtc::Socket* socket) { + return new webrtc::TestClient( + std::make_unique(socket)); +} + +// Tests that when sending from internal_addr to external_addrs through the +// NAT type specified by nat_type, all external addrs receive the sent packet +// and, if exp_same is true, all use the same mapped-address on the NAT. 
+void TestSend(webrtc::SocketServer* internal, + const webrtc::SocketAddress& internal_addr, + webrtc::SocketServer* external, + const webrtc::SocketAddress external_addrs[4], + webrtc::NATType nat_type, + bool exp_same) { + webrtc::Thread th_int(internal); + webrtc::Thread th_ext(external); + + th_int.Start(); + th_ext.Start(); + + webrtc::SocketAddress server_addr = internal_addr; + server_addr.SetPort(0); // Auto-select a port + webrtc::NATServer* nat = + new webrtc::NATServer(nat_type, th_int, internal, server_addr, + server_addr, th_ext, external, external_addrs[0]); + webrtc::NATSocketFactory* natsf = new webrtc::NATSocketFactory( + internal, nat->internal_udp_address(), nat->internal_tcp_address()); + + webrtc::TestClient* in; + th_int.BlockingCall([&] { in = CreateTestClient(natsf, internal_addr); }); + + webrtc::TestClient* out[4]; + th_ext.BlockingCall([&] { + for (int i = 0; i < 4; i++) + out[i] = CreateTestClient(external, external_addrs[i]); + }); + + const char* buf = "filter_test"; + size_t len = strlen(buf); + + th_int.BlockingCall([&] { in->SendTo(buf, len, out[0]->address()); }); + webrtc::SocketAddress trans_addr; + th_ext.BlockingCall( + [&] { EXPECT_TRUE(out[0]->CheckNextPacket(buf, len, &trans_addr)); }); + + for (int i = 1; i < 4; i++) { + th_int.BlockingCall([&] { in->SendTo(buf, len, out[i]->address()); }); + webrtc::SocketAddress trans_addr2; + th_ext.BlockingCall([&] { + EXPECT_TRUE(out[i]->CheckNextPacket(buf, len, &trans_addr2)); + bool are_same = (trans_addr == trans_addr2); + ASSERT_EQ(are_same, exp_same) << "same translated address"; + ASSERT_NE(AF_UNSPEC, trans_addr.family()); + ASSERT_NE(AF_UNSPEC, trans_addr2.family()); + }); + } + + th_int.Stop(); + th_ext.Stop(); + + delete nat; + delete natsf; + delete in; + for (int i = 0; i < 4; i++) + delete out[i]; +} + +// Tests that when sending from external_addrs to internal_addr, the packet +// is delivered according to the specified filter_ip and filter_port rules. 
+void TestRecv(webrtc::SocketServer* internal, + const webrtc::SocketAddress& internal_addr, + webrtc::SocketServer* external, + const webrtc::SocketAddress external_addrs[4], + webrtc::NATType nat_type, + bool filter_ip, + bool filter_port) { + webrtc::Thread th_int(internal); + webrtc::Thread th_ext(external); + + webrtc::SocketAddress server_addr = internal_addr; + server_addr.SetPort(0); // Auto-select a port + th_int.Start(); + th_ext.Start(); + webrtc::NATServer* nat = + new webrtc::NATServer(nat_type, th_int, internal, server_addr, + server_addr, th_ext, external, external_addrs[0]); + webrtc::NATSocketFactory* natsf = new webrtc::NATSocketFactory( + internal, nat->internal_udp_address(), nat->internal_tcp_address()); + + webrtc::TestClient* in = nullptr; + th_int.BlockingCall([&] { in = CreateTestClient(natsf, internal_addr); }); + + webrtc::TestClient* out[4]; + th_ext.BlockingCall([&] { + for (int i = 0; i < 4; i++) + out[i] = CreateTestClient(external, external_addrs[i]); + }); + + const char* buf = "filter_test"; + size_t len = strlen(buf); + + th_int.BlockingCall([&] { in->SendTo(buf, len, out[0]->address()); }); + webrtc::SocketAddress trans_addr; + th_ext.BlockingCall( + [&] { EXPECT_TRUE(out[0]->CheckNextPacket(buf, len, &trans_addr)); }); + + th_ext.BlockingCall([&] { out[1]->SendTo(buf, len, trans_addr); }); + th_int.BlockingCall( + [&] { EXPECT_TRUE(CheckReceive(in, !filter_ip, buf, len)); }); + th_ext.BlockingCall([&] { out[2]->SendTo(buf, len, trans_addr); }); + + th_int.BlockingCall( + [&] { EXPECT_TRUE(CheckReceive(in, !filter_port, buf, len)); }); + + th_ext.BlockingCall([&] { out[3]->SendTo(buf, len, trans_addr); }); + + th_int.BlockingCall([&] { + EXPECT_TRUE(CheckReceive(in, !filter_ip && !filter_port, buf, len)); + }); + + th_int.Stop(); + th_ext.Stop(); + + delete nat; + delete natsf; + delete in; + for (int i = 0; i < 4; i++) + delete out[i]; +} + +// Tests that NATServer allocates bindings properly. +void TestBindings(webrtc::SocketServer* internal, + const webrtc::SocketAddress& internal_addr, + webrtc::SocketServer* external, + const webrtc::SocketAddress external_addrs[4]) { + TestSend(internal, internal_addr, external, external_addrs, + webrtc::NAT_OPEN_CONE, true); + TestSend(internal, internal_addr, external, external_addrs, + webrtc::NAT_ADDR_RESTRICTED, true); + TestSend(internal, internal_addr, external, external_addrs, + webrtc::NAT_PORT_RESTRICTED, true); + TestSend(internal, internal_addr, external, external_addrs, + webrtc::NAT_SYMMETRIC, false); +} + +// Tests that NATServer filters packets properly. +void TestFilters(webrtc::SocketServer* internal, + const webrtc::SocketAddress& internal_addr, + webrtc::SocketServer* external, + const webrtc::SocketAddress external_addrs[4]) { + TestRecv(internal, internal_addr, external, external_addrs, + webrtc::NAT_OPEN_CONE, false, false); + TestRecv(internal, internal_addr, external, external_addrs, + webrtc::NAT_ADDR_RESTRICTED, true, false); + TestRecv(internal, internal_addr, external, external_addrs, + webrtc::NAT_PORT_RESTRICTED, true, true); + TestRecv(internal, internal_addr, external, external_addrs, + webrtc::NAT_SYMMETRIC, true, true); +} + +bool TestConnectivity(const webrtc::SocketAddress& src, + const webrtc::IPAddress& dst) { + // The physical NAT tests require connectivity to the selected ip from the + // internal address used for the NAT. Things like firewalls can break that, so + // check to see if it's worth even trying with this ip. 
+ std::unique_ptr pss( + new webrtc::PhysicalSocketServer()); + std::unique_ptr client( + pss->CreateSocket(src.family(), SOCK_DGRAM)); + std::unique_ptr server( + pss->CreateSocket(src.family(), SOCK_DGRAM)); + if (client->Bind(webrtc::SocketAddress(src.ipaddr(), 0)) != 0 || + server->Bind(webrtc::SocketAddress(dst, 0)) != 0) { + return false; + } + const char* buf = "hello other socket"; + size_t len = strlen(buf); + int sent = client->SendTo(buf, len, server->GetLocalAddress()); + + webrtc::Thread::Current()->SleepMs(100); + Buffer payload; + webrtc::Socket::ReceiveBuffer receive_buffer(payload); + int received = server->RecvFrom(receive_buffer); + return received == sent && ::memcmp(buf, payload.data(), len) == 0; +} + +void TestPhysicalInternal(const webrtc::SocketAddress& int_addr) { + webrtc::AutoThread main_thread; + webrtc::PhysicalSocketServer socket_server; + webrtc::BasicNetworkManager network_manager(CreateEnvironment(), + &socket_server); + network_manager.StartUpdating(); + // Process pending messages so the network list is updated. + webrtc::Thread::Current()->ProcessMessages(0); + + std::vector networks = network_manager.GetNetworks(); + networks.erase(std::remove_if(networks.begin(), networks.end(), + [](const webrtc::Network* network) { + return webrtc::kDefaultNetworkIgnoreMask & + network->type(); + }), + networks.end()); + if (networks.empty()) { + RTC_LOG(LS_WARNING) << "Not enough network adapters for test."; + return; + } + + webrtc::SocketAddress ext_addr1(int_addr); + webrtc::SocketAddress ext_addr2; + // Find an available IP with matching family. The test breaks if int_addr + // can't talk to ip, so check for connectivity as well. + for (const Network* const network : networks) { + const webrtc::IPAddress& ip = network->GetBestIP(); + if (ip.family() == int_addr.family() && TestConnectivity(int_addr, ip)) { + ext_addr2.SetIP(ip); + break; + } + } + if (ext_addr2.IsNil()) { + RTC_LOG(LS_WARNING) << "No available IP of same family as " + << int_addr.ToString(); + return; + } + + RTC_LOG(LS_INFO) << "selected ip " << ext_addr2.ipaddr().ToString(); + + webrtc::SocketAddress ext_addrs[4] = { + webrtc::SocketAddress(ext_addr1), webrtc::SocketAddress(ext_addr2), + webrtc::SocketAddress(ext_addr1), webrtc::SocketAddress(ext_addr2)}; + + std::unique_ptr int_pss( + new webrtc::PhysicalSocketServer()); + std::unique_ptr ext_pss( + new webrtc::PhysicalSocketServer()); + + TestBindings(int_pss.get(), int_addr, ext_pss.get(), ext_addrs); + TestFilters(int_pss.get(), int_addr, ext_pss.get(), ext_addrs); +} + +TEST(NatTest, TestPhysicalIPv4) { + TestPhysicalInternal(webrtc::SocketAddress("127.0.0.1", 0)); +} + +TEST(NatTest, TestPhysicalIPv6) { + if (webrtc::HasIPv6Enabled()) { + TestPhysicalInternal(webrtc::SocketAddress("::1", 0)); + } else { + RTC_LOG(LS_WARNING) << "No IPv6, skipping"; + } +} + +namespace { + +class TestVirtualSocketServer : public webrtc::VirtualSocketServer { + public: + // Expose this publicly + webrtc::IPAddress GetNextIP(int af) { + return webrtc::VirtualSocketServer::GetNextIP(af); + } +}; + +} // namespace + +void TestVirtualInternal(int family) { + webrtc::AutoThread main_thread; + std::unique_ptr int_vss( + new TestVirtualSocketServer()); + std::unique_ptr ext_vss( + new TestVirtualSocketServer()); + + webrtc::SocketAddress int_addr; + webrtc::SocketAddress ext_addrs[4]; + int_addr.SetIP(int_vss->GetNextIP(family)); + ext_addrs[0].SetIP(ext_vss->GetNextIP(int_addr.family())); + ext_addrs[1].SetIP(ext_vss->GetNextIP(int_addr.family())); + 
ext_addrs[2].SetIP(ext_addrs[0].ipaddr()); + ext_addrs[3].SetIP(ext_addrs[1].ipaddr()); + + TestBindings(int_vss.get(), int_addr, ext_vss.get(), ext_addrs); + TestFilters(int_vss.get(), int_addr, ext_vss.get(), ext_addrs); +} + +TEST(NatTest, TestVirtualIPv4) { + TestVirtualInternal(AF_INET); +} + +TEST(NatTest, TestVirtualIPv6) { + if (webrtc::HasIPv6Enabled()) { + TestVirtualInternal(AF_INET6); + } else { + RTC_LOG(LS_WARNING) << "No IPv6, skipping"; + } +} + +class NatTcpTest : public ::testing::Test, public sigslot::has_slots<> { + public: + NatTcpTest() + : int_addr_("192.168.0.1", 0), + ext_addr_("10.0.0.1", 0), + connected_(false), + int_vss_(new TestVirtualSocketServer()), + ext_vss_(new TestVirtualSocketServer()), + int_thread_(new webrtc::Thread(int_vss_.get())), + ext_thread_(new webrtc::Thread(ext_vss_.get())), + nat_(new webrtc::NATServer(webrtc::NAT_OPEN_CONE, + *int_thread_, + int_vss_.get(), + int_addr_, + int_addr_, + *ext_thread_, + ext_vss_.get(), + ext_addr_)), + natsf_(new webrtc::NATSocketFactory(int_vss_.get(), + nat_->internal_udp_address(), + nat_->internal_tcp_address())) { + int_thread_->Start(); + ext_thread_->Start(); + } + + void OnConnectEvent(webrtc::Socket* socket) { connected_ = true; } + + void OnAcceptEvent(webrtc::Socket* socket) { + accepted_.reset(server_->Accept(nullptr)); + } + + void OnCloseEvent(webrtc::Socket* socket, int error) {} + + void ConnectEvents() { + server_->SignalReadEvent.connect(this, &NatTcpTest::OnAcceptEvent); + client_->SignalConnectEvent.connect(this, &NatTcpTest::OnConnectEvent); + } + + webrtc::SocketAddress int_addr_; + webrtc::SocketAddress ext_addr_; + bool connected_; + std::unique_ptr int_vss_; + std::unique_ptr ext_vss_; + std::unique_ptr int_thread_; + std::unique_ptr ext_thread_; + std::unique_ptr nat_; + std::unique_ptr natsf_; + std::unique_ptr client_; + std::unique_ptr server_; + std::unique_ptr accepted_; +}; + +TEST_F(NatTcpTest, DISABLED_TestConnectOut) { + server_.reset(ext_vss_->CreateSocket(AF_INET, SOCK_STREAM)); + server_->Bind(ext_addr_); + server_->Listen(5); + + client_.reset(natsf_->CreateSocket(AF_INET, SOCK_STREAM)); + EXPECT_GE(0, client_->Bind(int_addr_)); + EXPECT_GE(0, client_->Connect(server_->GetLocalAddress())); + + ConnectEvents(); + + EXPECT_THAT( + webrtc::WaitUntil([&] { return connected_; }, ::testing::IsTrue()), + webrtc::IsRtcOk()); + EXPECT_EQ(client_->GetRemoteAddress(), server_->GetLocalAddress()); + EXPECT_EQ(accepted_->GetRemoteAddress().ipaddr(), ext_addr_.ipaddr()); + + std::unique_ptr in( + CreateTCPTestClient(client_.release())); + std::unique_ptr out( + CreateTCPTestClient(accepted_.release())); + + const char* buf = "test_packet"; + size_t len = strlen(buf); + + in->Send(buf, len); + webrtc::SocketAddress trans_addr; + EXPECT_TRUE(out->CheckNextPacket(buf, len, &trans_addr)); + + out->Send(buf, len); + EXPECT_TRUE(in->CheckNextPacket(buf, len, &trans_addr)); +} + +} // namespace +} // namespace webrtc diff --git a/p2p/base/stun_server.cc b/p2p/test/stun_server.cc similarity index 64% rename from p2p/base/stun_server.cc rename to p2p/test/stun_server.cc index 7827a0bb81..c1bba69f12 100644 --- a/p2p/base/stun_server.cc +++ b/p2p/test/stun_server.cc @@ -8,60 +8,72 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "p2p/base/stun_server.h" +#include "p2p/test/stun_server.h" +#include #include #include #include "absl/strings/string_view.h" +#include "api/sequence_checker.h" +#include "api/transport/stun.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/async_udp_socket.h" #include "rtc_base/byte_buffer.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/socket_address.h" -namespace cricket { +namespace webrtc { -StunServer::StunServer(rtc::AsyncUDPSocket* socket) : socket_(socket) { - socket_->SignalReadPacket.connect(this, &StunServer::OnPacket); +StunServer::StunServer(AsyncUDPSocket* socket) : socket_(socket) { + socket_->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + OnPacket(socket, packet); + }); } StunServer::~StunServer() { - socket_->SignalReadPacket.disconnect(this); + RTC_DCHECK_RUN_ON(&sequence_checker_); + socket_->DeregisterReceivedPacketCallback(); } -void StunServer::OnPacket(rtc::AsyncPacketSocket* socket, - const char* buf, - size_t size, - const rtc::SocketAddress& remote_addr, - const int64_t& /* packet_time_us */) { +void StunServer::OnPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet) { + RTC_DCHECK_RUN_ON(&sequence_checker_); // Parse the STUN message; eat any messages that fail to parse. - rtc::ByteBufferReader bbuf(buf, size); + ByteBufferReader bbuf(packet.payload()); StunMessage msg; if (!msg.Read(&bbuf)) { return; } - // TODO(?): If unknown non-optional (<= 0x7fff) attributes are found, send a + // TODO(?): If unknown non-optional (<= 0x7fff) attributes are found, + // send a // 420 "Unknown Attribute" response. // Send the message to the appropriate handler function. 
switch (msg.type()) { case STUN_BINDING_REQUEST: - OnBindingRequest(&msg, remote_addr); + OnBindingRequest(&msg, packet.source_address()); break; default: - SendErrorResponse(msg, remote_addr, 600, "Operation Not Supported"); + SendErrorResponse(msg, packet.source_address(), 600, + "Operation Not Supported"); } } void StunServer::OnBindingRequest(StunMessage* msg, - const rtc::SocketAddress& remote_addr) { + const SocketAddress& remote_addr) { StunMessage response(STUN_BINDING_RESPONSE, msg->transaction_id()); GetStunBindResponse(msg, remote_addr, &response); SendResponse(response, remote_addr); } void StunServer::SendErrorResponse(const StunMessage& msg, - const rtc::SocketAddress& addr, + const SocketAddress& addr, int error_code, absl::string_view error_desc) { StunMessage err_msg(GetStunErrorResponseType(msg.type()), @@ -76,16 +88,16 @@ void StunServer::SendErrorResponse(const StunMessage& msg, } void StunServer::SendResponse(const StunMessage& msg, - const rtc::SocketAddress& addr) { - rtc::ByteBufferWriter buf; + const SocketAddress& addr) { + ByteBufferWriter buf; msg.Write(&buf); - rtc::PacketOptions options; + AsyncSocketPacketOptions options; if (socket_->SendTo(buf.Data(), buf.Length(), addr, options) < 0) RTC_LOG_ERR(LS_ERROR) << "sendto"; } void StunServer::GetStunBindResponse(StunMessage* message, - const rtc::SocketAddress& remote_addr, + const SocketAddress& remote_addr, StunMessage* response) const { RTC_DCHECK_EQ(response->type(), STUN_BINDING_RESPONSE); RTC_DCHECK_EQ(response->transaction_id(), message->transaction_id()); @@ -101,4 +113,4 @@ void StunServer::GetStunBindResponse(StunMessage* message, response->AddAttribute(std::move(mapped_addr)); } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/stun_server.h b/p2p/test/stun_server.h similarity index 52% rename from p2p/base/stun_server.h rename to p2p/test/stun_server.h index 505773b052..517fa1dece 100644 --- a/p2p/base/stun_server.h +++ b/p2p/test/stun_server.h @@ -8,65 +8,70 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef P2P_BASE_STUN_SERVER_H_ -#define P2P_BASE_STUN_SERVER_H_ +#ifndef P2P_TEST_STUN_SERVER_H_ +#define P2P_TEST_STUN_SERVER_H_ #include -#include #include #include "absl/strings/string_view.h" +#include "api/sequence_checker.h" #include "api/transport/stun.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/async_udp_socket.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/socket_address.h" -#include "rtc_base/third_party/sigslot/sigslot.h" -namespace cricket { +namespace webrtc { const int STUN_SERVER_PORT = 3478; -class StunServer : public sigslot::has_slots<> { +class StunServer { public: // Creates a STUN server, which will listen on the given socket. - explicit StunServer(rtc::AsyncUDPSocket* socket); + explicit StunServer(AsyncUDPSocket* socket); // Removes the STUN server from the socket and deletes the socket. - ~StunServer() override; + virtual ~StunServer(); protected: - // Slot for Socket.PacketRead: - void OnPacket(rtc::AsyncPacketSocket* socket, - const char* buf, - size_t size, - const rtc::SocketAddress& remote_addr, - const int64_t& packet_time_us); + // Callback for packets from socket. 
+ void OnPacket(AsyncPacketSocket* socket, const ReceivedIpPacket& packet); // Handlers for the different types of STUN/TURN requests: - virtual void OnBindingRequest(StunMessage* msg, - const rtc::SocketAddress& addr); - void OnAllocateRequest(StunMessage* msg, const rtc::SocketAddress& addr); - void OnSharedSecretRequest(StunMessage* msg, const rtc::SocketAddress& addr); - void OnSendRequest(StunMessage* msg, const rtc::SocketAddress& addr); + virtual void OnBindingRequest(StunMessage* msg, const SocketAddress& addr); + void OnAllocateRequest(StunMessage* msg, const SocketAddress& addr); + void OnSharedSecretRequest(StunMessage* msg, const SocketAddress& addr); + void OnSendRequest(StunMessage* msg, const SocketAddress& addr); // Sends an error response to the given message back to the user. void SendErrorResponse(const StunMessage& msg, - const rtc::SocketAddress& addr, + const SocketAddress& addr, int error_code, absl::string_view error_desc); // Sends the given message to the appropriate destination. - void SendResponse(const StunMessage& msg, const rtc::SocketAddress& addr); + void SendResponse(const StunMessage& msg, const SocketAddress& addr); // A helper method to compose a STUN binding response. void GetStunBindResponse(StunMessage* message, - const rtc::SocketAddress& remote_addr, + const SocketAddress& remote_addr, StunMessage* response) const; private: - std::unique_ptr socket_; + SequenceChecker sequence_checker_; + std::unique_ptr socket_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::STUN_SERVER_PORT; +using ::webrtc::StunServer; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES -#endif // P2P_BASE_STUN_SERVER_H_ +#endif // P2P_TEST_STUN_SERVER_H_ diff --git a/p2p/base/stun_server_unittest.cc b/p2p/test/stun_server_unittest.cc similarity index 77% rename from p2p/base/stun_server_unittest.cc rename to p2p/test/stun_server_unittest.cc index 5d3f31fb98..1a270d8b02 100644 --- a/p2p/base/stun_server_unittest.cc +++ b/p2p/test/stun_server_unittest.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "p2p/base/stun_server.h" +#include "p2p/test/stun_server.h" #include @@ -16,37 +16,37 @@ #include #include "absl/memory/memory.h" +#include "api/transport/stun.h" +#include "rtc_base/async_udp_socket.h" #include "rtc_base/byte_buffer.h" -#include "rtc_base/ip_address.h" -#include "rtc_base/logging.h" +#include "rtc_base/socket_address.h" #include "rtc_base/test_client.h" #include "rtc_base/thread.h" #include "rtc_base/virtual_socket_server.h" #include "test/gtest.h" -namespace cricket { +namespace webrtc { namespace { -const rtc::SocketAddress server_addr("99.99.99.1", 3478); -const rtc::SocketAddress client_addr("1.2.3.4", 1234); +const SocketAddress server_addr("99.99.99.1", 3478); +const SocketAddress client_addr("1.2.3.4", 1234); } // namespace class StunServerTest : public ::testing::Test { public: - StunServerTest() : ss_(new rtc::VirtualSocketServer()), network_(ss_.get()) { + StunServerTest() : ss_(new VirtualSocketServer()) { + ss_->SetMessageQueue(&main_thread); server_.reset( - new StunServer(rtc::AsyncUDPSocket::Create(ss_.get(), server_addr))); - client_.reset(new rtc::TestClient( - absl::WrapUnique(rtc::AsyncUDPSocket::Create(ss_.get(), client_addr)))); - - network_.Start(); + new StunServer(AsyncUDPSocket::Create(ss_.get(), server_addr))); + client_.reset(new TestClient( + absl::WrapUnique(AsyncUDPSocket::Create(ss_.get(), client_addr)))); } - ~StunServerTest() override { network_.Stop(); } void Send(const StunMessage& msg) { - rtc::ByteBufferWriter buf; + ByteBufferWriter buf; msg.Write(&buf); - Send(buf.Data(), static_cast(buf.Length())); + Send(reinterpret_cast(buf.Data()), + static_cast(buf.Length())); } void Send(const char* buf, int len) { client_->SendTo(buf, len, server_addr); @@ -54,10 +54,10 @@ class StunServerTest : public ::testing::Test { bool ReceiveFails() { return (client_->CheckNoPacket()); } StunMessage* Receive() { StunMessage* msg = NULL; - std::unique_ptr packet = - client_->NextPacket(rtc::TestClient::kTimeoutMs); + std::unique_ptr packet = + client_->NextPacket(TestClient::kTimeoutMs); if (packet) { - rtc::ByteBufferReader buf(packet->buf, packet->size); + ByteBufferReader buf(packet->buf); msg = new StunMessage(); msg->Read(&buf); } @@ -65,11 +65,10 @@ class StunServerTest : public ::testing::Test { } private: - rtc::AutoThread main_thread; - std::unique_ptr ss_; - rtc::Thread network_; + AutoThread main_thread; + std::unique_ptr ss_; std::unique_ptr server_; - std::unique_ptr client_; + std::unique_ptr client_; }; TEST_F(StunServerTest, TestGood) { @@ -142,4 +141,4 @@ TEST_F(StunServerTest, TestBad) { ASSERT_TRUE(ReceiveFails()); } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/test/test_stun_server.cc b/p2p/test/test_stun_server.cc new file mode 100644 index 0000000000..f440d1e186 --- /dev/null +++ b/p2p/test/test_stun_server.cc @@ -0,0 +1,57 @@ +/* + * Copyright 2017 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "p2p/test/test_stun_server.h" + +#include +#include + +#include "api/sequence_checker.h" +#include "api/transport/stun.h" +#include "p2p/test/stun_server.h" +#include "rtc_base/async_udp_socket.h" +#include "rtc_base/checks.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/socket_server.h" + +namespace webrtc { + +std::unique_ptr> +TestStunServer::Create(SocketServer* ss, + const SocketAddress& addr, + Thread& network_thread) { + Socket* socket = ss->CreateSocket(addr.family(), SOCK_DGRAM); + RTC_CHECK(socket != nullptr) << "Failed to create socket"; + AsyncUDPSocket* udp_socket = AsyncUDPSocket::Create(socket, addr); + RTC_CHECK(udp_socket != nullptr) << "Failed to create AsyncUDPSocket"; + TestStunServer* server = nullptr; + network_thread.BlockingCall( + [&]() { server = new TestStunServer(udp_socket, network_thread); }); + std::unique_ptr> + result(server, [&](TestStunServer* server) { + network_thread.BlockingCall([server]() { delete server; }); + }); + return result; +} + +void TestStunServer::OnBindingRequest(StunMessage* msg, + const SocketAddress& remote_addr) { + RTC_DCHECK_RUN_ON(&network_thread_); + if (fake_stun_addr_.IsNil()) { + StunServer::OnBindingRequest(msg, remote_addr); + } else { + StunMessage response(STUN_BINDING_RESPONSE, msg->transaction_id()); + GetStunBindResponse(msg, fake_stun_addr_, &response); + SendResponse(response, remote_addr); + } +} + +} // namespace webrtc diff --git a/p2p/test/test_stun_server.h b/p2p/test/test_stun_server.h new file mode 100644 index 0000000000..b65bc85f0f --- /dev/null +++ b/p2p/test/test_stun_server.h @@ -0,0 +1,63 @@ +/* + * Copyright 2008 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef P2P_TEST_TEST_STUN_SERVER_H_ +#define P2P_TEST_TEST_STUN_SERVER_H_ + +#include +#include + +#include "api/transport/stun.h" +#include "p2p/test/stun_server.h" +#include "rtc_base/async_udp_socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/socket_server.h" +#include "rtc_base/thread.h" + +namespace webrtc { + +// A test STUN server. Useful for unit tests. +class TestStunServer : StunServer { + public: + using StunServerPtr = + std::unique_ptr>; + static StunServerPtr Create(SocketServer* ss, + const SocketAddress& addr, + Thread& network_thread); + + // Set a fake STUN address to return to the client. + void set_fake_stun_addr(const SocketAddress& addr) { fake_stun_addr_ = addr; } + + private: + static void DeleteOnNetworkThread(TestStunServer* server); + + TestStunServer(AsyncUDPSocket* socket, Thread& network_thread) + : StunServer(socket), network_thread_(network_thread) {} + + void OnBindingRequest(StunMessage* msg, + const SocketAddress& remote_addr) override; + + private: + SocketAddress fake_stun_addr_; + Thread& network_thread_; +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::TestStunServer; +} // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // P2P_TEST_TEST_STUN_SERVER_H_ diff --git a/p2p/base/test_turn_customizer.h b/p2p/test/test_turn_customizer.h similarity index 58% rename from p2p/base/test_turn_customizer.h rename to p2p/test/test_turn_customizer.h index 415b13fbf2..0d447eb7d0 100644 --- a/p2p/base/test_turn_customizer.h +++ b/p2p/test/test_turn_customizer.h @@ -8,17 +8,20 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef P2P_BASE_TEST_TURN_CUSTOMIZER_H_ -#define P2P_BASE_TEST_TURN_CUSTOMIZER_H_ +#ifndef P2P_TEST_TEST_TURN_CUSTOMIZER_H_ +#define P2P_TEST_TEST_TURN_CUSTOMIZER_H_ +#include #include +#include "api/transport/stun.h" #include "api/turn_customizer.h" -#include "rtc_base/gunit.h" +#include "p2p/base/port_interface.h" +#include "test/gtest.h" -namespace cricket { +namespace webrtc { -class TestTurnCustomizer : public webrtc::TurnCustomizer { +class TestTurnCustomizer : public TurnCustomizer { public: TestTurnCustomizer() {} virtual ~TestTurnCustomizer() {} @@ -28,19 +31,19 @@ class TestTurnCustomizer : public webrtc::TurnCustomizer { STUN_ATTR_COUNTER = 0xFF02 // Number }; - void MaybeModifyOutgoingStunMessage(cricket::PortInterface* port, - cricket::StunMessage* message) override { + void MaybeModifyOutgoingStunMessage(PortInterface* port, + StunMessage* message) override { modify_cnt_++; ASSERT_NE(0, message->type()); if (add_counter_) { - message->AddAttribute(std::make_unique( + message->AddAttribute(std::make_unique( STUN_ATTR_COUNTER, modify_cnt_)); } return; } - bool AllowChannelData(cricket::PortInterface* port, + bool AllowChannelData(PortInterface* port, const void* data, size_t size, bool payload) override { @@ -54,6 +57,14 @@ class TestTurnCustomizer : public webrtc::TurnCustomizer { unsigned int allow_channel_data_cnt_ = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::TestTurnCustomizer; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES -#endif // P2P_BASE_TEST_TURN_CUSTOMIZER_H_ +#endif // P2P_TEST_TEST_TURN_CUSTOMIZER_H_ diff --git a/p2p/base/test_turn_server.h b/p2p/test/test_turn_server.h similarity index 67% rename from p2p/base/test_turn_server.h rename to p2p/test/test_turn_server.h index 4070372db2..e4165001d8 100644 --- a/p2p/base/test_turn_server.h +++ b/p2p/test/test_turn_server.h @@ -8,9 +8,10 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef P2P_BASE_TEST_TURN_SERVER_H_ -#define P2P_BASE_TEST_TURN_SERVER_H_ +#ifndef P2P_TEST_TEST_TURN_SERVER_H_ +#define P2P_TEST_TEST_TURN_SERVER_H_ +#include #include #include #include @@ -20,25 +21,31 @@ #include "api/sequence_checker.h" #include "api/transport/stun.h" #include "p2p/base/basic_packet_socket_factory.h" -#include "p2p/base/turn_server.h" +#include "p2p/base/port_interface.h" +#include "p2p/test/turn_server.h" #include "rtc_base/async_udp_socket.h" +#include "rtc_base/checks.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/socket_factory.h" #include "rtc_base/ssl_adapter.h" #include "rtc_base/ssl_identity.h" +#include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/thread.h" -namespace cricket { +namespace webrtc { static const char kTestRealm[] = "example.org"; static const char kTestSoftware[] = "TestTurnServer"; class TestTurnRedirector : public TurnRedirectInterface { public: - explicit TestTurnRedirector(const std::vector& addresses) + explicit TestTurnRedirector(const std::vector& addresses) : alternate_server_addresses_(addresses), iter_(alternate_server_addresses_.begin()) {} - virtual bool ShouldRedirect(const rtc::SocketAddress&, - rtc::SocketAddress* out) { + virtual bool ShouldRedirect(const SocketAddress&, SocketAddress* out) { if (!out || iter_ == alternate_server_addresses_.end()) { return false; } @@ -47,23 +54,23 @@ class TestTurnRedirector : public TurnRedirectInterface { } private: - const std::vector& alternate_server_addresses_; - std::vector::const_iterator iter_; + const std::vector& alternate_server_addresses_; + std::vector::const_iterator iter_; }; class TestTurnServer : public TurnAuthInterface { public: - TestTurnServer(rtc::Thread* thread, - rtc::SocketFactory* socket_factory, - const rtc::SocketAddress& int_addr, - const rtc::SocketAddress& udp_ext_addr, - ProtocolType int_protocol = PROTO_UDP, + TestTurnServer(Thread* thread, + SocketFactory* socket_factory, + const SocketAddress& int_addr, + const SocketAddress& udp_ext_addr, + ProtocolType int_protocol = webrtc::PROTO_UDP, bool ignore_bad_cert = true, absl::string_view common_name = "test turn server") : server_(thread), socket_factory_(socket_factory) { AddInternalSocket(int_addr, int_protocol, ignore_bad_cert, common_name); server_.SetExternalSocketFactory( - new rtc::BasicPacketSocketFactory(socket_factory), udp_ext_addr); + new BasicPacketSocketFactory(socket_factory), udp_ext_addr); server_.set_realm(kTestRealm); server_.set_software(kTestSoftware); server_.set_auth_hook(this); @@ -91,30 +98,30 @@ class TestTurnServer : public TurnAuthInterface { server_.set_enable_permission_checks(enable); } - void AddInternalSocket(const rtc::SocketAddress& int_addr, + void AddInternalSocket(const SocketAddress& int_addr, ProtocolType proto, bool ignore_bad_cert = true, absl::string_view common_name = "test turn server") { RTC_DCHECK(thread_checker_.IsCurrent()); - if (proto == cricket::PROTO_UDP) { + if (proto == webrtc::PROTO_UDP) { server_.AddInternalSocket( - rtc::AsyncUDPSocket::Create(socket_factory_, int_addr), proto); - } else if (proto == cricket::PROTO_TCP || proto == cricket::PROTO_TLS) { + AsyncUDPSocket::Create(socket_factory_, int_addr), proto); + } else if (proto == webrtc::PROTO_TCP || proto == webrtc::PROTO_TLS) { // For TCP we need to create a server socket which can listen for incoming // new connections. 
- rtc::Socket* socket = socket_factory_->CreateSocket(AF_INET, SOCK_STREAM); + Socket* socket = socket_factory_->CreateSocket(AF_INET, SOCK_STREAM); socket->Bind(int_addr); socket->Listen(5); - if (proto == cricket::PROTO_TLS) { + if (proto == webrtc::PROTO_TLS) { // For TLS, wrap the TCP socket with an SSL adapter. The adapter must // be configured with a self-signed certificate for testing. // Additionally, the client will not present a valid certificate, so we // must not fail when checking the peer's identity. - std::unique_ptr ssl_adapter_factory = - rtc::SSLAdapterFactory::Create(); - ssl_adapter_factory->SetRole(rtc::SSL_SERVER); + std::unique_ptr ssl_adapter_factory = + SSLAdapterFactory::Create(); + ssl_adapter_factory->SetRole(webrtc::SSL_SERVER); ssl_adapter_factory->SetIdentity( - rtc::SSLIdentity::Create(common_name, rtc::KeyParams())); + SSLIdentity::Create(common_name, KeyParams())); ssl_adapter_factory->SetIgnoreBadCert(ignore_bad_cert); server_.AddInternalServerSocket(socket, proto, std::move(ssl_adapter_factory)); @@ -128,7 +135,7 @@ class TestTurnServer : public TurnAuthInterface { // Finds the first allocation in the server allocation map with a source // ip and port matching the socket address provided. - TurnServerAllocation* FindAllocation(const rtc::SocketAddress& src) { + TurnServerAllocation* FindAllocation(const SocketAddress& src) { RTC_DCHECK(thread_checker_.IsCurrent()); const TurnServer::AllocationMap& map = server_.allocations(); for (TurnServer::AllocationMap::const_iterator it = map.begin(); @@ -152,10 +159,21 @@ class TestTurnServer : public TurnAuthInterface { } TurnServer server_; - rtc::SocketFactory* socket_factory_; - webrtc::SequenceChecker thread_checker_; + SocketFactory* socket_factory_; + SequenceChecker thread_checker_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::kTestRealm; +using ::webrtc::kTestSoftware; +using ::webrtc::TestTurnRedirector; +using ::webrtc::TestTurnServer; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES -#endif // P2P_BASE_TEST_TURN_SERVER_H_ +#endif // P2P_TEST_TEST_TURN_SERVER_H_ diff --git a/p2p/base/turn_server.cc b/p2p/test/turn_server.cc similarity index 80% rename from p2p/base/turn_server.cc rename to p2p/test/turn_server.cc index b362bfa5cd..cfc38acc67 100644 --- a/p2p/base/turn_server.cc +++ b/p2p/test/turn_server.cc @@ -8,10 +8,13 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "p2p/base/turn_server.h" +#include "p2p/test/turn_server.h" #include +#include +#include #include +#include #include // for std::tie #include @@ -20,18 +23,30 @@ #include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/packet_socket_factory.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "api/transport/stun.h" +#include "api/units/time_delta.h" #include "p2p/base/async_stun_tcp_socket.h" +#include "p2p/base/port_interface.h" +#include "rtc_base/async_packet_socket.h" #include "rtc_base/byte_buffer.h" +#include "rtc_base/byte_order.h" #include "rtc_base/checks.h" -#include "rtc_base/helpers.h" +#include "rtc_base/crypto_random.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/message_digest.h" -#include "rtc_base/socket_adapters.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/ssl_adapter.h" +#include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/time_utils.h" -namespace cricket { +namespace webrtc { namespace { using ::webrtc::TimeDelta; @@ -42,15 +57,12 @@ constexpr TimeDelta kDefaultAllocationTimeout = TimeDelta::Minutes(10); constexpr TimeDelta kPermissionTimeout = TimeDelta::Minutes(5); constexpr TimeDelta kChannelTimeout = TimeDelta::Minutes(10); -constexpr int kMinChannelNumber = 0x4000; -constexpr int kMaxChannelNumber = 0x7FFF; - constexpr size_t kNonceKeySize = 16; constexpr size_t kNonceSize = 48; constexpr size_t TURN_CHANNEL_HEADER_SIZE = 4U; -// TODO(mallinath) - Move these to a common place. +// TODO(mallinath): Move these to a common place. bool IsTurnChannelData(uint16_t msg_type) { // The first two bits of a channel data message are 0b01. 
return ((msg_type & 0xC000) == 0x4000); @@ -71,13 +83,13 @@ int GetStunErrorResponseTypeOrZero(const StunMessage& req) { static void InitErrorResponse(int code, absl::string_view reason, StunMessage* resp) { - resp->AddAttribute(std::make_unique( + resp->AddAttribute(std::make_unique( STUN_ATTR_ERROR_CODE, code, std::string(reason))); } -TurnServer::TurnServer(webrtc::TaskQueueBase* thread) +TurnServer::TurnServer(TaskQueueBase* thread) : thread_(thread), - nonce_key_(rtc::CreateRandomString(kNonceKeySize)), + nonce_key_(CreateRandomString(kNonceKeySize)), auth_hook_(NULL), redirect_hook_(NULL), enable_otu_nonce_(false) {} @@ -86,29 +98,33 @@ TurnServer::~TurnServer() { RTC_DCHECK_RUN_ON(thread_); for (InternalSocketMap::iterator it = server_sockets_.begin(); it != server_sockets_.end(); ++it) { - rtc::AsyncPacketSocket* socket = it->first; + AsyncPacketSocket* socket = it->first; delete socket; } for (ServerSocketMap::iterator it = server_listen_sockets_.begin(); it != server_listen_sockets_.end(); ++it) { - rtc::Socket* socket = it->first; + Socket* socket = it->first; delete socket; } } -void TurnServer::AddInternalSocket(rtc::AsyncPacketSocket* socket, +void TurnServer::AddInternalSocket(AsyncPacketSocket* socket, ProtocolType proto) { RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(server_sockets_.end() == server_sockets_.find(socket)); server_sockets_[socket] = proto; - socket->SignalReadPacket.connect(this, &TurnServer::OnInternalPacket); + socket->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + RTC_DCHECK_RUN_ON(thread_); + OnInternalPacket(socket, packet); + }); } void TurnServer::AddInternalServerSocket( - rtc::Socket* socket, + Socket* socket, ProtocolType proto, - std::unique_ptr ssl_adapter_factory) { + std::unique_ptr ssl_adapter_factory) { RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(server_listen_sockets_.end() == @@ -117,38 +133,38 @@ void TurnServer::AddInternalServerSocket( socket->SignalReadEvent.connect(this, &TurnServer::OnNewInternalConnection); } -void TurnServer::SetExternalSocketFactory( - rtc::PacketSocketFactory* factory, - const rtc::SocketAddress& external_addr) { +void TurnServer::SetExternalSocketFactory(PacketSocketFactory* factory, + const SocketAddress& external_addr) { RTC_DCHECK_RUN_ON(thread_); external_socket_factory_.reset(factory); external_addr_ = external_addr; } -void TurnServer::OnNewInternalConnection(rtc::Socket* socket) { +void TurnServer::OnNewInternalConnection(Socket* socket) { RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(server_listen_sockets_.find(socket) != server_listen_sockets_.end()); AcceptConnection(socket); } -void TurnServer::AcceptConnection(rtc::Socket* server_socket) { +void TurnServer::AcceptConnection(Socket* server_socket) { + RTC_DCHECK_RUN_ON(thread_); + // Check if someone is trying to connect to us. 
- rtc::SocketAddress accept_addr; - rtc::Socket* accepted_socket = server_socket->Accept(&accept_addr); + SocketAddress accept_addr; + Socket* accepted_socket = server_socket->Accept(&accept_addr); if (accepted_socket != NULL) { const ServerSocketInfo& info = server_listen_sockets_[server_socket]; if (info.ssl_adapter_factory) { - rtc::SSLAdapter* ssl_adapter = + SSLAdapter* ssl_adapter = info.ssl_adapter_factory->CreateAdapter(accepted_socket); ssl_adapter->StartSSL(""); accepted_socket = ssl_adapter; } - cricket::AsyncStunTCPSocket* tcp_socket = - new cricket::AsyncStunTCPSocket(accepted_socket); + AsyncStunTCPSocket* tcp_socket = new AsyncStunTCPSocket(accepted_socket); tcp_socket->SubscribeCloseEvent(this, - [this](rtc::AsyncPacketSocket* s, int err) { + [this](AsyncPacketSocket* s, int err) { OnInternalSocketClose(s, err); }); // Finally add the socket so it can start communicating with the client. @@ -156,46 +172,42 @@ void TurnServer::AcceptConnection(rtc::Socket* server_socket) { } } -void TurnServer::OnInternalSocketClose(rtc::AsyncPacketSocket* socket, - int err) { +void TurnServer::OnInternalSocketClose(AsyncPacketSocket* socket, int err) { RTC_DCHECK_RUN_ON(thread_); DestroyInternalSocket(socket); } -void TurnServer::OnInternalPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& addr, - const int64_t& /* packet_time_us */) { +void TurnServer::OnInternalPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet) { RTC_DCHECK_RUN_ON(thread_); // Fail if the packet is too small to even contain a channel header. - if (size < TURN_CHANNEL_HEADER_SIZE) { + if (packet.payload().size() < TURN_CHANNEL_HEADER_SIZE) { return; } InternalSocketMap::iterator iter = server_sockets_.find(socket); RTC_DCHECK(iter != server_sockets_.end()); - TurnServerConnection conn(addr, iter->second, socket); - uint16_t msg_type = rtc::GetBE16(data); + TurnServerConnection conn(packet.source_address(), iter->second, socket); + uint16_t msg_type = webrtc::GetBE16(packet.payload().data()); if (!IsTurnChannelData(msg_type)) { // This is a STUN message. - HandleStunMessage(&conn, data, size); + HandleStunMessage(&conn, packet.payload()); } else { // This is a channel message; let the allocation handle it. TurnServerAllocation* allocation = FindAllocation(&conn); if (allocation) { - allocation->HandleChannelData(data, size); + allocation->HandleChannelData(packet.payload()); } if (stun_message_observer_ != nullptr) { - stun_message_observer_->ReceivedChannelData(data, size); + stun_message_observer_->ReceivedChannelData(packet.payload()); } } } void TurnServer::HandleStunMessage(TurnServerConnection* conn, - const char* data, - size_t size) { + ArrayView payload) { + RTC_DCHECK_RUN_ON(thread_); TurnMessage msg; - rtc::ByteBufferReader buf(data, size); + ByteBufferReader buf(payload); if (!msg.Read(&buf) || (buf.Length() > 0)) { RTC_LOG(LS_WARNING) << "Received invalid STUN message"; return; @@ -212,7 +224,7 @@ void TurnServer::HandleStunMessage(TurnServerConnection* conn, } if (redirect_hook_ != NULL && msg.type() == STUN_ALLOCATE_REQUEST) { - rtc::SocketAddress address; + SocketAddress address; if (redirect_hook_->ShouldRedirect(conn->src(), &address)) { SendErrorResponseWithAlternateServer(conn, &msg, address); return; @@ -231,7 +243,7 @@ void TurnServer::HandleStunMessage(TurnServerConnection* conn, // Ensure the message is authorized; only needed for requests. 
if (IsStunRequestType(msg.type())) { - if (!CheckAuthorization(conn, &msg, data, size, key)) { + if (!CheckAuthorization(conn, &msg, key)) { return; } } @@ -272,8 +284,6 @@ bool TurnServer::GetKey(const StunMessage* msg, std::string* key) { bool TurnServer::CheckAuthorization(TurnServerConnection* conn, StunMessage* msg, - const char* data, - size_t size, absl::string_view key) { // RFC 5389, 10.2.2. RTC_DCHECK(IsStunRequestType(msg->type())); @@ -377,8 +387,8 @@ void TurnServer::HandleAllocateRequest(TurnServerConnection* conn, std::string TurnServer::GenerateNonce(int64_t now) const { // Generate a nonce of the form hex(now + HMAC-MD5(nonce_key_, now)) std::string input(reinterpret_cast(&now), sizeof(now)); - std::string nonce = rtc::hex_encode(input); - nonce += rtc::ComputeHmac(rtc::DIGEST_MD5, nonce_key_, input); + std::string nonce = hex_encode(input); + nonce += ComputeHmac(DIGEST_MD5, nonce_key_, input); RTC_DCHECK(nonce.size() == kNonceSize); return nonce; @@ -393,21 +403,20 @@ bool TurnServer::ValidateNonce(absl::string_view nonce) const { // Decode the timestamp. int64_t then; char* p = reinterpret_cast(&then); - size_t len = rtc::hex_decode(rtc::ArrayView(p, sizeof(then)), - nonce.substr(0, sizeof(then) * 2)); + size_t len = hex_decode(ArrayView(p, sizeof(then)), + nonce.substr(0, sizeof(then) * 2)); if (len != sizeof(then)) { return false; } // Verify the HMAC. if (nonce.substr(sizeof(then) * 2) != - rtc::ComputeHmac(rtc::DIGEST_MD5, nonce_key_, - std::string(p, sizeof(then)))) { + ComputeHmac(DIGEST_MD5, nonce_key_, std::string(p, sizeof(then)))) { return false; } // Validate the timestamp. - return TimeDelta::Millis(rtc::TimeMillis() - then) < kNonceTimeout; + return TimeDelta::Millis(TimeMillis() - then) < kNonceTimeout; } TurnServerAllocation* TurnServer::FindAllocation(TurnServerConnection* conn) { @@ -418,7 +427,7 @@ TurnServerAllocation* TurnServer::FindAllocation(TurnServerConnection* conn) { TurnServerAllocation* TurnServer::CreateAllocation(TurnServerConnection* conn, int proto, absl::string_view key) { - rtc::AsyncPacketSocket* external_socket = + AsyncPacketSocket* external_socket = (external_socket_factory_) ? external_socket_factory_->CreateUdpSocket(external_addr_, 0, 0) : NULL; @@ -453,7 +462,7 @@ void TurnServer::SendErrorResponseWithRealmAndNonce(TurnServerConnection* conn, TurnMessage resp(GetStunErrorResponseTypeOrZero(*msg), msg->transaction_id()); InitErrorResponse(code, reason, &resp); - int64_t timestamp = rtc::TimeMillis(); + int64_t timestamp = TimeMillis(); if (ts_for_next_nonce_) { timestamp = ts_for_next_nonce_; ts_for_next_nonce_ = 0; @@ -468,7 +477,7 @@ void TurnServer::SendErrorResponseWithRealmAndNonce(TurnServerConnection* conn, void TurnServer::SendErrorResponseWithAlternateServer( TurnServerConnection* conn, const StunMessage* msg, - const rtc::SocketAddress& addr) { + const SocketAddress& addr) { TurnMessage resp(GetStunErrorResponseTypeOrZero(*msg), msg->transaction_id()); InitErrorResponse(STUN_ERROR_TRY_ALTERNATE, STUN_ERROR_REASON_TRY_ALTERNATE_SERVER, &resp); @@ -479,7 +488,7 @@ void TurnServer::SendErrorResponseWithAlternateServer( void TurnServer::SendStun(TurnServerConnection* conn, StunMessage* msg) { RTC_DCHECK_RUN_ON(thread_); - rtc::ByteBufferWriter buf; + ByteBufferWriter buf; // Add a SOFTWARE attribute if one is set. 
if (!software_.empty()) { msg->AddAttribute(std::make_unique( @@ -489,37 +498,36 @@ void TurnServer::SendStun(TurnServerConnection* conn, StunMessage* msg) { Send(conn, buf); } -void TurnServer::Send(TurnServerConnection* conn, - const rtc::ByteBufferWriter& buf) { +void TurnServer::Send(TurnServerConnection* conn, const ByteBufferWriter& buf) { RTC_DCHECK_RUN_ON(thread_); - rtc::PacketOptions options; + AsyncSocketPacketOptions options; conn->socket()->SendTo(buf.Data(), buf.Length(), conn->src(), options); } void TurnServer::DestroyAllocation(TurnServerAllocation* allocation) { // Removing the internal socket if the connection is not udp. - rtc::AsyncPacketSocket* socket = allocation->conn()->socket(); + AsyncPacketSocket* socket = allocation->conn()->socket(); InternalSocketMap::iterator iter = server_sockets_.find(socket); // Skip if the socket serving this allocation is UDP, as this will be shared // by all allocations. // Note: We may not find a socket if it's a TCP socket that was closed, and // the allocation is only now timing out. - if (iter != server_sockets_.end() && iter->second != cricket::PROTO_UDP) { + if (iter != server_sockets_.end() && iter->second != webrtc::PROTO_UDP) { DestroyInternalSocket(socket); } allocations_.erase(*(allocation->conn())); } -void TurnServer::DestroyInternalSocket(rtc::AsyncPacketSocket* socket) { +void TurnServer::DestroyInternalSocket(AsyncPacketSocket* socket) { InternalSocketMap::iterator iter = server_sockets_.find(socket); if (iter != server_sockets_.end()) { - rtc::AsyncPacketSocket* socket = iter->first; - socket->UnsubscribeCloseEvent(this); - socket->SignalReadPacket.disconnect(this); + AsyncPacketSocket* server_socket = iter->first; + server_socket->UnsubscribeCloseEvent(this); + server_socket->DeregisterReceivedPacketCallback(); server_sockets_.erase(iter); - std::unique_ptr socket_to_delete = - absl::WrapUnique(socket); + std::unique_ptr socket_to_delete = + absl::WrapUnique(server_socket); // We must destroy the socket async to avoid invalidating the sigslot // callback list iterator inside a sigslot callback. (In other words, // deleting an object from within a callback from that object). 
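The comment above describes the deferred-deletion idiom this patch relies on: an object must never be deleted from inside one of its own callbacks, so ownership is handed to a task posted back to the owning TaskQueueBase and destruction happens only after the current callback has unwound. A minimal sketch of that idiom, assuming only webrtc::TaskQueueBase and webrtc::AsyncPacketSocket as used in this file (the helper name is illustrative and not part of the patch):

#include <memory>
#include <utility>

#include "api/task_queue/task_queue_base.h"
#include "rtc_base/async_packet_socket.h"

// Hands ownership of `socket` to a task on `thread`; the socket is then
// destroyed on that thread after the currently running callback has
// returned, never from inside its own callback.
void DeleteSocketAsync(webrtc::TaskQueueBase* thread,
                       std::unique_ptr<webrtc::AsyncPacketSocket> socket) {
  // The empty task body is intentional: the captured unique_ptr is released
  // when the task object itself is run and destroyed on `thread`.
  thread->PostTask([socket = std::move(socket)] {});
}
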
@@ -527,9 +535,9 @@ void TurnServer::DestroyInternalSocket(rtc::AsyncPacketSocket* socket) { } } -TurnServerConnection::TurnServerConnection(const rtc::SocketAddress& src, +TurnServerConnection::TurnServerConnection(const SocketAddress& src, ProtocolType proto, - rtc::AsyncPacketSocket* socket) + AsyncPacketSocket* socket) : src_(src), dst_(socket->GetRemoteAddress()), proto_(proto), @@ -545,24 +553,27 @@ bool TurnServerConnection::operator<(const TurnServerConnection& c) const { std::string TurnServerConnection::ToString() const { const char* const kProtos[] = {"unknown", "udp", "tcp", "ssltcp"}; - rtc::StringBuilder ost; + StringBuilder ost; ost << src_.ToSensitiveString() << "-" << dst_.ToSensitiveString() << ":" << kProtos[proto_]; return ost.Release(); } TurnServerAllocation::TurnServerAllocation(TurnServer* server, - webrtc::TaskQueueBase* thread, + TaskQueueBase* thread, const TurnServerConnection& conn, - rtc::AsyncPacketSocket* socket, + AsyncPacketSocket* socket, absl::string_view key) : server_(server), thread_(thread), conn_(conn), external_socket_(socket), key_(key) { - external_socket_->SignalReadPacket.connect( - this, &TurnServerAllocation::OnExternalPacket); + external_socket_->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + RTC_DCHECK_RUN_ON(thread_); + OnExternalPacket(socket, packet); + }); } TurnServerAllocation::~TurnServerAllocation() { @@ -572,12 +583,13 @@ TurnServerAllocation::~TurnServerAllocation() { } std::string TurnServerAllocation::ToString() const { - rtc::StringBuilder ost; + StringBuilder ost; ost << "Alloc[" << conn_.ToString() << "]"; return ost.Release(); } void TurnServerAllocation::HandleTurnMessage(const TurnMessage* msg) { + RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(msg != NULL); switch (msg->type()) { case STUN_ALLOCATE_REQUEST: @@ -669,8 +681,8 @@ void TurnServerAllocation::HandleSendIndication(const TurnMessage* msg) { // If a permission exists, send the data on to the peer. if (HasPermission(peer_attr->GetAddress().ipaddr())) { - SendExternal(data_attr->bytes(), data_attr->length(), - peer_attr->GetAddress()); + SendExternal(reinterpret_cast(data_attr->array_view().data()), + data_attr->length(), peer_attr->GetAddress()); } else { RTC_LOG(LS_WARNING) << ToString() << ": Received send indication without permission" @@ -681,6 +693,7 @@ void TurnServerAllocation::HandleSendIndication(const TurnMessage* msg) { void TurnServerAllocation::HandleCreatePermissionRequest( const TurnMessage* msg) { + RTC_DCHECK_RUN_ON(server_->thread_); // Check mandatory attributes. const StunAddressAttribute* peer_attr = msg->GetAddress(STUN_ATTR_XOR_PEER_ADDRESS); @@ -690,7 +703,7 @@ void TurnServerAllocation::HandleCreatePermissionRequest( } if (server_->reject_private_addresses_ && - rtc::IPIsPrivate(peer_attr->GetAddress().ipaddr())) { + webrtc::IPIsPrivate(peer_attr->GetAddress().ipaddr())) { SendErrorResponse(msg, STUN_ERROR_FORBIDDEN, STUN_ERROR_REASON_FORBIDDEN); return; } @@ -708,6 +721,13 @@ void TurnServerAllocation::HandleCreatePermissionRequest( } void TurnServerAllocation::HandleChannelBindRequest(const TurnMessage* msg) { + RTC_DCHECK_RUN_ON(server_->thread_); + if (server_->reject_bind_requests_) { + RTC_LOG(LS_ERROR) << "HandleChannelBindRequest: Rejecting bind requests"; + SendBadRequestResponse(msg); + return; + } + // Check mandatory attributes. 
const StunUInt32Attribute* channel_attr = msg->GetUInt32(STUN_ATTR_CHANNEL_NUMBER); @@ -719,8 +739,9 @@ void TurnServerAllocation::HandleChannelBindRequest(const TurnMessage* msg) { } // Check that channel id is valid. - int channel_id = channel_attr->value() >> 16; - if (channel_id < kMinChannelNumber || channel_id > kMaxChannelNumber) { + uint16_t channel_id = static_cast(channel_attr->value() >> 16); + if (channel_id < kMinTurnChannelNumber || + channel_id > kMaxTurnChannelNumber) { SendBadRequestResponse(msg); return; } @@ -758,14 +779,14 @@ void TurnServerAllocation::HandleChannelBindRequest(const TurnMessage* msg) { SendResponse(&response); } -void TurnServerAllocation::HandleChannelData(const char* data, size_t size) { +void TurnServerAllocation::HandleChannelData(ArrayView payload) { // Extract the channel number from the data. - uint16_t channel_id = rtc::GetBE16(data); + uint16_t channel_id = webrtc::GetBE16(payload.data()); auto channel = FindChannel(channel_id); if (channel != channels_.end()) { // Send the data to the peer address. - SendExternal(data + TURN_CHANNEL_HEADER_SIZE, - size - TURN_CHANNEL_HEADER_SIZE, channel->peer); + SendExternal(payload.data() + TURN_CHANNEL_HEADER_SIZE, + payload.size() - TURN_CHANNEL_HEADER_SIZE, channel->peer); } else { RTC_LOG(LS_WARNING) << ToString() << ": Received channel data for invalid channel, id=" @@ -773,34 +794,30 @@ void TurnServerAllocation::HandleChannelData(const char* data, size_t size) { } } -void TurnServerAllocation::OnExternalPacket( - rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& addr, - const int64_t& /* packet_time_us */) { +void TurnServerAllocation::OnExternalPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet) { RTC_DCHECK(external_socket_.get() == socket); - auto channel = FindChannel(addr); + auto channel = FindChannel(packet.source_address()); if (channel != channels_.end()) { // There is a channel bound to this address. Send as a channel message. - rtc::ByteBufferWriter buf; + ByteBufferWriter buf; buf.WriteUInt16(channel->id); - buf.WriteUInt16(static_cast(size)); - buf.WriteBytes(data, size); + buf.WriteUInt16(static_cast(packet.payload().size())); + buf.Write(ArrayView(packet.payload())); server_->Send(&conn_, buf); } else if (!server_->enable_permission_checks_ || - HasPermission(addr.ipaddr())) { + HasPermission(packet.source_address().ipaddr())) { // No channel, but a permission exists. Send as a data indication. 
TurnMessage msg(TURN_DATA_INDICATION); msg.AddAttribute(std::make_unique( - STUN_ATTR_XOR_PEER_ADDRESS, addr)); - msg.AddAttribute( - std::make_unique(STUN_ATTR_DATA, data, size)); + STUN_ATTR_XOR_PEER_ADDRESS, packet.source_address())); + msg.AddAttribute(std::make_unique( + STUN_ATTR_DATA, packet.payload().data(), packet.payload().size())); server_->SendStun(&conn_, &msg); } else { RTC_LOG(LS_WARNING) << ToString() << ": Received external packet without permission, peer=" - << addr.ToSensitiveString(); + << packet.source_address().ToSensitiveString(); } } @@ -812,11 +829,11 @@ TimeDelta TurnServerAllocation::ComputeLifetime(const TurnMessage& msg) { return kDefaultAllocationTimeout; } -bool TurnServerAllocation::HasPermission(const rtc::IPAddress& addr) { +bool TurnServerAllocation::HasPermission(const IPAddress& addr) { return FindPermission(addr) != perms_.end(); } -void TurnServerAllocation::AddPermission(const rtc::IPAddress& addr) { +void TurnServerAllocation::AddPermission(const IPAddress& addr) { auto perm = FindPermission(addr); if (perm == perms_.end()) { perm = perms_.insert(perms_.end(), {.peer = addr}); @@ -829,7 +846,7 @@ void TurnServerAllocation::AddPermission(const rtc::IPAddress& addr) { } TurnServerAllocation::PermissionList::iterator -TurnServerAllocation::FindPermission(const rtc::IPAddress& addr) { +TurnServerAllocation::FindPermission(const IPAddress& addr) { return absl::c_find_if(perms_, [&](const Permission& p) { return p.peer == addr; }); } @@ -841,7 +858,7 @@ TurnServerAllocation::ChannelList::iterator TurnServerAllocation::FindChannel( } TurnServerAllocation::ChannelList::iterator TurnServerAllocation::FindChannel( - const rtc::SocketAddress& addr) { + const SocketAddress& addr) { return absl::c_find_if(channels_, [&](const Channel& c) { return c.peer == addr; }); } @@ -864,8 +881,8 @@ void TurnServerAllocation::SendErrorResponse(const TurnMessage* req, void TurnServerAllocation::SendExternal(const void* data, size_t size, - const rtc::SocketAddress& peer) { - rtc::PacketOptions options; + const SocketAddress& peer) { + AsyncSocketPacketOptions options; external_socket_->SendTo(data, size, peer, options); } @@ -878,4 +895,4 @@ void TurnServerAllocation::PostDeleteSelf(TimeDelta delay) { delay); } -} // namespace cricket +} // namespace webrtc diff --git a/p2p/base/turn_server.h b/p2p/test/turn_server.h similarity index 69% rename from p2p/base/turn_server.h rename to p2p/test/turn_server.h index e951d089af..0a74d52650 100644 --- a/p2p/base/turn_server.h +++ b/p2p/test/turn_server.h @@ -8,37 +8,41 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef P2P_BASE_TURN_SERVER_H_ -#define P2P_BASE_TURN_SERVER_H_ +#ifndef P2P_TEST_TURN_SERVER_H_ +#define P2P_TEST_TURN_SERVER_H_ +#include +#include #include #include #include -#include #include #include -#include #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/packet_socket_factory.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" +#include "api/transport/stun.h" #include "api/units/time_delta.h" #include "p2p/base/port_interface.h" #include "rtc_base/async_packet_socket.h" +#include "rtc_base/byte_buffer.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/socket.h" #include "rtc_base/socket_address.h" #include "rtc_base/ssl_adapter.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread_annotations.h" -namespace rtc { -class ByteBufferWriter; -class PacketSocketFactory; -} // namespace rtc +namespace webrtc { -namespace cricket { +constexpr int kMinTurnChannelNumber = 0x4000; +constexpr int kMaxTurnChannelNumber = 0x7FFF; -class StunMessage; -class TurnMessage; class TurnServer; // The default server port for TURN, as specified in RFC5766. @@ -47,21 +51,21 @@ const int TURN_SERVER_PORT = 3478; // Encapsulates the client's connection to the server. class TurnServerConnection { public: - TurnServerConnection() : proto_(PROTO_UDP), socket_(NULL) {} - TurnServerConnection(const rtc::SocketAddress& src, + TurnServerConnection() : proto_(webrtc::PROTO_UDP), socket_(NULL) {} + TurnServerConnection(const SocketAddress& src, ProtocolType proto, - rtc::AsyncPacketSocket* socket); - const rtc::SocketAddress& src() const { return src_; } - rtc::AsyncPacketSocket* socket() { return socket_; } + AsyncPacketSocket* socket); + const SocketAddress& src() const { return src_; } + AsyncPacketSocket* socket() { return socket_; } bool operator==(const TurnServerConnection& t) const; bool operator<(const TurnServerConnection& t) const; std::string ToString() const; private: - rtc::SocketAddress src_; - rtc::SocketAddress dst_; - cricket::ProtocolType proto_; - rtc::AsyncPacketSocket* socket_; + SocketAddress src_; + SocketAddress dst_; + ProtocolType proto_; + AsyncPacketSocket* socket_; }; // Encapsulates a TURN allocation. @@ -69,14 +73,14 @@ class TurnServerConnection { // handles TURN messages (via HandleTurnMessage) and channel data messages // (via HandleChannelData) for this allocation when received by the server. // The object informs the server when its lifetime timer expires. 
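HandleChannelData() declared below consumes, and OnExternalPacket() in turn_server.cc above produces, the RFC 5766 ChannelData framing: a 4-byte header carrying a big-endian channel number (kMinTurnChannelNumber..kMaxTurnChannelNumber, i.e. 0x4000..0x7FFF) and a big-endian payload length, followed by the payload bytes. A minimal sketch of that framing, using the same ByteBufferWriter calls that appear in this patch (the helper name is illustrative only):

#include <cstdint>

#include "api/array_view.h"
#include "rtc_base/byte_buffer.h"

// Prepends the 4-byte TURN ChannelData header (channel number and payload
// length, both 16-bit big-endian) to `payload`, mirroring what
// OnExternalPacket() writes and HandleChannelData() parses.
void WriteChannelData(webrtc::ByteBufferWriter& buf,
                      uint16_t channel_id,  // 0x4000..0x7FFF
                      webrtc::ArrayView<const uint8_t> payload) {
  buf.WriteUInt16(channel_id);
  buf.WriteUInt16(static_cast<uint16_t>(payload.size()));
  buf.Write(payload);
}
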
-class TurnServerAllocation : public sigslot::has_slots<> { +class TurnServerAllocation final { public: TurnServerAllocation(TurnServer* server_, - webrtc::TaskQueueBase* thread, + TaskQueueBase* thread, const TurnServerConnection& conn, - rtc::AsyncPacketSocket* server_socket, + AsyncPacketSocket* server_socket, absl::string_view key); - ~TurnServerAllocation() override; + ~TurnServerAllocation(); TurnServerConnection* conn() { return &conn_; } const std::string& key() const { return key_; } @@ -90,22 +94,22 @@ class TurnServerAllocation : public sigslot::has_slots<> { std::string ToString() const; void HandleTurnMessage(const TurnMessage* msg); - void HandleChannelData(const char* data, size_t size); + void HandleChannelData(ArrayView payload); private: struct Channel { - webrtc::ScopedTaskSafety pending_delete; - int id; - rtc::SocketAddress peer; + ScopedTaskSafety pending_delete; + const uint16_t id; + const SocketAddress peer; }; struct Permission { - webrtc::ScopedTaskSafety pending_delete; - rtc::IPAddress peer; + ScopedTaskSafety pending_delete; + IPAddress peer; }; using PermissionList = std::list; using ChannelList = std::list; - void PostDeleteSelf(webrtc::TimeDelta delay); + void PostDeleteSelf(TimeDelta delay); void HandleAllocateRequest(const TurnMessage* msg); void HandleRefreshRequest(const TurnMessage* msg); @@ -113,39 +117,34 @@ class TurnServerAllocation : public sigslot::has_slots<> { void HandleCreatePermissionRequest(const TurnMessage* msg); void HandleChannelBindRequest(const TurnMessage* msg); - void OnExternalPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& addr, - const int64_t& packet_time_us); + void OnExternalPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet); - static webrtc::TimeDelta ComputeLifetime(const TurnMessage& msg); - bool HasPermission(const rtc::IPAddress& addr); - void AddPermission(const rtc::IPAddress& addr); - PermissionList::iterator FindPermission(const rtc::IPAddress& addr); + static TimeDelta ComputeLifetime(const TurnMessage& msg); + bool HasPermission(const IPAddress& addr); + void AddPermission(const IPAddress& addr); + PermissionList::iterator FindPermission(const IPAddress& addr); ChannelList::iterator FindChannel(int channel_id); - ChannelList::iterator FindChannel(const rtc::SocketAddress& addr); + ChannelList::iterator FindChannel(const SocketAddress& addr); void SendResponse(TurnMessage* msg); void SendBadRequestResponse(const TurnMessage* req); void SendErrorResponse(const TurnMessage* req, int code, absl::string_view reason); - void SendExternal(const void* data, - size_t size, - const rtc::SocketAddress& peer); + void SendExternal(const void* data, size_t size, const SocketAddress& peer); TurnServer* const server_; - webrtc::TaskQueueBase* const thread_; + TaskQueueBase* const thread_; TurnServerConnection conn_; - std::unique_ptr external_socket_; + std::unique_ptr external_socket_; std::string key_; std::string transaction_id_; std::string username_; std::string last_nonce_; PermissionList perms_; ChannelList channels_; - webrtc::ScopedTaskSafety safety_; + ScopedTaskSafety safety_; }; // An interface through which the MD5 credential hash can be retrieved. @@ -163,15 +162,15 @@ class TurnAuthInterface { // An interface enables Turn Server to control redirection behavior. 
class TurnRedirectInterface { public: - virtual bool ShouldRedirect(const rtc::SocketAddress& address, - rtc::SocketAddress* out) = 0; + virtual bool ShouldRedirect(const SocketAddress& address, + SocketAddress* out) = 0; virtual ~TurnRedirectInterface() {} }; class StunMessageObserver { public: virtual void ReceivedMessage(const TurnMessage* msg) = 0; - virtual void ReceivedChannelData(const char* data, size_t size) = 0; + virtual void ReceivedChannelData(ArrayView payload) = 0; virtual ~StunMessageObserver() {} }; @@ -184,7 +183,7 @@ class TurnServer : public sigslot::has_slots<> { typedef std::map> AllocationMap; - explicit TurnServer(webrtc::TaskQueueBase* thread); + explicit TurnServer(TaskQueueBase* thread); ~TurnServer() override; // Gets/sets the realm value to use for the server. @@ -234,23 +233,28 @@ class TurnServer : public sigslot::has_slots<> { reject_private_addresses_ = filter; } + void set_reject_bind_requests(bool filter) { + RTC_DCHECK_RUN_ON(thread_); + reject_bind_requests_ = filter; + } + void set_enable_permission_checks(bool enable) { RTC_DCHECK_RUN_ON(thread_); enable_permission_checks_ = enable; } // Starts listening for packets from internal clients. - void AddInternalSocket(rtc::AsyncPacketSocket* socket, ProtocolType proto); + void AddInternalSocket(AsyncPacketSocket* socket, ProtocolType proto); // Starts listening for the connections on this socket. When someone tries // to connect, the connection will be accepted and a new internal socket // will be added. void AddInternalServerSocket( - rtc::Socket* socket, + Socket* socket, ProtocolType proto, - std::unique_ptr ssl_adapter_factory = nullptr); + std::unique_ptr ssl_adapter_factory = nullptr); // Specifies the factory to use for creating external sockets. - void SetExternalSocketFactory(rtc::PacketSocketFactory* factory, - const rtc::SocketAddress& address); + void SetExternalSocketFactory(PacketSocketFactory* factory, + const SocketAddress& address); // For testing only. std::string SetTimestampForNextNonce(int64_t timestamp) { RTC_DCHECK_RUN_ON(thread_); @@ -266,24 +270,20 @@ class TurnServer : public sigslot::has_slots<> { private: // All private member functions and variables should have access restricted to // thread_. But compile-time annotations are missing for members access from - // TurnServerAllocation (via friend declaration), and the On* methods, which - // are called via sigslot. + // TurnServerAllocation (via friend declaration). + std::string GenerateNonce(int64_t now) const RTC_RUN_ON(thread_); - void OnInternalPacket(rtc::AsyncPacketSocket* socket, - const char* data, - size_t size, - const rtc::SocketAddress& address, - const int64_t& packet_time_us); + void OnInternalPacket(AsyncPacketSocket* socket, + const ReceivedIpPacket& packet) RTC_RUN_ON(thread_); - void OnNewInternalConnection(rtc::Socket* socket); + void OnNewInternalConnection(Socket* socket); // Accept connections on this server socket. 
- void AcceptConnection(rtc::Socket* server_socket) RTC_RUN_ON(thread_); - void OnInternalSocketClose(rtc::AsyncPacketSocket* socket, int err); + void AcceptConnection(Socket* server_socket) RTC_RUN_ON(thread_); + void OnInternalSocketClose(AsyncPacketSocket* socket, int err); void HandleStunMessage(TurnServerConnection* conn, - const char* data, - size_t size) RTC_RUN_ON(thread_); + ArrayView payload) RTC_RUN_ON(thread_); void HandleBindingRequest(TurnServerConnection* conn, const StunMessage* msg) RTC_RUN_ON(thread_); void HandleAllocateRequest(TurnServerConnection* conn, @@ -293,8 +293,6 @@ class TurnServer : public sigslot::has_slots<> { bool GetKey(const StunMessage* msg, std::string* key) RTC_RUN_ON(thread_); bool CheckAuthorization(TurnServerConnection* conn, StunMessage* msg, - const char* data, - size_t size, absl::string_view key) RTC_RUN_ON(thread_); bool ValidateNonce(absl::string_view nonce) const RTC_RUN_ON(thread_); @@ -318,25 +316,24 @@ class TurnServer : public sigslot::has_slots<> { void SendErrorResponseWithAlternateServer(TurnServerConnection* conn, const StunMessage* req, - const rtc::SocketAddress& addr) + const SocketAddress& addr) RTC_RUN_ON(thread_); void SendStun(TurnServerConnection* conn, StunMessage* msg); - void Send(TurnServerConnection* conn, const rtc::ByteBufferWriter& buf); + void Send(TurnServerConnection* conn, const ByteBufferWriter& buf); void DestroyAllocation(TurnServerAllocation* allocation) RTC_RUN_ON(thread_); - void DestroyInternalSocket(rtc::AsyncPacketSocket* socket) - RTC_RUN_ON(thread_); + void DestroyInternalSocket(AsyncPacketSocket* socket) RTC_RUN_ON(thread_); - typedef std::map InternalSocketMap; + typedef std::map InternalSocketMap; struct ServerSocketInfo { ProtocolType proto; // If non-null, used to wrap accepted sockets. - std::unique_ptr ssl_adapter_factory; + std::unique_ptr ssl_adapter_factory; }; - typedef std::map ServerSocketMap; + typedef std::map ServerSocketMap; - webrtc::TaskQueueBase* const thread_; + TaskQueueBase* const thread_; const std::string nonce_key_; std::string realm_ RTC_GUARDED_BY(thread_); std::string software_ RTC_GUARDED_BY(thread_); @@ -345,15 +342,16 @@ class TurnServer : public sigslot::has_slots<> { // otu - one-time-use. Server will respond with 438 if it's // sees the same nonce in next transaction. bool enable_otu_nonce_ RTC_GUARDED_BY(thread_); - bool reject_private_addresses_ = false; + bool reject_private_addresses_ RTC_GUARDED_BY(thread_) = false; + bool reject_bind_requests_ RTC_GUARDED_BY(thread_) = false; // Check for permission when receiving an external packet. bool enable_permission_checks_ = true; InternalSocketMap server_sockets_ RTC_GUARDED_BY(thread_); ServerSocketMap server_listen_sockets_ RTC_GUARDED_BY(thread_); - std::unique_ptr external_socket_factory_ + std::unique_ptr external_socket_factory_ RTC_GUARDED_BY(thread_); - rtc::SocketAddress external_addr_ RTC_GUARDED_BY(thread_); + SocketAddress external_addr_ RTC_GUARDED_BY(thread_); AllocationMap allocations_ RTC_GUARDED_BY(thread_); @@ -368,6 +366,22 @@ class TurnServer : public sigslot::has_slots<> { friend class TurnServerAllocation; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::kMaxTurnChannelNumber; +using ::webrtc::kMinTurnChannelNumber; +using ::webrtc::StunMessageObserver; +using ::webrtc::TURN_SERVER_PORT; +using ::webrtc::TurnAuthInterface; +using ::webrtc::TurnRedirectInterface; +using ::webrtc::TurnServer; +using ::webrtc::TurnServerAllocation; +using ::webrtc::TurnServerConnection; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES -#endif // P2P_BASE_TURN_SERVER_H_ +#endif // P2P_TEST_TURN_SERVER_H_ diff --git a/p2p/base/turn_server_unittest.cc b/p2p/test/turn_server_unittest.cc similarity index 77% rename from p2p/base/turn_server_unittest.cc rename to p2p/test/turn_server_unittest.cc index e534f6598c..2ed8b668d5 100644 --- a/p2p/base/turn_server_unittest.cc +++ b/p2p/test/turn_server_unittest.cc @@ -8,16 +8,22 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "p2p/base/turn_server.h" +#include "p2p/test/turn_server.h" + +#include #include "p2p/base/basic_packet_socket_factory.h" +#include "p2p/base/port_interface.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/thread.h" #include "rtc_base/virtual_socket_server.h" #include "test/gtest.h" // NOTE: This is a work in progress. Currently this file only has tests for // TurnServerConnection, a primitive class used by TurnServer. -namespace cricket { +namespace webrtc { class TurnServerConnectionTest : public ::testing::Test { public: @@ -39,16 +45,16 @@ class TurnServerConnectionTest : public ::testing::Test { } protected: - rtc::VirtualSocketServer vss_; - rtc::AutoSocketServerThread thread_; - rtc::BasicPacketSocketFactory socket_factory_; + VirtualSocketServer vss_; + AutoSocketServerThread thread_; + BasicPacketSocketFactory socket_factory_; }; TEST_F(TurnServerConnectionTest, ComparisonOperators) { - std::unique_ptr socket1( - socket_factory_.CreateUdpSocket(rtc::SocketAddress("1.1.1.1", 1), 0, 0)); - std::unique_ptr socket2( - socket_factory_.CreateUdpSocket(rtc::SocketAddress("2.2.2.2", 2), 0, 0)); + std::unique_ptr socket1( + socket_factory_.CreateUdpSocket(SocketAddress("1.1.1.1", 1), 0, 0)); + std::unique_ptr socket2( + socket_factory_.CreateUdpSocket(SocketAddress("2.2.2.2", 2), 0, 0)); TurnServerConnection connection1(socket2->GetLocalAddress(), PROTO_UDP, socket1.get()); TurnServerConnection connection2(socket2->GetLocalAddress(), PROTO_UDP, @@ -62,4 +68,4 @@ TEST_F(TurnServerConnectionTest, ComparisonOperators) { ExpectNotEqual(connection1, connection4); } -} // namespace cricket +} // namespace webrtc diff --git a/pc/BUILD.gn b/pc/BUILD.gn index b2e448e96a..d6b0acbe56 100644 --- a/pc/BUILD.gn +++ b/pc/BUILD.gn @@ -16,7 +16,6 @@ # - rtc_pc # - session_description # - simulcast_description -# - peerconnection # - sdp_utils # - media_stream_observer # - video_track_source @@ -35,17 +34,22 @@ if (is_android) { import("//build/config/android/rules.gni") } +if (rtc_build_libsrtp) { + import("//third_party/libsrtp/options.gni") + assert(rtc_build_ssl == libsrtp_build_boringssl, + "Mismatch ssl build settings detected") + assert(rtc_ssl_root == libsrtp_ssl_root, "Mismatch in ssl root detected") +} + group("pc") { deps = [ ":rtc_pc" ] } -rtc_library("proxy") { +rtc_source_set("proxy") { visibility = [ ":*" ] - sources = [ - "proxy.cc", - "proxy.h", - ] + sources = [ "proxy.h" ] deps = [ + "../api:make_ref_counted", "../api:scoped_refptr", "../api/task_queue", "../rtc_base:event_tracer", @@ -71,24 +75,24 @@ 
rtc_source_set("channel") { ":rtp_transport_internal", ":session_description", "../api:libjingle_peerconnection_api", + "../api:rtp_headers", "../api:rtp_parameters", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api:sequence_checker", "../api/crypto:options", + "../api/task_queue", "../api/task_queue:pending_task_safety_flag", - "../api/units:timestamp", "../call:rtp_interfaces", "../call:rtp_receiver", "../media:codec", "../media:media_channel", "../media:media_channel_impl", "../media:rid_description", - "../media:rtc_media_base", "../media:rtp_utils", "../media:stream_params", "../modules/rtp_rtcp:rtp_rtcp_format", - "../p2p:rtc_p2p", + "../p2p:dtls_transport_internal", "../rtc_base:async_packet_socket", "../rtc_base:checks", "../rtc_base:copy_on_write_buffer", @@ -102,11 +106,8 @@ rtc_source_set("channel") { "../rtc_base:unique_id_generator", "../rtc_base/containers:flat_set", "../rtc_base/network:sent_packet", - "../rtc_base/third_party/sigslot", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -115,12 +116,14 @@ rtc_source_set("channel_interface") { sources = [ "channel_interface.h" ] deps = [ ":rtp_transport_internal", + ":session_description", "../api:libjingle_peerconnection_api", "../api:rtp_parameters", "../media:media_channel", - "../media:rtc_media_base", + "../media:rtc_media_config", + "../media:stream_params", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_source_set("dtls_srtp_transport") { @@ -132,19 +135,21 @@ rtc_source_set("dtls_srtp_transport") { deps = [ ":srtp_transport", "../api:dtls_transport_interface", - "../api:libjingle_peerconnection_api", - "../api:rtc_error", - "../p2p:rtc_p2p", + "../api:field_trials_view", + "../p2p:dtls_transport_internal", + "../p2p:packet_transport_internal", "../rtc_base:buffer", "../rtc_base:checks", "../rtc_base:logging", - "../rtc_base:ssl", + "../rtc_base:ssl_adapter", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("dtls_transport") { - visibility = [ ":*" ] + visibility = [ + ":*", + "../test/*", + ] sources = [ "dtls_transport.cc", "dtls_transport.h", @@ -153,19 +158,17 @@ rtc_source_set("dtls_transport") { ":ice_transport", "../api:dtls_transport_interface", "../api:ice_transport_interface", - "../api:libjingle_peerconnection_api", "../api:make_ref_counted", "../api:scoped_refptr", "../api:sequence_checker", - "../p2p:rtc_p2p", + "../p2p:dtls_transport_internal", "../rtc_base:checks", "../rtc_base:logging", "../rtc_base:macromagic", - "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:threading", "../rtc_base/synchronization:mutex", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("external_hmac") { @@ -191,7 +194,6 @@ rtc_source_set("ice_transport") { ] deps = [ "../api:ice_transport_interface", - "../api:libjingle_peerconnection_api", "../api:sequence_checker", "../rtc_base:checks", "../rtc_base:macromagic", @@ -213,29 +215,34 @@ rtc_source_set("jsep_transport") { ":rtp_transport_internal", ":sctp_transport", ":session_description", - ":srtp_filter", ":srtp_transport", ":transport_stats", "../api:array_view", "../api:candidate", "../api:ice_transport_interface", "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:rtc_error", "../api:scoped_refptr", 
"../api:sequence_checker", "../api/transport:datagram_transport_interface", + "../call:payload_type_picker", "../media:rtc_data_sctp_transport_internal", - "../p2p:rtc_p2p", + "../p2p:dtls_transport_internal", + "../p2p:ice_transport_internal", + "../p2p:p2p_constants", + "../p2p:p2p_transport_channel", + "../p2p:transport_description", "../rtc_base:checks", "../rtc_base:copy_on_write_buffer", "../rtc_base:event_tracer", "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:stringutils", "../rtc_base:threading", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("jsep_transport_collection") { @@ -249,13 +256,13 @@ rtc_source_set("jsep_transport_collection") { ":session_description", "../api:libjingle_peerconnection_api", "../api:sequence_checker", - "../p2p:rtc_p2p", + "../p2p:p2p_constants", "../rtc_base:checks", "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base/system:no_unique_address", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [] } rtc_source_set("jsep_transport_controller") { @@ -268,7 +275,6 @@ rtc_source_set("jsep_transport_controller") { "jsep_transport_controller.h", ] deps = [ - ":channel", ":dtls_srtp_transport", ":dtls_transport", ":jsep_transport", @@ -290,12 +296,27 @@ rtc_source_set("jsep_transport_controller") { "../api:scoped_refptr", "../api:sequence_checker", "../api/crypto:options", + "../api/environment", "../api/rtc_event_log", "../api/transport:datagram_transport_interface", "../api/transport:enums", "../api/transport:sctp_transport_factory_interface", + "../call:payload_type", + "../call:payload_type_picker", + "../media:codec", "../media:rtc_data_sctp_transport_internal", - "../p2p:rtc_p2p", + "../modules/rtp_rtcp:rtp_rtcp_format", + "../p2p:dtls_transport", + "../p2p:dtls_transport_factory", + "../p2p:dtls_transport_internal", + "../p2p:ice_transport_internal", + "../p2p:p2p_constants", + "../p2p:p2p_transport_channel", + "../p2p:packet_transport_internal", + "../p2p:port", + "../p2p:port_allocator", + "../p2p:transport_description", + "../p2p:transport_info", "../rtc_base:callback_list", "../rtc_base:checks", "../rtc_base:copy_on_write_buffer", @@ -303,13 +324,21 @@ rtc_source_set("jsep_transport_controller") { "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:threading", "../rtc_base/third_party/sigslot", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_source_set("media_factory") { + sources = [ "media_factory.h" ] + deps = [ + "../api/environment", + "../call:call_interfaces", + "../media:media_engine", ] } @@ -320,7 +349,8 @@ rtc_source_set("media_session") { "media_session.h", ] deps = [ - ":jsep_transport", + ":codec_vendor", + ":media_options", ":media_protocol_names", ":rtp_media_utils", ":session_description", @@ -328,29 +358,100 @@ rtc_source_set("media_session") { ":used_ids", "../api:field_trials_view", "../api:libjingle_peerconnection_api", + "../api:rtc_error", "../api:rtp_parameters", "../api:rtp_transceiver_direction", - "../api/crypto:options", "../media:codec", + "../media:codec_list", "../media:media_constants", + "../media:media_engine", "../media:rid_description", - "../media:rtc_data_sctp_transport_internal", - "../media:rtc_media_base", - 
"../media:rtc_sdp_video_format_utils", "../media:stream_params", - "../p2p:rtc_p2p", + "../p2p:ice_credentials_iterator", + "../p2p:p2p_constants", + "../p2p:transport_description", + "../p2p:transport_description_factory", + "../p2p:transport_info", "../rtc_base:checks", "../rtc_base:logging", - "../rtc_base:ssl", - "../rtc_base:stringutils", "../rtc_base:unique_id_generator", "../rtc_base/memory:always_valid_pointer", - "../rtc_base/third_party/base64", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ +} + +rtc_library("media_options") { + visibility = [ ":*" ] + sources = [ + "media_options.cc", + "media_options.h", + ] + deps = [ + ":simulcast_description", + "../api:rtp_parameters", + "../api:rtp_transceiver_direction", + "../api/crypto:options", + "../media:codec", + "../media:rid_description", + "../p2p:transport_description", + "../p2p:transport_description_factory", + "../rtc_base:checks", + "//third_party/abseil-cpp/absl/algorithm:container", + ] +} + +rtc_library("codec_vendor") { + visibility = [ ":*" ] + sources = [ + "codec_vendor.cc", + "codec_vendor.h", + ] + deps = [ + ":media_options", + ":rtp_media_utils", + ":session_description", + ":typed_codec_vendor", + ":used_ids", + "../api:field_trials_view", + "../api:rtc_error", + "../api:rtp_parameters", + "../api:rtp_transceiver_direction", + "../api/video_codecs:video_codecs_api", + "../call:payload_type", + "../media:codec", + "../media:codec_list", + "../media:media_constants", + "../media:media_engine", + "../media:rtc_sdp_video_format_utils", + "../rtc_base:checks", + "../rtc_base:logging", + "../rtc_base:stringutils", + "../rtc_base:unique_id_generator", "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:str_format", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("typed_codec_vendor") { + visibility = [ ":*" ] + sources = [ + "typed_codec_vendor.cc", + "typed_codec_vendor.h", + ] + deps = [ + "../api:field_trials_view", + "../api:rtp_parameters", + "../api/audio_codecs:audio_codecs_api", + "../media:codec", + "../media:codec_list", + "../media:media_constants", + "../media:media_engine", + "../rtc_base:logging", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -377,7 +478,13 @@ rtc_source_set("peer_connection_factory_proxy") { sources = [ "peer_connection_factory_proxy.h" ] deps = [ ":proxy", + "../api:audio_options_api", "../api:libjingle_peerconnection_api", + "../api:media_stream_interface", + "../api:rtc_error", + "../api:rtp_parameters", + "../api:scoped_refptr", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -386,7 +493,21 @@ rtc_source_set("peer_connection_proxy") { sources = [ "peer_connection_proxy.h" ] deps = [ ":proxy", + "../api:candidate", + "../api:data_channel_event_observer_interface", + "../api:dtls_transport_interface", + "../api:libjingle_logging_api", "../api:libjingle_peerconnection_api", + "../api:media_stream_interface", + "../api:rtc_error", + "../api:rtc_stats_api", + "../api:rtp_parameters", + "../api:scoped_refptr", + "../api/adaptation:resource_adaptation_api", + "../api/transport:bandwidth_estimation_settings", + "../api/transport:bitrate_settings", + "../api/transport:network_control", + "../rtc_base:threading", ] } @@ -419,7 +540,14 @@ rtc_source_set("rtp_receiver_proxy") { sources 
= [ "rtp_receiver_proxy.h" ] deps = [ ":proxy", + "../api:dtls_transport_interface", + "../api:frame_transformer_interface", "../api:libjingle_peerconnection_api", + "../api:media_stream_interface", + "../api:rtp_parameters", + "../api:scoped_refptr", + "../api/crypto:frame_decryptor_interface", + "../api/transport/rtp:rtp_source", ] } @@ -429,7 +557,6 @@ rtc_source_set("rtp_sender_proxy") { deps = [ ":proxy", "../api:libjingle_peerconnection_api", - "../api:rtp_sender_interface", ] } @@ -443,14 +570,15 @@ rtc_source_set("rtp_transport") { ":rtp_transport_internal", ":session_description", "../api:array_view", + "../api:field_trials_view", + "../api/task_queue", "../api/task_queue:pending_task_safety_flag", "../api/units:timestamp", "../call:rtp_receiver", - "../call:video_stream_api", - "../media:rtc_media_base", + "../call:video_receive_stream_api", "../media:rtp_utils", "../modules/rtp_rtcp:rtp_rtcp_format", - "../p2p:rtc_p2p", + "../p2p:packet_transport_internal", "../rtc_base:async_packet_socket", "../rtc_base:checks", "../rtc_base:copy_on_write_buffer", @@ -458,12 +586,11 @@ rtc_source_set("rtp_transport") { "../rtc_base:logging", "../rtc_base:network_route", "../rtc_base:socket", + "../rtc_base/containers:flat_set", + "../rtc_base/network:ecn_marking", + "../rtc_base/network:received_packet", "../rtc_base/network:sent_packet", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_source_set("rtp_transport_internal") { @@ -475,10 +602,15 @@ rtc_source_set("rtp_transport_internal") { deps = [ ":session_description", "../call:rtp_receiver", - "../p2p:rtc_p2p", + "../p2p:ice_transport_internal", "../rtc_base:callback_list", + "../rtc_base:copy_on_write_buffer", "../rtc_base:network_route", - "../rtc_base:ssl", + "../rtc_base:socket", + "../rtc_base:ssl_adapter", + "../rtc_base/network:sent_packet", + "../rtc_base/third_party/sigslot", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] } @@ -492,18 +624,19 @@ rtc_source_set("sctp_transport") { ":dtls_transport", "../api:dtls_transport_interface", "../api:libjingle_peerconnection_api", + "../api:priority", + "../api:rtc_error", "../api:scoped_refptr", "../api:sequence_checker", "../api/transport:datagram_transport_interface", "../media:rtc_data_sctp_transport_internal", - "../p2p:rtc_p2p", + "../p2p:dtls_transport_internal", "../rtc_base:checks", + "../rtc_base:copy_on_write_buffer", "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:threading", - "../rtc_base/third_party/sigslot", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("sctp_utils") { @@ -521,38 +654,13 @@ rtc_source_set("sctp_utils") { "../api/transport:datagram_transport_interface", "../media:media_channel", "../media:rtc_data_sctp_transport_internal", - "../media:rtc_media_base", "../net/dcsctp/public:types", "../rtc_base:byte_buffer", "../rtc_base:copy_on_write_buffer", "../rtc_base:logging", - "../rtc_base:ssl", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] -} -rtc_source_set("srtp_filter") { - visibility = [ ":*" ] - sources = [ - "srtp_filter.cc", - "srtp_filter.h", - ] - deps = [ - ":session_description", - "../api:array_view", - "../api:libjingle_peerconnection_api", - "../api:sequence_checker", - "../rtc_base:buffer", - "../rtc_base:logging", - "../rtc_base:ssl", - "../rtc_base:zero_memory", - "../rtc_base/third_party/base64", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - 
"//third_party/abseil-cpp/absl/types:optional", + "../rtc_base:ssl_adapter", ] } - rtc_source_set("srtp_session") { visibility = [ ":*" ] sources = [ @@ -563,22 +671,21 @@ rtc_source_set("srtp_session") { ":external_hmac", "../api:array_view", "../api:field_trials_view", - "../api:scoped_refptr", "../api:sequence_checker", "../modules/rtp_rtcp:rtp_rtcp_format", + "../rtc_base:buffer", "../rtc_base:byte_order", "../rtc_base:checks", + "../rtc_base:copy_on_write_buffer", + "../rtc_base:ip_address", "../rtc_base:logging", "../rtc_base:macromagic", - "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:stringutils", "../rtc_base:timeutils", "../rtc_base/synchronization:mutex", "../system_wrappers:metrics", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/strings:strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (rtc_build_libsrtp) { deps += [ "//third_party/libsrtp" ] @@ -594,12 +701,11 @@ rtc_source_set("srtp_transport") { ":rtp_transport", ":srtp_session", "../api:field_trials_view", - "../api:libjingle_peerconnection_api", - "../api:rtc_error", - "../media:rtc_media_base", + "../api/units:timestamp", + "../call:rtp_receiver", "../media:rtp_utils", "../modules/rtp_rtcp:rtp_rtcp_format", - "../p2p:rtc_p2p", + "../p2p:packet_transport_internal", "../rtc_base:async_packet_socket", "../rtc_base:buffer", "../rtc_base:checks", @@ -607,14 +713,7 @@ rtc_source_set("srtp_transport") { "../rtc_base:event_tracer", "../rtc_base:logging", "../rtc_base:network_route", - "../rtc_base:safe_conversions", - "../rtc_base:ssl", - "../rtc_base:zero_memory", - "../rtc_base/third_party/base64", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "../rtc_base/network:received_packet", ] } @@ -626,9 +725,9 @@ rtc_source_set("transport_stats") { ] deps = [ "../api:dtls_transport_interface", - "../api:libjingle_peerconnection_api", - "../p2p:rtc_p2p", - "../rtc_base:ssl", + "../p2p:ice_transport_internal", + "../rtc_base:ssl_adapter", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -638,7 +737,6 @@ rtc_source_set("used_ids") { deps = [ "../api:rtp_parameters", "../media:codec", - "../media:rtc_media_base", "../rtc_base:checks", "../rtc_base:logging", ] @@ -660,7 +758,6 @@ rtc_source_set("video_track_source_proxy") { "../api/video:video_frame", "../rtc_base:threading", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("session_description") { @@ -681,18 +778,16 @@ rtc_source_set("session_description") { "../media:media_channel", "../media:media_constants", "../media:rid_description", - "../media:rtc_media_base", "../media:stream_params", - "../p2p:rtc_p2p", + "../p2p:transport_description", + "../p2p:transport_info", "../rtc_base:checks", "../rtc_base:socket_address", "../rtc_base:stringutils", "../rtc_base/system:rtc_export", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/memory:memory", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -703,10 +798,8 @@ rtc_source_set("simulcast_description") { ] deps = [ "../rtc_base:checks", - "../rtc_base:socket_address", - "../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_source_set("rtc_pc") { @@ -723,143 +816,7 @@ 
rtc_library("media_protocol_names") { "media_protocol_names.cc", "media_protocol_names.h", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] -} - -rtc_source_set("peerconnection") { - # TODO(bugs.webrtc.org/13661): Reduce visibility if possible - visibility = [ "*" ] # Used by Chromium and others - cflags = [] - sources = [] - - deps = [ - ":audio_rtp_receiver", - ":audio_track", - ":connection_context", - ":data_channel_controller", - ":data_channel_utils", - ":dtmf_sender", - ":ice_server_parsing", - ":jitter_buffer_delay", - ":jsep_ice_candidate", - ":jsep_session_description", - ":legacy_stats_collector", - ":legacy_stats_collector_interface", - ":local_audio_source", - ":media_protocol_names", - ":media_stream", - ":media_stream_observer", - ":peer_connection", - ":peer_connection_factory", - ":peer_connection_internal", - ":peer_connection_message_handler", - ":proxy", - ":remote_audio_source", - ":rtc_stats_collector", - ":rtc_stats_traversal", - ":rtp_parameters_conversion", - ":rtp_receiver", - ":rtp_sender", - ":rtp_transceiver", - ":rtp_transmission_manager", - ":sctp_data_channel", - ":sdp_offer_answer", - ":sdp_serializer", - ":sdp_state_provider", - ":sdp_utils", - ":session_description", - ":simulcast_description", - ":stream_collection", - ":track_media_info_map", - ":transceiver_list", - ":usage_pattern", - ":video_rtp_receiver", - ":video_track", - ":video_track_source", - ":webrtc_sdp", - ":webrtc_session_description_factory", - "../api:array_view", - "../api:async_dns_resolver", - "../api:audio_options_api", - "../api:call_api", - "../api:callfactory_api", - "../api:fec_controller_api", - "../api:field_trials_view", - "../api:frame_transformer_interface", - "../api:ice_transport_factory", - "../api:libjingle_logging_api", - "../api:libjingle_peerconnection_api", - "../api:media_stream_interface", - "../api:network_state_predictor_api", - "../api:packet_socket_factory", - "../api:priority", - "../api:rtc_error", - "../api:rtc_event_log_output_file", - "../api:rtc_stats_api", - "../api:rtp_parameters", - "../api:rtp_transceiver_direction", - "../api:scoped_refptr", - "../api:sequence_checker", - "../api/adaptation:resource_adaptation_api", - "../api/audio_codecs:audio_codecs_api", - "../api/crypto:frame_decryptor_interface", - "../api/crypto:options", - "../api/neteq:neteq_api", - "../api/rtc_event_log", - "../api/task_queue", - "../api/task_queue:pending_task_safety_flag", - "../api/transport:bitrate_settings", - "../api/transport:datagram_transport_interface", - "../api/transport:enums", - "../api/transport:field_trial_based_config", - "../api/transport:network_control", - "../api/transport:sctp_transport_factory_interface", - "../api/units:data_rate", - "../api/video:builtin_video_bitrate_allocator_factory", - "../api/video:video_bitrate_allocator_factory", - "../api/video:video_codec_constants", - "../api/video:video_frame", - "../api/video:video_rtp_headers", - "../api/video_codecs:video_codecs_api", - "../call:call_interfaces", - "../call:rtp_interfaces", - "../call:rtp_sender", - "../common_video", - "../logging:ice_log", - "../media:rtc_data_sctp_transport_internal", - "../media:rtc_media_base", - "../media:rtc_media_config", - "../modules/audio_processing:audio_processing_statistics", - "../modules/rtp_rtcp:rtp_rtcp_format", - "../p2p:rtc_p2p", - "../rtc_base:callback_list", - "../rtc_base:checks", - "../rtc_base:ip_address", - "../rtc_base:network_constants", - "../rtc_base:rtc_operations_chain", - "../rtc_base:safe_minmax", - 
"../rtc_base:socket_address", - "../rtc_base:threading", - "../rtc_base:weak_ptr", - "../rtc_base/experiments:field_trial_parser", - "../rtc_base/network:sent_packet", - "../rtc_base/synchronization:mutex", - "../rtc_base/system:file_wrapper", - "../rtc_base/system:no_unique_address", - "../rtc_base/system:rtc_export", - "../rtc_base/system:unused", - "../rtc_base/third_party/base64", - "../rtc_base/third_party/sigslot", - "../stats", - "../system_wrappers", - "../system_wrappers:field_trial", - "../system_wrappers:metrics", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] + deps = [ "//third_party/abseil-cpp/absl/strings:string_view" ] } rtc_library("sctp_data_channel") { @@ -873,27 +830,26 @@ rtc_library("sctp_data_channel") { ":proxy", ":sctp_utils", "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:priority", "../api:rtc_error", "../api:scoped_refptr", "../api:sequence_checker", "../api/task_queue:pending_task_safety_flag", "../api/transport:datagram_transport_interface", - "../media:media_channel", "../media:rtc_data_sctp_transport_internal", - "../media:rtc_media_base", "../rtc_base:checks", "../rtc_base:copy_on_write_buffer", "../rtc_base:logging", "../rtc_base:macromagic", - "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:threading", "../rtc_base:weak_ptr", "../rtc_base/containers:flat_set", "../rtc_base/system:no_unique_address", "../rtc_base/system:unused", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("data_channel_utils") { @@ -906,40 +862,37 @@ rtc_library("data_channel_utils") { ] deps = [ "../api:libjingle_peerconnection_api", - "../media:rtc_media_base", + "../media:media_engine", "../rtc_base:checks", ] } rtc_library("connection_context") { - visibility = [ ":*" ] sources = [ "connection_context.cc", "connection_context.h", ] deps = [ - "../api:callfactory_api", - "../api:field_trials_view", + ":media_factory", "../api:libjingle_peerconnection_api", - "../api:media_stream_interface", + "../api:packet_socket_factory", "../api:refcountedbase", "../api:scoped_refptr", "../api:sequence_checker", - "../api/neteq:neteq_api", - "../api/transport:field_trial_based_config", + "../api/environment", "../api/transport:sctp_transport_factory_interface", + "../media:media_engine", "../media:rtc_data_sctp_transport_factory", - "../media:rtc_media_base", - "../p2p:rtc_p2p", + "../p2p:basic_packet_socket_factory", "../rtc_base:checks", + "../rtc_base:crypto_random", "../rtc_base:macromagic", "../rtc_base:network", - "../rtc_base:rtc_certificate_generator", "../rtc_base:socket_factory", "../rtc_base:socket_server", - "../rtc_base:ssl", "../rtc_base:threading", "../rtc_base:timeutils", + "../rtc_base:unique_id_generator", "../rtc_base/memory:always_valid_pointer", ] } @@ -955,25 +908,25 @@ rtc_source_set("data_channel_controller") { ":peer_connection_internal", ":sctp_data_channel", ":sctp_utils", + "../api:array_view", + "../api:data_channel_event_observer_interface", "../api:libjingle_peerconnection_api", + "../api:priority", "../api:rtc_error", "../api:scoped_refptr", "../api:sequence_checker", "../api/task_queue:pending_task_safety_flag", "../api/transport:datagram_transport_interface", - "../media:media_channel", - "../media:rtc_media_base", + "../media:rtc_data_sctp_transport_internal", "../rtc_base:checks", 
"../rtc_base:copy_on_write_buffer", "../rtc_base:logging", "../rtc_base:macromagic", - "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:threading", + "../rtc_base:timeutils", "../rtc_base:weak_ptr", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -981,17 +934,32 @@ rtc_source_set("peer_connection_internal") { visibility = [ ":*" ] sources = [ "peer_connection_internal.h" ] deps = [ + ":data_channel_utils", ":jsep_transport_controller", ":peer_connection_message_handler", ":rtp_transceiver", ":rtp_transmission_manager", - ":sctp_data_channel", + ":session_description", + ":transport_stats", + ":usage_pattern", + "../api:candidate", + "../api:field_trials_view", "../api:libjingle_peerconnection_api", + "../api:media_stream_interface", + "../api:rtc_error", + "../api:rtp_parameters", + "../api:scoped_refptr", + "../api/audio:audio_device", + "../api/crypto:options", "../call:call_interfaces", - "../modules/audio_device", + "../call:payload_type_picker", + "../p2p:port", + "../p2p:port_allocator", + "../rtc_base:ssl", + "../rtc_base:ssl_adapter", + "../rtc_base:threading", + "//third_party/abseil-cpp/absl/strings:string_view", ] - - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("rtc_stats_collector") { @@ -1004,7 +972,6 @@ rtc_source_set("rtc_stats_collector") { "rtc_stats_collector.h", ] deps = [ - ":channel", ":channel_interface", ":data_channel_utils", ":peer_connection_internal", @@ -1022,43 +989,51 @@ rtc_source_set("rtc_stats_collector") { "../api:candidate", "../api:dtls_transport_interface", "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:media_stream_interface", + "../api:ref_count", "../api:rtc_stats_api", "../api:rtp_parameters", + "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api:sequence_checker", - "../api/task_queue:task_queue", + "../api/audio:audio_device", + "../api/audio:audio_processing_statistics", + "../api/environment", + "../api/transport:enums", "../api/units:time_delta", + "../api/units:timestamp", "../api/video:video_rtp_headers", "../api/video_codecs:scalability_mode", "../call:call_interfaces", - "../common_video:common_video", + "../common_video", "../media:media_channel", - "../media:media_channel_impl", - "../media:rtc_media_base", - "../modules/audio_device", - "../modules/audio_processing:audio_processing_statistics", + "../media:stream_params", "../modules/rtp_rtcp:rtp_rtcp_format", - "../p2p:rtc_p2p", + "../p2p:connection_info", + "../p2p:ice_transport_internal", + "../p2p:p2p_constants", + "../p2p:port", + "../p2p:transport_description", "../rtc_base:checks", "../rtc_base:event_tracer", "../rtc_base:ip_address", "../rtc_base:logging", + "../rtc_base:macromagic", "../rtc_base:network_constants", "../rtc_base:refcount", "../rtc_base:rtc_event", "../rtc_base:socket_address", "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:stringutils", "../rtc_base:threading", "../rtc_base:timeutils", "../rtc_base/containers:flat_set", "../rtc_base/synchronization:mutex", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:bind_front", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -1075,20 +1050,41 @@ rtc_source_set("rtc_stats_traversal") { ] } +rtc_source_set("sdp_munging_detector") { + visibility = [ ":*" ] + sources = [ + "sdp_munging_detector.cc", + "sdp_munging_detector.h", 
+ ] + deps = [ + ":session_description", + "../api:libjingle_peerconnection_api", + "../api:rtp_parameters", + "../media:codec", + "../media:media_constants", + "../media:stream_params", + "../p2p:transport_description", + "../p2p:transport_info", + "../rtc_base:checks", + "../rtc_base:logging", + "//third_party/abseil-cpp/absl/algorithm:container", + ] +} rtc_source_set("sdp_offer_answer") { visibility = [ ":*" ] sources = [ - "sdp_offer_answer.cc", # TODO: Make separate target when not circular - "sdp_offer_answer.h", # dependent on peerconnection.h + "sdp_offer_answer.cc", + "sdp_offer_answer.h", ] deps = [ - ":channel", ":channel_interface", + ":codec_vendor", ":connection_context", ":data_channel_controller", ":dtls_transport", ":jsep_transport_controller", ":legacy_stats_collector", + ":media_options", ":media_session", ":media_stream", ":media_stream_observer", @@ -1097,11 +1093,11 @@ rtc_source_set("sdp_offer_answer") { ":peer_connection_message_handler", ":rtp_media_utils", ":rtp_receiver", - ":rtp_receiver_proxy", ":rtp_sender", ":rtp_sender_proxy", ":rtp_transceiver", ":rtp_transmission_manager", + ":sdp_munging_detector", ":sdp_state_provider", ":session_description", ":simulcast_description", @@ -1112,42 +1108,50 @@ rtc_source_set("sdp_offer_answer") { "../api:array_view", "../api:audio_options_api", "../api:candidate", - "../api:dtls_transport_interface", - "../api:field_trials_view", "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:media_stream_interface", "../api:rtc_error", "../api:rtp_parameters", - "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api:sequence_checker", "../api/crypto:options", "../api/video:builtin_video_bitrate_allocator_factory", "../api/video:video_bitrate_allocator_factory", + "../api/video:video_codec_constants", + "../call:payload_type", "../media:codec", "../media:media_channel", + "../media:media_constants", + "../media:media_engine", "../media:rid_description", - "../media:rtc_media_base", "../media:stream_params", - "../p2p:rtc_p2p", + "../p2p:ice_transport_internal", + "../p2p:p2p_constants", + "../p2p:p2p_transport_channel", + "../p2p:port_allocator", + "../p2p:transport_description", + "../p2p:transport_description_factory", + "../p2p:transport_info", "../rtc_base:checks", + "../rtc_base:crypto_random", "../rtc_base:event_tracer", "../rtc_base:logging", "../rtc_base:macromagic", + "../rtc_base:rtc_certificate_generator", "../rtc_base:rtc_operations_chain", "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:stringutils", "../rtc_base:threading", "../rtc_base:unique_id_generator", "../rtc_base:weak_ptr", "../system_wrappers:metrics", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/memory:memory", - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } rtc_source_set("jsep_ice_candidate") { @@ -1164,6 +1168,7 @@ rtc_source_set("local_audio_source") { ] deps = [ "../api:audio_options_api", + "../api:make_ref_counted", "../api:media_stream_interface", "../api:scoped_refptr", ] @@ -1175,8 +1180,8 @@ rtc_source_set("peer_connection") { "peer_connection.h", ] deps = [ - ":channel", ":channel_interface", + ":codec_vendor", ":connection_context", ":data_channel_controller", ":data_channel_utils", @@ -1202,83 +1207,97 @@ 
rtc_source_set("peer_connection") { ":transceiver_list", ":transport_stats", ":usage_pattern", - ":webrtc_session_description_factory", "../api:async_dns_resolver", "../api:candidate", + "../api:data_channel_event_observer_interface", "../api:dtls_transport_interface", "../api:field_trials_view", "../api:ice_transport_interface", "../api:libjingle_logging_api", "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:media_stream_interface", "../api:rtc_error", "../api:rtc_stats_api", "../api:rtp_parameters", - "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api:sequence_checker", "../api:turn_customizer", "../api/adaptation:resource_adaptation_api", + "../api/audio:audio_device", "../api/crypto:options", - "../api/rtc_event_log", + "../api/environment", "../api/task_queue:pending_task_safety_flag", + "../api/transport:bandwidth_estimation_settings", "../api/transport:bitrate_settings", "../api/transport:datagram_transport_interface", "../api/transport:enums", + "../api/transport:network_control", + "../api/units:time_delta", "../api/video:video_codec_constants", "../call:call_interfaces", - "../media:media_channel", + "../call:payload_type", + "../call:payload_type_picker", + "../media:codec", + "../media:media_engine", "../media:rid_description", - "../media:rtc_media_base", "../media:rtc_media_config", "../media:stream_params", "../modules/rtp_rtcp:rtp_rtcp_format", - "../p2p:rtc_p2p", + "../p2p:connection_info", + "../p2p:dtls_transport_internal", + "../p2p:ice_transport_internal", + "../p2p:p2p_constants", + "../p2p:p2p_transport_channel", + "../p2p:port", + "../p2p:port_allocator", + "../p2p:transport_description", + "../p2p:transport_info", "../rtc_base:checks", "../rtc_base:copy_on_write_buffer", + "../rtc_base:crypto_random", "../rtc_base:event_tracer", "../rtc_base:ip_address", "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:net_helper", + "../rtc_base:net_helpers", "../rtc_base:network", "../rtc_base:network_constants", "../rtc_base:socket_address", "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:stringutils", "../rtc_base:threading", "../rtc_base:unique_id_generator", "../rtc_base:weak_ptr", "../system_wrappers:metrics", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } -rtc_source_set("sdp_serializer") { +rtc_source_set("simulcast_sdp_serializer") { visibility = [ ":*" ] sources = [ - "sdp_serializer.cc", - "sdp_serializer.h", + "simulcast_sdp_serializer.cc", + "simulcast_sdp_serializer.h", ] deps = [ ":session_description", ":simulcast_description", "../api:rtc_error", + "../api:rtp_parameters", + "../media:codec", "../media:rid_description", - "../media:rtc_media_base", "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:checks", + "../rtc_base:logging", "../rtc_base:stringutils", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } rtc_source_set("sdp_utils") { @@ -1289,7 +1308,7 @@ rtc_source_set("sdp_utils") { deps = [ ":session_description", "../api:libjingle_peerconnection_api", - "../p2p:rtc_p2p", + "../p2p:transport_info", "../rtc_base:checks", "../rtc_base/system:rtc_export", ] 
@@ -1316,16 +1335,17 @@ rtc_source_set("legacy_stats_collector") { "../api:libjingle_peerconnection_api", "../api:media_stream_interface", "../api:rtp_parameters", - "../api:rtp_sender_interface", "../api:scoped_refptr", "../api:sequence_checker", + "../api/audio:audio_processing_statistics", "../api/audio_codecs:audio_codecs_api", "../api/video:video_rtp_headers", "../call:call_interfaces", "../media:media_channel", - "../media:rtc_media_base", - "../modules/audio_processing:audio_processing_statistics", - "../p2p:rtc_p2p", + "../p2p:connection_info", + "../p2p:ice_transport_internal", + "../p2p:p2p_constants", + "../p2p:port", "../rtc_base:checks", "../rtc_base:event_tracer", "../rtc_base:ip_address", @@ -1334,13 +1354,12 @@ rtc_source_set("legacy_stats_collector") { "../rtc_base:network_constants", "../rtc_base:socket_address", "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:stringutils", "../rtc_base:threading", "../rtc_base:timeutils", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } rtc_source_set("stream_collection") { @@ -1362,13 +1381,11 @@ rtc_source_set("track_media_info_map") { "../api:rtp_parameters", "../api:scoped_refptr", "../media:media_channel", - "../media:rtc_media_base", "../media:stream_params", "../rtc_base:checks", "../rtc_base:refcount", "../rtc_base:threading", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("webrtc_sdp") { # TODO(bugs.webrtc.org/13661): Reduce visibility if possible @@ -1383,9 +1400,9 @@ rtc_source_set("webrtc_sdp") { deps = [ ":media_protocol_names", ":media_session", - ":sdp_serializer", ":session_description", ":simulcast_description", + ":simulcast_sdp_serializer", "../api:candidate", "../api:libjingle_peerconnection_api", "../api:rtc_error", @@ -1395,25 +1412,29 @@ rtc_source_set("webrtc_sdp") { "../media:media_constants", "../media:rid_description", "../media:rtc_data_sctp_transport_internal", - "../media:rtc_media_base", "../media:rtp_utils", "../media:stream_params", - "../p2p:rtc_p2p", + "../p2p:ice_transport_internal", + "../p2p:p2p_constants", + "../p2p:port", + "../p2p:port_interface", + "../p2p:transport_description", + "../p2p:transport_info", "../rtc_base:checks", + "../rtc_base:crypto_random", "../rtc_base:ip_address", "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:net_helper", + "../rtc_base:net_helpers", "../rtc_base:network_constants", "../rtc_base:socket_address", "../rtc_base:ssl", "../rtc_base:stringutils", "../rtc_base/system:rtc_export", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } rtc_source_set("webrtc_session_description_factory") { @@ -1423,28 +1444,30 @@ rtc_source_set("webrtc_session_description_factory") { "webrtc_session_description_factory.h", ] deps = [ + ":codec_vendor", ":connection_context", + ":media_options", ":media_session", ":sdp_state_provider", ":session_description", + "../api:field_trials_view", "../api:libjingle_peerconnection_api", "../api:rtc_error", "../api:scoped_refptr", "../api:sequence_checker", "../api/task_queue", - "../p2p:rtc_p2p", + "../p2p:transport_description_factory", "../rtc_base:checks", "../rtc_base:logging", "../rtc_base:rtc_certificate_generator", "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:stringutils", 
"../rtc_base:unique_id_generator", "../rtc_base:weak_ptr", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings", ] } @@ -1457,9 +1480,12 @@ rtc_library("ice_server_parsing") { "ice_server_parsing.h", ] deps = [ + "../api:candidate", "../api:libjingle_peerconnection_api", "../api:rtc_error", - "../p2p:rtc_p2p", + "../p2p:port", + "../p2p:port_allocator", + "../p2p:port_interface", "../rtc_base:checks", "../rtc_base:ip_address", "../rtc_base:logging", @@ -1467,6 +1493,7 @@ rtc_library("ice_server_parsing") { "../rtc_base:socket_address", "../rtc_base:stringutils", "../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -1478,40 +1505,49 @@ rtc_library("media_stream_observer") { deps = [ "../api:media_stream_interface", "../api:scoped_refptr", + "//third_party/abseil-cpp/absl/algorithm:container", ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } rtc_source_set("peer_connection_factory") { # TODO(bugs.webrtc.org/13661): Reduce visibility if possible visibility = [ "*" ] # Known to be used externally + allow_poison = [ "environment_construction" ] sources = [ "peer_connection_factory.cc", "peer_connection_factory.h", ] deps = [ + ":audio_track", + ":codec_vendor", + ":connection_context", + ":ice_server_parsing", ":local_audio_source", + ":media_factory", + ":media_stream", ":media_stream_proxy", ":media_stream_track_proxy", ":peer_connection", ":peer_connection_factory_proxy", ":peer_connection_proxy", + ":rtp_parameters_conversion", + ":video_track", "../api:audio_options_api", - "../api:callfactory_api", "../api:fec_controller_api", "../api:field_trials_view", "../api:ice_transport_interface", "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:media_stream_interface", "../api:network_state_predictor_api", - "../api:packet_socket_factory", "../api:rtc_error", "../api:rtp_parameters", "../api:scoped_refptr", "../api:sequence_checker", + "../api/environment", + "../api/environment:environment_factory", "../api/metronome", "../api/neteq:neteq_api", - "../api/rtc_event_log:rtc_event_log", - "../api/task_queue:task_queue", + "../api/rtc_event_log:rtc_event_log_factory_interface", "../api/transport:bitrate_settings", "../api/transport:network_control", "../api/transport:sctp_transport_factory_interface", @@ -1519,14 +1555,14 @@ rtc_source_set("peer_connection_factory") { "../call:call_interfaces", "../call:rtp_interfaces", "../call:rtp_sender", - "../media:rtc_media_base", - "../p2p:rtc_p2p", - "../pc:audio_track", - "../pc:connection_context", - "../pc:media_stream", - "../pc:rtp_parameters_conversion", - "../pc:session_description", - "../pc:video_track", + "../media:codec", + "../media:media_engine", + "../p2p:basic_async_resolver_factory", + "../p2p:basic_port_allocator", + "../p2p:default_ice_transport_factory", + "../p2p:ice_transport_internal", + "../p2p:port", + "../p2p:port_allocator", "../rtc_base:checks", "../rtc_base:logging", "../rtc_base:macromagic", @@ -1535,8 +1571,9 @@ rtc_source_set("peer_connection_factory") { "../rtc_base:threading", "../rtc_base/experiments:field_trial_parser", "../rtc_base/system:file_wrapper", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings:strings" ] } rtc_library("peer_connection_message_handler") 
{ @@ -1554,6 +1591,7 @@ rtc_library("peer_connection_message_handler") { "../api:sequence_checker", "../api/task_queue", "../api/task_queue:pending_task_safety_flag", + "../api/units:time_delta", "../rtc_base:checks", ] } @@ -1580,10 +1618,10 @@ rtc_library("rtp_transceiver") { deps = [ ":channel", ":channel_interface", + ":codec_vendor", ":connection_context", ":proxy", ":rtp_media_utils", - ":rtp_parameters_conversion", ":rtp_receiver", ":rtp_receiver_proxy", ":rtp_sender", @@ -1592,11 +1630,9 @@ rtc_library("rtp_transceiver") { ":session_description", "../api:array_view", "../api:audio_options_api", - "../api:field_trials_view", "../api:libjingle_peerconnection_api", "../api:rtc_error", "../api:rtp_parameters", - "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api:sequence_checker", @@ -1605,23 +1641,17 @@ rtc_library("rtp_transceiver") { "../api/task_queue", "../api/task_queue:pending_task_safety_flag", "../api/video:video_bitrate_allocator_factory", + "../api/video_codecs:scalability_mode", "../media:codec", "../media:media_channel", - "../media:media_channel_impl", - "../media:media_constants", - "../media:rtc_media_base", + "../media:media_engine", "../media:rtc_media_config", "../rtc_base:checks", "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:threading", - "../rtc_base/third_party/sigslot", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -1633,8 +1663,9 @@ rtc_library("rtp_transmission_manager") { ] deps = [ ":audio_rtp_receiver", - ":channel", ":channel_interface", + ":codec_vendor", + ":connection_context", ":legacy_stats_collector_interface", ":rtp_receiver", ":rtp_receiver_proxy", @@ -1645,27 +1676,24 @@ rtc_library("rtp_transmission_manager") { ":usage_pattern", ":video_rtp_receiver", "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:media_stream_interface", "../api:rtc_error", "../api:rtp_parameters", - "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api:sequence_checker", + "../api/environment", "../media:media_channel", - "../media:rtc_media_base", + "../media:media_engine", "../rtc_base:checks", + "../rtc_base:crypto_random", "../rtc_base:logging", "../rtc_base:macromagic", - "../rtc_base:ssl", "../rtc_base:threading", + "../rtc_base:unique_id_generator", "../rtc_base:weak_ptr", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("transceiver_list") { @@ -1679,14 +1707,12 @@ rtc_library("transceiver_list") { "../api:libjingle_peerconnection_api", "../api:rtc_error", "../api:rtp_parameters", - "../api:rtp_sender_interface", "../api:scoped_refptr", "../api:sequence_checker", "../rtc_base:checks", "../rtc_base:macromagic", "../rtc_base/system:no_unique_address", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("rtp_receiver") { @@ -1698,25 +1724,13 @@ rtc_library("rtp_receiver") { deps = [ ":media_stream", ":media_stream_proxy", - ":video_track_source", "../api:dtls_transport_interface", "../api:libjingle_peerconnection_api", "../api:media_stream_interface", - "../api:rtp_parameters", "../api:scoped_refptr", - "../api/crypto:frame_decryptor_interface", - 
"../api/video:video_frame", "../media:media_channel", - "../media:rtc_media_base", - "../rtc_base:checks", - "../rtc_base:logging", "../rtc_base:threading", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("audio_rtp_receiver") { @@ -1728,13 +1742,13 @@ rtc_library("audio_rtp_receiver") { deps = [ ":audio_track", ":jitter_buffer_delay", - ":media_stream", ":media_stream_track_proxy", ":remote_audio_source", ":rtp_receiver", "../api:dtls_transport_interface", "../api:frame_transformer_interface", "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:media_stream_interface", "../api:rtp_parameters", "../api:scoped_refptr", @@ -1743,17 +1757,11 @@ rtc_library("audio_rtp_receiver") { "../api/task_queue:pending_task_safety_flag", "../api/transport/rtp:rtp_source", "../media:media_channel", - "../media:rtc_media_base", "../rtc_base:checks", "../rtc_base:macromagic", "../rtc_base:threading", "../rtc_base/system:no_unique_address", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("video_rtp_receiver") { @@ -1764,7 +1772,6 @@ rtc_library("video_rtp_receiver") { ] deps = [ ":jitter_buffer_delay", - ":media_stream", ":media_stream_track_proxy", ":rtp_receiver", ":video_rtp_track_source", @@ -1772,6 +1779,7 @@ rtc_library("video_rtp_receiver") { "../api:dtls_transport_interface", "../api:frame_transformer_interface", "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:media_stream_interface", "../api:rtp_parameters", "../api:scoped_refptr", @@ -1781,18 +1789,12 @@ rtc_library("video_rtp_receiver") { "../api/video:recordable_encoded_frame", "../api/video:video_frame", "../media:media_channel", - "../media:rtc_media_base", "../rtc_base:checks", "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:threading", "../rtc_base/system:no_unique_address", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("video_rtp_track_source") { @@ -1821,11 +1823,13 @@ rtc_library("audio_track") { "audio_track.h", ] deps = [ + "../api:make_ref_counted", "../api:media_stream_interface", "../api:scoped_refptr", "../api:sequence_checker", "../rtc_base:checks", "../rtc_base/system:no_unique_address", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -1837,17 +1841,18 @@ rtc_library("video_track") { ] deps = [ ":video_track_source_proxy", + "../api:make_ref_counted", "../api:media_stream_interface", "../api:scoped_refptr", "../api:sequence_checker", "../api/video:video_frame", - "../media:rtc_media_base", + "../media:video_source_base", "../rtc_base:checks", "../rtc_base:macromagic", "../rtc_base:threading", "../rtc_base/system:no_unique_address", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("sdp_state_provider") { @@ -1870,7 +1875,6 @@ rtc_library("jitter_buffer_delay") { "../rtc_base:safe_minmax", "../rtc_base/system:no_unique_address", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("remote_audio_source") { @@ -1880,25 +1884,18 @@ rtc_library("remote_audio_source") { "remote_audio_source.h", ] deps = [ - ":channel", "../api:call_api", 
"../api:media_stream_interface", "../api:scoped_refptr", "../api:sequence_checker", "../api/task_queue", "../media:media_channel", - "../media:rtc_media_base", "../rtc_base:checks", "../rtc_base:event_tracer", "../rtc_base:logging", - "../rtc_base:safe_conversions", "../rtc_base:stringutils", "../rtc_base/synchronization:mutex", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -1914,31 +1911,32 @@ rtc_library("rtp_sender") { "../api:audio_options_api", "../api:dtls_transport_interface", "../api:dtmf_sender_interface", + "../api:field_trials_view", "../api:frame_transformer_interface", "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:media_stream_interface", "../api:priority", "../api:rtc_error", "../api:rtp_parameters", - "../api:rtp_sender_interface", "../api:scoped_refptr", "../api:sequence_checker", "../api/crypto:frame_encryptor_interface", + "../api/environment", + "../api/video_codecs:video_codecs_api", + "../media:audio_source", + "../media:codec", "../media:media_channel", - "../media:rtc_media_base", + "../media:media_engine", "../rtc_base:checks", + "../rtc_base:crypto_random", "../rtc_base:event_tracer", "../rtc_base:logging", "../rtc_base:macromagic", - "../rtc_base:ssl", + "../rtc_base:rtc_event", "../rtc_base:threading", "../rtc_base/synchronization:mutex", - "../rtc_base/third_party/sigslot", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -1950,23 +1948,12 @@ rtc_library("rtp_parameters_conversion") { ] deps = [ ":session_description", - "../api:array_view", - "../api:libjingle_peerconnection_api", "../api:rtc_error", "../api:rtp_parameters", "../media:codec", "../media:media_constants", - "../media:rtc_media_base", - "../media:rtp_utils", "../media:stream_params", - "../rtc_base:checks", "../rtc_base:logging", - "../rtc_base:stringutils", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -1979,7 +1966,7 @@ rtc_library("dtmf_sender") { deps = [ ":proxy", "../api:dtmf_sender_interface", - "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:scoped_refptr", "../api:sequence_checker", "../api/task_queue", @@ -1989,12 +1976,6 @@ rtc_library("dtmf_sender") { "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:refcount", - "../rtc_base/third_party/sigslot", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -2005,16 +1986,11 @@ rtc_library("media_stream") { "media_stream.h", ] deps = [ - "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:media_stream_interface", "../api:scoped_refptr", "../rtc_base:checks", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("video_track_source") { @@ -2028,13 +2004,11 @@ rtc_library("video_track_source") { "../api/video:recordable_encoded_frame", "../api/video:video_frame", "../media:media_channel", - "../media:rtc_media_base", "../rtc_base:checks", "../rtc_base:macromagic", "../rtc_base/system:no_unique_address", 
"../rtc_base/system:rtc_export", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("legacy_stats_collector_interface") { @@ -2046,23 +2020,42 @@ rtc_source_set("legacy_stats_collector_interface") { ] } +# This target contains the libraries that are required in order to get an +# usable peerconnection-using binary. rtc_source_set("libjingle_peerconnection") { # TODO(bugs.webrtc.org/13661): Reduce visibility if possible visibility = [ "*" ] # Used by Chrome and others + allow_poison = [ "environment_construction" ] deps = [ - ":peerconnection", + ":jsep_session_description", + ":peer_connection_factory", + ":rtc_stats_collector", "../api:libjingle_peerconnection_api", + "../stats", ] } -if (rtc_include_tests && !build_with_chromium) { +if (rtc_include_tests && !build_with_chromium) { + rtc_source_set("fake_codec_lookup_helper") { + testonly = true + sources = [ "test/fake_codec_lookup_helper.h" ] + deps = [ + ":codec_vendor", + ":connection_context", + "../call:payload_type", + "../rtc_base:checks", + ] + } + rtc_test("rtc_pc_unittests") { testonly = true sources = [ "audio_rtp_receiver_unittest.cc", "channel_unittest.cc", + "codec_vendor_unittest.cc", + "dtls_srtp_transport_integrationtest.cc", "dtls_srtp_transport_unittest.cc", "dtls_transport_unittest.cc", "ice_transport_unittest.cc", @@ -2073,7 +2066,6 @@ if (rtc_include_tests && !build_with_chromium) { "rtp_transport_unittest.cc", "sctp_transport_unittest.cc", "session_description_unittest.cc", - "srtp_filter_unittest.cc", "srtp_session_unittest.cc", "srtp_transport_unittest.cc", "test/rtp_transport_test_util.h", @@ -2082,8 +2074,6 @@ if (rtc_include_tests && !build_with_chromium) { "video_rtp_receiver_unittest.cc", ] - include_dirs = [ "//third_party/libsrtp/srtp" ] - if (is_win) { libs = [ "strmiids.lib" ] } @@ -2091,79 +2081,113 @@ if (rtc_include_tests && !build_with_chromium) { deps = [ ":audio_rtp_receiver", ":channel", + ":codec_vendor", ":dtls_srtp_transport", ":dtls_transport", ":ice_transport", ":jsep_transport", ":jsep_transport_controller", - ":libjingle_peerconnection", + ":media_options", ":media_protocol_names", ":media_session", ":pc_test_utils", - ":peerconnection", - ":rtc_pc", ":rtcp_mux_filter", ":rtp_media_utils", + ":rtp_parameters_conversion", ":rtp_transport", ":rtp_transport_internal", ":sctp_transport", ":session_description", - ":srtp_filter", + ":simulcast_description", ":srtp_session", ":srtp_transport", + ":transport_stats", ":used_ids", ":video_rtp_receiver", "../api:array_view", "../api:audio_options_api", "../api:candidate", "../api:dtls_transport_interface", + "../api:field_trials", + "../api:field_trials_view", "../api:ice_transport_factory", + "../api:ice_transport_interface", "../api:libjingle_peerconnection_api", "../api:make_ref_counted", - "../api:make_ref_counted", + "../api:media_stream_interface", + "../api:priority", "../api:rtc_error", + "../api:rtc_error_matchers", "../api:rtp_headers", "../api:rtp_parameters", + "../api:rtp_transceiver_direction", "../api:scoped_refptr", "../api:sequence_checker", + "../api/audio_codecs:audio_codecs_api", + "../api/crypto:options", + "../api/environment", + "../api/environment:environment_factory", + "../api/task_queue", "../api/task_queue:pending_task_safety_flag", - "../api/task_queue:task_queue", "../api/transport:datagram_transport_interface", "../api/transport:enums", - "../api/video:builtin_video_bitrate_allocator_factory", + "../api/units:time_delta", "../api/video:recordable_encoded_frame", + 
"../api/video:video_frame", "../api/video/test:mock_recordable_encoded_frame", + "../api/video_codecs:video_codecs_api", + "../call:fake_payload_type_suggester", + "../call:payload_type", + "../call:payload_type_picker", "../call:rtp_interfaces", "../call:rtp_receiver", "../media:codec", + "../media:codec_list", "../media:media_channel", "../media:media_constants", "../media:rid_description", "../media:rtc_data_sctp_transport_internal", - "../media:rtc_media_base", "../media:rtc_media_tests_utils", + "../media:stream_params", "../modules/rtp_rtcp:rtp_rtcp_format", + "../p2p:basic_packet_socket_factory", + "../p2p:candidate_pair_interface", + "../p2p:dtls_transport", + "../p2p:dtls_transport_factory", + "../p2p:dtls_transport_internal", "../p2p:fake_ice_transport", "../p2p:fake_port_allocator", + "../p2p:ice_transport_internal", + "../p2p:p2p_constants", "../p2p:p2p_test_utils", - "../p2p:rtc_p2p", + "../p2p:packet_transport_internal", + "../p2p:port_allocator", + "../p2p:transport_description", + "../p2p:transport_description_factory", + "../p2p:transport_info", "../rtc_base:async_packet_socket", "../rtc_base:buffer", "../rtc_base:byte_order", "../rtc_base:checks", "../rtc_base:copy_on_write_buffer", - "../rtc_base:gunit_helpers", "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:net_helper", + "../rtc_base:network_route", "../rtc_base:rtc_base_tests_utils", + "../rtc_base:socket", "../rtc_base:socket_address", + "../rtc_base:socket_server", "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:stringutils", "../rtc_base:task_queue_for_test", "../rtc_base:threading", "../rtc_base:unique_id_generator", "../rtc_base/containers:flat_set", + "../rtc_base/network:ecn_marking", + "../rtc_base/network:received_packet", + "../rtc_base/network:sent_packet", "../rtc_base/third_party/sigslot", "../system_wrappers:metrics", "../test:explicit_key_value_config", @@ -2171,51 +2195,56 @@ if (rtc_include_tests && !build_with_chromium) { "../test:scoped_key_value_config", "../test:test_main", "../test:test_support", - ] - absl_deps = [ + "../test:wait_until", "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] + include_dirs = [] if (rtc_build_libsrtp) { + include_dirs += [ "//third_party/libsrtp/srtp" ] deps += [ "//third_party/libsrtp" ] + if (!rtc_build_ssl) { + configs += [ "..:external_ssl_library" ] + } } if (is_android) { use_default_launcher = false - deps += [ - "//build/android/gtest_apk:native_test_instrumentation_test_runner_java", - "//testing/android/native_test:native_test_java", - "//testing/android/native_test:native_test_support", - ] + deps += [ "//build/android/gtest_apk:native_test_instrumentation_test_runner_java" ] } } rtc_library("peerconnection_perf_tests") { testonly = true - sources = [ "peer_connection_rampup_tests.cc" ] + sources = [ + "peer_connection_callsetup_perf_tests.cc", + "peer_connection_rampup_tests.cc", + ] deps = [ ":pc_test_utils", ":peer_connection", - ":peerconnection", ":peerconnection_wrapper", + ":sdp_utils", "../api:audio_options_api", - "../api:create_peerconnection_factory", + "../api:enable_media_with_defaults", + "../api:field_trials", + "../api:field_trials_view", "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:media_stream_interface", "../api:rtc_error", + 
"../api:rtc_error_matchers", "../api:rtc_stats_api", "../api:scoped_refptr", - "../api/audio:audio_mixer_api", - "../api/audio_codecs:audio_codecs_api", "../api/audio_codecs:builtin_audio_decoder_factory", "../api/audio_codecs:builtin_audio_encoder_factory", "../api/test/metrics:global_metrics_logger_and_exporter", "../api/test/metrics:metric", - "../api/video_codecs:video_codecs_api", + "../api/units:time_delta", "../api/video_codecs:video_decoder_factory_template", "../api/video_codecs:video_decoder_factory_template_dav1d_adapter", "../api/video_codecs:video_decoder_factory_template_libvpx_vp8_adapter", @@ -2226,23 +2255,25 @@ if (rtc_include_tests && !build_with_chromium) { "../api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter", "../api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter", "../api/video_codecs:video_encoder_factory_template_open_h264_adapter", - "../media:rtc_media_tests_utils", - "../modules/audio_device:audio_device_api", - "../modules/audio_processing:api", + "../p2p:basic_packet_socket_factory", "../p2p:p2p_test_utils", - "../p2p:rtc_p2p", + "../p2p:port_interface", + "../p2p:transport_description", "../rtc_base:checks", - "../rtc_base:gunit_helpers", + "../rtc_base:crypto_random", + "../rtc_base:logging", "../rtc_base:rtc_base_tests_utils", "../rtc_base:socket_address", "../rtc_base:socket_factory", - "../rtc_base:ssl", + "../rtc_base:stringutils", "../rtc_base:task_queue_for_test", "../rtc_base:threading", + "../rtc_base:timeutils", "../system_wrappers", "../test:test_support", + "../test:wait_until", + "//third_party/abseil-cpp/absl/strings", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("peerconnection_wrapper") { @@ -2253,22 +2284,23 @@ if (rtc_include_tests && !build_with_chromium) { ] deps = [ ":pc_test_utils", - ":peerconnection", + ":peer_connection", + ":peer_connection_proxy", ":sdp_utils", "../api:function_view", "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:media_stream_interface", "../api:rtc_error", + "../api:rtc_error_matchers", "../api:rtc_stats_api", "../api:rtp_parameters", - "../api:rtp_sender_interface", "../api:scoped_refptr", "../rtc_base:checks", - "../rtc_base:gunit_helpers", "../rtc_base:logging", "../test:test_support", + "../test:wait_until", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_test("slow_peer_connection_unittests") { @@ -2279,11 +2311,14 @@ if (rtc_include_tests && !build_with_chromium) { ":pc_test_utils", "../api:dtmf_sender_interface", "../api:libjingle_peerconnection_api", + "../api:rtc_error_matchers", "../api:scoped_refptr", "../api/units:time_delta", + "../p2p:connection", "../p2p:p2p_server_utils", "../p2p:p2p_test_utils", - "../p2p:rtc_p2p", + "../p2p:port_allocator", + "../p2p:port_interface", "../rtc_base:gunit_helpers", "../rtc_base:logging", "../rtc_base:rtc_base_tests_utils", @@ -2291,16 +2326,17 @@ if (rtc_include_tests && !build_with_chromium) { "../rtc_base:ssl", "../test:test_main", "../test:test_support", + "../test:wait_until", "../test/time_controller:time_controller", "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } rtc_test("peerconnection_unittests") { testonly = true sources = [ + "congestion_control_integrationtest.cc", "data_channel_integrationtest.cc", "data_channel_unittest.cc", "dtmf_sender_unittest.cc", @@ -2328,9 +2364,8 @@ if (rtc_include_tests && !build_with_chromium) { 
"peer_connection_rtp_unittest.cc", "peer_connection_signaling_unittest.cc", "peer_connection_simulcast_unittest.cc", + "peer_connection_stability_integrationtest.cc", "peer_connection_svc_integrationtest.cc", - "peer_connection_wrapper.cc", - "peer_connection_wrapper.h", "proxy_unittest.cc", "rtc_stats_collector_unittest.cc", "rtc_stats_integrationtest.cc", @@ -2341,7 +2376,7 @@ if (rtc_include_tests && !build_with_chromium) { "rtp_transceiver_unittest.cc", "sctp_utils_unittest.cc", "sdp_offer_answer_unittest.cc", - "sdp_serializer_unittest.cc", + "simulcast_sdp_serializer_unittest.cc", "test/fake_audio_capture_module_unittest.cc", "test/test_sdp_strings.h", "track_media_info_map_unittest.cc", @@ -2355,10 +2390,13 @@ if (rtc_include_tests && !build_with_chromium) { ":audio_track", ":channel", ":channel_interface", - ":data_channel_controller_unittest", + ":codec_vendor", + ":connection_context", ":dtls_srtp_transport", ":dtls_transport", ":dtmf_sender", + ":enable_fake_media", + ":fake_codec_lookup_helper", ":ice_server_parsing", ":integration_test_helpers", ":jitter_buffer_delay", @@ -2367,15 +2405,20 @@ if (rtc_include_tests && !build_with_chromium) { ":media_protocol_names", ":media_session", ":media_stream", + ":pc_test_utils", ":peer_connection", ":peer_connection_factory", + ":peer_connection_factory_proxy", + ":peer_connection_internal", ":peer_connection_proxy", + ":peerconnection_wrapper", ":proxy", ":rtc_stats_collector", ":rtc_stats_traversal", ":rtp_media_utils", ":rtp_parameters_conversion", ":rtp_receiver", + ":rtp_receiver_proxy", ":rtp_sender", ":rtp_sender_proxy", ":rtp_transceiver", @@ -2383,10 +2426,10 @@ if (rtc_include_tests && !build_with_chromium) { ":sctp_data_channel", ":sctp_transport", ":sctp_utils", - ":sdp_serializer", ":sdp_utils", ":session_description", ":simulcast_description", + ":simulcast_sdp_serializer", ":stream_collection", ":track_media_info_map", ":transport_stats", @@ -2401,11 +2444,12 @@ if (rtc_include_tests && !build_with_chromium) { "../api:candidate", "../api:create_peerconnection_factory", "../api:dtls_transport_interface", - "../api:dtmf_sender_interface", + "../api:enable_media", + "../api:enable_media_with_defaults", "../api:fake_frame_decryptor", "../api:fake_frame_encryptor", + "../api:field_trials", "../api:field_trials_view", - "../api:function_view", "../api:ice_transport_interface", "../api:libjingle_logging_api", "../api:libjingle_peerconnection_api", @@ -2417,20 +2461,40 @@ if (rtc_include_tests && !build_with_chromium) { "../api:mock_video_track", "../api:packet_socket_factory", "../api:priority", + "../api:ref_count", "../api:rtc_error", + "../api:rtc_error_matchers", + "../api:rtc_stats_api", + "../api:rtp_parameters", "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", + "../api:sequence_checker", "../api/adaptation:resource_adaptation_api", + "../api/audio:audio_device", "../api/audio:audio_mixer_api", + "../api/audio:audio_processing", + "../api/audio:audio_processing_statistics", + "../api/audio_codecs:audio_codecs_api", + "../api/audio_codecs:builtin_audio_decoder_factory", + "../api/audio_codecs:builtin_audio_encoder_factory", + "../api/audio_codecs:opus_audio_decoder_factory", + "../api/audio_codecs:opus_audio_encoder_factory", + "../api/audio_codecs/L16:audio_decoder_L16", + "../api/audio_codecs/L16:audio_encoder_L16", "../api/crypto:frame_decryptor_interface", "../api/crypto:frame_encryptor_interface", "../api/crypto:options", + "../api/environment", + 
"../api/environment:environment_factory", "../api/rtc_event_log", "../api/rtc_event_log:rtc_event_log_factory", "../api/task_queue", "../api/task_queue:default_task_queue_factory", + "../api/task_queue:pending_task_safety_flag", + "../api/transport:bitrate_settings", "../api/transport:datagram_transport_interface", + "../api/transport:enums", "../api/transport:field_trial_based_config", "../api/transport:sctp_transport_factory_interface", "../api/transport/rtp:rtp_source", @@ -2444,60 +2508,102 @@ if (rtc_include_tests && !build_with_chromium) { "../api/video:video_codec_constants", "../api/video:video_frame", "../api/video:video_rtp_headers", + "../api/video_codecs:builtin_video_decoder_factory", + "../api/video_codecs:builtin_video_encoder_factory", "../api/video_codecs:scalability_mode", + "../api/video_codecs:video_codecs_api", + "../api/video_codecs:video_decoder_factory_template", + "../api/video_codecs:video_decoder_factory_template_dav1d_adapter", + "../api/video_codecs:video_decoder_factory_template_libvpx_vp8_adapter", + "../api/video_codecs:video_decoder_factory_template_libvpx_vp9_adapter", + "../api/video_codecs:video_decoder_factory_template_open_h264_adapter", + "../api/video_codecs:video_encoder_factory_template", + "../api/video_codecs:video_encoder_factory_template_libaom_av1_adapter", + "../api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter", + "../api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter", + "../api/video_codecs:video_encoder_factory_template_open_h264_adapter", + "../call:call_interfaces", "../call/adaptation:resource_adaptation_test_utilities", "../common_video", "../logging:fake_rtc_event_log", "../media:codec", "../media:media_channel", "../media:media_constants", + "../media:media_engine", "../media:rid_description", + "../media:rtc_audio_video", "../media:rtc_data_sctp_transport_internal", "../media:rtc_media_config", - "../media:rtc_media_engine_defaults", + "../media:rtc_media_tests_utils", "../media:stream_params", - "../modules/audio_device:audio_device_api", - "../modules/audio_processing:audio_processing_statistics", + "../modules/audio_processing:mocks", "../modules/rtp_rtcp:rtp_rtcp_format", + "../p2p:basic_packet_socket_factory", + "../p2p:connection_info", + "../p2p:dtls_transport_internal", "../p2p:fake_port_allocator", - "../p2p:p2p_server_utils", + "../p2p:ice_transport_internal", + "../p2p:p2p_constants", + "../p2p:p2p_test_utils", + "../p2p:port", + "../p2p:port_allocator", + "../p2p:port_interface", + "../p2p:transport_description", + "../p2p:transport_info", + "../rtc_base:base64", "../rtc_base:byte_buffer", "../rtc_base:checks", "../rtc_base:copy_on_write_buffer", + "../rtc_base:crypto_random", + "../rtc_base:digest", "../rtc_base:event_tracer", "../rtc_base:gunit_helpers", "../rtc_base:ip_address", "../rtc_base:logging", "../rtc_base:macromagic", - "../rtc_base:mdns_responder_interface", "../rtc_base:net_helper", "../rtc_base:network", "../rtc_base:network_constants", "../rtc_base:null_socket_server", + "../rtc_base:random", "../rtc_base:refcount", "../rtc_base:rtc_base_tests_utils", "../rtc_base:rtc_certificate_generator", + "../rtc_base:rtc_event", "../rtc_base:rtc_json", + "../rtc_base:safe_conversions", "../rtc_base:socket_address", + "../rtc_base:socket_server", "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:stringutils", "../rtc_base:task_queue_for_test", "../rtc_base:threading", "../rtc_base:timeutils", "../rtc_base:unique_id_generator", + "../rtc_base/containers:flat_map", 
"../rtc_base/synchronization:mutex", - "../rtc_base/third_party/base64", "../rtc_base/third_party/sigslot", + "../system_wrappers", "../system_wrappers:metrics", - "../test:field_trial", + "../test:audio_codec_mocks", "../test:rtc_expect_death", "../test:run_loop", - "../test:scoped_key_value_config", + "../test:test_support", + "../test:wait_until", "../test/pc/sctp:fake_sctp_transport", "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", + "//third_party/jsoncpp", + ] + + # These deps are kept separately because they can't be automatically + # regenerated by gn_check_autofix tool + deps += [ + ":data_channel_controller_unittest", + "../test:test_main", ] if (is_android) { @@ -2507,54 +2613,10 @@ if (rtc_include_tests && !build_with_chromium) { # We need to depend on this one directly, or classloads will fail for # the voice engine BuildInfo, for instance. - "//sdk/android:libjingle_peerconnection_java", - "//sdk/android:native_test_jni_onload", + "../sdk/android:libjingle_peerconnection_java", ] shard_timeout = 900 } - - deps += [ - ":libjingle_peerconnection", - ":pc_test_utils", - ":rtc_pc", - "../api:callfactory_api", - "../api:rtc_event_log_output_file", - "../api:rtc_stats_api", - "../api:rtp_parameters", - "../api/audio_codecs:audio_codecs_api", - "../api/audio_codecs:builtin_audio_decoder_factory", - "../api/audio_codecs:builtin_audio_encoder_factory", - "../api/audio_codecs:opus_audio_decoder_factory", - "../api/audio_codecs:opus_audio_encoder_factory", - "../api/audio_codecs/L16:audio_decoder_L16", - "../api/audio_codecs/L16:audio_encoder_L16", - "../api/video_codecs:builtin_video_decoder_factory", - "../api/video_codecs:builtin_video_encoder_factory", - "../api/video_codecs:video_codecs_api", - "../api/video_codecs:video_decoder_factory_template", - "../api/video_codecs:video_decoder_factory_template_dav1d_adapter", - "../api/video_codecs:video_decoder_factory_template_libvpx_vp8_adapter", - "../api/video_codecs:video_decoder_factory_template_libvpx_vp9_adapter", - "../api/video_codecs:video_decoder_factory_template_open_h264_adapter", - "../api/video_codecs:video_encoder_factory_template", - "../api/video_codecs:video_encoder_factory_template_libaom_av1_adapter", - "../api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter", - "../api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter", - "../api/video_codecs:video_encoder_factory_template_open_h264_adapter", - "../call:call_interfaces", - "../media:rtc_audio_video", - "../media:rtc_media_base", - "../media:rtc_media_tests_utils", - "../modules/audio_processing", - "../modules/audio_processing:api", - "../p2p:p2p_test_utils", - "../p2p:rtc_p2p", - "../rtc_base:rtc_task_queue", - "../rtc_base:safe_conversions", - "../test:audio_codec_mocks", - "../test:test_main", - "../test:test_support", - ] } rtc_library("data_channel_controller_unittest") { @@ -2565,7 +2627,21 @@ if (rtc_include_tests && !build_with_chromium) { ":pc_test_utils", ":peer_connection_internal", ":sctp_data_channel", + ":sctp_utils", + "../api:data_channel_event_observer_interface", + "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", + "../api:priority", + "../api:rtc_error", + "../api:scoped_refptr", + "../api/transport:datagram_transport_interface", + "../api/units:timestamp", + "../media:rtc_data_sctp_transport_internal", + 
"../rtc_base:copy_on_write_buffer", "../rtc_base:null_socket_server", + "../rtc_base:rtc_base_tests_utils", + "../rtc_base:ssl_adapter", + "../rtc_base:threading", "../test:run_loop", "../test:test_support", ] @@ -2581,12 +2657,11 @@ if (rtc_include_tests && !build_with_chromium) { "test/android_test_initializer.h", ] deps = [ - "../rtc_base:ssl", + "../modules/utility:utility", + "../rtc_base:checks", + "../rtc_base:ssl_adapter", "../sdk/android:internal_jni", "../sdk/android:libjingle_peerconnection_jni", - "//modules/utility:utility", - "//rtc_base:checks", - "//testing/android/native_test:native_test_support", ] } } @@ -2598,121 +2673,83 @@ if (rtc_include_tests && !build_with_chromium) { "test/integration_test_helpers.h", ] deps = [ - ":audio_rtp_receiver", - ":audio_track", - ":dtmf_sender", - ":jitter_buffer_delay", - ":local_audio_source", - ":media_session", - ":media_stream", ":pc_test_utils", ":peer_connection", ":peer_connection_factory", ":peer_connection_proxy", - ":peerconnection", - ":remote_audio_source", - ":rtp_media_utils", - ":rtp_parameters_conversion", - ":rtp_receiver", - ":rtp_sender", - ":rtp_transceiver", ":session_description", - ":usage_pattern", - ":video_rtp_receiver", - ":video_rtp_track_source", - ":video_track", ":video_track_source", - "../api:array_view", "../api:audio_options_api", - "../api:callfactory_api", "../api:candidate", - "../api:create_peerconnection_factory", - "../api:fake_frame_decryptor", - "../api:fake_frame_encryptor", + "../api:enable_media_with_defaults", + "../api:field_trials", "../api:field_trials_view", - "../api:function_view", "../api:ice_transport_interface", "../api:libjingle_logging_api", "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:media_stream_interface", "../api:mock_async_dns_resolver", - "../api:mock_rtp", - "../api:packet_socket_factory", "../api:rtc_error", + "../api:rtc_error_matchers", "../api:rtc_stats_api", "../api:rtp_parameters", - "../api:rtp_sender_interface", "../api:rtp_transceiver_direction", "../api:scoped_refptr", - "../api/audio:audio_mixer_api", - "../api/crypto:frame_decryptor_interface", - "../api/crypto:frame_encryptor_interface", + "../api:sequence_checker", + "../api/audio:builtin_audio_processing_builder", "../api/crypto:options", - "../api/rtc_event_log", + "../api/metronome", "../api/rtc_event_log:rtc_event_log_factory", "../api/task_queue", "../api/task_queue:default_task_queue_factory", "../api/task_queue:pending_task_safety_flag", - "../api/transport:field_trial_based_config", - "../api/transport/rtp:rtp_source", "../api/units:time_delta", - "../api/video:builtin_video_bitrate_allocator_factory", "../api/video:video_rtp_headers", - "../api/video_codecs:video_codecs_api", - "../call:call_interfaces", - "../call/adaptation:resource_adaptation_test_utilities", "../logging:fake_rtc_event_log", - "../media:rtc_audio_video", - "../media:rtc_media_base", - "../media:rtc_media_config", - "../media:rtc_media_engine_defaults", - "../media:rtc_media_tests_utils", "../media:stream_params", - "../modules/audio_device:audio_device_api", - "../modules/audio_processing:api", - "../modules/audio_processing:audio_processing_statistics", - "../modules/audio_processing:audioproc_test_utils", - "../modules/rtp_rtcp:rtp_rtcp_format", "../p2p:fake_ice_transport", - "../p2p:fake_port_allocator", - "../p2p:p2p_server_utils", + "../p2p:ice_transport_internal", "../p2p:p2p_test_utils", - "../p2p:rtc_p2p", + "../p2p:port", + "../p2p:port_interface", "../rtc_base:checks", - 
"../rtc_base:gunit_helpers", + "../rtc_base:crypto_random", "../rtc_base:ip_address", "../rtc_base:logging", - "../rtc_base:macromagic", - "../rtc_base:mdns_responder_interface", - "../rtc_base:null_socket_server", "../rtc_base:rtc_base_tests_utils", - "../rtc_base:rtc_certificate_generator", - "../rtc_base:rtc_event", - "../rtc_base:rtc_json", - "../rtc_base:safe_conversions", "../rtc_base:socket_address", - "../rtc_base:ssl", + "../rtc_base:socket_factory", + "../rtc_base:socket_server", + "../rtc_base:ssl_adapter", "../rtc_base:task_queue_for_test", "../rtc_base:threading", "../rtc_base:timeutils", - "../rtc_base/synchronization:mutex", - "../rtc_base/task_utils:repeating_task", - "../rtc_base/third_party/base64", - "../rtc_base/third_party/sigslot", "../system_wrappers:metrics", - "../test:explicit_key_value_config", - "../test:fileutils", - "../test:rtp_test_utils", - "../test:scoped_key_value_config", "../test:test_support", - "../test/pc/sctp:fake_sctp_transport", - "../test/time_controller", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", + "../test:wait_until", + "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", + ] + } + + rtc_library("enable_fake_media") { + testonly = true + visibility = [ ":*" ] + sources = [ + "test/enable_fake_media.cc", + "test/enable_fake_media.h", + ] + deps = [ + ":media_factory", + "../api:libjingle_peerconnection_api", + "../api/environment", + "../call:call_interfaces", + "../media:media_engine", + "../media:rtc_media_tests_utils", + "../rtc_base:checks", + "//third_party/abseil-cpp/absl/base:nullability", ] } @@ -2748,35 +2785,69 @@ if (rtc_include_tests && !build_with_chromium) { deps = [ ":channel", ":channel_interface", + ":connection_context", + ":data_channel_utils", + ":enable_fake_media", + ":fake_codec_lookup_helper", ":jitter_buffer_delay", + ":jsep_transport_controller", ":libjingle_peerconnection", ":peer_connection_internal", - ":peerconnection", + ":peer_connection_message_handler", ":rtp_receiver", + ":rtp_receiver_proxy", ":rtp_sender", + ":rtp_sender_proxy", + ":rtp_transceiver", + ":rtp_transmission_manager", + ":rtp_transport_internal", ":sctp_data_channel", + ":sctp_utils", ":session_description", ":simulcast_description", ":stream_collection", + ":transport_stats", + ":usage_pattern", ":video_track_source", "../api:audio_options_api", "../api:call_api", + "../api:candidate", "../api:create_frame_generator", "../api:create_peerconnection_factory", + "../api:data_channel_event_observer_interface", + "../api:dtls_transport_interface", "../api:field_trials_view", "../api:field_trials_view", + "../api:frame_transformer_interface", + "../api:libjingle_logging_api", "../api:libjingle_peerconnection_api", "../api:make_ref_counted", "../api:media_stream_interface", + "../api:priority", "../api:rtc_error", + "../api:rtc_error_matchers", "../api:rtc_stats_api", + "../api:rtp_headers", "../api:rtp_parameters", "../api:scoped_refptr", "../api:sequence_checker", + "../api/adaptation:resource_adaptation_api", + "../api/audio:audio_device", "../api/audio:audio_mixer_api", + "../api/audio:audio_processing", "../api/audio_codecs:audio_codecs_api", + "../api/crypto:frame_decryptor_interface", + "../api/crypto:frame_encryptor_interface", + "../api/crypto:options", + "../api/environment", + "../api/environment:environment_factory", 
"../api/task_queue", "../api/task_queue:default_task_queue_factory", + "../api/transport:bandwidth_estimation_settings", + "../api/transport:bitrate_settings", + "../api/transport:datagram_transport_interface", + "../api/transport:network_control", + "../api/transport/rtp:rtp_source", "../api/units:time_delta", "../api/video:builtin_video_bitrate_allocator_factory", "../api/video:resolution", @@ -2794,42 +2865,52 @@ if (rtc_include_tests && !build_with_chromium) { "../api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter", "../api/video_codecs:video_encoder_factory_template_open_h264_adapter", "../call:call_interfaces", + "../call:payload_type_picker", + "../media:codec", "../media:media_channel", "../media:media_channel_impl", "../media:rtc_media", - "../media:rtc_media_base", "../media:rtc_media_tests_utils", "../media:rtc_simulcast_encoder_adapter", + "../media:stream_params", + "../media:video_broadcaster", "../modules/audio_device", "../modules/audio_processing", - "../modules/audio_processing:api", "../modules/rtp_rtcp:rtp_rtcp_format", + "../p2p:basic_packet_socket_factory", + "../p2p:connection", "../p2p:fake_port_allocator", + "../p2p:p2p_constants", "../p2p:p2p_test_utils", - "../p2p:rtc_p2p", + "../p2p:port", + "../p2p:port_allocator", "../rtc_base:checks", + "../rtc_base:copy_on_write_buffer", "../rtc_base:gunit_helpers", "../rtc_base:logging", "../rtc_base:macromagic", + "../rtc_base:refcount", "../rtc_base:rtc_certificate_generator", - "../rtc_base:rtc_task_queue", + "../rtc_base:socket_server", "../rtc_base:ssl", + "../rtc_base:ssl_adapter", "../rtc_base:stringutils", "../rtc_base:task_queue_for_test", "../rtc_base:threading", "../rtc_base:timeutils", + "../rtc_base:unique_id_generator", "../rtc_base:weak_ptr", "../rtc_base/synchronization:mutex", "../rtc_base/task_utils:repeating_task", "../rtc_base/third_party/sigslot", + "../system_wrappers", "../test:frame_generator_capturer", "../test:scoped_key_value_config", "../test:test_support", - ] - absl_deps = [ + "../test:wait_until", + "//testing/gmock", "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -2856,28 +2937,41 @@ if (rtc_include_tests && !build_with_chromium) { "../api:create_peer_connection_quality_test_frame_generator", "../api:create_peerconnection_quality_test_fixture", "../api:frame_generator_api", + "../api:function_view", "../api:media_stream_interface", "../api:network_emulation_manager_api", "../api:peer_connection_quality_test_fixture_api", "../api:rtc_stats_api", + "../api:rtp_parameters", + "../api:scoped_refptr", "../api:simulated_network_api", "../api:time_controller", + "../api:video_quality_analyzer_api", "../api/test/metrics:global_metrics_logger_and_exporter", "../api/test/pclf:media_configuration", "../api/test/pclf:media_quality_test_params", "../api/test/pclf:peer_configurer", + "../api/units:time_delta", + "../api/video:encoded_image", + "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", - "../call:simulated_network", + "../media:media_constants", "../modules/video_coding:webrtc_vp9", "../modules/video_coding/svc:scalability_mode_util", + "../rtc_base:checks", + "../rtc_base:logging", "../rtc_base/containers:flat_map", + "../system_wrappers", "../system_wrappers:field_trial", "../test:field_trial", "../test:fileutils", "../test:test_main", "../test:test_support", + "../test/network:simulated_network", "../test/pc/e2e:network_quality_metrics_reporter", 
"../test/pc/e2e/analyzer/video:default_video_quality_analyzer", + "../test/pc/e2e/analyzer/video:default_video_quality_analyzer_shared", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (is_ios) { diff --git a/pc/DEPS b/pc/DEPS index 80a702d716..53ef6e45ee 100644 --- a/pc/DEPS +++ b/pc/DEPS @@ -14,13 +14,12 @@ include_rules = [ "+net/dcsctp", "+p2p", "+system_wrappers", + "+absl/strings", ] specific_include_rules = { - "androidtestinitializer\.cc": [ - "+base/android", # Allowed only for Android tests. + "rtc_stats_collector_unittest.cc": [ + "+json/reader.h", + "+json/value.h", ], - "srtpfilter_unittest\.cc": [ - "+crypto", - ], -} +} \ No newline at end of file diff --git a/pc/OWNERS b/pc/OWNERS index 8ceb1f6c63..4b662f76a0 100644 --- a/pc/OWNERS +++ b/pc/OWNERS @@ -3,7 +3,6 @@ hta@webrtc.org perkj@webrtc.org tommi@webrtc.org deadbeef@webrtc.org -orphis@webrtc.org # Adding features via SDP munging requires approval from SDP owners per-file webrtc_sdp.cc = set noparent diff --git a/pc/audio_rtp_receiver.cc b/pc/audio_rtp_receiver.cc index a8659de5f9..10bd129390 100644 --- a/pc/audio_rtp_receiver.cc +++ b/pc/audio_rtp_receiver.cc @@ -12,23 +12,38 @@ #include +#include +#include #include #include #include +#include "api/crypto/frame_decryptor_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/frame_transformer_interface.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/transport/rtp/rtp_source.h" +#include "media/base/media_channel.h" #include "pc/audio_track.h" #include "pc/media_stream_track_proxy.h" +#include "pc/remote_audio_source.h" #include "rtc_base/checks.h" +#include "rtc_base/thread.h" namespace webrtc { AudioRtpReceiver::AudioRtpReceiver( - rtc::Thread* worker_thread, + Thread* worker_thread, std::string receiver_id, std::vector stream_ids, bool is_unified_plan, - cricket::VoiceMediaReceiveChannelInterface* voice_channel /*= nullptr*/) + VoiceMediaReceiveChannelInterface* voice_channel /*= nullptr*/) : AudioRtpReceiver(worker_thread, receiver_id, CreateStreamsFromIds(std::move(stream_ids)), @@ -36,20 +51,20 @@ AudioRtpReceiver::AudioRtpReceiver( voice_channel) {} AudioRtpReceiver::AudioRtpReceiver( - rtc::Thread* worker_thread, + Thread* worker_thread, const std::string& receiver_id, - const std::vector>& streams, + const std::vector>& streams, bool is_unified_plan, - cricket::VoiceMediaReceiveChannelInterface* voice_channel /*= nullptr*/) + VoiceMediaReceiveChannelInterface* voice_channel /*= nullptr*/) : worker_thread_(worker_thread), id_(receiver_id), - source_(rtc::make_ref_counted( + source_(make_ref_counted( worker_thread, is_unified_plan ? 
RemoteAudioSource::OnAudioChannelGoneAction::kSurvive : RemoteAudioSource::OnAudioChannelGoneAction::kEnd)), track_(AudioTrackProxyWithInternal::Create( - rtc::Thread::Current(), + Thread::Current(), AudioTrack::Create(receiver_id, source_))), media_channel_(voice_channel), cached_track_enabled_(track_->internal()->enabled()), @@ -113,8 +128,7 @@ void AudioRtpReceiver::OnSetVolume(double volume) { }); } -rtc::scoped_refptr AudioRtpReceiver::dtls_transport() - const { +scoped_refptr AudioRtpReceiver::dtls_transport() const { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); return dtls_transport_; } @@ -127,8 +141,8 @@ std::vector AudioRtpReceiver::stream_ids() const { return stream_ids; } -std::vector> -AudioRtpReceiver::streams() const { +std::vector> AudioRtpReceiver::streams() + const { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); return streams_; } @@ -144,7 +158,7 @@ RtpParameters AudioRtpReceiver::GetParameters() const { } void AudioRtpReceiver::SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) { + scoped_refptr frame_decryptor) { RTC_DCHECK_RUN_ON(worker_thread_); frame_decryptor_ = std::move(frame_decryptor); // Special Case: Set the frame decryptor to any value on any existing channel. @@ -153,8 +167,8 @@ void AudioRtpReceiver::SetFrameDecryptor( } } -rtc::scoped_refptr -AudioRtpReceiver::GetFrameDecryptor() const { +scoped_refptr AudioRtpReceiver::GetFrameDecryptor() + const { RTC_DCHECK_RUN_ON(worker_thread_); return frame_decryptor_; } @@ -165,7 +179,7 @@ void AudioRtpReceiver::Stop() { track_->internal()->set_ended(); } -void AudioRtpReceiver::RestartMediaChannel(absl::optional ssrc) { +void AudioRtpReceiver::RestartMediaChannel(std::optional ssrc) { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); bool enabled = track_->internal()->enabled(); MediaSourceInterface::SourceState state = source_->state(); @@ -177,7 +191,7 @@ void AudioRtpReceiver::RestartMediaChannel(absl::optional ssrc) { } void AudioRtpReceiver::RestartMediaChannel_w( - absl::optional ssrc, + std::optional ssrc, bool track_enabled, MediaSourceInterface::SourceState state) { RTC_DCHECK_RUN_ON(worker_thread_); @@ -212,10 +226,10 @@ void AudioRtpReceiver::SetupMediaChannel(uint32_t ssrc) { void AudioRtpReceiver::SetupUnsignaledMediaChannel() { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - RestartMediaChannel(absl::nullopt); + RestartMediaChannel(std::nullopt); } -absl::optional AudioRtpReceiver::ssrc() const { +std::optional AudioRtpReceiver::ssrc() const { RTC_DCHECK_RUN_ON(worker_thread_); if (!signaled_ssrc_.has_value() && media_channel_) { return media_channel_->GetUnsignaledSsrc(); @@ -229,13 +243,13 @@ void AudioRtpReceiver::set_stream_ids(std::vector stream_ids) { } void AudioRtpReceiver::set_transport( - rtc::scoped_refptr dtls_transport) { + scoped_refptr dtls_transport) { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); dtls_transport_ = std::move(dtls_transport); } void AudioRtpReceiver::SetStreams( - const std::vector>& streams) { + const std::vector>& streams) { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); // Remove remote track from any streams that are going away. 
for (const auto& existing_stream : streams_) { @@ -277,8 +291,8 @@ std::vector AudioRtpReceiver::GetSources() const { return media_channel_->GetSources(current_ssrc.value()); } -void AudioRtpReceiver::SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) { +void AudioRtpReceiver::SetFrameTransformer( + scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(worker_thread_); if (media_channel_) { media_channel_->SetDepacketizerToDecoderFrameTransformer( @@ -314,7 +328,7 @@ void AudioRtpReceiver::SetObserver(RtpReceiverObserverInterface* observer) { } void AudioRtpReceiver::SetJitterBufferMinimumDelay( - absl::optional delay_seconds) { + std::optional delay_seconds) { RTC_DCHECK_RUN_ON(worker_thread_); delay_.Set(delay_seconds); if (media_channel_ && signaled_ssrc_) @@ -323,7 +337,7 @@ void AudioRtpReceiver::SetJitterBufferMinimumDelay( } void AudioRtpReceiver::SetMediaChannel( - cricket::MediaReceiveChannelInterface* media_channel) { + MediaReceiveChannelInterface* media_channel) { RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(media_channel == nullptr || media_channel->media_type() == media_type()); @@ -333,7 +347,7 @@ void AudioRtpReceiver::SetMediaChannel( media_channel ? worker_thread_safety_->SetAlive() : worker_thread_safety_->SetNotAlive(); media_channel_ = - static_cast(media_channel); + static_cast(media_channel); } void AudioRtpReceiver::NotifyFirstPacketReceived() { diff --git a/pc/audio_rtp_receiver.h b/pc/audio_rtp_receiver.h index 86c42d532a..df88699e92 100644 --- a/pc/audio_rtp_receiver.h +++ b/pc/audio_rtp_receiver.h @@ -13,10 +13,10 @@ #include +#include #include #include -#include "absl/types/optional.h" #include "api/crypto/frame_decryptor_interface.h" #include "api/dtls_transport_interface.h" #include "api/frame_transformer_interface.h" @@ -50,19 +50,18 @@ class AudioRtpReceiver : public ObserverInterface, // However, when using that, the assumption is that right after construction, // a call to either `SetupUnsignaledMediaChannel` or `SetupMediaChannel` // will be made, which will internally start the source on the worker thread. - AudioRtpReceiver( - rtc::Thread* worker_thread, - std::string receiver_id, - std::vector stream_ids, - bool is_unified_plan, - cricket::VoiceMediaReceiveChannelInterface* voice_channel = nullptr); + AudioRtpReceiver(Thread* worker_thread, + std::string receiver_id, + std::vector stream_ids, + bool is_unified_plan, + VoiceMediaReceiveChannelInterface* voice_channel = nullptr); // TODO(https://crbug.com/webrtc/9480): Remove this when streams() is removed. 
AudioRtpReceiver( - rtc::Thread* worker_thread, + Thread* worker_thread, const std::string& receiver_id, - const std::vector>& streams, + const std::vector>& streams, bool is_unified_plan, - cricket::VoiceMediaReceiveChannelInterface* media_channel = nullptr); + VoiceMediaReceiveChannelInterface* media_channel = nullptr); virtual ~AudioRtpReceiver(); // ObserverInterface implementation @@ -71,19 +70,18 @@ class AudioRtpReceiver : public ObserverInterface, // AudioSourceInterface::AudioObserver implementation void OnSetVolume(double volume) override; - rtc::scoped_refptr audio_track() const { return track_; } + scoped_refptr audio_track() const { return track_; } // RtpReceiverInterface implementation - rtc::scoped_refptr track() const override { + scoped_refptr track() const override { return track_; } - rtc::scoped_refptr dtls_transport() const override; + scoped_refptr dtls_transport() const override; std::vector stream_ids() const override; - std::vector> streams() - const override; + std::vector> streams() const override; - cricket::MediaType media_type() const override { - return cricket::MEDIA_TYPE_AUDIO; + webrtc::MediaType media_type() const override { + return webrtc::MediaType::AUDIO; } std::string id() const override { return id_; } @@ -91,40 +89,37 @@ class AudioRtpReceiver : public ObserverInterface, RtpParameters GetParameters() const override; void SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) override; + scoped_refptr frame_decryptor) override; - rtc::scoped_refptr GetFrameDecryptor() - const override; + scoped_refptr GetFrameDecryptor() const override; // RtpReceiverInternal implementation. void Stop() override; void SetupMediaChannel(uint32_t ssrc) override; void SetupUnsignaledMediaChannel() override; - absl::optional ssrc() const override; + std::optional ssrc() const override; void NotifyFirstPacketReceived() override; void set_stream_ids(std::vector stream_ids) override; void set_transport( - rtc::scoped_refptr dtls_transport) override; - void SetStreams(const std::vector>& - streams) override; + scoped_refptr dtls_transport) override; + void SetStreams( + const std::vector>& streams) override; void SetObserver(RtpReceiverObserverInterface* observer) override; void SetJitterBufferMinimumDelay( - absl::optional delay_seconds) override; + std::optional delay_seconds) override; - void SetMediaChannel( - cricket::MediaReceiveChannelInterface* media_channel) override; + void SetMediaChannel(MediaReceiveChannelInterface* media_channel) override; std::vector GetSources() const override; int AttachmentId() const override { return attachment_id_; } - void SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) - override; + void SetFrameTransformer( + scoped_refptr frame_transformer) override; private: - void RestartMediaChannel(absl::optional ssrc) + void RestartMediaChannel(std::optional ssrc) RTC_RUN_ON(&signaling_thread_checker_); - void RestartMediaChannel_w(absl::optional ssrc, + void RestartMediaChannel_w(std::optional ssrc, bool track_enabled, MediaSourceInterface::SourceState state) RTC_RUN_ON(worker_thread_); @@ -132,14 +127,14 @@ class AudioRtpReceiver : public ObserverInterface, void SetOutputVolume_w(double volume) RTC_RUN_ON(worker_thread_); RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_checker_; - rtc::Thread* const worker_thread_; + Thread* const worker_thread_; const std::string id_; - const rtc::scoped_refptr source_; - const rtc::scoped_refptr> track_; - cricket::VoiceMediaReceiveChannelInterface* media_channel_ 
+ const scoped_refptr source_; + const scoped_refptr> track_; + VoiceMediaReceiveChannelInterface* media_channel_ RTC_GUARDED_BY(worker_thread_) = nullptr; - absl::optional signaled_ssrc_ RTC_GUARDED_BY(worker_thread_); - std::vector> streams_ + std::optional signaled_ssrc_ RTC_GUARDED_BY(worker_thread_); + std::vector> streams_ RTC_GUARDED_BY(&signaling_thread_checker_); bool cached_track_enabled_ RTC_GUARDED_BY(&signaling_thread_checker_); double cached_volume_ RTC_GUARDED_BY(worker_thread_) = 1.0; @@ -148,16 +143,16 @@ class AudioRtpReceiver : public ObserverInterface, bool received_first_packet_ RTC_GUARDED_BY(&signaling_thread_checker_) = false; const int attachment_id_; - rtc::scoped_refptr frame_decryptor_ + scoped_refptr frame_decryptor_ RTC_GUARDED_BY(worker_thread_); - rtc::scoped_refptr dtls_transport_ + scoped_refptr dtls_transport_ RTC_GUARDED_BY(&signaling_thread_checker_); // Stores and updates the playout delay. Handles caching cases if // `SetJitterBufferMinimumDelay` is called before start. JitterBufferDelay delay_ RTC_GUARDED_BY(worker_thread_); - rtc::scoped_refptr frame_transformer_ + scoped_refptr frame_transformer_ RTC_GUARDED_BY(worker_thread_); - const rtc::scoped_refptr worker_thread_safety_; + const scoped_refptr worker_thread_safety_; }; } // namespace webrtc diff --git a/pc/audio_rtp_receiver_unittest.cc b/pc/audio_rtp_receiver_unittest.cc index 9eb20c982f..91614d7d69 100644 --- a/pc/audio_rtp_receiver_unittest.cc +++ b/pc/audio_rtp_receiver_unittest.cc @@ -11,17 +11,24 @@ #include "pc/audio_rtp_receiver.h" #include - +#include +#include +#include + +#include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" #include "pc/test/mock_voice_media_receive_channel_interface.h" -#include "rtc_base/gunit.h" #include "rtc_base/thread.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/run_loop.h" +#include "test/wait_until.h" using ::testing::_; +using ::testing::Eq; using ::testing::InvokeWithoutArgs; -using ::testing::Mock; static const int kTimeOut = 100; static const double kDefaultVolume = 1; @@ -33,12 +40,11 @@ namespace webrtc { class AudioRtpReceiverTest : public ::testing::Test { protected: AudioRtpReceiverTest() - : worker_(rtc::Thread::Current()), - receiver_( - rtc::make_ref_counted(worker_, - std::string(), - std::vector(), - false)) { + : worker_(Thread::Current()), + receiver_(make_ref_counted(worker_, + std::string(), + std::vector(), + false)) { EXPECT_CALL(receive_channel_, SetRawAudioSink(kSsrc, _)); EXPECT_CALL(receive_channel_, SetBaseMinimumPlayoutDelayMs(kSsrc, _)); } @@ -48,10 +54,10 @@ class AudioRtpReceiverTest : public ::testing::Test { receiver_->SetMediaChannel(nullptr); } - rtc::AutoThread main_thread_; - rtc::Thread* worker_; - rtc::scoped_refptr receiver_; - cricket::MockVoiceMediaReceiveChannelInterface receive_channel_; + AutoThread main_thread_; + Thread* worker_; + scoped_refptr receiver_; + MockVoiceMediaReceiveChannelInterface receive_channel_; }; TEST_F(AudioRtpReceiverTest, SetOutputVolumeIsCalled) { @@ -76,7 +82,9 @@ TEST_F(AudioRtpReceiverTest, SetOutputVolumeIsCalled) { })); receiver_->OnSetVolume(kVolume); - EXPECT_TRUE_WAIT(set_volume_calls == 2, kTimeOut); + EXPECT_THAT(WaitUntil([&] { return set_volume_calls.load(); }, Eq(2), + {.timeout = webrtc::TimeDelta::Millis(kTimeOut)}), + IsRtcOk()); } TEST_F(AudioRtpReceiverTest, VolumesSetBeforeStartingAreRespected) { @@ -98,12 +106,12 @@ TEST_F(AudioRtpReceiverTest, 
VolumesSetBeforeStartingAreRespected) { // thread when a media channel pointer is passed to the receiver via the // constructor. TEST(AudioRtpReceiver, OnChangedNotificationsAfterConstruction) { - webrtc::test::RunLoop loop; - auto* thread = rtc::Thread::Current(); // Points to loop's thread. - cricket::MockVoiceMediaReceiveChannelInterface receive_channel; - auto receiver = rtc::make_ref_counted( - thread, std::string(), std::vector(), true, - &receive_channel); + test::RunLoop loop; + auto* thread = Thread::Current(); // Points to loop's thread. + MockVoiceMediaReceiveChannelInterface receive_channel; + auto receiver = make_ref_counted(thread, std::string(), + std::vector(), + true, &receive_channel); EXPECT_CALL(receive_channel, SetDefaultRawAudioSink(_)).Times(1); EXPECT_CALL(receive_channel, SetDefaultOutputVolume(kDefaultVolume)).Times(1); diff --git a/pc/audio_track.cc b/pc/audio_track.cc index c012442d13..dc446c42cc 100644 --- a/pc/audio_track.cc +++ b/pc/audio_track.cc @@ -10,19 +10,26 @@ #include "pc/audio_track.h" -#include "rtc_base/checks.h" +#include + +#include "absl/strings/string_view.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" +#include "api/media_stream_track.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" namespace webrtc { // static -rtc::scoped_refptr AudioTrack::Create( +scoped_refptr AudioTrack::Create( absl::string_view id, - const rtc::scoped_refptr& source) { - return rtc::make_ref_counted(id, source); + const scoped_refptr& source) { + return make_ref_counted(id, source); } AudioTrack::AudioTrack(absl::string_view label, - const rtc::scoped_refptr& source) + const scoped_refptr& source) : MediaStreamTrack(label), audio_source_(source) { if (audio_source_) { audio_source_->RegisterObserver(this); diff --git a/pc/audio_track.h b/pc/audio_track.h index ae326b304b..7b7f5af60b 100644 --- a/pc/audio_track.h +++ b/pc/audio_track.h @@ -13,6 +13,7 @@ #include +#include "absl/strings/string_view.h" #include "api/media_stream_interface.h" #include "api/media_stream_track.h" #include "api/scoped_refptr.h" @@ -30,7 +31,7 @@ class AudioTrack : public MediaStreamTrack, protected: // Protected ctor to force use of factory method. AudioTrack(absl::string_view label, - const rtc::scoped_refptr& source); + const scoped_refptr& source); AudioTrack() = delete; AudioTrack(const AudioTrack&) = delete; @@ -39,9 +40,9 @@ class AudioTrack : public MediaStreamTrack, ~AudioTrack() override; public: - static rtc::scoped_refptr Create( + static scoped_refptr Create( absl::string_view id, - const rtc::scoped_refptr& source); + const scoped_refptr& source); // MediaStreamTrack implementation. 
std::string kind() const override; @@ -57,8 +58,8 @@ class AudioTrack : public MediaStreamTrack, void OnChanged() override; private: - const rtc::scoped_refptr audio_source_; - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker signaling_thread_checker_; + const scoped_refptr audio_source_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_checker_; }; } // namespace webrtc diff --git a/pc/channel.cc b/pc/channel.cc index 82ca1a389e..deeb8b7103 100644 --- a/pc/channel.cc +++ b/pc/channel.cc @@ -12,35 +12,50 @@ #include #include +#include +#include +#include #include -#include #include +#include +#include "absl/algorithm/container.h" #include "absl/strings/string_view.h" +#include "api/crypto/crypto_options.h" +#include "api/jsep.h" +#include "api/media_types.h" +#include "api/rtp_headers.h" #include "api/rtp_parameters.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" -#include "api/units/timestamp.h" +#include "api/task_queue/task_queue_base.h" #include "media/base/codec.h" +#include "media/base/media_channel.h" #include "media/base/rid_description.h" #include "media/base/rtp_utils.h" +#include "media/base/stream_params.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "p2p/base/dtls_transport_internal.h" +#include "p2p/dtls/dtls_transport_internal.h" #include "pc/rtp_media_utils.h" +#include "pc/rtp_transport_internal.h" +#include "pc/session_description.h" +#include "rtc_base/async_packet_socket.h" #include "rtc_base/checks.h" +#include "rtc_base/containers/flat_set.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" +#include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" +#include "rtc_base/socket.h" #include "rtc_base/strings/string_format.h" +#include "rtc_base/thread.h" #include "rtc_base/trace_event.h" +#include "rtc_base/unique_id_generator.h" -namespace cricket { +namespace webrtc { namespace { -using ::rtc::StringFormat; -using ::rtc::UniqueRandomIdGenerator; -using ::webrtc::PendingTaskSafetyFlag; -using ::webrtc::SdpType; +using ::webrtc::UniqueRandomIdGenerator; // Finds a stream based on target's Primary SSRC or RIDs. // This struct is used in BaseChannel::UpdateLocalStreams_w. 
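The receiver, track, and channel hunks above consistently replace absl::optional/absl::nullopt with std::optional/std::nullopt and drop the rtc::/cricket:: prefixes in favour of the plain webrtc namespace. A minimal, self-contained sketch of the optional migration, not part of the patch (the SignaledSsrc helper below is hypothetical, not from the tree):

```cpp
// Hypothetical stand-alone example of the absl::optional -> std::optional
// migration applied throughout this patch; only <optional> is required and
// the has_value()/value_or() semantics are unchanged.
#include <cstdint>
#include <optional>

std::optional<uint32_t> SignaledSsrc(bool signaled, uint32_t ssrc) {
  if (!signaled) {
    return std::nullopt;  // was absl::nullopt
  }
  return ssrc;
}

int main() {
  return SignaledSsrc(true, 1234).value_or(0) == 1234 ? 0 : 1;
}
```

Call sites such as AudioRtpReceiver::RestartMediaChannel() only swap the spelling of the optional type; no behavioural change is implied by this part of the diff.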
@@ -77,12 +92,13 @@ struct StreamFinder { } // namespace -template void MediaChannelParametersFromMediaDescription( - const MediaContentDescriptionImpl* desc, + const MediaContentDescription* desc, const RtpHeaderExtensions& extensions, bool is_stream_active, MediaChannelParameters* params) { + RTC_DCHECK(desc->type() == MediaType::AUDIO || + desc->type() == MediaType::VIDEO); params->is_stream_active = is_stream_active; params->codecs = desc->codecs(); // TODO(bugs.webrtc.org/11513): See if we really need @@ -94,14 +110,12 @@ void MediaChannelParametersFromMediaDescription( params->rtcp.remote_estimate = desc->remote_estimate(); } -template void RtpSendParametersFromMediaDescription( - const MediaContentDescriptionImpl* desc, - webrtc::RtpExtension::Filter extensions_filter, + const MediaContentDescription* desc, + RtpExtension::Filter extensions_filter, SenderParameters* send_params) { - RtpHeaderExtensions extensions = - webrtc::RtpExtension::DeduplicateHeaderExtensions( - desc->rtp_header_extensions(), extensions_filter); + RtpHeaderExtensions extensions = RtpExtension::DeduplicateHeaderExtensions( + desc->rtp_header_extensions(), extensions_filter); const bool is_stream_active = webrtc::RtpTransceiverDirectionHasRecv(desc->direction()); MediaChannelParametersFromMediaDescription(desc, extensions, is_stream_active, @@ -111,14 +125,14 @@ void RtpSendParametersFromMediaDescription( } BaseChannel::BaseChannel( - rtc::Thread* worker_thread, - rtc::Thread* network_thread, - rtc::Thread* signaling_thread, + TaskQueueBase* worker_thread, + Thread* network_thread, + TaskQueueBase* signaling_thread, std::unique_ptr send_media_channel_impl, std::unique_ptr receive_media_channel_impl, absl::string_view mid, bool srtp_required, - webrtc::CryptoOptions crypto_options, + CryptoOptions crypto_options, UniqueRandomIdGenerator* ssrc_generator) : media_send_channel_(std::move(send_media_channel_impl)), media_receive_channel_(std::move(receive_media_channel_impl)), @@ -129,8 +143,8 @@ BaseChannel::BaseChannel( srtp_required_(srtp_required), extensions_filter_( crypto_options.srtp.enable_encrypted_rtp_header_extensions - ? webrtc::RtpExtension::kPreferEncryptedExtension - : webrtc::RtpExtension::kDiscardEncryptedExtension), + ? 
RtpExtension::kPreferEncryptedExtension + : RtpExtension::kDiscardEncryptedExtension), demuxer_criteria_(mid), ssrc_generator_(ssrc_generator) { RTC_DCHECK_RUN_ON(worker_thread_); @@ -152,7 +166,7 @@ BaseChannel::~BaseChannel() { } std::string BaseChannel::ToString() const { - return StringFormat( + return webrtc::StringFormat( "{mid: %s, media_type: %s}", mid().c_str(), MediaTypeToString(media_send_channel_->media_type()).c_str()); } @@ -169,14 +183,14 @@ bool BaseChannel::ConnectToRtpTransport_n() { rtp_transport_->SubscribeReadyToSend( this, [this](bool ready) { OnTransportReadyToSend(ready); }); rtp_transport_->SubscribeNetworkRouteChanged( - this, [this](absl::optional route) { + this, [this](std::optional route) { OnNetworkRouteChanged(route); }); rtp_transport_->SubscribeWritableState( this, [this](bool state) { OnWritableState(state); }); rtp_transport_->SubscribeSentPacket( this, - [this](const rtc::SentPacket& packet) { SignalSentPacket_n(packet); }); + [this](const SentPacketInfo& packet) { SignalSentPacket_n(packet); }); return true; } @@ -193,7 +207,7 @@ void BaseChannel::DisconnectFromRtpTransport_n() { media_receive_channel()->SetInterface(nullptr); } -bool BaseChannel::SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) { +bool BaseChannel::SetRtpTransport(RtpTransportInternal* rtp_transport) { TRACE_EVENT0("webrtc", "BaseChannel::SetRtpTransport"); RTC_DCHECK_RUN_ON(network_thread()); if (rtp_transport == rtp_transport_) { @@ -294,30 +308,27 @@ bool BaseChannel::IsReadyToSendMedia_w() const { was_ever_writable_; } -bool BaseChannel::SendPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options) { +bool BaseChannel::SendPacket(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options) { return SendPacket(false, packet, options); } -bool BaseChannel::SendRtcp(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options) { +bool BaseChannel::SendRtcp(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options) { return SendPacket(true, packet, options); } -int BaseChannel::SetOption(SocketType type, - rtc::Socket::Option opt, - int value) { +int BaseChannel::SetOption(SocketType type, Socket::Option opt, int value) { RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK(network_initialized()); RTC_DCHECK(rtp_transport_); switch (type) { case ST_RTP: - socket_options_.push_back( - std::pair(opt, value)); + socket_options_.push_back(std::pair(opt, value)); return rtp_transport_->SetRtpOption(opt, value); case ST_RTCP: rtcp_socket_options_.push_back( - std::pair(opt, value)); + std::pair(opt, value)); return rtp_transport_->SetRtcpOption(opt, value); } return -1; @@ -334,13 +345,13 @@ void BaseChannel::OnWritableState(bool writable) { } void BaseChannel::OnNetworkRouteChanged( - absl::optional network_route) { + std::optional network_route) { RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK(network_initialized()); RTC_LOG(LS_INFO) << "Network route changed for " << ToString(); - rtc::NetworkRoute new_route; + NetworkRoute new_route; if (network_route) { new_route = *(network_route); } @@ -365,6 +376,13 @@ void BaseChannel::SetFirstPacketReceivedCallback( on_first_packet_received_ = std::move(callback); } +void BaseChannel::SetFirstPacketSentCallback(std::function callback) { + RTC_DCHECK_RUN_ON(network_thread()); + RTC_DCHECK(!on_first_packet_sent_ || !callback); + + on_first_packet_sent_ = std::move(callback); +} + void BaseChannel::OnTransportReadyToSend(bool ready) { RTC_DCHECK_RUN_ON(network_thread()); 
RTC_DCHECK(network_initialized()); @@ -372,8 +390,8 @@ void BaseChannel::OnTransportReadyToSend(bool ready) { } bool BaseChannel::SendPacket(bool rtcp, - rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options) { + CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options) { RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK(network_initialized()); TRACE_EVENT0("webrtc", "BaseChannel::SendPacket"); @@ -413,11 +431,16 @@ bool BaseChannel::SendPacket(bool rtcp, << "."; } + if (on_first_packet_sent_ && options.info_signaled_after_sent.is_media) { + on_first_packet_sent_(); + on_first_packet_sent_ = nullptr; + } + return rtcp ? rtp_transport_->SendRtcpPacket(packet, options, PF_SRTP_BYPASS) : rtp_transport_->SendRtpPacket(packet, options, PF_SRTP_BYPASS); } -void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet) { +void BaseChannel::OnRtpPacket(const RtpPacketReceived& parsed_packet) { RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK(network_initialized()); @@ -448,7 +471,7 @@ void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet) { bool BaseChannel::MaybeUpdateDemuxerAndRtpExtensions_w( bool update_demuxer, - absl::optional extensions, + std::optional extensions, std::string& error_desc) { if (extensions) { if (rtp_header_extensions_ == extensions) { @@ -478,9 +501,9 @@ bool BaseChannel::MaybeUpdateDemuxerAndRtpExtensions_w( return true; if (!rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria_, this)) { - error_desc = - StringFormat("Failed to apply demuxer criteria for '%s': '%s'.", - mid().c_str(), demuxer_criteria_.ToString().c_str()); + error_desc = webrtc::StringFormat( + "Failed to apply demuxer criteria for '%s': '%s'.", mid().c_str(), + demuxer_criteria_.ToString().c_str()); return false; } return true; @@ -636,7 +659,7 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, continue; } if (!media_send_channel()->RemoveSendStream(old_stream.first_ssrc())) { - error_desc = StringFormat( + error_desc = webrtc::StringFormat( "Failed to remove send stream with ssrc %u from m-section with " "mid='%s'.", old_stream.first_ssrc(), mid().c_str()); @@ -662,7 +685,7 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, RTC_DCHECK(new_stream.has_ssrcs() || new_stream.has_rids()); if (new_stream.has_ssrcs() && new_stream.has_rids()) { - error_desc = StringFormat( + error_desc = webrtc::StringFormat( "Failed to add send stream: %u into m-section with mid='%s'. Stream " "has both SSRCs and RIDs.", new_stream.first_ssrc(), mid().c_str()); @@ -682,7 +705,7 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, RTC_LOG(LS_INFO) << "Add send stream ssrc: " << new_stream.ssrcs[0] << " into " << ToString(); } else { - error_desc = StringFormat( + error_desc = webrtc::StringFormat( "Failed to add send stream ssrc: %u into m-section with mid='%s'", new_stream.first_ssrc(), mid().c_str()); ret = false; @@ -724,7 +747,7 @@ bool BaseChannel::UpdateRemoteStreams_w(const MediaContentDescription* content, RTC_LOG(LS_INFO) << "Remove remote ssrc: " << old_stream.first_ssrc() << " from " << ToString() << "."; } else { - error_desc = StringFormat( + error_desc = webrtc::StringFormat( "Failed to remove remote stream with ssrc %u from m-section with " "mid='%s'.", old_stream.first_ssrc(), mid().c_str()); @@ -734,7 +757,7 @@ bool BaseChannel::UpdateRemoteStreams_w(const MediaContentDescription* content, } // Check for new streams. 
- webrtc::flat_set ssrcs; + flat_set ssrcs; for (const StreamParams& new_stream : streams) { // We allow a StreamParams with an empty list of SSRCs, in which case the // MediaChannel will cache the parameters and use them for any unsignaled @@ -748,12 +771,12 @@ bool BaseChannel::UpdateRemoteStreams_w(const MediaContentDescription* content, : "unsignaled") << " to " << ToString(); } else { - error_desc = - StringFormat("Failed to add remote stream ssrc: %s to %s", - new_stream.has_ssrcs() - ? std::to_string(new_stream.first_ssrc()).c_str() - : "unsignaled", - ToString().c_str()); + error_desc = webrtc::StringFormat( + "Failed to add remote stream ssrc: %s to %s", + new_stream.has_ssrcs() + ? std::to_string(new_stream.first_ssrc()).c_str() + : "unsignaled", + ToString().c_str()); return false; } } @@ -770,8 +793,8 @@ bool BaseChannel::UpdateRemoteStreams_w(const MediaContentDescription* content, // Re-register the sink to update after changing the demuxer criteria. if (needs_re_registration && !RegisterRtpDemuxerSink_w()) { - error_desc = StringFormat("Failed to set up audio demuxing for mid='%s'.", - mid().c_str()); + error_desc = webrtc::StringFormat( + "Failed to set up audio demuxing for mid='%s'.", mid().c_str()); return false; } @@ -787,8 +810,8 @@ bool BaseChannel::UpdateRemoteStreams_w(const MediaContentDescription* content, RtpHeaderExtensions BaseChannel::GetDeduplicatedRtpHeaderExtensions( const RtpHeaderExtensions& extensions) { - return webrtc::RtpExtension::DeduplicateHeaderExtensions(extensions, - extensions_filter_); + return RtpExtension::DeduplicateHeaderExtensions(extensions, + extensions_filter_); } bool BaseChannel::MaybeAddHandledPayloadType(int payload_type) { @@ -811,21 +834,21 @@ bool BaseChannel::ClearHandledPayloadTypes() { return !was_empty; } -void BaseChannel::SignalSentPacket_n(const rtc::SentPacket& sent_packet) { +void BaseChannel::SignalSentPacket_n(const SentPacketInfo& sent_packet) { RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK(network_initialized()); media_send_channel()->OnPacketSent(sent_packet); } VoiceChannel::VoiceChannel( - rtc::Thread* worker_thread, - rtc::Thread* network_thread, - rtc::Thread* signaling_thread, + TaskQueueBase* worker_thread, + Thread* network_thread, + TaskQueueBase* signaling_thread, std::unique_ptr media_send_channel, std::unique_ptr media_receive_channel, absl::string_view mid, bool srtp_required, - webrtc::CryptoOptions crypto_options, + CryptoOptions crypto_options, UniqueRandomIdGenerator* ssrc_generator) : BaseChannel(worker_thread, network_thread, @@ -843,19 +866,6 @@ VoiceChannel::~VoiceChannel() { DisableMedia_w(); } -void VoiceChannel::InitCallback() { - RTC_DCHECK_RUN_ON(worker_thread()); - // TODO(bugs.webrtc.org/13931): Remove when values are set - // in a more sensible fashion - send_channel()->SetSendCodecChangedCallback([this]() { - RTC_DCHECK_RUN_ON(worker_thread()); - // Adjust receive streams based on send codec. - receive_channel()->SetReceiveNackEnabled( - send_channel()->SendCodecHasNack()); - receive_channel()->SetReceiveNonSenderRttEnabled( - send_channel()->SenderNonSenderRttEnabled()); - }); -} void VoiceChannel::UpdateMediaSendRecvState_w() { // Render incoming data if we're the active call, and we have the local // content. We receive data on the default channel and multiplexed streams. 
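Alongside the stream-update changes above, BaseChannel gains a SetFirstPacketSentCallback() next to the existing first-packet-received hook; SendPacket() invokes it once for the first media packet and then clears it. A self-contained sketch of that one-shot pattern, not part of the patch (the PacketNotifier class and its method names are illustrative, not WebRTC code):

```cpp
#include <functional>
#include <utility>

// Illustrative one-shot notifier mirroring the first-packet-sent hook added
// to BaseChannel in this patch; types and names here are hypothetical.
class PacketNotifier {
 public:
  void SetFirstPacketSentCallback(std::function<void()> callback) {
    on_first_packet_sent_ = std::move(callback);
  }

  void NotifyPacketSent(bool is_media) {
    if (on_first_packet_sent_ && is_media) {
      on_first_packet_sent_();
      on_first_packet_sent_ = nullptr;  // fire at most once
    }
  }

 private:
  std::function<void()> on_first_packet_sent_;
};

int main() {
  PacketNotifier notifier;
  int fired = 0;
  notifier.SetFirstPacketSentCallback([&] { ++fired; });
  notifier.NotifyPacketSent(/*is_media=*/false);  // ignored: not a media packet
  notifier.NotifyPacketSent(/*is_media=*/true);   // fires the callback once
  notifier.NotifyPacketSent(/*is_media=*/true);   // no-op after the first fire
  return fired == 1 ? 0 : 1;
}
```

In the real BaseChannel::SendPacket() the gate is options.info_signaled_after_sent.is_media, and the callback is registered and cleared on the network thread.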
@@ -887,12 +897,13 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, AudioReceiverParameters recv_params = last_recv_params_; MediaChannelParametersFromMediaDescription( - content->as_audio(), header_extensions, + content, header_extensions, webrtc::RtpTransceiverDirectionHasRecv(content->direction()), &recv_params); + recv_params.mid = mid(); if (!media_receive_channel()->SetReceiverParameters(recv_params)) { - error_desc = StringFormat( + error_desc = webrtc::StringFormat( "Failed to set local audio description recv parameters for m-section " "with mid='%s'.", mid().c_str()); @@ -901,7 +912,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, bool criteria_modified = false; if (webrtc::RtpTransceiverDirectionHasRecv(content->direction())) { - for (const AudioCodec& codec : content->as_audio()->codecs()) { + for (const Codec& codec : content->codecs()) { if (MaybeAddHandledPayloadType(codec.id)) { criteria_modified = true; } @@ -910,7 +921,7 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, last_recv_params_ = recv_params; - if (!UpdateLocalStreams_w(content->as_audio()->streams(), type, error_desc)) { + if (!UpdateLocalStreams_w(content->streams(), type, error_desc)) { RTC_DCHECK(!error_desc.empty()); return false; } @@ -918,16 +929,18 @@ bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, set_local_content_direction(content->direction()); UpdateMediaSendRecvState_w(); - RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0); + // Disabled because suggeting PTs takes thread jumps. + // TODO: https://issues.webrtc.org/360058654 - reenable after cleanup + // RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0); bool success = MaybeUpdateDemuxerAndRtpExtensions_w( criteria_modified, update_header_extensions - ? absl::optional(std::move(header_extensions)) - : absl::nullopt, + ? std::optional(std::move(header_extensions)) + : std::nullopt, error_desc); - RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); + // RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); return success; } @@ -939,19 +952,25 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content, RTC_LOG(LS_INFO) << "Setting remote voice description for " << ToString(); AudioSenderParameter send_params = last_send_params_; - RtpSendParametersFromMediaDescription(content->as_audio(), - extensions_filter(), &send_params); + RtpSendParametersFromMediaDescription(content, extensions_filter(), + &send_params); send_params.mid = mid(); bool parameters_applied = media_send_channel()->SetSenderParameters(send_params); if (!parameters_applied) { - error_desc = StringFormat( + error_desc = webrtc::StringFormat( "Failed to set remote audio description send parameters for m-section " "with mid='%s'.", mid().c_str()); return false; } + // The receive channel can send RTCP packets in the reverse direction. It + // should use the reduced size mode if a peer has requested it through the + // remote content. + media_receive_channel()->SetRtcpMode(content->rtcp_reduced_size() + ? RtcpMode::kReducedSize + : RtcpMode::kCompound); // Update Receive channel based on Send channel's codec information. // TODO(bugs.webrtc.org/14911): This is silly. Stop doing it. 
   media_receive_channel()->SetReceiveNackEnabled(
@@ -964,14 +983,14 @@ bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content,
 }
 VideoChannel::VideoChannel(
-    rtc::Thread* worker_thread,
-    rtc::Thread* network_thread,
-    rtc::Thread* signaling_thread,
+    TaskQueueBase* worker_thread,
+    Thread* network_thread,
+    TaskQueueBase* signaling_thread,
     std::unique_ptr media_send_channel,
     std::unique_ptr media_receive_channel,
     absl::string_view mid,
     bool srtp_required,
-    webrtc::CryptoOptions crypto_options,
+    CryptoOptions crypto_options,
     UniqueRandomIdGenerator* ssrc_generator)
     : BaseChannel(worker_thread,
                   network_thread,
@@ -1028,33 +1047,64 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
   VideoReceiverParameters recv_params = last_recv_params_;
   MediaChannelParametersFromMediaDescription(
-      content->as_video(), header_extensions,
+      content, header_extensions,
       webrtc::RtpTransceiverDirectionHasRecv(content->direction()),
       &recv_params);
   VideoSenderParameters send_params = last_send_params_;
+  // Ensure that there is a matching packetization for each send codec. If the
+  // other peer offered to exclusively send non-standard packetization but we
+  // only accept to receive standard packetization we effectively amend their
+  // offer by ignoring the packetization and fall back to standard packetization
+  // instead.
   bool needs_send_params_update = false;
   if (type == SdpType::kAnswer || type == SdpType::kPrAnswer) {
-    for (auto& send_codec : send_params.codecs) {
-      auto* recv_codec = FindMatchingCodec(recv_params.codecs, send_codec);
-      if (recv_codec) {
-        if (!recv_codec->packetization && send_codec.packetization) {
-          send_codec.packetization.reset();
-          needs_send_params_update = true;
-        } else if (recv_codec->packetization != send_codec.packetization) {
-          error_desc = StringFormat(
-              "Failed to set local answer due to invalid codec packetization "
-              "specified in m-section with mid='%s'.",
-              mid().c_str());
-          return false;
+    flat_set matched_codecs;
+    for (Codec& send_codec : send_params.codecs) {
+      if (absl::c_any_of(matched_codecs, [&](const Codec* c) {
+            return send_codec.Matches(*c);
+          })) {
+        continue;
+      }
+
+      std::vector recv_codecs =
+          FindAllMatchingCodecs(recv_params.codecs, send_codec);
+      if (recv_codecs.empty()) {
+        continue;
+      }
+
+      bool may_ignore_packetization = false;
+      bool has_matching_packetization = false;
+      for (const Codec* recv_codec : recv_codecs) {
+        if (!recv_codec->packetization.has_value() &&
+            send_codec.packetization.has_value()) {
+          may_ignore_packetization = true;
+        } else if (recv_codec->packetization == send_codec.packetization) {
+          has_matching_packetization = true;
+          break;
         }
       }
+
+      if (may_ignore_packetization) {
+        send_codec.packetization = std::nullopt;
+        needs_send_params_update = true;
+      } else if (!has_matching_packetization) {
+        error_desc = webrtc::StringFormat(
+            "Failed to set local answer due to incompatible codec "
+            "packetization for pt='%d' specified in m-section with mid='%s'.",
+            send_codec.id, mid().c_str());
+        return false;
+      }
+
+      if (has_matching_packetization) {
+        matched_codecs.insert(&send_codec);
+      }
     }
   }
   if (!media_receive_channel()->SetReceiverParameters(recv_params)) {
-    error_desc = StringFormat(
+    error_desc = webrtc::StringFormat(
         "Failed to set local video description recv parameters for m-section "
         "with mid='%s'.",
         mid().c_str());
@@ -1063,7 +1113,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
   bool criteria_modified = false;
   if (webrtc::RtpTransceiverDirectionHasRecv(content->direction())) {
-    for (const VideoCodec& codec : content->as_video()->codecs()) {
+    for (const Codec& codec : content->codecs()) {
       if (MaybeAddHandledPayloadType(codec.id))
         criteria_modified = true;
     }
@@ -1073,7 +1123,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
   if (needs_send_params_update) {
     if (!media_send_channel()->SetSenderParameters(send_params)) {
-      error_desc = StringFormat(
+      error_desc = webrtc::StringFormat(
           "Failed to set send parameters for m-section with mid='%s'.",
           mid().c_str());
       return false;
@@ -1081,7 +1131,7 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
     last_send_params_ = send_params;
   }
-  if (!UpdateLocalStreams_w(content->as_video()->streams(), type, error_desc)) {
+  if (!UpdateLocalStreams_w(content->streams(), type, error_desc)) {
     RTC_DCHECK(!error_desc.empty());
     return false;
   }
@@ -1094,8 +1144,8 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content,
   bool success = MaybeUpdateDemuxerAndRtpExtensions_w(
       criteria_modified,
       update_header_extensions
-          ? absl::optional(std::move(header_extensions))
-          : absl::nullopt,
+          ? std::optional(std::move(header_extensions))
+          : std::nullopt,
       error_desc);
   RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1);
@@ -1109,37 +1159,66 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
   TRACE_EVENT0("webrtc", "VideoChannel::SetRemoteContent_w");
   RTC_LOG(LS_INFO) << "Setting remote video description for " << ToString();
-  const VideoContentDescription* video = content->as_video();
-
   VideoSenderParameters send_params = last_send_params_;
-  RtpSendParametersFromMediaDescription(video, extensions_filter(),
+  RtpSendParametersFromMediaDescription(content, extensions_filter(),
                                         &send_params);
   send_params.mid = mid();
-  send_params.conference_mode = video->conference_mode();
+  send_params.conference_mode = content->conference_mode();
   VideoReceiverParameters recv_params = last_recv_params_;
+  // Ensure that there is a matching packetization for each receive codec. If we
+  // offered to exclusively receive a non-standard packetization but the other
+  // peer only accepts to send standard packetization we effectively amend our
+  // offer by ignoring the packetization and fall back to standard packetization
+  // instead.
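// [Technical editor aside - illustrative only, not part of the upstream
// patch.] The send-side loop above and the mirrored receive-side loop that
// follows make the same per-codec decision. A compact sketch with a
// hypothetical free function, assuming `remote_matches` holds the codecs
// returned by FindAllMatchingCodecs() for one local codec:
//
//   enum class PacketizationOutcome { kKeep, kClear, kReject };
//   PacketizationOutcome ResolvePacketization(
//       const webrtc::Codec& local,
//       const std::vector<const webrtc::Codec*>& remote_matches) {
//     bool may_clear = false;
//     bool exact_match = false;
//     for (const webrtc::Codec* remote : remote_matches) {
//       if (!remote->packetization.has_value() &&
//           local.packetization.has_value()) {
//         may_clear = true;  // peer has no packetization; we may drop ours
//       } else if (remote->packetization == local.packetization) {
//         exact_match = true;
//         break;
//       }
//     }
//     if (may_clear) return PacketizationOutcome::kClear;  // amend the offer
//     return exact_match ? PacketizationOutcome::kKeep
//                        : PacketizationOutcome::kReject;  // negotiation error
//   }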
bool needs_recv_params_update = false; if (type == SdpType::kAnswer || type == SdpType::kPrAnswer) { - for (auto& recv_codec : recv_params.codecs) { - auto* send_codec = FindMatchingCodec(send_params.codecs, recv_codec); - if (send_codec) { - if (!send_codec->packetization && recv_codec.packetization) { - recv_codec.packetization.reset(); - needs_recv_params_update = true; - } else if (send_codec->packetization != recv_codec.packetization) { - error_desc = StringFormat( - "Failed to set remote answer due to invalid codec packetization " - "specifid in m-section with mid='%s'.", - mid().c_str()); - return false; + flat_set matched_codecs; + for (Codec& recv_codec : recv_params.codecs) { + if (absl::c_any_of(matched_codecs, [&](const Codec* c) { + return recv_codec.Matches(*c); + })) { + continue; + } + + std::vector send_codecs = + FindAllMatchingCodecs(send_params.codecs, recv_codec); + if (send_codecs.empty()) { + continue; + } + + bool may_ignore_packetization = false; + bool has_matching_packetization = false; + for (const Codec* send_codec : send_codecs) { + if (!send_codec->packetization.has_value() && + recv_codec.packetization.has_value()) { + may_ignore_packetization = true; + } else if (send_codec->packetization == recv_codec.packetization) { + has_matching_packetization = true; + break; } } + + if (may_ignore_packetization) { + recv_codec.packetization = std::nullopt; + needs_recv_params_update = true; + } else if (!has_matching_packetization) { + error_desc = webrtc::StringFormat( + "Failed to set remote answer due to incompatible codec " + "packetization for pt='%d' specified in m-section with mid='%s'.", + recv_codec.id, mid().c_str()); + return false; + } + + if (has_matching_packetization) { + matched_codecs.insert(&recv_codec); + } } } if (!media_send_channel()->SetSenderParameters(send_params)) { - error_desc = StringFormat( + error_desc = webrtc::StringFormat( "Failed to set remote video description send parameters for m-section " "with mid='%s'.", mid().c_str()); @@ -1155,7 +1234,7 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, if (needs_recv_params_update) { if (!media_receive_channel()->SetReceiverParameters(recv_params)) { - error_desc = StringFormat( + error_desc = webrtc::StringFormat( "Failed to set recv parameters for m-section with mid='%s'.", mid().c_str()); return false; @@ -1166,4 +1245,4 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, return UpdateRemoteStreams_w(content, type, error_desc); } -} // namespace cricket +} // namespace webrtc diff --git a/pc/channel.h b/pc/channel.h index d3a7e89366..36744ceb53 100644 --- a/pc/channel.h +++ b/pc/channel.h @@ -15,12 +15,12 @@ #include #include +#include #include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/crypto/crypto_options.h" #include "api/jsep.h" #include "api/media_types.h" @@ -29,10 +29,10 @@ #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" #include "call/rtp_demuxer.h" #include "call/rtp_packet_sink_interface.h" #include "media/base/media_channel.h" -#include "media/base/media_channel_impl.h" #include "media/base/stream_params.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "pc/channel_interface.h" @@ -49,7 +49,7 @@ #include "rtc_base/thread_annotations.h" #include "rtc_base/unique_id_generator.h" -namespace cricket { +namespace webrtc { // 
BaseChannel contains logic common to voice and video, including enable, // marshaling calls to a worker and network threads, and connection and media @@ -71,7 +71,7 @@ class BaseChannel : public ChannelInterface, // TODO(tommi): Consider implementing these interfaces // via composition. public MediaChannelNetworkInterface, - public webrtc::RtpPacketSinkInterface { + public RtpPacketSinkInterface { public: // If `srtp_required` is true, the channel will not send or receive any // RTP/RTCP packets without using SRTP (either using SDES or DTLS-SRTP). @@ -82,19 +82,19 @@ class BaseChannel : public ChannelInterface, // Constructor for use when the MediaChannels are split BaseChannel( - rtc::Thread* worker_thread, - rtc::Thread* network_thread, - rtc::Thread* signaling_thread, + TaskQueueBase* worker_thread, + Thread* network_thread, + TaskQueueBase* signaling_thread, std::unique_ptr media_send_channel, std::unique_ptr media_receive_channel, absl::string_view mid, bool srtp_required, - webrtc::CryptoOptions crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator); + CryptoOptions crypto_options, + UniqueRandomIdGenerator* ssrc_generator); virtual ~BaseChannel(); - rtc::Thread* worker_thread() const { return worker_thread_; } - rtc::Thread* network_thread() const { return network_thread_; } + TaskQueueBase* worker_thread() const { return worker_thread_; } + Thread* network_thread() const { return network_thread_; } const std::string& mid() const override { return demuxer_criteria_.mid(); } // TODO(deadbeef): This is redundant; remove this. absl::string_view transport_name() const override { @@ -114,19 +114,19 @@ class BaseChannel : public ChannelInterface, // encryption, an SrtpTransport for SDES or a DtlsSrtpTransport for DTLS-SRTP. // This can be called from any thread and it hops to the network thread // internally. It would replace the `SetTransports` and its variants. - bool SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) override; + bool SetRtpTransport(RtpTransportInternal* rtp_transport) override; - webrtc::RtpTransportInternal* rtp_transport() const { + RtpTransportInternal* rtp_transport() const { RTC_DCHECK_RUN_ON(network_thread()); return rtp_transport_; } // Channel control bool SetLocalContent(const MediaContentDescription* content, - webrtc::SdpType type, + SdpType type, std::string& error_desc) override; bool SetRemoteContent(const MediaContentDescription* content, - webrtc::SdpType type, + SdpType type, std::string& error_desc) override; // Controls whether this channel will receive packets on the basis of // matching payload type alone. This is needed for legacy endpoints that @@ -149,15 +149,16 @@ class BaseChannel : public ChannelInterface, // Used for latency measurements. void SetFirstPacketReceivedCallback(std::function callback) override; + void SetFirstPacketSentCallback(std::function callback) override; // From RtpTransport - public for testing only void OnTransportReadyToSend(bool ready); // Only public for unit tests. Otherwise, consider protected. - int SetOption(SocketType type, rtc::Socket::Option o, int val) override; + int SetOption(SocketType type, Socket::Option o, int val) override; // RtpPacketSinkInterface overrides. 
- void OnRtpPacket(const webrtc::RtpPacketReceived& packet) override; + void OnRtpPacket(const RtpPacketReceived& packet) override; VideoMediaSendChannelInterface* video_media_send_channel() override { RTC_CHECK(false) << "Attempt to fetch video channel from non-video"; @@ -177,36 +178,34 @@ class BaseChannel : public ChannelInterface, } protected: - void set_local_content_direction(webrtc::RtpTransceiverDirection direction) + void set_local_content_direction(RtpTransceiverDirection direction) RTC_RUN_ON(worker_thread()) { local_content_direction_ = direction; } - webrtc::RtpTransceiverDirection local_content_direction() const + RtpTransceiverDirection local_content_direction() const RTC_RUN_ON(worker_thread()) { return local_content_direction_; } - void set_remote_content_direction(webrtc::RtpTransceiverDirection direction) + void set_remote_content_direction(RtpTransceiverDirection direction) RTC_RUN_ON(worker_thread()) { remote_content_direction_ = direction; } - webrtc::RtpTransceiverDirection remote_content_direction() const + RtpTransceiverDirection remote_content_direction() const RTC_RUN_ON(worker_thread()) { return remote_content_direction_; } - webrtc::RtpExtension::Filter extensions_filter() const { - return extensions_filter_; - } + RtpExtension::Filter extensions_filter() const { return extensions_filter_; } bool network_initialized() RTC_RUN_ON(network_thread()) { return media_send_channel()->HasNetworkInterface(); } bool enabled() const RTC_RUN_ON(worker_thread()) { return enabled_; } - rtc::Thread* signaling_thread() const { return signaling_thread_; } + TaskQueueBase* signaling_thread() const { return signaling_thread_; } // Call to verify that: // * The required content description directions have been set. @@ -220,19 +219,19 @@ class BaseChannel : public ChannelInterface, bool IsReadyToSendMedia_w() const RTC_RUN_ON(worker_thread()); // NetworkInterface implementation, called by MediaEngine - bool SendPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options) override; - bool SendRtcp(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options) override; + bool SendPacket(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options) override; + bool SendRtcp(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options) override; // From RtpTransportInternal void OnWritableState(bool writable); - void OnNetworkRouteChanged(absl::optional network_route); + void OnNetworkRouteChanged(std::optional network_route); bool SendPacket(bool rtcp, - rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options); + CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options); void EnableMedia_w() RTC_RUN_ON(worker_thread()); void DisableMedia_w() RTC_RUN_ON(worker_thread()); @@ -253,19 +252,19 @@ class BaseChannel : public ChannelInterface, virtual void UpdateMediaSendRecvState_w() RTC_RUN_ON(worker_thread()) = 0; bool UpdateLocalStreams_w(const std::vector& streams, - webrtc::SdpType type, + SdpType type, std::string& error_desc) RTC_RUN_ON(worker_thread()); bool UpdateRemoteStreams_w(const MediaContentDescription* content, - webrtc::SdpType type, + SdpType type, std::string& error_desc) RTC_RUN_ON(worker_thread()); virtual bool SetLocalContent_w(const MediaContentDescription* content, - webrtc::SdpType type, + SdpType type, std::string& error_desc) RTC_RUN_ON(worker_thread()) = 0; virtual bool SetRemoteContent_w(const MediaContentDescription* content, - webrtc::SdpType type, + SdpType type, std::string& error_desc) 
RTC_RUN_ON(worker_thread()) = 0; @@ -295,7 +294,7 @@ class BaseChannel : public ChannelInterface, // failed, which needs to be treated as an error. bool MaybeUpdateDemuxerAndRtpExtensions_w( bool update_demuxer, - absl::optional extensions, + std::optional extensions, std::string& error_desc) RTC_RUN_ON(worker_thread()); bool RegisterRtpDemuxerSink_w() RTC_RUN_ON(worker_thread()); @@ -309,22 +308,24 @@ class BaseChannel : public ChannelInterface, private: bool ConnectToRtpTransport_n() RTC_RUN_ON(network_thread()); void DisconnectFromRtpTransport_n() RTC_RUN_ON(network_thread()); - void SignalSentPacket_n(const rtc::SentPacket& sent_packet); + void SignalSentPacket_n(const SentPacketInfo& sent_packet); - rtc::Thread* const worker_thread_; - rtc::Thread* const network_thread_; - rtc::Thread* const signaling_thread_; - rtc::scoped_refptr alive_; + TaskQueueBase* const worker_thread_; + Thread* const network_thread_; + TaskQueueBase* const signaling_thread_; + scoped_refptr alive_; + // The functions are deleted after they have been called. std::function on_first_packet_received_ RTC_GUARDED_BY(network_thread()); + std::function on_first_packet_sent_ RTC_GUARDED_BY(network_thread()); - webrtc::RtpTransportInternal* rtp_transport_ - RTC_GUARDED_BY(network_thread()) = nullptr; + RtpTransportInternal* rtp_transport_ RTC_GUARDED_BY(network_thread()) = + nullptr; - std::vector > socket_options_ + std::vector > socket_options_ RTC_GUARDED_BY(network_thread()); - std::vector > rtcp_socket_options_ + std::vector > rtcp_socket_options_ RTC_GUARDED_BY(network_thread()); bool writable_ RTC_GUARDED_BY(network_thread()) = false; bool was_ever_writable_n_ RTC_GUARDED_BY(network_thread()) = false; @@ -333,7 +334,7 @@ class BaseChannel : public ChannelInterface, // Set to either kPreferEncryptedExtension or kDiscardEncryptedExtension // based on the supplied CryptoOptions. - const webrtc::RtpExtension::Filter extensions_filter_; + const RtpExtension::Filter extensions_filter_; // Currently the `enabled_` flag is accessed from the signaling thread as // well, but it can be changed only when signaling thread does a synchronous @@ -343,23 +344,23 @@ class BaseChannel : public ChannelInterface, bool payload_type_demuxing_enabled_ RTC_GUARDED_BY(worker_thread()) = true; std::vector local_streams_ RTC_GUARDED_BY(worker_thread()); std::vector remote_streams_ RTC_GUARDED_BY(worker_thread()); - webrtc::RtpTransceiverDirection local_content_direction_ RTC_GUARDED_BY( - worker_thread()) = webrtc::RtpTransceiverDirection::kInactive; - webrtc::RtpTransceiverDirection remote_content_direction_ RTC_GUARDED_BY( - worker_thread()) = webrtc::RtpTransceiverDirection::kInactive; + RtpTransceiverDirection local_content_direction_ + RTC_GUARDED_BY(worker_thread()) = RtpTransceiverDirection::kInactive; + RtpTransceiverDirection remote_content_direction_ + RTC_GUARDED_BY(worker_thread()) = RtpTransceiverDirection::kInactive; // Cached list of payload types, used if payload type demuxing is re-enabled. - webrtc::flat_set payload_types_ RTC_GUARDED_BY(worker_thread()); + flat_set payload_types_ RTC_GUARDED_BY(worker_thread()); // A stored copy of the rtp header extensions as applied to the transport. 
RtpHeaderExtensions rtp_header_extensions_ RTC_GUARDED_BY(worker_thread()); // TODO(bugs.webrtc.org/12239): Modified on worker thread, accessed // on network thread in RegisterRtpDemuxerSink_n (called from Init_w) - webrtc::RtpDemuxerCriteria demuxer_criteria_; + RtpDemuxerCriteria demuxer_criteria_; // This generator is used to generate SSRCs for local streams. // This is needed in cases where SSRCs are not negotiated or set explicitly // like in Simulcast. // This object is not owned by the channel so it must outlive it. - rtc::UniqueRandomIdGenerator* const ssrc_generator_; + UniqueRandomIdGenerator* const ssrc_generator_; }; // VoiceChannel is a specialization that adds support for early media, DTMF, @@ -367,15 +368,15 @@ class BaseChannel : public ChannelInterface, class VoiceChannel : public BaseChannel { public: VoiceChannel( - rtc::Thread* worker_thread, - rtc::Thread* network_thread, - rtc::Thread* signaling_thread, + TaskQueueBase* worker_thread, + Thread* network_thread, + TaskQueueBase* signaling_thread, std::unique_ptr send_channel_impl, std::unique_ptr receive_channel_impl, absl::string_view mid, bool srtp_required, - webrtc::CryptoOptions crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator); + CryptoOptions crypto_options, + UniqueRandomIdGenerator* ssrc_generator); ~VoiceChannel(); @@ -409,20 +410,17 @@ class VoiceChannel : public BaseChannel { return receive_channel(); } - cricket::MediaType media_type() const override { - return cricket::MEDIA_TYPE_AUDIO; - } + MediaType media_type() const override { return MediaType::AUDIO; } private: - void InitCallback(); // overrides from BaseChannel void UpdateMediaSendRecvState_w() RTC_RUN_ON(worker_thread()) override; bool SetLocalContent_w(const MediaContentDescription* content, - webrtc::SdpType type, + SdpType type, std::string& error_desc) RTC_RUN_ON(worker_thread()) override; bool SetRemoteContent_w(const MediaContentDescription* content, - webrtc::SdpType type, + SdpType type, std::string& error_desc) RTC_RUN_ON(worker_thread()) override; @@ -438,15 +436,15 @@ class VoiceChannel : public BaseChannel { class VideoChannel : public BaseChannel { public: VideoChannel( - rtc::Thread* worker_thread, - rtc::Thread* network_thread, - rtc::Thread* signaling_thread, + TaskQueueBase* worker_thread, + Thread* network_thread, + TaskQueueBase* signaling_thread, std::unique_ptr media_send_channel, std::unique_ptr media_receive_channel, absl::string_view mid, bool srtp_required, - webrtc::CryptoOptions crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator); + CryptoOptions crypto_options, + UniqueRandomIdGenerator* ssrc_generator); ~VideoChannel(); VideoChannel* AsVideoChannel() override { return this; } @@ -479,19 +477,17 @@ class VideoChannel : public BaseChannel { return receive_channel(); } - cricket::MediaType media_type() const override { - return cricket::MEDIA_TYPE_VIDEO; - } + MediaType media_type() const override { return MediaType::VIDEO; } private: // overrides from BaseChannel void UpdateMediaSendRecvState_w() RTC_RUN_ON(worker_thread()) override; bool SetLocalContent_w(const MediaContentDescription* content, - webrtc::SdpType type, + SdpType type, std::string& error_desc) RTC_RUN_ON(worker_thread()) override; bool SetRemoteContent_w(const MediaContentDescription* content, - webrtc::SdpType type, + SdpType type, std::string& error_desc) RTC_RUN_ON(worker_thread()) override; @@ -503,6 +499,16 @@ class VideoChannel : public BaseChannel { VideoReceiverParameters last_recv_params_ RTC_GUARDED_BY(worker_thread()); 
}; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::BaseChannel; +using ::webrtc::VideoChannel; +using ::webrtc::VoiceChannel; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // PC_CHANNEL_H_ diff --git a/pc/channel_interface.h b/pc/channel_interface.h index 8d6a9fe745..c89ada5ccd 100644 --- a/pc/channel_interface.h +++ b/pc/channel_interface.h @@ -11,7 +11,7 @@ #ifndef PC_CHANNEL_INTERFACE_H_ #define PC_CHANNEL_INTERFACE_H_ -#include +#include #include #include @@ -19,19 +19,19 @@ #include "api/jsep.h" #include "api/media_types.h" #include "media/base/media_channel.h" +#include "media/base/media_config.h" +#include "media/base/stream_params.h" #include "pc/rtp_transport_internal.h" +#include "pc/session_description.h" namespace webrtc { class Call; class VideoBitrateAllocatorFactory; +class VideoChannel; +class VoiceChannel; } // namespace webrtc -namespace cricket { - -class VoiceChannel; -class VideoChannel; -class MediaContentDescription; -struct MediaConfig; +namespace webrtc { // A Channel is a construct that groups media streams of the same type // (audio or video), both outgoing and incoming. @@ -47,7 +47,7 @@ struct MediaConfig; class ChannelInterface { public: virtual ~ChannelInterface() = default; - virtual cricket::MediaType media_type() const = 0; + virtual MediaType media_type() const = 0; virtual VideoChannel* AsVideoChannel() = 0; virtual VoiceChannel* AsVoiceChannel() = 0; @@ -78,13 +78,14 @@ class ChannelInterface { // Used for latency measurements. virtual void SetFirstPacketReceivedCallback( std::function callback) = 0; + virtual void SetFirstPacketSentCallback(std::function callback) = 0; // Channel control virtual bool SetLocalContent(const MediaContentDescription* content, - webrtc::SdpType type, + SdpType type, std::string& error_desc) = 0; virtual bool SetRemoteContent(const MediaContentDescription* content, - webrtc::SdpType type, + SdpType type, std::string& error_desc) = 0; virtual bool SetPayloadTypeDemuxingEnabled(bool enabled) = 0; @@ -97,9 +98,17 @@ class ChannelInterface { // * An RtpTransport without encryption. // * An SrtpTransport for SDES. // * A DtlsSrtpTransport for DTLS-SRTP. - virtual bool SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) = 0; + virtual bool SetRtpTransport(RtpTransportInternal* rtp_transport) = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::ChannelInterface; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // PC_CHANNEL_INTERFACE_H_ diff --git a/pc/channel_unittest.cc b/pc/channel_unittest.cc index 0d7f0b0cd0..bdb4a05a9d 100644 --- a/pc/channel_unittest.cc +++ b/pc/channel_unittest.cc @@ -13,13 +13,22 @@ #include #include +#include +#include #include -#include +#include +#include #include "absl/functional/any_invocable.h" #include "api/array_view.h" #include "api/audio_options.h" +#include "api/crypto/crypto_options.h" +#include "api/jsep.h" +#include "api/rtp_headers.h" #include "api/rtp_parameters.h" +#include "api/rtp_transceiver_direction.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "media/base/codec.h" #include "media/base/fake_media_engine.h" @@ -27,46 +36,55 @@ #include "media/base/media_channel.h" #include "media/base/media_constants.h" #include "media/base/rid_description.h" +#include "media/base/stream_params.h" #include "p2p/base/candidate_pair_interface.h" -#include "p2p/base/dtls_transport_internal.h" -#include "p2p/base/fake_dtls_transport.h" -#include "p2p/base/fake_packet_transport.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/packet_transport_internal.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "p2p/dtls/fake_dtls_transport.h" +#include "p2p/test/fake_packet_transport.h" #include "pc/dtls_srtp_transport.h" #include "pc/jsep_transport.h" #include "pc/rtp_transport.h" +#include "pc/rtp_transport_internal.h" +#include "pc/session_description.h" #include "rtc_base/arraysize.h" #include "rtc_base/buffer.h" #include "rtc_base/byte_order.h" #include "rtc_base/checks.h" +#include "rtc_base/network_route.h" #include "rtc_base/rtc_certificate.h" +#include "rtc_base/socket.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/task_queue_for_test.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/unique_id_generator.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/scoped_key_value_config.h" -using cricket::DtlsTransportInternal; -using cricket::FakeVoiceMediaReceiveChannel; -using cricket::FakeVoiceMediaSendChannel; -using cricket::RidDescription; -using cricket::RidDirection; -using cricket::StreamParams; -using webrtc::RtpTransceiverDirection; -using webrtc::SdpType; - namespace { -const cricket::AudioCodec kPcmuCodec = - cricket::CreateAudioCodec(0, "PCMU", 64000, 1); -const cricket::AudioCodec kPcmaCodec = - cricket::CreateAudioCodec(8, "PCMA", 64000, 1); -const cricket::AudioCodec kIsacCodec = - cricket::CreateAudioCodec(103, "ISAC", 40000, 1); -const cricket::VideoCodec kH264Codec = cricket::CreateVideoCodec(97, "H264"); -const cricket::VideoCodec kH264SvcCodec = - cricket::CreateVideoCodec(99, "H264-SVC"); + +using ::testing::AllOf; +using ::testing::ElementsAre; +using ::testing::Field; +using ::webrtc::DtlsTransportInternal; +using ::webrtc::FakeVoiceMediaReceiveChannel; +using ::webrtc::FakeVoiceMediaSendChannel; +using ::webrtc::RidDescription; +using ::webrtc::RidDirection; +using ::webrtc::RtpTransceiverDirection; +using ::webrtc::SdpType; +using ::webrtc::StreamParams; + +const webrtc::Codec kPcmuCodec = webrtc::CreateAudioCodec(0, "PCMU", 64000, 1); +const webrtc::Codec kPcmaCodec = webrtc::CreateAudioCodec(8, "PCMA", 64000, 1); +const webrtc::Codec kIsacCodec = + 
webrtc::CreateAudioCodec(103, "ISAC", 40000, 1); +const webrtc::Codec kH264Codec = webrtc::CreateVideoCodec(97, "H264"); +const webrtc::Codec kH264SvcCodec = webrtc::CreateVideoCodec(99, "H264-SVC"); const uint32_t kSsrc1 = 0x1111; const uint32_t kSsrc2 = 0x2222; const uint32_t kSsrc3 = 0x3333; @@ -75,15 +93,12 @@ const int kAudioPts[] = {0, 8}; const int kVideoPts[] = {97, 99}; enum class NetworkIsWorker { Yes, No }; -} // namespace - template class Traits { @@ -94,30 +109,27 @@ class Traits { typedef MediaSendChannelInterfaceT MediaSendChannelInterface; typedef MediaReceiveChannelInterfaceT MediaReceiveChannelInterface; typedef ContentT Content; - typedef CodecT Codec; typedef MediaInfoT MediaInfo; typedef OptionsT Options; }; -class VoiceTraits : public Traits {}; - -class VideoTraits : public Traits {}; +class VoiceTraits : public Traits {}; + +class VideoTraits : public Traits {}; // Base class for Voice/Video tests template @@ -133,16 +145,16 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { }; ChannelTest(bool verify_playout, - rtc::ArrayView rtp_data, - rtc::ArrayView rtcp_data, + webrtc::ArrayView rtp_data, + webrtc::ArrayView rtcp_data, NetworkIsWorker network_is_worker) : verify_playout_(verify_playout), rtp_packet_(rtp_data.data(), rtp_data.size()), rtcp_packet_(rtcp_data.data(), rtcp_data.size()) { if (network_is_worker == NetworkIsWorker::Yes) { - network_thread_ = rtc::Thread::Current(); + network_thread_ = webrtc::Thread::Current(); } else { - network_thread_keeper_ = rtc::Thread::Create(); + network_thread_keeper_ = webrtc::Thread::Create(); network_thread_keeper_->SetName("Network", nullptr); network_thread_ = network_thread_keeper_.get(); } @@ -154,6 +166,19 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { SendTask(network_thread_, [this]() { network_thread_safety_->SetNotAlive(); DeinitChannels(); + + // Transports must be created and destroyed on the network thread. + fake_rtp_dtls_transport1_ = nullptr; + fake_rtcp_dtls_transport1_ = nullptr; + fake_rtp_dtls_transport2_ = nullptr; + fake_rtcp_dtls_transport2_ = nullptr; + fake_rtp_packet_transport1_ = nullptr; + fake_rtcp_packet_transport1_ = nullptr; + fake_rtp_packet_transport2_ = nullptr; + fake_rtcp_packet_transport2_ = nullptr; + rtp_transport1_ = nullptr; + rtp_transport2_ = nullptr; + new_rtp_transport_ = nullptr; }); } } @@ -188,67 +213,71 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { // Make sure if using raw packet transports, they're used for both // channels. RTC_DCHECK_EQ(flags1 & RAW_PACKET_TRANSPORT, flags2 & RAW_PACKET_TRANSPORT); - rtc::Thread* worker_thread = rtc::Thread::Current(); - // Based on flags, create fake DTLS or raw packet transports. - if (flags1 & RAW_PACKET_TRANSPORT) { - fake_rtp_packet_transport1_.reset( - new rtc::FakePacketTransport("channel1_rtp")); - if (!(flags1 & RTCP_MUX)) { - fake_rtcp_packet_transport1_.reset( - new rtc::FakePacketTransport("channel1_rtcp")); - } - } else { - // Confirmed to work with KT_RSA and KT_ECDSA. 
- fake_rtp_dtls_transport1_.reset(new cricket::FakeDtlsTransport( - "channel1", cricket::ICE_CANDIDATE_COMPONENT_RTP, network_thread_)); - if (!(flags1 & RTCP_MUX)) { - fake_rtcp_dtls_transport1_.reset(new cricket::FakeDtlsTransport( - "channel1", cricket::ICE_CANDIDATE_COMPONENT_RTCP, - network_thread_)); - } - if (flags1 & DTLS) { - auto cert1 = rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT)); - fake_rtp_dtls_transport1_->SetLocalCertificate(cert1); - if (fake_rtcp_dtls_transport1_) { - fake_rtcp_dtls_transport1_->SetLocalCertificate(cert1); + webrtc::Thread* worker_thread = webrtc::Thread::Current(); + + network_thread_->BlockingCall([&] { + // Based on flags, create fake DTLS or raw packet transports. + + if (flags1 & RAW_PACKET_TRANSPORT) { + fake_rtp_packet_transport1_.reset( + new webrtc::FakePacketTransport("channel1_rtp")); + if (!(flags1 & RTCP_MUX)) { + fake_rtcp_packet_transport1_.reset( + new webrtc::FakePacketTransport("channel1_rtcp")); + } + } else { + // Confirmed to work with KT_RSA and KT_ECDSA. + fake_rtp_dtls_transport1_.reset(new webrtc::FakeDtlsTransport( + "channel1", webrtc::ICE_CANDIDATE_COMPONENT_RTP, network_thread_)); + if (!(flags1 & RTCP_MUX)) { + fake_rtcp_dtls_transport1_.reset(new webrtc::FakeDtlsTransport( + "channel1", webrtc::ICE_CANDIDATE_COMPONENT_RTCP, + network_thread_)); + } + if (flags1 & DTLS) { + auto cert1 = webrtc::RTCCertificate::Create( + webrtc::SSLIdentity::Create("session1", webrtc::KT_DEFAULT)); + fake_rtp_dtls_transport1_->SetLocalCertificate(cert1); + if (fake_rtcp_dtls_transport1_) { + fake_rtcp_dtls_transport1_->SetLocalCertificate(cert1); + } } } - } - // Based on flags, create fake DTLS or raw packet transports. - if (flags2 & RAW_PACKET_TRANSPORT) { - fake_rtp_packet_transport2_.reset( - new rtc::FakePacketTransport("channel2_rtp")); - if (!(flags2 & RTCP_MUX)) { - fake_rtcp_packet_transport2_.reset( - new rtc::FakePacketTransport("channel2_rtcp")); - } - } else { - // Confirmed to work with KT_RSA and KT_ECDSA. - fake_rtp_dtls_transport2_.reset(new cricket::FakeDtlsTransport( - "channel2", cricket::ICE_CANDIDATE_COMPONENT_RTP, network_thread_)); - if (!(flags2 & RTCP_MUX)) { - fake_rtcp_dtls_transport2_.reset(new cricket::FakeDtlsTransport( - "channel2", cricket::ICE_CANDIDATE_COMPONENT_RTCP, - network_thread_)); - } - if (flags2 & DTLS) { - auto cert2 = rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("session2", rtc::KT_DEFAULT)); - fake_rtp_dtls_transport2_->SetLocalCertificate(cert2); - if (fake_rtcp_dtls_transport2_) { - fake_rtcp_dtls_transport2_->SetLocalCertificate(cert2); + // Based on flags, create fake DTLS or raw packet transports. + if (flags2 & RAW_PACKET_TRANSPORT) { + fake_rtp_packet_transport2_.reset( + new webrtc::FakePacketTransport("channel2_rtp")); + if (!(flags2 & RTCP_MUX)) { + fake_rtcp_packet_transport2_.reset( + new webrtc::FakePacketTransport("channel2_rtcp")); + } + } else { + // Confirmed to work with KT_RSA and KT_ECDSA. 
+ fake_rtp_dtls_transport2_.reset(new webrtc::FakeDtlsTransport( + "channel2", webrtc::ICE_CANDIDATE_COMPONENT_RTP, network_thread_)); + if (!(flags2 & RTCP_MUX)) { + fake_rtcp_dtls_transport2_.reset(new webrtc::FakeDtlsTransport( + "channel2", webrtc::ICE_CANDIDATE_COMPONENT_RTCP, + network_thread_)); + } + if (flags2 & DTLS) { + auto cert2 = webrtc::RTCCertificate::Create( + webrtc::SSLIdentity::Create("session2", webrtc::KT_DEFAULT)); + fake_rtp_dtls_transport2_->SetLocalCertificate(cert2); + if (fake_rtcp_dtls_transport2_) { + fake_rtcp_dtls_transport2_->SetLocalCertificate(cert2); + } } } - } - rtp_transport1_ = CreateRtpTransportBasedOnFlags( - fake_rtp_packet_transport1_.get(), fake_rtcp_packet_transport1_.get(), - fake_rtp_dtls_transport1_.get(), fake_rtcp_dtls_transport1_.get(), - flags1); - rtp_transport2_ = CreateRtpTransportBasedOnFlags( - fake_rtp_packet_transport2_.get(), fake_rtcp_packet_transport2_.get(), - fake_rtp_dtls_transport2_.get(), fake_rtcp_dtls_transport2_.get(), - flags2); + rtp_transport1_ = CreateRtpTransportBasedOnFlags( + fake_rtp_packet_transport1_.get(), fake_rtcp_packet_transport1_.get(), + fake_rtp_dtls_transport1_.get(), fake_rtcp_dtls_transport1_.get(), + flags1); + rtp_transport2_ = CreateRtpTransportBasedOnFlags( + fake_rtp_packet_transport2_.get(), fake_rtcp_packet_transport2_.get(), + fake_rtp_dtls_transport2_.get(), fake_rtcp_dtls_transport2_.get(), + flags2); + }); channel1_ = CreateChannel(worker_thread, network_thread_, std::move(ch1s), std::move(ch1r), rtp_transport1_.get(), flags1); @@ -274,16 +303,16 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { } } std::unique_ptr CreateChannel( - rtc::Thread* worker_thread, - rtc::Thread* network_thread, + webrtc::Thread* worker_thread, + webrtc::Thread* network_thread, std::unique_ptr ch_send, std::unique_ptr ch_receive, webrtc::RtpTransportInternal* rtp_transport, int flags); std::unique_ptr CreateRtpTransportBasedOnFlags( - rtc::PacketTransportInternal* rtp_packet_transport, - rtc::PacketTransportInternal* rtcp_packet_transport, + webrtc::PacketTransportInternal* rtp_packet_transport, + webrtc::PacketTransportInternal* rtcp_packet_transport, DtlsTransportInternal* rtp_dtls_transport, DtlsTransportInternal* rtcp_dtls_transport, int flags) { @@ -322,10 +351,10 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { } std::unique_ptr CreateUnencryptedTransport( - rtc::PacketTransportInternal* rtp_packet_transport, - rtc::PacketTransportInternal* rtcp_packet_transport) { + webrtc::PacketTransportInternal* rtp_packet_transport, + webrtc::PacketTransportInternal* rtcp_packet_transport) { auto rtp_transport = std::make_unique( - rtcp_packet_transport == nullptr); + rtcp_packet_transport == nullptr, field_trials_); SendTask(network_thread_, [&rtp_transport, rtp_packet_transport, rtcp_packet_transport] { @@ -338,8 +367,8 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { } std::unique_ptr CreateDtlsSrtpTransport( - cricket::DtlsTransportInternal* rtp_dtls_transport, - cricket::DtlsTransportInternal* rtcp_dtls_transport) { + webrtc::DtlsTransportInternal* rtp_dtls_transport, + webrtc::DtlsTransportInternal* rtcp_dtls_transport) { auto dtls_srtp_transport = std::make_unique( rtcp_dtls_transport == nullptr, field_trials_); @@ -440,27 +469,28 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { return result; } - void SendRtp(typename T::MediaSendChannel* media_channel, rtc::Buffer data) { + void SendRtp(typename 
T::MediaSendChannel* media_channel, + webrtc::Buffer data) { network_thread_->PostTask(webrtc::SafeTask( network_thread_safety_, [media_channel, data = std::move(data)]() { media_channel->SendPacket(data.data(), data.size(), - rtc::PacketOptions()); + webrtc::AsyncSocketPacketOptions()); })); } void SendRtp1() { - SendRtp1(rtc::Buffer(rtp_packet_.data(), rtp_packet_.size())); + SendRtp1(webrtc::Buffer(rtp_packet_.data(), rtp_packet_.size())); } - void SendRtp1(rtc::Buffer data) { + void SendRtp1(webrtc::Buffer data) { SendRtp(media_send_channel1_impl(), std::move(data)); } void SendRtp2() { - SendRtp2(rtc::Buffer(rtp_packet_.data(), rtp_packet_.size())); + SendRtp2(webrtc::Buffer(rtp_packet_.data(), rtp_packet_.size())); } - void SendRtp2(rtc::Buffer data) { + void SendRtp2(webrtc::Buffer data) { SendRtp(media_send_channel2_impl(), std::move(data)); } @@ -482,20 +512,22 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { } // Methods to check custom data. bool CheckCustomRtp1(uint32_t ssrc, int sequence_number, int pl_type = -1) { - rtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type); + webrtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type); return media_receive_channel1_impl()->CheckRtp(data.data(), data.size()); } bool CheckCustomRtp2(uint32_t ssrc, int sequence_number, int pl_type = -1) { - rtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type); + webrtc::Buffer data = CreateRtpData(ssrc, sequence_number, pl_type); return media_receive_channel2_impl()->CheckRtp(data.data(), data.size()); } - rtc::Buffer CreateRtpData(uint32_t ssrc, int sequence_number, int pl_type) { - rtc::Buffer data(rtp_packet_.data(), rtp_packet_.size()); + webrtc::Buffer CreateRtpData(uint32_t ssrc, + int sequence_number, + int pl_type) { + webrtc::Buffer data(rtp_packet_.data(), rtp_packet_.size()); // Set SSRC in the rtp packet copy. 
- rtc::SetBE32(data.data() + 8, ssrc); - rtc::SetBE16(data.data() + 2, sequence_number); + webrtc::SetBE32(data.data() + 8, ssrc); + webrtc::SetBE16(data.data() + 2, sequence_number); if (pl_type >= 0) { - rtc::Set8(data.data(), 1, static_cast(pl_type)); + webrtc::Set8(data.data(), 1, static_cast(pl_type)); } return data; } @@ -504,8 +536,8 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { bool CheckNoRtp2() { return media_send_channel2_impl()->CheckNoRtp(); } void CreateContent(int flags, - const cricket::AudioCodec& audio_codec, - const cricket::VideoCodec& video_codec, + const webrtc::Codec& audio_codec, + const webrtc::Codec& video_codec, typename T::Content* content) { // overridden in specialized classes } @@ -528,24 +560,20 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { class ScopedCallThread { public: explicit ScopedCallThread(absl::AnyInvocable functor) - : thread_(rtc::Thread::Create()) { + : thread_(webrtc::Thread::Create()) { thread_->Start(); thread_->PostTask(std::move(functor)); } ~ScopedCallThread() { thread_->Stop(); } - rtc::Thread* thread() { return thread_.get(); } + webrtc::Thread* thread() { return thread_.get(); } private: - std::unique_ptr thread_; + std::unique_ptr thread_; }; - bool CodecMatches(const typename T::Codec& c1, const typename T::Codec& c2) { - return false; // overridden in specialized classes - } - - cricket::CandidatePairInterface* last_selected_candidate_pair() { + webrtc::CandidatePairInterface* last_selected_candidate_pair() { return last_selected_candidate_pair_; } @@ -610,8 +638,8 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_EQ(0U, media_send_channel1_impl()->send_codecs().size()); EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, err)); ASSERT_EQ(1U, media_send_channel1_impl()->send_codecs().size()); - EXPECT_TRUE(CodecMatches(content.codecs()[0], - media_send_channel1_impl()->send_codecs()[0])); + EXPECT_EQ(content.codecs()[0], + media_send_channel1_impl()->send_codecs()[0]); } // Test that SetLocalContent and SetRemoteContent properly configure @@ -658,8 +686,8 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_EQ(0U, media_send_channel1_impl()->send_codecs().size()); EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, err)); ASSERT_EQ(1U, media_send_channel1_impl()->send_codecs().size()); - EXPECT_TRUE(CodecMatches(content.codecs()[0], - media_send_channel1_impl()->send_codecs()[0])); + EXPECT_EQ(content.codecs()[0], + media_send_channel1_impl()->send_codecs()[0]); } // Test that SetLocalContent and SetRemoteContent properly set RTCP @@ -679,16 +707,47 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_TRUE(channel2_->SetRemoteContent(&content, SdpType::kAnswer, err)); } + // Test that SetLocalContent and SetRemoteContent properly set RTCP + // reduced_size. + void TestSetContentsRtcpReducedSize() { + CreateChannels(0, 0); + typename T::Content content; + CreateContent(0, kPcmuCodec, kH264Codec, &content); + // Both sides agree on reduced size. + content.set_rtcp_reduced_size(true); + std::string err; + // The RTCP mode is a send property and should be configured based on + // the remote content and not the local content. 
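// [Technical editor aside - illustrative only, not part of the upstream
// patch.] In SDP terms, content.set_rtcp_reduced_size(true) models an
// "a=rtcp-rsize" attribute on the m-section (RFC 5506), e.g.:
//
//   webrtc::AudioContentDescription content;   // as built by CreateContent()
//   content.set_rtcp_reduced_size(true);       // peer requests reduced-size
//
// The assertions that follow then check that only the *remote* description
// flips the receive channel's RTCP mode, matching the production change in
// VoiceChannel/VideoChannel::SetRemoteContent_w above.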
+ EXPECT_TRUE(channel1_->SetLocalContent(&content, SdpType::kOffer, err)); + EXPECT_EQ(media_receive_channel1_impl()->RtcpMode(), + webrtc::RtcpMode::kCompound); + EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, err)); + EXPECT_EQ(media_receive_channel1_impl()->RtcpMode(), + webrtc::RtcpMode::kReducedSize); + // Only initiator supports reduced size. + EXPECT_TRUE(channel2_->SetLocalContent(&content, SdpType::kOffer, err)); + EXPECT_EQ(media_receive_channel2_impl()->RtcpMode(), + webrtc::RtcpMode::kCompound); + content.set_rtcp_reduced_size(false); + EXPECT_TRUE(channel2_->SetRemoteContent(&content, SdpType::kAnswer, err)); + EXPECT_EQ(media_receive_channel2_impl()->RtcpMode(), + webrtc::RtcpMode::kCompound); + // Peer renegotiates without reduced size. + EXPECT_TRUE(channel1_->SetRemoteContent(&content, SdpType::kAnswer, err)); + EXPECT_EQ(media_receive_channel1_impl()->RtcpMode(), + webrtc::RtcpMode::kCompound); + } + // Test that SetLocalContent and SetRemoteContent properly // handles adding and removing StreamParams when the action is a full // SdpType::kOffer / SdpType::kAnswer. void TestChangeStreamParamsInContent() { - cricket::StreamParams stream1; + webrtc::StreamParams stream1; stream1.id = "stream1"; stream1.ssrcs.push_back(kSsrc1); stream1.cname = "stream1_cname"; - cricket::StreamParams stream2; + webrtc::StreamParams stream2; stream2.id = "stream2"; stream2.ssrcs.push_back(kSsrc2); stream2.cname = "stream2_cname"; @@ -922,10 +981,10 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { WaitForThreads(); media_send_channel1_impl->set_num_network_route_changes(0); SendTask(network_thread_, [this] { - rtc::NetworkRoute network_route; + webrtc::NetworkRoute network_route; // The transport channel becomes disconnected. fake_rtp_dtls_transport1_->ice_transport()->SignalNetworkRouteChanged( - absl::optional(network_route)); + std::optional(network_route)); }); WaitForThreads(); EXPECT_EQ(1, media_send_channel1_impl->num_network_route_changes()); @@ -933,18 +992,18 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { media_send_channel1_impl->set_num_network_route_changes(0); SendTask(network_thread_, [this] { - rtc::NetworkRoute network_route; + webrtc::NetworkRoute network_route; network_route.connected = true; network_route.local = - rtc::RouteEndpoint::CreateWithNetworkId(kLocalNetId); + webrtc::RouteEndpoint::CreateWithNetworkId(kLocalNetId); network_route.remote = - rtc::RouteEndpoint::CreateWithNetworkId(kRemoteNetId); + webrtc::RouteEndpoint::CreateWithNetworkId(kRemoteNetId); network_route.last_sent_packet_id = kLastPacketId; network_route.packet_overhead = kTransportOverheadPerPacket; // The transport channel becomes connected. 
fake_rtp_dtls_transport1_->ice_transport()->SignalNetworkRouteChanged( - absl::optional(network_route)); + std::optional(network_route)); }); WaitForThreads(); EXPECT_EQ(1, media_send_channel1_impl->num_network_route_changes()); @@ -1070,7 +1129,8 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_TRUE(SendAccept()); ScopedCallThread send_rtp1([this] { SendRtp1(); }); ScopedCallThread send_rtp2([this] { SendRtp2(); }); - rtc::Thread* involved_threads[] = {send_rtp1.thread(), send_rtp2.thread()}; + webrtc::Thread* involved_threads[] = {send_rtp1.thread(), + send_rtp2.thread()}; WaitForThreads(involved_threads); EXPECT_TRUE(CheckRtp1()); EXPECT_TRUE(CheckRtp2()); @@ -1320,7 +1380,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { return channel1_->SetRemoteContent(&content, SdpType::kOffer, NULL); } - webrtc::RtpParameters BitrateLimitedParameters(absl::optional limit) { + webrtc::RtpParameters BitrateLimitedParameters(std::optional limit) { webrtc::RtpParameters parameters; webrtc::RtpEncodingParameters encoding; encoding.max_bitrate_bps = limit; @@ -1329,7 +1389,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { } void VerifyMaxBitrate(const webrtc::RtpParameters& parameters, - absl::optional expected_bitrate) { + std::optional expected_bitrate) { EXPECT_EQ(1UL, parameters.encodings.size()); EXPECT_EQ(expected_bitrate, parameters.encodings[0].max_bitrate_bps); } @@ -1341,7 +1401,7 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { SdpType::kOffer, err)); EXPECT_EQ(media_send_channel1_impl()->max_bps(), -1); VerifyMaxBitrate(media_send_channel1()->GetRtpSendParameters(kSsrc1), - absl::nullopt); + std::nullopt); } // Test that when a channel gets new RtpTransport with a call to @@ -1357,21 +1417,20 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { CreateChannels(DTLS, DTLS); - new_rtp_transport_ = CreateDtlsSrtpTransport( - fake_rtp_dtls_transport2_.get(), fake_rtcp_dtls_transport2_.get()); - bool rcv_success, send_success; int rcv_buf, send_buf; SendTask(network_thread_, [&] { - channel1_->SetOption(cricket::BaseChannel::ST_RTP, - rtc::Socket::Option::OPT_SNDBUF, kSndBufSize); - channel2_->SetOption(cricket::BaseChannel::ST_RTP, - rtc::Socket::Option::OPT_RCVBUF, kRcvBufSize); + new_rtp_transport_ = CreateDtlsSrtpTransport( + fake_rtp_dtls_transport2_.get(), fake_rtcp_dtls_transport2_.get()); + channel1_->SetOption(webrtc::BaseChannel::ST_RTP, + webrtc::Socket::Option::OPT_SNDBUF, kSndBufSize); + channel2_->SetOption(webrtc::BaseChannel::ST_RTP, + webrtc::Socket::Option::OPT_RCVBUF, kRcvBufSize); channel1_->SetRtpTransport(new_rtp_transport_.get()); send_success = fake_rtp_dtls_transport2_->GetOption( - rtc::Socket::Option::OPT_SNDBUF, &send_buf); + webrtc::Socket::Option::OPT_SNDBUF, &send_buf); rcv_success = fake_rtp_dtls_transport2_->GetOption( - rtc::Socket::Option::OPT_RCVBUF, &rcv_buf); + webrtc::Socket::Option::OPT_RCVBUF, &rcv_buf); }); ASSERT_TRUE(send_success); @@ -1432,29 +1491,31 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { } protected: - void WaitForThreads() { WaitForThreads(rtc::ArrayView()); } - static void ProcessThreadQueue(rtc::Thread* thread) { + void WaitForThreads() { + WaitForThreads(webrtc::ArrayView()); + } + static void ProcessThreadQueue(webrtc::Thread* thread) { RTC_DCHECK(thread->IsCurrent()); while (!thread->empty()) { thread->ProcessMessages(0); } } static void FlushCurrentThread() { - 
rtc::Thread::Current()->ProcessMessages(0); + webrtc::Thread::Current()->ProcessMessages(0); } - void WaitForThreads(rtc::ArrayView threads) { + void WaitForThreads(webrtc::ArrayView threads) { // `threads` and current thread post packets to network thread. - for (rtc::Thread* thread : threads) { + for (webrtc::Thread* thread : threads) { SendTask(thread, [thread] { ProcessThreadQueue(thread); }); } - ProcessThreadQueue(rtc::Thread::Current()); + ProcessThreadQueue(webrtc::Thread::Current()); // Network thread move them around and post back to worker = current thread. if (!network_thread_->IsCurrent()) { SendTask(network_thread_, [this] { ProcessThreadQueue(network_thread_); }); } // Worker thread = current Thread process received messages. - ProcessThreadQueue(rtc::Thread::Current()); + ProcessThreadQueue(webrtc::Thread::Current()); } // Accessors that return the standard VideoMedia{Send|Receive}ChannelInterface @@ -1502,26 +1563,26 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { channel2_->media_receive_channel()); } - rtc::AutoThread main_thread_; + webrtc::AutoThread main_thread_; // TODO(pbos): Remove playout from all media channels and let renderers mute // themselves. const bool verify_playout_; - rtc::scoped_refptr network_thread_safety_ = + webrtc::scoped_refptr network_thread_safety_ = webrtc::PendingTaskSafetyFlag::CreateDetached(); - std::unique_ptr network_thread_keeper_; - rtc::Thread* network_thread_; - std::unique_ptr fake_rtp_dtls_transport1_; - std::unique_ptr fake_rtcp_dtls_transport1_; - std::unique_ptr fake_rtp_dtls_transport2_; - std::unique_ptr fake_rtcp_dtls_transport2_; - std::unique_ptr fake_rtp_packet_transport1_; - std::unique_ptr fake_rtcp_packet_transport1_; - std::unique_ptr fake_rtp_packet_transport2_; - std::unique_ptr fake_rtcp_packet_transport2_; + std::unique_ptr network_thread_keeper_; + webrtc::Thread* network_thread_; + std::unique_ptr fake_rtp_dtls_transport1_; + std::unique_ptr fake_rtcp_dtls_transport1_; + std::unique_ptr fake_rtp_dtls_transport2_; + std::unique_ptr fake_rtcp_dtls_transport2_; + std::unique_ptr fake_rtp_packet_transport1_; + std::unique_ptr fake_rtcp_packet_transport1_; + std::unique_ptr fake_rtp_packet_transport2_; + std::unique_ptr fake_rtcp_packet_transport2_; std::unique_ptr rtp_transport1_; std::unique_ptr rtp_transport2_; std::unique_ptr new_rtp_transport_; - cricket::FakeMediaEngine media_engine_; + webrtc::FakeMediaEngine media_engine_; std::unique_ptr channel1_; std::unique_ptr channel2_; typename T::Content local_media_content1_; @@ -1529,25 +1590,25 @@ class ChannelTest : public ::testing::Test, public sigslot::has_slots<> { typename T::Content remote_media_content1_; typename T::Content remote_media_content2_; // The RTP and RTCP packets to send in the tests. 
- rtc::Buffer rtp_packet_; - rtc::Buffer rtcp_packet_; - cricket::CandidatePairInterface* last_selected_candidate_pair_; - rtc::UniqueRandomIdGenerator ssrc_generator_; + webrtc::Buffer rtp_packet_; + webrtc::Buffer rtcp_packet_; + webrtc::CandidatePairInterface* last_selected_candidate_pair_; + webrtc::UniqueRandomIdGenerator ssrc_generator_; webrtc::test::ScopedKeyValueConfig field_trials_; }; template <> -std::unique_ptr ChannelTest::CreateChannel( - rtc::Thread* worker_thread, - rtc::Thread* network_thread, - std::unique_ptr send_ch, - std::unique_ptr receive_ch, +std::unique_ptr ChannelTest::CreateChannel( + webrtc::Thread* worker_thread, + webrtc::Thread* network_thread, + std::unique_ptr send_ch, + std::unique_ptr receive_ch, webrtc::RtpTransportInternal* rtp_transport, int flags) { - rtc::Thread* signaling_thread = rtc::Thread::Current(); - auto channel = std::make_unique( + webrtc::Thread* signaling_thread = webrtc::Thread::Current(); + auto channel = std::make_unique( worker_thread, network_thread, signaling_thread, std::move(send_ch), - std::move(receive_ch), cricket::CN_AUDIO, (flags & DTLS) != 0, + std::move(receive_ch), webrtc::CN_AUDIO, (flags & DTLS) != 0, webrtc::CryptoOptions(), &ssrc_generator_); SendTask(network_thread, [&]() { RTC_DCHECK_RUN_ON(channel->network_thread()); @@ -1559,32 +1620,25 @@ std::unique_ptr ChannelTest::CreateChannel( template <> void ChannelTest::CreateContent( int flags, - const cricket::AudioCodec& audio_codec, - const cricket::VideoCodec& video_codec, - cricket::AudioContentDescription* audio) { + const webrtc::Codec& audio_codec, + const webrtc::Codec& video_codec, + webrtc::AudioContentDescription* audio) { audio->AddCodec(audio_codec); audio->set_rtcp_mux((flags & RTCP_MUX) != 0); } template <> void ChannelTest::CopyContent( - const cricket::AudioContentDescription& source, - cricket::AudioContentDescription* audio) { + const webrtc::AudioContentDescription& source, + webrtc::AudioContentDescription* audio) { *audio = source; } -template <> -bool ChannelTest::CodecMatches(const cricket::AudioCodec& c1, - const cricket::AudioCodec& c2) { - return c1.name == c2.name && c1.clockrate == c2.clockrate && - c1.bitrate == c2.bitrate && c1.channels == c2.channels; -} - template <> void ChannelTest::AddLegacyStreamInContent( uint32_t ssrc, int flags, - cricket::AudioContentDescription* audio) { + webrtc::AudioContentDescription* audio) { audio->AddLegacyStream(ssrc); } @@ -1624,17 +1678,17 @@ class VoiceChannelWithEncryptedRtpHeaderExtensionsDoubleThreadTest // override to add NULL parameter template <> -std::unique_ptr ChannelTest::CreateChannel( - rtc::Thread* worker_thread, - rtc::Thread* network_thread, - std::unique_ptr send_ch, - std::unique_ptr receive_ch, +std::unique_ptr ChannelTest::CreateChannel( + webrtc::Thread* worker_thread, + webrtc::Thread* network_thread, + std::unique_ptr send_ch, + std::unique_ptr receive_ch, webrtc::RtpTransportInternal* rtp_transport, int flags) { - rtc::Thread* signaling_thread = rtc::Thread::Current(); - auto channel = std::make_unique( + webrtc::Thread* signaling_thread = webrtc::Thread::Current(); + auto channel = std::make_unique( worker_thread, network_thread, signaling_thread, std::move(send_ch), - std::move(receive_ch), cricket::CN_VIDEO, (flags & DTLS) != 0, + std::move(receive_ch), webrtc::CN_VIDEO, (flags & DTLS) != 0, webrtc::CryptoOptions(), &ssrc_generator_); SendTask(network_thread, [&]() { RTC_DCHECK_RUN_ON(channel->network_thread()); @@ -1646,31 +1700,25 @@ std::unique_ptr 
ChannelTest::CreateChannel( template <> void ChannelTest::CreateContent( int flags, - const cricket::AudioCodec& audio_codec, - const cricket::VideoCodec& video_codec, - cricket::VideoContentDescription* video) { + const webrtc::Codec& audio_codec, + const webrtc::Codec& video_codec, + webrtc::VideoContentDescription* video) { video->AddCodec(video_codec); video->set_rtcp_mux((flags & RTCP_MUX) != 0); } template <> void ChannelTest::CopyContent( - const cricket::VideoContentDescription& source, - cricket::VideoContentDescription* video) { + const webrtc::VideoContentDescription& source, + webrtc::VideoContentDescription* video) { *video = source; } -template <> -bool ChannelTest::CodecMatches(const cricket::VideoCodec& c1, - const cricket::VideoCodec& c2) { - return c1.name == c2.name; -} - template <> void ChannelTest::AddLegacyStreamInContent( uint32_t ssrc, int flags, - cricket::VideoContentDescription* video) { + webrtc::VideoContentDescription* video) { video->AddLegacyStream(ssrc); } @@ -1732,6 +1780,10 @@ TEST_F(VoiceChannelSingleThreadTest, TestSetContentsRtcpMuxWithPrAnswer) { Base::TestSetContentsRtcpMux(); } +TEST_F(VoiceChannelSingleThreadTest, TestSetContentsRtcpReducedSize) { + Base::TestSetContentsRtcpReducedSize(); +} + TEST_F(VoiceChannelSingleThreadTest, TestChangeStreamParamsInContent) { Base::TestChangeStreamParamsInContent(); } @@ -1869,6 +1921,10 @@ TEST_F(VoiceChannelDoubleThreadTest, TestSetContentsRtcpMuxWithPrAnswer) { Base::TestSetContentsRtcpMux(); } +TEST_F(VoiceChannelDoubleThreadTest, TestSetContentsRtcpReducedSize) { + Base::TestSetContentsRtcpReducedSize(); +} + TEST_F(VoiceChannelDoubleThreadTest, TestChangeStreamParamsInContent) { Base::TestChangeStreamParamsInContent(); } @@ -2101,10 +2157,10 @@ TEST_F(VideoChannelSingleThreadTest, UpdateLocalStreamsWithSimulcast) { } TEST_F(VideoChannelSingleThreadTest, TestSetLocalOfferWithPacketization) { - const cricket::VideoCodec kVp8Codec = cricket::CreateVideoCodec(97, "VP8"); - cricket::VideoCodec vp9_codec = cricket::CreateVideoCodec(98, "VP9"); - vp9_codec.packetization = cricket::kPacketizationParamRaw; - cricket::VideoContentDescription video; + const webrtc::Codec kVp8Codec = webrtc::CreateVideoCodec(97, "VP8"); + webrtc::Codec vp9_codec = webrtc::CreateVideoCodec(98, "VP9"); + vp9_codec.packetization = webrtc::kPacketizationParamRaw; + webrtc::VideoContentDescription video; video.set_codecs({kVp8Codec, vp9_codec}); CreateChannels(0, 0); @@ -2113,21 +2169,21 @@ TEST_F(VideoChannelSingleThreadTest, TestSetLocalOfferWithPacketization) { EXPECT_TRUE(channel1_->SetLocalContent(&video, SdpType::kOffer, err)); EXPECT_THAT(media_send_channel1_impl()->send_codecs(), testing::IsEmpty()); ASSERT_THAT(media_receive_channel1_impl()->recv_codecs(), testing::SizeIs(2)); - EXPECT_TRUE(media_receive_channel1_impl()->recv_codecs()[0].Matches( - kVp8Codec, &field_trials_)); + EXPECT_TRUE( + media_receive_channel1_impl()->recv_codecs()[0].Matches(kVp8Codec)); EXPECT_EQ(media_receive_channel1_impl()->recv_codecs()[0].packetization, - absl::nullopt); - EXPECT_TRUE(media_receive_channel1_impl()->recv_codecs()[1].Matches( - vp9_codec, &field_trials_)); + std::nullopt); + EXPECT_TRUE( + media_receive_channel1_impl()->recv_codecs()[1].Matches(vp9_codec)); EXPECT_EQ(media_receive_channel1_impl()->recv_codecs()[1].packetization, - cricket::kPacketizationParamRaw); + webrtc::kPacketizationParamRaw); } TEST_F(VideoChannelSingleThreadTest, TestSetRemoteOfferWithPacketization) { - const cricket::VideoCodec kVp8Codec = 
cricket::CreateVideoCodec(97, "VP8"); - cricket::VideoCodec vp9_codec = cricket::CreateVideoCodec(98, "VP9"); - vp9_codec.packetization = cricket::kPacketizationParamRaw; - cricket::VideoContentDescription video; + const webrtc::Codec kVp8Codec = webrtc::CreateVideoCodec(97, "VP8"); + webrtc::Codec vp9_codec = webrtc::CreateVideoCodec(98, "VP9"); + vp9_codec.packetization = webrtc::kPacketizationParamRaw; + webrtc::VideoContentDescription video; video.set_codecs({kVp8Codec, vp9_codec}); CreateChannels(0, 0); @@ -2137,21 +2193,19 @@ TEST_F(VideoChannelSingleThreadTest, TestSetRemoteOfferWithPacketization) { EXPECT_TRUE(err.empty()); EXPECT_THAT(media_receive_channel1_impl()->recv_codecs(), testing::IsEmpty()); ASSERT_THAT(media_send_channel1_impl()->send_codecs(), testing::SizeIs(2)); - EXPECT_TRUE(media_send_channel1_impl()->send_codecs()[0].Matches( - kVp8Codec, &field_trials_)); + EXPECT_TRUE(media_send_channel1_impl()->send_codecs()[0].Matches(kVp8Codec)); EXPECT_EQ(media_send_channel1_impl()->send_codecs()[0].packetization, - absl::nullopt); - EXPECT_TRUE(media_send_channel1_impl()->send_codecs()[1].Matches( - vp9_codec, &field_trials_)); + std::nullopt); + EXPECT_TRUE(media_send_channel1_impl()->send_codecs()[1].Matches(vp9_codec)); EXPECT_EQ(media_send_channel1_impl()->send_codecs()[1].packetization, - cricket::kPacketizationParamRaw); + webrtc::kPacketizationParamRaw); } TEST_F(VideoChannelSingleThreadTest, TestSetAnswerWithPacketization) { - const cricket::VideoCodec kVp8Codec = cricket::CreateVideoCodec(97, "VP8"); - cricket::VideoCodec vp9_codec = cricket::CreateVideoCodec(98, "VP9"); - vp9_codec.packetization = cricket::kPacketizationParamRaw; - cricket::VideoContentDescription video; + const webrtc::Codec kVp8Codec = webrtc::CreateVideoCodec(97, "VP8"); + webrtc::Codec vp9_codec = webrtc::CreateVideoCodec(98, "VP9"); + vp9_codec.packetization = webrtc::kPacketizationParamRaw; + webrtc::VideoContentDescription video; video.set_codecs({kVp8Codec, vp9_codec}); CreateChannels(0, 0); @@ -2162,32 +2216,30 @@ TEST_F(VideoChannelSingleThreadTest, TestSetAnswerWithPacketization) { EXPECT_TRUE(channel1_->SetRemoteContent(&video, SdpType::kAnswer, err)); EXPECT_TRUE(err.empty()); ASSERT_THAT(media_receive_channel1_impl()->recv_codecs(), testing::SizeIs(2)); - EXPECT_TRUE(media_receive_channel1_impl()->recv_codecs()[0].Matches( - kVp8Codec, &field_trials_)); + EXPECT_TRUE( + media_receive_channel1_impl()->recv_codecs()[0].Matches(kVp8Codec)); EXPECT_EQ(media_receive_channel1_impl()->recv_codecs()[0].packetization, - absl::nullopt); - EXPECT_TRUE(media_receive_channel1_impl()->recv_codecs()[1].Matches( - vp9_codec, &field_trials_)); + std::nullopt); + EXPECT_TRUE( + media_receive_channel1_impl()->recv_codecs()[1].Matches(vp9_codec)); EXPECT_EQ(media_receive_channel1_impl()->recv_codecs()[1].packetization, - cricket::kPacketizationParamRaw); + webrtc::kPacketizationParamRaw); EXPECT_THAT(media_send_channel1_impl()->send_codecs(), testing::SizeIs(2)); - EXPECT_TRUE(media_send_channel1_impl()->send_codecs()[0].Matches( - kVp8Codec, &field_trials_)); + EXPECT_TRUE(media_send_channel1_impl()->send_codecs()[0].Matches(kVp8Codec)); EXPECT_EQ(media_send_channel1_impl()->send_codecs()[0].packetization, - absl::nullopt); - EXPECT_TRUE(media_send_channel1_impl()->send_codecs()[1].Matches( - vp9_codec, &field_trials_)); + std::nullopt); + EXPECT_TRUE(media_send_channel1_impl()->send_codecs()[1].Matches(vp9_codec)); EXPECT_EQ(media_send_channel1_impl()->send_codecs()[1].packetization, - 
cricket::kPacketizationParamRaw); + webrtc::kPacketizationParamRaw); } TEST_F(VideoChannelSingleThreadTest, TestSetLocalAnswerWithoutPacketization) { - const cricket::VideoCodec kLocalCodec = cricket::CreateVideoCodec(98, "VP8"); - cricket::VideoCodec remote_codec = cricket::CreateVideoCodec(99, "VP8"); - remote_codec.packetization = cricket::kPacketizationParamRaw; - cricket::VideoContentDescription local_video; + const webrtc::Codec kLocalCodec = webrtc::CreateVideoCodec(98, "VP8"); + webrtc::Codec remote_codec = webrtc::CreateVideoCodec(99, "VP8"); + remote_codec.packetization = webrtc::kPacketizationParamRaw; + webrtc::VideoContentDescription local_video; local_video.set_codecs({kLocalCodec}); - cricket::VideoContentDescription remote_video; + webrtc::VideoContentDescription remote_video; remote_video.set_codecs({remote_codec}); CreateChannels(0, 0); @@ -2197,19 +2249,19 @@ TEST_F(VideoChannelSingleThreadTest, TestSetLocalAnswerWithoutPacketization) { EXPECT_TRUE(channel1_->SetLocalContent(&local_video, SdpType::kAnswer, err)); ASSERT_THAT(media_receive_channel1_impl()->recv_codecs(), testing::SizeIs(1)); EXPECT_EQ(media_receive_channel1_impl()->recv_codecs()[0].packetization, - absl::nullopt); + std::nullopt); ASSERT_THAT(media_send_channel1_impl()->send_codecs(), testing::SizeIs(1)); EXPECT_EQ(media_send_channel1_impl()->send_codecs()[0].packetization, - absl::nullopt); + std::nullopt); } TEST_F(VideoChannelSingleThreadTest, TestSetRemoteAnswerWithoutPacketization) { - cricket::VideoCodec local_codec = cricket::CreateVideoCodec(98, "VP8"); - local_codec.packetization = cricket::kPacketizationParamRaw; - const cricket::VideoCodec kRemoteCodec = cricket::CreateVideoCodec(99, "VP8"); - cricket::VideoContentDescription local_video; + webrtc::Codec local_codec = webrtc::CreateVideoCodec(98, "VP8"); + local_codec.packetization = webrtc::kPacketizationParamRaw; + const webrtc::Codec kRemoteCodec = webrtc::CreateVideoCodec(99, "VP8"); + webrtc::VideoContentDescription local_video; local_video.set_codecs({local_codec}); - cricket::VideoContentDescription remote_video; + webrtc::VideoContentDescription remote_video; remote_video.set_codecs({kRemoteCodec}); CreateChannels(0, 0); @@ -2220,21 +2272,21 @@ TEST_F(VideoChannelSingleThreadTest, TestSetRemoteAnswerWithoutPacketization) { channel1_->SetRemoteContent(&remote_video, SdpType::kAnswer, err)); ASSERT_THAT(media_receive_channel1_impl()->recv_codecs(), testing::SizeIs(1)); EXPECT_EQ(media_receive_channel1_impl()->recv_codecs()[0].packetization, - absl::nullopt); + std::nullopt); ASSERT_THAT(media_send_channel1_impl()->send_codecs(), testing::SizeIs(1)); EXPECT_EQ(media_send_channel1_impl()->send_codecs()[0].packetization, - absl::nullopt); + std::nullopt); } TEST_F(VideoChannelSingleThreadTest, TestSetRemoteAnswerWithInvalidPacketization) { - cricket::VideoCodec local_codec = cricket::CreateVideoCodec(98, "VP8"); - local_codec.packetization = cricket::kPacketizationParamRaw; - cricket::VideoCodec remote_codec = cricket::CreateVideoCodec(99, "VP8"); + webrtc::Codec local_codec = webrtc::CreateVideoCodec(98, "VP8"); + local_codec.packetization = webrtc::kPacketizationParamRaw; + webrtc::Codec remote_codec = webrtc::CreateVideoCodec(99, "VP8"); remote_codec.packetization = "unknownpacketizationattributevalue"; - cricket::VideoContentDescription local_video; + webrtc::VideoContentDescription local_video; local_video.set_codecs({local_codec}); - cricket::VideoContentDescription remote_video; + webrtc::VideoContentDescription remote_video; 
remote_video.set_codecs({remote_codec}); CreateChannels(0, 0); @@ -2247,18 +2299,18 @@ TEST_F(VideoChannelSingleThreadTest, EXPECT_FALSE(err.empty()); ASSERT_THAT(media_receive_channel1_impl()->recv_codecs(), testing::SizeIs(1)); EXPECT_EQ(media_receive_channel1_impl()->recv_codecs()[0].packetization, - cricket::kPacketizationParamRaw); + webrtc::kPacketizationParamRaw); EXPECT_THAT(media_send_channel1_impl()->send_codecs(), testing::IsEmpty()); } TEST_F(VideoChannelSingleThreadTest, TestSetLocalAnswerWithInvalidPacketization) { - cricket::VideoCodec local_codec = cricket::CreateVideoCodec(98, "VP8"); - local_codec.packetization = cricket::kPacketizationParamRaw; - const cricket::VideoCodec kRemoteCodec = cricket::CreateVideoCodec(99, "VP8"); - cricket::VideoContentDescription local_video; + webrtc::Codec local_codec = webrtc::CreateVideoCodec(98, "VP8"); + local_codec.packetization = webrtc::kPacketizationParamRaw; + const webrtc::Codec kRemoteCodec = webrtc::CreateVideoCodec(99, "VP8"); + webrtc::VideoContentDescription local_video; local_video.set_codecs({local_codec}); - cricket::VideoContentDescription remote_video; + webrtc::VideoContentDescription remote_video; remote_video.set_codecs({kRemoteCodec}); CreateChannels(0, 0); @@ -2271,7 +2323,143 @@ TEST_F(VideoChannelSingleThreadTest, EXPECT_THAT(media_receive_channel1_impl()->recv_codecs(), testing::IsEmpty()); ASSERT_THAT(media_send_channel1_impl()->send_codecs(), testing::SizeIs(1)); EXPECT_EQ(media_send_channel1_impl()->send_codecs()[0].packetization, - absl::nullopt); + std::nullopt); +} + +TEST_F(VideoChannelSingleThreadTest, + StopsPacketizationVerificationWhenMatchIsFoundInRemoteAnswer) { + webrtc::Codec vp8_foo = webrtc::CreateVideoCodec(96, "VP8"); + vp8_foo.packetization = "foo"; + webrtc::Codec vp8_bar = webrtc::CreateVideoCodec(97, "VP8"); + vp8_bar.packetization = "bar"; + webrtc::Codec vp9 = webrtc::CreateVideoCodec(98, "VP9"); + webrtc::Codec vp9_foo = webrtc::CreateVideoCodec(99, "VP9"); + vp9_foo.packetization = "bar"; + webrtc::VideoContentDescription local; + local.set_codecs({vp8_foo, vp8_bar, vp9_foo}); + webrtc::VideoContentDescription remote; + remote.set_codecs({vp8_foo, vp9}); + + CreateChannels(0, 0); + std::string err; + ASSERT_TRUE(channel1_->SetLocalContent(&local, SdpType::kOffer, err)) << err; + ASSERT_TRUE(channel1_->SetRemoteContent(&remote, SdpType::kAnswer, err)) + << err; + + EXPECT_THAT( + media_receive_channel1_impl()->recv_codecs(), + ElementsAre(AllOf(Field(&webrtc::Codec::id, 96), + Field(&webrtc::Codec::packetization, "foo")), + AllOf(Field(&webrtc::Codec::id, 97), + Field(&webrtc::Codec::packetization, "bar")), + AllOf(Field(&webrtc::Codec::id, 99), + Field(&webrtc::Codec::packetization, std::nullopt)))); + EXPECT_THAT( + media_send_channel1_impl()->send_codecs(), + ElementsAre(AllOf(Field(&webrtc::Codec::id, 96), + Field(&webrtc::Codec::packetization, "foo")), + AllOf(Field(&webrtc::Codec::id, 98), + Field(&webrtc::Codec::packetization, std::nullopt)))); +} + +TEST_F(VideoChannelSingleThreadTest, + StopsPacketizationVerificationWhenMatchIsFoundInLocalAnswer) { + webrtc::Codec vp8_foo = webrtc::CreateVideoCodec(96, "VP8"); + vp8_foo.packetization = "foo"; + webrtc::Codec vp8_bar = webrtc::CreateVideoCodec(97, "VP8"); + vp8_bar.packetization = "bar"; + webrtc::Codec vp9 = webrtc::CreateVideoCodec(98, "VP9"); + webrtc::Codec vp9_foo = webrtc::CreateVideoCodec(99, "VP9"); + vp9_foo.packetization = "bar"; + webrtc::VideoContentDescription local; + local.set_codecs({vp8_foo, vp9}); + 
webrtc::VideoContentDescription remote; + remote.set_codecs({vp8_foo, vp8_bar, vp9_foo}); + + CreateChannels(0, 0); + std::string err; + ASSERT_TRUE(channel1_->SetRemoteContent(&remote, SdpType::kOffer, err)) + << err; + ASSERT_TRUE(channel1_->SetLocalContent(&local, SdpType::kAnswer, err)) << err; + + EXPECT_THAT( + media_receive_channel1_impl()->recv_codecs(), + ElementsAre(AllOf(Field(&webrtc::Codec::id, 96), + Field(&webrtc::Codec::packetization, "foo")), + AllOf(Field(&webrtc::Codec::id, 98), + Field(&webrtc::Codec::packetization, std::nullopt)))); + EXPECT_THAT( + media_send_channel1_impl()->send_codecs(), + ElementsAre(AllOf(Field(&webrtc::Codec::id, 96), + Field(&webrtc::Codec::packetization, "foo")), + AllOf(Field(&webrtc::Codec::id, 97), + Field(&webrtc::Codec::packetization, "bar")), + AllOf(Field(&webrtc::Codec::id, 99), + Field(&webrtc::Codec::packetization, std::nullopt)))); +} + +TEST_F(VideoChannelSingleThreadTest, + ConsidersAllCodecsWithDiffrentPacketizationsInRemoteAnswer) { + webrtc::Codec vp8 = webrtc::CreateVideoCodec(96, "VP8"); + webrtc::Codec vp8_raw = webrtc::CreateVideoCodec(97, "VP8"); + vp8_raw.packetization = webrtc::kPacketizationParamRaw; + webrtc::VideoContentDescription local; + local.set_codecs({vp8, vp8_raw}); + webrtc::VideoContentDescription remote; + remote.set_codecs({vp8_raw, vp8}); + + CreateChannels(0, 0); + std::string err; + ASSERT_TRUE(channel1_->SetLocalContent(&local, SdpType::kOffer, err)) << err; + ASSERT_TRUE(channel1_->SetRemoteContent(&remote, SdpType::kAnswer, err)) + << err; + + EXPECT_THAT( + media_receive_channel1_impl()->recv_codecs(), + ElementsAre(AllOf(Field(&webrtc::Codec::id, 96), + Field(&webrtc::Codec::packetization, std::nullopt)), + AllOf(Field(&webrtc::Codec::id, 97), + Field(&webrtc::Codec::packetization, + webrtc::kPacketizationParamRaw)))); + EXPECT_THAT( + media_send_channel1_impl()->send_codecs(), + ElementsAre(AllOf(Field(&webrtc::Codec::id, 97), + Field(&webrtc::Codec::packetization, + webrtc::kPacketizationParamRaw)), + AllOf(Field(&webrtc::Codec::id, 96), + Field(&webrtc::Codec::packetization, std::nullopt)))); +} + +TEST_F(VideoChannelSingleThreadTest, + ConsidersAllCodecsWithDiffrentPacketizationsInLocalAnswer) { + webrtc::Codec vp8 = webrtc::CreateVideoCodec(96, "VP8"); + webrtc::Codec vp8_raw = webrtc::CreateVideoCodec(97, "VP8"); + vp8_raw.packetization = webrtc::kPacketizationParamRaw; + webrtc::VideoContentDescription local; + local.set_codecs({vp8_raw, vp8}); + webrtc::VideoContentDescription remote; + remote.set_codecs({vp8, vp8_raw}); + + CreateChannels(0, 0); + std::string err; + ASSERT_TRUE(channel1_->SetRemoteContent(&remote, SdpType::kOffer, err)) + << err; + ASSERT_TRUE(channel1_->SetLocalContent(&local, SdpType::kAnswer, err)) << err; + + EXPECT_THAT( + media_receive_channel1_impl()->recv_codecs(), + ElementsAre(AllOf(Field(&webrtc::Codec::id, 97), + Field(&webrtc::Codec::packetization, + webrtc::kPacketizationParamRaw)), + AllOf(Field(&webrtc::Codec::id, 96), + Field(&webrtc::Codec::packetization, std::nullopt)))); + EXPECT_THAT( + media_send_channel1_impl()->send_codecs(), + ElementsAre(AllOf(Field(&webrtc::Codec::id, 96), + Field(&webrtc::Codec::packetization, std::nullopt)), + AllOf(Field(&webrtc::Codec::id, 97), + Field(&webrtc::Codec::packetization, + webrtc::kPacketizationParamRaw)))); } // VideoChannelDoubleThreadTest @@ -2409,4 +2597,4 @@ TEST_F(VideoChannelDoubleThreadTest, SocketOptionsMergedOnSetTransport) { Base::SocketOptionsMergedOnSetTransport(); } -// TODO(pthatcher): 
TestSetReceiver? +} // namespace diff --git a/pc/codec_vendor.cc b/pc/codec_vendor.cc new file mode 100644 index 0000000000..42d52f6487 --- /dev/null +++ b/pc/codec_vendor.cc @@ -0,0 +1,1063 @@ +/* + * Copyright 2025 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "pc/codec_vendor.h" + +#include + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/strings/match.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/string_view.h" +#include "api/field_trials_view.h" +#include "api/media_types.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/rtp_transceiver_direction.h" +#include "call/payload_type.h" +#include "media/base/codec.h" +#include "media/base/codec_comparators.h" +#include "media/base/codec_list.h" +#include "media/base/media_constants.h" +#include "media/base/media_engine.h" +#include "media/base/sdp_video_format_utils.h" +#include "pc/media_options.h" +#include "pc/rtp_media_utils.h" +#include "pc/session_description.h" +#include "pc/typed_codec_vendor.h" +#include "pc/used_ids.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/string_encode.h" +#include "rtc_base/strings/str_join.h" +#include "rtc_base/strings/string_builder.h" + +#ifdef RTC_ENABLE_H265 +#include "api/video_codecs/h265_profile_tier_level.h" +#endif + +namespace webrtc { + +namespace { + +using webrtc::PayloadTypeSuggester; +using webrtc::RTCError; +using webrtc::RTCErrorOr; +using webrtc::RtpTransceiverDirection; + +bool IsRtxCodec(const RtpCodecCapability& capability) { + return absl::EqualsIgnoreCase(capability.name, kRtxCodecName); +} + +bool IsRedCodec(const RtpCodecCapability& capability) { + return absl::EqualsIgnoreCase(capability.name, kRedCodecName); +} + +bool IsComfortNoiseCodec(const Codec& codec) { + return absl::EqualsIgnoreCase(codec.name, kComfortNoiseCodecName); +} + +// Wrapper for FindMatchingCodecs that uses CodecList +std::optional FindMatchingCodec(const CodecList& codecs1, + const CodecList& codecs2, + const Codec& codec_to_match) { + return webrtc::FindMatchingCodec(codecs1.codecs(), codecs2.codecs(), + codec_to_match); +} + +void StripCNCodecs(CodecList& audio_codecs) { + audio_codecs.writable_codecs().erase( + std::remove_if( + audio_codecs.begin(), audio_codecs.end(), + [](const Codec& codec) { return IsComfortNoiseCodec(codec); }), + audio_codecs.end()); +} + +bool IsMediaContentOfType(const ContentInfo* content, MediaType media_type) { + if (!content || !content->media_description()) { + return false; + } + return content->media_description()->type() == media_type; +} +// Find the codec in `codec_list` that `rtx_codec` is associated with. 
+const Codec* GetAssociatedCodecForRtx(const CodecList& codec_list, + const Codec& rtx_codec) { + std::string associated_pt_str; + if (!rtx_codec.GetParam(kCodecParamAssociatedPayloadType, + &associated_pt_str)) { + RTC_LOG(LS_WARNING) << "RTX codec " << rtx_codec.id + << " is missing an associated payload type."; + return nullptr; + } + + int associated_pt; + if (!webrtc::FromString(associated_pt_str, &associated_pt)) { + RTC_LOG(LS_WARNING) << "Couldn't convert payload type " << associated_pt_str + << " of RTX codec " << rtx_codec.id + << " to an integer."; + return nullptr; + } + + // Find the associated codec for the RTX codec. + const Codec* associated_codec = + FindCodecById(codec_list.codecs(), associated_pt); + if (!associated_codec) { + RTC_LOG(LS_WARNING) << "Couldn't find associated codec with payload type " + << associated_pt << " for RTX codec " << rtx_codec.id + << "."; + } + return associated_codec; +} + +// Find the codec in `codec_list` that `red_codec` is associated with. +const Codec* GetAssociatedCodecForRed(const CodecList& codec_list, + const Codec& red_codec) { + std::string fmtp; + if (!red_codec.GetParam(kCodecParamNotInNameValueFormat, &fmtp)) { + // Don't log for video/RED where this is normal. + if (red_codec.type == Codec::Type::kAudio) { + RTC_LOG(LS_WARNING) << "RED codec " << red_codec.id + << " is missing an associated payload type."; + } + return nullptr; + } + + std::vector redundant_payloads = webrtc::split(fmtp, '/'); + if (redundant_payloads.size() < 2) { + return nullptr; + } + + absl::string_view associated_pt_str = redundant_payloads[0]; + int associated_pt; + if (!webrtc::FromString(associated_pt_str, &associated_pt)) { + RTC_LOG(LS_WARNING) << "Couldn't convert first payload type " + << associated_pt_str << " of RED codec " << red_codec.id + << " to an integer."; + return nullptr; + } + + // Find the associated codec for the RED codec. + const Codec* associated_codec = + FindCodecById(codec_list.codecs(), associated_pt); + if (!associated_codec) { + RTC_LOG(LS_WARNING) << "Couldn't find associated codec with payload type " + << associated_pt << " for RED codec " << red_codec.id + << "."; + } + return associated_codec; +} + +// Adds all codecs from `reference_codecs` to `offered_codecs` that don't +// already exist in `offered_codecs` and ensure the payload types don't +// collide. +RTCError MergeCodecs(const CodecList& reference_codecs, + const std::string& mid, + CodecList& offered_codecs, + PayloadTypeSuggester& pt_suggester) { + // Add all new codecs that are not RTX/RED codecs. + // The two-pass splitting of the loops means preferring payload types + // of actual codecs with respect to collisions. + for (const Codec& reference_codec : reference_codecs) { + if (reference_codec.GetResiliencyType() != Codec::ResiliencyType::kRtx && + reference_codec.GetResiliencyType() != Codec::ResiliencyType::kRed && + !FindMatchingCodec(reference_codecs, offered_codecs, reference_codec)) { + Codec codec = reference_codec; + RTCErrorOr suggestion = + pt_suggester.SuggestPayloadType(mid, codec); + if (!suggestion.ok()) { + return suggestion.MoveError(); + } + codec.id = suggestion.value(); + offered_codecs.push_back(codec); + } + } + + // Add all new RTX or RED codecs. 
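Reading note for the RTX/RED pass that follows: a resiliency codec never stands alone, it refers to a primary codec by payload type. A minimal standalone sketch (not taken from this patch), using only helpers and constants that appear elsewhere in it (CreateVideoCodec, CreateVideoRtxCodec, kCodecParamAssociatedPayloadType); the payload type numbers are arbitrary:

// Primary codec plus its retransmission codec, as MergeCodecs expects them.
Codec vp8 = CreateVideoCodec(96, "VP8");
Codec rtx = CreateVideoRtxCodec(97, /*associated_payload_type=*/96);
// The RTX codec references its primary codec through the "apt" fmtp
// parameter, i.e. rtx.params[kCodecParamAssociatedPayloadType] is "96";
// this is the value GetAssociatedCodecForRtx() reads back when payload
// types are re-assigned. Audio RED uses the fmtp form "96/96" instead,
// which is handled by GetAssociatedCodecForRed().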
+ for (const Codec& reference_codec : reference_codecs) { + if (reference_codec.GetResiliencyType() == Codec::ResiliencyType::kRtx && + !FindMatchingCodec(reference_codecs, offered_codecs, reference_codec)) { + Codec rtx_codec = reference_codec; + const Codec* associated_codec = + GetAssociatedCodecForRtx(reference_codecs, rtx_codec); + if (!associated_codec) { + continue; + } + // Find a codec in the offered list that matches the reference codec. + // Its payload type may be different than the reference codec. + std::optional matching_codec = FindMatchingCodec( + reference_codecs, offered_codecs, *associated_codec); + if (!matching_codec) { + RTC_LOG(LS_WARNING) + << "Couldn't find matching " << associated_codec->name << " codec."; + continue; + } + + rtx_codec.params[kCodecParamAssociatedPayloadType] = + absl::StrCat(matching_codec->id); + RTCErrorOr suggestion = + pt_suggester.SuggestPayloadType(mid, rtx_codec); + if (!suggestion.ok()) { + return suggestion.MoveError(); + } + rtx_codec.id = suggestion.value(); + offered_codecs.push_back(rtx_codec); + } else if (reference_codec.GetResiliencyType() == + Codec::ResiliencyType::kRed && + !FindMatchingCodec(reference_codecs, offered_codecs, + reference_codec)) { + Codec red_codec = reference_codec; + const Codec* associated_codec = + GetAssociatedCodecForRed(reference_codecs, red_codec); + if (associated_codec) { + std::optional matching_codec = FindMatchingCodec( + reference_codecs, offered_codecs, *associated_codec); + if (!matching_codec) { + RTC_LOG(LS_WARNING) << "Couldn't find matching " + << associated_codec->name << " codec."; + continue; + } + std::string red_param = absl::StrCat(matching_codec->id); + red_codec.params[kCodecParamNotInNameValueFormat] = + webrtc::StrJoin(std::vector{red_param, red_param}, "/"); + } + RTCErrorOr suggestion = + pt_suggester.SuggestPayloadType(mid, red_codec); + if (!suggestion.ok()) { + return suggestion.MoveError(); + } + red_codec.id = suggestion.value(); + offered_codecs.push_back(red_codec); + } + } + offered_codecs.CheckConsistency(); + return RTCError::OK(); +} + +// Adds all codecs from `reference_codecs` to `offered_codecs` that don't +// already exist in `offered_codecs` and ensure the payload types don't +// collide. +// OLD VERSION - uses UsedPayloadTypes +void MergeCodecs(const CodecList& reference_codecs, + CodecList& offered_codecs, + UsedPayloadTypes* used_pltypes) { + // Add all new codecs that are not RTX/RED codecs. + // The two-pass splitting of the loops means preferring payload types + // of actual codecs with respect to collisions. + for (const Codec& reference_codec : reference_codecs) { + if (reference_codec.GetResiliencyType() != Codec::ResiliencyType::kRtx && + reference_codec.GetResiliencyType() != Codec::ResiliencyType::kRed && + !FindMatchingCodec(reference_codecs, offered_codecs, reference_codec)) { + Codec codec = reference_codec; + used_pltypes->FindAndSetIdUsed(&codec); + offered_codecs.push_back(codec); + } + } + + // Add all new RTX or RED codecs. + for (const Codec& reference_codec : reference_codecs) { + if (reference_codec.GetResiliencyType() == Codec::ResiliencyType::kRtx && + !FindMatchingCodec(reference_codecs, offered_codecs, reference_codec)) { + Codec rtx_codec = reference_codec; + const Codec* associated_codec = + GetAssociatedCodecForRtx(reference_codecs, rtx_codec); + if (!associated_codec) { + continue; + } + // Find a codec in the offered list that matches the reference codec. + // Its payload type may be different than the reference codec. 
+ std::optional matching_codec = FindMatchingCodec( + reference_codecs, offered_codecs, *associated_codec); + if (!matching_codec) { + RTC_LOG(LS_WARNING) + << "Couldn't find matching " << associated_codec->name << " codec."; + continue; + } + + rtx_codec.params[kCodecParamAssociatedPayloadType] = + absl::StrCat(matching_codec->id); + used_pltypes->FindAndSetIdUsed(&rtx_codec); + offered_codecs.push_back(rtx_codec); + } else if (reference_codec.GetResiliencyType() == + Codec::ResiliencyType::kRed && + !FindMatchingCodec(reference_codecs, offered_codecs, + reference_codec)) { + Codec red_codec = reference_codec; + const Codec* associated_codec = + GetAssociatedCodecForRed(reference_codecs, red_codec); + if (associated_codec) { + std::optional matching_codec = FindMatchingCodec( + reference_codecs, offered_codecs, *associated_codec); + if (!matching_codec) { + RTC_LOG(LS_WARNING) << "Couldn't find matching " + << associated_codec->name << " codec."; + continue; + } + + red_codec.params[kCodecParamNotInNameValueFormat] = + absl::StrCat(matching_codec->id) + "/" + + absl::StrCat(matching_codec->id); + } + used_pltypes->FindAndSetIdUsed(&red_codec); + offered_codecs.push_back(red_codec); + } + } + offered_codecs.CheckConsistency(); +} + +// `codecs` is a full list of codecs with correct payload type mappings, which +// don't conflict with mappings of the other media type; `supported_codecs` is +// a list filtered for the media section`s direction but with default payload +// types. +// static +CodecList MatchCodecPreference( + const std::vector& codec_preferences, + const CodecList& codecs, + const CodecList& supported_codecs) { + CodecList filtered_codecs; + bool want_rtx = false; + bool want_red = false; + + for (const auto& codec_preference : codec_preferences) { + if (IsRtxCodec(codec_preference)) { + want_rtx = true; + } else if (IsRedCodec(codec_preference)) { + want_red = true; + } + } + bool red_was_added = false; + for (const auto& codec_preference : codec_preferences) { + auto found_codec = absl::c_find_if( + supported_codecs, [&codec_preference](const Codec& codec) { + // We should not filter out the codec in |codec_preferences| if it + // has a higher level than the codec in |supported_codecs|, as the + // codec in |supported_codecs| may be only with lower level in + // |send_codecs_| and |recv_codecs_| for the same codec. + return IsSameRtpCodecIgnoringLevel(codec, codec_preference); + }); + + if (found_codec != supported_codecs.end()) { + std::optional found_codec_with_correct_pt = + FindMatchingCodec(supported_codecs, codecs, *found_codec); + if (found_codec_with_correct_pt) { + // RED may already have been added if its primary codec is before RED + // in the codec list. + bool is_red_codec = found_codec_with_correct_pt->GetResiliencyType() == + Codec::ResiliencyType::kRed; + if (!is_red_codec || !red_was_added) { + filtered_codecs.push_back(*found_codec_with_correct_pt); + red_was_added = is_red_codec ? true : red_was_added; + } + std::string id = absl::StrCat(found_codec_with_correct_pt->id); + // Search for the matching rtx or red codec. 
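For context on where `codec_preferences` originates, a fragment mirroring the PreferencesAffectCodecChoice test added later in this patch (the mid string is arbitrary; ToRtpCodecCapability comes from pc/rtp_parameters_conversion.h):

MediaDescriptionOptions options(MediaType::VIDEO, "mid",
                                RtpTransceiverDirection::kSendOnly, false);
// Keep only VP9 from the supported list; RTX/RED entries are added back only
// if the preference list names them as well.
options.codec_preferences = {
    ToRtpCodecCapability(CreateVideoCodec(-1, "vp9")),
};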
+        if (want_red || want_rtx) {
+          for (const auto& codec : codecs) {
+            if (want_rtx &&
+                codec.GetResiliencyType() == Codec::ResiliencyType::kRtx) {
+              const auto apt =
+                  codec.params.find(kCodecParamAssociatedPayloadType);
+              if (apt != codec.params.end() && apt->second == id) {
+                filtered_codecs.push_back(codec);
+                break;
+              }
+            } else if (want_red && codec.GetResiliencyType() ==
+                                       Codec::ResiliencyType::kRed) {
+              // For RED, do not insert the codec again if it was already
+              // inserted. audio/red for opus gets enabled by having RED before
+              // the primary codec.
+              const auto fmtp =
+                  codec.params.find(kCodecParamNotInNameValueFormat);
+              if (fmtp != codec.params.end()) {
+                std::vector<absl::string_view> redundant_payloads =
+                    webrtc::split(fmtp->second, '/');
+                if (!redundant_payloads.empty() &&
+                    redundant_payloads[0] == id) {
+                  if (!red_was_added) {
+                    filtered_codecs.push_back(codec);
+                    red_was_added = true;
+                  }
+                  break;
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+
+  return filtered_codecs;
+}
+
+void NegotiatePacketization(const Codec& local_codec,
+                            const Codec& remote_codec,
+                            Codec* negotiated_codec) {
+  negotiated_codec->packetization =
+      (local_codec.packetization == remote_codec.packetization)
+          ? local_codec.packetization
+          : std::nullopt;
+}
+
+#ifdef RTC_ENABLE_H265
+void NegotiateTxMode(const Codec& local_codec,
+                     const Codec& remote_codec,
+                     Codec* negotiated_codec) {
+  negotiated_codec->tx_mode = (local_codec.tx_mode == remote_codec.tx_mode)
+                                  ? local_codec.tx_mode
+                                  : std::nullopt;
+}
+#endif
+
+// For offer, negotiated codec must have the same level-id as that in
+// |supported_codecs| with same profile.
+void NegotiateVideoCodecLevelsForOffer(
+    const MediaDescriptionOptions& media_description_options,
+    const CodecList& supported_codecs,
+    CodecList& filtered_codecs) {
+  if (filtered_codecs.empty() || supported_codecs.empty()) {
+    return;
+  }
+
+  // TODO(http://crbugs.com/376306259): We should handle level-idx for AV1.
+  // Ideally this should be done for all codecs, but RFCs of other codecs
+  // do not clearly define the expected behavior for the level in the offer.
+#ifdef RTC_ENABLE_H265
+  if (media_description_options.type == MediaType::VIDEO) {
+    std::unordered_map<H265Profile, H265Level> supported_h265_profiles;
+    // The assumption here is that H.265 codecs with the same profile and tier
+    // already carry the highest level for that profile in both
+    // |supported_codecs| and |filtered_codecs|.
+ for (const Codec& supported_codec : supported_codecs) { + if (absl::EqualsIgnoreCase(supported_codec.name, kH265CodecName)) { + std::optional supported_ptl = + webrtc::ParseSdpForH265ProfileTierLevel(supported_codec.params); + if (supported_ptl.has_value()) { + supported_h265_profiles[supported_ptl->profile] = + supported_ptl->level; + } + } + } + + if (supported_h265_profiles.empty()) { + return; + } + + for (auto& filtered_codec : filtered_codecs) { + if (absl::EqualsIgnoreCase(filtered_codec.name, kH265CodecName)) { + std::optional filtered_ptl = + webrtc::ParseSdpForH265ProfileTierLevel(filtered_codec.params); + if (filtered_ptl.has_value()) { + auto it = supported_h265_profiles.find(filtered_ptl->profile); + + if (it != supported_h265_profiles.end() && + filtered_ptl->level != it->second) { + filtered_codec.params[kH265FmtpLevelId] = + webrtc::H265LevelToString(it->second); + } + } + } + } + } +#endif +} + +RTCError NegotiateCodecs(const CodecList& local_codecs, + const CodecList& offered_codecs, + CodecList& negotiated_codecs_out, + bool keep_offer_order) { + std::map pt_mapping_table; + // Since we build the negotiated codec list one entry at a time, + // the list will have inconsistencies during building. + std::vector negotiated_codecs; + for (const Codec& ours : local_codecs) { + std::optional theirs = + FindMatchingCodec(local_codecs, offered_codecs, ours); + // Note that we intentionally only find one matching codec for each of our + // local codecs, in case the remote offer contains duplicate codecs. + if (theirs) { + Codec negotiated = ours; + NegotiatePacketization(ours, *theirs, &negotiated); + negotiated.IntersectFeedbackParams(*theirs); + if (negotiated.GetResiliencyType() == Codec::ResiliencyType::kRtx) { + // We support parsing the declarative rtx-time parameter. + const auto rtx_time_it = theirs->params.find(kCodecParamRtxTime); + if (rtx_time_it != theirs->params.end()) { + negotiated.SetParam(kCodecParamRtxTime, rtx_time_it->second); + } + } else if (negotiated.GetResiliencyType() == + Codec::ResiliencyType::kRed) { + const auto red_it = + theirs->params.find(kCodecParamNotInNameValueFormat); + if (red_it != theirs->params.end()) { + negotiated.SetParam(kCodecParamNotInNameValueFormat, red_it->second); + } + } + if (absl::EqualsIgnoreCase(ours.name, kH264CodecName)) { + webrtc::H264GenerateProfileLevelIdForAnswer(ours.params, theirs->params, + &negotiated.params); + } +#ifdef RTC_ENABLE_H265 + if (absl::EqualsIgnoreCase(ours.name, kH265CodecName)) { + webrtc::H265GenerateProfileTierLevelForAnswer( + ours.params, theirs->params, &negotiated.params); + NegotiateTxMode(ours, *theirs, &negotiated); + } +#endif + // Use their ID, if available. + pt_mapping_table.insert({negotiated.id, theirs->id}); + negotiated.id = theirs->id; + negotiated.name = theirs->name; + negotiated_codecs.push_back(std::move(negotiated)); + } + } + // Fix up apt parameters that point to other PTs. + for (Codec& negotiated : negotiated_codecs) { + if (negotiated.GetResiliencyType() == Codec::ResiliencyType::kRtx) { + // Change the apt value according to the pt mapping table. + // This avoids changing to apt values that don't exist any more. 
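A worked example of this re-mapping, with made-up payload types; CreateVideoRtxCodec and the SetParam overload taking an int are the helpers used elsewhere in this patch:

// Local codecs:   VP8 = 96, RTX = 97 with apt=96.
// Remote offer:   VP8 = 100, RTX = 101 with apt=100.
// NegotiateCodecs() adopts the remote IDs, so pt_mapping_table holds
// {96 -> 100, 97 -> 101} and the answered RTX entry is rewritten:
Codec rtx = CreateVideoRtxCodec(101, /*associated_payload_type=*/96);
rtx.SetParam(kCodecParamAssociatedPayloadType, /*pt_mapping_table[96]=*/100);
// The RTX codec now advertises apt=100, matching the VP8 entry of the answer.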
+ std::string apt_str; + if (!negotiated.GetParam(kCodecParamAssociatedPayloadType, &apt_str)) { + RTC_LOG(LS_WARNING) << "No apt value"; + continue; + } + int apt_value; + if (!webrtc::FromString(apt_str, &apt_value)) { + RTC_LOG(LS_WARNING) << "Unconvertable apt value"; + continue; + } + if (pt_mapping_table.count(apt_value) != 1) { + RTC_LOG(LS_WARNING) << "Unmapped apt value " << apt_value; + continue; + } + negotiated.SetParam(kCodecParamAssociatedPayloadType, + pt_mapping_table.at(apt_value)); + } + } + if (keep_offer_order) { + // RFC3264: Although the answerer MAY list the formats in their desired + // order of preference, it is RECOMMENDED that unless there is a + // specific reason, the answerer list formats in the same relative order + // they were present in the offer. + // This can be skipped when the transceiver has any codec preferences. + std::unordered_map payload_type_preferences; + int preference = static_cast(offered_codecs.size() + 1); + for (const Codec& codec : offered_codecs) { + payload_type_preferences[codec.id] = preference--; + } + absl::c_sort(negotiated_codecs, [&payload_type_preferences]( + const Codec& a, const Codec& b) { + return payload_type_preferences[a.id] > payload_type_preferences[b.id]; + }); + } + RTCErrorOr result = CodecList::Create(negotiated_codecs); + if (!result.ok()) { + return result.MoveError(); + } + negotiated_codecs_out = result.MoveValue(); + return RTCError::OK(); +} + +// Update the ID fields of the codec vector. +// If any codec has an ID with value "kIdNotSet", use the payload type suggester +// to assign and record a payload type for it. +// If there is a RED codec without its fmtp parameter, give it the ID of the +// first OPUS codec in the codec list. +RTCError AssignCodecIdsAndLinkRed(PayloadTypeSuggester* pt_suggester, + const std::string& mid, + std::vector& codecs) { + int codec_payload_type = Codec::kIdNotSet; + for (Codec& codec : codecs) { + if (codec.id == Codec::kIdNotSet) { + // Add payload types to codecs, if needed + // This should only happen if WebRTC-PayloadTypesInTransport field trial + // is enabled. + RTC_CHECK(pt_suggester); + auto result = pt_suggester->SuggestPayloadType(mid, codec); + if (!result.ok()) { + return result.error(); + } + codec.id = result.value(); + } + // record first Opus codec id + if (absl::EqualsIgnoreCase(codec.name, kOpusCodecName) && + codec_payload_type == Codec::kIdNotSet) { + codec_payload_type = codec.id; + } + } + if (codec_payload_type != Codec::kIdNotSet) { + for (Codec& codec : codecs) { + if (codec.type == Codec::Type::kAudio && + absl::EqualsIgnoreCase(codec.name, kRedCodecName)) { + if (codec.params.empty()) { + char buffer[100]; + SimpleStringBuilder param(buffer); + param << codec_payload_type << "/" << codec_payload_type; + codec.SetParam(kCodecParamNotInNameValueFormat, param.str()); + } + } + } + } + return RTCError::OK(); +} + +} // namespace + +RTCErrorOr> CodecVendor::GetNegotiatedCodecsForOffer( + const MediaDescriptionOptions& media_description_options, + const MediaSessionOptions& session_options, + const ContentInfo* current_content, + PayloadTypeSuggester& pt_suggester) { + CodecList codecs; + std::string mid = media_description_options.mid; + // If current content exists and is not being recycled, use its codecs. 
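For orientation before the body of this function, a typical call site; the fragment mirrors the VideoRtxIsIncludedWhenAskedFor test added later in this patch (FakeMediaEngine and FakePayloadTypeSuggester are test fakes from that file, and an initial offer passes a null current_content):

Environment env = CreateEnvironment();
FakeMediaEngine media_engine;
media_engine.SetVideoSendCodecs(
    {CreateVideoCodec(97, "vp8"), CreateVideoRtxCodec(98, 97)});
FakePayloadTypeSuggester pt_suggester;
CodecVendor codec_vendor(&media_engine, /*rtx_enabled=*/true,
                         env.field_trials());
RTCErrorOr<std::vector<Codec>> offered_codecs =
    codec_vendor.GetNegotiatedCodecsForOffer(
        MediaDescriptionOptions(MediaType::VIDEO, "mid",
                                RtpTransceiverDirection::kSendOnly, false),
        MediaSessionOptions(), /*current_content=*/nullptr, pt_suggester);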
+  if (current_content && current_content->mid() == mid) {
+    RTCErrorOr<CodecList> checked_codec_list =
+        CodecList::Create(current_content->media_description()->codecs());
+    if (!checked_codec_list.ok()) {
+      return checked_codec_list.MoveError();
+    }
+    // Use MergeCodecs in order to handle PT clashes.
+    MergeCodecs(checked_codec_list.value(), mid, codecs, pt_suggester);
+  }
+  // Add our codecs that are not in the current description.
+  if (media_description_options.type == MediaType::AUDIO) {
+    MergeCodecs(all_audio_codecs(), mid, codecs, pt_suggester);
+  } else {
+    MergeCodecs(all_video_codecs(), mid, codecs, pt_suggester);
+  }
+  CodecList filtered_codecs;
+  CodecList supported_codecs =
+      media_description_options.type == MediaType::AUDIO
+          ? GetAudioCodecsForOffer(media_description_options.direction)
+          : GetVideoCodecsForOffer(media_description_options.direction);
+
+  if (media_description_options.codecs_to_include.empty()) {
+    if (!media_description_options.codec_preferences.empty()) {
+      // Add the codecs from the current transceiver's codec preferences.
+      // They override any existing codecs from previous negotiations.
+      filtered_codecs =
+          MatchCodecPreference(media_description_options.codec_preferences,
+                               codecs, supported_codecs);
+    } else {
+      // Add the codecs from current content if it exists and is not rejected
+      // nor recycled.
+      if (current_content && !current_content->rejected &&
+          current_content->mid() == media_description_options.mid) {
+        if (!IsMediaContentOfType(current_content,
+                                  media_description_options.type)) {
+          // Can happen if the remote side re-uses a MID while recycling.
+          LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
+                               "Media type for content with mid='" +
+                                   current_content->mid() +
+                                   "' does not match previous type.");
+        }
+        const MediaContentDescription* mcd =
+            current_content->media_description();
+        for (const Codec& codec : mcd->codecs()) {
+          if (webrtc::FindMatchingCodec(mcd->codecs(), codecs.codecs(),
+                                        codec)) {
+            filtered_codecs.push_back(codec);
+          }
+        }
+      }
+      // Note what PTs are already in use.
+      UsedPayloadTypes
+          used_pltypes;  // Used to avoid pt collisions in filtered_codecs
+      for (auto& codec : filtered_codecs) {
+        // Note: This may change PTs. Doing so would indicate an error, but
+        // UsedPayloadTypes doesn't offer a means to make the distinction.
+        used_pltypes.FindAndSetIdUsed(&codec);
+      }
+      // Add other supported codecs.
+      for (const Codec& codec : supported_codecs) {
+        std::optional<Codec> found_codec =
+            FindMatchingCodec(supported_codecs, codecs, codec);
+        if (found_codec &&
+            !FindMatchingCodec(supported_codecs, filtered_codecs, codec)) {
+          // Use the `found_codec` from `codecs` because it has the
+          // correctly mapped payload type (most of the time).
+          if (media_description_options.type == MediaType::VIDEO &&
+              found_codec->GetResiliencyType() ==
+                  Codec::ResiliencyType::kRtx) {
+            // For RTX we might need to adjust the apt parameter if we got a
+            // remote offer without RTX for a codec for which we support RTX.
+            // This is only done for video since we do not yet have rtx for
+            // audio.
+            auto referenced_codec =
+                GetAssociatedCodecForRtx(supported_codecs, codec);
+            RTC_DCHECK(referenced_codec);
+
+            // Find the codec we should be referencing and point to it.
+ std::optional changed_referenced_codec = FindMatchingCodec( + supported_codecs, filtered_codecs, *referenced_codec); + if (changed_referenced_codec) { + found_codec->SetParam(kCodecParamAssociatedPayloadType, + changed_referenced_codec->id); + } + } + // Quick fix for b/395077842: Remap the codec if it collides. + used_pltypes.FindAndSetIdUsed(&(*found_codec)); + filtered_codecs.push_back(*found_codec); + } + } + } + + if (media_description_options.type == MediaType::AUDIO && + !session_options.vad_enabled) { + // If application doesn't want CN codecs in offer. + StripCNCodecs(filtered_codecs); + } else if (media_description_options.type == MediaType::VIDEO && + session_options.raw_packetization_for_video) { + for (Codec& codec : filtered_codecs) { + if (codec.IsMediaCodec()) { + codec.packetization = kPacketizationParamRaw; + } + } + } + NegotiateVideoCodecLevelsForOffer(media_description_options, + supported_codecs, filtered_codecs); + } else { + // media_description_options.codecs_to_include contains codecs + // TODO: issues.webrtc.org/360058654 - figure out if this can be deleted. + RTCErrorOr codecs_from_arg = + CodecList::Create(media_description_options.codecs_to_include); + if (!codecs_from_arg.ok()) { + return codecs_from_arg.MoveError(); + } + filtered_codecs = codecs_from_arg.MoveValue(); + } + AssignCodecIdsAndLinkRed(&pt_suggester, mid, + filtered_codecs.writable_codecs()); + return filtered_codecs.codecs(); +} + +RTCErrorOr CodecVendor::GetNegotiatedCodecsForAnswer( + const MediaDescriptionOptions& media_description_options, + const MediaSessionOptions& session_options, + RtpTransceiverDirection offer_rtd, + RtpTransceiverDirection answer_rtd, + const ContentInfo* current_content, + const std::vector codecs_from_offer, + PayloadTypeSuggester& pt_suggester) { + CodecList codecs; + std::string mid = media_description_options.mid; + if (current_content && current_content->mid() == mid) { + RTCErrorOr checked_codec_list = + CodecList::Create(current_content->media_description()->codecs()); + if (!checked_codec_list.ok()) { + return checked_codec_list.MoveError(); + } + MergeCodecs(checked_codec_list.value(), mid, codecs, pt_suggester); + } + // Add all our supported codecs + if (media_description_options.type == MediaType::AUDIO) { + MergeCodecs(all_audio_codecs(), mid, codecs, pt_suggester); + } else { + MergeCodecs(all_video_codecs(), mid, codecs, pt_suggester); + } + CodecList filtered_codecs; + CodecList negotiated_codecs; + if (media_description_options.codecs_to_include.empty()) { + const CodecList& supported_codecs = + media_description_options.type == MediaType::AUDIO + ? GetAudioCodecsForAnswer(offer_rtd, answer_rtd) + : GetVideoCodecsForAnswer(offer_rtd, answer_rtd); + if (!media_description_options.codec_preferences.empty()) { + filtered_codecs = + MatchCodecPreference(media_description_options.codec_preferences, + codecs, supported_codecs); + } else { + // Add the codecs from current content if it exists and is not rejected + // nor recycled. + if (current_content && !current_content->rejected && + current_content->mid() == media_description_options.mid) { + if (!IsMediaContentOfType(current_content, + media_description_options.type)) { + // Can happen if the remote side re-uses a MID while recycling. 
+ LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Media type for content with mid='" + + current_content->mid() + + "' does not match previous type."); + } + const MediaContentDescription* mcd = + current_content->media_description(); + for (const Codec& codec : mcd->codecs()) { + if (std::optional found_codec = webrtc::FindMatchingCodec( + mcd->codecs(), codecs.codecs(), codec)) { + filtered_codecs.push_back(*found_codec); + } + } + } + // Merge other_codecs into filtered_codecs, resolving PT conflicts. + MergeCodecs(supported_codecs, mid, filtered_codecs, pt_suggester); + } + + if (media_description_options.type == MediaType::AUDIO && + !session_options.vad_enabled) { + // If application doesn't want CN codecs in offer. + StripCNCodecs(filtered_codecs); + } else if (media_description_options.type == MediaType::VIDEO && + session_options.raw_packetization_for_video) { + for (Codec& codec : filtered_codecs) { + if (codec.IsMediaCodec()) { + codec.packetization = kPacketizationParamRaw; + } + } + } + // An offer is external data, so needs to be checked before use. + auto checked_codecs_from_offer = CodecList::Create(codecs_from_offer); + if (!checked_codecs_from_offer.ok()) { + return checked_codecs_from_offer.MoveError(); + } + NegotiateCodecs(filtered_codecs, checked_codecs_from_offer.value(), + negotiated_codecs, + media_description_options.codec_preferences.empty()); + } else { + // media_description_options.codecs_to_include contains codecs + RTCErrorOr codecs_from_arg = + CodecList::Create(media_description_options.codecs_to_include); + if (!codecs_from_arg.ok()) { + return codecs_from_arg.MoveError(); + } + negotiated_codecs = codecs_from_arg.MoveValue(); + } + AssignCodecIdsAndLinkRed(&pt_suggester, media_description_options.mid, + negotiated_codecs.writable_codecs()); + return negotiated_codecs.codecs(); +} + +CodecVendor::CodecVendor( + MediaEngineInterface* media_engine, + bool rtx_enabled, + const FieldTrialsView& trials) { // Null media_engine is permitted in + // order to allow unit testing where + // the codecs are explicitly set by the test. 
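For instance, a test can build a vendor without a media engine and inject the codec lists directly, as the TestSetAudioCodecs test added later in this patch does (MAKE_VECTOR and the kAudioCodecs1/kAudioCodecs2 fixtures come from that test file):

std::unique_ptr<FieldTrials> trials = FieldTrials::CreateNoGlobal("");
CodecVendor codec_vendor(/*media_engine=*/nullptr, /*rtx_enabled=*/false,
                         *trials);
codec_vendor.set_audio_codecs(
    CodecList::CreateFromTrustedData(MAKE_VECTOR(kAudioCodecs1)),
    CodecList::CreateFromTrustedData(MAKE_VECTOR(kAudioCodecs2)));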
+ if (media_engine) { + audio_send_codecs_ = + TypedCodecVendor(media_engine, MediaType::AUDIO, + /* is_sender= */ true, rtx_enabled, trials); + audio_recv_codecs_ = + TypedCodecVendor(media_engine, MediaType::AUDIO, + /* is_sender= */ false, rtx_enabled, trials); + video_send_codecs_ = + TypedCodecVendor(media_engine, MediaType::VIDEO, + /* is_sender= */ true, rtx_enabled, trials); + video_recv_codecs_ = + TypedCodecVendor(media_engine, MediaType::VIDEO, + /* is_sender= */ false, rtx_enabled, trials); + } +} + +const CodecList& CodecVendor::audio_send_codecs() const { + return audio_send_codecs_.codecs(); +} + +const CodecList& CodecVendor::audio_recv_codecs() const { + return audio_recv_codecs_.codecs(); +} + +void CodecVendor::set_audio_codecs(const CodecList& send_codecs, + const CodecList& recv_codecs) { + audio_send_codecs_.set_codecs(send_codecs); + audio_recv_codecs_.set_codecs(recv_codecs); +} + +const CodecList& CodecVendor::video_send_codecs() const { + return video_send_codecs_.codecs(); +} + +const CodecList& CodecVendor::video_recv_codecs() const { + return video_recv_codecs_.codecs(); +} + +void CodecVendor::set_video_codecs(const CodecList& send_codecs, + const CodecList& recv_codecs) { + video_send_codecs_.set_codecs(send_codecs); + video_recv_codecs_.set_codecs(recv_codecs); +} + +CodecList CodecVendor::GetVideoCodecsForOffer( + const RtpTransceiverDirection& direction) const { + switch (direction) { + // If stream is inactive - generate list as if sendrecv. + case RtpTransceiverDirection::kSendRecv: + case RtpTransceiverDirection::kStopped: + case RtpTransceiverDirection::kInactive: + return video_sendrecv_codecs(); + case RtpTransceiverDirection::kSendOnly: + return video_send_codecs_.codecs(); + case RtpTransceiverDirection::kRecvOnly: + return video_recv_codecs_.codecs(); + } + RTC_CHECK_NOTREACHED(); +} + +CodecList CodecVendor::GetVideoCodecsForAnswer( + const RtpTransceiverDirection& offer, + const RtpTransceiverDirection& answer) const { + switch (answer) { + // For inactive and sendrecv answers, generate lists as if we were to accept + // the offer's direction. See RFC 3264 Section 6.1. + case RtpTransceiverDirection::kSendRecv: + case RtpTransceiverDirection::kStopped: + case RtpTransceiverDirection::kInactive: + return GetVideoCodecsForOffer( + webrtc::RtpTransceiverDirectionReversed(offer)); + case RtpTransceiverDirection::kSendOnly: + return video_send_codecs_.codecs(); + case RtpTransceiverDirection::kRecvOnly: + return video_recv_codecs_.codecs(); + } + RTC_CHECK_NOTREACHED(); +} + +CodecList CodecVendor::GetAudioCodecsForOffer( + const RtpTransceiverDirection& direction) const { + switch (direction) { + // If stream is inactive - generate list as if sendrecv. + case RtpTransceiverDirection::kSendRecv: + case RtpTransceiverDirection::kStopped: + case RtpTransceiverDirection::kInactive: + return audio_sendrecv_codecs(); + case RtpTransceiverDirection::kSendOnly: + return audio_send_codecs_.codecs(); + case RtpTransceiverDirection::kRecvOnly: + return audio_recv_codecs_.codecs(); + } + RTC_CHECK_NOTREACHED(); +} + +CodecList CodecVendor::GetAudioCodecsForAnswer( + const RtpTransceiverDirection& offer, + const RtpTransceiverDirection& answer) const { + switch (answer) { + // For inactive and sendrecv answers, generate lists as if we were to accept + // the offer's direction. See RFC 3264 Section 6.1. 
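One concrete case of this rule, spelled out for illustration (these are the private helpers declared in pc/codec_vendor.h below):

// Example: a remote kSendOnly offer answered with kInactive.
//   GetAudioCodecsForAnswer(/*offer=*/RtpTransceiverDirection::kSendOnly,
//                           /*answer=*/RtpTransceiverDirection::kInactive)
//   == GetAudioCodecsForOffer(RtpTransceiverDirection::kRecvOnly)
//   == audio_recv_codecs_.codecs()
// i.e. only the receive codec list is offered back to a send-only peer.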
+ case RtpTransceiverDirection::kSendRecv: + case RtpTransceiverDirection::kStopped: + case RtpTransceiverDirection::kInactive: + return GetAudioCodecsForOffer( + webrtc::RtpTransceiverDirectionReversed(offer)); + case RtpTransceiverDirection::kSendOnly: + return audio_send_codecs_.codecs(); + case RtpTransceiverDirection::kRecvOnly: + return audio_recv_codecs_.codecs(); + } + RTC_CHECK_NOTREACHED(); +} + +CodecList CodecVendor::all_video_codecs() const { + CodecList all_codecs; + UsedPayloadTypes used_payload_types; + for (const Codec& codec : video_recv_codecs_.codecs()) { + Codec codec_mutable = codec; + used_payload_types.FindAndSetIdUsed(&codec_mutable); + all_codecs.push_back(codec_mutable); + } + + // Use MergeCodecs to merge the second half of our list as it already checks + // and fixes problems with duplicate payload types. + MergeCodecs(video_send_codecs_.codecs(), all_codecs, &used_payload_types); + + return all_codecs; +} + +CodecList CodecVendor::all_audio_codecs() const { + // Compute the audio codecs union. + CodecList codecs; + for (const Codec& send : audio_send_codecs_.codecs()) { + codecs.push_back(send); + if (!FindMatchingCodec(audio_send_codecs_.codecs(), + audio_recv_codecs_.codecs(), send)) { + // It doesn't make sense to have an RTX codec we support sending but not + // receiving. + RTC_DCHECK(send.GetResiliencyType() != Codec::ResiliencyType::kRtx); + } + } + for (const Codec& recv : audio_recv_codecs_.codecs()) { + if (!FindMatchingCodec(audio_recv_codecs_.codecs(), + audio_send_codecs_.codecs(), recv)) { + codecs.push_back(recv); + } + } + return codecs; +} + +CodecList CodecVendor::audio_sendrecv_codecs() const { + // Use NegotiateCodecs to merge our codec lists, since the operation is + // essentially the same. Put send_codecs as the offered_codecs, which is the + // order we'd like to follow. The reasoning is that encoding is usually more + // expensive than decoding, and prioritizing a codec in the send list probably + // means it's a codec we can handle efficiently. + CodecList audio_sendrecv_codecs; + auto error = + NegotiateCodecs(audio_recv_codecs_.codecs(), audio_send_codecs_.codecs(), + audio_sendrecv_codecs, true); + RTC_DCHECK(error.ok()); + return audio_sendrecv_codecs; +} + +CodecList CodecVendor::video_sendrecv_codecs() const { + // Use NegotiateCodecs to merge our codec lists, since the operation is + // essentially the same. Put send_codecs as the offered_codecs, which is the + // order we'd like to follow. The reasoning is that encoding is usually more + // expensive than decoding, and prioritizing a codec in the send list probably + // means it's a codec we can handle efficiently. + // Also for the same profile of a codec, if there are different levels in the + // send and receive codecs, |video_sendrecv_codecs| will contain the lower + // level of the two for that profile. + CodecList video_sendrecv_codecs; + auto error = + NegotiateCodecs(video_recv_codecs_.codecs(), video_send_codecs_.codecs(), + video_sendrecv_codecs, true); + RTC_DCHECK(error.ok()); + return video_sendrecv_codecs; +} + +void CodecVendor::ModifyVideoCodecs( + std::vector> changes) { + // For each codec in the first element that occurs in our supported codecs, + // replace it with the codec in the second element. Exact matches only. + // Note: This needs further work to work with PT late assignment. 
+ for (const std::pair& change : changes) { + { + CodecList send_codecs = video_send_codecs_.codecs(); + bool changed = false; + for (Codec& codec : send_codecs.writable_codecs()) { + if (codec == change.first) { + changed = true; + } + } + if (changed) { + video_send_codecs_.set_codecs(send_codecs); + } + } + { + bool changed = false; + CodecList recv_codecs = video_recv_codecs_.codecs(); + for (Codec& codec : recv_codecs.writable_codecs()) { + if (codec == change.first) { + codec = change.second; + changed = true; + } + } + if (changed) { + video_recv_codecs_.set_codecs(recv_codecs); + } + } + } +} + +} // namespace webrtc diff --git a/pc/codec_vendor.h b/pc/codec_vendor.h new file mode 100644 index 0000000000..1472225a00 --- /dev/null +++ b/pc/codec_vendor.h @@ -0,0 +1,140 @@ +/* + * Copyright 2025 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_CODEC_VENDOR_H_ +#define PC_CODEC_VENDOR_H_ + +#include +#include + +#include "api/field_trials_view.h" +#include "api/rtc_error.h" +#include "api/rtp_transceiver_direction.h" +#include "call/payload_type.h" +#include "media/base/codec.h" +#include "media/base/codec_list.h" +#include "media/base/media_engine.h" +#include "pc/media_options.h" +#include "pc/session_description.h" +#include "pc/typed_codec_vendor.h" + +namespace webrtc { + +// This class contains the functions required to compute the list of codecs +// for SDP offer/answer. It is exposed to MediaSessionDescriptionFactory +// for the construction of offers and answers. + +// TODO: bugs.webrtc.org/360058654 - complete the architectural changes +// The list of things to be done: +// - Make as much as possible private. +// - Split object usage into four objects: sender/receiver/audio/video. +// - Remove audio/video from the call names, merge code where possible. +// - Make the class instances owned by transceivers, so that codec +// lists can differ per transceiver. +// For cleanliness: +// - Thread guard +class CodecVendor { + public: + CodecVendor(MediaEngineInterface* media_engine, + bool rtx_enabled, + const FieldTrialsView& trials); + + public: + RTCErrorOr> GetNegotiatedCodecsForOffer( + const MediaDescriptionOptions& media_description_options, + const MediaSessionOptions& session_options, + const ContentInfo* current_content, + PayloadTypeSuggester& pt_suggester); + + RTCErrorOr GetNegotiatedCodecsForAnswer( + const MediaDescriptionOptions& media_description_options, + const MediaSessionOptions& session_options, + RtpTransceiverDirection offer_rtd, + RtpTransceiverDirection answer_rtd, + const ContentInfo* current_content, + std::vector codecs_from_offer, + PayloadTypeSuggester& pt_suggester); + + // Function exposed for issues.webrtc.org/412904801 + // Modify the video codecs to return on subsequent GetNegotiated* calls. + // The input is a vector of pairs of codecs. + // For each pair, the first element is the codec to be replaced, + // and the second element is the codec to replace it with. 
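As an illustration of the intended call shape (hypothetical codecs; this assumes the vendor currently advertises an identical VP9 entry, since the replacement requires an exact match, and "profile-id" is the standard VP9 fmtp key):

Codec old_vp9 = CreateVideoCodec(98, "VP9");
Codec new_vp9 = old_vp9;
new_vp9.SetParam("profile-id", "1");  // advertise VP9 profile 1 instead
codec_vendor.ModifyVideoCodecs({{old_vp9, new_vp9}});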
+ void ModifyVideoCodecs(std::vector> changes); + // Functions exposed for testing + void set_audio_codecs(const CodecList& send_codecs, + const CodecList& recv_codecs); + void set_audio_codecs(const std::vector& send_codecs, + const std::vector& recv_codecs) { + set_audio_codecs(CodecList::CreateFromTrustedData(send_codecs), + CodecList::CreateFromTrustedData(recv_codecs)); + } + void set_video_codecs(const CodecList& send_codecs, + const CodecList& recv_codecs); + void set_video_codecs(const std::vector& send_codecs, + const std::vector& recv_codecs) { + set_video_codecs(CodecList::CreateFromTrustedData(send_codecs), + CodecList::CreateFromTrustedData(recv_codecs)); + } + CodecList audio_sendrecv_codecs() const; + const CodecList& audio_send_codecs() const; + const CodecList& audio_recv_codecs() const; + CodecList video_sendrecv_codecs() const; + const CodecList& video_send_codecs() const; + const CodecList& video_recv_codecs() const; + + private: + CodecList GetAudioCodecsForOffer( + const RtpTransceiverDirection& direction) const; + CodecList GetAudioCodecsForAnswer( + const RtpTransceiverDirection& offer, + const RtpTransceiverDirection& answer) const; + CodecList GetVideoCodecsForOffer( + const RtpTransceiverDirection& direction) const; + CodecList GetVideoCodecsForAnswer( + const RtpTransceiverDirection& offer, + const RtpTransceiverDirection& answer) const; + + CodecList all_video_codecs() const; + CodecList all_audio_codecs() const; + + TypedCodecVendor audio_send_codecs_; + TypedCodecVendor audio_recv_codecs_; + + TypedCodecVendor video_send_codecs_; + TypedCodecVendor video_recv_codecs_; +}; + +// A class to assist in looking up data for a codec mapping. +// Pure virtual to allow implementations that depend on things that +// codec_vendor.h should not depend on. +// Pointers returned are not stable, and should not be stored. +class CodecLookupHelper { + public: + virtual ~CodecLookupHelper() = default; + virtual PayloadTypeSuggester* PayloadTypeSuggester() = 0; + // Look up the codec vendor to use, depending on context. + // This call may get additional arguments in the future, to aid + // in selection of the correct context. + virtual CodecVendor* GetCodecVendor() = 0; +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::CodecLookupHelper; +using ::webrtc::CodecVendor; +} // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // PC_CODEC_VENDOR_H_ diff --git a/pc/codec_vendor_unittest.cc b/pc/codec_vendor_unittest.cc new file mode 100644 index 0000000000..6e2ae36b53 --- /dev/null +++ b/pc/codec_vendor_unittest.cc @@ -0,0 +1,248 @@ +/* + * Copyright 2004 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "pc/codec_vendor.h" + +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/field_trials.h" +#include "api/media_types.h" +#include "api/rtc_error.h" +#include "api/rtp_transceiver_direction.h" +#include "api/test/rtc_error_matchers.h" +#include "call/fake_payload_type_suggester.h" +#include "media/base/codec.h" +#include "media/base/codec_list.h" +#include "media/base/fake_media_engine.h" +#include "media/base/media_constants.h" +#include "media/base/test_utils.h" +#include "pc/media_options.h" +#include "pc/rtp_parameters_conversion.h" +#include "pc/session_description.h" +#include "rtc_base/checks.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using testing::Contains; +using testing::Eq; +using testing::Field; + +Codec CreateRedAudioCodec(absl::string_view encoding_id) { + Codec red = CreateAudioCodec(63, "red", 48000, 2); + red.SetParam(kCodecParamNotInNameValueFormat, + std::string(encoding_id) + '/' + std::string(encoding_id)); + return red; +} + +const Codec kAudioCodecs1[] = {CreateAudioCodec(111, "opus", 48000, 2), + CreateRedAudioCodec("111"), + CreateAudioCodec(102, "G722", 16000, 1), + CreateAudioCodec(0, "PCMU", 8000, 1), + CreateAudioCodec(8, "PCMA", 8000, 1), + CreateAudioCodec(107, "CN", 48000, 1)}; + +const Codec kAudioCodecs2[] = { + CreateAudioCodec(126, "foo", 16000, 1), + CreateAudioCodec(0, "PCMU", 8000, 1), + CreateAudioCodec(127, "G722", 16000, 1), +}; + +const Codec kAudioCodecsAnswer[] = { + CreateAudioCodec(102, "G722", 16000, 1), + CreateAudioCodec(0, "PCMU", 8000, 1), +}; + +TEST(CodecVendorTest, TestSetAudioCodecs) { + std::unique_ptr trials = FieldTrials::CreateNoGlobal(""); + CodecVendor codec_vendor(nullptr, false, *trials); + std::vector send_codecs = MAKE_VECTOR(kAudioCodecs1); + std::vector recv_codecs = MAKE_VECTOR(kAudioCodecs2); + + // The merged list of codecs should contain any send codecs that are also + // nominally in the receive codecs list. Payload types should be picked from + // the send codecs and a number-of-channels of 0 and 1 should be equivalent + // (set to 1). This equals what happens when the send codecs are used in an + // offer and the receive codecs are used in the following answer. + const std::vector sendrecv_codecs = MAKE_VECTOR(kAudioCodecsAnswer); + CodecList no_codecs; + + RTC_CHECK_EQ(send_codecs[2].name, "G722") + << "Please don't change shared test data!"; + RTC_CHECK_EQ(recv_codecs[2].name, "G722") + << "Please don't change shared test data!"; + // Alter iLBC send codec to have zero channels, to test that that is handled + // properly. + send_codecs[2].channels = 0; + + // Alter PCMU receive codec to be lowercase, to test that case conversions + // are handled properly. 
+ recv_codecs[1].name = "pcmu"; + + // Test proper merge + codec_vendor.set_audio_codecs(CodecList::CreateFromTrustedData(send_codecs), + CodecList::CreateFromTrustedData(recv_codecs)); + EXPECT_EQ(send_codecs, codec_vendor.audio_send_codecs().codecs()); + EXPECT_EQ(recv_codecs, codec_vendor.audio_recv_codecs().codecs()); + EXPECT_EQ(sendrecv_codecs, codec_vendor.audio_sendrecv_codecs().codecs()); + + // Test empty send codecs list + codec_vendor.set_audio_codecs(no_codecs, + CodecList::CreateFromTrustedData(recv_codecs)); + EXPECT_EQ(no_codecs.codecs(), codec_vendor.audio_send_codecs().codecs()); + EXPECT_EQ(recv_codecs, codec_vendor.audio_recv_codecs().codecs()); + EXPECT_EQ(no_codecs.codecs(), codec_vendor.audio_sendrecv_codecs().codecs()); + + // Test empty recv codecs list + codec_vendor.set_audio_codecs(CodecList::CreateFromTrustedData(send_codecs), + no_codecs); + EXPECT_EQ(send_codecs, codec_vendor.audio_send_codecs().codecs()); + EXPECT_EQ(no_codecs.codecs(), codec_vendor.audio_recv_codecs().codecs()); + EXPECT_EQ(no_codecs.codecs(), codec_vendor.audio_sendrecv_codecs().codecs()); + + // Test all empty codec lists + codec_vendor.set_audio_codecs(no_codecs, no_codecs); + EXPECT_EQ(no_codecs, codec_vendor.audio_send_codecs()); + EXPECT_EQ(no_codecs, codec_vendor.audio_recv_codecs()); + EXPECT_EQ(no_codecs, codec_vendor.audio_sendrecv_codecs()); +} + +TEST(CodecVendorTest, VideoRtxIsIncludedWhenAskedFor) { + Environment env = CreateEnvironment(); + FakeMediaEngine media_engine; + std::vector video_codecs({ + CreateVideoCodec(97, "vp8"), + CreateVideoRtxCodec(98, 97), + }); + FakePayloadTypeSuggester pt_suggester; + media_engine.SetVideoSendCodecs(video_codecs); + CodecVendor codec_vendor(&media_engine, /* rtx_enabled= */ true, + env.field_trials()); + RTCErrorOr> offered_codecs = + codec_vendor.GetNegotiatedCodecsForOffer( + MediaDescriptionOptions(MediaType::VIDEO, "mid", + RtpTransceiverDirection::kSendOnly, false), + MediaSessionOptions(), nullptr, pt_suggester); + EXPECT_THAT(offered_codecs.value(), + Contains(Field("name", &Codec::name, "rtx"))); +} + +TEST(CodecVendorTest, VideoRtxIsExcludedWhenNotAskedFor) { + Environment env = CreateEnvironment(); + FakeMediaEngine media_engine; + std::vector video_codecs({ + CreateVideoCodec(97, "vp8"), + CreateVideoRtxCodec(98, 97), + }); + FakePayloadTypeSuggester pt_suggester; + media_engine.SetVideoSendCodecs(video_codecs); + CodecVendor codec_vendor(&media_engine, /* rtx_enabled= */ false, + env.field_trials()); + RTCErrorOr> offered_codecs = + codec_vendor.GetNegotiatedCodecsForOffer( + MediaDescriptionOptions(MediaType::VIDEO, "mid", + RtpTransceiverDirection::kSendOnly, false), + MediaSessionOptions(), nullptr, pt_suggester); + EXPECT_THAT(offered_codecs.value(), + Not(Contains(Field("name", &Codec::name, "rtx")))); +} + +TEST(CodecVendorTest, PreferencesAffectCodecChoice) { + Environment env = CreateEnvironment(); + FakeMediaEngine media_engine; + std::vector video_codecs({ + CreateVideoCodec(97, "vp8"), + CreateVideoRtxCodec(98, 97), + CreateVideoCodec(99, "vp9"), + CreateVideoRtxCodec(100, 99), + }); + media_engine.SetVideoSendCodecs(video_codecs); + CodecVendor codec_vendor(&media_engine, /* rtx_enabled= */ false, + env.field_trials()); + MediaDescriptionOptions options(MediaType::VIDEO, "mid", + RtpTransceiverDirection::kSendOnly, false); + options.codec_preferences = { + ToRtpCodecCapability(CreateVideoCodec(-1, "vp9")), + }; + FakePayloadTypeSuggester pt_suggester; + + RTCErrorOr> offered_codecs = + 
codec_vendor.GetNegotiatedCodecsForOffer(options, MediaSessionOptions(), + nullptr, pt_suggester); + ASSERT_TRUE(offered_codecs.ok()); + EXPECT_THAT(offered_codecs.value(), + Contains(Field("name", &Codec::name, "vp9"))); + EXPECT_THAT(offered_codecs.value(), + Not(Contains(Field("name", &Codec::name, "vp8")))); + EXPECT_THAT(offered_codecs.value().size(), Eq(1)); +} + +TEST(CodecVendorTest, GetNegotiatedCodecsForAnswerSimple) { + Environment env = CreateEnvironment(); + FakeMediaEngine media_engine; + std::vector video_codecs({ + CreateVideoCodec(97, "vp8"), + CreateVideoRtxCodec(98, 97), + CreateVideoCodec(99, "vp9"), + CreateVideoRtxCodec(100, 99), + }); + media_engine.SetVideoSendCodecs(video_codecs); + CodecVendor codec_vendor(&media_engine, /* rtx_enabled= */ true, + env.field_trials()); + MediaDescriptionOptions options(MediaType::VIDEO, "mid", + RtpTransceiverDirection::kSendOnly, false); + FakePayloadTypeSuggester pt_suggester; + ContentInfo* current_content = nullptr; + RTCErrorOr> answered_codecs = + codec_vendor.GetNegotiatedCodecsForAnswer( + options, MediaSessionOptions(), RtpTransceiverDirection::kSendOnly, + RtpTransceiverDirection::kSendOnly, current_content, video_codecs, + pt_suggester); + EXPECT_THAT(answered_codecs, IsRtcOkAndHolds(video_codecs)); +} + +TEST(CodecVendorTest, GetNegotiatedCodecsForAnswerWithCollision) { + Environment env = CreateEnvironment(); + FakeMediaEngine media_engine; + std::vector video_codecs({ + CreateVideoCodec(97, "vp8"), + CreateVideoCodec(99, "vp9"), + CreateVideoCodec(101, "av1"), + }); + std::vector remote_codecs({ + CreateVideoCodec(97, "av1"), + CreateVideoCodec(99, "vp9"), + }); + media_engine.SetVideoSendCodecs(video_codecs); + CodecVendor codec_vendor(&media_engine, /* rtx_enabled= */ false, + env.field_trials()); + MediaDescriptionOptions options(MediaType::VIDEO, "mid", + RtpTransceiverDirection::kSendOnly, false); + FakePayloadTypeSuggester pt_suggester; + ContentInfo* current_content = nullptr; + RTCErrorOr> answered_codecs = + codec_vendor.GetNegotiatedCodecsForAnswer( + options, MediaSessionOptions(), RtpTransceiverDirection::kSendOnly, + RtpTransceiverDirection::kSendOnly, current_content, remote_codecs, + pt_suggester); + EXPECT_THAT(answered_codecs, IsRtcOkAndHolds(remote_codecs)); +} + +} // namespace +} // namespace webrtc diff --git a/pc/congestion_control_integrationtest.cc b/pc/congestion_control_integrationtest.cc new file mode 100644 index 0000000000..d93a31924f --- /dev/null +++ b/pc/congestion_control_integrationtest.cc @@ -0,0 +1,129 @@ +/* + * Copyright 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains tests that verify that congestion control options +// are correctly negotiated in the SDP offer/answer. 
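// Illustrative sketch (not part of the change under review): the tests below
// assert that, with the "WebRTC-RFC8888CongestionControlFeedback" field trial
// enabled, the offer advertises RFC 8888 feedback through a session-level
// "a=rtcp-fb:* ack ccfb" attribute. A minimal application-side check for that
// attribute might look like the helper below; `OfferAdvertisesCcfb` is an
// assumed name, not an API introduced here.

#include <string>

#include "absl/strings/match.h"

namespace {

bool OfferAdvertisesCcfb(const std::string& offer_sdp) {
  // The attribute asserted on by OfferContainsCcfbIfEnabled below.
  return absl::StrContains(offer_sdp, "a=rtcp-fb:* ack ccfb");
}

}  // namespace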
+ +#include + +#include "absl/strings/str_cat.h" +#include "api/peer_connection_interface.h" +#include "api/test/rtc_error_matchers.h" +#include "pc/test/integration_test_helpers.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/wait_until.h" + +namespace webrtc { + +using testing::Eq; +using ::testing::Gt; +using testing::HasSubstr; +using ::testing::IsTrue; +using testing::Not; + +class PeerConnectionCongestionControlTest + : public PeerConnectionIntegrationBaseTest { + public: + PeerConnectionCongestionControlTest() + : PeerConnectionIntegrationBaseTest(SdpSemantics::kUnifiedPlan) {} +}; + +TEST_F(PeerConnectionCongestionControlTest, OfferContainsCcfbIfEnabled) { + SetFieldTrials("WebRTC-RFC8888CongestionControlFeedback/Enabled/"); + ASSERT_TRUE(CreatePeerConnectionWrappers()); + caller()->AddAudioVideoTracks(); + auto offer = caller()->CreateOfferAndWait(); + std::string offer_str = absl::StrCat(*offer); + EXPECT_THAT(offer_str, HasSubstr("a=rtcp-fb:* ack ccfb\r\n")); +} + +TEST_F(PeerConnectionCongestionControlTest, ReceiveOfferSetsCcfbFlag) { + SetFieldTrials("WebRTC-RFC8888CongestionControlFeedback/Enabled/"); + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignalingForSdpOnly(); + caller()->AddAudioVideoTracks(); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + { + // Check that the callee parsed it. + auto parsed_contents = + callee()->pc()->remote_description()->description()->contents(); + EXPECT_FALSE(parsed_contents.empty()); + for (const auto& content : parsed_contents) { + EXPECT_TRUE(content.media_description()->rtcp_fb_ack_ccfb()); + } + } + + { + // Check that the caller also parsed it. + auto parsed_contents = + caller()->pc()->remote_description()->description()->contents(); + EXPECT_FALSE(parsed_contents.empty()); + for (const auto& content : parsed_contents) { + EXPECT_TRUE(content.media_description()->rtcp_fb_ack_ccfb()); + } + } + // Check that the answer does not contain transport-cc + std::string answer_str = absl::StrCat(*caller()->pc()->remote_description()); + EXPECT_THAT(answer_str, Not(HasSubstr("transport-cc"))); +} + +TEST_F(PeerConnectionCongestionControlTest, CcfbGetsUsed) { + SetFieldTrials("WebRTC-RFC8888CongestionControlFeedback/Enabled/"); + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + caller()->AddAudioVideoTracks(); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + MediaExpectations media_expectations; + media_expectations.CalleeExpectsSomeAudio(); + media_expectations.CalleeExpectsSomeVideo(); + ASSERT_TRUE(ExpectNewFrames(media_expectations)); + auto pc_internal = caller()->pc_internal(); + EXPECT_THAT( + WaitUntil( + [&] { + return pc_internal->FeedbackAccordingToRfc8888CountForTesting(); + }, + Gt(0)), + IsRtcOk()); + // There should be no transport-cc generated. 
+ EXPECT_THAT(pc_internal->FeedbackAccordingToTransportCcCountForTesting(), + Eq(0)); +} + +TEST_F(PeerConnectionCongestionControlTest, TransportCcGetsUsed) { + SetFieldTrials("WebRTC-RFC8888CongestionControlFeedback/Disabled/"); + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + caller()->AddAudioVideoTracks(); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + MediaExpectations media_expectations; + media_expectations.CalleeExpectsSomeAudio(); + media_expectations.CalleeExpectsSomeVideo(); + ASSERT_TRUE(ExpectNewFrames(media_expectations)); + auto pc_internal = caller()->pc_internal(); + EXPECT_THAT( + WaitUntil( + [&] { + return pc_internal->FeedbackAccordingToTransportCcCountForTesting(); + }, + Gt(0)), + IsRtcOk()); + // Test that RFC 8888 feedback is NOT generated when field trial disabled. + EXPECT_THAT(pc_internal->FeedbackAccordingToRfc8888CountForTesting(), Eq(0)); +} + +} // namespace webrtc diff --git a/pc/connection_context.cc b/pc/connection_context.cc index 661550e2d4..0e208ea067 100644 --- a/pc/connection_context.cc +++ b/pc/connection_context.cc @@ -10,32 +10,39 @@ #include "pc/connection_context.h" -#include +#include #include -#include -#include "api/transport/field_trial_based_config.h" +#include "api/environment/environment.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/transport/sctp_transport_factory_interface.h" #include "media/base/media_engine.h" #include "media/sctp/sctp_transport_factory.h" -#include "rtc_base/helpers.h" +#include "p2p/base/basic_packet_socket_factory.h" +#include "pc/media_factory.h" +#include "rtc_base/checks.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/internal/default_socket_server.h" +#include "rtc_base/network.h" +#include "rtc_base/socket_factory.h" #include "rtc_base/socket_server.h" +#include "rtc_base/thread.h" #include "rtc_base/time_utils.h" namespace webrtc { namespace { -rtc::Thread* MaybeStartNetworkThread( - rtc::Thread* old_thread, - std::unique_ptr& socket_factory_holder, - std::unique_ptr& thread_holder) { +Thread* MaybeStartNetworkThread( + Thread* old_thread, + std::unique_ptr& socket_factory_holder, + std::unique_ptr& thread_holder) { if (old_thread) { return old_thread; } - std::unique_ptr socket_server = - rtc::CreateDefaultSocketServer(); - thread_holder = std::make_unique(socket_server.get()); + std::unique_ptr socket_server = CreateDefaultSocketServer(); + thread_holder = std::make_unique(socket_server.get()); socket_factory_holder = std::move(socket_server); thread_holder->SetName("pc_network_thread", nullptr); @@ -43,17 +50,16 @@ rtc::Thread* MaybeStartNetworkThread( return thread_holder.get(); } -rtc::Thread* MaybeWrapThread(rtc::Thread* signaling_thread, - bool& wraps_current_thread) { +Thread* MaybeWrapThread(Thread* signaling_thread, bool& wraps_current_thread) { wraps_current_thread = false; if (signaling_thread) { return signaling_thread; } - auto this_thread = rtc::Thread::Current(); + auto this_thread = Thread::Current(); if (!this_thread) { - // If this thread isn't already wrapped by an rtc::Thread, create a + // If this thread isn't already wrapped by an webrtc::Thread, create a // wrapper and own it in this class. 
- this_thread = rtc::ThreadManager::Instance()->WrapCurrentThread(); + this_thread = ThreadManager::Instance()->WrapCurrentThread(); wraps_current_thread = true; } return this_thread; @@ -61,13 +67,12 @@ rtc::Thread* MaybeWrapThread(rtc::Thread* signaling_thread, std::unique_ptr MaybeCreateSctpFactory( std::unique_ptr factory, - rtc::Thread* network_thread, - const FieldTrialsView& field_trials) { + Thread* network_thread) { if (factory) { return factory; } #ifdef WEBRTC_HAVE_SCTP - return std::make_unique(network_thread); + return std::make_unique(network_thread); #else return nullptr; #endif @@ -76,38 +81,42 @@ std::unique_ptr MaybeCreateSctpFactory( } // namespace // Static -rtc::scoped_refptr ConnectionContext::Create( +scoped_refptr ConnectionContext::Create( + const Environment& env, PeerConnectionFactoryDependencies* dependencies) { - return rtc::scoped_refptr( - new ConnectionContext(dependencies)); + return scoped_refptr( + new ConnectionContext(env, dependencies)); } ConnectionContext::ConnectionContext( + const Environment& env, PeerConnectionFactoryDependencies* dependencies) : network_thread_(MaybeStartNetworkThread(dependencies->network_thread, owned_socket_factory_, owned_network_thread_)), worker_thread_(dependencies->worker_thread, []() { - auto thread_holder = rtc::Thread::Create(); + auto thread_holder = Thread::Create(); thread_holder->SetName("pc_worker_thread", nullptr); thread_holder->Start(); return thread_holder; }), signaling_thread_(MaybeWrapThread(dependencies->signaling_thread, wraps_current_thread_)), - trials_(dependencies->trials ? std::move(dependencies->trials) - : std::make_unique()), - media_engine_(std::move(dependencies->media_engine)), + env_(env), + media_engine_( + dependencies->media_factory != nullptr + ? dependencies->media_factory->CreateMediaEngine(env_, + *dependencies) + : nullptr), network_monitor_factory_( std::move(dependencies->network_monitor_factory)), default_network_manager_(std::move(dependencies->network_manager)), - call_factory_(std::move(dependencies->call_factory)), + call_factory_(std::move(dependencies->media_factory)), default_socket_factory_(std::move(dependencies->packet_socket_factory)), sctp_factory_( MaybeCreateSctpFactory(std::move(dependencies->sctp_factory), - network_thread(), - *trials_.get())), + network_thread())), use_rtx_(true) { RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(!(default_network_manager_ && network_monitor_factory_)) @@ -131,9 +140,9 @@ ConnectionContext::ConnectionContext( }); } - rtc::InitRandom(rtc::Time32()); + InitRandom(Time32()); - rtc::SocketFactory* socket_factory = dependencies->socket_factory; + SocketFactory* socket_factory = dependencies->socket_factory; if (socket_factory == nullptr) { if (owned_socket_factory_) { socket_factory = owned_socket_factory_.get(); @@ -141,20 +150,20 @@ ConnectionContext::ConnectionContext( // TODO(bugs.webrtc.org/13145): This case should be deleted. Either // require that a PacketSocketFactory and NetworkManager always are // injected (with no need to construct these default objects), or require - // that if a network_thread is injected, an approprite rtc::SocketServer - // should be injected too. + // that if a network_thread is injected, an approprite + // webrtc::SocketServer should be injected too. socket_factory = network_thread()->socketserver(); } } if (!default_network_manager_) { // If network_monitor_factory_ is non-null, it will be used to create a // network monitor while on the network thread. 
- default_network_manager_ = std::make_unique( - network_monitor_factory_.get(), socket_factory, &field_trials()); + default_network_manager_ = std::make_unique( + env, socket_factory, network_monitor_factory_.get()); } if (!default_socket_factory_) { default_socket_factory_ = - std::make_unique(socket_factory); + std::make_unique(socket_factory); } // Set warning levels on the threads, to give warnings when response // may be slower than is expected of the thread. @@ -174,15 +183,8 @@ ConnectionContext::ConnectionContext( ConnectionContext::~ConnectionContext() { RTC_DCHECK_RUN_ON(signaling_thread_); - worker_thread_->BlockingCall([&] { - RTC_DCHECK_RUN_ON(worker_thread()); - // While `media_engine_` is const throughout the ConnectionContext's - // lifetime, it requires destruction to happen on the worker thread. Instead - // of marking the pointer as non-const, we live with this const_cast<> in - // the destructor. - const_cast&>(media_engine_) - .reset(); - }); + // `media_engine_` requires destruction to happen on the worker thread. + worker_thread_->PostTask([media_engine = std::move(media_engine_)] {}); // Make sure `worker_thread()` and `signaling_thread()` outlive // `default_socket_factory_` and `default_network_manager_`. @@ -190,7 +192,7 @@ ConnectionContext::~ConnectionContext() { default_network_manager_ = nullptr; if (wraps_current_thread_) - rtc::ThreadManager::Instance()->UnwrapCurrentThread(); + ThreadManager::Instance()->UnwrapCurrentThread(); } } // namespace webrtc diff --git a/pc/connection_context.h b/pc/connection_context.h index 38a6f8e514..9d27561fe1 100644 --- a/pc/connection_context.h +++ b/pc/connection_context.h @@ -12,48 +12,38 @@ #define PC_CONNECTION_CONTEXT_H_ #include -#include -#include "api/call/call_factory_interface.h" -#include "api/field_trials_view.h" -#include "api/media_stream_interface.h" +#include "api/environment/environment.h" +#include "api/packet_socket_factory.h" #include "api/peer_connection_interface.h" #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/transport/sctp_transport_factory_interface.h" #include "media/base/media_engine.h" -#include "p2p/base/basic_packet_socket_factory.h" -#include "rtc_base/checks.h" +#include "rtc_base/memory/always_valid_pointer.h" #include "rtc_base/network.h" #include "rtc_base/network_monitor_factory.h" -#include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/socket_factory.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" - -namespace rtc { -class BasicPacketSocketFactory; -class UniqueRandomIdGenerator; -} // namespace rtc +#include "rtc_base/unique_id_generator.h" namespace webrtc { -class RtcEventLog; - // This class contains resources needed by PeerConnection and associated // objects. A reference to this object is passed to each PeerConnection. The // methods on this object are assumed not to change the state in any way that // interferes with the operation of other PeerConnections. // // This class must be created and destroyed on the signaling thread. -class ConnectionContext final - : public rtc::RefCountedNonVirtual { +class ConnectionContext final : public RefCountedNonVirtual { public: // Creates a ConnectionContext. May return null if initialization fails. // The Dependencies class allows simple management of all new dependencies // being added to the ConnectionContext. 
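  // Illustrative usage (sketch only, not part of this change): with the new
  // signature an Environment is passed alongside the dependencies, e.g.
  //
  //   // CreateEnvironment() is from api/environment/environment_factory.h.
  //   Environment env = CreateEnvironment();
  //   PeerConnectionFactoryDependencies deps;  // hypothetical call site
  //   scoped_refptr<ConnectionContext> context =
  //       ConnectionContext::Create(env, &deps);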
- static rtc::scoped_refptr Create( + static scoped_refptr Create( + const Environment& env, PeerConnectionFactoryDependencies* dependencies); // This class is not copyable or movable. @@ -65,37 +55,34 @@ class ConnectionContext final return sctp_factory_.get(); } - cricket::MediaEngineInterface* media_engine() const { - return media_engine_.get(); - } + MediaEngineInterface* media_engine() const { return media_engine_.get(); } - rtc::Thread* signaling_thread() { return signaling_thread_; } - const rtc::Thread* signaling_thread() const { return signaling_thread_; } - rtc::Thread* worker_thread() { return worker_thread_.get(); } - const rtc::Thread* worker_thread() const { return worker_thread_.get(); } - rtc::Thread* network_thread() { return network_thread_; } - const rtc::Thread* network_thread() const { return network_thread_; } + Thread* signaling_thread() { return signaling_thread_; } + const Thread* signaling_thread() const { return signaling_thread_; } + Thread* worker_thread() { return worker_thread_.get(); } + const Thread* worker_thread() const { return worker_thread_.get(); } + Thread* network_thread() { return network_thread_; } + const Thread* network_thread() const { return network_thread_; } - // Field trials associated with the PeerConnectionFactory. - // Note: that there can be different field trials for different - // PeerConnections (but they are not supposed change after creating the - // PeerConnection). - const FieldTrialsView& field_trials() const { return *trials_.get(); } + // Environment associated with the PeerConnectionFactory. + // Note: environments are different for different PeerConnections, + // but they are not supposed to change after creating the PeerConnection. + const Environment& env() const { return env_; } // Accessors only used from the PeerConnectionFactory class - rtc::NetworkManager* default_network_manager() { + NetworkManager* default_network_manager() { RTC_DCHECK_RUN_ON(signaling_thread_); return default_network_manager_.get(); } - rtc::PacketSocketFactory* default_socket_factory() { + PacketSocketFactory* default_socket_factory() { RTC_DCHECK_RUN_ON(signaling_thread_); return default_socket_factory_.get(); } - CallFactoryInterface* call_factory() { + MediaFactory* call_factory() { RTC_DCHECK_RUN_ON(worker_thread()); return call_factory_.get(); } - rtc::UniqueRandomIdGenerator* ssrc_generator() { return &ssrc_generator_; } + UniqueRandomIdGenerator* ssrc_generator() { return &ssrc_generator_; } // Note: There is lots of code that wants to know whether or not we // use RTX, but so far, no code has been found that sets it to false. // Kept in the API in order to ease introduction if we want to resurrect @@ -106,40 +93,42 @@ class ConnectionContext final void set_use_rtx(bool use_rtx) { use_rtx_ = use_rtx; } protected: - explicit ConnectionContext(PeerConnectionFactoryDependencies* dependencies); + ConnectionContext(const Environment& env, + PeerConnectionFactoryDependencies* dependencies); - friend class rtc::RefCountedNonVirtual; + friend class RefCountedNonVirtual; ~ConnectionContext(); private: // The following three variables are used to communicate between the // constructor and the destructor, and are never exposed externally. 
bool wraps_current_thread_; - std::unique_ptr owned_socket_factory_; - std::unique_ptr owned_network_thread_ + std::unique_ptr owned_socket_factory_; + std::unique_ptr owned_network_thread_ RTC_GUARDED_BY(signaling_thread_); - rtc::Thread* const network_thread_; - AlwaysValidPointer const worker_thread_; - rtc::Thread* const signaling_thread_; + Thread* const network_thread_; + AlwaysValidPointer const worker_thread_; + Thread* const signaling_thread_; - // Accessed both on signaling thread and worker thread. - std::unique_ptr const trials_; + const Environment env_; - const std::unique_ptr media_engine_; + // This object is const over the lifetime of the ConnectionContext, and is + // only altered in the destructor. + std::unique_ptr media_engine_; // This object should be used to generate any SSRC that is not explicitly // specified by the user (or by the remote party). // TODO(bugs.webrtc.org/12666): This variable is used from both the signaling // and worker threads. See if we can't restrict usage to a single thread. - rtc::UniqueRandomIdGenerator ssrc_generator_; - std::unique_ptr const network_monitor_factory_ + UniqueRandomIdGenerator ssrc_generator_; + std::unique_ptr const network_monitor_factory_ RTC_GUARDED_BY(signaling_thread_); - std::unique_ptr default_network_manager_ + std::unique_ptr default_network_manager_ RTC_GUARDED_BY(signaling_thread_); - std::unique_ptr const call_factory_ + std::unique_ptr const call_factory_ RTC_GUARDED_BY(worker_thread()); - std::unique_ptr default_socket_factory_ + std::unique_ptr default_socket_factory_ RTC_GUARDED_BY(signaling_thread_); std::unique_ptr const sctp_factory_; diff --git a/pc/data_channel_controller.cc b/pc/data_channel_controller.cc index 93599fdba9..f8018d7187 100644 --- a/pc/data_channel_controller.cc +++ b/pc/data_channel_controller.cc @@ -10,17 +10,42 @@ #include "pc/data_channel_controller.h" +#include +#include +#include +#include +#include #include +#include #include "absl/algorithm/container.h" +#include "api/array_view.h" +#include "api/data_channel_event_observer_interface.h" +#include "api/data_channel_interface.h" #include "api/peer_connection_interface.h" +#include "api/priority.h" #include "api/rtc_error.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/transport/data_channel_transport_interface.h" +#include "media/sctp/sctp_transport_internal.h" +#include "pc/data_channel_utils.h" #include "pc/peer_connection_internal.h" +#include "pc/sctp_data_channel.h" #include "pc/sctp_utils.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/thread.h" +#include "rtc_base/time_utils.h" namespace webrtc { +using Message = DataChannelEventObserverInterface::Message; +using Direction = DataChannelEventObserverInterface::Message::Direction; + DataChannelController::~DataChannelController() { RTC_DCHECK(sctp_data_channels_n_.empty()) << "Missing call to TeardownDataChannelTransport_n?"; @@ -38,24 +63,38 @@ bool DataChannelController::HasUsedDataChannels() const { return channel_usage_ != DataChannelUsage::kNeverUsed; } -RTCError DataChannelController::SendData( - StreamId sid, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload) { +void DataChannelController::SetEventObserver( + std::unique_ptr observer) { + RTC_DCHECK_RUN_ON(network_thread()); + event_observer_ = std::move(observer); +} + +RTCError 
DataChannelController::SendData(StreamId sid, + const SendDataParams& params, + const CopyOnWriteBuffer& payload) { RTC_DCHECK_RUN_ON(network_thread()); if (!data_channel_transport_) { RTC_LOG(LS_ERROR) << "SendData called before transport is ready"; return RTCError(RTCErrorType::INVALID_STATE); } - return data_channel_transport_->SendData(sid.stream_id_int(), params, - payload); + RTCError result = + data_channel_transport_->SendData(sid.stream_id_int(), params, payload); + + if (event_observer_ && result.ok()) { + if (std::optional message = + BuildObserverMessage(sid, params.type, payload, Direction::kSend)) { + event_observer_->OnMessage(*message); + } + } + + return result; } -void DataChannelController::AddSctpDataStream(StreamId sid) { +void DataChannelController::AddSctpDataStream(StreamId sid, + PriorityValue priority) { RTC_DCHECK_RUN_ON(network_thread()); - RTC_DCHECK(sid.HasValue()); if (data_channel_transport_) { - data_channel_transport_->OpenChannel(sid.stream_id_int()); + data_channel_transport_->OpenChannel(sid.stream_id_int(), priority); } } @@ -89,27 +128,62 @@ void DataChannelController::OnChannelStateChanged( })); } -void DataChannelController::OnDataReceived( - int channel_id, - DataMessageType type, - const rtc::CopyOnWriteBuffer& buffer) { +size_t DataChannelController::buffered_amount(StreamId sid) const { + RTC_DCHECK_RUN_ON(network_thread()); + if (!data_channel_transport_) { + return 0; + } + return data_channel_transport_->buffered_amount(sid.stream_id_int()); +} + +size_t DataChannelController::buffered_amount_low_threshold( + StreamId sid) const { + RTC_DCHECK_RUN_ON(network_thread()); + if (!data_channel_transport_) { + return 0; + } + return data_channel_transport_->buffered_amount_low_threshold( + sid.stream_id_int()); +} + +void DataChannelController::SetBufferedAmountLowThreshold(StreamId sid, + size_t bytes) { + RTC_DCHECK_RUN_ON(network_thread()); + if (!data_channel_transport_) { + return; + } + data_channel_transport_->SetBufferedAmountLowThreshold(sid.stream_id_int(), + bytes); +} + +void DataChannelController::OnDataReceived(int channel_id, + DataMessageType type, + const CopyOnWriteBuffer& buffer) { RTC_DCHECK_RUN_ON(network_thread()); if (HandleOpenMessage_n(channel_id, type, buffer)) return; auto it = absl::c_find_if(sctp_data_channels_n_, [&](const auto& c) { - return c->sid_n().stream_id_int() == channel_id; + return c->sid_n().has_value() && c->sid_n()->stream_id_int() == channel_id; }); - if (it != sctp_data_channels_n_.end()) + if (it != sctp_data_channels_n_.end()) { (*it)->OnDataReceived(type, buffer); + + if (event_observer_) { + if (std::optional message = BuildObserverMessage( + StreamId(channel_id), type, buffer, Direction::kReceive)) { + event_observer_->OnMessage(*message); + } + } + } } void DataChannelController::OnChannelClosing(int channel_id) { RTC_DCHECK_RUN_ON(network_thread()); auto it = absl::c_find_if(sctp_data_channels_n_, [&](const auto& c) { - return c->sid_n().stream_id_int() == channel_id; + return c->sid_n().has_value() && c->sid_n()->stream_id_int() == channel_id; }); if (it != sctp_data_channels_n_.end()) @@ -124,7 +198,7 @@ void DataChannelController::OnChannelClosed(int channel_id) { [&](const auto& c) { return c->sid_n() == sid; }); if (it != sctp_data_channels_n_.end()) { - rtc::scoped_refptr channel = std::move(*it); + scoped_refptr channel = std::move(*it); sctp_data_channels_n_.erase(it); channel->OnClosingProcedureComplete(); } @@ -134,7 +208,7 @@ void DataChannelController::OnReadyToSend() { 
RTC_DCHECK_RUN_ON(network_thread()); auto copy = sctp_data_channels_n_; for (const auto& channel : copy) { - if (channel->sid_n().HasValue()) { + if (channel->sid_n().has_value()) { channel->OnTransportReady(); } else { // This happens for role==SSL_SERVER channels when we get notified by @@ -153,14 +227,26 @@ void DataChannelController::OnTransportClosed(RTCError error) { // `OnSctpDataChannelClosed`. We'll empty `sctp_data_channels_n_`, first // and `OnSctpDataChannelClosed` will become a noop but we'll release the // StreamId here. - std::vector> temp_sctp_dcs; + std::vector> temp_sctp_dcs; temp_sctp_dcs.swap(sctp_data_channels_n_); for (const auto& channel : temp_sctp_dcs) { channel->OnTransportChannelClosed(error); - sid_allocator_.ReleaseSid(channel->sid_n()); + if (channel->sid_n().has_value()) { + sid_allocator_.ReleaseSid(*channel->sid_n()); + } } } +void DataChannelController::OnBufferedAmountLow(int channel_id) { + RTC_DCHECK_RUN_ON(network_thread()); + auto it = absl::c_find_if(sctp_data_channels_n_, [&](const auto& c) { + return c->sid_n().has_value() && c->sid_n()->stream_id_int() == channel_id; + }); + + if (it != sctp_data_channels_n_.end()) + (*it)->OnBufferedAmountLow(); +} + void DataChannelController::SetupDataChannelTransport_n( DataChannelTransportInterface* transport) { RTC_DCHECK_RUN_ON(network_thread()); @@ -207,7 +293,7 @@ std::vector DataChannelController::GetDataChannelStats() bool DataChannelController::HandleOpenMessage_n( int channel_id, DataMessageType type, - const rtc::CopyOnWriteBuffer& buffer) { + const CopyOnWriteBuffer& buffer) { if (type != DataMessageType::kControl || !IsOpenMessage(buffer)) return false; @@ -239,7 +325,7 @@ bool DataChannelController::HandleOpenMessage_n( } void DataChannelController::OnDataChannelOpenMessage( - rtc::scoped_refptr channel, + scoped_refptr channel, bool ready_to_send) { channel_usage_ = DataChannelUsage::kInUse; auto proxy = SctpDataChannel::CreateProxy(channel, signaling_safety_.flag()); @@ -257,56 +343,65 @@ void DataChannelController::OnDataChannelOpenMessage( // RTC_RUN_ON(network_thread()) RTCError DataChannelController::ReserveOrAllocateSid( - StreamId& sid, - absl::optional fallback_ssl_role) { - if (sid.HasValue()) { - return sid_allocator_.ReserveSid(sid) + std::optional& sid, + std::optional fallback_ssl_role) { + if (sid.has_value()) { + return sid_allocator_.ReserveSid(*sid) ? RTCError::OK() - : RTCError(RTCErrorType::INVALID_RANGE, - "StreamId out of range or reserved."); + : RTCError(RTCErrorType::INVALID_RANGE, "StreamId reserved."); } // Attempt to allocate an ID based on the negotiated role. - absl::optional role = pc_->GetSctpSslRole_n(); + std::optional role = pc_->GetSctpSslRole_n(); if (!role) role = fallback_ssl_role; if (role) { sid = sid_allocator_.AllocateSid(*role); - if (!sid.HasValue()) + if (!sid.has_value()) return RTCError(RTCErrorType::RESOURCE_EXHAUSTED); } // When we get here, we may still not have an ID, but that's a supported case // whereby an id will be assigned later. 
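  // Put differently, the invariant checked below is: either an id was reserved
  // or allocated above, or no SSL role is known yet (neither negotiated nor
  // supplied as a fallback), in which case assignment is deferred to
  // AllocateSctpSids().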
- RTC_DCHECK(sid.HasValue() || !role); + RTC_DCHECK(sid.has_value() || !role); return RTCError::OK(); } // RTC_RUN_ON(network_thread()) -RTCErrorOr> +RTCErrorOr> DataChannelController::CreateDataChannel(const std::string& label, InternalDataChannelInit& config) { - StreamId sid(config.id); + std::optional sid = std::nullopt; + if (config.id != -1) { + if (config.id < 0 || config.id > kMaxSctpSid) { + return RTCError(RTCErrorType::INVALID_RANGE, "StreamId out of range."); + } + sid = StreamId(config.id); + } + RTCError err = ReserveOrAllocateSid(sid, config.fallback_ssl_role); if (!err.ok()) return err; // In case `sid` has changed. Update `config` accordingly. - config.id = sid.stream_id_int(); + if (sid.has_value()) { + config.id = sid->stream_id_int(); + } - rtc::scoped_refptr channel = SctpDataChannel::Create( + scoped_refptr channel = SctpDataChannel::Create( weak_factory_.GetWeakPtr(), label, data_channel_transport_ != nullptr, config, signaling_thread(), network_thread()); RTC_DCHECK(channel); sctp_data_channels_n_.push_back(channel); // If we have an id already, notify the transport. - if (sid.HasValue()) - AddSctpDataStream(sid); + if (sid.has_value()) + AddSctpDataStream(*sid, + config.priority.value_or(PriorityValue(Priority::kLow))); return channel; } -RTCErrorOr> +RTCErrorOr> DataChannelController::InternalCreateDataChannelWithProxy( const std::string& label, const InternalDataChannelInit& config) { @@ -319,9 +414,8 @@ DataChannelController::InternalCreateDataChannelWithProxy( bool ready_to_send = false; InternalDataChannelInit new_config = config; - StreamId sid(new_config.id); auto ret = network_thread()->BlockingCall( - [&]() -> RTCErrorOr> { + [&]() -> RTCErrorOr> { RTC_DCHECK_RUN_ON(network_thread()); auto channel = CreateDataChannel(label, new_config); if (!channel.ok()) @@ -351,26 +445,26 @@ DataChannelController::InternalCreateDataChannelWithProxy( signaling_safety_.flag()); } -void DataChannelController::AllocateSctpSids(rtc::SSLRole role) { +void DataChannelController::AllocateSctpSids(SSLRole role) { RTC_DCHECK_RUN_ON(network_thread()); const bool ready_to_send = data_channel_transport_ && data_channel_transport_->IsReadyToSend(); std::vector> channels_to_update; - std::vector> channels_to_close; + std::vector> channels_to_close; for (auto it = sctp_data_channels_n_.begin(); it != sctp_data_channels_n_.end();) { - if (!(*it)->sid_n().HasValue()) { - StreamId sid = sid_allocator_.AllocateSid(role); - if (sid.HasValue()) { - (*it)->SetSctpSid_n(sid); - AddSctpDataStream(sid); + if (!(*it)->sid_n().has_value()) { + std::optional sid = sid_allocator_.AllocateSid(role); + if (sid.has_value()) { + (*it)->SetSctpSid_n(*sid); + AddSctpDataStream(*sid, (*it)->priority()); if (ready_to_send) { RTC_LOG(LS_INFO) << "AllocateSctpSids: Id assigned, ready to send."; (*it)->OnTransportReady(); } - channels_to_update.push_back(std::make_pair((*it).get(), sid)); + channels_to_update.push_back(std::make_pair((*it).get(), *sid)); } else { channels_to_close.push_back(std::move(*it)); it = sctp_data_channels_n_.erase(it); @@ -391,8 +485,8 @@ void DataChannelController::OnSctpDataChannelClosed(SctpDataChannel* channel) { RTC_DCHECK_RUN_ON(network_thread()); // After the closing procedure is done, it's safe to use this ID for // another data channel. 
- if (channel->sid_n().HasValue()) { - sid_allocator_.ReleaseSid(channel->sid_n()); + if (channel->sid_n().has_value()) { + sid_allocator_.ReleaseSid(*channel->sid_n()); } auto it = absl::c_find_if(sctp_data_channels_n_, [&](const auto& c) { return c.get() == channel; }); @@ -418,22 +512,55 @@ void DataChannelController::set_data_channel_transport( } } +std::optional DataChannelController::BuildObserverMessage( + StreamId sid, + DataMessageType type, + ArrayView payload, + Message::Direction direction) const { + RTC_DCHECK_RUN_ON(network_thread()); + + if (type != DataMessageType::kText && type != DataMessageType::kBinary) { + return std::nullopt; + } + + auto it = absl::c_find_if(sctp_data_channels_n_, [sid](const auto& channel) { + return channel->sid_n() == sid; + }); + + if (it == sctp_data_channels_n_.end()) { + return std::nullopt; + } + + Message message; + Message::DataType data_type = type == DataMessageType::kBinary + ? Message::DataType::kBinary + : Message::DataType::kString; + message.set_data_type(data_type); + message.set_unix_timestamp_ms(TimeUTCMillis()); + message.set_datachannel_id(sid.stream_id_int()); + message.set_label((*it)->label()); + message.set_direction(direction); + message.set_data(payload); + + return message; +} + void DataChannelController::NotifyDataChannelsOfTransportCreated() { RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK(data_channel_transport_); for (const auto& channel : sctp_data_channels_n_) { - if (channel->sid_n().HasValue()) - AddSctpDataStream(channel->sid_n()); + if (channel->sid_n().has_value()) + AddSctpDataStream(*channel->sid_n(), channel->priority()); channel->OnTransportChannelCreated(); } } -rtc::Thread* DataChannelController::network_thread() const { +Thread* DataChannelController::network_thread() const { return pc_->network_thread(); } -rtc::Thread* DataChannelController::signaling_thread() const { +Thread* DataChannelController::signaling_thread() const { return pc_->signaling_thread(); } diff --git a/pc/data_channel_controller.h b/pc/data_channel_controller.h index bf3ac03437..b9e8c60008 100644 --- a/pc/data_channel_controller.h +++ b/pc/data_channel_controller.h @@ -11,10 +11,17 @@ #ifndef PC_DATA_CHANNEL_CONTROLLER_H_ #define PC_DATA_CHANNEL_CONTROLLER_H_ +#include +#include +#include +#include #include #include +#include "api/array_view.h" +#include "api/data_channel_event_observer_interface.h" #include "api/data_channel_interface.h" +#include "api/priority.h" #include "api/rtc_error.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" @@ -22,7 +29,7 @@ #include "api/transport/data_channel_transport_interface.h" #include "pc/data_channel_utils.h" #include "pc/sctp_data_channel.h" -#include "rtc_base/checks.h" +#include "pc/sctp_utils.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/thread.h" @@ -49,20 +56,24 @@ class DataChannelController : public SctpDataChannelControllerInterface, // SctpDataChannelProviderInterface. 
RTCError SendData(StreamId sid, const SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload) override; - void AddSctpDataStream(StreamId sid) override; + const CopyOnWriteBuffer& payload) override; + void AddSctpDataStream(StreamId sid, PriorityValue priority) override; void RemoveSctpDataStream(StreamId sid) override; void OnChannelStateChanged(SctpDataChannel* channel, DataChannelInterface::DataState state) override; + size_t buffered_amount(StreamId sid) const override; + size_t buffered_amount_low_threshold(StreamId sid) const override; + void SetBufferedAmountLowThreshold(StreamId sid, size_t bytes) override; // Implements DataChannelSink. void OnDataReceived(int channel_id, DataMessageType type, - const rtc::CopyOnWriteBuffer& buffer) override; + const CopyOnWriteBuffer& buffer) override; void OnChannelClosing(int channel_id) override; void OnChannelClosed(int channel_id) override; void OnReadyToSend() override; void OnTransportClosed(RTCError error) override; + void OnBufferedAmountLow(int channel_id) override; // Called as part of destroying the owning PeerConnection. void PrepareForShutdown(); @@ -82,10 +93,10 @@ class DataChannelController : public SctpDataChannelControllerInterface, // Creates channel and adds it to the collection of DataChannels that will // be offered in a SessionDescription, and wraps it in a proxy object. - RTCErrorOr> + RTCErrorOr> InternalCreateDataChannelWithProxy(const std::string& label, const InternalDataChannelInit& config); - void AllocateSctpSids(rtc::SSLRole role); + void AllocateSctpSids(SSLRole role); // Check if data channels are currently tracked. Used to decide whether a // rejected m=application section should be reoffered. @@ -94,15 +105,18 @@ class DataChannelController : public SctpDataChannelControllerInterface, // At some point in time, a data channel has existed. bool HasUsedDataChannels() const; + void SetEventObserver( + std::unique_ptr observer); + protected: - rtc::Thread* network_thread() const; - rtc::Thread* signaling_thread() const; + Thread* network_thread() const; + Thread* signaling_thread() const; private: void OnSctpDataChannelClosed(SctpDataChannel* channel); // Creates a new SctpDataChannel object on the network thread. - RTCErrorOr> CreateDataChannel( + RTCErrorOr> CreateDataChannel( const std::string& label, InternalDataChannelInit& config) RTC_RUN_ON(network_thread()); @@ -110,10 +124,10 @@ class DataChannelController : public SctpDataChannelControllerInterface, // message and should be considered to be handled, false otherwise. bool HandleOpenMessage_n(int channel_id, DataMessageType type, - const rtc::CopyOnWriteBuffer& buffer) + const CopyOnWriteBuffer& buffer) RTC_RUN_ON(network_thread()); // Called when a valid data channel OPEN message is received. - void OnDataChannelOpenMessage(rtc::scoped_refptr channel, + void OnDataChannelOpenMessage(scoped_refptr channel, bool ready_to_send) RTC_RUN_ON(signaling_thread()); @@ -125,8 +139,8 @@ class DataChannelController : public SctpDataChannelControllerInterface, // will still be unassigned upon return, but will be assigned later. // If the pool has been exhausted or a sid has already been reserved, an // error will be returned. 
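  // Illustrative call pattern (sketch mirroring CreateDataChannel() in the
  // accompanying .cc change; not a new API):
  //
  //   std::optional<StreamId> sid;  // unset: request allocation
  //   RTCError err = ReserveOrAllocateSid(sid, fallback_ssl_role);
  //   if (err.ok() && sid.has_value()) {
  //     // The id is now reserved for this channel.
  //   }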
- RTCError ReserveOrAllocateSid(StreamId& sid, - absl::optional fallback_ssl_role) + RTCError ReserveOrAllocateSid(std::optional& sid, + std::optional fallback_ssl_role) RTC_RUN_ON(network_thread()); // Called when all data channels need to be notified of a transport channel @@ -135,13 +149,21 @@ class DataChannelController : public SctpDataChannelControllerInterface, void set_data_channel_transport(DataChannelTransportInterface* transport); + std::optional + BuildObserverMessage( + StreamId sid, + DataMessageType type, + ArrayView payload, + DataChannelEventObserverInterface::Message::Direction direction) const + RTC_RUN_ON(network_thread()); + // Plugin transport used for data channels. Pointer may be accessed and // checked from any thread, but the object may only be touched on the // network thread. DataChannelTransportInterface* data_channel_transport_ RTC_GUARDED_BY(network_thread()) = nullptr; SctpSidAllocator sid_allocator_ RTC_GUARDED_BY(network_thread()); - std::vector> sctp_data_channels_n_ + std::vector> sctp_data_channels_n_ RTC_GUARDED_BY(network_thread()); enum class DataChannelUsage : uint8_t { kNeverUsed = 0, @@ -151,11 +173,13 @@ class DataChannelController : public SctpDataChannelControllerInterface, DataChannelUsage channel_usage_ RTC_GUARDED_BY(signaling_thread()) = DataChannelUsage::kNeverUsed; + std::unique_ptr event_observer_; + // Owning PeerConnection. PeerConnectionInternal* const pc_; // The weak pointers must be dereferenced and invalidated on the network // thread only. - rtc::WeakPtrFactory weak_factory_ + WeakPtrFactory weak_factory_ RTC_GUARDED_BY(network_thread()){this}; ScopedTaskSafety signaling_safety_; }; diff --git a/pc/data_channel_controller_unittest.cc b/pc/data_channel_controller_unittest.cc index 3b8adb6819..88f8a9e6c7 100644 --- a/pc/data_channel_controller_unittest.cc +++ b/pc/data_channel_controller_unittest.cc @@ -10,12 +10,31 @@ #include "pc/data_channel_controller.h" +#include +#include #include +#include +#include +#include +#include "api/data_channel_event_observer_interface.h" +#include "api/data_channel_interface.h" +#include "api/make_ref_counted.h" +#include "api/priority.h" +#include "api/rtc_error.h" +#include "api/scoped_refptr.h" +#include "api/transport/data_channel_transport_interface.h" +#include "api/units/timestamp.h" +#include "media/sctp/sctp_transport_internal.h" #include "pc/peer_connection_internal.h" #include "pc/sctp_data_channel.h" +#include "pc/sctp_utils.h" #include "pc/test/mock_peer_connection_internal.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/fake_clock.h" #include "rtc_base/null_socket_server.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/thread.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/run_loop.h" @@ -24,23 +43,49 @@ namespace webrtc { namespace { +using Message = DataChannelEventObserverInterface::Message; +using ::testing::ElementsAreArray; +using ::testing::IsEmpty; using ::testing::NiceMock; using ::testing::Return; +using ::testing::SizeIs; -class MockDataChannelTransport : public webrtc::DataChannelTransportInterface { +constexpr uint8_t kSomeData[] = {5, 4, 3, 2, 1}; + +class MockDataChannelTransport : public DataChannelTransportInterface { public: ~MockDataChannelTransport() override {} - MOCK_METHOD(RTCError, OpenChannel, (int channel_id), (override)); + MOCK_METHOD(RTCError, + OpenChannel, + (int channel_id, PriorityValue priority), + (override)); MOCK_METHOD(RTCError, SendData, (int channel_id, const SendDataParams& params, - const 
rtc::CopyOnWriteBuffer& buffer), + const webrtc::CopyOnWriteBuffer& buffer), (override)); MOCK_METHOD(RTCError, CloseChannel, (int channel_id), (override)); MOCK_METHOD(void, SetDataSink, (DataChannelSink * sink), (override)); MOCK_METHOD(bool, IsReadyToSend, (), (const, override)); + MOCK_METHOD(size_t, buffered_amount, (int channel_id), (const, override)); + MOCK_METHOD(size_t, + buffered_amount_low_threshold, + (int channel_id), + (const, override)); + MOCK_METHOD(void, + SetBufferedAmountLowThreshold, + (int channel_id, size_t bytes), + (override)); +}; + +class MockDataChannelEventObserver : public DataChannelEventObserverInterface { + public: + MOCK_METHOD(void, + OnMessage, + (const DataChannelEventObserverInterface::Message& message), + (override)); }; // Convenience class for tests to ensure that shutdown methods for DCC @@ -71,11 +116,10 @@ class DataChannelControllerForTest : public DataChannelController { class DataChannelControllerTest : public ::testing::Test { protected: DataChannelControllerTest() - : network_thread_(std::make_unique()) { + : network_thread_(std::make_unique()) { network_thread_.Start(); - pc_ = rtc::make_ref_counted>(); - ON_CALL(*pc_, signaling_thread) - .WillByDefault(Return(rtc::Thread::Current())); + pc_ = make_ref_counted>(); + ON_CALL(*pc_, signaling_thread).WillByDefault(Return(Thread::Current())); ON_CALL(*pc_, network_thread).WillByDefault(Return(&network_thread_)); } @@ -84,9 +128,10 @@ class DataChannelControllerTest : public ::testing::Test { network_thread_.Stop(); } + ScopedBaseFakeClock clock_; test::RunLoop run_loop_; - rtc::Thread network_thread_; - rtc::scoped_refptr> pc_; + Thread network_thread_; + scoped_refptr> pc_; }; TEST_F(DataChannelControllerTest, CreateAndDestroy) { @@ -146,18 +191,17 @@ TEST_F(DataChannelControllerTest, MaxChannels) { int channel_id = 0; ON_CALL(*pc_, GetSctpSslRole_n).WillByDefault([&]() { - return absl::optional((channel_id & 1) ? rtc::SSL_SERVER - : rtc::SSL_CLIENT); + return std::optional((channel_id & 1) ? SSL_SERVER : SSL_CLIENT); }); DataChannelControllerForTest dcc(pc_.get(), &transport); // Allocate the maximum number of channels + 1. Inside the loop, the creation // process will allocate a stream id for each channel. - for (channel_id = 0; channel_id <= cricket::kMaxSctpStreams; ++channel_id) { + for (channel_id = 0; channel_id <= kMaxSctpStreams; ++channel_id) { auto ret = dcc.InternalCreateDataChannelWithProxy( "label", InternalDataChannelInit(DataChannelInit())); - if (channel_id == cricket::kMaxSctpStreams) { + if (channel_id == kMaxSctpStreams) { // We've reached the maximum and the previous call should have failed. EXPECT_FALSE(ret.ok()); } else { @@ -167,13 +211,23 @@ TEST_F(DataChannelControllerTest, MaxChannels) { } } +TEST_F(DataChannelControllerTest, BufferedAmountIncludesFromTransport) { + NiceMock transport; + EXPECT_CALL(transport, buffered_amount(0)).WillOnce(Return(4711)); + ON_CALL(*pc_, GetSctpSslRole_n).WillByDefault([&]() { return SSL_CLIENT; }); + + DataChannelControllerForTest dcc(pc_.get(), &transport); + auto dc = dcc.InternalCreateDataChannelWithProxy( + "label", InternalDataChannelInit(DataChannelInit())) + .MoveValue(); + EXPECT_EQ(dc->buffered_amount(), 4711u); +} + // Test that while a data channel is in the `kClosing` state, its StreamId does // not get re-used for new channels. Only once the state reaches `kClosed` // should a StreamId be available again for allocation. 
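// Sketch of the id lifecycle exercised here (illustration only; the exact
// SctpSidAllocator signatures are assumed from their use elsewhere in this
// change):
//
//   SctpSidAllocator allocator;
//   std::optional<StreamId> sid = allocator.AllocateSid(SSL_CLIENT);
//   // While the owning channel is kClosing, the id stays reserved.
//   allocator.ReleaseSid(*sid);  // Reached kClosed: the id may be reused.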
TEST_F(DataChannelControllerTest, NoStreamIdReuseWhileClosing) { - ON_CALL(*pc_, GetSctpSslRole_n).WillByDefault([&]() { - return rtc::SSL_CLIENT; - }); + ON_CALL(*pc_, GetSctpSslRole_n).WillByDefault([&]() { return SSL_CLIENT; }); NiceMock transport; // Wider scope than `dcc`. DataChannelControllerForTest dcc(pc_.get(), &transport); @@ -210,5 +264,273 @@ TEST_F(DataChannelControllerTest, NoStreamIdReuseWhileClosing) { EXPECT_EQ(channel3->id(), channel1->id()); } +TEST_F(DataChannelControllerTest, ObserverNotifiedOnStringMessageSent) { + NiceMock transport; + DataChannelControllerForTest dcc(pc_.get(), &transport); + + std::vector messages; + auto observer = std::make_unique>(); + ON_CALL(*observer, OnMessage).WillByDefault([&](const Message& m) { + messages.push_back(m); + }); + network_thread_.BlockingCall( + [&]() { dcc.SetEventObserver(std::move(observer)); }); + + RTCErrorOr> ret = + dcc.InternalCreateDataChannelWithProxy( + "TestingSomeSendStuff", + InternalDataChannelInit({.negotiated = true, .id = 5})); + ASSERT_TRUE(ret.ok()); + auto channel = ret.MoveValue(); + + clock_.SetTime(Timestamp::Millis(123)); + network_thread_.BlockingCall([&]() { + dcc.SendData(StreamId(5), {.type = DataMessageType::kText}, + CopyOnWriteBuffer(kSomeData)); + }); + + channel->Close(); + run_loop_.Flush(); + + ASSERT_THAT(messages, SizeIs(1)); + EXPECT_EQ(messages[0].unix_timestamp_ms(), 123); + EXPECT_EQ(messages[0].datachannel_id(), 5); + EXPECT_EQ(messages[0].label(), "TestingSomeSendStuff"); + EXPECT_EQ(messages[0].direction(), Message::Direction::kSend); + EXPECT_EQ(messages[0].data_type(), Message::DataType::kString); + EXPECT_THAT(messages[0].data(), ElementsAreArray(kSomeData)); +} + +TEST_F(DataChannelControllerTest, ObserverNotifiedOnBinaryMessageSent) { + NiceMock transport; + DataChannelControllerForTest dcc(pc_.get(), &transport); + + std::vector messages; + auto observer = std::make_unique>(); + ON_CALL(*observer, OnMessage).WillByDefault([&](const Message& m) { + messages.push_back(m); + }); + network_thread_.BlockingCall( + [&]() { dcc.SetEventObserver(std::move(observer)); }); + + RTCErrorOr> ret = + dcc.InternalCreateDataChannelWithProxy( + "TestingSomeSendStuff", + InternalDataChannelInit({.negotiated = true, .id = 5})); + ASSERT_TRUE(ret.ok()); + auto channel = ret.MoveValue(); + + clock_.SetTime(Timestamp::Millis(123)); + network_thread_.BlockingCall([&]() { + dcc.SendData(StreamId(5), {.type = DataMessageType::kBinary}, + CopyOnWriteBuffer(kSomeData)); + }); + + channel->Close(); + run_loop_.Flush(); + + ASSERT_THAT(messages, SizeIs(1)); + EXPECT_EQ(messages[0].unix_timestamp_ms(), 123); + EXPECT_EQ(messages[0].datachannel_id(), 5); + EXPECT_EQ(messages[0].label(), "TestingSomeSendStuff"); + EXPECT_EQ(messages[0].direction(), Message::Direction::kSend); + EXPECT_EQ(messages[0].data_type(), Message::DataType::kBinary); + EXPECT_THAT(messages[0].data(), ElementsAreArray(kSomeData)); +} + +TEST_F(DataChannelControllerTest, ObserverNotNotifiedOnControlMessageSent) { + NiceMock transport; + DataChannelControllerForTest dcc(pc_.get(), &transport); + + std::vector messages; + auto observer = std::make_unique>(); + ON_CALL(*observer, OnMessage).WillByDefault([&](const Message& m) { + messages.push_back(m); + }); + network_thread_.BlockingCall( + [&]() { dcc.SetEventObserver(std::move(observer)); }); + + RTCErrorOr> ret = + dcc.InternalCreateDataChannelWithProxy( + "TestingSomeSendStuff", + InternalDataChannelInit({.negotiated = true, .id = 5})); + ASSERT_TRUE(ret.ok()); + auto channel = 
ret.MoveValue(); + + network_thread_.BlockingCall([&]() { + dcc.SendData(StreamId(5), {.type = DataMessageType::kControl}, + CopyOnWriteBuffer(kSomeData)); + }); + + channel->Close(); + run_loop_.Flush(); + + ASSERT_TRUE(messages.empty()); +} + +TEST_F(DataChannelControllerTest, ObserverNotNotifiedOnTransportFailed) { + NiceMock transport; + ON_CALL(transport, SendData) + .WillByDefault(Return(RTCError(RTCErrorType::INVALID_STATE))); + DataChannelControllerForTest dcc(pc_.get(), &transport); + + std::vector messages; + auto observer = std::make_unique>(); + ON_CALL(*observer, OnMessage).WillByDefault([&](const Message& m) { + messages.push_back(m); + }); + network_thread_.BlockingCall( + [&]() { dcc.SetEventObserver(std::move(observer)); }); + + RTCErrorOr> ret = + dcc.InternalCreateDataChannelWithProxy( + "TestingSomeSendStuff", + InternalDataChannelInit({.negotiated = true, .id = 5})); + ASSERT_TRUE(ret.ok()); + auto channel = ret.MoveValue(); + + network_thread_.BlockingCall([&]() { + dcc.SendData(StreamId(5), {.type = DataMessageType::kText}, + CopyOnWriteBuffer(kSomeData)); + }); + + channel->Close(); + run_loop_.Flush(); + + ASSERT_TRUE(messages.empty()); +} + +TEST_F(DataChannelControllerTest, ObserverNotifiedOnStringMessageReceived) { + NiceMock transport; + DataChannelControllerForTest dcc(pc_.get(), &transport); + + std::vector messages; + auto observer = std::make_unique>(); + ON_CALL(*observer, OnMessage).WillByDefault([&](const Message& m) { + messages.push_back(m); + }); + network_thread_.BlockingCall( + [&]() { dcc.SetEventObserver(std::move(observer)); }); + + RTCErrorOr> ret = + dcc.InternalCreateDataChannelWithProxy( + "TestingSomeReceiveStuff", + InternalDataChannelInit({.negotiated = true, .id = 5})); + ASSERT_TRUE(ret.ok()); + auto channel = ret.MoveValue(); + + clock_.SetTime(Timestamp::Millis(123)); + network_thread_.BlockingCall([&]() { + dcc.OnDataReceived(5, DataMessageType::kText, CopyOnWriteBuffer(kSomeData)); + }); + + channel->Close(); + run_loop_.Flush(); + + ASSERT_THAT(messages, SizeIs(1)); + EXPECT_EQ(messages[0].unix_timestamp_ms(), 123); + EXPECT_EQ(messages[0].datachannel_id(), 5); + EXPECT_EQ(messages[0].label(), "TestingSomeReceiveStuff"); + EXPECT_EQ(messages[0].direction(), Message::Direction::kReceive); + EXPECT_EQ(messages[0].data_type(), Message::DataType::kString); + EXPECT_THAT(messages[0].data(), ElementsAreArray(kSomeData)); +} + +TEST_F(DataChannelControllerTest, ObserverNotifiedOnBinaryMessageReceived) { + NiceMock transport; + DataChannelControllerForTest dcc(pc_.get(), &transport); + + std::vector messages; + auto observer = std::make_unique>(); + ON_CALL(*observer, OnMessage).WillByDefault([&](const Message& m) { + messages.push_back(m); + }); + network_thread_.BlockingCall( + [&]() { dcc.SetEventObserver(std::move(observer)); }); + + RTCErrorOr> ret = + dcc.InternalCreateDataChannelWithProxy( + "TestingSomeReceiveStuff", + InternalDataChannelInit({.negotiated = true, .id = 5})); + ASSERT_TRUE(ret.ok()); + auto channel = ret.MoveValue(); + + clock_.SetTime(Timestamp::Millis(123)); + network_thread_.BlockingCall([&]() { + dcc.OnDataReceived(5, DataMessageType::kBinary, + CopyOnWriteBuffer(kSomeData)); + }); + + channel->Close(); + run_loop_.Flush(); + + ASSERT_THAT(messages, SizeIs(1)); + EXPECT_EQ(messages[0].unix_timestamp_ms(), 123); + EXPECT_EQ(messages[0].datachannel_id(), 5); + EXPECT_EQ(messages[0].label(), "TestingSomeReceiveStuff"); + EXPECT_EQ(messages[0].direction(), Message::Direction::kReceive); + 
EXPECT_EQ(messages[0].data_type(), Message::DataType::kBinary); + EXPECT_THAT(messages[0].data(), ElementsAreArray(kSomeData)); +} + +TEST_F(DataChannelControllerTest, ObserverNotNotifiedOnControlMessageReceived) { + NiceMock transport; + DataChannelControllerForTest dcc(pc_.get(), &transport); + + std::vector messages; + auto observer = std::make_unique>(); + ON_CALL(*observer, OnMessage).WillByDefault([&](const Message& m) { + messages.push_back(m); + }); + network_thread_.BlockingCall( + [&]() { dcc.SetEventObserver(std::move(observer)); }); + + RTCErrorOr> ret = + dcc.InternalCreateDataChannelWithProxy( + "TestingSomeReceiveStuff", + InternalDataChannelInit({.negotiated = true, .id = 5})); + ASSERT_TRUE(ret.ok()); + auto channel = ret.MoveValue(); + + network_thread_.BlockingCall([&]() { + dcc.OnDataReceived(5, DataMessageType::kControl, + CopyOnWriteBuffer(kSomeData)); + }); + + channel->Close(); + run_loop_.Flush(); + + EXPECT_THAT(messages, IsEmpty()); +} + +TEST_F(DataChannelControllerTest, ObserverNotNotifiedOnUnknownId) { + NiceMock transport; + DataChannelControllerForTest dcc(pc_.get(), &transport); + + std::vector messages; + auto observer = std::make_unique>(); + ON_CALL(*observer, OnMessage).WillByDefault([&](const Message& m) { + messages.push_back(m); + }); + network_thread_.BlockingCall( + [&]() { dcc.SetEventObserver(std::move(observer)); }); + + RTCErrorOr> ret = + dcc.InternalCreateDataChannelWithProxy( + "TestingSomeReceiveStuff", + InternalDataChannelInit({.negotiated = true, .id = 5})); + ASSERT_TRUE(ret.ok()); + auto channel = ret.MoveValue(); + + network_thread_.BlockingCall([&]() { + dcc.OnDataReceived(3, DataMessageType::kText, CopyOnWriteBuffer(kSomeData)); + }); + + channel->Close(); + run_loop_.Flush(); + + EXPECT_THAT(messages, IsEmpty()); +} + } // namespace } // namespace webrtc diff --git a/pc/data_channel_integrationtest.cc b/pc/data_channel_integrationtest.cc index faec76d03e..de87405b30 100644 --- a/pc/data_channel_integrationtest.cc +++ b/pc/data_channel_integrationtest.cc @@ -10,42 +10,61 @@ #include +#include #include #include +#include +#include #include #include +#include #include #include "absl/algorithm/container.h" -#include "absl/types/optional.h" +#include "absl/strings/match.h" #include "api/data_channel_interface.h" #include "api/dtls_transport_interface.h" +#include "api/jsep.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" #include "api/scoped_refptr.h" #include "api/sctp_transport_interface.h" #include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" +#include "api/test/rtc_error_matchers.h" #include "api/units/time_delta.h" #include "p2p/base/transport_description.h" #include "p2p/base/transport_info.h" +#include "p2p/test/test_turn_server.h" #include "pc/media_session.h" #include "pc/session_description.h" +#include "pc/test/fake_rtc_certificate_generator.h" #include "pc/test/integration_test_helpers.h" #include "pc/test/mock_peer_connection_observers.h" #include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/fake_clock.h" #include "rtc_base/gunit.h" -#include "rtc_base/helpers.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/task_queue_for_test.h" #include "rtc_base/virtual_socket_server.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" 
namespace webrtc { namespace { +using ::testing::Eq; +using ::testing::IsTrue; +using ::testing::Ne; +using ::testing::ValuesIn; + // All tests in this file require SCTP support. #ifdef WEBRTC_HAVE_SCTP @@ -84,13 +103,13 @@ class DataChannelIntegrationTest // where order of construction is finely controlled. // This also ensures peerconnection is closed before switching back to non-fake // clock, avoiding other races and DCHECK failures such as in rtp_sender.cc. -class FakeClockForTest : public rtc::ScopedFakeClock { +class FakeClockForTest : public ScopedFakeClock { protected: FakeClockForTest() { // Some things use a time of "0" as a special value, so we need to start out // the fake clock at a nonzero time. // TODO(deadbeef): Fix this. - AdvanceTime(webrtc::TimeDelta::Seconds(1)); + AdvanceTime(TimeDelta::Seconds(1)); } // Explicit handle. @@ -111,10 +130,19 @@ class DataChannelIntegrationTestUnifiedPlan : PeerConnectionIntegrationBaseTest(SdpSemantics::kUnifiedPlan) {} }; -void MakeActiveSctpOffer(cricket::SessionDescription* desc) { - auto& transport_infos = desc->transport_infos(); +void MakeOfferHaveActiveDtlsRole( + std::unique_ptr& desc) { + auto& transport_infos = desc->description()->transport_infos(); + for (auto& transport_info : transport_infos) { + transport_info.description.connection_role = CONNECTIONROLE_ACTIVE; + } +} + +void MakeOfferHavePassiveDtlsRole( + std::unique_ptr& desc) { + auto& transport_infos = desc->description()->transport_infos(); for (auto& transport_info : transport_infos) { - transport_info.description.connection_role = cricket::CONNECTIONROLE_ACTIVE; + transport_info.description.connection_role = CONNECTIONROLE_PASSIVE; } } @@ -126,23 +154,31 @@ TEST_P(DataChannelIntegrationTest, DataChannelWhileDisconnected) { ConnectFakeSignaling(); caller()->CreateDataChannel(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->data_observer(); }, IsTrue()), + IsRtcOk()); std::string data1 = "hello first"; caller()->data_channel()->Send(DataBuffer(data1)); - EXPECT_EQ_WAIT(data1, callee()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->last_message(); }, + Eq(data1)), + IsRtcOk()); // Cause a network outage virtual_socket_server()->set_drop_probability(1.0); - EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionDisconnected, - caller()->standardized_ice_connection_state(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return caller()->standardized_ice_connection_state(); }, + Eq(PeerConnectionInterface::kIceConnectionDisconnected), + {.timeout = TimeDelta::Seconds(10)}), + IsRtcOk()); std::string data2 = "hello second"; caller()->data_channel()->Send(DataBuffer(data2)); // Remove the network outage. The connection should reestablish. 
virtual_socket_server()->set_drop_probability(0.0); - EXPECT_EQ_WAIT(data2, callee()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->last_message(); }, + Eq(data2)), + IsRtcOk()); } // This test causes a PeerConnection to enter Disconnected state, @@ -154,17 +190,23 @@ TEST_P(DataChannelIntegrationTest, DataChannelWhileDisconnectedIceRestart) { ConnectFakeSignaling(); caller()->CreateDataChannel(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->data_observer(); }, IsTrue()), + IsRtcOk()); std::string data1 = "hello first"; caller()->data_channel()->Send(DataBuffer(data1)); - EXPECT_EQ_WAIT(data1, callee()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->last_message(); }, + Eq(data1)), + IsRtcOk()); // Cause a network outage virtual_socket_server()->set_drop_probability(1.0); - ASSERT_EQ_WAIT(PeerConnectionInterface::kIceConnectionDisconnected, - caller()->standardized_ice_connection_state(), - kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return caller()->standardized_ice_connection_state(); }, + Eq(PeerConnectionInterface::kIceConnectionDisconnected), + {.timeout = TimeDelta::Seconds(10)}), + IsRtcOk()); std::string data2 = "hello second"; caller()->data_channel()->Send(DataBuffer(data2)); @@ -172,11 +214,14 @@ TEST_P(DataChannelIntegrationTest, DataChannelWhileDisconnectedIceRestart) { // the network outage. caller()->SetOfferAnswerOptions(IceRestartOfferAnswerOptions()); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); // Remove the network outage. The connection should reestablish. virtual_socket_server()->set_drop_probability(0.0); - EXPECT_EQ_WAIT(data2, callee()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->last_message(); }, + Eq(data2)), + IsRtcOk()); } // This test sets up a call between two parties with audio, video and an SCTP @@ -192,7 +237,8 @@ TEST_P(DataChannelIntegrationTest, EndToEndCallWithSctpDataChannel) { callee()->AddAudioVideoTracks(); } caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); if (allow_media()) { // Ensure the existence of the SCTP data channel didn't impede audio/video. MediaExpectations media_expectations; @@ -202,18 +248,27 @@ TEST_P(DataChannelIntegrationTest, EndToEndCallWithSctpDataChannel) { // Caller data channel should already exist (it created one). Callee data // channel may not exist yet, since negotiation happens in-band, not in SDP. 
ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, Ne(nullptr)), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return caller()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); // Ensure data can be sent in both directions. std::string data = "hello world"; caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->last_message(); }, + Eq(data)), + IsRtcOk()); callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return caller()->data_observer()->last_message(); }, + Eq(data)), + IsRtcOk()); } // This test sets up a call between two parties with an SCTP @@ -226,33 +281,111 @@ TEST_P(DataChannelIntegrationTest, // well. caller()->CreateDataChannel(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); // Caller data channel should already exist (it created one). Callee data // channel may not exist yet, since negotiation happens in-band, not in SDP. ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, Ne(nullptr)), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return caller()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); for (int message_size = 1; message_size < 100000; message_size *= 2) { std::string data(message_size, 'a'); caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->last_message(); }, + Eq(data)), + IsRtcOk()); callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return caller()->data_observer()->last_message(); }, + Eq(data)), + IsRtcOk()); } // Specifically probe the area around the MTU size. 
for (int message_size = 1100; message_size < 1300; message_size += 1) { std::string data(message_size, 'a'); caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->last_message(); }, + Eq(data)), + IsRtcOk()); callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return caller()->data_observer()->last_message(); }, + Eq(data)), + IsRtcOk()); + } + caller()->data_channel()->Close(); + + EXPECT_THAT(WaitUntil([&] { return caller()->data_observer()->state(); }, + Eq(webrtc::DataChannelInterface::kClosed)), + IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return callee()->data_observer()->state(); }, + Eq(webrtc::DataChannelInterface::kClosed)), + IsRtcOk()); +} + +// This test sets up a call between two parties with an SCTP +// data channel only, and sends enough messages to fill the queue and then +// closes on the caller. We expect the state to transition to closed on both +// caller and callee. +TEST_P(DataChannelIntegrationTest, EndToEndCallWithSctpDataChannelFullBuffer) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + // Expect that data channel created on caller side will show up for callee as + // well. + caller()->CreateDataChannel(); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + // Caller data channel should already exist (it created one). Callee data + // channel may not exist yet, since negotiation happens in-band, not in SDP. + ASSERT_NE(nullptr, caller()->data_channel()); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, Ne(nullptr)), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return caller()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); + + std::string data(256 * 1024, 'a'); + for (size_t queued_size = 0; + queued_size < webrtc::DataChannelInterface::MaxSendQueueSize(); + queued_size += data.size()) { + caller()->data_channel()->SendAsync(DataBuffer(data), nullptr); } + + caller()->data_channel()->Close(); + + DataChannelInterface::DataState expected_states[] = { + DataChannelInterface::DataState::kConnecting, + DataChannelInterface::DataState::kOpen, + DataChannelInterface::DataState::kClosing, + DataChannelInterface::DataState::kClosed}; + + // Debug data channels are very slow, use a long timeout for those slow, + // heavily parallelized runs. + EXPECT_THAT(WaitUntil([&] { return caller()->data_observer()->state(); }, + Eq(DataChannelInterface::DataState::kClosed), + {.timeout = kLongTimeout}), + IsRtcOk()); + EXPECT_THAT(caller()->data_observer()->states(), + ::testing::ElementsAreArray(expected_states)); + + EXPECT_THAT(WaitUntil([&] { return callee()->data_observer()->state(); }, + Eq(DataChannelInterface::DataState::kClosed)), + IsRtcOk()); + EXPECT_THAT(callee()->data_observer()->states(), + ::testing::ElementsAreArray(expected_states)); } // This test sets up a call between two parties with an SCTP @@ -265,38 +398,56 @@ TEST_P(DataChannelIntegrationTest, // well. 
caller()->CreateDataChannel(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); // Caller data channel should already exist (it created one). Callee data // channel may not exist yet, since negotiation happens in-band, not in SDP. ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, Ne(nullptr)), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return caller()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); // Ensure data can be sent in both directions. // Sending empty string data std::string data = ""; caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil( + [&] { return callee()->data_observer()->received_message_count(); }, + Eq(1u)), + IsRtcOk()); EXPECT_TRUE(callee()->data_observer()->last_message().empty()); EXPECT_FALSE(callee()->data_observer()->messages().back().binary); callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(1u, caller()->data_observer()->received_message_count(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil( + [&] { return caller()->data_observer()->received_message_count(); }, + Eq(1u)), + IsRtcOk()); EXPECT_TRUE(caller()->data_observer()->last_message().empty()); EXPECT_FALSE(caller()->data_observer()->messages().back().binary); // Sending empty binary data - rtc::CopyOnWriteBuffer empty_buffer; + CopyOnWriteBuffer empty_buffer; caller()->data_channel()->Send(DataBuffer(empty_buffer, true)); - EXPECT_EQ_WAIT(2u, callee()->data_observer()->received_message_count(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil( + [&] { return callee()->data_observer()->received_message_count(); }, + Eq(2u)), + IsRtcOk()); EXPECT_TRUE(callee()->data_observer()->last_message().empty()); EXPECT_TRUE(callee()->data_observer()->messages().back().binary); callee()->data_channel()->Send(DataBuffer(empty_buffer, true)); - EXPECT_EQ_WAIT(2u, caller()->data_observer()->received_message_count(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil( + [&] { return caller()->data_observer()->received_message_count(); }, + Eq(2u)), + IsRtcOk()); EXPECT_TRUE(caller()->data_observer()->last_message().empty()); EXPECT_TRUE(caller()->data_observer()->messages().back().binary); } @@ -315,23 +466,33 @@ TEST_P(DataChannelIntegrationTest, // well. caller()->CreateDataChannel(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); // Caller data channel should already exist (it created one). Callee data // channel may not exist yet, since negotiation happens in-band, not in SDP. 
ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, Ne(nullptr)), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return caller()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); virtual_socket_server()->set_max_udp_payload(kLowestSafePayloadSizeLimit); for (int message_size = 1140; message_size < 1240; message_size += 1) { std::string data(message_size, 'a'); caller()->data_channel()->Send(DataBuffer(data)); - ASSERT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return callee()->data_observer()->last_message(); }, + Eq(data)), + IsRtcOk()); callee()->data_channel()->Send(DataBuffer(data)); - ASSERT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return caller()->data_observer()->last_message(); }, + Eq(data)), + IsRtcOk()); } } @@ -350,11 +511,22 @@ TEST_P(DataChannelIntegrationTest, EndToEndCallWithSctpDataChannelHarmfulMtu) { ConnectFakeSignaling(); caller()->CreateDataChannel(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, Ne(nullptr)), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return caller()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); + + if (caller()->tls_version() == kDtls13VersionBytes) { + ASSERT_EQ(caller()->tls_version(), kDtls13VersionBytes); + GTEST_SKIP() << "DTLS1.3 fragments packets larger than MTU"; + } virtual_socket_server()->set_max_udp_payload(kLowestSafePayloadSizeLimit - 1); // Probe for an undelivered or slowly delivered message. The exact @@ -392,11 +564,17 @@ TEST_P(DataChannelIntegrationTest, CalleeClosesSctpDataChannel) { callee()->AddAudioVideoTracks(); } caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, Ne(nullptr)), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return caller()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); // Close the data channel on the callee side, and wait for it to reach the // "closed" state on both sides. 
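The hunk below checks the complete DataChannelInterface::DataState sequence, kConnecting -> kOpen -> kClosing -> kClosed, against the states() history kept by the test observer. For orientation, a minimal observer that records such a history could look like the following sketch; it is a hypothetical simplification written against the public DataChannelObserver interface, not the MockDataChannelObserver from pc/test/mock_peer_connection_observers.h that these tests actually use.

#include <utility>
#include <vector>

#include "api/data_channel_interface.h"
#include "api/scoped_refptr.h"

namespace webrtc {

// Hypothetical helper, not part of this CL: records every state the channel
// reports so a test can later compare the sequence with
// ElementsAreArray(expected_states), as the hunk below does.
class StateRecordingObserver : public DataChannelObserver {
 public:
  explicit StateRecordingObserver(scoped_refptr<DataChannelInterface> channel)
      : channel_(std::move(channel)) {
    states_.push_back(channel_->state());  // Usually kConnecting.
    channel_->RegisterObserver(this);
  }
  ~StateRecordingObserver() override { channel_->UnregisterObserver(); }

  void OnStateChange() override { states_.push_back(channel_->state()); }
  void OnMessage(const DataBuffer& buffer) override {}

  const std::vector<DataChannelInterface::DataState>& states() const {
    return states_;
  }

 private:
  scoped_refptr<DataChannelInterface> channel_;
  std::vector<DataChannelInterface::DataState> states_;
};

}  // namespace webrtc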
@@ -408,13 +586,15 @@ TEST_P(DataChannelIntegrationTest, CalleeClosesSctpDataChannel) { DataChannelInterface::DataState::kClosing, DataChannelInterface::DataState::kClosed}; - EXPECT_EQ_WAIT(DataChannelInterface::DataState::kClosed, - caller()->data_observer()->state(), kDefaultTimeout); + EXPECT_THAT(WaitUntil([&] { return caller()->data_observer()->state(); }, + Eq(DataChannelInterface::DataState::kClosed)), + IsRtcOk()); EXPECT_THAT(caller()->data_observer()->states(), ::testing::ElementsAreArray(expected_states)); - EXPECT_EQ_WAIT(DataChannelInterface::DataState::kClosed, - callee()->data_observer()->state(), kDefaultTimeout); + EXPECT_THAT(WaitUntil([&] { return callee()->data_observer()->state(); }, + Eq(DataChannelInterface::DataState::kClosed)), + IsRtcOk()); EXPECT_THAT(callee()->data_observer()->states(), ::testing::ElementsAreArray(expected_states)); } @@ -422,7 +602,7 @@ TEST_P(DataChannelIntegrationTest, CalleeClosesSctpDataChannel) { TEST_P(DataChannelIntegrationTest, SctpDataChannelConfigSentToOtherSide) { ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); - webrtc::DataChannelInit init; + DataChannelInit init; init.id = 53; init.maxRetransmits = 52; caller()->CreateDataChannel("data-channel", &init); @@ -431,13 +611,18 @@ TEST_P(DataChannelIntegrationTest, SctpDataChannelConfigSentToOtherSide) { callee()->AddAudioVideoTracks(); } caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, Ne(nullptr)), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); // Since "negotiated" is false, the "id" parameter should be ignored. EXPECT_NE(init.id, callee()->data_channel()->id()); EXPECT_EQ("data-channel", callee()->data_channel()->label()); - EXPECT_EQ(init.maxRetransmits, callee()->data_channel()->maxRetransmits()); + EXPECT_EQ(init.maxRetransmits, + *callee()->data_channel()->maxRetransmitsOpt()); EXPECT_FALSE(callee()->data_channel()->negotiated()); } @@ -453,15 +638,21 @@ TEST_P(DataChannelIntegrationTest, StressTestUnorderedSctpDataChannel) { // Normal procedure, but with unordered data channel config. ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); - webrtc::DataChannelInit init; + DataChannelInit init; init.ordered = false; caller()->CreateDataChannel(&init); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, Ne(nullptr)), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return caller()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); static constexpr int kNumMessages = 100; // Deliberately chosen to be larger than the MTU so messages get fragmented. 
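The tests above (SctpDataChannelConfigSentToOtherSide and the unordered stress test) exercise the DataChannelInit fields (ordered, maxRetransmits, negotiated, id) and the maxRetransmitsOpt() accessor through the integration-test wrappers. For reference, the same configuration done directly against PeerConnectionInterface might look like the sketch below; the helper name and the assumption that a live `pc` exists elsewhere are hypothetical, while the field semantics follow the tests above and the standard CreateDataChannelOrError() entry point.

#include "api/data_channel_interface.h"
#include "api/peer_connection_interface.h"
#include "api/rtc_error.h"
#include "api/scoped_refptr.h"

namespace webrtc {

// Sketch: create an unordered, partially reliable, pre-negotiated channel.
// `pc` is assumed to be a live PeerConnectionInterface owned elsewhere.
scoped_refptr<DataChannelInterface> CreateLossyNegotiatedChannel(
    PeerConnectionInterface& pc) {
  DataChannelInit init;
  init.ordered = false;      // The receiver may see messages out of order.
  init.maxRetransmits = 52;  // Retransmit each message at most 52 times.
  init.negotiated = true;    // The id is agreed out of band, so...
  init.id = 53;              // ...it is honored; with negotiated=false the
                             // stack picks the id and this value is ignored.
  RTCErrorOr<scoped_refptr<DataChannelInterface>> result =
      pc.CreateDataChannelOrError("data-channel", &init);
  if (!result.ok()) {
    return nullptr;
  }
  // The configured limit is echoed back through maxRetransmitsOpt(), which is
  // what SctpDataChannelConfigSentToOtherSide above dereferences.
  return result.MoveValue();
}

}  // namespace webrtc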
@@ -472,19 +663,23 @@ TEST_P(DataChannelIntegrationTest, StressTestUnorderedSctpDataChannel) { size_t length = (rand() % kMaxMessageSize) + 1; // NOLINT (rand_r instead of rand) std::string message; - ASSERT_TRUE(rtc::CreateRandomString(length, &message)); + ASSERT_TRUE(CreateRandomString(length, &message)); caller()->data_channel()->Send(DataBuffer(message)); callee()->data_channel()->Send(DataBuffer(message)); sent_messages.push_back(message); } // Wait for all messages to be received. - EXPECT_EQ_WAIT(rtc::checked_cast(kNumMessages), - caller()->data_observer()->received_message_count(), - kDefaultTimeout); - EXPECT_EQ_WAIT(rtc::checked_cast(kNumMessages), - callee()->data_observer()->received_message_count(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil( + [&] { return caller()->data_observer()->received_message_count(); }, + Eq(checked_cast(kNumMessages))), + IsRtcOk()); + EXPECT_THAT( + WaitUntil( + [&] { return callee()->data_observer()->received_message_count(); }, + Eq(checked_cast(kNumMessages))), + IsRtcOk()); // Sort and compare to make sure none of the messages were corrupted. std::vector caller_received_messages; @@ -515,12 +710,12 @@ TEST_P(DataChannelIntegrationTest, StressTestOpenCloseChannelNoDelay) { const size_t kIterations = 10; bool has_negotiated = false; - webrtc::DataChannelInit init; + DataChannelInit init; for (size_t repeats = 0; repeats < kIterations; ++repeats) { RTC_LOG(LS_INFO) << "Iteration " << (repeats + 1) << "/" << kIterations; for (size_t i = 0; i < kChannelCount; ++i) { - rtc::StringBuilder sb; + StringBuilder sb; sb << "channel-" << channel_id++; caller()->CreateDataChannel(sb.Release(), &init); } @@ -528,22 +723,28 @@ TEST_P(DataChannelIntegrationTest, StressTestOpenCloseChannelNoDelay) { if (!has_negotiated) { caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); has_negotiated = true; } for (size_t i = 0; i < kChannelCount; ++i) { - ASSERT_EQ_WAIT(caller()->data_channels()[i]->state(), - DataChannelInterface::DataState::kOpen, kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return caller()->data_channels()[i]->state(); }, + Eq(DataChannelInterface::DataState::kOpen)), + IsRtcOk()); RTC_LOG(LS_INFO) << "Caller Channel " << caller()->data_channels()[i]->label() << " with id " << caller()->data_channels()[i]->id() << " is open."; } - ASSERT_EQ_WAIT(callee()->data_channels().size(), kChannelCount, - kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channels().size(); }, + Eq(kChannelCount)), + IsRtcOk()); for (size_t i = 0; i < kChannelCount; ++i) { - ASSERT_EQ_WAIT(callee()->data_channels()[i]->state(), - DataChannelInterface::DataState::kOpen, kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return callee()->data_channels()[i]->state(); }, + Eq(DataChannelInterface::DataState::kOpen)), + IsRtcOk()); RTC_LOG(LS_INFO) << "Callee Channel " << callee()->data_channels()[i]->label() << " with id " << callee()->data_channels()[i]->id() << " is open."; @@ -562,10 +763,14 @@ TEST_P(DataChannelIntegrationTest, StressTestOpenCloseChannelNoDelay) { } for (size_t i = 0; i < kChannelCount; ++i) { - ASSERT_EQ_WAIT(caller()->data_channels()[i]->state(), - DataChannelInterface::DataState::kClosed, kDefaultTimeout); - ASSERT_EQ_WAIT(callee()->data_channels()[i]->state(), - DataChannelInterface::DataState::kClosed, kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return 
caller()->data_channels()[i]->state(); }, + Eq(DataChannelInterface::DataState::kClosed)), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return callee()->data_channels()[i]->state(); }, + Eq(DataChannelInterface::DataState::kClosed)), + IsRtcOk()); } caller()->data_channels().clear(); @@ -592,12 +797,12 @@ TEST_P(DataChannelIntegrationTest, StressTestOpenCloseChannelWithDelay) { const size_t kIterations = 10; bool has_negotiated = false; - webrtc::DataChannelInit init; + DataChannelInit init; for (size_t repeats = 0; repeats < kIterations; ++repeats) { RTC_LOG(LS_INFO) << "Iteration " << (repeats + 1) << "/" << kIterations; for (size_t i = 0; i < kChannelCount; ++i) { - rtc::StringBuilder sb; + StringBuilder sb; sb << "channel-" << channel_id++; caller()->CreateDataChannel(sb.Release(), &init); } @@ -605,22 +810,28 @@ TEST_P(DataChannelIntegrationTest, StressTestOpenCloseChannelWithDelay) { if (!has_negotiated) { caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); has_negotiated = true; } for (size_t i = 0; i < kChannelCount; ++i) { - ASSERT_EQ_WAIT(caller()->data_channels()[i]->state(), - DataChannelInterface::DataState::kOpen, kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return caller()->data_channels()[i]->state(); }, + Eq(DataChannelInterface::DataState::kOpen)), + IsRtcOk()); RTC_LOG(LS_INFO) << "Caller Channel " << caller()->data_channels()[i]->label() << " with id " << caller()->data_channels()[i]->id() << " is open."; } - ASSERT_EQ_WAIT(callee()->data_channels().size(), kChannelCount, - kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channels().size(); }, + Eq(kChannelCount)), + IsRtcOk()); for (size_t i = 0; i < kChannelCount; ++i) { - ASSERT_EQ_WAIT(callee()->data_channels()[i]->state(), - DataChannelInterface::DataState::kOpen, kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return callee()->data_channels()[i]->state(); }, + Eq(DataChannelInterface::DataState::kOpen)), + IsRtcOk()); RTC_LOG(LS_INFO) << "Callee Channel " << callee()->data_channels()[i]->label() << " with id " << callee()->data_channels()[i]->id() << " is open."; @@ -639,10 +850,14 @@ TEST_P(DataChannelIntegrationTest, StressTestOpenCloseChannelWithDelay) { } for (size_t i = 0; i < kChannelCount; ++i) { - ASSERT_EQ_WAIT(caller()->data_channels()[i]->state(), - DataChannelInterface::DataState::kClosed, kDefaultTimeout); - ASSERT_EQ_WAIT(callee()->data_channels()[i]->state(), - DataChannelInterface::DataState::kClosed, kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return caller()->data_channels()[i]->state(); }, + Eq(DataChannelInterface::DataState::kClosed)), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return callee()->data_channels()[i]->state(); }, + Eq(DataChannelInterface::DataState::kClosed)), + IsRtcOk()); } caller()->data_channels().clear(); @@ -665,25 +880,36 @@ TEST_P(DataChannelIntegrationTest, AddSctpDataChannelInSubsequentOffer) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); // Create data channel and do new offer and answer. 
caller()->CreateDataChannel(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); // Caller data channel should already exist (it created one). Callee data // channel may not exist yet, since negotiation happens in-band, not in SDP. ASSERT_NE(nullptr, caller()->data_channel()); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, Ne(nullptr)), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return caller()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); // Ensure data can be sent in both directions. std::string data = "hello world"; caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->last_message(); }, + Eq(data)), + IsRtcOk()); callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return caller()->data_observer()->last_message(); }, + Eq(data)), + IsRtcOk()); } // Set up a connection initially just using SCTP data channels, later @@ -700,26 +926,34 @@ TEST_P(DataChannelIntegrationTest, SctpDataChannelToAudioVideoUpgrade) { // Do initial offer/answer with just data channel. caller()->CreateDataChannel(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); // Wait until data can be sent over the data channel. - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, Ne(nullptr)), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return caller()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); // Do subsequent offer/answer with two-way audio and video. Audio and video // should end up bundled on the DTLS/ICE transport already used for data. 
caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudioAndVideo(); ASSERT_TRUE(ExpectNewFrames(media_expectations)); } -static void MakeSpecCompliantSctpOffer(cricket::SessionDescription* desc) { - cricket::SctpDataContentDescription* dcd_offer = - GetFirstSctpDataContentDescription(desc); +static void MakeSpecCompliantSctpOffer( + std::unique_ptr& desc) { + SctpDataContentDescription* dcd_offer = + GetFirstSctpDataContentDescription(desc->description()); // See https://crbug.com/webrtc/11211 - this function is a no-op ASSERT_TRUE(dcd_offer); dcd_offer->set_use_sctpmap(false); @@ -736,19 +970,29 @@ TEST_P(DataChannelIntegrationTest, caller()->CreateDataChannel(); caller()->SetGeneratedSdpMunger(MakeSpecCompliantSctpOffer); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); - EXPECT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, Ne(nullptr)), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return caller()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); // Ensure data can be sent in both directions. std::string data = "hello world"; caller()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, callee()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->last_message(); }, + Eq(data)), + IsRtcOk()); callee()->data_channel()->Send(DataBuffer(data)); - EXPECT_EQ_WAIT(data, caller()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return caller()->data_observer()->last_message(); }, + Eq(data)), + IsRtcOk()); } // Test that after closing PeerConnections, they stop sending any packets @@ -764,7 +1008,8 @@ TEST_P(DataChannelIntegrationTest, ClosingConnectionStopsPacketFlow) { caller()->AddAudioVideoTracks(); caller()->CreateDataChannel(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.CalleeExpectsSomeAudioAndVideo(); ASSERT_TRUE(ExpectNewFrames(media_expectations)); @@ -783,8 +1028,11 @@ TEST_P(DataChannelIntegrationTest, DtlsRoleIsSetNormally) { caller()->CreateDataChannel(); ASSERT_FALSE(caller()->pc()->GetSctpTransport()); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return caller()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); ASSERT_TRUE(caller()->pc()->GetSctpTransport()); ASSERT_TRUE( caller()->pc()->GetSctpTransport()->Information().dtls_transport()); @@ -821,10 
+1069,13 @@ TEST_P(DataChannelIntegrationTest, DtlsRoleIsSetWhenReversed) { ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); caller()->CreateDataChannel(); - callee()->SetReceivedSdpMunger(MakeActiveSctpOffer); + callee()->SetReceivedSdpMunger(MakeOfferHaveActiveDtlsRole); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return caller()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); EXPECT_TRUE(caller() ->pc() ->GetSctpTransport() @@ -860,15 +1111,23 @@ TEST_P(DataChannelIntegrationTest, ConnectFakeSignaling(); caller()->CreateDataChannel(); - callee()->SetReceivedSdpMunger([this](cricket::SessionDescription* desc) { - MakeActiveSctpOffer(desc); - callee()->CreateDataChannel(); - }); + callee()->SetReceivedSdpMunger( + [this](std::unique_ptr& desc) { + MakeOfferHaveActiveDtlsRole(desc); + callee()->CreateDataChannel(); + }); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(caller()->data_observer()->IsOpen(), kDefaultTimeout); - ASSERT_EQ_WAIT(callee()->data_channels().size(), 2U, kDefaultTimeout); - ASSERT_EQ_WAIT(caller()->data_channels().size(), 2U, kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return caller()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return callee()->data_channels().size(); }, Eq(2U)), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return caller()->data_channels().size(); }, Eq(2U)), + IsRtcOk()); EXPECT_TRUE(caller() ->pc() ->GetSctpTransport() @@ -912,8 +1171,10 @@ TEST_P(DataChannelIntegrationTest, caller()->CreateDataChannel(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, IsTrue()), + IsRtcOk()); auto caller_report = caller()->NewGetStats(); EXPECT_EQ(1u, caller_report->GetStatsOfType().size()); @@ -926,24 +1187,36 @@ TEST_P(DataChannelIntegrationTest, QueuedPacketsGetDeliveredInReliableMode) { ConnectFakeSignaling(); caller()->CreateDataChannel(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, IsTrue()), + IsRtcOk()); caller()->data_channel()->Send(DataBuffer("hello first")); - ASSERT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(), - kDefaultTimeout); + ASSERT_THAT( + WaitUntil( + [&] { return callee()->data_observer()->received_message_count(); }, + Eq(1u)), + IsRtcOk()); // Cause a temporary network outage virtual_socket_server()->set_drop_probability(1.0); for (int i = 1; i <= 10; i++) { caller()->data_channel()->Send(DataBuffer("Sent while blocked")); } // Nothing should be delivered during outage. Short wait. 
- EXPECT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(), 10); + EXPECT_THAT( + WaitUntil( + [&] { return callee()->data_observer()->received_message_count(); }, + Eq(1u)), + IsRtcOk()); // Reverse outage virtual_socket_server()->set_drop_probability(0.0); // All packets should be delivered. - EXPECT_EQ_WAIT(11u, callee()->data_observer()->received_message_count(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil( + [&] { return callee()->data_observer()->received_message_count(); }, + Eq(11u)), + IsRtcOk()); } TEST_P(DataChannelIntegrationTest, QueuedPacketsGetDroppedInUnreliableMode) { @@ -954,11 +1227,16 @@ TEST_P(DataChannelIntegrationTest, QueuedPacketsGetDroppedInUnreliableMode) { init.ordered = false; caller()->CreateDataChannel(&init); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, IsTrue()), + IsRtcOk()); caller()->data_channel()->Send(DataBuffer("hello first")); - ASSERT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(), - kDefaultTimeout); + ASSERT_THAT( + WaitUntil( + [&] { return callee()->data_observer()->received_message_count(); }, + Eq(1u)), + IsRtcOk()); // Cause a temporary network outage virtual_socket_server()->set_drop_probability(1.0); // Send a few packets. Note that all get dropped only when all packets @@ -975,8 +1253,10 @@ TEST_P(DataChannelIntegrationTest, QueuedPacketsGetDroppedInUnreliableMode) { virtual_socket_server()->set_drop_probability(0.0); // Send a new packet, and wait for it to be delivered. caller()->data_channel()->Send(DataBuffer("After block")); - EXPECT_EQ_WAIT("After block", callee()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->last_message(); }, + Eq("After block")), + IsRtcOk()); // Some messages should be lost, but first and last message should have // been delivered. // First, check that the protocol guarantee is preserved. @@ -995,11 +1275,16 @@ TEST_P(DataChannelIntegrationTest, init.ordered = false; caller()->CreateDataChannel(&init); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, IsTrue()), + IsRtcOk()); caller()->data_channel()->Send(DataBuffer("hello first")); - ASSERT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(), - kDefaultTimeout); + ASSERT_THAT( + WaitUntil( + [&] { return callee()->data_observer()->received_message_count(); }, + Eq(1u)), + IsRtcOk()); // Cause a temporary network outage virtual_socket_server()->set_drop_probability(1.0); for (int i = 1; i <= 200; i++) { @@ -1014,8 +1299,10 @@ TEST_P(DataChannelIntegrationTest, virtual_socket_server()->set_drop_probability(0.0); // Send a new packet, and wait for it to be delivered. 
caller()->data_channel()->Send(DataBuffer("After block")); - EXPECT_EQ_WAIT("After block", callee()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->last_message(); }, + Eq("After block")), + IsRtcOk()); // Some messages should be lost, but first and last message should have // been delivered. // First, check that the protocol guarantee is preserved. @@ -1035,21 +1322,28 @@ TEST_P(DataChannelIntegrationTest, init.ordered = false; caller()->CreateDataChannel(&init); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, IsTrue()), + IsRtcOk()); caller()->data_channel()->Send(DataBuffer("hello first")); - ASSERT_EQ_WAIT(1u, callee()->data_observer()->received_message_count(), - kDefaultTimeout); + ASSERT_THAT( + WaitUntil( + [&] { return callee()->data_observer()->received_message_count(); }, + Eq(1u)), + IsRtcOk()); // Cause a temporary network outage virtual_socket_server()->set_drop_probability(1.0); - // Fill the buffer until queued data starts to build + // Fill the SCTP socket buffer until queued data starts to build. + constexpr size_t kBufferedDataInSctpSocket = 2'000'000; size_t packet_counter = 0; - while (caller()->data_channel()->buffered_amount() < 1 && + while (caller()->data_channel()->buffered_amount() < + kBufferedDataInSctpSocket && packet_counter < 10000) { packet_counter++; caller()->data_channel()->Send(DataBuffer("Sent while blocked")); } - if (caller()->data_channel()->buffered_amount()) { + if (caller()->data_channel()->buffered_amount() > kBufferedDataInSctpSocket) { RTC_LOG(LS_INFO) << "Buffered data after " << packet_counter << " packets"; } else { RTC_LOG(LS_INFO) << "No buffered data after " << packet_counter @@ -1063,8 +1357,10 @@ TEST_P(DataChannelIntegrationTest, virtual_socket_server()->set_drop_probability(0.0); // Send a new packet, and wait for it to be delivered. caller()->data_channel()->Send(DataBuffer("After block")); - EXPECT_EQ_WAIT("After block", callee()->data_observer()->last_message(), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return callee()->data_observer()->last_message(); }, + Eq("After block")), + IsRtcOk()); // Some messages should be lost, but first and last message should have // been delivered. 
// Due to the fact that retransmissions are only counted when the packet @@ -1101,63 +1397,425 @@ TEST_F(DataChannelIntegrationTestUnifiedPlan, caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(caller()->pc()->GetSctpTransport(), kDefaultTimeout); - ASSERT_EQ_WAIT(SctpTransportState::kConnected, - caller()->pc()->GetSctpTransport()->Information().state(), - kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return caller()->pc()->GetSctpTransport(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT( + WaitUntil( + [&] { + return caller()->pc()->GetSctpTransport()->Information().state(); + }, + Eq(SctpTransportState::kConnected)), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); } TEST_F(DataChannelIntegrationTestUnifiedPlan, EndToEndCallWithDataChannelOnlyConnects) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); + ASSERT_TRUE(CreatePeerConnectionWrappersWithoutMediaEngine()); ConnectFakeSignaling(); caller()->CreateDataChannel(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_channel(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->data_channel(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); ASSERT_TRUE(caller()->data_observer()->IsOpen()); } TEST_F(DataChannelIntegrationTestUnifiedPlan, DataChannelClosesWhenClosed) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); + ASSERT_TRUE(CreatePeerConnectionWrappersWithoutMediaEngine()); ConnectFakeSignaling(); caller()->CreateDataChannel(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->data_observer(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); caller()->data_channel()->Close(); - ASSERT_TRUE_WAIT(!callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return !callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); } TEST_F(DataChannelIntegrationTestUnifiedPlan, DataChannelClosesWhenClosedReverse) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); + ASSERT_TRUE(CreatePeerConnectionWrappersWithoutMediaEngine()); ConnectFakeSignaling(); caller()->CreateDataChannel(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return 
SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->data_observer(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); callee()->data_channel()->Close(); - ASSERT_TRUE_WAIT(!caller()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return !caller()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); } TEST_F(DataChannelIntegrationTestUnifiedPlan, DataChannelClosesWhenPeerConnectionClosed) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); + ASSERT_TRUE(CreatePeerConnectionWrappersWithoutMediaEngine()); ConnectFakeSignaling(); caller()->CreateDataChannel(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer(), kDefaultTimeout); - ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return SignalingStateStable(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->data_observer(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); caller()->pc()->Close(); - ASSERT_TRUE_WAIT(!callee()->data_observer()->IsOpen(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return !callee()->data_observer()->IsOpen(); }, IsTrue()), + IsRtcOk()); +} + +class DataChannelIntegrationTestUnifiedPlanFieldTrials + : public DataChannelIntegrationTestUnifiedPlan, + public ::testing::WithParamInterface> { + protected: + DataChannelIntegrationTestUnifiedPlanFieldTrials() { + const bool callee_active = std::get<0>(GetParam()); + RTC_LOG(LS_INFO) << "dtls_active: " << (callee_active ? "callee" : "caller") + << " field-trials: caller: " << std::get<1>(GetParam()) + << " callee: " << std::get<2>(GetParam()) + << " callee2: " << std::get<3>(GetParam()); + + SetFieldTrials(kCallerName, std::get<1>(GetParam())); + SetFieldTrials(kCalleeName, std::get<2>(GetParam())); + SetFieldTrials("Callee2", std::get<3>(GetParam())); + } + + std::unique_ptr SetupCallee2AndDc( + bool addTurn) { + RTCConfiguration config; + if (addTurn) { + static const SocketAddress turn_server_1_internal_address{"192.0.2.1", + 3478}; + static const SocketAddress turn_server_1_external_address{"192.0.3.1", 0}; + TestTurnServer* turn_server_1 = CreateTurnServer( + turn_server_1_internal_address, turn_server_1_external_address); + + // Bypass permission check on received packets so media can be sent before + // the candidate is signaled. 
+ SendTask(network_thread(), [turn_server_1] { + turn_server_1->set_enable_permission_checks(false); + }); + + PeerConnectionInterface::IceServer ice_server_1; + ice_server_1.urls.push_back("turn:192.0.2.1:3478"); + ice_server_1.username = "test"; + ice_server_1.password = "test"; + config.servers.push_back(ice_server_1); + config.type = PeerConnectionInterface::kRelay; + config.presume_writable_when_fully_relayed = true; + } + CreatePeerConnectionWrappersWithConfig(config, config, + /* create_media_engine= */ false); + PeerConnectionDependencies dependencies(nullptr); + std::unique_ptr cert_generator( + new FakeRTCCertificateGenerator()); + cert_generator->use_alternate_key(); + dependencies.cert_generator = std::move(cert_generator); + auto callee2 = CreatePeerConnectionWrapper("Callee2", nullptr, &config, + std::move(dependencies), nullptr, + /*reset_encoder_factory=*/false, + /*reset_decoder_factory=*/false, + /*create_media_engine=*/false); + ConnectFakeSignaling(); + DataChannelInit dc_init; + dc_init.negotiated = true; + dc_init.id = 77; + caller()->CreateDataChannel("label", &dc_init); + callee()->CreateDataChannel("label", &dc_init); + callee2->CreateDataChannel("label", &dc_init); + + callee2->set_signaling_message_receiver(caller()); + return callee2; + } + + void WaitConnectedAndDcOpen(bool prAnswer, + PeerConnectionIntegrationWrapper* caller, + PeerConnectionIntegrationWrapper* callee) { + if (prAnswer) { + EXPECT_EQ(caller->pc()->signaling_state(), + PeerConnectionInterface::kHaveRemotePrAnswer); + EXPECT_EQ(callee->pc()->signaling_state(), + PeerConnectionInterface::kHaveLocalPrAnswer); + } else { + EXPECT_EQ(caller->pc()->signaling_state(), + PeerConnectionInterface::kStable); + EXPECT_EQ(callee->pc()->signaling_state(), + PeerConnectionInterface::kStable); + } + ASSERT_THAT(WaitUntil([&] { return caller->data_channel()->state(); }, + Eq(DataChannelInterface::kOpen)), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee->data_channel()->state(); }, + Eq(DataChannelInterface::kOpen)), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return caller->pc()->peer_connection_state(); }, + Eq(PeerConnectionInterface::PeerConnectionState::kConnected)), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return callee->pc()->peer_connection_state(); }, + Eq(PeerConnectionInterface::PeerConnectionState::kConnected)), + IsRtcOk()); + } + + static void SendOnDatachannelWhenConnectedCallback( + PeerConnectionIntegrationWrapper* peer, + const std::string& data, + std::atomic& signal) { + if (peer->pc()->peer_connection_state() == + PeerConnectionInterface::PeerConnectionState::kConnected && + peer->data_channel()->state() == DataChannelInterface::kOpen) { + peer->data_channel()->SendAsync(DataBuffer(data), [&](RTCError err) { + signal.store(err.ok() ? 
1 : -1); + }); + } + } + + void VerifyDtlsRoles(PeerConnectionIntegrationWrapper* caller, + PeerConnectionIntegrationWrapper* callee) { + const bool callee_active = std::get<0>(GetParam()); + if (callee_active) { + ASSERT_THAT(caller->dtls_transport_role(), + Eq(DtlsTransportTlsRole::kServer)); + ASSERT_THAT(callee->dtls_transport_role(), + Eq(DtlsTransportTlsRole::kClient)); + } else { + ASSERT_THAT(caller->dtls_transport_role(), + Eq(DtlsTransportTlsRole::kClient)); + ASSERT_THAT(callee->dtls_transport_role(), + Eq(DtlsTransportTlsRole::kServer)); + } + } + + void VerifyReceivedDcMessages(PeerConnectionIntegrationWrapper* peer, + const std::string& data, + std::atomic& signal) { + ASSERT_THAT(WaitUntil([&] { return signal.load(); }, Ne(0)), IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return peer->data_observer()->last_message(); }, + Eq(data)), + IsRtcOk()); + } + + const char* CheckSupported() { + const bool callee_active = std::get<0>(GetParam()); + const bool callee_has_dtls_in_stun = absl::StrContains( + std::get<2>(GetParam()), "WebRTC-IceHandshakeDtls/Enabled/"); + const bool callee2_has_dtls_in_stun = absl::StrContains( + std::get<3>(GetParam()), "WebRTC-IceHandshakeDtls/Enabled/"); + if (callee_active && + (callee_has_dtls_in_stun || callee2_has_dtls_in_stun)) { + return "dtls-in-stun when callee(s) are dtls clients"; + } + return nullptr; + } +}; + +static const char* kTrialsVariants[] = { + "", + "WebRTC-ForceDtls13/Enabled/", + "WebRTC-IceHandshakeDtls/Enabled/", + "WebRTC-ForceDtls13/Enabled/WebRTC-EnableDtlsPqc/Enabled/", + "WebRTC-ForceDtls13/Enabled/WebRTC-IceHandshakeDtls/Enabled/", + ("WebRTC-ForceDtls13/Enabled/WebRTC-IceHandshakeDtls/Enabled/" + "WebRTC-EnableDtlsPqc/Enabled/"), +}; + +INSTANTIATE_TEST_SUITE_P(DataChannelIntegrationTestUnifiedPlanFieldTrials, + DataChannelIntegrationTestUnifiedPlanFieldTrials, + Combine(testing::Bool(), + ValuesIn(kTrialsVariants), + ValuesIn(kTrialsVariants), + ValuesIn(kTrialsVariants))); + +TEST_P(DataChannelIntegrationTestUnifiedPlanFieldTrials, + DtlsRestartOneCalleAtATime) { + if (auto msg = CheckSupported()) { + GTEST_SKIP() << "Testcase not supported for this scenario: " << msg; + } + + auto callee2 = SetupCallee2AndDc(/* addTurn= */ false); + const bool callee_active = std::get<0>(GetParam()); + std::unique_ptr offer; + callee()->SetReceivedSdpMunger( + [&](std::unique_ptr& sdp) { + if (callee_active) { + MakeOfferHavePassiveDtlsRole(sdp); + } else { + MakeOfferHaveActiveDtlsRole(sdp); + } + // Capture offer so that it can be sent to Callee2 too. + offer = sdp->Clone(); + }); + callee()->SetGeneratedSdpMunger( + [&](std::unique_ptr& sdp) { + // Modify offer to kPrAnswer + SetSdpType(sdp, SdpType::kPrAnswer); + if (callee_active) { + MakeOfferHaveActiveDtlsRole(sdp); + } else { + MakeOfferHavePassiveDtlsRole(sdp); + } + }); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_FALSE(HasFailure()); + WaitConnectedAndDcOpen(/* prAnswer= */ true, caller(), callee()); + VerifyDtlsRoles(caller(), callee()); + ASSERT_FALSE(HasFailure()); + + std::atomic caller_sent_on_dc(0); + std::atomic callee2_sent_on_dc(0); + caller()->set_connection_change_callback([&](auto new_state) { + SendOnDatachannelWhenConnectedCallback(caller(), "KESO", caller_sent_on_dc); + }); + // Install same cb on both connection_change_callback and + // data_observer->set_state_change_callback since they can fire in any order. 
+  callee2->set_connection_change_callback([&](auto new_state) {
+    SendOnDatachannelWhenConnectedCallback(callee2.get(), "KENT",
+                                           callee2_sent_on_dc);
+  });
+  callee2->data_observer()->set_state_change_callback([&](auto new_state) {
+    SendOnDatachannelWhenConnectedCallback(callee2.get(), "KENT",
+                                           callee2_sent_on_dc);
+  });
+
+  // Now let callee2 get the offer, apply it and send the answer to caller.
+  std::string offer_sdp;
+  EXPECT_TRUE(offer->ToString(&offer_sdp));
+  callee2->ReceiveSdpMessage(SdpType::kOffer, offer_sdp);
+  WaitConnectedAndDcOpen(/* prAnswer= */ false, caller(), callee2.get());
+  ASSERT_FALSE(HasFailure());
+
+  VerifyReceivedDcMessages(caller(), "KENT", callee2_sent_on_dc);
+  VerifyReceivedDcMessages(callee2.get(), "KESO", caller_sent_on_dc);
+  VerifyDtlsRoles(caller(), callee2.get());
+  ASSERT_FALSE(HasFailure());
+}
+
+TEST_P(DataChannelIntegrationTestUnifiedPlanFieldTrials,
+       DtlsRestartTwoActiveCallees) {
+  if (auto msg = CheckSupported()) {
+    GTEST_SKIP() << "Testcase not supported for this scenario: " << msg;
+  }
+
+  auto callee2 = SetupCallee2AndDc(/* addTurn= */ true);
+  const bool callee_active = std::get<0>(GetParam());
+  std::unique_ptr offer;
+  callee()->SetReceivedSdpMunger(
+      [&](std::unique_ptr& sdp) {
+        if (callee_active) {
+          MakeOfferHavePassiveDtlsRole(sdp);
+        } else {
+          MakeOfferHaveActiveDtlsRole(sdp);
+        }
+        // Capture offer so that it can be sent to Callee2 too.
+        offer = sdp->Clone();
+      });
+  callee()->SetGeneratedSdpMunger(
+      [&](std::unique_ptr& sdp) {
+        // Modify the generated answer to kPrAnswer
+        SetSdpType(sdp, SdpType::kPrAnswer);
+        if (callee_active) {
+          MakeOfferHaveActiveDtlsRole(sdp);
+        } else {
+          MakeOfferHavePassiveDtlsRole(sdp);
+        }
+      });
+  bool first_answer = true;
+  std::unique_ptr answer;
+  caller()->SetReceivedSdpMunger(
+      [&](std::unique_ptr& sdp) {
+        if (first_answer) {
+          first_answer = false;
+        } else {
+          answer = std::move(sdp);
+        }
+      });
+  caller()->CreateAndSetAndSignalOffer();
+  std::string offer_sdp;
+  EXPECT_TRUE(offer->ToString(&offer_sdp));
+  // Apply offer on callee2 "in parallel" to callee.
+  callee2->ReceiveSdpMessage(SdpType::kOffer, offer_sdp);
+
+  ASSERT_FALSE(HasFailure());
+  EXPECT_EQ(callee2->pc()->signaling_state(), PeerConnectionInterface::kStable);
+  WaitConnectedAndDcOpen(/* prAnswer= */ true, caller(), callee());
+  ASSERT_FALSE(HasFailure());
+
+  // Forward turn ice candidate also to callee2.
+  auto candidate = caller()->last_gathered_ice_candidate();
+  std::string ice_sdp;
+  EXPECT_TRUE(candidate->ToString(&ice_sdp));
+  callee2->ReceiveIceMessage(candidate->sdp_mid(), candidate->sdp_mline_index(),
+                             ice_sdp);
+
+  // Wait until callee2 is ICE connected.
+  ASSERT_THAT(
+      WaitUntil(
+          [&] { return callee2->pc()->standardized_ice_connection_state(); },
+          Eq(PeerConnectionInterface::kIceConnectionConnected)),
+      IsRtcOk());
+
+  VerifyDtlsRoles(caller(), callee());
+  ASSERT_THAT(callee2->dtls_transport_role(), Eq(std::nullopt));
+
+  std::atomic caller_sent_on_dc(0);
+  std::atomic callee2_sent_on_dc(0);
+  caller()->set_connection_change_callback([&](auto new_state) {
+    SendOnDatachannelWhenConnectedCallback(caller(), "KESO", caller_sent_on_dc);
+  });
+  // Install same cb on both connection_change_callback and
+  // data_observer->set_state_change_callback since they can fire in any order.
+ callee2->set_connection_change_callback([&](auto new_state) { + SendOnDatachannelWhenConnectedCallback(callee2.get(), "KENT", + callee2_sent_on_dc); + }); + callee2->data_observer()->set_state_change_callback([&](auto new_state) { + SendOnDatachannelWhenConnectedCallback(callee2.get(), "KENT", + callee2_sent_on_dc); + }); + + // Now switch to callee2! + first_answer = true; + ASSERT_THAT(answer, testing::Not(testing::IsNull())); + std::string answer_sdp; + EXPECT_TRUE(answer->ToString(&answer_sdp)); + caller()->ReceiveSdpMessage(SdpType::kAnswer, answer_sdp); + + EXPECT_EQ(caller()->pc()->signaling_state(), + PeerConnectionInterface::kStable); + + VerifyReceivedDcMessages(caller(), "KENT", callee2_sent_on_dc); + VerifyReceivedDcMessages(callee2.get(), "KESO", caller_sent_on_dc); + VerifyDtlsRoles(caller(), callee2.get()); + ASSERT_FALSE(HasFailure()); } #endif // WEBRTC_HAVE_SCTP diff --git a/pc/data_channel_unittest.cc b/pc/data_channel_unittest.cc index 9b84a1be61..293d33f4ba 100644 --- a/pc/data_channel_unittest.cc +++ b/pc/data_channel_unittest.cc @@ -12,25 +12,32 @@ #include #include +#include #include #include #include "api/data_channel_interface.h" +#include "api/make_ref_counted.h" +#include "api/priority.h" #include "api/rtc_error.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/test/rtc_error_matchers.h" #include "api/transport/data_channel_transport_interface.h" -#include "media/base/media_channel.h" #include "media/sctp/sctp_transport_internal.h" #include "pc/sctp_data_channel.h" #include "pc/sctp_utils.h" #include "pc/test/fake_data_channel_controller.h" +#include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/gunit.h" #include "rtc_base/null_socket_server.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/thread.h" +#include "test/gmock.h" #include "test/gtest.h" #include "test/run_loop.h" +#include "test/wait_until.h" #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) #include "test/testsupport/rtc_expect_death.h" @@ -40,7 +47,7 @@ namespace webrtc { namespace { -static constexpr int kDefaultTimeout = 10000; +using ::testing::Eq; class FakeDataChannelObserver : public DataChannelObserver { public: @@ -77,12 +84,11 @@ class FakeDataChannelObserver : public DataChannelObserver { class SctpDataChannelTest : public ::testing::Test { protected: SctpDataChannelTest() - : network_thread_(std::make_unique()), + : network_thread_(std::make_unique()), controller_(new FakeDataChannelController(&network_thread_)) { network_thread_.Start(); inner_channel_ = controller_->CreateDataChannel("test", init_); - channel_ = - webrtc::SctpDataChannel::CreateProxy(inner_channel_, signaling_safety_); + channel_ = SctpDataChannel::CreateProxy(inner_channel_, signaling_safety_); } ~SctpDataChannelTest() override { run_loop_.Flush(); @@ -99,9 +105,9 @@ class SctpDataChannelTest : public ::testing::Test { StreamId sid(0); network_thread_.BlockingCall([&]() { RTC_DCHECK_RUN_ON(&network_thread_); - if (!inner_channel_->sid_n().HasValue()) { + if (!inner_channel_->sid_n().has_value()) { inner_channel_->SetSctpSid_n(sid); - controller_->AddSctpDataStream(sid); + controller_->AddSctpDataStream(sid, inner_channel_->priority()); } inner_channel_->OnTransportChannelCreated(); }); @@ -114,12 +120,11 @@ class SctpDataChannelTest : public ::testing::Test { // in the SctpDataChannel code is (still) tied to the signaling thread, but // the 
`AddSctpDataStream` operation is a bridge to the transport and needs // to run on the network thread. - void SetChannelSid(const rtc::scoped_refptr& channel, + void SetChannelSid(const scoped_refptr& channel, StreamId sid) { - RTC_DCHECK(sid.HasValue()); network_thread_.BlockingCall([&]() { channel->SetSctpSid_n(sid); - controller_->AddSctpDataStream(sid); + controller_->AddSctpDataStream(sid, channel->priority()); }); } @@ -143,14 +148,14 @@ class SctpDataChannelTest : public ::testing::Test { } test::RunLoop run_loop_; - rtc::Thread network_thread_; + Thread network_thread_; InternalDataChannelInit init_; - rtc::scoped_refptr signaling_safety_ = + scoped_refptr signaling_safety_ = PendingTaskSafetyFlag::Create(); std::unique_ptr controller_; std::unique_ptr observer_; - rtc::scoped_refptr inner_channel_; - rtc::scoped_refptr channel_; + scoped_refptr inner_channel_; + scoped_refptr channel_; }; TEST_F(SctpDataChannelTest, VerifyConfigurationGetters) { @@ -164,16 +169,14 @@ TEST_F(SctpDataChannelTest, VerifyConfigurationGetters) { EXPECT_EQ(channel_->reliable(), init_.reliable); EXPECT_EQ(channel_->ordered(), init_.ordered); EXPECT_EQ(channel_->negotiated(), init_.negotiated); - EXPECT_EQ(channel_->priority(), Priority::kLow); - EXPECT_EQ(channel_->maxRetransmitTime(), static_cast(-1)); + EXPECT_EQ(channel_->priority(), PriorityValue(Priority::kLow)); EXPECT_EQ(channel_->maxPacketLifeTime(), init_.maxRetransmitTime); - EXPECT_EQ(channel_->maxRetransmits(), static_cast(-1)); EXPECT_EQ(channel_->maxRetransmitsOpt(), init_.maxRetransmits); // Check the non-const part of the configuration. EXPECT_EQ(channel_->id(), init_.id); network_thread_.BlockingCall( - [&]() { EXPECT_EQ(inner_channel_->sid_n(), StreamId()); }); + [&]() { EXPECT_EQ(inner_channel_->sid_n(), std::nullopt); }); SetChannelReady(); EXPECT_EQ(channel_->id(), 0); @@ -184,17 +187,19 @@ TEST_F(SctpDataChannelTest, VerifyConfigurationGetters) { // Verifies that the data channel is connected to the transport after creation. TEST_F(SctpDataChannelTest, ConnectedToTransportOnCreated) { controller_->set_transport_available(true); - rtc::scoped_refptr dc = + scoped_refptr dc = controller_->CreateDataChannel("test1", init_); EXPECT_TRUE(controller_->IsConnected(dc.get())); // The sid is not set yet, so it should not have added the streams. - StreamId sid = network_thread_.BlockingCall([&]() { return dc->sid_n(); }); - EXPECT_FALSE(controller_->IsStreamAdded(sid)); + std::optional sid = + network_thread_.BlockingCall([&]() { return dc->sid_n(); }); + EXPECT_FALSE(sid.has_value()); SetChannelSid(dc, StreamId(0)); sid = network_thread_.BlockingCall([&]() { return dc->sid_n(); }); - EXPECT_TRUE(controller_->IsStreamAdded(sid)); + ASSERT_TRUE(sid.has_value()); + EXPECT_TRUE(controller_->IsStreamAdded(*sid)); } // Tests the state of the data channel. @@ -222,266 +227,6 @@ TEST_F(SctpDataChannelTest, StateTransition) { EXPECT_FALSE(controller_->IsConnected(inner_channel_.get())); } -// Tests that DataChannel::buffered_amount() is correct after the channel is -// blocked. 
-TEST_F(SctpDataChannelTest, BufferedAmountWhenBlocked) { - AddObserver(); - SetChannelReady(); - DataBuffer buffer("abcd"); - size_t successful_sends = 0; - auto send_complete = [&](RTCError err) { - EXPECT_TRUE(err.ok()); - ++successful_sends; - }; - channel_->SendAsync(buffer, send_complete); - FlushNetworkThreadAndPendingOperations(); - EXPECT_EQ(channel_->buffered_amount(), 0u); - size_t successful_send_count = 1; - EXPECT_EQ(successful_send_count, successful_sends); - EXPECT_EQ(successful_send_count, - observer_->on_buffered_amount_change_count()); - - controller_->set_send_blocked(true); - const int number_of_packets = 3; - for (int i = 0; i < number_of_packets; ++i) { - channel_->SendAsync(buffer, send_complete); - ++successful_send_count; - } - FlushNetworkThreadAndPendingOperations(); - EXPECT_EQ(buffer.data.size() * number_of_packets, - channel_->buffered_amount()); - EXPECT_EQ(successful_send_count, successful_sends); - - // An event should not have been fired for buffered amount. - EXPECT_EQ(1u, observer_->on_buffered_amount_change_count()); - - // Now buffered amount events should get fired and the value - // get down to 0u. - controller_->set_send_blocked(false); - run_loop_.Flush(); - EXPECT_EQ(channel_->buffered_amount(), 0u); - EXPECT_EQ(successful_send_count, successful_sends); - EXPECT_EQ(successful_send_count, - observer_->on_buffered_amount_change_count()); -} - -// TODO(tommi): This test uses `Send()`. Remove once fully deprecated. -TEST_F(SctpDataChannelTest, DeprecatedBufferedAmountWhenBlocked) { - AddObserver(); - SetChannelReady(); - DataBuffer buffer("abcd"); - EXPECT_TRUE(channel_->Send(buffer)); - size_t successful_send_count = 1; - - run_loop_.Flush(); - EXPECT_EQ(0U, channel_->buffered_amount()); - EXPECT_EQ(successful_send_count, - observer_->on_buffered_amount_change_count()); - - controller_->set_send_blocked(true); - - const int number_of_packets = 3; - for (int i = 0; i < number_of_packets; ++i) { - EXPECT_TRUE(channel_->Send(buffer)); - } - EXPECT_EQ(buffer.data.size() * number_of_packets, - channel_->buffered_amount()); - EXPECT_EQ(successful_send_count, - observer_->on_buffered_amount_change_count()); - - controller_->set_send_blocked(false); - run_loop_.Flush(); - successful_send_count += number_of_packets; - EXPECT_EQ(channel_->buffered_amount(), 0u); - EXPECT_EQ(successful_send_count, - observer_->on_buffered_amount_change_count()); -} - -// Tests that the queued data are sent when the channel transitions from blocked -// to unblocked. -TEST_F(SctpDataChannelTest, QueuedDataSentWhenUnblocked) { - AddObserver(); - SetChannelReady(); - DataBuffer buffer("abcd"); - controller_->set_send_blocked(true); - size_t successful_send = 0u; - auto send_complete = [&](RTCError err) { - EXPECT_TRUE(err.ok()); - ++successful_send; - }; - channel_->SendAsync(buffer, send_complete); - FlushNetworkThreadAndPendingOperations(); - EXPECT_EQ(1U, successful_send); - EXPECT_EQ(0U, observer_->on_buffered_amount_change_count()); - - controller_->set_send_blocked(false); - SetChannelReady(); - EXPECT_EQ(channel_->buffered_amount(), 0u); - EXPECT_EQ(observer_->on_buffered_amount_change_count(), 1u); -} - -// TODO(tommi): This test uses `Send()`. Remove once fully deprecated. 
-TEST_F(SctpDataChannelTest, DeprecatedQueuedDataSentWhenUnblocked) { - AddObserver(); - SetChannelReady(); - DataBuffer buffer("abcd"); - controller_->set_send_blocked(true); - EXPECT_TRUE(channel_->Send(buffer)); - - EXPECT_EQ(0U, observer_->on_buffered_amount_change_count()); - - controller_->set_send_blocked(false); - SetChannelReady(); - EXPECT_EQ(0U, channel_->buffered_amount()); - EXPECT_EQ(1U, observer_->on_buffered_amount_change_count()); -} - -// Tests that no crash when the channel is blocked right away while trying to -// send queued data. -TEST_F(SctpDataChannelTest, BlockedWhenSendQueuedDataNoCrash) { - AddObserver(); - SetChannelReady(); - DataBuffer buffer("abcd"); - controller_->set_send_blocked(true); - size_t successful_send = 0u; - auto send_complete = [&](RTCError err) { - EXPECT_TRUE(err.ok()); - ++successful_send; - }; - channel_->SendAsync(buffer, send_complete); - FlushNetworkThreadAndPendingOperations(); - EXPECT_EQ(1U, successful_send); - EXPECT_EQ(0U, observer_->on_buffered_amount_change_count()); - - // Set channel ready while it is still blocked. - SetChannelReady(); - EXPECT_EQ(buffer.size(), channel_->buffered_amount()); - EXPECT_EQ(0U, observer_->on_buffered_amount_change_count()); - - // Unblock the channel to send queued data again, there should be no crash. - controller_->set_send_blocked(false); - SetChannelReady(); - EXPECT_EQ(0U, channel_->buffered_amount()); - EXPECT_EQ(1U, observer_->on_buffered_amount_change_count()); -} - -// TODO(tommi): This test uses `Send()`. Remove once fully deprecated. -TEST_F(SctpDataChannelTest, DeprecatedBlockedWhenSendQueuedDataNoCrash) { - AddObserver(); - SetChannelReady(); - DataBuffer buffer("abcd"); - controller_->set_send_blocked(true); - EXPECT_TRUE(channel_->Send(buffer)); - EXPECT_EQ(0U, observer_->on_buffered_amount_change_count()); - - // Set channel ready while it is still blocked. - SetChannelReady(); - EXPECT_EQ(buffer.size(), channel_->buffered_amount()); - EXPECT_EQ(0U, observer_->on_buffered_amount_change_count()); - - // Unblock the channel to send queued data again, there should be no crash. - controller_->set_send_blocked(false); - SetChannelReady(); - EXPECT_EQ(0U, channel_->buffered_amount()); - EXPECT_EQ(1U, observer_->on_buffered_amount_change_count()); -} - -// Tests that DataChannel::messages_sent() and DataChannel::bytes_sent() are -// correct, sending data both while unblocked and while blocked. -TEST_F(SctpDataChannelTest, VerifyMessagesAndBytesSent) { - AddObserver(); - SetChannelReady(); - std::vector buffers({ - DataBuffer("message 1"), - DataBuffer("msg 2"), - DataBuffer("message three"), - DataBuffer("quadra message"), - DataBuffer("fifthmsg"), - DataBuffer("message of the beast"), - }); - - // Default values. - EXPECT_EQ(0U, channel_->messages_sent()); - EXPECT_EQ(0U, channel_->bytes_sent()); - - // Send three buffers while not blocked. - controller_->set_send_blocked(false); - for (int i : {0, 1, 2}) { - channel_->SendAsync(buffers[i], nullptr); - } - FlushNetworkThreadAndPendingOperations(); - - size_t bytes_sent = buffers[0].size() + buffers[1].size() + buffers[2].size(); - EXPECT_EQ_WAIT(0U, channel_->buffered_amount(), kDefaultTimeout); - EXPECT_EQ(3U, channel_->messages_sent()); - EXPECT_EQ(bytes_sent, channel_->bytes_sent()); - - // Send three buffers while blocked, queuing the buffers. 
- controller_->set_send_blocked(true); - for (int i : {3, 4, 5}) { - channel_->SendAsync(buffers[i], nullptr); - } - FlushNetworkThreadAndPendingOperations(); - size_t bytes_queued = - buffers[3].size() + buffers[4].size() + buffers[5].size(); - EXPECT_EQ(bytes_queued, channel_->buffered_amount()); - EXPECT_EQ(3U, channel_->messages_sent()); - EXPECT_EQ(bytes_sent, channel_->bytes_sent()); - - // Unblock and make sure everything was sent. - controller_->set_send_blocked(false); - EXPECT_EQ_WAIT(0U, channel_->buffered_amount(), kDefaultTimeout); - bytes_sent += bytes_queued; - EXPECT_EQ(6U, channel_->messages_sent()); - EXPECT_EQ(bytes_sent, channel_->bytes_sent()); -} - -// TODO(tommi): This test uses `Send()`. Remove once fully deprecated. -TEST_F(SctpDataChannelTest, DeprecatedVerifyMessagesAndBytesSent) { - AddObserver(); - SetChannelReady(); - std::vector buffers({ - DataBuffer("message 1"), - DataBuffer("msg 2"), - DataBuffer("message three"), - DataBuffer("quadra message"), - DataBuffer("fifthmsg"), - DataBuffer("message of the beast"), - }); - - // Default values. - EXPECT_EQ(0U, channel_->messages_sent()); - EXPECT_EQ(0U, channel_->bytes_sent()); - - // Send three buffers while not blocked. - controller_->set_send_blocked(false); - EXPECT_TRUE(channel_->Send(buffers[0])); - EXPECT_TRUE(channel_->Send(buffers[1])); - EXPECT_TRUE(channel_->Send(buffers[2])); - size_t bytes_sent = buffers[0].size() + buffers[1].size() + buffers[2].size(); - EXPECT_EQ_WAIT(0U, channel_->buffered_amount(), kDefaultTimeout); - EXPECT_EQ(3U, channel_->messages_sent()); - EXPECT_EQ(bytes_sent, channel_->bytes_sent()); - - // Send three buffers while blocked, queuing the buffers. - controller_->set_send_blocked(true); - EXPECT_TRUE(channel_->Send(buffers[3])); - EXPECT_TRUE(channel_->Send(buffers[4])); - EXPECT_TRUE(channel_->Send(buffers[5])); - size_t bytes_queued = - buffers[3].size() + buffers[4].size() + buffers[5].size(); - EXPECT_EQ(bytes_queued, channel_->buffered_amount()); - EXPECT_EQ(3U, channel_->messages_sent()); - EXPECT_EQ(bytes_sent, channel_->bytes_sent()); - - // Unblock and make sure everything was sent. - controller_->set_send_blocked(false); - EXPECT_EQ_WAIT(0U, channel_->buffered_amount(), kDefaultTimeout); - bytes_sent += bytes_queued; - EXPECT_EQ(6U, channel_->messages_sent()); - EXPECT_EQ(bytes_sent, channel_->bytes_sent()); -} - // Tests that the queued control message is sent when channel is ready. TEST_F(SctpDataChannelTest, OpenMessageSent) { // Initially the id is unassigned. @@ -494,23 +239,13 @@ TEST_F(SctpDataChannelTest, OpenMessageSent) { EXPECT_EQ(controller_->last_sid(), channel_->id()); } -TEST_F(SctpDataChannelTest, QueuedOpenMessageSent) { - controller_->set_send_blocked(true); - SetChannelReady(); - controller_->set_send_blocked(false); - - EXPECT_EQ(DataMessageType::kControl, - controller_->last_send_data_params().type); - EXPECT_EQ(controller_->last_sid(), channel_->id()); -} - // Tests that the DataChannel created after transport gets ready can enter OPEN // state. 
TEST_F(SctpDataChannelTest, LateCreatedChannelTransitionToOpen) { SetChannelReady(); InternalDataChannelInit init; init.id = 1; - auto dc = webrtc::SctpDataChannel::CreateProxy( + auto dc = SctpDataChannel::CreateProxy( controller_->CreateDataChannel("test1", init), signaling_safety_); EXPECT_EQ(DataChannelInterface::kOpen, dc->state()); } @@ -522,11 +257,13 @@ TEST_F(SctpDataChannelTest, SendUnorderedAfterReceivesOpenAck) { InternalDataChannelInit init; init.id = 1; init.ordered = false; - rtc::scoped_refptr dc = + scoped_refptr dc = controller_->CreateDataChannel("test1", init); - auto proxy = webrtc::SctpDataChannel::CreateProxy(dc, signaling_safety_); + auto proxy = SctpDataChannel::CreateProxy(dc, signaling_safety_); - EXPECT_EQ_WAIT(DataChannelInterface::kOpen, proxy->state(), 1000); + EXPECT_THAT(WaitUntil([&] { return proxy->state(); }, + Eq(DataChannelInterface::kOpen)), + IsRtcOk()); // Sends a message and verifies it's ordered. DataBuffer buffer("some data"); @@ -534,7 +271,7 @@ TEST_F(SctpDataChannelTest, SendUnorderedAfterReceivesOpenAck) { EXPECT_TRUE(controller_->last_send_data_params().ordered); // Emulates receiving an OPEN_ACK message. - rtc::CopyOnWriteBuffer payload; + CopyOnWriteBuffer payload; WriteDataChannelOpenAckMessage(&payload); network_thread_.BlockingCall( [&] { dc->OnDataReceived(DataMessageType::kControl, payload); }); @@ -551,11 +288,13 @@ TEST_F(SctpDataChannelTest, DeprecatedSendUnorderedAfterReceivesOpenAck) { InternalDataChannelInit init; init.id = 1; init.ordered = false; - rtc::scoped_refptr dc = + scoped_refptr dc = controller_->CreateDataChannel("test1", init); - auto proxy = webrtc::SctpDataChannel::CreateProxy(dc, signaling_safety_); + auto proxy = SctpDataChannel::CreateProxy(dc, signaling_safety_); - EXPECT_EQ_WAIT(DataChannelInterface::kOpen, proxy->state(), 1000); + EXPECT_THAT(WaitUntil([&] { return proxy->state(); }, + Eq(DataChannelInterface::kOpen)), + IsRtcOk()); // Sends a message and verifies it's ordered. DataBuffer buffer("some data"); @@ -563,7 +302,7 @@ TEST_F(SctpDataChannelTest, DeprecatedSendUnorderedAfterReceivesOpenAck) { EXPECT_TRUE(controller_->last_send_data_params().ordered); // Emulates receiving an OPEN_ACK message. - rtc::CopyOnWriteBuffer payload; + CopyOnWriteBuffer payload; WriteDataChannelOpenAckMessage(&payload); network_thread_.BlockingCall( [&] { dc->OnDataReceived(DataMessageType::kControl, payload); }); @@ -580,11 +319,13 @@ TEST_F(SctpDataChannelTest, SendUnorderedAfterReceiveData) { InternalDataChannelInit init; init.id = 1; init.ordered = false; - rtc::scoped_refptr dc = + scoped_refptr dc = controller_->CreateDataChannel("test1", init); - auto proxy = webrtc::SctpDataChannel::CreateProxy(dc, signaling_safety_); + auto proxy = SctpDataChannel::CreateProxy(dc, signaling_safety_); - EXPECT_EQ_WAIT(DataChannelInterface::kOpen, proxy->state(), 1000); + EXPECT_THAT(WaitUntil([&] { return proxy->state(); }, + Eq(DataChannelInterface::kOpen)), + IsRtcOk()); // Emulates receiving a DATA message. 
DataBuffer buffer("data"); @@ -603,11 +344,13 @@ TEST_F(SctpDataChannelTest, DeprecatedSendUnorderedAfterReceiveData) { InternalDataChannelInit init; init.id = 1; init.ordered = false; - rtc::scoped_refptr dc = + scoped_refptr dc = controller_->CreateDataChannel("test1", init); - auto proxy = webrtc::SctpDataChannel::CreateProxy(dc, signaling_safety_); + auto proxy = SctpDataChannel::CreateProxy(dc, signaling_safety_); - EXPECT_EQ_WAIT(DataChannelInterface::kOpen, proxy->state(), 1000); + EXPECT_THAT(WaitUntil([&] { return proxy->state(); }, + Eq(DataChannelInterface::kOpen)), + IsRtcOk()); // Emulates receiving a DATA message. DataBuffer buffer("data"); @@ -619,56 +362,6 @@ TEST_F(SctpDataChannelTest, DeprecatedSendUnorderedAfterReceiveData) { EXPECT_FALSE(controller_->last_send_data_params().ordered); } -// Tests that the channel can't open until it's successfully sent the OPEN -// message. -TEST_F(SctpDataChannelTest, OpenWaitsForOpenMesssage) { - DataBuffer buffer("foo"); - - controller_->set_send_blocked(true); - SetChannelReady(); - EXPECT_EQ(DataChannelInterface::kConnecting, channel_->state()); - controller_->set_send_blocked(false); - EXPECT_EQ_WAIT(DataChannelInterface::kOpen, channel_->state(), 1000); - EXPECT_EQ(DataMessageType::kControl, - controller_->last_send_data_params().type); -} - -// Tests that close first makes sure all queued data gets sent. -TEST_F(SctpDataChannelTest, QueuedCloseFlushes) { - DataBuffer buffer("foo"); - - controller_->set_send_blocked(true); - SetChannelReady(); - EXPECT_EQ(DataChannelInterface::kConnecting, channel_->state()); - controller_->set_send_blocked(false); - EXPECT_EQ_WAIT(DataChannelInterface::kOpen, channel_->state(), 1000); - controller_->set_send_blocked(true); - channel_->SendAsync(buffer, nullptr); - channel_->Close(); - controller_->set_send_blocked(false); - EXPECT_EQ_WAIT(DataChannelInterface::kClosed, channel_->state(), 1000); - EXPECT_TRUE(channel_->error().ok()); - EXPECT_EQ(DataMessageType::kText, controller_->last_send_data_params().type); -} - -// TODO(tommi): This test uses `Send()`. Remove once fully deprecated. -TEST_F(SctpDataChannelTest, DeprecatedQueuedCloseFlushes) { - DataBuffer buffer("foo"); - - controller_->set_send_blocked(true); - SetChannelReady(); - EXPECT_EQ(DataChannelInterface::kConnecting, channel_->state()); - controller_->set_send_blocked(false); - EXPECT_EQ_WAIT(DataChannelInterface::kOpen, channel_->state(), 1000); - controller_->set_send_blocked(true); - channel_->Send(buffer); - channel_->Close(); - controller_->set_send_blocked(false); - EXPECT_EQ_WAIT(DataChannelInterface::kClosed, channel_->state(), 1000); - EXPECT_TRUE(channel_->error().ok()); - EXPECT_EQ(DataMessageType::kText, controller_->last_send_data_params().type); -} - // Tests that messages are sent with the right id. 
TEST_F(SctpDataChannelTest, SendDataId) { SetChannelSid(inner_channel_, StreamId(1)); @@ -712,11 +405,13 @@ TEST_F(SctpDataChannelTest, NoMsgSentIfNegotiatedAndNotFromOpenMsg) { config.open_handshake_role = InternalDataChannelInit::kNone; SetChannelReady(); - rtc::scoped_refptr dc = + scoped_refptr dc = controller_->CreateDataChannel("test1", config); - auto proxy = webrtc::SctpDataChannel::CreateProxy(dc, signaling_safety_); + auto proxy = SctpDataChannel::CreateProxy(dc, signaling_safety_); - EXPECT_EQ_WAIT(DataChannelInterface::kOpen, proxy->state(), 1000); + EXPECT_THAT(WaitUntil([&] { return proxy->state(); }, + Eq(DataChannelInterface::kOpen)), + IsRtcOk()); EXPECT_EQ(0, controller_->last_sid()); } @@ -777,11 +472,13 @@ TEST_F(SctpDataChannelTest, OpenAckSentIfCreatedFromOpenMessage) { config.open_handshake_role = InternalDataChannelInit::kAcker; SetChannelReady(); - rtc::scoped_refptr dc = + scoped_refptr dc = controller_->CreateDataChannel("test1", config); - auto proxy = webrtc::SctpDataChannel::CreateProxy(dc, signaling_safety_); + auto proxy = SctpDataChannel::CreateProxy(dc, signaling_safety_); - EXPECT_EQ_WAIT(DataChannelInterface::kOpen, proxy->state(), 1000); + EXPECT_THAT(WaitUntil([&] { return proxy->state(); }, + Eq(DataChannelInterface::kOpen)), + IsRtcOk()); EXPECT_EQ(config.id, controller_->last_sid()); EXPECT_EQ(DataMessageType::kControl, @@ -800,59 +497,6 @@ TEST_F(SctpDataChannelTest, OpenAckRoleInitialization) { EXPECT_EQ(InternalDataChannelInit::kNone, init2.open_handshake_role); } -// Tests that that Send() returns false if the sending buffer is full -// and the channel stays open. -TEST_F(SctpDataChannelTest, OpenWhenSendBufferFull) { - AddObserver(); - SetChannelReady(); - - const size_t packetSize = 1024; - - rtc::CopyOnWriteBuffer buffer(packetSize); - memset(buffer.MutableData(), 0, buffer.size()); - - DataBuffer packet(buffer, true); - controller_->set_send_blocked(true); - size_t successful_send = 0u, failed_send = 0u; - auto send_complete = [&](RTCError err) { - err.ok() ? ++successful_send : ++failed_send; - }; - - size_t count = DataChannelInterface::MaxSendQueueSize() / packetSize; - for (size_t i = 0; i < count; ++i) { - channel_->SendAsync(packet, send_complete); - } - - // The sending buffer should be full, `Send()` returns false. - channel_->SendAsync(packet, std::move(send_complete)); - FlushNetworkThreadAndPendingOperations(); - EXPECT_TRUE(DataChannelInterface::kOpen == channel_->state()); - EXPECT_EQ(successful_send, count); - EXPECT_EQ(failed_send, 1u); -} - -// TODO(tommi): This test uses `Send()`. Remove once fully deprecated. -TEST_F(SctpDataChannelTest, DeprecatedOpenWhenSendBufferFull) { - SetChannelReady(); - - const size_t packetSize = 1024; - - rtc::CopyOnWriteBuffer buffer(packetSize); - memset(buffer.MutableData(), 0, buffer.size()); - - DataBuffer packet(buffer, true); - controller_->set_send_blocked(true); - - for (size_t i = 0; i < DataChannelInterface::MaxSendQueueSize() / packetSize; - ++i) { - EXPECT_TRUE(channel_->Send(packet)); - } - - // The sending buffer should be full, `Send()` returns false. - EXPECT_FALSE(channel_->Send(packet)); - EXPECT_TRUE(DataChannelInterface::kOpen == channel_->state()); -} - // Tests that the DataChannel is closed on transport errors. TEST_F(SctpDataChannelTest, ClosedOnTransportError) { SetChannelReady(); @@ -884,7 +528,7 @@ TEST_F(SctpDataChannelTest, DeprecatedClosedOnTransportError) { // Tests that the DataChannel is closed if the received buffer is full. 
TEST_F(SctpDataChannelTest, ClosedWhenReceivedBufferFull) { SetChannelReady(); - rtc::CopyOnWriteBuffer buffer(1024); + CopyOnWriteBuffer buffer(1024); memset(buffer.MutableData(), 0, buffer.size()); network_thread_.BlockingCall([&] { @@ -943,12 +587,11 @@ TEST_F(SctpDataChannelTest, TransportDestroyedWhileDataBuffered) { AddObserver(); SetChannelReady(); - rtc::CopyOnWriteBuffer buffer(1024); + CopyOnWriteBuffer buffer(100 * 1024); memset(buffer.MutableData(), 0, buffer.size()); DataBuffer packet(buffer, true); - // Send a packet while sending is blocked so it ends up buffered. - controller_->set_send_blocked(true); + // Send a very large packet, forcing the message to become buffered. channel_->SendAsync(packet, nullptr); // Tell the data channel that its transport is being destroyed. @@ -959,35 +602,9 @@ TEST_F(SctpDataChannelTest, TransportDestroyedWhileDataBuffered) { network_thread_.BlockingCall( [&] { inner_channel_->OnTransportChannelClosed(error); }); controller_.reset(nullptr); - EXPECT_EQ_WAIT(DataChannelInterface::kClosed, channel_->state(), - kDefaultTimeout); - EXPECT_FALSE(channel_->error().ok()); - EXPECT_EQ(RTCErrorType::OPERATION_ERROR_WITH_DATA, channel_->error().type()); - EXPECT_EQ(RTCErrorDetailType::SCTP_FAILURE, channel_->error().error_detail()); -} - -// TODO(tommi): This test uses `Send()`. Remove once fully deprecated. -TEST_F(SctpDataChannelTest, DeprecatedTransportDestroyedWhileDataBuffered) { - SetChannelReady(); - - rtc::CopyOnWriteBuffer buffer(1024); - memset(buffer.MutableData(), 0, buffer.size()); - DataBuffer packet(buffer, true); - - // Send a packet while sending is blocked so it ends up buffered. - controller_->set_send_blocked(true); - EXPECT_TRUE(channel_->Send(packet)); - - // Tell the data channel that its transport is being destroyed. - // It should then stop using the transport (allowing us to delete it) and - // transition to the "closed" state. 
- RTCError error(RTCErrorType::OPERATION_ERROR_WITH_DATA, ""); - error.set_error_detail(RTCErrorDetailType::SCTP_FAILURE); - network_thread_.BlockingCall( - [&] { inner_channel_->OnTransportChannelClosed(error); }); - controller_.reset(nullptr); - EXPECT_EQ_WAIT(DataChannelInterface::kClosed, channel_->state(), - kDefaultTimeout); + EXPECT_THAT(WaitUntil([&] { return channel_->state(); }, + Eq(DataChannelInterface::kClosed)), + IsRtcOk()); EXPECT_FALSE(channel_->error().ok()); EXPECT_EQ(RTCErrorType::OPERATION_ERROR_WITH_DATA, channel_->error().type()); EXPECT_EQ(RTCErrorDetailType::SCTP_FAILURE, channel_->error().error_detail()); @@ -1003,18 +620,18 @@ TEST_F(SctpDataChannelTest, TransportGotErrorCode) { "Transport channel closed"); error.set_error_detail(RTCErrorDetailType::SCTP_FAILURE); error.set_sctp_cause_code( - static_cast(cricket::SctpErrorCauseCode::kProtocolViolation)); + static_cast(SctpErrorCauseCode::kProtocolViolation)); network_thread_.BlockingCall( [&] { inner_channel_->OnTransportChannelClosed(error); }); controller_.reset(nullptr); - EXPECT_EQ_WAIT(DataChannelInterface::kClosed, channel_->state(), - kDefaultTimeout); + EXPECT_THAT(WaitUntil([&] { return channel_->state(); }, + Eq(DataChannelInterface::kClosed)), + IsRtcOk()); EXPECT_FALSE(channel_->error().ok()); EXPECT_EQ(RTCErrorType::OPERATION_ERROR_WITH_DATA, channel_->error().type()); EXPECT_EQ(RTCErrorDetailType::SCTP_FAILURE, channel_->error().error_detail()); - EXPECT_EQ( - static_cast(cricket::SctpErrorCauseCode::kProtocolViolation), - channel_->error().sctp_cause_code()); + EXPECT_EQ(static_cast(SctpErrorCauseCode::kProtocolViolation), + channel_->error().sctp_cause_code()); } class SctpSidAllocatorTest : public ::testing::Test { @@ -1025,10 +642,10 @@ class SctpSidAllocatorTest : public ::testing::Test { // Verifies that an even SCTP id is allocated for SSL_CLIENT and an odd id for // SSL_SERVER. TEST_F(SctpSidAllocatorTest, SctpIdAllocationBasedOnRole) { - EXPECT_EQ(allocator_.AllocateSid(rtc::SSL_SERVER), StreamId(1)); - EXPECT_EQ(allocator_.AllocateSid(rtc::SSL_CLIENT), StreamId(0)); - EXPECT_EQ(allocator_.AllocateSid(rtc::SSL_SERVER), StreamId(3)); - EXPECT_EQ(allocator_.AllocateSid(rtc::SSL_CLIENT), StreamId(2)); + EXPECT_EQ(allocator_.AllocateSid(SSL_SERVER), StreamId(1)); + EXPECT_EQ(allocator_.AllocateSid(SSL_CLIENT), StreamId(0)); + EXPECT_EQ(allocator_.AllocateSid(SSL_SERVER), StreamId(3)); + EXPECT_EQ(allocator_.AllocateSid(SSL_CLIENT), StreamId(2)); } // Verifies that SCTP ids of existing DataChannels are not reused. 
@@ -1036,14 +653,14 @@ TEST_F(SctpSidAllocatorTest, SctpIdAllocationNoReuse) { StreamId old_id(1); EXPECT_TRUE(allocator_.ReserveSid(old_id)); - StreamId new_id = allocator_.AllocateSid(rtc::SSL_SERVER); - EXPECT_TRUE(new_id.HasValue()); + std::optional new_id = allocator_.AllocateSid(SSL_SERVER); + EXPECT_TRUE(new_id.has_value()); EXPECT_NE(old_id, new_id); old_id = StreamId(0); EXPECT_TRUE(allocator_.ReserveSid(old_id)); - new_id = allocator_.AllocateSid(rtc::SSL_CLIENT); - EXPECT_TRUE(new_id.HasValue()); + new_id = allocator_.AllocateSid(SSL_CLIENT); + EXPECT_TRUE(new_id.has_value()); EXPECT_NE(old_id, new_id); } @@ -1054,34 +671,34 @@ TEST_F(SctpSidAllocatorTest, SctpIdReusedForRemovedDataChannel) { EXPECT_TRUE(allocator_.ReserveSid(odd_id)); EXPECT_TRUE(allocator_.ReserveSid(even_id)); - StreamId allocated_id = allocator_.AllocateSid(rtc::SSL_SERVER); - EXPECT_EQ(odd_id.stream_id_int() + 2, allocated_id.stream_id_int()); + std::optional allocated_id = allocator_.AllocateSid(SSL_SERVER); + EXPECT_EQ(odd_id.stream_id_int() + 2, allocated_id->stream_id_int()); - allocated_id = allocator_.AllocateSid(rtc::SSL_CLIENT); - EXPECT_EQ(even_id.stream_id_int() + 2, allocated_id.stream_id_int()); + allocated_id = allocator_.AllocateSid(SSL_CLIENT); + EXPECT_EQ(even_id.stream_id_int() + 2, allocated_id->stream_id_int()); - allocated_id = allocator_.AllocateSid(rtc::SSL_SERVER); - EXPECT_EQ(odd_id.stream_id_int() + 4, allocated_id.stream_id_int()); + allocated_id = allocator_.AllocateSid(SSL_SERVER); + EXPECT_EQ(odd_id.stream_id_int() + 4, allocated_id->stream_id_int()); - allocated_id = allocator_.AllocateSid(rtc::SSL_CLIENT); - EXPECT_EQ(even_id.stream_id_int() + 4, allocated_id.stream_id_int()); + allocated_id = allocator_.AllocateSid(SSL_CLIENT); + EXPECT_EQ(even_id.stream_id_int() + 4, allocated_id->stream_id_int()); allocator_.ReleaseSid(odd_id); allocator_.ReleaseSid(even_id); // Verifies that removed ids are reused. - allocated_id = allocator_.AllocateSid(rtc::SSL_SERVER); + allocated_id = allocator_.AllocateSid(SSL_SERVER); EXPECT_EQ(odd_id, allocated_id); - allocated_id = allocator_.AllocateSid(rtc::SSL_CLIENT); + allocated_id = allocator_.AllocateSid(SSL_CLIENT); EXPECT_EQ(even_id, allocated_id); // Verifies that used higher ids are not reused. - allocated_id = allocator_.AllocateSid(rtc::SSL_SERVER); - EXPECT_EQ(odd_id.stream_id_int() + 6, allocated_id.stream_id_int()); + allocated_id = allocator_.AllocateSid(SSL_SERVER); + EXPECT_EQ(odd_id.stream_id_int() + 6, allocated_id->stream_id_int()); - allocated_id = allocator_.AllocateSid(rtc::SSL_CLIENT); - EXPECT_EQ(even_id.stream_id_int() + 6, allocated_id.stream_id_int()); + allocated_id = allocator_.AllocateSid(SSL_CLIENT); + EXPECT_EQ(even_id.stream_id_int() + 6, allocated_id->stream_id_int()); } // Code coverage tests for default implementations in data_channel_interface.*. 
@@ -1119,10 +736,8 @@ class NoImplObserver : public DataChannelObserver { } // namespace TEST(DataChannelInterfaceTest, Coverage) { - auto channel = rtc::make_ref_counted(); + auto channel = make_ref_counted(); EXPECT_FALSE(channel->ordered()); - EXPECT_EQ(channel->maxRetransmitTime(), 0u); - EXPECT_EQ(channel->maxRetransmits(), 0u); EXPECT_FALSE(channel->maxRetransmitsOpt()); EXPECT_FALSE(channel->maxPacketLifeTime()); EXPECT_TRUE(channel->protocol().empty()); @@ -1137,12 +752,12 @@ TEST(DataChannelInterfaceTest, Coverage) { #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) TEST(DataChannelInterfaceDeathTest, SendDefaultImplDchecks) { - auto channel = rtc::make_ref_counted(); + auto channel = webrtc::make_ref_counted(); RTC_EXPECT_DEATH(channel->Send(DataBuffer("Foo")), "Check failed: false"); } TEST(DataChannelInterfaceDeathTest, SendAsyncDefaultImplDchecks) { - auto channel = rtc::make_ref_counted(); + auto channel = webrtc::make_ref_counted(); RTC_EXPECT_DEATH(channel->SendAsync(DataBuffer("Foo"), nullptr), "Check failed: false"); } diff --git a/pc/data_channel_utils.cc b/pc/data_channel_utils.cc index a772241c3e..39a14b6b54 100644 --- a/pc/data_channel_utils.cc +++ b/pc/data_channel_utils.cc @@ -10,8 +10,11 @@ #include "pc/data_channel_utils.h" +#include +#include #include +#include "api/data_channel_interface.h" #include "rtc_base/checks.h" namespace webrtc { diff --git a/pc/data_channel_utils.h b/pc/data_channel_utils.h index 8681ba4657..2558fcd6d4 100644 --- a/pc/data_channel_utils.h +++ b/pc/data_channel_utils.h @@ -17,10 +17,8 @@ #include #include #include -#include #include "api/data_channel_interface.h" -#include "media/base/media_engine.h" namespace webrtc { diff --git a/pc/dtls_srtp_transport.cc b/pc/dtls_srtp_transport.cc index d28285dc8d..10a52910da 100644 --- a/pc/dtls_srtp_transport.cc +++ b/pc/dtls_srtp_transport.cc @@ -10,21 +10,22 @@ #include "pc/dtls_srtp_transport.h" -#include - +#include +#include #include #include +#include #include "api/dtls_transport_interface.h" +#include "api/field_trials_view.h" +#include "p2p/base/packet_transport_internal.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "pc/srtp_transport.h" +#include "rtc_base/buffer.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/ssl_stream_adapter.h" -namespace { -// Value specified in RFC 5764. -static const char kDtlsSrtpExporterLabel[] = "EXTRACTOR-dtls_srtp"; -} // namespace - namespace webrtc { DtlsSrtpTransport::DtlsSrtpTransport(bool rtcp_mux_enabled, @@ -32,8 +33,8 @@ DtlsSrtpTransport::DtlsSrtpTransport(bool rtcp_mux_enabled, : SrtpTransport(rtcp_mux_enabled, field_trials) {} void DtlsSrtpTransport::SetDtlsTransports( - cricket::DtlsTransportInternal* rtp_dtls_transport, - cricket::DtlsTransportInternal* rtcp_dtls_transport) { + DtlsTransportInternal* rtp_dtls_transport, + DtlsTransportInternal* rtcp_dtls_transport) { // Transport names should be the same. 
if (rtp_dtls_transport && rtcp_dtls_transport) { RTC_DCHECK(rtp_dtls_transport->transport_name() == @@ -160,15 +161,13 @@ void DtlsSrtpTransport::SetupRtpDtlsSrtp() { } int selected_crypto_suite; - rtc::ZeroOnFreeBuffer send_key; - rtc::ZeroOnFreeBuffer recv_key; + ZeroOnFreeBuffer send_key; + ZeroOnFreeBuffer recv_key; if (!ExtractParams(rtp_dtls_transport_, &selected_crypto_suite, &send_key, &recv_key) || - !SetRtpParams(selected_crypto_suite, &send_key[0], - static_cast(send_key.size()), send_extension_ids, - selected_crypto_suite, &recv_key[0], - static_cast(recv_key.size()), recv_extension_ids)) { + !SetRtpParams(selected_crypto_suite, send_key, send_extension_ids, + selected_crypto_suite, recv_key, recv_extension_ids)) { RTC_LOG(LS_WARNING) << "DTLS-SRTP key installation for RTP failed"; } } @@ -191,24 +190,21 @@ void DtlsSrtpTransport::SetupRtcpDtlsSrtp() { } int selected_crypto_suite; - rtc::ZeroOnFreeBuffer rtcp_send_key; - rtc::ZeroOnFreeBuffer rtcp_recv_key; + ZeroOnFreeBuffer rtcp_send_key; + ZeroOnFreeBuffer rtcp_recv_key; if (!ExtractParams(rtcp_dtls_transport_, &selected_crypto_suite, &rtcp_send_key, &rtcp_recv_key) || - !SetRtcpParams(selected_crypto_suite, &rtcp_send_key[0], - static_cast(rtcp_send_key.size()), send_extension_ids, - selected_crypto_suite, &rtcp_recv_key[0], - static_cast(rtcp_recv_key.size()), + !SetRtcpParams(selected_crypto_suite, rtcp_send_key, send_extension_ids, + selected_crypto_suite, rtcp_recv_key, recv_extension_ids)) { RTC_LOG(LS_WARNING) << "DTLS-SRTP key installation for RTCP failed"; } } -bool DtlsSrtpTransport::ExtractParams( - cricket::DtlsTransportInternal* dtls_transport, - int* selected_crypto_suite, - rtc::ZeroOnFreeBuffer* send_key, - rtc::ZeroOnFreeBuffer* recv_key) { +bool DtlsSrtpTransport::ExtractParams(DtlsTransportInternal* dtls_transport, + int* selected_crypto_suite, + ZeroOnFreeBuffer* send_key, + ZeroOnFreeBuffer* recv_key) { if (!dtls_transport || !dtls_transport->IsDtlsActive()) { return false; } @@ -223,44 +219,42 @@ bool DtlsSrtpTransport::ExtractParams( int key_len; int salt_len; - if (!rtc::GetSrtpKeyAndSaltLengths((*selected_crypto_suite), &key_len, - &salt_len)) { + if (!GetSrtpKeyAndSaltLengths((*selected_crypto_suite), &key_len, + &salt_len)) { RTC_LOG(LS_ERROR) << "Unknown DTLS-SRTP crypto suite" << selected_crypto_suite; return false; } // OK, we're now doing DTLS (RFC 5764) - rtc::ZeroOnFreeBuffer dtls_buffer(key_len * 2 + salt_len * 2); + ZeroOnFreeBuffer dtls_buffer(key_len * 2 + salt_len * 2); // RFC 5705 exporter using the RFC 5764 parameters - if (!dtls_transport->ExportKeyingMaterial(kDtlsSrtpExporterLabel, NULL, 0, - false, &dtls_buffer[0], - dtls_buffer.size())) { - RTC_LOG(LS_WARNING) << "DTLS-SRTP key export failed"; + if (!dtls_transport->ExportSrtpKeyingMaterial(dtls_buffer)) { + RTC_LOG(LS_ERROR) << "DTLS-SRTP key export failed"; RTC_DCHECK_NOTREACHED(); // This should never happen return false; } // Sync up the keys with the DTLS-SRTP interface - rtc::ZeroOnFreeBuffer client_write_key(key_len + salt_len); - rtc::ZeroOnFreeBuffer server_write_key(key_len + salt_len); - size_t offset = 0; - memcpy(&client_write_key[0], &dtls_buffer[offset], key_len); - offset += key_len; - memcpy(&server_write_key[0], &dtls_buffer[offset], key_len); - offset += key_len; - memcpy(&client_write_key[key_len], &dtls_buffer[offset], salt_len); - offset += salt_len; - memcpy(&server_write_key[key_len], &dtls_buffer[offset], salt_len); - - rtc::SSLRole role; + // https://datatracker.ietf.org/doc/html/rfc5764#section-4.2 + // 
The keying material is in the format: + // client_write_key|server_write_key|client_write_salt|server_write_salt + ZeroOnFreeBuffer client_write_key(&dtls_buffer[0], key_len, + key_len + salt_len); + ZeroOnFreeBuffer server_write_key(&dtls_buffer[key_len], key_len, + key_len + salt_len); + client_write_key.AppendData(&dtls_buffer[key_len + key_len], salt_len); + server_write_key.AppendData(&dtls_buffer[key_len + key_len + salt_len], + salt_len); + + SSLRole role; if (!dtls_transport->GetDtlsRole(&role)) { RTC_LOG(LS_WARNING) << "Failed to get the DTLS role."; return false; } - if (role == rtc::SSL_SERVER) { + if (role == SSL_SERVER) { *send_key = std::move(server_write_key); *recv_key = std::move(client_write_key); } else { @@ -271,8 +265,8 @@ bool DtlsSrtpTransport::ExtractParams( } void DtlsSrtpTransport::SetDtlsTransport( - cricket::DtlsTransportInternal* new_dtls_transport, - cricket::DtlsTransportInternal** old_dtls_transport) { + DtlsTransportInternal* new_dtls_transport, + DtlsTransportInternal** old_dtls_transport) { if (*old_dtls_transport == new_dtls_transport) { return; } @@ -286,22 +280,23 @@ void DtlsSrtpTransport::SetDtlsTransport( if (new_dtls_transport) { new_dtls_transport->SubscribeDtlsTransportState( this, - [this](cricket::DtlsTransportInternal* transport, - DtlsTransportState state) { OnDtlsState(transport, state); }); + [this](DtlsTransportInternal* transport, DtlsTransportState state) { + OnDtlsState(transport, state); + }); } } void DtlsSrtpTransport::SetRtpDtlsTransport( - cricket::DtlsTransportInternal* rtp_dtls_transport) { + DtlsTransportInternal* rtp_dtls_transport) { SetDtlsTransport(rtp_dtls_transport, &rtp_dtls_transport_); } void DtlsSrtpTransport::SetRtcpDtlsTransport( - cricket::DtlsTransportInternal* rtcp_dtls_transport) { + DtlsTransportInternal* rtcp_dtls_transport) { SetDtlsTransport(rtcp_dtls_transport, &rtcp_dtls_transport_); } -void DtlsSrtpTransport::OnDtlsState(cricket::DtlsTransportInternal* transport, +void DtlsSrtpTransport::OnDtlsState(DtlsTransportInternal* transport, DtlsTransportState state) { RTC_DCHECK(transport == rtp_dtls_transport_ || transport == rtcp_dtls_transport_); @@ -319,7 +314,7 @@ void DtlsSrtpTransport::OnDtlsState(cricket::DtlsTransportInternal* transport, } void DtlsSrtpTransport::OnWritableState( - rtc::PacketTransportInternal* packet_transport) { + PacketTransportInternal* packet_transport) { MaybeSetupDtlsSrtp(); } diff --git a/pc/dtls_srtp_transport.h b/pc/dtls_srtp_transport.h index 0f8338ca0d..a5592b2256 100644 --- a/pc/dtls_srtp_transport.h +++ b/pc/dtls_srtp_transport.h @@ -11,16 +11,15 @@ #ifndef PC_DTLS_SRTP_TRANSPORT_H_ #define PC_DTLS_SRTP_TRANSPORT_H_ +#include #include -#include +#include #include -#include "absl/types/optional.h" -#include "api/crypto_params.h" #include "api/dtls_transport_interface.h" -#include "api/rtc_error.h" -#include "p2p/base/dtls_transport_internal.h" +#include "api/field_trials_view.h" #include "p2p/base/packet_transport_internal.h" +#include "p2p/dtls/dtls_transport_internal.h" #include "pc/srtp_transport.h" #include "rtc_base/buffer.h" @@ -35,8 +34,8 @@ class DtlsSrtpTransport : public SrtpTransport { // Set P2P layer RTP/RTCP DtlsTransports. When using RTCP-muxing, // `rtcp_dtls_transport` is null. 
- void SetDtlsTransports(cricket::DtlsTransportInternal* rtp_dtls_transport, - cricket::DtlsTransportInternal* rtcp_dtls_transport); + void SetDtlsTransports(DtlsTransportInternal* rtp_dtls_transport, + DtlsTransportInternal* rtcp_dtls_transport); void SetRtcpMuxEnabled(bool enable) override; @@ -49,15 +48,6 @@ class DtlsSrtpTransport : public SrtpTransport { void SetOnDtlsStateChange(std::function callback); - RTCError SetSrtpSendKey(const cricket::CryptoParams& params) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, - "Set SRTP keys for DTLS-SRTP is not supported."); - } - RTCError SetSrtpReceiveKey(const cricket::CryptoParams& params) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, - "Set SRTP keys for DTLS-SRTP is not supported."); - } - // If `active_reset_srtp_params_` is set to be true, the SRTP parameters will // be reset whenever the DtlsTransports are reset. void SetActiveResetSrtpParams(bool active_reset_srtp_params) { @@ -72,29 +62,28 @@ class DtlsSrtpTransport : public SrtpTransport { void MaybeSetupDtlsSrtp(); void SetupRtpDtlsSrtp(); void SetupRtcpDtlsSrtp(); - bool ExtractParams(cricket::DtlsTransportInternal* dtls_transport, + bool ExtractParams(DtlsTransportInternal* dtls_transport, int* selected_crypto_suite, - rtc::ZeroOnFreeBuffer* send_key, - rtc::ZeroOnFreeBuffer* recv_key); - void SetDtlsTransport(cricket::DtlsTransportInternal* new_dtls_transport, - cricket::DtlsTransportInternal** old_dtls_transport); - void SetRtpDtlsTransport(cricket::DtlsTransportInternal* rtp_dtls_transport); - void SetRtcpDtlsTransport( - cricket::DtlsTransportInternal* rtcp_dtls_transport); - - void OnDtlsState(cricket::DtlsTransportInternal* dtls_transport, + ZeroOnFreeBuffer* send_key, + ZeroOnFreeBuffer* recv_key); + void SetDtlsTransport(DtlsTransportInternal* new_dtls_transport, + DtlsTransportInternal** old_dtls_transport); + void SetRtpDtlsTransport(DtlsTransportInternal* rtp_dtls_transport); + void SetRtcpDtlsTransport(DtlsTransportInternal* rtcp_dtls_transport); + + void OnDtlsState(DtlsTransportInternal* dtls_transport, DtlsTransportState state); // Override the SrtpTransport::OnWritableState. - void OnWritableState(rtc::PacketTransportInternal* packet_transport) override; + void OnWritableState(PacketTransportInternal* packet_transport) override; // Owned by the TransportController. - cricket::DtlsTransportInternal* rtp_dtls_transport_ = nullptr; - cricket::DtlsTransportInternal* rtcp_dtls_transport_ = nullptr; + DtlsTransportInternal* rtp_dtls_transport_ = nullptr; + DtlsTransportInternal* rtcp_dtls_transport_ = nullptr; // The encrypted header extension IDs. - absl::optional> send_extension_ids_; - absl::optional> recv_extension_ids_; + std::optional> send_extension_ids_; + std::optional> recv_extension_ids_; bool active_reset_srtp_params_ = false; std::function on_dtls_state_change_; diff --git a/pc/dtls_srtp_transport_integrationtest.cc b/pc/dtls_srtp_transport_integrationtest.cc new file mode 100644 index 0000000000..1b4990b978 --- /dev/null +++ b/pc/dtls_srtp_transport_integrationtest.cc @@ -0,0 +1,250 @@ +/* + * Copyright 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include +#include +#include +#include +#include + +#include "absl/strings/str_cat.h" +#include "api/crypto/crypto_options.h" +#include "api/dtls_transport_interface.h" +#include "api/scoped_refptr.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" +#include "call/rtp_demuxer.h" +#include "media/base/fake_rtp.h" +#include "p2p/base/transport_description.h" +#include "p2p/dtls/dtls_transport.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "p2p/test/fake_ice_transport.h" +#include "pc/dtls_srtp_transport.h" +#include "pc/srtp_transport.h" +#include "pc/test/rtp_transport_test_util.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/buffer.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/fake_clock.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_fingerprint.h" +#include "rtc_base/ssl_identity.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/thread.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/scoped_key_value_config.h" +#include "test/wait_until.h" + +const int kRtpAuthTagLen = 10; +static const int kTimeout = 10000; + +/* A test using a DTLS-SRTP transport on one side and + * SrtpTransport+DtlsTransport on the other side, connected by a + * FakeIceTransport. + */ +class DtlsSrtpTransportIntegrationTest : public ::testing::Test { + protected: + DtlsSrtpTransportIntegrationTest() + : client_ice_transport_(MakeIceTransport(webrtc::ICEROLE_CONTROLLING)), + server_ice_transport_(MakeIceTransport(webrtc::ICEROLE_CONTROLLED)), + client_dtls_transport_(MakeDtlsTransport(client_ice_transport_.get())), + server_dtls_transport_(MakeDtlsTransport(server_ice_transport_.get())), + client_certificate_(MakeCertificate()), + server_certificate_(MakeCertificate()), + dtls_srtp_transport_(false, field_trials_), + srtp_transport_(false, field_trials_) { + dtls_srtp_transport_.SetDtlsTransports(server_dtls_transport_.get(), + nullptr); + srtp_transport_.SetRtpPacketTransport(client_ice_transport_.get()); + + webrtc::RtpDemuxerCriteria demuxer_criteria; + demuxer_criteria.payload_types() = {0x00}; + dtls_srtp_transport_.RegisterRtpDemuxerSink(demuxer_criteria, + &dtls_srtp_transport_observer_); + srtp_transport_.RegisterRtpDemuxerSink(demuxer_criteria, + &srtp_transport_observer_); + } + ~DtlsSrtpTransportIntegrationTest() { + dtls_srtp_transport_.UnregisterRtpDemuxerSink( + &dtls_srtp_transport_observer_); + srtp_transport_.UnregisterRtpDemuxerSink(&srtp_transport_observer_); + } + + webrtc::scoped_refptr MakeCertificate() { + return webrtc::RTCCertificate::Create( + webrtc::SSLIdentity::Create("test", webrtc::KT_DEFAULT)); + } + std::unique_ptr MakeIceTransport( + webrtc::IceRole role) { + auto ice_transport = std::make_unique( + "fake_" + absl::StrCat(static_cast(role)), 0); + ice_transport->SetAsync(true); + ice_transport->SetAsyncDelay(0); + ice_transport->SetIceRole(role); + return ice_transport; + } + + std::unique_ptr MakeDtlsTransport( + webrtc::FakeIceTransport* ice_transport) { + return std::make_unique( + ice_transport, webrtc::CryptoOptions(), + /*event_log=*/nullptr, webrtc::SSL_PROTOCOL_DTLS_12); + } + void SetRemoteFingerprintFromCert( + webrtc::DtlsTransportInternalImpl* transport, + const webrtc::scoped_refptr& cert) { + std::unique_ptr fingerprint = + webrtc::SSLFingerprint::CreateFromCertificate(*cert); + + transport->SetRemoteParameters( + fingerprint->algorithm, + reinterpret_cast(fingerprint->digest.data()), + fingerprint->digest.size(), std::nullopt); + 
} + + void Connect() { + client_dtls_transport_->SetLocalCertificate(client_certificate_); + client_dtls_transport_->SetDtlsRole(webrtc::SSL_SERVER); + server_dtls_transport_->SetLocalCertificate(server_certificate_); + server_dtls_transport_->SetDtlsRole(webrtc::SSL_CLIENT); + + SetRemoteFingerprintFromCert(server_dtls_transport_.get(), + client_certificate_); + SetRemoteFingerprintFromCert(client_dtls_transport_.get(), + server_certificate_); + + // Wire up the ICE and transport. + client_ice_transport_->SetDestination(server_ice_transport_.get()); + + // Wait for the DTLS connection to be up. + EXPECT_THAT(webrtc::WaitUntil( + [&] { + return client_dtls_transport_->writable() && + server_dtls_transport_->writable(); + }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); + EXPECT_EQ(client_dtls_transport_->dtls_state(), + webrtc::DtlsTransportState::kConnected); + EXPECT_EQ(server_dtls_transport_->dtls_state(), + webrtc::DtlsTransportState::kConnected); + } + void SetupClientKeysManually() { + // Setup the client-side SRTP transport with the keys from the server DTLS + // transport. + int selected_crypto_suite; + ASSERT_TRUE( + server_dtls_transport_->GetSrtpCryptoSuite(&selected_crypto_suite)); + int key_len; + int salt_len; + ASSERT_TRUE(webrtc::GetSrtpKeyAndSaltLengths((selected_crypto_suite), + &key_len, &salt_len)); + + // Extract the keys. The order depends on the role! + webrtc::ZeroOnFreeBuffer dtls_buffer(key_len * 2 + salt_len * 2); + ASSERT_TRUE(server_dtls_transport_->ExportSrtpKeyingMaterial(dtls_buffer)); + + webrtc::ZeroOnFreeBuffer client_write_key( + &dtls_buffer[0], key_len, key_len + salt_len); + webrtc::ZeroOnFreeBuffer server_write_key( + &dtls_buffer[key_len], key_len, key_len + salt_len); + client_write_key.AppendData(&dtls_buffer[key_len + key_len], salt_len); + server_write_key.AppendData(&dtls_buffer[key_len + key_len + salt_len], + salt_len); + + EXPECT_TRUE(srtp_transport_.SetRtpParams( + selected_crypto_suite, server_write_key, {}, selected_crypto_suite, + client_write_key, {})); + } + + webrtc::CopyOnWriteBuffer CreateRtpPacket() { + size_t rtp_len = sizeof(kPcmuFrame); + size_t packet_size = rtp_len + kRtpAuthTagLen; + webrtc::Buffer rtp_packet_buffer(packet_size); + char* rtp_packet_data = rtp_packet_buffer.data(); + memcpy(rtp_packet_data, kPcmuFrame, rtp_len); + + return {rtp_packet_data, rtp_len, packet_size}; + } + + void SendRtpPacketFromSrtpToDtlsSrtp() { + webrtc::AsyncSocketPacketOptions options; + webrtc::CopyOnWriteBuffer packet = CreateRtpPacket(); + + EXPECT_TRUE(srtp_transport_.SendRtpPacket(&packet, options, + webrtc::PF_SRTP_BYPASS)); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return dtls_srtp_transport_observer_.rtp_count(); }, + ::testing::Eq(1), + {.timeout = webrtc::TimeDelta::Millis(kTimeout), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); + EXPECT_EQ(1, dtls_srtp_transport_observer_.rtp_count()); + ASSERT_TRUE(dtls_srtp_transport_observer_.last_recv_rtp_packet().data()); + EXPECT_EQ( + 0, + std::memcmp(dtls_srtp_transport_observer_.last_recv_rtp_packet().data(), + kPcmuFrame, sizeof(kPcmuFrame))); + } + + void SendRtpPacketFromDtlsSrtpToSrtp() { + webrtc::AsyncSocketPacketOptions options; + webrtc::CopyOnWriteBuffer packet = CreateRtpPacket(); + + EXPECT_TRUE(dtls_srtp_transport_.SendRtpPacket(&packet, options, + webrtc::PF_SRTP_BYPASS)); + EXPECT_THAT( + webrtc::WaitUntil([&] { return srtp_transport_observer_.rtp_count(); }, + ::testing::Eq(1), + {.timeout = 
webrtc::TimeDelta::Millis(kTimeout), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); + EXPECT_EQ(1, srtp_transport_observer_.rtp_count()); + ASSERT_TRUE(srtp_transport_observer_.last_recv_rtp_packet().data()); + EXPECT_EQ( + 0, std::memcmp(srtp_transport_observer_.last_recv_rtp_packet().data(), + kPcmuFrame, sizeof(kPcmuFrame))); + } + + private: + webrtc::AutoThread main_thread_; + webrtc::ScopedFakeClock fake_clock_; + webrtc::test::ScopedKeyValueConfig field_trials_; + + std::unique_ptr client_ice_transport_; + std::unique_ptr server_ice_transport_; + + std::unique_ptr client_dtls_transport_; + std::unique_ptr server_dtls_transport_; + + webrtc::scoped_refptr client_certificate_; + webrtc::scoped_refptr server_certificate_; + + webrtc::DtlsSrtpTransport dtls_srtp_transport_; + webrtc::SrtpTransport srtp_transport_; + + webrtc::TransportObserver dtls_srtp_transport_observer_; + webrtc::TransportObserver srtp_transport_observer_; +}; + +TEST_F(DtlsSrtpTransportIntegrationTest, SendRtpFromSrtpToDtlsSrtp) { + Connect(); + SetupClientKeysManually(); + SendRtpPacketFromSrtpToDtlsSrtp(); +} + +TEST_F(DtlsSrtpTransportIntegrationTest, SendRtpFromDtlsSrtpToSrtp) { + Connect(); + SetupClientKeysManually(); + SendRtpPacketFromDtlsSrtpToSrtp(); +} diff --git a/pc/dtls_srtp_transport_unittest.cc b/pc/dtls_srtp_transport_unittest.cc index bf0676c324..19499ab8d0 100644 --- a/pc/dtls_srtp_transport_unittest.cc +++ b/pc/dtls_srtp_transport_unittest.cc @@ -14,28 +14,32 @@ #include #include +#include #include "call/rtp_demuxer.h" #include "media/base/fake_rtp.h" -#include "p2p/base/dtls_transport_internal.h" -#include "p2p/base/fake_dtls_transport.h" -#include "p2p/base/fake_ice_transport.h" #include "p2p/base/p2p_constants.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "p2p/dtls/fake_dtls_transport.h" +#include "p2p/test/fake_ice_transport.h" #include "pc/rtp_transport.h" +#include "pc/srtp_transport.h" #include "pc/test/rtp_transport_test_util.h" #include "rtc_base/async_packet_socket.h" +#include "rtc_base/buffer.h" #include "rtc_base/byte_order.h" #include "rtc_base/containers/flat_set.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" #include "test/gtest.h" #include "test/scoped_key_value_config.h" -using cricket::FakeDtlsTransport; -using cricket::FakeIceTransport; using webrtc::DtlsSrtpTransport; +using ::webrtc::FakeDtlsTransport; +using ::webrtc::FakeIceTransport; using webrtc::RtpTransport; using webrtc::SrtpTransport; @@ -79,7 +83,7 @@ class DtlsSrtpTransportTest : public ::testing::Test, dtls_srtp_transport1_->SubscribeRtcpPacketReceived( &transport_observer1_, - [this](rtc::CopyOnWriteBuffer* buffer, int64_t packet_time_ms) { + [this](webrtc::CopyOnWriteBuffer* buffer, int64_t packet_time_ms) { transport_observer1_.OnRtcpPacketReceived(buffer, packet_time_ms); }); dtls_srtp_transport1_->SubscribeReadyToSend( @@ -88,7 +92,7 @@ class DtlsSrtpTransportTest : public ::testing::Test, dtls_srtp_transport2_->SubscribeRtcpPacketReceived( &transport_observer2_, - [this](rtc::CopyOnWriteBuffer* buffer, int64_t packet_time_ms) { + [this](webrtc::CopyOnWriteBuffer* buffer, int64_t packet_time_ms) { transport_observer2_.OnRtcpPacketReceived(buffer, packet_time_ms); }); dtls_srtp_transport2_->SubscribeReadyToSend( @@ -105,11 +109,11 @@ class DtlsSrtpTransportTest : public ::testing::Test, void CompleteDtlsHandshake(FakeDtlsTransport* fake_dtls1, 
FakeDtlsTransport* fake_dtls2) { - auto cert1 = rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT)); + auto cert1 = webrtc::RTCCertificate::Create( + webrtc::SSLIdentity::Create("session1", webrtc::KT_DEFAULT)); fake_dtls1->SetLocalCertificate(cert1); - auto cert2 = rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT)); + auto cert2 = webrtc::RTCCertificate::Create( + webrtc::SSLIdentity::Create("session1", webrtc::KT_DEFAULT)); fake_dtls2->SetLocalCertificate(cert2); fake_dtls1->SetDestination(fake_dtls2); } @@ -122,24 +126,24 @@ class DtlsSrtpTransportTest : public ::testing::Test, size_t rtp_len = sizeof(kPcmuFrame); size_t packet_size = rtp_len + kRtpAuthTagLen; - rtc::Buffer rtp_packet_buffer(packet_size); + webrtc::Buffer rtp_packet_buffer(packet_size); char* rtp_packet_data = rtp_packet_buffer.data(); memcpy(rtp_packet_data, kPcmuFrame, rtp_len); // In order to be able to run this test function multiple times we can not // use the same sequence number twice. Increase the sequence number by one. - rtc::SetBE16(reinterpret_cast(rtp_packet_data) + 2, - ++sequence_number_); - rtc::CopyOnWriteBuffer rtp_packet1to2(rtp_packet_data, rtp_len, - packet_size); - rtc::CopyOnWriteBuffer rtp_packet2to1(rtp_packet_data, rtp_len, - packet_size); - - rtc::PacketOptions options; + webrtc::SetBE16(reinterpret_cast(rtp_packet_data) + 2, + ++sequence_number_); + webrtc::CopyOnWriteBuffer rtp_packet1to2(rtp_packet_data, rtp_len, + packet_size); + webrtc::CopyOnWriteBuffer rtp_packet2to1(rtp_packet_data, rtp_len, + packet_size); + + webrtc::AsyncSocketPacketOptions options; // Send a packet from `srtp_transport1_` to `srtp_transport2_` and verify // that the packet can be successfully received and decrypted. int prev_received_packets = transport_observer2_.rtp_count(); ASSERT_TRUE(dtls_srtp_transport1_->SendRtpPacket(&rtp_packet1to2, options, - cricket::PF_SRTP_BYPASS)); + webrtc::PF_SRTP_BYPASS)); ASSERT_TRUE(transport_observer2_.last_recv_rtp_packet().data()); EXPECT_EQ(0, memcmp(transport_observer2_.last_recv_rtp_packet().data(), kPcmuFrame, rtp_len)); @@ -147,7 +151,7 @@ class DtlsSrtpTransportTest : public ::testing::Test, prev_received_packets = transport_observer1_.rtp_count(); ASSERT_TRUE(dtls_srtp_transport2_->SendRtpPacket(&rtp_packet2to1, options, - cricket::PF_SRTP_BYPASS)); + webrtc::PF_SRTP_BYPASS)); ASSERT_TRUE(transport_observer1_.last_recv_rtp_packet().data()); EXPECT_EQ(0, memcmp(transport_observer1_.last_recv_rtp_packet().data(), kPcmuFrame, rtp_len)); @@ -157,19 +161,21 @@ class DtlsSrtpTransportTest : public ::testing::Test, void SendRecvRtcpPackets() { size_t rtcp_len = sizeof(kRtcpReport); size_t packet_size = rtcp_len + 4 + kRtpAuthTagLen; - rtc::Buffer rtcp_packet_buffer(packet_size); + webrtc::Buffer rtcp_packet_buffer(packet_size); // TODO(zhihuang): Remove the extra copy when the SendRtpPacket method // doesn't take the CopyOnWriteBuffer by pointer. - rtc::CopyOnWriteBuffer rtcp_packet1to2(kRtcpReport, rtcp_len, packet_size); - rtc::CopyOnWriteBuffer rtcp_packet2to1(kRtcpReport, rtcp_len, packet_size); + webrtc::CopyOnWriteBuffer rtcp_packet1to2(kRtcpReport, rtcp_len, + packet_size); + webrtc::CopyOnWriteBuffer rtcp_packet2to1(kRtcpReport, rtcp_len, + packet_size); - rtc::PacketOptions options; + webrtc::AsyncSocketPacketOptions options; // Send a packet from `srtp_transport1_` to `srtp_transport2_` and verify // that the packet can be successfully received and decrypted. 
int prev_received_packets = transport_observer2_.rtcp_count(); ASSERT_TRUE(dtls_srtp_transport1_->SendRtcpPacket(&rtcp_packet1to2, options, - cricket::PF_SRTP_BYPASS)); + webrtc::PF_SRTP_BYPASS)); ASSERT_TRUE(transport_observer2_.last_recv_rtcp_packet().data()); EXPECT_EQ(0, memcmp(transport_observer2_.last_recv_rtcp_packet().data(), kRtcpReport, rtcp_len)); @@ -178,7 +184,7 @@ class DtlsSrtpTransportTest : public ::testing::Test, // Do the same thing in the opposite direction; prev_received_packets = transport_observer1_.rtcp_count(); ASSERT_TRUE(dtls_srtp_transport2_->SendRtcpPacket(&rtcp_packet2to1, options, - cricket::PF_SRTP_BYPASS)); + webrtc::PF_SRTP_BYPASS)); ASSERT_TRUE(transport_observer1_.last_recv_rtcp_packet().data()); EXPECT_EQ(0, memcmp(transport_observer1_.last_recv_rtcp_packet().data(), kRtcpReport, rtcp_len)); @@ -194,26 +200,26 @@ class DtlsSrtpTransportTest : public ::testing::Test, size_t rtp_len = sizeof(kPcmuFrameWithExtensions); size_t packet_size = rtp_len + kRtpAuthTagLen; - rtc::Buffer rtp_packet_buffer(packet_size); + webrtc::Buffer rtp_packet_buffer(packet_size); char* rtp_packet_data = rtp_packet_buffer.data(); memcpy(rtp_packet_data, kPcmuFrameWithExtensions, rtp_len); // In order to be able to run this test function multiple times we can not // use the same sequence number twice. Increase the sequence number by one. - rtc::SetBE16(reinterpret_cast(rtp_packet_data) + 2, - ++sequence_number_); - rtc::CopyOnWriteBuffer rtp_packet1to2(rtp_packet_data, rtp_len, - packet_size); - rtc::CopyOnWriteBuffer rtp_packet2to1(rtp_packet_data, rtp_len, - packet_size); + webrtc::SetBE16(reinterpret_cast(rtp_packet_data) + 2, + ++sequence_number_); + webrtc::CopyOnWriteBuffer rtp_packet1to2(rtp_packet_data, rtp_len, + packet_size); + webrtc::CopyOnWriteBuffer rtp_packet2to1(rtp_packet_data, rtp_len, + packet_size); char original_rtp_data[sizeof(kPcmuFrameWithExtensions)]; memcpy(original_rtp_data, rtp_packet_data, rtp_len); - rtc::PacketOptions options; + webrtc::AsyncSocketPacketOptions options; // Send a packet from `srtp_transport1_` to `srtp_transport2_` and verify // that the packet can be successfully received and decrypted. ASSERT_TRUE(dtls_srtp_transport1_->SendRtpPacket(&rtp_packet1to2, options, - cricket::PF_SRTP_BYPASS)); + webrtc::PF_SRTP_BYPASS)); ASSERT_TRUE(transport_observer2_.last_recv_rtp_packet().data()); EXPECT_EQ(0, memcmp(transport_observer2_.last_recv_rtp_packet().data(), original_rtp_data, rtp_len)); @@ -233,7 +239,7 @@ class DtlsSrtpTransportTest : public ::testing::Test, // Do the same thing in the opposite direction. ASSERT_TRUE(dtls_srtp_transport2_->SendRtpPacket(&rtp_packet2to1, options, - cricket::PF_SRTP_BYPASS)); + webrtc::PF_SRTP_BYPASS)); ASSERT_TRUE(transport_observer1_.last_recv_rtp_packet().data()); EXPECT_EQ(0, memcmp(transport_observer1_.last_recv_rtp_packet().data(), original_rtp_data, rtp_len)); @@ -257,7 +263,7 @@ class DtlsSrtpTransportTest : public ::testing::Test, SendRecvRtcpPackets(); } - rtc::AutoThread main_thread_; + webrtc::AutoThread main_thread_; std::unique_ptr dtls_srtp_transport1_; std::unique_ptr dtls_srtp_transport2_; webrtc::TransportObserver transport_observer1_; @@ -271,17 +277,17 @@ class DtlsSrtpTransportTest : public ::testing::Test, // transport finished the handshake, SRTP is set up. 
TEST_F(DtlsSrtpTransportTest, SetTransportsAfterHandshakeCompleteWithRtcpMux) { auto rtp_dtls1 = std::make_unique( - "video", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "video", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtp_dtls2 = std::make_unique( - "video", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "video", webrtc::ICE_CANDIDATE_COMPONENT_RTP); MakeDtlsSrtpTransports(rtp_dtls1.get(), nullptr, rtp_dtls2.get(), nullptr, /*rtcp_mux_enabled=*/true); auto rtp_dtls3 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtp_dtls4 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); CompleteDtlsHandshake(rtp_dtls3.get(), rtp_dtls4.get()); @@ -296,25 +302,25 @@ TEST_F(DtlsSrtpTransportTest, SetTransportsAfterHandshakeCompleteWithRtcpMux) { TEST_F(DtlsSrtpTransportTest, SetTransportsAfterHandshakeCompleteWithoutRtcpMux) { auto rtp_dtls1 = std::make_unique( - "video", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "video", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtcp_dtls1 = std::make_unique( - "video", cricket::ICE_CANDIDATE_COMPONENT_RTCP); + "video", webrtc::ICE_CANDIDATE_COMPONENT_RTCP); auto rtp_dtls2 = std::make_unique( - "video", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "video", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtcp_dtls2 = std::make_unique( - "video", cricket::ICE_CANDIDATE_COMPONENT_RTCP); + "video", webrtc::ICE_CANDIDATE_COMPONENT_RTCP); MakeDtlsSrtpTransports(rtp_dtls1.get(), rtcp_dtls1.get(), rtp_dtls2.get(), rtcp_dtls2.get(), /*rtcp_mux_enabled=*/false); auto rtp_dtls3 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtcp_dtls3 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTCP); auto rtp_dtls4 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtcp_dtls4 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTCP); CompleteDtlsHandshake(rtp_dtls3.get(), rtp_dtls4.get()); CompleteDtlsHandshake(rtcp_dtls3.get(), rtcp_dtls4.get()); @@ -328,13 +334,13 @@ TEST_F(DtlsSrtpTransportTest, // handshake is finished. 
TEST_F(DtlsSrtpTransportTest, SetTransportsBeforeHandshakeCompleteWithRtcpMux) { auto rtp_dtls1 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtcp_dtls1 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTCP); auto rtp_dtls2 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtcp_dtls2 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTCP); MakeDtlsSrtpTransports(rtp_dtls1.get(), rtcp_dtls1.get(), rtp_dtls2.get(), rtcp_dtls2.get(), @@ -351,13 +357,13 @@ TEST_F(DtlsSrtpTransportTest, SetTransportsBeforeHandshakeCompleteWithRtcpMux) { TEST_F(DtlsSrtpTransportTest, SetTransportsBeforeHandshakeCompleteWithoutRtcpMux) { auto rtp_dtls1 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtcp_dtls1 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTCP); auto rtp_dtls2 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtcp_dtls2 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTCP); MakeDtlsSrtpTransports(rtp_dtls1.get(), rtcp_dtls1.get(), rtp_dtls2.get(), rtcp_dtls2.get(), /*rtcp_mux_enabled=*/false); @@ -374,9 +380,9 @@ TEST_F(DtlsSrtpTransportTest, // complete. TEST_F(DtlsSrtpTransportTest, DtlsSrtpResetAfterDtlsTransportChange) { auto rtp_dtls1 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtp_dtls2 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); MakeDtlsSrtpTransports(rtp_dtls1.get(), nullptr, rtp_dtls2.get(), nullptr, /*rtcp_mux_enabled=*/true); @@ -386,9 +392,9 @@ TEST_F(DtlsSrtpTransportTest, DtlsSrtpResetAfterDtlsTransportChange) { EXPECT_TRUE(dtls_srtp_transport2_->IsSrtpActive()); auto rtp_dtls3 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtp_dtls4 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); // The previous context is reset. dtls_srtp_transport1_->SetDtlsTransports(rtp_dtls3.get(), nullptr); @@ -406,13 +412,13 @@ TEST_F(DtlsSrtpTransportTest, DtlsSrtpResetAfterDtlsTransportChange) { TEST_F(DtlsSrtpTransportTest, RtcpMuxEnabledAfterRtpTransportHandshakeComplete) { auto rtp_dtls1 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtcp_dtls1 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTCP); auto rtp_dtls2 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtcp_dtls2 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTCP); MakeDtlsSrtpTransports(rtp_dtls1.get(), rtcp_dtls1.get(), rtp_dtls2.get(), rtcp_dtls2.get(), /*rtcp_mux_enabled=*/false); @@ -433,9 +439,9 @@ TEST_F(DtlsSrtpTransportTest, // sessions are updated with new encryped header extension IDs immediately. 
TEST_F(DtlsSrtpTransportTest, EncryptedHeaderExtensionIdUpdated) { auto rtp_dtls1 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtp_dtls2 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); MakeDtlsSrtpTransports(rtp_dtls1.get(), nullptr, rtp_dtls2.get(), nullptr, /*rtcp_mux_enabled=*/true); @@ -459,9 +465,9 @@ TEST_F(DtlsSrtpTransportTest, EncryptedHeaderExtensionIdUpdated) { // RTP DtlsTransport is ready. TEST_F(DtlsSrtpTransportTest, SignalReadyToSendFiredWithRtcpMux) { auto rtp_dtls1 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtp_dtls2 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); MakeDtlsSrtpTransports(rtp_dtls1.get(), nullptr, rtp_dtls2.get(), nullptr, /*rtcp_mux_enabled=*/true); @@ -475,13 +481,13 @@ TEST_F(DtlsSrtpTransportTest, SignalReadyToSendFiredWithRtcpMux) { // both the RTP and RTCP DtlsTransport are ready. TEST_F(DtlsSrtpTransportTest, SignalReadyToSendFiredWithoutRtcpMux) { auto rtp_dtls1 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtcp_dtls1 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTCP); auto rtp_dtls2 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtcp_dtls2 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTCP); MakeDtlsSrtpTransports(rtp_dtls1.get(), rtcp_dtls1.get(), rtp_dtls2.get(), rtcp_dtls2.get(), /*rtcp_mux_enabled=*/false); @@ -502,13 +508,13 @@ TEST_F(DtlsSrtpTransportTest, SignalReadyToSendFiredWithoutRtcpMux) { // Regression test for bugs.webrtc.org/8996 TEST_F(DtlsSrtpTransportTest, SrtpSessionNotResetWhenRtcpTransportRemoved) { auto rtp_dtls1 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtcp_dtls1 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTCP); auto rtp_dtls2 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtcp_dtls2 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTCP); MakeDtlsSrtpTransports(rtp_dtls1.get(), rtcp_dtls1.get(), rtp_dtls2.get(), rtcp_dtls2.get(), /*rtcp_mux_enabled=*/true); @@ -531,13 +537,13 @@ TEST_F(DtlsSrtpTransportTest, SrtpSessionNotResetWhenRtcpTransportRemoved) { // the SRTP parameters with the `active_reset_srtp_params_` flag. 
TEST_F(DtlsSrtpTransportTest, ActivelyResetSrtpParams) { auto rtp_dtls1 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtcp_dtls1 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTCP); auto rtp_dtls2 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTP); auto rtcp_dtls2 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTCP); + "audio", webrtc::ICE_CANDIDATE_COMPONENT_RTCP); MakeDtlsSrtpTransports(rtp_dtls1.get(), rtcp_dtls1.get(), rtp_dtls2.get(), rtcp_dtls2.get(), /*rtcp_mux_enabled=*/true); @@ -556,11 +562,12 @@ TEST_F(DtlsSrtpTransportTest, ActivelyResetSrtpParams) { // Sending some RTCP packets. size_t rtcp_len = sizeof(kRtcpReport); size_t packet_size = rtcp_len + 4 + kRtpAuthTagLen; - rtc::Buffer rtcp_packet_buffer(packet_size); - rtc::CopyOnWriteBuffer rtcp_packet(kRtcpReport, rtcp_len, packet_size); + webrtc::Buffer rtcp_packet_buffer(packet_size); + webrtc::CopyOnWriteBuffer rtcp_packet(kRtcpReport, rtcp_len, packet_size); int prev_received_packets = transport_observer2_.rtcp_count(); ASSERT_TRUE(dtls_srtp_transport1_->SendRtcpPacket( - &rtcp_packet, rtc::PacketOptions(), cricket::PF_SRTP_BYPASS)); + &rtcp_packet, webrtc::AsyncSocketPacketOptions(), + webrtc::PF_SRTP_BYPASS)); // The RTCP packet is not exepected to be received because the SRTP parameters // are only reset on one side and the SRTCP index is out of sync. EXPECT_EQ(prev_received_packets, transport_observer2_.rtcp_count()); diff --git a/pc/dtls_transport.cc b/pc/dtls_transport.cc index 15eed9e47b..f79f1bb203 100644 --- a/pc/dtls_transport.cc +++ b/pc/dtls_transport.cc @@ -10,39 +10,53 @@ #include "pc/dtls_transport.h" +#include +#include #include -#include "absl/types/optional.h" #include "api/dtls_transport_interface.h" +#include "api/ice_transport_interface.h" #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "p2p/dtls/dtls_transport_internal.h" #include "pc/ice_transport.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread.h" namespace webrtc { // Implementation of DtlsTransportInterface -DtlsTransport::DtlsTransport( - std::unique_ptr internal) - : owner_thread_(rtc::Thread::Current()), +DtlsTransport::DtlsTransport(std::unique_ptr internal) + : owner_thread_(Thread::Current()), info_(DtlsTransportState::kNew), internal_dtls_transport_(std::move(internal)), - ice_transport_(rtc::make_ref_counted( + ice_transport_(make_ref_counted( internal_dtls_transport_->ice_transport())) { RTC_DCHECK(internal_dtls_transport_.get()); internal_dtls_transport_->SubscribeDtlsTransportState( - [this](cricket::DtlsTransportInternal* transport, - DtlsTransportState state) { + [this](DtlsTransportInternal* transport, DtlsTransportState state) { OnInternalDtlsState(transport, state); }); UpdateInformation(); } DtlsTransport::~DtlsTransport() { + // TODO(tommi): Due to a reference being held by the RtpSenderBase + // implementation, the last reference to the `DtlsTransport` instance can + // be released on the signaling thread. + // RTC_DCHECK_RUN_ON(owner_thread_); + // We depend on the signaling thread to call Clear() before dropping // its last reference to this object. 
+ + // If there are non `owner_thread_` references outstanding, and those + // references are the last ones released, we depend on Clear() having been + // called from the owner_thread before the last reference is deleted. + // `Clear()` is currently called from `JsepTransport::~JsepTransport`. RTC_DCHECK(owner_thread_->IsCurrent() || !internal_dtls_transport_); } @@ -62,7 +76,7 @@ void DtlsTransport::UnregisterObserver() { observer_ = nullptr; } -rtc::scoped_refptr DtlsTransport::ice_transport() { +scoped_refptr DtlsTransport::ice_transport() { return ice_transport_; } @@ -72,23 +86,16 @@ void DtlsTransport::Clear() { RTC_DCHECK(internal()); bool must_send_event = (internal()->dtls_state() != DtlsTransportState::kClosed); - // The destructor of cricket::DtlsTransportInternal calls back - // into DtlsTransport, so we can't hold the lock while releasing. - std::unique_ptr transport_to_release; - { - MutexLock lock(&lock_); - transport_to_release = std::move(internal_dtls_transport_); - ice_transport_->Clear(); - } + internal_dtls_transport_.reset(); + ice_transport_->Clear(); UpdateInformation(); if (observer_ && must_send_event) { observer_->OnStateChange(Information()); } } -void DtlsTransport::OnInternalDtlsState( - cricket::DtlsTransportInternal* transport, - DtlsTransportState state) { +void DtlsTransport::OnInternalDtlsState(DtlsTransportInternal* transport, + DtlsTransportState state) { RTC_DCHECK_RUN_ON(owner_thread_); RTC_DCHECK(transport == internal()); RTC_DCHECK(state == internal()->dtls_state()); @@ -100,23 +107,22 @@ void DtlsTransport::OnInternalDtlsState( void DtlsTransport::UpdateInformation() { RTC_DCHECK_RUN_ON(owner_thread_); - MutexLock lock(&lock_); if (internal_dtls_transport_) { if (internal_dtls_transport_->dtls_state() == DtlsTransportState::kConnected) { bool success = true; - rtc::SSLRole internal_role; - absl::optional role; + SSLRole internal_role; + std::optional role; int ssl_cipher_suite; int tls_version; int srtp_cipher; success &= internal_dtls_transport_->GetDtlsRole(&internal_role); if (success) { switch (internal_role) { - case rtc::SSL_CLIENT: + case SSL_CLIENT: role = DtlsTransportTlsRole::kClient; break; - case rtc::SSL_SERVER: + case SSL_SERVER: role = DtlsTransportTlsRole::kServer; break; } @@ -125,23 +131,24 @@ void DtlsTransport::UpdateInformation() { success &= internal_dtls_transport_->GetSslCipherSuite(&ssl_cipher_suite); success &= internal_dtls_transport_->GetSrtpCryptoSuite(&srtp_cipher); if (success) { - info_ = DtlsTransportInformation( + set_info(DtlsTransportInformation( internal_dtls_transport_->dtls_state(), role, tls_version, ssl_cipher_suite, srtp_cipher, - internal_dtls_transport_->GetRemoteSSLCertChain()); + internal_dtls_transport_->GetRemoteSSLCertChain())); } else { RTC_LOG(LS_ERROR) << "DtlsTransport in connected state has incomplete " "TLS information"; - info_ = DtlsTransportInformation( - internal_dtls_transport_->dtls_state(), role, absl::nullopt, - absl::nullopt, absl::nullopt, - internal_dtls_transport_->GetRemoteSSLCertChain()); + set_info(DtlsTransportInformation( + internal_dtls_transport_->dtls_state(), role, std::nullopt, + std::nullopt, std::nullopt, + internal_dtls_transport_->GetRemoteSSLCertChain())); } } else { - info_ = DtlsTransportInformation(internal_dtls_transport_->dtls_state()); + set_info( + DtlsTransportInformation(internal_dtls_transport_->dtls_state())); } } else { - info_ = DtlsTransportInformation(DtlsTransportState::kClosed); + set_info(DtlsTransportInformation(DtlsTransportState::kClosed)); } } 
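The wrapper above only publishes value snapshots; as a minimal, illustrative sketch (not part of the patch), this is how a caller can consume them through the `RegisterObserver()`/`DtlsTransportObserverInterface` API exercised by the unit tests in this change. The `LoggingDtlsTransportObserver` name and the log statements are invented for the example:

#include "api/dtls_transport_interface.h"
#include "api/rtc_error.h"
#include "rtc_base/logging.h"

// Minimal observer sketch: receives the DtlsTransportInformation snapshots
// that DtlsTransport::UpdateInformation() publishes via OnStateChange().
class LoggingDtlsTransportObserver
    : public webrtc::DtlsTransportObserverInterface {
 public:
  void OnStateChange(webrtc::DtlsTransportInformation info) override {
    // `info` is a copy, so it is safe to inspect on the delivering thread.
    RTC_LOG(LS_INFO) << "DTLS state: " << static_cast<int>(info.state());
  }
  void OnError(webrtc::RTCError error) override {
    RTC_LOG(LS_ERROR) << "DTLS transport error: " << error.message();
  }
};

// Usage, assuming `transport` is a scoped_refptr<webrtc::DtlsTransport>:
//   LoggingDtlsTransportObserver observer;
//   transport->RegisterObserver(&observer);
//   ...
//   transport->UnregisterObserver();  // before `observer` is destroyed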
diff --git a/pc/dtls_transport.h b/pc/dtls_transport.h index cca4cc980a..0ddc573ab6 100644 --- a/pc/dtls_transport.h +++ b/pc/dtls_transport.h @@ -12,12 +12,13 @@ #define PC_DTLS_TRANSPORT_H_ #include +#include #include "api/dtls_transport_interface.h" #include "api/ice_transport_interface.h" #include "api/scoped_refptr.h" -#include "p2p/base/dtls_transport.h" -#include "p2p/base/dtls_transport_internal.h" +#include "api/sequence_checker.h" +#include "p2p/dtls/dtls_transport_internal.h" #include "pc/ice_transport.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread.h" @@ -27,31 +28,34 @@ namespace webrtc { class IceTransportWithPointer; -// This implementation wraps a cricket::DtlsTransport, and takes +// This implementation wraps a webrtc::DtlsTransportInternalImpl, and takes // ownership of it. class DtlsTransport : public DtlsTransportInterface { public: // This object must be constructed and updated on a consistent thread, - // the same thread as the one the cricket::DtlsTransportInternal object + // the same thread as the one the webrtc::DtlsTransportInternal object // lives on. // The Information() function can be called from a different thread, // such as the signalling thread. - explicit DtlsTransport( - std::unique_ptr internal); + explicit DtlsTransport(std::unique_ptr internal); - rtc::scoped_refptr ice_transport() override; + scoped_refptr ice_transport() override; + + // Currently called from the signaling thread and potentially Chromium's + // JS thread. DtlsTransportInformation Information() override; + void RegisterObserver(DtlsTransportObserverInterface* observer) override; void UnregisterObserver() override; void Clear(); - cricket::DtlsTransportInternal* internal() { - MutexLock lock(&lock_); + DtlsTransportInternal* internal() { + RTC_DCHECK_RUN_ON(owner_thread_); return internal_dtls_transport_.get(); } - const cricket::DtlsTransportInternal* internal() const { - MutexLock lock(&lock_); + const DtlsTransportInternal* internal() const { + RTC_DCHECK_RUN_ON(owner_thread_); return internal_dtls_transport_.get(); } @@ -59,17 +63,24 @@ class DtlsTransport : public DtlsTransportInterface { ~DtlsTransport(); private: - void OnInternalDtlsState(cricket::DtlsTransportInternal* transport, + void OnInternalDtlsState(DtlsTransportInternal* transport, DtlsTransportState state); void UpdateInformation(); + // Called when changing `info_`. We only change the values from the + // `owner_thread_` (a.k.a. the network thread). 
+ void set_info(DtlsTransportInformation&& info) RTC_RUN_ON(owner_thread_) { + MutexLock lock(&lock_); + info_ = std::move(info); + } + DtlsTransportObserverInterface* observer_ = nullptr; - rtc::Thread* owner_thread_; + Thread* owner_thread_; mutable Mutex lock_; DtlsTransportInformation info_ RTC_GUARDED_BY(lock_); - std::unique_ptr internal_dtls_transport_ - RTC_GUARDED_BY(lock_); - const rtc::scoped_refptr ice_transport_; + std::unique_ptr internal_dtls_transport_ + RTC_GUARDED_BY(owner_thread_); + const scoped_refptr ice_transport_; }; } // namespace webrtc diff --git a/pc/dtls_transport_unittest.cc b/pc/dtls_transport_unittest.cc index c234176635..476975664f 100644 --- a/pc/dtls_transport_unittest.cc +++ b/pc/dtls_transport_unittest.cc @@ -10,29 +10,32 @@ #include "pc/dtls_transport.h" +#include +#include #include #include -#include "absl/types/optional.h" +#include "api/dtls_transport_interface.h" #include "api/make_ref_counted.h" #include "api/rtc_error.h" -#include "p2p/base/fake_dtls_transport.h" +#include "api/scoped_refptr.h" +#include "api/test/rtc_error_matchers.h" #include "p2p/base/p2p_constants.h" +#include "p2p/dtls/fake_dtls_transport.h" #include "rtc_base/fake_ssl_identity.h" -#include "rtc_base/gunit.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/ssl_identity.h" +#include "rtc_base/thread.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" + +namespace webrtc { -constexpr int kDefaultTimeout = 1000; // milliseconds constexpr int kNonsenseCipherSuite = 1234; -using cricket::FakeDtlsTransport; using ::testing::ElementsAre; -namespace webrtc { - class TestDtlsTransportObserver : public DtlsTransportObserverInterface { public: void OnStateChange(DtlsTransportInformation info) override { @@ -44,7 +47,7 @@ class TestDtlsTransportObserver : public DtlsTransportObserverInterface { void OnError(RTCError error) override {} DtlsTransportState state() { - if (states_.size() > 0) { + if (!states_.empty()) { return states_[states_.size() - 1]; } else { return DtlsTransportState::kNew; @@ -61,40 +64,39 @@ class DtlsTransportTest : public ::testing::Test { DtlsTransport* transport() { return transport_.get(); } DtlsTransportObserverInterface* observer() { return &observer_; } - void CreateTransport(rtc::FakeSSLCertificate* certificate = nullptr) { + void CreateTransport(FakeSSLCertificate* certificate = nullptr) { auto cricket_transport = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + "audio", ICE_CANDIDATE_COMPONENT_RTP); if (certificate) { cricket_transport->SetRemoteSSLCertificate(certificate); } cricket_transport->SetSslCipherSuite(kNonsenseCipherSuite); - transport_ = - rtc::make_ref_counted(std::move(cricket_transport)); + transport_ = make_ref_counted(std::move(cricket_transport)); } void CompleteDtlsHandshake() { auto fake_dtls1 = static_cast(transport_->internal()); auto fake_dtls2 = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); - auto cert1 = rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT)); + "audio", ICE_CANDIDATE_COMPONENT_RTP); + auto cert1 = + RTCCertificate::Create(SSLIdentity::Create("session1", KT_DEFAULT)); fake_dtls1->SetLocalCertificate(cert1); - auto cert2 = rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT)); + auto cert2 = + RTCCertificate::Create(SSLIdentity::Create("session1", KT_DEFAULT)); fake_dtls2->SetLocalCertificate(cert2); fake_dtls1->SetDestination(fake_dtls2.get()); } - rtc::AutoThread 
main_thread_; - rtc::scoped_refptr transport_; + AutoThread main_thread_; + scoped_refptr transport_; TestDtlsTransportObserver observer_; }; TEST_F(DtlsTransportTest, CreateClearDelete) { - auto cricket_transport = std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); + auto cricket_transport = + std::make_unique("audio", ICE_CANDIDATE_COMPONENT_RTP); auto webrtc_transport = - rtc::make_ref_counted(std::move(cricket_transport)); + make_ref_counted(std::move(cricket_transport)); ASSERT_TRUE(webrtc_transport->internal()); ASSERT_EQ(DtlsTransportState::kNew, webrtc_transport->Information().state()); webrtc_transport->Clear(); @@ -107,7 +109,9 @@ TEST_F(DtlsTransportTest, EventsObservedWhenConnecting) { CreateTransport(); transport()->RegisterObserver(observer()); CompleteDtlsHandshake(); - ASSERT_TRUE_WAIT(observer_.state_change_called_, kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return observer_.state_change_called_; }, + ::testing::IsTrue()), + IsRtcOk()); EXPECT_THAT( observer_.states_, ElementsAre( // FakeDtlsTransport doesn't signal the "connecting" state. @@ -120,47 +124,53 @@ TEST_F(DtlsTransportTest, CloseWhenClearing) { CreateTransport(); transport()->RegisterObserver(observer()); CompleteDtlsHandshake(); - ASSERT_TRUE_WAIT(observer_.state() == DtlsTransportState::kConnected, - kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return observer_.state(); }, + ::testing::Eq(DtlsTransportState::kConnected)), + IsRtcOk()); transport()->Clear(); - ASSERT_TRUE_WAIT(observer_.state() == DtlsTransportState::kClosed, - kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return observer_.state(); }, + ::testing::Eq(DtlsTransportState::kClosed)), + IsRtcOk()); } TEST_F(DtlsTransportTest, RoleAppearsOnConnect) { - rtc::FakeSSLCertificate fake_certificate("fake data"); + FakeSSLCertificate fake_certificate("fake data"); CreateTransport(&fake_certificate); transport()->RegisterObserver(observer()); EXPECT_FALSE(transport()->Information().role()); CompleteDtlsHandshake(); - ASSERT_TRUE_WAIT(observer_.state() == DtlsTransportState::kConnected, - kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return observer_.state(); }, + ::testing::Eq(DtlsTransportState::kConnected)), + IsRtcOk()); EXPECT_TRUE(observer_.info_.role()); EXPECT_TRUE(transport()->Information().role()); EXPECT_EQ(transport()->Information().role(), DtlsTransportTlsRole::kClient); } TEST_F(DtlsTransportTest, CertificateAppearsOnConnect) { - rtc::FakeSSLCertificate fake_certificate("fake data"); + FakeSSLCertificate fake_certificate("fake data"); CreateTransport(&fake_certificate); transport()->RegisterObserver(observer()); CompleteDtlsHandshake(); - ASSERT_TRUE_WAIT(observer_.state() == DtlsTransportState::kConnected, - kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return observer_.state(); }, + ::testing::Eq(DtlsTransportState::kConnected)), + IsRtcOk()); EXPECT_TRUE(observer_.info_.remote_ssl_certificates() != nullptr); } TEST_F(DtlsTransportTest, CertificateDisappearsOnClose) { - rtc::FakeSSLCertificate fake_certificate("fake data"); + FakeSSLCertificate fake_certificate("fake data"); CreateTransport(&fake_certificate); transport()->RegisterObserver(observer()); CompleteDtlsHandshake(); - ASSERT_TRUE_WAIT(observer_.state() == DtlsTransportState::kConnected, - kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return observer_.state(); }, + ::testing::Eq(DtlsTransportState::kConnected)), + IsRtcOk()); EXPECT_TRUE(observer_.info_.remote_ssl_certificates() != nullptr); transport()->Clear(); - 
ASSERT_TRUE_WAIT(observer_.state() == DtlsTransportState::kClosed, - kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return observer_.state(); }, + ::testing::Eq(DtlsTransportState::kClosed)), + IsRtcOk()); EXPECT_FALSE(observer_.info_.remote_ssl_certificates()); } @@ -168,13 +178,15 @@ TEST_F(DtlsTransportTest, CipherSuiteVisibleWhenConnected) { CreateTransport(); transport()->RegisterObserver(observer()); CompleteDtlsHandshake(); - ASSERT_TRUE_WAIT(observer_.state() == DtlsTransportState::kConnected, - kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return observer_.state(); }, + ::testing::Eq(DtlsTransportState::kConnected)), + IsRtcOk()); ASSERT_TRUE(observer_.info_.ssl_cipher_suite()); EXPECT_EQ(kNonsenseCipherSuite, *observer_.info_.ssl_cipher_suite()); transport()->Clear(); - ASSERT_TRUE_WAIT(observer_.state() == DtlsTransportState::kClosed, - kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return observer_.state(); }, + ::testing::Eq(DtlsTransportState::kClosed)), + IsRtcOk()); EXPECT_FALSE(observer_.info_.ssl_cipher_suite()); } diff --git a/pc/dtmf_sender.cc b/pc/dtmf_sender.cc index 45a4a58abb..6ecc285c08 100644 --- a/pc/dtmf_sender.cc +++ b/pc/dtmf_sender.cc @@ -13,6 +13,13 @@ #include #include +#include +#include + +#include "api/dtmf_sender_interface.h" +#include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" @@ -57,13 +64,12 @@ bool GetDtmfCode(char tone, int* code) { return true; } -rtc::scoped_refptr DtmfSender::Create( - TaskQueueBase* signaling_thread, - DtmfProviderInterface* provider) { +scoped_refptr DtmfSender::Create(TaskQueueBase* signaling_thread, + DtmfProviderInterface* provider) { if (!signaling_thread) { return nullptr; } - return rtc::make_ref_counted(signaling_thread, provider); + return make_ref_counted(signaling_thread, provider); } DtmfSender::DtmfSender(TaskQueueBase* signaling_thread, diff --git a/pc/dtmf_sender.h b/pc/dtmf_sender.h index c99c7bee50..5c7575036c 100644 --- a/pc/dtmf_sender.h +++ b/pc/dtmf_sender.h @@ -21,7 +21,6 @@ #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "pc/proxy.h" -#include "rtc_base/ref_count.h" #include "rtc_base/thread_annotations.h" // DtmfSender is the native implementation of the RTCDTMFSender defined by @@ -48,8 +47,8 @@ class DtmfProviderInterface { class DtmfSender : public DtmfSenderInterface { public: - static rtc::scoped_refptr Create(TaskQueueBase* signaling_thread, - DtmfProviderInterface* provider); + static scoped_refptr Create(TaskQueueBase* signaling_thread, + DtmfProviderInterface* provider); void OnDtmfProviderDestroyed(); @@ -92,7 +91,7 @@ class DtmfSender : public DtmfSenderInterface { int comma_delay_ RTC_GUARDED_BY(signaling_thread_); // For cancelling the tasks which feed the DTMF provider one tone at a time. 
- rtc::scoped_refptr safety_flag_ RTC_GUARDED_BY( + scoped_refptr safety_flag_ RTC_GUARDED_BY( signaling_thread_) RTC_PT_GUARDED_BY(signaling_thread_) = nullptr; }; diff --git a/pc/dtmf_sender_unittest.cc b/pc/dtmf_sender_unittest.cc index ab5ba9eaee..d121809129 100644 --- a/pc/dtmf_sender_unittest.cc +++ b/pc/dtmf_sender_unittest.cc @@ -12,14 +12,21 @@ #include +#include #include #include #include +#include "api/dtmf_sender_interface.h" +#include "api/scoped_refptr.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" #include "rtc_base/fake_clock.h" -#include "rtc_base/gunit.h" +#include "rtc_base/thread.h" #include "rtc_base/time_utils.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" using webrtc::DtmfProviderInterface; using webrtc::DtmfSender; @@ -55,7 +62,7 @@ class FakeDtmfObserver : public DtmfSenderObserverInterface { const std::vector& tones_from_single_argument_callback() const { return tones_from_single_argument_callback_; } - const std::string tones_remaining() { return tones_remaining_; } + std::string tones_remaining() { return tones_remaining_; } bool completed() const { return completed_; } private: @@ -82,12 +89,12 @@ class FakeDtmfProvider : public DtmfProviderInterface { bool InsertDtmf(int code, int duration) override { int gap = 0; - // TODO(ronghuawu): Make the timer (basically the rtc::TimeNanos) + // TODO(ronghuawu): Make the timer (basically the webrtc::TimeNanos) // mockable and use a fake timer in the unit tests. if (last_insert_dtmf_call_ > 0) { - gap = static_cast(rtc::TimeMillis() - last_insert_dtmf_call_); + gap = static_cast(webrtc::TimeMillis() - last_insert_dtmf_call_); } - last_insert_dtmf_call_ = rtc::TimeMillis(); + last_insert_dtmf_call_ = webrtc::TimeMillis(); dtmf_info_queue_.push_back(DtmfInfo(code, duration, gap)); return true; @@ -112,12 +119,12 @@ class DtmfSenderTest : public ::testing::Test { DtmfSenderTest() : observer_(new FakeDtmfObserver()), provider_(new FakeDtmfProvider()) { provider_->SetCanInsertDtmf(true); - dtmf_ = DtmfSender::Create(rtc::Thread::Current(), provider_.get()); + dtmf_ = DtmfSender::Create(webrtc::Thread::Current(), provider_.get()); dtmf_->RegisterObserver(observer_.get()); } ~DtmfSenderTest() { - if (dtmf_.get()) { + if (dtmf_) { dtmf_->UnregisterObserver(); } } @@ -207,11 +214,11 @@ class DtmfSenderTest : public ::testing::Test { } } - rtc::AutoThread main_thread_; + webrtc::AutoThread main_thread_; std::unique_ptr observer_; std::unique_ptr provider_; - rtc::scoped_refptr dtmf_; - rtc::ScopedFakeClock fake_clock_; + webrtc::scoped_refptr dtmf_; + webrtc::ScopedFakeClock fake_clock_; }; TEST_F(DtmfSenderTest, CanInsertDtmf) { @@ -225,7 +232,11 @@ TEST_F(DtmfSenderTest, InsertDtmf) { int duration = 100; int inter_tone_gap = 50; EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap)); - EXPECT_TRUE_SIMULATED_WAIT(observer_->completed(), kMaxWaitMs, fake_clock_); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return observer_->completed(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kMaxWaitMs), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // The unrecognized characters should be ignored. std::string known_tones = "1a*"; @@ -241,14 +252,21 @@ TEST_F(DtmfSenderTest, InsertDtmfTwice) { EXPECT_TRUE(dtmf_->InsertDtmf(tones1, duration, inter_tone_gap)); VerifyExpectedState(tones1, duration, inter_tone_gap); // Wait until the first tone got sent. 
- EXPECT_TRUE_SIMULATED_WAIT(observer_->tones().size() == 1, kMaxWaitMs, - fake_clock_); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return observer_->tones().size(); }, ::testing::Eq(1), + {.timeout = webrtc::TimeDelta::Millis(kMaxWaitMs), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); VerifyExpectedState("2", duration, inter_tone_gap); // Insert with another tone buffer. EXPECT_TRUE(dtmf_->InsertDtmf(tones2, duration, inter_tone_gap)); VerifyExpectedState(tones2, duration, inter_tone_gap); // Wait until it's completed. - EXPECT_TRUE_SIMULATED_WAIT(observer_->completed(), kMaxWaitMs, fake_clock_); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return observer_->completed(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kMaxWaitMs), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); std::vector dtmf_queue_ref; GetDtmfInfoFromString("1", duration, inter_tone_gap, &dtmf_queue_ref); @@ -263,13 +281,16 @@ TEST_F(DtmfSenderTest, InsertDtmfWhileProviderIsDeleted) { int inter_tone_gap = 50; EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap)); // Wait until the first tone got sent. - EXPECT_TRUE_SIMULATED_WAIT(observer_->tones().size() == 1, kMaxWaitMs, - fake_clock_); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return observer_->tones().size(); }, ::testing::Eq(1), + {.timeout = webrtc::TimeDelta::Millis(kMaxWaitMs), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Delete provider. dtmf_->OnDtmfProviderDestroyed(); provider_.reset(); // The queue should be discontinued so no more tone callbacks. - SIMULATED_WAIT(false, 200, fake_clock_); + fake_clock_.AdvanceTime(webrtc::TimeDelta::Millis(200)); EXPECT_EQ(1U, observer_->tones().size()); } @@ -279,12 +300,15 @@ TEST_F(DtmfSenderTest, InsertDtmfWhileSenderIsDeleted) { int inter_tone_gap = 50; EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap)); // Wait until the first tone got sent. - EXPECT_TRUE_SIMULATED_WAIT(observer_->tones().size() == 1, kMaxWaitMs, - fake_clock_); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return observer_->tones().size(); }, ::testing::Eq(1), + {.timeout = webrtc::TimeDelta::Millis(kMaxWaitMs), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Delete the sender. dtmf_ = NULL; // The queue should be discontinued so no more tone callbacks. - SIMULATED_WAIT(false, 200, fake_clock_); + fake_clock_.AdvanceTime(webrtc::TimeDelta::Millis(200)); EXPECT_EQ(1U, observer_->tones().size()); } @@ -295,12 +319,19 @@ TEST_F(DtmfSenderTest, InsertEmptyTonesToCancelPreviousTask) { int inter_tone_gap = 50; EXPECT_TRUE(dtmf_->InsertDtmf(tones1, duration, inter_tone_gap)); // Wait until the first tone got sent. - EXPECT_TRUE_SIMULATED_WAIT(observer_->tones().size() == 1, kMaxWaitMs, - fake_clock_); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return observer_->tones().size(); }, ::testing::Eq(1), + {.timeout = webrtc::TimeDelta::Millis(kMaxWaitMs), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); // Insert with another tone buffer. EXPECT_TRUE(dtmf_->InsertDtmf(tones2, duration, inter_tone_gap)); // Wait until it's completed. 
- EXPECT_TRUE_SIMULATED_WAIT(observer_->completed(), kMaxWaitMs, fake_clock_); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return observer_->completed(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kMaxWaitMs), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); std::vector dtmf_queue_ref; GetDtmfInfoFromString("1", duration, inter_tone_gap, &dtmf_queue_ref); @@ -315,7 +346,11 @@ TEST_F(DtmfSenderTest, InsertDtmfWithDefaultCommaDelay) { int default_comma_delay = webrtc::DtmfSender::kDtmfDefaultCommaDelayMs; EXPECT_EQ(dtmf_->comma_delay(), default_comma_delay); EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap)); - EXPECT_TRUE_SIMULATED_WAIT(observer_->completed(), kMaxWaitMs, fake_clock_); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return observer_->completed(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kMaxWaitMs), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); VerifyOnProvider(tones, duration, inter_tone_gap); VerifyOnObserver(tones); @@ -330,7 +365,11 @@ TEST_F(DtmfSenderTest, InsertDtmfWithNonDefaultCommaDelay) { int comma_delay = 500; EXPECT_EQ(dtmf_->comma_delay(), default_comma_delay); EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap, comma_delay)); - EXPECT_TRUE_SIMULATED_WAIT(observer_->completed(), kMaxWaitMs, fake_clock_); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return observer_->completed(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kMaxWaitMs), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); VerifyOnProvider(tones, duration, inter_tone_gap, comma_delay); VerifyOnObserver(tones); @@ -365,7 +404,10 @@ TEST_F(DtmfSenderTest, InsertDtmfSendsAfterWait) { EXPECT_TRUE(dtmf_->InsertDtmf(tones, duration, inter_tone_gap)); VerifyExpectedState("ABC", duration, inter_tone_gap); // Wait until the first tone got sent. - EXPECT_TRUE_SIMULATED_WAIT(observer_->tones().size() == 1, kMaxWaitMs, - fake_clock_); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return observer_->tones().size(); }, ::testing::Eq(1), + {.timeout = webrtc::TimeDelta::Millis(kMaxWaitMs), + .clock = &fake_clock_}), + webrtc::IsRtcOk()); VerifyExpectedState("BC", duration, inter_tone_gap); } diff --git a/pc/external_hmac.cc b/pc/external_hmac.cc index 27b5d0e5ab..6ee917d2ba 100644 --- a/pc/external_hmac.cc +++ b/pc/external_hmac.cc @@ -13,8 +13,11 @@ #include // For malloc/free. 
#include +#include + #include "rtc_base/logging.h" #include "rtc_base/zero_memory.h" +#include "third_party/libsrtp/crypto/include/auth.h" #include "third_party/libsrtp/include/srtp.h" // Begin test case 0 */ @@ -90,7 +93,8 @@ srtp_err_status_t external_hmac_alloc(srtp_auth_t** a, } srtp_err_status_t external_hmac_dealloc(srtp_auth_t* a) { - rtc::ExplicitZeroMemory(a, sizeof(ExternalHmacContext) + sizeof(srtp_auth_t)); + webrtc::ExplicitZeroMemory(a, + sizeof(ExternalHmacContext) + sizeof(srtp_auth_t)); // Free memory delete[] a; diff --git a/pc/external_hmac.h b/pc/external_hmac.h index c5071fc192..3319beaed4 100644 --- a/pc/external_hmac.h +++ b/pc/external_hmac.h @@ -30,9 +30,9 @@ #include +#include "third_party/libsrtp/crypto/include/auth.h" #include "third_party/libsrtp/crypto/include/crypto_types.h" #include "third_party/libsrtp/include/srtp.h" -#include "third_party/libsrtp/include/srtp_priv.h" #define EXTERNAL_HMAC_SHA1 SRTP_HMAC_SHA1 + 1 #define HMAC_KEY_LENGTH 20 diff --git a/pc/g3doc/dtls_transport.md b/pc/g3doc/dtls_transport.md index 28d6739413..b5673381a2 100644 --- a/pc/g3doc/dtls_transport.md +++ b/pc/g3doc/dtls_transport.md @@ -23,17 +23,17 @@ following classes interact. ## webrtc::DtlsTransport The [`webrtc::DtlsTransport`][1] class is a wrapper around the -`cricket::DtlsTransportInternal` and allows registering observers implementing +`webrtc::DtlsTransportInternal` and allows registering observers implementing the `webrtc::DtlsTransportObserverInterface`. The [`webrtc::DtlsTransportObserverInterface`][2] will provide updates to the observers, passing around a snapshot of the transport's state such as the connection state, the remote certificate(s) and the SRTP ciphers as [`DtlsTransportInformation`][3]. -## cricket::DtlsTransportInternal +## webrtc::DtlsTransportInternal -The [`cricket::DtlsTransportInternal`][4] class is an interface. Its -implementation is [`cricket::DtlsTransport`][5]. The `cricket::DtlsTransport` +The [`webrtc::DtlsTransportInternal`][4] class is an interface. Its +implementation is [`webrtc::DtlsTransportInternalImpl`][5]. The `webrtc::DtlsTransportInternalImpl` sends and receives network packets via an ICE transport. It also demultiplexes DTLS packets and SRTP packets according to the scheme described in [RFC 5764](https://tools.ietf.org/html/rfc5764#section-5.1.2). @@ -42,7 +42,7 @@ The [`webrtc::DtlsSrtpTransport`][6] class is responsible for extracting the SRTP keys after the DTLS handshake as well as protection and unprotection of -SRTP packets via its [`cricket::SrtpSession`][7]. +SRTP packets via its [`webrtc::SrtpSession`][7]. [1]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/dtls_transport.h;l=32;drc=6a55e7307b78edb50f94a1ff1ef8393d58218369 [2]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/dtls_transport_interface.h;l=76;drc=34437d5660a80393d631657329ef74c6538be25a diff --git a/pc/g3doc/peer_connection.md b/pc/g3doc/peer_connection.md index cd01265cff..255def1e72 100644 --- a/pc/g3doc/peer_connection.md +++ b/pc/g3doc/peer_connection.md @@ -53,7 +53,7 @@ all here; please consult the .h files. PeerConnectionFactory owns an object called ConnectionContext, and a reference to this is passed to each PeerConnection.
It is referenced -via an rtc::scoped_refptr, which means that it is guaranteed to be +via a webrtc::scoped_refptr, which means that it is guaranteed to be alive as long as either the factory or one of the PeerConnections is using it. diff --git a/pc/g3doc/sctp_transport.md b/pc/g3doc/sctp_transport.md index 100eb92e47..8d4979ebab 100644 --- a/pc/g3doc/sctp_transport.md +++ b/pc/g3doc/sctp_transport.md @@ -18,16 +18,16 @@ of an SctpTransport changes; this callback is called on the network thread (as set during PeerConnectionFactory initialization). The implementation of this object lives in pc/sctp_transport.{h,cc}, and is -basically a wrapper around a `cricket::SctpTransportInternal`, hiding its +basically a wrapper around a `webrtc::SctpTransportInternal`, hiding its implementation details and APIs that shouldn't be accessed from the user. The `webrtc::SctpTransport` is a ref counted object; it should be regarded as owned by the PeerConnection, and will be closed when the PeerConnection closes, but the object itself may survive longer than the PeerConnection. -## cricket::SctpTransportInternal +## webrtc::SctpTransportInternal -[`cricket::SctpTransportInternal`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/media/sctp/sctp_transport_internal.h?q=cricket::SctpTransportInternal) owns two objects: The SCTP association object +[`webrtc::SctpTransportInternal`](https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/media/sctp/sctp_transport_internal.h?q=webrtc::SctpTransportInternal) owns two objects: The SCTP association object and the DTLS transport, which is the object used to send and receive messages as emitted from or consumed by the sctp library. diff --git a/pc/g3doc/srtp.md b/pc/g3doc/srtp.md index eb457efacf..cdc1311a4e 100644 --- a/pc/g3doc/srtp.md +++ b/pc/g3doc/srtp.md @@ -23,16 +23,17 @@ The implementation supports the following cipher suites: * SRTP_AEAD_AES_128_GCM * SRTP_AEAD_AES_256_GCM -The SRTP_AES128_CM_HMAC_SHA1_32 cipher suite is accepted for audio-only -connections if offered by the other side. It is not actively supported, see -[SelectCrypto][2] for details. +The SRTP_AES128_CM_HMAC_SHA1_32 cipher suite is not enabled by default and is +disabled in Chromium. When enabled, it is accepted for audio-only connections if +offered by the other side. It is not actively supported, see [SelectCrypto][2] +for details. The cipher suite ordering allows a non-WebRTC peer to prefer GCM cipher suites, however they are not selected as default by two instances of the WebRTC library. -## cricket::SrtpSession +## webrtc::SrtpSession -The [`cricket::SrtpSession`][3] is providing encryption and decryption of SRTP +The [`webrtc::SrtpSession`][3] provides encryption and decryption of SRTP packets using [`libsrtp`](https://github.com/cisco/libsrtp). Keys will be provided by `SrtpTransport` or `DtlsSrtpTransport` in the [`SetSend`][4] and [`SetRecv`][5] methods. @@ -54,10 +55,6 @@ extracts the keying material when the DTLS handshake is done and configures it in its base class. It will also become writable only once the DTLS handshake is done. -## cricket::SrtpFilter - -The [`cricket::SrtpFilter`][12] class is used to negotiate SDES.
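To make the key-extraction flow described above concrete, here is a minimal sketch of the send path the transport tests in this patch exercise. `transport` is assumed to be a `webrtc::DtlsSrtpTransport` whose DTLS handshake has already completed, and `kPcmuFrame`/`kRtpAuthTagLen` are the constants used by those test fixtures:

// Sketch only: SRTP keys are already installed because DTLS is connected.
webrtc::CopyOnWriteBuffer rtp_packet(kPcmuFrame, sizeof(kPcmuFrame),
                                     sizeof(kPcmuFrame) + kRtpAuthTagLen);
webrtc::AsyncSocketPacketOptions options;
// PF_SRTP_BYPASS marks the (now SRTP-protected) payload so the DTLS layer
// sends it directly on the wire instead of wrapping it in DTLS records.
bool sent = transport->SendRtpPacket(&rtp_packet, options,
                                     webrtc::PF_SRTP_BYPASS);
// `sent` is true when protection and the underlying send both succeed.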
- [1]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/peer_connection_interface.h;l=1413;drc=f467b445631189557d44de86a77ca6a0c3e2108d [2]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/media_session.cc;l=297;drc=3ac73bd0aa5322abee98f1ff8705af64a184bf61 [3]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=33;drc=be66d95ab7f9428028806bbf66cb83800bda9241 @@ -69,4 +66,3 @@ The [`cricket::SrtpFilter`][12] class is used to negotiate SDES. [9]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_session.h;l=73;drc=be66d95ab7f9428028806bbf66cb83800bda9241 [10]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_transport.h;l=37;drc=a4d873786f10eedd72de25ad0d94ad7c53c1f68a [11]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/dtls_srtp_transport.h;l=31;drc=2f8e0536eb97ce2131e7a74e3ca06077aa0b64b3 -[12]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/pc/srtp_filter.h;drc=d15a575ec3528c252419149d35977e55269d8a41 diff --git a/pc/ice_server_parsing.cc b/pc/ice_server_parsing.cc index 896305c54b..02612eb23a 100644 --- a/pc/ice_server_parsing.cc +++ b/pc/ice_server_parsing.cc @@ -13,9 +13,17 @@ #include #include // For std::isdigit. +#include #include #include - +#include + +#include "absl/strings/string_view.h" +#include "api/candidate.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" #include "p2p/base/port_interface.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" @@ -89,14 +97,14 @@ std::tuple GetServiceTypeAndHostnameFromUri( return {ServiceType::INVALID, ""}; } -absl::optional ParsePort(absl::string_view in_str) { +std::optional ParsePort(absl::string_view in_str) { // Make sure port only contains digits. StringToNumber doesn't check this. for (const char& c : in_str) { if (!std::isdigit(static_cast(c))) { return false; } } - return rtc::StringToNumber(in_str); + return StringToNumber(in_str); } // This method parses IPv6 and IPv4 literal strings, along with hostnames in @@ -123,7 +131,7 @@ std::tuple ParseHostnameAndPortFromString( } auto colonpos = in_str.find(':', closebracket); if (absl::string_view::npos != colonpos) { - if (absl::optional opt_port = + if (std::optional opt_port = ParsePort(in_str.substr(closebracket + 2))) { port = *opt_port; } else { @@ -135,7 +143,7 @@ std::tuple ParseHostnameAndPortFromString( // IPv4address or reg-name syntax auto colonpos = in_str.find(':'); if (absl::string_view::npos != colonpos) { - if (absl::optional opt_port = + if (std::optional opt_port = ParsePort(in_str.substr(colonpos + 1))) { port = *opt_port; } else { @@ -155,11 +163,10 @@ std::tuple ParseHostnameAndPortFromString( // Adds a STUN or TURN server to the appropriate list, // by parsing `url` and using the username/password in `server`. 
-RTCError ParseIceServerUrl( - const PeerConnectionInterface::IceServer& server, - absl::string_view url, - cricket::ServerAddresses* stun_servers, - std::vector* turn_servers) { +RTCError ParseIceServerUrl(const PeerConnectionInterface::IceServer& server, + absl::string_view url, + ServerAddresses* stun_servers, + std::vector* turn_servers) { // RFC 7064 // stunURI = scheme ":" host [ ":" port ] // scheme = "stun" / "stuns" @@ -177,14 +184,13 @@ RTCError ParseIceServerUrl( RTC_DCHECK(stun_servers != nullptr); RTC_DCHECK(turn_servers != nullptr); - cricket::ProtocolType turn_transport_type = cricket::PROTO_UDP; + ProtocolType turn_transport_type = PROTO_UDP; RTC_DCHECK(!url.empty()); - std::vector tokens = rtc::split(url, '?'); + std::vector tokens = split(url, '?'); absl::string_view uri_without_transport = tokens[0]; // Let's look into transport= param, if it exists. if (tokens.size() == kTurnTransportTokensNum) { // ?transport= is present. - std::vector transport_tokens = - rtc::split(tokens[1], '='); + std::vector transport_tokens = split(tokens[1], '='); if (transport_tokens[0] != kTransport) { LOG_AND_RETURN_ERROR( RTCErrorType::SYNTAX_ERROR, @@ -196,10 +202,8 @@ RTCError ParseIceServerUrl( "ICE server parsing failed: Transport parameter missing value."); } - absl::optional proto = - cricket::StringToProto(transport_tokens[1]); - if (!proto || - (*proto != cricket::PROTO_UDP && *proto != cricket::PROTO_TCP)) { + std::optional proto = StringToProto(transport_tokens[1]); + if (!proto || (*proto != PROTO_UDP && *proto != PROTO_TCP)) { LOG_AND_RETURN_ERROR( RTCErrorType::SYNTAX_ERROR, "ICE server parsing failed: Transport parameter should " @@ -232,7 +236,7 @@ RTCError ParseIceServerUrl( int default_port = kDefaultStunPort; if (service_type == ServiceType::TURNS) { default_port = kDefaultStunTlsPort; - turn_transport_type = cricket::PROTO_TLS; + turn_transport_type = PROTO_TLS; } if (hoststring.find('@') != absl::string_view::npos) { @@ -260,7 +264,7 @@ RTCError ParseIceServerUrl( switch (service_type) { case ServiceType::STUN: case ServiceType::STUNS: - stun_servers->insert(rtc::SocketAddress(address, port)); + stun_servers->insert(SocketAddress(address, port)); break; case ServiceType::TURN: case ServiceType::TURNS: { @@ -277,9 +281,9 @@ RTCError ParseIceServerUrl( // handshake (SNI and Certificate verification). absl::string_view hostname = server.hostname.empty() ? address : server.hostname; - rtc::SocketAddress socket_address(hostname, port); + SocketAddress socket_address(hostname, port); if (!server.hostname.empty()) { - rtc::IPAddress ip; + IPAddress ip; if (!IPFromString(address, &ip)) { // When hostname is set, the server address must be a // resolved ip address. 
@@ -291,13 +295,13 @@ RTCError ParseIceServerUrl( } socket_address.SetResolvedIP(ip); } - cricket::RelayServerConfig config = - cricket::RelayServerConfig(socket_address, server.username, - server.password, turn_transport_type); + RelayServerConfig config = + RelayServerConfig(socket_address, server.username, server.password, + turn_transport_type); if (server.tls_cert_policy == PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck) { config.tls_cert_policy = - cricket::TlsCertPolicy::TLS_CERT_POLICY_INSECURE_NO_CHECK; + TlsCertPolicy::TLS_CERT_POLICY_INSECURE_NO_CHECK; } config.tls_alpn_protocols = server.tls_alpn_protocols; config.tls_elliptic_curves = server.tls_elliptic_curves; @@ -319,8 +323,8 @@ RTCError ParseIceServerUrl( RTCError ParseIceServersOrError( const PeerConnectionInterface::IceServers& servers, - cricket::ServerAddresses* stun_servers, - std::vector* turn_servers) { + ServerAddresses* stun_servers, + std::vector* turn_servers) { for (const PeerConnectionInterface::IceServer& server : servers) { if (!server.urls.empty()) { for (const std::string& url : server.urls) { @@ -350,11 +354,33 @@ RTCError ParseIceServersOrError( return RTCError::OK(); } -RTCErrorType ParseIceServers( - const PeerConnectionInterface::IceServers& servers, - cricket::ServerAddresses* stun_servers, - std::vector* turn_servers) { - return ParseIceServersOrError(servers, stun_servers, turn_servers).type(); +RTCError ParseAndValidateIceServersFromConfiguration( + const PeerConnectionInterface::RTCConfiguration& configuration, + ServerAddresses& stun_servers, + std::vector& turn_servers) { + RTC_DCHECK(stun_servers.empty()); + RTC_DCHECK(turn_servers.empty()); + RTCError err = ParseIceServersOrError(configuration.servers, &stun_servers, + &turn_servers); + if (!err.ok()) { + return err; + } + + // Restrict number of TURN servers. + if (turn_servers.size() > kMaxTurnServers) { + RTC_LOG(LS_WARNING) << "Number of configured TURN servers is " + << turn_servers.size() + << " which exceeds the maximum allowed number of " + << kMaxTurnServers; + turn_servers.resize(kMaxTurnServers); + } + + // Add the turn logging id to all turn servers + for (RelayServerConfig& turn_server : turn_servers) { + turn_server.turn_logging_id = configuration.turn_logging_id; + } + + return RTCError::OK(); } } // namespace webrtc diff --git a/pc/ice_server_parsing.h b/pc/ice_server_parsing.h index 549964e285..75c16f096d 100644 --- a/pc/ice_server_parsing.h +++ b/pc/ice_server_parsing.h @@ -29,13 +29,15 @@ namespace webrtc { // PeerConnection through RTCConfiguration. RTC_EXPORT RTCError ParseIceServersOrError(const PeerConnectionInterface::IceServers& servers, - cricket::ServerAddresses* stun_servers, - std::vector* turn_servers); - -[[deprecated("use ParseIceServersOrError")]] RTC_EXPORT RTCErrorType -ParseIceServers(const PeerConnectionInterface::IceServers& servers, - cricket::ServerAddresses* stun_servers, - std::vector* turn_servers); + ServerAddresses* stun_servers, + std::vector* turn_servers); + +// Calls `ParseIceServersOrError` to extract ice server information from the +// `configuration` and then validates the extracted configuration. 
+RTC_EXPORT RTCError ParseAndValidateIceServersFromConfiguration( + const PeerConnectionInterface::RTCConfiguration& configuration, + ServerAddresses& stun_servers, + std::vector& turn_servers); } // namespace webrtc diff --git a/pc/ice_server_parsing_unittest.cc b/pc/ice_server_parsing_unittest.cc index 4356b1efb0..df459e6ddd 100644 --- a/pc/ice_server_parsing_unittest.cc +++ b/pc/ice_server_parsing_unittest.cc @@ -13,6 +13,9 @@ #include #include +#include "api/peer_connection_interface.h" +#include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" #include "p2p/base/port_interface.h" #include "rtc_base/ip_address.h" #include "rtc_base/socket_address.h" @@ -62,14 +65,12 @@ class IceServerParsingTest : public ::testing::Test { server.tls_cert_policy = tls_certificate_policy; server.hostname = hostname; servers.push_back(server); - return webrtc::ParseIceServersOrError(servers, &stun_servers_, - &turn_servers_) - .ok(); + return ParseIceServersOrError(servers, &stun_servers_, &turn_servers_).ok(); } protected: - cricket::ServerAddresses stun_servers_; - std::vector turn_servers_; + ServerAddresses stun_servers_; + std::vector turn_servers_; }; // Make sure all STUN/TURN prefixes are parsed correctly. @@ -85,14 +86,14 @@ TEST_F(IceServerParsingTest, ParseStunPrefixes) { EXPECT_TRUE(ParseTurnUrl("turn:hostname")); EXPECT_EQ(0U, stun_servers_.size()); EXPECT_EQ(1U, turn_servers_.size()); - EXPECT_EQ(cricket::PROTO_UDP, turn_servers_[0].ports[0].proto); + EXPECT_EQ(PROTO_UDP, turn_servers_[0].ports[0].proto); EXPECT_TRUE(ParseTurnUrl("turns:hostname")); EXPECT_EQ(0U, stun_servers_.size()); EXPECT_EQ(1U, turn_servers_.size()); - EXPECT_EQ(cricket::PROTO_TLS, turn_servers_[0].ports[0].proto); + EXPECT_EQ(PROTO_TLS, turn_servers_[0].ports[0].proto); EXPECT_TRUE(turn_servers_[0].tls_cert_policy == - cricket::TlsCertPolicy::TLS_CERT_POLICY_SECURE); + TlsCertPolicy::TLS_CERT_POLICY_SECURE); EXPECT_TRUE(ParseUrl( "turns:hostname", "username", "password", @@ -100,8 +101,8 @@ TEST_F(IceServerParsingTest, ParseStunPrefixes) { EXPECT_EQ(0U, stun_servers_.size()); EXPECT_EQ(1U, turn_servers_.size()); EXPECT_TRUE(turn_servers_[0].tls_cert_policy == - cricket::TlsCertPolicy::TLS_CERT_POLICY_INSECURE_NO_CHECK); - EXPECT_EQ(cricket::PROTO_TLS, turn_servers_[0].ports[0].proto); + TlsCertPolicy::TLS_CERT_POLICY_INSECURE_NO_CHECK); + EXPECT_EQ(PROTO_TLS, turn_servers_[0].ports[0].proto); // invalid prefixes EXPECT_FALSE(ParseUrl("stunn:hostname")); @@ -115,13 +116,13 @@ TEST_F(IceServerParsingTest, VerifyDefaults) { EXPECT_TRUE(ParseTurnUrl("turns:hostname")); EXPECT_EQ(1U, turn_servers_.size()); EXPECT_EQ(5349, turn_servers_[0].ports[0].address.port()); - EXPECT_EQ(cricket::PROTO_TLS, turn_servers_[0].ports[0].proto); + EXPECT_EQ(PROTO_TLS, turn_servers_[0].ports[0].proto); // TURN defaults EXPECT_TRUE(ParseTurnUrl("turn:hostname")); EXPECT_EQ(1U, turn_servers_.size()); EXPECT_EQ(3478, turn_servers_[0].ports[0].address.port()); - EXPECT_EQ(cricket::PROTO_UDP, turn_servers_[0].ports[0].proto); + EXPECT_EQ(PROTO_UDP, turn_servers_[0].ports[0].proto); // STUN defaults EXPECT_TRUE(ParseUrl("stun:hostname")); @@ -168,7 +169,7 @@ TEST_F(IceServerParsingTest, ParseHostnameAndPort) { PeerConnectionInterface::TlsCertPolicy::kTlsCertPolicySecure, "hostname")); EXPECT_EQ(1U, turn_servers_.size()); - rtc::SocketAddress address = turn_servers_[0].ports[0].address; + SocketAddress address = turn_servers_[0].ports[0].address; EXPECT_EQ("hostname", address.hostname()); EXPECT_EQ(1234, address.port()); 
EXPECT_FALSE(address.IsUnresolvedIP()); @@ -196,11 +197,11 @@ TEST_F(IceServerParsingTest, ParseHostnameAndPort) { TEST_F(IceServerParsingTest, ParseTransport) { EXPECT_TRUE(ParseTurnUrl("turn:hostname:1234?transport=tcp")); EXPECT_EQ(1U, turn_servers_.size()); - EXPECT_EQ(cricket::PROTO_TCP, turn_servers_[0].ports[0].proto); + EXPECT_EQ(PROTO_TCP, turn_servers_[0].ports[0].proto); EXPECT_TRUE(ParseTurnUrl("turn:hostname?transport=udp")); EXPECT_EQ(1U, turn_servers_.size()); - EXPECT_EQ(cricket::PROTO_UDP, turn_servers_[0].ports[0].proto); + EXPECT_EQ(PROTO_UDP, turn_servers_[0].ports[0].proto); EXPECT_FALSE(ParseTurnUrl("turn:hostname?transport=invalid")); EXPECT_FALSE(ParseTurnUrl("turn:hostname?transport=")); @@ -233,8 +234,7 @@ TEST_F(IceServerParsingTest, ParseMultipleUrls) { server.password = "bar"; servers.push_back(server); EXPECT_TRUE( - webrtc::ParseIceServersOrError(servers, &stun_servers_, &turn_servers_) - .ok()); + ParseIceServersOrError(servers, &stun_servers_, &turn_servers_).ok()); EXPECT_EQ(1U, stun_servers_.size()); EXPECT_EQ(1U, turn_servers_.size()); } diff --git a/pc/ice_transport.cc b/pc/ice_transport.cc index 205846755d..56f24901dd 100644 --- a/pc/ice_transport.cc +++ b/pc/ice_transport.cc @@ -10,6 +10,7 @@ #include "pc/ice_transport.h" +#include "api/ice_transport_interface.h" #include "api/sequence_checker.h" namespace webrtc { @@ -23,7 +24,7 @@ IceTransportWithPointer::~IceTransportWithPointer() { } } -cricket::IceTransportInternal* IceTransportWithPointer::internal() { +IceTransportInternal* IceTransportWithPointer::internal() { RTC_DCHECK_RUN_ON(creator_thread_); return internal_; } diff --git a/pc/ice_transport.h b/pc/ice_transport.h index e31ec546b2..74b78fd06f 100644 --- a/pc/ice_transport.h +++ b/pc/ice_transport.h @@ -24,8 +24,8 @@ namespace webrtc { // is deallocated. class IceTransportWithPointer : public IceTransportInterface { public: - explicit IceTransportWithPointer(cricket::IceTransportInternal* internal) - : creator_thread_(rtc::Thread::Current()), internal_(internal) { + explicit IceTransportWithPointer(IceTransportInternal* internal) + : creator_thread_(Thread::Current()), internal_(internal) { RTC_DCHECK(internal_); } @@ -33,7 +33,7 @@ class IceTransportWithPointer : public IceTransportInterface { IceTransportWithPointer(const IceTransportWithPointer&) = delete; IceTransportWithPointer& operator=(const IceTransportWithPointer&) = delete; - cricket::IceTransportInternal* internal() override; + IceTransportInternal* internal() override; // This call will ensure that the pointer passed at construction is // no longer in use by this object. Later calls to internal() will return // null. 
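A minimal sketch of the lifetime contract spelled out above: IceTransportWithPointer only borrows the IceTransportInternal pointer, and Clear() must run on the thread that created the wrapper before the wrapped object goes away. The FakeIceTransport constructor arguments mirror the unit test below; the webrtc:: spellings assume the namespace migration carried out in this patch.

#include <memory>

#include "api/make_ref_counted.h"
#include "p2p/test/fake_ice_transport.h"
#include "pc/ice_transport.h"

void SketchIceTransportWithPointerLifetime() {
  auto internal = std::make_unique<webrtc::FakeIceTransport>(
      "name", /*component=*/0, nullptr);
  auto wrapper = webrtc::make_ref_counted<webrtc::IceTransportWithPointer>(
      internal.get());
  // internal() returns the borrowed pointer until Clear() is called.
  wrapper->Clear();  // Detach before `internal` is destroyed.
  // From here on internal() returns null, so the ref-counted wrapper may
  // safely outlive the FakeIceTransport it was created with.
}
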
@@ -43,8 +43,8 @@ class IceTransportWithPointer : public IceTransportInterface { ~IceTransportWithPointer() override; private: - const rtc::Thread* creator_thread_; - cricket::IceTransportInternal* internal_ RTC_GUARDED_BY(creator_thread_); + const Thread* creator_thread_; + IceTransportInternal* internal_ RTC_GUARDED_BY(creator_thread_); }; } // namespace webrtc diff --git a/pc/ice_transport_unittest.cc b/pc/ice_transport_unittest.cc index aaf9f2e57a..767d1ad2a3 100644 --- a/pc/ice_transport_unittest.cc +++ b/pc/ice_transport_unittest.cc @@ -13,50 +13,47 @@ #include #include +#include "api/environment/environment_factory.h" #include "api/ice_transport_factory.h" +#include "api/ice_transport_interface.h" #include "api/make_ref_counted.h" #include "api/scoped_refptr.h" -#include "p2p/base/fake_ice_transport.h" -#include "p2p/base/fake_port_allocator.h" +#include "p2p/test/fake_ice_transport.h" +#include "p2p/test/fake_port_allocator.h" #include "rtc_base/internal/default_socket_server.h" +#include "rtc_base/socket_server.h" +#include "rtc_base/thread.h" #include "test/gtest.h" -#include "test/scoped_key_value_config.h" namespace webrtc { class IceTransportTest : public ::testing::Test { protected: IceTransportTest() - : socket_server_(rtc::CreateDefaultSocketServer()), + : socket_server_(CreateDefaultSocketServer()), main_thread_(socket_server_.get()) {} - rtc::SocketServer* socket_server() const { return socket_server_.get(); } - - webrtc::test::ScopedKeyValueConfig field_trials_; + SocketServer* socket_server() const { return socket_server_.get(); } private: - std::unique_ptr socket_server_; - rtc::AutoSocketServerThread main_thread_; + std::unique_ptr socket_server_; + AutoSocketServerThread main_thread_; }; TEST_F(IceTransportTest, CreateNonSelfDeletingTransport) { auto cricket_transport = - std::make_unique("name", 0, nullptr); + std::make_unique("name", 0, nullptr); auto ice_transport = - rtc::make_ref_counted(cricket_transport.get()); + make_ref_counted(cricket_transport.get()); EXPECT_EQ(ice_transport->internal(), cricket_transport.get()); ice_transport->Clear(); EXPECT_NE(ice_transport->internal(), cricket_transport.get()); } TEST_F(IceTransportTest, CreateSelfDeletingTransport) { - std::unique_ptr port_allocator( - std::make_unique( - nullptr, - std::make_unique(socket_server()), - &field_trials_)); + FakePortAllocator port_allocator(CreateEnvironment(), socket_server()); IceTransportInit init; - init.set_port_allocator(port_allocator.get()); + init.set_port_allocator(&port_allocator); auto ice_transport = CreateIceTransport(std::move(init)); EXPECT_NE(nullptr, ice_transport->internal()); } diff --git a/pc/jitter_buffer_delay.cc b/pc/jitter_buffer_delay.cc index f22b0650f9..7e8c3a5593 100644 --- a/pc/jitter_buffer_delay.cc +++ b/pc/jitter_buffer_delay.cc @@ -10,8 +10,9 @@ #include "pc/jitter_buffer_delay.h" +#include + #include "api/sequence_checker.h" -#include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/numerics/safe_minmax.h" @@ -22,16 +23,15 @@ constexpr int kMaximumDelayMs = 10000; namespace webrtc { -void JitterBufferDelay::Set(absl::optional delay_seconds) { +void JitterBufferDelay::Set(std::optional delay_seconds) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); cached_delay_seconds_ = delay_seconds; } int JitterBufferDelay::GetMs() const { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - return rtc::SafeClamp( - rtc::saturated_cast(cached_delay_seconds_.value_or(kDefaultDelay) * - 1000), + return SafeClamp( + 
saturated_cast(cached_delay_seconds_.value_or(kDefaultDelay) * 1000), 0, kMaximumDelayMs); } diff --git a/pc/jitter_buffer_delay.h b/pc/jitter_buffer_delay.h index caf713b045..bc506fe653 100644 --- a/pc/jitter_buffer_delay.h +++ b/pc/jitter_buffer_delay.h @@ -13,7 +13,8 @@ #include -#include "absl/types/optional.h" +#include + #include "api/sequence_checker.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" @@ -27,13 +28,13 @@ class JitterBufferDelay { public: JitterBufferDelay() = default; - void Set(absl::optional delay_seconds); + void Set(std::optional delay_seconds); int GetMs() const; private: RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_thread_checker_{ SequenceChecker::kDetached}; - absl::optional cached_delay_seconds_ + std::optional cached_delay_seconds_ RTC_GUARDED_BY(&worker_thread_checker_); }; diff --git a/pc/jsep_ice_candidate.cc b/pc/jsep_ice_candidate.cc index 1e97ad42d8..0b0a282431 100644 --- a/pc/jsep_ice_candidate.cc +++ b/pc/jsep_ice_candidate.cc @@ -10,6 +10,11 @@ #include "api/jsep_ice_candidate.h" +#include +#include +#include + +#include "api/candidate.h" #include "pc/webrtc_sdp.h" // This file contains JsepIceCandidate-related functions that are not @@ -34,7 +39,7 @@ IceCandidateInterface* CreateIceCandidate(const std::string& sdp_mid, std::unique_ptr CreateIceCandidate( const std::string& sdp_mid, int sdp_mline_index, - const cricket::Candidate& candidate) { + const Candidate& candidate) { return std::make_unique(sdp_mid, sdp_mline_index, candidate); } @@ -45,7 +50,7 @@ JsepIceCandidate::JsepIceCandidate(const std::string& sdp_mid, JsepIceCandidate::JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index, - const cricket::Candidate& candidate) + const Candidate& candidate) : sdp_mid_(sdp_mid), sdp_mline_index_(sdp_mline_index), candidate_(candidate) {} diff --git a/pc/jsep_session_description.cc b/pc/jsep_session_description.cc index 885c1eb310..fe4bd79afc 100644 --- a/pc/jsep_session_description.cc +++ b/pc/jsep_session_description.cc @@ -10,75 +10,60 @@ #include "api/jsep_session_description.h" +#include #include +#include +#include #include +#include -#include "absl/types/optional.h" +#include "absl/strings/string_view.h" +#include "api/candidate.h" +#include "api/jsep.h" +#include "api/jsep_ice_candidate.h" #include "p2p/base/p2p_constants.h" -#include "p2p/base/port.h" #include "p2p/base/transport_description.h" #include "p2p/base/transport_info.h" #include "pc/media_session.h" // IWYU pragma: keep +#include "pc/session_description.h" #include "pc/webrtc_sdp.h" #include "rtc_base/checks.h" #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/net_helper.h" +#include "rtc_base/net_helpers.h" #include "rtc_base/socket_address.h" -using cricket::SessionDescription; +using webrtc::Candidate; +using ::webrtc::SessionDescription; namespace webrtc { namespace { -// RFC 5245 -// It is RECOMMENDED that default candidates be chosen based on the -// likelihood of those candidates to work with the peer that is being -// contacted. It is RECOMMENDED that relayed > reflexive > host. 
-constexpr int kPreferenceUnknown = 0; -constexpr int kPreferenceHost = 1; -constexpr int kPreferenceReflexive = 2; -constexpr int kPreferenceRelayed = 3; - constexpr char kDummyAddress[] = "0.0.0.0"; constexpr int kDummyPort = 9; -int GetCandidatePreferenceFromType(const std::string& type) { - int preference = kPreferenceUnknown; - if (type == cricket::LOCAL_PORT_TYPE) { - preference = kPreferenceHost; - } else if (type == cricket::STUN_PORT_TYPE) { - preference = kPreferenceReflexive; - } else if (type == cricket::RELAY_PORT_TYPE) { - preference = kPreferenceRelayed; - } else { - preference = kPreferenceUnknown; - } - return preference; -} - // Update the connection address for the MediaContentDescription based on the // candidates. void UpdateConnectionAddress( const JsepCandidateCollection& candidate_collection, - cricket::MediaContentDescription* media_desc) { + MediaContentDescription* media_desc) { int port = kDummyPort; std::string ip = kDummyAddress; std::string hostname; - int current_preference = kPreferenceUnknown; + int current_preference = 0; // Start with lowest preference. int current_family = AF_UNSPEC; for (size_t i = 0; i < candidate_collection.count(); ++i) { const IceCandidateInterface* jsep_candidate = candidate_collection.at(i); if (jsep_candidate->candidate().component() != - cricket::ICE_CANDIDATE_COMPONENT_RTP) { + ICE_CANDIDATE_COMPONENT_RTP) { continue; } // Default destination should be UDP only. - if (jsep_candidate->candidate().protocol() != cricket::UDP_PROTOCOL_NAME) { + if (jsep_candidate->candidate().protocol() != UDP_PROTOCOL_NAME) { continue; } - const int preference = - GetCandidatePreferenceFromType(jsep_candidate->candidate().type()); + const int preference = jsep_candidate->candidate().type_preference(); const int family = jsep_candidate->candidate().address().ipaddr().family(); // See if this candidate is more preferable then the current one if it's the // same family. Or if the current family is IPv4 already so we could safely @@ -90,14 +75,13 @@ void UpdateConnectionAddress( } current_preference = preference; current_family = family; - const rtc::SocketAddress& candidate_addr = - jsep_candidate->candidate().address(); + const SocketAddress& candidate_addr = jsep_candidate->candidate().address(); port = candidate_addr.port(); ip = candidate_addr.ipaddr().ToString(); hostname = candidate_addr.hostname(); } - rtc::SocketAddress connection_addr(ip, port); - if (rtc::IPIsUnspec(connection_addr.ipaddr()) && !hostname.empty()) { + SocketAddress connection_addr(ip, port); + if (IPIsUnspec(connection_addr.ipaddr()) && !hostname.empty()) { // When a hostname candidate becomes the (default) connection address, // we use the dummy address 0.0.0.0 and port 9 in the c= and the m= lines. // @@ -114,7 +98,7 @@ void UpdateConnectionAddress( // populate the c= and the m= lines. See `BuildMediaDescription` in // webrtc_sdp.cc for the SDP generation with // `media_desc->connection_address()`. - connection_addr = rtc::SocketAddress(kDummyAddress, kDummyPort); + connection_addr = SocketAddress(kDummyAddress, kDummyPort); } media_desc->set_connection_address(connection_addr); } @@ -124,7 +108,7 @@ void UpdateConnectionAddress( // TODO(steveanton): Remove this default implementation once Chromium has been // updated. 
SdpType SessionDescriptionInterface::GetType() const { - absl::optional maybe_type = SdpTypeFromString(type()); + std::optional maybe_type = SdpTypeFromString(type()); if (maybe_type) { return *maybe_type; } else { @@ -139,7 +123,7 @@ SdpType SessionDescriptionInterface::GetType() const { SessionDescriptionInterface* CreateSessionDescription(const std::string& type, const std::string& sdp, SdpParseError* error) { - absl::optional maybe_type = SdpTypeFromString(type); + std::optional maybe_type = SdpTypeFromString(type); if (!maybe_type) { return nullptr; } @@ -170,7 +154,7 @@ std::unique_ptr CreateSessionDescription( SdpType type, const std::string& session_id, const std::string& session_version, - std::unique_ptr description) { + std::unique_ptr description) { auto jsep_description = std::make_unique(type); bool initialize_success = jsep_description->Initialize( std::move(description), session_id, session_version); @@ -181,7 +165,7 @@ std::unique_ptr CreateSessionDescription( JsepSessionDescription::JsepSessionDescription(SdpType type) : type_(type) {} JsepSessionDescription::JsepSessionDescription(const std::string& type) { - absl::optional maybe_type = SdpTypeFromString(type); + std::optional maybe_type = SdpTypeFromString(type); if (maybe_type) { type_ = *maybe_type; } else { @@ -194,7 +178,7 @@ JsepSessionDescription::JsepSessionDescription(const std::string& type) { JsepSessionDescription::JsepSessionDescription( SdpType type, - std::unique_ptr description, + std::unique_ptr description, absl::string_view session_id, absl::string_view session_version) : description_(std::move(description)), @@ -208,7 +192,7 @@ JsepSessionDescription::JsepSessionDescription( JsepSessionDescription::~JsepSessionDescription() {} bool JsepSessionDescription::Initialize( - std::unique_ptr description, + std::unique_ptr description, const std::string& session_id, const std::string& session_version) { if (!description) @@ -246,14 +230,14 @@ bool JsepSessionDescription::AddCandidate( if (mediasection_index >= number_of_mediasections()) return false; const std::string& content_name = - description_->contents()[mediasection_index].name; - const cricket::TransportInfo* transport_info = + description_->contents()[mediasection_index].mid(); + const TransportInfo* transport_info = description_->GetTransportInfoByName(content_name); if (!transport_info) { return false; } - cricket::Candidate updated_candidate = candidate->candidate(); + Candidate updated_candidate = candidate->candidate(); if (updated_candidate.username().empty()) { updated_candidate.set_username(transport_info->description.ice_ufrag); } @@ -278,7 +262,7 @@ bool JsepSessionDescription::AddCandidate( } size_t JsepSessionDescription::RemoveCandidates( - const std::vector& candidates) { + const std::vector& candidates) { size_t num_removed = 0; for (auto& candidate : candidates) { int mediasection_index = GetMediasectionIndex(candidate); @@ -337,7 +321,7 @@ bool JsepSessionDescription::GetMediasectionIndex( bool found = false; // Try to match the sdp_mid with content name. 
for (size_t i = 0; i < description_->contents().size(); ++i) { - if (candidate->sdp_mid() == description_->contents().at(i).name) { + if (candidate->sdp_mid() == description_->contents().at(i).mid()) { *index = i; found = true; break; @@ -352,12 +336,11 @@ bool JsepSessionDescription::GetMediasectionIndex( return true; } -int JsepSessionDescription::GetMediasectionIndex( - const cricket::Candidate& candidate) { +int JsepSessionDescription::GetMediasectionIndex(const Candidate& candidate) { // Find the description with a matching transport name of the candidate. const std::string& transport_name = candidate.transport_name(); for (size_t i = 0; i < description_->contents().size(); ++i) { - if (transport_name == description_->contents().at(i).name) { + if (transport_name == description_->contents().at(i).mid()) { return static_cast(i); } } diff --git a/pc/jsep_session_description_unittest.cc b/pc/jsep_session_description_unittest.cc index c4b993d687..60247df538 100644 --- a/pc/jsep_session_description_unittest.cc +++ b/pc/jsep_session_description_unittest.cc @@ -13,31 +13,33 @@ #include #include +#include +#include #include #include +#include "absl/strings/str_cat.h" #include "api/candidate.h" #include "api/jsep.h" #include "api/jsep_ice_candidate.h" #include "media/base/codec.h" #include "p2p/base/p2p_constants.h" -#include "p2p/base/port.h" #include "p2p/base/transport_description.h" #include "p2p/base/transport_info.h" #include "pc/session_description.h" #include "pc/webrtc_sdp.h" -#include "rtc_base/helpers.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/net_helper.h" #include "rtc_base/socket_address.h" -#include "rtc_base/string_encode.h" #include "test/gtest.h" -using cricket::MediaProtocolType; using ::testing::Values; using webrtc::IceCandidateCollection; using webrtc::IceCandidateInterface; +using webrtc::IceCandidateType; using webrtc::JsepIceCandidate; using webrtc::JsepSessionDescription; +using ::webrtc::MediaProtocolType; using webrtc::SdpType; using webrtc::SessionDescriptionInterface; @@ -53,33 +55,31 @@ static const uint32_t kCandidateGeneration = 2; // This creates a session description with both audio and video media contents. // In SDP this is described by two m lines, one audio and one video. 
-static std::unique_ptr +static std::unique_ptr CreateCricketSessionDescription() { - auto desc = std::make_unique(); + auto desc = std::make_unique(); // AudioContentDescription - auto audio = std::make_unique(); + auto audio = std::make_unique(); // VideoContentDescription - auto video = std::make_unique(); + auto video = std::make_unique(); - audio->AddCodec(cricket::CreateAudioCodec(103, "ISAC", 16000, 0)); - desc->AddContent(cricket::CN_AUDIO, MediaProtocolType::kRtp, - std::move(audio)); + audio->AddCodec(webrtc::CreateAudioCodec(103, "ISAC", 16000, 0)); + desc->AddContent(webrtc::CN_AUDIO, MediaProtocolType::kRtp, std::move(audio)); - video->AddCodec(cricket::CreateVideoCodec(120, "VP8")); - desc->AddContent(cricket::CN_VIDEO, MediaProtocolType::kRtp, - std::move(video)); + video->AddCodec(webrtc::CreateVideoCodec(120, "VP8")); + desc->AddContent(webrtc::CN_VIDEO, MediaProtocolType::kRtp, std::move(video)); - desc->AddTransportInfo(cricket::TransportInfo( - cricket::CN_AUDIO, - cricket::TransportDescription( + desc->AddTransportInfo(webrtc::TransportInfo( + webrtc::CN_AUDIO, + webrtc::TransportDescription( std::vector(), kCandidateUfragVoice, kCandidatePwdVoice, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_NONE, NULL))); - desc->AddTransportInfo(cricket::TransportInfo( - cricket::CN_VIDEO, - cricket::TransportDescription( + webrtc::ICEMODE_FULL, webrtc::CONNECTIONROLE_NONE, NULL))); + desc->AddTransportInfo(webrtc::TransportInfo( + webrtc::CN_VIDEO, + webrtc::TransportDescription( std::vector(), kCandidateUfragVideo, kCandidatePwdVideo, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_NONE, NULL))); + webrtc::ICEMODE_FULL, webrtc::CONNECTIONROLE_NONE, NULL))); return desc; } @@ -87,12 +87,13 @@ class JsepSessionDescriptionTest : public ::testing::Test { protected: virtual void SetUp() { int port = 1234; - rtc::SocketAddress address("127.0.0.1", port++); - cricket::Candidate candidate(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp", - address, 1, "", "", "local", 0, "1"); + webrtc::SocketAddress address("127.0.0.1", port++); + webrtc::Candidate candidate(webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", + address, 1, "", "", IceCandidateType::kHost, 0, + "1"); candidate_ = candidate; - const std::string session_id = rtc::ToString(rtc::CreateRandomId64()); - const std::string session_version = rtc::ToString(rtc::CreateRandomId()); + const std::string session_id = absl::StrCat(webrtc::CreateRandomId64()); + const std::string session_version = absl::StrCat(webrtc::CreateRandomId()); jsep_desc_ = std::make_unique(SdpType::kOffer); ASSERT_TRUE(jsep_desc_->Initialize(CreateCricketSessionDescription(), session_id, session_version)); @@ -112,7 +113,7 @@ class JsepSessionDescriptionTest : public ::testing::Test { return std::move(jsep_desc); } - cricket::Candidate candidate_; + webrtc::Candidate candidate_; std::unique_ptr jsep_desc_; }; @@ -135,14 +136,14 @@ TEST_F(JsepSessionDescriptionTest, CloneRollback) { } TEST_F(JsepSessionDescriptionTest, CloneWithCandidates) { - cricket::Candidate candidate_v4( - cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp", - rtc::SocketAddress("192.168.1.5", 1234), kCandidatePriority, "", "", - cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation); - cricket::Candidate candidate_v6( - cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp", - rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "", - cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation); + webrtc::Candidate candidate_v4( + webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", + 
webrtc::SocketAddress("192.168.1.5", 1234), kCandidatePriority, "", "", + IceCandidateType::kSrflx, kCandidateGeneration, kCandidateFoundation); + webrtc::Candidate candidate_v6( + webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", + webrtc::SocketAddress("::1", 1234), kCandidatePriority, "", "", + IceCandidateType::kHost, kCandidateGeneration, kCandidateFoundation); JsepIceCandidate jice_v4("audio", 0, candidate_v4); JsepIceCandidate jice_v6("audio", 0, candidate_v6); @@ -202,7 +203,7 @@ TEST_F(JsepSessionDescriptionTest, AddAndRemoveCandidatesWithMid) { // The mline index should have been updated according to mid. EXPECT_EQ(1, ice_candidate->sdp_mline_index()); - std::vector candidates(1, candidate_); + std::vector candidates(1, candidate_); candidates[0].set_transport_name(mid); EXPECT_EQ(1u, jsep_desc_->RemoveCandidates(candidates)); EXPECT_EQ(0u, jsep_desc_->candidates(0)->count()); @@ -261,11 +262,11 @@ TEST_F(JsepSessionDescriptionTest, AddCandidateDuplicates) { // Test that the connection address is set to a hostname address after adding a // hostname candidate. TEST_F(JsepSessionDescriptionTest, AddHostnameCandidate) { - cricket::Candidate c; - c.set_component(cricket::ICE_CANDIDATE_COMPONENT_RTP); - c.set_protocol(cricket::UDP_PROTOCOL_NAME); - c.set_address(rtc::SocketAddress("example.local", 1234)); - c.set_type(cricket::LOCAL_PORT_TYPE); + webrtc::Candidate c; + c.set_component(webrtc::ICE_CANDIDATE_COMPONENT_RTP); + c.set_protocol(webrtc::UDP_PROTOCOL_NAME); + c.set_address(webrtc::SocketAddress("example.local", 1234)); + c.set_type(IceCandidateType::kHost); const size_t audio_index = 0; JsepIceCandidate hostname_candidate("audio", audio_index, c); EXPECT_TRUE(jsep_desc_->AddCandidate(&hostname_candidate)); @@ -292,11 +293,11 @@ TEST_F(JsepSessionDescriptionTest, SerializeDeserialize) { // is the default destination and deserialize it again. The connection address // in the deserialized description should be the dummy address 0.0.0.0:9. TEST_F(JsepSessionDescriptionTest, SerializeDeserializeWithHostnameCandidate) { - cricket::Candidate c; - c.set_component(cricket::ICE_CANDIDATE_COMPONENT_RTP); - c.set_protocol(cricket::UDP_PROTOCOL_NAME); - c.set_address(rtc::SocketAddress("example.local", 1234)); - c.set_type(cricket::LOCAL_PORT_TYPE); + webrtc::Candidate c; + c.set_component(webrtc::ICE_CANDIDATE_COMPONENT_RTP); + c.set_protocol(webrtc::UDP_PROTOCOL_NAME); + c.set_address(webrtc::SocketAddress("example.local", 1234)); + c.set_type(IceCandidateType::kHost); const size_t audio_index = 0; const size_t video_index = 1; JsepIceCandidate hostname_candidate_audio("audio", audio_index, c); @@ -345,14 +346,14 @@ TEST_F(JsepSessionDescriptionTest, SerializeDeserializeWithCandidates) { // is used as default address in c line according to preference. TEST_F(JsepSessionDescriptionTest, SerializeSessionDescriptionWithIPv6Only) { // Stun has a high preference than local host. 
- cricket::Candidate candidate1( - cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp", - rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "", - cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation); - cricket::Candidate candidate2( - cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp", - rtc::SocketAddress("::2", 1235), kCandidatePriority, "", "", - cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation); + webrtc::Candidate candidate1( + webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", + webrtc::SocketAddress("::1", 1234), kCandidatePriority, "", "", + IceCandidateType::kSrflx, kCandidateGeneration, kCandidateFoundation); + webrtc::Candidate candidate2( + webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", + webrtc::SocketAddress("::2", 1235), kCandidatePriority, "", "", + IceCandidateType::kHost, kCandidateGeneration, kCandidateFoundation); JsepIceCandidate jice1("audio", 0, candidate1); JsepIceCandidate jice2("audio", 0, candidate2); @@ -375,14 +376,14 @@ TEST_F(JsepSessionDescriptionTest, SerializeSessionDescriptionWithIPv6Only) { // preference of IPv4 is lower. TEST_F(JsepSessionDescriptionTest, SerializeSessionDescriptionWithBothIPFamilies) { - cricket::Candidate candidate_v4( - cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp", - rtc::SocketAddress("192.168.1.5", 1234), kCandidatePriority, "", "", - cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation); - cricket::Candidate candidate_v6( - cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp", - rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "", - cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation); + webrtc::Candidate candidate_v4( + webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", + webrtc::SocketAddress("192.168.1.5", 1234), kCandidatePriority, "", "", + IceCandidateType::kSrflx, kCandidateGeneration, kCandidateFoundation); + webrtc::Candidate candidate_v6( + webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", + webrtc::SocketAddress("::1", 1234), kCandidatePriority, "", "", + IceCandidateType::kHost, kCandidateGeneration, kCandidateFoundation); JsepIceCandidate jice_v4("audio", 0, candidate_v4); JsepIceCandidate jice_v6("audio", 0, candidate_v6); @@ -406,14 +407,14 @@ TEST_F(JsepSessionDescriptionTest, TEST_F(JsepSessionDescriptionTest, SerializeSessionDescriptionWithBothProtocols) { // Stun has a high preference than local host. - cricket::Candidate candidate1( - cricket::ICE_CANDIDATE_COMPONENT_RTP, "tcp", - rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "", - cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation); - cricket::Candidate candidate2( - cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp", - rtc::SocketAddress("fe80::1234:5678:abcd:ef12", 1235), kCandidatePriority, - "", "", cricket::LOCAL_PORT_TYPE, kCandidateGeneration, + webrtc::Candidate candidate1( + webrtc::ICE_CANDIDATE_COMPONENT_RTP, "tcp", + webrtc::SocketAddress("::1", 1234), kCandidatePriority, "", "", + IceCandidateType::kSrflx, kCandidateGeneration, kCandidateFoundation); + webrtc::Candidate candidate2( + webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", + webrtc::SocketAddress("fe80::1234:5678:abcd:ef12", 1235), + kCandidatePriority, "", "", IceCandidateType::kHost, kCandidateGeneration, kCandidateFoundation); JsepIceCandidate jice1("audio", 0, candidate1); @@ -437,14 +438,14 @@ TEST_F(JsepSessionDescriptionTest, // null IPv4 is used as default address in c line. TEST_F(JsepSessionDescriptionTest, SerializeSessionDescriptionWithTCPOnly) { // Stun has a high preference than local host. 
- cricket::Candidate candidate1( - cricket::ICE_CANDIDATE_COMPONENT_RTP, "tcp", - rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "", - cricket::STUN_PORT_TYPE, kCandidateGeneration, kCandidateFoundation); - cricket::Candidate candidate2( - cricket::ICE_CANDIDATE_COMPONENT_RTP, "tcp", - rtc::SocketAddress("::2", 1235), kCandidatePriority, "", "", - cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation); + webrtc::Candidate candidate1( + webrtc::ICE_CANDIDATE_COMPONENT_RTP, "tcp", + webrtc::SocketAddress("::1", 1234), kCandidatePriority, "", "", + IceCandidateType::kSrflx, kCandidateGeneration, kCandidateFoundation); + webrtc::Candidate candidate2( + webrtc::ICE_CANDIDATE_COMPONENT_RTP, "tcp", + webrtc::SocketAddress("::2", 1235), kCandidatePriority, "", "", + IceCandidateType::kHost, kCandidateGeneration, kCandidateFoundation); JsepIceCandidate jice1("audio", 0, candidate1); JsepIceCandidate jice2("audio", 0, candidate2); @@ -464,22 +465,22 @@ TEST_F(JsepSessionDescriptionTest, SerializeSessionDescriptionWithTCPOnly) { // Tests that the connection address will be correctly set when the Candidate is // removed. TEST_F(JsepSessionDescriptionTest, RemoveCandidateAndSetConnectionAddress) { - cricket::Candidate candidate1( - cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp", - rtc::SocketAddress("::1", 1234), kCandidatePriority, "", "", - cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation); + webrtc::Candidate candidate1( + webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", + webrtc::SocketAddress("::1", 1234), kCandidatePriority, "", "", + IceCandidateType::kHost, kCandidateGeneration, kCandidateFoundation); candidate1.set_transport_name("audio"); - cricket::Candidate candidate2( - cricket::ICE_CANDIDATE_COMPONENT_RTP, "tcp", - rtc::SocketAddress("::2", 1235), kCandidatePriority, "", "", - cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation); + webrtc::Candidate candidate2( + webrtc::ICE_CANDIDATE_COMPONENT_RTP, "tcp", + webrtc::SocketAddress("::2", 1235), kCandidatePriority, "", "", + IceCandidateType::kHost, kCandidateGeneration, kCandidateFoundation); candidate2.set_transport_name("audio"); - cricket::Candidate candidate3( - cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp", - rtc::SocketAddress("192.168.1.1", 1236), kCandidatePriority, "", "", - cricket::LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation); + webrtc::Candidate candidate3( + webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", + webrtc::SocketAddress("192.168.1.1", 1236), kCandidatePriority, "", "", + IceCandidateType::kHost, kCandidateGeneration, kCandidateFoundation); candidate3.set_transport_name("audio"); JsepIceCandidate jice1("audio", 0, candidate1); @@ -494,7 +495,7 @@ TEST_F(JsepSessionDescriptionTest, RemoveCandidateAndSetConnectionAddress) { ASSERT_TRUE(jsep_desc_->AddCandidate(&jice2)); ASSERT_TRUE(jsep_desc_->AddCandidate(&jice3)); - std::vector candidates; + std::vector candidates; EXPECT_EQ("192.168.1.1:1236", media_desc->connection_address().ToString()); candidates.push_back(candidate3); diff --git a/pc/jsep_transport.cc b/pc/jsep_transport.cc index c7d41c8a4c..8f0a3979c4 100644 --- a/pc/jsep_transport.cc +++ b/pc/jsep_transport.cc @@ -15,33 +15,54 @@ #include #include +#include #include #include +#include #include "api/array_view.h" #include "api/candidate.h" +#include "api/ice_transport_interface.h" +#include "api/jsep.h" +#include "api/make_ref_counted.h" +#include "api/rtc_error.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include 
"call/payload_type_picker.h" +#include "media/sctp/sctp_transport_internal.h" +#include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/p2p_transport_channel.h" +#include "p2p/base/transport_description.h" +#include "pc/dtls_srtp_transport.h" +#include "pc/dtls_transport.h" +#include "pc/rtp_transport.h" +#include "pc/sctp_transport.h" +#include "pc/session_description.h" +#include "pc/srtp_transport.h" +#include "pc/transport_stats.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_fingerprint.h" +#include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/thread.h" #include "rtc_base/trace_event.h" using webrtc::SdpType; -namespace cricket { +namespace webrtc { JsepTransportDescription::JsepTransportDescription() {} JsepTransportDescription::JsepTransportDescription( bool rtcp_mux_enabled, - const std::vector& cryptos, const std::vector& encrypted_header_extension_ids, int rtp_abs_sendtime_extn_id, const TransportDescription& transport_desc) : rtcp_mux_enabled(rtcp_mux_enabled), - cryptos(cryptos), encrypted_header_extension_ids(encrypted_header_extension_ids), rtp_abs_sendtime_extn_id(rtp_abs_sendtime_extn_id), transport_desc(transport_desc) {} @@ -49,7 +70,6 @@ JsepTransportDescription::JsepTransportDescription( JsepTransportDescription::JsepTransportDescription( const JsepTransportDescription& from) : rtcp_mux_enabled(from.rtcp_mux_enabled), - cryptos(from.cryptos), encrypted_header_extension_ids(from.encrypted_header_extension_ids), rtp_abs_sendtime_extn_id(from.rtp_abs_sendtime_extn_id), transport_desc(from.transport_desc) {} @@ -62,7 +82,6 @@ JsepTransportDescription& JsepTransportDescription::operator=( return *this; } rtcp_mux_enabled = from.rtcp_mux_enabled; - cryptos = from.cryptos; encrypted_header_extension_ids = from.encrypted_header_extension_ids; rtp_abs_sendtime_extn_id = from.rtp_abs_sendtime_extn_id; transport_desc = from.transport_desc; @@ -72,17 +91,18 @@ JsepTransportDescription& JsepTransportDescription::operator=( JsepTransport::JsepTransport( const std::string& mid, - const rtc::scoped_refptr& local_certificate, - rtc::scoped_refptr ice_transport, - rtc::scoped_refptr rtcp_ice_transport, - std::unique_ptr unencrypted_rtp_transport, - std::unique_ptr sdes_transport, - std::unique_ptr dtls_srtp_transport, + const scoped_refptr& local_certificate, + scoped_refptr ice_transport, + scoped_refptr rtcp_ice_transport, + std::unique_ptr unencrypted_rtp_transport, + std::unique_ptr sdes_transport, + std::unique_ptr dtls_srtp_transport, std::unique_ptr rtp_dtls_transport, std::unique_ptr rtcp_dtls_transport, std::unique_ptr sctp_transport, - std::function rtcp_mux_active_callback) - : network_thread_(rtc::Thread::Current()), + std::function rtcp_mux_active_callback, + PayloadTypePicker& suggester) + : network_thread_(Thread::Current()), mid_(mid), local_certificate_(local_certificate), ice_transport_(std::move(ice_transport)), @@ -90,19 +110,22 @@ JsepTransport::JsepTransport( unencrypted_rtp_transport_(std::move(unencrypted_rtp_transport)), sdes_transport_(std::move(sdes_transport)), dtls_srtp_transport_(std::move(dtls_srtp_transport)), - rtp_dtls_transport_(rtp_dtls_transport - ? rtc::make_ref_counted( - std::move(rtp_dtls_transport)) - : nullptr), - rtcp_dtls_transport_(rtcp_dtls_transport - ? 
rtc::make_ref_counted( - std::move(rtcp_dtls_transport)) - : nullptr), + rtp_dtls_transport_( + rtp_dtls_transport + ? make_ref_counted(std::move(rtp_dtls_transport)) + : nullptr), + rtcp_dtls_transport_( + rtcp_dtls_transport + ? make_ref_counted(std::move(rtcp_dtls_transport)) + : nullptr), sctp_transport_(sctp_transport - ? rtc::make_ref_counted( - std::move(sctp_transport)) + ? make_ref_counted<::webrtc::SctpTransport>( + std::move(sctp_transport), + rtp_dtls_transport_) : nullptr), - rtcp_mux_active_callback_(std::move(rtcp_mux_active_callback)) { + rtcp_mux_active_callback_(std::move(rtcp_mux_active_callback)), + remote_payload_types_(suggester), + local_payload_types_(suggester) { TRACE_EVENT0("webrtc", "JsepTransport::JsepTransport"); RTC_DCHECK(ice_transport_); RTC_DCHECK(rtp_dtls_transport_); @@ -122,10 +145,6 @@ JsepTransport::JsepTransport( RTC_DCHECK(!unencrypted_rtp_transport); RTC_DCHECK(!sdes_transport); } - - if (sctp_transport_) { - sctp_transport_->SetDtlsTransport(rtp_dtls_transport_); - } } JsepTransport::~JsepTransport() { @@ -144,40 +163,29 @@ JsepTransport::~JsepTransport() { // ICE will be the last transport to be deleted. } -webrtc::RTCError JsepTransport::SetLocalJsepTransportDescription( +RTCError JsepTransport::SetLocalJsepTransportDescription( const JsepTransportDescription& jsep_description, SdpType type) { - webrtc::RTCError error; + RTCError error; TRACE_EVENT0("webrtc", "JsepTransport::SetLocalJsepTransportDescription"); RTC_DCHECK_RUN_ON(network_thread_); IceParameters ice_parameters = jsep_description.transport_desc.GetIceParameters(); - webrtc::RTCError ice_parameters_result = ice_parameters.Validate(); + RTCError ice_parameters_result = ice_parameters.Validate(); if (!ice_parameters_result.ok()) { - rtc::StringBuilder sb; + StringBuilder sb; sb << "Invalid ICE parameters: " << ice_parameters_result.message(); - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - sb.Release()); + return RTCError(RTCErrorType::INVALID_PARAMETER, sb.Release()); } if (!SetRtcpMux(jsep_description.rtcp_mux_enabled, type, ContentSource::CS_LOCAL)) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to setup RTCP mux."); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Failed to setup RTCP mux."); } - // If doing SDES, setup the SDES crypto parameters. 
- if (sdes_transport_) { - RTC_DCHECK(!unencrypted_rtp_transport_); - RTC_DCHECK(!dtls_srtp_transport_); - if (!SetSdes(jsep_description.cryptos, - jsep_description.encrypted_header_extension_ids, type, - ContentSource::CS_LOCAL)) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to setup SDES crypto parameters."); - } - } else if (dtls_srtp_transport_) { + if (dtls_srtp_transport_) { RTC_DCHECK(!unencrypted_rtp_transport_); RTC_DCHECK(!sdes_transport_); dtls_srtp_transport_->UpdateRecvEncryptedHeaderExtensionIds( @@ -190,7 +198,7 @@ webrtc::RTCError JsepTransport::SetLocalJsepTransportDescription( ice_parameters.ufrag, ice_parameters.pwd); local_description_.reset(new JsepTransportDescription(jsep_description)); - rtc::SSLFingerprint* local_fp = + SSLFingerprint* local_fp = local_description_->transport_desc.identity_fingerprint.get(); if (!local_fp) { @@ -226,47 +234,34 @@ webrtc::RTCError JsepTransport::SetLocalJsepTransportDescription( << mid(); } - return webrtc::RTCError::OK(); + return RTCError::OK(); } -webrtc::RTCError JsepTransport::SetRemoteJsepTransportDescription( +RTCError JsepTransport::SetRemoteJsepTransportDescription( const JsepTransportDescription& jsep_description, - webrtc::SdpType type) { + SdpType type) { TRACE_EVENT0("webrtc", "JsepTransport::SetLocalJsepTransportDescription"); - webrtc::RTCError error; + RTCError error; RTC_DCHECK_RUN_ON(network_thread_); IceParameters ice_parameters = jsep_description.transport_desc.GetIceParameters(); - webrtc::RTCError ice_parameters_result = ice_parameters.Validate(); + RTCError ice_parameters_result = ice_parameters.Validate(); if (!ice_parameters_result.ok()) { remote_description_.reset(); - rtc::StringBuilder sb; + StringBuilder sb; sb << "Invalid ICE parameters: " << ice_parameters_result.message(); - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - sb.Release()); + return RTCError(RTCErrorType::INVALID_PARAMETER, sb.Release()); } if (!SetRtcpMux(jsep_description.rtcp_mux_enabled, type, ContentSource::CS_REMOTE)) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to setup RTCP mux."); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Failed to setup RTCP mux."); } - // If doing SDES, setup the SDES crypto parameters. 
- if (sdes_transport_) { - RTC_DCHECK(!unencrypted_rtp_transport_); - RTC_DCHECK(!dtls_srtp_transport_); - if (!SetSdes(jsep_description.cryptos, - jsep_description.encrypted_header_extension_ids, type, - ContentSource::CS_REMOTE)) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Failed to setup SDES crypto parameters."); - } - sdes_transport_->CacheRtpAbsSendTimeHeaderExtension( - jsep_description.rtp_abs_sendtime_extn_id); - } else if (dtls_srtp_transport_) { + if (dtls_srtp_transport_) { RTC_DCHECK(!unencrypted_rtp_transport_); RTC_DCHECK(!sdes_transport_); dtls_srtp_transport_->UpdateSendEncryptedHeaderExtensionIds( @@ -292,35 +287,32 @@ webrtc::RTCError JsepTransport::SetRemoteJsepTransportDescription( remote_description_.reset(); return error; } - return webrtc::RTCError::OK(); + return RTCError::OK(); } -webrtc::RTCError JsepTransport::AddRemoteCandidates( - const Candidates& candidates) { +RTCError JsepTransport::AddRemoteCandidates(const Candidates& candidates) { RTC_DCHECK_RUN_ON(network_thread_); if (!local_description_ || !remote_description_) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_STATE, - mid() + - " is not ready to use the remote candidate " - "because the local or remote description is " - "not set."); - } - - for (const cricket::Candidate& candidate : candidates) { - auto transport = - candidate.component() == cricket::ICE_CANDIDATE_COMPONENT_RTP - ? rtp_dtls_transport_ - : rtcp_dtls_transport_; + return RTCError(RTCErrorType::INVALID_STATE, + mid() + + " is not ready to use the remote candidate " + "because the local or remote description is " + "not set."); + } + + for (const Candidate& candidate : candidates) { + auto transport = candidate.component() == ICE_CANDIDATE_COMPONENT_RTP + ? rtp_dtls_transport_ + : rtcp_dtls_transport_; if (!transport) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Candidate has an unknown component: " + - candidate.ToSensitiveString() + " for mid " + - mid()); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Candidate has an unknown component: " + + candidate.ToSensitiveString() + " for mid " + mid()); } RTC_DCHECK(transport->internal() && transport->internal()->ice_transport()); transport->internal()->ice_transport()->AddRemoteCandidate(candidate); } - return webrtc::RTCError::OK(); + return RTCError::OK(); } void JsepTransport::SetNeedsIceRestartFlag() { @@ -331,19 +323,19 @@ void JsepTransport::SetNeedsIceRestartFlag() { } } -absl::optional JsepTransport::GetDtlsRole() const { +std::optional JsepTransport::GetDtlsRole() const { RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(rtp_dtls_transport_); RTC_DCHECK(rtp_dtls_transport_->internal()); - rtc::SSLRole dtls_role; + SSLRole dtls_role; if (!rtp_dtls_transport_->internal()->GetDtlsRole(&dtls_role)) { - return absl::optional(); + return std::optional(); } - return absl::optional(dtls_role); + return std::optional(dtls_role); } -bool JsepTransport::GetStats(TransportStats* stats) { +bool JsepTransport::GetStats(TransportStats* stats) const { TRACE_EVENT0("webrtc", "JsepTransport::GetStats"); RTC_DCHECK_RUN_ON(network_thread_); stats->transport_name = mid(); @@ -360,33 +352,30 @@ bool JsepTransport::GetStats(TransportStats* stats) { return ret; } -webrtc::RTCError JsepTransport::VerifyCertificateFingerprint( - const rtc::RTCCertificate* certificate, - const rtc::SSLFingerprint* fingerprint) const { +RTCError JsepTransport::VerifyCertificateFingerprint( + const RTCCertificate* certificate, + const SSLFingerprint* 
fingerprint) const { TRACE_EVENT0("webrtc", "JsepTransport::VerifyCertificateFingerprint"); RTC_DCHECK_RUN_ON(network_thread_); if (!fingerprint) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "No fingerprint"); + return RTCError(RTCErrorType::INVALID_PARAMETER, "No fingerprint"); } if (!certificate) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - "Fingerprint provided but no identity available."); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Fingerprint provided but no identity available."); } - std::unique_ptr fp_tmp = - rtc::SSLFingerprint::CreateUnique(fingerprint->algorithm, - *certificate->identity()); + std::unique_ptr fp_tmp = SSLFingerprint::CreateUnique( + fingerprint->algorithm, *certificate->identity()); RTC_DCHECK(fp_tmp.get() != NULL); if (*fp_tmp == *fingerprint) { - return webrtc::RTCError::OK(); + return RTCError::OK(); } char ss_buf[1024]; - rtc::SimpleStringBuilder desc(ss_buf); + SimpleStringBuilder desc(ss_buf); desc << "Local fingerprint does not match identity. Expected: "; desc << fp_tmp->ToString(); desc << " Got: " << fingerprint->ToString(); - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER, - std::string(desc.str())); + return RTCError(RTCErrorType::INVALID_PARAMETER, std::string(desc.str())); } void JsepTransport::SetActiveResetSrtpParams(bool active_reset_srtp_params) { @@ -399,6 +388,34 @@ void JsepTransport::SetActiveResetSrtpParams(bool active_reset_srtp_params) { } } +RTCError JsepTransport::RecordPayloadTypes(bool local, + SdpType type, + const ContentInfo& content) { + RTC_DCHECK_RUN_ON(network_thread_); + if (local) { + local_payload_types_.DisallowRedefinition(); + } else { + remote_payload_types_.DisallowRedefinition(); + } + RTCError result = RTCError::OK(); + for (auto codec : content.media_description()->codecs()) { + if (local) { + result = local_payload_types_.AddMapping(codec.id, codec); + } else { + result = remote_payload_types_.AddMapping(codec.id, codec); + } + if (!result.ok()) { + break; + } + } + if (local) { + local_payload_types_.ReallowRedefinition(); + } else { + remote_payload_types_.ReallowRedefinition(); + } + return result; +} + void JsepTransport::SetRemoteIceParameters( const IceParameters& ice_parameters, IceTransportInternal* ice_transport) { @@ -410,10 +427,10 @@ void JsepTransport::SetRemoteIceParameters( ice_transport->SetRemoteIceMode(remote_description_->transport_desc.ice_mode); } -webrtc::RTCError JsepTransport::SetNegotiatedDtlsParameters( +RTCError JsepTransport::SetNegotiatedDtlsParameters( DtlsTransportInternal* dtls_transport, - absl::optional dtls_role, - rtc::SSLFingerprint* remote_fingerprint) { + std::optional dtls_role, + SSLFingerprint* remote_fingerprint) { RTC_DCHECK(dtls_transport); return dtls_transport->SetRemoteParameters( remote_fingerprint->algorithm, remote_fingerprint->digest.cdata(), @@ -421,7 +438,7 @@ webrtc::RTCError JsepTransport::SetNegotiatedDtlsParameters( } bool JsepTransport::SetRtcpMux(bool enable, - webrtc::SdpType type, + SdpType type, ContentSource source) { RTC_DCHECK_RUN_ON(network_thread_); bool ret = false; @@ -474,69 +491,24 @@ void JsepTransport::ActivateRtcpMux() { rtcp_mux_active_callback_(); } -bool JsepTransport::SetSdes(const std::vector& cryptos, - const std::vector& encrypted_extension_ids, - webrtc::SdpType type, - ContentSource source) { - RTC_DCHECK_RUN_ON(network_thread_); - bool ret = false; - ret = sdes_negotiator_.Process(cryptos, type, source); - if (!ret) { - return ret; - } - - if (source == 
ContentSource::CS_LOCAL) { - recv_extension_ids_ = encrypted_extension_ids; - } else { - send_extension_ids_ = encrypted_extension_ids; - } - - // If setting an SDES answer succeeded, apply the negotiated parameters - // to the SRTP transport. - if ((type == SdpType::kPrAnswer || type == SdpType::kAnswer) && ret) { - if (sdes_negotiator_.send_crypto_suite() && - sdes_negotiator_.recv_crypto_suite()) { - RTC_DCHECK(send_extension_ids_); - RTC_DCHECK(recv_extension_ids_); - ret = sdes_transport_->SetRtpParams( - *(sdes_negotiator_.send_crypto_suite()), - sdes_negotiator_.send_key().data(), - static_cast(sdes_negotiator_.send_key().size()), - *(send_extension_ids_), *(sdes_negotiator_.recv_crypto_suite()), - sdes_negotiator_.recv_key().data(), - static_cast(sdes_negotiator_.recv_key().size()), - *(recv_extension_ids_)); - } else { - RTC_LOG(LS_INFO) << "No crypto keys are provided for SDES."; - if (type == SdpType::kAnswer) { - // Explicitly reset the `sdes_transport_` if no crypto param is - // provided in the answer. No need to call `ResetParams()` for - // `sdes_negotiator_` because it resets the params inside `SetAnswer`. - sdes_transport_->ResetParams(); - } - } - } - return ret; -} - -webrtc::RTCError JsepTransport::NegotiateAndSetDtlsParameters( +RTCError JsepTransport::NegotiateAndSetDtlsParameters( SdpType local_description_type) { RTC_DCHECK_RUN_ON(network_thread_); if (!local_description_ || !remote_description_) { - return webrtc::RTCError(webrtc::RTCErrorType::INVALID_STATE, - "Applying an answer transport description " - "without applying any offer."); + return RTCError(RTCErrorType::INVALID_STATE, + "Applying an answer transport description " + "without applying any offer."); } - std::unique_ptr remote_fingerprint; - absl::optional negotiated_dtls_role; + std::unique_ptr remote_fingerprint; + std::optional negotiated_dtls_role; - rtc::SSLFingerprint* local_fp = + SSLFingerprint* local_fp = local_description_->transport_desc.identity_fingerprint.get(); - rtc::SSLFingerprint* remote_fp = + SSLFingerprint* remote_fp = remote_description_->transport_desc.identity_fingerprint.get(); if (remote_fp && local_fp) { - remote_fingerprint = std::make_unique(*remote_fp); - webrtc::RTCError error = + remote_fingerprint = std::make_unique(*remote_fp); + RTCError error = NegotiateDtlsRole(local_description_type, local_description_->transport_desc.connection_role, remote_description_->transport_desc.connection_role, @@ -545,13 +517,13 @@ webrtc::RTCError JsepTransport::NegotiateAndSetDtlsParameters( return error; } } else if (local_fp && (local_description_type == SdpType::kAnswer)) { - return webrtc::RTCError( - webrtc::RTCErrorType::INVALID_PARAMETER, + return RTCError( + RTCErrorType::INVALID_PARAMETER, "Local fingerprint supplied when caller didn't offer DTLS."); } else { // We are not doing DTLS - remote_fingerprint = std::make_unique( - "", rtc::ArrayView()); + remote_fingerprint = + std::make_unique("", ArrayView()); } // Now that we have negotiated everything, push it downward. // Note that we cache the result so that if we have race conditions @@ -559,7 +531,7 @@ webrtc::RTCError JsepTransport::NegotiateAndSetDtlsParameters( // creation, we have the negotiation state saved until a new // negotiation happens. 
RTC_DCHECK(rtp_dtls_transport()); - webrtc::RTCError error = SetNegotiatedDtlsParameters( + RTCError error = SetNegotiatedDtlsParameters( rtp_dtls_transport(), negotiated_dtls_role, remote_fingerprint.get()); if (!error.ok()) { return error; @@ -572,11 +544,11 @@ webrtc::RTCError JsepTransport::NegotiateAndSetDtlsParameters( return error; } -webrtc::RTCError JsepTransport::NegotiateDtlsRole( +RTCError JsepTransport::NegotiateDtlsRole( SdpType local_description_type, ConnectionRole local_connection_role, ConnectionRole remote_connection_role, - absl::optional* negotiated_dtls_role) { + std::optional* negotiated_dtls_role) { // From RFC 4145, section-4.1, The following are the values that the // 'setup' attribute can take in an offer/answer exchange: // Offer Answer @@ -606,9 +578,8 @@ webrtc::RTCError JsepTransport::NegotiateDtlsRole( bool is_remote_server = false; if (local_description_type == SdpType::kOffer) { if (local_connection_role != CONNECTIONROLE_ACTPASS) { - return webrtc::RTCError( - webrtc::RTCErrorType::INVALID_PARAMETER, - "Offerer must use actpass value for setup attribute."); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Offerer must use actpass value for setup attribute."); } if (remote_connection_role == CONNECTIONROLE_ACTIVE || @@ -616,10 +587,9 @@ webrtc::RTCError JsepTransport::NegotiateDtlsRole( remote_connection_role == CONNECTIONROLE_NONE) { is_remote_server = (remote_connection_role == CONNECTIONROLE_PASSIVE); } else { - return webrtc::RTCError( - webrtc::RTCErrorType::INVALID_PARAMETER, - "Answerer must use either active or passive value " - "for setup attribute."); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Answerer must use either active or passive value " + "for setup attribute."); } // If remote is NONE or ACTIVE it will act as client. 
} else { @@ -638,15 +608,15 @@ webrtc::RTCError JsepTransport::NegotiateDtlsRole( switch (remote_connection_role) { case CONNECTIONROLE_ACTIVE: if (local_connection_role != CONNECTIONROLE_PASSIVE) { - return webrtc::RTCError( - webrtc::RTCErrorType::INVALID_PARAMETER, + return RTCError( + RTCErrorType::INVALID_PARAMETER, "Answerer must be passive when offerer is active"); } break; case CONNECTIONROLE_PASSIVE: if (local_connection_role != CONNECTIONROLE_ACTIVE) { - return webrtc::RTCError( - webrtc::RTCErrorType::INVALID_PARAMETER, + return RTCError( + RTCErrorType::INVALID_PARAMETER, "Answerer must be active when offerer is passive"); } break; @@ -655,14 +625,13 @@ webrtc::RTCError JsepTransport::NegotiateDtlsRole( break; } } else { - if ((*current_dtls_role == rtc::SSL_CLIENT && + if ((*current_dtls_role == webrtc::SSL_CLIENT && remote_connection_role == CONNECTIONROLE_ACTIVE) || - (*current_dtls_role == rtc::SSL_SERVER && + (*current_dtls_role == webrtc::SSL_SERVER && remote_connection_role == CONNECTIONROLE_PASSIVE)) { - return webrtc::RTCError( - webrtc::RTCErrorType::INVALID_PARAMETER, - "Offerer must use current negotiated role for " - "setup attribute."); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Offerer must use current negotiated role for " + "setup attribute."); } } } @@ -671,23 +640,22 @@ webrtc::RTCError JsepTransport::NegotiateDtlsRole( local_connection_role == CONNECTIONROLE_PASSIVE) { is_remote_server = (local_connection_role == CONNECTIONROLE_ACTIVE); } else { - return webrtc::RTCError( - webrtc::RTCErrorType::INVALID_PARAMETER, - "Answerer must use either active or passive value " - "for setup attribute."); + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Answerer must use either active or passive value " + "for setup attribute."); } // If local is passive, local will act as server. } *negotiated_dtls_role = - (is_remote_server ? rtc::SSL_CLIENT : rtc::SSL_SERVER); - return webrtc::RTCError::OK(); + (is_remote_server ? 
webrtc::SSL_CLIENT : webrtc::SSL_SERVER); + return RTCError::OK(); } bool JsepTransport::GetTransportStats(DtlsTransportInternal* dtls_transport, int component, - TransportStats* stats) { + TransportStats* stats) const { RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(dtls_transport); TransportChannelStats substats; @@ -695,8 +663,9 @@ bool JsepTransport::GetTransportStats(DtlsTransportInternal* dtls_transport, dtls_transport->GetSslVersionBytes(&substats.ssl_version_bytes); dtls_transport->GetSrtpCryptoSuite(&substats.srtp_crypto_suite); dtls_transport->GetSslCipherSuite(&substats.ssl_cipher_suite); + substats.tls_cipher_suite_name = dtls_transport->GetTlsCipherSuiteName(); substats.dtls_state = dtls_transport->dtls_state(); - rtc::SSLRole dtls_role; + SSLRole dtls_role; if (dtls_transport->GetDtlsRole(&dtls_role)) { substats.dtls_role = dtls_role; } @@ -704,8 +673,10 @@ bool JsepTransport::GetTransportStats(DtlsTransportInternal* dtls_transport, &substats.ice_transport_stats)) { return false; } + substats.ssl_peer_signature_algorithm = + dtls_transport->GetSslPeerSignatureAlgorithm(); stats->channel_stats.push_back(substats); return true; } -} // namespace cricket +} // namespace webrtc diff --git a/pc/jsep_transport.h b/pc/jsep_transport.h index f2643070a1..2840cc2461 100644 --- a/pc/jsep_transport.h +++ b/pc/jsep_transport.h @@ -12,27 +12,22 @@ #define PC_JSEP_TRANSPORT_H_ #include -#include #include +#include #include #include -#include "absl/types/optional.h" -#include "api/candidate.h" -#include "api/crypto_params.h" #include "api/ice_transport_interface.h" #include "api/jsep.h" #include "api/rtc_error.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/transport/data_channel_transport_interface.h" +#include "call/payload_type_picker.h" #include "media/sctp/sctp_transport_internal.h" -#include "p2p/base/dtls_transport.h" -#include "p2p/base/dtls_transport_internal.h" #include "p2p/base/ice_transport_internal.h" -#include "p2p/base/p2p_constants.h" #include "p2p/base/transport_description.h" -#include "p2p/base/transport_info.h" +#include "p2p/dtls/dtls_transport_internal.h" #include "pc/dtls_srtp_transport.h" #include "pc/dtls_transport.h" #include "pc/rtcp_mux_filter.h" @@ -40,26 +35,21 @@ #include "pc/rtp_transport_internal.h" #include "pc/sctp_transport.h" #include "pc/session_description.h" -#include "pc/srtp_filter.h" #include "pc/srtp_transport.h" #include "pc/transport_stats.h" -#include "rtc_base/checks.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/ssl_fingerprint.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" -namespace cricket { - -class DtlsTransportInternal; +namespace webrtc { struct JsepTransportDescription { public: JsepTransportDescription(); JsepTransportDescription( bool rtcp_mux_enabled, - const std::vector& cryptos, const std::vector& encrypted_header_extension_ids, int rtp_abs_sendtime_extn_id, const TransportDescription& transport_description); @@ -69,7 +59,6 @@ struct JsepTransportDescription { JsepTransportDescription& operator=(const JsepTransportDescription& from); bool rtcp_mux_enabled = true; - std::vector cryptos; std::vector encrypted_header_extension_ids; int rtp_abs_sendtime_extn_id = -1; // TODO(zhihuang): Add the ICE and DTLS related variables and methods from @@ -90,18 +79,18 @@ class JsepTransport { // `mid` is just used for log statements in order to identify the Transport. 
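The hunks above rework NegotiateDtlsRole, which maps the SDP 'setup' attribute (RFC 4145, section 4.1) onto an SSL role: the offerer must advertise 'actpass', the answerer picks 'active' or 'passive', and whichever side ends up 'active' becomes the DTLS client. A minimal standalone sketch of that decision table, using simplified enums rather than the real WebRTC types and omitting the re-offer consistency checks (illustrative only, not part of the patch):

#include <cassert>
#include <optional>

// Simplified stand-ins for the real connection-role and SSL-role enums.
enum class ConnectionRole { kActpass, kActive, kPassive, kNone };
enum class SslRole { kClient, kServer };

// Returns the negotiated SSL role for the local side, or std::nullopt if the
// setup attributes are not a valid RFC 4145 offer/answer combination.
std::optional<SslRole> NegotiateRole(bool local_description_is_offer,
                                     ConnectionRole local_role,
                                     ConnectionRole remote_role) {
  bool remote_is_server = false;
  if (local_description_is_offer) {
    // The offerer must use 'actpass'; the answerer then chooses.
    if (local_role != ConnectionRole::kActpass) return std::nullopt;
    if (remote_role != ConnectionRole::kActive &&
        remote_role != ConnectionRole::kPassive &&
        remote_role != ConnectionRole::kNone) {
      return std::nullopt;
    }
    // A 'passive' answerer acts as the DTLS server; 'active' or unspecified
    // means the remote will connect to us.
    remote_is_server = (remote_role == ConnectionRole::kPassive);
  } else {
    // We are answering: if we chose 'active' we connect out, so the remote
    // is the server; 'passive' means we are the server.
    if (local_role != ConnectionRole::kActive &&
        local_role != ConnectionRole::kPassive) {
      return std::nullopt;
    }
    remote_is_server = (local_role == ConnectionRole::kActive);
  }
  return remote_is_server ? SslRole::kClient : SslRole::kServer;
}

int main() {
  // Offer used setup:actpass, answer chose setup:active, so the remote is the
  // DTLS client and the offerer ends up as the server.
  auto role = NegotiateRole(/*local_description_is_offer=*/true,
                            ConnectionRole::kActpass, ConnectionRole::kActive);
  assert(role.has_value() && *role == SslRole::kServer);
  return 0;
}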
// Note that `local_certificate` is allowed to be null since a remote // description may be set before a local certificate is generated. - JsepTransport( - const std::string& mid, - const rtc::scoped_refptr& local_certificate, - rtc::scoped_refptr ice_transport, - rtc::scoped_refptr rtcp_ice_transport, - std::unique_ptr unencrypted_rtp_transport, - std::unique_ptr sdes_transport, - std::unique_ptr dtls_srtp_transport, - std::unique_ptr rtp_dtls_transport, - std::unique_ptr rtcp_dtls_transport, - std::unique_ptr sctp_transport, - std::function rtcp_mux_active_callback); + JsepTransport(const std::string& mid, + const scoped_refptr& local_certificate, + scoped_refptr ice_transport, + scoped_refptr rtcp_ice_transport, + std::unique_ptr unencrypted_rtp_transport, + std::unique_ptr sdes_transport, + std::unique_ptr dtls_srtp_transport, + std::unique_ptr rtp_dtls_transport, + std::unique_ptr rtcp_dtls_transport, + std::unique_ptr sctp_transport, + std::function rtcp_mux_active_callback, + PayloadTypePicker& suggester); ~JsepTransport(); @@ -114,27 +103,27 @@ class JsepTransport { // Must be called before applying local session description. // Needed in order to verify the local fingerprint. void SetLocalCertificate( - const rtc::scoped_refptr& local_certificate) { + const scoped_refptr& local_certificate) { RTC_DCHECK_RUN_ON(network_thread_); local_certificate_ = local_certificate; } // Return the local certificate provided by SetLocalCertificate. - rtc::scoped_refptr GetLocalCertificate() const { + scoped_refptr GetLocalCertificate() const { RTC_DCHECK_RUN_ON(network_thread_); return local_certificate_; } - webrtc::RTCError SetLocalJsepTransportDescription( + RTCError SetLocalJsepTransportDescription( const JsepTransportDescription& jsep_description, - webrtc::SdpType type); + SdpType type); // Set the remote TransportDescription to be used by DTLS and ICE channels // that are part of this Transport. - webrtc::RTCError SetRemoteJsepTransportDescription( + RTCError SetRemoteJsepTransportDescription( const JsepTransportDescription& jsep_description, - webrtc::SdpType type); - webrtc::RTCError AddRemoteCandidates(const Candidates& candidates); + SdpType type); + RTCError AddRemoteCandidates(const Candidates& candidates); // Set the "needs-ice-restart" flag as described in JSEP. After the flag is // set, offers should generate new ufrags/passwords until an ICE restart @@ -151,12 +140,11 @@ class JsepTransport { return needs_ice_restart_; } - // Returns role if negotiated, or empty absl::optional if it hasn't been + // Returns role if negotiated, or empty std::optional if it hasn't been // negotiated yet. - absl::optional GetDtlsRole() const; + std::optional GetDtlsRole() const; - // TODO(deadbeef): Make this const. See comment in transportcontroller.h. - bool GetStats(TransportStats* stats); + bool GetStats(TransportStats* stats) const; const JsepTransportDescription* local_description() const { RTC_DCHECK_RUN_ON(network_thread_); @@ -169,7 +157,7 @@ class JsepTransport { } // Returns the rtp transport, if any. 
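A large share of this header churn is the absl::optional to std::optional migration; accessors such as GetDtlsRole() now return an empty std::optional until negotiation has completed. A small standalone illustration of that accessor shape, with simplified types rather than the real API (illustrative only):

#include <cstdio>
#include <optional>

enum class SslRole { kClient, kServer };

class TransportLike {
 public:
  // Empty until an answer has been applied and the role negotiated.
  std::optional<SslRole> GetDtlsRole() const { return role_; }
  void SetNegotiatedRole(SslRole role) { role_ = role; }

 private:
  std::optional<SslRole> role_;
};

int main() {
  TransportLike t;
  if (!t.GetDtlsRole().has_value()) {
    std::puts("role not negotiated yet");
  }
  t.SetNegotiatedRole(SslRole::kClient);
  std::printf("negotiated role: %s\n",
              *t.GetDtlsRole() == SslRole::kClient ? "client" : "server");
  return 0;
}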
- webrtc::RtpTransportInternal* rtp_transport() const { + RtpTransportInternal* rtp_transport() const { if (dtls_srtp_transport_) { return dtls_srtp_transport_.get(); } @@ -212,17 +200,15 @@ class JsepTransport { return nullptr; } - rtc::scoped_refptr RtpDtlsTransport() { + scoped_refptr RtpDtlsTransport() { return rtp_dtls_transport_; } - rtc::scoped_refptr SctpTransport() const { - return sctp_transport_; - } + scoped_refptr SctpTransport() const { return sctp_transport_; } // TODO(bugs.webrtc.org/9719): Delete method, update callers to use // SctpTransport() instead. - webrtc::DataChannelTransportInterface* data_channel_transport() const { + DataChannelTransportInterface* data_channel_transport() const { return sctp_transport_.get(); } @@ -232,59 +218,71 @@ class JsepTransport { // Returns an error if the certificate's identity does not match the // fingerprint, or either is NULL. - webrtc::RTCError VerifyCertificateFingerprint( - const rtc::RTCCertificate* certificate, - const rtc::SSLFingerprint* fingerprint) const; + RTCError VerifyCertificateFingerprint( + const RTCCertificate* certificate, + const SSLFingerprint* fingerprint) const; void SetActiveResetSrtpParams(bool active_reset_srtp_params); + // Record the PT mappings from a single media section. + // This is used to store info needed when generating subsequent SDP. + RTCError RecordPayloadTypes(bool local, + SdpType type, + const ContentInfo& content); + + const PayloadTypeRecorder& remote_payload_types() const { + return remote_payload_types_; + } + const PayloadTypeRecorder& local_payload_types() const { + return local_payload_types_; + } + PayloadTypeRecorder& local_payload_types() { return local_payload_types_; } + void CommitPayloadTypes() { + RTC_DCHECK_RUN_ON(network_thread_); + local_payload_types_.Commit(); + remote_payload_types_.Commit(); + } + private: - bool SetRtcpMux(bool enable, webrtc::SdpType type, ContentSource source); + bool SetRtcpMux(bool enable, SdpType type, ContentSource source); void ActivateRtcpMux() RTC_RUN_ON(network_thread_); - bool SetSdes(const std::vector& cryptos, - const std::vector& encrypted_extension_ids, - webrtc::SdpType type, - ContentSource source); - // Negotiates and sets the DTLS parameters based on the current local and // remote transport description, such as the DTLS role to use, and whether // DTLS should be activated. // // Called when an answer TransportDescription is applied. - webrtc::RTCError NegotiateAndSetDtlsParameters( - webrtc::SdpType local_description_type); + RTCError NegotiateAndSetDtlsParameters(SdpType local_description_type); // Negotiates the DTLS role based off the offer and answer as specified by // RFC 4145, section-4.1. Returns an RTCError if role cannot be determined // from the local description and remote description. - webrtc::RTCError NegotiateDtlsRole( - webrtc::SdpType local_description_type, - ConnectionRole local_connection_role, - ConnectionRole remote_connection_role, - absl::optional* negotiated_dtls_role); + RTCError NegotiateDtlsRole(SdpType local_description_type, + ConnectionRole local_connection_role, + ConnectionRole remote_connection_role, + std::optional* negotiated_dtls_role); // Pushes down the ICE parameters from the remote description. void SetRemoteIceParameters(const IceParameters& ice_parameters, IceTransportInternal* ice); // Pushes down the DTLS parameters obtained via negotiation. 
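The class keeps at most one of three RTP transport flavours (DTLS-SRTP, SDES, or unencrypted), and rtp_transport() simply returns whichever one is populated, avoiding any downcasting. A standalone sketch of that "exactly one owner is set" accessor, with placeholder types (illustrative only, not the real classes):

#include <cassert>
#include <memory>

// Placeholder base and flavours standing in for the real transport classes.
struct RtpTransportBase { virtual ~RtpTransportBase() = default; };
struct DtlsSrtpFlavour : RtpTransportBase {};
struct SdesFlavour : RtpTransportBase {};
struct UnencryptedFlavour : RtpTransportBase {};

class TransportHolder {
 public:
  explicit TransportHolder(std::unique_ptr<DtlsSrtpFlavour> dtls_srtp)
      : dtls_srtp_(std::move(dtls_srtp)) {}

  // Returns the one transport that exists, without any downcasting.
  RtpTransportBase* rtp_transport() const {
    if (dtls_srtp_) return dtls_srtp_.get();
    if (sdes_) return sdes_.get();
    if (unencrypted_) return unencrypted_.get();
    return nullptr;
  }

 private:
  // Only one of these is ever non-null for a given instance.
  std::unique_ptr<UnencryptedFlavour> unencrypted_;
  std::unique_ptr<SdesFlavour> sdes_;
  std::unique_ptr<DtlsSrtpFlavour> dtls_srtp_;
};

int main() {
  TransportHolder holder(std::make_unique<DtlsSrtpFlavour>());
  assert(holder.rtp_transport() != nullptr);
  return 0;
}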
- static webrtc::RTCError SetNegotiatedDtlsParameters( + static RTCError SetNegotiatedDtlsParameters( DtlsTransportInternal* dtls_transport, - absl::optional dtls_role, - rtc::SSLFingerprint* remote_fingerprint); + std::optional dtls_role, + SSLFingerprint* remote_fingerprint); bool GetTransportStats(DtlsTransportInternal* dtls_transport, int component, - TransportStats* stats); + TransportStats* stats) const; // Owning thread, for safety checks - const rtc::Thread* const network_thread_; + const Thread* const network_thread_; const std::string mid_; // needs-ice-restart bit as described in JSEP. bool needs_ice_restart_ RTC_GUARDED_BY(network_thread_) = false; - rtc::scoped_refptr local_certificate_ + scoped_refptr local_certificate_ RTC_GUARDED_BY(network_thread_); std::unique_ptr local_description_ RTC_GUARDED_BY(network_thread_); @@ -293,38 +291,51 @@ class JsepTransport { // Ice transport which may be used by any of upper-layer transports (below). // Owned by JsepTransport and guaranteed to outlive the transports below. - const rtc::scoped_refptr ice_transport_; - const rtc::scoped_refptr rtcp_ice_transport_; + const scoped_refptr ice_transport_; + const scoped_refptr rtcp_ice_transport_; // To avoid downcasting and make it type safe, keep three unique pointers for // different SRTP mode and only one of these is non-nullptr. - const std::unique_ptr unencrypted_rtp_transport_; - const std::unique_ptr sdes_transport_; - const std::unique_ptr dtls_srtp_transport_; + const std::unique_ptr unencrypted_rtp_transport_; + const std::unique_ptr sdes_transport_; + const std::unique_ptr dtls_srtp_transport_; - const rtc::scoped_refptr rtp_dtls_transport_; + const scoped_refptr rtp_dtls_transport_; // The RTCP transport is const for all usages, except that it is cleared // when RTCP multiplexing is turned on; this happens on the network thread. - rtc::scoped_refptr rtcp_dtls_transport_ + scoped_refptr rtcp_dtls_transport_ RTC_GUARDED_BY(network_thread_); - const rtc::scoped_refptr sctp_transport_; + const scoped_refptr<::webrtc::SctpTransport> sctp_transport_; - SrtpFilter sdes_negotiator_ RTC_GUARDED_BY(network_thread_); RtcpMuxFilter rtcp_mux_negotiator_ RTC_GUARDED_BY(network_thread_); // Cache the encrypted header extension IDs for SDES negoitation. - absl::optional> send_extension_ids_ + std::optional> send_extension_ids_ RTC_GUARDED_BY(network_thread_); - absl::optional> recv_extension_ids_ + std::optional> recv_extension_ids_ RTC_GUARDED_BY(network_thread_); // This is invoked when RTCP-mux becomes active and // `rtcp_dtls_transport_` is destroyed. The JsepTransportController will // receive the callback and update the aggregate transport states. std::function rtcp_mux_active_callback_; + + // Assigned PTs from the remote description, used when sending. + PayloadTypeRecorder remote_payload_types_ RTC_GUARDED_BY(network_thread_); + // Assigned PTs from the local description, used when receiving. + PayloadTypeRecorder local_payload_types_ RTC_GUARDED_BY(network_thread_); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
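As the member comments above describe, the dedicated RTCP DTLS transport is dropped once RTCP multiplexing becomes active, and an injected std::function lets the owning controller update its aggregate state. A minimal standalone sketch of that ownership/callback arrangement, with placeholder names (illustrative only):

#include <cstdio>
#include <functional>
#include <memory>
#include <utility>

struct FakeDtlsTransport {
  ~FakeDtlsTransport() { std::puts("rtcp transport destroyed"); }
};

class MuxingTransport {
 public:
  MuxingTransport(std::unique_ptr<FakeDtlsTransport> rtcp_transport,
                  std::function<void()> rtcp_mux_active_callback)
      : rtcp_transport_(std::move(rtcp_transport)),
        rtcp_mux_active_callback_(std::move(rtcp_mux_active_callback)) {}

  // Called once both sides have agreed on rtcp-mux: the dedicated RTCP
  // transport is no longer needed, so release it and notify the owner.
  void ActivateRtcpMux() {
    rtcp_transport_.reset();
    if (rtcp_mux_active_callback_) rtcp_mux_active_callback_();
  }

 private:
  std::unique_ptr<FakeDtlsTransport> rtcp_transport_;
  std::function<void()> rtcp_mux_active_callback_;
};

int main() {
  MuxingTransport transport(
      std::make_unique<FakeDtlsTransport>(),
      [] { std::puts("controller updates aggregate transport state"); });
  transport.ActivateRtcpMux();
  return 0;
}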
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::JsepTransport; +using ::webrtc::JsepTransportDescription; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // PC_JSEP_TRANSPORT_H_ diff --git a/pc/jsep_transport_collection.cc b/pc/jsep_transport_collection.cc index b50d303d77..c773b670bd 100644 --- a/pc/jsep_transport_collection.cc +++ b/pc/jsep_transport_collection.cc @@ -12,16 +12,25 @@ #include #include +#include #include -#include +#include #include +#include +#include "absl/strings/string_view.h" +#include "api/jsep.h" +#include "api/peer_connection_interface.h" +#include "api/sequence_checker.h" #include "p2p/base/p2p_constants.h" +#include "pc/jsep_transport.h" +#include "pc/session_description.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { -void BundleManager::Update(const cricket::SessionDescription* description, +void BundleManager::Update(const SessionDescription* description, SdpType type) { RTC_DCHECK_RUN_ON(&sequence_checker_); // Rollbacks should call Rollback, not Update. @@ -37,10 +46,10 @@ void BundleManager::Update(const cricket::SessionDescription* description, // groups. bundle_groups_changed = true; bundle_groups_.clear(); - for (const cricket::ContentGroup* new_bundle_group : - description->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE)) { + for (const ContentGroup* new_bundle_group : + description->GetGroupsByName(GROUP_TYPE_BUNDLE)) { bundle_groups_.push_back( - std::make_unique(*new_bundle_group)); + std::make_unique(*new_bundle_group)); RTC_DLOG(LS_VERBOSE) << "Establishing bundle group " << new_bundle_group->ToString(); } @@ -51,8 +60,8 @@ void BundleManager::Update(const cricket::SessionDescription* description, // Thus any m= sections added to a BUNDLE group in this offer can // preemptively start using the bundled transport, as there is no possible // non-bundled fallback. - for (const cricket::ContentGroup* new_bundle_group : - description->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE)) { + for (const ContentGroup* new_bundle_group : + description->GetGroupsByName(GROUP_TYPE_BUNDLE)) { // Attempt to find a matching existing group. for (const std::string& mid : new_bundle_group->content_names()) { auto it = established_bundle_groups_by_mid_.find(mid); @@ -71,7 +80,7 @@ void BundleManager::Update(const cricket::SessionDescription* description, } } -const cricket::ContentGroup* BundleManager::LookupGroupByMid( +const ContentGroup* BundleManager::LookupGroupByMid( const std::string& mid) const { auto it = established_bundle_groups_by_mid_.find(mid); return it != established_bundle_groups_by_mid_.end() ? it->second : nullptr; @@ -84,12 +93,12 @@ bool BundleManager::IsFirstMidInGroup(const std::string& mid) const { return mid == *(group->FirstContentName()); } -cricket::ContentGroup* BundleManager::LookupGroupByMid(const std::string& mid) { +ContentGroup* BundleManager::LookupGroupByMid(const std::string& mid) { auto it = established_bundle_groups_by_mid_.find(mid); return it != established_bundle_groups_by_mid_.end() ? it->second : nullptr; } -void BundleManager::DeleteMid(const cricket::ContentGroup* bundle_group, +void BundleManager::DeleteMid(const ContentGroup* bundle_group, const std::string& mid) { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_LOG(LS_VERBOSE) << "Deleting mid " << mid << " from bundle group " @@ -97,26 +106,26 @@ void BundleManager::DeleteMid(const cricket::ContentGroup* bundle_group, // Remove the rejected content from the `bundle_group`. 
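BundleManager above keeps the groups as owning unique_ptrs plus a MID-indexed map of raw pointers, so LookupGroupByMid and IsFirstMidInGroup do not have to scan every group; on each offer the groups are deep-copied from the description and the index rebuilt. A standalone sketch of that pattern with a simplified group type (illustrative only):

#include <cassert>
#include <map>
#include <memory>
#include <string>
#include <vector>

// Simplified stand-in for ContentGroup: an ordered list of MIDs.
struct Group {
  std::vector<std::string> mids;
};

class GroupIndex {
 public:
  // Replaces all groups (deep copies, as when applying a new offer) and
  // rebuilds the mid -> group index.
  void Update(const std::vector<Group>& new_groups) {
    groups_.clear();
    by_mid_.clear();
    for (const Group& g : new_groups) {
      groups_.push_back(std::make_unique<Group>(g));
      for (const std::string& mid : g.mids) by_mid_[mid] = groups_.back().get();
    }
  }

  const Group* LookupGroupByMid(const std::string& mid) const {
    auto it = by_mid_.find(mid);
    return it == by_mid_.end() ? nullptr : it->second;
  }

  // True if `mid` leads its group, or is not bundled at all.
  bool IsFirstMidInGroup(const std::string& mid) const {
    const Group* g = LookupGroupByMid(mid);
    return g == nullptr || (!g->mids.empty() && g->mids.front() == mid);
  }

 private:
  std::vector<std::unique_ptr<Group>> groups_;  // Owners.
  std::map<std::string, Group*> by_mid_;        // Non-owning index.
};

int main() {
  GroupIndex index;
  index.Update({Group{{"audio", "video"}}});
  assert(index.IsFirstMidInGroup("audio"));
  assert(!index.IsFirstMidInGroup("video"));
  assert(index.IsFirstMidInGroup("data"));  // Not bundled.
  return 0;
}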
// The const pointer arg is used to identify the group, we verify // it before we use it to make a modification. - auto bundle_group_it = std::find_if( - bundle_groups_.begin(), bundle_groups_.end(), - [bundle_group](std::unique_ptr& group) { - return bundle_group == group.get(); - }); + auto bundle_group_it = + std::find_if(bundle_groups_.begin(), bundle_groups_.end(), + [bundle_group](std::unique_ptr& group) { + return bundle_group == group.get(); + }); RTC_DCHECK(bundle_group_it != bundle_groups_.end()); (*bundle_group_it)->RemoveContentName(mid); established_bundle_groups_by_mid_.erase( established_bundle_groups_by_mid_.find(mid)); } -void BundleManager::DeleteGroup(const cricket::ContentGroup* bundle_group) { +void BundleManager::DeleteGroup(const ContentGroup* bundle_group) { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DLOG(LS_VERBOSE) << "Deleting bundle group " << bundle_group->ToString(); - auto bundle_group_it = std::find_if( - bundle_groups_.begin(), bundle_groups_.end(), - [bundle_group](std::unique_ptr& group) { - return bundle_group == group.get(); - }); + auto bundle_group_it = + std::find_if(bundle_groups_.begin(), bundle_groups_.end(), + [bundle_group](std::unique_ptr& group) { + return bundle_group == group.get(); + }); RTC_DCHECK(bundle_group_it != bundle_groups_.end()); auto mid_list = (*bundle_group_it)->content_names(); for (const auto& content_name : mid_list) { @@ -129,8 +138,7 @@ void BundleManager::Rollback() { RTC_DCHECK_RUN_ON(&sequence_checker_); bundle_groups_.clear(); for (const auto& bundle_group : stable_bundle_groups_) { - bundle_groups_.push_back( - std::make_unique(*bundle_group)); + bundle_groups_.push_back(std::make_unique(*bundle_group)); } RefreshEstablishedBundleGroupsByMid(); } @@ -140,7 +148,7 @@ void BundleManager::Commit() { stable_bundle_groups_.clear(); for (const auto& bundle_group : bundle_groups_) { stable_bundle_groups_.push_back( - std::make_unique(*bundle_group)); + std::make_unique(*bundle_group)); } } @@ -155,31 +163,29 @@ void BundleManager::RefreshEstablishedBundleGroupsByMid() { void JsepTransportCollection::RegisterTransport( const std::string& mid, - std::unique_ptr transport) { + std::unique_ptr transport) { RTC_DCHECK_RUN_ON(&sequence_checker_); SetTransportForMid(mid, transport.get()); jsep_transports_by_name_[mid] = std::move(transport); RTC_DCHECK(IsConsistent()); } -std::vector JsepTransportCollection::Transports() { +std::vector JsepTransportCollection::Transports() { RTC_DCHECK_RUN_ON(&sequence_checker_); - std::vector result; + std::vector result; for (auto& kv : jsep_transports_by_name_) { result.push_back(kv.second.get()); } return result; } -std::vector -JsepTransportCollection::ActiveTransports() { +std::vector JsepTransportCollection::ActiveTransports() { RTC_DCHECK_RUN_ON(&sequence_checker_); - std::set transports; + std::set transports; for (const auto& kv : mid_to_transport_) { transports.insert(kv.second); } - return std::vector(transports.begin(), - transports.end()); + return std::vector(transports.begin(), transports.end()); } void JsepTransportCollection::DestroyAllTransports() { @@ -191,35 +197,35 @@ void JsepTransportCollection::DestroyAllTransports() { RTC_DCHECK(IsConsistent()); } -const cricket::JsepTransport* JsepTransportCollection::GetTransportByName( +const JsepTransport* JsepTransportCollection::GetTransportByName( const std::string& transport_name) const { RTC_DCHECK_RUN_ON(&sequence_checker_); auto it = jsep_transports_by_name_.find(transport_name); return (it == jsep_transports_by_name_.end()) ? 
nullptr : it->second.get(); } -cricket::JsepTransport* JsepTransportCollection::GetTransportByName( +JsepTransport* JsepTransportCollection::GetTransportByName( const std::string& transport_name) { RTC_DCHECK_RUN_ON(&sequence_checker_); auto it = jsep_transports_by_name_.find(transport_name); return (it == jsep_transports_by_name_.end()) ? nullptr : it->second.get(); } -cricket::JsepTransport* JsepTransportCollection::GetTransportForMid( +JsepTransport* JsepTransportCollection::GetTransportForMid( const std::string& mid) { RTC_DCHECK_RUN_ON(&sequence_checker_); auto it = mid_to_transport_.find(mid); return it == mid_to_transport_.end() ? nullptr : it->second; } -const cricket::JsepTransport* JsepTransportCollection::GetTransportForMid( +const JsepTransport* JsepTransportCollection::GetTransportForMid( const std::string& mid) const { RTC_DCHECK_RUN_ON(&sequence_checker_); auto it = mid_to_transport_.find(mid); return it == mid_to_transport_.end() ? nullptr : it->second; } -cricket::JsepTransport* JsepTransportCollection::GetTransportForMid( +JsepTransport* JsepTransportCollection::GetTransportForMid( absl::string_view mid) { RTC_DCHECK_RUN_ON(&sequence_checker_); // TODO(hta): should be a better way. @@ -227,7 +233,7 @@ cricket::JsepTransport* JsepTransportCollection::GetTransportForMid( return it == mid_to_transport_.end() ? nullptr : it->second; } -const cricket::JsepTransport* JsepTransportCollection::GetTransportForMid( +const JsepTransport* JsepTransportCollection::GetTransportForMid( absl::string_view mid) const { RTC_DCHECK_RUN_ON(&sequence_checker_); // TODO(hta): Should be a better way @@ -237,7 +243,7 @@ const cricket::JsepTransport* JsepTransportCollection::GetTransportForMid( bool JsepTransportCollection::SetTransportForMid( const std::string& mid, - cricket::JsepTransport* jsep_transport) { + JsepTransport* jsep_transport) { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DCHECK(jsep_transport); @@ -306,11 +312,14 @@ void JsepTransportCollection::CommitTransports() { RTC_DCHECK_RUN_ON(&sequence_checker_); stable_mid_to_transport_ = mid_to_transport_; DestroyUnusedTransports(); + for (auto& transport : jsep_transports_by_name_) { + transport.second->CommitPayloadTypes(); + } RTC_DCHECK(IsConsistent()); } bool JsepTransportCollection::TransportInUse( - cricket::JsepTransport* jsep_transport) const { + JsepTransport* jsep_transport) const { RTC_DCHECK_RUN_ON(&sequence_checker_); for (const auto& kv : mid_to_transport_) { if (kv.second == jsep_transport) { @@ -321,7 +330,7 @@ bool JsepTransportCollection::TransportInUse( } bool JsepTransportCollection::TransportNeededForRollback( - cricket::JsepTransport* jsep_transport) const { + JsepTransport* jsep_transport) const { RTC_DCHECK_RUN_ON(&sequence_checker_); for (const auto& kv : stable_mid_to_transport_) { if (kv.second == jsep_transport) { @@ -332,7 +341,7 @@ bool JsepTransportCollection::TransportNeededForRollback( } void JsepTransportCollection::MaybeDestroyJsepTransport( - cricket::JsepTransport* transport) { + JsepTransport* transport) { RTC_DCHECK_RUN_ON(&sequence_checker_); // Don't destroy the JsepTransport if there are still media sections referring // to it, or if it will be needed in case of rollback. 
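Both BundleManager and JsepTransportCollection follow the same commit/rollback discipline: mutate the working state while an offer/answer is pending, snapshot it into a "stable" copy on Commit, and restore from that copy on Rollback (CommitTransports above now also commits the per-transport payload-type bookkeeping). A standalone sketch of that snapshot idiom, with plain strings standing in for the real objects (illustrative only):

#include <cassert>
#include <map>
#include <string>

// Minimal stand-in: maps MIDs to transport names instead of real transports.
class MidMapping {
 public:
  void Set(const std::string& mid, const std::string& transport) {
    current_[mid] = transport;
  }

  // Accept the pending offer/answer: the working map becomes the new stable
  // state that a later rollback would return to.
  void Commit() { stable_ = current_; }

  // Abandon the pending offer/answer and return to the last stable state.
  void Rollback() { current_ = stable_; }

  const std::map<std::string, std::string>& current() const { return current_; }

 private:
  std::map<std::string, std::string> current_;
  std::map<std::string, std::string> stable_;
};

int main() {
  MidMapping mapping;
  mapping.Set("audio", "transport-a");
  mapping.Commit();
  mapping.Set("video", "transport-b");  // Applied by a pending offer.
  mapping.Rollback();                   // Offer withdrawn.
  assert(mapping.current().count("video") == 0);
  assert(mapping.current().at("audio") == "transport-a");
  return 0;
}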
diff --git a/pc/jsep_transport_collection.h b/pc/jsep_transport_collection.h index f5eba64e96..755d09f1ea 100644 --- a/pc/jsep_transport_collection.h +++ b/pc/jsep_transport_collection.h @@ -15,15 +15,14 @@ #include #include #include -#include #include +#include "absl/strings/string_view.h" #include "api/jsep.h" #include "api/peer_connection_interface.h" #include "api/sequence_checker.h" #include "pc/jsep_transport.h" #include "pc/session_description.h" -#include "rtc_base/checks.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" @@ -45,25 +44,23 @@ class BundleManager { public: explicit BundleManager(PeerConnectionInterface::BundlePolicy bundle_policy) : bundle_policy_(bundle_policy) {} - const std::vector>& bundle_groups() - const { + const std::vector>& bundle_groups() const { RTC_DCHECK_RUN_ON(&sequence_checker_); return bundle_groups_; } // Lookup a bundle group by a member mid name. - const cricket::ContentGroup* LookupGroupByMid(const std::string& mid) const; - cricket::ContentGroup* LookupGroupByMid(const std::string& mid); + const ContentGroup* LookupGroupByMid(const std::string& mid) const; + ContentGroup* LookupGroupByMid(const std::string& mid); // Returns true if the MID is the first item of a group, or if // the MID is not a member of a group. bool IsFirstMidInGroup(const std::string& mid) const; // Update the groups description. This completely replaces the group // description with the one from the SessionDescription. - void Update(const cricket::SessionDescription* description, SdpType type); + void Update(const SessionDescription* description, SdpType type); // Delete a MID from the group that contains it. - void DeleteMid(const cricket::ContentGroup* bundle_group, - const std::string& mid); + void DeleteMid(const ContentGroup* bundle_group, const std::string& mid); // Delete a group. - void DeleteGroup(const cricket::ContentGroup* bundle_group); + void DeleteGroup(const ContentGroup* bundle_group); // Roll back to previous stable state. void Rollback(); // Commit current bundle groups. @@ -76,12 +73,11 @@ class BundleManager { RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_{ SequenceChecker::kDetached}; PeerConnectionInterface::BundlePolicy bundle_policy_; - std::vector> bundle_groups_ + std::vector> bundle_groups_ RTC_GUARDED_BY(sequence_checker_); - std::vector> stable_bundle_groups_ + std::vector> stable_bundle_groups_ RTC_GUARDED_BY(sequence_checker_); - std::map - established_bundle_groups_by_mid_; + std::map established_bundle_groups_by_mid_; }; // This class keeps the mapping of MIDs to transports. @@ -90,35 +86,33 @@ class BundleManager { // the managers may merge. class JsepTransportCollection { public: - JsepTransportCollection(std::function - map_change_callback, - std::function state_change_callback) + JsepTransportCollection( + std::function map_change_callback, + std::function state_change_callback) : map_change_callback_(map_change_callback), state_change_callback_(state_change_callback) {} void RegisterTransport(const std::string& mid, - std::unique_ptr transport); + std::unique_ptr transport); // Returns all transports, including those not currently mapped to any MID // because they're being kept alive in case of rollback. - std::vector Transports(); + std::vector Transports(); // Only returns transports currently mapped to a MID. - std::vector ActiveTransports(); + std::vector ActiveTransports(); void DestroyAllTransports(); // Lookup a JsepTransport by the MID that was used to register it. 
- cricket::JsepTransport* GetTransportByName(const std::string& mid); - const cricket::JsepTransport* GetTransportByName( - const std::string& mid) const; + JsepTransport* GetTransportByName(const std::string& mid); + const JsepTransport* GetTransportByName(const std::string& mid) const; // Lookup a JsepTransport by any MID that refers to it. - cricket::JsepTransport* GetTransportForMid(const std::string& mid); - const cricket::JsepTransport* GetTransportForMid( - const std::string& mid) const; - cricket::JsepTransport* GetTransportForMid(absl::string_view mid); - const cricket::JsepTransport* GetTransportForMid(absl::string_view mid) const; + JsepTransport* GetTransportForMid(const std::string& mid); + const JsepTransport* GetTransportForMid(const std::string& mid) const; + JsepTransport* GetTransportForMid(absl::string_view mid); + const JsepTransport* GetTransportForMid(absl::string_view mid) const; // Set transport for a MID. This may destroy a transport if it is no // longer in use. bool SetTransportForMid(const std::string& mid, - cricket::JsepTransport* jsep_transport); + JsepTransport* jsep_transport); // Remove a transport for a MID. This may destroy a transport if it is // no longer in use. void RemoveTransportForMid(const std::string& mid); @@ -131,15 +125,15 @@ class JsepTransportCollection { private: // Returns true if any mid currently maps to this transport. - bool TransportInUse(cricket::JsepTransport* jsep_transport) const; + bool TransportInUse(JsepTransport* jsep_transport) const; // Returns true if any mid in the last stable mapping maps to this transport, // meaning it should be kept alive in case of rollback. - bool TransportNeededForRollback(cricket::JsepTransport* jsep_transport) const; + bool TransportNeededForRollback(JsepTransport* jsep_transport) const; // Destroy a transport if it's no longer in use. This includes whether it // will be needed in case of rollback. - void MaybeDestroyJsepTransport(cricket::JsepTransport* transport); + void MaybeDestroyJsepTransport(JsepTransport* transport); // Destroys all transports that are no longer in use. void DestroyUnusedTransports(); @@ -149,20 +143,20 @@ class JsepTransportCollection { RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_{ SequenceChecker::kDetached}; // This member owns the JSEP transports. - std::map> - jsep_transports_by_name_ RTC_GUARDED_BY(sequence_checker_); + std::map> jsep_transports_by_name_ + RTC_GUARDED_BY(sequence_checker_); // This keeps track of the mapping between media section // (BaseChannel/SctpTransport) and the JsepTransport underneath. - std::map mid_to_transport_ + std::map mid_to_transport_ RTC_GUARDED_BY(sequence_checker_); // A snapshot of mid_to_transport_ at the last stable state. Used for // rollback. - std::map stable_mid_to_transport_ + std::map stable_mid_to_transport_ RTC_GUARDED_BY(sequence_checker_); // Callback used to inform subscribers of altered transports. const std::function + webrtc::JsepTransport* transport)> map_change_callback_; // Callback used to inform subscribers of possibly altered state. 
const std::function state_change_callback_; diff --git a/pc/jsep_transport_controller.cc b/pc/jsep_transport_controller.cc index 792365b521..7fb6917c91 100644 --- a/pc/jsep_transport_controller.cc +++ b/pc/jsep_transport_controller.cc @@ -12,24 +12,59 @@ #include +#include #include +#include #include +#include #include -#include #include +#include #include "absl/algorithm/container.h" +#include "absl/strings/string_view.h" +#include "api/async_dns_resolver.h" +#include "api/candidate.h" #include "api/dtls_transport_interface.h" +#include "api/environment/environment.h" +#include "api/ice_transport_interface.h" +#include "api/jsep.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" #include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/transport/data_channel_transport_interface.h" #include "api/transport/enums.h" +#include "call/payload_type.h" +#include "call/payload_type_picker.h" +#include "media/base/codec.h" #include "media/sctp/sctp_transport_internal.h" -#include "p2p/base/dtls_transport.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" +#include "p2p/base/packet_transport_internal.h" #include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" +#include "p2p/base/transport_description.h" +#include "p2p/base/transport_info.h" +#include "p2p/dtls/dtls_transport.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "pc/dtls_srtp_transport.h" +#include "pc/dtls_transport.h" +#include "pc/jsep_transport.h" +#include "pc/rtp_transport.h" +#include "pc/rtp_transport_internal.h" +#include "pc/sctp_transport.h" +#include "pc/session_description.h" +#include "pc/srtp_transport.h" +#include "pc/transport_stats.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/thread.h" #include "rtc_base/trace_event.h" @@ -38,15 +73,18 @@ using webrtc::SdpType; namespace webrtc { JsepTransportController::JsepTransportController( - rtc::Thread* network_thread, - cricket::PortAllocator* port_allocator, + const Environment& env, + Thread* network_thread, + PortAllocator* port_allocator, AsyncDnsResolverFactoryInterface* async_dns_resolver_factory, + PayloadTypePicker& payload_type_picker, Config config) - : network_thread_(network_thread), + : env_(env), + network_thread_(network_thread), port_allocator_(port_allocator), async_dns_resolver_factory_(async_dns_resolver_factory), transports_( - [this](const std::string& mid, cricket::JsepTransport* transport) { + [this](const std::string& mid, JsepTransport* transport) { return OnTransportChanged(mid, transport); }, [this]() { @@ -55,16 +93,13 @@ JsepTransportController::JsepTransportController( }), config_(std::move(config)), active_reset_srtp_params_(config.active_reset_srtp_params), - bundles_(config.bundle_policy) { + bundles_(config.bundle_policy), + payload_type_picker_(payload_type_picker) { // The `transport_observer` is assumed to be non-null. 
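The controller entry points in the hunks that follow (SetLocalDescription, SuggestPayloadType, RollbackTransports) keep the guard "if not on the network thread, re-invoke via BlockingCall", now with explicit lambda captures instead of [=]. A toy sketch of that guard with a stand-in thread class (illustrative only; the real BlockingCall marshals the closure to another thread and blocks until it returns):

#include <cstdio>
#include <string>

// Toy stand-in for the network thread: here every call is "current", so
// BlockingCall just runs the closure inline.
class ToyThread {
 public:
  bool IsCurrent() const { return true; }
  template <typename Closure>
  auto BlockingCall(Closure&& closure) { return closure(); }
};

class ToyController {
 public:
  explicit ToyController(ToyThread* network_thread)
      : network_thread_(network_thread) {}

  bool SetLocalDescription(const std::string& type, const std::string& sdp) {
    if (!network_thread_->IsCurrent()) {
      // Explicit captures: only what the re-invocation actually needs.
      return network_thread_->BlockingCall(
          [this, &type, &sdp] { return SetLocalDescription(type, sdp); });
    }
    std::printf("applying %s description on the network thread\n",
                type.c_str());
    (void)sdp;
    return true;
  }

 private:
  ToyThread* const network_thread_;
};

int main() {
  ToyThread thread;
  ToyController controller(&thread);
  controller.SetLocalDescription("offer", "v=0 ...");
  return 0;
}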
RTC_DCHECK(config_.transport_observer); RTC_DCHECK(config_.rtcp_handler); RTC_DCHECK(config_.ice_transport_factory); RTC_DCHECK(config_.on_dtls_handshake_error_); - RTC_DCHECK(config_.field_trials); - if (port_allocator_) { - port_allocator_->SetIceTiebreaker(ice_tiebreaker_); - } } JsepTransportController::~JsepTransportController() { @@ -76,36 +111,44 @@ JsepTransportController::~JsepTransportController() { RTCError JsepTransportController::SetLocalDescription( SdpType type, - const cricket::SessionDescription* description) { + const SessionDescription* local_desc, + const SessionDescription* remote_desc) { + RTC_DCHECK(local_desc); TRACE_EVENT0("webrtc", "JsepTransportController::SetLocalDescription"); + if (!network_thread_->IsCurrent()) { - return network_thread_->BlockingCall( - [=] { return SetLocalDescription(type, description); }); + return network_thread_->BlockingCall([this, type, local_desc, remote_desc] { + return SetLocalDescription(type, local_desc, remote_desc); + }); } RTC_DCHECK_RUN_ON(network_thread_); + if (!initial_offerer_.has_value()) { initial_offerer_.emplace(type == SdpType::kOffer); if (*initial_offerer_) { - SetIceRole_n(cricket::ICEROLE_CONTROLLING); + SetIceRole_n(ICEROLE_CONTROLLING); } else { - SetIceRole_n(cricket::ICEROLE_CONTROLLED); + SetIceRole_n(ICEROLE_CONTROLLED); } } - return ApplyDescription_n(/*local=*/true, type, description); + return ApplyDescription_n(/*local=*/true, type, local_desc, remote_desc); } RTCError JsepTransportController::SetRemoteDescription( SdpType type, - const cricket::SessionDescription* description) { + const SessionDescription* local_desc, + const SessionDescription* remote_desc) { + RTC_DCHECK(remote_desc); TRACE_EVENT0("webrtc", "JsepTransportController::SetRemoteDescription"); if (!network_thread_->IsCurrent()) { - return network_thread_->BlockingCall( - [=] { return SetRemoteDescription(type, description); }); + return network_thread_->BlockingCall([this, type, local_desc, remote_desc] { + return SetRemoteDescription(type, local_desc, remote_desc); + }); } RTC_DCHECK_RUN_ON(network_thread_); - return ApplyDescription_n(/*local=*/false, type, description); + return ApplyDescription_n(/*local=*/false, type, local_desc, remote_desc); } RtpTransportInternal* JsepTransportController::GetRtpTransport( @@ -128,7 +171,7 @@ DataChannelTransportInterface* JsepTransportController::GetDataChannelTransport( return jsep_transport->data_channel_transport(); } -cricket::DtlsTransportInternal* JsepTransportController::GetDtlsTransport( +DtlsTransportInternal* JsepTransportController::GetDtlsTransport( const std::string& mid) { RTC_DCHECK_RUN_ON(network_thread_); auto jsep_transport = GetJsepTransportForMid(mid); @@ -138,8 +181,8 @@ cricket::DtlsTransportInternal* JsepTransportController::GetDtlsTransport( return jsep_transport->rtp_dtls_transport(); } -const cricket::DtlsTransportInternal* -JsepTransportController::GetRtcpDtlsTransport(const std::string& mid) const { +const DtlsTransportInternal* JsepTransportController::GetRtcpDtlsTransport( + const std::string& mid) const { RTC_DCHECK_RUN_ON(network_thread_); auto jsep_transport = GetJsepTransportForMid(mid); if (!jsep_transport) { @@ -148,8 +191,8 @@ JsepTransportController::GetRtcpDtlsTransport(const std::string& mid) const { return jsep_transport->rtcp_dtls_transport(); } -rtc::scoped_refptr -JsepTransportController::LookupDtlsTransportByMid(const std::string& mid) { +scoped_refptr JsepTransportController::LookupDtlsTransportByMid( + const std::string& mid) { 
RTC_DCHECK_RUN_ON(network_thread_); auto jsep_transport = GetJsepTransportForMid(mid); if (!jsep_transport) { @@ -158,7 +201,7 @@ JsepTransportController::LookupDtlsTransportByMid(const std::string& mid) { return jsep_transport->RtpDtlsTransport(); } -rtc::scoped_refptr JsepTransportController::GetSctpTransport( +scoped_refptr JsepTransportController::GetSctpTransport( const std::string& mid) const { RTC_DCHECK_RUN_ON(network_thread_); auto jsep_transport = GetJsepTransportForMid(mid); @@ -168,7 +211,7 @@ rtc::scoped_refptr JsepTransportController::GetSctpTransport( return jsep_transport->SctpTransport(); } -void JsepTransportController::SetIceConfig(const cricket::IceConfig& config) { +void JsepTransportController::SetIceConfig(const IceConfig& config) { RTC_DCHECK_RUN_ON(network_thread_); ice_config_ = config; for (auto& dtls : GetDtlsTransports()) { @@ -187,15 +230,14 @@ bool JsepTransportController::NeedsIceRestart( const std::string& transport_name) const { RTC_DCHECK_RUN_ON(network_thread_); - const cricket::JsepTransport* transport = - GetJsepTransportByName(transport_name); + const JsepTransport* transport = GetJsepTransportByName(transport_name); if (!transport) { return false; } return transport->needs_ice_restart(); } -absl::optional JsepTransportController::GetDtlsRole( +std::optional JsepTransportController::GetDtlsRole( const std::string& mid) const { // TODO(tommi): Remove this hop. Currently it's called from the signaling // thread during negotiations, potentially multiple times. @@ -206,15 +248,82 @@ absl::optional JsepTransportController::GetDtlsRole( RTC_DCHECK_RUN_ON(network_thread_); - const cricket::JsepTransport* t = GetJsepTransportForMid(mid); + const JsepTransport* t = GetJsepTransportForMid(mid); if (!t) { - return absl::optional(); + return std::optional(); } return t->GetDtlsRole(); } +RTCErrorOr JsepTransportController::SuggestPayloadType( + const std::string& mid, + Codec codec) { + // Because SDP processing runs on the signal thread and Call processing + // runs on the worker thread, we allow cross thread invocation until we + // can clean up the thread work. + if (!network_thread_->IsCurrent()) { + return network_thread_->BlockingCall([&] { + RTC_DCHECK_RUN_ON(network_thread_); + return SuggestPayloadType(mid, codec); + }); + } + RTC_DCHECK_RUN_ON(network_thread_); + const JsepTransport* transport = GetJsepTransportForMid(mid); + if (transport) { + RTCErrorOr local_result = + transport->local_payload_types().LookupPayloadType(codec); + if (local_result.ok()) { + return local_result; + } + RTCErrorOr remote_result = + transport->remote_payload_types().LookupPayloadType(codec); + if (remote_result.ok()) { + RTCErrorOr local_codec = + transport->local_payload_types().LookupCodec(remote_result.value()); + if (local_result.ok()) { + // Already in use, possibly for something else. + // Fall through to SuggestMapping. + RTC_LOG(LS_WARNING) << "Ignoring remote suggestion of PT " + << static_cast(remote_result.value()) + << " for " << codec << "; already in use"; + } else { + // Tell the local payload type registry that we've taken this + RTC_DCHECK(local_result.error().type() == + RTCErrorType::INVALID_PARAMETER); + AddLocalMapping(mid, remote_result.value(), codec); + return remote_result; + } + } + return payload_type_picker_.SuggestMapping( + codec, &transport->local_payload_types()); + } + // If there is no transport, there are no exclusions. 
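SuggestPayloadType above prefers an existing local assignment, then tries to reuse the remote side's payload type as long as that PT is still free locally, and only then asks the shared picker for a fresh suggestion. A standalone sketch of that preference order, with plain maps standing in for PayloadTypeRecorder and PayloadTypePicker (illustrative only, not the real API):

#include <cstdio>
#include <map>
#include <optional>
#include <string>

using PayloadType = int;

// Toy recorder: codec name -> payload type, plus the reverse index.
struct Recorder {
  std::map<std::string, PayloadType> by_codec;
  std::map<PayloadType, std::string> by_pt;

  void Add(const std::string& codec, PayloadType pt) {
    by_codec[codec] = pt;
    by_pt[pt] = codec;
  }
};

std::optional<PayloadType> Lookup(const Recorder& r, const std::string& codec) {
  auto it = r.by_codec.find(codec);
  if (it == r.by_codec.end()) return std::nullopt;
  return it->second;
}

PayloadType SuggestPayloadType(Recorder& local, const Recorder& remote,
                               const std::string& codec,
                               PayloadType& next_free_pt) {
  // 1. Reuse a payload type we already assigned locally for this codec.
  if (auto pt = Lookup(local, codec)) return *pt;
  // 2. Reuse the remote assignment, but only if that PT is still free here.
  if (auto pt = Lookup(remote, codec)) {
    if (local.by_pt.count(*pt) == 0) {
      local.Add(codec, *pt);  // Recorded here so repeated queries stay stable.
      return *pt;
    }
  }
  // 3. Fall back to picking a fresh, unused payload type.
  while (local.by_pt.count(next_free_pt) != 0) ++next_free_pt;
  local.Add(codec, next_free_pt);
  return next_free_pt;
}

int main() {
  Recorder local, remote;
  remote.Add("opus", 111);
  PayloadType next_free = 96;
  std::printf("opus -> %d\n",
              SuggestPayloadType(local, remote, "opus", next_free));  // 111
  std::printf("VP8  -> %d\n",
              SuggestPayloadType(local, remote, "VP8", next_free));   // 96
  return 0;
}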
+ return payload_type_picker_.SuggestMapping(codec, nullptr); +} + +RTCError JsepTransportController::AddLocalMapping(const std::string& mid, + PayloadType payload_type, + const Codec& codec) { + // Because SDP processing runs on the signal thread and Call processing + // runs on the worker thread, we allow cross thread invocation until we + // can clean up the thread work. + if (!network_thread_->IsCurrent()) { + return network_thread_->BlockingCall([&] { + RTC_DCHECK_RUN_ON(network_thread_); + return AddLocalMapping(mid, payload_type, codec); + }); + } + RTC_DCHECK_RUN_ON(network_thread_); + JsepTransport* transport = GetJsepTransportForMid(mid); + if (!transport) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "AddLocalMapping: no transport for mid"); + } + return transport->local_payload_types().AddMapping(payload_type, codec); +} + bool JsepTransportController::SetLocalCertificate( - const rtc::scoped_refptr& certificate) { + const scoped_refptr& certificate) { if (!network_thread_->IsCurrent()) { return network_thread_->BlockingCall( [&] { return SetLocalCertificate(certificate); }); @@ -241,20 +350,18 @@ bool JsepTransportController::SetLocalCertificate( return true; } -rtc::scoped_refptr -JsepTransportController::GetLocalCertificate( +scoped_refptr JsepTransportController::GetLocalCertificate( const std::string& transport_name) const { RTC_DCHECK_RUN_ON(network_thread_); - const cricket::JsepTransport* t = GetJsepTransportByName(transport_name); + const JsepTransport* t = GetJsepTransportByName(transport_name); if (!t) { return nullptr; } return t->GetLocalCertificate(); } -std::unique_ptr -JsepTransportController::GetRemoteSSLCertChain( +std::unique_ptr JsepTransportController::GetRemoteSSLCertChain( const std::string& transport_name) const { RTC_DCHECK_RUN_ON(network_thread_); @@ -286,7 +393,7 @@ void JsepTransportController::MaybeStartGathering() { RTCError JsepTransportController::AddRemoteCandidates( const std::string& transport_name, - const cricket::Candidates& candidates) { + const Candidates& candidates) { RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(VerifyCandidates(candidates).ok()); auto jsep_transport = GetJsepTransportByName(transport_name); @@ -299,7 +406,7 @@ RTCError JsepTransportController::AddRemoteCandidates( } RTCError JsepTransportController::RemoveRemoteCandidates( - const cricket::Candidates& candidates) { + const Candidates& candidates) { if (!network_thread_->IsCurrent()) { return network_thread_->BlockingCall( [&] { return RemoveRemoteCandidates(candidates); }); @@ -313,8 +420,8 @@ RTCError JsepTransportController::RemoveRemoteCandidates( return error; } - std::map candidates_by_transport_name; - for (const cricket::Candidate& cand : candidates) { + std::map candidates_by_transport_name; + for (const Candidate& cand : candidates) { if (!cand.transport_name().empty()) { candidates_by_transport_name[cand.transport_name()].push_back(cand); } else { @@ -326,17 +433,16 @@ RTCError JsepTransportController::RemoveRemoteCandidates( for (const auto& kv : candidates_by_transport_name) { const std::string& transport_name = kv.first; - const cricket::Candidates& candidates = kv.second; - cricket::JsepTransport* jsep_transport = - GetJsepTransportByName(transport_name); + const Candidates& transport_candidates = kv.second; + JsepTransport* jsep_transport = GetJsepTransportByName(transport_name); if (!jsep_transport) { RTC_LOG(LS_WARNING) << "Not removing candidate because the JsepTransport doesn't exist."; continue; } - for (const cricket::Candidate& 
candidate : candidates) { - cricket::DtlsTransportInternal* dtls = - candidate.component() == cricket::ICE_CANDIDATE_COMPONENT_RTP + for (const Candidate& candidate : transport_candidates) { + DtlsTransportInternal* dtls = + candidate.component() == ICE_CANDIDATE_COMPONENT_RTP ? jsep_transport->rtp_dtls_transport() : jsep_transport->rtcp_dtls_transport(); if (dtls) { @@ -348,10 +454,10 @@ RTCError JsepTransportController::RemoveRemoteCandidates( } bool JsepTransportController::GetStats(const std::string& transport_name, - cricket::TransportStats* stats) { + TransportStats* stats) const { RTC_DCHECK_RUN_ON(network_thread_); - cricket::JsepTransport* transport = GetJsepTransportByName(transport_name); + const JsepTransport* transport = GetJsepTransportByName(transport_name); if (!transport) { return false; } @@ -360,11 +466,6 @@ bool JsepTransportController::GetStats(const std::string& transport_name, void JsepTransportController::SetActiveResetSrtpParams( bool active_reset_srtp_params) { - if (!network_thread_->IsCurrent()) { - network_thread_->BlockingCall( - [=] { SetActiveResetSrtpParams(active_reset_srtp_params); }); - return; - } RTC_DCHECK_RUN_ON(network_thread_); RTC_LOG(LS_INFO) << "Updating the active_reset_srtp_params for JsepTransportController: " @@ -377,7 +478,8 @@ void JsepTransportController::SetActiveResetSrtpParams( RTCError JsepTransportController::RollbackTransports() { if (!network_thread_->IsCurrent()) { - return network_thread_->BlockingCall([=] { return RollbackTransports(); }); + return network_thread_->BlockingCall( + [this] { return RollbackTransports(); }); } RTC_DCHECK_RUN_ON(network_thread_); bundles_.Rollback(); @@ -388,41 +490,39 @@ RTCError JsepTransportController::RollbackTransports() { return RTCError::OK(); } -rtc::scoped_refptr +scoped_refptr JsepTransportController::CreateIceTransport(const std::string& transport_name, bool rtcp) { - int component = rtcp ? cricket::ICE_CANDIDATE_COMPONENT_RTCP - : cricket::ICE_CANDIDATE_COMPONENT_RTP; + int component = + rtcp ? 
ICE_CANDIDATE_COMPONENT_RTCP : ICE_CANDIDATE_COMPONENT_RTP; IceTransportInit init; init.set_port_allocator(port_allocator_); init.set_async_dns_resolver_factory(async_dns_resolver_factory_); init.set_event_log(config_.event_log); - init.set_field_trials(config_.field_trials); + init.set_field_trials(&env_.field_trials()); auto transport = config_.ice_transport_factory->CreateIceTransport( transport_name, component, std::move(init)); RTC_DCHECK(transport); transport->internal()->SetIceRole(ice_role_); - transport->internal()->SetIceTiebreaker(ice_tiebreaker_); transport->internal()->SetIceConfig(ice_config_); return transport; } -std::unique_ptr -JsepTransportController::CreateDtlsTransport( - const cricket::ContentInfo& content_info, - cricket::IceTransportInternal* ice) { +std::unique_ptr +JsepTransportController::CreateDtlsTransport(const ContentInfo& content_info, + IceTransportInternal* ice) { RTC_DCHECK_RUN_ON(network_thread_); - std::unique_ptr dtls; + std::unique_ptr dtls; if (config_.dtls_transport_factory) { dtls = config_.dtls_transport_factory->CreateDtlsTransport( ice, config_.crypto_options, config_.ssl_max_version); } else { - dtls = std::make_unique(ice, config_.crypto_options, - config_.event_log, - config_.ssl_max_version); + dtls = std::make_unique( + ice, config_.crypto_options, config_.event_log, + config_.ssl_max_version); } RTC_DCHECK(dtls); @@ -438,36 +538,49 @@ JsepTransportController::CreateDtlsTransport( this, &JsepTransportController::OnTransportWritableState_n); dtls->SignalReceivingState.connect( this, &JsepTransportController::OnTransportReceivingState_n); - dtls->ice_transport()->SignalGatheringState.connect( - this, &JsepTransportController::OnTransportGatheringState_n); + dtls->ice_transport()->AddGatheringStateCallback( + this, [this](IceTransportInternal* transport) { + RTC_DCHECK_RUN_ON(network_thread_); + OnTransportGatheringState_n(transport); + }); dtls->ice_transport()->SignalCandidateGathered.connect( this, &JsepTransportController::OnTransportCandidateGathered_n); - dtls->ice_transport()->SignalCandidateError.connect( - this, &JsepTransportController::OnTransportCandidateError_n); - dtls->ice_transport()->SignalCandidatesRemoved.connect( - this, &JsepTransportController::OnTransportCandidatesRemoved_n); + dtls->ice_transport()->SetCandidateErrorCallback( + [this](IceTransportInternal* transport, + const IceCandidateErrorEvent& error) { + RTC_DCHECK_RUN_ON(network_thread_); + OnTransportCandidateError_n(transport, error); + }); + dtls->ice_transport()->SetCandidatesRemovedCallback( + [this](IceTransportInternal* transport, const Candidates& candidates) { + RTC_DCHECK_RUN_ON(network_thread_); + OnTransportCandidatesRemoved_n(transport, candidates); + }); dtls->ice_transport()->SignalRoleConflict.connect( this, &JsepTransportController::OnTransportRoleConflict_n); dtls->ice_transport()->SignalStateChanged.connect( this, &JsepTransportController::OnTransportStateChanged_n); dtls->ice_transport()->SignalIceTransportStateChanged.connect( this, &JsepTransportController::OnTransportStateChanged_n); - dtls->ice_transport()->SignalCandidatePairChanged.connect( - this, &JsepTransportController::OnTransportCandidatePairChanged_n); + dtls->ice_transport()->SetCandidatePairChangeCallback( + [this](const CandidatePairChangeEvent& event) { + RTC_DCHECK_RUN_ON(network_thread_); + OnTransportCandidatePairChanged_n(event); + }); dtls->SubscribeDtlsHandshakeError( - [this](rtc::SSLHandshakeError error) { OnDtlsHandshakeError(error); }); + [this](SSLHandshakeError 
error) { OnDtlsHandshakeError(error); }); return dtls; } -std::unique_ptr +std::unique_ptr JsepTransportController::CreateUnencryptedRtpTransport( const std::string& transport_name, - rtc::PacketTransportInternal* rtp_packet_transport, - rtc::PacketTransportInternal* rtcp_packet_transport) { + PacketTransportInternal* rtp_packet_transport, + PacketTransportInternal* rtcp_packet_transport) { RTC_DCHECK_RUN_ON(network_thread_); - auto unencrypted_rtp_transport = - std::make_unique(rtcp_packet_transport == nullptr); + auto unencrypted_rtp_transport = std::make_unique( + rtcp_packet_transport == nullptr, env_.field_trials()); unencrypted_rtp_transport->SetRtpPacketTransport(rtp_packet_transport); if (rtcp_packet_transport) { unencrypted_rtp_transport->SetRtcpPacketTransport(rtcp_packet_transport); @@ -475,14 +588,13 @@ JsepTransportController::CreateUnencryptedRtpTransport( return unencrypted_rtp_transport; } -std::unique_ptr -JsepTransportController::CreateSdesTransport( +std::unique_ptr JsepTransportController::CreateSdesTransport( const std::string& transport_name, - cricket::DtlsTransportInternal* rtp_dtls_transport, - cricket::DtlsTransportInternal* rtcp_dtls_transport) { + DtlsTransportInternal* rtp_dtls_transport, + DtlsTransportInternal* rtcp_dtls_transport) { RTC_DCHECK_RUN_ON(network_thread_); - auto srtp_transport = std::make_unique( - rtcp_dtls_transport == nullptr, *config_.field_trials); + auto srtp_transport = std::make_unique( + rtcp_dtls_transport == nullptr, env_.field_trials()); RTC_DCHECK(rtp_dtls_transport); srtp_transport->SetRtpPacketTransport(rtp_dtls_transport); if (rtcp_dtls_transport) { @@ -494,14 +606,14 @@ JsepTransportController::CreateSdesTransport( return srtp_transport; } -std::unique_ptr +std::unique_ptr JsepTransportController::CreateDtlsSrtpTransport( const std::string& transport_name, - cricket::DtlsTransportInternal* rtp_dtls_transport, - cricket::DtlsTransportInternal* rtcp_dtls_transport) { + DtlsTransportInternal* rtp_dtls_transport, + DtlsTransportInternal* rtcp_dtls_transport) { RTC_DCHECK_RUN_ON(network_thread_); - auto dtls_srtp_transport = std::make_unique( - rtcp_dtls_transport == nullptr, *config_.field_trials); + auto dtls_srtp_transport = std::make_unique( + rtcp_dtls_transport == nullptr, env_.field_trials()); if (config_.enable_external_auth) { dtls_srtp_transport->EnableExternalAuth(); } @@ -518,10 +630,10 @@ JsepTransportController::CreateDtlsSrtpTransport( return dtls_srtp_transport; } -std::vector +std::vector JsepTransportController::GetDtlsTransports() { RTC_DCHECK_RUN_ON(network_thread_); - std::vector dtls_transports; + std::vector dtls_transports; for (auto jsep_transport : transports_.Transports()) { RTC_DCHECK(jsep_transport); if (jsep_transport->rtp_dtls_transport()) { @@ -535,10 +647,10 @@ JsepTransportController::GetDtlsTransports() { return dtls_transports; } -std::vector +std::vector JsepTransportController::GetActiveDtlsTransports() { RTC_DCHECK_RUN_ON(network_thread_); - std::vector dtls_transports; + std::vector dtls_transports; for (auto jsep_transport : transports_.ActiveTransports()) { RTC_DCHECK(jsep_transport); if (jsep_transport->rtp_dtls_transport()) { @@ -555,33 +667,34 @@ JsepTransportController::GetActiveDtlsTransports() { RTCError JsepTransportController::ApplyDescription_n( bool local, SdpType type, - const cricket::SessionDescription* description) { + const SessionDescription* local_desc, + const SessionDescription* remote_desc) { TRACE_EVENT0("webrtc", "JsepTransportController::ApplyDescription_n"); - 
RTC_DCHECK(description); - if (local) { - local_desc_ = description; - } else { - remote_desc_ = description; - } + // Stash away the description object that we'll be applying (since this + // function is used for both local and remote). + const SessionDescription* description = local ? local_desc : remote_desc; + + RTC_DCHECK(description); RTCError error; - error = ValidateAndMaybeUpdateBundleGroups(local, type, description); + error = + ValidateAndMaybeUpdateBundleGroups(local, type, local_desc, remote_desc); if (!error.ok()) { return error; } - std::map> + std::map> merged_encrypted_extension_ids_by_bundle; if (!bundles_.bundle_groups().empty()) { merged_encrypted_extension_ids_by_bundle = MergeEncryptedHeaderExtensionIdsForBundles(description); } - for (const cricket::ContentInfo& content_info : description->contents()) { + for (const ContentInfo& content_info : description->contents()) { // Don't create transports for rejected m-lines and bundled m-lines. if (content_info.rejected || - !bundles_.IsFirstMidInGroup(content_info.name)) { + !bundles_.IsFirstMidInGroup(content_info.mid())) { continue; } error = MaybeCreateJsepTransport(local, content_info, *description); @@ -593,9 +706,8 @@ RTCError JsepTransportController::ApplyDescription_n( RTC_DCHECK(description->contents().size() == description->transport_infos().size()); for (size_t i = 0; i < description->contents().size(); ++i) { - const cricket::ContentInfo& content_info = description->contents()[i]; - const cricket::TransportInfo& transport_info = - description->transport_infos()[i]; + const ContentInfo& content_info = description->contents()[i]; + const TransportInfo& transport_info = description->transport_infos()[i]; if (content_info.rejected) { // This may cause groups to be removed from |bundles_.bundle_groups()|. @@ -603,18 +715,18 @@ RTCError JsepTransportController::ApplyDescription_n( continue; } - const cricket::ContentGroup* established_bundle_group = - bundles_.LookupGroupByMid(content_info.name); + const ContentGroup* established_bundle_group = + bundles_.LookupGroupByMid(content_info.mid()); // For bundle members that are not BUNDLE-tagged (not first in the group), // configure their transport to be the same as the BUNDLE-tagged transport. if (established_bundle_group && - content_info.name != *established_bundle_group->FirstContentName()) { + content_info.mid() != *established_bundle_group->FirstContentName()) { if (!HandleBundledContent(content_info, *established_bundle_group)) { return RTCError(RTCErrorType::INVALID_PARAMETER, "Failed to process the bundled m= section with " "mid='" + - content_info.name + "'."); + content_info.mid() + "'."); } continue; } @@ -627,7 +739,7 @@ RTCError JsepTransportController::ApplyDescription_n( std::vector extension_ids; // Is BUNDLE-tagged (first in the group)? 
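ApplyDescription_n only creates a JsepTransport for m= sections that are either unbundled or first (BUNDLE-tagged) in their group; rejected sections get none, and later bundle members are pointed at the tagged section's transport instead. A standalone sketch of that selection step (illustrative only):

#include <cstdio>
#include <map>
#include <string>
#include <vector>

// Simplified m= section: just a MID and a rejected flag.
struct Section {
  std::string mid;
  bool rejected = false;
};

// For each MID, decide whose transport it should use: its own if it is
// unbundled or the first (tagged) member of its BUNDLE group, otherwise the
// tagged MID of that group. Rejected sections get no transport at all.
std::map<std::string, std::string> AssignTransports(
    const std::vector<Section>& sections,
    const std::vector<std::vector<std::string>>& bundle_groups) {
  std::map<std::string, std::string> tagged_mid_for;  // mid -> first mid.
  for (const auto& group : bundle_groups)
    for (const auto& mid : group) tagged_mid_for[mid] = group.front();

  std::map<std::string, std::string> transport_for;
  for (const Section& s : sections) {
    if (s.rejected) continue;
    auto it = tagged_mid_for.find(s.mid);
    transport_for[s.mid] = (it == tagged_mid_for.end()) ? s.mid : it->second;
  }
  return transport_for;
}

int main() {
  auto result = AssignTransports({{"audio"}, {"video"}, {"data"}},
                                 {{"audio", "video"}});
  // Prints: audio->audio, data->data, video->audio.
  for (const auto& [mid, transport] : result)
    std::printf("%s uses transport of %s\n", mid.c_str(), transport.c_str());
  return 0;
}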
if (established_bundle_group && - content_info.name == *established_bundle_group->FirstContentName()) { + content_info.mid() == *established_bundle_group->FirstContentName()) { auto it = merged_encrypted_extension_ids_by_bundle.find( established_bundle_group); RTC_DCHECK(it != merged_encrypted_extension_ids_by_bundle.end()); @@ -639,15 +751,18 @@ RTCError JsepTransportController::ApplyDescription_n( int rtp_abs_sendtime_extn_id = GetRtpAbsSendTimeHeaderExtensionId(content_info); - cricket::JsepTransport* transport = - GetJsepTransportForMid(content_info.name); - RTC_DCHECK(transport); + JsepTransport* transport = GetJsepTransportForMid(content_info.mid()); + if (!transport) { + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_PARAMETER, + "Could not find transport for m= section with mid='" + + content_info.mid() + "'"); + } SetIceRole_n(DetermineIceRole(transport, transport_info, type, local)); - cricket::JsepTransportDescription jsep_description = - CreateJsepTransportDescription(content_info, transport_info, - extension_ids, rtp_abs_sendtime_extn_id); + JsepTransportDescription jsep_description = CreateJsepTransportDescription( + content_info, transport_info, extension_ids, rtp_abs_sendtime_extn_id); if (local) { error = transport->SetLocalJsepTransportDescription(jsep_description, type); @@ -660,7 +775,13 @@ RTCError JsepTransportController::ApplyDescription_n( LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_PARAMETER, "Failed to apply the description for m= section with mid='" + - content_info.name + "': " + error.message()); + content_info.mid() + "': " + error.message()); + } + error = transport->RecordPayloadTypes(local, type, content_info); + if (!error.ok()) { + RTC_LOG(LS_ERROR) << "RecordPayloadTypes failed: " + << ToString(error.type()) << " - " << error.message(); + return error; } } if (type == SdpType::kAnswer) { @@ -673,14 +794,17 @@ RTCError JsepTransportController::ApplyDescription_n( RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroups( bool local, SdpType type, - const cricket::SessionDescription* description) { + const SessionDescription* local_desc, + const SessionDescription* remote_desc) { + const SessionDescription* description = local ? local_desc : remote_desc; + RTC_DCHECK(description); - std::vector new_bundle_groups = - description->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE); + std::vector new_bundle_groups = + description->GetGroupsByName(GROUP_TYPE_BUNDLE); // Verify `new_bundle_groups`. - std::map new_bundle_groups_by_mid; - for (const cricket::ContentGroup* new_bundle_group : new_bundle_groups) { + std::map new_bundle_groups_by_mid; + for (const ContentGroup* new_bundle_group : new_bundle_groups) { for (const std::string& content_name : new_bundle_group->content_names()) { // The BUNDLE group must not contain a MID that is a member of a different // BUNDLE group, or that contains the same MID multiple times. @@ -708,14 +832,13 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroups( // vice versa. Switching things around like this requires a separate offer // that removes the relevant sections from their group, as per RFC 8843, // section 7.5.2. 
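The offer-side validation above enforces that every MID appears in at most one BUNDLE group, and at most once within a group. A standalone sketch of that check (illustrative only):

#include <cassert>
#include <set>
#include <string>
#include <vector>

// Returns false if any MID occurs twice across (or within) the BUNDLE groups,
// which is what the offer validation rejects.
bool BundleGroupsAreValid(
    const std::vector<std::vector<std::string>>& bundle_groups) {
  std::set<std::string> seen;
  for (const auto& group : bundle_groups) {
    for (const std::string& mid : group) {
      if (!seen.insert(mid).second) return false;  // Duplicate MID.
    }
  }
  return true;
}

int main() {
  assert(BundleGroupsAreValid({{"audio", "video"}, {"data"}}));
  assert(!BundleGroupsAreValid({{"audio", "video"}, {"video"}}));  // Reused MID.
  assert(!BundleGroupsAreValid({{"audio", "audio"}}));             // Repeated MID.
  return 0;
}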
- std::map + std::map new_bundle_groups_by_existing_bundle_groups; - std::map + std::map existing_bundle_groups_by_new_bundle_groups; - for (const cricket::ContentGroup* new_bundle_group : new_bundle_groups) { + for (const ContentGroup* new_bundle_group : new_bundle_groups) { for (const std::string& mid : new_bundle_group->content_names()) { - cricket::ContentGroup* existing_bundle_group = - bundles_.LookupGroupByMid(mid); + ContentGroup* existing_bundle_group = bundles_.LookupGroupByMid(mid); if (!existing_bundle_group) { continue; } @@ -739,72 +862,72 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroups( } } } else if (type == SdpType::kAnswer) { - std::vector offered_bundle_groups = - local ? remote_desc_->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE) - : local_desc_->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE); - - std::map - offered_bundle_groups_by_mid; - for (const cricket::ContentGroup* offered_bundle_group : - offered_bundle_groups) { - for (const std::string& content_name : - offered_bundle_group->content_names()) { - offered_bundle_groups_by_mid[content_name] = offered_bundle_group; + if ((local && remote_desc) || (!local && local_desc)) { + std::vector offered_bundle_groups = + local ? remote_desc->GetGroupsByName(GROUP_TYPE_BUNDLE) + : local_desc->GetGroupsByName(GROUP_TYPE_BUNDLE); + + std::map offered_bundle_groups_by_mid; + for (const ContentGroup* offered_bundle_group : offered_bundle_groups) { + for (const std::string& content_name : + offered_bundle_group->content_names()) { + offered_bundle_groups_by_mid[content_name] = offered_bundle_group; + } } - } - std::map - new_bundle_groups_by_offered_bundle_groups; - for (const cricket::ContentGroup* new_bundle_group : new_bundle_groups) { - if (!new_bundle_group->FirstContentName()) { - // Empty groups could be a subset of any group. - continue; - } - // The group in the answer (new_bundle_group) must have a corresponding - // group in the offer (original_group), because the answer groups may only - // be subsets of the offer groups. - auto it = offered_bundle_groups_by_mid.find( - *new_bundle_group->FirstContentName()); - if (it == offered_bundle_groups_by_mid.end()) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "A BUNDLE group was added in the answer that did not " - "exist in the offer."); - } - const cricket::ContentGroup* offered_bundle_group = it->second; - if (new_bundle_groups_by_offered_bundle_groups.find( - offered_bundle_group) != - new_bundle_groups_by_offered_bundle_groups.end()) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "A MID in the answer has changed group."); - } - new_bundle_groups_by_offered_bundle_groups.insert( - std::make_pair(offered_bundle_group, new_bundle_group)); - for (const std::string& content_name : - new_bundle_group->content_names()) { - it = offered_bundle_groups_by_mid.find(content_name); - // The BUNDLE group in answer should be a subset of offered group. - if (it == offered_bundle_groups_by_mid.end() || - it->second != offered_bundle_group) { + std::map + new_bundle_groups_by_offered_bundle_groups; + for (const ContentGroup* new_bundle_group : new_bundle_groups) { + if (!new_bundle_group->FirstContentName()) { + // Empty groups could be a subset of any group. + continue; + } + // The group in the answer (new_bundle_group) must have a corresponding + // group in the offer (original_group), because the answer groups may + // only be subsets of the offer groups. 
+ auto it = offered_bundle_groups_by_mid.find( + *new_bundle_group->FirstContentName()); + if (it == offered_bundle_groups_by_mid.end()) { return RTCError(RTCErrorType::INVALID_PARAMETER, - "A BUNDLE group in answer contains a MID='" + - content_name + - "' that was not in the offered group."); + "A BUNDLE group was added in the answer that did not " + "exist in the offer."); } - } - } - - for (const auto& bundle_group : bundles_.bundle_groups()) { - for (const std::string& content_name : bundle_group->content_names()) { - // An answer that removes m= sections from pre-negotiated BUNDLE group - // without rejecting it, is invalid. - auto it = new_bundle_groups_by_mid.find(content_name); - if (it == new_bundle_groups_by_mid.end()) { - auto* content_info = description->GetContentByName(content_name); - if (!content_info || !content_info->rejected) { + const ContentGroup* offered_bundle_group = it->second; + if (new_bundle_groups_by_offered_bundle_groups.find( + offered_bundle_group) != + new_bundle_groups_by_offered_bundle_groups.end()) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "A MID in the answer has changed group."); + } + new_bundle_groups_by_offered_bundle_groups.insert( + std::make_pair(offered_bundle_group, new_bundle_group)); + for (const std::string& content_name : + new_bundle_group->content_names()) { + it = offered_bundle_groups_by_mid.find(content_name); + // The BUNDLE group in answer should be a subset of offered group. + if (it == offered_bundle_groups_by_mid.end() || + it->second != offered_bundle_group) { return RTCError(RTCErrorType::INVALID_PARAMETER, - "Answer cannot remove m= section with mid='" + + "A BUNDLE group in answer contains a MID='" + content_name + - "' from already-established BUNDLE group."); + "' that was not in the offered group."); + } + } + } + + for (const auto& bundle_group : bundles_.bundle_groups()) { + for (const std::string& content_name : bundle_group->content_names()) { + // An answer that removes m= sections from pre-negotiated BUNDLE group + // without rejecting it, is invalid. + auto it = new_bundle_groups_by_mid.find(content_name); + if (it == new_bundle_groups_by_mid.end()) { + auto* content_info = description->GetContentByName(content_name); + if (!content_info || !content_info->rejected) { + return RTCError(RTCErrorType::INVALID_PARAMETER, + "Answer cannot remove m= section with mid='" + + content_name + + "' from already-established BUNDLE group."); + } } } } @@ -813,7 +936,7 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroups( if (config_.bundle_policy == PeerConnectionInterface::kBundlePolicyMaxBundle && - !description->HasGroup(cricket::GROUP_TYPE_BUNDLE) && + !description->HasGroup(GROUP_TYPE_BUNDLE) && description->contents().size() > 1) { return RTCError(RTCErrorType::INVALID_PARAMETER, "max-bundle is used but no bundle group found."); @@ -851,14 +974,14 @@ RTCError JsepTransportController::ValidateAndMaybeUpdateBundleGroups( } RTCError JsepTransportController::ValidateContent( - const cricket::ContentInfo& content_info) { + const ContentInfo& content_info) { if (config_.rtcp_mux_policy == PeerConnectionInterface::kRtcpMuxPolicyRequire && - content_info.type == cricket::MediaProtocolType::kRtp && + content_info.type == MediaProtocolType::kRtp && !content_info.bundle_only && !content_info.media_description()->rtcp_mux()) { return RTCError(RTCErrorType::INVALID_PARAMETER, - "The m= section with mid='" + content_info.name + + "The m= section with mid='" + content_info.mid() + "' is invalid. 
RTCP-MUX is not " "enabled when it is required."); } @@ -866,14 +989,13 @@ RTCError JsepTransportController::ValidateContent( } void JsepTransportController::HandleRejectedContent( - const cricket::ContentInfo& content_info) { + const ContentInfo& content_info) { // If the content is rejected, let the // BaseChannel/SctpTransport change the RtpTransport/DtlsTransport first, - // then destroy the cricket::JsepTransport. - cricket::ContentGroup* bundle_group = - bundles_.LookupGroupByMid(content_info.name); + // then destroy the webrtc::JsepTransport. + ContentGroup* bundle_group = bundles_.LookupGroupByMid(content_info.mid()); if (bundle_group && !bundle_group->content_names().empty() && - content_info.name == *bundle_group->FirstContentName()) { + content_info.mid() == *bundle_group->FirstContentName()) { // Rejecting a BUNDLE group's first mid means we are rejecting the entire // group. for (const auto& content_name : bundle_group->content_names()) { @@ -882,17 +1004,17 @@ void JsepTransportController::HandleRejectedContent( // Delete the BUNDLE group. bundles_.DeleteGroup(bundle_group); } else { - transports_.RemoveTransportForMid(content_info.name); + transports_.RemoveTransportForMid(content_info.mid()); if (bundle_group) { // Remove the rejected content from the `bundle_group`. - bundles_.DeleteMid(bundle_group, content_info.name); + bundles_.DeleteMid(bundle_group, content_info.mid()); } } } bool JsepTransportController::HandleBundledContent( - const cricket::ContentInfo& content_info, - const cricket::ContentGroup& bundle_group) { + const ContentInfo& content_info, + const ContentGroup& bundle_group) { TRACE_EVENT0("webrtc", "JsepTransportController::HandleBundledContent"); RTC_DCHECK(bundle_group.FirstContentName()); auto jsep_transport = @@ -900,37 +1022,37 @@ bool JsepTransportController::HandleBundledContent( RTC_DCHECK(jsep_transport); // If the content is bundled, let the // BaseChannel/SctpTransport change the RtpTransport/DtlsTransport first, - // then destroy the cricket::JsepTransport. + // then destroy the webrtc::JsepTransport. // TODO(bugs.webrtc.org/9719) For media transport this is far from ideal, // because it means that we first create media transport and start // connecting it, and then we destroy it. We will need to address it before // video path is enabled. - return transports_.SetTransportForMid(content_info.name, jsep_transport); + return transports_.SetTransportForMid(content_info.mid(), jsep_transport); } -cricket::JsepTransportDescription +JsepTransportDescription JsepTransportController::CreateJsepTransportDescription( - const cricket::ContentInfo& content_info, - const cricket::TransportInfo& transport_info, + const ContentInfo& content_info, + const TransportInfo& transport_info, const std::vector& encrypted_extension_ids, int rtp_abs_sendtime_extn_id) { TRACE_EVENT0("webrtc", "JsepTransportController::CreateJsepTransportDescription"); - const cricket::MediaContentDescription* content_desc = + const MediaContentDescription* content_desc = content_info.media_description(); RTC_DCHECK(content_desc); - bool rtcp_mux_enabled = content_info.type == cricket::MediaProtocolType::kSctp + bool rtcp_mux_enabled = content_info.type == MediaProtocolType::kSctp ? 
true : content_desc->rtcp_mux(); - return cricket::JsepTransportDescription( - rtcp_mux_enabled, content_desc->cryptos(), encrypted_extension_ids, - rtp_abs_sendtime_extn_id, transport_info.description); + return JsepTransportDescription(rtcp_mux_enabled, encrypted_extension_ids, + rtp_abs_sendtime_extn_id, + transport_info.description); } std::vector JsepTransportController::GetEncryptedHeaderExtensionIds( - const cricket::ContentInfo& content_info) { - const cricket::MediaContentDescription* content_desc = + const ContentInfo& content_info) { + const MediaContentDescription* content_desc = content_info.media_description(); if (!config_.crypto_options.srtp.enable_encrypted_rtp_header_extensions) { @@ -949,16 +1071,16 @@ std::vector JsepTransportController::GetEncryptedHeaderExtensionIds( return encrypted_header_extension_ids; } -std::map> +std::map> JsepTransportController::MergeEncryptedHeaderExtensionIdsForBundles( - const cricket::SessionDescription* description) { + const SessionDescription* description) { RTC_DCHECK(description); RTC_DCHECK(!bundles_.bundle_groups().empty()); - std::map> + std::map> merged_encrypted_extension_ids_by_bundle; // Union the encrypted header IDs in the group when bundle is enabled. - for (const cricket::ContentInfo& content_info : description->contents()) { - auto group = bundles_.LookupGroupByMid(content_info.name); + for (const ContentInfo& content_info : description->contents()) { + auto group = bundles_.LookupGroupByMid(content_info.mid()); if (!group) continue; // Get or create list of IDs for the BUNDLE group. @@ -977,84 +1099,77 @@ JsepTransportController::MergeEncryptedHeaderExtensionIdsForBundles( } int JsepTransportController::GetRtpAbsSendTimeHeaderExtensionId( - const cricket::ContentInfo& content_info) { + const ContentInfo& content_info) { if (!config_.enable_external_auth) { return -1; } - const cricket::MediaContentDescription* content_desc = + const MediaContentDescription* content_desc = content_info.media_description(); - const webrtc::RtpExtension* send_time_extension = - webrtc::RtpExtension::FindHeaderExtensionByUri( - content_desc->rtp_header_extensions(), - webrtc::RtpExtension::kAbsSendTimeUri, + const RtpExtension* send_time_extension = + RtpExtension::FindHeaderExtensionByUri( + content_desc->rtp_header_extensions(), RtpExtension::kAbsSendTimeUri, config_.crypto_options.srtp.enable_encrypted_rtp_header_extensions - ? webrtc::RtpExtension::kPreferEncryptedExtension - : webrtc::RtpExtension::kDiscardEncryptedExtension); + ? RtpExtension::kPreferEncryptedExtension + : RtpExtension::kDiscardEncryptedExtension); return send_time_extension ? 
send_time_extension->id : -1; } -const cricket::JsepTransport* JsepTransportController::GetJsepTransportForMid( +const JsepTransport* JsepTransportController::GetJsepTransportForMid( const std::string& mid) const { return transports_.GetTransportForMid(mid); } -cricket::JsepTransport* JsepTransportController::GetJsepTransportForMid( +JsepTransport* JsepTransportController::GetJsepTransportForMid( const std::string& mid) { return transports_.GetTransportForMid(mid); } -const cricket::JsepTransport* JsepTransportController::GetJsepTransportForMid( +const JsepTransport* JsepTransportController::GetJsepTransportForMid( absl::string_view mid) const { return transports_.GetTransportForMid(mid); } -cricket::JsepTransport* JsepTransportController::GetJsepTransportForMid( +JsepTransport* JsepTransportController::GetJsepTransportForMid( absl::string_view mid) { return transports_.GetTransportForMid(mid); } -const cricket::JsepTransport* JsepTransportController::GetJsepTransportByName( +const JsepTransport* JsepTransportController::GetJsepTransportByName( const std::string& transport_name) const { return transports_.GetTransportByName(transport_name); } -cricket::JsepTransport* JsepTransportController::GetJsepTransportByName( +JsepTransport* JsepTransportController::GetJsepTransportByName( const std::string& transport_name) { return transports_.GetTransportByName(transport_name); } RTCError JsepTransportController::MaybeCreateJsepTransport( bool local, - const cricket::ContentInfo& content_info, - const cricket::SessionDescription& description) { - cricket::JsepTransport* transport = GetJsepTransportByName(content_info.name); + const ContentInfo& content_info, + const SessionDescription& description) { + JsepTransport* transport = GetJsepTransportByName(content_info.mid()); if (transport) { return RTCError::OK(); } - const cricket::MediaContentDescription* content_desc = - content_info.media_description(); - if (certificate_ && !content_desc->cryptos().empty()) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "SDES and DTLS-SRTP cannot be enabled at the same time."); - } - rtc::scoped_refptr ice = - CreateIceTransport(content_info.name, /*rtcp=*/false); + scoped_refptr ice = + CreateIceTransport(content_info.mid(), /*rtcp=*/false); - std::unique_ptr rtp_dtls_transport = + std::unique_ptr rtp_dtls_transport = CreateDtlsTransport(content_info, ice->internal()); - std::unique_ptr rtcp_dtls_transport; + std::unique_ptr rtcp_dtls_transport; std::unique_ptr unencrypted_rtp_transport; std::unique_ptr sdes_transport; std::unique_ptr dtls_srtp_transport; - rtc::scoped_refptr rtcp_ice; + scoped_refptr rtcp_ice; if (config_.rtcp_mux_policy != PeerConnectionInterface::kRtcpMuxPolicyRequire && - content_info.type == cricket::MediaProtocolType::kRtp) { - rtcp_ice = CreateIceTransport(content_info.name, /*rtcp=*/true); + content_info.type == MediaProtocolType::kRtp) { + rtcp_ice = CreateIceTransport(content_info.mid(), /*rtcp=*/true); rtcp_dtls_transport = CreateDtlsTransport(content_info, rtcp_ice->internal()); } @@ -1063,45 +1178,45 @@ RTCError JsepTransportController::MaybeCreateJsepTransport( RTC_LOG(LS_INFO) << "Creating UnencryptedRtpTransport, becayse encryption is disabled."; unencrypted_rtp_transport = CreateUnencryptedRtpTransport( - content_info.name, rtp_dtls_transport.get(), rtcp_dtls_transport.get()); - } else if (!content_desc->cryptos().empty()) { - sdes_transport = CreateSdesTransport( - content_info.name, rtp_dtls_transport.get(), rtcp_dtls_transport.get()); - RTC_LOG(LS_INFO) << "Creating 
SdesTransport."; + content_info.mid(), rtp_dtls_transport.get(), + rtcp_dtls_transport.get()); } else { RTC_LOG(LS_INFO) << "Creating DtlsSrtpTransport."; - dtls_srtp_transport = CreateDtlsSrtpTransport( - content_info.name, rtp_dtls_transport.get(), rtcp_dtls_transport.get()); + dtls_srtp_transport = + CreateDtlsSrtpTransport(content_info.mid(), rtp_dtls_transport.get(), + rtcp_dtls_transport.get()); } - std::unique_ptr sctp_transport; + std::unique_ptr sctp_transport; if (config_.sctp_factory) { - sctp_transport = - config_.sctp_factory->CreateSctpTransport(rtp_dtls_transport.get()); + sctp_transport = config_.sctp_factory->CreateSctpTransport( + env_, rtp_dtls_transport.get()); } - std::unique_ptr jsep_transport = - std::make_unique( - content_info.name, certificate_, std::move(ice), std::move(rtcp_ice), + std::unique_ptr jsep_transport = + std::make_unique( + content_info.mid(), certificate_, std::move(ice), std::move(rtcp_ice), std::move(unencrypted_rtp_transport), std::move(sdes_transport), std::move(dtls_srtp_transport), std::move(rtp_dtls_transport), - std::move(rtcp_dtls_transport), std::move(sctp_transport), [&]() { + std::move(rtcp_dtls_transport), std::move(sctp_transport), + [&]() { RTC_DCHECK_RUN_ON(network_thread_); UpdateAggregateStates_n(); - }); + }, + payload_type_picker_); jsep_transport->rtp_transport()->SubscribeRtcpPacketReceived( - this, [this](rtc::CopyOnWriteBuffer* buffer, int64_t packet_time_ms) { + this, [this](CopyOnWriteBuffer* buffer, int64_t packet_time_ms) { RTC_DCHECK_RUN_ON(network_thread_); OnRtcpPacketReceived_n(buffer, packet_time_ms); }); jsep_transport->rtp_transport()->SetUnDemuxableRtpPacketReceivedHandler( - [this](webrtc::RtpPacketReceived& packet) { + [this](RtpPacketReceived& packet) { RTC_DCHECK_RUN_ON(network_thread_); OnUnDemuxableRtpPacketReceived_n(packet); }); - transports_.RegisterTransport(content_info.name, std::move(jsep_transport)); + transports_.RegisterTransport(content_info.mid(), std::move(jsep_transport)); UpdateAggregateStates_n(); return RTCError::OK(); } @@ -1110,7 +1225,7 @@ void JsepTransportController::DestroyAllJsepTransports_n() { transports_.DestroyAllTransports(); } -void JsepTransportController::SetIceRole_n(cricket::IceRole ice_role) { +void JsepTransportController::SetIceRole_n(IceRole ice_role) { ice_role_ = ice_role; auto dtls_transports = GetDtlsTransports(); for (auto& dtls : dtls_transports) { @@ -1118,12 +1233,12 @@ void JsepTransportController::SetIceRole_n(cricket::IceRole ice_role) { } } -cricket::IceRole JsepTransportController::DetermineIceRole( - cricket::JsepTransport* jsep_transport, - const cricket::TransportInfo& transport_info, +IceRole JsepTransportController::DetermineIceRole( + JsepTransport* jsep_transport, + const TransportInfo& transport_info, SdpType type, bool local) { - cricket::IceRole ice_role = ice_role_; + IceRole ice_role = ice_role_; auto tdesc = transport_info.description; if (local) { // The initial offer side may use ICE Lite, in which case, per RFC5245 @@ -1135,20 +1250,18 @@ cricket::IceRole JsepTransportController::DetermineIceRole( // SetLocalDescription in JsepTransportController. 
if (jsep_transport->remote_description() && jsep_transport->remote_description()->transport_desc.ice_mode == - cricket::ICEMODE_LITE && - ice_role_ == cricket::ICEROLE_CONTROLLED && - tdesc.ice_mode == cricket::ICEMODE_FULL) { - ice_role = cricket::ICEROLE_CONTROLLING; + ICEMODE_LITE && + ice_role_ == ICEROLE_CONTROLLED && tdesc.ice_mode == ICEMODE_FULL) { + ice_role = ICEROLE_CONTROLLING; } } else { - // If our role is cricket::ICEROLE_CONTROLLED and the remote endpoint + // If our role is webrtc::ICEROLE_CONTROLLED and the remote endpoint // supports only ice_lite, this local endpoint should take the CONTROLLING // role. // TODO(deadbeef): This is a session-level attribute, so it really shouldn't // be in a TransportDescription in the first place... - if (ice_role_ == cricket::ICEROLE_CONTROLLED && - tdesc.ice_mode == cricket::ICEMODE_LITE) { - ice_role = cricket::ICEROLE_CONTROLLING; + if (ice_role_ == ICEROLE_CONTROLLED && tdesc.ice_mode == ICEMODE_LITE) { + ice_role = ICEROLE_CONTROLLING; } // If we use ICE Lite and the remote endpoint uses the full implementation @@ -1156,10 +1269,9 @@ cricket::IceRole JsepTransportController::DetermineIceRole( // side must be the controlling role. if (jsep_transport->local_description() && jsep_transport->local_description()->transport_desc.ice_mode == - cricket::ICEMODE_LITE && - ice_role_ == cricket::ICEROLE_CONTROLLING && - tdesc.ice_mode == cricket::ICEMODE_FULL) { - ice_role = cricket::ICEROLE_CONTROLLED; + ICEMODE_LITE && + ice_role_ == ICEROLE_CONTROLLING && tdesc.ice_mode == ICEMODE_FULL) { + ice_role = ICEROLE_CONTROLLED; } } @@ -1167,7 +1279,7 @@ cricket::IceRole JsepTransportController::DetermineIceRole( } void JsepTransportController::OnTransportWritableState_n( - rtc::PacketTransportInternal* transport) { + PacketTransportInternal* transport) { RTC_LOG(LS_INFO) << " Transport " << transport->transport_name() << " writability changed to " << transport->writable() << "."; @@ -1175,61 +1287,60 @@ void JsepTransportController::OnTransportWritableState_n( } void JsepTransportController::OnTransportReceivingState_n( - rtc::PacketTransportInternal* transport) { + PacketTransportInternal* transport) { UpdateAggregateStates_n(); } void JsepTransportController::OnTransportGatheringState_n( - cricket::IceTransportInternal* transport) { + IceTransportInternal* transport) { UpdateAggregateStates_n(); } void JsepTransportController::OnTransportCandidateGathered_n( - cricket::IceTransportInternal* transport, - const cricket::Candidate& candidate) { + IceTransportInternal* transport, + const Candidate& candidate) { // We should never signal peer-reflexive candidates. 
- if (candidate.type() == cricket::PRFLX_PORT_TYPE) { + if (candidate.is_prflx()) { RTC_DCHECK_NOTREACHED(); return; } - signal_ice_candidates_gathered_.Send( - transport->transport_name(), std::vector{candidate}); + signal_ice_candidates_gathered_.Send(transport->transport_name(), + std::vector{candidate}); } void JsepTransportController::OnTransportCandidateError_n( - cricket::IceTransportInternal* transport, - const cricket::IceCandidateErrorEvent& event) { + IceTransportInternal* transport, + const IceCandidateErrorEvent& event) { signal_ice_candidate_error_.Send(event); } void JsepTransportController::OnTransportCandidatesRemoved_n( - cricket::IceTransportInternal* transport, - const cricket::Candidates& candidates) { + IceTransportInternal* transport, + const Candidates& candidates) { signal_ice_candidates_removed_.Send(candidates); } void JsepTransportController::OnTransportCandidatePairChanged_n( - const cricket::CandidatePairChangeEvent& event) { + const CandidatePairChangeEvent& event) { signal_ice_candidate_pair_changed_.Send(event); } void JsepTransportController::OnTransportRoleConflict_n( - cricket::IceTransportInternal* transport) { + IceTransportInternal* transport) { // Note: since the role conflict is handled entirely on the network thread, // we don't need to worry about role conflicts occurring on two ports at // once. The first one encountered should immediately reverse the role. - cricket::IceRole reversed_role = (ice_role_ == cricket::ICEROLE_CONTROLLING) - ? cricket::ICEROLE_CONTROLLED - : cricket::ICEROLE_CONTROLLING; + IceRole reversed_role = (ice_role_ == ICEROLE_CONTROLLING) + ? ICEROLE_CONTROLLED + : ICEROLE_CONTROLLING; RTC_LOG(LS_INFO) << "Got role conflict; switching to " - << (reversed_role == cricket::ICEROLE_CONTROLLING - ? "controlling" - : "controlled") + << (reversed_role == ICEROLE_CONTROLLING ? "controlling" + : "controlled") << " role."; SetIceRole_n(reversed_role); } void JsepTransportController::OnTransportStateChanged_n( - cricket::IceTransportInternal* transport) { + IceTransportInternal* transport) { RTC_LOG(LS_INFO) << transport->transport_name() << " Transport " << transport->component() << " state changed. 
Check if state is complete."; @@ -1239,13 +1350,12 @@ void JsepTransportController::OnTransportStateChanged_n( void JsepTransportController::UpdateAggregateStates_n() { TRACE_EVENT0("webrtc", "JsepTransportController::UpdateAggregateStates_n"); auto dtls_transports = GetActiveDtlsTransports(); - cricket::IceConnectionState new_connection_state = - cricket::kIceConnectionConnecting; + IceConnectionState new_connection_state = kIceConnectionConnecting; PeerConnectionInterface::IceConnectionState new_ice_connection_state = PeerConnectionInterface::IceConnectionState::kIceConnectionNew; PeerConnectionInterface::PeerConnectionState new_combined_state = PeerConnectionInterface::PeerConnectionState::kNew; - cricket::IceGatheringState new_gathering_state = cricket::kIceGatheringNew; + IceGatheringState new_gathering_state = kIceGatheringNew; bool any_failed = false; bool all_connected = !dtls_transports.empty(); bool all_completed = !dtls_transports.empty(); @@ -1257,31 +1367,30 @@ void JsepTransportController::UpdateAggregateStates_n() { for (const auto& dtls : dtls_transports) { any_failed = any_failed || dtls->ice_transport()->GetState() == - cricket::IceTransportState::STATE_FAILED; + IceTransportStateInternal::STATE_FAILED; all_connected = all_connected && dtls->writable(); all_completed = all_completed && dtls->writable() && dtls->ice_transport()->GetState() == - cricket::IceTransportState::STATE_COMPLETED && - dtls->ice_transport()->GetIceRole() == cricket::ICEROLE_CONTROLLING && - dtls->ice_transport()->gathering_state() == - cricket::kIceGatheringComplete; + IceTransportStateInternal::STATE_COMPLETED && + dtls->ice_transport()->GetIceRole() == ICEROLE_CONTROLLING && + dtls->ice_transport()->gathering_state() == kIceGatheringComplete; any_gathering = any_gathering || dtls->ice_transport()->gathering_state() != - cricket::kIceGatheringNew; + kIceGatheringNew; all_done_gathering = - all_done_gathering && dtls->ice_transport()->gathering_state() == - cricket::kIceGatheringComplete; + all_done_gathering && + dtls->ice_transport()->gathering_state() == kIceGatheringComplete; dtls_state_counts[dtls->dtls_state()]++; ice_state_counts[dtls->ice_transport()->GetIceTransportState()]++; } if (any_failed) { - new_connection_state = cricket::kIceConnectionFailed; + new_connection_state = kIceConnectionFailed; } else if (all_completed) { - new_connection_state = cricket::kIceConnectionCompleted; + new_connection_state = kIceConnectionCompleted; } else if (all_connected) { - new_connection_state = cricket::kIceConnectionConnected; + new_connection_state = kIceConnectionConnected; } if (ice_connection_state_ != new_connection_state) { ice_connection_state_ = new_connection_state; @@ -1401,11 +1510,11 @@ void JsepTransportController::UpdateAggregateStates_n() { // Compute the gathering state. 
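// Illustrative summary (not part of this change): the aggregation in this
// function follows a fixed precedence, restated here for quick reference:
//
//   connection: any transport failed     -> kIceConnectionFailed
//               else all completed       -> kIceConnectionCompleted
//               else all connected       -> kIceConnectionConnected
//               else                     -> kIceConnectionConnecting
//   gathering:  no active transports     -> kIceGatheringNew
//               else all done gathering  -> kIceGatheringComplete
//               else any gathering       -> kIceGatheringGathering
//               else                     -> kIceGatheringNew
//
// For example, with two active transports where only one has started
// gathering, any_gathering is true and all_done_gathering is false, so the
// aggregate state computed below is kIceGatheringGathering.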
if (dtls_transports.empty()) { - new_gathering_state = cricket::kIceGatheringNew; + new_gathering_state = kIceGatheringNew; } else if (all_done_gathering) { - new_gathering_state = cricket::kIceGatheringComplete; + new_gathering_state = kIceGatheringComplete; } else if (any_gathering) { - new_gathering_state = cricket::kIceGatheringGathering; + new_gathering_state = kIceGatheringGathering; } if (ice_gathering_state_ != new_gathering_state) { ice_gathering_state_ = new_gathering_state; @@ -1413,27 +1522,25 @@ void JsepTransportController::UpdateAggregateStates_n() { } } -void JsepTransportController::OnRtcpPacketReceived_n( - rtc::CopyOnWriteBuffer* packet, - int64_t packet_time_us) { +void JsepTransportController::OnRtcpPacketReceived_n(CopyOnWriteBuffer* packet, + int64_t packet_time_us) { RTC_DCHECK(config_.rtcp_handler); config_.rtcp_handler(*packet, packet_time_us); } void JsepTransportController::OnUnDemuxableRtpPacketReceived_n( - const webrtc::RtpPacketReceived& packet) { + const RtpPacketReceived& packet) { RTC_DCHECK(config_.un_demuxable_packet_handler); config_.un_demuxable_packet_handler(packet); } -void JsepTransportController::OnDtlsHandshakeError( - rtc::SSLHandshakeError error) { +void JsepTransportController::OnDtlsHandshakeError(SSLHandshakeError error) { config_.on_dtls_handshake_error_(error); } bool JsepTransportController::OnTransportChanged( const std::string& mid, - cricket::JsepTransport* jsep_transport) { + JsepTransport* jsep_transport) { if (config_.transport_observer) { if (jsep_transport) { return config_.transport_observer->OnTransportChanged( diff --git a/pc/jsep_transport_controller.h b/pc/jsep_transport_controller.h index 5880e346cd..3f42a0251b 100644 --- a/pc/jsep_transport_controller.h +++ b/pc/jsep_transport_controller.h @@ -16,17 +16,17 @@ #include #include #include +#include #include -#include #include #include #include "absl/functional/any_invocable.h" -#include "absl/types/optional.h" +#include "absl/strings/string_view.h" #include "api/async_dns_resolver.h" #include "api/candidate.h" #include "api/crypto/crypto_options.h" -#include "api/ice_transport_factory.h" +#include "api/environment/environment.h" #include "api/ice_transport_interface.h" #include "api/jsep.h" #include "api/peer_connection_interface.h" @@ -36,10 +36,10 @@ #include "api/sequence_checker.h" #include "api/transport/data_channel_transport_interface.h" #include "api/transport/sctp_transport_factory_interface.h" -#include "media/sctp/sctp_transport_internal.h" -#include "p2p/base/dtls_transport.h" -#include "p2p/base/dtls_transport_factory.h" -#include "p2p/base/dtls_transport_internal.h" +#include "call/payload_type.h" +#include "call/payload_type_picker.h" +#include "media/base/codec.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_transport_channel.h" #include "p2p/base/packet_transport_internal.h" @@ -47,6 +47,8 @@ #include "p2p/base/port_allocator.h" #include "p2p/base/transport_description.h" #include "p2p/base/transport_info.h" +#include "p2p/dtls/dtls_transport_factory.h" +#include "p2p/dtls/dtls_transport_internal.h" #include "pc/dtls_srtp_transport.h" #include "pc/dtls_transport.h" #include "pc/jsep_transport.h" @@ -58,9 +60,7 @@ #include "pc/srtp_transport.h" #include "pc/transport_stats.h" #include "rtc_base/callback_list.h" -#include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/helpers.h" #include "rtc_base/rtc_certificate.h" #include 
"rtc_base/ssl_certificate.h" #include "rtc_base/ssl_stream_adapter.h" @@ -68,14 +68,10 @@ #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" -namespace rtc { -class Thread; -class PacketTransportInternal; -} // namespace rtc - namespace webrtc { -class JsepTransportController : public sigslot::has_slots<> { +class JsepTransportController : public PayloadTypeSuggester, + public sigslot::has_slots<> { public: // Used when the RtpTransport/DtlsTransport of the m= section is changed // because the section is rejected or BUNDLE is enabled. @@ -100,7 +96,7 @@ class JsepTransportController : public sigslot::has_slots<> { virtual bool OnTransportChanged( const std::string& mid, RtpTransportInternal* rtp_transport, - rtc::scoped_refptr dtls_transport, + scoped_refptr dtls_transport, DataChannelTransportInterface* data_channel_transport) = 0; }; @@ -109,10 +105,10 @@ class JsepTransportController : public sigslot::has_slots<> { // upon setting a local transport description that indicates an ICE // restart. bool redetermine_role_on_ice_restart = true; - rtc::SSLProtocolVersion ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12; + SSLProtocolVersion ssl_max_version = SSL_PROTOCOL_DTLS_12; // `crypto_options` is used to determine if created DTLS transports // negotiate GCM crypto suites or not. - webrtc::CryptoOptions crypto_options; + CryptoOptions crypto_options; PeerConnectionInterface::BundlePolicy bundle_policy = PeerConnectionInterface::kBundlePolicyBalanced; PeerConnectionInterface::RtcpMuxPolicy rtcp_mux_policy = @@ -120,12 +116,12 @@ class JsepTransportController : public sigslot::has_slots<> { bool disable_encryption = false; bool enable_external_auth = false; // Used to inject the ICE/DTLS transports created externally. - webrtc::IceTransportFactory* ice_transport_factory = nullptr; - cricket::DtlsTransportFactory* dtls_transport_factory = nullptr; + IceTransportFactory* ice_transport_factory = nullptr; + DtlsTransportFactory* dtls_transport_factory = nullptr; Observer* transport_observer = nullptr; // Must be provided and valid for the lifetime of the // JsepTransportController instance. - absl::AnyInvocable rtcp_handler; absl::AnyInvocable @@ -137,10 +133,7 @@ class JsepTransportController : public sigslot::has_slots<> { // Factory for SCTP transports. SctpTransportFactoryInterface* sctp_factory = nullptr; - std::function on_dtls_handshake_error_; - - // Field trials. - const webrtc::FieldTrialsView* field_trials; + std::function on_dtls_handshake_error_; }; // The ICE related events are fired on the `network_thread`. @@ -148,9 +141,11 @@ class JsepTransportController : public sigslot::has_slots<> { // and destruction of the JsepTransportController must occur on the // `network_thread`. JsepTransportController( - rtc::Thread* network_thread, - cricket::PortAllocator* port_allocator, + const Environment& env, + Thread* network_thread, + PortAllocator* port_allocator, AsyncDnsResolverFactoryInterface* async_dns_resolver_factory, + PayloadTypePicker& payload_type_picker, Config config); virtual ~JsepTransportController(); @@ -161,23 +156,34 @@ class JsepTransportController : public sigslot::has_slots<> { // level, creating/destroying transport objects as needed and updating their // properties. This includes RTP, DTLS, and ICE (but not SCTP). At least not // yet? May make sense to in the future. + // + // `local_desc` must always be valid. 
If a remote description has previously + // been set via a call to `SetRemoteDescription()` then `remote_desc` should + // point to that description object in order to keep the current local and + // remote session descriptions in sync. RTCError SetLocalDescription(SdpType type, - const cricket::SessionDescription* description); - + const SessionDescription* local_desc, + const SessionDescription* remote_desc); + + // Call to apply a remote description (See `SetLocalDescription()` for local). + // + // `remote_desc` must always be valid. If a local description has previously + // been set via a call to `SetLocalDescription()` then `local_desc` should + // point to that description object in order to keep the current local and + // remote session descriptions in sync. RTCError SetRemoteDescription(SdpType type, - const cricket::SessionDescription* description); + const SessionDescription* local_desc, + const SessionDescription* remote_desc); // Get transports to be used for the provided `mid`. If bundling is enabled, // calling GetRtpTransport for multiple MIDs may yield the same object. RtpTransportInternal* GetRtpTransport(absl::string_view mid) const; - cricket::DtlsTransportInternal* GetDtlsTransport(const std::string& mid); - const cricket::DtlsTransportInternal* GetRtcpDtlsTransport( + DtlsTransportInternal* GetDtlsTransport(const std::string& mid); + const DtlsTransportInternal* GetRtcpDtlsTransport( const std::string& mid) const; // Gets the externally sharable version of the DtlsTransport. - rtc::scoped_refptr LookupDtlsTransportByMid( - const std::string& mid); - rtc::scoped_refptr GetSctpTransport( - const std::string& mid) const; + scoped_refptr LookupDtlsTransportByMid(const std::string& mid); + scoped_refptr GetSctpTransport(const std::string& mid) const; DataChannelTransportInterface* GetDataChannelTransport( const std::string& mid) const; @@ -187,7 +193,7 @@ class JsepTransportController : public sigslot::has_slots<> { ********************/ // This method is public to allow PeerConnection to update it from // SetConfiguration. - void SetIceConfig(const cricket::IceConfig& config); + void SetIceConfig(const IceConfig& config); // Set the "needs-ice-restart" flag as described in JSEP. After the flag is // set, offers should generate new ufrags/passwords until an ICE restart // occurs. @@ -200,32 +206,39 @@ class JsepTransportController : public sigslot::has_slots<> { // Start gathering candidates for any new transports, or transports doing an // ICE restart. void MaybeStartGathering(); - RTCError AddRemoteCandidates( - const std::string& mid, - const std::vector& candidates); - RTCError RemoveRemoteCandidates( - const std::vector& candidates); + RTCError AddRemoteCandidates(const std::string& mid, + const std::vector& candidates); + RTCError RemoveRemoteCandidates(const std::vector& candidates); /********************** * DTLS-related methods *********************/ // Specifies the identity to use in this session. // Can only be called once. - bool SetLocalCertificate( - const rtc::scoped_refptr& certificate); - rtc::scoped_refptr GetLocalCertificate( + bool SetLocalCertificate(const scoped_refptr& certificate); + scoped_refptr GetLocalCertificate( const std::string& mid) const; // Caller owns returned certificate chain. This method mainly exists for // stats reporting. - std::unique_ptr GetRemoteSSLCertChain( + std::unique_ptr GetRemoteSSLCertChain( const std::string& mid) const; // Get negotiated role, if one has been negotiated. 
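// Illustrative sketch (not part of this change): minimal use of the
// two-description setters declared above. `controller`, `offer` and
// `answer` are hypothetical names; the descriptions are assumed to be
// caller-owned std::unique_ptr<SessionDescription> objects.
//
//   RTCError e1 = controller->SetLocalDescription(
//       SdpType::kOffer, offer.get(), /*remote_desc=*/nullptr);
//   // ... the remote answer arrives and is parsed into `answer` ...
//   RTCError e2 = controller->SetRemoteDescription(
//       SdpType::kAnswer, /*local_desc=*/offer.get(), answer.get());
//
// Passing the previously applied counterpart (here `offer`) is what keeps
// the controller's view of the local and remote descriptions in sync.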
- absl::optional GetDtlsRole(const std::string& mid) const; + std::optional GetDtlsRole(const std::string& mid) const; + + // Suggest a payload type for a given codec on a given media section. + // Media section is indicated by MID. + // The function will either return a PT already in use on the connection + // or a newly suggested one. + RTCErrorOr SuggestPayloadType(const std::string& mid, + Codec codec) override; + RTCError AddLocalMapping(const std::string& mid, + PayloadType payload_type, + const Codec& codec) override; + const PayloadTypePicker& PayloadTypePickerForTesting() const { + return payload_type_picker_; + } - // TODO(deadbeef): GetStats isn't const because all the way down to - // OpenSSLStreamAdapter, GetSslCipherSuite and GetDtlsSrtpCryptoSuite are not - // const. Fix this. - bool GetStats(const std::string& mid, cricket::TransportStats* stats); + bool GetStats(const std::string& mid, TransportStats* stats) const; bool initial_offerer() const { return initial_offerer_ && *initial_offerer_; } @@ -233,14 +246,14 @@ class JsepTransportController : public sigslot::has_slots<> { RTCError RollbackTransports(); - // F: void(const std::string&, const std::vector&) + // F: void(const std::string&, const std::vector&) template void SubscribeIceCandidateGathered(F&& callback) { RTC_DCHECK_RUN_ON(network_thread_); signal_ice_candidates_gathered_.AddReceiver(std::forward(callback)); } - // F: void(cricket::IceConnectionState) + // F: void(webrtc::IceConnectionState) template void SubscribeIceConnectionState(F&& callback) { RTC_DCHECK_RUN_ON(network_thread_); @@ -262,28 +275,28 @@ class JsepTransportController : public sigslot::has_slots<> { std::forward(callback)); } - // F: void(cricket::IceGatheringState) + // F: void(webrtc::IceGatheringState) template void SubscribeIceGatheringState(F&& callback) { RTC_DCHECK_RUN_ON(network_thread_); signal_ice_gathering_state_.AddReceiver(std::forward(callback)); } - // F: void(const cricket::IceCandidateErrorEvent&) + // F: void(const webrtc::IceCandidateErrorEvent&) template void SubscribeIceCandidateError(F&& callback) { RTC_DCHECK_RUN_ON(network_thread_); signal_ice_candidate_error_.AddReceiver(std::forward(callback)); } - // F: void(const std::vector&) + // F: void(const std::vector&) template void SubscribeIceCandidatesRemoved(F&& callback) { RTC_DCHECK_RUN_ON(network_thread_); signal_ice_candidates_removed_.AddReceiver(std::forward(callback)); } - // F: void(const cricket::CandidatePairChangeEvent&) + // F: void(const webrtc::CandidatePairChangeEvent&) template void SubscribeIceCandidatePairChanged(F&& callback) { RTC_DCHECK_RUN_ON(network_thread_); @@ -297,7 +310,7 @@ class JsepTransportController : public sigslot::has_slots<> { // Else if all completed => completed, // Else if all connected => connected, // Else => connecting - CallbackList signal_ice_connection_state_ + CallbackList signal_ice_connection_state_ RTC_GUARDED_BY(network_thread_); CallbackList @@ -309,188 +322,189 @@ class JsepTransportController : public sigslot::has_slots<> { // If all transports done gathering => complete, // Else if any are gathering => gathering, // Else => new - CallbackList signal_ice_gathering_state_ + CallbackList signal_ice_gathering_state_ RTC_GUARDED_BY(network_thread_); // [mid, candidates] - CallbackList&> + CallbackList&> signal_ice_candidates_gathered_ RTC_GUARDED_BY(network_thread_); - CallbackList - signal_ice_candidate_error_ RTC_GUARDED_BY(network_thread_); + CallbackList signal_ice_candidate_error_ + RTC_GUARDED_BY(network_thread_); - 
CallbackList&> - signal_ice_candidates_removed_ RTC_GUARDED_BY(network_thread_); + CallbackList&> signal_ice_candidates_removed_ + RTC_GUARDED_BY(network_thread_); - CallbackList + CallbackList signal_ice_candidate_pair_changed_ RTC_GUARDED_BY(network_thread_); + // Called from SetLocalDescription and SetRemoteDescription. + // When `local` is true, local_desc must be valid. Similarly when + // `local` is false, remote_desc must be valid. The description counterpart + // to the one that's being applied, may be nullptr but when it's supplied + // the counterpart description's content groups will be kept up to date for + // `type == SdpType::kAnswer`. RTCError ApplyDescription_n(bool local, SdpType type, - const cricket::SessionDescription* description) + const SessionDescription* local_desc, + const SessionDescription* remote_desc) RTC_RUN_ON(network_thread_); RTCError ValidateAndMaybeUpdateBundleGroups( bool local, SdpType type, - const cricket::SessionDescription* description); - RTCError ValidateContent(const cricket::ContentInfo& content_info); + const SessionDescription* local_desc, + const SessionDescription* remote_desc) RTC_RUN_ON(network_thread_); + RTCError ValidateContent(const ContentInfo& content_info); - void HandleRejectedContent(const cricket::ContentInfo& content_info) + void HandleRejectedContent(const ContentInfo& content_info) RTC_RUN_ON(network_thread_); - bool HandleBundledContent(const cricket::ContentInfo& content_info, - const cricket::ContentGroup& bundle_group) + bool HandleBundledContent(const ContentInfo& content_info, + const ContentGroup& bundle_group) RTC_RUN_ON(network_thread_); - cricket::JsepTransportDescription CreateJsepTransportDescription( - const cricket::ContentInfo& content_info, - const cricket::TransportInfo& transport_info, + JsepTransportDescription CreateJsepTransportDescription( + const ContentInfo& content_info, + const TransportInfo& transport_info, const std::vector& encrypted_extension_ids, int rtp_abs_sendtime_extn_id); - std::map> + std::map> MergeEncryptedHeaderExtensionIdsForBundles( - const cricket::SessionDescription* description); + const SessionDescription* description); std::vector GetEncryptedHeaderExtensionIds( - const cricket::ContentInfo& content_info); + const ContentInfo& content_info); - int GetRtpAbsSendTimeHeaderExtensionId( - const cricket::ContentInfo& content_info); + int GetRtpAbsSendTimeHeaderExtensionId(const ContentInfo& content_info); // This method takes the BUNDLE group into account. If the JsepTransport is // destroyed because of BUNDLE, it would return the transport which other // transports are bundled on (In current implementation, it is the first // content in the BUNDLE group). - const cricket::JsepTransport* GetJsepTransportForMid( - const std::string& mid) const RTC_RUN_ON(network_thread_); - cricket::JsepTransport* GetJsepTransportForMid(const std::string& mid) + const JsepTransport* GetJsepTransportForMid(const std::string& mid) const + RTC_RUN_ON(network_thread_); + JsepTransport* GetJsepTransportForMid(const std::string& mid) + RTC_RUN_ON(network_thread_); + const JsepTransport* GetJsepTransportForMid(absl::string_view mid) const RTC_RUN_ON(network_thread_); - const cricket::JsepTransport* GetJsepTransportForMid( - absl::string_view mid) const RTC_RUN_ON(network_thread_); - cricket::JsepTransport* GetJsepTransportForMid(absl::string_view mid) + JsepTransport* GetJsepTransportForMid(absl::string_view mid) RTC_RUN_ON(network_thread_); // Get the JsepTransport without considering the BUNDLE group. 
Return nullptr // if the JsepTransport is destroyed. - const cricket::JsepTransport* GetJsepTransportByName( + const JsepTransport* GetJsepTransportByName( const std::string& transport_name) const RTC_RUN_ON(network_thread_); - cricket::JsepTransport* GetJsepTransportByName( - const std::string& transport_name) RTC_RUN_ON(network_thread_); + JsepTransport* GetJsepTransportByName(const std::string& transport_name) + RTC_RUN_ON(network_thread_); // Creates jsep transport. Noop if transport is already created. // Transport is created either during SetLocalDescription (`local` == true) or // during SetRemoteDescription (`local` == false). Passing `local` helps to // differentiate initiator (caller) from answerer (callee). - RTCError MaybeCreateJsepTransport( - bool local, - const cricket::ContentInfo& content_info, - const cricket::SessionDescription& description) + RTCError MaybeCreateJsepTransport(bool local, + const ContentInfo& content_info, + const SessionDescription& description) RTC_RUN_ON(network_thread_); void DestroyAllJsepTransports_n() RTC_RUN_ON(network_thread_); - void SetIceRole_n(cricket::IceRole ice_role) RTC_RUN_ON(network_thread_); + void SetIceRole_n(IceRole ice_role) RTC_RUN_ON(network_thread_); - cricket::IceRole DetermineIceRole( - cricket::JsepTransport* jsep_transport, - const cricket::TransportInfo& transport_info, - SdpType type, - bool local); + IceRole DetermineIceRole(JsepTransport* jsep_transport, + const TransportInfo& transport_info, + SdpType type, + bool local); - std::unique_ptr CreateDtlsTransport( - const cricket::ContentInfo& content_info, - cricket::IceTransportInternal* ice); - rtc::scoped_refptr CreateIceTransport( + std::unique_ptr CreateDtlsTransport( + const ContentInfo& content_info, + IceTransportInternal* ice); + scoped_refptr CreateIceTransport( const std::string& transport_name, bool rtcp); - std::unique_ptr CreateUnencryptedRtpTransport( + std::unique_ptr CreateUnencryptedRtpTransport( const std::string& transport_name, - rtc::PacketTransportInternal* rtp_packet_transport, - rtc::PacketTransportInternal* rtcp_packet_transport); - std::unique_ptr CreateSdesTransport( + PacketTransportInternal* rtp_packet_transport, + PacketTransportInternal* rtcp_packet_transport); + std::unique_ptr CreateSdesTransport( const std::string& transport_name, - cricket::DtlsTransportInternal* rtp_dtls_transport, - cricket::DtlsTransportInternal* rtcp_dtls_transport); - std::unique_ptr CreateDtlsSrtpTransport( + DtlsTransportInternal* rtp_dtls_transport, + DtlsTransportInternal* rtcp_dtls_transport); + std::unique_ptr CreateDtlsSrtpTransport( const std::string& transport_name, - cricket::DtlsTransportInternal* rtp_dtls_transport, - cricket::DtlsTransportInternal* rtcp_dtls_transport); + DtlsTransportInternal* rtp_dtls_transport, + DtlsTransportInternal* rtcp_dtls_transport); // Collect all the DtlsTransports, including RTP and RTCP, from the // JsepTransports, including those not mapped to a MID because they are being // kept alive in case of rollback. - std::vector GetDtlsTransports(); + std::vector GetDtlsTransports(); // Same as the above, but doesn't include rollback transports. // JsepTransportController can iterate all the DtlsTransports and update the // aggregate states. - std::vector GetActiveDtlsTransports(); + std::vector GetActiveDtlsTransports(); // Handlers for signals from Transport. 
- void OnTransportWritableState_n(rtc::PacketTransportInternal* transport) + void OnTransportWritableState_n(PacketTransportInternal* transport) RTC_RUN_ON(network_thread_); - void OnTransportReceivingState_n(rtc::PacketTransportInternal* transport) + void OnTransportReceivingState_n(PacketTransportInternal* transport) RTC_RUN_ON(network_thread_); - void OnTransportGatheringState_n(cricket::IceTransportInternal* transport) + void OnTransportGatheringState_n(IceTransportInternal* transport) RTC_RUN_ON(network_thread_); - void OnTransportCandidateGathered_n(cricket::IceTransportInternal* transport, - const cricket::Candidate& candidate) + void OnTransportCandidateGathered_n(IceTransportInternal* transport, + const Candidate& candidate) RTC_RUN_ON(network_thread_); - void OnTransportCandidateError_n(cricket::IceTransportInternal* transport, - const cricket::IceCandidateErrorEvent& event) + void OnTransportCandidateError_n(IceTransportInternal* transport, + const IceCandidateErrorEvent& event) RTC_RUN_ON(network_thread_); - void OnTransportCandidatesRemoved_n(cricket::IceTransportInternal* transport, - const cricket::Candidates& candidates) + void OnTransportCandidatesRemoved_n(IceTransportInternal* transport, + const Candidates& candidates) RTC_RUN_ON(network_thread_); - void OnTransportRoleConflict_n(cricket::IceTransportInternal* transport) + void OnTransportRoleConflict_n(IceTransportInternal* transport) RTC_RUN_ON(network_thread_); - void OnTransportStateChanged_n(cricket::IceTransportInternal* transport) + void OnTransportStateChanged_n(IceTransportInternal* transport) RTC_RUN_ON(network_thread_); - void OnTransportCandidatePairChanged_n( - const cricket::CandidatePairChangeEvent& event) + void OnTransportCandidatePairChanged_n(const CandidatePairChangeEvent& event) RTC_RUN_ON(network_thread_); void UpdateAggregateStates_n() RTC_RUN_ON(network_thread_); - void OnRtcpPacketReceived_n(rtc::CopyOnWriteBuffer* packet, - int64_t packet_time_us) + void OnRtcpPacketReceived_n(CopyOnWriteBuffer* packet, int64_t packet_time_us) RTC_RUN_ON(network_thread_); - void OnUnDemuxableRtpPacketReceived_n(const webrtc::RtpPacketReceived& packet) + void OnUnDemuxableRtpPacketReceived_n(const RtpPacketReceived& packet) RTC_RUN_ON(network_thread_); - void OnDtlsHandshakeError(rtc::SSLHandshakeError error); + void OnDtlsHandshakeError(SSLHandshakeError error); - bool OnTransportChanged(const std::string& mid, - cricket::JsepTransport* transport); + bool OnTransportChanged(const std::string& mid, JsepTransport* transport); - rtc::Thread* const network_thread_ = nullptr; - cricket::PortAllocator* const port_allocator_ = nullptr; + const Environment env_; + Thread* const network_thread_ = nullptr; + PortAllocator* const port_allocator_ = nullptr; AsyncDnsResolverFactoryInterface* const async_dns_resolver_factory_ = nullptr; JsepTransportCollection transports_ RTC_GUARDED_BY(network_thread_); // Aggregate states for Transports. 
// standardized_ice_connection_state_ is intended to replace // ice_connection_state, see bugs.webrtc.org/9308 - cricket::IceConnectionState ice_connection_state_ = - cricket::kIceConnectionConnecting; + IceConnectionState ice_connection_state_ = kIceConnectionConnecting; PeerConnectionInterface::IceConnectionState standardized_ice_connection_state_ = PeerConnectionInterface::kIceConnectionNew; PeerConnectionInterface::PeerConnectionState combined_connection_state_ = PeerConnectionInterface::PeerConnectionState::kNew; - cricket::IceGatheringState ice_gathering_state_ = cricket::kIceGatheringNew; + IceGatheringState ice_gathering_state_ = kIceGatheringNew; const Config config_; bool active_reset_srtp_params_ RTC_GUARDED_BY(network_thread_); - const cricket::SessionDescription* local_desc_ = nullptr; - const cricket::SessionDescription* remote_desc_ = nullptr; - absl::optional initial_offerer_; + std::optional initial_offerer_; - cricket::IceConfig ice_config_; - cricket::IceRole ice_role_ = cricket::ICEROLE_CONTROLLING; - uint64_t ice_tiebreaker_ = rtc::CreateRandomId64(); - rtc::scoped_refptr certificate_; + IceConfig ice_config_; + IceRole ice_role_ = ICEROLE_CONTROLLING; + scoped_refptr certificate_; BundleManager bundles_; + // Reference to the SdpOfferAnswerHandler's payload type picker. + PayloadTypePicker& payload_type_picker_; }; } // namespace webrtc diff --git a/pc/jsep_transport_controller_unittest.cc b/pc/jsep_transport_controller_unittest.cc index d3d238115d..e8d811769f 100644 --- a/pc/jsep_transport_controller_unittest.cc +++ b/pc/jsep_transport_controller_unittest.cc @@ -10,34 +10,68 @@ #include "pc/jsep_transport_controller.h" +#include #include +#include +#include #include #include +#include +#include "api/candidate.h" +#include "api/crypto/crypto_options.h" #include "api/dtls_transport_interface.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/ice_transport_interface.h" +#include "api/jsep.h" +#include "api/make_ref_counted.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/scoped_refptr.h" +#include "api/test/rtc_error_matchers.h" +#include "api/transport/data_channel_transport_interface.h" #include "api/transport/enums.h" -#include "p2p/base/candidate_pair_interface.h" -#include "p2p/base/dtls_transport_factory.h" -#include "p2p/base/fake_dtls_transport.h" -#include "p2p/base/fake_ice_transport.h" +#include "api/units/time_delta.h" +#include "call/payload_type.h" +#include "call/payload_type_picker.h" +#include "media/base/codec.h" +#include "media/base/media_constants.h" +#include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" +#include "p2p/base/port_allocator.h" +#include "p2p/base/transport_description.h" #include "p2p/base/transport_info.h" +#include "p2p/dtls/dtls_transport_factory.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "p2p/dtls/fake_dtls_transport.h" +#include "p2p/test/fake_ice_transport.h" +#include "pc/dtls_transport.h" +#include "pc/rtp_transport_internal.h" +#include "pc/session_description.h" +#include "pc/transport_stats.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/fake_ssl_identity.h" -#include "rtc_base/gunit.h" #include "rtc_base/logging.h" #include "rtc_base/net_helper.h" +#include "rtc_base/rtc_certificate.h" #include "rtc_base/socket_address.h" +#include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_fingerprint.h" #include "rtc_base/ssl_identity.h" 
+#include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/task_queue_for_test.h" +#include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" +#include "test/gmock.h" #include "test/gtest.h" #include "test/scoped_key_value_config.h" +#include "test/wait_until.h" -using cricket::Candidate; -using cricket::Candidates; -using cricket::FakeDtlsTransport; -using webrtc::SdpType; +using webrtc::Candidate; +using ::webrtc::Candidates; +using ::webrtc::FakeDtlsTransport; static const int kTimeout = 100; static const char kIceUfrag1[] = "u0001"; @@ -56,26 +90,26 @@ static const char kDataMid1[] = "data1"; namespace webrtc { -class FakeIceTransportFactory : public webrtc::IceTransportFactory { +class FakeIceTransportFactory : public IceTransportFactory { public: ~FakeIceTransportFactory() override = default; - rtc::scoped_refptr CreateIceTransport( + scoped_refptr CreateIceTransport( const std::string& transport_name, int component, IceTransportInit init) override { - return rtc::make_ref_counted( - std::make_unique(transport_name, component)); + return make_ref_counted( + std::make_unique(transport_name, component)); } }; -class FakeDtlsTransportFactory : public cricket::DtlsTransportFactory { +class FakeDtlsTransportFactory : public DtlsTransportFactory { public: - std::unique_ptr CreateDtlsTransport( - cricket::IceTransportInternal* ice, - const webrtc::CryptoOptions& crypto_options, - rtc::SSLProtocolVersion max_version) override { + std::unique_ptr CreateDtlsTransport( + IceTransportInternal* ice, + const CryptoOptions& crypto_options, + SSLProtocolVersion max_version) override { return std::make_unique( - static_cast(ice)); + static_cast(ice)); } }; @@ -83,33 +117,34 @@ class JsepTransportControllerTest : public JsepTransportController::Observer, public ::testing::Test, public sigslot::has_slots<> { public: - JsepTransportControllerTest() : signaling_thread_(rtc::Thread::Current()) { + JsepTransportControllerTest() + : env_(CreateEnvironment(&field_trials_)), + signaling_thread_(Thread::Current()) { fake_ice_transport_factory_ = std::make_unique(); fake_dtls_transport_factory_ = std::make_unique(); } - void CreateJsepTransportController( - JsepTransportController::Config config, - rtc::Thread* network_thread = rtc::Thread::Current(), - cricket::PortAllocator* port_allocator = nullptr) { + void CreateJsepTransportController(JsepTransportController::Config config, + Thread* network_thread = Thread::Current(), + PortAllocator* port_allocator = nullptr) { config.transport_observer = this; - config.rtcp_handler = [](const rtc::CopyOnWriteBuffer& packet, + config.rtcp_handler = [](const CopyOnWriteBuffer& packet, int64_t packet_time_us) { RTC_DCHECK_NOTREACHED(); }; config.ice_transport_factory = fake_ice_transport_factory_.get(); config.dtls_transport_factory = fake_dtls_transport_factory_.get(); - config.on_dtls_handshake_error_ = [](rtc::SSLHandshakeError s) {}; - config.field_trials = &field_trials_; + config.on_dtls_handshake_error_ = [](SSLHandshakeError s) {}; transport_controller_ = std::make_unique( - network_thread, port_allocator, nullptr /* async_resolver_factory */, + env_, network_thread, port_allocator, + nullptr /* async_resolver_factory */, payload_type_picker_, std::move(config)); SendTask(network_thread, [&] { ConnectTransportControllerSignals(); }); } void ConnectTransportControllerSignals() { transport_controller_->SubscribeIceConnectionState( - [this](cricket::IceConnectionState s) { + [this](IceConnectionState s) { 
JsepTransportControllerTest::OnConnectionState(s); }); transport_controller_->SubscribeConnectionState( @@ -121,33 +156,30 @@ class JsepTransportControllerTest : public JsepTransportController::Observer, JsepTransportControllerTest::OnStandardizedIceConnectionState(s); }); transport_controller_->SubscribeIceGatheringState( - [this](cricket::IceGatheringState s) { + [this](IceGatheringState s) { JsepTransportControllerTest::OnGatheringState(s); }); transport_controller_->SubscribeIceCandidateGathered( [this](const std::string& transport, - const std::vector& candidates) { + const std::vector& candidates) { JsepTransportControllerTest::OnCandidatesGathered(transport, candidates); }); } - std::unique_ptr - CreateSessionDescriptionWithoutBundle() { - auto description = std::make_unique(); + std::unique_ptr CreateSessionDescriptionWithoutBundle() { + auto description = std::make_unique(); AddAudioSection(description.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(description.get(), kVideoMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); return description; } - std::unique_ptr + std::unique_ptr CreateSessionDescriptionWithBundleGroup() { auto description = CreateSessionDescriptionWithoutBundle(); - cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group(GROUP_TYPE_BUNDLE); bundle_group.AddContentName(kAudioMid1); bundle_group.AddContentName(kVideoMid1); description->AddGroup(bundle_group); @@ -155,14 +187,13 @@ class JsepTransportControllerTest : public JsepTransportController::Observer, return description; } - std::unique_ptr + std::unique_ptr CreateSessionDescriptionWithBundledData() { auto description = CreateSessionDescriptionWithoutBundle(); - AddDataSection(description.get(), kDataMid1, - cricket::MediaProtocolType::kSctp, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, + AddDataSection(description.get(), kDataMid1, MediaProtocolType::kSctp, + kIceUfrag1, kIcePwd1, ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); - cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group(GROUP_TYPE_BUNDLE); bundle_group.AddContentName(kAudioMid1); bundle_group.AddContentName(kVideoMid1); bundle_group.AddContentName(kDataMid1); @@ -170,77 +201,76 @@ class JsepTransportControllerTest : public JsepTransportController::Observer, return description; } - void AddAudioSection(cricket::SessionDescription* description, + void AddAudioSection(SessionDescription* description, const std::string& mid, const std::string& ufrag, const std::string& pwd, - cricket::IceMode ice_mode, - cricket::ConnectionRole conn_role, - rtc::scoped_refptr cert) { - std::unique_ptr audio( - new cricket::AudioContentDescription()); + IceMode ice_mode, + ConnectionRole conn_role, + scoped_refptr cert) { + std::unique_ptr audio( + new AudioContentDescription()); // Set RTCP-mux to be true because the default policy is "mux required". 
audio->set_rtcp_mux(true); - description->AddContent(mid, cricket::MediaProtocolType::kRtp, + description->AddContent(mid, MediaProtocolType::kRtp, /*rejected=*/false, std::move(audio)); AddTransportInfo(description, mid, ufrag, pwd, ice_mode, conn_role, cert); } - void AddVideoSection(cricket::SessionDescription* description, + void AddVideoSection(SessionDescription* description, const std::string& mid, const std::string& ufrag, const std::string& pwd, - cricket::IceMode ice_mode, - cricket::ConnectionRole conn_role, - rtc::scoped_refptr cert) { - std::unique_ptr video( - new cricket::VideoContentDescription()); + IceMode ice_mode, + ConnectionRole conn_role, + scoped_refptr cert) { + std::unique_ptr video( + new VideoContentDescription()); // Set RTCP-mux to be true because the default policy is "mux required". video->set_rtcp_mux(true); - description->AddContent(mid, cricket::MediaProtocolType::kRtp, + description->AddContent(mid, MediaProtocolType::kRtp, /*rejected=*/false, std::move(video)); AddTransportInfo(description, mid, ufrag, pwd, ice_mode, conn_role, cert); } - void AddDataSection(cricket::SessionDescription* description, + void AddDataSection(SessionDescription* description, const std::string& mid, - cricket::MediaProtocolType protocol_type, + MediaProtocolType protocol_type, const std::string& ufrag, const std::string& pwd, - cricket::IceMode ice_mode, - cricket::ConnectionRole conn_role, - rtc::scoped_refptr cert) { - RTC_CHECK(protocol_type == cricket::MediaProtocolType::kSctp); - std::unique_ptr data( - new cricket::SctpDataContentDescription()); + IceMode ice_mode, + ConnectionRole conn_role, + scoped_refptr cert) { + RTC_CHECK(protocol_type == MediaProtocolType::kSctp); + std::unique_ptr data( + new SctpDataContentDescription()); data->set_rtcp_mux(true); description->AddContent(mid, protocol_type, /*rejected=*/false, std::move(data)); AddTransportInfo(description, mid, ufrag, pwd, ice_mode, conn_role, cert); } - void AddTransportInfo(cricket::SessionDescription* description, + void AddTransportInfo(SessionDescription* description, const std::string& mid, const std::string& ufrag, const std::string& pwd, - cricket::IceMode ice_mode, - cricket::ConnectionRole conn_role, - rtc::scoped_refptr cert) { - std::unique_ptr fingerprint; + IceMode ice_mode, + ConnectionRole conn_role, + scoped_refptr cert) { + std::unique_ptr fingerprint; if (cert) { - fingerprint = rtc::SSLFingerprint::CreateFromCertificate(*cert); + fingerprint = SSLFingerprint::CreateFromCertificate(*cert); } - cricket::TransportDescription transport_desc(std::vector(), - ufrag, pwd, ice_mode, - conn_role, fingerprint.get()); - description->AddTransportInfo(cricket::TransportInfo(mid, transport_desc)); + TransportDescription transport_desc(std::vector(), ufrag, pwd, + ice_mode, conn_role, fingerprint.get()); + description->AddTransportInfo(TransportInfo(mid, transport_desc)); } - cricket::IceConfig CreateIceConfig( + IceConfig CreateIceConfig( int receiving_timeout, - cricket::ContinualGatheringPolicy continual_gathering_policy) { - cricket::IceConfig config; + ContinualGatheringPolicy continual_gathering_policy) { + IceConfig config; config.receiving_timeout = receiving_timeout; config.continual_gathering_policy = continual_gathering_policy; return config; @@ -249,9 +279,9 @@ class JsepTransportControllerTest : public JsepTransportController::Observer, Candidate CreateCandidate(const std::string& transport_name, int component) { Candidate c; c.set_transport_name(transport_name); - 
c.set_address(rtc::SocketAddress("192.168.1.1", 8000)); + c.set_address(SocketAddress("192.168.1.1", 8000)); c.set_component(component); - c.set_protocol(cricket::UDP_PROTOCOL_NAME); + c.set_protocol(UDP_PROTOCOL_NAME); c.set_priority(1); return c; } @@ -265,9 +295,10 @@ class JsepTransportControllerTest : public JsepTransportController::Observer, } auto description = CreateSessionDescriptionWithBundleGroup(); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); transport_controller_->MaybeStartGathering(); auto fake_audio_dtls = static_cast( @@ -293,15 +324,15 @@ class JsepTransportControllerTest : public JsepTransportController::Observer, } protected: - void OnConnectionState(cricket::IceConnectionState state) { - ice_signaled_on_thread_ = rtc::Thread::Current(); + void OnConnectionState(IceConnectionState state) { + ice_signaled_on_thread_ = Thread::Current(); connection_state_ = state; ++connection_state_signal_count_; } void OnStandardizedIceConnectionState( PeerConnectionInterface::IceConnectionState state) { - ice_signaled_on_thread_ = rtc::Thread::Current(); + ice_signaled_on_thread_ = Thread::Current(); ice_connection_state_ = state; ++ice_connection_state_signal_count_; } @@ -310,20 +341,20 @@ class JsepTransportControllerTest : public JsepTransportController::Observer, PeerConnectionInterface::PeerConnectionState state) { RTC_LOG(LS_INFO) << "OnCombinedConnectionState: " << static_cast(state); - ice_signaled_on_thread_ = rtc::Thread::Current(); + ice_signaled_on_thread_ = Thread::Current(); combined_connection_state_ = state; ++combined_connection_state_signal_count_; } - void OnGatheringState(cricket::IceGatheringState state) { - ice_signaled_on_thread_ = rtc::Thread::Current(); + void OnGatheringState(IceGatheringState state) { + ice_signaled_on_thread_ = Thread::Current(); gathering_state_ = state; ++gathering_state_signal_count_; } void OnCandidatesGathered(const std::string& transport_name, const Candidates& candidates) { - ice_signaled_on_thread_ = rtc::Thread::Current(); + ice_signaled_on_thread_ = Thread::Current(); candidates_[transport_name].insert(candidates_[transport_name].end(), candidates.begin(), candidates.end()); ++candidates_signal_count_; @@ -333,7 +364,7 @@ class JsepTransportControllerTest : public JsepTransportController::Observer, bool OnTransportChanged( const std::string& mid, RtpTransportInternal* rtp_transport, - rtc::scoped_refptr dtls_transport, + scoped_refptr dtls_transport, DataChannelTransportInterface* data_channel_transport) override { changed_rtp_transport_by_mid_[mid] = rtp_transport; if (dtls_transport) { @@ -344,16 +375,17 @@ class JsepTransportControllerTest : public JsepTransportController::Observer, return true; } - rtc::AutoThread main_thread_; + test::ScopedKeyValueConfig field_trials_; + Environment env_; + AutoThread main_thread_; // Information received from signals from transport controller. 
- cricket::IceConnectionState connection_state_ = - cricket::kIceConnectionConnecting; + IceConnectionState connection_state_ = kIceConnectionConnecting; PeerConnectionInterface::IceConnectionState ice_connection_state_ = PeerConnectionInterface::kIceConnectionNew; PeerConnectionInterface::PeerConnectionState combined_connection_state_ = PeerConnectionInterface::PeerConnectionState::kNew; bool receiving_ = false; - cricket::IceGatheringState gathering_state_ = cricket::kIceGatheringNew; + IceGatheringState gathering_state_ = kIceGatheringNew; // transport_name => candidates std::map candidates_; // Counts of each signal emitted. @@ -365,29 +397,28 @@ class JsepTransportControllerTest : public JsepTransportController::Observer, int candidates_signal_count_ = 0; // `network_thread_` should be destroyed after `transport_controller_` - std::unique_ptr network_thread_; + std::unique_ptr network_thread_; std::unique_ptr fake_ice_transport_factory_; std::unique_ptr fake_dtls_transport_factory_; - rtc::Thread* const signaling_thread_ = nullptr; - rtc::Thread* ice_signaled_on_thread_ = nullptr; + Thread* const signaling_thread_ = nullptr; + Thread* ice_signaled_on_thread_ = nullptr; // Used to verify the SignalRtpTransportChanged/SignalDtlsTransportChanged are // signaled correctly. std::map changed_rtp_transport_by_mid_; - std::map - changed_dtls_transport_by_mid_; - + std::map changed_dtls_transport_by_mid_; + webrtc::PayloadTypePicker payload_type_picker_; // Transport controller needs to be destroyed first, because it may issue // callbacks that modify the changed_*_by_mid in the destructor. std::unique_ptr transport_controller_; - webrtc::test::ScopedKeyValueConfig field_trials_; }; TEST_F(JsepTransportControllerTest, GetRtpTransport) { CreateJsepTransportController(JsepTransportController::Config()); auto description = CreateSessionDescriptionWithoutBundle(); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); auto audio_rtp_transport = transport_controller_->GetRtpTransport(kAudioMid1); auto video_rtp_transport = transport_controller_->GetRtpTransport(kVideoMid1); EXPECT_NE(nullptr, audio_rtp_transport); @@ -402,9 +433,10 @@ TEST_F(JsepTransportControllerTest, GetDtlsTransport) { config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyNegotiate; CreateJsepTransportController(std::move(config)); auto description = CreateSessionDescriptionWithoutBundle(); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); EXPECT_NE(nullptr, transport_controller_->GetDtlsTransport(kAudioMid1)); EXPECT_NE(nullptr, transport_controller_->GetRtcpDtlsTransport(kAudioMid1)); EXPECT_NE(nullptr, @@ -425,7 +457,7 @@ TEST_F(JsepTransportControllerTest, GetDtlsTransport) { // and verify that the resulting container is empty. 
auto dtls_transport = transport_controller_->LookupDtlsTransportByMid(kVideoMid1); - webrtc::DtlsTransport* my_transport = + DtlsTransport* my_transport = static_cast(dtls_transport.get()); EXPECT_NE(nullptr, my_transport->internal()); transport_controller_.reset(); @@ -437,9 +469,10 @@ TEST_F(JsepTransportControllerTest, GetDtlsTransportWithRtcpMux) { config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; CreateJsepTransportController(std::move(config)); auto description = CreateSessionDescriptionWithoutBundle(); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); EXPECT_NE(nullptr, transport_controller_->GetDtlsTransport(kAudioMid1)); EXPECT_EQ(nullptr, transport_controller_->GetRtcpDtlsTransport(kAudioMid1)); EXPECT_NE(nullptr, transport_controller_->GetDtlsTransport(kVideoMid1)); @@ -449,12 +482,13 @@ TEST_F(JsepTransportControllerTest, GetDtlsTransportWithRtcpMux) { TEST_F(JsepTransportControllerTest, SetIceConfig) { CreateJsepTransportController(JsepTransportController::Config()); auto description = CreateSessionDescriptionWithoutBundle(); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); transport_controller_->SetIceConfig( - CreateIceConfig(kTimeout, cricket::GATHER_CONTINUALLY)); + CreateIceConfig(kTimeout, GATHER_CONTINUALLY)); FakeDtlsTransport* fake_audio_dtls = static_cast( transport_controller_->GetDtlsTransport(kAudioMid1)); ASSERT_NE(nullptr, fake_audio_dtls); @@ -464,12 +498,12 @@ TEST_F(JsepTransportControllerTest, SetIceConfig) { // Test that value stored in controller is applied to new transports. AddAudioSection(description.get(), kAudioMid2, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); fake_audio_dtls = static_cast( transport_controller_->GetDtlsTransport(kAudioMid2)); ASSERT_NE(nullptr, fake_audio_dtls); @@ -482,11 +516,14 @@ TEST_F(JsepTransportControllerTest, SetIceConfig) { TEST_F(JsepTransportControllerTest, NeedIceRestart) { CreateJsepTransportController(JsepTransportController::Config()); auto description = CreateSessionDescriptionWithoutBundle(); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); + // TODO(tommi): Note that _now_ we set `remote`. (was not set before). EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, description.get()) + ->SetRemoteDescription(SdpType::kAnswer, description.get(), + description.get()) .ok()); // Initially NeedsIceRestart should return false. 
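The hunks above and below show the signature change that runs through this whole test file: SetLocalDescription and SetRemoteDescription now take a third description argument, passed as nullptr while only one side has been applied (see the TODO(tommi) note above). A minimal sketch of the resulting offer/answer flow, inferred only from these call sites: the (type, local description, remote description) parameter order is an assumption based on them, and `offer`/`answer` are hypothetical locals built with the fixture helpers shown earlier in this diff.

// Sketch only, not part of the patch: the three-argument call pattern used
// throughout this file, assuming the order (SdpType, local desc, remote desc).
auto offer = CreateSessionDescriptionWithoutBundle();
auto answer = CreateSessionDescriptionWithoutBundle();
// Apply the local offer; no remote description exists yet, so pass nullptr.
EXPECT_TRUE(transport_controller_
                ->SetLocalDescription(SdpType::kOffer, offer.get(), nullptr)
                .ok());
// Apply the remote answer; both descriptions are now handed to the controller.
EXPECT_TRUE(
    transport_controller_
        ->SetRemoteDescription(SdpType::kAnswer, offer.get(), answer.get())
        .ok());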
@@ -505,7 +542,8 @@ TEST_F(JsepTransportControllerTest, NeedIceRestart) { audio_transport_info->description.ice_ufrag = kIceUfrag2; audio_transport_info->description.ice_pwd = kIcePwd2; EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) + ->SetLocalDescription(SdpType::kOffer, description.get(), + description.get()) .ok()); // Because the ICE is only restarted for audio, NeedsIceRestart is expected to // return false for audio and true for video. @@ -516,29 +554,33 @@ TEST_F(JsepTransportControllerTest, NeedIceRestart) { TEST_F(JsepTransportControllerTest, MaybeStartGathering) { CreateJsepTransportController(JsepTransportController::Config()); auto description = CreateSessionDescriptionWithBundleGroup(); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); // After setting the local description, we should be able to start gathering // candidates. transport_controller_->MaybeStartGathering(); - EXPECT_EQ_WAIT(cricket::kIceGatheringGathering, gathering_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceGatheringGathering; }, + ::testing::Eq(gathering_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(1, gathering_state_signal_count_); } TEST_F(JsepTransportControllerTest, AddRemoveRemoteCandidates) { CreateJsepTransportController(JsepTransportController::Config()); auto description = CreateSessionDescriptionWithoutBundle(); - transport_controller_->SetLocalDescription(SdpType::kOffer, - description.get()); - transport_controller_->SetRemoteDescription(SdpType::kAnswer, - description.get()); + transport_controller_->SetLocalDescription(SdpType::kOffer, description.get(), + nullptr); + transport_controller_->SetRemoteDescription( + SdpType::kAnswer, description.get(), description.get()); auto fake_audio_dtls = static_cast( transport_controller_->GetDtlsTransport(kAudioMid1)); ASSERT_NE(nullptr, fake_audio_dtls); Candidates candidates; candidates.push_back( - CreateCandidate(kAudioMid1, cricket::ICE_CANDIDATE_COMPONENT_RTP)); + CreateCandidate(kAudioMid1, ICE_CANDIDATE_COMPONENT_RTP)); EXPECT_TRUE( transport_controller_->AddRemoteCandidates(kAudioMid1, candidates).ok()); EXPECT_EQ(1U, @@ -552,22 +594,21 @@ TEST_F(JsepTransportControllerTest, AddRemoveRemoteCandidates) { TEST_F(JsepTransportControllerTest, SetAndGetLocalCertificate) { CreateJsepTransportController(JsepTransportController::Config()); - rtc::scoped_refptr certificate1 = - rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT)); - rtc::scoped_refptr returned_certificate; + scoped_refptr certificate1 = + RTCCertificate::Create(SSLIdentity::Create("session1", KT_DEFAULT)); + scoped_refptr returned_certificate; - auto description = std::make_unique(); + auto description = std::make_unique(); AddAudioSection(description.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - certificate1); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, certificate1); // Apply the local certificate. EXPECT_TRUE(transport_controller_->SetLocalCertificate(certificate1)); // Apply the local description. 
- EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); returned_certificate = transport_controller_->GetLocalCertificate(kAudioMid1); EXPECT_TRUE(returned_certificate); EXPECT_EQ(certificate1->identity()->certificate().ToPEMString(), @@ -577,24 +618,24 @@ TEST_F(JsepTransportControllerTest, SetAndGetLocalCertificate) { EXPECT_EQ(nullptr, transport_controller_->GetLocalCertificate(kVideoMid1)); // Shouldn't be able to change the identity once set. - rtc::scoped_refptr certificate2 = - rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("session2", rtc::KT_DEFAULT)); + scoped_refptr certificate2 = + RTCCertificate::Create(SSLIdentity::Create("session2", KT_DEFAULT)); EXPECT_FALSE(transport_controller_->SetLocalCertificate(certificate2)); } TEST_F(JsepTransportControllerTest, GetRemoteSSLCertChain) { CreateJsepTransportController(JsepTransportController::Config()); auto description = CreateSessionDescriptionWithBundleGroup(); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); - rtc::FakeSSLCertificate fake_certificate("fake_data"); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); + FakeSSLCertificate fake_certificate("fake_data"); auto fake_audio_dtls = static_cast( transport_controller_->GetDtlsTransport(kAudioMid1)); fake_audio_dtls->SetRemoteSSLCertificate(&fake_certificate); - std::unique_ptr returned_cert_chain = + std::unique_ptr returned_cert_chain = transport_controller_->GetRemoteSSLCertChain(kAudioMid1); ASSERT_TRUE(returned_cert_chain); ASSERT_EQ(1u, returned_cert_chain->GetSize()); @@ -607,46 +648,46 @@ TEST_F(JsepTransportControllerTest, GetRemoteSSLCertChain) { TEST_F(JsepTransportControllerTest, GetDtlsRole) { CreateJsepTransportController(JsepTransportController::Config()); - auto offer_certificate = rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("offer", rtc::KT_DEFAULT)); - auto answer_certificate = rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("answer", rtc::KT_DEFAULT)); + auto offer_certificate = + RTCCertificate::Create(SSLIdentity::Create("offer", KT_DEFAULT)); + auto answer_certificate = + RTCCertificate::Create(SSLIdentity::Create("answer", KT_DEFAULT)); transport_controller_->SetLocalCertificate(offer_certificate); - auto offer_desc = std::make_unique(); + auto offer_desc = std::make_unique(); AddAudioSection(offer_desc.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - offer_certificate); - auto answer_desc = std::make_unique(); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, offer_certificate); + auto answer_desc = std::make_unique(); AddAudioSection(answer_desc.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - answer_certificate); + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, answer_certificate); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, offer_desc.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, offer_desc.get(), nullptr) + .ok()); - absl::optional role = - transport_controller_->GetDtlsRole(kAudioMid1); + std::optional role = transport_controller_->GetDtlsRole(kAudioMid1); // The DTLS role is not decided yet. 
EXPECT_FALSE(role); EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, answer_desc.get()) + ->SetRemoteDescription(SdpType::kAnswer, offer_desc.get(), + answer_desc.get()) .ok()); role = transport_controller_->GetDtlsRole(kAudioMid1); ASSERT_TRUE(role); - EXPECT_EQ(rtc::SSL_CLIENT, *role); + EXPECT_EQ(SSL_CLIENT, *role); } TEST_F(JsepTransportControllerTest, GetStats) { CreateJsepTransportController(JsepTransportController::Config()); auto description = CreateSessionDescriptionWithBundleGroup(); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); - cricket::TransportStats stats; + TransportStats stats; EXPECT_TRUE(transport_controller_->GetStats(kAudioMid1, &stats)); EXPECT_EQ(kAudioMid1, stats.transport_name); EXPECT_EQ(1u, stats.channel_stats.size()); @@ -657,23 +698,34 @@ TEST_F(JsepTransportControllerTest, GetStats) { TEST_F(JsepTransportControllerTest, SignalConnectionStateFailed) { CreateJsepTransportController(JsepTransportController::Config()); auto description = CreateSessionDescriptionWithoutBundle(); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); - auto fake_ice = static_cast( + auto fake_ice = static_cast( transport_controller_->GetDtlsTransport(kAudioMid1)->ice_transport()); fake_ice->SetCandidatesGatheringComplete(); fake_ice->SetConnectionCount(1); // The connection stats will be failed if there is no active connection. fake_ice->SetConnectionCount(0); - EXPECT_EQ_WAIT(cricket::kIceConnectionFailed, connection_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceConnectionFailed; }, + ::testing::Eq(connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(1, connection_state_signal_count_); - EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionFailed, - ice_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil([&] { return PeerConnectionInterface::kIceConnectionFailed; }, + ::testing::Eq(ice_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(1, ice_connection_state_signal_count_); - EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kFailed, - combined_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil( + [&] { return PeerConnectionInterface::PeerConnectionState::kFailed; }, + ::testing::Eq(combined_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(1, combined_connection_state_signal_count_); } @@ -681,9 +733,10 @@ TEST_F(JsepTransportControllerTest, SignalConnectionStateConnectedNoMediaTransport) { CreateJsepTransportController(JsepTransportController::Config()); auto description = CreateSessionDescriptionWithoutBundle(); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); auto fake_audio_dtls = static_cast( transport_controller_->GetDtlsTransport(kAudioMid1)); @@ -701,37 +754,61 @@ TEST_F(JsepTransportControllerTest, fake_video_dtls->fake_ice_transport()->SetConnectionCount(0); fake_video_dtls->fake_ice_transport()->SetCandidatesGatheringComplete(); - 
EXPECT_EQ_WAIT(cricket::kIceConnectionFailed, connection_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceConnectionFailed; }, + ::testing::Eq(connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(1, connection_state_signal_count_); - EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionFailed, - ice_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil([&] { return PeerConnectionInterface::kIceConnectionFailed; }, + ::testing::Eq(ice_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(2, ice_connection_state_signal_count_); - EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kFailed, - combined_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil( + [&] { return PeerConnectionInterface::PeerConnectionState::kFailed; }, + ::testing::Eq(combined_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(2, combined_connection_state_signal_count_); fake_audio_dtls->SetDtlsState(DtlsTransportState::kConnected); fake_video_dtls->SetDtlsState(DtlsTransportState::kConnected); - // Set the connection count to be 2 and the cricket::FakeIceTransport will set + // Set the connection count to be 2 and the webrtc::FakeIceTransport will set // the transport state to be STATE_CONNECTING. fake_video_dtls->fake_ice_transport()->SetConnectionCount(2); fake_video_dtls->SetWritable(true); - EXPECT_EQ_WAIT(cricket::kIceConnectionConnected, connection_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceConnectionConnected; }, + ::testing::Eq(connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(2, connection_state_signal_count_); - EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionConnected, - ice_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil( + [&] { return PeerConnectionInterface::kIceConnectionConnected; }, + ::testing::Eq(ice_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(3, ice_connection_state_signal_count_); - EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kConnected, - combined_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil( + [&] { + return PeerConnectionInterface::PeerConnectionState::kConnected; + }, + ::testing::Eq(combined_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(3, combined_connection_state_signal_count_); } TEST_F(JsepTransportControllerTest, SignalConnectionStateComplete) { CreateJsepTransportController(JsepTransportController::Config()); auto description = CreateSessionDescriptionWithoutBundle(); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); auto fake_audio_dtls = static_cast( transport_controller_->GetDtlsTransport(kAudioMid1)); @@ -743,69 +820,106 @@ TEST_F(JsepTransportControllerTest, SignalConnectionStateComplete) { // We should only get a signal when all are connected. 
fake_audio_dtls->fake_ice_transport()->SetTransportState( IceTransportState::kCompleted, - cricket::IceTransportState::STATE_COMPLETED); + IceTransportStateInternal::STATE_COMPLETED); fake_audio_dtls->SetWritable(true); fake_audio_dtls->fake_ice_transport()->SetCandidatesGatheringComplete(); - EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionChecking, - ice_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil([&] { return PeerConnectionInterface::kIceConnectionChecking; }, + ::testing::Eq(ice_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(1, ice_connection_state_signal_count_); - EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kConnecting, - combined_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil( + [&] { + return PeerConnectionInterface::PeerConnectionState::kConnecting; + }, + ::testing::Eq(combined_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(1, combined_connection_state_signal_count_); fake_video_dtls->fake_ice_transport()->SetTransportState( - IceTransportState::kFailed, cricket::IceTransportState::STATE_FAILED); + IceTransportState::kFailed, IceTransportStateInternal::STATE_FAILED); fake_video_dtls->fake_ice_transport()->SetCandidatesGatheringComplete(); - EXPECT_EQ_WAIT(cricket::kIceConnectionFailed, connection_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceConnectionFailed; }, + ::testing::Eq(connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(1, connection_state_signal_count_); - EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionFailed, - ice_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil([&] { return PeerConnectionInterface::kIceConnectionFailed; }, + ::testing::Eq(ice_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(2, ice_connection_state_signal_count_); - EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kFailed, - combined_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil( + [&] { return PeerConnectionInterface::PeerConnectionState::kFailed; }, + ::testing::Eq(combined_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(2, combined_connection_state_signal_count_); fake_audio_dtls->SetDtlsState(DtlsTransportState::kConnected); fake_video_dtls->SetDtlsState(DtlsTransportState::kConnected); - // Set the connection count to be 1 and the cricket::FakeIceTransport will set + // Set the connection count to be 1 and the webrtc::FakeIceTransport will set // the transport state to be STATE_COMPLETED. 
fake_video_dtls->fake_ice_transport()->SetTransportState( IceTransportState::kCompleted, - cricket::IceTransportState::STATE_COMPLETED); + IceTransportStateInternal::STATE_COMPLETED); fake_video_dtls->SetWritable(true); - EXPECT_EQ_WAIT(cricket::kIceConnectionCompleted, connection_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceConnectionCompleted; }, + ::testing::Eq(connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(3, connection_state_signal_count_); - EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionCompleted, - ice_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil( + [&] { return PeerConnectionInterface::kIceConnectionCompleted; }, + ::testing::Eq(ice_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(3, ice_connection_state_signal_count_); - EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kConnected, - combined_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil( + [&] { + return PeerConnectionInterface::PeerConnectionState::kConnected; + }, + ::testing::Eq(combined_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(3, combined_connection_state_signal_count_); } TEST_F(JsepTransportControllerTest, SignalIceGatheringStateGathering) { CreateJsepTransportController(JsepTransportController::Config()); auto description = CreateSessionDescriptionWithoutBundle(); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); auto fake_audio_dtls = static_cast( transport_controller_->GetDtlsTransport(kAudioMid1)); fake_audio_dtls->fake_ice_transport()->MaybeStartGathering(); // Should be in the gathering state as soon as any transport starts gathering. 
- EXPECT_EQ_WAIT(cricket::kIceGatheringGathering, gathering_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceGatheringGathering; }, + ::testing::Eq(gathering_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(1, gathering_state_signal_count_); } TEST_F(JsepTransportControllerTest, SignalIceGatheringStateComplete) { CreateJsepTransportController(JsepTransportController::Config()); auto description = CreateSessionDescriptionWithoutBundle(); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); auto fake_audio_dtls = static_cast( transport_controller_->GetDtlsTransport(kAudioMid1)); @@ -813,7 +927,10 @@ TEST_F(JsepTransportControllerTest, SignalIceGatheringStateComplete) { transport_controller_->GetDtlsTransport(kVideoMid1)); fake_audio_dtls->fake_ice_transport()->MaybeStartGathering(); - EXPECT_EQ_WAIT(cricket::kIceGatheringGathering, gathering_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceGatheringGathering; }, + ::testing::Eq(gathering_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(1, gathering_state_signal_count_); // Have one transport finish gathering, to make sure gathering @@ -822,11 +939,17 @@ TEST_F(JsepTransportControllerTest, SignalIceGatheringStateComplete) { EXPECT_EQ(1, gathering_state_signal_count_); fake_video_dtls->fake_ice_transport()->MaybeStartGathering(); - EXPECT_EQ_WAIT(cricket::kIceGatheringGathering, gathering_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceGatheringGathering; }, + ::testing::Eq(gathering_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(1, gathering_state_signal_count_); fake_video_dtls->fake_ice_transport()->SetCandidatesGatheringComplete(); - EXPECT_EQ_WAIT(cricket::kIceGatheringComplete, gathering_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceGatheringComplete; }, + ::testing::Eq(gathering_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(2, gathering_state_signal_count_); } @@ -838,9 +961,10 @@ TEST_F(JsepTransportControllerTest, SignalingWhenLastIncompleteTransportDestroyed) { CreateJsepTransportController(JsepTransportController::Config()); auto description = CreateSessionDescriptionWithBundleGroup(); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); auto fake_audio_dtls = static_cast( transport_controller_->GetDtlsTransport(kAudioMid1)); @@ -849,7 +973,10 @@ TEST_F(JsepTransportControllerTest, EXPECT_NE(fake_audio_dtls, fake_video_dtls); fake_audio_dtls->fake_ice_transport()->MaybeStartGathering(); - EXPECT_EQ_WAIT(cricket::kIceGatheringGathering, gathering_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return webrtc::kIceGatheringGathering; }, + ::testing::Eq(gathering_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(1, gathering_state_signal_count_); // Let the audio transport complete. @@ -861,19 +988,26 @@ TEST_F(JsepTransportControllerTest, // Set the remote description and enable the bundle. 
EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, description.get()) + ->SetRemoteDescription(SdpType::kAnswer, description.get(), + description.get()) .ok()); // The BUNDLE should be enabled, the incomplete video transport should be // deleted and the states should be updated. fake_video_dtls = static_cast( transport_controller_->GetDtlsTransport(kVideoMid1)); EXPECT_EQ(fake_audio_dtls, fake_video_dtls); - EXPECT_EQ_WAIT(cricket::kIceConnectionCompleted, connection_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceConnectionCompleted; }, + ::testing::Eq(connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(PeerConnectionInterface::kIceConnectionCompleted, ice_connection_state_); EXPECT_EQ(PeerConnectionInterface::PeerConnectionState::kConnected, combined_connection_state_); - EXPECT_EQ_WAIT(cricket::kIceGatheringComplete, gathering_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceGatheringComplete; }, + ::testing::Eq(gathering_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(2, gathering_state_signal_count_); } @@ -883,15 +1017,16 @@ TEST_F(JsepTransportControllerTest, TEST_F(JsepTransportControllerTest, IceStatesReturnToNewWhenTransportsDiscarded) { CreateJsepTransportController(JsepTransportController::Config()); - auto description = std::make_unique(); + auto description = std::make_unique(); AddAudioSection(description.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, description.get()) + ->SetRemoteDescription(SdpType::kAnswer, description.get(), + description.get()) .ok()); // Trigger and verify initial non-new states. 
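The other change repeated across these hunks is mechanical: each EXPECT_EQ_WAIT(expected, actual, kTimeout) from rtc_base/gunit.h (dropped from the include list earlier in this diff) becomes an EXPECT_THAT over WaitUntil from test/wait_until.h, checked with IsRtcOk() from api/test/rtc_error_matchers.h, with the timeout spelled out as a webrtc::TimeDelta. A before/after sketch of that shape, taken from the pattern used in this patch rather than from any single assertion:

// Sketch only, not part of the patch: the EXPECT_EQ_WAIT -> WaitUntil shape.
// Old form (rtc_base/gunit.h, removed from the includes above):
//   EXPECT_EQ_WAIT(cricket::kIceGatheringComplete, gathering_state_, kTimeout);
// New form (test/wait_until.h + api/test/rtc_error_matchers.h):
EXPECT_THAT(WaitUntil([&] { return kIceGatheringComplete; },
                      ::testing::Eq(gathering_state_),
                      {.timeout = webrtc::TimeDelta::Millis(kTimeout)}),
            IsRtcOk());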
@@ -899,63 +1034,102 @@ TEST_F(JsepTransportControllerTest, transport_controller_->GetDtlsTransport(kAudioMid1)); fake_audio_dtls->fake_ice_transport()->MaybeStartGathering(); fake_audio_dtls->fake_ice_transport()->SetTransportState( - webrtc::IceTransportState::kChecking, - cricket::IceTransportState::STATE_CONNECTING); - EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionChecking, - ice_connection_state_, kTimeout); + IceTransportState::kChecking, + IceTransportStateInternal::STATE_CONNECTING); + EXPECT_THAT( + WaitUntil([&] { return PeerConnectionInterface::kIceConnectionChecking; }, + ::testing::Eq(ice_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(1, ice_connection_state_signal_count_); - EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kConnecting, - combined_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil( + [&] { + return PeerConnectionInterface::PeerConnectionState::kConnecting; + }, + ::testing::Eq(combined_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(1, combined_connection_state_signal_count_); - EXPECT_EQ_WAIT(cricket::kIceGatheringGathering, gathering_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceGatheringGathering; }, + ::testing::Eq(gathering_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(1, gathering_state_signal_count_); // Reject m= section which should disconnect the transport and return states // to "new". description->contents()[0].rejected = true; EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kOffer, description.get()) + ->SetRemoteDescription(SdpType::kOffer, description.get(), + description.get()) .ok()); - EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionNew, - ice_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil([&] { return PeerConnectionInterface::kIceConnectionNew; }, + ::testing::Eq(ice_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(2, ice_connection_state_signal_count_); - EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kNew, - combined_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil( + [&] { return PeerConnectionInterface::PeerConnectionState::kNew; }, + ::testing::Eq(combined_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(2, combined_connection_state_signal_count_); - EXPECT_EQ_WAIT(cricket::kIceGatheringNew, gathering_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceGatheringNew; }, + ::testing::Eq(gathering_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(2, gathering_state_signal_count_); // For good measure, rollback the offer and verify that states return to // their previous values. 
EXPECT_TRUE(transport_controller_->RollbackTransports().ok()); - EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionChecking, - ice_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil([&] { return PeerConnectionInterface::kIceConnectionChecking; }, + ::testing::Eq(ice_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(3, ice_connection_state_signal_count_); - EXPECT_EQ_WAIT(PeerConnectionInterface::PeerConnectionState::kConnecting, - combined_connection_state_, kTimeout); + EXPECT_THAT( + WaitUntil( + [&] { + return PeerConnectionInterface::PeerConnectionState::kConnecting; + }, + ::testing::Eq(combined_connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(3, combined_connection_state_signal_count_); - EXPECT_EQ_WAIT(cricket::kIceGatheringGathering, gathering_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceGatheringGathering; }, + ::testing::Eq(gathering_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(3, gathering_state_signal_count_); } TEST_F(JsepTransportControllerTest, SignalCandidatesGathered) { CreateJsepTransportController(JsepTransportController::Config()); auto description = CreateSessionDescriptionWithBundleGroup(); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, description.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, description.get(), nullptr) + .ok()); transport_controller_->MaybeStartGathering(); auto fake_audio_dtls = static_cast( transport_controller_->GetDtlsTransport(kAudioMid1)); fake_audio_dtls->fake_ice_transport()->SignalCandidateGathered( fake_audio_dtls->fake_ice_transport(), CreateCandidate(kAudioMid1, 1)); - EXPECT_EQ_WAIT(1, candidates_signal_count_, kTimeout); + EXPECT_THAT( + WaitUntil([&] { return 1; }, ::testing::Eq(candidates_signal_count_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(1u, candidates_[kAudioMid1].size()); } TEST_F(JsepTransportControllerTest, IceSignalingOccursOnNetworkThread) { - network_thread_ = rtc::Thread::CreateWithSocketServer(); + network_thread_ = Thread::CreateWithSocketServer(); network_thread_->Start(); EXPECT_EQ(ice_signaled_on_thread_, nullptr); CreateJsepTransportController(JsepTransportController::Config(), @@ -964,15 +1138,27 @@ TEST_F(JsepTransportControllerTest, IceSignalingOccursOnNetworkThread) { CreateLocalDescriptionAndCompleteConnectionOnNetworkThread(); // connecting --> connected --> completed - EXPECT_EQ_WAIT(cricket::kIceConnectionCompleted, connection_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceConnectionCompleted; }, + ::testing::Eq(connection_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(2, connection_state_signal_count_); // new --> gathering --> complete - EXPECT_EQ_WAIT(cricket::kIceGatheringComplete, gathering_state_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return kIceGatheringComplete; }, + ::testing::Eq(gathering_state_), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(2, gathering_state_signal_count_); - EXPECT_EQ_WAIT(1u, candidates_[kAudioMid1].size(), kTimeout); - EXPECT_EQ_WAIT(1u, candidates_[kVideoMid1].size(), kTimeout); + EXPECT_THAT(WaitUntil([&] { return candidates_[kAudioMid1].size(); }, + ::testing::Eq(1u), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return candidates_[kVideoMid1].size(); }, + 
::testing::Eq(1u), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_EQ(2, candidates_signal_count_); EXPECT_EQ(ice_signaled_on_thread_, network_thread_.get()); @@ -989,68 +1175,62 @@ TEST_F(JsepTransportControllerTest, IceRoleNotRedetermined) { CreateJsepTransportController(std::move(config)); // Let the `transport_controller_` be the controlled side initially. - auto remote_offer = std::make_unique(); + auto remote_offer = std::make_unique(); AddAudioSection(remote_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - auto local_answer = std::make_unique(); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); + auto local_answer = std::make_unique(); AddAudioSection(local_answer.get(), kAudioMid1, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); + EXPECT_TRUE( + transport_controller_ + ->SetRemoteDescription(SdpType::kOffer, nullptr, remote_offer.get()) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kOffer, remote_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kAnswer, local_answer.get()) + ->SetLocalDescription(SdpType::kAnswer, local_answer.get(), + remote_offer.get()) .ok()); auto fake_dtls = static_cast( transport_controller_->GetDtlsTransport(kAudioMid1)); - EXPECT_EQ(cricket::ICEROLE_CONTROLLED, - fake_dtls->fake_ice_transport()->GetIceRole()); + EXPECT_EQ(ICEROLE_CONTROLLED, fake_dtls->fake_ice_transport()->GetIceRole()); // New offer will trigger the ICE restart. - auto restart_local_offer = std::make_unique(); + auto restart_local_offer = std::make_unique(); AddAudioSection(restart_local_offer.get(), kAudioMid1, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - EXPECT_TRUE( - transport_controller_ - ->SetLocalDescription(SdpType::kOffer, restart_local_offer.get()) - .ok()); - EXPECT_EQ(cricket::ICEROLE_CONTROLLED, - fake_dtls->fake_ice_transport()->GetIceRole()); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); + EXPECT_TRUE(transport_controller_ + ->SetLocalDescription(SdpType::kOffer, + restart_local_offer.get(), + remote_offer.get()) + .ok()); + EXPECT_EQ(ICEROLE_CONTROLLED, fake_dtls->fake_ice_transport()->GetIceRole()); } // Tests ICE-Lite mode in remote answer. 
TEST_F(JsepTransportControllerTest, SetIceRoleWhenIceLiteInRemoteAnswer) { CreateJsepTransportController(JsepTransportController::Config()); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); auto fake_dtls = static_cast( transport_controller_->GetDtlsTransport(kAudioMid1)); - EXPECT_EQ(cricket::ICEROLE_CONTROLLING, - fake_dtls->fake_ice_transport()->GetIceRole()); - EXPECT_EQ(cricket::ICEMODE_FULL, - fake_dtls->fake_ice_transport()->remote_ice_mode()); + EXPECT_EQ(ICEROLE_CONTROLLING, fake_dtls->fake_ice_transport()->GetIceRole()); + EXPECT_EQ(ICEMODE_FULL, fake_dtls->fake_ice_transport()->remote_ice_mode()); - auto remote_answer = std::make_unique(); + auto remote_answer = std::make_unique(); AddAudioSection(remote_answer.get(), kAudioMid1, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_LITE, cricket::CONNECTIONROLE_PASSIVE, - nullptr); + ICEMODE_LITE, CONNECTIONROLE_PASSIVE, nullptr); EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); - EXPECT_EQ(cricket::ICEROLE_CONTROLLING, - fake_dtls->fake_ice_transport()->GetIceRole()); - EXPECT_EQ(cricket::ICEMODE_LITE, - fake_dtls->fake_ice_transport()->remote_ice_mode()); + EXPECT_EQ(ICEROLE_CONTROLLING, fake_dtls->fake_ice_transport()->GetIceRole()); + EXPECT_EQ(ICEMODE_LITE, fake_dtls->fake_ice_transport()->remote_ice_mode()); } // Tests that the ICE role remains "controlling" if a subsequent offer that @@ -1059,97 +1239,89 @@ TEST_F(JsepTransportControllerTest, SetIceRoleWhenIceLiteInRemoteAnswer) { TEST_F(JsepTransportControllerTest, IceRoleIsControllingAfterIceRestartFromIceLiteEndpoint) { CreateJsepTransportController(JsepTransportController::Config()); - auto remote_offer = std::make_unique(); + auto remote_offer = std::make_unique(); AddAudioSection(remote_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_LITE, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - auto local_answer = std::make_unique(); + ICEMODE_LITE, CONNECTIONROLE_ACTPASS, nullptr); + auto local_answer = std::make_unique(); AddAudioSection(local_answer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); // Initial Offer/Answer exchange. If the remote offerer is ICE-Lite, then the // local side is the controlling. 
+ EXPECT_TRUE( + transport_controller_ + ->SetRemoteDescription(SdpType::kOffer, nullptr, remote_offer.get()) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kOffer, remote_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kAnswer, local_answer.get()) + ->SetLocalDescription(SdpType::kAnswer, local_answer.get(), + remote_offer.get()) .ok()); auto fake_dtls = static_cast( transport_controller_->GetDtlsTransport(kAudioMid1)); - EXPECT_EQ(cricket::ICEROLE_CONTROLLING, - fake_dtls->fake_ice_transport()->GetIceRole()); + EXPECT_EQ(ICEROLE_CONTROLLING, fake_dtls->fake_ice_transport()->GetIceRole()); // In the subsequence remote offer triggers an ICE restart. - auto remote_offer2 = std::make_unique(); + auto remote_offer2 = std::make_unique(); AddAudioSection(remote_offer2.get(), kAudioMid1, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_LITE, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - auto local_answer2 = std::make_unique(); - AddAudioSection(local_answer2.get(), kAudioMid1, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); + ICEMODE_LITE, CONNECTIONROLE_ACTPASS, nullptr); EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kOffer, remote_offer2.get()) + ->SetRemoteDescription(SdpType::kOffer, local_answer.get(), + remote_offer2.get()) .ok()); + auto local_answer2 = std::make_unique(); + AddAudioSection(local_answer2.get(), kAudioMid1, kIceUfrag2, kIcePwd2, + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kAnswer, local_answer2.get()) + ->SetLocalDescription(SdpType::kAnswer, local_answer2.get(), + remote_offer2.get()) .ok()); fake_dtls = static_cast( transport_controller_->GetDtlsTransport(kAudioMid1)); // The local side is still the controlling role since the remote side is using // ICE-Lite. - EXPECT_EQ(cricket::ICEROLE_CONTROLLING, - fake_dtls->fake_ice_transport()->GetIceRole()); + EXPECT_EQ(ICEROLE_CONTROLLING, fake_dtls->fake_ice_transport()->GetIceRole()); } // Tests that the SDP has more than one audio/video m= sections. 
TEST_F(JsepTransportControllerTest, MultipleMediaSectionsOfSameTypeWithBundle) { CreateJsepTransportController(JsepTransportController::Config()); - cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group(GROUP_TYPE_BUNDLE); bundle_group.AddContentName(kAudioMid1); bundle_group.AddContentName(kAudioMid2); bundle_group.AddContentName(kVideoMid1); bundle_group.AddContentName(kDataMid1); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(local_offer.get(), kAudioMid2, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kVideoMid1, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - AddDataSection(local_offer.get(), kDataMid1, - cricket::MediaProtocolType::kSctp, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); + AddDataSection(local_offer.get(), kDataMid1, MediaProtocolType::kSctp, + kIceUfrag1, kIcePwd1, ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); - auto remote_answer = std::make_unique(); + auto remote_answer = std::make_unique(); AddAudioSection(remote_answer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); AddAudioSection(remote_answer.get(), kAudioMid2, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); AddVideoSection(remote_answer.get(), kVideoMid1, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); - AddDataSection(remote_answer.get(), kDataMid1, - cricket::MediaProtocolType::kSctp, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); + AddDataSection(remote_answer.get(), kDataMid1, MediaProtocolType::kSctp, + kIceUfrag1, kIcePwd1, ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); local_offer->AddGroup(bundle_group); remote_answer->AddGroup(bundle_group); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); // Verify that all the sections are bundled on kAudio1. 
auto transport1 = transport_controller_->GetRtpTransport(kAudioMid1); @@ -1185,50 +1357,44 @@ TEST_F(JsepTransportControllerTest, MultipleBundleGroups) { static const char kMid4Video[] = "4_video"; CreateJsepTransportController(JsepTransportController::Config()); - cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group1(GROUP_TYPE_BUNDLE); bundle_group1.AddContentName(kMid1Audio); bundle_group1.AddContentName(kMid2Video); - cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group2(GROUP_TYPE_BUNDLE); bundle_group2.AddContentName(kMid3Audio); bundle_group2.AddContentName(kMid4Video); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid2Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag4, kIcePwd4, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); local_offer->AddGroup(bundle_group1); local_offer->AddGroup(bundle_group2); - auto remote_answer = std::make_unique(); + auto remote_answer = std::make_unique(); AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid2Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag4, kIcePwd4, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); remote_answer->AddGroup(bundle_group1); remote_answer->AddGroup(bundle_group2); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); // Verify that (kMid1Audio,kMid2Video) and (kMid3Audio,kMid4Video) form two @@ -1266,52 +1432,46 @@ TEST_F(JsepTransportControllerTest, static const char kMid4Video[] = "4_video"; CreateJsepTransportController(JsepTransportController::Config()); - cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group1(GROUP_TYPE_BUNDLE); bundle_group1.AddContentName(kMid1Audio); bundle_group1.AddContentName(kMid2Video); - cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group2(GROUP_TYPE_BUNDLE); bundle_group2.AddContentName(kMid3Audio); 
bundle_group2.AddContentName(kMid4Video); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid2Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag4, kIcePwd4, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); // The offer has both groups. local_offer->AddGroup(bundle_group1); local_offer->AddGroup(bundle_group2); - auto remote_answer = std::make_unique(); + auto remote_answer = std::make_unique(); AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid2Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag4, kIcePwd4, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); // The answer only has a single group! This is what happens when talking to an // endpoint that does not have support for multiple BUNDLE groups. remote_answer->AddGroup(bundle_group1); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); // Verify that (kMid1Audio,kMid2Video) form a bundle group, but that @@ -1334,60 +1494,54 @@ TEST_F(JsepTransportControllerTest, MultipleBundleGroupsIllegallyChangeGroup) { CreateJsepTransportController(JsepTransportController::Config()); // Offer groups (kMid1Audio,kMid2Video) and (kMid3Audio,kMid4Video). - cricket::ContentGroup offer_bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup offer_bundle_group1(GROUP_TYPE_BUNDLE); offer_bundle_group1.AddContentName(kMid1Audio); offer_bundle_group1.AddContentName(kMid2Video); - cricket::ContentGroup offer_bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup offer_bundle_group2(GROUP_TYPE_BUNDLE); offer_bundle_group2.AddContentName(kMid3Audio); offer_bundle_group2.AddContentName(kMid4Video); // Answer groups (kMid1Audio,kMid4Video) and (kMid3Audio,kMid2Video), i.e. the // second group members have switched places. This should get rejected. 
- cricket::ContentGroup answer_bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup answer_bundle_group1(GROUP_TYPE_BUNDLE); answer_bundle_group1.AddContentName(kMid1Audio); answer_bundle_group1.AddContentName(kMid4Video); - cricket::ContentGroup answer_bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup answer_bundle_group2(GROUP_TYPE_BUNDLE); answer_bundle_group2.AddContentName(kMid3Audio); answer_bundle_group2.AddContentName(kMid2Video); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid2Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag4, kIcePwd4, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); local_offer->AddGroup(offer_bundle_group1); local_offer->AddGroup(offer_bundle_group2); - auto remote_answer = std::make_unique(); + auto remote_answer = std::make_unique(); AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid2Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag4, kIcePwd4, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); remote_answer->AddGroup(answer_bundle_group1); remote_answer->AddGroup(answer_bundle_group2); // Accept offer. - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); // Reject answer! EXPECT_FALSE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); } @@ -1399,58 +1553,52 @@ TEST_F(JsepTransportControllerTest, MultipleBundleGroupsInvalidSubsets) { CreateJsepTransportController(JsepTransportController::Config()); // Offer groups (kMid1Audio,kMid2Video) and (kMid3Audio,kMid4Video). - cricket::ContentGroup offer_bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup offer_bundle_group1(GROUP_TYPE_BUNDLE); offer_bundle_group1.AddContentName(kMid1Audio); offer_bundle_group1.AddContentName(kMid2Video); - cricket::ContentGroup offer_bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup offer_bundle_group2(GROUP_TYPE_BUNDLE); offer_bundle_group2.AddContentName(kMid3Audio); offer_bundle_group2.AddContentName(kMid4Video); // Answer groups (kMid1Audio) and (kMid2Video), i.e. 
the second group was // moved from the first group. This should get rejected. - cricket::ContentGroup answer_bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup answer_bundle_group1(GROUP_TYPE_BUNDLE); answer_bundle_group1.AddContentName(kMid1Audio); - cricket::ContentGroup answer_bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup answer_bundle_group2(GROUP_TYPE_BUNDLE); answer_bundle_group2.AddContentName(kMid2Video); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid2Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag4, kIcePwd4, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); local_offer->AddGroup(offer_bundle_group1); local_offer->AddGroup(offer_bundle_group2); - auto remote_answer = std::make_unique(); + auto remote_answer = std::make_unique(); AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid2Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag4, kIcePwd4, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); remote_answer->AddGroup(answer_bundle_group1); remote_answer->AddGroup(answer_bundle_group2); // Accept offer. - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); // Reject answer! EXPECT_FALSE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); } @@ -1462,32 +1610,30 @@ TEST_F(JsepTransportControllerTest, MultipleBundleGroupsInvalidOverlap) { CreateJsepTransportController(JsepTransportController::Config()); // Offer groups (kMid1Audio,kMid3Audio) and (kMid2Video,kMid3Audio), i.e. // kMid3Audio is in both groups - this is illegal. 
- cricket::ContentGroup offer_bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup offer_bundle_group1(GROUP_TYPE_BUNDLE); offer_bundle_group1.AddContentName(kMid1Audio); offer_bundle_group1.AddContentName(kMid3Audio); - cricket::ContentGroup offer_bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup offer_bundle_group2(GROUP_TYPE_BUNDLE); offer_bundle_group2.AddContentName(kMid2Video); offer_bundle_group2.AddContentName(kMid3Audio); - auto offer = std::make_unique(); - AddAudioSection(offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - AddVideoSection(offer.get(), kMid2Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - AddAudioSection(offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + auto offer = std::make_unique(); + AddAudioSection(offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, ICEMODE_FULL, + CONNECTIONROLE_ACTPASS, nullptr); + AddVideoSection(offer.get(), kMid2Video, kIceUfrag2, kIcePwd2, ICEMODE_FULL, + CONNECTIONROLE_ACTPASS, nullptr); + AddAudioSection(offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3, ICEMODE_FULL, + CONNECTIONROLE_ACTPASS, nullptr); offer->AddGroup(offer_bundle_group1); offer->AddGroup(offer_bundle_group2); // Reject offer, both if set as local or remote. + EXPECT_FALSE(transport_controller_ + ->SetLocalDescription(SdpType::kOffer, offer.get(), nullptr) + .ok()); EXPECT_FALSE( - transport_controller_->SetLocalDescription(SdpType::kOffer, offer.get()) - .ok()); - EXPECT_FALSE( - transport_controller_->SetRemoteDescription(SdpType::kOffer, offer.get()) + transport_controller_ + ->SetRemoteDescription(SdpType::kOffer, offer.get(), offer.get()) .ok()); } @@ -1502,72 +1648,62 @@ TEST_F(JsepTransportControllerTest, MultipleBundleGroupsUnbundleFirstMid) { CreateJsepTransportController(JsepTransportController::Config()); // Offer groups (kMid1Audio,kMid2Audio,kMid3Audio) and // (kMid4Video,kMid5Video,kMid6Video). - cricket::ContentGroup offer_bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup offer_bundle_group1(GROUP_TYPE_BUNDLE); offer_bundle_group1.AddContentName(kMid1Audio); offer_bundle_group1.AddContentName(kMid2Audio); offer_bundle_group1.AddContentName(kMid3Audio); - cricket::ContentGroup offer_bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup offer_bundle_group2(GROUP_TYPE_BUNDLE); offer_bundle_group2.AddContentName(kMid4Video); offer_bundle_group2.AddContentName(kMid5Video); offer_bundle_group2.AddContentName(kMid6Video); // Answer groups (kMid2Audio,kMid3Audio) and (kMid5Video,kMid6Video), i.e. // we've moved the first MIDs out of the groups. 
- cricket::ContentGroup answer_bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup answer_bundle_group1(GROUP_TYPE_BUNDLE); answer_bundle_group1.AddContentName(kMid2Audio); answer_bundle_group1.AddContentName(kMid3Audio); - cricket::ContentGroup answer_bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup answer_bundle_group2(GROUP_TYPE_BUNDLE); answer_bundle_group2.AddContentName(kMid5Video); answer_bundle_group2.AddContentName(kMid6Video); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(local_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid5Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid6Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); local_offer->AddGroup(offer_bundle_group1); local_offer->AddGroup(offer_bundle_group2); - auto remote_answer = std::make_unique(); + auto remote_answer = std::make_unique(); AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(remote_answer.get(), kMid2Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid5Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid6Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); remote_answer->AddGroup(answer_bundle_group1); remote_answer->AddGroup(answer_bundle_group2); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); auto mid1_transport = 
transport_controller_->GetRtpTransport(kMid1Audio); @@ -1595,73 +1731,62 @@ TEST_F(JsepTransportControllerTest, MultipleBundleGroupsChangeFirstMid) { CreateJsepTransportController(JsepTransportController::Config()); // Offer groups (kMid1Audio,kMid2Audio,kMid3Audio) and // (kMid4Video,kMid5Video,kMid6Video). - cricket::ContentGroup offer_bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup offer_bundle_group1(GROUP_TYPE_BUNDLE); offer_bundle_group1.AddContentName(kMid1Audio); offer_bundle_group1.AddContentName(kMid2Audio); offer_bundle_group1.AddContentName(kMid3Audio); - cricket::ContentGroup offer_bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup offer_bundle_group2(GROUP_TYPE_BUNDLE); offer_bundle_group2.AddContentName(kMid4Video); offer_bundle_group2.AddContentName(kMid5Video); offer_bundle_group2.AddContentName(kMid6Video); // Answer groups (kMid2Audio,kMid1Audio,kMid3Audio) and // (kMid5Video,kMid6Video,kMid4Video), i.e. we've changed which MID is first // but accept the whole group. - cricket::ContentGroup answer_bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup answer_bundle_group1(GROUP_TYPE_BUNDLE); answer_bundle_group1.AddContentName(kMid2Audio); answer_bundle_group1.AddContentName(kMid1Audio); answer_bundle_group1.AddContentName(kMid3Audio); - cricket::ContentGroup answer_bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup answer_bundle_group2(GROUP_TYPE_BUNDLE); answer_bundle_group2.AddContentName(kMid5Video); answer_bundle_group2.AddContentName(kMid6Video); answer_bundle_group2.AddContentName(kMid4Video); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(local_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid5Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid6Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); local_offer->AddGroup(offer_bundle_group1); local_offer->AddGroup(offer_bundle_group2); - auto remote_answer = std::make_unique(); + auto remote_answer = std::make_unique(); AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(remote_answer.get(), kMid2Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(remote_answer.get(), kMid3Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); 
AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid5Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid6Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); remote_answer->AddGroup(answer_bundle_group1); remote_answer->AddGroup(answer_bundle_group2); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); // The fact that we accept this answer is actually a bug. If we accept the // first MID to be in the group, we should also accept that it is the tagged @@ -1669,7 +1794,8 @@ TEST_F(JsepTransportControllerTest, MultipleBundleGroupsChangeFirstMid) { // TODO(https://crbug.com/webrtc/12699): When this issue is fixed, change this // to EXPECT_FALSE and remove the below expectations about transports. EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); auto mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio); auto mid2_transport = transport_controller_->GetRtpTransport(kMid2Audio); @@ -1695,81 +1821,70 @@ TEST_F(JsepTransportControllerTest, CreateJsepTransportController(JsepTransportController::Config()); // Start by grouping (kMid1Audio,kMid2Audio) and (kMid4Video,kMid5Video). 
- cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group1(GROUP_TYPE_BUNDLE); bundle_group1.AddContentName(kMid1Audio); bundle_group1.AddContentName(kMid2Audio); - cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group2(GROUP_TYPE_BUNDLE); bundle_group2.AddContentName(kMid4Video); bundle_group2.AddContentName(kMid5Video); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(local_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid5Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); local_offer->AddGroup(bundle_group1); local_offer->AddGroup(bundle_group2); - auto remote_answer = std::make_unique(); + auto remote_answer = std::make_unique(); AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(remote_answer.get(), kMid2Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid5Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); remote_answer->AddGroup(bundle_group1); remote_answer->AddGroup(bundle_group2); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); // Add kMid3Audio and kMid6Video to the respective audio/video bundle groups. 
- cricket::ContentGroup new_bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup new_bundle_group1(GROUP_TYPE_BUNDLE); bundle_group1.AddContentName(kMid3Audio); - cricket::ContentGroup new_bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup new_bundle_group2(GROUP_TYPE_BUNDLE); bundle_group2.AddContentName(kMid6Video); - auto subsequent_offer = std::make_unique(); + auto subsequent_offer = std::make_unique(); AddAudioSection(subsequent_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(subsequent_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(subsequent_offer.get(), kMid3Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(subsequent_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(subsequent_offer.get(), kMid5Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(subsequent_offer.get(), kMid6Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); subsequent_offer->AddGroup(bundle_group1); subsequent_offer->AddGroup(bundle_group2); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, subsequent_offer.get()) + ->SetLocalDescription(SdpType::kOffer, subsequent_offer.get(), + remote_answer.get()) .ok()); auto mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio); auto mid2_transport = transport_controller_->GetRtpTransport(kMid2Audio); @@ -1793,78 +1908,69 @@ TEST_F(JsepTransportControllerTest, CreateJsepTransportController(JsepTransportController::Config()); // Start by grouping (kMid1Audio,kMid2Audio) and (kMid3Video,kMid4Video). 
- cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group1(GROUP_TYPE_BUNDLE); bundle_group1.AddContentName(kMid1Audio); bundle_group1.AddContentName(kMid2Audio); - cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group2(GROUP_TYPE_BUNDLE); bundle_group2.AddContentName(kMid3Video); bundle_group2.AddContentName(kMid4Video); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(local_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid3Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); local_offer->AddGroup(bundle_group1); local_offer->AddGroup(bundle_group2); - auto remote_answer = std::make_unique(); + auto remote_answer = std::make_unique(); AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(remote_answer.get(), kMid2Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid3Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); remote_answer->AddGroup(bundle_group1); remote_answer->AddGroup(bundle_group2); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); // Switch to grouping (kMid1Audio,kMid2Audio,kMid3Video,kMid4Video). // This is a illegal without first removing m= sections from their groups. 
- cricket::ContentGroup new_bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup new_bundle_group(GROUP_TYPE_BUNDLE); new_bundle_group.AddContentName(kMid1Audio); new_bundle_group.AddContentName(kMid2Audio); new_bundle_group.AddContentName(kMid3Video); new_bundle_group.AddContentName(kMid4Video); - auto subsequent_offer = std::make_unique(); + auto subsequent_offer = std::make_unique(); AddAudioSection(subsequent_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(subsequent_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(subsequent_offer.get(), kMid3Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(subsequent_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); subsequent_offer->AddGroup(new_bundle_group); - EXPECT_FALSE( - transport_controller_ - ->SetLocalDescription(SdpType::kOffer, subsequent_offer.get()) - .ok()); + EXPECT_FALSE(transport_controller_ + ->SetLocalDescription(SdpType::kOffer, + subsequent_offer.get(), + remote_answer.get()) + .ok()); } TEST_F(JsepTransportControllerTest, @@ -1876,77 +1982,68 @@ TEST_F(JsepTransportControllerTest, CreateJsepTransportController(JsepTransportController::Config()); // Start by grouping (kMid1Audio,kMid2Audio,kMid3Video,kMid4Video). - cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group(GROUP_TYPE_BUNDLE); bundle_group.AddContentName(kMid1Audio); bundle_group.AddContentName(kMid2Audio); bundle_group.AddContentName(kMid3Video); bundle_group.AddContentName(kMid4Video); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(local_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid3Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); local_offer->AddGroup(bundle_group); - auto remote_answer = std::make_unique(); + auto remote_answer = std::make_unique(); AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(remote_answer.get(), kMid2Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid3Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), 
kMid4Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); remote_answer->AddGroup(bundle_group); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); // Switch to grouping (kMid1Audio,kMid2Audio) and (kMid3Video,kMid4Video). // This is a illegal without first removing m= sections from their groups. - cricket::ContentGroup new_bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup new_bundle_group1(GROUP_TYPE_BUNDLE); new_bundle_group1.AddContentName(kMid1Audio); new_bundle_group1.AddContentName(kMid2Audio); - cricket::ContentGroup new_bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup new_bundle_group2(GROUP_TYPE_BUNDLE); new_bundle_group2.AddContentName(kMid3Video); new_bundle_group2.AddContentName(kMid4Video); - auto subsequent_offer = std::make_unique(); + auto subsequent_offer = std::make_unique(); AddAudioSection(subsequent_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(subsequent_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(subsequent_offer.get(), kMid3Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(subsequent_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); subsequent_offer->AddGroup(new_bundle_group1); subsequent_offer->AddGroup(new_bundle_group2); - EXPECT_FALSE( - transport_controller_ - ->SetLocalDescription(SdpType::kOffer, subsequent_offer.get()) - .ok()); + EXPECT_FALSE(transport_controller_ + ->SetLocalDescription(SdpType::kOffer, + subsequent_offer.get(), + remote_answer.get()) + .ok()); } TEST_F(JsepTransportControllerTest, @@ -1958,118 +2055,105 @@ TEST_F(JsepTransportControllerTest, CreateJsepTransportController(JsepTransportController::Config()); // Start by grouping (kMid1Audio,kMid2Audio) and (kMid3Video,kMid4Video). 
- cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group1(GROUP_TYPE_BUNDLE); bundle_group1.AddContentName(kMid1Audio); bundle_group1.AddContentName(kMid2Audio); - cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group2(GROUP_TYPE_BUNDLE); bundle_group2.AddContentName(kMid3Video); bundle_group2.AddContentName(kMid4Video); - auto local_offer = std::make_unique<cricket::SessionDescription>(); + auto local_offer = std::make_unique<SessionDescription>(); AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(local_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid3Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); local_offer->AddGroup(bundle_group1); local_offer->AddGroup(bundle_group2); - auto remote_answer = std::make_unique<cricket::SessionDescription>(); + auto remote_answer = std::make_unique<SessionDescription>(); AddAudioSection(remote_answer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(remote_answer.get(), kMid2Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid3Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(remote_answer.get(), kMid4Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); remote_answer->AddGroup(bundle_group1); remote_answer->AddGroup(bundle_group2); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); // Switch to grouping (kMid1Audio,kMid3Video) and (kMid2Audio,kMid4Video). // This is illegal without first removing m= sections from their groups. 
- cricket::ContentGroup new_bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup new_bundle_group1(GROUP_TYPE_BUNDLE); new_bundle_group1.AddContentName(kMid1Audio); new_bundle_group1.AddContentName(kMid3Video); - cricket::ContentGroup new_bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup new_bundle_group2(GROUP_TYPE_BUNDLE); new_bundle_group2.AddContentName(kMid2Audio); new_bundle_group2.AddContentName(kMid4Video); - auto subsequent_offer = std::make_unique(); + auto subsequent_offer = std::make_unique(); AddAudioSection(subsequent_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(subsequent_offer.get(), kMid2Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(subsequent_offer.get(), kMid3Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(subsequent_offer.get(), kMid4Video, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); subsequent_offer->AddGroup(new_bundle_group1); subsequent_offer->AddGroup(new_bundle_group2); - EXPECT_FALSE( - transport_controller_ - ->SetLocalDescription(SdpType::kOffer, subsequent_offer.get()) - .ok()); + EXPECT_FALSE(transport_controller_ + ->SetLocalDescription(SdpType::kOffer, + subsequent_offer.get(), + remote_answer.get()) + .ok()); } // Tests that only a subset of all the m= sections are bundled. TEST_F(JsepTransportControllerTest, BundleSubsetOfMediaSections) { CreateJsepTransportController(JsepTransportController::Config()); - cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group(GROUP_TYPE_BUNDLE); bundle_group.AddContentName(kAudioMid1); bundle_group.AddContentName(kVideoMid1); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(local_offer.get(), kAudioMid2, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kVideoMid1, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); - auto remote_answer = std::make_unique(); + auto remote_answer = std::make_unique(); AddAudioSection(remote_answer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); AddAudioSection(remote_answer.get(), kAudioMid2, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); AddVideoSection(remote_answer.get(), kVideoMid1, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); local_offer->AddGroup(bundle_group); remote_answer->AddGroup(bundle_group); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); 
EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); // Verify that only `kAudio1` and `kVideo1` are bundled. @@ -2090,57 +2174,55 @@ TEST_F(JsepTransportControllerTest, BundleSubsetOfMediaSections) { // sections are added in the subsequent offer. TEST_F(JsepTransportControllerTest, BundleOnDataSectionInSubsequentOffer) { CreateJsepTransportController(JsepTransportController::Config()); - cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group(GROUP_TYPE_BUNDLE); bundle_group.AddContentName(kDataMid1); - auto local_offer = std::make_unique<cricket::SessionDescription>(); - AddDataSection(local_offer.get(), kDataMid1, - cricket::MediaProtocolType::kSctp, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, + auto local_offer = std::make_unique<SessionDescription>(); + AddDataSection(local_offer.get(), kDataMid1, MediaProtocolType::kSctp, + kIceUfrag1, kIcePwd1, ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); - auto remote_answer = std::make_unique<cricket::SessionDescription>(); - AddDataSection(remote_answer.get(), kDataMid1, - cricket::MediaProtocolType::kSctp, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, + auto remote_answer = std::make_unique<SessionDescription>(); + AddDataSection(remote_answer.get(), kDataMid1, MediaProtocolType::kSctp, + kIceUfrag1, kIcePwd1, ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); local_offer->AddGroup(bundle_group); remote_answer->AddGroup(bundle_group); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); auto data_transport = transport_controller_->GetRtpTransport(kDataMid1); // Add audio/video sections in subsequent offer. AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kVideoMid1, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(remote_answer.get(), kAudioMid1, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); AddVideoSection(remote_answer.get(), kVideoMid1, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); // Reset the bundle group and do another offer/answer exchange. 
bundle_group.AddContentName(kAudioMid1); bundle_group.AddContentName(kVideoMid1); - local_offer->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); - remote_answer->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); + local_offer->RemoveGroupByName(GROUP_TYPE_BUNDLE); local_offer->AddGroup(bundle_group); - remote_answer->AddGroup(bundle_group); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), + remote_answer.get()) .ok()); + remote_answer->RemoveGroupByName(GROUP_TYPE_BUNDLE); + remote_answer->AddGroup(bundle_group); EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); auto audio_transport = transport_controller_->GetRtpTransport(kAudioMid1); @@ -2151,33 +2233,27 @@ TEST_F(JsepTransportControllerTest, BundleOnDataSectionInSubsequentOffer) { TEST_F(JsepTransportControllerTest, VideoDataRejectedInAnswer) { CreateJsepTransportController(JsepTransportController::Config()); - cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group(GROUP_TYPE_BUNDLE); bundle_group.AddContentName(kAudioMid1); bundle_group.AddContentName(kVideoMid1); bundle_group.AddContentName(kDataMid1); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kVideoMid1, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - AddDataSection(local_offer.get(), kDataMid1, - cricket::MediaProtocolType::kSctp, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); + AddDataSection(local_offer.get(), kDataMid1, MediaProtocolType::kSctp, + kIceUfrag3, kIcePwd3, ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); - auto remote_answer = std::make_unique(); + auto remote_answer = std::make_unique(); AddAudioSection(remote_answer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); AddVideoSection(remote_answer.get(), kVideoMid1, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); - AddDataSection(remote_answer.get(), kDataMid1, - cricket::MediaProtocolType::kSctp, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); + AddDataSection(remote_answer.get(), kDataMid1, MediaProtocolType::kSctp, + kIceUfrag3, kIcePwd3, ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); // Reject video and data section. 
remote_answer->contents()[1].rejected = true; @@ -2186,11 +2262,13 @@ TEST_F(JsepTransportControllerTest, VideoDataRejectedInAnswer) { local_offer->AddGroup(bundle_group); remote_answer->AddGroup(bundle_group); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); // Verify the RtpTransport/DtlsTransport is destroyed correctly. @@ -2211,33 +2289,31 @@ TEST_F(JsepTransportControllerTest, VideoDataRejectedInAnswer) { // fixed TEST_F(JsepTransportControllerTest, ChangeBundledMidNotSupported) { CreateJsepTransportController(JsepTransportController::Config()); - cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group(GROUP_TYPE_BUNDLE); bundle_group.AddContentName(kAudioMid1); bundle_group.AddContentName(kVideoMid1); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kVideoMid1, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); - auto remote_answer = std::make_unique(); + auto remote_answer = std::make_unique(); AddAudioSection(remote_answer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); AddVideoSection(remote_answer.get(), kVideoMid1, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); local_offer->AddGroup(bundle_group); remote_answer->AddGroup(bundle_group); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); EXPECT_EQ(transport_controller_->GetRtpTransport(kAudioMid1), transport_controller_->GetRtpTransport(kVideoMid1)); @@ -2245,48 +2321,44 @@ TEST_F(JsepTransportControllerTest, ChangeBundledMidNotSupported) { // Reorder the bundle group. EXPECT_TRUE(bundle_group.RemoveContentName(kAudioMid1)); bundle_group.AddContentName(kAudioMid1); + EXPECT_TRUE(transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), + remote_answer.get()) + .ok()); // The answerer uses the new bundle group and now the bundle mid is changed to // `kVideo1`. 
- remote_answer->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); + remote_answer->RemoveGroupByName(GROUP_TYPE_BUNDLE); remote_answer->AddGroup(bundle_group); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); EXPECT_FALSE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); } // Test that rejecting only the first m= section of a BUNDLE group is treated as // an error, but rejecting all of them works as expected. TEST_F(JsepTransportControllerTest, RejectFirstContentInBundleGroup) { CreateJsepTransportController(JsepTransportController::Config()); - cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group(GROUP_TYPE_BUNDLE); bundle_group.AddContentName(kAudioMid1); bundle_group.AddContentName(kVideoMid1); bundle_group.AddContentName(kDataMid1); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kVideoMid1, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - AddDataSection(local_offer.get(), kDataMid1, - cricket::MediaProtocolType::kSctp, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); + AddDataSection(local_offer.get(), kDataMid1, MediaProtocolType::kSctp, + kIceUfrag3, kIcePwd3, ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); - auto remote_answer = std::make_unique(); + auto remote_answer = std::make_unique(); AddAudioSection(remote_answer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); AddVideoSection(remote_answer.get(), kVideoMid1, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); - AddDataSection(remote_answer.get(), kDataMid1, - cricket::MediaProtocolType::kSctp, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); + AddDataSection(remote_answer.get(), kDataMid1, MediaProtocolType::kSctp, + kIceUfrag3, kIcePwd3, ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); // Reject audio content in answer. remote_answer->contents()[0].rejected = true; @@ -2294,18 +2366,21 @@ TEST_F(JsepTransportControllerTest, RejectFirstContentInBundleGroup) { local_offer->AddGroup(bundle_group); remote_answer->AddGroup(bundle_group); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_FALSE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); // Reject all the contents. 
remote_answer->contents()[1].rejected = true; remote_answer->contents()[2].rejected = true; EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); EXPECT_EQ(nullptr, transport_controller_->GetRtpTransport(kAudioMid1)); EXPECT_EQ(nullptr, transport_controller_->GetRtpTransport(kVideoMid1)); @@ -2318,16 +2393,16 @@ TEST_F(JsepTransportControllerTest, ApplyNonRtcpMuxOfferWhenMuxingRequired) { JsepTransportController::Config config; config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; CreateJsepTransportController(std::move(config)); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); local_offer->contents()[0].media_description()->set_rtcp_mux(false); // Applying a non-RTCP-mux offer is expected to fail. - EXPECT_FALSE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); + EXPECT_FALSE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); } // Tests that applying non-RTCP-mux answer would fail when kRtcpMuxPolicyRequire @@ -2336,22 +2411,22 @@ TEST_F(JsepTransportControllerTest, ApplyNonRtcpMuxAnswerWhenMuxingRequired) { JsepTransportController::Config config; config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; CreateJsepTransportController(std::move(config)); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); - auto remote_answer = std::make_unique(); + auto remote_answer = std::make_unique(); AddAudioSection(remote_answer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_PASSIVE, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_PASSIVE, nullptr); // Applying a non-RTCP-mux answer is expected to fail. 
remote_answer->contents()[0].media_description()->set_rtcp_mux(false); EXPECT_FALSE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); } @@ -2363,19 +2438,21 @@ TEST_F(JsepTransportControllerTest, auto local_offer = CreateSessionDescriptionWithoutBundle(); auto remote_answer = CreateSessionDescriptionWithoutBundle(); - cricket::ContentGroup offer_bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup offer_bundle_group(GROUP_TYPE_BUNDLE); offer_bundle_group.AddContentName(kAudioMid1); local_offer->AddGroup(offer_bundle_group); - cricket::ContentGroup answer_bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup answer_bundle_group(GROUP_TYPE_BUNDLE); answer_bundle_group.AddContentName(kAudioMid1); answer_bundle_group.AddContentName(kVideoMid1); remote_answer->AddGroup(answer_bundle_group); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_FALSE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); } @@ -2385,18 +2462,20 @@ TEST_F(JsepTransportControllerTest, RejectBundleGroupWithNonExistingMid) { auto local_offer = CreateSessionDescriptionWithoutBundle(); auto remote_answer = CreateSessionDescriptionWithoutBundle(); - cricket::ContentGroup invalid_bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup invalid_bundle_group(GROUP_TYPE_BUNDLE); // The BUNDLE group is invalid because there is no data section in the // description. invalid_bundle_group.AddContentName(kDataMid1); local_offer->AddGroup(invalid_bundle_group); remote_answer->AddGroup(invalid_bundle_group); + EXPECT_FALSE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_FALSE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); - EXPECT_FALSE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); } @@ -2407,19 +2486,22 @@ TEST_F(JsepTransportControllerTest, RemoveContentFromBundleGroup) { auto local_offer = CreateSessionDescriptionWithBundleGroup(); auto remote_answer = CreateSessionDescriptionWithBundleGroup(); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); // Do an re-offer/answer. EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), + remote_answer.get()) .ok()); auto new_answer = CreateSessionDescriptionWithoutBundle(); - cricket::ContentGroup new_bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup new_bundle_group(GROUP_TYPE_BUNDLE); // The answer removes video from the BUNDLE group without rejecting it is // invalid. 
new_bundle_group.AddContentName(kAudioMid1); @@ -2427,7 +2509,8 @@ TEST_F(JsepTransportControllerTest, RemoveContentFromBundleGroup) { // Applying invalid answer is expected to fail. EXPECT_FALSE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, new_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + new_answer.get()) .ok()); // Rejected the video content. @@ -2435,7 +2518,8 @@ TEST_F(JsepTransportControllerTest, RemoveContentFromBundleGroup) { ASSERT_TRUE(video_content); video_content->rejected = true; EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, new_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + new_answer.get()) .ok()); } @@ -2446,44 +2530,41 @@ TEST_F(JsepTransportControllerTest, RemoveContentFromBundleGroup) { TEST_F(JsepTransportControllerTest, ChangeTaggedMediaSectionMaxBundle) { CreateJsepTransportController(JsepTransportController::Config()); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); + ContentGroup bundle_group(GROUP_TYPE_BUNDLE); bundle_group.AddContentName(kAudioMid1); local_offer->AddGroup(bundle_group); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); - std::unique_ptr remote_answer( - local_offer->Clone()); + std::unique_ptr remote_answer(local_offer->Clone()); EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); - std::unique_ptr local_reoffer( - local_offer->Clone()); + std::unique_ptr local_reoffer(local_offer->Clone()); local_reoffer->contents()[0].rejected = true; AddVideoSection(local_reoffer.get(), kVideoMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - local_reoffer->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); - cricket::ContentGroup new_bundle_group(cricket::GROUP_TYPE_BUNDLE); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); + local_reoffer->RemoveGroupByName(GROUP_TYPE_BUNDLE); + ContentGroup new_bundle_group(GROUP_TYPE_BUNDLE); new_bundle_group.AddContentName(kVideoMid1); local_reoffer->AddGroup(new_bundle_group); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_reoffer.get()) + ->SetLocalDescription(SdpType::kOffer, local_reoffer.get(), + remote_answer.get()) + .ok()); + std::unique_ptr remote_reanswer(local_reoffer->Clone()); + EXPECT_TRUE(transport_controller_ + ->SetRemoteDescription(SdpType::kAnswer, local_reoffer.get(), + remote_reanswer.get()) .ok()); - - std::unique_ptr remote_reanswer( - local_reoffer->Clone()); - EXPECT_TRUE( - transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_reanswer.get()) - .ok()); } TEST_F(JsepTransportControllerTest, RollbackRestoresRejectedTransport) { @@ -2491,31 +2572,31 @@ TEST_F(JsepTransportControllerTest, RollbackRestoresRejectedTransport) { // Perform initial offer/answer. 
CreateJsepTransportController(JsepTransportController::Config()); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - std::unique_ptr remote_answer( - local_offer->Clone()); - EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); + std::unique_ptr remote_answer(local_offer->Clone()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); auto mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio); // Apply a reoffer which rejects the m= section, causing the transport to be // set to null. - auto local_reoffer = std::make_unique(); + auto local_reoffer = std::make_unique(); AddAudioSection(local_reoffer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); local_reoffer->contents()[0].rejected = true; EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_reoffer.get()) + ->SetLocalDescription(SdpType::kOffer, local_reoffer.get(), + remote_answer.get()) .ok()); auto old_mid1_transport = mid1_transport; mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio); @@ -2538,30 +2619,28 @@ TEST_F(JsepTransportControllerTest, RollbackRestoresPreviousTransportMapping) { // Perform an initial offer/answer to establish a (kMid1Audio,kMid2Audio) // group. 
CreateJsepTransportController(JsepTransportController::Config()); - cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group(GROUP_TYPE_BUNDLE); bundle_group.AddContentName(kMid1Audio); bundle_group.AddContentName(kMid2Audio); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid2Audio, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(local_offer.get(), kMid3Audio, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); local_offer->AddGroup(bundle_group); - std::unique_ptr remote_answer( - local_offer->Clone()); + std::unique_ptr remote_answer(local_offer->Clone()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); auto mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio); @@ -2573,20 +2652,18 @@ TEST_F(JsepTransportControllerTest, RollbackRestoresPreviousTransportMapping) { // Apply a reoffer adding kMid3Audio to the group; transport mapping should // change, even without an answer, since this is an existing group. bundle_group.AddContentName(kMid3Audio); - auto local_reoffer = std::make_unique(); + auto local_reoffer = std::make_unique(); AddAudioSection(local_reoffer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_reoffer.get(), kMid2Audio, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddAudioSection(local_reoffer.get(), kMid3Audio, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); local_reoffer->AddGroup(bundle_group); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_reoffer.get()) + ->SetLocalDescription(SdpType::kOffer, local_reoffer.get(), + remote_answer.get()) .ok()); // Store the old transport pointer and verify that the offer actually changed @@ -2616,52 +2693,49 @@ TEST_F(JsepTransportControllerTest, RollbackAndAddToDifferentBundleGroup) { // Perform an initial offer/answer to establish two bundle groups, each with // one MID. 
CreateJsepTransportController(JsepTransportController::Config()); - cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group1(GROUP_TYPE_BUNDLE); bundle_group1.AddContentName(kMid1Audio); - cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group2(GROUP_TYPE_BUNDLE); bundle_group2.AddContentName(kMid2Audio); - auto local_offer = std::make_unique(); + auto local_offer = std::make_unique(); AddAudioSection(local_offer.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(local_offer.get(), kMid2Audio, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); local_offer->AddGroup(bundle_group1); local_offer->AddGroup(bundle_group2); - std::unique_ptr remote_answer( - local_offer->Clone()); + std::unique_ptr remote_answer(local_offer->Clone()); + EXPECT_TRUE( + transport_controller_ + ->SetLocalDescription(SdpType::kOffer, local_offer.get(), nullptr) + .ok()); EXPECT_TRUE(transport_controller_ - ->SetLocalDescription(SdpType::kOffer, local_offer.get()) - .ok()); - EXPECT_TRUE(transport_controller_ - ->SetRemoteDescription(SdpType::kAnswer, remote_answer.get()) + ->SetRemoteDescription(SdpType::kAnswer, local_offer.get(), + remote_answer.get()) .ok()); // Apply an offer that adds kMid3Audio to the first BUNDLE group., - cricket::ContentGroup modified_bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup modified_bundle_group1(GROUP_TYPE_BUNDLE); modified_bundle_group1.AddContentName(kMid1Audio); modified_bundle_group1.AddContentName(kMid3Audio); - auto subsequent_offer_1 = std::make_unique(); + auto subsequent_offer_1 = std::make_unique(); AddAudioSection(subsequent_offer_1.get(), kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(subsequent_offer_1.get(), kMid2Audio, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(subsequent_offer_1.get(), kMid3Audio, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); subsequent_offer_1->AddGroup(modified_bundle_group1); subsequent_offer_1->AddGroup(bundle_group2); - EXPECT_TRUE( - transport_controller_ - ->SetLocalDescription(SdpType::kOffer, subsequent_offer_1.get()) - .ok()); + EXPECT_TRUE(transport_controller_ + ->SetLocalDescription(SdpType::kOffer, + subsequent_offer_1.get(), + remote_answer.get()) + .ok()); auto mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio); auto mid2_transport = transport_controller_->GetRtpTransport(kMid2Audio); @@ -2674,26 +2748,24 @@ TEST_F(JsepTransportControllerTest, RollbackAndAddToDifferentBundleGroup) { EXPECT_EQ(nullptr, transport_controller_->GetRtpTransport(kMid3Audio)); // Apply an offer that adds kMid3Audio to the second BUNDLE group., - cricket::ContentGroup modified_bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup modified_bundle_group2(GROUP_TYPE_BUNDLE); modified_bundle_group2.AddContentName(kMid2Audio); modified_bundle_group2.AddContentName(kMid3Audio); - auto subsequent_offer_2 = std::make_unique(); + auto subsequent_offer_2 = std::make_unique(); AddAudioSection(subsequent_offer_2.get(), 
kMid1Audio, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(subsequent_offer_2.get(), kMid2Audio, kIceUfrag2, kIcePwd2, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); AddVideoSection(subsequent_offer_2.get(), kMid3Audio, kIceUfrag3, kIcePwd3, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + ICEMODE_FULL, CONNECTIONROLE_ACTPASS, nullptr); subsequent_offer_2->AddGroup(bundle_group1); subsequent_offer_2->AddGroup(modified_bundle_group2); - EXPECT_TRUE( - transport_controller_ - ->SetLocalDescription(SdpType::kOffer, subsequent_offer_2.get()) - .ok()); + EXPECT_TRUE(transport_controller_ + ->SetLocalDescription(SdpType::kOffer, + subsequent_offer_2.get(), + remote_answer.get()) + .ok()); mid1_transport = transport_controller_->GetRtpTransport(kMid1Audio); mid2_transport = transport_controller_->GetRtpTransport(kMid2Audio); @@ -2706,26 +2778,24 @@ TEST_F(JsepTransportControllerTest, RollbackAndAddToDifferentBundleGroup) { // is accepted. TEST_F(JsepTransportControllerTest, BundleOnlySectionDoesNotNeedRtcpMux) { CreateJsepTransportController(JsepTransportController::Config()); - cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group(GROUP_TYPE_BUNDLE); bundle_group.AddContentName(kAudioMid1); bundle_group.AddContentName(kVideoMid1); - auto offer = std::make_unique(); - AddAudioSection(offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - AddVideoSection(offer.get(), kVideoMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); + auto offer = std::make_unique(); + AddAudioSection(offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, ICEMODE_FULL, + CONNECTIONROLE_ACTPASS, nullptr); + AddVideoSection(offer.get(), kVideoMid1, kIceUfrag1, kIcePwd1, ICEMODE_FULL, + CONNECTIONROLE_ACTPASS, nullptr); offer->AddGroup(bundle_group); // Remove rtcp-mux and set bundle-only on the second content. offer->contents()[1].media_description()->set_rtcp_mux(false); offer->contents()[1].bundle_only = true; - EXPECT_TRUE( - transport_controller_->SetRemoteDescription(SdpType::kOffer, offer.get()) - .ok()); + EXPECT_TRUE(transport_controller_ + ->SetRemoteDescription(SdpType::kOffer, nullptr, offer.get()) + .ok()); } // Test that with max-bundle a single unbundled m-line is accepted. 
@@ -2735,13 +2805,68 @@ TEST_F(JsepTransportControllerTest, config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; CreateJsepTransportController(std::move(config)); - auto offer = std::make_unique(); - AddAudioSection(offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, - cricket::ICEMODE_FULL, cricket::CONNECTIONROLE_ACTPASS, - nullptr); - EXPECT_TRUE( - transport_controller_->SetRemoteDescription(SdpType::kOffer, offer.get()) - .ok()); + auto offer = std::make_unique(); + AddAudioSection(offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, ICEMODE_FULL, + CONNECTIONROLE_ACTPASS, nullptr); + EXPECT_TRUE(transport_controller_ + ->SetRemoteDescription(SdpType::kOffer, nullptr, offer.get()) + .ok()); +} + +TEST_F(JsepTransportControllerTest, SuggestPayloadTypeBasic) { + auto config = JsepTransportController::Config(); + CreateJsepTransportController(std::move(config)); + Codec pcmu_codec = CreateAudioCodec(-1, kPcmuCodecName, 8000, 1); + RTCErrorOr pcmu_pt = + transport_controller_->SuggestPayloadType("mid", pcmu_codec); + ASSERT_TRUE(pcmu_pt.ok()); + EXPECT_EQ(pcmu_pt.value(), PayloadType(0)); +} + +TEST_F(JsepTransportControllerTest, SuggestPayloadTypeReusesRemotePayloadType) { + auto config = JsepTransportController::Config(); + CreateJsepTransportController(std::move(config)); + const PayloadType remote_lyra_pt(99); + Codec remote_lyra_codec = CreateAudioCodec(remote_lyra_pt, "lyra", 8000, 1); + auto offer = std::make_unique(); + AddAudioSection(offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, ICEMODE_FULL, + CONNECTIONROLE_ACTPASS, nullptr); + offer->contents()[0].media_description()->set_codecs({remote_lyra_codec}); + EXPECT_TRUE(transport_controller_ + ->SetRemoteDescription(SdpType::kOffer, nullptr, offer.get()) + .ok()); + Codec local_lyra_codec = CreateAudioCodec(-1, "lyra", 8000, 1); + RTCErrorOr lyra_pt = + transport_controller_->SuggestPayloadType(kAudioMid1, local_lyra_codec); + ASSERT_TRUE(lyra_pt.ok()); + EXPECT_EQ(lyra_pt.value(), remote_lyra_pt); +} + +TEST_F(JsepTransportControllerTest, + SuggestPayloadTypeAvoidsRemoteLocalConflict) { + auto config = JsepTransportController::Config(); + CreateJsepTransportController(std::move(config)); + // libwebrtc will normally allocate 110 to DTMF/48000 + const PayloadType remote_opus_pt(110); + Codec remote_opus_codec = CreateAudioCodec(remote_opus_pt, "opus", 48000, 2); + auto offer = std::make_unique(); + AddAudioSection(offer.get(), kAudioMid1, kIceUfrag1, kIcePwd1, ICEMODE_FULL, + CONNECTIONROLE_ACTPASS, nullptr); + offer->contents()[0].media_description()->set_codecs({remote_opus_codec}); + EXPECT_TRUE(transport_controller_ + ->SetRemoteDescription(SdpType::kOffer, nullptr, offer.get()) + .ok()); + // Check that we get the Opus codec back with the remote PT + Codec local_opus_codec = CreateAudioCodec(-1, "opus", 48000, 2); + RTCErrorOr local_opus_pt = + transport_controller_->SuggestPayloadType(kAudioMid1, local_opus_codec); + EXPECT_EQ(local_opus_pt.value(), remote_opus_pt); + // Check that we don't get 110 allocated for DTMF, since it's in use for opus + Codec local_other_codec = CreateAudioCodec(-1, kDtmfCodecName, 48000, 1); + RTCErrorOr other_pt = + transport_controller_->SuggestPayloadType(kAudioMid1, local_other_codec); + ASSERT_TRUE(other_pt.ok()); + EXPECT_NE(other_pt.value(), remote_opus_pt); } } // namespace webrtc diff --git a/pc/jsep_transport_unittest.cc b/pc/jsep_transport_unittest.cc index f057d37a0d..f11d41625d 100644 --- a/pc/jsep_transport_unittest.cc +++ b/pc/jsep_transport_unittest.cc @@ -13,34 +13,55 @@ 
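// The SuggestPayloadType() cases added above to
// jsep_transport_controller_unittest.cc exercise payload type suggestion
// backed by the PayloadTypePicker introduced elsewhere in this change. A
// minimal usage sketch (assumptions: SuggestPayloadType() returns
// RTCErrorOr<PayloadType>, and CreateAudioCodec(-1, ...) builds a codec with
// no payload type assigned yet; `controller` and the MID string are
// illustrative):
PayloadType SketchSuggestPayloadType(JsepTransportController& controller) {
  Codec opus = CreateAudioCodec(-1, "opus", 48000, 2);
  RTCErrorOr<PayloadType> suggestion =
      controller.SuggestPayloadType("audio_mid", opus);
  RTC_CHECK(suggestion.ok());
  // Per the tests above: a payload type the remote description already maps
  // to the codec is reused, and a number in use for a different codec on the
  // same MID is not handed out again.
  return suggestion.value();
}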
#include #include +#include +#include #include #include #include #include +#include #include "api/candidate.h" +#include "api/ice_transport_interface.h" +#include "api/jsep.h" +#include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" +#include "call/payload_type_picker.h" #include "media/base/fake_rtp.h" -#include "p2p/base/fake_dtls_transport.h" -#include "p2p/base/fake_ice_transport.h" +#include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/packet_transport_internal.h" +#include "p2p/base/transport_description.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "p2p/dtls/fake_dtls_transport.h" +#include "p2p/test/fake_ice_transport.h" +#include "pc/dtls_srtp_transport.h" +#include "pc/rtp_transport.h" +#include "pc/srtp_transport.h" +#include "pc/transport_stats.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/buffer.h" #include "rtc_base/byte_order.h" #include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/helpers.h" #include "rtc_base/logging.h" #include "rtc_base/net_helper.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/rtc_certificate.h" #include "rtc_base/socket_address.h" #include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_fingerprint.h" #include "rtc_base/ssl_identity.h" +#include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" #include "test/gtest.h" #include "test/scoped_key_value_config.h" -namespace cricket { +namespace webrtc { namespace { + using webrtc::SdpType; +using ::webrtc::test::ScopedKeyValueConfig; static const char kIceUfrag1[] = "U001"; static const char kIcePwd1[] = "TESTICEPWD00000000000001"; @@ -48,11 +69,6 @@ static const char kIceUfrag2[] = "U002"; static const char kIcePwd2[] = "TESTIEPWD00000000000002"; static const char kTransportName[] = "Test Transport"; -enum class SrtpMode { - kSdes, - kDtlsSrtp, -}; - struct NegotiateRoleParams { ConnectionRole local_role; ConnectionRole remote_role; @@ -74,21 +90,21 @@ std::ostream& operator<<(std::ostream& os, const NegotiateRoleParams& param) { return os; } -rtc::scoped_refptr CreateIceTransport( +scoped_refptr CreateIceTransport( std::unique_ptr internal) { if (!internal) { return nullptr; } - return rtc::make_ref_counted(std::move(internal)); + return make_ref_counted(std::move(internal)); } class JsepTransport2Test : public ::testing::Test, public sigslot::has_slots<> { protected: - std::unique_ptr CreateSdesTransport( - rtc::PacketTransportInternal* rtp_packet_transport, - rtc::PacketTransportInternal* rtcp_packet_transport) { - auto srtp_transport = std::make_unique( + std::unique_ptr CreateSdesTransport( + PacketTransportInternal* rtp_packet_transport, + PacketTransportInternal* rtcp_packet_transport) { + auto srtp_transport = std::make_unique( rtcp_packet_transport == nullptr, field_trials_); srtp_transport->SetRtpPacketTransport(rtp_packet_transport); @@ -98,10 +114,10 @@ class JsepTransport2Test : public ::testing::Test, public sigslot::has_slots<> { return srtp_transport; } - std::unique_ptr CreateDtlsSrtpTransport( - cricket::DtlsTransportInternal* rtp_dtls_transport, - cricket::DtlsTransportInternal* rtcp_dtls_transport) { - auto dtls_srtp_transport = std::make_unique( + std::unique_ptr CreateDtlsSrtpTransport( + DtlsTransportInternal* rtp_dtls_transport, + DtlsTransportInternal* rtcp_dtls_transport) { + auto dtls_srtp_transport = std::make_unique( rtcp_dtls_transport == nullptr, field_trials_); 
dtls_srtp_transport->SetDtlsTransports(rtp_dtls_transport, rtcp_dtls_transport); @@ -110,8 +126,7 @@ class JsepTransport2Test : public ::testing::Test, public sigslot::has_slots<> { // Create a new JsepTransport with a FakeDtlsTransport and a // FakeIceTransport. - std::unique_ptr CreateJsepTransport2(bool rtcp_mux_enabled, - SrtpMode srtp_mode) { + std::unique_ptr CreateJsepTransport2(bool rtcp_mux_enabled) { auto ice_internal = std::make_unique( kTransportName, ICE_CANDIDATE_COMPONENT_RTP); auto rtp_dtls_transport = @@ -128,22 +143,11 @@ class JsepTransport2Test : public ::testing::Test, public sigslot::has_slots<> { } auto rtcp_ice = CreateIceTransport(std::move(rtcp_ice_internal)); - std::unique_ptr unencrypted_rtp_transport; - std::unique_ptr sdes_transport; - std::unique_ptr dtls_srtp_transport; - switch (srtp_mode) { - case SrtpMode::kSdes: - sdes_transport = CreateSdesTransport(rtp_dtls_transport.get(), - rtcp_dtls_transport.get()); - sdes_transport_ = sdes_transport.get(); - break; - case SrtpMode::kDtlsSrtp: - dtls_srtp_transport = CreateDtlsSrtpTransport( - rtp_dtls_transport.get(), rtcp_dtls_transport.get()); - break; - default: - RTC_DCHECK_NOTREACHED(); - } + std::unique_ptr unencrypted_rtp_transport; + std::unique_ptr sdes_transport; + std::unique_ptr dtls_srtp_transport; + dtls_srtp_transport = CreateDtlsSrtpTransport(rtp_dtls_transport.get(), + rtcp_dtls_transport.get()); auto jsep_transport = std::make_unique( kTransportName, /*local_certificate=*/nullptr, std::move(ice), @@ -151,7 +155,8 @@ class JsepTransport2Test : public ::testing::Test, public sigslot::has_slots<> { std::move(sdes_transport), std::move(dtls_srtp_transport), std::move(rtp_dtls_transport), std::move(rtcp_dtls_transport), /*sctp_transport=*/nullptr, - /*rtcp_mux_active_callback=*/[&]() { OnRtcpMuxActive(); }); + /*rtcp_mux_active_callback=*/[&]() { OnRtcpMuxActive(); }, + payload_type_picker_); signal_rtcp_mux_active_received_ = false; return jsep_transport; @@ -161,14 +166,14 @@ class JsepTransport2Test : public ::testing::Test, public sigslot::has_slots<> { bool rtcp_mux_enabled, const char* ufrag, const char* pwd, - const rtc::scoped_refptr& cert, + const scoped_refptr& cert, ConnectionRole role = CONNECTIONROLE_NONE) { JsepTransportDescription jsep_description; jsep_description.rtcp_mux_enabled = rtcp_mux_enabled; - std::unique_ptr fingerprint; + std::unique_ptr fingerprint; if (cert) { - fingerprint = rtc::SSLFingerprint::CreateFromCertificate(*cert); + fingerprint = SSLFingerprint::CreateFromCertificate(*cert); } jsep_description.transport_desc = TransportDescription(std::vector(), ufrag, pwd, @@ -178,23 +183,24 @@ class JsepTransport2Test : public ::testing::Test, public sigslot::has_slots<> { Candidate CreateCandidate(int component) { Candidate c; - c.set_address(rtc::SocketAddress("192.168.1.1", 8000)); + c.set_address(SocketAddress("192.168.1.1", 8000)); c.set_component(component); - c.set_protocol(UDP_PROTOCOL_NAME); + c.set_protocol(webrtc::UDP_PROTOCOL_NAME); c.set_priority(1); return c; } void OnRtcpMuxActive() { signal_rtcp_mux_active_received_ = true; } - rtc::AutoThread main_thread_; + AutoThread main_thread_; std::unique_ptr jsep_transport_; bool signal_rtcp_mux_active_received_ = false; // The SrtpTransport is owned by `jsep_transport_`. Keep a raw pointer here // for testing. 
- webrtc::SrtpTransport* sdes_transport_ = nullptr; + SrtpTransport* sdes_transport_ = nullptr; - webrtc::test::ScopedKeyValueConfig field_trials_; + ScopedKeyValueConfig field_trials_; + PayloadTypePicker payload_type_picker_; }; // The parameterized tests cover both cases when RTCP mux is enable and @@ -205,7 +211,7 @@ class JsepTransport2WithRtcpMux : public JsepTransport2Test, // This test verifies the ICE parameters are properly applied to the transports. TEST_P(JsepTransport2WithRtcpMux, SetIceParameters) { bool rtcp_mux_enabled = GetParam(); - jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled); JsepTransportDescription jsep_description; jsep_description.transport_desc = TransportDescription(kIceUfrag1, kIcePwd1); @@ -217,15 +223,15 @@ TEST_P(JsepTransport2WithRtcpMux, SetIceParameters) { auto fake_ice_transport = static_cast( jsep_transport_->rtp_dtls_transport()->ice_transport()); EXPECT_EQ(ICEMODE_FULL, fake_ice_transport->remote_ice_mode()); - EXPECT_EQ(kIceUfrag1, fake_ice_transport->ice_ufrag()); - EXPECT_EQ(kIcePwd1, fake_ice_transport->ice_pwd()); + EXPECT_EQ(kIceUfrag1, fake_ice_transport->local_ice_parameters()->ufrag); + EXPECT_EQ(kIcePwd1, fake_ice_transport->local_ice_parameters()->pwd); if (!rtcp_mux_enabled) { fake_ice_transport = static_cast( jsep_transport_->rtcp_dtls_transport()->ice_transport()); ASSERT_TRUE(fake_ice_transport); EXPECT_EQ(ICEMODE_FULL, fake_ice_transport->remote_ice_mode()); - EXPECT_EQ(kIceUfrag1, fake_ice_transport->ice_ufrag()); - EXPECT_EQ(kIcePwd1, fake_ice_transport->ice_pwd()); + EXPECT_EQ(kIceUfrag1, fake_ice_transport->local_ice_parameters()->ufrag); + EXPECT_EQ(kIcePwd1, fake_ice_transport->local_ice_parameters()->pwd); } jsep_description.transport_desc = TransportDescription(kIceUfrag2, kIcePwd2); @@ -236,30 +242,28 @@ TEST_P(JsepTransport2WithRtcpMux, SetIceParameters) { fake_ice_transport = static_cast( jsep_transport_->rtp_dtls_transport()->ice_transport()); EXPECT_EQ(ICEMODE_FULL, fake_ice_transport->remote_ice_mode()); - EXPECT_EQ(kIceUfrag2, fake_ice_transport->remote_ice_ufrag()); - EXPECT_EQ(kIcePwd2, fake_ice_transport->remote_ice_pwd()); + EXPECT_EQ(kIceUfrag2, fake_ice_transport->remote_ice_parameters()->ufrag); + EXPECT_EQ(kIcePwd2, fake_ice_transport->remote_ice_parameters()->pwd); if (!rtcp_mux_enabled) { fake_ice_transport = static_cast( jsep_transport_->rtcp_dtls_transport()->ice_transport()); ASSERT_TRUE(fake_ice_transport); EXPECT_EQ(ICEMODE_FULL, fake_ice_transport->remote_ice_mode()); - EXPECT_EQ(kIceUfrag2, fake_ice_transport->remote_ice_ufrag()); - EXPECT_EQ(kIcePwd2, fake_ice_transport->remote_ice_pwd()); + EXPECT_EQ(kIceUfrag2, fake_ice_transport->remote_ice_parameters()->ufrag); + EXPECT_EQ(kIcePwd2, fake_ice_transport->remote_ice_parameters()->pwd); } } // Similarly, test DTLS parameters are properly applied to the transports. TEST_P(JsepTransport2WithRtcpMux, SetDtlsParameters) { bool rtcp_mux_enabled = GetParam(); - jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled); // Create certificates. 
- rtc::scoped_refptr local_cert = - rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("local", rtc::KT_DEFAULT)); - rtc::scoped_refptr remote_cert = - rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("remote", rtc::KT_DEFAULT)); + scoped_refptr local_cert = + RTCCertificate::Create(SSLIdentity::Create("local", webrtc::KT_DEFAULT)); + scoped_refptr remote_cert = + RTCCertificate::Create(SSLIdentity::Create("remote", webrtc::KT_DEFAULT)); jsep_transport_->SetLocalCertificate(local_cert); // Apply offer. @@ -283,7 +287,8 @@ TEST_P(JsepTransport2WithRtcpMux, SetDtlsParameters) { // transport descriptions. auto role = jsep_transport_->GetDtlsRole(); ASSERT_TRUE(role); - EXPECT_EQ(rtc::SSL_SERVER, role); // Because remote description was "active". + EXPECT_EQ(webrtc::SSL_SERVER, + role); // Because remote description was "active". auto fake_dtls = static_cast(jsep_transport_->rtp_dtls_transport()); EXPECT_EQ(remote_description.transport_desc.identity_fingerprint->ToString(), @@ -302,15 +307,13 @@ TEST_P(JsepTransport2WithRtcpMux, SetDtlsParameters) { // CONNECTIONROLE_PASSIVE, expecting SSL_CLIENT role. TEST_P(JsepTransport2WithRtcpMux, SetDtlsParametersWithPassiveAnswer) { bool rtcp_mux_enabled = GetParam(); - jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled); // Create certificates. - rtc::scoped_refptr local_cert = - rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("local", rtc::KT_DEFAULT)); - rtc::scoped_refptr remote_cert = - rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("remote", rtc::KT_DEFAULT)); + scoped_refptr local_cert = + RTCCertificate::Create(SSLIdentity::Create("local", webrtc::KT_DEFAULT)); + scoped_refptr remote_cert = + RTCCertificate::Create(SSLIdentity::Create("remote", webrtc::KT_DEFAULT)); jsep_transport_->SetLocalCertificate(local_cert); // Apply offer. @@ -334,7 +337,7 @@ TEST_P(JsepTransport2WithRtcpMux, SetDtlsParametersWithPassiveAnswer) { // transport descriptions. auto role = jsep_transport_->GetDtlsRole(); ASSERT_TRUE(role); - EXPECT_EQ(rtc::SSL_CLIENT, + EXPECT_EQ(webrtc::SSL_CLIENT, role); // Because remote description was "passive". auto fake_dtls = static_cast(jsep_transport_->rtp_dtls_transport()); @@ -354,7 +357,7 @@ TEST_P(JsepTransport2WithRtcpMux, SetDtlsParametersWithPassiveAnswer) { // only starts returning "false" once an ICE restart has been initiated. TEST_P(JsepTransport2WithRtcpMux, NeedsIceRestart) { bool rtcp_mux_enabled = GetParam(); - jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled); // Use the same JsepTransportDescription for both offer and answer. JsepTransportDescription description; @@ -399,7 +402,7 @@ TEST_P(JsepTransport2WithRtcpMux, NeedsIceRestart) { TEST_P(JsepTransport2WithRtcpMux, GetStats) { bool rtcp_mux_enabled = GetParam(); - jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled); size_t expected_stats_size = rtcp_mux_enabled ? 1u : 2u; TransportStats stats; @@ -415,25 +418,24 @@ TEST_P(JsepTransport2WithRtcpMux, GetStats) { // certificate matches the fingerprint. 
TEST_P(JsepTransport2WithRtcpMux, VerifyCertificateFingerprint) { bool rtcp_mux_enabled = GetParam(); - jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled); EXPECT_FALSE( jsep_transport_->VerifyCertificateFingerprint(nullptr, nullptr).ok()); - rtc::KeyType key_types[] = {rtc::KT_RSA, rtc::KT_ECDSA}; + KeyType key_types[] = {webrtc::KT_RSA, webrtc::KT_ECDSA}; for (auto& key_type : key_types) { - rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("testing", key_type)); + scoped_refptr certificate = + RTCCertificate::Create(SSLIdentity::Create("testing", key_type)); ASSERT_NE(nullptr, certificate); std::string digest_algorithm; ASSERT_TRUE(certificate->GetSSLCertificate().GetSignatureDigestAlgorithm( &digest_algorithm)); ASSERT_FALSE(digest_algorithm.empty()); - std::unique_ptr good_fingerprint = - rtc::SSLFingerprint::CreateUnique(digest_algorithm, - *certificate->identity()); + std::unique_ptr good_fingerprint = + SSLFingerprint::CreateUnique(digest_algorithm, + *certificate->identity()); ASSERT_NE(nullptr, good_fingerprint); EXPECT_TRUE(jsep_transport_ @@ -448,7 +450,7 @@ TEST_P(JsepTransport2WithRtcpMux, VerifyCertificateFingerprint) { ->VerifyCertificateFingerprint(nullptr, good_fingerprint.get()) .ok()); - rtc::SSLFingerprint bad_fingerprint = *good_fingerprint; + SSLFingerprint bad_fingerprint = *good_fingerprint; bad_fingerprint.digest.AppendData("0", 1); EXPECT_FALSE( jsep_transport_ @@ -462,9 +464,8 @@ TEST_P(JsepTransport2WithRtcpMux, ValidDtlsRoleNegotiation) { bool rtcp_mux_enabled = GetParam(); // Just use the same certificate for both sides; doesn't really matter in a // non end-to-end test. - rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA)); + scoped_refptr certificate = + RTCCertificate::Create(SSLIdentity::Create("testing", webrtc::KT_ECDSA)); JsepTransportDescription local_description = MakeJsepTransportDescription( rtcp_mux_enabled, kIceUfrag1, kIcePwd1, certificate); @@ -489,8 +490,7 @@ TEST_P(JsepTransport2WithRtcpMux, ValidDtlsRoleNegotiation) { }; for (auto& param : valid_client_params) { - jsep_transport_ = - CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled); jsep_transport_->SetLocalCertificate(certificate); local_description.transport_desc.connection_role = param.local_role; @@ -516,7 +516,7 @@ TEST_P(JsepTransport2WithRtcpMux, ValidDtlsRoleNegotiation) { param.local_type) .ok()); } - EXPECT_EQ(rtc::SSL_CLIENT, *jsep_transport_->GetDtlsRole()); + EXPECT_EQ(webrtc::SSL_CLIENT, *jsep_transport_->GetDtlsRole()); } // Parameters which set the SSL role to SSL_SERVER. 
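// Informal restatement of the mapping these parameter tables encode (a sketch
// only, valid for the accepted combinations exercised here; the production
// negotiation lives in JsepTransport): whichever side ends up "active" is the
// DTLS client, the "passive" side is the DTLS server, and an "actpass"
// offerer takes whichever role the answerer leaves over.
SSLRole SketchNegotiatedDtlsRole(ConnectionRole local_role,
                                 ConnectionRole remote_role) {
  // Only meaningful for the valid role combinations listed in these tests.
  if (local_role == CONNECTIONROLE_ACTIVE ||
      (local_role == CONNECTIONROLE_ACTPASS &&
       remote_role == CONNECTIONROLE_PASSIVE)) {
    return SSL_CLIENT;
  }
  return SSL_SERVER;
}
// The valid_server_params combinations iterated below are ones that resolve
// to SSL_SERVER under this rule.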
@@ -535,8 +535,7 @@ TEST_P(JsepTransport2WithRtcpMux, ValidDtlsRoleNegotiation) { }; for (auto& param : valid_server_params) { - jsep_transport_ = - CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled); jsep_transport_->SetLocalCertificate(certificate); local_description.transport_desc.connection_role = param.local_role; @@ -562,7 +561,7 @@ TEST_P(JsepTransport2WithRtcpMux, ValidDtlsRoleNegotiation) { param.local_type) .ok()); } - EXPECT_EQ(rtc::SSL_SERVER, *jsep_transport_->GetDtlsRole()); + EXPECT_EQ(webrtc::SSL_SERVER, *jsep_transport_->GetDtlsRole()); } } @@ -571,9 +570,8 @@ TEST_P(JsepTransport2WithRtcpMux, InvalidDtlsRoleNegotiation) { bool rtcp_mux_enabled = GetParam(); // Just use the same certificate for both sides; doesn't really matter in a // non end-to-end test. - rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA)); + scoped_refptr certificate = + RTCCertificate::Create(SSLIdentity::Create("testing", webrtc::KT_ECDSA)); JsepTransportDescription local_description = MakeJsepTransportDescription( rtcp_mux_enabled, kIceUfrag1, kIcePwd1, certificate); @@ -607,8 +605,7 @@ TEST_P(JsepTransport2WithRtcpMux, InvalidDtlsRoleNegotiation) { SdpType::kPrAnswer}}; for (auto& param : duplicate_params) { - jsep_transport_ = - CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled); jsep_transport_->SetLocalCertificate(certificate); local_description.transport_desc.connection_role = param.local_role; @@ -658,8 +655,7 @@ TEST_P(JsepTransport2WithRtcpMux, InvalidDtlsRoleNegotiation) { SdpType::kPrAnswer}}; for (auto& param : offerer_without_actpass_params) { - jsep_transport_ = - CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled); jsep_transport_->SetLocalCertificate(certificate); local_description.transport_desc.connection_role = param.local_role; @@ -701,11 +697,10 @@ INSTANTIATE_TEST_SUITE_P(JsepTransport2Test, TEST_F(JsepTransport2Test, ValidDtlsReofferFromAnswerer) { // Just use the same certificate for both sides; doesn't really matter in a // non end-to-end test. - rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA)); + scoped_refptr certificate = + RTCCertificate::Create(SSLIdentity::Create("testing", webrtc::KT_ECDSA)); bool rtcp_mux_enabled = true; - jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled); jsep_transport_->SetLocalCertificate(certificate); JsepTransportDescription local_offer = @@ -748,11 +743,10 @@ TEST_F(JsepTransport2Test, ValidDtlsReofferFromAnswerer) { TEST_F(JsepTransport2Test, InvalidDtlsReofferFromAnswerer) { // Just use the same certificate for both sides; doesn't really matter in a // non end-to-end test. 
- rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA)); + scoped_refptr certificate = + RTCCertificate::Create(SSLIdentity::Create("testing", webrtc::KT_ECDSA)); bool rtcp_mux_enabled = true; - jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled); jsep_transport_->SetLocalCertificate(certificate); JsepTransportDescription local_offer = @@ -794,11 +788,10 @@ TEST_F(JsepTransport2Test, InvalidDtlsReofferFromAnswerer) { // This is allowed by dtls-sdp, though we'll never generate such an offer, // since JSEP requires generating "actpass". TEST_F(JsepTransport2Test, RemoteOfferWithCurrentNegotiatedDtlsRole) { - rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA)); + scoped_refptr certificate = + RTCCertificate::Create(SSLIdentity::Create("testing", webrtc::KT_ECDSA)); bool rtcp_mux_enabled = true; - jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled); jsep_transport_->SetLocalCertificate(certificate); JsepTransportDescription remote_desc = @@ -820,9 +813,9 @@ TEST_F(JsepTransport2Test, RemoteOfferWithCurrentNegotiatedDtlsRole) { .ok()); // Sanity check that role was actually negotiated. - absl::optional role = jsep_transport_->GetDtlsRole(); + std::optional role = jsep_transport_->GetDtlsRole(); ASSERT_TRUE(role); - EXPECT_EQ(rtc::SSL_CLIENT, *role); + EXPECT_EQ(webrtc::SSL_CLIENT, *role); // Subsequent offer with current negotiated role of "passive". remote_desc.transport_desc.connection_role = CONNECTIONROLE_PASSIVE; @@ -839,11 +832,10 @@ TEST_F(JsepTransport2Test, RemoteOfferWithCurrentNegotiatedDtlsRole) { // Test that a remote offer with the inverse of the current negotiated DTLS // role is rejected. TEST_F(JsepTransport2Test, RemoteOfferThatChangesNegotiatedDtlsRole) { - rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA)); + scoped_refptr certificate = + RTCCertificate::Create(SSLIdentity::Create("testing", webrtc::KT_ECDSA)); bool rtcp_mux_enabled = true; - jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled); jsep_transport_->SetLocalCertificate(certificate); JsepTransportDescription remote_desc = @@ -865,9 +857,9 @@ TEST_F(JsepTransport2Test, RemoteOfferThatChangesNegotiatedDtlsRole) { .ok()); // Sanity check that role was actually negotiated. - absl::optional role = jsep_transport_->GetDtlsRole(); + std::optional role = jsep_transport_->GetDtlsRole(); ASSERT_TRUE(role); - EXPECT_EQ(rtc::SSL_CLIENT, *role); + EXPECT_EQ(webrtc::SSL_CLIENT, *role); // Subsequent offer with current negotiated role of "passive". remote_desc.transport_desc.connection_role = CONNECTIONROLE_ACTIVE; @@ -883,14 +875,12 @@ TEST_F(JsepTransport2Test, RemoteOfferThatChangesNegotiatedDtlsRole) { // Test that a remote offer which changes both fingerprint and role is accepted. 
TEST_F(JsepTransport2Test, RemoteOfferThatChangesFingerprintAndDtlsRole) { - rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("testing1", rtc::KT_ECDSA)); - rtc::scoped_refptr certificate2 = - rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("testing2", rtc::KT_ECDSA)); + scoped_refptr certificate = + RTCCertificate::Create(SSLIdentity::Create("testing1", webrtc::KT_ECDSA)); + scoped_refptr certificate2 = + RTCCertificate::Create(SSLIdentity::Create("testing2", webrtc::KT_ECDSA)); bool rtcp_mux_enabled = true; - jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled); jsep_transport_->SetLocalCertificate(certificate); JsepTransportDescription remote_desc = @@ -916,9 +906,9 @@ TEST_F(JsepTransport2Test, RemoteOfferThatChangesFingerprintAndDtlsRole) { .ok()); // Sanity check that role was actually negotiated. - absl::optional role = jsep_transport_->GetDtlsRole(); + std::optional role = jsep_transport_->GetDtlsRole(); ASSERT_TRUE(role); - EXPECT_EQ(rtc::SSL_CLIENT, *role); + EXPECT_EQ(webrtc::SSL_CLIENT, *role); // Subsequent exchange with new remote fingerprint and different role. local_desc.transport_desc.connection_role = CONNECTIONROLE_PASSIVE; @@ -933,17 +923,16 @@ TEST_F(JsepTransport2Test, RemoteOfferThatChangesFingerprintAndDtlsRole) { role = jsep_transport_->GetDtlsRole(); ASSERT_TRUE(role); - EXPECT_EQ(rtc::SSL_SERVER, *role); + EXPECT_EQ(webrtc::SSL_SERVER, *role); } // Testing that a legacy client that doesn't use the setup attribute will be // interpreted as having an active role. TEST_F(JsepTransport2Test, DtlsSetupWithLegacyAsAnswerer) { - rtc::scoped_refptr certificate = - rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("testing", rtc::KT_ECDSA)); + scoped_refptr certificate = + RTCCertificate::Create(SSLIdentity::Create("testing", webrtc::KT_ECDSA)); bool rtcp_mux_enabled = true; - jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(rtcp_mux_enabled); jsep_transport_->SetLocalCertificate(certificate); JsepTransportDescription remote_desc = @@ -965,18 +954,17 @@ TEST_F(JsepTransport2Test, DtlsSetupWithLegacyAsAnswerer) { ->SetRemoteJsepTransportDescription(remote_desc, SdpType::kAnswer) .ok()); - absl::optional role = jsep_transport_->GetDtlsRole(); + std::optional role = jsep_transport_->GetDtlsRole(); ASSERT_TRUE(role); // Since legacy answer omitted setup atribute, and we offered actpass, we // should act as passive (server). - EXPECT_EQ(rtc::SSL_SERVER, *role); + EXPECT_EQ(webrtc::SSL_SERVER, *role); } // Tests that when the RTCP mux is successfully negotiated, the RTCP transport // will be destroyed and the SignalRtpMuxActive will be fired. TEST_F(JsepTransport2Test, RtcpMuxNegotiation) { - jsep_transport_ = - CreateJsepTransport2(/*rtcp_mux_enabled=*/false, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(/*rtcp_mux_enabled=*/false); JsepTransportDescription local_desc; local_desc.rtcp_mux_enabled = true; ASSERT_NE(nullptr, jsep_transport_->rtcp_dtls_transport()); @@ -998,8 +986,7 @@ TEST_F(JsepTransport2Test, RtcpMuxNegotiation) { EXPECT_TRUE(signal_rtcp_mux_active_received_); // The remote side doesn't support RTCP-mux. 
- jsep_transport_ = - CreateJsepTransport2(/*rtcp_mux_enabled=*/false, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(/*rtcp_mux_enabled=*/false); signal_rtcp_mux_active_received_ = false; remote_desc.rtcp_mux_enabled = false; ASSERT_TRUE( @@ -1015,87 +1002,10 @@ TEST_F(JsepTransport2Test, RtcpMuxNegotiation) { EXPECT_FALSE(signal_rtcp_mux_active_received_); } -TEST_F(JsepTransport2Test, SdesNegotiation) { - jsep_transport_ = - CreateJsepTransport2(/*rtcp_mux_enabled=*/true, SrtpMode::kSdes); - ASSERT_TRUE(sdes_transport_); - EXPECT_FALSE(sdes_transport_->IsSrtpActive()); - - JsepTransportDescription offer_desc; - offer_desc.cryptos.push_back(cricket::CryptoParams( - 1, rtc::kCsAesCm128HmacSha1_32, "inline:" + rtc::CreateRandomString(40), - std::string())); - ASSERT_TRUE( - jsep_transport_ - ->SetLocalJsepTransportDescription(offer_desc, SdpType::kOffer) - .ok()); - - JsepTransportDescription answer_desc; - answer_desc.cryptos.push_back(cricket::CryptoParams( - 1, rtc::kCsAesCm128HmacSha1_32, "inline:" + rtc::CreateRandomString(40), - std::string())); - ASSERT_TRUE( - jsep_transport_ - ->SetRemoteJsepTransportDescription(answer_desc, SdpType::kAnswer) - .ok()); - EXPECT_TRUE(sdes_transport_->IsSrtpActive()); -} - -TEST_F(JsepTransport2Test, SdesNegotiationWithEmptyCryptosInAnswer) { - jsep_transport_ = - CreateJsepTransport2(/*rtcp_mux_enabled=*/true, SrtpMode::kSdes); - ASSERT_TRUE(sdes_transport_); - EXPECT_FALSE(sdes_transport_->IsSrtpActive()); - - JsepTransportDescription offer_desc; - offer_desc.cryptos.push_back(cricket::CryptoParams( - 1, rtc::kCsAesCm128HmacSha1_32, "inline:" + rtc::CreateRandomString(40), - std::string())); - ASSERT_TRUE( - jsep_transport_ - ->SetLocalJsepTransportDescription(offer_desc, SdpType::kOffer) - .ok()); - - JsepTransportDescription answer_desc; - ASSERT_TRUE( - jsep_transport_ - ->SetRemoteJsepTransportDescription(answer_desc, SdpType::kAnswer) - .ok()); - // SRTP is not active because the crypto parameter is answer is empty. - EXPECT_FALSE(sdes_transport_->IsSrtpActive()); -} - -TEST_F(JsepTransport2Test, SdesNegotiationWithMismatchedCryptos) { - jsep_transport_ = - CreateJsepTransport2(/*rtcp_mux_enabled=*/true, SrtpMode::kSdes); - ASSERT_TRUE(sdes_transport_); - EXPECT_FALSE(sdes_transport_->IsSrtpActive()); - - JsepTransportDescription offer_desc; - offer_desc.cryptos.push_back(cricket::CryptoParams( - 1, rtc::kCsAesCm128HmacSha1_32, "inline:" + rtc::CreateRandomString(40), - std::string())); - ASSERT_TRUE( - jsep_transport_ - ->SetLocalJsepTransportDescription(offer_desc, SdpType::kOffer) - .ok()); - - JsepTransportDescription answer_desc; - answer_desc.cryptos.push_back(cricket::CryptoParams( - 1, rtc::kCsAesCm128HmacSha1_80, "inline:" + rtc::CreateRandomString(40), - std::string())); - // Expected to fail because the crypto parameters don't match. - ASSERT_FALSE( - jsep_transport_ - ->SetRemoteJsepTransportDescription(answer_desc, SdpType::kAnswer) - .ok()); -} - // Tests that the remote candidates can be added to the transports after both // local and remote descriptions are set. 
TEST_F(JsepTransport2Test, AddRemoteCandidates) { - jsep_transport_ = - CreateJsepTransport2(/*rtcp_mux_enabled=*/true, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(/*rtcp_mux_enabled=*/true); auto fake_ice_transport = static_cast( jsep_transport_->rtp_dtls_transport()->ice_transport()); @@ -1119,7 +1029,6 @@ TEST_F(JsepTransport2Test, AddRemoteCandidates) { } enum class Scenario { - kSdes, kDtlsBeforeCallerSendOffer, kDtlsBeforeCallerSetAnswer, kDtlsAfterCallerSetAnswer, @@ -1131,53 +1040,53 @@ class JsepTransport2HeaderExtensionTest protected: JsepTransport2HeaderExtensionTest() {} - void CreateJsepTransportPair(SrtpMode mode) { - jsep_transport1_ = CreateJsepTransport2(/*rtcp_mux_enabled=*/true, mode); - jsep_transport2_ = CreateJsepTransport2(/*rtcp_mux_enabled=*/true, mode); + void CreateJsepTransportPair() { + jsep_transport1_ = CreateJsepTransport2(/*rtcp_mux_enabled=*/true); + jsep_transport2_ = CreateJsepTransport2(/*rtcp_mux_enabled=*/true); auto fake_dtls1 = static_cast(jsep_transport1_->rtp_dtls_transport()); auto fake_dtls2 = static_cast(jsep_transport2_->rtp_dtls_transport()); - fake_dtls1->fake_ice_transport()->SignalReadPacket.connect( - this, &JsepTransport2HeaderExtensionTest::OnReadPacket1); - fake_dtls2->fake_ice_transport()->SignalReadPacket.connect( - this, &JsepTransport2HeaderExtensionTest::OnReadPacket2); - - if (mode == SrtpMode::kDtlsSrtp) { - auto cert1 = rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT)); - jsep_transport1_->rtp_dtls_transport()->SetLocalCertificate(cert1); - auto cert2 = rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("session1", rtc::KT_DEFAULT)); - jsep_transport2_->rtp_dtls_transport()->SetLocalCertificate(cert2); - } + fake_dtls1->fake_ice_transport()->RegisterReceivedPacketCallback( + this, [&](PacketTransportInternal* transport, + const ReceivedIpPacket& packet) { + OnReadPacket1(transport, packet); + }); + fake_dtls2->fake_ice_transport()->RegisterReceivedPacketCallback( + this, [&](PacketTransportInternal* transport, + const ReceivedIpPacket& packet) { + OnReadPacket2(transport, packet); + }); + + auto cert1 = RTCCertificate::Create( + SSLIdentity::Create("session1", webrtc::KT_DEFAULT)); + jsep_transport1_->rtp_dtls_transport()->SetLocalCertificate(cert1); + auto cert2 = RTCCertificate::Create( + SSLIdentity::Create("session1", webrtc::KT_DEFAULT)); + jsep_transport2_->rtp_dtls_transport()->SetLocalCertificate(cert2); } - void OnReadPacket1(rtc::PacketTransportInternal* transport, - const char* data, - size_t size, - const int64_t& /* packet_time_us */, - int flags) { + void OnReadPacket1(PacketTransportInternal* transport, + const ReceivedIpPacket& packet) { RTC_LOG(LS_INFO) << "JsepTransport 1 Received a packet."; CompareHeaderExtensions( reinterpret_cast(kPcmuFrameWithExtensions), - sizeof(kPcmuFrameWithExtensions), data, size, recv_encrypted_headers1_, - false); + sizeof(kPcmuFrameWithExtensions), + reinterpret_cast(packet.payload().data()), + packet.payload().size(), recv_encrypted_headers1_, false); received_packet_count_++; } - void OnReadPacket2(rtc::PacketTransportInternal* transport, - const char* data, - size_t size, - const int64_t& /* packet_time_us */, - int flags) { + void OnReadPacket2(PacketTransportInternal* transport, + const ReceivedIpPacket& packet) { RTC_LOG(LS_INFO) << "JsepTransport 2 Received a packet."; CompareHeaderExtensions( reinterpret_cast(kPcmuFrameWithExtensions), - sizeof(kPcmuFrameWithExtensions), data, size, recv_encrypted_headers2_, - 
false); + sizeof(kPcmuFrameWithExtensions), + reinterpret_cast(packet.payload().data()), + packet.payload().size(), recv_encrypted_headers2_, false); received_packet_count_++; } @@ -1208,21 +1117,21 @@ class JsepTransport2HeaderExtensionTest JsepTransport* sender_transport) { size_t rtp_len = sizeof(kPcmuFrameWithExtensions); size_t packet_size = rtp_len + GetRtpAuthLen(); - rtc::Buffer rtp_packet_buffer(packet_size); + Buffer rtp_packet_buffer(packet_size); char* rtp_packet_data = rtp_packet_buffer.data(); memcpy(rtp_packet_data, kPcmuFrameWithExtensions, rtp_len); // In order to be able to run this test function multiple times we can not // use the same sequence number twice. Increase the sequence number by one. - rtc::SetBE16(reinterpret_cast(rtp_packet_data) + 2, - ++sequence_number_); - rtc::CopyOnWriteBuffer rtp_packet(rtp_packet_data, rtp_len, packet_size); + webrtc::SetBE16(reinterpret_cast(rtp_packet_data) + 2, + ++sequence_number_); + CopyOnWriteBuffer rtp_packet(rtp_packet_data, rtp_len, packet_size); int packet_count_before = received_packet_count_; - rtc::PacketOptions options; + AsyncSocketPacketOptions options; // Send a packet and verify that the packet can be successfully received and // decrypted. ASSERT_TRUE(sender_transport->rtp_transport()->SendRtpPacket( - &rtp_packet, options, cricket::PF_SRTP_BYPASS)); + &rtp_packet, options, PF_SRTP_BYPASS)); EXPECT_EQ(packet_count_before + 1, received_packet_count_); } @@ -1239,25 +1148,18 @@ class JsepTransport2HeaderExtensionTest TEST_P(JsepTransport2HeaderExtensionTest, EncryptedHeaderExtensionNegotiation) { Scenario scenario = std::get<0>(GetParam()); bool use_gcm = std::get<1>(GetParam()); - SrtpMode mode = SrtpMode ::kDtlsSrtp; - if (scenario == Scenario::kSdes) { - mode = SrtpMode::kSdes; - } - CreateJsepTransportPair(mode); + CreateJsepTransportPair(); recv_encrypted_headers1_.push_back(kHeaderExtensionIDs[0]); recv_encrypted_headers2_.push_back(kHeaderExtensionIDs[1]); - cricket::CryptoParams sdes_param(1, rtc::kCsAesCm128HmacSha1_80, - "inline:" + rtc::CreateRandomString(40), - std::string()); if (use_gcm) { auto fake_dtls1 = static_cast(jsep_transport1_->rtp_dtls_transport()); auto fake_dtls2 = static_cast(jsep_transport2_->rtp_dtls_transport()); - fake_dtls1->SetSrtpCryptoSuite(rtc::kSrtpAeadAes256Gcm); - fake_dtls2->SetSrtpCryptoSuite(rtc::kSrtpAeadAes256Gcm); + fake_dtls1->SetSrtpCryptoSuite(webrtc::kSrtpAeadAes256Gcm); + fake_dtls2->SetSrtpCryptoSuite(webrtc::kSrtpAeadAes256Gcm); } if (scenario == Scenario::kDtlsBeforeCallerSendOffer) { @@ -1266,9 +1168,6 @@ TEST_P(JsepTransport2HeaderExtensionTest, EncryptedHeaderExtensionNegotiation) { JsepTransportDescription offer_desc; offer_desc.encrypted_header_extension_ids = recv_encrypted_headers1_; - if (scenario == Scenario::kSdes) { - offer_desc.cryptos.push_back(sdes_param); - } ASSERT_TRUE( jsep_transport1_ ->SetLocalJsepTransportDescription(offer_desc, SdpType::kOffer) @@ -1280,9 +1179,6 @@ TEST_P(JsepTransport2HeaderExtensionTest, EncryptedHeaderExtensionNegotiation) { JsepTransportDescription answer_desc; answer_desc.encrypted_header_extension_ids = recv_encrypted_headers2_; - if (scenario == Scenario::kSdes) { - answer_desc.cryptos.push_back(sdes_param); - } ASSERT_TRUE( jsep_transport2_ ->SetLocalJsepTransportDescription(answer_desc, SdpType::kAnswer) @@ -1301,8 +1197,7 @@ TEST_P(JsepTransport2HeaderExtensionTest, EncryptedHeaderExtensionNegotiation) { ->SetRemoteJsepTransportDescription(answer_desc, SdpType::kAnswer) .ok()); - if (scenario == 
Scenario::kDtlsAfterCallerSetAnswer || - scenario == Scenario::kSdes) { + if (scenario == Scenario::kDtlsAfterCallerSetAnswer) { ConnectTransport(); } EXPECT_TRUE(jsep_transport1_->rtp_transport()->IsSrtpActive()); @@ -1341,7 +1236,6 @@ INSTANTIATE_TEST_SUITE_P( JsepTransport2Test, JsepTransport2HeaderExtensionTest, ::testing::Values( - std::make_tuple(Scenario::kSdes, false), std::make_tuple(Scenario::kDtlsBeforeCallerSendOffer, true), std::make_tuple(Scenario::kDtlsBeforeCallerSetAnswer, true), std::make_tuple(Scenario::kDtlsAfterCallerSetAnswer, true), @@ -1351,8 +1245,7 @@ INSTANTIATE_TEST_SUITE_P( // This test verifies the ICE parameters are properly applied to the transports. TEST_F(JsepTransport2Test, SetIceParametersWithRenomination) { - jsep_transport_ = - CreateJsepTransport2(/* rtcp_mux_enabled= */ true, SrtpMode::kDtlsSrtp); + jsep_transport_ = CreateJsepTransport2(/* rtcp_mux_enabled= */ true); JsepTransportDescription jsep_description; jsep_description.transport_desc = TransportDescription(kIceUfrag1, kIcePwd1); @@ -1364,9 +1257,9 @@ TEST_F(JsepTransport2Test, SetIceParametersWithRenomination) { auto fake_ice_transport = static_cast( jsep_transport_->rtp_dtls_transport()->ice_transport()); EXPECT_EQ(ICEMODE_FULL, fake_ice_transport->remote_ice_mode()); - EXPECT_EQ(kIceUfrag1, fake_ice_transport->ice_ufrag()); - EXPECT_EQ(kIcePwd1, fake_ice_transport->ice_pwd()); - EXPECT_TRUE(fake_ice_transport->ice_parameters().renomination); + EXPECT_EQ(kIceUfrag1, fake_ice_transport->local_ice_parameters()->ufrag); + EXPECT_EQ(kIcePwd1, fake_ice_transport->local_ice_parameters()->pwd); + EXPECT_TRUE(fake_ice_transport->local_ice_parameters()->renomination); jsep_description.transport_desc = TransportDescription(kIceUfrag2, kIcePwd2); jsep_description.transport_desc.AddOption(ICE_OPTION_RENOMINATION); @@ -1377,10 +1270,10 @@ TEST_F(JsepTransport2Test, SetIceParametersWithRenomination) { fake_ice_transport = static_cast( jsep_transport_->rtp_dtls_transport()->ice_transport()); EXPECT_EQ(ICEMODE_FULL, fake_ice_transport->remote_ice_mode()); - EXPECT_EQ(kIceUfrag2, fake_ice_transport->remote_ice_ufrag()); - EXPECT_EQ(kIcePwd2, fake_ice_transport->remote_ice_pwd()); - EXPECT_TRUE(fake_ice_transport->remote_ice_parameters().renomination); + EXPECT_EQ(kIceUfrag2, fake_ice_transport->remote_ice_parameters()->ufrag); + EXPECT_EQ(kIcePwd2, fake_ice_transport->remote_ice_parameters()->pwd); + EXPECT_TRUE(fake_ice_transport->remote_ice_parameters()->renomination); } } // namespace -} // namespace cricket +} // namespace webrtc diff --git a/pc/legacy_stats_collector.cc b/pc/legacy_stats_collector.cc index 3bc65ee3ee..87cc3eb7f8 100644 --- a/pc/legacy_stats_collector.cc +++ b/pc/legacy_stats_collector.cc @@ -15,31 +15,39 @@ #include #include -#include +#include +#include +#include #include +#include #include #include +#include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/audio/audio_processing_statistics.h" #include "api/audio_codecs/audio_encoder.h" #include "api/candidate.h" #include "api/data_channel_interface.h" #include "api/field_trials_view.h" +#include "api/legacy_stats_types.h" +#include "api/media_stream_interface.h" #include "api/media_types.h" +#include "api/peer_connection_interface.h" #include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/video/video_content_type.h" -#include "api/video/video_timing.h" #include "call/call.h" #include "media/base/media_channel.h" 
-#include "modules/audio_processing/include/audio_processing_statistics.h" +#include "p2p/base/connection_info.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" +#include "p2p/base/port.h" #include "pc/channel.h" #include "pc/channel_interface.h" #include "pc/data_channel_utils.h" +#include "pc/peer_connection_internal.h" #include "pc/rtp_receiver.h" #include "pc/rtp_receiver_proxy.h" #include "pc/rtp_sender_proxy.h" @@ -48,10 +56,11 @@ #include "rtc_base/checks.h" #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" +#include "rtc_base/network_constants.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/socket_address.h" +#include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/string_encode.h" #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -122,7 +131,7 @@ void CreateTrackReports(const TrackVector& tracks, } } -void ExtractCommonSendProperties(const cricket::MediaSenderInfo& info, +void ExtractCommonSendProperties(const MediaSenderInfo& info, StatsReport* report, bool use_standard_bytes_stats) { report->AddString(StatsReport::kStatsValueNameCodecName, info.codec_name); @@ -136,7 +145,7 @@ void ExtractCommonSendProperties(const cricket::MediaSenderInfo& info, } } -void ExtractCommonReceiveProperties(const cricket::MediaReceiverInfo& info, +void ExtractCommonReceiveProperties(const MediaReceiverInfo& info, StatsReport* report) { report->AddString(StatsReport::kStatsValueNameCodecName, info.codec_name); } @@ -174,7 +183,7 @@ void SetAudioProcessingStats(StatsReport* report, } } -void ExtractStats(const cricket::VoiceReceiverInfo& info, +void ExtractStats(const VoiceReceiverInfo& info, StatsReport* report, bool use_standard_bytes_stats) { ExtractCommonReceiveProperties(info, report); @@ -188,9 +197,10 @@ void ExtractStats(const cricket::VoiceReceiverInfo& info, {StatsReport::kStatsValueNameAccelerateRate, info.accelerate_rate}, {StatsReport::kStatsValueNamePreemptiveExpandRate, info.preemptive_expand_rate}, - {StatsReport::kStatsValueNameTotalAudioEnergy, info.total_output_energy}, + {StatsReport::kStatsValueNameTotalAudioEnergy, + static_cast(info.total_output_energy)}, {StatsReport::kStatsValueNameTotalSamplesDuration, - info.total_output_duration}}; + static_cast(info.total_output_duration)}}; const IntForAdd ints[] = { {StatsReport::kStatsValueNameCurrentDelayMs, info.delay_estimate_ms}, @@ -236,7 +246,7 @@ void ExtractStats(const cricket::VoiceReceiverInfo& info, report->AddString(StatsReport::kStatsValueNameMediaType, "audio"); } -void ExtractStats(const cricket::VoiceSenderInfo& info, +void ExtractStats(const VoiceSenderInfo& info, StatsReport* report, bool use_standard_bytes_stats) { ExtractCommonSendProperties(info, report, use_standard_bytes_stats); @@ -244,9 +254,10 @@ void ExtractStats(const cricket::VoiceSenderInfo& info, SetAudioProcessingStats(report, info.apm_statistics); const FloatForAdd floats[] = { - {StatsReport::kStatsValueNameTotalAudioEnergy, info.total_input_energy}, + {StatsReport::kStatsValueNameTotalAudioEnergy, + static_cast(info.total_input_energy)}, {StatsReport::kStatsValueNameTotalSamplesDuration, - info.total_input_duration}}; + static_cast(info.total_input_duration)}}; RTC_DCHECK_GE(info.audio_level, 0); const IntForAdd ints[] = { @@ -296,7 +307,7 @@ void ExtractStats(const cricket::VoiceSenderInfo& info, } } -void ExtractStats(const cricket::VideoReceiverInfo& info, +void ExtractStats(const VideoReceiverInfo& 
info, StatsReport* report, bool use_standard_bytes_stats) { ExtractCommonReceiveProperties(info, report); @@ -340,7 +351,8 @@ void ExtractStats(const cricket::VideoReceiverInfo& info, {StatsReport::kStatsValueNamePlisSent, info.plis_sent}, {StatsReport::kStatsValueNameRenderDelayMs, info.render_delay_ms}, {StatsReport::kStatsValueNameTargetDelayMs, info.target_delay_ms}, - {StatsReport::kStatsValueNameFramesDecoded, info.frames_decoded}, + {StatsReport::kStatsValueNameFramesDecoded, + static_cast(info.frames_decoded)}, }; for (const auto& i : ints) @@ -355,12 +367,11 @@ void ExtractStats(const cricket::VideoReceiverInfo& info, report->AddInt64(StatsReport::kStatsValueNameInterframeDelayMaxMs, info.interframe_delay_max_ms); - report->AddString( - StatsReport::kStatsValueNameContentType, - webrtc::videocontenttypehelpers::ToString(info.content_type)); + report->AddString(StatsReport::kStatsValueNameContentType, + videocontenttypehelpers::ToString(info.content_type)); } -void ExtractStats(const cricket::VideoSenderInfo& info, +void ExtractStats(const VideoSenderInfo& info, StatsReport* report, bool use_standard_bytes_stats) { ExtractCommonSendProperties(info, report, use_standard_bytes_stats); @@ -384,26 +395,29 @@ void ExtractStats(const cricket::VideoSenderInfo& info, info.encode_usage_percent}, {StatsReport::kStatsValueNameFirsReceived, info.firs_received}, {StatsReport::kStatsValueNameFrameHeightSent, info.send_frame_height}, - {StatsReport::kStatsValueNameFrameRateInput, round(info.framerate_input)}, + {StatsReport::kStatsValueNameFrameRateInput, + static_cast(round(info.framerate_input))}, {StatsReport::kStatsValueNameFrameRateSent, info.framerate_sent}, {StatsReport::kStatsValueNameFrameWidthSent, info.send_frame_width}, - {StatsReport::kStatsValueNameNacksReceived, info.nacks_received}, + {StatsReport::kStatsValueNameNacksReceived, + static_cast(info.nacks_received)}, {StatsReport::kStatsValueNamePacketsLost, info.packets_lost}, {StatsReport::kStatsValueNamePacketsSent, info.packets_sent}, {StatsReport::kStatsValueNamePlisReceived, info.plis_received}, - {StatsReport::kStatsValueNameFramesEncoded, info.frames_encoded}, - {StatsReport::kStatsValueNameHugeFramesSent, info.huge_frames_sent}, + {StatsReport::kStatsValueNameFramesEncoded, + static_cast(info.frames_encoded)}, + {StatsReport::kStatsValueNameHugeFramesSent, + static_cast(info.huge_frames_sent)}, }; for (const auto& i : ints) report->AddInt(i.name, i.value); report->AddString(StatsReport::kStatsValueNameMediaType, "video"); - report->AddString( - StatsReport::kStatsValueNameContentType, - webrtc::videocontenttypehelpers::ToString(info.content_type)); + report->AddString(StatsReport::kStatsValueNameContentType, + videocontenttypehelpers::ToString(info.content_type)); } -void ExtractStats(const cricket::BandwidthEstimationInfo& info, +void ExtractStats(const BandwidthEstimationInfo& info, double stats_gathering_started, StatsReport* report) { RTC_DCHECK(report->type() == StatsReport::kStatsReportTypeBwe); @@ -424,14 +438,12 @@ void ExtractStats(const cricket::BandwidthEstimationInfo& info, report->AddInt64(StatsReport::kStatsValueNameBucketDelay, info.bucket_delay); } -void ExtractRemoteStats(const cricket::MediaSenderInfo& info, - StatsReport* report) { +void ExtractRemoteStats(const MediaSenderInfo& info, StatsReport* report) { report->set_timestamp(info.remote_stats[0].timestamp); // TODO(hta): Extract some stats here. 
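// Illustrative sketch, not part of the patch above: why the added
// static_cast<>() wrappers are needed when counters go into the
// {name, value} helper arrays. Brace initialization rejects narrowing
// conversions, so e.g. a uint32_t frame counter must be cast to the field
// type explicitly. IntForAdd here is a stand-in for the helper struct used
// in legacy_stats_collector.cc.
#include <cstdint>

struct IntForAdd {
  const char* name;
  int value;
};

int FramesDecodedValue(uint32_t frames_decoded) {
  // const IntForAdd bad[] = {{"framesDecoded", frames_decoded}};  // narrowing: ill-formed
  const IntForAdd ints[] = {
      {"framesDecoded", static_cast<int>(frames_decoded)},
  };
  return ints[0].value;
}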
} -void ExtractRemoteStats(const cricket::MediaReceiverInfo& info, - StatsReport* report) { +void ExtractRemoteStats(const MediaReceiverInfo& info, StatsReport* report) { report->set_timestamp(info.remote_stats[0].timestamp); // TODO(hta): Extract some stats here. } @@ -491,42 +503,51 @@ void ExtractStatsFromList( } // namespace -const char* IceCandidateTypeToStatsType(const std::string& candidate_type) { - if (candidate_type == cricket::LOCAL_PORT_TYPE) { +const char* IceCandidateTypeToStatsType(const Candidate& candidate) { + if (candidate.is_local()) { return STATSREPORT_LOCAL_PORT_TYPE; } - if (candidate_type == cricket::STUN_PORT_TYPE) { + if (candidate.is_stun()) { return STATSREPORT_STUN_PORT_TYPE; } - if (candidate_type == cricket::PRFLX_PORT_TYPE) { + if (candidate.is_prflx()) { return STATSREPORT_PRFLX_PORT_TYPE; } - if (candidate_type == cricket::RELAY_PORT_TYPE) { + if (candidate.is_relay()) { return STATSREPORT_RELAY_PORT_TYPE; } RTC_DCHECK_NOTREACHED(); return "unknown"; } -const char* AdapterTypeToStatsType(rtc::AdapterType type) { +// Return std::string to make sure that the type remains kString compatible. +std::string GetLegacyCandidateTypeName(const Candidate& c) { + if (c.is_local()) + return "local"; + if (c.is_stun()) + return "stun"; + return std::string(c.type_name()); +} + +const char* AdapterTypeToStatsType(AdapterType type) { switch (type) { - case rtc::ADAPTER_TYPE_UNKNOWN: + case ADAPTER_TYPE_UNKNOWN: return "unknown"; - case rtc::ADAPTER_TYPE_ETHERNET: + case ADAPTER_TYPE_ETHERNET: return STATSREPORT_ADAPTER_TYPE_ETHERNET; - case rtc::ADAPTER_TYPE_WIFI: + case ADAPTER_TYPE_WIFI: return STATSREPORT_ADAPTER_TYPE_WIFI; - case rtc::ADAPTER_TYPE_CELLULAR: - case rtc::ADAPTER_TYPE_CELLULAR_2G: - case rtc::ADAPTER_TYPE_CELLULAR_3G: - case rtc::ADAPTER_TYPE_CELLULAR_4G: - case rtc::ADAPTER_TYPE_CELLULAR_5G: + case ADAPTER_TYPE_CELLULAR: + case ADAPTER_TYPE_CELLULAR_2G: + case ADAPTER_TYPE_CELLULAR_3G: + case ADAPTER_TYPE_CELLULAR_4G: + case ADAPTER_TYPE_CELLULAR_5G: return STATSREPORT_ADAPTER_TYPE_WWAN; - case rtc::ADAPTER_TYPE_VPN: + case ADAPTER_TYPE_VPN: return STATSREPORT_ADAPTER_TYPE_VPN; - case rtc::ADAPTER_TYPE_LOOPBACK: + case ADAPTER_TYPE_LOOPBACK: return STATSREPORT_ADAPTER_TYPE_LOOPBACK; - case rtc::ADAPTER_TYPE_ANY: + case ADAPTER_TYPE_ANY: return STATSREPORT_ADAPTER_TYPE_WILDCARD; default: RTC_DCHECK_NOTREACHED(); @@ -548,7 +569,7 @@ LegacyStatsCollector::~LegacyStatsCollector() { // Wallclock time in ms. double LegacyStatsCollector::GetTimeNow() { - return static_cast(rtc::TimeUTCMillis()); + return static_cast(TimeUTCMillis()); } // Adds a MediaStream with tracks that can be used as a `selector` in a call @@ -616,7 +637,7 @@ void LegacyStatsCollector::GetStats(MediaStreamTrackInterface* track, RTC_DCHECK(reports != NULL); RTC_DCHECK(reports->empty()); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; if (!track) { reports->reserve(reports_.size()); @@ -657,7 +678,7 @@ void LegacyStatsCollector::UpdateStats( // will be ignored. Using a monotonic clock specifically for this, while using // a UTC clock for the reports themselves. const int64_t kMinGatherStatsPeriodMs = 50; - int64_t cache_now_ms = rtc::TimeMillis(); + int64_t cache_now_ms = TimeMillis(); if (cache_timestamp_ms_ != 0 && cache_timestamp_ms_ + kMinGatherStatsPeriodMs > cache_now_ms) { return; @@ -693,7 +714,7 @@ StatsReport* LegacyStatsCollector::PrepareReport( StatsReport::Id id(StatsReport::NewIdWithDirection( local ? 
StatsReport::kStatsReportTypeSsrc : StatsReport::kStatsReportTypeRemoteSsrc, - rtc::ToString(ssrc), direction)); + absl::StrCat(ssrc), direction)); StatsReport* report = reports_.Find(id); if (!report) { report = reports_.InsertNew(id); @@ -725,12 +746,12 @@ bool LegacyStatsCollector::IsValidTrack(const std::string& track_id) { } StatsReport* LegacyStatsCollector::AddCertificateReports( - std::unique_ptr cert_stats) { + std::unique_ptr cert_stats) { RTC_DCHECK_RUN_ON(pc_->signaling_thread()); StatsReport* first_report = nullptr; StatsReport* prev_report = nullptr; - for (rtc::SSLCertificateStats* stats = cert_stats.get(); stats; + for (SSLCertificateStats* stats = cert_stats.get(); stats; stats = stats->issuer.get()) { StatsReport::Id id(StatsReport::NewTypedId( StatsReport::kStatsReportTypeCertificate, stats->fingerprint)); @@ -757,7 +778,7 @@ StatsReport* LegacyStatsCollector::AddConnectionInfoReport( int component, int connection_id, const StatsReport::Id& channel_report_id, - const cricket::ConnectionInfo& info) { + const ConnectionInfo& info) { StatsReport::Id id( StatsReport::NewCandidatePairId(content_name, component, connection_id)); StatsReport* report = reports_.ReplaceOrAddNew(id); @@ -772,27 +793,33 @@ StatsReport* LegacyStatsCollector::AddConnectionInfoReport( report->AddBoolean(b.name, b.value); report->AddId(StatsReport::kStatsValueNameChannelId, channel_report_id); - cricket::CandidateStats local_candidate_stats(info.local_candidate); - cricket::CandidateStats remote_candidate_stats(info.remote_candidate); + CandidateStats local_candidate_stats(info.local_candidate); + CandidateStats remote_candidate_stats(info.remote_candidate); report->AddId(StatsReport::kStatsValueNameLocalCandidateId, AddCandidateReport(local_candidate_stats, true)->id()); report->AddId(StatsReport::kStatsValueNameRemoteCandidateId, AddCandidateReport(remote_candidate_stats, false)->id()); const Int64ForAdd int64s[] = { - {StatsReport::kStatsValueNameBytesReceived, info.recv_total_bytes}, - {StatsReport::kStatsValueNameBytesSent, info.sent_total_bytes}, - {StatsReport::kStatsValueNamePacketsSent, info.sent_total_packets}, - {StatsReport::kStatsValueNameRtt, info.rtt}, + {StatsReport::kStatsValueNameBytesReceived, + static_cast(info.recv_total_bytes)}, + {StatsReport::kStatsValueNameBytesSent, + static_cast(info.sent_total_bytes)}, + {StatsReport::kStatsValueNamePacketsSent, + static_cast(info.sent_total_packets)}, + {StatsReport::kStatsValueNameRtt, static_cast(info.rtt)}, {StatsReport::kStatsValueNameSendPacketsDiscarded, - info.sent_discarded_packets}, + static_cast(info.sent_discarded_packets)}, {StatsReport::kStatsValueNameSentPingRequestsTotal, - info.sent_ping_requests_total}, + static_cast(info.sent_ping_requests_total)}, {StatsReport::kStatsValueNameSentPingRequestsBeforeFirstResponse, - info.sent_ping_requests_before_first_response}, - {StatsReport::kStatsValueNameSentPingResponses, info.sent_ping_responses}, - {StatsReport::kStatsValueNameRecvPingRequests, info.recv_ping_requests}, - {StatsReport::kStatsValueNameRecvPingResponses, info.recv_ping_responses}, + static_cast(info.sent_ping_requests_before_first_response)}, + {StatsReport::kStatsValueNameSentPingResponses, + static_cast(info.sent_ping_responses)}, + {StatsReport::kStatsValueNameRecvPingRequests, + static_cast(info.recv_ping_requests)}, + {StatsReport::kStatsValueNameRecvPingResponses, + static_cast(info.recv_ping_responses)}, }; for (const auto& i : int64s) report->AddInt64(i.name, i.value); @@ -800,11 +827,11 @@ StatsReport* 
LegacyStatsCollector::AddConnectionInfoReport( report->AddString(StatsReport::kStatsValueNameLocalAddress, info.local_candidate.address().ToString()); report->AddString(StatsReport::kStatsValueNameLocalCandidateType, - info.local_candidate.type()); + GetLegacyCandidateTypeName(info.local_candidate)); report->AddString(StatsReport::kStatsValueNameRemoteAddress, info.remote_candidate.address().ToString()); report->AddString(StatsReport::kStatsValueNameRemoteCandidateType, - info.remote_candidate.type()); + GetLegacyCandidateTypeName(info.remote_candidate)); report->AddString(StatsReport::kStatsValueNameTransportType, info.local_candidate.protocol()); report->AddString(StatsReport::kStatsValueNameLocalCandidateRelayProtocol, @@ -814,7 +841,7 @@ StatsReport* LegacyStatsCollector::AddConnectionInfoReport( } StatsReport* LegacyStatsCollector::AddCandidateReport( - const cricket::CandidateStats& candidate_stats, + const CandidateStats& candidate_stats, bool local) { const auto& candidate = candidate_stats.candidate(); StatsReport::Id id(StatsReport::NewCandidateId(local, candidate.id())); @@ -833,7 +860,7 @@ StatsReport* LegacyStatsCollector::AddCandidateReport( report->AddInt(StatsReport::kStatsValueNameCandidatePriority, candidate.priority()); report->AddString(StatsReport::kStatsValueNameCandidateType, - IceCandidateTypeToStatsType(candidate.type())); + IceCandidateTypeToStatsType(candidate)); report->AddString(StatsReport::kStatsValueNameCandidateTransportType, candidate.protocol()); } @@ -880,17 +907,17 @@ LegacyStatsCollector::ExtractSessionAndDataInfo() { } LegacyStatsCollector::SessionStats LegacyStatsCollector::ExtractSessionInfo_n( - const std::vector>>& transceivers, - absl::optional sctp_transport_name, - absl::optional sctp_mid) { + std::optional sctp_transport_name, + std::optional sctp_mid) { TRACE_EVENT0("webrtc", "LegacyStatsCollector::ExtractSessionInfo_n"); RTC_DCHECK_RUN_ON(pc_->network_thread()); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; SessionStats stats; stats.candidate_stats = pc_->GetPooledCandidateStats(); for (auto& transceiver : transceivers) { - cricket::ChannelInterface* channel = transceiver->internal()->channel(); + ChannelInterface* channel = transceiver->internal()->channel(); if (channel) { stats.transport_names_by_mid[channel->mid()] = std::string(channel->transport_name()); @@ -907,7 +934,7 @@ LegacyStatsCollector::SessionStats LegacyStatsCollector::ExtractSessionInfo_n( transport_names.insert(entry.second); } - std::map transport_stats_by_name = + std::map transport_stats_by_name = pc_->GetTransportStatsByNames(transport_names); for (auto& entry : transport_stats_by_name) { @@ -919,13 +946,13 @@ LegacyStatsCollector::SessionStats LegacyStatsCollector::ExtractSessionInfo_n( // same local and remote certificates. 
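// Illustrative sketch, not part of the patch above: the
// absl::optional -> std::optional change in ExtractSessionInfo_n is a
// spelling update; under C++17 Abseil normally aliases absl::optional to
// std::optional, so call sites keep the same shape.
#include <optional>
#include <string>

std::optional<std::string> SctpMidIfUsed(bool has_sctp) {
  if (!has_sctp) {
    return std::nullopt;  // previously spelled absl::nullopt
  }
  return std::string("mid");
}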
// StatsReport::Id local_cert_report_id, remote_cert_report_id; - rtc::scoped_refptr certificate; + scoped_refptr certificate; if (pc_->GetLocalCertificate(transport.name, &certificate)) { transport.local_cert_stats = certificate->GetSSLCertificateChain().GetStats(); } - std::unique_ptr remote_cert_chain = + std::unique_ptr remote_cert_chain = pc_->GetRemoteSSLCertChain(transport.name); if (remote_cert_chain) { transport.remote_cert_stats = remote_cert_chain->GetStats(); @@ -937,7 +964,7 @@ LegacyStatsCollector::SessionStats LegacyStatsCollector::ExtractSessionInfo_n( void LegacyStatsCollector::ExtractSessionInfo_s(SessionStats& session_stats) { RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; StatsReport::Id id(StatsReport::NewTypedId( StatsReport::kStatsReportTypeSession, pc_->session_id())); @@ -946,7 +973,7 @@ void LegacyStatsCollector::ExtractSessionInfo_s(SessionStats& session_stats) { report->AddBoolean(StatsReport::kStatsValueNameInitiator, pc_->initial_offerer()); - for (const cricket::CandidateStats& stats : session_stats.candidate_stats) { + for (const CandidateStats& stats : session_stats.candidate_stats) { AddCandidateReport(stats, true); } @@ -986,19 +1013,15 @@ void LegacyStatsCollector::ExtractSessionInfo_s(SessionStats& session_stats) { remote_cert_report_id); } int srtp_crypto_suite = channel_iter.srtp_crypto_suite; - if (srtp_crypto_suite != rtc::kSrtpInvalidCryptoSuite && - rtc::SrtpCryptoSuiteToName(srtp_crypto_suite).length()) { - channel_report->AddString( - StatsReport::kStatsValueNameSrtpCipher, - rtc::SrtpCryptoSuiteToName(srtp_crypto_suite)); + if (srtp_crypto_suite != kSrtpInvalidCryptoSuite && + SrtpCryptoSuiteToName(srtp_crypto_suite).length()) { + channel_report->AddString(StatsReport::kStatsValueNameSrtpCipher, + SrtpCryptoSuiteToName(srtp_crypto_suite)); } - int ssl_cipher_suite = channel_iter.ssl_cipher_suite; - if (ssl_cipher_suite != rtc::kTlsNullWithNullNull && - rtc::SSLStreamAdapter::SslCipherSuiteToName(ssl_cipher_suite) - .length()) { + if (channel_iter.tls_cipher_suite_name) { channel_report->AddString( StatsReport::kStatsValueNameDtlsCipher, - rtc::SSLStreamAdapter::SslCipherSuiteToName(ssl_cipher_suite)); + std::string(*channel_iter.tls_cipher_suite_name)); } // Collect stats for non-pooled candidates. Note that the reports @@ -1006,13 +1029,13 @@ void LegacyStatsCollector::ExtractSessionInfo_s(SessionStats& session_stats) { // AddConnectionInfoReport below, and they may report candidates that are // not paired. Also, the candidate report generated in // AddConnectionInfoReport do not report port stats like StunStats. 
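// Illustrative sketch, not part of the patch above: the DTLS cipher is now
// reported from an already-resolved name (the optional string
// tls_cipher_suite_name) instead of an integer suite id that had to be run
// through a name lookup. ChannelStatsSketch and ReportSketch are stand-ins.
#include <optional>
#include <string>

struct ChannelStatsSketch {
  std::optional<std::string> tls_cipher_suite_name;
};

struct ReportSketch {
  std::string dtls_cipher;
};

void AddDtlsCipher(const ChannelStatsSketch& stats, ReportSketch& report) {
  if (stats.tls_cipher_suite_name) {
    // Presence check plus copy; no id-to-name table is involved any more.
    report.dtls_cipher = *stats.tls_cipher_suite_name;
  }
}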
- for (const cricket::CandidateStats& stats : + for (const CandidateStats& stats : channel_iter.ice_transport_stats.candidate_stats_list) { AddCandidateReport(stats, true); } int connection_id = 0; - for (const cricket::ConnectionInfo& info : + for (const ConnectionInfo& info : channel_iter.ice_transport_stats.connection_infos) { StatsReport* connection_report = AddConnectionInfoReport( transport.name, channel_iter.component, connection_id++, @@ -1033,8 +1056,8 @@ void LegacyStatsCollector::ExtractBweInfo() { if (pc_->signaling_state() == PeerConnectionInterface::kClosed) return; - webrtc::Call::Stats call_stats = pc_->GetCallStats(); - cricket::BandwidthEstimationInfo bwe_info; + Call::Stats call_stats = pc_->GetCallStats(); + BandwidthEstimationInfo bwe_info; bwe_info.available_send_bandwidth = call_stats.send_bandwidth_bps; bwe_info.available_recv_bandwidth = call_stats.recv_bandwidth_bps; bwe_info.bucket_delay = call_stats.pacer_delay_ms; @@ -1042,9 +1065,9 @@ void LegacyStatsCollector::ExtractBweInfo() { // Fill in target encoder bitrate, actual encoder bitrate, rtx bitrate, etc. // TODO(holmer): Also fill this in for audio. auto transceivers = pc_->GetTransceiversInternal(); - std::vector video_media_channels; + std::vector video_media_channels; for (const auto& transceiver : transceivers) { - if (transceiver->media_type() != cricket::MEDIA_TYPE_VIDEO) { + if (transceiver->media_type() != webrtc::MediaType::VIDEO) { continue; } auto* video_channel = transceiver->internal()->channel(); @@ -1091,7 +1114,7 @@ class ChannelStatsGatherer { const std::vector& sender_data) const { RTC_DCHECK(collector); StatsReport::Id transport_id = StatsReport::NewComponentId( - transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTP); + transport_name, ICE_CANDIDATE_COMPONENT_RTP); ExtractStatsFromList(receiver_data, transport_id, collector, StatsReport::kReceive, receiver_track_id_by_ssrc); ExtractStatsFromList(sender_data, transport_id, collector, @@ -1101,22 +1124,22 @@ class ChannelStatsGatherer { class VoiceChannelStatsGatherer final : public ChannelStatsGatherer { public: - explicit VoiceChannelStatsGatherer(cricket::VoiceChannel* voice_channel) + explicit VoiceChannelStatsGatherer(VoiceChannel* voice_channel) : voice_channel_(voice_channel) { RTC_DCHECK(voice_channel_); } bool GetStatsOnWorkerThread() override { - cricket::VoiceMediaSendInfo send_info; - cricket::VoiceMediaReceiveInfo receive_info; + VoiceMediaSendInfo send_info; + VoiceMediaReceiveInfo receive_info; bool success = voice_channel_->voice_media_send_channel()->GetStats(&send_info); success &= voice_channel_->voice_media_receive_channel()->GetStats( &receive_info, /*get_and_clear_legacy_stats=*/true); if (success) { - voice_media_info = cricket::VoiceMediaInfo(std::move(send_info), - std::move(receive_info)); + voice_media_info = + VoiceMediaInfo(std::move(send_info), std::move(receive_info)); } return success; } @@ -1137,27 +1160,27 @@ class VoiceChannelStatsGatherer final : public ChannelStatsGatherer { } private: - cricket::VoiceChannel* voice_channel_; - cricket::VoiceMediaInfo voice_media_info; + VoiceChannel* voice_channel_; + VoiceMediaInfo voice_media_info; }; class VideoChannelStatsGatherer final : public ChannelStatsGatherer { public: - explicit VideoChannelStatsGatherer(cricket::VideoChannel* video_channel) + explicit VideoChannelStatsGatherer(VideoChannel* video_channel) : video_channel_(video_channel) { RTC_DCHECK(video_channel_); } bool GetStatsOnWorkerThread() override { - cricket::VideoMediaSendInfo send_info; - 
cricket::VideoMediaReceiveInfo receive_info; + VideoMediaSendInfo send_info; + VideoMediaReceiveInfo receive_info; bool success = video_channel_->video_media_send_channel()->GetStats(&send_info); success &= video_channel_->video_media_receive_channel()->GetStats(&receive_info); if (success) { - video_media_info = cricket::VideoMediaInfo(std::move(send_info), - std::move(receive_info)); + video_media_info = + VideoMediaInfo(std::move(send_info), std::move(receive_info)); } return success; } @@ -1170,18 +1193,18 @@ class VideoChannelStatsGatherer final : public ChannelStatsGatherer { bool HasRemoteAudio() const override { return false; } private: - cricket::VideoChannel* video_channel_; - cricket::VideoMediaInfo video_media_info; + VideoChannel* video_channel_; + VideoMediaInfo video_media_info; }; std::unique_ptr CreateChannelStatsGatherer( - cricket::ChannelInterface* channel) { + ChannelInterface* channel) { RTC_DCHECK(channel); - if (channel->media_type() == cricket::MEDIA_TYPE_AUDIO) { + if (channel->media_type() == webrtc::MediaType::AUDIO) { return std::make_unique( channel->AsVoiceChannel()); } else { - RTC_DCHECK_EQ(channel->media_type(), cricket::MEDIA_TYPE_VIDEO); + RTC_DCHECK_EQ(channel->media_type(), webrtc::MediaType::VIDEO); return std::make_unique( channel->AsVideoChannel()); } @@ -1197,9 +1220,9 @@ void LegacyStatsCollector::ExtractMediaInfo( auto transceivers = pc_->GetTransceiversInternal(); { - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const auto& transceiver : transceivers) { - cricket::ChannelInterface* channel = transceiver->internal()->channel(); + ChannelInterface* channel = transceiver->internal()->channel(); if (!channel) { continue; } @@ -1224,11 +1247,11 @@ void LegacyStatsCollector::ExtractMediaInfo( } pc_->worker_thread()->BlockingCall([&] { - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; // Populate `receiver_track_id_by_ssrc` for the gatherers. 
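// Illustrative sketch, not part of the patch above: the cricket::MEDIA_TYPE_*
// constants are replaced by a scoped enum, so the gatherer factory dispatches
// on MediaType::AUDIO / MediaType::VIDEO. Stand-in types only.
#include <memory>

enum class MediaType { AUDIO, VIDEO };

struct GathererSketch {
  virtual ~GathererSketch() = default;
};
struct AudioGathererSketch : GathererSketch {};
struct VideoGathererSketch : GathererSketch {};

std::unique_ptr<GathererSketch> CreateGathererSketch(MediaType type) {
  if (type == MediaType::AUDIO) {
    return std::make_unique<AudioGathererSketch>();
  }
  // Only audio and video channels are expected here, as in the real factory.
  return std::make_unique<VideoGathererSketch>();
}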
int i = 0; for (const auto& transceiver : transceivers) { - cricket::ChannelInterface* channel = transceiver->internal()->channel(); + ChannelInterface* channel = transceiver->internal()->channel(); if (!channel) continue; ChannelStatsGatherer* gatherer = gatherers[i++].get(); @@ -1253,7 +1276,7 @@ void LegacyStatsCollector::ExtractMediaInfo( } }); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; bool has_remote_audio = false; for (const auto& gatherer : gatherers) { @@ -1273,7 +1296,7 @@ void LegacyStatsCollector::ExtractSenderInfo() { if (!sender->ssrc()) { continue; } - const rtc::scoped_refptr track(sender->track()); + const scoped_refptr track(sender->track()); if (!track || track->kind() != MediaStreamTrackInterface::kVideoKind) { continue; } @@ -1287,7 +1310,7 @@ void LegacyStatsCollector::ExtractSenderInfo() { continue; } const StatsReport::Id stats_id = StatsReport::NewIdWithDirection( - StatsReport::kStatsReportTypeSsrc, rtc::ToString(sender->ssrc()), + StatsReport::kStatsReportTypeSsrc, absl::StrCat(sender->ssrc()), StatsReport::kSend); StatsReport* report = reports_.FindOrAddNew(stats_id); report->AddInt(StatsReport::kStatsValueNameFrameWidthInput, @@ -1300,7 +1323,7 @@ void LegacyStatsCollector::ExtractSenderInfo() { void LegacyStatsCollector::ExtractDataInfo_n(StatsCollection* reports) { RTC_DCHECK_RUN_ON(pc_->network_thread()); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; std::vector data_stats = pc_->GetDataChannelStats(); for (const auto& stats : data_stats) { @@ -1336,7 +1359,7 @@ void LegacyStatsCollector::UpdateStatsFromExistingLocalAudioTracks( AudioTrackInterface* track = it.first; uint32_t ssrc = it.second; StatsReport* report = GetReport(StatsReport::kStatsReportTypeSsrc, - rtc::ToString(ssrc), StatsReport::kSend); + absl::StrCat(ssrc), StatsReport::kSend); if (report == NULL) { // This can happen if a local audio track is added to a stream on the // fly and the report has not been set up yet. Do nothing in this case. @@ -1382,7 +1405,7 @@ void LegacyStatsCollector::UpdateReportFromAudioTrack( void LegacyStatsCollector::UpdateTrackReports() { RTC_DCHECK_RUN_ON(pc_->signaling_thread()); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const auto& entry : track_ids_) { StatsReport* report = entry.second; diff --git a/pc/legacy_stats_collector.h b/pc/legacy_stats_collector.h index e905b39d48..e2786b1f21 100644 --- a/pc/legacy_stats_collector.h +++ b/pc/legacy_stats_collector.h @@ -16,17 +16,15 @@ #include -#include #include #include #include +#include #include -#include #include #include -#include "absl/types/optional.h" -#include "api/field_trials_view.h" +#include "api/candidate.h" #include "api/legacy_stats_types.h" #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" @@ -45,12 +43,12 @@ namespace webrtc { // Conversion function to convert candidate type string to the corresponding one // from enum RTCStatsIceCandidateType. -const char* IceCandidateTypeToStatsType(const std::string& candidate_type); +const char* IceCandidateTypeToStatsType(const Candidate& candidate); // Conversion function to convert adapter type to report string which are more // fitting to the general style of http://w3c.github.io/webrtc-stats. This is // only used by stats collector. 
-const char* AdapterTypeToStatsType(rtc::AdapterType type); +const char* AdapterTypeToStatsType(AdapterType type); // A mapping between track ids and their StatsReport. typedef std::map TrackIdMap; @@ -119,15 +117,15 @@ class LegacyStatsCollector : public LegacyStatsCollectorInterface { struct TransportStats { TransportStats() = default; TransportStats(std::string transport_name, - cricket::TransportStats transport_stats) + ::webrtc::TransportStats transport_stats) : name(std::move(transport_name)), stats(std::move(transport_stats)) {} TransportStats(TransportStats&&) = default; TransportStats(const TransportStats&) = delete; std::string name; - cricket::TransportStats stats; - std::unique_ptr local_cert_stats; - std::unique_ptr remote_cert_stats; + ::webrtc::TransportStats stats; + std::unique_ptr local_cert_stats; + std::unique_ptr remote_cert_stats; }; struct SessionStats { @@ -138,7 +136,7 @@ class LegacyStatsCollector : public LegacyStatsCollectorInterface { SessionStats& operator=(SessionStats&&) = default; SessionStats& operator=(SessionStats&) = delete; - cricket::CandidateStatsList candidate_stats; + CandidateStatsList candidate_stats; std::vector transport_stats; std::map transport_names_by_mid; }; @@ -150,20 +148,19 @@ class LegacyStatsCollector : public LegacyStatsCollectorInterface { // Helper method for creating IceCandidate report. `is_local` indicates // whether this candidate is local or remote. - StatsReport* AddCandidateReport( - const cricket::CandidateStats& candidate_stats, - bool local); + StatsReport* AddCandidateReport(const CandidateStats& candidate_stats, + bool local); // Adds a report for this certificate and every certificate in its chain, and // returns the leaf certificate's report (`cert_stats`'s report). StatsReport* AddCertificateReports( - std::unique_ptr cert_stats); + std::unique_ptr cert_stats); StatsReport* AddConnectionInfoReport(const std::string& content_name, int component, int connection_id, const StatsReport::Id& channel_report_id, - const cricket::ConnectionInfo& info); + const ConnectionInfo& info); void ExtractDataInfo_n(StatsCollection* reports); @@ -177,9 +174,9 @@ class LegacyStatsCollector : public LegacyStatsCollectorInterface { void ExtractMediaInfo( const std::map& transport_names_by_mid); void ExtractSenderInfo(); - webrtc::StatsReport* GetReport(const StatsReport::StatsType& type, - const std::string& id, - StatsReport::Direction direction); + StatsReport* GetReport(const StatsReport::StatsType& type, + const std::string& id, + StatsReport::Direction direction); // Helper method to get stats from the local audio tracks. void UpdateStatsFromExistingLocalAudioTracks(bool has_remote_tracks); @@ -191,10 +188,10 @@ class LegacyStatsCollector : public LegacyStatsCollectorInterface { void UpdateTrackReports(); SessionStats ExtractSessionInfo_n( - const std::vector>>& transceivers, - absl::optional sctp_transport_name, - absl::optional sctp_mid); + std::optional sctp_transport_name, + std::optional sctp_mid); void ExtractSessionInfo_s(SessionStats& session_stats); // A collection for all of our stats reports. 
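// Illustrative sketch, not part of the patch above: IceCandidateTypeToStatsType
// now receives the whole Candidate and branches on its type predicates
// (is_local()/is_stun()/is_prflx()/is_relay()) instead of comparing type
// strings. CandidateSketch is a stand-in; the real function returns the
// STATSREPORT_*_PORT_TYPE constants rather than the plain labels used here.
enum class IceCandidateTypeSketch { kHost, kSrflx, kPrflx, kRelay };

struct CandidateSketch {
  IceCandidateTypeSketch type = IceCandidateTypeSketch::kHost;
  bool is_local() const { return type == IceCandidateTypeSketch::kHost; }
  bool is_stun() const { return type == IceCandidateTypeSketch::kSrflx; }
  bool is_prflx() const { return type == IceCandidateTypeSketch::kPrflx; }
  bool is_relay() const { return type == IceCandidateTypeSketch::kRelay; }
};

const char* ToStatsTypeSketch(const CandidateSketch& c) {
  if (c.is_local()) return "local";
  if (c.is_stun()) return "stun";
  if (c.is_prflx()) return "prflx";
  if (c.is_relay()) return "relay";
  return "unknown";
}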
diff --git a/pc/legacy_stats_collector_unittest.cc b/pc/legacy_stats_collector_unittest.cc index 3099d1188a..1f02180733 100644 --- a/pc/legacy_stats_collector_unittest.cc +++ b/pc/legacy_stats_collector_unittest.cc @@ -13,23 +13,32 @@ #include #include +#include +#include +#include +#include +#include #include "absl/algorithm/container.h" -#include "absl/types/optional.h" +#include "absl/strings/str_cat.h" +#include "api/audio/audio_processing_statistics.h" #include "api/audio_codecs/audio_encoder.h" #include "api/candidate.h" #include "api/data_channel_interface.h" +#include "api/legacy_stats_types.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" #include "api/media_stream_track.h" #include "api/media_types.h" +#include "api/peer_connection_interface.h" #include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" #include "call/call.h" #include "media/base/media_channel.h" -#include "modules/audio_processing/include/audio_processing_statistics.h" +#include "p2p/base/connection_info.h" #include "p2p/base/ice_transport_internal.h" #include "pc/media_stream.h" -#include "pc/rtp_receiver.h" -#include "pc/rtp_sender.h" +#include "pc/peer_connection_internal.h" #include "pc/sctp_data_channel.h" #include "pc/test/fake_peer_connection_for_stats.h" #include "pc/test/fake_video_track_source.h" @@ -37,41 +46,38 @@ #include "pc/test/mock_rtp_sender_internal.h" #include "pc/transport_stats.h" #include "pc/video_track.h" +#include "rtc_base/base64.h" +#include "rtc_base/checks.h" #include "rtc_base/fake_ssl_identity.h" #include "rtc_base/message_digest.h" #include "rtc_base/net_helper.h" +#include "rtc_base/network_constants.h" #include "rtc_base/null_socket_server.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/socket_address.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/string_encode.h" -#include "rtc_base/third_party/base64/base64.h" #include "rtc_base/thread.h" #include "test/gmock.h" #include "test/gtest.h" -using cricket::ConnectionInfo; -using cricket::SsrcReceiverInfo; -using cricket::TransportChannelStats; -using cricket::VideoMediaInfo; -using cricket::VideoReceiverInfo; -using cricket::VideoSenderInfo; -using cricket::VoiceMediaInfo; -using cricket::VoiceReceiverInfo; -using cricket::VoiceSenderInfo; using ::testing::_; using ::testing::AtMost; +using ::testing::Eq; using ::testing::Return; using ::testing::UnorderedElementsAre; +using ::webrtc::ConnectionInfo; +using ::webrtc::SsrcReceiverInfo; +using ::webrtc::TransportChannelStats; +using ::webrtc::VideoMediaInfo; +using ::webrtc::VideoReceiverInfo; +using ::webrtc::VideoSenderInfo; +using ::webrtc::VoiceMediaInfo; +using ::webrtc::VoiceReceiverInfo; +using ::webrtc::VoiceSenderInfo; namespace webrtc { -namespace internal { -// This value comes from openssl/tls1.h -static const int TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014; -} // namespace internal - // Error return values const char kNotFound[] = "NOT FOUND"; @@ -103,7 +109,7 @@ class FakeAudioTrack : public MediaStreamTrack { public: explicit FakeAudioTrack(const std::string& id) : MediaStreamTrack(id), - processor_(rtc::make_ref_counted()) {} + processor_(make_ref_counted()) {} std::string kind() const override { return "audio"; } AudioSourceInterface* GetSource() const override { return NULL; } void AddSink(AudioTrackSinkInterface* sink) override {} @@ -112,12 +118,12 @@ class FakeAudioTrack : public MediaStreamTrack { *level = 1; return true; } - rtc::scoped_refptr 
GetAudioProcessor() override { + scoped_refptr GetAudioProcessor() override { return processor_; } private: - rtc::scoped_refptr processor_; + scoped_refptr processor_; }; // This fake audio processor is used to verify that the undesired initial values @@ -140,7 +146,7 @@ class FakeAudioTrackWithInitValue public: explicit FakeAudioTrackWithInitValue(const std::string& id) : MediaStreamTrack(id), - processor_(rtc::make_ref_counted()) {} + processor_(make_ref_counted()) {} std::string kind() const override { return "audio"; } AudioSourceInterface* GetSource() const override { return NULL; } void AddSink(AudioTrackSinkInterface* sink) override {} @@ -149,12 +155,12 @@ class FakeAudioTrackWithInitValue *level = 1; return true; } - rtc::scoped_refptr GetAudioProcessor() override { + scoped_refptr GetAudioProcessor() override { return processor_; } private: - rtc::scoped_refptr processor_; + scoped_refptr processor_; }; bool GetValue(const StatsReport* report, @@ -223,18 +229,18 @@ const StatsReport* FindNthReportByType(const StatsReports& reports, // `n` starts from 1 for finding the first report. // If either the `n`-th report is not found, or the stat is not present in that // report, then nullopt is returned. -absl::optional GetValueInNthReportByType( +std::optional GetValueInNthReportByType( const StatsReports& reports, StatsReport::StatsType type, StatsReport::StatsValueName name, int n) { const StatsReport* report = FindNthReportByType(reports, type, n); if (!report) { - return absl::nullopt; + return std::nullopt; } std::string value; if (!GetValue(report, name, &value)) { - return absl::nullopt; + return std::nullopt; } return value; } @@ -270,9 +276,9 @@ std::string ExtractBweStatsValue(const StatsReports& reports, } std::string DerToPem(const std::string& der) { - return rtc::SSLIdentity::DerToPem( - rtc::kPemTypeCertificate, - reinterpret_cast(der.c_str()), der.length()); + return SSLIdentity::DerToPem( + kPemTypeCertificate, reinterpret_cast(der.c_str()), + der.length()); } std::vector DersToPems(const std::vector& ders) { @@ -293,15 +299,14 @@ void CheckCertChainReports(const StatsReports& reports, std::string der_base64; EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDer, &der_base64)); - std::string der = rtc::Base64::Decode(der_base64, rtc::Base64::DO_STRICT); - EXPECT_EQ(ders[i], der); + EXPECT_THAT(ders[i], Eq(Base64Decode(der_base64))); std::string fingerprint_algorithm; EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameFingerprintAlgorithm, &fingerprint_algorithm)); // The digest algorithm for a FakeSSLCertificate is always SHA-1. 
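// Illustrative sketch, not part of the patch above: the recurring
// rtc::ToString(x) -> absl::StrCat(x) substitution in these expectations.
// For a single integral argument both produce the decimal text, so the
// string values being compared are unchanged.
#include <cstdint>
#include <string>

#include "absl/strings/str_cat.h"

std::string SsrcAsString(uint32_t ssrc) {
  return absl::StrCat(ssrc);  // e.g. 1234 -> "1234"
}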
- std::string sha_1_str = rtc::DIGEST_SHA_1; + std::string sha_1_str = DIGEST_SHA_1; EXPECT_EQ(sha_1_str, fingerprint_algorithm); std::string fingerprint; @@ -322,110 +327,110 @@ void CheckCertChainReports(const StatsReports& reports, } void VerifyVoiceReceiverInfoReport(const StatsReport* report, - const cricket::VoiceReceiverInfo& info) { + const VoiceReceiverInfo& info) { std::string value_in_report; EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameAudioOutputLevel, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.audio_level), value_in_report); + EXPECT_EQ(absl::StrCat(info.audio_level), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameBytesReceived, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.payload_bytes_received + - info.header_and_padding_bytes_received), + EXPECT_EQ(absl::StrCat(info.payload_bytes_received + + info.header_and_padding_bytes_received), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameJitterReceived, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.jitter_ms), value_in_report); + EXPECT_EQ(absl::StrCat(info.jitter_ms), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameJitterBufferMs, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.jitter_buffer_ms), value_in_report); + EXPECT_EQ(absl::StrCat(info.jitter_buffer_ms), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNamePreferredJitterBufferMs, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.jitter_buffer_preferred_ms), value_in_report); + EXPECT_EQ(absl::StrCat(info.jitter_buffer_preferred_ms), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameCurrentDelayMs, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.delay_estimate_ms), value_in_report); + EXPECT_EQ(absl::StrCat(info.delay_estimate_ms), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameExpandRate, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.expand_rate), value_in_report); + EXPECT_EQ(absl::StrCat(info.expand_rate), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameSpeechExpandRate, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.speech_expand_rate), value_in_report); + EXPECT_EQ(absl::StrCat(info.speech_expand_rate), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameAccelerateRate, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.accelerate_rate), value_in_report); + EXPECT_EQ(absl::StrCat(info.accelerate_rate), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNamePreemptiveExpandRate, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.preemptive_expand_rate), value_in_report); + EXPECT_EQ(absl::StrCat(info.preemptive_expand_rate), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameSecondaryDecodedRate, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.secondary_decoded_rate), value_in_report); + EXPECT_EQ(absl::StrCat(info.secondary_decoded_rate), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameSecondaryDiscardedRate, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.secondary_discarded_rate), value_in_report); + EXPECT_EQ(absl::StrCat(info.secondary_discarded_rate), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNamePacketsReceived, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.packets_received), value_in_report); + EXPECT_EQ(absl::StrCat(info.packets_received), value_in_report); EXPECT_TRUE(GetValue(report, 
StatsReport::kStatsValueNameDecodingCTSG, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.decoding_calls_to_silence_generator), + EXPECT_EQ(absl::StrCat(info.decoding_calls_to_silence_generator), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDecodingCTN, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.decoding_calls_to_neteq), value_in_report); + EXPECT_EQ(absl::StrCat(info.decoding_calls_to_neteq), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDecodingNormal, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.decoding_normal), value_in_report); + EXPECT_EQ(absl::StrCat(info.decoding_normal), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDecodingPLC, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.decoding_plc), value_in_report); + EXPECT_EQ(absl::StrCat(info.decoding_plc), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDecodingCodecPLC, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.decoding_codec_plc), value_in_report); + EXPECT_EQ(absl::StrCat(info.decoding_codec_plc), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDecodingCNG, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.decoding_cng), value_in_report); + EXPECT_EQ(absl::StrCat(info.decoding_cng), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDecodingPLCCNG, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.decoding_plc_cng), value_in_report); + EXPECT_EQ(absl::StrCat(info.decoding_plc_cng), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameDecodingMutedOutput, &value_in_report)); - EXPECT_EQ(rtc::ToString(info.decoding_muted_output), value_in_report); + EXPECT_EQ(absl::StrCat(info.decoding_muted_output), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameCodecName, &value_in_report)); } void VerifyVoiceSenderInfoReport(const StatsReport* report, - const cricket::VoiceSenderInfo& sinfo) { + const VoiceSenderInfo& sinfo) { std::string value_in_report; EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameCodecName, &value_in_report)); EXPECT_EQ(sinfo.codec_name, value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameBytesSent, &value_in_report)); - EXPECT_EQ(rtc::ToString(sinfo.payload_bytes_sent + - sinfo.header_and_padding_bytes_sent), + EXPECT_EQ(absl::StrCat(sinfo.payload_bytes_sent + + sinfo.header_and_padding_bytes_sent), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNamePacketsSent, &value_in_report)); - EXPECT_EQ(rtc::ToString(sinfo.packets_sent), value_in_report); + EXPECT_EQ(absl::StrCat(sinfo.packets_sent), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNamePacketsLost, &value_in_report)); - EXPECT_EQ(rtc::ToString(sinfo.packets_lost), value_in_report); + EXPECT_EQ(absl::StrCat(sinfo.packets_lost), value_in_report); EXPECT_TRUE( GetValue(report, StatsReport::kStatsValueNameRtt, &value_in_report)); - EXPECT_EQ(rtc::ToString(sinfo.rtt_ms), value_in_report); + EXPECT_EQ(absl::StrCat(sinfo.rtt_ms), value_in_report); EXPECT_TRUE( GetValue(report, StatsReport::kStatsValueNameRtt, &value_in_report)); - EXPECT_EQ(rtc::ToString(sinfo.rtt_ms), value_in_report); + EXPECT_EQ(absl::StrCat(sinfo.rtt_ms), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameJitterReceived, &value_in_report)); - EXPECT_EQ(rtc::ToString(sinfo.jitter_ms), value_in_report); + EXPECT_EQ(absl::StrCat(sinfo.jitter_ms), 
value_in_report); if (sinfo.apm_statistics.delay_median_ms) { EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameEchoDelayMedian, &value_in_report)); - EXPECT_EQ(rtc::ToString(*sinfo.apm_statistics.delay_median_ms), + EXPECT_EQ(absl::StrCat(*sinfo.apm_statistics.delay_median_ms), value_in_report); } else { EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNameEchoDelayMedian, @@ -434,7 +439,7 @@ void VerifyVoiceSenderInfoReport(const StatsReport* report, if (sinfo.apm_statistics.delay_standard_deviation_ms) { EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameEchoDelayStdDev, &value_in_report)); - EXPECT_EQ(rtc::ToString(*sinfo.apm_statistics.delay_standard_deviation_ms), + EXPECT_EQ(absl::StrCat(*sinfo.apm_statistics.delay_standard_deviation_ms), value_in_report); } else { EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNameEchoDelayStdDev, @@ -443,7 +448,7 @@ void VerifyVoiceSenderInfoReport(const StatsReport* report, if (sinfo.apm_statistics.echo_return_loss) { EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameEchoReturnLoss, &value_in_report)); - EXPECT_EQ(rtc::ToString(*sinfo.apm_statistics.echo_return_loss), + EXPECT_EQ(absl::StrCat(*sinfo.apm_statistics.echo_return_loss), value_in_report); } else { EXPECT_FALSE(GetValue(report, StatsReport::kStatsValueNameEchoReturnLoss, @@ -453,7 +458,7 @@ void VerifyVoiceSenderInfoReport(const StatsReport* report, EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameEchoReturnLossEnhancement, &value_in_report)); - EXPECT_EQ(rtc::ToString(*sinfo.apm_statistics.echo_return_loss_enhancement), + EXPECT_EQ(absl::StrCat(*sinfo.apm_statistics.echo_return_loss_enhancement), value_in_report); } else { EXPECT_FALSE(GetValue(report, @@ -464,7 +469,7 @@ void VerifyVoiceSenderInfoReport(const StatsReport* report, EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameResidualEchoLikelihood, &value_in_report)); - EXPECT_EQ(rtc::ToString(*sinfo.apm_statistics.residual_echo_likelihood), + EXPECT_EQ(absl::StrCat(*sinfo.apm_statistics.residual_echo_likelihood), value_in_report); } else { EXPECT_FALSE(GetValue(report, @@ -475,9 +480,9 @@ void VerifyVoiceSenderInfoReport(const StatsReport* report, EXPECT_TRUE(GetValue( report, StatsReport::kStatsValueNameResidualEchoLikelihoodRecentMax, &value_in_report)); - EXPECT_EQ(rtc::ToString( - *sinfo.apm_statistics.residual_echo_likelihood_recent_max), - value_in_report); + EXPECT_EQ( + absl::StrCat(*sinfo.apm_statistics.residual_echo_likelihood_recent_max), + value_in_report); } else { EXPECT_FALSE(GetValue( report, StatsReport::kStatsValueNameResidualEchoLikelihoodRecentMax, @@ -485,51 +490,51 @@ void VerifyVoiceSenderInfoReport(const StatsReport* report, } EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameAudioInputLevel, &value_in_report)); - EXPECT_EQ(rtc::ToString(sinfo.audio_level), value_in_report); + EXPECT_EQ(absl::StrCat(sinfo.audio_level), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameAnaBitrateActionCounter, &value_in_report)); ASSERT_TRUE(sinfo.ana_statistics.bitrate_action_counter); - EXPECT_EQ(rtc::ToString(*sinfo.ana_statistics.bitrate_action_counter), + EXPECT_EQ(absl::StrCat(*sinfo.ana_statistics.bitrate_action_counter), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameAnaChannelActionCounter, &value_in_report)); ASSERT_TRUE(sinfo.ana_statistics.channel_action_counter); - EXPECT_EQ(rtc::ToString(*sinfo.ana_statistics.channel_action_counter), + EXPECT_EQ(absl::StrCat(*sinfo.ana_statistics.channel_action_counter), 
value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameAnaDtxActionCounter, &value_in_report)); ASSERT_TRUE(sinfo.ana_statistics.dtx_action_counter); - EXPECT_EQ(rtc::ToString(*sinfo.ana_statistics.dtx_action_counter), + EXPECT_EQ(absl::StrCat(*sinfo.ana_statistics.dtx_action_counter), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameAnaFecActionCounter, &value_in_report)); ASSERT_TRUE(sinfo.ana_statistics.fec_action_counter); - EXPECT_EQ(rtc::ToString(*sinfo.ana_statistics.fec_action_counter), + EXPECT_EQ(absl::StrCat(*sinfo.ana_statistics.fec_action_counter), value_in_report); EXPECT_TRUE(GetValue( report, StatsReport::kStatsValueNameAnaFrameLengthIncreaseCounter, &value_in_report)); ASSERT_TRUE(sinfo.ana_statistics.frame_length_increase_counter); - EXPECT_EQ(rtc::ToString(*sinfo.ana_statistics.frame_length_increase_counter), + EXPECT_EQ(absl::StrCat(*sinfo.ana_statistics.frame_length_increase_counter), value_in_report); EXPECT_TRUE(GetValue( report, StatsReport::kStatsValueNameAnaFrameLengthDecreaseCounter, &value_in_report)); ASSERT_TRUE(sinfo.ana_statistics.frame_length_decrease_counter); - EXPECT_EQ(rtc::ToString(*sinfo.ana_statistics.frame_length_decrease_counter), + EXPECT_EQ(absl::StrCat(*sinfo.ana_statistics.frame_length_decrease_counter), value_in_report); EXPECT_TRUE(GetValue(report, StatsReport::kStatsValueNameAnaUplinkPacketLossFraction, &value_in_report)); ASSERT_TRUE(sinfo.ana_statistics.uplink_packet_loss_fraction); - EXPECT_EQ(rtc::ToString(*sinfo.ana_statistics.uplink_packet_loss_fraction), + EXPECT_EQ(absl::StrCat(*sinfo.ana_statistics.uplink_packet_loss_fraction), value_in_report); } // Helper methods to avoid duplication of code. -void InitVoiceSenderInfo(cricket::VoiceSenderInfo* voice_sender_info, +void InitVoiceSenderInfo(VoiceSenderInfo* voice_sender_info, uint32_t ssrc = kSsrcOfTrack) { voice_sender_info->add_ssrc(ssrc); voice_sender_info->codec_name = "fake_codec"; @@ -554,17 +559,16 @@ void InitVoiceSenderInfo(cricket::VoiceSenderInfo* voice_sender_info, voice_sender_info->ana_statistics.uplink_packet_loss_fraction = 118.0; } -void UpdateVoiceSenderInfoFromAudioTrack( - AudioTrackInterface* audio_track, - cricket::VoiceSenderInfo* voice_sender_info, - bool has_remote_tracks) { +void UpdateVoiceSenderInfoFromAudioTrack(AudioTrackInterface* audio_track, + VoiceSenderInfo* voice_sender_info, + bool has_remote_tracks) { audio_track->GetSignalLevel(&voice_sender_info->audio_level); AudioProcessorInterface::AudioProcessorStatistics audio_processor_stats = audio_track->GetAudioProcessor()->GetStats(has_remote_tracks); voice_sender_info->apm_statistics = audio_processor_stats.apm_statistics; } -void InitVoiceReceiverInfo(cricket::VoiceReceiverInfo* voice_receiver_info) { +void InitVoiceReceiverInfo(VoiceReceiverInfo* voice_receiver_info) { voice_receiver_info->add_ssrc(kSsrcOfTrack); voice_receiver_info->payload_bytes_received = 98; voice_receiver_info->header_and_padding_bytes_received = 12; @@ -597,8 +601,8 @@ class LegacyStatsCollectorForTest : public LegacyStatsCollector { class LegacyStatsCollectorTest : public ::testing::Test { protected: - rtc::scoped_refptr CreatePeerConnection() { - return rtc::make_ref_counted(); + scoped_refptr CreatePeerConnection() { + return make_ref_counted(); } std::unique_ptr CreateStatsCollector( @@ -624,7 +628,7 @@ class LegacyStatsCollectorTest : public ::testing::Test { EXPECT_EQ(audio_track->id(), track_id); std::string ssrc_id = ExtractSsrcStatsValue(*reports, 
StatsReport::kStatsValueNameSsrc); - EXPECT_EQ(rtc::ToString(kSsrcOfTrack), ssrc_id); + EXPECT_EQ(absl::StrCat(kSsrcOfTrack), ssrc_id); std::string media_type = ExtractSsrcStatsValue(*reports, StatsReport::kStatsValueNameMediaType); @@ -650,7 +654,7 @@ class LegacyStatsCollectorTest : public ::testing::Test { EXPECT_EQ(audio_track->id(), track_id); ssrc_id = ExtractSsrcStatsValue(track_reports, StatsReport::kStatsValueNameSsrc); - EXPECT_EQ(rtc::ToString(kSsrcOfTrack), ssrc_id); + EXPECT_EQ(absl::StrCat(kSsrcOfTrack), ssrc_id); if (!voice_info.senders.empty()) { VerifyVoiceSenderInfoReport(track_report, voice_info.senders[0]); } @@ -659,9 +663,9 @@ class LegacyStatsCollectorTest : public ::testing::Test { } } - void TestCertificateReports(const rtc::FakeSSLIdentity& local_identity, + void TestCertificateReports(const FakeSSLIdentity& local_identity, const std::vector& local_ders, - const rtc::FakeSSLIdentity& remote_identity, + const FakeSSLIdentity& remote_identity, const std::vector& remote_ders) { const std::string kTransportName = "transport"; @@ -673,14 +677,13 @@ class LegacyStatsCollectorTest : public ::testing::Test { // Fake stats to process. TransportChannelStats channel_stats; channel_stats.component = 1; - channel_stats.srtp_crypto_suite = rtc::kSrtpAes128CmSha1_80; - channel_stats.ssl_cipher_suite = - internal::TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA; + channel_stats.srtp_crypto_suite = kSrtpAes128CmSha1_80; + channel_stats.tls_cipher_suite_name = "cipher_suite_for_test"; pc->SetTransportStats(kTransportName, channel_stats); // Fake certificate to report. - rtc::scoped_refptr local_certificate( - rtc::RTCCertificate::Create(local_identity.Clone())); + scoped_refptr local_certificate( + RTCCertificate::Create(local_identity.Clone())); pc->SetLocalCertificate(kTransportName, local_certificate); pc->SetRemoteCertChain(kTransportName, remote_identity.cert_chain().Clone()); @@ -722,48 +725,45 @@ class LegacyStatsCollectorTest : public ::testing::Test { std::string dtls_cipher_suite = ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports, StatsReport::kStatsValueNameDtlsCipher); - EXPECT_EQ(rtc::SSLStreamAdapter::SslCipherSuiteToName( - internal::TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA), - dtls_cipher_suite); + EXPECT_EQ(dtls_cipher_suite, "cipher_suite_for_test"); std::string srtp_crypto_suite = ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports, StatsReport::kStatsValueNameSrtpCipher); - EXPECT_EQ(rtc::SrtpCryptoSuiteToName(rtc::kSrtpAes128CmSha1_80), - srtp_crypto_suite); + EXPECT_EQ(SrtpCryptoSuiteToName(kSrtpAes128CmSha1_80), srtp_crypto_suite); } private: - rtc::AutoThread main_thread_; + AutoThread main_thread_; }; -static rtc::scoped_refptr CreateMockSender( - rtc::scoped_refptr track, +static scoped_refptr CreateMockSender( + scoped_refptr track, uint32_t ssrc) { - auto sender = rtc::make_ref_counted(); + auto sender = make_ref_counted(); EXPECT_CALL(*sender, track()).WillRepeatedly(Return(track)); EXPECT_CALL(*sender, ssrc()).WillRepeatedly(Return(ssrc)); EXPECT_CALL(*sender, media_type()) .WillRepeatedly( Return(track->kind() == MediaStreamTrackInterface::kAudioKind - ? cricket::MEDIA_TYPE_AUDIO - : cricket::MEDIA_TYPE_VIDEO)); + ? 
webrtc::MediaType::AUDIO + : webrtc::MediaType::VIDEO)); EXPECT_CALL(*sender, SetMediaChannel(_)).Times(AtMost(2)); EXPECT_CALL(*sender, SetTransceiverAsStopped()).Times(AtMost(1)); EXPECT_CALL(*sender, Stop()); return sender; } -static rtc::scoped_refptr CreateMockReceiver( - rtc::scoped_refptr track, +static scoped_refptr CreateMockReceiver( + scoped_refptr track, uint32_t ssrc) { - auto receiver = rtc::make_ref_counted(); + auto receiver = make_ref_counted(); EXPECT_CALL(*receiver, track()).WillRepeatedly(Return(track)); EXPECT_CALL(*receiver, ssrc()).WillRepeatedly(Return(ssrc)); EXPECT_CALL(*receiver, media_type()) .WillRepeatedly( Return(track->kind() == MediaStreamTrackInterface::kAudioKind - ? cricket::MEDIA_TYPE_AUDIO - : cricket::MEDIA_TYPE_VIDEO)); + ? webrtc::MediaType::AUDIO + : webrtc::MediaType::VIDEO)); EXPECT_CALL(*receiver, SetMediaChannel(_)).WillRepeatedly(Return()); EXPECT_CALL(*receiver, Stop()).WillRepeatedly(Return()); return receiver; @@ -778,7 +778,7 @@ class StatsCollectorTrackTest : public LegacyStatsCollectorTest, void AddOutgoingVideoTrack(FakePeerConnectionForStats* pc, LegacyStatsCollectorForTest* stats) { video_track_ = VideoTrack::Create( - kLocalTrackId, FakeVideoTrackSource::Create(), rtc::Thread::Current()); + kLocalTrackId, FakeVideoTrackSource::Create(), Thread::Current()); if (GetParam()) { if (!stream_) stream_ = MediaStream::Create("streamid"); @@ -794,7 +794,7 @@ class StatsCollectorTrackTest : public LegacyStatsCollectorTest, void AddIncomingVideoTrack(FakePeerConnectionForStats* pc, LegacyStatsCollectorForTest* stats) { video_track_ = VideoTrack::Create( - kRemoteTrackId, FakeVideoTrackSource::Create(), rtc::Thread::Current()); + kRemoteTrackId, FakeVideoTrackSource::Create(), Thread::Current()); if (GetParam()) { stream_ = MediaStream::Create("streamid"); stream_->AddTrack(video_track()); @@ -809,10 +809,10 @@ class StatsCollectorTrackTest : public LegacyStatsCollectorTest, // and register it into the stats object. // If GetParam() returns true, the track is also inserted into the local // stream, which is created if necessary. - rtc::scoped_refptr AddOutgoingAudioTrack( + scoped_refptr AddOutgoingAudioTrack( FakePeerConnectionForStats* pc, LegacyStatsCollectorForTest* stats) { - audio_track_ = rtc::make_ref_counted(kLocalTrackId); + audio_track_ = make_ref_counted(kLocalTrackId); if (GetParam()) { if (!stream_) stream_ = MediaStream::Create("streamid"); @@ -827,7 +827,7 @@ class StatsCollectorTrackTest : public LegacyStatsCollectorTest, // Adds a incoming audio track with a given SSRC into the stats. 
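// Illustrative sketch, not part of the patch above: the CreateMockSender /
// CreateMockReceiver helpers pre-program their mocks with
// EXPECT_CALL(...).WillRepeatedly(Return(...)) so every query for track(),
// ssrc() or media_type() gets a canned answer. MockSenderSketch is a
// stand-in built on plain googlemock.
#include <cstdint>

#include "gmock/gmock.h"

class SenderLikeSketch {
 public:
  virtual ~SenderLikeSketch() = default;
  virtual uint32_t ssrc() const = 0;
};

class MockSenderSketch : public SenderLikeSketch {
 public:
  MOCK_METHOD(uint32_t, ssrc, (), (const, override));
};

inline void ProgramMockSender(MockSenderSketch& sender, uint32_t ssrc) {
  using ::testing::Return;
  // Repeats the canned value for any number of calls.
  EXPECT_CALL(sender, ssrc()).WillRepeatedly(Return(ssrc));
}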
void AddIncomingAudioTrack(FakePeerConnectionForStats* pc, LegacyStatsCollectorForTest* stats) { - audio_track_ = rtc::make_ref_counted(kRemoteTrackId); + audio_track_ = make_ref_counted(kRemoteTrackId); if (GetParam()) { if (stream_ == nullptr) stream_ = MediaStream::Create("streamid"); @@ -839,12 +839,12 @@ class StatsCollectorTrackTest : public LegacyStatsCollectorTest, pc->AddReceiver(CreateMockReceiver(audio_track_, kSsrcOfTrack)); } - rtc::scoped_refptr audio_track() { return audio_track_; } - rtc::scoped_refptr video_track() { return video_track_; } + scoped_refptr audio_track() { return audio_track_; } + scoped_refptr video_track() { return video_track_; } - rtc::scoped_refptr stream_; - rtc::scoped_refptr video_track_; - rtc::scoped_refptr audio_track_; + scoped_refptr stream_; + scoped_refptr video_track_; + scoped_refptr audio_track_; }; TEST(StatsCollectionTest, DetachAndMerge) { @@ -877,7 +877,7 @@ TEST(StatsCollectionTest, DetachAndMerge) { // Similar to `DetachAndMerge` above but detaches on one thread, merges on // another to test that we don't trigger sequence checker. TEST(StatsCollectionTest, DetachAndMergeThreaded) { - rtc::Thread new_thread(std::make_unique()); + Thread new_thread(std::make_unique()); new_thread.Start(); StatsReport::Id id( @@ -947,7 +947,7 @@ TEST_F(LegacyStatsCollectorTest, ExtractDataInfo) { EXPECT_EQ(kDataChannelLabel, ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel, reports, StatsReport::kStatsValueNameLabel)); - EXPECT_EQ(rtc::ToString(kDataChannelId), + EXPECT_EQ(absl::StrCat(kDataChannelId), ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel, reports, StatsReport::kStatsValueNameDataChannelId)); EXPECT_EQ(kConnectingString, @@ -982,7 +982,7 @@ TEST_P(StatsCollectorTrackTest, BytesCounterHandles64Bits) { stats->GetStats(nullptr, &reports); EXPECT_EQ( - rtc::ToString(kBytesSent), + absl::StrCat(kBytesSent), ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameBytesSent)); } @@ -1022,17 +1022,17 @@ TEST_P(StatsCollectorTrackTest, AudioBandwidthEstimationInfoIsReported) { stats->GetStats(nullptr, &reports); EXPECT_EQ( - rtc::ToString(kBytesSent), + absl::StrCat(kBytesSent), ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameBytesSent)); - EXPECT_EQ(rtc::ToString(kSendBandwidth), + EXPECT_EQ(absl::StrCat(kSendBandwidth), ExtractBweStatsValue( reports, StatsReport::kStatsValueNameAvailableSendBandwidth)); EXPECT_EQ( - rtc::ToString(kRecvBandwidth), + absl::StrCat(kRecvBandwidth), ExtractBweStatsValue( reports, StatsReport::kStatsValueNameAvailableReceiveBandwidth)); EXPECT_EQ( - rtc::ToString(kPacerDelay), + absl::StrCat(kPacerDelay), ExtractBweStatsValue(reports, StatsReport::kStatsValueNameBucketDelay)); } @@ -1071,17 +1071,17 @@ TEST_P(StatsCollectorTrackTest, VideoBandwidthEstimationInfoIsReported) { stats->GetStats(nullptr, &reports); EXPECT_EQ( - rtc::ToString(kBytesSent), + absl::StrCat(kBytesSent), ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameBytesSent)); - EXPECT_EQ(rtc::ToString(kSendBandwidth), + EXPECT_EQ(absl::StrCat(kSendBandwidth), ExtractBweStatsValue( reports, StatsReport::kStatsValueNameAvailableSendBandwidth)); EXPECT_EQ( - rtc::ToString(kRecvBandwidth), + absl::StrCat(kRecvBandwidth), ExtractBweStatsValue( reports, StatsReport::kStatsValueNameAvailableReceiveBandwidth)); EXPECT_EQ( - rtc::ToString(kPacerDelay), + absl::StrCat(kPacerDelay), ExtractBweStatsValue(reports, StatsReport::kStatsValueNameBucketDelay)); } @@ -1181,7 +1181,7 @@ TEST_P(StatsCollectorTrackTest, 
TrackAndSsrcObjectExistAfterUpdateSsrcStats) { std::string ssrc_id = ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameSsrc); - EXPECT_EQ(rtc::ToString(kSsrcOfTrack), ssrc_id); + EXPECT_EQ(absl::StrCat(kSsrcOfTrack), ssrc_id); std::string track_id = ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameTrackId); @@ -1315,7 +1315,7 @@ TEST_P(StatsCollectorTrackTest, ReportsFromRemoteTrack) { std::string ssrc_id = ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameSsrc); - EXPECT_EQ(rtc::ToString(kSsrcOfTrack), ssrc_id); + EXPECT_EQ(absl::StrCat(kSsrcOfTrack), ssrc_id); std::string track_id = ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameTrackId); @@ -1326,32 +1326,32 @@ TEST_P(StatsCollectorTrackTest, ReportsFromRemoteTrack) { // information from local/remote candidates. TEST_F(LegacyStatsCollectorTest, IceCandidateReport) { const std::string kTransportName = "transport"; - const rtc::AdapterType kNetworkType = rtc::ADAPTER_TYPE_ETHERNET; + const AdapterType kNetworkType = ADAPTER_TYPE_ETHERNET; constexpr uint32_t kPriority = 1000; constexpr int kLocalPort = 2000; const std::string kLocalIp = "192.168.0.1"; - const rtc::SocketAddress kLocalAddress(kLocalIp, kLocalPort); + const SocketAddress kLocalAddress(kLocalIp, kLocalPort); constexpr int kRemotePort = 2001; const std::string kRemoteIp = "192.168.0.2"; - const rtc::SocketAddress kRemoteAddress(kRemoteIp, kRemotePort); + const SocketAddress kRemoteAddress(kRemoteIp, kRemotePort); auto pc = CreatePeerConnection(); auto stats = CreateStatsCollector(pc.get()); - cricket::Candidate local; + Candidate local; EXPECT_GT(local.id().length(), 0u); - local.set_type(cricket::LOCAL_PORT_TYPE); - local.set_protocol(cricket::UDP_PROTOCOL_NAME); + RTC_DCHECK_EQ(local.type(), IceCandidateType::kHost); + local.set_protocol(UDP_PROTOCOL_NAME); local.set_address(kLocalAddress); local.set_priority(kPriority); local.set_network_type(kNetworkType); - cricket::Candidate remote; + Candidate remote; EXPECT_GT(remote.id().length(), 0u); - remote.set_type(cricket::PRFLX_PORT_TYPE); - remote.set_protocol(cricket::UDP_PROTOCOL_NAME); + remote.set_type(IceCandidateType::kPrflx); + remote.set_protocol(UDP_PROTOCOL_NAME); remote.set_address(kRemoteAddress); remote.set_priority(kPriority); remote.set_network_type(kNetworkType); @@ -1379,19 +1379,19 @@ TEST_F(LegacyStatsCollectorTest, IceCandidateReport) { ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports, StatsReport::kStatsValueNameCandidateIPAddress)); EXPECT_EQ( - rtc::ToString(kLocalPort), + absl::StrCat(kLocalPort), ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports, StatsReport::kStatsValueNameCandidatePortNumber)); EXPECT_EQ( - cricket::UDP_PROTOCOL_NAME, + UDP_PROTOCOL_NAME, ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports, StatsReport::kStatsValueNameCandidateTransportType)); EXPECT_EQ( - rtc::ToString(kPriority), + absl::StrCat(kPriority), ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports, StatsReport::kStatsValueNameCandidatePriority)); EXPECT_EQ( - IceCandidateTypeToStatsType(cricket::LOCAL_PORT_TYPE), + IceCandidateTypeToStatsType(local), ExtractStatsValue(StatsReport::kStatsReportTypeIceLocalCandidate, reports, StatsReport::kStatsValueNameCandidateType)); EXPECT_EQ( @@ -1408,20 +1408,20 @@ TEST_F(LegacyStatsCollectorTest, IceCandidateReport) { ExtractStatsValue(StatsReport::kStatsReportTypeIceRemoteCandidate, reports, StatsReport::kStatsValueNameCandidateIPAddress)); - 
EXPECT_EQ(rtc::ToString(kRemotePort), + EXPECT_EQ(absl::StrCat(kRemotePort), ExtractStatsValue(StatsReport::kStatsReportTypeIceRemoteCandidate, reports, StatsReport::kStatsValueNameCandidatePortNumber)); - EXPECT_EQ(cricket::UDP_PROTOCOL_NAME, + EXPECT_EQ(UDP_PROTOCOL_NAME, ExtractStatsValue( StatsReport::kStatsReportTypeIceRemoteCandidate, reports, StatsReport::kStatsValueNameCandidateTransportType)); - EXPECT_EQ(rtc::ToString(kPriority), + EXPECT_EQ(absl::StrCat(kPriority), ExtractStatsValue(StatsReport::kStatsReportTypeIceRemoteCandidate, reports, StatsReport::kStatsValueNameCandidatePriority)); EXPECT_EQ( - IceCandidateTypeToStatsType(cricket::PRFLX_PORT_TYPE), + IceCandidateTypeToStatsType(remote), ExtractStatsValue(StatsReport::kStatsReportTypeIceRemoteCandidate, reports, StatsReport::kStatsValueNameCandidateType)); EXPECT_EQ(kNotFound, @@ -1440,7 +1440,7 @@ TEST_F(LegacyStatsCollectorTest, ChainedCertificateReportsCreated) { local_ders[2] = "some"; local_ders[3] = "der"; local_ders[4] = "values"; - rtc::FakeSSLIdentity local_identity(DersToPems(local_ders)); + FakeSSLIdentity local_identity(DersToPems(local_ders)); // Build remote certificate chain std::vector remote_ders(4); @@ -1448,7 +1448,7 @@ TEST_F(LegacyStatsCollectorTest, ChainedCertificateReportsCreated) { remote_ders[1] = "non-"; remote_ders[2] = "intersecting"; remote_ders[3] = "set"; - rtc::FakeSSLIdentity remote_identity(DersToPems(remote_ders)); + FakeSSLIdentity remote_identity(DersToPems(remote_ders)); TestCertificateReports(local_identity, local_ders, remote_identity, remote_ders); @@ -1459,11 +1459,11 @@ TEST_F(LegacyStatsCollectorTest, ChainedCertificateReportsCreated) { TEST_F(LegacyStatsCollectorTest, ChainlessCertificateReportsCreated) { // Build local certificate. std::string local_der = "This is the local der."; - rtc::FakeSSLIdentity local_identity(DerToPem(local_der)); + FakeSSLIdentity local_identity(DerToPem(local_der)); // Build remote certificate. std::string remote_der = "This is somebody else's der."; - rtc::FakeSSLIdentity remote_identity(DerToPem(remote_der)); + FakeSSLIdentity remote_identity(DerToPem(remote_der)); TestCertificateReports(local_identity, std::vector(1, local_der), remote_identity, @@ -1512,13 +1512,13 @@ TEST_F(LegacyStatsCollectorTest, NoTransport) { TEST_F(LegacyStatsCollectorTest, UnsupportedDigestIgnored) { // Build a local certificate. std::string local_der = "This is the local der."; - rtc::FakeSSLIdentity local_identity(DerToPem(local_der)); + FakeSSLIdentity local_identity(DerToPem(local_der)); // Build a remote certificate with an unsupported digest algorithm. std::string remote_der = "This is somebody else's der."; - rtc::FakeSSLCertificate remote_cert(DerToPem(remote_der)); + FakeSSLCertificate remote_cert(DerToPem(remote_der)); remote_cert.set_digest_algorithm("foobar"); - rtc::FakeSSLIdentity remote_identity(remote_cert); + FakeSSLIdentity remote_identity(remote_cert); TestCertificateReports(local_identity, std::vector(1, local_der), remote_identity, std::vector()); @@ -1538,8 +1538,8 @@ TEST_P(StatsCollectorTrackTest, FilterOutNegativeInitialValues) { // Create a local stream with a local audio track and adds it to the stats. 
stream_ = MediaStream::Create("streamid"); auto local_track = - rtc::make_ref_counted(kLocalTrackId); - stream_->AddTrack(rtc::scoped_refptr(local_track.get())); + make_ref_counted(kLocalTrackId); + stream_->AddTrack(scoped_refptr(local_track.get())); pc->AddSender(CreateMockSender(local_track, kSsrcOfTrack)); if (GetParam()) { stats->AddStream(stream_.get()); @@ -1547,10 +1547,10 @@ TEST_P(StatsCollectorTrackTest, FilterOutNegativeInitialValues) { stats->AddLocalAudioTrack(local_track.get(), kSsrcOfTrack); // Create a remote stream with a remote audio track and adds it to the stats. - rtc::scoped_refptr remote_stream( + scoped_refptr remote_stream( MediaStream::Create("remotestreamid")); - rtc::scoped_refptr remote_track = - rtc::make_ref_counted(kRemoteTrackId); + scoped_refptr remote_track = + make_ref_counted(kRemoteTrackId); remote_stream->AddTrack(remote_track); pc->AddReceiver(CreateMockReceiver(remote_track, kSsrcOfTrack)); if (GetParam()) { @@ -1703,7 +1703,7 @@ TEST_P(StatsCollectorTrackTest, GetStatsAfterRemoveAudioStream) { EXPECT_EQ(kLocalTrackId, track_id); std::string ssrc_id = ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameSsrc); - EXPECT_EQ(rtc::ToString(kSsrcOfTrack), ssrc_id); + EXPECT_EQ(absl::StrCat(kSsrcOfTrack), ssrc_id); // Verifies the values in the track report, no value will be changed by the // AudioTrackInterface::GetSignalValue() and @@ -1722,10 +1722,10 @@ TEST_P(StatsCollectorTrackTest, LocalAndRemoteTracksWithSameSsrc) { stats->AddLocalAudioTrack(audio_track_.get(), kSsrcOfTrack); // Create a remote stream with a remote audio track and adds it to the stats. - rtc::scoped_refptr remote_stream( + scoped_refptr remote_stream( MediaStream::Create("remotestreamid")); - rtc::scoped_refptr remote_track = - rtc::make_ref_counted(kRemoteTrackId); + scoped_refptr remote_track = + make_ref_counted(kRemoteTrackId); pc->AddReceiver(CreateMockReceiver(remote_track, kSsrcOfTrack)); remote_stream->AddTrack(remote_track); stats->AddStream(remote_stream.get()); @@ -1817,10 +1817,9 @@ TEST_P(StatsCollectorTrackTest, TwoLocalTracksWithSameSsrc) { // Create a new audio track and adds it to the stream and stats. static const std::string kNewTrackId = "new_track_id"; - auto new_audio_track = rtc::make_ref_counted(kNewTrackId); + auto new_audio_track = make_ref_counted(kNewTrackId); pc->AddSender(CreateMockSender(new_audio_track, kSsrcOfTrack)); - stream_->AddTrack( - rtc::scoped_refptr(new_audio_track.get())); + stream_->AddTrack(scoped_refptr(new_audio_track.get())); stats->AddLocalAudioTrack(new_audio_track.get(), kSsrcOfTrack); stats->InvalidateCache(); @@ -1850,7 +1849,7 @@ TEST_P(StatsCollectorTrackTest, TwoLocalSendersWithSameTrack) { auto stats = CreateStatsCollector(pc.get()); auto local_track = - rtc::make_ref_counted(kLocalTrackId); + make_ref_counted(kLocalTrackId); pc->AddSender(CreateMockSender(local_track, kFirstSsrc)); stats->AddLocalAudioTrack(local_track.get(), kFirstSsrc); pc->AddSender(CreateMockSender(local_track, kSecondSsrc)); @@ -1889,13 +1888,13 @@ TEST_P(StatsCollectorTrackTest, TwoLocalSendersWithSameTrack) { // The SSRC in each SSRC report is different and correspond to the sender // SSRC. 
- std::vector> ssrcs = { + std::vector> ssrcs = { GetValueInNthReportByType(reports, StatsReport::kStatsReportTypeSsrc, StatsReport::kStatsValueNameSsrc, 1), GetValueInNthReportByType(reports, StatsReport::kStatsReportTypeSsrc, StatsReport::kStatsValueNameSsrc, 2)}; - EXPECT_THAT(ssrcs, UnorderedElementsAre(rtc::ToString(kFirstSsrc), - rtc::ToString(kSecondSsrc))); + EXPECT_THAT(ssrcs, UnorderedElementsAre(absl::StrCat(kFirstSsrc), + absl::StrCat(kSecondSsrc))); // There is one track report with the same track ID as the SSRC reports. EXPECT_EQ( @@ -1925,10 +1924,10 @@ TEST_P(StatsCollectorTrackTest, VerifyVideoSendSsrcStats) { StatsReports reports; stats->GetStats(nullptr, &reports); - EXPECT_EQ(rtc::ToString(video_sender_info.frames_encoded), + EXPECT_EQ(absl::StrCat(video_sender_info.frames_encoded), ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameFramesEncoded)); - EXPECT_EQ(rtc::ToString(*video_sender_info.qp_sum), + EXPECT_EQ(absl::StrCat(*video_sender_info.qp_sum), ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameQpSum)); } @@ -1952,10 +1951,10 @@ TEST_P(StatsCollectorTrackTest, VerifyVideoReceiveSsrcStatsNew) { StatsReports reports; stats->GetStats(nullptr, &reports); - EXPECT_EQ(rtc::ToString(video_receiver_info.frames_decoded), + EXPECT_EQ(absl::StrCat(video_receiver_info.frames_decoded), ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameFramesDecoded)); - EXPECT_EQ(rtc::ToString(*video_receiver_info.qp_sum), + EXPECT_EQ(absl::StrCat(*video_receiver_info.qp_sum), ExtractSsrcStatsValue(reports, StatsReport::kStatsValueNameQpSum)); } diff --git a/pc/local_audio_source.cc b/pc/local_audio_source.cc index 51949f7f4d..9e4f35f839 100644 --- a/pc/local_audio_source.cc +++ b/pc/local_audio_source.cc @@ -10,18 +10,23 @@ #include "pc/local_audio_source.h" +#include "api/audio_options.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" +#include "api/scoped_refptr.h" + using webrtc::MediaSourceInterface; namespace webrtc { -rtc::scoped_refptr LocalAudioSource::Create( - const cricket::AudioOptions* audio_options) { - auto source = rtc::make_ref_counted(); +scoped_refptr LocalAudioSource::Create( + const AudioOptions* audio_options) { + auto source = make_ref_counted(); source->Initialize(audio_options); return source; } -void LocalAudioSource::Initialize(const cricket::AudioOptions* audio_options) { +void LocalAudioSource::Initialize(const AudioOptions* audio_options) { if (!audio_options) return; diff --git a/pc/local_audio_source.h b/pc/local_audio_source.h index 587ce10809..84f1207214 100644 --- a/pc/local_audio_source.h +++ b/pc/local_audio_source.h @@ -24,13 +24,13 @@ namespace webrtc { class LocalAudioSource : public Notifier { public: // Creates an instance of LocalAudioSource. 
- static rtc::scoped_refptr Create( - const cricket::AudioOptions* audio_options); + static scoped_refptr Create( + const AudioOptions* audio_options); SourceState state() const override { return kLive; } bool remote() const override { return false; } - const cricket::AudioOptions options() const override { return options_; } + const AudioOptions options() const override { return options_; } void AddSink(AudioTrackSinkInterface* sink) override {} void RemoveSink(AudioTrackSinkInterface* sink) override {} @@ -40,9 +40,9 @@ class LocalAudioSource : public Notifier { ~LocalAudioSource() override {} private: - void Initialize(const cricket::AudioOptions* audio_options); + void Initialize(const AudioOptions* audio_options); - cricket::AudioOptions options_; + AudioOptions options_; }; } // namespace webrtc diff --git a/pc/local_audio_source_unittest.cc b/pc/local_audio_source_unittest.cc index 76d3b366c3..7d0c38b38e 100644 --- a/pc/local_audio_source_unittest.cc +++ b/pc/local_audio_source_unittest.cc @@ -10,21 +10,24 @@ #include "pc/local_audio_source.h" -#include "absl/types/optional.h" +#include + +#include "api/audio_options.h" +#include "api/scoped_refptr.h" #include "test/gtest.h" using webrtc::LocalAudioSource; TEST(LocalAudioSourceTest, InitWithAudioOptions) { - cricket::AudioOptions audio_options; + webrtc::AudioOptions audio_options; audio_options.highpass_filter = true; - rtc::scoped_refptr source = + webrtc::scoped_refptr source = LocalAudioSource::Create(&audio_options); EXPECT_EQ(true, source->options().highpass_filter); } TEST(LocalAudioSourceTest, InitWithNoOptions) { - rtc::scoped_refptr source = + webrtc::scoped_refptr source = LocalAudioSource::Create(nullptr); - EXPECT_EQ(absl::nullopt, source->options().highpass_filter); + EXPECT_EQ(std::nullopt, source->options().highpass_filter); } diff --git a/pc/media_factory.h b/pc/media_factory.h new file mode 100644 index 0000000000..932609d87f --- /dev/null +++ b/pc/media_factory.h @@ -0,0 +1,45 @@ +/* + * Copyright 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_MEDIA_FACTORY_H_ +#define PC_MEDIA_FACTORY_H_ + +#include + +#include "api/environment/environment.h" +#include "call/call.h" +#include "call/call_config.h" +#include "media/base/media_engine.h" + +namespace webrtc { + +// PeerConnectionFactoryDependencies is forward declared because of circular +// dependency between MediaFactory and PeerConnectionFactoryDependencies: +// PeerConnectionFactoryDependencies keeps an instance of MediaFactory and thus +// needs to know how to destroy it. +// MediaFactory mentions PeerConnectionFactoryDependencies in api, but does not +// need its full definition. +struct PeerConnectionFactoryDependencies; + +// Interface responsible for constructing media specific classes for +// PeerConnectionFactory and PeerConnection. 
+class MediaFactory { + public: + virtual ~MediaFactory() = default; + + virtual std::unique_ptr CreateCall(CallConfig config) = 0; + virtual std::unique_ptr CreateMediaEngine( + const Environment& env, + PeerConnectionFactoryDependencies& dependencies) = 0; +}; + +} // namespace webrtc + +#endif // PC_MEDIA_FACTORY_H_ diff --git a/pc/media_options.cc b/pc/media_options.cc new file mode 100644 index 0000000000..56afb8c8bd --- /dev/null +++ b/pc/media_options.cc @@ -0,0 +1,82 @@ +/* + * Copyright 2025 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "pc/media_options.h" + +#include +#include + +#include "absl/algorithm/container.h" +#include "api/media_types.h" +#include "media/base/rid_description.h" +#include "pc/simulcast_description.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +namespace { +// note: function duplicated in media_session.cc +bool ValidateSimulcastLayers(const std::vector& rids, + const SimulcastLayerList& simulcast_layers) { + return absl::c_all_of( + simulcast_layers.GetAllLayers(), [&rids](const SimulcastLayer& layer) { + return absl::c_any_of(rids, [&layer](const RidDescription& rid) { + return rid.rid == layer.rid; + }); + }); +} + +} // namespace + +void MediaDescriptionOptions::AddAudioSender( + const std::string& track_id, + const std::vector& stream_ids) { + RTC_DCHECK(type == MediaType::AUDIO); + AddSenderInternal(track_id, stream_ids, {}, SimulcastLayerList(), 1); +} + +void MediaDescriptionOptions::AddVideoSender( + const std::string& track_id, + const std::vector& stream_ids, + const std::vector& rids, + const SimulcastLayerList& simulcast_layers, + int num_sim_layers) { + RTC_DCHECK(type == MediaType::VIDEO); + RTC_DCHECK(rids.empty() || num_sim_layers == 0) + << "RIDs are the compliant way to indicate simulcast."; + RTC_DCHECK(ValidateSimulcastLayers(rids, simulcast_layers)); + AddSenderInternal(track_id, stream_ids, rids, simulcast_layers, + num_sim_layers); +} + +void MediaDescriptionOptions::AddSenderInternal( + const std::string& track_id, + const std::vector& stream_ids, + const std::vector& rids, + const SimulcastLayerList& simulcast_layers, + int num_sim_layers) { + // TODO(steveanton): Support any number of stream ids. + RTC_CHECK(stream_ids.size() == 1U); + SenderOptions options; + options.track_id = track_id; + options.stream_ids = stream_ids; + options.simulcast_layers = simulcast_layers; + options.rids = rids; + options.num_sim_layers = num_sim_layers; + sender_options.push_back(options); +} + +bool MediaSessionOptions::HasMediaDescription(MediaType type) const { + return absl::c_any_of( + media_description_options, + [type](const MediaDescriptionOptions& t) { return t.type == type; }); +} + +} // namespace webrtc diff --git a/pc/media_options.h b/pc/media_options.h new file mode 100644 index 0000000000..731efac709 --- /dev/null +++ b/pc/media_options.h @@ -0,0 +1,131 @@ +/* + * Copyright 2025 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// Option structures for MediaSession APIs. +#ifndef PC_MEDIA_OPTIONS_H_ +#define PC_MEDIA_OPTIONS_H_ + +#include +#include + +#include "api/crypto/crypto_options.h" +#include "api/media_types.h" +#include "api/rtp_parameters.h" +#include "api/rtp_transceiver_direction.h" +#include "media/base/codec.h" +#include "media/base/rid_description.h" +#include "p2p/base/transport_description.h" +#include "p2p/base/transport_description_factory.h" +#include "pc/simulcast_description.h" + +namespace webrtc { + +// Default RTCP CNAME for unit tests. +const char kDefaultRtcpCname[] = "DefaultRtcpCname"; + +// Options for an RtpSender contained within a media description/"m=" section. +// Note: Spec-compliant Simulcast and legacy simulcast are mutually exclusive. +struct SenderOptions { + std::string track_id; + std::vector stream_ids; + // Use RIDs and Simulcast Layers to indicate spec-compliant Simulcast. + std::vector rids; + SimulcastLayerList simulcast_layers; + // Use `num_sim_layers` to indicate legacy simulcast. + int num_sim_layers; +}; + +// Options for an individual media description/"m=" section. +struct MediaDescriptionOptions { + MediaDescriptionOptions(MediaType type, + const std::string& mid, + RtpTransceiverDirection direction, + bool stopped) + : type(type), mid(mid), direction(direction), stopped(stopped) {} + + // TODO(deadbeef): When we don't support Plan B, there will only be one + // sender per media description and this can be simplified. + void AddAudioSender(const std::string& track_id, + const std::vector& stream_ids); + void AddVideoSender(const std::string& track_id, + const std::vector& stream_ids, + const std::vector& rids, + const SimulcastLayerList& simulcast_layers, + int num_sim_layers); + + MediaType type; + std::string mid; + RtpTransceiverDirection direction; + bool stopped; + TransportOptions transport_options; + // Note: There's no equivalent "RtpReceiverOptions" because only send + // stream information goes in the local descriptions. + std::vector sender_options; + std::vector codec_preferences; + std::vector header_extensions; + // Codecs to include in a generated offer or answer. + // If this is used, session-level codec lists MUST be ignored. + std::vector codecs_to_include; + + private: + // Doesn't DCHECK on `type`. + void AddSenderInternal(const std::string& track_id, + const std::vector& stream_ids, + const std::vector& rids, + const SimulcastLayerList& simulcast_layers, + int num_sim_layers); +}; + +// Provides a mechanism for describing how m= sections should be generated. +// The m= section with index X will use media_description_options[X]. There +// must be an option for each existing section if creating an answer, or a +// subsequent offer. +struct MediaSessionOptions { + MediaSessionOptions() {} + + bool has_audio() const { return HasMediaDescription(MediaType::AUDIO); } + bool has_video() const { return HasMediaDescription(MediaType::VIDEO); } + bool has_data() const { return HasMediaDescription(MediaType::DATA); } + + bool HasMediaDescription(MediaType type) const; + + bool vad_enabled = true; // When disabled, removes all CN codecs from SDP. 
+ bool rtcp_mux_enabled = true; + bool bundle_enabled = false; + bool offer_extmap_allow_mixed = false; + bool raw_packetization_for_video = false; + std::string rtcp_cname = kDefaultRtcpCname; + CryptoOptions crypto_options; + // List of media description options in the same order that the media + // descriptions will be generated. + std::vector media_description_options; + std::vector pooled_ice_credentials; + + // Use the draft-ietf-mmusic-sctp-sdp-03 obsolete syntax for SCTP + // datachannels. + // Default is true for backwards compatibility with clients that use + // this internal interface. + bool use_obsolete_sctp_sdp = true; +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::kDefaultRtcpCname; +using ::webrtc::MediaDescriptionOptions; +using ::webrtc::MediaSessionOptions; +using ::webrtc::SenderOptions; +} // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // PC_MEDIA_OPTIONS_H_ diff --git a/pc/media_protocol_names.cc b/pc/media_protocol_names.cc index 52d676daf5..d2416cd5c7 100644 --- a/pc/media_protocol_names.cc +++ b/pc/media_protocol_names.cc @@ -15,7 +15,7 @@ #include -namespace cricket { +namespace webrtc { // The official registry of RTP parameters is at // http://www.iana.org/assignments/rtp-parameters/rtp-parameters.xml @@ -72,7 +72,7 @@ bool IsRtpProtocol(absl::string_view protocol) { if (protocol.empty()) { return true; } - size_t pos = protocol.find(cricket::kMediaProtocolRtpPrefix); + size_t pos = protocol.find(kMediaProtocolRtpPrefix); if (pos == std::string::npos) { return false; } @@ -102,4 +102,4 @@ bool IsPlainRtp(absl::string_view protocol) { protocol == kMediaProtocolSavp || protocol == kMediaProtocolAvp; } -} // namespace cricket +} // namespace webrtc diff --git a/pc/media_protocol_names.h b/pc/media_protocol_names.h index 989c1dab6c..3d34348736 100644 --- a/pc/media_protocol_names.h +++ b/pc/media_protocol_names.h @@ -13,7 +13,7 @@ #include "absl/strings/string_view.h" -namespace cricket { +namespace webrtc { // Names or name prefixes of protocols as defined by SDP specifications, // and generated in SDP produced by WebRTC. @@ -42,6 +42,26 @@ bool IsPlainRtp(absl::string_view protocol); // Returns true if the given media protocol is encrypted RTP bool IsDtlsRtp(absl::string_view protocol); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::IsDtlsRtp; +using ::webrtc::IsDtlsSctp; +using ::webrtc::IsPlainRtp; +using ::webrtc::IsPlainSctp; +using ::webrtc::IsRtpProtocol; +using ::webrtc::IsSctpProtocol; +using ::webrtc::kMediaProtocolAvpf; +using ::webrtc::kMediaProtocolDtlsSavpf; +using ::webrtc::kMediaProtocolDtlsSctp; +using ::webrtc::kMediaProtocolSavpf; +using ::webrtc::kMediaProtocolSctp; +using ::webrtc::kMediaProtocolTcpDtlsSctp; +using ::webrtc::kMediaProtocolUdpDtlsSctp; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // PC_MEDIA_PROTOCOL_NAMES_H_ diff --git a/pc/media_session.cc b/pc/media_session.cc index a2ea39f890..cd03330175 100644 --- a/pc/media_session.cc +++ b/pc/media_session.cc @@ -13,60 +13,62 @@ #include #include -#include +#include +#include #include -#include #include +#include #include "absl/algorithm/container.h" #include "absl/strings/match.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/crypto_params.h" +#include "api/field_trials_view.h" +#include "api/media_types.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/rtp_transceiver_direction.h" +#include "api/sctp_transport_interface.h" #include "media/base/codec.h" #include "media/base/media_constants.h" #include "media/base/media_engine.h" -#include "media/base/sdp_video_format_utils.h" -#include "media/sctp/sctp_transport_internal.h" +#include "media/base/rid_description.h" +#include "media/base/stream_params.h" +#include "p2p/base/ice_credentials_iterator.h" #include "p2p/base/p2p_constants.h" +#include "p2p/base/transport_description.h" +#include "p2p/base/transport_description_factory.h" +#include "p2p/base/transport_info.h" +#include "pc/codec_vendor.h" +#include "pc/media_options.h" #include "pc/media_protocol_names.h" #include "pc/rtp_media_utils.h" +#include "pc/session_description.h" +#include "pc/simulcast_description.h" #include "pc/used_ids.h" #include "rtc_base/checks.h" -#include "rtc_base/helpers.h" #include "rtc_base/logging.h" -#include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/string_encode.h" -#include "rtc_base/third_party/base64/base64.h" #include "rtc_base/unique_id_generator.h" +#ifdef RTC_ENABLE_H265 +#endif + namespace { -using rtc::UniqueRandomIdGenerator; +using webrtc::RTCError; +using webrtc::RTCErrorType; using webrtc::RtpTransceiverDirection; - -const char kInline[] = "inline:"; - -void GetSupportedSdesCryptoSuiteNames( - void (*func)(const webrtc::CryptoOptions&, std::vector*), - const webrtc::CryptoOptions& crypto_options, - std::vector* names) { - std::vector crypto_suites; - func(crypto_options, &crypto_suites); - for (const auto crypto : crypto_suites) { - names->push_back(rtc::SrtpCryptoSuiteToName(crypto)); - } -} +using webrtc::UniqueRandomIdGenerator; webrtc::RtpExtension RtpExtensionFromCapability( const webrtc::RtpHeaderExtensionCapability& capability) { return webrtc::RtpExtension(capability.uri, - capability.preferred_id.value_or(1)); + capability.preferred_id.value_or(1), + capability.preferred_encrypt); } -cricket::RtpHeaderExtensions RtpHeaderExtensionsFromCapabilities( +webrtc::RtpHeaderExtensions RtpHeaderExtensionsFromCapabilities( const std::vector& capabilities) { - cricket::RtpHeaderExtensions exts; + webrtc::RtpHeaderExtensions exts; for (const auto& capability : capabilities) { exts.push_back(RtpExtensionFromCapability(capability)); } @@ -87,22 +89,20 @@ UnstoppedRtpHeaderExtensionCapabilities( } bool 
IsCapabilityPresent(const webrtc::RtpHeaderExtensionCapability& capability, - const cricket::RtpHeaderExtensions& extensions) { + const webrtc::RtpHeaderExtensions& extensions) { return std::find_if(extensions.begin(), extensions.end(), [&capability](const webrtc::RtpExtension& extension) { return capability.uri == extension.uri; }) != extensions.end(); } -cricket::RtpHeaderExtensions UnstoppedOrPresentRtpHeaderExtensions( +webrtc::RtpHeaderExtensions UnstoppedOrPresentRtpHeaderExtensions( const std::vector& capabilities, - const cricket::RtpHeaderExtensions& unencrypted, - const cricket::RtpHeaderExtensions& encrypted) { - cricket::RtpHeaderExtensions extensions; + const webrtc::RtpHeaderExtensions& all_encountered_extensions) { + webrtc::RtpHeaderExtensions extensions; for (const auto& capability : capabilities) { if (capability.direction != RtpTransceiverDirection::kStopped || - IsCapabilityPresent(capability, unencrypted) || - IsCapabilityPresent(capability, encrypted)) { + IsCapabilityPresent(capability, all_encountered_extensions)) { extensions.push_back(RtpExtensionFromCapability(capability)); } } @@ -111,55 +111,27 @@ cricket::RtpHeaderExtensions UnstoppedOrPresentRtpHeaderExtensions( } // namespace -namespace cricket { - -static bool IsRtxCodec(const Codec& codec) { - return absl::EqualsIgnoreCase(codec.name, kRtxCodecName); -} - -static bool IsRtxCodec(const webrtc::RtpCodecCapability& capability) { - return absl::EqualsIgnoreCase(capability.name, kRtxCodecName); -} - -static bool ContainsRtxCodec(const std::vector& codecs) { - for (const auto& codec : codecs) { - if (IsRtxCodec(codec)) { - return true; - } - } - return false; -} - -static bool IsRedCodec(const Codec& codec) { - return absl::EqualsIgnoreCase(codec.name, kRedCodecName); -} +namespace webrtc { -static bool IsRedCodec(const webrtc::RtpCodecCapability& capability) { - return absl::EqualsIgnoreCase(capability.name, kRedCodecName); -} - -static bool IsFlexfecCodec(const Codec& codec) { - return absl::EqualsIgnoreCase(codec.name, kFlexfecCodecName); -} +namespace { -static bool ContainsFlexfecCodec(const std::vector& codecs) { - for (const auto& codec : codecs) { - if (IsFlexfecCodec(codec)) { - return true; - } - } - return false; +bool ContainsRtxCodec(const std::vector& codecs) { + return absl::c_find_if(codecs, [](const Codec& c) { + return c.GetResiliencyType() == Codec::ResiliencyType::kRtx; + }) != codecs.end(); } -static bool IsUlpfecCodec(const Codec& codec) { - return absl::EqualsIgnoreCase(codec.name, kUlpfecCodecName); +bool ContainsFlexfecCodec(const std::vector& codecs) { + return absl::c_find_if(codecs, [](const Codec& c) { + return c.GetResiliencyType() == Codec::ResiliencyType::kFlexfec; + }) != codecs.end(); } -static bool IsComfortNoiseCodec(const Codec& codec) { +bool IsComfortNoiseCodec(const Codec& codec) { return absl::EqualsIgnoreCase(codec.name, kComfortNoiseCodecName); } -static RtpTransceiverDirection NegotiateRtpTransceiverDirection( +RtpTransceiverDirection NegotiateRtpTransceiverDirection( RtpTransceiverDirection offer, RtpTransceiverDirection wants) { bool offer_send = webrtc::RtpTransceiverDirectionHasSend(offer); @@ -170,167 +142,16 @@ static RtpTransceiverDirection NegotiateRtpTransceiverDirection( offer_send && wants_recv); } -static bool IsMediaContentOfType(const ContentInfo* content, - MediaType media_type) { +bool IsMediaContentOfType(const ContentInfo* content, + webrtc::MediaType media_type) { if (!content || !content->media_description()) { return false; } return 
content->media_description()->type() == media_type; } -static bool CreateCryptoParams(int tag, - const std::string& cipher, - CryptoParams* crypto_out) { - int key_len; - int salt_len; - if (!rtc::GetSrtpKeyAndSaltLengths(rtc::SrtpCryptoSuiteFromName(cipher), - &key_len, &salt_len)) { - return false; - } - - int master_key_len = key_len + salt_len; - std::string master_key; - if (!rtc::CreateRandomData(master_key_len, &master_key)) { - return false; - } - - RTC_CHECK_EQ(master_key_len, master_key.size()); - std::string key = rtc::Base64::Encode(master_key); - - crypto_out->tag = tag; - crypto_out->crypto_suite = cipher; - crypto_out->key_params = kInline; - crypto_out->key_params += key; - return true; -} - -static bool AddCryptoParams(const std::string& crypto_suite, - CryptoParamsVec* cryptos_out) { - int size = static_cast(cryptos_out->size()); - - cryptos_out->resize(size + 1); - return CreateCryptoParams(size, crypto_suite, &cryptos_out->at(size)); -} - -void AddMediaCryptos(const CryptoParamsVec& cryptos, - MediaContentDescription* media) { - for (const CryptoParams& crypto : cryptos) { - media->AddCrypto(crypto); - } -} - -bool CreateMediaCryptos(const std::vector& crypto_suites, - MediaContentDescription* media) { - CryptoParamsVec cryptos; - for (const std::string& crypto_suite : crypto_suites) { - if (!AddCryptoParams(crypto_suite, &cryptos)) { - return false; - } - } - AddMediaCryptos(cryptos, media); - return true; -} - -const CryptoParamsVec* GetCryptos(const ContentInfo* content) { - if (!content || !content->media_description()) { - return nullptr; - } - return &content->media_description()->cryptos(); -} - -bool FindMatchingCrypto(const CryptoParamsVec& cryptos, - const CryptoParams& crypto, - CryptoParams* crypto_out) { - auto it = absl::c_find_if( - cryptos, [&crypto](const CryptoParams& c) { return crypto.Matches(c); }); - if (it == cryptos.end()) { - return false; - } - *crypto_out = *it; - return true; -} - -// For audio, HMAC 32 (if enabled) is prefered over HMAC 80 because of the -// low overhead. 
-void GetSupportedAudioSdesCryptoSuites( - const webrtc::CryptoOptions& crypto_options, - std::vector* crypto_suites) { - if (crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher) { - crypto_suites->push_back(rtc::kSrtpAes128CmSha1_32); - } - crypto_suites->push_back(rtc::kSrtpAes128CmSha1_80); - if (crypto_options.srtp.enable_gcm_crypto_suites) { - crypto_suites->push_back(rtc::kSrtpAeadAes256Gcm); - crypto_suites->push_back(rtc::kSrtpAeadAes128Gcm); - } -} - -void GetSupportedAudioSdesCryptoSuiteNames( - const webrtc::CryptoOptions& crypto_options, - std::vector* crypto_suite_names) { - GetSupportedSdesCryptoSuiteNames(GetSupportedAudioSdesCryptoSuites, - crypto_options, crypto_suite_names); -} - -void GetSupportedVideoSdesCryptoSuites( - const webrtc::CryptoOptions& crypto_options, - std::vector* crypto_suites) { - crypto_suites->push_back(rtc::kSrtpAes128CmSha1_80); - if (crypto_options.srtp.enable_gcm_crypto_suites) { - crypto_suites->push_back(rtc::kSrtpAeadAes256Gcm); - crypto_suites->push_back(rtc::kSrtpAeadAes128Gcm); - } -} - -void GetSupportedVideoSdesCryptoSuiteNames( - const webrtc::CryptoOptions& crypto_options, - std::vector* crypto_suite_names) { - GetSupportedSdesCryptoSuiteNames(GetSupportedVideoSdesCryptoSuites, - crypto_options, crypto_suite_names); -} - -void GetSupportedDataSdesCryptoSuites( - const webrtc::CryptoOptions& crypto_options, - std::vector* crypto_suites) { - crypto_suites->push_back(rtc::kSrtpAes128CmSha1_80); - if (crypto_options.srtp.enable_gcm_crypto_suites) { - crypto_suites->push_back(rtc::kSrtpAeadAes256Gcm); - crypto_suites->push_back(rtc::kSrtpAeadAes128Gcm); - } -} - -void GetSupportedDataSdesCryptoSuiteNames( - const webrtc::CryptoOptions& crypto_options, - std::vector* crypto_suite_names) { - GetSupportedSdesCryptoSuiteNames(GetSupportedDataSdesCryptoSuites, - crypto_options, crypto_suite_names); -} - -// Support any GCM cipher (if enabled through options). For video support only -// 80-bit SHA1 HMAC. For audio 32-bit HMAC is tolerated (if enabled) unless -// bundle is enabled because it is low overhead. -// Pick the crypto in the list that is supported. -static bool SelectCrypto(const MediaContentDescription* offer, - bool bundle, - const webrtc::CryptoOptions& crypto_options, - CryptoParams* crypto_out) { - bool audio = offer->type() == MEDIA_TYPE_AUDIO; - const CryptoParamsVec& cryptos = offer->cryptos(); - - for (const CryptoParams& crypto : cryptos) { - if ((crypto_options.srtp.enable_gcm_crypto_suites && - rtc::IsGcmCryptoSuiteName(crypto.crypto_suite)) || - rtc::kCsAesCm128HmacSha1_80 == crypto.crypto_suite || - (rtc::kCsAesCm128HmacSha1_32 == crypto.crypto_suite && audio && - !bundle && crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher)) { - return CreateCryptoParams(crypto.tag, crypto.crypto_suite, crypto_out); - } - } - return false; -} - // Finds all StreamParams of all media types and attach them to stream_params. 
-static StreamParamsVec GetCurrentStreamParams( +StreamParamsVec GetCurrentStreamParams( const std::vector& active_local_contents) { StreamParamsVec stream_params; for (const ContentInfo* content : active_local_contents) { @@ -341,13 +162,13 @@ static StreamParamsVec GetCurrentStreamParams( return stream_params; } -static StreamParams CreateStreamParamsForNewSenderWithSsrcs( +StreamParams CreateStreamParamsForNewSenderWithSsrcs( const SenderOptions& sender, const std::string& rtcp_cname, bool include_rtx_streams, bool include_flexfec_stream, UniqueRandomIdGenerator* ssrc_generator, - const webrtc::FieldTrialsView& field_trials) { + const FieldTrialsView& field_trials) { StreamParams result; result.id = sender.track_id; @@ -374,9 +195,8 @@ static StreamParams CreateStreamParamsForNewSenderWithSsrcs( return result; } -static bool ValidateSimulcastLayers( - const std::vector& rids, - const SimulcastLayerList& simulcast_layers) { +bool ValidateSimulcastLayers(const std::vector& rids, + const SimulcastLayerList& simulcast_layers) { return absl::c_all_of( simulcast_layers.GetAllLayers(), [&rids](const SimulcastLayer& layer) { return absl::c_any_of(rids, [&layer](const RidDescription& rid) { @@ -385,7 +205,7 @@ static bool ValidateSimulcastLayers( }); } -static StreamParams CreateStreamParamsForNewSenderWithRids( +StreamParams CreateStreamParamsForNewSenderWithRids( const SenderOptions& sender, const std::string& rtcp_cname) { RTC_DCHECK(!sender.rids.empty()); @@ -407,7 +227,7 @@ static StreamParams CreateStreamParamsForNewSenderWithRids( // Adds SimulcastDescription if indicated by the media description options. // MediaContentDescription should already be set up with the send rids. -static void AddSimulcastToMediaDescription( +void AddSimulcastToMediaDescription( const MediaDescriptionOptions& media_description_options, MediaContentDescription* description) { RTC_DCHECK(description); @@ -422,8 +242,8 @@ static void AddSimulcastToMediaDescription( RTC_DCHECK_EQ(1, description->streams().size()) << "RIDs are only supported in Unified Plan semantics."; RTC_DCHECK_EQ(1, media_description_options.sender_options.size()); - RTC_DCHECK(description->type() == MediaType::MEDIA_TYPE_AUDIO || - description->type() == MediaType::MEDIA_TYPE_VIDEO); + RTC_DCHECK(description->type() == webrtc::MediaType::AUDIO || + description->type() == webrtc::MediaType::VIDEO); // One RID or less indicates that simulcast is not needed. if (description->streams()[0].rids().size() <= 1) { @@ -440,13 +260,12 @@ static void AddSimulcastToMediaDescription( // Adds a StreamParams for each SenderOptions in `sender_options` to // content_description. // `current_params` - All currently known StreamParams of any media type. -template -static bool AddStreamParams(const std::vector& sender_options, - const std::string& rtcp_cname, - UniqueRandomIdGenerator* ssrc_generator, - StreamParamsVec* current_streams, - MediaContentDescriptionImpl* content_description, - const webrtc::FieldTrialsView& field_trials) { +bool AddStreamParams(const std::vector& sender_options, + const std::string& rtcp_cname, + UniqueRandomIdGenerator* ssrc_generator, + StreamParamsVec* current_streams, + MediaContentDescription* content_description, + const FieldTrialsView& field_trials) { // SCTP streams are not negotiated using SDP/ContentDescriptions. if (IsSctpProtocol(content_description->protocol())) { return true; @@ -493,8 +312,8 @@ static bool AddStreamParams(const std::vector& sender_options, // `bundle_group`. 
The transport infos of the content names within the // `bundle_group` should be updated to use the ufrag, pwd and DTLS role of the // first content within the `bundle_group`. -static bool UpdateTransportInfoForBundle(const ContentGroup& bundle_group, - SessionDescription* sdesc) { +bool UpdateTransportInfoForBundle(const ContentGroup& bundle_group, + SessionDescription* sdesc) { // The bundle should not be empty. if (!sdesc || !bundle_group.FirstContentName()) { return false; @@ -526,120 +345,7 @@ static bool UpdateTransportInfoForBundle(const ContentGroup& bundle_group, return true; } -// Gets the CryptoParamsVec of the given `content_name` from `sdesc`, and -// sets it to `cryptos`. -static bool GetCryptosByName(const SessionDescription* sdesc, - const std::string& content_name, - CryptoParamsVec* cryptos) { - if (!sdesc || !cryptos) { - return false; - } - const ContentInfo* content = sdesc->GetContentByName(content_name); - if (!content || !content->media_description()) { - return false; - } - *cryptos = content->media_description()->cryptos(); - return true; -} - -// Prunes the `target_cryptos` by removing the crypto params (crypto_suite) -// which are not available in `filter`. -static void PruneCryptos(const CryptoParamsVec& filter, - CryptoParamsVec* target_cryptos) { - if (!target_cryptos) { - return; - } - - target_cryptos->erase( - std::remove_if(target_cryptos->begin(), target_cryptos->end(), - // Returns true if the `crypto`'s crypto_suite is not - // found in `filter`. - [&filter](const CryptoParams& crypto) { - for (const CryptoParams& entry : filter) { - if (entry.crypto_suite == crypto.crypto_suite) - return false; - } - return true; - }), - target_cryptos->end()); -} - -static bool IsRtpContent(SessionDescription* sdesc, - const std::string& content_name) { - bool is_rtp = false; - ContentInfo* content = sdesc->GetContentByName(content_name); - if (content && content->media_description()) { - is_rtp = IsRtpProtocol(content->media_description()->protocol()); - } - return is_rtp; -} - -// Updates the crypto parameters of the `sdesc` according to the given -// `bundle_group`. The crypto parameters of all the contents within the -// `bundle_group` should be updated to use the common subset of the -// available cryptos. -static bool UpdateCryptoParamsForBundle(const ContentGroup& bundle_group, - SessionDescription* sdesc) { - // The bundle should not be empty. - if (!sdesc || !bundle_group.FirstContentName()) { - return false; - } - - bool common_cryptos_needed = false; - // Get the common cryptos. - const ContentNames& content_names = bundle_group.content_names(); - CryptoParamsVec common_cryptos; - bool first = true; - for (const std::string& content_name : content_names) { - if (!IsRtpContent(sdesc, content_name)) { - continue; - } - // The common cryptos are needed if any of the content does not have DTLS - // enabled. - if (!sdesc->GetTransportInfoByName(content_name)->description.secure()) { - common_cryptos_needed = true; - } - if (first) { - first = false; - // Initial the common_cryptos with the first content in the bundle group. - if (!GetCryptosByName(sdesc, content_name, &common_cryptos)) { - return false; - } - if (common_cryptos.empty()) { - // If there's no crypto params, we should just return. 
- return true; - } - } else { - CryptoParamsVec cryptos; - if (!GetCryptosByName(sdesc, content_name, &cryptos)) { - return false; - } - PruneCryptos(cryptos, &common_cryptos); - } - } - - if (common_cryptos.empty() && common_cryptos_needed) { - return false; - } - - // Update to use the common cryptos. - for (const std::string& content_name : content_names) { - if (!IsRtpContent(sdesc, content_name)) { - continue; - } - ContentInfo* content = sdesc->GetContentByName(content_name); - if (IsMediaContent(content)) { - MediaContentDescription* media_desc = content->media_description(); - if (!media_desc) { - return false; - } - media_desc->set_cryptos(common_cryptos); - } - } - return true; -} - -static std::vector GetActiveContents( +std::vector GetActiveContents( const SessionDescription& description, const MediaSessionOptions& session_options) { std::vector active_contents; @@ -649,7 +355,7 @@ static std::vector GetActiveContents( const MediaDescriptionOptions& media_options = session_options.media_description_options[i]; if (!content.rejected && !media_options.stopped && - content.name == media_options.mid) { + content.mid() == media_options.mid) { active_contents.push_back(&content); } } @@ -662,27 +368,23 @@ static std::vector GetActiveContents( // crypto (in current_cryptos) and it is enabled (in secure_policy), crypto is // created (according to crypto_suites). The created content is added to the // offer. -static bool CreateContentOffer( +RTCError CreateContentOffer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, - const SecurePolicy& secure_policy, - const CryptoParamsVec* current_cryptos, - const std::vector& crypto_suites, const RtpHeaderExtensions& rtp_extensions, UniqueRandomIdGenerator* ssrc_generator, StreamParamsVec* current_streams, MediaContentDescription* offer) { offer->set_rtcp_mux(session_options.rtcp_mux_enabled); - if (offer->type() == cricket::MEDIA_TYPE_VIDEO) { - offer->set_rtcp_reduced_size(true); - } + offer->set_rtcp_reduced_size(true); // Build the vector of header extensions with directions for this // media_description's options. RtpHeaderExtensions extensions; - for (auto extension_with_id : rtp_extensions) { + for (const auto& extension_with_id : rtp_extensions) { for (const auto& extension : media_description_options.header_extensions) { - if (extension_with_id.uri == extension.uri) { + if (extension_with_id.uri == extension.uri && + extension_with_id.encrypt == extension.preferred_encrypt) { // TODO(crbug.com/1051821): Configure the extension direction from // the information in the media_description_options extension // capability. 
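Note: in the surrounding media_session.cc hunks, CreateContentOffer and CreateMediaContentOffer are converted from returning bool to returning webrtc::RTCError, so callers receive a typed error plus a message instead of a bare failure flag (the new includes above already pull in "api/rtc_error.h"). A minimal, self-contained sketch of that convention follows; ValidateMid and CheckMid are made-up illustrative helpers, not part of this patch.

// Illustrative sketch of the bool -> RTCError convention used by the
// refactored offer/answer helpers. ValidateMid is a made-up helper.
#include <string>

#include "api/rtc_error.h"

namespace {

webrtc::RTCError ValidateMid(const std::string& mid) {
  if (mid.empty()) {
    // A typed error carries both the category and a human-readable reason.
    return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
                            "mid must not be empty");
  }
  return webrtc::RTCError::OK();
}

}  // namespace

// Callers branch on ok() and can surface error.message() on failure, which a
// plain `return false;` could not convey.
bool CheckMid(const std::string& mid) {
  webrtc::RTCError error = ValidateMid(mid);
  return error.ok();
}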
@@ -696,649 +398,130 @@ static bool CreateContentOffer( AddSimulcastToMediaDescription(media_description_options, offer); - if (secure_policy != SEC_DISABLED) { - if (current_cryptos) { - AddMediaCryptos(*current_cryptos, offer); - } - if (offer->cryptos().empty()) { - if (!CreateMediaCryptos(crypto_suites, offer)) { - return false; - } - } - } - - if (secure_policy == SEC_REQUIRED && offer->cryptos().empty()) { - return false; - } - return true; + return RTCError::OK(); } -template -static bool CreateMediaContentOffer( + +RTCError CreateMediaContentOffer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, - const std::vector& codecs, - const SecurePolicy& secure_policy, - const CryptoParamsVec* current_cryptos, - const std::vector& crypto_suites, + const std::vector& codecs, const RtpHeaderExtensions& rtp_extensions, UniqueRandomIdGenerator* ssrc_generator, StreamParamsVec* current_streams, - MediaContentDescriptionImpl* offer, - const webrtc::FieldTrialsView& field_trials) { + MediaContentDescription* offer, + const FieldTrialsView& field_trials) { offer->AddCodecs(codecs); if (!AddStreamParams(media_description_options.sender_options, session_options.rtcp_cname, ssrc_generator, current_streams, offer, field_trials)) { - return false; + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to add stream parameters"); } return CreateContentOffer(media_description_options, session_options, - secure_policy, current_cryptos, crypto_suites, rtp_extensions, ssrc_generator, current_streams, offer); } -template -static bool ReferencedCodecsMatch(const std::vector& codecs1, - const int codec1_id, - const std::vector& codecs2, - const int codec2_id, - const webrtc::FieldTrialsView* field_trials) { - const C* codec1 = FindCodecById(codecs1, codec1_id); - const C* codec2 = FindCodecById(codecs2, codec2_id); - return codec1 != nullptr && codec2 != nullptr && - codec1->Matches(*codec2, field_trials); -} - -template -static void NegotiatePacketization(const C& local_codec, - const C& remote_codec, - C* negotiated_codec) {} - -template <> -void NegotiatePacketization(const VideoCodec& local_codec, - const VideoCodec& remote_codec, - VideoCodec* negotiated_codec) { - negotiated_codec->packetization = - (local_codec.packetization == remote_codec.packetization) - ? local_codec.packetization - : absl::nullopt; -} - -template -static void NegotiateCodecs(const std::vector& local_codecs, - const std::vector& offered_codecs, - std::vector* negotiated_codecs, - bool keep_offer_order, - const webrtc::FieldTrialsView* field_trials) { - for (const C& ours : local_codecs) { - absl::optional theirs = - FindMatchingCodec(local_codecs, offered_codecs, ours, field_trials); - // Note that we intentionally only find one matching codec for each of our - // local codecs, in case the remote offer contains duplicate codecs. - if (theirs) { - C negotiated = ours; - NegotiatePacketization(ours, *theirs, &negotiated); - negotiated.IntersectFeedbackParams(*theirs); - if (IsRtxCodec(negotiated)) { - const auto apt_it = - theirs->params.find(kCodecParamAssociatedPayloadType); - // FindMatchingCodec shouldn't return something with no apt value. - RTC_DCHECK(apt_it != theirs->params.end()); - negotiated.SetParam(kCodecParamAssociatedPayloadType, apt_it->second); - - // We support parsing the declarative rtx-time parameter. 
- const auto rtx_time_it = theirs->params.find(kCodecParamRtxTime); - if (rtx_time_it != theirs->params.end()) { - negotiated.SetParam(kCodecParamRtxTime, rtx_time_it->second); - } - } else if (IsRedCodec(negotiated)) { - const auto red_it = - theirs->params.find(kCodecParamNotInNameValueFormat); - if (red_it != theirs->params.end()) { - negotiated.SetParam(kCodecParamNotInNameValueFormat, red_it->second); - } - } - if (absl::EqualsIgnoreCase(ours.name, kH264CodecName)) { - webrtc::H264GenerateProfileLevelIdForAnswer(ours.params, theirs->params, - &negotiated.params); - } - negotiated.id = theirs->id; - negotiated.name = theirs->name; - negotiated_codecs->push_back(std::move(negotiated)); - } - } - if (keep_offer_order) { - // RFC3264: Although the answerer MAY list the formats in their desired - // order of preference, it is RECOMMENDED that unless there is a - // specific reason, the answerer list formats in the same relative order - // they were present in the offer. - // This can be skipped when the transceiver has any codec preferences. - std::unordered_map payload_type_preferences; - int preference = static_cast(offered_codecs.size() + 1); - for (const C& codec : offered_codecs) { - payload_type_preferences[codec.id] = preference--; - } - absl::c_sort(*negotiated_codecs, [&payload_type_preferences](const C& a, - const C& b) { - return payload_type_preferences[a.id] > payload_type_preferences[b.id]; - }); - } -} - -// Finds a codec in `codecs2` that matches `codec_to_match`, which is -// a member of `codecs1`. If `codec_to_match` is an RED or RTX codec, both -// the codecs themselves and their associated codecs must match. -template -static absl::optional FindMatchingCodec( - const std::vector& codecs1, - const std::vector& codecs2, - const C& codec_to_match, - const webrtc::FieldTrialsView* field_trials) { - // `codec_to_match` should be a member of `codecs1`, in order to look up - // RED/RTX codecs' associated codecs correctly. If not, that's a programming - // error. - RTC_DCHECK(absl::c_any_of(codecs1, [&codec_to_match](const C& codec) { - return &codec == &codec_to_match; - })); - for (const C& potential_match : codecs2) { - if (potential_match.Matches(codec_to_match, field_trials)) { - if (IsRtxCodec(codec_to_match)) { - int apt_value_1 = 0; - int apt_value_2 = 0; - if (!codec_to_match.GetParam(kCodecParamAssociatedPayloadType, - &apt_value_1) || - !potential_match.GetParam(kCodecParamAssociatedPayloadType, - &apt_value_2)) { - RTC_LOG(LS_WARNING) << "RTX missing associated payload type."; - continue; - } - if (!ReferencedCodecsMatch(codecs1, apt_value_1, codecs2, apt_value_2, - field_trials)) { - continue; - } - } else if (IsRedCodec(codec_to_match)) { - auto red_parameters_1 = - codec_to_match.params.find(kCodecParamNotInNameValueFormat); - auto red_parameters_2 = - potential_match.params.find(kCodecParamNotInNameValueFormat); - bool has_parameters_1 = red_parameters_1 != codec_to_match.params.end(); - bool has_parameters_2 = - red_parameters_2 != potential_match.params.end(); - if (has_parameters_1 && has_parameters_2) { - // Mixed reference codecs (i.e. 111/112) are not supported. - // Different levels of redundancy between offer and answer are - // since RED is considered to be declarative. 
- std::vector redundant_payloads_1 = - rtc::split(red_parameters_1->second, '/'); - std::vector redundant_payloads_2 = - rtc::split(red_parameters_2->second, '/'); - if (redundant_payloads_1.size() > 0 && - redundant_payloads_2.size() > 0) { - bool consistent = true; - for (size_t i = 1; i < redundant_payloads_1.size(); i++) { - if (redundant_payloads_1[i] != redundant_payloads_1[0]) { - consistent = false; - break; - } - } - for (size_t i = 1; i < redundant_payloads_2.size(); i++) { - if (redundant_payloads_2[i] != redundant_payloads_2[0]) { - consistent = false; - break; - } - } - if (!consistent) { - continue; - } - - int red_value_1; - int red_value_2; - if (rtc::FromString(redundant_payloads_1[0], &red_value_1) && - rtc::FromString(redundant_payloads_2[0], &red_value_2)) { - if (!ReferencedCodecsMatch(codecs1, red_value_1, codecs2, - red_value_2, field_trials)) { - continue; - } - } - } - } else if (has_parameters_1 != has_parameters_2) { - continue; - } - } - return potential_match; - } - } - return absl::nullopt; -} - -// Find the codec in `codec_list` that `rtx_codec` is associated with. -template -static const C* GetAssociatedCodecForRtx(const std::vector& codec_list, - const C& rtx_codec) { - std::string associated_pt_str; - if (!rtx_codec.GetParam(kCodecParamAssociatedPayloadType, - &associated_pt_str)) { - RTC_LOG(LS_WARNING) << "RTX codec " << rtx_codec.name - << " is missing an associated payload type."; - return nullptr; - } - - int associated_pt; - if (!rtc::FromString(associated_pt_str, &associated_pt)) { - RTC_LOG(LS_WARNING) << "Couldn't convert payload type " << associated_pt_str - << " of RTX codec " << rtx_codec.name - << " to an integer."; - return nullptr; - } - - // Find the associated codec for the RTX codec. - const C* associated_codec = FindCodecById(codec_list, associated_pt); - if (!associated_codec) { - RTC_LOG(LS_WARNING) << "Couldn't find associated codec with payload type " - << associated_pt << " for RTX codec " << rtx_codec.name - << "."; - } - return associated_codec; -} - -// Find the codec in `codec_list` that `red_codec` is associated with. -template -static const C* GetAssociatedCodecForRed(const std::vector& codec_list, - const C& red_codec) { - std::string fmtp; - if (!red_codec.GetParam(kCodecParamNotInNameValueFormat, &fmtp)) { - // Normal for video/RED. - if constexpr (std::is_same_v) { - RTC_LOG(LS_WARNING) << "RED codec " << red_codec.name - << " is missing an associated payload type."; - } - return nullptr; - } - - std::vector redundant_payloads = rtc::split(fmtp, '/'); - if (redundant_payloads.size() < 2) { - return nullptr; - } - - absl::string_view associated_pt_str = redundant_payloads[0]; - int associated_pt; - if (!rtc::FromString(associated_pt_str, &associated_pt)) { - RTC_LOG(LS_WARNING) << "Couldn't convert first payload type " - << associated_pt_str << " of RED codec " - << red_codec.name << " to an integer."; - return nullptr; - } - - // Find the associated codec for the RED codec. - const C* associated_codec = FindCodecById(codec_list, associated_pt); - if (!associated_codec) { - RTC_LOG(LS_WARNING) << "Couldn't find associated codec with payload type " - << associated_pt << " for RED codec " << red_codec.name - << "."; - } - return associated_codec; -} - -// Adds all codecs from `reference_codecs` to `offered_codecs` that don't -// already exist in `offered_codecs` and ensure the payload types don't -// collide. 
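The RED handling above parses the codec's fmtp value (e.g. "111/111") and only treats two RED codecs as matching when every listed redundancy level refers to the same primary payload type. A simplified standalone sketch of that consistency check; the helper names are invented, and the real code uses rtc::split and rtc::FromString instead:

// red_fmtp_sketch.cc - a minimal sketch of the RED fmtp consistency check;
// not WebRTC code.
#include <cstdio>
#include <optional>
#include <sstream>
#include <string>
#include <vector>

// Splits an a=fmtp value such as "111/111" into its payload-type components.
std::vector<std::string> Split(const std::string& s, char delim) {
  std::vector<std::string> out;
  std::stringstream ss(s);
  std::string item;
  while (std::getline(ss, item, delim)) out.push_back(item);
  return out;
}

// Returns the primary payload type if every listed redundancy level refers to
// the same payload type; otherwise nullopt (mixed references are unsupported).
std::optional<int> PrimaryRedPayloadType(const std::string& fmtp) {
  std::vector<std::string> parts = Split(fmtp, '/');
  if (parts.empty()) return std::nullopt;
  for (const std::string& p : parts) {
    if (p != parts[0]) return std::nullopt;
  }
  return std::stoi(parts[0]);
}

int main() {
  std::printf("%d\n", PrimaryRedPayloadType("111/111").value_or(-1));  // 111
  std::printf("%d\n", PrimaryRedPayloadType("111/112").value_or(-1));  // -1
  return 0;
}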
-template -static void MergeCodecs(const std::vector& reference_codecs, - std::vector* offered_codecs, - UsedPayloadTypes* used_pltypes, - const webrtc::FieldTrialsView* field_trials) { - // Add all new codecs that are not RTX/RED codecs. - // The two-pass splitting of the loops means preferring payload types - // of actual codecs with respect to collisions. - for (const C& reference_codec : reference_codecs) { - if (!IsRtxCodec(reference_codec) && !IsRedCodec(reference_codec) && - !FindMatchingCodec(reference_codecs, *offered_codecs, - reference_codec, field_trials)) { - C codec = reference_codec; - used_pltypes->FindAndSetIdUsed(&codec); - offered_codecs->push_back(codec); - } - } - - // Add all new RTX or RED codecs. - for (const C& reference_codec : reference_codecs) { - if (IsRtxCodec(reference_codec) && - !FindMatchingCodec(reference_codecs, *offered_codecs, - reference_codec, field_trials)) { - C rtx_codec = reference_codec; - const C* associated_codec = - GetAssociatedCodecForRtx(reference_codecs, rtx_codec); - if (!associated_codec) { - continue; - } - // Find a codec in the offered list that matches the reference codec. - // Its payload type may be different than the reference codec. - absl::optional matching_codec = FindMatchingCodec( - reference_codecs, *offered_codecs, *associated_codec, field_trials); - if (!matching_codec) { - RTC_LOG(LS_WARNING) - << "Couldn't find matching " << associated_codec->name << " codec."; - continue; - } - - rtx_codec.params[kCodecParamAssociatedPayloadType] = - rtc::ToString(matching_codec->id); - used_pltypes->FindAndSetIdUsed(&rtx_codec); - offered_codecs->push_back(rtx_codec); - } else if (IsRedCodec(reference_codec) && - !FindMatchingCodec(reference_codecs, *offered_codecs, - reference_codec, field_trials)) { - C red_codec = reference_codec; - const C* associated_codec = - GetAssociatedCodecForRed(reference_codecs, red_codec); - if (associated_codec) { - absl::optional matching_codec = FindMatchingCodec( - reference_codecs, *offered_codecs, *associated_codec, field_trials); - if (!matching_codec) { - RTC_LOG(LS_WARNING) << "Couldn't find matching " - << associated_codec->name << " codec."; - continue; - } - - red_codec.params[kCodecParamNotInNameValueFormat] = - rtc::ToString(matching_codec->id) + "/" + - rtc::ToString(matching_codec->id); - } - used_pltypes->FindAndSetIdUsed(&red_codec); - offered_codecs->push_back(red_codec); - } - } -} - -// `codecs` is a full list of codecs with correct payload type mappings, which -// don't conflict with mappings of the other media type; `supported_codecs` is -// a list filtered for the media section`s direction but with default payload -// types. 
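MergeCodecs, removed above, merges reference codecs into the offered list in two passes so that plain codecs claim payload types before RTX/RED, which keeps a collision from displacing an actual codec. The standalone sketch below shows only that two-pass collision idea; it omits the duplicate matching and the apt/fmtp rewriting the real code performs, and its types are invented:

// merge_codecs_sketch.cc - invented types; the real MergeCodecs also skips
// codecs that already have a match in the offered list and rewrites the RTX
// apt / RED fmtp parameters to point at the merged payload types.
#include <cstdio>
#include <set>
#include <string>
#include <vector>

struct Codec {
  std::string name;
  int id;  // payload type
};

bool IsRtxOrRed(const Codec& c) { return c.name == "rtx" || c.name == "red"; }

// If the codec's payload type is already taken, remap it to a free one.
void AssignFreeIdIfTaken(Codec* c, std::set<int>* used) {
  if (!used->insert(c->id).second) {
    int id = 96;  // search the dynamic payload type range
    while (used->count(id)) ++id;
    c->id = id;
    used->insert(id);
  }
}

// Two passes: plain codecs first, RTX/RED second, so that a payload type
// collision is resolved in favour of the actual codec.
void Merge(const std::vector<Codec>& reference, std::vector<Codec>* offered,
           std::set<int>* used) {
  for (const Codec& c : reference) {
    if (IsRtxOrRed(c)) continue;
    Codec copy = c;
    AssignFreeIdIfTaken(&copy, used);
    offered->push_back(copy);
  }
  for (const Codec& c : reference) {
    if (!IsRtxOrRed(c)) continue;
    Codec copy = c;
    AssignFreeIdIfTaken(&copy, used);
    offered->push_back(copy);
  }
}

int main() {
  std::vector<Codec> offered = {{"opus", 111}};
  std::set<int> used = {111};
  Merge({{"rtx", 112}, {"VP8", 112}}, &offered, &used);  // both want PT 112
  for (const Codec& c : offered) {
    std::printf("%s %d\n", c.name.c_str(), c.id);  // VP8 keeps 112, rtx moves
  }
  return 0;
}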
-template -static Codecs MatchCodecPreference( - const std::vector& codec_preferences, - const Codecs& codecs, - const Codecs& supported_codecs, - const webrtc::FieldTrialsView* field_trials) { - Codecs filtered_codecs; - bool want_rtx = false; - bool want_red = false; - - for (const auto& codec_preference : codec_preferences) { - if (IsRtxCodec(codec_preference)) { - want_rtx = true; - } else if (IsRedCodec(codec_preference)) { - want_red = true; - } - } - for (const auto& codec_preference : codec_preferences) { - auto found_codec = absl::c_find_if( - supported_codecs, - [&codec_preference](const typename Codecs::value_type& codec) { - webrtc::RtpCodecParameters codec_parameters = - codec.ToCodecParameters(); - return codec_parameters.name == codec_preference.name && - codec_parameters.kind == codec_preference.kind && - codec_parameters.num_channels == - codec_preference.num_channels && - codec_parameters.clock_rate == codec_preference.clock_rate && - codec_parameters.parameters == codec_preference.parameters; - }); - - if (found_codec != supported_codecs.end()) { - absl::optional found_codec_with_correct_pt = - FindMatchingCodec(supported_codecs, codecs, *found_codec, - field_trials); - if (found_codec_with_correct_pt) { - filtered_codecs.push_back(*found_codec_with_correct_pt); - std::string id = rtc::ToString(found_codec_with_correct_pt->id); - // Search for the matching rtx or red codec. - if (want_red || want_rtx) { - for (const auto& codec : codecs) { - if (IsRtxCodec(codec)) { - const auto apt = - codec.params.find(cricket::kCodecParamAssociatedPayloadType); - if (apt != codec.params.end() && apt->second == id) { - filtered_codecs.push_back(codec); - break; - } - } else if (IsRedCodec(codec)) { - // For RED, do not insert the codec again if it was already - // inserted. audio/red for opus gets enabled by having RED before - // the primary codec. - const auto fmtp = - codec.params.find(cricket::kCodecParamNotInNameValueFormat); - if (fmtp != codec.params.end()) { - std::vector redundant_payloads = - rtc::split(fmtp->second, '/'); - if (redundant_payloads.size() > 0 && - redundant_payloads[0] == id) { - if (std::find(filtered_codecs.begin(), filtered_codecs.end(), - codec) == filtered_codecs.end()) { - filtered_codecs.push_back(codec); - } - break; - } - } - } - } - } - } - } - } - - return filtered_codecs; -} - -// Compute the union of `codecs1` and `codecs2`. -template -std::vector ComputeCodecsUnion(const std::vector& codecs1, - const std::vector& codecs2, - const webrtc::FieldTrialsView* field_trials) { - std::vector all_codecs; - UsedPayloadTypes used_payload_types; - for (const C& codec : codecs1) { - C codec_mutable = codec; - used_payload_types.FindAndSetIdUsed(&codec_mutable); - all_codecs.push_back(codec_mutable); - } - - // Use MergeCodecs to merge the second half of our list as it already checks - // and fixes problems with duplicate payload types. - MergeCodecs(codecs2, &all_codecs, &used_payload_types, field_trials); - - return all_codecs; -} - // Adds all extensions from `reference_extensions` to `offered_extensions` that -// don't already exist in `offered_extensions` and ensure the IDs don't -// collide. If an extension is added, it's also added to `regular_extensions` or -// `encrypted_extensions`, and if the extension is in `regular_extensions` or -// `encrypted_extensions`, its ID is marked as used in `used_ids`. -// `offered_extensions` is for either audio or video while `regular_extensions` -// and `encrypted_extensions` are used for both audio and video. 
There could be +// don't already exist in `offered_extensions` and ensures the IDs don't +// collide. If an extension is added, it's also added to +// `all_encountered_extensions`. Also when doing the addition a new ID is set +// for that extension. `offered_extensions` is for either audio or video while +// `all_encountered_extensions` is used for both audio and video. There could be // overlap between audio extensions and video extensions. -static void MergeRtpHdrExts(const RtpHeaderExtensions& reference_extensions, - RtpHeaderExtensions* offered_extensions, - RtpHeaderExtensions* regular_extensions, - RtpHeaderExtensions* encrypted_extensions, - UsedRtpHeaderExtensionIds* used_ids) { +void MergeRtpHdrExts(const RtpHeaderExtensions& reference_extensions, + bool enable_encrypted_rtp_header_extensions, + RtpHeaderExtensions* offered_extensions, + RtpHeaderExtensions* all_encountered_extensions, + UsedRtpHeaderExtensionIds* used_ids) { for (auto reference_extension : reference_extensions) { - if (!webrtc::RtpExtension::FindHeaderExtensionByUriAndEncryption( + if (!RtpExtension::FindHeaderExtensionByUriAndEncryption( *offered_extensions, reference_extension.uri, reference_extension.encrypt)) { - if (reference_extension.encrypt) { - const webrtc::RtpExtension* existing = - webrtc::RtpExtension::FindHeaderExtensionByUriAndEncryption( - *encrypted_extensions, reference_extension.uri, - reference_extension.encrypt); - if (existing) { - offered_extensions->push_back(*existing); - } else { - used_ids->FindAndSetIdUsed(&reference_extension); - encrypted_extensions->push_back(reference_extension); - offered_extensions->push_back(reference_extension); - } + if (reference_extension.encrypt && + !enable_encrypted_rtp_header_extensions) { + // Negotiating of encrypted headers is deactivated. + continue; + } + const RtpExtension* existing = + RtpExtension::FindHeaderExtensionByUriAndEncryption( + *all_encountered_extensions, reference_extension.uri, + reference_extension.encrypt); + if (existing) { + // E.g. in the case where the same RTP header extension is used for + // audio and video. + offered_extensions->push_back(*existing); } else { - const webrtc::RtpExtension* existing = - webrtc::RtpExtension::FindHeaderExtensionByUriAndEncryption( - *regular_extensions, reference_extension.uri, - reference_extension.encrypt); - if (existing) { - offered_extensions->push_back(*existing); - } else { - used_ids->FindAndSetIdUsed(&reference_extension); - regular_extensions->push_back(reference_extension); - offered_extensions->push_back(reference_extension); - } + used_ids->FindAndSetIdUsed(&reference_extension); + all_encountered_extensions->push_back(reference_extension); + offered_extensions->push_back(reference_extension); } } } } -static void AddEncryptedVersionsOfHdrExts( - RtpHeaderExtensions* offered_extensions, - RtpHeaderExtensions* encrypted_extensions, - UsedRtpHeaderExtensionIds* used_ids) { - RtpHeaderExtensions encrypted_extensions_to_add; - for (const auto& extension : *offered_extensions) { - // Skip existing encrypted offered extension - if (extension.encrypt) { - continue; - } - - // Skip if we cannot encrypt the extension - if (!webrtc::RtpExtension::IsEncryptionSupported(extension.uri)) { - continue; - } - - // Skip if an encrypted extension with that URI already exists in the - // offered extensions. 
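The rewritten MergeRtpHdrExts (the + lines above) tracks every extension seen so far in all_encountered_extensions, so that the same URI reuses one ID across audio and video while a new URI gets a fresh, unused ID. A small standalone sketch of that reuse-or-allocate rule with simplified types; the real code additionally distinguishes one-byte and two-byte ID ranges and the encrypted-extension policy:

// merge_hdrext_sketch.cc - simplified stand-in types, not the patch's classes.
#include <cstdio>
#include <set>
#include <string>
#include <vector>

struct Extension {
  std::string uri;
  int id;
  bool encrypt;
};

const Extension* FindByUriAndEncryption(const std::vector<Extension>& list,
                                        const std::string& uri, bool encrypt) {
  for (const Extension& e : list) {
    if (e.uri == uri && e.encrypt == encrypt) return &e;
  }
  return nullptr;
}

// Adds `reference` extensions to `offered`: a URI already seen in another
// media section reuses its ID, a new URI gets the next unused ID.
void Merge(const std::vector<Extension>& reference,
           std::vector<Extension>* offered,
           std::vector<Extension>* all_encountered,
           std::set<int>* used_ids) {
  for (Extension ext : reference) {
    if (FindByUriAndEncryption(*offered, ext.uri, ext.encrypt)) continue;
    const Extension* existing =
        FindByUriAndEncryption(*all_encountered, ext.uri, ext.encrypt);
    if (existing) {
      offered->push_back(*existing);  // e.g. same URI used for audio and video
      continue;
    }
    int id = 1;
    while (used_ids->count(id)) ++id;  // next unused ID
    ext.id = id;
    used_ids->insert(id);
    all_encountered->push_back(ext);
    offered->push_back(ext);
  }
}

int main() {
  std::vector<Extension> audio_offer, video_offer, all_encountered;
  std::set<int> used_ids;
  Merge({{"urn:ietf:params:rtp-hdrext:sdes:mid", 0, false}}, &audio_offer,
        &all_encountered, &used_ids);
  Merge({{"urn:ietf:params:rtp-hdrext:sdes:mid", 0, false},
         {"urn:3gpp:video-orientation", 0, false}},
        &video_offer, &all_encountered, &used_ids);
  for (const Extension& e : video_offer) {
    std::printf("%s -> id %d\n", e.uri.c_str(), e.id);  // mid reuses id 1
  }
  return 0;
}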
- const bool have_encrypted_extension = - webrtc::RtpExtension::FindHeaderExtensionByUriAndEncryption( - *offered_extensions, extension.uri, true); - if (have_encrypted_extension) { - continue; - } - - // Determine if a shared encrypted extension with that URI already exists. - const webrtc::RtpExtension* shared_encrypted_extension = - webrtc::RtpExtension::FindHeaderExtensionByUriAndEncryption( - *encrypted_extensions, extension.uri, true); - if (shared_encrypted_extension) { - // Re-use the shared encrypted extension - encrypted_extensions_to_add.push_back(*shared_encrypted_extension); - continue; - } - - // None exists. Create a new shared encrypted extension from the - // non-encrypted one. - webrtc::RtpExtension new_encrypted_extension(extension); - new_encrypted_extension.encrypt = true; - used_ids->FindAndSetIdUsed(&new_encrypted_extension); - encrypted_extensions->push_back(new_encrypted_extension); - encrypted_extensions_to_add.push_back(new_encrypted_extension); - } - - // Append the additional encrypted extensions to be offered - offered_extensions->insert(offered_extensions->end(), - encrypted_extensions_to_add.begin(), - encrypted_extensions_to_add.end()); -} - // Mostly identical to RtpExtension::FindHeaderExtensionByUri but discards any // encrypted extensions that this implementation cannot encrypt. -static const webrtc::RtpExtension* FindHeaderExtensionByUriDiscardUnsupported( - const std::vector& extensions, +const RtpExtension* FindHeaderExtensionByUriDiscardUnsupported( + const std::vector& extensions, absl::string_view uri, - webrtc::RtpExtension::Filter filter) { + RtpExtension::Filter filter) { // Note: While it's technically possible to decrypt extensions that we don't // encrypt, the symmetric API of libsrtp does not allow us to supply // different IDs for encryption/decryption of header extensions depending on // whether the packet is inbound or outbound. Thereby, we are limited to // what we can send in encrypted form. - if (!webrtc::RtpExtension::IsEncryptionSupported(uri)) { + if (!RtpExtension::IsEncryptionSupported(uri)) { // If there's no encryption support and we only want encrypted extensions, // there's no point in continuing the search here. - if (filter == webrtc::RtpExtension::kRequireEncryptedExtension) { + if (filter == RtpExtension::kRequireEncryptedExtension) { return nullptr; } // Instruct to only return non-encrypted extensions - filter = webrtc::RtpExtension::Filter::kDiscardEncryptedExtension; + filter = RtpExtension::Filter::kDiscardEncryptedExtension; } - return webrtc::RtpExtension::FindHeaderExtensionByUri(extensions, uri, - filter); + return RtpExtension::FindHeaderExtensionByUri(extensions, uri, filter); } -static void NegotiateRtpHeaderExtensions( - const RtpHeaderExtensions& local_extensions, - const RtpHeaderExtensions& offered_extensions, - webrtc::RtpExtension::Filter filter, - RtpHeaderExtensions* negotiated_extensions) { - // TransportSequenceNumberV2 is not offered by default. The special logic for - // the TransportSequenceNumber extensions works as follows: - // Offer Answer - // V1 V1 if in local_extensions. - // V1 and V2 V2 regardless of local_extensions. - // V2 V2 regardless of local_extensions. 
- const webrtc::RtpExtension* transport_sequence_number_v2_offer = - FindHeaderExtensionByUriDiscardUnsupported( - offered_extensions, - webrtc::RtpExtension::kTransportSequenceNumberV2Uri, filter); - +void NegotiateRtpHeaderExtensions(const RtpHeaderExtensions& local_extensions, + const RtpHeaderExtensions& offered_extensions, + RtpExtension::Filter filter, + RtpHeaderExtensions* negotiated_extensions) { bool frame_descriptor_in_local = false; bool dependency_descriptor_in_local = false; bool abs_capture_time_in_local = false; for (const webrtc::RtpExtension& ours : local_extensions) { - if (ours.uri == webrtc::RtpExtension::kGenericFrameDescriptorUri00) + if (ours.uri == RtpExtension::kGenericFrameDescriptorUri00) frame_descriptor_in_local = true; - else if (ours.uri == webrtc::RtpExtension::kDependencyDescriptorUri) + else if (ours.uri == RtpExtension::kDependencyDescriptorUri) dependency_descriptor_in_local = true; - else if (ours.uri == webrtc::RtpExtension::kAbsoluteCaptureTimeUri) + else if (ours.uri == RtpExtension::kAbsoluteCaptureTimeUri) abs_capture_time_in_local = true; - const webrtc::RtpExtension* theirs = - FindHeaderExtensionByUriDiscardUnsupported(offered_extensions, ours.uri, - filter); - if (theirs) { - if (transport_sequence_number_v2_offer && - ours.uri == webrtc::RtpExtension::kTransportSequenceNumberUri) { - // Don't respond to - // http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01 - // if we get an offer including - // http://www.webrtc.org/experiments/rtp-hdrext/transport-wide-cc-02 - continue; - } else { - // We respond with their RTP header extension id. - negotiated_extensions->push_back(*theirs); - } - } - } - if (transport_sequence_number_v2_offer) { - // Respond that we support kTransportSequenceNumberV2Uri. - negotiated_extensions->push_back(*transport_sequence_number_v2_offer); + const RtpExtension* theirs = FindHeaderExtensionByUriDiscardUnsupported( + offered_extensions, ours.uri, filter); + if (theirs && theirs->encrypt == ours.encrypt) { + // We respond with their RTP header extension id. + negotiated_extensions->push_back(*theirs); + } } // Frame descriptors support. If the extension is not present locally, but is // in the offer, we add it to the list. if (!dependency_descriptor_in_local) { - const webrtc::RtpExtension* theirs = - FindHeaderExtensionByUriDiscardUnsupported( - offered_extensions, webrtc::RtpExtension::kDependencyDescriptorUri, - filter); + const RtpExtension* theirs = FindHeaderExtensionByUriDiscardUnsupported( + offered_extensions, RtpExtension::kDependencyDescriptorUri, filter); if (theirs) { negotiated_extensions->push_back(*theirs); } } if (!frame_descriptor_in_local) { - const webrtc::RtpExtension* theirs = - FindHeaderExtensionByUriDiscardUnsupported( - offered_extensions, - webrtc::RtpExtension::kGenericFrameDescriptorUri00, filter); + const RtpExtension* theirs = FindHeaderExtensionByUriDiscardUnsupported( + offered_extensions, RtpExtension::kGenericFrameDescriptorUri00, filter); if (theirs) { negotiated_extensions->push_back(*theirs); } @@ -1347,39 +530,25 @@ static void NegotiateRtpHeaderExtensions( // Absolute capture time support. If the extension is not present locally, but // is in the offer, we add it to the list. 
if (!abs_capture_time_in_local) { - const webrtc::RtpExtension* theirs = - FindHeaderExtensionByUriDiscardUnsupported( - offered_extensions, webrtc::RtpExtension::kAbsoluteCaptureTimeUri, - filter); + const RtpExtension* theirs = FindHeaderExtensionByUriDiscardUnsupported( + offered_extensions, RtpExtension::kAbsoluteCaptureTimeUri, filter); if (theirs) { negotiated_extensions->push_back(*theirs); } } } -static void StripCNCodecs(AudioCodecs* audio_codecs) { - audio_codecs->erase(std::remove_if(audio_codecs->begin(), audio_codecs->end(), - [](const AudioCodec& codec) { - return IsComfortNoiseCodec(codec); - }), - audio_codecs->end()); -} - -template -static bool SetCodecsInAnswer( - const MediaContentDescriptionImpl* offer, - const std::vector& local_codecs, - const MediaDescriptionOptions& media_description_options, - const MediaSessionOptions& session_options, - UniqueRandomIdGenerator* ssrc_generator, - StreamParamsVec* current_streams, - MediaContentDescriptionImpl* answer, - const webrtc::FieldTrialsView& field_trials) { - std::vector negotiated_codecs; - NegotiateCodecs(local_codecs, offer->codecs(), &negotiated_codecs, - media_description_options.codec_preferences.empty(), - &field_trials); - answer->AddCodecs(negotiated_codecs); +bool SetCodecsInAnswer(const MediaContentDescription* offer, + const std::vector& local_codecs, + const MediaDescriptionOptions& media_description_options, + const MediaSessionOptions& session_options, + UniqueRandomIdGenerator* ssrc_generator, + StreamParamsVec* current_streams, + MediaContentDescription* answer, + const FieldTrialsView& field_trials) { + RTC_DCHECK(offer->type() == webrtc::MediaType::AUDIO || + offer->type() == webrtc::MediaType::VIDEO); + answer->AddCodecs(local_codecs); answer->set_protocol(offer->protocol()); if (!AddStreamParams(media_description_options.sender_options, session_options.rtcp_cname, ssrc_generator, @@ -1396,12 +565,10 @@ static bool SetCodecsInAnswer( // (according to crypto_suites). The codecs, rtcp_mux, and crypto are all // negotiated with the offer. If the negotiation fails, this method returns // false. The created content is added to the offer. -static bool CreateMediaContentAnswer( +bool CreateMediaContentAnswer( const MediaContentDescription* offer, const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, - const SecurePolicy& sdes_policy, - const CryptoParamsVec* current_cryptos, const RtpHeaderExtensions& local_rtp_extensions, UniqueRandomIdGenerator* ssrc_generator, bool enable_encrypted_rtp_header_extensions, @@ -1409,16 +576,17 @@ static bool CreateMediaContentAnswer( bool bundle_enabled, MediaContentDescription* answer) { answer->set_extmap_allow_mixed_enum(offer->extmap_allow_mixed_enum()); - const webrtc::RtpExtension::Filter extensions_filter = + const RtpExtension::Filter extensions_filter = enable_encrypted_rtp_header_extensions - ? webrtc::RtpExtension::Filter::kPreferEncryptedExtension - : webrtc::RtpExtension::Filter::kDiscardEncryptedExtension; + ? RtpExtension::Filter::kPreferEncryptedExtension + : RtpExtension::Filter::kDiscardEncryptedExtension; // Filter local extensions by capabilities and direction. 
RtpHeaderExtensions local_rtp_extensions_to_reply_with; - for (auto extension_with_id : local_rtp_extensions) { + for (const auto& extension_with_id : local_rtp_extensions) { for (const auto& extension : media_description_options.header_extensions) { - if (extension_with_id.uri == extension.uri) { + if (extension_with_id.uri == extension.uri && + extension_with_id.encrypt == extension.preferred_encrypt) { // TODO(crbug.com/1051821): Configure the extension direction from // the information in the media_description_options extension // capability. For now, do not include stopped extensions. @@ -1436,27 +604,9 @@ static bool CreateMediaContentAnswer( answer->set_rtp_header_extensions(negotiated_rtp_extensions); answer->set_rtcp_mux(session_options.rtcp_mux_enabled && offer->rtcp_mux()); - if (answer->type() == cricket::MEDIA_TYPE_VIDEO) { - answer->set_rtcp_reduced_size(offer->rtcp_reduced_size()); - } - + answer->set_rtcp_reduced_size(offer->rtcp_reduced_size()); answer->set_remote_estimate(offer->remote_estimate()); - if (sdes_policy != SEC_DISABLED) { - CryptoParams crypto; - if (SelectCrypto(offer, bundle_enabled, session_options.crypto_options, - &crypto)) { - if (current_cryptos) { - FindMatchingCrypto(*current_cryptos, crypto, &crypto); - } - answer->AddCrypto(crypto); - } - } - - if (answer->cryptos().empty() && sdes_policy == SEC_REQUIRED) { - return false; - } - AddSimulcastToMediaDescription(media_description_options, answer); answer->set_direction(NegotiateRtpTransceiverDirection( @@ -1465,16 +615,16 @@ static bool CreateMediaContentAnswer( return true; } -static bool IsMediaProtocolSupported(MediaType type, - const std::string& protocol, - bool secure_transport) { +bool IsMediaProtocolSupported(webrtc::MediaType type, + const std::string& protocol, + bool secure_transport) { // Since not all applications serialize and deserialize the media protocol, // we will have to accept `protocol` to be empty. if (protocol.empty()) { return true; } - if (type == MEDIA_TYPE_DATA) { + if (type == webrtc::MediaType::DATA) { // Check for SCTP if (secure_transport) { // Most likely scenarios first. @@ -1494,11 +644,8 @@ static bool IsMediaProtocolSupported(MediaType type, } } -static void SetMediaProtocol(bool secure_transport, - MediaContentDescription* desc) { - if (!desc->cryptos().empty()) - desc->set_protocol(kMediaProtocolSavpf); - else if (secure_transport) +void SetMediaProtocol(bool secure_transport, MediaContentDescription* desc) { + if (secure_transport) desc->set_protocol(kMediaProtocolDtlsSavpf); else desc->set_protocol(kMediaProtocolAvpf); @@ -1506,7 +653,7 @@ static void SetMediaProtocol(bool secure_transport, // Gets the TransportInfo of the given `content_name` from the // `current_description`. If doesn't exist, returns a new one. -static const TransportDescription* GetTransportDescription( +const TransportDescription* GetTransportDescription( const std::string& content_name, const SessionDescription* current_description) { const TransportDescription* desc = NULL; @@ -1520,155 +667,43 @@ static const TransportDescription* GetTransportDescription( return desc; } -// Gets the current DTLS state from the transport description. 
-static bool IsDtlsActive(const ContentInfo* content, - const SessionDescription* current_description) { - if (!content) { - return false; - } - - size_t msection_index = content - ¤t_description->contents()[0]; - - if (current_description->transport_infos().size() <= msection_index) { - return false; - } - - return current_description->transport_infos()[msection_index] - .description.secure(); -} - -void MediaDescriptionOptions::AddAudioSender( - const std::string& track_id, - const std::vector& stream_ids) { - RTC_DCHECK(type == MEDIA_TYPE_AUDIO); - AddSenderInternal(track_id, stream_ids, {}, SimulcastLayerList(), 1); -} - -void MediaDescriptionOptions::AddVideoSender( - const std::string& track_id, - const std::vector& stream_ids, - const std::vector& rids, - const SimulcastLayerList& simulcast_layers, - int num_sim_layers) { - RTC_DCHECK(type == MEDIA_TYPE_VIDEO); - RTC_DCHECK(rids.empty() || num_sim_layers == 0) - << "RIDs are the compliant way to indicate simulcast."; - RTC_DCHECK(ValidateSimulcastLayers(rids, simulcast_layers)); - AddSenderInternal(track_id, stream_ids, rids, simulcast_layers, - num_sim_layers); -} - -void MediaDescriptionOptions::AddSenderInternal( - const std::string& track_id, - const std::vector& stream_ids, - const std::vector& rids, - const SimulcastLayerList& simulcast_layers, - int num_sim_layers) { - // TODO(steveanton): Support any number of stream ids. - RTC_CHECK(stream_ids.size() == 1U); - SenderOptions options; - options.track_id = track_id; - options.stream_ids = stream_ids; - options.simulcast_layers = simulcast_layers; - options.rids = rids; - options.num_sim_layers = num_sim_layers; - sender_options.push_back(options); -} - -bool MediaSessionOptions::HasMediaDescription(MediaType type) const { - return absl::c_any_of( - media_description_options, - [type](const MediaDescriptionOptions& t) { return t.type == type; }); -} +} // namespace MediaSessionDescriptionFactory::MediaSessionDescriptionFactory( + MediaEngineInterface* media_engine, + bool rtx_enabled, + UniqueRandomIdGenerator* ssrc_generator, const TransportDescriptionFactory* transport_desc_factory, - rtc::UniqueRandomIdGenerator* ssrc_generator) + CodecLookupHelper* codec_lookup_helper) : ssrc_generator_(ssrc_generator), - transport_desc_factory_(transport_desc_factory) {} - -MediaSessionDescriptionFactory::MediaSessionDescriptionFactory( - cricket::MediaEngineInterface* media_engine, - bool rtx_enabled, - rtc::UniqueRandomIdGenerator* ssrc_generator, - const TransportDescriptionFactory* transport_desc_factory) - : MediaSessionDescriptionFactory(transport_desc_factory, ssrc_generator) { - if (media_engine) { - audio_send_codecs_ = media_engine->voice().send_codecs(); - audio_recv_codecs_ = media_engine->voice().recv_codecs(); - video_send_codecs_ = media_engine->video().send_codecs(rtx_enabled); - video_recv_codecs_ = media_engine->video().recv_codecs(rtx_enabled); - } - ComputeAudioCodecsIntersectionAndUnion(); - ComputeVideoCodecsIntersectionAndUnion(); -} - -const AudioCodecs& MediaSessionDescriptionFactory::audio_sendrecv_codecs() - const { - return audio_sendrecv_codecs_; -} - -const AudioCodecs& MediaSessionDescriptionFactory::audio_send_codecs() const { - return audio_send_codecs_; -} - -const AudioCodecs& MediaSessionDescriptionFactory::audio_recv_codecs() const { - return audio_recv_codecs_; -} - -void MediaSessionDescriptionFactory::set_audio_codecs( - const AudioCodecs& send_codecs, - const AudioCodecs& recv_codecs) { - audio_send_codecs_ = send_codecs; - audio_recv_codecs_ = 
recv_codecs; - ComputeAudioCodecsIntersectionAndUnion(); -} - -const VideoCodecs& MediaSessionDescriptionFactory::video_sendrecv_codecs() - const { - return video_sendrecv_codecs_; -} - -const VideoCodecs& MediaSessionDescriptionFactory::video_send_codecs() const { - return video_send_codecs_; -} - -const VideoCodecs& MediaSessionDescriptionFactory::video_recv_codecs() const { - return video_recv_codecs_; -} - -void MediaSessionDescriptionFactory::set_video_codecs( - const VideoCodecs& send_codecs, - const VideoCodecs& recv_codecs) { - video_send_codecs_ = send_codecs; - video_recv_codecs_ = recv_codecs; - ComputeVideoCodecsIntersectionAndUnion(); -} - -static void RemoveUnifiedPlanExtensions(RtpHeaderExtensions* extensions) { - RTC_DCHECK(extensions); - - extensions->erase( - std::remove_if(extensions->begin(), extensions->end(), - [](auto extension) { - return extension.uri == webrtc::RtpExtension::kMidUri || - extension.uri == webrtc::RtpExtension::kRidUri || - extension.uri == - webrtc::RtpExtension::kRepairedRidUri; - }), - extensions->end()); + transport_desc_factory_(transport_desc_factory), + codec_lookup_helper_(codec_lookup_helper), + payload_types_in_transport_trial_enabled_( + transport_desc_factory_->trials().IsEnabled( + "WebRTC-PayloadTypesInTransport")) { + RTC_CHECK(transport_desc_factory_); + RTC_CHECK(codec_lookup_helper_); } RtpHeaderExtensions MediaSessionDescriptionFactory::filtered_rtp_header_extensions( RtpHeaderExtensions extensions) const { if (!is_unified_plan_) { - RemoveUnifiedPlanExtensions(&extensions); + // Remove extensions only supported with unified-plan. + extensions.erase( + std::remove_if(extensions.begin(), extensions.end(), + [](const webrtc::RtpExtension& extension) { + return extension.uri == RtpExtension::kMidUri || + extension.uri == RtpExtension::kRidUri || + extension.uri == RtpExtension::kRepairedRidUri; + }), + extensions.end()); } return extensions; } -std::unique_ptr MediaSessionDescriptionFactory::CreateOffer( +RTCErrorOr> +MediaSessionDescriptionFactory::CreateOfferOrError( const MediaSessionOptions& session_options, const SessionDescription* current_description) const { // Must have options for each existing section. @@ -1689,10 +724,6 @@ std::unique_ptr MediaSessionDescriptionFactory::CreateOffer( StreamParamsVec current_streams = GetCurrentStreamParams(current_active_contents); - AudioCodecs offer_audio_codecs; - VideoCodecs offer_video_codecs; - GetCodecsForOffer(current_active_contents, &offer_audio_codecs, - &offer_video_codecs); AudioVideoRtpHeaderExtensions extensions_with_ids = GetOfferedRtpHeaderExtensionsWithIds( current_active_contents, session_options.offer_extmap_allow_mixed, @@ -1710,47 +741,36 @@ std::unique_ptr MediaSessionDescriptionFactory::CreateOffer( msection_index < current_description->contents().size()) { current_content = ¤t_description->contents()[msection_index]; // Media type must match unless this media section is being recycled. 
- RTC_DCHECK(current_content->name != media_description_options.mid || - IsMediaContentOfType(current_content, - media_description_options.type)); } + RTCError error; switch (media_description_options.type) { - case MEDIA_TYPE_AUDIO: - if (!AddAudioContentForOffer(media_description_options, session_options, - current_content, current_description, - extensions_with_ids.audio, - offer_audio_codecs, ¤t_streams, - offer.get(), &ice_credentials)) { - return nullptr; - } + case webrtc::MediaType::AUDIO: + case webrtc::MediaType::VIDEO: + error = AddRtpContentForOffer( + media_description_options, session_options, current_content, + current_description, + media_description_options.type == webrtc::MediaType::AUDIO + ? extensions_with_ids.audio + : extensions_with_ids.video, + ¤t_streams, offer.get(), &ice_credentials); break; - case MEDIA_TYPE_VIDEO: - if (!AddVideoContentForOffer(media_description_options, session_options, - current_content, current_description, - extensions_with_ids.video, - offer_video_codecs, ¤t_streams, - offer.get(), &ice_credentials)) { - return nullptr; - } + case webrtc::MediaType::DATA: + error = AddDataContentForOffer(media_description_options, + session_options, current_content, + current_description, ¤t_streams, + offer.get(), &ice_credentials); break; - case MEDIA_TYPE_DATA: - if (!AddDataContentForOffer(media_description_options, session_options, - current_content, current_description, - ¤t_streams, offer.get(), - &ice_credentials)) { - return nullptr; - } - break; - case MEDIA_TYPE_UNSUPPORTED: - if (!AddUnsupportedContentForOffer( - media_description_options, session_options, current_content, - current_description, offer.get(), &ice_credentials)) { - return nullptr; - } + case webrtc::MediaType::UNSUPPORTED: + error = AddUnsupportedContentForOffer( + media_description_options, session_options, current_content, + current_description, offer.get(), &ice_credentials); break; default: RTC_DCHECK_NOTREACHED(); } + if (!error.ok()) { + return error; + } ++msection_index; } @@ -1767,19 +787,14 @@ std::unique_ptr MediaSessionDescriptionFactory::CreateOffer( // type to represent different codecs, or same IDs for different header // extensions. We need to detect this and not try to bundle those media // descriptions together. - offer_bundle.AddContentName(content.name); + offer_bundle.AddContentName(content.mid()); } if (!offer_bundle.content_names().empty()) { offer->AddGroup(offer_bundle); if (!UpdateTransportInfoForBundle(offer_bundle, offer.get())) { - RTC_LOG(LS_ERROR) - << "CreateOffer failed to UpdateTransportInfoForBundle."; - return nullptr; - } - if (!UpdateCryptoParamsForBundle(offer_bundle, offer.get())) { - RTC_LOG(LS_ERROR) - << "CreateOffer failed to UpdateCryptoParamsForBundle."; - return nullptr; + LOG_AND_RETURN_ERROR( + RTCErrorType::INTERNAL_ERROR, + "CreateOffer failed to UpdateTransportInfoForBundle"); } } } @@ -1790,11 +805,13 @@ std::unique_ptr MediaSessionDescriptionFactory::CreateOffer( // Be conservative and signal using both a=msid and a=ssrc lines. Unified // Plan answerers will look at a=msid and Plan B answerers will look at the // a=ssrc MSID line. - offer->set_msid_signaling(cricket::kMsidSignalingMediaSection | - cricket::kMsidSignalingSsrcAttribute); + offer->set_msid_signaling(kMsidSignalingSemantic | + kMsidSignalingMediaSection | + kMsidSignalingSsrcAttribute); } else { // Plan B always signals MSID using a=ssrc lines. 
- offer->set_msid_signaling(cricket::kMsidSignalingSsrcAttribute); + offer->set_msid_signaling(kMsidSignalingSemantic | + kMsidSignalingSsrcAttribute); } offer->set_extmap_allow_mixed(session_options.offer_extmap_allow_mixed); @@ -1802,13 +819,13 @@ std::unique_ptr MediaSessionDescriptionFactory::CreateOffer( return offer; } -std::unique_ptr -MediaSessionDescriptionFactory::CreateAnswer( +RTCErrorOr> +MediaSessionDescriptionFactory::CreateAnswerOrError( const SessionDescription* offer, const MediaSessionOptions& session_options, const SessionDescription* current_description) const { if (!offer) { - return nullptr; + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, "Called without offer."); } // Must have options for exactly as many sections as in the offer. @@ -1827,16 +844,21 @@ MediaSessionDescriptionFactory::CreateAnswer( StreamParamsVec current_streams = GetCurrentStreamParams(current_active_contents); - // Get list of all possible codecs that respects existing payload type - // mappings and uses a single payload type space. - // - // Note that these lists may be further filtered for each m= section; this - // step is done just to establish the payload type mappings shared by all - // sections. - AudioCodecs answer_audio_codecs; - VideoCodecs answer_video_codecs; - GetCodecsForAnswer(current_active_contents, *offer, &answer_audio_codecs, - &answer_video_codecs); + // Decide what congestion control feedback format we're using. + bool has_ack_ccfb = false; + if (transport_desc_factory_->trials().IsEnabled( + "WebRTC-RFC8888CongestionControlFeedback")) { + for (const auto& content : offer->contents()) { + if (content.media_description()->rtcp_fb_ack_ccfb()) { + has_ack_ccfb = true; + } else if (has_ack_ccfb) { + RTC_LOG(LS_ERROR) + << "Inconsistent rtcp_fb_ack_ccfb marking, ignoring all"; + has_ack_ccfb = false; + break; + } + } + } auto answer = std::make_unique(); @@ -1868,9 +890,9 @@ MediaSessionDescriptionFactory::CreateAnswer( // MediaDescriptionOptions. RTC_DCHECK( IsMediaContentOfType(offer_content, media_description_options.type)); - RTC_DCHECK(media_description_options.mid == offer_content->name); + RTC_DCHECK(media_description_options.mid == offer_content->mid()); // Get the index of the BUNDLE group that this MID belongs to, if any. - absl::optional bundle_index; + std::optional bundle_index; for (size_t i = 0; i < offer_bundles.size(); ++i) { if (offer_bundles[i]->HasContentName(media_description_options.mid)) { bundle_index = i; @@ -1886,47 +908,45 @@ MediaSessionDescriptionFactory::CreateAnswer( msection_index < current_description->contents().size()) { current_content = ¤t_description->contents()[msection_index]; } + // Don't offer the transport-cc header extension if "ack ccfb" is in use. 
+ auto header_extensions_in = media_description_options.header_extensions; + if (has_ack_ccfb) { + for (auto& option : header_extensions_in) { + if (option.uri == RtpExtension::kTransportSequenceNumberUri) { + option.direction = RtpTransceiverDirection::kStopped; + } + } + } RtpHeaderExtensions header_extensions = RtpHeaderExtensionsFromCapabilities( - UnstoppedRtpHeaderExtensionCapabilities( - media_description_options.header_extensions)); + UnstoppedRtpHeaderExtensionCapabilities(header_extensions_in)); + RTCError error; switch (media_description_options.type) { - case MEDIA_TYPE_AUDIO: - if (!AddAudioContentForAnswer( - media_description_options, session_options, offer_content, - offer, current_content, current_description, bundle_transport, - answer_audio_codecs, header_extensions, ¤t_streams, - answer.get(), &ice_credentials)) { - return nullptr; - } + case webrtc::MediaType::AUDIO: + case webrtc::MediaType::VIDEO: + error = AddRtpContentForAnswer( + media_description_options, session_options, offer_content, offer, + current_content, current_description, bundle_transport, + header_extensions, ¤t_streams, answer.get(), + &ice_credentials); break; - case MEDIA_TYPE_VIDEO: - if (!AddVideoContentForAnswer( - media_description_options, session_options, offer_content, - offer, current_content, current_description, bundle_transport, - answer_video_codecs, header_extensions, ¤t_streams, - answer.get(), &ice_credentials)) { - return nullptr; - } + case webrtc::MediaType::DATA: + error = AddDataContentForAnswer( + media_description_options, session_options, offer_content, offer, + current_content, current_description, bundle_transport, + ¤t_streams, answer.get(), &ice_credentials); break; - case MEDIA_TYPE_DATA: - if (!AddDataContentForAnswer( - media_description_options, session_options, offer_content, - offer, current_content, current_description, bundle_transport, - ¤t_streams, answer.get(), &ice_credentials)) { - return nullptr; - } - break; - case MEDIA_TYPE_UNSUPPORTED: - if (!AddUnsupportedContentForAnswer( - media_description_options, session_options, offer_content, - offer, current_content, current_description, bundle_transport, - answer.get(), &ice_credentials)) { - return nullptr; - } + case webrtc::MediaType::UNSUPPORTED: + error = AddUnsupportedContentForAnswer( + media_description_options, session_options, offer_content, offer, + current_content, current_description, bundle_transport, + answer.get(), &ice_credentials); break; default: RTC_DCHECK_NOTREACHED(); } + if (!error.ok()) { + return error; + } ++msection_index; // See if we can add the newly generated m= section to the BUNDLE group in // the answer. @@ -1934,10 +954,10 @@ MediaSessionDescriptionFactory::CreateAnswer( if (!added.rejected && session_options.bundle_enabled && bundle_index.has_value()) { // The `bundle_index` is for `media_description_options.mid`. - RTC_DCHECK_EQ(media_description_options.mid, added.name); - answer_bundles[bundle_index.value()].AddContentName(added.name); + RTC_DCHECK_EQ(media_description_options.mid, added.mid()); + answer_bundles[bundle_index.value()].AddContentName(added.mid()); bundle_transports[bundle_index.value()].reset( - new TransportInfo(*answer->GetTransportInfoByName(added.name))); + new TransportInfo(*answer->GetTransportInfoByName(added.mid()))); } } @@ -1955,15 +975,9 @@ MediaSessionDescriptionFactory::CreateAnswer( // Share the same ICE credentials and crypto params across all contents, // as BUNDLE requires. 
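The new answer path above first checks whether every m= section in the offer was marked with "ack ccfb" (treating inconsistent marking as not in use), and if so stops offering the transport-wide-cc header extension. A standalone sketch of that decision with stand-in types; kTransportCcUri stands for RtpExtension::kTransportSequenceNumberUri, using the URI quoted in the comments removed earlier in this file:

// ack_ccfb_sketch.cc - stand-in types; not the classes from this patch.
#include <cstdio>
#include <string>
#include <vector>

struct Section {
  bool rtcp_fb_ack_ccfb;  // the m= section carried "a=rtcp-fb:* ack ccfb"
};

struct HeaderExtensionCapability {
  std::string uri;
  bool stopped = false;  // stands in for direction = kStopped
};

constexpr char kTransportCcUri[] =
    "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01";

// "ack ccfb" counts as in use only when the marking is consistent across the
// whole offer; a mixed offer is ignored, matching the defensive check above.
bool AckCcfbInUse(const std::vector<Section>& offer) {
  bool any = false;
  for (const Section& s : offer) {
    if (s.rtcp_fb_ack_ccfb) {
      any = true;
    } else if (any) {
      return false;  // inconsistent marking: ignore ack ccfb entirely
    }
  }
  return any;
}

int main() {
  std::vector<Section> offer = {{true}, {true}};
  std::vector<HeaderExtensionCapability> extensions = {
      {kTransportCcUri}, {"urn:ietf:params:rtp-hdrext:sdes:mid"}};
  if (AckCcfbInUse(offer)) {
    for (HeaderExtensionCapability& ext : extensions) {
      if (ext.uri == kTransportCcUri) {
        ext.stopped = true;  // do not offer transport-cc alongside ack ccfb
      }
    }
  }
  for (const HeaderExtensionCapability& ext : extensions) {
    std::printf("%s stopped=%d\n", ext.uri.c_str(), ext.stopped ? 1 : 0);
  }
  return 0;
}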
if (!UpdateTransportInfoForBundle(answer_bundle, answer.get())) { - RTC_LOG(LS_ERROR) - << "CreateAnswer failed to UpdateTransportInfoForBundle."; - return NULL; - } - - if (!UpdateCryptoParamsForBundle(answer_bundle, answer.get())) { - RTC_LOG(LS_ERROR) - << "CreateAnswer failed to UpdateCryptoParamsForBundle."; - return NULL; + LOG_AND_RETURN_ERROR( + RTCErrorType::INTERNAL_ERROR, + "CreateAnswer failed to UpdateTransportInfoForBundle."); } } } @@ -1974,214 +988,45 @@ MediaSessionDescriptionFactory::CreateAnswer( if (is_unified_plan_) { // Unified Plan needs to look at what the offer included to find the most // compatible answer. - if (offer->msid_signaling() == 0) { + int msid_signaling = offer->msid_signaling(); + if (msid_signaling == (kMsidSignalingSemantic | kMsidSignalingMediaSection | + kMsidSignalingSsrcAttribute)) { + // If both a=msid and a=ssrc MSID signaling methods were used, we're + // probably talking to a Unified Plan endpoint so respond with just + // a=msid. + answer->set_msid_signaling(kMsidSignalingSemantic | + kMsidSignalingMediaSection); + } else if (msid_signaling == + (kMsidSignalingSemantic | kMsidSignalingSsrcAttribute) || + msid_signaling == kMsidSignalingSsrcAttribute) { + // If only a=ssrc MSID signaling method was used, we're probably talking + // to a Plan B endpoint so respond with just a=ssrc MSID. + answer->set_msid_signaling(kMsidSignalingSemantic | + kMsidSignalingSsrcAttribute); + } else { // We end up here in one of three cases: // 1. An empty offer. We'll reply with an empty answer so it doesn't // matter what we pick here. // 2. A data channel only offer. We won't add any MSIDs to the answer so // it also doesn't matter what we pick here. - // 3. Media that's either sendonly or inactive from the remote endpoint. + // 3. Media that's either recvonly or inactive from the remote point of + // view. // We don't have any information to say whether the endpoint is Plan B - // or Unified Plan, so be conservative and send both. - answer->set_msid_signaling(cricket::kMsidSignalingMediaSection | - cricket::kMsidSignalingSsrcAttribute); - } else if (offer->msid_signaling() == - (cricket::kMsidSignalingMediaSection | - cricket::kMsidSignalingSsrcAttribute)) { - // If both a=msid and a=ssrc MSID signaling methods were used, we're - // probably talking to a Unified Plan endpoint so respond with just - // a=msid. - answer->set_msid_signaling(cricket::kMsidSignalingMediaSection); - } else { - // Otherwise, it's clear which method the offerer is using so repeat that - // back to them. - answer->set_msid_signaling(offer->msid_signaling()); + // or Unified Plan. Since plan-b is obsolete, do not respond with it. + // We assume that endpoints not supporting MSID will silently ignore + // the a=msid lines they do not understand. + answer->set_msid_signaling(kMsidSignalingSemantic | + kMsidSignalingMediaSection); } } else { // Plan B always signals MSID using a=ssrc lines. - answer->set_msid_signaling(cricket::kMsidSignalingSsrcAttribute); + answer->set_msid_signaling(kMsidSignalingSemantic | + kMsidSignalingSsrcAttribute); } return answer; } -const AudioCodecs& MediaSessionDescriptionFactory::GetAudioCodecsForOffer( - const RtpTransceiverDirection& direction) const { - switch (direction) { - // If stream is inactive - generate list as if sendrecv. 
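Further down, the answer chooses its MSID signaling from the offer's bitmask: both methods in the offer is taken as Unified Plan (reply with a=msid only), a=ssrc-only is mirrored back, and anything else now defaults to a=msid since Plan B is obsolete. A compact sketch of that decision table; the flag values here are illustrative placeholders, not WebRTC's kMsidSignaling* constants:

// msid_signaling_sketch.cc - illustrative flag values only.
#include <cstdio>

constexpr int kSemantic = 1 << 0;       // the "a=msid-semantic" session line
constexpr int kMediaSection = 1 << 1;   // per-m=-section "a=msid" lines
constexpr int kSsrcAttribute = 1 << 2;  // "a=ssrc ... msid" lines

// Chooses what the answer signals, given the bitmask the offer signaled.
int ChooseAnswerMsidSignaling(int offer_signaling) {
  if (offer_signaling == (kSemantic | kMediaSection | kSsrcAttribute)) {
    // Both methods offered: likely Unified Plan, reply with a=msid only.
    return kSemantic | kMediaSection;
  }
  if (offer_signaling == (kSemantic | kSsrcAttribute) ||
      offer_signaling == kSsrcAttribute) {
    // Only a=ssrc MSID offered: likely Plan B, mirror it back.
    return kSemantic | kSsrcAttribute;
  }
  // Empty, data-only, or recvonly/inactive offers: default to a=msid, since
  // Plan B is obsolete and unknown a=msid lines are silently ignored.
  return kSemantic | kMediaSection;
}

int main() {
  std::printf("%d\n", ChooseAnswerMsidSignaling(
                          kSemantic | kMediaSection | kSsrcAttribute));  // 3
  std::printf("%d\n", ChooseAnswerMsidSignaling(kSsrcAttribute));        // 5
  return 0;
}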
- case RtpTransceiverDirection::kSendRecv: - case RtpTransceiverDirection::kStopped: - case RtpTransceiverDirection::kInactive: - return audio_sendrecv_codecs_; - case RtpTransceiverDirection::kSendOnly: - return audio_send_codecs_; - case RtpTransceiverDirection::kRecvOnly: - return audio_recv_codecs_; - } - RTC_CHECK_NOTREACHED(); -} - -const AudioCodecs& MediaSessionDescriptionFactory::GetAudioCodecsForAnswer( - const RtpTransceiverDirection& offer, - const RtpTransceiverDirection& answer) const { - switch (answer) { - // For inactive and sendrecv answers, generate lists as if we were to accept - // the offer's direction. See RFC 3264 Section 6.1. - case RtpTransceiverDirection::kSendRecv: - case RtpTransceiverDirection::kStopped: - case RtpTransceiverDirection::kInactive: - return GetAudioCodecsForOffer( - webrtc::RtpTransceiverDirectionReversed(offer)); - case RtpTransceiverDirection::kSendOnly: - return audio_send_codecs_; - case RtpTransceiverDirection::kRecvOnly: - return audio_recv_codecs_; - } - RTC_CHECK_NOTREACHED(); -} - -const VideoCodecs& MediaSessionDescriptionFactory::GetVideoCodecsForOffer( - const RtpTransceiverDirection& direction) const { - switch (direction) { - // If stream is inactive - generate list as if sendrecv. - case RtpTransceiverDirection::kSendRecv: - case RtpTransceiverDirection::kStopped: - case RtpTransceiverDirection::kInactive: - return video_sendrecv_codecs_; - case RtpTransceiverDirection::kSendOnly: - return video_send_codecs_; - case RtpTransceiverDirection::kRecvOnly: - return video_recv_codecs_; - } - RTC_CHECK_NOTREACHED(); -} - -const VideoCodecs& MediaSessionDescriptionFactory::GetVideoCodecsForAnswer( - const RtpTransceiverDirection& offer, - const RtpTransceiverDirection& answer) const { - switch (answer) { - // For inactive and sendrecv answers, generate lists as if we were to accept - // the offer's direction. See RFC 3264 Section 6.1. - case RtpTransceiverDirection::kSendRecv: - case RtpTransceiverDirection::kStopped: - case RtpTransceiverDirection::kInactive: - return GetVideoCodecsForOffer( - webrtc::RtpTransceiverDirectionReversed(offer)); - case RtpTransceiverDirection::kSendOnly: - return video_send_codecs_; - case RtpTransceiverDirection::kRecvOnly: - return video_recv_codecs_; - } - RTC_CHECK_NOTREACHED(); -} - -void MergeCodecsFromDescription( - const std::vector& current_active_contents, - AudioCodecs* audio_codecs, - VideoCodecs* video_codecs, - UsedPayloadTypes* used_pltypes, - const webrtc::FieldTrialsView* field_trials) { - for (const ContentInfo* content : current_active_contents) { - if (IsMediaContentOfType(content, MEDIA_TYPE_AUDIO)) { - const AudioContentDescription* audio = - content->media_description()->as_audio(); - MergeCodecs(audio->codecs(), audio_codecs, used_pltypes, - field_trials); - } else if (IsMediaContentOfType(content, MEDIA_TYPE_VIDEO)) { - const VideoContentDescription* video = - content->media_description()->as_video(); - MergeCodecs(video->codecs(), video_codecs, used_pltypes, - field_trials); - } - } -} - -// Getting codecs for an offer involves these steps: -// -// 1. Construct payload type -> codec mappings for current description. -// 2. Add any reference codecs that weren't already present -// 3. For each individual media description (m= section), filter codecs based -// on the directional attribute (happens in another method). 
-void MediaSessionDescriptionFactory::GetCodecsForOffer( - const std::vector& current_active_contents, - AudioCodecs* audio_codecs, - VideoCodecs* video_codecs) const { - const webrtc::FieldTrialsView* field_trials = - &transport_desc_factory_->trials(); - // First - get all codecs from the current description if the media type - // is used. Add them to `used_pltypes` so the payload type is not reused if a - // new media type is added. - UsedPayloadTypes used_pltypes; - MergeCodecsFromDescription(current_active_contents, audio_codecs, - video_codecs, &used_pltypes, field_trials); - - // Add our codecs that are not in the current description. - MergeCodecs(all_audio_codecs_, audio_codecs, &used_pltypes, - field_trials); - MergeCodecs(all_video_codecs_, video_codecs, &used_pltypes, - field_trials); -} - -// Getting codecs for an answer involves these steps: -// -// 1. Construct payload type -> codec mappings for current description. -// 2. Add any codecs from the offer that weren't already present. -// 3. Add any remaining codecs that weren't already present. -// 4. For each individual media description (m= section), filter codecs based -// on the directional attribute (happens in another method). -void MediaSessionDescriptionFactory::GetCodecsForAnswer( - const std::vector& current_active_contents, - const SessionDescription& remote_offer, - AudioCodecs* audio_codecs, - VideoCodecs* video_codecs) const { - const webrtc::FieldTrialsView* field_trials = - &transport_desc_factory_->trials(); - // First - get all codecs from the current description if the media type - // is used. Add them to `used_pltypes` so the payload type is not reused if a - // new media type is added. - UsedPayloadTypes used_pltypes; - MergeCodecsFromDescription(current_active_contents, audio_codecs, - video_codecs, &used_pltypes, field_trials); - - // Second - filter out codecs that we don't support at all and should ignore. - AudioCodecs filtered_offered_audio_codecs; - VideoCodecs filtered_offered_video_codecs; - for (const ContentInfo& content : remote_offer.contents()) { - if (IsMediaContentOfType(&content, MEDIA_TYPE_AUDIO)) { - const AudioContentDescription* audio = - content.media_description()->as_audio(); - for (const AudioCodec& offered_audio_codec : audio->codecs()) { - if (!FindMatchingCodec(audio->codecs(), - filtered_offered_audio_codecs, - offered_audio_codec, field_trials) && - FindMatchingCodec(audio->codecs(), all_audio_codecs_, - offered_audio_codec, field_trials)) { - filtered_offered_audio_codecs.push_back(offered_audio_codec); - } - } - } else if (IsMediaContentOfType(&content, MEDIA_TYPE_VIDEO)) { - const VideoContentDescription* video = - content.media_description()->as_video(); - for (const VideoCodec& offered_video_codec : video->codecs()) { - if (!FindMatchingCodec(video->codecs(), - filtered_offered_video_codecs, - offered_video_codec, field_trials) && - FindMatchingCodec(video->codecs(), all_video_codecs_, - offered_video_codec, field_trials)) { - filtered_offered_video_codecs.push_back(offered_video_codec); - } - } - } - } - - // Add codecs that are not in the current description but were in - // `remote_offer`. 
- MergeCodecs(filtered_offered_audio_codecs, audio_codecs, - &used_pltypes, field_trials); - MergeCodecs(filtered_offered_video_codecs, video_codecs, - &used_pltypes, field_trials); -} - MediaSessionDescriptionFactory::AudioVideoRtpHeaderExtensions MediaSessionDescriptionFactory::GetOfferedRtpHeaderExtensionsWithIds( const std::vector& current_active_contents, @@ -2198,8 +1043,8 @@ MediaSessionDescriptionFactory::GetOfferedRtpHeaderExtensionsWithIds( UsedRtpHeaderExtensionIds used_ids( extmap_allow_mixed ? UsedRtpHeaderExtensionIds::IdDomain::kTwoByteAllowed : UsedRtpHeaderExtensionIds::IdDomain::kOneByteOnly); - RtpHeaderExtensions all_regular_extensions; - RtpHeaderExtensions all_encrypted_extensions; + + RtpHeaderExtensions all_encountered_extensions; AudioVideoRtpHeaderExtensions offered_extensions; // First - get all extensions from the current description if the media type @@ -2207,17 +1052,15 @@ MediaSessionDescriptionFactory::GetOfferedRtpHeaderExtensionsWithIds( // Add them to `used_ids` so the local ids are not reused if a new media // type is added. for (const ContentInfo* content : current_active_contents) { - if (IsMediaContentOfType(content, MEDIA_TYPE_AUDIO)) { - const AudioContentDescription* audio = - content->media_description()->as_audio(); - MergeRtpHdrExts(audio->rtp_header_extensions(), &offered_extensions.audio, - &all_regular_extensions, &all_encrypted_extensions, + if (IsMediaContentOfType(content, webrtc::MediaType::AUDIO)) { + MergeRtpHdrExts(content->media_description()->rtp_header_extensions(), + enable_encrypted_rtp_header_extensions_, + &offered_extensions.audio, &all_encountered_extensions, &used_ids); - } else if (IsMediaContentOfType(content, MEDIA_TYPE_VIDEO)) { - const VideoContentDescription* video = - content->media_description()->as_video(); - MergeRtpHdrExts(video->rtp_header_extensions(), &offered_extensions.video, - &all_regular_extensions, &all_encrypted_extensions, + } else if (IsMediaContentOfType(content, webrtc::MediaType::VIDEO)) { + MergeRtpHdrExts(content->media_description()->rtp_header_extensions(), + enable_encrypted_rtp_header_extensions_, + &offered_extensions.video, &all_encountered_extensions, &used_ids); } } @@ -2228,37 +1071,25 @@ MediaSessionDescriptionFactory::GetOfferedRtpHeaderExtensionsWithIds( for (const auto& entry : media_description_options) { RtpHeaderExtensions filtered_extensions = filtered_rtp_header_extensions(UnstoppedOrPresentRtpHeaderExtensions( - entry.header_extensions, all_regular_extensions, - all_encrypted_extensions)); - if (entry.type == MEDIA_TYPE_AUDIO) - MergeRtpHdrExts(filtered_extensions, &offered_extensions.audio, - &all_regular_extensions, &all_encrypted_extensions, - &used_ids); - else if (entry.type == MEDIA_TYPE_VIDEO) - MergeRtpHdrExts(filtered_extensions, &offered_extensions.video, - &all_regular_extensions, &all_encrypted_extensions, - &used_ids); - } - // TODO(jbauch): Support adding encrypted header extensions to existing - // sessions. 
- if (enable_encrypted_rtp_header_extensions_ && - current_active_contents.empty()) { - AddEncryptedVersionsOfHdrExts(&offered_extensions.audio, - &all_encrypted_extensions, &used_ids); - AddEncryptedVersionsOfHdrExts(&offered_extensions.video, - &all_encrypted_extensions, &used_ids); + entry.header_extensions, all_encountered_extensions)); + if (entry.type == webrtc::MediaType::AUDIO) + MergeRtpHdrExts( + filtered_extensions, enable_encrypted_rtp_header_extensions_, + &offered_extensions.audio, &all_encountered_extensions, &used_ids); + else if (entry.type == webrtc::MediaType::VIDEO) + MergeRtpHdrExts( + filtered_extensions, enable_encrypted_rtp_header_extensions_, + &offered_extensions.video, &all_encountered_extensions, &used_ids); } return offered_extensions; } -bool MediaSessionDescriptionFactory::AddTransportOffer( +RTCError MediaSessionDescriptionFactory::AddTransportOffer( const std::string& content_name, const TransportOptions& transport_options, const SessionDescription* current_desc, SessionDescription* offer_desc, IceCredentialsIterator* ice_credentials) const { - if (!transport_desc_factory_) - return false; const TransportDescription* current_tdesc = GetTransportDescription(content_name, current_desc); std::unique_ptr new_tdesc( @@ -2269,7 +1100,7 @@ bool MediaSessionDescriptionFactory::AddTransportOffer( << content_name; } offer_desc->AddTransportInfo(TransportInfo(content_name, *new_tdesc)); - return true; + return RTCError::OK(); } std::unique_ptr @@ -2280,8 +1111,6 @@ MediaSessionDescriptionFactory::CreateTransportAnswer( const SessionDescription* current_desc, bool require_transport_attributes, IceCredentialsIterator* ice_credentials) const { - if (!transport_desc_factory_) - return NULL; const TransportDescription* offer_tdesc = GetTransportDescription(content_name, offer_desc); const TransportDescription* current_tdesc = @@ -2291,232 +1120,89 @@ MediaSessionDescriptionFactory::CreateTransportAnswer( current_tdesc, ice_credentials); } -bool MediaSessionDescriptionFactory::AddTransportAnswer( +RTCError MediaSessionDescriptionFactory::AddTransportAnswer( const std::string& content_name, const TransportDescription& transport_desc, SessionDescription* answer_desc) const { answer_desc->AddTransportInfo(TransportInfo(content_name, transport_desc)); - return true; + return RTCError::OK(); } -// `audio_codecs` = set of all possible codecs that can be used, with correct +// Add the RTP description to the SessionDescription. +// If media_description_options.codecs_to_include is set, those codecs are used. +// +// If it is not set, the codecs used are computed based on: +// `codecs` = set of all possible codecs that can be used, with correct // payload type mappings // -// `supported_audio_codecs` = set of codecs that are supported for the direction +// `supported_codecs` = set of codecs that are supported for the direction // of this m= section +// `current_content` = current description, may be null. +// current_content->codecs() = set of previously negotiated codecs for this m= +// section // -// acd->codecs() = set of previously negotiated codecs for this m= section -// -// The payload types should come from audio_codecs, but the order should come -// from acd->codecs() and then supported_codecs, to ensure that re-offers don't -// change existing codec priority, and that new codecs are added with the right -// priority. 
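The comment above (kept in both the old and new versions) says payload types must come from the canonical codec mapping while ordering follows the previously negotiated codecs first and the remaining supported codecs second, so re-offers keep codec priority stable. A standalone sketch of that ordering rule; it matches codecs by name only, unlike the Matches()-based lookup the real code uses, and all types are invented:

// reoffer_codec_order_sketch.cc - illustrative only.
#include <cstdio>
#include <string>
#include <vector>

struct Codec {
  std::string name;
  int id;  // payload type taken from the canonical mapping
};

const Codec* FindByName(const std::vector<Codec>& list,
                        const std::string& name) {
  for (const Codec& c : list) {
    if (c.name == name) return &c;
  }
  return nullptr;
}

// Order: previously negotiated codecs first (stable priority on re-offer),
// then the remaining supported codecs; payload types always come from
// `canonical`, the full mapping shared by the whole session description.
std::vector<Codec> OrderForReoffer(const std::vector<Codec>& previous,
                                   const std::vector<Codec>& supported,
                                   const std::vector<Codec>& canonical) {
  std::vector<Codec> out;
  auto add = [&](const std::string& name) {
    if (FindByName(out, name)) return;  // already added
    if (const Codec* c = FindByName(canonical, name)) out.push_back(*c);
  };
  for (const Codec& c : previous) add(c.name);
  for (const Codec& c : supported) add(c.name);
  return out;
}

int main() {
  std::vector<Codec> canonical = {{"VP8", 96}, {"VP9", 98}, {"AV1", 45}};
  std::vector<Codec> previous = {{"VP9", 98}};  // negotiated last round
  std::vector<Codec> supported = {{"VP8", 0}, {"VP9", 0}, {"AV1", 0}};
  for (const Codec& c : OrderForReoffer(previous, supported, canonical)) {
    std::printf("%s %d\n", c.name.c_str(), c.id);  // VP9 98, VP8 96, AV1 45
  }
  return 0;
}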
-bool MediaSessionDescriptionFactory::AddAudioContentForOffer( +// The payload types should come from codecs, but the order should come +// from current_content->codecs() and then supported_codecs, to ensure that +// re-offers don't change existing codec priority, and that new codecs are added +// with the right priority. +RTCError MediaSessionDescriptionFactory::AddRtpContentForOffer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, const ContentInfo* current_content, const SessionDescription* current_description, - const RtpHeaderExtensions& audio_rtp_extensions, - const AudioCodecs& audio_codecs, + const RtpHeaderExtensions& header_extensions, StreamParamsVec* current_streams, - SessionDescription* desc, + SessionDescription* session_description, IceCredentialsIterator* ice_credentials) const { - const webrtc::FieldTrialsView* field_trials = - &transport_desc_factory_->trials(); - // Filter audio_codecs (which includes all codecs, with correctly remapped - // payload types) based on transceiver direction. - const AudioCodecs& supported_audio_codecs = - GetAudioCodecsForOffer(media_description_options.direction); - - AudioCodecs filtered_codecs; - - if (!media_description_options.codec_preferences.empty()) { - // Add the codecs from the current transceiver's codec preferences. - // They override any existing codecs from previous negotiations. - filtered_codecs = MatchCodecPreference( - media_description_options.codec_preferences, audio_codecs, - supported_audio_codecs, field_trials); + RTC_DCHECK(media_description_options.type == webrtc::MediaType::AUDIO || + media_description_options.type == webrtc::MediaType::VIDEO); + + std::vector codecs_to_include; + std::string mid = media_description_options.mid; + RTCErrorOr> error_or_filtered_codecs = + codec_lookup_helper_->GetCodecVendor()->GetNegotiatedCodecsForOffer( + media_description_options, session_options, current_content, + *codec_lookup_helper_->PayloadTypeSuggester()); + if (!error_or_filtered_codecs.ok()) { + return error_or_filtered_codecs.MoveError(); + } + codecs_to_include = error_or_filtered_codecs.MoveValue(); + std::unique_ptr content_description; + if (media_description_options.type == webrtc::MediaType::AUDIO) { + content_description = std::make_unique(); } else { - // Add the codecs from current content if it exists and is not rejected nor - // recycled. - if (current_content && !current_content->rejected && - current_content->name == media_description_options.mid) { - RTC_CHECK(IsMediaContentOfType(current_content, MEDIA_TYPE_AUDIO)); - const AudioContentDescription* acd = - current_content->media_description()->as_audio(); - for (const AudioCodec& codec : acd->codecs()) { - if (FindMatchingCodec(acd->codecs(), audio_codecs, codec, - field_trials)) { - filtered_codecs.push_back(codec); - } - } - } - // Add other supported audio codecs. - - for (const AudioCodec& codec : supported_audio_codecs) { - absl::optional found_codec = FindMatchingCodec( - supported_audio_codecs, audio_codecs, codec, field_trials); - if (found_codec && - !FindMatchingCodec( - supported_audio_codecs, filtered_codecs, codec, field_trials)) { - // Use the `found_codec` from `audio_codecs` because it has the - // correctly mapped payload type. - filtered_codecs.push_back(*found_codec); - } - } - } - if (!session_options.vad_enabled) { - // If application doesn't want CN codecs in offer. 
- StripCNCodecs(&filtered_codecs); - } - - cricket::SecurePolicy sdes_policy = - IsDtlsActive(current_content, current_description) ? cricket::SEC_DISABLED - : secure(); - - auto audio = std::make_unique(); - std::vector crypto_suites; - GetSupportedAudioSdesCryptoSuiteNames(session_options.crypto_options, - &crypto_suites); - if (!CreateMediaContentOffer( - media_description_options, session_options, filtered_codecs, - sdes_policy, GetCryptos(current_content), crypto_suites, - audio_rtp_extensions, ssrc_generator(), current_streams, audio.get(), - transport_desc_factory_->trials())) { - return false; - } - - bool secure_transport = (transport_desc_factory_->secure() != SEC_DISABLED); - SetMediaProtocol(secure_transport, audio.get()); - - audio->set_direction(media_description_options.direction); - - desc->AddContent(media_description_options.mid, MediaProtocolType::kRtp, - media_description_options.stopped, std::move(audio)); - if (!AddTransportOffer(media_description_options.mid, - media_description_options.transport_options, - current_description, desc, ice_credentials)) { - return false; - } - - return true; -} - -// TODO(kron): This function is very similar to AddAudioContentForOffer. -// Refactor to reuse shared code. -bool MediaSessionDescriptionFactory::AddVideoContentForOffer( - const MediaDescriptionOptions& media_description_options, - const MediaSessionOptions& session_options, - const ContentInfo* current_content, - const SessionDescription* current_description, - const RtpHeaderExtensions& video_rtp_extensions, - const VideoCodecs& video_codecs, - StreamParamsVec* current_streams, - SessionDescription* desc, - IceCredentialsIterator* ice_credentials) const { - const webrtc::FieldTrialsView* field_trials = - &transport_desc_factory_->trials(); - // Filter video_codecs (which includes all codecs, with correctly remapped - // payload types) based on transceiver direction. - const VideoCodecs& supported_video_codecs = - GetVideoCodecsForOffer(media_description_options.direction); - - VideoCodecs filtered_codecs; - - if (!media_description_options.codec_preferences.empty()) { - // Add the codecs from the current transceiver's codec preferences. - // They override any existing codecs from previous negotiations. - filtered_codecs = MatchCodecPreference( - media_description_options.codec_preferences, video_codecs, - supported_video_codecs, field_trials); - } else { - // Add the codecs from current content if it exists and is not rejected nor - // recycled. - if (current_content && !current_content->rejected && - current_content->name == media_description_options.mid) { - RTC_CHECK(IsMediaContentOfType(current_content, MEDIA_TYPE_VIDEO)); - const VideoContentDescription* vcd = - current_content->media_description()->as_video(); - for (const VideoCodec& codec : vcd->codecs()) { - if (FindMatchingCodec(vcd->codecs(), video_codecs, codec, - field_trials)) { - filtered_codecs.push_back(codec); - } - } - } - // Add other supported video codecs. - for (const VideoCodec& codec : supported_video_codecs) { - absl::optional found_codec = FindMatchingCodec( - supported_video_codecs, video_codecs, codec, field_trials); - if (found_codec && - !FindMatchingCodec( - supported_video_codecs, filtered_codecs, codec, field_trials)) { - // Use the `found_codec` from `video_codecs` because it has the - // correctly mapped payload type. - if (IsRtxCodec(codec)) { - // For RTX we might need to adjust the apt parameter if we got a - // remote offer without RTX for a codec for which we support RTX. 
- auto referenced_codec = - GetAssociatedCodecForRtx(supported_video_codecs, codec); - RTC_DCHECK(referenced_codec); - - // Find the codec we should be referencing and point to it. - absl::optional changed_referenced_codec = - FindMatchingCodec(supported_video_codecs, - filtered_codecs, *referenced_codec, - field_trials); - if (changed_referenced_codec) { - found_codec->SetParam(kCodecParamAssociatedPayloadType, - changed_referenced_codec->id); - } - } - filtered_codecs.push_back(*found_codec); - } - } - } - - if (session_options.raw_packetization_for_video) { - for (VideoCodec& codec : filtered_codecs) { - if (codec.IsMediaCodec()) { - codec.packetization = kPacketizationParamRaw; - } - } - } - - cricket::SecurePolicy sdes_policy = - IsDtlsActive(current_content, current_description) ? cricket::SEC_DISABLED - : secure(); - auto video = std::make_unique(); - std::vector crypto_suites; - GetSupportedVideoSdesCryptoSuiteNames(session_options.crypto_options, - &crypto_suites); - if (!CreateMediaContentOffer( - media_description_options, session_options, filtered_codecs, - sdes_policy, GetCryptos(current_content), crypto_suites, - video_rtp_extensions, ssrc_generator(), current_streams, video.get(), - transport_desc_factory_->trials())) { - return false; - } - - video->set_bandwidth(kAutoBandwidth); - - bool secure_transport = (transport_desc_factory_->secure() != SEC_DISABLED); - SetMediaProtocol(secure_transport, video.get()); - - video->set_direction(media_description_options.direction); - - desc->AddContent(media_description_options.mid, MediaProtocolType::kRtp, - media_description_options.stopped, std::move(video)); - if (!AddTransportOffer(media_description_options.mid, - media_description_options.transport_options, - current_description, desc, ice_credentials)) { - return false; - } - - return true; -} - -bool MediaSessionDescriptionFactory::AddDataContentForOffer( + content_description = std::make_unique(); + } + // RFC 8888 support. + content_description->set_rtcp_fb_ack_ccfb( + transport_desc_factory_->trials().IsEnabled( + "WebRTC-RFC8888CongestionControlFeedback")); + auto error = CreateMediaContentOffer( + media_description_options, session_options, codecs_to_include, + header_extensions, ssrc_generator(), current_streams, + content_description.get(), transport_desc_factory_->trials()); + if (!error.ok()) { + return error; + } + + // Insecure transport should only occur in testing. 
+ bool secure_transport = !(transport_desc_factory_->insecure()); + SetMediaProtocol(secure_transport, content_description.get()); + + content_description->set_direction(media_description_options.direction); + bool has_codecs = !content_description->codecs().empty(); + + session_description->AddContent( + media_description_options.mid, MediaProtocolType::kRtp, + media_description_options.stopped || !has_codecs, + std::move(content_description)); + return AddTransportOffer(media_description_options.mid, + media_description_options.transport_options, + current_description, session_description, + ice_credentials); +} + +RTCError MediaSessionDescriptionFactory::AddDataContentForOffer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, const ContentInfo* current_content, @@ -2526,15 +1212,9 @@ bool MediaSessionDescriptionFactory::AddDataContentForOffer( IceCredentialsIterator* ice_credentials) const { auto data = std::make_unique(); - bool secure_transport = (transport_desc_factory_->secure() != SEC_DISABLED); + bool secure_transport = true; - cricket::SecurePolicy sdes_policy = - IsDtlsActive(current_content, current_description) ? cricket::SEC_DISABLED - : secure(); std::vector crypto_suites; - // SDES doesn't make sense for SCTP, so we disable it, and we only - // get SDES crypto suites for RTP-based data channels. - sdes_policy = cricket::SEC_DISABLED; // Unlike SetMediaProtocol below, we need to set the protocol // before we call CreateMediaContentOffer. Otherwise, // CreateMediaContentOffer won't know this is SCTP and will @@ -2542,33 +1222,31 @@ bool MediaSessionDescriptionFactory::AddDataContentForOffer( data->set_protocol(secure_transport ? kMediaProtocolUdpDtlsSctp : kMediaProtocolSctp); data->set_use_sctpmap(session_options.use_obsolete_sctp_sdp); - data->set_max_message_size(kSctpSendBufferSize); + data->set_max_message_size(webrtc::kSctpSendBufferSize); - if (!CreateContentOffer(media_description_options, session_options, - sdes_policy, GetCryptos(current_content), - crypto_suites, RtpHeaderExtensions(), - ssrc_generator(), current_streams, data.get())) { - return false; + auto error = CreateContentOffer(media_description_options, session_options, + RtpHeaderExtensions(), ssrc_generator(), + current_streams, data.get()); + if (!error.ok()) { + return error; } desc->AddContent(media_description_options.mid, MediaProtocolType::kSctp, media_description_options.stopped, std::move(data)); - if (!AddTransportOffer(media_description_options.mid, - media_description_options.transport_options, - current_description, desc, ice_credentials)) { - return false; - } - return true; + return AddTransportOffer(media_description_options.mid, + media_description_options.transport_options, + current_description, desc, ice_credentials); } -bool MediaSessionDescriptionFactory::AddUnsupportedContentForOffer( +RTCError MediaSessionDescriptionFactory::AddUnsupportedContentForOffer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, const ContentInfo* current_content, const SessionDescription* current_description, SessionDescription* desc, IceCredentialsIterator* ice_credentials) const { - RTC_CHECK(IsMediaContentOfType(current_content, MEDIA_TYPE_UNSUPPORTED)); + RTC_CHECK( + IsMediaContentOfType(current_content, webrtc::MediaType::UNSUPPORTED)); const UnsupportedContentDescription* current_unsupported_description = current_content->media_description()->as_unsupported(); @@ -2578,27 +1256,24 @@ bool 
MediaSessionDescriptionFactory::AddUnsupportedContentForOffer( desc->AddContent(media_description_options.mid, MediaProtocolType::kOther, /*rejected=*/true, std::move(unsupported)); - if (!AddTransportOffer(media_description_options.mid, - media_description_options.transport_options, - current_description, desc, ice_credentials)) { - return false; - } - return true; + return AddTransportOffer(media_description_options.mid, + media_description_options.transport_options, + current_description, desc, ice_credentials); } -// `audio_codecs` = set of all possible codecs that can be used, with correct +// `codecs` = set of all possible codecs that can be used, with correct // payload type mappings // -// `supported_audio_codecs` = set of codecs that are supported for the direction +// `supported_codecs` = set of codecs that are supported for the direction // of this m= section // -// acd->codecs() = set of previously negotiated codecs for this m= section +// mcd->codecs() = set of previously negotiated codecs for this m= section // -// The payload types should come from audio_codecs, but the order should come -// from acd->codecs() and then supported_codecs, to ensure that re-offers don't +// The payload types should come from codecs, but the order should come +// from mcd->codecs() and then supported_codecs, to ensure that re-offers don't // change existing codec priority, and that new codecs are added with the right // priority. -bool MediaSessionDescriptionFactory::AddAudioContentForAnswer( +RTCError MediaSessionDescriptionFactory::AddRtpContentForAnswer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, const ContentInfo* offer_content, @@ -2606,257 +1281,116 @@ bool MediaSessionDescriptionFactory::AddAudioContentForAnswer( const ContentInfo* current_content, const SessionDescription* current_description, const TransportInfo* bundle_transport, - const AudioCodecs& audio_codecs, - const RtpHeaderExtensions& rtp_header_extensions, + const RtpHeaderExtensions& header_extensions, StreamParamsVec* current_streams, SessionDescription* answer, IceCredentialsIterator* ice_credentials) const { - const webrtc::FieldTrialsView* field_trials = - &transport_desc_factory_->trials(); - RTC_CHECK(IsMediaContentOfType(offer_content, MEDIA_TYPE_AUDIO)); - const AudioContentDescription* offer_audio_description = - offer_content->media_description()->as_audio(); - - std::unique_ptr audio_transport = CreateTransportAnswer( + RTC_DCHECK(media_description_options.type == webrtc::MediaType::AUDIO || + media_description_options.type == webrtc::MediaType::VIDEO); + RTC_CHECK( + IsMediaContentOfType(offer_content, media_description_options.type)); + const RtpMediaContentDescription* offer_content_description; + if (media_description_options.type == webrtc::MediaType::AUDIO) { + offer_content_description = offer_content->media_description()->as_audio(); + } else { + offer_content_description = offer_content->media_description()->as_video(); + } + // If this section is part of a bundle, bundle_transport is non-null. + // Then require_transport_attributes is false - we can handle sections + // without the DTLS parameters. For rejected m-lines it does not matter. + // Otherwise, transport attributes MUST be present. 
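The bundling comment above reduces to a single predicate: transport (ICE/DTLS) attributes are required only for sections that are neither bundled nor rejected. A minimal restatement with invented names, shown only to make the rule explicit:

#include <cassert>

// Whether an m= section in the offer must carry its own transport (ICE/DTLS)
// attributes. Bundled sections inherit them from the bundle transport, and
// rejected sections do not need them at all.
bool RequireTransportAttributes(bool offer_section_rejected,
                                bool section_is_bundled) {
  return !offer_section_rejected && !section_is_bundled;
}

int main() {
  // A bundled section may omit its own DTLS parameters.
  assert(!RequireTransportAttributes(/*offer_section_rejected=*/false,
                                     /*section_is_bundled=*/true));
  // A standalone, accepted section must carry them.
  assert(RequireTransportAttributes(/*offer_section_rejected=*/false,
                                    /*section_is_bundled=*/false));
}

In the patch itself this is simply the expression `!offer_content->rejected && bundle_transport == nullptr` passed as `require_transport_attributes` to `CreateTransportAnswer`.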
+ std::unique_ptr transport = CreateTransportAnswer( media_description_options.mid, offer_description, media_description_options.transport_options, current_description, - bundle_transport != nullptr, ice_credentials); - if (!audio_transport) { - return false; + !offer_content->rejected && bundle_transport == nullptr, ice_credentials); + if (!transport) { + LOG_AND_RETURN_ERROR( + RTCErrorType::INTERNAL_ERROR, + "Failed to create transport answer, transport is missing"); } // Pick codecs based on the requested communications direction in the offer // and the selected direction in the answer. // Note these will be filtered one final time in CreateMediaContentAnswer. auto wants_rtd = media_description_options.direction; - auto offer_rtd = offer_audio_description->direction(); + auto offer_rtd = offer_content_description->direction(); auto answer_rtd = NegotiateRtpTransceiverDirection(offer_rtd, wants_rtd); - AudioCodecs supported_audio_codecs = - GetAudioCodecsForAnswer(offer_rtd, answer_rtd); - AudioCodecs filtered_codecs; - - if (!media_description_options.codec_preferences.empty()) { - filtered_codecs = MatchCodecPreference( - media_description_options.codec_preferences, audio_codecs, - supported_audio_codecs, field_trials); - } else { - // Add the codecs from current content if it exists and is not rejected nor - // recycled. - if (current_content && !current_content->rejected && - current_content->name == media_description_options.mid) { - RTC_CHECK(IsMediaContentOfType(current_content, MEDIA_TYPE_AUDIO)); - const AudioContentDescription* acd = - current_content->media_description()->as_audio(); - for (const AudioCodec& codec : acd->codecs()) { - if (FindMatchingCodec(acd->codecs(), audio_codecs, codec, - field_trials)) { - filtered_codecs.push_back(codec); - } - } - } - // Add other supported audio codecs. - for (const AudioCodec& codec : supported_audio_codecs) { - if (FindMatchingCodec(supported_audio_codecs, audio_codecs, - codec, field_trials) && - !FindMatchingCodec( - supported_audio_codecs, filtered_codecs, codec, field_trials)) { - // We should use the local codec with local parameters and the codec id - // would be correctly mapped in `NegotiateCodecs`. - filtered_codecs.push_back(codec); - } - } - } - if (!session_options.vad_enabled) { - // If application doesn't want CN codecs in answer. - StripCNCodecs(&filtered_codecs); + std::vector codecs_to_include; + RTCErrorOr> error_or_filtered_codecs = + codec_lookup_helper_->GetCodecVendor()->GetNegotiatedCodecsForAnswer( + media_description_options, session_options, offer_rtd, answer_rtd, + current_content, offer_content_description->codecs(), + *codec_lookup_helper_->PayloadTypeSuggester()); + if (!error_or_filtered_codecs.ok()) { + return error_or_filtered_codecs.MoveError(); } - + codecs_to_include = error_or_filtered_codecs.MoveValue(); // Determine if we have media codecs in common. - bool has_common_media_codecs = - std::find_if(filtered_codecs.begin(), filtered_codecs.end(), - [](const AudioCodec& c) { - return !(IsRedCodec(c) || IsComfortNoiseCodec(c)); - }) != filtered_codecs.end(); + bool has_usable_media_codecs = + std::find_if(codecs_to_include.begin(), codecs_to_include.end(), + [](const Codec& c) { + return c.IsMediaCodec() && !IsComfortNoiseCodec(c); + }) != codecs_to_include.end(); bool bundle_enabled = offer_description->HasGroup(GROUP_TYPE_BUNDLE) && session_options.bundle_enabled; - auto audio_answer = std::make_unique(); - // Do not require or create SDES cryptos if DTLS is used. 
- cricket::SecurePolicy sdes_policy = - audio_transport->secure() ? cricket::SEC_DISABLED : secure(); - if (!SetCodecsInAnswer(offer_audio_description, filtered_codecs, - media_description_options, session_options, - ssrc_generator(), current_streams, audio_answer.get(), - transport_desc_factory_->trials())) { - return false; - } - if (!CreateMediaContentAnswer( - offer_audio_description, media_description_options, session_options, - sdes_policy, GetCryptos(current_content), - filtered_rtp_header_extensions(rtp_header_extensions), - ssrc_generator(), enable_encrypted_rtp_header_extensions_, - current_streams, bundle_enabled, audio_answer.get())) { - return false; // Fails the session setup. - } - - bool secure = bundle_transport ? bundle_transport->description.secure() - : audio_transport->secure(); - bool rejected = media_description_options.stopped || - offer_content->rejected || !has_common_media_codecs || - !IsMediaProtocolSupported(MEDIA_TYPE_AUDIO, - audio_answer->protocol(), secure); - if (!AddTransportAnswer(media_description_options.mid, - *(audio_transport.get()), answer)) { - return false; - } - - if (rejected) { - RTC_LOG(LS_INFO) << "Audio m= section '" << media_description_options.mid - << "' being rejected in answer."; - } - - answer->AddContent(media_description_options.mid, offer_content->type, - rejected, std::move(audio_answer)); - return true; -} - -// TODO(kron): This function is very similar to AddAudioContentForAnswer. -// Refactor to reuse shared code. -bool MediaSessionDescriptionFactory::AddVideoContentForAnswer( - const MediaDescriptionOptions& media_description_options, - const MediaSessionOptions& session_options, - const ContentInfo* offer_content, - const SessionDescription* offer_description, - const ContentInfo* current_content, - const SessionDescription* current_description, - const TransportInfo* bundle_transport, - const VideoCodecs& video_codecs, - const RtpHeaderExtensions& default_video_rtp_header_extensions, - StreamParamsVec* current_streams, - SessionDescription* answer, - IceCredentialsIterator* ice_credentials) const { - const webrtc::FieldTrialsView* field_trials = - &transport_desc_factory_->trials(); - RTC_CHECK(IsMediaContentOfType(offer_content, MEDIA_TYPE_VIDEO)); - const VideoContentDescription* offer_video_description = - offer_content->media_description()->as_video(); - - std::unique_ptr video_transport = CreateTransportAnswer( - media_description_options.mid, offer_description, - media_description_options.transport_options, current_description, - bundle_transport != nullptr, ice_credentials); - if (!video_transport) { - return false; - } - - // Pick codecs based on the requested communications direction in the offer - // and the selected direction in the answer. - // Note these will be filtered one final time in CreateMediaContentAnswer. 
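The "pick codecs based on the requested direction in the offer and the selected direction in the answer" step relies on the standard JSEP direction rule applied by `NegotiateRtpTransceiverDirection`: the answerer may only send media the offerer is willing to receive, and only receive media the offerer is willing to send. A hypothetical, simplified restatement with an invented `Direction` type (not the WebRTC enum):

// Invented, simplified direction type: send/recv flags as seen from one side.
struct Direction {
  bool send;
  bool recv;
};

// The answer direction is the intersection of what the answerer wants and
// what the offer permits: the answerer sends only if the offerer receives,
// and receives only if the offerer sends.
Direction NegotiateAnswerDirection(Direction offered, Direction wanted) {
  return Direction{/*send=*/wanted.send && offered.recv,
                   /*recv=*/wanted.recv && offered.send};
}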
- auto wants_rtd = media_description_options.direction; - auto offer_rtd = offer_video_description->direction(); - auto answer_rtd = NegotiateRtpTransceiverDirection(offer_rtd, wants_rtd); - VideoCodecs supported_video_codecs = - GetVideoCodecsForAnswer(offer_rtd, answer_rtd); - - VideoCodecs filtered_codecs; - - if (!media_description_options.codec_preferences.empty()) { - filtered_codecs = MatchCodecPreference( - media_description_options.codec_preferences, video_codecs, - supported_video_codecs, field_trials); + std::unique_ptr answer_content; + if (media_description_options.type == webrtc::MediaType::AUDIO) { + answer_content = std::make_unique(); } else { - // Add the codecs from current content if it exists and is not rejected nor - // recycled. - if (current_content && !current_content->rejected && - current_content->name == media_description_options.mid) { - RTC_CHECK(IsMediaContentOfType(current_content, MEDIA_TYPE_VIDEO)); - const VideoContentDescription* vcd = - current_content->media_description()->as_video(); - for (const VideoCodec& codec : vcd->codecs()) { - if (FindMatchingCodec(vcd->codecs(), video_codecs, codec, - field_trials)) { - filtered_codecs.push_back(codec); - } - } - } - - // Add other supported video codecs. - VideoCodecs other_video_codecs; - for (const VideoCodec& codec : supported_video_codecs) { - if (FindMatchingCodec(supported_video_codecs, video_codecs, - codec, field_trials) && - !FindMatchingCodec( - supported_video_codecs, filtered_codecs, codec, field_trials)) { - // We should use the local codec with local parameters and the codec id - // would be correctly mapped in `NegotiateCodecs`. - other_video_codecs.push_back(codec); - } - } - - // Use ComputeCodecsUnion to avoid having duplicate payload IDs - filtered_codecs = ComputeCodecsUnion( - filtered_codecs, other_video_codecs, field_trials); + answer_content = std::make_unique(); } - // Determine if we have media codecs in common. - bool has_common_media_codecs = - std::find_if( - filtered_codecs.begin(), filtered_codecs.end(), - [](const VideoCodec& c) { - return !(IsRedCodec(c) || IsUlpfecCodec(c) || IsFlexfecCodec(c)); - }) != filtered_codecs.end(); - - if (session_options.raw_packetization_for_video) { - for (VideoCodec& codec : filtered_codecs) { - if (codec.IsMediaCodec()) { - codec.packetization = kPacketizationParamRaw; - } + // RFC 8888 support. Only answer with "ack ccfb" if offer has it and + // experiment is enabled. + if (offer_content_description->rtcp_fb_ack_ccfb()) { + answer_content->set_rtcp_fb_ack_ccfb( + transport_desc_factory_->trials().IsEnabled( + "WebRTC-RFC8888CongestionControlFeedback")); + for (auto& codec : codecs_to_include) { + codec.feedback_params.Remove(FeedbackParam(kRtcpFbParamTransportCc)); } } - - bool bundle_enabled = offer_description->HasGroup(GROUP_TYPE_BUNDLE) && - session_options.bundle_enabled; - auto video_answer = std::make_unique(); - // Do not require or create SDES cryptos if DTLS is used. - cricket::SecurePolicy sdes_policy = - video_transport->secure() ? 
cricket::SEC_DISABLED : secure(); - if (!SetCodecsInAnswer(offer_video_description, filtered_codecs, + if (!SetCodecsInAnswer(offer_content_description, codecs_to_include, media_description_options, session_options, - ssrc_generator(), current_streams, video_answer.get(), + ssrc_generator(), current_streams, + answer_content.get(), transport_desc_factory_->trials())) { - return false; + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to set codecs in answer"); } if (!CreateMediaContentAnswer( - offer_video_description, media_description_options, session_options, - sdes_policy, GetCryptos(current_content), - filtered_rtp_header_extensions(default_video_rtp_header_extensions), - ssrc_generator(), enable_encrypted_rtp_header_extensions_, - current_streams, bundle_enabled, video_answer.get())) { - return false; // Failed the session setup. + offer_content_description, media_description_options, session_options, + filtered_rtp_header_extensions(header_extensions), ssrc_generator(), + enable_encrypted_rtp_header_extensions_, current_streams, + bundle_enabled, answer_content.get())) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to create answer"); } + bool secure = bundle_transport ? bundle_transport->description.secure() - : video_transport->secure(); + : transport->secure(); bool rejected = media_description_options.stopped || - offer_content->rejected || !has_common_media_codecs || - !IsMediaProtocolSupported(MEDIA_TYPE_VIDEO, - video_answer->protocol(), secure); - if (!AddTransportAnswer(media_description_options.mid, - *(video_transport.get()), answer)) { - return false; + offer_content->rejected || !has_usable_media_codecs || + !IsMediaProtocolSupported(webrtc::MediaType::AUDIO, + answer_content->protocol(), secure); + if (rejected) { + RTC_LOG(LS_INFO) << "m= section '" << media_description_options.mid + << "' being rejected in answer."; } - if (!rejected) { - video_answer->set_bandwidth(kAutoBandwidth); - } else { - RTC_LOG(LS_INFO) << "Video m= section '" << media_description_options.mid - << "' being rejected in answer."; + auto error = + AddTransportAnswer(media_description_options.mid, *transport, answer); + if (!error.ok()) { + return error; } + answer->AddContent(media_description_options.mid, offer_content->type, - rejected, std::move(video_answer)); - return true; + rejected, std::move(answer_content)); + return RTCError::OK(); } -bool MediaSessionDescriptionFactory::AddDataContentForAnswer( +RTCError MediaSessionDescriptionFactory::AddDataContentForAnswer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, const ContentInfo* offer_content, @@ -2870,17 +1404,16 @@ bool MediaSessionDescriptionFactory::AddDataContentForAnswer( std::unique_ptr data_transport = CreateTransportAnswer( media_description_options.mid, offer_description, media_description_options.transport_options, current_description, - bundle_transport != nullptr, ice_credentials); + !offer_content->rejected && bundle_transport == nullptr, ice_credentials); if (!data_transport) { - return false; + LOG_AND_RETURN_ERROR( + RTCErrorType::INTERNAL_ERROR, + "Failed to create transport answer, data transport is missing"); } - // Do not require or create SDES cryptos if DTLS is used. - cricket::SecurePolicy sdes_policy = - data_transport->secure() ? 
cricket::SEC_DISABLED : secure(); bool bundle_enabled = offer_description->HasGroup(GROUP_TYPE_BUNDLE) && session_options.bundle_enabled; - RTC_CHECK(IsMediaContentOfType(offer_content, MEDIA_TYPE_DATA)); + RTC_CHECK(IsMediaContentOfType(offer_content, webrtc::MediaType::DATA)); std::unique_ptr data_answer; if (offer_content->media_description()->as_sctp()) { // SCTP data content @@ -2894,18 +1427,20 @@ bool MediaSessionDescriptionFactory::AddDataContentForAnswer( // 0 is treated specially - it means "I can accept any size". Since // we do not implement infinite size messages, reply with // kSctpSendBufferSize. - if (offer_data_description->max_message_size() == 0) { - data_answer->as_sctp()->set_max_message_size(kSctpSendBufferSize); + if (offer_data_description->max_message_size() <= 0) { + data_answer->as_sctp()->set_max_message_size(webrtc::kSctpSendBufferSize); } else { - data_answer->as_sctp()->set_max_message_size(std::min( - offer_data_description->max_message_size(), kSctpSendBufferSize)); + data_answer->as_sctp()->set_max_message_size( + std::min(offer_data_description->max_message_size(), + webrtc::kSctpSendBufferSize)); } if (!CreateMediaContentAnswer( offer_data_description, media_description_options, session_options, - sdes_policy, GetCryptos(current_content), RtpHeaderExtensions(), - ssrc_generator(), enable_encrypted_rtp_header_extensions_, - current_streams, bundle_enabled, data_answer.get())) { - return false; // Fails the session setup. + RtpHeaderExtensions(), ssrc_generator(), + enable_encrypted_rtp_header_extensions_, current_streams, + bundle_enabled, data_answer.get())) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to create answer"); } // Respond with sctpmap if the offer uses sctpmap. bool offer_uses_sctpmap = offer_data_description->use_sctpmap(); @@ -2919,19 +1454,19 @@ bool MediaSessionDescriptionFactory::AddDataContentForAnswer( bool rejected = media_description_options.stopped || offer_content->rejected || - !IsMediaProtocolSupported(MEDIA_TYPE_DATA, + !IsMediaProtocolSupported(webrtc::MediaType::DATA, data_answer->protocol(), secure); - if (!AddTransportAnswer(media_description_options.mid, - *(data_transport.get()), answer)) { - return false; + auto error = AddTransportAnswer(media_description_options.mid, + *data_transport, answer); + if (!error.ok()) { + return error; } - answer->AddContent(media_description_options.mid, offer_content->type, rejected, std::move(data_answer)); - return true; + return RTCError::OK(); } -bool MediaSessionDescriptionFactory::AddUnsupportedContentForAnswer( +RTCError MediaSessionDescriptionFactory::AddUnsupportedContentForAnswer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, const ContentInfo* offer_content, @@ -2942,14 +1477,18 @@ bool MediaSessionDescriptionFactory::AddUnsupportedContentForAnswer( SessionDescription* answer, IceCredentialsIterator* ice_credentials) const { std::unique_ptr unsupported_transport = - CreateTransportAnswer(media_description_options.mid, offer_description, - media_description_options.transport_options, - current_description, bundle_transport != nullptr, - ice_credentials); + CreateTransportAnswer( + media_description_options.mid, offer_description, + media_description_options.transport_options, current_description, + !offer_content->rejected && bundle_transport == nullptr, + ice_credentials); if (!unsupported_transport) { - return false; + LOG_AND_RETURN_ERROR( + RTCErrorType::INTERNAL_ERROR, + "Failed to create transport 
answer, unsupported transport is missing"); } - RTC_CHECK(IsMediaContentOfType(offer_content, MEDIA_TYPE_UNSUPPORTED)); + RTC_CHECK( + IsMediaContentOfType(offer_content, webrtc::MediaType::UNSUPPORTED)); const UnsupportedContentDescription* offer_unsupported_description = offer_content->media_description()->as_unsupported(); @@ -2958,61 +1497,15 @@ bool MediaSessionDescriptionFactory::AddUnsupportedContentForAnswer( offer_unsupported_description->media_type()); unsupported_answer->set_protocol(offer_unsupported_description->protocol()); - if (!AddTransportAnswer(media_description_options.mid, - *(unsupported_transport.get()), answer)) { - return false; + auto error = AddTransportAnswer(media_description_options.mid, + *unsupported_transport, answer); + if (!error.ok()) { + return error; } + answer->AddContent(media_description_options.mid, offer_content->type, /*rejected=*/true, std::move(unsupported_answer)); - return true; -} - -void MediaSessionDescriptionFactory::ComputeAudioCodecsIntersectionAndUnion() { - const webrtc::FieldTrialsView* field_trials = - &transport_desc_factory_->trials(); - audio_sendrecv_codecs_.clear(); - all_audio_codecs_.clear(); - // Compute the audio codecs union. - for (const AudioCodec& send : audio_send_codecs_) { - all_audio_codecs_.push_back(send); - if (!FindMatchingCodec(audio_send_codecs_, audio_recv_codecs_, - send, field_trials)) { - // It doesn't make sense to have an RTX codec we support sending but not - // receiving. - RTC_DCHECK(!IsRtxCodec(send)); - } - } - for (const AudioCodec& recv : audio_recv_codecs_) { - if (!FindMatchingCodec(audio_recv_codecs_, audio_send_codecs_, - recv, field_trials)) { - all_audio_codecs_.push_back(recv); - } - } - // Use NegotiateCodecs to merge our codec lists, since the operation is - // essentially the same. Put send_codecs as the offered_codecs, which is the - // order we'd like to follow. The reasoning is that encoding is usually more - // expensive than decoding, and prioritizing a codec in the send list probably - // means it's a codec we can handle efficiently. - NegotiateCodecs(audio_recv_codecs_, audio_send_codecs_, - &audio_sendrecv_codecs_, true, field_trials); -} - -void MediaSessionDescriptionFactory::ComputeVideoCodecsIntersectionAndUnion() { - const webrtc::FieldTrialsView* field_trials = - &transport_desc_factory_->trials(); - video_sendrecv_codecs_.clear(); - - // Use ComputeCodecsUnion to avoid having duplicate payload IDs - all_video_codecs_ = - ComputeCodecsUnion(video_recv_codecs_, video_send_codecs_, field_trials); - - // Use NegotiateCodecs to merge our codec lists, since the operation is - // essentially the same. Put send_codecs as the offered_codecs, which is the - // order we'd like to follow. The reasoning is that encoding is usually more - // expensive than decoding, and prioritizing a codec in the send list probably - // means it's a codec we can handle efficiently. 
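The reasoning in the removed comment above (put the send codecs first because encoding is the more expensive direction, so a codec we offer to send is one we handle well) amounts to ordering the send/receive intersection by the send list. A simplified sketch over codec names only; it is not the removed `NegotiateCodecs` or `ComputeCodecsUnion` helpers:

#include <algorithm>
#include <string>
#include <vector>

// The sendrecv set is the intersection of send and recv codecs, kept in the
// send list's order so the codecs we prefer to encode stay on top.
std::vector<std::string> SendRecvCodecNames(
    const std::vector<std::string>& send_codecs,
    const std::vector<std::string>& recv_codecs) {
  std::vector<std::string> sendrecv;
  for (const std::string& name : send_codecs) {
    if (std::find(recv_codecs.begin(), recv_codecs.end(), name) !=
        recv_codecs.end()) {
      sendrecv.push_back(name);
    }
  }
  return sendrecv;
}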
- NegotiateCodecs(video_recv_codecs_, video_send_codecs_, - &video_sendrecv_codecs_, true, field_trials); + return RTCError::OK(); } bool IsMediaContent(const ContentInfo* content) { @@ -3021,23 +1514,23 @@ bool IsMediaContent(const ContentInfo* content) { } bool IsAudioContent(const ContentInfo* content) { - return IsMediaContentOfType(content, MEDIA_TYPE_AUDIO); + return IsMediaContentOfType(content, webrtc::MediaType::AUDIO); } bool IsVideoContent(const ContentInfo* content) { - return IsMediaContentOfType(content, MEDIA_TYPE_VIDEO); + return IsMediaContentOfType(content, webrtc::MediaType::VIDEO); } bool IsDataContent(const ContentInfo* content) { - return IsMediaContentOfType(content, MEDIA_TYPE_DATA); + return IsMediaContentOfType(content, webrtc::MediaType::DATA); } bool IsUnsupportedContent(const ContentInfo* content) { - return IsMediaContentOfType(content, MEDIA_TYPE_UNSUPPORTED); + return IsMediaContentOfType(content, webrtc::MediaType::UNSUPPORTED); } const ContentInfo* GetFirstMediaContent(const ContentInfos& contents, - MediaType media_type) { + webrtc::MediaType media_type) { for (const ContentInfo& content : contents) { if (IsMediaContentOfType(&content, media_type)) { return &content; @@ -3047,19 +1540,19 @@ const ContentInfo* GetFirstMediaContent(const ContentInfos& contents, } const ContentInfo* GetFirstAudioContent(const ContentInfos& contents) { - return GetFirstMediaContent(contents, MEDIA_TYPE_AUDIO); + return GetFirstMediaContent(contents, webrtc::MediaType::AUDIO); } const ContentInfo* GetFirstVideoContent(const ContentInfos& contents) { - return GetFirstMediaContent(contents, MEDIA_TYPE_VIDEO); + return GetFirstMediaContent(contents, webrtc::MediaType::VIDEO); } const ContentInfo* GetFirstDataContent(const ContentInfos& contents) { - return GetFirstMediaContent(contents, MEDIA_TYPE_DATA); + return GetFirstMediaContent(contents, webrtc::MediaType::DATA); } const ContentInfo* GetFirstMediaContent(const SessionDescription* sdesc, - MediaType media_type) { + webrtc::MediaType media_type) { if (sdesc == nullptr) { return nullptr; } @@ -3068,39 +1561,39 @@ const ContentInfo* GetFirstMediaContent(const SessionDescription* sdesc, } const ContentInfo* GetFirstAudioContent(const SessionDescription* sdesc) { - return GetFirstMediaContent(sdesc, MEDIA_TYPE_AUDIO); + return GetFirstMediaContent(sdesc, webrtc::MediaType::AUDIO); } const ContentInfo* GetFirstVideoContent(const SessionDescription* sdesc) { - return GetFirstMediaContent(sdesc, MEDIA_TYPE_VIDEO); + return GetFirstMediaContent(sdesc, webrtc::MediaType::VIDEO); } const ContentInfo* GetFirstDataContent(const SessionDescription* sdesc) { - return GetFirstMediaContent(sdesc, MEDIA_TYPE_DATA); + return GetFirstMediaContent(sdesc, webrtc::MediaType::DATA); } const MediaContentDescription* GetFirstMediaContentDescription( const SessionDescription* sdesc, - MediaType media_type) { + webrtc::MediaType media_type) { const ContentInfo* content = GetFirstMediaContent(sdesc, media_type); return (content ? content->media_description() : nullptr); } const AudioContentDescription* GetFirstAudioContentDescription( const SessionDescription* sdesc) { - auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_AUDIO); + auto desc = GetFirstMediaContentDescription(sdesc, webrtc::MediaType::AUDIO); return desc ? 
desc->as_audio() : nullptr; } const VideoContentDescription* GetFirstVideoContentDescription( const SessionDescription* sdesc) { - auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_VIDEO); + auto desc = GetFirstMediaContentDescription(sdesc, webrtc::MediaType::VIDEO); return desc ? desc->as_video() : nullptr; } const SctpDataContentDescription* GetFirstSctpDataContentDescription( const SessionDescription* sdesc) { - auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_DATA); + auto desc = GetFirstMediaContentDescription(sdesc, webrtc::MediaType::DATA); return desc ? desc->as_sctp() : nullptr; } @@ -3109,7 +1602,7 @@ const SctpDataContentDescription* GetFirstSctpDataContentDescription( // ContentInfo* GetFirstMediaContent(ContentInfos* contents, - MediaType media_type) { + webrtc::MediaType media_type) { for (ContentInfo& content : *contents) { if (IsMediaContentOfType(&content, media_type)) { return &content; @@ -3119,19 +1612,19 @@ ContentInfo* GetFirstMediaContent(ContentInfos* contents, } ContentInfo* GetFirstAudioContent(ContentInfos* contents) { - return GetFirstMediaContent(contents, MEDIA_TYPE_AUDIO); + return GetFirstMediaContent(contents, webrtc::MediaType::AUDIO); } ContentInfo* GetFirstVideoContent(ContentInfos* contents) { - return GetFirstMediaContent(contents, MEDIA_TYPE_VIDEO); + return GetFirstMediaContent(contents, webrtc::MediaType::VIDEO); } ContentInfo* GetFirstDataContent(ContentInfos* contents) { - return GetFirstMediaContent(contents, MEDIA_TYPE_DATA); + return GetFirstMediaContent(contents, webrtc::MediaType::DATA); } ContentInfo* GetFirstMediaContent(SessionDescription* sdesc, - MediaType media_type) { + webrtc::MediaType media_type) { if (sdesc == nullptr) { return nullptr; } @@ -3140,40 +1633,40 @@ ContentInfo* GetFirstMediaContent(SessionDescription* sdesc, } ContentInfo* GetFirstAudioContent(SessionDescription* sdesc) { - return GetFirstMediaContent(sdesc, MEDIA_TYPE_AUDIO); + return GetFirstMediaContent(sdesc, webrtc::MediaType::AUDIO); } ContentInfo* GetFirstVideoContent(SessionDescription* sdesc) { - return GetFirstMediaContent(sdesc, MEDIA_TYPE_VIDEO); + return GetFirstMediaContent(sdesc, webrtc::MediaType::VIDEO); } ContentInfo* GetFirstDataContent(SessionDescription* sdesc) { - return GetFirstMediaContent(sdesc, MEDIA_TYPE_DATA); + return GetFirstMediaContent(sdesc, webrtc::MediaType::DATA); } MediaContentDescription* GetFirstMediaContentDescription( SessionDescription* sdesc, - MediaType media_type) { + webrtc::MediaType media_type) { ContentInfo* content = GetFirstMediaContent(sdesc, media_type); return (content ? content->media_description() : nullptr); } AudioContentDescription* GetFirstAudioContentDescription( SessionDescription* sdesc) { - auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_AUDIO); + auto desc = GetFirstMediaContentDescription(sdesc, webrtc::MediaType::AUDIO); return desc ? desc->as_audio() : nullptr; } VideoContentDescription* GetFirstVideoContentDescription( SessionDescription* sdesc) { - auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_VIDEO); + auto desc = GetFirstMediaContentDescription(sdesc, webrtc::MediaType::VIDEO); return desc ? desc->as_video() : nullptr; } SctpDataContentDescription* GetFirstSctpDataContentDescription( SessionDescription* sdesc) { - auto desc = GetFirstMediaContentDescription(sdesc, MEDIA_TYPE_DATA); + auto desc = GetFirstMediaContentDescription(sdesc, webrtc::MediaType::DATA); return desc ? 
desc->as_sctp() : nullptr; } -} // namespace cricket +} // namespace webrtc diff --git a/pc/media_session.h b/pc/media_session.h index 87f04c779f..f33cd49a19 100644 --- a/pc/media_session.h +++ b/pc/media_session.h @@ -13,27 +13,21 @@ #ifndef PC_MEDIA_SESSION_H_ #define PC_MEDIA_SESSION_H_ -#include #include #include #include -#include "api/crypto/crypto_options.h" -#include "api/field_trials_view.h" #include "api/media_types.h" -#include "api/rtp_parameters.h" -#include "api/rtp_transceiver_direction.h" -#include "media/base/media_constants.h" -#include "media/base/rid_description.h" +#include "api/rtc_error.h" +#include "media/base/media_engine.h" #include "media/base/stream_params.h" #include "p2p/base/ice_credentials_iterator.h" #include "p2p/base/transport_description.h" #include "p2p/base/transport_description_factory.h" #include "p2p/base/transport_info.h" -#include "pc/jsep_transport.h" -#include "pc/media_protocol_names.h" +#include "pc/codec_vendor.h" +#include "pc/media_options.h" #include "pc/session_description.h" -#include "pc/simulcast_description.h" #include "rtc_base/memory/always_valid_pointer.h" #include "rtc_base/unique_id_generator.h" @@ -44,94 +38,7 @@ class ConnectionContext; } // namespace webrtc -namespace cricket { - -class MediaEngineInterface; - -// Default RTCP CNAME for unit tests. -const char kDefaultRtcpCname[] = "DefaultRtcpCname"; - -// Options for an RtpSender contained with an media description/"m=" section. -// Note: Spec-compliant Simulcast and legacy simulcast are mutually exclusive. -struct SenderOptions { - std::string track_id; - std::vector stream_ids; - // Use RIDs and Simulcast Layers to indicate spec-compliant Simulcast. - std::vector rids; - SimulcastLayerList simulcast_layers; - // Use `num_sim_layers` to indicate legacy simulcast. - int num_sim_layers; -}; - -// Options for an individual media description/"m=" section. -struct MediaDescriptionOptions { - MediaDescriptionOptions(MediaType type, - const std::string& mid, - webrtc::RtpTransceiverDirection direction, - bool stopped) - : type(type), mid(mid), direction(direction), stopped(stopped) {} - - // TODO(deadbeef): When we don't support Plan B, there will only be one - // sender per media description and this can be simplified. - void AddAudioSender(const std::string& track_id, - const std::vector& stream_ids); - void AddVideoSender(const std::string& track_id, - const std::vector& stream_ids, - const std::vector& rids, - const SimulcastLayerList& simulcast_layers, - int num_sim_layers); - - MediaType type; - std::string mid; - webrtc::RtpTransceiverDirection direction; - bool stopped; - TransportOptions transport_options; - // Note: There's no equivalent "RtpReceiverOptions" because only send - // stream information goes in the local descriptions. - std::vector sender_options; - std::vector codec_preferences; - std::vector header_extensions; - - private: - // Doesn't DCHECK on `type`. - void AddSenderInternal(const std::string& track_id, - const std::vector& stream_ids, - const std::vector& rids, - const SimulcastLayerList& simulcast_layers, - int num_sim_layers); -}; - -// Provides a mechanism for describing how m= sections should be generated. -// The m= section with index X will use media_description_options[X]. There -// must be an option for each existing section if creating an answer, or a -// subsequent offer. 
-struct MediaSessionOptions { - MediaSessionOptions() {} - - bool has_audio() const { return HasMediaDescription(MEDIA_TYPE_AUDIO); } - bool has_video() const { return HasMediaDescription(MEDIA_TYPE_VIDEO); } - bool has_data() const { return HasMediaDescription(MEDIA_TYPE_DATA); } - - bool HasMediaDescription(MediaType type) const; - - bool vad_enabled = true; // When disabled, removes all CN codecs from SDP. - bool rtcp_mux_enabled = true; - bool bundle_enabled = false; - bool offer_extmap_allow_mixed = false; - bool raw_packetization_for_video = false; - std::string rtcp_cname = kDefaultRtcpCname; - webrtc::CryptoOptions crypto_options; - // List of media description options in the same order that the media - // descriptions will be generated. - std::vector media_description_options; - std::vector pooled_ice_credentials; - - // Use the draft-ietf-mmusic-sctp-sdp-03 obsolete syntax for SCTP - // datachannels. - // Default is true for backwards compatibility with clients that use - // this internal interface. - bool use_obsolete_sctp_sdp = true; -}; +namespace webrtc { // Creates media session descriptions according to the supplied codecs and // other fields, as well as the supplied per-call options. @@ -139,35 +46,19 @@ struct MediaSessionOptions { // of the various fields to determine the proper result. class MediaSessionDescriptionFactory { public: - // Simple constructor that does not set any configuration for the factory. - // When using this constructor, the methods below can be used to set the - // configuration. - // The TransportDescriptionFactory and the UniqueRandomIdGenerator are not - // owned by MediaSessionDescriptionFactory, so they must be kept alive by the - // user of this class. - MediaSessionDescriptionFactory(const TransportDescriptionFactory* factory, - rtc::UniqueRandomIdGenerator* ssrc_generator); - // This helper automatically sets up the factory to get its configuration - // from the specified MediaEngine - MediaSessionDescriptionFactory(cricket::MediaEngineInterface* media_engine, + // This constructor automatically sets up the factory to get its configuration + // from the specified MediaEngine (when provided). + // The TransportDescriptionFactory, the UniqueRandomIdGenerator, and the + // PayloadTypeSuggester are not owned by MediaSessionDescriptionFactory, so + // they must be kept alive by the user of this class. 
+ MediaSessionDescriptionFactory(MediaEngineInterface* media_engine, bool rtx_enabled, - rtc::UniqueRandomIdGenerator* ssrc_generator, - const TransportDescriptionFactory* factory); + UniqueRandomIdGenerator* ssrc_generator, + const TransportDescriptionFactory* factory, + CodecLookupHelper* codec_lookup_helper); - const AudioCodecs& audio_sendrecv_codecs() const; - const AudioCodecs& audio_send_codecs() const; - const AudioCodecs& audio_recv_codecs() const; - void set_audio_codecs(const AudioCodecs& send_codecs, - const AudioCodecs& recv_codecs); - const VideoCodecs& video_sendrecv_codecs() const; - const VideoCodecs& video_send_codecs() const; - const VideoCodecs& video_recv_codecs() const; - void set_video_codecs(const VideoCodecs& send_codecs, - const VideoCodecs& recv_codecs); RtpHeaderExtensions filtered_rtp_header_extensions( RtpHeaderExtensions extensions) const; - SecurePolicy secure() const { return secure_; } - void set_secure(SecurePolicy s) { secure_ = s; } void set_enable_encrypted_rtp_header_extensions(bool enable) { enable_encrypted_rtp_header_extensions_ = enable; @@ -177,10 +68,10 @@ class MediaSessionDescriptionFactory { is_unified_plan_ = is_unified_plan; } - std::unique_ptr CreateOffer( + RTCErrorOr> CreateOfferOrError( const MediaSessionOptions& options, const SessionDescription* current_description) const; - std::unique_ptr CreateAnswer( + RTCErrorOr> CreateAnswerOrError( const SessionDescription* offer, const MediaSessionOptions& options, const SessionDescription* current_description) const; @@ -191,35 +82,16 @@ class MediaSessionDescriptionFactory { RtpHeaderExtensions video; }; - const AudioCodecs& GetAudioCodecsForOffer( - const webrtc::RtpTransceiverDirection& direction) const; - const AudioCodecs& GetAudioCodecsForAnswer( - const webrtc::RtpTransceiverDirection& offer, - const webrtc::RtpTransceiverDirection& answer) const; - const VideoCodecs& GetVideoCodecsForOffer( - const webrtc::RtpTransceiverDirection& direction) const; - const VideoCodecs& GetVideoCodecsForAnswer( - const webrtc::RtpTransceiverDirection& offer, - const webrtc::RtpTransceiverDirection& answer) const; - void GetCodecsForOffer( - const std::vector& current_active_contents, - AudioCodecs* audio_codecs, - VideoCodecs* video_codecs) const; - void GetCodecsForAnswer( - const std::vector& current_active_contents, - const SessionDescription& remote_offer, - AudioCodecs* audio_codecs, - VideoCodecs* video_codecs) const; AudioVideoRtpHeaderExtensions GetOfferedRtpHeaderExtensionsWithIds( const std::vector& current_active_contents, bool extmap_allow_mixed, const std::vector& media_description_options) const; - bool AddTransportOffer(const std::string& content_name, - const TransportOptions& transport_options, - const SessionDescription* current_desc, - SessionDescription* offer, - IceCredentialsIterator* ice_credentials) const; + RTCError AddTransportOffer(const std::string& content_name, + const TransportOptions& transport_options, + const SessionDescription* current_desc, + SessionDescription* offer, + IceCredentialsIterator* ice_credentials) const; std::unique_ptr CreateTransportAnswer( const std::string& content_name, @@ -229,54 +101,39 @@ class MediaSessionDescriptionFactory { bool require_transport_attributes, IceCredentialsIterator* ice_credentials) const; - bool AddTransportAnswer(const std::string& content_name, - const TransportDescription& transport_desc, - SessionDescription* answer_desc) const; + RTCError AddTransportAnswer(const std::string& content_name, + const TransportDescription& 
transport_desc, + SessionDescription* answer_desc) const; - // Helpers for adding media contents to the SessionDescription. Returns true - // it succeeds or the media content is not needed, or false if there is any - // error. - - bool AddAudioContentForOffer( + // Helpers for adding media contents to the SessionDescription. + RTCError AddRtpContentForOffer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, const ContentInfo* current_content, const SessionDescription* current_description, - const RtpHeaderExtensions& audio_rtp_extensions, - const AudioCodecs& audio_codecs, + const RtpHeaderExtensions& header_extensions, StreamParamsVec* current_streams, SessionDescription* desc, IceCredentialsIterator* ice_credentials) const; - bool AddVideoContentForOffer( + RTCError AddDataContentForOffer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, const ContentInfo* current_content, const SessionDescription* current_description, - const RtpHeaderExtensions& video_rtp_extensions, - const VideoCodecs& video_codecs, StreamParamsVec* current_streams, SessionDescription* desc, IceCredentialsIterator* ice_credentials) const; - bool AddDataContentForOffer( + RTCError AddUnsupportedContentForOffer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, const ContentInfo* current_content, const SessionDescription* current_description, - StreamParamsVec* current_streams, SessionDescription* desc, IceCredentialsIterator* ice_credentials) const; - bool AddUnsupportedContentForOffer( - const MediaDescriptionOptions& media_description_options, - const MediaSessionOptions& session_options, - const ContentInfo* current_content, - const SessionDescription* current_description, - SessionDescription* desc, - IceCredentialsIterator* ice_credentials) const; - - bool AddAudioContentForAnswer( + RTCError AddRtpContentForAnswer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, const ContentInfo* offer_content, @@ -284,13 +141,12 @@ class MediaSessionDescriptionFactory { const ContentInfo* current_content, const SessionDescription* current_description, const TransportInfo* bundle_transport, - const AudioCodecs& audio_codecs, - const RtpHeaderExtensions& rtp_header_extensions, + const RtpHeaderExtensions& header_extensions, StreamParamsVec* current_streams, SessionDescription* answer, IceCredentialsIterator* ice_credentials) const; - bool AddVideoContentForAnswer( + RTCError AddDataContentForAnswer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, const ContentInfo* offer_content, @@ -298,13 +154,11 @@ class MediaSessionDescriptionFactory { const ContentInfo* current_content, const SessionDescription* current_description, const TransportInfo* bundle_transport, - const VideoCodecs& video_codecs, - const RtpHeaderExtensions& rtp_header_extensions, StreamParamsVec* current_streams, SessionDescription* answer, IceCredentialsIterator* ice_credentials) const; - bool AddDataContentForAnswer( + RTCError AddUnsupportedContentForAnswer( const MediaDescriptionOptions& media_description_options, const MediaSessionOptions& session_options, const ContentInfo* offer_content, @@ -312,50 +166,20 @@ class MediaSessionDescriptionFactory { const ContentInfo* current_content, const SessionDescription* current_description, const TransportInfo* bundle_transport, - StreamParamsVec* 
current_streams, SessionDescription* answer, IceCredentialsIterator* ice_credentials) const; - bool AddUnsupportedContentForAnswer( - const MediaDescriptionOptions& media_description_options, - const MediaSessionOptions& session_options, - const ContentInfo* offer_content, - const SessionDescription* offer_description, - const ContentInfo* current_content, - const SessionDescription* current_description, - const TransportInfo* bundle_transport, - SessionDescription* answer, - IceCredentialsIterator* ice_credentials) const; - - void ComputeAudioCodecsIntersectionAndUnion(); - - void ComputeVideoCodecsIntersectionAndUnion(); - - rtc::UniqueRandomIdGenerator* ssrc_generator() const { + UniqueRandomIdGenerator* ssrc_generator() const { return ssrc_generator_.get(); } bool is_unified_plan_ = false; - AudioCodecs audio_send_codecs_; - AudioCodecs audio_recv_codecs_; - // Intersection of send and recv. - AudioCodecs audio_sendrecv_codecs_; - // Union of send and recv. - AudioCodecs all_audio_codecs_; - VideoCodecs video_send_codecs_; - VideoCodecs video_recv_codecs_; - // Intersection of send and recv. - VideoCodecs video_sendrecv_codecs_; - // Union of send and recv. - VideoCodecs all_video_codecs_; // This object may or may not be owned by this class. - webrtc::AlwaysValidPointer const - ssrc_generator_; - bool enable_encrypted_rtp_header_extensions_ = false; - // TODO(zhihuang): Rename secure_ to sdec_policy_; rename the related getter - // and setter. - SecurePolicy secure_ = SEC_DISABLED; + AlwaysValidPointer const ssrc_generator_; + bool enable_encrypted_rtp_header_extensions_ = true; const TransportDescriptionFactory* transport_desc_factory_; + CodecLookupHelper* codec_lookup_helper_; + bool payload_types_in_transport_trial_enabled_; }; // Convenience functions. @@ -365,12 +189,12 @@ bool IsVideoContent(const ContentInfo* content); bool IsDataContent(const ContentInfo* content); bool IsUnsupportedContent(const ContentInfo* content); const ContentInfo* GetFirstMediaContent(const ContentInfos& contents, - MediaType media_type); + webrtc::MediaType media_type); const ContentInfo* GetFirstAudioContent(const ContentInfos& contents); const ContentInfo* GetFirstVideoContent(const ContentInfos& contents); const ContentInfo* GetFirstDataContent(const ContentInfos& contents); const ContentInfo* GetFirstMediaContent(const SessionDescription* sdesc, - MediaType media_type); + webrtc::MediaType media_type); const ContentInfo* GetFirstAudioContent(const SessionDescription* sdesc); const ContentInfo* GetFirstVideoContent(const SessionDescription* sdesc); const ContentInfo* GetFirstDataContent(const SessionDescription* sdesc); @@ -382,12 +206,13 @@ const SctpDataContentDescription* GetFirstSctpDataContentDescription( const SessionDescription* sdesc); // Non-const versions of the above functions. // Useful when modifying an existing description. 
-ContentInfo* GetFirstMediaContent(ContentInfos* contents, MediaType media_type); +ContentInfo* GetFirstMediaContent(ContentInfos* contents, + webrtc::MediaType media_type); ContentInfo* GetFirstAudioContent(ContentInfos* contents); ContentInfo* GetFirstVideoContent(ContentInfos* contents); ContentInfo* GetFirstDataContent(ContentInfos* contents); ContentInfo* GetFirstMediaContent(SessionDescription* sdesc, - MediaType media_type); + webrtc::MediaType media_type); ContentInfo* GetFirstAudioContent(SessionDescription* sdesc); ContentInfo* GetFirstVideoContent(SessionDescription* sdesc); ContentInfo* GetFirstDataContent(SessionDescription* sdesc); @@ -398,26 +223,26 @@ VideoContentDescription* GetFirstVideoContentDescription( SctpDataContentDescription* GetFirstSctpDataContentDescription( SessionDescription* sdesc); -// Helper functions to return crypto suites used for SDES. -void GetSupportedAudioSdesCryptoSuites( - const webrtc::CryptoOptions& crypto_options, - std::vector* crypto_suites); -void GetSupportedVideoSdesCryptoSuites( - const webrtc::CryptoOptions& crypto_options, - std::vector* crypto_suites); -void GetSupportedDataSdesCryptoSuites( - const webrtc::CryptoOptions& crypto_options, - std::vector* crypto_suites); -void GetSupportedAudioSdesCryptoSuiteNames( - const webrtc::CryptoOptions& crypto_options, - std::vector* crypto_suite_names); -void GetSupportedVideoSdesCryptoSuiteNames( - const webrtc::CryptoOptions& crypto_options, - std::vector* crypto_suite_names); -void GetSupportedDataSdesCryptoSuiteNames( - const webrtc::CryptoOptions& crypto_options, - std::vector* crypto_suite_names); +} // namespace webrtc +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::GetFirstAudioContent; +using ::webrtc::GetFirstAudioContentDescription; +using ::webrtc::GetFirstDataContent; +using ::webrtc::GetFirstMediaContent; +using ::webrtc::GetFirstSctpDataContentDescription; +using ::webrtc::GetFirstVideoContent; +using ::webrtc::GetFirstVideoContentDescription; +using ::webrtc::IsAudioContent; +using ::webrtc::IsDataContent; +using ::webrtc::IsMediaContent; +using ::webrtc::IsUnsupportedContent; +using ::webrtc::IsVideoContent; +using ::webrtc::MediaSessionDescriptionFactory; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // PC_MEDIA_SESSION_H_ diff --git a/pc/media_session_unittest.cc b/pc/media_session_unittest.cc index a082bcc289..475dfd916a 100644 --- a/pc/media_session_unittest.cc +++ b/pc/media_session_unittest.cc @@ -12,10 +12,10 @@ #include -#include #include #include #include +#include #include #include #include @@ -23,24 +23,39 @@ #include "absl/algorithm/container.h" #include "absl/strings/match.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/audio_codecs/audio_format.h" #include "api/candidate.h" -#include "api/crypto_params.h" +#include "api/field_trials_view.h" +#include "api/media_types.h" +#include "api/rtp_parameters.h" +#include "api/rtp_transceiver_direction.h" +#include "api/sctp_transport_interface.h" +#include "api/video_codecs/sdp_video_format.h" +#include "call/fake_payload_type_suggester.h" +#include "call/payload_type.h" #include "media/base/codec.h" +#include "media/base/codec_list.h" #include "media/base/media_constants.h" +#include "media/base/rid_description.h" +#include "media/base/stream_params.h" #include "media/base/test_utils.h" -#include "media/sctp/sctp_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/transport_description.h" +#include "p2p/base/transport_description_factory.h" #include "p2p/base/transport_info.h" +#include "pc/codec_vendor.h" +#include "pc/media_options.h" #include "pc/media_protocol_names.h" #include "pc/rtp_media_utils.h" -#include "rtc_base/arraysize.h" +#include "pc/rtp_parameters_conversion.h" +#include "pc/session_description.h" +#include "pc/simulcast_description.h" #include "rtc_base/checks.h" #include "rtc_base/fake_ssl_identity.h" +#include "rtc_base/logging.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/ssl_identity.h" -#include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/unique_id_generator.h" @@ -48,198 +63,275 @@ #include "test/gtest.h" #include "test/scoped_key_value_config.h" -#define ASSERT_CRYPTO(cd, s, cs) \ - ASSERT_EQ(s, cd->cryptos().size()); \ - ASSERT_EQ(cs, cd->cryptos()[0].crypto_suite) - -typedef std::vector Candidates; - -using cricket::AudioCodec; -using cricket::AudioContentDescription; -using cricket::ContentInfo; -using cricket::CryptoParamsVec; -using cricket::GetFirstAudioContent; -using cricket::GetFirstAudioContentDescription; -using cricket::GetFirstDataContent; -using cricket::GetFirstVideoContent; -using cricket::GetFirstVideoContentDescription; -using cricket::kAutoBandwidth; -using cricket::MEDIA_TYPE_AUDIO; -using cricket::MEDIA_TYPE_DATA; -using cricket::MEDIA_TYPE_VIDEO; -using cricket::MediaContentDescription; -using cricket::MediaDescriptionOptions; -using cricket::MediaProtocolType; -using cricket::MediaSessionDescriptionFactory; -using 
cricket::MediaSessionOptions; -using cricket::MediaType; -using cricket::RidDescription; -using cricket::RidDirection; -using cricket::SctpDataContentDescription; -using cricket::SEC_DISABLED; -using cricket::SEC_ENABLED; -using cricket::SEC_REQUIRED; -using cricket::SessionDescription; -using cricket::SimulcastDescription; -using cricket::SimulcastLayer; -using cricket::SimulcastLayerList; -using cricket::SsrcGroup; -using cricket::StreamParams; -using cricket::StreamParamsVec; -using cricket::TransportDescription; -using cricket::TransportDescriptionFactory; -using cricket::TransportInfo; -using cricket::VideoCodec; -using cricket::VideoContentDescription; -using rtc::kCsAeadAes128Gcm; -using rtc::kCsAeadAes256Gcm; -using rtc::kCsAesCm128HmacSha1_32; -using rtc::kCsAesCm128HmacSha1_80; -using rtc::UniqueRandomIdGenerator; +namespace webrtc { +namespace { + +using ::testing::Bool; +using ::testing::Combine; using ::testing::Contains; -using ::testing::Each; using ::testing::ElementsAre; using ::testing::ElementsAreArray; using ::testing::Eq; using ::testing::Field; +using ::testing::Gt; using ::testing::IsEmpty; -using ::testing::IsFalse; -using ::testing::Ne; using ::testing::Not; using ::testing::Pointwise; using ::testing::SizeIs; -using webrtc::RtpExtension; -using webrtc::RtpTransceiverDirection; - -static const AudioCodec kAudioCodecs1[] = { - cricket::CreateAudioCodec(103, "ISAC", 16000, 1), - cricket::CreateAudioCodec(102, "iLBC", 8000, 1), - cricket::CreateAudioCodec(0, "PCMU", 8000, 1), - cricket::CreateAudioCodec(8, "PCMA", 8000, 1), - cricket::CreateAudioCodec(117, "red", 8000, 1), - cricket::CreateAudioCodec(107, "CN", 48000, 1)}; - -static const AudioCodec kAudioCodecs2[] = { - cricket::CreateAudioCodec(126, "foo", 16000, 1), - cricket::CreateAudioCodec(0, "PCMU", 8000, 1), - cricket::CreateAudioCodec(127, "iLBC", 8000, 1), -}; +using ::testing::UnorderedElementsAreArray; +using ::testing::Values; +using ::testing::ValuesIn; +using ::webrtc::UniqueRandomIdGenerator; +using ::webrtc::test::ScopedKeyValueConfig; + +using Candidates = std::vector; -static const AudioCodec kAudioCodecsAnswer[] = { - cricket::CreateAudioCodec(102, "iLBC", 8000, 1), - cricket::CreateAudioCodec(0, "PCMU", 8000, 1), +class CodecLookupHelperForTesting : public CodecLookupHelper { + public: + explicit CodecLookupHelperForTesting(const FieldTrialsView& field_trials) + : codec_vendor_(nullptr, false, field_trials) {} + webrtc::PayloadTypeSuggester* PayloadTypeSuggester() override { + return &payload_type_suggester_; + } + CodecVendor* GetCodecVendor() override { return &codec_vendor_; } + + private: + FakePayloadTypeSuggester payload_type_suggester_; + CodecVendor codec_vendor_; }; -static const VideoCodec kVideoCodecs1[] = { - cricket::CreateVideoCodec(96, "H264-SVC"), - cricket::CreateVideoCodec(97, "H264")}; +Codec CreateRedAudioCodec(absl::string_view encoding_id) { + Codec red = CreateAudioCodec(63, "red", 48000, 2); + red.SetParam(kCodecParamNotInNameValueFormat, + std::string(encoding_id) + '/' + std::string(encoding_id)); + return red; +} + +const Codec kAudioCodecs1[] = {CreateAudioCodec(111, "opus", 48000, 2), + CreateRedAudioCodec("111"), + CreateAudioCodec(103, "G722", 16000, 1), + CreateAudioCodec(0, "PCMU", 8000, 1), + CreateAudioCodec(8, "PCMA", 8000, 1), + CreateAudioCodec(107, "CN", 48000, 1)}; -static const VideoCodec kVideoCodecs1Reverse[] = { - cricket::CreateVideoCodec(97, "H264"), - cricket::CreateVideoCodec(96, "H264-SVC")}; +const Codec kAudioCodecs2[] = { + CreateAudioCodec(126, 
"foo", 16000, 1), + CreateAudioCodec(0, "PCMU", 8000, 1), + CreateAudioCodec(127, "G722", 16000, 1), +}; -static const VideoCodec kVideoCodecs2[] = { - cricket::CreateVideoCodec(126, "H264"), - cricket::CreateVideoCodec(127, "H263")}; +const Codec kAudioCodecsAnswer[] = { + CreateAudioCodec(103, "G722", 16000, 1), + CreateAudioCodec(0, "PCMU", 8000, 1), +}; -static const VideoCodec kVideoCodecsAnswer[] = { - cricket::CreateVideoCodec(97, "H264")}; +const Codec kVideoCodecs1[] = {CreateVideoCodec(96, "H264-SVC"), + CreateVideoCodec(97, "H264")}; + +const Codec kVideoCodecs1Reverse[] = {CreateVideoCodec(97, "H264"), + CreateVideoCodec(96, "H264-SVC")}; + +const Codec kVideoCodecs2[] = {CreateVideoCodec(126, "H264"), + CreateVideoCodec(127, "H263")}; + +const Codec kVideoCodecsAnswer[] = {CreateVideoCodec(97, "H264")}; + +// H.265 level-id, according to H.265 spec, is calculated this way: +// For any given H.265 level a.b, level-id = (a * 10 + b) * 3. For level 6.0, +// level-id = (6 * 10 + 0) * 3 = 180. Similar for all other H.265 levels. +const char kVideoCodecsH265Level6LevelId[] = "180"; +const char kVideoCodecsH265Level52LevelId[] = "156"; +const char kVideoCodecsH265Level5LevelId[] = "150"; +const char kVideoCodecsH265Level4LevelId[] = "120"; +const char kVideoCodecsH265Level31LevelId[] = "93"; + +const SdpVideoFormat kH265MainProfileLevel31Sdp( + "H265", + {{"profile-id", "1"}, + {"tier-flag", "0"}, + {"level-id", kVideoCodecsH265Level31LevelId}, + {"tx-mode", "SRST"}}); +const SdpVideoFormat kH265MainProfileLevel4Sdp("H265", + {{"profile-id", "1"}, + {"tier-flag", "0"}, + {"level-id", + kVideoCodecsH265Level4LevelId}, + {"tx-mode", "SRST"}}); +const SdpVideoFormat kH265MainProfileLevel5Sdp("H265", + {{"profile-id", "1"}, + {"tier-flag", "0"}, + {"level-id", + kVideoCodecsH265Level5LevelId}, + {"tx-mode", "SRST"}}); +const SdpVideoFormat kH265MainProfileLevel52Sdp( + "H265", + {{"profile-id", "1"}, + {"tier-flag", "0"}, + {"level-id", kVideoCodecsH265Level52LevelId}, + {"tx-mode", "SRST"}}); +const SdpVideoFormat kH265MainProfileLevel6Sdp("H265", + {{"profile-id", "1"}, + {"tier-flag", "0"}, + {"level-id", + kVideoCodecsH265Level6LevelId}, + {"tx-mode", "SRST"}}); + +const Codec kVideoCodecsH265Level31[] = { + CreateVideoCodec(96, kH265MainProfileLevel31Sdp)}; +const Codec kVideoCodecsH265Level4[] = { + CreateVideoCodec(96, kH265MainProfileLevel4Sdp)}; +const Codec kVideoCodecsH265Level5[] = { + CreateVideoCodec(96, kH265MainProfileLevel5Sdp)}; +const Codec kVideoCodecsH265Level52[] = { + CreateVideoCodec(96, kH265MainProfileLevel52Sdp)}; +const Codec kVideoCodecsH265Level6[] = { + CreateVideoCodec(96, kH265MainProfileLevel6Sdp)}; +// Match two codec lists for content, but ignore the ID. 
+bool CodecListsMatch(ArrayView list1, + ArrayView list2) { + if (list1.size() != list2.size()) { + return false; + } + for (size_t i = 0; i < list1.size(); ++i) { + Codec codec1 = list1[i]; + Codec codec2 = list2[i]; + codec1.id = Codec::kIdNotSet; + codec2.id = Codec::kIdNotSet; + if (codec1 != codec2) { + RTC_LOG(LS_ERROR) << "Mismatch at position " << i << " between " << codec1 + << " and " << codec2; + return false; + } + } + return true; +} -static const RtpExtension kAudioRtpExtension1[] = { +const RtpExtension kAudioRtpExtension1[] = { RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 8), RtpExtension("http://google.com/testing/audio_something", 10), }; -static const RtpExtension kAudioRtpExtensionEncrypted1[] = { +const RtpExtension kAudioRtpExtensionEncrypted1[] = { RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 8), - RtpExtension("http://google.com/testing/audio_something", 10), - RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 12, true), RtpExtension("http://google.com/testing/audio_something", 11, true), }; -static const RtpExtension kAudioRtpExtension2[] = { +const RtpExtension kAudioRtpExtension2[] = { RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 2), RtpExtension("http://google.com/testing/audio_something_else", 8), RtpExtension("http://google.com/testing/both_audio_and_video", 7), }; -static const RtpExtension kAudioRtpExtension3[] = { +const RtpExtension kAudioRtpExtensionEncrypted2[] = { + RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 2), + RtpExtension("http://google.com/testing/audio_something", 13, true), + RtpExtension("http://google.com/testing/audio_something_else", 5, true), +}; + +const RtpExtension kAudioRtpExtension3[] = { RtpExtension("http://google.com/testing/audio_something", 2), RtpExtension("http://google.com/testing/both_audio_and_video", 3), }; -static const RtpExtension kAudioRtpExtension3ForEncryption[] = { - RtpExtension("http://google.com/testing/audio_something", 2), - // Use RTP extension that supports encryption. 
- RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 3), +const RtpExtension kAudioRtpExtensionMixedEncryption1[] = { + RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 8), + RtpExtension("http://google.com/testing/audio_something", 9), + RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 10, true), + RtpExtension("http://google.com/testing/audio_something", 11, true), + RtpExtension("http://google.com/testing/audio_something_else", 12, true), }; -static const RtpExtension kAudioRtpExtension3ForEncryptionOffer[] = { - RtpExtension("http://google.com/testing/audio_something", 2), - RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 3), - RtpExtension("http://google.com/testing/audio_something", 14, true), - RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 13, true), +const RtpExtension kAudioRtpExtensionMixedEncryption2[] = { + RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 5), + RtpExtension("http://google.com/testing/audio_something", 6), + RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 7, true), + RtpExtension("http://google.com/testing/audio_something", 8, true), + RtpExtension("http://google.com/testing/audio_something_else", 9), }; -static const RtpExtension kVideoRtpExtension3ForEncryptionOffer[] = { - RtpExtension("http://google.com/testing/video_something", 4), - RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 3), - RtpExtension("http://google.com/testing/video_something", 12, true), - RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 13, true), +const RtpExtension kAudioRtpExtensionAnswer[] = { + RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 8), }; -static const RtpExtension kAudioRtpExtensionAnswer[] = { +const RtpExtension kAudioRtpExtensionEncryptedAnswer[] = { RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 8), + RtpExtension("http://google.com/testing/audio_something", 11, true), +}; + +const RtpExtension kAudioRtpExtensionMixedEncryptionAnswerEncryptionEnabled[] = + { + RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 10, true), + RtpExtension("http://google.com/testing/audio_something", 11, true), }; -static const RtpExtension kAudioRtpExtensionEncryptedAnswer[] = { - RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 12, true), +const RtpExtension kAudioRtpExtensionMixedEncryptionAnswerEncryptionDisabled[] = + { + RtpExtension("urn:ietf:params:rtp-hdrext:ssrc-audio-level", 8), + RtpExtension("http://google.com/testing/audio_something", 9), }; -static const RtpExtension kVideoRtpExtension1[] = { +const RtpExtension kVideoRtpExtension1[] = { RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 14), RtpExtension("http://google.com/testing/video_something", 13), }; -static const RtpExtension kVideoRtpExtensionEncrypted1[] = { +const RtpExtension kVideoRtpExtensionEncrypted1[] = { RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 14), - RtpExtension("http://google.com/testing/video_something", 13), - RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 9, true), RtpExtension("http://google.com/testing/video_something", 7, true), }; -static const RtpExtension kVideoRtpExtension2[] = { +const RtpExtension kVideoRtpExtension2[] = { RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 2), RtpExtension("http://google.com/testing/video_something_else", 14), RtpExtension("http://google.com/testing/both_audio_and_video", 7), }; -static const RtpExtension kVideoRtpExtension3[] = { +const RtpExtension kVideoRtpExtensionEncrypted2[] = { + RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 8), + 
RtpExtension("http://google.com/testing/video_something", 10, true), + RtpExtension("http://google.com/testing/video_something_else", 4, true), +}; + +const RtpExtension kVideoRtpExtension3[] = { RtpExtension("http://google.com/testing/video_something", 4), RtpExtension("http://google.com/testing/both_audio_and_video", 5), }; -static const RtpExtension kVideoRtpExtension3ForEncryption[] = { - RtpExtension("http://google.com/testing/video_something", 4), - // Use RTP extension that supports encryption. - RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 5), +const RtpExtension kVideoRtpExtensionMixedEncryption[] = { + RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 14), + RtpExtension("http://google.com/testing/video_something", 13), + RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 15, true), + RtpExtension("http://google.com/testing/video_something", 16, true), +}; + +const RtpExtension kVideoRtpExtensionAnswer[] = { + RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 14), }; -static const RtpExtension kVideoRtpExtensionAnswer[] = { +const RtpExtension kVideoRtpExtensionEncryptedAnswer[] = { RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 14), + RtpExtension("http://google.com/testing/video_something", 7, true), +}; + +const RtpExtension kVideoRtpExtensionMixedEncryptionAnswerEncryptionEnabled[] = + { + RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 15, true), + RtpExtension("http://google.com/testing/video_something", 16, true), }; -static const RtpExtension kVideoRtpExtensionEncryptedAnswer[] = { - RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 9, true), +const RtpExtension kVideoRtpExtensionMixedEncryptionAnswerEncryptionDisabled[] = + { + RtpExtension("urn:ietf:params:rtp-hdrext:toffset", 14), + RtpExtension("http://google.com/testing/video_something", 13), }; -static const RtpExtension kRtpExtensionTransportSequenceNumber01[] = { +const RtpExtension kRtpExtensionTransportSequenceNumber01[] = { RtpExtension("http://www.ietf.org/id/" "draft-holmer-rmcat-transport-wide-cc-extensions-01", 1), }; -static const RtpExtension kRtpExtensionTransportSequenceNumber01And02[] = { +const RtpExtension kRtpExtensionTransportSequenceNumber01And02[] = { RtpExtension("http://www.ietf.org/id/" "draft-holmer-rmcat-transport-wide-cc-extensions-01", 1), @@ -248,67 +340,78 @@ static const RtpExtension kRtpExtensionTransportSequenceNumber01And02[] = { 2), }; -static const RtpExtension kRtpExtensionTransportSequenceNumber02[] = { +const RtpExtension kRtpExtensionTransportSequenceNumber02[] = { RtpExtension( "http://www.webrtc.org/experiments/rtp-hdrext/transport-wide-cc-02", 2), }; -static const RtpExtension kRtpExtensionGenericFrameDescriptorUri00[] = { +const RtpExtension kRtpExtensionGenericFrameDescriptorUri00[] = { RtpExtension("http://www.webrtc.org/experiments/rtp-hdrext/" "generic-frame-descriptor-00", 3), }; -static const uint32_t kSimulcastParamsSsrc[] = {10, 11, 20, 21, 30, 31}; -static const uint32_t kSimSsrc[] = {10, 20, 30}; -static const uint32_t kFec1Ssrc[] = {10, 11}; -static const uint32_t kFec2Ssrc[] = {20, 21}; -static const uint32_t kFec3Ssrc[] = {30, 31}; - -static const char kMediaStream1[] = "stream_1"; -static const char kMediaStream2[] = "stream_2"; -static const char kVideoTrack1[] = "video_1"; -static const char kVideoTrack2[] = "video_2"; -static const char kAudioTrack1[] = "audio_1"; -static const char kAudioTrack2[] = "audio_2"; -static const char kAudioTrack3[] = "audio_3"; - -static const char* kMediaProtocols[] = {"RTP/AVP", "RTP/SAVP", "RTP/AVPF", - 
"RTP/SAVPF"}; -static const char* kMediaProtocolsDtls[] = { - "TCP/TLS/RTP/SAVPF", "TCP/TLS/RTP/SAVP", "UDP/TLS/RTP/SAVPF", - "UDP/TLS/RTP/SAVP"}; - -// SRTP cipher name negotiated by the tests. This must be updated if the -// default changes. -static const char* kDefaultSrtpCryptoSuite = kCsAesCm128HmacSha1_80; -static const char* kDefaultSrtpCryptoSuiteGcm = kCsAeadAes256Gcm; +const uint32_t kSimulcastParamsSsrc[] = {10, 11, 20, 21, 30, 31}; +const uint32_t kSimSsrc[] = {10, 20, 30}; +const uint32_t kFec1Ssrc[] = {10, 11}; +const uint32_t kFec2Ssrc[] = {20, 21}; +const uint32_t kFec3Ssrc[] = {30, 31}; + +const char kMediaStream1[] = "stream_1"; +const char kMediaStream2[] = "stream_2"; +const char kVideoTrack1[] = "video_1"; +const char kVideoTrack2[] = "video_2"; +const char kAudioTrack1[] = "audio_1"; +const char kAudioTrack2[] = "audio_2"; +const char kAudioTrack3[] = "audio_3"; + +const char* kMediaProtocols[] = {"RTP/AVP", "RTP/SAVP", "RTP/AVPF", + "RTP/SAVPF"}; +const char* kMediaProtocolsDtls[] = {"TCP/TLS/RTP/SAVPF", "TCP/TLS/RTP/SAVP", + "UDP/TLS/RTP/SAVPF", "UDP/TLS/RTP/SAVP"}; // These constants are used to make the code using "AddMediaDescriptionOptions" // more readable. -static constexpr bool kStopped = true; -static constexpr bool kActive = false; +constexpr bool kStopped = true; +constexpr bool kActive = false; + +// Helper used for debugging. It reports the media type and the parameters. +std::string FullMimeType(Codec codec) { + StringBuilder sb; + switch (codec.type) { + case Codec::Type::kAudio: + sb << "audio/"; + break; + case Codec::Type::kVideo: + sb << "video/"; + break; + } + sb << codec.name; + for (auto& param : codec.params) { + sb << ";" << param.first << "=" << param.second; + } + return sb.Release(); +} -static bool IsMediaContentOfType(const ContentInfo* content, - MediaType media_type) { +bool IsMediaContentOfType(const ContentInfo* content, + webrtc::MediaType media_type) { RTC_DCHECK(content); return content->media_description()->type() == media_type; } -static RtpTransceiverDirection GetMediaDirection(const ContentInfo* content) { +RtpTransceiverDirection GetMediaDirection(const ContentInfo* content) { RTC_DCHECK(content); return content->media_description()->direction(); } -static void AddRtxCodec(const VideoCodec& rtx_codec, - std::vector* codecs) { - ASSERT_FALSE(cricket::FindCodecById(*codecs, rtx_codec.id)); +void AddRtxCodec(const Codec& rtx_codec, std::vector* codecs) { + RTC_LOG(LS_VERBOSE) << "Adding RTX codec " << FullMimeType(rtx_codec); + ASSERT_FALSE(FindCodecById(*codecs, rtx_codec.id)); codecs->push_back(rtx_codec); } -template -static std::vector GetCodecNames(const std::vector& codecs) { +std::vector GetCodecNames(const std::vector& codecs) { std::vector codec_names; codec_names.reserve(codecs.size()); for (const auto& codec : codecs) { @@ -337,31 +440,32 @@ FindFirstMediaDescriptionByMid(const std::string& mid, } // Add a media section to the `session_options`. 
-static void AddMediaDescriptionOptions(MediaType type, - const std::string& mid, - RtpTransceiverDirection direction, - bool stopped, - MediaSessionOptions* opts) { +void AddMediaDescriptionOptions(webrtc::MediaType type, + const std::string& mid, + RtpTransceiverDirection direction, + bool stopped, + MediaSessionOptions* opts) { opts->media_description_options.push_back( MediaDescriptionOptions(type, mid, direction, stopped)); } -static void AddAudioVideoSections(RtpTransceiverDirection direction, - MediaSessionOptions* opts) { - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", direction, kActive, - opts); - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", direction, kActive, - opts); +void AddAudioVideoSections(RtpTransceiverDirection direction, + MediaSessionOptions* opts) { + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", direction, + kActive, opts); + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", direction, + kActive, opts); } -static void AddDataSection(RtpTransceiverDirection direction, - MediaSessionOptions* opts) { - AddMediaDescriptionOptions(MEDIA_TYPE_DATA, "data", direction, kActive, opts); +void AddDataSection(RtpTransceiverDirection direction, + MediaSessionOptions* opts) { + AddMediaDescriptionOptions(webrtc::MediaType::DATA, "data", direction, + kActive, opts); } -static void AttachSenderToMediaDescriptionOptions( +void AttachSenderToMediaDescriptionOptions( const std::string& mid, - MediaType type, + webrtc::MediaType type, const std::string& track_id, const std::vector& stream_ids, const std::vector& rids, @@ -370,10 +474,10 @@ static void AttachSenderToMediaDescriptionOptions( MediaSessionOptions* session_options) { auto it = FindFirstMediaDescriptionByMid(mid, session_options); switch (type) { - case MEDIA_TYPE_AUDIO: + case webrtc::MediaType::AUDIO: it->AddAudioSender(track_id, stream_ids); break; - case MEDIA_TYPE_VIDEO: + case webrtc::MediaType::VIDEO: it->AddVideoSender(track_id, stream_ids, rids, simulcast_layers, num_sim_layer); break; @@ -382,9 +486,9 @@ static void AttachSenderToMediaDescriptionOptions( } } -static void AttachSenderToMediaDescriptionOptions( +void AttachSenderToMediaDescriptionOptions( const std::string& mid, - MediaType type, + webrtc::MediaType type, const std::string& track_id, const std::vector& stream_ids, int num_sim_layer, @@ -394,64 +498,53 @@ static void AttachSenderToMediaDescriptionOptions( session_options); } -static void DetachSenderFromMediaSection(const std::string& mid, - const std::string& track_id, - MediaSessionOptions* session_options) { - std::vector& sender_options_list = +void DetachSenderFromMediaSection(const std::string& mid, + const std::string& track_id, + MediaSessionOptions* session_options) { + std::vector& sender_options_list = FindFirstMediaDescriptionByMid(mid, session_options)->sender_options; - auto sender_it = - absl::c_find_if(sender_options_list, - [track_id](const cricket::SenderOptions& sender_options) { - return sender_options.track_id == track_id; - }); + auto sender_it = absl::c_find_if( + sender_options_list, [track_id](const SenderOptions& sender_options) { + return sender_options.track_id == track_id; + }); RTC_DCHECK(sender_it != sender_options_list.end()); sender_options_list.erase(sender_it); } -// Helper function used to create a default MediaSessionOptions for Plan B SDP. -// (https://tools.ietf.org/html/draft-uberti-rtcweb-plan-00). 
-static MediaSessionOptions CreatePlanBMediaSessionOptions() { +// Helper function used to create recv-only audio MediaSessionOptions. +MediaSessionOptions CreateAudioMediaSession() { MediaSessionOptions session_options; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kRecvOnly, kActive, &session_options); return session_options; } -// prefers GCM SDES crypto suites by removing non-GCM defaults. -void PreferGcmCryptoParameters(CryptoParamsVec* cryptos) { - cryptos->erase( - std::remove_if(cryptos->begin(), cryptos->end(), - [](const cricket::CryptoParams& crypto) { - return crypto.crypto_suite != kCsAeadAes256Gcm && - crypto.crypto_suite != kCsAeadAes128Gcm; - }), - cryptos->end()); -} - // TODO(zhihuang): Most of these tests were written while MediaSessionOptions // was designed for Plan B SDP, where only one audio "m=" section and one video // "m=" section could be generated, and ordering couldn't be controlled. Many of // these tests may be obsolete as a result, and should be refactored or removed. -class MediaSessionDescriptionFactoryTest : public ::testing::Test { +class MediaSessionDescriptionFactoryTest : public testing::Test { public: MediaSessionDescriptionFactoryTest() : tdf1_(field_trials), tdf2_(field_trials), - f1_(&tdf1_, &ssrc_generator1), - f2_(&tdf2_, &ssrc_generator2) { - f1_.set_audio_codecs(MAKE_VECTOR(kAudioCodecs1), - MAKE_VECTOR(kAudioCodecs1)); - f1_.set_video_codecs(MAKE_VECTOR(kVideoCodecs1), - MAKE_VECTOR(kVideoCodecs1)); - f2_.set_audio_codecs(MAKE_VECTOR(kAudioCodecs2), - MAKE_VECTOR(kAudioCodecs2)); - f2_.set_video_codecs(MAKE_VECTOR(kVideoCodecs2), - MAKE_VECTOR(kVideoCodecs2)); - tdf1_.set_certificate(rtc::RTCCertificate::Create( - std::unique_ptr(new rtc::FakeSSLIdentity("id1")))); - tdf2_.set_certificate(rtc::RTCCertificate::Create( - std::unique_ptr(new rtc::FakeSSLIdentity("id2")))); + codec_lookup_helper_1_(field_trials), + codec_lookup_helper_2_(field_trials), + f1_(nullptr, false, &ssrc_generator1, &tdf1_, &codec_lookup_helper_1_), + f2_(nullptr, false, &ssrc_generator2, &tdf2_, &codec_lookup_helper_2_) { + codec_lookup_helper_1_.GetCodecVendor()->set_audio_codecs( + MAKE_VECTOR(kAudioCodecs1), MAKE_VECTOR(kAudioCodecs1)); + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs( + MAKE_VECTOR(kVideoCodecs1), MAKE_VECTOR(kVideoCodecs1)); + codec_lookup_helper_2_.GetCodecVendor()->set_audio_codecs( + MAKE_VECTOR(kAudioCodecs2), MAKE_VECTOR(kAudioCodecs2)); + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs( + MAKE_VECTOR(kVideoCodecs2), MAKE_VECTOR(kVideoCodecs2)); + tdf1_.set_certificate(RTCCertificate::Create( + std::unique_ptr(new FakeSSLIdentity("id1")))); + tdf2_.set_certificate(RTCCertificate::Create( + std::unique_ptr(new FakeSSLIdentity("id2")))); } // Create a video StreamParamsVec object with: @@ -481,18 +574,6 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { return video_streams; } - bool CompareCryptoParams(const CryptoParamsVec& c1, - const CryptoParamsVec& c2) { - if (c1.size() != c2.size()) - return false; - for (size_t i = 0; i < c1.size(); ++i) - if (c1[i].tag != c2[i].tag || c1[i].crypto_suite != c2[i].crypto_suite || - c1[i].key_params != c2[i].key_params || - c1[i].session_params != c2[i].session_params) - return false; - return true; - } - // Returns true if the transport info contains "renomination" as an // ICE option. 
bool GetIceRenomination(const TransportInfo* transport_info) { @@ -523,23 +604,24 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { "data", TransportDescription(current_data_ufrag, current_data_pwd))); } if (offer) { - desc = f1_.CreateOffer(options, current_desc.get()); + desc = f1_.CreateOfferOrError(options, current_desc.get()).MoveValue(); } else { - std::unique_ptr offer; - offer = f1_.CreateOffer(options, NULL); - desc = f1_.CreateAnswer(offer.get(), options, current_desc.get()); + std::unique_ptr offer_desc; + offer_desc = f1_.CreateOfferOrError(options, nullptr).MoveValue(); + desc = + f1_.CreateAnswerOrError(offer_desc.get(), options, current_desc.get()) + .MoveValue(); } - ASSERT_TRUE(desc.get() != NULL); + ASSERT_TRUE(desc); const TransportInfo* ti_audio = desc->GetTransportInfoByName("audio"); if (options.has_audio()) { - EXPECT_TRUE(ti_audio != NULL); if (has_current_desc) { EXPECT_EQ(current_audio_ufrag, ti_audio->description.ice_ufrag); EXPECT_EQ(current_audio_pwd, ti_audio->description.ice_pwd); } else { - EXPECT_EQ(static_cast(cricket::ICE_UFRAG_LENGTH), + EXPECT_EQ(static_cast(ICE_UFRAG_LENGTH), ti_audio->description.ice_ufrag.size()); - EXPECT_EQ(static_cast(cricket::ICE_PWD_LENGTH), + EXPECT_EQ(static_cast(ICE_PWD_LENGTH), ti_audio->description.ice_pwd.size()); } auto media_desc_options_it = @@ -547,12 +629,9 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { EXPECT_EQ( media_desc_options_it->transport_options.enable_ice_renomination, GetIceRenomination(ti_audio)); - } else { - EXPECT_TRUE(ti_audio == NULL); } const TransportInfo* ti_video = desc->GetTransportInfoByName("video"); if (options.has_video()) { - EXPECT_TRUE(ti_video != NULL); auto media_desc_options_it = FindFirstMediaDescriptionByMid("video", options); if (options.bundle_enabled) { @@ -564,21 +643,18 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { EXPECT_EQ(current_video_ufrag, ti_video->description.ice_ufrag); EXPECT_EQ(current_video_pwd, ti_video->description.ice_pwd); } else { - EXPECT_EQ(static_cast(cricket::ICE_UFRAG_LENGTH), + EXPECT_EQ(static_cast(ICE_UFRAG_LENGTH), ti_video->description.ice_ufrag.size()); - EXPECT_EQ(static_cast(cricket::ICE_PWD_LENGTH), + EXPECT_EQ(static_cast(ICE_PWD_LENGTH), ti_video->description.ice_pwd.size()); } } EXPECT_EQ( media_desc_options_it->transport_options.enable_ice_renomination, GetIceRenomination(ti_video)); - } else { - EXPECT_TRUE(ti_video == NULL); } const TransportInfo* ti_data = desc->GetTransportInfoByName("data"); if (options.has_data()) { - EXPECT_TRUE(ti_data != NULL); if (options.bundle_enabled) { EXPECT_EQ(ti_audio->description.ice_ufrag, ti_data->description.ice_ufrag); @@ -588,9 +664,9 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { EXPECT_EQ(current_data_ufrag, ti_data->description.ice_ufrag); EXPECT_EQ(current_data_pwd, ti_data->description.ice_pwd); } else { - EXPECT_EQ(static_cast(cricket::ICE_UFRAG_LENGTH), + EXPECT_EQ(static_cast(ICE_UFRAG_LENGTH), ti_data->description.ice_ufrag.size()); - EXPECT_EQ(static_cast(cricket::ICE_PWD_LENGTH), + EXPECT_EQ(static_cast(ICE_PWD_LENGTH), ti_data->description.ice_pwd.size()); } } @@ -599,54 +675,7 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { EXPECT_EQ( media_desc_options_it->transport_options.enable_ice_renomination, GetIceRenomination(ti_data)); - - } else { - EXPECT_TRUE(ti_data == NULL); - } - } - - void TestCryptoWithBundle(bool offer) { - f1_.set_secure(SEC_ENABLED); - MediaSessionOptions 
options; - AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options); - std::unique_ptr ref_desc; - std::unique_ptr desc; - if (offer) { - options.bundle_enabled = false; - ref_desc = f1_.CreateOffer(options, NULL); - options.bundle_enabled = true; - desc = f1_.CreateOffer(options, ref_desc.get()); - } else { - options.bundle_enabled = true; - ref_desc = f1_.CreateOffer(options, NULL); - desc = f1_.CreateAnswer(ref_desc.get(), options, NULL); - } - ASSERT_TRUE(desc); - const cricket::MediaContentDescription* audio_media_desc = - desc->GetContentDescriptionByName("audio"); - ASSERT_TRUE(audio_media_desc); - const cricket::MediaContentDescription* video_media_desc = - desc->GetContentDescriptionByName("video"); - ASSERT_TRUE(video_media_desc); - EXPECT_TRUE(CompareCryptoParams(audio_media_desc->cryptos(), - video_media_desc->cryptos())); - EXPECT_EQ(1u, audio_media_desc->cryptos().size()); - EXPECT_EQ(kDefaultSrtpCryptoSuite, - audio_media_desc->cryptos()[0].crypto_suite); - - // Verify the selected crypto is one from the reference audio - // media content. - const cricket::MediaContentDescription* ref_audio_media_desc = - ref_desc->GetContentDescriptionByName("audio"); - bool found = false; - for (size_t i = 0; i < ref_audio_media_desc->cryptos().size(); ++i) { - if (ref_audio_media_desc->cryptos()[i].Matches( - audio_media_desc->cryptos()[0])) { - found = true; - break; - } } - EXPECT_TRUE(found); } // This test that the audio and video media direction is set to @@ -659,17 +688,17 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { AddAudioVideoSections(direction_in_offer, &offer_opts); std::unique_ptr offer = - f1_.CreateOffer(offer_opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); ContentInfo* ac_offer = offer->GetContentByName("audio"); - ASSERT_TRUE(ac_offer != NULL); + ASSERT_TRUE(ac_offer); ContentInfo* vc_offer = offer->GetContentByName("video"); - ASSERT_TRUE(vc_offer != NULL); + ASSERT_TRUE(vc_offer); MediaSessionOptions answer_opts; AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &answer_opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), answer_opts, NULL); + f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue(); const AudioContentDescription* acd_answer = GetFirstAudioContentDescription(answer.get()); EXPECT_EQ(expected_direction_in_answer, acd_answer->direction()); @@ -678,13 +707,10 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { EXPECT_EQ(expected_direction_in_answer, vcd_answer->direction()); } - bool VerifyNoCNCodecs(const cricket::ContentInfo* content) { + bool VerifyNoCNCodecs(const ContentInfo* content) { RTC_DCHECK(content); RTC_CHECK(content->media_description()); - const cricket::AudioContentDescription* audio_desc = - content->media_description()->as_audio(); - RTC_CHECK(audio_desc); - for (const cricket::AudioCodec& codec : audio_desc->codecs()) { + for (const Codec& codec : content->media_description()->codecs()) { if (codec.name == "CN") { return false; } @@ -692,104 +718,55 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { return true; } - void TestVideoGcmCipher(bool gcm_offer, bool gcm_answer) { - MediaSessionOptions offer_opts; - AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &offer_opts); - offer_opts.crypto_options.srtp.enable_gcm_crypto_suites = gcm_offer; - - MediaSessionOptions answer_opts; - AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, 
&answer_opts); - answer_opts.crypto_options.srtp.enable_gcm_crypto_suites = gcm_answer; - - f1_.set_secure(SEC_ENABLED); - f2_.set_secure(SEC_ENABLED); - std::unique_ptr offer = - f1_.CreateOffer(offer_opts, NULL); - ASSERT_TRUE(offer.get() != NULL); - if (gcm_offer && gcm_answer) { - for (cricket::ContentInfo& content : offer->contents()) { - auto cryptos = content.media_description()->cryptos(); - PreferGcmCryptoParameters(&cryptos); - content.media_description()->set_cryptos(cryptos); - } - } - std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), answer_opts, NULL); - const ContentInfo* ac = answer->GetContentByName("audio"); - const ContentInfo* vc = answer->GetContentByName("video"); - ASSERT_TRUE(ac != NULL); - ASSERT_TRUE(vc != NULL); - EXPECT_EQ(MediaProtocolType::kRtp, ac->type); - EXPECT_EQ(MediaProtocolType::kRtp, vc->type); - const AudioContentDescription* acd = ac->media_description()->as_audio(); - const VideoContentDescription* vcd = vc->media_description()->as_video(); - EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type()); - EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecsAnswer)); - EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // negotiated auto bw - EXPECT_EQ(0U, acd->first_ssrc()); // no sender is attached - EXPECT_TRUE(acd->rtcp_mux()); // negotiated rtcp-mux - if (gcm_offer && gcm_answer) { - ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuiteGcm); - } else { - ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite); - } - EXPECT_EQ(MEDIA_TYPE_VIDEO, vcd->type()); - EXPECT_THAT(vcd->codecs(), ElementsAreArray(kVideoCodecsAnswer)); - EXPECT_EQ(0U, vcd->first_ssrc()); // no sender is attached - EXPECT_TRUE(vcd->rtcp_mux()); // negotiated rtcp-mux - if (gcm_offer && gcm_answer) { - ASSERT_CRYPTO(vcd, 1U, kDefaultSrtpCryptoSuiteGcm); - } else { - ASSERT_CRYPTO(vcd, 1U, kDefaultSrtpCryptoSuite); - } - EXPECT_EQ(cricket::kMediaProtocolSavpf, vcd->protocol()); - } - void TestTransportSequenceNumberNegotiation( - const cricket::RtpHeaderExtensions& local, - const cricket::RtpHeaderExtensions& offered, - const cricket::RtpHeaderExtensions& expectedAnswer) { + const RtpHeaderExtensions& local, + const RtpHeaderExtensions& offered, + const RtpHeaderExtensions& expectedAnswer) { MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); SetAudioVideoRtpHeaderExtensions(offered, offered, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); SetAudioVideoRtpHeaderExtensions(local, local, &opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); - EXPECT_EQ( + EXPECT_THAT( expectedAnswer, - GetFirstAudioContentDescription(answer.get())->rtp_header_extensions()); - EXPECT_EQ( + UnorderedElementsAreArray(GetFirstAudioContentDescription(answer.get()) + ->rtp_header_extensions())); + EXPECT_THAT( expectedAnswer, - GetFirstVideoContentDescription(answer.get())->rtp_header_extensions()); + UnorderedElementsAreArray(GetFirstVideoContentDescription(answer.get()) + ->rtp_header_extensions())); } - std::vector - HeaderExtensionCapabilitiesFromRtpExtensions( - cricket::RtpHeaderExtensions extensions) { - std::vector capabilities; + std::vector + HeaderExtensionCapabilitiesFromRtpExtensions(RtpHeaderExtensions extensions) { + std::vector capabilities; for (const auto& extension : extensions) { - webrtc::RtpHeaderExtensionCapability 
capability( - extension.uri, extension.id, - webrtc::RtpTransceiverDirection::kSendRecv); + RtpHeaderExtensionCapability capability( + extension.uri, extension.id, extension.encrypt, + RtpTransceiverDirection::kSendRecv); capabilities.push_back(capability); } return capabilities; } - void SetAudioVideoRtpHeaderExtensions(cricket::RtpHeaderExtensions audio_exts, - cricket::RtpHeaderExtensions video_exts, + void SetAudioVideoRtpHeaderExtensions(RtpHeaderExtensions audio_exts, + RtpHeaderExtensions video_exts, MediaSessionOptions* opts) { - auto audio_caps = HeaderExtensionCapabilitiesFromRtpExtensions(audio_exts); - auto video_caps = HeaderExtensionCapabilitiesFromRtpExtensions(video_exts); + std::vector audio_caps = + HeaderExtensionCapabilitiesFromRtpExtensions(audio_exts); + std::vector video_caps = + HeaderExtensionCapabilitiesFromRtpExtensions(video_exts); for (auto& entry : opts->media_description_options) { switch (entry.type) { - case MEDIA_TYPE_AUDIO: + case webrtc::MediaType::AUDIO: entry.header_extensions = audio_caps; break; - case MEDIA_TYPE_VIDEO: + case webrtc::MediaType::VIDEO: entry.header_extensions = video_caps; break; default: @@ -799,85 +776,243 @@ class MediaSessionDescriptionFactoryTest : public ::testing::Test { } protected: - webrtc::test::ScopedKeyValueConfig field_trials; + ScopedKeyValueConfig field_trials; UniqueRandomIdGenerator ssrc_generator1; UniqueRandomIdGenerator ssrc_generator2; TransportDescriptionFactory tdf1_; TransportDescriptionFactory tdf2_; + CodecLookupHelperForTesting codec_lookup_helper_1_; + CodecLookupHelperForTesting codec_lookup_helper_2_; MediaSessionDescriptionFactory f1_; MediaSessionDescriptionFactory f2_; }; // Create a typical audio offer, and ensure it matches what we expect. TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioOffer) { - f1_.set_secure(SEC_ENABLED); std::unique_ptr offer = - f1_.CreateOffer(CreatePlanBMediaSessionOptions(), NULL); - ASSERT_TRUE(offer.get() != NULL); + f1_.CreateOfferOrError(CreateAudioMediaSession(), nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); const ContentInfo* ac = offer->GetContentByName("audio"); const ContentInfo* vc = offer->GetContentByName("video"); - ASSERT_TRUE(ac != NULL); - ASSERT_TRUE(vc == NULL); + ASSERT_TRUE(ac); + EXPECT_FALSE(vc); EXPECT_EQ(MediaProtocolType::kRtp, ac->type); - const AudioContentDescription* acd = ac->media_description()->as_audio(); - EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type()); - EXPECT_EQ(f1_.audio_sendrecv_codecs(), acd->codecs()); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_EQ(webrtc::MediaType::AUDIO, acd->type()); + EXPECT_THAT(codec_lookup_helper_1_.GetCodecVendor()->audio_sendrecv_codecs(), + ElementsAreArray(acd->codecs())); EXPECT_EQ(0U, acd->first_ssrc()); // no sender is attached. - EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // default bandwidth (auto) + EXPECT_EQ(kAutoBandwidth, + acd->bandwidth()); // default bandwidth (auto) EXPECT_TRUE(acd->rtcp_mux()); // rtcp-mux defaults on - ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite); - EXPECT_EQ(cricket::kMediaProtocolSavpf, acd->protocol()); + EXPECT_EQ(kMediaProtocolDtlsSavpf, acd->protocol()); +} + +// Create an offer with just Opus and RED. +TEST_F(MediaSessionDescriptionFactoryTest, + TestCreateAudioOfferWithJustOpusAndRed) { + // First, prefer to only use opus and red. 
+ std::vector preferences; + preferences.push_back(webrtc::ToRtpCodecCapability( + codec_lookup_helper_1_.GetCodecVendor()->audio_sendrecv_codecs()[0])); + preferences.push_back(webrtc::ToRtpCodecCapability( + codec_lookup_helper_1_.GetCodecVendor()->audio_sendrecv_codecs()[1])); + EXPECT_EQ("opus", preferences[0].name); + EXPECT_EQ("red", preferences[1].name); + + auto opts = CreateAudioMediaSession(); + opts.media_description_options.at(0).codec_preferences = preferences; + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* ac = offer->GetContentByName("audio"); + const ContentInfo* vc = offer->GetContentByName("video"); + ASSERT_TRUE(ac != NULL); + ASSERT_TRUE(vc == NULL); + EXPECT_EQ(MediaProtocolType::kRtp, ac->type); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_EQ(webrtc::MediaType::AUDIO, acd->type()); + EXPECT_EQ(2U, acd->codecs().size()); + EXPECT_EQ("opus", acd->codecs()[0].name); + EXPECT_EQ("red", acd->codecs()[1].name); +} + +// Create an offer with RED before Opus, which enables RED with Opus encoding. +TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioOfferWithRedForOpus) { + // First, prefer to only use opus and red. + std::vector preferences; + preferences.push_back(webrtc::ToRtpCodecCapability( + codec_lookup_helper_1_.GetCodecVendor()->audio_sendrecv_codecs()[1])); + preferences.push_back(webrtc::ToRtpCodecCapability( + codec_lookup_helper_1_.GetCodecVendor()->audio_sendrecv_codecs()[0])); + EXPECT_EQ("red", preferences[0].name); + EXPECT_EQ("opus", preferences[1].name); + + auto opts = CreateAudioMediaSession(); + opts.media_description_options.at(0).codec_preferences = preferences; + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* ac = offer->GetContentByName("audio"); + const ContentInfo* vc = offer->GetContentByName("video"); + ASSERT_TRUE(ac != NULL); + ASSERT_TRUE(vc == NULL); + EXPECT_EQ(MediaProtocolType::kRtp, ac->type); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_EQ(webrtc::MediaType::AUDIO, acd->type()); + EXPECT_EQ(2U, acd->codecs().size()); + EXPECT_EQ("red", acd->codecs()[0].name); + EXPECT_EQ("opus", acd->codecs()[1].name); } // Create a typical video offer, and ensure it matches what we expect. 
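An aside on the two RED/Opus offer tests above: kAudioCodecs1 builds its RED entry with CreateRedAudioCodec("111"), i.e. payload type 63 whose single fmtp parameter is "111/111". A rough sketch of how the RED-first audio section would render in SDP (illustrative only; these exact lines are not asserted by the tests):
// Assumed rendering, with PT 63 = RED and PT 111 = Opus as in kAudioCodecs1:
//   m=audio 9 UDP/TLS/RTP/SAVPF 63 111 ...
//   a=rtpmap:63 red/48000/2
//   a=fmtp:63 111/111        <- redundant encodings, both of payload type 111
//   a=rtpmap:111 opus/48000/2
// Listing RED first in MediaDescriptionOptions::codec_preferences, as
// TestCreateAudioOfferWithRedForOpus does, is what puts 63 ahead of 111.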
TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoOffer) { MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - f1_.set_secure(SEC_ENABLED); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); const ContentInfo* ac = offer->GetContentByName("audio"); const ContentInfo* vc = offer->GetContentByName("video"); - ASSERT_TRUE(ac != NULL); - ASSERT_TRUE(vc != NULL); + ASSERT_TRUE(ac); + ASSERT_TRUE(vc); EXPECT_EQ(MediaProtocolType::kRtp, ac->type); EXPECT_EQ(MediaProtocolType::kRtp, vc->type); - const AudioContentDescription* acd = ac->media_description()->as_audio(); - const VideoContentDescription* vcd = vc->media_description()->as_video(); - EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type()); - EXPECT_EQ(f1_.audio_sendrecv_codecs(), acd->codecs()); + const MediaContentDescription* acd = ac->media_description(); + const MediaContentDescription* vcd = vc->media_description(); + EXPECT_EQ(webrtc::MediaType::AUDIO, acd->type()); + EXPECT_EQ( + codec_lookup_helper_1_.GetCodecVendor()->audio_sendrecv_codecs().codecs(), + acd->codecs()); EXPECT_EQ(0U, acd->first_ssrc()); // no sender is attached - EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // default bandwidth (auto) + EXPECT_EQ(kAutoBandwidth, + acd->bandwidth()); // default bandwidth (auto) EXPECT_TRUE(acd->rtcp_mux()); // rtcp-mux defaults on - ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite); - EXPECT_EQ(cricket::kMediaProtocolSavpf, acd->protocol()); - EXPECT_EQ(MEDIA_TYPE_VIDEO, vcd->type()); - EXPECT_EQ(f1_.video_sendrecv_codecs(), vcd->codecs()); + EXPECT_EQ(kMediaProtocolDtlsSavpf, acd->protocol()); + EXPECT_EQ(webrtc::MediaType::VIDEO, vcd->type()); + EXPECT_EQ( + codec_lookup_helper_1_.GetCodecVendor()->video_sendrecv_codecs().codecs(), + vcd->codecs()); EXPECT_EQ(0U, vcd->first_ssrc()); // no sender is attached - EXPECT_EQ(kAutoBandwidth, vcd->bandwidth()); // default bandwidth (auto) + EXPECT_EQ(kAutoBandwidth, + vcd->bandwidth()); // default bandwidth (auto) EXPECT_TRUE(vcd->rtcp_mux()); // rtcp-mux defaults on - ASSERT_CRYPTO(vcd, 1U, kDefaultSrtpCryptoSuite); - EXPECT_EQ(cricket::kMediaProtocolSavpf, vcd->protocol()); + EXPECT_EQ(kMediaProtocolDtlsSavpf, vcd->protocol()); +} + +TEST_F(MediaSessionDescriptionFactoryTest, TestCreateOfferWithCustomCodecs) { + MediaSessionOptions opts; + + SdpAudioFormat audio_format("custom-audio", 8000, 2); + Codec custom_audio_codec = CreateAudioCodec(audio_format); + custom_audio_codec.id = 123; // picked at random, but valid + auto audio_options = + MediaDescriptionOptions(webrtc::MediaType::AUDIO, "0", + RtpTransceiverDirection::kSendRecv, kActive); + audio_options.codecs_to_include.push_back(custom_audio_codec); + opts.media_description_options.push_back(audio_options); + + Codec custom_video_codec = CreateVideoCodec("custom-video"); + custom_video_codec.id = 124; // picked at random, but valid + auto video_options = + MediaDescriptionOptions(webrtc::MediaType::VIDEO, "1", + RtpTransceiverDirection::kSendRecv, kActive); + video_options.codecs_to_include.push_back(custom_video_codec); + opts.media_description_options.push_back(video_options); + + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* ac = offer->GetContentByName("0"); + const ContentInfo* vc = offer->GetContentByName("1"); + ASSERT_TRUE(ac); + ASSERT_TRUE(vc); + 
EXPECT_EQ(MediaProtocolType::kRtp, ac->type); + EXPECT_EQ(MediaProtocolType::kRtp, vc->type); + const MediaContentDescription* acd = ac->media_description(); + const MediaContentDescription* vcd = vc->media_description(); + EXPECT_EQ(webrtc::MediaType::AUDIO, acd->type()); + ASSERT_EQ(acd->codecs().size(), 1U); + // Fields in codec are set during the gen process, so simple compare + // does not work. + EXPECT_EQ(acd->codecs()[0].name, custom_audio_codec.name); + + EXPECT_EQ(webrtc::MediaType::VIDEO, vcd->type()); + ASSERT_EQ(vcd->codecs().size(), 1U); + EXPECT_EQ(vcd->codecs()[0].name, custom_video_codec.name); +} + +TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAnswerWithCustomCodecs) { + MediaSessionOptions offer_opts; + MediaSessionOptions answer_opts; + + AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &offer_opts); + // Create custom codecs and add to answer. These will override + // the normally generated codec list in the answer. + // This breaks O/A rules - the responsibility for obeying those is + // on the caller, not on this function. + SdpAudioFormat audio_format("custom-audio", 8000, 2); + Codec custom_audio_codec = CreateAudioCodec(audio_format); + custom_audio_codec.id = 123; // picked at random, but valid + auto audio_options = + MediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", + RtpTransceiverDirection::kSendRecv, kActive); + audio_options.codecs_to_include.push_back(custom_audio_codec); + answer_opts.media_description_options.push_back(audio_options); + + Codec custom_video_codec = CreateVideoCodec("custom-video"); + custom_video_codec.id = 124; + auto video_options = + MediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendRecv, kActive); + video_options.codecs_to_include.push_back(custom_video_codec); + answer_opts.media_description_options.push_back(video_options); + + std::unique_ptr offer = + f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + std::unique_ptr answer = + f1_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue(); + const ContentInfo* ac = answer->GetContentByName("audio"); + const ContentInfo* vc = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + ASSERT_TRUE(vc); + EXPECT_EQ(MediaProtocolType::kRtp, ac->type); + EXPECT_EQ(MediaProtocolType::kRtp, vc->type); + const MediaContentDescription* acd = ac->media_description(); + const MediaContentDescription* vcd = vc->media_description(); + EXPECT_EQ(webrtc::MediaType::AUDIO, acd->type()); + ASSERT_EQ(acd->codecs().size(), 1U); + // Fields in codec are set during the gen process, so simple compare + // does not work. + EXPECT_EQ(acd->codecs()[0].name, custom_audio_codec.name); + + EXPECT_EQ(webrtc::MediaType::VIDEO, vcd->type()); + ASSERT_EQ(vcd->codecs().size(), 1U); + EXPECT_EQ(vcd->codecs()[0].name, custom_video_codec.name); +} // Test creating an offer with bundle where the Codecs have the same dynamic -// RTP playlod type. The test verifies that the offer don't contain the +// RTP payload type. The test verifies that the offer doesn't contain the // duplicate RTP payload types. 
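A note on the codecs_to_include override exercised by the two custom-codec tests above: because the answer's codec list is taken verbatim from the options, the generated answer can advertise a codec that never appeared in the offer, which a spec-compliant answerer would not produce on its own. Roughly, the audio answer in TestCreateAnswerWithCustomCodecs corresponds to SDP like this (illustrative only, not asserted by the tests):
//   m=audio 9 UDP/TLS/RTP/SAVPF 123
//   a=rtpmap:123 custom-audio/8000/2
// Payload type 123 exists only on the answering side; keeping offer and
// answer consistent is left to the caller, as the comment in the test says.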
TEST_F(MediaSessionDescriptionFactoryTest, TestBundleOfferWithSameCodecPlType) { - const VideoCodec& offered_video_codec = f2_.video_sendrecv_codecs()[0]; - const AudioCodec& offered_audio_codec = f2_.audio_sendrecv_codecs()[0]; + Codec offered_video_codec = + codec_lookup_helper_2_.GetCodecVendor()->video_sendrecv_codecs()[0]; + Codec offered_audio_codec = + codec_lookup_helper_2_.GetCodecVendor()->audio_sendrecv_codecs()[0]; ASSERT_EQ(offered_video_codec.id, offered_audio_codec.id); MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); opts.bundle_enabled = true; - std::unique_ptr offer = f2_.CreateOffer(opts, NULL); + std::unique_ptr offer = + f2_.CreateOfferOrError(opts, nullptr).MoveValue(); const VideoContentDescription* vcd = GetFirstVideoContentDescription(offer.get()); const AudioContentDescription* acd = GetFirstAudioContentDescription(offer.get()); - ASSERT_TRUE(NULL != vcd); - ASSERT_TRUE(NULL != acd); + ASSERT_TRUE(vcd); + ASSERT_TRUE(acd); EXPECT_NE(vcd->codecs()[0].id, acd->codecs()[0].id); EXPECT_EQ(vcd->codecs()[0].name, offered_video_codec.name); EXPECT_EQ(acd->codecs()[0].name, offered_audio_codec.name); @@ -887,37 +1022,34 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestBundleOfferWithSameCodecPlType) { // after an audio only session has been negotiated. TEST_F(MediaSessionDescriptionFactoryTest, TestCreateUpdatedVideoOfferWithBundle) { - f1_.set_secure(SEC_ENABLED); - f2_.set_secure(SEC_ENABLED); MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kRecvOnly, kActive, &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kInactive, kStopped, &opts); opts.bundle_enabled = true; - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); MediaSessionOptions updated_opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &updated_opts); updated_opts.bundle_enabled = true; std::unique_ptr updated_offer( - f1_.CreateOffer(updated_opts, answer.get())); + f1_.CreateOfferOrError(updated_opts, answer.get()).MoveValue()); const AudioContentDescription* acd = GetFirstAudioContentDescription(updated_offer.get()); const VideoContentDescription* vcd = GetFirstVideoContentDescription(updated_offer.get()); - EXPECT_TRUE(NULL != vcd); - EXPECT_TRUE(NULL != acd); + EXPECT_TRUE(vcd); + EXPECT_TRUE(acd); - ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite); - EXPECT_EQ(cricket::kMediaProtocolSavpf, acd->protocol()); - ASSERT_CRYPTO(vcd, 1U, kDefaultSrtpCryptoSuite); - EXPECT_EQ(cricket::kMediaProtocolSavpf, vcd->protocol()); + EXPECT_EQ(kMediaProtocolDtlsSavpf, acd->protocol()); + EXPECT_EQ(kMediaProtocolDtlsSavpf, vcd->protocol()); } // Create an SCTP data offer with bundle without error. 
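Before the two SCTP hunks below: with the SDES code paths removed, DTLS is always required, so the previously "insecure" data offer now negotiates the same protocol as the "secure" one. A minimal sketch, reusing the helpers defined earlier in this file (illustrative only, not part of the patch):
// Sketch only; assumes the fixture member f1_ and AddDataSection() above.
MediaSessionOptions data_opts;
data_opts.bundle_enabled = true;
AddDataSection(RtpTransceiverDirection::kSendRecv, &data_opts);
std::unique_ptr<SessionDescription> data_offer =
    f1_.CreateOfferOrError(data_opts, nullptr).MoveValue();
// Both tests below now expect the same constant; in SDP terms the data
// section is rendered roughly as
//   m=application 9 UDP/DTLS/SCTP webrtc-datachannel
EXPECT_EQ(kMediaProtocolUdpDtlsSctp,
          GetFirstSctpDataContentDescription(data_offer.get())->protocol());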
@@ -925,14 +1057,14 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateSctpDataOffer) { MediaSessionOptions opts; opts.bundle_enabled = true; AddDataSection(RtpTransceiverDirection::kSendRecv, &opts); - f1_.set_secure(SEC_ENABLED); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - EXPECT_TRUE(offer.get() != NULL); - EXPECT_TRUE(offer->GetContentByName("data") != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + EXPECT_TRUE(offer.get()); + EXPECT_TRUE(offer->GetContentByName("data")); auto dcd = GetFirstSctpDataContentDescription(offer.get()); ASSERT_TRUE(dcd); // Since this transport is insecure, the protocol should be "SCTP". - EXPECT_EQ(cricket::kMediaProtocolSctp, dcd->protocol()); + EXPECT_EQ(kMediaProtocolUdpDtlsSctp, dcd->protocol()); } // Create an SCTP data offer with bundle without error. @@ -940,15 +1072,14 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateSecureSctpDataOffer) { MediaSessionOptions opts; opts.bundle_enabled = true; AddDataSection(RtpTransceiverDirection::kSendRecv, &opts); - f1_.set_secure(SEC_ENABLED); - tdf1_.set_secure(SEC_ENABLED); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - EXPECT_TRUE(offer.get() != NULL); - EXPECT_TRUE(offer->GetContentByName("data") != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + EXPECT_TRUE(offer.get()); + EXPECT_TRUE(offer->GetContentByName("data")); auto dcd = GetFirstSctpDataContentDescription(offer.get()); ASSERT_TRUE(dcd); // The protocol should now be "UDP/DTLS/SCTP" - EXPECT_EQ(cricket::kMediaProtocolUdpDtlsSctp, dcd->protocol()); + EXPECT_EQ(kMediaProtocolUdpDtlsSctp, dcd->protocol()); } // Test creating an sctp data channel from an already generated offer. @@ -956,18 +1087,18 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateImplicitSctpDataOffer) { MediaSessionOptions opts; opts.bundle_enabled = true; AddDataSection(RtpTransceiverDirection::kSendRecv, &opts); - f1_.set_secure(SEC_ENABLED); - std::unique_ptr offer1(f1_.CreateOffer(opts, NULL)); - ASSERT_TRUE(offer1.get() != NULL); + std::unique_ptr offer1( + f1_.CreateOfferOrError(opts, nullptr).MoveValue()); + ASSERT_TRUE(offer1.get()); const ContentInfo* data = offer1->GetContentByName("data"); - ASSERT_TRUE(data != NULL); - ASSERT_EQ(cricket::kMediaProtocolSctp, data->media_description()->protocol()); + ASSERT_TRUE(data); + ASSERT_EQ(kMediaProtocolUdpDtlsSctp, data->media_description()->protocol()); std::unique_ptr offer2( - f1_.CreateOffer(opts, offer1.get())); + f1_.CreateOfferOrError(opts, offer1.get()).MoveValue()); data = offer2->GetContentByName("data"); - ASSERT_TRUE(data != NULL); - EXPECT_EQ(cricket::kMediaProtocolSctp, data->media_description()->protocol()); + ASSERT_TRUE(data); + EXPECT_EQ(kMediaProtocolUdpDtlsSctp, data->media_description()->protocol()); } // Test that if BUNDLE is enabled and all media sections are rejected then the @@ -975,16 +1106,17 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateImplicitSctpDataOffer) { TEST_F(MediaSessionDescriptionFactoryTest, ReOfferNoBundleGroupIfAllRejected) { MediaSessionOptions opts; opts.bundle_enabled = true; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kSendRecv, kActive, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); opts.media_description_options[0].stopped = true; std::unique_ptr reoffer = - 
f1_.CreateOffer(opts, offer.get()); + f1_.CreateOfferOrError(opts, offer.get()).MoveValue(); - EXPECT_FALSE(reoffer->GetGroupByName(cricket::GROUP_TYPE_BUNDLE)); + EXPECT_FALSE(reoffer->GetGroupByName(GROUP_TYPE_BUNDLE)); } // Test that if BUNDLE is enabled and the remote re-offer does not include a @@ -993,20 +1125,21 @@ TEST_F(MediaSessionDescriptionFactoryTest, ReOfferNoBundleGroupIfAllRejected) { TEST_F(MediaSessionDescriptionFactoryTest, ReAnswerNoBundleGroupIfAllRejected) { MediaSessionOptions opts; opts.bundle_enabled = true; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kSendRecv, kActive, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); opts.media_description_options[0].stopped = true; std::unique_ptr reoffer = - f1_.CreateOffer(opts, offer.get()); + f1_.CreateOfferOrError(opts, offer.get()).MoveValue(); std::unique_ptr reanswer = - f2_.CreateAnswer(reoffer.get(), opts, answer.get()); + f2_.CreateAnswerOrError(reoffer.get(), opts, answer.get()).MoveValue(); - EXPECT_FALSE(reanswer->GetGroupByName(cricket::GROUP_TYPE_BUNDLE)); + EXPECT_FALSE(reanswer->GetGroupByName(GROUP_TYPE_BUNDLE)); } // Test that if BUNDLE is enabled and the previous offerer-tagged media section @@ -1015,21 +1148,21 @@ TEST_F(MediaSessionDescriptionFactoryTest, ReAnswerNoBundleGroupIfAllRejected) { TEST_F(MediaSessionDescriptionFactoryTest, ReOfferChangeBundleOffererTagged) { MediaSessionOptions opts; opts.bundle_enabled = true; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kSendRecv, kActive, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); // Reject the audio m= section and add a video m= section. opts.media_description_options[0].stopped = true; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &opts); std::unique_ptr reoffer = - f1_.CreateOffer(opts, offer.get()); + f1_.CreateOfferOrError(opts, offer.get()).MoveValue(); - const cricket::ContentGroup* bundle_group = - reoffer->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + const ContentGroup* bundle_group = reoffer->GetGroupByName(GROUP_TYPE_BUNDLE); ASSERT_TRUE(bundle_group); EXPECT_FALSE(bundle_group->HasContentName("audio")); EXPECT_TRUE(bundle_group->HasContentName("video")); @@ -1041,25 +1174,26 @@ TEST_F(MediaSessionDescriptionFactoryTest, ReOfferChangeBundleOffererTagged) { TEST_F(MediaSessionDescriptionFactoryTest, ReAnswerChangedBundleOffererTagged) { MediaSessionOptions opts; opts.bundle_enabled = true; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kSendRecv, kActive, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); // Reject the audio m= section and add a video m= section. 
opts.media_description_options[0].stopped = true; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &opts); std::unique_ptr reoffer = - f1_.CreateOffer(opts, offer.get()); + f1_.CreateOfferOrError(opts, offer.get()).MoveValue(); std::unique_ptr reanswer = - f2_.CreateAnswer(reoffer.get(), opts, answer.get()); + f2_.CreateAnswerOrError(reoffer.get(), opts, answer.get()).MoveValue(); - const cricket::ContentGroup* bundle_group = - reanswer->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + const ContentGroup* bundle_group = + reanswer->GetGroupByName(GROUP_TYPE_BUNDLE); ASSERT_TRUE(bundle_group); EXPECT_FALSE(bundle_group->HasContentName("audio")); EXPECT_TRUE(bundle_group->HasContentName("video")); @@ -1070,28 +1204,29 @@ TEST_F(MediaSessionDescriptionFactoryTest, // Create an offer with 4 m= sections, initially without BUNDLE groups. MediaSessionOptions opts; opts.bundle_enabled = false; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "1", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "1", RtpTransceiverDirection::kSendRecv, kActive, &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "2", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "2", RtpTransceiverDirection::kSendRecv, kActive, &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "3", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "3", RtpTransceiverDirection::kSendRecv, kActive, &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "4", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "4", RtpTransceiverDirection::kSendRecv, kActive, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); ASSERT_TRUE(offer->groups().empty()); // Munge the offer to have two groups. Offers like these cannot be generated // without munging, but it is valid to receive such offers from remote // endpoints. - cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group1(GROUP_TYPE_BUNDLE); bundle_group1.AddContentName("1"); bundle_group1.AddContentName("2"); - cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group2(GROUP_TYPE_BUNDLE); bundle_group2.AddContentName("3"); bundle_group2.AddContentName("4"); offer->AddGroup(bundle_group1); @@ -1101,10 +1236,10 @@ TEST_F(MediaSessionDescriptionFactoryTest, // groups. opts.bundle_enabled = true; std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); - std::vector answer_groups = - answer->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE); + std::vector answer_groups = + answer->GetGroupsByName(GROUP_TYPE_BUNDLE); ASSERT_EQ(answer_groups.size(), 2u); EXPECT_EQ(answer_groups[0]->content_names().size(), 2u); EXPECT_TRUE(answer_groups[0]->HasContentName("1")); @@ -1116,9 +1251,9 @@ TEST_F(MediaSessionDescriptionFactoryTest, // If BUNDLE is disabled, the answer to this offer should reject both BUNDLE // groups. opts.bundle_enabled = false; - answer = f2_.CreateAnswer(offer.get(), opts, nullptr); + answer = f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); - answer_groups = answer->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE); + answer_groups = answer->GetGroupsByName(GROUP_TYPE_BUNDLE); // Rejected groups are still listed, but they are empty. 
ASSERT_EQ(answer_groups.size(), 2u); EXPECT_TRUE(answer_groups[0]->content_names().empty()); @@ -1134,14 +1269,15 @@ TEST_F(MediaSessionDescriptionFactoryTest, MediaSessionOptions opts; opts.bundle_enabled = true; AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); // Reject the audio m= section. opts.media_description_options[0].stopped = true; std::unique_ptr reoffer = - f1_.CreateOffer(opts, offer.get()); + f1_.CreateOfferOrError(opts, offer.get()).MoveValue(); const TransportDescription* offer_tagged = offer->GetTransportDescriptionByName("audio"); @@ -1162,16 +1298,17 @@ TEST_F(MediaSessionDescriptionFactoryTest, MediaSessionOptions opts; opts.bundle_enabled = true; AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); // Reject the audio m= section. opts.media_description_options[0].stopped = true; std::unique_ptr reoffer = - f1_.CreateOffer(opts, offer.get()); + f1_.CreateOfferOrError(opts, offer.get()).MoveValue(); std::unique_ptr reanswer = - f2_.CreateAnswer(reoffer.get(), opts, answer.get()); + f2_.CreateAnswerOrError(reoffer.get(), opts, answer.get()).MoveValue(); const TransportDescription* answer_tagged = answer->GetTransportDescriptionByName("audio"); @@ -1188,14 +1325,15 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateOfferWithoutLegacyStreams) { MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); const ContentInfo* ac = offer->GetContentByName("audio"); const ContentInfo* vc = offer->GetContentByName("video"); - ASSERT_TRUE(ac != NULL); - ASSERT_TRUE(vc != NULL); - const AudioContentDescription* acd = ac->media_description()->as_audio(); - const VideoContentDescription* vcd = vc->media_description()->as_video(); + ASSERT_TRUE(ac); + ASSERT_TRUE(vc); + const MediaContentDescription* acd = ac->media_description(); + const MediaContentDescription* vcd = vc->media_description(); EXPECT_FALSE(vcd->has_ssrcs()); // No StreamParams. EXPECT_FALSE(acd->has_ssrcs()); // No StreamParams. 
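
The BUNDLE assertions in the hunks above (rejected or stopped m= sections vanishing from their group, and groups answered empty when BUNDLE is disabled) boil down to bookkeeping like the following sketch. BundleGroup and AnswerBundleGroup are illustrative stand-ins, not the real ContentGroup or the factory's answer logic.

// Sketch of the BUNDLE bookkeeping exercised above: rejected m= sections are
// dropped from their group, and a group answered with BUNDLE disabled stays
// listed but empty. Types and helpers here are stand-ins, not WebRTC's.
#include <algorithm>
#include <cassert>
#include <string>
#include <vector>

struct BundleGroup {
  std::vector<std::string> content_names;

  bool HasContentName(const std::string& name) const {
    return std::find(content_names.begin(), content_names.end(), name) !=
           content_names.end();
  }
};

// Builds the answered group for one offered group.
BundleGroup AnswerBundleGroup(const BundleGroup& offered,
                              const std::vector<std::string>& rejected,
                              bool bundle_enabled) {
  BundleGroup answered;
  if (!bundle_enabled) {
    return answered;  // Group is kept in the answer, but with no contents.
  }
  for (const std::string& name : offered.content_names) {
    bool is_rejected =
        std::find(rejected.begin(), rejected.end(), name) != rejected.end();
    if (!is_rejected) {
      answered.content_names.push_back(name);
    }
  }
  return answered;
}

int main() {
  BundleGroup offered{{"audio", "video"}};

  // Audio m= section rejected: it disappears from the group.
  BundleGroup reduced = AnswerBundleGroup(offered, {"audio"}, true);
  assert(!reduced.HasContentName("audio"));
  assert(reduced.HasContentName("video"));

  // BUNDLE disabled: the group survives but is empty.
  BundleGroup empty = AnswerBundleGroup(offered, {}, false);
  assert(empty.content_names.empty());
  return 0;
}
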
@@ -1205,16 +1343,21 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, TestCreateSendOnlyOffer) { MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kSendOnly, &opts); - AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1, - {kMediaStream1}, 1, &opts); - AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, kAudioTrack1, - {kMediaStream1}, 1, &opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &opts); + AttachSenderToMediaDescriptionOptions("audio", webrtc::MediaType::AUDIO, + kAudioTrack1, {kMediaStream1}, 1, + &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); EXPECT_EQ(2u, offer->contents().size()); - EXPECT_TRUE(IsMediaContentOfType(&offer->contents()[0], MEDIA_TYPE_AUDIO)); - EXPECT_TRUE(IsMediaContentOfType(&offer->contents()[1], MEDIA_TYPE_VIDEO)); + EXPECT_TRUE( + IsMediaContentOfType(&offer->contents()[0], webrtc::MediaType::AUDIO)); + EXPECT_TRUE( + IsMediaContentOfType(&offer->contents()[1], webrtc::MediaType::VIDEO)); EXPECT_EQ(RtpTransceiverDirection::kSendOnly, GetMediaDirection(&offer->contents()[0])); @@ -1228,106 +1371,106 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateOfferContentOrder) { MediaSessionOptions opts; AddDataSection(RtpTransceiverDirection::kSendRecv, &opts); - std::unique_ptr offer1(f1_.CreateOffer(opts, NULL)); - ASSERT_TRUE(offer1.get() != NULL); + std::unique_ptr offer1( + f1_.CreateOfferOrError(opts, nullptr).MoveValue()); + ASSERT_TRUE(offer1.get()); EXPECT_EQ(1u, offer1->contents().size()); - EXPECT_TRUE(IsMediaContentOfType(&offer1->contents()[0], MEDIA_TYPE_DATA)); + EXPECT_TRUE( + IsMediaContentOfType(&offer1->contents()[0], webrtc::MediaType::DATA)); - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kRecvOnly, kActive, &opts); std::unique_ptr offer2( - f1_.CreateOffer(opts, offer1.get())); - ASSERT_TRUE(offer2.get() != NULL); + f1_.CreateOfferOrError(opts, offer1.get()).MoveValue()); + ASSERT_TRUE(offer2.get()); EXPECT_EQ(2u, offer2->contents().size()); - EXPECT_TRUE(IsMediaContentOfType(&offer2->contents()[0], MEDIA_TYPE_DATA)); - EXPECT_TRUE(IsMediaContentOfType(&offer2->contents()[1], MEDIA_TYPE_VIDEO)); + EXPECT_TRUE( + IsMediaContentOfType(&offer2->contents()[0], webrtc::MediaType::DATA)); + EXPECT_TRUE( + IsMediaContentOfType(&offer2->contents()[1], webrtc::MediaType::VIDEO)); - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kRecvOnly, kActive, &opts); std::unique_ptr offer3( - f1_.CreateOffer(opts, offer2.get())); - ASSERT_TRUE(offer3.get() != NULL); + f1_.CreateOfferOrError(opts, offer2.get()).MoveValue()); + ASSERT_TRUE(offer3.get()); EXPECT_EQ(3u, offer3->contents().size()); - EXPECT_TRUE(IsMediaContentOfType(&offer3->contents()[0], MEDIA_TYPE_DATA)); - EXPECT_TRUE(IsMediaContentOfType(&offer3->contents()[1], MEDIA_TYPE_VIDEO)); - EXPECT_TRUE(IsMediaContentOfType(&offer3->contents()[2], MEDIA_TYPE_AUDIO)); + EXPECT_TRUE( + IsMediaContentOfType(&offer3->contents()[0], webrtc::MediaType::DATA)); + EXPECT_TRUE( + IsMediaContentOfType(&offer3->contents()[1], webrtc::MediaType::VIDEO)); + EXPECT_TRUE( + 
IsMediaContentOfType(&offer3->contents()[2], webrtc::MediaType::AUDIO)); } // Create a typical audio answer, and ensure it matches what we expect. TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioAnswer) { - f1_.set_secure(SEC_ENABLED); - f2_.set_secure(SEC_ENABLED); std::unique_ptr offer = - f1_.CreateOffer(CreatePlanBMediaSessionOptions(), NULL); - ASSERT_TRUE(offer.get() != NULL); + f1_.CreateOfferOrError(CreateAudioMediaSession(), nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), CreatePlanBMediaSessionOptions(), NULL); + f2_.CreateAnswerOrError(offer.get(), CreateAudioMediaSession(), nullptr) + .MoveValue(); const ContentInfo* ac = answer->GetContentByName("audio"); const ContentInfo* vc = answer->GetContentByName("video"); - ASSERT_TRUE(ac != NULL); - ASSERT_TRUE(vc == NULL); + ASSERT_TRUE(ac); + EXPECT_FALSE(vc); EXPECT_EQ(MediaProtocolType::kRtp, ac->type); - const AudioContentDescription* acd = ac->media_description()->as_audio(); - EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type()); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_EQ(webrtc::MediaType::AUDIO, acd->type()); EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecsAnswer)); EXPECT_EQ(0U, acd->first_ssrc()); // no sender is attached EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // negotiated auto bw EXPECT_TRUE(acd->rtcp_mux()); // negotiated rtcp-mux - ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite); - EXPECT_EQ(cricket::kMediaProtocolSavpf, acd->protocol()); + EXPECT_EQ(kMediaProtocolDtlsSavpf, acd->protocol()); } // Create a typical audio answer with GCM ciphers enabled, and ensure it // matches what we expect. TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioAnswerGcm) { - f1_.set_secure(SEC_ENABLED); - f2_.set_secure(SEC_ENABLED); - MediaSessionOptions opts = CreatePlanBMediaSessionOptions(); - opts.crypto_options.srtp.enable_gcm_crypto_suites = true; - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); - for (cricket::ContentInfo& content : offer->contents()) { - auto cryptos = content.media_description()->cryptos(); - PreferGcmCryptoParameters(&cryptos); - content.media_description()->set_cryptos(cryptos); - } + MediaSessionOptions opts = CreateAudioMediaSession(); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const ContentInfo* ac = answer->GetContentByName("audio"); const ContentInfo* vc = answer->GetContentByName("video"); - ASSERT_TRUE(ac != NULL); - ASSERT_TRUE(vc == NULL); + ASSERT_TRUE(ac); + EXPECT_FALSE(vc); EXPECT_EQ(MediaProtocolType::kRtp, ac->type); - const AudioContentDescription* acd = ac->media_description()->as_audio(); - EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type()); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_EQ(webrtc::MediaType::AUDIO, acd->type()); EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecsAnswer)); EXPECT_EQ(0U, acd->first_ssrc()); // no sender is attached EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // negotiated auto bw EXPECT_TRUE(acd->rtcp_mux()); // negotiated rtcp-mux - ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuiteGcm); - EXPECT_EQ(cricket::kMediaProtocolSavpf, acd->protocol()); + EXPECT_EQ(kMediaProtocolDtlsSavpf, acd->protocol()); } // Create an audio answer with no common codecs, and ensure it is rejected. 
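
For orientation before the no-common-codecs cases, the answer-side selection that produces lists like kAudioCodecsAnswer is essentially an intersection of the offered codecs with the answerer's own; the sketch below uses stand-in types, assumes the matched codec keeps the offered payload type, and deliberately ignores fmtp, RTX and FEC handling.

// Sketch of answer-side codec matching: keep the offered codecs the answerer
// also supports (matched on name, clock rate and channels), and reject the
// m= section if nothing matches. SimpleCodec is a stand-in, not the real
// Codec type.
#include <cassert>
#include <string>
#include <vector>

struct SimpleCodec {
  int id;
  std::string name;
  int clockrate;
  int channels;
};

bool Matches(const SimpleCodec& a, const SimpleCodec& b) {
  return a.name == b.name && a.clockrate == b.clockrate &&
         a.channels == b.channels;
}

std::vector<SimpleCodec> NegotiateCodecs(
    const std::vector<SimpleCodec>& offered,
    const std::vector<SimpleCodec>& local) {
  std::vector<SimpleCodec> answer;
  for (const SimpleCodec& offer_codec : offered) {
    for (const SimpleCodec& local_codec : local) {
      if (Matches(offer_codec, local_codec)) {
        answer.push_back(offer_codec);  // Assumed: offered payload type kept.
        break;
      }
    }
  }
  return answer;  // Empty result => the m= section gets rejected.
}

int main() {
  std::vector<SimpleCodec> offered = {{96, "opus", 48000, 1}};
  std::vector<SimpleCodec> local_ok = {{111, "opus", 48000, 1}};
  std::vector<SimpleCodec> local_disjoint = {{0, "PCMU", 8000, 1}};

  assert(NegotiateCodecs(offered, local_ok).size() == 1u);
  assert(NegotiateCodecs(offered, local_ok)[0].id == 96);
  assert(NegotiateCodecs(offered, local_disjoint).empty());
  return 0;
}
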
TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioAnswerWithNoCommonCodecs) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kSendRecv, kActive, &opts); - std::vector f1_codecs = {cricket::CreateAudioCodec(96, "opus", 48000, 1)}; - f1_.set_audio_codecs(f1_codecs, f1_codecs); + std::vector f1_codecs = {CreateAudioCodec(96, "opus", 48000, 1)}; + codec_lookup_helper_1_.GetCodecVendor()->set_audio_codecs(f1_codecs, + f1_codecs); - std::vector f2_codecs = {cricket::CreateAudioCodec(0, "PCMU", 8000, 1)}; - f2_.set_audio_codecs(f2_codecs, f2_codecs); + std::vector f2_codecs = {CreateAudioCodec(0, "PCMU", 8000, 1)}; + codec_lookup_helper_2_.GetCodecVendor()->set_audio_codecs(f2_codecs, + f2_codecs); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const ContentInfo* ac = answer->GetContentByName("audio"); - ASSERT_TRUE(ac != NULL); + ASSERT_TRUE(ac); EXPECT_TRUE(ac->rejected); } @@ -1335,70 +1478,52 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswer) { MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - f1_.set_secure(SEC_ENABLED); - f2_.set_secure(SEC_ENABLED); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const ContentInfo* ac = answer->GetContentByName("audio"); const ContentInfo* vc = answer->GetContentByName("video"); - ASSERT_TRUE(ac != NULL); - ASSERT_TRUE(vc != NULL); + ASSERT_TRUE(ac); + ASSERT_TRUE(vc); EXPECT_EQ(MediaProtocolType::kRtp, ac->type); EXPECT_EQ(MediaProtocolType::kRtp, vc->type); - const AudioContentDescription* acd = ac->media_description()->as_audio(); - const VideoContentDescription* vcd = vc->media_description()->as_video(); - EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type()); + const MediaContentDescription* acd = ac->media_description(); + const MediaContentDescription* vcd = vc->media_description(); + EXPECT_EQ(webrtc::MediaType::AUDIO, acd->type()); EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecsAnswer)); EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // negotiated auto bw EXPECT_EQ(0U, acd->first_ssrc()); // no sender is attached EXPECT_TRUE(acd->rtcp_mux()); // negotiated rtcp-mux - ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite); - EXPECT_EQ(MEDIA_TYPE_VIDEO, vcd->type()); + EXPECT_EQ(webrtc::MediaType::VIDEO, vcd->type()); EXPECT_THAT(vcd->codecs(), ElementsAreArray(kVideoCodecsAnswer)); EXPECT_EQ(0U, vcd->first_ssrc()); // no sender is attached EXPECT_TRUE(vcd->rtcp_mux()); // negotiated rtcp-mux - ASSERT_CRYPTO(vcd, 1U, kDefaultSrtpCryptoSuite); - EXPECT_EQ(cricket::kMediaProtocolSavpf, vcd->protocol()); -} - -// Create a typical video answer with GCM ciphers enabled, and ensure it -// matches what we expect. 
-TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerGcm) { - TestVideoGcmCipher(true, true); -} - -// Create a typical video answer with GCM ciphers enabled for the offer only, -// and ensure it matches what we expect. -TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerGcmOffer) { - TestVideoGcmCipher(true, false); -} - -// Create a typical video answer with GCM ciphers enabled for the answer only, -// and ensure it matches what we expect. -TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerGcmAnswer) { - TestVideoGcmCipher(false, true); + EXPECT_EQ(kMediaProtocolDtlsSavpf, vcd->protocol()); } // Create a video answer with no common codecs, and ensure it is rejected. TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerWithNoCommonCodecs) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &opts); - std::vector f1_codecs = {cricket::CreateVideoCodec(96, "H264")}; - f1_.set_video_codecs(f1_codecs, f1_codecs); + std::vector f1_codecs = {CreateVideoCodec(96, "H264")}; + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); - std::vector f2_codecs = {cricket::CreateVideoCodec(97, "VP8")}; - f2_.set_video_codecs(f2_codecs, f2_codecs); + std::vector f2_codecs = {CreateVideoCodec(97, "VP8")}; + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs(f2_codecs, + f2_codecs); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const ContentInfo* vc = answer->GetContentByName("video"); - ASSERT_TRUE(vc != NULL); + ASSERT_TRUE(vc); EXPECT_TRUE(vc->rejected); } @@ -1407,22 +1532,25 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerWithOnlyFecCodecsCommon) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &opts); - std::vector f1_codecs = {cricket::CreateVideoCodec(96, "H264"), - cricket::CreateVideoCodec(118, "flexfec-03")}; - f1_.set_video_codecs(f1_codecs, f1_codecs); + std::vector f1_codecs = {CreateVideoCodec(96, "H264"), + CreateVideoCodec(118, "flexfec-03")}; + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); - std::vector f2_codecs = {cricket::CreateVideoCodec(97, "VP8"), - cricket::CreateVideoCodec(118, "flexfec-03")}; - f2_.set_video_codecs(f2_codecs, f2_codecs); + std::vector f2_codecs = {CreateVideoCodec(97, "VP8"), + CreateVideoCodec(118, "flexfec-03")}; + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs(f2_codecs, + f2_codecs); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const ContentInfo* vc = answer->GetContentByName("video"); - ASSERT_TRUE(vc != NULL); + ASSERT_TRUE(vc); EXPECT_TRUE(vc->rejected); } @@ -1431,18 +1559,19 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, TestCreateDataAnswerUsesSctpmap) { MediaSessionOptions opts; 
AddDataSection(RtpTransceiverDirection::kSendRecv, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); ContentInfo* dc_offer = offer->GetContentByName("data"); - ASSERT_TRUE(dc_offer != NULL); + ASSERT_TRUE(dc_offer); SctpDataContentDescription* dcd_offer = dc_offer->media_description()->as_sctp(); EXPECT_TRUE(dcd_offer->use_sctpmap()); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const ContentInfo* dc_answer = answer->GetContentByName("data"); - ASSERT_TRUE(dc_answer != NULL); + ASSERT_TRUE(dc_answer); const SctpDataContentDescription* dcd_answer = dc_answer->media_description()->as_sctp(); EXPECT_TRUE(dcd_answer->use_sctpmap()); @@ -1452,18 +1581,19 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateDataAnswerUsesSctpmap) { TEST_F(MediaSessionDescriptionFactoryTest, TestCreateDataAnswerWithoutSctpmap) { MediaSessionOptions opts; AddDataSection(RtpTransceiverDirection::kSendRecv, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); ContentInfo* dc_offer = offer->GetContentByName("data"); - ASSERT_TRUE(dc_offer != NULL); + ASSERT_TRUE(dc_offer); SctpDataContentDescription* dcd_offer = dc_offer->media_description()->as_sctp(); dcd_offer->set_use_sctpmap(false); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const ContentInfo* dc_answer = answer->GetContentByName("data"); - ASSERT_TRUE(dc_answer != NULL); + ASSERT_TRUE(dc_answer); const SctpDataContentDescription* dcd_answer = dc_answer->media_description()->as_sctp(); EXPECT_FALSE(dcd_answer->use_sctpmap()); @@ -1473,19 +1603,13 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateDataAnswerWithoutSctpmap) { // and "TCP/DTLS/SCTP" offers. TEST_F(MediaSessionDescriptionFactoryTest, TestCreateDataAnswerToDifferentOfferedProtos) { - // Need to enable DTLS offer/answer generation (disabled by default in this - // test). 
- f1_.set_secure(SEC_ENABLED); - f2_.set_secure(SEC_ENABLED); - tdf1_.set_secure(SEC_ENABLED); - tdf2_.set_secure(SEC_ENABLED); - MediaSessionOptions opts; AddDataSection(RtpTransceiverDirection::kSendRecv, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); - ASSERT_TRUE(offer.get() != nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); ContentInfo* dc_offer = offer->GetContentByName("data"); - ASSERT_TRUE(dc_offer != nullptr); + ASSERT_TRUE(dc_offer); SctpDataContentDescription* dcd_offer = dc_offer->media_description()->as_sctp(); ASSERT_TRUE(dcd_offer); @@ -1495,9 +1619,9 @@ TEST_F(MediaSessionDescriptionFactoryTest, for (const std::string& proto : protos) { dcd_offer->set_protocol(proto); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const ContentInfo* dc_answer = answer->GetContentByName("data"); - ASSERT_TRUE(dc_answer != nullptr); + ASSERT_TRUE(dc_answer); const SctpDataContentDescription* dcd_answer = dc_answer->media_description()->as_sctp(); EXPECT_FALSE(dc_answer->rejected); @@ -1507,27 +1631,21 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, TestCreateDataAnswerToOfferWithDefinedMessageSize) { - // Need to enable DTLS offer/answer generation (disabled by default in this - // test). - f1_.set_secure(SEC_ENABLED); - f2_.set_secure(SEC_ENABLED); - tdf1_.set_secure(SEC_ENABLED); - tdf2_.set_secure(SEC_ENABLED); - MediaSessionOptions opts; AddDataSection(RtpTransceiverDirection::kSendRecv, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); - ASSERT_TRUE(offer.get() != nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); ContentInfo* dc_offer = offer->GetContentByName("data"); - ASSERT_TRUE(dc_offer != nullptr); + ASSERT_TRUE(dc_offer); SctpDataContentDescription* dcd_offer = dc_offer->media_description()->as_sctp(); ASSERT_TRUE(dcd_offer); dcd_offer->set_max_message_size(1234); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const ContentInfo* dc_answer = answer->GetContentByName("data"); - ASSERT_TRUE(dc_answer != nullptr); + ASSERT_TRUE(dc_answer); const SctpDataContentDescription* dcd_answer = dc_answer->media_description()->as_sctp(); EXPECT_FALSE(dc_answer->rejected); @@ -1536,31 +1654,25 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, TestCreateDataAnswerToOfferWithZeroMessageSize) { - // Need to enable DTLS offer/answer generation (disabled by default in this - // test). 
- f1_.set_secure(SEC_ENABLED); - f2_.set_secure(SEC_ENABLED); - tdf1_.set_secure(SEC_ENABLED); - tdf2_.set_secure(SEC_ENABLED); - MediaSessionOptions opts; AddDataSection(RtpTransceiverDirection::kSendRecv, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); - ASSERT_TRUE(offer.get() != nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); ContentInfo* dc_offer = offer->GetContentByName("data"); - ASSERT_TRUE(dc_offer != nullptr); + ASSERT_TRUE(dc_offer); SctpDataContentDescription* dcd_offer = dc_offer->media_description()->as_sctp(); ASSERT_TRUE(dcd_offer); dcd_offer->set_max_message_size(0); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const ContentInfo* dc_answer = answer->GetContentByName("data"); - ASSERT_TRUE(dc_answer != nullptr); + ASSERT_TRUE(dc_answer); const SctpDataContentDescription* dcd_answer = dc_answer->media_description()->as_sctp(); EXPECT_FALSE(dc_answer->rejected); - EXPECT_EQ(cricket::kSctpSendBufferSize, dcd_answer->max_message_size()); + EXPECT_EQ(webrtc::kSctpSendBufferSize, dcd_answer->max_message_size()); } // Verifies that the order of the media contents in the offer is preserved in @@ -1570,32 +1682,36 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAnswerContentOrder) { // Creates a data only offer. AddDataSection(RtpTransceiverDirection::kSendRecv, &opts); - std::unique_ptr offer1(f1_.CreateOffer(opts, NULL)); - ASSERT_TRUE(offer1.get() != NULL); + std::unique_ptr offer1( + f1_.CreateOfferOrError(opts, nullptr).MoveValue()); + ASSERT_TRUE(offer1.get()); // Appends audio to the offer. - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kRecvOnly, kActive, &opts); std::unique_ptr offer2( - f1_.CreateOffer(opts, offer1.get())); - ASSERT_TRUE(offer2.get() != NULL); + f1_.CreateOfferOrError(opts, offer1.get()).MoveValue()); + ASSERT_TRUE(offer2.get()); // Appends video to the offer. - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kRecvOnly, kActive, &opts); std::unique_ptr offer3( - f1_.CreateOffer(opts, offer2.get())); - ASSERT_TRUE(offer3.get() != NULL); + f1_.CreateOfferOrError(opts, offer2.get()).MoveValue()); + ASSERT_TRUE(offer3.get()); std::unique_ptr answer = - f2_.CreateAnswer(offer3.get(), opts, NULL); - ASSERT_TRUE(answer.get() != NULL); + f2_.CreateAnswerOrError(offer3.get(), opts, nullptr).MoveValue(); + ASSERT_TRUE(answer.get()); EXPECT_EQ(3u, answer->contents().size()); - EXPECT_TRUE(IsMediaContentOfType(&answer->contents()[0], MEDIA_TYPE_DATA)); - EXPECT_TRUE(IsMediaContentOfType(&answer->contents()[1], MEDIA_TYPE_AUDIO)); - EXPECT_TRUE(IsMediaContentOfType(&answer->contents()[2], MEDIA_TYPE_VIDEO)); + EXPECT_TRUE( + IsMediaContentOfType(&answer->contents()[0], webrtc::MediaType::DATA)); + EXPECT_TRUE( + IsMediaContentOfType(&answer->contents()[1], webrtc::MediaType::AUDIO)); + EXPECT_TRUE( + IsMediaContentOfType(&answer->contents()[2], webrtc::MediaType::VIDEO)); } // TODO(deadbeef): Extend these tests to ensure the correct direction with other @@ -1629,87 +1745,101 @@ TEST_F(MediaSessionDescriptionFactoryTest, CreateAnswerToInactiveOffer) { RtpTransceiverDirection::kInactive); } -// Test that the media protocol is RTP/AVPF if DTLS and SDES are disabled. 
+// Test that the media protocol is RTP/AVPF if DTLS is disabled. TEST_F(MediaSessionDescriptionFactoryTest, AudioOfferAnswerWithCryptoDisabled) { - MediaSessionOptions opts = CreatePlanBMediaSessionOptions(); - f1_.set_secure(SEC_DISABLED); - f2_.set_secure(SEC_DISABLED); - tdf1_.set_secure(SEC_DISABLED); - tdf2_.set_secure(SEC_DISABLED); + MediaSessionOptions opts = CreateAudioMediaSession(); + tdf1_.SetInsecureForTesting(); + tdf1_.set_certificate(nullptr); + tdf2_.SetInsecureForTesting(); + tdf2_.set_certificate(nullptr); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); const AudioContentDescription* offer_acd = GetFirstAudioContentDescription(offer.get()); - ASSERT_TRUE(offer_acd != NULL); - EXPECT_EQ(cricket::kMediaProtocolAvpf, offer_acd->protocol()); + ASSERT_TRUE(offer_acd); + EXPECT_EQ(kMediaProtocolAvpf, offer_acd->protocol()); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const ContentInfo* ac_answer = answer->GetContentByName("audio"); - ASSERT_TRUE(ac_answer != NULL); + ASSERT_TRUE(ac_answer); EXPECT_FALSE(ac_answer->rejected); const AudioContentDescription* answer_acd = GetFirstAudioContentDescription(answer.get()); - ASSERT_TRUE(answer_acd != NULL); - EXPECT_EQ(cricket::kMediaProtocolAvpf, answer_acd->protocol()); -} - -// Create a video offer and answer and ensure the RTP header extensions -// matches what we expect. -TEST_F(MediaSessionDescriptionFactoryTest, TestOfferAnswerWithRtpExtensions) { - MediaSessionOptions opts; - AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1), - MAKE_VECTOR(kVideoRtpExtension1), &opts); - - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); - SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2), - MAKE_VECTOR(kVideoRtpExtension2), &opts); - std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); - - EXPECT_EQ( - MAKE_VECTOR(kAudioRtpExtension1), - GetFirstAudioContentDescription(offer.get())->rtp_header_extensions()); - EXPECT_EQ( - MAKE_VECTOR(kVideoRtpExtension1), - GetFirstVideoContentDescription(offer.get())->rtp_header_extensions()); - EXPECT_EQ( - MAKE_VECTOR(kAudioRtpExtensionAnswer), - GetFirstAudioContentDescription(answer.get())->rtp_header_extensions()); - EXPECT_EQ( - MAKE_VECTOR(kVideoRtpExtensionAnswer), - GetFirstVideoContentDescription(answer.get())->rtp_header_extensions()); + ASSERT_TRUE(answer_acd); + EXPECT_EQ(kMediaProtocolAvpf, answer_acd->protocol()); } // Create a audio/video offer and answer and ensure that the -// TransportSequenceNumber RTP header extensions are handled correctly. 02 is -// supported and should take precedence even though not listed among locally -// supported extensions. +// TransportSequenceNumber RTP v1 and v2 header extensions are handled +// correctly. TEST_F(MediaSessionDescriptionFactoryTest, - TestOfferAnswerWithTransportSequenceNumberInOffer) { + TestOfferAnswerWithTransportSequenceNumberV1LocalAndV1InOffer) { TestTransportSequenceNumberNegotiation( MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01), // Local. MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01), // Offer. MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01)); // Expected answer. 
} TEST_F(MediaSessionDescriptionFactoryTest, - TestOfferAnswerWithTransportSequenceNumber01And02InOffer) { + TestOfferAnswerWithTransportSequenceNumberV1LocalAndV1V2InOffer) { TestTransportSequenceNumberNegotiation( MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01), // Local. MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01And02), // Offer. + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01)); // Expected answer. +} +TEST_F(MediaSessionDescriptionFactoryTest, + TestOfferAnswerWithTransportSequenceNumberV1LocalAndV2InOffer) { + TestTransportSequenceNumberNegotiation( + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01), // Local. + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber02), // Offer. + {}); // Expected answer. +} +TEST_F(MediaSessionDescriptionFactoryTest, + TestOfferAnswerWithTransportSequenceNumberV2LocalAndV1InOffer) { + TestTransportSequenceNumberNegotiation( + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber02), // Local. + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01), // Offer. + {}); // Expected answer. +} +TEST_F(MediaSessionDescriptionFactoryTest, + TestOfferAnswerWithTransportSequenceNumberV2LocalAndV1V2InOffer) { + TestTransportSequenceNumberNegotiation( + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber02), // Local. + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01And02), // Offer. MAKE_VECTOR(kRtpExtensionTransportSequenceNumber02)); // Expected answer. } TEST_F(MediaSessionDescriptionFactoryTest, - TestOfferAnswerWithTransportSequenceNumber02InOffer) { + TestOfferAnswerWithTransportSequenceNumberV2LocalAndV2InOffer) { TestTransportSequenceNumberNegotiation( - MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01), // Local. + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber02), // Local. MAKE_VECTOR(kRtpExtensionTransportSequenceNumber02), // Offer. MAKE_VECTOR(kRtpExtensionTransportSequenceNumber02)); // Expected answer. } +TEST_F(MediaSessionDescriptionFactoryTest, + TestOfferAnswerWithTransportSequenceNumberV1V2LocalAndV1InOffer) { + TestTransportSequenceNumberNegotiation( + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01And02), // Local. + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01), // Offer. + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01)); // Expected answer. +} +TEST_F(MediaSessionDescriptionFactoryTest, + TestOfferAnswerWithTransportSequenceNumberV1V2LocalAndV2InOffer) { + TestTransportSequenceNumberNegotiation( + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01And02), // Local. + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber02), // Offer. + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber02)); // Expected answer. +} +TEST_F(MediaSessionDescriptionFactoryTest, + TestOfferAnswerWithTransportSequenceNumberV1V2LocalAndV1V2InOffer) { + TestTransportSequenceNumberNegotiation( + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01And02), // Local. + MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01And02), // Offer. + MAKE_VECTOR( + kRtpExtensionTransportSequenceNumber01And02)); // Expected answer. 
+} TEST_F(MediaSessionDescriptionFactoryTest, TestNegotiateFrameDescriptorWhenUnexposedLocally) { @@ -1719,12 +1849,13 @@ TEST_F(MediaSessionDescriptionFactoryTest, SetAudioVideoRtpHeaderExtensions( MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00), MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00), &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); SetAudioVideoRtpHeaderExtensions( MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01), MAKE_VECTOR(kRtpExtensionTransportSequenceNumber01), &opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); EXPECT_THAT( GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(), ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00)); @@ -1741,9 +1872,10 @@ TEST_F(MediaSessionDescriptionFactoryTest, SetAudioVideoRtpHeaderExtensions( MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00), MAKE_VECTOR(kRtpExtensionGenericFrameDescriptorUri00), &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); EXPECT_THAT( GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(), ElementsAreArray(kRtpExtensionGenericFrameDescriptorUri00)); @@ -1759,11 +1891,12 @@ TEST_F(MediaSessionDescriptionFactoryTest, RtpExtension offer_dd(RtpExtension::kDependencyDescriptorUri, 7); SetAudioVideoRtpHeaderExtensions({}, {offer_dd}, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); RtpExtension local_tsn(RtpExtension::kTransportSequenceNumberUri, 5); SetAudioVideoRtpHeaderExtensions({}, {local_tsn}, &opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); EXPECT_THAT( GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), ElementsAre(offer_dd)); @@ -1777,10 +1910,11 @@ TEST_F(MediaSessionDescriptionFactoryTest, RtpExtension offer_dd(RtpExtension::kDependencyDescriptorUri, 7); RtpExtension local_dd(RtpExtension::kDependencyDescriptorUri, 5); SetAudioVideoRtpHeaderExtensions({}, {offer_dd}, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); SetAudioVideoRtpHeaderExtensions({}, {local_dd}, &opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); EXPECT_THAT( GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), ElementsAre(offer_dd)); @@ -1791,16 +1925,17 @@ TEST_F(MediaSessionDescriptionFactoryTest, MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - const cricket::RtpHeaderExtensions offered_extensions = { + const RtpHeaderExtensions offered_extensions = { RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 7)}; - const cricket::RtpHeaderExtensions local_extensions = { + const RtpHeaderExtensions local_extensions = { RtpExtension(RtpExtension::kTransportSequenceNumberUri, 5)}; SetAudioVideoRtpHeaderExtensions(offered_extensions, offered_extensions, &opts); - std::unique_ptr offer = 
f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); SetAudioVideoRtpHeaderExtensions(local_extensions, local_extensions, &opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); EXPECT_THAT( GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), ElementsAreArray(offered_extensions)); @@ -1814,16 +1949,17 @@ TEST_F(MediaSessionDescriptionFactoryTest, MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - const cricket::RtpHeaderExtensions offered_extensions = { + const RtpHeaderExtensions offered_extensions = { RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 7)}; - const cricket::RtpHeaderExtensions local_extensions = { + const RtpHeaderExtensions local_extensions = { RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 5)}; SetAudioVideoRtpHeaderExtensions(offered_extensions, offered_extensions, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); SetAudioVideoRtpHeaderExtensions(local_extensions, local_extensions, &opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); EXPECT_THAT( GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), ElementsAreArray(offered_extensions)); @@ -1837,16 +1973,17 @@ TEST_F(MediaSessionDescriptionFactoryTest, MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - const cricket::RtpHeaderExtensions offered_extensions = { + const RtpHeaderExtensions offered_extensions = { RtpExtension(RtpExtension::kTransportSequenceNumberUri, 7)}; - const cricket::RtpHeaderExtensions local_extensions = { + const RtpHeaderExtensions local_extensions = { RtpExtension(RtpExtension::kAbsoluteCaptureTimeUri, 5)}; SetAudioVideoRtpHeaderExtensions(offered_extensions, offered_extensions, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); SetAudioVideoRtpHeaderExtensions(local_extensions, local_extensions, &opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); EXPECT_THAT( GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), IsEmpty()); @@ -1858,23 +1995,23 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, OffersUnstoppedExtensionsWithAudioVideoExtensionStopped) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kSendRecv, kActive, &opts); opts.media_description_options.back().header_extensions = { - webrtc::RtpHeaderExtensionCapability("uri1", 1, - RtpTransceiverDirection::kStopped), - webrtc::RtpHeaderExtensionCapability("uri2", 3, - RtpTransceiverDirection::kSendOnly)}; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1", + RtpHeaderExtensionCapability("uri1", 1, + RtpTransceiverDirection::kStopped), + RtpHeaderExtensionCapability("uri2", 3, + RtpTransceiverDirection::kSendOnly)}; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video1", RtpTransceiverDirection::kSendRecv, kActive, &opts); opts.media_description_options.back().header_extensions = { - 
webrtc::RtpHeaderExtensionCapability("uri1", 1, - RtpTransceiverDirection::kStopped), - webrtc::RtpHeaderExtensionCapability("uri3", 7, - RtpTransceiverDirection::kSendOnly)}; - auto offer = f1_.CreateOffer(opts, nullptr); + RtpHeaderExtensionCapability("uri1", 1, + RtpTransceiverDirection::kStopped), + RtpHeaderExtensionCapability("uri3", 7, + RtpTransceiverDirection::kSendOnly)}; + auto offer = f1_.CreateOfferOrError(opts, nullptr).MoveValue(); EXPECT_THAT( offer->contents(), ElementsAre( @@ -1891,23 +2028,23 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, OffersUnstoppedExtensionsWithAudioExtensionStopped) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kSendRecv, kActive, &opts); opts.media_description_options.back().header_extensions = { - webrtc::RtpHeaderExtensionCapability("uri1", 1, - RtpTransceiverDirection::kSendOnly), - webrtc::RtpHeaderExtensionCapability("uri2", 3, - RtpTransceiverDirection::kStopped)}; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1", + RtpHeaderExtensionCapability("uri1", 1, + RtpTransceiverDirection::kSendOnly), + RtpHeaderExtensionCapability("uri2", 3, + RtpTransceiverDirection::kStopped)}; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video1", RtpTransceiverDirection::kSendRecv, kActive, &opts); opts.media_description_options.back().header_extensions = { - webrtc::RtpHeaderExtensionCapability("uri42", 42, - RtpTransceiverDirection::kSendRecv), - webrtc::RtpHeaderExtensionCapability("uri3", 7, - RtpTransceiverDirection::kSendOnly)}; - auto offer = f1_.CreateOffer(opts, nullptr); + RtpHeaderExtensionCapability("uri42", 42, + RtpTransceiverDirection::kSendRecv), + RtpHeaderExtensionCapability("uri3", 7, + RtpTransceiverDirection::kSendOnly)}; + auto offer = f1_.CreateOfferOrError(opts, nullptr).MoveValue(); EXPECT_THAT( offer->contents(), ElementsAre( @@ -1926,23 +2063,23 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, OffersUnstoppedExtensionsWithVideoExtensionStopped) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kSendRecv, kActive, &opts); opts.media_description_options.back().header_extensions = { - webrtc::RtpHeaderExtensionCapability("uri1", 5, - RtpTransceiverDirection::kSendOnly), - webrtc::RtpHeaderExtensionCapability("uri2", 7, - RtpTransceiverDirection::kSendRecv)}; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1", + RtpHeaderExtensionCapability("uri1", 5, + RtpTransceiverDirection::kSendOnly), + RtpHeaderExtensionCapability("uri2", 7, + RtpTransceiverDirection::kSendRecv)}; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video1", RtpTransceiverDirection::kSendRecv, kActive, &opts); opts.media_description_options.back().header_extensions = { - webrtc::RtpHeaderExtensionCapability("uri42", 42, - RtpTransceiverDirection::kSendRecv), - webrtc::RtpHeaderExtensionCapability("uri3", 7, - RtpTransceiverDirection::kStopped)}; - auto offer = f1_.CreateOffer(opts, nullptr); + RtpHeaderExtensionCapability("uri42", 42, + RtpTransceiverDirection::kSendRecv), + RtpHeaderExtensionCapability("uri3", 7, + RtpTransceiverDirection::kStopped)}; + auto offer = f1_.CreateOfferOrError(opts, nullptr).MoveValue(); EXPECT_THAT( offer->contents(), ElementsAre( @@ -1960,29 +2097,29 @@ 
TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, AnswersUnstoppedExtensions) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kSendRecv, kActive, &opts); opts.media_description_options.back().header_extensions = { - webrtc::RtpHeaderExtensionCapability("uri1", 4, - RtpTransceiverDirection::kStopped), - webrtc::RtpHeaderExtensionCapability("uri2", 3, - RtpTransceiverDirection::kSendOnly), - webrtc::RtpHeaderExtensionCapability("uri3", 2, - RtpTransceiverDirection::kRecvOnly), - webrtc::RtpHeaderExtensionCapability("uri4", 1, - RtpTransceiverDirection::kSendRecv)}; - auto offer = f1_.CreateOffer(opts, nullptr); + RtpHeaderExtensionCapability("uri1", 4, + RtpTransceiverDirection::kStopped), + RtpHeaderExtensionCapability("uri2", 3, + RtpTransceiverDirection::kSendOnly), + RtpHeaderExtensionCapability("uri3", 2, + RtpTransceiverDirection::kRecvOnly), + RtpHeaderExtensionCapability("uri4", 1, + RtpTransceiverDirection::kSendRecv)}; + auto offer = f1_.CreateOfferOrError(opts, nullptr).MoveValue(); opts.media_description_options.back().header_extensions = { - webrtc::RtpHeaderExtensionCapability("uri1", 4, - RtpTransceiverDirection::kSendOnly), - webrtc::RtpHeaderExtensionCapability("uri2", 3, - RtpTransceiverDirection::kRecvOnly), - webrtc::RtpHeaderExtensionCapability("uri3", 2, - RtpTransceiverDirection::kStopped), - webrtc::RtpHeaderExtensionCapability("uri4", 1, - RtpTransceiverDirection::kSendRecv)}; - auto answer = f2_.CreateAnswer(offer.get(), opts, nullptr); + RtpHeaderExtensionCapability("uri1", 4, + RtpTransceiverDirection::kSendOnly), + RtpHeaderExtensionCapability("uri2", 3, + RtpTransceiverDirection::kRecvOnly), + RtpHeaderExtensionCapability("uri3", 2, + RtpTransceiverDirection::kStopped), + RtpHeaderExtensionCapability("uri4", 1, + RtpTransceiverDirection::kSendRecv)}; + auto answer = f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); EXPECT_THAT( answer->contents(), ElementsAre(Property( @@ -1995,23 +2132,23 @@ TEST_F(MediaSessionDescriptionFactoryTest, AnswersUnstoppedExtensions) { TEST_F(MediaSessionDescriptionFactoryTest, AppendsUnstoppedExtensionsToCurrentDescription) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kSendRecv, kActive, &opts); opts.media_description_options.back().header_extensions = { - webrtc::RtpHeaderExtensionCapability("uri1", 1, - RtpTransceiverDirection::kSendRecv)}; - auto offer = f1_.CreateOffer(opts, nullptr); + RtpHeaderExtensionCapability("uri1", 1, + RtpTransceiverDirection::kSendRecv)}; + auto offer = f1_.CreateOfferOrError(opts, nullptr).MoveValue(); opts.media_description_options.back().header_extensions = { - webrtc::RtpHeaderExtensionCapability("uri1", 2, - RtpTransceiverDirection::kSendRecv), - webrtc::RtpHeaderExtensionCapability("uri2", 3, - RtpTransceiverDirection::kRecvOnly), - webrtc::RtpHeaderExtensionCapability("uri3", 5, - RtpTransceiverDirection::kStopped), - webrtc::RtpHeaderExtensionCapability("uri4", 6, - RtpTransceiverDirection::kSendRecv)}; - auto offer2 = f1_.CreateOffer(opts, offer.get()); + RtpHeaderExtensionCapability("uri1", 2, + RtpTransceiverDirection::kSendRecv), + RtpHeaderExtensionCapability("uri2", 3, + RtpTransceiverDirection::kRecvOnly), + RtpHeaderExtensionCapability("uri3", 5, + RtpTransceiverDirection::kStopped), + 
RtpHeaderExtensionCapability("uri4", 6, + RtpTransceiverDirection::kSendRecv)}; + auto offer2 = f1_.CreateOfferOrError(opts, offer.get()).MoveValue(); EXPECT_THAT( offer2->contents(), ElementsAre(Property( @@ -2025,24 +2162,24 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, AppendsStoppedExtensionIfKnownAndPresentInTheOffer) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &opts); opts.media_description_options.back().header_extensions = { - webrtc::RtpHeaderExtensionCapability("uri1", 1, - RtpTransceiverDirection::kSendRecv), - webrtc::RtpHeaderExtensionCapability("uri2", 2, - RtpTransceiverDirection::kSendRecv)}; - auto offer = f1_.CreateOffer(opts, nullptr); + RtpHeaderExtensionCapability("uri1", 1, + RtpTransceiverDirection::kSendRecv), + RtpHeaderExtensionCapability("uri2", 2, + RtpTransceiverDirection::kSendRecv)}; + auto offer = f1_.CreateOfferOrError(opts, nullptr).MoveValue(); // Check that a subsequent offer after setting "uri2" to stopped no longer // contains the extension. opts.media_description_options.back().header_extensions = { - webrtc::RtpHeaderExtensionCapability("uri1", 1, - RtpTransceiverDirection::kSendRecv), - webrtc::RtpHeaderExtensionCapability("uri2", 2, - RtpTransceiverDirection::kStopped)}; - auto offer2 = f1_.CreateOffer(opts, offer.get()); + RtpHeaderExtensionCapability("uri1", 1, + RtpTransceiverDirection::kSendRecv), + RtpHeaderExtensionCapability("uri2", 2, + RtpTransceiverDirection::kStopped)}; + auto offer2 = f1_.CreateOfferOrError(opts, offer.get()).MoveValue(); EXPECT_THAT( offer2->contents(), ElementsAre(Property( @@ -2052,95 +2189,208 @@ TEST_F(MediaSessionDescriptionFactoryTest, Field(&RtpExtension::uri, "uri2"))))))); } +// Create a video offer and answer and ensure the RTP header extensions +// matches what we expect. 
TEST_F(MediaSessionDescriptionFactoryTest, - TestOfferAnswerWithEncryptedRtpExtensionsBoth) { + TestOfferAnswerWithRtpExtensionHeadersWithNoEncryption) { MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - - f1_.set_enable_encrypted_rtp_header_extensions(true); - f2_.set_enable_encrypted_rtp_header_extensions(true); - SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1), MAKE_VECTOR(kVideoRtpExtension1), &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2), MAKE_VECTOR(kVideoRtpExtension2), &opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); - EXPECT_EQ( - MAKE_VECTOR(kAudioRtpExtensionEncrypted1), - GetFirstAudioContentDescription(offer.get())->rtp_header_extensions()); - EXPECT_EQ( - MAKE_VECTOR(kVideoRtpExtensionEncrypted1), - GetFirstVideoContentDescription(offer.get())->rtp_header_extensions()); - EXPECT_EQ( - MAKE_VECTOR(kAudioRtpExtensionEncryptedAnswer), - GetFirstAudioContentDescription(answer.get())->rtp_header_extensions()); - EXPECT_EQ( - MAKE_VECTOR(kVideoRtpExtensionEncryptedAnswer), - GetFirstVideoContentDescription(answer.get())->rtp_header_extensions()); + EXPECT_THAT( + GetFirstAudioContentDescription(offer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kAudioRtpExtension1)); + EXPECT_THAT( + GetFirstVideoContentDescription(offer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kVideoRtpExtension1)); + EXPECT_THAT( + GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kAudioRtpExtensionAnswer)); + EXPECT_THAT( + GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kVideoRtpExtensionAnswer)); } TEST_F(MediaSessionDescriptionFactoryTest, - TestOfferAnswerWithEncryptedRtpExtensionsOffer) { + TestOfferAnswerWithRtpExtensionHeadersWithEncryption) { MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); f1_.set_enable_encrypted_rtp_header_extensions(true); + f2_.set_enable_encrypted_rtp_header_extensions(true); - SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1), - MAKE_VECTOR(kVideoRtpExtension1), &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); - SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2), - MAKE_VECTOR(kVideoRtpExtension2), &opts); + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtensionEncrypted1), + MAKE_VECTOR(kVideoRtpExtensionEncrypted1), + &opts); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtensionEncrypted2), + MAKE_VECTOR(kVideoRtpExtensionEncrypted2), + &opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); - EXPECT_EQ( - MAKE_VECTOR(kAudioRtpExtensionEncrypted1), - GetFirstAudioContentDescription(offer.get())->rtp_header_extensions()); - EXPECT_EQ( - MAKE_VECTOR(kVideoRtpExtensionEncrypted1), - GetFirstVideoContentDescription(offer.get())->rtp_header_extensions()); - EXPECT_EQ( - MAKE_VECTOR(kAudioRtpExtensionAnswer), - 
GetFirstAudioContentDescription(answer.get())->rtp_header_extensions()); - EXPECT_EQ( - MAKE_VECTOR(kVideoRtpExtensionAnswer), - GetFirstVideoContentDescription(answer.get())->rtp_header_extensions()); + EXPECT_THAT( + GetFirstAudioContentDescription(offer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kAudioRtpExtensionEncrypted1)); + EXPECT_THAT( + GetFirstVideoContentDescription(offer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kVideoRtpExtensionEncrypted1)); + EXPECT_THAT( + GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kAudioRtpExtensionEncryptedAnswer)); + EXPECT_THAT( + GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kVideoRtpExtensionEncryptedAnswer)); } TEST_F(MediaSessionDescriptionFactoryTest, - TestOfferAnswerWithEncryptedRtpExtensionsAnswer) { + NegotiationWithEncryptedRtpExtensionHeadersDisabledInReceiver) { MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - f2_.set_enable_encrypted_rtp_header_extensions(true); + f2_.set_enable_encrypted_rtp_header_extensions(false); - SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1), - MAKE_VECTOR(kVideoRtpExtension1), &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); - SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2), - MAKE_VECTOR(kVideoRtpExtension2), &opts); + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtensionEncrypted1), + MAKE_VECTOR(kVideoRtpExtensionEncrypted1), + &opts); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtensionEncrypted2), + MAKE_VECTOR(kVideoRtpExtensionEncrypted2), + &opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); - EXPECT_EQ( - MAKE_VECTOR(kAudioRtpExtension1), - GetFirstAudioContentDescription(offer.get())->rtp_header_extensions()); - EXPECT_EQ( - MAKE_VECTOR(kVideoRtpExtension1), - GetFirstVideoContentDescription(offer.get())->rtp_header_extensions()); - EXPECT_EQ( - MAKE_VECTOR(kAudioRtpExtensionAnswer), - GetFirstAudioContentDescription(answer.get())->rtp_header_extensions()); - EXPECT_EQ( - MAKE_VECTOR(kVideoRtpExtensionAnswer), - GetFirstVideoContentDescription(answer.get())->rtp_header_extensions()); + EXPECT_THAT( + GetFirstAudioContentDescription(offer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kAudioRtpExtensionEncrypted1)); + EXPECT_THAT( + GetFirstVideoContentDescription(offer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kVideoRtpExtensionEncrypted1)); + EXPECT_THAT( + GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kAudioRtpExtensionAnswer)); + EXPECT_THAT( + GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kVideoRtpExtensionAnswer)); +} + +TEST_F(MediaSessionDescriptionFactoryTest, + NegotiationWithEncryptedRtpExtensionHeadersDisabledInSender) { + MediaSessionOptions opts; + AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); + + f1_.set_enable_encrypted_rtp_header_extensions(false); + + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtensionEncrypted1), + MAKE_VECTOR(kVideoRtpExtensionEncrypted1), + &opts); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, 
nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtensionEncrypted2), + MAKE_VECTOR(kVideoRtpExtensionEncrypted2), + &opts); + std::unique_ptr answer = + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); + + EXPECT_THAT( + GetFirstAudioContentDescription(offer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kAudioRtpExtensionAnswer)); + EXPECT_THAT( + GetFirstVideoContentDescription(offer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kVideoRtpExtensionAnswer)); + EXPECT_THAT( + GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kAudioRtpExtensionAnswer)); + EXPECT_THAT( + GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kVideoRtpExtensionAnswer)); +} + +TEST_F(MediaSessionDescriptionFactoryTest, + PreferEncryptedRtpHeaderExtensionsWhenEncryptionEnabled) { + MediaSessionOptions opts; + AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); + + SetAudioVideoRtpHeaderExtensions( + MAKE_VECTOR(kAudioRtpExtensionMixedEncryption1), + MAKE_VECTOR(kVideoRtpExtensionMixedEncryption), &opts); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + SetAudioVideoRtpHeaderExtensions( + MAKE_VECTOR(kAudioRtpExtensionMixedEncryption2), + MAKE_VECTOR(kVideoRtpExtensionMixedEncryption), &opts); + std::unique_ptr answer = + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); + ASSERT_TRUE(answer.get()); + + EXPECT_THAT( + GetFirstAudioContentDescription(offer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kAudioRtpExtensionMixedEncryption1)); + EXPECT_THAT( + GetFirstVideoContentDescription(offer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kVideoRtpExtensionMixedEncryption)); + EXPECT_THAT( + GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(), + UnorderedElementsAreArray( + kAudioRtpExtensionMixedEncryptionAnswerEncryptionEnabled)); + EXPECT_THAT( + GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), + UnorderedElementsAreArray( + kVideoRtpExtensionMixedEncryptionAnswerEncryptionEnabled)); +} + +TEST_F(MediaSessionDescriptionFactoryTest, + UseUnencryptedRtpHeaderExtensionsWhenEncryptionDisabled) { + MediaSessionOptions opts; + AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); + + f1_.set_enable_encrypted_rtp_header_extensions(false); + f2_.set_enable_encrypted_rtp_header_extensions(false); + + SetAudioVideoRtpHeaderExtensions( + MAKE_VECTOR(kAudioRtpExtensionMixedEncryption1), + MAKE_VECTOR(kVideoRtpExtensionMixedEncryption), &opts); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + SetAudioVideoRtpHeaderExtensions( + MAKE_VECTOR(kAudioRtpExtensionMixedEncryption2), + MAKE_VECTOR(kVideoRtpExtensionMixedEncryption), &opts); + std::unique_ptr answer = + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); + ASSERT_TRUE(answer.get()); + + EXPECT_THAT( + GetFirstAudioContentDescription(offer.get())->rtp_header_extensions(), + UnorderedElementsAreArray( + kAudioRtpExtensionMixedEncryptionAnswerEncryptionDisabled)); + EXPECT_THAT( + GetFirstVideoContentDescription(offer.get())->rtp_header_extensions(), + UnorderedElementsAreArray( + kVideoRtpExtensionMixedEncryptionAnswerEncryptionDisabled)); + EXPECT_THAT( + GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(), + 
UnorderedElementsAreArray( + kAudioRtpExtensionMixedEncryptionAnswerEncryptionDisabled)); + EXPECT_THAT( + GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), + UnorderedElementsAreArray( + kVideoRtpExtensionMixedEncryptionAnswerEncryptionDisabled)); } // Create an audio, video, data answer without legacy StreamParams. @@ -2148,16 +2398,17 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAnswerWithoutLegacyStreams) { MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const ContentInfo* ac = answer->GetContentByName("audio"); const ContentInfo* vc = answer->GetContentByName("video"); - ASSERT_TRUE(ac != NULL); - ASSERT_TRUE(vc != NULL); - const AudioContentDescription* acd = ac->media_description()->as_audio(); - const VideoContentDescription* vcd = vc->media_description()->as_video(); + ASSERT_TRUE(ac); + ASSERT_TRUE(vc); + const MediaContentDescription* acd = ac->media_description(); + const MediaContentDescription* vcd = vc->media_description(); EXPECT_FALSE(acd->has_ssrcs()); // No StreamParams. EXPECT_FALSE(vcd->has_ssrcs()); // No StreamParams. @@ -2176,12 +2427,13 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerRtcpMux) { offer_opts.rtcp_mux_enabled = true; answer_opts.rtcp_mux_enabled = true; - offer = f1_.CreateOffer(offer_opts, NULL); - answer = f2_.CreateAnswer(offer.get(), answer_opts, NULL); - ASSERT_TRUE(NULL != GetFirstAudioContentDescription(offer.get())); - ASSERT_TRUE(NULL != GetFirstVideoContentDescription(offer.get())); - ASSERT_TRUE(NULL != GetFirstAudioContentDescription(answer.get())); - ASSERT_TRUE(NULL != GetFirstVideoContentDescription(answer.get())); + offer = f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue(); + answer = + f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue(); + ASSERT_TRUE(GetFirstAudioContentDescription(offer.get())); + ASSERT_TRUE(GetFirstVideoContentDescription(offer.get())); + ASSERT_TRUE(GetFirstAudioContentDescription(answer.get())); + ASSERT_TRUE(GetFirstVideoContentDescription(answer.get())); EXPECT_TRUE(GetFirstAudioContentDescription(offer.get())->rtcp_mux()); EXPECT_TRUE(GetFirstVideoContentDescription(offer.get())->rtcp_mux()); EXPECT_TRUE(GetFirstAudioContentDescription(answer.get())->rtcp_mux()); @@ -2189,12 +2441,13 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerRtcpMux) { offer_opts.rtcp_mux_enabled = true; answer_opts.rtcp_mux_enabled = false; - offer = f1_.CreateOffer(offer_opts, NULL); - answer = f2_.CreateAnswer(offer.get(), answer_opts, NULL); - ASSERT_TRUE(NULL != GetFirstAudioContentDescription(offer.get())); - ASSERT_TRUE(NULL != GetFirstVideoContentDescription(offer.get())); - ASSERT_TRUE(NULL != GetFirstAudioContentDescription(answer.get())); - ASSERT_TRUE(NULL != GetFirstVideoContentDescription(answer.get())); + offer = f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue(); + answer = + f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue(); + ASSERT_TRUE(GetFirstAudioContentDescription(offer.get())); + ASSERT_TRUE(GetFirstVideoContentDescription(offer.get())); + ASSERT_TRUE(GetFirstAudioContentDescription(answer.get())); + 
ASSERT_TRUE(GetFirstVideoContentDescription(answer.get())); EXPECT_TRUE(GetFirstAudioContentDescription(offer.get())->rtcp_mux()); EXPECT_TRUE(GetFirstVideoContentDescription(offer.get())->rtcp_mux()); EXPECT_FALSE(GetFirstAudioContentDescription(answer.get())->rtcp_mux()); @@ -2202,12 +2455,13 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerRtcpMux) { offer_opts.rtcp_mux_enabled = false; answer_opts.rtcp_mux_enabled = true; - offer = f1_.CreateOffer(offer_opts, NULL); - answer = f2_.CreateAnswer(offer.get(), answer_opts, NULL); - ASSERT_TRUE(NULL != GetFirstAudioContentDescription(offer.get())); - ASSERT_TRUE(NULL != GetFirstVideoContentDescription(offer.get())); - ASSERT_TRUE(NULL != GetFirstAudioContentDescription(answer.get())); - ASSERT_TRUE(NULL != GetFirstVideoContentDescription(answer.get())); + offer = f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue(); + answer = + f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue(); + ASSERT_TRUE(GetFirstAudioContentDescription(offer.get())); + ASSERT_TRUE(GetFirstVideoContentDescription(offer.get())); + ASSERT_TRUE(GetFirstAudioContentDescription(answer.get())); + ASSERT_TRUE(GetFirstVideoContentDescription(answer.get())); EXPECT_FALSE(GetFirstAudioContentDescription(offer.get())->rtcp_mux()); EXPECT_FALSE(GetFirstVideoContentDescription(offer.get())->rtcp_mux()); EXPECT_FALSE(GetFirstAudioContentDescription(answer.get())->rtcp_mux()); @@ -2215,12 +2469,13 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerRtcpMux) { offer_opts.rtcp_mux_enabled = false; answer_opts.rtcp_mux_enabled = false; - offer = f1_.CreateOffer(offer_opts, NULL); - answer = f2_.CreateAnswer(offer.get(), answer_opts, NULL); - ASSERT_TRUE(NULL != GetFirstAudioContentDescription(offer.get())); - ASSERT_TRUE(NULL != GetFirstVideoContentDescription(offer.get())); - ASSERT_TRUE(NULL != GetFirstAudioContentDescription(answer.get())); - ASSERT_TRUE(NULL != GetFirstVideoContentDescription(answer.get())); + offer = f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue(); + answer = + f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue(); + ASSERT_TRUE(GetFirstAudioContentDescription(offer.get())); + ASSERT_TRUE(GetFirstVideoContentDescription(offer.get())); + ASSERT_TRUE(GetFirstAudioContentDescription(answer.get())); + ASSERT_TRUE(GetFirstVideoContentDescription(answer.get())); EXPECT_FALSE(GetFirstAudioContentDescription(offer.get())->rtcp_mux()); EXPECT_FALSE(GetFirstVideoContentDescription(offer.get())->rtcp_mux()); EXPECT_FALSE(GetFirstAudioContentDescription(answer.get())->rtcp_mux()); @@ -2230,23 +2485,24 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerRtcpMux) { // Create an audio-only answer to a video offer. 
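The test below stops the video section on the answering side and expects the answer to still list a video content, but with its rejected flag set. A rough standalone sketch of that rule, with hypothetical stand-in structs rather than the WebRTC ContentInfo API:

#include <string>
#include <vector>

// Toy model: every offered m-section reappears in the answer; sections the
// answerer has stopped are kept but marked rejected (port 0 in real SDP).
struct OfferedSection {
  std::string mid;
};
struct AnsweredSection {
  std::string mid;
  bool rejected;
};

std::vector<AnsweredSection> AnswerSections(
    const std::vector<OfferedSection>& offered,
    const std::vector<std::string>& stopped_mids) {
  std::vector<AnsweredSection> answer;
  for (const OfferedSection& section : offered) {
    bool stopped = false;
    for (const std::string& mid : stopped_mids) {
      if (mid == section.mid) {
        stopped = true;
        break;
      }
    }
    answer.push_back({section.mid, /*rejected=*/stopped});
  }
  return answer;
}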
TEST_F(MediaSessionDescriptionFactoryTest, TestCreateAudioAnswerToVideo) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kRecvOnly, kActive, &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kRecvOnly, kActive, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); opts.media_description_options[1].stopped = true; std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const ContentInfo* ac = answer->GetContentByName("audio"); const ContentInfo* vc = answer->GetContentByName("video"); - ASSERT_TRUE(ac != NULL); - ASSERT_TRUE(vc != NULL); - ASSERT_TRUE(vc->media_description() != NULL); + ASSERT_TRUE(ac); + ASSERT_TRUE(vc); + ASSERT_TRUE(vc->media_description()); EXPECT_TRUE(vc->rejected); } @@ -2255,20 +2511,21 @@ TEST_F(MediaSessionDescriptionFactoryTest, CreateAnswerToOfferWithRejectedMedia) { MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); ContentInfo* ac = offer->GetContentByName("audio"); ContentInfo* vc = offer->GetContentByName("video"); - ASSERT_TRUE(ac != NULL); - ASSERT_TRUE(vc != NULL); + ASSERT_TRUE(ac); + ASSERT_TRUE(vc); ac->rejected = true; vc->rejected = true; std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); ac = answer->GetContentByName("audio"); vc = answer->GetContentByName("video"); - ASSERT_TRUE(ac != NULL); - ASSERT_TRUE(vc != NULL); + ASSERT_TRUE(ac); + ASSERT_TRUE(vc); EXPECT_TRUE(ac->rejected); EXPECT_TRUE(vc->rejected); } @@ -2277,11 +2534,13 @@ TEST_F(MediaSessionDescriptionFactoryTest, OfferAndAnswerDoesNotHaveMixedByteSessionAttribute) { MediaSessionOptions opts; std::unique_ptr offer = - f1_.CreateOffer(opts, /*current_description=*/nullptr); + f1_.CreateOfferOrError(opts, /*current_description=*/nullptr).MoveValue(); offer->set_extmap_allow_mixed(false); std::unique_ptr answer( - f2_.CreateAnswer(offer.get(), opts, /*current_description=*/nullptr)); + f2_.CreateAnswerOrError(offer.get(), opts, + /*current_description=*/nullptr) + .MoveValue()); EXPECT_FALSE(answer->extmap_allow_mixed()); } @@ -2290,11 +2549,13 @@ TEST_F(MediaSessionDescriptionFactoryTest, OfferAndAnswerHaveMixedByteSessionAttribute) { MediaSessionOptions opts; std::unique_ptr offer = - f1_.CreateOffer(opts, /*current_description=*/nullptr); + f1_.CreateOfferOrError(opts, /*current_description=*/nullptr).MoveValue(); offer->set_extmap_allow_mixed(true); std::unique_ptr answer_support( - f2_.CreateAnswer(offer.get(), opts, /*current_description=*/nullptr)); + f2_.CreateAnswerOrError(offer.get(), opts, + /*current_description=*/nullptr) + .MoveValue()); EXPECT_TRUE(answer_support->extmap_allow_mixed()); } @@ -2304,7 +2565,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &opts); std::unique_ptr offer = - f1_.CreateOffer(opts, 
/*current_description=*/nullptr); + f1_.CreateOfferOrError(opts, /*current_description=*/nullptr).MoveValue(); offer->set_extmap_allow_mixed(false); MediaContentDescription* audio_offer = offer->GetContentDescriptionByName("audio"); @@ -2316,7 +2577,9 @@ TEST_F(MediaSessionDescriptionFactoryTest, video_offer->extmap_allow_mixed_enum()); std::unique_ptr answer( - f2_.CreateAnswer(offer.get(), opts, /*current_description=*/nullptr)); + f2_.CreateAnswerOrError(offer.get(), opts, + /*current_description=*/nullptr) + .MoveValue()); MediaContentDescription* audio_answer = answer->GetContentDescriptionByName("audio"); @@ -2333,7 +2596,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &opts); std::unique_ptr offer = - f1_.CreateOffer(opts, /*current_description=*/nullptr); + f1_.CreateOfferOrError(opts, /*current_description=*/nullptr).MoveValue(); offer->set_extmap_allow_mixed(false); MediaContentDescription* audio_offer = offer->GetContentDescriptionByName("audio"); @@ -2343,7 +2606,9 @@ TEST_F(MediaSessionDescriptionFactoryTest, video_offer->set_extmap_allow_mixed_enum(MediaContentDescription::kMedia); std::unique_ptr answer( - f2_.CreateAnswer(offer.get(), opts, /*current_description=*/nullptr)); + f2_.CreateAnswerOrError(offer.get(), opts, + /*current_description=*/nullptr) + .MoveValue()); MediaContentDescription* audio_answer = answer->GetContentDescriptionByName("audio"); @@ -2360,7 +2625,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &opts); std::unique_ptr offer = - f1_.CreateOffer(opts, /*current_description=*/nullptr); + f1_.CreateOfferOrError(opts, /*current_description=*/nullptr).MoveValue(); offer->set_extmap_allow_mixed(false); MediaContentDescription* audio_offer = offer->GetContentDescriptionByName("audio"); @@ -2370,7 +2635,9 @@ TEST_F(MediaSessionDescriptionFactoryTest, video_offer->set_extmap_allow_mixed_enum(MediaContentDescription::kMedia); std::unique_ptr answer( - f2_.CreateAnswer(offer.get(), opts, /*current_description=*/nullptr)); + f2_.CreateAnswerOrError(offer.get(), opts, + /*current_description=*/nullptr) + .MoveValue()); MediaContentDescription* audio_answer = answer->GetContentDescriptionByName("audio"); @@ -2390,25 +2657,30 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoOffer) { MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kSendRecv, &opts); - AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1, - {kMediaStream1}, 1, &opts); - AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, kAudioTrack1, - {kMediaStream1}, 1, &opts); - AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, kAudioTrack2, - {kMediaStream1}, 1, &opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &opts); + AttachSenderToMediaDescriptionOptions("audio", webrtc::MediaType::AUDIO, + kAudioTrack1, {kMediaStream1}, 1, + &opts); + AttachSenderToMediaDescriptionOptions("audio", webrtc::MediaType::AUDIO, + kAudioTrack2, {kMediaStream1}, 1, + &opts); - f1_.set_secure(SEC_ENABLED); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); - ASSERT_TRUE(offer.get() != NULL); + ASSERT_TRUE(offer.get()); const ContentInfo* ac = offer->GetContentByName("audio"); 
const ContentInfo* vc = offer->GetContentByName("video"); - ASSERT_TRUE(ac != NULL); - ASSERT_TRUE(vc != NULL); - const AudioContentDescription* acd = ac->media_description()->as_audio(); - const VideoContentDescription* vcd = vc->media_description()->as_video(); - EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type()); - EXPECT_EQ(f1_.audio_sendrecv_codecs(), acd->codecs()); + ASSERT_TRUE(ac); + ASSERT_TRUE(vc); + const MediaContentDescription* acd = ac->media_description(); + const MediaContentDescription* vcd = vc->media_description(); + EXPECT_EQ(webrtc::MediaType::AUDIO, acd->type()); + EXPECT_EQ( + codec_lookup_helper_1_.GetCodecVendor()->audio_sendrecv_codecs().codecs(), + acd->codecs()); const StreamParamsVec& audio_streams = acd->streams(); ASSERT_EQ(2U, audio_streams.size()); @@ -2420,49 +2692,47 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoOffer) { ASSERT_EQ(1U, audio_streams[1].ssrcs.size()); EXPECT_NE(0U, audio_streams[1].ssrcs[0]); - EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // default bandwidth (auto) + EXPECT_EQ(kAutoBandwidth, + acd->bandwidth()); // default bandwidth (auto) EXPECT_TRUE(acd->rtcp_mux()); // rtcp-mux defaults on - ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite); - EXPECT_EQ(MEDIA_TYPE_VIDEO, vcd->type()); - EXPECT_EQ(f1_.video_sendrecv_codecs(), vcd->codecs()); - ASSERT_CRYPTO(vcd, 1U, kDefaultSrtpCryptoSuite); + EXPECT_EQ(webrtc::MediaType::VIDEO, vcd->type()); + EXPECT_EQ( + codec_lookup_helper_1_.GetCodecVendor()->video_sendrecv_codecs().codecs(), + vcd->codecs()); const StreamParamsVec& video_streams = vcd->streams(); ASSERT_EQ(1U, video_streams.size()); EXPECT_EQ(video_streams[0].cname, audio_streams[0].cname); EXPECT_EQ(kVideoTrack1, video_streams[0].id); - EXPECT_EQ(kAutoBandwidth, vcd->bandwidth()); // default bandwidth (auto) + EXPECT_EQ(kAutoBandwidth, + vcd->bandwidth()); // default bandwidth (auto) EXPECT_TRUE(vcd->rtcp_mux()); // rtcp-mux defaults on // Update the offer. Add a new video track that is not synched to the // other tracks and replace audio track 2 with audio track 3. 
- AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack2, - {kMediaStream2}, 1, &opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack2, {kMediaStream2}, 1, + &opts); DetachSenderFromMediaSection("audio", kAudioTrack2, &opts); - AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, kAudioTrack3, - {kMediaStream1}, 1, &opts); + AttachSenderToMediaDescriptionOptions("audio", webrtc::MediaType::AUDIO, + kAudioTrack3, {kMediaStream1}, 1, + &opts); std::unique_ptr updated_offer( - f1_.CreateOffer(opts, offer.get())); + f1_.CreateOfferOrError(opts, offer.get()).MoveValue()); - ASSERT_TRUE(updated_offer.get() != NULL); + ASSERT_TRUE(updated_offer.get()); ac = updated_offer->GetContentByName("audio"); vc = updated_offer->GetContentByName("video"); - ASSERT_TRUE(ac != NULL); - ASSERT_TRUE(vc != NULL); - const AudioContentDescription* updated_acd = - ac->media_description()->as_audio(); - const VideoContentDescription* updated_vcd = - vc->media_description()->as_video(); + ASSERT_TRUE(ac); + ASSERT_TRUE(vc); + const MediaContentDescription* updated_acd = ac->media_description(); + const MediaContentDescription* updated_vcd = vc->media_description(); EXPECT_EQ(acd->type(), updated_acd->type()); EXPECT_EQ(acd->codecs(), updated_acd->codecs()); EXPECT_EQ(vcd->type(), updated_vcd->type()); EXPECT_EQ(vcd->codecs(), updated_vcd->codecs()); - ASSERT_CRYPTO(updated_acd, 1U, kDefaultSrtpCryptoSuite); - EXPECT_TRUE(CompareCryptoParams(acd->cryptos(), updated_acd->cryptos())); - ASSERT_CRYPTO(updated_vcd, 1U, kDefaultSrtpCryptoSuite); - EXPECT_TRUE(CompareCryptoParams(vcd->cryptos(), updated_vcd->cryptos())); const StreamParamsVec& updated_audio_streams = updated_acd->streams(); ASSERT_EQ(2U, updated_audio_streams.size()); @@ -2483,38 +2753,40 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoOffer) { // Create an offer with simulcast video stream. 
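The test below attaches one video sender with three simulcast layers and expects a single StreamParams whose "SIM" ssrc group carries three SSRCs. A minimal sketch of that shape with toy structs; the sequential SSRCs are a simplification of this sketch only, the real factory picks random nonzero values:

#include <cstdint>
#include <string>
#include <vector>

// Toy model of legacy SSRC-based simulcast: one stream, N SSRCs, grouped
// under the "SIM" semantics.
struct ToySsrcGroup {
  std::string semantics;
  std::vector<uint32_t> ssrcs;
};
struct ToyStreamParams {
  std::string id;
  std::vector<uint32_t> ssrcs;
  std::vector<ToySsrcGroup> ssrc_groups;
};

ToyStreamParams CreateSimulcastStream(const std::string& track_id,
                                      int num_layers,
                                      uint32_t first_ssrc) {
  ToyStreamParams stream;
  stream.id = track_id;
  for (int i = 0; i < num_layers; ++i) {
    stream.ssrcs.push_back(first_ssrc + static_cast<uint32_t>(i));
  }
  stream.ssrc_groups.push_back({"SIM", stream.ssrcs});
  return stream;
}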
TEST_F(MediaSessionDescriptionFactoryTest, TestCreateSimulcastVideoOffer) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kRecvOnly, kActive, &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &opts); const int num_sim_layers = 3; - AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1, - {kMediaStream1}, num_sim_layers, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, + num_sim_layers, &opts); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); - ASSERT_TRUE(offer.get() != NULL); + ASSERT_TRUE(offer.get()); const ContentInfo* vc = offer->GetContentByName("video"); - ASSERT_TRUE(vc != NULL); - const VideoContentDescription* vcd = vc->media_description()->as_video(); + ASSERT_TRUE(vc); + const MediaContentDescription* vcd = vc->media_description(); const StreamParamsVec& video_streams = vcd->streams(); ASSERT_EQ(1U, video_streams.size()); EXPECT_EQ(kVideoTrack1, video_streams[0].id); const SsrcGroup* sim_ssrc_group = - video_streams[0].get_ssrc_group(cricket::kSimSsrcGroupSemantics); - ASSERT_TRUE(sim_ssrc_group != NULL); + video_streams[0].get_ssrc_group(kSimSsrcGroupSemantics); + ASSERT_TRUE(sim_ssrc_group); EXPECT_EQ(static_cast(num_sim_layers), sim_ssrc_group->ssrcs.size()); } MATCHER(RidDescriptionEquals, "Verifies that two RidDescriptions are equal.") { - const RidDescription& rid1 = ::testing::get<0>(arg); - const RidDescription& rid2 = ::testing::get<1>(arg); + const RidDescription& rid1 = std::get<0>(arg); + const RidDescription& rid2 = std::get<1>(arg); return rid1.rid == rid2.rid && rid1.direction == rid2.direction; } -static void CheckSimulcastInSessionDescription( +void CheckSimulcastInSessionDescription( const SessionDescription* description, const std::string& content_name, const std::vector& send_rids, @@ -2544,7 +2816,7 @@ static void CheckSimulcastInSessionDescription( // Create an offer with spec-compliant simulcast video stream. 
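The spec-compliant variant below signals simulcast with rids rather than SSRC groups: three send rids "f", "h", "q", with the middle layer initially paused. Roughly, this surfaces in SDP as an a=simulcast send list in which paused layers carry a "~" prefix (RFC 8853). A toy rendering as a sketch, not the WebRTC serializer:

#include <cstddef>
#include <string>
#include <vector>

// Toy stand-in for one rid-based simulcast layer.
struct ToySimulcastLayer {
  std::string rid;
  bool is_paused;
};

// Rough sketch of the send half of an a=simulcast attribute; layers are
// separated by ';' and paused layers are prefixed with '~'.
std::string RenderSimulcastSend(const std::vector<ToySimulcastLayer>& layers) {
  std::string out = "a=simulcast:send ";
  for (std::size_t i = 0; i < layers.size(); ++i) {
    if (i > 0) {
      out += ";";
    }
    if (layers[i].is_paused) {
      out += "~";
    }
    out += layers[i].rid;
  }
  return out;
}
// Example: {{"f", false}, {"h", true}, {"q", false}} -> "a=simulcast:send f;~h;q".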
TEST_F(MediaSessionDescriptionFactoryTest, TestCreateCompliantSimulcastOffer) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &opts); std::vector send_rids; @@ -2555,10 +2827,11 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateCompliantSimulcastOffer) { simulcast_layers.AddLayer(SimulcastLayer(send_rids[0].rid, false)); simulcast_layers.AddLayer(SimulcastLayer(send_rids[1].rid, true)); simulcast_layers.AddLayer(SimulcastLayer(send_rids[2].rid, false)); - AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1, - {kMediaStream1}, send_rids, - simulcast_layers, 0, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, + send_rids, simulcast_layers, 0, &opts); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); CheckSimulcastInSessionDescription(offer.get(), "video", send_rids, simulcast_layers); @@ -2568,14 +2841,15 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateCompliantSimulcastOffer) { // In this scenario, RIDs do not need to be negotiated (there is only one). TEST_F(MediaSessionDescriptionFactoryTest, TestOfferWithRidsNoSimulcast) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &opts); RidDescription rid("f", RidDirection::kSend); - AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1, - {kMediaStream1}, {rid}, + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, {rid}, SimulcastLayerList(), 0, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); ASSERT_NE(offer.get(), nullptr); const ContentInfo* content = offer->GetContentByName("video"); @@ -2594,16 +2868,17 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestOfferWithRidsNoSimulcast) { // In this scenario, the SFU is the caller requesting that we send Simulcast. 
TEST_F(MediaSessionDescriptionFactoryTest, TestCreateCompliantSimulcastAnswer) { MediaSessionOptions offer_opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &offer_opts); - AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1, - {kMediaStream1}, 1, &offer_opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &offer_opts); std::unique_ptr offer = - f1_.CreateOffer(offer_opts, nullptr); + f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue(); MediaSessionOptions answer_opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &answer_opts); @@ -2616,11 +2891,11 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateCompliantSimulcastAnswer) { simulcast_layers.AddLayer(SimulcastLayer(rid_descriptions[0].rid, false)); simulcast_layers.AddLayer(SimulcastLayer(rid_descriptions[1].rid, true)); simulcast_layers.AddLayer(SimulcastLayer(rid_descriptions[2].rid, false)); - AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1, - {kMediaStream1}, rid_descriptions, - simulcast_layers, 0, &answer_opts); + AttachSenderToMediaDescriptionOptions( + "video", webrtc::MediaType::VIDEO, kVideoTrack1, {kMediaStream1}, + rid_descriptions, simulcast_layers, 0, &answer_opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), answer_opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue(); CheckSimulcastInSessionDescription(answer.get(), "video", rid_descriptions, simulcast_layers); @@ -2631,27 +2906,27 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateCompliantSimulcastAnswer) { // Note that RID Direction is not the same as the transceiver direction. 
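As the comment above notes, a rid's direction is independent of the transceiver direction: in the test below the m-section stays kSendRecv while the offered send rid "f" is mirrored back by the answerer as a receive rid. A small sketch of that mirroring with toy enums, not the WebRTC types:

#include <string>

// Toy enums: the section-level direction and the per-rid direction are
// tracked separately and negotiated independently.
enum class ToyTransceiverDirection { kSendRecv, kSendOnly, kRecvOnly };
enum class ToyRidDirection { kSend, kReceive };

struct ToyRid {
  std::string rid;
  ToyRidDirection direction;
};

// The answerer keeps the rid name but flips its direction; the transceiver
// direction of the section is not affected by this.
ToyRid MirrorRidForAnswer(const ToyRid& offered) {
  return {offered.rid, offered.direction == ToyRidDirection::kSend
                           ? ToyRidDirection::kReceive
                           : ToyRidDirection::kSend};
}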
TEST_F(MediaSessionDescriptionFactoryTest, TestAnswerWithRidsNoSimulcast) { MediaSessionOptions offer_opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &offer_opts); RidDescription rid_offer("f", RidDirection::kSend); - AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1, - {kMediaStream1}, {rid_offer}, - SimulcastLayerList(), 0, &offer_opts); + AttachSenderToMediaDescriptionOptions( + "video", webrtc::MediaType::VIDEO, kVideoTrack1, {kMediaStream1}, + {rid_offer}, SimulcastLayerList(), 0, &offer_opts); std::unique_ptr offer = - f1_.CreateOffer(offer_opts, nullptr); + f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue(); MediaSessionOptions answer_opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &answer_opts); RidDescription rid_answer("f", RidDirection::kReceive); - AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1, - {kMediaStream1}, {rid_answer}, - SimulcastLayerList(), 0, &answer_opts); + AttachSenderToMediaDescriptionOptions( + "video", webrtc::MediaType::VIDEO, kVideoTrack1, {kMediaStream1}, + {rid_answer}, SimulcastLayerList(), 0, &answer_opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), answer_opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue(); ASSERT_NE(answer.get(), nullptr); const ContentInfo* content = offer->GetContentByName("video"); @@ -2674,44 +2949,44 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestAnswerWithRidsNoSimulcast) { // adding a new video track and removes one of the audio tracks. 
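The multi-stream answer below checks two invariants about the generated StreamParams: every local stream gets its own nonzero SSRC, and the audio and video streams of one endpoint share a single CNAME. A toy allocator that captures just those two properties (the sequential SSRCs are a simplification; the real code draws random ones):

#include <cstdint>
#include <string>
#include <utility>

// Toy model: one CNAME per session, one fresh nonzero SSRC per stream.
struct ToyStream {
  std::string id;
  std::string cname;
  uint32_t ssrc;
};

class ToyStreamAllocator {
 public:
  explicit ToyStreamAllocator(std::string cname) : cname_(std::move(cname)) {}

  ToyStream AddStream(const std::string& track_id) {
    return {track_id, cname_, next_ssrc_++};
  }

 private:
  std::string cname_;
  uint32_t next_ssrc_ = 1;  // Never hands out 0.
};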
TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoAnswer) { MediaSessionOptions offer_opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kRecvOnly, kActive, &offer_opts); - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kRecvOnly, kActive, &offer_opts); - f1_.set_secure(SEC_ENABLED); - f2_.set_secure(SEC_ENABLED); - std::unique_ptr offer = f1_.CreateOffer(offer_opts, NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue(); MediaSessionOptions answer_opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kSendRecv, kActive, &answer_opts); - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &answer_opts); - AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack1, - {kMediaStream1}, 1, &answer_opts); - AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, kAudioTrack1, - {kMediaStream1}, 1, &answer_opts); - AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, kAudioTrack2, - {kMediaStream1}, 1, &answer_opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &answer_opts); + AttachSenderToMediaDescriptionOptions("audio", webrtc::MediaType::AUDIO, + kAudioTrack1, {kMediaStream1}, 1, + &answer_opts); + AttachSenderToMediaDescriptionOptions("audio", webrtc::MediaType::AUDIO, + kAudioTrack2, {kMediaStream1}, 1, + &answer_opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), answer_opts, NULL); + f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue(); - ASSERT_TRUE(answer.get() != NULL); + ASSERT_TRUE(answer.get()); const ContentInfo* ac = answer->GetContentByName("audio"); const ContentInfo* vc = answer->GetContentByName("video"); - ASSERT_TRUE(ac != NULL); - ASSERT_TRUE(vc != NULL); - const AudioContentDescription* acd = ac->media_description()->as_audio(); - const VideoContentDescription* vcd = vc->media_description()->as_video(); - ASSERT_CRYPTO(acd, 1U, kDefaultSrtpCryptoSuite); - ASSERT_CRYPTO(vcd, 1U, kDefaultSrtpCryptoSuite); + ASSERT_TRUE(ac); + ASSERT_TRUE(vc); + const MediaContentDescription* acd = ac->media_description(); + const MediaContentDescription* vcd = vc->media_description(); - EXPECT_EQ(MEDIA_TYPE_AUDIO, acd->type()); + EXPECT_EQ(webrtc::MediaType::AUDIO, acd->type()); EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecsAnswer)); const StreamParamsVec& audio_streams = acd->streams(); @@ -2724,41 +2999,38 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCreateMultiStreamVideoAnswer) { ASSERT_EQ(1U, audio_streams[1].ssrcs.size()); EXPECT_NE(0U, audio_streams[1].ssrcs[0]); - EXPECT_EQ(kAutoBandwidth, acd->bandwidth()); // default bandwidth (auto) + EXPECT_EQ(kAutoBandwidth, + acd->bandwidth()); // default bandwidth (auto) EXPECT_TRUE(acd->rtcp_mux()); // rtcp-mux defaults on - EXPECT_EQ(MEDIA_TYPE_VIDEO, vcd->type()); + EXPECT_EQ(webrtc::MediaType::VIDEO, vcd->type()); EXPECT_THAT(vcd->codecs(), ElementsAreArray(kVideoCodecsAnswer)); const StreamParamsVec& video_streams = vcd->streams(); ASSERT_EQ(1U, video_streams.size()); EXPECT_EQ(video_streams[0].cname, audio_streams[0].cname); 
EXPECT_EQ(kVideoTrack1, video_streams[0].id); - EXPECT_EQ(kAutoBandwidth, vcd->bandwidth()); // default bandwidth (auto) + EXPECT_EQ(kAutoBandwidth, + vcd->bandwidth()); // default bandwidth (auto) EXPECT_TRUE(vcd->rtcp_mux()); // rtcp-mux defaults on // Update the answer. Add a new video track that is not synched to the // other tracks and remove 1 audio track. - AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, kVideoTrack2, - {kMediaStream2}, 1, &answer_opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack2, {kMediaStream2}, 1, + &answer_opts); DetachSenderFromMediaSection("audio", kAudioTrack2, &answer_opts); std::unique_ptr updated_answer( - f2_.CreateAnswer(offer.get(), answer_opts, answer.get())); + f2_.CreateAnswerOrError(offer.get(), answer_opts, answer.get()) + .MoveValue()); - ASSERT_TRUE(updated_answer.get() != NULL); + ASSERT_TRUE(updated_answer.get()); ac = updated_answer->GetContentByName("audio"); vc = updated_answer->GetContentByName("video"); - ASSERT_TRUE(ac != NULL); - ASSERT_TRUE(vc != NULL); - const AudioContentDescription* updated_acd = - ac->media_description()->as_audio(); - const VideoContentDescription* updated_vcd = - vc->media_description()->as_video(); - - ASSERT_CRYPTO(updated_acd, 1U, kDefaultSrtpCryptoSuite); - EXPECT_TRUE(CompareCryptoParams(acd->cryptos(), updated_acd->cryptos())); - ASSERT_CRYPTO(updated_vcd, 1U, kDefaultSrtpCryptoSuite); - EXPECT_TRUE(CompareCryptoParams(vcd->cryptos(), updated_vcd->cryptos())); + ASSERT_TRUE(ac); + ASSERT_TRUE(vc); + const MediaContentDescription* updated_acd = ac->media_description(); + const MediaContentDescription* updated_vcd = vc->media_description(); EXPECT_EQ(acd->type(), updated_acd->type()); EXPECT_EQ(acd->codecs(), updated_acd->codecs()); @@ -2785,9 +3057,10 @@ TEST_F(MediaSessionDescriptionFactoryTest, MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const AudioContentDescription* acd = GetFirstAudioContentDescription(answer.get()); @@ -2798,14 +3071,14 @@ TEST_F(MediaSessionDescriptionFactoryTest, EXPECT_THAT(vcd->codecs(), ElementsAreArray(kVideoCodecsAnswer)); std::unique_ptr updated_offer( - f2_.CreateOffer(opts, answer.get())); + f2_.CreateOfferOrError(opts, answer.get()).MoveValue()); // The expected audio codecs are the common audio codecs from the first // offer/answer exchange plus the audio codecs only `f2_` offer, sorted in // preference order. // TODO(wu): `updated_offer` should not include the codec // (i.e. `kAudioCodecs2[0]`) the other side doesn't support. - const AudioCodec kUpdatedAudioCodecOffer[] = { + const Codec kUpdatedAudioCodecOffer[] = { kAudioCodecsAnswer[0], kAudioCodecsAnswer[1], kAudioCodecs2[0], @@ -2814,96 +3087,111 @@ TEST_F(MediaSessionDescriptionFactoryTest, // The expected video codecs are the common video codecs from the first // offer/answer exchange plus the video codecs only `f2_` offer, sorted in // preference order. 
- const VideoCodec kUpdatedVideoCodecOffer[] = { + const Codec kUpdatedVideoCodecOffer[] = { kVideoCodecsAnswer[0], kVideoCodecs2[1], }; const AudioContentDescription* updated_acd = GetFirstAudioContentDescription(updated_offer.get()); - EXPECT_THAT(updated_acd->codecs(), ElementsAreArray(kUpdatedAudioCodecOffer)); + EXPECT_TRUE(CodecListsMatch(updated_acd->codecs(), kUpdatedAudioCodecOffer)); const VideoContentDescription* updated_vcd = GetFirstVideoContentDescription(updated_offer.get()); - EXPECT_THAT(updated_vcd->codecs(), ElementsAreArray(kUpdatedVideoCodecOffer)); + EXPECT_TRUE(CodecListsMatch(updated_vcd->codecs(), kUpdatedVideoCodecOffer)); } // Test that a reoffer does not reuse audio codecs from a previous media section // that is being recycled. TEST_F(MediaSessionDescriptionFactoryTest, ReOfferDoesNotReUseRecycledAudioCodecs) { - f1_.set_video_codecs({}, {}); - f2_.set_video_codecs({}, {}); + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(CodecList{}, + CodecList{}); + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs(CodecList{}, + CodecList{}); MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "a0", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "a0", RtpTransceiverDirection::kSendRecv, kActive, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); // Recycle the media section by changing its mid. opts.media_description_options[0].mid = "a1"; std::unique_ptr reoffer = - f2_.CreateOffer(opts, answer.get()); + f2_.CreateOfferOrError(opts, answer.get()).MoveValue(); // Expect that the results of the first negotiation are ignored. If the m= // section was not recycled the payload types would match the initial offerer. const AudioContentDescription* acd = GetFirstAudioContentDescription(reoffer.get()); - EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecs2)); + // EXPECT_THAT(acd->codecs(), ElementsAreArray(kAudioCodecs2)), + // except that we don't want to check the PT numbers. + EXPECT_EQ(acd->codecs().size(), + sizeof(kAudioCodecs2) / sizeof(kAudioCodecs2[0])); + for (size_t i = 0; i < acd->codecs().size(); ++i) { + EXPECT_EQ(acd->codecs()[i].name, kAudioCodecs2[i].name); + } } // Test that a reoffer does not reuse video codecs from a previous media section // that is being recycled. TEST_F(MediaSessionDescriptionFactoryTest, ReOfferDoesNotReUseRecycledVideoCodecs) { - f1_.set_audio_codecs({}, {}); - f2_.set_audio_codecs({}, {}); + codec_lookup_helper_1_.GetCodecVendor()->set_audio_codecs(CodecList{}, + CodecList{}); + codec_lookup_helper_2_.GetCodecVendor()->set_audio_codecs(CodecList{}, + CodecList{}); MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "v0", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "v0", RtpTransceiverDirection::kSendRecv, kActive, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); - auto answer = f2_.CreateAnswer(offer.get(), opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + auto answer = f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); // Recycle the media section by changing its mid. 
opts.media_description_options[0].mid = "v1"; std::unique_ptr reoffer = - f2_.CreateOffer(opts, answer.get()); + f2_.CreateOfferOrError(opts, answer.get()).MoveValue(); // Expect that the results of the first negotiation are ignored. If the m= // section was not recycled the payload types would match the initial offerer. const VideoContentDescription* vcd = GetFirstVideoContentDescription(reoffer.get()); - EXPECT_THAT(vcd->codecs(), ElementsAreArray(kVideoCodecs2)); + EXPECT_TRUE(CodecListsMatch(vcd->codecs(), kVideoCodecs2)); } // Test that a reanswer does not reuse audio codecs from a previous media // section that is being recycled. TEST_F(MediaSessionDescriptionFactoryTest, ReAnswerDoesNotReUseRecycledAudioCodecs) { - f1_.set_video_codecs({}, {}); - f2_.set_video_codecs({}, {}); + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(CodecList{}, + CodecList{}); + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs(CodecList{}, + CodecList{}); // Perform initial offer/answer in reverse (`f2_` as offerer) so that the // second offer/answer is forward (`f1_` as offerer). MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "a0", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "a0", RtpTransceiverDirection::kSendRecv, kActive, &opts); - std::unique_ptr offer = f2_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f2_.CreateOfferOrError(opts, nullptr).MoveValue(); std::unique_ptr answer = - f1_.CreateAnswer(offer.get(), opts, nullptr); + f1_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); // Recycle the media section by changing its mid. opts.media_description_options[0].mid = "a1"; std::unique_ptr reoffer = - f1_.CreateOffer(opts, answer.get()); + f1_.CreateOfferOrError(opts, answer.get()).MoveValue(); std::unique_ptr reanswer = - f2_.CreateAnswer(reoffer.get(), opts, offer.get()); + f2_.CreateAnswerOrError(reoffer.get(), opts, offer.get()).MoveValue(); // Expect that the results of the first negotiation are ignored. If the m= // section was not recycled the payload types would match the initial offerer. @@ -2916,25 +3204,28 @@ TEST_F(MediaSessionDescriptionFactoryTest, // section that is being recycled. TEST_F(MediaSessionDescriptionFactoryTest, ReAnswerDoesNotReUseRecycledVideoCodecs) { - f1_.set_audio_codecs({}, {}); - f2_.set_audio_codecs({}, {}); + codec_lookup_helper_1_.GetCodecVendor()->set_audio_codecs(CodecList{}, + CodecList{}); + codec_lookup_helper_2_.GetCodecVendor()->set_audio_codecs(CodecList{}, + CodecList{}); // Perform initial offer/answer in reverse (`f2_` as offerer) so that the // second offer/answer is forward (`f1_` as offerer). MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "v0", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "v0", RtpTransceiverDirection::kSendRecv, kActive, &opts); - std::unique_ptr offer = f2_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f2_.CreateOfferOrError(opts, nullptr).MoveValue(); std::unique_ptr answer = - f1_.CreateAnswer(offer.get(), opts, nullptr); + f1_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); // Recycle the media section by changing its mid. 
opts.media_description_options[0].mid = "v1"; std::unique_ptr reoffer = - f1_.CreateOffer(opts, answer.get()); + f1_.CreateOfferOrError(opts, answer.get()).MoveValue(); std::unique_ptr reanswer = - f2_.CreateAnswer(reoffer.get(), opts, offer.get()); + f2_.CreateAnswerOrError(reoffer.get(), opts, offer.get()).MoveValue(); // Expect that the results of the first negotiation are ignored. If the m= // section was not recycled the payload types would match the initial offerer. @@ -2949,48 +3240,49 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, RespondentCreatesOfferAfterCreatingAnswerWithRtx) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kRecvOnly, kActive, &opts); - std::vector f1_codecs = MAKE_VECTOR(kVideoCodecs1); + std::vector f1_codecs = MAKE_VECTOR(kVideoCodecs1); // This creates rtx for H264 with the payload type `f1_` uses. - AddRtxCodec(cricket::CreateVideoRtxCodec(126, kVideoCodecs1[1].id), - &f1_codecs); - f1_.set_video_codecs(f1_codecs, f1_codecs); + AddRtxCodec(CreateVideoRtxCodec(126, kVideoCodecs1[1].id), &f1_codecs); + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); - std::vector f2_codecs = MAKE_VECTOR(kVideoCodecs2); + std::vector f2_codecs = MAKE_VECTOR(kVideoCodecs2); // This creates rtx for H264 with the payload type `f2_` uses. - AddRtxCodec(cricket::CreateVideoRtxCodec(125, kVideoCodecs2[0].id), - &f2_codecs); - f2_.set_video_codecs(f2_codecs, f2_codecs); + AddRtxCodec(CreateVideoRtxCodec(125, kVideoCodecs2[0].id), &f2_codecs); + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs(f2_codecs, + f2_codecs); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const VideoContentDescription* vcd = GetFirstVideoContentDescription(answer.get()); - std::vector expected_codecs = MAKE_VECTOR(kVideoCodecsAnswer); - AddRtxCodec(cricket::CreateVideoRtxCodec(126, kVideoCodecs1[1].id), - &expected_codecs); + std::vector expected_codecs = MAKE_VECTOR(kVideoCodecsAnswer); + AddRtxCodec(CreateVideoRtxCodec(126, kVideoCodecs1[1].id), &expected_codecs); - EXPECT_EQ(expected_codecs, vcd->codecs()); + EXPECT_TRUE(CodecListsMatch(expected_codecs, vcd->codecs())); // Now, make sure we get same result (except for the order) if `f2_` creates // an updated offer even though the default payload types between `f1_` and // `f2_` are different. 
std::unique_ptr updated_offer( - f2_.CreateOffer(opts, answer.get())); + f2_.CreateOfferOrError(opts, answer.get()).MoveValue()); ASSERT_TRUE(updated_offer); std::unique_ptr updated_answer( - f1_.CreateAnswer(updated_offer.get(), opts, answer.get())); + f1_.CreateAnswerOrError(updated_offer.get(), opts, answer.get()) + .MoveValue()); const VideoContentDescription* updated_vcd = GetFirstVideoContentDescription(updated_answer.get()); - EXPECT_EQ(expected_codecs, updated_vcd->codecs()); + EXPECT_TRUE(CodecListsMatch(expected_codecs, updated_vcd->codecs())); } // Regression test for: @@ -3001,46 +3293,48 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, RespondentCreatesOfferAfterCreatingAnswerWithRemappedRtxPayloadType) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kRecvOnly, kActive, &opts); // We specifically choose different preferred payload types for VP8 to // trigger the issue. - cricket::VideoCodec vp8_offerer = cricket::CreateVideoCodec(100, "VP8"); - cricket::VideoCodec vp8_offerer_rtx = - cricket::CreateVideoRtxCodec(101, vp8_offerer.id); - cricket::VideoCodec vp8_answerer = cricket::CreateVideoCodec(110, "VP8"); - cricket::VideoCodec vp8_answerer_rtx = - cricket::CreateVideoRtxCodec(111, vp8_answerer.id); - cricket::VideoCodec vp9 = cricket::CreateVideoCodec(120, "VP9"); - cricket::VideoCodec vp9_rtx = cricket::CreateVideoRtxCodec(121, vp9.id); - - std::vector f1_codecs = {vp8_offerer, vp8_offerer_rtx}; + Codec vp8_offerer = CreateVideoCodec(100, "VP8"); + Codec vp8_offerer_rtx = CreateVideoRtxCodec(101, vp8_offerer.id); + Codec vp8_answerer = CreateVideoCodec(110, "VP8"); + Codec vp8_answerer_rtx = CreateVideoRtxCodec(111, vp8_answerer.id); + Codec vp9 = CreateVideoCodec(120, "VP9"); + Codec vp9_rtx = CreateVideoRtxCodec(121, vp9.id); + + std::vector f1_codecs = {vp8_offerer, vp8_offerer_rtx}; // We also specifically cause the answerer to prefer VP9, such that if it // *doesn't* honor the existing preferred codec (VP8) we'll notice. - std::vector f2_codecs = {vp9, vp9_rtx, vp8_answerer, - vp8_answerer_rtx}; - - f1_.set_video_codecs(f1_codecs, f1_codecs); - f2_.set_video_codecs(f2_codecs, f2_codecs); - std::vector audio_codecs; - f1_.set_audio_codecs(audio_codecs, audio_codecs); - f2_.set_audio_codecs(audio_codecs, audio_codecs); + std::vector f2_codecs = {vp9, vp9_rtx, vp8_answerer, vp8_answerer_rtx}; + + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs(f2_codecs, + f2_codecs); + std::vector audio_codecs; + codec_lookup_helper_1_.GetCodecVendor()->set_audio_codecs(audio_codecs, + audio_codecs); + codec_lookup_helper_2_.GetCodecVendor()->set_audio_codecs(audio_codecs, + audio_codecs); // Offer will be {VP8, RTX for VP8}. Answer will be the same. - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); // Updated offer *should* be {VP8, RTX for VP8, VP9, RTX for VP9}. // But if the bug is triggered, RTX for VP8 ends up last. 
std::unique_ptr updated_offer( - f2_.CreateOffer(opts, answer.get())); + f2_.CreateOfferOrError(opts, answer.get()).MoveValue()); const VideoContentDescription* vcd = GetFirstVideoContentDescription(updated_offer.get()); - std::vector codecs = vcd->codecs(); + std::vector codecs = vcd->codecs(); ASSERT_EQ(4u, codecs.size()); EXPECT_EQ(vp8_offerer, codecs[0]); EXPECT_EQ(vp8_offerer_rtx, codecs[1]); @@ -3054,20 +3348,21 @@ TEST_F(MediaSessionDescriptionFactoryTest, // use, the added codecs payload types are changed. TEST_F(MediaSessionDescriptionFactoryTest, RespondentCreatesOfferWithVideoAndRtxAfterCreatingAudioAnswer) { - std::vector f1_codecs = MAKE_VECTOR(kVideoCodecs1); + std::vector f1_codecs = MAKE_VECTOR(kVideoCodecs1); // This creates rtx for H264 with the payload type `f1_` uses. - AddRtxCodec(cricket::CreateVideoRtxCodec(126, kVideoCodecs1[1].id), - &f1_codecs); - f1_.set_video_codecs(f1_codecs, f1_codecs); + AddRtxCodec(CreateVideoRtxCodec(126, kVideoCodecs1[1].id), &f1_codecs); + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kRecvOnly, kActive, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const AudioContentDescription* acd = GetFirstAudioContentDescription(answer.get()); @@ -3079,17 +3374,20 @@ TEST_F(MediaSessionDescriptionFactoryTest, opts.media_description_options.clear(); AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - std::vector f2_codecs = MAKE_VECTOR(kVideoCodecs2); + std::vector f2_codecs = MAKE_VECTOR(kVideoCodecs2); + ASSERT_THAT(acd->codecs().size(), Gt(0)); int used_pl_type = acd->codecs()[0].id; f2_codecs[0].id = used_pl_type; // Set the payload type for H264. 
- AddRtxCodec(cricket::CreateVideoRtxCodec(125, used_pl_type), &f2_codecs); - f2_.set_video_codecs(f2_codecs, f2_codecs); + AddRtxCodec(CreateVideoRtxCodec(125, used_pl_type), &f2_codecs); + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs(f2_codecs, + f2_codecs); std::unique_ptr updated_offer( - f2_.CreateOffer(opts, answer.get())); + f2_.CreateOfferOrError(opts, answer.get()).MoveValue()); ASSERT_TRUE(updated_offer); std::unique_ptr updated_answer( - f1_.CreateAnswer(updated_offer.get(), opts, answer.get())); + f1_.CreateAnswerOrError(updated_offer.get(), opts, answer.get()) + .MoveValue()); const AudioContentDescription* updated_acd = GetFirstAudioContentDescription(answer.get()); @@ -3099,12 +3397,12 @@ TEST_F(MediaSessionDescriptionFactoryTest, GetFirstVideoContentDescription(updated_answer.get()); ASSERT_EQ("H264", updated_vcd->codecs()[0].name); - ASSERT_EQ(cricket::kRtxCodecName, updated_vcd->codecs()[1].name); + ASSERT_EQ(kRtxCodecName, updated_vcd->codecs()[1].name); int new_h264_pl_type = updated_vcd->codecs()[0].id; EXPECT_NE(used_pl_type, new_h264_pl_type); - VideoCodec rtx = updated_vcd->codecs()[1]; - int pt_referenced_by_rtx = rtc::FromString( - rtx.params[cricket::kCodecParamAssociatedPayloadType]); + Codec rtx = updated_vcd->codecs()[1]; + int pt_referenced_by_rtx = + FromString(rtx.params[kCodecParamAssociatedPayloadType]); EXPECT_EQ(new_h264_pl_type, pt_referenced_by_rtx); } @@ -3116,28 +3414,29 @@ TEST_F(MediaSessionDescriptionFactoryTest, MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - std::vector f2_codecs = MAKE_VECTOR(kVideoCodecs2); + std::vector f2_codecs = MAKE_VECTOR(kVideoCodecs2); // This creates rtx for H264 with the payload type `f2_` uses. - AddRtxCodec(cricket::CreateVideoRtxCodec(125, kVideoCodecs2[0].id), - &f2_codecs); - f2_.set_video_codecs(f2_codecs, f2_codecs); + AddRtxCodec(CreateVideoRtxCodec(125, kVideoCodecs2[0].id), &f2_codecs); + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs(f2_codecs, + f2_codecs); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); - ASSERT_TRUE(offer.get() != nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const VideoContentDescription* vcd = GetFirstVideoContentDescription(answer.get()); - std::vector expected_codecs = MAKE_VECTOR(kVideoCodecsAnswer); + std::vector expected_codecs = MAKE_VECTOR(kVideoCodecsAnswer); EXPECT_EQ(expected_codecs, vcd->codecs()); // Now, ensure that the RTX codec is created correctly when `f2_` creates an // updated offer, even though the default payload types are different from // those of `f1_`. std::unique_ptr updated_offer( - f2_.CreateOffer(opts, answer.get())); + f2_.CreateOfferOrError(opts, answer.get()).MoveValue()); ASSERT_TRUE(updated_offer); const VideoContentDescription* updated_vcd = @@ -3145,84 +3444,84 @@ TEST_F(MediaSessionDescriptionFactoryTest, // New offer should attempt to add H263, and RTX for H264. 
expected_codecs.push_back(kVideoCodecs2[1]); - AddRtxCodec(cricket::CreateVideoRtxCodec(125, kVideoCodecs1[1].id), - &expected_codecs); - EXPECT_EQ(expected_codecs, updated_vcd->codecs()); + AddRtxCodec(CreateVideoRtxCodec(125, kVideoCodecs1[1].id), &expected_codecs); + EXPECT_TRUE(CodecListsMatch(expected_codecs, updated_vcd->codecs())); } // Test that RTX is ignored when there is no associated payload type parameter. TEST_F(MediaSessionDescriptionFactoryTest, RtxWithoutApt) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kRecvOnly, kActive, &opts); - std::vector f1_codecs = MAKE_VECTOR(kVideoCodecs1); + std::vector f1_codecs = MAKE_VECTOR(kVideoCodecs1); // This creates RTX without associated payload type parameter. - AddRtxCodec(cricket::CreateVideoCodec(126, cricket::kRtxCodecName), - &f1_codecs); - f1_.set_video_codecs(f1_codecs, f1_codecs); + AddRtxCodec(CreateVideoCodec(126, kRtxCodecName), &f1_codecs); + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); - std::vector f2_codecs = MAKE_VECTOR(kVideoCodecs2); + std::vector f2_codecs = MAKE_VECTOR(kVideoCodecs2); // This creates RTX for H264 with the payload type `f2_` uses. - AddRtxCodec(cricket::CreateVideoRtxCodec(125, kVideoCodecs2[0].id), - &f2_codecs); - f2_.set_video_codecs(f2_codecs, f2_codecs); + AddRtxCodec(CreateVideoRtxCodec(125, kVideoCodecs2[0].id), &f2_codecs); + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs(f2_codecs, + f2_codecs); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); // kCodecParamAssociatedPayloadType will always be added to the offer when RTX // is selected. Manually remove kCodecParamAssociatedPayloadType so that it // is possible to test that that RTX is dropped when // kCodecParamAssociatedPayloadType is missing in the offer. MediaContentDescription* media_desc = - offer->GetContentDescriptionByName(cricket::CN_VIDEO); + offer->GetContentDescriptionByName(CN_VIDEO); ASSERT_TRUE(media_desc); - VideoContentDescription* desc = media_desc->as_video(); - std::vector codecs = desc->codecs(); - for (VideoCodec& codec : codecs) { - if (absl::StartsWith(codec.name, cricket::kRtxCodecName)) { + std::vector codecs = media_desc->codecs(); + for (Codec& codec : codecs) { + if (absl::StartsWith(codec.name, kRtxCodecName)) { codec.params.clear(); } } - desc->set_codecs(codecs); + media_desc->set_codecs(codecs); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); EXPECT_THAT( GetCodecNames(GetFirstVideoContentDescription(answer.get())->codecs()), - Not(Contains(cricket::kRtxCodecName))); + Not(Contains(kRtxCodecName))); } // Test that RTX will be filtered out in the answer if its associated payload // type doesn't match the local value. TEST_F(MediaSessionDescriptionFactoryTest, FilterOutRtxIfAptDoesntMatch) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kRecvOnly, kActive, &opts); - std::vector f1_codecs = MAKE_VECTOR(kVideoCodecs1); + std::vector f1_codecs = MAKE_VECTOR(kVideoCodecs1); // This creates RTX for H264 in sender. 
- AddRtxCodec(cricket::CreateVideoRtxCodec(126, kVideoCodecs1[1].id), - &f1_codecs); - f1_.set_video_codecs(f1_codecs, f1_codecs); + AddRtxCodec(CreateVideoRtxCodec(126, kVideoCodecs1[1].id), &f1_codecs); + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); - std::vector f2_codecs = MAKE_VECTOR(kVideoCodecs2); + std::vector f2_codecs = MAKE_VECTOR(kVideoCodecs2); // This creates RTX for H263 in receiver. - AddRtxCodec(cricket::CreateVideoRtxCodec(125, kVideoCodecs2[1].id), - &f2_codecs); - f2_.set_video_codecs(f2_codecs, f2_codecs); + AddRtxCodec(CreateVideoRtxCodec(125, kVideoCodecs2[1].id), &f2_codecs); + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs(f2_codecs, + f2_codecs); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); // Associated payload type doesn't match, therefore, RTX codec is removed in // the answer. std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); EXPECT_THAT( GetCodecNames(GetFirstVideoContentDescription(answer.get())->codecs()), - Not(Contains(cricket::kRtxCodecName))); + Not(Contains(kRtxCodecName))); } // Test that when multiple RTX codecs are offered, only the matched RTX codec @@ -3230,105 +3529,105 @@ TEST_F(MediaSessionDescriptionFactoryTest, FilterOutRtxIfAptDoesntMatch) { TEST_F(MediaSessionDescriptionFactoryTest, FilterOutUnsupportedRtxWhenCreatingAnswer) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kRecvOnly, kActive, &opts); - std::vector f1_codecs = MAKE_VECTOR(kVideoCodecs1); + std::vector f1_codecs = MAKE_VECTOR(kVideoCodecs1); // This creates RTX for H264-SVC in sender. - AddRtxCodec(cricket::CreateVideoRtxCodec(125, kVideoCodecs1[0].id), - &f1_codecs); - f1_.set_video_codecs(f1_codecs, f1_codecs); + AddRtxCodec(CreateVideoRtxCodec(125, kVideoCodecs1[0].id), &f1_codecs); + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); // This creates RTX for H264 in sender. - AddRtxCodec(cricket::CreateVideoRtxCodec(126, kVideoCodecs1[1].id), - &f1_codecs); - f1_.set_video_codecs(f1_codecs, f1_codecs); + AddRtxCodec(CreateVideoRtxCodec(126, kVideoCodecs1[1].id), &f1_codecs); + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); - std::vector f2_codecs = MAKE_VECTOR(kVideoCodecs2); + std::vector f2_codecs = MAKE_VECTOR(kVideoCodecs2); // This creates RTX for H264 in receiver. - AddRtxCodec(cricket::CreateVideoRtxCodec(124, kVideoCodecs2[0].id), - &f2_codecs); - f2_.set_video_codecs(f2_codecs, f1_codecs); + AddRtxCodec(CreateVideoRtxCodec(124, kVideoCodecs2[0].id), &f2_codecs); + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs(f2_codecs, + f1_codecs); // H264-SVC codec is removed in the answer, therefore, associated RTX codec // for H264-SVC should also be removed. 
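For readers skimming the patch: RtxWithoutApt, FilterOutRtxIfAptDoesntMatch and FilterOutUnsupportedRtxWhenCreatingAnswer all pin down the same apt-based pruning behaviour. The sketch below restates that rule in isolation; MiniCodec and PruneRtx are illustrative stand-ins for this note only and are not part of the change or of the real webrtc::Codec API.

// Illustrative sketch of the apt-based RTX pruning these tests rely on.
// `MiniCodec` is a stand-in type, not the real webrtc::Codec.
#include <map>
#include <set>
#include <string>
#include <vector>

struct MiniCodec {
  int id;
  std::string name;  // "rtx" or a primary codec name such as "H264".
  std::map<std::string, std::string> params;
};

// Keep an RTX codec only if it carries an "apt" parameter and that parameter
// references a primary codec that survived negotiation.
std::vector<MiniCodec> PruneRtx(const std::vector<MiniCodec>& negotiated) {
  std::set<int> primary_ids;
  for (const MiniCodec& c : negotiated) {
    if (c.name != "rtx") primary_ids.insert(c.id);
  }
  std::vector<MiniCodec> result;
  for (const MiniCodec& c : negotiated) {
    if (c.name == "rtx") {
      auto apt = c.params.find("apt");
      // Drop RTX with no apt (RtxWithoutApt) or with an apt that points at a
      // payload type that was filtered out (FilterOutRtxIfAptDoesntMatch,
      // FilterOutUnsupportedRtxWhenCreatingAnswer).
      if (apt == c.params.end() || !primary_ids.count(std::stoi(apt->second)))
        continue;
    }
    result.push_back(c);
  }
  return result;
}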
- std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const VideoContentDescription* vcd = GetFirstVideoContentDescription(answer.get()); - std::vector expected_codecs = MAKE_VECTOR(kVideoCodecsAnswer); - AddRtxCodec(cricket::CreateVideoRtxCodec(126, kVideoCodecs1[1].id), - &expected_codecs); + std::vector expected_codecs = MAKE_VECTOR(kVideoCodecsAnswer); + AddRtxCodec(CreateVideoRtxCodec(126, kVideoCodecs1[1].id), &expected_codecs); - EXPECT_EQ(expected_codecs, vcd->codecs()); + EXPECT_TRUE(CodecListsMatch(expected_codecs, vcd->codecs())); } // Test that after one RTX codec has been negotiated, a new offer can attempt // to add another. TEST_F(MediaSessionDescriptionFactoryTest, AddSecondRtxInNewOffer) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kRecvOnly, kActive, &opts); - std::vector f1_codecs = MAKE_VECTOR(kVideoCodecs1); + std::vector f1_codecs = MAKE_VECTOR(kVideoCodecs1); // This creates RTX for H264 for the offerer. - AddRtxCodec(cricket::CreateVideoRtxCodec(126, kVideoCodecs1[1].id), - &f1_codecs); - f1_.set_video_codecs(f1_codecs, f1_codecs); + AddRtxCodec(CreateVideoRtxCodec(126, kVideoCodecs1[1].id), &f1_codecs); + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); ASSERT_TRUE(offer); const VideoContentDescription* vcd = GetFirstVideoContentDescription(offer.get()); - std::vector expected_codecs = MAKE_VECTOR(kVideoCodecs1); - AddRtxCodec(cricket::CreateVideoRtxCodec(126, kVideoCodecs1[1].id), - &expected_codecs); - EXPECT_EQ(expected_codecs, vcd->codecs()); + std::vector expected_codecs = MAKE_VECTOR(kVideoCodecs1); + AddRtxCodec(CreateVideoRtxCodec(126, kVideoCodecs1[1].id), &expected_codecs); + EXPECT_TRUE(CodecListsMatch(expected_codecs, vcd->codecs())); // Now, attempt to add RTX for H264-SVC. - AddRtxCodec(cricket::CreateVideoRtxCodec(125, kVideoCodecs1[0].id), - &f1_codecs); - f1_.set_video_codecs(f1_codecs, f1_codecs); + AddRtxCodec(CreateVideoRtxCodec(125, kVideoCodecs1[0].id), &f1_codecs); + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); std::unique_ptr updated_offer( - f1_.CreateOffer(opts, offer.get())); + f1_.CreateOfferOrError(opts, offer.get()).MoveValue()); ASSERT_TRUE(updated_offer); vcd = GetFirstVideoContentDescription(updated_offer.get()); - AddRtxCodec(cricket::CreateVideoRtxCodec(125, kVideoCodecs1[0].id), - &expected_codecs); - EXPECT_EQ(expected_codecs, vcd->codecs()); + AddRtxCodec(CreateVideoRtxCodec(125, kVideoCodecs1[0].id), &expected_codecs); + EXPECT_TRUE(CodecListsMatch(expected_codecs, vcd->codecs())); } // Test that when RTX is used in conjunction with simulcast, an RTX ssrc is // generated for each simulcast ssrc and correctly grouped. TEST_F(MediaSessionDescriptionFactoryTest, SimSsrcsGenerateMultipleRtxSsrcs) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &opts); // Add simulcast streams. 
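The simulcast expectation that follows (3 video ssrcs plus 3 RTX ssrcs, paired by FID groups) can be summarised by a small check like the one below. SsrcGroupLite and EveryPrimarySsrcHasFidPair are simplified stand-ins, not the StreamParams API the test itself uses, and the assumption that the primary ssrc is listed first in each FID group is mine, not something this hunk states.

// Rough sketch of the FID (flow identification) pairing this test checks:
// each simulcast ssrc gets its own RTX ssrc, tied together by a FID group.
#include <cstdint>
#include <string>
#include <vector>

struct SsrcGroupLite {
  std::string semantics;        // "FID" for RTX, "FEC-FR" for FlexFEC.
  std::vector<uint32_t> ssrcs;  // Assumed order: {primary, protection}.
};

bool EveryPrimarySsrcHasFidPair(const std::vector<uint32_t>& primary_ssrcs,
                                const std::vector<SsrcGroupLite>& groups) {
  for (uint32_t ssrc : primary_ssrcs) {
    bool paired = false;
    for (const SsrcGroupLite& group : groups) {
      if (group.semantics == "FID" && group.ssrcs.size() == 2 &&
          group.ssrcs[0] == ssrc) {
        paired = true;
        break;
      }
    }
    if (!paired) return false;  // A simulcast layer is missing its RTX ssrc.
  }
  return true;
}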
- AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, "stream1", - {"stream1label"}, 3, &opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + "stream1", {"stream1label"}, 3, &opts); // Use a single real codec, and then add RTX for it. - std::vector f1_codecs; - f1_codecs.push_back(cricket::CreateVideoCodec(97, "H264")); - AddRtxCodec(cricket::CreateVideoRtxCodec(125, 97), &f1_codecs); - f1_.set_video_codecs(f1_codecs, f1_codecs); + std::vector f1_codecs; + f1_codecs.push_back(CreateVideoCodec(97, "H264")); + AddRtxCodec(CreateVideoRtxCodec(125, 97), &f1_codecs); + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); // Ensure that the offer has an RTX ssrc for each regular ssrc, and that there // is a FID ssrc + grouping for each. - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); MediaContentDescription* media_desc = - offer->GetContentDescriptionByName(cricket::CN_VIDEO); + offer->GetContentDescriptionByName(CN_VIDEO); ASSERT_TRUE(media_desc); - VideoContentDescription* desc = media_desc->as_video(); - const StreamParamsVec& streams = desc->streams(); + const StreamParamsVec& streams = media_desc->streams(); // Single stream. ASSERT_EQ(1u, streams.size()); // Stream should have 6 ssrcs: 3 for video, 3 for RTX. @@ -3348,31 +3647,32 @@ TEST_F(MediaSessionDescriptionFactoryTest, SimSsrcsGenerateMultipleRtxSsrcs) { // Test that, when the FlexFEC codec is added, a FlexFEC ssrc is created // together with a FEC-FR grouping. Guarded by WebRTC-FlexFEC-03 trial. TEST_F(MediaSessionDescriptionFactoryTest, GenerateFlexfecSsrc) { - webrtc::test::ScopedKeyValueConfig override_field_trials( - field_trials, "WebRTC-FlexFEC-03/Enabled/"); + ScopedKeyValueConfig override_field_trials(field_trials, + "WebRTC-FlexFEC-03/Enabled/"); MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &opts); // Add single stream. - AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, "stream1", - {"stream1label"}, 1, &opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + "stream1", {"stream1label"}, 1, &opts); // Use a single real codec, and then add FlexFEC for it. - std::vector f1_codecs; - f1_codecs.push_back(cricket::CreateVideoCodec(97, "H264")); - f1_codecs.push_back(cricket::CreateVideoCodec(118, "flexfec-03")); - f1_.set_video_codecs(f1_codecs, f1_codecs); + std::vector f1_codecs; + f1_codecs.push_back(CreateVideoCodec(97, "H264")); + f1_codecs.push_back(CreateVideoCodec(118, "flexfec-03")); + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); // Ensure that the offer has a single FlexFEC ssrc and that // there is no FEC-FR ssrc + grouping for each. 
-  std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
-  ASSERT_TRUE(offer.get() != nullptr);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
   MediaContentDescription* media_desc =
-      offer->GetContentDescriptionByName(cricket::CN_VIDEO);
+      offer->GetContentDescriptionByName(CN_VIDEO);
   ASSERT_TRUE(media_desc);
-  VideoContentDescription* desc = media_desc->as_video();
-  const StreamParamsVec& streams = desc->streams();
+  const StreamParamsVec& streams = media_desc->streams();
   // Single stream.
   ASSERT_EQ(1u, streams.size());
   // Stream should have 2 ssrcs: 1 for video, 1 for FlexFEC.
@@ -3391,31 +3691,32 @@ TEST_F(MediaSessionDescriptionFactoryTest, GenerateFlexfecSsrc) {
 // TODO(brandtr): Remove this test when we support simulcast, either through
 // multiple FlexfecSenders, or through multistream protection.
 TEST_F(MediaSessionDescriptionFactoryTest, SimSsrcsGenerateNoFlexfecSsrcs) {
-  webrtc::test::ScopedKeyValueConfig override_field_trials(
-      field_trials, "WebRTC-FlexFEC-03/Enabled/");
+  ScopedKeyValueConfig override_field_trials(field_trials,
+                                             "WebRTC-FlexFEC-03/Enabled/");
   MediaSessionOptions opts;
-  AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video",
+  AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video",
                              RtpTransceiverDirection::kSendRecv, kActive,
                              &opts);
   // Add simulcast streams.
-  AttachSenderToMediaDescriptionOptions("video", MEDIA_TYPE_VIDEO, "stream1",
-                                        {"stream1label"}, 3, &opts);
+  AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO,
+                                        "stream1", {"stream1label"}, 3, &opts);
   // Use a single real codec, and then add FlexFEC for it.
-  std::vector<VideoCodec> f1_codecs;
-  f1_codecs.push_back(cricket::CreateVideoCodec(97, "H264"));
-  f1_codecs.push_back(cricket::CreateVideoCodec(118, "flexfec-03"));
-  f1_.set_video_codecs(f1_codecs, f1_codecs);
+  std::vector<Codec> f1_codecs;
+  f1_codecs.push_back(CreateVideoCodec(97, "H264"));
+  f1_codecs.push_back(CreateVideoCodec(118, "flexfec-03"));
+  codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs,
+                                                            f1_codecs);
   // Ensure that the offer has no FlexFEC ssrcs for each regular ssrc, and that
   // there is no FEC-FR ssrc + grouping for each.
-  std::unique_ptr<SessionDescription> offer = f1_.CreateOffer(opts, nullptr);
-  ASSERT_TRUE(offer.get() != nullptr);
+  std::unique_ptr<SessionDescription> offer =
+      f1_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
   MediaContentDescription* media_desc =
-      offer->GetContentDescriptionByName(cricket::CN_VIDEO);
+      offer->GetContentDescriptionByName(CN_VIDEO);
   ASSERT_TRUE(media_desc);
-  VideoContentDescription* desc = media_desc->as_video();
-  const StreamParamsVec& streams = desc->streams();
+  const StreamParamsVec& streams = media_desc->streams();
   // Single stream.
   ASSERT_EQ(1u, streams.size());
   // Stream should have 3 ssrcs: 3 for video, 0 for FlexFEC.
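Taken together, the two FlexFEC tests above encode a simple ssrc-count rule, restated below purely from the comments visible in this hunk ("2 ssrcs: 1 for video, 1 for FlexFEC" versus "3 ssrcs: 3 for video, 0 for FlexFEC"). The helper is a sketch for this note only; the single-layer condition is inferred from these two cases, not from the factory implementation.

// Minimal sketch, inferred only from the two expectations visible above: with
// the WebRTC-FlexFEC-03 trial enabled, a single-layer stream gets one extra
// FlexFEC ssrc, while a simulcast stream currently gets none.
#include <cstddef>

constexpr size_t ExpectedSsrcCount(size_t simulcast_layers,
                                   bool flexfec_offered) {
  // "Stream should have 2 ssrcs: 1 for video, 1 for FlexFEC."  (1 layer)
  // "Stream should have 3 ssrcs: 3 for video, 0 for FlexFEC."  (3 layers)
  return simulcast_layers +
         ((flexfec_offered && simulcast_layers == 1) ? 1u : 0u);
}

static_assert(ExpectedSsrcCount(1, true) == 2, "single stream + FlexFEC");
static_assert(ExpectedSsrcCount(3, true) == 3, "simulcast, FlexFEC skipped");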
@@ -3443,21 +3744,22 @@ TEST_F(MediaSessionDescriptionFactoryTest, SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension1), MAKE_VECTOR(kVideoRtpExtension1), &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension2), MAKE_VECTOR(kVideoRtpExtension2), &opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, NULL); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); - EXPECT_EQ( - MAKE_VECTOR(kAudioRtpExtensionAnswer), - GetFirstAudioContentDescription(answer.get())->rtp_header_extensions()); - EXPECT_EQ( - MAKE_VECTOR(kVideoRtpExtensionAnswer), - GetFirstVideoContentDescription(answer.get())->rtp_header_extensions()); + EXPECT_THAT( + GetFirstAudioContentDescription(answer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kAudioRtpExtensionAnswer)); + EXPECT_THAT( + GetFirstVideoContentDescription(answer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kVideoRtpExtensionAnswer)); std::unique_ptr updated_offer( - f2_.CreateOffer(opts, answer.get())); + f2_.CreateOfferOrError(opts, answer.get()).MoveValue()); // The expected RTP header extensions in the new offer are the resulting // extensions from the first offer/answer exchange plus the extensions only @@ -3480,13 +3782,13 @@ TEST_F(MediaSessionDescriptionFactoryTest, const AudioContentDescription* updated_acd = GetFirstAudioContentDescription(updated_offer.get()); - EXPECT_EQ(MAKE_VECTOR(kUpdatedAudioRtpExtensions), - updated_acd->rtp_header_extensions()); + EXPECT_THAT(updated_acd->rtp_header_extensions(), + UnorderedElementsAreArray(kUpdatedAudioRtpExtensions)); const VideoContentDescription* updated_vcd = GetFirstVideoContentDescription(updated_offer.get()); - EXPECT_EQ(MAKE_VECTOR(kUpdatedVideoRtpExtensions), - updated_vcd->rtp_header_extensions()); + EXPECT_THAT(updated_vcd->rtp_header_extensions(), + UnorderedElementsAreArray(kUpdatedVideoRtpExtensions)); } // Verify that if the same RTP extension URI is used for audio and video, the @@ -3498,7 +3800,8 @@ TEST_F(MediaSessionDescriptionFactoryTest, RtpExtensionIdReused) { SetAudioVideoRtpHeaderExtensions(MAKE_VECTOR(kAudioRtpExtension3), MAKE_VECTOR(kVideoRtpExtension3), &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); // Since the audio extensions used ID 3 for "both_audio_and_video", so should // the video extensions. @@ -3507,86 +3810,54 @@ TEST_F(MediaSessionDescriptionFactoryTest, RtpExtensionIdReused) { kAudioRtpExtension3[1], }; - EXPECT_EQ( - MAKE_VECTOR(kAudioRtpExtension3), - GetFirstAudioContentDescription(offer.get())->rtp_header_extensions()); - EXPECT_EQ( - MAKE_VECTOR(kExpectedVideoRtpExtension), - GetFirstVideoContentDescription(offer.get())->rtp_header_extensions()); - - // Nothing should change when creating a new offer - std::unique_ptr updated_offer( - f1_.CreateOffer(opts, offer.get())); - - EXPECT_EQ(MAKE_VECTOR(kAudioRtpExtension3), - GetFirstAudioContentDescription(updated_offer.get()) - ->rtp_header_extensions()); - EXPECT_EQ(MAKE_VECTOR(kExpectedVideoRtpExtension), - GetFirstVideoContentDescription(updated_offer.get()) - ->rtp_header_extensions()); -} - -// Same as "RtpExtensionIdReused" above for encrypted RTP extensions. 
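The header-extension expectations in this hunk describe a two-step rule: the answer keeps the intersection of what was offered and what is supported locally, and a later re-offer starts from that negotiated set and appends the local-only extensions. A minimal sketch of the re-offer step, using a stand-in RtpExtensionLite type rather than the real RtpExtension (and ignoring id reassignment), looks like this:

// Sketch only; RtpExtensionLite carries just an id and a URI.
#include <string>
#include <vector>

struct RtpExtensionLite {
  std::string uri;
  int id;
};

std::vector<RtpExtensionLite> BuildUpdatedOfferExtensions(
    const std::vector<RtpExtensionLite>& negotiated,
    const std::vector<RtpExtensionLite>& local) {
  // Start from the extensions that survived the first offer/answer exchange.
  std::vector<RtpExtensionLite> result = negotiated;
  for (const RtpExtensionLite& ext : local) {
    bool already_present = false;
    for (const RtpExtensionLite& kept : result) {
      if (kept.uri == ext.uri) {
        already_present = true;
        break;
      }
    }
    // Append the extensions only the local side supports.
    if (!already_present) result.push_back(ext);
  }
  return result;
}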
-TEST_F(MediaSessionDescriptionFactoryTest, RtpExtensionIdReusedEncrypted) { - MediaSessionOptions opts; - AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - - f1_.set_enable_encrypted_rtp_header_extensions(true); - f2_.set_enable_encrypted_rtp_header_extensions(true); - - SetAudioVideoRtpHeaderExtensions( - MAKE_VECTOR(kAudioRtpExtension3ForEncryption), - MAKE_VECTOR(kVideoRtpExtension3ForEncryption), &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, NULL); - - EXPECT_EQ( - MAKE_VECTOR(kAudioRtpExtension3ForEncryptionOffer), - GetFirstAudioContentDescription(offer.get())->rtp_header_extensions()); - EXPECT_EQ( - MAKE_VECTOR(kVideoRtpExtension3ForEncryptionOffer), - GetFirstVideoContentDescription(offer.get())->rtp_header_extensions()); + EXPECT_THAT( + GetFirstAudioContentDescription(offer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kAudioRtpExtension3)); + EXPECT_THAT( + GetFirstVideoContentDescription(offer.get())->rtp_header_extensions(), + UnorderedElementsAreArray(kExpectedVideoRtpExtension)); // Nothing should change when creating a new offer std::unique_ptr updated_offer( - f1_.CreateOffer(opts, offer.get())); + f1_.CreateOfferOrError(opts, offer.get()).MoveValue()); - EXPECT_EQ(MAKE_VECTOR(kAudioRtpExtension3ForEncryptionOffer), - GetFirstAudioContentDescription(updated_offer.get()) - ->rtp_header_extensions()); - EXPECT_EQ(MAKE_VECTOR(kVideoRtpExtension3ForEncryptionOffer), - GetFirstVideoContentDescription(updated_offer.get()) - ->rtp_header_extensions()); + EXPECT_THAT(GetFirstAudioContentDescription(updated_offer.get()) + ->rtp_header_extensions(), + UnorderedElementsAreArray(kAudioRtpExtension3)); + EXPECT_THAT(GetFirstVideoContentDescription(updated_offer.get()) + ->rtp_header_extensions(), + UnorderedElementsAreArray(kExpectedVideoRtpExtension)); } TEST(MediaSessionDescription, CopySessionDescription) { SessionDescription source; - cricket::ContentGroup group(cricket::CN_AUDIO); + ContentGroup group(CN_AUDIO); source.AddGroup(group); std::unique_ptr acd = std::make_unique(); acd->set_codecs(MAKE_VECTOR(kAudioCodecs1)); acd->AddLegacyStream(1); - source.AddContent(cricket::CN_AUDIO, MediaProtocolType::kRtp, acd->Clone()); + source.AddContent(CN_AUDIO, MediaProtocolType::kRtp, acd->Clone()); std::unique_ptr vcd = std::make_unique(); vcd->set_codecs(MAKE_VECTOR(kVideoCodecs1)); vcd->AddLegacyStream(2); - source.AddContent(cricket::CN_VIDEO, MediaProtocolType::kRtp, vcd->Clone()); + source.AddContent(CN_VIDEO, MediaProtocolType::kRtp, vcd->Clone()); std::unique_ptr copy = source.Clone(); - ASSERT_TRUE(copy.get() != NULL); - EXPECT_TRUE(copy->HasGroup(cricket::CN_AUDIO)); + ASSERT_TRUE(copy.get()); + EXPECT_TRUE(copy->HasGroup(CN_AUDIO)); const ContentInfo* ac = copy->GetContentByName("audio"); const ContentInfo* vc = copy->GetContentByName("video"); - ASSERT_TRUE(ac != NULL); - ASSERT_TRUE(vc != NULL); + ASSERT_TRUE(ac); + ASSERT_TRUE(vc); EXPECT_EQ(MediaProtocolType::kRtp, ac->type); - const AudioContentDescription* acd_copy = ac->media_description()->as_audio(); + const MediaContentDescription* acd_copy = ac->media_description(); EXPECT_EQ(acd->codecs(), acd_copy->codecs()); EXPECT_EQ(1u, acd->first_ssrc()); EXPECT_EQ(MediaProtocolType::kRtp, vc->type); - const VideoContentDescription* vcd_copy = vc->media_description()->as_video(); + const MediaContentDescription* vcd_copy = vc->media_description(); EXPECT_EQ(vcd->codecs(), vcd_copy->codecs()); EXPECT_EQ(2u, vcd->first_ssrc()); } @@ -3595,7 +3866,7 @@ TEST(MediaSessionDescription, 
CopySessionDescription) { // ensure the TransportInfo in the SessionDescription matches what we expect. TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoOfferAudio) { MediaSessionOptions options; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kRecvOnly, kActive, &options); TestTransportInfo(true, options, false); @@ -3604,7 +3875,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoOfferAudio) { TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoOfferIceRenomination) { MediaSessionOptions options; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kRecvOnly, kActive, &options); options.media_description_options[0] @@ -3614,7 +3885,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoOfferAudioCurrent) { MediaSessionOptions options; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kRecvOnly, kActive, &options); TestTransportInfo(true, options, true); @@ -3650,7 +3921,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoAnswerAudio) { MediaSessionOptions options; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kRecvOnly, kActive, &options); TestTransportInfo(false, options, false); @@ -3659,7 +3930,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoAnswerAudio) { TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoAnswerIceRenomination) { MediaSessionOptions options; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kRecvOnly, kActive, &options); options.media_description_options[0] @@ -3670,7 +3941,7 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfoAnswerAudioCurrent) { MediaSessionOptions options; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kRecvOnly, kActive, &options); TestTransportInfo(false, options, true); @@ -3704,221 +3975,67 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestTransportInfo(false, options, true); } -// Create an offer with bundle enabled and verify the crypto parameters are -// the common set of the available cryptos. -TEST_F(MediaSessionDescriptionFactoryTest, TestCryptoWithOfferBundle) { - TestCryptoWithBundle(true); -} - -// Create an answer with bundle enabled and verify the crypto parameters are -// the common set of the available cryptos. -TEST_F(MediaSessionDescriptionFactoryTest, TestCryptoWithAnswerBundle) { - TestCryptoWithBundle(false); -} - -// Verifies that creating answer fails if the offer has UDP/TLS/RTP/SAVPF but -// DTLS is not enabled locally. 
-TEST_F(MediaSessionDescriptionFactoryTest, - TestOfferDtlsSavpfWithoutDtlsFailed) { - f1_.set_secure(SEC_ENABLED); - f2_.set_secure(SEC_ENABLED); - tdf1_.set_secure(SEC_DISABLED); - tdf2_.set_secure(SEC_DISABLED); - - std::unique_ptr offer = - f1_.CreateOffer(CreatePlanBMediaSessionOptions(), NULL); - ASSERT_TRUE(offer.get() != NULL); - ContentInfo* offer_content = offer->GetContentByName("audio"); - ASSERT_TRUE(offer_content != NULL); - AudioContentDescription* offer_audio_desc = - offer_content->media_description()->as_audio(); - offer_audio_desc->set_protocol(cricket::kMediaProtocolDtlsSavpf); - - std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), CreatePlanBMediaSessionOptions(), NULL); - ASSERT_TRUE(answer != NULL); - ContentInfo* answer_content = answer->GetContentByName("audio"); - ASSERT_TRUE(answer_content != NULL); - - ASSERT_TRUE(answer_content->rejected); -} - // Offers UDP/TLS/RTP/SAVPF and verifies the answer can be created and contains // UDP/TLS/RTP/SAVPF. TEST_F(MediaSessionDescriptionFactoryTest, TestOfferDtlsSavpfCreateAnswer) { - f1_.set_secure(SEC_ENABLED); - f2_.set_secure(SEC_ENABLED); - tdf1_.set_secure(SEC_ENABLED); - tdf2_.set_secure(SEC_ENABLED); - std::unique_ptr offer = - f1_.CreateOffer(CreatePlanBMediaSessionOptions(), NULL); - ASSERT_TRUE(offer.get() != NULL); + f1_.CreateOfferOrError(CreateAudioMediaSession(), nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); ContentInfo* offer_content = offer->GetContentByName("audio"); - ASSERT_TRUE(offer_content != NULL); - AudioContentDescription* offer_audio_desc = - offer_content->media_description()->as_audio(); - offer_audio_desc->set_protocol(cricket::kMediaProtocolDtlsSavpf); + ASSERT_TRUE(offer_content); + MediaContentDescription* offer_audio_desc = + offer_content->media_description(); + offer_audio_desc->set_protocol(kMediaProtocolDtlsSavpf); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), CreatePlanBMediaSessionOptions(), NULL); - ASSERT_TRUE(answer != NULL); + f2_.CreateAnswerOrError(offer.get(), CreateAudioMediaSession(), nullptr) + .MoveValue(); + ASSERT_TRUE(answer); const ContentInfo* answer_content = answer->GetContentByName("audio"); - ASSERT_TRUE(answer_content != NULL); + ASSERT_TRUE(answer_content); ASSERT_FALSE(answer_content->rejected); - const AudioContentDescription* answer_audio_desc = - answer_content->media_description()->as_audio(); - EXPECT_EQ(cricket::kMediaProtocolDtlsSavpf, answer_audio_desc->protocol()); -} - -// Test that we include both SDES and DTLS in the offer, but only include SDES -// in the answer if DTLS isn't negotiated. -TEST_F(MediaSessionDescriptionFactoryTest, TestCryptoDtls) { - f1_.set_secure(SEC_ENABLED); - f2_.set_secure(SEC_ENABLED); - tdf1_.set_secure(SEC_ENABLED); - tdf2_.set_secure(SEC_DISABLED); - MediaSessionOptions options; - AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options); - std::unique_ptr offer, answer; - const cricket::MediaContentDescription* audio_media_desc; - const cricket::MediaContentDescription* video_media_desc; - const cricket::TransportDescription* audio_trans_desc; - const cricket::TransportDescription* video_trans_desc; - - // Generate an offer with SDES and DTLS support. 
- offer = f1_.CreateOffer(options, NULL); - ASSERT_TRUE(offer.get() != NULL); - - audio_media_desc = offer->GetContentDescriptionByName("audio"); - ASSERT_TRUE(audio_media_desc != NULL); - video_media_desc = offer->GetContentDescriptionByName("video"); - ASSERT_TRUE(video_media_desc != NULL); - EXPECT_EQ(1u, audio_media_desc->cryptos().size()); - EXPECT_EQ(1u, video_media_desc->cryptos().size()); - - audio_trans_desc = offer->GetTransportDescriptionByName("audio"); - ASSERT_TRUE(audio_trans_desc != NULL); - video_trans_desc = offer->GetTransportDescriptionByName("video"); - ASSERT_TRUE(video_trans_desc != NULL); - ASSERT_TRUE(audio_trans_desc->identity_fingerprint.get() != NULL); - ASSERT_TRUE(video_trans_desc->identity_fingerprint.get() != NULL); - - // Generate an answer with only SDES support, since tdf2 has crypto disabled. - answer = f2_.CreateAnswer(offer.get(), options, NULL); - ASSERT_TRUE(answer.get() != NULL); - - audio_media_desc = answer->GetContentDescriptionByName("audio"); - ASSERT_TRUE(audio_media_desc != NULL); - video_media_desc = answer->GetContentDescriptionByName("video"); - ASSERT_TRUE(video_media_desc != NULL); - EXPECT_EQ(1u, audio_media_desc->cryptos().size()); - EXPECT_EQ(1u, video_media_desc->cryptos().size()); - - audio_trans_desc = answer->GetTransportDescriptionByName("audio"); - ASSERT_TRUE(audio_trans_desc != NULL); - video_trans_desc = answer->GetTransportDescriptionByName("video"); - ASSERT_TRUE(video_trans_desc != NULL); - ASSERT_TRUE(audio_trans_desc->identity_fingerprint.get() == NULL); - ASSERT_TRUE(video_trans_desc->identity_fingerprint.get() == NULL); - - // Enable DTLS; the answer should now only have DTLS support. - tdf2_.set_secure(SEC_ENABLED); - answer = f2_.CreateAnswer(offer.get(), options, NULL); - ASSERT_TRUE(answer.get() != NULL); - - audio_media_desc = answer->GetContentDescriptionByName("audio"); - ASSERT_TRUE(audio_media_desc != NULL); - video_media_desc = answer->GetContentDescriptionByName("video"); - ASSERT_TRUE(video_media_desc != NULL); - EXPECT_TRUE(audio_media_desc->cryptos().empty()); - EXPECT_TRUE(video_media_desc->cryptos().empty()); - EXPECT_EQ(cricket::kMediaProtocolSavpf, audio_media_desc->protocol()); - EXPECT_EQ(cricket::kMediaProtocolSavpf, video_media_desc->protocol()); - - audio_trans_desc = answer->GetTransportDescriptionByName("audio"); - ASSERT_TRUE(audio_trans_desc != NULL); - video_trans_desc = answer->GetTransportDescriptionByName("video"); - ASSERT_TRUE(video_trans_desc != NULL); - ASSERT_TRUE(audio_trans_desc->identity_fingerprint.get() != NULL); - ASSERT_TRUE(video_trans_desc->identity_fingerprint.get() != NULL); - - // Try creating offer again. DTLS enabled now, crypto's should be empty - // in new offer. 
- offer = f1_.CreateOffer(options, offer.get()); - ASSERT_TRUE(offer.get() != NULL); - audio_media_desc = offer->GetContentDescriptionByName("audio"); - ASSERT_TRUE(audio_media_desc != NULL); - video_media_desc = offer->GetContentDescriptionByName("video"); - ASSERT_TRUE(video_media_desc != NULL); - EXPECT_TRUE(audio_media_desc->cryptos().empty()); - EXPECT_TRUE(video_media_desc->cryptos().empty()); - - audio_trans_desc = offer->GetTransportDescriptionByName("audio"); - ASSERT_TRUE(audio_trans_desc != NULL); - video_trans_desc = offer->GetTransportDescriptionByName("video"); - ASSERT_TRUE(video_trans_desc != NULL); - ASSERT_TRUE(audio_trans_desc->identity_fingerprint.get() != NULL); - ASSERT_TRUE(video_trans_desc->identity_fingerprint.get() != NULL); -} - -// Test that an answer can't be created if cryptos are required but the offer is -// unsecure. -TEST_F(MediaSessionDescriptionFactoryTest, TestSecureAnswerToUnsecureOffer) { - MediaSessionOptions options = CreatePlanBMediaSessionOptions(); - f1_.set_secure(SEC_DISABLED); - tdf1_.set_secure(SEC_DISABLED); - f2_.set_secure(SEC_REQUIRED); - tdf1_.set_secure(SEC_ENABLED); - - std::unique_ptr offer = f1_.CreateOffer(options, NULL); - ASSERT_TRUE(offer.get() != NULL); - std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), options, NULL); - EXPECT_TRUE(answer.get() == NULL); + const MediaContentDescription* answer_audio_desc = + answer_content->media_description(); + EXPECT_EQ(kMediaProtocolDtlsSavpf, answer_audio_desc->protocol()); } // Test that we accept a DTLS offer without SDES and create an appropriate // answer. TEST_F(MediaSessionDescriptionFactoryTest, TestCryptoOfferDtlsButNotSdes) { + /* TODO(hta): Figure this one out. f1_.set_secure(SEC_DISABLED); f2_.set_secure(SEC_ENABLED); tdf1_.set_secure(SEC_ENABLED); tdf2_.set_secure(SEC_ENABLED); + */ MediaSessionOptions options; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options); - // Generate an offer with DTLS but without SDES. - std::unique_ptr offer = f1_.CreateOffer(options, NULL); - ASSERT_TRUE(offer.get() != NULL); - - const AudioContentDescription* audio_offer = - GetFirstAudioContentDescription(offer.get()); - ASSERT_TRUE(audio_offer->cryptos().empty()); - const VideoContentDescription* video_offer = - GetFirstVideoContentDescription(offer.get()); - ASSERT_TRUE(video_offer->cryptos().empty()); + // Generate an offer with DTLS + std::unique_ptr offer = + f1_.CreateOfferOrError(options, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); - const cricket::TransportDescription* audio_offer_trans_desc = + const TransportDescription* audio_offer_trans_desc = offer->GetTransportDescriptionByName("audio"); - ASSERT_TRUE(audio_offer_trans_desc->identity_fingerprint.get() != NULL); - const cricket::TransportDescription* video_offer_trans_desc = + ASSERT_TRUE(audio_offer_trans_desc->identity_fingerprint.get()); + const TransportDescription* video_offer_trans_desc = offer->GetTransportDescriptionByName("video"); - ASSERT_TRUE(video_offer_trans_desc->identity_fingerprint.get() != NULL); + ASSERT_TRUE(video_offer_trans_desc->identity_fingerprint.get()); // Generate an answer with DTLS. 
std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), options, NULL); - ASSERT_TRUE(answer.get() != NULL); + f2_.CreateAnswerOrError(offer.get(), options, nullptr).MoveValue(); + ASSERT_TRUE(answer.get()); - const cricket::TransportDescription* audio_answer_trans_desc = + const TransportDescription* audio_answer_trans_desc = answer->GetTransportDescriptionByName("audio"); - EXPECT_TRUE(audio_answer_trans_desc->identity_fingerprint.get() != NULL); - const cricket::TransportDescription* video_answer_trans_desc = + EXPECT_TRUE(audio_answer_trans_desc->identity_fingerprint.get()); + const TransportDescription* video_answer_trans_desc = answer->GetTransportDescriptionByName("video"); - EXPECT_TRUE(video_answer_trans_desc->identity_fingerprint.get() != NULL); + EXPECT_TRUE(video_answer_trans_desc->identity_fingerprint.get()); } // Verifies if vad_enabled option is set to false, CN codecs are not present in @@ -3926,19 +4043,20 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestCryptoOfferDtlsButNotSdes) { TEST_F(MediaSessionDescriptionFactoryTest, TestVADEnableOption) { MediaSessionOptions options; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &options); - std::unique_ptr offer = f1_.CreateOffer(options, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + f1_.CreateOfferOrError(options, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); const ContentInfo* audio_content = offer->GetContentByName("audio"); EXPECT_FALSE(VerifyNoCNCodecs(audio_content)); options.vad_enabled = false; - offer = f1_.CreateOffer(options, NULL); - ASSERT_TRUE(offer.get() != NULL); + offer = f1_.CreateOfferOrError(options, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); audio_content = offer->GetContentByName("audio"); EXPECT_TRUE(VerifyNoCNCodecs(audio_content)); std::unique_ptr answer = - f1_.CreateAnswer(offer.get(), options, NULL); - ASSERT_TRUE(answer.get() != NULL); + f1_.CreateAnswerOrError(offer.get(), options, nullptr).MoveValue(); + ASSERT_TRUE(answer.get()); audio_content = answer->GetContentByName("audio"); EXPECT_TRUE(VerifyNoCNCodecs(audio_content)); } @@ -3946,29 +4064,30 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestVADEnableOption) { // Test that the generated MIDs match the existing offer. TEST_F(MediaSessionDescriptionFactoryTest, TestMIDsMatchesExistingOffer) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio_modified", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio_modified", RtpTransceiverDirection::kRecvOnly, kActive, &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video_modified", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video_modified", RtpTransceiverDirection::kRecvOnly, kActive, &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_DATA, "data_modified", + AddMediaDescriptionOptions(webrtc::MediaType::DATA, "data_modified", RtpTransceiverDirection::kSendRecv, kActive, &opts); // Create offer. 
- std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); std::unique_ptr updated_offer( - f1_.CreateOffer(opts, offer.get())); + f1_.CreateOfferOrError(opts, offer.get()).MoveValue()); const ContentInfo* audio_content = GetFirstAudioContent(updated_offer.get()); const ContentInfo* video_content = GetFirstVideoContent(updated_offer.get()); const ContentInfo* data_content = GetFirstDataContent(updated_offer.get()); - ASSERT_TRUE(audio_content != nullptr); - ASSERT_TRUE(video_content != nullptr); - ASSERT_TRUE(data_content != nullptr); - EXPECT_EQ("audio_modified", audio_content->name); - EXPECT_EQ("video_modified", video_content->name); - EXPECT_EQ("data_modified", data_content->name); + ASSERT_TRUE(audio_content); + ASSERT_TRUE(video_content); + ASSERT_TRUE(data_content); + EXPECT_EQ("audio_modified", audio_content->mid()); + EXPECT_EQ("video_modified", video_content->mid()); + EXPECT_EQ("data_modified", data_content->mid()); } // The following tests verify that the unified plan SDP is supported. @@ -3977,55 +4096,58 @@ TEST_F(MediaSessionDescriptionFactoryTest, TestMIDsMatchesExistingOffer) { TEST_F(MediaSessionDescriptionFactoryTest, CreateOfferWithMultipleAVMediaSections) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio_1", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio_1", RtpTransceiverDirection::kSendRecv, kActive, &opts); - AttachSenderToMediaDescriptionOptions( - "audio_1", MEDIA_TYPE_AUDIO, kAudioTrack1, {kMediaStream1}, 1, &opts); + AttachSenderToMediaDescriptionOptions("audio_1", webrtc::MediaType::AUDIO, + kAudioTrack1, {kMediaStream1}, 1, + &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video_1", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video_1", RtpTransceiverDirection::kSendRecv, kActive, &opts); - AttachSenderToMediaDescriptionOptions( - "video_1", MEDIA_TYPE_VIDEO, kVideoTrack1, {kMediaStream1}, 1, &opts); + AttachSenderToMediaDescriptionOptions("video_1", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio_2", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio_2", RtpTransceiverDirection::kSendRecv, kActive, &opts); - AttachSenderToMediaDescriptionOptions( - "audio_2", MEDIA_TYPE_AUDIO, kAudioTrack2, {kMediaStream2}, 1, &opts); + AttachSenderToMediaDescriptionOptions("audio_2", webrtc::MediaType::AUDIO, + kAudioTrack2, {kMediaStream2}, 1, + &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video_2", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video_2", RtpTransceiverDirection::kSendRecv, kActive, &opts); - AttachSenderToMediaDescriptionOptions( - "video_2", MEDIA_TYPE_VIDEO, kVideoTrack2, {kMediaStream2}, 1, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + AttachSenderToMediaDescriptionOptions("video_2", webrtc::MediaType::VIDEO, + kVideoTrack2, {kMediaStream2}, 1, + &opts); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); ASSERT_TRUE(offer); ASSERT_EQ(4u, offer->contents().size()); EXPECT_FALSE(offer->contents()[0].rejected); - const AudioContentDescription* acd = - offer->contents()[0].media_description()->as_audio(); + const MediaContentDescription* acd = offer->contents()[0].media_description(); ASSERT_EQ(1u, acd->streams().size()); EXPECT_EQ(kAudioTrack1, acd->streams()[0].id); EXPECT_EQ(RtpTransceiverDirection::kSendRecv, acd->direction()); 
EXPECT_FALSE(offer->contents()[1].rejected); - const VideoContentDescription* vcd = - offer->contents()[1].media_description()->as_video(); + const MediaContentDescription* vcd = offer->contents()[1].media_description(); ASSERT_EQ(1u, vcd->streams().size()); EXPECT_EQ(kVideoTrack1, vcd->streams()[0].id); EXPECT_EQ(RtpTransceiverDirection::kSendRecv, vcd->direction()); EXPECT_FALSE(offer->contents()[2].rejected); - acd = offer->contents()[2].media_description()->as_audio(); + acd = offer->contents()[2].media_description(); ASSERT_EQ(1u, acd->streams().size()); EXPECT_EQ(kAudioTrack2, acd->streams()[0].id); EXPECT_EQ(RtpTransceiverDirection::kSendRecv, acd->direction()); EXPECT_FALSE(offer->contents()[3].rejected); - vcd = offer->contents()[3].media_description()->as_video(); + vcd = offer->contents()[3].media_description(); ASSERT_EQ(1u, vcd->streams().size()); EXPECT_EQ(kVideoTrack2, vcd->streams()[0].id); EXPECT_EQ(RtpTransceiverDirection::kSendRecv, vcd->direction()); @@ -4036,58 +4158,63 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, CreateAnswerWithMultipleAVMediaSections) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio_1", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio_1", RtpTransceiverDirection::kSendRecv, kActive, &opts); - AttachSenderToMediaDescriptionOptions( - "audio_1", MEDIA_TYPE_AUDIO, kAudioTrack1, {kMediaStream1}, 1, &opts); + AttachSenderToMediaDescriptionOptions("audio_1", webrtc::MediaType::AUDIO, + kAudioTrack1, {kMediaStream1}, 1, + &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video_1", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video_1", RtpTransceiverDirection::kSendRecv, kActive, &opts); - AttachSenderToMediaDescriptionOptions( - "video_1", MEDIA_TYPE_VIDEO, kVideoTrack1, {kMediaStream1}, 1, &opts); + AttachSenderToMediaDescriptionOptions("video_1", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio_2", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio_2", RtpTransceiverDirection::kSendRecv, kActive, &opts); - AttachSenderToMediaDescriptionOptions( - "audio_2", MEDIA_TYPE_AUDIO, kAudioTrack2, {kMediaStream2}, 1, &opts); + AttachSenderToMediaDescriptionOptions("audio_2", webrtc::MediaType::AUDIO, + kAudioTrack2, {kMediaStream2}, 1, + &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video_2", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video_2", RtpTransceiverDirection::kSendRecv, kActive, &opts); - AttachSenderToMediaDescriptionOptions( - "video_2", MEDIA_TYPE_VIDEO, kVideoTrack2, {kMediaStream2}, 1, &opts); + AttachSenderToMediaDescriptionOptions("video_2", webrtc::MediaType::VIDEO, + kVideoTrack2, {kMediaStream2}, 1, + &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); ASSERT_TRUE(offer); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); ASSERT_EQ(4u, answer->contents().size()); EXPECT_FALSE(answer->contents()[0].rejected); - const AudioContentDescription* acd = - answer->contents()[0].media_description()->as_audio(); + const MediaContentDescription* acd = + answer->contents()[0].media_description(); ASSERT_EQ(1u, acd->streams().size()); EXPECT_EQ(kAudioTrack1, acd->streams()[0].id); EXPECT_EQ(RtpTransceiverDirection::kSendRecv, acd->direction()); 
EXPECT_FALSE(answer->contents()[1].rejected); - const VideoContentDescription* vcd = - answer->contents()[1].media_description()->as_video(); + const MediaContentDescription* vcd = + answer->contents()[1].media_description(); ASSERT_EQ(1u, vcd->streams().size()); EXPECT_EQ(kVideoTrack1, vcd->streams()[0].id); EXPECT_EQ(RtpTransceiverDirection::kSendRecv, vcd->direction()); EXPECT_FALSE(answer->contents()[2].rejected); - acd = answer->contents()[2].media_description()->as_audio(); + acd = answer->contents()[2].media_description(); ASSERT_EQ(1u, acd->streams().size()); EXPECT_EQ(kAudioTrack2, acd->streams()[0].id); EXPECT_EQ(RtpTransceiverDirection::kSendRecv, acd->direction()); EXPECT_FALSE(answer->contents()[3].rejected); - vcd = answer->contents()[3].media_description()->as_video(); + vcd = answer->contents()[3].media_description(); ASSERT_EQ(1u, vcd->streams().size()); EXPECT_EQ(kVideoTrack2, vcd->streams()[0].id); EXPECT_EQ(RtpTransceiverDirection::kSendRecv, vcd->direction()); @@ -4099,14 +4226,14 @@ TEST_F(MediaSessionDescriptionFactoryTest, CreateOfferWithMediaSectionStoppedByOfferer) { // Create an offer with two audio sections and one of them is stopped. MediaSessionOptions offer_opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio1", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio1", RtpTransceiverDirection::kSendRecv, kActive, &offer_opts); - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio2", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio2", RtpTransceiverDirection::kInactive, kStopped, &offer_opts); std::unique_ptr offer = - f1_.CreateOffer(offer_opts, nullptr); + f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue(); ASSERT_TRUE(offer); ASSERT_EQ(2u, offer->contents().size()); EXPECT_FALSE(offer->contents()[0].rejected); @@ -4119,14 +4246,14 @@ TEST_F(MediaSessionDescriptionFactoryTest, CreateAnswerWithMediaSectionStoppedByOfferer) { // Create an offer with two audio sections and one of them is stopped. MediaSessionOptions offer_opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio1", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio1", RtpTransceiverDirection::kSendRecv, kActive, &offer_opts); - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio2", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio2", RtpTransceiverDirection::kInactive, kStopped, &offer_opts); std::unique_ptr offer = - f1_.CreateOffer(offer_opts, nullptr); + f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue(); ASSERT_TRUE(offer); ASSERT_EQ(2u, offer->contents().size()); EXPECT_FALSE(offer->contents()[0].rejected); @@ -4134,14 +4261,14 @@ TEST_F(MediaSessionDescriptionFactoryTest, // Create an answer based on the offer. 
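The stopped/rejected answer tests in this area reduce to one rule, sketched below with plain bools rather than MediaSessionOptions: a media section comes back rejected in the answer if either the offerer or the answerer stopped it.

// Aside for this note only; not part of the change.
constexpr bool SectionRejectedInAnswer(bool stopped_by_offerer,
                                       bool stopped_by_answerer) {
  return stopped_by_offerer || stopped_by_answerer;
}

// CreateAnswerWithMediaSectionStoppedByOfferer: answerer keeps it active.
static_assert(SectionRejectedInAnswer(/*offerer=*/true, /*answerer=*/false),
              "offerer-stopped section is rejected");
// CreateAnswerWithMediaSectionRejectedByAnswerer: offerer keeps it active.
static_assert(SectionRejectedInAnswer(/*offerer=*/false, /*answerer=*/true),
              "answerer-stopped section is rejected");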
MediaSessionOptions answer_opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio1", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio1", RtpTransceiverDirection::kSendRecv, kActive, &answer_opts); - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio2", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio2", RtpTransceiverDirection::kSendRecv, kActive, &answer_opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), answer_opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue(); ASSERT_EQ(2u, answer->contents().size()); EXPECT_FALSE(answer->contents()[0].rejected); EXPECT_TRUE(answer->contents()[1].rejected); @@ -4153,14 +4280,14 @@ TEST_F(MediaSessionDescriptionFactoryTest, CreateAnswerWithMediaSectionRejectedByAnswerer) { // Create an offer with two audio sections. MediaSessionOptions offer_opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio1", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio1", RtpTransceiverDirection::kSendRecv, kActive, &offer_opts); - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio2", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio2", RtpTransceiverDirection::kSendRecv, kActive, &offer_opts); std::unique_ptr offer = - f1_.CreateOffer(offer_opts, nullptr); + f1_.CreateOfferOrError(offer_opts, nullptr).MoveValue(); ASSERT_TRUE(offer); ASSERT_EQ(2u, offer->contents().size()); ASSERT_FALSE(offer->contents()[0].rejected); @@ -4168,14 +4295,14 @@ TEST_F(MediaSessionDescriptionFactoryTest, // The answerer rejects one of the audio sections. MediaSessionOptions answer_opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio1", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio1", RtpTransceiverDirection::kSendRecv, kActive, &answer_opts); - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio2", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio2", RtpTransceiverDirection::kInactive, kStopped, &answer_opts); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), answer_opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), answer_opts, nullptr).MoveValue(); ASSERT_EQ(2u, answer->contents().size()); EXPECT_FALSE(answer->contents()[0].rejected); EXPECT_TRUE(answer->contents()[1].rejected); @@ -4192,18 +4319,19 @@ TEST_F(MediaSessionDescriptionFactoryTest, MediaSessionOptions opts; // This tests put video section first because normally audio comes first by // default. 
- AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kSendRecv, kActive, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); ASSERT_TRUE(offer); ASSERT_EQ(2u, offer->contents().size()); - EXPECT_EQ("video", offer->contents()[0].name); - EXPECT_EQ("audio", offer->contents()[1].name); + EXPECT_EQ("video", offer->contents()[0].mid()); + EXPECT_EQ("audio", offer->contents()[1].mid()); } // Test that different media sections using the same codec have same payload @@ -4211,20 +4339,21 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, PayloadTypesSharedByMediaSectionsOfSameType) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video1", RtpTransceiverDirection::kSendRecv, kActive, &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video2", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video2", RtpTransceiverDirection::kSendRecv, kActive, &opts); // Create an offer with two video sections using same codecs. - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); ASSERT_TRUE(offer); ASSERT_EQ(2u, offer->contents().size()); - const VideoContentDescription* vcd1 = - offer->contents()[0].media_description()->as_video(); - const VideoContentDescription* vcd2 = - offer->contents()[1].media_description()->as_video(); + const MediaContentDescription* vcd1 = + offer->contents()[0].media_description(); + const MediaContentDescription* vcd2 = + offer->contents()[1].media_description(); EXPECT_EQ(vcd1->codecs().size(), vcd2->codecs().size()); ASSERT_EQ(2u, vcd1->codecs().size()); EXPECT_EQ(vcd1->codecs()[0].name, vcd2->codecs()[0].name); @@ -4234,107 +4363,191 @@ TEST_F(MediaSessionDescriptionFactoryTest, // Create answer and negotiate the codecs. std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); ASSERT_TRUE(answer); ASSERT_EQ(2u, answer->contents().size()); - vcd1 = answer->contents()[0].media_description()->as_video(); - vcd2 = answer->contents()[1].media_description()->as_video(); + vcd1 = answer->contents()[0].media_description(); + vcd2 = answer->contents()[1].media_description(); EXPECT_EQ(vcd1->codecs().size(), vcd2->codecs().size()); ASSERT_EQ(1u, vcd1->codecs().size()); EXPECT_EQ(vcd1->codecs()[0].name, vcd2->codecs()[0].name); EXPECT_EQ(vcd1->codecs()[0].id, vcd2->codecs()[0].id); } -// Test verifying that negotiating codecs with the same packetization retains -// the packetization value. -TEST_F(MediaSessionDescriptionFactoryTest, PacketizationIsEqual) { - std::vector f1_codecs = {cricket::CreateVideoCodec(96, "H264")}; - f1_codecs.back().packetization = "raw"; - f1_.set_video_codecs(f1_codecs, f1_codecs); +#ifdef RTC_ENABLE_H265 +// Test verifying that negotiating codecs with the same tx-mode retains the +// tx-mode value. 
+TEST_F(MediaSessionDescriptionFactoryTest, H265TxModeIsEqualRetainIt) { + std::vector f1_codecs = {CreateVideoCodec(96, "H265")}; + f1_codecs.back().tx_mode = "mrst"; + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); - std::vector f2_codecs = {cricket::CreateVideoCodec(96, "H264")}; - f2_codecs.back().packetization = "raw"; - f2_.set_video_codecs(f2_codecs, f2_codecs); + std::vector f2_codecs = {CreateVideoCodec(96, "H265")}; + f2_codecs.back().tx_mode = "mrst"; + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs(f2_codecs, + f2_codecs); MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video1", RtpTransceiverDirection::kSendRecv, kActive, &opts); // Create an offer with two video sections using same codecs. - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); ASSERT_TRUE(offer); ASSERT_EQ(1u, offer->contents().size()); - const VideoContentDescription* vcd1 = - offer->contents()[0].media_description()->as_video(); + const MediaContentDescription* vcd1 = + offer->contents()[0].media_description(); ASSERT_EQ(1u, vcd1->codecs().size()); - EXPECT_EQ(vcd1->codecs()[0].packetization, "raw"); + EXPECT_EQ(vcd1->codecs()[0].tx_mode, "mrst"); // Create answer and negotiate the codecs. std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); ASSERT_TRUE(answer); ASSERT_EQ(1u, answer->contents().size()); - vcd1 = answer->contents()[0].media_description()->as_video(); + vcd1 = answer->contents()[0].media_description(); ASSERT_EQ(1u, vcd1->codecs().size()); - EXPECT_EQ(vcd1->codecs()[0].packetization, "raw"); + EXPECT_EQ(vcd1->codecs()[0].tx_mode, "mrst"); } -// Test verifying that negotiating codecs with different packetization removes -// the packetization value. -TEST_F(MediaSessionDescriptionFactoryTest, PacketizationIsDifferent) { - std::vector f1_codecs = {cricket::CreateVideoCodec(96, "H264")}; - f1_codecs.back().packetization = "raw"; - f1_.set_video_codecs(f1_codecs, f1_codecs); +// Test verifying that negotiating codecs with different tx_mode removes +// the tx_mode value. +TEST_F(MediaSessionDescriptionFactoryTest, H265TxModeIsDifferentDropCodecs) { + std::vector f1_codecs = {CreateVideoCodec(96, "H265")}; + f1_codecs.back().tx_mode = "mrst"; + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); - std::vector f2_codecs = {cricket::CreateVideoCodec(96, "H264")}; - f2_codecs.back().packetization = "notraw"; - f2_.set_video_codecs(f2_codecs, f2_codecs); + std::vector f2_codecs = {CreateVideoCodec(96, "H265")}; + f2_codecs.back().tx_mode = "mrmt"; + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs(f2_codecs, + f2_codecs); MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video1", RtpTransceiverDirection::kSendRecv, kActive, &opts); // Create an offer with two video sections using same codecs. 
- std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); ASSERT_TRUE(offer); ASSERT_EQ(1u, offer->contents().size()); const VideoContentDescription* vcd1 = offer->contents()[0].media_description()->as_video(); ASSERT_EQ(1u, vcd1->codecs().size()); - EXPECT_EQ(vcd1->codecs()[0].packetization, "raw"); + EXPECT_EQ(vcd1->codecs()[0].tx_mode, "mrst"); // Create answer and negotiate the codecs. std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); ASSERT_TRUE(answer); ASSERT_EQ(1u, answer->contents().size()); vcd1 = answer->contents()[0].media_description()->as_video(); ASSERT_EQ(1u, vcd1->codecs().size()); - EXPECT_EQ(vcd1->codecs()[0].packetization, absl::nullopt); + EXPECT_EQ(vcd1->codecs()[0].tx_mode, std::nullopt); } +#endif + +// Test verifying that negotiating codecs with the same packetization retains +// the packetization value. +TEST_F(MediaSessionDescriptionFactoryTest, PacketizationIsEqual) { + std::vector f1_codecs = {CreateVideoCodec(96, "H264")}; + f1_codecs.back().packetization = "raw"; + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); + + std::vector f2_codecs = {CreateVideoCodec(96, "H264")}; + f2_codecs.back().packetization = "raw"; + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs(f2_codecs, + f2_codecs); -// Test that the codec preference order per media section is respected in -// subsequent offer. -TEST_F(MediaSessionDescriptionFactoryTest, - CreateOfferRespectsCodecPreferenceOrder) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1", - RtpTransceiverDirection::kSendRecv, kActive, - &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video2", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video1", RtpTransceiverDirection::kSendRecv, kActive, &opts); + // Create an offer with two video sections using same codecs. - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); ASSERT_TRUE(offer); - ASSERT_EQ(2u, offer->contents().size()); - VideoContentDescription* vcd1 = - offer->contents()[0].media_description()->as_video(); - const VideoContentDescription* vcd2 = - offer->contents()[1].media_description()->as_video(); - auto video_codecs = MAKE_VECTOR(kVideoCodecs1); + ASSERT_EQ(1u, offer->contents().size()); + const MediaContentDescription* vcd1 = + offer->contents()[0].media_description(); + ASSERT_EQ(1u, vcd1->codecs().size()); + EXPECT_EQ(vcd1->codecs()[0].packetization, "raw"); + + // Create answer and negotiate the codecs. + std::unique_ptr answer = + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); + ASSERT_TRUE(answer); + ASSERT_EQ(1u, answer->contents().size()); + vcd1 = answer->contents()[0].media_description(); + ASSERT_EQ(1u, vcd1->codecs().size()); + EXPECT_EQ(vcd1->codecs()[0].packetization, "raw"); +} + +// Test verifying that negotiating codecs with different packetization removes +// the packetization value. 
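Editor's aside (illustrative sketch, not part of the patch): the PacketizationIsEqual test above and the PacketizationIsDifferent test that follows (like the H.265 tx-mode pair before them) assert a simple retain-or-drop rule for a codec parameter. The self-contained sketch below illustrates that behavior; SimpleCodec and NegotiatePacketization are hypothetical names, not the actual negotiation code in media_session.cc.

#include <iostream>
#include <optional>
#include <string>

// Minimal codec model: only the fields needed for this illustration.
struct SimpleCodec {
  int id = 0;
  std::string name;
  std::optional<std::string> packetization;
};

// Keep the parameter only when both sides advertise the same value;
// otherwise the negotiated codec carries no packetization value.
std::optional<std::string> NegotiatePacketization(const SimpleCodec& local,
                                                  const SimpleCodec& remote) {
  if (local.packetization.has_value() &&
      local.packetization == remote.packetization) {
    return local.packetization;  // Same value on both sides: retain it.
  }
  return std::nullopt;  // Mismatch or unset: drop it.
}

int main() {
  SimpleCodec local{96, "H264", "raw"};
  SimpleCodec remote_same{96, "H264", "raw"};
  SimpleCodec remote_other{96, "H264", "notraw"};
  std::cout << NegotiatePacketization(local, remote_same).value_or("(none)")
            << "\n";  // prints "raw"
  std::cout << NegotiatePacketization(local, remote_other).value_or("(none)")
            << "\n";  // prints "(none)"
  return 0;
}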
+TEST_F(MediaSessionDescriptionFactoryTest, PacketizationIsDifferent) { + std::vector f1_codecs = {CreateVideoCodec(96, "H264")}; + f1_codecs.back().packetization = "raw"; + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(f1_codecs, + f1_codecs); + + std::vector f2_codecs = {CreateVideoCodec(96, "H264")}; + f2_codecs.back().packetization = "notraw"; + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs(f2_codecs, + f2_codecs); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video1", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + + // Create an offer with two video sections using same codecs. + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer); + ASSERT_EQ(1u, offer->contents().size()); + const VideoContentDescription* vcd1 = + offer->contents()[0].media_description()->as_video(); + ASSERT_EQ(1u, vcd1->codecs().size()); + EXPECT_EQ(vcd1->codecs()[0].packetization, "raw"); + + // Create answer and negotiate the codecs. + std::unique_ptr answer = + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); + ASSERT_TRUE(answer); + ASSERT_EQ(1u, answer->contents().size()); + vcd1 = answer->contents()[0].media_description()->as_video(); + ASSERT_EQ(1u, vcd1->codecs().size()); + EXPECT_EQ(vcd1->codecs()[0].packetization, std::nullopt); +} + +// Test that the codec preference order per media section is respected in +// subsequent offer. +TEST_F(MediaSessionDescriptionFactoryTest, + CreateOfferRespectsCodecPreferenceOrder) { + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video1", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video2", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + // Create an offer with two video sections using same codecs. + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer); + ASSERT_EQ(2u, offer->contents().size()); + MediaContentDescription* vcd1 = offer->contents()[0].media_description(); + const MediaContentDescription* vcd2 = + offer->contents()[1].media_description(); + auto video_codecs = MAKE_VECTOR(kVideoCodecs1); EXPECT_EQ(video_codecs, vcd1->codecs()); EXPECT_EQ(video_codecs, vcd2->codecs()); @@ -4343,9 +4556,9 @@ TEST_F(MediaSessionDescriptionFactoryTest, auto video_codecs_reverse = MAKE_VECTOR(kVideoCodecs1Reverse); vcd1->set_codecs(video_codecs_reverse); std::unique_ptr updated_offer( - f1_.CreateOffer(opts, offer.get())); - vcd1 = updated_offer->contents()[0].media_description()->as_video(); - vcd2 = updated_offer->contents()[1].media_description()->as_video(); + f1_.CreateOfferOrError(opts, offer.get()).MoveValue()); + vcd1 = updated_offer->contents()[0].media_description(); + vcd2 = updated_offer->contents()[1].media_description(); // The video codec preference order should be respected. 
EXPECT_EQ(video_codecs_reverse, vcd1->codecs()); EXPECT_EQ(video_codecs, vcd2->codecs()); @@ -4356,20 +4569,20 @@ TEST_F(MediaSessionDescriptionFactoryTest, TEST_F(MediaSessionDescriptionFactoryTest, CreateAnswerRespectsCodecPreferenceOrder) { MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video1", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video1", RtpTransceiverDirection::kSendRecv, kActive, &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video2", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video2", RtpTransceiverDirection::kSendRecv, kActive, &opts); // Create an offer with two video sections using same codecs. - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); ASSERT_TRUE(offer); ASSERT_EQ(2u, offer->contents().size()); - VideoContentDescription* vcd1 = - offer->contents()[0].media_description()->as_video(); - const VideoContentDescription* vcd2 = - offer->contents()[1].media_description()->as_video(); + MediaContentDescription* vcd1 = offer->contents()[0].media_description(); + const MediaContentDescription* vcd2 = + offer->contents()[1].media_description(); auto video_codecs = MAKE_VECTOR(kVideoCodecs1); EXPECT_EQ(video_codecs, vcd1->codecs()); EXPECT_EQ(video_codecs, vcd2->codecs()); @@ -4379,9 +4592,9 @@ TEST_F(MediaSessionDescriptionFactoryTest, auto video_codecs_reverse = MAKE_VECTOR(kVideoCodecs1Reverse); vcd1->set_codecs(video_codecs_reverse); std::unique_ptr answer = - f1_.CreateAnswer(offer.get(), opts, nullptr); - vcd1 = answer->contents()[0].media_description()->as_video(); - vcd2 = answer->contents()[1].media_description()->as_video(); + f1_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); + vcd1 = answer->contents()[0].media_description(); + vcd2 = answer->contents()[1].media_description(); // The video codec preference order should be respected. 
EXPECT_EQ(video_codecs_reverse, vcd1->codecs()); EXPECT_EQ(video_codecs, vcd2->codecs()); @@ -4408,23 +4621,28 @@ TEST_F(MediaSessionDescriptionFactoryTest, CreateAnswerWithLocalCodecParams) { audio_codecs2[0].SetParam(audio_param_name, audio_value2); video_codecs2[0].SetParam(video_param_name, video_value2); - f1_.set_audio_codecs(audio_codecs1, audio_codecs1); - f1_.set_video_codecs(video_codecs1, video_codecs1); - f2_.set_audio_codecs(audio_codecs2, audio_codecs2); - f2_.set_video_codecs(video_codecs2, video_codecs2); + codec_lookup_helper_1_.GetCodecVendor()->set_audio_codecs(audio_codecs1, + audio_codecs1); + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs(video_codecs1, + video_codecs1); + codec_lookup_helper_2_.GetCodecVendor()->set_audio_codecs(audio_codecs2, + audio_codecs2); + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs(video_codecs2, + video_codecs2); MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", RtpTransceiverDirection::kSendRecv, kActive, &opts); - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); ASSERT_TRUE(offer); - auto offer_acd = offer->contents()[0].media_description()->as_audio(); - auto offer_vcd = offer->contents()[1].media_description()->as_video(); + auto offer_acd = offer->contents()[0].media_description(); + auto offer_vcd = offer->contents()[1].media_description(); std::string value; EXPECT_TRUE(offer_acd->codecs()[0].GetParam(audio_param_name, &value)); EXPECT_EQ(audio_value1, value); @@ -4432,11 +4650,13 @@ TEST_F(MediaSessionDescriptionFactoryTest, CreateAnswerWithLocalCodecParams) { EXPECT_EQ(video_value1, value); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); ASSERT_TRUE(answer); - auto answer_acd = answer->contents()[0].media_description()->as_audio(); - auto answer_vcd = answer->contents()[1].media_description()->as_video(); + auto answer_acd = answer->contents()[0].media_description(); + auto answer_vcd = answer->contents()[1].media_description(); // Use the parameters from the local codecs. + ASSERT_TRUE(answer_acd); + ASSERT_THAT(answer_acd->codecs().size(), Gt(0)); EXPECT_TRUE(answer_acd->codecs()[0].GetParam(audio_param_name, &value)); EXPECT_EQ(audio_value2, value); EXPECT_TRUE(answer_vcd->codecs()[0].GetParam(video_param_name, &value)); @@ -4452,68 +4672,71 @@ TEST_F(MediaSessionDescriptionFactoryTest, H264MatchCriteriaIncludesPacketizationMode) { // Create two H264 codecs with the same profile level ID and different // packetization modes. 
- VideoCodec h264_pm0 = cricket::CreateVideoCodec(96, "H264"); - h264_pm0.params[cricket::kH264FmtpProfileLevelId] = "42c01f"; - h264_pm0.params[cricket::kH264FmtpPacketizationMode] = "0"; - VideoCodec h264_pm1 = cricket::CreateVideoCodec(97, "H264"); - h264_pm1.params[cricket::kH264FmtpProfileLevelId] = "42c01f"; - h264_pm1.params[cricket::kH264FmtpPacketizationMode] = "1"; + Codec h264_pm0 = CreateVideoCodec(96, "H264"); + h264_pm0.params[kH264FmtpProfileLevelId] = "42c01f"; + h264_pm0.params[kH264FmtpPacketizationMode] = "0"; + Codec h264_pm1 = CreateVideoCodec(97, "H264"); + h264_pm1.params[kH264FmtpProfileLevelId] = "42c01f"; + h264_pm1.params[kH264FmtpPacketizationMode] = "1"; // Offerer will send both codecs, answerer should choose the one with matching // packetization mode (and not the first one it sees). - f1_.set_video_codecs({h264_pm0, h264_pm1}, {h264_pm0, h264_pm1}); - f2_.set_video_codecs({h264_pm1}, {h264_pm1}); + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs( + {h264_pm0, h264_pm1}, {h264_pm0, h264_pm1}); + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs({h264_pm1}, + {h264_pm1}); MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_VIDEO, "video", + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", RtpTransceiverDirection::kSendRecv, kActive, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); ASSERT_TRUE(offer); std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); ASSERT_TRUE(answer); // Answer should have one negotiated codec with packetization-mode=1 using the // offered payload type. ASSERT_EQ(1u, answer->contents().size()); - auto answer_vcd = answer->contents()[0].media_description()->as_video(); + auto answer_vcd = answer->contents()[0].media_description(); ASSERT_EQ(1u, answer_vcd->codecs().size()); auto answer_codec = answer_vcd->codecs()[0]; EXPECT_EQ(h264_pm1.id, answer_codec.id); } -class MediaProtocolTest : public ::testing::TestWithParam { +class MediaProtocolTest : public testing::TestWithParam { public: MediaProtocolTest() : tdf1_(field_trials_), tdf2_(field_trials_), - f1_(&tdf1_, &ssrc_generator1), - f2_(&tdf2_, &ssrc_generator2) { - f1_.set_audio_codecs(MAKE_VECTOR(kAudioCodecs1), - MAKE_VECTOR(kAudioCodecs1)); - f1_.set_video_codecs(MAKE_VECTOR(kVideoCodecs1), - MAKE_VECTOR(kVideoCodecs1)); - f2_.set_audio_codecs(MAKE_VECTOR(kAudioCodecs2), - MAKE_VECTOR(kAudioCodecs2)); - f2_.set_video_codecs(MAKE_VECTOR(kVideoCodecs2), - MAKE_VECTOR(kVideoCodecs2)); - f1_.set_secure(SEC_ENABLED); - f2_.set_secure(SEC_ENABLED); - tdf1_.set_certificate(rtc::RTCCertificate::Create( - std::unique_ptr(new rtc::FakeSSLIdentity("id1")))); - tdf2_.set_certificate(rtc::RTCCertificate::Create( - std::unique_ptr(new rtc::FakeSSLIdentity("id2")))); - tdf1_.set_secure(SEC_ENABLED); - tdf2_.set_secure(SEC_ENABLED); + codec_lookup_helper_1_(field_trials_), + codec_lookup_helper_2_(field_trials_), + f1_(nullptr, false, &ssrc_generator1, &tdf1_, &codec_lookup_helper_1_), + f2_(nullptr, false, &ssrc_generator2, &tdf2_, &codec_lookup_helper_2_) { + codec_lookup_helper_1_.GetCodecVendor()->set_audio_codecs( + MAKE_VECTOR(kAudioCodecs1), MAKE_VECTOR(kAudioCodecs1)); + codec_lookup_helper_1_.GetCodecVendor()->set_video_codecs( + MAKE_VECTOR(kVideoCodecs1), MAKE_VECTOR(kVideoCodecs1)); + codec_lookup_helper_2_.GetCodecVendor()->set_audio_codecs( + 
MAKE_VECTOR(kAudioCodecs2), MAKE_VECTOR(kAudioCodecs2)); + codec_lookup_helper_2_.GetCodecVendor()->set_video_codecs( + MAKE_VECTOR(kVideoCodecs2), MAKE_VECTOR(kVideoCodecs2)); + tdf1_.set_certificate(RTCCertificate::Create( + std::unique_ptr(new FakeSSLIdentity("id1")))); + tdf2_.set_certificate(RTCCertificate::Create( + std::unique_ptr(new FakeSSLIdentity("id2")))); } protected: - webrtc::test::ScopedKeyValueConfig field_trials_; + ScopedKeyValueConfig field_trials_; TransportDescriptionFactory tdf1_; TransportDescriptionFactory tdf2_; + CodecLookupHelperForTesting codec_lookup_helper_1_; + CodecLookupHelperForTesting codec_lookup_helper_2_; MediaSessionDescriptionFactory f1_; MediaSessionDescriptionFactory f2_; UniqueRandomIdGenerator ssrc_generator1; @@ -4523,99 +4746,43 @@ class MediaProtocolTest : public ::testing::TestWithParam { TEST_P(MediaProtocolTest, TestAudioVideoAcceptance) { MediaSessionOptions opts; AddAudioVideoSections(RtpTransceiverDirection::kRecvOnly, &opts); - std::unique_ptr offer = f1_.CreateOffer(opts, nullptr); - ASSERT_TRUE(offer.get() != nullptr); + std::unique_ptr offer = + f1_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); // Set the protocol for all the contents. - for (auto& content : offer.get()->contents()) { + for (auto& content : offer->contents()) { content.media_description()->set_protocol(GetParam()); } std::unique_ptr answer = - f2_.CreateAnswer(offer.get(), opts, nullptr); + f2_.CreateAnswerOrError(offer.get(), opts, nullptr).MoveValue(); const ContentInfo* ac = answer->GetContentByName("audio"); const ContentInfo* vc = answer->GetContentByName("video"); - ASSERT_TRUE(ac != nullptr); - ASSERT_TRUE(vc != nullptr); + ASSERT_TRUE(ac); + ASSERT_TRUE(vc); EXPECT_FALSE(ac->rejected); // the offer is accepted EXPECT_FALSE(vc->rejected); - const AudioContentDescription* acd = ac->media_description()->as_audio(); - const VideoContentDescription* vcd = vc->media_description()->as_video(); + const MediaContentDescription* acd = ac->media_description(); + const MediaContentDescription* vcd = vc->media_description(); EXPECT_EQ(GetParam(), acd->protocol()); EXPECT_EQ(GetParam(), vcd->protocol()); } INSTANTIATE_TEST_SUITE_P(MediaProtocolPatternTest, MediaProtocolTest, - ::testing::ValuesIn(kMediaProtocols)); + ValuesIn(kMediaProtocols)); INSTANTIATE_TEST_SUITE_P(MediaProtocolDtlsPatternTest, MediaProtocolTest, - ::testing::ValuesIn(kMediaProtocolsDtls)); + ValuesIn(kMediaProtocolsDtls)); -TEST_F(MediaSessionDescriptionFactoryTest, TestSetAudioCodecs) { - webrtc::test::ScopedKeyValueConfig field_trials; - TransportDescriptionFactory tdf(field_trials); - UniqueRandomIdGenerator ssrc_generator; - MediaSessionDescriptionFactory sf(&tdf, &ssrc_generator); - std::vector send_codecs = MAKE_VECTOR(kAudioCodecs1); - std::vector recv_codecs = MAKE_VECTOR(kAudioCodecs2); - - // The merged list of codecs should contain any send codecs that are also - // nominally in the receive codecs list. Payload types should be picked from - // the send codecs and a number-of-channels of 0 and 1 should be equivalent - // (set to 1). This equals what happens when the send codecs are used in an - // offer and the receive codecs are used in the following answer. 
- const std::vector sendrecv_codecs = - MAKE_VECTOR(kAudioCodecsAnswer); - const std::vector no_codecs; - - RTC_CHECK_EQ(send_codecs[1].name, "iLBC") - << "Please don't change shared test data!"; - RTC_CHECK_EQ(recv_codecs[2].name, "iLBC") - << "Please don't change shared test data!"; - // Alter iLBC send codec to have zero channels, to test that that is handled - // properly. - send_codecs[1].channels = 0; - - // Alter iLBC receive codec to be lowercase, to test that case conversions - // are handled properly. - recv_codecs[2].name = "ilbc"; - - // Test proper merge - sf.set_audio_codecs(send_codecs, recv_codecs); - EXPECT_EQ(send_codecs, sf.audio_send_codecs()); - EXPECT_EQ(recv_codecs, sf.audio_recv_codecs()); - EXPECT_EQ(sendrecv_codecs, sf.audio_sendrecv_codecs()); - - // Test empty send codecs list - sf.set_audio_codecs(no_codecs, recv_codecs); - EXPECT_EQ(no_codecs, sf.audio_send_codecs()); - EXPECT_EQ(recv_codecs, sf.audio_recv_codecs()); - EXPECT_EQ(no_codecs, sf.audio_sendrecv_codecs()); - - // Test empty recv codecs list - sf.set_audio_codecs(send_codecs, no_codecs); - EXPECT_EQ(send_codecs, sf.audio_send_codecs()); - EXPECT_EQ(no_codecs, sf.audio_recv_codecs()); - EXPECT_EQ(no_codecs, sf.audio_sendrecv_codecs()); - - // Test all empty codec lists - sf.set_audio_codecs(no_codecs, no_codecs); - EXPECT_EQ(no_codecs, sf.audio_send_codecs()); - EXPECT_EQ(no_codecs, sf.audio_recv_codecs()); - EXPECT_EQ(no_codecs, sf.audio_sendrecv_codecs()); -} - -namespace { // Compare the two vectors of codecs ignoring the payload type. -template bool CodecsMatch(const std::vector& codecs1, - const std::vector& codecs2, - const webrtc::FieldTrialsView* field_trials) { + const std::vector& codecs2) { if (codecs1.size() != codecs2.size()) { return false; } for (size_t i = 0; i < codecs1.size(); ++i) { - if (!codecs1[i].Matches(codecs2[i], field_trials)) { + if (!codecs1[i].Matches(codecs2[i])) { return false; } } @@ -4623,28 +4790,35 @@ bool CodecsMatch(const std::vector& codecs1, } void TestAudioCodecsOffer(RtpTransceiverDirection direction) { - webrtc::test::ScopedKeyValueConfig field_trials; + ScopedKeyValueConfig field_trials; TransportDescriptionFactory tdf(field_trials); + tdf.set_certificate(RTCCertificate::Create( + std::unique_ptr(new FakeSSLIdentity("id")))); + UniqueRandomIdGenerator ssrc_generator; - MediaSessionDescriptionFactory sf(&tdf, &ssrc_generator); - const std::vector send_codecs = MAKE_VECTOR(kAudioCodecs1); - const std::vector recv_codecs = MAKE_VECTOR(kAudioCodecs2); - const std::vector sendrecv_codecs = - MAKE_VECTOR(kAudioCodecsAnswer); - sf.set_audio_codecs(send_codecs, recv_codecs); + CodecLookupHelperForTesting codec_lookup_helper(field_trials); + MediaSessionDescriptionFactory sf(nullptr, false, &ssrc_generator, &tdf, + &codec_lookup_helper); + const std::vector send_codecs = MAKE_VECTOR(kAudioCodecs1); + const std::vector recv_codecs = MAKE_VECTOR(kAudioCodecs2); + const std::vector sendrecv_codecs = MAKE_VECTOR(kAudioCodecsAnswer); + codec_lookup_helper.GetCodecVendor()->set_audio_codecs(send_codecs, + recv_codecs); MediaSessionOptions opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", direction, kActive, - &opts); + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", direction, + kActive, &opts); if (direction == RtpTransceiverDirection::kSendRecv || direction == RtpTransceiverDirection::kSendOnly) { - AttachSenderToMediaDescriptionOptions( - "audio", MEDIA_TYPE_AUDIO, kAudioTrack1, {kMediaStream1}, 1, &opts); + 
AttachSenderToMediaDescriptionOptions("audio", webrtc::MediaType::AUDIO, + kAudioTrack1, {kMediaStream1}, 1, + &opts); } - std::unique_ptr offer = sf.CreateOffer(opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + std::unique_ptr offer = + sf.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); ContentInfo* ac = offer->GetContentByName("audio"); // If the factory didn't add any audio content to the offer, we cannot check @@ -4652,32 +4826,30 @@ void TestAudioCodecsOffer(RtpTransceiverDirection direction) { // send nor receive audio. The checks are still in place if at some point // we'd instead create an inactive stream. if (ac) { - AudioContentDescription* acd = ac->media_description()->as_audio(); + MediaContentDescription* acd = ac->media_description(); // sendrecv and inactive should both present lists as if the channel was // to be used for sending and receiving. Inactive essentially means it // might eventually be used anything, but we don't know more at this // moment. if (acd->direction() == RtpTransceiverDirection::kSendOnly) { - EXPECT_TRUE( - CodecsMatch(send_codecs, acd->codecs(), &field_trials)); + EXPECT_TRUE(CodecsMatch(send_codecs, acd->codecs())); } else if (acd->direction() == RtpTransceiverDirection::kRecvOnly) { - EXPECT_TRUE( - CodecsMatch(recv_codecs, acd->codecs(), &field_trials)); + EXPECT_TRUE(CodecsMatch(recv_codecs, acd->codecs())); } else { - EXPECT_TRUE(CodecsMatch(sendrecv_codecs, acd->codecs(), - &field_trials)); + EXPECT_TRUE(CodecsMatch(sendrecv_codecs, acd->codecs())); } } } -static const AudioCodec kOfferAnswerCodecs[] = { - cricket::CreateAudioCodec(0, "codec0", 16000, 1), - cricket::CreateAudioCodec(1, "codec1", 8000, 1), - cricket::CreateAudioCodec(2, "codec2", 8000, 1), - cricket::CreateAudioCodec(3, "codec3", 8000, 1), - cricket::CreateAudioCodec(4, "codec4", 8000, 2), - cricket::CreateAudioCodec(5, "codec5", 32000, 1), - cricket::CreateAudioCodec(6, "codec6", 48000, 1)}; +// Since the PT suggester reserves the static range for specific codecs, +// PT numbers from the 36-63 range are used. +const Codec kOfferAnswerCodecs[] = {CreateAudioCodec(40, "codec0", 16000, 1), + CreateAudioCodec(41, "codec1", 8000, 1), + CreateAudioCodec(42, "codec2", 8000, 1), + CreateAudioCodec(43, "codec3", 8000, 1), + CreateAudioCodec(44, "codec4", 8000, 2), + CreateAudioCodec(45, "codec5", 32000, 1), + CreateAudioCodec(46, "codec6", 48000, 1)}; /* The codecs groups below are chosen as per the matrix below. The objective * is to have different sets of codecs in the inputs, to get unique sets of @@ -4698,18 +4870,18 @@ static const AudioCodec kOfferAnswerCodecs[] = { * 6 | x x x | x x x | x x x x x */ // Codecs used by offerer in the AudioCodecsAnswerTest -static const int kOfferSendCodecs[] = {0, 1, 3, 5, 6}; -static const int kOfferRecvCodecs[] = {1, 2, 3, 4, 6}; +const int kOfferSendCodecs[] = {0, 1, 3, 5, 6}; +const int kOfferRecvCodecs[] = {1, 2, 3, 4, 6}; // Codecs used in the answerer in the AudioCodecsAnswerTest. The order is // jumbled to catch the answer not following the order in the offer. 
-static const int kAnswerSendCodecs[] = {6, 5, 2, 3, 4}; -static const int kAnswerRecvCodecs[] = {6, 5, 4, 1, 0}; +const int kAnswerSendCodecs[] = {6, 5, 2, 3, 4}; +const int kAnswerRecvCodecs[] = {6, 5, 4, 1, 0}; // The resulting sets of codecs in the answer in the AudioCodecsAnswerTest -static const int kResultSend_RecvCodecs[] = {0, 1, 5, 6}; -static const int kResultRecv_SendCodecs[] = {2, 3, 4, 6}; -static const int kResultSendrecv_SendCodecs[] = {3, 6}; -static const int kResultSendrecv_RecvCodecs[] = {1, 6}; -static const int kResultSendrecv_SendrecvCodecs[] = {6}; +const int kResultSend_RecvCodecs[] = {0, 1, 5, 6}; +const int kResultRecv_SendCodecs[] = {2, 3, 4, 6}; +const int kResultSendrecv_SendCodecs[] = {3, 6}; +const int kResultSendrecv_RecvCodecs[] = {1, 6}; +const int kResultSendrecv_SendrecvCodecs[] = {6}; template std::vector VectorFromIndices(const T* array, const int (&indices)[IDXS]) { @@ -4724,45 +4896,55 @@ std::vector VectorFromIndices(const T* array, const int (&indices)[IDXS]) { void TestAudioCodecsAnswer(RtpTransceiverDirection offer_direction, RtpTransceiverDirection answer_direction, bool add_legacy_stream) { - webrtc::test::ScopedKeyValueConfig field_trials; + ScopedKeyValueConfig field_trials; TransportDescriptionFactory offer_tdf(field_trials); TransportDescriptionFactory answer_tdf(field_trials); + offer_tdf.set_certificate(RTCCertificate::Create( + std::unique_ptr(new FakeSSLIdentity("offer_id")))); + answer_tdf.set_certificate(RTCCertificate::Create( + std::unique_ptr(new FakeSSLIdentity("answer_id")))); UniqueRandomIdGenerator ssrc_generator1, ssrc_generator2; - MediaSessionDescriptionFactory offer_factory(&offer_tdf, &ssrc_generator1); - MediaSessionDescriptionFactory answer_factory(&answer_tdf, &ssrc_generator2); - - offer_factory.set_audio_codecs( + CodecLookupHelperForTesting offer_codec_lookup_helper(field_trials); + MediaSessionDescriptionFactory offer_factory( + nullptr, false, &ssrc_generator1, &offer_tdf, &offer_codec_lookup_helper); + CodecLookupHelperForTesting answer_codec_lookup_helper(field_trials); + MediaSessionDescriptionFactory answer_factory(nullptr, false, + &ssrc_generator2, &answer_tdf, + &answer_codec_lookup_helper); + + offer_codec_lookup_helper.GetCodecVendor()->set_audio_codecs( VectorFromIndices(kOfferAnswerCodecs, kOfferSendCodecs), VectorFromIndices(kOfferAnswerCodecs, kOfferRecvCodecs)); - answer_factory.set_audio_codecs( + answer_codec_lookup_helper.GetCodecVendor()->set_audio_codecs( VectorFromIndices(kOfferAnswerCodecs, kAnswerSendCodecs), VectorFromIndices(kOfferAnswerCodecs, kAnswerRecvCodecs)); MediaSessionOptions offer_opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", offer_direction, + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", offer_direction, kActive, &offer_opts); if (webrtc::RtpTransceiverDirectionHasSend(offer_direction)) { - AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, + AttachSenderToMediaDescriptionOptions("audio", webrtc::MediaType::AUDIO, kAudioTrack1, {kMediaStream1}, 1, &offer_opts); } std::unique_ptr offer = - offer_factory.CreateOffer(offer_opts, NULL); - ASSERT_TRUE(offer.get() != NULL); + offer_factory.CreateOfferOrError(offer_opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); MediaSessionOptions answer_opts; - AddMediaDescriptionOptions(MEDIA_TYPE_AUDIO, "audio", answer_direction, - kActive, &answer_opts); + AddMediaDescriptionOptions(webrtc::MediaType::AUDIO, "audio", + answer_direction, kActive, &answer_opts); if 
(webrtc::RtpTransceiverDirectionHasSend(answer_direction)) { - AttachSenderToMediaDescriptionOptions("audio", MEDIA_TYPE_AUDIO, + AttachSenderToMediaDescriptionOptions("audio", webrtc::MediaType::AUDIO, kAudioTrack1, {kMediaStream1}, 1, &answer_opts); } std::unique_ptr answer = - answer_factory.CreateAnswer(offer.get(), answer_opts, NULL); + answer_factory.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); const ContentInfo* ac = answer->GetContentByName("audio"); // If the factory didn't add any audio content to the answer, we cannot @@ -4770,10 +4952,10 @@ void TestAudioCodecsAnswer(RtpTransceiverDirection offer_direction, // to send nor receive audio. The checks are still in place if at some point // we'd instead create an inactive stream. if (ac) { - ASSERT_EQ(MEDIA_TYPE_AUDIO, ac->media_description()->type()); - const AudioContentDescription* acd = ac->media_description()->as_audio(); + ASSERT_EQ(webrtc::MediaType::AUDIO, ac->media_description()->type()); + const MediaContentDescription* acd = ac->media_description(); - std::vector target_codecs; + std::vector target_codecs; // For offers with sendrecv or inactive, we should never reply with more // codecs than offered, with these codec sets. switch (offer_direction) { @@ -4806,12 +4988,12 @@ void TestAudioCodecsAnswer(RtpTransceiverDirection offer_direction, RTC_DCHECK_NOTREACHED(); } - auto format_codecs = [](const std::vector& codecs) { - rtc::StringBuilder os; + auto format_codecs = [](const std::vector& codecs) { + StringBuilder os; bool first = true; os << "{"; for (const auto& c : codecs) { - os << (first ? " " : ", ") << c.id; + os << (first ? " " : ", ") << c.id << ":" << c.name; first = false; } os << " }"; @@ -4833,10 +5015,7 @@ void TestAudioCodecsAnswer(RtpTransceiverDirection offer_direction, } } -} // namespace - -class AudioCodecsOfferTest - : public ::testing::TestWithParam {}; +using AudioCodecsOfferTest = testing::TestWithParam; TEST_P(AudioCodecsOfferTest, TestCodecsInOffer) { TestAudioCodecsOffer(GetParam()); @@ -4844,31 +5023,1193 @@ TEST_P(AudioCodecsOfferTest, TestCodecsInOffer) { INSTANTIATE_TEST_SUITE_P(MediaSessionDescriptionFactoryTest, AudioCodecsOfferTest, - ::testing::Values(RtpTransceiverDirection::kSendOnly, - RtpTransceiverDirection::kRecvOnly, - RtpTransceiverDirection::kSendRecv, - RtpTransceiverDirection::kInactive)); + Values(RtpTransceiverDirection::kSendOnly, + RtpTransceiverDirection::kRecvOnly, + RtpTransceiverDirection::kSendRecv, + RtpTransceiverDirection::kInactive)); -class AudioCodecsAnswerTest - : public ::testing::TestWithParam<::testing::tuple> {}; +using AudioCodecsAnswerTest = testing::TestWithParam< + std::tuple>; TEST_P(AudioCodecsAnswerTest, TestCodecsInAnswer) { - TestAudioCodecsAnswer(::testing::get<0>(GetParam()), - ::testing::get<1>(GetParam()), - ::testing::get<2>(GetParam())); -} - -INSTANTIATE_TEST_SUITE_P( - MediaSessionDescriptionFactoryTest, - AudioCodecsAnswerTest, - ::testing::Combine(::testing::Values(RtpTransceiverDirection::kSendOnly, - RtpTransceiverDirection::kRecvOnly, - RtpTransceiverDirection::kSendRecv, - RtpTransceiverDirection::kInactive), - ::testing::Values(RtpTransceiverDirection::kSendOnly, - RtpTransceiverDirection::kRecvOnly, - RtpTransceiverDirection::kSendRecv, - RtpTransceiverDirection::kInactive), - ::testing::Bool())); + TestAudioCodecsAnswer(std::get<0>(GetParam()), std::get<1>(GetParam()), + std::get<2>(GetParam())); +} + +INSTANTIATE_TEST_SUITE_P(MediaSessionDescriptionFactoryTest, + AudioCodecsAnswerTest, + 
Combine(Values(RtpTransceiverDirection::kSendOnly, + RtpTransceiverDirection::kRecvOnly, + RtpTransceiverDirection::kSendRecv, + RtpTransceiverDirection::kInactive), + Values(RtpTransceiverDirection::kSendOnly, + RtpTransceiverDirection::kRecvOnly, + RtpTransceiverDirection::kSendRecv, + RtpTransceiverDirection::kInactive), + Bool())); + +#ifdef RTC_ENABLE_H265 +class VideoCodecsOfferH265LevelIdTest : public testing::Test { + public: + VideoCodecsOfferH265LevelIdTest() + : tdf_offerer_(field_trials_), + tdf_answerer_(field_trials_), + sf_offerer_(nullptr, + false, + &ssrc_generator_offerer_, + &tdf_offerer_, + &codec_lookup_helper_offerer_), + sf_answerer_(nullptr, + false, + &ssrc_generator_answerer_, + &tdf_answerer_, + &codec_lookup_helper_answerer_), + codec_lookup_helper_offerer_(field_trials_), + codec_lookup_helper_answerer_(field_trials_) { + tdf_offerer_.set_certificate(RTCCertificate::Create( + std::unique_ptr(new FakeSSLIdentity("offer_id")))); + tdf_answerer_.set_certificate(RTCCertificate::Create( + std::unique_ptr(new FakeSSLIdentity("answer_id")))); + } + + void CheckH265Level(const std::vector& codecs, + const std::string& expected_level) { + for (const auto& codec : codecs) { + if (codec.name == "H265") { + auto it = codec.params.find("level-id"); + ASSERT_TRUE(it != codec.params.end()); + EXPECT_EQ(it->second, expected_level); + } + } + } + + protected: + ScopedKeyValueConfig field_trials_; + TransportDescriptionFactory tdf_offerer_; + TransportDescriptionFactory tdf_answerer_; + UniqueRandomIdGenerator ssrc_generator_offerer_; + UniqueRandomIdGenerator ssrc_generator_answerer_; + MediaSessionDescriptionFactory sf_offerer_; + MediaSessionDescriptionFactory sf_answerer_; + CodecLookupHelperForTesting codec_lookup_helper_offerer_; + CodecLookupHelperForTesting codec_lookup_helper_answerer_; +}; + +// Both sides support H.265 level 5.2 for encoding and decoding. 
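Editor's aside (illustrative sketch, not part of the patch): the CheckH265Level() helper above reads the "level-id" entry from a codec's parameter map, and the level tests that follow expect the lower of the two advertised capabilities to win. The sketch below shows that lookup-and-compare idea in isolation; LevelIdOrDefault and NegotiatedLevelId are hypothetical helpers, and the numeric values follow the RFC 7798 level-id scale (30 x level, e.g. 156 = level 5.2, 180 = level 6.0), which is an assumption about the constants used by these tests.

#include <algorithm>
#include <iostream>
#include <map>
#include <string>

using CodecParams = std::map<std::string, std::string>;

int LevelIdOrDefault(const CodecParams& params) {
  auto it = params.find("level-id");
  // RFC 7798: an absent level-id defaults to 93 (level 3.1).
  return it == params.end() ? 93 : std::stoi(it->second);
}

int NegotiatedLevelId(const CodecParams& local, const CodecParams& remote) {
  // The lower capability bounds what both sides can handle.
  return std::min(LevelIdOrDefault(local), LevelIdOrDefault(remote));
}

int main() {
  CodecParams offerer = {{"level-id", "156"}};   // level 5.2
  CodecParams answerer = {{"level-id", "180"}};  // level 6.0
  std::cout << NegotiatedLevelId(offerer, answerer) << "\n";  // prints 156
  return 0;
}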
+// Offer: level 5.2, SendRecv
+// Answer: level 5.2, SendRecv
+TEST_F(VideoCodecsOfferH265LevelIdTest, TestSendRecvSymmetrical) {
+  const std::vector<Codec> send_codecs = MAKE_VECTOR(kVideoCodecsH265Level52);
+  const std::vector<Codec> recv_codecs = MAKE_VECTOR(kVideoCodecsH265Level52);
+  const std::vector<Codec> sendrecv_codecs =
+      MAKE_VECTOR(kVideoCodecsH265Level52);
+  codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs(send_codecs,
+                                                                  recv_codecs);
+  codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs(recv_codecs,
+                                                                   send_codecs);
+  EXPECT_EQ(sendrecv_codecs, codec_lookup_helper_offerer_.GetCodecVendor()
+                                 ->video_sendrecv_codecs()
+                                 .codecs());
+
+  MediaSessionOptions opts;
+  AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &opts);
+
+  AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO,
+                                        kVideoTrack1, {kMediaStream1}, 1,
+                                        &opts);
+
+  std::unique_ptr<SessionDescription> offer =
+      sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue();
+  ASSERT_TRUE(offer.get());
+  const ContentInfo* oc = offer->GetContentByName("video");
+  ASSERT_TRUE(oc);
+  const MediaContentDescription* ocd = oc->media_description();
+  EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), ocd->codecs()));
+  CheckH265Level(ocd->codecs(), kVideoCodecsH265Level52LevelId);
+
+  MediaSessionOptions answer_opts;
+  AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video",
+                             RtpTransceiverDirection::kSendRecv, kActive,
+                             &answer_opts);
+  AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO,
+                                        kVideoTrack1, {kMediaStream1}, 1,
+                                        &answer_opts);
+
+  std::unique_ptr<SessionDescription> answer =
+      sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr)
+          .MoveValue();
+  ASSERT_TRUE(answer.get());
+  const ContentInfo* ac = answer->GetContentByName("video");
+  ASSERT_TRUE(ac);
+  const MediaContentDescription* acd = ac->media_description();
+  EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), acd->codecs()));
+  CheckH265Level(acd->codecs(), kVideoCodecsH265Level52LevelId);
+}
+
+// Both sides support H.265 level 6.0 for encoding and decoding.
+// Offer: level 6.0, SendOnly +// Answer: level 6.0, RecvOnly +TEST_F(VideoCodecsOfferH265LevelIdTest, TestSendOnlySymmetrical) { + const std::vector send_codecs = MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector recv_codecs = MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs(send_codecs, + recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs(recv_codecs, + send_codecs); + EXPECT_EQ(sendrecv_codecs, codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendOnly, kActive, + &opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level6), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level6LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kRecvOnly, kActive, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level6), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level6LevelId); +} + +// Both sides support H.265 level 5.2 for encoding and decoding. 
+// Offer: level 5.2, RecvOnly +// Answer: level 5.2, SendOnly +TEST_F(VideoCodecsOfferH265LevelIdTest, TestRecvOnlySymmetrical) { + const std::vector send_codecs = MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector recv_codecs = MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs(send_codecs, + recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs(recv_codecs, + send_codecs); + EXPECT_EQ(sendrecv_codecs, codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kRecvOnly, kActive, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level52LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendOnly, kActive, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level52LevelId); +} + +// Offerer encodes up to level 5.2, and decodes up to level 6.0. +// Answerer encodes up to level 6.0, and decodes up to level 5.2. 
+// Offer: level 5.2, SendRecv +// Answer: level 5.2, SendRecv +TEST_F(VideoCodecsOfferH265LevelIdTest, + SendRecvOffererEncode52Decode60AnswererEncode60Decode52) { + const std::vector offerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector offerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector offerer_sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector answerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector answerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs( + offerer_send_codecs, offerer_recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs( + answerer_send_codecs, answerer_recv_codecs); + EXPECT_EQ(offerer_sendrecv_codecs, + codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level52LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendRecv, kActive, + &answer_opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level52LevelId); +} + +// Offerer encodes up to level 6, and decodes up to level 5.2. +// Answerer encodes up to level 5.2, and decodes up to level 6.0. 
+// Offer: level 5.2, SendRecv +// Answer: level 5.2, SendRecv +TEST_F(VideoCodecsOfferH265LevelIdTest, + SendRecvOffererEncode60Decode52AnswererEncode52Decode60) { + const std::vector offerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector offerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector offerer_sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector answerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector answerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs( + offerer_send_codecs, offerer_recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs( + answerer_send_codecs, answerer_recv_codecs); + EXPECT_EQ(offerer_sendrecv_codecs, + codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level52LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendRecv, kActive, + &answer_opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level52LevelId); +} + +// Offerer encodes up to level 6, and decodes up to level 5.2. +// Answerer encodes up to level 3.1, and decodes up to level 5.0. 
+// Offer: level 5.2, SendRecv +// Answer: level 3.1, SendRecv +TEST_F(VideoCodecsOfferH265LevelIdTest, + SendRecvOffererEncode60Decode52AnswererEncode31Decode50) { + const std::vector offerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector offerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector offerer_sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector answerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level31); + const std::vector answerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level5); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs( + offerer_send_codecs, offerer_recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs( + answerer_send_codecs, answerer_recv_codecs); + EXPECT_EQ(offerer_sendrecv_codecs, + codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level52LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendRecv, kActive, + &answer_opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level31), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level31LevelId); + + std::unique_ptr reoffer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(reoffer.get()); + const ContentInfo* reoffer_oc = reoffer->GetContentByName("video"); + ASSERT_TRUE(reoffer_oc); + const MediaContentDescription* reoffer_ocd = reoffer_oc->media_description(); + EXPECT_TRUE( + CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), reoffer_ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level52LevelId); +} + +// Offerer encodes up to level 6, and decodes up to level 5.2. +// Answerer encodes up to level 4, and decodes up to level 6. 
+// Offer: level 5.2, SendRecv +// Answer: level 4, SendRecv +TEST_F(VideoCodecsOfferH265LevelIdTest, + SendRecvOffererEncode60Decode52AnswererEncode40Decode60) { + const std::vector offerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector offerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector offerer_sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector answerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level4); + const std::vector answerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs( + offerer_send_codecs, offerer_recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs( + answerer_send_codecs, answerer_recv_codecs); + EXPECT_EQ(offerer_sendrecv_codecs, + codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level52LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendRecv, kActive, + &answer_opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level4), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level4LevelId); +} + +// Offerer encodes up to level 4, and decodes up to level 6. +// Answerer encodes up to level 6, and decodes up to level 5.2. 
+// Offer: level 4, SendRecv +// Answer: level 4, SendRecv +TEST_F(VideoCodecsOfferH265LevelIdTest, + SendRecvOffererEncode40Decode60AnswererEncode60Decode52) { + const std::vector offerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level4); + const std::vector offerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector offerer_sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level4); + const std::vector answerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector answerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs( + offerer_send_codecs, offerer_recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs( + answerer_send_codecs, answerer_recv_codecs); + EXPECT_EQ(offerer_sendrecv_codecs, + codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level4), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level4LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendRecv, kActive, + &answer_opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level4), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level4LevelId); +} + +// Offerer encodes up to level 5.2, and decodes up to level 6. +// Answerer encodes up to level 6, and decodes up to level 5.2. 
+// Offer: level 6, RecvOnly +// Answer: level 6, SendOnly +TEST_F(VideoCodecsOfferH265LevelIdTest, + RecvOnlyOffererEncode52Decode60AnswererEncode60Decode52) { + const std::vector offerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector offerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector offerer_sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector answerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector answerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs( + offerer_send_codecs, offerer_recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs( + answerer_send_codecs, answerer_recv_codecs); + EXPECT_EQ(offerer_sendrecv_codecs, + codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kRecvOnly, kActive, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level6), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level6LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendOnly, kActive, + &answer_opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level6), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level6LevelId); +} + +// Offerer encodes up to level 6, and decodes up to level 5.2. +// Answerer encodes up to level 5.2, and decodes up to level 6. 
+// Offer: level 5.2, RecvOnly +// Answer: level 5.2, SendOnly +TEST_F(VideoCodecsOfferH265LevelIdTest, + RecvOnlyOffererEncode60Decode52AnswererEncode52Decode60) { + const std::vector offerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector offerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector offerer_sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector answerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector answerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs( + offerer_send_codecs, offerer_recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs( + answerer_send_codecs, answerer_recv_codecs); + EXPECT_EQ(offerer_sendrecv_codecs, + codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kRecvOnly, kActive, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level52LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendOnly, kActive, + &answer_opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level52LevelId); +} + +// Offerer encodes up to level 6, and decodes up to level 5.2. +// Answerer encodes up to level 3.1, and decodes up to level 5. 
+// Offer: level 5.2, RecvOnly +// Answer: level 3.1, SendOnly +TEST_F(VideoCodecsOfferH265LevelIdTest, + RecvOnlyOffererEncode60Decode52AnswererEncode31Decode50) { + const std::vector offerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector offerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector offerer_sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector answerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level31); + const std::vector answerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level5); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs( + offerer_send_codecs, offerer_recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs( + answerer_send_codecs, answerer_recv_codecs); + EXPECT_EQ(offerer_sendrecv_codecs, + codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kRecvOnly, kActive, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level52LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendOnly, kActive, + &answer_opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level31), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level31LevelId); +} + +// Offerer encodes up to level 6, and decodes up to level 5.2. +// Answerer encodes up to level 4, and decodes up to level 6. 
+// Offer: level 5.2, RecvOnly +// Answer: level 4, SendOnly +TEST_F(VideoCodecsOfferH265LevelIdTest, + RecvOnlyOffererEncode60Decode52AnswererEncode40Decode60) { + const std::vector offerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector offerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector offerer_sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector answerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level4); + const std::vector answerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs( + offerer_send_codecs, offerer_recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs( + answerer_send_codecs, answerer_recv_codecs); + EXPECT_EQ(offerer_sendrecv_codecs, + codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kRecvOnly, kActive, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level52LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendOnly, kActive, + &answer_opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level4), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level4LevelId); +} + +// Offerer encodes up to level 4, and decodes up to level 6. +// Answerer encodes up to level 6, and decodes up to level 5.2. 
+// Offer: level 6, RecvOnly +// Answer: level 6, SendOnly +TEST_F(VideoCodecsOfferH265LevelIdTest, + RecvOnlyOffererEncode40Decode60AnswererEncode60Decode52) { + const std::vector offerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level4); + const std::vector offerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector offerer_sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level4); + const std::vector answerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector answerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs( + offerer_send_codecs, offerer_recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs( + answerer_send_codecs, answerer_recv_codecs); + EXPECT_EQ(offerer_sendrecv_codecs, + codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kRecvOnly, kActive, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level6), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level6LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendOnly, kActive, + &answer_opts); + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level6), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level6LevelId); +} + +// Offerer encodes up to level 5.2, and decodes up to level 6. +// Answerer encodes up to level 6, and decodes up to level 5.2. 
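+// The SendOnly-offer cases below mirror the RecvOnly ones: the offered
+// level-id now tracks the offerer's encode capability, and the RecvOnly
+// answer is limited by the answerer's decode capability, never exceeding
+// the offered level.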
+// Offer: level 5.2, SendOnly +// Answer: level 5.2, RecvOnly +TEST_F(VideoCodecsOfferH265LevelIdTest, + SendOnlyOffererEncode52Decode60AnswererEncode60Decode52) { + const std::vector offerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector offerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector offerer_sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector answerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector answerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs( + offerer_send_codecs, offerer_recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs( + answerer_send_codecs, answerer_recv_codecs); + EXPECT_EQ(offerer_sendrecv_codecs, + codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendOnly, kActive, + &opts); + + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level52LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kRecvOnly, kActive, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level52), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level52LevelId); +} + +// Offerer encodes up to level 6, and decodes up to level 5.2. +// Answerer encodes up to level 5.2, and decodes up to level 6. 
+// Offer: level 6, SendOnly +// Answer: level 6, RecvOnly +TEST_F(VideoCodecsOfferH265LevelIdTest, + SendOnlyOffererEncode60Decode52AnswererEncode52Decode60) { + const std::vector offerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector offerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector offerer_sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector answerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector answerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs( + offerer_send_codecs, offerer_recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs( + answerer_send_codecs, answerer_recv_codecs); + EXPECT_EQ(offerer_sendrecv_codecs, + codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendOnly, kActive, + &opts); + + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level6), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level6LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kRecvOnly, kActive, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level6), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level6LevelId); +} + +// Offerer encodes up to level 6, and decodes up to level 5.2. +// Answerer encodes up to level 3.1, and decodes up to level 5. 
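+// Here the answerer's decoder tops out below the offered level, so the
+// RecvOnly answer downgrades the level-id from 6 to 5.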
+// Offer: level 6, SendOnly +// Answer: level 5, RecvOnly +TEST_F(VideoCodecsOfferH265LevelIdTest, + SendOnlyOffererEncode60Decode52AnswererEncode31Decode50) { + const std::vector offerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector offerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector offerer_sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector answerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level31); + const std::vector answerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level5); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs( + offerer_send_codecs, offerer_recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs( + answerer_send_codecs, answerer_recv_codecs); + EXPECT_EQ(offerer_sendrecv_codecs, + codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendOnly, kActive, + &opts); + + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level6), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level6LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kRecvOnly, kActive, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level5), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level5LevelId); +} + +// Offerer encodes up to level 6, and decodes up to level 5.2. +// Answerer encodes up to level 4, and decodes up to level 6. 
+// Offer: level 6, SendOnly +// Answer: level 6, RecvOnly +TEST_F(VideoCodecsOfferH265LevelIdTest, + SendOnlyOffererEncode60Decode52AnswererEncode40Decode60) { + const std::vector offerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector offerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector offerer_sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + const std::vector answerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level4); + const std::vector answerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs( + offerer_send_codecs, offerer_recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs( + answerer_send_codecs, answerer_recv_codecs); + EXPECT_EQ(offerer_sendrecv_codecs, + codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendOnly, kActive, + &opts); + + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level6), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level6LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kRecvOnly, kActive, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level6), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level6LevelId); +} + +// Offerer encodes up to level 4, and decodes up to level 6. +// Answerer encodes up to level 6, and decodes up to level 5.2. 
+// Offer: level 4, SendOnly +// Answer: level 4, RecvOnly +TEST_F(VideoCodecsOfferH265LevelIdTest, + SendOnlyOffererEncode40Decode60AnswererEncode60Decode52) { + const std::vector offerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level4); + const std::vector offerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector offerer_sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level4); + const std::vector answerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector answerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs( + offerer_send_codecs, offerer_recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs( + answerer_send_codecs, answerer_recv_codecs); + EXPECT_EQ(offerer_sendrecv_codecs, + codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendOnly, kActive, + &opts); + + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &opts); + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level4), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level4LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kRecvOnly, kActive, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level4), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level4LevelId); +} + +TEST_F(VideoCodecsOfferH265LevelIdTest, + SendOnlyOffererEncode40Decode60AnswererEncode60Decode52WithPreference) { + const std::vector offerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level4); + const std::vector offerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector offerer_sendrecv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level4); + const std::vector answerer_send_codecs = + MAKE_VECTOR(kVideoCodecsH265Level6); + const std::vector answerer_recv_codecs = + MAKE_VECTOR(kVideoCodecsH265Level52); + codec_lookup_helper_offerer_.GetCodecVendor()->set_video_codecs( + offerer_send_codecs, offerer_recv_codecs); + codec_lookup_helper_answerer_.GetCodecVendor()->set_video_codecs( + answerer_send_codecs, answerer_recv_codecs); + EXPECT_EQ(offerer_sendrecv_codecs, + codec_lookup_helper_offerer_.GetCodecVendor() + ->video_sendrecv_codecs() + .codecs()); + + MediaSessionOptions opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kSendRecv, kActive, + &opts); + + AttachSenderToMediaDescriptionOptions("video", webrtc::MediaType::VIDEO, + kVideoTrack1, {kMediaStream1}, 1, + &opts); + std::vector preferences; + for (const auto& codec : + codec_lookup_helper_offerer_.GetCodecVendor()->video_recv_codecs()) { + preferences.push_back(webrtc::ToRtpCodecCapability(codec)); + } 
+ opts.media_description_options[0].codec_preferences = preferences; + + std::unique_ptr offer = + sf_offerer_.CreateOfferOrError(opts, nullptr).MoveValue(); + ASSERT_TRUE(offer.get()); + const ContentInfo* oc = offer->GetContentByName("video"); + ASSERT_TRUE(oc); + const MediaContentDescription* ocd = oc->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level4), ocd->codecs())); + CheckH265Level(ocd->codecs(), kVideoCodecsH265Level4LevelId); + + MediaSessionOptions answer_opts; + AddMediaDescriptionOptions(webrtc::MediaType::VIDEO, "video", + RtpTransceiverDirection::kRecvOnly, kActive, + &answer_opts); + + std::unique_ptr answer = + sf_answerer_.CreateAnswerOrError(offer.get(), answer_opts, nullptr) + .MoveValue(); + ASSERT_TRUE(answer.get()); + const ContentInfo* ac = answer->GetContentByName("video"); + ASSERT_TRUE(ac); + const MediaContentDescription* acd = ac->media_description(); + EXPECT_TRUE(CodecsMatch(MAKE_VECTOR(kVideoCodecsH265Level4), acd->codecs())); + CheckH265Level(acd->codecs(), kVideoCodecsH265Level4LevelId); +} + +#endif + +} // namespace +} // namespace webrtc diff --git a/pc/media_stream.cc b/pc/media_stream.cc index 57be76c6c8..7c62bff9d0 100644 --- a/pc/media_stream.cc +++ b/pc/media_stream.cc @@ -12,8 +12,12 @@ #include +#include #include +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" +#include "api/scoped_refptr.h" #include "rtc_base/checks.h" namespace webrtc { @@ -29,29 +33,29 @@ static typename V::iterator FindTrack(V* vector, const std::string& track_id) { return it; } -rtc::scoped_refptr MediaStream::Create(const std::string& id) { - return rtc::make_ref_counted(id); +scoped_refptr MediaStream::Create(const std::string& id) { + return make_ref_counted(id); } MediaStream::MediaStream(const std::string& id) : id_(id) {} -bool MediaStream::AddTrack(rtc::scoped_refptr track) { +bool MediaStream::AddTrack(scoped_refptr track) { return AddTrack(&audio_tracks_, track); } -bool MediaStream::AddTrack(rtc::scoped_refptr track) { +bool MediaStream::AddTrack(scoped_refptr track) { return AddTrack(&video_tracks_, track); } -bool MediaStream::RemoveTrack(rtc::scoped_refptr track) { +bool MediaStream::RemoveTrack(scoped_refptr track) { return RemoveTrack(&audio_tracks_, track); } -bool MediaStream::RemoveTrack(rtc::scoped_refptr track) { +bool MediaStream::RemoveTrack(scoped_refptr track) { return RemoveTrack(&video_tracks_, track); } -rtc::scoped_refptr MediaStream::FindAudioTrack( +scoped_refptr MediaStream::FindAudioTrack( const std::string& track_id) { AudioTrackVector::iterator it = FindTrack(&audio_tracks_, track_id); if (it == audio_tracks_.end()) @@ -59,7 +63,7 @@ rtc::scoped_refptr MediaStream::FindAudioTrack( return *it; } -rtc::scoped_refptr MediaStream::FindVideoTrack( +scoped_refptr MediaStream::FindVideoTrack( const std::string& track_id) { VideoTrackVector::iterator it = FindTrack(&video_tracks_, track_id); if (it == video_tracks_.end()) @@ -68,8 +72,7 @@ rtc::scoped_refptr MediaStream::FindVideoTrack( } template -bool MediaStream::AddTrack(TrackVector* tracks, - rtc::scoped_refptr track) { +bool MediaStream::AddTrack(TrackVector* tracks, scoped_refptr track) { typename TrackVector::iterator it = FindTrack(tracks, track->id()); if (it != tracks->end()) return false; @@ -79,9 +82,8 @@ bool MediaStream::AddTrack(TrackVector* tracks, } template -bool MediaStream::RemoveTrack( - TrackVector* tracks, - rtc::scoped_refptr track) { +bool MediaStream::RemoveTrack(TrackVector* tracks, + scoped_refptr track) { 
RTC_DCHECK(tracks != NULL); if (!track) return false; diff --git a/pc/media_stream.h b/pc/media_stream.h index c033cf6f35..108a4b68cc 100644 --- a/pc/media_stream.h +++ b/pc/media_stream.h @@ -23,17 +23,17 @@ namespace webrtc { class MediaStream : public Notifier { public: - static rtc::scoped_refptr Create(const std::string& id); + static scoped_refptr Create(const std::string& id); std::string id() const override { return id_; } - bool AddTrack(rtc::scoped_refptr track) override; - bool AddTrack(rtc::scoped_refptr track) override; - bool RemoveTrack(rtc::scoped_refptr track) override; - bool RemoveTrack(rtc::scoped_refptr track) override; - rtc::scoped_refptr FindAudioTrack( + bool AddTrack(scoped_refptr track) override; + bool AddTrack(scoped_refptr track) override; + bool RemoveTrack(scoped_refptr track) override; + bool RemoveTrack(scoped_refptr track) override; + scoped_refptr FindAudioTrack( const std::string& track_id) override; - rtc::scoped_refptr FindVideoTrack( + scoped_refptr FindVideoTrack( const std::string& track_id) override; AudioTrackVector GetAudioTracks() override { return audio_tracks_; } @@ -44,10 +44,10 @@ class MediaStream : public Notifier { private: template - bool AddTrack(TrackVector* Tracks, rtc::scoped_refptr track); + bool AddTrack(TrackVector* Tracks, scoped_refptr track); template bool RemoveTrack(TrackVector* Tracks, - rtc::scoped_refptr track); + scoped_refptr track); const std::string id_; AudioTrackVector audio_tracks_; diff --git a/pc/media_stream_observer.cc b/pc/media_stream_observer.cc index 6264a7657a..86a32b7223 100644 --- a/pc/media_stream_observer.cc +++ b/pc/media_stream_observer.cc @@ -16,6 +16,7 @@ #include #include "absl/algorithm/container.h" +#include "api/media_stream_interface.h" namespace webrtc { diff --git a/pc/media_stream_observer.h b/pc/media_stream_observer.h index 83bbd20994..c60f76606b 100644 --- a/pc/media_stream_observer.h +++ b/pc/media_stream_observer.h @@ -39,7 +39,7 @@ class MediaStreamObserver : public ObserverInterface { void OnChanged() override; private: - rtc::scoped_refptr stream_; + scoped_refptr stream_; AudioTrackVector cached_audio_tracks_; VideoTrackVector cached_video_tracks_; const std::function diff --git a/pc/media_stream_proxy.h b/pc/media_stream_proxy.h index 3e263bfd8b..11ea22b429 100644 --- a/pc/media_stream_proxy.h +++ b/pc/media_stream_proxy.h @@ -25,16 +25,16 @@ PROXY_PRIMARY_THREAD_DESTRUCTOR() BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_METHOD0(AudioTrackVector, GetAudioTracks) PROXY_METHOD0(VideoTrackVector, GetVideoTracks) -PROXY_METHOD1(rtc::scoped_refptr, +PROXY_METHOD1(scoped_refptr, FindAudioTrack, const std::string&) -PROXY_METHOD1(rtc::scoped_refptr, +PROXY_METHOD1(scoped_refptr, FindVideoTrack, const std::string&) -PROXY_METHOD1(bool, AddTrack, rtc::scoped_refptr) -PROXY_METHOD1(bool, AddTrack, rtc::scoped_refptr) -PROXY_METHOD1(bool, RemoveTrack, rtc::scoped_refptr) -PROXY_METHOD1(bool, RemoveTrack, rtc::scoped_refptr) +PROXY_METHOD1(bool, AddTrack, scoped_refptr) +PROXY_METHOD1(bool, AddTrack, scoped_refptr) +PROXY_METHOD1(bool, RemoveTrack, scoped_refptr) +PROXY_METHOD1(bool, RemoveTrack, scoped_refptr) PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) END_PROXY_MAP(MediaStream) diff --git a/pc/media_stream_track_proxy.h b/pc/media_stream_track_proxy.h index 2af3aedb22..8775ecd344 100644 --- a/pc/media_stream_track_proxy.h +++ b/pc/media_stream_track_proxy.h @@ -33,7 +33,7 @@ 
BYPASS_PROXY_CONSTMETHOD0(AudioSourceInterface*, GetSource) PROXY_METHOD1(void, AddSink, AudioTrackSinkInterface*) PROXY_METHOD1(void, RemoveSink, AudioTrackSinkInterface*) PROXY_METHOD1(bool, GetSignalLevel, int*) -PROXY_METHOD0(rtc::scoped_refptr, GetAudioProcessor) +PROXY_METHOD0(scoped_refptr, GetAudioProcessor) PROXY_METHOD1(bool, set_enabled, bool) PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) @@ -50,9 +50,9 @@ PROXY_CONSTMETHOD0(ContentHint, content_hint) PROXY_METHOD1(void, set_content_hint, ContentHint) PROXY_SECONDARY_METHOD2(void, AddOrUpdateSink, - rtc::VideoSinkInterface*, - const rtc::VideoSinkWants&) -PROXY_SECONDARY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface*) + VideoSinkInterface*, + const VideoSinkWants&) +PROXY_SECONDARY_METHOD1(void, RemoveSink, VideoSinkInterface*) PROXY_SECONDARY_METHOD0(void, RequestRefreshFrame) BYPASS_PROXY_CONSTMETHOD0(VideoTrackSourceInterface*, GetSource) diff --git a/pc/media_stream_unittest.cc b/pc/media_stream_unittest.cc index f55ea203fb..9647080f49 100644 --- a/pc/media_stream_unittest.cc +++ b/pc/media_stream_unittest.cc @@ -12,6 +12,8 @@ #include +#include "api/media_stream_interface.h" +#include "api/scoped_refptr.h" #include "pc/audio_track.h" #include "pc/test/fake_video_track_source.h" #include "pc/video_track.h" @@ -23,8 +25,8 @@ static const char kStreamId1[] = "local_stream_1"; static const char kVideoTrackId[] = "dummy_video_cam_1"; static const char kAudioTrackId[] = "dummy_microphone_1"; -using rtc::scoped_refptr; using ::testing::Exactly; +using webrtc::scoped_refptr; namespace webrtc { @@ -57,7 +59,7 @@ class MediaStreamTest : public ::testing::Test { ASSERT_TRUE(stream_.get() != NULL); video_track_ = VideoTrack::Create( - kVideoTrackId, FakeVideoTrackSource::Create(), rtc::Thread::Current()); + kVideoTrackId, FakeVideoTrackSource::Create(), Thread::Current()); ASSERT_TRUE(video_track_.get() != NULL); EXPECT_EQ(MediaStreamTrackInterface::kLive, video_track_->state()); @@ -80,7 +82,7 @@ class MediaStreamTest : public ::testing::Test { EXPECT_FALSE(track->enabled()); } - rtc::AutoThread main_thread_; + AutoThread main_thread_; scoped_refptr stream_; scoped_refptr audio_track_; scoped_refptr video_track_; @@ -91,7 +93,7 @@ TEST_F(MediaStreamTest, GetTrackInfo) { ASSERT_EQ(1u, stream_->GetAudioTracks().size()); // Verify the video track. - scoped_refptr video_track( + scoped_refptr video_track( stream_->GetVideoTracks()[0]); EXPECT_EQ(0, video_track->id().compare(kVideoTrackId)); EXPECT_TRUE(video_track->enabled()); @@ -105,7 +107,7 @@ TEST_F(MediaStreamTest, GetTrackInfo) { EXPECT_TRUE(video_track->enabled()); // Verify the audio track. 
- scoped_refptr audio_track( + scoped_refptr audio_track( stream_->GetAudioTracks()[0]); EXPECT_EQ(0, audio_track->id().compare(kAudioTrackId)); EXPECT_TRUE(audio_track->enabled()); @@ -134,19 +136,17 @@ TEST_F(MediaStreamTest, RemoveTrack) { EXPECT_EQ(0u, stream_->GetVideoTracks().size()); EXPECT_EQ(0u, stream_->GetVideoTracks().size()); - EXPECT_FALSE(stream_->RemoveTrack(rtc::scoped_refptr())); - EXPECT_FALSE(stream_->RemoveTrack(rtc::scoped_refptr())); + EXPECT_FALSE(stream_->RemoveTrack(scoped_refptr())); + EXPECT_FALSE(stream_->RemoveTrack(scoped_refptr())); } TEST_F(MediaStreamTest, ChangeVideoTrack) { - scoped_refptr video_track( - stream_->GetVideoTracks()[0]); + scoped_refptr video_track(stream_->GetVideoTracks()[0]); ChangeTrack(video_track.get()); } TEST_F(MediaStreamTest, ChangeAudioTrack) { - scoped_refptr audio_track( - stream_->GetAudioTracks()[0]); + scoped_refptr audio_track(stream_->GetAudioTracks()[0]); ChangeTrack(audio_track.get()); } diff --git a/pc/peer_connection.cc b/pc/peer_connection.cc index cde3d91047..74c4c4531e 100644 --- a/pc/peer_connection.cc +++ b/pc/peer_connection.cc @@ -13,112 +13,158 @@ #include #include -#include +#include +#include +#include #include +#include #include #include #include +#include #include "absl/algorithm/container.h" #include "absl/strings/match.h" +#include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/adaptation/resource.h" +#include "api/audio/audio_device.h" +#include "api/candidate.h" +#include "api/crypto/crypto_options.h" +#include "api/data_channel_event_observer_interface.h" +#include "api/data_channel_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/environment/environment.h" +#include "api/jsep.h" #include "api/jsep_ice_candidate.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" #include "api/media_types.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtc_event_log_output.h" #include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_direction.h" +#include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/sctp_transport_interface.h" +#include "api/sequence_checker.h" +#include "api/set_local_description_observer_interface.h" +#include "api/set_remote_description_observer_interface.h" +#include "api/stats/rtc_stats_collector_callback.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/transport/bandwidth_estimation_settings.h" +#include "api/transport/bitrate_settings.h" +#include "api/transport/data_channel_transport_interface.h" +#include "api/transport/enums.h" +#include "api/turn_customizer.h" #include "api/uma_metrics.h" +#include "api/units/time_delta.h" #include "api/video/video_codec_constants.h" #include "call/audio_state.h" #include "call/packet_receiver.h" -#include "media/base/media_channel.h" +#include "call/payload_type.h" +#include "media/base/codec.h" #include "media/base/media_config.h" #include "media/base/media_engine.h" -#include "media/base/rid_description.h" -#include "media/base/stream_params.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "p2p/base/basic_async_resolver_factory.h" -#include "p2p/base/connection.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "p2p/base/connection_info.h" -#include "p2p/base/dtls_transport_internal.h" +#include 
"p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/p2p_transport_channel.h" +#include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" +#include "p2p/base/transport_description.h" #include "p2p/base/transport_info.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "pc/channel_interface.h" +#include "pc/codec_vendor.h" +#include "pc/connection_context.h" +#include "pc/data_channel_utils.h" +#include "pc/dtls_transport.h" #include "pc/ice_server_parsing.h" +#include "pc/jsep_transport_controller.h" +#include "pc/legacy_stats_collector.h" +#include "pc/rtc_stats_collector.h" #include "pc/rtp_receiver.h" #include "pc/rtp_receiver_proxy.h" #include "pc/rtp_sender.h" #include "pc/rtp_sender_proxy.h" +#include "pc/rtp_transceiver.h" +#include "pc/rtp_transmission_manager.h" +#include "pc/rtp_transport_internal.h" +#include "pc/sctp_data_channel.h" #include "pc/sctp_transport.h" -#include "pc/simulcast_description.h" -#include "pc/webrtc_session_description_factory.h" -#include "rtc_base/helpers.h" +#include "pc/sdp_offer_answer.h" +#include "pc/session_description.h" +#include "pc/transceiver_list.h" +#include "pc/transport_stats.h" +#include "pc/usage_pattern.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/net_helper.h" +#include "rtc_base/net_helpers.h" #include "rtc_base/network.h" #include "rtc_base/network_constants.h" +#include "rtc_base/rtc_certificate.h" #include "rtc_base/socket_address.h" -#include "rtc_base/string_encode.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/thread.h" #include "rtc_base/trace_event.h" #include "rtc_base/unique_id_generator.h" #include "system_wrappers/include/metrics.h" -using cricket::ContentInfo; -using cricket::ContentInfos; -using cricket::MediaContentDescription; -using cricket::MediaProtocolType; -using cricket::RidDescription; -using cricket::RidDirection; -using cricket::SessionDescription; -using cricket::SimulcastDescription; -using cricket::SimulcastLayer; -using cricket::SimulcastLayerList; -using cricket::StreamParams; -using cricket::TransportInfo; - -using cricket::LOCAL_PORT_TYPE; -using cricket::PRFLX_PORT_TYPE; -using cricket::RELAY_PORT_TYPE; -using cricket::STUN_PORT_TYPE; - namespace webrtc { namespace { +static const int REPORT_USAGE_PATTERN_DELAY_MS = 60000; -// UMA metric names. 
-const char kSimulcastNumberOfEncodings[] = - "WebRTC.PeerConnection.Simulcast.NumberOfSendEncodings"; +class CodecLookupHelperForPeerConnection : public CodecLookupHelper { + public: + explicit CodecLookupHelperForPeerConnection(PeerConnection* self) + : self_(self), + codec_vendor_(self_->context()->media_engine(), + self_->context()->use_rtx(), + self_->context()->env().field_trials()) {} -static const int REPORT_USAGE_PATTERN_DELAY_MS = 60000; + webrtc::PayloadTypeSuggester* PayloadTypeSuggester() override { + return self_->transport_controller_s(); + } + + CodecVendor* GetCodecVendor() override { return &codec_vendor_; } + + private: + PeerConnection* self_; + CodecVendor codec_vendor_; +}; uint32_t ConvertIceTransportTypeToCandidateFilter( PeerConnectionInterface::IceTransportsType type) { switch (type) { case PeerConnectionInterface::kNone: - return cricket::CF_NONE; + return CF_NONE; case PeerConnectionInterface::kRelay: - return cricket::CF_RELAY; + return CF_RELAY; case PeerConnectionInterface::kNoHost: - return (cricket::CF_ALL & ~cricket::CF_HOST); + return (CF_ALL & ~CF_HOST); case PeerConnectionInterface::kAll: - return cricket::CF_ALL; + return CF_ALL; default: RTC_DCHECK_NOTREACHED(); } - return cricket::CF_NONE; + return CF_NONE; } -IceCandidatePairType GetIceCandidatePairCounter( - const cricket::Candidate& local, - const cricket::Candidate& remote) { - const auto& l = local.type(); - const auto& r = remote.type(); - const auto& host = LOCAL_PORT_TYPE; - const auto& srflx = STUN_PORT_TYPE; - const auto& relay = RELAY_PORT_TYPE; - const auto& prflx = PRFLX_PORT_TYPE; - if (l == host && r == host) { +IceCandidatePairType GetIceCandidatePairCounter(const Candidate& local, + const Candidate& remote) { + if (local.is_local() && remote.is_local()) { bool local_hostname = !local.address().hostname().empty() && local.address().IsUnresolvedIP(); bool remote_hostname = !remote.address().hostname().empty() && @@ -151,44 +197,42 @@ IceCandidatePairType GetIceCandidatePairCounter( } } } - if (l == host && r == srflx) - return kIceCandidatePairHostSrflx; - if (l == host && r == relay) - return kIceCandidatePairHostRelay; - if (l == host && r == prflx) - return kIceCandidatePairHostPrflx; - if (l == srflx && r == host) - return kIceCandidatePairSrflxHost; - if (l == srflx && r == srflx) - return kIceCandidatePairSrflxSrflx; - if (l == srflx && r == relay) - return kIceCandidatePairSrflxRelay; - if (l == srflx && r == prflx) - return kIceCandidatePairSrflxPrflx; - if (l == relay && r == host) - return kIceCandidatePairRelayHost; - if (l == relay && r == srflx) - return kIceCandidatePairRelaySrflx; - if (l == relay && r == relay) - return kIceCandidatePairRelayRelay; - if (l == relay && r == prflx) - return kIceCandidatePairRelayPrflx; - if (l == prflx && r == host) - return kIceCandidatePairPrflxHost; - if (l == prflx && r == srflx) - return kIceCandidatePairPrflxSrflx; - if (l == prflx && r == relay) - return kIceCandidatePairPrflxRelay; - return kIceCandidatePairMax; -} -absl::optional RTCConfigurationToIceConfigOptionalInt( - int rtc_configuration_parameter) { - if (rtc_configuration_parameter == - webrtc::PeerConnectionInterface::RTCConfiguration::kUndefined) { - return absl::nullopt; + if (local.is_local()) { + if (remote.is_stun()) + return kIceCandidatePairHostSrflx; + if (remote.is_relay()) + return kIceCandidatePairHostRelay; + if (remote.is_prflx()) + return kIceCandidatePairHostPrflx; + } else if (local.is_stun()) { + if (remote.is_local()) + return kIceCandidatePairSrflxHost; 
+ if (remote.is_stun()) + return kIceCandidatePairSrflxSrflx; + if (remote.is_relay()) + return kIceCandidatePairSrflxRelay; + if (remote.is_prflx()) + return kIceCandidatePairSrflxPrflx; + } else if (local.is_relay()) { + if (remote.is_local()) + return kIceCandidatePairRelayHost; + if (remote.is_stun()) + return kIceCandidatePairRelaySrflx; + if (remote.is_relay()) + return kIceCandidatePairRelayRelay; + if (remote.is_prflx()) + return kIceCandidatePairRelayPrflx; + } else if (local.is_prflx()) { + if (remote.is_local()) + return kIceCandidatePairPrflxHost; + if (remote.is_stun()) + return kIceCandidatePairPrflxSrflx; + if (remote.is_relay()) + return kIceCandidatePairPrflxRelay; } - return rtc_configuration_parameter; + + return kIceCandidatePairMax; } // Check if the changes of IceTransportsType motives an ice restart. @@ -211,60 +255,87 @@ bool NeedIceRestart(bool surface_ice_candidates_on_ice_transport_type_changed, return (current_filter & modified_filter) != current_filter; } -cricket::IceConfig ParseIceConfig( - const PeerConnectionInterface::RTCConfiguration& config) { - cricket::ContinualGatheringPolicy gathering_policy; - switch (config.continual_gathering_policy) { - case PeerConnectionInterface::GATHER_ONCE: - gathering_policy = cricket::GATHER_ONCE; - break; - case PeerConnectionInterface::GATHER_CONTINUALLY: - gathering_policy = cricket::GATHER_CONTINUALLY; - break; - default: - RTC_DCHECK_NOTREACHED(); - gathering_policy = cricket::GATHER_ONCE; - } - - cricket::IceConfig ice_config; - ice_config.receiving_timeout = RTCConfigurationToIceConfigOptionalInt( - config.ice_connection_receiving_timeout); - ice_config.prioritize_most_likely_candidate_pairs = - config.prioritize_most_likely_ice_candidate_pairs; - ice_config.backup_connection_ping_interval = - RTCConfigurationToIceConfigOptionalInt( - config.ice_backup_candidate_pair_ping_interval); - ice_config.continual_gathering_policy = gathering_policy; - ice_config.presume_writable_when_fully_relayed = - config.presume_writable_when_fully_relayed; - ice_config.surface_ice_candidates_on_ice_transport_type_changed = - config.surface_ice_candidates_on_ice_transport_type_changed; - ice_config.ice_check_interval_strong_connectivity = - config.ice_check_interval_strong_connectivity; - ice_config.ice_check_interval_weak_connectivity = - config.ice_check_interval_weak_connectivity; - ice_config.ice_check_min_interval = config.ice_check_min_interval; - ice_config.ice_unwritable_timeout = config.ice_unwritable_timeout; - ice_config.ice_unwritable_min_checks = config.ice_unwritable_min_checks; - ice_config.ice_inactive_timeout = config.ice_inactive_timeout; - ice_config.stun_keepalive_interval = config.stun_candidate_keepalive_interval; - ice_config.network_preference = config.network_preference; - ice_config.stable_writable_connection_ping_interval = - config.stable_writable_connection_ping_interval_ms; - return ice_config; -} - -// Ensures the configuration doesn't have any parameters with invalid values, -// or values that conflict with other parameters. -// -// Returns RTCError::OK() if there are no issues. -RTCError ValidateConfiguration( - const PeerConnectionInterface::RTCConfiguration& config) { - return cricket::P2PTransportChannel::ValidateIceConfig( - ParseIceConfig(config)); -} - -bool HasRtcpMuxEnabled(const cricket::ContentInfo* content) { +// Checks for valid pool size range and if a previous value has already been +// set, which is done via SetLocalDescription. 
+RTCError ValidateIceCandidatePoolSize( + int ice_candidate_pool_size, + std::optional previous_ice_candidate_pool_size) { + // Note that this isn't possible through chromium, since it's an unsigned + // short in WebIDL. + if (ice_candidate_pool_size < 0 || + ice_candidate_pool_size > static_cast(UINT16_MAX)) { + return RTCError(RTCErrorType::INVALID_RANGE); + } + + // According to JSEP, after setLocalDescription, changing the candidate pool + // size is not allowed, and changing the set of ICE servers will not result + // in new candidates being gathered. + if (previous_ice_candidate_pool_size.has_value() && + ice_candidate_pool_size != previous_ice_candidate_pool_size.value()) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, + "Can't change candidate pool size after calling " + "SetLocalDescription."); + } + + return RTCError::OK(); +} + +// The simplest (and most future-compatible) way to tell if a config was +// modified in an invalid way is to copy each property we do support modifying, +// then use operator==. There are far more properties we don't support modifying +// than those we do, and more could be added. +// This helper function accepts a proposed new `configuration` object, an +// existing configuration and returns a valid, modified, configuration that's +// based on the existing configuration, with modified properties copied from +// `configuration`. +// If the result of creating a modified configuration doesn't pass the above +// `operator==` test or a call to `ValidateConfiguration()`, then the function +// will return an error. Otherwise, the return value will be the new config. +RTCErrorOr ApplyConfiguration( + const PeerConnectionInterface::RTCConfiguration& configuration, + const PeerConnectionInterface::RTCConfiguration& existing_configuration) { + PeerConnectionInterface::RTCConfiguration modified_config = + existing_configuration; + modified_config.servers = configuration.servers; + modified_config.type = configuration.type; + modified_config.ice_candidate_pool_size = + configuration.ice_candidate_pool_size; + modified_config.prune_turn_ports = configuration.prune_turn_ports; + modified_config.turn_port_prune_policy = configuration.turn_port_prune_policy; + modified_config.surface_ice_candidates_on_ice_transport_type_changed = + configuration.surface_ice_candidates_on_ice_transport_type_changed; + modified_config.ice_check_min_interval = configuration.ice_check_min_interval; + modified_config.ice_check_interval_strong_connectivity = + configuration.ice_check_interval_strong_connectivity; + modified_config.ice_check_interval_weak_connectivity = + configuration.ice_check_interval_weak_connectivity; + modified_config.ice_unwritable_timeout = configuration.ice_unwritable_timeout; + modified_config.ice_unwritable_min_checks = + configuration.ice_unwritable_min_checks; + modified_config.ice_inactive_timeout = configuration.ice_inactive_timeout; + modified_config.stun_candidate_keepalive_interval = + configuration.stun_candidate_keepalive_interval; + modified_config.turn_customizer = configuration.turn_customizer; + modified_config.network_preference = configuration.network_preference; + modified_config.active_reset_srtp_params = + configuration.active_reset_srtp_params; + modified_config.turn_logging_id = configuration.turn_logging_id; + modified_config.stable_writable_connection_ping_interval_ms = + configuration.stable_writable_connection_ping_interval_ms; + if (configuration != modified_config) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, + 
"Modifying the configuration in an unsupported way."); + } + + RTCError err = IceConfig(modified_config).IsValid(); + if (!err.ok()) { + return err; + } + + return modified_config; +} + +bool HasRtcpMuxEnabled(const ContentInfo* content) { return content->media_description()->rtcp_mux(); } @@ -275,15 +346,18 @@ bool DtlsEnabled(const PeerConnectionInterface::RTCConfiguration& configuration, return false; // Enable DTLS by default if we have an identity store or a certificate. - bool default_enabled = - (dependencies.cert_generator || !configuration.certificates.empty()); - -#if defined(WEBRTC_FUCHSIA) - // The `configuration` can override the default value. - return configuration.enable_dtls_srtp.value_or(default_enabled); -#else - return default_enabled; -#endif + return (dependencies.cert_generator || !configuration.certificates.empty()); +} + +void NoteServerUsage(UsagePattern& usage_pattern, + const ServerAddresses& stun_servers, + const std::vector& turn_servers) { + if (!stun_servers.empty()) { + usage_pattern.NoteUsageEvent(UsageEvent::STUN_SERVER_ADDED); + } + if (!turn_servers.empty()) { + usage_pattern.NoteUsageEvent(UsageEvent::TURN_SERVER_ADDED); + } } } // namespace @@ -297,15 +371,12 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( IceTransportsType type; BundlePolicy bundle_policy; RtcpMuxPolicy rtcp_mux_policy; - std::vector> certificates; + std::vector> certificates; int ice_candidate_pool_size; bool disable_ipv6_on_wifi; int max_ipv6_networks; bool disable_link_local_networks; - absl::optional screencast_min_bitrate; -#if defined(WEBRTC_FUCHSIA) - absl::optional enable_dtls_srtp; -#endif + std::optional screencast_min_bitrate; TcpCandidatePolicy tcp_candidate_policy; CandidateNetworkPolicy candidate_network_policy; int audio_jitter_buffer_max_packets; @@ -315,35 +386,34 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( int ice_backup_candidate_pair_ping_interval; ContinualGatheringPolicy continual_gathering_policy; bool prioritize_most_likely_ice_candidate_pairs; - struct cricket::MediaConfig media_config; + struct MediaConfig media_config; bool prune_turn_ports; PortPrunePolicy turn_port_prune_policy; bool presume_writable_when_fully_relayed; bool enable_ice_renomination; bool redetermine_role_on_ice_restart; bool surface_ice_candidates_on_ice_transport_type_changed; - absl::optional ice_check_interval_strong_connectivity; - absl::optional ice_check_interval_weak_connectivity; - absl::optional ice_check_min_interval; - absl::optional ice_unwritable_timeout; - absl::optional ice_unwritable_min_checks; - absl::optional ice_inactive_timeout; - absl::optional stun_candidate_keepalive_interval; - webrtc::TurnCustomizer* turn_customizer; + std::optional ice_check_interval_strong_connectivity; + std::optional ice_check_interval_weak_connectivity; + std::optional ice_check_min_interval; + std::optional ice_unwritable_timeout; + std::optional ice_unwritable_min_checks; + std::optional ice_inactive_timeout; + std::optional stun_candidate_keepalive_interval; + TurnCustomizer* turn_customizer; SdpSemantics sdp_semantics; - absl::optional network_preference; + std::optional network_preference; bool active_reset_srtp_params; - absl::optional crypto_options; + std::optional crypto_options; bool offer_extmap_allow_mixed; std::string turn_logging_id; bool enable_implicit_rollback; - absl::optional allow_codec_switching; - absl::optional report_usage_pattern_delay_ms; - absl::optional stable_writable_connection_ping_interval_ms; - webrtc::VpnPreference 
vpn_preference; - std::vector vpn_list; + std::optional report_usage_pattern_delay_ms; + std::optional stable_writable_connection_ping_interval_ms; + VpnPreference vpn_preference; + std::vector vpn_list; PortAllocatorConfig port_allocator_config; - absl::optional pacer_burst_interval; + std::optional pacer_burst_interval; }; static_assert(sizeof(stuff_being_tested_for_equality) == sizeof(*this), "Did you add something to RTCConfiguration and forget to " @@ -371,9 +441,6 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( max_ipv6_networks == o.max_ipv6_networks && disable_link_local_networks == o.disable_link_local_networks && screencast_min_bitrate == o.screencast_min_bitrate && -#if defined(WEBRTC_FUCHSIA) - enable_dtls_srtp == o.enable_dtls_srtp && -#endif ice_candidate_pool_size == o.ice_candidate_pool_size && prune_turn_ports == o.prune_turn_ports && turn_port_prune_policy == o.turn_port_prune_policy && @@ -401,7 +468,6 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( offer_extmap_allow_mixed == o.offer_extmap_allow_mixed && turn_logging_id == o.turn_logging_id && enable_implicit_rollback == o.enable_implicit_rollback && - allow_codec_switching == o.allow_codec_switching && report_usage_pattern_delay_ms == o.report_usage_pattern_delay_ms && stable_writable_connection_ping_interval_ms == o.stable_writable_connection_ping_interval_ms && @@ -417,123 +483,119 @@ bool PeerConnectionInterface::RTCConfiguration::operator!=( return !(*this == o); } -RTCErrorOr> PeerConnection::Create( - rtc::scoped_refptr context, +scoped_refptr PeerConnection::Create( + const Environment& env, + scoped_refptr context, const PeerConnectionFactoryInterface::Options& options, - std::unique_ptr event_log, std::unique_ptr call, const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies dependencies) { - // TODO(https://crbug.com/webrtc/13528): Remove support for kPlanB. - if (configuration.sdp_semantics == SdpSemantics::kPlanB_DEPRECATED) { - RTC_LOG(LS_WARNING) - << "PeerConnection constructed with legacy SDP semantics!"; - } - - RTCError config_error = cricket::P2PTransportChannel::ValidateIceConfig( - ParseIceConfig(configuration)); - if (!config_error.ok()) { - RTC_LOG(LS_ERROR) << "Invalid ICE configuration: " - << config_error.message(); - return config_error; - } - - if (!dependencies.allocator) { - RTC_LOG(LS_ERROR) - << "PeerConnection initialized without a PortAllocator? " - "This shouldn't happen if using PeerConnectionFactory."; - return RTCError( - RTCErrorType::INVALID_PARAMETER, - "Attempt to create a PeerConnection without a PortAllocatorFactory"); - } - - if (!dependencies.observer) { - // TODO(deadbeef): Why do we do this? 
- RTC_LOG(LS_ERROR) << "PeerConnection initialized without a " - "PeerConnectionObserver"; - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Attempt to create a PeerConnection without an observer"); - } + PeerConnectionDependencies& dependencies, + const ServerAddresses& stun_servers, + const std::vector& turn_servers) { + RTC_DCHECK(IceConfig(configuration).IsValid().ok()); + RTC_DCHECK(dependencies.observer); + RTC_DCHECK(dependencies.async_dns_resolver_factory); + RTC_DCHECK(dependencies.allocator); bool is_unified_plan = configuration.sdp_semantics == SdpSemantics::kUnifiedPlan; bool dtls_enabled = DtlsEnabled(configuration, options, dependencies); - // Interim code: If an AsyncResolverFactory is given, but not an - // AsyncDnsResolverFactory, wrap it in a WrappingAsyncDnsResolverFactory - // If neither is given, create a BasicAsyncDnsResolverFactory. - // TODO(bugs.webrtc.org/12598): Remove code once all callers pass a - // AsyncDnsResolverFactory. -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wdeprecated-declarations" - if (dependencies.async_dns_resolver_factory && - dependencies.async_resolver_factory) { - RTC_LOG(LS_ERROR) - << "Attempt to set both old and new type of DNS resolver factory"; - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Both old and new type of DNS resolver given"); - } - if (!dependencies.async_dns_resolver_factory) { - if (dependencies.async_resolver_factory) { - dependencies.async_dns_resolver_factory = - std::make_unique( - std::move(dependencies.async_resolver_factory)); - } else { - dependencies.async_dns_resolver_factory = - std::make_unique(); - } - } -#pragma clang diagnostic pop - - // The PeerConnection constructor consumes some, but not all, dependencies. - auto pc = rtc::make_ref_counted( - context, options, is_unified_plan, std::move(event_log), std::move(call), - dependencies, dtls_enabled); - RTCError init_error = pc->Initialize(configuration, std::move(dependencies)); - if (!init_error.ok()) { - RTC_LOG(LS_ERROR) << "PeerConnection initialization failed"; - return init_error; - } - return pc; + TRACE_EVENT0("webrtc", "PeerConnection::Create"); + return make_ref_counted( + configuration, env, context, options, is_unified_plan, std::move(call), + dependencies, stun_servers, turn_servers, dtls_enabled); } PeerConnection::PeerConnection( - rtc::scoped_refptr context, + const PeerConnectionInterface::RTCConfiguration& configuration, + const Environment& env, + scoped_refptr context, const PeerConnectionFactoryInterface::Options& options, bool is_unified_plan, - std::unique_ptr event_log, std::unique_ptr call, PeerConnectionDependencies& dependencies, + const ServerAddresses& stun_servers, + const std::vector& turn_servers, bool dtls_enabled) - : context_(context), - trials_(std::move(dependencies.trials), &context->field_trials()), + : env_(env), + context_(context), options_(options), observer_(dependencies.observer), is_unified_plan_(is_unified_plan), - event_log_(std::move(event_log)), - event_log_ptr_(event_log_.get()), + dtls_enabled_(dtls_enabled), + configuration_(configuration), async_dns_resolver_factory_( std::move(dependencies.async_dns_resolver_factory)), port_allocator_(std::move(dependencies.allocator)), ice_transport_factory_(std::move(dependencies.ice_transport_factory)), tls_cert_verifier_(std::move(dependencies.tls_cert_verifier)), call_(std::move(call)), + network_thread_safety_( + PendingTaskSafetyFlag::CreateAttachedToTaskQueue(true, + network_thread())), + 
worker_thread_safety_(PendingTaskSafetyFlag::CreateAttachedToTaskQueue( + /*alive=*/call_ != nullptr, + worker_thread())), call_ptr_(call_.get()), + legacy_stats_(std::make_unique(this)), + stats_collector_(RTCStatsCollector::Create(this, env_)), // RFC 3264: The numeric value of the session id and version in the // o line MUST be representable with a "64 bit signed integer". // Due to this constraint session id `session_id_` is max limited to // LLONG_MAX. - session_id_(rtc::ToString(rtc::CreateRandomId64() & LLONG_MAX)), - dtls_enabled_(dtls_enabled), + session_id_(absl::StrCat(CreateRandomId64() & LLONG_MAX)), data_channel_controller_(this), message_handler_(signaling_thread()), + codec_lookup_helper_( + std::make_unique(this)), weak_factory_(this) { - worker_thread()->BlockingCall([this] { - RTC_DCHECK_RUN_ON(worker_thread()); - worker_thread_safety_ = PendingTaskSafetyFlag::Create(); - if (!call_) - worker_thread_safety_->SetNotAlive(); - }); + // Field trials specific to the peerconnection should be owned by the `env`, + RTC_DCHECK(dependencies.trials == nullptr); + + transport_controller_copy_ = + InitializeNetworkThread(stun_servers, turn_servers); + + if (call_ptr_) { + worker_thread()->BlockingCall([this, tc = transport_controller_copy_] { + RTC_DCHECK_RUN_ON(worker_thread()); + call_->SetPayloadTypeSuggester(tc); + }); + } + + sdp_handler_ = SdpOfferAnswerHandler::Create( + this, configuration_, std::move(dependencies.cert_generator), + std::move(dependencies.video_bitrate_allocator_factory), context_.get(), + codec_lookup_helper_.get()); + rtp_manager_ = std::make_unique( + env_, IsUnifiedPlan(), context_.get(), codec_lookup_helper_.get(), + &usage_pattern_, observer_, legacy_stats_.get(), [this]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + sdp_handler_->UpdateNegotiationNeeded(); + }); + // Add default audio/video transceivers for Plan B SDP. + if (!IsUnifiedPlan()) { + rtp_manager_->transceivers()->Add( + RtpTransceiverProxyWithInternal::Create( + signaling_thread(), make_ref_counted( + webrtc::MediaType::AUDIO, context_.get(), + codec_lookup_helper_.get()))); + rtp_manager_->transceivers()->Add( + RtpTransceiverProxyWithInternal::Create( + signaling_thread(), make_ref_counted( + webrtc::MediaType::VIDEO, context_.get(), + codec_lookup_helper_.get()))); + } + + const int delay_ms = configuration_.report_usage_pattern_delay_ms + ? *configuration_.report_usage_pattern_delay_ms + : REPORT_USAGE_PATTERN_DELAY_MS; + message_handler_.RequestUsagePatternReport( + [this]() { + RTC_DCHECK_RUN_ON(signaling_thread()); + ReportUsagePattern(); + }, + delay_ms); } PeerConnection::~PeerConnection() { @@ -566,10 +628,8 @@ PeerConnection::~PeerConnection() { if (sdp_handler_) { // Don't destroy BaseChannels until after stats has been cleaned up so that // the last stats request can still read from the channels. - sdp_handler_->DestroyAllChannels(); - + sdp_handler_->DestroyMediaChannels(); RTC_LOG(LS_INFO) << "Session: " << session_id() << " is destroyed."; - sdp_handler_->ResetSessionDescFactory(); } @@ -584,113 +644,41 @@ PeerConnection::~PeerConnection() { if (network_thread_safety_) network_thread_safety_->SetNotAlive(); }); + sctp_mid_s_.reset(); + SetSctpTransportName(""); - // call_ and event_log_ must be destroyed on the worker thread. + // call_ must be destroyed on the worker thread. 
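+  // (The event log is now provided via `env_` rather than owned by the
+  // PeerConnection, so only `call_` needs to be torn down here.)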
worker_thread()->BlockingCall([this] { RTC_DCHECK_RUN_ON(worker_thread()); worker_thread_safety_->SetNotAlive(); call_.reset(); - // The event log must outlive call (and any other object that uses it). - event_log_.reset(); }); data_channel_controller_.PrepareForShutdown(); } -RTCError PeerConnection::Initialize( - const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies dependencies) { +JsepTransportController* PeerConnection::InitializeNetworkThread( + const ServerAddresses& stun_servers, + const std::vector& turn_servers) { RTC_DCHECK_RUN_ON(signaling_thread()); - TRACE_EVENT0("webrtc", "PeerConnection::Initialize"); - - cricket::ServerAddresses stun_servers; - std::vector turn_servers; - - RTCError parse_error = ParseIceServersOrError(configuration.servers, - &stun_servers, &turn_servers); - if (!parse_error.ok()) { - return parse_error; - } - - // Restrict number of TURN servers. - if (turn_servers.size() > cricket::kMaxTurnServers) { - RTC_LOG(LS_WARNING) << "Number of configured TURN servers is " - << turn_servers.size() - << " which exceeds the maximum allowed number of " - << cricket::kMaxTurnServers; - turn_servers.resize(cricket::kMaxTurnServers); - } - - // Add the turn logging id to all turn servers - for (cricket::RelayServerConfig& turn_server : turn_servers) { - turn_server.turn_logging_id = configuration.turn_logging_id; - } - - // Note if STUN or TURN servers were supplied. - if (!stun_servers.empty()) { - NoteUsageEvent(UsageEvent::STUN_SERVER_ADDED); - } - if (!turn_servers.empty()) { - NoteUsageEvent(UsageEvent::TURN_SERVER_ADDED); - } - // Network thread initialization. - transport_controller_copy_ = network_thread()->BlockingCall([&] { + NoteServerUsage(usage_pattern_, stun_servers, turn_servers); + return network_thread()->BlockingCall([&, config = &configuration_] { RTC_DCHECK_RUN_ON(network_thread()); - network_thread_safety_ = PendingTaskSafetyFlag::Create(); + RTC_DCHECK(network_thread_safety_->alive()); InitializePortAllocatorResult pa_result = - InitializePortAllocator_n(stun_servers, turn_servers, configuration); + InitializePortAllocator_n(stun_servers, turn_servers, *config); // Send information about IPv4/IPv6 status. PeerConnectionAddressFamilyCounter address_family = pa_result.enable_ipv6 ? kPeerConnection_IPv6 : kPeerConnection_IPv4; RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.IPMetrics", address_family, kPeerConnectionAddressFamilyCounter_Max); - return InitializeTransportController_n(configuration, dependencies); + return InitializeTransportController_n(*config); }); - - configuration_ = configuration; - - legacy_stats_ = std::make_unique(this); - stats_collector_ = RTCStatsCollector::Create(this); - - sdp_handler_ = SdpOfferAnswerHandler::Create(this, configuration, - dependencies, context_.get()); - - rtp_manager_ = std::make_unique( - IsUnifiedPlan(), context_.get(), &usage_pattern_, observer_, - legacy_stats_.get(), [this]() { - RTC_DCHECK_RUN_ON(signaling_thread()); - sdp_handler_->UpdateNegotiationNeeded(); - }); - - // Add default audio/video transceivers for Plan B SDP. - if (!IsUnifiedPlan()) { - rtp_manager()->transceivers()->Add( - RtpTransceiverProxyWithInternal::Create( - signaling_thread(), rtc::make_ref_counted( - cricket::MEDIA_TYPE_AUDIO, context()))); - rtp_manager()->transceivers()->Add( - RtpTransceiverProxyWithInternal::Create( - signaling_thread(), rtc::make_ref_counted( - cricket::MEDIA_TYPE_VIDEO, context()))); - } - - int delay_ms = configuration.report_usage_pattern_delay_ms - ? 
*configuration.report_usage_pattern_delay_ms - : REPORT_USAGE_PATTERN_DELAY_MS; - message_handler_.RequestUsagePatternReport( - [this]() { - RTC_DCHECK_RUN_ON(signaling_thread()); - ReportUsagePattern(); - }, - delay_ms); - - return RTCError::OK(); } JsepTransportController* PeerConnection::InitializeTransportController_n( - const RTCConfiguration& configuration, - const PeerConnectionDependencies& dependencies) { + const RTCConfiguration& configuration) { JsepTransportController::Config config; config.redetermine_role_on_ice_restart = configuration.redetermine_role_on_ice_restart; @@ -706,7 +694,7 @@ JsepTransportController* PeerConnection::InitializeTransportController_n( config.transport_observer = this; config.rtcp_handler = InitializeRtcpCallback(); config.un_demuxable_packet_handler = InitializeUnDemuxablePacketHandler(); - config.event_log = event_log_ptr_; + config.event_log = &env_.event_log(); #if defined(ENABLE_EXTERNAL_AUTH) config.enable_external_auth = true; #endif @@ -719,20 +707,19 @@ JsepTransportController* PeerConnection::InitializeTransportController_n( config.ice_transport_factory = ice_transport_factory_.get(); config.on_dtls_handshake_error_ = - [weak_ptr = weak_factory_.GetWeakPtr()](rtc::SSLHandshakeError s) { + [weak_ptr = weak_factory_.GetWeakPtr()](SSLHandshakeError s) { if (weak_ptr) { weak_ptr->OnTransportControllerDtlsHandshakeError(s); } }; - config.field_trials = trials_.get(); - - transport_controller_.reset(new JsepTransportController( - network_thread(), port_allocator_.get(), - async_dns_resolver_factory_.get(), std::move(config))); + transport_controller_.reset( + new JsepTransportController(env_, network_thread(), port_allocator_.get(), + async_dns_resolver_factory_.get(), + payload_type_picker_, std::move(config))); transport_controller_->SubscribeIceConnectionState( - [this](cricket::IceConnectionState s) { + [this](::webrtc::IceConnectionState s) { RTC_DCHECK_RUN_ON(network_thread()); signaling_thread()->PostTask( SafeTask(signaling_thread_safety_.flag(), [this, s]() { @@ -759,7 +746,7 @@ JsepTransportController* PeerConnection::InitializeTransportController_n( })); }); transport_controller_->SubscribeIceGatheringState( - [this](cricket::IceGatheringState s) { + [this](::webrtc::IceGatheringState s) { RTC_DCHECK_RUN_ON(network_thread()); signaling_thread()->PostTask( SafeTask(signaling_thread_safety_.flag(), [this, s]() { @@ -769,7 +756,7 @@ JsepTransportController* PeerConnection::InitializeTransportController_n( }); transport_controller_->SubscribeIceCandidateGathered( [this](const std::string& transport, - const std::vector& candidates) { + const std::vector& candidates) { RTC_DCHECK_RUN_ON(network_thread()); signaling_thread()->PostTask( SafeTask(signaling_thread_safety_.flag(), @@ -779,7 +766,7 @@ JsepTransportController* PeerConnection::InitializeTransportController_n( })); }); transport_controller_->SubscribeIceCandidateError( - [this](const cricket::IceCandidateErrorEvent& event) { + [this](const IceCandidateErrorEvent& event) { RTC_DCHECK_RUN_ON(network_thread()); signaling_thread()->PostTask( SafeTask(signaling_thread_safety_.flag(), [this, event = event]() { @@ -788,7 +775,7 @@ JsepTransportController* PeerConnection::InitializeTransportController_n( })); }); transport_controller_->SubscribeIceCandidatesRemoved( - [this](const std::vector& c) { + [this](const std::vector& c) { RTC_DCHECK_RUN_ON(network_thread()); signaling_thread()->PostTask( SafeTask(signaling_thread_safety_.flag(), [this, c = c]() { @@ -797,7 +784,7 @@ 
JsepTransportController* PeerConnection::InitializeTransportController_n( })); }); transport_controller_->SubscribeIceCandidatePairChanged( - [this](const cricket::CandidatePairChangeEvent& event) { + [this](const CandidatePairChangeEvent& event) { RTC_DCHECK_RUN_ON(network_thread()); signaling_thread()->PostTask( SafeTask(signaling_thread_safety_.flag(), [this, event = event]() { @@ -806,11 +793,15 @@ JsepTransportController* PeerConnection::InitializeTransportController_n( })); }); - transport_controller_->SetIceConfig(ParseIceConfig(configuration)); + IceConfig ice_config(configuration); + ice_config.dtls_handshake_in_stun = + CanAttemptDtlsStunPiggybacking(configuration); + + transport_controller_->SetIceConfig(ice_config); return transport_controller_.get(); } -rtc::scoped_refptr PeerConnection::local_streams() { +scoped_refptr PeerConnection::local_streams() { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_CHECK(!IsUnifiedPlan()) << "local_streams is not available with Unified " "Plan SdpSemantics. Please use GetSenders " @@ -818,7 +809,7 @@ rtc::scoped_refptr PeerConnection::local_streams() { return sdp_handler_->local_streams(); } -rtc::scoped_refptr PeerConnection::remote_streams() { +scoped_refptr PeerConnection::remote_streams() { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_CHECK(!IsUnifiedPlan()) << "remote_streams is not available with Unified " "Plan SdpSemantics. Please use GetReceivers " @@ -848,21 +839,21 @@ void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) { sdp_handler_->RemoveStream(local_stream); } -RTCErrorOr> PeerConnection::AddTrack( - rtc::scoped_refptr track, +RTCErrorOr> PeerConnection::AddTrack( + scoped_refptr track, const std::vector& stream_ids) { return AddTrack(std::move(track), stream_ids, nullptr); } -RTCErrorOr> PeerConnection::AddTrack( - rtc::scoped_refptr track, +RTCErrorOr> PeerConnection::AddTrack( + scoped_refptr track, const std::vector& stream_ids, const std::vector& init_send_encodings) { return AddTrack(std::move(track), stream_ids, &init_send_encodings); } -RTCErrorOr> PeerConnection::AddTrack( - rtc::scoped_refptr track, +RTCErrorOr> PeerConnection::AddTrack( + scoped_refptr track, const std::vector& stream_ids, const std::vector* init_send_encodings) { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -898,7 +889,7 @@ RTCErrorOr> PeerConnection::AddTrack( } RTCError PeerConnection::RemoveTrackOrError( - rtc::scoped_refptr sender) { + scoped_refptr sender) { RTC_DCHECK_RUN_ON(signaling_thread()); if (!ConfiguredForMedia()) { LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, @@ -926,11 +917,11 @@ RTCError PeerConnection::RemoveTrackOrError( } } else { bool removed; - if (sender->media_type() == cricket::MEDIA_TYPE_AUDIO) { + if (sender->media_type() == webrtc::MediaType::AUDIO) { removed = rtp_manager()->GetAudioTransceiver()->internal()->RemoveSender( sender.get()); } else { - RTC_DCHECK_EQ(cricket::MEDIA_TYPE_VIDEO, sender->media_type()); + RTC_DCHECK_EQ(webrtc::MediaType::VIDEO, sender->media_type()); removed = rtp_manager()->GetVideoTransceiver()->internal()->RemoveSender( sender.get()); } @@ -944,15 +935,14 @@ RTCError PeerConnection::RemoveTrackOrError( return RTCError::OK(); } -rtc::scoped_refptr> +scoped_refptr> PeerConnection::FindTransceiverBySender( - rtc::scoped_refptr sender) { + scoped_refptr sender) { return rtp_manager()->transceivers()->FindBySender(sender); } -RTCErrorOr> -PeerConnection::AddTransceiver( - rtc::scoped_refptr track) { +RTCErrorOr> +PeerConnection::AddTransceiver(scoped_refptr track) { if 
(!ConfiguredForMedia()) { LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, "Not configured for media"); @@ -961,22 +951,9 @@ PeerConnection::AddTransceiver( return AddTransceiver(track, RtpTransceiverInit()); } -RtpTransportInternal* PeerConnection::GetRtpTransport(const std::string& mid) { - // TODO(bugs.webrtc.org/9987): Avoid the thread jump. - // This might be done by caching the value on the signaling thread. - RTC_DCHECK_RUN_ON(signaling_thread()); - return network_thread()->BlockingCall([this, &mid] { - RTC_DCHECK_RUN_ON(network_thread()); - auto rtp_transport = transport_controller_->GetRtpTransport(mid); - RTC_DCHECK(rtp_transport); - return rtp_transport; - }); -} - -RTCErrorOr> -PeerConnection::AddTransceiver( - rtc::scoped_refptr track, - const RtpTransceiverInit& init) { +RTCErrorOr> +PeerConnection::AddTransceiver(scoped_refptr track, + const RtpTransceiverInit& init) { RTC_DCHECK_RUN_ON(signaling_thread()); if (!ConfiguredForMedia()) { LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, @@ -987,11 +964,11 @@ PeerConnection::AddTransceiver( if (!track) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "track is null"); } - cricket::MediaType media_type; + webrtc::MediaType media_type; if (track->kind() == MediaStreamTrackInterface::kAudioKind) { - media_type = cricket::MEDIA_TYPE_AUDIO; + media_type = webrtc::MediaType::AUDIO; } else if (track->kind() == MediaStreamTrackInterface::kVideoKind) { - media_type = cricket::MEDIA_TYPE_VIDEO; + media_type = webrtc::MediaType::VIDEO; } else { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "Track kind is not audio or video"); @@ -999,13 +976,13 @@ PeerConnection::AddTransceiver( return AddTransceiver(media_type, track, init); } -RTCErrorOr> -PeerConnection::AddTransceiver(cricket::MediaType media_type) { +RTCErrorOr> +PeerConnection::AddTransceiver(webrtc::MediaType media_type) { return AddTransceiver(media_type, RtpTransceiverInit()); } -RTCErrorOr> -PeerConnection::AddTransceiver(cricket::MediaType media_type, +RTCErrorOr> +PeerConnection::AddTransceiver(webrtc::MediaType media_type, const RtpTransceiverInit& init) { RTC_DCHECK_RUN_ON(signaling_thread()); if (!ConfiguredForMedia()) { @@ -1014,37 +991,33 @@ PeerConnection::AddTransceiver(cricket::MediaType media_type, } RTC_CHECK(IsUnifiedPlan()) << "AddTransceiver is only available with Unified Plan SdpSemantics"; - if (!(media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO)) { + if (!(media_type == webrtc::MediaType::AUDIO || + media_type == webrtc::MediaType::VIDEO)) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "media type is not audio or video"); } return AddTransceiver(media_type, nullptr, init); } -RTCErrorOr> -PeerConnection::AddTransceiver( - cricket::MediaType media_type, - rtc::scoped_refptr track, - const RtpTransceiverInit& init, - bool update_negotiation_needed) { +RTCErrorOr> +PeerConnection::AddTransceiver(webrtc::MediaType media_type, + scoped_refptr track, + const RtpTransceiverInit& init, + bool update_negotiation_needed) { RTC_DCHECK_RUN_ON(signaling_thread()); if (!ConfiguredForMedia()) { LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION, "Not configured for media"); } - RTC_DCHECK((media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO)); + RTC_DCHECK((media_type == webrtc::MediaType::AUDIO || + media_type == webrtc::MediaType::VIDEO)); if (track) { RTC_DCHECK_EQ(media_type, (track->kind() == MediaStreamTrackInterface::kAudioKind - ? 
cricket::MEDIA_TYPE_AUDIO - : cricket::MEDIA_TYPE_VIDEO)); + ? webrtc::MediaType::AUDIO + : webrtc::MediaType::VIDEO)); } - RTC_HISTOGRAM_COUNTS_LINEAR(kSimulcastNumberOfEncodings, - init.send_encodings.size(), 0, 7, 8); - size_t num_rids = absl::c_count_if(init.send_encodings, [](const RtpEncodingParameters& encoding) { return !encoding.rid.empty(); @@ -1077,7 +1050,7 @@ PeerConnection::AddTransceiver( // Encodings are dropped from the tail if too many are provided. size_t max_simulcast_streams = - media_type == cricket::MEDIA_TYPE_VIDEO ? kMaxSimulcastStreams : 1u; + media_type == webrtc::MediaType::VIDEO ? kMaxSimulcastStreams : 1u; if (parameters.encodings.size() > max_simulcast_streams) { parameters.encodings.erase( parameters.encodings.begin() + max_simulcast_streams, @@ -1093,7 +1066,7 @@ PeerConnection::AddTransceiver( // If RIDs were not provided, they are generated for simulcast scenario. if (parameters.encodings.size() > 1 && num_rids == 0) { - rtc::UniqueStringGenerator rid_generator; + UniqueStringGenerator rid_generator; for (RtpEncodingParameters& encoding : parameters.encodings) { encoding.rid = rid_generator.GenerateString(); } @@ -1110,17 +1083,19 @@ PeerConnection::AddTransceiver( "Attempted to set an unimplemented parameter of RtpParameters."); } - std::vector codecs; + std::vector codecs; // Gather the current codec capabilities to allow checking scalabilityMode and // codec selection against supported values. - if (media_type == cricket::MEDIA_TYPE_VIDEO) { - codecs = context_->media_engine()->video().send_codecs(false); + CodecVendor codec_vendor(context_->media_engine(), false, + context_->env().field_trials()); + if (media_type == webrtc::MediaType::VIDEO) { + codecs = codec_vendor.video_send_codecs().codecs(); } else { - codecs = context_->media_engine()->voice().send_codecs(); + codecs = codec_vendor.audio_send_codecs().codecs(); } - auto result = - cricket::CheckRtpParametersValues(parameters, codecs, absl::nullopt); + auto result = CheckRtpParametersValues(parameters, codecs, std::nullopt, + env_.field_trials()); if (!result.ok()) { if (result.type() == RTCErrorType::INVALID_MODIFICATION) { result.set_type(RTCErrorType::UNSUPPORTED_OPERATION); @@ -1128,17 +1103,16 @@ PeerConnection::AddTransceiver( LOG_AND_RETURN_ERROR(result.type(), result.message()); } - RTC_LOG(LS_INFO) << "Adding " << cricket::MediaTypeToString(media_type) + RTC_LOG(LS_INFO) << "Adding " << webrtc::MediaTypeToString(media_type) << " transceiver in response to a call to AddTransceiver."; // Set the sender ID equal to the track ID if the track is specified unless // that sender ID is already in use. std::string sender_id = (track && !rtp_manager()->FindSenderById(track->id()) ? 
track->id() - : rtc::CreateRandomUuid()); + : CreateRandomUuid()); auto sender = rtp_manager()->CreateSender( media_type, sender_id, track, init.stream_ids, parameters.encodings); - auto receiver = - rtp_manager()->CreateReceiver(media_type, rtc::CreateRandomUuid()); + auto receiver = rtp_manager()->CreateReceiver(media_type, CreateRandomUuid()); auto transceiver = rtp_manager()->CreateAndAddTransceiver(sender, receiver); transceiver->internal()->set_direction(init.direction); @@ -1146,7 +1120,7 @@ PeerConnection::AddTransceiver( sdp_handler_->UpdateNegotiationNeeded(); } - return rtc::scoped_refptr(transceiver); + return scoped_refptr(transceiver); } void PeerConnection::OnNegotiationNeeded() { @@ -1155,7 +1129,7 @@ void PeerConnection::OnNegotiationNeeded() { sdp_handler_->UpdateNegotiationNeeded(); } -rtc::scoped_refptr PeerConnection::CreateSender( +scoped_refptr PeerConnection::CreateSender( const std::string& kind, const std::string& stream_id) { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -1175,7 +1149,7 @@ rtc::scoped_refptr PeerConnection::CreateSender( // generate a random stream ID if not specified. std::vector stream_ids; if (stream_id.empty()) { - stream_ids.push_back(rtc::CreateRandomUuid()); + stream_ids.push_back(CreateRandomUuid()); RTC_LOG(LS_INFO) << "No stream_id specified for sender. Generated stream ID: " << stream_ids[0]; @@ -1184,10 +1158,10 @@ rtc::scoped_refptr PeerConnection::CreateSender( } // TODO(steveanton): Move construction of the RtpSenders to RtpTransceiver. - rtc::scoped_refptr> new_sender; + scoped_refptr> new_sender; if (kind == MediaStreamTrackInterface::kAudioKind) { auto audio_sender = - AudioRtpSender::Create(worker_thread(), rtc::CreateRandomUuid(), + AudioRtpSender::Create(env_, worker_thread(), CreateRandomUuid(), legacy_stats_.get(), rtp_manager()); audio_sender->SetMediaChannel(rtp_manager()->voice_media_send_channel()); new_sender = RtpSenderProxyWithInternal::Create( @@ -1195,7 +1169,7 @@ rtc::scoped_refptr PeerConnection::CreateSender( rtp_manager()->GetAudioTransceiver()->internal()->AddSender(new_sender); } else if (kind == MediaStreamTrackInterface::kVideoKind) { auto video_sender = VideoRtpSender::Create( - worker_thread(), rtc::CreateRandomUuid(), rtp_manager()); + env_, worker_thread(), CreateRandomUuid(), rtp_manager()); video_sender->SetMediaChannel(rtp_manager()->video_media_send_channel()); new_sender = RtpSenderProxyWithInternal::Create( signaling_thread(), video_sender); @@ -1209,10 +1183,10 @@ rtc::scoped_refptr PeerConnection::CreateSender( return new_sender; } -std::vector> PeerConnection::GetSenders() +std::vector> PeerConnection::GetSenders() const { RTC_DCHECK_RUN_ON(signaling_thread()); - std::vector> ret; + std::vector> ret; if (ConfiguredForMedia()) { for (const auto& sender : rtp_manager()->GetSendersInternal()) { ret.push_back(sender); @@ -1221,10 +1195,10 @@ std::vector> PeerConnection::GetSenders() return ret; } -std::vector> -PeerConnection::GetReceivers() const { +std::vector> PeerConnection::GetReceivers() + const { RTC_DCHECK_RUN_ON(signaling_thread()); - std::vector> ret; + std::vector> ret; if (ConfiguredForMedia()) { for (const auto& receiver : rtp_manager()->GetReceiversInternal()) { ret.push_back(receiver); @@ -1233,12 +1207,12 @@ PeerConnection::GetReceivers() const { return ret; } -std::vector> +std::vector> PeerConnection::GetTransceivers() const { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_CHECK(IsUnifiedPlan()) << "GetTransceivers is only supported with Unified Plan SdpSemantics."; - std::vector> 
all_transceivers; + std::vector> all_transceivers; if (ConfiguredForMedia()) { for (const auto& transceiver : rtp_manager()->transceivers()->List()) { all_transceivers.push_back(transceiver); @@ -1282,19 +1256,19 @@ void PeerConnection::GetStats(RTCStatsCollectorCallback* callback) { RTC_DCHECK(callback); RTC_LOG_THREAD_BLOCK_COUNT(); stats_collector_->GetStatsReport( - rtc::scoped_refptr(callback)); + scoped_refptr(callback)); RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(2); } void PeerConnection::GetStats( - rtc::scoped_refptr selector, - rtc::scoped_refptr callback) { + scoped_refptr selector, + scoped_refptr callback) { TRACE_EVENT0("webrtc", "PeerConnection::GetStats"); RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(callback); RTC_DCHECK(stats_collector_); RTC_LOG_THREAD_BLOCK_COUNT(); - rtc::scoped_refptr internal_sender; + scoped_refptr internal_sender; if (selector) { for (const auto& proxy_transceiver : rtp_manager()->transceivers()->List()) { @@ -1319,14 +1293,14 @@ void PeerConnection::GetStats( } void PeerConnection::GetStats( - rtc::scoped_refptr selector, - rtc::scoped_refptr callback) { + scoped_refptr selector, + scoped_refptr callback) { TRACE_EVENT0("webrtc", "PeerConnection::GetStats"); RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(callback); RTC_DCHECK(stats_collector_); RTC_LOG_THREAD_BLOCK_COUNT(); - rtc::scoped_refptr internal_receiver; + scoped_refptr internal_receiver; if (selector) { for (const auto& proxy_transceiver : rtp_manager()->transceivers()->List()) { @@ -1379,24 +1353,24 @@ PeerConnection::ice_gathering_state() { return ice_gathering_state_; } -absl::optional PeerConnection::can_trickle_ice_candidates() { +std::optional PeerConnection::can_trickle_ice_candidates() { RTC_DCHECK_RUN_ON(signaling_thread()); const SessionDescriptionInterface* description = current_remote_description(); if (!description) { description = pending_remote_description(); } if (!description) { - return absl::nullopt; + return std::nullopt; } // TODO(bugs.webrtc.org/7443): Change to retrieve from session-level option. 
if (description->description()->transport_infos().size() < 1) { - return absl::nullopt; + return std::nullopt; } return description->description()->transport_infos()[0].description.HasOption( "trickle"); } -RTCErrorOr> +RTCErrorOr> PeerConnection::CreateDataChannelOrError(const std::string& label, const DataChannelInit* config) { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -1415,14 +1389,14 @@ PeerConnection::CreateDataChannelOrError(const std::string& label, } internal_config.fallback_ssl_role = sdp_handler_->GuessSslRole(); - RTCErrorOr> ret = + RTCErrorOr> ret = data_channel_controller_.InternalCreateDataChannelWithProxy( label, internal_config); if (!ret.ok()) { return ret.MoveError(); } - rtc::scoped_refptr channel = ret.MoveValue(); + scoped_refptr channel = ret.MoveValue(); // Check the onRenegotiationNeeded event (with plan-b backward compat) if (configuration_.sdp_semantics == SdpSemantics::kUnifiedPlan || @@ -1460,7 +1434,7 @@ void PeerConnection::SetLocalDescription( void PeerConnection::SetLocalDescription( std::unique_ptr desc, - rtc::scoped_refptr observer) { + scoped_refptr observer) { RTC_DCHECK_RUN_ON(signaling_thread()); sdp_handler_->SetLocalDescription(std::move(desc), observer); } @@ -1472,7 +1446,7 @@ void PeerConnection::SetLocalDescription( } void PeerConnection::SetLocalDescription( - rtc::scoped_refptr observer) { + scoped_refptr observer) { RTC_DCHECK_RUN_ON(signaling_thread()); sdp_handler_->SetLocalDescription(observer); } @@ -1486,7 +1460,7 @@ void PeerConnection::SetRemoteDescription( void PeerConnection::SetRemoteDescription( std::unique_ptr desc, - rtc::scoped_refptr observer) { + scoped_refptr observer) { RTC_DCHECK_RUN_ON(signaling_thread()); sdp_handler_->SetRemoteDescription(std::move(desc), observer); } @@ -1505,106 +1479,43 @@ RTCError PeerConnection::SetConfiguration( "SetConfiguration: PeerConnection is closed."); } - // According to JSEP, after setLocalDescription, changing the candidate pool - // size is not allowed, and changing the set of ICE servers will not result - // in new candidates being gathered. - if (local_description() && configuration.ice_candidate_pool_size != - configuration_.ice_candidate_pool_size) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, - "Can't change candidate pool size after calling " - "SetLocalDescription."); + const bool has_local_description = local_description() != nullptr; + + RTCError validate_error = ValidateIceCandidatePoolSize( + configuration.ice_candidate_pool_size, + has_local_description + ? std::optional(configuration_.ice_candidate_pool_size) + : std::nullopt); + if (!validate_error.ok()) { + return validate_error; } - if (local_description() && + if (has_local_description && configuration.crypto_options != configuration_.crypto_options) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, "Can't change crypto_options after calling " "SetLocalDescription."); } - // The simplest (and most future-compatible) way to tell if the config was - // modified in an invalid way is to copy each property we do support - // modifying, then use operator==. There are far more properties we don't - // support modifying than those we do, and more could be added. 
- RTCConfiguration modified_config = configuration_; - modified_config.servers = configuration.servers; - modified_config.type = configuration.type; - modified_config.ice_candidate_pool_size = - configuration.ice_candidate_pool_size; - modified_config.prune_turn_ports = configuration.prune_turn_ports; - modified_config.turn_port_prune_policy = configuration.turn_port_prune_policy; - modified_config.surface_ice_candidates_on_ice_transport_type_changed = - configuration.surface_ice_candidates_on_ice_transport_type_changed; - modified_config.ice_check_min_interval = configuration.ice_check_min_interval; - modified_config.ice_check_interval_strong_connectivity = - configuration.ice_check_interval_strong_connectivity; - modified_config.ice_check_interval_weak_connectivity = - configuration.ice_check_interval_weak_connectivity; - modified_config.ice_unwritable_timeout = configuration.ice_unwritable_timeout; - modified_config.ice_unwritable_min_checks = - configuration.ice_unwritable_min_checks; - modified_config.ice_inactive_timeout = configuration.ice_inactive_timeout; - modified_config.stun_candidate_keepalive_interval = - configuration.stun_candidate_keepalive_interval; - modified_config.turn_customizer = configuration.turn_customizer; - modified_config.network_preference = configuration.network_preference; - modified_config.active_reset_srtp_params = - configuration.active_reset_srtp_params; - modified_config.turn_logging_id = configuration.turn_logging_id; - modified_config.allow_codec_switching = configuration.allow_codec_switching; - modified_config.stable_writable_connection_ping_interval_ms = - configuration.stable_writable_connection_ping_interval_ms; - if (configuration != modified_config) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, - "Modifying the configuration in an unsupported way."); + // Create a new, configuration object whose Ice config will have been + // validated.. + RTCErrorOr validated_config = + ApplyConfiguration(configuration, configuration_); + if (!validated_config.ok()) { + return validated_config.error(); } - // Validate the modified configuration. - RTCError validate_error = ValidateConfiguration(modified_config); + // Parse ICE servers before hopping to network thread. + ServerAddresses stun_servers; + std::vector turn_servers; + validate_error = ParseAndValidateIceServersFromConfiguration( + configuration, stun_servers, turn_servers); if (!validate_error.ok()) { return validate_error; } + NoteServerUsage(usage_pattern_, stun_servers, turn_servers); - // Note that this isn't possible through chromium, since it's an unsigned - // short in WebIDL. - if (configuration.ice_candidate_pool_size < 0 || - configuration.ice_candidate_pool_size > static_cast(UINT16_MAX)) { - return RTCError(RTCErrorType::INVALID_RANGE); - } - - // Parse ICE servers before hopping to network thread. - cricket::ServerAddresses stun_servers; - std::vector turn_servers; - RTCError parse_error = ParseIceServersOrError(configuration.servers, - &stun_servers, &turn_servers); - if (!parse_error.ok()) { - return parse_error; - } - - // Restrict number of TURN servers. 
- if (turn_servers.size() > cricket::kMaxTurnServers) { - RTC_LOG(LS_WARNING) << "Number of configured TURN servers is " - << turn_servers.size() - << " which exceeds the maximum allowed number of " - << cricket::kMaxTurnServers; - turn_servers.resize(cricket::kMaxTurnServers); - } - - // Add the turn logging id to all turn servers - for (cricket::RelayServerConfig& turn_server : turn_servers) { - turn_server.turn_logging_id = configuration.turn_logging_id; - } - - // Note if STUN or TURN servers were supplied. - if (!stun_servers.empty()) { - NoteUsageEvent(UsageEvent::STUN_SERVER_ADDED); - } - if (!turn_servers.empty()) { - NoteUsageEvent(UsageEvent::TURN_SERVER_ADDED); - } - - const bool has_local_description = local_description() != nullptr; - + const RTCConfiguration& modified_config = validated_config.value(); const bool needs_ice_restart = modified_config.servers != configuration_.servers || NeedIceRestart( @@ -1612,7 +1523,9 @@ RTCError PeerConnection::SetConfiguration( configuration_.type, modified_config.type) || modified_config.GetTurnPortPrunePolicy() != configuration_.GetTurnPortPrunePolicy(); - cricket::IceConfig ice_config = ParseIceConfig(modified_config); + IceConfig ice_config(modified_config); + ice_config.dtls_handshake_in_stun = + CanAttemptDtlsStunPiggybacking(modified_config); // Apply part of the configuration on the network thread. In theory this // shouldn't fail. @@ -1628,6 +1541,8 @@ RTCError PeerConnection::SetConfiguration( transport_controller_->SetNeedsIceRestartFlag(); transport_controller_->SetIceConfig(ice_config); + transport_controller_->SetActiveResetSrtpParams( + modified_config.active_reset_srtp_params); return ReconfigurePortAllocator_n( stun_servers, turn_servers, modified_config.type, modified_config.ice_candidate_pool_size, @@ -1640,37 +1555,6 @@ RTCError PeerConnection::SetConfiguration( "Failed to apply configuration to PortAllocator."); } - if (configuration_.active_reset_srtp_params != - modified_config.active_reset_srtp_params) { - // TODO(tommi): merge BlockingCalls - network_thread()->BlockingCall([this, &modified_config] { - RTC_DCHECK_RUN_ON(network_thread()); - transport_controller_->SetActiveResetSrtpParams( - modified_config.active_reset_srtp_params); - }); - } - - if (modified_config.allow_codec_switching.has_value()) { - std::vector channels; - for (const auto& transceiver : rtp_manager()->transceivers()->List()) { - if (transceiver->media_type() != cricket::MEDIA_TYPE_VIDEO) - continue; - - auto* video_channel = transceiver->internal()->channel(); - if (video_channel) - channels.push_back( - static_cast( - video_channel->media_send_channel())); - } - - worker_thread()->BlockingCall( - [channels = std::move(channels), - allow_codec_switching = *modified_config.allow_codec_switching]() { - for (auto* ch : channels) - ch->SetVideoCodecSwitchingEnabled(allow_codec_switching); - }); - } - configuration_ = modified_config; return RTCError::OK(); } @@ -1687,14 +1571,14 @@ void PeerConnection::AddIceCandidate( std::function callback) { RTC_DCHECK_RUN_ON(signaling_thread()); sdp_handler_->AddIceCandidate(std::move(candidate), - [this, callback](webrtc::RTCError result) { + [this, callback](RTCError result) { ClearStatsCache(); callback(result); }); } bool PeerConnection::RemoveIceCandidates( - const std::vector& candidates) { + const std::vector& candidates) { TRACE_EVENT0("webrtc", "PeerConnection::RemoveIceCandidates"); RTC_DCHECK_RUN_ON(signaling_thread()); return sdp_handler_->RemoveIceCandidates(candidates); @@ -1741,6 +1625,15 @@ 
RTCError PeerConnection::SetBitrate(const BitrateSettings& bitrate) { return RTCError::OK(); } +void PeerConnection::ReconfigureBandwidthEstimation( + const BandwidthEstimationSettings& settings) { + worker_thread()->PostTask(SafeTask(worker_thread_safety_, [this, settings]() { + RTC_DCHECK_RUN_ON(worker_thread()); + call_->GetTransportControllerSend()->ReconfigureBandwidthEstimation( + settings); + })); +} + void PeerConnection::SetAudioPlayout(bool playout) { if (!worker_thread()->IsCurrent()) { worker_thread()->BlockingCall( @@ -1761,8 +1654,7 @@ void PeerConnection::SetAudioRecording(bool recording) { audio_state->SetRecording(recording); } -void PeerConnection::AddAdaptationResource( - rtc::scoped_refptr resource) { +void PeerConnection::AddAdaptationResource(scoped_refptr resource) { if (!worker_thread()->IsCurrent()) { return worker_thread()->BlockingCall( [this, resource]() { return AddAdaptationResource(resource); }); @@ -1791,7 +1683,7 @@ bool PeerConnection::StartRtcEventLog( std::unique_ptr output) { int64_t output_period_ms = 5000; if (trials().IsDisabled("WebRTC-RtcEventLogNewFormat")) { - output_period_ms = webrtc::RtcEventLog::kImmediateOutput; + output_period_ms = RtcEventLog::kImmediateOutput; } return StartRtcEventLog(std::move(output), output_period_ms); } @@ -1800,14 +1692,23 @@ void PeerConnection::StopRtcEventLog() { worker_thread()->BlockingCall([this] { StopRtcEventLog_w(); }); } -rtc::scoped_refptr -PeerConnection::LookupDtlsTransportByMid(const std::string& mid) { +void PeerConnection::SetDataChannelEventObserver( + std::unique_ptr observer) { + network_thread()->PostTask(SafeTask( + network_thread_safety_, [this, obs = std::move(observer)]() mutable { + RTC_DCHECK_RUN_ON(network_thread()); + data_channel_controller_.SetEventObserver(std::move(obs)); + })); +} + +scoped_refptr PeerConnection::LookupDtlsTransportByMid( + const std::string& mid) { RTC_DCHECK_RUN_ON(network_thread()); return transport_controller_->LookupDtlsTransportByMid(mid); } -rtc::scoped_refptr -PeerConnection::LookupDtlsTransportByMidInternal(const std::string& mid) { +scoped_refptr PeerConnection::LookupDtlsTransportByMidInternal( + const std::string& mid) { RTC_DCHECK_RUN_ON(signaling_thread()); // TODO(bugs.webrtc.org/9987): Avoid the thread jump. // This might be done by caching the value on the signaling thread. @@ -1817,8 +1718,7 @@ PeerConnection::LookupDtlsTransportByMidInternal(const std::string& mid) { }); } -rtc::scoped_refptr PeerConnection::GetSctpTransport() - const { +scoped_refptr PeerConnection::GetSctpTransport() const { RTC_DCHECK_RUN_ON(network_thread()); if (!sctp_mid_n_) return nullptr; @@ -1899,7 +1799,12 @@ void PeerConnection::Close() { // Don't destroy BaseChannels until after stats has been cleaned up so that // the last stats request can still read from the channels. - sdp_handler_->DestroyAllChannels(); + // TODO(tommi): The voice/video channels will be partially uninitialized on + // the network thread (see `RtpTransceiver::ClearChannel`), partially on the + // worker thread (see `PushNewMediaChannelAndDeleteChannel`) and then + // eventually freed on the signaling thread. + // It would be good to combine those steps with the teardown steps here. + sdp_handler_->DestroyMediaChannels(); // The event log is used in the transport controller, which must be outlived // by the former. 
CreateOffer by the peer connection is implemented @@ -1912,12 +1817,8 @@ void PeerConnection::Close() { } network_thread()->BlockingCall([this] { - // Data channels will already have been unset via the DestroyAllChannels() - // call above, which triggers a call to TeardownDataChannelTransport_n(). - // TODO(tommi): ^^ That's not exactly optimal since this is yet another - // blocking hop to the network thread during Close(). Further still, the - // voice/video/data channels will be cleared on the worker thread. RTC_DCHECK_RUN_ON(network_thread()); + TeardownDataChannelTransport_n({}); transport_controller_.reset(); port_allocator_->DiscardCandidatePool(); if (network_thread_safety_) { @@ -1925,14 +1826,17 @@ void PeerConnection::Close() { } }); + sctp_mid_s_.reset(); + SetSctpTransportName(""); + worker_thread()->BlockingCall([this] { RTC_DCHECK_RUN_ON(worker_thread()); worker_thread_safety_->SetNotAlive(); call_.reset(); - // The event log must outlive call (and any other object that uses it). - event_log_.reset(); + StopRtcEventLog_w(); }); ReportUsagePattern(); + ReportCloseUsageMetrics(); // Signal shutdown to the sdp handler. This invalidates weak pointers for // internal pending callbacks. @@ -2027,13 +1931,13 @@ void PeerConnection::ReportFirstConnectUsageMetrics() { RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.ProvisionalAnswer", pranswer, kProvisionalAnswerMax); - // Record the number of valid / invalid ice-ufrag. We do allow certain - // non-spec ice-char for backward-compat reasons. At this point we know - // that the ufrag/pwd consists of a valid ice-char or one of the four - // not allowed characters since we have passed the IsIceChar check done - // by the p2p transport description on setRemoteDescription calls. auto transport_infos = remote_description()->description()->transport_infos(); - if (transport_infos.size() > 0) { + if (!transport_infos.empty()) { + // Record the number of valid / invalid ice-ufrag. We do allow certain + // non-spec ice-char for backward-compat reasons. At this point we know + // that the ufrag/pwd consists of a valid ice-char or one of the four + // not allowed characters since we have passed the IsIceChar check done + // by the p2p transport description on setRemoteDescription calls. auto ice_parameters = transport_infos[0].description.GetIceParameters(); auto is_invalid_char = [](char c) { return c == '-' || c == '=' || c == '#' || c == '_'; @@ -2045,6 +1949,16 @@ void PeerConnection::ReportFirstConnectUsageMetrics() { RTC_HISTOGRAM_BOOLEAN( "WebRTC.PeerConnection.ValidIceChars", !(isUsingInvalidIceCharInUfrag || isUsingInvalidIceCharInPwd)); + + // Record whether the hash algorithm of the first transport's + // DTLS fingerprint is still using SHA-1. + if (transport_infos[0].description.identity_fingerprint) { + RTC_HISTOGRAM_BOOLEAN( + "WebRTC.PeerConnection.DtlsFingerprintLegacySha1", + absl::EqualsIgnoreCase( + transport_infos[0].description.identity_fingerprint->algorithm, + "sha-1")); + } } // Record RtcpMuxPolicy setting. 
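[Editor's note, not part of the patch] For reference, a minimal standalone sketch of the legacy-fingerprint check that the hunk above records under WebRTC.PeerConnection.DtlsFingerprintLegacySha1. The helper name and free-function form are illustrative only; the patch itself inlines the absl::EqualsIgnoreCase call at the histogram site.

  // Illustrative sketch, assuming only that the SDP a=fingerprint attribute
  // carries the digest name (e.g. "sha-256" or the legacy "sha-1").
  #include <string>

  #include "absl/strings/match.h"

  bool IsLegacySha1DtlsFingerprint(const std::string& algorithm) {
    // Case-insensitive so both "sha-1" and "SHA-1" spellings count as legacy.
    return absl::EqualsIgnoreCase(algorithm, "sha-1");
  }

Because RTC_HISTOGRAM_BOOLEAN takes a single true/false sample, each first-time-connected peer connection contributes one data point to the metric.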
@@ -2059,6 +1973,54 @@ void PeerConnection::ReportFirstConnectUsageMetrics() { } RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.RtcpMuxPolicy", rtcp_mux_policy, kRtcpMuxPolicyUsageMax); + switch (local_description()->GetType()) { + case SdpType::kOffer: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.PeerConnection.SdpMunging.Offer.ConnectionEstablished", + sdp_handler_->sdp_munging_type(), SdpMungingType::kMaxValue); + break; + case SdpType::kAnswer: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.PeerConnection.SdpMunging.Answer.ConnectionEstablished", + sdp_handler_->sdp_munging_type(), SdpMungingType::kMaxValue); + break; + case SdpType::kPrAnswer: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.PeerConnection.SdpMunging.PrAnswer.ConnectionEstablished", + sdp_handler_->sdp_munging_type(), SdpMungingType::kMaxValue); + break; + case SdpType::kRollback: + // Rollback does not have SDP so can not be munged. + break; + } +} + +void PeerConnection::ReportCloseUsageMetrics() { + if (!was_ever_connected_) { + return; + } + RTC_DCHECK(local_description()); + RTC_DCHECK(sdp_handler_); + switch (local_description()->GetType()) { + case SdpType::kOffer: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.PeerConnection.SdpMunging.Offer.ConnectionClosed", + sdp_handler_->sdp_munging_type(), SdpMungingType::kMaxValue); + break; + case SdpType::kAnswer: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.PeerConnection.SdpMunging.Answer.ConnectionClosed", + sdp_handler_->sdp_munging_type(), SdpMungingType::kMaxValue); + break; + case SdpType::kPrAnswer: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.PeerConnection.SdpMunging.PrAnswer.ConnectionClosed", + sdp_handler_->sdp_munging_type(), SdpMungingType::kMaxValue); + break; + case SdpType::kRollback: + // Rollback does not have SDP so can not be munged. + break; + } } void PeerConnection::OnIceGatheringChange( @@ -2092,7 +2054,7 @@ void PeerConnection::OnIceCandidateError(const std::string& address, } void PeerConnection::OnIceCandidatesRemoved( - const std::vector& candidates) { + const std::vector& candidates) { if (IsClosed()) { return; } @@ -2100,35 +2062,44 @@ void PeerConnection::OnIceCandidatesRemoved( } void PeerConnection::OnSelectedCandidatePairChanged( - const cricket::CandidatePairChangeEvent& event) { + const CandidatePairChangeEvent& event) { if (IsClosed()) { return; } - if (event.selected_candidate_pair.local_candidate().type() == - LOCAL_PORT_TYPE && - event.selected_candidate_pair.remote_candidate().type() == - LOCAL_PORT_TYPE) { + if (event.selected_candidate_pair.local_candidate().is_local() && + event.selected_candidate_pair.remote_candidate().is_local()) { NoteUsageEvent(UsageEvent::DIRECT_CONNECTION_SELECTED); } Observer()->OnIceSelectedCandidatePairChanged(event); } -absl::optional PeerConnection::GetDataMid() const { +bool PeerConnection::CreateDataChannelTransport(absl::string_view mid) { RTC_DCHECK_RUN_ON(signaling_thread()); - return sctp_mid_s_; -} + RTC_DCHECK(!sctp_mid().has_value() || mid == sctp_mid().value()); + RTC_LOG(LS_INFO) << "Creating data channel, mid=" << mid; + + std::optional transport_name = + network_thread()->BlockingCall([&] { + RTC_DCHECK_RUN_ON(network_thread()); + return SetupDataChannelTransport_n(mid); + }); + if (!transport_name) + return false; -void PeerConnection::SetSctpDataInfo(absl::string_view mid, - absl::string_view transport_name) { - RTC_DCHECK_RUN_ON(signaling_thread()); sctp_mid_s_ = std::string(mid); - SetSctpTransportName(std::string(transport_name)); + SetSctpTransportName(transport_name.value()); + + return true; } -void 
PeerConnection::ResetSctpDataInfo() { +void PeerConnection::DestroyDataChannelTransport(RTCError error) { RTC_DCHECK_RUN_ON(signaling_thread()); + network_thread()->BlockingCall([&] { + RTC_DCHECK_RUN_ON(network_thread()); + TeardownDataChannelTransport_n(error); + }); sctp_mid_s_.reset(); SetSctpTransportName(""); } @@ -2143,8 +2114,8 @@ void PeerConnection::OnSctpDataChannelStateChanged( PeerConnection::InitializePortAllocatorResult PeerConnection::InitializePortAllocator_n( - const cricket::ServerAddresses& stun_servers, - const std::vector& turn_servers, + const ServerAddresses& stun_servers, + const std::vector& turn_servers, const RTCConfiguration& configuration) { RTC_DCHECK_RUN_ON(network_thread()); @@ -2152,36 +2123,36 @@ PeerConnection::InitializePortAllocator_n( // To handle both internal and externally created port allocator, we will // enable BUNDLE here. int port_allocator_flags = port_allocator_->flags(); - port_allocator_flags |= cricket::PORTALLOCATOR_ENABLE_SHARED_SOCKET | - cricket::PORTALLOCATOR_ENABLE_IPV6 | - cricket::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI; + port_allocator_flags |= PORTALLOCATOR_ENABLE_SHARED_SOCKET | + PORTALLOCATOR_ENABLE_IPV6 | + PORTALLOCATOR_ENABLE_IPV6_ON_WIFI; if (trials().IsDisabled("WebRTC-IPv6Default")) { - port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6); + port_allocator_flags &= ~(PORTALLOCATOR_ENABLE_IPV6); } if (configuration.disable_ipv6_on_wifi) { - port_allocator_flags &= ~(cricket::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); + port_allocator_flags &= ~(PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); RTC_LOG(LS_INFO) << "IPv6 candidates on Wi-Fi are disabled."; } if (configuration.tcp_candidate_policy == kTcpCandidatePolicyDisabled) { - port_allocator_flags |= cricket::PORTALLOCATOR_DISABLE_TCP; + port_allocator_flags |= PORTALLOCATOR_DISABLE_TCP; RTC_LOG(LS_INFO) << "TCP candidates are disabled."; } if (configuration.candidate_network_policy == kCandidateNetworkPolicyLowCost) { - port_allocator_flags |= cricket::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS; + port_allocator_flags |= PORTALLOCATOR_DISABLE_COSTLY_NETWORKS; RTC_LOG(LS_INFO) << "Do not gather candidates on high-cost networks"; } if (configuration.disable_link_local_networks) { - port_allocator_flags |= cricket::PORTALLOCATOR_DISABLE_LINK_LOCAL_NETWORKS; + port_allocator_flags |= PORTALLOCATOR_DISABLE_LINK_LOCAL_NETWORKS; RTC_LOG(LS_INFO) << "Disable candidates on link-local network interfaces."; } port_allocator_->set_flags(port_allocator_flags); // No step delay is used while allocating ports. 
- port_allocator_->set_step_delay(cricket::kMinimumStepDelay); + port_allocator_->set_step_delay(kMinimumStepDelay); port_allocator_->SetCandidateFilter( ConvertIceTransportTypeToCandidateFilter(configuration.type)); port_allocator_->set_max_ipv6_networks(configuration.max_ipv6_networks); @@ -2199,28 +2170,22 @@ PeerConnection::InitializePortAllocator_n( configuration.stun_candidate_keepalive_interval); InitializePortAllocatorResult res; - res.enable_ipv6 = port_allocator_flags & cricket::PORTALLOCATOR_ENABLE_IPV6; + res.enable_ipv6 = port_allocator_flags & PORTALLOCATOR_ENABLE_IPV6; return res; } bool PeerConnection::ReconfigurePortAllocator_n( - const cricket::ServerAddresses& stun_servers, - const std::vector& turn_servers, + const ServerAddresses& stun_servers, + const std::vector& turn_servers, IceTransportsType type, int candidate_pool_size, PortPrunePolicy turn_port_prune_policy, - webrtc::TurnCustomizer* turn_customizer, - absl::optional stun_candidate_keepalive_interval, + TurnCustomizer* turn_customizer, + std::optional stun_candidate_keepalive_interval, bool have_local_description) { RTC_DCHECK_RUN_ON(network_thread()); port_allocator_->SetCandidateFilter( ConvertIceTransportTypeToCandidateFilter(type)); - // According to JSEP, after setLocalDescription, changing the candidate pool - // size is not allowed, and changing the set of ICE servers will not result - // in new candidates being gathered. - if (have_local_description) { - port_allocator_->FreezeCandidatePool(); - } // Add the custom tls turn servers if they exist. auto turn_servers_copy = turn_servers; for (auto& turn_server : turn_servers_copy) { @@ -2238,27 +2203,25 @@ bool PeerConnection::StartRtcEventLog_w( std::unique_ptr output, int64_t output_period_ms) { RTC_DCHECK_RUN_ON(worker_thread()); - if (!event_log_) { + if (!worker_thread_safety_->alive()) { return false; } - return event_log_->StartLogging(std::move(output), output_period_ms); + return env_.event_log().StartLogging(std::move(output), output_period_ms); } void PeerConnection::StopRtcEventLog_w() { RTC_DCHECK_RUN_ON(worker_thread()); - if (event_log_) { - event_log_->StopLogging(); - } + env_.event_log().StopLogging(); } -absl::optional PeerConnection::GetSctpSslRole_n() { +std::optional PeerConnection::GetSctpSslRole_n() { RTC_DCHECK_RUN_ON(network_thread()); return sctp_mid_n_ ? 
transport_controller_->GetDtlsRole(*sctp_mid_n_) - : absl::nullopt; + : std::nullopt; } bool PeerConnection::GetSslRole(const std::string& content_name, - rtc::SSLRole* role) { + SSLRole* role) { RTC_DCHECK_RUN_ON(signaling_thread()); if (!local_description() || !remote_description()) { RTC_LOG(LS_INFO) @@ -2281,7 +2244,7 @@ bool PeerConnection::GetSslRole(const std::string& content_name, bool PeerConnection::GetTransportDescription( const SessionDescription* description, const std::string& content_name, - cricket::TransportDescription* tdesc) { + TransportDescription* tdesc) { if (!description || !tdesc) { return false; } @@ -2299,11 +2262,11 @@ std::vector PeerConnection::GetDataChannelStats() const { return data_channel_controller_.GetDataChannelStats(); } -absl::optional PeerConnection::sctp_transport_name() const { +std::optional PeerConnection::sctp_transport_name() const { RTC_DCHECK_RUN_ON(signaling_thread()); if (sctp_mid_s_ && transport_controller_copy_) return sctp_transport_name_s_; - return absl::optional(); + return std::optional(); } void PeerConnection::SetSctpTransportName(std::string sctp_transport_name) { @@ -2312,32 +2275,31 @@ void PeerConnection::SetSctpTransportName(std::string sctp_transport_name) { ClearStatsCache(); } -absl::optional PeerConnection::sctp_mid() const { +std::optional PeerConnection::sctp_mid() const { RTC_DCHECK_RUN_ON(signaling_thread()); return sctp_mid_s_; } -cricket::CandidateStatsList PeerConnection::GetPooledCandidateStats() const { +CandidateStatsList PeerConnection::GetPooledCandidateStats() const { RTC_DCHECK_RUN_ON(network_thread()); if (!network_thread_safety_->alive()) return {}; - cricket::CandidateStatsList candidate_stats_list; + CandidateStatsList candidate_stats_list; port_allocator_->GetCandidateStatsFromPooledSessions(&candidate_stats_list); return candidate_stats_list; } -std::map -PeerConnection::GetTransportStatsByNames( +std::map PeerConnection::GetTransportStatsByNames( const std::set& transport_names) { TRACE_EVENT0("webrtc", "PeerConnection::GetTransportStatsByNames"); RTC_DCHECK_RUN_ON(network_thread()); if (!network_thread_safety_->alive()) return {}; - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - std::map transport_stats_by_name; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; + std::map transport_stats_by_name; for (const std::string& transport_name : transport_names) { - cricket::TransportStats transport_stats; + TransportStats transport_stats; bool success = transport_controller_->GetStats(transport_name, &transport_stats); if (success) { @@ -2352,7 +2314,7 @@ PeerConnection::GetTransportStatsByNames( bool PeerConnection::GetLocalCertificate( const std::string& transport_name, - rtc::scoped_refptr* certificate) { + scoped_refptr* certificate) { RTC_DCHECK_RUN_ON(network_thread()); if (!network_thread_safety_->alive() || !certificate) { return false; @@ -2361,7 +2323,7 @@ bool PeerConnection::GetLocalCertificate( return *certificate != nullptr; } -std::unique_ptr PeerConnection::GetRemoteSSLCertChain( +std::unique_ptr PeerConnection::GetRemoteSSLCertChain( const std::string& transport_name) { RTC_DCHECK_RUN_ON(network_thread()); return transport_controller_->GetRemoteSSLCertChain(transport_name); @@ -2380,9 +2342,9 @@ bool PeerConnection::NeedsIceRestart(const std::string& content_name) const { } void PeerConnection::OnTransportControllerConnectionState( - cricket::IceConnectionState state) { + ::webrtc::IceConnectionState state) { switch (state) { - case cricket::kIceConnectionConnecting: + 
case ::webrtc::kIceConnectionConnecting: // If the current state is Connected or Completed, then there were // writable channels but now there are not, so the next state must // be Disconnected. @@ -2397,10 +2359,10 @@ void PeerConnection::OnTransportControllerConnectionState( PeerConnectionInterface::kIceConnectionDisconnected); } break; - case cricket::kIceConnectionFailed: + case ::webrtc::kIceConnectionFailed: SetIceConnectionState(PeerConnectionInterface::kIceConnectionFailed); break; - case cricket::kIceConnectionConnected: + case ::webrtc::kIceConnectionConnected: RTC_LOG(LS_INFO) << "Changing to ICE connected state because " "all transports are writable."; { @@ -2420,7 +2382,7 @@ void PeerConnection::OnTransportControllerConnectionState( SetIceConnectionState(PeerConnectionInterface::kIceConnectionConnected); NoteUsageEvent(UsageEvent::ICE_STATE_CONNECTED); break; - case cricket::kIceConnectionCompleted: + case ::webrtc::kIceConnectionCompleted: RTC_LOG(LS_INFO) << "Changing to ICE completed state because " "all transports are complete."; if (ice_connection_state_ != @@ -2440,7 +2402,7 @@ void PeerConnection::OnTransportControllerConnectionState( void PeerConnection::OnTransportControllerCandidatesGathered( const std::string& transport_name, - const cricket::Candidates& candidates) { + const Candidates& candidates) { // TODO(bugs.webrtc.org/12427): Expect this to come in on the network thread // (not signaling as it currently does), handle appropriately. int sdp_mline_index; @@ -2451,7 +2413,7 @@ void PeerConnection::OnTransportControllerCandidatesGathered( return; } - for (cricket::Candidates::const_iterator citer = candidates.begin(); + for (Candidates::const_iterator citer = candidates.begin(); citer != candidates.end(); ++citer) { // Use transport_name as the candidate media id. std::unique_ptr candidate( @@ -2462,15 +2424,15 @@ void PeerConnection::OnTransportControllerCandidatesGathered( } void PeerConnection::OnTransportControllerCandidateError( - const cricket::IceCandidateErrorEvent& event) { + const IceCandidateErrorEvent& event) { OnIceCandidateError(event.address, event.port, event.url, event.error_code, event.error_text); } void PeerConnection::OnTransportControllerCandidatesRemoved( - const std::vector& candidates) { + const std::vector& candidates) { // Sanity check. - for (const cricket::Candidate& candidate : candidates) { + for (const Candidate& candidate : candidates) { if (candidate.transport_name().empty()) { RTC_LOG(LS_ERROR) << "OnTransportControllerCandidatesRemoved: " "empty content name in candidate " @@ -2483,15 +2445,15 @@ void PeerConnection::OnTransportControllerCandidatesRemoved( } void PeerConnection::OnTransportControllerCandidateChanged( - const cricket::CandidatePairChangeEvent& event) { + const CandidatePairChangeEvent& event) { OnSelectedCandidatePairChanged(event); } void PeerConnection::OnTransportControllerDtlsHandshakeError( - rtc::SSLHandshakeError error) { - RTC_HISTOGRAM_ENUMERATION( - "WebRTC.PeerConnection.DtlsHandshakeError", static_cast(error), - static_cast(rtc::SSLHandshakeError::MAX_VALUE)); + SSLHandshakeError error) { + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.DtlsHandshakeError", + static_cast(error), + static_cast(SSLHandshakeError::MAX_VALUE)); } // Returns the media index for a local ice candidate given the content name. 
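[Editor's note, not part of the patch] The trailing comment above documents GetLocalCandidateMediaIndex, and the next hunk switches its comparison from ContentInfo::name to ContentInfo::mid(). A standalone sketch of that mid-to-m-line-index lookup, using simplified stand-in types rather than the actual WebRTC classes:

  // Sketch only: ContentEntry stands in for WebRTC's ContentInfo; the real
  // function walks local_description()->description()->contents().
  #include <optional>
  #include <string>
  #include <vector>

  struct ContentEntry {
    std::string mid;  // value of the a=mid: attribute for one m= section
  };

  // Returns the zero-based m-line index whose mid equals `content_name`,
  // or std::nullopt when no m= section carries that mid.
  std::optional<int> FindMlineIndex(const std::vector<ContentEntry>& contents,
                                    const std::string& content_name) {
    for (size_t i = 0; i < contents.size(); ++i) {
      if (contents[i].mid == content_name) {
        return static_cast<int>(i);
      }
    }
    return std::nullopt;
  }

In this code path the content name being matched is the SDP mid (the candidates are keyed by transport name, as the hunk above notes), so switching the accessor to mid() makes that intent explicit without changing behavior.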
@@ -2505,7 +2467,7 @@ bool PeerConnection::GetLocalCandidateMediaIndex( bool content_found = false; const ContentInfos& contents = local_description()->description()->contents(); for (size_t index = 0; index < contents.size(); ++index) { - if (contents[index].name == content_name) { + if (contents[index].mid() == content_name) { *sdp_mline_index = static_cast(index); content_found = true; break; @@ -2519,7 +2481,7 @@ Call::Stats PeerConnection::GetCallStats() { return worker_thread()->BlockingCall([this] { return GetCallStats(); }); } RTC_DCHECK_RUN_ON(worker_thread()); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; if (call_) { return call_->GetStats(); } else { @@ -2527,14 +2489,14 @@ Call::Stats PeerConnection::GetCallStats() { } } -absl::optional PeerConnection::GetAudioDeviceStats() { +std::optional PeerConnection::GetAudioDeviceStats() { if (context_->media_engine()) { return context_->media_engine()->voice().GetAudioDeviceStats(); } - return absl::nullopt; + return std::nullopt; } -absl::optional PeerConnection::SetupDataChannelTransport_n( +std::optional PeerConnection::SetupDataChannelTransport_n( absl::string_view mid) { sctp_mid_n_ = std::string(mid); DataChannelTransportInterface* transport = @@ -2543,12 +2505,12 @@ absl::optional PeerConnection::SetupDataChannelTransport_n( RTC_LOG(LS_ERROR) << "Data channel transport is not available for data channels, mid=" << mid; - sctp_mid_n_ = absl::nullopt; - return absl::nullopt; + sctp_mid_n_ = std::nullopt; + return std::nullopt; } - absl::optional transport_name; - cricket::DtlsTransportInternal* dtls_transport = + std::optional transport_name; + DtlsTransportInternal* dtls_transport = transport_controller_->GetDtlsTransport(*sctp_mid_n_); if (dtls_transport) { transport_name = dtls_transport->transport_name(); @@ -2577,17 +2539,16 @@ void PeerConnection::TeardownDataChannelTransport_n(RTCError error) { // Returns false if bundle is enabled and rtcp_mux is disabled. 
bool PeerConnection::ValidateBundleSettings( const SessionDescription* desc, - const std::map& - bundle_groups_by_mid) { + const std::map& bundle_groups_by_mid) { if (bundle_groups_by_mid.empty()) return true; - const cricket::ContentInfos& contents = desc->contents(); - for (cricket::ContentInfos::const_iterator citer = contents.begin(); + const ContentInfos& contents = desc->contents(); + for (ContentInfos::const_iterator citer = contents.begin(); citer != contents.end(); ++citer) { - const cricket::ContentInfo* content = (&*citer); + const ContentInfo* content = (&*citer); RTC_DCHECK(content != NULL); - auto it = bundle_groups_by_mid.find(content->name); + auto it = bundle_groups_by_mid.find(content->mid()); if (it != bundle_groups_by_mid.end() && !(content->rejected || content->bundle_only) && content->type == MediaProtocolType::kRtp) { @@ -2604,18 +2565,18 @@ void PeerConnection::ReportSdpBundleUsage( RTC_DCHECK_RUN_ON(signaling_thread()); bool using_bundle = - remote_description.description()->HasGroup(cricket::GROUP_TYPE_BUNDLE); + remote_description.description()->HasGroup(GROUP_TYPE_BUNDLE); int num_audio_mlines = 0; int num_video_mlines = 0; int num_data_mlines = 0; for (const ContentInfo& content : remote_description.description()->contents()) { - cricket::MediaType media_type = content.media_description()->type(); - if (media_type == cricket::MEDIA_TYPE_AUDIO) { + webrtc::MediaType media_type = content.media_description()->type(); + if (media_type == webrtc::MediaType::AUDIO) { num_audio_mlines += 1; - } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { + } else if (media_type == webrtc::MediaType::VIDEO) { num_video_mlines += 1; - } else if (media_type == cricket::MEDIA_TYPE_DATA) { + } else if (media_type == webrtc::MediaType::DATA) { num_data_mlines += 1; } } @@ -2645,8 +2606,7 @@ void PeerConnection::ReportSdpBundleUsage( kBundleUsageMax); } -void PeerConnection::ReportIceCandidateCollected( - const cricket::Candidate& candidate) { +void PeerConnection::ReportIceCandidateCollected(const Candidate& candidate) { NoteUsageEvent(UsageEvent::CANDIDATE_COLLECTED); if (candidate.address().IsPrivateIP()) { NoteUsageEvent(UsageEvent::PRIVATE_CANDIDATE_COLLECTED); @@ -2665,25 +2625,26 @@ void PeerConnection::NoteUsageEvent(UsageEvent event) { } // Asynchronously adds remote candidates on the network thread. -void PeerConnection::AddRemoteCandidate(const std::string& mid, - const cricket::Candidate& candidate) { +void PeerConnection::AddRemoteCandidate(absl::string_view mid, + const Candidate& candidate) { RTC_DCHECK_RUN_ON(signaling_thread()); - if (candidate.network_type() != rtc::ADAPTER_TYPE_UNKNOWN) { + if (candidate.network_type() != ADAPTER_TYPE_UNKNOWN) { RTC_DLOG(LS_WARNING) << "Using candidate with adapter type set - this " "should only happen in test"; } // Clear fields that do not make sense as remote candidates. 
- cricket::Candidate new_candidate(candidate); - new_candidate.set_network_type(rtc::ADAPTER_TYPE_UNKNOWN); + Candidate new_candidate(candidate); + new_candidate.set_network_type(ADAPTER_TYPE_UNKNOWN); new_candidate.set_relay_protocol(""); - new_candidate.set_underlying_type_for_vpn(rtc::ADAPTER_TYPE_UNKNOWN); + new_candidate.set_underlying_type_for_vpn(ADAPTER_TYPE_UNKNOWN); network_thread()->PostTask(SafeTask( - network_thread_safety_, [this, mid = mid, candidate = new_candidate] { + network_thread_safety_, + [this, mid = std::string(mid), candidate = new_candidate] { RTC_DCHECK_RUN_ON(network_thread()); - std::vector candidates = {candidate}; + std::vector candidates = {candidate}; RTCError error = transport_controller_->AddRemoteCandidates(mid, candidates); if (error.ok()) { @@ -2720,8 +2681,7 @@ void PeerConnection::ReportUsagePattern() const { usage_pattern_.ReportUsagePattern(observer_); } -void PeerConnection::ReportRemoteIceCandidateAdded( - const cricket::Candidate& candidate) { +void PeerConnection::ReportRemoteIceCandidateAdded(const Candidate& candidate) { RTC_DCHECK_RUN_ON(signaling_thread()); NoteUsageEvent(UsageEvent::REMOTE_CANDIDATE_ADDED); @@ -2739,19 +2699,17 @@ void PeerConnection::ReportRemoteIceCandidateAdded( bool PeerConnection::SrtpRequired() const { RTC_DCHECK_RUN_ON(signaling_thread()); - return (dtls_enabled_ || - sdp_handler_->webrtc_session_desc_factory()->SdesPolicy() == - cricket::SEC_REQUIRED); + return dtls_enabled_; } void PeerConnection::OnTransportControllerGatheringState( - cricket::IceGatheringState state) { + ::webrtc::IceGatheringState state) { RTC_DCHECK(signaling_thread()->IsCurrent()); - if (state == cricket::kIceGatheringGathering) { + if (state == ::webrtc::kIceGatheringGathering) { OnIceGatheringChange(PeerConnectionInterface::kIceGatheringGathering); - } else if (state == cricket::kIceGatheringComplete) { + } else if (state == ::webrtc::kIceGatheringComplete) { OnIceGatheringChange(PeerConnectionInterface::kIceGatheringComplete); - } else if (state == cricket::kIceGatheringNew) { + } else if (state == ::webrtc::kIceGatheringNew) { OnIceGatheringChange(PeerConnectionInterface::kIceGatheringNew); } else { RTC_LOG(LS_ERROR) << "Unknown state received: " << state; @@ -2763,8 +2721,8 @@ void PeerConnection::OnTransportControllerGatheringState( void PeerConnection::ReportTransportStats( std::vector transceivers) { TRACE_EVENT0("webrtc", "PeerConnection::ReportTransportStats"); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - std::map> + Thread::ScopedDisallowBlockingCalls no_blocking_calls; + std::map> media_types_by_transport_name; for (const auto& transceiver : transceivers) { if (transceiver->internal()->channel()) { @@ -2776,18 +2734,18 @@ void PeerConnection::ReportTransportStats( } if (sctp_mid_n_) { - cricket::DtlsTransportInternal* dtls_transport = + DtlsTransportInternal* dtls_transport = transport_controller_->GetDtlsTransport(*sctp_mid_n_); if (dtls_transport) { media_types_by_transport_name[dtls_transport->transport_name()].insert( - cricket::MEDIA_TYPE_DATA); + webrtc::MediaType::DATA); } } for (const auto& entry : media_types_by_transport_name) { const std::string& transport_name = entry.first; - const std::set media_types = entry.second; - cricket::TransportStats stats; + const std::set media_types = entry.second; + TransportStats stats; if (transport_controller_->GetStats(transport_name, &stats)) { ReportBestConnectionState(stats); ReportNegotiatedCiphers(dtls_enabled_, stats, media_types); @@ -2798,27 +2756,24 @@ 
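// AddRemoteCandidate() above now takes the MID as a string_view and copies it
// into an owned std::string inside the lambda capture before posting the work
// to the network thread. A standard-library-only sketch of that capture
// pattern; QueueAddCandidate and g_pending_network_tasks are illustrative
// stand-ins for PostTask/SafeTask, not WebRTC APIs.

#include <functional>
#include <string>
#include <string_view>
#include <vector>

std::vector<std::function<void()>> g_pending_network_tasks;

void QueueAddCandidate(std::string_view mid) {
  // Capture by value as std::string so the deferred task cannot dangle once
  // the caller's buffer goes away -- the same shape as
  // `mid = std::string(mid)` in the hunk above.
  g_pending_network_tasks.push_back([owned_mid = std::string(mid)] {
    // ... look up the transport for `owned_mid` and add the candidate ...
    (void)owned_mid;
  });
}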
void PeerConnection::ReportTransportStats( // Walk through the ConnectionInfos to gather best connection usage // for IPv4 and IPv6. // static (no member state required) -void PeerConnection::ReportBestConnectionState( - const cricket::TransportStats& stats) { - for (const cricket::TransportChannelStats& channel_stats : - stats.channel_stats) { - for (const cricket::ConnectionInfo& connection_info : +void PeerConnection::ReportBestConnectionState(const TransportStats& stats) { + for (const TransportChannelStats& channel_stats : stats.channel_stats) { + for (const ConnectionInfo& connection_info : channel_stats.ice_transport_stats.connection_infos) { if (!connection_info.best_connection) { continue; } - const cricket::Candidate& local = connection_info.local_candidate; - const cricket::Candidate& remote = connection_info.remote_candidate; + const Candidate& local = connection_info.local_candidate; + const Candidate& remote = connection_info.remote_candidate; // Increment the counter for IceCandidatePairType. - if (local.protocol() == cricket::TCP_PROTOCOL_NAME || - (local.type() == RELAY_PORT_TYPE && - local.relay_protocol() == cricket::TCP_PROTOCOL_NAME)) { + if (local.protocol() == TCP_PROTOCOL_NAME || + (local.is_relay() && local.relay_protocol() == TCP_PROTOCOL_NAME)) { RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.CandidatePairType_TCP", GetIceCandidatePairCounter(local, remote), kIceCandidatePairMax); - } else if (local.protocol() == cricket::UDP_PROTOCOL_NAME) { + } else if (local.protocol() == UDP_PROTOCOL_NAME) { RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.CandidatePairType_UDP", GetIceCandidatePairCounter(local, remote), kIceCandidatePairMax); @@ -2848,36 +2803,63 @@ void PeerConnection::ReportBestConnectionState( // static void PeerConnection::ReportNegotiatedCiphers( bool dtls_enabled, - const cricket::TransportStats& stats, - const std::set& media_types) { + const TransportStats& stats, + const std::set& media_types) { if (!dtls_enabled || stats.channel_stats.empty()) { return; } int srtp_crypto_suite = stats.channel_stats[0].srtp_crypto_suite; int ssl_cipher_suite = stats.channel_stats[0].ssl_cipher_suite; - if (srtp_crypto_suite == rtc::kSrtpInvalidCryptoSuite && - ssl_cipher_suite == rtc::kTlsNullWithNullNull) { + if (srtp_crypto_suite == kSrtpInvalidCryptoSuite && + ssl_cipher_suite == kTlsNullWithNullNull) { return; } - if (ssl_cipher_suite != rtc::kTlsNullWithNullNull) { - for (cricket::MediaType media_type : media_types) { + if (ssl_cipher_suite != kTlsNullWithNullNull) { + for (webrtc::MediaType media_type : media_types) { switch (media_type) { - case cricket::MEDIA_TYPE_AUDIO: + case webrtc::MediaType::AUDIO: RTC_HISTOGRAM_ENUMERATION_SPARSE( "WebRTC.PeerConnection.SslCipherSuite.Audio", ssl_cipher_suite, - rtc::kSslCipherSuiteMaxValue); + kSslCipherSuiteMaxValue); break; - case cricket::MEDIA_TYPE_VIDEO: + case webrtc::MediaType::VIDEO: RTC_HISTOGRAM_ENUMERATION_SPARSE( "WebRTC.PeerConnection.SslCipherSuite.Video", ssl_cipher_suite, - rtc::kSslCipherSuiteMaxValue); + kSslCipherSuiteMaxValue); break; - case cricket::MEDIA_TYPE_DATA: + case webrtc::MediaType::DATA: RTC_HISTOGRAM_ENUMERATION_SPARSE( "WebRTC.PeerConnection.SslCipherSuite.Data", ssl_cipher_suite, - rtc::kSslCipherSuiteMaxValue); + kSslCipherSuiteMaxValue); + break; + default: + RTC_DCHECK_NOTREACHED(); + continue; + } + } + } + + uint16_t ssl_peer_signature_algorithm = + stats.channel_stats[0].ssl_peer_signature_algorithm; + if (ssl_peer_signature_algorithm != kSslSignatureAlgorithmUnknown) { + for 
(webrtc::MediaType media_type : media_types) { + switch (media_type) { + case webrtc::MediaType::AUDIO: + RTC_HISTOGRAM_ENUMERATION_SPARSE( + "WebRTC.PeerConnection.SslPeerSignatureAlgorithm.Audio", + ssl_peer_signature_algorithm, kSslSignatureAlgorithmMaxValue); + break; + case webrtc::MediaType::VIDEO: + RTC_HISTOGRAM_ENUMERATION_SPARSE( + "WebRTC.PeerConnection.SslPeerSignatureAlgorithm.Video", + ssl_peer_signature_algorithm, kSslSignatureAlgorithmMaxValue); + break; + case webrtc::MediaType::DATA: + RTC_HISTOGRAM_ENUMERATION_SPARSE( + "WebRTC.PeerConnection.SslPeerSignatureAlgorithm.Data", + ssl_peer_signature_algorithm, kSslSignatureAlgorithmMaxValue); break; default: RTC_DCHECK_NOTREACHED(); @@ -2890,14 +2872,14 @@ void PeerConnection::ReportNegotiatedCiphers( bool PeerConnection::OnTransportChanged( const std::string& mid, RtpTransportInternal* rtp_transport, - rtc::scoped_refptr dtls_transport, + scoped_refptr dtls_transport, DataChannelTransportInterface* data_channel_transport) { RTC_DCHECK_RUN_ON(network_thread()); bool ret = true; if (ConfiguredForMedia()) { for (const auto& transceiver : rtp_manager()->transceivers()->UnsafeList()) { - cricket::ChannelInterface* channel = transceiver->internal()->channel(); + ChannelInterface* channel = transceiver->internal()->channel(); if (channel && channel->mid() == mid) { ret = channel->SetRtpTransport(rtp_transport); } @@ -2926,21 +2908,18 @@ PeerConnectionObserver* PeerConnection::Observer() const { return observer_; } -void PeerConnection::StartSctpTransport(int local_port, - int remote_port, - int max_message_size) { +RTCError PeerConnection::StartSctpTransport(const SctpOptions& options) { RTC_DCHECK_RUN_ON(signaling_thread()); - if (!sctp_mid_s_) - return; + RTC_DCHECK(sctp_mid_s_); - network_thread()->PostTask(SafeTask( - network_thread_safety_, - [this, mid = *sctp_mid_s_, local_port, remote_port, max_message_size] { - rtc::scoped_refptr sctp_transport = + network_thread()->PostTask( + SafeTask(network_thread_safety_, [this, mid = *sctp_mid_s_, options] { + scoped_refptr sctp_transport = transport_controller_n()->GetSctpTransport(mid); if (sctp_transport) - sctp_transport->Start(local_port, remote_port, max_message_size); + sctp_transport->Start(options); })); + return RTCError::OK(); } CryptoOptions PeerConnection::GetCryptoOptions() { @@ -2977,12 +2956,25 @@ void PeerConnection::RequestUsagePatternReportForTesting() { /* delay_ms= */ 0); } -std::functionBlockingCall([this]() { + RTC_DCHECK_RUN_ON(worker_thread()); + return call_->FeedbackAccordingToRfc8888Count(); + }); +} + +int PeerConnection::FeedbackAccordingToTransportCcCountForTesting() const { + return worker_thread()->BlockingCall([this]() { + RTC_DCHECK_RUN_ON(worker_thread()); + return call_->FeedbackAccordingToTransportCcCount(); + }); +} + +std::function PeerConnection::InitializeRtcpCallback() { RTC_DCHECK_RUN_ON(network_thread()); - return [this](const rtc::CopyOnWriteBuffer& packet, - int64_t /*packet_time_us*/) { + return [this](const CopyOnWriteBuffer& packet, int64_t /*packet_time_us*/) { worker_thread()->PostTask(SafeTask(worker_thread_safety_, [this, packet]() { call_ptr_->Receiver()->DeliverRtcpPacket(packet); })); @@ -3006,4 +2998,14 @@ PeerConnection::InitializeUnDemuxablePacketHandler() { }; } +bool PeerConnection::CanAttemptDtlsStunPiggybacking( + const RTCConfiguration& configuration) { + // Enable DTLS-in-STUN only if no certificates were passed those + // may be RSA certificates and this feature only works with small + // ECDSA certificates. 
Determining the type of the key is + // not trivially possible at this point. + return dtls_enabled_ && configuration.certificates.empty() && + env_.field_trials().IsEnabled("WebRTC-IceHandshakeDtls"); +} + } // namespace webrtc diff --git a/pc/peer_connection.h b/pc/peer_connection.h index aac1635484..eeea50f0eb 100644 --- a/pc/peer_connection.h +++ b/pc/peer_connection.h @@ -16,17 +16,21 @@ #include #include #include +#include #include #include #include -#include "absl/types/optional.h" +#include "absl/strings/string_view.h" #include "api/adaptation/resource.h" #include "api/async_dns_resolver.h" +#include "api/audio/audio_device.h" #include "api/candidate.h" #include "api/crypto/crypto_options.h" +#include "api/data_channel_event_observer_interface.h" #include "api/data_channel_interface.h" #include "api/dtls_transport_interface.h" +#include "api/environment/environment.h" #include "api/field_trials_view.h" #include "api/ice_transport_interface.h" #include "api/jsep.h" @@ -34,8 +38,8 @@ #include "api/media_types.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" -#include "api/rtc_event_log/rtc_event_log.h" #include "api/rtc_event_log_output.h" +#include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" #include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_interface.h" @@ -46,17 +50,21 @@ #include "api/set_remote_description_observer_interface.h" #include "api/stats/rtc_stats_collector_callback.h" #include "api/task_queue/pending_task_safety_flag.h" +#include "api/transport/bandwidth_estimation_settings.h" #include "api/transport/bitrate_settings.h" #include "api/transport/data_channel_transport_interface.h" #include "api/transport/enums.h" +#include "api/transport/network_control.h" #include "api/turn_customizer.h" #include "call/call.h" +#include "call/payload_type_picker.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/port.h" #include "p2p/base/port_allocator.h" #include "p2p/base/transport_description.h" #include "pc/channel_interface.h" +#include "pc/codec_vendor.h" #include "pc/connection_context.h" #include "pc/data_channel_controller.h" #include "pc/data_channel_utils.h" @@ -69,7 +77,6 @@ #include "pc/rtp_transceiver.h" #include "pc/rtp_transmission_manager.h" #include "pc/rtp_transport_internal.h" -#include "pc/sctp_data_channel.h" #include "pc/sdp_offer_answer.h" #include "pc/session_description.h" #include "pc/transceiver_list.h" @@ -109,70 +116,69 @@ class PeerConnection : public PeerConnectionInternal, // // Note that the function takes ownership of dependencies, and will // either use them or release them, whether it succeeds or fails. 
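// CanAttemptDtlsStunPiggybacking() earlier in this file gates the feature on
// three conditions: DTLS enabled, no caller-supplied certificates, and the
// "WebRTC-IceHandshakeDtls" field trial. A minimal sketch of that kind of
// gate; CanAttemptPiggybacking and FieldTrialLookup are illustrative names,
// not the WebRTC API.

#include <functional>
#include <string>
#include <vector>

using FieldTrialLookup = std::function<bool(const std::string&)>;

bool CanAttemptPiggybacking(bool dtls_enabled,
                            const std::vector<std::string>& certificates,
                            const FieldTrialLookup& is_trial_enabled) {
  // Caller-supplied certificates might be RSA, and the feature only works
  // with small ECDSA certificates, so bail out if any were provided.
  return dtls_enabled && certificates.empty() &&
         is_trial_enabled("WebRTC-IceHandshakeDtls");
}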
- static RTCErrorOr> Create( - rtc::scoped_refptr context, + static scoped_refptr Create( + const Environment& env, + scoped_refptr context, const PeerConnectionFactoryInterface::Options& options, - std::unique_ptr event_log, std::unique_ptr call, const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies dependencies); + PeerConnectionDependencies& dependencies, + const ServerAddresses& stun_servers, + const std::vector& turn_servers); - rtc::scoped_refptr local_streams() override; - rtc::scoped_refptr remote_streams() override; + scoped_refptr local_streams() override; + scoped_refptr remote_streams() override; bool AddStream(MediaStreamInterface* local_stream) override; void RemoveStream(MediaStreamInterface* local_stream) override; - RTCErrorOr> AddTrack( - rtc::scoped_refptr track, + RTCErrorOr> AddTrack( + scoped_refptr track, const std::vector& stream_ids) override; - RTCErrorOr> AddTrack( - rtc::scoped_refptr track, + RTCErrorOr> AddTrack( + scoped_refptr track, const std::vector& stream_ids, const std::vector& init_send_encodings) override; - RTCErrorOr> AddTrack( - rtc::scoped_refptr track, + RTCErrorOr> AddTrack( + scoped_refptr track, const std::vector& stream_ids, const std::vector* init_send_encodings); RTCError RemoveTrackOrError( - rtc::scoped_refptr sender) override; + scoped_refptr sender) override; - RTCErrorOr> AddTransceiver( - rtc::scoped_refptr track) override; - RTCErrorOr> AddTransceiver( - rtc::scoped_refptr track, + RTCErrorOr> AddTransceiver( + scoped_refptr track) override; + RTCErrorOr> AddTransceiver( + scoped_refptr track, const RtpTransceiverInit& init) override; - RTCErrorOr> AddTransceiver( - cricket::MediaType media_type) override; - RTCErrorOr> AddTransceiver( - cricket::MediaType media_type, + RTCErrorOr> AddTransceiver( + webrtc::MediaType media_type) override; + RTCErrorOr> AddTransceiver( + webrtc::MediaType media_type, const RtpTransceiverInit& init) override; - rtc::scoped_refptr CreateSender( + scoped_refptr CreateSender( const std::string& kind, const std::string& stream_id) override; - std::vector> GetSenders() + std::vector> GetSenders() const override; + std::vector> GetReceivers() const override; - std::vector> GetReceivers() - const override; - std::vector> GetTransceivers() + std::vector> GetTransceivers() const override; - RTCErrorOr> CreateDataChannelOrError( + RTCErrorOr> CreateDataChannelOrError( const std::string& label, const DataChannelInit* config) override; // WARNING: LEGACY. See peerconnectioninterface.h bool GetStats(StatsObserver* observer, - webrtc::MediaStreamTrackInterface* track, + MediaStreamTrackInterface* track, StatsOutputLevel level) override; // Spec-complaint GetStats(). 
See peerconnectioninterface.h void GetStats(RTCStatsCollectorCallback* callback) override; - void GetStats( - rtc::scoped_refptr selector, - rtc::scoped_refptr callback) override; - void GetStats( - rtc::scoped_refptr selector, - rtc::scoped_refptr callback) override; + void GetStats(scoped_refptr selector, + scoped_refptr callback) override; + void GetStats(scoped_refptr selector, + scoped_refptr callback) override; void ClearStatsCache() override; SignalingState signaling_state() override; @@ -184,7 +190,7 @@ class PeerConnection : public PeerConnectionInternal, IceConnectionState standardized_ice_connection_state() override; PeerConnectionState peer_connection_state() override; IceGatheringState ice_gathering_state() override; - absl::optional can_trickle_ice_candidates() override; + std::optional can_trickle_ice_candidates() override; const SessionDescriptionInterface* local_description() const override; const SessionDescriptionInterface* remote_description() const override; @@ -205,11 +211,9 @@ class PeerConnection : public PeerConnectionInternal, void SetLocalDescription( std::unique_ptr desc, - rtc::scoped_refptr observer) - override; + scoped_refptr observer) override; void SetLocalDescription( - rtc::scoped_refptr observer) - override; + scoped_refptr observer) override; // TODO(https://crbug.com/webrtc/11798): Delete these methods in favor of the // ones taking SetLocalDescriptionObserverInterface as argument. void SetLocalDescription(SetSessionDescriptionObserver* observer, @@ -218,8 +222,7 @@ class PeerConnection : public PeerConnectionInternal, void SetRemoteDescription( std::unique_ptr desc, - rtc::scoped_refptr observer) - override; + scoped_refptr observer) override; // TODO(https://crbug.com/webrtc/11798): Delete this methods in favor of the // ones taking SetRemoteDescriptionObserverInterface as argument. 
void SetRemoteDescription(SetSessionDescriptionObserver* observer, @@ -231,38 +234,40 @@ class PeerConnection : public PeerConnectionInternal, bool AddIceCandidate(const IceCandidateInterface* candidate) override; void AddIceCandidate(std::unique_ptr candidate, std::function callback) override; - bool RemoveIceCandidates( - const std::vector& candidates) override; + bool RemoveIceCandidates(const std::vector& candidates) override; RTCError SetBitrate(const BitrateSettings& bitrate) override; + void ReconfigureBandwidthEstimation( + const BandwidthEstimationSettings& settings) override; void SetAudioPlayout(bool playout) override; void SetAudioRecording(bool recording) override; - rtc::scoped_refptr LookupDtlsTransportByMid( + scoped_refptr LookupDtlsTransportByMid( const std::string& mid) override; - rtc::scoped_refptr LookupDtlsTransportByMidInternal( + scoped_refptr LookupDtlsTransportByMidInternal( const std::string& mid); - rtc::scoped_refptr GetSctpTransport() const override; + scoped_refptr GetSctpTransport() const override; - void AddAdaptationResource(rtc::scoped_refptr resource) override; + void AddAdaptationResource(scoped_refptr resource) override; bool StartRtcEventLog(std::unique_ptr output, int64_t output_period_ms) override; bool StartRtcEventLog(std::unique_ptr output) override; void StopRtcEventLog() override; + void SetDataChannelEventObserver( + std::unique_ptr observer) override; + void Close() override; - rtc::Thread* signaling_thread() const final { + Thread* signaling_thread() const final { return context_->signaling_thread(); } - rtc::Thread* network_thread() const final { - return context_->network_thread(); - } - rtc::Thread* worker_thread() const final { return context_->worker_thread(); } + Thread* network_thread() const final { return context_->network_thread(); } + Thread* worker_thread() const final { return context_->worker_thread(); } std::string session_id() const override { return session_id_; } @@ -271,8 +276,7 @@ class PeerConnection : public PeerConnectionInternal, return sdp_handler_->initial_offerer(); } - std::vector< - rtc::scoped_refptr>> + std::vector>> GetTransceiversInternal() const override { RTC_DCHECK_RUN_ON(signaling_thread()); if (!ConfiguredForMedia()) { @@ -283,24 +287,23 @@ class PeerConnection : public PeerConnectionInternal, std::vector GetDataChannelStats() const override; - absl::optional sctp_transport_name() const override; - absl::optional sctp_mid() const override; + std::optional sctp_transport_name() const override; + std::optional sctp_mid() const override; - cricket::CandidateStatsList GetPooledCandidateStats() const override; - std::map GetTransportStatsByNames( + CandidateStatsList GetPooledCandidateStats() const override; + std::map GetTransportStatsByNames( const std::set& transport_names) override; Call::Stats GetCallStats() override; - absl::optional GetAudioDeviceStats() override; + std::optional GetAudioDeviceStats() override; - bool GetLocalCertificate( - const std::string& transport_name, - rtc::scoped_refptr* certificate) override; - std::unique_ptr GetRemoteSSLCertChain( + bool GetLocalCertificate(const std::string& transport_name, + scoped_refptr* certificate) override; + std::unique_ptr GetRemoteSSLCertChain( const std::string& transport_name) override; bool IceRestartPending(const std::string& content_name) const override; bool NeedsIceRestart(const std::string& content_name) const override; - bool GetSslRole(const std::string& content_name, rtc::SSLRole* role) override; + bool GetSslRole(const std::string& 
content_name, SSLRole* role) override; // Functions needed by DataChannelController void NoteDataAddedEvent() override { NoteUsageEvent(UsageEvent::DATA_ADDED); } @@ -312,7 +315,7 @@ class PeerConnection : public PeerConnectionInternal, sdp_handler_->signaling_state() == PeerConnectionInterface::kClosed; } // Get current SSL role used by SCTP's underlying transport. - absl::optional GetSctpSslRole_n() override; + std::optional GetSctpSslRole_n() override; void OnSctpDataChannelStateChanged( int channel_id, @@ -356,9 +359,7 @@ class PeerConnection : public PeerConnectionInternal, RTC_DCHECK_RUN_ON(network_thread()); return transport_controller_.get(); } - cricket::PortAllocator* port_allocator() override { - return port_allocator_.get(); - } + PortAllocator* port_allocator() override { return port_allocator_.get(); } Call* call_ptr() override { return call_ptr_; } ConnectionContext* context() { return context_.get(); } @@ -369,8 +370,8 @@ class PeerConnection : public PeerConnectionInternal, void NoteUsageEvent(UsageEvent event) override; // Asynchronously adds a remote candidate on the network thread. - void AddRemoteCandidate(const std::string& mid, - const cricket::Candidate& candidate) override; + void AddRemoteCandidate(absl::string_view mid, + const Candidate& candidate) override; // Report the UMA metric BundleUsage for the given remote description. void ReportSdpBundleUsage( @@ -378,6 +379,9 @@ class PeerConnection : public PeerConnectionInternal, // Report several UMA metrics on establishing the connection. void ReportFirstConnectUsageMetrics() RTC_RUN_ON(signaling_thread()); + // Report several UMA metrics for established connections when the connection + // is closed. + void ReportCloseUsageMetrics() RTC_RUN_ON(signaling_thread()); // Returns true if the PeerConnection is configured to use Unified Plan // semantics for creating offers/answers and setting local/remote @@ -386,29 +390,18 @@ class PeerConnection : public PeerConnectionInternal, // TODO(bugs.webrtc.org/8530): Flip the default to be Unified Plan once // sufficient time has passed. bool IsUnifiedPlan() const override { - RTC_DCHECK_RUN_ON(signaling_thread()); return is_unified_plan_; } - bool ValidateBundleSettings( - const cricket::SessionDescription* desc, - const std::map& - bundle_groups_by_mid) override; - - // Returns the MID for the data section associated with the - // SCTP data channel, if it has been set. If no data - // channels are configured this will return nullopt. - absl::optional GetDataMid() const override; + bool ValidateBundleSettings(const SessionDescription* desc, + const std::map& + bundle_groups_by_mid) override; - void SetSctpDataInfo(absl::string_view mid, - absl::string_view transport_name) override; - - void ResetSctpDataInfo() override; + bool CreateDataChannelTransport(absl::string_view mid) override; + void DestroyDataChannelTransport(RTCError error) override; // Asynchronously calls SctpTransport::Start() on the network thread for // `sctp_mid()` if set. Called as part of setting the local description. - void StartSctpTransport(int local_port, - int remote_port, - int max_message_size) override; + RTCError StartSctpTransport(const SctpOptions& options) override; // Returns the CryptoOptions for this PeerConnection. This will always // return the RTCConfiguration.crypto_options if set and will only default @@ -417,25 +410,22 @@ class PeerConnection : public PeerConnectionInternal, // Internal implementation for AddTransceiver family of methods. 
If // `fire_callback` is set, fires OnRenegotiationNeeded callback if successful. - RTCErrorOr> AddTransceiver( - cricket::MediaType media_type, - rtc::scoped_refptr track, + RTCErrorOr> AddTransceiver( + webrtc::MediaType media_type, + scoped_refptr track, const RtpTransceiverInit& init, bool fire_callback = true) override; - // Returns rtp transport, result can not be nullptr. - RtpTransportInternal* GetRtpTransport(const std::string& mid); - // Returns true if SRTP (either using DTLS-SRTP or SDES) is required by // this session. bool SrtpRequired() const override; - absl::optional SetupDataChannelTransport_n( - absl::string_view mid) override RTC_RUN_ON(network_thread()); - void TeardownDataChannelTransport_n(RTCError error) override + std::optional SetupDataChannelTransport_n(absl::string_view mid) + RTC_RUN_ON(network_thread()); + void TeardownDataChannelTransport_n(RTCError error) RTC_RUN_ON(network_thread()); - const FieldTrialsView& trials() const override { return *trials_; } + const FieldTrialsView& trials() const override { return env_.field_trials(); } bool ConfiguredForMedia() const; @@ -445,30 +435,59 @@ class PeerConnection : public PeerConnectionInternal, return_histogram_very_quickly_ = true; } void RequestUsagePatternReportForTesting(); + int FeedbackAccordingToRfc8888CountForTesting() const; + int FeedbackAccordingToTransportCcCountForTesting() const; + + NetworkControllerInterface* GetNetworkController() override { + if (!worker_thread()->IsCurrent()) { + return worker_thread()->BlockingCall( + [this]() { return GetNetworkController(); }); + } + RTC_DCHECK_RUN_ON(worker_thread()); + RTC_DCHECK(call_); + return call_->GetTransportControllerSend()->GetNetworkController(); + } + PayloadTypePicker& payload_type_picker() override { + return payload_type_picker_; + } + void DisableSdpMungingChecksForTesting() { + if (!signaling_thread()->IsCurrent()) { + signaling_thread()->BlockingCall( + [&]() { DisableSdpMungingChecksForTesting(); }); + return; + } + RTC_DCHECK_RUN_ON(signaling_thread()); + sdp_handler_->DisableSdpMungingChecksForTesting(); + } protected: - // Available for rtc::scoped_refptr creation - PeerConnection(rtc::scoped_refptr context, + // Available for webrtc::scoped_refptr creation + PeerConnection(const PeerConnectionInterface::RTCConfiguration& configuration, + const Environment& env, + scoped_refptr context, const PeerConnectionFactoryInterface::Options& options, bool is_unified_plan, - std::unique_ptr event_log, std::unique_ptr call, PeerConnectionDependencies& dependencies, + const ServerAddresses& stun_servers, + const std::vector& turn_servers, bool dtls_enabled); ~PeerConnection() override; private: - RTCError Initialize( - const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies dependencies); + // Called from the constructor to apply the server configuration on the + // network thread and initialize network thread related state (see + // InitializeTransportController_n). The return value of this function is used + // to set the initial value of `transport_controller_copy_`. 
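// GetNetworkController() and DisableSdpMungingChecksForTesting() above both
// follow the same shape: if the caller is not on the owning thread, re-enter
// the function via BlockingCall() and return the result. A toy illustration of
// that shape; TaskRunner is a made-up stand-in for webrtc::Thread, and its
// BlockingCall simply runs the closure inline.

struct TaskRunner {
  bool IsCurrent() const { return is_current_; }
  template <typename Closure>
  auto BlockingCall(Closure&& closure) {
    // Toy model: pretend we hopped to the owning thread, run inline, restore.
    is_current_ = true;
    auto result = closure();
    is_current_ = false;
    return result;
  }
  bool is_current_ = false;
};

int ReadCounterOnOwner(TaskRunner& owner, const int& counter) {
  if (!owner.IsCurrent()) {
    // Re-enter from the owning thread and block on the result, mirroring the
    // worker_thread()->BlockingCall(...) dispatch in the code above.
    return owner.BlockingCall(
        [&] { return ReadCounterOnOwner(owner, counter); });
  }
  return counter;
}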
+ JsepTransportController* InitializeNetworkThread( + const ServerAddresses& stun_servers, + const std::vector& turn_servers); JsepTransportController* InitializeTransportController_n( - const RTCConfiguration& configuration, - const PeerConnectionDependencies& dependencies) - RTC_RUN_ON(network_thread()); + const RTCConfiguration& configuration) RTC_RUN_ON(network_thread()); - rtc::scoped_refptr> - FindTransceiverBySender(rtc::scoped_refptr sender) + scoped_refptr> + FindTransceiverBySender(scoped_refptr sender) RTC_RUN_ON(signaling_thread()); void SetStandardizedIceConnectionState( @@ -492,11 +511,10 @@ class PeerConnection : public PeerConnectionInternal, const std::string& error_text) RTC_RUN_ON(signaling_thread()); // Some local ICE candidates have been removed. - void OnIceCandidatesRemoved(const std::vector& candidates) + void OnIceCandidatesRemoved(const std::vector& candidates) RTC_RUN_ON(signaling_thread()); - void OnSelectedCandidatePairChanged( - const cricket::CandidatePairChangeEvent& event) + void OnSelectedCandidatePairChanged(const CandidatePairChangeEvent& event) RTC_RUN_ON(signaling_thread()); void OnNegotiationNeeded(); @@ -506,19 +524,19 @@ class PeerConnection : public PeerConnectionInternal, bool enable_ipv6; }; InitializePortAllocatorResult InitializePortAllocator_n( - const cricket::ServerAddresses& stun_servers, - const std::vector& turn_servers, + const ServerAddresses& stun_servers, + const std::vector& turn_servers, const RTCConfiguration& configuration); // Called when SetConfiguration is called to apply the supported subset // of the configuration on the network thread. bool ReconfigurePortAllocator_n( - const cricket::ServerAddresses& stun_servers, - const std::vector& turn_servers, + const ServerAddresses& stun_servers, + const std::vector& turn_servers, IceTransportsType type, int candidate_pool_size, PortPrunePolicy turn_port_prune_policy, - webrtc::TurnCustomizer* turn_customizer, - absl::optional stun_candidate_keepalive_interval, + TurnCustomizer* turn_customizer, + std::optional stun_candidate_keepalive_interval, bool have_local_description); // Starts output of an RTC event log to the given output object. @@ -532,10 +550,9 @@ class PeerConnection : public PeerConnectionInternal, // Returns true and the TransportInfo of the given `content_name` // from `description`. Returns false if it's not available. - static bool GetTransportDescription( - const cricket::SessionDescription* description, - const std::string& content_name, - cricket::TransportDescription* info); + static bool GetTransportDescription(const SessionDescription* description, + const std::string& content_name, + TransportDescription* info); // Returns the media index for a local ice candidate given the content name. // Returns false if the local session description does not have a media @@ -545,24 +562,20 @@ class PeerConnection : public PeerConnectionInternal, RTC_RUN_ON(signaling_thread()); // JsepTransportController signal handlers. 
- void OnTransportControllerConnectionState(cricket::IceConnectionState state) + void OnTransportControllerConnectionState(::webrtc::IceConnectionState state) RTC_RUN_ON(signaling_thread()); - void OnTransportControllerGatheringState(cricket::IceGatheringState state) + void OnTransportControllerGatheringState(::webrtc::IceGatheringState state) RTC_RUN_ON(signaling_thread()); void OnTransportControllerCandidatesGathered( const std::string& transport_name, - const std::vector& candidates) - RTC_RUN_ON(signaling_thread()); - void OnTransportControllerCandidateError( - const cricket::IceCandidateErrorEvent& event) + const std::vector& candidates) RTC_RUN_ON(signaling_thread()); + void OnTransportControllerCandidateError(const IceCandidateErrorEvent& event) RTC_RUN_ON(signaling_thread()); void OnTransportControllerCandidatesRemoved( - const std::vector& candidates) - RTC_RUN_ON(signaling_thread()); + const std::vector& candidates) RTC_RUN_ON(signaling_thread()); void OnTransportControllerCandidateChanged( - const cricket::CandidatePairChangeEvent& event) - RTC_RUN_ON(signaling_thread()); - void OnTransportControllerDtlsHandshakeError(rtc::SSLHandshakeError error); + const CandidatePairChangeEvent& event) RTC_RUN_ON(signaling_thread()); + void OnTransportControllerDtlsHandshakeError(SSLHandshakeError error); // Invoked when TransportController connection completion is signaled. // Reports stats for all transports in use. @@ -570,18 +583,18 @@ class PeerConnection : public PeerConnectionInternal, RTC_RUN_ON(network_thread()); // Gather the usage of IPv4/IPv6 as best connection. - static void ReportBestConnectionState(const cricket::TransportStats& stats); + static void ReportBestConnectionState(const TransportStats& stats); static void ReportNegotiatedCiphers( bool dtls_enabled, - const cricket::TransportStats& stats, - const std::set& media_types); - void ReportIceCandidateCollected(const cricket::Candidate& candidate) + const TransportStats& stats, + const std::set& media_types); + void ReportIceCandidateCollected(const Candidate& candidate) RTC_RUN_ON(signaling_thread()); void ReportUsagePattern() const RTC_RUN_ON(signaling_thread()); - void ReportRemoteIceCandidateAdded(const cricket::Candidate& candidate); + void ReportRemoteIceCandidateAdded(const Candidate& candidate); // JsepTransportController::Observer override. // @@ -592,37 +605,33 @@ class PeerConnection : public PeerConnectionInternal, bool OnTransportChanged( const std::string& mid, RtpTransportInternal* rtp_transport, - rtc::scoped_refptr dtls_transport, + scoped_refptr dtls_transport, DataChannelTransportInterface* data_channel_transport) override; void SetSctpTransportName(std::string sctp_transport_name); - std::function InitializeRtcpCallback(); std::function InitializeUnDemuxablePacketHandler(); - const rtc::scoped_refptr context_; - // Field trials active for this PeerConnection is the first of: - // a) Specified in PeerConnectionDependencies (owned). - // b) Accessed via ConnectionContext (e.g PeerConnectionFactoryDependencies> - // c) Created as Default (FieldTrialBasedConfig). - const webrtc::AlwaysValidPointer - trials_; + bool CanAttemptDtlsStunPiggybacking(const RTCConfiguration& configuration); + + const Environment env_; + const scoped_refptr context_; const PeerConnectionFactoryInterface::Options options_; PeerConnectionObserver* observer_ RTC_GUARDED_BY(signaling_thread()) = nullptr; const bool is_unified_plan_; - - // The EventLog needs to outlive `call_` (and any other object that uses it). 
- std::unique_ptr event_log_ RTC_GUARDED_BY(worker_thread()); - - // Points to the same thing as `event_log_`. Since it's const, we may read the - // pointer (but not touch the object) from any thread. - RtcEventLog* const event_log_ptr_ RTC_PT_GUARDED_BY(worker_thread()); + const bool dtls_enabled_; + bool return_histogram_very_quickly_ RTC_GUARDED_BY(signaling_thread()) = + false; + // Did the connectionState ever change to `connected`? + // Used to gather metrics only the first such state change. + bool was_ever_connected_ RTC_GUARDED_BY(signaling_thread()) = false; IceConnectionState ice_connection_state_ RTC_GUARDED_BY(signaling_thread()) = kIceConnectionNew; @@ -638,24 +647,24 @@ class PeerConnection : public PeerConnectionInternal, const std::unique_ptr async_dns_resolver_factory_; - std::unique_ptr + std::unique_ptr port_allocator_; // TODO(bugs.webrtc.org/9987): Accessed on both // signaling and network thread. - const std::unique_ptr + const std::unique_ptr ice_transport_factory_; // TODO(bugs.webrtc.org/9987): Accessed on the // signaling thread but the underlying raw // pointer is given to // `jsep_transport_controller_` and used on the // network thread. - const std::unique_ptr tls_cert_verifier_ + const std::unique_ptr tls_cert_verifier_ RTC_GUARDED_BY(network_thread()); // The unique_ptr belongs to the worker thread, but the Call object manages // its own thread safety. std::unique_ptr call_ RTC_GUARDED_BY(worker_thread()); ScopedTaskSafety signaling_thread_safety_; - rtc::scoped_refptr network_thread_safety_; - rtc::scoped_refptr worker_thread_safety_; + scoped_refptr network_thread_safety_; + scoped_refptr worker_thread_safety_; // Points to the same thing as `call_`. Since it's const, we may read the // pointer from any thread. @@ -665,20 +674,11 @@ class PeerConnection : public PeerConnectionInternal, std::unique_ptr legacy_stats_ RTC_GUARDED_BY(signaling_thread()); // A pointer is passed to senders_ - rtc::scoped_refptr stats_collector_ + scoped_refptr stats_collector_ RTC_GUARDED_BY(signaling_thread()); const std::string session_id_; - // The transport controller is set and used on the network thread. - // Some functions pass the value of the transport_controller_ pointer - // around as arguments while running on the signaling thread; these - // use the transport_controller_copy. - std::unique_ptr transport_controller_ - RTC_GUARDED_BY(network_thread()); - JsepTransportController* transport_controller_copy_ - RTC_GUARDED_BY(signaling_thread()) = nullptr; - // `sctp_mid_` is the content name (MID) in SDP. // Note: this is used as the data channel MID by both SCTP and data channel // transports. It is set when either transport is initialized and unset when @@ -686,19 +686,11 @@ class PeerConnection : public PeerConnectionInternal, // There is one copy on the signaling thread and another copy on the // networking thread. Changes are always initiated from the signaling // thread, but applied first on the networking thread via an invoke(). - absl::optional sctp_mid_s_ RTC_GUARDED_BY(signaling_thread()); - absl::optional sctp_mid_n_ RTC_GUARDED_BY(network_thread()); + std::optional sctp_mid_s_ RTC_GUARDED_BY(signaling_thread()); + std::optional sctp_mid_n_ RTC_GUARDED_BY(network_thread()); std::string sctp_transport_name_s_ RTC_GUARDED_BY(signaling_thread()); - // The machinery for handling offers and answers. Const after initialization. 
- std::unique_ptr sdp_handler_ - RTC_GUARDED_BY(signaling_thread()) RTC_PT_GUARDED_BY(signaling_thread()); - - const bool dtls_enabled_; - UsagePattern usage_pattern_ RTC_GUARDED_BY(signaling_thread()); - bool return_histogram_very_quickly_ RTC_GUARDED_BY(signaling_thread()) = - false; // The DataChannelController is accessed from both the signaling thread // and networking thread. It is a thread-aware object. @@ -708,16 +700,29 @@ class PeerConnection : public PeerConnectionInternal, PeerConnectionMessageHandler message_handler_ RTC_GUARDED_BY(signaling_thread()); + PayloadTypePicker payload_type_picker_; + + // The transport controller is set and used on the network thread. + // Some functions pass the value of the transport_controller_ pointer + // around as arguments while running on the signaling thread; these + // use the transport_controller_copy. + std::unique_ptr transport_controller_ + RTC_GUARDED_BY(network_thread()); + JsepTransportController* transport_controller_copy_ + RTC_GUARDED_BY(signaling_thread()) = nullptr; + + // The machinery for handling offers and answers. Const after initialization. + std::unique_ptr sdp_handler_ + RTC_GUARDED_BY(signaling_thread()) RTC_PT_GUARDED_BY(signaling_thread()); + // Administration of senders, receivers and transceivers // Accessed on both signaling and network thread. Const after Initialize(). std::unique_ptr rtp_manager_; - // Did the connectionState ever change to `connected`? - // Used to gather metrics only the first such state change. - bool was_ever_connected_ RTC_GUARDED_BY(signaling_thread()) = false; + std::unique_ptr codec_lookup_helper_; // This variable needs to be the last one in the class. - rtc::WeakPtrFactory weak_factory_; + WeakPtrFactory weak_factory_; }; } // namespace webrtc diff --git a/pc/peer_connection_adaptation_integrationtest.cc b/pc/peer_connection_adaptation_integrationtest.cc index 882fa36a57..da4f387ea6 100644 --- a/pc/peer_connection_adaptation_integrationtest.cc +++ b/pc/peer_connection_adaptation_integrationtest.cc @@ -11,42 +11,42 @@ #include #include -#include +#include -#include "absl/types/optional.h" #include "api/adaptation/resource.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/make_ref_counted.h" #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" #include "api/rtp_parameters.h" #include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" +#include "api/test/rtc_error_matchers.h" #include "api/video/video_source_interface.h" #include "call/adaptation/test/fake_resource.h" #include "pc/test/fake_periodic_video_source.h" #include "pc/test/fake_periodic_video_track_source.h" #include "pc/test/peer_connection_test_wrapper.h" #include "rtc_base/checks.h" -#include "rtc_base/gunit.h" #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" #include "rtc_base/virtual_socket_server.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" namespace webrtc { -const int64_t kDefaultTimeoutMs = 5000; - struct TrackWithPeriodicSource { - rtc::scoped_refptr track; - rtc::scoped_refptr periodic_track_source; + scoped_refptr track; + scoped_refptr periodic_track_source; }; // Performs an O/A exchange and waits until the signaling state is stable again. 
-void Negotiate(rtc::scoped_refptr caller, - rtc::scoped_refptr callee) { +void Negotiate(scoped_refptr caller, + scoped_refptr callee) { // Wire up callbacks and listeners such that a full O/A is performed in // response to CreateOffer(). PeerConnectionTestWrapper::Connect(caller.get(), callee.get()); @@ -55,12 +55,12 @@ void Negotiate(rtc::scoped_refptr caller, } TrackWithPeriodicSource CreateTrackWithPeriodicSource( - rtc::scoped_refptr factory) { + scoped_refptr factory) { FakePeriodicVideoSource::Config periodic_track_source_config; periodic_track_source_config.frame_interval_ms = 100; - periodic_track_source_config.timestamp_offset_ms = rtc::TimeMillis(); - rtc::scoped_refptr periodic_track_source = - rtc::make_ref_counted( + periodic_track_source_config.timestamp_offset_ms = TimeMillis(); + scoped_refptr periodic_track_source = + make_ref_counted( periodic_track_source_config, /* remote */ false); TrackWithPeriodicSource track_with_source; track_with_source.track = @@ -73,8 +73,8 @@ TrackWithPeriodicSource CreateTrackWithPeriodicSource( // parallel and this function makes no guarantee that the returnd VideoSinkWants // have yet to reflect the overuse signal. Used together with EXPECT_TRUE_WAIT // to "spam overuse until a change is observed". -rtc::VideoSinkWants TriggerOveruseAndGetSinkWants( - rtc::scoped_refptr fake_resource, +VideoSinkWants TriggerOveruseAndGetSinkWants( + scoped_refptr fake_resource, const FakePeriodicVideoSource& source) { fake_resource->SetUsageState(ResourceUsageState::kOveruse); return source.wants(); @@ -84,16 +84,15 @@ class PeerConnectionAdaptationIntegrationTest : public ::testing::Test { public: PeerConnectionAdaptationIntegrationTest() : virtual_socket_server_(), - network_thread_(new rtc::Thread(&virtual_socket_server_)), - worker_thread_(rtc::Thread::Create()) { + network_thread_(new Thread(&virtual_socket_server_)), + worker_thread_(Thread::Create()) { RTC_CHECK(network_thread_->Start()); RTC_CHECK(worker_thread_->Start()); } - rtc::scoped_refptr CreatePcWrapper( - const char* name) { - rtc::scoped_refptr pc_wrapper = - rtc::make_ref_counted( + scoped_refptr CreatePcWrapper(const char* name) { + scoped_refptr pc_wrapper = + make_ref_counted( name, &virtual_socket_server_, network_thread_.get(), worker_thread_.get()); PeerConnectionInterface::RTCConfiguration config; @@ -104,9 +103,9 @@ class PeerConnectionAdaptationIntegrationTest : public ::testing::Test { } protected: - rtc::VirtualSocketServer virtual_socket_server_; - std::unique_ptr network_thread_; - std::unique_ptr worker_thread_; + VirtualSocketServer virtual_socket_server_; + std::unique_ptr network_thread_; + std::unique_ptr worker_thread_; }; TEST_F(PeerConnectionAdaptationIntegrationTest, @@ -132,10 +131,13 @@ TEST_F(PeerConnectionAdaptationIntegrationTest, // Inject a fake resource and spam kOveruse until resolution becomes limited. auto fake_resource = FakeResource::Create("FakeResource"); caller->AddAdaptationResource(fake_resource); - EXPECT_TRUE_WAIT( - TriggerOveruseAndGetSinkWants(fake_resource, source).max_pixel_count < - pixel_count_before_overuse, - kDefaultTimeoutMs); + EXPECT_THAT(WaitUntil( + [&] { + return TriggerOveruseAndGetSinkWants(fake_resource, source) + .max_pixel_count; + }, + ::testing::Lt(pixel_count_before_overuse)), + IsRtcOk()); } TEST_F(PeerConnectionAdaptationIntegrationTest, @@ -163,10 +165,13 @@ TEST_F(PeerConnectionAdaptationIntegrationTest, int pixel_count_before_overuse = source.wants().max_pixel_count; // Spam kOveruse until resolution becomes limited. 
- EXPECT_TRUE_WAIT( - TriggerOveruseAndGetSinkWants(fake_resource, source).max_pixel_count < - pixel_count_before_overuse, - kDefaultTimeoutMs); + EXPECT_THAT(WaitUntil( + [&] { + return TriggerOveruseAndGetSinkWants(fake_resource, source) + .max_pixel_count; + }, + ::testing::Lt(pixel_count_before_overuse)), + IsRtcOk()); } } // namespace webrtc diff --git a/pc/peer_connection_bundle_unittest.cc b/pc/peer_connection_bundle_unittest.cc index e5ef16ff8a..e0f5643e17 100644 --- a/pc/peer_connection_bundle_unittest.cc +++ b/pc/peer_connection_bundle_unittest.cc @@ -10,20 +10,15 @@ #include -#include #include #include #include #include -#include #include #include -#include "api/audio/audio_mixer.h" -#include "api/audio_codecs/builtin_audio_decoder_factory.h" -#include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/candidate.h" -#include "api/create_peerconnection_factory.h" +#include "api/enable_media_with_defaults.h" #include "api/jsep.h" #include "api/media_types.h" #include "api/peer_connection_interface.h" @@ -31,9 +26,9 @@ #include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" -#include "api/stats/rtc_stats.h" #include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" +#include "api/test/rtc_error_matchers.h" #include "api/video_codecs/video_decoder_factory_template.h" #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h" @@ -45,38 +40,34 @@ #include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" #include "media/base/stream_params.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" #include "p2p/base/p2p_constants.h" -#include "p2p/base/port.h" #include "p2p/base/port_allocator.h" #include "p2p/base/transport_info.h" -#include "p2p/client/basic_port_allocator.h" #include "pc/channel.h" #include "pc/peer_connection.h" -#include "pc/peer_connection_proxy.h" #include "pc/peer_connection_wrapper.h" #include "pc/rtp_transceiver.h" #include "pc/rtp_transport_internal.h" #include "pc/sdp_utils.h" #include "pc/session_description.h" +#include "pc/test/fake_audio_capture_module.h" +#include "pc/test/integration_test_helpers.h" #include "pc/test/mock_peer_connection_observers.h" #include "rtc_base/checks.h" +#include "rtc_base/fake_network.h" #include "rtc_base/logging.h" #include "rtc_base/net_helper.h" #include "rtc_base/network.h" -#include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/socket_address.h" #include "rtc_base/thread.h" +#include "rtc_base/virtual_socket_server.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" + #ifdef WEBRTC_ANDROID #include "pc/test/android_test_initializer.h" #endif -#include "pc/test/fake_audio_capture_module.h" -#include "rtc_base/fake_network.h" -#include "rtc_base/gunit.h" -#include "rtc_base/virtual_socket_server.h" -#include "test/gmock.h" namespace webrtc { @@ -84,23 +75,20 @@ using BundlePolicy = PeerConnectionInterface::BundlePolicy; using RTCConfiguration = PeerConnectionInterface::RTCConfiguration; using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions; using RtcpMuxPolicy = PeerConnectionInterface::RtcpMuxPolicy; -using rtc::SocketAddress; + using ::testing::Combine; -using ::testing::ElementsAre; using 
::testing::UnorderedElementsAre; using ::testing::Values; -constexpr int kDefaultTimeout = 10000; - // TODO(steveanton): These tests should be rewritten to use the standard // RtpSenderInterface/DtlsTransportInterface objects once they're available in // the API. The RtpSender can be used to determine which transport a given media // will use: https://www.w3.org/TR/webrtc/#dom-rtcrtpsender-transport // Should also be able to remove GetTransceiversForTesting at that point. -class FakeNetworkManagerWithNoAnyNetwork : public rtc::FakeNetworkManager { +class FakeNetworkManagerWithNoAnyNetwork : public FakeNetworkManager { public: - std::vector GetAnyAddressNetworks() override { + std::vector GetAnyAddressNetworks() override { // This function allocates networks that are owned by the // NetworkManager. But some tests assume that they can release // all networks independent of the network manager. @@ -115,15 +103,15 @@ class PeerConnectionWrapperForBundleTest : public PeerConnectionWrapper { public: using PeerConnectionWrapper::PeerConnectionWrapper; - bool AddIceCandidateToMedia(cricket::Candidate* candidate, - cricket::MediaType media_type) { + bool AddIceCandidateToMedia(Candidate* candidate, + webrtc::MediaType media_type) { auto* desc = pc()->remote_description()->description(); for (size_t i = 0; i < desc->contents().size(); i++) { const auto& content = desc->contents()[i]; if (content.media_description()->type() == media_type) { - candidate->set_transport_name(content.name); + candidate->set_transport_name(content.mid()); std::unique_ptr jsep_candidate = - CreateIceCandidate(content.name, i, *candidate); + CreateIceCandidate(content.mid(), i, *candidate); return pc()->AddIceCandidate(jsep_candidate.get()); } } @@ -135,12 +123,11 @@ class PeerConnectionWrapperForBundleTest : public PeerConnectionWrapper { return (voice_channel() ? voice_channel()->rtp_transport() : nullptr); } - cricket::VoiceChannel* voice_channel() { + VoiceChannel* voice_channel() { auto transceivers = GetInternalPeerConnection()->GetTransceiversInternal(); for (const auto& transceiver : transceivers) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { - return static_cast( - transceiver->internal()->channel()); + if (transceiver->media_type() == webrtc::MediaType::AUDIO) { + return static_cast(transceiver->internal()->channel()); } } return nullptr; @@ -150,24 +137,16 @@ class PeerConnectionWrapperForBundleTest : public PeerConnectionWrapper { return (video_channel() ? video_channel()->rtp_transport() : nullptr); } - cricket::VideoChannel* video_channel() { + VideoChannel* video_channel() { auto transceivers = GetInternalPeerConnection()->GetTransceiversInternal(); for (const auto& transceiver : transceivers) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { - return static_cast( - transceiver->internal()->channel()); + if (transceiver->media_type() == webrtc::MediaType::VIDEO) { + return static_cast(transceiver->internal()->channel()); } } return nullptr; } - PeerConnection* GetInternalPeerConnection() { - auto* pci = - static_cast*>( - pc()); - return static_cast(pci->internal()); - } - // Returns true if the stats indicate that an ICE connection is either in // progress or established with the given remote address. 
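// AddIceCandidateToMedia() above now resolves the owning m= section via
// ContentInfo::mid() (the old `name` field is gone) and wraps a bare Candidate
// into a JSEP candidate. A condensed sketch of that construction, assuming the
// headers this test already includes (api/candidate.h, api/jsep.h,
// p2p/base/p2p_constants.h, rtc_base/net_helper.h, rtc_base/socket_address.h);
// MakeExampleJsepCandidate, the "audio" MID, the address, and mline index 0
// are made-up example values.

#include <memory>
#include <string>

#include "api/candidate.h"
#include "api/jsep.h"
#include "p2p/base/p2p_constants.h"
#include "rtc_base/net_helper.h"
#include "rtc_base/socket_address.h"

namespace webrtc {

std::unique_ptr<IceCandidateInterface> MakeExampleJsepCandidate() {
  const std::string kExampleMid = "audio";
  Candidate candidate;
  candidate.set_component(ICE_CANDIDATE_COMPONENT_DEFAULT);
  candidate.set_protocol(UDP_PROTOCOL_NAME);
  candidate.set_address(SocketAddress("1.1.1.1", 1111));
  candidate.set_transport_name(kExampleMid);
  // The real test derives the mline index from the position of the matching
  // ContentInfo; 0 is assumed here.
  return CreateIceCandidate(kExampleMid, /*sdp_mline_index=*/0, candidate);
}

}  // namespace webrtc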
bool HasConnectionWithRemoteAddress(const SocketAddress& address) { @@ -199,12 +178,12 @@ class PeerConnectionWrapperForBundleTest : public PeerConnectionWrapper { return false; } - rtc::FakeNetworkManager* network() { return network_; } + FakeNetworkManager* network() { return network_; } - void set_network(rtc::FakeNetworkManager* network) { network_ = network; } + void set_network(FakeNetworkManager* network) { network_ = network; } private: - rtc::FakeNetworkManager* network_; + FakeNetworkManager* network_; }; class PeerConnectionBundleBaseTest : public ::testing::Test { @@ -212,24 +191,10 @@ class PeerConnectionBundleBaseTest : public ::testing::Test { typedef std::unique_ptr WrapperPtr; explicit PeerConnectionBundleBaseTest(SdpSemantics sdp_semantics) - : vss_(new rtc::VirtualSocketServer()), - socket_factory_(new rtc::BasicPacketSocketFactory(vss_.get())), - main_(vss_.get()), - sdp_semantics_(sdp_semantics) { + : main_(&vss_), sdp_semantics_(sdp_semantics) { #ifdef WEBRTC_ANDROID InitializeAndroidObjects(); #endif - pc_factory_ = CreatePeerConnectionFactory( - rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(), - rtc::scoped_refptr(FakeAudioCaptureModule::Create()), - CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory(), - std::make_unique>(), - std::make_unique>(), - nullptr /* audio_mixer */, nullptr /* audio_processing */); } WrapperPtr CreatePeerConnection() { @@ -237,25 +202,45 @@ class PeerConnectionBundleBaseTest : public ::testing::Test { } WrapperPtr CreatePeerConnection(const RTCConfiguration& config) { - auto* fake_network = NewFakeNetwork(); - auto port_allocator = std::make_unique( - fake_network, socket_factory_.get()); - port_allocator->set_flags(cricket::PORTALLOCATOR_DISABLE_TCP | - cricket::PORTALLOCATOR_DISABLE_RELAY); - port_allocator->set_step_delay(cricket::kMinimumStepDelay); + // Each PeerConnection has its own `NetworkManager` which is injected into + // `PeerConnectionFactoryDependencies`, thus each PeerConnection in these + // tests is created with own PeerConnectionFactory. 
+ PeerConnectionFactoryDependencies pcf_deps; + pcf_deps.network_thread = Thread::Current(); + pcf_deps.worker_thread = Thread::Current(); + pcf_deps.signaling_thread = Thread::Current(); + pcf_deps.socket_factory = &vss_; + auto network_manager = + std::make_unique(); + auto* fake_network = network_manager.get(); + pcf_deps.network_manager = std::move(network_manager); + pcf_deps.adm = FakeAudioCaptureModule::Create(); + pcf_deps.video_encoder_factory = + std::make_unique>(); + pcf_deps.video_decoder_factory = + std::make_unique>(); + EnableMediaWithDefaults(pcf_deps); + + scoped_refptr pc_factory = + CreateModularPeerConnectionFactory(std::move(pcf_deps)); + auto observer = std::make_unique(); RTCConfiguration modified_config = config; + modified_config.set_port_allocator_flags(PORTALLOCATOR_DISABLE_TCP | + PORTALLOCATOR_DISABLE_RELAY); modified_config.sdp_semantics = sdp_semantics_; - PeerConnectionDependencies pc_dependencies(observer.get()); - pc_dependencies.allocator = std::move(port_allocator); - auto result = pc_factory_->CreatePeerConnectionOrError( - modified_config, std::move(pc_dependencies)); + auto result = pc_factory->CreatePeerConnectionOrError( + modified_config, PeerConnectionDependencies(observer.get())); if (!result.ok()) { return nullptr; } auto wrapper = std::make_unique( - pc_factory_, result.MoveValue(), std::move(observer)); + std::move(pc_factory), result.MoveValue(), std::move(observer)); wrapper->set_network(fake_network); return wrapper; } @@ -273,34 +258,16 @@ class PeerConnectionBundleBaseTest : public ::testing::Test { return wrapper; } - cricket::Candidate CreateLocalUdpCandidate( - const rtc::SocketAddress& address) { - cricket::Candidate candidate; - candidate.set_component(cricket::ICE_CANDIDATE_COMPONENT_DEFAULT); - candidate.set_protocol(cricket::UDP_PROTOCOL_NAME); + Candidate CreateLocalUdpCandidate(const SocketAddress& address) { + Candidate candidate; + candidate.set_component(ICE_CANDIDATE_COMPONENT_DEFAULT); + candidate.set_protocol(UDP_PROTOCOL_NAME); candidate.set_address(address); - candidate.set_type(cricket::LOCAL_PORT_TYPE); return candidate; } - rtc::FakeNetworkManager* NewFakeNetwork() { - // The PeerConnection's port allocator is tied to the PeerConnection's - // lifetime and expects the underlying NetworkManager to outlive it. If - // PeerConnectionWrapper owned the NetworkManager, it would be destroyed - // before the PeerConnection (since subclass members are destroyed before - // base class members). Therefore, the test fixture will own all the fake - // networks even though tests should access the fake network through the - // PeerConnectionWrapper. - auto* fake_network = new FakeNetworkManagerWithNoAnyNetwork(); - fake_networks_.emplace_back(fake_network); - return fake_network; - } - - std::unique_ptr vss_; - std::unique_ptr socket_factory_; - rtc::AutoSocketServerThread main_; - rtc::scoped_refptr pc_factory_; - std::vector> fake_networks_; + VirtualSocketServer vss_; + AutoSocketServerThread main_; const SdpSemantics sdp_semantics_; }; @@ -319,7 +286,7 @@ class PeerConnectionBundleTestUnifiedPlan }; SdpContentMutator RemoveRtcpMux() { - return [](cricket::ContentInfo* content, cricket::TransportInfo* transport) { + return [](ContentInfo* content, TransportInfo* transport) { content->media_description()->set_rtcp_mux(false); }; } @@ -358,26 +325,30 @@ TEST_P(PeerConnectionBundleTest, ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer))); // Check that caller has separate RTP and RTCP candidates for each media. 
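// The EXPECT_TRUE_WAIT(condition, kDefaultTimeout) polling macro from
// rtc_base/gunit.h is replaced below (and throughout these tests) by
// WaitUntil() plus a gMock matcher, checked with IsRtcOk(). The migrated
// shape, taken from the hunks in this file and assuming the newly added
// includes (test/wait_until.h, api/test/rtc_error_matchers.h, test/gmock.h);
// `caller` is whichever test wrapper the surrounding test already has:

EXPECT_THAT(WaitUntil([&] { return caller->IsIceGatheringDone(); },
                      ::testing::IsTrue()),
            IsRtcOk());

// WaitUntil re-evaluates the lambda until the matcher is satisfied or a
// timeout elapses, and its result lets IsRtcOk() report a descriptive failure
// instead of a bare boolean.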
- EXPECT_TRUE_WAIT(caller->IsIceGatheringDone(), kDefaultTimeout); + EXPECT_THAT(WaitUntil([&] { return caller->IsIceGatheringDone(); }, + ::testing::IsTrue()), + IsRtcOk()); EXPECT_THAT( GetCandidateComponents(caller->observer()->GetCandidatesByMline(0)), - UnorderedElementsAre(cricket::ICE_CANDIDATE_COMPONENT_RTP, - cricket::ICE_CANDIDATE_COMPONENT_RTCP)); + UnorderedElementsAre(ICE_CANDIDATE_COMPONENT_RTP, + ICE_CANDIDATE_COMPONENT_RTCP)); EXPECT_THAT( GetCandidateComponents(caller->observer()->GetCandidatesByMline(1)), - UnorderedElementsAre(cricket::ICE_CANDIDATE_COMPONENT_RTP, - cricket::ICE_CANDIDATE_COMPONENT_RTCP)); + UnorderedElementsAre(ICE_CANDIDATE_COMPONENT_RTP, + ICE_CANDIDATE_COMPONENT_RTCP)); // Check that callee has separate RTP and RTCP candidates for each media. - EXPECT_TRUE_WAIT(callee->IsIceGatheringDone(), kDefaultTimeout); + EXPECT_THAT(WaitUntil([&] { return callee->IsIceGatheringDone(); }, + ::testing::IsTrue()), + IsRtcOk()); EXPECT_THAT( GetCandidateComponents(callee->observer()->GetCandidatesByMline(0)), - UnorderedElementsAre(cricket::ICE_CANDIDATE_COMPONENT_RTP, - cricket::ICE_CANDIDATE_COMPONENT_RTCP)); + UnorderedElementsAre(ICE_CANDIDATE_COMPONENT_RTP, + ICE_CANDIDATE_COMPONENT_RTCP)); EXPECT_THAT( GetCandidateComponents(callee->observer()->GetCandidatesByMline(1)), - UnorderedElementsAre(cricket::ICE_CANDIDATE_COMPONENT_RTP, - cricket::ICE_CANDIDATE_COMPONENT_RTCP)); + UnorderedElementsAre(ICE_CANDIDATE_COMPONENT_RTP, + ICE_CANDIDATE_COMPONENT_RTCP)); } // Test that there is 1 local UDP candidate for both RTP and RTCP for each media @@ -396,7 +367,9 @@ TEST_P(PeerConnectionBundleTest, ASSERT_TRUE( caller->SetRemoteDescription(callee->CreateAnswer(options_no_bundle))); - EXPECT_TRUE_WAIT(caller->IsIceGatheringDone(), kDefaultTimeout); + EXPECT_THAT(WaitUntil([&] { return caller->IsIceGatheringDone(); }, + ::testing::IsTrue()), + IsRtcOk()); EXPECT_EQ(1u, caller->observer()->GetCandidatesByMline(0).size()); EXPECT_EQ(1u, caller->observer()->GetCandidatesByMline(1).size()); @@ -417,7 +390,9 @@ TEST_P(PeerConnectionBundleTest, ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); ASSERT_TRUE(caller->SetRemoteDescription(callee->CreateAnswer())); - EXPECT_TRUE_WAIT(caller->IsIceGatheringDone(), kDefaultTimeout); + EXPECT_THAT(WaitUntil([&] { return caller->IsIceGatheringDone(); }, + ::testing::IsTrue()), + IsRtcOk()); EXPECT_EQ(1u, caller->observer()->GetCandidatesByMline(0).size()); EXPECT_EQ(0u, caller->observer()->GetCandidatesByMline(1).size()); @@ -682,22 +657,32 @@ TEST_P(PeerConnectionBundleTest, // candidate does _not_ change state. So we interleave candidates and assume // that messages are executed in the order they were posted. 
- cricket::Candidate audio_candidate1 = CreateLocalUdpCandidate(kAudioAddress1); + Candidate audio_candidate1 = CreateLocalUdpCandidate(kAudioAddress1); ASSERT_TRUE(caller->AddIceCandidateToMedia(&audio_candidate1, - cricket::MEDIA_TYPE_AUDIO)); + webrtc::MediaType::AUDIO)); - cricket::Candidate video_candidate = CreateLocalUdpCandidate(kVideoAddress); + Candidate video_candidate = CreateLocalUdpCandidate(kVideoAddress); ASSERT_TRUE(caller->AddIceCandidateToMedia(&video_candidate, - cricket::MEDIA_TYPE_VIDEO)); + webrtc::MediaType::VIDEO)); - cricket::Candidate audio_candidate2 = CreateLocalUdpCandidate(kAudioAddress2); + Candidate audio_candidate2 = CreateLocalUdpCandidate(kAudioAddress2); ASSERT_TRUE(caller->AddIceCandidateToMedia(&audio_candidate2, - cricket::MEDIA_TYPE_AUDIO)); + webrtc::MediaType::AUDIO)); - EXPECT_TRUE_WAIT(caller->HasConnectionWithRemoteAddress(kAudioAddress1), - kDefaultTimeout); - EXPECT_TRUE_WAIT(caller->HasConnectionWithRemoteAddress(kAudioAddress2), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil( + [&] { + return caller->HasConnectionWithRemoteAddress(kAudioAddress1); + }, + ::testing::IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil( + [&] { + return caller->HasConnectionWithRemoteAddress(kAudioAddress2); + }, + ::testing::IsTrue()), + IsRtcOk()); EXPECT_FALSE(caller->HasConnectionWithRemoteAddress(kVideoAddress)); } @@ -714,12 +699,12 @@ TEST_P(PeerConnectionBundleTest, BundleOnFirstMidInAnswer) { auto answer = callee->CreateAnswer(); auto* old_bundle_group = - answer->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + answer->description()->GetGroupByName(GROUP_TYPE_BUNDLE); std::string first_mid = old_bundle_group->content_names()[0]; std::string second_mid = old_bundle_group->content_names()[1]; - answer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); + answer->description()->RemoveGroupByName(GROUP_TYPE_BUNDLE); - cricket::ContentGroup new_bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup new_bundle_group(GROUP_TYPE_BUNDLE); new_bundle_group.AddContentName(second_mid); new_bundle_group.AddContentName(first_mid); answer->description()->AddGroup(new_bundle_group); @@ -743,18 +728,18 @@ TEST_P(PeerConnectionBundleTest, ApplyDescriptionWithSameSsrcsBundledFails) { caller->SetLocalDescription(CloneSessionDescription(offer.get()))); // Modify the remote SDP to make two m= sections have the same SSRC. ASSERT_GE(offer->description()->contents().size(), 2U); - offer->description() - ->contents()[0] - .media_description() - ->mutable_streams()[0] - .ssrcs[0] = 1111222; - offer->description() - ->contents()[1] - .media_description() - ->mutable_streams()[0] - .ssrcs[0] = 1111222; - EXPECT_TRUE(callee->SetRemoteDescription(std::move(offer))); + ReplaceFirstSsrc(offer->description() + ->contents()[0] + .media_description() + ->mutable_streams()[0], + 1111222); + ReplaceFirstSsrc(offer->description() + ->contents()[1] + .media_description() + ->mutable_streams()[0], + 1111222); + EXPECT_TRUE(callee->SetRemoteDescription(std::move(offer))); // When BUNDLE is enabled, applying the description is expected to fail // because the demuxing criteria can not be satisfied. auto answer = callee->CreateAnswer(options); @@ -774,16 +759,16 @@ TEST_P(PeerConnectionBundleTest, caller->SetLocalDescription(CloneSessionDescription(offer.get()))); // Modify the remote SDP to make two m= sections have the same SSRC. 
ASSERT_GE(offer->description()->contents().size(), 2U); - offer->description() - ->contents()[0] - .media_description() - ->mutable_streams()[0] - .ssrcs[0] = 1111222; - offer->description() - ->contents()[1] - .media_description() - ->mutable_streams()[0] - .ssrcs[0] = 1111222; + ReplaceFirstSsrc(offer->description() + ->contents()[0] + .media_description() + ->mutable_streams()[0], + 1111222); + ReplaceFirstSsrc(offer->description() + ->contents()[1] + .media_description() + ->mutable_streams()[0], + 1111222); EXPECT_TRUE(callee->SetRemoteDescription(std::move(offer))); // Without BUNDLE, demuxing is done per-transport. @@ -804,10 +789,10 @@ TEST_P(PeerConnectionBundleTest, RejectDescriptionChangingBundleTag) { // Create a new bundle-group with different bundled_mid. auto* old_bundle_group = - offer->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + offer->description()->GetGroupByName(GROUP_TYPE_BUNDLE); std::string first_mid = old_bundle_group->content_names()[0]; std::string second_mid = old_bundle_group->content_names()[1]; - cricket::ContentGroup new_bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup new_bundle_group(GROUP_TYPE_BUNDLE); new_bundle_group.AddContentName(second_mid); auto re_offer = CloneSessionDescription(offer.get()); @@ -816,14 +801,14 @@ TEST_P(PeerConnectionBundleTest, RejectDescriptionChangingBundleTag) { // Reject the first MID. answer->description()->contents()[0].rejected = true; // Remove the first MID from the bundle group. - answer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); + answer->description()->RemoveGroupByName(GROUP_TYPE_BUNDLE); answer->description()->AddGroup(new_bundle_group); // The answer is expected to be rejected. EXPECT_FALSE(caller->SetRemoteDescription(std::move(answer))); // Do the same thing for re-offer. re_offer->description()->contents()[0].rejected = true; - re_offer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); + re_offer->description()->RemoveGroupByName(GROUP_TYPE_BUNDLE); re_offer->description()->AddGroup(new_bundle_group); // The re-offer is expected to be rejected. EXPECT_FALSE(caller->SetLocalDescription(std::move(re_offer))); @@ -844,17 +829,17 @@ TEST_P(PeerConnectionBundleTest, RemovingContentAndRejectBundleGroup) { // Removing the second MID from the BUNDLE group. auto* old_bundle_group = - offer->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + offer->description()->GetGroupByName(webrtc::GROUP_TYPE_BUNDLE); std::string first_mid = old_bundle_group->content_names()[0]; std::string third_mid = old_bundle_group->content_names()[2]; - cricket::ContentGroup new_bundle_group(cricket::GROUP_TYPE_BUNDLE); + webrtc::ContentGroup new_bundle_group(webrtc::GROUP_TYPE_BUNDLE); new_bundle_group.AddContentName(first_mid); new_bundle_group.AddContentName(third_mid); // Reject the entire new bundle group. 
re_offer->description()->contents()[0].rejected = true; re_offer->description()->contents()[2].rejected = true; - re_offer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); + re_offer->description()->RemoveGroupByName(webrtc::GROUP_TYPE_BUNDLE); re_offer->description()->AddGroup(new_bundle_group); EXPECT_TRUE(caller->SetLocalDescription(std::move(re_offer))); @@ -868,12 +853,12 @@ TEST_P(PeerConnectionBundleTest, AddContentToBundleGroupInAnswerNotSupported) { auto callee = CreatePeerConnectionWithAudioVideo(); auto offer = caller->CreateOffer(); - std::string first_mid = offer->description()->contents()[0].name; - std::string second_mid = offer->description()->contents()[1].name; + const auto first_mid = offer->description()->contents()[0].mid(); + const auto second_mid = offer->description()->contents()[1].mid(); - cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group(GROUP_TYPE_BUNDLE); bundle_group.AddContentName(first_mid); - offer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); + offer->description()->RemoveGroupByName(GROUP_TYPE_BUNDLE); offer->description()->AddGroup(bundle_group); EXPECT_TRUE( caller->SetLocalDescription(CloneSessionDescription(offer.get()))); @@ -881,7 +866,7 @@ TEST_P(PeerConnectionBundleTest, AddContentToBundleGroupInAnswerNotSupported) { auto answer = callee->CreateAnswer(); bundle_group.AddContentName(second_mid); - answer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); + answer->description()->RemoveGroupByName(GROUP_TYPE_BUNDLE); answer->description()->AddGroup(bundle_group); // The answer is expected to be rejected because second mid is not in the @@ -896,9 +881,9 @@ TEST_P(PeerConnectionBundleTest, RejectBundleGroupWithNonExistingMid) { auto offer = caller->CreateOffer(); auto invalid_bundle_group = - *offer->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + *offer->description()->GetGroupByName(GROUP_TYPE_BUNDLE); invalid_bundle_group.AddContentName("non-existing-MID"); - offer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); + offer->description()->RemoveGroupByName(GROUP_TYPE_BUNDLE); offer->description()->AddGroup(invalid_bundle_group); EXPECT_FALSE( @@ -918,12 +903,12 @@ TEST_P(PeerConnectionBundleTest, RemoveContentFromBundleGroup) { EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); auto answer = callee->CreateAnswer(); - std::string second_mid = answer->description()->contents()[1].name; + const auto second_mid = answer->description()->contents()[1].mid(); auto invalid_bundle_group = - *answer->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + *answer->description()->GetGroupByName(GROUP_TYPE_BUNDLE); invalid_bundle_group.RemoveContentName(second_mid); - answer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); + answer->description()->RemoveGroupByName(GROUP_TYPE_BUNDLE); answer->description()->AddGroup(invalid_bundle_group); EXPECT_FALSE( @@ -959,8 +944,8 @@ TEST_F(PeerConnectionBundleTestUnifiedPlan, // Verify that the answer actually contained an empty bundle group. 
const SessionDescriptionInterface* desc = callee->pc()->local_description(); ASSERT_NE(nullptr, desc); - const cricket::ContentGroup* bundle_group = - desc->description()->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + const ContentGroup* bundle_group = + desc->description()->GetGroupByName(GROUP_TYPE_BUNDLE); ASSERT_NE(nullptr, bundle_group); EXPECT_TRUE(bundle_group->content_names().empty()); } @@ -976,11 +961,11 @@ TEST_F(PeerConnectionBundleTestUnifiedPlan, MultipleBundleGroups) { auto offer = caller->CreateOffer(RTCOfferAnswerOptions()); // Modify the GROUP to have two BUNDLEs. We know that the MIDs will be 0,1,2,4 // because our implementation has predictable MIDs. - offer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); - cricket::ContentGroup bundle_group1(cricket::GROUP_TYPE_BUNDLE); + offer->description()->RemoveGroupByName(GROUP_TYPE_BUNDLE); + ContentGroup bundle_group1(GROUP_TYPE_BUNDLE); bundle_group1.AddContentName("0"); bundle_group1.AddContentName("1"); - cricket::ContentGroup bundle_group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group2(GROUP_TYPE_BUNDLE); bundle_group2.AddContentName("2"); bundle_group2.AddContentName("3"); offer->description()->AddGroup(bundle_group1); @@ -1040,8 +1025,8 @@ TEST_F(PeerConnectionBundleTestUnifiedPlan, AddNonBundledSection) { // Add a track but munge SDP so it's not part of the bundle group. caller->AddAudioTrack("3_audio"); offer = caller->CreateOffer(RTCOfferAnswerOptions()); - offer->description()->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); - cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE); + offer->description()->RemoveGroupByName(GROUP_TYPE_BUNDLE); + ContentGroup bundle_group(GROUP_TYPE_BUNDLE); bundle_group.AddContentName("0"); bundle_group.AddContentName("1"); offer->description()->AddGroup(bundle_group); diff --git a/pc/peer_connection_callsetup_perf_tests.cc b/pc/peer_connection_callsetup_perf_tests.cc new file mode 100644 index 0000000000..c8c4ff51cc --- /dev/null +++ b/pc/peer_connection_callsetup_perf_tests.cc @@ -0,0 +1,241 @@ +/* + * Copyright 2025 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/strings/str_cat.h" +#include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/data_channel_interface.h" +#include "api/field_trials.h" +#include "api/field_trials_view.h" +#include "api/jsep.h" +#include "api/make_ref_counted.h" +#include "api/rtc_error.h" +#include "api/scoped_refptr.h" +#include "api/test/metrics/global_metrics_logger_and_exporter.h" +#include "api/test/metrics/metric.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" +#include "p2p/base/transport_description.h" +#include "pc/sdp_utils.h" +#include "pc/test/mock_peer_connection_observers.h" +#include "pc/test/peer_connection_test_wrapper.h" +#include "rtc_base/checks.h" +#include "rtc_base/thread.h" +#include "rtc_base/time_utils.h" +#include "rtc_base/virtual_socket_server.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/wait_until.h" + +using ::testing::IsTrue; +using ::testing::Values; + +using ::webrtc::test::GetGlobalMetricsLogger; +using ::webrtc::test::ImprovementDirection; +using ::webrtc::test::Unit; +namespace webrtc { + +// All tests in this file require SCTP support. +#ifdef WEBRTC_HAVE_SCTP + +class PeerConnectionDataChannelOpenTest + : public ::testing::TestWithParam< + std::tuple> { + public: + PeerConnectionDataChannelOpenTest() + : background_thread_(std::make_unique(&vss_)) { + RTC_CHECK(background_thread_->Start()); + // Delay is set to 50ms so we get a 100ms RTT. + vss_.set_delay_mean(/*delay_ms=*/50); + vss_.UpdateDelayDistribution(); + } + + scoped_refptr CreatePc( + std::unique_ptr field_trials = nullptr) { + auto pc_wrapper = make_ref_counted( + "pc", &vss_, background_thread_.get(), background_thread_.get()); + pc_wrapper->CreatePc({}, CreateBuiltinAudioEncoderFactory(), + CreateBuiltinAudioDecoderFactory(), + std::move(field_trials)); + return pc_wrapper; + } + + void SignalIceCandidates( + scoped_refptr from_pc_wrapper, + scoped_refptr to_pc_wrapper) { + from_pc_wrapper->SignalOnIceCandidateReady.connect( + to_pc_wrapper.get(), &PeerConnectionTestWrapper::AddIceCandidate); + } + + void Negotiate(scoped_refptr local_pc_wrapper, + scoped_refptr remote_pc_wrapper, + ConnectionRole remote_role) { + std::unique_ptr offer = + CreateOffer(local_pc_wrapper); + scoped_refptr p1 = + SetLocalDescription(local_pc_wrapper, offer.get()); + std::unique_ptr modified_offer = + offer->Clone(); + // Modify offer role to get desired remote role. 
+ if (remote_role == CONNECTIONROLE_PASSIVE) { + auto& transport_infos = modified_offer->description()->transport_infos(); + ASSERT_TRUE(!transport_infos.empty()); + transport_infos[0].description.connection_role = CONNECTIONROLE_ACTIVE; + } + scoped_refptr p2 = + SetRemoteDescription(remote_pc_wrapper, modified_offer.get()); + EXPECT_TRUE(Await({p1, p2})); + std::unique_ptr answer = + CreateAnswer(remote_pc_wrapper); + p1 = SetLocalDescription(remote_pc_wrapper, answer.get()); + p2 = SetRemoteDescription(local_pc_wrapper, answer.get()); + EXPECT_TRUE(Await({p1, p2})); + } + + bool WaitForDataChannelOpen(scoped_refptr dc) { + return WaitUntil( + [&] { + return dc->state() == DataChannelInterface::DataState::kOpen; + }, + IsTrue(), {.timeout = webrtc::TimeDelta::Millis(5000)}) + .ok(); + } + + protected: + std::unique_ptr CreateOffer( + scoped_refptr pc_wrapper) { + auto observer = make_ref_counted(); + pc_wrapper->pc()->CreateOffer(observer.get(), {}); + EXPECT_THAT(WaitUntil([&] { return observer->called(); }, IsTrue()), + IsRtcOk()); + return observer->MoveDescription(); + } + + std::unique_ptr CreateAnswer( + scoped_refptr pc_wrapper) { + auto observer = make_ref_counted(); + pc_wrapper->pc()->CreateAnswer(observer.get(), {}); + EXPECT_THAT(WaitUntil([&] { return observer->called(); }, IsTrue()), + IsRtcOk()); + return observer->MoveDescription(); + } + + scoped_refptr SetLocalDescription( + scoped_refptr pc_wrapper, + SessionDescriptionInterface* sdp) { + auto observer = make_ref_counted(); + pc_wrapper->pc()->SetLocalDescription( + observer.get(), CloneSessionDescription(sdp).release()); + return observer; + } + + scoped_refptr SetRemoteDescription( + scoped_refptr pc_wrapper, + SessionDescriptionInterface* sdp) { + auto observer = make_ref_counted(); + pc_wrapper->pc()->SetRemoteDescription( + observer.get(), CloneSessionDescription(sdp).release()); + return observer; + } + + // To avoid ICE candidates arriving before the remote endpoint has received + // the offer, it is important that SetLocalDescription() and + // SetRemoteDescription() are kicked off without awaiting in-between. This + // helper is used to await multiple observers.
+ bool Await( + std::vector> observers) { + for (auto& observer : observers) { + auto result = WaitUntil([&] { return observer->called(); }, IsTrue()); + + if (!result.ok() || !observer->result()) { + return false; + } + } + return true; + } + + VirtualSocketServer vss_; + std::unique_ptr background_thread_; +}; + +TEST_P(PeerConnectionDataChannelOpenTest, OpenAtCaller) { + std::string trials = std::get<0>(GetParam()); + bool skip_candidates_from_caller = std::get<1>(GetParam()); + ConnectionRole role = std::get<2>(GetParam()); + std::string role_string; + ASSERT_TRUE(ConnectionRoleToString(role, &role_string)); + + scoped_refptr local_pc_wrapper = + CreatePc(FieldTrials::CreateNoGlobal(trials)); + scoped_refptr remote_pc_wrapper = + CreatePc(FieldTrials::CreateNoGlobal(trials)); + + if (!skip_candidates_from_caller) { + SignalIceCandidates(local_pc_wrapper, remote_pc_wrapper); + } + SignalIceCandidates(remote_pc_wrapper, local_pc_wrapper); + + auto dc = local_pc_wrapper->CreateDataChannel("test", {}); + Negotiate(local_pc_wrapper, remote_pc_wrapper, role); + uint64_t start_time = TimeNanos(); + EXPECT_TRUE(WaitForDataChannelOpen(dc)); + uint64_t open_time = TimeNanos(); + uint64_t setup_time = open_time - start_time; + + double setup_time_millis = setup_time / kNumNanosecsPerMillisec; + std::string test_description = + "emulate_server=" + absl::StrCat(skip_candidates_from_caller) + + "/dtls_role=" + role_string + "/trials=" + trials; + GetGlobalMetricsLogger()->LogSingleValueMetric( + "TimeToOpenDataChannel", test_description, setup_time_millis, + Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter); +} + +INSTANTIATE_TEST_SUITE_P( + PeerConnectionDataChannelOpenTest, + PeerConnectionDataChannelOpenTest, + ::testing::Combine( + testing::Values( // Field trials to use. + // WebRTC 1.0 + DTLS 1.2 + "WebRTC-IceHandshakeDtls/Disabled/WebRTC-ForceDtls13/" + "Disabled/", + // SPED + DTLS 1.2 + "WebRTC-IceHandshakeDtls/Enabled/WebRTC-ForceDtls13/" + "Disabled/", + // WebRTC 1.0 + DTLS 1.3 + "WebRTC-IceHandshakeDtls/Disabled/WebRTC-ForceDtls13/" + "Enabled/", + // SPED + DTLS 1.3 + "WebRTC-IceHandshakeDtls/Enabled/WebRTC-ForceDtls13/" + "Enabled/"), + testing::Bool(), // Whether to skip signaling candidates from + // first connection. + testing::Values( + // Default, other side will send + // the DTLS handshake. + CONNECTIONROLE_ACTIVE, + // Local side will send the DTLS + // handshake. 
+ CONNECTIONROLE_PASSIVE))); + +#endif // WEBRTC_HAVE_SCTP + +} // namespace webrtc diff --git a/pc/peer_connection_crypto_unittest.cc b/pc/peer_connection_crypto_unittest.cc index dc350b2be0..fd0f3f6526 100644 --- a/pc/peer_connection_crypto_unittest.cc +++ b/pc/peer_connection_crypto_unittest.cc @@ -14,20 +14,19 @@ #include #include #include -#include #include #include -#include "absl/types/optional.h" -#include "api/audio/audio_mixer.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/create_peerconnection_factory.h" -#include "api/crypto/crypto_options.h" -#include "api/crypto_params.h" +#include "api/environment/environment_factory.h" #include "api/jsep.h" +#include "api/make_ref_counted.h" #include "api/peer_connection_interface.h" #include "api/scoped_refptr.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" #include "api/video_codecs/video_decoder_factory_template.h" #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h" @@ -38,12 +37,9 @@ #include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h" #include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "p2p/base/fake_port_allocator.h" -#include "p2p/base/port_allocator.h" #include "p2p/base/transport_description.h" #include "p2p/base/transport_info.h" +#include "p2p/test/fake_port_allocator.h" #include "pc/media_protocol_names.h" #include "pc/media_session.h" #include "pc/peer_connection_wrapper.h" @@ -55,14 +51,14 @@ #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/ssl_fingerprint.h" #include "rtc_base/thread.h" +#include "test/gmock.h" #include "test/gtest.h" -#include "test/scoped_key_value_config.h" +#include "test/wait_until.h" #ifdef WEBRTC_ANDROID #include "pc/test/android_test_initializer.h" #endif #include "pc/test/fake_audio_capture_module.h" #include "pc/test/fake_rtc_certificate_generator.h" -#include "rtc_base/gunit.h" #include "rtc_base/virtual_socket_server.h" namespace webrtc { @@ -70,6 +66,7 @@ namespace webrtc { using RTCConfiguration = PeerConnectionInterface::RTCConfiguration; using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions; using ::testing::Combine; +using ::testing::HasSubstr; using ::testing::Values; constexpr int kGenerateCertTimeout = 1000; @@ -79,14 +76,14 @@ class PeerConnectionCryptoBaseTest : public ::testing::Test { typedef std::unique_ptr WrapperPtr; explicit PeerConnectionCryptoBaseTest(SdpSemantics sdp_semantics) - : vss_(new rtc::VirtualSocketServer()), + : vss_(new VirtualSocketServer()), main_(vss_.get()), sdp_semantics_(sdp_semantics) { #ifdef WEBRTC_ANDROID InitializeAndroidObjects(); #endif pc_factory_ = CreatePeerConnectionFactory( - rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(), + Thread::Current(), Thread::Current(), Thread::Current(), FakeAudioCaptureModule::Create(), CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory(), std::make_unique cert_gen) { - auto fake_port_allocator = std::make_unique( - rtc::Thread::Current(), - std::make_unique(vss_.get()), - &field_trials_); + std::unique_ptr cert_gen) { + auto fake_port_allocator = + 
std::make_unique(CreateEnvironment(), vss_.get()); auto observer = std::make_unique(); RTCConfiguration modified_config = config; modified_config.sdp_semantics = sdp_semantics_; @@ -143,67 +138,39 @@ class PeerConnectionCryptoBaseTest : public ::testing::Test { return wrapper; } - cricket::ConnectionRole& AudioConnectionRole( - cricket::SessionDescription* desc) { - return ConnectionRoleFromContent(desc, cricket::GetFirstAudioContent(desc)); + ConnectionRole& AudioConnectionRole(SessionDescription* desc) { + return ConnectionRoleFromContent(desc, GetFirstAudioContent(desc)); } - cricket::ConnectionRole& VideoConnectionRole( - cricket::SessionDescription* desc) { - return ConnectionRoleFromContent(desc, cricket::GetFirstVideoContent(desc)); + ConnectionRole& VideoConnectionRole(SessionDescription* desc) { + return ConnectionRoleFromContent(desc, GetFirstVideoContent(desc)); } - cricket::ConnectionRole& ConnectionRoleFromContent( - cricket::SessionDescription* desc, - cricket::ContentInfo* content) { + ConnectionRole& ConnectionRoleFromContent(SessionDescription* desc, + ContentInfo* content) { RTC_DCHECK(content); - auto* transport_info = desc->GetTransportInfoByName(content->name); + auto* transport_info = desc->GetTransportInfoByName(content->mid()); RTC_DCHECK(transport_info); return transport_info->description.connection_role; } - webrtc::test::ScopedKeyValueConfig field_trials_; - std::unique_ptr vss_; - rtc::AutoSocketServerThread main_; - rtc::scoped_refptr pc_factory_; + std::unique_ptr vss_; + AutoSocketServerThread main_; + scoped_refptr pc_factory_; const SdpSemantics sdp_semantics_; }; SdpContentPredicate HaveDtlsFingerprint() { - return [](const cricket::ContentInfo* content, - const cricket::TransportInfo* transport) { + return [](const ContentInfo* content, const TransportInfo* transport) { return transport->description.identity_fingerprint != nullptr; }; } -SdpContentPredicate HaveSdesCryptos() { - return [](const cricket::ContentInfo* content, - const cricket::TransportInfo* transport) { - return !content->media_description()->cryptos().empty(); - }; -} - SdpContentPredicate HaveProtocol(const std::string& protocol) { - return [protocol](const cricket::ContentInfo* content, - const cricket::TransportInfo* transport) { - return content->media_description()->protocol() == protocol; - }; -} - -SdpContentPredicate HaveSdesGcmCryptos(size_t num_crypto_suites) { - return [num_crypto_suites](const cricket::ContentInfo* content, - const cricket::TransportInfo* transport) { - const auto& cryptos = content->media_description()->cryptos(); - if (cryptos.size() != num_crypto_suites) { - return false; - } - for (size_t i = 0; i < cryptos.size(); ++i) { - if (cryptos[i].key_params.size() == 67U && - cryptos[i].crypto_suite == "AEAD_AES_256_GCM") - return true; - } - return false; - }; + return + [protocol](const ContentInfo* content, const TransportInfo* transport) { + return content->media_description()->protocol() == protocol; + }; } class PeerConnectionCryptoTest @@ -213,20 +180,13 @@ class PeerConnectionCryptoTest PeerConnectionCryptoTest() : PeerConnectionCryptoBaseTest(GetParam()) {} }; -SdpContentMutator RemoveSdesCryptos() { - return [](cricket::ContentInfo* content, cricket::TransportInfo* transport) { - content->media_description()->set_cryptos({}); - }; -} - SdpContentMutator RemoveDtlsFingerprint() { - return [](cricket::ContentInfo* content, cricket::TransportInfo* transport) { + return [](ContentInfo* content, TransportInfo* transport) { 
transport->description.identity_fingerprint.reset(); }; } -// When DTLS is enabled, the SDP offer/answer should have a DTLS fingerprint and -// no SDES cryptos. +// When DTLS is enabled, the SDP offer/answer should have a DTLS fingerprint TEST_P(PeerConnectionCryptoTest, CorrectCryptoInOfferWhenDtlsEnabled) { RTCConfiguration config; auto caller = CreatePeerConnectionWithAudioVideo(config); @@ -236,8 +196,7 @@ TEST_P(PeerConnectionCryptoTest, CorrectCryptoInOfferWhenDtlsEnabled) { ASSERT_FALSE(offer->description()->contents().empty()); EXPECT_TRUE(SdpContentsAll(HaveDtlsFingerprint(), offer->description())); - EXPECT_TRUE(SdpContentsNone(HaveSdesCryptos(), offer->description())); - EXPECT_TRUE(SdpContentsAll(HaveProtocol(cricket::kMediaProtocolDtlsSavpf), + EXPECT_TRUE(SdpContentsAll(HaveProtocol(kMediaProtocolDtlsSavpf), offer->description())); } TEST_P(PeerConnectionCryptoTest, CorrectCryptoInAnswerWhenDtlsEnabled) { @@ -251,238 +210,10 @@ TEST_P(PeerConnectionCryptoTest, CorrectCryptoInAnswerWhenDtlsEnabled) { ASSERT_FALSE(answer->description()->contents().empty()); EXPECT_TRUE(SdpContentsAll(HaveDtlsFingerprint(), answer->description())); - EXPECT_TRUE(SdpContentsNone(HaveSdesCryptos(), answer->description())); - EXPECT_TRUE(SdpContentsAll(HaveProtocol(cricket::kMediaProtocolDtlsSavpf), - answer->description())); -} - -#if defined(WEBRTC_FUCHSIA) -// When DTLS is disabled, the SDP offer/answer should include SDES cryptos and -// should not have a DTLS fingerprint. -TEST_P(PeerConnectionCryptoTest, CorrectCryptoInOfferWhenDtlsDisabled) { - RTCConfiguration config; - config.enable_dtls_srtp.emplace(false); - auto caller = CreatePeerConnectionWithAudioVideo(config); - - auto offer = caller->CreateOffer(); - ASSERT_TRUE(offer); - - ASSERT_FALSE(offer->description()->contents().empty()); - EXPECT_TRUE(SdpContentsAll(HaveSdesCryptos(), offer->description())); - EXPECT_TRUE(SdpContentsNone(HaveDtlsFingerprint(), offer->description())); - EXPECT_TRUE(SdpContentsAll(HaveProtocol(cricket::kMediaProtocolSavpf), - offer->description())); -} - -TEST_P(PeerConnectionCryptoTest, CorrectCryptoInAnswerWhenDtlsDisabled) { - RTCConfiguration config; - config.enable_dtls_srtp.emplace(false); - auto caller = CreatePeerConnectionWithAudioVideo(config); - auto callee = CreatePeerConnectionWithAudioVideo(config); - - callee->SetRemoteDescription(caller->CreateOffer()); - auto answer = callee->CreateAnswer(); - ASSERT_TRUE(answer); - - ASSERT_FALSE(answer->description()->contents().empty()); - EXPECT_TRUE(SdpContentsAll(HaveSdesCryptos(), answer->description())); - EXPECT_TRUE(SdpContentsNone(HaveDtlsFingerprint(), answer->description())); - EXPECT_TRUE(SdpContentsAll(HaveProtocol(cricket::kMediaProtocolSavpf), - answer->description())); -} - -// When encryption is disabled, the SDP offer/answer should have neither a DTLS -// fingerprint nor any SDES crypto options. 
-TEST_P(PeerConnectionCryptoTest, CorrectCryptoInOfferWhenEncryptionDisabled) { - PeerConnectionFactoryInterface::Options options; - options.disable_encryption = true; - pc_factory_->SetOptions(options); - - RTCConfiguration config; - config.enable_dtls_srtp.emplace(false); - auto caller = CreatePeerConnectionWithAudioVideo(config); - - auto offer = caller->CreateOffer(); - ASSERT_TRUE(offer); - - ASSERT_FALSE(offer->description()->contents().empty()); - EXPECT_TRUE(SdpContentsNone(HaveSdesCryptos(), offer->description())); - EXPECT_TRUE(SdpContentsNone(HaveDtlsFingerprint(), offer->description())); - EXPECT_TRUE(SdpContentsAll(HaveProtocol(cricket::kMediaProtocolAvpf), - offer->description())); -} - -TEST_P(PeerConnectionCryptoTest, CorrectCryptoInAnswerWhenEncryptionDisabled) { - PeerConnectionFactoryInterface::Options options; - options.disable_encryption = true; - pc_factory_->SetOptions(options); - - RTCConfiguration config; - config.enable_dtls_srtp.emplace(false); - auto caller = CreatePeerConnectionWithAudioVideo(config); - auto callee = CreatePeerConnectionWithAudioVideo(config); - - callee->SetRemoteDescription(caller->CreateOffer()); - auto answer = callee->CreateAnswer(); - ASSERT_TRUE(answer); - - ASSERT_FALSE(answer->description()->contents().empty()); - EXPECT_TRUE(SdpContentsNone(HaveSdesCryptos(), answer->description())); - EXPECT_TRUE(SdpContentsNone(HaveDtlsFingerprint(), answer->description())); - EXPECT_TRUE(SdpContentsAll(HaveProtocol(cricket::kMediaProtocolAvpf), + EXPECT_TRUE(SdpContentsAll(HaveProtocol(kMediaProtocolDtlsSavpf), answer->description())); } -// CryptoOptions has been promoted to RTCConfiguration. As such if it is ever -// set in the configuration it should overrite the settings set in the factory. -TEST_P(PeerConnectionCryptoTest, RTCConfigurationCryptoOptionOverridesFactory) { - PeerConnectionFactoryInterface::Options options; - options.crypto_options.srtp.enable_gcm_crypto_suites = true; - pc_factory_->SetOptions(options); - - RTCConfiguration config; - config.enable_dtls_srtp.emplace(false); - CryptoOptions crypto_options; - crypto_options.srtp.enable_gcm_crypto_suites = false; - config.crypto_options = crypto_options; - auto caller = CreatePeerConnectionWithAudioVideo(config); - - auto offer = caller->CreateOffer(); - ASSERT_TRUE(offer); - - ASSERT_FALSE(offer->description()->contents().empty()); - // This should exist if GCM is enabled see CorrectCryptoInOfferWithSdesAndGcm - EXPECT_FALSE(SdpContentsAll(HaveSdesGcmCryptos(3), offer->description())); -} - -// When DTLS is disabled and GCM cipher suites are enabled, the SDP offer/answer -// should have the correct ciphers in the SDES crypto options. -// With GCM cipher suites enabled, there will be 3 cryptos in the offer and 1 -// in the answer. 
-TEST_P(PeerConnectionCryptoTest, CorrectCryptoInOfferWithSdesAndGcm) { - PeerConnectionFactoryInterface::Options options; - options.crypto_options.srtp.enable_gcm_crypto_suites = true; - pc_factory_->SetOptions(options); - - RTCConfiguration config; - config.enable_dtls_srtp.emplace(false); - auto caller = CreatePeerConnectionWithAudioVideo(config); - - auto offer = caller->CreateOffer(); - ASSERT_TRUE(offer); - - ASSERT_FALSE(offer->description()->contents().empty()); - EXPECT_TRUE(SdpContentsAll(HaveSdesGcmCryptos(3), offer->description())); -} - -TEST_P(PeerConnectionCryptoTest, CorrectCryptoInAnswerWithSdesAndGcm) { - PeerConnectionFactoryInterface::Options options; - options.crypto_options.srtp.enable_gcm_crypto_suites = true; - pc_factory_->SetOptions(options); - - RTCConfiguration config; - config.enable_dtls_srtp.emplace(false); - auto caller = CreatePeerConnectionWithAudioVideo(config); - auto callee = CreatePeerConnectionWithAudioVideo(config); - - auto offer = caller->CreateOffer(); - for (cricket::ContentInfo& content : offer->description()->contents()) { - auto cryptos = content.media_description()->cryptos(); - cryptos.erase(cryptos.begin()); // Assumes that non-GCM is the default. - content.media_description()->set_cryptos(cryptos); - } - - callee->SetRemoteDescription(std::move(offer)); - auto answer = callee->CreateAnswer(); - ASSERT_TRUE(answer); - - ASSERT_FALSE(answer->description()->contents().empty()); - EXPECT_TRUE(SdpContentsAll(HaveSdesGcmCryptos(1), answer->description())); -} - -TEST_P(PeerConnectionCryptoTest, CanSetSdesGcmRemoteOfferAndLocalAnswer) { - PeerConnectionFactoryInterface::Options options; - options.crypto_options.srtp.enable_gcm_crypto_suites = true; - pc_factory_->SetOptions(options); - - RTCConfiguration config; - config.enable_dtls_srtp.emplace(false); - auto caller = CreatePeerConnectionWithAudioVideo(config); - auto callee = CreatePeerConnectionWithAudioVideo(config); - - auto offer = caller->CreateOffer(); - ASSERT_TRUE(offer); - ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); - - auto answer = callee->CreateAnswer(); - ASSERT_TRUE(answer); - ASSERT_TRUE(callee->SetLocalDescription(std::move(answer))); -} - -// The following group tests that two PeerConnections can successfully exchange -// an offer/answer when DTLS is off and that they will refuse any offer/answer -// applied locally/remotely if it does not include SDES cryptos. 
-TEST_P(PeerConnectionCryptoTest, ExchangeOfferAnswerWhenSdesOn) { - RTCConfiguration config; - config.enable_dtls_srtp.emplace(false); - auto caller = CreatePeerConnectionWithAudioVideo(config); - auto callee = CreatePeerConnectionWithAudioVideo(config); - - auto offer = caller->CreateOfferAndSetAsLocal(); - ASSERT_TRUE(offer); - ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); - - auto answer = callee->CreateAnswerAndSetAsLocal(); - ASSERT_TRUE(answer); - ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer))); -} -TEST_P(PeerConnectionCryptoTest, FailToSetLocalOfferWithNoCryptosWhenSdesOn) { - RTCConfiguration config; - config.enable_dtls_srtp.emplace(false); - auto caller = CreatePeerConnectionWithAudioVideo(config); - - auto offer = caller->CreateOffer(); - SdpContentsForEach(RemoveSdesCryptos(), offer->description()); - - EXPECT_FALSE(caller->SetLocalDescription(std::move(offer))); -} -TEST_P(PeerConnectionCryptoTest, FailToSetRemoteOfferWithNoCryptosWhenSdesOn) { - RTCConfiguration config; - config.enable_dtls_srtp.emplace(false); - auto caller = CreatePeerConnectionWithAudioVideo(config); - auto callee = CreatePeerConnectionWithAudioVideo(config); - - auto offer = caller->CreateOffer(); - SdpContentsForEach(RemoveSdesCryptos(), offer->description()); - - EXPECT_FALSE(callee->SetRemoteDescription(std::move(offer))); -} -TEST_P(PeerConnectionCryptoTest, FailToSetLocalAnswerWithNoCryptosWhenSdesOn) { - RTCConfiguration config; - config.enable_dtls_srtp.emplace(false); - auto caller = CreatePeerConnectionWithAudioVideo(config); - auto callee = CreatePeerConnectionWithAudioVideo(config); - - callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()); - auto answer = callee->CreateAnswer(); - SdpContentsForEach(RemoveSdesCryptos(), answer->description()); - - EXPECT_FALSE(callee->SetLocalDescription(std::move(answer))); -} -TEST_P(PeerConnectionCryptoTest, FailToSetRemoteAnswerWithNoCryptosWhenSdesOn) { - RTCConfiguration config; - config.enable_dtls_srtp.emplace(false); - auto caller = CreatePeerConnectionWithAudioVideo(config); - auto callee = CreatePeerConnectionWithAudioVideo(config); - - callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()); - auto answer = callee->CreateAnswerAndSetAsLocal(); - SdpContentsForEach(RemoveSdesCryptos(), answer->description()); - - EXPECT_FALSE(caller->SetRemoteDescription(std::move(answer))); -} -#endif - // The following group tests that two PeerConnections can successfully exchange // an offer/answer when DTLS is on and that they will refuse any offer/answer // applied locally/remotely if it does not include a DTLS fingerprint. @@ -543,28 +274,6 @@ TEST_P(PeerConnectionCryptoTest, EXPECT_FALSE(caller->SetRemoteDescription(std::move(answer))); } -#if defined(WEBRTC_FUCHSIA) -// Test that an offer/answer can be exchanged when encryption is disabled. 
-TEST_P(PeerConnectionCryptoTest, ExchangeOfferAnswerWhenNoEncryption) { - PeerConnectionFactoryInterface::Options options; - options.disable_encryption = true; - pc_factory_->SetOptions(options); - - RTCConfiguration config; - config.enable_dtls_srtp.emplace(false); - auto caller = CreatePeerConnectionWithAudioVideo(config); - auto callee = CreatePeerConnectionWithAudioVideo(config); - - auto offer = caller->CreateOfferAndSetAsLocal(); - ASSERT_TRUE(offer); - ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); - - auto answer = callee->CreateAnswerAndSetAsLocal(); - ASSERT_TRUE(answer); - ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer))); -} -#endif - // Tests that a DTLS call can be established when the certificate is specified // in the PeerConnection config and no certificate generator is specified. TEST_P(PeerConnectionCryptoTest, @@ -663,19 +372,23 @@ TEST_P(PeerConnectionCryptoDtlsCertGenTest, TestCertificateGeneration) { pc->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()); } if (cert_gen_time_ == CertGenTime::kBefore) { - ASSERT_TRUE_WAIT(fake_certificate_generator->generated_certificates() + - fake_certificate_generator->generated_failures() > - 0, - kGenerateCertTimeout); + ASSERT_THAT( + WaitUntil( + [&] { + return fake_certificate_generator->generated_certificates() + + fake_certificate_generator->generated_failures(); + }, + ::testing::Gt(0), + {.timeout = webrtc::TimeDelta::Millis(kGenerateCertTimeout)}), + IsRtcOk()); } else { ASSERT_EQ(fake_certificate_generator->generated_certificates(), 0); fake_certificate_generator->set_should_wait(false); } - std::vector> - observers; + std::vector> observers; for (size_t i = 0; i < concurrent_calls_; i++) { - rtc::scoped_refptr observer = - rtc::make_ref_counted(); + scoped_refptr observer = + make_ref_counted(); observers.push_back(observer); if (sdp_type_ == SdpType::kOffer) { pc->pc()->CreateOffer(observer.get(), @@ -686,7 +399,9 @@ TEST_P(PeerConnectionCryptoDtlsCertGenTest, TestCertificateGeneration) { } } for (auto& observer : observers) { - EXPECT_TRUE_WAIT(observer->called(), 1000); + EXPECT_THAT( + WaitUntil([&] { return observer->called(); }, ::testing::IsTrue()), + IsRtcOk()); if (cert_gen_result_ == CertGenResult::kSucceed) { EXPECT_TRUE(observer->result()); } else { @@ -718,8 +433,8 @@ TEST_P(PeerConnectionCryptoTest, CreateAnswerWithDifferentSslRoles) { ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); auto answer = callee->CreateAnswer(options_no_bundle); - AudioConnectionRole(answer->description()) = cricket::CONNECTIONROLE_ACTIVE; - VideoConnectionRole(answer->description()) = cricket::CONNECTIONROLE_PASSIVE; + AudioConnectionRole(answer->description()) = CONNECTIONROLE_ACTIVE; + VideoConnectionRole(answer->description()) = CONNECTIONROLE_PASSIVE; ASSERT_TRUE( callee->SetLocalDescription(CloneSessionDescription(answer.get()))); @@ -730,10 +445,8 @@ TEST_P(PeerConnectionCryptoTest, CreateAnswerWithDifferentSslRoles) { ASSERT_TRUE(caller->SetRemoteDescription(callee->CreateOfferAndSetAsLocal())); answer = caller->CreateAnswer(options_no_bundle); - EXPECT_EQ(cricket::CONNECTIONROLE_PASSIVE, - AudioConnectionRole(answer->description())); - EXPECT_EQ(cricket::CONNECTIONROLE_ACTIVE, - VideoConnectionRole(answer->description())); + EXPECT_EQ(CONNECTIONROLE_PASSIVE, AudioConnectionRole(answer->description())); + EXPECT_EQ(CONNECTIONROLE_ACTIVE, VideoConnectionRole(answer->description())); ASSERT_TRUE( caller->SetLocalDescription(CloneSessionDescription(answer.get()))); 
@@ -748,10 +461,8 @@ TEST_P(PeerConnectionCryptoTest, CreateAnswerWithDifferentSslRoles) { ASSERT_TRUE(caller->SetRemoteDescription(callee->CreateOfferAndSetAsLocal())); answer = caller->CreateAnswer(options_bundle); - EXPECT_EQ(cricket::CONNECTIONROLE_PASSIVE, - AudioConnectionRole(answer->description())); - EXPECT_EQ(cricket::CONNECTIONROLE_PASSIVE, - VideoConnectionRole(answer->description())); + EXPECT_EQ(CONNECTIONROLE_PASSIVE, AudioConnectionRole(answer->description())); + EXPECT_EQ(CONNECTIONROLE_PASSIVE, VideoConnectionRole(answer->description())); ASSERT_TRUE( caller->SetLocalDescription(CloneSessionDescription(answer.get()))); @@ -763,8 +474,8 @@ TEST_P(PeerConnectionCryptoTest, CreateAnswerWithDifferentSslRoles) { // error. // This is a regression test for crbug.com/800775 TEST_P(PeerConnectionCryptoTest, SessionErrorIfFingerprintInvalid) { - auto callee_certificate = rtc::RTCCertificate::FromPEM(kRsaPems[0]); - auto other_certificate = rtc::RTCCertificate::FromPEM(kRsaPems[1]); + auto callee_certificate = RTCCertificate::FromPEM(kRsaPems[0]); + auto other_certificate = RTCCertificate::FromPEM(kRsaPems[1]); auto caller = CreatePeerConnectionWithAudioVideo(); RTCConfiguration callee_config; @@ -776,29 +487,25 @@ TEST_P(PeerConnectionCryptoTest, SessionErrorIfFingerprintInvalid) { // Create an invalid answer with the other certificate's fingerprint. auto valid_answer = callee->CreateAnswer(); auto invalid_answer = CloneSessionDescription(valid_answer.get()); - auto* audio_content = - cricket::GetFirstAudioContent(invalid_answer->description()); + auto* audio_content = GetFirstAudioContent(invalid_answer->description()); ASSERT_TRUE(audio_content); auto* audio_transport_info = invalid_answer->description()->GetTransportInfoByName( - audio_content->name); + audio_content->mid()); ASSERT_TRUE(audio_transport_info); audio_transport_info->description.identity_fingerprint = - rtc::SSLFingerprint::CreateFromCertificate(*other_certificate); + SSLFingerprint::CreateFromCertificate(*other_certificate); // Set the invalid answer and expect a fingerprint error. std::string error; ASSERT_FALSE(callee->SetLocalDescription(std::move(invalid_answer), &error)); - EXPECT_PRED_FORMAT2(AssertStringContains, error, - "Local fingerprint does not match identity."); + EXPECT_THAT(error, HasSubstr("Local fingerprint does not match identity.")); // Make sure that setting a valid remote offer or local answer also fails now. 
ASSERT_FALSE(callee->SetRemoteDescription(caller->CreateOffer(), &error)); - EXPECT_PRED_FORMAT2(AssertStringContains, error, - "Session error code: ERROR_CONTENT."); + EXPECT_THAT(error, HasSubstr("Session error code: ERROR_CONTENT.")); ASSERT_FALSE(callee->SetLocalDescription(std::move(valid_answer), &error)); - EXPECT_PRED_FORMAT2(AssertStringContains, error, - "Session error code: ERROR_CONTENT."); + EXPECT_THAT(error, HasSubstr("Session error code: ERROR_CONTENT.")); } INSTANTIATE_TEST_SUITE_P(PeerConnectionCryptoTest, diff --git a/pc/peer_connection_data_channel_unittest.cc b/pc/peer_connection_data_channel_unittest.cc index 3bb2088866..47a85a5e48 100644 --- a/pc/peer_connection_data_channel_unittest.cc +++ b/pc/peer_connection_data_channel_unittest.cc @@ -9,44 +9,37 @@ */ #include +#include #include -#include #include #include -#include "absl/types/optional.h" -#include "api/call/call_factory_interface.h" #include "api/jsep.h" #include "api/media_types.h" #include "api/peer_connection_interface.h" #include "api/scoped_refptr.h" #include "api/sctp_transport_interface.h" #include "api/task_queue/default_task_queue_factory.h" -#include "api/task_queue/task_queue_factory.h" -#include "api/transport/sctp_transport_factory_interface.h" -#include "media/base/fake_media_engine.h" -#include "media/base/media_engine.h" #include "p2p/base/p2p_constants.h" -#include "p2p/base/port_allocator.h" #include "pc/media_session.h" #include "pc/peer_connection.h" -#include "pc/peer_connection_proxy.h" #include "pc/peer_connection_wrapper.h" #include "pc/sctp_transport.h" #include "pc/sdp_utils.h" #include "pc/session_description.h" +#include "pc/test/enable_fake_media.h" #include "pc/test/mock_peer_connection_observers.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/thread.h" +#include "rtc_base/virtual_socket_server.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/pc/sctp/fake_sctp_transport.h" + #ifdef WEBRTC_ANDROID #include "pc/test/android_test_initializer.h" #endif -#include "rtc_base/virtual_socket_server.h" -#include "test/pc/sctp/fake_sctp_transport.h" namespace webrtc { @@ -60,12 +53,11 @@ namespace { PeerConnectionFactoryDependencies CreatePeerConnectionFactoryDependencies() { PeerConnectionFactoryDependencies deps; - deps.network_thread = rtc::Thread::Current(); - deps.worker_thread = rtc::Thread::Current(); - deps.signaling_thread = rtc::Thread::Current(); + deps.network_thread = Thread::Current(); + deps.worker_thread = Thread::Current(); + deps.signaling_thread = Thread::Current(); deps.task_queue_factory = CreateDefaultTaskQueueFactory(); - deps.media_engine = std::make_unique(); - deps.call_factory = CreateCallFactory(); + EnableFakeMedia(deps); deps.sctp_factory = std::make_unique(); return deps; } @@ -85,21 +77,14 @@ class PeerConnectionWrapperForDataChannelTest : public PeerConnectionWrapper { sctp_transport_factory_ = sctp_transport_factory; } - absl::optional sctp_mid() { + std::optional sctp_mid() { return GetInternalPeerConnection()->sctp_mid(); } - absl::optional sctp_transport_name() { + std::optional sctp_transport_name() { return GetInternalPeerConnection()->sctp_transport_name(); } - PeerConnection* GetInternalPeerConnection() { - auto* pci = - static_cast*>( - pc()); - return static_cast(pci->internal()); - } - private: FakeSctpTransportFactory* sctp_transport_factory_ = nullptr; }; @@ -109,7 +94,7 @@ class PeerConnectionDataChannelBaseTest : public ::testing::Test { typedef 
std::unique_ptr WrapperPtr; explicit PeerConnectionDataChannelBaseTest(SdpSemantics sdp_semantics) - : vss_(new rtc::VirtualSocketServer()), + : vss_(new VirtualSocketServer()), main_(vss_.get()), sdp_semantics_(sdp_semantics) { #ifdef WEBRTC_ANDROID @@ -132,7 +117,7 @@ class PeerConnectionDataChannelBaseTest : public ::testing::Test { auto factory_deps = CreatePeerConnectionFactoryDependencies(); FakeSctpTransportFactory* fake_sctp_transport_factory = static_cast(factory_deps.sctp_factory.get()); - rtc::scoped_refptr pc_factory = + scoped_refptr pc_factory = CreateModularPeerConnectionFactory(std::move(factory_deps)); pc_factory->SetOptions(factory_options); auto observer = std::make_unique(); @@ -164,17 +149,16 @@ class PeerConnectionDataChannelBaseTest : public ::testing::Test { } // Changes the SCTP data channel port on the given session description. - void ChangeSctpPortOnDescription(cricket::SessionDescription* desc, - int port) { - auto* data_content = cricket::GetFirstDataContent(desc); + void ChangeSctpPortOnDescription(SessionDescription* desc, int port) { + auto* data_content = GetFirstDataContent(desc); RTC_DCHECK(data_content); auto* data_desc = data_content->media_description()->as_sctp(); RTC_DCHECK(data_desc); data_desc->set_port(port); } - std::unique_ptr vss_; - rtc::AutoSocketServerThread main_; + std::unique_ptr vss_; + AutoSocketServerThread main_; const SdpSemantics sdp_semantics_; }; @@ -199,7 +183,7 @@ TEST_P(PeerConnectionDataChannelTest, InternalSctpTransportDeletedOnTeardown) { ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer())); EXPECT_TRUE(caller->sctp_transport_factory()->last_fake_sctp_transport()); - rtc::scoped_refptr sctp_transport = + scoped_refptr sctp_transport = caller->GetInternalPeerConnection()->GetSctpTransport(); caller.reset(); @@ -226,12 +210,12 @@ TEST_P(PeerConnectionDataChannelTest, SctpContentAndTransportNameSetCorrectly) { auto offer = caller->CreateOffer(); const auto& offer_contents = offer->description()->contents(); - ASSERT_EQ(cricket::MEDIA_TYPE_AUDIO, + ASSERT_EQ(webrtc::MediaType::AUDIO, offer_contents[0].media_description()->type()); - std::string audio_mid = offer_contents[0].name; - ASSERT_EQ(cricket::MEDIA_TYPE_DATA, + auto audio_mid = offer_contents[0].mid(); + ASSERT_EQ(webrtc::MediaType::DATA, offer_contents[2].media_description()->type()); - std::string data_mid = offer_contents[2].name; + auto data_mid = offer_contents[2].mid(); ASSERT_TRUE( caller->SetLocalDescription(CloneSessionDescription(offer.get()))); @@ -260,8 +244,8 @@ TEST_P(PeerConnectionDataChannelTest, auto caller = CreatePeerConnection(); auto offer = caller->CreateOffer(); - EXPECT_FALSE(offer->description()->GetContentByName(cricket::CN_DATA)); - EXPECT_FALSE(offer->description()->GetTransportInfoByName(cricket::CN_DATA)); + EXPECT_FALSE(offer->description()->GetContentByName(CN_DATA)); + EXPECT_FALSE(offer->description()->GetTransportInfoByName(CN_DATA)); } TEST_P(PeerConnectionDataChannelTest, @@ -273,11 +257,11 @@ TEST_P(PeerConnectionDataChannelTest, auto answer = callee->CreateAnswer(); ASSERT_TRUE(answer); - auto* data_content = cricket::GetFirstDataContent(answer->description()); + auto* data_content = GetFirstDataContent(answer->description()); ASSERT_TRUE(data_content); EXPECT_FALSE(data_content->rejected); EXPECT_TRUE( - answer->description()->GetTransportInfoByName(data_content->name)); + answer->description()->GetTransportInfoByName(data_content->mid())); } TEST_P(PeerConnectionDataChannelTest, SctpPortPropagatedFromSdpToTransport) { 
@@ -307,8 +291,8 @@ TEST_P(PeerConnectionDataChannelTest, ModernSdpSyntaxByDefault) { PeerConnectionInterface::RTCOfferAnswerOptions options; auto caller = CreatePeerConnectionWithDataChannel(); auto offer = caller->CreateOffer(options); - EXPECT_FALSE(cricket::GetFirstSctpDataContentDescription(offer->description()) - ->use_sctpmap()); + EXPECT_FALSE( + GetFirstSctpDataContentDescription(offer->description())->use_sctpmap()); std::string sdp; offer->ToString(&sdp); RTC_LOG(LS_ERROR) << sdp; @@ -321,8 +305,8 @@ TEST_P(PeerConnectionDataChannelTest, ObsoleteSdpSyntaxIfSet) { options.use_obsolete_sctp_sdp = true; auto caller = CreatePeerConnectionWithDataChannel(); auto offer = caller->CreateOffer(options); - EXPECT_TRUE(cricket::GetFirstSctpDataContentDescription(offer->description()) - ->use_sctpmap()); + EXPECT_TRUE( + GetFirstSctpDataContentDescription(offer->description())->use_sctpmap()); std::string sdp; offer->ToString(&sdp); EXPECT_THAT(sdp, Not(HasSubstr(" UDP/DTLS/SCTP webrtc-datachannel"))); diff --git a/pc/peer_connection_encodings_integrationtest.cc b/pc/peer_connection_encodings_integrationtest.cc index 5b25e293cd..2834f4c598 100644 --- a/pc/peer_connection_encodings_integrationtest.cc +++ b/pc/peer_connection_encodings_integrationtest.cc @@ -8,58 +8,93 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include +#include +#include +#include +#include +#include #include +#include +#include #include +#include "absl/algorithm/container.h" #include "absl/strings/match.h" -#include "absl/types/optional.h" +#include "absl/strings/string_view.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" -#include "api/audio_codecs/opus_audio_decoder_factory.h" -#include "api/audio_codecs/opus_audio_encoder_factory.h" +#include "api/audio_options.h" +#include "api/field_trials.h" +#include "api/field_trials_view.h" +#include "api/jsep.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" #include "api/media_types.h" #include "api/rtc_error.h" #include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_direction.h" #include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" +#include "api/test/rtc_error_matchers.h" #include "api/units/data_rate.h" -#include "api/video_codecs/video_decoder_factory_template.h" -#include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" -#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h" -#include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h" -#include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h" -#include "api/video_codecs/video_encoder_factory_template.h" -#include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h" -#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h" -#include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h" -#include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" +#include "api/units/time_delta.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/sdp_video_format.h" +#include "media/base/codec.h" +#include "media/engine/fake_webrtc_video_engine.h" #include "pc/sdp_utils.h" +#include "pc/session_description.h" #include "pc/simulcast_description.h" #include 
"pc/test/mock_peer_connection_observers.h" #include "pc/test/peer_connection_test_wrapper.h" #include "pc/test/simulcast_layer_util.h" -#include "rtc_base/gunit.h" +#include "rtc_base/checks.h" +#include "rtc_base/containers/flat_map.h" +#include "rtc_base/logging.h" #include "rtc_base/physical_socket_server.h" +#include "rtc_base/thread.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" +using ::testing::AllOf; +using ::testing::AnyOf; +using ::testing::Contains; +using ::testing::Each; using ::testing::Eq; +using ::testing::Field; +using ::testing::Gt; +using ::testing::HasSubstr; +using ::testing::IsSupersetOf; +using ::testing::IsTrue; +using ::testing::Key; +using ::testing::Le; +using ::testing::Matcher; +using ::testing::Ne; +using ::testing::NotNull; using ::testing::Optional; +using ::testing::Pair; +using ::testing::Pointer; +using ::testing::ResultOf; using ::testing::SizeIs; using ::testing::StrCaseEq; using ::testing::StrEq; +using ::testing::UnorderedElementsAre; namespace webrtc { namespace { -constexpr TimeDelta kDefaultTimeout = TimeDelta::Seconds(5); // Most tests pass in 20-30 seconds, but some tests take longer such as AV1 // requiring additional ramp-up time (https://crbug.com/webrtc/15006) or SVC // (LxTx_KEY) being slower than simulcast to send top spatial layer. -// TODO(https://crbug.com/webrtc/15076): Remove need for long rampup timeouts by -// using simulated time. +// TODO(https://crbug.com/webrtc/15076): Remove need for long ramp-up timeouts +// by using simulated time. constexpr TimeDelta kLongTimeoutForRampingUp = TimeDelta::Minutes(1); // The max bitrate 1500 kbps may be subject to change in the future. What we're @@ -68,41 +103,118 @@ constexpr TimeDelta kLongTimeoutForRampingUp = TimeDelta::Minutes(1); constexpr DataRate kVp9ExpectedMaxBitrateForL1T3 = DataRate::KilobitsPerSec(1500); +auto EncoderImplementationIs(absl::string_view impl) { + return Field("encoder_implementation", + &RTCOutboundRtpStreamStats::encoder_implementation, + Optional(StrEq(impl))); +} + +template +auto ScalabilityModeIs(M matcher) { + return Field("scalability_mode", &RTCOutboundRtpStreamStats::scalability_mode, + matcher); +} + +template +auto CodecIs(M matcher) { + return Field("codec_id", &RTCOutboundRtpStreamStats::codec_id, matcher); +} + +template +auto RidIs(M matcher) { + return Field("rid", &RTCOutboundRtpStreamStats::rid, matcher); +} + +template +auto ResolutionIs(WidthMatcher width_matcher, HeightMatcher height_matcher) { + return AllOf(Field("frame_width", &RTCOutboundRtpStreamStats::frame_width, + width_matcher), + Field("frame_height", &RTCOutboundRtpStreamStats::frame_height, + height_matcher)); +} + +template +auto HeightIs(M matcher) { + return Field("frame_height", &RTCOutboundRtpStreamStats::frame_height, + matcher); +} + +template +auto BytesSentIs(M matcher) { + return Field("bytes_sent", &RTCOutboundRtpStreamStats::bytes_sent, matcher); +} + +template +auto FramesEncodedIs(M matcher) { + return Field("frames_encoded", &RTCOutboundRtpStreamStats::frames_encoded, + matcher); +} + +auto Active() { + return Field("active", &RTCOutboundRtpStreamStats::active, true); +} + +Matcher> OutboundRtpStatsAre( + Matcher> matcher) { + return Pointer(ResultOf( + "outbound_rtp", + [&](const RTCStatsReport* report) { + std::vector stats = + report->GetStatsOfType(); + + // Copy to a new vector. 
+ std::vector stats_copy; + stats_copy.reserve(stats.size()); + for (const auto* stat : stats) { + stats_copy.emplace_back(*stat); + } + return stats_copy; + }, + matcher)); +} + +auto HasOutboundRtpBytesSent(size_t num_layers, size_t num_active_layers) { + return OutboundRtpStatsAre(AllOf( + SizeIs(num_layers), + testing::Contains( + Field("bytes_sent", &RTCOutboundRtpStreamStats::bytes_sent, Gt(0))) + .Times(num_active_layers))); +} + +auto HasOutboundRtpBytesSent(size_t num_layers) { + return HasOutboundRtpBytesSent(num_layers, num_layers); +} + +flat_map GetOutboundRtpStreamStatsByRid( + scoped_refptr report) { + flat_map result; + auto stats = report->GetStatsOfType(); + for (const auto* outbound_rtp : stats) { + result.emplace( + std::make_pair(outbound_rtp->rid.value_or(""), *outbound_rtp)); + } + return result; +} + struct StringParamToString { std::string operator()(const ::testing::TestParamInfo& info) { return info.param; } }; -// RTX, RED and FEC are reliability mechanisms used in combinations with other -// codecs, but are not themselves a specific codec. Typically you don't want to -// filter these out of the list of codec preferences. -bool IsReliabilityMechanism(const webrtc::RtpCodecCapability& codec) { - return absl::EqualsIgnoreCase(codec.name, cricket::kRtxCodecName) || - absl::EqualsIgnoreCase(codec.name, cricket::kRedCodecName) || - absl::EqualsIgnoreCase(codec.name, cricket::kUlpfecCodecName); -} - std::string GetCurrentCodecMimeType( - rtc::scoped_refptr report, - const webrtc::RTCOutboundRtpStreamStats& outbound_rtp) { - return outbound_rtp.codec_id.is_defined() - ? *report->GetAs(*outbound_rtp.codec_id) - ->mime_type + scoped_refptr report, + const RTCOutboundRtpStreamStats& outbound_rtp) { + return outbound_rtp.codec_id.has_value() + ? 
*report->GetAs(*outbound_rtp.codec_id)->mime_type : ""; } -struct RidAndResolution { - std::string rid; - uint32_t width; - uint32_t height; -}; - -const webrtc::RTCOutboundRtpStreamStats* FindOutboundRtpByRid( - const std::vector& outbound_rtps, +const RTCOutboundRtpStreamStats* FindOutboundRtpByRid( + const std::vector& outbound_rtps, const absl::string_view& rid) { for (const auto* outbound_rtp : outbound_rtps) { - if (outbound_rtp->rid.is_defined() && *outbound_rtp->rid == rid) { + if (outbound_rtp->rid.has_value() && *outbound_rtp->rid == rid) { return outbound_rtp; } } @@ -114,41 +226,41 @@ const webrtc::RTCOutboundRtpStreamStats* FindOutboundRtpByRid( class PeerConnectionEncodingsIntegrationTest : public ::testing::Test { public: PeerConnectionEncodingsIntegrationTest() - : background_thread_(std::make_unique(&pss_)) { + : background_thread_(std::make_unique(&pss_)) { RTC_CHECK(background_thread_->Start()); } - rtc::scoped_refptr CreatePc() { - auto pc_wrapper = rtc::make_ref_counted( + scoped_refptr CreatePc( + std::unique_ptr field_trials = nullptr) { + auto pc_wrapper = make_ref_counted( "pc", &pss_, background_thread_.get(), background_thread_.get()); - pc_wrapper->CreatePc({}, webrtc::CreateBuiltinAudioEncoderFactory(), - webrtc::CreateBuiltinAudioDecoderFactory()); + pc_wrapper->CreatePc({}, CreateBuiltinAudioEncoderFactory(), + CreateBuiltinAudioDecoderFactory(), + std::move(field_trials)); return pc_wrapper; } - rtc::scoped_refptr AddTransceiverWithSimulcastLayers( - rtc::scoped_refptr local, - rtc::scoped_refptr remote, - std::vector init_layers) { - rtc::scoped_refptr stream = - local->GetUserMedia( - /*audio=*/false, cricket::AudioOptions(), /*video=*/true, - {.width = 1280, .height = 720}); - rtc::scoped_refptr track = stream->GetVideoTracks()[0]; - - RTCErrorOr> - transceiver_or_error = local->pc()->AddTransceiver( - track, CreateTransceiverInit(init_layers)); + scoped_refptr AddTransceiverWithSimulcastLayers( + scoped_refptr local, + scoped_refptr remote, + std::vector init_layers) { + scoped_refptr stream = local->GetUserMedia( + /*audio=*/false, AudioOptions(), /*video=*/true, + {.width = 1280, .height = 720}); + scoped_refptr track = stream->GetVideoTracks()[0]; + + RTCErrorOr> transceiver_or_error = + local->pc()->AddTransceiver(track, CreateTransceiverInit(init_layers)); EXPECT_TRUE(transceiver_or_error.ok()); return transceiver_or_error.value(); } - bool HasSenderVideoCodecCapability( - rtc::scoped_refptr pc_wrapper, + bool HasReceiverVideoCodecCapability( + scoped_refptr pc_wrapper, absl::string_view codec_name) { std::vector codecs = pc_wrapper->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO) + ->GetRtpReceiverCapabilities(webrtc::MediaType::VIDEO) .codecs; return std::find_if(codecs.begin(), codecs.end(), [&codec_name](const RtpCodecCapability& codec) { @@ -157,15 +269,15 @@ class PeerConnectionEncodingsIntegrationTest : public ::testing::Test { } std::vector GetCapabilitiesAndRestrictToCodec( - rtc::scoped_refptr pc_wrapper, + scoped_refptr pc_wrapper, absl::string_view codec_name) { std::vector codecs = pc_wrapper->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO) + ->GetRtpReceiverCapabilities(webrtc::MediaType::VIDEO) .codecs; codecs.erase(std::remove_if(codecs.begin(), codecs.end(), [&codec_name](const RtpCodecCapability& codec) { - return !IsReliabilityMechanism(codec) && + return !codec.IsResiliencyCodec() && !absl::EqualsIgnoreCase(codec.name, codec_name); }), @@ -179,42 +291,59 @@ class 
PeerConnectionEncodingsIntegrationTest : public ::testing::Test { } void ExchangeIceCandidates( - rtc::scoped_refptr local_pc_wrapper, - rtc::scoped_refptr remote_pc_wrapper) { + scoped_refptr local_pc_wrapper, + scoped_refptr remote_pc_wrapper) { local_pc_wrapper->SignalOnIceCandidateReady.connect( remote_pc_wrapper.get(), &PeerConnectionTestWrapper::AddIceCandidate); remote_pc_wrapper->SignalOnIceCandidateReady.connect( local_pc_wrapper.get(), &PeerConnectionTestWrapper::AddIceCandidate); } + // Negotiate without any tweaks (does not work for simulcast loopback). + void Negotiate(scoped_refptr local_pc_wrapper, + scoped_refptr remote_pc_wrapper) { + std::unique_ptr offer = + CreateOffer(local_pc_wrapper); + scoped_refptr p1 = + SetLocalDescription(local_pc_wrapper, offer.get()); + scoped_refptr p2 = + SetRemoteDescription(remote_pc_wrapper, offer.get()); + EXPECT_TRUE(Await({p1, p2})); + std::unique_ptr answer = + CreateAnswer(remote_pc_wrapper); + p1 = SetLocalDescription(remote_pc_wrapper, answer.get()); + p2 = SetRemoteDescription(local_pc_wrapper, answer.get()); + EXPECT_TRUE(Await({p1, p2})); + } + void NegotiateWithSimulcastTweaks( - rtc::scoped_refptr local_pc_wrapper, - rtc::scoped_refptr remote_pc_wrapper) { + scoped_refptr local_pc_wrapper, + scoped_refptr remote_pc_wrapper) { // Create and set offer for `local_pc_wrapper`. std::unique_ptr offer = CreateOffer(local_pc_wrapper); - rtc::scoped_refptr p1 = + scoped_refptr p1 = SetLocalDescription(local_pc_wrapper, offer.get()); // Modify the offer before handoff because `remote_pc_wrapper` only supports // receiving singlecast. - cricket::SimulcastDescription simulcast_description = - RemoveSimulcast(offer.get()); - rtc::scoped_refptr p2 = + SimulcastDescription simulcast_description = RemoveSimulcast(offer.get()); + scoped_refptr p2 = SetRemoteDescription(remote_pc_wrapper, offer.get()); EXPECT_TRUE(Await({p1, p2})); // Create and set answer for `remote_pc_wrapper`. std::unique_ptr answer = CreateAnswer(remote_pc_wrapper); + EXPECT_TRUE(answer); p1 = SetLocalDescription(remote_pc_wrapper, answer.get()); // Modify the answer before handoff because `local_pc_wrapper` should still // send simulcast. 
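+    // The singlecast answer's stream params are cleared and the offer's send
+    // layers are mirrored into the answer's simulcast attribute as receive
+    // layers, so the sender keeps its simulcast encodings.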
- cricket::MediaContentDescription* mcd_answer = + MediaContentDescription* mcd_answer = answer->description()->contents()[0].media_description(); mcd_answer->mutable_streams().clear(); - std::vector simulcast_layers = + std::vector simulcast_layers = simulcast_description.send_layers().GetAllLayers(); - cricket::SimulcastLayerList& receive_layers = + SimulcastLayerList& receive_layers = mcd_answer->simulcast_description().receive_layers(); for (const auto& layer : simulcast_layers) { receive_layers.AddLayer(layer); @@ -223,159 +352,55 @@ class PeerConnectionEncodingsIntegrationTest : public ::testing::Test { EXPECT_TRUE(Await({p1, p2})); } - rtc::scoped_refptr GetStats( - rtc::scoped_refptr pc_wrapper) { - auto callback = rtc::make_ref_counted(); + scoped_refptr GetStats( + scoped_refptr pc_wrapper) { + auto callback = make_ref_counted(); pc_wrapper->pc()->GetStats(callback.get()); - EXPECT_TRUE_WAIT(callback->called(), kDefaultTimeout.ms()); + RTC_CHECK(WaitUntil([&]() { return callback->called(); }, testing::IsTrue()) + .ok()); return callback->report(); } - bool IsCodecIdDifferent( - rtc::scoped_refptr pc_wrapper, - size_t index, - const std::string& codec_id) { - return IsCodecIdDifferentWithScalabilityMode(pc_wrapper, index, codec_id, - absl::nullopt); - } - - bool IsCodecIdDifferentWithScalabilityMode( - rtc::scoped_refptr pc_wrapper, - size_t index, - const std::string& codec_id, - absl::optional wanted_scalability_mode) { - rtc::scoped_refptr report = GetStats(pc_wrapper); - std::vector outbound_rtps = - report->GetStatsOfType(); - return outbound_rtps[index]->codec_id.value() != codec_id && - (!wanted_scalability_mode || - (outbound_rtps[index]->scalability_mode.has_value() && - outbound_rtps[index]->scalability_mode.value() == - wanted_scalability_mode)); - } - - bool HasOutboundRtpBytesSent( - rtc::scoped_refptr pc_wrapper, - size_t num_layers) { - return HasOutboundRtpBytesSent(pc_wrapper, num_layers, num_layers); - } - - bool HasOutboundRtpBytesSent( - rtc::scoped_refptr pc_wrapper, - size_t num_layers, - size_t num_active_layers) { - rtc::scoped_refptr report = GetStats(pc_wrapper); - std::vector outbound_rtps = - report->GetStatsOfType(); - if (outbound_rtps.size() != num_layers) { - return false; - } - size_t num_sending_layers = 0; - for (const auto* outbound_rtp : outbound_rtps) { - if (outbound_rtp->bytes_sent.is_defined() && - *outbound_rtp->bytes_sent > 0u) { - ++num_sending_layers; - } - } - return num_sending_layers == num_active_layers; - } - - bool HasOutboundRtpWithRidAndScalabilityMode( - rtc::scoped_refptr pc_wrapper, - absl::string_view rid, - absl::string_view expected_scalability_mode, - uint32_t frame_height) { - rtc::scoped_refptr report = GetStats(pc_wrapper); - std::vector outbound_rtps = - report->GetStatsOfType(); - auto* outbound_rtp = FindOutboundRtpByRid(outbound_rtps, rid); - if (!outbound_rtp || !outbound_rtp->scalability_mode.is_defined() || - *outbound_rtp->scalability_mode != expected_scalability_mode) { - return false; - } - if (outbound_rtp->frame_height.is_defined()) { - RTC_LOG(LS_INFO) << "Waiting for target resolution (" << frame_height - << "p). Currently at " << *outbound_rtp->frame_height - << "p..."; - } else { - RTC_LOG(LS_INFO) - << "Waiting for target resolution. No frames encoded yet..."; - } - if (!outbound_rtp->frame_height.is_defined() || - *outbound_rtp->frame_height != frame_height) { - // Sleep to avoid log spam when this is used in ASSERT_TRUE_WAIT(). 
- rtc::Thread::Current()->SleepMs(1000); - return false; - } - return true; - } - - bool OutboundRtpResolutionsAreLessThanOrEqualToExpectations( - rtc::scoped_refptr pc_wrapper, - std::vector resolutions) { - rtc::scoped_refptr report = GetStats(pc_wrapper); - std::vector outbound_rtps = - report->GetStatsOfType(); - for (const RidAndResolution& resolution : resolutions) { - const RTCOutboundRtpStreamStats* outbound_rtp = nullptr; - if (!resolution.rid.empty()) { - outbound_rtp = FindOutboundRtpByRid(outbound_rtps, resolution.rid); - } else if (outbound_rtps.size() == 1u) { - outbound_rtp = outbound_rtps[0]; - } - if (!outbound_rtp || !outbound_rtp->frame_width.is_defined() || - !outbound_rtp->frame_height.is_defined()) { - // RTP not found by rid or has not encoded a frame yet. - RTC_LOG(LS_ERROR) << "rid=" << resolution.rid << " does not have " - << "resolution metrics"; - return false; - } - if (*outbound_rtp->frame_width > resolution.width || - *outbound_rtp->frame_height > resolution.height) { - RTC_LOG(LS_ERROR) << "rid=" << resolution.rid << " is " - << *outbound_rtp->frame_width << "x" - << *outbound_rtp->frame_height - << ", this is greater than the " - << "expected " << resolution.width << "x" - << resolution.height; - return false; - } - } - return true; + [[nodiscard]] RTCErrorOr> GetStatsUntil( + scoped_refptr pc_wrapper, + Matcher> matcher, + WaitUntilSettings settings = {}) { + return WaitUntil([&]() { return GetStats(pc_wrapper); }, std::move(matcher), + settings); } protected: std::unique_ptr CreateOffer( - rtc::scoped_refptr pc_wrapper) { - auto observer = - rtc::make_ref_counted(); + scoped_refptr pc_wrapper) { + auto observer = make_ref_counted(); pc_wrapper->pc()->CreateOffer(observer.get(), {}); - EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout.ms()); + EXPECT_THAT(WaitUntil([&] { return observer->called(); }, IsTrue()), + IsRtcOk()); return observer->MoveDescription(); } std::unique_ptr CreateAnswer( - rtc::scoped_refptr pc_wrapper) { - auto observer = - rtc::make_ref_counted(); + scoped_refptr pc_wrapper) { + auto observer = make_ref_counted(); pc_wrapper->pc()->CreateAnswer(observer.get(), {}); - EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout.ms()); + EXPECT_THAT(WaitUntil([&] { return observer->called(); }, IsTrue()), + IsRtcOk()); return observer->MoveDescription(); } - rtc::scoped_refptr SetLocalDescription( - rtc::scoped_refptr pc_wrapper, + scoped_refptr SetLocalDescription( + scoped_refptr pc_wrapper, SessionDescriptionInterface* sdp) { - auto observer = rtc::make_ref_counted(); + auto observer = make_ref_counted(); pc_wrapper->pc()->SetLocalDescription( observer.get(), CloneSessionDescription(sdp).release()); return observer; } - rtc::scoped_refptr SetRemoteDescription( - rtc::scoped_refptr pc_wrapper, + scoped_refptr SetRemoteDescription( + scoped_refptr pc_wrapper, SessionDescriptionInterface* sdp) { - auto observer = rtc::make_ref_counted(); + auto observer = make_ref_counted(); pc_wrapper->pc()->SetRemoteDescription( observer.get(), CloneSessionDescription(sdp).release()); return observer; @@ -385,34 +410,34 @@ class PeerConnectionEncodingsIntegrationTest : public ::testing::Test { // the offer it is important to SetLocalDescription() and // SetRemoteDescription() are kicked off without awaiting in-between. This // helper is used to await multiple observers. 
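+  // Typical usage, as in Negotiate() above:
+  //   auto p1 = SetLocalDescription(local_pc_wrapper, offer.get());
+  //   auto p2 = SetRemoteDescription(remote_pc_wrapper, offer.get());
+  //   EXPECT_TRUE(Await({p1, p2}));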
- bool Await(std::vector> - observers) { + bool Await( + std::vector> observers) { for (auto& observer : observers) { - EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout.ms()); - if (!observer->result()) { + auto result = WaitUntil([&] { return observer->called(); }, IsTrue()); + + if (!result.ok() || !observer->result()) { return false; } } return true; } - rtc::PhysicalSocketServer pss_; - std::unique_ptr background_thread_; + PhysicalSocketServer pss_; + std::unique_ptr background_thread_; }; TEST_F(PeerConnectionEncodingsIntegrationTest, VP8_SingleEncodingDefaultsToL1T1) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - std::vector layers = - CreateLayers({"f"}, /*active=*/true); - rtc::scoped_refptr transceiver = + std::vector layers = CreateLayers({"f"}, /*active=*/true); + scoped_refptr transceiver = AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, layers); std::vector codecs = - GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP8"); + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, "VP8"); transceiver->SetCodecPreferences(codecs); NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); @@ -420,15 +445,18 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, remote_pc_wrapper->WaitForConnection(); // Wait until media is flowing. - ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 1u), - kDefaultTimeout.ms()); - EXPECT_TRUE(OutboundRtpResolutionsAreLessThanOrEqualToExpectations( - local_pc_wrapper, {{"", 1280, 720}})); + auto stats_result = + GetStatsUntil(local_pc_wrapper, HasOutboundRtpBytesSent(1)); + ASSERT_THAT(stats_result, IsRtcOk()); + EXPECT_THAT(GetOutboundRtpStreamStatsByRid(stats_result.value()), + ElementsAre(Pair("", ResolutionIs(1280, 720)))); + // Verify codec and scalability mode. - rtc::scoped_refptr report = GetStats(local_pc_wrapper); + scoped_refptr report = stats_result.value(); std::vector outbound_rtps = report->GetStatsOfType(); ASSERT_THAT(outbound_rtps, SizeIs(1u)); + EXPECT_THAT(outbound_rtps, Contains(ResolutionIs(Le(1280), Le(720)))); EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[0]), StrCaseEq("video/VP8")); EXPECT_THAT(*outbound_rtps[0]->scalability_mode, StrEq("L1T1")); @@ -436,22 +464,25 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, TEST_F(PeerConnectionEncodingsIntegrationTest, VP8_RejectsSvcAndDefaultsToL1T1) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - std::vector layers = - CreateLayers({"f"}, /*active=*/true); - rtc::scoped_refptr transceiver = + std::vector layers = CreateLayers({"f"}, /*active=*/true); + scoped_refptr transceiver = AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, layers); - // Restricting codecs restricts what SetParameters() will accept or reject. + // Restricting the local receive codecs will restrict what we offer and + // hence the answer if it is a subset of our offer. std::vector codecs = GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP8"); transceiver->SetCodecPreferences(codecs); + + NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + // Attempt SVC (L3T3_KEY). 
This is not possible because only VP8 is up for // negotiation and VP8 does not support it. - rtc::scoped_refptr sender = transceiver->sender(); + scoped_refptr sender = transceiver->sender(); RtpParameters parameters = sender->GetParameters(); ASSERT_EQ(parameters.encodings.size(), 1u); parameters.encodings[0].scalability_mode = "L3T3_KEY"; @@ -460,17 +491,16 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, // `scalability_mode` remains unset because SetParameters() failed. parameters = sender->GetParameters(); ASSERT_EQ(parameters.encodings.size(), 1u); - EXPECT_THAT(parameters.encodings[0].scalability_mode, Eq(absl::nullopt)); + EXPECT_THAT(parameters.encodings[0].scalability_mode, Eq(std::nullopt)); - NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); local_pc_wrapper->WaitForConnection(); remote_pc_wrapper->WaitForConnection(); // Wait until media is flowing. - ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 1u), - kDefaultTimeout.ms()); + ASSERT_THAT(GetStatsUntil(local_pc_wrapper, HasOutboundRtpBytesSent(1)), + IsRtcOk()); // When `scalability_mode` is not set, VP8 defaults to L1T1. - rtc::scoped_refptr report = GetStats(local_pc_wrapper); + scoped_refptr report = GetStats(local_pc_wrapper); std::vector outbound_rtps = report->GetStatsOfType(); ASSERT_THAT(outbound_rtps, SizeIs(1u)); @@ -480,30 +510,83 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, // GetParameters() confirms `scalability_mode` is still not set. parameters = sender->GetParameters(); ASSERT_EQ(parameters.encodings.size(), 1u); - EXPECT_THAT(parameters.encodings[0].scalability_mode, Eq(absl::nullopt)); + EXPECT_THAT(parameters.encodings[0].scalability_mode, Eq(std::nullopt)); +} + +TEST_F(PeerConnectionEncodingsIntegrationTest, + SetParametersWithScalabilityModeNotSupportedBySubsequentNegotiation) { + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + std::vector layers = CreateLayers({"f"}, /*active=*/true); + scoped_refptr transceiver = + AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, + layers); + // Restricting the local receive codecs will restrict what we offer and + // hence the answer if it is a subset of our offer. + std::vector codecs = + GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP8"); + transceiver->SetCodecPreferences(codecs); + + // Attempt SVC (L3T3_KEY). This is still possible because VP9 might be + // available from the remote end. + scoped_refptr sender = transceiver->sender(); + RtpParameters parameters = sender->GetParameters(); + ASSERT_EQ(parameters.encodings.size(), 1u); + parameters.encodings[0].scalability_mode = "L3T3_KEY"; + parameters.encodings[0].scale_resolution_down_by = 1; + EXPECT_TRUE(sender->SetParameters(parameters).ok()); + + NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + + // `scalability_mode` is set to the VP8 default since that is what was + // negotiated. + parameters = sender->GetParameters(); + ASSERT_EQ(parameters.encodings.size(), 1u); + EXPECT_THAT(parameters.encodings[0].scalability_mode, Eq("L1T2")); + + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + // Wait until media is flowing. + auto error_or_stats = + GetStatsUntil(local_pc_wrapper, HasOutboundRtpBytesSent(1)); + ASSERT_THAT(error_or_stats, IsRtcOk()); + // When `scalability_mode` is not set, VP8 defaults to L1T1. 
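+  // Here, however, L3T3_KEY was requested before negotiation; since VP8
+  // cannot do spatial scalability, the mode is expected to fall back to
+  // "L1T2", which is what the stats and GetParameters() checks below verify.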
+ scoped_refptr report = error_or_stats.value(); + std::vector outbound_rtps = + report->GetStatsOfType(); + ASSERT_THAT(outbound_rtps, SizeIs(1u)); + EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[0]), + StrCaseEq("video/VP8")); + EXPECT_THAT(*outbound_rtps[0]->scalability_mode, StrEq("L1T2")); + // GetParameters() confirms `scalability_mode` is still not set. + parameters = sender->GetParameters(); + ASSERT_EQ(parameters.encodings.size(), 1u); + EXPECT_THAT(parameters.encodings[0].scalability_mode, Eq("L1T2")); } TEST_F(PeerConnectionEncodingsIntegrationTest, VP8_FallbackFromSvcResultsInL1T2) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - std::vector layers = - CreateLayers({"f"}, /*active=*/true); - rtc::scoped_refptr transceiver = + std::vector layers = CreateLayers({"f"}, /*active=*/true); + scoped_refptr transceiver = AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, layers); // Verify test assumption that VP8 is first in the list, but don't modify the // codec preferences because we want the sender to think SVC is a possibility. std::vector codecs = local_pc_wrapper->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO) + ->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO) .codecs; EXPECT_THAT(codecs[0].name, StrCaseEq("VP8")); // Attempt SVC (L3T3_KEY), which is not possible with VP8, but the sender does // not yet know which codec we'll use so the parameters will be accepted. - rtc::scoped_refptr sender = transceiver->sender(); + scoped_refptr sender = transceiver->sender(); RtpParameters parameters = sender->GetParameters(); ASSERT_EQ(parameters.encodings.size(), 1u); parameters.encodings[0].scalability_mode = "L3T3_KEY"; @@ -520,7 +603,7 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, local_pc_wrapper->WaitForConnection(); remote_pc_wrapper->WaitForConnection(); // `scalaiblity_mode` is assigned the fallback value "L1T2" which is different - // than the default of absl::nullopt. + // than the default of std::nullopt. parameters = sender->GetParameters(); ASSERT_EQ(parameters.encodings.size(), 1u); EXPECT_THAT(parameters.encodings[0].scalability_mode, @@ -528,17 +611,24 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, // Wait until media is flowing, no significant time needed because we only // have one layer. - ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 1u), - kDefaultTimeout.ms()); + ASSERT_THAT(GetStatsUntil(local_pc_wrapper, HasOutboundRtpBytesSent(1u)), + IsRtcOk()); // GetStats() confirms "L1T2" is used which is different than the "L1T1" // default or the "L3T3_KEY" that was attempted. - rtc::scoped_refptr report = GetStats(local_pc_wrapper); + scoped_refptr report = GetStats(local_pc_wrapper); std::vector outbound_rtps = report->GetStatsOfType(); ASSERT_THAT(outbound_rtps, SizeIs(1u)); EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[0]), StrCaseEq("video/VP8")); EXPECT_THAT(*outbound_rtps[0]->scalability_mode, StrEq("L1T2")); + + // Now that we know VP8 is used, try setting L3T3 which should fail. 
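+  // VP8 only supports temporal layering (L1Tx modes); once the negotiated
+  // codec is known, SetParameters() should reject any mode with more than one
+  // spatial layer, such as L3T3_KEY.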
+ parameters = sender->GetParameters(); + ASSERT_EQ(parameters.encodings.size(), 1u); + parameters.encodings[0].scalability_mode = "L3T3_KEY"; + parameters.encodings[0].scale_resolution_down_by = 1; + EXPECT_FALSE(sender->SetParameters(parameters).ok()); } // The legacy SVC path is triggered when VP9 us used, but `scalability_mode` has @@ -548,17 +638,17 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, // (i.e. VP9 is not treated differently than VP8). TEST_F(PeerConnectionEncodingsIntegrationTest, VP9_LegacySvcWhenScalabilityModeNotSpecified) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - std::vector layers = + std::vector layers = CreateLayers({"f", "h", "q"}, /*active=*/true); - rtc::scoped_refptr transceiver = + scoped_refptr transceiver = AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, layers); std::vector codecs = - GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP9"); + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, "VP9"); transceiver->SetCodecPreferences(codecs); NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); @@ -567,18 +657,22 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, // Wait until media is flowing. We only expect a single RTP stream. // We expect to see bytes flowing almost immediately on the lowest layer. - ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 1u), - kDefaultTimeout.ms()); + ASSERT_THAT(GetStatsUntil(local_pc_wrapper, HasOutboundRtpBytesSent(1u)), + IsRtcOk()); // Wait until scalability mode is reported and expected resolution reached. // Ramp up time may be significant. - ASSERT_TRUE_WAIT(HasOutboundRtpWithRidAndScalabilityMode( - local_pc_wrapper, "f", "L3T3_KEY", 720), - kLongTimeoutForRampingUp.ms()); + ASSERT_THAT( + GetStatsUntil( + local_pc_wrapper, + OutboundRtpStatsAre(Contains( + AllOf(RidIs("f"), ScalabilityModeIs("L3T3_KEY"), HeightIs(720)))), + {.timeout = kLongTimeoutForRampingUp}), + IsRtcOk()); // Despite SVC being used on a single RTP stream, GetParameters() returns the // three encodings that we configured earlier (this is not spec-compliant but // it is how legacy SVC behaves). - rtc::scoped_refptr sender = transceiver->sender(); + scoped_refptr sender = transceiver->sender(); std::vector encodings = sender->GetParameters().encodings; ASSERT_EQ(encodings.size(), 3u); @@ -593,20 +687,19 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, // encoding in GetParameters(). TEST_F(PeerConnectionEncodingsIntegrationTest, VP9_StandardSvcWithOnlyOneEncoding) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - std::vector layers = - CreateLayers({"f"}, /*active=*/true); - rtc::scoped_refptr transceiver = + std::vector layers = CreateLayers({"f"}, /*active=*/true); + scoped_refptr transceiver = AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, layers); std::vector codecs = - GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP9"); + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, "VP9"); transceiver->SetCodecPreferences(codecs); // Configure SVC, a.k.a. "L3T3_KEY". 
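+  // With a single encoding, setting scalability_mode to "L3T3_KEY" selects
+  // standard (non-legacy) SVC: one RTP stream carrying three spatial and
+  // three temporal layers.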
- rtc::scoped_refptr sender = transceiver->sender(); + scoped_refptr sender = transceiver->sender(); RtpParameters parameters = sender->GetParameters(); ASSERT_EQ(parameters.encodings.size(), 1u); parameters.encodings[0].scalability_mode = "L3T3_KEY"; @@ -619,15 +712,20 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, // Wait until media is flowing. We only expect a single RTP stream. // We expect to see bytes flowing almost immediately on the lowest layer. - ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 1u), - kDefaultTimeout.ms()); - EXPECT_TRUE(OutboundRtpResolutionsAreLessThanOrEqualToExpectations( - local_pc_wrapper, {{"", 1280, 720}})); + + auto error_or_stats = + GetStatsUntil(local_pc_wrapper, + AllOf(HasOutboundRtpBytesSent(1u), + OutboundRtpStatsAre(Contains(HeightIs(720)))), + {.timeout = kLongTimeoutForRampingUp}); + ASSERT_THAT(error_or_stats, IsRtcOk()); // Verify codec and scalability mode. - rtc::scoped_refptr report = GetStats(local_pc_wrapper); + scoped_refptr report = error_or_stats.value(); std::vector outbound_rtps = report->GetStatsOfType(); ASSERT_THAT(outbound_rtps, SizeIs(1u)); + EXPECT_THAT(outbound_rtps[0], ResolutionIs(1280, 720)); + EXPECT_THAT(outbound_rtps[0], RidIs(std::nullopt)); EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[0]), StrCaseEq("video/VP9")); EXPECT_THAT(*outbound_rtps[0]->scalability_mode, StrEq("L3T3_KEY")); @@ -646,20 +744,20 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, // observable in GetStats(). TEST_F(PeerConnectionEncodingsIntegrationTest, VP9_StandardSvcWithSingleActiveEncoding) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - std::vector layers = + std::vector layers = CreateLayers({"f", "h", "q"}, /*active=*/true); - rtc::scoped_refptr transceiver = + scoped_refptr transceiver = AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, layers); std::vector codecs = - GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP9"); + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, "VP9"); transceiver->SetCodecPreferences(codecs); // Configure SVC, a.k.a. "L3T3_KEY". - rtc::scoped_refptr sender = transceiver->sender(); + scoped_refptr sender = transceiver->sender(); RtpParameters parameters = sender->GetParameters(); ASSERT_THAT(parameters.encodings, SizeIs(3)); parameters.encodings[0].scalability_mode = "L3T3_KEY"; @@ -674,39 +772,41 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, // Since the standard API is configuring simulcast we get three outbound-rtps, // but only one is active. - ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 3u, 1u), - kDefaultTimeout.ms()); // Wait until scalability mode is reported and expected resolution reached. // Ramp up time is significant. - ASSERT_TRUE_WAIT(HasOutboundRtpWithRidAndScalabilityMode( - local_pc_wrapper, "f", "L3T3_KEY", 720), - kLongTimeoutForRampingUp.ms()); + ASSERT_THAT(GetStatsUntil(local_pc_wrapper, + AllOf(HasOutboundRtpBytesSent(3, 1), + OutboundRtpStatsAre(Contains(AllOf( + RidIs("f"), ScalabilityModeIs("L3T3_KEY"), + HeightIs(720))))), + {.timeout = kLongTimeoutForRampingUp}), + IsRtcOk()); // GetParameters() is consistent with what we asked for and got. 
parameters = sender->GetParameters(); ASSERT_THAT(parameters.encodings, SizeIs(3)); EXPECT_THAT(parameters.encodings[0].scalability_mode, - Optional(std::string("L3T3_KEY"))); + Optional(StrEq("L3T3_KEY"))); EXPECT_FALSE(parameters.encodings[1].scalability_mode.has_value()); EXPECT_FALSE(parameters.encodings[2].scalability_mode.has_value()); } // Exercise common path where `scalability_mode` is not specified until after -// negotiation, requring us to recreate the stream when the number of streams +// negotiation, requiring us to recreate the stream when the number of streams // changes from 1 (legacy SVC) to 3 (standard simulcast). TEST_F(PeerConnectionEncodingsIntegrationTest, VP9_SwitchFromLegacySvcToStandardSingleActiveEncodingSvc) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - std::vector layers = + std::vector layers = CreateLayers({"f", "h", "q"}, /*active=*/true); - rtc::scoped_refptr transceiver = + scoped_refptr transceiver = AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, layers); std::vector codecs = - GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP9"); + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, "VP9"); transceiver->SetCodecPreferences(codecs); // The original negotiation triggers legacy SVC because we didn't specify @@ -718,27 +818,29 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, // Switch to the standard mode. Despite only having a single active stream in // both cases, this internally reconfigures from 1 stream to 3 streams. // Test coverage for https://crbug.com/webrtc/15016. - rtc::scoped_refptr sender = transceiver->sender(); + scoped_refptr sender = transceiver->sender(); RtpParameters parameters = sender->GetParameters(); ASSERT_THAT(parameters.encodings, SizeIs(3)); parameters.encodings[0].active = true; parameters.encodings[0].scalability_mode = "L2T2_KEY"; parameters.encodings[0].scale_resolution_down_by = 2.0; parameters.encodings[1].active = false; - parameters.encodings[1].scalability_mode = absl::nullopt; + parameters.encodings[1].scalability_mode = std::nullopt; parameters.encodings[2].active = false; - parameters.encodings[2].scalability_mode = absl::nullopt; + parameters.encodings[2].scalability_mode = std::nullopt; sender->SetParameters(parameters); // Since the standard API is configuring simulcast we get three outbound-rtps, // but only one is active. - ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 3u, 1u), - kDefaultTimeout.ms()); // Wait until scalability mode is reported and expected resolution reached. // Ramp up time may be significant. - ASSERT_TRUE_WAIT(HasOutboundRtpWithRidAndScalabilityMode( - local_pc_wrapper, "f", "L2T2_KEY", 720 / 2), - kLongTimeoutForRampingUp.ms()); + ASSERT_THAT(GetStatsUntil(local_pc_wrapper, + AllOf(HasOutboundRtpBytesSent(3, 1), + OutboundRtpStatsAre(Contains(AllOf( + RidIs("f"), ScalabilityModeIs("L2T2_KEY"), + HeightIs(720 / 2))))), + {.timeout = kLongTimeoutForRampingUp}), + IsRtcOk()); // GetParameters() does not report any fallback. 
parameters = sender->GetParameters(); @@ -749,23 +851,299 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, EXPECT_FALSE(parameters.encodings[2].scalability_mode.has_value()); } +TEST_F(PeerConnectionEncodingsIntegrationTest, + VP9_SimulcastDeactiveActiveLayer_StandardSvc) { + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + std::vector layers = + CreateLayers({"q", "h", "f"}, /*active=*/true); + scoped_refptr transceiver = + AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, + layers); + constexpr absl::string_view kCodec = "VP9"; + std::vector codecs = + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, kCodec); + transceiver->SetCodecPreferences(codecs); + + // Switch to the standard mode. Despite only having a single active stream in + // both cases, this internally reconfigures from 1 stream to 3 streams. + // Test coverage for https://crbug.com/webrtc/15016. + scoped_refptr sender = transceiver->sender(); + RtpParameters parameters = sender->GetParameters(); + ASSERT_THAT(parameters.encodings, SizeIs(3)); + parameters.encodings[0].active = true; + parameters.encodings[0].scalability_mode = "L1T3"; + parameters.encodings[0].scale_resolution_down_by = 4.0; + parameters.encodings[1].active = true; + parameters.encodings[1].scalability_mode = "L1T1"; + parameters.encodings[1].scale_resolution_down_by = 2.0; + parameters.encodings[2].active = true; + parameters.encodings[2].scalability_mode = "L1T1"; + parameters.encodings[2].scale_resolution_down_by = 1.0; + EXPECT_TRUE(sender->SetParameters(parameters).ok()); + + // The original negotiation triggers legacy SVC because we didn't specify + // any scalability mode. + NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + // Since the standard API is configuring simulcast we get three outbound-rtps, + // and two are active. + ASSERT_THAT( + WaitUntil( + [&] { + std::vector outbound_rtps = + GetStats(local_pc_wrapper) + ->GetStatsOfType(); + std::vector bytes_sent; + bytes_sent.reserve(outbound_rtps.size()); + for (const auto* outbound_rtp : outbound_rtps) { + bytes_sent.push_back(outbound_rtp->bytes_sent.value_or(0)); + } + return bytes_sent; + }, + AllOf(SizeIs(3), Each(Gt(0))), {.timeout = kLongTimeoutForRampingUp}), + IsRtcOk()); + scoped_refptr report = GetStats(local_pc_wrapper); + ASSERT_TRUE(report); + std::vector outbound_rtps = + report->GetStatsOfType(); + EXPECT_THAT(outbound_rtps, + Each(EncoderImplementationIs( + "SimulcastEncoderAdapter (libvpx, libvpx, libvpx)"))); + + // GetParameters() does not report any fallback. + parameters = sender->GetParameters(); + ASSERT_THAT(parameters.encodings, SizeIs(3)); + EXPECT_THAT(parameters.encodings[0].scalability_mode, + Optional(StrEq("L1T3"))); + EXPECT_THAT(parameters.encodings[1].scalability_mode, + Optional(StrEq("L1T1"))); + EXPECT_THAT(parameters.encodings[2].scalability_mode, + Optional(StrEq("L1T1"))); + EXPECT_THAT(parameters.encodings[2].scale_resolution_down_by, Eq(1.0)); + EXPECT_THAT(parameters.encodings[1].scale_resolution_down_by, Eq(2.0)); + EXPECT_THAT(parameters.encodings[0].scale_resolution_down_by, Eq(4.0)); + + // Deactivate the active layer. 
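+  // Deactivating the top layer should shrink the SimulcastEncoderAdapter from
+  // three libvpx encoders to two and leave the inactive layer without a
+  // reported scalability mode, as the WaitUntil below verifies.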
+ parameters.encodings[2].active = false; + EXPECT_TRUE(sender->SetParameters(parameters).ok()); + ASSERT_THAT(WaitUntil( + [&]() { + return GetStats(local_pc_wrapper) + ->GetStatsOfType(); + }, + AllOf(Each(EncoderImplementationIs( + "SimulcastEncoderAdapter (libvpx, libvpx)")), + UnorderedElementsAre(ScalabilityModeIs("L1T3"), + ScalabilityModeIs("L1T1"), + ScalabilityModeIs(std::nullopt)))), + IsRtcOk()); +} + +TEST_F(PeerConnectionEncodingsIntegrationTest, + VP9_SimulcastMultiplLayersActive_StandardSvc) { + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + std::vector layers = + CreateLayers({"q", "h", "f"}, /*active=*/true); + scoped_refptr transceiver = + AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, + layers); + std::vector codecs = + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, "VP9"); + transceiver->SetCodecPreferences(codecs); + + // Switch to the standard mode. Despite only having a single active stream in + // both cases, this internally reconfigures from 1 stream to 3 streams. + // Test coverage for https://crbug.com/webrtc/15016. + scoped_refptr sender = transceiver->sender(); + RtpParameters parameters = sender->GetParameters(); + ASSERT_THAT(parameters.encodings, SizeIs(3)); + parameters.encodings[0].active = true; + parameters.encodings[0].scalability_mode = "L1T3"; + parameters.encodings[0].scale_resolution_down_by = 4.0; + parameters.encodings[1].active = true; + parameters.encodings[1].scalability_mode = "L1T1"; + parameters.encodings[1].scale_resolution_down_by = 2.0; + parameters.encodings[2].active = false; + parameters.encodings[2].scalability_mode = std::nullopt; + EXPECT_TRUE(sender->SetParameters(parameters).ok()); + + // The original negotiation triggers legacy SVC because we didn't specify + // any scalability mode. + NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + // Since the standard API is configuring simulcast we get three outbound-rtps, + // and two are active. + // Wait until scalability mode is reported and expected resolution reached. + // Ramp up time may be significant. + auto error_or_stats = GetStatsUntil( + local_pc_wrapper, + OutboundRtpStatsAre( + IsSupersetOf({AllOf(RidIs("q"), ScalabilityModeIs("L1T3"), + HeightIs(720 / 4), BytesSentIs(Gt(0))), + AllOf(RidIs("h"), ScalabilityModeIs("L1T1"), + HeightIs(720 / 2), BytesSentIs(Gt(0)))})), + {.timeout = kLongTimeoutForRampingUp}); + ASSERT_THAT(error_or_stats, IsRtcOk()); + scoped_refptr report = error_or_stats.value(); + std::vector outbound_rtps = + report->GetStatsOfType(); + EXPECT_THAT(outbound_rtps, Each(EncoderImplementationIs( + "SimulcastEncoderAdapter (libvpx, libvpx)"))); + + // GetParameters() does not report any fallback. 
+ parameters = sender->GetParameters(); + ASSERT_THAT(parameters.encodings, SizeIs(3)); + EXPECT_THAT(parameters.encodings[0].scalability_mode, + Optional(StrEq("L1T3"))); + EXPECT_THAT(parameters.encodings[1].scalability_mode, + Optional(StrEq("L1T1"))); + EXPECT_THAT(parameters.encodings[2].scalability_mode, Eq(std::nullopt)); +} + +TEST_F(PeerConnectionEncodingsIntegrationTest, + VP9_Simulcast_SwitchToLegacySvc) { + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + std::vector layers = + CreateLayers({"f", "h", "q"}, /*active=*/true); + scoped_refptr transceiver = + AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, + layers); + std::vector codecs = + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, "VP9"); + transceiver->SetCodecPreferences(codecs); + + // Switch to the standard mode. Despite only having a single active stream in + // both cases, this internally reconfigures from 1 stream to 3 streams. + // Test coverage for https://crbug.com/webrtc/15016. + scoped_refptr sender = transceiver->sender(); + RtpParameters parameters = sender->GetParameters(); + ASSERT_THAT(parameters.encodings, SizeIs(3)); + parameters.encodings[0].active = false; + parameters.encodings[1].active = true; + parameters.encodings[1].scalability_mode = "L1T1"; + parameters.encodings[1].scale_resolution_down_by = 2.0; + parameters.encodings[2].active = true; + parameters.encodings[2].scalability_mode = "L1T3"; + parameters.encodings[2].scale_resolution_down_by = 4.0; + EXPECT_TRUE(sender->SetParameters(parameters).ok()); + + // The original negotiation triggers legacy SVC because we didn't specify + // any scalability mode. + NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + // Since the standard API is configuring simulcast we get three outbound-rtps, + // and two are active. + // Wait until scalability mode is reported and expected resolution reached. + // Ramp up time may be significant. + auto error_or_stats = GetStatsUntil( + local_pc_wrapper, + OutboundRtpStatsAre(UnorderedElementsAre( + AllOf(RidIs("q"), ScalabilityModeIs("L1T3"), HeightIs(720 / 4)), + AllOf(RidIs("h"), ScalabilityModeIs("L1T1"), HeightIs(720 / 2)), + AllOf(RidIs("f"), BytesSentIs(AnyOf(0, std::nullopt))))), + {.timeout = kLongTimeoutForRampingUp}); + ASSERT_THAT(error_or_stats, IsRtcOk()); + + // GetParameters() does not report any fallback. + parameters = sender->GetParameters(); + ASSERT_THAT(parameters.encodings, SizeIs(3)); + EXPECT_THAT(parameters.encodings[0].scalability_mode, Eq(std::nullopt)); + EXPECT_THAT(parameters.encodings[1].scalability_mode, + Optional(StrEq("L1T1"))); + EXPECT_THAT(parameters.encodings[2].scalability_mode, + Optional(StrEq("L1T3"))); + + // Switch to legacy SVC mode. 
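+  // Clearing scalability_mode and scale_resolution_down_by on every encoding
+  // while only the lower encodings stay active re-triggers the legacy SVC
+  // interpretation; with two active encodings this is expected to produce
+  // L2T3_KEY at half resolution on the "f" rid, as asserted below.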
+ parameters.encodings[0].active = true; + parameters.encodings[0].scalability_mode = std::nullopt; + parameters.encodings[0].scale_resolution_down_by = std::nullopt; + parameters.encodings[1].active = true; + parameters.encodings[1].scalability_mode = std::nullopt; + parameters.encodings[1].scale_resolution_down_by = std::nullopt; + parameters.encodings[2].active = false; + parameters.encodings[2].scalability_mode = std::nullopt; + parameters.encodings[2].scale_resolution_down_by = std::nullopt; + + EXPECT_TRUE(sender->SetParameters(parameters).ok()); + // Ensure that we are getting VGA at L1T3 from the "f" rid. + EXPECT_THAT( + GetStatsUntil( + local_pc_wrapper, + OutboundRtpStatsAre(Contains(AllOf( + RidIs("f"), ScalabilityModeIs("L2T3_KEY"), HeightIs(720 / 2)))), + {.timeout = kLongTimeoutForRampingUp}), + IsRtcOk()); +} + +TEST_F(PeerConnectionEncodingsIntegrationTest, VP9_OneLayerActive_LegacySvc) { + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + std::vector layers = + CreateLayers({"f", "h", "q"}, /*active=*/true); + scoped_refptr transceiver = + AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, + layers); + std::vector codecs = + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, "VP9"); + transceiver->SetCodecPreferences(codecs); + + // Sending L1T3 with legacy SVC mode means setting 1 layer active. + scoped_refptr sender = transceiver->sender(); + RtpParameters parameters = sender->GetParameters(); + ASSERT_THAT(parameters.encodings, SizeIs(3)); + parameters.encodings[0].active = true; + parameters.encodings[1].active = false; + parameters.encodings[2].active = false; + sender->SetParameters(parameters); + + NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + // Ensure that we are getting 180P at L1T3 from the "f" rid. + EXPECT_THAT( + GetStatsUntil( + local_pc_wrapper, + OutboundRtpStatsAre(Contains( + AllOf(RidIs("f"), ScalabilityModeIs("L1T3"), HeightIs(720 / 4)))), + {.timeout = kLongTimeoutForRampingUp}), + IsRtcOk()); +} + TEST_F(PeerConnectionEncodingsIntegrationTest, VP9_AllLayersInactive_LegacySvc) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - std::vector layers = + std::vector layers = CreateLayers({"f", "h", "q"}, /*active=*/true); - rtc::scoped_refptr transceiver = + scoped_refptr transceiver = AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, layers); std::vector codecs = - GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP9"); + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, "VP9"); transceiver->SetCodecPreferences(codecs); // Legacy SVC mode and all layers inactive. - rtc::scoped_refptr sender = transceiver->sender(); + scoped_refptr sender = transceiver->sender(); RtpParameters parameters = sender->GetParameters(); ASSERT_THAT(parameters.encodings, SizeIs(3)); parameters.encodings[0].active = false; @@ -778,8 +1156,8 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, remote_pc_wrapper->WaitForConnection(); // Ensure no media is flowing (1 second should be enough). 
- rtc::Thread::Current()->SleepMs(1000); - rtc::scoped_refptr report = GetStats(local_pc_wrapper); + Thread::Current()->SleepMs(1000); + scoped_refptr report = GetStats(local_pc_wrapper); std::vector outbound_rtps = report->GetStatsOfType(); ASSERT_THAT(outbound_rtps, SizeIs(1u)); @@ -788,21 +1166,21 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, TEST_F(PeerConnectionEncodingsIntegrationTest, VP9_AllLayersInactive_StandardSvc) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - std::vector layers = + std::vector layers = CreateLayers({"f", "h", "q"}, /*active=*/true); - rtc::scoped_refptr transceiver = + scoped_refptr transceiver = AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, layers); std::vector codecs = - GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP9"); + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, "VP9"); transceiver->SetCodecPreferences(codecs); // Standard mode and all layers inactive. - rtc::scoped_refptr sender = transceiver->sender(); + scoped_refptr sender = transceiver->sender(); RtpParameters parameters = sender->GetParameters(); ASSERT_THAT(parameters.encodings, SizeIs(3)); parameters.encodings[0].scalability_mode = "L3T3_KEY"; @@ -817,8 +1195,8 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, remote_pc_wrapper->WaitForConnection(); // Ensure no media is flowing (1 second should be enough). - rtc::Thread::Current()->SleepMs(1000); - rtc::scoped_refptr report = GetStats(local_pc_wrapper); + Thread::Current()->SleepMs(1000); + scoped_refptr report = GetStats(local_pc_wrapper); std::vector outbound_rtps = report->GetStatsOfType(); ASSERT_THAT(outbound_rtps, SizeIs(3u)); @@ -828,22 +1206,22 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, } TEST_F(PeerConnectionEncodingsIntegrationTest, VP9_TargetBitrate_LegacyL1T3) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - std::vector layers = + std::vector layers = CreateLayers({"f", "h", "q"}, /*active=*/true); - rtc::scoped_refptr transceiver = + scoped_refptr transceiver = AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, layers); std::vector codecs = - GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP9"); + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, "VP9"); transceiver->SetCodecPreferences(codecs); // In legacy SVC, disabling the bottom two layers encodings is interpreted as // disabling the bottom two spatial layers resulting in L1T3. - rtc::scoped_refptr sender = transceiver->sender(); + scoped_refptr sender = transceiver->sender(); RtpParameters parameters = sender->GetParameters(); parameters.encodings[0].active = false; parameters.encodings[1].active = false; @@ -856,15 +1234,18 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, VP9_TargetBitrate_LegacyL1T3) { // Wait until 720p L1T3 has ramped up to 720p. It may take additional time // for the target bitrate to reach its maximum. 
- ASSERT_TRUE_WAIT(HasOutboundRtpWithRidAndScalabilityMode(local_pc_wrapper, - "f", "L1T3", 720), - kLongTimeoutForRampingUp.ms()); + ASSERT_THAT( + GetStatsUntil(local_pc_wrapper, + OutboundRtpStatsAre(Contains(AllOf( + RidIs("f"), ScalabilityModeIs("L1T3"), HeightIs(720)))), + {.timeout = kLongTimeoutForRampingUp}), + IsRtcOk()); // The target bitrate typically reaches `kVp9ExpectedMaxBitrateForL1T3` // in a short period of time. However to reduce risk of flakiness in bot // environments, this test only fails if we we exceed the expected target. - rtc::Thread::Current()->SleepMs(1000); - rtc::scoped_refptr report = GetStats(local_pc_wrapper); + Thread::Current()->SleepMs(1000); + scoped_refptr report = GetStats(local_pc_wrapper); std::vector outbound_rtps = report->GetStatsOfType(); ASSERT_THAT(outbound_rtps, SizeIs(1)); @@ -875,23 +1256,23 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, VP9_TargetBitrate_LegacyL1T3) { // Test coverage for https://crbug.com/1455039. TEST_F(PeerConnectionEncodingsIntegrationTest, VP9_TargetBitrate_StandardL1T3) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - std::vector layers = + std::vector layers = CreateLayers({"f", "h", "q"}, /*active=*/true); - rtc::scoped_refptr transceiver = + scoped_refptr transceiver = AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, layers); std::vector codecs = - GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "VP9"); + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, "VP9"); transceiver->SetCodecPreferences(codecs); // With standard APIs, L1T3 is explicitly specified and the encodings refers // to the RTP streams, not the spatial layers. The end result should be // equivalent to the legacy L1T3 case. - rtc::scoped_refptr sender = transceiver->sender(); + scoped_refptr sender = transceiver->sender(); RtpParameters parameters = sender->GetParameters(); parameters.encodings[0].active = true; parameters.encodings[0].scale_resolution_down_by = 1.0; @@ -906,15 +1287,18 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, VP9_TargetBitrate_StandardL1T3) { // Wait until 720p L1T3 has ramped up to 720p. It may take additional time // for the target bitrate to reach its maximum. - ASSERT_TRUE_WAIT(HasOutboundRtpWithRidAndScalabilityMode(local_pc_wrapper, - "f", "L1T3", 720), - kLongTimeoutForRampingUp.ms()); + ASSERT_THAT( + GetStatsUntil(local_pc_wrapper, + OutboundRtpStatsAre(Contains(AllOf( + RidIs("f"), ScalabilityModeIs("L1T3"), HeightIs(720)))), + {.timeout = kLongTimeoutForRampingUp}), + IsRtcOk()); // The target bitrate typically reaches `kVp9ExpectedMaxBitrateForL1T3` // in a short period of time. However to reduce risk of flakiness in bot // environments, this test only fails if we we exceed the expected target. 
- rtc::Thread::Current()->SleepMs(1000); - rtc::scoped_refptr report = GetStats(local_pc_wrapper); + Thread::Current()->SleepMs(1000); + scoped_refptr report = GetStats(local_pc_wrapper); std::vector outbound_rtps = report->GetStatsOfType(); ASSERT_THAT(outbound_rtps, SizeIs(3)); @@ -924,67 +1308,105 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, VP9_TargetBitrate_StandardL1T3) { EXPECT_LE(target_bitrate.kbps(), kVp9ExpectedMaxBitrateForL1T3.kbps()); } +TEST_F(PeerConnectionEncodingsIntegrationTest, + SimulcastProducesUniqueSsrcAndRtxSsrcs) { + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + std::vector layers = + CreateLayers({"f", "h", "q"}, /*active=*/true); + scoped_refptr transceiver = + AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, + layers); + std::vector codecs = + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, "VP8"); + transceiver->SetCodecPreferences(codecs); + + NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + // Wait until media is flowing on all three layers. + // Ramp up time is needed before all three layers are sending. + auto stats = GetStatsUntil(local_pc_wrapper, HasOutboundRtpBytesSent(3u), + {.timeout = kLongTimeoutForRampingUp}); + ASSERT_THAT(stats, IsRtcOk()); + // Verify SSRCs and RTX SSRCs. + scoped_refptr report = stats.MoveValue(); + std::vector outbound_rtps = + report->GetStatsOfType(); + ASSERT_THAT(outbound_rtps, SizeIs(3u)); + + std::set ssrcs; + std::set rtx_ssrcs; + for (const auto& outbound_rtp : outbound_rtps) { + ASSERT_TRUE(outbound_rtp->ssrc.has_value()); + ASSERT_TRUE(outbound_rtp->rtx_ssrc.has_value()); + ssrcs.insert(*outbound_rtp->ssrc); + rtx_ssrcs.insert(*outbound_rtp->rtx_ssrc); + } + EXPECT_EQ(ssrcs.size(), 3u); + EXPECT_EQ(rtx_ssrcs.size(), 3u); +} + TEST_F(PeerConnectionEncodingsIntegrationTest, EncodingParameterCodecIsEmptyWhenCreatedAudio) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); auto transceiver_or_error = - local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - rtc::scoped_refptr audio_transceiver = + local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::AUDIO); + scoped_refptr audio_transceiver = transceiver_or_error.MoveValue(); - webrtc::RtpParameters parameters = - audio_transceiver->sender()->GetParameters(); + RtpParameters parameters = audio_transceiver->sender()->GetParameters(); EXPECT_FALSE(parameters.encodings[0].codec.has_value()); } TEST_F(PeerConnectionEncodingsIntegrationTest, EncodingParameterCodecIsEmptyWhenCreatedVideo) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); auto transceiver_or_error = - local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); - rtc::scoped_refptr video_transceiver = + local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::VIDEO); + scoped_refptr video_transceiver = transceiver_or_error.MoveValue(); - webrtc::RtpParameters parameters = - video_transceiver->sender()->GetParameters(); + RtpParameters parameters = video_transceiver->sender()->GetParameters(); EXPECT_FALSE(parameters.encodings[0].codec.has_value()); } TEST_F(PeerConnectionEncodingsIntegrationTest, EncodingParameterCodecIsSetByAddTransceiverAudio) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = 
CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - rtc::scoped_refptr stream = - local_pc_wrapper->GetUserMedia( - /*audio=*/true, {}, /*video=*/false, {}); - rtc::scoped_refptr track = stream->GetAudioTracks()[0]; + scoped_refptr stream = local_pc_wrapper->GetUserMedia( + /*audio=*/true, {}, /*video=*/false, {}); + scoped_refptr track = stream->GetAudioTracks()[0]; - absl::optional pcmu = - local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_AUDIO, + std::optional pcmu = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::AUDIO, "pcmu"); ASSERT_TRUE(pcmu); - webrtc::RtpTransceiverInit init; - init.direction = webrtc::RtpTransceiverDirection::kSendOnly; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + init.direction = RtpTransceiverDirection::kSendOnly; + RtpEncodingParameters encoding_parameters; encoding_parameters.codec = pcmu; init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = local_pc_wrapper->pc()->AddTransceiver(track, init); - rtc::scoped_refptr audio_transceiver = + scoped_refptr audio_transceiver = transceiver_or_error.MoveValue(); - webrtc::RtpParameters parameters = - audio_transceiver->sender()->GetParameters(); + RtpParameters parameters = audio_transceiver->sender()->GetParameters(); EXPECT_EQ(*parameters.encodings[0].codec, *pcmu); NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); - local_pc_wrapper->WaitForConnection(); - remote_pc_wrapper->WaitForConnection(); + ASSERT_TRUE(local_pc_wrapper->WaitForConnection()); + ASSERT_TRUE(remote_pc_wrapper->WaitForConnection()); - rtc::scoped_refptr report = GetStats(local_pc_wrapper); + scoped_refptr report = GetStats(local_pc_wrapper); std::vector outbound_rtps = report->GetStatsOfType(); ASSERT_EQ(outbound_rtps.size(), 1u); @@ -994,44 +1416,42 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, TEST_F(PeerConnectionEncodingsIntegrationTest, EncodingParameterCodecIsSetByAddTransceiverVideo) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - rtc::scoped_refptr stream = - local_pc_wrapper->GetUserMedia( - /*audio=*/false, {}, /*video=*/true, {.width = 1280, .height = 720}); - rtc::scoped_refptr track = stream->GetVideoTracks()[0]; + scoped_refptr stream = local_pc_wrapper->GetUserMedia( + /*audio=*/false, {}, /*video=*/true, {.width = 1280, .height = 720}); + scoped_refptr track = stream->GetVideoTracks()[0]; - absl::optional vp9 = - local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO, + std::optional vp9 = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::VIDEO, "vp9"); ASSERT_TRUE(vp9); - webrtc::RtpTransceiverInit init; - init.direction = webrtc::RtpTransceiverDirection::kSendOnly; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + init.direction = RtpTransceiverDirection::kSendOnly; + RtpEncodingParameters encoding_parameters; encoding_parameters.codec = vp9; encoding_parameters.scalability_mode = "L3T3"; init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = local_pc_wrapper->pc()->AddTransceiver(track, init); - rtc::scoped_refptr audio_transceiver = + scoped_refptr audio_transceiver = 
transceiver_or_error.MoveValue(); - webrtc::RtpParameters parameters = - audio_transceiver->sender()->GetParameters(); + RtpParameters parameters = audio_transceiver->sender()->GetParameters(); EXPECT_EQ(*parameters.encodings[0].codec, *vp9); NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); local_pc_wrapper->WaitForConnection(); remote_pc_wrapper->WaitForConnection(); - EXPECT_TRUE_WAIT( - IsCodecIdDifferentWithScalabilityMode(local_pc_wrapper, 0, "", "L3T3"), - kDefaultTimeout.ms()); - - rtc::scoped_refptr report = GetStats(local_pc_wrapper); + auto error_or_stats = + GetStatsUntil(local_pc_wrapper, + OutboundRtpStatsAre(Contains(ScalabilityModeIs("L3T3")))); + ASSERT_THAT(error_or_stats, IsRtcOk()); + scoped_refptr report = error_or_stats.MoveValue(); std::vector outbound_rtps = report->GetStatsOfType(); ASSERT_EQ(outbound_rtps.size(), 1u); @@ -1042,24 +1462,22 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, TEST_F(PeerConnectionEncodingsIntegrationTest, EncodingParameterCodecIsSetBySetParametersBeforeNegotiationAudio) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - rtc::scoped_refptr stream = - local_pc_wrapper->GetUserMedia( - /*audio=*/true, {}, /*video=*/false, {}); - rtc::scoped_refptr track = stream->GetAudioTracks()[0]; + scoped_refptr stream = local_pc_wrapper->GetUserMedia( + /*audio=*/true, {}, /*video=*/false, {}); + scoped_refptr track = stream->GetAudioTracks()[0]; - absl::optional pcmu = - local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_AUDIO, + std::optional pcmu = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::AUDIO, "pcmu"); auto transceiver_or_error = local_pc_wrapper->pc()->AddTransceiver(track); - rtc::scoped_refptr audio_transceiver = + scoped_refptr audio_transceiver = transceiver_or_error.MoveValue(); - webrtc::RtpParameters parameters = - audio_transceiver->sender()->GetParameters(); + RtpParameters parameters = audio_transceiver->sender()->GetParameters(); parameters.encodings[0].codec = pcmu; EXPECT_TRUE(audio_transceiver->sender()->SetParameters(parameters).ok()); @@ -1070,7 +1488,7 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, local_pc_wrapper->WaitForConnection(); remote_pc_wrapper->WaitForConnection(); - rtc::scoped_refptr report = GetStats(local_pc_wrapper); + scoped_refptr report = GetStats(local_pc_wrapper); std::vector outbound_rtps = report->GetStatsOfType(); ASSERT_EQ(outbound_rtps.size(), 1u); @@ -1080,28 +1498,27 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, TEST_F(PeerConnectionEncodingsIntegrationTest, EncodingParameterCodecIsSetBySetParametersAfterNegotiationAudio) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - rtc::scoped_refptr stream = - local_pc_wrapper->GetUserMedia( - /*audio=*/true, {}, /*video=*/false, {}); - rtc::scoped_refptr track = stream->GetAudioTracks()[0]; + scoped_refptr stream = local_pc_wrapper->GetUserMedia( + /*audio=*/true, {}, /*video=*/false, {}); + scoped_refptr track = stream->GetAudioTracks()[0]; - absl::optional pcmu = - local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_AUDIO, + std::optional pcmu = + 
local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::AUDIO, "pcmu"); auto transceiver_or_error = local_pc_wrapper->pc()->AddTransceiver(track); - rtc::scoped_refptr audio_transceiver = + scoped_refptr audio_transceiver = transceiver_or_error.MoveValue(); NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); local_pc_wrapper->WaitForConnection(); remote_pc_wrapper->WaitForConnection(); - rtc::scoped_refptr report = GetStats(local_pc_wrapper); + scoped_refptr report = GetStats(local_pc_wrapper); std::vector outbound_rtps = report->GetStatsOfType(); ASSERT_EQ(outbound_rtps.size(), 1u); @@ -1109,18 +1526,18 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, EXPECT_STRCASENE(("audio/" + pcmu->name).c_str(), codec_name.c_str()); std::string last_codec_id = outbound_rtps[0]->codec_id.value(); - webrtc::RtpParameters parameters = - audio_transceiver->sender()->GetParameters(); + RtpParameters parameters = audio_transceiver->sender()->GetParameters(); parameters.encodings[0].codec = pcmu; EXPECT_TRUE(audio_transceiver->sender()->SetParameters(parameters).ok()); parameters = audio_transceiver->sender()->GetParameters(); EXPECT_EQ(parameters.encodings[0].codec, pcmu); - EXPECT_TRUE_WAIT(IsCodecIdDifferent(local_pc_wrapper, 0, last_codec_id), - kDefaultTimeout.ms()); - - report = GetStats(local_pc_wrapper); + auto error_or_stats = + GetStatsUntil(local_pc_wrapper, + OutboundRtpStatsAre(Contains(CodecIs(Ne(last_codec_id))))); + ASSERT_THAT(error_or_stats, IsRtcOk()); + report = error_or_stats.MoveValue(); outbound_rtps = report->GetStatsOfType(); ASSERT_EQ(outbound_rtps.size(), 1u); codec_name = GetCurrentCodecMimeType(report, *outbound_rtps[0]); @@ -1129,24 +1546,22 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, TEST_F(PeerConnectionEncodingsIntegrationTest, EncodingParameterCodecIsSetBySetParametersBeforeNegotiationVideo) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - rtc::scoped_refptr stream = - local_pc_wrapper->GetUserMedia( - /*audio=*/false, {}, /*video=*/true, {.width = 1280, .height = 720}); - rtc::scoped_refptr track = stream->GetVideoTracks()[0]; + scoped_refptr stream = local_pc_wrapper->GetUserMedia( + /*audio=*/false, {}, /*video=*/true, {.width = 1280, .height = 720}); + scoped_refptr track = stream->GetVideoTracks()[0]; - absl::optional vp9 = - local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO, + std::optional vp9 = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::VIDEO, "vp9"); auto transceiver_or_error = local_pc_wrapper->pc()->AddTransceiver(track); - rtc::scoped_refptr video_transceiver = + scoped_refptr video_transceiver = transceiver_or_error.MoveValue(); - webrtc::RtpParameters parameters = - video_transceiver->sender()->GetParameters(); + RtpParameters parameters = video_transceiver->sender()->GetParameters(); parameters.encodings[0].codec = vp9; parameters.encodings[0].scalability_mode = "L3T3"; EXPECT_TRUE(video_transceiver->sender()->SetParameters(parameters).ok()); @@ -1159,42 +1574,42 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, local_pc_wrapper->WaitForConnection(); remote_pc_wrapper->WaitForConnection(); - EXPECT_TRUE_WAIT( - IsCodecIdDifferentWithScalabilityMode(local_pc_wrapper, 0, "", "L3T3"), - kDefaultTimeout.ms()); - rtc::scoped_refptr report = GetStats(local_pc_wrapper); + auto 
error_or_stats = GetStatsUntil( + local_pc_wrapper, OutboundRtpStatsAre(Contains(AllOf( + ScalabilityModeIs("L3T3"), CodecIs(Ne("")))))); + ASSERT_THAT(error_or_stats, IsRtcOk()); + scoped_refptr report = error_or_stats.MoveValue(); std::vector outbound_rtps = report->GetStatsOfType(); ASSERT_EQ(outbound_rtps.size(), 1u); std::string codec_name = GetCurrentCodecMimeType(report, *outbound_rtps[0]); EXPECT_STRCASEEQ(("video/" + vp9->name).c_str(), codec_name.c_str()); - EXPECT_EQ(outbound_rtps[0]->scalability_mode.ValueOrDefault(""), "L3T3"); + EXPECT_EQ(outbound_rtps[0]->scalability_mode.value_or(""), "L3T3"); } TEST_F(PeerConnectionEncodingsIntegrationTest, EncodingParameterCodecIsSetBySetParametersAfterNegotiationVideo) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - rtc::scoped_refptr stream = - local_pc_wrapper->GetUserMedia( - /*audio=*/false, {}, /*video=*/true, {.width = 1280, .height = 720}); - rtc::scoped_refptr track = stream->GetVideoTracks()[0]; + scoped_refptr stream = local_pc_wrapper->GetUserMedia( + /*audio=*/false, {}, /*video=*/true, {.width = 1280, .height = 720}); + scoped_refptr track = stream->GetVideoTracks()[0]; - absl::optional vp9 = - local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO, + std::optional vp9 = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::VIDEO, "vp9"); auto transceiver_or_error = local_pc_wrapper->pc()->AddTransceiver(track); - rtc::scoped_refptr video_transceiver = + scoped_refptr video_transceiver = transceiver_or_error.MoveValue(); NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); local_pc_wrapper->WaitForConnection(); remote_pc_wrapper->WaitForConnection(); - rtc::scoped_refptr report = GetStats(local_pc_wrapper); + scoped_refptr report = GetStats(local_pc_wrapper); std::vector outbound_rtps = report->GetStatsOfType(); ASSERT_EQ(outbound_rtps.size(), 1u); @@ -1202,8 +1617,7 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, EXPECT_STRCASENE(("audio/" + vp9->name).c_str(), codec_name.c_str()); std::string last_codec_id = outbound_rtps[0]->codec_id.value(); - webrtc::RtpParameters parameters = - video_transceiver->sender()->GetParameters(); + RtpParameters parameters = video_transceiver->sender()->GetParameters(); parameters.encodings[0].codec = vp9; parameters.encodings[0].scalability_mode = "L3T3"; EXPECT_TRUE(video_transceiver->sender()->SetParameters(parameters).ok()); @@ -1212,11 +1626,12 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, EXPECT_EQ(parameters.encodings[0].codec, vp9); EXPECT_EQ(parameters.encodings[0].scalability_mode, "L3T3"); - EXPECT_TRUE_WAIT(IsCodecIdDifferentWithScalabilityMode(local_pc_wrapper, 0, - last_codec_id, "L3T3"), - kDefaultTimeout.ms()); - - report = GetStats(local_pc_wrapper); + auto error_or_stats = GetStatsUntil( + local_pc_wrapper, + OutboundRtpStatsAre(Contains( + AllOf(ScalabilityModeIs("L3T3"), CodecIs(Ne(last_codec_id)))))); + ASSERT_THAT(error_or_stats, IsRtcOk()); + report = error_or_stats.MoveValue(); outbound_rtps = report->GetStatsOfType(); ASSERT_EQ(outbound_rtps.size(), 1u); codec_name = GetCurrentCodecMimeType(report, *outbound_rtps[0]); @@ -1226,22 +1641,22 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, TEST_F(PeerConnectionEncodingsIntegrationTest, AddTransceiverRejectsUnknownCodecParameterAudio) { - rtc::scoped_refptr 
local_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); - webrtc::RtpCodec dummy_codec; - dummy_codec.kind = cricket::MEDIA_TYPE_AUDIO; + RtpCodec dummy_codec; + dummy_codec.kind = webrtc::MediaType::AUDIO; dummy_codec.name = "FOOBAR"; dummy_codec.clock_rate = 90000; dummy_codec.num_channels = 2; - webrtc::RtpTransceiverInit init; - init.direction = webrtc::RtpTransceiverDirection::kSendOnly; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + init.direction = RtpTransceiverDirection::kSendOnly; + RtpEncodingParameters encoding_parameters; encoding_parameters.codec = dummy_codec; init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = - local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init); + local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::AUDIO, init); EXPECT_FALSE(transceiver_or_error.ok()); EXPECT_EQ(transceiver_or_error.error().type(), RTCErrorType::UNSUPPORTED_OPERATION); @@ -1249,21 +1664,21 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, TEST_F(PeerConnectionEncodingsIntegrationTest, AddTransceiverRejectsUnknownCodecParameterVideo) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); - webrtc::RtpCodec dummy_codec; - dummy_codec.kind = cricket::MEDIA_TYPE_VIDEO; + RtpCodec dummy_codec; + dummy_codec.kind = webrtc::MediaType::VIDEO; dummy_codec.name = "FOOBAR"; dummy_codec.clock_rate = 90000; - webrtc::RtpTransceiverInit init; - init.direction = webrtc::RtpTransceiverDirection::kSendOnly; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + init.direction = RtpTransceiverDirection::kSendOnly; + RtpEncodingParameters encoding_parameters; encoding_parameters.codec = dummy_codec; init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = - local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init); + local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::VIDEO, init); EXPECT_FALSE(transceiver_or_error.ok()); EXPECT_EQ(transceiver_or_error.error().type(), RTCErrorType::UNSUPPORTED_OPERATION); @@ -1271,22 +1686,21 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, TEST_F(PeerConnectionEncodingsIntegrationTest, SetParametersRejectsUnknownCodecParameterAudio) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); - webrtc::RtpCodec dummy_codec; - dummy_codec.kind = cricket::MEDIA_TYPE_AUDIO; + RtpCodec dummy_codec; + dummy_codec.kind = webrtc::MediaType::AUDIO; dummy_codec.name = "FOOBAR"; dummy_codec.clock_rate = 90000; dummy_codec.num_channels = 2; auto transceiver_or_error = - local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::AUDIO); ASSERT_TRUE(transceiver_or_error.ok()); - rtc::scoped_refptr audio_transceiver = + scoped_refptr audio_transceiver = transceiver_or_error.MoveValue(); - webrtc::RtpParameters parameters = - audio_transceiver->sender()->GetParameters(); + RtpParameters parameters = audio_transceiver->sender()->GetParameters(); parameters.encodings[0].codec = dummy_codec; RTCError error = audio_transceiver->sender()->SetParameters(parameters); EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION); @@ -1294,38 +1708,39 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, TEST_F(PeerConnectionEncodingsIntegrationTest, SetParametersRejectsUnknownCodecParameterVideo) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr 
local_pc_wrapper = CreatePc(); - webrtc::RtpCodec dummy_codec; - dummy_codec.kind = cricket::MEDIA_TYPE_VIDEO; + RtpCodec dummy_codec; + dummy_codec.kind = webrtc::MediaType::VIDEO; dummy_codec.name = "FOOBAR"; dummy_codec.clock_rate = 90000; auto transceiver_or_error = - local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::VIDEO); ASSERT_TRUE(transceiver_or_error.ok()); - rtc::scoped_refptr video_transceiver = + scoped_refptr video_transceiver = transceiver_or_error.MoveValue(); - webrtc::RtpParameters parameters = - video_transceiver->sender()->GetParameters(); + RtpParameters parameters = video_transceiver->sender()->GetParameters(); parameters.encodings[0].codec = dummy_codec; RTCError error = video_transceiver->sender()->SetParameters(parameters); EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION); } TEST_F(PeerConnectionEncodingsIntegrationTest, - SetParametersRejectsNonPreferredCodecParameterAudio) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); + SetParametersRejectsNonNegotiatedCodecParameterAudio) { + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - absl::optional opus = - local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_AUDIO, + std::optional opus = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::AUDIO, "opus"); ASSERT_TRUE(opus); - std::vector not_opus_codecs = + std::vector not_opus_codecs = local_pc_wrapper->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_AUDIO) + ->GetRtpSenderCapabilities(webrtc::MediaType::AUDIO) .codecs; not_opus_codecs.erase( std::remove_if(not_opus_codecs.begin(), not_opus_codecs.end(), @@ -1335,107 +1750,203 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, not_opus_codecs.end()); auto transceiver_or_error = - local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::AUDIO); ASSERT_TRUE(transceiver_or_error.ok()); - rtc::scoped_refptr audio_transceiver = + scoped_refptr audio_transceiver = transceiver_or_error.MoveValue(); ASSERT_TRUE(audio_transceiver->SetCodecPreferences(not_opus_codecs).ok()); - webrtc::RtpParameters parameters = - audio_transceiver->sender()->GetParameters(); + NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + RtpParameters parameters = audio_transceiver->sender()->GetParameters(); parameters.encodings[0].codec = opus; RTCError error = audio_transceiver->sender()->SetParameters(parameters); EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION); } TEST_F(PeerConnectionEncodingsIntegrationTest, - SetParametersRejectsNonPreferredCodecParameterVideo) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); + SetParametersRejectsNonRemotelyNegotiatedCodecParameterAudio) { + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - absl::optional vp8 = - local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO, - "vp8"); - ASSERT_TRUE(vp8); + std::optional opus = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::AUDIO, + "opus"); + ASSERT_TRUE(opus); - std::vector not_vp8_codecs = + std::vector not_opus_codecs = local_pc_wrapper->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO) 
+ ->GetRtpSenderCapabilities(webrtc::MediaType::AUDIO) .codecs; - not_vp8_codecs.erase( - std::remove_if(not_vp8_codecs.begin(), not_vp8_codecs.end(), + not_opus_codecs.erase( + std::remove_if(not_opus_codecs.begin(), not_opus_codecs.end(), [&](const auto& codec) { - return absl::EqualsIgnoreCase(codec.name, vp8->name); + return absl::EqualsIgnoreCase(codec.name, opus->name); }), - not_vp8_codecs.end()); + not_opus_codecs.end()); auto transceiver_or_error = - local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::AUDIO); ASSERT_TRUE(transceiver_or_error.ok()); - rtc::scoped_refptr video_transceiver = + scoped_refptr audio_transceiver = transceiver_or_error.MoveValue(); - ASSERT_TRUE(video_transceiver->SetCodecPreferences(not_vp8_codecs).ok()); - webrtc::RtpParameters parameters = - video_transceiver->sender()->GetParameters(); - parameters.encodings[0].codec = vp8; - RTCError error = video_transceiver->sender()->SetParameters(parameters); + // Negotiation, create offer and apply it + std::unique_ptr offer = + CreateOffer(local_pc_wrapper); + scoped_refptr p1 = + SetLocalDescription(local_pc_wrapper, offer.get()); + scoped_refptr p2 = + SetRemoteDescription(remote_pc_wrapper, offer.get()); + EXPECT_TRUE(Await({p1, p2})); + + // Update the remote transceiver to reject Opus + std::vector> remote_transceivers = + remote_pc_wrapper->pc()->GetTransceivers(); + ASSERT_TRUE(!remote_transceivers.empty()); + scoped_refptr remote_audio_transceiver = + remote_transceivers[0]; + ASSERT_TRUE( + remote_audio_transceiver->SetCodecPreferences(not_opus_codecs).ok()); + + // Create answer and apply it + std::unique_ptr answer = + CreateAnswer(remote_pc_wrapper); + p1 = SetLocalDescription(remote_pc_wrapper, answer.get()); + p2 = SetRemoteDescription(local_pc_wrapper, answer.get()); + EXPECT_TRUE(Await({p1, p2})); + + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + RtpParameters parameters = audio_transceiver->sender()->GetParameters(); + parameters.encodings[0].codec = opus; + RTCError error = audio_transceiver->sender()->SetParameters(parameters); EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION); } +// Test coverage for https://crbug.com/webrtc/391340599. +// Some web apps add non-standard FMTP parameters to video codecs and because +// they get successfully negotiated due to being ignored by SDP rules, they show +// up in GetParameters().codecs. Using SetParameters() with such codecs should +// still work. TEST_F(PeerConnectionEncodingsIntegrationTest, - SetParametersRejectsNonNegotiatedCodecParameterAudio) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + SetParametersAcceptsMungedCodecFromGetParameters) { + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - absl::optional opus = - local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_AUDIO, - "opus"); - ASSERT_TRUE(opus); + auto transceiver_or_error = + local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::VIDEO); + ASSERT_TRUE(transceiver_or_error.ok()); + scoped_refptr video_transceiver = + transceiver_or_error.MoveValue(); + + std::unique_ptr offer = + CreateOffer(local_pc_wrapper); + // Munge a new parameter for VP8 in the offer. 
+ auto* mcd = offer->description()->contents()[0].media_description(); + ASSERT_THAT(mcd, NotNull()); + std::vector codecs = mcd->codecs(); + ASSERT_THAT(codecs, Contains(Field(&Codec::name, "VP8"))); + auto vp8_codec = absl::c_find_if( + codecs, [](const Codec& codec) { return codec.name == "VP8"; }); + vp8_codec->params.emplace("non-standard-param", "true"); + mcd->set_codecs(codecs); + + scoped_refptr p1 = + SetLocalDescription(local_pc_wrapper, offer.get()); + scoped_refptr p2 = + SetRemoteDescription(remote_pc_wrapper, offer.get()); + EXPECT_TRUE(Await({p1, p2})); + + // Create answer and apply it + std::unique_ptr answer = + CreateAnswer(remote_pc_wrapper); + mcd = answer->description()->contents()[0].media_description(); + ASSERT_THAT(mcd, NotNull()); + codecs = mcd->codecs(); + ASSERT_THAT(codecs, Contains(Field(&Codec::name, "VP8"))); + vp8_codec = absl::c_find_if( + codecs, [](const Codec& codec) { return codec.name == "VP8"; }); + vp8_codec->params.emplace("non-standard-param", "true"); + mcd->set_codecs(codecs); + p1 = SetLocalDescription(remote_pc_wrapper, answer.get()); + p2 = SetRemoteDescription(local_pc_wrapper, answer.get()); + EXPECT_TRUE(Await({p1, p2})); - std::vector not_opus_codecs = + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + RtpParameters parameters = video_transceiver->sender()->GetParameters(); + auto it = absl::c_find_if( + parameters.codecs, [](const auto& codec) { return codec.name == "VP8"; }); + ASSERT_NE(it, parameters.codecs.end()); + RtpCodecParameters& vp8_codec_from_parameters = *it; + EXPECT_THAT(vp8_codec_from_parameters.parameters, + Contains(Pair("non-standard-param", "true"))); + parameters.encodings[0].codec = vp8_codec_from_parameters; + + EXPECT_THAT(video_transceiver->sender()->SetParameters(parameters), + IsRtcOk()); +} + +TEST_F(PeerConnectionEncodingsIntegrationTest, + SetParametersRejectsNonNegotiatedCodecParameterVideo) { + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + std::optional vp8 = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::VIDEO, + "vp8"); + ASSERT_TRUE(vp8); + + std::vector not_vp8_codecs = local_pc_wrapper->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_AUDIO) + ->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO) .codecs; - not_opus_codecs.erase( - std::remove_if(not_opus_codecs.begin(), not_opus_codecs.end(), + not_vp8_codecs.erase( + std::remove_if(not_vp8_codecs.begin(), not_vp8_codecs.end(), [&](const auto& codec) { - return absl::EqualsIgnoreCase(codec.name, opus->name); + return absl::EqualsIgnoreCase(codec.name, vp8->name); }), - not_opus_codecs.end()); + not_vp8_codecs.end()); auto transceiver_or_error = - local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::VIDEO); ASSERT_TRUE(transceiver_or_error.ok()); - rtc::scoped_refptr audio_transceiver = + scoped_refptr video_transceiver = transceiver_or_error.MoveValue(); - ASSERT_TRUE(audio_transceiver->SetCodecPreferences(not_opus_codecs).ok()); + ASSERT_TRUE(video_transceiver->SetCodecPreferences(not_vp8_codecs).ok()); NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); local_pc_wrapper->WaitForConnection(); remote_pc_wrapper->WaitForConnection(); - webrtc::RtpParameters parameters = - audio_transceiver->sender()->GetParameters(); - parameters.encodings[0].codec = opus; - RTCError error = 
audio_transceiver->sender()->SetParameters(parameters); + RtpParameters parameters = video_transceiver->sender()->GetParameters(); + parameters.encodings[0].codec = vp8; + RTCError error = video_transceiver->sender()->SetParameters(parameters); EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION); } TEST_F(PeerConnectionEncodingsIntegrationTest, - SetParametersRejectsNonNegotiatedCodecParameterVideo) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + SetParametersRejectsNonRemotelyNegotiatedCodecParameterVideo) { + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - absl::optional vp8 = - local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO, + std::optional vp8 = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::VIDEO, "vp8"); ASSERT_TRUE(vp8); - std::vector not_vp8_codecs = + std::vector not_vp8_codecs = local_pc_wrapper->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO) + ->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO) .codecs; not_vp8_codecs.erase( std::remove_if(not_vp8_codecs.begin(), not_vp8_codecs.end(), @@ -1445,18 +1956,40 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, not_vp8_codecs.end()); auto transceiver_or_error = - local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::VIDEO); ASSERT_TRUE(transceiver_or_error.ok()); - rtc::scoped_refptr video_transceiver = + scoped_refptr video_transceiver = transceiver_or_error.MoveValue(); - ASSERT_TRUE(video_transceiver->SetCodecPreferences(not_vp8_codecs).ok()); - NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + // Negotiation, create offer and apply it + std::unique_ptr offer = + CreateOffer(local_pc_wrapper); + scoped_refptr p1 = + SetLocalDescription(local_pc_wrapper, offer.get()); + scoped_refptr p2 = + SetRemoteDescription(remote_pc_wrapper, offer.get()); + EXPECT_TRUE(Await({p1, p2})); + + // Update the remote transceiver to reject VP8 + std::vector> remote_transceivers = + remote_pc_wrapper->pc()->GetTransceivers(); + ASSERT_TRUE(!remote_transceivers.empty()); + scoped_refptr remote_video_transceiver = + remote_transceivers[0]; + ASSERT_TRUE( + remote_video_transceiver->SetCodecPreferences(not_vp8_codecs).ok()); + + // Create answer and apply it + std::unique_ptr answer = + CreateAnswer(remote_pc_wrapper); + p1 = SetLocalDescription(remote_pc_wrapper, answer.get()); + p2 = SetRemoteDescription(local_pc_wrapper, answer.get()); + EXPECT_TRUE(Await({p1, p2})); + local_pc_wrapper->WaitForConnection(); remote_pc_wrapper->WaitForConnection(); - webrtc::RtpParameters parameters = - video_transceiver->sender()->GetParameters(); + RtpParameters parameters = video_transceiver->sender()->GetParameters(); parameters.encodings[0].codec = vp8; RTCError error = video_transceiver->sender()->SetParameters(parameters); EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION); @@ -1464,18 +1997,18 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, TEST_F(PeerConnectionEncodingsIntegrationTest, EncodingParametersCodecRemovedAfterNegotiationAudio) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - absl::optional opus = - 
local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_AUDIO, + std::optional opus = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::AUDIO, "opus"); ASSERT_TRUE(opus); - std::vector not_opus_codecs = + std::vector not_opus_codecs = local_pc_wrapper->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_AUDIO) + ->GetRtpSenderCapabilities(webrtc::MediaType::AUDIO) .codecs; not_opus_codecs.erase( std::remove_if(not_opus_codecs.begin(), not_opus_codecs.end(), @@ -1484,24 +2017,23 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, }), not_opus_codecs.end()); - webrtc::RtpTransceiverInit init; - init.direction = webrtc::RtpTransceiverDirection::kSendOnly; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + init.direction = RtpTransceiverDirection::kSendOnly; + RtpEncodingParameters encoding_parameters; encoding_parameters.codec = opus; init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = - local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init); + local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::AUDIO, init); ASSERT_TRUE(transceiver_or_error.ok()); - rtc::scoped_refptr audio_transceiver = + scoped_refptr audio_transceiver = transceiver_or_error.MoveValue(); NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); local_pc_wrapper->WaitForConnection(); remote_pc_wrapper->WaitForConnection(); - webrtc::RtpParameters parameters = - audio_transceiver->sender()->GetParameters(); + RtpParameters parameters = audio_transceiver->sender()->GetParameters(); EXPECT_EQ(parameters.encodings[0].codec, opus); ASSERT_TRUE(audio_transceiver->SetCodecPreferences(not_opus_codecs).ok()); @@ -1511,30 +2043,87 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, EXPECT_FALSE(parameters.encodings[0].codec); } +TEST_F(PeerConnectionEncodingsIntegrationTest, + EncodingParametersRedEnabledBeforeNegotiationAudio) { + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + std::vector send_codecs = + local_pc_wrapper->pc_factory() + ->GetRtpSenderCapabilities(webrtc::MediaType::AUDIO) + .codecs; + + std::optional opus = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::AUDIO, + "opus"); + ASSERT_TRUE(opus); + + std::optional red = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::AUDIO, + "red"); + ASSERT_TRUE(red); + + RtpTransceiverInit init; + init.direction = RtpTransceiverDirection::kSendOnly; + RtpEncodingParameters encoding_parameters; + encoding_parameters.codec = opus; + init.send_encodings.push_back(encoding_parameters); + + auto transceiver_or_error = + local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::AUDIO, init); + ASSERT_TRUE(transceiver_or_error.ok()); + scoped_refptr audio_transceiver = + transceiver_or_error.MoveValue(); + + // Preferring RED over Opus should enable RED with Opus encoding. + send_codecs[0] = red.value(); + send_codecs[1] = opus.value(); + + ASSERT_TRUE(audio_transceiver->SetCodecPreferences(send_codecs).ok()); + NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + RtpParameters parameters = audio_transceiver->sender()->GetParameters(); + EXPECT_EQ(parameters.encodings[0].codec, opus); + EXPECT_EQ(parameters.codecs[0].name, red->name); + + // Check that it's possible to switch back to Opus without RED. 
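// Illustrative sketch, not part of the patch: the RED test above expresses
// "prefer RED, then Opus" by overwriting send_codecs[0] and send_codecs[1]
// before SetCodecPreferences(). A more general way to hoist one codec to the
// front of the preference list while keeping the relative order of the rest
// is a stable reorder. MoveCodecToFront is a hypothetical helper and the
// header paths assume the usual WebRTC layout.
#include <algorithm>
#include <string>
#include <vector>

#include "absl/strings/match.h"
#include "api/rtp_parameters.h"  // webrtc::RtpCodecCapability

void MoveCodecToFront(std::vector<webrtc::RtpCodecCapability>& codecs,
                      const std::string& name) {
  std::stable_partition(codecs.begin(), codecs.end(),
                        [&](const webrtc::RtpCodecCapability& codec) {
                          return absl::EqualsIgnoreCase(codec.name, name);
                        });
}

// Hypothetical usage mirroring the test: hoisting RED first enables RED-wrapped
// Opus; hoisting Opus again switches back to plain Opus.
//   MoveCodecToFront(send_codecs, "red");
//   ASSERT_TRUE(audio_transceiver->SetCodecPreferences(send_codecs).ok());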
+ send_codecs[0] = opus.value(); + send_codecs[1] = red.value(); + + ASSERT_TRUE(audio_transceiver->SetCodecPreferences(send_codecs).ok()); + NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + + parameters = audio_transceiver->sender()->GetParameters(); + EXPECT_EQ(parameters.encodings[0].codec, opus); + EXPECT_EQ(parameters.codecs[0].name, opus->name); +} + TEST_F(PeerConnectionEncodingsIntegrationTest, SetParametersRejectsScalabilityModeForSelectedCodec) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); - absl::optional vp8 = - local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO, + std::optional vp8 = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::VIDEO, "vp8"); ASSERT_TRUE(vp8); - webrtc::RtpTransceiverInit init; - init.direction = webrtc::RtpTransceiverDirection::kSendOnly; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + init.direction = RtpTransceiverDirection::kSendOnly; + RtpEncodingParameters encoding_parameters; encoding_parameters.codec = vp8; encoding_parameters.scalability_mode = "L1T3"; init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = - local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init); + local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::VIDEO, init); ASSERT_TRUE(transceiver_or_error.ok()); - rtc::scoped_refptr video_transceiver = + scoped_refptr video_transceiver = transceiver_or_error.MoveValue(); - webrtc::RtpParameters parameters = - video_transceiver->sender()->GetParameters(); + RtpParameters parameters = video_transceiver->sender()->GetParameters(); parameters.encodings[0].scalability_mode = "L3T3"; RTCError error = video_transceiver->sender()->SetParameters(parameters); EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION); @@ -1542,18 +2131,18 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, TEST_F(PeerConnectionEncodingsIntegrationTest, EncodingParametersCodecRemovedByNegotiationVideo) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - absl::optional vp8 = - local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO, + std::optional vp8 = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::VIDEO, "vp8"); ASSERT_TRUE(vp8); - std::vector not_vp8_codecs = + std::vector not_vp8_codecs = local_pc_wrapper->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO) + ->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO) .codecs; not_vp8_codecs.erase( std::remove_if(not_vp8_codecs.begin(), not_vp8_codecs.end(), @@ -1562,9 +2151,9 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, }), not_vp8_codecs.end()); - webrtc::RtpTransceiverInit init; - init.direction = webrtc::RtpTransceiverDirection::kSendOnly; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + init.direction = RtpTransceiverDirection::kSendOnly; + RtpEncodingParameters encoding_parameters; encoding_parameters.rid = "h"; encoding_parameters.codec = vp8; encoding_parameters.scale_resolution_down_by = 2; @@ -1574,17 +2163,16 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = - local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init); + 
local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::VIDEO, init); ASSERT_TRUE(transceiver_or_error.ok()); - rtc::scoped_refptr video_transceiver = + scoped_refptr video_transceiver = transceiver_or_error.MoveValue(); NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); local_pc_wrapper->WaitForConnection(); remote_pc_wrapper->WaitForConnection(); - webrtc::RtpParameters parameters = - video_transceiver->sender()->GetParameters(); + RtpParameters parameters = video_transceiver->sender()->GetParameters(); ASSERT_EQ(parameters.encodings.size(), 2u); EXPECT_EQ(parameters.encodings[0].codec, vp8); EXPECT_EQ(parameters.encodings[1].codec, vp8); @@ -1601,21 +2189,22 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, AddTransceiverRejectsMixedCodecSimulcast) { // Mixed Codec Simulcast is not yet supported, so we ensure that we reject // such parameters. - rtc::scoped_refptr local_pc_wrapper = CreatePc(); - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - absl::optional vp8 = - local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO, + std::optional vp8 = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::VIDEO, "vp8"); ASSERT_TRUE(vp8); - absl::optional vp9 = - local_pc_wrapper->FindFirstSendCodecWithName(cricket::MEDIA_TYPE_VIDEO, + std::optional vp9 = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::VIDEO, "vp9"); + ASSERT_TRUE(vp9); - webrtc::RtpTransceiverInit init; - init.direction = webrtc::RtpTransceiverDirection::kSendOnly; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + init.direction = RtpTransceiverDirection::kSendOnly; + RtpEncodingParameters encoding_parameters; encoding_parameters.rid = "h"; encoding_parameters.codec = vp8; encoding_parameters.scale_resolution_down_by = 2; @@ -1626,14 +2215,167 @@ TEST_F(PeerConnectionEncodingsIntegrationTest, init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = - local_pc_wrapper->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init); + local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::VIDEO, init); ASSERT_FALSE(transceiver_or_error.ok()); EXPECT_EQ(transceiver_or_error.error().type(), RTCErrorType::UNSUPPORTED_OPERATION); } +TEST_F(PeerConnectionEncodingsIntegrationTest, + AddTransceiverAcceptsMixedCodecSimulcast) { + // Enable WIP mixed codec simulcast support + std::string field_trials = "WebRTC-MixedCodecSimulcast/Enabled/"; + scoped_refptr local_pc_wrapper = + CreatePc(FieldTrials::CreateNoGlobal(field_trials)); + scoped_refptr remote_pc_wrapper = + CreatePc(FieldTrials::CreateNoGlobal(field_trials)); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + std::optional vp8 = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::VIDEO, + "vp8"); + ASSERT_TRUE(vp8); + std::optional vp9 = + local_pc_wrapper->FindFirstSendCodecWithName(webrtc::MediaType::VIDEO, + "vp9"); + ASSERT_TRUE(vp9); + + RtpTransceiverInit init; + init.direction = RtpTransceiverDirection::kSendOnly; + RtpEncodingParameters encoding_parameters; + encoding_parameters.rid = "h"; + encoding_parameters.codec = vp8; + encoding_parameters.scale_resolution_down_by = 2; + init.send_encodings.push_back(encoding_parameters); + encoding_parameters.rid = "f"; + encoding_parameters.codec = vp9; + encoding_parameters.scale_resolution_down_by = 1; + 
init.send_encodings.push_back(encoding_parameters); + + auto transceiver_or_error = + local_pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::VIDEO, init); + ASSERT_TRUE(transceiver_or_error.ok()); +} + +TEST_F(PeerConnectionEncodingsIntegrationTest, ScaleToParameterChecking) { + scoped_refptr pc_wrapper = CreatePc(); + + // AddTransceiver: If `scale_resolution_down_to` is specified on any encoding + // it must be specified on all encodings. + RtpTransceiverInit init; + RtpEncodingParameters encoding; + encoding.scale_resolution_down_to = std::nullopt; + init.send_encodings.push_back(encoding); + encoding.scale_resolution_down_to = {.width = 1280, .height = 720}; + init.send_encodings.push_back(encoding); + auto transceiver_or_error = + pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::VIDEO, init); + EXPECT_FALSE(transceiver_or_error.ok()); + EXPECT_EQ(transceiver_or_error.error().type(), + RTCErrorType::UNSUPPORTED_OPERATION); + + // AddTransceiver: Width and height must not be zero. + init.send_encodings[0].scale_resolution_down_to = {.width = 1280, + .height = 0}; + init.send_encodings[1].scale_resolution_down_to = {.width = 0, .height = 720}; + transceiver_or_error = + pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::VIDEO, init); + EXPECT_FALSE(transceiver_or_error.ok()); + EXPECT_EQ(transceiver_or_error.error().type(), + RTCErrorType::UNSUPPORTED_OPERATION); + + // AddTransceiver: Specifying both `scale_resolution_down_to` and + // `scale_resolution_down_by` is allowed (the latter is ignored). + init.send_encodings[0].scale_resolution_down_to = {.width = 640, + .height = 480}; + init.send_encodings[0].scale_resolution_down_by = 1.0; + init.send_encodings[1].scale_resolution_down_to = {.width = 1280, + .height = 720}; + init.send_encodings[1].scale_resolution_down_by = 2.0; + transceiver_or_error = + pc_wrapper->pc()->AddTransceiver(webrtc::MediaType::VIDEO, init); + ASSERT_TRUE(transceiver_or_error.ok()); + + // SetParameters: If `scale_resolution_down_to` is specified on any active + // encoding it must be specified on all active encodings. + auto sender = transceiver_or_error.value()->sender(); + auto parameters = sender->GetParameters(); + parameters.encodings[0].scale_resolution_down_to = {.width = 640, + .height = 480}; + parameters.encodings[1].scale_resolution_down_to = std::nullopt; + auto error = sender->SetParameters(parameters); + EXPECT_FALSE(error.ok()); + EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION); + // But it's OK not to specify `scale_resolution_down_to` on an inactive + // encoding. + parameters = sender->GetParameters(); + parameters.encodings[0].scale_resolution_down_to = {.width = 640, + .height = 480}; + parameters.encodings[1].active = false; + parameters.encodings[1].scale_resolution_down_to = std::nullopt; + error = sender->SetParameters(parameters); + EXPECT_TRUE(error.ok()); + + // SetParameters: Width and height must not be zero. + sender = transceiver_or_error.value()->sender(); + parameters = sender->GetParameters(); + parameters.encodings[0].scale_resolution_down_to = {.width = 1280, + .height = 0}; + parameters.encodings[1].active = true; + parameters.encodings[1].scale_resolution_down_to = {.width = 0, + .height = 720}; + error = sender->SetParameters(parameters); + EXPECT_FALSE(error.ok()); + EXPECT_EQ(error.type(), RTCErrorType::INVALID_MODIFICATION); + + // SetParameters: Specifying both `scale_resolution_down_to` and + // `scale_resolution_down_by` is allowed (the latter is ignored). 
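// Illustrative paraphrase, not the library's validation code: the
// ScaleToParameterChecking test above pins down three rules for
// scale_resolution_down_to. If any active encoding sets it, every active
// encoding must set it; the requested width and height must be non-zero; and
// scale_resolution_down_by may be set alongside it but is then ignored.
// ScaleToRequestIsValid below is a hypothetical helper restating those checks.
#include <vector>

#include "api/rtp_parameters.h"  // webrtc::RtpEncodingParameters

bool ScaleToRequestIsValid(
    const std::vector<webrtc::RtpEncodingParameters>& encodings) {
  bool any_active_uses_scale_to = false;
  for (const auto& encoding : encodings) {
    if (encoding.active && encoding.scale_resolution_down_to.has_value()) {
      any_active_uses_scale_to = true;
    }
  }
  if (!any_active_uses_scale_to) {
    return true;  // Legacy scale_resolution_down_by path, nothing to check.
  }
  for (const auto& encoding : encodings) {
    if (!encoding.active) {
      continue;  // Inactive encodings are exempt.
    }
    if (!encoding.scale_resolution_down_to.has_value() ||
        encoding.scale_resolution_down_to->width == 0 ||
        encoding.scale_resolution_down_to->height == 0) {
      return false;
    }
    // scale_resolution_down_by may also be present; it is simply ignored.
  }
  return true;
}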
+ parameters = sender->GetParameters(); + parameters.encodings[0].scale_resolution_down_to = {.width = 640, + .height = 480}; + parameters.encodings[0].scale_resolution_down_by = 2.0; + parameters.encodings[1].scale_resolution_down_to = {.width = 1280, + .height = 720}; + parameters.encodings[1].scale_resolution_down_by = 1.0; + error = sender->SetParameters(parameters); + EXPECT_TRUE(error.ok()); +} + +TEST_F(PeerConnectionEncodingsIntegrationTest, + ScaleResolutionDownByIsIgnoredWhenScaleToIsSpecified) { + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); + + scoped_refptr stream = local_pc_wrapper->GetUserMedia( + /*audio=*/false, {}, /*video=*/true, {.width = 640, .height = 360}); + scoped_refptr track = stream->GetVideoTracks()[0]; + + // Configure contradicting scaling factors (180p vs 360p). + RtpTransceiverInit init; + RtpEncodingParameters encoding; + encoding.scale_resolution_down_by = 2.0; + encoding.scale_resolution_down_to = {.width = 640, .height = 360}; + init.send_encodings.push_back(encoding); + auto transceiver_or_error = + local_pc_wrapper->pc()->AddTransceiver(track, init); + + // Negotiate singlecast. + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + Negotiate(local_pc_wrapper, remote_pc_wrapper); + + // Confirm 640x360 is sent. + // If `scale_resolution_down_by` was not ignored we would never ramp up to + // full resolution. + EXPECT_THAT( + GetStatsUntil(local_pc_wrapper, + OutboundRtpStatsAre(ElementsAre(ResolutionIs(640, 360))), + {.timeout = kLongTimeoutForRampingUp}), + IsRtcOk()); +} + // Tests that use the standard path (specifying both `scalability_mode` and -// `scale_resolution_down_by`) should pass for all codecs. +// `scale_resolution_down_by` or `scale_resolution_down_to`) should pass for all +// codecs. class PeerConnectionEncodingsIntegrationParameterizedTest : public PeerConnectionEncodingsIntegrationTest, public ::testing::WithParamInterface { @@ -1646,9 +2388,9 @@ class PeerConnectionEncodingsIntegrationParameterizedTest // TODO(https://crbug.com/webrtc/15011): Increase availability of AV1 or make // it possible to check support at compile-time. bool SkipTestDueToAv1Missing( - rtc::scoped_refptr local_pc_wrapper) { + scoped_refptr local_pc_wrapper) { if (codec_name_ == "AV1" && - !HasSenderVideoCodecCapability(local_pc_wrapper, "AV1")) { + !HasReceiverVideoCodecCapability(local_pc_wrapper, "AV1")) { RTC_LOG(LS_WARNING) << "\n***\nAV1 is not available, skipping test.\n***"; return true; } @@ -1661,24 +2403,24 @@ class PeerConnectionEncodingsIntegrationParameterizedTest }; TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest, AllLayersInactive) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); if (SkipTestDueToAv1Missing(local_pc_wrapper)) { return; } - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - std::vector layers = + std::vector layers = CreateLayers({"f", "h", "q"}, /*active=*/true); - rtc::scoped_refptr transceiver = + scoped_refptr transceiver = AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, layers); std::vector codecs = - GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, codec_name_); + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, codec_name_); transceiver->SetCodecPreferences(codecs); // Standard mode and all layers inactive. 
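// Illustrative sketch, not part of the patch: SkipTestDueToAv1Missing() above
// now keys off receiver capabilities rather than sender capabilities. A
// simplified, hypothetical version of such a probe is shown below; it assumes
// the GetRtpReceiverCapabilities() factory API used elsewhere in this file and
// is not the fixture's actual HasReceiverVideoCodecCapability() helper.
#include <string>

#include "absl/strings/match.h"
#include "api/peer_connection_interface.h"

bool FactoryCanReceiveVideoCodec(
    webrtc::PeerConnectionFactoryInterface& factory,
    const std::string& codec_name) {
  webrtc::RtpCapabilities capabilities =
      factory.GetRtpReceiverCapabilities(webrtc::MediaType::VIDEO);
  for (const webrtc::RtpCodecCapability& codec : capabilities.codecs) {
    if (absl::EqualsIgnoreCase(codec.name, codec_name)) {
      return true;  // e.g. codec_name == "AV1".
    }
  }
  return false;
}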
- rtc::scoped_refptr sender = transceiver->sender(); + scoped_refptr sender = transceiver->sender(); RtpParameters parameters = sender->GetParameters(); ASSERT_THAT(parameters.encodings, SizeIs(3)); parameters.encodings[0].scalability_mode = "L1T3"; @@ -1693,8 +2435,8 @@ TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest, AllLayersInactive) { remote_pc_wrapper->WaitForConnection(); // Ensure no media is flowing (1 second should be enough). - rtc::Thread::Current()->SleepMs(1000); - rtc::scoped_refptr report = GetStats(local_pc_wrapper); + Thread::Current()->SleepMs(1000); + scoped_refptr report = GetStats(local_pc_wrapper); std::vector outbound_rtps = report->GetStatsOfType(); ASSERT_THAT(outbound_rtps, SizeIs(3u)); @@ -1703,24 +2445,25 @@ TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest, AllLayersInactive) { EXPECT_EQ(*outbound_rtps[2]->bytes_sent, 0u); } +// Configure 4:2:1 using `scale_resolution_down_by`. TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest, Simulcast) { - rtc::scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr local_pc_wrapper = CreatePc(); if (SkipTestDueToAv1Missing(local_pc_wrapper)) { return; } - rtc::scoped_refptr remote_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); - std::vector layers = - CreateLayers({"f", "h", "q"}, /*active=*/true); - rtc::scoped_refptr transceiver = + std::vector layers = + CreateLayers({"q", "h", "f"}, /*active=*/true); + scoped_refptr transceiver = AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, layers); std::vector codecs = - GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, codec_name_); + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, codec_name_); transceiver->SetCodecPreferences(codecs); - rtc::scoped_refptr sender = transceiver->sender(); + scoped_refptr sender = transceiver->sender(); RtpParameters parameters = sender->GetParameters(); ASSERT_THAT(parameters.encodings, SizeIs(3)); parameters.encodings[0].scalability_mode = "L1T3"; @@ -1747,12 +2490,17 @@ TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest, Simulcast) { // Wait until media is flowing on all three layers. // Ramp up time is needed before all three layers are sending. - ASSERT_TRUE_WAIT(HasOutboundRtpBytesSent(local_pc_wrapper, 3u), - kLongTimeoutForRampingUp.ms()); - EXPECT_TRUE(OutboundRtpResolutionsAreLessThanOrEqualToExpectations( - local_pc_wrapper, {{"f", 320, 180}, {"h", 640, 360}, {"q", 1280, 720}})); + auto error_or_report = + GetStatsUntil(local_pc_wrapper, HasOutboundRtpBytesSent(3u), + {.timeout = kLongTimeoutForRampingUp}); + ASSERT_THAT(error_or_report, IsRtcOk()); // Verify codec and scalability mode. - rtc::scoped_refptr report = GetStats(local_pc_wrapper); + scoped_refptr report = error_or_report.value(); + auto outbound_rtp_by_rid = GetOutboundRtpStreamStatsByRid(report); + EXPECT_THAT(outbound_rtp_by_rid, + UnorderedElementsAre(Pair("q", ResolutionIs(320, 180)), + Pair("h", ResolutionIs(640, 360)), + Pair("f", ResolutionIs(1280, 720)))); std::vector outbound_rtps = report->GetStatsOfType(); ASSERT_THAT(outbound_rtps, SizeIs(3u)); @@ -1767,6 +2515,529 @@ TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest, Simulcast) { EXPECT_THAT(*outbound_rtps[2]->scalability_mode, StrEq("L1T3")); } +// Configure 4:2:1 using `scale_resolution_down_to`. 
+TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest, + SimulcastWithScaleTo) { + scoped_refptr local_pc_wrapper = CreatePc(); + if (SkipTestDueToAv1Missing(local_pc_wrapper)) { + return; + } + scoped_refptr remote_pc_wrapper = CreatePc(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + std::vector layers = + CreateLayers({"q", "h", "f"}, /*active=*/true); + scoped_refptr transceiver = + AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, + layers); + std::vector codecs = + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, codec_name_); + transceiver->SetCodecPreferences(codecs); + + scoped_refptr sender = transceiver->sender(); + RtpParameters parameters = sender->GetParameters(); + ASSERT_THAT(parameters.encodings, SizeIs(3)); + parameters.encodings[0].scalability_mode = "L1T3"; + parameters.encodings[0].scale_resolution_down_to = {.width = 320, + .height = 180}; + parameters.encodings[1].scalability_mode = "L1T3"; + parameters.encodings[1].scale_resolution_down_to = {.width = 640, + .height = 360}; + parameters.encodings[2].scalability_mode = "L1T3"; + parameters.encodings[2].scale_resolution_down_to = {.width = 1280, + .height = 720}; + sender->SetParameters(parameters); + + NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + // GetParameters() does not report any fallback. + parameters = sender->GetParameters(); + ASSERT_THAT(parameters.encodings, SizeIs(3)); + EXPECT_THAT(parameters.encodings[0].scalability_mode, + Optional(std::string("L1T3"))); + EXPECT_THAT(parameters.encodings[1].scalability_mode, + Optional(std::string("L1T3"))); + EXPECT_THAT(parameters.encodings[2].scalability_mode, + Optional(std::string("L1T3"))); + + // Wait until media is flowing on all three layers. + // Ramp up time is needed before all three layers are sending. + auto error_or_report = + GetStatsUntil(local_pc_wrapper, HasOutboundRtpBytesSent(3u), + {.timeout = kLongTimeoutForRampingUp}); + ASSERT_THAT(error_or_report, IsRtcOk()); + // Verify codec and scalability mode. + scoped_refptr report = error_or_report.value(); + auto outbound_rtp_by_rid = GetOutboundRtpStreamStatsByRid(report); + EXPECT_THAT(outbound_rtp_by_rid, + UnorderedElementsAre(Pair("q", ResolutionIs(320, 180)), + Pair("h", ResolutionIs(640, 360)), + Pair("f", ResolutionIs(1280, 720)))); + // Verify codec and scalability mode. + std::vector outbound_rtps = + report->GetStatsOfType(); + ASSERT_THAT(outbound_rtps, SizeIs(3u)); + EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[0]), + StrCaseEq(mime_type_)); + EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[1]), + StrCaseEq(mime_type_)); + EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[2]), + StrCaseEq(mime_type_)); + EXPECT_THAT(*outbound_rtps[0]->scalability_mode, StrEq("L1T3")); + EXPECT_THAT(*outbound_rtps[1]->scalability_mode, StrEq("L1T3")); + EXPECT_THAT(*outbound_rtps[2]->scalability_mode, StrEq("L1T3")); +} + +// Simulcast starting in 720p 4:2:1 then changing to {180p, 360p, 540p} using +// the `scale_resolution_down_by` API. 
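// Illustrative note, not part of the patch: in the test below, the
// reconfiguration to {180p, 360p, 540p} keeps scale_resolution_down_by = 4.0
// and 2.0 for the two lower layers and uses 1.333333 for the top layer. With a
// 1280x720 capture the factor is derived from the target height,
// 720 / 540 = 1.333..., which maps 1280x720 to roughly 960x540 and, unlike 4.0
// and 2.0, is no longer a power of two. Minimal sketch of that arithmetic:
#include <cmath>
#include <cstdio>

int main() {
  const int capture_width = 1280;
  const int capture_height = 720;
  const int target_height = 540;
  const double scale = static_cast<double>(capture_height) / target_height;
  std::printf("scale_resolution_down_by = %.6f -> %ldx%ld\n", scale,
              std::lround(capture_width / scale),
              std::lround(capture_height / scale));
  // Prints: scale_resolution_down_by = 1.333333 -> 960x540
  return 0;
}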
+TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest, + SimulcastScaleDownByNoLongerPowerOfTwo) { + scoped_refptr local_pc_wrapper = CreatePc(); + if (SkipTestDueToAv1Missing(local_pc_wrapper)) { + return; + } + scoped_refptr remote_pc_wrapper = CreatePc(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + std::vector layers = + CreateLayers({"q", "h", "f"}, /*active=*/true); + scoped_refptr transceiver = + AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, + layers); + std::vector codecs = + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, codec_name_); + transceiver->SetCodecPreferences(codecs); + scoped_refptr sender = transceiver->sender(); + + // Configure {180p, 360p, 720p}. + RtpParameters parameters = sender->GetParameters(); + ASSERT_THAT(parameters.encodings, SizeIs(3)); + parameters.encodings[0].scalability_mode = "L1T1"; + parameters.encodings[0].scale_resolution_down_by = 4.0; + parameters.encodings[1].scalability_mode = "L1T1"; + parameters.encodings[1].scale_resolution_down_by = 2.0; + parameters.encodings[2].scalability_mode = "L1T1"; + parameters.encodings[2].scale_resolution_down_by = 1.0; + sender->SetParameters(parameters); + + NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + // Wait for media to flow on all layers. + // Needed repro step of https://crbug.com/webrtc/369654168: When the same + // LibvpxVp9Encoder instance was used to first produce simulcast and later for + // a single encoding, the previously used simulcast index (= 2) would still be + // set when producing 180p since non-simulcast config does not reset this, + // resulting in the 180p encoding freezing and the 540p encoding having double + // frame rate and toggling between 180p and 540p in resolution. + ASSERT_THAT(GetStatsUntil(local_pc_wrapper, HasOutboundRtpBytesSent(3u), + {.timeout = kLongTimeoutForRampingUp}), + IsRtcOk()); + + // Configure {180p, 360p, 540p}. + parameters = sender->GetParameters(); + parameters.encodings[0].scale_resolution_down_by = 4.0; + parameters.encodings[1].scale_resolution_down_by = 2.0; + parameters.encodings[2].scale_resolution_down_by = 1.333333; + sender->SetParameters(parameters); + + // Wait for the new resolutions to be produced. + auto encoding_resolutions_result = + WaitUntil([&] { return GetStats(local_pc_wrapper); }, + OutboundRtpStatsAre(UnorderedElementsAre( + AllOf(RidIs("q"), ResolutionIs(320, 180)), + AllOf(RidIs("h"), ResolutionIs(640, 360)), + AllOf(RidIs("f"), ResolutionIs(960, 540)))), + {.timeout = kLongTimeoutForRampingUp}); + ASSERT_THAT(encoding_resolutions_result, IsRtcOk()); + + auto outbound_rtp_by_rid = + GetOutboundRtpStreamStatsByRid(encoding_resolutions_result.value()); + ASSERT_THAT(outbound_rtp_by_rid, + UnorderedElementsAre(Key("q"), Key("h"), Key("f"))); + + // Ensure frames continue to be encoded post reconfiguration. 
+ uint64_t frames_encoded_q = + outbound_rtp_by_rid.at("q").frames_encoded.value(); + uint64_t frames_encoded_h = + outbound_rtp_by_rid.at("h").frames_encoded.value(); + uint64_t frames_encoded_f = + outbound_rtp_by_rid.at("f").frames_encoded.value(); + EXPECT_THAT( + GetStatsUntil( + local_pc_wrapper, + OutboundRtpStatsAre(UnorderedElementsAre( + AllOf(RidIs("q"), FramesEncodedIs(Gt(frames_encoded_q))), + AllOf(RidIs("h"), FramesEncodedIs(Gt(frames_encoded_h))), + AllOf(RidIs("f"), FramesEncodedIs(Gt(frames_encoded_f))))), + {.timeout = kLongTimeoutForRampingUp}), + IsRtcOk()); +} + +// Simulcast starting in 720p 4:2:1 then changing to {180p, 360p, 540p} using +// the `scale_resolution_down_to` API. +TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest, + SimulcastScaleToNoLongerPowerOfTwo) { + scoped_refptr local_pc_wrapper = CreatePc(); + if (SkipTestDueToAv1Missing(local_pc_wrapper)) { + return; + } + scoped_refptr remote_pc_wrapper = CreatePc(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + std::vector layers = + CreateLayers({"q", "h", "f"}, /*active=*/true); + scoped_refptr transceiver = + AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, + layers); + std::vector codecs = + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, codec_name_); + transceiver->SetCodecPreferences(codecs); + scoped_refptr sender = transceiver->sender(); + + // Configure {180p, 360p, 720p}. + RtpParameters parameters = sender->GetParameters(); + ASSERT_THAT(parameters.encodings, SizeIs(3)); + parameters.encodings[0].scalability_mode = "L1T1"; + parameters.encodings[0].scale_resolution_down_to = {.width = 320, + .height = 180}; + parameters.encodings[1].scalability_mode = "L1T1"; + parameters.encodings[1].scale_resolution_down_to = {.width = 640, + .height = 360}; + parameters.encodings[2].scalability_mode = "L1T1"; + parameters.encodings[2].scale_resolution_down_to = {.width = 1280, + .height = 720}; + sender->SetParameters(parameters); + + NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + // Wait for media to flow on all layers. + // Needed repro step of https://crbug.com/webrtc/369654168: When the same + // LibvpxVp9Encoder instance was used to first produce simulcast and later for + // a single encoding, the previously used simulcast index (= 2) would still be + // set when producing 180p since non-simulcast config does not reset this, + // resulting in the 180p encoding freezing and the 540p encoding having double + // frame rate and toggling between 180p and 540p in resolution. + ASSERT_THAT(GetStatsUntil(local_pc_wrapper, HasOutboundRtpBytesSent(3u), + {.timeout = kLongTimeoutForRampingUp}), + IsRtcOk()); + + // Configure {180p, 360p, 540p}. + parameters = sender->GetParameters(); + parameters.encodings[0].scale_resolution_down_to = {.width = 320, + .height = 180}; + parameters.encodings[1].scale_resolution_down_to = {.width = 640, + .height = 360}; + parameters.encodings[2].scale_resolution_down_to = {.width = 960, + .height = 540}; + sender->SetParameters(parameters); + + // Wait for the new resolutions to be produced. 
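+  // Expected mapping given the targets set above: q=320x180, h=640x360,
+  // f=960x540.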
+ auto error_or_stats = + GetStatsUntil(local_pc_wrapper, + OutboundRtpStatsAre(UnorderedElementsAre( + AllOf(RidIs("q"), ResolutionIs(320, 180)), + AllOf(RidIs("h"), ResolutionIs(640, 360)), + AllOf(RidIs("f"), ResolutionIs(960, 540)))), + {.timeout = kLongTimeoutForRampingUp}); + ASSERT_THAT(error_or_stats, IsRtcOk()); + + auto outbound_rtp_by_rid = + GetOutboundRtpStreamStatsByRid(error_or_stats.value()); + ASSERT_THAT(outbound_rtp_by_rid, + UnorderedElementsAre(Pair("q", BytesSentIs(Ne(std::nullopt))), + Pair("h", BytesSentIs(Ne(std::nullopt))), + Pair("f", BytesSentIs(Ne(std::nullopt))))); + + // Ensure frames continue to be encoded post reconfiguration. + EXPECT_THAT( + GetStatsUntil( + local_pc_wrapper, + OutboundRtpStatsAre(UnorderedElementsAre( + AllOf(RidIs("q"), + BytesSentIs( + Gt(outbound_rtp_by_rid.at("q").bytes_sent.value()))), + AllOf(RidIs("h"), + BytesSentIs( + Gt(outbound_rtp_by_rid.at("h").bytes_sent.value()))), + AllOf(RidIs("f"), + BytesSentIs( + Gt(outbound_rtp_by_rid.at("f").bytes_sent.value()))))), + {.timeout = kLongTimeoutForRampingUp}), + IsRtcOk()); +} + +// The code path that disables layers based on resolution size should NOT run +// when `scale_resolution_down_to` is specified. (It shouldn't run in any case +// but that is an existing legacy code and non-compliance problem that we don't +// have to repeat here.) +TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest, + LowResolutionSimulcastWithScaleTo) { + scoped_refptr local_pc_wrapper = CreatePc(); + if (SkipTestDueToAv1Missing(local_pc_wrapper)) { + return; + } + scoped_refptr remote_pc_wrapper = CreatePc(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + std::vector layers = + CreateLayers({"q", "h", "f"}, /*active=*/true); + + // Configure {20p,40p,80p} with 2:1 aspect ratio. + RtpTransceiverInit init; + RtpEncodingParameters encoding; + encoding.scalability_mode = "L1T3"; + encoding.rid = "q"; + encoding.scale_resolution_down_to = {.width = 40, .height = 20}; + init.send_encodings.push_back(encoding); + encoding.rid = "h"; + encoding.scale_resolution_down_to = {.width = 80, .height = 40}; + init.send_encodings.push_back(encoding); + encoding.rid = "f"; + encoding.scale_resolution_down_to = {.width = 160, .height = 80}; + init.send_encodings.push_back(encoding); + scoped_refptr stream = local_pc_wrapper->GetUserMedia( + /*audio=*/false, {}, /*video=*/true, {.width = 160, .height = 80}); + scoped_refptr track = stream->GetVideoTracks()[0]; + auto transceiver_or_error = + local_pc_wrapper->pc()->AddTransceiver(track, init); + ASSERT_TRUE(transceiver_or_error.ok()); + scoped_refptr transceiver = + transceiver_or_error.value(); + + std::vector codecs = + GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, codec_name_); + transceiver->SetCodecPreferences(codecs); + + NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + // Wait for media to flow on all layers. + ASSERT_THAT(GetStatsUntil(local_pc_wrapper, HasOutboundRtpBytesSent(3u)), + IsRtcOk()); + // q=20p, h=40p, f=80p. 
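+  // (The 160x80 capture has a 2:1 aspect ratio, so the 40x20, 80x40 and
+  // 160x80 targets are hit exactly and no layer should be disabled for being
+  // "too small".)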
+  EXPECT_THAT(GetStatsUntil(local_pc_wrapper,
+                            OutboundRtpStatsAre(UnorderedElementsAre(
+                                AllOf(RidIs("q"), ResolutionIs(40, 20)),
+                                AllOf(RidIs("h"), ResolutionIs(80, 40)),
+                                AllOf(RidIs("f"), ResolutionIs(160, 80)))),
+                            {.timeout = kLongTimeoutForRampingUp}),
+              IsRtcOk());
+}
+
+TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest,
+       SimulcastEncodingStopWhenRtpEncodingChangeToInactive) {
+  scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  if (SkipTestDueToAv1Missing(local_pc_wrapper)) {
+    return;
+  }
+  scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  std::vector<SimulcastLayer> layers =
+      CreateLayers({"q", "h", "f"}, /*active=*/true);
+  scoped_refptr<RtpTransceiverInterface> transceiver =
+      AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+                                        layers);
+  std::vector<RtpCodecCapability> codecs =
+      GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, codec_name_);
+  transceiver->SetCodecPreferences(codecs);
+
+  scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+  RtpParameters parameters = sender->GetParameters();
+  ASSERT_THAT(parameters.encodings, SizeIs(3));
+  ASSERT_EQ(parameters.encodings[0].rid, "q");
+  parameters.encodings[0].scalability_mode = "L1T3";
+  parameters.encodings[0].scale_resolution_down_by = 4;
+  ASSERT_EQ(parameters.encodings[1].rid, "h");
+  parameters.encodings[1].scalability_mode = "L1T3";
+  parameters.encodings[1].scale_resolution_down_by = 2;
+  ASSERT_EQ(parameters.encodings[2].rid, "f");
+  parameters.encodings[2].scalability_mode = "L1T3";
+  parameters.encodings[2].scale_resolution_down_by = 1;
+  sender->SetParameters(parameters);
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  ASSERT_THAT(GetStatsUntil(local_pc_wrapper,
+                            OutboundRtpStatsAre(Contains(
+                                AllOf(RidIs("f"), FramesEncodedIs(Gt(0))))),
+                            {.timeout = kLongTimeoutForRampingUp}),
+              IsRtcOk());
+
+  // Switch the highest layer to inactive.
+  parameters = sender->GetParameters();
+  ASSERT_THAT(parameters.encodings, SizeIs(3));
+  parameters.encodings[2].active = false;
+  sender->SetParameters(parameters);
+  auto error_or_stats = GetStatsUntil(
+      local_pc_wrapper,
+      OutboundRtpStatsAre(Contains(AllOf(RidIs("f"), Not(Active())))),
+      {.timeout = kLongTimeoutForRampingUp});
+  ASSERT_THAT(error_or_stats, IsRtcOk());
+
+  auto outbound_rtp_by_rid =
+      GetOutboundRtpStreamStatsByRid(error_or_stats.value());
+  int encoded_frames_f = outbound_rtp_by_rid.at("f").frames_encoded.value();
+  int encoded_frames_h = outbound_rtp_by_rid.at("h").frames_encoded.value();
+  int encoded_frames_q = outbound_rtp_by_rid.at("q").frames_encoded.value();
+
+  // Wait until the encoder has encoded another 10 frames on the lower layers.
+  ASSERT_THAT(
+      GetStatsUntil(
+          local_pc_wrapper,
+          OutboundRtpStatsAre(UnorderedElementsAre(
+              AllOf(RidIs("q"), FramesEncodedIs(Gt(encoded_frames_q + 10))),
+              AllOf(RidIs("h"), FramesEncodedIs(Gt(encoded_frames_h + 10))),
+              AllOf(RidIs("f"), FramesEncodedIs(Le(encoded_frames_f + 2))))),
+          {.timeout = kLongTimeoutForRampingUp}),
+      IsRtcOk());
+}
+
+TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest,
+       ScaleToDownscaleAndThenUpscale) {
+  scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  if (SkipTestDueToAv1Missing(local_pc_wrapper)) {
+    return;
+  }
+  scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  std::vector<SimulcastLayer> layers = CreateLayers({"f"}, /*active=*/true);
+
+  // This transceiver receives a 1280x720 source.
+  scoped_refptr<RtpTransceiverInterface> transceiver =
+      AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+                                        layers);
+  std::vector<RtpCodecCapability> codecs =
+      GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, codec_name_);
+  transceiver->SetCodecPreferences(codecs);
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  // Request 640x360, which is the same as scaling down by 2.
+  scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+  RtpParameters parameters = sender->GetParameters();
+  ASSERT_THAT(parameters.encodings, SizeIs(1));
+  parameters.encodings[0].scalability_mode = "L1T3";
+  parameters.encodings[0].scale_resolution_down_to = {.width = 640,
+                                                      .height = 360};
+  sender->SetParameters(parameters);
+  // Confirm 640x360 is sent.
+  ASSERT_THAT(
+      GetStatsUntil(local_pc_wrapper,
+                    OutboundRtpStatsAre(ElementsAre(ResolutionIs(640, 360))),
+                    {.timeout = kLongTimeoutForRampingUp}),
+      IsRtcOk());
+
+  // Test coverage for https://crbug.com/webrtc/361477261:
+  // Due to initial frame dropping, OnFrameDroppedDueToSize() should have
+  // created some resolution restrictions by now. With a 720p input frame, the
+  // restriction is 540p, which is not observable while sending 360p, but it
+  // prevents us from immediately sending 720p. Restrictions are lifted after a
+  // few seconds (when a good QP is reported by the QualityScaler) and 720p
+  // should then be sent. The bug was that the encoder was not reconfigured
+  // when the restrictions were updated, so the restrictions in place at the
+  // time of the SetParameters() call effectively became permanent.
+
+  // Request the full 1280x720 resolution.
+  parameters = sender->GetParameters();
+  parameters.encodings[0].scale_resolution_down_to = {.width = 1280,
+                                                      .height = 720};
+  sender->SetParameters(parameters);
+  // Confirm 1280x720 is sent.
+  EXPECT_THAT(
+      GetStatsUntil(local_pc_wrapper,
+                    OutboundRtpStatsAre(ElementsAre(ResolutionIs(1280, 720))),
+                    {.timeout = kLongTimeoutForRampingUp}),
+      IsRtcOk());
+}
+
+TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest,
+       ScaleToIsOrientationAgnostic) {
+  scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  if (SkipTestDueToAv1Missing(local_pc_wrapper)) {
+    return;
+  }
+  scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  std::vector<SimulcastLayer> layers = CreateLayers({"f"}, /*active=*/true);
+
+  // This transceiver receives a 1280x720 source.
+  scoped_refptr<RtpTransceiverInterface> transceiver =
+      AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+                                        layers);
+  std::vector<RtpCodecCapability> codecs =
+      GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, codec_name_);
+  transceiver->SetCodecPreferences(codecs);
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  // 360x640 is the same as 640x360 due to orientation agnosticism.
+  // The orientation is determined by the frame (1280x720): landscape.
+  scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+  RtpParameters parameters = sender->GetParameters();
+  ASSERT_THAT(parameters.encodings, SizeIs(1));
+  parameters.encodings[0].scale_resolution_down_to = {.width = 360,
+                                                      .height = 640};
+  sender->SetParameters(parameters);
+  // Confirm 640x360 is sent.
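+  // (A 360x640 request applied to landscape 1280x720 frames is interpreted as
+  // 640x360: the orientation of the frame wins, not that of the request.)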
+  EXPECT_THAT(
+      GetStatsUntil(local_pc_wrapper,
+                    OutboundRtpStatsAre(ElementsAre(ResolutionIs(640, 360)))),
+      IsRtcOk());
+}
+
+TEST_P(PeerConnectionEncodingsIntegrationParameterizedTest,
+       ScaleToMaintainsAspectRatio) {
+  scoped_refptr<PeerConnectionTestWrapper> local_pc_wrapper = CreatePc();
+  if (SkipTestDueToAv1Missing(local_pc_wrapper)) {
+    return;
+  }
+  scoped_refptr<PeerConnectionTestWrapper> remote_pc_wrapper = CreatePc();
+  ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper);
+
+  std::vector<SimulcastLayer> layers = CreateLayers({"f"}, /*active=*/true);
+
+  // This transceiver receives a 1280x720 source.
+  scoped_refptr<RtpTransceiverInterface> transceiver =
+      AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper,
+                                        layers);
+  std::vector<RtpCodecCapability> codecs =
+      GetCapabilitiesAndRestrictToCodec(remote_pc_wrapper, codec_name_);
+  transceiver->SetCodecPreferences(codecs);
+
+  NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper);
+  local_pc_wrapper->WaitForConnection();
+  remote_pc_wrapper->WaitForConnection();
+
+  // Restrict the height more than the width; the scaling factor needed for
+  // the height should also be applied to the width in order to maintain the
+  // frame's aspect ratio.
+  scoped_refptr<RtpSenderInterface> sender = transceiver->sender();
+  RtpParameters parameters = sender->GetParameters();
+  ASSERT_THAT(parameters.encodings, SizeIs(1));
+  parameters.encodings[0].scale_resolution_down_to = {.width = 1280,
+                                                      .height = 360};
+  sender->SetParameters(parameters);
+  // Confirm 640x360 is sent.
+  EXPECT_THAT(
+      GetStatsUntil(local_pc_wrapper,
+                    OutboundRtpStatsAre(ElementsAre(ResolutionIs(640, 360)))),
+      IsRtcOk());
+}
+
 INSTANTIATE_TEST_SUITE_P(StandardPath,
                          PeerConnectionEncodingsIntegrationParameterizedTest,
                          ::testing::Values("VP8",
@@ -1777,4 +3048,379 @@ INSTANTIATE_TEST_SUITE_P(StandardPath,
                                            "AV1"),
                          StringParamToString());
 
+// These tests use fake encoders and decoders, allowing testing of codec
+// preferences, SDP negotiation and get/setParameters(). But because the codec
+// implementations are fake, these tests do not encode or decode any frames.
+class PeerConnectionEncodingsFakeCodecsIntegrationTest
+    : public PeerConnectionEncodingsIntegrationTest {
+ public:
+#ifdef RTC_ENABLE_H265
+  scoped_refptr<PeerConnectionTestWrapper> CreatePcWithFakeH265(
+      std::unique_ptr<FieldTrials> field_trials = nullptr) {
+    std::unique_ptr<FakeWebRtcVideoEncoderFactory> video_encoder_factory =
+        std::make_unique<FakeWebRtcVideoEncoderFactory>();
+    video_encoder_factory->AddSupportedVideoCodec(
+        SdpVideoFormat("H265",
+                       {{"profile-id", "1"},
+                        {"tier-flag", "0"},
+                        {"level-id", "156"},
+                        {"tx-mode", "SRST"}},
+                       {ScalabilityMode::kL1T1}));
+    std::unique_ptr<FakeWebRtcVideoDecoderFactory> video_decoder_factory =
+        std::make_unique<FakeWebRtcVideoDecoderFactory>();
+    video_decoder_factory->AddSupportedVideoCodecType("H265");
+    auto pc_wrapper = make_ref_counted<PeerConnectionTestWrapper>(
+        "pc", &pss_, background_thread_.get(), background_thread_.get());
+    pc_wrapper->CreatePc(
+        {}, CreateBuiltinAudioEncoderFactory(),
+        CreateBuiltinAudioDecoderFactory(), std::move(video_encoder_factory),
+        std::move(video_decoder_factory), std::move(field_trials));
+    return pc_wrapper;
+  }
+#endif  // RTC_ENABLE_H265
+
+  // Creates a PC where we have H264 with one sendonly, one recvonly and one
+  // sendrecv "profile-level-id". The sendrecv one is constrained baseline.
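+  // In this fake setup, "profile-level-id" 42f00b is the constrained-baseline
+  // profile registered with both factories (sendrecv), 640034 is only added
+  // to the encoder factory (sendonly) and f4001f is only added to the decoder
+  // factory (recvonly).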
+ scoped_refptr CreatePcWithUnidirectionalH264( + std::unique_ptr field_trials = nullptr) { + std::unique_ptr video_encoder_factory = + std::make_unique(); + SdpVideoFormat h264_constrained_baseline = + SdpVideoFormat("H264", + {{"level-asymmetry-allowed", "1"}, + {"packetization-mode", "1"}, + {"profile-level-id", "42f00b"}}, // sendrecv + {ScalabilityMode::kL1T1}); + video_encoder_factory->AddSupportedVideoCodec(h264_constrained_baseline); + video_encoder_factory->AddSupportedVideoCodec( + SdpVideoFormat("H264", + {{"level-asymmetry-allowed", "1"}, + {"packetization-mode", "1"}, + {"profile-level-id", "640034"}}, // sendonly + {ScalabilityMode::kL1T1})); + std::unique_ptr video_decoder_factory = + std::make_unique(); + video_decoder_factory->AddSupportedVideoCodec(h264_constrained_baseline); + video_decoder_factory->AddSupportedVideoCodec( + SdpVideoFormat("H264", + {{"level-asymmetry-allowed", "1"}, + {"packetization-mode", "1"}, + {"profile-level-id", "f4001f"}}, // recvonly + {ScalabilityMode::kL1T1})); + auto pc_wrapper = make_ref_counted( + "pc", &pss_, background_thread_.get(), background_thread_.get()); + pc_wrapper->CreatePc( + {}, CreateBuiltinAudioEncoderFactory(), + CreateBuiltinAudioDecoderFactory(), std::move(video_encoder_factory), + std::move(video_decoder_factory), std::move(field_trials)); + return pc_wrapper; + } + + std::string LocalDescriptionStr(PeerConnectionTestWrapper* pc_wrapper) { + const SessionDescriptionInterface* local_description = + pc_wrapper->pc()->local_description(); + if (!local_description) { + return ""; + } + std::string str; + if (!local_description->ToString(&str)) { + return ""; + } + return str; + } +}; + +#ifdef RTC_ENABLE_H265 +TEST_F(PeerConnectionEncodingsFakeCodecsIntegrationTest, H265Singlecast) { + scoped_refptr local_pc_wrapper = + CreatePcWithFakeH265(); + scoped_refptr remote_pc_wrapper = + CreatePcWithFakeH265(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + scoped_refptr transceiver = + local_pc_wrapper->pc() + ->AddTransceiver(webrtc::MediaType::VIDEO) + .MoveValue(); + std::vector preferred_codecs = + GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "H265"); + transceiver->SetCodecPreferences(preferred_codecs); + + Negotiate(local_pc_wrapper, remote_pc_wrapper); + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + // Verify codec. + scoped_refptr report = GetStats(local_pc_wrapper); + std::vector outbound_rtps = + report->GetStatsOfType(); + ASSERT_THAT(outbound_rtps, SizeIs(1u)); + EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[0]), + StrCaseEq("video/H265")); +} + +TEST_F(PeerConnectionEncodingsFakeCodecsIntegrationTest, H265Simulcast) { + scoped_refptr local_pc_wrapper = + CreatePcWithFakeH265(); + scoped_refptr remote_pc_wrapper = + CreatePcWithFakeH265(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + std::vector layers = + CreateLayers({"q", "h", "f"}, /*active=*/true); + + scoped_refptr transceiver = + AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, + layers); + std::vector preferred_codecs = + GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "H265"); + transceiver->SetCodecPreferences(preferred_codecs); + + NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + // Wait until all outbound RTPs exist. 
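+  // (Because the codecs are fake, no frames are encoded; we only wait for the
+  // three outbound-rtp stats objects ("q", "h", "f") to appear, not for any
+  // bytes to be sent.)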
+ EXPECT_THAT( + GetStatsUntil(local_pc_wrapper, OutboundRtpStatsAre(UnorderedElementsAre( + AllOf(RidIs("q")), AllOf(RidIs("h")), + AllOf(RidIs("f"))))), + IsRtcOk()); + + // Verify codec. + scoped_refptr report = GetStats(local_pc_wrapper); + std::vector outbound_rtps = + report->GetStatsOfType(); + ASSERT_THAT(outbound_rtps, SizeIs(3u)); + EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[0]), + StrCaseEq("video/H265")); + EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[1]), + StrCaseEq("video/H265")); + EXPECT_THAT(GetCurrentCodecMimeType(report, *outbound_rtps[2]), + StrCaseEq("video/H265")); +} + +TEST_F(PeerConnectionEncodingsFakeCodecsIntegrationTest, + H265SetParametersIgnoresLevelId) { + scoped_refptr local_pc_wrapper = + CreatePcWithFakeH265(); + scoped_refptr remote_pc_wrapper = + CreatePcWithFakeH265(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + std::vector layers = CreateLayers({"f"}, /*active=*/true); + + scoped_refptr transceiver = + AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, + layers); + std::vector preferred_codecs = + GetCapabilitiesAndRestrictToCodec(local_pc_wrapper, "H265"); + transceiver->SetCodecPreferences(preferred_codecs); + scoped_refptr sender = transceiver->sender(); + + NegotiateWithSimulcastTweaks(local_pc_wrapper, remote_pc_wrapper); + local_pc_wrapper->WaitForConnection(); + remote_pc_wrapper->WaitForConnection(); + + // This includes non-codecs like rtx, red and flexfec too so we need to find + // H265. + std::vector sender_codecs = + local_pc_wrapper->pc_factory() + ->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO) + .codecs; + auto it = std::find_if(sender_codecs.begin(), sender_codecs.end(), + [](const RtpCodecCapability codec_capability) { + return codec_capability.name == "H265"; + }); + ASSERT_NE(it, sender_codecs.end()); + RtpCodecCapability& h265_codec = *it; + + // SetParameters() without changing level-id. + EXPECT_EQ(h265_codec.parameters["level-id"], "156"); + { + RtpParameters parameters = sender->GetParameters(); + ASSERT_THAT(parameters.encodings, SizeIs(1)); + parameters.encodings[0].codec = h265_codec; + ASSERT_THAT(sender->SetParameters(parameters), IsRtcOk()); + } + // SetParameters() with a lower level-id. + h265_codec.parameters["level-id"] = "30"; + { + RtpParameters parameters = sender->GetParameters(); + ASSERT_THAT(parameters.encodings, SizeIs(1)); + parameters.encodings[0].codec = h265_codec; + ASSERT_THAT(sender->SetParameters(parameters), IsRtcOk()); + } + // SetParameters() with a higher level-id. + h265_codec.parameters["level-id"] = "180"; + { + RtpParameters parameters = sender->GetParameters(); + ASSERT_THAT(parameters.encodings, SizeIs(1)); + parameters.encodings[0].codec = h265_codec; + ASSERT_THAT(sender->SetParameters(parameters), IsRtcOk()); + } +} +#endif // RTC_ENABLE_H265 + +TEST_F(PeerConnectionEncodingsFakeCodecsIntegrationTest, + H264UnidirectionalNegotiation) { + scoped_refptr local_pc_wrapper = + CreatePcWithUnidirectionalH264(); + scoped_refptr remote_pc_wrapper = + CreatePcWithUnidirectionalH264(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + scoped_refptr transceiver = + local_pc_wrapper->pc() + ->AddTransceiver(webrtc::MediaType::VIDEO) + .MoveValue(); + + // Filter on codec name and assert that sender capabilities have codecs for + // {sendrecv, sendonly} and the receiver capabilities have codecs for + // {sendrecv, recvonly}. 
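+  // (The assertions below assume the shared constrained-baseline codec is
+  // listed first in both capability lists, so index 0 matches across send and
+  // receive while index 1 holds the direction-specific profile.)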
+ std::vector send_codecs = + local_pc_wrapper->pc_factory() + ->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO) + .codecs; + send_codecs.erase(std::remove_if(send_codecs.begin(), send_codecs.end(), + [](const RtpCodecCapability& codec) { + return codec.name != "H264"; + }), + send_codecs.end()); + std::vector recv_codecs = + local_pc_wrapper->pc_factory() + ->GetRtpReceiverCapabilities(webrtc::MediaType::VIDEO) + .codecs; + recv_codecs.erase(std::remove_if(recv_codecs.begin(), recv_codecs.end(), + [](const RtpCodecCapability& codec) { + RTC_LOG(LS_ERROR) << codec.name; + return codec.name != "H264"; + }), + recv_codecs.end()); + ASSERT_THAT(send_codecs, SizeIs(2u)); + ASSERT_THAT(recv_codecs, SizeIs(2u)); + EXPECT_EQ(send_codecs[0], recv_codecs[0]); + EXPECT_NE(send_codecs[1], recv_codecs[1]); + RtpCodecCapability& sendrecv_codec = send_codecs[0]; + RtpCodecCapability& sendonly_codec = send_codecs[1]; + RtpCodecCapability& recvonly_codec = recv_codecs[1]; + + // Preferring sendonly + recvonly on a sendrecv transceiver is the same as + // not having any preferences, meaning the sendrecv codec (not listed) is the + // one being negotiated. + std::vector preferred_codecs = {sendonly_codec, + recvonly_codec}; + EXPECT_THAT(transceiver->SetCodecPreferences(preferred_codecs), IsRtcOk()); + EXPECT_THAT( + transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendRecv), + IsRtcOk()); + Negotiate(local_pc_wrapper, remote_pc_wrapper); + std::string local_sdp = LocalDescriptionStr(local_pc_wrapper.get()); + EXPECT_THAT(local_sdp, + HasSubstr(sendrecv_codec.parameters["profile-level-id"])); + EXPECT_THAT(local_sdp, + Not(HasSubstr(sendonly_codec.parameters["profile-level-id"]))); + EXPECT_THAT(local_sdp, + Not(HasSubstr(recvonly_codec.parameters["profile-level-id"]))); + + // Prefer all codecs and expect that the SDP offer contains the relevant + // codecs after filtering. Complete O/A each time. + preferred_codecs = {sendrecv_codec, sendonly_codec, recvonly_codec}; + EXPECT_THAT(transceiver->SetCodecPreferences(preferred_codecs), IsRtcOk()); + // Transceiver direction: sendrecv. + EXPECT_THAT( + transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendRecv), + IsRtcOk()); + Negotiate(local_pc_wrapper, remote_pc_wrapper); + local_sdp = LocalDescriptionStr(local_pc_wrapper.get()); + EXPECT_THAT(local_sdp, + HasSubstr(sendrecv_codec.parameters["profile-level-id"])); + EXPECT_THAT(local_sdp, + Not(HasSubstr(sendonly_codec.parameters["profile-level-id"]))); + EXPECT_THAT(local_sdp, + Not(HasSubstr(recvonly_codec.parameters["profile-level-id"]))); + // Transceiver direction: sendonly. + EXPECT_THAT( + transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendOnly), + IsRtcOk()); + Negotiate(local_pc_wrapper, remote_pc_wrapper); + local_sdp = LocalDescriptionStr(local_pc_wrapper.get()); + EXPECT_THAT(local_sdp, + HasSubstr(sendrecv_codec.parameters["profile-level-id"])); + EXPECT_THAT(local_sdp, + HasSubstr(sendonly_codec.parameters["profile-level-id"])); + EXPECT_THAT(local_sdp, + Not(HasSubstr(recvonly_codec.parameters["profile-level-id"]))); + // Transceiver direction: recvonly. 
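+  // For a recvonly offer, only the sendrecv and recvonly profiles are expected
+  // to survive filtering; the sendonly profile should be absent from the SDP.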
+ EXPECT_THAT( + transceiver->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly), + IsRtcOk()); + Negotiate(local_pc_wrapper, remote_pc_wrapper); + local_sdp = LocalDescriptionStr(local_pc_wrapper.get()); + EXPECT_THAT(local_sdp, + HasSubstr(sendrecv_codec.parameters["profile-level-id"])); + EXPECT_THAT(local_sdp, + Not(HasSubstr(sendonly_codec.parameters["profile-level-id"]))); + EXPECT_THAT(local_sdp, + HasSubstr(recvonly_codec.parameters["profile-level-id"])); + + // Test that offering a sendonly codec on a sendonly transceiver is possible. + // - Note that we don't complete the negotiation this time because we're not + // capable of receiving the codec. + preferred_codecs = {sendonly_codec}; + EXPECT_THAT(transceiver->SetCodecPreferences(preferred_codecs), IsRtcOk()); + EXPECT_THAT( + transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendOnly), + IsRtcOk()); + std::unique_ptr offer = + CreateOffer(local_pc_wrapper); + EXPECT_TRUE(Await({SetLocalDescription(local_pc_wrapper, offer.get())})); + local_sdp = LocalDescriptionStr(local_pc_wrapper.get()); + EXPECT_THAT(local_sdp, + Not(HasSubstr(sendrecv_codec.parameters["profile-level-id"]))); + EXPECT_THAT(local_sdp, + HasSubstr(sendonly_codec.parameters["profile-level-id"])); + EXPECT_THAT(local_sdp, + Not(HasSubstr(recvonly_codec.parameters["profile-level-id"]))); + // Test that offering recvonly codec on a recvonly transceiver is possible. + // - Note that we don't complete the negotiation this time because we're not + // capable of sending the codec. + preferred_codecs = {recvonly_codec}; + EXPECT_THAT(transceiver->SetCodecPreferences(preferred_codecs), IsRtcOk()); + EXPECT_THAT( + transceiver->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly), + IsRtcOk()); + offer = CreateOffer(local_pc_wrapper); + EXPECT_TRUE(Await({SetLocalDescription(local_pc_wrapper, offer.get())})); + local_sdp = LocalDescriptionStr(local_pc_wrapper.get()); + EXPECT_THAT(local_sdp, + Not(HasSubstr(sendrecv_codec.parameters["profile-level-id"]))); + EXPECT_THAT(local_sdp, + Not(HasSubstr(sendonly_codec.parameters["profile-level-id"]))); + EXPECT_THAT(local_sdp, + HasSubstr(recvonly_codec.parameters["profile-level-id"])); +} + +// Regression test for https://issues.chromium.org/issues/399667359 +TEST_F(PeerConnectionEncodingsIntegrationTest, + SimulcastNotSupportedGetParametersDoesNotCrash) { + scoped_refptr local_pc_wrapper = CreatePc(); + scoped_refptr remote_pc_wrapper = CreatePc(); + ExchangeIceCandidates(local_pc_wrapper, remote_pc_wrapper); + + std::vector layers = + CreateLayers({"f", "q"}, /*active=*/true); + scoped_refptr transceiver = + AddTransceiverWithSimulcastLayers(local_pc_wrapper, remote_pc_wrapper, + layers); + // Negotiate - receiver will reject simulcast, so the 2nd layer will be + // disabled + Negotiate(local_pc_wrapper, remote_pc_wrapper); + // Negotiate again without simulcast. 
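+  // (With the receiver-rejected second layer disabled, the follow-up
+  // GetParameters()/SetParameters() on the sender must not crash; see the bug
+  // linked above.)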
+ Negotiate(local_pc_wrapper, remote_pc_wrapper); + + RtpParameters parameters = transceiver->sender()->GetParameters(); + EXPECT_TRUE(transceiver->sender()->SetParameters(parameters).ok()); +} + } // namespace webrtc diff --git a/pc/peer_connection_end_to_end_unittest.cc b/pc/peer_connection_end_to_end_unittest.cc index a21d455ec5..2fcf4dc435 100644 --- a/pc/peer_connection_end_to_end_unittest.cc +++ b/pc/peer_connection_end_to_end_unittest.cc @@ -13,13 +13,12 @@ #include #include #include +#include #include -#include #include #include #include "absl/strings/match.h" -#include "absl/types/optional.h" #include "api/audio_codecs/L16/audio_decoder_L16.h" #include "api/audio_codecs/L16/audio_encoder_L16.h" #include "api/audio_codecs/audio_codec_pair_id.h" @@ -34,19 +33,23 @@ #include "api/audio_codecs/opus_audio_encoder_factory.h" #include "api/audio_options.h" #include "api/data_channel_interface.h" -#include "api/media_stream_interface.h" +#include "api/environment/environment.h" +#include "api/make_ref_counted.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" +#include "api/rtp_parameters.h" #include "api/scoped_refptr.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" #include "media/sctp/sctp_transport_internal.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/gunit.h" #include "rtc_base/physical_socket_server.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" #ifdef WEBRTC_ANDROID #include "pc/test/android_test_initializer.h" @@ -63,11 +66,10 @@ using ::testing::AtLeast; using ::testing::Invoke; using ::testing::StrictMock; using ::testing::Values; - -using webrtc::DataChannelInterface; -using webrtc::MediaStreamInterface; -using webrtc::PeerConnectionInterface; -using webrtc::SdpSemantics; +using ::webrtc::DataChannelInterface; +using ::webrtc::Environment; +using ::webrtc::PeerConnectionInterface; +using ::webrtc::SdpSemantics; namespace { @@ -78,16 +80,17 @@ const int kMaxWait = 25000; class PeerConnectionEndToEndBaseTest : public sigslot::has_slots<>, public ::testing::Test { public: - typedef std::vector> DataChannelList; + typedef std::vector> + DataChannelList; explicit PeerConnectionEndToEndBaseTest(SdpSemantics sdp_semantics) - : network_thread_(std::make_unique(&pss_)), - worker_thread_(rtc::Thread::Create()) { + : network_thread_(std::make_unique(&pss_)), + worker_thread_(webrtc::Thread::Create()) { RTC_CHECK(network_thread_->Start()); RTC_CHECK(worker_thread_->Start()); - caller_ = rtc::make_ref_counted( + caller_ = webrtc::make_ref_counted( "caller", &pss_, network_thread_.get(), worker_thread_.get()); - callee_ = rtc::make_ref_counted( + callee_ = webrtc::make_ref_counted( "callee", &pss_, network_thread_.get(), worker_thread_.get()); webrtc::PeerConnectionInterface::IceServer ice_server; ice_server.uri = "stun:stun.l.google.com:19302"; @@ -100,10 +103,11 @@ class PeerConnectionEndToEndBaseTest : public sigslot::has_slots<>, } void CreatePcs( - rtc::scoped_refptr audio_encoder_factory1, - rtc::scoped_refptr audio_decoder_factory1, - rtc::scoped_refptr audio_encoder_factory2, - rtc::scoped_refptr audio_decoder_factory2) { + webrtc::scoped_refptr audio_encoder_factory1, + webrtc::scoped_refptr audio_decoder_factory1, + webrtc::scoped_refptr audio_encoder_factory2, + webrtc::scoped_refptr + audio_decoder_factory2) { EXPECT_TRUE(caller_->CreatePc(config_, 
audio_encoder_factory1, audio_decoder_factory1)); EXPECT_TRUE(callee_->CreatePc(config_, audio_encoder_factory2, @@ -117,19 +121,20 @@ class PeerConnectionEndToEndBaseTest : public sigslot::has_slots<>, } void CreatePcs( - rtc::scoped_refptr audio_encoder_factory, - rtc::scoped_refptr audio_decoder_factory) { + webrtc::scoped_refptr audio_encoder_factory, + webrtc::scoped_refptr + audio_decoder_factory) { CreatePcs(audio_encoder_factory, audio_decoder_factory, audio_encoder_factory, audio_decoder_factory); } void GetAndAddUserMedia() { - cricket::AudioOptions audio_options; + webrtc::AudioOptions audio_options; GetAndAddUserMedia(true, audio_options, true); } void GetAndAddUserMedia(bool audio, - const cricket::AudioOptions& audio_options, + const webrtc::AudioOptions& audio_options, bool video) { caller_->GetAndAddUserMedia(audio, audio_options, video); callee_->GetAndAddUserMedia(audio, audio_options, video); @@ -152,12 +157,12 @@ class PeerConnectionEndToEndBaseTest : public sigslot::has_slots<>, void OnCallerAddedDataChanel(DataChannelInterface* dc) { caller_signaled_data_channels_.push_back( - rtc::scoped_refptr(dc)); + webrtc::scoped_refptr(dc)); } void OnCalleeAddedDataChannel(DataChannelInterface* dc) { callee_signaled_data_channels_.push_back( - rtc::scoped_refptr(dc)); + webrtc::scoped_refptr(dc)); } // Tests that `dc1` and `dc2` can send to and receive from each other. @@ -183,14 +188,24 @@ class PeerConnectionEndToEndBaseTest : public sigslot::has_slots<>, } EXPECT_TRUE(dc1->Send(buffer)); - EXPECT_EQ_WAIT(buffer.data, - rtc::CopyOnWriteBuffer(dc2_observer->last_message()), - kMaxWait); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return webrtc::CopyOnWriteBuffer(dc2_observer->last_message()); + }, + ::testing::Eq(buffer.data), + {.timeout = webrtc::TimeDelta::Millis(kMaxWait)}), + webrtc::IsRtcOk()); EXPECT_TRUE(dc2->Send(buffer)); - EXPECT_EQ_WAIT(buffer.data, - rtc::CopyOnWriteBuffer(dc1_observer->last_message()), - kMaxWait); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { + return webrtc::CopyOnWriteBuffer(dc1_observer->last_message()); + }, + ::testing::Eq(buffer.data), + {.timeout = webrtc::TimeDelta::Millis(kMaxWait)}), + webrtc::IsRtcOk()); EXPECT_EQ(1U, dc1_observer->received_message_count()); EXPECT_EQ(size, dc1_observer->last_message().length()); @@ -201,11 +216,22 @@ class PeerConnectionEndToEndBaseTest : public sigslot::has_slots<>, void WaitForDataChannelsToOpen(DataChannelInterface* local_dc, const DataChannelList& remote_dc_list, size_t remote_dc_index) { - EXPECT_EQ_WAIT(DataChannelInterface::kOpen, local_dc->state(), kMaxWait); - - ASSERT_TRUE_WAIT(remote_dc_list.size() > remote_dc_index, kMaxWait); - EXPECT_EQ_WAIT(DataChannelInterface::kOpen, - remote_dc_list[remote_dc_index]->state(), kMaxWait); + EXPECT_THAT( + webrtc::WaitUntil([&] { return local_dc->state(); }, + ::testing::Eq(DataChannelInterface::kOpen), + {.timeout = webrtc::TimeDelta::Millis(kMaxWait)}), + webrtc::IsRtcOk()); + + ASSERT_THAT( + webrtc::WaitUntil([&] { return remote_dc_list.size(); }, + ::testing::Gt(remote_dc_index), + {.timeout = webrtc::TimeDelta::Millis(kMaxWait)}), + webrtc::IsRtcOk()); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return remote_dc_list[remote_dc_index]->state(); }, + ::testing::Eq(DataChannelInterface::kOpen), + {.timeout = webrtc::TimeDelta::Millis(kMaxWait)}), + webrtc::IsRtcOk()); EXPECT_EQ(local_dc->id(), remote_dc_list[remote_dc_index]->id()); } @@ -213,18 +239,25 @@ class PeerConnectionEndToEndBaseTest : public sigslot::has_slots<>, const DataChannelList& 
remote_dc_list, size_t remote_dc_index) { local_dc->Close(); - EXPECT_EQ_WAIT(DataChannelInterface::kClosed, local_dc->state(), kMaxWait); - EXPECT_EQ_WAIT(DataChannelInterface::kClosed, - remote_dc_list[remote_dc_index]->state(), kMaxWait); + EXPECT_THAT( + webrtc::WaitUntil([&] { return local_dc->state(); }, + ::testing::Eq(DataChannelInterface::kClosed), + {.timeout = webrtc::TimeDelta::Millis(kMaxWait)}), + webrtc::IsRtcOk()); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return remote_dc_list[remote_dc_index]->state(); }, + ::testing::Eq(DataChannelInterface::kClosed), + {.timeout = webrtc::TimeDelta::Millis(kMaxWait)}), + webrtc::IsRtcOk()); } protected: - rtc::AutoThread main_thread_; - rtc::PhysicalSocketServer pss_; - std::unique_ptr network_thread_; - std::unique_ptr worker_thread_; - rtc::scoped_refptr caller_; - rtc::scoped_refptr callee_; + webrtc::AutoThread main_thread_; + webrtc::PhysicalSocketServer pss_; + std::unique_ptr network_thread_; + std::unique_ptr worker_thread_; + webrtc::scoped_refptr caller_; + webrtc::scoped_refptr callee_; DataChannelList caller_signaled_data_channels_; DataChannelList callee_signaled_data_channels_; webrtc::PeerConnectionInterface::RTCConfiguration config_; @@ -282,11 +315,11 @@ std::unique_ptr CreateForwardingMockDecoder( return std::move(mock_decoder); } -rtc::scoped_refptr +webrtc::scoped_refptr CreateForwardingMockDecoderFactory( webrtc::AudioDecoderFactory* real_decoder_factory) { - rtc::scoped_refptr mock_decoder_factory = - rtc::make_ref_counted>(); + webrtc::scoped_refptr mock_decoder_factory = + webrtc::make_ref_counted>(); EXPECT_CALL(*mock_decoder_factory, GetSupportedDecoders()) .Times(AtLeast(1)) .WillRepeatedly(Invoke([real_decoder_factory] { @@ -298,35 +331,33 @@ CreateForwardingMockDecoderFactory( Invoke([real_decoder_factory](const webrtc::SdpAudioFormat& format) { return real_decoder_factory->IsSupportedDecoder(format); })); - EXPECT_CALL(*mock_decoder_factory, MakeAudioDecoderMock(_, _, _)) + EXPECT_CALL(*mock_decoder_factory, Create) .Times(AtLeast(2)) .WillRepeatedly( - Invoke([real_decoder_factory]( - const webrtc::SdpAudioFormat& format, - absl::optional codec_pair_id, - std::unique_ptr* return_value) { + [real_decoder_factory]( + const webrtc::Environment& env, + const webrtc::SdpAudioFormat& format, + std::optional codec_pair_id) { auto real_decoder = - real_decoder_factory->MakeAudioDecoder(format, codec_pair_id); - *return_value = - real_decoder - ? CreateForwardingMockDecoder(std::move(real_decoder)) - : nullptr; - })); + real_decoder_factory->Create(env, format, codec_pair_id); + return real_decoder + ? 
CreateForwardingMockDecoder(std::move(real_decoder)) + : nullptr; + }); return mock_decoder_factory; } struct AudioEncoderUnicornSparklesRainbow { using Config = webrtc::AudioEncoderL16::Config; - static absl::optional SdpToConfig(webrtc::SdpAudioFormat format) { + static std::optional SdpToConfig(webrtc::SdpAudioFormat format) { if (absl::EqualsIgnoreCase(format.name, "UnicornSparklesRainbow")) { - const webrtc::SdpAudioFormat::Parameters expected_params = { - {"num_horns", "1"}}; + const webrtc::CodecParameterMap expected_params = {{"num_horns", "1"}}; EXPECT_EQ(expected_params, format.parameters); format.parameters.clear(); format.name = "L16"; return webrtc::AudioEncoderL16::SdpToConfig(format); } else { - return absl::nullopt; + return std::nullopt; } } static void AppendSupportedEncoders( @@ -346,7 +377,7 @@ struct AudioEncoderUnicornSparklesRainbow { static std::unique_ptr MakeAudioEncoder( const Config& config, int payload_type, - absl::optional codec_pair_id = absl::nullopt) { + std::optional codec_pair_id = std::nullopt) { return webrtc::AudioEncoderL16::MakeAudioEncoder(config, payload_type, codec_pair_id); } @@ -354,16 +385,15 @@ struct AudioEncoderUnicornSparklesRainbow { struct AudioDecoderUnicornSparklesRainbow { using Config = webrtc::AudioDecoderL16::Config; - static absl::optional SdpToConfig(webrtc::SdpAudioFormat format) { + static std::optional SdpToConfig(webrtc::SdpAudioFormat format) { if (absl::EqualsIgnoreCase(format.name, "UnicornSparklesRainbow")) { - const webrtc::SdpAudioFormat::Parameters expected_params = { - {"num_horns", "1"}}; + const webrtc::CodecParameterMap expected_params = {{"num_horns", "1"}}; EXPECT_EQ(expected_params, format.parameters); format.parameters.clear(); format.name = "L16"; return webrtc::AudioDecoderL16::SdpToConfig(format); } else { - return absl::nullopt; + return std::nullopt; } } static void AppendSupportedDecoders( @@ -379,7 +409,7 @@ struct AudioDecoderUnicornSparklesRainbow { } static std::unique_ptr MakeAudioDecoder( const Config& config, - absl::optional codec_pair_id = absl::nullopt) { + std::optional codec_pair_id = std::nullopt) { return webrtc::AudioDecoderL16::MakeAudioDecoder(config, codec_pair_id); } }; @@ -387,7 +417,7 @@ struct AudioDecoderUnicornSparklesRainbow { } // namespace TEST_P(PeerConnectionEndToEndTest, Call) { - rtc::scoped_refptr real_decoder_factory = + webrtc::scoped_refptr real_decoder_factory = webrtc::CreateOpusAudioDecoderFactory(); CreatePcs(webrtc::CreateOpusAudioEncoderFactory(), CreateForwardingMockDecoderFactory(real_decoder_factory.get())); @@ -411,34 +441,34 @@ TEST_P(PeerConnectionEndToEndTest, CallWithCustomCodec) { class IdLoggingAudioEncoderFactory : public webrtc::AudioEncoderFactory { public: IdLoggingAudioEncoderFactory( - rtc::scoped_refptr real_factory, + webrtc::scoped_refptr real_factory, std::vector* const codec_ids) : fact_(real_factory), codec_ids_(codec_ids) {} std::vector GetSupportedEncoders() override { return fact_->GetSupportedEncoders(); } - absl::optional QueryAudioEncoder( + std::optional QueryAudioEncoder( const webrtc::SdpAudioFormat& format) override { return fact_->QueryAudioEncoder(format); } - std::unique_ptr MakeAudioEncoder( - int payload_type, + std::unique_ptr Create( + const Environment& env, const webrtc::SdpAudioFormat& format, - absl::optional codec_pair_id) override { - EXPECT_TRUE(codec_pair_id.has_value()); - codec_ids_->push_back(*codec_pair_id); - return fact_->MakeAudioEncoder(payload_type, format, codec_pair_id); + Options options) override { + 
EXPECT_TRUE(options.codec_pair_id.has_value()); + codec_ids_->push_back(*options.codec_pair_id); + return fact_->Create(env, format, options); } private: - const rtc::scoped_refptr fact_; + const webrtc::scoped_refptr fact_; std::vector* const codec_ids_; }; class IdLoggingAudioDecoderFactory : public webrtc::AudioDecoderFactory { public: IdLoggingAudioDecoderFactory( - rtc::scoped_refptr real_factory, + webrtc::scoped_refptr real_factory, std::vector* const codec_ids) : fact_(real_factory), codec_ids_(codec_ids) {} std::vector GetSupportedDecoders() override { @@ -447,34 +477,35 @@ TEST_P(PeerConnectionEndToEndTest, CallWithCustomCodec) { bool IsSupportedDecoder(const webrtc::SdpAudioFormat& format) override { return fact_->IsSupportedDecoder(format); } - std::unique_ptr MakeAudioDecoder( + std::unique_ptr Create( + const Environment& env, const webrtc::SdpAudioFormat& format, - absl::optional codec_pair_id) override { + std::optional codec_pair_id) override { EXPECT_TRUE(codec_pair_id.has_value()); codec_ids_->push_back(*codec_pair_id); - return fact_->MakeAudioDecoder(format, codec_pair_id); + return fact_->Create(env, format, codec_pair_id); } private: - const rtc::scoped_refptr fact_; + const webrtc::scoped_refptr fact_; std::vector* const codec_ids_; }; std::vector encoder_id1, encoder_id2, decoder_id1, decoder_id2; - CreatePcs(rtc::make_ref_counted( + CreatePcs(webrtc::make_ref_counted( webrtc::CreateAudioEncoderFactory< AudioEncoderUnicornSparklesRainbow>(), &encoder_id1), - rtc::make_ref_counted( + webrtc::make_ref_counted( webrtc::CreateAudioDecoderFactory< AudioDecoderUnicornSparklesRainbow>(), &decoder_id1), - rtc::make_ref_counted( + webrtc::make_ref_counted( webrtc::CreateAudioEncoderFactory< AudioEncoderUnicornSparklesRainbow>(), &encoder_id2), - rtc::make_ref_counted( + webrtc::make_ref_counted( webrtc::CreateAudioDecoderFactory< AudioDecoderUnicornSparklesRainbow>(), &decoder_id2)); @@ -501,9 +532,9 @@ TEST_P(PeerConnectionEndToEndTest, CreateDataChannelBeforeNegotiate) { webrtc::MockAudioDecoderFactory::CreateEmptyFactory()); webrtc::DataChannelInit init; - rtc::scoped_refptr caller_dc( + webrtc::scoped_refptr caller_dc( caller_->CreateDataChannel("data", init)); - rtc::scoped_refptr callee_dc( + webrtc::scoped_refptr callee_dc( callee_->CreateDataChannel("data", init)); Negotiate(); @@ -530,7 +561,7 @@ TEST_P(PeerConnectionEndToEndTest, CreateDataChannelAfterNegotiate) { webrtc::DataChannelInit init; // This DataChannel is for creating the data content in the negotiation. - rtc::scoped_refptr dummy( + webrtc::scoped_refptr dummy( caller_->CreateDataChannel("data", init)); Negotiate(); WaitForConnection(); @@ -539,9 +570,9 @@ TEST_P(PeerConnectionEndToEndTest, CreateDataChannelAfterNegotiate) { WaitForDataChannelsToOpen(dummy.get(), callee_signaled_data_channels_, 0); // Create new DataChannels after the negotiation and verify their states. - rtc::scoped_refptr caller_dc( + webrtc::scoped_refptr caller_dc( caller_->CreateDataChannel("hello", init)); - rtc::scoped_refptr callee_dc( + webrtc::scoped_refptr callee_dc( callee_->CreateDataChannel("hello", init)); WaitForDataChannelsToOpen(caller_dc.get(), callee_signaled_data_channels_, 1); @@ -564,7 +595,7 @@ TEST_P(PeerConnectionEndToEndTest, CreateDataChannelLargeTransfer) { webrtc::DataChannelInit init; // This DataChannel is for creating the data content in the negotiation. 
- rtc::scoped_refptr dummy( + webrtc::scoped_refptr dummy( caller_->CreateDataChannel("data", init)); Negotiate(); WaitForConnection(); @@ -573,9 +604,9 @@ TEST_P(PeerConnectionEndToEndTest, CreateDataChannelLargeTransfer) { WaitForDataChannelsToOpen(dummy.get(), callee_signaled_data_channels_, 0); // Create new DataChannels after the negotiation and verify their states. - rtc::scoped_refptr caller_dc( + webrtc::scoped_refptr caller_dc( caller_->CreateDataChannel("hello", init)); - rtc::scoped_refptr callee_dc( + webrtc::scoped_refptr callee_dc( callee_->CreateDataChannel("hello", init)); WaitForDataChannelsToOpen(caller_dc.get(), callee_signaled_data_channels_, 1); @@ -596,9 +627,9 @@ TEST_P(PeerConnectionEndToEndTest, DataChannelIdAssignment) { webrtc::MockAudioDecoderFactory::CreateEmptyFactory()); webrtc::DataChannelInit init; - rtc::scoped_refptr caller_dc_1( + webrtc::scoped_refptr caller_dc_1( caller_->CreateDataChannel("data", init)); - rtc::scoped_refptr callee_dc_1( + webrtc::scoped_refptr callee_dc_1( callee_->CreateDataChannel("data", init)); Negotiate(); @@ -607,9 +638,9 @@ TEST_P(PeerConnectionEndToEndTest, DataChannelIdAssignment) { EXPECT_EQ(1, caller_dc_1->id() % 2); EXPECT_EQ(0, callee_dc_1->id() % 2); - rtc::scoped_refptr caller_dc_2( + webrtc::scoped_refptr caller_dc_2( caller_->CreateDataChannel("data", init)); - rtc::scoped_refptr callee_dc_2( + webrtc::scoped_refptr callee_dc_2( callee_->CreateDataChannel("data", init)); EXPECT_EQ(1, caller_dc_2->id() % 2); @@ -625,9 +656,9 @@ TEST_P(PeerConnectionEndToEndTest, webrtc::DataChannelInit init; - rtc::scoped_refptr caller_dc_1( + webrtc::scoped_refptr caller_dc_1( caller_->CreateDataChannel("data", init)); - rtc::scoped_refptr caller_dc_2( + webrtc::scoped_refptr caller_dc_2( caller_->CreateDataChannel("data", init)); Negotiate(); @@ -649,10 +680,18 @@ TEST_P(PeerConnectionEndToEndTest, const std::string message_2 = "hello 2"; caller_dc_1->Send(webrtc::DataBuffer(message_1)); - EXPECT_EQ_WAIT(message_1, dc_1_observer->last_message(), kMaxWait); + EXPECT_THAT( + webrtc::WaitUntil([&] { return dc_1_observer->last_message(); }, + ::testing::Eq(message_1), + {.timeout = webrtc::TimeDelta::Millis(kMaxWait)}), + webrtc::IsRtcOk()); caller_dc_2->Send(webrtc::DataBuffer(message_2)); - EXPECT_EQ_WAIT(message_2, dc_2_observer->last_message(), kMaxWait); + EXPECT_THAT( + webrtc::WaitUntil([&] { return dc_2_observer->last_message(); }, + ::testing::Eq(message_2), + {.timeout = webrtc::TimeDelta::Millis(kMaxWait)}), + webrtc::IsRtcOk()); EXPECT_EQ(1U, dc_1_observer->received_message_count()); EXPECT_EQ(1U, dc_2_observer->received_message_count()); @@ -668,7 +707,7 @@ TEST_P(PeerConnectionEndToEndTest, webrtc::MockAudioDecoderFactory::CreateEmptyFactory()); webrtc::DataChannelInit init; - rtc::scoped_refptr caller_dc( + webrtc::scoped_refptr caller_dc( caller_->CreateDataChannel("data", init)); Negotiate(); @@ -680,7 +719,11 @@ TEST_P(PeerConnectionEndToEndTest, // Previously, the channel on which Close is called reported being closed // prematurely, and this caused issues; see bugs.webrtc.org/4453. caller_dc->Close(); - EXPECT_EQ_WAIT(DataChannelInterface::kClosed, caller_dc->state(), kMaxWait); + EXPECT_THAT( + webrtc::WaitUntil([&] { return caller_dc->state(); }, + ::testing::Eq(DataChannelInterface::kClosed), + {.timeout = webrtc::TimeDelta::Millis(kMaxWait)}), + webrtc::IsRtcOk()); // Create a new channel and ensure it works after closing the previous one. 
caller_dc = caller_->CreateDataChannel("data2", init); @@ -703,7 +746,7 @@ TEST_P(PeerConnectionEndToEndTest, CloseDataChannelRemotelyWhileNotReferenced) { webrtc::MockAudioDecoderFactory::CreateEmptyFactory()); webrtc::DataChannelInit init; - rtc::scoped_refptr caller_dc( + webrtc::scoped_refptr caller_dc( caller_->CreateDataChannel("data", init)); Negotiate(); @@ -713,11 +756,15 @@ TEST_P(PeerConnectionEndToEndTest, CloseDataChannelRemotelyWhileNotReferenced) { // This removes the reference to the remote data channel that we hold. callee_signaled_data_channels_.clear(); caller_dc->Close(); - EXPECT_EQ_WAIT(DataChannelInterface::kClosed, caller_dc->state(), kMaxWait); + EXPECT_THAT( + webrtc::WaitUntil([&] { return caller_dc->state(); }, + ::testing::Eq(DataChannelInterface::kClosed), + {.timeout = webrtc::TimeDelta::Millis(kMaxWait)}), + webrtc::IsRtcOk()); // Wait for a bit longer so the remote data channel will receive the // close message and be destroyed. - rtc::Thread::Current()->ProcessMessages(100); + webrtc::Thread::Current()->ProcessMessages(100); } // Test behavior of creating too many datachannels. @@ -726,26 +773,29 @@ TEST_P(PeerConnectionEndToEndTest, TooManyDataChannelsOpenedBeforeConnecting) { webrtc::MockAudioDecoderFactory::CreateEmptyFactory()); webrtc::DataChannelInit init; - std::vector> channels; - for (int i = 0; i <= cricket::kMaxSctpStreams / 2; i++) { - rtc::scoped_refptr caller_dc( + std::vector> channels; + for (int i = 0; i <= webrtc::kMaxSctpStreams / 2; i++) { + webrtc::scoped_refptr caller_dc( caller_->CreateDataChannel("data", init)); channels.push_back(std::move(caller_dc)); } Negotiate(); WaitForConnection(); - EXPECT_EQ_WAIT(callee_signaled_data_channels_.size(), - static_cast(cricket::kMaxSctpStreams / 2), kMaxWait); + EXPECT_THAT( + webrtc::WaitUntil([&] { return callee_signaled_data_channels_; }, + ::testing::SizeIs(webrtc::kMaxSctpStreams / 2), + {.timeout = webrtc::TimeDelta::Millis(kMaxWait)}), + webrtc::IsRtcOk()); EXPECT_EQ(DataChannelInterface::kOpen, - channels[(cricket::kMaxSctpStreams / 2) - 1]->state()); + channels[(webrtc::kMaxSctpStreams / 2) - 1]->state()); EXPECT_EQ(DataChannelInterface::kClosed, - channels[cricket::kMaxSctpStreams / 2]->state()); + channels[webrtc::kMaxSctpStreams / 2]->state()); } #endif // WEBRTC_HAVE_SCTP TEST_P(PeerConnectionEndToEndTest, CanRestartIce) { - rtc::scoped_refptr real_decoder_factory = + webrtc::scoped_refptr real_decoder_factory = webrtc::CreateOpusAudioDecoderFactory(); CreatePcs(webrtc::CreateOpusAudioEncoderFactory(), CreateForwardingMockDecoderFactory(real_decoder_factory.get())); diff --git a/pc/peer_connection_factory.cc b/pc/peer_connection_factory.cc index d933ba6aea..e62d25af0b 100644 --- a/pc/peer_connection_factory.cc +++ b/pc/peer_connection_factory.cc @@ -10,30 +10,46 @@ #include "pc/peer_connection_factory.h" -#include +#include +#include +#include +#include #include +#include #include "absl/strings/match.h" -#include "api/async_resolver_factory.h" -#include "api/call/call_factory_interface.h" -#include "api/fec_controller.h" +#include "absl/strings/string_view.h" +#include "api/audio_options.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/ice_transport_interface.h" -#include "api/network_state_predictor.h" -#include "api/packet_socket_factory.h" -#include "api/rtc_event_log/rtc_event_log.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include 
"api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/transport/bitrate_settings.h" +#include "api/transport/network_control.h" #include "api/units/data_rate.h" -#include "call/audio_state.h" +#include "call/call_config.h" #include "call/rtp_transport_controller_send_factory.h" +#include "media/base/codec.h" #include "media/base/media_engine.h" #include "p2p/base/basic_async_resolver_factory.h" -#include "p2p/base/basic_packet_socket_factory.h" #include "p2p/base/default_ice_transport_factory.h" +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/port.h" #include "p2p/base/port_allocator.h" #include "p2p/client/basic_port_allocator.h" #include "pc/audio_track.h" +#include "pc/codec_vendor.h" +#include "pc/connection_context.h" +#include "pc/ice_server_parsing.h" #include "pc/local_audio_source.h" +#include "pc/media_factory.h" #include "pc/media_stream.h" #include "pc/media_stream_proxy.h" #include "pc/media_stream_track_proxy.h" @@ -41,7 +57,6 @@ #include "pc/peer_connection_factory_proxy.h" #include "pc/peer_connection_proxy.h" #include "pc/rtp_parameters_conversion.h" -#include "pc/session_description.h" #include "pc/video_track.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" @@ -53,7 +68,7 @@ namespace webrtc { -rtc::scoped_refptr +scoped_refptr CreateModularPeerConnectionFactory( PeerConnectionFactoryDependencies dependencies) { // The PeerConnectionFactory must be created on the signaling thread. @@ -76,20 +91,26 @@ CreateModularPeerConnectionFactory( } // Static -rtc::scoped_refptr PeerConnectionFactory::Create( +scoped_refptr PeerConnectionFactory::Create( PeerConnectionFactoryDependencies dependencies) { - auto context = ConnectionContext::Create(&dependencies); + auto context = ConnectionContext::Create( + CreateEnvironment(std::move(dependencies.trials), + std::move(dependencies.task_queue_factory)), + &dependencies); if (!context) { return nullptr; } - return rtc::make_ref_counted(context, &dependencies); + return make_ref_counted(context, &dependencies); } PeerConnectionFactory::PeerConnectionFactory( - rtc::scoped_refptr context, + scoped_refptr context, PeerConnectionFactoryDependencies* dependencies) : context_(context), - task_queue_factory_(std::move(dependencies->task_queue_factory)), + codec_vendor_(context_->media_engine(), + context_->use_rtx(), + context_->env().field_trials()), + event_log_factory_(std::move(dependencies->event_log_factory)), fec_controller_factory_(std::move(dependencies->fec_controller_factory)), network_state_predictor_factory_( @@ -101,18 +122,24 @@ PeerConnectionFactory::PeerConnectionFactory( (dependencies->transport_controller_send_factory) ? 
std::move(dependencies->transport_controller_send_factory) : std::make_unique()), - metronome_(std::move(dependencies->metronome)) {} + decode_metronome_(std::move(dependencies->decode_metronome)), + encode_metronome_(std::move(dependencies->encode_metronome)) {} PeerConnectionFactory::PeerConnectionFactory( PeerConnectionFactoryDependencies dependencies) - : PeerConnectionFactory(ConnectionContext::Create(&dependencies), - &dependencies) {} + : PeerConnectionFactory( + ConnectionContext::Create( + CreateEnvironment(std::move(dependencies.trials), + std::move(dependencies.task_queue_factory)), + &dependencies), + &dependencies) {} PeerConnectionFactory::~PeerConnectionFactory() { RTC_DCHECK_RUN_ON(signaling_thread()); worker_thread()->BlockingCall([this] { RTC_DCHECK_RUN_ON(worker_thread()); - metronome_ = nullptr; + decode_metronome_ = nullptr; + encode_metronome_ = nullptr; }); } @@ -122,26 +149,24 @@ void PeerConnectionFactory::SetOptions(const Options& options) { } RtpCapabilities PeerConnectionFactory::GetRtpSenderCapabilities( - cricket::MediaType kind) const { + webrtc::MediaType kind) const { RTC_DCHECK_RUN_ON(signaling_thread()); switch (kind) { - case cricket::MEDIA_TYPE_AUDIO: { - cricket::AudioCodecs cricket_codecs; - cricket_codecs = media_engine()->voice().send_codecs(); + case webrtc::MediaType::AUDIO: { + Codecs cricket_codecs; + cricket_codecs = codec_vendor_.audio_send_codecs().codecs(); auto extensions = GetDefaultEnabledRtpHeaderExtensions(media_engine()->voice()); return ToRtpCapabilities(cricket_codecs, extensions); } - case cricket::MEDIA_TYPE_VIDEO: { - cricket::VideoCodecs cricket_codecs; - cricket_codecs = media_engine()->video().send_codecs(context_->use_rtx()); + case webrtc::MediaType::VIDEO: { + Codecs cricket_codecs; + cricket_codecs = codec_vendor_.video_send_codecs().codecs(); auto extensions = GetDefaultEnabledRtpHeaderExtensions(media_engine()->video()); return ToRtpCapabilities(cricket_codecs, extensions); } - case cricket::MEDIA_TYPE_DATA: - return RtpCapabilities(); - case cricket::MEDIA_TYPE_UNSUPPORTED: + default: return RtpCapabilities(); } RTC_DLOG(LS_ERROR) << "Got unexpected MediaType " << kind; @@ -149,37 +174,33 @@ RtpCapabilities PeerConnectionFactory::GetRtpSenderCapabilities( } RtpCapabilities PeerConnectionFactory::GetRtpReceiverCapabilities( - cricket::MediaType kind) const { + webrtc::MediaType kind) const { RTC_DCHECK_RUN_ON(signaling_thread()); switch (kind) { - case cricket::MEDIA_TYPE_AUDIO: { - cricket::AudioCodecs cricket_codecs; - cricket_codecs = media_engine()->voice().recv_codecs(); + case webrtc::MediaType::AUDIO: { + Codecs cricket_codecs; + cricket_codecs = codec_vendor_.audio_recv_codecs().codecs(); auto extensions = GetDefaultEnabledRtpHeaderExtensions(media_engine()->voice()); return ToRtpCapabilities(cricket_codecs, extensions); } - case cricket::MEDIA_TYPE_VIDEO: { - cricket::VideoCodecs cricket_codecs = - media_engine()->video().recv_codecs(context_->use_rtx()); + case webrtc::MediaType::VIDEO: { + Codecs cricket_codecs = codec_vendor_.video_recv_codecs().codecs(); auto extensions = GetDefaultEnabledRtpHeaderExtensions(media_engine()->video()); return ToRtpCapabilities(cricket_codecs, extensions); } - case cricket::MEDIA_TYPE_DATA: - return RtpCapabilities(); - case cricket::MEDIA_TYPE_UNSUPPORTED: + default: return RtpCapabilities(); } RTC_DLOG(LS_ERROR) << "Got unexpected MediaType " << kind; RTC_CHECK_NOTREACHED(); } -rtc::scoped_refptr -PeerConnectionFactory::CreateAudioSource(const cricket::AudioOptions& options) { 
+scoped_refptr<AudioSourceInterface> PeerConnectionFactory::CreateAudioSource(
+    const AudioOptions& options) {
   RTC_DCHECK(signaling_thread()->IsCurrent());
-  rtc::scoped_refptr<LocalAudioSource> source(
-      LocalAudioSource::Create(&options));
+  scoped_refptr<LocalAudioSource> source(LocalAudioSource::Create(&options));
   return source;
 }
 
@@ -194,29 +215,76 @@ void PeerConnectionFactory::StopAecDump() {
   media_engine()->voice().StopAecDump();
 }
 
-cricket::MediaEngineInterface* PeerConnectionFactory::media_engine() const {
+MediaEngineInterface* PeerConnectionFactory::media_engine() const {
   RTC_DCHECK(context_);
   return context_->media_engine();
 }
 
-RTCErrorOr<rtc::scoped_refptr<PeerConnectionInterface>>
+RTCErrorOr<scoped_refptr<PeerConnectionInterface>>
 PeerConnectionFactory::CreatePeerConnectionOrError(
     const PeerConnectionInterface::RTCConfiguration& configuration,
     PeerConnectionDependencies dependencies) {
   RTC_DCHECK_RUN_ON(signaling_thread());
+  // TODO(https://crbug.com/webrtc/13528): Remove support for kPlanB.
+  if (configuration.sdp_semantics == SdpSemantics::kPlanB_DEPRECATED) {
+    RTC_LOG(LS_WARNING)
+        << "PeerConnection constructed with legacy SDP semantics!";
+  }
+
+  RTCError err = IceConfig(configuration).IsValid();
+  if (!err.ok()) {
+    RTC_LOG(LS_ERROR) << "Invalid ICE configuration: " << err.message();
+    return err;
+  }
+
+  ServerAddresses stun_servers;
+  std::vector<RelayServerConfig> turn_servers;
+  err = ParseAndValidateIceServersFromConfiguration(configuration, stun_servers,
+                                                    turn_servers);
+  if (!err.ok()) {
+    return err;
+  }
+
+  if (!dependencies.observer) {
+    RTC_LOG(LS_ERROR) << "PeerConnection initialized without a "
+                         "PeerConnectionObserver";
+    return RTCError(RTCErrorType::INVALID_PARAMETER,
+                    "Attempt to create a PeerConnection without an observer");
+  }
+
+  EnvironmentFactory env_factory(context_->env());
+
+  // Field trials active for this PeerConnection is the first of:
+  // a) Specified in the PeerConnectionDependencies
+  // b) Specified in the PeerConnectionFactoryDependencies
+  // c) Created as default by the EnvironmentFactory.
+  env_factory.Set(std::move(dependencies.trials));
+
+  if (event_log_factory_ != nullptr) {
+    worker_thread()->BlockingCall([&] {
+      Environment env_for_rtc_event_log = env_factory.Create();
+      env_factory.Set(event_log_factory_->Create(env_for_rtc_event_log));
+    });
+  }
+
+  const Environment env = env_factory.Create();
+
   // Set internal defaults if optional dependencies are not set.
   if (!dependencies.cert_generator) {
-    dependencies.cert_generator =
-        std::make_unique<rtc::RTCCertificateGenerator>(signaling_thread(),
-                                                       network_thread());
+    dependencies.cert_generator = std::make_unique<RTCCertificateGenerator>(
+        signaling_thread(), network_thread());
   }
+
+  if (!dependencies.async_dns_resolver_factory) {
+    dependencies.async_dns_resolver_factory =
+        std::make_unique<BasicAsyncDnsResolverFactory>();
+  }
+
   if (!dependencies.allocator) {
-    const FieldTrialsView* trials =
-        dependencies.trials ? dependencies.trials.get() : &field_trials();
-    dependencies.allocator = std::make_unique<cricket::BasicPortAllocator>(
-        context_->default_network_manager(), context_->default_socket_factory(),
-        configuration.turn_customizer, /*relay_port_factory=*/nullptr, trials);
+    dependencies.allocator = std::make_unique<BasicPortAllocator>(
+        env, context_->default_network_manager(),
+        context_->default_socket_factory(), configuration.turn_customizer);
     dependencies.allocator->SetPortRange(
         configuration.port_allocator_config.min_port,
         configuration.port_allocator_config.max_port);
@@ -232,76 +300,61 @@ PeerConnectionFactory::CreatePeerConnectionOrError(
   dependencies.allocator->SetNetworkIgnoreMask(options().network_ignore_mask);
   dependencies.allocator->SetVpnList(configuration.vpn_list);
 
-  std::unique_ptr<RtcEventLog> event_log =
-      worker_thread()->BlockingCall([this] { return CreateRtcEventLog_w(); });
-
-  const FieldTrialsView* trials =
-      dependencies.trials ? dependencies.trials.get() : &field_trials();
-  std::unique_ptr<Call> call =
-      worker_thread()->BlockingCall([this, &event_log, trials, &configuration] {
-        return CreateCall_w(event_log.get(), *trials, configuration);
+  std::unique_ptr<NetworkControllerFactoryInterface>
+      network_controller_factory =
+          std::move(dependencies.network_controller_factory);
+  std::unique_ptr<Call> call = worker_thread()->BlockingCall(
+      [this, &env, &configuration, &network_controller_factory] {
+        return CreateCall_w(env, std::move(configuration),
+                            std::move(network_controller_factory));
       });
 
-  auto result = PeerConnection::Create(context_, options_, std::move(event_log),
-                                       std::move(call), configuration,
-                                       std::move(dependencies));
-  if (!result.ok()) {
-    return result.MoveError();
-  }
+  auto pc = PeerConnection::Create(env, context_, options_, std::move(call),
                                    configuration, dependencies, stun_servers,
                                    turn_servers);
 
   // We configure the proxy with a pointer to the network thread for methods
   // that need to be invoked there rather than on the signaling thread.
   // Internally, the proxy object has a member variable named `worker_thread_`
   // which will point to the network thread (and not the factory's
   // worker_thread()). All such methods have thread checks though, so the code
   // should still be clear (outside of macro expansion).
- rtc::scoped_refptr result_proxy = - PeerConnectionProxy::Create(signaling_thread(), network_thread(), - result.MoveValue()); - return result_proxy; + return scoped_refptr(PeerConnectionProxy::Create( + signaling_thread(), network_thread(), std::move(pc))); } -rtc::scoped_refptr +scoped_refptr PeerConnectionFactory::CreateLocalMediaStream(const std::string& stream_id) { RTC_DCHECK(signaling_thread()->IsCurrent()); return MediaStreamProxy::Create(signaling_thread(), MediaStream::Create(stream_id)); } -rtc::scoped_refptr PeerConnectionFactory::CreateVideoTrack( - rtc::scoped_refptr source, +scoped_refptr PeerConnectionFactory::CreateVideoTrack( + scoped_refptr source, absl::string_view id) { RTC_DCHECK(signaling_thread()->IsCurrent()); - rtc::scoped_refptr track = + scoped_refptr track = VideoTrack::Create(id, source, worker_thread()); return VideoTrackProxy::Create(signaling_thread(), worker_thread(), track); } -rtc::scoped_refptr PeerConnectionFactory::CreateAudioTrack( +scoped_refptr PeerConnectionFactory::CreateAudioTrack( const std::string& id, AudioSourceInterface* source) { RTC_DCHECK(signaling_thread()->IsCurrent()); - rtc::scoped_refptr track = - AudioTrack::Create(id, rtc::scoped_refptr(source)); + scoped_refptr track = + AudioTrack::Create(id, scoped_refptr(source)); return AudioTrackProxy::Create(signaling_thread(), track); } -std::unique_ptr PeerConnectionFactory::CreateRtcEventLog_w() { - RTC_DCHECK_RUN_ON(worker_thread()); - - auto encoding_type = RtcEventLog::EncodingType::NewFormat; - if (field_trials().IsDisabled("WebRTC-RtcEventLogNewFormat")) - encoding_type = RtcEventLog::EncodingType::Legacy; - return event_log_factory_ ? event_log_factory_->Create(encoding_type) - : std::make_unique(); -} - std::unique_ptr PeerConnectionFactory::CreateCall_w( - RtcEventLog* event_log, - const FieldTrialsView& field_trials, - const PeerConnectionInterface::RTCConfiguration& configuration) { + const Environment& env, + const PeerConnectionInterface::RTCConfiguration& configuration, + std::unique_ptr + per_call_network_controller_factory) { RTC_DCHECK_RUN_ON(worker_thread()); - webrtc::Call::Config call_config(event_log, network_thread()); + CallConfig call_config(env, network_thread()); if (!media_engine() || !context_->call_factory()) { return nullptr; } @@ -314,36 +367,38 @@ std::unique_ptr PeerConnectionFactory::CreateCall_w( FieldTrialParameter max_bandwidth("max", DataRate::KilobitsPerSec(2000)); ParseFieldTrial({&min_bandwidth, &start_bandwidth, &max_bandwidth}, - field_trials.Lookup("WebRTC-PcFactoryDefaultBitrates")); + env.field_trials().Lookup("WebRTC-PcFactoryDefaultBitrates")); call_config.bitrate_config.min_bitrate_bps = - rtc::saturated_cast(min_bandwidth->bps()); + saturated_cast(min_bandwidth->bps()); call_config.bitrate_config.start_bitrate_bps = - rtc::saturated_cast(start_bandwidth->bps()); + saturated_cast(start_bandwidth->bps()); call_config.bitrate_config.max_bitrate_bps = - rtc::saturated_cast(max_bandwidth->bps()); + saturated_cast(max_bandwidth->bps()); call_config.fec_controller_factory = fec_controller_factory_.get(); - call_config.task_queue_factory = task_queue_factory_.get(); call_config.network_state_predictor_factory = network_state_predictor_factory_.get(); call_config.neteq_factory = neteq_factory_.get(); - if (IsTrialEnabled("WebRTC-Bwe-InjectedCongestionController")) { - RTC_LOG(LS_INFO) << "Using injected network controller factory"; + if (per_call_network_controller_factory != nullptr) { + RTC_LOG(LS_INFO) << "Using pc injected network controller 
factory"; + call_config.per_call_network_controller_factory = + std::move(per_call_network_controller_factory); + } else if (IsTrialEnabled("WebRTC-Bwe-InjectedCongestionController")) { + RTC_LOG(LS_INFO) << "Using pcf injected network controller factory"; call_config.network_controller_factory = injected_network_controller_factory_.get(); } else { RTC_LOG(LS_INFO) << "Using default network controller factory"; } - call_config.trials = &field_trials; call_config.rtp_transport_controller_send_factory = transport_controller_send_factory_.get(); - call_config.metronome = metronome_.get(); + call_config.decode_metronome = decode_metronome_.get(); + call_config.encode_metronome = encode_metronome_.get(); call_config.pacer_burst_interval = configuration.pacer_burst_interval; - return std::unique_ptr( - context_->call_factory()->CreateCall(call_config)); + return context_->call_factory()->CreateCall(std::move(call_config)); } bool PeerConnectionFactory::IsTrialEnabled(absl::string_view key) const { diff --git a/pc/peer_connection_factory.h b/pc/peer_connection_factory.h index f55d09f6d8..5b99a69674 100644 --- a/pc/peer_connection_factory.h +++ b/pc/peer_connection_factory.h @@ -20,6 +20,7 @@ #include "absl/strings/string_view.h" #include "api/audio_options.h" +#include "api/environment/environment.h" #include "api/fec_controller.h" #include "api/field_trials_view.h" #include "api/media_stream_interface.h" @@ -29,32 +30,23 @@ #include "api/network_state_predictor.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" -#include "api/rtc_event_log/rtc_event_log.h" #include "api/rtc_event_log/rtc_event_log_factory_interface.h" #include "api/rtp_parameters.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" -#include "api/task_queue/task_queue_factory.h" #include "api/transport/network_control.h" #include "api/transport/sctp_transport_factory_interface.h" #include "call/call.h" #include "call/rtp_transport_controller_send_factory_interface.h" +#include "media/base/media_engine.h" #include "p2p/base/port_allocator.h" +#include "pc/codec_vendor.h" #include "pc/connection_context.h" -#include "rtc_base/checks.h" -#include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" -namespace rtc { -class BasicNetworkManager; -class BasicPacketSocketFactory; -} // namespace rtc - namespace webrtc { -class RtcEventLog; - class PeerConnectionFactory : public PeerConnectionFactoryInterface { public: // Creates a PeerConnectionFactory. It returns nullptr on initialization @@ -62,33 +54,33 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { // // The Dependencies structure allows simple management of all new // dependencies being added to the PeerConnectionFactory. 
- static rtc::scoped_refptr Create( + static scoped_refptr Create( PeerConnectionFactoryDependencies dependencies); void SetOptions(const Options& options) override; - RTCErrorOr> + RTCErrorOr> CreatePeerConnectionOrError( const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies) override; RtpCapabilities GetRtpSenderCapabilities( - cricket::MediaType kind) const override; + webrtc::MediaType kind) const override; RtpCapabilities GetRtpReceiverCapabilities( - cricket::MediaType kind) const override; + webrtc::MediaType kind) const override; - rtc::scoped_refptr CreateLocalMediaStream( + scoped_refptr CreateLocalMediaStream( const std::string& stream_id) override; - rtc::scoped_refptr CreateAudioSource( - const cricket::AudioOptions& options) override; + scoped_refptr CreateAudioSource( + const AudioOptions& options) override; - rtc::scoped_refptr CreateVideoTrack( - rtc::scoped_refptr video_source, + scoped_refptr CreateVideoTrack( + scoped_refptr video_source, absl::string_view id) override; - rtc::scoped_refptr CreateAudioTrack( + scoped_refptr CreateAudioTrack( const std::string& id, AudioSourceInterface* audio_source) override; @@ -99,13 +91,13 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { return context_->sctp_transport_factory(); } - rtc::Thread* signaling_thread() const { + Thread* signaling_thread() const { // This method can be called on a different thread when the factory is // created in CreatePeerConnectionFactory(). return context_->signaling_thread(); } - rtc::Thread* worker_thread() const { return context_->worker_thread(); } + Thread* worker_thread() const { return context_->worker_thread(); } const Options& options() const { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -113,14 +105,15 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { } const FieldTrialsView& field_trials() const { - return context_->field_trials(); + return context_->env().field_trials(); } - cricket::MediaEngineInterface* media_engine() const; + MediaEngineInterface* media_engine() const; + CodecVendor& CodecVendorForTesting() { return codec_vendor_; } protected: // Constructor used by the static Create() method. Modifies the dependencies. - PeerConnectionFactory(rtc::scoped_refptr context, + PeerConnectionFactory(scoped_refptr context, PeerConnectionFactoryDependencies* dependencies); // Constructor for use in testing. 
Ignores the possibility of initialization @@ -131,20 +124,20 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { virtual ~PeerConnectionFactory(); private: - rtc::Thread* network_thread() const { return context_->network_thread(); } + Thread* network_thread() const { return context_->network_thread(); } bool IsTrialEnabled(absl::string_view key) const; - std::unique_ptr CreateRtcEventLog_w(); std::unique_ptr CreateCall_w( - RtcEventLog* event_log, - const FieldTrialsView& field_trials, - const PeerConnectionInterface::RTCConfiguration& configuration); + const Environment& env, + const PeerConnectionInterface::RTCConfiguration& configuration, + std::unique_ptr + network_controller_factory); - rtc::scoped_refptr context_; + scoped_refptr context_; PeerConnectionFactoryInterface::Options options_ RTC_GUARDED_BY(signaling_thread()); - std::unique_ptr task_queue_factory_; + CodecVendor codec_vendor_; std::unique_ptr event_log_factory_; std::unique_ptr fec_controller_factory_; std::unique_ptr @@ -154,7 +147,8 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { std::unique_ptr neteq_factory_; const std::unique_ptr transport_controller_send_factory_; - std::unique_ptr metronome_ RTC_GUARDED_BY(worker_thread()); + std::unique_ptr decode_metronome_ RTC_GUARDED_BY(worker_thread()); + std::unique_ptr encode_metronome_ RTC_GUARDED_BY(worker_thread()); }; } // namespace webrtc diff --git a/pc/peer_connection_factory_proxy.h b/pc/peer_connection_factory_proxy.h index 4781497642..e046f66377 100644 --- a/pc/peer_connection_factory_proxy.h +++ b/pc/peer_connection_factory_proxy.h @@ -11,11 +11,18 @@ #ifndef PC_PEER_CONNECTION_FACTORY_PROXY_H_ #define PC_PEER_CONNECTION_FACTORY_PROXY_H_ -#include +#include +#include #include -#include +#include "absl/strings/string_view.h" +#include "api/audio_options.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "pc/proxy.h" namespace webrtc { @@ -25,27 +32,25 @@ namespace webrtc { BEGIN_PROXY_MAP(PeerConnectionFactory) PROXY_PRIMARY_THREAD_DESTRUCTOR() PROXY_METHOD1(void, SetOptions, const Options&) -PROXY_METHOD2(RTCErrorOr>, +PROXY_METHOD2(RTCErrorOr>, CreatePeerConnectionOrError, const PeerConnectionInterface::RTCConfiguration&, PeerConnectionDependencies) -PROXY_CONSTMETHOD1(webrtc::RtpCapabilities, - GetRtpSenderCapabilities, - cricket::MediaType) -PROXY_CONSTMETHOD1(webrtc::RtpCapabilities, +PROXY_CONSTMETHOD1(RtpCapabilities, GetRtpSenderCapabilities, webrtc::MediaType) +PROXY_CONSTMETHOD1(RtpCapabilities, GetRtpReceiverCapabilities, - cricket::MediaType) -PROXY_METHOD1(rtc::scoped_refptr, + webrtc::MediaType) +PROXY_METHOD1(scoped_refptr, CreateLocalMediaStream, const std::string&) -PROXY_METHOD1(rtc::scoped_refptr, +PROXY_METHOD1(scoped_refptr, CreateAudioSource, - const cricket::AudioOptions&) -PROXY_METHOD2(rtc::scoped_refptr, + const AudioOptions&) +PROXY_METHOD2(scoped_refptr, CreateVideoTrack, - rtc::scoped_refptr, + scoped_refptr, absl::string_view) -PROXY_METHOD2(rtc::scoped_refptr, +PROXY_METHOD2(scoped_refptr, CreateAudioTrack, const std::string&, AudioSourceInterface*) diff --git a/pc/peer_connection_factory_unittest.cc b/pc/peer_connection_factory_unittest.cc index 11e232c01f..f895496257 100644 --- a/pc/peer_connection_factory_unittest.cc +++ b/pc/peer_connection_factory_unittest.cc @@ -11,19 +11,31 @@ #include "pc/peer_connection_factory.h" 
#include +#include +#include #include #include #include -#include "api/audio/audio_mixer.h" +#include "api/audio/audio_device.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/create_peerconnection_factory.h" #include "api/data_channel_interface.h" +#include "api/enable_media.h" +#include "api/enable_media_with_defaults.h" +#include "api/environment/environment_factory.h" #include "api/jsep.h" +#include "api/make_ref_counted.h" #include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/peer_connection_interface.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/task_queue/default_task_queue_factory.h" #include "api/test/mock_packet_socket_factory.h" +#include "api/units/time_delta.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/video_decoder_factory_template.h" #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h" @@ -35,24 +47,24 @@ #include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" #include "media/base/fake_frame_source.h" -#include "media/engine/webrtc_media_engine.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "p2p/base/fake_port_allocator.h" +#include "media/base/media_constants.h" +#include "modules/audio_processing/include/mock_audio_processing.h" #include "p2p/base/port.h" #include "p2p/base/port_allocator.h" #include "p2p/base/port_interface.h" +#include "p2p/test/fake_port_allocator.h" +#include "pc/connection_context.h" #include "pc/test/fake_audio_capture_module.h" #include "pc/test/fake_video_track_source.h" -#include "pc/test/mock_peer_connection_observers.h" -#include "rtc_base/gunit.h" +#include "rtc_base/event.h" #include "rtc_base/internal/default_socket_server.h" -#include "rtc_base/rtc_certificate_generator.h" +#include "rtc_base/network.h" #include "rtc_base/socket_address.h" +#include "rtc_base/socket_server.h" +#include "rtc_base/thread.h" #include "rtc_base/time_utils.h" #include "test/gmock.h" #include "test/gtest.h" -#include "test/scoped_key_value_config.h" #ifdef WEBRTC_ANDROID #include "pc/test/android_test_initializer.h" @@ -64,11 +76,14 @@ namespace webrtc { namespace { using ::testing::_; +using ::testing::A; using ::testing::AtLeast; using ::testing::InvokeWithoutArgs; using ::testing::NiceMock; using ::testing::Return; using ::testing::UnorderedElementsAre; +using ::webrtc::test::MockAudioProcessing; +using ::webrtc::test::MockAudioProcessingBuilder; static const char kStunIceServer[] = "stun:stun.l.google.com:19302"; static const char kTurnIceServer[] = "turn:test.com:1234"; @@ -96,29 +111,24 @@ class NullPeerConnectionObserver : public PeerConnectionObserver { virtual ~NullPeerConnectionObserver() = default; void OnSignalingChange( PeerConnectionInterface::SignalingState new_state) override {} - void OnAddStream(rtc::scoped_refptr stream) override {} - void OnRemoveStream( - rtc::scoped_refptr stream) override {} + void OnAddStream(scoped_refptr stream) override {} + void OnRemoveStream(scoped_refptr stream) override {} void OnDataChannel( - rtc::scoped_refptr data_channel) override {} + scoped_refptr data_channel) override {} void OnRenegotiationNeeded() override {} void OnIceConnectionChange( 
PeerConnectionInterface::IceConnectionState new_state) override {} void OnIceGatheringChange( PeerConnectionInterface::IceGatheringState new_state) override {} - void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override { - } + void OnIceCandidate(const IceCandidateInterface* candidate) override {} }; -class MockNetworkManager : public rtc::NetworkManager { +class MockNetworkManager : public NetworkManager { public: MOCK_METHOD(void, StartUpdating, (), (override)); MOCK_METHOD(void, StopUpdating, (), (override)); - MOCK_METHOD(std::vector, - GetNetworks, - (), - (const override)); - MOCK_METHOD(std::vector, + MOCK_METHOD(std::vector, GetNetworks, (), (const, override)); + MOCK_METHOD(std::vector, GetAnyAddressNetworks, (), (override)); @@ -127,23 +137,21 @@ class MockNetworkManager : public rtc::NetworkManager { class PeerConnectionFactoryTest : public ::testing::Test { public: PeerConnectionFactoryTest() - : socket_server_(rtc::CreateDefaultSocketServer()), + : socket_server_(CreateDefaultSocketServer()), main_thread_(socket_server_.get()) {} private: void SetUp() { #ifdef WEBRTC_ANDROID - webrtc::InitializeAndroidObjects(); + InitializeAndroidObjects(); #endif // Use fake audio device module since we're only testing the interface // level, and using a real one could make tests flaky e.g. when run in // parallel. - factory_ = webrtc::CreatePeerConnectionFactory( - rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(), - rtc::scoped_refptr( - FakeAudioCaptureModule::Create()), - webrtc::CreateBuiltinAudioEncoderFactory(), - webrtc::CreateBuiltinAudioDecoderFactory(), + factory_ = CreatePeerConnectionFactory( + Thread::Current(), Thread::Current(), Thread::Current(), + scoped_refptr(FakeAudioCaptureModule::Create()), + CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory(), std::make_unique>(), @@ -153,19 +161,17 @@ class PeerConnectionFactoryTest : public ::testing::Test { nullptr /* audio_mixer */, nullptr /* audio_processing */); ASSERT_TRUE(factory_.get() != NULL); - packet_socket_factory_.reset( - new rtc::BasicPacketSocketFactory(socket_server_.get())); - port_allocator_.reset(new cricket::FakePortAllocator( - rtc::Thread::Current(), packet_socket_factory_.get(), &field_trials_)); + port_allocator_ = std::make_unique(CreateEnvironment(), + socket_server_.get()); raw_port_allocator_ = port_allocator_.get(); } protected: - void VerifyStunServers(cricket::ServerAddresses stun_servers) { + void VerifyStunServers(ServerAddresses stun_servers) { EXPECT_EQ(stun_servers, raw_port_allocator_->stun_servers()); } - void VerifyTurnServers(std::vector turn_servers) { + void VerifyTurnServers(std::vector turn_servers) { EXPECT_EQ(turn_servers.size(), raw_port_allocator_->turn_servers().size()); for (size_t i = 0; i < turn_servers.size(); ++i) { ASSERT_EQ(1u, turn_servers[i].ports.size()); @@ -182,64 +188,64 @@ class PeerConnectionFactoryTest : public ::testing::Test { } } - void VerifyAudioCodecCapability(const webrtc::RtpCodecCapability& codec) { - EXPECT_EQ(codec.kind, cricket::MEDIA_TYPE_AUDIO); + void VerifyAudioCodecCapability(const RtpCodecCapability& codec) { + EXPECT_EQ(codec.kind, webrtc::MediaType::AUDIO); EXPECT_FALSE(codec.name.empty()); EXPECT_GT(codec.clock_rate, 0); EXPECT_GT(codec.num_channels, 0); } - void VerifyVideoCodecCapability(const webrtc::RtpCodecCapability& codec, + void VerifyVideoCodecCapability(const RtpCodecCapability& codec, bool sender) { - EXPECT_EQ(codec.kind, cricket::MEDIA_TYPE_VIDEO); + EXPECT_EQ(codec.kind, 
webrtc::MediaType::VIDEO); EXPECT_FALSE(codec.name.empty()); EXPECT_GT(codec.clock_rate, 0); if (sender) { if (codec.name == "VP8" || codec.name == "H264") { - EXPECT_THAT(codec.scalability_modes, - UnorderedElementsAre(webrtc::ScalabilityMode::kL1T1, - webrtc::ScalabilityMode::kL1T2, - webrtc::ScalabilityMode::kL1T3)) + EXPECT_THAT( + codec.scalability_modes, + UnorderedElementsAre(ScalabilityMode::kL1T1, ScalabilityMode::kL1T2, + ScalabilityMode::kL1T3)) << "Codec: " << codec.name; } else if (codec.name == "VP9" || codec.name == "AV1") { EXPECT_THAT( codec.scalability_modes, UnorderedElementsAre( // clang-format off - webrtc::ScalabilityMode::kL1T1, - webrtc::ScalabilityMode::kL1T2, - webrtc::ScalabilityMode::kL1T3, - webrtc::ScalabilityMode::kL2T1, - webrtc::ScalabilityMode::kL2T1h, - webrtc::ScalabilityMode::kL2T1_KEY, - webrtc::ScalabilityMode::kL2T2, - webrtc::ScalabilityMode::kL2T2h, - webrtc::ScalabilityMode::kL2T2_KEY, - webrtc::ScalabilityMode::kL2T2_KEY_SHIFT, - webrtc::ScalabilityMode::kL2T3, - webrtc::ScalabilityMode::kL2T3h, - webrtc::ScalabilityMode::kL2T3_KEY, - webrtc::ScalabilityMode::kL3T1, - webrtc::ScalabilityMode::kL3T1h, - webrtc::ScalabilityMode::kL3T1_KEY, - webrtc::ScalabilityMode::kL3T2, - webrtc::ScalabilityMode::kL3T2h, - webrtc::ScalabilityMode::kL3T2_KEY, - webrtc::ScalabilityMode::kL3T3, - webrtc::ScalabilityMode::kL3T3h, - webrtc::ScalabilityMode::kL3T3_KEY, - webrtc::ScalabilityMode::kS2T1, - webrtc::ScalabilityMode::kS2T1h, - webrtc::ScalabilityMode::kS2T2, - webrtc::ScalabilityMode::kS2T2h, - webrtc::ScalabilityMode::kS2T3, - webrtc::ScalabilityMode::kS2T3h, - webrtc::ScalabilityMode::kS3T1, - webrtc::ScalabilityMode::kS3T1h, - webrtc::ScalabilityMode::kS3T2, - webrtc::ScalabilityMode::kS3T2h, - webrtc::ScalabilityMode::kS3T3, - webrtc::ScalabilityMode::kS3T3h) + ScalabilityMode::kL1T1, + ScalabilityMode::kL1T2, + ScalabilityMode::kL1T3, + ScalabilityMode::kL2T1, + ScalabilityMode::kL2T1h, + ScalabilityMode::kL2T1_KEY, + ScalabilityMode::kL2T2, + ScalabilityMode::kL2T2h, + ScalabilityMode::kL2T2_KEY, + ScalabilityMode::kL2T2_KEY_SHIFT, + ScalabilityMode::kL2T3, + ScalabilityMode::kL2T3h, + ScalabilityMode::kL2T3_KEY, + ScalabilityMode::kL3T1, + ScalabilityMode::kL3T1h, + ScalabilityMode::kL3T1_KEY, + ScalabilityMode::kL3T2, + ScalabilityMode::kL3T2h, + ScalabilityMode::kL3T2_KEY, + ScalabilityMode::kL3T3, + ScalabilityMode::kL3T3h, + ScalabilityMode::kL3T3_KEY, + ScalabilityMode::kS2T1, + ScalabilityMode::kS2T1h, + ScalabilityMode::kS2T2, + ScalabilityMode::kS2T2h, + ScalabilityMode::kS2T3, + ScalabilityMode::kS2T3h, + ScalabilityMode::kS3T1, + ScalabilityMode::kS3T1h, + ScalabilityMode::kS3T2, + ScalabilityMode::kS3T2h, + ScalabilityMode::kS3T3, + ScalabilityMode::kS3T3h) // clang-format on ) << "Codec: " << codec.name; @@ -251,56 +257,43 @@ class PeerConnectionFactoryTest : public ::testing::Test { } } - webrtc::test::ScopedKeyValueConfig field_trials_; - std::unique_ptr socket_server_; - rtc::AutoSocketServerThread main_thread_; - rtc::scoped_refptr factory_; + std::unique_ptr socket_server_; + AutoSocketServerThread main_thread_; + scoped_refptr factory_; NullPeerConnectionObserver observer_; - std::unique_ptr packet_socket_factory_; - std::unique_ptr port_allocator_; + std::unique_ptr port_allocator_; // Since the PC owns the port allocator after it's been initialized, // this should only be used when known to be safe. 
- cricket::FakePortAllocator* raw_port_allocator_; + FakePortAllocator* raw_port_allocator_; }; // Since there is no public PeerConnectionFactory API to control RTX usage, need // to reconstruct factory with our own ConnectionContext. -rtc::scoped_refptr +scoped_refptr CreatePeerConnectionFactoryWithRtxDisabled() { - webrtc::PeerConnectionFactoryDependencies pcf_dependencies; - pcf_dependencies.signaling_thread = rtc::Thread::Current(); - pcf_dependencies.worker_thread = rtc::Thread::Current(); - pcf_dependencies.network_thread = rtc::Thread::Current(); + PeerConnectionFactoryDependencies pcf_dependencies; + pcf_dependencies.signaling_thread = Thread::Current(); + pcf_dependencies.worker_thread = Thread::Current(); + pcf_dependencies.network_thread = Thread::Current(); pcf_dependencies.task_queue_factory = CreateDefaultTaskQueueFactory(); - pcf_dependencies.call_factory = CreateCallFactory(); - pcf_dependencies.trials = std::make_unique(); - - cricket::MediaEngineDependencies media_dependencies; - media_dependencies.task_queue_factory = - pcf_dependencies.task_queue_factory.get(); - media_dependencies.adm = rtc::scoped_refptr( - FakeAudioCaptureModule::Create()); - media_dependencies.audio_encoder_factory = - webrtc::CreateBuiltinAudioEncoderFactory(); - media_dependencies.audio_decoder_factory = - webrtc::CreateBuiltinAudioDecoderFactory(); - media_dependencies.video_encoder_factory = + + pcf_dependencies.adm = FakeAudioCaptureModule::Create(); + pcf_dependencies.audio_encoder_factory = CreateBuiltinAudioEncoderFactory(); + pcf_dependencies.audio_decoder_factory = CreateBuiltinAudioDecoderFactory(); + pcf_dependencies.video_encoder_factory = std::make_unique>(); - media_dependencies.video_decoder_factory = + pcf_dependencies.video_decoder_factory = std::make_unique>(), - media_dependencies.trials = pcf_dependencies.trials.get(); - pcf_dependencies.media_engine = - cricket::CreateMediaEngine(std::move(media_dependencies)); + EnableMedia(pcf_dependencies); - rtc::scoped_refptr context = - ConnectionContext::Create(&pcf_dependencies); + scoped_refptr context = + ConnectionContext::Create(CreateEnvironment(), &pcf_dependencies); context->set_use_rtx(false); - return rtc::make_ref_counted(context, - &pcf_dependencies); + return make_ref_counted(context, &pcf_dependencies); } // Verify creation of PeerConnection using internal ADM, video factory and @@ -311,26 +304,26 @@ CreatePeerConnectionFactoryWithRtxDisabled() { // See https://bugs.chromium.org/p/webrtc/issues/detail?id=7806 for details. 
TEST(PeerConnectionFactoryTestInternal, DISABLED_CreatePCUsingInternalModules) { #ifdef WEBRTC_ANDROID - webrtc::InitializeAndroidObjects(); + InitializeAndroidObjects(); #endif - rtc::scoped_refptr factory( - webrtc::CreatePeerConnectionFactory( + scoped_refptr factory( + CreatePeerConnectionFactory( nullptr /* network_thread */, nullptr /* worker_thread */, nullptr /* signaling_thread */, nullptr /* default_adm */, - webrtc::CreateBuiltinAudioEncoderFactory(), - webrtc::CreateBuiltinAudioDecoderFactory(), + CreateBuiltinAudioEncoderFactory(), + CreateBuiltinAudioDecoderFactory(), nullptr /* video_encoder_factory */, nullptr /* video_decoder_factory */, nullptr /* audio_mixer */, nullptr /* audio_processing */)); NullPeerConnectionObserver observer; - webrtc::PeerConnectionInterface::RTCConfiguration config; - config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan; + PeerConnectionInterface::RTCConfiguration config; + config.sdp_semantics = SdpSemantics::kUnifiedPlan; std::unique_ptr cert_generator( new FakeRTCCertificateGenerator()); - webrtc::PeerConnectionDependencies pc_dependencies(&observer); + PeerConnectionDependencies pc_dependencies(&observer); pc_dependencies.cert_generator = std::move(cert_generator); auto result = factory->CreatePeerConnectionOrError(config, std::move(pc_dependencies)); @@ -339,8 +332,8 @@ TEST(PeerConnectionFactoryTestInternal, DISABLED_CreatePCUsingInternalModules) { } TEST_F(PeerConnectionFactoryTest, CheckRtpSenderAudioCapabilities) { - webrtc::RtpCapabilities audio_capabilities = - factory_->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_AUDIO); + RtpCapabilities audio_capabilities = + factory_->GetRtpSenderCapabilities(webrtc::MediaType::AUDIO); EXPECT_FALSE(audio_capabilities.codecs.empty()); for (const auto& codec : audio_capabilities.codecs) { VerifyAudioCodecCapability(codec); @@ -352,8 +345,8 @@ TEST_F(PeerConnectionFactoryTest, CheckRtpSenderAudioCapabilities) { } TEST_F(PeerConnectionFactoryTest, CheckRtpSenderVideoCapabilities) { - webrtc::RtpCapabilities video_capabilities = - factory_->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO); + RtpCapabilities video_capabilities = + factory_->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO); EXPECT_FALSE(video_capabilities.codecs.empty()); for (const auto& codec : video_capabilities.codecs) { VerifyVideoCodecCapability(codec, true); @@ -365,34 +358,34 @@ TEST_F(PeerConnectionFactoryTest, CheckRtpSenderVideoCapabilities) { } TEST_F(PeerConnectionFactoryTest, CheckRtpSenderRtxEnabledCapabilities) { - webrtc::RtpCapabilities video_capabilities = - factory_->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO); + RtpCapabilities video_capabilities = + factory_->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO); const auto it = std::find_if( video_capabilities.codecs.begin(), video_capabilities.codecs.end(), - [](const auto& c) { return c.name == cricket::kRtxCodecName; }); + [](const auto& c) { return c.name == kRtxCodecName; }); EXPECT_TRUE(it != video_capabilities.codecs.end()); } TEST(PeerConnectionFactoryTestInternal, CheckRtpSenderRtxDisabledCapabilities) { auto factory = CreatePeerConnectionFactoryWithRtxDisabled(); - webrtc::RtpCapabilities video_capabilities = - factory->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO); + RtpCapabilities video_capabilities = + factory->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO); const auto it = std::find_if( video_capabilities.codecs.begin(), video_capabilities.codecs.end(), - [](const auto& c) { return c.name == cricket::kRtxCodecName; }); + 
[](const auto& c) { return c.name == kRtxCodecName; }); EXPECT_TRUE(it == video_capabilities.codecs.end()); } TEST_F(PeerConnectionFactoryTest, CheckRtpSenderDataCapabilities) { - webrtc::RtpCapabilities data_capabilities = - factory_->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_DATA); + RtpCapabilities data_capabilities = + factory_->GetRtpSenderCapabilities(webrtc::MediaType::DATA); EXPECT_TRUE(data_capabilities.codecs.empty()); EXPECT_TRUE(data_capabilities.header_extensions.empty()); } TEST_F(PeerConnectionFactoryTest, CheckRtpReceiverAudioCapabilities) { - webrtc::RtpCapabilities audio_capabilities = - factory_->GetRtpReceiverCapabilities(cricket::MEDIA_TYPE_AUDIO); + RtpCapabilities audio_capabilities = + factory_->GetRtpReceiverCapabilities(webrtc::MediaType::AUDIO); EXPECT_FALSE(audio_capabilities.codecs.empty()); for (const auto& codec : audio_capabilities.codecs) { VerifyAudioCodecCapability(codec); @@ -404,8 +397,8 @@ TEST_F(PeerConnectionFactoryTest, CheckRtpReceiverAudioCapabilities) { } TEST_F(PeerConnectionFactoryTest, CheckRtpReceiverVideoCapabilities) { - webrtc::RtpCapabilities video_capabilities = - factory_->GetRtpReceiverCapabilities(cricket::MEDIA_TYPE_VIDEO); + RtpCapabilities video_capabilities = + factory_->GetRtpReceiverCapabilities(webrtc::MediaType::VIDEO); EXPECT_FALSE(video_capabilities.codecs.empty()); for (const auto& codec : video_capabilities.codecs) { VerifyVideoCodecCapability(codec, false); @@ -417,28 +410,28 @@ TEST_F(PeerConnectionFactoryTest, CheckRtpReceiverVideoCapabilities) { } TEST_F(PeerConnectionFactoryTest, CheckRtpReceiverRtxEnabledCapabilities) { - webrtc::RtpCapabilities video_capabilities = - factory_->GetRtpReceiverCapabilities(cricket::MEDIA_TYPE_VIDEO); + RtpCapabilities video_capabilities = + factory_->GetRtpReceiverCapabilities(webrtc::MediaType::VIDEO); const auto it = std::find_if( video_capabilities.codecs.begin(), video_capabilities.codecs.end(), - [](const auto& c) { return c.name == cricket::kRtxCodecName; }); + [](const auto& c) { return c.name == kRtxCodecName; }); EXPECT_TRUE(it != video_capabilities.codecs.end()); } TEST(PeerConnectionFactoryTestInternal, CheckRtpReceiverRtxDisabledCapabilities) { auto factory = CreatePeerConnectionFactoryWithRtxDisabled(); - webrtc::RtpCapabilities video_capabilities = - factory->GetRtpReceiverCapabilities(cricket::MEDIA_TYPE_VIDEO); + RtpCapabilities video_capabilities = + factory->GetRtpReceiverCapabilities(webrtc::MediaType::VIDEO); const auto it = std::find_if( video_capabilities.codecs.begin(), video_capabilities.codecs.end(), - [](const auto& c) { return c.name == cricket::kRtxCodecName; }); + [](const auto& c) { return c.name == kRtxCodecName; }); EXPECT_TRUE(it == video_capabilities.codecs.end()); } TEST_F(PeerConnectionFactoryTest, CheckRtpReceiverDataCapabilities) { - webrtc::RtpCapabilities data_capabilities = - factory_->GetRtpReceiverCapabilities(cricket::MEDIA_TYPE_DATA); + RtpCapabilities data_capabilities = + factory_->GetRtpReceiverCapabilities(webrtc::MediaType::DATA); EXPECT_TRUE(data_capabilities.codecs.empty()); EXPECT_TRUE(data_capabilities.header_extensions.empty()); } @@ -447,8 +440,8 @@ TEST_F(PeerConnectionFactoryTest, CheckRtpReceiverDataCapabilities) { // configuration. Also verifies the URL's parsed correctly as expected. 
TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServers) { PeerConnectionInterface::RTCConfiguration config; - config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan; - webrtc::PeerConnectionInterface::IceServer ice_server; + config.sdp_semantics = SdpSemantics::kUnifiedPlan; + PeerConnectionInterface::IceServer ice_server; ice_server.uri = kStunIceServer; config.servers.push_back(ice_server); ice_server.uri = kTurnIceServer; @@ -459,23 +452,23 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServers) { ice_server.username = kTurnUsername; ice_server.password = kTurnPassword; config.servers.push_back(ice_server); - webrtc::PeerConnectionDependencies pc_dependencies(&observer_); + PeerConnectionDependencies pc_dependencies(&observer_); pc_dependencies.cert_generator = std::make_unique(); pc_dependencies.allocator = std::move(port_allocator_); auto result = factory_->CreatePeerConnectionOrError(config, std::move(pc_dependencies)); ASSERT_TRUE(result.ok()); - cricket::ServerAddresses stun_servers; - rtc::SocketAddress stun1("stun.l.google.com", 19302); + ServerAddresses stun_servers; + SocketAddress stun1("stun.l.google.com", 19302); stun_servers.insert(stun1); VerifyStunServers(stun_servers); - std::vector turn_servers; - cricket::RelayServerConfig turn1("test.com", 1234, kTurnUsername, - kTurnPassword, cricket::PROTO_UDP); + std::vector turn_servers; + RelayServerConfig turn1("test.com", 1234, kTurnUsername, kTurnPassword, + PROTO_UDP); turn_servers.push_back(turn1); - cricket::RelayServerConfig turn2("hello.com", kDefaultStunPort, kTurnUsername, - kTurnPassword, cricket::PROTO_TCP); + RelayServerConfig turn2("hello.com", kDefaultStunPort, kTurnUsername, + kTurnPassword, PROTO_TCP); turn_servers.push_back(turn2); VerifyTurnServers(turn_servers); } @@ -484,55 +477,55 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServers) { // configuration. Also verifies the list of URL's parsed correctly as expected. 
TEST_F(PeerConnectionFactoryTest, CreatePCUsingIceServersUrls) { PeerConnectionInterface::RTCConfiguration config; - config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan; - webrtc::PeerConnectionInterface::IceServer ice_server; + config.sdp_semantics = SdpSemantics::kUnifiedPlan; + PeerConnectionInterface::IceServer ice_server; ice_server.urls.push_back(kStunIceServer); ice_server.urls.push_back(kTurnIceServer); ice_server.urls.push_back(kTurnIceServerWithTransport); ice_server.username = kTurnUsername; ice_server.password = kTurnPassword; config.servers.push_back(ice_server); - webrtc::PeerConnectionDependencies pc_dependencies(&observer_); + PeerConnectionDependencies pc_dependencies(&observer_); pc_dependencies.cert_generator = std::make_unique(); pc_dependencies.allocator = std::move(port_allocator_); auto result = factory_->CreatePeerConnectionOrError(config, std::move(pc_dependencies)); ASSERT_TRUE(result.ok()); - cricket::ServerAddresses stun_servers; - rtc::SocketAddress stun1("stun.l.google.com", 19302); + ServerAddresses stun_servers; + SocketAddress stun1("stun.l.google.com", 19302); stun_servers.insert(stun1); VerifyStunServers(stun_servers); - std::vector turn_servers; - cricket::RelayServerConfig turn1("test.com", 1234, kTurnUsername, - kTurnPassword, cricket::PROTO_UDP); + std::vector turn_servers; + RelayServerConfig turn1("test.com", 1234, kTurnUsername, kTurnPassword, + PROTO_UDP); turn_servers.push_back(turn1); - cricket::RelayServerConfig turn2("hello.com", kDefaultStunPort, kTurnUsername, - kTurnPassword, cricket::PROTO_TCP); + RelayServerConfig turn2("hello.com", kDefaultStunPort, kTurnUsername, + kTurnPassword, PROTO_TCP); turn_servers.push_back(turn2); VerifyTurnServers(turn_servers); } TEST_F(PeerConnectionFactoryTest, CreatePCUsingNoUsernameInUri) { PeerConnectionInterface::RTCConfiguration config; - config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan; - webrtc::PeerConnectionInterface::IceServer ice_server; + config.sdp_semantics = SdpSemantics::kUnifiedPlan; + PeerConnectionInterface::IceServer ice_server; ice_server.uri = kStunIceServer; config.servers.push_back(ice_server); ice_server.uri = kTurnIceServerWithNoUsernameInUri; ice_server.username = kTurnUsername; ice_server.password = kTurnPassword; config.servers.push_back(ice_server); - webrtc::PeerConnectionDependencies pc_dependencies(&observer_); + PeerConnectionDependencies pc_dependencies(&observer_); pc_dependencies.cert_generator = std::make_unique(); pc_dependencies.allocator = std::move(port_allocator_); auto result = factory_->CreatePeerConnectionOrError(config, std::move(pc_dependencies)); ASSERT_TRUE(result.ok()); - std::vector turn_servers; - cricket::RelayServerConfig turn("test.com", 1234, kTurnUsername, - kTurnPassword, cricket::PROTO_UDP); + std::vector turn_servers; + RelayServerConfig turn("test.com", 1234, kTurnUsername, kTurnPassword, + PROTO_UDP); turn_servers.push_back(turn); VerifyTurnServers(turn_servers); } @@ -541,30 +534,30 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingNoUsernameInUri) { // has transport parameter in it. 
TEST_F(PeerConnectionFactoryTest, CreatePCUsingTurnUrlWithTransportParam) { PeerConnectionInterface::RTCConfiguration config; - config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan; - webrtc::PeerConnectionInterface::IceServer ice_server; + config.sdp_semantics = SdpSemantics::kUnifiedPlan; + PeerConnectionInterface::IceServer ice_server; ice_server.uri = kTurnIceServerWithTransport; ice_server.username = kTurnUsername; ice_server.password = kTurnPassword; config.servers.push_back(ice_server); - webrtc::PeerConnectionDependencies pc_dependencies(&observer_); + PeerConnectionDependencies pc_dependencies(&observer_); pc_dependencies.cert_generator = std::make_unique(); pc_dependencies.allocator = std::move(port_allocator_); auto result = factory_->CreatePeerConnectionOrError(config, std::move(pc_dependencies)); ASSERT_TRUE(result.ok()); - std::vector turn_servers; - cricket::RelayServerConfig turn("hello.com", kDefaultStunPort, kTurnUsername, - kTurnPassword, cricket::PROTO_TCP); + std::vector turn_servers; + RelayServerConfig turn("hello.com", kDefaultStunPort, kTurnUsername, + kTurnPassword, PROTO_TCP); turn_servers.push_back(turn); VerifyTurnServers(turn_servers); } TEST_F(PeerConnectionFactoryTest, CreatePCUsingSecureTurnUrl) { PeerConnectionInterface::RTCConfiguration config; - config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan; - webrtc::PeerConnectionInterface::IceServer ice_server; + config.sdp_semantics = SdpSemantics::kUnifiedPlan; + PeerConnectionInterface::IceServer ice_server; ice_server.uri = kSecureTurnIceServer; ice_server.username = kTurnUsername; ice_server.password = kTurnPassword; @@ -577,33 +570,31 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingSecureTurnUrl) { ice_server.username = kTurnUsername; ice_server.password = kTurnPassword; config.servers.push_back(ice_server); - webrtc::PeerConnectionDependencies pc_dependencies(&observer_); + PeerConnectionDependencies pc_dependencies(&observer_); pc_dependencies.cert_generator = std::make_unique(); pc_dependencies.allocator = std::move(port_allocator_); auto result = factory_->CreatePeerConnectionOrError(config, std::move(pc_dependencies)); ASSERT_TRUE(result.ok()); - std::vector turn_servers; - cricket::RelayServerConfig turn1("hello.com", kDefaultStunTlsPort, - kTurnUsername, kTurnPassword, - cricket::PROTO_TLS); + std::vector turn_servers; + RelayServerConfig turn1("hello.com", kDefaultStunTlsPort, kTurnUsername, + kTurnPassword, PROTO_TLS); turn_servers.push_back(turn1); // TURNS with transport param should be default to tcp. 
- cricket::RelayServerConfig turn2("hello.com", 443, kTurnUsername, - kTurnPassword, cricket::PROTO_TLS); + RelayServerConfig turn2("hello.com", 443, kTurnUsername, kTurnPassword, + PROTO_TLS); turn_servers.push_back(turn2); - cricket::RelayServerConfig turn3("hello.com", kDefaultStunTlsPort, - kTurnUsername, kTurnPassword, - cricket::PROTO_TLS); + RelayServerConfig turn3("hello.com", kDefaultStunTlsPort, kTurnUsername, + kTurnPassword, PROTO_TLS); turn_servers.push_back(turn3); VerifyTurnServers(turn_servers); } TEST_F(PeerConnectionFactoryTest, CreatePCUsingIPLiteralAddress) { PeerConnectionInterface::RTCConfiguration config; - config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan; - webrtc::PeerConnectionInterface::IceServer ice_server; + config.sdp_semantics = SdpSemantics::kUnifiedPlan; + PeerConnectionInterface::IceServer ice_server; ice_server.uri = kStunIceServerWithIPv4Address; config.servers.push_back(ice_server); ice_server.uri = kStunIceServerWithIPv4AddressWithoutPort; @@ -616,27 +607,27 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingIPLiteralAddress) { ice_server.username = kTurnUsername; ice_server.password = kTurnPassword; config.servers.push_back(ice_server); - webrtc::PeerConnectionDependencies pc_dependencies(&observer_); + PeerConnectionDependencies pc_dependencies(&observer_); pc_dependencies.cert_generator = std::make_unique(); pc_dependencies.allocator = std::move(port_allocator_); auto result = factory_->CreatePeerConnectionOrError(config, std::move(pc_dependencies)); ASSERT_TRUE(result.ok()); - cricket::ServerAddresses stun_servers; - rtc::SocketAddress stun1("1.2.3.4", 1234); + ServerAddresses stun_servers; + SocketAddress stun1("1.2.3.4", 1234); stun_servers.insert(stun1); - rtc::SocketAddress stun2("1.2.3.4", 3478); + SocketAddress stun2("1.2.3.4", 3478); stun_servers.insert(stun2); // Default port - rtc::SocketAddress stun3("2401:fa00:4::", 1234); + SocketAddress stun3("2401:fa00:4::", 1234); stun_servers.insert(stun3); - rtc::SocketAddress stun4("2401:fa00:4::", 3478); + SocketAddress stun4("2401:fa00:4::", 3478); stun_servers.insert(stun4); // Default port VerifyStunServers(stun_servers); - std::vector turn_servers; - cricket::RelayServerConfig turn1("2401:fa00:4::", 1234, kTurnUsername, - kTurnPassword, cricket::PROTO_UDP); + std::vector turn_servers; + RelayServerConfig turn1("2401:fa00:4::", 1234, kTurnUsername, kTurnPassword, + PROTO_UDP); turn_servers.push_back(turn1); VerifyTurnServers(turn_servers); } @@ -644,14 +635,13 @@ TEST_F(PeerConnectionFactoryTest, CreatePCUsingIPLiteralAddress) { // This test verifies the captured stream is rendered locally using a // local video track. 
TEST_F(PeerConnectionFactoryTest, LocalRendering) { - rtc::scoped_refptr source = - webrtc::FakeVideoTrackSource::Create(/*is_screencast=*/false); + scoped_refptr source = + FakeVideoTrackSource::Create(/*is_screencast=*/false); - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 30); + FakeFrameSource frame_source(1280, 720, kNumMicrosecsPerSec / 30); ASSERT_TRUE(source.get() != NULL); - rtc::scoped_refptr track( + scoped_refptr track( factory_->CreateVideoTrack(source, "testlabel")); ASSERT_TRUE(track.get() != NULL); FakeVideoTrackRenderer local_renderer(track.get()); @@ -673,36 +663,36 @@ TEST_F(PeerConnectionFactoryTest, LocalRendering) { } TEST(PeerConnectionFactoryDependenciesTest, UsesNetworkManager) { - constexpr webrtc::TimeDelta kWaitTimeout = webrtc::TimeDelta::Seconds(10); + constexpr TimeDelta kWaitTimeout = TimeDelta::Seconds(10); auto mock_network_manager = std::make_unique>(); - rtc::Event called; + Event called; EXPECT_CALL(*mock_network_manager, StartUpdating()) .Times(AtLeast(1)) .WillRepeatedly(InvokeWithoutArgs([&] { called.Set(); })); - webrtc::PeerConnectionFactoryDependencies pcf_dependencies; + PeerConnectionFactoryDependencies pcf_dependencies; pcf_dependencies.network_manager = std::move(mock_network_manager); - rtc::scoped_refptr pcf = + scoped_refptr pcf = CreateModularPeerConnectionFactory(std::move(pcf_dependencies)); PeerConnectionInterface::RTCConfiguration config; config.ice_candidate_pool_size = 2; NullPeerConnectionObserver observer; auto pc = pcf->CreatePeerConnectionOrError( - config, webrtc::PeerConnectionDependencies(&observer)); + config, PeerConnectionDependencies(&observer)); ASSERT_TRUE(pc.ok()); called.Wait(kWaitTimeout); } TEST(PeerConnectionFactoryDependenciesTest, UsesPacketSocketFactory) { - constexpr webrtc::TimeDelta kWaitTimeout = webrtc::TimeDelta::Seconds(10); + constexpr TimeDelta kWaitTimeout = TimeDelta::Seconds(10); auto mock_socket_factory = - std::make_unique>(); + std::make_unique>(); - rtc::Event called; + Event called; EXPECT_CALL(*mock_socket_factory, CreateUdpSocket(_, _, _)) .WillOnce(InvokeWithoutArgs([&] { called.Set(); @@ -710,10 +700,10 @@ TEST(PeerConnectionFactoryDependenciesTest, UsesPacketSocketFactory) { })) .WillRepeatedly(Return(nullptr)); - webrtc::PeerConnectionFactoryDependencies pcf_dependencies; + PeerConnectionFactoryDependencies pcf_dependencies; pcf_dependencies.packet_socket_factory = std::move(mock_socket_factory); - rtc::scoped_refptr pcf = + scoped_refptr pcf = CreateModularPeerConnectionFactory(std::move(pcf_dependencies)); // By default, localhost addresses are ignored, which makes tests fail if test @@ -726,11 +716,51 @@ TEST(PeerConnectionFactoryDependenciesTest, UsesPacketSocketFactory) { config.ice_candidate_pool_size = 2; NullPeerConnectionObserver observer; auto pc = pcf->CreatePeerConnectionOrError( - config, webrtc::PeerConnectionDependencies(&observer)); + config, PeerConnectionDependencies(&observer)); ASSERT_TRUE(pc.ok()); called.Wait(kWaitTimeout); } +TEST(PeerConnectionFactoryDependenciesTest, + CreatesAudioProcessingWithProvidedFactory) { + auto ap_factory = std::make_unique(); + auto audio_processing = make_ref_counted>(); + // Validate that provided audio_processing is used by expecting that a request + // to start AEC Dump with unnatural size limit is propagated to the + // `audio_processing`. 
+ EXPECT_CALL(*audio_processing, CreateAndAttachAecDump(A(), 24'242, _)); + EXPECT_CALL(*ap_factory, Build).WillOnce(Return(audio_processing)); + + PeerConnectionFactoryDependencies pcf_dependencies; + pcf_dependencies.adm = FakeAudioCaptureModule::Create(); + pcf_dependencies.audio_processing_builder = std::move(ap_factory); + EnableMediaWithDefaults(pcf_dependencies); + + scoped_refptr pcf = + CreateModularPeerConnectionFactory(std::move(pcf_dependencies)); + pcf->StartAecDump(nullptr, 24'242); +} + +TEST(PeerConnectionFactoryDependenciesTest, UsesAudioProcessingWhenProvided) { + // Test legacy way of providing audio_processing. + // TODO: bugs.webrtc.org/369904700 - Delete this test when webrtc users no + // longer set PeerConnectionFactoryDependencies::audio_processing. + auto audio_processing = make_ref_counted>(); + EXPECT_CALL(*audio_processing, CreateAndAttachAecDump(A(), 24'242, _)); + + PeerConnectionFactoryDependencies pcf_dependencies; + pcf_dependencies.adm = FakeAudioCaptureModule::Create(); +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + pcf_dependencies.audio_processing = std::move(audio_processing); +#pragma clang diagnostic pop + EnableMediaWithDefaults(pcf_dependencies); + + scoped_refptr pcf = + CreateModularPeerConnectionFactory(std::move(pcf_dependencies)); + pcf->StartAecDump(nullptr, 24'242); +} + } // namespace } // namespace webrtc diff --git a/pc/peer_connection_field_trial_tests.cc b/pc/peer_connection_field_trial_tests.cc index 7799c9d6e3..9ce3f16415 100644 --- a/pc/peer_connection_field_trial_tests.cc +++ b/pc/peer_connection_field_trial_tests.cc @@ -11,29 +11,29 @@ // This file contains tests that verify that field trials do what they're // supposed to do. +#include #include +#include -#include "api/audio_codecs/builtin_audio_decoder_factory.h" -#include "api/audio_codecs/builtin_audio_encoder_factory.h" -#include "api/create_peerconnection_factory.h" +#include "absl/algorithm/container.h" +#include "api/enable_media_with_defaults.h" +#include "api/field_trials.h" +#include "api/field_trials_view.h" +#include "api/media_types.h" #include "api/peer_connection_interface.h" -#include "api/stats/rtcstats_objects.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/task_queue/default_task_queue_factory.h" -#include "api/video_codecs/builtin_video_decoder_factory.h" -#include "api/video_codecs/builtin_video_encoder_factory.h" -#include "media/engine/webrtc_media_engine.h" -#include "media/engine/webrtc_media_engine_defaults.h" #include "pc/peer_connection_wrapper.h" #include "pc/session_description.h" #include "pc/test/fake_audio_capture_module.h" -#include "pc/test/frame_generator_capturer_video_track_source.h" -#include "pc/test/peer_connection_test_wrapper.h" -#include "rtc_base/gunit.h" +#include "pc/test/mock_peer_connection_observers.h" +#include "rtc_base/checks.h" #include "rtc_base/internal/default_socket_server.h" -#include "rtc_base/physical_socket_server.h" +#include "rtc_base/socket_server.h" #include "rtc_base/thread.h" +#include "system_wrappers/include/clock.h" #include "test/gtest.h" -#include "test/scoped_key_value_config.h" #ifdef WEBRTC_ANDROID #include "pc/test/android_test_initializer.h" @@ -41,20 +41,6 @@ namespace webrtc { -namespace { -static const int kDefaultTimeoutMs = 5000; - -bool AddIceCandidates(PeerConnectionWrapper* peer, - std::vector candidates) { - for (const auto candidate : candidates) { - if (!peer->pc()->AddIceCandidate(candidate)) { - return false; 
- } - } - return true; -} -} // namespace - using RTCConfiguration = PeerConnectionInterface::RTCConfiguration; class PeerConnectionFieldTrialTest : public ::testing::Test { @@ -63,12 +49,12 @@ class PeerConnectionFieldTrialTest : public ::testing::Test { PeerConnectionFieldTrialTest() : clock_(Clock::GetRealTimeClock()), - socket_server_(rtc::CreateDefaultSocketServer()), + socket_server_(CreateDefaultSocketServer()), main_thread_(socket_server_.get()) { #ifdef WEBRTC_ANDROID InitializeAndroidObjects(); #endif - webrtc::PeerConnectionInterface::IceServer ice_server; + PeerConnectionInterface::IceServer ice_server; ice_server.uri = "stun:stun.l.google.com:19302"; config_.servers.push_back(ice_server); config_.sdp_semantics = SdpSemantics::kUnifiedPlan; @@ -78,16 +64,11 @@ class PeerConnectionFieldTrialTest : public ::testing::Test { void CreatePCFactory(std::unique_ptr field_trials) { PeerConnectionFactoryDependencies pcf_deps; - pcf_deps.signaling_thread = rtc::Thread::Current(); + pcf_deps.signaling_thread = Thread::Current(); pcf_deps.trials = std::move(field_trials); pcf_deps.task_queue_factory = CreateDefaultTaskQueueFactory(); - pcf_deps.call_factory = webrtc::CreateCallFactory(); - cricket::MediaEngineDependencies media_deps; - media_deps.task_queue_factory = pcf_deps.task_queue_factory.get(); - media_deps.adm = FakeAudioCaptureModule::Create(); - media_deps.trials = pcf_deps.trials.get(); - webrtc::SetMediaEngineDefaults(&media_deps); - pcf_deps.media_engine = cricket::CreateMediaEngine(std::move(media_deps)); + pcf_deps.adm = FakeAudioCaptureModule::Create(); + EnableMediaWithDefaults(pcf_deps); pc_factory_ = CreateModularPeerConnectionFactory(std::move(pcf_deps)); // Allow ADAPTER_TYPE_LOOPBACK to create PeerConnections with loopback in @@ -110,72 +91,74 @@ class PeerConnectionFieldTrialTest : public ::testing::Test { } Clock* const clock_; - std::unique_ptr socket_server_; - rtc::AutoSocketServerThread main_thread_; - rtc::scoped_refptr pc_factory_ = nullptr; - webrtc::PeerConnectionInterface::RTCConfiguration config_; + std::unique_ptr socket_server_; + AutoSocketServerThread main_thread_; + scoped_refptr pc_factory_ = nullptr; + PeerConnectionInterface::RTCConfiguration config_; }; // Tests for the dependency descriptor field trial. The dependency descriptor // field trial is implemented in media/engine/webrtc_video_engine.cc. 
TEST_F(PeerConnectionFieldTrialTest, EnableDependencyDescriptorAdvertised) { - std::unique_ptr field_trials = - std::make_unique( - "WebRTC-DependencyDescriptorAdvertised/Enabled/"); - CreatePCFactory(std::move(field_trials)); + CreatePCFactory(FieldTrials::CreateNoGlobal( + "WebRTC-DependencyDescriptorAdvertised/Enabled/")); WrapperPtr caller = CreatePeerConnection(); - caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + caller->AddTransceiver(webrtc::MediaType::VIDEO); auto offer = caller->CreateOffer(); auto contents1 = offer->description()->contents(); ASSERT_EQ(1u, contents1.size()); - const cricket::MediaContentDescription* media_description1 = + const MediaContentDescription* media_description1 = contents1[0].media_description(); - EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, media_description1->type()); - const cricket::RtpHeaderExtensions& rtp_header_extensions1 = + EXPECT_EQ(webrtc::MediaType::VIDEO, media_description1->type()); + const RtpHeaderExtensions& rtp_header_extensions1 = media_description1->rtp_header_extensions(); - bool found = absl::c_find_if(rtp_header_extensions1, - [](const webrtc::RtpExtension& rtp_extension) { - return rtp_extension.uri == - RtpExtension::kDependencyDescriptorUri; - }) != rtp_header_extensions1.end(); + bool found = + absl::c_find_if( + rtp_header_extensions1, [](const RtpExtension& rtp_extension) { + return rtp_extension.uri == RtpExtension::kDependencyDescriptorUri; + }) != rtp_header_extensions1.end(); EXPECT_TRUE(found); } // Tests that dependency descriptor RTP header extensions can be exchanged // via SDP munging, even if dependency descriptor field trial is disabled. -TEST_F(PeerConnectionFieldTrialTest, InjectDependencyDescriptor) { - std::unique_ptr field_trials = - std::make_unique( - "WebRTC-DependencyDescriptorAdvertised/Disabled/"); - CreatePCFactory(std::move(field_trials)); +#ifdef WEBRTC_WIN +// TODO: crbug.com/webrtc/15876 - Test is flaky on Windows machines. 
+#define MAYBE_InjectDependencyDescriptor DISABLED_InjectDependencyDescriptor +#else +#define MAYBE_InjectDependencyDescriptor InjectDependencyDescriptor +#endif +TEST_F(PeerConnectionFieldTrialTest, MAYBE_InjectDependencyDescriptor) { + CreatePCFactory(FieldTrials::CreateNoGlobal( + "WebRTC-DependencyDescriptorAdvertised/Disabled/")); WrapperPtr caller = CreatePeerConnection(); WrapperPtr callee = CreatePeerConnection(); - caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + caller->AddTransceiver(webrtc::MediaType::VIDEO); auto offer = caller->CreateOffer(); - cricket::ContentInfos& contents1 = offer->description()->contents(); + ContentInfos& contents1 = offer->description()->contents(); ASSERT_EQ(1u, contents1.size()); - cricket::MediaContentDescription* media_description1 = + MediaContentDescription* media_description1 = contents1[0].media_description(); - EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, media_description1->type()); - cricket::RtpHeaderExtensions rtp_header_extensions1 = + EXPECT_EQ(webrtc::MediaType::VIDEO, media_description1->type()); + RtpHeaderExtensions rtp_header_extensions1 = media_description1->rtp_header_extensions(); - bool found1 = absl::c_find_if(rtp_header_extensions1, - [](const webrtc::RtpExtension& rtp_extension) { - return rtp_extension.uri == - RtpExtension::kDependencyDescriptorUri; - }) != rtp_header_extensions1.end(); + bool found1 = + absl::c_find_if( + rtp_header_extensions1, [](const RtpExtension& rtp_extension) { + return rtp_extension.uri == RtpExtension::kDependencyDescriptorUri; + }) != rtp_header_extensions1.end(); EXPECT_FALSE(found1); std::set existing_ids; - for (const webrtc::RtpExtension& rtp_extension : rtp_header_extensions1) { + for (const RtpExtension& rtp_extension : rtp_header_extensions1) { existing_ids.insert(rtp_extension.id); } @@ -202,76 +185,21 @@ TEST_F(PeerConnectionFieldTrialTest, InjectDependencyDescriptor) { ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); auto answer = callee->CreateAnswer(); - cricket::ContentInfos& contents2 = answer->description()->contents(); + ContentInfos& contents2 = answer->description()->contents(); ASSERT_EQ(1u, contents2.size()); - cricket::MediaContentDescription* media_description2 = + MediaContentDescription* media_description2 = contents2[0].media_description(); - EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, media_description2->type()); - cricket::RtpHeaderExtensions rtp_header_extensions2 = + EXPECT_EQ(webrtc::MediaType::VIDEO, media_description2->type()); + RtpHeaderExtensions rtp_header_extensions2 = media_description2->rtp_header_extensions(); - bool found2 = absl::c_find_if(rtp_header_extensions2, - [](const webrtc::RtpExtension& rtp_extension) { - return rtp_extension.uri == - RtpExtension::kDependencyDescriptorUri; - }) != rtp_header_extensions2.end(); + bool found2 = + absl::c_find_if( + rtp_header_extensions2, [](const RtpExtension& rtp_extension) { + return rtp_extension.uri == RtpExtension::kDependencyDescriptorUri; + }) != rtp_header_extensions2.end(); EXPECT_TRUE(found2); } -// Test that the ability to emulate degraded networks works without crashing. 
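The MAYBE_/DISABLED_ indirection introduced above is the usual gtest convention for skipping a test on one platform while keeping it everywhere else: gtest registers but does not run tests whose names start with DISABLED_, and the macro selects the name at compile time. A compressed sketch of the pattern with a hypothetical test name (FlakyOnWindows and the bug reference are made up):

// Pattern sketch only; FlakyOnWindows is not a real test in this file.
#ifdef WEBRTC_WIN
// crbug.com/webrtc/XXXXX: flaky on Windows bots, so skip it there.
#define MAYBE_FlakyOnWindows DISABLED_FlakyOnWindows
#else
#define MAYBE_FlakyOnWindows FlakyOnWindows
#endif
TEST_F(PeerConnectionFieldTrialTest, MAYBE_FlakyOnWindows) {
  // The body compiles on every platform; on Windows gtest registers it as
  // DISABLED_* and skips it unless --gtest_also_run_disabled_tests is passed.
}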
-TEST_F(PeerConnectionFieldTrialTest, ApplyFakeNetworkConfig) { - std::unique_ptr field_trials = - std::make_unique( - "WebRTC-FakeNetworkSendConfig/link_capacity_kbps:500/" - "WebRTC-FakeNetworkReceiveConfig/loss_percent:1/"); - - CreatePCFactory(std::move(field_trials)); - - WrapperPtr caller = CreatePeerConnection(); - BitrateSettings bitrate_settings; - bitrate_settings.start_bitrate_bps = 1'000'000; - bitrate_settings.max_bitrate_bps = 1'000'000; - caller->pc()->SetBitrate(bitrate_settings); - FrameGeneratorCapturerVideoTrackSource::Config config; - auto video_track_source = - rtc::make_ref_counted( - config, clock_, /*is_screencast=*/false); - video_track_source->Start(); - caller->AddTrack(pc_factory_->CreateVideoTrack(video_track_source, "v")); - WrapperPtr callee = CreatePeerConnection(); - - ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); - ASSERT_TRUE( - caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); - - // Do the SDP negotiation, and also exchange ice candidates. - ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - ASSERT_TRUE_WAIT( - caller->signaling_state() == PeerConnectionInterface::kStable, - kDefaultTimeoutMs); - ASSERT_TRUE_WAIT(caller->IsIceGatheringDone(), kDefaultTimeoutMs); - ASSERT_TRUE_WAIT(callee->IsIceGatheringDone(), kDefaultTimeoutMs); - - // Connect an ICE candidate pairs. - ASSERT_TRUE( - AddIceCandidates(callee.get(), caller->observer()->GetAllCandidates())); - ASSERT_TRUE( - AddIceCandidates(caller.get(), callee->observer()->GetAllCandidates())); - - // This means that ICE and DTLS are connected. - ASSERT_TRUE_WAIT(callee->IsIceConnected(), kDefaultTimeoutMs); - ASSERT_TRUE_WAIT(caller->IsIceConnected(), kDefaultTimeoutMs); - - // Send packets for kDefaultTimeoutMs - WAIT(false, kDefaultTimeoutMs); - - std::vector outbound_rtp_stats = - caller->GetStats()->GetStatsOfType(); - ASSERT_GE(outbound_rtp_stats.size(), 1u); - ASSERT_TRUE(outbound_rtp_stats[0]->target_bitrate.is_defined()); - // Link capacity is limited to 500k, so BWE is expected to be close to 500k. 
- ASSERT_LE(*outbound_rtp_stats[0]->target_bitrate, 500'000 * 1.1); -} - } // namespace webrtc diff --git a/pc/peer_connection_header_extension_unittest.cc b/pc/peer_connection_header_extension_unittest.cc index b1c6c3cfb5..b62d68b7aa 100644 --- a/pc/peer_connection_header_extension_unittest.cc +++ b/pc/peer_connection_header_extension_unittest.cc @@ -9,40 +9,36 @@ */ #include +#include #include #include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/call/call_factory_interface.h" +#include "api/environment/environment_factory.h" #include "api/jsep.h" #include "api/media_types.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" #include "api/rtc_event_log/rtc_event_log_factory.h" -#include "api/rtc_event_log/rtc_event_log_factory_interface.h" #include "api/rtp_parameters.h" #include "api/rtp_transceiver_direction.h" #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" #include "api/task_queue/default_task_queue_factory.h" -#include "api/task_queue/task_queue_factory.h" #include "media/base/fake_media_engine.h" -#include "media/base/media_engine.h" -#include "p2p/base/fake_port_allocator.h" -#include "p2p/base/port_allocator.h" +#include "p2p/test/fake_port_allocator.h" #include "pc/peer_connection_wrapper.h" #include "pc/session_description.h" +#include "pc/test/enable_fake_media.h" #include "pc/test/mock_peer_connection_observers.h" #include "rtc_base/internal/default_socket_server.h" -#include "rtc_base/rtc_certificate_generator.h" +#include "rtc_base/socket_server.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/thread.h" #include "test/gmock.h" #include "test/gtest.h" -#include "test/scoped_key_value_config.h" namespace webrtc { @@ -54,10 +50,10 @@ using ::testing::Values; class PeerConnectionHeaderExtensionTest : public ::testing::TestWithParam< - std::tuple> { + std::tuple> { protected: PeerConnectionHeaderExtensionTest() - : socket_server_(rtc::CreateDefaultSocketServer()), + : socket_server_(CreateDefaultSocketServer()), main_thread_(socket_server_.get()), extensions_( {RtpHeaderExtensionCapability("uri1", @@ -75,34 +71,28 @@ class PeerConnectionHeaderExtensionTest RtpTransceiverDirection::kSendRecv)}) {} std::unique_ptr CreatePeerConnection( - cricket::MediaType media_type, - absl::optional semantics) { - auto voice = std::make_unique(); - auto video = std::make_unique(); - if (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO) - voice->SetRtpHeaderExtensions(extensions_); + webrtc::MediaType media_type, + std::optional semantics) { + auto media_engine = std::make_unique(); + if (media_type == webrtc::MediaType::AUDIO) + media_engine->fake_voice_engine()->SetRtpHeaderExtensions(extensions_); else - video->SetRtpHeaderExtensions(extensions_); - auto media_engine = std::make_unique( - std::move(voice), std::move(video)); + media_engine->fake_video_engine()->SetRtpHeaderExtensions(extensions_); PeerConnectionFactoryDependencies factory_dependencies; - factory_dependencies.network_thread = rtc::Thread::Current(); - factory_dependencies.worker_thread = rtc::Thread::Current(); - factory_dependencies.signaling_thread = rtc::Thread::Current(); + factory_dependencies.network_thread = Thread::Current(); + factory_dependencies.worker_thread = Thread::Current(); + factory_dependencies.signaling_thread = Thread::Current(); factory_dependencies.task_queue_factory = CreateDefaultTaskQueueFactory(); - factory_dependencies.media_engine = std::move(media_engine); - 
factory_dependencies.call_factory = CreateCallFactory(); + EnableFakeMedia(factory_dependencies, std::move(media_engine)); + factory_dependencies.event_log_factory = - std::make_unique( - factory_dependencies.task_queue_factory.get()); + std::make_unique(); auto pc_factory = CreateModularPeerConnectionFactory(std::move(factory_dependencies)); - auto fake_port_allocator = std::make_unique( - rtc::Thread::Current(), - std::make_unique(socket_server_.get()), - &field_trials_); + auto fake_port_allocator = std::make_unique( + CreateEnvironment(), socket_server_.get()); auto observer = std::make_unique(); PeerConnectionInterface::RTCConfiguration config; if (semantics) @@ -117,14 +107,13 @@ class PeerConnectionHeaderExtensionTest pc_factory, result.MoveValue(), std::move(observer)); } - webrtc::test::ScopedKeyValueConfig field_trials_; - std::unique_ptr socket_server_; - rtc::AutoSocketServerThread main_thread_; + std::unique_ptr socket_server_; + AutoSocketServerThread main_thread_; std::vector extensions_; }; TEST_P(PeerConnectionHeaderExtensionTest, TransceiverOffersHeaderExtensions) { - cricket::MediaType media_type; + webrtc::MediaType media_type; SdpSemantics semantics; std::tie(media_type, semantics) = GetParam(); if (semantics != SdpSemantics::kUnifiedPlan) @@ -137,7 +126,7 @@ TEST_P(PeerConnectionHeaderExtensionTest, TransceiverOffersHeaderExtensions) { TEST_P(PeerConnectionHeaderExtensionTest, SenderReceiverCapabilitiesReturnNotStoppedExtensions) { - cricket::MediaType media_type; + webrtc::MediaType media_type; SdpSemantics semantics; std::tie(media_type, semantics) = GetParam(); std::unique_ptr wrapper = @@ -157,7 +146,7 @@ TEST_P(PeerConnectionHeaderExtensionTest, } TEST_P(PeerConnectionHeaderExtensionTest, OffersUnstoppedDefaultExtensions) { - cricket::MediaType media_type; + webrtc::MediaType media_type; SdpSemantics semantics; std::tie(media_type, semantics) = GetParam(); if (semantics != SdpSemantics::kUnifiedPlan) @@ -176,7 +165,7 @@ TEST_P(PeerConnectionHeaderExtensionTest, OffersUnstoppedDefaultExtensions) { } TEST_P(PeerConnectionHeaderExtensionTest, OffersUnstoppedModifiedExtensions) { - cricket::MediaType media_type; + webrtc::MediaType media_type; SdpSemantics semantics; std::tie(media_type, semantics) = GetParam(); if (semantics != SdpSemantics::kUnifiedPlan) @@ -200,7 +189,7 @@ TEST_P(PeerConnectionHeaderExtensionTest, OffersUnstoppedModifiedExtensions) { } TEST_P(PeerConnectionHeaderExtensionTest, AnswersUnstoppedModifiedExtensions) { - cricket::MediaType media_type; + webrtc::MediaType media_type; SdpSemantics semantics; std::tie(media_type, semantics) = GetParam(); if (semantics != SdpSemantics::kUnifiedPlan) @@ -233,7 +222,7 @@ TEST_P(PeerConnectionHeaderExtensionTest, AnswersUnstoppedModifiedExtensions) { } TEST_P(PeerConnectionHeaderExtensionTest, NegotiatedExtensionsAreAccessible) { - cricket::MediaType media_type; + webrtc::MediaType media_type; SdpSemantics semantics; std::tie(media_type, semantics) = GetParam(); if (semantics != SdpSemantics::kUnifiedPlan) @@ -269,7 +258,7 @@ TEST_P(PeerConnectionHeaderExtensionTest, NegotiatedExtensionsAreAccessible) { } TEST_P(PeerConnectionHeaderExtensionTest, OfferedExtensionsArePerTransceiver) { - cricket::MediaType media_type; + webrtc::MediaType media_type; SdpSemantics semantics; std::tie(media_type, semantics) = GetParam(); if (semantics != SdpSemantics::kUnifiedPlan) @@ -299,7 +288,7 @@ TEST_P(PeerConnectionHeaderExtensionTest, OfferedExtensionsArePerTransceiver) { } TEST_P(PeerConnectionHeaderExtensionTest, 
RemovalAfterRenegotiation) { - cricket::MediaType media_type; + webrtc::MediaType media_type; SdpSemantics semantics; std::tie(media_type, semantics) = GetParam(); if (semantics != SdpSemantics::kUnifiedPlan) @@ -331,7 +320,7 @@ TEST_P(PeerConnectionHeaderExtensionTest, RemovalAfterRenegotiation) { TEST_P(PeerConnectionHeaderExtensionTest, StoppedByDefaultExtensionCanBeActivatedByRemoteSdp) { - cricket::MediaType media_type; + webrtc::MediaType media_type; SdpSemantics semantics; std::tie(media_type, semantics) = GetParam(); if (semantics != SdpSemantics::kUnifiedPlan) @@ -366,7 +355,7 @@ TEST_P(PeerConnectionHeaderExtensionTest, TEST_P(PeerConnectionHeaderExtensionTest, UnknownExtensionInRemoteOfferDoesNotShowUp) { - cricket::MediaType media_type; + webrtc::MediaType media_type; SdpSemantics semantics; std::tie(media_type, semantics) = GetParam(); if (semantics != SdpSemantics::kUnifiedPlan) @@ -383,7 +372,7 @@ TEST_P(PeerConnectionHeaderExtensionTest, "AD:7E:77:43:2A:29:EC:93\r\n" "a=ice-ufrag:6HHHdzzeIhkE0CKj\r\n" "a=ice-pwd:XYDGVpfvklQIEnZ6YnyLsAew\r\n"; - if (media_type == cricket::MEDIA_TYPE_AUDIO) { + if (media_type == webrtc::MediaType::AUDIO) { sdp += "m=audio 9 RTP/AVPF 111\r\n" "a=rtpmap:111 fake_audio_codec/8000\r\n"; @@ -420,7 +409,7 @@ TEST_P(PeerConnectionHeaderExtensionTest, // of the API to only offer non-stopped extensions. TEST_P(PeerConnectionHeaderExtensionTest, SdpMungingAnswerWithoutApiUsageEnablesExtensions) { - cricket::MediaType media_type; + webrtc::MediaType media_type; SdpSemantics semantics; std::tie(media_type, semantics) = GetParam(); if (semantics != SdpSemantics::kUnifiedPlan) @@ -437,7 +426,7 @@ TEST_P(PeerConnectionHeaderExtensionTest, "AD:7E:77:43:2A:29:EC:93\r\n" "a=ice-ufrag:6HHHdzzeIhkE0CKj\r\n" "a=ice-pwd:XYDGVpfvklQIEnZ6YnyLsAew\r\n"; - if (media_type == cricket::MEDIA_TYPE_AUDIO) { + if (media_type == webrtc::MediaType::AUDIO) { sdp += "m=audio 9 RTP/AVPF 111\r\n" "a=rtpmap:111 fake_audio_codec/8000\r\n"; @@ -477,7 +466,7 @@ TEST_P(PeerConnectionHeaderExtensionTest, TEST_P(PeerConnectionHeaderExtensionTest, SdpMungingOfferWithoutApiUsageEnablesExtensions) { - cricket::MediaType media_type; + webrtc::MediaType media_type; SdpSemantics semantics; std::tie(media_type, semantics) = GetParam(); if (semantics != SdpSemantics::kUnifiedPlan) @@ -507,7 +496,7 @@ TEST_P(PeerConnectionHeaderExtensionTest, } TEST_P(PeerConnectionHeaderExtensionTest, EnablingExtensionsAfterRemoteOffer) { - cricket::MediaType media_type; + webrtc::MediaType media_type; SdpSemantics semantics; std::tie(media_type, semantics) = GetParam(); if (semantics != SdpSemantics::kUnifiedPlan) @@ -524,7 +513,7 @@ TEST_P(PeerConnectionHeaderExtensionTest, EnablingExtensionsAfterRemoteOffer) { "AD:7E:77:43:2A:29:EC:93\r\n" "a=ice-ufrag:6HHHdzzeIhkE0CKj\r\n" "a=ice-pwd:XYDGVpfvklQIEnZ6YnyLsAew\r\n"; - if (media_type == cricket::MEDIA_TYPE_AUDIO) { + if (media_type == webrtc::MediaType::AUDIO) { sdp += "m=audio 9 RTP/AVPF 111\r\n" "a=rtpmap:111 fake_audio_codec/8000\r\n"; @@ -569,19 +558,17 @@ INSTANTIATE_TEST_SUITE_P( , PeerConnectionHeaderExtensionTest, Combine(Values(SdpSemantics::kPlanB_DEPRECATED, SdpSemantics::kUnifiedPlan), - Values(cricket::MediaType::MEDIA_TYPE_AUDIO, - cricket::MediaType::MEDIA_TYPE_VIDEO)), + Values(webrtc::MediaType::AUDIO, webrtc::MediaType::VIDEO)), [](const testing::TestParamInfo< PeerConnectionHeaderExtensionTest::ParamType>& info) { - cricket::MediaType media_type; + webrtc::MediaType media_type; SdpSemantics semantics; std::tie(media_type, semantics) = 
info.param; - return (rtc::StringBuilder("With") + return (StringBuilder("With") << (semantics == SdpSemantics::kPlanB_DEPRECATED ? "PlanB" : "UnifiedPlan") << "And" - << (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO ? "Voice" - : "Video") + << (media_type == webrtc::MediaType::AUDIO ? "Voice" : "Video") << "Engine") .str(); }); diff --git a/pc/peer_connection_histogram_unittest.cc b/pc/peer_connection_histogram_unittest.cc index 68a4dbc361..ea428e4193 100644 --- a/pc/peer_connection_histogram_unittest.cc +++ b/pc/peer_connection_histogram_unittest.cc @@ -9,32 +9,24 @@ */ #include +#include #include #include #include #include -#include "absl/types/optional.h" -#include "api/async_resolver_factory.h" -#include "api/call/call_factory_interface.h" #include "api/jsep.h" #include "api/jsep_session_description.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" #include "api/scoped_refptr.h" -#include "api/task_queue/default_task_queue_factory.h" -#include "api/task_queue/task_queue_factory.h" #include "api/test/mock_async_dns_resolver.h" -#include "media/base/fake_media_engine.h" -#include "media/base/media_engine.h" -#include "p2p/base/mock_async_resolver.h" -#include "p2p/base/port_allocator.h" -#include "p2p/client/basic_port_allocator.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" #include "pc/peer_connection.h" -#include "pc/peer_connection_factory.h" -#include "pc/peer_connection_proxy.h" #include "pc/peer_connection_wrapper.h" #include "pc/sdp_utils.h" +#include "pc/test/enable_fake_media.h" #include "pc/test/mock_peer_connection_observers.h" #include "pc/usage_pattern.h" #include "pc/webrtc_sdp.h" @@ -43,28 +35,28 @@ #include "rtc_base/fake_mdns_responder.h" #include "rtc_base/fake_network.h" #include "rtc_base/gunit.h" -#include "rtc_base/mdns_responder_interface.h" +#include "rtc_base/network.h" #include "rtc_base/socket_address.h" #include "rtc_base/thread.h" #include "rtc_base/virtual_socket_server.h" #include "system_wrappers/include/metrics.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" namespace webrtc { using RTCConfiguration = PeerConnectionInterface::RTCConfiguration; using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions; using ::testing::NiceMock; -using ::testing::Values; static const char kUsagePatternMetric[] = "WebRTC.PeerConnection.UsagePattern"; -static constexpr int kDefaultTimeout = 10000; -static const rtc::SocketAddress kLocalAddrs[2] = { - rtc::SocketAddress("1.1.1.1", 0), rtc::SocketAddress("2.2.2.2", 0)}; -static const rtc::SocketAddress kPrivateLocalAddress("10.1.1.1", 0); -static const rtc::SocketAddress kPrivateIpv6LocalAddress("fd12:3456:789a:1::1", - 0); +static constexpr webrtc::TimeDelta kDefaultTimeout = + webrtc::TimeDelta::Millis(10000); +static const SocketAddress kLocalAddrs[2] = {SocketAddress("1.1.1.1", 0), + SocketAddress("2.2.2.2", 0)}; +static const SocketAddress kPrivateLocalAddress("10.1.1.1", 0); +static const SocketAddress kPrivateIpv6LocalAddress("fd12:3456:789a:1::1", 0); int MakeUsageFingerprint(std::set events) { int signature = 0; @@ -74,30 +66,13 @@ int MakeUsageFingerprint(std::set events) { return signature; } -class PeerConnectionFactoryForUsageHistogramTest - : public PeerConnectionFactory { - public: - PeerConnectionFactoryForUsageHistogramTest() - : PeerConnectionFactory([] { - PeerConnectionFactoryDependencies dependencies; - dependencies.network_thread = rtc::Thread::Current(); - dependencies.worker_thread = 
rtc::Thread::Current(); - dependencies.signaling_thread = rtc::Thread::Current(); - dependencies.task_queue_factory = CreateDefaultTaskQueueFactory(); - dependencies.media_engine = - std::make_unique(); - dependencies.call_factory = CreateCallFactory(); - return dependencies; - }()) {} -}; - class PeerConnectionWrapperForUsageHistogramTest; typedef PeerConnectionWrapperForUsageHistogramTest* RawWrapperPtr; class ObserverForUsageHistogramTest : public MockPeerConnectionObserver { public: - void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override; + void OnIceCandidate(const IceCandidateInterface* candidate) override; void OnInterestingUsage(int usage_pattern) override { interesting_usage_detected_ = usage_pattern; @@ -109,18 +84,18 @@ class ObserverForUsageHistogramTest : public MockPeerConnectionObserver { bool HaveDataChannel() { return last_datachannel_ != nullptr; } - absl::optional interesting_usage_detected() { + std::optional interesting_usage_detected() { return interesting_usage_detected_; } void ClearInterestingUsageDetector() { - interesting_usage_detected_ = absl::optional(); + interesting_usage_detected_ = std::optional(); } bool candidate_gathered() const { return candidate_gathered_; } private: - absl::optional interesting_usage_detected_; + std::optional interesting_usage_detected_; bool candidate_gathered_ = false; RawWrapperPtr candidate_target_; // Note: Not thread-safe against deletions. }; @@ -130,13 +105,6 @@ class PeerConnectionWrapperForUsageHistogramTest public: using PeerConnectionWrapper::PeerConnectionWrapper; - PeerConnection* GetInternalPeerConnection() { - auto* pci = - static_cast*>( - pc()); - return static_cast(pci->internal()); - } - // Override with different return type ObserverForUsageHistogramTest* observer() { return static_cast( @@ -160,12 +128,11 @@ class PeerConnectionWrapperForUsageHistogramTest return static_cast(observer()) ->HaveDataChannel(); } - void BufferIceCandidate(const webrtc::IceCandidateInterface* candidate) { + void BufferIceCandidate(const IceCandidateInterface* candidate) { std::string sdp; EXPECT_TRUE(candidate->ToString(&sdp)); - std::unique_ptr candidate_copy( - CreateIceCandidate(candidate->sdp_mid(), candidate->sdp_mline_index(), - sdp, nullptr)); + std::unique_ptr candidate_copy(CreateIceCandidate( + candidate->sdp_mid(), candidate->sdp_mline_index(), sdp, nullptr)); buffered_candidates_.push_back(std::move(candidate_copy)); } @@ -192,12 +159,12 @@ class PeerConnectionWrapperForUsageHistogramTest return false; } // Wait until the gathering completes before we signal the candidate. 
- WAIT(observer()->ice_gathering_complete_, kDefaultTimeout); - WAIT(callee->observer()->ice_gathering_complete_, kDefaultTimeout); + WAIT(observer()->ice_gathering_complete_, kDefaultTimeout.ms()); + WAIT(callee->observer()->ice_gathering_complete_, kDefaultTimeout.ms()); AddBufferedIceCandidates(); callee->AddBufferedIceCandidates(); - WAIT(IsConnected(), kDefaultTimeout); - WAIT(callee->IsConnected(), kDefaultTimeout); + WAIT(IsConnected(), kDefaultTimeout.ms()); + WAIT(callee->IsConnected(), kDefaultTimeout.ms()); return IsConnected() && callee->IsConnected(); } @@ -212,23 +179,24 @@ class PeerConnectionWrapperForUsageHistogramTest if (!set_local_offer) { return false; } - EXPECT_TRUE_WAIT(observer()->ice_gathering_complete_, kDefaultTimeout); + EXPECT_THAT(WaitUntil([&] { return observer()->ice_gathering_complete_; }, + ::testing::IsTrue()), + IsRtcOk()); return true; } - webrtc::PeerConnectionInterface::IceGatheringState ice_gathering_state() { + PeerConnectionInterface::IceGatheringState ice_gathering_state() { return pc()->ice_gathering_state(); } private: // Candidates that have been sent but not yet configured - std::vector> - buffered_candidates_; + std::vector> buffered_candidates_; }; // Buffers candidates until we add them via AddBufferedIceCandidates. void ObserverForUsageHistogramTest::OnIceCandidate( - const webrtc::IceCandidateInterface* candidate) { + const IceCandidateInterface* candidate) { // If target is not set, ignore. This happens in one-ended unit tests. if (candidate_target_) { this->candidate_target_->BufferIceCandidate(candidate); @@ -241,109 +209,89 @@ class PeerConnectionUsageHistogramTest : public ::testing::Test { typedef std::unique_ptr WrapperPtr; - PeerConnectionUsageHistogramTest() - : vss_(new rtc::VirtualSocketServer()), - socket_factory_(new rtc::BasicPacketSocketFactory(vss_.get())), - main_(vss_.get()) { - webrtc::metrics::Reset(); - } + PeerConnectionUsageHistogramTest() : main_(&vss_) { metrics::Reset(); } WrapperPtr CreatePeerConnection() { RTCConfiguration config; - config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan; - return CreatePeerConnection( - config, PeerConnectionFactoryInterface::Options(), nullptr); + config.sdp_semantics = SdpSemantics::kUnifiedPlan; + return CreatePeerConnection(config); } WrapperPtr CreatePeerConnection(const RTCConfiguration& config) { - return CreatePeerConnection( - config, PeerConnectionFactoryInterface::Options(), nullptr); + return CreatePeerConnection(config, + PeerConnectionFactoryInterface::Options(), + PeerConnectionDependencies(nullptr), nullptr); } WrapperPtr CreatePeerConnectionWithMdns(const RTCConfiguration& config) { - auto resolver_factory = - std::make_unique>(); - - webrtc::PeerConnectionDependencies deps(nullptr /* observer_in */); + PeerConnectionDependencies deps(nullptr /* observer_in */); + deps.async_dns_resolver_factory = + std::make_unique>(); - auto fake_network = NewFakeNetwork(); + auto fake_network = std::make_unique(); fake_network->set_mdns_responder( - std::make_unique(rtc::Thread::Current())); + std::make_unique(Thread::Current())); fake_network->AddInterface(NextLocalAddress()); - std::unique_ptr port_allocator( - new cricket::BasicPortAllocator(fake_network, socket_factory_.get())); - - deps.async_dns_resolver_factory = std::move(resolver_factory); - deps.allocator = std::move(port_allocator); - - return CreatePeerConnection( - config, PeerConnectionFactoryInterface::Options(), std::move(deps)); + return CreatePeerConnection(config, + 
PeerConnectionFactoryInterface::Options(), + std::move(deps), std::move(fake_network)); } WrapperPtr CreatePeerConnectionWithImmediateReport() { RTCConfiguration configuration; - configuration.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan; + configuration.sdp_semantics = SdpSemantics::kUnifiedPlan; configuration.report_usage_pattern_delay_ms = 0; - return CreatePeerConnection( - configuration, PeerConnectionFactoryInterface::Options(), nullptr); + return CreatePeerConnection(configuration); } WrapperPtr CreatePeerConnectionWithPrivateLocalAddresses() { - auto* fake_network = NewFakeNetwork(); + auto fake_network = std::make_unique(); fake_network->AddInterface(NextLocalAddress()); fake_network->AddInterface(kPrivateLocalAddress); - auto port_allocator = std::make_unique( - fake_network, socket_factory_.get()); RTCConfiguration config; config.sdp_semantics = SdpSemantics::kUnifiedPlan; - return CreatePeerConnection(config, - PeerConnectionFactoryInterface::Options(), - std::move(port_allocator)); + return CreatePeerConnection( + config, PeerConnectionFactoryInterface::Options(), + PeerConnectionDependencies(nullptr), std::move(fake_network)); } WrapperPtr CreatePeerConnectionWithPrivateIpv6LocalAddresses() { - auto* fake_network = NewFakeNetwork(); + auto fake_network = std::make_unique(); fake_network->AddInterface(NextLocalAddress()); fake_network->AddInterface(kPrivateIpv6LocalAddress); - auto port_allocator = std::make_unique( - fake_network, socket_factory_.get()); - RTCConfiguration config; config.sdp_semantics = SdpSemantics::kUnifiedPlan; - return CreatePeerConnection(config, - PeerConnectionFactoryInterface::Options(), - std::move(port_allocator)); - } - - WrapperPtr CreatePeerConnection( - const RTCConfiguration& config, - const PeerConnectionFactoryInterface::Options factory_options, - std::unique_ptr allocator) { - PeerConnectionDependencies deps(nullptr); - deps.allocator = std::move(allocator); - - return CreatePeerConnection(config, factory_options, std::move(deps)); + return CreatePeerConnection( + config, PeerConnectionFactoryInterface::Options(), + PeerConnectionDependencies(nullptr), std::move(fake_network)); } WrapperPtr CreatePeerConnection( const RTCConfiguration& config, const PeerConnectionFactoryInterface::Options factory_options, - PeerConnectionDependencies deps) { - auto pc_factory = - rtc::make_ref_counted(); - pc_factory->SetOptions(factory_options); - - // If no allocator is provided, one will be created using a network manager - // that uses the host network. This doesn't work on all trybots. - if (!deps.allocator) { - auto fake_network = NewFakeNetwork(); + PeerConnectionDependencies deps, + std::unique_ptr network_manager) { + PeerConnectionFactoryDependencies pcf_deps; + pcf_deps.network_thread = Thread::Current(); + pcf_deps.worker_thread = Thread::Current(); + pcf_deps.signaling_thread = Thread::Current(); + pcf_deps.socket_factory = &vss_; + if (network_manager != nullptr) { + pcf_deps.network_manager = std::move(network_manager); + } else { + // If no network manager is provided, one will be created that uses the + // host network. This doesn't work on all trybots. 
+ auto fake_network = std::make_unique(); fake_network->AddInterface(NextLocalAddress()); - deps.allocator = std::make_unique( - fake_network, socket_factory_.get()); + pcf_deps.network_manager = std::move(fake_network); } + EnableFakeMedia(pcf_deps); + + auto pc_factory = CreateModularPeerConnectionFactory(std::move(pcf_deps)); + pc_factory->SetOptions(factory_options); auto observer = std::make_unique(); deps.observer = observer.get(); @@ -364,39 +312,29 @@ class PeerConnectionUsageHistogramTest : public ::testing::Test { // This works correctly only if there is only one sample value // that has been counted. // Returns -1 for "not found". - return webrtc::metrics::MinSample(kUsagePatternMetric); + return metrics::MinSample(kUsagePatternMetric); } - // The PeerConnection's port allocator is tied to the PeerConnection's - // lifetime and expects the underlying NetworkManager to outlive it. That - // prevents us from having the PeerConnectionWrapper own the fake network. - // Therefore, the test fixture will own all the fake networks even though - // tests should access the fake network through the PeerConnectionWrapper. - rtc::FakeNetworkManager* NewFakeNetwork() { - fake_networks_.emplace_back(std::make_unique()); - return fake_networks_.back().get(); - } - - rtc::SocketAddress NextLocalAddress() { + SocketAddress NextLocalAddress() { RTC_DCHECK(next_local_address_ < (int)arraysize(kLocalAddrs)); return kLocalAddrs[next_local_address_++]; } - std::vector> fake_networks_; int next_local_address_ = 0; - std::unique_ptr vss_; - std::unique_ptr socket_factory_; - rtc::AutoSocketServerThread main_; + VirtualSocketServer vss_; + AutoSocketServerThread main_; }; TEST_F(PeerConnectionUsageHistogramTest, UsageFingerprintHistogramFromTimeout) { auto pc = CreatePeerConnectionWithImmediateReport(); int expected_fingerprint = MakeUsageFingerprint({}); - EXPECT_METRIC_EQ_WAIT(1, webrtc::metrics::NumSamples(kUsagePatternMetric), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return metrics::NumSamples(kUsagePatternMetric); }, + ::testing::Eq(1)), + IsRtcOk()); EXPECT_METRIC_EQ( - 1, webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint)); + 1, metrics::NumEvents(kUsagePatternMetric, expected_fingerprint)); } #ifndef WEBRTC_ANDROID @@ -421,11 +359,10 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintAudioVideo) { UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED}); // In this case, we may or may not have PRIVATE_CANDIDATE_COLLECTED, // depending on the machine configuration. 
- EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric)); + EXPECT_METRIC_EQ(2, metrics::NumSamples(kUsagePatternMetric)); EXPECT_METRIC_TRUE( - webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint) == - 2 || - webrtc::metrics::NumEvents( + metrics::NumEvents(kUsagePatternMetric, expected_fingerprint) == 2 || + metrics::NumEvents( kUsagePatternMetric, expected_fingerprint | static_cast(UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) == 2); @@ -466,11 +403,11 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintWithMdnsCaller) { UsageEvent::CANDIDATE_COLLECTED, UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, UsageEvent::REMOTE_MDNS_CANDIDATE_ADDED, UsageEvent::ICE_STATE_CONNECTED, UsageEvent::REMOTE_CANDIDATE_ADDED, UsageEvent::CLOSE_CALLED}); - EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric)); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric, - expected_fingerprint_caller)); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric, - expected_fingerprint_callee)); + EXPECT_METRIC_EQ(2, metrics::NumSamples(kUsagePatternMetric)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents(kUsagePatternMetric, expected_fingerprint_caller)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents(kUsagePatternMetric, expected_fingerprint_callee)); } // Test getting the usage fingerprint when the callee collects an mDNS @@ -507,11 +444,11 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintWithMdnsCallee) { UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, UsageEvent::ICE_STATE_CONNECTED, UsageEvent::REMOTE_CANDIDATE_ADDED, UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED}); - EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric)); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric, - expected_fingerprint_caller)); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric, - expected_fingerprint_callee)); + EXPECT_METRIC_EQ(2, metrics::NumSamples(kUsagePatternMetric)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents(kUsagePatternMetric, expected_fingerprint_caller)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents(kUsagePatternMetric, expected_fingerprint_callee)); } #ifdef WEBRTC_HAVE_SCTP @@ -520,7 +457,9 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintDataOnly) { auto callee = CreatePeerConnection(); caller->CreateDataChannel("foodata"); ASSERT_TRUE(caller->ConnectTo(callee.get())); - ASSERT_TRUE_WAIT(callee->HaveDataChannel(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return callee->HaveDataChannel(); }, ::testing::IsTrue()), + IsRtcOk()); caller->pc()->Close(); callee->pc()->Close(); int expected_fingerprint = MakeUsageFingerprint( @@ -529,11 +468,10 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintDataOnly) { UsageEvent::CANDIDATE_COLLECTED, UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, UsageEvent::ICE_STATE_CONNECTED, UsageEvent::REMOTE_CANDIDATE_ADDED, UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED}); - EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric)); + EXPECT_METRIC_EQ(2, metrics::NumSamples(kUsagePatternMetric)); EXPECT_METRIC_TRUE( - webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint) == - 2 || - webrtc::metrics::NumEvents( + metrics::NumEvents(kUsagePatternMetric, expected_fingerprint) == 2 || + metrics::NumEvents( kUsagePatternMetric, expected_fingerprint | static_cast(UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) == 2); @@ -557,9 +495,9 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintStunTurn) { int 
expected_fingerprint = MakeUsageFingerprint( {UsageEvent::STUN_SERVER_ADDED, UsageEvent::TURN_SERVER_ADDED, UsageEvent::CLOSE_CALLED}); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumSamples(kUsagePatternMetric)); + EXPECT_METRIC_EQ(1, metrics::NumSamples(kUsagePatternMetric)); EXPECT_METRIC_EQ( - 1, webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint)); + 1, metrics::NumEvents(kUsagePatternMetric, expected_fingerprint)); } TEST_F(PeerConnectionUsageHistogramTest, FingerprintStunTurnInReconfiguration) { @@ -579,9 +517,9 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintStunTurnInReconfiguration) { int expected_fingerprint = MakeUsageFingerprint( {UsageEvent::STUN_SERVER_ADDED, UsageEvent::TURN_SERVER_ADDED, UsageEvent::CLOSE_CALLED}); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumSamples(kUsagePatternMetric)); + EXPECT_METRIC_EQ(1, metrics::NumSamples(kUsagePatternMetric)); EXPECT_METRIC_EQ( - 1, webrtc::metrics::NumEvents(kUsagePatternMetric, expected_fingerprint)); + 1, metrics::NumEvents(kUsagePatternMetric, expected_fingerprint)); } TEST_F(PeerConnectionUsageHistogramTest, FingerprintWithPrivateIPCaller) { @@ -607,11 +545,11 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintWithPrivateIPCaller) { UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED, UsageEvent::ICE_STATE_CONNECTED, UsageEvent::REMOTE_CANDIDATE_ADDED, UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED}); - EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric)); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric, - expected_fingerprint_caller)); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric, - expected_fingerprint_callee)); + EXPECT_METRIC_EQ(2, metrics::NumSamples(kUsagePatternMetric)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents(kUsagePatternMetric, expected_fingerprint_caller)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents(kUsagePatternMetric, expected_fingerprint_callee)); } TEST_F(PeerConnectionUsageHistogramTest, FingerprintWithPrivateIpv6Callee) { @@ -639,11 +577,11 @@ TEST_F(PeerConnectionUsageHistogramTest, FingerprintWithPrivateIpv6Callee) { UsageEvent::ADD_ICE_CANDIDATE_SUCCEEDED, UsageEvent::REMOTE_CANDIDATE_ADDED, UsageEvent::ICE_STATE_CONNECTED, UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED}); - EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric)); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric, - expected_fingerprint_caller)); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric, - expected_fingerprint_callee)); + EXPECT_METRIC_EQ(2, metrics::NumSamples(kUsagePatternMetric)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents(kUsagePatternMetric, expected_fingerprint_caller)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents(kUsagePatternMetric, expected_fingerprint_callee)); } #ifndef WEBRTC_ANDROID @@ -667,8 +605,10 @@ TEST_F(PeerConnectionUsageHistogramTest, ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer())); // Wait until the gathering completes so that the session description would // have contained ICE candidates. - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceGatheringComplete, - caller->ice_gathering_state(), kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return caller->ice_gathering_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceGatheringComplete)), + IsRtcOk()); EXPECT_TRUE(caller->observer()->candidate_gathered()); // Get the current offer that contains candidates and pass it to the callee. 
// @@ -689,11 +629,18 @@ TEST_F(PeerConnectionUsageHistogramTest, auto answer = callee->CreateAnswer(); callee->SetLocalDescription(CloneSessionDescription(answer.get())); caller->SetRemoteDescription(std::move(answer)); - EXPECT_TRUE_WAIT(caller->IsConnected(), kDefaultTimeout); - EXPECT_TRUE_WAIT(callee->IsConnected(), kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return caller->IsConnected(); }, ::testing::IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return callee->IsConnected(); }, ::testing::IsTrue()), + IsRtcOk()); // The callee needs to process the open message to have the data channel open. - EXPECT_TRUE_WAIT(callee->observer()->last_datachannel_ != nullptr, - kDefaultTimeout); + EXPECT_THAT( + WaitUntil( + [&] { return callee->observer()->last_datachannel_ != nullptr; }, + ::testing::IsTrue()), + IsRtcOk()); caller->pc()->Close(); callee->pc()->Close(); @@ -716,11 +663,11 @@ TEST_F(PeerConnectionUsageHistogramTest, UsageEvent::REMOTE_PRIVATE_CANDIDATE_ADDED, UsageEvent::REMOTE_IPV6_CANDIDATE_ADDED, UsageEvent::ICE_STATE_CONNECTED, UsageEvent::DIRECT_CONNECTION_SELECTED, UsageEvent::CLOSE_CALLED}); - EXPECT_METRIC_EQ(2, webrtc::metrics::NumSamples(kUsagePatternMetric)); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric, - expected_fingerprint_caller)); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents(kUsagePatternMetric, - expected_fingerprint_callee)); + EXPECT_METRIC_EQ(2, metrics::NumSamples(kUsagePatternMetric)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents(kUsagePatternMetric, expected_fingerprint_caller)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents(kUsagePatternMetric, expected_fingerprint_callee)); } TEST_F(PeerConnectionUsageHistogramTest, NotableUsageNoted) { @@ -731,13 +678,13 @@ TEST_F(PeerConnectionUsageHistogramTest, NotableUsageNoted) { int expected_fingerprint = MakeUsageFingerprint( {UsageEvent::DATA_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, UsageEvent::CANDIDATE_COLLECTED, UsageEvent::CLOSE_CALLED}); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumSamples(kUsagePatternMetric)); + EXPECT_METRIC_EQ(1, metrics::NumSamples(kUsagePatternMetric)); EXPECT_METRIC_TRUE( expected_fingerprint == ObservedFingerprint() || (expected_fingerprint | static_cast(UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) == ObservedFingerprint()); - EXPECT_METRIC_EQ(absl::make_optional(ObservedFingerprint()), + EXPECT_METRIC_EQ(std::make_optional(ObservedFingerprint()), caller->observer()->interesting_usage_detected()); } @@ -748,16 +695,18 @@ TEST_F(PeerConnectionUsageHistogramTest, NotableUsageOnEventFiring) { int expected_fingerprint = MakeUsageFingerprint( {UsageEvent::DATA_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, UsageEvent::CANDIDATE_COLLECTED}); - EXPECT_METRIC_EQ(0, webrtc::metrics::NumSamples(kUsagePatternMetric)); + EXPECT_METRIC_EQ(0, metrics::NumSamples(kUsagePatternMetric)); caller->GetInternalPeerConnection()->RequestUsagePatternReportForTesting(); - EXPECT_METRIC_EQ_WAIT(1, webrtc::metrics::NumSamples(kUsagePatternMetric), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return metrics::NumSamples(kUsagePatternMetric); }, + ::testing::Eq(1)), + IsRtcOk()); EXPECT_METRIC_TRUE( expected_fingerprint == ObservedFingerprint() || (expected_fingerprint | static_cast(UsageEvent::PRIVATE_CANDIDATE_COLLECTED)) == ObservedFingerprint()); - EXPECT_METRIC_EQ(absl::make_optional(ObservedFingerprint()), + EXPECT_METRIC_EQ(std::make_optional(ObservedFingerprint()), caller->observer()->interesting_usage_detected()); } @@ -769,13 +718,15 @@ 
TEST_F(PeerConnectionUsageHistogramTest, int expected_fingerprint = MakeUsageFingerprint( {UsageEvent::DATA_ADDED, UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED, UsageEvent::CANDIDATE_COLLECTED, UsageEvent::CLOSE_CALLED}); - EXPECT_METRIC_EQ(0, webrtc::metrics::NumSamples(kUsagePatternMetric)); + EXPECT_METRIC_EQ(0, metrics::NumSamples(kUsagePatternMetric)); caller->pc()->Close(); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumSamples(kUsagePatternMetric)); + EXPECT_METRIC_EQ(1, metrics::NumSamples(kUsagePatternMetric)); caller->GetInternalPeerConnection()->RequestUsagePatternReportForTesting(); caller->observer()->ClearInterestingUsageDetector(); - EXPECT_METRIC_EQ_WAIT(2, webrtc::metrics::NumSamples(kUsagePatternMetric), - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return metrics::NumSamples(kUsagePatternMetric); }, + ::testing::Eq(2)), + IsRtcOk()); EXPECT_METRIC_TRUE( expected_fingerprint == ObservedFingerprint() || (expected_fingerprint | diff --git a/pc/peer_connection_ice_unittest.cc b/pc/peer_connection_ice_unittest.cc index 532583f307..7d763db112 100644 --- a/pc/peer_connection_ice_unittest.cc +++ b/pc/peer_connection_ice_unittest.cc @@ -12,31 +12,30 @@ #include #include +#include #include #include -#include #include #include -#include "absl/types/optional.h" -#include "api/audio/audio_mixer.h" #include "api/candidate.h" +#include "api/enable_media_with_defaults.h" +#include "api/environment/environment_factory.h" #include "api/ice_transport_interface.h" #include "api/jsep.h" +#include "api/make_ref_counted.h" #include "api/media_types.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" #include "api/scoped_refptr.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "p2p/base/fake_port_allocator.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" -#include "p2p/base/port.h" #include "p2p/base/port_allocator.h" #include "p2p/base/transport_description.h" #include "p2p/base/transport_info.h" -#include "p2p/client/basic_port_allocator.h" +#include "p2p/test/fake_port_allocator.h" #include "pc/channel_interface.h" #include "pc/dtls_transport.h" #include "pc/media_session.h" @@ -50,11 +49,11 @@ #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/net_helper.h" -#include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/socket_address.h" +#include "rtc_base/socket_server.h" #include "rtc_base/thread.h" #include "test/gtest.h" -#include "test/scoped_key_value_config.h" +#include "test/wait_until.h" #ifdef WEBRTC_ANDROID #include "pc/test/android_test_initializer.h" #endif @@ -76,7 +75,6 @@ #include "pc/test/fake_audio_capture_module.h" #include "pc/test/mock_peer_connection_observers.h" #include "rtc_base/fake_network.h" -#include "rtc_base/gunit.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/virtual_socket_server.h" #include "system_wrappers/include/metrics.h" @@ -86,7 +84,7 @@ namespace webrtc { using RTCConfiguration = PeerConnectionInterface::RTCConfiguration; using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions; -using rtc::SocketAddress; + using ::testing::Combine; using ::testing::ElementsAre; using ::testing::Pair; @@ -94,24 +92,23 @@ using ::testing::Values; constexpr int kIceCandidatesTimeout = 10000; constexpr int64_t kWaitTimeout = 10000; -constexpr uint64_t kTiebreakerDefault = 44444; 
class PeerConnectionWrapperForIceTest : public PeerConnectionWrapper { public: using PeerConnectionWrapper::PeerConnectionWrapper; std::unique_ptr CreateJsepCandidateForFirstTransport( - cricket::Candidate* candidate) { + Candidate* candidate) { RTC_DCHECK(pc()->remote_description()); const auto* desc = pc()->remote_description()->description(); - RTC_DCHECK(desc->contents().size() > 0); + RTC_DCHECK(!desc->contents().empty()); const auto& first_content = desc->contents()[0]; - candidate->set_transport_name(first_content.name); - return CreateIceCandidate(first_content.name, -1, *candidate); + candidate->set_transport_name(first_content.mid()); + return CreateIceCandidate(first_content.mid(), -1, *candidate); } // Adds a new ICE candidate to the first transport. - bool AddIceCandidate(cricket::Candidate* candidate) { + bool AddIceCandidate(Candidate* candidate) { return pc()->AddIceCandidate( CreateJsepCandidateForFirstTransport(candidate).get()); } @@ -132,15 +129,12 @@ class PeerConnectionWrapperForIceTest : public PeerConnectionWrapper { return candidates; } - rtc::FakeNetworkManager* network() { return network_; } - - void set_network(rtc::FakeNetworkManager* network) { network_ = network; } + FakeNetworkManager* network() { return network_; } - // The port allocator used by this PC. - cricket::PortAllocator* port_allocator_; + void set_network(FakeNetworkManager* network) { network_ = network; } private: - rtc::FakeNetworkManager* network_; + FakeNetworkManager* network_; }; class PeerConnectionIceBaseTest : public ::testing::Test { @@ -148,24 +142,10 @@ class PeerConnectionIceBaseTest : public ::testing::Test { typedef std::unique_ptr WrapperPtr; explicit PeerConnectionIceBaseTest(SdpSemantics sdp_semantics) - : vss_(new rtc::VirtualSocketServer()), - socket_factory_(new rtc::BasicPacketSocketFactory(vss_.get())), - main_(vss_.get()), - sdp_semantics_(sdp_semantics) { + : main_(&vss_), sdp_semantics_(sdp_semantics) { #ifdef WEBRTC_ANDROID InitializeAndroidObjects(); #endif - pc_factory_ = CreatePeerConnectionFactory( - rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(), - rtc::scoped_refptr(FakeAudioCaptureModule::Create()), - CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory(), - std::make_unique>(), - std::make_unique>(), - nullptr /* audio_mixer */, nullptr /* audio_processing */); } WrapperPtr CreatePeerConnection() { @@ -173,19 +153,34 @@ class PeerConnectionIceBaseTest : public ::testing::Test { } WrapperPtr CreatePeerConnection(const RTCConfiguration& config) { - auto* fake_network = NewFakeNetwork(); - auto port_allocator = std::make_unique( - fake_network, socket_factory_.get()); - port_allocator->set_flags(cricket::PORTALLOCATOR_DISABLE_TCP | - cricket::PORTALLOCATOR_DISABLE_RELAY); - port_allocator->set_step_delay(cricket::kMinimumStepDelay); + PeerConnectionFactoryDependencies pcf_deps; + pcf_deps.network_thread = Thread::Current(); + pcf_deps.worker_thread = Thread::Current(); + pcf_deps.signaling_thread = Thread::Current(); + pcf_deps.socket_factory = &vss_; + auto network_manager = std::make_unique(); + auto* fake_network = network_manager.get(); + pcf_deps.network_manager = std::move(network_manager); + pcf_deps.adm = FakeAudioCaptureModule::Create(); + pcf_deps.video_encoder_factory = + std::make_unique>(); + pcf_deps.video_decoder_factory = + std::make_unique>(); + EnableMediaWithDefaults(pcf_deps); + scoped_refptr pc_factory = + CreateModularPeerConnectionFactory(std::move(pcf_deps)); + RTCConfiguration modified_config = 
config; + modified_config.set_port_allocator_flags(PORTALLOCATOR_DISABLE_TCP | + PORTALLOCATOR_DISABLE_RELAY); modified_config.sdp_semantics = sdp_semantics_; auto observer = std::make_unique(); - auto port_allocator_copy = port_allocator.get(); PeerConnectionDependencies pc_dependencies(observer.get()); - pc_dependencies.allocator = std::move(port_allocator); - auto result = pc_factory_->CreatePeerConnectionOrError( + auto result = pc_factory->CreatePeerConnectionOrError( modified_config, std::move(pc_dependencies)); if (!result.ok()) { return nullptr; @@ -193,9 +188,8 @@ class PeerConnectionIceBaseTest : public ::testing::Test { observer->SetPeerConnectionInterface(result.value().get()); auto wrapper = std::make_unique( - pc_factory_, result.MoveValue(), std::move(observer)); + std::move(pc_factory), result.MoveValue(), std::move(observer)); wrapper->set_network(fake_network); - wrapper->port_allocator_ = port_allocator_copy; return wrapper; } @@ -212,13 +206,12 @@ class PeerConnectionIceBaseTest : public ::testing::Test { return wrapper; } - cricket::Candidate CreateLocalUdpCandidate( - const rtc::SocketAddress& address) { - cricket::Candidate candidate; - candidate.set_component(cricket::ICE_CANDIDATE_COMPONENT_DEFAULT); - candidate.set_protocol(cricket::UDP_PROTOCOL_NAME); + Candidate CreateLocalUdpCandidate(const SocketAddress& address) { + Candidate candidate; + RTC_DCHECK_EQ(candidate.type(), IceCandidateType::kHost); + candidate.set_component(ICE_CANDIDATE_COMPONENT_DEFAULT); + candidate.set_protocol(UDP_PROTOCOL_NAME); candidate.set_address(address); - candidate.set_type(cricket::LOCAL_PORT_TYPE); return candidate; } @@ -233,58 +226,57 @@ class PeerConnectionIceBaseTest : public ::testing::Test { const std::string& pwd) { auto* desc = sdesc->description(); for (const auto& content : desc->contents()) { - auto* transport_info = desc->GetTransportInfoByName(content.name); + auto* transport_info = desc->GetTransportInfoByName(content.mid()); transport_info->description.ice_ufrag = ufrag; transport_info->description.ice_pwd = pwd; } } // Set ICE mode on the given session description. 
- void SetIceMode(SessionDescriptionInterface* sdesc, - const cricket::IceMode ice_mode) { + void SetIceMode(SessionDescriptionInterface* sdesc, const IceMode ice_mode) { auto* desc = sdesc->description(); for (const auto& content : desc->contents()) { - auto* transport_info = desc->GetTransportInfoByName(content.name); + auto* transport_info = desc->GetTransportInfoByName(content.mid()); transport_info->description.ice_mode = ice_mode; } } - cricket::TransportDescription* GetFirstTransportDescription( + TransportDescription* GetFirstTransportDescription( SessionDescriptionInterface* sdesc) { auto* desc = sdesc->description(); - RTC_DCHECK(desc->contents().size() > 0); + RTC_DCHECK(!desc->contents().empty()); auto* transport_info = - desc->GetTransportInfoByName(desc->contents()[0].name); + desc->GetTransportInfoByName(desc->contents()[0].mid()); RTC_DCHECK(transport_info); return &transport_info->description; } - const cricket::TransportDescription* GetFirstTransportDescription( + const TransportDescription* GetFirstTransportDescription( const SessionDescriptionInterface* sdesc) { auto* desc = sdesc->description(); - RTC_DCHECK(desc->contents().size() > 0); + RTC_DCHECK(!desc->contents().empty()); auto* transport_info = - desc->GetTransportInfoByName(desc->contents()[0].name); + desc->GetTransportInfoByName(desc->contents()[0].mid()); RTC_DCHECK(transport_info); return &transport_info->description; } // TODO(qingsi): Rewrite this method in terms of the standard IceTransport // after it is implemented. - cricket::IceRole GetIceRole(const WrapperPtr& pc_wrapper_ptr) { + IceRole GetIceRole(const WrapperPtr& pc_wrapper_ptr) { auto* pc_proxy = static_cast*>( pc_wrapper_ptr->pc()); PeerConnection* pc = static_cast(pc_proxy->internal()); for (const auto& transceiver : pc->GetTransceiversInternal()) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { + if (transceiver->media_type() == webrtc::MediaType::AUDIO) { auto dtls_transport = pc->LookupDtlsTransportByMidInternal( transceiver->internal()->channel()->mid()); return dtls_transport->ice_transport()->internal()->GetIceRole(); } } RTC_DCHECK_NOTREACHED(); - return cricket::ICEROLE_UNKNOWN; + return ICEROLE_UNKNOWN; } // Returns a list of (ufrag, pwd) pairs in the order that they appear in @@ -297,7 +289,7 @@ class PeerConnectionIceBaseTest : public ::testing::Test { const auto* desc = description->description(); for (const auto& content_info : desc->contents()) { const auto* transport_info = - desc->GetTransportInfoByName(content_info.name); + desc->GetTransportInfoByName(content_info.mid()); if (transport_info) { ice_credentials.push_back( std::make_pair(transport_info->description.ice_ufrag, @@ -307,33 +299,19 @@ class PeerConnectionIceBaseTest : public ::testing::Test { return ice_credentials; } - bool AddCandidateToFirstTransport(cricket::Candidate* candidate, + bool AddCandidateToFirstTransport(Candidate* candidate, SessionDescriptionInterface* sdesc) { auto* desc = sdesc->description(); - RTC_DCHECK(desc->contents().size() > 0); + RTC_DCHECK(!desc->contents().empty()); const auto& first_content = desc->contents()[0]; - candidate->set_transport_name(first_content.name); + candidate->set_transport_name(first_content.mid()); std::unique_ptr jsep_candidate = - CreateIceCandidate(first_content.name, 0, *candidate); + CreateIceCandidate(first_content.mid(), 0, *candidate); return sdesc->AddCandidate(jsep_candidate.get()); } - rtc::FakeNetworkManager* NewFakeNetwork() { - // The PeerConnection's port allocator is tied to the 
PeerConnection's - // lifetime and expects the underlying NetworkManager to outlive it. That - // prevents us from having the PeerConnectionWrapper own the fake network. - // Therefore, the test fixture will own all the fake networks even though - // tests should access the fake network through the PeerConnectionWrapper. - auto* fake_network = new rtc::FakeNetworkManager(); - fake_networks_.emplace_back(fake_network); - return fake_network; - } - - std::unique_ptr vss_; - std::unique_ptr socket_factory_; - rtc::AutoSocketServerThread main_; - rtc::scoped_refptr pc_factory_; - std::vector> fake_networks_; + VirtualSocketServer vss_; + AutoSocketServerThread main_; const SdpSemantics sdp_semantics_; }; @@ -342,15 +320,15 @@ class PeerConnectionIceTest public ::testing::WithParamInterface { protected: PeerConnectionIceTest() : PeerConnectionIceBaseTest(GetParam()) { - webrtc::metrics::Reset(); + metrics::Reset(); } }; ::testing::AssertionResult AssertCandidatesEqual(const char* a_expr, const char* b_expr, - const cricket::Candidate& a, - const cricket::Candidate& b) { - rtc::StringBuilder failure_info; + const Candidate& a, + const Candidate& b) { + StringBuilder failure_info; if (a.component() != b.component()) { failure_info << "\ncomponent: " << a.component() << " != " << b.component(); } @@ -362,7 +340,7 @@ ::testing::AssertionResult AssertCandidatesEqual(const char* a_expr, << " != " << b.address().ToString(); } if (a.type() != b.type()) { - failure_info << "\ntype: " << a.type() << " != " << b.type(); + failure_info << "\ntype: " << a.type_name() << " != " << b.type_name(); } std::string failure_info_str = failure_info.str(); if (failure_info_str.empty()) { @@ -383,7 +361,11 @@ TEST_P(PeerConnectionIceTest, OfferContainsGatheredCandidates) { // Start ICE candidate gathering by setting the local offer. ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer())); - EXPECT_TRUE_WAIT(caller->IsIceGatheringDone(), kIceCandidatesTimeout); + EXPECT_THAT( + WaitUntil([&] { return caller->IsIceGatheringDone(); }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kIceCandidatesTimeout)}), + IsRtcOk()); auto offer = caller->CreateOffer(); EXPECT_LT(0u, caller->observer()->GetCandidatesByMline(0).size()); @@ -404,7 +386,11 @@ TEST_P(PeerConnectionIceTest, AnswerContainsGatheredCandidates) { ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); ASSERT_TRUE(callee->SetLocalDescription(callee->CreateAnswer())); - EXPECT_TRUE_WAIT(callee->IsIceGatheringDone(), kIceCandidatesTimeout); + EXPECT_THAT( + WaitUntil([&] { return callee->IsIceGatheringDone(); }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kIceCandidatesTimeout)}), + IsRtcOk()); auto* answer = callee->pc()->local_description(); EXPECT_LT(0u, caller->observer()->GetCandidatesByMline(0).size()); @@ -423,7 +409,7 @@ TEST_P(PeerConnectionIceTest, auto callee = CreatePeerConnectionWithAudioVideo(); auto offer = caller->CreateOfferAndSetAsLocal(); - cricket::Candidate candidate = CreateLocalUdpCandidate(kCallerAddress); + Candidate candidate = CreateLocalUdpCandidate(kCallerAddress); AddCandidateToFirstTransport(&candidate, offer.get()); ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); @@ -477,7 +463,7 @@ TEST_P(PeerConnectionIceTest, NoIceCandidatesBeforeSetLocalDescription) { caller->network()->AddInterface(kLocalAddress); // Pump for 1 second and verify that no candidates are generated. 
- rtc::Thread::Current()->ProcessMessages(1000); + Thread::Current()->ProcessMessages(1000); EXPECT_EQ(0u, caller->observer()->candidates_.size()); } @@ -490,12 +476,12 @@ TEST_P(PeerConnectionIceTest, caller->network()->AddInterface(kCallerAddress); auto offer = caller->CreateOfferAndSetAsLocal(); - cricket::Candidate candidate = CreateLocalUdpCandidate(kCallerAddress); + Candidate candidate = CreateLocalUdpCandidate(kCallerAddress); AddCandidateToFirstTransport(&candidate, offer.get()); ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); // Pump for 1 second and verify that no candidates are generated. - rtc::Thread::Current()->ProcessMessages(1000); + Thread::Current()->ProcessMessages(1000); EXPECT_EQ(0u, callee->observer()->candidates_.size()); } @@ -504,9 +490,9 @@ TEST_P(PeerConnectionIceTest, CannotAddCandidateWhenRemoteDescriptionNotSet) { const SocketAddress kCalleeAddress("1.1.1.1", 1111); auto caller = CreatePeerConnectionWithAudioVideo(); - cricket::Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress); + Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress); std::unique_ptr jsep_candidate = - CreateIceCandidate(cricket::CN_AUDIO, 0, candidate); + CreateIceCandidate(CN_AUDIO, 0, candidate); EXPECT_FALSE(caller->pc()->AddIceCandidate(jsep_candidate.get())); @@ -514,7 +500,7 @@ TEST_P(PeerConnectionIceTest, CannotAddCandidateWhenRemoteDescriptionNotSet) { EXPECT_FALSE(caller->pc()->AddIceCandidate(jsep_candidate.get())); EXPECT_METRIC_THAT( - webrtc::metrics::Samples("WebRTC.PeerConnection.AddIceCandidate"), + metrics::Samples("WebRTC.PeerConnection.AddIceCandidate"), ElementsAre(Pair(kAddIceCandidateFailNoRemoteDescription, 2))); } @@ -526,11 +512,11 @@ TEST_P(PeerConnectionIceTest, CannotAddCandidateWhenPeerConnectionClosed) { ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - cricket::Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress); - auto* audio_content = cricket::GetFirstAudioContent( - caller->pc()->local_description()->description()); + Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress); + auto* audio_content = + GetFirstAudioContent(caller->pc()->local_description()->description()); std::unique_ptr jsep_candidate = - CreateIceCandidate(audio_content->name, 0, candidate); + CreateIceCandidate(audio_content->mid(), 0, candidate); caller->pc()->Close(); @@ -547,7 +533,7 @@ TEST_P(PeerConnectionIceTest, DuplicateIceCandidateIgnoredWhenAdded) { ASSERT_TRUE( caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); - cricket::Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress); + Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress); caller->AddIceCandidate(&candidate); EXPECT_TRUE(caller->AddIceCandidate(&candidate)); EXPECT_EQ(1u, caller->GetIceCandidatesFromRemoteDescription().size()); @@ -563,7 +549,7 @@ TEST_P(PeerConnectionIceTest, DISABLED_ErrorOnInvalidRemoteIceCandidateAdded) { // Add a candidate to the remote description with a candidate that has an // invalid address (port number == 2). 
auto answer = callee->CreateAnswerAndSetAsLocal(); - cricket::Candidate bad_candidate = + Candidate bad_candidate = CreateLocalUdpCandidate(SocketAddress("2.2.2.2", 2)); RTC_LOG(LS_INFO) << "Bad candidate: " << bad_candidate.ToString(); AddCandidateToFirstTransport(&bad_candidate, answer.get()); @@ -580,11 +566,11 @@ TEST_P(PeerConnectionIceTest, ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - cricket::Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress); - auto* audio_content = cricket::GetFirstAudioContent( - caller->pc()->local_description()->description()); + Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress); + auto* audio_content = + GetFirstAudioContent(caller->pc()->local_description()->description()); std::unique_ptr ice_candidate = - CreateIceCandidate(audio_content->name, 0, candidate); + CreateIceCandidate(audio_content->mid(), 0, candidate); ASSERT_TRUE(caller->pc()->AddIceCandidate(ice_candidate.get())); @@ -605,11 +591,11 @@ TEST_P(PeerConnectionIceTest, caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); // `candidate.transport_name()` is empty. - cricket::Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress); - auto* audio_content = cricket::GetFirstAudioContent( - caller->pc()->local_description()->description()); + Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress); + auto* audio_content = + GetFirstAudioContent(caller->pc()->local_description()->description()); std::unique_ptr ice_candidate = - CreateIceCandidate(audio_content->name, 0, candidate); + CreateIceCandidate(audio_content->mid(), 0, candidate); EXPECT_TRUE(caller->pc()->AddIceCandidate(ice_candidate.get())); EXPECT_TRUE(caller->pc()->RemoveIceCandidates({candidate})); } @@ -624,7 +610,7 @@ TEST_P(PeerConnectionIceTest, RemoveCandidateRemovesFromRemoteDescription) { ASSERT_TRUE( caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); - cricket::Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress); + Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress); ASSERT_TRUE(caller->AddIceCandidate(&candidate)); EXPECT_TRUE(caller->pc()->RemoveIceCandidates({candidate})); EXPECT_EQ(0u, caller->GetIceCandidatesFromRemoteDescription().size()); @@ -646,12 +632,12 @@ TEST_P(PeerConnectionIceTest, caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); // Add one candidate via `AddIceCandidate`. - cricket::Candidate candidate1 = CreateLocalUdpCandidate(kCallerAddress1); + Candidate candidate1 = CreateLocalUdpCandidate(kCallerAddress1); ASSERT_TRUE(callee->AddIceCandidate(&candidate1)); // Add the second candidate via a reoffer. auto offer = caller->CreateOffer(); - cricket::Candidate candidate2 = CreateLocalUdpCandidate(kCallerAddress2); + Candidate candidate2 = CreateLocalUdpCandidate(kCallerAddress2); AddCandidateToFirstTransport(&candidate2, offer.get()); // Expect both candidates to appear in the callee's remote description. @@ -667,10 +653,15 @@ TEST_P(PeerConnectionIceTest, VerifyUfragPwdLength) { auto set_local_description_with_ufrag_pwd_length = [this](int ufrag_len, int pwd_len) { auto pc = CreatePeerConnectionWithAudioVideo(); + // Because local munging is forbidden by spec, we have to disable the + // check for it. 
+ pc->GetInternalPeerConnection()->DisableSdpMungingChecksForTesting(); auto offer = pc->CreateOffer(); SetIceUfragPwd(offer.get(), std::string(ufrag_len, 'x'), std::string(pwd_len, 'x')); - return pc->SetLocalDescription(std::move(offer)); + bool result = pc->SetLocalDescription(std::move(offer)); + pc->pc()->Close(); + return result; }; auto set_remote_description_with_ufrag_pwd_length = [this](int ufrag_len, @@ -679,7 +670,9 @@ TEST_P(PeerConnectionIceTest, VerifyUfragPwdLength) { auto offer = pc->CreateOffer(); SetIceUfragPwd(offer.get(), std::string(ufrag_len, 'x'), std::string(pwd_len, 'x')); - return pc->SetRemoteDescription(std::move(offer)); + bool result = pc->SetRemoteDescription(std::move(offer)); + pc->pc()->Close(); + return result; }; EXPECT_FALSE(set_local_description_with_ufrag_pwd_length(3, 22)); @@ -701,7 +694,7 @@ ::testing::AssertionResult AssertIpInCandidates( const char* candidates_expr, const SocketAddress& address, const std::vector candidates) { - rtc::StringBuilder candidate_hosts; + StringBuilder candidate_hosts; for (const auto* candidate : candidates) { const auto& candidate_ip = candidate->candidate().address().ipaddr(); if (candidate_ip == address.ipaddr()) { @@ -724,7 +717,11 @@ TEST_P(PeerConnectionIceTest, CandidatesGeneratedForEachLocalInterface) { caller->network()->AddInterface(kLocalAddress2); caller->CreateOfferAndSetAsLocal(); - EXPECT_TRUE_WAIT(caller->IsIceGatheringDone(), kIceCandidatesTimeout); + EXPECT_THAT( + WaitUntil([&] { return caller->IsIceGatheringDone(); }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kIceCandidatesTimeout)}), + IsRtcOk()); auto candidates = caller->observer()->GetCandidatesByMline(0); EXPECT_PRED_FORMAT2(AssertIpInCandidates, kLocalAddress1, candidates); @@ -739,7 +736,7 @@ TEST_P(PeerConnectionIceTest, TrickledSingleCandidateAddedToRemoteDescription) { ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); - cricket::Candidate candidate = CreateLocalUdpCandidate(kCallerAddress); + Candidate candidate = CreateLocalUdpCandidate(kCallerAddress); callee->AddIceCandidate(&candidate); auto candidates = callee->GetIceCandidatesFromRemoteDescription(); ASSERT_EQ(1u, candidates.size()); @@ -758,10 +755,10 @@ TEST_P(PeerConnectionIceTest, TwoTrickledCandidatesAddedToRemoteDescription) { ASSERT_TRUE( caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); - cricket::Candidate candidate1 = CreateLocalUdpCandidate(kCalleeAddress1); + Candidate candidate1 = CreateLocalUdpCandidate(kCalleeAddress1); caller->AddIceCandidate(&candidate1); - cricket::Candidate candidate2 = CreateLocalUdpCandidate(kCalleeAddress2); + Candidate candidate2 = CreateLocalUdpCandidate(kCalleeAddress2); caller->AddIceCandidate(&candidate2); auto candidates = caller->GetIceCandidatesFromRemoteDescription(); @@ -788,7 +785,10 @@ TEST_P(PeerConnectionIceTest, AsyncAddIceCandidateIsAddedToRemoteDescription) { EXPECT_TRUE(result.ok()); operation_completed = true; }); - EXPECT_TRUE_WAIT(operation_completed, kWaitTimeout); + EXPECT_THAT( + WaitUntil([&] { return operation_completed; }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); auto candidates = callee->GetIceCandidatesFromRemoteDescription(); ASSERT_EQ(1u, candidates.size()); @@ -825,7 +825,7 @@ TEST_P(PeerConnectionIceTest, // Chain an operation that will block AddIceCandidate() from executing. 
auto answer_observer = - rtc::make_ref_counted(); + make_ref_counted(); callee->pc()->CreateAnswer(answer_observer.get(), RTCOfferAnswerOptions()); auto jsep_candidate = @@ -837,7 +837,10 @@ TEST_P(PeerConnectionIceTest, // The operation will not be able to complete until we EXPECT_TRUE_WAIT() // allowing CreateAnswer() to complete. EXPECT_FALSE(operation_completed); - EXPECT_TRUE_WAIT(answer_observer->called(), kWaitTimeout); + EXPECT_THAT( + WaitUntil([&] { return answer_observer->called(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); // As soon as it does, AddIceCandidate() will execute without delay, so it // must also have completed. EXPECT_TRUE(operation_completed); @@ -849,7 +852,7 @@ TEST_P(PeerConnectionIceTest, auto caller = CreatePeerConnectionWithAudioVideo(); std::unique_ptr jsep_candidate = - CreateIceCandidate(cricket::CN_AUDIO, 0, candidate); + CreateIceCandidate(CN_AUDIO, 0, candidate); bool operation_completed = false; caller->pc()->AddIceCandidate( @@ -859,7 +862,10 @@ TEST_P(PeerConnectionIceTest, std::string("The remote description was null")); operation_completed = true; }); - EXPECT_TRUE_WAIT(operation_completed, kWaitTimeout); + EXPECT_THAT( + WaitUntil([&] { return operation_completed; }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); } TEST_P(PeerConnectionIceTest, @@ -873,7 +879,7 @@ TEST_P(PeerConnectionIceTest, // Chain an operation that will block AddIceCandidate() from executing. auto answer_observer = - rtc::make_ref_counted(); + make_ref_counted(); callee->pc()->CreateAnswer(answer_observer.get(), RTCOfferAnswerOptions()); auto jsep_candidate = @@ -893,7 +899,10 @@ TEST_P(PeerConnectionIceTest, EXPECT_FALSE(operation_completed); // This should delete the callee PC. callee = nullptr; - EXPECT_TRUE_WAIT(operation_completed, kWaitTimeout); + EXPECT_THAT( + WaitUntil([&] { return operation_completed; }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); } TEST_P(PeerConnectionIceTest, LocalDescriptionUpdatedWhenContinualGathering) { @@ -910,9 +919,14 @@ TEST_P(PeerConnectionIceTest, LocalDescriptionUpdatedWhenContinualGathering) { ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer())); // Since we're using continual gathering, we won't get "gathering done". - EXPECT_TRUE_WAIT( - caller->pc()->local_description()->candidates(0)->count() > 0, - kIceCandidatesTimeout); + EXPECT_THAT( + WaitUntil( + [&] { + return caller->pc()->local_description()->candidates(0)->count(); + }, + ::testing::Gt(0), + {.timeout = webrtc::TimeDelta::Millis(kIceCandidatesTimeout)}), + IsRtcOk()); } // Test that when continual gathering is enabled, and a network interface goes @@ -932,16 +946,27 @@ TEST_P(PeerConnectionIceTest, // Start ICE candidate gathering by setting the local offer. ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer())); - EXPECT_TRUE_WAIT( - caller->pc()->local_description()->candidates(0)->count() > 0, - kIceCandidatesTimeout); + EXPECT_THAT( + WaitUntil( + [&] { + return caller->pc()->local_description()->candidates(0)->count(); + }, + ::testing::Gt(0), + {.timeout = webrtc::TimeDelta::Millis(kIceCandidatesTimeout)}), + IsRtcOk()); // Remove the only network interface, causing the PeerConnection to signal // the removal of all candidates derived from this interface. 
caller->network()->RemoveInterface(kLocalAddress); - EXPECT_EQ_WAIT(0u, caller->pc()->local_description()->candidates(0)->count(), - kIceCandidatesTimeout); + EXPECT_THAT( + WaitUntil( + [&] { + return caller->pc()->local_description()->candidates(0)->count(); + }, + ::testing::Eq(0u), + {.timeout = webrtc::TimeDelta::Millis(kIceCandidatesTimeout)}), + IsRtcOk()); EXPECT_LT(0, caller->observer()->num_candidates_removed_); } @@ -958,12 +983,16 @@ TEST_P(PeerConnectionIceTest, // Start ICE candidate gathering by setting the local offer. ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer())); - EXPECT_TRUE_WAIT(caller->IsIceGatheringDone(), kIceCandidatesTimeout); + EXPECT_THAT( + WaitUntil([&] { return caller->IsIceGatheringDone(); }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kIceCandidatesTimeout)}), + IsRtcOk()); caller->network()->RemoveInterface(kLocalAddress); // Verify that the local candidates are not removed; - rtc::Thread::Current()->ProcessMessages(1000); + Thread::Current()->ProcessMessages(1000); EXPECT_EQ(0, caller->observer()->num_candidates_removed_); } @@ -978,7 +1007,7 @@ TEST_P(PeerConnectionIceTest, IceRestartOfferClearsExistingCandidate) { auto callee = CreatePeerConnectionWithAudioVideo(); auto offer = caller->CreateOfferAndSetAsLocal(); - cricket::Candidate candidate = CreateLocalUdpCandidate(kCallerAddress); + Candidate candidate = CreateLocalUdpCandidate(kCallerAddress); AddCandidateToFirstTransport(&candidate, offer.get()); ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); @@ -999,8 +1028,7 @@ TEST_P(PeerConnectionIceTest, auto callee = CreatePeerConnectionWithAudioVideo(); auto offer = caller->CreateOfferAndSetAsLocal(); - cricket::Candidate old_candidate = - CreateLocalUdpCandidate(kFirstCallerAddress); + Candidate old_candidate = CreateLocalUdpCandidate(kFirstCallerAddress); AddCandidateToFirstTransport(&old_candidate, offer.get()); ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); @@ -1008,8 +1036,7 @@ TEST_P(PeerConnectionIceTest, RTCOfferAnswerOptions options; options.ice_restart = true; auto restart_offer = caller->CreateOfferAndSetAsLocal(options); - cricket::Candidate new_candidate = - CreateLocalUdpCandidate(kRestartedCallerAddress); + Candidate new_candidate = CreateLocalUdpCandidate(kRestartedCallerAddress); AddCandidateToFirstTransport(&new_candidate, restart_offer.get()); ASSERT_TRUE(callee->SetRemoteDescription(std::move(restart_offer))); @@ -1376,19 +1403,19 @@ TEST_P(PeerConnectionIceTest, auto callee = CreatePeerConnectionWithAudioVideo(); auto offer = caller->CreateOffer(); - SetIceMode(offer.get(), cricket::IceMode::ICEMODE_LITE); + SetIceMode(offer.get(), IceMode::ICEMODE_LITE); ASSERT_TRUE( caller->SetLocalDescription(CloneSessionDescription(offer.get()))); ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); auto answer = callee->CreateAnswer(); - SetIceMode(answer.get(), cricket::IceMode::ICEMODE_FULL); + SetIceMode(answer.get(), IceMode::ICEMODE_FULL); ASSERT_TRUE( callee->SetLocalDescription(CloneSessionDescription(answer.get()))); ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer))); - EXPECT_EQ(cricket::ICEROLE_CONTROLLED, GetIceRole(caller)); - EXPECT_EQ(cricket::ICEROLE_CONTROLLING, GetIceRole(callee)); + EXPECT_EQ(ICEROLE_CONTROLLED, GetIceRole(caller)); + EXPECT_EQ(ICEROLE_CONTROLLING, GetIceRole(callee)); } // Test that when the caller and the callee both use the lite implementation of @@ -1400,19 +1427,19 @@ TEST_P(PeerConnectionIceTest, auto callee = 
CreatePeerConnectionWithAudioVideo(); auto offer = caller->CreateOffer(); - SetIceMode(offer.get(), cricket::IceMode::ICEMODE_LITE); + SetIceMode(offer.get(), IceMode::ICEMODE_LITE); ASSERT_TRUE( caller->SetLocalDescription(CloneSessionDescription(offer.get()))); ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); auto answer = callee->CreateAnswer(); - SetIceMode(answer.get(), cricket::IceMode::ICEMODE_LITE); + SetIceMode(answer.get(), IceMode::ICEMODE_LITE); ASSERT_TRUE( callee->SetLocalDescription(CloneSessionDescription(answer.get()))); ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer))); - EXPECT_EQ(cricket::ICEROLE_CONTROLLING, GetIceRole(caller)); - EXPECT_EQ(cricket::ICEROLE_CONTROLLED, GetIceRole(callee)); + EXPECT_EQ(ICEROLE_CONTROLLING, GetIceRole(caller)); + EXPECT_EQ(ICEROLE_CONTROLLED, GetIceRole(callee)); } INSTANTIATE_TEST_SUITE_P(PeerConnectionIceTest, @@ -1423,13 +1450,13 @@ INSTANTIATE_TEST_SUITE_P(PeerConnectionIceTest, class PeerConnectionIceConfigTest : public ::testing::Test { public: PeerConnectionIceConfigTest() - : socket_server_(rtc::CreateDefaultSocketServer()), + : socket_server_(CreateDefaultSocketServer()), main_thread_(socket_server_.get()) {} protected: void SetUp() override { pc_factory_ = CreatePeerConnectionFactory( - rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(), + Thread::Current(), Thread::Current(), Thread::Current(), FakeAudioCaptureModule::Create(), CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory(), std::make_unique port_allocator( - new cricket::FakePortAllocator(rtc::Thread::Current(), - packet_socket_factory_.get(), - &field_trials_)); + auto port_allocator = std::make_unique( + CreateEnvironment(), socket_server_.get()); port_allocator_ = port_allocator.get(); - port_allocator_->SetIceTiebreaker(kTiebreakerDefault); PeerConnectionDependencies pc_dependencies(&observer_); pc_dependencies.allocator = std::move(port_allocator); auto result = pc_factory_->CreatePeerConnectionOrError( @@ -1457,13 +1479,11 @@ class PeerConnectionIceConfigTest : public ::testing::Test { pc_ = result.MoveValue(); } - webrtc::test::ScopedKeyValueConfig field_trials_; - std::unique_ptr socket_server_; - rtc::AutoSocketServerThread main_thread_; - rtc::scoped_refptr pc_factory_ = nullptr; - rtc::scoped_refptr pc_ = nullptr; - std::unique_ptr packet_socket_factory_; - cricket::FakePortAllocator* port_allocator_ = nullptr; + std::unique_ptr socket_server_; + AutoSocketServerThread main_thread_; + scoped_refptr pc_factory_ = nullptr; + scoped_refptr pc_ = nullptr; + FakePortAllocator* port_allocator_ = nullptr; MockPeerConnectionObserver observer_; }; @@ -1475,7 +1495,7 @@ TEST_F(PeerConnectionIceConfigTest, SetStunCandidateKeepaliveInterval) { config.ice_candidate_pool_size = 1; CreatePeerConnection(config); ASSERT_NE(port_allocator_, nullptr); - absl::optional actual_stun_keepalive_interval = + std::optional actual_stun_keepalive_interval = port_allocator_->stun_candidate_keepalive_interval(); EXPECT_EQ(actual_stun_keepalive_interval.value_or(-1), 123); config.stun_candidate_keepalive_interval = 321; @@ -1523,14 +1543,16 @@ TEST_P(PeerConnectionIceTest, IceCredentialsCreateOffer) { config.sdp_semantics = SdpSemantics::kUnifiedPlan; config.ice_candidate_pool_size = 1; auto pc = CreatePeerConnectionWithAudioVideo(config); - ASSERT_NE(pc->port_allocator_, nullptr); + ASSERT_NE(pc->GetInternalPeerConnection()->port_allocator(), nullptr); auto offer = pc->CreateOffer(); - auto credentials = 
pc->port_allocator_->GetPooledIceCredentials(); + auto credentials = pc->GetInternalPeerConnection() + ->port_allocator() + ->GetPooledIceCredentials(); ASSERT_EQ(1u, credentials.size()); auto* desc = offer->description(); for (const auto& content : desc->contents()) { - auto* transport_info = desc->GetTransportInfoByName(content.name); + auto* transport_info = desc->GetTransportInfoByName(content.mid()); EXPECT_EQ(transport_info->description.ice_ufrag, credentials[0].ufrag); EXPECT_EQ(transport_info->description.ice_pwd, credentials[0].pwd); } @@ -1541,17 +1563,19 @@ TEST_P(PeerConnectionIceTest, IceCredentialsCreateAnswer) { config.sdp_semantics = SdpSemantics::kUnifiedPlan; config.ice_candidate_pool_size = 1; auto pc = CreatePeerConnectionWithAudioVideo(config); - ASSERT_NE(pc->port_allocator_, nullptr); + ASSERT_NE(pc->GetInternalPeerConnection()->port_allocator(), nullptr); auto offer = pc->CreateOffer(); ASSERT_TRUE(pc->SetRemoteDescription(std::move(offer))); auto answer = pc->CreateAnswer(); - auto credentials = pc->port_allocator_->GetPooledIceCredentials(); + auto credentials = pc->GetInternalPeerConnection() + ->port_allocator() + ->GetPooledIceCredentials(); ASSERT_EQ(1u, credentials.size()); auto* desc = answer->description(); for (const auto& content : desc->contents()) { - auto* transport_info = desc->GetTransportInfoByName(content.name); + auto* transport_info = desc->GetTransportInfoByName(content.mid()); EXPECT_EQ(transport_info->description.ice_ufrag, credentials[0].ufrag); EXPECT_EQ(transport_info->description.ice_pwd, credentials[0].pwd); } @@ -1577,11 +1601,11 @@ TEST_P(PeerConnectionIceTest, PrefersMidOverMLineIndex) { caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal())); // `candidate.transport_name()` is empty. 
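// [Editorial sketch] A recurring change in the hunks above is replacing
// ContentInfo::name with the mid() accessor when resolving per-m=-section
// transport info. A minimal illustration of the lookup loop under the new
// accessor, not part of the patch; `sdesc` stands in for a
// SessionDescriptionInterface* like the ones used in these helpers.
auto* desc = sdesc->description();
for (const auto& content : desc->contents()) {
  // The MID string now comes from mid() rather than the raw `name` member.
  auto* transport_info = desc->GetTransportInfoByName(content.mid());
  if (transport_info) {
    // e.g. inspect transport_info->description.ice_ufrag / ice_pwd.
  }
}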
- cricket::Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress); - auto* audio_content = cricket::GetFirstAudioContent( - caller->pc()->local_description()->description()); + Candidate candidate = CreateLocalUdpCandidate(kCalleeAddress); + auto* audio_content = + GetFirstAudioContent(caller->pc()->local_description()->description()); std::unique_ptr ice_candidate = - CreateIceCandidate(audio_content->name, 65535, candidate); + CreateIceCandidate(audio_content->mid(), 65535, candidate); EXPECT_TRUE(caller->pc()->AddIceCandidate(ice_candidate.get())); EXPECT_TRUE(caller->pc()->RemoveIceCandidates({candidate})); } diff --git a/pc/peer_connection_integrationtest.cc b/pc/peer_connection_integrationtest.cc index d76e5e27d5..95bd835301 100644 --- a/pc/peer_connection_integrationtest.cc +++ b/pc/peer_connection_integrationtest.cc @@ -19,91 +19,98 @@ #include #include +#include #include #include +#include #include #include #include "absl/algorithm/container.h" #include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/async_resolver_factory.h" #include "api/candidate.h" #include "api/crypto/crypto_options.h" #include "api/dtmf_sender_interface.h" -#include "api/ice_transport_interface.h" #include "api/jsep.h" +#include "api/make_ref_counted.h" #include "api/media_stream_interface.h" #include "api/media_types.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" #include "api/rtc_event_log/rtc_event.h" #include "api/rtc_event_log/rtc_event_log.h" -#include "api/rtc_event_log_output.h" #include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" #include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_direction.h" #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" -#include "api/stats/rtc_stats.h" #include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" #include "api/test/mock_async_dns_resolver.h" #include "api/test/mock_encoder_selector.h" +#include "api/test/rtc_error_matchers.h" #include "api/transport/rtp/rtp_source.h" #include "api/uma_metrics.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/video/video_rotation.h" +#include "api/video_codecs/sdp_video_format.h" #include "logging/rtc_event_log/fake_rtc_event_log.h" #include "logging/rtc_event_log/fake_rtc_event_log_factory.h" #include "media/base/codec.h" #include "media/base/media_constants.h" #include "media/base/stream_params.h" -#include "p2p/base/mock_async_resolver.h" #include "p2p/base/port.h" #include "p2p/base/port_allocator.h" #include "p2p/base/port_interface.h" -#include "p2p/base/test_stun_server.h" -#include "p2p/base/test_turn_customizer.h" -#include "p2p/base/test_turn_server.h" #include "p2p/base/transport_description.h" #include "p2p/base/transport_info.h" -#include "pc/channel.h" +#include "p2p/test/test_stun_server.h" +#include "p2p/test/test_turn_server.h" #include "pc/media_session.h" #include "pc/peer_connection.h" #include "pc/peer_connection_factory.h" -#include "pc/rtp_transceiver.h" #include "pc/session_description.h" #include "pc/test/fake_periodic_video_source.h" #include "pc/test/integration_test_helpers.h" #include "pc/test/mock_peer_connection_observers.h" +#include "rtc_base/checks.h" #include "rtc_base/fake_clock.h" #include "rtc_base/fake_mdns_responder.h" #include "rtc_base/fake_network.h" #include "rtc_base/firewall_socket_server.h" #include "rtc_base/gunit.h" -#include "rtc_base/helpers.h" #include "rtc_base/logging.h" 
+#include "rtc_base/random.h" #include "rtc_base/socket_address.h" -#include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_fingerprint.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/string_encode.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/test_certificate_verifier.h" -#include "rtc_base/thread.h" #include "rtc_base/time_utils.h" #include "rtc_base/virtual_socket_server.h" #include "system_wrappers/include/metrics.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" namespace webrtc { namespace { +using ::testing::AtLeast; +using ::testing::Eq; +using ::testing::Field; +using ::testing::InSequence; +using ::testing::MockFunction; +using ::testing::NiceMock; +using ::testing::NotNull; +using ::testing::Return; + class PeerConnectionIntegrationTest : public PeerConnectionIntegrationBaseTest, public ::testing::WithParamInterface { @@ -118,13 +125,13 @@ class PeerConnectionIntegrationTest // where order of construction is finely controlled. // This also ensures peerconnection is closed before switching back to non-fake // clock, avoiding other races and DCHECK failures such as in rtp_sender.cc. -class FakeClockForTest : public rtc::ScopedFakeClock { +class FakeClockForTest : public ScopedFakeClock { protected: FakeClockForTest() { // Some things use a time of "0" as a special value, so we need to start out // the fake clock at a nonzero time. // TODO(deadbeef): Fix this. - AdvanceTime(webrtc::TimeDelta::Seconds(1)); + AdvanceTime(TimeDelta::Seconds(1)); } // Explicit handle. @@ -161,23 +168,33 @@ TEST_P(PeerConnectionIntegrationTest, callee()->AddAudioVideoTracks(); // Start offer/answer exchange and wait for it to complete. caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Should be one receiver each for audio/video. EXPECT_EQ(2U, caller()->rtp_receiver_observers().size()); EXPECT_EQ(2U, callee()->rtp_receiver_observers().size()); // Wait for all "first packet received" callbacks to be fired. - EXPECT_TRUE_WAIT( - absl::c_all_of(caller()->rtp_receiver_observers(), - [](const std::unique_ptr& o) { - return o->first_packet_received(); - }), - kMaxWaitForFramesMs); - EXPECT_TRUE_WAIT( - absl::c_all_of(callee()->rtp_receiver_observers(), - [](const std::unique_ptr& o) { - return o->first_packet_received(); - }), - kMaxWaitForFramesMs); + EXPECT_THAT(WaitUntil( + [&] { + return absl::c_all_of( + caller()->rtp_receiver_observers(), + [](const std::unique_ptr& o) { + return o->first_packet_received(); + }); + }, + ::testing::IsTrue(), {.timeout = kMaxWaitForFrames}), + IsRtcOk()); + EXPECT_THAT(WaitUntil( + [&] { + return absl::c_all_of( + callee()->rtp_receiver_observers(), + [](const std::unique_ptr& o) { + return o->first_packet_received(); + }); + }, + ::testing::IsTrue(), {.timeout = kMaxWaitForFrames}), + IsRtcOk()); // If new observers are set after the first packet was already received, the // callback should still be invoked. caller()->ResetRtpReceiverObservers(); @@ -196,6 +213,58 @@ TEST_P(PeerConnectionIntegrationTest, })); } +TEST_P(PeerConnectionIntegrationTest, RtpSenderObserverOnFirstPacketSent) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + caller()->AddAudioVideoTracks(); + callee()->AddAudioVideoTracks(); + // Start offer/answer exchange and wait for it to complete. 
+ caller()->CreateAndSetAndSignalOffer(); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + // Should be one sender each for audio/video. + EXPECT_EQ(2U, caller()->rtp_sender_observers().size()); + EXPECT_EQ(2U, callee()->rtp_sender_observers().size()); + // Wait for all "first packet sent" callbacks to be fired. + EXPECT_THAT(WaitUntil( + [&] { + return absl::c_all_of( + caller()->rtp_sender_observers(), + [](const std::unique_ptr& o) { + return o->first_packet_sent(); + }); + }, + ::testing::IsTrue(), {.timeout = kMaxWaitForFrames}), + IsRtcOk()); + EXPECT_THAT(WaitUntil( + [&] { + return absl::c_all_of( + callee()->rtp_sender_observers(), + [](const std::unique_ptr& o) { + return o->first_packet_sent(); + }); + }, + ::testing::IsTrue(), {.timeout = kMaxWaitForFrames}), + IsRtcOk()); + // If new observers are set after the first packet was already sent, the + // callback should still be invoked. + caller()->ResetRtpSenderObservers(); + callee()->ResetRtpSenderObservers(); + EXPECT_EQ(2U, caller()->rtp_sender_observers().size()); + EXPECT_EQ(2U, callee()->rtp_sender_observers().size()); + EXPECT_TRUE( + absl::c_all_of(caller()->rtp_sender_observers(), + [](const std::unique_ptr& o) { + return o->first_packet_sent(); + })); + EXPECT_TRUE( + absl::c_all_of(callee()->rtp_sender_observers(), + [](const std::unique_ptr& o) { + return o->first_packet_sent(); + })); +} + class DummyDtmfObserver : public DtmfSenderObserverInterface { public: DummyDtmfObserver() : completed_(false) {} @@ -221,7 +290,7 @@ class DummyDtmfObserver : public DtmfSenderObserverInterface { void TestDtmfFromSenderToReceiver(PeerConnectionIntegrationWrapper* sender, PeerConnectionIntegrationWrapper* receiver) { // We should be able to get a DTMF sender from the local sender. - rtc::scoped_refptr dtmf_sender = + scoped_refptr dtmf_sender = sender->pc()->GetSenders().at(0)->GetDtmfSender(); ASSERT_TRUE(dtmf_sender); DummyDtmfObserver observer; @@ -231,7 +300,9 @@ void TestDtmfFromSenderToReceiver(PeerConnectionIntegrationWrapper* sender, EXPECT_TRUE(dtmf_sender->CanInsertDtmf()); EXPECT_TRUE(dtmf_sender->InsertDtmf("1a", 100, 50)); - EXPECT_TRUE_WAIT(observer.completed(), kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return observer.completed(); }, ::testing::IsTrue()), + IsRtcOk()); std::vector tones = {"1", "a", ""}; EXPECT_EQ(tones, observer.tones()); dtmf_sender->UnregisterObserver(); @@ -247,9 +318,12 @@ TEST_P(PeerConnectionIntegrationTest, DtmfSenderObserver) { caller()->AddAudioTrack(); callee()->AddAudioTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // DTLS must finish before the DTMF sender can be used reliably. 
- ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return DtlsConnected(); }, ::testing::IsTrue()), + IsRtcOk()); TestDtmfFromSenderToReceiver(caller(), callee()); TestDtmfFromSenderToReceiver(callee(), caller()); } @@ -265,32 +339,14 @@ TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithDtls) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudioAndVideo(); ASSERT_TRUE(ExpectNewFrames(media_expectations)); } -#if defined(WEBRTC_FUCHSIA) -// Uses SDES instead of DTLS for key agreement. -TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithSdes) { - PeerConnectionInterface::RTCConfiguration sdes_config; - sdes_config.enable_dtls_srtp.emplace(false); - ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(sdes_config, sdes_config)); - ConnectFakeSignaling(); - - // Do normal offer/answer and wait for some frames to be received in each - // direction. - caller()->AddAudioVideoTracks(); - callee()->AddAudioVideoTracks(); - caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - MediaExpectations media_expectations; - media_expectations.ExpectBidirectionalAudioAndVideo(); - ASSERT_TRUE(ExpectNewFrames(media_expectations)); -} -#endif - // Basic end-to-end test specifying the `enable_encrypted_rtp_header_extensions` // option to offer encrypted versions of all header extensions alongside the // unencrypted versions. @@ -310,7 +366,9 @@ TEST_P(PeerConnectionIntegrationTest, caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudioAndVideo(); ASSERT_TRUE(ExpectNewFrames(media_expectations)); @@ -324,19 +382,24 @@ TEST_P(PeerConnectionIntegrationTest, ConnectFakeSignaling(); // Add video tracks with 16:9 aspect ratio, size 1280 x 720. - webrtc::FakePeriodicVideoSource::Config config; + FakePeriodicVideoSource::Config config; config.width = 1280; config.height = 720; - config.timestamp_offset_ms = rtc::TimeMillis(); + config.timestamp_offset_ms = TimeMillis(); caller()->AddTrack(caller()->CreateLocalVideoTrackWithConfig(config)); callee()->AddTrack(callee()->CreateLocalVideoTrackWithConfig(config)); // Do normal offer/answer and wait for at least one frame to be received in // each direction. caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(caller()->min_video_frames_received_per_track() > 0 && - callee()->min_video_frames_received_per_track() > 0, - kMaxWaitForFramesMs); + ASSERT_THAT(WaitUntil( + [&] { + return caller()->min_video_frames_received_per_track() > + 0 && + callee()->min_video_frames_received_per_track() > 0; + }, + ::testing::IsTrue(), {.timeout = kMaxWaitForFrames}), + IsRtcOk()); // Check rendered aspect ratio. EXPECT_EQ(16.0 / 9, caller()->local_rendered_aspect_ratio()); @@ -366,14 +429,16 @@ TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithSendOnlyVideo) { CreateOneDirectionalPeerConnectionWrappers(/*caller_to_callee=*/true)); ConnectFakeSignaling(); // Add one-directional video, from caller to callee. 
- rtc::scoped_refptr caller_track = + scoped_refptr caller_track = caller()->CreateLocalVideoTrack(); caller()->AddTrack(caller_track); PeerConnectionInterface::RTCOfferAnswerOptions options; options.offer_to_receive_video = 0; caller()->SetOfferAnswerOptions(options); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u); // Expect video to be received in one direction. @@ -391,14 +456,16 @@ TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithReceiveOnlyVideo) { CreateOneDirectionalPeerConnectionWrappers(/*caller_to_callee=*/false)); ConnectFakeSignaling(); // Add one-directional video, from callee to caller. - rtc::scoped_refptr callee_track = + scoped_refptr callee_track = callee()->CreateLocalVideoTrack(); callee()->AddTrack(callee_track); PeerConnectionInterface::RTCOfferAnswerOptions options; options.offer_to_receive_video = 1; caller()->SetOfferAnswerOptions(options); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); ASSERT_EQ(caller()->pc()->GetReceivers().size(), 1u); // Expect video to be received in one direction. @@ -414,18 +481,22 @@ TEST_P(PeerConnectionIntegrationTest, ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); // Add one-directional video, from caller to callee. - rtc::scoped_refptr caller_track = + scoped_refptr caller_track = caller()->CreateLocalVideoTrack(); caller()->AddTrack(caller_track); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Add receive video. - rtc::scoped_refptr callee_track = + scoped_refptr callee_track = callee()->CreateLocalVideoTrack(); callee()->AddTrack(callee_track); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Ensure that video frames are received end-to-end. MediaExpectations media_expectations; @@ -438,18 +509,22 @@ TEST_P(PeerConnectionIntegrationTest, ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); // Add one-directional video, from callee to caller. - rtc::scoped_refptr callee_track = + scoped_refptr callee_track = callee()->CreateLocalVideoTrack(); callee()->AddTrack(callee_track); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Add send video. - rtc::scoped_refptr caller_track = + scoped_refptr caller_track = caller()->CreateLocalVideoTrack(); caller()->AddTrack(caller_track); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Expect video to be received in one direction. MediaExpectations media_expectations; @@ -462,24 +537,28 @@ TEST_P(PeerConnectionIntegrationTest, ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); // Add send video, from caller to callee. 
- rtc::scoped_refptr caller_track = + scoped_refptr caller_track = caller()->CreateLocalVideoTrack(); - rtc::scoped_refptr caller_sender = + scoped_refptr caller_sender = caller()->AddTrack(caller_track); // Add receive video, from callee to caller. - rtc::scoped_refptr callee_track = + scoped_refptr callee_track = callee()->CreateLocalVideoTrack(); - rtc::scoped_refptr callee_sender = + scoped_refptr callee_sender = callee()->AddTrack(callee_track); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Remove receive video (i.e., callee sender track). callee()->pc()->RemoveTrackOrError(callee_sender); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Expect one-directional video. MediaExpectations media_expectations; @@ -494,24 +573,28 @@ TEST_P(PeerConnectionIntegrationTest, ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); // Add send video, from caller to callee. - rtc::scoped_refptr caller_track = + scoped_refptr caller_track = caller()->CreateLocalVideoTrack(); - rtc::scoped_refptr caller_sender = + scoped_refptr caller_sender = caller()->AddTrack(caller_track); // Add receive video, from callee to caller. - rtc::scoped_refptr callee_track = + scoped_refptr callee_track = callee()->CreateLocalVideoTrack(); - rtc::scoped_refptr callee_sender = + scoped_refptr callee_sender = callee()->AddTrack(callee_track); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Remove send video (i.e., caller sender track). caller()->pc()->RemoveTrackOrError(caller_sender); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Expect one-directional video. MediaExpectations media_expectations; @@ -538,13 +621,15 @@ TEST_P(PeerConnectionIntegrationTest, AudioToVideoUpgrade) { } else { callee()->SetRemoteOfferHandler([this] { callee() - ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO) + ->GetFirstTransceiverOfType(webrtc::MediaType::VIDEO) ->StopInternal(); }); } // Do offer/answer and make sure audio is still received end-to-end. caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); { MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudio(); @@ -572,7 +657,7 @@ TEST_P(PeerConnectionIntegrationTest, AudioToVideoUpgrade) { // the offer, but by default it is send only. 
auto transceivers = caller()->pc()->GetTransceivers(); ASSERT_EQ(2U, transceivers.size()); - ASSERT_EQ(cricket::MEDIA_TYPE_VIDEO, + ASSERT_EQ(webrtc::MediaType::VIDEO, transceivers[1]->receiver()->media_type()); transceivers[1]->sender()->SetTrack( caller()->CreateLocalVideoTrack().get()); @@ -581,7 +666,9 @@ TEST_P(PeerConnectionIntegrationTest, AudioToVideoUpgrade) { }); } callee()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); { // Expect additional audio frames to be received after the upgrade. MediaExpectations media_expectations; @@ -599,12 +686,16 @@ TEST_P(PeerConnectionIntegrationTest, AddAudioToVideoOnlyCall) { caller()->AddVideoTrack(); callee()->AddVideoTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Now add an audio track and do another offer/answer. caller()->AddAudioTrack(); callee()->AddAudioTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Ensure both audio and video frames are received end-to-end. MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudioAndVideo(); @@ -621,11 +712,14 @@ TEST_P(PeerConnectionIntegrationTest, BundlingEnabledWhileIceRestartOccurs) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); // Remove the bundle group from the SDP received by the callee. - callee()->SetReceivedSdpMunger([](cricket::SessionDescription* desc) { - desc->RemoveGroupByName("BUNDLE"); - }); + callee()->SetReceivedSdpMunger( + [](std::unique_ptr& sdp) { + sdp->description()->RemoveGroupByName("BUNDLE"); + }); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); { MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudioAndVideo(); @@ -635,7 +729,9 @@ TEST_P(PeerConnectionIntegrationTest, BundlingEnabledWhileIceRestartOccurs) { callee()->SetReceivedSdpMunger(nullptr); caller()->SetOfferAnswerOptions(IceRestartOfferAnswerOptions()); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Expect additional frames to be received after the ICE restart. { @@ -654,16 +750,23 @@ TEST_P(PeerConnectionIntegrationTest, RotatedVideoWithCVOExtension) { ConnectFakeSignaling(); // Add rotated video tracks. caller()->AddTrack( - caller()->CreateLocalVideoTrackWithRotation(webrtc::kVideoRotation_90)); + caller()->CreateLocalVideoTrackWithRotation(kVideoRotation_90)); callee()->AddTrack( - callee()->CreateLocalVideoTrackWithRotation(webrtc::kVideoRotation_270)); + callee()->CreateLocalVideoTrackWithRotation(kVideoRotation_270)); // Wait for video frames to be received by both sides. 
caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(caller()->min_video_frames_received_per_track() > 0 && - callee()->min_video_frames_received_per_track() > 0, - kMaxWaitForFramesMs); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil( + [&] { + return caller()->min_video_frames_received_per_track() > + 0 && + callee()->min_video_frames_received_per_track() > 0; + }, + ::testing::IsTrue(), {.timeout = kMaxWaitForFrames}), + IsRtcOk()); // Ensure that the aspect ratio is unmodified. // TODO(deadbeef): Where does 4:3 come from? Should be explicit in the test, @@ -673,8 +776,8 @@ TEST_P(PeerConnectionIntegrationTest, RotatedVideoWithCVOExtension) { EXPECT_EQ(4.0 / 3, callee()->local_rendered_aspect_ratio()); EXPECT_EQ(4.0 / 3, callee()->rendered_aspect_ratio()); // Ensure that the CVO bits were surfaced to the renderer. - EXPECT_EQ(webrtc::kVideoRotation_270, caller()->rendered_rotation()); - EXPECT_EQ(webrtc::kVideoRotation_90, callee()->rendered_rotation()); + EXPECT_EQ(kVideoRotation_270, caller()->rendered_rotation()); + EXPECT_EQ(kVideoRotation_90, callee()->rendered_rotation()); } // Test that when the CVO extension isn't supported, video is rotated the @@ -684,22 +787,30 @@ TEST_P(PeerConnectionIntegrationTest, RotatedVideoWithoutCVOExtension) { ConnectFakeSignaling(); // Add rotated video tracks. caller()->AddTrack( - caller()->CreateLocalVideoTrackWithRotation(webrtc::kVideoRotation_90)); + caller()->CreateLocalVideoTrackWithRotation(kVideoRotation_90)); callee()->AddTrack( - callee()->CreateLocalVideoTrackWithRotation(webrtc::kVideoRotation_270)); + callee()->CreateLocalVideoTrackWithRotation(kVideoRotation_270)); // Remove the CVO extension from the offered SDP. - callee()->SetReceivedSdpMunger([](cricket::SessionDescription* desc) { - cricket::VideoContentDescription* video = - GetFirstVideoContentDescription(desc); - video->ClearRtpHeaderExtensions(); - }); + callee()->SetReceivedSdpMunger( + [](std::unique_ptr& sdp) { + VideoContentDescription* video = + GetFirstVideoContentDescription(sdp->description()); + video->ClearRtpHeaderExtensions(); + }); // Wait for video frames to be received by both sides. caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(caller()->min_video_frames_received_per_track() > 0 && - callee()->min_video_frames_received_per_track() > 0, - kMaxWaitForFramesMs); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil( + [&] { + return caller()->min_video_frames_received_per_track() > + 0 && + callee()->min_video_frames_received_per_track() > 0; + }, + ::testing::IsTrue(), {.timeout = kMaxWaitForFrames}), + IsRtcOk()); // Expect that the aspect ratio is inversed to account for the 90/270 degree // rotation. @@ -710,8 +821,8 @@ TEST_P(PeerConnectionIntegrationTest, RotatedVideoWithoutCVOExtension) { EXPECT_EQ(3.0 / 4, callee()->local_rendered_aspect_ratio()); EXPECT_EQ(3.0 / 4, callee()->rendered_aspect_ratio()); // Expect that each endpoint is unaware of the rotation of the other endpoint. 
- EXPECT_EQ(webrtc::kVideoRotation_0, caller()->rendered_rotation()); - EXPECT_EQ(webrtc::kVideoRotation_0, callee()->rendered_rotation()); + EXPECT_EQ(kVideoRotation_0, caller()->rendered_rotation()); + EXPECT_EQ(kVideoRotation_0, callee()->rendered_rotation()); } // Test that if the answerer rejects the audio m= section, no audio is sent or @@ -731,14 +842,16 @@ TEST_P(PeerConnectionIntegrationTest, AnswererRejectsAudioSection) { // rejected in the answer. callee()->SetRemoteOfferHandler([this] { callee() - ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_AUDIO) + ->GetFirstTransceiverOfType(webrtc::MediaType::AUDIO) ->StopInternal(); }); } callee()->AddTrack(callee()->CreateLocalVideoTrack()); // Do offer/answer and wait for successful end-to-end video frames. caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.ExpectBidirectionalVideo(); media_expectations.ExpectNoAudio(); @@ -754,7 +867,7 @@ TEST_P(PeerConnectionIntegrationTest, AnswererRejectsAudioSection) { // The caller's transceiver should have stopped after receiving the answer, // and thus no longer listed in transceivers. EXPECT_EQ(nullptr, - caller()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_AUDIO)); + caller()->GetFirstTransceiverOfType(webrtc::MediaType::AUDIO)); } } @@ -775,14 +888,16 @@ TEST_P(PeerConnectionIntegrationTest, AnswererRejectsVideoSection) { // rejected in the answer. callee()->SetRemoteOfferHandler([this] { callee() - ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO) + ->GetFirstTransceiverOfType(webrtc::MediaType::VIDEO) ->StopInternal(); }); } callee()->AddTrack(callee()->CreateLocalAudioTrack()); // Do offer/answer and wait for successful end-to-end audio frames. caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudio(); media_expectations.ExpectNoVideo(); @@ -798,7 +913,7 @@ TEST_P(PeerConnectionIntegrationTest, AnswererRejectsVideoSection) { // The caller's transceiver should have stopped after receiving the answer, // and thus is no longer present. EXPECT_EQ(nullptr, - caller()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO)); + caller()->GetFirstTransceiverOfType(webrtc::MediaType::VIDEO)); } } @@ -828,7 +943,9 @@ TEST_P(PeerConnectionIntegrationTest, AnswererRejectsAudioAndVideoSections) { } // Do offer/answer and wait for stable signaling state. caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Sanity check that the callee's description has rejected m= sections. 
ASSERT_NE(nullptr, callee()->pc()->local_description()); @@ -852,7 +969,9 @@ TEST_P(PeerConnectionIntegrationTest, VideoRejectedInSubsequentOffer) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); { MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudioAndVideo(); @@ -861,20 +980,23 @@ TEST_P(PeerConnectionIntegrationTest, VideoRejectedInSubsequentOffer) { // Renegotiate, rejecting the video m= section. if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) { caller()->SetGeneratedSdpMunger( - [](cricket::SessionDescription* description) { - for (cricket::ContentInfo& content : description->contents()) { - if (cricket::IsVideoContent(&content)) { + [](std::unique_ptr& sdp) { + for (ContentInfo& content : sdp->description()->contents()) { + if (IsVideoContent(&content)) { content.rejected = true; } } }); } else { caller() - ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO) + ->GetFirstTransceiverOfType(webrtc::MediaType::VIDEO) ->StopInternal(); } caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kMaxWaitForActivationMs); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue(), + {.timeout = kMaxWaitForActivation}), + IsRtcOk()); // Sanity check that the caller's description has a rejected video section. ASSERT_NE(nullptr, caller()->pc()->local_description()); @@ -899,12 +1021,13 @@ TEST_F(PeerConnectionIntegrationTestPlanB, EnableAudioAfterRejecting) { ConnectFakeSignaling(); // Add audio track, do normal offer/answer. - rtc::scoped_refptr track = - caller()->CreateLocalAudioTrack(); - rtc::scoped_refptr sender = + scoped_refptr track = caller()->CreateLocalAudioTrack(); + scoped_refptr sender = caller()->pc()->AddTrack(track, {"stream"}).MoveValue(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Remove audio track, and set offer_to_receive_audio to false to cause the // m= section to be completely disabled, not just "recvonly". @@ -913,7 +1036,9 @@ TEST_F(PeerConnectionIntegrationTestPlanB, EnableAudioAfterRejecting) { options.offer_to_receive_audio = 0; caller()->SetOfferAnswerOptions(options); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Add the audio track again, expecting negotiation to succeed and frames to // flow. @@ -921,7 +1046,9 @@ TEST_F(PeerConnectionIntegrationTestPlanB, EnableAudioAfterRejecting) { options.offer_to_receive_audio = 1; caller()->SetOfferAnswerOptions(options); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.CalleeExpectsSomeAudio(); @@ -942,7 +1069,9 @@ TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithoutSsrcOrMsidSignaling) { // Remove SSRCs and MSIDs from the received offer SDP. 
callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudioAndVideo(); ASSERT_TRUE(ExpectNewFrames(media_expectations)); @@ -963,7 +1092,9 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, // Remove SSRCs from the received offer SDP. callee()->SetReceivedSdpMunger(RemoveSsrcsAndKeepMsids); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudio(); ASSERT_TRUE(ExpectNewFrames(media_expectations)); @@ -974,8 +1105,7 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); // Add one-directional video, from caller to callee. - rtc::scoped_refptr track = - caller()->CreateLocalVideoTrack(); + scoped_refptr track = caller()->CreateLocalVideoTrack(); RtpTransceiverInit video_transceiver_init; video_transceiver_init.stream_ids = {"video1"}; @@ -983,17 +1113,21 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, auto video_sender = caller()->pc()->AddTransceiver(track, video_transceiver_init).MoveValue(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Add receive direction. video_sender->SetDirectionWithError(RtpTransceiverDirection::kSendRecv); - rtc::scoped_refptr callee_track = + scoped_refptr callee_track = callee()->CreateLocalVideoTrack(); callee()->AddTrack(callee_track); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Ensure that video frames are received end-to-end. MediaExpectations media_expectations; media_expectations.ExpectBidirectionalVideo(); @@ -1015,7 +1149,9 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, caller()->SetReceivedSdpMunger(&RemoveSsrcsAndKeepMsids); callee()->SetReceivedSdpMunger(&RemoveSsrcsAndKeepMsids); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); ASSERT_EQ(2u, caller()->pc()->GetReceivers().size()); ASSERT_EQ(2u, callee()->pc()->GetReceivers().size()); @@ -1026,12 +1162,13 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, } // Used for the test below. 
-void RemoveBundleGroupSsrcsAndMidExtension(cricket::SessionDescription* desc) { - RemoveSsrcsAndKeepMsids(desc); - desc->RemoveGroupByName("BUNDLE"); - for (ContentInfo& content : desc->contents()) { - cricket::MediaContentDescription* media = content.media_description(); - cricket::RtpHeaderExtensions extensions = media->rtp_header_extensions(); +void RemoveBundleGroupSsrcsAndMidExtension( + std::unique_ptr& sdp) { + RemoveSsrcsAndKeepMsids(sdp); + sdp->description()->RemoveGroupByName("BUNDLE"); + for (ContentInfo& content : sdp->description()->contents()) { + MediaContentDescription* media = content.media_description(); + RtpHeaderExtensions extensions = media->rtp_header_extensions(); extensions.erase(std::remove_if(extensions.begin(), extensions.end(), [](const RtpExtension& extension) { return extension.uri == @@ -1059,7 +1196,9 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, caller()->SetReceivedSdpMunger(&RemoveBundleGroupSsrcsAndMidExtension); callee()->SetReceivedSdpMunger(&RemoveBundleGroupSsrcsAndMidExtension); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); ASSERT_EQ(2u, caller()->pc()->GetReceivers().size()); ASSERT_EQ(2u, callee()->pc()->GetReceivers().size()); // Make sure we are not bundled. @@ -1074,11 +1213,11 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, // Used for the test below. void ModifyPayloadTypesAndRemoveMidExtension( - cricket::SessionDescription* desc) { + std::unique_ptr& sdp) { int pt = 96; - for (ContentInfo& content : desc->contents()) { - cricket::MediaContentDescription* media = content.media_description(); - cricket::RtpHeaderExtensions extensions = media->rtp_header_extensions(); + for (ContentInfo& content : sdp->description()->contents()) { + MediaContentDescription* media = content.media_description(); + RtpHeaderExtensions extensions = media->rtp_header_extensions(); extensions.erase(std::remove_if(extensions.begin(), extensions.end(), [](const RtpExtension& extension) { return extension.uri == @@ -1086,11 +1225,7 @@ void ModifyPayloadTypesAndRemoveMidExtension( }), extensions.end()); media->set_rtp_header_extensions(extensions); - cricket::VideoContentDescription* video = media->as_video(); - ASSERT_TRUE(video != nullptr); - std::vector codecs = { - cricket::CreateVideoCodec(pt++, "VP8")}; - video->set_codecs(codecs); + media->set_codecs({CreateVideoCodec(pt++, "VP8")}); } } @@ -1113,7 +1248,9 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, caller()->SetReceivedSdpMunger(&RemoveSsrcsAndKeepMsids); callee()->SetReceivedSdpMunger(&RemoveSsrcsAndKeepMsids); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); ASSERT_EQ(2u, caller()->pc()->GetReceivers().size()); ASSERT_EQ(2u, callee()->pc()->GetReceivers().size()); // Make sure we are bundled. 
@@ -1132,7 +1269,9 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, NoStreamsMsidLinePresent) { caller()->AddAudioTrack(); caller()->AddVideoTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); auto callee_receivers = callee()->pc()->GetReceivers(); ASSERT_EQ(2u, callee_receivers.size()); EXPECT_TRUE(callee_receivers[0]->stream_ids().empty()); @@ -1146,7 +1285,9 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, NoStreamsMsidLineMissing) { caller()->AddVideoTrack(); callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); auto callee_receivers = callee()->pc()->GetReceivers(); ASSERT_EQ(2u, callee_receivers.size()); ASSERT_EQ(1u, callee_receivers[0]->stream_ids().size()); @@ -1166,7 +1307,9 @@ TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithTwoVideoTracks) { caller()->AddAudioVideoTracks(); caller()->AddVideoTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); ASSERT_EQ(3u, callee()->pc()->GetReceivers().size()); MediaExpectations media_expectations; @@ -1174,9 +1317,10 @@ TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithTwoVideoTracks) { ASSERT_TRUE(ExpectNewFrames(media_expectations)); } -static void MakeSpecCompliantMaxBundleOffer(cricket::SessionDescription* desc) { +static void MakeSpecCompliantMaxBundleOffer( + std::unique_ptr& sdp) { bool first = true; - for (cricket::ContentInfo& content : desc->contents()) { + for (ContentInfo& content : sdp->description()->contents()) { if (first) { first = false; continue; @@ -1184,14 +1328,14 @@ static void MakeSpecCompliantMaxBundleOffer(cricket::SessionDescription* desc) { content.bundle_only = true; } first = true; - for (cricket::TransportInfo& transport : desc->transport_infos()) { + for (TransportInfo& transport : sdp->description()->transport_infos()) { if (first) { first = false; continue; } transport.description.ice_ufrag.clear(); transport.description.ice_pwd.clear(); - transport.description.connection_role = cricket::CONNECTIONROLE_NONE; + transport.description.connection_role = CONNECTIONROLE_NONE; transport.description.identity_fingerprint.reset(nullptr); } } @@ -1214,7 +1358,9 @@ TEST_P(PeerConnectionIntegrationTest, // but the first m= section. callee()->SetReceivedSdpMunger(MakeSpecCompliantMaxBundleOffer); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudioAndVideo(); ASSERT_TRUE(ExpectNewFrames(media_expectations)); @@ -1229,12 +1375,16 @@ TEST_P(PeerConnectionIntegrationTest, GetAudioOutputLevelStatsWithOldStatsApi) { // Just add an audio track. caller()->AddAudioTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Get the audio output level stats. 
Note that the level is not available // until an RTCP packet has been received. - EXPECT_TRUE_WAIT(callee()->OldGetStats()->AudioOutputLevel() > 0, - kMaxWaitForFramesMs); + EXPECT_THAT( + WaitUntil([&] { return callee()->OldGetStats()->AudioOutputLevel(); }, + ::testing::Gt(0), {.timeout = kMaxWaitForFrames}), + IsRtcOk()); } // Test that an audio input level is reported. @@ -1246,12 +1396,16 @@ TEST_P(PeerConnectionIntegrationTest, GetAudioInputLevelStatsWithOldStatsApi) { // Just add an audio track. caller()->AddAudioTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Get the audio input level stats. The level should be available very // soon after the test starts. - EXPECT_TRUE_WAIT(caller()->OldGetStats()->AudioInputLevel() > 0, - kMaxWaitForStatsMs); + EXPECT_THAT( + WaitUntil([&] { return caller()->OldGetStats()->AudioInputLevel(); }, + ::testing::Gt(0), {.timeout = kMaxWaitForStats}), + IsRtcOk()); } // Test that we can get incoming byte counts from both audio and video tracks. @@ -1261,7 +1415,9 @@ TEST_P(PeerConnectionIntegrationTest, GetBytesReceivedStatsWithOldStatsApi) { caller()->AddAudioVideoTracks(); // Do offer/answer, wait for the callee to receive some frames. caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.CalleeExpectsSomeAudioAndVideo(); @@ -1288,7 +1444,9 @@ TEST_P(PeerConnectionIntegrationTest, GetBytesSentStatsWithOldStatsApi) { caller()->AddTrack(video_track); // Do offer/answer, wait for the callee to receive some frames. 
caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.CalleeExpectsSomeAudioAndVideo(); ASSERT_TRUE(ExpectNewFrames(media_expectations)); @@ -1311,11 +1469,15 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, auto audio_sender_2 = caller()->AddAudioTrack(); auto video_sender_2 = caller()->AddVideoTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.CalleeExpectsSomeAudioAndVideo(); - ASSERT_TRUE_WAIT(ExpectNewFrames(media_expectations), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return ExpectNewFrames(media_expectations); }, + ::testing::IsTrue()), + IsRtcOk()); std::vector track_ids = { audio_sender_1->track()->id(), video_sender_1->track()->id(), @@ -1338,33 +1500,36 @@ TEST_P(PeerConnectionIntegrationTest, NewGetStatsManyAudioAndManyVideoStreams) { auto audio_sender_2 = caller()->AddAudioTrack(); auto video_sender_2 = caller()->AddVideoTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.CalleeExpectsSomeAudioAndVideo(); - ASSERT_TRUE_WAIT(ExpectNewFrames(media_expectations), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return ExpectNewFrames(media_expectations); }, + ::testing::IsTrue()), + IsRtcOk()); std::vector track_ids = { audio_sender_1->track()->id(), video_sender_1->track()->id(), audio_sender_2->track()->id(), video_sender_2->track()->id()}; - rtc::scoped_refptr caller_report = - caller()->NewGetStats(); + scoped_refptr caller_report = caller()->NewGetStats(); ASSERT_TRUE(caller_report); auto outbound_stream_stats = - caller_report->GetStatsOfType(); + caller_report->GetStatsOfType(); ASSERT_EQ(outbound_stream_stats.size(), 4u); std::vector outbound_track_ids; for (const auto& stat : outbound_stream_stats) { - ASSERT_TRUE(stat->bytes_sent.is_defined()); + ASSERT_TRUE(stat->bytes_sent.has_value()); EXPECT_LT(0u, *stat->bytes_sent); if (*stat->kind == "video") { - ASSERT_TRUE(stat->key_frames_encoded.is_defined()); + ASSERT_TRUE(stat->key_frames_encoded.has_value()); EXPECT_GT(*stat->key_frames_encoded, 0u); - ASSERT_TRUE(stat->frames_encoded.is_defined()); + ASSERT_TRUE(stat->frames_encoded.has_value()); EXPECT_GE(*stat->frames_encoded, *stat->key_frames_encoded); } - ASSERT_TRUE(stat->media_source_id.is_defined()); + ASSERT_TRUE(stat->media_source_id.has_value()); const RTCMediaSourceStats* media_source = static_cast( caller_report->Get(*stat->media_source_id)); @@ -1373,20 +1538,19 @@ TEST_P(PeerConnectionIntegrationTest, NewGetStatsManyAudioAndManyVideoStreams) { } EXPECT_THAT(outbound_track_ids, UnorderedElementsAreArray(track_ids)); - rtc::scoped_refptr callee_report = - callee()->NewGetStats(); + scoped_refptr callee_report = callee()->NewGetStats(); ASSERT_TRUE(callee_report); auto inbound_stream_stats = - callee_report->GetStatsOfType(); + callee_report->GetStatsOfType(); ASSERT_EQ(4u, inbound_stream_stats.size()); std::vector inbound_track_ids; for (const auto& stat : inbound_stream_stats) { - 
ASSERT_TRUE(stat->bytes_received.is_defined()); + ASSERT_TRUE(stat->bytes_received.has_value()); EXPECT_LT(0u, *stat->bytes_received); if (*stat->kind == "video") { - ASSERT_TRUE(stat->key_frames_decoded.is_defined()); + ASSERT_TRUE(stat->key_frames_decoded.has_value()); EXPECT_GT(*stat->key_frames_decoded, 0u); - ASSERT_TRUE(stat->frames_decoded.is_defined()); + ASSERT_TRUE(stat->frames_decoded.has_value()); EXPECT_GE(*stat->frames_decoded, *stat->key_frames_decoded); } inbound_track_ids.push_back(*stat->track_identifier); @@ -1405,20 +1569,21 @@ TEST_P(PeerConnectionIntegrationTest, // Remove SSRCs and MSIDs from the received offer SDP. callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.CalleeExpectsSomeAudio(1); ASSERT_TRUE(ExpectNewFrames(media_expectations)); // We received a frame, so we should have nonzero "bytes received" stats for // the unsignaled stream, if stats are working for it. - rtc::scoped_refptr report = - callee()->NewGetStats(); + scoped_refptr report = callee()->NewGetStats(); ASSERT_NE(nullptr, report); auto inbound_stream_stats = - report->GetStatsOfType(); + report->GetStatsOfType(); ASSERT_EQ(1U, inbound_stream_stats.size()); - ASSERT_TRUE(inbound_stream_stats[0]->bytes_received.is_defined()); + ASSERT_TRUE(inbound_stream_stats[0]->bytes_received.has_value()); ASSERT_GT(*inbound_stream_stats[0]->bytes_received, 0U); } @@ -1431,7 +1596,9 @@ TEST_P(PeerConnectionIntegrationTest, // Remove SSRCs and MSIDs from the received offer SDP. callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Note that, since the old stats implementation associates SSRCs with tracks // using SDP, when SSRCs aren't signaled in SDP these stats won't have an @@ -1439,8 +1606,10 @@ TEST_P(PeerConnectionIntegrationTest, // // Also, we use "EXPECT_TRUE_WAIT" because the stats collector may decide to // return cached stats if not enough time has passed since the last update. - EXPECT_TRUE_WAIT(callee()->OldGetStats()->BytesReceived() > 0, - kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return callee()->OldGetStats()->BytesReceived(); }, + ::testing::Gt(0)), + IsRtcOk()); } // Test that we can successfully get the media related stats (audio level @@ -1453,27 +1622,27 @@ TEST_P(PeerConnectionIntegrationTest, // Remove SSRCs and MSIDs from the received offer SDP. 
callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.CalleeExpectsSomeAudio(1); media_expectations.CalleeExpectsSomeVideo(1); ASSERT_TRUE(ExpectNewFrames(media_expectations)); - rtc::scoped_refptr report = - callee()->NewGetStats(); + scoped_refptr report = callee()->NewGetStats(); ASSERT_NE(nullptr, report); - auto inbound_rtps = - report->GetStatsOfType(); + auto inbound_rtps = report->GetStatsOfType(); auto index = FindFirstMediaStatsIndexByKind("audio", inbound_rtps); ASSERT_GE(index, 0); - EXPECT_TRUE(inbound_rtps[index]->audio_level.is_defined()); + EXPECT_TRUE(inbound_rtps[index]->audio_level.has_value()); } // Test that DTLS 1.0 is used if both sides only support DTLS 1.0. TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithDtls10) { PeerConnectionFactory::Options dtls_10_options; - dtls_10_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10; + dtls_10_options.ssl_max_version = SSL_PROTOCOL_DTLS_10; ASSERT_TRUE(CreatePeerConnectionWrappersWithOptions(dtls_10_options, dtls_10_options)); ConnectFakeSignaling(); @@ -1482,7 +1651,9 @@ TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithDtls10) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudioAndVideo(); ASSERT_TRUE(ExpectNewFrames(media_expectations)); @@ -1491,46 +1662,60 @@ TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithDtls10) { // Test getting cipher stats and UMA metrics when DTLS 1.0 is negotiated. TEST_P(PeerConnectionIntegrationTest, Dtls10CipherStatsAndUmaMetrics) { PeerConnectionFactory::Options dtls_10_options; - dtls_10_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10; + dtls_10_options.ssl_max_version = SSL_PROTOCOL_DTLS_10; ASSERT_TRUE(CreatePeerConnectionWrappersWithOptions(dtls_10_options, dtls_10_options)); ConnectFakeSignaling(); caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); - EXPECT_TRUE_WAIT(rtc::SSLStreamAdapter::IsAcceptableCipher( - caller()->OldGetStats()->DtlsCipher(), rtc::KT_DEFAULT), - kDefaultTimeout); - EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(kDefaultSrtpCryptoSuite), - caller()->OldGetStats()->SrtpCipher(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return DtlsConnected(); }, ::testing::IsTrue()), + IsRtcOk()); + EXPECT_THAT(WaitUntil( + [&] { + return SSLStreamAdapter::IsAcceptableCipher( + caller()->OldGetStats()->DtlsCipher(), KT_DEFAULT); + }, + ::testing::IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return caller()->OldGetStats()->SrtpCipher(); }, + ::testing::Eq(SrtpCryptoSuiteToName(kDefaultSrtpCryptoSuite))), + IsRtcOk()); } // Test getting cipher stats and UMA metrics when DTLS 1.2 is negotiated. 
TEST_P(PeerConnectionIntegrationTest, Dtls12CipherStatsAndUmaMetrics) { PeerConnectionFactory::Options dtls_12_options; - dtls_12_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12; + dtls_12_options.ssl_max_version = SSL_PROTOCOL_DTLS_12; ASSERT_TRUE(CreatePeerConnectionWrappersWithOptions(dtls_12_options, dtls_12_options)); ConnectFakeSignaling(); caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); - EXPECT_TRUE_WAIT(rtc::SSLStreamAdapter::IsAcceptableCipher( - caller()->OldGetStats()->DtlsCipher(), rtc::KT_DEFAULT), - kDefaultTimeout); - EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(kDefaultSrtpCryptoSuite), - caller()->OldGetStats()->SrtpCipher(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return DtlsConnected(); }, ::testing::IsTrue()), + IsRtcOk()); + EXPECT_THAT(WaitUntil( + [&] { + return SSLStreamAdapter::IsAcceptableCipher( + caller()->OldGetStats()->DtlsCipher(), KT_DEFAULT); + }, + ::testing::IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return caller()->OldGetStats()->SrtpCipher(); }, + ::testing::Eq(SrtpCryptoSuiteToName(kDefaultSrtpCryptoSuite))), + IsRtcOk()); } // Test that DTLS 1.0 can be used if the caller supports DTLS 1.2 and the // callee only supports 1.0. TEST_P(PeerConnectionIntegrationTest, CallerDtls12ToCalleeDtls10) { PeerConnectionFactory::Options caller_options; - caller_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12; + caller_options.ssl_max_version = SSL_PROTOCOL_DTLS_12; PeerConnectionFactory::Options callee_options; - callee_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10; + callee_options.ssl_max_version = SSL_PROTOCOL_DTLS_10; ASSERT_TRUE( CreatePeerConnectionWrappersWithOptions(caller_options, callee_options)); ConnectFakeSignaling(); @@ -1539,7 +1724,9 @@ TEST_P(PeerConnectionIntegrationTest, CallerDtls12ToCalleeDtls10) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudioAndVideo(); ASSERT_TRUE(ExpectNewFrames(media_expectations)); @@ -1549,9 +1736,9 @@ TEST_P(PeerConnectionIntegrationTest, CallerDtls12ToCalleeDtls10) { // callee supports 1.2. 
TEST_P(PeerConnectionIntegrationTest, CallerDtls10ToCalleeDtls12) { PeerConnectionFactory::Options caller_options; - caller_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_10; + caller_options.ssl_max_version = SSL_PROTOCOL_DTLS_10; PeerConnectionFactory::Options callee_options; - callee_options.ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12; + callee_options.ssl_max_version = SSL_PROTOCOL_DTLS_12; ASSERT_TRUE( CreatePeerConnectionWrappersWithOptions(caller_options, callee_options)); ConnectFakeSignaling(); @@ -1560,7 +1747,9 @@ TEST_P(PeerConnectionIntegrationTest, CallerDtls10ToCalleeDtls12) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudioAndVideo(); ASSERT_TRUE(ExpectNewFrames(media_expectations)); @@ -1575,7 +1764,7 @@ TEST_P(PeerConnectionIntegrationTest, PeerConnectionFactory::Options callee_options; callee_options.crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher = false; - int expected_cipher_suite = rtc::kSrtpAes128CmSha1_80; + int expected_cipher_suite = kSrtpAes128CmSha1_80; TestNegotiatedCipherSuite(caller_options, callee_options, expected_cipher_suite); } @@ -1587,7 +1776,7 @@ TEST_P(PeerConnectionIntegrationTest, false; PeerConnectionFactory::Options callee_options; callee_options.crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher = true; - int expected_cipher_suite = rtc::kSrtpAes128CmSha1_80; + int expected_cipher_suite = kSrtpAes128CmSha1_80; TestNegotiatedCipherSuite(caller_options, callee_options, expected_cipher_suite); } @@ -1597,7 +1786,7 @@ TEST_P(PeerConnectionIntegrationTest, Aes128Sha1_32_CipherUsedWhenSupported) { caller_options.crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher = true; PeerConnectionFactory::Options callee_options; callee_options.crypto_options.srtp.enable_aes128_sha1_32_crypto_cipher = true; - int expected_cipher_suite = rtc::kSrtpAes128CmSha1_32; + int expected_cipher_suite = kSrtpAes128CmSha1_32; TestNegotiatedCipherSuite(caller_options, callee_options, expected_cipher_suite); } @@ -1639,7 +1828,9 @@ TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithGcmCipher) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudioAndVideo(); ASSERT_TRUE(ExpectNewFrames(media_expectations)); @@ -1654,34 +1845,46 @@ TEST_P(PeerConnectionIntegrationTest, IceStatesReachCompletion) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceGatheringComplete, - caller()->ice_gathering_state(), kMaxWaitForFramesMs); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceGatheringComplete, - callee()->ice_gathering_state(), kMaxWaitForFramesMs); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return caller()->ice_gathering_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceGatheringComplete), + {.timeout 
= kMaxWaitForFrames}), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return callee()->ice_gathering_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceGatheringComplete), + {.timeout = kMaxWaitForFrames}), + IsRtcOk()); // After the best candidate pair is selected and all candidates are signaled, // the ICE connection state should reach "complete". // TODO(deadbeef): Currently, the ICE "controlled" agent (the // answerer/"callee" by default) only reaches "connected". When this is // fixed, this test should be updated. - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kDefaultTimeout); + EXPECT_THAT(WaitUntil([&] { return caller()->ice_connection_state(); }, + ::testing::Eq( + PeerConnectionInterface::kIceConnectionCompleted)), + IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return callee()->ice_connection_state(); }, + ::testing::Eq( + PeerConnectionInterface::kIceConnectionConnected)), + IsRtcOk()); } -constexpr int kOnlyLocalPorts = cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_DISABLE_RELAY | - cricket::PORTALLOCATOR_DISABLE_TCP; +constexpr int kOnlyLocalPorts = PORTALLOCATOR_DISABLE_STUN | + PORTALLOCATOR_DISABLE_RELAY | + PORTALLOCATOR_DISABLE_TCP; // Use a mock resolver to resolve the hostname back to the original IP on both // sides and check that the ICE connection connects. TEST_P(PeerConnectionIntegrationTest, IceStatesReachCompletionWithRemoteHostname) { auto caller_resolver_factory = - std::make_unique>(); + std::make_unique>(); auto callee_resolver_factory = - std::make_unique>(); + std::make_unique>(); auto callee_async_resolver = std::make_unique>(); auto caller_async_resolver = @@ -1695,17 +1898,18 @@ TEST_P(PeerConnectionIntegrationTest, // P2PTransportChannel. EXPECT_CALL(*caller_resolver_factory, Create()) .WillOnce(Return(ByMove(std::move(caller_async_resolver)))); - webrtc::PeerConnectionDependencies caller_deps(nullptr); + PeerConnectionDependencies caller_deps(nullptr); caller_deps.async_dns_resolver_factory = std::move(caller_resolver_factory); EXPECT_CALL(*callee_resolver_factory, Create()) .WillOnce(Return(ByMove(std::move(callee_async_resolver)))); - webrtc::PeerConnectionDependencies callee_deps(nullptr); + PeerConnectionDependencies callee_deps(nullptr); callee_deps.async_dns_resolver_factory = std::move(callee_resolver_factory); PeerConnectionInterface::RTCConfiguration config; config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; config.rtcp_mux_policy = PeerConnectionInterface::kRtcpMuxPolicyRequire; + config.port_allocator_config.flags = kOnlyLocalPorts; ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndDeps( config, std::move(caller_deps), config, std::move(callee_deps))); @@ -1719,29 +1923,33 @@ TEST_P(PeerConnectionIntegrationTest, // Enable hostname candidates with mDNS names. 
caller()->SetMdnsResponder( - std::make_unique(network_thread())); + std::make_unique(network_thread())); callee()->SetMdnsResponder( - std::make_unique(network_thread())); - - SetPortAllocatorFlags(kOnlyLocalPorts, kOnlyLocalPorts); + std::make_unique(network_thread())); ConnectFakeSignaling(); caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return caller()->ice_connection_state(); }, + ::testing::Eq( + PeerConnectionInterface::kIceConnectionCompleted)), + IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return callee()->ice_connection_state(); }, + ::testing::Eq( + PeerConnectionInterface::kIceConnectionConnected)), + IsRtcOk()); // Part of reporting the stats will occur on the network thread, so flush it // before checking NumEvents. SendTask(network_thread(), [] {}); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.CandidatePairType_UDP", - webrtc::kIceCandidatePairHostNameHostName)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.PeerConnection.CandidatePairType_UDP", + kIceCandidatePairHostNameHostName)); DestroyPeerConnections(); } @@ -1758,17 +1966,12 @@ class PeerConnectionIntegrationIceStatesTest } void StartStunServer(const SocketAddress& server_address) { - stun_server_.reset( - cricket::TestStunServer::Create(firewall(), server_address)); + stun_server_ = + TestStunServer::Create(firewall(), server_address, *network_thread()); } bool TestIPv6() { - return (port_allocator_flags_ & cricket::PORTALLOCATOR_ENABLE_IPV6); - } - - void SetPortAllocatorFlags() { - PeerConnectionIntegrationBaseTest::SetPortAllocatorFlags( - port_allocator_flags_, port_allocator_flags_); + return (port_allocator_flags_ & PORTALLOCATOR_ENABLE_IPV6); } std::vector CallerAddresses() { @@ -1803,9 +2006,11 @@ class PeerConnectionIntegrationIceStatesTest } } + uint32_t port_allocator_flags() const { return port_allocator_flags_; } + private: uint32_t port_allocator_flags_; - std::unique_ptr stun_server_; + TestStunServer::StunServerPtr stun_server_; }; // Ensure FakeClockForTest is constructed first (see class for rationale). @@ -1823,12 +2028,13 @@ TEST_P(PeerConnectionIntegrationIceStatesTestWithFakeClock, // Block connections to/from the caller and wait for ICE to become // disconnected. for (const auto& caller_address : CallerAddresses()) { - firewall()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, caller_address); + firewall()->AddRule(false, FP_ANY, FD_ANY, caller_address); } - ASSERT_TRUE(CreatePeerConnectionWrappers()); + PeerConnectionInterface::RTCConfiguration config; + config.port_allocator_config.flags = port_allocator_flags(); + ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); ConnectFakeSignaling(); - SetPortAllocatorFlags(); SetUpNetworkInterfaces(); caller()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); @@ -1836,10 +2042,13 @@ TEST_P(PeerConnectionIntegrationIceStatesTestWithFakeClock, // According to RFC7675, if there is no response within 30 seconds then the // peer should consider the other side to have rejected the connection. 
This // is signaled by the state transitioning to "failed". - constexpr int kConsentTimeout = 30000; - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionFailed, - caller()->standardized_ice_connection_state(), - kConsentTimeout, FakeClock()); + constexpr TimeDelta kConsentTimeout = TimeDelta::Millis(30000); + ScopedFakeClock& fake_clock = FakeClock(); + ASSERT_THAT( + WaitUntil([&] { return caller()->standardized_ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionFailed), + {.timeout = kConsentTimeout, .clock = &fake_clock}), + IsRtcOk()); } #endif // !defined(THREAD_SANITIZER) @@ -1853,29 +2062,36 @@ TEST_P(PeerConnectionIntegrationIceStatesTestWithFakeClock, #define MAYBE_VerifyBestConnection VerifyBestConnection #endif TEST_P(PeerConnectionIntegrationIceStatesTest, MAYBE_VerifyBestConnection) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); + PeerConnectionInterface::RTCConfiguration config; + config.port_allocator_config.flags = port_allocator_flags(); + ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); ConnectFakeSignaling(); - SetPortAllocatorFlags(); SetUpNetworkInterfaces(); caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return caller()->ice_connection_state(); }, + ::testing::Eq( + PeerConnectionInterface::kIceConnectionCompleted)), + IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return callee()->ice_connection_state(); }, + ::testing::Eq( + PeerConnectionInterface::kIceConnectionConnected)), + IsRtcOk()); // Part of reporting the stats will occur on the network thread, so flush it // before checking NumEvents. SendTask(network_thread(), [] {}); // TODO(bugs.webrtc.org/9456): Fix it. - const int num_best_ipv4 = webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.IPMetrics", webrtc::kBestConnections_IPv4); - const int num_best_ipv6 = webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.IPMetrics", webrtc::kBestConnections_IPv6); + const int num_best_ipv4 = metrics::NumEvents( + "WebRTC.PeerConnection.IPMetrics", kBestConnections_IPv4); + const int num_best_ipv6 = metrics::NumEvents( + "WebRTC.PeerConnection.IPMetrics", kBestConnections_IPv6); if (TestIPv6()) { // When IPv6 is enabled, we should prefer an IPv6 connection over an IPv4 // connection. 
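For reference, the conversion applied throughout this file swaps the polling macros for matcher-based waits. A minimal before/after sketch, assuming kMaxWaitForFrames is the TimeDelta counterpart of the old kMaxWaitForFramesMs constant:

// Old style: macro polling with millisecond timeouts.
ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
EXPECT_EQ_WAIT(PeerConnectionInterface::kIceConnectionConnected,
               callee()->ice_connection_state(), kMaxWaitForFramesMs);
// New style: wrap the poll in WaitUntil() and check its result with IsRtcOk();
// timeouts are TimeDelta values, and a fake clock can be injected via .clock.
ASSERT_THAT(
    WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()),
    IsRtcOk());
EXPECT_THAT(
    WaitUntil([&] { return callee()->ice_connection_state(); },
              ::testing::Eq(PeerConnectionInterface::kIceConnectionConnected),
              {.timeout = kMaxWaitForFrames}),
    IsRtcOk());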
@@ -1886,22 +2102,22 @@ TEST_P(PeerConnectionIntegrationIceStatesTest, MAYBE_VerifyBestConnection) { EXPECT_METRIC_EQ(0, num_best_ipv6); } - EXPECT_METRIC_EQ(0, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.CandidatePairType_UDP", - webrtc::kIceCandidatePairHostHost)); - EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.CandidatePairType_UDP", - webrtc::kIceCandidatePairHostPublicHostPublic)); + EXPECT_METRIC_EQ( + 0, metrics::NumEvents("WebRTC.PeerConnection.CandidatePairType_UDP", + kIceCandidatePairHostHost)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.PeerConnection.CandidatePairType_UDP", + kIceCandidatePairHostPublicHostPublic)); } -constexpr uint32_t kFlagsIPv4NoStun = cricket::PORTALLOCATOR_DISABLE_TCP | - cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_DISABLE_RELAY; +constexpr uint32_t kFlagsIPv4NoStun = PORTALLOCATOR_DISABLE_TCP | + PORTALLOCATOR_DISABLE_STUN | + PORTALLOCATOR_DISABLE_RELAY; constexpr uint32_t kFlagsIPv6NoStun = - cricket::PORTALLOCATOR_DISABLE_TCP | cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_ENABLE_IPV6 | cricket::PORTALLOCATOR_DISABLE_RELAY; + PORTALLOCATOR_DISABLE_TCP | PORTALLOCATOR_DISABLE_STUN | + PORTALLOCATOR_ENABLE_IPV6 | PORTALLOCATOR_DISABLE_RELAY; constexpr uint32_t kFlagsIPv4Stun = - cricket::PORTALLOCATOR_DISABLE_TCP | cricket::PORTALLOCATOR_DISABLE_RELAY; + PORTALLOCATOR_DISABLE_TCP | PORTALLOCATOR_DISABLE_RELAY; INSTANTIATE_TEST_SUITE_P( PeerConnectionIntegrationTest, @@ -1930,18 +2146,26 @@ TEST_P(PeerConnectionIntegrationTest, MediaContinuesFlowingAfterIceRestart) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kMaxWaitForFramesMs); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kMaxWaitForFramesMs); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return caller()->ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionCompleted), + {.timeout = kMaxWaitForFrames}), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return callee()->ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionConnected), + {.timeout = kMaxWaitForFrames}), + IsRtcOk()); // To verify that the ICE restart actually occurs, get // ufrag/password/candidates before and after restart. // Create an SDP string of the first audio candidate for both clients. 
- const webrtc::IceCandidateCollection* audio_candidates_caller = + const IceCandidateCollection* audio_candidates_caller = caller()->pc()->local_description()->candidates(0); - const webrtc::IceCandidateCollection* audio_candidates_callee = + const IceCandidateCollection* audio_candidates_callee = callee()->pc()->local_description()->candidates(0); ASSERT_GT(audio_candidates_caller->count(), 0u); ASSERT_GT(audio_candidates_callee->count(), 0u); @@ -1951,7 +2175,7 @@ TEST_P(PeerConnectionIntegrationTest, MediaContinuesFlowingAfterIceRestart) { std::string callee_candidate_pre_restart; ASSERT_TRUE( audio_candidates_callee->at(0)->ToString(&callee_candidate_pre_restart)); - const cricket::SessionDescription* desc = + const SessionDescription* desc = caller()->pc()->local_description()->description(); std::string caller_ufrag_pre_restart = desc->transport_infos()[0].description.ice_ufrag; @@ -1963,11 +2187,19 @@ TEST_P(PeerConnectionIntegrationTest, MediaContinuesFlowingAfterIceRestart) { // Have the caller initiate an ICE restart. caller()->SetOfferAnswerOptions(IceRestartOfferAnswerOptions()); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kMaxWaitForFramesMs); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kMaxWaitForFramesMs); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return caller()->ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionCompleted), + {.timeout = kMaxWaitForFrames}), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return callee()->ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionConnected), + {.timeout = kMaxWaitForFrames}), + IsRtcOk()); // Grab the ufrags/candidates again. audio_candidates_caller = caller()->pc()->local_description()->candidates(0); @@ -2011,15 +2243,17 @@ TEST_P(PeerConnectionIntegrationTest, EndToEndCallWithIceRenomination) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Sanity check that ICE renomination was actually negotiated. 
- const cricket::SessionDescription* desc = + const SessionDescription* desc = caller()->pc()->local_description()->description(); - for (const cricket::TransportInfo& info : desc->transport_infos()) { + for (const TransportInfo& info : desc->transport_infos()) { ASSERT_THAT(info.description.transport_options, Contains("renomination")); } desc = callee()->pc()->local_description()->description(); - for (const cricket::TransportInfo& info : desc->transport_infos()) { + for (const TransportInfo& info : desc->transport_infos()) { ASSERT_THAT(info.description.transport_options, Contains("renomination")); } MediaExpectations media_expectations; @@ -2049,15 +2283,21 @@ TEST_P(PeerConnectionIntegrationTest, caller()->AddAudioTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_EQ_WAIT(PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return caller()->ice_connection_state(); }, + ::testing::Eq( + PeerConnectionInterface::kIceConnectionCompleted)), + IsRtcOk()); caller()->clear_ice_connection_state_history(); caller()->AddVideoTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); EXPECT_EQ(0u, caller()->ice_connection_state_history().size()); } @@ -2074,7 +2314,9 @@ TEST_P(PeerConnectionIntegrationTest, // video and audio recvonly "m=" sections. caller()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Negotiate again, disabling the video "m=" section (the callee will set the // port to 0 due to offer_to_receive_video = 0). @@ -2085,15 +2327,17 @@ TEST_P(PeerConnectionIntegrationTest, } else { callee()->SetRemoteOfferHandler([this] { callee() - ->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO) + ->GetFirstTransceiverOfType(webrtc::MediaType::VIDEO) ->StopInternal(); }); } caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Sanity check that video "m=" section was actually rejected. - const ContentInfo* answer_video_content = cricket::GetFirstVideoContent( - callee()->pc()->local_description()->description()); + const ContentInfo* answer_video_content = + GetFirstVideoContent(callee()->pc()->local_description()->description()); ASSERT_NE(nullptr, answer_video_content); ASSERT_TRUE(answer_video_content->rejected); @@ -2106,14 +2350,16 @@ TEST_P(PeerConnectionIntegrationTest, } else { // The caller's transceiver is stopped, so we need to add another track. 
auto caller_transceiver = - caller()->GetFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO); + caller()->GetFirstTransceiverOfType(webrtc::MediaType::VIDEO); EXPECT_EQ(nullptr, caller_transceiver.get()); caller()->AddVideoTrack(); } callee()->AddVideoTrack(); callee()->SetRemoteOfferHandler(nullptr); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Verify the caller receives frames from the newly added stream, and the // callee receives additional frames from the re-enabled video m= section. @@ -2139,12 +2385,21 @@ TEST_F(PeerConnectionIntegrationTestPlanB, auto callee_video_sender = callee()->pc()->CreateSender("video", "callee_stream"); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kMaxWaitForActivationMs); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue(), + {.timeout = kMaxWaitForActivation}), + IsRtcOk()); // Wait for ICE to complete, without any tracks being set. - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kMaxWaitForFramesMs); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kMaxWaitForFramesMs); + EXPECT_THAT( + WaitUntil([&] { return caller()->ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionCompleted), + {.timeout = kMaxWaitForFrames}), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return callee()->ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionConnected), + {.timeout = kMaxWaitForFrames}), + IsRtcOk()); // Now set the tracks, and expect frames to immediately start flowing. EXPECT_TRUE( caller_audio_sender->SetTrack(caller()->CreateLocalAudioTrack().get())); @@ -2166,10 +2421,10 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, MediaFlowsAfterEarlyWarmupWithAddTransceiver) { ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); - auto audio_result = caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto audio_result = caller()->pc()->AddTransceiver(webrtc::MediaType::AUDIO); ASSERT_EQ(RTCErrorType::NONE, audio_result.error().type()); auto caller_audio_sender = audio_result.MoveValue()->sender(); - auto video_result = caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + auto video_result = caller()->pc()->AddTransceiver(webrtc::MediaType::VIDEO); ASSERT_EQ(RTCErrorType::NONE, video_result.error().type()); auto caller_video_sender = video_result.MoveValue()->sender(); callee()->SetRemoteOfferHandler([this] { @@ -2180,12 +2435,21 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, RtpTransceiverDirection::kSendRecv); }); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kMaxWaitForActivationMs); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue(), + {.timeout = kMaxWaitForActivation}), + IsRtcOk()); // Wait for ICE to complete, without any tracks being set. 
- EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kMaxWaitForFramesMs); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kMaxWaitForFramesMs); + EXPECT_THAT( + WaitUntil([&] { return caller()->ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionCompleted), + {.timeout = kMaxWaitForFrames}), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return callee()->ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionConnected), + {.timeout = kMaxWaitForFrames}), + IsRtcOk()); // Now set the tracks, and expect frames to immediately start flowing. auto callee_audio_sender = callee()->pc()->GetSenders()[0]; auto callee_video_sender = callee()->pc()->GetSenders()[1]; @@ -2212,14 +2476,20 @@ TEST_F(PeerConnectionIntegrationTestPlanB, CanSendRemoteVideoTrack) { // Just send a video track from the caller. caller()->AddVideoTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kMaxWaitForActivationMs); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue(), + {.timeout = kMaxWaitForActivation}), + IsRtcOk()); ASSERT_EQ(1U, callee()->remote_streams()->count()); // Echo the stream back, and do a new offer/anwer (initiated by callee this // time). callee()->pc()->AddStream(callee()->remote_streams()->at(0)); callee()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kMaxWaitForActivationMs); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue(), + {.timeout = kMaxWaitForActivation}), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.ExpectBidirectionalVideo(); @@ -2256,18 +2526,14 @@ TEST_P(PeerConnectionIntegrationTestWithFakeClock, signaling_trip_delay_ms * required_signaling_trips + allowed_internal_delay_ms; - static const rtc::SocketAddress turn_server_1_internal_address{"88.88.88.0", - 3478}; - static const rtc::SocketAddress turn_server_1_external_address{"88.88.88.1", - 0}; - static const rtc::SocketAddress turn_server_2_internal_address{"99.99.99.0", - 3478}; - static const rtc::SocketAddress turn_server_2_external_address{"99.99.99.1", - 0}; - cricket::TestTurnServer* turn_server_1 = CreateTurnServer( + static const SocketAddress turn_server_1_internal_address{"88.88.88.0", 3478}; + static const SocketAddress turn_server_1_external_address{"88.88.88.1", 0}; + static const SocketAddress turn_server_2_internal_address{"99.99.99.0", 3478}; + static const SocketAddress turn_server_2_external_address{"99.99.99.1", 0}; + TestTurnServer* turn_server_1 = CreateTurnServer( turn_server_1_internal_address, turn_server_1_external_address); - cricket::TestTurnServer* turn_server_2 = CreateTurnServer( + TestTurnServer* turn_server_2 = CreateTurnServer( turn_server_2_internal_address, turn_server_2_external_address); // Bypass permission check on received packets so media can be sent before // the candidate is signaled. 
@@ -2279,21 +2545,21 @@ TEST_P(PeerConnectionIntegrationTestWithFakeClock, }); PeerConnectionInterface::RTCConfiguration client_1_config; - webrtc::PeerConnectionInterface::IceServer ice_server_1; + PeerConnectionInterface::IceServer ice_server_1; ice_server_1.urls.push_back("turn:88.88.88.0:3478"); ice_server_1.username = "test"; ice_server_1.password = "test"; client_1_config.servers.push_back(ice_server_1); - client_1_config.type = webrtc::PeerConnectionInterface::kRelay; + client_1_config.type = PeerConnectionInterface::kRelay; client_1_config.presume_writable_when_fully_relayed = true; PeerConnectionInterface::RTCConfiguration client_2_config; - webrtc::PeerConnectionInterface::IceServer ice_server_2; + PeerConnectionInterface::IceServer ice_server_2; ice_server_2.urls.push_back("turn:99.99.99.0:3478"); ice_server_2.username = "test"; ice_server_2.password = "test"; client_2_config.servers.push_back(ice_server_2); - client_2_config.type = webrtc::PeerConnectionInterface::kRelay; + client_2_config.type = PeerConnectionInterface::kRelay; client_2_config.presume_writable_when_fully_relayed = true; ASSERT_TRUE( @@ -2311,8 +2577,11 @@ TEST_P(PeerConnectionIntegrationTestWithFakeClock, options.offer_to_receive_video = 1; caller()->SetOfferAnswerOptions(options); caller()->CreateAndSetAndSignalOffer(); - EXPECT_TRUE_SIMULATED_WAIT(DtlsConnected(), total_connection_time_ms, - FakeClock()); + EXPECT_THAT( + WaitUntil([&] { return DtlsConnected(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(total_connection_time_ms), + .clock = &FakeClock()}), + IsRtcOk()); // Closing the PeerConnections destroys the ports before the ScopedFakeClock. // If this is not done a DCHECK can be hit in ports.cc, because a large // negative number is calculated for the rtt due to the global clock changing. @@ -2326,22 +2595,20 @@ TEST_P(PeerConnectionIntegrationTestWithFakeClock, caller()->AddAudioTrack(); // Call getStats, assert there are no candidates. - rtc::scoped_refptr first_report = - caller()->NewGetStats(); + scoped_refptr first_report = caller()->NewGetStats(); ASSERT_TRUE(first_report); auto first_candidate_stats = - first_report->GetStatsOfType(); + first_report->GetStatsOfType(); ASSERT_EQ(first_candidate_stats.size(), 0u); // Create an offer at the caller and set it as remote description on the // callee. caller()->CreateAndSetAndSignalOffer(); // Call getStats again, assert there are candidates now. - rtc::scoped_refptr second_report = - caller()->NewGetStats(); + scoped_refptr second_report = caller()->NewGetStats(); ASSERT_TRUE(second_report); auto second_candidate_stats = - second_report->GetStatsOfType(); + second_report->GetStatsOfType(); ASSERT_NE(second_candidate_stats.size(), 0u); // The fake clock ensures that no time has passed so the cache must have been @@ -2358,34 +2625,35 @@ TEST_P(PeerConnectionIntegrationTestWithFakeClock, // Start candidate gathering and wait for it to complete. Candidates are not // signalled. caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_SIMULATED_WAIT(caller()->IceGatheringStateComplete(), - kDefaultTimeout, FakeClock()); + ASSERT_THAT(WaitUntil([&] { return caller()->IceGatheringStateComplete(); }, + ::testing::IsTrue(), {.clock = &FakeClock()}), + IsRtcOk()); // Call getStats, assert there are no candidates. 
- rtc::scoped_refptr first_report = - caller()->NewGetStats(); + scoped_refptr first_report = caller()->NewGetStats(); ASSERT_TRUE(first_report); auto first_candidate_stats = - first_report->GetStatsOfType(); + first_report->GetStatsOfType(); ASSERT_EQ(first_candidate_stats.size(), 0u); // Add a "fake" candidate. - absl::optional result; + std::optional result; caller()->pc()->AddIceCandidate( - absl::WrapUnique(webrtc::CreateIceCandidate( + absl::WrapUnique(CreateIceCandidate( "", 0, "candidate:2214029314 1 udp 2122260223 127.0.0.1 49152 typ host", nullptr)), [&result](RTCError r) { result = r; }); - ASSERT_TRUE_WAIT(result.has_value(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return result.has_value(); }, ::testing::IsTrue()), + IsRtcOk()); ASSERT_TRUE(result.value().ok()); // Call getStats again, assert there is a remote candidate now. - rtc::scoped_refptr second_report = - caller()->NewGetStats(); + scoped_refptr second_report = caller()->NewGetStats(); ASSERT_TRUE(second_report); auto second_candidate_stats = - second_report->GetStatsOfType(); + second_report->GetStatsOfType(); ASSERT_EQ(second_candidate_stats.size(), 1u); // The fake clock ensures that no time has passed so the cache must have been @@ -2399,36 +2667,32 @@ TEST_P(PeerConnectionIntegrationTestWithFakeClock, // is actually used by the underlying TURN candidate pair. // Note that turnport_unittest.cc contains more detailed, lower-level tests. TEST_P(PeerConnectionIntegrationTest, TurnCustomizerUsedForTurnConnections) { - static const rtc::SocketAddress turn_server_1_internal_address{"88.88.88.0", - 3478}; - static const rtc::SocketAddress turn_server_1_external_address{"88.88.88.1", - 0}; - static const rtc::SocketAddress turn_server_2_internal_address{"99.99.99.0", - 3478}; - static const rtc::SocketAddress turn_server_2_external_address{"99.99.99.1", - 0}; + static const SocketAddress turn_server_1_internal_address{"88.88.88.0", 3478}; + static const SocketAddress turn_server_1_external_address{"88.88.88.1", 0}; + static const SocketAddress turn_server_2_internal_address{"99.99.99.0", 3478}; + static const SocketAddress turn_server_2_external_address{"99.99.99.1", 0}; CreateTurnServer(turn_server_1_internal_address, turn_server_1_external_address); CreateTurnServer(turn_server_2_internal_address, turn_server_2_external_address); PeerConnectionInterface::RTCConfiguration client_1_config; - webrtc::PeerConnectionInterface::IceServer ice_server_1; + PeerConnectionInterface::IceServer ice_server_1; ice_server_1.urls.push_back("turn:88.88.88.0:3478"); ice_server_1.username = "test"; ice_server_1.password = "test"; client_1_config.servers.push_back(ice_server_1); - client_1_config.type = webrtc::PeerConnectionInterface::kRelay; + client_1_config.type = PeerConnectionInterface::kRelay; auto* customizer1 = CreateTurnCustomizer(); client_1_config.turn_customizer = customizer1; PeerConnectionInterface::RTCConfiguration client_2_config; - webrtc::PeerConnectionInterface::IceServer ice_server_2; + PeerConnectionInterface::IceServer ice_server_2; ice_server_2.urls.push_back("turn:99.99.99.0:3478"); ice_server_2.username = "test"; ice_server_2.password = "test"; client_2_config.servers.push_back(ice_server_2); - client_2_config.type = webrtc::PeerConnectionInterface::kRelay; + client_2_config.type = PeerConnectionInterface::kRelay; auto* customizer2 = CreateTurnCustomizer(); client_2_config.turn_customizer = customizer2; @@ -2443,7 +2707,8 @@ TEST_P(PeerConnectionIntegrationTest, TurnCustomizerUsedForTurnConnections) { 
options.offer_to_receive_video = 1; caller()->SetOfferAnswerOptions(options); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return DtlsConnected(); }, ::testing::IsTrue()), + IsRtcOk()); ExpectTurnCustomizerCountersIncremented(customizer1); ExpectTurnCustomizerCountersIncremented(customizer2); @@ -2452,26 +2717,25 @@ TEST_P(PeerConnectionIntegrationTest, TurnCustomizerUsedForTurnConnections) { // Verifies that you can use TCP instead of UDP to connect to a TURN server and // send media between the caller and the callee. TEST_P(PeerConnectionIntegrationTest, TCPUsedForTurnConnections) { - static const rtc::SocketAddress turn_server_internal_address{"88.88.88.0", - 3478}; - static const rtc::SocketAddress turn_server_external_address{"88.88.88.1", 0}; + static const SocketAddress turn_server_internal_address{"88.88.88.0", 3478}; + static const SocketAddress turn_server_external_address{"88.88.88.1", 0}; // Enable TCP for the fake turn server. CreateTurnServer(turn_server_internal_address, turn_server_external_address, - cricket::PROTO_TCP); + PROTO_TCP); - webrtc::PeerConnectionInterface::IceServer ice_server; + PeerConnectionInterface::IceServer ice_server; ice_server.urls.push_back("turn:88.88.88.0:3478?transport=tcp"); ice_server.username = "test"; ice_server.password = "test"; PeerConnectionInterface::RTCConfiguration client_1_config; client_1_config.servers.push_back(ice_server); - client_1_config.type = webrtc::PeerConnectionInterface::kRelay; + client_1_config.type = PeerConnectionInterface::kRelay; PeerConnectionInterface::RTCConfiguration client_2_config; client_2_config.servers.push_back(ice_server); - client_2_config.type = webrtc::PeerConnectionInterface::kRelay; + client_2_config.type = PeerConnectionInterface::kRelay; ASSERT_TRUE( CreatePeerConnectionWrappersWithConfig(client_1_config, client_2_config)); @@ -2481,9 +2745,14 @@ TEST_P(PeerConnectionIntegrationTest, TCPUsedForTurnConnections) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kMaxWaitForFramesMs); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return callee()->ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionConnected), + {.timeout = kMaxWaitForFrames}), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudioAndVideo(); @@ -2497,45 +2766,44 @@ TEST_P(PeerConnectionIntegrationTest, TCPUsedForTurnConnections) { // contains more detailed, lower-level tests. TEST_P(PeerConnectionIntegrationTest, SSLCertificateVerifierUsedForTurnConnections) { - static const rtc::SocketAddress turn_server_internal_address{"88.88.88.0", - 3478}; - static const rtc::SocketAddress turn_server_external_address{"88.88.88.1", 0}; + static const SocketAddress turn_server_internal_address{"88.88.88.0", 3478}; + static const SocketAddress turn_server_external_address{"88.88.88.1", 0}; // Enable TCP-TLS for the fake turn server. We need to pass in 88.88.88.0 so // that host name verification passes on the fake certificate. 
CreateTurnServer(turn_server_internal_address, turn_server_external_address, - cricket::PROTO_TLS, "88.88.88.0"); + PROTO_TLS, "88.88.88.0"); - webrtc::PeerConnectionInterface::IceServer ice_server; + PeerConnectionInterface::IceServer ice_server; ice_server.urls.push_back("turns:88.88.88.0:3478?transport=tcp"); ice_server.username = "test"; ice_server.password = "test"; PeerConnectionInterface::RTCConfiguration client_1_config; client_1_config.servers.push_back(ice_server); - client_1_config.type = webrtc::PeerConnectionInterface::kRelay; + client_1_config.type = PeerConnectionInterface::kRelay; PeerConnectionInterface::RTCConfiguration client_2_config; client_2_config.servers.push_back(ice_server); // Setting the type to kRelay forces the connection to go through a TURN // server. - client_2_config.type = webrtc::PeerConnectionInterface::kRelay; + client_2_config.type = PeerConnectionInterface::kRelay; // Get a copy to the pointer so we can verify calls later. - rtc::TestCertificateVerifier* client_1_cert_verifier = - new rtc::TestCertificateVerifier(); + TestCertificateVerifier* client_1_cert_verifier = + new TestCertificateVerifier(); client_1_cert_verifier->verify_certificate_ = true; - rtc::TestCertificateVerifier* client_2_cert_verifier = - new rtc::TestCertificateVerifier(); + TestCertificateVerifier* client_2_cert_verifier = + new TestCertificateVerifier(); client_2_cert_verifier->verify_certificate_ = true; // Create the dependencies with the test certificate verifier. - webrtc::PeerConnectionDependencies client_1_deps(nullptr); + PeerConnectionDependencies client_1_deps(nullptr); client_1_deps.tls_cert_verifier = - std::unique_ptr(client_1_cert_verifier); - webrtc::PeerConnectionDependencies client_2_deps(nullptr); + std::unique_ptr(client_1_cert_verifier); + PeerConnectionDependencies client_2_deps(nullptr); client_2_deps.tls_cert_verifier = - std::unique_ptr(client_2_cert_verifier); + std::unique_ptr(client_2_cert_verifier); ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndDeps( client_1_config, std::move(client_1_deps), client_2_config, @@ -2549,7 +2817,8 @@ TEST_P(PeerConnectionIntegrationTest, options.offer_to_receive_video = 1; caller()->SetOfferAnswerOptions(options); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return DtlsConnected(); }, ::testing::IsTrue()), + IsRtcOk()); EXPECT_GT(client_1_cert_verifier->call_count_, 0u); EXPECT_GT(client_2_cert_verifier->call_count_, 0u); @@ -2569,7 +2838,7 @@ TEST_P(PeerConnectionIntegrationTest, IceTransportFactoryUsedForConnections) { /*reset_decoder_factory=*/false); ASSERT_TRUE(wrapper); wrapper->CreateDataChannel(); - auto observer = rtc::make_ref_counted(); + auto observer = make_ref_counted(); wrapper->pc()->SetLocalDescription(observer.get(), wrapper->CreateOfferAndWait().release()); } @@ -2590,36 +2859,39 @@ TEST_P(PeerConnectionIntegrationTest, CodecNamesAreCaseInsensitive) { // Remove all but one audio/video codec (opus and VP8), and change the // casing of the caller's generated offer. 
- caller()->SetGeneratedSdpMunger([](cricket::SessionDescription* description) { - cricket::AudioContentDescription* audio = - GetFirstAudioContentDescription(description); - ASSERT_NE(nullptr, audio); - auto audio_codecs = audio->codecs(); - audio_codecs.erase(std::remove_if(audio_codecs.begin(), audio_codecs.end(), - [](const cricket::AudioCodec& codec) { - return codec.name != "opus"; - }), - audio_codecs.end()); - ASSERT_EQ(1u, audio_codecs.size()); - audio_codecs[0].name = "OpUs"; - audio->set_codecs(audio_codecs); - - cricket::VideoContentDescription* video = - GetFirstVideoContentDescription(description); - ASSERT_NE(nullptr, video); - auto video_codecs = video->codecs(); - video_codecs.erase(std::remove_if(video_codecs.begin(), video_codecs.end(), - [](const cricket::VideoCodec& codec) { - return codec.name != "VP8"; - }), - video_codecs.end()); - ASSERT_EQ(1u, video_codecs.size()); - video_codecs[0].name = "vP8"; - video->set_codecs(video_codecs); - }); + caller()->SetGeneratedSdpMunger( + [](std::unique_ptr& sdp) { + AudioContentDescription* audio = + GetFirstAudioContentDescription(sdp->description()); + ASSERT_NE(nullptr, audio); + auto audio_codecs = audio->codecs(); + audio_codecs.erase( + std::remove_if( + audio_codecs.begin(), audio_codecs.end(), + [](const Codec& codec) { return codec.name != "opus"; }), + audio_codecs.end()); + ASSERT_EQ(1u, audio_codecs.size()); + audio_codecs[0].name = "OpUs"; + audio->set_codecs(audio_codecs); + + VideoContentDescription* video = + GetFirstVideoContentDescription(sdp->description()); + ASSERT_NE(nullptr, video); + auto video_codecs = video->codecs(); + video_codecs.erase( + std::remove_if( + video_codecs.begin(), video_codecs.end(), + [](const Codec& codec) { return codec.name != "VP8"; }), + video_codecs.end()); + ASSERT_EQ(1u, video_codecs.size()); + video_codecs[0].name = "vP8"; + video->set_codecs(video_codecs); + }); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Verify frames are still received end-to-end. MediaExpectations media_expectations; @@ -2632,19 +2904,21 @@ TEST_P(PeerConnectionIntegrationTest, GetSourcesAudio) { ConnectFakeSignaling(); caller()->AddAudioTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Wait for one audio frame to be received by the callee. 
MediaExpectations media_expectations; media_expectations.CalleeExpectsSomeAudio(1); ASSERT_TRUE(ExpectNewFrames(media_expectations)); ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u); auto receiver = callee()->pc()->GetReceivers()[0]; - ASSERT_EQ(receiver->media_type(), cricket::MEDIA_TYPE_AUDIO); + ASSERT_EQ(receiver->media_type(), webrtc::MediaType::AUDIO); auto sources = receiver->GetSources(); ASSERT_GT(receiver->GetParameters().encodings.size(), 0u); EXPECT_EQ(receiver->GetParameters().encodings[0].ssrc, sources[0].source_id()); - EXPECT_EQ(webrtc::RtpSourceType::SSRC, sources[0].source_type()); + EXPECT_EQ(RtpSourceType::SSRC, sources[0].source_type()); } TEST_P(PeerConnectionIntegrationTest, GetSourcesVideo) { @@ -2652,20 +2926,23 @@ TEST_P(PeerConnectionIntegrationTest, GetSourcesVideo) { ConnectFakeSignaling(); caller()->AddVideoTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - // Wait for one video frame to be received by the callee. + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + // Wait for two video frames to be received by the callee. + // TODO: https://issues.webrtc.org/42220900 - wait for only one frame again MediaExpectations media_expectations; - media_expectations.CalleeExpectsSomeVideo(1); + media_expectations.CalleeExpectsSomeVideo(2); ASSERT_TRUE(ExpectNewFrames(media_expectations)); ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u); auto receiver = callee()->pc()->GetReceivers()[0]; - ASSERT_EQ(receiver->media_type(), cricket::MEDIA_TYPE_VIDEO); + ASSERT_EQ(receiver->media_type(), webrtc::MediaType::VIDEO); auto sources = receiver->GetSources(); ASSERT_GT(receiver->GetParameters().encodings.size(), 0u); ASSERT_GT(sources.size(), 0u); EXPECT_EQ(receiver->GetParameters().encodings[0].ssrc, sources[0].source_id()); - EXPECT_EQ(webrtc::RtpSourceType::SSRC, sources[0].source_type()); + EXPECT_EQ(RtpSourceType::SSRC, sources[0].source_type()); } TEST_P(PeerConnectionIntegrationTest, UnsignaledSsrcGetSourcesAudio) { @@ -2674,17 +2951,23 @@ TEST_P(PeerConnectionIntegrationTest, UnsignaledSsrcGetSourcesAudio) { caller()->AddAudioTrack(); callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u); auto receiver = callee()->pc()->GetReceivers()[0]; std::vector sources; - EXPECT_TRUE_WAIT(([&receiver, &sources]() { - sources = receiver->GetSources(); - return !sources.empty(); - })(), - kDefaultTimeout); + EXPECT_THAT(WaitUntil( + [&] { + return ([&receiver, &sources]() { + sources = receiver->GetSources(); + return !sources.empty(); + })(); + }, + ::testing::IsTrue()), + IsRtcOk()); ASSERT_GT(sources.size(), 0u); - EXPECT_EQ(webrtc::RtpSourceType::SSRC, sources[0].source_type()); + EXPECT_EQ(RtpSourceType::SSRC, sources[0].source_type()); } TEST_P(PeerConnectionIntegrationTest, UnsignaledSsrcGetSourcesVideo) { @@ -2693,17 +2976,23 @@ TEST_P(PeerConnectionIntegrationTest, UnsignaledSsrcGetSourcesVideo) { caller()->AddVideoTrack(); callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); 
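// Illustrative sketch, not part of the patch: the waiting idiom these tests
// migrate to. The legacy ASSERT_TRUE_WAIT / EXPECT_EQ_WAIT macros are replaced
// by WaitUntil(), which polls a lambda until its value satisfies a gMock
// matcher and returns a result that IsRtcOk() can assert on. The headers that
// provide WaitUntil/IsRtcOk are an assumption here; the patch only shows their
// use.
//
//   // Old: block until the predicate is true or kDefaultTimeout expires.
//   ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout);
//
//   // New: poll the lambda, match the value, assert the wait succeeded.
//   ASSERT_THAT(
//       WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()),
//       IsRtcOk());
//
//   // Non-default timeouts go through the trailing options struct, as in the
//   // surrounding hunks (kMaxWaitForFrames is defined elsewhere, presumably a
//   // TimeDelta).
//   EXPECT_THAT(
//       WaitUntil([&] { return callee()->ice_connection_state(); },
//                 ::testing::Eq(PeerConnectionInterface::kIceConnectionConnected),
//                 {.timeout = kMaxWaitForFrames}),
//       IsRtcOk());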
ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u); auto receiver = callee()->pc()->GetReceivers()[0]; std::vector sources; - EXPECT_TRUE_WAIT(([&receiver, &sources]() { - sources = receiver->GetSources(); - return !sources.empty(); - })(), - kDefaultTimeout); + EXPECT_THAT(WaitUntil( + [&] { + return ([&receiver, &sources]() { + sources = receiver->GetSources(); + return !sources.empty(); + })(); + }, + ::testing::IsTrue()), + IsRtcOk()); ASSERT_GT(sources.size(), 0u); - EXPECT_EQ(webrtc::RtpSourceType::SSRC, sources[0].source_type()); + EXPECT_EQ(RtpSourceType::SSRC, sources[0].source_type()); } // Similar to the above test, except instead of waiting until GetSources() is @@ -2718,7 +3007,9 @@ TEST_P(PeerConnectionIntegrationTest, caller()->AddVideoTrack(); callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Wait for one video frame to be received by the callee. MediaExpectations media_expectations; media_expectations.CalleeExpectsSomeVideo(1); @@ -2728,7 +3019,7 @@ TEST_P(PeerConnectionIntegrationTest, std::vector sources = receiver->GetSources(); // SSRC history must not be cleared since the reception of the first frame. ASSERT_GT(sources.size(), 0u); - EXPECT_EQ(webrtc::RtpSourceType::SSRC, sources[0].source_type()); + EXPECT_EQ(RtpSourceType::SSRC, sources[0].source_type()); } TEST_P(PeerConnectionIntegrationTest, UnsignaledSsrcGetParametersAudio) { @@ -2737,16 +3028,22 @@ TEST_P(PeerConnectionIntegrationTest, UnsignaledSsrcGetParametersAudio) { caller()->AddAudioTrack(); callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u); auto receiver = callee()->pc()->GetReceivers()[0]; RtpParameters parameters; - EXPECT_TRUE_WAIT(([&receiver, ¶meters]() { - parameters = receiver->GetParameters(); - return !parameters.encodings.empty() && - parameters.encodings[0].ssrc.has_value(); - })(), - kDefaultTimeout); + EXPECT_THAT(WaitUntil( + [&] { + return ([&receiver, ¶meters]() { + parameters = receiver->GetParameters(); + return !parameters.encodings.empty() && + parameters.encodings[0].ssrc.has_value(); + })(); + }, + ::testing::IsTrue()), + IsRtcOk()); ASSERT_EQ(parameters.encodings.size(), 1u); EXPECT_TRUE(parameters.encodings[0].ssrc.has_value()); } @@ -2757,16 +3054,22 @@ TEST_P(PeerConnectionIntegrationTest, UnsignaledSsrcGetParametersVideo) { caller()->AddVideoTrack(); callee()->SetReceivedSdpMunger(RemoveSsrcsAndMsids); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u); auto receiver = callee()->pc()->GetReceivers()[0]; RtpParameters parameters; - EXPECT_TRUE_WAIT(([&receiver, ¶meters]() { - parameters = receiver->GetParameters(); - return !parameters.encodings.empty() && - parameters.encodings[0].ssrc.has_value(); - })(), - kDefaultTimeout); + EXPECT_THAT(WaitUntil( + [&] { + return ([&receiver, ¶meters]() { + parameters = receiver->GetParameters(); + return !parameters.encodings.empty() && + 
parameters.encodings[0].ssrc.has_value(); + })(); + }, + ::testing::IsTrue()), + IsRtcOk()); ASSERT_EQ(parameters.encodings.size(), 1u); EXPECT_TRUE(parameters.encodings[0].ssrc.has_value()); } @@ -2791,12 +3094,13 @@ TEST_F(PeerConnectionIntegrationTestPlanB, RemoveAndAddTrackWithNewStreamId) { ConnectFakeSignaling(); // Add track using stream 1, do offer/answer. - rtc::scoped_refptr track = - caller()->CreateLocalAudioTrack(); - rtc::scoped_refptr sender = + scoped_refptr track = caller()->CreateLocalAudioTrack(); + scoped_refptr sender = caller()->AddTrack(track, {"stream_1"}); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); { MediaExpectations media_expectations; media_expectations.CalleeExpectsSomeAudio(1); @@ -2806,7 +3110,9 @@ TEST_F(PeerConnectionIntegrationTestPlanB, RemoveAndAddTrackWithNewStreamId) { caller()->pc()->RemoveTrackOrError(sender); sender = caller()->AddTrack(track, {"stream_2"}); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Wait for additional audio frames to be received by the callee. { MediaExpectations media_expectations; @@ -2819,18 +3125,76 @@ TEST_P(PeerConnectionIntegrationTest, RtcEventLogOutputWriteCalled) { ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); - auto output = std::make_unique>(); - ON_CALL(*output, IsActive()).WillByDefault(::testing::Return(true)); - ON_CALL(*output, Write(::testing::A())) - .WillByDefault(::testing::Return(true)); - EXPECT_CALL(*output, Write(::testing::A())) - .Times(::testing::AtLeast(1)); - EXPECT_TRUE(caller()->pc()->StartRtcEventLog( - std::move(output), webrtc::RtcEventLog::kImmediateOutput)); + auto output = std::make_unique>(); + ON_CALL(*output, IsActive).WillByDefault(Return(true)); + ON_CALL(*output, Write).WillByDefault(Return(true)); + EXPECT_CALL(*output, Write).Times(AtLeast(1)); + EXPECT_TRUE(caller()->pc()->StartRtcEventLog(std::move(output), + RtcEventLog::kImmediateOutput)); caller()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); +} + +TEST_P(PeerConnectionIntegrationTest, RtcEventLogOutputWriteCalledOnStop) { + // This test uses check point to ensure log is written before peer connection + // is destroyed. + // https://google.github.io/googletest/gmock_cook_book.html#UsingCheckPoints + MockFunction test_is_complete; + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + + auto output = std::make_unique>(); + ON_CALL(*output, IsActive).WillByDefault(Return(true)); + ON_CALL(*output, Write).WillByDefault(Return(true)); + InSequence s; + EXPECT_CALL(*output, Write).Times(AtLeast(1)); + EXPECT_CALL(test_is_complete, Call); + + // Use large output period to prevent this test pass for the wrong reason. 
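// Illustrative sketch, not part of the patch: the gMock "checkpoint"
// technique referenced in the comment above (see the gMock cookbook link).
// A MockFunction<void()> acts as a marker inside an InSequence block:
// expectations listed before the marker must be satisfied before the marker's
// Call(), which here proves that Write() ran before the peer connection was
// stopped or closed and then destroyed. `log_output` and `DoStopOrClose` are
// hypothetical names.
//
//   ::testing::MockFunction<void()> checkpoint;
//   ::testing::InSequence s;
//   EXPECT_CALL(log_output, Write).Times(::testing::AtLeast(1));
//   EXPECT_CALL(checkpoint, Call);
//
//   DoStopOrClose();    // Expected to flush the event log, triggering Write().
//   checkpoint.Call();  // Fails the ordered expectations if Write() never ran.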
+ EXPECT_TRUE(caller()->pc()->StartRtcEventLog(std::move(output), + /*output_period_ms=*/100'000)); + + caller()->AddAudioVideoTracks(); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + + caller()->pc()->StopRtcEventLog(); + test_is_complete.Call(); +} + +TEST_P(PeerConnectionIntegrationTest, RtcEventLogOutputWriteCalledOnClose) { + // This test uses check point to ensure log is written before peer connection + // is destroyed. + // https://google.github.io/googletest/gmock_cook_book.html#UsingCheckPoints + MockFunction test_is_complete; + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + + auto output = std::make_unique>(); + ON_CALL(*output, IsActive).WillByDefault(Return(true)); + ON_CALL(*output, Write).WillByDefault(Return(true)); + InSequence s; + EXPECT_CALL(*output, Write).Times(AtLeast(1)); + EXPECT_CALL(test_is_complete, Call); + + // Use large output period to prevent this test pass for the wrong reason. + EXPECT_TRUE(caller()->pc()->StartRtcEventLog(std::move(output), + /*output_period_ms=*/100'000)); + + caller()->AddAudioVideoTracks(); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + + caller()->pc()->Close(); + test_is_complete.Call(); } // Test that if candidates are only signaled by applying full session @@ -2848,15 +3212,21 @@ TEST_P(PeerConnectionIntegrationTest, MediaFlowsWhenCandidatesSetOnlyInSdp) { caller()->CreateAndSetAndSignalOffer(); // Wait for all candidates to be gathered on both the caller and callee. - ASSERT_EQ_WAIT(PeerConnectionInterface::kIceGatheringComplete, - caller()->ice_gathering_state(), kDefaultTimeout); - ASSERT_EQ_WAIT(PeerConnectionInterface::kIceGatheringComplete, - callee()->ice_gathering_state(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return caller()->ice_gathering_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceGatheringComplete)), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return callee()->ice_gathering_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceGatheringComplete)), + IsRtcOk()); // The candidates will now be included in the session description, so // signaling them will start the ICE connection. caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Ensure that media flows in both directions. MediaExpectations media_expectations; @@ -2880,7 +3250,9 @@ TEST_P(PeerConnectionIntegrationTest, DisableAndEnableAudioPlayout) { caller()->AddAudioTrack(); callee()->AddAudioTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Pump messages for a second. 
WAIT(false, 1000); @@ -2900,11 +3272,10 @@ TEST_P(PeerConnectionIntegrationTest, DisableAndEnableAudioPlayout) { double GetAudioEnergyStat(PeerConnectionIntegrationWrapper* pc) { auto report = pc->NewGetStats(); - auto inbound_rtps = - report->GetStatsOfType(); + auto inbound_rtps = report->GetStatsOfType(); RTC_CHECK(!inbound_rtps.empty()); auto* inbound_rtp = inbound_rtps[0]; - if (!inbound_rtp->total_audio_energy.is_defined()) { + if (!inbound_rtp->total_audio_energy.has_value()) { return 0.0; } return *inbound_rtp->total_audio_energy; @@ -2924,10 +3295,14 @@ TEST_P(PeerConnectionIntegrationTest, caller()->pc()->SetAudioPlayout(false); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Wait for the callee to receive audio stats. - EXPECT_TRUE_WAIT(GetAudioEnergyStat(caller()) > 0, kMaxWaitForFramesMs); + EXPECT_THAT(WaitUntil([&] { return GetAudioEnergyStat(caller()); }, + ::testing::Gt(0), {.timeout = kMaxWaitForFrames}), + IsRtcOk()); } #endif // !defined(THREAD_SANITIZER) @@ -2945,7 +3320,9 @@ TEST_P(PeerConnectionIntegrationTest, DisableAndEnableAudioRecording) { caller()->AddAudioTrack(); callee()->AddAudioTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Pump messages for a second. WAIT(false, 1000); @@ -2971,23 +3348,24 @@ TEST_P(PeerConnectionIntegrationTest, options.offer_to_receive_audio = 1; caller()->SetOfferAnswerOptions(options); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return DtlsConnected(); }, ::testing::IsTrue()), + IsRtcOk()); ASSERT_NE(nullptr, caller()->event_log_factory()); ASSERT_NE(nullptr, callee()->event_log_factory()); - webrtc::FakeRtcEventLog* caller_event_log = + FakeRtcEventLog* caller_event_log = caller()->event_log_factory()->last_log_created(); - webrtc::FakeRtcEventLog* callee_event_log = + FakeRtcEventLog* callee_event_log = callee()->event_log_factory()->last_log_created(); ASSERT_NE(nullptr, caller_event_log); ASSERT_NE(nullptr, callee_event_log); - int caller_ice_config_count = caller_event_log->GetEventCount( - webrtc::RtcEvent::Type::IceCandidatePairConfig); - int caller_ice_event_count = caller_event_log->GetEventCount( - webrtc::RtcEvent::Type::IceCandidatePairEvent); - int callee_ice_config_count = callee_event_log->GetEventCount( - webrtc::RtcEvent::Type::IceCandidatePairConfig); - int callee_ice_event_count = callee_event_log->GetEventCount( - webrtc::RtcEvent::Type::IceCandidatePairEvent); + int caller_ice_config_count = + caller_event_log->GetEventCount(RtcEvent::Type::IceCandidatePairConfig); + int caller_ice_event_count = + caller_event_log->GetEventCount(RtcEvent::Type::IceCandidatePairEvent); + int callee_ice_config_count = + callee_event_log->GetEventCount(RtcEvent::Type::IceCandidatePairConfig); + int callee_ice_event_count = + callee_event_log->GetEventCount(RtcEvent::Type::IceCandidatePairEvent); EXPECT_LT(0, caller_ice_config_count); EXPECT_LT(0, caller_ice_event_count); EXPECT_LT(0, callee_ice_config_count); @@ -2995,26 +3373,25 @@ TEST_P(PeerConnectionIntegrationTest, } TEST_P(PeerConnectionIntegrationTest, RegatherAfterChangingIceTransportType) { - static const rtc::SocketAddress turn_server_internal_address{"88.88.88.0", - 3478}; 
- static const rtc::SocketAddress turn_server_external_address{"88.88.88.1", 0}; + static const SocketAddress turn_server_internal_address{"88.88.88.0", 3478}; + static const SocketAddress turn_server_external_address{"88.88.88.1", 0}; CreateTurnServer(turn_server_internal_address, turn_server_external_address); - webrtc::PeerConnectionInterface::IceServer ice_server; + PeerConnectionInterface::IceServer ice_server; ice_server.urls.push_back("turn:88.88.88.0:3478"); ice_server.username = "test"; ice_server.password = "test"; PeerConnectionInterface::RTCConfiguration caller_config; caller_config.servers.push_back(ice_server); - caller_config.type = webrtc::PeerConnectionInterface::kRelay; + caller_config.type = PeerConnectionInterface::kRelay; caller_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY; caller_config.surface_ice_candidates_on_ice_transport_type_changed = true; PeerConnectionInterface::RTCConfiguration callee_config; callee_config.servers.push_back(ice_server); - callee_config.type = webrtc::PeerConnectionInterface::kRelay; + callee_config.type = PeerConnectionInterface::kRelay; callee_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY; callee_config.surface_ice_candidates_on_ice_transport_type_changed = true; @@ -3026,38 +3403,46 @@ TEST_P(PeerConnectionIntegrationTest, RegatherAfterChangingIceTransportType) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Since we are doing continual gathering, the ICE transport does not reach // kIceGatheringComplete (see // P2PTransportChannel::OnCandidatesAllocationDone), and consequently not // kIceConnectionComplete. - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - caller()->ice_connection_state(), kDefaultTimeout); - EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, - callee()->ice_connection_state(), kDefaultTimeout); + EXPECT_THAT(WaitUntil([&] { return caller()->ice_connection_state(); }, + ::testing::Eq( + PeerConnectionInterface::kIceConnectionConnected)), + IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return callee()->ice_connection_state(); }, + ::testing::Eq( + PeerConnectionInterface::kIceConnectionConnected)), + IsRtcOk()); // Note that we cannot use the metric // `WebRTC.PeerConnection.CandidatePairType_UDP` in this test since this // metric is only populated when we reach kIceConnectionComplete in the // current implementation. - EXPECT_EQ(cricket::RELAY_PORT_TYPE, - caller()->last_candidate_gathered().type()); - EXPECT_EQ(cricket::RELAY_PORT_TYPE, - callee()->last_candidate_gathered().type()); + EXPECT_TRUE(caller()->last_candidate_gathered().is_relay()); + EXPECT_TRUE(callee()->last_candidate_gathered().is_relay()); // Loosen the caller's candidate filter. caller_config = caller()->pc()->GetConfiguration(); - caller_config.type = webrtc::PeerConnectionInterface::kAll; + caller_config.type = PeerConnectionInterface::kAll; caller()->pc()->SetConfiguration(caller_config); // We should have gathered a new host candidate. - EXPECT_EQ_WAIT(cricket::LOCAL_PORT_TYPE, - caller()->last_candidate_gathered().type(), kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return caller()->last_candidate_gathered().is_local(); }, + ::testing::IsTrue()), + IsRtcOk()); // Loosen the callee's candidate filter. 
callee_config = callee()->pc()->GetConfiguration(); - callee_config.type = webrtc::PeerConnectionInterface::kAll; + callee_config.type = PeerConnectionInterface::kAll; callee()->pc()->SetConfiguration(callee_config); - EXPECT_EQ_WAIT(cricket::LOCAL_PORT_TYPE, - callee()->last_candidate_gathered().type(), kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return callee()->last_candidate_gathered().is_local(); }, + ::testing::IsTrue()), + IsRtcOk()); // Create an offer and verify that it does not contain an ICE restart (i.e new // ice credentials). @@ -3078,25 +3463,24 @@ TEST_P(PeerConnectionIntegrationTest, RegatherAfterChangingIceTransportType) { } TEST_P(PeerConnectionIntegrationTest, OnIceCandidateError) { - static const rtc::SocketAddress turn_server_internal_address{"88.88.88.0", - 3478}; - static const rtc::SocketAddress turn_server_external_address{"88.88.88.1", 0}; + static const SocketAddress turn_server_internal_address{"88.88.88.0", 3478}; + static const SocketAddress turn_server_external_address{"88.88.88.1", 0}; CreateTurnServer(turn_server_internal_address, turn_server_external_address); - webrtc::PeerConnectionInterface::IceServer ice_server; + PeerConnectionInterface::IceServer ice_server; ice_server.urls.push_back("turn:88.88.88.0:3478"); ice_server.username = "test"; ice_server.password = "123"; PeerConnectionInterface::RTCConfiguration caller_config; caller_config.servers.push_back(ice_server); - caller_config.type = webrtc::PeerConnectionInterface::kRelay; + caller_config.type = PeerConnectionInterface::kRelay; caller_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY; PeerConnectionInterface::RTCConfiguration callee_config; callee_config.servers.push_back(ice_server); - callee_config.type = webrtc::PeerConnectionInterface::kRelay; + callee_config.type = PeerConnectionInterface::kRelay; callee_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY; ASSERT_TRUE( @@ -3107,27 +3491,31 @@ TEST_P(PeerConnectionIntegrationTest, OnIceCandidateError) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(401, caller()->error_event().error_code, kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return caller()->error_event().error_code; }, + ::testing::Eq(401)), + IsRtcOk()); EXPECT_EQ("Unauthorized", caller()->error_event().error_text); EXPECT_EQ("turn:88.88.88.0:3478?transport=udp", caller()->error_event().url); EXPECT_NE(caller()->error_event().address, ""); } TEST_P(PeerConnectionIntegrationTest, OnIceCandidateErrorWithEmptyAddress) { - webrtc::PeerConnectionInterface::IceServer ice_server; + PeerConnectionInterface::IceServer ice_server; ice_server.urls.push_back("turn:127.0.0.1:3478?transport=tcp"); ice_server.username = "test"; ice_server.password = "test"; PeerConnectionInterface::RTCConfiguration caller_config; caller_config.servers.push_back(ice_server); - caller_config.type = webrtc::PeerConnectionInterface::kRelay; + caller_config.type = PeerConnectionInterface::kRelay; caller_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY; PeerConnectionInterface::RTCConfiguration callee_config; callee_config.servers.push_back(ice_server); - callee_config.type = webrtc::PeerConnectionInterface::kRelay; + callee_config.type = PeerConnectionInterface::kRelay; 
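// Illustrative sketch, not part of the patch: the TURN configuration pattern
// these error tests build on. An IceServer entry names the (fake) TURN server
// and its credentials, and type = kRelay forces every candidate pair through
// the relay; the deliberately wrong password ("123") set above is what makes
// the allocation fail and surface as the 401 Unauthorized error checked
// further down.
//
//   PeerConnectionInterface::IceServer ice_server;
//   ice_server.urls.push_back("turn:88.88.88.0:3478");
//   ice_server.username = "test";
//   ice_server.password = "test";  // "123" in the 401 test above.
//   PeerConnectionInterface::RTCConfiguration config;
//   config.servers.push_back(ice_server);
//   config.type = PeerConnectionInterface::kRelay;  // Relay-only candidates.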
callee_config.continual_gathering_policy = PeerConnection::GATHER_CONTINUALLY; ASSERT_TRUE( @@ -3138,8 +3526,12 @@ TEST_P(PeerConnectionIntegrationTest, OnIceCandidateErrorWithEmptyAddress) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - EXPECT_EQ_WAIT(701, caller()->error_event().error_code, kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return caller()->error_event().error_code; }, + ::testing::Eq(701)), + IsRtcOk()); EXPECT_EQ(caller()->error_event().address, ""); } @@ -3155,19 +3547,25 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, caller()->AddAudioTrack(); callee()->AddAudioTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudio(); ASSERT_TRUE(ExpectNewFrames(media_expectations)); SetSignalIceCandidates(false); // Workaround candidate outrace sdp. caller()->AddVideoTrack(); callee()->AddVideoTrack(); - auto observer = rtc::make_ref_counted(); + auto observer = make_ref_counted(); callee()->pc()->SetLocalDescription(observer.get(), callee()->CreateOfferAndWait().release()); - EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return observer->called(); }, ::testing::IsTrue()), + IsRtcOk()); caller()->CreateAndSetAndSignalOffer(); // Implicit rollback. - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); ASSERT_TRUE(ExpectNewFrames(media_expectations)); } @@ -3179,18 +3577,20 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); - auto sld_observer = - rtc::make_ref_counted(); + auto sld_observer = make_ref_counted(); callee()->pc()->SetLocalDescription(sld_observer.get(), callee()->CreateOfferAndWait().release()); - EXPECT_TRUE_WAIT(sld_observer->called(), kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return sld_observer->called(); }, ::testing::IsTrue()), + IsRtcOk()); EXPECT_EQ(sld_observer->error(), ""); - auto srd_observer = - rtc::make_ref_counted(); + auto srd_observer = make_ref_counted(); callee()->pc()->SetRemoteDescription( srd_observer.get(), caller()->CreateOfferAndWait().release()); - EXPECT_TRUE_WAIT(srd_observer->called(), kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return srd_observer->called(); }, ::testing::IsTrue()), + IsRtcOk()); EXPECT_EQ(srd_observer->error(), ""); EXPECT_THAT(callee()->peer_connection_signaling_state_history(), @@ -3205,17 +3605,16 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, ConnectFakeSignaling(); caller()->AddVideoTrack(); callee()->AddVideoTrack(); - auto munger = [](cricket::SessionDescription* desc) { - cricket::VideoContentDescription* video = - GetFirstVideoContentDescription(desc); + auto munger = [](std::unique_ptr& sdp) { + VideoContentDescription* video = + GetFirstVideoContentDescription(sdp->description()); auto codecs = video->codecs(); for (auto&& codec : codecs) { if (codec.name == "H264") { std::string value; // The parameter is not supposed to be present in SDP by default. 
- EXPECT_FALSE( - codec.GetParam(cricket::kH264FmtpSpsPpsIdrInKeyframe, &value)); - codec.SetParam(std::string(cricket::kH264FmtpSpsPpsIdrInKeyframe), + EXPECT_FALSE(codec.GetParam(kH264FmtpSpsPpsIdrInKeyframe, &value)); + codec.SetParam(std::string(kH264FmtpSpsPpsIdrInKeyframe), std::string("")); } } @@ -3226,19 +3625,21 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, // Munge remote answer for SRD. caller()->SetReceivedSdpMunger(munger); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Observe that after munging the parameter is present in generated SDP. - caller()->SetGeneratedSdpMunger([](cricket::SessionDescription* desc) { - cricket::VideoContentDescription* video = - GetFirstVideoContentDescription(desc); - for (auto&& codec : video->codecs()) { - if (codec.name == "H264") { - std::string value; - EXPECT_TRUE( - codec.GetParam(cricket::kH264FmtpSpsPpsIdrInKeyframe, &value)); - } - } - }); + caller()->SetGeneratedSdpMunger( + [](std::unique_ptr& sdp) { + VideoContentDescription* video = + GetFirstVideoContentDescription(sdp->description()); + for (auto&& codec : video->codecs()) { + if (codec.name == "H264") { + std::string value; + EXPECT_TRUE(codec.GetParam(kH264FmtpSpsPpsIdrInKeyframe, &value)); + } + } + }); caller()->CreateOfferAndWait(); } @@ -3248,10 +3649,12 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, config.sdp_semantics = SdpSemantics::kUnifiedPlan; ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); ConnectFakeSignaling(); - caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + caller()->pc()->AddTransceiver(webrtc::MediaType::AUDIO); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); int current_size = caller()->pc()->GetTransceivers().size(); // Add more tracks until we get close to having issues. // Issues have been seen at: @@ -3260,15 +3663,17 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, while (current_size < 8) { // Double the number of tracks for (int i = 0; i < current_size; i++) { - caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + caller()->pc()->AddTransceiver(webrtc::MediaType::AUDIO); } current_size = caller()->pc()->GetTransceivers().size(); RTC_LOG(LS_INFO) << "Renegotiating with " << current_size << " tracks"; - auto start_time_ms = rtc::TimeMillis(); + auto start_time_ms = TimeMillis(); caller()->CreateAndSetAndSignalOffer(); // We want to stop when the time exceeds one second. 
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - auto elapsed_time_ms = rtc::TimeMillis() - start_time_ms; + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + auto elapsed_time_ms = TimeMillis() - start_time_ms; RTC_LOG(LS_INFO) << "Renegotiating took " << elapsed_time_ms << " ms"; ASSERT_GT(1000, elapsed_time_ms) << "Audio transceivers: Negotiation took too long after " @@ -3282,10 +3687,12 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, config.sdp_semantics = SdpSemantics::kUnifiedPlan; ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); ConnectFakeSignaling(); - caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + caller()->pc()->AddTransceiver(webrtc::MediaType::VIDEO); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); int current_size = caller()->pc()->GetTransceivers().size(); // Add more tracks until we get close to having issues. // Issues have been seen at: @@ -3296,15 +3703,17 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, while (current_size < 8) { // Double the number of tracks for (int i = 0; i < current_size; i++) { - caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + caller()->pc()->AddTransceiver(webrtc::MediaType::VIDEO); } current_size = caller()->pc()->GetTransceivers().size(); RTC_LOG(LS_INFO) << "Renegotiating with " << current_size << " tracks"; - auto start_time_ms = rtc::TimeMillis(); + auto start_time_ms = TimeMillis(); caller()->CreateAndSetAndSignalOffer(); // We want to stop when the time exceeds one second. - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - auto elapsed_time_ms = rtc::TimeMillis() - start_time_ms; + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + auto elapsed_time_ms = TimeMillis() - start_time_ms; RTC_LOG(LS_INFO) << "Renegotiating took " << elapsed_time_ms << " ms"; ASSERT_GT(1000, elapsed_time_ms) << "Video transceivers: Negotiation took too long after " @@ -3321,7 +3730,9 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, caller()->AddAudioTrack(); callee()->AddAudioTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Wait until we can see the audio flowing. MediaExpectations media_expectations; media_expectations.CalleeExpectsSomeAudio(); @@ -3338,15 +3749,17 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, while (current_size < 16) { // Double the number of tracks for (int i = 0; i < current_size; i++) { - caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + caller()->pc()->AddTransceiver(webrtc::MediaType::VIDEO); } current_size = caller()->pc()->GetTransceivers().size(); RTC_LOG(LS_INFO) << "Renegotiating with " << current_size << " tracks"; - auto start_time_ms = rtc::TimeMillis(); + auto start_time_ms = TimeMillis(); caller()->CreateAndSetAndSignalOffer(); // We want to stop when the time exceeds one second. 
- ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - auto elapsed_time_ms = rtc::TimeMillis() - start_time_ms; + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + auto elapsed_time_ms = TimeMillis() - start_time_ms; RTC_LOG(LS_INFO) << "Renegotiating took " << elapsed_time_ms << " ms"; // This is a guard against the test using excessive amounts of time. ASSERT_GT(5000, elapsed_time_ms) @@ -3360,7 +3773,7 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, TEST_F(PeerConnectionIntegrationTestUnifiedPlan, GetParametersHasEncodingsBeforeNegotiation) { ASSERT_TRUE(CreatePeerConnectionWrappers()); - auto result = caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + auto result = caller()->pc()->AddTransceiver(webrtc::MediaType::VIDEO); auto transceiver = result.MoveValue(); auto parameters = transceiver->sender()->GetParameters(); EXPECT_EQ(parameters.encodings.size(), 1u); @@ -3372,7 +3785,7 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, RtpTransceiverInit init; init.send_encodings.push_back({}); init.send_encodings[0].max_bitrate_bps = 12345; - auto result = caller()->pc()->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init); + auto result = caller()->pc()->AddTransceiver(webrtc::MediaType::VIDEO, init); auto transceiver = result.MoveValue(); auto parameters = transceiver->sender()->GetParameters(); ASSERT_EQ(parameters.encodings.size(), 1u); @@ -3418,7 +3831,9 @@ TEST_P(PeerConnectionIntegrationInteropTest, NoMediaLocalToNoMediaRemote) { ConnectFakeSignaling(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); } TEST_P(PeerConnectionIntegrationInteropTest, OneAudioLocalToNoMediaRemote) { @@ -3427,13 +3842,15 @@ TEST_P(PeerConnectionIntegrationInteropTest, OneAudioLocalToNoMediaRemote) { auto audio_sender = caller()->AddAudioTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Verify that one audio receiver has been created on the remote and that it // has the same track ID as the sending track. auto receivers = callee()->pc()->GetReceivers(); ASSERT_EQ(1u, receivers.size()); - EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, receivers[0]->media_type()); + EXPECT_EQ(webrtc::MediaType::AUDIO, receivers[0]->media_type()); EXPECT_EQ(receivers[0]->track()->id(), audio_sender->track()->id()); MediaExpectations media_expectations; @@ -3448,16 +3865,16 @@ TEST_P(PeerConnectionIntegrationInteropTest, OneAudioOneVideoToNoMediaRemote) { auto audio_sender = caller()->AddAudioTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Verify that one audio and one video receiver have been created on the // remote and that they have the same track IDs as the sending tracks. 
- auto audio_receivers = - callee()->GetReceiversOfType(cricket::MEDIA_TYPE_AUDIO); + auto audio_receivers = callee()->GetReceiversOfType(webrtc::MediaType::AUDIO); ASSERT_EQ(1u, audio_receivers.size()); EXPECT_EQ(audio_receivers[0]->track()->id(), audio_sender->track()->id()); - auto video_receivers = - callee()->GetReceiversOfType(cricket::MEDIA_TYPE_VIDEO); + auto video_receivers = callee()->GetReceiversOfType(webrtc::MediaType::VIDEO); ASSERT_EQ(1u, video_receivers.size()); EXPECT_EQ(video_receivers[0]->track()->id(), video_sender->track()->id()); @@ -3474,7 +3891,9 @@ TEST_P(PeerConnectionIntegrationInteropTest, callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudioAndVideo(); @@ -3489,15 +3908,19 @@ TEST_P(PeerConnectionIntegrationInteropTest, callee()->AddVideoTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Verify that only the audio track has been negotiated. - EXPECT_EQ(0u, caller()->GetReceiversOfType(cricket::MEDIA_TYPE_VIDEO).size()); + EXPECT_EQ(0u, caller()->GetReceiversOfType(webrtc::MediaType::VIDEO).size()); // Might also check that the callee's NegotiationNeeded flag is set. // Reverse roles. callee()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); MediaExpectations media_expectations; media_expectations.CallerExpectsSomeVideo(); @@ -3510,13 +3933,18 @@ TEST_P(PeerConnectionIntegrationTest, NewTracksDoNotCauseNewCandidates) { ConnectFakeSignaling(); caller()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); - ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return DtlsConnected(); }, ::testing::IsTrue()), + IsRtcOk()); caller()->ExpectCandidates(0); callee()->ExpectCandidates(0); caller()->AddAudioTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); } TEST_P(PeerConnectionIntegrationTest, MediaCallWithoutMediaEngineFails) { @@ -3544,7 +3972,9 @@ TEST_F(PeerConnectionIntegrationTestPlanB, TwoVideoUnifiedPlanToNoMediaPlanB) { caller()->AddVideoTrack(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Verify that there is only one receiver and it corresponds to the first // added track. 
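// Illustrative sketch, not part of the patch: the transceiver-stopping
// behaviour exercised in the hunks that follow. StopStandard() immediately
// ends the receiver track on the stopping side; after the next offer/answer
// the stopped transceiver is pruned on both ends and the remote track ends as
// well.
//
//   auto transceiver =
//       caller()->pc()->AddTransceiver(caller()->CreateLocalAudioTrack())
//           .MoveValue();
//   // ... initial offer/answer ...
//   transceiver->StopStandard();
//   EXPECT_EQ(MediaStreamTrackInterface::TrackState::kEnded,
//             transceiver->receiver()->track()->state());
//   // ... renegotiate; the stopped transceiver is then removed:
//   EXPECT_EQ(0u, caller()->pc()->GetTransceivers().size());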
@@ -3575,7 +4005,9 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, auto audio_transceiver = audio_transceiver_or_error.MoveValue(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); { MediaExpectations media_expectations; media_expectations.CalleeExpectsSomeAudio(); @@ -3586,7 +4018,9 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack()); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); { MediaExpectations media_expectations; media_expectations.CalleeExpectsSomeVideo(); @@ -3605,11 +4039,15 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, auto audio_transceiver = audio_transceiver_or_error.MoveValue(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); audio_transceiver->StopStandard(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); ASSERT_EQ(0U, caller()->pc()->GetTransceivers().size()); EXPECT_EQ(PeerConnectionInterface::kIceGatheringNew, caller()->pc()->ice_gathering_state()); @@ -3630,12 +4068,16 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, auto caller_transceiver = audio_transceiver_or_error.MoveValue(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); caller_transceiver->StopStandard(); auto callee_transceiver = callee()->pc()->GetTransceivers()[0]; caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); EXPECT_EQ(0U, caller()->pc()->GetTransceivers().size()); EXPECT_EQ(0U, callee()->pc()->GetTransceivers().size()); EXPECT_EQ(0U, caller()->pc()->GetSenders().size()); @@ -3657,14 +4099,18 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, auto audio_transceiver = audio_transceiver_or_error.MoveValue(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); auto caller_track = audio_transceiver->receiver()->track(); auto callee_track = callee()->pc()->GetReceivers()[0]->track(); audio_transceiver->StopStandard(); EXPECT_EQ(MediaStreamTrackInterface::TrackState::kEnded, caller_track->state()); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); EXPECT_EQ(MediaStreamTrackInterface::TrackState::kEnded, callee_track->state()); } @@ -3680,14 +4126,18 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, auto audio_transceiver = audio_transceiver_or_error.MoveValue(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return 
SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); auto caller_track = audio_transceiver->receiver()->track(); auto callee_track = callee()->pc()->GetReceivers()[0]->track(); audio_transceiver->StopStandard(); EXPECT_EQ(MediaStreamTrackInterface::TrackState::kEnded, caller_track->state()); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); EXPECT_EQ(MediaStreamTrackInterface::TrackState::kEnded, callee_track->state()); } @@ -3697,14 +4147,16 @@ TEST_P(PeerConnectionIntegrationTest, EndToEndRtpSenderVideoEncoderSelector) { CreateOneDirectionalPeerConnectionWrappers(/*caller_to_callee=*/true)); ConnectFakeSignaling(); // Add one-directional video, from caller to callee. - rtc::scoped_refptr caller_track = + scoped_refptr caller_track = caller()->CreateLocalVideoTrack(); auto sender = caller()->AddTrack(caller_track); PeerConnectionInterface::RTCOfferAnswerOptions options; options.offer_to_receive_video = 0; caller()->SetOfferAnswerOptions(options); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u); std::unique_ptr encoder_selector = @@ -3721,27 +4173,72 @@ TEST_P(PeerConnectionIntegrationTest, EndToEndRtpSenderVideoEncoderSelector) { EXPECT_TRUE(ExpectNewFrames(media_expectations)); } +TEST_P(PeerConnectionIntegrationTest, + EndToEndRtpSenderVideoEncoderSelectorSwitchCodec) { + ASSERT_TRUE( + CreateOneDirectionalPeerConnectionWrappers(/*caller_to_callee=*/true)); + ConnectFakeSignaling(); + // Add one-directional video, from caller to callee. + scoped_refptr caller_track = + caller()->CreateLocalVideoTrack(); + auto sender = caller()->AddTrack(caller_track); + PeerConnectionInterface::RTCOfferAnswerOptions options; + options.offer_to_receive_video = 0; + caller()->SetOfferAnswerOptions(options); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + ASSERT_EQ(callee()->pc()->GetReceivers().size(), 1u); + + std::unique_ptr encoder_selector = + std::make_unique(); + std::optional next_format; + EXPECT_CALL(*encoder_selector, OnCurrentEncoder) + .WillOnce(::testing::Invoke([&](const SdpVideoFormat& format) { + EXPECT_EQ(format.name, "VP8"); + next_format = SdpVideoFormat::VP9Profile0(); + })) + .WillOnce(::testing::Invoke([&](const SdpVideoFormat& format) { + EXPECT_EQ(format.name, "VP9"); + })); + EXPECT_CALL(*encoder_selector, OnAvailableBitrate) + .WillRepeatedly( + ::testing::Invoke([&](const DataRate& rate) { return next_format; })); + + sender->SetEncoderSelector(std::move(encoder_selector)); + + // Expect video to be received in one direction. 
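// Illustrative sketch, not part of the patch: how the SwitchCodec test above
// drives an encoder change through the sender's encoder selector. The selector
// is told the active codec via OnCurrentEncoder; whenever OnAvailableBitrate
// returns a different SdpVideoFormat, the sender switches to it and
// OnCurrentEncoder fires again with the new codec. Restated from the mock
// expectations in this hunk:
//
//   std::optional<SdpVideoFormat> next_format;
//   EXPECT_CALL(*encoder_selector, OnCurrentEncoder)
//       .WillOnce([&](const SdpVideoFormat& format) {
//         EXPECT_EQ(format.name, "VP8");                // Initially negotiated.
//         next_format = SdpVideoFormat::VP9Profile0();  // Ask for a switch.
//       })
//       .WillOnce([&](const SdpVideoFormat& format) {
//         EXPECT_EQ(format.name, "VP9");                // Switch took effect.
//       });
//   EXPECT_CALL(*encoder_selector, OnAvailableBitrate)
//       .WillRepeatedly([&](const DataRate& rate) { return next_format; });
//   sender->SetEncoderSelector(std::move(encoder_selector));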
+ MediaExpectations media_expectations; + media_expectations.CallerExpectsNoVideo(); + media_expectations.CalleeExpectsSomeVideo(); + + EXPECT_TRUE(ExpectNewFrames(media_expectations)); + + caller()->pc()->Close(); +} + int NacksReceivedCount(PeerConnectionIntegrationWrapper& pc) { - rtc::scoped_refptr report = pc.NewGetStats(); + scoped_refptr report = pc.NewGetStats(); auto sender_stats = report->GetStatsOfType(); if (sender_stats.size() != 1) { ADD_FAILURE(); return 0; } - if (!sender_stats[0]->nack_count.is_defined()) { + if (!sender_stats[0]->nack_count.has_value()) { return 0; } return *sender_stats[0]->nack_count; } int NacksSentCount(PeerConnectionIntegrationWrapper& pc) { - rtc::scoped_refptr report = pc.NewGetStats(); + scoped_refptr report = pc.NewGetStats(); auto receiver_stats = report->GetStatsOfType(); if (receiver_stats.size() != 1) { ADD_FAILURE(); return 0; } - if (!receiver_stats[0]->nack_count.is_defined()) { + if (!receiver_stats[0]->nack_count.has_value()) { return 0; } return *receiver_stats[0]->nack_count; @@ -3759,25 +4256,25 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, auto send_transceiver = audio_transceiver_or_error.MoveValue(); // Munge the SDP to include NACK and RRTR on Opus, and remove all other // codecs. - caller()->SetGeneratedSdpMunger([](cricket::SessionDescription* desc) { - for (ContentInfo& content : desc->contents()) { - cricket::AudioContentDescription* media = - content.media_description()->as_audio(); - std::vector codecs = media->codecs(); - std::vector codecs_out; - for (cricket::AudioCodec codec : codecs) { - if (codec.name == "opus") { - codec.AddFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamNack, cricket::kParamValueEmpty)); - codec.AddFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamRrtr, cricket::kParamValueEmpty)); - codecs_out.push_back(codec); + caller()->SetGeneratedSdpMunger( + [](std::unique_ptr& sdp) { + for (ContentInfo& content : sdp->description()->contents()) { + MediaContentDescription* media = content.media_description(); + std::vector codecs = media->codecs(); + std::vector codecs_out; + for (Codec codec : codecs) { + if (codec.name == "opus") { + codec.AddFeedbackParam( + FeedbackParam(kRtcpFbParamNack, kParamValueEmpty)); + codec.AddFeedbackParam( + FeedbackParam(kRtcpFbParamRrtr, kParamValueEmpty)); + codecs_out.push_back(codec); + } + } + EXPECT_FALSE(codecs_out.empty()); + media->set_codecs(codecs_out); } - } - EXPECT_FALSE(codecs_out.empty()); - media->set_codecs(codecs_out); - } - }); + }); caller()->CreateAndSetAndSignalOffer(); // Check for failure in helpers @@ -3792,12 +4289,16 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, // Wait until callee has sent at least one NACK. // Note that due to stats caching, this might only be visible 50 ms // after the nack was in fact sent. 
- EXPECT_TRUE_WAIT(NacksSentCount(*callee()) > 0, kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return NacksSentCount(*callee()); }, ::testing::Gt(0)), + IsRtcOk()); ASSERT_FALSE(HasFailure()); virtual_socket_server()->set_drop_probability(0.0); // Wait until caller has received at least one NACK - EXPECT_TRUE_WAIT(NacksReceivedCount(*caller()) > 0, kDefaultTimeout); + EXPECT_THAT(WaitUntil([&] { return NacksReceivedCount(*caller()); }, + ::testing::Gt(0)), + IsRtcOk()); } TEST_F(PeerConnectionIntegrationTestUnifiedPlan, VideoPacketLossCausesNack) { @@ -3810,23 +4311,23 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, VideoPacketLossCausesNack) { auto send_transceiver = video_transceiver_or_error.MoveValue(); // Munge the SDP to include NACK and RRTR on VP8, and remove all other // codecs. - caller()->SetGeneratedSdpMunger([](cricket::SessionDescription* desc) { - for (ContentInfo& content : desc->contents()) { - cricket::VideoContentDescription* media = - content.media_description()->as_video(); - std::vector codecs = media->codecs(); - std::vector codecs_out; - for (cricket::VideoCodec codec : codecs) { - if (codec.name == "VP8") { - ASSERT_TRUE(codec.HasFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamNack, cricket::kParamValueEmpty))); - codecs_out.push_back(codec); + caller()->SetGeneratedSdpMunger( + [](std::unique_ptr& sdp) { + for (ContentInfo& content : sdp->description()->contents()) { + MediaContentDescription* media = content.media_description(); + std::vector codecs = media->codecs(); + std::vector codecs_out; + for (const Codec& codec : codecs) { + if (codec.name == "VP8") { + ASSERT_TRUE(codec.HasFeedbackParam( + FeedbackParam(kRtcpFbParamNack, kParamValueEmpty))); + codecs_out.push_back(codec); + } + } + EXPECT_FALSE(codecs_out.empty()); + media->set_codecs(codecs_out); } - } - EXPECT_FALSE(codecs_out.empty()); - media->set_codecs(codecs_out); - } - }); + }); caller()->CreateAndSetAndSignalOffer(); // Check for failure in helpers @@ -3841,11 +4342,571 @@ TEST_F(PeerConnectionIntegrationTestUnifiedPlan, VideoPacketLossCausesNack) { // Wait until callee has sent at least one NACK. // Note that due to stats caching, this might only be visible 50 ms // after the nack was in fact sent. 
- EXPECT_TRUE_WAIT(NacksSentCount(*callee()) > 0, kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return NacksSentCount(*callee()); }, ::testing::Gt(0)), + IsRtcOk()); ASSERT_FALSE(HasFailure()); // Wait until caller has received at least one NACK - EXPECT_TRUE_WAIT(NacksReceivedCount(*caller()) > 0, kDefaultTimeout); + EXPECT_THAT(WaitUntil([&] { return NacksReceivedCount(*caller()); }, + ::testing::Gt(0)), + IsRtcOk()); +} + +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, PrAnswerStateTransitions) { + RTCConfiguration config; + ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); + ConnectFakeSignaling(); + caller()->pc()->AddTransceiver(caller()->CreateLocalAudioTrack()); + caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack()); + + callee()->SetGeneratedSdpMunger( + [](std::unique_ptr& sdp) { + SetSdpType(sdp, SdpType::kPrAnswer); + }); + std::unique_ptr answer; + caller()->SetReceivedSdpMunger( + [&](std::unique_ptr& sdp) { + answer = sdp->Clone(); + }); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_FALSE(HasFailure()); + EXPECT_EQ(caller()->pc()->signaling_state(), + PeerConnectionInterface::kHaveRemotePrAnswer); + EXPECT_EQ(callee()->pc()->signaling_state(), + PeerConnectionInterface::kHaveLocalPrAnswer); + + // // Apply the pranswer as a definitive one. + SetSdpType(answer, SdpType::kAnswer); + EXPECT_TRUE(caller()->SetRemoteDescription(std::move(answer))); + EXPECT_EQ(caller()->pc()->signaling_state(), + PeerConnectionInterface::kStable); +} + +// Let caller get a prAnswer followed by answer. +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, + PrAnswerStateTransitionsAsymmetric) { + RTCConfiguration config; + ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); + ConnectFakeSignaling(); + caller()->pc()->AddTransceiver(caller()->CreateLocalAudioTrack()); + caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack()); + + std::unique_ptr answer; + caller()->SetReceivedSdpMunger( + [&](std::unique_ptr& sdp) { + answer = sdp->Clone(); + SetSdpType(sdp, SdpType::kPrAnswer); + }); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_FALSE(HasFailure()); + EXPECT_EQ(caller()->pc()->signaling_state(), + PeerConnectionInterface::kHaveRemotePrAnswer); + EXPECT_EQ(callee()->pc()->signaling_state(), + PeerConnectionInterface::kStable); + + // // Apply the pranswer as a definitive one. 
+ EXPECT_TRUE(caller()->SetRemoteDescription(std::move(answer))); + EXPECT_EQ(caller()->pc()->signaling_state(), + PeerConnectionInterface::kStable); +} + +int ReassignPayloadIds(std::unique_ptr& sdp) { + int swaps = 0; + for (ContentInfo& content : sdp->description()->contents()) { + if (!content.media_description()) { + continue; + } + std::vector codecs = content.media_description()->codecs(); + int left = 0; + int right = codecs.size() - 1; + while (left < right) { + if (!codecs[left].IsMediaCodec()) { + left++; + continue; + } + if (!codecs[right].IsMediaCodec()) { + right--; + continue; + } + auto tmp = codecs[left].id; + codecs[left].id = codecs[right].id; + codecs[right].id = tmp; + left++; + right--; + swaps++; + } + content.media_description()->set_codecs(codecs); + } + return swaps; +} + +int SetNewSsrcs(std::unique_ptr& sdp) { + int assignments = 0; + std::unordered_set already_used_ssrcs; + for (ContentInfo& content : sdp->description()->contents()) { + if (!content.media_description()) { + continue; + } + for (const auto& stream : content.media_description()->streams()) { + for (const auto& ssrc : stream.ssrcs) { + already_used_ssrcs.insert(ssrc); + } + } + } + + Random random(/* random_seed= */ 77); + auto ssrc_generator = [&]() -> uint32_t { + do { + auto ssrc = random.Rand(1u, 0xFFFFFFF0u); + if (already_used_ssrcs.find(ssrc) == already_used_ssrcs.end()) { + already_used_ssrcs.insert(ssrc); + return ssrc; + } + } while (true); + }; + + for (ContentInfo& content : sdp->description()->contents()) { + if (!content.media_description()) { + continue; + } + for (auto& stream : content.media_description()->mutable_streams()) { + // Only reassign primary ssrc for now... + // but we should maybe also reassign ssrcs for ssrc groups?. + if (stream.ssrcs.size() == 1) { + assignments++; + stream.ssrcs[0] = ssrc_generator(); + } + } + } + return assignments; +} + +void SetNewFingerprint(std::unique_ptr& sdp) { + auto identity = SSLIdentity::Create("NewIdentity", KT_DEFAULT); + auto new_fingerprint = SSLFingerprint::CreateUnique("sha-256", *identity); + for (auto& transport_info : sdp->description()->transport_infos()) { + transport_info.description.identity_fingerprint = + absl::WrapUnique(new SSLFingerprint(*new_fingerprint)); + } +} + +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, + PrAnswerStateTransitionsAsymmetricScrambled) { + RTCConfiguration config; + ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); + ConnectFakeSignaling(); + webrtc::RtpEncodingParameters init_send_encodings; + init_send_encodings.active = false; + caller()->pc()->AddTrack(caller()->CreateLocalAudioTrack(), {"name"}, + {init_send_encodings}); + caller()->pc()->AddTrack(caller()->CreateLocalVideoTrack(), {"name"}, + {init_send_encodings}); + callee()->pc()->AddTrack(callee()->CreateLocalAudioTrack(), {"name"}, + {init_send_encodings}); + callee()->pc()->AddTrack(callee()->CreateLocalVideoTrack(), {"name"}, + {init_send_encodings}); + + std::unique_ptr answer; + caller()->SetReceivedSdpMunger( + [&](std::unique_ptr& sdp) { + answer = sdp->Clone(); + SetSdpType(sdp, SdpType::kPrAnswer); + }); + caller()->CreateAndSetAndSignalOffer(); + + ASSERT_FALSE(HasFailure()); + ASSERT_EQ(caller()->pc()->signaling_state(), + PeerConnectionInterface::kHaveRemotePrAnswer); + ASSERT_EQ(callee()->pc()->signaling_state(), + PeerConnectionInterface::kStable); + + // Now scramble the answer sdp so that it (really!) different from the first + // prAnswer. 
+ // Note: this is maybe {possibly...probably?} a spec violation. + ASSERT_GT(SetNewSsrcs(answer), 0); + ASSERT_GT(ReassignPayloadIds(answer), 0); + SetNewFingerprint(answer); + + // Apply the modified answer as a definitive one. + EXPECT_TRUE(caller()->SetRemoteDescription(std::move(answer))); + EXPECT_EQ(caller()->pc()->signaling_state(), + PeerConnectionInterface::kStable); +} + +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, + OnlyOnePairWantsCorruptionScorePlumbing) { + // In order for corruption score to be logged, encryption of RTP header + // extensions must be allowed. + CryptoOptions crypto_options; + crypto_options.srtp.enable_encrypted_rtp_header_extensions = true; + PeerConnectionInterface::RTCConfiguration config; + config.crypto_options = crypto_options; + config.offer_extmap_allow_mixed = true; + ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); + ConnectFakeSignaling(); + + // Do normal offer/answer and wait for some frames to be received in each + // direction, and `corruption_score` to be aggregated. + caller()->AddAudioVideoTracks(); + callee()->AddAudioVideoTracks(); + // Negotiate the corruption detection header extension in SDP. + // If caller adds corruption detection header extension to its SDP offer, it + // will receive it from the callee. + caller()->NegotiateCorruptionDetectionHeader(); + + caller()->CreateAndSetAndSignalOffer(); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return caller()->GetCorruptionScoreCount(); }, + ::testing::Gt(0), {.timeout = kMaxWaitForStats}), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->GetCorruptionScoreCount(); }, + ::testing::Eq(0), {.timeout = kMaxWaitForStats}), + IsRtcOk()); + + for (const auto& pair : {caller(), callee()}) { + scoped_refptr report = pair->NewGetStats(); + ASSERT_TRUE(report); + auto inbound_stream_stats = + report->GetStatsOfType(); + for (const auto& stat : inbound_stream_stats) { + if (*stat->kind == "video") { + if (pair == caller()) { + EXPECT_TRUE(stat->total_corruption_probability.has_value()); + EXPECT_TRUE(stat->total_squared_corruption_probability.has_value()); + + double average_corruption_score = + (*stat->total_corruption_probability) / + static_cast(*stat->corruption_measurements); + EXPECT_GE(average_corruption_score, 0.0); + EXPECT_LE(average_corruption_score, 1.0); + } + if (pair == callee()) { + // Since only `caller` requests corruption score calculation the + // callee should not aggregate it. + EXPECT_FALSE(stat->total_corruption_probability.has_value()); + EXPECT_FALSE(stat->total_squared_corruption_probability.has_value()); + } + } + } + } +} + +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, + BothPairsWantCorruptionScorePlumbing) { + // In order for corruption score to be logged, encryption of RTP header + // extensions must be allowed. + CryptoOptions crypto_options; + crypto_options.srtp.enable_encrypted_rtp_header_extensions = true; + PeerConnectionInterface::RTCConfiguration config; + config.crypto_options = crypto_options; + config.offer_extmap_allow_mixed = true; + ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); + ConnectFakeSignaling(); + + // Do normal offer/answer and wait for some frames to be received in each + // direction, and `corruption_score` to be aggregated. + caller()->AddAudioVideoTracks(); + callee()->AddAudioVideoTracks(); + + // Negotiate the corruption detection header extension in SDP. 
+ // If caller adds corruption detection header extension to its SDP offer, it + // will receive it from the callee. + caller()->NegotiateCorruptionDetectionHeader(); + callee()->NegotiateCorruptionDetectionHeader(); + + caller()->CreateAndSetAndSignalOffer(); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return caller()->GetCorruptionScoreCount(); }, + ::testing::Gt(0), {.timeout = kMaxWaitForStats}), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->GetCorruptionScoreCount(); }, + ::testing::Gt(0), {.timeout = kMaxWaitForStats}), + IsRtcOk()); + + for (const auto& pair : {caller(), callee()}) { + scoped_refptr report = pair->NewGetStats(); + ASSERT_TRUE(report); + auto inbound_stream_stats = + report->GetStatsOfType(); + for (const auto& stat : inbound_stream_stats) { + if (*stat->kind == "video") { + EXPECT_TRUE(stat->total_corruption_probability.has_value()); + EXPECT_TRUE(stat->total_squared_corruption_probability.has_value()); + + double average_corruption_score = + (*stat->total_corruption_probability) / + static_cast(*stat->corruption_measurements); + EXPECT_GE(average_corruption_score, 0.0); + EXPECT_LE(average_corruption_score, 1.0); + } + } + } +} + +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, + CorruptionScorePlumbingShouldNotWorkWhenEncryptionIsOff) { + // In order for corruption score to be logged, encryption of RTP header + // extensions must be allowed. + CryptoOptions crypto_options; + crypto_options.srtp.enable_encrypted_rtp_header_extensions = false; + PeerConnectionInterface::RTCConfiguration config; + config.crypto_options = crypto_options; + config.offer_extmap_allow_mixed = true; + ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); + ConnectFakeSignaling(); + + // Negotiate the corruption detection header extension in SDP. + // If caller adds corruption detection header extension to its SDP offer, it + // will receive it from the callee. + caller()->NegotiateCorruptionDetectionHeader(); + callee()->NegotiateCorruptionDetectionHeader(); + + // Do normal offer/answer and wait for some frames to be received in each + // direction, and `corruption_score` to be aggregated. 
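// The per-pair stats checks in these corruption tests derive an average score
// from the aggregated inbound-rtp counters. A minimal sketch of that
// computation, assuming `stat` points at an inbound RTP stream stats entry
// with the corruption fields populated (the stats type's template argument is
// elided in this patch text, so the spelling here is illustrative):
//
//   double average_corruption_score =
//       *stat->total_corruption_probability /
//       static_cast<double>(*stat->corruption_measurements);
//   // Each per-frame probability is in [0, 1], so the average is expected to
//   // stay in that range as well.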
+ caller()->AddAudioVideoTracks(); + callee()->AddAudioVideoTracks(); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return caller()->GetCorruptionScoreCount(); }, + ::testing::Eq(0), {.timeout = kMaxWaitForStats}), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee()->GetCorruptionScoreCount(); }, + ::testing::Eq(0), {.timeout = kMaxWaitForStats}), + IsRtcOk()); + + for (const auto& pair : {caller(), callee()}) { + scoped_refptr report = pair->NewGetStats(); + ASSERT_TRUE(report); + auto inbound_stream_stats = + report->GetStatsOfType(); + for (const auto& stat : inbound_stream_stats) { + if (*stat->kind == "video") { + EXPECT_FALSE(stat->total_corruption_probability.has_value()); + EXPECT_FALSE(stat->total_squared_corruption_probability.has_value()); + } + } + } +} + +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, + AbsCaptureTimestampShouldBeMeteredCorrectly) { + metrics::Reset(); + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + caller()->AddVideoTrack(); + auto transceiver = caller()->pc()->GetTransceivers()[0]; + auto extensions = transceiver->GetHeaderExtensionsToNegotiate(); + bool found = false; + for (auto& extension : extensions) { + if (extension.uri == RtpExtension::kAbsoluteCaptureTimeUri) { + extension.direction = RtpTransceiverDirection::kSendRecv; + found = true; + break; + } + } + ASSERT_TRUE(found); + ASSERT_TRUE(transceiver->SetHeaderExtensionsToNegotiate(extensions).ok()); + + caller()->CreateAndSetAndSignalOffer(); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + const auto& negotiated_header_extensions = caller() + ->pc() + ->remote_description() + ->description() + ->contents()[0] + .media_description() + ->rtp_header_extensions(); + ASSERT_TRUE(RtpExtension::FindHeaderExtensionByUri( + negotiated_header_extensions, RtpExtension::kAbsoluteCaptureTimeUri, + RtpExtension::kDiscardEncryptedExtension)); + ASSERT_THAT( + WaitUntil( + [&] { + return metrics::NumSamples("WebRTC.Call.AbsCapture.ExtensionWait"); + }, + ::testing::Gt(0)), + IsRtcOk()); + // Observed deltas are more than 100 msec. Use 1 minute as tolerance; + // this is a check against wrong timebase. + EXPECT_LT(metrics::MinSample("WebRTC.Call.AbsCapture.Delta"), 60'000'000); + ASSERT_THAT( + WaitUntil( + [&] { + return metrics::NumSamples("WebRTC.Call.AbsCapture.OffsetWait"); + }, + ::testing::Gt(0)), + IsRtcOk()); + // On a point-to-point call, we expect the offset to be zero. 
+ EXPECT_LT(metrics::MinSample("WebRTC.Call.AbsCapture.Offset"), 2); +} + +// Test that when SDP is munged to use a PT for a different codec, +// the old codec is added to a subsequent offer with a different PT +// Regression test for https://issues.chromium.org/395077824 +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, + MungeOfferCodecAndReOfferWorks) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignaling(); + caller()->AddVideoTrack(); + auto munger = [](std::unique_ptr& sdp) { + auto video = GetFirstVideoContentDescription(sdp->description()); + auto codecs = video->codecs(); + std::optional replacement_codec; + for (auto&& codec : codecs) { + if (codec.name == "AV1") { + replacement_codec = codec; + break; + } + } + if (replacement_codec) { + for (auto&& codec : codecs) { + if (codec.name == "VP9") { + RTC_LOG(LS_INFO) << "Remapping VP9 codec " << codec << " to AV1"; + codec.name = replacement_codec->name; + codec.params = replacement_codec->params; + break; + } + } + video->set_codecs(codecs); + } else { + RTC_LOG(LS_INFO) << "Skipping munge, no AV1 codec found"; + } + }; + caller()->SetGeneratedSdpMunger(munger); + caller()->CreateAndSetAndSignalOffer(); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + caller()->SetGeneratedSdpMunger(nullptr); + auto offer = caller()->CreateOfferAndWait(); + ASSERT_NE(nullptr, offer); + // The offer should be acceptable. + EXPECT_TRUE(caller()->SetLocalDescriptionAndSendSdpMessage(std::move(offer))); +} + +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, + SensibleRtxWithDuplicateCodecs) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + caller()->AddVideoTrack(); + // Copied from WPT test webrtc/protocol/rtx-codecs.https.html + std::string remote_offer_string = + "v=0\r\n" + "o=- 1878890426675213188 2 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=group:BUNDLE video\r\n" + "a=msid-semantic: WMS\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 96 97 98 99\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp:9 IN IP4 0.0.0.0\r\n" + "a=ice-ufrag:RGPK\r\n" + "a=ice-pwd:rAyHEAKC7ckxQgWaRZXukz+Z\r\n" + "a=ice-options:trickle\r\n" + "a=fingerprint:sha-256 " + "8C:29:0A:8F:11:06:BF:1C:58:B3:CA:E6:F1:F1:DC:99:4C:6C:89:E9:FF:BC:D4:38:" + "11:18:1F:40:19:C8:49:37\r\n" + "a=setup:actpass\r\n" + "a=mid:video\r\n" + "a=recvonly\r\n" + "a=rtcp-mux\r\n" + "a=rtpmap:96 VP8/90000\r\n" + "a=rtpmap:97 rtx/90000\r\n" + "a=fmtp:97 apt=98\r\n" + "a=rtpmap:98 VP8/90000\r\n" + "a=rtcp-fb:98 ccm fir\r\n" + "a=rtcp-fb:98 nack\r\n" + "a=rtcp-fb:98 nack pli\r\n" + "a=rtcp-fb:98 goog-remb\r\n" + "a=rtcp-fb:98 transport-cc\r\n" + "a=rtpmap:99 rtx/90000\r\n" + "a=fmtp:99 apt=96\r\n"; + auto srd_observer = make_ref_counted(); + std::unique_ptr remote_offer = + CreateSessionDescription(SdpType::kOffer, remote_offer_string); + EXPECT_TRUE(caller()->SetRemoteDescription(std::move(remote_offer))); + // The resulting SDP answer should have one video codec with a correctly + // associated RTX codec. 
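// For context on the remote offer above: "a=rtpmap:97 rtx/90000" together
// with "a=fmtp:97 apt=98" declares payload type 97 as a retransmission (RTX)
// stream whose associated payload type (apt) is 98, and likewise 99 is RTX
// for 96. The check that follows collects every apt value referenced by an
// RTX codec in the generated answer and verifies that each one resolves to a
// codec id that is actually present in that answer.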
+ std::unique_ptr answer = + caller()->CreateAnswerForTest(); + ASSERT_THAT(answer, NotNull()); + RTC_LOG(LS_ERROR) << "Answer is " << *answer; + ASSERT_THAT(answer->description()->contents().size(), Eq(1)); + auto codecs = + answer->description()->contents()[0].media_description()->codecs(); + std::vector apt_values; + for (const Codec& codec : codecs) { + if (codec.GetResiliencyType() == Codec::ResiliencyType::kRtx) { + const auto apt_it = codec.params.find(kCodecParamAssociatedPayloadType); + int apt_value; + ASSERT_TRUE(FromString(apt_it->second, &apt_value)); + apt_values.push_back(apt_value); + } + } + for (int apt : apt_values) { + EXPECT_THAT(codecs, Contains(Field("id", &Codec::id, apt))); + } +} + +// This test documents the behavior expected in +// https://issues.webrtc.org/412904801. It does not constitute a promise +// that this mechanism will go on working. +TEST_F(PeerConnectionIntegrationTestUnifiedPlan, + MungeRawPacketizationChangesSubsequentSections) { + ASSERT_TRUE(CreatePeerConnectionWrappers()); + // Add first video track. + caller()->AddVideoTrack(); + auto offer = caller()->CreateOfferAndWait(); + EXPECT_EQ(offer->description()->contents().size(), 1U); + // Observe that packetization is NOT raw. + for (const auto& content : offer->description()->contents()) { + for (const auto& codec : content.media_description()->codecs()) { + ASSERT_THAT(codec.packetization, Eq(std::nullopt)); + } + } + // Mangle packetization to be raw. + for (auto& content : offer->description()->contents()) { + std::vector codecs = content.media_description()->codecs(); + bool mangled_raw = false; + for (auto& codec : codecs) { + if (codec.name == "VP8" && codec.type == Codec::Type::kVideo) { + codec.packetization = kPacketizationParamRaw; + mangled_raw = true; + } + } + ASSERT_TRUE(mangled_raw); + content.media_description()->set_codecs(codecs); + } + // Set local description. + auto observer = make_ref_counted(); + caller()->pc()->SetLocalDescription(observer.get(), offer.release()); + // Wait for SLD to complete. + EXPECT_THAT( + WaitUntil([&] { return observer->called(); }, ::testing::IsTrue()), + IsRtcOk()); + // Add a second video track. + caller()->AddVideoTrack(); + auto offer2 = caller()->CreateOfferAndWait(); + // Observe that packetization is raw on BOTH media sections. 
+ ASSERT_THAT(offer2, NotNull()); + EXPECT_EQ(offer2->description()->contents().size(), 2U); + for (const auto& content : offer2->description()->contents()) { + for (const auto& codec : content.media_description()->codecs()) { + if (codec.type == Codec::Type::kVideo && codec.name == "VP8") { + EXPECT_EQ(codec.packetization, kPacketizationParamRaw); + } else { + EXPECT_THAT(codec.packetization, Eq(std::nullopt)); + } + } + } } } // namespace diff --git a/pc/peer_connection_interface_unittest.cc b/pc/peer_connection_interface_unittest.cc index 3023be1493..d56492afde 100644 --- a/pc/peer_connection_interface_unittest.cc +++ b/pc/peer_connection_interface_unittest.cc @@ -13,31 +13,41 @@ #include #include +#include +#include +#include #include #include #include #include "absl/strings/str_replace.h" -#include "absl/types/optional.h" -#include "api/audio/audio_mixer.h" +#include "absl/strings/string_view.h" +#include "api/audio/audio_device.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" -#include "api/call/call_factory_interface.h" #include "api/create_peerconnection_factory.h" #include "api/data_channel_interface.h" +#include "api/enable_media_with_defaults.h" +#include "api/environment/environment_factory.h" #include "api/jsep.h" +#include "api/make_ref_counted.h" #include "api/media_stream_interface.h" #include "api/media_types.h" #include "api/rtc_error.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/rtc_event_log_output.h" +#include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" #include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_direction.h" #include "api/scoped_refptr.h" #include "api/task_queue/default_task_queue_factory.h" +#include "api/test/rtc_error_matchers.h" +#include "api/transport/bitrate_settings.h" +#include "api/transport/enums.h" #include "api/transport/field_trial_based_config.h" +#include "api/units/time_delta.h" #include "api/video_codecs/video_decoder_factory_template.h" #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h" @@ -50,19 +60,14 @@ #include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" #include "media/base/codec.h" #include "media/base/media_config.h" -#include "media/base/media_engine.h" #include "media/base/stream_params.h" -#include "media/engine/webrtc_media_engine.h" -#include "media/engine/webrtc_media_engine_defaults.h" #include "media/sctp/sctp_transport_internal.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "p2p/base/fake_port_allocator.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/port.h" #include "p2p/base/port_allocator.h" #include "p2p/base/transport_description.h" #include "p2p/base/transport_info.h" +#include "p2p/test/fake_port_allocator.h" #include "pc/audio_track.h" #include "pc/media_session.h" #include "pc/media_stream.h" @@ -79,14 +84,14 @@ #include "pc/test/test_sdp_strings.h" #include "pc/video_track.h" #include "rtc_base/checks.h" -#include "rtc_base/gunit.h" #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/socket_address.h" +#include "rtc_base/socket_server.h" #include "rtc_base/thread.h" #include "rtc_base/virtual_socket_server.h" #include "test/gmock.h" #include "test/gtest.h" -#include "test/scoped_key_value_config.h" 
+#include "test/wait_until.h" #ifdef WEBRTC_ANDROID #include "pc/test/android_test_initializer.h" @@ -115,7 +120,6 @@ static const char kVideoTracks[][32] = {"videotrack0", "videotrack1"}; static const char kRecvonly[] = "recvonly"; static const char kSendrecv[] = "sendrecv"; -constexpr uint64_t kTiebreakerDefault = 44444; // Reference SDP with a MediaStream with label "stream1" and audio track with // id "audio_1" and a video track with id "video_1; @@ -421,36 +425,12 @@ static const char kSdpStringMs1Video1[] = "a=ssrc:4 cname:stream1\r\n" "a=ssrc:4 msid:stream1 videotrack1\r\n"; -static const char kDtlsSdesFallbackSdp[] = - "v=0\r\n" - "o=xxxxxx 7 2 IN IP4 0.0.0.0\r\n" - "s=-\r\n" - "c=IN IP4 0.0.0.0\r\n" - "t=0 0\r\n" - "a=group:BUNDLE audio\r\n" - "a=msid-semantic: WMS\r\n" - "m=audio 1 RTP/SAVPF 0\r\n" - "a=sendrecv\r\n" - "a=rtcp-mux\r\n" - "a=mid:audio\r\n" - "a=ssrc:1 cname:stream1\r\n" - "a=ice-ufrag:e5785931\r\n" - "a=ice-pwd:36fb7878390db89481c1d46daa4278d8\r\n" - "a=rtpmap:0 pcmu/8000\r\n" - "a=fingerprint:sha-1 " - "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" - "a=setup:actpass\r\n" - "a=crypto:0 AES_CM_128_HMAC_SHA1_80 " - "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 " - "dummy_session_params\r\n"; - class RtcEventLogOutputNull final : public RtcEventLogOutput { public: bool IsActive() const override { return true; } bool Write(const absl::string_view /*output*/) override { return true; } }; -using ::cricket::StreamParams; using ::testing::Eq; using ::testing::Exactly; using ::testing::SizeIs; @@ -460,12 +440,11 @@ using RTCConfiguration = PeerConnectionInterface::RTCConfiguration; using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions; // Gets the first ssrc of given content type from the ContentInfo. -bool GetFirstSsrc(const cricket::ContentInfo* content_info, int* ssrc) { +bool GetFirstSsrc(const ContentInfo* content_info, int* ssrc) { if (!content_info || !ssrc) { return false; } - const cricket::MediaContentDescription* media_desc = - content_info->media_description(); + const MediaContentDescription* media_desc = content_info->media_description(); if (!media_desc || media_desc->streams().empty()) { return false; } @@ -475,11 +454,9 @@ bool GetFirstSsrc(const cricket::ContentInfo* content_info, int* ssrc) { // Get the ufrags out of an SDP blob. Useful for testing ICE restart // behavior. -std::vector GetUfrags( - const webrtc::SessionDescriptionInterface* desc) { +std::vector GetUfrags(const SessionDescriptionInterface* desc) { std::vector ufrags; - for (const cricket::TransportInfo& info : - desc->description()->transport_infos()) { + for (const TransportInfo& info : desc->description()->transport_infos()) { ufrags.push_back(info.description.ice_ufrag); } return ufrags; @@ -498,10 +475,10 @@ void SetSsrcToZero(std::string* sdp) { } // Check if `streams` contains the specified track. -bool ContainsTrack(const std::vector& streams, +bool ContainsTrack(const std::vector& streams, const std::string& stream_id, const std::string& track_id) { - for (const cricket::StreamParams& params : streams) { + for (const StreamParams& params : streams) { if (params.first_stream_id() == stream_id && params.id == track_id) { return true; } @@ -511,7 +488,7 @@ bool ContainsTrack(const std::vector& streams, // Check if `senders` contains the specified sender, by id. 
bool ContainsSender( - const std::vector>& senders, + const std::vector>& senders, const std::string& id) { for (const auto& sender : senders) { if (sender->id() == id) { @@ -523,7 +500,7 @@ bool ContainsSender( // Check if `senders` contains the specified sender, by id and stream id. bool ContainsSender( - const std::vector>& senders, + const std::vector>& senders, const std::string& id, const std::string& stream_id) { for (const auto& sender : senders) { @@ -538,28 +515,24 @@ bool ContainsSender( // CreateStreamCollection(1) creates a collection that // correspond to kSdpStringWithStream1. // CreateStreamCollection(2) correspond to kSdpStringWithStream1And2. -rtc::scoped_refptr CreateStreamCollection( - int number_of_streams, - int tracks_per_stream) { - rtc::scoped_refptr local_collection( - StreamCollection::Create()); +scoped_refptr CreateStreamCollection(int number_of_streams, + int tracks_per_stream) { + scoped_refptr local_collection(StreamCollection::Create()); for (int i = 0; i < number_of_streams; ++i) { - rtc::scoped_refptr stream( - webrtc::MediaStream::Create(kStreams[i])); + scoped_refptr stream( + MediaStream::Create(kStreams[i])); for (int j = 0; j < tracks_per_stream; ++j) { // Add a local audio track. - rtc::scoped_refptr audio_track( - webrtc::AudioTrack::Create(kAudioTracks[i * tracks_per_stream + j], - nullptr)); + scoped_refptr audio_track( + AudioTrack::Create(kAudioTracks[i * tracks_per_stream + j], nullptr)); stream->AddTrack(audio_track); // Add a local video track. - rtc::scoped_refptr video_track( - webrtc::VideoTrack::Create(kVideoTracks[i * tracks_per_stream + j], - webrtc::FakeVideoTrackSource::Create(), - rtc::Thread::Current())); + scoped_refptr video_track(VideoTrack::Create( + kVideoTracks[i * tracks_per_stream + j], + FakeVideoTrackSource::Create(), Thread::Current())); stream->AddTrack(video_track); } @@ -579,10 +552,10 @@ bool CompareStreamCollections(StreamCollectionInterface* s1, if (s1->at(i)->id() != s2->at(i)->id()) { return false; } - webrtc::AudioTrackVector audio_tracks1 = s1->at(i)->GetAudioTracks(); - webrtc::AudioTrackVector audio_tracks2 = s2->at(i)->GetAudioTracks(); - webrtc::VideoTrackVector video_tracks1 = s1->at(i)->GetVideoTracks(); - webrtc::VideoTrackVector video_tracks2 = s2->at(i)->GetVideoTracks(); + AudioTrackVector audio_tracks1 = s1->at(i)->GetAudioTracks(); + AudioTrackVector audio_tracks2 = s2->at(i)->GetAudioTracks(); + VideoTrackVector video_tracks1 = s1->at(i)->GetVideoTracks(); + VideoTrackVector video_tracks2 = s2->at(i)->GetVideoTracks(); if (audio_tracks1.size() != audio_tracks2.size()) { return false; @@ -631,48 +604,41 @@ class MockTrackObserver : public ObserverInterface { // constraints are propagated into the PeerConnection's MediaConfig. These // settings are intended for MediaChannel constructors, but that is not // exercised by these unittest. 
-class PeerConnectionFactoryForTest : public webrtc::PeerConnectionFactory { +class PeerConnectionFactoryForTest : public PeerConnectionFactory { public: - static rtc::scoped_refptr + static scoped_refptr CreatePeerConnectionFactoryForTest() { PeerConnectionFactoryDependencies dependencies; - dependencies.worker_thread = rtc::Thread::Current(); - dependencies.network_thread = rtc::Thread::Current(); - dependencies.signaling_thread = rtc::Thread::Current(); + dependencies.worker_thread = Thread::Current(); + dependencies.network_thread = Thread::Current(); + dependencies.signaling_thread = Thread::Current(); dependencies.task_queue_factory = CreateDefaultTaskQueueFactory(); dependencies.trials = std::make_unique(); - cricket::MediaEngineDependencies media_deps; - media_deps.task_queue_factory = dependencies.task_queue_factory.get(); // Use fake audio device module since we're only testing the interface // level, and using a real one could make tests flaky when run in parallel. - media_deps.adm = FakeAudioCaptureModule::Create(); - SetMediaEngineDefaults(&media_deps); - media_deps.trials = dependencies.trials.get(); - dependencies.media_engine = - cricket::CreateMediaEngine(std::move(media_deps)); - dependencies.call_factory = webrtc::CreateCallFactory(); - dependencies.event_log_factory = std::make_unique( - dependencies.task_queue_factory.get()); - - return rtc::make_ref_counted( + dependencies.adm = FakeAudioCaptureModule::Create(); + EnableMediaWithDefaults(dependencies); + dependencies.event_log_factory = std::make_unique(); + + return make_ref_counted( std::move(dependencies)); } using PeerConnectionFactory::PeerConnectionFactory; private: - rtc::scoped_refptr fake_audio_capture_module_; + scoped_refptr fake_audio_capture_module_; }; // TODO(steveanton): Convert to use the new PeerConnectionWrapper. class PeerConnectionInterfaceBaseTest : public ::testing::Test { protected: explicit PeerConnectionInterfaceBaseTest(SdpSemantics sdp_semantics) - : vss_(new rtc::VirtualSocketServer()), + : vss_(new VirtualSocketServer()), main_(vss_.get()), sdp_semantics_(sdp_semantics) { #ifdef WEBRTC_ANDROID - webrtc::InitializeAndroidObjects(); + InitializeAndroidObjects(); #endif } @@ -680,22 +646,16 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { // Use fake audio capture module since we're only testing the interface // level, and using a real one could make tests flaky when run in parallel. 
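// The factory above is now assembled through PeerConnectionFactoryDependencies
// plus EnableMediaWithDefaults() (api/enable_media_with_defaults.h) instead of
// a hand-built media engine. A minimal sketch of that pattern, assuming a fake
// ADM; template arguments are elided in this patch text, so the exact
// spellings below are reconstructed for illustration:
//
//   PeerConnectionFactoryDependencies deps;
//   deps.worker_thread = Thread::Current();
//   deps.network_thread = Thread::Current();
//   deps.signaling_thread = Thread::Current();
//   deps.adm = FakeAudioCaptureModule::Create();
//   EnableMediaWithDefaults(deps);  // wires up default media/codec factories
//   auto factory =
//       make_ref_counted<PeerConnectionFactoryForTest>(std::move(deps));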
fake_audio_capture_module_ = FakeAudioCaptureModule::Create(); - pc_factory_ = webrtc::CreatePeerConnectionFactory( - rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(), - rtc::scoped_refptr( - fake_audio_capture_module_), - webrtc::CreateBuiltinAudioEncoderFactory(), - webrtc::CreateBuiltinAudioDecoderFactory(), - std::make_unique>(), - std::make_unique>(), + pc_factory_ = CreatePeerConnectionFactory( + Thread::Current(), Thread::Current(), Thread::Current(), + scoped_refptr(fake_audio_capture_module_), + CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory(), + std::make_unique>(), + std::make_unique>(), nullptr /* audio_mixer */, nullptr /* audio_processing */); ASSERT_TRUE(pc_factory_); } @@ -745,17 +705,13 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { pc_->Close(); pc_ = nullptr; } - std::unique_ptr port_allocator( - new cricket::FakePortAllocator( - rtc::Thread::Current(), - std::make_unique(vss_.get()), - &field_trials_)); + auto port_allocator = + std::make_unique(CreateEnvironment(), vss_.get()); port_allocator_ = port_allocator.get(); - port_allocator_->SetIceTiebreaker(kTiebreakerDefault); // Create certificate generator unless DTLS constraint is explicitly set to // false. - std::unique_ptr cert_generator; + std::unique_ptr cert_generator; // These won't be used if encryption is turned off, but that's harmless. fake_certificate_generator_ = new FakeRTCCertificateGenerator(); @@ -828,7 +784,7 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { observer_.SetPeerConnectionInterface(nullptr); } - rtc::scoped_refptr CreateVideoTrack( + scoped_refptr CreateVideoTrack( const std::string& label) { return pc_factory_->CreateVideoTrack(FakeVideoTrackSource::Create(), label); } @@ -841,13 +797,13 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { } void AddVideoStream(const std::string& label) { - rtc::scoped_refptr stream( + scoped_refptr stream( pc_factory_->CreateLocalMediaStream(label)); stream->AddTrack(CreateVideoTrack(label + "v0")); ASSERT_TRUE(pc_->AddStream(stream.get())); } - rtc::scoped_refptr CreateAudioTrack( + scoped_refptr CreateAudioTrack( const std::string& label) { return pc_factory_->CreateAudioTrack(label, nullptr); } @@ -860,7 +816,7 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { } void AddAudioStream(const std::string& label) { - rtc::scoped_refptr stream( + scoped_refptr stream( pc_factory_->CreateLocalMediaStream(label)); stream->AddTrack(CreateAudioTrack(label + "a0")); ASSERT_TRUE(pc_->AddStream(stream.get())); @@ -870,15 +826,15 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { const std::string& audio_track_label, const std::string& video_track_label) { // Create a local stream. 
- rtc::scoped_refptr stream( + scoped_refptr stream( pc_factory_->CreateLocalMediaStream(stream_id)); stream->AddTrack(CreateAudioTrack(audio_track_label)); stream->AddTrack(CreateVideoTrack(video_track_label)); ASSERT_TRUE(pc_->AddStream(stream.get())); } - rtc::scoped_refptr GetFirstReceiverOfType( - cricket::MediaType media_type) { + scoped_refptr GetFirstReceiverOfType( + webrtc::MediaType media_type) { for (auto receiver : pc_->GetReceivers()) { if (receiver->media_type() == media_type) { return receiver; @@ -890,8 +846,7 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { bool DoCreateOfferAnswer(std::unique_ptr* desc, const RTCOfferAnswerOptions* options, bool offer) { - auto observer = - rtc::make_ref_counted(); + auto observer = make_ref_counted(); if (offer) { pc_->CreateOffer(observer.get(), options ? *options : RTCOfferAnswerOptions()); @@ -899,7 +854,10 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { pc_->CreateAnswer(observer.get(), options ? *options : RTCOfferAnswerOptions()); } - EXPECT_EQ_WAIT(true, observer->called(), kTimeout); + EXPECT_THAT( + WaitUntil([&] { return observer->called(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); *desc = observer->MoveDescription(); return observer->result(); } @@ -917,14 +875,17 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { bool DoSetSessionDescription( std::unique_ptr desc, bool local) { - auto observer = rtc::make_ref_counted(); + auto observer = make_ref_counted(); if (local) { pc_->SetLocalDescription(observer.get(), desc.release()); } else { pc_->SetRemoteDescription(observer.get(), desc.release()); } if (pc_->signaling_state() != PeerConnectionInterface::kClosed) { - EXPECT_EQ_WAIT(true, observer->called(), kTimeout); + EXPECT_THAT( + WaitUntil([&] { return observer->called(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); } return observer->result(); } @@ -943,20 +904,25 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { // It does not verify the values in the StatReports since a RTCP packet might // be required. bool DoGetStats(MediaStreamTrackInterface* track) { - auto observer = rtc::make_ref_counted(); + auto observer = make_ref_counted(); if (!pc_->GetStats(observer.get(), track, PeerConnectionInterface::kStatsOutputLevelStandard)) return false; - EXPECT_TRUE_WAIT(observer->called(), kTimeout); + EXPECT_THAT( + WaitUntil([&] { return observer->called(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); return observer->called(); } // Call the standards-compliant GetStats function. bool DoGetRTCStats() { - auto callback = - rtc::make_ref_counted(); + auto callback = make_ref_counted(); pc_->GetStats(callback.get()); - EXPECT_TRUE_WAIT(callback->called(), kTimeout); + EXPECT_THAT( + WaitUntil([&] { return callback->called(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); return callback->called(); } @@ -976,13 +942,12 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { // Verify that RTP Header extensions has been negotiated for audio and video. 
void VerifyRemoteRtpHeaderExtensions() { - const cricket::MediaContentDescription* desc = - cricket::GetFirstAudioContentDescription( - pc_->remote_description()->description()); + const MediaContentDescription* desc = GetFirstAudioContentDescription( + pc_->remote_description()->description()); ASSERT_TRUE(desc != nullptr); EXPECT_GT(desc->rtp_header_extensions().size(), 0u); - desc = cricket::GetFirstVideoContentDescription( + desc = GetFirstVideoContentDescription( pc_->remote_description()->description()); ASSERT_TRUE(desc != nullptr); EXPECT_GT(desc->rtp_header_extensions().size(), 0u); @@ -994,14 +959,14 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { std::string sdp; EXPECT_TRUE(offer->ToString(&sdp)); std::unique_ptr remote_offer( - webrtc::CreateSessionDescription(SdpType::kOffer, sdp)); + CreateSessionDescription(SdpType::kOffer, sdp)); EXPECT_TRUE(DoSetRemoteDescription(std::move(remote_offer))); EXPECT_EQ(PeerConnectionInterface::kHaveRemoteOffer, observer_.state_); } void CreateAndSetRemoteOffer(const std::string& sdp) { std::unique_ptr remote_offer( - webrtc::CreateSessionDescription(SdpType::kOffer, sdp)); + CreateSessionDescription(SdpType::kOffer, sdp)); EXPECT_TRUE(DoSetRemoteDescription(std::move(remote_offer))); EXPECT_EQ(PeerConnectionInterface::kHaveRemoteOffer, observer_.state_); } @@ -1020,7 +985,7 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { std::string sdp; EXPECT_TRUE(answer->ToString(&sdp)); std::unique_ptr new_answer( - webrtc::CreateSessionDescription(SdpType::kAnswer, sdp)); + CreateSessionDescription(SdpType::kAnswer, sdp)); EXPECT_TRUE(DoSetLocalDescription(std::move(new_answer))); EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_); } @@ -1032,7 +997,7 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { std::string sdp; EXPECT_TRUE(answer->ToString(&sdp)); std::unique_ptr pr_answer( - webrtc::CreateSessionDescription(SdpType::kPrAnswer, sdp)); + CreateSessionDescription(SdpType::kPrAnswer, sdp)); EXPECT_TRUE(DoSetLocalDescription(std::move(pr_answer))); EXPECT_EQ(PeerConnectionInterface::kHaveLocalPrAnswer, observer_.state_); } @@ -1057,17 +1022,20 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { std::string sdp; EXPECT_TRUE(offer->ToString(&sdp)); std::unique_ptr new_offer( - webrtc::CreateSessionDescription(SdpType::kOffer, sdp)); + CreateSessionDescription(SdpType::kOffer, sdp)); EXPECT_TRUE(DoSetLocalDescription(std::move(new_offer))); EXPECT_EQ(PeerConnectionInterface::kHaveLocalOffer, observer_.state_); // Wait for the ice_complete message, so that SDP will have candidates. 
- EXPECT_TRUE_WAIT(observer_.ice_gathering_complete_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return observer_.ice_gathering_complete_; }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); } void CreateAnswerAsRemoteDescription(const std::string& sdp) { std::unique_ptr answer( - webrtc::CreateSessionDescription(SdpType::kAnswer, sdp)); + CreateSessionDescription(SdpType::kAnswer, sdp)); ASSERT_TRUE(answer); EXPECT_TRUE(DoSetRemoteDescription(std::move(answer))); EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_); @@ -1075,12 +1043,12 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { void CreatePrAnswerAndAnswerAsRemoteDescription(const std::string& sdp) { std::unique_ptr pr_answer( - webrtc::CreateSessionDescription(SdpType::kPrAnswer, sdp)); + CreateSessionDescription(SdpType::kPrAnswer, sdp)); ASSERT_TRUE(pr_answer); EXPECT_TRUE(DoSetRemoteDescription(std::move(pr_answer))); EXPECT_EQ(PeerConnectionInterface::kHaveRemotePrAnswer, observer_.state_); std::unique_ptr answer( - webrtc::CreateSessionDescription(SdpType::kAnswer, sdp)); + CreateSessionDescription(SdpType::kAnswer, sdp)); ASSERT_TRUE(answer); EXPECT_TRUE(DoSetRemoteDescription(std::move(answer))); EXPECT_EQ(PeerConnectionInterface::kStable, observer_.state_); @@ -1092,9 +1060,16 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { void WaitAndVerifyOnAddStream(const std::string& stream_id, int expected_num_tracks) { // Verify that both OnAddStream and OnAddTrack are called. - EXPECT_EQ_WAIT(stream_id, observer_.GetLastAddedStreamId(), kTimeout); - EXPECT_EQ_WAIT(expected_num_tracks, - observer_.CountAddTrackEventsForStream(stream_id), kTimeout); + EXPECT_THAT(WaitUntil([&] { return observer_.GetLastAddedStreamId(); }, + ::testing::Eq(stream_id), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); + EXPECT_THAT( + WaitUntil( + [&] { return observer_.CountAddTrackEventsForStream(stream_id); }, + ::testing::Eq(expected_num_tracks), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); } // Creates an offer and applies it as a local session description. 
@@ -1124,8 +1099,8 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { std::string mediastream_id = kStreams[0]; - rtc::scoped_refptr stream( - webrtc::MediaStream::Create(mediastream_id)); + scoped_refptr stream( + MediaStream::Create(mediastream_id)); reference_collection_->AddStream(stream); if (number_of_audio_tracks > 0) { @@ -1149,22 +1124,20 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { } return std::unique_ptr( - webrtc::CreateSessionDescription(SdpType::kOffer, sdp_ms1)); + CreateSessionDescription(SdpType::kOffer, sdp_ms1)); } void AddAudioTrack(const std::string& track_id, MediaStreamInterface* stream) { - rtc::scoped_refptr audio_track( - webrtc::AudioTrack::Create(track_id, nullptr)); + scoped_refptr audio_track( + AudioTrack::Create(track_id, nullptr)); ASSERT_TRUE(stream->AddTrack(audio_track)); } void AddVideoTrack(const std::string& track_id, MediaStreamInterface* stream) { - rtc::scoped_refptr video_track( - webrtc::VideoTrack::Create(track_id, - webrtc::FakeVideoTrackSource::Create(), - rtc::Thread::Current())); + scoped_refptr video_track(VideoTrack::Create( + track_id, FakeVideoTrackSource::Create(), Thread::Current())); ASSERT_TRUE(stream->AddTrack(video_track)); } @@ -1201,18 +1174,20 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { const std::string& GetFirstAudioStreamCname( const SessionDescriptionInterface* desc) { - const cricket::AudioContentDescription* audio_desc = - cricket::GetFirstAudioContentDescription(desc->description()); + const AudioContentDescription* audio_desc = + GetFirstAudioContentDescription(desc->description()); return audio_desc->streams()[0].cname; } std::unique_ptr CreateOfferWithOptions( const RTCOfferAnswerOptions& offer_answer_options) { RTC_DCHECK(pc_); - auto observer = - rtc::make_ref_counted(); + auto observer = make_ref_counted(); pc_->CreateOffer(observer.get(), offer_answer_options); - EXPECT_EQ_WAIT(true, observer->called(), kTimeout); + EXPECT_THAT( + WaitUntil([&] { return observer->called(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); return observer->MoveDescription(); } @@ -1224,7 +1199,7 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { std::string sdp; EXPECT_TRUE((*desc)->ToString(&sdp)); std::unique_ptr remote_offer( - webrtc::CreateSessionDescription(SdpType::kOffer, sdp)); + CreateSessionDescription(SdpType::kOffer, sdp)); EXPECT_TRUE(DoSetRemoteDescription(std::move(remote_offer))); EXPECT_EQ(PeerConnectionInterface::kHaveRemoteOffer, observer_.state_); } @@ -1237,17 +1212,16 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { std::string sdp; EXPECT_TRUE((*desc)->ToString(&sdp)); std::unique_ptr new_offer( - webrtc::CreateSessionDescription(SdpType::kOffer, sdp)); + CreateSessionDescription(SdpType::kOffer, sdp)); EXPECT_TRUE(DoSetLocalDescription(std::move(new_offer))); EXPECT_EQ(PeerConnectionInterface::kHaveLocalOffer, observer_.state_); } - bool HasCNCodecs(const cricket::ContentInfo* content) { + bool HasCNCodecs(const ContentInfo* content) { RTC_DCHECK(content); RTC_DCHECK(content->media_description()); - for (const cricket::AudioCodec& codec : - content->media_description()->as_audio()->codecs()) { + for (const Codec& codec : content->media_description()->codecs()) { if (codec.name == "CN") { return true; } @@ -1271,18 +1245,17 @@ class PeerConnectionInterfaceBaseTest : public ::testing::Test { } } - rtc::SocketServer* socket_server() const { return vss_.get(); } + 
SocketServer* socket_server() const { return vss_.get(); } - webrtc::test::ScopedKeyValueConfig field_trials_; - std::unique_ptr vss_; - rtc::AutoSocketServerThread main_; - rtc::scoped_refptr fake_audio_capture_module_; - cricket::FakePortAllocator* port_allocator_ = nullptr; + std::unique_ptr vss_; + AutoSocketServerThread main_; + scoped_refptr fake_audio_capture_module_; + FakePortAllocator* port_allocator_ = nullptr; FakeRTCCertificateGenerator* fake_certificate_generator_ = nullptr; - rtc::scoped_refptr pc_factory_; - rtc::scoped_refptr pc_; + scoped_refptr pc_factory_; + scoped_refptr pc_; MockPeerConnectionObserver observer_; - rtc::scoped_refptr reference_collection_; + scoped_refptr reference_collection_; const SdpSemantics sdp_semantics_; }; @@ -1329,14 +1302,13 @@ TEST_P(PeerConnectionInterfaceTest, TEST_P(PeerConnectionInterfaceTest, CreatePeerConnectionWithDifferentIceTransportsTypes) { CreatePeerConnectionWithIceTransportsType(PeerConnectionInterface::kNone); - EXPECT_EQ(cricket::CF_NONE, port_allocator_->candidate_filter()); + EXPECT_EQ(CF_NONE, port_allocator_->candidate_filter()); CreatePeerConnectionWithIceTransportsType(PeerConnectionInterface::kRelay); - EXPECT_EQ(cricket::CF_RELAY, port_allocator_->candidate_filter()); + EXPECT_EQ(CF_RELAY, port_allocator_->candidate_filter()); CreatePeerConnectionWithIceTransportsType(PeerConnectionInterface::kNoHost); - EXPECT_EQ(cricket::CF_ALL & ~cricket::CF_HOST, - port_allocator_->candidate_filter()); + EXPECT_EQ(CF_ALL & ~CF_HOST, port_allocator_->candidate_filter()); CreatePeerConnectionWithIceTransportsType(PeerConnectionInterface::kAll); - EXPECT_EQ(cricket::CF_ALL, port_allocator_->candidate_filter()); + EXPECT_EQ(CF_ALL, port_allocator_->candidate_filter()); } // Test that when a PeerConnection is created with a nonzero candidate pool @@ -1356,14 +1328,13 @@ TEST_P(PeerConnectionInterfaceTest, CreatePeerConnectionWithPooledCandidates) { config.ice_candidate_pool_size = 1; CreatePeerConnection(config); - const cricket::FakePortAllocatorSession* session = - static_cast( + const FakePortAllocatorSession* session = + static_cast( port_allocator_->GetPooledSession()); ASSERT_NE(nullptr, session); EXPECT_EQ(1UL, session->stun_servers().size()); - EXPECT_LT(0U, session->flags() & cricket::PORTALLOCATOR_DISABLE_TCP); - EXPECT_LT(0U, - session->flags() & cricket::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); + EXPECT_LT(0U, session->flags() & PORTALLOCATOR_DISABLE_TCP); + EXPECT_LT(0U, session->flags() & PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); } // Test that network-related RTCConfiguration members are applied to the @@ -1379,12 +1350,9 @@ TEST_P(PeerConnectionInterfaceTest, CreatePeerConnectionWithPooledCandidates) { TEST_P(PeerConnectionInterfaceTest, CreatePeerConnectionAppliesNetworkConfigToPortAllocator) { // Create fake port allocator. - std::unique_ptr packet_socket_factory( - new rtc::BasicPacketSocketFactory(socket_server())); - std::unique_ptr port_allocator( - new cricket::FakePortAllocator( - rtc::Thread::Current(), packet_socket_factory.get(), &field_trials_)); - cricket::FakePortAllocator* raw_port_allocator = port_allocator.get(); + auto port_allocator = + std::make_unique(CreateEnvironment(), socket_server()); + FakePortAllocator* raw_port_allocator = port_allocator.get(); // Create RTCConfiguration with some network-related fields relevant to // PortAllocator populated. @@ -1399,22 +1367,18 @@ TEST_P(PeerConnectionInterfaceTest, config.prune_turn_ports = true; // Create the PC factory and PC with the above config. 
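// A minimal sketch of how this test hands its own allocator to the
// PeerConnection, matching the code that follows; the template argument of
// make_unique is elided in the patch text, so FakePortAllocator is spelled
// out here for illustration:
//
//   auto allocator = std::make_unique<FakePortAllocator>(CreateEnvironment(),
//                                                        socket_server());
//   FakePortAllocator* raw = allocator.get();  // kept to inspect flags later
//   PeerConnectionDependencies pc_deps(&observer_);
//   pc_deps.allocator = std::move(allocator);
//   // pc_deps is then handed to the factory when the PeerConnection is
//   // created with the RTCConfiguration populated above.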
- rtc::scoped_refptr pc_factory( - webrtc::CreatePeerConnectionFactory( - rtc::Thread::Current(), rtc::Thread::Current(), - rtc::Thread::Current(), fake_audio_capture_module_, - webrtc::CreateBuiltinAudioEncoderFactory(), - webrtc::CreateBuiltinAudioDecoderFactory(), - std::make_unique>(), - std::make_unique>(), + scoped_refptr pc_factory( + CreatePeerConnectionFactory( + Thread::Current(), Thread::Current(), Thread::Current(), + fake_audio_capture_module_, CreateBuiltinAudioEncoderFactory(), + CreateBuiltinAudioDecoderFactory(), + std::make_unique>(), + std::make_unique>(), nullptr /* audio_mixer */, nullptr /* audio_processing */)); PeerConnectionDependencies pc_dependencies(&observer_); pc_dependencies.allocator = std::move(port_allocator); @@ -1425,13 +1389,12 @@ TEST_P(PeerConnectionInterfaceTest, // Now validate that the config fields set above were applied to the // PortAllocator, as flags or otherwise. - EXPECT_FALSE(raw_port_allocator->flags() & - cricket::PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); + EXPECT_FALSE(raw_port_allocator->flags() & PORTALLOCATOR_ENABLE_IPV6_ON_WIFI); EXPECT_EQ(10, raw_port_allocator->max_ipv6_networks()); - EXPECT_TRUE(raw_port_allocator->flags() & cricket::PORTALLOCATOR_DISABLE_TCP); + EXPECT_TRUE(raw_port_allocator->flags() & PORTALLOCATOR_DISABLE_TCP); EXPECT_TRUE(raw_port_allocator->flags() & - cricket::PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); - EXPECT_EQ(webrtc::PRUNE_BASED_ON_PRIORITY, + PORTALLOCATOR_DISABLE_COSTLY_NETWORKS); + EXPECT_EQ(PRUNE_BASED_ON_PRIORITY, raw_port_allocator->turn_port_prune_policy()); } @@ -1453,8 +1416,7 @@ TEST_P(PeerConnectionInterfaceTest, GetConfigurationAfterCreatePeerConnection) { TEST_P(PeerConnectionInterfaceTest, GetConfigurationAfterSetConfiguration) { PeerConnectionInterface::RTCConfiguration starting_config; starting_config.sdp_semantics = sdp_semantics_; - starting_config.bundle_policy = - webrtc::PeerConnection::kBundlePolicyMaxBundle; + starting_config.bundle_policy = PeerConnection::kBundlePolicyMaxBundle; CreatePeerConnection(starting_config); PeerConnectionInterface::RTCConfiguration config = pc_->GetConfiguration(); @@ -1482,11 +1444,10 @@ TEST_F(PeerConnectionInterfaceTestPlanB, AddStreams) { ASSERT_EQ(2u, pc_->local_streams()->count()); // Test we can add multiple local streams to one peerconnection. 
- rtc::scoped_refptr stream( + scoped_refptr stream( pc_factory_->CreateLocalMediaStream(kStreamId3)); - rtc::scoped_refptr audio_track( - pc_factory_->CreateAudioTrack( - kStreamId3, static_cast(nullptr))); + scoped_refptr audio_track(pc_factory_->CreateAudioTrack( + kStreamId3, static_cast(nullptr))); stream->AddTrack(audio_track); EXPECT_TRUE(pc_->AddStream(stream.get())); EXPECT_EQ(3u, pc_->local_streams()->count()); @@ -1512,12 +1473,12 @@ TEST_F(PeerConnectionInterfaceTestPlanB, AddedStreamsPresentInOffer) { std::unique_ptr offer; ASSERT_TRUE(DoCreateOffer(&offer, nullptr)); - const cricket::AudioContentDescription* audio_desc = - cricket::GetFirstAudioContentDescription(offer->description()); + const AudioContentDescription* audio_desc = + GetFirstAudioContentDescription(offer->description()); EXPECT_TRUE(ContainsTrack(audio_desc->streams(), kStreamId1, "audio_track")); - const cricket::VideoContentDescription* video_desc = - cricket::GetFirstVideoContentDescription(offer->description()); + const VideoContentDescription* video_desc = + GetFirstVideoContentDescription(offer->description()); EXPECT_TRUE(ContainsTrack(video_desc->streams(), kStreamId1, "video_track")); // Add another stream and ensure the offer includes both the old and new @@ -1525,11 +1486,11 @@ TEST_F(PeerConnectionInterfaceTestPlanB, AddedStreamsPresentInOffer) { AddAudioVideoStream(kStreamId2, "audio_track2", "video_track2"); ASSERT_TRUE(DoCreateOffer(&offer, nullptr)); - audio_desc = cricket::GetFirstAudioContentDescription(offer->description()); + audio_desc = GetFirstAudioContentDescription(offer->description()); EXPECT_TRUE(ContainsTrack(audio_desc->streams(), kStreamId1, "audio_track")); EXPECT_TRUE(ContainsTrack(audio_desc->streams(), kStreamId2, "audio_track2")); - video_desc = cricket::GetFirstVideoContentDescription(offer->description()); + video_desc = GetFirstVideoContentDescription(offer->description()); EXPECT_TRUE(ContainsTrack(video_desc->streams(), kStreamId1, "video_track")); EXPECT_TRUE(ContainsTrack(video_desc->streams(), kStreamId2, "video_track2")); } @@ -1551,9 +1512,9 @@ TEST_F(PeerConnectionInterfaceTestPlanB, RemoveStream) { // in peerconnection_jsep_unittests.cc TEST_F(PeerConnectionInterfaceTestPlanB, AddTrackRemoveTrack) { CreatePeerConnectionWithoutDtls(); - rtc::scoped_refptr audio_track( + scoped_refptr audio_track( CreateAudioTrack("audio_track")); - rtc::scoped_refptr video_track( + scoped_refptr video_track( CreateVideoTrack("video_track")); auto audio_sender = pc_->AddTrack(audio_track, {kStreamId1}).MoveValue(); auto video_sender = pc_->AddTrack(video_track, {kStreamId1}).MoveValue(); @@ -1570,13 +1531,11 @@ TEST_F(PeerConnectionInterfaceTestPlanB, AddTrackRemoveTrack) { std::unique_ptr offer; ASSERT_TRUE(DoCreateOffer(&offer, nullptr)); - const cricket::ContentInfo* audio_content = - cricket::GetFirstAudioContent(offer->description()); + const ContentInfo* audio_content = GetFirstAudioContent(offer->description()); EXPECT_TRUE(ContainsTrack(audio_content->media_description()->streams(), kStreamId1, "audio_track")); - const cricket::ContentInfo* video_content = - cricket::GetFirstVideoContent(offer->description()); + const ContentInfo* video_content = GetFirstVideoContent(offer->description()); EXPECT_TRUE(ContainsTrack(video_content->media_description()->streams(), kStreamId1, "video_track")); @@ -1589,11 +1548,11 @@ TEST_F(PeerConnectionInterfaceTestPlanB, AddTrackRemoveTrack) { // Create a new offer and ensure it doesn't contain the removed senders. 
ASSERT_TRUE(DoCreateOffer(&offer, nullptr)); - audio_content = cricket::GetFirstAudioContent(offer->description()); + audio_content = GetFirstAudioContent(offer->description()); EXPECT_FALSE(ContainsTrack(audio_content->media_description()->streams(), kStreamId1, "audio_track")); - video_content = cricket::GetFirstVideoContent(offer->description()); + video_content = GetFirstVideoContent(offer->description()); EXPECT_FALSE(ContainsTrack(video_content->media_description()->streams(), kStreamId1, "video_track")); @@ -1608,9 +1567,9 @@ TEST_F(PeerConnectionInterfaceTestPlanB, AddTrackRemoveTrack) { // Test for AddTrack with init_send_encoding. TEST_F(PeerConnectionInterfaceTestPlanB, AddTrackWithSendEncodings) { CreatePeerConnectionWithoutDtls(); - rtc::scoped_refptr audio_track( + scoped_refptr audio_track( CreateAudioTrack("audio_track")); - rtc::scoped_refptr video_track( + scoped_refptr video_track( CreateVideoTrack("video_track")); RtpEncodingParameters audio_encodings; audio_encodings.active = false; @@ -1633,13 +1592,11 @@ TEST_F(PeerConnectionInterfaceTestPlanB, AddTrackWithSendEncodings) { std::unique_ptr offer; ASSERT_TRUE(DoCreateOffer(&offer, nullptr)); - const cricket::ContentInfo* audio_content = - cricket::GetFirstAudioContent(offer->description()); + const ContentInfo* audio_content = GetFirstAudioContent(offer->description()); EXPECT_TRUE(ContainsTrack(audio_content->media_description()->streams(), kStreamId1, "audio_track")); - const cricket::ContentInfo* video_content = - cricket::GetFirstVideoContent(offer->description()); + const ContentInfo* video_content = GetFirstVideoContent(offer->description()); EXPECT_TRUE(ContainsTrack(video_content->media_description()->streams(), kStreamId1, "video_track")); @@ -1660,9 +1617,9 @@ TEST_F(PeerConnectionInterfaceTestPlanB, AddTrackWithSendEncodings) { // expecting a random stream ID to be generated. TEST_P(PeerConnectionInterfaceTest, AddTrackWithoutStream) { CreatePeerConnectionWithoutDtls(); - rtc::scoped_refptr audio_track( + scoped_refptr audio_track( CreateAudioTrack("audio_track")); - rtc::scoped_refptr video_track( + scoped_refptr video_track( CreateVideoTrack("video_track")); auto audio_sender = pc_->AddTrack(audio_track, std::vector()).MoveValue(); @@ -1687,10 +1644,10 @@ TEST_P(PeerConnectionInterfaceTest, AddTrackWithoutStream) { // Test that we can call GetStats() after AddTrack but before connecting // the PeerConnection to a peer. 
TEST_P(PeerConnectionInterfaceTest, AddTrackBeforeConnecting) { - CreatePeerConnectionWithoutDtls(); - rtc::scoped_refptr audio_track( + CreatePeerConnection(); + scoped_refptr audio_track( CreateAudioTrack("audio_track")); - rtc::scoped_refptr video_track( + scoped_refptr video_track( CreateVideoTrack("video_track")); auto audio_sender = pc_->AddTrack(audio_track, std::vector()); auto video_sender = pc_->AddTrack(video_track, std::vector()); @@ -1698,10 +1655,10 @@ TEST_P(PeerConnectionInterfaceTest, AddTrackBeforeConnecting) { } TEST_P(PeerConnectionInterfaceTest, AttachmentIdIsSetOnAddTrack) { - CreatePeerConnectionWithoutDtls(); - rtc::scoped_refptr audio_track( + CreatePeerConnection(); + scoped_refptr audio_track( CreateAudioTrack("audio_track")); - rtc::scoped_refptr video_track( + scoped_refptr video_track( CreateVideoTrack("video_track")); auto audio_sender = pc_->AddTrack(audio_track, std::vector()); ASSERT_TRUE(audio_sender.ok()); @@ -1720,7 +1677,7 @@ TEST_P(PeerConnectionInterfaceTest, AttachmentIdIsSetOnAddTrack) { // Don't run under Unified Plan since the stream API is not available. TEST_F(PeerConnectionInterfaceTestPlanB, AttachmentIdIsSetOnAddStream) { - CreatePeerConnectionWithoutDtls(); + CreatePeerConnection(); AddVideoStream(kStreamId1); auto senders = pc_->GetSenders(); ASSERT_EQ(1u, senders.size()); @@ -1808,8 +1765,14 @@ TEST_P(PeerConnectionInterfaceTest, IceCandidates) { EXPECT_TRUE(DoCreateAnswer(&answer, nullptr)); EXPECT_TRUE(DoSetLocalDescription(std::move(answer))); - EXPECT_TRUE_WAIT(observer_.last_candidate() != nullptr, kTimeout); - EXPECT_TRUE_WAIT(observer_.ice_gathering_complete_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return observer_.last_candidate(); }, + ::testing::Ne(nullptr), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return observer_.ice_gathering_complete_; }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); EXPECT_TRUE(pc_->AddIceCandidate(observer_.last_candidate())); } @@ -1879,15 +1842,15 @@ TEST_F(PeerConnectionInterfaceTestPlanB, AddTrackAfterAddStream) { MediaStreamInterface* stream = pc_->local_streams()->at(0); // Add video track to the audio-only stream. 
- rtc::scoped_refptr video_track( + scoped_refptr video_track( CreateVideoTrack("video_label")); stream->AddTrack(video_track); std::unique_ptr offer; ASSERT_TRUE(DoCreateOffer(&offer, nullptr)); - const cricket::MediaContentDescription* video_desc = - cricket::GetFirstVideoContentDescription(offer->description()); + const MediaContentDescription* video_desc = + GetFirstVideoContentDescription(offer->description()); EXPECT_TRUE(video_desc != nullptr); } @@ -1907,8 +1870,8 @@ TEST_F(PeerConnectionInterfaceTestPlanB, RemoveTrackAfterAddStream) { std::unique_ptr offer; ASSERT_TRUE(DoCreateOffer(&offer, nullptr)); - const cricket::MediaContentDescription* video_desc = - cricket::GetFirstVideoContentDescription(offer->description()); + const MediaContentDescription* video_desc = + GetFirstVideoContentDescription(offer->description()); EXPECT_TRUE(video_desc == nullptr); } @@ -1922,8 +1885,8 @@ TEST_F(PeerConnectionInterfaceTestPlanB, CreateSenderWithStream) { std::unique_ptr offer; ASSERT_TRUE(DoCreateOffer(&offer, nullptr)); - const cricket::MediaContentDescription* video_desc = - cricket::GetFirstVideoContentDescription(offer->description()); + const MediaContentDescription* video_desc = + GetFirstVideoContentDescription(offer->description()); ASSERT_TRUE(video_desc != nullptr); ASSERT_EQ(1u, video_desc->streams().size()); EXPECT_EQ(kStreamId1, video_desc->streams()[0].first_stream_id()); @@ -1934,7 +1897,7 @@ TEST_P(PeerConnectionInterfaceTest, GetStatsForSpecificTrack) { InitiateCall(); ASSERT_LT(0u, pc_->GetSenders().size()); ASSERT_LT(0u, pc_->GetReceivers().size()); - rtc::scoped_refptr remote_audio = + scoped_refptr remote_audio = pc_->GetReceivers()[0]->track(); EXPECT_TRUE(DoGetStats(remote_audio.get())); @@ -1952,7 +1915,7 @@ TEST_P(PeerConnectionInterfaceTest, GetStatsForSpecificTrack) { // Test that we can get stats on a video track. TEST_P(PeerConnectionInterfaceTest, GetStatsForVideoTrack) { InitiateCall(); - auto video_receiver = GetFirstReceiverOfType(cricket::MEDIA_TYPE_VIDEO); + auto video_receiver = GetFirstReceiverOfType(webrtc::MediaType::VIDEO); ASSERT_TRUE(video_receiver); EXPECT_TRUE(DoGetStats(video_receiver->track().get())); } @@ -1960,7 +1923,7 @@ TEST_P(PeerConnectionInterfaceTest, GetStatsForVideoTrack) { // Test that we don't get statistics for an invalid track. 
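GetFirstReceiverOfType() is a fixture helper; it essentially scans GetReceivers() for the requested webrtc::MediaType, roughly as below (a sketch, not the fixture's exact code).

#include "api/media_types.h"
#include "api/peer_connection_interface.h"
#include "api/rtp_receiver_interface.h"
#include "api/scoped_refptr.h"

namespace webrtc {

// Returns the first receiver whose media type matches, or nullptr.
scoped_refptr<RtpReceiverInterface> FirstReceiverOfType(
    PeerConnectionInterface* pc, MediaType media_type) {
  for (const auto& receiver : pc->GetReceivers()) {
    if (receiver->media_type() == media_type) {
      return receiver;
    }
  }
  return nullptr;
}

}  // namespace webrtc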
TEST_P(PeerConnectionInterfaceTest, GetStatsForInvalidTrack) { InitiateCall(); - rtc::scoped_refptr unknown_audio_track( + scoped_refptr unknown_audio_track( pc_factory_->CreateAudioTrack("unknown track", nullptr)); EXPECT_FALSE(DoGetStats(unknown_audio_track.get())); } @@ -1985,7 +1948,7 @@ TEST_P(PeerConnectionInterfaceTest, CreateSctpDataChannel) { RTCConfiguration rtc_config; CreatePeerConnection(rtc_config); - webrtc::DataChannelInit config; + DataChannelInit config; auto channel = pc_->CreateDataChannelOrError("1", &config); EXPECT_TRUE(channel.ok()); EXPECT_TRUE(channel.value()->reliable()); @@ -2005,7 +1968,7 @@ TEST_P(PeerConnectionInterfaceTest, CreateSctpDataChannel) { EXPECT_FALSE(channel.value()->reliable()); EXPECT_FALSE(observer_.renegotiation_needed_); - config.maxRetransmits = absl::nullopt; + config.maxRetransmits = std::nullopt; config.maxRetransmitTime = 0; channel = pc_->CreateDataChannelOrError("4", &config); EXPECT_TRUE(channel.ok()); @@ -2017,7 +1980,7 @@ TEST_P(PeerConnectionInterfaceTest, CreateSctpDataChannelWhenClosed) { RTCConfiguration rtc_config; CreatePeerConnection(rtc_config); pc_->Close(); - webrtc::DataChannelInit config; + DataChannelInit config; auto ret = pc_->CreateDataChannelOrError("1", &config); ASSERT_FALSE(ret.ok()); EXPECT_EQ(ret.error().type(), RTCErrorType::INVALID_STATE); @@ -2029,7 +1992,7 @@ TEST_P(PeerConnectionInterfaceTest, CreateSctpDataChannelWithMinusOne) { RTCConfiguration rtc_config; CreatePeerConnection(rtc_config); - webrtc::DataChannelInit config; + DataChannelInit config; config.maxRetransmitTime = -1; config.maxRetransmits = -1; auto channel = pc_->CreateDataChannelOrError("1", &config); @@ -2044,7 +2007,7 @@ TEST_P(PeerConnectionInterfaceTest, CreatePeerConnection(rtc_config); std::string label = "test"; - webrtc::DataChannelInit config; + DataChannelInit config; config.maxRetransmits = 0; config.maxRetransmitTime = 0; @@ -2059,7 +2022,7 @@ TEST_P(PeerConnectionInterfaceTest, RTCConfiguration rtc_config; CreatePeerConnection(rtc_config); - webrtc::DataChannelInit config; + DataChannelInit config; config.id = 1; config.negotiated = true; @@ -2070,13 +2033,13 @@ TEST_P(PeerConnectionInterfaceTest, channel = pc_->CreateDataChannelOrError("x", &config); EXPECT_FALSE(channel.ok()); - config.id = cricket::kMaxSctpSid; + config.id = kMaxSctpSid; config.negotiated = true; channel = pc_->CreateDataChannelOrError("max", &config); EXPECT_TRUE(channel.ok()); EXPECT_EQ(config.id, channel.value()->id()); - config.id = cricket::kMaxSctpSid + 1; + config.id = kMaxSctpSid + 1; config.negotiated = true; channel = pc_->CreateDataChannelOrError("x", &config); EXPECT_FALSE(channel.ok()); @@ -2113,10 +2076,9 @@ TEST_P(PeerConnectionInterfaceTest, DISABLED_TestRejectSctpDataChannelInAnswer) std::string sdp; EXPECT_TRUE(pc_->local_description()->ToString(&sdp)); std::unique_ptr answer( - webrtc::CreateSessionDescription(SdpType::kAnswer, sdp)); + CreateSessionDescription(SdpType::kAnswer, sdp)); ASSERT_TRUE(answer); - cricket::ContentInfo* data_info = - cricket::GetFirstDataContent(answer->description()); + ContentInfo* data_info = GetFirstDataContent(answer->description()); data_info->rejected = true; DoSetRemoteDescription(std::move(answer)); @@ -2132,49 +2094,27 @@ TEST_P(PeerConnectionInterfaceTest, ReceiveFireFoxOffer) { AddAudioTrack("audio_label"); AddVideoTrack("video_label"); std::unique_ptr desc( - webrtc::CreateSessionDescription(SdpType::kOffer, - webrtc::kFireFoxSdpOffer, nullptr)); + CreateSessionDescription(SdpType::kOffer, 
kFireFoxSdpOffer, nullptr)); EXPECT_TRUE(DoSetSessionDescription(std::move(desc), false)); CreateAnswerAsLocalDescription(); ASSERT_TRUE(pc_->local_description() != nullptr); ASSERT_TRUE(pc_->remote_description() != nullptr); - const cricket::ContentInfo* content = - cricket::GetFirstAudioContent(pc_->local_description()->description()); + const ContentInfo* content = + GetFirstAudioContent(pc_->local_description()->description()); ASSERT_TRUE(content != nullptr); EXPECT_FALSE(content->rejected); - content = - cricket::GetFirstVideoContent(pc_->local_description()->description()); + content = GetFirstVideoContent(pc_->local_description()->description()); ASSERT_TRUE(content != nullptr); EXPECT_FALSE(content->rejected); #ifdef WEBRTC_HAVE_SCTP - content = - cricket::GetFirstDataContent(pc_->local_description()->description()); + content = GetFirstDataContent(pc_->local_description()->description()); ASSERT_TRUE(content != nullptr); EXPECT_FALSE(content->rejected); #endif } -// Test that fallback from DTLS to SDES is not supported. -// The fallback was previously supported but was removed to simplify the code -// and because it's non-standard. -TEST_P(PeerConnectionInterfaceTest, DtlsSdesFallbackNotSupported) { - RTCConfiguration rtc_config; - CreatePeerConnection(rtc_config); - // Wait for fake certificate to be generated. Previously, this is what caused - // the "a=crypto" lines to be rejected. - AddAudioTrack("audio_label"); - AddVideoTrack("video_label"); - ASSERT_NE(nullptr, fake_certificate_generator_); - EXPECT_EQ_WAIT(1, fake_certificate_generator_->generated_certificates(), - kTimeout); - std::unique_ptr desc( - webrtc::CreateSessionDescription(SdpType::kOffer, kDtlsSdesFallbackSdp, - nullptr)); - EXPECT_FALSE(DoSetSessionDescription(std::move(desc), /*local=*/false)); -} - // Test that we can create an audio only offer and receive an answer with a // limited set of audio codecs and receive an updated offer with more audio // codecs, where the added codecs are not supported. @@ -2184,18 +2124,17 @@ TEST_P(PeerConnectionInterfaceTest, ReceiveUpdatedAudioOfferWithBadCodecs) { CreateOfferAsLocalDescription(); const char* answer_sdp = (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED - ? webrtc::kAudioSdpPlanB - : webrtc::kAudioSdpUnifiedPlan); + ? kAudioSdpPlanB + : kAudioSdpUnifiedPlan); std::unique_ptr answer( - webrtc::CreateSessionDescription(SdpType::kAnswer, answer_sdp, nullptr)); + CreateSessionDescription(SdpType::kAnswer, answer_sdp, nullptr)); EXPECT_TRUE(DoSetSessionDescription(std::move(answer), false)); - const char* reoffer_sdp = - (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED - ? webrtc::kAudioSdpWithUnsupportedCodecsPlanB - : webrtc::kAudioSdpWithUnsupportedCodecsUnifiedPlan); + const char* reoffer_sdp = (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED + ? 
kAudioSdpWithUnsupportedCodecsPlanB + : kAudioSdpWithUnsupportedCodecsUnifiedPlan); std::unique_ptr updated_offer( - webrtc::CreateSessionDescription(SdpType::kOffer, reoffer_sdp, nullptr)); + CreateSessionDescription(SdpType::kOffer, reoffer_sdp, nullptr)); EXPECT_TRUE(DoSetSessionDescription(std::move(updated_offer), false)); CreateAnswerAsLocalDescription(); } @@ -2213,13 +2152,11 @@ TEST_P(PeerConnectionInterfaceTest, CreateSubsequentRecvOnlyOffer) { std::unique_ptr offer; DoCreateOffer(&offer, nullptr); - const cricket::ContentInfo* video_content = - cricket::GetFirstVideoContent(offer->description()); + const ContentInfo* video_content = GetFirstVideoContent(offer->description()); ASSERT_EQ(RtpTransceiverDirection::kRecvOnly, video_content->media_description()->direction()); - const cricket::ContentInfo* audio_content = - cricket::GetFirstAudioContent(offer->description()); + const ContentInfo* audio_content = GetFirstAudioContent(offer->description()); ASSERT_EQ(RtpTransceiverDirection::kRecvOnly, audio_content->media_description()->direction()); } @@ -2242,13 +2179,11 @@ TEST_P(PeerConnectionInterfaceTest, CreateSubsequentInactiveOffer) { options.offer_to_receive_video = 0; DoCreateOffer(&offer, &options); - const cricket::ContentInfo* video_content = - cricket::GetFirstVideoContent(offer->description()); + const ContentInfo* video_content = GetFirstVideoContent(offer->description()); ASSERT_EQ(RtpTransceiverDirection::kInactive, video_content->media_description()->direction()); - const cricket::ContentInfo* audio_content = - cricket::GetFirstAudioContent(offer->description()); + const ContentInfo* audio_content = GetFirstAudioContent(offer->description()); ASSERT_EQ(RtpTransceiverDirection::kInactive, audio_content->media_description()->direction()); } @@ -2274,7 +2209,7 @@ TEST_P(PeerConnectionInterfaceTest, SetConfigurationChangesCandidateFilter) { PeerConnectionInterface::RTCConfiguration config = pc_->GetConfiguration(); config.type = PeerConnectionInterface::kRelay; EXPECT_TRUE(pc_->SetConfiguration(config).ok()); - EXPECT_EQ(cricket::CF_RELAY, port_allocator_->candidate_filter()); + EXPECT_EQ(CF_RELAY, port_allocator_->candidate_filter()); } TEST_P(PeerConnectionInterfaceTest, SetConfigurationChangesPruneTurnPortsFlag) { @@ -2282,12 +2217,11 @@ TEST_P(PeerConnectionInterfaceTest, SetConfigurationChangesPruneTurnPortsFlag) { config.prune_turn_ports = false; CreatePeerConnection(config); config = pc_->GetConfiguration(); - EXPECT_EQ(webrtc::NO_PRUNE, port_allocator_->turn_port_prune_policy()); + EXPECT_EQ(NO_PRUNE, port_allocator_->turn_port_prune_policy()); config.prune_turn_ports = true; EXPECT_TRUE(pc_->SetConfiguration(config).ok()); - EXPECT_EQ(webrtc::PRUNE_BASED_ON_PRIORITY, - port_allocator_->turn_port_prune_policy()); + EXPECT_EQ(PRUNE_BASED_ON_PRIORITY, port_allocator_->turn_port_prune_policy()); } // Test that the ice check interval can be changed. This does not verify that @@ -2295,7 +2229,7 @@ TEST_P(PeerConnectionInterfaceTest, SetConfigurationChangesPruneTurnPortsFlag) { // require a very complex set of mocks. 
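The SDP-string tests above all funnel through the free CreateSessionDescription() parser; a minimal sketch of that call, for an arbitrary SDP string.

#include <memory>
#include <string>

#include "api/jsep.h"

namespace webrtc {

// Parses `sdp` as an offer; returns nullptr (and fills `error`) on failure.
std::unique_ptr<SessionDescriptionInterface> ParseOffer(const std::string& sdp) {
  SdpParseError error;
  std::unique_ptr<SessionDescriptionInterface> offer =
      CreateSessionDescription(SdpType::kOffer, sdp, &error);
  if (!offer) {
    // error.line and error.description say what failed to parse.
  }
  return offer;
}

}  // namespace webrtc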
TEST_P(PeerConnectionInterfaceTest, SetConfigurationChangesIceCheckInterval) { PeerConnectionInterface::RTCConfiguration config; - config.ice_check_min_interval = absl::nullopt; + config.ice_check_min_interval = std::nullopt; CreatePeerConnection(config); config = pc_->GetConfiguration(); config.ice_check_min_interval = 100; @@ -2331,8 +2265,8 @@ TEST_P(PeerConnectionInterfaceTest, config.type = PeerConnectionInterface::kRelay; EXPECT_TRUE(pc_->SetConfiguration(config).ok()); - const cricket::FakePortAllocatorSession* session = - static_cast( + const FakePortAllocatorSession* session = + static_cast( port_allocator_->GetPooledSession()); ASSERT_NE(nullptr, session); EXPECT_EQ(1UL, session->stun_servers().size()); @@ -2376,8 +2310,7 @@ TEST_P(PeerConnectionInterfaceTest, CreateAnswerAsLocalDescription(); // Expect no pooled sessions to be left. - const cricket::PortAllocatorSession* session = - port_allocator_->GetPooledSession(); + const PortAllocatorSession* session = port_allocator_->GetPooledSession(); EXPECT_EQ(nullptr, session); } @@ -2392,8 +2325,7 @@ TEST_P(PeerConnectionInterfaceTest, PooledSessionsDiscardedAfterClose) { pc_->Close(); // Expect no pooled sessions to be left. - const cricket::PortAllocatorSession* session = - port_allocator_->GetPooledSession(); + const PortAllocatorSession* session = port_allocator_->GetPooledSession(); EXPECT_EQ(nullptr, session); } @@ -2509,16 +2441,20 @@ TEST_P(PeerConnectionInterfaceTest, CloseAndTestStreamsAndStates) { EXPECT_EQ(2u, pc_->GetTransceivers().size()); } - auto audio_receiver = GetFirstReceiverOfType(cricket::MEDIA_TYPE_AUDIO); - auto video_receiver = GetFirstReceiverOfType(cricket::MEDIA_TYPE_VIDEO); + auto audio_receiver = GetFirstReceiverOfType(webrtc::MediaType::AUDIO); + auto video_receiver = GetFirstReceiverOfType(webrtc::MediaType::VIDEO); if (sdp_semantics_ == SdpSemantics::kPlanB_DEPRECATED) { ASSERT_TRUE(audio_receiver); ASSERT_TRUE(video_receiver); // Track state may be updated asynchronously. 
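A sketch of the SetConfiguration() round trip the tests above rely on: fetch the live configuration, tweak it, and hand it back. The 100 ms interval is the value used in the test, not a recommendation.

#include "api/peer_connection_interface.h"
#include "api/rtc_error.h"

namespace webrtc {

// Restrict candidate gathering to relay, prune redundant TURN ports and
// lower the ICE check interval on an already-created connection.
RTCError RestrictToRelay(PeerConnectionInterface* pc) {
  PeerConnectionInterface::RTCConfiguration config = pc->GetConfiguration();
  config.type = PeerConnectionInterface::kRelay;
  config.prune_turn_ports = true;
  config.ice_check_min_interval = 100;  // Milliseconds, as in the test above.
  return pc->SetConfiguration(config);
}

}  // namespace webrtc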
- EXPECT_EQ_WAIT(MediaStreamTrackInterface::kEnded, - audio_receiver->track()->state(), kTimeout); - EXPECT_EQ_WAIT(MediaStreamTrackInterface::kEnded, - video_receiver->track()->state(), kTimeout); + EXPECT_THAT(WaitUntil([&] { return audio_receiver->track()->state(); }, + ::testing::Eq(MediaStreamTrackInterface::kEnded), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return video_receiver->track()->state(); }, + ::testing::Eq(MediaStreamTrackInterface::kEnded), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); } else { ASSERT_FALSE(audio_receiver); ASSERT_FALSE(video_receiver); @@ -2535,8 +2471,7 @@ TEST_F(PeerConnectionInterfaceTestPlanB, CloseAndTestMethods) { CreateAnswerAsLocalDescription(); ASSERT_EQ(1u, pc_->local_streams()->count()); - rtc::scoped_refptr local_stream( - pc_->local_streams()->at(0)); + scoped_refptr local_stream(pc_->local_streams()->at(0)); pc_->Close(); @@ -2556,12 +2491,12 @@ TEST_F(PeerConnectionInterfaceTestPlanB, CloseAndTestMethods) { std::string sdp; ASSERT_TRUE(pc_->remote_description()->ToString(&sdp)); std::unique_ptr remote_offer( - webrtc::CreateSessionDescription(SdpType::kOffer, sdp)); + CreateSessionDescription(SdpType::kOffer, sdp)); EXPECT_FALSE(DoSetRemoteDescription(std::move(remote_offer))); ASSERT_TRUE(pc_->local_description()->ToString(&sdp)); std::unique_ptr local_offer( - webrtc::CreateSessionDescription(SdpType::kOffer, sdp)); + CreateSessionDescription(SdpType::kOffer, sdp)); EXPECT_FALSE(DoSetLocalDescription(std::move(local_offer))); } @@ -2584,7 +2519,7 @@ TEST_P(PeerConnectionInterfaceTest, UpdateRemoteStreams) { CreatePeerConnection(config); CreateAndSetRemoteOffer(GetSdpStringWithStream1()); - rtc::scoped_refptr reference(CreateStreamCollection(1, 1)); + scoped_refptr reference(CreateStreamCollection(1, 1)); EXPECT_TRUE( CompareStreamCollections(observer_.remote_streams(), reference.get())); MediaStreamInterface* remote_stream = observer_.remote_streams()->at(0); @@ -2594,7 +2529,7 @@ TEST_P(PeerConnectionInterfaceTest, UpdateRemoteStreams) { // MediaStream. CreateAndSetRemoteOffer(GetSdpStringWithStream1And2()); - rtc::scoped_refptr reference2(CreateStreamCollection(2, 1)); + scoped_refptr reference2(CreateStreamCollection(2, 1)); EXPECT_TRUE( CompareStreamCollections(observer_.remote_streams(), reference2.get())); } @@ -2619,12 +2554,12 @@ TEST_F(PeerConnectionInterfaceTestPlanB, EXPECT_TRUE(DoSetRemoteDescription(std::move(desc_ms1_two_tracks))); EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(), reference_collection_.get())); - rtc::scoped_refptr audio_track2 = + scoped_refptr audio_track2 = observer_.remote_streams()->at(0)->GetAudioTracks()[1]; - EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, audio_track2->state()); - rtc::scoped_refptr video_track2 = + EXPECT_EQ(MediaStreamTrackInterface::kLive, audio_track2->state()); + scoped_refptr video_track2 = observer_.remote_streams()->at(0)->GetVideoTracks()[1]; - EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, video_track2->state()); + EXPECT_EQ(MediaStreamTrackInterface::kLive, video_track2->state()); // Remove the extra audio and video tracks. std::unique_ptr desc_ms2 = @@ -2638,10 +2573,14 @@ TEST_F(PeerConnectionInterfaceTestPlanB, EXPECT_TRUE(CompareStreamCollections(observer_.remote_streams(), reference_collection_.get())); // Track state may be updated asynchronously. 
- EXPECT_EQ_WAIT(webrtc::MediaStreamTrackInterface::kEnded, - audio_track2->state(), kTimeout); - EXPECT_EQ_WAIT(webrtc::MediaStreamTrackInterface::kEnded, - video_track2->state(), kTimeout); + EXPECT_THAT(WaitUntil([&] { return audio_track2->state(); }, + ::testing::Eq(MediaStreamTrackInterface::kEnded), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return video_track2->state(); }, + ::testing::Eq(MediaStreamTrackInterface::kEnded), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); } // This tests that remote tracks are ended if a local session description is set @@ -2652,21 +2591,21 @@ TEST_P(PeerConnectionInterfaceTest, RejectMediaContent) { // First create and set a remote offer, then reject its video content in our // answer. CreateAndSetRemoteOffer(kSdpStringWithStream1PlanB); - auto audio_receiver = GetFirstReceiverOfType(cricket::MEDIA_TYPE_AUDIO); + auto audio_receiver = GetFirstReceiverOfType(webrtc::MediaType::AUDIO); ASSERT_TRUE(audio_receiver); - auto video_receiver = GetFirstReceiverOfType(cricket::MEDIA_TYPE_VIDEO); + auto video_receiver = GetFirstReceiverOfType(webrtc::MediaType::VIDEO); ASSERT_TRUE(video_receiver); - rtc::scoped_refptr remote_audio = + scoped_refptr remote_audio = audio_receiver->track(); - EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state()); - rtc::scoped_refptr remote_video = + EXPECT_EQ(MediaStreamTrackInterface::kLive, remote_audio->state()); + scoped_refptr remote_video = video_receiver->track(); EXPECT_EQ(MediaStreamTrackInterface::kLive, remote_video->state()); std::unique_ptr local_answer; EXPECT_TRUE(DoCreateAnswer(&local_answer, nullptr)); - cricket::ContentInfo* video_info = + ContentInfo* video_info = local_answer->description()->GetContentByName("video"); video_info->rejected = true; EXPECT_TRUE(DoSetLocalDescription(std::move(local_answer))); @@ -2679,16 +2618,20 @@ TEST_P(PeerConnectionInterfaceTest, RejectMediaContent) { video_info = local_offer->description()->GetContentByName("video"); ASSERT_TRUE(video_info != nullptr); video_info->rejected = true; - cricket::ContentInfo* audio_info = + ContentInfo* audio_info = local_offer->description()->GetContentByName("audio"); ASSERT_TRUE(audio_info != nullptr); audio_info->rejected = true; EXPECT_TRUE(DoSetLocalDescription(std::move(local_offer))); // Track state may be updated asynchronously. 
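RejectMediaContent above flips ContentInfo::rejected on the local description before applying it; pulled out as a helper it looks roughly like this (the section names "audio"/"video" come from the fixture SDP).

#include <string>

#include "api/jsep.h"
#include "pc/session_description.h"

namespace webrtc {

// Marks the named m= section as rejected in `desc` (port 0 on the wire).
// Returns false if no such section exists.
bool RejectContent(SessionDescriptionInterface* desc,
                   const std::string& content_name) {
  ContentInfo* content = desc->description()->GetContentByName(content_name);
  if (!content) {
    return false;
  }
  content->rejected = true;
  return true;
}

}  // namespace webrtc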
- EXPECT_EQ_WAIT(MediaStreamTrackInterface::kEnded, remote_audio->state(), - kTimeout); - EXPECT_EQ_WAIT(MediaStreamTrackInterface::kEnded, remote_video->state(), - kTimeout); + EXPECT_THAT(WaitUntil([&] { return remote_audio->state(); }, + ::testing::Eq(MediaStreamTrackInterface::kEnded), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return remote_video->state(); }, + ::testing::Eq(MediaStreamTrackInterface::kEnded), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); } // This tests that we won't crash if the remote track has been removed outside @@ -2703,12 +2646,12 @@ TEST_F(PeerConnectionInterfaceTestPlanB, RemoveTrackThenRejectMediaContent) { remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]); std::unique_ptr local_answer( - webrtc::CreateSessionDescription(SdpType::kAnswer, - GetSdpStringWithStream1(), nullptr)); - cricket::ContentInfo* video_info = + CreateSessionDescription(SdpType::kAnswer, GetSdpStringWithStream1(), + nullptr)); + ContentInfo* video_info = local_answer->description()->GetContentByName("video"); video_info->rejected = true; - cricket::ContentInfo* audio_info = + ContentInfo* audio_info = local_answer->description()->GetContentByName("audio"); audio_info->rejected = true; EXPECT_TRUE(DoSetLocalDescription(std::move(local_answer))); @@ -2843,15 +2786,15 @@ TEST_F(PeerConnectionInterfaceTestPlanB, // This tests that a default MediaStream is not created if a remote session // description is updated to not have any MediaStreams. // Don't run under Unified Plan since this behavior is Plan B specific. -TEST_F(PeerConnectionInterfaceTestPlanB, VerifyDefaultStreamIsNotCreated) { +TEST_F(PeerConnectionInterfaceTestPlanB, VerifyDefaultStreamIsNotRecreated) { RTCConfiguration config; CreatePeerConnection(config); CreateAndSetRemoteOffer(GetSdpStringWithStream1()); - rtc::scoped_refptr reference(CreateStreamCollection(1, 1)); + scoped_refptr reference(CreateStreamCollection(1, 1)); EXPECT_TRUE( CompareStreamCollections(observer_.remote_streams(), reference.get())); - CreateAndSetRemoteOffer(kSdpStringWithoutStreams); + CreateAndSetRemoteOffer(kSdpStringWithMsidWithoutStreams); EXPECT_EQ(0u, observer_.remote_streams()->count()); } @@ -2922,7 +2865,7 @@ TEST_F(PeerConnectionInterfaceTestPlanB, LocalDescriptionChanged) { CreatePeerConnection(config); // Create an offer with 1 stream with 2 tracks of each type. - rtc::scoped_refptr stream_collection = + scoped_refptr stream_collection = CreateStreamCollection(1, 2); pc_->AddStream(stream_collection->at(0)); std::unique_ptr offer; @@ -2959,7 +2902,7 @@ TEST_F(PeerConnectionInterfaceTestPlanB, RTCConfiguration config; CreatePeerConnection(config); - rtc::scoped_refptr stream_collection = + scoped_refptr stream_collection = CreateStreamCollection(1, 2); // Add a stream to create the offer, but remove it afterwards. pc_->AddStream(stream_collection->at(0)); @@ -2993,9 +2936,9 @@ TEST_P(PeerConnectionInterfaceTest, ASSERT_TRUE(DoCreateOffer(&offer, nullptr)); // Grab a copy of the offer before it gets passed into the PC. 
std::unique_ptr modified_offer = - webrtc::CreateSessionDescription( - webrtc::SdpType::kOffer, offer->session_id(), - offer->session_version(), offer->description()->Clone()); + CreateSessionDescription(SdpType::kOffer, offer->session_id(), + offer->session_version(), + offer->description()->Clone()); EXPECT_TRUE(DoSetLocalDescription(std::move(offer))); auto senders = pc_->GetSenders(); @@ -3004,21 +2947,20 @@ TEST_P(PeerConnectionInterfaceTest, EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0])); // Change the ssrc of the audio and video track. - cricket::MediaContentDescription* desc = - cricket::GetFirstAudioContentDescription(modified_offer->description()); - ASSERT_TRUE(desc != nullptr); - for (StreamParams& stream : desc->mutable_streams()) { - for (unsigned int& ssrc : stream.ssrcs) { - ++ssrc; - } - } - - desc = - cricket::GetFirstVideoContentDescription(modified_offer->description()); - ASSERT_TRUE(desc != nullptr); - for (StreamParams& stream : desc->mutable_streams()) { - for (unsigned int& ssrc : stream.ssrcs) { - ++ssrc; + for (auto content : modified_offer->description()->contents()) { + MediaContentDescription* desc = content.media_description(); + ASSERT_TRUE(desc); + for (StreamParams& stream : desc->mutable_streams()) { + for (unsigned int& ssrc : stream.ssrcs) { + unsigned int old_ssrc = ssrc++; + for (auto& group : stream.ssrc_groups) { + for (unsigned int& secondary_ssrc : group.ssrcs) { + if (secondary_ssrc == old_ssrc) { + secondary_ssrc = ssrc; + } + } + } + } } } @@ -3039,7 +2981,7 @@ TEST_F(PeerConnectionInterfaceTestPlanB, RTCConfiguration config; CreatePeerConnection(config); - rtc::scoped_refptr stream_collection = + scoped_refptr stream_collection = CreateStreamCollection(2, 1); pc_->AddStream(stream_collection->at(0)); std::unique_ptr offer; @@ -3052,8 +2994,8 @@ TEST_F(PeerConnectionInterfaceTestPlanB, EXPECT_TRUE(ContainsSender(senders, kVideoTracks[0], kStreams[0])); // Add a new MediaStream but with the same tracks as in the first stream. - rtc::scoped_refptr stream_1( - webrtc::MediaStream::Create(kStreams[1])); + scoped_refptr stream_1( + MediaStream::Create(kStreams[1])); stream_1->AddTrack(stream_collection->at(0)->GetVideoTracks()[0]); stream_1->AddTrack(stream_collection->at(0)->GetAudioTracks()[0]); pc_->AddStream(stream_1.get()); @@ -3156,7 +3098,13 @@ TEST_P(PeerConnectionInterfaceTest, SetConfigurationNotCausingIceRestart) { // 4. Next createOffer should initiate an ICE restart, but only for the other // m= section; it would be pointless to do an ICE restart for the m= section // that was already restarted. -TEST_P(PeerConnectionInterfaceTest, SetConfigurationCausingPartialIceRestart) { +// Disabled because work on PT assignment showed that the restart tries +// to remap an RTX payload type. +// Tracking bug for PT assignment work: https://issues.webrtc.org/360058654 +// The suspected bug is linked below. +// TODO(https://issues.webrtc.org/42233461): Fix PT assignment +TEST_P(PeerConnectionInterfaceTest, + DISABLED_SetConfigurationCausingPartialIceRestart) { PeerConnectionInterface::RTCConfiguration config; config.sdp_semantics = sdp_semantics_; config.type = PeerConnectionInterface::kRelay; @@ -3174,9 +3122,9 @@ TEST_P(PeerConnectionInterfaceTest, SetConfigurationCausingPartialIceRestart) { EXPECT_TRUE(pc_->SetConfiguration(config).ok()); // Do ICE restart for the first m= section, initiated by remote peer. 
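The rewritten SSRC-bumping loop above now also patches the secondary SSRCs inside ssrc-groups (e.g. the RTX/FID pairing) so the groups stay consistent with the renumbered primaries; as a standalone helper it looks roughly like this.

#include <cstdint>

#include "media/base/stream_params.h"
#include "pc/session_description.h"

namespace webrtc {

// Increments every SSRC in `desc` and keeps ssrc-group members pointing at
// the renumbered primary SSRCs, mirroring the loop in the test above.
void BumpAllSsrcs(SessionDescription* desc) {
  for (ContentInfo& content : desc->contents()) {
    MediaContentDescription* media = content.media_description();
    if (!media) continue;
    for (StreamParams& stream : media->mutable_streams()) {
      for (uint32_t& ssrc : stream.ssrcs) {
        uint32_t old_ssrc = ssrc++;
        for (auto& group : stream.ssrc_groups) {
          for (uint32_t& secondary_ssrc : group.ssrcs) {
            if (secondary_ssrc == old_ssrc) secondary_ssrc = ssrc;
          }
        }
      }
    }
  }
}

}  // namespace webrtc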
- std::unique_ptr remote_offer( - webrtc::CreateSessionDescription(SdpType::kOffer, - GetSdpStringWithStream1(), nullptr)); + std::unique_ptr remote_offer( + CreateSessionDescription(SdpType::kOffer, GetSdpStringWithStream1(), + nullptr)); ASSERT_TRUE(remote_offer); remote_offer->description()->transport_infos()[0].description.ice_ufrag = "modified"; @@ -3222,7 +3170,7 @@ TEST_P(PeerConnectionInterfaceTest, CurrentAndPendingDescriptions) { // Set remote pranswer. std::unique_ptr remote_pranswer( - webrtc::CreateSessionDescription(SdpType::kPrAnswer, sdp)); + CreateSessionDescription(SdpType::kPrAnswer, sdp)); SessionDescriptionInterface* remote_pranswer_ptr = remote_pranswer.get(); EXPECT_TRUE(DoSetRemoteDescription(std::move(remote_pranswer))); EXPECT_EQ(local_offer_ptr, pc_->pending_local_description()); @@ -3232,7 +3180,7 @@ TEST_P(PeerConnectionInterfaceTest, CurrentAndPendingDescriptions) { // Set remote answer. std::unique_ptr remote_answer( - webrtc::CreateSessionDescription(SdpType::kAnswer, sdp)); + CreateSessionDescription(SdpType::kAnswer, sdp)); SessionDescriptionInterface* remote_answer_ptr = remote_answer.get(); EXPECT_TRUE(DoSetRemoteDescription(std::move(remote_answer))); EXPECT_EQ(nullptr, pc_->pending_local_description()); @@ -3242,7 +3190,7 @@ TEST_P(PeerConnectionInterfaceTest, CurrentAndPendingDescriptions) { // Set remote offer. std::unique_ptr remote_offer( - webrtc::CreateSessionDescription(SdpType::kOffer, sdp)); + CreateSessionDescription(SdpType::kOffer, sdp)); SessionDescriptionInterface* remote_offer_ptr = remote_offer.get(); EXPECT_TRUE(DoSetRemoteDescription(std::move(remote_offer))); EXPECT_EQ(remote_offer_ptr, pc_->pending_remote_description()); @@ -3252,7 +3200,7 @@ TEST_P(PeerConnectionInterfaceTest, CurrentAndPendingDescriptions) { // Set local pranswer. std::unique_ptr local_pranswer( - webrtc::CreateSessionDescription(SdpType::kPrAnswer, sdp)); + CreateSessionDescription(SdpType::kPrAnswer, sdp)); SessionDescriptionInterface* local_pranswer_ptr = local_pranswer.get(); EXPECT_TRUE(DoSetLocalDescription(std::move(local_pranswer))); EXPECT_EQ(remote_offer_ptr, pc_->pending_remote_description()); @@ -3262,7 +3210,7 @@ TEST_P(PeerConnectionInterfaceTest, CurrentAndPendingDescriptions) { // Set local answer. std::unique_ptr local_answer( - webrtc::CreateSessionDescription(SdpType::kAnswer, sdp)); + CreateSessionDescription(SdpType::kAnswer, sdp)); SessionDescriptionInterface* local_answer_ptr = local_answer.get(); EXPECT_TRUE(DoSetLocalDescription(std::move(local_answer))); EXPECT_EQ(nullptr, pc_->pending_remote_description()); @@ -3281,9 +3229,8 @@ TEST_P(PeerConnectionInterfaceTest, // The RtcEventLog will be reset when the PeerConnection is closed. 
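The partial-ICE-restart test above simulates a remote-initiated restart by editing the transport description of one m= section before applying the offer; a sketch of that edit, with an arbitrary placeholder ufrag like the test's "modified". In a real restart the peer would refresh the password as well.

#include "api/jsep.h"
#include "p2p/base/transport_info.h"

namespace webrtc {

// Rewrites the ICE ufrag of the first transport in `offer`, which is how a
// remote peer signals an ICE restart for that m= section.
void ForceIceRestartOnFirstTransport(SessionDescriptionInterface* offer) {
  TransportInfo& transport = offer->description()->transport_infos()[0];
  transport.description.ice_ufrag = "modified_ufrag";
}

}  // namespace webrtc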
pc_->Close(); - EXPECT_FALSE( - pc_->StartRtcEventLog(std::make_unique(), - webrtc::RtcEventLog::kImmediateOutput)); + EXPECT_FALSE(pc_->StartRtcEventLog(std::make_unique(), + RtcEventLog::kImmediateOutput)); pc_->StopRtcEventLog(); } @@ -3297,7 +3244,7 @@ TEST_P(PeerConnectionInterfaceTest, OffersAndAnswersHaveTrickleIceOption) { options.offer_to_receive_video = 1; std::unique_ptr offer; ASSERT_TRUE(DoCreateOffer(&offer, &options)); - cricket::SessionDescription* desc = offer->description(); + SessionDescription* desc = offer->description(); ASSERT_EQ(2u, desc->transport_infos().size()); EXPECT_TRUE(desc->transport_infos()[0].description.HasOption("trickle")); EXPECT_TRUE(desc->transport_infos()[1].description.HasOption("trickle")); @@ -3326,7 +3273,7 @@ TEST_P(PeerConnectionInterfaceTest, IceRenominationNotOffered) { std::unique_ptr offer; ASSERT_TRUE(DoCreateOffer(&offer, nullptr)); - cricket::SessionDescription* desc = offer->description(); + SessionDescription* desc = offer->description(); EXPECT_EQ(1u, desc->transport_infos().size()); EXPECT_FALSE( desc->transport_infos()[0].description.GetIceParameters().renomination); @@ -3343,7 +3290,7 @@ TEST_P(PeerConnectionInterfaceTest, IceRenominationOptionInOfferAndAnswer) { std::unique_ptr offer; ASSERT_TRUE(DoCreateOffer(&offer, nullptr)); - cricket::SessionDescription* desc = offer->description(); + SessionDescription* desc = offer->description(); EXPECT_EQ(1u, desc->transport_infos().size()); EXPECT_TRUE( desc->transport_infos()[0].description.GetIceParameters().renomination); @@ -3371,9 +3318,9 @@ TEST_P(PeerConnectionInterfaceTest, CreateOfferWithOfferToReceiveConstraints) { std::unique_ptr offer; ASSERT_TRUE(DoCreateOffer(&offer, &options)); - cricket::SessionDescription* desc = offer->description(); - const cricket::ContentInfo* audio = cricket::GetFirstAudioContent(desc); - const cricket::ContentInfo* video = cricket::GetFirstVideoContent(desc); + SessionDescription* desc = offer->description(); + const ContentInfo* audio = GetFirstAudioContent(desc); + const ContentInfo* video = GetFirstVideoContent(desc); ASSERT_NE(nullptr, audio); ASSERT_NE(nullptr, video); EXPECT_FALSE(audio->rejected); @@ -3403,9 +3350,9 @@ TEST_F(PeerConnectionInterfaceTestPlanB, std::unique_ptr answer; ASSERT_TRUE(DoCreateAnswer(&answer, &options)); - cricket::SessionDescription* desc = answer->description(); - const cricket::ContentInfo* audio = cricket::GetFirstAudioContent(desc); - const cricket::ContentInfo* video = cricket::GetFirstVideoContent(desc); + SessionDescription* desc = answer->description(); + const ContentInfo* audio = GetFirstAudioContent(desc); + const ContentInfo* video = GetFirstVideoContent(desc); ASSERT_NE(nullptr, audio); ASSERT_NE(nullptr, video); EXPECT_TRUE(audio->rejected); @@ -3601,7 +3548,7 @@ TEST_P(PeerConnectionInterfaceTest, CreateOfferWithIceRestart) { std::unique_ptr offer; CreateOfferWithOptionsAsLocalDescription(&offer, rtc_options); - std::string mid = cricket::GetFirstAudioContent(offer->description())->name; + auto mid = GetFirstAudioContent(offer->description())->mid(); auto ufrag1 = offer->description()->GetTransportInfoByName(mid)->description.ice_ufrag; auto pwd1 = @@ -3643,14 +3590,14 @@ TEST_P(PeerConnectionInterfaceTest, CreateOfferWithRtpMux) { ASSERT_TRUE(offer); EXPECT_NE(nullptr, GetFirstAudioContent(offer->description())); EXPECT_NE(nullptr, GetFirstVideoContent(offer->description())); - EXPECT_TRUE(offer->description()->HasGroup(cricket::GROUP_TYPE_BUNDLE)); + 
EXPECT_TRUE(offer->description()->HasGroup(GROUP_TYPE_BUNDLE)); rtc_options.use_rtp_mux = false; offer = CreateOfferWithOptions(rtc_options); ASSERT_TRUE(offer); EXPECT_NE(nullptr, GetFirstAudioContent(offer->description())); EXPECT_NE(nullptr, GetFirstVideoContent(offer->description())); - EXPECT_FALSE(offer->description()->HasGroup(cricket::GROUP_TYPE_BUNDLE)); + EXPECT_FALSE(offer->description()->HasGroup(GROUP_TYPE_BUNDLE)); } // This test ensures OnRenegotiationNeeded is called when we add track with @@ -3662,30 +3609,42 @@ TEST_P(PeerConnectionInterfaceTest, CreateOfferWithRtpMux) { TEST_F(PeerConnectionInterfaceTestPlanB, MediaStreamAddTrackRemoveTrackRenegotiate) { CreatePeerConnectionWithoutDtls(); - rtc::scoped_refptr stream( + scoped_refptr stream( pc_factory_->CreateLocalMediaStream(kStreamId1)); pc_->AddStream(stream.get()); - rtc::scoped_refptr audio_track( + scoped_refptr audio_track( CreateAudioTrack("audio_track")); - rtc::scoped_refptr video_track( + scoped_refptr video_track( CreateVideoTrack("video_track")); stream->AddTrack(audio_track); - EXPECT_TRUE_WAIT(observer_.renegotiation_needed_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return observer_.renegotiation_needed_; }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); observer_.renegotiation_needed_ = false; CreateOfferReceiveAnswer(); stream->AddTrack(video_track); - EXPECT_TRUE_WAIT(observer_.renegotiation_needed_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return observer_.renegotiation_needed_; }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); observer_.renegotiation_needed_ = false; CreateOfferReceiveAnswer(); stream->RemoveTrack(audio_track); - EXPECT_TRUE_WAIT(observer_.renegotiation_needed_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return observer_.renegotiation_needed_; }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); observer_.renegotiation_needed_ = false; CreateOfferReceiveAnswer(); stream->RemoveTrack(video_track); - EXPECT_TRUE_WAIT(observer_.renegotiation_needed_, kTimeout); + EXPECT_THAT(WaitUntil([&] { return observer_.renegotiation_needed_; }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kTimeout)}), + IsRtcOk()); observer_.renegotiation_needed_ = false; } @@ -3740,11 +3699,11 @@ TEST_P(PeerConnectionInterfaceTest, CreatePeerConnection(); AddVideoTrack("video_label"); - std::vector> rtp_senders = + std::vector> rtp_senders = pc_->GetSenders(); ASSERT_EQ(rtp_senders.size(), 1u); - ASSERT_EQ(rtp_senders[0]->media_type(), cricket::MEDIA_TYPE_VIDEO); - rtc::scoped_refptr video_rtp_sender = rtp_senders[0]; + ASSERT_EQ(rtp_senders[0]->media_type(), webrtc::MediaType::VIDEO); + scoped_refptr video_rtp_sender = rtp_senders[0]; RtpParameters parameters = video_rtp_sender->GetParameters(); ASSERT_NE(parameters.degradation_preference, DegradationPreference::MAINTAIN_RESOLUTION); @@ -3771,8 +3730,7 @@ class PeerConnectionMediaConfigTest : public ::testing::Test { void SetUp() override { pcf_ = PeerConnectionFactoryForTest::CreatePeerConnectionFactoryForTest(); } - const cricket::MediaConfig TestCreatePeerConnection( - const RTCConfiguration& config) { + MediaConfig TestCreatePeerConnection(const RTCConfiguration& config) { PeerConnectionDependencies pc_dependencies(&observer_); auto result = pcf_->CreatePeerConnectionOrError(config, std::move(pc_dependencies)); @@ -3781,7 +3739,7 @@ class PeerConnectionMediaConfigTest : public ::testing::Test { return 
result.value()->GetConfiguration().media_config; } - rtc::scoped_refptr pcf_; + scoped_refptr pcf_; MockPeerConnectionObserver observer_; }; @@ -3804,7 +3762,7 @@ TEST_F(PeerConnectionMediaConfigTest, TestDefaults) { PeerConnectionInterface::RTCConfiguration config; config.sdp_semantics = SdpSemantics::kUnifiedPlan; - const cricket::MediaConfig& media_config = TestCreatePeerConnection(config); + const MediaConfig& media_config = TestCreatePeerConnection(config); EXPECT_TRUE(media_config.enable_dscp); EXPECT_TRUE(media_config.video.enable_cpu_adaptation); @@ -3820,7 +3778,7 @@ TEST_F(PeerConnectionMediaConfigTest, TestDisablePrerendererSmoothingTrue) { config.sdp_semantics = SdpSemantics::kUnifiedPlan; config.set_prerenderer_smoothing(false); - const cricket::MediaConfig& media_config = TestCreatePeerConnection(config); + const MediaConfig& media_config = TestCreatePeerConnection(config); EXPECT_FALSE(media_config.video.enable_prerenderer_smoothing); } @@ -3832,7 +3790,7 @@ TEST_F(PeerConnectionMediaConfigTest, TestEnableExperimentCpuLoadEstimator) { config.sdp_semantics = SdpSemantics::kUnifiedPlan; config.set_experiment_cpu_load_estimator(true); - const cricket::MediaConfig& media_config = TestCreatePeerConnection(config); + const MediaConfig& media_config = TestCreatePeerConnection(config); EXPECT_TRUE(media_config.video.experiment_cpu_load_estimator); } diff --git a/pc/peer_connection_internal.h b/pc/peer_connection_internal.h index c91a44a148..daa0649fb4 100644 --- a/pc/peer_connection_internal.h +++ b/pc/peer_connection_internal.h @@ -13,19 +13,41 @@ #include #include +#include #include #include #include -#include "absl/types/optional.h" +#include "absl/strings/string_view.h" +#include "api/audio/audio_device.h" +#include "api/candidate.h" +#include "api/crypto/crypto_options.h" +#include "api/data_channel_interface.h" +#include "api/field_trials_view.h" +#include "api/jsep.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/sctp_transport_interface.h" #include "call/call.h" -#include "modules/audio_device/include/audio_device.h" +#include "call/payload_type_picker.h" +#include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" +#include "pc/data_channel_utils.h" #include "pc/jsep_transport_controller.h" #include "pc/peer_connection_message_handler.h" #include "pc/rtp_transceiver.h" #include "pc/rtp_transmission_manager.h" -#include "pc/sctp_data_channel.h" +#include "pc/session_description.h" +#include "pc/transport_stats.h" +#include "pc/usage_pattern.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/thread.h" namespace webrtc { @@ -47,7 +69,7 @@ class PeerConnectionSdpMethods { // bundling, returns false. virtual bool NeedsIceRestart(const std::string& content_name) const = 0; - virtual absl::optional sctp_mid() const = 0; + virtual std::optional sctp_mid() const = 0; // Functions below this comment are known to only be accessed // from SdpOfferAnswerHandler. 
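The MediaConfig checks above read flags back via GetConfiguration().media_config; a sketch of the configuration side, using the two setters called by the tests (how they map onto media_config is inferred from the assertions above).

#include "api/peer_connection_interface.h"

namespace webrtc {

// Configuration exercised by the MediaConfig tests: disable prerenderer
// smoothing and enable the CPU load estimator experiment.
PeerConnectionInterface::RTCConfiguration MakeMediaTunedConfig() {
  PeerConnectionInterface::RTCConfiguration config;
  config.sdp_semantics = SdpSemantics::kUnifiedPlan;
  config.set_prerenderer_smoothing(false);
  config.set_experiment_cpu_load_estimator(true);
  return config;
}

}  // namespace webrtc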
@@ -72,11 +94,11 @@ class PeerConnectionSdpMethods { virtual JsepTransportController* transport_controller_s() = 0; virtual JsepTransportController* transport_controller_n() = 0; virtual DataChannelController* data_channel_controller() = 0; - virtual cricket::PortAllocator* port_allocator() = 0; + virtual PortAllocator* port_allocator() = 0; virtual LegacyStatsCollector* legacy_stats() = 0; // Returns the observer. Will crash on CHECK if the observer is removed. virtual PeerConnectionObserver* Observer() const = 0; - virtual absl::optional GetSctpSslRole_n() = 0; + virtual std::optional GetSctpSslRole_n() = 0; virtual PeerConnectionInterface::IceConnectionState ice_connection_state_internal() = 0; virtual void SetIceConnectionState( @@ -91,46 +113,51 @@ class PeerConnectionSdpMethods { // sufficient time has passed. virtual bool IsUnifiedPlan() const = 0; virtual bool ValidateBundleSettings( - const cricket::SessionDescription* desc, - const std::map& + const SessionDescription* desc, + const std::map& bundle_groups_by_mid) = 0; - virtual absl::optional GetDataMid() const = 0; // Internal implementation for AddTransceiver family of methods. If // `fire_callback` is set, fires OnRenegotiationNeeded callback if successful. - virtual RTCErrorOr> - AddTransceiver(cricket::MediaType media_type, - rtc::scoped_refptr track, - const RtpTransceiverInit& init, - bool fire_callback = true) = 0; + virtual RTCErrorOr> AddTransceiver( + webrtc::MediaType media_type, + scoped_refptr track, + const RtpTransceiverInit& init, + bool fire_callback = true) = 0; // Asynchronously calls SctpTransport::Start() on the network thread for // `sctp_mid()` if set. Called as part of setting the local description. + virtual RTCError StartSctpTransport(const SctpOptions& options) = 0; + [[deprecated("Call with SctpOptions")]] virtual void StartSctpTransport(int local_port, int remote_port, - int max_message_size) = 0; + int max_message_size) { + StartSctpTransport({.local_port = local_port, + .remote_port = remote_port, + .max_message_size = max_message_size}); + } // Asynchronously adds a remote candidate on the network thread. - virtual void AddRemoteCandidate(const std::string& mid, - const cricket::Candidate& candidate) = 0; + virtual void AddRemoteCandidate(absl::string_view mid, + const Candidate& candidate) = 0; virtual Call* call_ptr() = 0; // Returns true if SRTP (either using DTLS-SRTP or SDES) is required by // this session. virtual bool SrtpRequired() const = 0; - // Configures the data channel transport on the network thread. - // The return value will be unset if an error occurs. If the setup succeeded - // the return value will be set and contain the name of the transport - // (empty string if a name isn't available). - virtual absl::optional SetupDataChannelTransport_n( - absl::string_view mid) = 0; - virtual void TeardownDataChannelTransport_n(RTCError error) = 0; - virtual void SetSctpDataInfo(absl::string_view mid, - absl::string_view transport_name) = 0; - virtual void ResetSctpDataInfo() = 0; - + // Initializes the data channel transport for the peerconnection instance. + // This will have the effect that `sctp_mid()` and `sctp_transport_name()` + // will return a set value (even though it might be an empty string) and the + // dc transport will be initialized on the network thread. + virtual bool CreateDataChannelTransport(absl::string_view mid) = 0; + // Tears down the data channel transport state and clears the `sctp_mid()` and + // `sctp_transport_name()` properties. 
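StartSctpTransport() now takes a single SctpOptions struct, and the deprecated three-argument overload above just forwards with designated initializers. A sketch of a migrated caller; the port and message-size values are placeholders, and SctpOptions is assumed to be visible through the internal header shown above.

#include "api/rtc_error.h"
#include "pc/peer_connection_internal.h"

namespace webrtc {

RTCError StartDefaultSctpTransport(PeerConnectionSdpMethods* pc) {
  // Old call: pc->StartSctpTransport(5000, 5000, 262144);
  // New call, equivalent via the deprecated shim above:
  return pc->StartSctpTransport(
      {.local_port = 5000, .remote_port = 5000, .max_message_size = 262144});
}

}  // namespace webrtc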
+ virtual void DestroyDataChannelTransport(RTCError error) = 0; virtual const FieldTrialsView& trials() const = 0; virtual void ClearStatsCache() = 0; + // Keeps track of assigned payload types and comes up with reasonable + // suggestions when new PTs need to be assigned. + virtual PayloadTypePicker& payload_type_picker() = 0; }; // Functions defined in this class are called by other objects, @@ -138,14 +165,14 @@ class PeerConnectionSdpMethods { class PeerConnectionInternal : public PeerConnectionInterface, public PeerConnectionSdpMethods { public: - virtual rtc::Thread* network_thread() const = 0; - virtual rtc::Thread* worker_thread() const = 0; + virtual Thread* network_thread() const = 0; + virtual Thread* worker_thread() const = 0; // Returns true if we were the initial offerer. virtual bool initial_offerer() const = 0; virtual std::vector< - rtc::scoped_refptr>> + scoped_refptr>> GetTransceiversInternal() const = 0; // Call on the network thread to fetch stats for all the data channels. @@ -154,32 +181,31 @@ class PeerConnectionInternal : public PeerConnectionInterface, return {}; } - virtual absl::optional sctp_transport_name() const = 0; + virtual std::optional sctp_transport_name() const = 0; - virtual cricket::CandidateStatsList GetPooledCandidateStats() const = 0; + virtual CandidateStatsList GetPooledCandidateStats() const = 0; // Returns a map from transport name to transport stats for all given // transport names. // Must be called on the network thread. - virtual std::map - GetTransportStatsByNames(const std::set& transport_names) = 0; + virtual std::map GetTransportStatsByNames( + const std::set& transport_names) = 0; virtual Call::Stats GetCallStats() = 0; - virtual absl::optional GetAudioDeviceStats() = 0; + virtual std::optional GetAudioDeviceStats() = 0; virtual bool GetLocalCertificate( const std::string& transport_name, - rtc::scoped_refptr* certificate) = 0; - virtual std::unique_ptr GetRemoteSSLCertChain( + scoped_refptr* certificate) = 0; + virtual std::unique_ptr GetRemoteSSLCertChain( const std::string& transport_name) = 0; // Returns true if there was an ICE restart initiated by the remote offer. virtual bool IceRestartPending(const std::string& content_name) const = 0; // Get SSL role for an arbitrary m= section (handles bundling correctly). - virtual bool GetSslRole(const std::string& content_name, - rtc::SSLRole* role) = 0; + virtual bool GetSslRole(const std::string& content_name, SSLRole* role) = 0; // Functions needed by DataChannelController virtual void NoteDataAddedEvent() {} // Handler for sctp data channel state changes. 
diff --git a/pc/peer_connection_jsep_unittest.cc b/pc/peer_connection_jsep_unittest.cc index 1369253ad6..27aba158e4 100644 --- a/pc/peer_connection_jsep_unittest.cc +++ b/pc/peer_connection_jsep_unittest.cc @@ -13,15 +13,14 @@ #include #include #include +#include #include #include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/call/call_factory_interface.h" -#include "api/field_trials_view.h" +#include "api/enable_media_with_defaults.h" #include "api/jsep.h" #include "api/media_stream_interface.h" #include "api/media_types.h" @@ -34,24 +33,15 @@ #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" #include "api/task_queue/default_task_queue_factory.h" -#include "api/task_queue/task_queue_factory.h" #include "api/transport/field_trial_based_config.h" -#include "api/transport/sctp_transport_factory_interface.h" -#include "media/base/media_engine.h" #include "media/base/stream_params.h" -#include "media/engine/webrtc_media_engine.h" -#include "media/engine/webrtc_media_engine_defaults.h" -#include "modules/audio_device/include/audio_device.h" #include "p2p/base/p2p_constants.h" -#include "p2p/base/port_allocator.h" #include "p2p/base/transport_info.h" -#include "pc/channel_interface.h" #include "pc/media_session.h" #include "pc/peer_connection_wrapper.h" #include "pc/sdp_utils.h" #include "pc/session_description.h" #include "pc/test/mock_peer_connection_observers.h" -#include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/thread.h" #include "test/gtest.h" #ifdef WEBRTC_ANDROID @@ -70,7 +60,7 @@ namespace webrtc { -using cricket::MediaContentDescription; +using webrtc::MediaContentDescription; using RTCConfiguration = PeerConnectionInterface::RTCConfiguration; using ::testing::Combine; using ::testing::ElementsAre; @@ -79,18 +69,13 @@ using ::testing::Values; PeerConnectionFactoryDependencies CreatePeerConnectionFactoryDependencies() { PeerConnectionFactoryDependencies dependencies; - dependencies.worker_thread = rtc::Thread::Current(); - dependencies.network_thread = rtc::Thread::Current(); - dependencies.signaling_thread = rtc::Thread::Current(); + dependencies.worker_thread = Thread::Current(); + dependencies.network_thread = Thread::Current(); + dependencies.signaling_thread = Thread::Current(); dependencies.task_queue_factory = CreateDefaultTaskQueueFactory(); dependencies.trials = std::make_unique(); - cricket::MediaEngineDependencies media_deps; - media_deps.task_queue_factory = dependencies.task_queue_factory.get(); - media_deps.adm = FakeAudioCaptureModule::Create(); - media_deps.trials = dependencies.trials.get(); - SetMediaEngineDefaults(&media_deps); - dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_deps)); - dependencies.call_factory = CreateCallFactory(); + dependencies.adm = FakeAudioCaptureModule::Create(); + EnableMediaWithDefaults(dependencies); dependencies.sctp_factory = std::make_unique(); return dependencies; } @@ -100,7 +85,7 @@ class PeerConnectionJsepTest : public ::testing::Test { typedef std::unique_ptr WrapperPtr; PeerConnectionJsepTest() - : vss_(new rtc::VirtualSocketServer()), main_(vss_.get()) { + : vss_(new VirtualSocketServer()), main_(vss_.get()) { #ifdef WEBRTC_ANDROID InitializeAndroidObjects(); #endif @@ -113,7 +98,7 @@ class PeerConnectionJsepTest : public ::testing::Test { } WrapperPtr CreatePeerConnection(const RTCConfiguration& config) { - rtc::scoped_refptr pc_factory = + scoped_refptr pc_factory = CreateModularPeerConnectionFactory( 
CreatePeerConnectionFactoryDependencies()); auto observer = std::make_unique(); @@ -128,8 +113,8 @@ class PeerConnectionJsepTest : public ::testing::Test { pc_factory, result.MoveValue(), std::move(observer)); } - std::unique_ptr vss_; - rtc::AutoSocketServerThread main_; + std::unique_ptr vss_; + AutoSocketServerThread main_; }; // Tests for JSEP initial offer generation. @@ -147,24 +132,24 @@ TEST_F(PeerConnectionJsepTest, EmptyInitialOffer) { // section. TEST_F(PeerConnectionJsepTest, AudioOnlyInitialOffer) { auto caller = CreatePeerConnection(); - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + caller->AddTransceiver(webrtc::MediaType::AUDIO); auto offer = caller->CreateOffer(); auto contents = offer->description()->contents(); ASSERT_EQ(1u, contents.size()); - EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, contents[0].media_description()->type()); + EXPECT_EQ(webrtc::MediaType::AUDIO, contents[0].media_description()->type()); } // Test than an initial offer with one video track generates one video media // section TEST_F(PeerConnectionJsepTest, VideoOnlyInitialOffer) { auto caller = CreatePeerConnection(); - caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + caller->AddTransceiver(webrtc::MediaType::VIDEO); auto offer = caller->CreateOffer(); auto contents = offer->description()->contents(); ASSERT_EQ(1u, contents.size()); - EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, contents[0].media_description()->type()); + EXPECT_EQ(webrtc::MediaType::VIDEO, contents[0].media_description()->type()); } // Test that an initial offer with one data channel generates one data media @@ -176,7 +161,7 @@ TEST_F(PeerConnectionJsepTest, DataOnlyInitialOffer) { auto offer = caller->CreateOffer(); auto contents = offer->description()->contents(); ASSERT_EQ(1u, contents.size()); - EXPECT_EQ(cricket::MEDIA_TYPE_DATA, contents[0].media_description()->type()); + EXPECT_EQ(webrtc::MediaType::DATA, contents[0].media_description()->type()); } // Test that creating multiple data channels only results in one data section @@ -196,11 +181,11 @@ TEST_F(PeerConnectionJsepTest, MultipleDataChannelsCreateOnlyOneDataSection) { // JSEP section 5.2.1. 
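EnableMediaWithDefaults() replaces the hand-rolled media-engine and call-factory wiring removed above; a sketch of the trimmed-down dependency setup, using the same single-threaded layout as the JSEP fixture (the test-only ADM, field trials and SCTP factory it also sets are omitted here).

#include "api/enable_media_with_defaults.h"
#include "api/peer_connection_interface.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "rtc_base/thread.h"

namespace webrtc {

PeerConnectionFactoryDependencies MakeTestFactoryDependencies() {
  PeerConnectionFactoryDependencies deps;
  // One thread for everything, as in the test fixture.
  deps.signaling_thread = Thread::Current();
  deps.worker_thread = Thread::Current();
  deps.network_thread = Thread::Current();
  deps.task_queue_factory = CreateDefaultTaskQueueFactory();
  // Fills in the media engine, call factory, codec factories, etc.
  EnableMediaWithDefaults(deps);
  return deps;
}

}  // namespace webrtc

The result is then passed to CreateModularPeerConnectionFactory(), as in the fixture's CreatePeerConnection() above.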
TEST_F(PeerConnectionJsepTest, MediaSectionsInInitialOfferOrderedCorrectly) { auto caller = CreatePeerConnection(); - caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + caller->AddTransceiver(webrtc::MediaType::VIDEO); + caller->AddTransceiver(webrtc::MediaType::AUDIO); RtpTransceiverInit init; init.direction = RtpTransceiverDirection::kSendOnly; - caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init); + caller->AddTransceiver(webrtc::MediaType::VIDEO, init); auto offer = caller->CreateOffer(); auto contents = offer->description()->contents(); @@ -208,19 +193,19 @@ TEST_F(PeerConnectionJsepTest, MediaSectionsInInitialOfferOrderedCorrectly) { const MediaContentDescription* media_description1 = contents[0].media_description(); - EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, media_description1->type()); + EXPECT_EQ(webrtc::MediaType::VIDEO, media_description1->type()); EXPECT_EQ(RtpTransceiverDirection::kSendRecv, media_description1->direction()); const MediaContentDescription* media_description2 = contents[1].media_description(); - EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, media_description2->type()); + EXPECT_EQ(webrtc::MediaType::AUDIO, media_description2->type()); EXPECT_EQ(RtpTransceiverDirection::kSendRecv, media_description2->direction()); const MediaContentDescription* media_description3 = contents[2].media_description(); - EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, media_description3->type()); + EXPECT_EQ(webrtc::MediaType::VIDEO, media_description3->type()); EXPECT_EQ(RtpTransceiverDirection::kSendOnly, media_description3->direction()); } @@ -228,19 +213,19 @@ TEST_F(PeerConnectionJsepTest, MediaSectionsInInitialOfferOrderedCorrectly) { // Test that media sections in the initial offer have different mids. 
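A sketch of the AddTransceiver() calls driving the ordering test above: m= sections come out of CreateOffer() in the order the transceivers were added, with the per-transceiver direction taken from RtpTransceiverInit.

#include "api/media_types.h"
#include "api/peer_connection_interface.h"
#include "api/rtp_transceiver_interface.h"

namespace webrtc {

// Adds video, audio, then a send-only video transceiver; returns true if all
// three additions succeeded.
bool AddOrderedTransceivers(PeerConnectionInterface* pc) {
  auto video = pc->AddTransceiver(MediaType::VIDEO);
  auto audio = pc->AddTransceiver(MediaType::AUDIO);

  RtpTransceiverInit send_only;
  send_only.direction = RtpTransceiverDirection::kSendOnly;
  auto second_video = pc->AddTransceiver(MediaType::VIDEO, send_only);

  return video.ok() && audio.ok() && second_video.ok();
}

}  // namespace webrtc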
TEST_F(PeerConnectionJsepTest, MediaSectionsInInitialOfferHaveDifferentMids) { auto caller = CreatePeerConnection(); - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + caller->AddTransceiver(webrtc::MediaType::AUDIO); + caller->AddTransceiver(webrtc::MediaType::AUDIO); auto offer = caller->CreateOffer(); auto contents = offer->description()->contents(); ASSERT_EQ(2u, contents.size()); - EXPECT_NE(contents[0].name, contents[1].name); + EXPECT_NE(contents[0].mid(), contents[1].mid()); } TEST_F(PeerConnectionJsepTest, StoppedTransceiverHasNoMediaSectionInInitialOffer) { auto caller = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); transceiver->StopInternal(); auto offer = caller->CreateOffer(); @@ -261,12 +246,12 @@ TEST_F(PeerConnectionJsepTest, SetLocalEmptyOfferCreatesNoTransceivers) { TEST_F(PeerConnectionJsepTest, SetLocalOfferSetsTransceiverMid) { auto caller = CreatePeerConnection(); - auto audio_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - auto video_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + auto audio_transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); + auto video_transceiver = caller->AddTransceiver(webrtc::MediaType::VIDEO); auto offer = caller->CreateOffer(); - std::string audio_mid = offer->description()->contents()[0].name; - std::string video_mid = offer->description()->contents()[1].name; + auto audio_mid = offer->description()->contents()[0].mid(); + auto video_mid = offer->description()->contents()[1].mid(); ASSERT_TRUE(caller->SetLocalDescription(std::move(offer))); @@ -280,8 +265,8 @@ TEST_F(PeerConnectionJsepTest, SetLocalOfferSetsTransceiverMid) { // transceivers, one for receiving audio and one for receiving video. 
TEST_F(PeerConnectionJsepTest, SetRemoteOfferCreatesTransceivers) { auto caller = CreatePeerConnection(); - auto caller_audio = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - auto caller_video = caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + auto caller_audio = caller->AddTransceiver(webrtc::MediaType::AUDIO); + auto caller_video = caller->AddTransceiver(webrtc::MediaType::VIDEO); auto callee = CreatePeerConnection(); ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); @@ -289,12 +274,12 @@ TEST_F(PeerConnectionJsepTest, SetRemoteOfferCreatesTransceivers) { auto transceivers = callee->pc()->GetTransceivers(); ASSERT_EQ(2u, transceivers.size()); - EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, transceivers[0]->media_type()); + EXPECT_EQ(webrtc::MediaType::AUDIO, transceivers[0]->media_type()); EXPECT_EQ(caller_audio->mid(), transceivers[0]->mid()); EXPECT_EQ(RtpTransceiverDirection::kRecvOnly, transceivers[0]->direction()); EXPECT_EQ(0u, transceivers[0]->sender()->stream_ids().size()); - EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, transceivers[1]->media_type()); + EXPECT_EQ(webrtc::MediaType::VIDEO, transceivers[1]->media_type()); EXPECT_EQ(caller_video->mid(), transceivers[1]->mid()); EXPECT_EQ(RtpTransceiverDirection::kRecvOnly, transceivers[1]->direction()); EXPECT_EQ(0u, transceivers[1]->sender()->stream_ids().size()); @@ -337,7 +322,7 @@ TEST_F(PeerConnectionJsepTest, auto transceivers = callee->pc()->GetTransceivers(); ASSERT_EQ(2u, transceivers.size()); - EXPECT_EQ(absl::nullopt, transceivers[0]->mid()); + EXPECT_EQ(std::nullopt, transceivers[0]->mid()); EXPECT_EQ(caller_audio->mid(), transceivers[1]->mid()); } @@ -349,13 +334,13 @@ TEST_F(PeerConnectionJsepTest, auto caller = CreatePeerConnection(); caller->AddAudioTrack("a"); auto callee = CreatePeerConnection(); - auto transceiver = callee->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = callee->AddTransceiver(webrtc::MediaType::AUDIO); ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); auto transceivers = callee->pc()->GetTransceivers(); ASSERT_EQ(2u, transceivers.size()); - EXPECT_EQ(absl::nullopt, transceivers[0]->mid()); + EXPECT_EQ(std::nullopt, transceivers[0]->mid()); EXPECT_EQ(caller->pc()->GetTransceivers()[0]->mid(), transceivers[1]->mid()); EXPECT_EQ(MediaStreamTrackInterface::kAudioKind, transceivers[1]->receiver()->track()->kind()); @@ -374,7 +359,7 @@ TEST_F(PeerConnectionJsepTest, auto transceivers = callee->pc()->GetTransceivers(); ASSERT_EQ(2u, transceivers.size()); - EXPECT_EQ(absl::nullopt, transceivers[0]->mid()); + EXPECT_EQ(std::nullopt, transceivers[0]->mid()); EXPECT_EQ(caller->pc()->GetTransceivers()[0]->mid(), transceivers[1]->mid()); EXPECT_EQ(MediaStreamTrackInterface::kAudioKind, transceivers[1]->receiver()->track()->kind()); @@ -429,14 +414,14 @@ TEST_F(PeerConnectionJsepTest, SetRemoteOfferReusesTransceiversOfBothTypes) { // offered media in the same order and with the same mids. 
 TEST_F(PeerConnectionJsepTest, CreateAnswerHasSameMidsAsOffer) {
   auto caller = CreatePeerConnection();
-  auto first_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
-  auto second_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
-  auto third_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+  auto first_transceiver = caller->AddTransceiver(webrtc::MediaType::VIDEO);
+  auto second_transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO);
+  auto third_transceiver = caller->AddTransceiver(webrtc::MediaType::VIDEO);
   caller->CreateDataChannel("dc");
   auto callee = CreatePeerConnection();
   auto offer = caller->CreateOffer();
-  const auto* offer_data = cricket::GetFirstDataContent(offer->description());
+  const auto* offer_data = GetFirstDataContent(offer->description());
   ASSERT_TRUE(
       caller->SetLocalDescription(CloneSessionDescription(offer.get())));
   ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
@@ -444,14 +429,14 @@ TEST_F(PeerConnectionJsepTest, CreateAnswerHasSameMidsAsOffer) {
   auto answer = callee->CreateAnswer();
   auto contents = answer->description()->contents();
   ASSERT_EQ(4u, contents.size());
-  EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, contents[0].media_description()->type());
-  EXPECT_EQ(first_transceiver->mid(), contents[0].name);
-  EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, contents[1].media_description()->type());
-  EXPECT_EQ(second_transceiver->mid(), contents[1].name);
-  EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, contents[2].media_description()->type());
-  EXPECT_EQ(third_transceiver->mid(), contents[2].name);
-  EXPECT_EQ(cricket::MEDIA_TYPE_DATA, contents[3].media_description()->type());
-  EXPECT_EQ(offer_data->name, contents[3].name);
+  EXPECT_EQ(webrtc::MediaType::VIDEO, contents[0].media_description()->type());
+  EXPECT_EQ(first_transceiver->mid(), contents[0].mid());
+  EXPECT_EQ(webrtc::MediaType::AUDIO, contents[1].media_description()->type());
+  EXPECT_EQ(second_transceiver->mid(), contents[1].mid());
+  EXPECT_EQ(webrtc::MediaType::VIDEO, contents[2].media_description()->type());
+  EXPECT_EQ(third_transceiver->mid(), contents[2].mid());
+  EXPECT_EQ(webrtc::MediaType::DATA, contents[3].media_description()->type());
+  EXPECT_EQ(offer_data->mid(), contents[3].mid());
 }
 // Test that an answering media section is marked as rejected if the underlying
@@ -478,7 +463,7 @@ TEST_F(PeerConnectionJsepTest, CreateAnswerNegotiatesDirection) {
   auto caller = CreatePeerConnection();
   RtpTransceiverInit init;
   init.direction = RtpTransceiverDirection::kSendOnly;
-  caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init);
+  caller->AddTransceiver(webrtc::MediaType::AUDIO, init);
   auto callee = CreatePeerConnection();
   callee->AddAudioTrack("a");
@@ -499,7 +484,7 @@ TEST_F(PeerConnectionJsepTest, CreateAnswerNegotiatesDirection) {
 // property of the transceivers mentioned in the session description.
 TEST_F(PeerConnectionJsepTest, SetLocalAnswerUpdatesCurrentDirection) {
   auto caller = CreatePeerConnection();
-  auto caller_audio = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  auto caller_audio = caller->AddTransceiver(webrtc::MediaType::AUDIO);
   caller_audio->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly);
   auto callee = CreatePeerConnection();
   callee->AddAudioTrack("a");
@@ -543,7 +528,7 @@ TEST_F(PeerConnectionJsepTest, SetRemoteAnswerUpdatesCurrentDirection) {
 TEST_F(PeerConnectionJsepTest,
        ChangeDirectionFromRecvOnlyToSendRecvDoesNotBreakVideoNegotiation) {
   auto caller = CreatePeerConnection();
-  auto caller_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO);
+  auto caller_transceiver = caller->AddTransceiver(webrtc::MediaType::VIDEO);
   auto callee = CreatePeerConnection();
   caller_transceiver->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly);
@@ -561,7 +546,7 @@ TEST_F(PeerConnectionJsepTest,
 TEST_F(PeerConnectionJsepTest,
        ChangeDirectionFromRecvOnlyToSendRecvDoesNotBreakAudioNegotiation) {
   auto caller = CreatePeerConnection();
-  auto caller_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  auto caller_transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO);
   auto callee = CreatePeerConnection();
   caller_transceiver->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly);
@@ -602,7 +587,7 @@ TEST_F(PeerConnectionJsepTest, SettingTransceiverInactiveDoesNotStopIt) {
 TEST_F(PeerConnectionJsepTest,
        ReOfferMediaSectionForAssociatedStoppedTransceiverIsRejected) {
   auto caller = CreatePeerConnection();
-  auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO);
   auto callee = CreatePeerConnection();
   ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
@@ -624,7 +609,7 @@ TEST_F(PeerConnectionJsepTest,
 TEST_F(PeerConnectionJsepTest,
        StoppingTransceiverInOfferStopsTransceiverOnRemoteSide) {
   auto caller = CreatePeerConnection();
-  auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO);
   auto callee = CreatePeerConnection();
   ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
@@ -647,14 +632,14 @@ TEST_F(PeerConnectionJsepTest,
 TEST_F(PeerConnectionJsepTest,
        CreateOfferDoesNotRecycleMediaSectionIfFirstStopped) {
   auto caller = CreatePeerConnection();
-  auto first_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  auto first_transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO);
   auto callee = CreatePeerConnection();
   ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
   ASSERT_TRUE(
       caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
-  auto second_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  auto second_transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO);
   first_transceiver->StopInternal();
   auto reoffer = caller->CreateOffer();
@@ -670,7 +655,7 @@ TEST_F(PeerConnectionJsepTest,
 TEST_F(PeerConnectionJsepTest,
        RecycleMediaSectionWhenStoppingTransceiverOnAnswerer) {
   auto caller = CreatePeerConnection();
-  auto first_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  auto first_transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO);
   auto callee = CreatePeerConnection();
   ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()));
@@ -682,7 +667,7 @@ TEST_F(PeerConnectionJsepTest,
       caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()));
   EXPECT_TRUE(first_transceiver->stopped());
   // First transceivers are dissociated on caller side.
-  ASSERT_EQ(absl::nullopt, first_transceiver->mid());
+  ASSERT_EQ(std::nullopt, first_transceiver->mid());
   // They are disassociated on callee side.
   ASSERT_EQ(0u, callee->pc()->GetTransceivers().size());
@@ -692,7 +677,7 @@ TEST_F(PeerConnectionJsepTest,
   callee->AddAudioTrack("audio2");
   auto offer = caller->CreateOffer();
   auto offer_contents = offer->description()->contents();
-  std::string second_mid = offer_contents[0].name;
+  auto second_mid = offer_contents[0].mid();
   ASSERT_EQ(1u, offer_contents.size());
   EXPECT_FALSE(offer_contents[0].rejected);
   EXPECT_NE(first_mid, second_mid);
@@ -701,7 +686,7 @@ TEST_F(PeerConnectionJsepTest,
   // associate the new transceivers.
   ASSERT_TRUE(
       caller->SetLocalDescription(CloneSessionDescription(offer.get())));
-  EXPECT_EQ(absl::nullopt, first_transceiver->mid());
+  EXPECT_EQ(std::nullopt, first_transceiver->mid());
   ASSERT_EQ(1u, caller->pc()->GetTransceivers().size());
   EXPECT_EQ(second_mid, caller->pc()->GetTransceivers()[0]->mid());
   ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer)));
@@ -713,7 +698,7 @@ TEST_F(PeerConnectionJsepTest,
   auto answer_contents = answer->description()->contents();
   ASSERT_EQ(1u, answer_contents.size());
   EXPECT_FALSE(answer_contents[0].rejected);
-  EXPECT_EQ(second_mid, answer_contents[0].name);
+  EXPECT_EQ(second_mid, answer_contents[0].mid());
   // Finishing the negotiation shouldn't add or dissociate any transceivers.
   ASSERT_TRUE(
@@ -732,7 +717,7 @@ TEST_F(PeerConnectionJsepTest,
 TEST_F(PeerConnectionJsepTest, CreateOfferRecyclesWhenOfferingTwice) {
   // Do a negotiation with a port 0 for the media section.
   auto caller = CreatePeerConnection();
-  auto first_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  auto first_transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO);
   auto callee = CreatePeerConnection();
   ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
   first_transceiver->StopInternal();
@@ -748,7 +733,7 @@ TEST_F(PeerConnectionJsepTest, CreateOfferRecyclesWhenOfferingTwice) {
   ASSERT_TRUE(caller->SetLocalDescription(std::move(offer)));
   ASSERT_EQ(1u, caller->pc()->GetTransceivers().size());
   EXPECT_FALSE(caller->pc()->GetTransceivers()[0]->stopped());
-  std::string second_mid = offer_contents[0].name;
+  auto second_mid = offer_contents[0].mid();
   // Create another new offer and set the local description again without the
   // rest of any negotation ocurring.
   auto second_offer = caller->CreateOffer();
   auto second_offer_contents = second_offer->description()->contents();
   ASSERT_EQ(1u, second_offer_contents.size());
   EXPECT_FALSE(second_offer_contents[0].rejected);
   // The mid shouldn't change.
-  EXPECT_EQ(second_mid, second_offer_contents[0].name);
+  EXPECT_EQ(second_mid, second_offer_contents[0].mid());
   ASSERT_TRUE(caller->SetLocalDescription(std::move(second_offer)));
   // Make sure that the caller's transceivers are associated correctly.
@@ -779,15 +764,15 @@ TEST_F(PeerConnectionJsepTest, CreateOfferRecyclesWhenOfferingTwice) {
 class RecycleMediaSectionTest
     : public PeerConnectionJsepTest,
      public ::testing::WithParamInterface<
-          std::tuple<cricket::MediaType, cricket::MediaType>> {
+          std::tuple<webrtc::MediaType, webrtc::MediaType>> {
  protected:
   RecycleMediaSectionTest() {
     first_type_ = std::get<0>(GetParam());
     second_type_ = std::get<1>(GetParam());
   }
-  cricket::MediaType first_type_;
-  cricket::MediaType second_type_;
+  webrtc::MediaType first_type_;
+  webrtc::MediaType second_type_;
 };
 // Test that recycling works properly when a new transceiver recycles an m=
@@ -799,7 +784,7 @@ TEST_P(RecycleMediaSectionTest, CurrentLocalAndCurrentRemoteRejected) {
   ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
-  std::string first_mid = *first_transceiver->mid();
+  auto first_mid = *first_transceiver->mid();
   first_transceiver->StopInternal();
   ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
@@ -813,14 +798,14 @@ TEST_P(RecycleMediaSectionTest, CurrentLocalAndCurrentRemoteRejected) {
   ASSERT_EQ(1u, offer_contents.size());
   EXPECT_FALSE(offer_contents[0].rejected);
   EXPECT_EQ(second_type_, offer_contents[0].media_description()->type());
-  std::string second_mid = offer_contents[0].name;
+  auto second_mid = offer_contents[0].mid();
   EXPECT_NE(first_mid, second_mid);
   // Setting the local offer will dissociate the previous transceiver and set
   // the MID for the new transceiver.
   ASSERT_TRUE(
       caller->SetLocalDescription(CloneSessionDescription(offer.get())));
-  EXPECT_EQ(absl::nullopt, first_transceiver->mid());
+  EXPECT_EQ(std::nullopt, first_transceiver->mid());
   EXPECT_EQ(second_mid, second_transceiver->mid());
   // Setting the remote offer will dissociate the previous transceiver and
@@ -836,7 +821,7 @@ TEST_P(RecycleMediaSectionTest, CurrentLocalAndCurrentRemoteRejected) {
   auto answer_contents = answer->description()->contents();
   ASSERT_EQ(1u, answer_contents.size());
   EXPECT_FALSE(answer_contents[0].rejected);
-  EXPECT_EQ(second_mid, answer_contents[0].name);
+  EXPECT_EQ(second_mid, answer_contents[0].mid());
   EXPECT_EQ(second_type_, answer_contents[0].media_description()->type());
   // Setting the local answer should succeed.
@@ -876,14 +861,14 @@ TEST_P(RecycleMediaSectionTest, CurrentRemoteOnlyRejected) {
   ASSERT_EQ(1u, offer_contents.size());
   EXPECT_FALSE(offer_contents[0].rejected);
   EXPECT_EQ(second_type_, offer_contents[0].media_description()->type());
-  std::string second_mid = offer_contents[0].name;
+  auto second_mid = offer_contents[0].mid();
   EXPECT_NE(first_mid, second_mid);
   // Setting the local offer will dissociate the previous transceiver and set
   // the MID for the new transceiver.
   ASSERT_TRUE(
       caller->SetLocalDescription(CloneSessionDescription(offer.get())));
-  EXPECT_EQ(absl::nullopt, caller_first_transceiver->mid());
+  EXPECT_EQ(std::nullopt, caller_first_transceiver->mid());
   EXPECT_EQ(second_mid, caller_second_transceiver->mid());
   // Setting the remote offer will dissociate the previous transceiver and
@@ -899,7 +884,7 @@ TEST_P(RecycleMediaSectionTest, CurrentRemoteOnlyRejected) {
   auto answer_contents = answer->description()->contents();
   ASSERT_EQ(1u, answer_contents.size());
   EXPECT_FALSE(answer_contents[0].rejected);
-  EXPECT_EQ(second_mid, answer_contents[0].name);
+  EXPECT_EQ(second_mid, answer_contents[0].mid());
   EXPECT_EQ(second_type_, answer_contents[0].media_description()->type());
   // Setting the local answer should succeed.
@@ -939,14 +924,14 @@ TEST_P(RecycleMediaSectionTest, CurrentLocalOnlyRejected) { ASSERT_EQ(1u, offer_contents.size()); EXPECT_FALSE(offer_contents[0].rejected); EXPECT_EQ(second_type_, offer_contents[0].media_description()->type()); - std::string second_mid = offer_contents[0].name; + auto second_mid = offer_contents[0].mid(); EXPECT_NE(first_mid, second_mid); // Setting the local offer will dissociate the previous transceiver and set // the MID for the new transceiver. ASSERT_TRUE( callee->SetLocalDescription(CloneSessionDescription(offer.get()))); - EXPECT_EQ(absl::nullopt, callee_first_transceiver->mid()); + EXPECT_EQ(std::nullopt, callee_first_transceiver->mid()); EXPECT_EQ(second_mid, callee_second_transceiver->mid()); // Setting the remote offer will dissociate the previous transceiver and @@ -962,7 +947,7 @@ TEST_P(RecycleMediaSectionTest, CurrentLocalOnlyRejected) { auto answer_contents = answer->description()->contents(); ASSERT_EQ(1u, answer_contents.size()); EXPECT_FALSE(answer_contents[0].rejected); - EXPECT_EQ(second_mid, answer_contents[0].name); + EXPECT_EQ(second_mid, answer_contents[0].mid()); EXPECT_EQ(second_type_, answer_contents[0].media_description()->type()); // Setting the local answer should succeed. @@ -985,7 +970,7 @@ TEST_P(RecycleMediaSectionTest, PendingLocalRejectedAndNoRemote) { ASSERT_TRUE(caller->SetLocalDescription(caller->CreateOffer())); - std::string first_mid = *caller_first_transceiver->mid(); + auto first_mid = *caller_first_transceiver->mid(); caller_first_transceiver->StopInternal(); // The reoffer will have a rejected m= section. @@ -1000,10 +985,10 @@ TEST_P(RecycleMediaSectionTest, PendingLocalRejectedAndNoRemote) { ASSERT_EQ(2u, reoffer_contents.size()); EXPECT_TRUE(reoffer_contents[0].rejected); EXPECT_EQ(first_type_, reoffer_contents[0].media_description()->type()); - EXPECT_EQ(first_mid, reoffer_contents[0].name); + EXPECT_EQ(first_mid, reoffer_contents[0].mid()); EXPECT_FALSE(reoffer_contents[1].rejected); EXPECT_EQ(second_type_, reoffer_contents[1].media_description()->type()); - std::string second_mid = reoffer_contents[1].name; + auto second_mid = reoffer_contents[1].mid(); EXPECT_NE(first_mid, second_mid); ASSERT_TRUE(caller->SetLocalDescription(std::move(reoffer))); @@ -1023,7 +1008,7 @@ TEST_P(RecycleMediaSectionTest, PendingLocalRejectedAndNotRejectedRemote) { ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); - std::string first_mid = *caller_first_transceiver->mid(); + auto first_mid = *caller_first_transceiver->mid(); caller_first_transceiver->StopInternal(); // The reoffer will have a rejected m= section. 
@@ -1038,10 +1023,10 @@ TEST_P(RecycleMediaSectionTest, PendingLocalRejectedAndNotRejectedRemote) { ASSERT_EQ(2u, reoffer_contents.size()); EXPECT_TRUE(reoffer_contents[0].rejected); EXPECT_EQ(first_type_, reoffer_contents[0].media_description()->type()); - EXPECT_EQ(first_mid, reoffer_contents[0].name); + EXPECT_EQ(first_mid, reoffer_contents[0].mid()); EXPECT_FALSE(reoffer_contents[1].rejected); EXPECT_EQ(second_type_, reoffer_contents[1].media_description()->type()); - std::string second_mid = reoffer_contents[1].name; + auto second_mid = reoffer_contents[1].mid(); EXPECT_NE(first_mid, second_mid); ASSERT_TRUE(caller->SetLocalDescription(std::move(reoffer))); @@ -1078,10 +1063,10 @@ TEST_P(RecycleMediaSectionTest, PendingRemoteRejectedAndNoLocal) { ASSERT_EQ(2u, reoffer_contents.size()); EXPECT_TRUE(reoffer_contents[0].rejected); EXPECT_EQ(first_type_, reoffer_contents[0].media_description()->type()); - EXPECT_EQ(first_mid, reoffer_contents[0].name); + EXPECT_EQ(first_mid, reoffer_contents[0].mid()); EXPECT_FALSE(reoffer_contents[1].rejected); EXPECT_EQ(second_type_, reoffer_contents[1].media_description()->type()); - std::string second_mid = reoffer_contents[1].name; + auto second_mid = reoffer_contents[1].mid(); EXPECT_NE(first_mid, second_mid); // Note: Cannot actually set the reoffer since the callee is in the signaling @@ -1115,10 +1100,10 @@ TEST_P(RecycleMediaSectionTest, PendingRemoteRejectedAndNotRejectedLocal) { ASSERT_EQ(2u, reoffer_contents.size()); EXPECT_TRUE(reoffer_contents[0].rejected); EXPECT_EQ(first_type_, reoffer_contents[0].media_description()->type()); - EXPECT_EQ(first_mid, reoffer_contents[0].name); + EXPECT_EQ(first_mid, reoffer_contents[0].mid()); EXPECT_FALSE(reoffer_contents[1].rejected); EXPECT_EQ(second_type_, reoffer_contents[1].media_description()->type()); - std::string second_mid = reoffer_contents[1].name; + auto second_mid = reoffer_contents[1].mid(); EXPECT_NE(first_mid, second_mid); // Note: Cannot actually set the reoffer since the callee is in the signaling @@ -1132,15 +1117,15 @@ TEST_P(RecycleMediaSectionTest, PendingRemoteRejectedAndNotRejectedLocal) { INSTANTIATE_TEST_SUITE_P( PeerConnectionJsepTest, RecycleMediaSectionTest, - Combine(Values(cricket::MEDIA_TYPE_AUDIO, cricket::MEDIA_TYPE_VIDEO), - Values(cricket::MEDIA_TYPE_AUDIO, cricket::MEDIA_TYPE_VIDEO))); + Combine(Values(webrtc::MediaType::AUDIO, webrtc::MediaType::VIDEO), + Values(webrtc::MediaType::AUDIO, webrtc::MediaType::VIDEO))); // Test that a new data channel section will not reuse a recycleable audio or // video media section. Additionally, tests that the new section is added to the // end of the session description. 
 TEST_F(PeerConnectionJsepTest, DataChannelDoesNotRecycleMediaSection) {
   auto caller = CreatePeerConnection();
-  auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO);
   auto callee = CreatePeerConnection();
   ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get()));
@@ -1154,9 +1139,9 @@ TEST_F(PeerConnectionJsepTest, DataChannelDoesNotRecycleMediaSection) {
   auto offer = caller->CreateOffer();
   auto offer_contents = offer->description()->contents();
   ASSERT_EQ(2u, offer_contents.size());
-  EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO,
+  EXPECT_EQ(webrtc::MediaType::AUDIO,
             offer_contents[0].media_description()->type());
-  EXPECT_EQ(cricket::MEDIA_TYPE_DATA,
+  EXPECT_EQ(webrtc::MediaType::DATA,
             offer_contents[1].media_description()->type());
   ASSERT_TRUE(
@@ -1166,9 +1151,9 @@ TEST_F(PeerConnectionJsepTest, DataChannelDoesNotRecycleMediaSection) {
   auto answer = callee->CreateAnswer();
   auto answer_contents = answer->description()->contents();
   ASSERT_EQ(2u, answer_contents.size());
-  EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO,
+  EXPECT_EQ(webrtc::MediaType::AUDIO,
             answer_contents[0].media_description()->type());
-  EXPECT_EQ(cricket::MEDIA_TYPE_DATA,
+  EXPECT_EQ(webrtc::MediaType::DATA,
             answer_contents[1].media_description()->type());
 }
@@ -1187,27 +1172,26 @@ TEST_F(PeerConnectionJsepTest, AudioTrackAddedAfterDataSectionInReoffer) {
   auto offer = caller->CreateOffer();
   auto contents = offer->description()->contents();
   ASSERT_EQ(2u, contents.size());
-  EXPECT_EQ(cricket::MEDIA_TYPE_DATA, contents[0].media_description()->type());
-  EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, contents[1].media_description()->type());
+  EXPECT_EQ(webrtc::MediaType::DATA, contents[0].media_description()->type());
+  EXPECT_EQ(webrtc::MediaType::AUDIO, contents[1].media_description()->type());
 }
 // Tests for MID properties.
 static void RenameSection(size_t mline_index,
-                          const std::string& new_mid,
+                          absl::string_view new_mid,
                           SessionDescriptionInterface* sdesc) {
-  cricket::SessionDescription* desc = sdesc->description();
-  std::string old_mid = desc->contents()[mline_index].name;
-  desc->contents()[mline_index].name = new_mid;
+  SessionDescription* desc = sdesc->description();
+  std::string old_mid(desc->contents()[mline_index].mid());
+  desc->contents()[mline_index].set_mid(new_mid);
   desc->transport_infos()[mline_index].content_name = new_mid;
-  const cricket::ContentGroup* bundle =
-      desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE);
+  const ContentGroup* bundle = desc->GetGroupByName(GROUP_TYPE_BUNDLE);
   if (bundle) {
-    cricket::ContentGroup new_bundle = *bundle;
+    ContentGroup new_bundle = *bundle;
     if (new_bundle.RemoveContentName(old_mid)) {
       new_bundle.AddContentName(new_mid);
     }
-    desc->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
+    desc->RemoveGroupByName(GROUP_TYPE_BUNDLE);
     desc->AddGroup(new_bundle);
   }
 }
@@ -1240,8 +1224,8 @@ TEST_F(PeerConnectionJsepTest, OfferAnswerWithChangedMids) {
   auto answer = callee->CreateAnswer();
   auto answer_contents = answer->description()->contents();
-  EXPECT_EQ(kFirstMid, answer_contents[0].name);
-  EXPECT_EQ(kSecondMid, answer_contents[1].name);
+  EXPECT_EQ(kFirstMid, answer_contents[0].mid());
+  EXPECT_EQ(kSecondMid, answer_contents[1].mid());
   ASSERT_TRUE(
       callee->SetLocalDescription(CloneSessionDescription(answer.get())));
@@ -1258,8 +1242,7 @@ TEST_F(PeerConnectionJsepTest, CreateOfferGeneratesUniqueMidIfAlreadyTaken) {
   pc->AddAudioTrack("a");
   pc->AddAudioTrack("b");
   auto default_offer = pc->CreateOffer();
-  std::string default_second_mid =
-      default_offer->description()->contents()[1].name;
+  auto default_second_mid = default_offer->description()->contents()[1].mid();
   // Now, do an offer/answer with one track which has the MID set to the default
   // second MID.
@@ -1280,8 +1263,8 @@ TEST_F(PeerConnectionJsepTest, CreateOfferGeneratesUniqueMidIfAlreadyTaken) {
   auto reoffer = caller->CreateOffer();
   auto reoffer_contents = reoffer->description()->contents();
-  EXPECT_EQ(default_second_mid, reoffer_contents[0].name);
-  EXPECT_NE(reoffer_contents[0].name, reoffer_contents[1].name);
+  EXPECT_EQ(default_second_mid, reoffer_contents[0].mid());
+  EXPECT_NE(reoffer_contents[0].mid(), reoffer_contents[1].mid());
 }
 // Test that if an audio or video section has the default data section MID, then
@@ -1292,8 +1275,7 @@ TEST_F(PeerConnectionJsepTest,
   auto pc = CreatePeerConnection();
   pc->CreateDataChannel("dc");
   auto default_offer = pc->CreateOffer();
-  std::string default_data_mid =
-      default_offer->description()->contents()[0].name;
+  auto default_data_mid = default_offer->description()->contents()[0].mid();
   // Now do an offer/answer with one audio track which has a MID set to the
   // default data MID.
@@ -1314,8 +1296,8 @@ TEST_F(PeerConnectionJsepTest,
   auto reoffer = caller->CreateOffer();
   auto reoffer_contents = reoffer->description()->contents();
-  EXPECT_EQ(default_data_mid, reoffer_contents[0].name);
-  EXPECT_NE(reoffer_contents[0].name, reoffer_contents[1].name);
+  EXPECT_EQ(default_data_mid, reoffer_contents[0].mid());
+  EXPECT_NE(reoffer_contents[0].mid(), reoffer_contents[1].mid());
 }
 // Test that a reoffer initiated by the callee adds a new track to the caller.
@@ -1362,7 +1344,7 @@ TEST_F(PeerConnectionJsepTest, const std::string kTrackId = "audio_track"; auto caller = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); transceiver->sender()->SetTrack(caller->CreateAudioTrack(kTrackId).get()); auto offer = caller->CreateOffer(); @@ -1380,11 +1362,11 @@ TEST_F(PeerConnectionJsepTest, NoMsidInOfferIfTransceiverDirectionHasNoSend) { RtpTransceiverInit init_recvonly; init_recvonly.direction = RtpTransceiverDirection::kRecvOnly; - ASSERT_TRUE(caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init_recvonly)); + ASSERT_TRUE(caller->AddTransceiver(webrtc::MediaType::AUDIO, init_recvonly)); RtpTransceiverInit init_inactive; init_inactive.direction = RtpTransceiverDirection::kInactive; - ASSERT_TRUE(caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init_inactive)); + ASSERT_TRUE(caller->AddTransceiver(webrtc::MediaType::VIDEO, init_inactive)); auto offer = caller->CreateOffer(); auto contents = offer->description()->contents(); @@ -1404,13 +1386,13 @@ TEST_F(PeerConnectionJsepTest, NoMsidInAnswerIfNoRespondingTracks) { // no tracks to send in response. RtpTransceiverInit init_recvonly; init_recvonly.direction = RtpTransceiverDirection::kRecvOnly; - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init_recvonly); + caller->AddTransceiver(webrtc::MediaType::AUDIO, init_recvonly); // sendrecv transceiver will get negotiated to recvonly since the callee has // no tracks to send in response. RtpTransceiverInit init_sendrecv; init_sendrecv.direction = RtpTransceiverDirection::kSendRecv; - caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init_sendrecv); + caller->AddTransceiver(webrtc::MediaType::VIDEO, init_sendrecv); ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); @@ -1455,7 +1437,7 @@ TEST_F(PeerConnectionJsepTest, IncludeMsidEvenIfDirectionHasChanged) { // any MSID information for that section. TEST_F(PeerConnectionJsepTest, RemoveMsidIfTransceiverStopped) { auto caller = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); auto callee = CreatePeerConnection(); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); @@ -1612,7 +1594,7 @@ TEST_F(PeerConnectionJsepTest, CurrentDirectionResetWhenRtpTransceiverStopped) { auto caller = CreatePeerConnection(); auto callee = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); @@ -1685,19 +1667,19 @@ static void RemoveRtpHeaderExtensionByUri( // Transforms a session description to emulate a legacy endpoint which does not // support a=mid, BUNDLE, and the MID header extension. 
 static void ClearMids(SessionDescriptionInterface* sdesc) {
-  cricket::SessionDescription* desc = sdesc->description();
-  desc->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE);
-  cricket::ContentInfo* audio_content = cricket::GetFirstAudioContent(desc);
+  SessionDescription* desc = sdesc->description();
+  desc->RemoveGroupByName(GROUP_TYPE_BUNDLE);
+  ContentInfo* audio_content = GetFirstAudioContent(desc);
   if (audio_content) {
-    desc->GetTransportInfoByName(audio_content->name)->content_name = "";
-    audio_content->name = "";
+    desc->GetTransportInfoByName(audio_content->mid())->content_name = "";
+    audio_content->set_mid("");
     RemoveRtpHeaderExtensionByUri(audio_content->media_description(),
                                   RtpExtension::kMidUri);
   }
-  cricket::ContentInfo* video_content = cricket::GetFirstVideoContent(desc);
+  ContentInfo* video_content = GetFirstVideoContent(desc);
   if (video_content) {
-    desc->GetTransportInfoByName(video_content->name)->content_name = "";
-    video_content->name = "";
+    desc->GetTransportInfoByName(video_content->mid())->content_name = "";
+    video_content->set_mid("");
     RemoveRtpHeaderExtensionByUri(video_content->media_description(),
                                   RtpExtension::kMidUri);
   }
@@ -1924,7 +1906,7 @@ TEST_F(PeerConnectionJsepTest, AttemptToRollbackImplicitly) {
 TEST_F(PeerConnectionJsepTest, RollbackRemovesTransceiver) {
   auto caller = CreatePeerConnection();
-  caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  caller->AddTransceiver(webrtc::MediaType::AUDIO);
   auto callee = CreatePeerConnection();
   EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
   ASSERT_EQ(callee->pc()->GetTransceivers().size(), 1u);
@@ -1942,7 +1924,7 @@ TEST_F(PeerConnectionJsepTest, RollbackRemovesTransceiver) {
 TEST_F(PeerConnectionJsepTest, RollbackKeepsTransceiverAndClearsMid) {
   auto caller = CreatePeerConnection();
-  caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  caller->AddTransceiver(webrtc::MediaType::AUDIO);
   auto callee = CreatePeerConnection();
   EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
   callee->AddAudioTrack("a");
@@ -1951,7 +1933,7 @@ TEST_F(PeerConnectionJsepTest, RollbackKeepsTransceiverAndClearsMid) {
   // Transceiver can't be removed as track was added to it.
   EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);
   // Mid got cleared to make it reusable.
-  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), absl::nullopt);
+  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), std::nullopt);
   // Transceiver should be counted as addTrack-created after rollback.
   EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
   EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);
@@ -1968,7 +1950,7 @@ TEST_F(PeerConnectionJsepTest, RollbackKeepsTransceiverAndClearsMid) {
 TEST_F(PeerConnectionJsepTest,
        RollbackKeepsTransceiverAfterAddTrackEvenWhenTrackIsNulled) {
   auto caller = CreatePeerConnection();
-  caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO);
+  caller->AddTransceiver(webrtc::MediaType::AUDIO);
   auto callee = CreatePeerConnection();
   EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer()));
   callee->AddAudioTrack("a");
@@ -1979,7 +1961,7 @@ TEST_F(PeerConnectionJsepTest,
   // Transceiver can't be removed as track was added to it.
   EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u);
   // Mid got cleared to make it reusable.
-  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), absl::nullopt);
+  EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), std::nullopt);
   // Transceiver should be counted as addTrack-created after rollback.
EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer())); EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u); @@ -1988,15 +1970,15 @@ TEST_F(PeerConnectionJsepTest, TEST_F(PeerConnectionJsepTest, RollbackRestoresMid) { auto caller = CreatePeerConnection(); - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + caller->AddTransceiver(webrtc::MediaType::AUDIO); auto callee = CreatePeerConnection(); callee->AddAudioTrack("a"); auto offer = callee->CreateOffer(); EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer())); EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u); - EXPECT_NE(callee->pc()->GetTransceivers()[0]->mid(), absl::nullopt); + EXPECT_NE(callee->pc()->GetTransceivers()[0]->mid(), std::nullopt); EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateRollback())); - EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), absl::nullopt); + EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), std::nullopt); EXPECT_TRUE(callee->SetLocalDescription(std::move(offer))); } @@ -2011,7 +1993,7 @@ TEST_F(PeerConnectionJsepTest, RollbackRestoresInitSendEncodings) { init.send_encodings.push_back(encoding); encoding.rid = "lo"; init.send_encodings.push_back(encoding); - caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init); + caller->AddTransceiver(webrtc::MediaType::VIDEO, init); auto encodings = caller->pc()->GetTransceivers()[0]->sender()->init_send_encodings(); EXPECT_TRUE(caller->SetLocalDescription(caller->CreateOffer())); @@ -2034,8 +2016,8 @@ TEST_F(PeerConnectionJsepTest, RollbackDoesNotAffectSendEncodings) { init.send_encodings.push_back(encoding); encoding.rid = "lo"; init.send_encodings.push_back(encoding); - caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init); - callee->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + caller->AddTransceiver(webrtc::MediaType::VIDEO, init); + callee->AddTransceiver(webrtc::MediaType::VIDEO); callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal()); caller->SetRemoteDescription(callee->CreateAnswerAndSetAsLocal()); auto params = caller->pc()->GetTransceivers()[0]->sender()->GetParameters(); @@ -2076,7 +2058,7 @@ TEST_F(PeerConnectionJsepTest, RollbackRestoresMidAndRemovesTransceiver) { EXPECT_EQ(callee->pc()->GetTransceivers().size(), 1u); EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), mid); EXPECT_EQ(callee->pc()->GetTransceivers()[0]->media_type(), - cricket::MEDIA_TYPE_VIDEO); + webrtc::MediaType::VIDEO); EXPECT_TRUE(callee->SetLocalDescription(std::move(offer))); EXPECT_EQ(callee->observer()->remove_track_events_.size(), callee->observer()->add_track_events_.size()); @@ -2112,14 +2094,14 @@ TEST_F(PeerConnectionJsepTest, ImplicitlyRollbackTransceiversWithSameMids) { config.sdp_semantics = SdpSemantics::kUnifiedPlan; config.enable_implicit_rollback = true; auto caller = CreatePeerConnection(config); - caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + caller->AddTransceiver(webrtc::MediaType::VIDEO); auto callee = CreatePeerConnection(config); - callee->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + callee->AddTransceiver(webrtc::MediaType::VIDEO); EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); auto initial_mid = callee->pc()->GetTransceivers()[0]->mid(); EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); EXPECT_EQ(callee->pc()->GetTransceivers().size(), 2u); - EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), absl::nullopt); + EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), std::nullopt); EXPECT_EQ(callee->pc()->GetTransceivers()[1]->mid(), 
caller->pc()->GetTransceivers()[0]->mid()); EXPECT_TRUE(callee->CreateAnswerAndSetAsLocal()); // Go to stable. @@ -2132,7 +2114,7 @@ TEST_F(PeerConnectionJsepTest, RollbackToNegotiatedStableState) { config.sdp_semantics = SdpSemantics::kUnifiedPlan; config.bundle_policy = PeerConnectionInterface::kBundlePolicyMaxBundle; auto caller = CreatePeerConnection(config); - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + caller->AddTransceiver(webrtc::MediaType::AUDIO); auto callee = CreatePeerConnection(config); EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); EXPECT_TRUE(callee->CreateAnswerAndSetAsLocal()); @@ -2183,7 +2165,7 @@ TEST_F(PeerConnectionJsepTest, RollbackHasToDestroyTransport) { TEST_F(PeerConnectionJsepTest, RollbackLocalDirectionChange) { auto caller = CreatePeerConnection(); - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + caller->AddTransceiver(webrtc::MediaType::AUDIO); auto callee = CreatePeerConnection(); EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); EXPECT_TRUE( @@ -2206,7 +2188,7 @@ TEST_F(PeerConnectionJsepTest, RollbackLocalDirectionChange) { TEST_F(PeerConnectionJsepTest, RollbackRemoteDirectionChange) { auto caller = CreatePeerConnection(); - auto caller_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto caller_transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); auto callee = CreatePeerConnection(); callee->AddAudioTrack("a"); EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); @@ -2236,7 +2218,7 @@ TEST_F(PeerConnectionJsepTest, RollbackRemoteDirectionChange) { TEST_F(PeerConnectionJsepTest, RollbackRestoresFiredDirectionAndOnTrackCanFireAgain) { auto caller = CreatePeerConnection(); - auto caller_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto caller_transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); auto callee = CreatePeerConnection(); callee->AddAudioTrack("a"); ASSERT_EQ(callee->pc()->GetTransceivers().size(), 1u); @@ -2266,7 +2248,7 @@ TEST_F(PeerConnectionJsepTest, TEST_F(PeerConnectionJsepTest, RollbackFromInactiveToReceivingMakesOnTrackFire) { auto caller = CreatePeerConnection(); - auto caller_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto caller_transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); auto callee = CreatePeerConnection(); // Perform full O/A so that transceiver is associated. Ontrack fires. 
EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); @@ -2289,9 +2271,9 @@ TEST_F(PeerConnectionJsepTest, TEST_F(PeerConnectionJsepTest, RollbackAfterMultipleSLD) { auto callee = CreatePeerConnection(); - callee->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + callee->AddTransceiver(webrtc::MediaType::AUDIO); EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); - callee->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + callee->AddTransceiver(webrtc::MediaType::VIDEO); EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); callee->observer()->clear_legacy_renegotiation_needed(); callee->observer()->clear_latest_negotiation_needed_event(); @@ -2299,15 +2281,15 @@ TEST_F(PeerConnectionJsepTest, RollbackAfterMultipleSLD) { EXPECT_TRUE(callee->observer()->legacy_renegotiation_needed()); EXPECT_TRUE(callee->observer()->has_negotiation_needed_event()); EXPECT_EQ(callee->pc()->GetTransceivers().size(), 2u); - EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), absl::nullopt); - EXPECT_EQ(callee->pc()->GetTransceivers()[1]->mid(), absl::nullopt); + EXPECT_EQ(callee->pc()->GetTransceivers()[0]->mid(), std::nullopt); + EXPECT_EQ(callee->pc()->GetTransceivers()[1]->mid(), std::nullopt); } TEST_F(PeerConnectionJsepTest, NoRollbackNeeded) { auto caller = CreatePeerConnection(); - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + caller->AddTransceiver(webrtc::MediaType::AUDIO); auto callee = CreatePeerConnection(); - callee->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + callee->AddTransceiver(webrtc::MediaType::AUDIO); EXPECT_TRUE(caller->CreateOfferAndSetAsLocal()); EXPECT_TRUE(caller->CreateOfferAndSetAsLocal()); EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer())); @@ -2340,7 +2322,7 @@ TEST_F(PeerConnectionJsepTest, DataChannelImplicitRollback) { config.sdp_semantics = SdpSemantics::kUnifiedPlan; config.enable_implicit_rollback = true; auto caller = CreatePeerConnection(config); - caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + caller->AddTransceiver(webrtc::MediaType::VIDEO); auto callee = CreatePeerConnection(config); callee->CreateDataChannel("dummy"); EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); @@ -2357,7 +2339,7 @@ TEST_F(PeerConnectionJsepTest, RollbackRemoteDataChannelThenAddTransceiver) { caller->CreateDataChannel("dummy"); EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer())); EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback())); - callee->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + callee->AddTransceiver(webrtc::MediaType::VIDEO); EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); } @@ -2368,7 +2350,7 @@ TEST_F(PeerConnectionJsepTest, caller->CreateDataChannel("dummy"); EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer())); EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback())); - callee->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + callee->AddTransceiver(webrtc::MediaType::VIDEO); callee->CreateDataChannel("dummy"); EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); } @@ -2386,7 +2368,7 @@ TEST_F(PeerConnectionJsepTest, RollbackRemoteDataChannelThenAddDataChannel) { TEST_F(PeerConnectionJsepTest, RollbackRemoteTransceiverThenAddDataChannel) { auto caller = CreatePeerConnection(); auto callee = CreatePeerConnection(); - caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + caller->AddTransceiver(webrtc::MediaType::VIDEO); EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer())); EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback())); callee->CreateDataChannel("dummy"); @@ -2397,19 +2379,19 
@@ TEST_F(PeerConnectionJsepTest, RollbackRemoteTransceiverThenAddDataChannelAndTransceiver) { auto caller = CreatePeerConnection(); auto callee = CreatePeerConnection(); - caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + caller->AddTransceiver(webrtc::MediaType::VIDEO); EXPECT_TRUE(callee->SetRemoteDescription(caller->CreateOffer())); EXPECT_TRUE(callee->SetRemoteDescription(callee->CreateRollback())); callee->CreateDataChannel("dummy"); - callee->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + callee->AddTransceiver(webrtc::MediaType::VIDEO); EXPECT_TRUE(callee->CreateOfferAndSetAsLocal()); } TEST_F(PeerConnectionJsepTest, BundleOnlySectionDoesNotNeedRtcpMux) { auto caller = CreatePeerConnection(); auto callee = CreatePeerConnection(); - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + caller->AddTransceiver(webrtc::MediaType::AUDIO); + caller->AddTransceiver(webrtc::MediaType::VIDEO); auto offer = caller->CreateOffer(); // Remove rtcp-mux and set bundle-only on the second content. offer->description()->contents()[1].media_description()->set_rtcp_mux(false); diff --git a/pc/peer_connection_media_unittest.cc b/pc/peer_connection_media_unittest.cc index 485541981e..242984a675 100644 --- a/pc/peer_connection_media_unittest.cc +++ b/pc/peer_connection_media_unittest.cc @@ -15,110 +15,107 @@ #include #include #include -#include #include +#include #include #include #include -#include #include #include #include "absl/algorithm/container.h" -#include "absl/types/optional.h" -#include "api/audio_options.h" -#include "api/call/call_factory_interface.h" +#include "api/environment/environment_factory.h" #include "api/jsep.h" #include "api/media_types.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" #include "api/rtc_event_log/rtc_event_log_factory.h" -#include "api/rtc_event_log/rtc_event_log_factory_interface.h" #include "api/rtp_parameters.h" #include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_direction.h" #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" #include "api/task_queue/default_task_queue_factory.h" -#include "api/task_queue/task_queue_factory.h" #include "media/base/codec.h" #include "media/base/fake_media_engine.h" #include "media/base/media_channel.h" #include "media/base/media_constants.h" #include "media/base/media_engine.h" #include "media/base/stream_params.h" -#include "p2p/base/fake_port_allocator.h" #include "p2p/base/p2p_constants.h" -#include "p2p/base/port_allocator.h" #include "p2p/base/transport_info.h" +#include "p2p/test/fake_port_allocator.h" #include "pc/channel_interface.h" #include "pc/media_session.h" #include "pc/peer_connection_wrapper.h" #include "pc/rtp_media_utils.h" #include "pc/rtp_transceiver.h" #include "pc/session_description.h" +#include "pc/test/enable_fake_media.h" #include "pc/test/mock_peer_connection_observers.h" #include "rtc_base/checks.h" -#include "rtc_base/rtc_certificate_generator.h" +#include "rtc_base/ref_counted_object.h" #include "rtc_base/thread.h" #include "test/gtest.h" -#include "test/scoped_key_value_config.h" #ifdef WEBRTC_ANDROID #include "pc/test/android_test_initializer.h" #endif -#include "rtc_base/gunit.h" +#include "api/test/rtc_error_matchers.h" #include "rtc_base/virtual_socket_server.h" #include "test/gmock.h" namespace webrtc { -using cricket::FakeMediaEngine; using RTCConfiguration = PeerConnectionInterface::RTCConfiguration; using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions; 
using ::testing::Bool; using ::testing::Combine; using ::testing::ElementsAre; +using ::testing::Gt; +using ::testing::HasSubstr; using ::testing::NotNull; using ::testing::Values; -cricket::MediaSendChannelInterface* SendChannelInternal( - rtc::scoped_refptr transceiver) { - auto transceiver_with_internal = static_cast>*>( +RtpTransceiver* RtpTransceiverInternal( + scoped_refptr transceiver) { + auto transceiver_with_internal = static_cast< + RefCountedObject>*>( transceiver.get()); auto transceiver_internal = static_cast(transceiver_with_internal->internal()); + return transceiver_internal; +} + +MediaSendChannelInterface* SendChannelInternal( + scoped_refptr transceiver) { + auto transceiver_internal = RtpTransceiverInternal(transceiver); return transceiver_internal->channel()->media_send_channel(); } -cricket::MediaReceiveChannelInterface* ReceiveChannelInternal( - rtc::scoped_refptr transceiver) { - auto transceiver_with_internal = static_cast>*>( - transceiver.get()); - auto transceiver_internal = - static_cast(transceiver_with_internal->internal()); +MediaReceiveChannelInterface* ReceiveChannelInternal( + scoped_refptr transceiver) { + auto transceiver_internal = RtpTransceiverInternal(transceiver); return transceiver_internal->channel()->media_receive_channel(); } -cricket::FakeVideoMediaSendChannel* VideoMediaSendChannel( - rtc::scoped_refptr transceiver) { - return static_cast( +FakeVideoMediaSendChannel* VideoMediaSendChannel( + scoped_refptr transceiver) { + return static_cast( SendChannelInternal(transceiver)); } -cricket::FakeVideoMediaReceiveChannel* VideoMediaReceiveChannel( - rtc::scoped_refptr transceiver) { - return static_cast( +FakeVideoMediaReceiveChannel* VideoMediaReceiveChannel( + scoped_refptr transceiver) { + return static_cast( ReceiveChannelInternal(transceiver)); } -cricket::FakeVoiceMediaSendChannel* VoiceMediaSendChannel( - rtc::scoped_refptr transceiver) { - return static_cast( +FakeVoiceMediaSendChannel* VoiceMediaSendChannel( + scoped_refptr transceiver) { + return static_cast( SendChannelInternal(transceiver)); } -cricket::FakeVoiceMediaReceiveChannel* VoiceMediaReceiveChannel( - rtc::scoped_refptr transceiver) { - return static_cast( +FakeVoiceMediaReceiveChannel* VoiceMediaReceiveChannel( + scoped_refptr transceiver) { + return static_cast( ReceiveChannelInternal(transceiver)); } @@ -140,7 +137,7 @@ class PeerConnectionMediaBaseTest : public ::testing::Test { typedef std::unique_ptr WrapperPtr; explicit PeerConnectionMediaBaseTest(SdpSemantics sdp_semantics) - : vss_(new rtc::VirtualSocketServer()), + : vss_(new VirtualSocketServer()), main_(vss_.get()), sdp_semantics_(sdp_semantics) { #ifdef WEBRTC_ANDROID @@ -169,28 +166,24 @@ class PeerConnectionMediaBaseTest : public ::testing::Test { PeerConnectionFactoryDependencies factory_dependencies; - factory_dependencies.network_thread = rtc::Thread::Current(); - factory_dependencies.worker_thread = rtc::Thread::Current(); - factory_dependencies.signaling_thread = rtc::Thread::Current(); + factory_dependencies.network_thread = Thread::Current(); + factory_dependencies.worker_thread = Thread::Current(); + factory_dependencies.signaling_thread = Thread::Current(); factory_dependencies.task_queue_factory = CreateDefaultTaskQueueFactory(); - factory_dependencies.media_engine = std::move(media_engine); - factory_dependencies.call_factory = CreateCallFactory(); + EnableFakeMedia(factory_dependencies, std::move(media_engine)); factory_dependencies.event_log_factory = - std::make_unique( - 
factory_dependencies.task_queue_factory.get()); - + std::make_unique(); auto pc_factory = CreateModularPeerConnectionFactory(std::move(factory_dependencies)); - auto fake_port_allocator = std::make_unique( - rtc::Thread::Current(), - std::make_unique(vss_.get()), - &field_trials_); + auto fake_port_allocator = + std::make_unique(CreateEnvironment(), vss_.get()); auto observer = std::make_unique(); auto modified_config = config; modified_config.sdp_semantics = sdp_semantics_; PeerConnectionDependencies pc_dependencies(observer.get()); pc_dependencies.allocator = std::move(fake_port_allocator); + auto result = pc_factory->CreatePeerConnectionOrError( modified_config, std::move(pc_dependencies)); if (!result.ok()) { @@ -244,9 +237,8 @@ class PeerConnectionMediaBaseTest : public ::testing::Test { RtpTransceiverDirection GetMediaContentDirection( const SessionDescriptionInterface* sdesc, - cricket::MediaType media_type) { - auto* content = - cricket::GetFirstMediaContent(sdesc->description(), media_type); + webrtc::MediaType media_type) { + auto* content = GetFirstMediaContent(sdesc->description(), media_type); RTC_DCHECK(content); return content->media_description()->direction(); } @@ -255,9 +247,8 @@ class PeerConnectionMediaBaseTest : public ::testing::Test { return sdp_semantics_ == SdpSemantics::kUnifiedPlan; } - webrtc::test::ScopedKeyValueConfig field_trials_; - std::unique_ptr vss_; - rtc::AutoSocketServerThread main_; + std::unique_ptr vss_; + AutoSocketServerThread main_; const SdpSemantics sdp_semantics_; }; @@ -280,31 +271,7 @@ class PeerConnectionMediaTestPlanB : public PeerConnectionMediaBaseTest { : PeerConnectionMediaBaseTest(SdpSemantics::kPlanB_DEPRECATED) {} }; -TEST_P(PeerConnectionMediaTest, - FailToSetRemoteDescriptionIfCreateMediaChannelFails) { - auto caller = CreatePeerConnectionWithAudioVideo(); - auto callee = CreatePeerConnectionWithAudioVideo(); - callee->media_engine()->set_fail_create_channel(true); - - std::string error; - ASSERT_FALSE(callee->SetRemoteDescription(caller->CreateOffer(), &error)); - EXPECT_PRED_FORMAT2(AssertStartsWith, error, - "Failed to set remote offer sdp: Failed to create"); -} - -TEST_P(PeerConnectionMediaTest, - FailToSetLocalDescriptionIfCreateMediaChannelFails) { - auto caller = CreatePeerConnectionWithAudioVideo(); - caller->media_engine()->set_fail_create_channel(true); - - std::string error; - ASSERT_FALSE(caller->SetLocalDescription(caller->CreateOffer(), &error)); - EXPECT_PRED_FORMAT2(AssertStartsWith, error, - "Failed to set local offer sdp: Failed to create"); -} - -std::vector GetIds( - const std::vector& streams) { +std::vector GetIds(const std::vector& streams) { std::vector ids; ids.reserve(streams.size()); for (const auto& stream : streams) { @@ -423,9 +390,9 @@ TEST_F(PeerConnectionMediaTestPlanB, SimulcastOffer) { RTCOfferAnswerOptions options; options.num_simulcast_layers = 3; auto offer = caller->CreateOffer(options); - auto* description = cricket::GetFirstMediaContent(offer->description(), - cricket::MEDIA_TYPE_VIDEO) - ->media_description(); + auto* description = + GetFirstMediaContent(offer->description(), webrtc::MediaType::VIDEO) + ->media_description(); ASSERT_EQ(1u, description->streams().size()); ASSERT_TRUE(description->streams()[0].get_ssrc_group("SIM")); EXPECT_EQ(3u, description->streams()[0].get_ssrc_group("SIM")->ssrcs.size()); @@ -434,7 +401,7 @@ TEST_F(PeerConnectionMediaTestPlanB, SimulcastOffer) { caller->SetLocalDescription(std::move(offer)); auto senders = caller->pc()->GetSenders(); ASSERT_EQ(1u, 
senders.size()); - EXPECT_EQ(cricket::MediaType::MEDIA_TYPE_VIDEO, senders[0]->media_type()); + EXPECT_EQ(webrtc::MediaType::VIDEO, senders[0]->media_type()); EXPECT_EQ(3u, senders[0]->GetParameters().encodings.size()); } @@ -450,9 +417,9 @@ TEST_F(PeerConnectionMediaTestPlanB, SimulcastAnswer) { RTCOfferAnswerOptions options; options.num_simulcast_layers = 3; auto answer = callee->CreateAnswer(options); - auto* description = cricket::GetFirstMediaContent(answer->description(), - cricket::MEDIA_TYPE_VIDEO) - ->media_description(); + auto* description = + GetFirstMediaContent(answer->description(), webrtc::MediaType::VIDEO) + ->media_description(); ASSERT_EQ(1u, description->streams().size()); ASSERT_TRUE(description->streams()[0].get_ssrc_group("SIM")); EXPECT_EQ(3u, description->streams()[0].get_ssrc_group("SIM")->ssrcs.size()); @@ -461,7 +428,7 @@ TEST_F(PeerConnectionMediaTestPlanB, SimulcastAnswer) { callee->SetLocalDescription(std::move(answer)); auto senders = callee->pc()->GetSenders(); ASSERT_EQ(1u, senders.size()); - EXPECT_EQ(cricket::MediaType::MEDIA_TYPE_VIDEO, senders[0]->media_type()); + EXPECT_EQ(webrtc::MediaType::VIDEO, senders[0]->media_type()); EXPECT_EQ(3u, senders[0]->GetParameters().encodings.size()); } @@ -588,48 +555,44 @@ TEST_P(PeerConnectionMediaTest, ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); auto answer = callee->CreateAnswer(); - const auto* audio_content = - cricket::GetFirstAudioContent(answer->description()); + const auto* audio_content = GetFirstAudioContent(answer->description()); ASSERT_TRUE(audio_content); EXPECT_FALSE(audio_content->rejected); - const auto* video_content = - cricket::GetFirstVideoContent(answer->description()); + const auto* video_content = GetFirstVideoContent(answer->description()); ASSERT_TRUE(video_content); EXPECT_FALSE(video_content->rejected); } // Test that raw packetization is not set in the offer by default. TEST_P(PeerConnectionMediaTest, RawPacketizationNotSetInOffer) { - std::vector fake_codecs; - fake_codecs.push_back(cricket::CreateVideoCodec(111, cricket::kVp8CodecName)); - fake_codecs.push_back(cricket::CreateVideoRtxCodec(112, 111)); - fake_codecs.push_back(cricket::CreateVideoCodec(113, cricket::kVp9CodecName)); - fake_codecs.push_back( - cricket::CreateVideoCodec(114, cricket::kH264CodecName)); - fake_codecs.push_back(cricket::CreateVideoCodec(115, "HEVC")); + std::vector fake_codecs; + fake_codecs.push_back(CreateVideoCodec(111, kVp8CodecName)); + fake_codecs.push_back(CreateVideoRtxCodec(112, 111)); + fake_codecs.push_back(CreateVideoCodec(113, kVp9CodecName)); + fake_codecs.push_back(CreateVideoCodec(114, kH264CodecName)); + fake_codecs.push_back(CreateVideoCodec(115, "HEVC")); auto caller_fake_engine = std::make_unique(); caller_fake_engine->SetVideoCodecs(fake_codecs); auto caller = CreatePeerConnectionWithVideo(std::move(caller_fake_engine)); auto offer = caller->CreateOfferAndSetAsLocal(); auto* offer_description = - cricket::GetFirstVideoContentDescription(offer->description()); + GetFirstVideoContentDescription(offer->description()); for (const auto& codec : offer_description->codecs()) { - EXPECT_EQ(codec.packetization, absl::nullopt); + EXPECT_EQ(codec.packetization, std::nullopt); } } // Test that raw packetization is set in the offer and answer for all // video payload when raw_packetization_for_video is true. 
TEST_P(PeerConnectionMediaTest, RawPacketizationSetInOfferAndAnswer) { - std::vector fake_codecs; - fake_codecs.push_back(cricket::CreateVideoCodec(111, cricket::kVp8CodecName)); - fake_codecs.push_back(cricket::CreateVideoRtxCodec(112, 111)); - fake_codecs.push_back(cricket::CreateVideoCodec(113, cricket::kVp9CodecName)); - fake_codecs.push_back( - cricket::CreateVideoCodec(114, cricket::kH264CodecName)); - fake_codecs.push_back(cricket::CreateVideoCodec(115, "HEVC")); + std::vector fake_codecs; + fake_codecs.push_back(CreateVideoCodec(111, kVp8CodecName)); + fake_codecs.push_back(CreateVideoRtxCodec(112, 111)); + fake_codecs.push_back(CreateVideoCodec(113, kVp9CodecName)); + fake_codecs.push_back(CreateVideoCodec(114, kH264CodecName)); + fake_codecs.push_back(CreateVideoCodec(115, "HEVC")); auto caller_fake_engine = std::make_unique(); caller_fake_engine->SetVideoCodecs(fake_codecs); auto callee_fake_engine = std::make_unique(); @@ -641,10 +604,10 @@ TEST_P(PeerConnectionMediaTest, RawPacketizationSetInOfferAndAnswer) { auto caller = CreatePeerConnectionWithVideo(std::move(caller_fake_engine)); auto offer = caller->CreateOfferAndSetAsLocal(options); auto* offer_description = - cricket::GetFirstVideoContentDescription(offer->description()); + GetFirstVideoContentDescription(offer->description()); for (const auto& codec : offer_description->codecs()) { if (codec.IsMediaCodec()) { - EXPECT_EQ(codec.packetization, cricket::kPacketizationParamRaw); + EXPECT_EQ(codec.packetization, kPacketizationParamRaw); } } @@ -652,10 +615,10 @@ TEST_P(PeerConnectionMediaTest, RawPacketizationSetInOfferAndAnswer) { ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); auto answer = callee->CreateAnswerAndSetAsLocal(options); auto* answer_description = - cricket::GetFirstVideoContentDescription(answer->description()); + GetFirstVideoContentDescription(answer->description()); for (const auto& codec : answer_description->codecs()) { if (codec.IsMediaCodec()) { - EXPECT_EQ(codec.packetization, cricket::kPacketizationParamRaw); + EXPECT_EQ(codec.packetization, kPacketizationParamRaw); } } @@ -666,13 +629,12 @@ TEST_P(PeerConnectionMediaTest, RawPacketizationSetInOfferAndAnswer) { // raw_packetization_for_video is true if it was not set in the offer. 
TEST_P(PeerConnectionMediaTest, RawPacketizationNotSetInAnswerWhenNotSetInOffer) { - std::vector fake_codecs; - fake_codecs.push_back(cricket::CreateVideoCodec(111, cricket::kVp8CodecName)); - fake_codecs.push_back(cricket::CreateVideoRtxCodec(112, 111)); - fake_codecs.push_back(cricket::CreateVideoCodec(113, cricket::kVp9CodecName)); - fake_codecs.push_back( - cricket::CreateVideoCodec(114, cricket::kH264CodecName)); - fake_codecs.push_back(cricket::CreateVideoCodec(115, "HEVC")); + std::vector fake_codecs; + fake_codecs.push_back(CreateVideoCodec(111, kVp8CodecName)); + fake_codecs.push_back(CreateVideoRtxCodec(112, 111)); + fake_codecs.push_back(CreateVideoCodec(113, kVp9CodecName)); + fake_codecs.push_back(CreateVideoCodec(114, kH264CodecName)); + fake_codecs.push_back(CreateVideoCodec(115, "HEVC")); auto caller_fake_engine = std::make_unique(); caller_fake_engine->SetVideoCodecs(fake_codecs); auto callee_fake_engine = std::make_unique(); @@ -691,9 +653,9 @@ TEST_P(PeerConnectionMediaTest, auto answer = callee->CreateAnswerAndSetAsLocal(callee_options); auto* answer_description = - cricket::GetFirstVideoContentDescription(answer->description()); + GetFirstVideoContentDescription(answer->description()); for (const auto& codec : answer_description->codecs()) { - EXPECT_EQ(codec.packetization, absl::nullopt); + EXPECT_EQ(codec.packetization, std::nullopt); } ASSERT_TRUE(caller->SetRemoteDescription(std::move(answer))); @@ -730,8 +692,8 @@ TEST_P(PeerConnectionMediaOfferDirectionTest, VerifyDirection) { options.offer_to_receive_audio = offer_to_receive_; auto offer = caller->CreateOffer(options); - auto* content = cricket::GetFirstMediaContent(offer->description(), - cricket::MEDIA_TYPE_AUDIO); + auto* content = + GetFirstMediaContent(offer->description(), webrtc::MediaType::AUDIO); if (expected_direction_ == RtpTransceiverDirection::kInactive) { EXPECT_FALSE(content); } else { @@ -786,7 +748,7 @@ TEST_P(PeerConnectionMediaAnswerDirectionTest, VerifyDirection) { // Create the offer with an audio section and set its direction. auto offer = caller->CreateOffer(); - cricket::GetFirstAudioContentDescription(offer->description()) + GetFirstAudioContentDescription(offer->description()) ->set_direction(offer_direction_); auto callee = CreatePeerConnection(); @@ -817,7 +779,7 @@ TEST_P(PeerConnectionMediaAnswerDirectionTest, VerifyDirection) { auto expected_direction = RtpTransceiverDirectionFromSendRecv(negotiate_send, negotiate_recv); EXPECT_EQ(expected_direction, - GetMediaContentDirection(answer.get(), cricket::MEDIA_TYPE_AUDIO)); + GetMediaContentDirection(answer.get(), webrtc::MediaType::AUDIO)); } // Tests that the media section is rejected if and only if the callee has no @@ -836,7 +798,7 @@ TEST_P(PeerConnectionMediaAnswerDirectionTest, VerifyRejected) { // Create the offer with an audio section and set its direction. auto offer = caller->CreateOffer(); - cricket::GetFirstAudioContentDescription(offer->description()) + GetFirstAudioContentDescription(offer->description()) ->set_direction(offer_direction_); auto callee = CreatePeerConnection(); @@ -852,7 +814,7 @@ TEST_P(PeerConnectionMediaAnswerDirectionTest, VerifyRejected) { // The media section is rejected if and only if offer_to_receive is explicitly // set to 0 and there is no media to send. 
- auto* audio_content = cricket::GetFirstAudioContent(answer->description()); + auto* audio_content = GetFirstAudioContent(answer->description()); ASSERT_TRUE(audio_content); EXPECT_EQ((offer_to_receive_ == 0 && !send_media_), audio_content->rejected); } @@ -878,9 +840,9 @@ TEST_P(PeerConnectionMediaTest, OfferHasDifferentDirectionForAudioVideo) { auto offer = caller->CreateOffer(options); EXPECT_EQ(RtpTransceiverDirection::kRecvOnly, - GetMediaContentDirection(offer.get(), cricket::MEDIA_TYPE_AUDIO)); + GetMediaContentDirection(offer.get(), webrtc::MediaType::AUDIO)); EXPECT_EQ(RtpTransceiverDirection::kSendOnly, - GetMediaContentDirection(offer.get(), cricket::MEDIA_TYPE_VIDEO)); + GetMediaContentDirection(offer.get(), webrtc::MediaType::VIDEO)); } TEST_P(PeerConnectionMediaTest, AnswerHasDifferentDirectionsForAudioVideo) { @@ -902,36 +864,36 @@ TEST_P(PeerConnectionMediaTest, AnswerHasDifferentDirectionsForAudioVideo) { auto answer = callee->CreateAnswer(options); EXPECT_EQ(RtpTransceiverDirection::kRecvOnly, - GetMediaContentDirection(answer.get(), cricket::MEDIA_TYPE_AUDIO)); + GetMediaContentDirection(answer.get(), webrtc::MediaType::AUDIO)); EXPECT_EQ(RtpTransceiverDirection::kSendOnly, - GetMediaContentDirection(answer.get(), cricket::MEDIA_TYPE_VIDEO)); + GetMediaContentDirection(answer.get(), webrtc::MediaType::VIDEO)); } -void AddComfortNoiseCodecsToSend(cricket::FakeMediaEngine* media_engine) { - const cricket::AudioCodec kComfortNoiseCodec8k = - cricket::CreateAudioCodec(102, cricket::kCnCodecName, 8000, 1); - const cricket::AudioCodec kComfortNoiseCodec16k = - cricket::CreateAudioCodec(103, cricket::kCnCodecName, 16000, 1); +void AddComfortNoiseCodecsToSend(FakeMediaEngine* media_engine) { + const Codec kComfortNoiseCodec8k = + CreateAudioCodec(102, kCnCodecName, 8000, 1); + const Codec kComfortNoiseCodec16k = + CreateAudioCodec(103, kCnCodecName, 16000, 1); - auto codecs = media_engine->voice().send_codecs(); + auto codecs = media_engine->voice().LegacySendCodecs(); codecs.push_back(kComfortNoiseCodec8k); codecs.push_back(kComfortNoiseCodec16k); media_engine->SetAudioCodecs(codecs); } -bool HasAnyComfortNoiseCodecs(const cricket::SessionDescription* desc) { - const auto* audio_desc = cricket::GetFirstAudioContentDescription(desc); +bool HasAnyComfortNoiseCodecs(const SessionDescription* desc) { + const auto* audio_desc = GetFirstAudioContentDescription(desc); for (const auto& codec : audio_desc->codecs()) { - if (codec.name == cricket::kCnCodecName) { + if (codec.name == kCnCodecName) { return true; } } return false; } -bool HasPayloadTypeConflict(const cricket::SessionDescription* desc) { +bool HasPayloadTypeConflict(const SessionDescription* desc) { std::set payload_types; - const auto* audio_desc = cricket::GetFirstAudioContentDescription(desc); + const auto* audio_desc = GetFirstAudioContentDescription(desc); if (audio_desc) { for (const auto& codec : audio_desc->codecs()) { if (payload_types.count(codec.id) > 0) { @@ -940,7 +902,7 @@ bool HasPayloadTypeConflict(const cricket::SessionDescription* desc) { payload_types.insert(codec.id); } } - const auto* video_desc = cricket::GetFirstVideoContentDescription(desc); + const auto* video_desc = GetFirstVideoContentDescription(desc); if (video_desc) { for (const auto& codec : video_desc->codecs()) { if (payload_types.count(codec.id) > 0) { @@ -1025,7 +987,7 @@ class PeerConnectionMediaInvalidMediaTest public ::testing::WithParamInterface, + std::function, std::string>>> { protected: PeerConnectionMediaInvalidMediaTest() @@ 
-1035,7 +997,7 @@ class PeerConnectionMediaInvalidMediaTest expected_error_ = std::get<2>(param); } - std::function mutator_; + std::function mutator_; std::string expected_error_; }; @@ -1067,32 +1029,32 @@ TEST_P(PeerConnectionMediaInvalidMediaTest, FailToSetLocalAnswer) { EXPECT_EQ("Failed to set local answer sdp: " + expected_error_, error); } -void RemoveVideoContentAndUnbundle(cricket::SessionDescription* desc) { +void RemoveVideoContentAndUnbundle(SessionDescription* desc) { // Removing BUNDLE is easier than removing the content in there. desc->RemoveGroupByName("BUNDLE"); - auto content_name = cricket::GetFirstVideoContent(desc)->name; + auto content_name = GetFirstVideoContent(desc)->mid(); desc->RemoveContentByName(content_name); desc->RemoveTransportInfoByName(content_name); } -void RenameVideoContentAndUnbundle(cricket::SessionDescription* desc) { +void RenameVideoContentAndUnbundle(SessionDescription* desc) { // Removing BUNDLE is easier than renaming the content in there. desc->RemoveGroupByName("BUNDLE"); - auto* video_content = cricket::GetFirstVideoContent(desc); - auto* transport_info = desc->GetTransportInfoByName(video_content->name); - video_content->name = "video_renamed"; - transport_info->content_name = video_content->name; + auto* video_content = GetFirstVideoContent(desc); + auto* transport_info = desc->GetTransportInfoByName(video_content->mid()); + video_content->set_mid("video_renamed"); + transport_info->content_name = video_content->mid(); } -void ReverseMediaContent(cricket::SessionDescription* desc) { +void ReverseMediaContent(SessionDescription* desc) { absl::c_reverse(desc->contents()); absl::c_reverse(desc->transport_infos()); } -void ChangeMediaTypeAudioToVideo(cricket::SessionDescription* desc) { - std::string audio_mid = cricket::GetFirstAudioContent(desc)->name; +void ChangeMediaTypeAudioToVideo(SessionDescription* desc) { + auto audio_mid = GetFirstAudioContent(desc)->mid(); desc->RemoveContentByName(audio_mid); - auto* video_content = cricket::GetFirstVideoContent(desc); + auto* video_content = GetFirstVideoContent(desc); desc->AddContent(audio_mid, video_content->type, video_content->media_description()->Clone()); } @@ -1165,23 +1127,22 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, EXPECT_FALSE(caller->SetLocalDescription(caller->CreateOffer())); } -void RenameContent(cricket::SessionDescription* desc, - cricket::MediaType media_type, +void RenameContent(SessionDescription* desc, + webrtc::MediaType media_type, const std::string& new_name) { - auto* content = cricket::GetFirstMediaContent(desc, media_type); + auto* content = GetFirstMediaContent(desc, media_type); RTC_DCHECK(content); - std::string old_name = content->name; - content->name = new_name; + std::string old_name(content->mid()); + content->set_mid(new_name); auto* transport = desc->GetTransportInfoByName(old_name); RTC_DCHECK(transport); transport->content_name = new_name; // Rename the content name in the BUNDLE group. 
- cricket::ContentGroup new_bundle_group = - *desc->GetGroupByName(cricket::GROUP_TYPE_BUNDLE); + ContentGroup new_bundle_group = *desc->GetGroupByName(GROUP_TYPE_BUNDLE); new_bundle_group.RemoveContentName(old_name); new_bundle_group.AddContentName(new_name); - desc->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); + desc->RemoveGroupByName(GROUP_TYPE_BUNDLE); desc->AddGroup(new_bundle_group); } @@ -1194,15 +1155,13 @@ TEST_P(PeerConnectionMediaTest, AnswerHasSameMidsAsOffer) { auto callee = CreatePeerConnectionWithAudioVideo(); auto offer = caller->CreateOffer(); - RenameContent(offer->description(), cricket::MEDIA_TYPE_AUDIO, kAudioMid); - RenameContent(offer->description(), cricket::MEDIA_TYPE_VIDEO, kVideoMid); + RenameContent(offer->description(), webrtc::MediaType::AUDIO, kAudioMid); + RenameContent(offer->description(), webrtc::MediaType::VIDEO, kVideoMid); ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); auto answer = callee->CreateAnswer(); - EXPECT_EQ(kAudioMid, - cricket::GetFirstAudioContent(answer->description())->name); - EXPECT_EQ(kVideoMid, - cricket::GetFirstVideoContent(answer->description())->name); + EXPECT_EQ(kAudioMid, GetFirstAudioContent(answer->description())->mid()); + EXPECT_EQ(kVideoMid, GetFirstVideoContent(answer->description())->mid()); } // Test that if the callee creates a re-offer, the MIDs are the same as the @@ -1215,16 +1174,14 @@ TEST_P(PeerConnectionMediaTest, ReOfferHasSameMidsAsFirstOffer) { auto callee = CreatePeerConnectionWithAudioVideo(); auto offer = caller->CreateOffer(); - RenameContent(offer->description(), cricket::MEDIA_TYPE_AUDIO, kAudioMid); - RenameContent(offer->description(), cricket::MEDIA_TYPE_VIDEO, kVideoMid); + RenameContent(offer->description(), webrtc::MediaType::AUDIO, kAudioMid); + RenameContent(offer->description(), webrtc::MediaType::VIDEO, kVideoMid); ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); ASSERT_TRUE(callee->SetLocalDescription(callee->CreateAnswer())); auto reoffer = callee->CreateOffer(); - EXPECT_EQ(kAudioMid, - cricket::GetFirstAudioContent(reoffer->description())->name); - EXPECT_EQ(kVideoMid, - cricket::GetFirstVideoContent(reoffer->description())->name); + EXPECT_EQ(kAudioMid, GetFirstAudioContent(reoffer->description())->mid()); + EXPECT_EQ(kVideoMid, GetFirstVideoContent(reoffer->description())->mid()); } // Test that SetRemoteDescription returns an error if there are two m= sections @@ -1234,8 +1191,8 @@ TEST_P(PeerConnectionMediaTest, SetRemoteDescriptionFailsWithDuplicateMids) { auto callee = CreatePeerConnectionWithAudioVideo(); auto offer = caller->CreateOffer(); - RenameContent(offer->description(), cricket::MEDIA_TYPE_AUDIO, "same"); - RenameContent(offer->description(), cricket::MEDIA_TYPE_VIDEO, "same"); + RenameContent(offer->description(), webrtc::MediaType::AUDIO, "same"); + RenameContent(offer->description(), webrtc::MediaType::VIDEO, "same"); std::string error; EXPECT_FALSE(callee->SetRemoteDescription(std::move(offer), &error)); @@ -1248,17 +1205,16 @@ TEST_P(PeerConnectionMediaTest, SetRemoteDescriptionFailsWithDuplicateMids) { // endpoint selected a different payload type or there was a conflict), the RED // fmtp line is modified to refer to the correct payload type. 
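// A note on the RED plumbing used below: RFC 2198 style RED advertises the
// encapsulated payload types as a bare "<pt>/<pt>" list rather than as
// key=value pairs, so the tests attach it under
// kCodecParamNotInNameValueFormat. A minimal sketch of building such a codec,
// mirroring the fake codecs fed into the engines below (the payload type
// numbers here are arbitrary):
Codec red = CreateAudioCodec(121, kRedCodecName, 0, 1);
red.SetParam(kCodecParamNotInNameValueFormat, "120/120");
// In the offer this surfaces as an fmtp line for payload type 121 whose value
// is "120/120", i.e. RED wrapping the codec registered as payload type 120.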
TEST_P(PeerConnectionMediaTest, RedFmtpPayloadTypeReassigned) { - std::vector caller_fake_codecs; - caller_fake_codecs.push_back(cricket::CreateAudioCodec(100, "foo", 0, 1)); + std::vector caller_fake_codecs; + caller_fake_codecs.push_back(CreateAudioCodec(100, "foo", 0, 1)); auto caller_fake_engine = std::make_unique(); caller_fake_engine->SetAudioCodecs(caller_fake_codecs); auto caller = CreatePeerConnectionWithAudio(std::move(caller_fake_engine)); - std::vector callee_fake_codecs; - callee_fake_codecs.push_back(cricket::CreateAudioCodec(120, "foo", 0, 1)); - callee_fake_codecs.push_back( - cricket::CreateAudioCodec(121, cricket::kRedCodecName, 0, 1)); - callee_fake_codecs.back().SetParam(cricket::kCodecParamNotInNameValueFormat, + std::vector callee_fake_codecs; + callee_fake_codecs.push_back(CreateAudioCodec(120, "foo", 0, 1)); + callee_fake_codecs.push_back(CreateAudioCodec(121, kRedCodecName, 0, 1)); + callee_fake_codecs.back().SetParam(kCodecParamNotInNameValueFormat, "120/120"); auto callee_fake_engine = std::make_unique(); callee_fake_engine->SetAudioCodecs(callee_fake_codecs); @@ -1269,19 +1225,19 @@ TEST_P(PeerConnectionMediaTest, RedFmtpPayloadTypeReassigned) { callee->SetRemoteDescription(std::move(offer)); auto answer = callee->CreateAnswerAndSetAsLocal(); auto answer_description = - cricket::GetFirstAudioContentDescription(answer->description()); + GetFirstAudioContentDescription(answer->description()); ASSERT_EQ(1u, answer_description->codecs().size()); // Offer from the callee should respect the established payload type, and // attempt to add RED, which should refer to the correct payload type. offer = callee->CreateOfferAndSetAsLocal(); auto* offer_description = - cricket::GetFirstAudioContentDescription(offer->description()); + GetFirstAudioContentDescription(offer->description()); ASSERT_EQ(2u, offer_description->codecs().size()); for (const auto& codec : offer_description->codecs()) { if (codec.name == "foo") { ASSERT_EQ(100, codec.id); - } else if (codec.name == cricket::kRedCodecName) { + } else if (codec.name == kRedCodecName) { std::string fmtp; ASSERT_TRUE(codec.GetParam("", &fmtp)); EXPECT_EQ("100/100", fmtp); @@ -1291,18 +1247,16 @@ TEST_P(PeerConnectionMediaTest, RedFmtpPayloadTypeReassigned) { // Test that RED without fmtp does match RED without fmtp. 
TEST_P(PeerConnectionMediaTest, RedFmtpPayloadTypeNoFmtpMatchNoFmtp) { - std::vector caller_fake_codecs; - caller_fake_codecs.push_back(cricket::CreateAudioCodec(100, "foo", 0, 1)); - caller_fake_codecs.push_back( - cricket::CreateAudioCodec(101, cricket::kRedCodecName, 0, 1)); + std::vector caller_fake_codecs; + caller_fake_codecs.push_back(CreateAudioCodec(100, "foo", 0, 1)); + caller_fake_codecs.push_back(CreateAudioCodec(101, kRedCodecName, 0, 1)); auto caller_fake_engine = std::make_unique(); caller_fake_engine->SetAudioCodecs(caller_fake_codecs); auto caller = CreatePeerConnectionWithAudio(std::move(caller_fake_engine)); - std::vector callee_fake_codecs; - callee_fake_codecs.push_back(cricket::CreateAudioCodec(120, "foo", 0, 1)); - callee_fake_codecs.push_back( - cricket::CreateAudioCodec(121, cricket::kRedCodecName, 0, 1)); + std::vector callee_fake_codecs; + callee_fake_codecs.push_back(CreateAudioCodec(120, "foo", 0, 1)); + callee_fake_codecs.push_back(CreateAudioCodec(121, kRedCodecName, 0, 1)); auto callee_fake_engine = std::make_unique(); callee_fake_engine->SetAudioCodecs(callee_fake_codecs); auto callee = CreatePeerConnectionWithAudio(std::move(callee_fake_engine)); @@ -1313,19 +1267,19 @@ TEST_P(PeerConnectionMediaTest, RedFmtpPayloadTypeNoFmtpMatchNoFmtp) { callee->SetRemoteDescription(std::move(offer)); auto answer = callee->CreateAnswerAndSetAsLocal(); auto answer_description = - cricket::GetFirstAudioContentDescription(answer->description()); + GetFirstAudioContentDescription(answer->description()); ASSERT_EQ(2u, answer_description->codecs().size()); // Offer from the callee should respect the established payload type, and // attempt to add RED. offer = callee->CreateOfferAndSetAsLocal(); auto* offer_description = - cricket::GetFirstAudioContentDescription(offer->description()); + GetFirstAudioContentDescription(offer->description()); ASSERT_EQ(2u, offer_description->codecs().size()); for (const auto& codec : offer_description->codecs()) { if (codec.name == "foo") { ASSERT_EQ(100, codec.id); - } else if (codec.name == cricket::kRedCodecName) { + } else if (codec.name == kRedCodecName) { ASSERT_EQ(101, codec.id); } } @@ -1333,19 +1287,17 @@ TEST_P(PeerConnectionMediaTest, RedFmtpPayloadTypeNoFmtpMatchNoFmtp) { // Test that RED without fmtp does not match RED with fmtp. 
TEST_P(PeerConnectionMediaTest, RedFmtpPayloadTypeNoFmtpNoMatchFmtp) { - std::vector caller_fake_codecs; - caller_fake_codecs.push_back(cricket::CreateAudioCodec(100, "foo", 0, 1)); - caller_fake_codecs.push_back( - cricket::CreateAudioCodec(101, cricket::kRedCodecName, 0, 1)); + std::vector caller_fake_codecs; + caller_fake_codecs.push_back(CreateAudioCodec(100, "foo", 0, 1)); + caller_fake_codecs.push_back(CreateAudioCodec(101, kRedCodecName, 0, 1)); auto caller_fake_engine = std::make_unique(); caller_fake_engine->SetAudioCodecs(caller_fake_codecs); auto caller = CreatePeerConnectionWithAudio(std::move(caller_fake_engine)); - std::vector callee_fake_codecs; - callee_fake_codecs.push_back(cricket::CreateAudioCodec(120, "foo", 0, 1)); - callee_fake_codecs.push_back( - cricket::CreateAudioCodec(121, cricket::kRedCodecName, 0, 1)); - callee_fake_codecs.back().SetParam(cricket::kCodecParamNotInNameValueFormat, + std::vector callee_fake_codecs; + callee_fake_codecs.push_back(CreateAudioCodec(120, "foo", 0, 1)); + callee_fake_codecs.push_back(CreateAudioCodec(121, kRedCodecName, 0, 1)); + callee_fake_codecs.back().SetParam(kCodecParamNotInNameValueFormat, "120/120"); auto callee_fake_engine = std::make_unique(); callee_fake_engine->SetAudioCodecs(callee_fake_codecs); @@ -1357,22 +1309,21 @@ TEST_P(PeerConnectionMediaTest, RedFmtpPayloadTypeNoFmtpNoMatchFmtp) { callee->SetRemoteDescription(std::move(offer)); auto answer = callee->CreateAnswerAndSetAsLocal(); auto answer_description = - cricket::GetFirstAudioContentDescription(answer->description()); + GetFirstAudioContentDescription(answer->description()); ASSERT_EQ(1u, answer_description->codecs().size()); // Offer from the callee should respect the established payload type, and // attempt to add RED, which should refer to the correct payload type. offer = callee->CreateOfferAndSetAsLocal(); auto* offer_description = - cricket::GetFirstAudioContentDescription(offer->description()); + GetFirstAudioContentDescription(offer->description()); ASSERT_EQ(2u, offer_description->codecs().size()); for (const auto& codec : offer_description->codecs()) { if (codec.name == "foo") { ASSERT_EQ(100, codec.id); - } else if (codec.name == cricket::kRedCodecName) { + } else if (codec.name == kRedCodecName) { std::string fmtp; - ASSERT_TRUE( - codec.GetParam(cricket::kCodecParamNotInNameValueFormat, &fmtp)); + ASSERT_TRUE(codec.GetParam(kCodecParamNotInNameValueFormat, &fmtp)); EXPECT_EQ("100/100", fmtp); } } @@ -1380,22 +1331,20 @@ TEST_P(PeerConnectionMediaTest, RedFmtpPayloadTypeNoFmtpNoMatchFmtp) { // Test that RED with fmtp must match base codecs. 
TEST_P(PeerConnectionMediaTest, RedFmtpPayloadTypeMustMatchBaseCodecs) { - std::vector caller_fake_codecs; - caller_fake_codecs.push_back(cricket::CreateAudioCodec(100, "foo", 0, 1)); - caller_fake_codecs.push_back( - cricket::CreateAudioCodec(101, cricket::kRedCodecName, 0, 1)); - caller_fake_codecs.back().SetParam(cricket::kCodecParamNotInNameValueFormat, + std::vector caller_fake_codecs; + caller_fake_codecs.push_back(CreateAudioCodec(100, "foo", 0, 1)); + caller_fake_codecs.push_back(CreateAudioCodec(101, kRedCodecName, 0, 1)); + caller_fake_codecs.back().SetParam(kCodecParamNotInNameValueFormat, "100/100"); auto caller_fake_engine = std::make_unique(); caller_fake_engine->SetAudioCodecs(caller_fake_codecs); auto caller = CreatePeerConnectionWithAudio(std::move(caller_fake_engine)); - std::vector callee_fake_codecs; - callee_fake_codecs.push_back(cricket::CreateAudioCodec(120, "foo", 0, 1)); - callee_fake_codecs.push_back( - cricket::CreateAudioCodec(121, cricket::kRedCodecName, 0, 1)); - callee_fake_codecs.push_back(cricket::CreateAudioCodec(122, "bar", 0, 1)); - callee_fake_codecs.back().SetParam(cricket::kCodecParamNotInNameValueFormat, + std::vector callee_fake_codecs; + callee_fake_codecs.push_back(CreateAudioCodec(120, "foo", 0, 1)); + callee_fake_codecs.push_back(CreateAudioCodec(121, kRedCodecName, 0, 1)); + callee_fake_codecs.push_back(CreateAudioCodec(122, "bar", 0, 1)); + callee_fake_codecs.back().SetParam(kCodecParamNotInNameValueFormat, "122/122"); auto callee_fake_engine = std::make_unique(); callee_fake_engine->SetAudioCodecs(callee_fake_codecs); @@ -1407,29 +1356,27 @@ TEST_P(PeerConnectionMediaTest, RedFmtpPayloadTypeMustMatchBaseCodecs) { callee->SetRemoteDescription(std::move(offer)); auto answer = callee->CreateAnswerAndSetAsLocal(); auto answer_description = - cricket::GetFirstAudioContentDescription(answer->description()); + GetFirstAudioContentDescription(answer->description()); ASSERT_EQ(1u, answer_description->codecs().size()); } // Test behaviour when the RED fmtp attempts to specify different codecs // which is not supported. 
TEST_P(PeerConnectionMediaTest, RedFmtpPayloadMixed) { - std::vector caller_fake_codecs; - caller_fake_codecs.push_back(cricket::CreateAudioCodec(100, "foo", 0, 1)); - caller_fake_codecs.push_back(cricket::CreateAudioCodec(102, "bar", 0, 1)); - caller_fake_codecs.push_back( - cricket::CreateAudioCodec(101, cricket::kRedCodecName, 0, 1)); - caller_fake_codecs.back().SetParam(cricket::kCodecParamNotInNameValueFormat, + std::vector caller_fake_codecs; + caller_fake_codecs.push_back(CreateAudioCodec(100, "foo", 0, 1)); + caller_fake_codecs.push_back(CreateAudioCodec(102, "bar", 0, 1)); + caller_fake_codecs.push_back(CreateAudioCodec(101, kRedCodecName, 0, 1)); + caller_fake_codecs.back().SetParam(kCodecParamNotInNameValueFormat, "100/102"); auto caller_fake_engine = std::make_unique(); caller_fake_engine->SetAudioCodecs(caller_fake_codecs); auto caller = CreatePeerConnectionWithAudio(std::move(caller_fake_engine)); - std::vector callee_fake_codecs; - callee_fake_codecs.push_back(cricket::CreateAudioCodec(120, "foo", 0, 1)); - callee_fake_codecs.push_back( - cricket::CreateAudioCodec(121, cricket::kRedCodecName, 0, 1)); - callee_fake_codecs.back().SetParam(cricket::kCodecParamNotInNameValueFormat, + std::vector callee_fake_codecs; + callee_fake_codecs.push_back(CreateAudioCodec(120, "foo", 0, 1)); + callee_fake_codecs.push_back(CreateAudioCodec(121, kRedCodecName, 0, 1)); + callee_fake_codecs.back().SetParam(kCodecParamNotInNameValueFormat, "120/120"); auto callee_fake_engine = std::make_unique(); callee_fake_engine->SetAudioCodecs(callee_fake_codecs); @@ -1440,7 +1387,7 @@ TEST_P(PeerConnectionMediaTest, RedFmtpPayloadMixed) { callee->SetRemoteDescription(std::move(offer)); auto answer = callee->CreateAnswerAndSetAsLocal(); auto answer_description = - cricket::GetFirstAudioContentDescription(answer->description()); + GetFirstAudioContentDescription(answer->description()); // RED is not negotiated. ASSERT_EQ(1u, answer_description->codecs().size()); } @@ -1448,21 +1395,19 @@ TEST_P(PeerConnectionMediaTest, RedFmtpPayloadMixed) { // Test behaviour when the RED fmtp attempts to negotiate different levels of // redundancy. 
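// For RED, the length of that fmtp list is what encodes the redundancy level:
// "100/100" permits one redundant block per packet, "120/120/120" permits
// two. In the test below the offerer advertises the former and the answerer
// is configured with the latter; as asserted there, the mismatch does not
// prevent RED from being negotiated. The two shapes, sketched with arbitrary
// payload types:
Codec red_single = CreateAudioCodec(101, kRedCodecName, 0, 1);
red_single.SetParam(kCodecParamNotInNameValueFormat, "100/100");
Codec red_double = CreateAudioCodec(121, kRedCodecName, 0, 1);
red_double.SetParam(kCodecParamNotInNameValueFormat, "120/120/120");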
TEST_P(PeerConnectionMediaTest, RedFmtpPayloadDifferentRedundancy) { - std::vector caller_fake_codecs; - caller_fake_codecs.push_back(cricket::CreateAudioCodec(100, "foo", 0, 1)); - caller_fake_codecs.push_back( - cricket::CreateAudioCodec(101, cricket::kRedCodecName, 0, 1)); - caller_fake_codecs.back().SetParam(cricket::kCodecParamNotInNameValueFormat, + std::vector caller_fake_codecs; + caller_fake_codecs.push_back(CreateAudioCodec(100, "foo", 0, 1)); + caller_fake_codecs.push_back(CreateAudioCodec(101, kRedCodecName, 0, 1)); + caller_fake_codecs.back().SetParam(kCodecParamNotInNameValueFormat, "100/100"); auto caller_fake_engine = std::make_unique(); caller_fake_engine->SetAudioCodecs(caller_fake_codecs); auto caller = CreatePeerConnectionWithAudio(std::move(caller_fake_engine)); - std::vector callee_fake_codecs; - callee_fake_codecs.push_back(cricket::CreateAudioCodec(120, "foo", 0, 1)); - callee_fake_codecs.push_back( - cricket::CreateAudioCodec(121, cricket::kRedCodecName, 0, 1)); - callee_fake_codecs.back().SetParam(cricket::kCodecParamNotInNameValueFormat, + std::vector callee_fake_codecs; + callee_fake_codecs.push_back(CreateAudioCodec(120, "foo", 0, 1)); + callee_fake_codecs.push_back(CreateAudioCodec(121, kRedCodecName, 0, 1)); + callee_fake_codecs.back().SetParam(kCodecParamNotInNameValueFormat, "120/120/120"); auto callee_fake_engine = std::make_unique(); callee_fake_engine->SetAudioCodecs(callee_fake_codecs); @@ -1473,7 +1418,7 @@ TEST_P(PeerConnectionMediaTest, RedFmtpPayloadDifferentRedundancy) { callee->SetRemoteDescription(std::move(offer)); auto answer = callee->CreateAnswerAndSetAsLocal(); auto answer_description = - cricket::GetFirstAudioContentDescription(answer->description()); + GetFirstAudioContentDescription(answer->description()); // RED is negotiated. ASSERT_EQ(2u, answer_description->codecs().size()); @@ -1481,46 +1426,43 @@ TEST_P(PeerConnectionMediaTest, RedFmtpPayloadDifferentRedundancy) { // attempt to add RED, which should refer to the correct payload type. 
offer = callee->CreateOfferAndSetAsLocal(); auto* offer_description = - cricket::GetFirstAudioContentDescription(offer->description()); + GetFirstAudioContentDescription(offer->description()); ASSERT_EQ(2u, offer_description->codecs().size()); for (const auto& codec : offer_description->codecs()) { if (codec.name == "foo") { ASSERT_EQ(100, codec.id); - } else if (codec.name == cricket::kRedCodecName) { + } else if (codec.name == kRedCodecName) { std::string fmtp; - ASSERT_TRUE( - codec.GetParam(cricket::kCodecParamNotInNameValueFormat, &fmtp)); + ASSERT_TRUE(codec.GetParam(kCodecParamNotInNameValueFormat, &fmtp)); EXPECT_EQ("100/100", fmtp); } } } template -bool CompareCodecs(const std::vector& capabilities, +bool CompareCodecs(const std::vector& capabilities, const std::vector& codecs) { bool capability_has_rtx = - absl::c_any_of(capabilities, [](const webrtc::RtpCodecCapability& codec) { - return codec.name == cricket::kRtxCodecName; + absl::c_any_of(capabilities, [](const RtpCodecCapability& codec) { + return codec.name == kRtxCodecName; }); - bool codecs_has_rtx = absl::c_any_of(codecs, [](const C& codec) { - return codec.name == cricket::kRtxCodecName; - }); + bool codecs_has_rtx = absl::c_any_of( + codecs, [](const C& codec) { return codec.name == kRtxCodecName; }); std::vector codecs_no_rtx; - absl::c_copy_if( - codecs, std::back_inserter(codecs_no_rtx), - [](const C& codec) { return codec.name != cricket::kRtxCodecName; }); + absl::c_copy_if(codecs, std::back_inserter(codecs_no_rtx), + [](const C& codec) { return codec.name != kRtxCodecName; }); - std::vector capabilities_no_rtx; + std::vector capabilities_no_rtx; absl::c_copy_if(capabilities, std::back_inserter(capabilities_no_rtx), - [](const webrtc::RtpCodecCapability& codec) { - return codec.name != cricket::kRtxCodecName; + [](const RtpCodecCapability& codec) { + return codec.name != kRtxCodecName; }); return capability_has_rtx == codecs_has_rtx && absl::c_equal( capabilities_no_rtx, codecs_no_rtx, - [](const webrtc::RtpCodecCapability& capability, const C& codec) { + [](const RtpCodecCapability& capability, const C& codec) { return codec.MatchesRtpCodec(capability); }); } @@ -1528,48 +1470,26 @@ bool CompareCodecs(const std::vector& capabilities, TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesAudioMissingRecvCodec) { auto fake_engine = std::make_unique(); - auto send_codecs = fake_engine->voice().send_codecs(); - send_codecs.push_back(cricket::CreateAudioCodec(send_codecs.back().id + 1, - "send_only_codec", 0, 1)); + auto send_codecs = fake_engine->voice().LegacySendCodecs(); + send_codecs.push_back( + CreateAudioCodec(send_codecs.back().id + 1, "send_only_codec", 0, 1)); fake_engine->SetAudioSendCodecs(send_codecs); auto caller = CreatePeerConnectionWithAudio(std::move(fake_engine)); auto transceiver = caller->pc()->GetTransceivers().front(); - auto capabilities = caller->pc_factory()->GetRtpSenderCapabilities( - cricket::MediaType::MEDIA_TYPE_AUDIO); + auto capabilities = + caller->pc_factory()->GetRtpSenderCapabilities(webrtc::MediaType::AUDIO); - std::vector codecs; + std::vector codecs; absl::c_copy_if(capabilities.codecs, std::back_inserter(codecs), - [](const webrtc::RtpCodecCapability& codec) { + [](const RtpCodecCapability& codec) { return codec.name.find("_only_") != std::string::npos; }); - auto result = transceiver->SetCodecPreferences(codecs); - EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, result.type()); -} - -TEST_F(PeerConnectionMediaTestUnifiedPlan, - SetCodecPreferencesAudioMissingSendCodec) 
{ - auto fake_engine = std::make_unique<cricket::FakeMediaEngine>(); - auto recv_codecs = fake_engine->voice().recv_codecs(); - recv_codecs.push_back(cricket::CreateAudioCodec(recv_codecs.back().id + 1, - "recv_only_codec", 0, 1)); - fake_engine->SetAudioRecvCodecs(recv_codecs); - auto caller = CreatePeerConnectionWithAudio(std::move(fake_engine)); - - auto transceiver = caller->pc()->GetTransceivers().front(); - auto capabilities = caller->pc_factory()->GetRtpReceiverCapabilities( - cricket::MediaType::MEDIA_TYPE_AUDIO); - - std::vector<webrtc::RtpCodecCapability> codecs; - absl::c_copy_if(capabilities.codecs, std::back_inserter(codecs), - [](const webrtc::RtpCodecCapability& codec) { - return codec.name.find("_only_") != std::string::npos; - }); - - auto result = transceiver->SetCodecPreferences(codecs); - EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, result.type()); + // This is OK; however, because the codec is send-only and the transceiver is + // not send-only, it would get filtered out during negotiation. + EXPECT_THAT(transceiver->SetCodecPreferences(codecs), IsRtcOk()); } TEST_F(PeerConnectionMediaTestUnifiedPlan, @@ -1577,14 +1497,12 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, auto caller = CreatePeerConnectionWithAudio(); auto transceiver = caller->pc()->GetTransceivers().front(); - auto video_codecs = - caller->pc_factory() - ->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_VIDEO) - .codecs; - auto codecs = - caller->pc_factory() - ->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_AUDIO) - .codecs; + auto video_codecs = caller->pc_factory() + ->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO) + .codecs; + auto codecs = caller->pc_factory() + ->GetRtpSenderCapabilities(webrtc::MediaType::AUDIO) + .codecs; codecs.insert(codecs.end(), video_codecs.begin(), video_codecs.end()); auto result = transceiver->SetCodecPreferences(codecs); EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, result.type()); @@ -1593,32 +1511,31 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesAudioRejectsOnlyRtxRedFec) { auto fake_engine = std::make_unique(); - auto audio_codecs = fake_engine->voice().send_codecs(); - audio_codecs.push_back(cricket::CreateAudioRtxCodec( - audio_codecs.back().id + 1, audio_codecs.back().id)); - audio_codecs.push_back(cricket::CreateAudioCodec( - audio_codecs.back().id + 1, cricket::kRedCodecName, 0, 1)); - audio_codecs.push_back(cricket::CreateAudioCodec( - audio_codecs.back().id + 1, cricket::kUlpfecCodecName, 0, 1)); + auto audio_codecs = fake_engine->voice().LegacySendCodecs(); + audio_codecs.push_back( + CreateAudioRtxCodec(audio_codecs.back().id + 1, audio_codecs.back().id)); + audio_codecs.push_back( + CreateAudioCodec(audio_codecs.back().id + 1, kRedCodecName, 0, 1)); + audio_codecs.push_back( + CreateAudioCodec(audio_codecs.back().id + 1, kUlpfecCodecName, 0, 1)); fake_engine->SetAudioCodecs(audio_codecs); auto caller = CreatePeerConnectionWithAudio(std::move(fake_engine)); - auto transceiver = caller->pc()->GetTransceivers().front(); - auto codecs = - caller->pc_factory() - ->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_AUDIO) - .codecs; + auto transceiver = + RtpTransceiverInternal(caller->pc()->GetTransceivers().front()); + auto codecs = caller->pc_factory() + ->GetRtpSenderCapabilities(webrtc::MediaType::AUDIO) + .codecs; auto codecs_only_rtx_red_fec = codecs; - auto it = std::remove_if(codecs_only_rtx_red_fec.begin(), - codecs_only_rtx_red_fec.end(), - [](const webrtc::RtpCodecCapability& codec) { - return !(codec.name ==
cricket::kRtxCodecName || - codec.name == cricket::kRedCodecName || - codec.name == cricket::kUlpfecCodecName); - }); + auto it = std::remove_if( + codecs_only_rtx_red_fec.begin(), codecs_only_rtx_red_fec.end(), + [](const RtpCodecCapability& codec) { + return !(codec.name == kRtxCodecName || codec.name == kRedCodecName || + codec.name == kUlpfecCodecName); + }); codecs_only_rtx_red_fec.erase(it, codecs_only_rtx_red_fec.end()); - + ASSERT_THAT(codecs_only_rtx_red_fec.size(), Gt(0)); auto result = transceiver->SetCodecPreferences(codecs_only_rtx_red_fec); EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, result.type()); } @@ -1628,7 +1545,7 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesAllAudioCodecs) { auto sender_audio_codecs = caller->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_AUDIO) + ->GetRtpSenderCapabilities(webrtc::MediaType::AUDIO) .codecs; auto audio_transceiver = caller->pc()->GetTransceivers().front(); @@ -1636,11 +1553,8 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesAllAudioCodecs) { // Normal case, set all capabilities as preferences EXPECT_TRUE(audio_transceiver->SetCodecPreferences(sender_audio_codecs).ok()); auto offer = caller->CreateOffer(); - auto codecs = offer->description() - ->contents()[0] - .media_description() - ->as_audio() - ->codecs(); + auto codecs = + offer->description()->contents()[0].media_description()->codecs(); EXPECT_TRUE(CompareCodecs(sender_audio_codecs, codecs)); } @@ -1650,20 +1564,17 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, auto sender_audio_codecs = caller->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_AUDIO) + ->GetRtpSenderCapabilities(webrtc::MediaType::AUDIO) .codecs; - std::vector empty_codecs = {}; + std::vector empty_codecs = {}; auto audio_transceiver = caller->pc()->GetTransceivers().front(); // Normal case, reset codec preferences EXPECT_TRUE(audio_transceiver->SetCodecPreferences(empty_codecs).ok()); auto offer = caller->CreateOffer(); - auto codecs = offer->description() - ->contents()[0] - .media_description() - ->as_audio() - ->codecs(); + auto codecs = + offer->description()->contents()[0].media_description()->codecs(); EXPECT_TRUE(CompareCodecs(sender_audio_codecs, codecs)); } @@ -1672,14 +1583,12 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, auto caller = CreatePeerConnectionWithVideo(); auto transceiver = caller->pc()->GetTransceivers().front(); - auto audio_codecs = - caller->pc_factory() - ->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_AUDIO) - .codecs; - auto codecs = - caller->pc_factory() - ->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_VIDEO) - .codecs; + auto audio_codecs = caller->pc_factory() + ->GetRtpSenderCapabilities(webrtc::MediaType::AUDIO) + .codecs; + auto codecs = caller->pc_factory() + ->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO) + .codecs; codecs.insert(codecs.end(), audio_codecs.begin(), audio_codecs.end()); auto result = transceiver->SetCodecPreferences(codecs); EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION, result.type()); @@ -1688,30 +1597,28 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesVideoRejectsOnlyRtxRedFec) { auto fake_engine = std::make_unique(); - auto video_codecs = fake_engine->video().send_codecs(); - video_codecs.push_back(cricket::CreateVideoRtxCodec( - video_codecs.back().id + 1, video_codecs.back().id)); - video_codecs.push_back(cricket::CreateVideoCodec(video_codecs.back().id + 1, - cricket::kRedCodecName)); - 
video_codecs.push_back(cricket::CreateVideoCodec(video_codecs.back().id + 1, - cricket::kUlpfecCodecName)); + auto video_codecs = fake_engine->video().LegacySendCodecs(); + video_codecs.push_back( + CreateVideoRtxCodec(video_codecs.back().id + 1, video_codecs.back().id)); + video_codecs.push_back( + CreateVideoCodec(video_codecs.back().id + 1, kRedCodecName)); + video_codecs.push_back( + CreateVideoCodec(video_codecs.back().id + 1, kUlpfecCodecName)); fake_engine->SetVideoCodecs(video_codecs); auto caller = CreatePeerConnectionWithVideo(std::move(fake_engine)); auto transceiver = caller->pc()->GetTransceivers().front(); - auto codecs = - caller->pc_factory() - ->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_VIDEO) - .codecs; + auto codecs = caller->pc_factory() + ->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO) + .codecs; auto codecs_only_rtx_red_fec = codecs; - auto it = std::remove_if(codecs_only_rtx_red_fec.begin(), - codecs_only_rtx_red_fec.end(), - [](const webrtc::RtpCodecCapability& codec) { - return !(codec.name == cricket::kRtxCodecName || - codec.name == cricket::kRedCodecName || - codec.name == cricket::kUlpfecCodecName); - }); + auto it = std::remove_if( + codecs_only_rtx_red_fec.begin(), codecs_only_rtx_red_fec.end(), + [](const RtpCodecCapability& codec) { + return !(codec.name == kRtxCodecName || codec.name == kRedCodecName || + codec.name == kUlpfecCodecName); + }); codecs_only_rtx_red_fec.erase(it, codecs_only_rtx_red_fec.end()); auto result = transceiver->SetCodecPreferences(codecs_only_rtx_red_fec); @@ -1723,7 +1630,7 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesAllVideoCodecs) { auto sender_video_codecs = caller->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO) + ->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO) .codecs; auto video_transceiver = caller->pc()->GetTransceivers().front(); @@ -1731,11 +1638,8 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesAllVideoCodecs) { // Normal case, setting preferences to normal capabilities EXPECT_TRUE(video_transceiver->SetCodecPreferences(sender_video_codecs).ok()); auto offer = caller->CreateOffer(); - auto codecs = offer->description() - ->contents()[0] - .media_description() - ->as_video() - ->codecs(); + auto codecs = + offer->description()->contents()[0].media_description()->codecs(); EXPECT_TRUE(CompareCodecs(sender_video_codecs, codecs)); } @@ -1745,21 +1649,18 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, auto sender_video_codecs = caller->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO) + ->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO) .codecs; - std::vector empty_codecs = {}; + std::vector empty_codecs = {}; auto video_transceiver = caller->pc()->GetTransceivers().front(); // Normal case, resetting preferences with empty list of codecs EXPECT_TRUE(video_transceiver->SetCodecPreferences(empty_codecs).ok()); auto offer = caller->CreateOffer(); - auto codecs = offer->description() - ->contents()[0] - .media_description() - ->as_video() - ->codecs(); + auto codecs = + offer->description()->contents()[0].media_description()->codecs(); EXPECT_TRUE(CompareCodecs(sender_video_codecs, codecs)); } @@ -1769,7 +1670,7 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, auto sender_video_codecs = caller->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO) + ->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO) .codecs; auto video_transceiver = caller->pc()->GetTransceivers().front(); @@ -1784,24 +1685,21 @@ 
TEST_F(PeerConnectionMediaTestUnifiedPlan, EXPECT_TRUE(video_transceiver->SetCodecPreferences(duplicate_codec).ok()); auto offer = caller->CreateOffer(); - auto codecs = offer->description() - ->contents()[0] - .media_description() - ->as_video() - ->codecs(); + auto codecs = + offer->description()->contents()[0].media_description()->codecs(); EXPECT_TRUE(CompareCodecs(single_codec, codecs)); } TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesVideoWithRtx) { auto caller_fake_engine = std::make_unique(); - auto caller_video_codecs = caller_fake_engine->video().send_codecs(); - caller_video_codecs.push_back(cricket::CreateVideoCodec( - caller_video_codecs.back().id + 1, cricket::kVp8CodecName)); - caller_video_codecs.push_back(cricket::CreateVideoRtxCodec( + auto caller_video_codecs = caller_fake_engine->video().LegacySendCodecs(); + caller_video_codecs.push_back( + CreateVideoCodec(caller_video_codecs.back().id + 1, kVp8CodecName)); + caller_video_codecs.push_back(CreateVideoRtxCodec( caller_video_codecs.back().id + 1, caller_video_codecs.back().id)); - caller_video_codecs.push_back(cricket::CreateVideoCodec( - caller_video_codecs.back().id + 1, cricket::kVp9CodecName)); - caller_video_codecs.push_back(cricket::CreateVideoRtxCodec( + caller_video_codecs.push_back( + CreateVideoCodec(caller_video_codecs.back().id + 1, kVp9CodecName)); + caller_video_codecs.push_back(CreateVideoRtxCodec( caller_video_codecs.back().id + 1, caller_video_codecs.back().id)); caller_fake_engine->SetVideoCodecs(caller_video_codecs); @@ -1809,31 +1707,27 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesVideoWithRtx) { auto sender_video_codecs = caller->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO) + ->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO) .codecs; auto video_transceiver = caller->pc()->GetTransceivers().front(); // Check that RTX codec is properly added auto video_codecs_vpx_rtx = sender_video_codecs; - auto it = - std::remove_if(video_codecs_vpx_rtx.begin(), video_codecs_vpx_rtx.end(), - [](const webrtc::RtpCodecCapability& codec) { - return codec.name != cricket::kRtxCodecName && - codec.name != cricket::kVp8CodecName && - codec.name != cricket::kVp9CodecName; - }); + auto it = std::remove_if( + video_codecs_vpx_rtx.begin(), video_codecs_vpx_rtx.end(), + [](const RtpCodecCapability& codec) { + return codec.name != kRtxCodecName && codec.name != kVp8CodecName && + codec.name != kVp9CodecName; + }); video_codecs_vpx_rtx.erase(it, video_codecs_vpx_rtx.end()); absl::c_reverse(video_codecs_vpx_rtx); EXPECT_EQ(video_codecs_vpx_rtx.size(), 3u); // VP8, VP9, RTX EXPECT_TRUE( video_transceiver->SetCodecPreferences(video_codecs_vpx_rtx).ok()); auto offer = caller->CreateOffer(); - auto codecs = offer->description() - ->contents()[0] - .media_description() - ->as_video() - ->codecs(); + auto codecs = + offer->description()->contents()[0].media_description()->codecs(); EXPECT_TRUE(CompareCodecs(video_codecs_vpx_rtx, codecs)); EXPECT_EQ(codecs.size(), 4u); @@ -1842,14 +1736,14 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesVideoWithRtx) { TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesVideoCodecsNegotiation) { auto caller_fake_engine = std::make_unique(); - auto caller_video_codecs = caller_fake_engine->video().send_codecs(); - caller_video_codecs.push_back(cricket::CreateVideoCodec( - caller_video_codecs.back().id + 1, cricket::kVp8CodecName)); - caller_video_codecs.push_back(cricket::CreateVideoRtxCodec( + auto 
caller_video_codecs = caller_fake_engine->video().LegacySendCodecs(); + caller_video_codecs.push_back( + CreateVideoCodec(caller_video_codecs.back().id + 1, kVp8CodecName)); + caller_video_codecs.push_back(CreateVideoRtxCodec( caller_video_codecs.back().id + 1, caller_video_codecs.back().id)); - caller_video_codecs.push_back(cricket::CreateVideoCodec( - caller_video_codecs.back().id + 1, cricket::kVp9CodecName)); - caller_video_codecs.push_back(cricket::CreateVideoRtxCodec( + caller_video_codecs.push_back( + CreateVideoCodec(caller_video_codecs.back().id + 1, kVp9CodecName)); + caller_video_codecs.push_back(CreateVideoRtxCodec( caller_video_codecs.back().id + 1, caller_video_codecs.back().id)); caller_fake_engine->SetVideoCodecs(caller_video_codecs); @@ -1860,27 +1754,24 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, auto callee = CreatePeerConnection(std::move(callee_fake_engine)); auto video_codecs = caller->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO) + ->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO) .codecs; auto send_transceiver = caller->pc()->GetTransceivers().front(); auto video_codecs_vpx = video_codecs; auto it = std::remove_if(video_codecs_vpx.begin(), video_codecs_vpx.end(), - [](const webrtc::RtpCodecCapability& codec) { - return codec.name != cricket::kVp8CodecName && - codec.name != cricket::kVp9CodecName; + [](const RtpCodecCapability& codec) { + return codec.name != kVp8CodecName && + codec.name != kVp9CodecName; }); video_codecs_vpx.erase(it, video_codecs_vpx.end()); EXPECT_EQ(video_codecs_vpx.size(), 2u); // VP8, VP9 EXPECT_TRUE(send_transceiver->SetCodecPreferences(video_codecs_vpx).ok()); auto offer = caller->CreateOfferAndSetAsLocal(); - auto codecs = offer->description() - ->contents()[0] - .media_description() - ->as_video() - ->codecs(); + auto codecs = + offer->description()->contents()[0].media_description()->codecs(); EXPECT_EQ(codecs.size(), 2u); // VP8, VP9 EXPECT_TRUE(CompareCodecs(video_codecs_vpx, codecs)); @@ -1890,9 +1781,9 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, auto recv_transceiver = callee->pc()->GetTransceivers().front(); auto video_codecs_vp8_rtx = video_codecs; it = std::remove_if(video_codecs_vp8_rtx.begin(), video_codecs_vp8_rtx.end(), - [](const webrtc::RtpCodecCapability& codec) { - bool r = codec.name != cricket::kVp8CodecName && - codec.name != cricket::kRtxCodecName; + [](const RtpCodecCapability& codec) { + bool r = codec.name != kVp8CodecName && + codec.name != kRtxCodecName; return r; }); video_codecs_vp8_rtx.erase(it, video_codecs_vp8_rtx.end()); @@ -1901,25 +1792,22 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, auto answer = callee->CreateAnswerAndSetAsLocal(); - auto recv_codecs = answer->description() - ->contents()[0] - .media_description() - ->as_video() - ->codecs(); + auto recv_codecs = + answer->description()->contents()[0].media_description()->codecs(); EXPECT_EQ(recv_codecs.size(), 1u); // VP8 } TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesVideoCodecsNegotiationReverseOrder) { auto caller_fake_engine = std::make_unique(); - auto caller_video_codecs = caller_fake_engine->video().send_codecs(); - caller_video_codecs.push_back(cricket::CreateVideoCodec( - caller_video_codecs.back().id + 1, cricket::kVp8CodecName)); - caller_video_codecs.push_back(cricket::CreateVideoRtxCodec( + auto caller_video_codecs = caller_fake_engine->video().LegacySendCodecs(); + caller_video_codecs.push_back( + CreateVideoCodec(caller_video_codecs.back().id + 1, kVp8CodecName)); + 
caller_video_codecs.push_back(CreateVideoRtxCodec( caller_video_codecs.back().id + 1, caller_video_codecs.back().id)); - caller_video_codecs.push_back(cricket::CreateVideoCodec( - caller_video_codecs.back().id + 1, cricket::kVp9CodecName)); - caller_video_codecs.push_back(cricket::CreateVideoRtxCodec( + caller_video_codecs.push_back( + CreateVideoCodec(caller_video_codecs.back().id + 1, kVp9CodecName)); + caller_video_codecs.push_back(CreateVideoRtxCodec( caller_video_codecs.back().id + 1, caller_video_codecs.back().id)); caller_fake_engine->SetVideoCodecs(caller_video_codecs); @@ -1930,16 +1818,16 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, auto callee = CreatePeerConnection(std::move(callee_fake_engine)); auto video_codecs = caller->pc_factory() - ->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO) + ->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO) .codecs; auto send_transceiver = caller->pc()->GetTransceivers().front(); auto video_codecs_vpx = video_codecs; auto it = std::remove_if(video_codecs_vpx.begin(), video_codecs_vpx.end(), - [](const webrtc::RtpCodecCapability& codec) { - return codec.name != cricket::kVp8CodecName && - codec.name != cricket::kVp9CodecName; + [](const RtpCodecCapability& codec) { + return codec.name != kVp8CodecName && + codec.name != kVp9CodecName; }); video_codecs_vpx.erase(it, video_codecs_vpx.end()); EXPECT_EQ(video_codecs_vpx.size(), 2u); // VP8, VP9 @@ -1949,11 +1837,8 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, absl::c_reverse(video_codecs_vpx_reverse); auto offer = caller->CreateOfferAndSetAsLocal(); - auto codecs = offer->description() - ->contents()[0] - .media_description() - ->as_video() - ->codecs(); + auto codecs = + offer->description()->contents()[0].media_description()->codecs(); EXPECT_EQ(codecs.size(), 2u); // VP9, VP8 EXPECT_TRUE(CompareCodecs(video_codecs_vpx, codecs)); @@ -1964,11 +1849,8 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, auto answer = callee->CreateAnswerAndSetAsLocal(); - auto recv_codecs = answer->description() - ->contents()[0] - .media_description() - ->as_video() - ->codecs(); + auto recv_codecs = + answer->description()->contents()[0].media_description()->codecs(); EXPECT_TRUE(CompareCodecs(video_codecs_vpx_reverse, recv_codecs)); } @@ -1984,8 +1866,8 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, EXPECT_TRUE(HasAnyComfortNoiseCodecs(offer->description())); auto transceiver = caller->pc()->GetTransceivers().front(); - auto capabilities = caller->pc_factory()->GetRtpSenderCapabilities( - cricket::MediaType::MEDIA_TYPE_AUDIO); + auto capabilities = + caller->pc_factory()->GetRtpSenderCapabilities(webrtc::MediaType::AUDIO); EXPECT_TRUE(transceiver->SetCodecPreferences(capabilities.codecs).ok()); options.voice_activity_detection = false; @@ -1999,16 +1881,16 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, // SetCodecPreferences. 
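// The SetCodecPreferences pattern that the surrounding tests drive, pulled
// together in one place; a sketch that assumes a fixture wrapper (`caller`)
// like the ones above, whose first transceiver is the video transceiver, and
// that keeps only VP9 plus its RTX codec:
auto capabilities =
    caller->pc_factory()->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO);
std::vector<RtpCodecCapability> preferred;
for (const RtpCodecCapability& codec : capabilities.codecs) {
  // Auxiliary codecs (RTX/RED/FEC) may accompany the media codecs, but a
  // list containing *only* auxiliary codecs is rejected with
  // INVALID_MODIFICATION, as the tests above assert.
  if (codec.name == kVp9CodecName || codec.name == kRtxCodecName) {
    preferred.push_back(codec);
  }
}
auto transceiver = caller->pc()->GetTransceivers().front();
EXPECT_TRUE(transceiver->SetCodecPreferences(preferred).ok());
// Passing an empty vector clears the preferences again, as the reset tests
// above show.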
TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesAvoidsPayloadTypeConflictInOffer) { - auto fake_engine = std::make_unique(); + auto fake_engine = std::make_unique(); - std::vector audio_codecs; - audio_codecs.emplace_back(cricket::CreateAudioCodec(100, "foo", 0, 1)); - audio_codecs.emplace_back(cricket::CreateAudioRtxCodec(101, 100)); + std::vector audio_codecs; + audio_codecs.emplace_back(CreateAudioCodec(100, "foo", 0, 1)); + audio_codecs.emplace_back(CreateAudioRtxCodec(101, 100)); fake_engine->SetAudioCodecs(audio_codecs); - std::vector video_codecs; - video_codecs.emplace_back(cricket::CreateVideoCodec(100, "bar")); - video_codecs.emplace_back(cricket::CreateVideoRtxCodec(101, 100)); + std::vector video_codecs; + video_codecs.emplace_back(CreateVideoCodec(100, "bar")); + video_codecs.emplace_back(CreateVideoRtxCodec(101, 100)); fake_engine->SetVideoCodecs(video_codecs); auto caller = CreatePeerConnectionWithAudioVideo(std::move(fake_engine)); @@ -2016,40 +1898,40 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, ASSERT_EQ(2u, transceivers.size()); auto audio_transceiver = caller->pc()->GetTransceivers()[0]; - auto capabilities = caller->pc_factory()->GetRtpSenderCapabilities( - cricket::MediaType::MEDIA_TYPE_AUDIO); + auto capabilities = + caller->pc_factory()->GetRtpSenderCapabilities(webrtc::MediaType::AUDIO); EXPECT_TRUE(audio_transceiver->SetCodecPreferences(capabilities.codecs).ok()); auto video_transceiver = caller->pc()->GetTransceivers()[1]; - capabilities = caller->pc_factory()->GetRtpSenderCapabilities( - cricket::MediaType::MEDIA_TYPE_VIDEO); + capabilities = + caller->pc_factory()->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO); EXPECT_TRUE(video_transceiver->SetCodecPreferences(capabilities.codecs).ok()); RTCOfferAnswerOptions options; auto offer = caller->CreateOffer(options); EXPECT_FALSE(HasPayloadTypeConflict(offer->description())); // Sanity check that we got the primary codec and RTX. - EXPECT_EQ(2u, cricket::GetFirstAudioContentDescription(offer->description()) - ->codecs() - .size()); - EXPECT_EQ(2u, cricket::GetFirstVideoContentDescription(offer->description()) - ->codecs() - .size()); + EXPECT_EQ( + 2u, + GetFirstAudioContentDescription(offer->description())->codecs().size()); + EXPECT_EQ( + 2u, + GetFirstVideoContentDescription(offer->description())->codecs().size()); } // Same as above, but preferences set for the answer. 
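// Once negotiation has completed, the payload types that were actually
// assigned (including any remapping done to resolve the 100-vs-100 conflict
// set up above) can be read back from the senders. A rough sketch, assuming
// a fully negotiated connection and the standard
// RtpSenderInterface::GetParameters() accessors:
for (const auto& sender : caller->pc()->GetSenders()) {
  RtpParameters parameters = sender->GetParameters();
  for (const RtpCodecParameters& codec : parameters.codecs) {
    RTC_LOG(LS_INFO) << codec.name << " -> payload type "
                     << codec.payload_type;
  }
}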
TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesAvoidsPayloadTypeConflictInAnswer) { - auto fake_engine = std::make_unique(); + auto fake_engine = std::make_unique(); - std::vector audio_codecs; - audio_codecs.emplace_back(cricket::CreateAudioCodec(100, "foo", 0, 1)); - audio_codecs.emplace_back(cricket::CreateAudioRtxCodec(101, 100)); + std::vector audio_codecs; + audio_codecs.emplace_back(CreateAudioCodec(100, "foo", 0, 1)); + audio_codecs.emplace_back(CreateAudioRtxCodec(101, 100)); fake_engine->SetAudioCodecs(audio_codecs); - std::vector video_codecs; - video_codecs.emplace_back(cricket::CreateVideoCodec(100, "bar")); - video_codecs.emplace_back(cricket::CreateVideoRtxCodec(101, 100)); + std::vector video_codecs; + video_codecs.emplace_back(CreateVideoCodec(100, "bar")); + video_codecs.emplace_back(CreateVideoRtxCodec(101, 100)); fake_engine->SetVideoCodecs(video_codecs); auto caller = CreatePeerConnectionWithAudioVideo(std::move(fake_engine)); @@ -2061,40 +1943,40 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, ASSERT_EQ(2u, transceivers.size()); auto audio_transceiver = caller->pc()->GetTransceivers()[0]; - auto capabilities = caller->pc_factory()->GetRtpSenderCapabilities( - cricket::MediaType::MEDIA_TYPE_AUDIO); + auto capabilities = + caller->pc_factory()->GetRtpSenderCapabilities(webrtc::MediaType::AUDIO); EXPECT_TRUE(audio_transceiver->SetCodecPreferences(capabilities.codecs).ok()); auto video_transceiver = caller->pc()->GetTransceivers()[1]; - capabilities = caller->pc_factory()->GetRtpSenderCapabilities( - cricket::MediaType::MEDIA_TYPE_VIDEO); + capabilities = + caller->pc_factory()->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO); EXPECT_TRUE(video_transceiver->SetCodecPreferences(capabilities.codecs).ok()); auto answer = caller->CreateAnswer(options); EXPECT_FALSE(HasPayloadTypeConflict(answer->description())); // Sanity check that we got the primary codec and RTX. - EXPECT_EQ(2u, cricket::GetFirstAudioContentDescription(answer->description()) - ->codecs() - .size()); - EXPECT_EQ(2u, cricket::GetFirstVideoContentDescription(answer->description()) - ->codecs() - .size()); + EXPECT_EQ( + 2u, + GetFirstAudioContentDescription(answer->description())->codecs().size()); + EXPECT_EQ( + 2u, + GetFirstVideoContentDescription(answer->description())->codecs().size()); } // Same as above, but preferences set for a subsequent offer. 
TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesAvoidsPayloadTypeConflictInSubsequentOffer) { - auto fake_engine = std::make_unique(); + auto fake_engine = std::make_unique(); - std::vector audio_codecs; - audio_codecs.emplace_back(cricket::CreateAudioCodec(100, "foo", 0, 1)); - audio_codecs.emplace_back(cricket::CreateAudioRtxCodec(101, 100)); + std::vector audio_codecs; + audio_codecs.emplace_back(CreateAudioCodec(100, "foo", 0, 1)); + audio_codecs.emplace_back(CreateAudioRtxCodec(101, 100)); fake_engine->SetAudioCodecs(audio_codecs); - std::vector video_codecs; - video_codecs.emplace_back(cricket::CreateVideoCodec(100, "bar")); - video_codecs.emplace_back(cricket::CreateVideoRtxCodec(101, 100)); + std::vector video_codecs; + video_codecs.emplace_back(CreateVideoCodec(100, "bar")); + video_codecs.emplace_back(CreateVideoRtxCodec(101, 100)); fake_engine->SetVideoCodecs(video_codecs); auto caller = CreatePeerConnectionWithAudioVideo(std::move(fake_engine)); @@ -2107,13 +1989,13 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, ASSERT_EQ(2u, transceivers.size()); auto audio_transceiver = caller->pc()->GetTransceivers()[0]; - auto capabilities = caller->pc_factory()->GetRtpSenderCapabilities( - cricket::MediaType::MEDIA_TYPE_AUDIO); + auto capabilities = + caller->pc_factory()->GetRtpSenderCapabilities(webrtc::MediaType::AUDIO); EXPECT_TRUE(audio_transceiver->SetCodecPreferences(capabilities.codecs).ok()); auto video_transceiver = caller->pc()->GetTransceivers()[1]; - capabilities = caller->pc_factory()->GetRtpSenderCapabilities( - cricket::MediaType::MEDIA_TYPE_VIDEO); + capabilities = + caller->pc_factory()->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO); EXPECT_TRUE(video_transceiver->SetCodecPreferences(capabilities.codecs).ok()); auto reoffer = caller->CreateOffer(options); @@ -2121,17 +2003,82 @@ TEST_F(PeerConnectionMediaTestUnifiedPlan, EXPECT_FALSE(HasPayloadTypeConflict(reoffer->description())); // Sanity check that we got the primary codec and RTX. - EXPECT_EQ(2u, cricket::GetFirstAudioContentDescription(reoffer->description()) - ->codecs() - .size()); - EXPECT_EQ(2u, cricket::GetFirstVideoContentDescription(reoffer->description()) - ->codecs() - .size()); + EXPECT_EQ( + 2u, + GetFirstAudioContentDescription(reoffer->description())->codecs().size()); + EXPECT_EQ( + 2u, + GetFirstVideoContentDescription(reoffer->description())->codecs().size()); +} + +TEST_F(PeerConnectionMediaTestUnifiedPlan, + SetCodecPreferencesRecvOnlyCodecOnSendOnlyTransceiver) { + auto fake_engine = std::make_unique(); + + std::vector audio_codecs; + audio_codecs.emplace_back(CreateAudioCodec(100, "foo", 0, 1)); + fake_engine->SetAudioRecvCodecs(audio_codecs); + + auto caller = CreatePeerConnectionWithAudio(std::move(fake_engine)); + + auto transceivers = caller->pc()->GetTransceivers(); + ASSERT_EQ(1u, transceivers.size()); + + auto audio_transceiver = caller->pc()->GetTransceivers()[0]; + auto error = audio_transceiver->SetDirectionWithError( + RtpTransceiverDirection::kSendOnly); + ASSERT_TRUE(error.ok()); + auto capabilities = caller->pc_factory()->GetRtpReceiverCapabilities( + webrtc::MediaType::AUDIO); + EXPECT_TRUE(audio_transceiver->SetCodecPreferences(capabilities.codecs).ok()); + RTCOfferAnswerOptions options; + EXPECT_TRUE(caller->SetLocalDescription(caller->CreateOffer(options))); + // The transceiver is still sendonly (not stopped) because preferring a codec + // that is not applicable to the sendonly use case is the same as not having + // any codec preferences. 
+ EXPECT_EQ(audio_transceiver->direction(), RtpTransceiverDirection::kSendOnly); +} + +TEST_F(PeerConnectionMediaTestUnifiedPlan, SetCodecPreferencesVideoNoRtx) { + auto fake_engine = std::make_unique(); + + std::vector video_codecs; + video_codecs.emplace_back(CreateVideoCodec(100, "bar")); + video_codecs.emplace_back(CreateVideoRtxCodec(101, 100)); + video_codecs.emplace_back(CreateVideoCodec(102, kRedCodecName)); + fake_engine->SetVideoCodecs(video_codecs); + + auto caller = CreatePeerConnectionWithVideo(std::move(fake_engine)); + + auto transceivers = caller->pc()->GetTransceivers(); + ASSERT_EQ(1u, transceivers.size()); + + auto video_transceiver = caller->pc()->GetTransceivers()[0]; + EXPECT_TRUE(video_transceiver + ->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly) + .ok()); + auto capabilities = + caller->pc_factory()->GetRtpSenderCapabilities(webrtc::MediaType::VIDEO); + auto it = + std::remove_if(capabilities.codecs.begin(), capabilities.codecs.end(), + [](const RtpCodecCapability& codec) { + return codec.name == kRtxCodecName; + }); + capabilities.codecs.erase(it, capabilities.codecs.end()); + EXPECT_EQ(capabilities.codecs.size(), 2u); + EXPECT_TRUE(video_transceiver->SetCodecPreferences(capabilities.codecs).ok()); + + RTCOfferAnswerOptions options; + auto offer = caller->CreateOffer(options); + const auto& content = offer->description()->contents()[0]; + auto& codecs = content.media_description()->codecs(); + ASSERT_EQ(codecs.size(), 2u); + EXPECT_EQ(codecs[0].name, "bar"); + EXPECT_EQ(codecs[1].name, kRedCodecName); } INSTANTIATE_TEST_SUITE_P(PeerConnectionMediaTest, PeerConnectionMediaTest, Values(SdpSemantics::kPlanB_DEPRECATED, SdpSemantics::kUnifiedPlan)); - } // namespace webrtc diff --git a/pc/peer_connection_message_handler.cc b/pc/peer_connection_message_handler.cc index 8ddeddea58..a4494f56ce 100644 --- a/pc/peer_connection_message_handler.cc +++ b/pc/peer_connection_message_handler.cc @@ -10,15 +10,17 @@ #include "pc/peer_connection_message_handler.h" +#include #include #include "api/jsep.h" #include "api/legacy_stats_types.h" #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" #include "api/scoped_refptr.h" -#include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" +#include "api/units/time_delta.h" #include "pc/legacy_stats_collector_interface.h" #include "rtc_base/checks.h" @@ -26,8 +28,8 @@ namespace webrtc { namespace { template -rtc::scoped_refptr WrapScoped(T* ptr) { - return rtc::scoped_refptr(ptr); +scoped_refptr WrapScoped(T* ptr) { + return scoped_refptr(ptr); } } // namespace diff --git a/pc/peer_connection_message_handler.h b/pc/peer_connection_message_handler.h index 8bd0e5ebb1..b9498e85e8 100644 --- a/pc/peer_connection_message_handler.h +++ b/pc/peer_connection_message_handler.h @@ -26,7 +26,7 @@ namespace webrtc { class PeerConnectionMessageHandler { public: - explicit PeerConnectionMessageHandler(rtc::Thread* signaling_thread) + explicit PeerConnectionMessageHandler(Thread* signaling_thread) : signaling_thread_(signaling_thread) {} ~PeerConnectionMessageHandler() = default; diff --git a/pc/peer_connection_proxy.h b/pc/peer_connection_proxy.h index 6db27f2dd5..4f809abbce 100644 --- a/pc/peer_connection_proxy.h +++ b/pc/peer_connection_proxy.h @@ -11,12 +11,38 @@ #ifndef PC_PEER_CONNECTION_PROXY_H_ #define PC_PEER_CONNECTION_PROXY_H_ +#include +#include #include +#include #include #include +#include "api/adaptation/resource.h" +#include "api/candidate.h" 
+#include "api/data_channel_event_observer_interface.h" +#include "api/data_channel_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/jsep.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtc_event_log_output.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/sctp_transport_interface.h" +#include "api/set_local_description_observer_interface.h" +#include "api/set_remote_description_observer_interface.h" +#include "api/stats/rtc_stats_collector_callback.h" +#include "api/transport/bandwidth_estimation_settings.h" +#include "api/transport/bitrate_settings.h" +#include "api/transport/network_control.h" #include "pc/proxy.h" +#include "rtc_base/thread.h" namespace webrtc { @@ -27,45 +53,42 @@ namespace webrtc { // an implementation detail. BEGIN_PROXY_MAP(PeerConnection) PROXY_PRIMARY_THREAD_DESTRUCTOR() -PROXY_METHOD0(rtc::scoped_refptr, local_streams) -PROXY_METHOD0(rtc::scoped_refptr, remote_streams) +PROXY_METHOD0(scoped_refptr, local_streams) +PROXY_METHOD0(scoped_refptr, remote_streams) PROXY_METHOD1(bool, AddStream, MediaStreamInterface*) PROXY_METHOD1(void, RemoveStream, MediaStreamInterface*) -PROXY_METHOD2(RTCErrorOr>, +PROXY_METHOD2(RTCErrorOr>, AddTrack, - rtc::scoped_refptr, + scoped_refptr, const std::vector&) -PROXY_METHOD3(RTCErrorOr>, +PROXY_METHOD3(RTCErrorOr>, AddTrack, - rtc::scoped_refptr, + scoped_refptr, const std::vector&, const std::vector&) -PROXY_METHOD1(RTCError, - RemoveTrackOrError, - rtc::scoped_refptr) -PROXY_METHOD1(RTCErrorOr>, +PROXY_METHOD1(RTCError, RemoveTrackOrError, scoped_refptr) +PROXY_METHOD1(RTCErrorOr>, AddTransceiver, - rtc::scoped_refptr) -PROXY_METHOD2(RTCErrorOr>, + scoped_refptr) +PROXY_METHOD2(RTCErrorOr>, AddTransceiver, - rtc::scoped_refptr, + scoped_refptr, const RtpTransceiverInit&) -PROXY_METHOD1(RTCErrorOr>, +PROXY_METHOD1(RTCErrorOr>, AddTransceiver, - cricket::MediaType) -PROXY_METHOD2(RTCErrorOr>, + webrtc::MediaType) +PROXY_METHOD2(RTCErrorOr>, AddTransceiver, - cricket::MediaType, + webrtc::MediaType, const RtpTransceiverInit&) -PROXY_METHOD2(rtc::scoped_refptr, +PROXY_METHOD2(scoped_refptr, CreateSender, const std::string&, const std::string&) -PROXY_CONSTMETHOD0(std::vector>, - GetSenders) -PROXY_CONSTMETHOD0(std::vector>, +PROXY_CONSTMETHOD0(std::vector>, GetSenders) +PROXY_CONSTMETHOD0(std::vector>, GetReceivers) -PROXY_CONSTMETHOD0(std::vector>, +PROXY_CONSTMETHOD0(std::vector>, GetTransceivers) PROXY_METHOD3(bool, GetStats, @@ -75,14 +98,14 @@ PROXY_METHOD3(bool, PROXY_METHOD1(void, GetStats, RTCStatsCollectorCallback*) PROXY_METHOD2(void, GetStats, - rtc::scoped_refptr, - rtc::scoped_refptr) + scoped_refptr, + scoped_refptr) PROXY_METHOD2(void, GetStats, - rtc::scoped_refptr, - rtc::scoped_refptr) + scoped_refptr, + scoped_refptr) PROXY_METHOD0(void, ClearStatsCache) -PROXY_METHOD2(RTCErrorOr>, +PROXY_METHOD2(RTCErrorOr>, CreateDataChannelOrError, const std::string&, const DataChannelInit*) @@ -108,10 +131,10 @@ PROXY_METHOD2(void, PROXY_METHOD2(void, SetLocalDescription, std::unique_ptr, - rtc::scoped_refptr) + scoped_refptr) PROXY_METHOD1(void, SetLocalDescription, - rtc::scoped_refptr) + scoped_refptr) PROXY_METHOD2(void, SetLocalDescription, SetSessionDescriptionObserver*, @@ -120,7 +143,7 @@ PROXY_METHOD1(void, SetLocalDescription, 
SetSessionDescriptionObserver*) PROXY_METHOD2(void, SetRemoteDescription, std::unique_ptr, - rtc::scoped_refptr) + scoped_refptr) PROXY_METHOD2(void, SetRemoteDescription, SetSessionDescriptionObserver*, @@ -135,34 +158,41 @@ PROXY_METHOD2(void, AddIceCandidate, std::unique_ptr, std::function) -PROXY_METHOD1(bool, RemoveIceCandidates, const std::vector&) +PROXY_METHOD1(bool, RemoveIceCandidates, const std::vector&) PROXY_METHOD1(RTCError, SetBitrate, const BitrateSettings&) +PROXY_METHOD1(void, + ReconfigureBandwidthEstimation, + const BandwidthEstimationSettings&) PROXY_METHOD1(void, SetAudioPlayout, bool) PROXY_METHOD1(void, SetAudioRecording, bool) // This method will be invoked on the network thread. See // PeerConnectionFactory::CreatePeerConnectionOrError for more details. -PROXY_SECONDARY_METHOD1(rtc::scoped_refptr, +PROXY_SECONDARY_METHOD1(scoped_refptr, LookupDtlsTransportByMid, const std::string&) // This method will be invoked on the network thread. See // PeerConnectionFactory::CreatePeerConnectionOrError for more details. -PROXY_SECONDARY_CONSTMETHOD0(rtc::scoped_refptr, +PROXY_SECONDARY_CONSTMETHOD0(scoped_refptr, GetSctpTransport) PROXY_METHOD0(SignalingState, signaling_state) PROXY_METHOD0(IceConnectionState, ice_connection_state) PROXY_METHOD0(IceConnectionState, standardized_ice_connection_state) PROXY_METHOD0(PeerConnectionState, peer_connection_state) PROXY_METHOD0(IceGatheringState, ice_gathering_state) -PROXY_METHOD0(absl::optional, can_trickle_ice_candidates) -PROXY_METHOD1(void, AddAdaptationResource, rtc::scoped_refptr) +PROXY_METHOD0(std::optional, can_trickle_ice_candidates) +PROXY_METHOD1(void, AddAdaptationResource, scoped_refptr) PROXY_METHOD2(bool, StartRtcEventLog, std::unique_ptr, int64_t) PROXY_METHOD1(bool, StartRtcEventLog, std::unique_ptr) PROXY_METHOD0(void, StopRtcEventLog) +PROXY_METHOD1(void, + SetDataChannelEventObserver, + std::unique_ptr) PROXY_METHOD0(void, Close) -BYPASS_PROXY_CONSTMETHOD0(rtc::Thread*, signaling_thread) +PROXY_METHOD0(NetworkControllerInterface*, GetNetworkController) +BYPASS_PROXY_CONSTMETHOD0(Thread*, signaling_thread) END_PROXY_MAP(PeerConnection) } // namespace webrtc diff --git a/pc/peer_connection_rampup_tests.cc b/pc/peer_connection_rampup_tests.cc index 545a1d53d0..17cc3a2e0d 100644 --- a/pc/peer_connection_rampup_tests.cc +++ b/pc/peer_connection_rampup_tests.cc @@ -9,17 +9,15 @@ */ #include +#include #include #include #include -#include "absl/types/optional.h" -#include "api/audio/audio_mixer.h" -#include "api/audio_codecs/builtin_audio_decoder_factory.h" -#include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/audio_options.h" -#include "api/create_peerconnection_factory.h" +#include "api/enable_media_with_defaults.h" #include "api/jsep.h" +#include "api/make_ref_counted.h" #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" @@ -29,6 +27,7 @@ #include "api/stats/rtcstats_objects.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/metrics/metric.h" +#include "api/test/rtc_error_matchers.h" #include "api/video_codecs/video_decoder_factory_template.h" #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h" @@ -39,31 +38,27 @@ #include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h" #include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h" #include 
"api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "p2p/base/port_allocator.h" #include "p2p/base/port_interface.h" -#include "p2p/base/test_turn_server.h" -#include "p2p/client/basic_port_allocator.h" +#include "p2p/test/test_turn_server.h" #include "pc/peer_connection.h" #include "pc/peer_connection_wrapper.h" #include "pc/test/fake_audio_capture_module.h" #include "pc/test/frame_generator_capturer_video_track_source.h" #include "pc/test/mock_peer_connection_observers.h" #include "rtc_base/checks.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/fake_network.h" #include "rtc_base/firewall_socket_server.h" -#include "rtc_base/gunit.h" -#include "rtc_base/helpers.h" #include "rtc_base/socket_address.h" #include "rtc_base/socket_factory.h" -#include "rtc_base/ssl_certificate.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/test_certificate_verifier.h" #include "rtc_base/thread.h" #include "rtc_base/virtual_socket_server.h" #include "system_wrappers/include/clock.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" namespace webrtc { namespace { @@ -75,8 +70,7 @@ using ::webrtc::test::Unit; static const int kDefaultTestTimeMs = 15000; static const int kRampUpTimeMs = 5000; static const int kPollIntervalTimeMs = 50; -static const int kDefaultTimeoutMs = 10000; -static const rtc::SocketAddress kDefaultLocalAddress("1.1.1.1", 0); +static const SocketAddress kDefaultLocalAddress("1.1.1.1", 0); static const char kTurnInternalAddress[] = "88.88.88.0"; static const char kTurnExternalAddress[] = "88.88.88.1"; static const int kTurnInternalPort = 3478; @@ -110,8 +104,8 @@ class PeerConnectionWrapperForRampUpTest : public PeerConnectionWrapper { using PeerConnectionWrapper::PeerConnectionWrapper; PeerConnectionWrapperForRampUpTest( - rtc::scoped_refptr pc_factory, - rtc::scoped_refptr pc, + scoped_refptr pc_factory, + scoped_refptr pc, std::unique_ptr observer) : PeerConnectionWrapper::PeerConnectionWrapper(pc_factory, pc, @@ -127,28 +121,26 @@ class PeerConnectionWrapperForRampUpTest : public PeerConnectionWrapper { return success; } - rtc::scoped_refptr CreateLocalVideoTrack( + scoped_refptr CreateLocalVideoTrack( FrameGeneratorCapturerVideoTrackSource::Config config, Clock* clock) { video_track_sources_.emplace_back( - rtc::make_ref_counted( + make_ref_counted( config, clock, /*is_screencast=*/false)); video_track_sources_.back()->Start(); - return rtc::scoped_refptr( - pc_factory()->CreateVideoTrack(video_track_sources_.back(), - rtc::CreateRandomUuid())); + return scoped_refptr(pc_factory()->CreateVideoTrack( + video_track_sources_.back(), CreateRandomUuid())); } - rtc::scoped_refptr CreateLocalAudioTrack( - const cricket::AudioOptions options) { - rtc::scoped_refptr source = + scoped_refptr CreateLocalAudioTrack( + const AudioOptions options) { + scoped_refptr source = pc_factory()->CreateAudioSource(options); - return pc_factory()->CreateAudioTrack(rtc::CreateRandomUuid(), - source.get()); + return pc_factory()->CreateAudioTrack(CreateRandomUuid(), source.get()); } private: - std::vector> + std::vector> video_track_sources_; }; @@ -157,30 +149,15 @@ class PeerConnectionRampUpTest : public ::testing::Test { public: PeerConnectionRampUpTest() : clock_(Clock::GetRealTimeClock()), - virtual_socket_server_(new rtc::VirtualSocketServer()), - firewall_socket_server_( - new 
rtc::FirewallSocketServer(virtual_socket_server_.get())), - firewall_socket_factory_( - new rtc::BasicPacketSocketFactory(firewall_socket_server_.get())), - network_thread_(new rtc::Thread(firewall_socket_server_.get())), - worker_thread_(rtc::Thread::Create()) { - network_thread_->SetName("PCNetworkThread", this); + firewall_socket_server_(&virtual_socket_server_), + network_thread_(&firewall_socket_server_), + worker_thread_(Thread::Create()) { + network_thread_.SetName("PCNetworkThread", this); worker_thread_->SetName("PCWorkerThread", this); - RTC_CHECK(network_thread_->Start()); + RTC_CHECK(network_thread_.Start()); RTC_CHECK(worker_thread_->Start()); - virtual_socket_server_->set_bandwidth(kNetworkBandwidth / 8); - pc_factory_ = CreatePeerConnectionFactory( - network_thread_.get(), worker_thread_.get(), rtc::Thread::Current(), - rtc::scoped_refptr(FakeAudioCaptureModule::Create()), - CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory(), - std::make_unique>(), - std::make_unique>(), - nullptr /* audio_mixer */, nullptr /* audio_processing */); + virtual_socket_server_.set_bandwidth(kNetworkBandwidth / 8); } virtual ~PeerConnectionRampUpTest() { @@ -196,30 +173,40 @@ class PeerConnectionRampUpTest : public ::testing::Test { std::unique_ptr CreatePeerConnectionWrapper(const RTCConfiguration& config) { - auto* fake_network_manager = new rtc::FakeNetworkManager(); - fake_network_manager->AddInterface(kDefaultLocalAddress); - fake_network_managers_.emplace_back(fake_network_manager); + PeerConnectionFactoryDependencies pcf_deps; + pcf_deps.network_thread = network_thread(); + pcf_deps.worker_thread = worker_thread_.get(); + pcf_deps.signaling_thread = Thread::Current(); + pcf_deps.socket_factory = &firewall_socket_server_; + auto network_manager = std::make_unique(); + network_manager->AddInterface(kDefaultLocalAddress); + pcf_deps.network_manager = std::move(network_manager); + pcf_deps.adm = FakeAudioCaptureModule::Create(); + pcf_deps.video_encoder_factory = + std::make_unique>(); + pcf_deps.video_decoder_factory = + std::make_unique>(); + EnableMediaWithDefaults(pcf_deps); + scoped_refptr pc_factory = + CreateModularPeerConnectionFactory(std::move(pcf_deps)); auto observer = std::make_unique(); - webrtc::PeerConnectionDependencies dependencies(observer.get()); - cricket::BasicPortAllocator* port_allocator = - new cricket::BasicPortAllocator(fake_network_manager, - firewall_socket_factory_.get()); - - port_allocator->set_step_delay(cricket::kDefaultStepDelay); - dependencies.allocator = - std::unique_ptr(port_allocator); + PeerConnectionDependencies dependencies(observer.get()); dependencies.tls_cert_verifier = - std::make_unique(); + std::make_unique(); - auto result = pc_factory_->CreatePeerConnectionOrError( + auto result = pc_factory->CreatePeerConnectionOrError( config, std::move(dependencies)); if (!result.ok()) { return nullptr; } return std::make_unique( - pc_factory_, result.MoveValue(), std::move(observer)); + std::move(pc_factory), result.MoveValue(), std::move(observer)); } void SetupOneWayCall() { @@ -228,17 +215,21 @@ class PeerConnectionRampUpTest : public ::testing::Test { FrameGeneratorCapturerVideoTrackSource::Config config; caller_->AddTrack(caller_->CreateLocalVideoTrack(config, clock_)); // Disable highpass filter so that we can get all the test audio frames. 
- cricket::AudioOptions options; + AudioOptions options; options.highpass_filter = false; caller_->AddTrack(caller_->CreateLocalAudioTrack(options)); // Do the SDP negotiation, and also exchange ice candidates. ASSERT_TRUE(caller_->ExchangeOfferAnswerWith(callee_.get())); - ASSERT_TRUE_WAIT( - caller_->signaling_state() == PeerConnectionInterface::kStable, - kDefaultTimeoutMs); - ASSERT_TRUE_WAIT(caller_->IsIceGatheringDone(), kDefaultTimeoutMs); - ASSERT_TRUE_WAIT(callee_->IsIceGatheringDone(), kDefaultTimeoutMs); + ASSERT_THAT(WaitUntil([&] { return caller_->signaling_state(); }, + ::testing::Eq(PeerConnectionInterface::kStable)), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return caller_->IsIceGatheringDone(); }, + ::testing::IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return callee_->IsIceGatheringDone(); }, + ::testing::IsTrue()), + IsRtcOk()); // Connect an ICE candidate pairs. ASSERT_TRUE( @@ -246,21 +237,25 @@ class PeerConnectionRampUpTest : public ::testing::Test { ASSERT_TRUE( caller_->AddIceCandidates(callee_->observer()->GetAllCandidates())); // This means that ICE and DTLS are connected. - ASSERT_TRUE_WAIT(callee_->IsIceConnected(), kDefaultTimeoutMs); - ASSERT_TRUE_WAIT(caller_->IsIceConnected(), kDefaultTimeoutMs); + ASSERT_THAT(WaitUntil([&] { return callee_->IsIceConnected(); }, + ::testing::IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return caller_->IsIceConnected(); }, + ::testing::IsTrue()), + IsRtcOk()); } - void CreateTurnServer(cricket::ProtocolType type, + void CreateTurnServer(ProtocolType type, const std::string& common_name = "test turn server") { - rtc::Thread* thread = network_thread(); - rtc::SocketFactory* factory = firewall_socket_server_.get(); - std::unique_ptr turn_server; - SendTask(network_thread_.get(), [&] { - static const rtc::SocketAddress turn_server_internal_address{ + Thread* thread = network_thread(); + SocketFactory* factory = &firewall_socket_server_; + std::unique_ptr turn_server; + SendTask(network_thread(), [&] { + static const SocketAddress turn_server_internal_address{ kTurnInternalAddress, kTurnInternalPort}; - static const rtc::SocketAddress turn_server_external_address{ + static const SocketAddress turn_server_external_address{ kTurnExternalAddress, kTurnExternalPort}; - turn_server = std::make_unique( + turn_server = std::make_unique( thread, factory, turn_server_internal_address, turn_server_external_address, type, true /*ignore_bad_certs=*/, common_name); @@ -274,12 +269,12 @@ class PeerConnectionRampUpTest : public ::testing::Test { // bandwidth estimations and prints the bandwidth estimation result as a perf // metric. 
void RunTest(const std::string& test_string) { - rtc::Thread::Current()->ProcessMessages(kRampUpTimeMs); + Thread::Current()->ProcessMessages(kRampUpTimeMs); int number_of_polls = (kDefaultTestTimeMs - kRampUpTimeMs) / kPollIntervalTimeMs; int total_bwe = 0; for (int i = 0; i < number_of_polls; ++i) { - rtc::Thread::Current()->ProcessMessages(kPollIntervalTimeMs); + Thread::Current()->ProcessMessages(kPollIntervalTimeMs); total_bwe += static_cast(GetCallerAvailableBitrateEstimate()); } double average_bandwidth_estimate = total_bwe / number_of_polls; @@ -291,10 +286,10 @@ class PeerConnectionRampUpTest : public ::testing::Test { ImprovementDirection::kNeitherIsBetter); } - rtc::Thread* network_thread() { return network_thread_.get(); } + Thread* network_thread() { return &network_thread_; } - rtc::FirewallSocketServer* firewall_socket_server() { - return firewall_socket_server_.get(); + FirewallSocketServer* firewall_socket_server() { + return &firewall_socket_server_; } PeerConnectionWrapperForRampUpTest* caller() { return caller_.get(); } @@ -308,16 +303,18 @@ class PeerConnectionRampUpTest : public ::testing::Test { double GetCallerAvailableBitrateEstimate() { auto stats = caller_->GetStats(); auto transport_stats = stats->GetStatsOfType(); - if (transport_stats.size() == 0u || - !transport_stats[0]->selected_candidate_pair_id.is_defined()) { + if (transport_stats.empty() || + !transport_stats[0]->selected_candidate_pair_id.has_value()) { return 0; } std::string selected_ice_id = - transport_stats[0]->selected_candidate_pair_id.ValueToString(); + transport_stats[0] + ->GetAttribute(transport_stats[0]->selected_candidate_pair_id) + .ToString(); // Use the selected ICE candidate pair ID to get the appropriate ICE stats. const RTCIceCandidatePairStats ice_candidate_pair_stats = stats->Get(selected_ice_id)->cast_to(); - if (ice_candidate_pair_stats.available_outgoing_bitrate.is_defined()) { + if (ice_candidate_pair_stats.available_outgoing_bitrate.has_value()) { return *ice_candidate_pair_stats.available_outgoing_bitrate; } // We couldn't get the `available_outgoing_bitrate` for the active candidate @@ -328,7 +325,7 @@ class PeerConnectionRampUpTest : public ::testing::Test { Clock* const clock_; // The turn servers should be accessed & deleted on the network thread to // avoid a race with the socket read/write which occurs on the network thread. - std::vector> turn_servers_; + std::vector> turn_servers_; // `virtual_socket_server_` is used by `network_thread_` so it must be // destroyed later. // TODO(bugs.webrtc.org/7668): We would like to update the virtual network we @@ -343,22 +340,18 @@ class PeerConnectionRampUpTest : public ::testing::Test { // the VirtualSocketServer. The first ramp down time is very noisy and the // second ramp up time can take up to 300 seconds, most likely due to a built // up queue. - std::unique_ptr virtual_socket_server_; - std::unique_ptr firewall_socket_server_; - std::unique_ptr firewall_socket_factory_; - - std::unique_ptr network_thread_; - std::unique_ptr worker_thread_; - // The `pc_factory` uses `network_thread_` & `worker_thread_`, so it must be - // destroyed first. 
- std::vector> fake_network_managers_; - rtc::scoped_refptr pc_factory_; + VirtualSocketServer virtual_socket_server_; + FirewallSocketServer firewall_socket_server_; + + Thread network_thread_; + std::unique_ptr worker_thread_; + std::unique_ptr caller_; std::unique_ptr callee_; }; TEST_F(PeerConnectionRampUpTest, Bwe_After_TurnOverTCP) { - CreateTurnServer(cricket::ProtocolType::PROTO_TCP); + CreateTurnServer(ProtocolType::PROTO_TCP); PeerConnectionInterface::IceServer ice_server; std::string ice_server_url = "turn:" + std::string(kTurnInternalAddress) + ":" + std::to_string(kTurnInternalPort) + @@ -381,7 +374,7 @@ TEST_F(PeerConnectionRampUpTest, Bwe_After_TurnOverTCP) { } TEST_F(PeerConnectionRampUpTest, Bwe_After_TurnOverUDP) { - CreateTurnServer(cricket::ProtocolType::PROTO_UDP); + CreateTurnServer(ProtocolType::PROTO_UDP); PeerConnectionInterface::IceServer ice_server; std::string ice_server_url = "turn:" + std::string(kTurnInternalAddress) + ":" + std::to_string(kTurnInternalPort); @@ -404,7 +397,7 @@ TEST_F(PeerConnectionRampUpTest, Bwe_After_TurnOverUDP) { } TEST_F(PeerConnectionRampUpTest, Bwe_After_TurnOverTLS) { - CreateTurnServer(cricket::ProtocolType::PROTO_TLS, kTurnInternalAddress); + CreateTurnServer(ProtocolType::PROTO_TLS, kTurnInternalAddress); PeerConnectionInterface::IceServer ice_server; std::string ice_server_url = "turns:" + std::string(kTurnInternalAddress) + ":" + std::to_string(kTurnInternalPort) + diff --git a/pc/peer_connection_rtp_unittest.cc b/pc/peer_connection_rtp_unittest.cc index 4bdb7f1ea6..ad39b76e31 100644 --- a/pc/peer_connection_rtp_unittest.cc +++ b/pc/peer_connection_rtp_unittest.cc @@ -12,16 +12,16 @@ #include #include +#include #include #include #include -#include "absl/types/optional.h" -#include "api/audio/audio_mixer.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/create_peerconnection_factory.h" #include "api/jsep.h" +#include "api/make_ref_counted.h" #include "api/media_stream_interface.h" #include "api/media_types.h" #include "api/peer_connection_interface.h" @@ -33,7 +33,8 @@ #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" #include "api/set_remote_description_observer_interface.h" -#include "api/uma_metrics.h" +#include "api/test/rtc_error_matchers.h" +#include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_decoder_factory_template.h" #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h" @@ -44,23 +45,21 @@ #include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h" #include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" +#include "media/base/codec.h" #include "media/base/stream_params.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "p2p/base/port_allocator.h" #include "pc/media_session.h" #include "pc/peer_connection_wrapper.h" #include "pc/sdp_utils.h" #include "pc/session_description.h" #include "pc/test/fake_audio_capture_module.h" +#include "pc/test/integration_test_helpers.h" #include "pc/test/mock_peer_connection_observers.h" #include "rtc_base/checks.h" -#include "rtc_base/gunit.h" -#include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/thread.h" #include 
"system_wrappers/include/metrics.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" // This file contains tests for RTP Media API-related behavior of // `webrtc::PeerConnection`, see https://w3c.github.io/webrtc-pc/#rtp-media-api. @@ -69,20 +68,17 @@ namespace webrtc { using RTCConfiguration = PeerConnectionInterface::RTCConfiguration; using ::testing::ElementsAre; -using ::testing::Pair; using ::testing::UnorderedElementsAre; using ::testing::Values; -const uint32_t kDefaultTimeout = 10000u; - template -class OnSuccessObserver : public webrtc::SetRemoteDescriptionObserverInterface { +class OnSuccessObserver : public SetRemoteDescriptionObserverInterface { public: explicit OnSuccessObserver(MethodFunctor on_success) : on_success_(std::move(on_success)) {} - // webrtc::SetRemoteDescriptionObserverInterface implementation. - void OnSetRemoteDescriptionComplete(webrtc::RTCError error) override { + // SetRemoteDescriptionObserverInterface implementation. + void OnSetRemoteDescriptionComplete(RTCError error) override { RTC_CHECK(error.ok()); on_success_(); } @@ -96,9 +92,9 @@ class PeerConnectionRtpBaseTest : public ::testing::Test { explicit PeerConnectionRtpBaseTest(SdpSemantics sdp_semantics) : sdp_semantics_(sdp_semantics), pc_factory_(CreatePeerConnectionFactory( - rtc::Thread::Current(), - rtc::Thread::Current(), - rtc::Thread::Current(), + Thread::Current(), + Thread::Current(), + Thread::Current(), FakeAudioCaptureModule::Create(), CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory(), @@ -114,7 +110,7 @@ class PeerConnectionRtpBaseTest : public ::testing::Test { Dav1dDecoderTemplateAdapter>>(), nullptr /* audio_mixer */, nullptr /* audio_processing */)) { - webrtc::metrics::Reset(); + metrics::Reset(); } std::unique_ptr CreatePeerConnection() { @@ -142,7 +138,7 @@ class PeerConnectionRtpBaseTest : public ::testing::Test { protected: const SdpSemantics sdp_semantics_; - rtc::scoped_refptr pc_factory_; + scoped_refptr pc_factory_; private: // Private so that tests don't accidentally bypass the SdpSemantics @@ -158,7 +154,7 @@ class PeerConnectionRtpBaseTest : public ::testing::Test { pc_factory_, result.MoveValue(), std::move(observer)); } - rtc::AutoThread main_thread_; + AutoThread main_thread_; }; class PeerConnectionRtpTest @@ -202,7 +198,7 @@ class PeerConnectionRtpTestUnifiedPlan : public PeerConnectionRtpBaseTest { } }; -// These tests cover `webrtc::PeerConnectionObserver` callbacks firing upon +// These tests cover `PeerConnectionObserver` callbacks firing upon // setting the remote description. TEST_P(PeerConnectionRtpTest, AddTrackWithoutStreamFiresOnAddTrack) { @@ -295,7 +291,7 @@ TEST_P(PeerConnectionRtpTest, RemoveTrackWithSharedStreamFiresOnRemoveTrack) { ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOfferAndSetAsLocal())); ASSERT_EQ(callee->observer()->add_track_events_.size(), 2u); EXPECT_EQ( - std::vector>{ + std::vector>{ callee->observer()->add_track_events_[0].receiver}, callee->observer()->remove_track_events_); ASSERT_EQ(1u, callee->observer()->remote_streams()->count()); @@ -326,8 +322,7 @@ TEST_F(PeerConnectionRtpTestPlanB, // Change the stream ID of the sender in the session description. 
auto offer = caller->CreateOfferAndSetAsLocal(); - auto* audio_desc = - cricket::GetFirstAudioContentDescription(offer->description()); + auto* audio_desc = GetFirstAudioContentDescription(offer->description()); ASSERT_EQ(audio_desc->mutable_streams().size(), 1u); audio_desc->mutable_streams()[0].set_stream_ids({kStreamId2}); ASSERT_TRUE(callee->SetRemoteDescription(std::move(offer))); @@ -351,11 +346,11 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTransceiverCallsOnTrack) { auto caller = CreatePeerConnection(); auto callee = CreatePeerConnection(); - auto audio_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto audio_transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); RtpTransceiverInit video_transceiver_init; video_transceiver_init.stream_ids = {kStreamId1, kStreamId2}; auto video_transceiver = - caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, video_transceiver_init); + caller->AddTransceiver(webrtc::MediaType::VIDEO, video_transceiver_init); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); @@ -365,9 +360,9 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTransceiverCallsOnTrack) { callee->pc()->GetTransceivers()[0]->mid()); EXPECT_EQ(video_transceiver->mid(), callee->pc()->GetTransceivers()[1]->mid()); - std::vector> audio_streams = + std::vector> audio_streams = callee->pc()->GetTransceivers()[0]->receiver()->streams(); - std::vector> video_streams = + std::vector> video_streams = callee->pc()->GetTransceivers()[1]->receiver()->streams(); ASSERT_EQ(0u, audio_streams.size()); ASSERT_EQ(2u, video_streams.size()); @@ -405,7 +400,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, SetDirectionCallsOnTrack) { auto caller = CreatePeerConnection(); auto callee = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); EXPECT_TRUE( transceiver->SetDirectionWithError(RtpTransceiverDirection::kInactive) .ok()); @@ -436,7 +431,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, SetDirectionHoldCallsOnTrackTwice) { auto caller = CreatePeerConnection(); auto callee = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); EXPECT_EQ(0u, caller->observer()->on_track_transceivers_.size()); @@ -507,7 +502,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, TEST_F(PeerConnectionRtpTestUnifiedPlan, ChangeDirectionInAnswerResultsInRemoveTrackEvent) { auto caller = CreatePeerConnection(); - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + caller->AddTransceiver(webrtc::MediaType::AUDIO); auto callee = CreatePeerConnection(); callee->AddAudioTrack("audio_track", {}); @@ -752,14 +747,18 @@ TEST_F(PeerConnectionRtpTestPlanB, // when the first callback is invoked. 
callee->pc()->SetRemoteDescription( std::move(srd1_sdp), - rtc::make_ref_counted>( + make_ref_counted>( srd1_callback)); callee->pc()->SetRemoteDescription( std::move(srd2_sdp), - rtc::make_ref_counted>( + make_ref_counted>( srd2_callback)); - EXPECT_TRUE_WAIT(srd1_callback_called, kDefaultTimeout); - EXPECT_TRUE_WAIT(srd2_callback_called, kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return srd1_callback_called; }, ::testing::IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return srd2_callback_called; }, ::testing::IsTrue()), + IsRtcOk()); } // Tests that a remote track is created with the signaled MSIDs when they are @@ -781,7 +780,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, UnsignaledSsrcCreatesReceiverStreams) { std::vector stream_ids = contents[0].media_description()->streams()[0].stream_ids(); contents[0].media_description()->mutable_streams().clear(); - cricket::StreamParams new_stream; + StreamParams new_stream; new_stream.set_stream_ids(stream_ids); contents[0].media_description()->AddStream(new_stream); @@ -804,8 +803,8 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, TracksDoNotEndWhenSsrcChanges) { // Caller offers to receive audio and video. RtpTransceiverInit init; init.direction = RtpTransceiverDirection::kRecvOnly; - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init); - caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init); + caller->AddTransceiver(webrtc::MediaType::AUDIO, init); + caller->AddTransceiver(webrtc::MediaType::VIDEO, init); // Callee wants to send audio and video tracks. callee->AddTrack(callee->CreateAudioTrack("audio_track"), {}); @@ -826,7 +825,8 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, TracksDoNotEndWhenSsrcChanges) { for (size_t i = 0; i < contents.size(); ++i) { auto& mutable_streams = contents[i].media_description()->mutable_streams(); ASSERT_EQ(mutable_streams.size(), 1u); - mutable_streams[0].ssrcs[0] = kFirstMungedSsrc + static_cast(i); + ReplaceFirstSsrc(mutable_streams[0], + kFirstMungedSsrc + static_cast(i)); } ASSERT_TRUE( callee->SetLocalDescription(CloneSessionDescription(answer.get()))); @@ -897,8 +897,7 @@ TEST_F(PeerConnectionRtpTestPlanB, auto offer = caller->CreateOfferAndSetAsLocal(); auto mutable_streams = - cricket::GetFirstAudioContentDescription(offer->description()) - ->mutable_streams(); + GetFirstAudioContentDescription(offer->description())->mutable_streams(); ASSERT_EQ(mutable_streams.size(), 2u); // Clear the IDs in the StreamParams. 
mutable_streams[0].id.clear(); @@ -934,13 +933,13 @@ TEST_P(PeerConnectionRtpTest, auto caller = CreatePeerConnection(); auto callee = CreatePeerConnection(); - rtc::scoped_refptr observer = - rtc::make_ref_counted(); + scoped_refptr observer = + make_ref_counted(); auto offer = caller->CreateOfferAndSetAsLocal(); callee->pc()->SetRemoteDescription(observer.get(), offer.release()); callee = nullptr; - rtc::Thread::Current()->ProcessMessages(0); + Thread::Current()->ProcessMessages(0); EXPECT_FALSE(observer->called()); } @@ -958,11 +957,11 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTransceiverHasCorrectInitProperties) { auto caller = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - EXPECT_EQ(absl::nullopt, transceiver->mid()); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); + EXPECT_EQ(std::nullopt, transceiver->mid()); EXPECT_FALSE(transceiver->stopped()); EXPECT_EQ(RtpTransceiverDirection::kSendRecv, transceiver->direction()); - EXPECT_EQ(absl::nullopt, transceiver->current_direction()); + EXPECT_EQ(std::nullopt, transceiver->current_direction()); } // Test that adding a transceiver with the audio kind creates an audio sender @@ -971,14 +970,14 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, AddAudioTransceiverCreatesAudioSenderAndReceiver) { auto caller = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, transceiver->media_type()); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); + EXPECT_EQ(webrtc::MediaType::AUDIO, transceiver->media_type()); ASSERT_TRUE(transceiver->sender()); - EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, transceiver->sender()->media_type()); + EXPECT_EQ(webrtc::MediaType::AUDIO, transceiver->sender()->media_type()); ASSERT_TRUE(transceiver->receiver()); - EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, transceiver->receiver()->media_type()); + EXPECT_EQ(webrtc::MediaType::AUDIO, transceiver->receiver()->media_type()); auto track = transceiver->receiver()->track(); ASSERT_TRUE(track); @@ -992,14 +991,14 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, AddAudioTransceiverCreatesVideoSenderAndReceiver) { auto caller = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); - EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, transceiver->media_type()); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::VIDEO); + EXPECT_EQ(webrtc::MediaType::VIDEO, transceiver->media_type()); ASSERT_TRUE(transceiver->sender()); - EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, transceiver->sender()->media_type()); + EXPECT_EQ(webrtc::MediaType::VIDEO, transceiver->sender()->media_type()); ASSERT_TRUE(transceiver->receiver()); - EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, transceiver->receiver()->media_type()); + EXPECT_EQ(webrtc::MediaType::VIDEO, transceiver->receiver()->media_type()); auto track = transceiver->receiver()->track(); ASSERT_TRUE(track); @@ -1013,17 +1012,14 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTransceiverShowsInLists) { auto caller = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - EXPECT_EQ( - std::vector>{transceiver}, - caller->pc()->GetTransceivers()); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); + EXPECT_EQ(std::vector>{transceiver}, + caller->pc()->GetTransceivers()); EXPECT_EQ( - std::vector>{ - transceiver->sender()}, + std::vector>{transceiver->sender()}, 
caller->pc()->GetSenders()); EXPECT_EQ( - std::vector>{ - transceiver->receiver()}, + std::vector>{transceiver->receiver()}, caller->pc()->GetReceivers()); } @@ -1035,7 +1031,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, RtpTransceiverInit init; init.direction = RtpTransceiverDirection::kSendOnly; - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO, init); EXPECT_EQ(RtpTransceiverDirection::kSendOnly, transceiver->direction()); } @@ -1090,7 +1086,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTransceiverWithInvalidKindReturnsError) { auto caller = CreatePeerConnection(); - auto result = caller->pc()->AddTransceiver(cricket::MEDIA_TYPE_DATA); + auto result = caller->pc()->AddTransceiver(webrtc::MediaType::DATA); EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type()); } @@ -1112,7 +1108,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, AddAudioTrackCreatesAudioSender) { auto sender = caller->AddTrack(audio_track); ASSERT_TRUE(sender); - EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, sender->media_type()); + EXPECT_EQ(webrtc::MediaType::AUDIO, sender->media_type()); EXPECT_EQ(audio_track, sender->track()); } @@ -1125,7 +1121,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, AddVideoTrackCreatesVideoSender) { auto sender = caller->AddTrack(video_track); ASSERT_TRUE(sender); - EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, sender->media_type()); + EXPECT_EQ(webrtc::MediaType::VIDEO, sender->media_type()); EXPECT_EQ(video_track, sender->track()); } @@ -1149,7 +1145,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, AddFirstTrackCreatesTransceiver) { TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTrackReusesTransceiver) { auto caller = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); auto audio_track = caller->CreateAudioTrack("a"); auto sender = caller->AddTrack(audio_track); ASSERT_TRUE(sender); @@ -1165,7 +1161,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTrackWithSendEncodingDoesNotReuseTransceiver) { auto caller = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); auto audio_track = caller->CreateAudioTrack("a"); RtpEncodingParameters encoding; auto sender = caller->AddTrack(audio_track, {}, {encoding}); @@ -1199,8 +1195,8 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, TwoAddTrackCreatesTwoTransceivers) { TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTrackReusesTransceiverOfType) { auto caller = CreatePeerConnection(); - auto audio_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - auto video_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO); + auto audio_transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); + auto video_transceiver = caller->AddTransceiver(webrtc::MediaType::VIDEO); auto sender = caller->AddVideoTrack("v"); ASSERT_EQ(2u, caller->pc()->GetTransceivers().size()); @@ -1215,7 +1211,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTrackDoesNotReuseTransceiverOfWrongType) { auto caller = CreatePeerConnection(); - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + caller->AddTransceiver(webrtc::MediaType::AUDIO); auto sender = caller->AddVideoTrack("v"); auto transceivers = caller->pc()->GetTransceivers(); @@ -1230,8 +1226,8 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, AddTrackReusesFirstMatchingTransceiver) { auto caller = 
CreatePeerConnection(); - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + caller->AddTransceiver(webrtc::MediaType::AUDIO); + caller->AddTransceiver(webrtc::MediaType::AUDIO); auto sender = caller->AddAudioTrack("a"); auto transceivers = caller->pc()->GetTransceivers(); @@ -1249,7 +1245,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, RtpTransceiverInit init; init.direction = RtpTransceiverDirection::kInactive; - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO, init); EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); @@ -1272,7 +1268,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, RtpTransceiverInit init; init.direction = RtpTransceiverDirection::kRecvOnly; - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO, init); EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); @@ -1493,8 +1489,8 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, CreateAnswerSameTrackLabel) { RtpTransceiverInit recvonly; recvonly.direction = RtpTransceiverDirection::kRecvOnly; - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, recvonly); - caller->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, recvonly); + caller->AddTransceiver(webrtc::MediaType::AUDIO, recvonly); + caller->AddTransceiver(webrtc::MediaType::VIDEO, recvonly); ASSERT_TRUE(callee->SetRemoteDescription(caller->CreateOffer())); @@ -1543,7 +1539,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, EXPECT_FALSE(caller->observer()->legacy_renegotiation_needed()); EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); EXPECT_TRUE(caller->observer()->legacy_renegotiation_needed()); EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); @@ -1562,7 +1558,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, NoRenegotiationNeededAfterTransceiverSetSameDirection) { auto caller = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); caller->observer()->clear_legacy_renegotiation_needed(); caller->observer()->clear_latest_negotiation_needed_event(); @@ -1577,7 +1573,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, NoRenegotiationNeededAfterSetDirectionOnStoppedTransceiver) { auto caller = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); transceiver->StopInternal(); caller->observer()->clear_legacy_renegotiation_needed(); @@ -1592,7 +1588,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, CheckStoppedCurrentDirectionOnStoppedTransceiver) { auto caller = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); transceiver->StopInternal(); EXPECT_TRUE(transceiver->stopping()); @@ -1606,7 +1602,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, CheckForInvalidStateOnStoppingTransceiver) { auto caller = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = 
caller->AddTransceiver(webrtc::MediaType::AUDIO); transceiver->StopStandard(); EXPECT_TRUE(transceiver->stopping()); @@ -1622,7 +1618,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, CheckForInvalidStateOnStoppedTransceiver) { auto caller = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); transceiver->StopInternal(); EXPECT_TRUE(transceiver->stopping()); @@ -1638,7 +1634,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, CheckForTypeErrorForStoppedOnTransceiver) { auto caller = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); EXPECT_EQ( RTCErrorType::INVALID_PARAMETER, transceiver->SetDirectionWithError(RtpTransceiverDirection::kStopped) @@ -1651,7 +1647,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, SetLocalDescriptionWithStoppedMediaSection) { auto caller = CreatePeerConnection(); auto callee = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); callee->pc()->GetTransceivers()[0]->StopStandard(); ASSERT_TRUE(callee->ExchangeOfferAnswerWith(caller.get())); @@ -1664,7 +1660,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, StopAndNegotiateCausesTransceiverToDisappear) { auto caller = CreatePeerConnection(); auto callee = CreatePeerConnection(); - auto transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); callee->pc()->GetTransceivers()[0]->StopStandard(); ASSERT_TRUE(callee->ExchangeOfferAnswerWith(caller.get())); @@ -1730,7 +1726,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, init.send_encodings[0].ssrc = 1; EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER, caller->pc() - ->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init) + ->AddTransceiver(webrtc::MediaType::AUDIO, init) .error() .type()); init.send_encodings = default_send_encodings; @@ -1749,7 +1745,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, CheckForInvalidEncodingParameters) { init.send_encodings[0].scale_resolution_down_by = 0.5; EXPECT_EQ(RTCErrorType::INVALID_RANGE, caller->pc() - ->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init) + ->AddTransceiver(webrtc::MediaType::VIDEO, init) .error() .type()); init.send_encodings = default_send_encodings; @@ -1757,7 +1753,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, CheckForInvalidEncodingParameters) { init.send_encodings[0].bitrate_priority = 0; EXPECT_EQ(RTCErrorType::INVALID_RANGE, caller->pc() - ->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init) + ->AddTransceiver(webrtc::MediaType::VIDEO, init) .error() .type()); init.send_encodings = default_send_encodings; @@ -1766,7 +1762,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, CheckForInvalidEncodingParameters) { init.send_encodings[0].max_bitrate_bps = 100000; EXPECT_EQ(RTCErrorType::INVALID_RANGE, caller->pc() - ->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init) + ->AddTransceiver(webrtc::MediaType::VIDEO, init) .error() .type()); init.send_encodings = default_send_encodings; @@ -1774,7 +1770,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, CheckForInvalidEncodingParameters) { init.send_encodings[0].num_temporal_layers = 0; EXPECT_EQ(RTCErrorType::INVALID_RANGE, caller->pc() - ->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, 
init) + ->AddTransceiver(webrtc::MediaType::VIDEO, init) .error() .type()); init.send_encodings = default_send_encodings; @@ -1782,7 +1778,37 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, CheckForInvalidEncodingParameters) { init.send_encodings[0].num_temporal_layers = 5; EXPECT_EQ(RTCErrorType::INVALID_RANGE, caller->pc() - ->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init) + ->AddTransceiver(webrtc::MediaType::VIDEO, init) + .error() + .type()); + init.send_encodings = default_send_encodings; + + init.send_encodings[0].scalability_mode = std::nullopt; + init.send_encodings[0].codec = + CreateVideoCodec(SdpVideoFormat("VP8", {})).ToCodecParameters(); + EXPECT_EQ(RTCErrorType::NONE, + caller->pc() + ->AddTransceiver(webrtc::MediaType::VIDEO, init) + .error() + .type()); + init.send_encodings = default_send_encodings; + + init.send_encodings[0].scalability_mode = "L1T2"; + init.send_encodings[0].codec = + CreateVideoCodec(SdpVideoFormat("VP8", {})).ToCodecParameters(); + EXPECT_EQ(RTCErrorType::NONE, + caller->pc() + ->AddTransceiver(webrtc::MediaType::VIDEO, init) + .error() + .type()); + init.send_encodings = default_send_encodings; + + init.send_encodings[0].scalability_mode = "L2T2"; + init.send_encodings[0].codec = + CreateVideoCodec(SdpVideoFormat("VP8", {})).ToCodecParameters(); + EXPECT_EQ(RTCErrorType::UNSUPPORTED_OPERATION, + caller->pc() + ->AddTransceiver(webrtc::MediaType::VIDEO, init) .error() .type()); init.send_encodings = default_send_encodings; @@ -1798,7 +1824,7 @@ TEST_F(PeerConnectionRtpTestUnifiedPlan, SendEncodingsPassedToSender) { init.send_encodings[0].active = false; init.send_encodings[0].max_bitrate_bps = 180000; - auto result = caller->pc()->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, init); + auto result = caller->pc()->AddTransceiver(webrtc::MediaType::AUDIO, init); ASSERT_TRUE(result.ok()); auto init_send_encodings = result.value()->sender()->init_send_encodings(); @@ -1836,13 +1862,13 @@ TEST_F(PeerConnectionMsidSignalingTest, UnifiedPlanTalkingToOurself) { // Offer should have had both a=msid and a=ssrc MSID lines. auto* offer = callee->pc()->remote_description(); - EXPECT_EQ((cricket::kMsidSignalingMediaSection | - cricket::kMsidSignalingSsrcAttribute), + EXPECT_EQ((kMsidSignalingSemantic | kMsidSignalingMediaSection | + kMsidSignalingSsrcAttribute), offer->description()->msid_signaling()); // Answer should have had only a=msid lines. auto* answer = caller->pc()->remote_description(); - EXPECT_EQ(cricket::kMsidSignalingMediaSection, + EXPECT_EQ(kMsidSignalingSemantic | kMsidSignalingMediaSection, answer->description()->msid_signaling()); } @@ -1856,12 +1882,12 @@ TEST_F(PeerConnectionMsidSignalingTest, PlanBOfferToUnifiedPlanAnswer) { // Offer should have only a=ssrc MSID lines. auto* offer = callee->pc()->remote_description(); - EXPECT_EQ(cricket::kMsidSignalingSsrcAttribute, + EXPECT_EQ(kMsidSignalingSemantic | kMsidSignalingSsrcAttribute, offer->description()->msid_signaling()); // Answer should have only a=ssrc MSID lines to match the offer. auto* answer = caller->pc()->remote_description(); - EXPECT_EQ(cricket::kMsidSignalingSsrcAttribute, + EXPECT_EQ(kMsidSignalingSemantic | kMsidSignalingSsrcAttribute, answer->description()->msid_signaling()); } @@ -1884,8 +1910,8 @@ TEST_F(PeerConnectionMsidSignalingTest, UnifiedPlanToPlanBAnswer) { // Offer should have had both a=msid and a=ssrc MSID lines. 
auto* offer = callee->pc()->remote_description(); - EXPECT_EQ((cricket::kMsidSignalingMediaSection | - cricket::kMsidSignalingSsrcAttribute), + EXPECT_EQ((kMsidSignalingSemantic | kMsidSignalingMediaSection | + kMsidSignalingSsrcAttribute), offer->description()->msid_signaling()); // Callee should always have 1 stream for all of it's receivers. @@ -1907,7 +1933,8 @@ TEST_F(PeerConnectionMsidSignalingTest, PureUnifiedPlanToUs) { auto offer = caller->CreateOffer(); // Simulate a pure Unified Plan offerer by setting the MSID signaling to media // section only. - offer->description()->set_msid_signaling(cricket::kMsidSignalingMediaSection); + offer->description()->set_msid_signaling(kMsidSignalingSemantic | + kMsidSignalingMediaSection); ASSERT_TRUE( caller->SetLocalDescription(CloneSessionDescription(offer.get()))); @@ -1915,7 +1942,7 @@ TEST_F(PeerConnectionMsidSignalingTest, PureUnifiedPlanToUs) { // Answer should have only a=msid to match the offer. auto answer = callee->CreateAnswer(); - EXPECT_EQ(cricket::kMsidSignalingMediaSection, + EXPECT_EQ(kMsidSignalingSemantic | kMsidSignalingMediaSection, answer->description()->msid_signaling()); } diff --git a/pc/peer_connection_signaling_unittest.cc b/pc/peer_connection_signaling_unittest.cc index 8ca59fc20c..645d62dc35 100644 --- a/pc/peer_connection_signaling_unittest.cc +++ b/pc/peer_connection_signaling_unittest.cc @@ -16,20 +16,22 @@ #include #include #include +#include #include #include #include -#include #include #include -#include "absl/types/optional.h" -#include "api/audio/audio_mixer.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/str_replace.h" +#include "api/audio/audio_device.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/create_peerconnection_factory.h" #include "api/dtls_transport_interface.h" #include "api/jsep.h" +#include "api/make_ref_counted.h" #include "api/media_types.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" @@ -37,8 +39,8 @@ #include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" -#include "api/set_local_description_observer_interface.h" -#include "api/set_remote_description_observer_interface.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" #include "api/video_codecs/video_decoder_factory_template.h" #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h" @@ -50,28 +52,24 @@ #include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" #include "media/base/codec.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "p2p/base/port_allocator.h" #include "pc/peer_connection.h" -#include "pc/peer_connection_proxy.h" #include "pc/peer_connection_wrapper.h" #include "pc/sdp_utils.h" #include "pc/session_description.h" +#include "pc/test/fake_audio_capture_module.h" +#include "pc/test/fake_rtc_certificate_generator.h" #include "pc/test/mock_peer_connection_observers.h" #include "rtc_base/checks.h" -#include "rtc_base/rtc_certificate.h" -#include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/string_encode.h" #include "rtc_base/thread.h" +#include "rtc_base/virtual_socket_server.h" +#include "test/gmock.h" #include 
"test/gtest.h" +#include "test/wait_until.h" + #ifdef WEBRTC_ANDROID #include "pc/test/android_test_initializer.h" #endif -#include "pc/test/fake_audio_capture_module.h" -#include "pc/test/fake_rtc_certificate_generator.h" -#include "rtc_base/gunit.h" -#include "rtc_base/virtual_socket_server.h" namespace webrtc { @@ -80,6 +78,7 @@ using RTCConfiguration = PeerConnectionInterface::RTCConfiguration; using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions; using ::testing::Bool; using ::testing::Combine; +using ::testing::StartsWith; using ::testing::Values; namespace { @@ -93,13 +92,6 @@ class PeerConnectionWrapperForSignalingTest : public PeerConnectionWrapper { bool initial_offerer() { return GetInternalPeerConnection()->initial_offerer(); } - - PeerConnection* GetInternalPeerConnection() { - auto* pci = - static_cast*>( - pc()); - return static_cast(pci->internal()); - } }; class ExecuteFunctionOnCreateSessionDescriptionObserver @@ -132,15 +124,15 @@ class PeerConnectionSignalingBaseTest : public ::testing::Test { typedef std::unique_ptr WrapperPtr; explicit PeerConnectionSignalingBaseTest(SdpSemantics sdp_semantics) - : vss_(new rtc::VirtualSocketServer()), + : vss_(new VirtualSocketServer()), main_(vss_.get()), sdp_semantics_(sdp_semantics) { #ifdef WEBRTC_ANDROID InitializeAndroidObjects(); #endif pc_factory_ = CreatePeerConnectionFactory( - rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(), - rtc::scoped_refptr(FakeAudioCaptureModule::Create()), + Thread::Current(), Thread::Current(), Thread::Current(), + scoped_refptr(FakeAudioCaptureModule::Create()), CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory(), std::make_unique 0; } - std::unique_ptr vss_; - rtc::AutoSocketServerThread main_; - rtc::scoped_refptr pc_factory_; + std::unique_ptr vss_; + AutoSocketServerThread main_; + scoped_refptr pc_factory_; const SdpSemantics sdp_semantics_; }; @@ -343,8 +335,7 @@ TEST_P(PeerConnectionSignalingStateTest, CreateOffer) { } else { std::string error; ASSERT_FALSE(wrapper->CreateOffer(RTCOfferAnswerOptions(), &error)); - EXPECT_PRED_FORMAT2(AssertStartsWith, error, - "CreateOffer called when PeerConnection is closed."); + EXPECT_EQ(error, "CreateOffer called when PeerConnection is closed."); } } @@ -379,9 +370,9 @@ TEST_P(PeerConnectionSignalingStateTest, SetLocalOffer) { std::string error; ASSERT_FALSE(wrapper->SetLocalDescription(std::move(offer), &error)); - EXPECT_PRED_FORMAT2( - AssertStartsWith, error, - "Failed to set local offer sdp: Called in wrong state:"); + EXPECT_THAT( + error, + StartsWith("Failed to set local offer sdp: Called in wrong state:")); } } @@ -398,9 +389,9 @@ TEST_P(PeerConnectionSignalingStateTest, SetLocalPrAnswer) { } else { std::string error; ASSERT_FALSE(wrapper->SetLocalDescription(std::move(pranswer), &error)); - EXPECT_PRED_FORMAT2( - AssertStartsWith, error, - "Failed to set local pranswer sdp: Called in wrong state:"); + EXPECT_THAT( + error, + StartsWith("Failed to set local pranswer sdp: Called in wrong state:")); } } @@ -416,9 +407,9 @@ TEST_P(PeerConnectionSignalingStateTest, SetLocalAnswer) { } else { std::string error; ASSERT_FALSE(wrapper->SetLocalDescription(std::move(answer), &error)); - EXPECT_PRED_FORMAT2( - AssertStartsWith, error, - "Failed to set local answer sdp: Called in wrong state:"); + EXPECT_THAT( + error, + StartsWith("Failed to set local answer sdp: Called in wrong state:")); } } @@ -435,9 +426,9 @@ TEST_P(PeerConnectionSignalingStateTest, SetRemoteOffer) { } else { 
std::string error; ASSERT_FALSE(wrapper->SetRemoteDescription(std::move(offer), &error)); - EXPECT_PRED_FORMAT2( - AssertStartsWith, error, - "Failed to set remote offer sdp: Called in wrong state:"); + EXPECT_THAT( + error, + StartsWith("Failed to set remote offer sdp: Called in wrong state:")); } } @@ -454,9 +445,10 @@ TEST_P(PeerConnectionSignalingStateTest, SetRemotePrAnswer) { } else { std::string error; ASSERT_FALSE(wrapper->SetRemoteDescription(std::move(pranswer), &error)); - EXPECT_PRED_FORMAT2( - AssertStartsWith, error, - "Failed to set remote pranswer sdp: Called in wrong state:"); + EXPECT_THAT( + error, + StartsWith( + "Failed to set remote pranswer sdp: Called in wrong state:")); } } @@ -472,9 +464,9 @@ TEST_P(PeerConnectionSignalingStateTest, SetRemoteAnswer) { } else { std::string error; ASSERT_FALSE(wrapper->SetRemoteDescription(std::move(answer), &error)); - EXPECT_PRED_FORMAT2( - AssertStartsWith, error, - "Failed to set remote answer sdp: Called in wrong state:"); + EXPECT_THAT( + error, + StartsWith("Failed to set remote answer sdp: Called in wrong state:")); } } @@ -526,8 +518,8 @@ TEST_P(PeerConnectionSignalingTest, auto later_offer = caller->CreateOffer(); EXPECT_EQ(original_id, later_offer->session_id()); - EXPECT_LT(rtc::FromString(original_version), - rtc::FromString(later_offer->session_version())); + EXPECT_LT(FromString(original_version), + FromString(later_offer->session_version())); } TEST_P(PeerConnectionSignalingTest, SessionVersionIncrementedInSubsequentDifferentAnswer) { @@ -546,8 +538,8 @@ TEST_P(PeerConnectionSignalingTest, auto later_answer = callee->CreateAnswer(); EXPECT_EQ(original_id, later_answer->session_id()); - EXPECT_LT(rtc::FromString(original_version), - rtc::FromString(later_answer->session_version())); + EXPECT_LT(FromString(original_version), + FromString(later_answer->session_version())); } TEST_P(PeerConnectionSignalingTest, InitiatorFlagSetOnCallerAndNotOnCallee) { @@ -580,9 +572,9 @@ TEST_P(PeerConnectionSignalingTest, CreateOffersAndShutdown) { options.offer_to_receive_audio = RTCOfferAnswerOptions::kOfferToReceiveMediaTrue; - rtc::scoped_refptr observers[100]; + scoped_refptr observers[100]; for (auto& observer : observers) { - observer = rtc::make_ref_counted(); + observer = make_ref_counted(); caller->pc()->CreateOffer(observer.get(), options); } @@ -593,7 +585,10 @@ TEST_P(PeerConnectionSignalingTest, CreateOffersAndShutdown) { // We expect to have received a notification now even if the PeerConnection // was terminated. The offer creation may or may not have succeeded, but we // must have received a notification. - EXPECT_TRUE_WAIT(observer->called(), kWaitTimeout); + EXPECT_THAT( + WaitUntil([&] { return observer->called(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); } } @@ -603,11 +598,13 @@ TEST_P(PeerConnectionSignalingTest, CreateOffersAndShutdown) { // the WebRtcSessionDescriptionFactory is responsible for it. 
TEST_P(PeerConnectionSignalingTest, CloseCreateOfferAndShutdown) { auto caller = CreatePeerConnection(); - auto observer = rtc::make_ref_counted(); + auto observer = make_ref_counted(); caller->pc()->Close(); caller->pc()->CreateOffer(observer.get(), RTCOfferAnswerOptions()); caller.reset(nullptr); - EXPECT_TRUE_WAIT(observer->called(), kWaitTimeout); + EXPECT_THAT(WaitUntil([&] { return observer->called(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); } TEST_P(PeerConnectionSignalingTest, @@ -622,7 +619,7 @@ TEST_P(PeerConnectionSignalingTest, TEST_P(PeerConnectionSignalingTest, ImplicitCreateOfferAndShutdown) { auto caller = CreatePeerConnection(); - auto observer = rtc::make_ref_counted(); + auto observer = make_ref_counted(); caller->pc()->SetLocalDescription(observer); caller.reset(nullptr); // The new observer gets invoked because it is called immediately. @@ -643,7 +640,7 @@ TEST_P(PeerConnectionSignalingTest, TEST_P(PeerConnectionSignalingTest, CloseBeforeImplicitCreateOfferAndShutdown) { auto caller = CreatePeerConnection(); - auto observer = rtc::make_ref_counted(); + auto observer = make_ref_counted(); caller->pc()->Close(); caller->pc()->SetLocalDescription(observer); caller.reset(nullptr); @@ -665,7 +662,7 @@ TEST_P(PeerConnectionSignalingTest, TEST_P(PeerConnectionSignalingTest, CloseAfterImplicitCreateOfferAndShutdown) { auto caller = CreatePeerConnection(); - auto observer = rtc::make_ref_counted(); + auto observer = make_ref_counted(); caller->pc()->SetLocalDescription(observer); caller->pc()->Close(); caller.reset(nullptr); @@ -679,7 +676,7 @@ TEST_P(PeerConnectionSignalingTest, auto caller = CreatePeerConnection(); auto offer = caller->CreateOffer(RTCOfferAnswerOptions()); - auto observer = rtc::make_ref_counted(); + auto observer = make_ref_counted(); caller->pc()->SetLocalDescription(std::move(offer), observer); // The new observer is invoked immediately. EXPECT_TRUE(observer->called()); @@ -697,9 +694,11 @@ TEST_P(PeerConnectionSignalingTest, EXPECT_FALSE(observer->called()); // Process all currently pending messages by waiting for a posted task to run. bool checkpoint_reached = false; - rtc::Thread::Current()->PostTask( + Thread::Current()->PostTask( [&checkpoint_reached] { checkpoint_reached = true; }); - EXPECT_TRUE_WAIT(checkpoint_reached, kWaitTimeout); + EXPECT_THAT(WaitUntil([&] { return checkpoint_reached; }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); // If resolving the observer was pending, it must now have been called. EXPECT_TRUE(observer->called()); } @@ -714,8 +713,7 @@ TEST_P(PeerConnectionSignalingTest, SetRemoteDescriptionExecutesImmediately) { // By not waiting for the observer's callback we can verify that the operation // executed immediately. callee->pc()->SetRemoteDescription( - std::move(offer), - rtc::make_ref_counted()); + std::move(offer), make_ref_counted()); EXPECT_EQ(2u, callee->pc()->GetReceivers().size()); } @@ -728,14 +726,13 @@ TEST_P(PeerConnectionSignalingTest, CreateOfferBlocksSetRemoteDescription) { EXPECT_EQ(0u, callee->pc()->GetReceivers().size()); auto offer_observer = - rtc::make_ref_counted(); + make_ref_counted(); // Synchronously invoke CreateOffer() and SetRemoteDescription(). The // SetRemoteDescription() operation should be chained to be executed // asynchronously, when CreateOffer() completes. 
callee->pc()->CreateOffer(offer_observer.get(), RTCOfferAnswerOptions()); callee->pc()->SetRemoteDescription( - std::move(offer), - rtc::make_ref_counted()); + std::move(offer), make_ref_counted()); // CreateOffer() is asynchronous; without message processing this operation // should not have completed. EXPECT_FALSE(offer_observer->called()); @@ -743,7 +740,10 @@ TEST_P(PeerConnectionSignalingTest, CreateOfferBlocksSetRemoteDescription) { // yet. EXPECT_EQ(0u, callee->pc()->GetReceivers().size()); // EXPECT_TRUE_WAIT causes messages to be processed... - EXPECT_TRUE_WAIT(offer_observer->called(), kWaitTimeout); + EXPECT_THAT( + WaitUntil([&] { return offer_observer->called(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); // Now that the offer has been completed, SetRemoteDescription() will have // been executed next in the chain. EXPECT_EQ(2u, callee->pc()->GetReceivers().size()); @@ -763,7 +763,9 @@ TEST_P(PeerConnectionSignalingTest, EXPECT_EQ(PeerConnection::kStable, caller->signaling_state()); // Wait for messages to be processed. - EXPECT_TRUE_WAIT(observer->called(), kWaitTimeout); + EXPECT_THAT(WaitUntil([&] { return observer->called(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); EXPECT_TRUE(observer->result()); EXPECT_TRUE(caller->pc()->pending_local_description()); EXPECT_EQ(SdpType::kOffer, @@ -788,7 +790,9 @@ TEST_P(PeerConnectionSignalingTest, EXPECT_FALSE(callee->pc()->current_local_description()); // Wait for messages to be processed. - EXPECT_TRUE_WAIT(observer->called(), kWaitTimeout); + EXPECT_THAT(WaitUntil([&] { return observer->called(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); EXPECT_TRUE(observer->result()); EXPECT_TRUE(callee->pc()->current_local_description()); EXPECT_EQ(SdpType::kAnswer, @@ -806,8 +810,11 @@ TEST_P(PeerConnectionSignalingTest, MockSetSessionDescriptionObserver::Create(); caller->pc()->SetLocalDescription( caller_set_local_description_observer.get()); - EXPECT_TRUE_WAIT(caller_set_local_description_observer->called(), - kWaitTimeout); + EXPECT_THAT( + WaitUntil([&] { return caller_set_local_description_observer->called(); }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); ASSERT_TRUE(caller->pc()->pending_local_description()); // SetRemoteDescription(offer) @@ -823,8 +830,11 @@ TEST_P(PeerConnectionSignalingTest, MockSetSessionDescriptionObserver::Create(); callee->pc()->SetLocalDescription( callee_set_local_description_observer.get()); - EXPECT_TRUE_WAIT(callee_set_local_description_observer->called(), - kWaitTimeout); + EXPECT_THAT( + WaitUntil([&] { return callee_set_local_description_observer->called(); }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); // Chaining guarantees SetRemoteDescription() happened before // SetLocalDescription(). 
EXPECT_TRUE(callee_set_remote_description_observer->called()); @@ -837,8 +847,12 @@ TEST_P(PeerConnectionSignalingTest, caller_set_remote_description_observer.get(), CloneSessionDescription(callee->pc()->current_local_description()) .release()); - EXPECT_TRUE_WAIT(caller_set_remote_description_observer->called(), - kWaitTimeout); + EXPECT_THAT( + WaitUntil( + [&] { return caller_set_remote_description_observer->called(); }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); EXPECT_EQ(PeerConnection::kStable, caller->signaling_state()); EXPECT_EQ(PeerConnection::kStable, callee->signaling_state()); @@ -854,7 +868,9 @@ TEST_P(PeerConnectionSignalingTest, // The operation should fail asynchronously. EXPECT_FALSE(observer->called()); - EXPECT_TRUE_WAIT(observer->called(), kWaitTimeout); + EXPECT_THAT(WaitUntil([&] { return observer->called(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); EXPECT_FALSE(observer->result()); // This did not affect the signaling state. EXPECT_EQ(PeerConnection::kClosed, caller->pc()->signaling_state()); @@ -874,7 +890,9 @@ TEST_P(PeerConnectionSignalingTest, // The operation should fail asynchronously. EXPECT_FALSE(observer->called()); - EXPECT_TRUE_WAIT(observer->called(), kWaitTimeout); + EXPECT_THAT(WaitUntil([&] { return observer->called(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); EXPECT_FALSE(observer->result()); // This did not affect the signaling state. EXPECT_EQ(PeerConnection::kClosed, caller->pc()->signaling_state()); @@ -896,8 +914,8 @@ TEST_P(PeerConnectionSignalingTest, UnsupportedContentType) { "m=bogus 9 FOO 0 8\r\n" "c=IN IP4 0.0.0.0\r\n" "a=mid:bogusmid\r\n"; - std::unique_ptr remote_description = - webrtc::CreateSessionDescription(SdpType::kOffer, sdp, nullptr); + std::unique_ptr remote_description = + CreateSessionDescription(SdpType::kOffer, sdp, nullptr); EXPECT_TRUE(caller->SetRemoteDescription(std::move(remote_description))); @@ -977,21 +995,16 @@ TEST_P(PeerConnectionSignalingTest, ReceiveFlexFec) { "a=ssrc-group:FEC-FR 1224551896 1953032773\r\n" "a=ssrc:1224551896 cname:/exJcmhSLpyu9FgV\r\n" "a=ssrc:1953032773 cname:/exJcmhSLpyu9FgV\r\n"; - std::unique_ptr remote_description = - webrtc::CreateSessionDescription(SdpType::kOffer, sdp, nullptr); + std::unique_ptr remote_description = + CreateSessionDescription(SdpType::kOffer, sdp, nullptr); EXPECT_TRUE(caller->SetRemoteDescription(std::move(remote_description))); auto answer = caller->CreateAnswer(); ASSERT_EQ(answer->description()->contents().size(), 1u); - ASSERT_NE( - answer->description()->contents()[0].media_description()->as_video(), - nullptr); - auto codecs = answer->description() - ->contents()[0] - .media_description() - ->as_video() - ->codecs(); + ASSERT_NE(answer->description()->contents()[0].media_description(), nullptr); + auto codecs = + answer->description()->contents()[0].media_description()->codecs(); ASSERT_EQ(codecs.size(), 2u); EXPECT_EQ(codecs[1].name, "flexfec-03"); @@ -1033,21 +1046,16 @@ TEST_P(PeerConnectionSignalingTest, ReceiveFlexFecReoffer) { "a=ssrc-group:FEC-FR 1224551896 1953032773\r\n" "a=ssrc:1224551896 cname:/exJcmhSLpyu9FgV\r\n" "a=ssrc:1953032773 cname:/exJcmhSLpyu9FgV\r\n"; - std::unique_ptr remote_description = - webrtc::CreateSessionDescription(SdpType::kOffer, sdp, nullptr); + std::unique_ptr remote_description = + CreateSessionDescription(SdpType::kOffer, sdp, nullptr); 
EXPECT_TRUE(caller->SetRemoteDescription(std::move(remote_description))); auto answer = caller->CreateAnswer(); ASSERT_EQ(answer->description()->contents().size(), 1u); - ASSERT_NE( - answer->description()->contents()[0].media_description()->as_video(), - nullptr); - auto codecs = answer->description() - ->contents()[0] - .media_description() - ->as_video() - ->codecs(); + ASSERT_NE(answer->description()->contents()[0].media_description(), nullptr); + auto codecs = + answer->description()->contents()[0].media_description()->codecs(); ASSERT_EQ(codecs.size(), 2u); EXPECT_EQ(codecs[1].name, "flexfec-03"); EXPECT_EQ(codecs[1].id, 35); @@ -1056,18 +1064,15 @@ TEST_P(PeerConnectionSignalingTest, ReceiveFlexFecReoffer) { // This generates a collision for AV1 which needs to be remapped. auto offer = caller->CreateOffer(RTCOfferAnswerOptions()); - auto offer_codecs = offer->description() - ->contents()[0] - .media_description() - ->as_video() - ->codecs(); + auto offer_codecs = + offer->description()->contents()[0].media_description()->codecs(); auto flexfec_it = std::find_if( offer_codecs.begin(), offer_codecs.end(), - [](const cricket::Codec& codec) { return codec.name == "flexfec-03"; }); + [](const Codec& codec) { return codec.name == "flexfec-03"; }); ASSERT_EQ(flexfec_it->id, 35); - auto av1_it = std::find_if( - offer_codecs.begin(), offer_codecs.end(), - [](const cricket::Codec& codec) { return codec.name == "AV1"; }); + auto av1_it = + std::find_if(offer_codecs.begin(), offer_codecs.end(), + [](const Codec& codec) { return codec.name == "AV1"; }); if (av1_it != offer_codecs.end()) { ASSERT_NE(av1_it->id, 35); } @@ -1104,8 +1109,8 @@ TEST_P(PeerConnectionSignalingTest, MidAttributeMaxLength) { "a=rtcp-fb:102 nack\r\n" "a=rtcp-fb:102 nack pli\r\n" "a=ssrc:1224551896 cname:/exJcmhSLpyu9FgV\r\n"; - std::unique_ptr remote_description = - webrtc::CreateSessionDescription(SdpType::kOffer, sdp, nullptr); + std::unique_ptr remote_description = + CreateSessionDescription(SdpType::kOffer, sdp, nullptr); EXPECT_FALSE(caller->SetRemoteDescription(std::move(remote_description))); } @@ -1138,7 +1143,7 @@ TEST_F(PeerConnectionSignalingUnifiedPlanTest, // waiting for it would not ensure synchronicity. RTC_DCHECK(!caller->pc()->GetTransceivers()[0]->mid().has_value()); caller->pc()->SetLocalDescription( - rtc::make_ref_counted().get(), + make_ref_counted().get(), offer.release()); EXPECT_TRUE(caller->pc()->GetTransceivers()[0]->mid().has_value()); } @@ -1155,8 +1160,7 @@ TEST_F(PeerConnectionSignalingUnifiedPlanTest, // other tests.) RTC_DCHECK(!caller->pc()->GetTransceivers()[0]->mid().has_value()); caller->pc()->SetLocalDescription( - std::move(offer), - rtc::make_ref_counted()); + std::move(offer), make_ref_counted()); EXPECT_TRUE(caller->pc()->GetTransceivers()[0]->mid().has_value()); } @@ -1168,19 +1172,21 @@ TEST_F(PeerConnectionSignalingUnifiedPlanTest, auto offer = caller->CreateOffer(RTCOfferAnswerOptions()); auto offer_observer = - rtc::make_ref_counted( + make_ref_counted( [pc = caller->pc()](SessionDescriptionInterface* desc) { // By not waiting for the observer's callback we can verify that the // operation executed immediately. 
RTC_DCHECK(!pc->GetTransceivers()[0]->mid().has_value()); pc->SetLocalDescription( - rtc::make_ref_counted() - .get(), + make_ref_counted().get(), desc); EXPECT_TRUE(pc->GetTransceivers()[0]->mid().has_value()); }); caller->pc()->CreateOffer(offer_observer.get(), RTCOfferAnswerOptions()); - EXPECT_TRUE_WAIT(offer_observer->was_called(), kWaitTimeout); + EXPECT_THAT(WaitUntil([&] { return offer_observer->was_called(); }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); } // Test that transports are shown in the sender/receiver API after offer/answer. @@ -1251,7 +1257,7 @@ TEST_F(PeerConnectionSignalingUnifiedPlanTest, auto caller = CreatePeerConnection(); EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); auto transceiver = - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, RtpTransceiverInit()); + caller->AddTransceiver(webrtc::MediaType::AUDIO, RtpTransceiverInit()); EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); EXPECT_TRUE(caller->pc()->ShouldFireNegotiationNeededEvent( caller->observer()->latest_negotiation_needed_event())); @@ -1262,10 +1268,10 @@ TEST_F(PeerConnectionSignalingUnifiedPlanTest, auto caller = CreatePeerConnection(); EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); auto transceiver = - caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, RtpTransceiverInit()); + caller->AddTransceiver(webrtc::MediaType::AUDIO, RtpTransceiverInit()); EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); - auto observer = rtc::make_ref_counted(); + auto observer = make_ref_counted(); caller->pc()->CreateOffer(observer.get(), RTCOfferAnswerOptions()); // For this test to work, the operation has to be pending, i.e. the observer // has not yet been invoked. @@ -1277,7 +1283,9 @@ TEST_F(PeerConnectionSignalingUnifiedPlanTest, // When the Operations Chain becomes empty again, a new negotiation needed // event will be generated that is not suppressed. - EXPECT_TRUE_WAIT(observer->called(), kWaitTimeout); + EXPECT_THAT(WaitUntil([&] { return observer->called(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}), + IsRtcOk()); EXPECT_TRUE(caller->observer()->has_negotiation_needed_event()); EXPECT_TRUE(caller->pc()->ShouldFireNegotiationNeededEvent( caller->observer()->latest_negotiation_needed_event())); @@ -1291,7 +1299,7 @@ TEST_F(PeerConnectionSignalingUnifiedPlanTest, EXPECT_FALSE(caller->observer()->has_negotiation_needed_event()); auto transceiver = - callee->AddTransceiver(cricket::MEDIA_TYPE_AUDIO, RtpTransceiverInit()); + callee->AddTransceiver(webrtc::MediaType::AUDIO, RtpTransceiverInit()); EXPECT_TRUE(callee->observer()->has_negotiation_needed_event()); // Change signaling state (to "have-remote-offer") by setting a remote offer. 
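// A minimal sketch of the wait-macro conversion applied throughout these
// hunks: the legacy polling form
//   EXPECT_TRUE_WAIT(observer->called(), kWaitTimeout);
// becomes the matcher-based form below, using WaitUntil() from
// "test/wait_until.h" and the IsRtcOk() matcher, with `observer` and
// `kWaitTimeout` standing in for whatever condition and timeout a given
// test polls.
EXPECT_THAT(WaitUntil([&] { return observer->called(); }, ::testing::IsTrue(),
                      {.timeout = webrtc::TimeDelta::Millis(kWaitTimeout)}),
            IsRtcOk());
// As with EXPECT_TRUE_WAIT, the wait processes pending messages on the
// current thread, which the tests above rely on to flush posted tasks
// before the expectation is evaluated.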
@@ -1339,8 +1347,8 @@ TEST_F(PeerConnectionSignalingUnifiedPlanTest, RtxReofferApt) { "a=rtcp-fb:102 nack\r\n" "a=rtcp-fb:102 nack pli\r\n" "a=ssrc:1224551896 cname:/exJcmhSLpyu9FgV\r\n"; - std::unique_ptr remote_description = - webrtc::CreateSessionDescription(SdpType::kOffer, sdp, nullptr); + std::unique_ptr remote_description = + CreateSessionDescription(SdpType::kOffer, sdp, nullptr); EXPECT_TRUE(callee->SetRemoteDescription(std::move(remote_description))); @@ -1350,19 +1358,34 @@ TEST_F(PeerConnectionSignalingUnifiedPlanTest, RtxReofferApt) { callee->pc()->GetTransceivers()[0]->StopStandard(); auto reoffer = callee->CreateOffer(RTCOfferAnswerOptions()); - auto codecs = reoffer->description() - ->contents()[0] - .media_description() - ->as_video() - ->codecs(); + auto codecs = + reoffer->description()->contents()[0].media_description()->codecs(); ASSERT_GT(codecs.size(), 2u); EXPECT_EQ(codecs[0].name, "VP8"); EXPECT_EQ(codecs[1].name, "rtx"); auto apt_it = codecs[1].params.find("apt"); ASSERT_NE(apt_it, codecs[1].params.end()); // The apt should match the id from the remote offer. - EXPECT_EQ(apt_it->second, rtc::ToString(codecs[0].id)); + EXPECT_EQ(apt_it->second, absl::StrCat(codecs[0].id)); EXPECT_EQ(apt_it->second, "102"); } +TEST_F(PeerConnectionSignalingUnifiedPlanTest, LoopbackSdpIsPossible) { + // This is not a recommended way of doing things. + // The test is added because an Android test tries to do it this way, + // and triggered surprising behavior. + auto caller = CreatePeerConnection(); + auto transceiver = + caller->AddTransceiver(webrtc::MediaType::AUDIO, RtpTransceiverInit()); + + auto offer = caller->CreateOffer(RTCOfferAnswerOptions()); + std::string offer_sdp; + ASSERT_TRUE(offer->ToString(&offer_sdp)); + std::string answer_sdp = + absl::StrReplaceAll(offer_sdp, {{"a=setup:actpass", "a=setup:active"}}); + EXPECT_TRUE(caller->SetLocalDescription(std::move(offer))); + auto answer = CreateSessionDescription(SdpType::kAnswer, answer_sdp); + EXPECT_TRUE(caller->SetRemoteDescription(std::move(answer))); +} + } // namespace webrtc diff --git a/pc/peer_connection_simulcast_unittest.cc b/pc/peer_connection_simulcast_unittest.cc index 6b6a96c473..7ff413d497 100644 --- a/pc/peer_connection_simulcast_unittest.cc +++ b/pc/peer_connection_simulcast_unittest.cc @@ -9,22 +9,16 @@ */ #include +#include #include -#include #include -#include // no-presubmit-check TODO(webrtc:8982) #include #include #include #include "absl/algorithm/container.h" -#include "absl/strings/match.h" -#include "absl/strings/string_view.h" -#include "api/audio/audio_mixer.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" -#include "api/audio_codecs/opus_audio_decoder_factory.h" -#include "api/audio_codecs/opus_audio_encoder_factory.h" #include "api/create_peerconnection_factory.h" #include "api/jsep.h" #include "api/media_types.h" @@ -35,7 +29,6 @@ #include "api/rtp_transceiver_direction.h" #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" -#include "api/uma_metrics.h" #include "api/video/video_codec_constants.h" #include "api/video_codecs/video_decoder_factory_template.h" #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" @@ -47,25 +40,16 @@ #include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h" #include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" -#include 
"media/base/media_constants.h" #include "media/base/rid_description.h" #include "media/base/stream_params.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "pc/channel_interface.h" #include "pc/peer_connection_wrapper.h" -#include "pc/sdp_utils.h" #include "pc/session_description.h" #include "pc/simulcast_description.h" #include "pc/test/fake_audio_capture_module.h" #include "pc/test/mock_peer_connection_observers.h" #include "pc/test/simulcast_layer_util.h" #include "rtc_base/checks.h" -#include "rtc_base/gunit.h" -#include "rtc_base/strings/string_builder.h" #include "rtc_base/thread.h" -#include "rtc_base/unique_id_generator.h" -#include "system_wrappers/include/metrics.h" #include "test/gmock.h" #include "test/gtest.h" @@ -83,39 +67,11 @@ using ::testing::Property; using ::testing::SizeIs; using ::testing::StartsWith; -using cricket::MediaContentDescription; -using cricket::RidDescription; -using cricket::SimulcastDescription; -using cricket::SimulcastLayer; -using cricket::StreamParams; - -namespace cricket { - -std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) - std::ostream& os, // no-presubmit-check TODO(webrtc:8982) - const SimulcastLayer& layer) { - if (layer.is_paused) { - os << "~"; - } - return os << layer.rid; -} - -} // namespace cricket - -namespace { - -#if RTC_METRICS_ENABLED -std::vector CreateLayers(int num_layers, bool active) { - rtc::UniqueStringGenerator rid_generator; - std::vector rids; - for (int i = 0; i < num_layers; ++i) { - rids.push_back(rid_generator.GenerateString()); - } - return webrtc::CreateLayers(rids, active); -} -#endif - -} // namespace +using webrtc::MediaContentDescription; +using ::webrtc::RidDescription; +using ::webrtc::SimulcastDescription; +using ::webrtc::SimulcastLayer; +using ::webrtc::StreamParams; namespace webrtc { @@ -123,9 +79,9 @@ class PeerConnectionSimulcastTests : public ::testing::Test { public: PeerConnectionSimulcastTests() : pc_factory_(CreatePeerConnectionFactory( - rtc::Thread::Current(), - rtc::Thread::Current(), - rtc::Thread::Current(), + Thread::Current(), + Thread::Current(), + Thread::Current(), FakeAudioCaptureModule::Create(), CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory(), @@ -142,7 +98,7 @@ class PeerConnectionSimulcastTests : public ::testing::Test { nullptr, nullptr)) {} - rtc::scoped_refptr CreatePeerConnection( + scoped_refptr CreatePeerConnection( MockPeerConnectionObserver* observer) { PeerConnectionInterface::RTCConfiguration config; config.sdp_semantics = SdpSemantics::kUnifiedPlan; @@ -179,10 +135,10 @@ class PeerConnectionSimulcastTests : public ::testing::Test { EXPECT_TRUE(local->SetRemoteDescription(std::move(answer), &err)) << err; } - rtc::scoped_refptr AddTransceiver( + scoped_refptr AddTransceiver( PeerConnectionWrapper* pc, const std::vector& layers, - cricket::MediaType media_type = cricket::MEDIA_TYPE_VIDEO) { + webrtc::MediaType media_type = webrtc::MediaType::VIDEO) { auto init = CreateTransceiverInit(layers); return pc->AddTransceiver(media_type, init); } @@ -199,7 +155,7 @@ class PeerConnectionSimulcastTests : public ::testing::Test { } void ValidateTransceiverParameters( - rtc::scoped_refptr transceiver, + scoped_refptr transceiver, const std::vector& layers) { auto parameters = transceiver->sender()->GetParameters(); std::vector result_layers; @@ -211,19 +167,9 @@ class PeerConnectionSimulcastTests : public ::testing::Test { } private: - rtc::scoped_refptr 
pc_factory_; + scoped_refptr pc_factory_; }; -#if RTC_METRICS_ENABLED -// This class is used to test the metrics emitted for simulcast. -class PeerConnectionSimulcastMetricsTests - : public PeerConnectionSimulcastTests, - public ::testing::WithParamInterface { - protected: - PeerConnectionSimulcastMetricsTests() { webrtc::metrics::Reset(); } -}; -#endif - // Validates that RIDs are supported arguments when adding a transceiver. TEST_F(PeerConnectionSimulcastTests, CanCreateTransceiverWithRid) { auto pc = CreatePeerConnectionWrapper(); @@ -250,7 +196,7 @@ TEST_F(PeerConnectionSimulcastTests, RidsAreAutogeneratedIfNotProvided) { for (RtpEncodingParameters& parameters : init.send_encodings) { parameters.rid = ""; } - auto transceiver = pc->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init); + auto transceiver = pc->AddTransceiver(webrtc::MediaType::VIDEO, init); auto parameters = transceiver->sender()->GetParameters(); ASSERT_EQ(3u, parameters.encodings.size()); EXPECT_THAT(parameters.encodings, @@ -266,7 +212,7 @@ TEST_F(PeerConnectionSimulcastTests, MustSupplyAllOrNoRidsInSimulcast) { auto layers = CreateLayers({"f", "h", "remove"}, true); auto init = CreateTransceiverInit(layers); init.send_encodings[2].rid = ""; - auto error = pc->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init); + auto error = pc->AddTransceiver(webrtc::MediaType::VIDEO, init); EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, error.error().type()); } @@ -276,7 +222,7 @@ TEST_F(PeerConnectionSimulcastTests, ChecksForIllegalRidValues) { auto pc = pc_wrapper->pc(); auto layers = CreateLayers({"f", "h", "~q"}, true); auto init = CreateTransceiverInit(layers); - auto error = pc->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init); + auto error = pc->AddTransceiver(webrtc::MediaType::VIDEO, init); EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, error.error().type()); } @@ -550,7 +496,7 @@ TEST_F(PeerConnectionSimulcastTests, NegotiationDoesNotHaveRidExtensionFails) { for (const SimulcastLayer& layer : layers) { receive_layers.AddLayer(layer); } - cricket::RtpHeaderExtensions extensions; + RtpHeaderExtensions extensions; for (auto extension : mcd_answer->rtp_header_extensions()) { if (extension.uri != RtpExtension::kRidUri) { extensions.push_back(extension); @@ -569,7 +515,7 @@ TEST_F(PeerConnectionSimulcastTests, SimulcastAudioRejected) { auto remote = CreatePeerConnectionWrapper(); auto layers = CreateLayers({"1", "2", "3", "4"}, true); auto transceiver = - AddTransceiver(local.get(), layers, cricket::MEDIA_TYPE_AUDIO); + AddTransceiver(local.get(), layers, webrtc::MediaType::AUDIO); // Should only have the first layer. auto parameters = transceiver->sender()->GetParameters(); EXPECT_EQ(1u, parameters.encodings.size()); @@ -603,27 +549,4 @@ TEST_F(PeerConnectionSimulcastTests, SimulcastSldModificationRejected) { EXPECT_TRUE(modified_offer); EXPECT_TRUE(local->SetLocalDescription(std::move(modified_offer))); } - -#if RTC_METRICS_ENABLED - -const int kMaxLayersInMetricsTest = 8; - -// Checks that the number of send encodings is logged in a metric. 
-TEST_P(PeerConnectionSimulcastMetricsTests, NumberOfSendEncodingsIsLogged) { - auto local = CreatePeerConnectionWrapper(); - auto num_layers = GetParam(); - auto layers = ::CreateLayers(num_layers, true); - AddTransceiver(local.get(), layers); - EXPECT_EQ(1, metrics::NumSamples( - "WebRTC.PeerConnection.Simulcast.NumberOfSendEncodings")); - EXPECT_EQ(1, metrics::NumEvents( - "WebRTC.PeerConnection.Simulcast.NumberOfSendEncodings", - num_layers)); -} - -INSTANTIATE_TEST_SUITE_P(NumberOfSendEncodings, - PeerConnectionSimulcastMetricsTests, - ::testing::Range(0, kMaxLayersInMetricsTest)); -#endif - } // namespace webrtc diff --git a/pc/peer_connection_stability_integrationtest.cc b/pc/peer_connection_stability_integrationtest.cc new file mode 100644 index 0000000000..61c6ba0ac4 --- /dev/null +++ b/pc/peer_connection_stability_integrationtest.cc @@ -0,0 +1,955 @@ +/* + * Copyright 2025 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// Integration tests that verify that certain properties remain the same +// over time. +// It is expected that these tests will have to be changed frequently. +// The error messages when the tests fail are intended to contain C++ code +// that can be pasted into the test when updating it. + +#include + +#include +#include +#include +#include + +#include "absl/strings/str_cat.h" +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/jsep.h" +#include "api/peer_connection_interface.h" +#include "api/scoped_refptr.h" +#include "api/test/rtc_error_matchers.h" +#include "api/video_codecs/builtin_video_decoder_factory.h" +#include "api/video_codecs/builtin_video_encoder_factory.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "pc/session_description.h" +#include "pc/test/integration_test_helpers.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/wait_until.h" + +namespace webrtc { + +namespace { + +using ::testing::ElementsAreArray; +using ::testing::Eq; +using ::testing::Not; + +class FactorySignature { + public: + // Constant naming: kWebRtc* is for configurations that occur + // on bots in the WebRTC repository. Other configurations are added + // based on downstream products that need listing. + enum class Id { + kNotRecognized, + kWebRtcTipOfTree, + kWebRtcMoreConfigs1, + kWebRtcAndroid, + kGoogleInternal, + }; + Id id() { return id_; } + FactorySignature() { + ExtractSignatureStrings(); + id_ = RecognizeSignature(); + } + + private: + // Extract a set of strings characterizing the factory in use. 
+ void ExtractSignatureStrings() { + scoped_refptr audio_decoders = + CreateBuiltinAudioDecoderFactory(); + for (const auto& codec : audio_decoders->GetSupportedDecoders()) { + StringBuilder sb; + sb << "Decode audio/"; + sb << codec.format.name << "/" << codec.format.clockrate_hz << "/" + << codec.format.num_channels; + for (const auto& param : codec.format.parameters) { + sb << ";" << param.first << ":" << param.second; + } + signature_.push_back(sb.Release()); + } + scoped_refptr audio_encoders = + CreateBuiltinAudioEncoderFactory(); + for (const auto& codec : audio_encoders->GetSupportedEncoders()) { + StringBuilder sb; + sb << "Encode audio/"; + sb << codec.format.name << "/" << codec.format.clockrate_hz << "/" + << codec.format.num_channels; + for (const auto& param : codec.format.parameters) { + sb << ";" << param.first << ":" << param.second; + } + signature_.push_back(sb.Release()); + } + std::unique_ptr video_decoders = + CreateBuiltinVideoDecoderFactory(); + for (const SdpVideoFormat& format : video_decoders->GetSupportedFormats()) { + StringBuilder sb; + sb << "Decode video/"; + sb << format.name; + for (const auto& kv : format.parameters) { + sb << ";" << kv.first << ":" << kv.second; + } + signature_.push_back(sb.Release()); + } + std::unique_ptr video_encoders = + CreateBuiltinVideoEncoderFactory(); + for (const auto& format : video_encoders->GetSupportedFormats()) { + StringBuilder sb; + sb << "Encode video/"; + // We don't use format.ToString because that includes scalability modes, + // which aren't supposed to influence SDP. + sb << format.name; + for (const auto& kv : format.parameters) { + sb << ";" << kv.first << ":" << kv.second; + } + signature_.push_back(sb.Release()); + } + } + Id RecognizeSignature() { + std::vector webrtc_tip_of_tree = { + "Decode audio/opus/48000/2;minptime:10;useinbandfec:1", + "Decode audio/G722/8000/1", + "Decode audio/PCMU/8000/1", + "Decode audio/PCMA/8000/1", + "Encode audio/opus/48000/2;minptime:10;useinbandfec:1", + "Encode audio/G722/8000/1", + "Encode audio/PCMU/8000/1", + "Encode audio/PCMA/8000/1", + "Decode video/VP8", + "Decode video/VP9;profile-id:0", + "Decode video/VP9;profile-id:2", + "Decode video/VP9;profile-id:1", + "Decode video/VP9;profile-id:3", + "Decode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:1;profile-level-id:" + "42001f", + "Decode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:0;profile-level-id:" + "42001f", + "Decode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:1;profile-level-id:" + "42e01f", + "Decode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:0;profile-level-id:" + "42e01f", + "Decode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:1;profile-level-id:" + "4d001f", + "Decode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:0;profile-level-id:" + "4d001f", + "Decode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:1;profile-level-id:" + "f4001f", + "Decode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:0;profile-level-id:" + "f4001f", + "Decode video/AV1;level-idx:5;profile:0;tier:0", + "Decode video/AV1;level-idx:5;profile:1;tier:0", + "Encode video/VP8", + "Encode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:1;profile-level-id:" + "42001f", + "Encode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:0;profile-level-id:" + "42001f", + "Encode " + "video/" + 
"H264;level-asymmetry-allowed:1;packetization-mode:1;profile-level-id:" + "42e01f", + "Encode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:0;profile-level-id:" + "42e01f", + "Encode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:1;profile-level-id:" + "4d001f", + "Encode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:0;profile-level-id:" + "4d001f", + "Encode video/AV1;level-idx:5;profile:0;tier:0", + "Encode video/VP9;profile-id:0", + "Encode video/VP9;profile-id:2", + }; + if (signature_ == webrtc_tip_of_tree) { + return Id::kWebRtcTipOfTree; + } + std::vector linux_more_configs_1 = { + "Decode audio/opus/48000/2;minptime:10;useinbandfec:1", + "Decode audio/G722/8000/1", + "Decode audio/PCMU/8000/1", + "Decode audio/PCMA/8000/1", + "Encode audio/opus/48000/2;minptime:10;useinbandfec:1", + "Encode audio/G722/8000/1", + "Encode audio/PCMU/8000/1", + "Encode audio/PCMA/8000/1", + "Decode video/VP8", + "Decode video/VP9;profile-id:0", + "Decode video/VP9;profile-id:2", + "Decode video/VP9;profile-id:1", + "Decode video/VP9;profile-id:3", + "Decode video/AV1;level-idx:5;profile:0;tier:0", + "Decode video/AV1;level-idx:5;profile:1;tier:0", + "Encode video/VP8", + "Encode video/AV1;level-idx:5;profile:0;tier:0", + "Encode video/VP9;profile-id:0", + "Encode video/VP9;profile-id:2", + }; + if (signature_ == linux_more_configs_1) { + return Id::kWebRtcMoreConfigs1; + } + std::vector android = { + "Decode audio/opus/48000/2;minptime:10;useinbandfec:1", + "Decode audio/G722/8000/1", + "Decode audio/PCMU/8000/1", + "Decode audio/PCMA/8000/1", + "Encode audio/opus/48000/2;minptime:10;useinbandfec:1", + "Encode audio/G722/8000/1", + "Encode audio/PCMU/8000/1", + "Encode audio/PCMA/8000/1", + "Decode video/VP8", + "Decode video/VP9;profile-id:0", + "Decode video/VP9;profile-id:1", + "Decode video/VP9;profile-id:3", + "Decode video/AV1;level-idx:5;profile:0;tier:0", + "Decode video/AV1;level-idx:5;profile:1;tier:0", + "Encode video/VP8", + "Encode video/AV1;level-idx:5;profile:0;tier:0", + "Encode video/VP9;profile-id:0", + }; + if (signature_ == android) { + return Id::kWebRtcAndroid; + } + std::vector google_internal = { + "Decode audio/opus/48000/2;minptime:10;useinbandfec:1", + "Decode audio/G722/8000/1", + "Decode audio/PCMU/8000/1", + "Decode audio/PCMA/8000/1", + "Encode audio/opus/48000/2;minptime:10;useinbandfec:1", + "Encode audio/G722/8000/1", + "Encode audio/PCMU/8000/1", + "Encode audio/PCMA/8000/1", + "Decode video/VP8", + "Decode video/VP9;profile-id:0", + "Decode video/VP9;profile-id:1", + "Decode video/VP9;profile-id:3", + "Decode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:1;profile-level-id:" + "42001f", + "Decode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:0;profile-level-id:" + "42001f", + "Decode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:1;profile-level-id:" + "42e01f", + "Decode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:0;profile-level-id:" + "42e01f", + "Decode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:1;profile-level-id:" + "4d001f", + "Decode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:0;profile-level-id:" + "4d001f", + "Decode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:1;profile-level-id:" + "f4001f", + "Decode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:0;profile-level-id:" + "f4001f", + "Encode video/VP8", + "Encode " + "video/" + 
"H264;level-asymmetry-allowed:1;packetization-mode:1;profile-level-id:" + "42001f", + "Encode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:0;profile-level-id:" + "42001f", + "Encode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:1;profile-level-id:" + "42e01f", + "Encode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:0;profile-level-id:" + "42e01f", + "Encode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:1;profile-level-id:" + "4d001f", + "Encode " + "video/" + "H264;level-asymmetry-allowed:1;packetization-mode:0;profile-level-id:" + "4d001f", + "Encode video/VP9;profile-id:0", + }; + if (signature_ == google_internal) { + return Id::kGoogleInternal; + } + // If unrecognized, produce a debug printout. + StringBuilder sb; + sb << "{\n"; + for (std::string str : signature_) { + sb << "\"" << str << "\",\n"; + } + sb << "}\n"; + RTC_LOG(LS_ERROR) << "New factory signature: " << sb.str(); + return Id::kNotRecognized; + } + + std::vector signature_; + Id id_; +}; + +class ResultingCodecList { + public: + FactorySignature::Id factory_id; + std::vector caller_local; + std::vector caller_remote; + std::vector callee_local; + std::vector callee_remote; +}; + +class PeerConnectionIntegrationTest : public PeerConnectionIntegrationBaseTest { + protected: + PeerConnectionIntegrationTest() + : PeerConnectionIntegrationBaseTest(SdpSemantics::kUnifiedPlan) {} + + std::vector CodecList( + const SessionDescriptionInterface& desc_interface) { + std::vector results; + int media_section_counter = 0; + const SessionDescription* desc = desc_interface.description(); + for (auto& content : desc->contents()) { + ++media_section_counter; + const auto* media_description = content.media_description(); + const auto& codecs = media_description->codecs(); + for (const auto& codec : codecs) { + StringBuilder str; + str << media_section_counter << " " << absl::StrCat(codec); + results.push_back(str.Release()); + } + } + return results; + } + + // This function returns a string with a C++ initializer for a + // ResultingCodecList object. The intended use is to paste the string from the + // log into the source code when updating the test. + std::string DumpAsResultingCodecList(FactorySignature::Id id, + std::vector caller_local, + std::vector caller_remote, + std::vector callee_local, + std::vector callee_remote) { + StringBuilder sb; + // TODO: issues.webrtc.org/397895867 - change kChangeThis to the name of + // the value. Requires adding an AbslStringifier to the enum. 
+ sb << "\n{" << ".factory_id = FactorySignature::Id::kChangeThis" + << static_cast(id) << ",\n" + << ".caller_local = {"; + for (const std::string& str : caller_local) { + sb << "\"" << str << "\",\n"; + } + sb << "},\n .caller_remote = {"; + for (const std::string& str : caller_remote) { + sb << "\"" << str << "\",\n"; + } + sb << "},\n .callee_local = {"; + for (const std::string& str : callee_local) { + sb << "\"" << str << "\",\n"; + } + sb << "},\n .callee_remote = {"; + for (const std::string& str : callee_remote) { + sb << "\"" << str << "\",\n"; + } + sb << "}}\n"; + return sb.Release(); + } +}; + +TEST_F(PeerConnectionIntegrationTest, BasicOfferAnswerPayloadTypesStable) { + FactorySignature factory_signature; + ASSERT_THAT(factory_signature.id(), + Not(Eq(FactorySignature::Id::kNotRecognized))); + ASSERT_TRUE(CreatePeerConnectionWrappers()); + ConnectFakeSignalingForSdpOnly(); + caller()->AddAudioVideoTracks(); + callee()->AddAudioVideoTracks(); + // Start offer/answer exchange and wait for it to complete. + caller()->CreateAndSetAndSignalOffer(); + + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); + + // Extract PT and codec from all media sections, and check that they + // are stable (what was expected). + // Maintenance: In order to get a new golden set of strings, make the list + // empty and run. Gmock will output a valid C++ array initializer for you. + + std::vector golden_answers = { + {.factory_id = FactorySignature::Id::kWebRtcTipOfTree, + .caller_local = + {"1 [111:audio/opus/48000/2;minptime=10;useinbandfec=1]", + "1 [63:audio/red/48000/2;=111/111]", + "1 [9:audio/G722/8000/1]", + "1 [0:audio/PCMU/8000/1]", + "1 [8:audio/PCMA/8000/1]", + "1 [13:audio/CN/8000/1]", + "1 [110:audio/telephone-event/48000/1]", + "1 [126:audio/telephone-event/8000/1]", + "2 [96:video/VP8/90000/0]", + "2 [97:video/rtx/90000/0;apt=96]", + "2 " + "[103:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "42001f]", + "2 [104:video/rtx/90000/0;apt=103]", + "2 " + "[107:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42001f]", + "2 [108:video/rtx/90000/0;apt=107]", + "2 " + "[109:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "42e01f]", + "2 [114:video/rtx/90000/0;apt=109]", + "2 " + "[115:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42e01f]", + "2 [116:video/rtx/90000/0;apt=115]", + "2 " + "[117:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "4d001f]", + "2 [118:video/rtx/90000/0;apt=117]", + "2 " + "[39:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "4d001f]", + "2 [40:video/rtx/90000/0;apt=39]", + "2 [45:video/AV1/90000/0;level-idx=5;profile=0;tier=0]", + "2 [46:video/rtx/90000/0;apt=45]", + "2 [98:video/VP9/90000/0;profile-id=0]", + "2 [99:video/rtx/90000/0;apt=98]", + "2 [100:video/VP9/90000/0;profile-id=2]", + "2 [101:video/rtx/90000/0;apt=100]", + "2 [119:video/red/90000/0]", + "2 [120:video/rtx/90000/0;apt=119]", + "2 [121:video/ulpfec/90000/0]"}, + .caller_remote = + {"1 [111:audio/opus/48000/2;minptime=10;useinbandfec=1]", + "1 [63:audio/red/48000/2;=111/111]", + "1 [9:audio/G722/8000/1]", + "1 [0:audio/PCMU/8000/1]", + "1 [8:audio/PCMA/8000/1]", + "1 [13:audio/CN/8000/1]", + "1 [110:audio/telephone-event/48000/1]", + "1 [126:audio/telephone-event/8000/1]", + "2 
[96:video/VP8/90000/0]", + "2 [97:video/rtx/90000/0;apt=96]", + "2 " + "[103:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "42001f]", + "2 [104:video/rtx/90000/0;apt=103]", + "2 " + "[107:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42001f]", + "2 [108:video/rtx/90000/0;apt=107]", + "2 " + "[109:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "42e01f]", + "2 [114:video/rtx/90000/0;apt=109]", + "2 " + "[115:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42e01f]", + "2 [116:video/rtx/90000/0;apt=115]", + "2 " + "[117:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "4d001f]", + "2 [118:video/rtx/90000/0;apt=117]", + "2 " + "[39:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "4d001f]", + "2 [40:video/rtx/90000/0;apt=39]", + "2 [45:video/AV1/90000/0;level-idx=5;profile=0;tier=0]", + "2 [46:video/rtx/90000/0;apt=45]", + "2 [98:video/VP9/90000/0;profile-id=0]", + "2 [99:video/rtx/90000/0;apt=98]", + "2 [100:video/VP9/90000/0;profile-id=2]", + "2 [101:video/rtx/90000/0;apt=100]", + "2 [119:video/red/90000/0]", + "2 [120:video/rtx/90000/0;apt=119]", + "2 [121:video/ulpfec/90000/0]"}, + .callee_local = + {"1 [111:audio/opus/48000/2;minptime=10;useinbandfec=1]", + "1 [63:audio/red/48000/2;=111/111]", + "1 [9:audio/G722/8000/1]", + "1 [0:audio/PCMU/8000/1]", + "1 [8:audio/PCMA/8000/1]", + "1 [13:audio/CN/8000/1]", + "1 [110:audio/telephone-event/48000/1]", + "1 [126:audio/telephone-event/8000/1]", + "2 [96:video/VP8/90000/0]", + "2 [97:video/rtx/90000/0;apt=96]", + "2 " + "[103:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "42001f]", + "2 [104:video/rtx/90000/0;apt=103]", + "2 " + "[107:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42001f]", + "2 [108:video/rtx/90000/0;apt=107]", + "2 " + "[109:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "42e01f]", + "2 [114:video/rtx/90000/0;apt=109]", + "2 " + "[115:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42e01f]", + "2 [116:video/rtx/90000/0;apt=115]", + "2 " + "[117:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "4d001f]", + "2 [118:video/rtx/90000/0;apt=117]", + "2 " + "[39:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "4d001f]", + "2 [40:video/rtx/90000/0;apt=39]", + "2 [45:video/AV1/90000/0;level-idx=5;profile=0;tier=0]", + "2 [46:video/rtx/90000/0;apt=45]", + "2 [98:video/VP9/90000/0;profile-id=0]", + "2 [99:video/rtx/90000/0;apt=98]", + "2 [100:video/VP9/90000/0;profile-id=2]", + "2 [101:video/rtx/90000/0;apt=100]", + "2 [119:video/red/90000/0]", + "2 [120:video/rtx/90000/0;apt=119]", + "2 [121:video/ulpfec/90000/0]"}, + .callee_remote = + {"1 [111:audio/opus/48000/2;minptime=10;useinbandfec=1]", + "1 [63:audio/red/48000/2;=111/111]", + "1 [9:audio/G722/8000/1]", + "1 [0:audio/PCMU/8000/1]", + "1 [8:audio/PCMA/8000/1]", + "1 [13:audio/CN/8000/1]", + "1 [110:audio/telephone-event/48000/1]", + "1 [126:audio/telephone-event/8000/1]", + "2 [96:video/VP8/90000/0]", + "2 [97:video/rtx/90000/0;apt=96]", + "2 " + "[103:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "42001f]", 
+ "2 [104:video/rtx/90000/0;apt=103]", + "2 " + "[107:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42001f]", + "2 [108:video/rtx/90000/0;apt=107]", + "2 " + "[109:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "42e01f]", + "2 [114:video/rtx/90000/0;apt=109]", + "2 " + "[115:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42e01f]", + "2 [116:video/rtx/90000/0;apt=115]", + "2 " + "[117:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "4d001f]", + "2 [118:video/rtx/90000/0;apt=117]", + "2 " + "[39:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "4d001f]", + "2 [40:video/rtx/90000/0;apt=39]", + "2 [45:video/AV1/90000/0;level-idx=5;profile=0;tier=0]", + "2 [46:video/rtx/90000/0;apt=45]", + "2 [98:video/VP9/90000/0;profile-id=0]", + "2 [99:video/rtx/90000/0;apt=98]", + "2 [100:video/VP9/90000/0;profile-id=2]", + "2 [101:video/rtx/90000/0;apt=100]", + "2 [119:video/red/90000/0]", + "2 [120:video/rtx/90000/0;apt=119]", + "2 [121:video/ulpfec/90000/0]"}}, + + {.factory_id = FactorySignature::Id::kWebRtcMoreConfigs1, + .caller_local = + {"1 [111:audio/opus/48000/2;minptime=10;useinbandfec=1]", + "1 [63:audio/red/48000/2;=111/111]", "1 [9:audio/G722/8000/1]", + "1 [0:audio/PCMU/8000/1]", "1 [8:audio/PCMA/8000/1]", + "1 [13:audio/CN/8000/1]", "1 [110:audio/telephone-event/48000/1]", + "1 [126:audio/telephone-event/8000/1]", "2 [96:video/VP8/90000/0]", + "2 [97:video/rtx/90000/0;apt=96]", + "2 [39:video/AV1/90000/0;level-idx=5;profile=0;tier=0]", + "2 [40:video/rtx/90000/0;apt=39]", + "2 [98:video/VP9/90000/0;profile-id=0]", + "2 [99:video/rtx/90000/0;apt=98]", + "2 [100:video/VP9/90000/0;profile-id=2]", + "2 [101:video/rtx/90000/0;apt=100]", "2 [103:video/red/90000/0]", + "2 [104:video/rtx/90000/0;apt=103]", + "2 [107:video/ulpfec/90000/0]"}, + .caller_remote = + {"1 [111:audio/opus/48000/2;minptime=10;useinbandfec=1]", + "1 [63:audio/red/48000/2;=111/111]", "1 [9:audio/G722/8000/1]", + "1 [0:audio/PCMU/8000/1]", "1 [8:audio/PCMA/8000/1]", + "1 [13:audio/CN/8000/1]", "1 [110:audio/telephone-event/48000/1]", + "1 [126:audio/telephone-event/8000/1]", "2 [96:video/VP8/90000/0]", + "2 [97:video/rtx/90000/0;apt=96]", + "2 [39:video/AV1/90000/0;level-idx=5;profile=0;tier=0]", + "2 [40:video/rtx/90000/0;apt=39]", + "2 [98:video/VP9/90000/0;profile-id=0]", + "2 [99:video/rtx/90000/0;apt=98]", + "2 [100:video/VP9/90000/0;profile-id=2]", + "2 [101:video/rtx/90000/0;apt=100]", "2 [103:video/red/90000/0]", + "2 [104:video/rtx/90000/0;apt=103]", + "2 [107:video/ulpfec/90000/0]"}, + .callee_local = + {"1 [111:audio/opus/48000/2;minptime=10;useinbandfec=1]", + "1 [63:audio/red/48000/2;=111/111]", "1 [9:audio/G722/8000/1]", + "1 [0:audio/PCMU/8000/1]", "1 [8:audio/PCMA/8000/1]", + "1 [13:audio/CN/8000/1]", "1 [110:audio/telephone-event/48000/1]", + "1 [126:audio/telephone-event/8000/1]", "2 [96:video/VP8/90000/0]", + "2 [97:video/rtx/90000/0;apt=96]", + "2 [39:video/AV1/90000/0;level-idx=5;profile=0;tier=0]", + "2 [40:video/rtx/90000/0;apt=39]", + "2 [98:video/VP9/90000/0;profile-id=0]", + "2 [99:video/rtx/90000/0;apt=98]", + "2 [100:video/VP9/90000/0;profile-id=2]", + "2 [101:video/rtx/90000/0;apt=100]", "2 [103:video/red/90000/0]", + "2 [104:video/rtx/90000/0;apt=103]", + "2 [107:video/ulpfec/90000/0]"}, + .callee_remote = + {"1 [111:audio/opus/48000/2;minptime=10;useinbandfec=1]", + "1 
[63:audio/red/48000/2;=111/111]", "1 [9:audio/G722/8000/1]", + "1 [0:audio/PCMU/8000/1]", "1 [8:audio/PCMA/8000/1]", + "1 [13:audio/CN/8000/1]", "1 [110:audio/telephone-event/48000/1]", + "1 [126:audio/telephone-event/8000/1]", "2 [96:video/VP8/90000/0]", + "2 [97:video/rtx/90000/0;apt=96]", + "2 [39:video/AV1/90000/0;level-idx=5;profile=0;tier=0]", + "2 [40:video/rtx/90000/0;apt=39]", + "2 [98:video/VP9/90000/0;profile-id=0]", + "2 [99:video/rtx/90000/0;apt=98]", + "2 [100:video/VP9/90000/0;profile-id=2]", + "2 [101:video/rtx/90000/0;apt=100]", "2 [103:video/red/90000/0]", + "2 [104:video/rtx/90000/0;apt=103]", + "2 [107:video/ulpfec/90000/0]"}}, + {.factory_id = FactorySignature::Id::kWebRtcAndroid, + .caller_local = + {"1 [111:audio/opus/48000/2;minptime=10;useinbandfec=1]", + "1 [63:audio/red/48000/2;=111/111]", "1 [9:audio/G722/8000/1]", + "1 [0:audio/PCMU/8000/1]", "1 [8:audio/PCMA/8000/1]", + "1 [13:audio/CN/8000/1]", "1 [110:audio/telephone-event/48000/1]", + "1 [126:audio/telephone-event/8000/1]", "2 [96:video/VP8/90000/0]", + "2 [97:video/rtx/90000/0;apt=96]", + "2 [39:video/AV1/90000/0;level-idx=5;profile=0;tier=0]", + "2 [40:video/rtx/90000/0;apt=39]", + "2 [98:video/VP9/90000/0;profile-id=0]", + "2 [99:video/rtx/90000/0;apt=98]", "2 [100:video/red/90000/0]", + "2 [101:video/rtx/90000/0;apt=100]", + "2 [103:video/ulpfec/90000/0]"}, + .caller_remote = + {"1 [111:audio/opus/48000/2;minptime=10;useinbandfec=1]", + "1 [63:audio/red/48000/2;=111/111]", "1 [9:audio/G722/8000/1]", + "1 [0:audio/PCMU/8000/1]", "1 [8:audio/PCMA/8000/1]", + "1 [13:audio/CN/8000/1]", "1 [110:audio/telephone-event/48000/1]", + "1 [126:audio/telephone-event/8000/1]", "2 [96:video/VP8/90000/0]", + "2 [97:video/rtx/90000/0;apt=96]", + "2 [39:video/AV1/90000/0;level-idx=5;profile=0;tier=0]", + "2 [40:video/rtx/90000/0;apt=39]", + "2 [98:video/VP9/90000/0;profile-id=0]", + "2 [99:video/rtx/90000/0;apt=98]", "2 [100:video/red/90000/0]", + "2 [101:video/rtx/90000/0;apt=100]", + "2 [103:video/ulpfec/90000/0]"}, + .callee_local = + {"1 [111:audio/opus/48000/2;minptime=10;useinbandfec=1]", + "1 [63:audio/red/48000/2;=111/111]", "1 [9:audio/G722/8000/1]", + "1 [0:audio/PCMU/8000/1]", "1 [8:audio/PCMA/8000/1]", + "1 [13:audio/CN/8000/1]", "1 [110:audio/telephone-event/48000/1]", + "1 [126:audio/telephone-event/8000/1]", "2 [96:video/VP8/90000/0]", + "2 [97:video/rtx/90000/0;apt=96]", + "2 [39:video/AV1/90000/0;level-idx=5;profile=0;tier=0]", + "2 [40:video/rtx/90000/0;apt=39]", + "2 [98:video/VP9/90000/0;profile-id=0]", + "2 [99:video/rtx/90000/0;apt=98]", "2 [100:video/red/90000/0]", + "2 [101:video/rtx/90000/0;apt=100]", + "2 [103:video/ulpfec/90000/0]"}, + .callee_remote = + {"1 [111:audio/opus/48000/2;minptime=10;useinbandfec=1]", + "1 [63:audio/red/48000/2;=111/111]", "1 [9:audio/G722/8000/1]", + "1 [0:audio/PCMU/8000/1]", "1 [8:audio/PCMA/8000/1]", + "1 [13:audio/CN/8000/1]", "1 [110:audio/telephone-event/48000/1]", + "1 [126:audio/telephone-event/8000/1]", "2 [96:video/VP8/90000/0]", + "2 [97:video/rtx/90000/0;apt=96]", + "2 [39:video/AV1/90000/0;level-idx=5;profile=0;tier=0]", + "2 [40:video/rtx/90000/0;apt=39]", + "2 [98:video/VP9/90000/0;profile-id=0]", + "2 [99:video/rtx/90000/0;apt=98]", "2 [100:video/red/90000/0]", + "2 [101:video/rtx/90000/0;apt=100]", + "2 [103:video/ulpfec/90000/0]"}}, + {.factory_id = FactorySignature::Id::kGoogleInternal, + .caller_local = + {"1 [111:audio/opus/48000/2;minptime=10;useinbandfec=1]", + "1 [63:audio/red/48000/2;=111/111]", + "1 [9:audio/G722/8000/1]", + "1 
[0:audio/PCMU/8000/1]", + "1 [8:audio/PCMA/8000/1]", + "1 [13:audio/CN/8000/1]", + "1 [110:audio/telephone-event/48000/1]", + "1 [126:audio/telephone-event/8000/1]", + "2 [96:video/VP8/90000/0]", + "2 [97:video/rtx/90000/0;apt=96]", + "2 " + "[100:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-" + "id=42001f]", + "2 [101:video/rtx/90000/0;apt=100]", + "2 " + "[103:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-" + "id=42001f]", + "2 [104:video/rtx/90000/0;apt=103]", + "2 " + "[107:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-" + "id=42e01f]", + "2 [108:video/rtx/90000/0;apt=107]", + "2 " + "[109:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-" + "id=42e01f]", + "2 [114:video/rtx/90000/0;apt=109]", + "2 " + "[115:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-" + "id=4d001f]", + "2 [116:video/rtx/90000/0;apt=115]", + "2 " + "[39:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-" + "id=4d001f]", + "2 [40:video/rtx/90000/0;apt=39]", + "2 [98:video/VP9/90000/0;profile-id=0]", + "2 [99:video/rtx/90000/0;apt=98]", + "2 [117:video/red/90000/0]", + "2 [118:video/rtx/90000/0;apt=117]", + "2 [119:video/ulpfec/90000/0]"}, + .caller_remote = + {"1 [111:audio/opus/48000/2;minptime=10;useinbandfec=1]", + "1 [63:audio/red/48000/2;=111/111]", + "1 [9:audio/G722/8000/1]", + "1 [0:audio/PCMU/8000/1]", + "1 [8:audio/PCMA/8000/1]", + "1 [13:audio/CN/8000/1]", + "1 [110:audio/telephone-event/48000/1]", + "1 [126:audio/telephone-event/8000/1]", + "2 [96:video/VP8/90000/0]", + "2 [97:video/rtx/90000/0;apt=96]", + "2 " + "[100:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-" + "id=42001f]", + "2 [101:video/rtx/90000/0;apt=100]", + "2 " + "[103:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-" + "id=42001f]", + "2 [104:video/rtx/90000/0;apt=103]", + "2 " + "[107:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-" + "id=42e01f]", + "2 [108:video/rtx/90000/0;apt=107]", + "2 " + "[109:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-" + "id=42e01f]", + "2 [114:video/rtx/90000/0;apt=109]", + "2 " + "[115:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-" + "id=4d001f]", + "2 [116:video/rtx/90000/0;apt=115]", + "2 " + "[39:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-" + "id=4d001f]", + "2 [40:video/rtx/90000/0;apt=39]", + "2 [98:video/VP9/90000/0;profile-id=0]", + "2 [99:video/rtx/90000/0;apt=98]", + "2 [117:video/red/90000/0]", + "2 [118:video/rtx/90000/0;apt=117]", + "2 [119:video/ulpfec/90000/0]"}, + .callee_local = + {"1 [111:audio/opus/48000/2;minptime=10;useinbandfec=1]", + "1 [63:audio/red/48000/2;=111/111]", + "1 [9:audio/G722/8000/1]", + "1 [0:audio/PCMU/8000/1]", + "1 [8:audio/PCMA/8000/1]", + "1 [13:audio/CN/8000/1]", + "1 [110:audio/telephone-event/48000/1]", + "1 [126:audio/telephone-event/8000/1]", + "2 [96:video/VP8/90000/0]", + "2 [97:video/rtx/90000/0;apt=96]", + "2 " + "[100:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-" + "id=42001f]", + "2 [101:video/rtx/90000/0;apt=100]", + "2 " + "[103:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-" + "id=42001f]", + "2 
[104:video/rtx/90000/0;apt=103]", + "2 " + "[107:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-" + "id=42e01f]", + "2 [108:video/rtx/90000/0;apt=107]", + "2 " + "[109:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-" + "id=42e01f]", + "2 [114:video/rtx/90000/0;apt=109]", + "2 " + "[115:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-" + "id=4d001f]", + "2 [116:video/rtx/90000/0;apt=115]", + "2 " + "[39:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-" + "id=4d001f]", + "2 [40:video/rtx/90000/0;apt=39]", + "2 [98:video/VP9/90000/0;profile-id=0]", + "2 [99:video/rtx/90000/0;apt=98]", + "2 [117:video/red/90000/0]", + "2 [118:video/rtx/90000/0;apt=117]", + "2 [119:video/ulpfec/90000/0]"}, + .callee_remote = { + "1 [111:audio/opus/48000/2;minptime=10;useinbandfec=1]", + "1 [63:audio/red/48000/2;=111/111]", + "1 [9:audio/G722/8000/1]", + "1 [0:audio/PCMU/8000/1]", + "1 [8:audio/PCMA/8000/1]", + "1 [13:audio/CN/8000/1]", + "1 [110:audio/telephone-event/48000/1]", + "1 [126:audio/telephone-event/8000/1]", + "2 [96:video/VP8/90000/0]", + "2 [97:video/rtx/90000/0;apt=96]", + "2 " + "[100:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "42001f]", + "2 [101:video/rtx/90000/0;apt=100]", + "2 " + "[103:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42001f]", + "2 [104:video/rtx/90000/0;apt=103]", + "2 " + "[107:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "42e01f]", + "2 [108:video/rtx/90000/0;apt=107]", + "2 " + "[109:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "42e01f]", + "2 [114:video/rtx/90000/0;apt=109]", + "2 " + "[115:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=" + "4d001f]", + "2 [116:video/rtx/90000/0;apt=115]", + "2 " + "[39:video/H264/90000/" + "0;level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=" + "4d001f]", + "2 [40:video/rtx/90000/0;apt=39]", + "2 [98:video/VP9/90000/0;profile-id=0]", + "2 [99:video/rtx/90000/0;apt=98]", + "2 [117:video/red/90000/0]", + "2 [118:video/rtx/90000/0;apt=117]", + "2 [119:video/ulpfec/90000/0]"}}}; + auto this_golden_it = + std::find_if(golden_answers.begin(), golden_answers.end(), + [&](const ResultingCodecList& candidate) { + return candidate.factory_id == factory_signature.id(); + }); + ASSERT_THAT(this_golden_it, Not(Eq(golden_answers.end()))) + << "Add this result set to golden_answers:\n" + << DumpAsResultingCodecList( + factory_signature.id(), + CodecList(*caller()->pc()->local_description()), + CodecList(*caller()->pc()->remote_description()), + CodecList(*callee()->pc()->local_description()), + CodecList(*callee()->pc()->remote_description())); + + const ResultingCodecList& this_golden = *this_golden_it; + EXPECT_THAT(CodecList(*caller()->pc()->local_description()), + ElementsAreArray(this_golden.caller_local)); + EXPECT_THAT(CodecList(*caller()->pc()->remote_description()), + ElementsAreArray(this_golden.caller_remote)); + EXPECT_THAT(CodecList(*callee()->pc()->local_description()), + ElementsAreArray(this_golden.callee_local)); + EXPECT_THAT(CodecList(*callee()->pc()->remote_description()), + ElementsAreArray(this_golden.callee_remote)); +} + +} // namespace +} // namespace webrtc diff --git a/pc/peer_connection_svc_integrationtest.cc 
b/pc/peer_connection_svc_integrationtest.cc index 672f3eef99..de3b035431 100644 --- a/pc/peer_connection_svc_integrationtest.cc +++ b/pc/peer_connection_svc_integrationtest.cc @@ -13,18 +13,23 @@ #include -#include +#include #include #include "absl/strings/match.h" +#include "absl/strings/string_view.h" +#include "api/media_types.h" +#include "api/peer_connection_interface.h" #include "api/rtc_error.h" #include "api/rtp_parameters.h" #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" +#include "api/test/rtc_error_matchers.h" +#include "media/base/media_constants.h" #include "pc/test/integration_test_helpers.h" -#include "rtc_base/gunit.h" -#include "rtc_base/helpers.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" namespace webrtc { @@ -37,14 +42,13 @@ class PeerConnectionSVCIntegrationTest : PeerConnectionIntegrationBaseTest(SdpSemantics::kUnifiedPlan) {} RTCError SetCodecPreferences( - rtc::scoped_refptr transceiver, + scoped_refptr transceiver, absl::string_view codec_name) { - webrtc::RtpCapabilities capabilities = - caller()->pc_factory()->GetRtpSenderCapabilities( - cricket::MEDIA_TYPE_VIDEO); + RtpCapabilities capabilities = + caller()->pc_factory()->GetRtpReceiverCapabilities( + webrtc::MediaType::VIDEO); std::vector codecs; - for (const webrtc::RtpCodecCapability& codec_capability : - capabilities.codecs) { + for (const RtpCodecCapability& codec_capability : capabilities.codecs) { if (codec_capability.name == codec_name) codecs.push_back(codec_capability); } @@ -55,8 +59,8 @@ class PeerConnectionSVCIntegrationTest TEST_F(PeerConnectionSVCIntegrationTest, AddTransceiverAcceptsL1T1) { ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); - webrtc::RtpTransceiverInit init; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + RtpEncodingParameters encoding_parameters; encoding_parameters.scalability_mode = "L1T1"; init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = @@ -67,8 +71,8 @@ TEST_F(PeerConnectionSVCIntegrationTest, AddTransceiverAcceptsL1T1) { TEST_F(PeerConnectionSVCIntegrationTest, AddTransceiverAcceptsL3T3) { ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); - webrtc::RtpTransceiverInit init; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + RtpEncodingParameters encoding_parameters; encoding_parameters.scalability_mode = "L3T3"; init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = @@ -80,33 +84,32 @@ TEST_F(PeerConnectionSVCIntegrationTest, AddTransceiverRejectsUnknownScalabilityMode) { ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); - webrtc::RtpTransceiverInit init; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + RtpEncodingParameters encoding_parameters; encoding_parameters.scalability_mode = "FOOBAR"; init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); EXPECT_FALSE(transceiver_or_error.ok()); EXPECT_EQ(transceiver_or_error.error().type(), - webrtc::RTCErrorType::UNSUPPORTED_OPERATION); + RTCErrorType::UNSUPPORTED_OPERATION); } TEST_F(PeerConnectionSVCIntegrationTest, SetParametersAcceptsL1T3WithVP8) { ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); - webrtc::RtpCapabilities capabilities = - caller()->pc_factory()->GetRtpSenderCapabilities( - cricket::MEDIA_TYPE_VIDEO); + RtpCapabilities 
capabilities = + caller()->pc_factory()->GetRtpReceiverCapabilities( + webrtc::MediaType::VIDEO); std::vector vp8_codec; - for (const webrtc::RtpCodecCapability& codec_capability : - capabilities.codecs) { - if (codec_capability.name == cricket::kVp8CodecName) + for (const RtpCodecCapability& codec_capability : capabilities.codecs) { + if (codec_capability.name == kVp8CodecName) vp8_codec.push_back(codec_capability); } - webrtc::RtpTransceiverInit init; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + RtpEncodingParameters encoding_parameters; init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); @@ -114,52 +117,33 @@ TEST_F(PeerConnectionSVCIntegrationTest, SetParametersAcceptsL1T3WithVP8) { auto transceiver = transceiver_or_error.MoveValue(); EXPECT_TRUE(transceiver->SetCodecPreferences(vp8_codec).ok()); - webrtc::RtpParameters parameters = transceiver->sender()->GetParameters(); + RtpParameters parameters = transceiver->sender()->GetParameters(); ASSERT_EQ(parameters.encodings.size(), 1u); parameters.encodings[0].scalability_mode = "L1T3"; auto result = transceiver->sender()->SetParameters(parameters); EXPECT_TRUE(result.ok()); } -TEST_F(PeerConnectionSVCIntegrationTest, SetParametersRejectsL3T3WithVP8) { - ASSERT_TRUE(CreatePeerConnectionWrappers()); - ConnectFakeSignaling(); - - webrtc::RtpTransceiverInit init; - webrtc::RtpEncodingParameters encoding_parameters; - init.send_encodings.push_back(encoding_parameters); - auto transceiver_or_error = - caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); - ASSERT_TRUE(transceiver_or_error.ok()); - auto transceiver = transceiver_or_error.MoveValue(); - EXPECT_TRUE(SetCodecPreferences(transceiver, cricket::kVp8CodecName).ok()); - - webrtc::RtpParameters parameters = transceiver->sender()->GetParameters(); - ASSERT_EQ(parameters.encodings.size(), 1u); - parameters.encodings[0].scalability_mode = "L3T3"; - auto result = transceiver->sender()->SetParameters(parameters); - EXPECT_FALSE(result.ok()); - EXPECT_EQ(result.type(), webrtc::RTCErrorType::INVALID_MODIFICATION); -} - TEST_F(PeerConnectionSVCIntegrationTest, SetParametersAcceptsL1T3WithVP8AfterNegotiation) { ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); - webrtc::RtpTransceiverInit init; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + RtpEncodingParameters encoding_parameters; init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); ASSERT_TRUE(transceiver_or_error.ok()); auto transceiver = transceiver_or_error.MoveValue(); - EXPECT_TRUE(SetCodecPreferences(transceiver, cricket::kVp8CodecName).ok()); + EXPECT_TRUE(SetCodecPreferences(transceiver, kVp8CodecName).ok()); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); - webrtc::RtpParameters parameters = transceiver->sender()->GetParameters(); + RtpParameters parameters = transceiver->sender()->GetParameters(); ASSERT_EQ(parameters.encodings.size(), 1u); parameters.encodings[0].scalability_mode = "L1T3"; auto result = transceiver->sender()->SetParameters(parameters); @@ -171,19 +155,21 @@ TEST_F(PeerConnectionSVCIntegrationTest, ASSERT_TRUE(CreatePeerConnectionWrappers()); 
ConnectFakeSignaling(); - webrtc::RtpTransceiverInit init; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + RtpEncodingParameters encoding_parameters; init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); ASSERT_TRUE(transceiver_or_error.ok()); auto transceiver = transceiver_or_error.MoveValue(); - EXPECT_TRUE(SetCodecPreferences(transceiver, cricket::kVp9CodecName).ok()); + EXPECT_TRUE(SetCodecPreferences(transceiver, kVp9CodecName).ok()); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); - webrtc::RtpParameters parameters = transceiver->sender()->GetParameters(); + RtpParameters parameters = transceiver->sender()->GetParameters(); ASSERT_EQ(parameters.encodings.size(), 1u); parameters.encodings[0].scalability_mode = "L3T3"; auto result = transceiver->sender()->SetParameters(parameters); @@ -195,24 +181,26 @@ TEST_F(PeerConnectionSVCIntegrationTest, ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); - webrtc::RtpTransceiverInit init; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + RtpEncodingParameters encoding_parameters; init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); ASSERT_TRUE(transceiver_or_error.ok()); auto transceiver = transceiver_or_error.MoveValue(); - EXPECT_TRUE(SetCodecPreferences(transceiver, cricket::kVp8CodecName).ok()); + EXPECT_TRUE(SetCodecPreferences(transceiver, kVp8CodecName).ok()); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); - webrtc::RtpParameters parameters = transceiver->sender()->GetParameters(); + RtpParameters parameters = transceiver->sender()->GetParameters(); ASSERT_EQ(parameters.encodings.size(), 1u); parameters.encodings[0].scalability_mode = "L3T3"; auto result = transceiver->sender()->SetParameters(parameters); EXPECT_FALSE(result.ok()); - EXPECT_EQ(result.type(), webrtc::RTCErrorType::INVALID_MODIFICATION); + EXPECT_EQ(result.type(), RTCErrorType::INVALID_MODIFICATION); } TEST_F(PeerConnectionSVCIntegrationTest, @@ -220,62 +208,64 @@ TEST_F(PeerConnectionSVCIntegrationTest, ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); - webrtc::RtpTransceiverInit init; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + RtpEncodingParameters encoding_parameters; init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); ASSERT_TRUE(transceiver_or_error.ok()); auto transceiver = transceiver_or_error.MoveValue(); - EXPECT_TRUE(SetCodecPreferences(transceiver, cricket::kVp9CodecName).ok()); + EXPECT_TRUE(SetCodecPreferences(transceiver, kVp9CodecName).ok()); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); - webrtc::RtpParameters parameters = transceiver->sender()->GetParameters(); + RtpParameters parameters = transceiver->sender()->GetParameters(); 
ASSERT_EQ(parameters.encodings.size(), 1u); parameters.encodings[0].scalability_mode = "FOOBAR"; auto result = transceiver->sender()->SetParameters(parameters); EXPECT_FALSE(result.ok()); - EXPECT_EQ(result.type(), webrtc::RTCErrorType::INVALID_MODIFICATION); + EXPECT_EQ(result.type(), RTCErrorType::INVALID_MODIFICATION); } TEST_F(PeerConnectionSVCIntegrationTest, FallbackToL1Tx) { ASSERT_TRUE(CreatePeerConnectionWrappers()); ConnectFakeSignaling(); - webrtc::RtpTransceiverInit init; - webrtc::RtpEncodingParameters encoding_parameters; + RtpTransceiverInit init; + RtpEncodingParameters encoding_parameters; init.send_encodings.push_back(encoding_parameters); auto transceiver_or_error = caller()->pc()->AddTransceiver(caller()->CreateLocalVideoTrack(), init); ASSERT_TRUE(transceiver_or_error.ok()); auto caller_transceiver = transceiver_or_error.MoveValue(); - webrtc::RtpCapabilities capabilities = - caller()->pc_factory()->GetRtpSenderCapabilities( - cricket::MEDIA_TYPE_VIDEO); + RtpCapabilities capabilities = + caller()->pc_factory()->GetRtpReceiverCapabilities( + webrtc::MediaType::VIDEO); std::vector send_codecs = capabilities.codecs; // Only keep VP9 in the caller send_codecs.erase(std::partition(send_codecs.begin(), send_codecs.end(), [](const auto& codec) -> bool { - return codec.name == - cricket::kVp9CodecName; + return codec.name == kVp9CodecName; }), send_codecs.end()); ASSERT_FALSE(send_codecs.empty()); caller_transceiver->SetCodecPreferences(send_codecs); // L3T3 should be supported by VP9 - webrtc::RtpParameters parameters = - caller_transceiver->sender()->GetParameters(); + RtpParameters parameters = caller_transceiver->sender()->GetParameters(); ASSERT_EQ(parameters.encodings.size(), 1u); parameters.encodings[0].scalability_mode = "L3T3"; auto result = caller_transceiver->sender()->SetParameters(parameters); EXPECT_TRUE(result.ok()); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); parameters = caller_transceiver->sender()->GetParameters(); ASSERT_TRUE(parameters.encodings[0].scalability_mode.has_value()); @@ -286,8 +276,7 @@ TEST_F(PeerConnectionSVCIntegrationTest, FallbackToL1Tx) { send_codecs = capabilities.codecs; send_codecs.erase(std::partition(send_codecs.begin(), send_codecs.end(), [](const auto& codec) -> bool { - return codec.name == - cricket::kVp8CodecName; + return codec.name == kVp8CodecName; }), send_codecs.end()); ASSERT_FALSE(send_codecs.empty()); @@ -295,7 +284,9 @@ TEST_F(PeerConnectionSVCIntegrationTest, FallbackToL1Tx) { // Renegotiate to force the new codec list to be used caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Fallback should happen and L3T3 is not used anymore parameters = caller_transceiver->sender()->GetParameters(); diff --git a/pc/peer_connection_wrapper.cc b/pc/peer_connection_wrapper.cc index 44f4256b10..4f547c77e7 100644 --- a/pc/peer_connection_wrapper.cc +++ b/pc/peer_connection_wrapper.cc @@ -12,30 +12,45 @@ #include +#include +#include +#include #include #include -#include "absl/types/optional.h" +#include "api/data_channel_interface.h" #include "api/function_view.h" -#include "api/set_remote_description_observer_interface.h" +#include "api/jsep.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" 
+#include "api/media_types.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/stats/rtc_stats_report.h" +#include "api/test/rtc_error_matchers.h" +#include "pc/peer_connection.h" +#include "pc/peer_connection_proxy.h" #include "pc/sdp_utils.h" #include "pc/test/fake_video_track_source.h" +#include "pc/test/mock_peer_connection_observers.h" #include "rtc_base/checks.h" -#include "rtc_base/gunit.h" #include "rtc_base/logging.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" namespace webrtc { +using ::testing::Eq; using RTCOfferAnswerOptions = PeerConnectionInterface::RTCOfferAnswerOptions; -namespace { -const uint32_t kDefaultTimeout = 10000U; -} - PeerConnectionWrapper::PeerConnectionWrapper( - rtc::scoped_refptr pc_factory, - rtc::scoped_refptr pc, + scoped_refptr pc_factory, + scoped_refptr pc, std::unique_ptr observer) : pc_factory_(std::move(pc_factory)), observer_(std::move(observer)), @@ -63,6 +78,13 @@ MockPeerConnectionObserver* PeerConnectionWrapper::observer() { return observer_.get(); } +PeerConnection* PeerConnectionWrapper::GetInternalPeerConnection() { + auto* pci = + static_cast*>( + pc()); + return static_cast(pci->internal()); +} + std::unique_ptr PeerConnectionWrapper::CreateOffer() { return CreateOffer(RTCOfferAnswerOptions()); @@ -132,11 +154,13 @@ PeerConnectionWrapper::CreateRollback() { } std::unique_ptr PeerConnectionWrapper::CreateSdp( - rtc::FunctionView fn, + FunctionView fn, std::string* error_out) { - auto observer = rtc::make_ref_counted(); + auto observer = make_ref_counted(); fn(observer.get()); - EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return observer->called(); }, ::testing::IsTrue()), + IsRtcOk()); if (error_out && !observer->result()) { *error_out = observer->error(); } @@ -153,6 +177,20 @@ bool PeerConnectionWrapper::SetLocalDescription( error_out); } +bool PeerConnectionWrapper::SetLocalDescription( + std::unique_ptr desc, + RTCError* error_out) { + auto observer = make_ref_counted(); + pc()->SetLocalDescription(std::move(desc), observer); + EXPECT_THAT( + WaitUntil([&] { return observer->called(); }, ::testing::IsTrue()), + IsRtcOk()); + bool ok = observer->error().ok(); + if (error_out) + *error_out = std::move(observer->error()); + return ok; +} + bool PeerConnectionWrapper::SetRemoteDescription( std::unique_ptr desc, std::string* error_out) { @@ -166,9 +204,11 @@ bool PeerConnectionWrapper::SetRemoteDescription( bool PeerConnectionWrapper::SetRemoteDescription( std::unique_ptr desc, RTCError* error_out) { - auto observer = rtc::make_ref_counted(); + auto observer = make_ref_counted(); pc()->SetRemoteDescription(std::move(desc), observer); - EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return observer->called(); }, ::testing::IsTrue()), + IsRtcOk()); bool ok = observer->error().ok(); if (error_out) *error_out = std::move(observer->error()); @@ -176,11 +216,13 @@ bool PeerConnectionWrapper::SetRemoteDescription( } bool PeerConnectionWrapper::SetSdp( - rtc::FunctionView fn, + FunctionView fn, std::string* error_out) { - auto observer = rtc::make_ref_counted(); + auto observer = make_ref_counted(); fn(observer.get()); - EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return observer->called(); }, 
::testing::IsTrue()), + IsRtcOk()); if (error_out && !observer->result()) { *error_out = observer->error(); } @@ -234,87 +276,84 @@ bool PeerConnectionWrapper::ExchangeOfferAnswerWith( return set_remote_answer; } -rtc::scoped_refptr -PeerConnectionWrapper::AddTransceiver(cricket::MediaType media_type) { - RTCErrorOr> result = +scoped_refptr PeerConnectionWrapper::AddTransceiver( + webrtc::MediaType media_type) { + RTCErrorOr> result = pc()->AddTransceiver(media_type); EXPECT_EQ(RTCErrorType::NONE, result.error().type()); return result.MoveValue(); } -rtc::scoped_refptr -PeerConnectionWrapper::AddTransceiver(cricket::MediaType media_type, - const RtpTransceiverInit& init) { - RTCErrorOr> result = +scoped_refptr PeerConnectionWrapper::AddTransceiver( + webrtc::MediaType media_type, + const RtpTransceiverInit& init) { + RTCErrorOr> result = pc()->AddTransceiver(media_type, init); EXPECT_EQ(RTCErrorType::NONE, result.error().type()); return result.MoveValue(); } -rtc::scoped_refptr -PeerConnectionWrapper::AddTransceiver( - rtc::scoped_refptr track) { - RTCErrorOr> result = +scoped_refptr PeerConnectionWrapper::AddTransceiver( + scoped_refptr track) { + RTCErrorOr> result = pc()->AddTransceiver(track); EXPECT_EQ(RTCErrorType::NONE, result.error().type()); return result.MoveValue(); } -rtc::scoped_refptr -PeerConnectionWrapper::AddTransceiver( - rtc::scoped_refptr track, +scoped_refptr PeerConnectionWrapper::AddTransceiver( + scoped_refptr track, const RtpTransceiverInit& init) { - RTCErrorOr> result = + RTCErrorOr> result = pc()->AddTransceiver(track, init); EXPECT_EQ(RTCErrorType::NONE, result.error().type()); return result.MoveValue(); } -rtc::scoped_refptr PeerConnectionWrapper::CreateAudioTrack( +scoped_refptr PeerConnectionWrapper::CreateAudioTrack( const std::string& label) { return pc_factory()->CreateAudioTrack(label, nullptr); } -rtc::scoped_refptr PeerConnectionWrapper::CreateVideoTrack( +scoped_refptr PeerConnectionWrapper::CreateVideoTrack( const std::string& label) { return pc_factory()->CreateVideoTrack(FakeVideoTrackSource::Create(), label); } -rtc::scoped_refptr PeerConnectionWrapper::AddTrack( - rtc::scoped_refptr track, +scoped_refptr PeerConnectionWrapper::AddTrack( + scoped_refptr track, const std::vector& stream_ids) { - RTCErrorOr> result = + RTCErrorOr> result = pc()->AddTrack(track, stream_ids); EXPECT_EQ(RTCErrorType::NONE, result.error().type()); return result.MoveValue(); } -rtc::scoped_refptr PeerConnectionWrapper::AddTrack( - rtc::scoped_refptr track, +scoped_refptr PeerConnectionWrapper::AddTrack( + scoped_refptr track, const std::vector& stream_ids, const std::vector& init_send_encodings) { - RTCErrorOr> result = + RTCErrorOr> result = pc()->AddTrack(track, stream_ids, init_send_encodings); EXPECT_EQ(RTCErrorType::NONE, result.error().type()); return result.MoveValue(); } -rtc::scoped_refptr PeerConnectionWrapper::AddAudioTrack( +scoped_refptr PeerConnectionWrapper::AddAudioTrack( const std::string& track_label, const std::vector& stream_ids) { return AddTrack(CreateAudioTrack(track_label), stream_ids); } -rtc::scoped_refptr PeerConnectionWrapper::AddVideoTrack( +scoped_refptr PeerConnectionWrapper::AddVideoTrack( const std::string& track_label, const std::vector& stream_ids) { return AddTrack(CreateVideoTrack(track_label), stream_ids); } -rtc::scoped_refptr -PeerConnectionWrapper::CreateDataChannel( +scoped_refptr PeerConnectionWrapper::CreateDataChannel( const std::string& label, - const absl::optional& config) { + const std::optional& config) { const 
DataChannelInit* config_ptr = config.has_value() ? &(*config) : nullptr; auto result = pc()->CreateDataChannelOrError(label, config_ptr); if (!result.ok()) { @@ -339,11 +378,12 @@ bool PeerConnectionWrapper::IsIceConnected() { return observer()->ice_connected_; } -rtc::scoped_refptr -PeerConnectionWrapper::GetStats() { - auto callback = rtc::make_ref_counted(); +scoped_refptr PeerConnectionWrapper::GetStats() { + auto callback = make_ref_counted(); pc()->GetStats(callback.get()); - EXPECT_TRUE_WAIT(callback->called(), kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return callback->called(); }, ::testing::IsTrue()), + IsRtcOk()); return callback->report(); } diff --git a/pc/peer_connection_wrapper.h b/pc/peer_connection_wrapper.h index bf40bbcfb8..e9996cbec8 100644 --- a/pc/peer_connection_wrapper.h +++ b/pc/peer_connection_wrapper.h @@ -12,10 +12,10 @@ #define PC_PEER_CONNECTION_WRAPPER_H_ #include +#include #include #include -#include "absl/types/optional.h" #include "api/data_channel_interface.h" #include "api/function_view.h" #include "api/jsep.h" @@ -23,10 +23,12 @@ #include "api/media_types.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" +#include "api/rtp_parameters.h" #include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" #include "api/stats/rtc_stats_report.h" +#include "pc/peer_connection.h" #include "pc/test/mock_peer_connection_observers.h" namespace webrtc { @@ -52,8 +54,8 @@ class PeerConnectionWrapper { // PeerConnection and the MockPeerConnectionObserver should be the observer // that is watching the PeerConnection. PeerConnectionWrapper( - rtc::scoped_refptr pc_factory, - rtc::scoped_refptr pc, + scoped_refptr pc_factory, + scoped_refptr pc, std::unique_ptr observer); virtual ~PeerConnectionWrapper(); @@ -61,6 +63,8 @@ class PeerConnectionWrapper { PeerConnectionInterface* pc(); MockPeerConnectionObserver* observer(); + PeerConnection* GetInternalPeerConnection(); + // Calls the underlying PeerConnection's CreateOffer method and returns the // resulting SessionDescription once it is available. If the method call // failed, null is returned. @@ -95,6 +99,8 @@ class PeerConnectionWrapper { // Returns true if the description was successfully set. bool SetLocalDescription(std::unique_ptr desc, std::string* error_out = nullptr); + bool SetLocalDescription(std::unique_ptr desc, + RTCError* error_out); // Calls the underlying PeerConnection's SetRemoteDescription method with the // given session description and waits for the success/failure response. // Returns true if the description was successfully set. @@ -125,53 +131,51 @@ class PeerConnectionWrapper { // The following are wrappers for the underlying PeerConnection's // AddTransceiver method. They return the result of calling AddTransceiver // with the given arguments, DCHECKing if there is an error. - rtc::scoped_refptr AddTransceiver( - cricket::MediaType media_type); - rtc::scoped_refptr AddTransceiver( - cricket::MediaType media_type, + scoped_refptr AddTransceiver( + webrtc::MediaType media_type); + scoped_refptr AddTransceiver( + webrtc::MediaType media_type, const RtpTransceiverInit& init); - rtc::scoped_refptr AddTransceiver( - rtc::scoped_refptr track); - rtc::scoped_refptr AddTransceiver( - rtc::scoped_refptr track, + scoped_refptr AddTransceiver( + scoped_refptr track); + scoped_refptr AddTransceiver( + scoped_refptr track, const RtpTransceiverInit& init); // Returns a new dummy audio track with the given label. 
- rtc::scoped_refptr CreateAudioTrack( - const std::string& label); + scoped_refptr CreateAudioTrack(const std::string& label); // Returns a new dummy video track with the given label. - rtc::scoped_refptr CreateVideoTrack( - const std::string& label); + scoped_refptr CreateVideoTrack(const std::string& label); // Wrapper for the underlying PeerConnection's AddTrack method. DCHECKs if // AddTrack fails. - rtc::scoped_refptr AddTrack( - rtc::scoped_refptr track, + scoped_refptr AddTrack( + scoped_refptr track, const std::vector& stream_ids = {}); - rtc::scoped_refptr AddTrack( - rtc::scoped_refptr track, + scoped_refptr AddTrack( + scoped_refptr track, const std::vector& stream_ids, const std::vector& init_send_encodings); // Calls the underlying PeerConnection's AddTrack method with an audio media // stream track not bound to any source. - rtc::scoped_refptr AddAudioTrack( + scoped_refptr AddAudioTrack( const std::string& track_label, const std::vector& stream_ids = {}); // Calls the underlying PeerConnection's AddTrack method with a video media // stream track fed by a FakeVideoTrackSource. - rtc::scoped_refptr AddVideoTrack( + scoped_refptr AddVideoTrack( const std::string& track_label, const std::vector& stream_ids = {}); // Calls the underlying PeerConnection's CreateDataChannel method with default // initialization parameters. - rtc::scoped_refptr CreateDataChannel( + scoped_refptr CreateDataChannel( const std::string& label, - const absl::optional& config = absl::nullopt); + const std::optional& config = std::nullopt); // Returns the signaling state of the underlying PeerConnection. PeerConnectionInterface::SignalingState signaling_state(); @@ -184,18 +188,18 @@ class PeerConnectionWrapper { // Calls GetStats() on the underlying PeerConnection and returns the resulting // report. If GetStats() fails, this method returns null and fails the test. - rtc::scoped_refptr GetStats(); + scoped_refptr GetStats(); private: std::unique_ptr CreateSdp( - rtc::FunctionView fn, + FunctionView fn, std::string* error_out); - bool SetSdp(rtc::FunctionView fn, + bool SetSdp(FunctionView fn, std::string* error_out); - rtc::scoped_refptr pc_factory_; + scoped_refptr pc_factory_; std::unique_ptr observer_; - rtc::scoped_refptr pc_; + scoped_refptr pc_; }; } // namespace webrtc diff --git a/pc/proxy.cc b/pc/proxy.cc deleted file mode 100644 index 5f4e0b8832..0000000000 --- a/pc/proxy.cc +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "pc/proxy.h" - -#include "rtc_base/trace_event.h" - -namespace webrtc { -namespace proxy_internal { -ScopedTrace::ScopedTrace(const char* class_and_method_name) - : class_and_method_name_(class_and_method_name) { - TRACE_EVENT_BEGIN0("webrtc", class_and_method_name_); -} -ScopedTrace::~ScopedTrace() { - TRACE_EVENT_END0("webrtc", class_and_method_name_); -} -} // namespace proxy_internal -} // namespace webrtc diff --git a/pc/proxy.h b/pc/proxy.h index f39b4a59e2..fbc66343ea 100644 --- a/pc/proxy.h +++ b/pc/proxy.h @@ -21,7 +21,7 @@ // // Example usage: // -// class TestInterface : public rtc::RefCountInterface { +// class TestInterface : public RefCountInterface { // public: // std::string FooA() = 0; // std::string FooB(bool arg1) const = 0; @@ -64,30 +64,20 @@ #include #include +#include "api/make_ref_counted.h" #include "api/scoped_refptr.h" #include "api/task_queue/task_queue_base.h" #include "rtc_base/event.h" #include "rtc_base/string_utils.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/thread.h" +#include "rtc_base/trace_event.h" #if !defined(RTC_DISABLE_PROXY_TRACE_EVENTS) && !defined(WEBRTC_CHROMIUM_BUILD) #define RTC_DISABLE_PROXY_TRACE_EVENTS #endif namespace webrtc { -namespace proxy_internal { - -// Class for tracing the lifetime of MethodCall::Marshal. -class ScopedTrace { - public: - explicit ScopedTrace(const char* class_and_method_name); - ~ScopedTrace(); - - private: - [[maybe_unused]] const char* const class_and_method_name_; -}; -} // namespace proxy_internal template class ReturnType { @@ -123,7 +113,7 @@ class MethodCall { m_(m), args_(std::forward_as_tuple(std::forward(args)...)) {} - R Marshal(rtc::Thread* t) { + R Marshal(Thread* t) { if (t->IsCurrent()) { Invoke(std::index_sequence_for()); } else { @@ -131,7 +121,7 @@ class MethodCall { Invoke(std::index_sequence_for()); event_.Set(); }); - event_.Wait(rtc::Event::kForever); + event_.Wait(Event::kForever); } return r_.moved_result(); } @@ -146,7 +136,7 @@ class MethodCall { Method m_; ReturnType r_; std::tuple args_; - rtc::Event event_; + Event event_; }; template @@ -158,7 +148,7 @@ class ConstMethodCall { m_(m), args_(std::forward_as_tuple(std::forward(args)...)) {} - R Marshal(rtc::Thread* t) { + R Marshal(Thread* t) { if (t->IsCurrent()) { Invoke(std::index_sequence_for()); } else { @@ -166,7 +156,7 @@ class ConstMethodCall { Invoke(std::index_sequence_for()); event_.Set(); }); - event_.Wait(rtc::Event::kForever); + event_.Wait(Event::kForever); } return r_.moved_result(); } @@ -181,7 +171,7 @@ class ConstMethodCall { Method m_; ReturnType r_; std::tuple args_; - rtc::Event event_; + Event event_; }; #define PROXY_STRINGIZE_IMPL(x) #x @@ -216,27 +206,27 @@ class ConstMethodCall { constexpr char class_name##ProxyWithInternal::proxy_name_[]; // clang-format on -#define PRIMARY_PROXY_MAP_BOILERPLATE(class_name) \ - protected: \ - class_name##ProxyWithInternal(rtc::Thread* primary_thread, \ - rtc::scoped_refptr c) \ - : primary_thread_(primary_thread), c_(std::move(c)) {} \ - \ - private: \ - mutable rtc::Thread* primary_thread_; - -#define SECONDARY_PROXY_MAP_BOILERPLATE(class_name) \ - protected: \ - class_name##ProxyWithInternal(rtc::Thread* primary_thread, \ - rtc::Thread* secondary_thread, \ - rtc::scoped_refptr c) \ - : primary_thread_(primary_thread), \ - secondary_thread_(secondary_thread), \ - c_(std::move(c)) {} \ - \ - private: \ - mutable rtc::Thread* primary_thread_; \ - mutable rtc::Thread* secondary_thread_; +#define PRIMARY_PROXY_MAP_BOILERPLATE(class_name) \ + 
protected: \ + class_name##ProxyWithInternal(Thread* primary_thread, \ + scoped_refptr c) \ + : primary_thread_(primary_thread), c_(std::move(c)) {} \ + \ + private: \ + mutable Thread* primary_thread_; + +#define SECONDARY_PROXY_MAP_BOILERPLATE(class_name) \ + protected: \ + class_name##ProxyWithInternal(Thread* primary_thread, \ + Thread* secondary_thread, \ + scoped_refptr c) \ + : primary_thread_(primary_thread), \ + secondary_thread_(secondary_thread), \ + c_(std::move(c)) {} \ + \ + private: \ + mutable Thread* primary_thread_; \ + mutable Thread* secondary_thread_; // Note that the destructor is protected so that the proxy can only be // destroyed via RefCountInterface. @@ -258,7 +248,7 @@ class ConstMethodCall { void DestroyInternal() { \ c_ = nullptr; \ } \ - rtc::scoped_refptr c_; + scoped_refptr c_; // Note: This doesn't use a unique_ptr, because it intends to handle a corner // case where an object's deletion triggers a callback that calls back into @@ -290,35 +280,35 @@ class ConstMethodCall { PRIMARY_PROXY_MAP_BOILERPLATE(class_name) \ REFCOUNTED_PROXY_MAP_BOILERPLATE(class_name) \ public: \ - static rtc::scoped_refptr Create( \ - rtc::Thread* primary_thread, rtc::scoped_refptr c) { \ - return rtc::make_ref_counted( \ - primary_thread, std::move(c)); \ + static scoped_refptr Create( \ + Thread* primary_thread, scoped_refptr c) { \ + return make_ref_counted(primary_thread, \ + std::move(c)); \ } -#define BEGIN_PROXY_MAP(class_name) \ - PROXY_MAP_BOILERPLATE(class_name) \ - SECONDARY_PROXY_MAP_BOILERPLATE(class_name) \ - REFCOUNTED_PROXY_MAP_BOILERPLATE(class_name) \ - public: \ - static rtc::scoped_refptr Create( \ - rtc::Thread* primary_thread, rtc::Thread* secondary_thread, \ - rtc::scoped_refptr c) { \ - return rtc::make_ref_counted( \ - primary_thread, secondary_thread, std::move(c)); \ +#define BEGIN_PROXY_MAP(class_name) \ + PROXY_MAP_BOILERPLATE(class_name) \ + SECONDARY_PROXY_MAP_BOILERPLATE(class_name) \ + REFCOUNTED_PROXY_MAP_BOILERPLATE(class_name) \ + public: \ + static scoped_refptr Create( \ + Thread* primary_thread, Thread* secondary_thread, \ + scoped_refptr c) { \ + return make_ref_counted( \ + primary_thread, secondary_thread, std::move(c)); \ } -#define PROXY_PRIMARY_THREAD_DESTRUCTOR() \ - private: \ - rtc::Thread* destructor_thread() const { \ - return primary_thread_; \ - } \ - \ +#define PROXY_PRIMARY_THREAD_DESTRUCTOR() \ + private: \ + Thread* destructor_thread() const { \ + return primary_thread_; \ + } \ + \ public: // NOLINTNEXTLINE #define PROXY_SECONDARY_THREAD_DESTRUCTOR() \ private: \ - rtc::Thread* destructor_thread() const { \ + Thread* destructor_thread() const { \ return secondary_thread_; \ } \ \ @@ -329,12 +319,12 @@ class ConstMethodCall { do { \ } while (0) #else // if defined(RTC_DISABLE_PROXY_TRACE_EVENTS) -#define TRACE_BOILERPLATE(method) \ - static constexpr auto class_and_method_name = \ - rtc::MakeCompileTimeString(proxy_name_) \ - .Concat(rtc::MakeCompileTimeString("::")) \ - .Concat(rtc::MakeCompileTimeString(#method)); \ - proxy_internal::ScopedTrace scoped_trace(class_and_method_name.string) +#define TRACE_BOILERPLATE(method) \ + static constexpr auto class_and_method_name = \ + webrtc::MakeCompileTimeString(proxy_name_) \ + .Concat(webrtc::MakeCompileTimeString("::")) \ + .Concat(webrtc::MakeCompileTimeString(#method)); \ + TRACE_EVENT0("webrtc", class_and_method_name.string) #endif // if defined(RTC_DISABLE_PROXY_TRACE_EVENTS) diff --git a/pc/proxy_unittest.cc b/pc/proxy_unittest.cc index ebfde9fecf..a12e581548 100644 --- 
a/pc/proxy_unittest.cc +++ b/pc/proxy_unittest.cc @@ -14,9 +14,11 @@ #include #include "api/make_ref_counted.h" -#include "rtc_base/gunit.h" -#include "rtc_base/ref_count.h" +#include "api/ref_count.h" +#include "api/scoped_refptr.h" +#include "rtc_base/thread.h" #include "test/gmock.h" +#include "test/gtest.h" using ::testing::_; using ::testing::DoAll; @@ -27,7 +29,7 @@ using ::testing::Return; namespace webrtc { // Interface used for testing here. -class FakeInterface : public rtc::RefCountInterface { +class FakeInterface : public RefCountInterface { public: virtual void VoidMethod0() = 0; virtual std::string Method0() = 0; @@ -43,9 +45,7 @@ class FakeInterface : public rtc::RefCountInterface { // Implementation of the test interface. class Fake : public FakeInterface { public: - static rtc::scoped_refptr Create() { - return rtc::make_ref_counted(); - } + static scoped_refptr Create() { return make_ref_counted(); } // Used to verify destructor is called on the correct thread. MOCK_METHOD(void, Destroy, ()); @@ -95,7 +95,7 @@ class SignalingProxyTest : public ::testing::Test { protected: void SetUp() override { - signaling_thread_ = rtc::Thread::Create(); + signaling_thread_ = Thread::Create(); ASSERT_TRUE(signaling_thread_->Start()); fake_ = Fake::Create(); fake_signaling_proxy_ = @@ -103,9 +103,9 @@ class SignalingProxyTest : public ::testing::Test { } protected: - std::unique_ptr signaling_thread_; - rtc::scoped_refptr fake_signaling_proxy_; - rtc::scoped_refptr fake_; + std::unique_ptr signaling_thread_; + scoped_refptr fake_signaling_proxy_; + scoped_refptr fake_; }; TEST_F(SignalingProxyTest, SignalingThreadDestructor) { @@ -182,8 +182,8 @@ class ProxyTest : public ::testing::Test { protected: void SetUp() override { - signaling_thread_ = rtc::Thread::Create(); - worker_thread_ = rtc::Thread::Create(); + signaling_thread_ = Thread::Create(); + worker_thread_ = Thread::Create(); ASSERT_TRUE(signaling_thread_->Start()); ASSERT_TRUE(worker_thread_->Start()); fake_ = Fake::Create(); @@ -192,10 +192,10 @@ class ProxyTest : public ::testing::Test { } protected: - std::unique_ptr signaling_thread_; - std::unique_ptr worker_thread_; - rtc::scoped_refptr fake_proxy_; - rtc::scoped_refptr fake_; + std::unique_ptr signaling_thread_; + std::unique_ptr worker_thread_; + scoped_refptr fake_proxy_; + scoped_refptr fake_; }; TEST_F(ProxyTest, WorkerThreadDestructor) { diff --git a/pc/remote_audio_source.cc b/pc/remote_audio_source.cc index a516c57617..ecebff5d04 100644 --- a/pc/remote_audio_source.cc +++ b/pc/remote_audio_source.cc @@ -12,17 +12,22 @@ #include +#include #include -#include +#include #include #include "absl/algorithm/container.h" +#include "api/call/audio_sink.h" +#include "api/media_stream_interface.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" +#include "media/base/media_channel.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_format.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/trace_event.h" namespace webrtc { @@ -48,7 +53,7 @@ class RemoteAudioSource::AudioDataProxy : public AudioSinkInterface { } private: - const rtc::scoped_refptr source_; + const scoped_refptr source_; }; RemoteAudioSource::RemoteAudioSource( @@ -70,9 +75,8 @@ RemoteAudioSource::~RemoteAudioSource() { } } -void RemoteAudioSource::Start( - cricket::VoiceMediaReceiveChannelInterface* media_channel, - absl::optional ssrc) { +void 
RemoteAudioSource::Start(VoiceMediaReceiveChannelInterface* media_channel, + std::optional ssrc) { RTC_DCHECK_RUN_ON(worker_thread_); // Register for callbacks immediately before AddSink so that we always get @@ -85,9 +89,8 @@ void RemoteAudioSource::Start( std::make_unique(this)); } -void RemoteAudioSource::Stop( - cricket::VoiceMediaReceiveChannelInterface* media_channel, - absl::optional ssrc) { +void RemoteAudioSource::Stop(VoiceMediaReceiveChannelInterface* media_channel, + std::optional ssrc) { RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(media_channel); ssrc ? media_channel->SetRawAudioSink(*ssrc, nullptr) @@ -115,8 +118,7 @@ bool RemoteAudioSource::remote() const { void RemoteAudioSource::SetVolume(double volume) { RTC_DCHECK_GE(volume, 0); RTC_DCHECK_LE(volume, 10); - RTC_LOG(LS_INFO) << rtc::StringFormat("RAS::%s({volume=%.2f})", __func__, - volume); + RTC_LOG(LS_INFO) << StringFormat("RAS::%s({volume=%.2f})", __func__, volume); for (auto* observer : audio_observers_) { observer->OnSetVolume(volume); } @@ -159,7 +161,7 @@ void RemoteAudioSource::OnData(const AudioSinkInterface::Data& audio) { // absolute capture timestamp. sink->OnData(audio.data, 16, audio.sample_rate, audio.channels, audio.samples_per_channel, - /*absolute_capture_timestamp_ms=*/absl::nullopt); + /*absolute_capture_timestamp_ms=*/std::nullopt); } } @@ -174,7 +176,7 @@ void RemoteAudioSource::OnAudioChannelGone() { // processed (because the task queue was destroyed shortly after this call), // but that is fine because the task queue destructor will take care of // destroying task which will release the reference on RemoteAudioSource. - rtc::scoped_refptr thiz(this); + scoped_refptr thiz(this); main_thread_->PostTask([thiz = std::move(thiz)] { thiz->sinks_.clear(); thiz->SetState(MediaSourceInterface::kEnded); diff --git a/pc/remote_audio_source.h b/pc/remote_audio_source.h index 0fac606ad4..7b54dca464 100644 --- a/pc/remote_audio_source.h +++ b/pc/remote_audio_source.h @@ -14,9 +14,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/call/audio_sink.h" #include "api/media_stream_interface.h" #include "api/notifier.h" @@ -49,10 +49,10 @@ class RemoteAudioSource : public Notifier { // Register and unregister remote audio source with the underlying media // engine. - void Start(cricket::VoiceMediaReceiveChannelInterface* media_channel, - absl::optional ssrc); - void Stop(cricket::VoiceMediaReceiveChannelInterface* media_channel, - absl::optional ssrc); + void Start(VoiceMediaReceiveChannelInterface* media_channel, + std::optional ssrc); + void Stop(VoiceMediaReceiveChannelInterface* media_channel, + std::optional ssrc); void SetState(SourceState new_state); // MediaSourceInterface implementation. 
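The test-side changes in this CL consistently swap the blocking EXPECT_EQ_WAIT / EXPECT_TRUE_WAIT / ASSERT_TRUE_WAIT macros (and their local kDefaultTimeout constants) for the WaitUntil helper from test/wait_until.h, asserted through IsRtcOk() from api/test/rtc_error_matchers.h. Below is a minimal sketch of that pattern, mirroring only the call shape visible in the hunks above; the test name is hypothetical, and any timeout or settings parameters WaitUntil may accept are assumptions not shown in this diff.

// Minimal sketch of the WaitUntil-based wait pattern used in the updated
// tests; mirrors the usage visible in this CL, nothing more.
#include "api/test/rtc_error_matchers.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/wait_until.h"

namespace webrtc {
namespace {

TEST(WaitUntilPatternSketch, WaitsForConditionAndAssertsOk) {
  bool done = false;
  // Stand-in for asynchronous work that eventually flips the flag.
  done = true;

  // Old style (removed by this CL):
  //   EXPECT_TRUE_WAIT(done, kDefaultTimeout);
  // New style: WaitUntil keeps re-evaluating the lambda until the matcher is
  // satisfied (or the wait gives up), and IsRtcOk() asserts that the wait
  // itself succeeded.
  EXPECT_THAT(WaitUntil([&] { return done; }, ::testing::IsTrue()),
              IsRtcOk());
}

}  // namespace
}  // namespace webrtc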
diff --git a/pc/rtc_stats_collector.cc b/pc/rtc_stats_collector.cc index c191f4d65e..a2df878987 100644 --- a/pc/rtc_stats_collector.cc +++ b/pc/rtc_stats_collector.cc @@ -13,55 +13,72 @@ #include #include +#include #include #include #include +#include #include #include -#include #include #include #include "absl/functional/bind_front.h" +#include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/audio/audio_device.h" +#include "api/audio/audio_processing_statistics.h" #include "api/candidate.h" +#include "api/data_channel_interface.h" #include "api/dtls_transport_interface.h" +#include "api/environment/environment.h" +#include "api/make_ref_counted.h" #include "api/media_stream_interface.h" #include "api/media_types.h" #include "api/rtp_parameters.h" +#include "api/rtp_transceiver_direction.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/stats/rtc_stats.h" +#include "api/stats/rtc_stats_collector_callback.h" +#include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" +#include "api/transport/enums.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/video_content_type.h" #include "api/video_codecs/scalability_mode.h" #include "common_video/include/quality_limitation_reason.h" #include "media/base/media_channel.h" -#include "media/base/media_channel_impl.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing_statistics.h" +#include "media/base/stream_params.h" #include "modules/rtp_rtcp/include/report_block_data.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "p2p/base/connection_info.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/port.h" +#include "p2p/base/transport_description.h" #include "pc/channel_interface.h" #include "pc/data_channel_utils.h" +#include "pc/peer_connection_internal.h" #include "pc/rtc_stats_traversal.h" #include "pc/rtp_receiver_proxy.h" #include "pc/rtp_sender_proxy.h" +#include "pc/rtp_transceiver.h" +#include "pc/transport_stats.h" #include "pc/webrtc_sdp.h" #include "rtc_base/checks.h" +#include "rtc_base/event.h" #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/network_constants.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/socket_address.h" +#include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -72,7 +89,7 @@ namespace { const char kDirectionInbound = 'I'; const char kDirectionOutbound = 'O'; -const char* kAudioPlayoutSingletonId = "AP"; +static constexpr char kAudioPlayoutSingletonId[] = "AP"; // TODO(https://crbug.com/webrtc/10656): Consider making IDs less predictable. 
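The ID helpers below all follow the same scheme: a short type prefix plus transport, kind, and SSRC components concatenated with SimpleStringBuilder over a fixed stack buffer. A minimal sketch of that scheme follows, mirroring RTCInboundRtpStreamStatsIDFromSSRC as it appears in this hunk; the function name here is hypothetical and the example ID string is purely illustrative.

// Sketch of the composite stats-ID scheme used by the helpers in this file.
#include <cstdint>
#include <string>

#include "api/media_types.h"
#include "rtc_base/strings/string_builder.h"

namespace webrtc {

// Hypothetical stand-in for RTCInboundRtpStreamStatsIDFromSSRC: 'I' marks an
// inbound-rtp stats object, followed by the owning transport id, an 'A'/'V'
// kind marker, and the stream SSRC, e.g. "ITtransportName1A123456".
std::string SketchInboundRtpStatsId(const std::string& transport_id,
                                    webrtc::MediaType media_type,
                                    uint32_t ssrc) {
  char buf[1024];
  SimpleStringBuilder sb(buf);
  sb << 'I' << transport_id
     << (media_type == webrtc::MediaType::AUDIO ? 'A' : 'V') << ssrc;
  return sb.str();
}

}  // namespace webrtc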
std::string RTCCertificateIDFromFingerprint(const std::string& fingerprint) { @@ -85,12 +102,12 @@ std::string RTCCodecStatsIDFromTransportAndCodecParameters( const std::string& transport_id, const RtpCodecParameters& codec_params) { char buf[1024]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); sb << 'C' << direction << transport_id << '_' << codec_params.payload_type; // TODO(https://crbug.com/webrtc/14420): If we stop supporting different FMTP // lines for the same PT and transport, which should be illegal SDP, then we // wouldn't need `fmtp` to be part of the ID here. - rtc::StringBuilder fmtp; + StringBuilder fmtp; if (WriteFmtpParameters(codec_params.parameters, &fmtp)) { sb << '_' << fmtp.Release(); } @@ -98,9 +115,9 @@ std::string RTCCodecStatsIDFromTransportAndCodecParameters( } std::string RTCIceCandidatePairStatsIDFromConnectionInfo( - const cricket::ConnectionInfo& info) { + const ConnectionInfo& info) { char buf[4096]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); sb << "CP" << info.local_candidate.id() << "_" << info.remote_candidate.id(); return sb.str(); } @@ -109,74 +126,61 @@ std::string RTCTransportStatsIDFromTransportChannel( const std::string& transport_name, int channel_component) { char buf[1024]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); sb << 'T' << transport_name << channel_component; return sb.str(); } std::string RTCInboundRtpStreamStatsIDFromSSRC(const std::string& transport_id, - cricket::MediaType media_type, + webrtc::MediaType media_type, uint32_t ssrc) { char buf[1024]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); sb << 'I' << transport_id - << (media_type == cricket::MEDIA_TYPE_AUDIO ? 'A' : 'V') << ssrc; + << (media_type == webrtc::MediaType::AUDIO ? 'A' : 'V') << ssrc; return sb.str(); } std::string RTCOutboundRtpStreamStatsIDFromSSRC(const std::string& transport_id, - cricket::MediaType media_type, + webrtc::MediaType media_type, uint32_t ssrc) { char buf[1024]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); sb << 'O' << transport_id - << (media_type == cricket::MEDIA_TYPE_AUDIO ? 'A' : 'V') << ssrc; + << (media_type == webrtc::MediaType::AUDIO ? 'A' : 'V') << ssrc; return sb.str(); } std::string RTCRemoteInboundRtpStreamStatsIdFromSourceSsrc( - cricket::MediaType media_type, + webrtc::MediaType media_type, uint32_t source_ssrc) { char buf[1024]; - rtc::SimpleStringBuilder sb(buf); - sb << "RI" << (media_type == cricket::MEDIA_TYPE_AUDIO ? 'A' : 'V') + SimpleStringBuilder sb(buf); + sb << "RI" << (media_type == webrtc::MediaType::AUDIO ? 'A' : 'V') << source_ssrc; return sb.str(); } std::string RTCRemoteOutboundRTPStreamStatsIDFromSSRC( - cricket::MediaType media_type, + webrtc::MediaType media_type, uint32_t source_ssrc) { char buf[1024]; - rtc::SimpleStringBuilder sb(buf); - sb << "RO" << (media_type == cricket::MEDIA_TYPE_AUDIO ? 'A' : 'V') + SimpleStringBuilder sb(buf); + sb << "RO" << (media_type == webrtc::MediaType::AUDIO ? 'A' : 'V') << source_ssrc; return sb.str(); } std::string RTCMediaSourceStatsIDFromKindAndAttachment( - cricket::MediaType media_type, + webrtc::MediaType media_type, int attachment_id) { char buf[1024]; - rtc::SimpleStringBuilder sb(buf); - sb << 'S' << (media_type == cricket::MEDIA_TYPE_AUDIO ? 'A' : 'V') + SimpleStringBuilder sb(buf); + sb << 'S' << (media_type == webrtc::MediaType::AUDIO ? 
'A' : 'V') << attachment_id; return sb.str(); } -const char* CandidateTypeToRTCIceCandidateType(const std::string& type) { - if (type == cricket::LOCAL_PORT_TYPE) - return "host"; - if (type == cricket::STUN_PORT_TYPE) - return "srflx"; - if (type == cricket::PRFLX_PORT_TYPE) - return "prflx"; - if (type == cricket::RELAY_PORT_TYPE) - return "relay"; - RTC_DCHECK_NOTREACHED(); - return nullptr; -} - const char* DataStateToRTCDataChannelState( DataChannelInterface::DataState state) { switch (state) { @@ -195,15 +199,15 @@ const char* DataStateToRTCDataChannelState( } const char* IceCandidatePairStateToRTCStatsIceCandidatePairState( - cricket::IceCandidatePairState state) { + IceCandidatePairState state) { switch (state) { - case cricket::IceCandidatePairState::WAITING: + case IceCandidatePairState::WAITING: return "waiting"; - case cricket::IceCandidatePairState::IN_PROGRESS: + case IceCandidatePairState::IN_PROGRESS: return "in-progress"; - case cricket::IceCandidatePairState::SUCCEEDED: + case IceCandidatePairState::SUCCEEDED: return "succeeded"; - case cricket::IceCandidatePairState::FAILED: + case IceCandidatePairState::FAILED: return "failed"; default: RTC_DCHECK_NOTREACHED(); @@ -211,13 +215,13 @@ const char* IceCandidatePairStateToRTCStatsIceCandidatePairState( } } -const char* IceRoleToRTCIceRole(cricket::IceRole role) { +const char* IceRoleToRTCIceRole(IceRole role) { switch (role) { - case cricket::IceRole::ICEROLE_UNKNOWN: + case IceRole::ICEROLE_UNKNOWN: return "unknown"; - case cricket::IceRole::ICEROLE_CONTROLLED: + case IceRole::ICEROLE_CONTROLLED: return "controlled"; - case cricket::IceRole::ICEROLE_CONTROLLING: + case IceRole::ICEROLE_CONTROLLING: return "controlling"; default: RTC_DCHECK_NOTREACHED(); @@ -266,52 +270,52 @@ const char* IceTransportStateToRTCIceTransportState(IceTransportState state) { } } -const char* NetworkTypeToStatsType(rtc::AdapterType type) { +const char* NetworkTypeToStatsType(AdapterType type) { switch (type) { - case rtc::ADAPTER_TYPE_CELLULAR: - case rtc::ADAPTER_TYPE_CELLULAR_2G: - case rtc::ADAPTER_TYPE_CELLULAR_3G: - case rtc::ADAPTER_TYPE_CELLULAR_4G: - case rtc::ADAPTER_TYPE_CELLULAR_5G: + case ADAPTER_TYPE_CELLULAR: + case ADAPTER_TYPE_CELLULAR_2G: + case ADAPTER_TYPE_CELLULAR_3G: + case ADAPTER_TYPE_CELLULAR_4G: + case ADAPTER_TYPE_CELLULAR_5G: return "cellular"; - case rtc::ADAPTER_TYPE_ETHERNET: + case ADAPTER_TYPE_ETHERNET: return "ethernet"; - case rtc::ADAPTER_TYPE_WIFI: + case ADAPTER_TYPE_WIFI: return "wifi"; - case rtc::ADAPTER_TYPE_VPN: + case ADAPTER_TYPE_VPN: return "vpn"; - case rtc::ADAPTER_TYPE_UNKNOWN: - case rtc::ADAPTER_TYPE_LOOPBACK: - case rtc::ADAPTER_TYPE_ANY: + case ADAPTER_TYPE_UNKNOWN: + case ADAPTER_TYPE_LOOPBACK: + case ADAPTER_TYPE_ANY: return "unknown"; } RTC_DCHECK_NOTREACHED(); return nullptr; } -absl::string_view NetworkTypeToStatsNetworkAdapterType(rtc::AdapterType type) { +absl::string_view NetworkTypeToStatsNetworkAdapterType(AdapterType type) { switch (type) { - case rtc::ADAPTER_TYPE_CELLULAR: + case ADAPTER_TYPE_CELLULAR: return "cellular"; - case rtc::ADAPTER_TYPE_CELLULAR_2G: + case ADAPTER_TYPE_CELLULAR_2G: return "cellular2g"; - case rtc::ADAPTER_TYPE_CELLULAR_3G: + case ADAPTER_TYPE_CELLULAR_3G: return "cellular3g"; - case rtc::ADAPTER_TYPE_CELLULAR_4G: + case ADAPTER_TYPE_CELLULAR_4G: return "cellular4g"; - case rtc::ADAPTER_TYPE_CELLULAR_5G: + case ADAPTER_TYPE_CELLULAR_5G: return "cellular5g"; - case rtc::ADAPTER_TYPE_ETHERNET: + case ADAPTER_TYPE_ETHERNET: return "ethernet"; - case 
rtc::ADAPTER_TYPE_WIFI: + case ADAPTER_TYPE_WIFI: return "wifi"; - case rtc::ADAPTER_TYPE_UNKNOWN: + case ADAPTER_TYPE_UNKNOWN: return "unknown"; - case rtc::ADAPTER_TYPE_LOOPBACK: + case ADAPTER_TYPE_LOOPBACK: return "loopback"; - case rtc::ADAPTER_TYPE_ANY: + case ADAPTER_TYPE_ANY: return "any"; - case rtc::ADAPTER_TYPE_VPN: + case ADAPTER_TYPE_VPN: /* should not be handled here. Vpn is modelled as a bool */ break; } @@ -336,14 +340,14 @@ const char* QualityLimitationReasonToRTCQualityLimitationReason( std::map QualityLimitationDurationToRTCQualityLimitationDuration( - std::map durations_ms) { + std::map durations_ms) { std::map result; // The internal duration is defined in milliseconds while the spec defines // the value in seconds: // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-qualitylimitationdurations for (const auto& elem : durations_ms) { result[QualityLimitationReasonToRTCQualityLimitationReason(elem.first)] = - elem.second / static_cast(rtc::kNumMillisecsPerSec); + elem.second / static_cast(kNumMillisecsPerSec); } return result; } @@ -383,7 +387,7 @@ std::string GetCodecIdAndMaybeCreateCodecStats( codec_stats->channels = *codec_params.num_channels; } - rtc::StringBuilder fmtp; + StringBuilder fmtp; if (WriteFmtpParameters(codec_params.parameters, &fmtp)) { codec_stats->sdp_fmtp_line = fmtp.Release(); } @@ -394,7 +398,7 @@ std::string GetCodecIdAndMaybeCreateCodecStats( // Provides the media independent counters (both audio and video). void SetInboundRTPStreamStatsFromMediaReceiverInfo( - const cricket::MediaReceiverInfo& media_receiver_info, + const MediaReceiverInfo& media_receiver_info, RTCInboundRtpStreamStats* inbound_stats) { RTC_DCHECK(inbound_stats); inbound_stats->ssrc = media_receiver_info.ssrc(); @@ -436,18 +440,20 @@ void SetInboundRTPStreamStatsFromMediaReceiverInfo( if (media_receiver_info.fec_bytes_received.has_value()) { inbound_stats->fec_bytes_received = *media_receiver_info.fec_bytes_received; } + inbound_stats->total_processing_delay = + media_receiver_info.total_processing_delay_seconds; } std::unique_ptr CreateInboundAudioStreamStats( - const cricket::VoiceMediaInfo& voice_media_info, - const cricket::VoiceReceiverInfo& voice_receiver_info, + const VoiceMediaInfo& voice_media_info, + const VoiceReceiverInfo& voice_receiver_info, const std::string& transport_id, const std::string& mid, Timestamp timestamp, RTCStatsReport* report) { auto inbound_audio = std::make_unique( /*id=*/RTCInboundRtpStreamStatsIDFromSSRC( - transport_id, cricket::MEDIA_TYPE_AUDIO, voice_receiver_info.ssrc()), + transport_id, webrtc::MediaType::AUDIO, voice_receiver_info.ssrc()), timestamp); SetInboundRTPStreamStatsFromMediaReceiverInfo(voice_receiver_info, inbound_audio.get()); @@ -464,8 +470,8 @@ std::unique_ptr CreateInboundAudioStreamStats( codec_param_it->second, report); } } - inbound_audio->jitter = static_cast(voice_receiver_info.jitter_ms) / - rtc::kNumMillisecsPerSec; + inbound_audio->jitter = + static_cast(voice_receiver_info.jitter_ms) / kNumMillisecsPerSec; inbound_audio->total_samples_received = voice_receiver_info.total_samples_received; inbound_audio->concealed_samples = voice_receiver_info.concealed_samples; @@ -507,13 +513,13 @@ std::unique_ptr CreateInboundAudioStreamStats( : 0; inbound_audio->total_interruption_duration = static_cast(voice_receiver_info.total_interruption_duration_ms) / - rtc::kNumMillisecsPerSec; + kNumMillisecsPerSec; return inbound_audio; } std::unique_ptr CreateAudioPlayoutStats( const AudioDeviceModule::Stats& 
audio_device_stats, - webrtc::Timestamp timestamp) { + Timestamp timestamp) { auto stats = std::make_unique( /*id=*/kAudioPlayoutSingletonId, timestamp); stats->synthesized_samples_duration = @@ -527,53 +533,59 @@ std::unique_ptr CreateAudioPlayoutStats( } std::unique_ptr -CreateRemoteOutboundAudioStreamStats( - const cricket::VoiceReceiverInfo& voice_receiver_info, +CreateRemoteOutboundMediaStreamStats( + const MediaReceiverInfo& media_receiver_info, const std::string& mid, + webrtc::MediaType media_type, const RTCInboundRtpStreamStats& inbound_audio_stats, - const std::string& transport_id) { - if (!voice_receiver_info.last_sender_report_timestamp_ms.has_value()) { + const std::string& transport_id, + const bool stats_timestamp_with_environment_clock) { + std::optional last_sender_report_timestamp = + stats_timestamp_with_environment_clock + ? media_receiver_info.last_sender_report_timestamp + : media_receiver_info.last_sender_report_utc_timestamp; + if (!last_sender_report_timestamp.has_value()) { // Cannot create `RTCRemoteOutboundRtpStreamStats` when the RTCP SR arrival // timestamp is not available - i.e., until the first sender report is // received. return nullptr; } - RTC_DCHECK_GT(voice_receiver_info.sender_reports_reports_count, 0); + RTC_DCHECK_GT(media_receiver_info.sender_reports_reports_count, 0); // Create. auto stats = std::make_unique( /*id=*/RTCRemoteOutboundRTPStreamStatsIDFromSSRC( - cricket::MEDIA_TYPE_AUDIO, voice_receiver_info.ssrc()), - Timestamp::Millis(*voice_receiver_info.last_sender_report_timestamp_ms)); + media_type, media_receiver_info.ssrc()), + *last_sender_report_timestamp); // Populate. // - RTCRtpStreamStats. - stats->ssrc = voice_receiver_info.ssrc(); - stats->kind = "audio"; + stats->ssrc = media_receiver_info.ssrc(); + stats->kind = webrtc::MediaTypeToString(media_type); stats->transport_id = transport_id; - if (inbound_audio_stats.codec_id.is_defined()) { + if (inbound_audio_stats.codec_id.has_value()) { stats->codec_id = *inbound_audio_stats.codec_id; } // - RTCSentRtpStreamStats. - stats->packets_sent = voice_receiver_info.sender_reports_packets_sent; - stats->bytes_sent = voice_receiver_info.sender_reports_bytes_sent; + stats->packets_sent = media_receiver_info.sender_reports_packets_sent; + stats->bytes_sent = media_receiver_info.sender_reports_bytes_sent; // - RTCRemoteOutboundRtpStreamStats. stats->local_id = inbound_audio_stats.id(); - // last_sender_report_remote_timestamp_ms is set together with - // last_sender_report_timestamp_ms. + // last_sender_report_remote_utc_timestamp_ms is set together with + // last_sender_report_utc_timestamp_ms. 
RTC_DCHECK( - voice_receiver_info.last_sender_report_remote_timestamp_ms.has_value()); - stats->remote_timestamp = static_cast( - *voice_receiver_info.last_sender_report_remote_timestamp_ms); - stats->reports_sent = voice_receiver_info.sender_reports_reports_count; - if (voice_receiver_info.round_trip_time.has_value()) { + media_receiver_info.last_sender_report_remote_utc_timestamp.has_value()); + stats->remote_timestamp = + media_receiver_info.last_sender_report_remote_utc_timestamp->ms(); + stats->reports_sent = media_receiver_info.sender_reports_reports_count; + if (media_receiver_info.round_trip_time.has_value()) { stats->round_trip_time = - voice_receiver_info.round_trip_time->seconds(); + media_receiver_info.round_trip_time->seconds(); } stats->round_trip_time_measurements = - voice_receiver_info.round_trip_time_measurements; + media_receiver_info.round_trip_time_measurements; stats->total_round_trip_time = - voice_receiver_info.total_round_trip_time.seconds(); + media_receiver_info.total_round_trip_time.seconds(); return stats; } @@ -582,13 +594,13 @@ std::unique_ptr CreateInboundRTPStreamStatsFromVideoReceiverInfo( const std::string& transport_id, const std::string& mid, - const cricket::VideoMediaInfo& video_media_info, - const cricket::VideoReceiverInfo& video_receiver_info, + const VideoMediaInfo& video_media_info, + const VideoReceiverInfo& video_receiver_info, Timestamp timestamp, RTCStatsReport* report) { auto inbound_video = std::make_unique( - RTCInboundRtpStreamStatsIDFromSSRC( - transport_id, cricket::MEDIA_TYPE_VIDEO, video_receiver_info.ssrc()), + RTCInboundRtpStreamStatsIDFromSSRC(transport_id, webrtc::MediaType::VIDEO, + video_receiver_info.ssrc()), timestamp); SetInboundRTPStreamStatsFromMediaReceiverInfo(video_receiver_info, inbound_video.get()); @@ -605,8 +617,8 @@ CreateInboundRTPStreamStatsFromVideoReceiverInfo( codec_param_it->second, report); } } - inbound_video->jitter = static_cast(video_receiver_info.jitter_ms) / - rtc::kNumMillisecsPerSec; + inbound_video->jitter = + static_cast(video_receiver_info.jitter_ms) / kNumMillisecsPerSec; inbound_video->fir_count = static_cast(video_receiver_info.firs_sent); inbound_video->pli_count = @@ -629,6 +641,16 @@ CreateInboundRTPStreamStatsFromVideoReceiverInfo( if (video_receiver_info.qp_sum.has_value()) { inbound_video->qp_sum = *video_receiver_info.qp_sum; } + if (video_receiver_info.corruption_score_sum.has_value()) { + RTC_CHECK(video_receiver_info.corruption_score_squared_sum.has_value()); + RTC_CHECK_GT(video_receiver_info.corruption_score_count, 0); + inbound_video->total_corruption_probability = + *video_receiver_info.corruption_score_sum; + inbound_video->total_squared_corruption_probability = + *video_receiver_info.corruption_score_squared_sum; + inbound_video->corruption_measurements = + video_receiver_info.corruption_score_count; + } if (video_receiver_info.timing_frame_info.has_value()) { inbound_video->goog_timing_frame_info = video_receiver_info.timing_frame_info->ToString(); @@ -648,14 +670,14 @@ CreateInboundRTPStreamStatsFromVideoReceiverInfo( inbound_video->pause_count = video_receiver_info.pause_count; inbound_video->total_pauses_duration = static_cast(video_receiver_info.total_pauses_duration_ms) / - rtc::kNumMillisecsPerSec; + kNumMillisecsPerSec; inbound_video->freeze_count = video_receiver_info.freeze_count; inbound_video->total_freezes_duration = static_cast(video_receiver_info.total_freezes_duration_ms) / - rtc::kNumMillisecsPerSec; + kNumMillisecsPerSec; inbound_video->min_playout_delay = 
static_cast(video_receiver_info.min_playout_delay_ms) / - rtc::kNumMillisecsPerSec; + kNumMillisecsPerSec; if (video_receiver_info.last_packet_received.has_value()) { inbound_video->last_packet_received_timestamp = video_receiver_info.last_packet_received->ms(); @@ -678,10 +700,10 @@ CreateInboundRTPStreamStatsFromVideoReceiverInfo( *video_receiver_info.power_efficient_decoder; } for (const auto& ssrc_group : video_receiver_info.ssrc_groups) { - if (ssrc_group.semantics == cricket::kFidSsrcGroupSemantics && + if (ssrc_group.semantics == kFidSsrcGroupSemantics && ssrc_group.ssrcs.size() == 2) { inbound_video->rtx_ssrc = ssrc_group.ssrcs[1]; - } else if (ssrc_group.semantics == cricket::kFecFrSsrcGroupSemantics && + } else if (ssrc_group.semantics == kFecFrSsrcGroupSemantics && ssrc_group.ssrcs.size() == 2) { // TODO(bugs.webrtc.org/15002): the ssrc-group might be >= 2 with // multistream support. @@ -695,7 +717,7 @@ CreateInboundRTPStreamStatsFromVideoReceiverInfo( // Provides the media independent counters and information (both audio and // video). void SetOutboundRTPStreamStatsFromMediaSenderInfo( - const cricket::MediaSenderInfo& media_sender_info, + const MediaSenderInfo& media_sender_info, RTCOutboundRtpStreamStats* outbound_stats) { RTC_DCHECK(outbound_stats); outbound_stats->ssrc = media_sender_info.ssrc(); @@ -721,22 +743,21 @@ std::unique_ptr CreateOutboundRTPStreamStatsFromVoiceSenderInfo( const std::string& transport_id, const std::string& mid, - const cricket::VoiceMediaInfo& voice_media_info, - const cricket::VoiceSenderInfo& voice_sender_info, + const VoiceMediaInfo& voice_media_info, + const VoiceSenderInfo& voice_sender_info, Timestamp timestamp, RTCStatsReport* report) { auto outbound_audio = std::make_unique( RTCOutboundRtpStreamStatsIDFromSSRC( - transport_id, cricket::MEDIA_TYPE_AUDIO, voice_sender_info.ssrc()), + transport_id, webrtc::MediaType::AUDIO, voice_sender_info.ssrc()), timestamp); SetOutboundRTPStreamStatsFromMediaSenderInfo(voice_sender_info, outbound_audio.get()); outbound_audio->transport_id = transport_id; outbound_audio->mid = mid; outbound_audio->kind = "audio"; - if (voice_sender_info.target_bitrate.has_value() && - *voice_sender_info.target_bitrate > 0) { - outbound_audio->target_bitrate = *voice_sender_info.target_bitrate; + if (voice_sender_info.target_bitrate.has_value()) { + outbound_audio->target_bitrate = voice_sender_info.target_bitrate->bps(); } if (voice_sender_info.codec_payload_type.has_value()) { auto codec_param_it = voice_media_info.send_codecs.find( @@ -757,13 +778,13 @@ std::unique_ptr CreateOutboundRTPStreamStatsFromVideoSenderInfo( const std::string& transport_id, const std::string& mid, - const cricket::VideoMediaInfo& video_media_info, - const cricket::VideoSenderInfo& video_sender_info, + const VideoMediaInfo& video_media_info, + const VideoSenderInfo& video_sender_info, Timestamp timestamp, RTCStatsReport* report) { auto outbound_video = std::make_unique( RTCOutboundRtpStreamStatsIDFromSSRC( - transport_id, cricket::MEDIA_TYPE_VIDEO, video_sender_info.ssrc()), + transport_id, webrtc::MediaType::VIDEO, video_sender_info.ssrc()), timestamp); SetOutboundRTPStreamStatsFromMediaSenderInfo(video_sender_info, outbound_video.get()); @@ -786,15 +807,14 @@ CreateOutboundRTPStreamStatsFromVideoSenderInfo( static_cast(video_sender_info.plis_received); if (video_sender_info.qp_sum.has_value()) outbound_video->qp_sum = *video_sender_info.qp_sum; - if (video_sender_info.target_bitrate.has_value() && - *video_sender_info.target_bitrate > 0) { - 
outbound_video->target_bitrate = *video_sender_info.target_bitrate; + if (video_sender_info.target_bitrate.has_value()) { + outbound_video->target_bitrate = video_sender_info.target_bitrate->bps(); } outbound_video->frames_encoded = video_sender_info.frames_encoded; outbound_video->key_frames_encoded = video_sender_info.key_frames_encoded; outbound_video->total_encode_time = static_cast(video_sender_info.total_encode_time_ms) / - rtc::kNumMillisecsPerSec; + kNumMillisecsPerSec; outbound_video->total_encoded_bytes_target = video_sender_info.total_encoded_bytes_target; if (video_sender_info.send_frame_width > 0) { @@ -829,6 +849,9 @@ CreateOutboundRTPStreamStatsFromVideoSenderInfo( if (video_sender_info.rid.has_value()) { outbound_video->rid = *video_sender_info.rid; } + if (video_sender_info.encoding_index.has_value()) { + outbound_video->encoding_index = *video_sender_info.encoding_index; + } if (video_sender_info.power_efficient_encoder.has_value()) { outbound_video->power_efficient_encoder = *video_sender_info.power_efficient_encoder; @@ -838,8 +861,9 @@ CreateOutboundRTPStreamStatsFromVideoSenderInfo( ScalabilityModeToString(*video_sender_info.scalability_mode)); } for (const auto& ssrc_group : video_sender_info.ssrc_groups) { - if (ssrc_group.semantics == cricket::kFidSsrcGroupSemantics && - ssrc_group.ssrcs.size() == 2) { + if (ssrc_group.semantics == kFidSsrcGroupSemantics && + ssrc_group.ssrcs.size() == 2 && + video_sender_info.ssrc() == ssrc_group.ssrcs[0]) { outbound_video->rtx_ssrc = ssrc_group.ssrcs[1]; } } @@ -850,19 +874,23 @@ std::unique_ptr ProduceRemoteInboundRtpStreamStatsFromReportBlockData( const std::string& transport_id, const ReportBlockData& report_block, - cricket::MediaType media_type, + webrtc::MediaType media_type, const std::map& outbound_rtps, - const RTCStatsReport& report) { + const RTCStatsReport& report, + const bool stats_timestamp_with_environment_clock) { // RTCStats' timestamp generally refers to when the metric was sampled, but // for "remote-[outbound/inbound]-rtp" it refers to the local time when the // Report Block was received. + Timestamp arrival_timestamp = stats_timestamp_with_environment_clock + ? report_block.report_block_timestamp() + : report_block.report_block_timestamp_utc(); auto remote_inbound = std::make_unique( RTCRemoteInboundRtpStreamStatsIdFromSourceSsrc( media_type, report_block.source_ssrc()), - report_block.report_block_timestamp_utc()); + arrival_timestamp); remote_inbound->ssrc = report_block.source_ssrc(); remote_inbound->kind = - media_type == cricket::MEDIA_TYPE_AUDIO ? "audio" : "video"; + media_type == webrtc::MediaType::AUDIO ? "audio" : "video"; remote_inbound->packets_lost = report_block.cumulative_lost(); remote_inbound->fraction_lost = report_block.fraction_lost(); if (report_block.num_rtts() > 0) { @@ -889,7 +917,7 @@ ProduceRemoteInboundRtpStreamStatsFromReportBlockData( // transport paired with the RTP transport, otherwise the same // transport is used for RTCP and RTP. remote_inbound->transport_id = - transport.rtcp_transport_stats_id.is_defined() + transport.rtcp_transport_stats_id.has_value() ? *transport.rtcp_transport_stats_id : *outbound_rtp.transport_id; } @@ -897,13 +925,13 @@ ProduceRemoteInboundRtpStreamStatsFromReportBlockData( // codec is switched out on the fly we may have received a Report Block // based on the previous codec and there is no way to tell which point in // time the codec changed for the remote end. 
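For context on the codec lookup that follows: RTCP report blocks carry interarrival jitter in RTP timestamp units (RFC 3550), so expressing it in seconds requires the clock rate of the codec in use, which is why the code below reaches for the matching outbound-rtp's codec. A self-contained sketch of the conversion (illustrative helper, not the ReportBlockData API):

#include <cstdint>
#include <iostream>

// Jitter in RTP timestamp units divided by the codec clock rate gives seconds,
// e.g. 90000 Hz for video or 48000 Hz for Opus.
double JitterSeconds(uint32_t jitter_rtp_units, int clock_rate_hz) {
  return static_cast<double>(jitter_rtp_units) / clock_rate_hz;
}

int main() {
  // 1800 RTP units of jitter at a 90 kHz video clock is 20 ms.
  std::cout << JitterSeconds(1800, 90000) << "\n";  // 0.02
}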
- const auto* codec_from_id = outbound_rtp.codec_id.is_defined() + const auto* codec_from_id = outbound_rtp.codec_id.has_value() ? report.Get(*outbound_rtp.codec_id) : nullptr; if (codec_from_id) { remote_inbound->codec_id = *outbound_rtp.codec_id; const auto& codec = codec_from_id->cast_to(); - if (codec.clock_rate.is_defined()) { + if (codec.clock_rate.has_value()) { remote_inbound->jitter = report_block.jitter(*codec.clock_rate).seconds(); } @@ -914,10 +942,10 @@ ProduceRemoteInboundRtpStreamStatsFromReportBlockData( void ProduceCertificateStatsFromSSLCertificateStats( Timestamp timestamp, - const rtc::SSLCertificateStats& certificate_stats, + const SSLCertificateStats& certificate_stats, RTCStatsReport* report) { RTCCertificateStats* prev_certificate_stats = nullptr; - for (const rtc::SSLCertificateStats* s = &certificate_stats; s; + for (const SSLCertificateStats* s = &certificate_stats; s; s = s->issuer.get()) { std::string certificate_stats_id = RTCCertificateIDFromFingerprint(s->fingerprint); @@ -928,20 +956,22 @@ void ProduceCertificateStatsFromSSLCertificateStats( RTC_DCHECK_EQ(s, &certificate_stats); break; } - RTCCertificateStats* certificate_stats = + RTCCertificateStats* current_certificate_stats = new RTCCertificateStats(certificate_stats_id, timestamp); - certificate_stats->fingerprint = s->fingerprint; - certificate_stats->fingerprint_algorithm = s->fingerprint_algorithm; - certificate_stats->base64_certificate = s->base64_certificate; + current_certificate_stats->fingerprint = s->fingerprint; + current_certificate_stats->fingerprint_algorithm = s->fingerprint_algorithm; + current_certificate_stats->base64_certificate = s->base64_certificate; if (prev_certificate_stats) - prev_certificate_stats->issuer_certificate_id = certificate_stats->id(); - report->AddStats(std::unique_ptr(certificate_stats)); - prev_certificate_stats = certificate_stats; + prev_certificate_stats->issuer_certificate_id = + current_certificate_stats->id(); + report->AddStats( + std::unique_ptr(current_certificate_stats)); + prev_certificate_stats = current_certificate_stats; } } const std::string& ProduceIceCandidateStats(Timestamp timestamp, - const cricket::Candidate& candidate, + const Candidate& candidate, bool is_local, const std::string& transport_id, RTCStatsReport* report) { @@ -960,12 +990,10 @@ const std::string& ProduceIceCandidateStats(Timestamp timestamp, if (is_local) { candidate_stats->network_type = NetworkTypeToStatsType(candidate.network_type()); - const std::string& candidate_type = candidate.type(); const std::string& relay_protocol = candidate.relay_protocol(); const std::string& url = candidate.url(); - if (candidate_type == cricket::RELAY_PORT_TYPE || - (candidate_type == cricket::PRFLX_PORT_TYPE && - !relay_protocol.empty())) { + if (candidate.is_relay() || + (candidate.is_prflx() && !relay_protocol.empty())) { RTC_DCHECK(relay_protocol.compare("udp") == 0 || relay_protocol.compare("tcp") == 0 || relay_protocol.compare("tls") == 0); @@ -973,12 +1001,12 @@ const std::string& ProduceIceCandidateStats(Timestamp timestamp, if (!url.empty()) { candidate_stats->url = url; } - } else if (candidate_type == cricket::STUN_PORT_TYPE) { + } else if (candidate.is_stun()) { if (!url.empty()) { candidate_stats->url = url; } } - if (candidate.network_type() == rtc::ADAPTER_TYPE_VPN) { + if (candidate.network_type() == ADAPTER_TYPE_VPN) { candidate_stats->vpn = true; candidate_stats->network_adapter_type = std::string(NetworkTypeToStatsNetworkAdapterType( @@ -990,17 +1018,15 @@ const 
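The certificate loop above walks the leaf-to-root chain, emits one certificate stats entry per link, points each entry at its issuer via issuer_certificate_id, and stops as soon as it meets an id that is already in the report. A simplified standalone sketch of that walk (CertSketch and the id prefix are stand-ins, not SSLCertificateStats):

#include <iostream>
#include <map>
#include <memory>
#include <string>

// Stand-in for a certificate stats node: a fingerprint plus an optional
// pointer to the issuer, forming a leaf-to-root chain.
struct CertSketch {
  std::string fingerprint;
  std::unique_ptr<CertSketch> issuer;
};

// Registers one id per certificate and records each certificate's issuer id.
// An empty mapped value marks the root (or the last certificate visited).
void ProduceCertificateIds(const CertSketch& leaf,
                           std::map<std::string, std::string>& issuer_of) {
  std::string prev_id;
  for (const CertSketch* s = &leaf; s != nullptr; s = s->issuer.get()) {
    std::string id = "CF" + s->fingerprint;
    if (issuer_of.count(id) > 0) break;             // Already emitted: stop.
    issuer_of[id] = "";                             // Emit this certificate.
    if (!prev_id.empty()) issuer_of[prev_id] = id;  // Link child to issuer.
    prev_id = id;
  }
}

int main() {
  CertSketch leaf{"aa:bb", std::make_unique<CertSketch>()};
  leaf.issuer->fingerprint = "cc:dd";
  std::map<std::string, std::string> issuer_of;
  ProduceCertificateIds(leaf, issuer_of);
  for (const auto& [id, issuer] : issuer_of)
    std::cout << id << " -> " << (issuer.empty() ? "(root)" : issuer) << "\n";
}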
std::string& ProduceIceCandidateStats(Timestamp timestamp, } } else { // We don't expect to know the adapter type of remote candidates. - RTC_DCHECK_EQ(rtc::ADAPTER_TYPE_UNKNOWN, candidate.network_type()); + RTC_DCHECK_EQ(ADAPTER_TYPE_UNKNOWN, candidate.network_type()); RTC_DCHECK_EQ(0, candidate.relay_protocol().compare("")); - RTC_DCHECK_EQ(rtc::ADAPTER_TYPE_UNKNOWN, - candidate.underlying_type_for_vpn()); + RTC_DCHECK_EQ(ADAPTER_TYPE_UNKNOWN, candidate.underlying_type_for_vpn()); } candidate_stats->ip = candidate.address().ipaddr().ToString(); candidate_stats->address = candidate.address().ipaddr().ToString(); candidate_stats->port = static_cast(candidate.address().port()); candidate_stats->protocol = candidate.protocol(); - candidate_stats->candidate_type = - CandidateTypeToRTCIceCandidateType(candidate.type()); + candidate_stats->candidate_type = candidate.type_name(); candidate_stats->priority = static_cast(candidate.priority()); candidate_stats->foundation = candidate.foundation(); auto related_address = candidate.related_address(); @@ -1009,7 +1035,10 @@ const std::string& ProduceIceCandidateStats(Timestamp timestamp, candidate_stats->related_port = static_cast(related_address.port()); } - candidate_stats->username_fragment = candidate.username(); + const std::string& username = candidate.username(); + if (!username.empty()) { + candidate_stats->username_fragment = username; + } if (candidate.protocol() == "tcp") { candidate_stats->tcp_type = candidate.tcptype(); } @@ -1036,12 +1065,11 @@ void SetAudioProcessingStats(StatsType* stats, } // namespace -rtc::scoped_refptr -RTCStatsCollector::CreateReportFilteredBySelector( +scoped_refptr RTCStatsCollector::CreateReportFilteredBySelector( bool filter_by_sender_selector, - rtc::scoped_refptr report, - rtc::scoped_refptr sender_selector, - rtc::scoped_refptr receiver_selector) { + scoped_refptr report, + scoped_refptr sender_selector, + scoped_refptr receiver_selector) { std::vector rtpstream_ids; if (filter_by_sender_selector) { // Filter mode: RTCStatsCollector::RequestInfo::kSenderSelector @@ -1050,7 +1078,7 @@ RTCStatsCollector::CreateReportFilteredBySelector( auto encodings = sender_selector->GetParametersInternal().encodings; for (const auto* outbound_rtp : report->GetStatsOfType()) { - RTC_DCHECK(outbound_rtp->ssrc.is_defined()); + RTC_DCHECK(outbound_rtp->ssrc.has_value()); auto it = std::find_if(encodings.begin(), encodings.end(), [ssrc = *outbound_rtp->ssrc]( const RtpEncodingParameters& encoding) { @@ -1065,12 +1093,12 @@ RTCStatsCollector::CreateReportFilteredBySelector( // Filter mode: RTCStatsCollector::RequestInfo::kReceiverSelector if (receiver_selector) { // Find the inbound-rtp of the receiver using ssrc lookup. 
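The selector filtering above (sender case) and just below (receiver case) reduces to matching RTP stream stats against a set of selected SSRCs. A standalone sketch of that matching step, with toy types in place of the RTCStats hierarchy:

#include <algorithm>
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Toy stats entry: just an id and the SSRC it describes.
struct RtpStatsSketch {
  std::string id;
  uint32_t ssrc;
};

// Keep only the RTP stream stats whose SSRC matches one of the selector's
// encodings (sender case) or the receiver's single SSRC (receiver case).
std::vector<std::string> SelectStreamIds(
    const std::vector<RtpStatsSketch>& streams,
    const std::vector<uint32_t>& selected_ssrcs) {
  std::vector<std::string> ids;
  for (const auto& s : streams) {
    if (std::find(selected_ssrcs.begin(), selected_ssrcs.end(), s.ssrc) !=
        selected_ssrcs.end()) {
      ids.push_back(s.id);
    }
  }
  return ids;
}

int main() {
  std::vector<RtpStatsSketch> outbound = {{"outbound-111", 111},
                                          {"outbound-222", 222}};
  for (const auto& id : SelectStreamIds(outbound, {222}))
    std::cout << id << "\n";  // outbound-222
}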
- absl::optional ssrc; + std::optional ssrc; worker_thread_->BlockingCall([&] { ssrc = receiver_selector->ssrc(); }); if (ssrc.has_value()) { for (const auto* inbound_rtp : report->GetStatsOfType()) { - RTC_DCHECK(inbound_rtp->ssrc.is_defined()); + RTC_DCHECK(inbound_rtp->ssrc.has_value()); if (*inbound_rtp->ssrc == *ssrc) { rtpstream_ids.push_back(inbound_rtp->id()); } @@ -1092,20 +1120,20 @@ RTCStatsCollector::CertificateStatsPair::Copy() const { } RTCStatsCollector::RequestInfo::RequestInfo( - rtc::scoped_refptr callback) + scoped_refptr callback) : RequestInfo(FilterMode::kAll, std::move(callback), nullptr, nullptr) {} RTCStatsCollector::RequestInfo::RequestInfo( - rtc::scoped_refptr selector, - rtc::scoped_refptr callback) + scoped_refptr selector, + scoped_refptr callback) : RequestInfo(FilterMode::kSenderSelector, std::move(callback), std::move(selector), nullptr) {} RTCStatsCollector::RequestInfo::RequestInfo( - rtc::scoped_refptr selector, - rtc::scoped_refptr callback) + scoped_refptr selector, + scoped_refptr callback) : RequestInfo(FilterMode::kReceiverSelector, std::move(callback), nullptr, @@ -1113,9 +1141,9 @@ RTCStatsCollector::RequestInfo::RequestInfo( RTCStatsCollector::RequestInfo::RequestInfo( RTCStatsCollector::RequestInfo::FilterMode filter_mode, - rtc::scoped_refptr callback, - rtc::scoped_refptr sender_selector, - rtc::scoped_refptr receiver_selector) + scoped_refptr callback, + scoped_refptr sender_selector, + scoped_refptr receiver_selector) : filter_mode_(filter_mode), callback_(std::move(callback)), sender_selector_(std::move(sender_selector)), @@ -1124,15 +1152,20 @@ RTCStatsCollector::RequestInfo::RequestInfo( RTC_DCHECK(!sender_selector_ || !receiver_selector_); } -rtc::scoped_refptr RTCStatsCollector::Create( +scoped_refptr RTCStatsCollector::Create( PeerConnectionInternal* pc, + const Environment& env, int64_t cache_lifetime_us) { - return rtc::make_ref_counted(pc, cache_lifetime_us); + return make_ref_counted(pc, env, cache_lifetime_us); } RTCStatsCollector::RTCStatsCollector(PeerConnectionInternal* pc, + const Environment& env, int64_t cache_lifetime_us) : pc_(pc), + env_(env), + stats_timestamp_with_environment_clock_( + pc->GetConfiguration().stats_timestamp_with_environment_clock()), signaling_thread_(pc->signaling_thread()), worker_thread_(pc->worker_thread()), network_thread_(pc->network_thread()), @@ -1154,19 +1187,19 @@ RTCStatsCollector::~RTCStatsCollector() { } void RTCStatsCollector::GetStatsReport( - rtc::scoped_refptr callback) { + scoped_refptr callback) { GetStatsReportInternal(RequestInfo(std::move(callback))); } void RTCStatsCollector::GetStatsReport( - rtc::scoped_refptr selector, - rtc::scoped_refptr callback) { + scoped_refptr selector, + scoped_refptr callback) { GetStatsReportInternal(RequestInfo(std::move(selector), std::move(callback))); } void RTCStatsCollector::GetStatsReport( - rtc::scoped_refptr selector, - rtc::scoped_refptr callback) { + scoped_refptr selector, + scoped_refptr callback) { GetStatsReportInternal(RequestInfo(std::move(selector), std::move(callback))); } @@ -1176,7 +1209,7 @@ void RTCStatsCollector::GetStatsReportInternal( requests_.push_back(std::move(request)); // "Now" using a monotonically increasing timer. - int64_t cache_now_us = rtc::TimeMicros(); + int64_t cache_now_us = TimeMicros(); if (cached_report_ && cache_now_us - cache_timestamp_us_ <= cache_lifetime_us_) { // We have a fresh cached report to deliver. 
Deliver asynchronously, since @@ -1184,17 +1217,23 @@ void RTCStatsCollector::GetStatsReportInternal( // reentrancy problems. signaling_thread_->PostTask( absl::bind_front(&RTCStatsCollector::DeliverCachedReport, - rtc::scoped_refptr(this), - cached_report_, std::move(requests_))); + scoped_refptr(this), cached_report_, + std::move(requests_))); } else if (!num_pending_partial_reports_) { // Only start gathering stats if we're not already gathering stats. In the // case of already gathering stats, `callback_` will be invoked when there // are no more pending partial reports. - // "Now" using a system clock, relative to the UNIX epoch (Jan 1, 1970, - // UTC), in microseconds. The system clock could be modified and is not - // necessarily monotonically increasing. - Timestamp timestamp = Timestamp::Micros(rtc::TimeUTCMicros()); + Timestamp timestamp = + stats_timestamp_with_environment_clock_ + ? + // "Now" using a monotonically increasing timer. + env_.clock().CurrentTime() + : + // "Now" using a system clock, relative to the UNIX epoch (Jan 1, + // 1970, UTC), in microseconds. The system clock could be modified + // and is not necessarily monotonically increasing. + Timestamp::Micros(TimeUTCMicros()); num_pending_partial_reports_ = 2; partial_report_timestamp_us_ = cache_now_us; @@ -1207,7 +1246,7 @@ void RTCStatsCollector::GetStatsReportInternal( // ProducePartialResultsOnNetworkThread() has signaled the // `network_report_event_`. network_report_event_.Reset(); - rtc::scoped_refptr collector(this); + scoped_refptr collector(this); network_thread_->PostTask([collector, sctp_transport_name = pc_->sctp_transport_name(), timestamp]() mutable { @@ -1235,7 +1274,7 @@ void RTCStatsCollector::WaitForPendingRequest() { void RTCStatsCollector::ProducePartialResultsOnSignalingThread( Timestamp timestamp) { RTC_DCHECK_RUN_ON(signaling_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; partial_report_ = RTCStatsReport::Create(timestamp); @@ -1253,7 +1292,7 @@ void RTCStatsCollector::ProducePartialResultsOnSignalingThreadImpl( Timestamp timestamp, RTCStatsReport* partial_report) { RTC_DCHECK_RUN_ON(signaling_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; ProduceMediaSourceStats_s(timestamp, partial_report); ProducePeerConnectionStats_s(timestamp, partial_report); @@ -1262,11 +1301,11 @@ void RTCStatsCollector::ProducePartialResultsOnSignalingThreadImpl( void RTCStatsCollector::ProducePartialResultsOnNetworkThread( Timestamp timestamp, - absl::optional sctp_transport_name) { + std::optional sctp_transport_name) { TRACE_EVENT0("webrtc", "RTCStatsCollector::ProducePartialResultsOnNetworkThread"); RTC_DCHECK_RUN_ON(network_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; // Touching `network_report_` on this thread is safe by this method because // `network_report_event_` is reset before this method is invoked. 
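The cache check in GetStatsReportInternal above compares a monotonic "now" against the time the cached report was produced, so a system-clock jump cannot flip a report between fresh and stale. A minimal sketch of the same idea using std::chrono::steady_clock and the 50 ms default lifetime (ReportCacheSketch is illustrative, not the collector's API):

#include <chrono>
#include <iostream>
#include <optional>

class ReportCacheSketch {
 public:
  explicit ReportCacheSketch(std::chrono::microseconds lifetime)
      : lifetime_(lifetime) {}

  std::optional<int> GetCached() const {
    if (cached_ && std::chrono::steady_clock::now() - cached_at_ <= lifetime_)
      return cached_;    // Fresh enough: reuse.
    return std::nullopt; // Stale or absent: caller must gather new stats.
  }

  void Put(int report) {
    cached_ = report;
    cached_at_ = std::chrono::steady_clock::now();
  }

 private:
  std::chrono::microseconds lifetime_;
  std::optional<int> cached_;
  std::chrono::steady_clock::time_point cached_at_;
};

int main() {
  ReportCacheSketch cache(std::chrono::microseconds(50'000));  // 50 ms default.
  cache.Put(42);
  std::cout << cache.GetCached().value_or(-1) << "\n";  // 42 (still fresh)
}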
@@ -1284,7 +1323,7 @@ void RTCStatsCollector::ProducePartialResultsOnNetworkThread( transport_names.insert(*info.transport_name); } - std::map transport_stats_by_name = + std::map transport_stats_by_name = pc_->GetTransportStatsByNames(transport_names); std::map transport_cert_stats = PrepareTransportCertificateStats_n(transport_stats_by_name); @@ -1296,19 +1335,18 @@ void RTCStatsCollector::ProducePartialResultsOnNetworkThread( // Signal that it is now safe to touch `network_report_` on the signaling // thread, and post a task to merge it into the final results. network_report_event_.Set(); - rtc::scoped_refptr collector(this); + scoped_refptr collector(this); signaling_thread_->PostTask( [collector] { collector->MergeNetworkReport_s(); }); } void RTCStatsCollector::ProducePartialResultsOnNetworkThreadImpl( Timestamp timestamp, - const std::map& - transport_stats_by_name, + const std::map& transport_stats_by_name, const std::map& transport_cert_stats, RTCStatsReport* partial_report) { RTC_DCHECK_RUN_ON(network_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; ProduceCertificateStats_n(timestamp, transport_cert_stats, partial_report); ProduceIceCandidateAndPairStats_n(timestamp, transport_stats_by_name, @@ -1324,7 +1362,7 @@ void RTCStatsCollector::MergeNetworkReport_s() { // `network_report_`. This is normally not blocking, but if // WaitForPendingRequest() is called while a request is pending, we might have // to wait until the network thread is done touching `network_report_`. - network_report_event_.Wait(rtc::Event::kForever); + network_report_event_.Wait(Event::kForever); if (!network_report_) { // Normally, MergeNetworkReport_s() is executed because it is posted from // the network thread. But if WaitForPendingRequest() is called while a @@ -1350,8 +1388,8 @@ void RTCStatsCollector::MergeNetworkReport_s() { // Trace WebRTC Stats when getStats is called on Javascript. // This allows access to WebRTC stats from trace logs. To enable them, // select the "webrtc_stats" category when recording traces. - TRACE_EVENT_INSTANT1("webrtc_stats", "webrtc_stats", "report", - cached_report_->ToJson()); + TRACE_EVENT_INSTANT1("webrtc_stats", "webrtc_stats", TRACE_EVENT_SCOPE_GLOBAL, + "report", cached_report_->ToJson()); // Deliver report and clear `requests_`. 
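The delivery step referenced in the comment above swaps the pending requests out of the member vector before invoking any callback, so a callback that immediately requests stats again enqueues a new request instead of mutating the list being iterated. A single-threaded sketch of that pattern (toy callback type, not RTCStatsCollectorCallback):

#include <functional>
#include <iostream>
#include <string>
#include <vector>

class DeliverySketch {
 public:
  void Enqueue(std::function<void(const std::string&)> callback) {
    requests_.push_back(std::move(callback));
  }

  void DeliverCachedReport(const std::string& cached_report) {
    std::vector<std::function<void(const std::string&)>> requests;
    requests.swap(requests_);  // Clear the queue first (reentrancy-safe).
    for (auto& request : requests) request(cached_report);
  }

 private:
  std::vector<std::function<void(const std::string&)>> requests_;
};

int main() {
  DeliverySketch collector;
  collector.Enqueue([](const std::string& r) { std::cout << r << "\n"; });
  collector.Enqueue([](const std::string& r) { std::cout << r << "\n"; });
  collector.DeliverCachedReport("{\"stats\": []}");
}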
std::vector requests; @@ -1360,7 +1398,7 @@ void RTCStatsCollector::MergeNetworkReport_s() { } void RTCStatsCollector::DeliverCachedReport( - rtc::scoped_refptr cached_report, + scoped_refptr cached_report, std::vector requests) { RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(!requests.empty()); @@ -1371,8 +1409,8 @@ void RTCStatsCollector::DeliverCachedReport( request.callback()->OnStatsDelivered(cached_report); } else { bool filter_by_sender_selector; - rtc::scoped_refptr sender_selector; - rtc::scoped_refptr receiver_selector; + scoped_refptr sender_selector; + scoped_refptr receiver_selector; if (request.filter_mode() == RequestInfo::FilterMode::kSenderSelector) { filter_by_sender_selector = true; sender_selector = request.sender_selector(); @@ -1394,7 +1432,7 @@ void RTCStatsCollector::ProduceCertificateStats_n( const std::map& transport_cert_stats, RTCStatsReport* report) const { RTC_DCHECK_RUN_ON(network_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const auto& transport_cert_stats_pair : transport_cert_stats) { if (transport_cert_stats_pair.second.local) { @@ -1412,11 +1450,11 @@ void RTCStatsCollector::ProduceDataChannelStats_n( Timestamp timestamp, RTCStatsReport* report) const { RTC_DCHECK_RUN_ON(network_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; std::vector data_stats = pc_->GetDataChannelStats(); for (const auto& stats : data_stats) { auto data_channel_stats = std::make_unique( - "D" + rtc::ToString(stats.internal_id), timestamp); + "D" + absl::StrCat(stats.internal_id), timestamp); data_channel_stats->label = std::move(stats.label); data_channel_stats->protocol = std::move(stats.protocol); if (stats.id >= 0) { @@ -1435,16 +1473,15 @@ void RTCStatsCollector::ProduceDataChannelStats_n( void RTCStatsCollector::ProduceIceCandidateAndPairStats_n( Timestamp timestamp, - const std::map& - transport_stats_by_name, + const std::map& transport_stats_by_name, const Call::Stats& call_stats, RTCStatsReport* report) const { RTC_DCHECK_RUN_ON(network_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const auto& entry : transport_stats_by_name) { const std::string& transport_name = entry.first; - const cricket::TransportStats& transport_stats = entry.second; + const TransportStats& transport_stats = entry.second; for (const auto& channel_stats : transport_stats.channel_stats) { std::string transport_id = RTCTransportStatsIDFromTransportChannel( transport_name, channel_stats.component); @@ -1482,11 +1519,11 @@ void RTCStatsCollector::ProduceIceCandidateAndPairStats_n( static_cast(info.recv_total_bytes); candidate_pair_stats->total_round_trip_time = static_cast(info.total_round_trip_time_ms) / - rtc::kNumMillisecsPerSec; + kNumMillisecsPerSec; if (info.current_round_trip_time_ms.has_value()) { candidate_pair_stats->current_round_trip_time = static_cast(*info.current_round_trip_time_ms) / - rtc::kNumMillisecsPerSec; + kNumMillisecsPerSec; } if (info.best_connection) { // The bandwidth estimations we have are for the selected candidate @@ -1544,7 +1581,7 @@ void RTCStatsCollector::ProduceMediaSourceStats_s( Timestamp timestamp, RTCStatsReport* report) const { RTC_DCHECK_RUN_ON(signaling_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const 
RtpTransceiverStatsInfo& transceiver_stats_info : transceiver_stats_infos_) { @@ -1559,7 +1596,7 @@ void RTCStatsCollector::ProduceMediaSourceStats_s( // to multiple senders which should result in multiple senders referencing // the same media-source stats. When all media source related metrics are // moved to the track's source (e.g. input frame rate is moved from - // cricket::VideoSenderInfo to VideoTrackSourceInterface::Stats and audio + // webrtc::VideoSenderInfo to VideoTrackSourceInterface::Stats and audio // levels are moved to the corresponding audio track/source object), don't // create separate media source stats objects on a per-attachment basis. std::unique_ptr media_source_stats; @@ -1568,7 +1605,7 @@ void RTCStatsCollector::ProduceMediaSourceStats_s( static_cast(track.get()); auto audio_source_stats = std::make_unique( RTCMediaSourceStatsIDFromKindAndAttachment( - cricket::MEDIA_TYPE_AUDIO, sender_internal->AttachmentId()), + webrtc::MediaType::AUDIO, sender_internal->AttachmentId()), timestamp); // TODO(https://crbug.com/webrtc/10771): We shouldn't need to have an // SSRC assigned (there shouldn't need to exist a send-stream, created @@ -1606,7 +1643,7 @@ void RTCStatsCollector::ProduceMediaSourceStats_s( RTC_DCHECK_EQ(MediaStreamTrackInterface::kVideoKind, track->kind()); auto video_source_stats = std::make_unique( RTCMediaSourceStatsIDFromKindAndAttachment( - cricket::MEDIA_TYPE_VIDEO, sender_internal->AttachmentId()), + webrtc::MediaType::VIDEO, sender_internal->AttachmentId()), timestamp); auto* video_track = static_cast(track.get()); auto* video_source = video_track->GetSource(); @@ -1643,7 +1680,7 @@ void RTCStatsCollector::ProducePeerConnectionStats_s( Timestamp timestamp, RTCStatsReport* report) const { RTC_DCHECK_RUN_ON(signaling_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; auto stats(std::make_unique("P", timestamp)); stats->data_channels_opened = internal_record_.data_channels_opened; @@ -1655,7 +1692,7 @@ void RTCStatsCollector::ProduceAudioPlayoutStats_s( Timestamp timestamp, RTCStatsReport* report) const { RTC_DCHECK_RUN_ON(signaling_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; if (audio_device_stats_) { report->AddStats(CreateAudioPlayoutStats(*audio_device_stats_, timestamp)); @@ -1667,12 +1704,12 @@ void RTCStatsCollector::ProduceRTPStreamStats_n( const std::vector& transceiver_stats_infos, RTCStatsReport* report) const { RTC_DCHECK_RUN_ON(network_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const RtpTransceiverStatsInfo& stats : transceiver_stats_infos) { - if (stats.media_type == cricket::MEDIA_TYPE_AUDIO) { + if (stats.media_type == webrtc::MediaType::AUDIO) { ProduceAudioRTPStreamStats_n(timestamp, stats, report); - } else if (stats.media_type == cricket::MEDIA_TYPE_VIDEO) { + } else if (stats.media_type == webrtc::MediaType::VIDEO) { ProduceVideoRTPStreamStats_n(timestamp, stats, report); } else { RTC_DCHECK_NOTREACHED(); @@ -1685,7 +1722,7 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n( const RtpTransceiverStatsInfo& stats, RTCStatsReport* report) const { RTC_DCHECK_RUN_ON(network_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; if (!stats.mid || !stats.transport_name) { return; @@ -1693,11 +1730,11 @@ void 
RTCStatsCollector::ProduceAudioRTPStreamStats_n( RTC_DCHECK(stats.track_media_info_map.voice_media_info().has_value()); std::string mid = *stats.mid; std::string transport_id = RTCTransportStatsIDFromTransportChannel( - *stats.transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTP); + *stats.transport_name, ICE_CANDIDATE_COMPONENT_RTP); // Inbound and remote-outbound. // The remote-outbound stats are based on RTCP sender reports sent from the // remote endpoint providing metrics about the remote outbound streams. - for (const cricket::VoiceReceiverInfo& voice_receiver_info : + for (const VoiceReceiverInfo& voice_receiver_info : stats.track_media_info_map.voice_media_info()->receivers) { if (!voice_receiver_info.connected()) continue; @@ -1706,12 +1743,12 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n( *stats.track_media_info_map.voice_media_info(), voice_receiver_info, transport_id, mid, timestamp, report); // TODO(hta): This lookup should look for the sender, not the track. - rtc::scoped_refptr audio_track = + scoped_refptr audio_track = stats.track_media_info_map.GetAudioTrack(voice_receiver_info); if (audio_track) { inbound_audio->track_identifier = audio_track->id(); } - if (audio_device_stats_ && stats.media_type == cricket::MEDIA_TYPE_AUDIO && + if (audio_device_stats_ && stats.media_type == webrtc::MediaType::AUDIO && stats.current_direction && (*stats.current_direction == RtpTransceiverDirection::kSendRecv || *stats.current_direction == RtpTransceiverDirection::kRecvOnly)) { @@ -1724,8 +1761,9 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n( continue; } // Remote-outbound. - auto remote_outbound_audio = CreateRemoteOutboundAudioStreamStats( - voice_receiver_info, mid, *inbound_audio_ptr, transport_id); + auto remote_outbound_audio = CreateRemoteOutboundMediaStreamStats( + voice_receiver_info, mid, webrtc::MediaType::AUDIO, *inbound_audio_ptr, + transport_id, stats_timestamp_with_environment_clock_); // Add stats. if (remote_outbound_audio) { // When the remote outbound stats are available, the remote ID for the @@ -1742,21 +1780,21 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n( } // Outbound. std::map audio_outbound_rtps; - for (const cricket::VoiceSenderInfo& voice_sender_info : + for (const VoiceSenderInfo& voice_sender_info : stats.track_media_info_map.voice_media_info()->senders) { if (!voice_sender_info.connected()) continue; auto outbound_audio = CreateOutboundRTPStreamStatsFromVoiceSenderInfo( transport_id, mid, *stats.track_media_info_map.voice_media_info(), voice_sender_info, timestamp, report); - rtc::scoped_refptr audio_track = + scoped_refptr audio_track = stats.track_media_info_map.GetAudioTrack(voice_sender_info); if (audio_track) { int attachment_id = stats.track_media_info_map.GetAttachmentIdByTrack(audio_track.get()) .value(); outbound_audio->media_source_id = - RTCMediaSourceStatsIDFromKindAndAttachment(cricket::MEDIA_TYPE_AUDIO, + RTCMediaSourceStatsIDFromKindAndAttachment(webrtc::MediaType::AUDIO, attachment_id); } auto audio_outbound_pair = @@ -1773,12 +1811,13 @@ void RTCStatsCollector::ProduceAudioRTPStreamStats_n( // providing metrics about our Outbound streams. We take advantage of the fact // that RTCOutboundRtpStreamStats, RTCCodecStats and RTCTransport have already // been added to the report. 
- for (const cricket::VoiceSenderInfo& voice_sender_info : + for (const VoiceSenderInfo& voice_sender_info : stats.track_media_info_map.voice_media_info()->senders) { for (const auto& report_block_data : voice_sender_info.report_block_datas) { report->AddStats(ProduceRemoteInboundRtpStreamStatsFromReportBlockData( - transport_id, report_block_data, cricket::MEDIA_TYPE_AUDIO, - audio_outbound_rtps, *report)); + transport_id, report_block_data, webrtc::MediaType::AUDIO, + audio_outbound_rtps, *report, + stats_timestamp_with_environment_clock_)); } } } @@ -1788,7 +1827,7 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n( const RtpTransceiverStatsInfo& stats, RTCStatsReport* report) const { RTC_DCHECK_RUN_ON(network_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; if (!stats.mid || !stats.transport_name) { return; @@ -1796,42 +1835,61 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n( RTC_DCHECK(stats.track_media_info_map.video_media_info().has_value()); std::string mid = *stats.mid; std::string transport_id = RTCTransportStatsIDFromTransportChannel( - *stats.transport_name, cricket::ICE_CANDIDATE_COMPONENT_RTP); - // Inbound - for (const cricket::VideoReceiverInfo& video_receiver_info : + *stats.transport_name, ICE_CANDIDATE_COMPONENT_RTP); + // Inbound and remote-outbound. + for (const VideoReceiverInfo& video_receiver_info : stats.track_media_info_map.video_media_info()->receivers) { if (!video_receiver_info.connected()) continue; auto inbound_video = CreateInboundRTPStreamStatsFromVideoReceiverInfo( transport_id, mid, *stats.track_media_info_map.video_media_info(), video_receiver_info, timestamp, report); - rtc::scoped_refptr video_track = + scoped_refptr video_track = stats.track_media_info_map.GetVideoTrack(video_receiver_info); if (video_track) { inbound_video->track_identifier = video_track->id(); } - if (!report->TryAddStats(std::move(inbound_video))) { + auto* inbound_video_ptr = report->TryAddStats(std::move(inbound_video)); + if (!inbound_video_ptr) { RTC_LOG(LS_ERROR) << "Unable to add video 'inbound-rtp' to report, ID is not unique."; + continue; + } + // Remote-outbound. + auto remote_outbound_video = CreateRemoteOutboundMediaStreamStats( + video_receiver_info, mid, webrtc::MediaType::VIDEO, *inbound_video_ptr, + transport_id, stats_timestamp_with_environment_clock_); + // Add stats. + if (remote_outbound_video) { + // When the remote outbound stats are available, the remote ID for the + // local inbound stats is set. 
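The inbound/remote-outbound pairing in this hunk relies on TryAddStats returning a usable pointer on success and nullptr on an id collision, with the remote_id link only set when both objects made it into the report. A standalone sketch of that pattern with a toy report type (not RTCStatsReport):

#include <iostream>
#include <map>
#include <memory>
#include <string>

struct StatsSketch {
  std::string id;
  std::string remote_id;  // Filled in once the remote counterpart is added.
};

class ReportSketch {
 public:
  // Returns a pointer to the stored object, or nullptr if the id is taken.
  StatsSketch* TryAdd(std::unique_ptr<StatsSketch> stats) {
    auto [it, inserted] = stats_.emplace(stats->id, std::move(stats));
    return inserted ? it->second.get() : nullptr;
  }

 private:
  std::map<std::string, std::unique_ptr<StatsSketch>> stats_;
};

int main() {
  ReportSketch report;
  auto* inbound = report.TryAdd(
      std::make_unique<StatsSketch>(StatsSketch{"inbound-video-1", ""}));
  if (!inbound) return 1;  // Duplicate id: the real code logs and continues.
  auto* remote = report.TryAdd(
      std::make_unique<StatsSketch>(StatsSketch{"remote-outbound-video-1", ""}));
  if (remote) inbound->remote_id = remote->id;  // Cross-link the pair.
  std::cout << inbound->remote_id << "\n";      // remote-outbound-video-1
}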
+ auto* remote_outbound_video_ptr = + report->TryAddStats(std::move(remote_outbound_video)); + if (remote_outbound_video_ptr) { + inbound_video_ptr->remote_id = remote_outbound_video_ptr->id(); + } else { + RTC_LOG(LS_ERROR) << "Unable to add video 'remote-outbound-rtp' to " + << "report, ID is not unique."; + } } } // Outbound std::map video_outbound_rtps; - for (const cricket::VideoSenderInfo& video_sender_info : + for (const VideoSenderInfo& video_sender_info : stats.track_media_info_map.video_media_info()->senders) { if (!video_sender_info.connected()) continue; auto outbound_video = CreateOutboundRTPStreamStatsFromVideoSenderInfo( transport_id, mid, *stats.track_media_info_map.video_media_info(), video_sender_info, timestamp, report); - rtc::scoped_refptr video_track = + scoped_refptr video_track = stats.track_media_info_map.GetVideoTrack(video_sender_info); if (video_track) { int attachment_id = stats.track_media_info_map.GetAttachmentIdByTrack(video_track.get()) .value(); outbound_video->media_source_id = - RTCMediaSourceStatsIDFromKindAndAttachment(cricket::MEDIA_TYPE_VIDEO, + RTCMediaSourceStatsIDFromKindAndAttachment(webrtc::MediaType::VIDEO, attachment_id); } auto video_outbound_pair = @@ -1848,34 +1906,34 @@ void RTCStatsCollector::ProduceVideoRTPStreamStats_n( // providing metrics about our Outbound streams. We take advantage of the fact // that RTCOutboundRtpStreamStats, RTCCodecStats and RTCTransport have already // been added to the report. - for (const cricket::VideoSenderInfo& video_sender_info : + for (const VideoSenderInfo& video_sender_info : stats.track_media_info_map.video_media_info()->senders) { for (const auto& report_block_data : video_sender_info.report_block_datas) { report->AddStats(ProduceRemoteInboundRtpStreamStatsFromReportBlockData( - transport_id, report_block_data, cricket::MEDIA_TYPE_VIDEO, - video_outbound_rtps, *report)); + transport_id, report_block_data, webrtc::MediaType::VIDEO, + video_outbound_rtps, *report, + stats_timestamp_with_environment_clock_)); } } } void RTCStatsCollector::ProduceTransportStats_n( Timestamp timestamp, - const std::map& - transport_stats_by_name, + const std::map& transport_stats_by_name, const std::map& transport_cert_stats, RTCStatsReport* report) const { RTC_DCHECK_RUN_ON(network_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const auto& entry : transport_stats_by_name) { const std::string& transport_name = entry.first; - const cricket::TransportStats& transport_stats = entry.second; + const TransportStats& transport_stats = entry.second; // Get reference to RTCP channel, if it exists. std::string rtcp_transport_stats_id; - for (const cricket::TransportChannelStats& channel_stats : + for (const TransportChannelStats& channel_stats : transport_stats.channel_stats) { - if (channel_stats.component == cricket::ICE_CANDIDATE_COMPONENT_RTCP) { + if (channel_stats.component == ICE_CANDIDATE_COMPONENT_RTCP) { rtcp_transport_stats_id = RTCTransportStatsIDFromTransportChannel( transport_name, channel_stats.component); break; @@ -1900,90 +1958,84 @@ void RTCStatsCollector::ProduceTransportStats_n( } // There is one transport stats for each channel. 
- for (const cricket::TransportChannelStats& channel_stats : + for (const TransportChannelStats& channel_stats : transport_stats.channel_stats) { - auto transport_stats = std::make_unique( + auto channel_transport_stats = std::make_unique( RTCTransportStatsIDFromTransportChannel(transport_name, channel_stats.component), timestamp); - transport_stats->packets_sent = + channel_transport_stats->packets_sent = channel_stats.ice_transport_stats.packets_sent; - transport_stats->packets_received = + channel_transport_stats->packets_received = channel_stats.ice_transport_stats.packets_received; - transport_stats->bytes_sent = + channel_transport_stats->bytes_sent = channel_stats.ice_transport_stats.bytes_sent; - transport_stats->bytes_received = + channel_transport_stats->bytes_received = channel_stats.ice_transport_stats.bytes_received; - transport_stats->dtls_state = + channel_transport_stats->dtls_state = DtlsTransportStateToRTCDtlsTransportState(channel_stats.dtls_state); - transport_stats->selected_candidate_pair_changes = + channel_transport_stats->selected_candidate_pair_changes = channel_stats.ice_transport_stats.selected_candidate_pair_changes; - transport_stats->ice_role = + channel_transport_stats->ice_role = IceRoleToRTCIceRole(channel_stats.ice_transport_stats.ice_role); - transport_stats->ice_local_username_fragment = + channel_transport_stats->ice_local_username_fragment = channel_stats.ice_transport_stats.ice_local_username_fragment; - transport_stats->ice_state = IceTransportStateToRTCIceTransportState( - channel_stats.ice_transport_stats.ice_state); - for (const cricket::ConnectionInfo& info : + channel_transport_stats->ice_state = + IceTransportStateToRTCIceTransportState( + channel_stats.ice_transport_stats.ice_state); + for (const ConnectionInfo& info : channel_stats.ice_transport_stats.connection_infos) { if (info.best_connection) { - transport_stats->selected_candidate_pair_id = + channel_transport_stats->selected_candidate_pair_id = RTCIceCandidatePairStatsIDFromConnectionInfo(info); } } - if (channel_stats.component != cricket::ICE_CANDIDATE_COMPONENT_RTCP && + if (channel_stats.component != ICE_CANDIDATE_COMPONENT_RTCP && !rtcp_transport_stats_id.empty()) { - transport_stats->rtcp_transport_stats_id = rtcp_transport_stats_id; + channel_transport_stats->rtcp_transport_stats_id = + rtcp_transport_stats_id; } if (!local_certificate_id.empty()) - transport_stats->local_certificate_id = local_certificate_id; + channel_transport_stats->local_certificate_id = local_certificate_id; if (!remote_certificate_id.empty()) - transport_stats->remote_certificate_id = remote_certificate_id; + channel_transport_stats->remote_certificate_id = remote_certificate_id; // Crypto information if (channel_stats.ssl_version_bytes) { char bytes[5]; snprintf(bytes, sizeof(bytes), "%04X", channel_stats.ssl_version_bytes); - transport_stats->tls_version = bytes; + channel_transport_stats->tls_version = bytes; } if (channel_stats.dtls_role) { - transport_stats->dtls_role = - *channel_stats.dtls_role == rtc::SSL_CLIENT ? "client" : "server"; + channel_transport_stats->dtls_role = + *channel_stats.dtls_role == SSL_CLIENT ? 
"client" : "server"; } else { - transport_stats->dtls_role = "unknown"; + channel_transport_stats->dtls_role = "unknown"; } - if (channel_stats.ssl_cipher_suite != rtc::kTlsNullWithNullNull && - rtc::SSLStreamAdapter::SslCipherSuiteToName( - channel_stats.ssl_cipher_suite) - .length()) { - transport_stats->dtls_cipher = - rtc::SSLStreamAdapter::SslCipherSuiteToName( - channel_stats.ssl_cipher_suite); - } - if (channel_stats.srtp_crypto_suite != rtc::kSrtpInvalidCryptoSuite && - rtc::SrtpCryptoSuiteToName(channel_stats.srtp_crypto_suite) - .length()) { - transport_stats->srtp_cipher = - rtc::SrtpCryptoSuiteToName(channel_stats.srtp_crypto_suite); + channel_transport_stats->dtls_cipher = + channel_stats.tls_cipher_suite_name; + if (channel_stats.srtp_crypto_suite != kSrtpInvalidCryptoSuite && + SrtpCryptoSuiteToName(channel_stats.srtp_crypto_suite).length()) { + channel_transport_stats->srtp_cipher = + SrtpCryptoSuiteToName(channel_stats.srtp_crypto_suite); } - report->AddStats(std::move(transport_stats)); + report->AddStats(std::move(channel_transport_stats)); } } } std::map RTCStatsCollector::PrepareTransportCertificateStats_n( - const std::map& - transport_stats_by_name) { + const std::map& transport_stats_by_name) { RTC_DCHECK_RUN_ON(network_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; std::map transport_cert_stats; { MutexLock lock(&cached_certificates_mutex_); // Copy the certificate info from the cache, avoiding expensive - // rtc::SSLCertChain::GetStats() calls. + // webrtc::SSLCertChain::GetStats() calls. for (const auto& pair : cached_certificates_by_transport_) { transport_cert_stats.insert( std::make_pair(pair.first, pair.second.Copy())); @@ -1995,7 +2047,7 @@ RTCStatsCollector::PrepareTransportCertificateStats_n( const std::string& transport_name = entry.first; CertificateStatsPair certificate_stats_pair; - rtc::scoped_refptr local_certificate; + scoped_refptr local_certificate; if (pc_->GetLocalCertificate(transport_name, &local_certificate)) { certificate_stats_pair.local = local_certificate->GetSSLCertificateChain().GetStats(); @@ -2025,17 +2077,13 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { transceiver_stats_infos_.clear(); // These are used to invoke GetStats for all the media channels together in // one worker thread hop. - std::map + std::map voice_send_stats; - std::map + std::map video_send_stats; - std::map + std::map voice_receive_stats; - std::map + std::map video_receive_stats; auto transceivers = pc_->GetTransceiversInternal(); @@ -2043,11 +2091,11 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { // TODO(tommi): See if we can avoid synchronously blocking the signaling // thread while we do this (or avoid the BlockingCall at all). network_thread_->BlockingCall([&] { - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (const auto& transceiver_proxy : transceivers) { RtpTransceiver* transceiver = transceiver_proxy->internal(); - cricket::MediaType media_type = transceiver->media_type(); + webrtc::MediaType media_type = transceiver->media_type(); // Prepare stats entry. The TrackMediaInfoMap will be filled in after the // stats have been fetched on the worker thread. 
@@ -2056,7 +2104,7 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { stats.transceiver = transceiver; stats.media_type = media_type; - cricket::ChannelInterface* channel = transceiver->channel(); + ChannelInterface* channel = transceiver->channel(); if (!channel) { // The remaining fields require a BaseChannel. continue; @@ -2065,29 +2113,29 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { stats.mid = channel->mid(); stats.transport_name = std::string(channel->transport_name()); - if (media_type == cricket::MEDIA_TYPE_AUDIO) { + if (media_type == webrtc::MediaType::AUDIO) { auto voice_send_channel = channel->voice_media_send_channel(); RTC_DCHECK(voice_send_stats.find(voice_send_channel) == voice_send_stats.end()); voice_send_stats.insert( - std::make_pair(voice_send_channel, cricket::VoiceMediaSendInfo())); + std::make_pair(voice_send_channel, VoiceMediaSendInfo())); auto voice_receive_channel = channel->voice_media_receive_channel(); RTC_DCHECK(voice_receive_stats.find(voice_receive_channel) == voice_receive_stats.end()); - voice_receive_stats.insert(std::make_pair( - voice_receive_channel, cricket::VoiceMediaReceiveInfo())); - } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { + voice_receive_stats.insert( + std::make_pair(voice_receive_channel, VoiceMediaReceiveInfo())); + } else if (media_type == webrtc::MediaType::VIDEO) { auto video_send_channel = channel->video_media_send_channel(); RTC_DCHECK(video_send_stats.find(video_send_channel) == video_send_stats.end()); video_send_stats.insert( - std::make_pair(video_send_channel, cricket::VideoMediaSendInfo())); + std::make_pair(video_send_channel, VideoMediaSendInfo())); auto video_receive_channel = channel->video_media_receive_channel(); RTC_DCHECK(video_receive_stats.find(video_receive_channel) == video_receive_stats.end()); - video_receive_stats.insert(std::make_pair( - video_receive_channel, cricket::VideoMediaReceiveInfo())); + video_receive_stats.insert( + std::make_pair(video_receive_channel, VideoMediaReceiveInfo())); } else { RTC_DCHECK_NOTREACHED(); } @@ -2099,7 +2147,7 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { // TrackMediaInfoMaps, which also needs info from the worker thread. This // minimizes the number of thread jumps. worker_thread_->BlockingCall([&] { - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; for (auto& pair : voice_send_stats) { if (!pair.first->GetStats(&pair.second)) { @@ -2123,45 +2171,50 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { } } - // Create the TrackMediaInfoMap for each transceiver stats object. + // Create the TrackMediaInfoMap for each transceiver stats object + // and keep track of whether we have at least one audio receiver. 
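The comment above introduces the has_audio_receiver flag: audio device (playout) stats are only fetched when at least one audio transceiver actually has a receiver. A standalone sketch of that gating with toy types (not the RtpTransceiver API):

#include <iostream>
#include <string>
#include <vector>

struct TransceiverSketch {
  std::string kind;  // "audio" or "video"
  int receiver_count = 0;
};

// True only when some audio transceiver has at least one receiver, mirroring
// the condition under which GetAudioDeviceStats() is called above.
bool ShouldFetchAudioDeviceStats(
    const std::vector<TransceiverSketch>& transceivers) {
  bool has_audio_receiver = false;
  for (const auto& t : transceivers) {
    if (t.kind == "audio") has_audio_receiver |= t.receiver_count > 0;
  }
  return has_audio_receiver;
}

int main() {
  std::cout << ShouldFetchAudioDeviceStats({{"video", 1}}) << "\n";               // 0
  std::cout << ShouldFetchAudioDeviceStats({{"audio", 1}, {"video", 1}}) << "\n"; // 1
}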
+ bool has_audio_receiver = false; for (auto& stats : transceiver_stats_infos_) { auto transceiver = stats.transceiver; - absl::optional voice_media_info; - absl::optional video_media_info; + std::optional voice_media_info; + std::optional video_media_info; auto channel = transceiver->channel(); if (channel) { - cricket::MediaType media_type = transceiver->media_type(); - if (media_type == cricket::MEDIA_TYPE_AUDIO) { + webrtc::MediaType media_type = transceiver->media_type(); + if (media_type == webrtc::MediaType::AUDIO) { auto voice_send_channel = channel->voice_media_send_channel(); auto voice_receive_channel = channel->voice_media_receive_channel(); - voice_media_info = cricket::VoiceMediaInfo( + voice_media_info = VoiceMediaInfo( std::move(voice_send_stats[voice_send_channel]), std::move(voice_receive_stats[voice_receive_channel])); - } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { + } else if (media_type == webrtc::MediaType::VIDEO) { auto video_send_channel = channel->video_media_send_channel(); auto video_receive_channel = channel->video_media_receive_channel(); - video_media_info = cricket::VideoMediaInfo( + video_media_info = VideoMediaInfo( std::move(video_send_stats[video_send_channel]), std::move(video_receive_stats[video_receive_channel])); } } - std::vector> senders; + std::vector> senders; for (const auto& sender : transceiver->senders()) { - senders.push_back( - rtc::scoped_refptr(sender->internal())); + senders.push_back(scoped_refptr(sender->internal())); } - std::vector> receivers; + std::vector> receivers; for (const auto& receiver : transceiver->receivers()) { receivers.push_back( - rtc::scoped_refptr(receiver->internal())); + scoped_refptr(receiver->internal())); } stats.track_media_info_map.Initialize(std::move(voice_media_info), std::move(video_media_info), senders, receivers); + if (transceiver->media_type() == webrtc::MediaType::AUDIO) { + has_audio_receiver |= !receivers.empty(); + } } call_stats_ = pc_->GetCallStats(); - audio_device_stats_ = pc_->GetAudioDeviceStats(); + audio_device_stats_ = + has_audio_receiver ? 
pc_->GetAudioDeviceStats() : std::nullopt; }); for (auto& stats : transceiver_stats_infos_) { @@ -2187,14 +2240,4 @@ void RTCStatsCollector::OnSctpDataChannelStateChanged( } } -const char* CandidateTypeToRTCIceCandidateTypeForTesting( - const std::string& type) { - return CandidateTypeToRTCIceCandidateType(type); -} - -const char* DataStateToRTCDataChannelStateForTesting( - DataChannelInterface::DataState state) { - return DataStateToRTCDataChannelState(state); -} - } // namespace webrtc diff --git a/pc/rtc_stats_collector.h b/pc/rtc_stats_collector.h index e94d23944c..016afb45e4 100644 --- a/pc/rtc_stats_collector.h +++ b/pc/rtc_stats_collector.h @@ -16,35 +16,34 @@ #include #include #include +#include #include #include -#include "absl/types/optional.h" +#include "api/audio/audio_device.h" #include "api/data_channel_interface.h" +#include "api/environment/environment.h" #include "api/media_types.h" +#include "api/ref_count.h" +#include "api/rtp_transceiver_direction.h" #include "api/scoped_refptr.h" #include "api/stats/rtc_stats_collector_callback.h" #include "api/stats/rtc_stats_report.h" -#include "api/stats/rtcstats_objects.h" +#include "api/units/timestamp.h" #include "call/call.h" -#include "media/base/media_channel.h" -#include "modules/audio_device/include/audio_device.h" -#include "pc/data_channel_utils.h" #include "pc/peer_connection_internal.h" #include "pc/rtp_receiver.h" #include "pc/rtp_sender.h" #include "pc/rtp_transceiver.h" -#include "pc/sctp_data_channel.h" #include "pc/track_media_info_map.h" #include "pc/transport_stats.h" #include "rtc_base/checks.h" #include "rtc_base/containers/flat_set.h" #include "rtc_base/event.h" -#include "rtc_base/ref_count.h" #include "rtc_base/ssl_certificate.h" -#include "rtc_base/ssl_identity.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" namespace webrtc { @@ -56,11 +55,12 @@ class RtpReceiverInternal; // Stats are gathered on the signaling, worker and network threads // asynchronously. The callback is invoked on the signaling thread. Resulting // reports are cached for `cache_lifetime_` ms. -class RTCStatsCollector : public rtc::RefCountInterface { +class RTCStatsCollector : public RefCountInterface { public: - static rtc::scoped_refptr Create( + static scoped_refptr Create( PeerConnectionInternal* pc, - int64_t cache_lifetime_us = 50 * rtc::kNumMicrosecsPerMillisec); + const Environment& env, + int64_t cache_lifetime_us = 50 * kNumMicrosecsPerMillisec); // Gets a recent stats report. If there is a report cached that is still fresh // it is returned, otherwise new stats are gathered and returned. A report is @@ -69,15 +69,15 @@ class RTCStatsCollector : public rtc::RefCountInterface { // If the optional selector argument is used, stats are filtered according to // stats selection algorithm before delivery. // https://w3c.github.io/webrtc-pc/#dfn-stats-selection-algorithm - void GetStatsReport(rtc::scoped_refptr callback); + void GetStatsReport(scoped_refptr callback); // If `selector` is null the selection algorithm is still applied (interpreted // as: no RTP streams are sent by selector). The result is empty. - void GetStatsReport(rtc::scoped_refptr selector, - rtc::scoped_refptr callback); + void GetStatsReport(scoped_refptr selector, + scoped_refptr callback); // If `selector` is null the selection algorithm is still applied (interpreted // as: no RTP streams are received by selector). The result is empty. 
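The three GetStatsReport overloads above correspond to the three RequestInfo filter modes declared further down (kAll, kSenderSelector, kReceiverSelector); a null selector still selects its mode, which then yields an empty filtered report. A rough standalone sketch of that shape, with std::shared_ptr and toy selector types standing in for scoped_refptr and the real sender/receiver classes:

#include <cassert>
#include <iostream>
#include <memory>

struct SenderSketch {};
struct ReceiverSketch {};

class RequestInfoSketch {
 public:
  enum class FilterMode { kAll, kSenderSelector, kReceiverSelector };

  RequestInfoSketch() : mode_(FilterMode::kAll) {}
  explicit RequestInfoSketch(std::shared_ptr<SenderSketch> sender)
      : mode_(FilterMode::kSenderSelector), sender_(std::move(sender)) {}
  explicit RequestInfoSketch(std::shared_ptr<ReceiverSketch> receiver)
      : mode_(FilterMode::kReceiverSelector), receiver_(std::move(receiver)) {}

  FilterMode mode() const { return mode_; }

 private:
  FilterMode mode_;
  std::shared_ptr<SenderSketch> sender_;
  std::shared_ptr<ReceiverSketch> receiver_;
};

int main() {
  RequestInfoSketch all;                                          // kAll
  RequestInfoSketch by_sender(std::make_shared<SenderSketch>());  // sender filter
  // A null selector still selects "by sender"; the filtered report is empty.
  RequestInfoSketch by_null_sender(std::shared_ptr<SenderSketch>(nullptr));
  assert(by_null_sender.mode() == RequestInfoSketch::FilterMode::kSenderSelector);
  std::cout << static_cast<int>(by_sender.mode()) << "\n";  // 1
}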
- void GetStatsReport(rtc::scoped_refptr selector, - rtc::scoped_refptr callback); + void GetStatsReport(scoped_refptr selector, + scoped_refptr callback); // Clears the cache's reference to the most recent stats report. Subsequently // calling `GetStatsReport` guarantees fresh stats. This method must be called // any time the PeerConnection visibly changes as a result of an API call as @@ -95,12 +95,14 @@ class RTCStatsCollector : public rtc::RefCountInterface { DataChannelInterface::DataState state); protected: - RTCStatsCollector(PeerConnectionInternal* pc, int64_t cache_lifetime_us); + RTCStatsCollector(PeerConnectionInternal* pc, + const Environment& env, + int64_t cache_lifetime_us); ~RTCStatsCollector(); struct CertificateStatsPair { - std::unique_ptr local; - std::unique_ptr remote; + std::unique_ptr local; + std::unique_ptr remote; CertificateStatsPair Copy() const; }; @@ -111,8 +113,7 @@ class RTCStatsCollector : public rtc::RefCountInterface { RTCStatsReport* partial_report); virtual void ProducePartialResultsOnNetworkThreadImpl( Timestamp timestamp, - const std::map& - transport_stats_by_name, + const std::map& transport_stats_by_name, const std::map& transport_cert_stats, RTCStatsReport* partial_report); @@ -122,40 +123,39 @@ class RTCStatsCollector : public rtc::RefCountInterface { enum class FilterMode { kAll, kSenderSelector, kReceiverSelector }; // Constructs with FilterMode::kAll. - explicit RequestInfo( - rtc::scoped_refptr callback); + explicit RequestInfo(scoped_refptr callback); // Constructs with FilterMode::kSenderSelector. The selection algorithm is // applied even if `selector` is null, resulting in an empty report. - RequestInfo(rtc::scoped_refptr selector, - rtc::scoped_refptr callback); + RequestInfo(scoped_refptr selector, + scoped_refptr callback); // Constructs with FilterMode::kReceiverSelector. The selection algorithm is // applied even if `selector` is null, resulting in an empty report. - RequestInfo(rtc::scoped_refptr selector, - rtc::scoped_refptr callback); + RequestInfo(scoped_refptr selector, + scoped_refptr callback); FilterMode filter_mode() const { return filter_mode_; } - rtc::scoped_refptr callback() const { + scoped_refptr callback() const { return callback_; } - rtc::scoped_refptr sender_selector() const { + scoped_refptr sender_selector() const { RTC_DCHECK(filter_mode_ == FilterMode::kSenderSelector); return sender_selector_; } - rtc::scoped_refptr receiver_selector() const { + scoped_refptr receiver_selector() const { RTC_DCHECK(filter_mode_ == FilterMode::kReceiverSelector); return receiver_selector_; } private: RequestInfo(FilterMode filter_mode, - rtc::scoped_refptr callback, - rtc::scoped_refptr sender_selector, - rtc::scoped_refptr receiver_selector); + scoped_refptr callback, + scoped_refptr sender_selector, + scoped_refptr receiver_selector); FilterMode filter_mode_; - rtc::scoped_refptr callback_; - rtc::scoped_refptr sender_selector_; - rtc::scoped_refptr receiver_selector_; + scoped_refptr callback_; + scoped_refptr sender_selector_; + scoped_refptr receiver_selector_; }; void GetStatsReportInternal(RequestInfo request); @@ -168,17 +168,16 @@ class RTCStatsCollector : public rtc::RefCountInterface { // If a BaseChannel is not available (e.g., if signaling has not started), // then `mid` and `transport_name` will be null. 
struct RtpTransceiverStatsInfo { - rtc::scoped_refptr transceiver; - cricket::MediaType media_type; - absl::optional mid; - absl::optional transport_name; + scoped_refptr transceiver; + webrtc::MediaType media_type; + std::optional mid; + std::optional transport_name; TrackMediaInfoMap track_media_info_map; - absl::optional current_direction; + std::optional current_direction; }; - void DeliverCachedReport( - rtc::scoped_refptr cached_report, - std::vector requests); + void DeliverCachedReport(scoped_refptr cached_report, + std::vector requests); // Produces `RTCCertificateStats`. void ProduceCertificateStats_n( @@ -191,8 +190,7 @@ class RTCStatsCollector : public rtc::RefCountInterface { // Produces `RTCIceCandidatePairStats` and `RTCIceCandidateStats`. void ProduceIceCandidateAndPairStats_n( Timestamp timestamp, - const std::map& - transport_stats_by_name, + const std::map& transport_stats_by_name, const Call::Stats& call_stats, RTCStatsReport* report) const; // Produces RTCMediaSourceStats, including RTCAudioSourceStats and @@ -223,16 +221,14 @@ class RTCStatsCollector : public rtc::RefCountInterface { // Produces `RTCTransportStats`. void ProduceTransportStats_n( Timestamp timestamp, - const std::map& - transport_stats_by_name, + const std::map& transport_stats_by_name, const std::map& transport_cert_stats, RTCStatsReport* report) const; // Helper function to stats-producing functions. std::map PrepareTransportCertificateStats_n( - const std::map& - transport_stats_by_name); + const std::map& transport_stats_by_name); // The results are stored in `transceiver_stats_infos_` and `call_stats_`. void PrepareTransceiverStatsInfosAndCallStats_s_w_n(); @@ -240,39 +236,41 @@ class RTCStatsCollector : public rtc::RefCountInterface { void ProducePartialResultsOnSignalingThread(Timestamp timestamp); void ProducePartialResultsOnNetworkThread( Timestamp timestamp, - absl::optional sctp_transport_name); + std::optional sctp_transport_name); // Merges `network_report_` into `partial_report_` and completes the request. // This is a NO-OP if `network_report_` is null. void MergeNetworkReport_s(); - rtc::scoped_refptr CreateReportFilteredBySelector( + scoped_refptr CreateReportFilteredBySelector( bool filter_by_sender_selector, - rtc::scoped_refptr report, - rtc::scoped_refptr sender_selector, - rtc::scoped_refptr receiver_selector); + scoped_refptr report, + scoped_refptr sender_selector, + scoped_refptr receiver_selector); PeerConnectionInternal* const pc_; - rtc::Thread* const signaling_thread_; - rtc::Thread* const worker_thread_; - rtc::Thread* const network_thread_; + const Environment env_; + const bool stats_timestamp_with_environment_clock_; + Thread* const signaling_thread_; + Thread* const worker_thread_; + Thread* const network_thread_; int num_pending_partial_reports_; int64_t partial_report_timestamp_us_; // Reports that are produced on the signaling thread or the network thread are // merged into this report. It is only touched on the signaling thread. Once // all partial reports are merged this is the result of a request. - rtc::scoped_refptr partial_report_; + scoped_refptr partial_report_; std::vector requests_; // Holds the result of ProducePartialResultsOnNetworkThread(). It is merged // into `partial_report_` on the signaling thread and then nulled by // MergeNetworkReport_s(). Thread-safety is ensured by using // `network_report_event_`. 
-  rtc::scoped_refptr<RTCStatsReport> network_report_;
+  scoped_refptr<RTCStatsReport> network_report_;
   // If set, it is safe to touch the `network_report_` on the signaling thread.
   // This is reset before async-invoking ProducePartialResultsOnNetworkThread()
   // and set when ProducePartialResultsOnNetworkThread() is complete, after it
   // has updated the value of `network_report_`.
-  rtc::Event network_report_event_;
+  Event network_report_event_;
   // Cleared and set in `PrepareTransceiverStatsInfosAndCallStats_s_w_n`,
   // starting out on the signaling thread, then network. Later read on the
@@ -284,8 +282,8 @@ class RTCStatsCollector : public rtc::RefCountInterface {
   // now get rid of the variable and keep the data scoped within a stats
   // collection sequence.
   std::vector<RtpTransceiverStatsInfo> transceiver_stats_infos_;
-  // This cache avoids having to call rtc::SSLCertChain::GetStats(), which can
-  // relatively expensive. ClearCachedStatsReport() needs to be called on
+  // This cache avoids having to call webrtc::SSLCertChain::GetStats(), which
+  // can relatively expensive. ClearCachedStatsReport() needs to be called on
   // negotiation to ensure the cache is not obsolete.
   Mutex cached_certificates_mutex_;
   std::map<std::string, CertificateStatsPair> cached_certificates_by_transport_
@@ -293,7 +291,7 @@ class RTCStatsCollector : public rtc::RefCountInterface {
   Call::Stats call_stats_;
-  absl::optional<AudioDeviceModule::Stats> audio_device_stats_;
+  std::optional<AudioDeviceModule::Stats> audio_device_stats_;
   // A timestamp, in microseconds, that is based on a timer that is
   // monotonically increasing. That is, even if the system clock is modified the
   // report is.
   int64_t cache_timestamp_us_;
   int64_t cache_lifetime_us_;
-  rtc::scoped_refptr<const RTCStatsReport> cached_report_;
+  scoped_refptr<const RTCStatsReport> cached_report_;
   // Data recorded and maintained by the stats collector during its lifetime.
   // Some stats are produced from this record instead of other components.
@@ -317,16 +315,11 @@ class RTCStatsCollector : public rtc::RefCountInterface {
     uint32_t data_channels_closed;
     // Identifies channels that have been opened, whose internal id is stored in
     // the set until they have been fully closed.
- webrtc::flat_set opened_data_channels; + flat_set opened_data_channels; }; InternalRecord internal_record_; }; -const char* CandidateTypeToRTCIceCandidateTypeForTesting( - const std::string& type); -const char* DataStateToRTCDataChannelStateForTesting( - DataChannelInterface::DataState state); - } // namespace webrtc #endif // PC_RTC_STATS_COLLECTOR_H_ diff --git a/pc/rtc_stats_collector_unittest.cc b/pc/rtc_stats_collector_unittest.cc index 37821ac829..7ec09911ef 100644 --- a/pc/rtc_stats_collector_unittest.cc +++ b/pc/rtc_stats_collector_unittest.cc @@ -13,25 +13,39 @@ #include #include -#include #include +#include #include -#include +#include #include -#include #include #include +#include "absl/strings/str_cat.h" #include "absl/strings/str_replace.h" +#include "api/audio/audio_device.h" +#include "api/audio/audio_processing_statistics.h" #include "api/candidate.h" +#include "api/data_channel_interface.h" #include "api/dtls_transport_interface.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/make_ref_counted.h" #include "api/media_stream_interface.h" #include "api/media_stream_track.h" +#include "api/media_types.h" +#include "api/ref_count.h" #include "api/rtp_parameters.h" #include "api/rtp_transceiver_direction.h" +#include "api/scoped_refptr.h" +#include "api/stats/attribute.h" #include "api/stats/rtc_stats.h" +#include "api/stats/rtc_stats_collector_callback.h" #include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" +#include "api/test/rtc_error_matchers.h" +#include "api/transport/enums.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "api/video/recordable_encoded_frame.h" @@ -41,17 +55,22 @@ #include "api/video/video_source_interface.h" #include "api/video/video_timing.h" #include "api/video_codecs/scalability_mode.h" +#include "call/call.h" #include "common_video/include/quality_limitation_reason.h" +#include "json/reader.h" +#include "json/value.h" #include "media/base/media_channel.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing_statistics.h" +#include "media/base/stream_params.h" #include "modules/rtp_rtcp/include/report_block_data.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "p2p/base/connection_info.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/port.h" +#include "p2p/base/transport_description.h" #include "pc/media_stream.h" +#include "pc/peer_connection_internal.h" +#include "pc/sctp_data_channel.h" #include "pc/stream_collection.h" #include "pc/test/fake_data_channel_controller.h" #include "pc/test/fake_peer_connection_for_stats.h" @@ -59,98 +78,46 @@ #include "pc/test/mock_rtp_receiver_internal.h" #include "pc/test/mock_rtp_sender_internal.h" #include "pc/test/rtc_stats_obtainer.h" +#include "pc/transport_stats.h" #include "rtc_base/checks.h" #include "rtc_base/fake_clock.h" #include "rtc_base/fake_ssl_identity.h" -#include "rtc_base/gunit.h" #include "rtc_base/network_constants.h" #include "rtc_base/ref_counted_object.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/socket_address.h" +#include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_fingerprint.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/string_encode.h" -#include 
"rtc_base/strings/json.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread.h" #include "rtc_base/time_utils.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" using ::testing::_; -using ::testing::AtLeast; using ::testing::Invoke; using ::testing::Return; namespace webrtc { -// These are used by gtest code, such as if `EXPECT_EQ` fails. -void PrintTo(const RTCCertificateStats& stats, ::std::ostream* os) { - *os << stats.ToJson(); -} - -void PrintTo(const RTCCodecStats& stats, ::std::ostream* os) { - *os << stats.ToJson(); -} - -void PrintTo(const RTCDataChannelStats& stats, ::std::ostream* os) { - *os << stats.ToJson(); -} - -void PrintTo(const RTCIceCandidatePairStats& stats, ::std::ostream* os) { - *os << stats.ToJson(); -} - -void PrintTo(const RTCLocalIceCandidateStats& stats, ::std::ostream* os) { - *os << stats.ToJson(); -} - -void PrintTo(const RTCRemoteIceCandidateStats& stats, ::std::ostream* os) { - *os << stats.ToJson(); -} - -void PrintTo(const RTCPeerConnectionStats& stats, ::std::ostream* os) { - *os << stats.ToJson(); -} - -void PrintTo(const RTCInboundRtpStreamStats& stats, ::std::ostream* os) { - *os << stats.ToJson(); -} - -void PrintTo(const RTCOutboundRtpStreamStats& stats, ::std::ostream* os) { - *os << stats.ToJson(); -} - -void PrintTo(const RTCRemoteInboundRtpStreamStats& stats, ::std::ostream* os) { - *os << stats.ToJson(); -} - -void PrintTo(const RTCAudioSourceStats& stats, ::std::ostream* os) { - *os << stats.ToJson(); -} - -void PrintTo(const RTCVideoSourceStats& stats, ::std::ostream* os) { - *os << stats.ToJson(); -} - -void PrintTo(const RTCTransportStats& stats, ::std::ostream* os) { - *os << stats.ToJson(); -} - namespace { const int64_t kGetStatsReportTimeoutMs = 1000; // Fake data used by `SetupExampleStatsVoiceGraph()` to fill in remote outbound // stats. -constexpr int64_t kRemoteOutboundStatsTimestampMs = 123; -constexpr int64_t kRemoteOutboundStatsRemoteTimestampMs = 456; +constexpr Timestamp kRemoteOutboundStatsTimestamp = Timestamp::Millis(123); +constexpr Timestamp kRemoteOutboundStatsRemoteTimestamp = + Timestamp::Millis(456); constexpr uint32_t kRemoteOutboundStatsPacketsSent = 7u; constexpr uint64_t kRemoteOutboundStatsBytesSent = 8u; constexpr uint64_t kRemoteOutboundStatsReportsCount = 9u; struct CertificateInfo { - rtc::scoped_refptr certificate; + scoped_refptr certificate; std::vector ders; std::vector pems; std::vector fingerprints; @@ -178,13 +145,12 @@ std::unique_ptr CreateFakeCertificateAndInfoFromDers( std::unique_ptr info(new CertificateInfo()); info->ders = ders; for (const std::string& der : ders) { - info->pems.push_back(rtc::SSLIdentity::DerToPem( + info->pems.push_back(SSLIdentity::DerToPem( "CERTIFICATE", reinterpret_cast(der.c_str()), der.length())); } - info->certificate = - rtc::RTCCertificate::Create(std::unique_ptr( - new rtc::FakeSSLIdentity(info->pems))); + info->certificate = RTCCertificate::Create( + std::unique_ptr(new FakeSSLIdentity(info->pems))); // Strip header/footer and newline characters of PEM strings. for (size_t i = 0; i < info->pems.size(); ++i) { absl::StrReplaceAll({{"-----BEGIN CERTIFICATE-----", ""}, @@ -194,10 +160,10 @@ std::unique_ptr CreateFakeCertificateAndInfoFromDers( } // Fingerprints for the whole certificate chain, starting with leaf // certificate. 
- const rtc::SSLCertChain& chain = info->certificate->GetSSLCertificateChain(); - std::unique_ptr fp; + const SSLCertChain& chain = info->certificate->GetSSLCertificateChain(); + std::unique_ptr fp; for (size_t i = 0; i < chain.GetSize(); i++) { - fp = rtc::SSLFingerprint::Create("sha-1", chain.Get(i)); + fp = SSLFingerprint::Create("sha-1", chain.Get(i)); EXPECT_TRUE(fp); info->fingerprints.push_back(fp->GetRfc4572Fingerprint()); } @@ -205,25 +171,21 @@ std::unique_ptr CreateFakeCertificateAndInfoFromDers( return info; } -std::unique_ptr CreateFakeCandidate( +std::unique_ptr CreateFakeCandidate( const std::string& hostname, int port, const std::string& protocol, - const rtc::AdapterType adapter_type, - const std::string& candidate_type, + const AdapterType adapter_type, + IceCandidateType candidate_type, uint32_t priority, - const rtc::AdapterType underlying_type_for_vpn = - rtc::ADAPTER_TYPE_UNKNOWN) { - std::unique_ptr candidate(new cricket::Candidate()); - candidate->set_address(rtc::SocketAddress(hostname, port)); - candidate->set_protocol(protocol); + const AdapterType underlying_type_for_vpn = ADAPTER_TYPE_UNKNOWN) { + std::unique_ptr candidate(new Candidate( + ICE_CANDIDATE_COMPONENT_RTP, protocol, SocketAddress(hostname, port), + priority, "iceusernamefragment", "" /* pwd */, candidate_type, + 0 /* generation */, "foundationIsAString")); + candidate->set_network_type(adapter_type); candidate->set_underlying_type_for_vpn(underlying_type_for_vpn); - candidate->set_type(candidate_type); - candidate->set_priority(priority); - // Defaults for testing. - candidate->set_foundation("foundationIsAString"); - candidate->set_username("iceusernamefragment"); return candidate; } @@ -244,15 +206,14 @@ class FakeAudioProcessor : public AudioProcessorInterface { class FakeAudioTrackForStats : public MediaStreamTrack { public: - static rtc::scoped_refptr Create( + static scoped_refptr Create( const std::string& id, MediaStreamTrackInterface::TrackState state, bool create_fake_audio_processor) { - auto audio_track_stats = rtc::make_ref_counted(id); + auto audio_track_stats = make_ref_counted(id); audio_track_stats->set_state(state); if (create_fake_audio_processor) { - audio_track_stats->processor_ = - rtc::make_ref_counted(); + audio_track_stats->processor_ = make_ref_counted(); } return audio_track_stats; } @@ -263,25 +224,24 @@ class FakeAudioTrackForStats : public MediaStreamTrack { std::string kind() const override { return MediaStreamTrackInterface::kAudioKind; } - webrtc::AudioSourceInterface* GetSource() const override { return nullptr; } - void AddSink(webrtc::AudioTrackSinkInterface* sink) override {} - void RemoveSink(webrtc::AudioTrackSinkInterface* sink) override {} + AudioSourceInterface* GetSource() const override { return nullptr; } + void AddSink(AudioTrackSinkInterface* sink) override {} + void RemoveSink(AudioTrackSinkInterface* sink) override {} bool GetSignalLevel(int* level) override { return false; } - rtc::scoped_refptr GetAudioProcessor() override { + scoped_refptr GetAudioProcessor() override { return processor_; } private: - rtc::scoped_refptr processor_; + scoped_refptr processor_; }; class FakeVideoTrackSourceForStats : public VideoTrackSourceInterface { public: - static rtc::scoped_refptr Create( - int input_width, - int input_height) { - return rtc::make_ref_counted(input_width, - input_height); + static scoped_refptr Create(int input_width, + int input_height) { + return make_ref_counted(input_width, + input_height); } FakeVideoTrackSourceForStats(int input_width, 
int input_height) @@ -290,7 +250,7 @@ class FakeVideoTrackSourceForStats : public VideoTrackSourceInterface { // VideoTrackSourceInterface bool is_screencast() const override { return false; } - absl::optional needs_denoising() const override { return false; } + std::optional needs_denoising() const override { return false; } bool GetStats(VideoTrackSourceInterface::Stats* stats) override { stats->input_width = input_width_; stats->input_height = input_height_; @@ -304,16 +264,17 @@ class FakeVideoTrackSourceForStats : public VideoTrackSourceInterface { // NotifierInterface (part of MediaSourceInterface) void RegisterObserver(ObserverInterface* observer) override {} void UnregisterObserver(ObserverInterface* observer) override {} - // rtc::VideoSourceInterface (part of VideoTrackSourceInterface) - void AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override {} - void RemoveSink(rtc::VideoSinkInterface* sink) override {} + // webrtc::VideoSourceInterface (part of + // VideoTrackSourceInterface) + void AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) override {} + void RemoveSink(VideoSinkInterface* sink) override {} bool SupportsEncodedOutput() const override { return false; } void GenerateKeyFrame() override {} void AddEncodedSink( - rtc::VideoSinkInterface* sink) override {} + VideoSinkInterface* sink) override {} void RemoveEncodedSink( - rtc::VideoSinkInterface* sink) override {} + VideoSinkInterface* sink) override {} private: int input_width_; @@ -322,62 +283,62 @@ class FakeVideoTrackSourceForStats : public VideoTrackSourceInterface { class FakeVideoTrackForStats : public MediaStreamTrack { public: - static rtc::scoped_refptr Create( + static scoped_refptr Create( const std::string& id, MediaStreamTrackInterface::TrackState state, - rtc::scoped_refptr source) { + scoped_refptr source) { auto video_track = - rtc::make_ref_counted(id, std::move(source)); + make_ref_counted(id, std::move(source)); video_track->set_state(state); return video_track; } FakeVideoTrackForStats(const std::string& id, - rtc::scoped_refptr source) + scoped_refptr source) : MediaStreamTrack(id), source_(source) {} std::string kind() const override { return MediaStreamTrackInterface::kVideoKind; } - void AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override {} - void RemoveSink(rtc::VideoSinkInterface* sink) override {} + void AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) override {} + void RemoveSink(VideoSinkInterface* sink) override {} VideoTrackSourceInterface* GetSource() const override { return source_.get(); } private: - rtc::scoped_refptr source_; + scoped_refptr source_; }; -rtc::scoped_refptr CreateFakeTrack( - cricket::MediaType media_type, +scoped_refptr CreateFakeTrack( + webrtc::MediaType media_type, const std::string& track_id, MediaStreamTrackInterface::TrackState track_state, bool create_fake_audio_processor = false) { - if (media_type == cricket::MEDIA_TYPE_AUDIO) { + if (media_type == webrtc::MediaType::AUDIO) { return FakeAudioTrackForStats::Create(track_id, track_state, create_fake_audio_processor); } else { - RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO); + RTC_DCHECK_EQ(media_type, webrtc::MediaType::VIDEO); return FakeVideoTrackForStats::Create(track_id, track_state, nullptr); } } -rtc::scoped_refptr CreateMockSender( - cricket::MediaType media_type, - rtc::scoped_refptr track, +scoped_refptr CreateMockSender( + webrtc::MediaType media_type, + scoped_refptr 
track, uint32_t ssrc, int attachment_id, std::vector local_stream_ids) { RTC_DCHECK(!track || (track->kind() == MediaStreamTrackInterface::kAudioKind && - media_type == cricket::MEDIA_TYPE_AUDIO) || + media_type == webrtc::MediaType::AUDIO) || (track->kind() == MediaStreamTrackInterface::kVideoKind && - media_type == cricket::MEDIA_TYPE_VIDEO)); - auto sender = rtc::make_ref_counted(); + media_type == webrtc::MediaType::VIDEO)); + auto sender = make_ref_counted(); EXPECT_CALL(*sender, track()).WillRepeatedly(Return(track)); EXPECT_CALL(*sender, ssrc()).WillRepeatedly(Return(ssrc)); EXPECT_CALL(*sender, media_type()).WillRepeatedly(Return(media_type)); @@ -396,24 +357,24 @@ rtc::scoped_refptr CreateMockSender( return sender; } -rtc::scoped_refptr CreateMockReceiver( - const rtc::scoped_refptr& track, +scoped_refptr CreateMockReceiver( + const scoped_refptr& track, uint32_t ssrc, int attachment_id) { - auto receiver = rtc::make_ref_counted(); + auto receiver = make_ref_counted(); EXPECT_CALL(*receiver, track()).WillRepeatedly(Return(track)); EXPECT_CALL(*receiver, ssrc()).WillRepeatedly(Invoke([ssrc]() { return ssrc; })); EXPECT_CALL(*receiver, streams()) .WillRepeatedly( - Return(std::vector>({}))); + Return(std::vector>({}))); EXPECT_CALL(*receiver, media_type()) .WillRepeatedly( Return(track->kind() == MediaStreamTrackInterface::kAudioKind - ? cricket::MEDIA_TYPE_AUDIO - : cricket::MEDIA_TYPE_VIDEO)); + ? webrtc::MediaType::AUDIO + : webrtc::MediaType::VIDEO)); EXPECT_CALL(*receiver, GetParameters()).WillRepeatedly(Invoke([ssrc]() { RtpParameters params; params.encodings.push_back(RtpEncodingParameters()); @@ -428,106 +389,107 @@ rtc::scoped_refptr CreateMockReceiver( class RTCStatsCollectorWrapper { public: explicit RTCStatsCollectorWrapper( - rtc::scoped_refptr pc) + scoped_refptr pc, + const Environment& env) : pc_(pc), stats_collector_( RTCStatsCollector::Create(pc.get(), - 50 * rtc::kNumMicrosecsPerMillisec)) {} + env, + 50 * kNumMicrosecsPerMillisec)) {} - rtc::scoped_refptr stats_collector() { + scoped_refptr stats_collector() { return stats_collector_; } - rtc::scoped_refptr GetStatsReport() { - rtc::scoped_refptr callback = RTCStatsObtainer::Create(); + scoped_refptr GetStatsReport() { + scoped_refptr callback = RTCStatsObtainer::Create(); stats_collector_->GetStatsReport(callback); return WaitForReport(callback); } - rtc::scoped_refptr GetStatsReportWithSenderSelector( - rtc::scoped_refptr selector) { - rtc::scoped_refptr callback = RTCStatsObtainer::Create(); + scoped_refptr GetStatsReportWithSenderSelector( + scoped_refptr selector) { + scoped_refptr callback = RTCStatsObtainer::Create(); stats_collector_->GetStatsReport(selector, callback); return WaitForReport(callback); } - rtc::scoped_refptr GetStatsReportWithReceiverSelector( - rtc::scoped_refptr selector) { - rtc::scoped_refptr callback = RTCStatsObtainer::Create(); + scoped_refptr GetStatsReportWithReceiverSelector( + scoped_refptr selector) { + scoped_refptr callback = RTCStatsObtainer::Create(); stats_collector_->GetStatsReport(selector, callback); return WaitForReport(callback); } - rtc::scoped_refptr GetFreshStatsReport() { + scoped_refptr GetFreshStatsReport() { stats_collector_->ClearCachedStatsReport(); return GetStatsReport(); } - rtc::scoped_refptr SetupLocalTrackAndSender( - cricket::MediaType media_type, + scoped_refptr SetupLocalTrackAndSender( + webrtc::MediaType media_type, const std::string& track_id, uint32_t ssrc, bool add_stream, int attachment_id) { - rtc::scoped_refptr local_stream; + scoped_refptr 
local_stream; if (add_stream) { local_stream = MediaStream::Create("LocalStreamId"); pc_->mutable_local_streams()->AddStream(local_stream); } - rtc::scoped_refptr track; - if (media_type == cricket::MEDIA_TYPE_AUDIO) { + scoped_refptr track; + if (media_type == webrtc::MediaType::AUDIO) { track = CreateFakeTrack(media_type, track_id, MediaStreamTrackInterface::kLive); if (add_stream) { - local_stream->AddTrack(rtc::scoped_refptr( + local_stream->AddTrack(scoped_refptr( static_cast(track.get()))); } } else { track = CreateFakeTrack(media_type, track_id, MediaStreamTrackInterface::kLive); if (add_stream) { - local_stream->AddTrack(rtc::scoped_refptr( + local_stream->AddTrack(scoped_refptr( static_cast(track.get()))); } } - rtc::scoped_refptr sender = + scoped_refptr sender = CreateMockSender(media_type, track, ssrc, attachment_id, {}); EXPECT_CALL(*sender, Stop()); EXPECT_CALL(*sender, SetMediaChannel(_)); + EXPECT_CALL(*sender, SetSendCodecs(_)); pc_->AddSender(sender); return sender; } - rtc::scoped_refptr SetupRemoteTrackAndReceiver( - cricket::MediaType media_type, + scoped_refptr SetupRemoteTrackAndReceiver( + webrtc::MediaType media_type, const std::string& track_id, const std::string& stream_id, uint32_t ssrc) { - rtc::scoped_refptr remote_stream = - MediaStream::Create(stream_id); + scoped_refptr remote_stream = MediaStream::Create(stream_id); pc_->mutable_remote_streams()->AddStream(remote_stream); - rtc::scoped_refptr track; - if (media_type == cricket::MEDIA_TYPE_AUDIO) { + scoped_refptr track; + if (media_type == webrtc::MediaType::AUDIO) { track = CreateFakeTrack(media_type, track_id, MediaStreamTrackInterface::kLive); - remote_stream->AddTrack(rtc::scoped_refptr( + remote_stream->AddTrack(scoped_refptr( static_cast(track.get()))); } else { track = CreateFakeTrack(media_type, track_id, MediaStreamTrackInterface::kLive); - remote_stream->AddTrack(rtc::scoped_refptr( + remote_stream->AddTrack(scoped_refptr( static_cast(track.get()))); } - rtc::scoped_refptr receiver = + scoped_refptr receiver = CreateMockReceiver(track, ssrc, 62); EXPECT_CALL(*receiver, streams()) - .WillRepeatedly( - Return(std::vector>( - {remote_stream}))); + .WillRepeatedly(Return( + std::vector>({remote_stream}))); EXPECT_CALL(*receiver, SetMediaChannel(_)).WillRepeatedly(Return()); pc_->AddReceiver(receiver); return receiver; @@ -541,53 +503,53 @@ class RTCStatsCollectorWrapper { // Senders get assigned attachment ID "ssrc + 10". 
void CreateMockRtpSendersReceiversAndChannels( std::initializer_list< - std::pair> + std::pair> local_audio_track_info_pairs, std::initializer_list< - std::pair> + std::pair> remote_audio_track_info_pairs, std::initializer_list< - std::pair> + std::pair> local_video_track_info_pairs, std::initializer_list< - std::pair> + std::pair> remote_video_track_info_pairs, std::vector local_stream_ids, - std::vector> remote_streams) { - cricket::VoiceMediaInfo voice_media_info; - cricket::VideoMediaInfo video_media_info; + std::vector> remote_streams) { + VoiceMediaInfo voice_media_info; + VideoMediaInfo video_media_info; // Local audio tracks and voice sender infos for (auto& pair : local_audio_track_info_pairs) { MediaStreamTrackInterface* local_audio_track = pair.first; - const cricket::VoiceSenderInfo& voice_sender_info = pair.second; + const VoiceSenderInfo& voice_sender_info = pair.second; RTC_DCHECK_EQ(local_audio_track->kind(), MediaStreamTrackInterface::kAudioKind); voice_media_info.senders.push_back(voice_sender_info); - rtc::scoped_refptr rtp_sender = CreateMockSender( - cricket::MEDIA_TYPE_AUDIO, - rtc::scoped_refptr(local_audio_track), + scoped_refptr rtp_sender = CreateMockSender( + webrtc::MediaType::AUDIO, + scoped_refptr(local_audio_track), voice_sender_info.local_stats[0].ssrc, voice_sender_info.local_stats[0].ssrc + 10, local_stream_ids); EXPECT_CALL(*rtp_sender, SetMediaChannel(_)).WillRepeatedly(Return()); EXPECT_CALL(*rtp_sender, Stop()); + EXPECT_CALL(*rtp_sender, SetSendCodecs(_)); pc_->AddSender(rtp_sender); } // Remote audio tracks and voice receiver infos for (auto& pair : remote_audio_track_info_pairs) { MediaStreamTrackInterface* remote_audio_track = pair.first; - const cricket::VoiceReceiverInfo& voice_receiver_info = pair.second; + const VoiceReceiverInfo& voice_receiver_info = pair.second; RTC_DCHECK_EQ(remote_audio_track->kind(), MediaStreamTrackInterface::kAudioKind); voice_media_info.receivers.push_back(voice_receiver_info); - rtc::scoped_refptr rtp_receiver = - CreateMockReceiver( - rtc::scoped_refptr(remote_audio_track), - voice_receiver_info.local_stats[0].ssrc, - voice_receiver_info.local_stats[0].ssrc + 10); + scoped_refptr rtp_receiver = CreateMockReceiver( + scoped_refptr(remote_audio_track), + voice_receiver_info.local_stats[0].ssrc, + voice_receiver_info.local_stats[0].ssrc + 10); EXPECT_CALL(*rtp_receiver, streams()) .WillRepeatedly(Return(remote_streams)); EXPECT_CALL(*rtp_receiver, SetMediaChannel(_)).WillRepeatedly(Return()); @@ -597,35 +559,35 @@ class RTCStatsCollectorWrapper { // Local video tracks and video sender infos for (auto& pair : local_video_track_info_pairs) { MediaStreamTrackInterface* local_video_track = pair.first; - const cricket::VideoSenderInfo& video_sender_info = pair.second; + const VideoSenderInfo& video_sender_info = pair.second; RTC_DCHECK_EQ(local_video_track->kind(), MediaStreamTrackInterface::kVideoKind); video_media_info.senders.push_back(video_sender_info); video_media_info.aggregated_senders.push_back(video_sender_info); - rtc::scoped_refptr rtp_sender = CreateMockSender( - cricket::MEDIA_TYPE_VIDEO, - rtc::scoped_refptr(local_video_track), + scoped_refptr rtp_sender = CreateMockSender( + webrtc::MediaType::VIDEO, + scoped_refptr(local_video_track), video_sender_info.local_stats[0].ssrc, video_sender_info.local_stats[0].ssrc + 10, local_stream_ids); EXPECT_CALL(*rtp_sender, SetMediaChannel(_)).WillRepeatedly(Return()); EXPECT_CALL(*rtp_sender, Stop()); + EXPECT_CALL(*rtp_sender, SetSendCodecs(_)); 
pc_->AddSender(rtp_sender); } // Remote video tracks and video receiver infos for (auto& pair : remote_video_track_info_pairs) { MediaStreamTrackInterface* remote_video_track = pair.first; - const cricket::VideoReceiverInfo& video_receiver_info = pair.second; + const VideoReceiverInfo& video_receiver_info = pair.second; RTC_DCHECK_EQ(remote_video_track->kind(), MediaStreamTrackInterface::kVideoKind); video_media_info.receivers.push_back(video_receiver_info); - rtc::scoped_refptr rtp_receiver = - CreateMockReceiver( - rtc::scoped_refptr(remote_video_track), - video_receiver_info.local_stats[0].ssrc, - video_receiver_info.local_stats[0].ssrc + 10); + scoped_refptr rtp_receiver = CreateMockReceiver( + scoped_refptr(remote_video_track), + video_receiver_info.local_stats[0].ssrc, + video_receiver_info.local_stats[0].ssrc + 10); EXPECT_CALL(*rtp_receiver, streams()) .WillRepeatedly(Return(remote_streams)); EXPECT_CALL(*rtp_receiver, SetMediaChannel(_)).WillRepeatedly(Return()); @@ -637,10 +599,14 @@ class RTCStatsCollectorWrapper { } private: - rtc::scoped_refptr WaitForReport( - rtc::scoped_refptr callback) { - EXPECT_TRUE_WAIT(callback->report() != nullptr, kGetStatsReportTimeoutMs); - int64_t after = rtc::TimeUTCMicros(); + scoped_refptr WaitForReport( + scoped_refptr callback) { + EXPECT_THAT( + WaitUntil( + [&] { return callback->report() != nullptr; }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kGetStatsReportTimeoutMs)}), + IsRtcOk()); + int64_t after = TimeUTCMicros(); for (const RTCStats& stats : *callback->report()) { if (stats.type() == RTCRemoteInboundRtpStreamStats::kType || stats.type() == RTCRemoteOutboundRtpStreamStats::kType) { @@ -652,20 +618,20 @@ class RTCStatsCollectorWrapper { return callback->report(); } - rtc::scoped_refptr pc_; - rtc::scoped_refptr stats_collector_; + scoped_refptr pc_; + scoped_refptr stats_collector_; }; class RTCStatsCollectorTest : public ::testing::Test { public: RTCStatsCollectorTest() - : pc_(rtc::make_ref_counted()), - stats_(new RTCStatsCollectorWrapper(pc_)), + : pc_(make_ref_counted()), + stats_(new RTCStatsCollectorWrapper(pc_, CreateEnvironment())), data_channel_controller_( new FakeDataChannelController(pc_->network_thread())) {} void ExpectReportContainsCertificateInfo( - const rtc::scoped_refptr& report, + const scoped_refptr& report, const CertificateInfo& certinfo) { for (size_t i = 0; i < certinfo.fingerprints.size(); ++i) { RTCCertificateStats expected_certificate_stats( @@ -685,7 +651,7 @@ class RTCStatsCollectorTest : public ::testing::Test { } const RTCCertificateStats* GetCertificateStatsFromFingerprint( - const rtc::scoped_refptr& report, + const scoped_refptr& report, const std::string& fingerprint) { auto certificates = report->GetStatsOfType(); for (const auto* certificate : certificates) { @@ -697,10 +663,10 @@ class RTCStatsCollectorTest : public ::testing::Test { } struct ExampleStatsGraph { - rtc::scoped_refptr sender; - rtc::scoped_refptr receiver; + scoped_refptr sender; + scoped_refptr receiver; - rtc::scoped_refptr full_report; + scoped_refptr full_report; std::string send_codec_id; std::string recv_codec_id; std::string outbound_rtp_id; @@ -722,7 +688,7 @@ class RTCStatsCollectorTest : public ::testing::Test { // codec (send) graph.send_codec_id = "COTTransportName1_1"; - cricket::VideoMediaInfo video_media_info; + VideoMediaInfo video_media_info; RtpCodecParameters send_codec; send_codec.payload_type = 1; send_codec.clock_rate = 0; @@ -737,17 +703,15 @@ class RTCStatsCollectorTest : public 
::testing::Test { std::make_pair(recv_codec.payload_type, recv_codec)); // outbound-rtp graph.outbound_rtp_id = "OTTransportName1V3"; - video_media_info.senders.push_back(cricket::VideoSenderInfo()); - video_media_info.senders[0].local_stats.push_back( - cricket::SsrcSenderInfo()); + video_media_info.senders.push_back(VideoSenderInfo()); + video_media_info.senders[0].local_stats.push_back(SsrcSenderInfo()); video_media_info.senders[0].local_stats[0].ssrc = 3; video_media_info.senders[0].codec_payload_type = send_codec.payload_type; video_media_info.aggregated_senders.push_back(video_media_info.senders[0]); // inbound-rtp graph.inbound_rtp_id = "ITTransportName1V4"; - video_media_info.receivers.push_back(cricket::VideoReceiverInfo()); - video_media_info.receivers[0].local_stats.push_back( - cricket::SsrcReceiverInfo()); + video_media_info.receivers.push_back(VideoReceiverInfo()); + video_media_info.receivers[0].local_stats.push_back(SsrcReceiverInfo()); video_media_info.receivers[0].local_stats[0].ssrc = 4; video_media_info.receivers[0].codec_payload_type = recv_codec.payload_type; // transport @@ -755,14 +719,14 @@ class RTCStatsCollectorTest : public ::testing::Test { pc_->AddVideoChannel("VideoMid", "TransportName", video_media_info); // outbound-rtp's sender graph.sender = stats_->SetupLocalTrackAndSender( - cricket::MEDIA_TYPE_VIDEO, "LocalVideoTrackID", 3, false, 50); + webrtc::MediaType::VIDEO, "LocalVideoTrackID", 3, false, 50); // inbound-rtp's receiver graph.receiver = stats_->SetupRemoteTrackAndReceiver( - cricket::MEDIA_TYPE_VIDEO, "RemoteVideoTrackID", "RemoteStreamId", 4); + webrtc::MediaType::VIDEO, "RemoteVideoTrackID", "RemoteStreamId", 4); // peer-connection graph.peer_connection_id = "P"; // media-source (kind: video) - graph.media_source_id = "SV" + rtc::ToString(graph.sender->AttachmentId()); + graph.media_source_id = "SV" + absl::StrCat(graph.sender->AttachmentId()); // Expected stats graph: // @@ -812,7 +776,7 @@ class RTCStatsCollectorTest : public ::testing::Test { // codec (send) graph.send_codec_id = "COTTransportName1_1"; - cricket::VoiceMediaInfo media_info; + VoiceMediaInfo media_info; RtpCodecParameters send_codec; send_codec.payload_type = 1; send_codec.clock_rate = 0; @@ -827,23 +791,23 @@ class RTCStatsCollectorTest : public ::testing::Test { std::make_pair(recv_codec.payload_type, recv_codec)); // outbound-rtp graph.outbound_rtp_id = "OTTransportName1A3"; - media_info.senders.push_back(cricket::VoiceSenderInfo()); - media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo()); + media_info.senders.push_back(VoiceSenderInfo()); + media_info.senders[0].local_stats.push_back(SsrcSenderInfo()); media_info.senders[0].local_stats[0].ssrc = kLocalSsrc; media_info.senders[0].codec_payload_type = send_codec.payload_type; // inbound-rtp graph.inbound_rtp_id = "ITTransportName1A4"; - media_info.receivers.push_back(cricket::VoiceReceiverInfo()); - media_info.receivers[0].local_stats.push_back(cricket::SsrcReceiverInfo()); + media_info.receivers.push_back(VoiceReceiverInfo()); + media_info.receivers[0].local_stats.push_back(SsrcReceiverInfo()); media_info.receivers[0].local_stats[0].ssrc = kRemoteSsrc; media_info.receivers[0].codec_payload_type = recv_codec.payload_type; // remote-outbound-rtp if (add_remote_outbound_stats) { graph.remote_outbound_rtp_id = "ROA4"; - media_info.receivers[0].last_sender_report_timestamp_ms = - kRemoteOutboundStatsTimestampMs; - media_info.receivers[0].last_sender_report_remote_timestamp_ms = - 
kRemoteOutboundStatsRemoteTimestampMs; + media_info.receivers[0].last_sender_report_utc_timestamp = + kRemoteOutboundStatsTimestamp; + media_info.receivers[0].last_sender_report_remote_utc_timestamp = + kRemoteOutboundStatsRemoteTimestamp; media_info.receivers[0].sender_reports_packets_sent = kRemoteOutboundStatsPacketsSent; media_info.receivers[0].sender_reports_bytes_sent = @@ -856,15 +820,15 @@ class RTCStatsCollectorTest : public ::testing::Test { pc_->AddVoiceChannel("VoiceMid", "TransportName", media_info); // outbound-rtp's sender graph.sender = stats_->SetupLocalTrackAndSender( - cricket::MEDIA_TYPE_AUDIO, "LocalAudioTrackID", kLocalSsrc, false, 50); + webrtc::MediaType::AUDIO, "LocalAudioTrackID", kLocalSsrc, false, 50); // inbound-rtp's receiver graph.receiver = stats_->SetupRemoteTrackAndReceiver( - cricket::MEDIA_TYPE_AUDIO, "RemoteAudioTrackID", "RemoteStreamId", + webrtc::MediaType::AUDIO, "RemoteAudioTrackID", "RemoteStreamId", kRemoteSsrc); // peer-connection graph.peer_connection_id = "P"; // media-source (kind: video) - graph.media_source_id = "SA" + rtc::ToString(graph.sender->AttachmentId()); + graph.media_source_id = "SA" + absl::StrCat(graph.sender->AttachmentId()); // Expected stats graph: // @@ -908,27 +872,43 @@ class RTCStatsCollectorTest : public ::testing::Test { } protected: - rtc::ScopedFakeClock fake_clock_; - rtc::AutoThread main_thread_; - rtc::scoped_refptr pc_; + ScopedFakeClock fake_clock_; + AutoThread main_thread_; + scoped_refptr pc_; std::unique_ptr stats_; std::unique_ptr data_channel_controller_; }; TEST_F(RTCStatsCollectorTest, SingleCallback) { - rtc::scoped_refptr result; + scoped_refptr result; stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&result)); - EXPECT_TRUE_WAIT(result != nullptr, kGetStatsReportTimeoutMs); + EXPECT_THAT( + WaitUntil( + [&] { return result != nullptr; }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kGetStatsReportTimeoutMs)}), + IsRtcOk()); } TEST_F(RTCStatsCollectorTest, MultipleCallbacks) { - rtc::scoped_refptr a, b, c; + scoped_refptr a, b, c; stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&a)); stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&b)); stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&c)); - EXPECT_TRUE_WAIT(a != nullptr, kGetStatsReportTimeoutMs); - EXPECT_TRUE_WAIT(b != nullptr, kGetStatsReportTimeoutMs); - EXPECT_TRUE_WAIT(c != nullptr, kGetStatsReportTimeoutMs); + EXPECT_THAT( + WaitUntil( + [&] { return a != nullptr; }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kGetStatsReportTimeoutMs)}), + IsRtcOk()); + EXPECT_THAT( + WaitUntil( + [&] { return b != nullptr; }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kGetStatsReportTimeoutMs)}), + IsRtcOk()); + EXPECT_THAT( + WaitUntil( + [&] { return c != nullptr; }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kGetStatsReportTimeoutMs)}), + IsRtcOk()); EXPECT_EQ(a.get(), b.get()); EXPECT_EQ(b.get(), c.get()); @@ -936,30 +916,42 @@ TEST_F(RTCStatsCollectorTest, MultipleCallbacks) { TEST_F(RTCStatsCollectorTest, CachedStatsReports) { // Caching should ensure `a` and `b` are the same report. - rtc::scoped_refptr a = stats_->GetStatsReport(); - rtc::scoped_refptr b = stats_->GetStatsReport(); + scoped_refptr a = stats_->GetStatsReport(); + scoped_refptr b = stats_->GetStatsReport(); EXPECT_EQ(a.get(), b.get()); // Invalidate cache by clearing it. 
stats_->stats_collector()->ClearCachedStatsReport(); - rtc::scoped_refptr c = stats_->GetStatsReport(); + scoped_refptr c = stats_->GetStatsReport(); EXPECT_NE(b.get(), c.get()); // Invalidate cache by advancing time. fake_clock_.AdvanceTime(TimeDelta::Millis(51)); - rtc::scoped_refptr d = stats_->GetStatsReport(); + scoped_refptr d = stats_->GetStatsReport(); EXPECT_TRUE(d); EXPECT_NE(c.get(), d.get()); } TEST_F(RTCStatsCollectorTest, MultipleCallbacksWithInvalidatedCacheInBetween) { - rtc::scoped_refptr a, b, c; + scoped_refptr a, b, c; stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&a)); stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&b)); // Cache is invalidated after 50 ms. fake_clock_.AdvanceTime(TimeDelta::Millis(51)); stats_->stats_collector()->GetStatsReport(RTCStatsObtainer::Create(&c)); - EXPECT_TRUE_WAIT(a != nullptr, kGetStatsReportTimeoutMs); - EXPECT_TRUE_WAIT(b != nullptr, kGetStatsReportTimeoutMs); - EXPECT_TRUE_WAIT(c != nullptr, kGetStatsReportTimeoutMs); + EXPECT_THAT( + WaitUntil( + [&] { return a != nullptr; }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kGetStatsReportTimeoutMs)}), + IsRtcOk()); + EXPECT_THAT( + WaitUntil( + [&] { return b != nullptr; }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kGetStatsReportTimeoutMs)}), + IsRtcOk()); + EXPECT_THAT( + WaitUntil( + [&] { return c != nullptr; }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kGetStatsReportTimeoutMs)}), + IsRtcOk()); EXPECT_EQ(a.get(), b.get()); // The act of doing `AdvanceTime` processes all messages. If this was not the // case we might not require `c` to be fresher than `b`. @@ -968,7 +960,7 @@ TEST_F(RTCStatsCollectorTest, MultipleCallbacksWithInvalidatedCacheInBetween) { TEST_F(RTCStatsCollectorTest, ToJsonProducesParseableJson) { ExampleStatsGraph graph = SetupExampleStatsGraphForSelectorTests(); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); std::string json_format = report->ToJson(); Json::CharReaderBuilder builder; @@ -999,7 +991,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCCertificateStatsSingle) { kTransportName, remote_certinfo->certificate->GetSSLCertificateChain().Clone()); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); ExpectReportContainsCertificateInfo(report, *local_certinfo); ExpectReportContainsCertificateInfo(report, *remote_certinfo); @@ -1008,13 +1000,13 @@ TEST_F(RTCStatsCollectorTest, CollectRTCCertificateStatsSingle) { // These SSRC collisions are legal. TEST_F(RTCStatsCollectorTest, ValidSsrcCollisionDoesNotCrash) { // BUNDLE audio/video inbound/outbound. Unique SSRCs needed within the BUNDLE. - cricket::VoiceMediaInfo mid1_info; + VoiceMediaInfo mid1_info; mid1_info.receivers.emplace_back(); mid1_info.receivers[0].add_ssrc(1); mid1_info.senders.emplace_back(); mid1_info.senders[0].add_ssrc(2); pc_->AddVoiceChannel("Mid1", "Transport1", mid1_info); - cricket::VideoMediaInfo mid2_info; + VideoMediaInfo mid2_info; mid2_info.receivers.emplace_back(); mid2_info.receivers[0].add_ssrc(3); mid2_info.senders.emplace_back(); @@ -1022,13 +1014,13 @@ TEST_F(RTCStatsCollectorTest, ValidSsrcCollisionDoesNotCrash) { pc_->AddVideoChannel("Mid2", "Transport1", mid2_info); // Now create a second BUNDLE group with SSRCs colliding with the first group // (but again no collisions within the group). 
- cricket::VoiceMediaInfo mid3_info; + VoiceMediaInfo mid3_info; mid3_info.receivers.emplace_back(); mid3_info.receivers[0].add_ssrc(1); mid3_info.senders.emplace_back(); mid3_info.senders[0].add_ssrc(2); pc_->AddVoiceChannel("Mid3", "Transport2", mid3_info); - cricket::VideoMediaInfo mid4_info; + VideoMediaInfo mid4_info; mid4_info.receivers.emplace_back(); mid4_info.receivers[0].add_ssrc(3); mid4_info.senders.emplace_back(); @@ -1036,7 +1028,7 @@ TEST_F(RTCStatsCollectorTest, ValidSsrcCollisionDoesNotCrash) { pc_->AddVideoChannel("Mid4", "Transport2", mid4_info); // This should not crash (https://crbug.com/1361612). - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); auto inbound_rtps = report->GetStatsOfType(); auto outbound_rtps = report->GetStatsOfType(); EXPECT_EQ(inbound_rtps.size(), 4u); @@ -1048,25 +1040,25 @@ TEST_F(RTCStatsCollectorTest, ValidSsrcCollisionDoesNotCrash) { // collisions just to make sure we don't crash in even the most extreme cases. TEST_F(RTCStatsCollectorTest, InvalidSsrcCollisionDoesNotCrash) { // One SSRC to rule them all. - cricket::VoiceMediaInfo mid1_info; + VoiceMediaInfo mid1_info; mid1_info.receivers.emplace_back(); mid1_info.receivers[0].add_ssrc(1); mid1_info.senders.emplace_back(); mid1_info.senders[0].add_ssrc(1); pc_->AddVoiceChannel("Mid1", "BundledTransport", mid1_info); - cricket::VideoMediaInfo mid2_info; + VideoMediaInfo mid2_info; mid2_info.receivers.emplace_back(); mid2_info.receivers[0].add_ssrc(1); mid2_info.senders.emplace_back(); mid2_info.senders[0].add_ssrc(1); pc_->AddVideoChannel("Mid2", "BundledTransport", mid2_info); - cricket::VoiceMediaInfo mid3_info; + VoiceMediaInfo mid3_info; mid3_info.receivers.emplace_back(); mid3_info.receivers[0].add_ssrc(1); mid3_info.senders.emplace_back(); mid3_info.senders[0].add_ssrc(1); pc_->AddVoiceChannel("Mid3", "BundledTransport", mid3_info); - cricket::VideoMediaInfo mid4_info; + VideoMediaInfo mid4_info; mid4_info.receivers.emplace_back(); mid4_info.receivers[0].add_ssrc(1); mid4_info.senders.emplace_back(); @@ -1081,11 +1073,11 @@ TEST_F(RTCStatsCollectorTest, InvalidSsrcCollisionDoesNotCrash) { TEST_F(RTCStatsCollectorTest, CollectRTCCodecStatsOnlyIfReferenced) { // Audio - cricket::VoiceMediaInfo voice_media_info; + VoiceMediaInfo voice_media_info; RtpCodecParameters inbound_audio_codec; inbound_audio_codec.payload_type = 1; - inbound_audio_codec.kind = cricket::MEDIA_TYPE_AUDIO; + inbound_audio_codec.kind = webrtc::MediaType::AUDIO; inbound_audio_codec.name = "opus"; inbound_audio_codec.clock_rate = 1337; inbound_audio_codec.num_channels = 1; @@ -1095,7 +1087,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCCodecStatsOnlyIfReferenced) { RtpCodecParameters outbound_audio_codec; outbound_audio_codec.payload_type = 2; - outbound_audio_codec.kind = cricket::MEDIA_TYPE_AUDIO; + outbound_audio_codec.kind = webrtc::MediaType::AUDIO; outbound_audio_codec.name = "isac"; outbound_audio_codec.clock_rate = 1338; outbound_audio_codec.num_channels = 2; @@ -1103,11 +1095,11 @@ TEST_F(RTCStatsCollectorTest, CollectRTCCodecStatsOnlyIfReferenced) { std::make_pair(outbound_audio_codec.payload_type, outbound_audio_codec)); // Video - cricket::VideoMediaInfo video_media_info; + VideoMediaInfo video_media_info; RtpCodecParameters inbound_video_codec; inbound_video_codec.payload_type = 3; - inbound_video_codec.kind = cricket::MEDIA_TYPE_VIDEO; + inbound_video_codec.kind = webrtc::MediaType::VIDEO; inbound_video_codec.name = "H264"; inbound_video_codec.clock_rate 
= 1339; inbound_video_codec.parameters = {{"level-asymmetry-allowed", "1"}, @@ -1118,29 +1110,29 @@ TEST_F(RTCStatsCollectorTest, CollectRTCCodecStatsOnlyIfReferenced) { RtpCodecParameters outbound_video_codec; outbound_video_codec.payload_type = 4; - outbound_video_codec.kind = cricket::MEDIA_TYPE_VIDEO; + outbound_video_codec.kind = webrtc::MediaType::VIDEO; outbound_video_codec.name = "VP8"; outbound_video_codec.clock_rate = 1340; video_media_info.send_codecs.insert( std::make_pair(outbound_video_codec.payload_type, outbound_video_codec)); // Ensure the above codecs are referenced. - cricket::VoiceReceiverInfo inbound_audio_info; + VoiceReceiverInfo inbound_audio_info; inbound_audio_info.add_ssrc(10); inbound_audio_info.codec_payload_type = 1; voice_media_info.receivers.push_back(inbound_audio_info); - cricket::VoiceSenderInfo outbound_audio_info; + VoiceSenderInfo outbound_audio_info; outbound_audio_info.add_ssrc(20); outbound_audio_info.codec_payload_type = 2; voice_media_info.senders.push_back(outbound_audio_info); - cricket::VideoReceiverInfo inbound_video_info; + VideoReceiverInfo inbound_video_info; inbound_video_info.add_ssrc(30); inbound_video_info.codec_payload_type = 3; video_media_info.receivers.push_back(inbound_video_info); - cricket::VideoSenderInfo outbound_video_info; + VideoSenderInfo outbound_video_info; outbound_video_info.add_ssrc(40); outbound_video_info.codec_payload_type = 4; video_media_info.senders.push_back(outbound_video_info); @@ -1150,7 +1142,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCCodecStatsOnlyIfReferenced) { auto video_channels = pc_->AddVideoChannel("VideoMid", "TransportName", video_media_info); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); RTCCodecStats expected_inbound_audio_codec( "CITTransportName1_1_minptime=10;useinbandfec=1", report->timestamp()); @@ -1230,34 +1222,34 @@ TEST_F(RTCStatsCollectorTest, CodecStatsAreCollectedPerTransport) { // PT=10 RtpCodecParameters outbound_codec_pt10; outbound_codec_pt10.payload_type = 10; - outbound_codec_pt10.kind = cricket::MEDIA_TYPE_VIDEO; + outbound_codec_pt10.kind = webrtc::MediaType::VIDEO; outbound_codec_pt10.name = "VP8"; outbound_codec_pt10.clock_rate = 9000; // PT=11 RtpCodecParameters outbound_codec_pt11; outbound_codec_pt11.payload_type = 11; - outbound_codec_pt11.kind = cricket::MEDIA_TYPE_VIDEO; + outbound_codec_pt11.kind = webrtc::MediaType::VIDEO; outbound_codec_pt11.name = "VP8"; outbound_codec_pt11.clock_rate = 9000; // Insert codecs into `send_codecs` and ensure the PTs are referenced by RTP // streams. 
- cricket::VideoMediaInfo info_pt10; + VideoMediaInfo info_pt10; info_pt10.send_codecs.insert( std::make_pair(outbound_codec_pt10.payload_type, outbound_codec_pt10)); info_pt10.senders.emplace_back(); info_pt10.senders[0].add_ssrc(42); info_pt10.senders[0].codec_payload_type = outbound_codec_pt10.payload_type; - cricket::VideoMediaInfo info_pt11; + VideoMediaInfo info_pt11; info_pt11.send_codecs.insert( std::make_pair(outbound_codec_pt11.payload_type, outbound_codec_pt11)); info_pt11.senders.emplace_back(); info_pt11.senders[0].add_ssrc(43); info_pt11.senders[0].codec_payload_type = outbound_codec_pt11.payload_type; - cricket::VideoMediaInfo info_pt10_pt11; + VideoMediaInfo info_pt10_pt11; info_pt10_pt11.send_codecs.insert( std::make_pair(outbound_codec_pt10.payload_type, outbound_codec_pt10)); info_pt10_pt11.send_codecs.insert( @@ -1278,7 +1270,7 @@ TEST_F(RTCStatsCollectorTest, CodecStatsAreCollectedPerTransport) { // There should be no duplicate codecs because all codec references are on the // same transport. - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); auto codec_stats = report->GetStatsOfType(); EXPECT_EQ(codec_stats.size(), 2u); @@ -1295,7 +1287,7 @@ TEST_F(RTCStatsCollectorTest, SamePayloadTypeButDifferentFmtpLines) { // PT=111, useinbandfec=0 RtpCodecParameters inbound_codec_pt111_nofec; inbound_codec_pt111_nofec.payload_type = 111; - inbound_codec_pt111_nofec.kind = cricket::MEDIA_TYPE_AUDIO; + inbound_codec_pt111_nofec.kind = webrtc::MediaType::AUDIO; inbound_codec_pt111_nofec.name = "opus"; inbound_codec_pt111_nofec.clock_rate = 48000; inbound_codec_pt111_nofec.parameters.insert( @@ -1304,20 +1296,20 @@ TEST_F(RTCStatsCollectorTest, SamePayloadTypeButDifferentFmtpLines) { // PT=111, useinbandfec=1 RtpCodecParameters inbound_codec_pt111_fec; inbound_codec_pt111_fec.payload_type = 111; - inbound_codec_pt111_fec.kind = cricket::MEDIA_TYPE_AUDIO; + inbound_codec_pt111_fec.kind = webrtc::MediaType::AUDIO; inbound_codec_pt111_fec.name = "opus"; inbound_codec_pt111_fec.clock_rate = 48000; inbound_codec_pt111_fec.parameters.insert( std::make_pair("useinbandfec", "1")); - cricket::VideoMediaInfo info_nofec; + VideoMediaInfo info_nofec; info_nofec.receive_codecs.insert(std::make_pair( inbound_codec_pt111_nofec.payload_type, inbound_codec_pt111_nofec)); info_nofec.receivers.emplace_back(); info_nofec.receivers[0].add_ssrc(123); info_nofec.receivers[0].codec_payload_type = inbound_codec_pt111_nofec.payload_type; - cricket::VideoMediaInfo info_fec; + VideoMediaInfo info_fec; info_fec.receive_codecs.insert(std::make_pair( inbound_codec_pt111_fec.payload_type, inbound_codec_pt111_fec)); info_fec.receivers.emplace_back(); @@ -1331,7 +1323,7 @@ TEST_F(RTCStatsCollectorTest, SamePayloadTypeButDifferentFmtpLines) { // Despite having the same PT we should see two codec stats because their FMTP // lines are different. 
- rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); auto codec_stats = report->GetStatsOfType(); EXPECT_EQ(codec_stats.size(), 2u); @@ -1352,12 +1344,12 @@ TEST_F(RTCStatsCollectorTest, SamePayloadTypeButDifferentFmtpLines) { // PT=112, useinbandfec=1 RtpCodecParameters inbound_codec_pt112_fec; inbound_codec_pt112_fec.payload_type = 112; - inbound_codec_pt112_fec.kind = cricket::MEDIA_TYPE_AUDIO; + inbound_codec_pt112_fec.kind = webrtc::MediaType::AUDIO; inbound_codec_pt112_fec.name = "opus"; inbound_codec_pt112_fec.clock_rate = 48000; inbound_codec_pt112_fec.parameters.insert( std::make_pair("useinbandfec", "1")); - cricket::VideoMediaInfo info_fec_pt112; + VideoMediaInfo info_fec_pt112; info_fec_pt112.receive_codecs.insert(std::make_pair( inbound_codec_pt112_fec.payload_type, inbound_codec_pt112_fec)); info_fec_pt112.receivers.emplace_back(); @@ -1400,7 +1392,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCCertificateStatsMultiple) { kVideoTransport, video_remote_certinfo->certificate->GetSSLCertificateChain().Clone()); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); ExpectReportContainsCertificateInfo(report, *audio_local_certinfo); ExpectReportContainsCertificateInfo(report, *audio_remote_certinfo); ExpectReportContainsCertificateInfo(report, *video_local_certinfo); @@ -1425,14 +1417,14 @@ TEST_F(RTCStatsCollectorTest, CollectRTCCertificateStatsChain) { kTransportName, remote_certinfo->certificate->GetSSLCertificateChain().Clone()); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); ExpectReportContainsCertificateInfo(report, *local_certinfo); ExpectReportContainsCertificateInfo(report, *remote_certinfo); } TEST_F(RTCStatsCollectorTest, CertificateStatsCache) { const char kTransportName[] = "transport"; - rtc::ScopedFakeClock fake_clock; + ScopedFakeClock fake_clock; pc_->AddVoiceChannel("audio", kTransportName); @@ -1448,8 +1440,7 @@ TEST_F(RTCStatsCollectorTest, CertificateStatsCache) { ASSERT_EQ(initial_local_certinfo->fingerprints.size(), 2u); ASSERT_EQ(initial_remote_certinfo->fingerprints.size(), 2u); - rtc::scoped_refptr first_report = - stats_->GetStatsReport(); + scoped_refptr first_report = stats_->GetStatsReport(); const auto* first_local_cert0 = GetCertificateStatsFromFingerprint( first_report, initial_local_certinfo->fingerprints[0]); const auto* first_local_cert1 = GetCertificateStatsFromFingerprint( @@ -1462,10 +1453,10 @@ TEST_F(RTCStatsCollectorTest, CertificateStatsCache) { ASSERT_TRUE(first_local_cert1); ASSERT_TRUE(first_remote_cert0); ASSERT_TRUE(first_remote_cert1); - EXPECT_EQ(first_local_cert0->timestamp().us(), rtc::TimeMicros()); - EXPECT_EQ(first_local_cert1->timestamp().us(), rtc::TimeMicros()); - EXPECT_EQ(first_remote_cert0->timestamp().us(), rtc::TimeMicros()); - EXPECT_EQ(first_remote_cert1->timestamp().us(), rtc::TimeMicros()); + EXPECT_EQ(first_local_cert0->timestamp().us(), TimeMicros()); + EXPECT_EQ(first_local_cert1->timestamp().us(), TimeMicros()); + EXPECT_EQ(first_remote_cert0->timestamp().us(), TimeMicros()); + EXPECT_EQ(first_remote_cert1->timestamp().us(), TimeMicros()); // Replace all certificates. std::unique_ptr updated_local_certinfo = @@ -1488,8 +1479,7 @@ TEST_F(RTCStatsCollectorTest, CertificateStatsCache) { // Advance time to ensure a fresh stats report, but don't clear the // certificate stats cache. 
fake_clock.AdvanceTime(TimeDelta::Seconds(1)); - rtc::scoped_refptr second_report = - stats_->GetStatsReport(); + scoped_refptr second_report = stats_->GetStatsReport(); // We expect to see the same certificates as before due to not clearing the // certificate cache. const auto* second_local_cert0 = @@ -1514,10 +1504,10 @@ TEST_F(RTCStatsCollectorTest, CertificateStatsCache) { EXPECT_EQ(*second_remote_cert1->fingerprint, initial_remote_certinfo->fingerprints[1]); // But timestamps are up-to-date, because this is a fresh stats report. - EXPECT_EQ(second_local_cert0->timestamp().us(), rtc::TimeMicros()); - EXPECT_EQ(second_local_cert1->timestamp().us(), rtc::TimeMicros()); - EXPECT_EQ(second_remote_cert0->timestamp().us(), rtc::TimeMicros()); - EXPECT_EQ(second_remote_cert1->timestamp().us(), rtc::TimeMicros()); + EXPECT_EQ(second_local_cert0->timestamp().us(), TimeMicros()); + EXPECT_EQ(second_local_cert1->timestamp().us(), TimeMicros()); + EXPECT_EQ(second_remote_cert0->timestamp().us(), TimeMicros()); + EXPECT_EQ(second_remote_cert1->timestamp().us(), TimeMicros()); // The updated certificates are not part of the report yet. EXPECT_FALSE(GetCertificateStatsFromFingerprint( second_report, updated_local_certinfo->fingerprints[0])); @@ -1530,8 +1520,7 @@ TEST_F(RTCStatsCollectorTest, CertificateStatsCache) { // Clear the cache, including the cached certificates. stats_->stats_collector()->ClearCachedStatsReport(); - rtc::scoped_refptr third_report = - stats_->GetStatsReport(); + scoped_refptr third_report = stats_->GetStatsReport(); // Now the old certificates stats should be deleted. EXPECT_FALSE(third_report->Get(first_local_cert0->id())); EXPECT_FALSE(third_report->Get(first_local_cert1->id())); @@ -1553,14 +1542,14 @@ TEST_F(RTCStatsCollectorTest, CollectTwoRTCDataChannelStatsWithPendingId) { // This is not a safe assumption, but in order to make it work for // the test, we reset the ID allocator at test start. SctpDataChannel::ResetInternalIdAllocatorForTesting(-1); - pc_->AddSctpDataChannel(rtc::make_ref_counted( + pc_->AddSctpDataChannel(make_ref_counted( data_channel_controller_->weak_ptr(), /*id=*/-1, DataChannelInterface::kConnecting)); - pc_->AddSctpDataChannel(rtc::make_ref_counted( + pc_->AddSctpDataChannel(make_ref_counted( data_channel_controller_->weak_ptr(), /*id=*/-1, DataChannelInterface::kConnecting)); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); RTCDataChannelStats expected_data_channel0("D0", Timestamp::Zero()); // Default values from MockDataChannel. expected_data_channel0.label = "MockSctpDataChannel"; @@ -1582,7 +1571,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) { // This is not a safe assumption, but in order to make it work for // the test, we reset the ID allocator at test start. 
SctpDataChannel::ResetInternalIdAllocatorForTesting(-1); - pc_->AddSctpDataChannel(rtc::make_ref_counted( + pc_->AddSctpDataChannel(make_ref_counted( data_channel_controller_->weak_ptr(), 0, "MockSctpDataChannel0", DataChannelInterface::kConnecting, "proto1", 1, 2, 3, 4)); RTCDataChannelStats expected_data_channel0("D0", Timestamp::Zero()); @@ -1595,7 +1584,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) { expected_data_channel0.messages_received = 3; expected_data_channel0.bytes_received = 4; - pc_->AddSctpDataChannel(rtc::make_ref_counted( + pc_->AddSctpDataChannel(make_ref_counted( data_channel_controller_->weak_ptr(), 1, "MockSctpDataChannel1", DataChannelInterface::kOpen, "proto2", 5, 6, 7, 8)); RTCDataChannelStats expected_data_channel1("D1", Timestamp::Zero()); @@ -1608,7 +1597,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) { expected_data_channel1.messages_received = 7; expected_data_channel1.bytes_received = 8; - pc_->AddSctpDataChannel(rtc::make_ref_counted( + pc_->AddSctpDataChannel(make_ref_counted( data_channel_controller_->weak_ptr(), 2, "MockSctpDataChannel2", DataChannelInterface::kClosing, "proto1", 9, 10, 11, 12)); RTCDataChannelStats expected_data_channel2("D2", Timestamp::Zero()); @@ -1621,7 +1610,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) { expected_data_channel2.messages_received = 11; expected_data_channel2.bytes_received = 12; - pc_->AddSctpDataChannel(rtc::make_ref_counted( + pc_->AddSctpDataChannel(make_ref_counted( data_channel_controller_->weak_ptr(), 3, "MockSctpDataChannel3", DataChannelInterface::kClosed, "proto3", 13, 14, 15, 16)); RTCDataChannelStats expected_data_channel3("D3", Timestamp::Zero()); @@ -1634,7 +1623,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) { expected_data_channel3.messages_received = 15; expected_data_channel3.bytes_received = 16; - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); ASSERT_TRUE(report->Get(expected_data_channel0.id())); EXPECT_EQ( @@ -1656,9 +1645,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCDataChannelStats) { TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidateStats) { // Candidates in the first transport stats. 
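The data-channel hunks above show the other recurring rename in this file: rtc::scoped_refptr and rtc::make_ref_counted lose the rtc:: prefix. A hedged sketch of the new call shape, assuming both names are now exported from the webrtc namespace (api/scoped_refptr.h, api/make_ref_counted.h); the wrapper itself is illustrative only.

#include <utility>

#include "api/make_ref_counted.h"
#include "api/scoped_refptr.h"

// Was: rtc::scoped_refptr<T> p = rtc::make_ref_counted<T>(args...);
template <typename T, typename... Args>
webrtc::scoped_refptr<T> MakeRefCounted(Args&&... args) {
  return webrtc::make_ref_counted<T>(std::forward<Args>(args)...);
}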
- std::unique_ptr a_local_host = CreateFakeCandidate( - "1.2.3.4", 5, "a_local_host's protocol", rtc::ADAPTER_TYPE_VPN, - cricket::LOCAL_PORT_TYPE, 0, rtc::ADAPTER_TYPE_ETHERNET); + std::unique_ptr a_local_host = CreateFakeCandidate( + "1.2.3.4", 5, "a_local_host's protocol", ADAPTER_TYPE_VPN, + IceCandidateType::kHost, 0, ADAPTER_TYPE_ETHERNET); RTCLocalIceCandidateStats expected_a_local_host("I" + a_local_host->id(), Timestamp::Zero()); expected_a_local_host.transport_id = "Ta0"; @@ -1674,9 +1663,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidateStats) { expected_a_local_host.foundation = "foundationIsAString"; expected_a_local_host.username_fragment = "iceusernamefragment"; - std::unique_ptr a_remote_srflx = CreateFakeCandidate( - "6.7.8.9", 10, "remote_srflx's protocol", rtc::ADAPTER_TYPE_UNKNOWN, - cricket::STUN_PORT_TYPE, 1); + std::unique_ptr a_remote_srflx = + CreateFakeCandidate("6.7.8.9", 10, "remote_srflx's protocol", + ADAPTER_TYPE_UNKNOWN, IceCandidateType::kSrflx, 1); RTCRemoteIceCandidateStats expected_a_remote_srflx("I" + a_remote_srflx->id(), Timestamp::Zero()); expected_a_remote_srflx.transport_id = "Ta0"; @@ -1689,9 +1678,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidateStats) { expected_a_remote_srflx.foundation = "foundationIsAString"; expected_a_remote_srflx.username_fragment = "iceusernamefragment"; - std::unique_ptr a_local_prflx = CreateFakeCandidate( - "11.12.13.14", 15, "a_local_prflx's protocol", - rtc::ADAPTER_TYPE_CELLULAR_2G, cricket::PRFLX_PORT_TYPE, 2); + std::unique_ptr a_local_prflx = CreateFakeCandidate( + "11.12.13.14", 15, "a_local_prflx's protocol", ADAPTER_TYPE_CELLULAR_2G, + IceCandidateType::kPrflx, 2); RTCLocalIceCandidateStats expected_a_local_prflx("I" + a_local_prflx->id(), Timestamp::Zero()); expected_a_local_prflx.transport_id = "Ta0"; @@ -1707,9 +1696,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidateStats) { expected_a_local_prflx.foundation = "foundationIsAString"; expected_a_local_prflx.username_fragment = "iceusernamefragment"; - std::unique_ptr a_remote_relay = CreateFakeCandidate( - "16.17.18.19", 20, "a_remote_relay's protocol", rtc::ADAPTER_TYPE_UNKNOWN, - cricket::RELAY_PORT_TYPE, 3); + std::unique_ptr a_remote_relay = + CreateFakeCandidate("16.17.18.19", 20, "a_remote_relay's protocol", + ADAPTER_TYPE_UNKNOWN, IceCandidateType::kRelay, 3); RTCRemoteIceCandidateStats expected_a_remote_relay("I" + a_remote_relay->id(), Timestamp::Zero()); expected_a_remote_relay.transport_id = "Ta0"; @@ -1722,9 +1711,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidateStats) { expected_a_remote_relay.foundation = "foundationIsAString"; expected_a_remote_relay.username_fragment = "iceusernamefragment"; - std::unique_ptr a_local_relay = CreateFakeCandidate( - "16.17.18.19", 21, "a_local_relay's protocol", rtc::ADAPTER_TYPE_UNKNOWN, - cricket::RELAY_PORT_TYPE, 1); + std::unique_ptr a_local_relay = + CreateFakeCandidate("16.17.18.19", 21, "a_local_relay's protocol", + ADAPTER_TYPE_UNKNOWN, IceCandidateType::kRelay, 1); a_local_relay->set_relay_protocol("tcp"); a_local_relay->set_url("turn:url1"); @@ -1745,9 +1734,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidateStats) { expected_a_local_relay.foundation = "foundationIsAString"; expected_a_local_relay.username_fragment = "iceusernamefragment"; - std::unique_ptr a_local_relay_prflx = CreateFakeCandidate( - "11.12.13.20", 22, "a_local_relay_prflx's protocol", - rtc::ADAPTER_TYPE_UNKNOWN, cricket::PRFLX_PORT_TYPE, 1); + std::unique_ptr a_local_relay_prflx = + 
CreateFakeCandidate("11.12.13.20", 22, "a_local_relay_prflx's protocol", + ADAPTER_TYPE_UNKNOWN, IceCandidateType::kPrflx, 1); a_local_relay_prflx->set_relay_protocol("udp"); RTCLocalIceCandidateStats expected_a_local_relay_prflx( @@ -1767,10 +1756,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidateStats) { expected_a_local_relay_prflx.username_fragment = "iceusernamefragment"; // A non-paired local candidate. - std::unique_ptr a_local_host_not_paired = - CreateFakeCandidate("1.2.3.4", 4404, "a_local_host_not_paired's protocol", - rtc::ADAPTER_TYPE_VPN, cricket::LOCAL_PORT_TYPE, 0, - rtc::ADAPTER_TYPE_ETHERNET); + std::unique_ptr a_local_host_not_paired = CreateFakeCandidate( + "1.2.3.4", 4404, "a_local_host_not_paired's protocol", ADAPTER_TYPE_VPN, + IceCandidateType::kHost, 0, ADAPTER_TYPE_ETHERNET); RTCLocalIceCandidateStats expected_a_local_host_not_paired( "I" + a_local_host_not_paired->id(), Timestamp::Zero()); expected_a_local_host_not_paired.transport_id = "Ta0"; @@ -1788,9 +1776,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidateStats) { expected_a_local_host_not_paired.username_fragment = "iceusernamefragment"; // Candidates in the second transport stats. - std::unique_ptr b_local = + std::unique_ptr b_local = CreateFakeCandidate("42.42.42.42", 42, "b_local's protocol", - rtc::ADAPTER_TYPE_WIFI, cricket::LOCAL_PORT_TYPE, 42); + ADAPTER_TYPE_WIFI, IceCandidateType::kHost, 42); RTCLocalIceCandidateStats expected_b_local("I" + b_local->id(), Timestamp::Zero()); expected_b_local.transport_id = "Tb0"; @@ -1806,9 +1794,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidateStats) { expected_b_local.foundation = "foundationIsAString"; expected_b_local.username_fragment = "iceusernamefragment"; - std::unique_ptr b_remote = CreateFakeCandidate( - "42.42.42.42", 42, "b_remote's protocol", rtc::ADAPTER_TYPE_UNKNOWN, - cricket::LOCAL_PORT_TYPE, 42); + std::unique_ptr b_remote = + CreateFakeCandidate("42.42.42.42", 42, "b_remote's protocol", + ADAPTER_TYPE_UNKNOWN, IceCandidateType::kHost, 42); RTCRemoteIceCandidateStats expected_b_remote("I" + b_remote->id(), Timestamp::Zero()); expected_b_remote.transport_id = "Tb0"; @@ -1822,49 +1810,49 @@ TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidateStats) { expected_b_remote.username_fragment = "iceusernamefragment"; // Add candidate pairs to connection. 
- cricket::TransportChannelStats a_transport_channel_stats; + TransportChannelStats a_transport_channel_stats; a_transport_channel_stats.ice_transport_stats.connection_infos.push_back( - cricket::ConnectionInfo()); + ConnectionInfo()); a_transport_channel_stats.ice_transport_stats.connection_infos[0] - .local_candidate = *a_local_host.get(); + .local_candidate = *a_local_host; a_transport_channel_stats.ice_transport_stats.connection_infos[0] - .remote_candidate = *a_remote_srflx.get(); + .remote_candidate = *a_remote_srflx; a_transport_channel_stats.ice_transport_stats.connection_infos.push_back( - cricket::ConnectionInfo()); + ConnectionInfo()); a_transport_channel_stats.ice_transport_stats.connection_infos[1] - .local_candidate = *a_local_prflx.get(); + .local_candidate = *a_local_prflx; a_transport_channel_stats.ice_transport_stats.connection_infos[1] - .remote_candidate = *a_remote_relay.get(); + .remote_candidate = *a_remote_relay; a_transport_channel_stats.ice_transport_stats.connection_infos.push_back( - cricket::ConnectionInfo()); + ConnectionInfo()); a_transport_channel_stats.ice_transport_stats.connection_infos[2] - .local_candidate = *a_local_relay.get(); + .local_candidate = *a_local_relay; a_transport_channel_stats.ice_transport_stats.connection_infos[2] - .remote_candidate = *a_remote_relay.get(); + .remote_candidate = *a_remote_relay; a_transport_channel_stats.ice_transport_stats.connection_infos.push_back( - cricket::ConnectionInfo()); + ConnectionInfo()); a_transport_channel_stats.ice_transport_stats.connection_infos[3] - .local_candidate = *a_local_relay_prflx.get(); + .local_candidate = *a_local_relay_prflx; a_transport_channel_stats.ice_transport_stats.connection_infos[3] - .remote_candidate = *a_remote_relay.get(); + .remote_candidate = *a_remote_relay; a_transport_channel_stats.ice_transport_stats.candidate_stats_list.push_back( - cricket::CandidateStats(*a_local_host_not_paired.get())); + CandidateStats(*a_local_host_not_paired)); pc_->AddVoiceChannel("audio", "a"); pc_->SetTransportStats("a", a_transport_channel_stats); - cricket::TransportChannelStats b_transport_channel_stats; + TransportChannelStats b_transport_channel_stats; b_transport_channel_stats.ice_transport_stats.connection_infos.push_back( - cricket::ConnectionInfo()); + ConnectionInfo()); b_transport_channel_stats.ice_transport_stats.connection_infos[0] - .local_candidate = *b_local.get(); + .local_candidate = *b_local; b_transport_channel_stats.ice_transport_stats.connection_infos[0] - .remote_candidate = *b_remote.get(); + .remote_candidate = *b_remote; pc_->AddVideoChannel("video", "b"); pc_->SetTransportStats("b", b_transport_channel_stats); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); ASSERT_TRUE(report->Get(expected_a_local_host.id())); EXPECT_EQ(expected_a_local_host, report->Get(expected_a_local_host.id()) @@ -1907,21 +1895,21 @@ TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidateStats) { TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidatePairStats) { const char kTransportName[] = "transport"; - std::unique_ptr local_candidate = - CreateFakeCandidate("42.42.42.42", 42, "protocol", rtc::ADAPTER_TYPE_WIFI, - cricket::LOCAL_PORT_TYPE, 42); + std::unique_ptr local_candidate = + CreateFakeCandidate("42.42.42.42", 42, "protocol", ADAPTER_TYPE_WIFI, + IceCandidateType::kHost, 42); local_candidate->set_username("local_iceusernamefragment"); - std::unique_ptr remote_candidate = CreateFakeCandidate( - "42.42.42.42", 42, "protocol", 
rtc::ADAPTER_TYPE_UNKNOWN, - cricket::STUN_PORT_TYPE, 42); - remote_candidate->set_related_address(rtc::SocketAddress("192.168.2.1", 43)); + std::unique_ptr remote_candidate = + CreateFakeCandidate("42.42.42.42", 42, "protocol", ADAPTER_TYPE_UNKNOWN, + IceCandidateType::kSrflx, 42); + remote_candidate->set_related_address(SocketAddress("192.168.2.1", 43)); remote_candidate->set_username("remote_iceusernamefragment"); - cricket::ConnectionInfo connection_info; + ConnectionInfo connection_info; connection_info.best_connection = false; - connection_info.local_candidate = *local_candidate.get(); - connection_info.remote_candidate = *remote_candidate.get(); + connection_info.local_candidate = *local_candidate; + connection_info.remote_candidate = *remote_candidate; connection_info.writable = true; connection_info.sent_discarded_packets = 3; connection_info.sent_total_packets = 10; @@ -1930,33 +1918,33 @@ TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidatePairStats) { connection_info.sent_total_bytes = 42; connection_info.recv_total_bytes = 1234; connection_info.total_round_trip_time_ms = 0; - connection_info.current_round_trip_time_ms = absl::nullopt; + connection_info.current_round_trip_time_ms = std::nullopt; connection_info.recv_ping_requests = 2020; connection_info.sent_ping_requests_total = 2222; connection_info.sent_ping_requests_before_first_response = 2000; connection_info.recv_ping_responses = 4321; connection_info.sent_ping_responses = 1000; - connection_info.state = cricket::IceCandidatePairState::IN_PROGRESS; + connection_info.state = IceCandidatePairState::IN_PROGRESS; connection_info.priority = 5555; connection_info.nominated = false; connection_info.last_data_received = Timestamp::Millis(2500); connection_info.last_data_sent = Timestamp::Millis(5200); - cricket::TransportChannelStats transport_channel_stats; - transport_channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP; + TransportChannelStats transport_channel_stats; + transport_channel_stats.component = ICE_CANDIDATE_COMPONENT_RTP; transport_channel_stats.ice_transport_stats.connection_infos.push_back( connection_info); pc_->AddVideoChannel("video", kTransportName); pc_->SetTransportStats(kTransportName, transport_channel_stats); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); RTCIceCandidatePairStats expected_pair( "CP" + local_candidate->id() + "_" + remote_candidate->id(), report->timestamp()); expected_pair.transport_id = - "Ttransport" + rtc::ToString(cricket::ICE_CANDIDATE_COMPONENT_RTP); + "Ttransport" + absl::StrCat(ICE_CANDIDATE_COMPONENT_RTP); expected_pair.local_candidate_id = "I" + local_candidate->id(); expected_pair.remote_candidate_id = "I" + remote_candidate->id(); expected_pair.state = "in-progress"; @@ -2030,7 +2018,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidatePairStats) { EXPECT_TRUE(report->Get(*expected_pair.transport_id)); // Set bandwidth and "GetStats" again. 
- webrtc::Call::Stats call_stats; + Call::Stats call_stats; const int kSendBandwidth = 888; call_stats.send_bandwidth_bps = kSendBandwidth; const int kRecvBandwidth = 999; @@ -2085,7 +2073,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCIceCandidatePairStats) { TEST_F(RTCStatsCollectorTest, CollectRTCPeerConnectionStats) { { - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); RTCPeerConnectionStats expected("P", report->timestamp()); expected.data_channels_opened = 0; expected.data_channels_closed = 0; @@ -2094,12 +2082,12 @@ TEST_F(RTCStatsCollectorTest, CollectRTCPeerConnectionStats) { } FakeDataChannelController controller(pc_->network_thread()); - rtc::scoped_refptr dummy_channel_a = SctpDataChannel::Create( + scoped_refptr dummy_channel_a = SctpDataChannel::Create( controller.weak_ptr(), "DummyChannelA", false, InternalDataChannelInit(), - rtc::Thread::Current(), rtc::Thread::Current()); - rtc::scoped_refptr dummy_channel_b = SctpDataChannel::Create( + Thread::Current(), Thread::Current()); + scoped_refptr dummy_channel_b = SctpDataChannel::Create( controller.weak_ptr(), "DummyChannelB", false, InternalDataChannelInit(), - rtc::Thread::Current(), rtc::Thread::Current()); + Thread::Current(), Thread::Current()); stats_->stats_collector()->OnSctpDataChannelStateChanged( dummy_channel_a->internal_id(), DataChannelInterface::DataState::kOpen); @@ -2108,8 +2096,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCPeerConnectionStats) { dummy_channel_b->internal_id(), DataChannelInterface::DataState::kClosed); { - rtc::scoped_refptr report = - stats_->GetFreshStatsReport(); + scoped_refptr report = stats_->GetFreshStatsReport(); RTCPeerConnectionStats expected("P", report->timestamp()); expected.data_channels_opened = 1; expected.data_channels_closed = 0; @@ -2123,8 +2110,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCPeerConnectionStats) { dummy_channel_b->internal_id(), DataChannelInterface::DataState::kClosed); { - rtc::scoped_refptr report = - stats_->GetFreshStatsReport(); + scoped_refptr report = stats_->GetFreshStatsReport(); RTCPeerConnectionStats expected("P", report->timestamp()); expected.data_channels_opened = 2; expected.data_channels_closed = 1; @@ -2138,8 +2124,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCPeerConnectionStats) { dummy_channel_b->internal_id(), DataChannelInterface::DataState::kOpen); { - rtc::scoped_refptr report = - stats_->GetFreshStatsReport(); + scoped_refptr report = stats_->GetFreshStatsReport(); RTCPeerConnectionStats expected("P", report->timestamp()); expected.data_channels_opened = 3; expected.data_channels_closed = 1; @@ -2153,8 +2138,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCPeerConnectionStats) { dummy_channel_b->internal_id(), DataChannelInterface::DataState::kClosed); { - rtc::scoped_refptr report = - stats_->GetFreshStatsReport(); + scoped_refptr report = stats_->GetFreshStatsReport(); RTCPeerConnectionStats expected("P", report->timestamp()); expected.data_channels_opened = 3; expected.data_channels_closed = 3; @@ -2164,11 +2148,10 @@ TEST_F(RTCStatsCollectorTest, CollectRTCPeerConnectionStats) { } TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Audio) { - cricket::VoiceMediaInfo voice_media_info; + VoiceMediaInfo voice_media_info; - voice_media_info.receivers.push_back(cricket::VoiceReceiverInfo()); - voice_media_info.receivers[0].local_stats.push_back( - cricket::SsrcReceiverInfo()); + voice_media_info.receivers.push_back(VoiceReceiverInfo()); + 
voice_media_info.receivers[0].local_stats.push_back(SsrcReceiverInfo()); voice_media_info.receivers[0].local_stats[0].ssrc = 1; voice_media_info.receivers[0].packets_lost = -1; // Signed per RFC3550 voice_media_info.receivers[0].packets_discarded = 7788; @@ -2198,11 +2181,12 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Audio) { voice_media_info.receivers[0].relative_packet_arrival_delay_seconds = 16; voice_media_info.receivers[0].interruption_count = 7788; voice_media_info.receivers[0].total_interruption_duration_ms = 778899; - voice_media_info.receivers[0].last_packet_received = absl::nullopt; + voice_media_info.receivers[0].last_packet_received = std::nullopt; + voice_media_info.receivers[0].total_processing_delay_seconds = 0.123; RtpCodecParameters codec_parameters; codec_parameters.payload_type = 42; - codec_parameters.kind = cricket::MEDIA_TYPE_AUDIO; + codec_parameters.kind = webrtc::MediaType::AUDIO; codec_parameters.name = "dummy"; codec_parameters.clock_rate = 0; voice_media_info.receive_codecs.insert( @@ -2211,14 +2195,14 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Audio) { auto voice_media_channels = pc_->AddVoiceChannel("AudioMid", "TransportName", voice_media_info); stats_->SetupRemoteTrackAndReceiver( - cricket::MEDIA_TYPE_AUDIO, "RemoteAudioTrackID", "RemoteStreamId", 1); + webrtc::MediaType::AUDIO, "RemoteAudioTrackID", "RemoteStreamId", 1); // Needed for playoutId to be populated. pc_->SetAudioDeviceStats(AudioDeviceModule::Stats()); pc_->GetTransceiversInternal()[0]->internal()->set_current_direction( RtpTransceiverDirection::kSendRecv); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); RTCInboundRtpStreamStats expected_audio("ITTransportName1A1", report->timestamp()); @@ -2256,6 +2240,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Audio) { expected_audio.relative_packet_arrival_delay = 16; expected_audio.interruption_count = 7788; expected_audio.total_interruption_duration = 778.899; + expected_audio.total_processing_delay = 0.123; expected_audio.playout_id = "AP"; ASSERT_TRUE(report->Get(expected_audio.id())); @@ -2282,16 +2267,15 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Audio) { } TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Audio_PlayoutId) { - cricket::VoiceMediaInfo voice_media_info; + VoiceMediaInfo voice_media_info; - voice_media_info.receivers.push_back(cricket::VoiceReceiverInfo()); - voice_media_info.receivers[0].local_stats.push_back( - cricket::SsrcReceiverInfo()); + voice_media_info.receivers.push_back(VoiceReceiverInfo()); + voice_media_info.receivers[0].local_stats.push_back(SsrcReceiverInfo()); voice_media_info.receivers[0].local_stats[0].ssrc = 1; pc_->AddVoiceChannel("AudioMid", "TransportName", voice_media_info); stats_->SetupRemoteTrackAndReceiver( - cricket::MEDIA_TYPE_AUDIO, "RemoteAudioTrackID", "RemoteStreamId", 1); + webrtc::MediaType::AUDIO, "RemoteAudioTrackID", "RemoteStreamId", 1); // Needed for playoutId to be populated. pc_->SetAudioDeviceStats(AudioDeviceModule::Stats()); @@ -2299,32 +2283,30 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Audio_PlayoutId) { // We do not expect a playout id when only sending. 
pc_->GetTransceiversInternal()[0]->internal()->set_current_direction( RtpTransceiverDirection::kSendOnly); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); ASSERT_TRUE(report->Get("ITTransportName1A1")); auto stats = report->Get("ITTransportName1A1")->cast_to(); - ASSERT_FALSE(stats.playout_id.is_defined()); + ASSERT_FALSE(stats.playout_id.has_value()); } { // We do expect a playout id when receiving. pc_->GetTransceiversInternal()[0]->internal()->set_current_direction( RtpTransceiverDirection::kRecvOnly); - rtc::scoped_refptr report = - stats_->GetFreshStatsReport(); + scoped_refptr report = stats_->GetFreshStatsReport(); ASSERT_TRUE(report->Get("ITTransportName1A1")); auto stats = report->Get("ITTransportName1A1")->cast_to(); - ASSERT_TRUE(stats.playout_id.is_defined()); + ASSERT_TRUE(stats.playout_id.has_value()); EXPECT_EQ(*stats.playout_id, "AP"); } } TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Video) { - cricket::VideoMediaInfo video_media_info; + VideoMediaInfo video_media_info; - video_media_info.receivers.push_back(cricket::VideoReceiverInfo()); - video_media_info.receivers[0].local_stats.push_back( - cricket::SsrcReceiverInfo()); + video_media_info.receivers.push_back(VideoReceiverInfo()); + video_media_info.receivers[0].local_stats.push_back(SsrcReceiverInfo()); video_media_info.receivers[0].local_stats[0].ssrc = 1; video_media_info.receivers[0].packets_received = 2; video_media_info.receivers[0].packets_lost = 42; @@ -2338,13 +2320,12 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Video) { video_media_info.receivers[0].frames_decoded = 9; video_media_info.receivers[0].key_frames_decoded = 3; video_media_info.receivers[0].frames_dropped = 13; - video_media_info.receivers[0].qp_sum = absl::nullopt; - video_media_info.receivers[0].total_decode_time = - webrtc::TimeDelta::Seconds(9); - video_media_info.receivers[0].total_processing_delay = - webrtc::TimeDelta::Millis(600); - video_media_info.receivers[0].total_assembly_time = - webrtc::TimeDelta::Millis(500); + video_media_info.receivers[0].qp_sum = std::nullopt; + video_media_info.receivers[0].corruption_score_sum = std::nullopt; + video_media_info.receivers[0].corruption_score_squared_sum = std::nullopt; + video_media_info.receivers[0].total_decode_time = TimeDelta::Seconds(9); + video_media_info.receivers[0].total_processing_delay = TimeDelta::Millis(600); + video_media_info.receivers[0].total_assembly_time = TimeDelta::Millis(500); video_media_info.receivers[0].frames_assembled_from_multiple_packets = 23; video_media_info.receivers[0].total_inter_frame_delay = 0.123; video_media_info.receivers[0].total_squared_inter_frame_delay = 0.00456; @@ -2357,11 +2338,11 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Video) { video_media_info.receivers[0].jitter_buffer_target_delay_seconds = 1.1; video_media_info.receivers[0].jitter_buffer_minimum_delay_seconds = 0.999; video_media_info.receivers[0].jitter_buffer_emitted_count = 13; - video_media_info.receivers[0].last_packet_received = absl::nullopt; + video_media_info.receivers[0].last_packet_received = std::nullopt; video_media_info.receivers[0].content_type = VideoContentType::UNSPECIFIED; video_media_info.receivers[0].estimated_playout_ntp_timestamp_ms = - absl::nullopt; - video_media_info.receivers[0].decoder_implementation_name = absl::nullopt; + std::nullopt; + video_media_info.receivers[0].decoder_implementation_name = std::nullopt; 
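Two related migrations recur in the receiver hunks above: absl::nullopt becomes std::nullopt on the media-info side, and is_defined() checks on stats attributes become has_value(), i.e. the attributes now follow the std::optional API. A small sketch of the post-migration idiom; the struct below is a hypothetical stand-in, not a WebRTC type.

#include <optional>
#include <string>

// Hypothetical stand-in for a stats object whose attributes expose the
// std::optional-style API that the assertions above rely on.
struct InboundRtpLikeStats {
  std::optional<std::string> playout_id;
  std::optional<double> last_packet_received_timestamp;
};

bool HasPlayout(const InboundRtpLikeStats& stats) {
  // Was: stats.playout_id.is_defined()
  return stats.playout_id.has_value();
}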
video_media_info.receivers[0].min_playout_delay_ms = 50; video_media_info.receivers[0].power_efficient_decoder = false; video_media_info.receivers[0].retransmitted_packets_received = 17; @@ -2369,9 +2350,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Video) { video_media_info.receivers[0].fec_packets_received = 32; video_media_info.receivers[0].fec_bytes_received = 54; video_media_info.receivers[0].ssrc_groups.push_back( - {cricket::kFidSsrcGroupSemantics, {1, 4404}}); + {kFidSsrcGroupSemantics, {1, 4404}}); video_media_info.receivers[0].ssrc_groups.push_back( - {cricket::kFecFrSsrcGroupSemantics, {1, 5505}}); + {kFecFrSsrcGroupSemantics, {1, 5505}}); // Note: these two values intentionally differ, // only the decoded one should show up. @@ -2380,7 +2361,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Video) { RtpCodecParameters codec_parameters; codec_parameters.payload_type = 42; - codec_parameters.kind = cricket::MEDIA_TYPE_VIDEO; + codec_parameters.kind = webrtc::MediaType::VIDEO; codec_parameters.name = "dummy"; codec_parameters.clock_rate = 0; video_media_info.receive_codecs.insert( @@ -2389,9 +2370,9 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Video) { auto video_media_channels = pc_->AddVideoChannel("VideoMid", "TransportName", video_media_info); stats_->SetupRemoteTrackAndReceiver( - cricket::MEDIA_TYPE_VIDEO, "RemoteVideoTrackID", "RemoteStreamId", 1); + webrtc::MediaType::VIDEO, "RemoteVideoTrackID", "RemoteStreamId", 1); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); RTCInboundRtpStreamStats expected_video("ITTransportName1V1", report->timestamp()); @@ -2413,6 +2394,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Video) { expected_video.key_frames_decoded = 3; expected_video.frames_dropped = 13; // `expected_video.qp_sum` should be undefined. + // `corruption_score` related metrics should be undefined. expected_video.total_decode_time = 9.0; expected_video.total_processing_delay = 0.6; expected_video.total_assembly_time = 0.5; @@ -2449,6 +2431,12 @@ TEST_F(RTCStatsCollectorTest, CollectRTCInboundRtpStreamStats_Video) { // Set previously undefined values and "GetStats" again. 
video_media_info.receivers[0].qp_sum = 9; expected_video.qp_sum = 9; + video_media_info.receivers[0].corruption_score_sum = 0.5; + video_media_info.receivers[0].corruption_score_squared_sum = 0.25; + video_media_info.receivers[0].corruption_score_count = 5; + expected_video.total_corruption_probability = 0.5; + expected_video.total_squared_corruption_probability = 0.25; + expected_video.corruption_measurements = 5; video_media_info.receivers[0].last_packet_received = Timestamp::Seconds(1); expected_video.last_packet_received_timestamp = 1000.0; video_media_info.receivers[0].content_type = VideoContentType::SCREENSHARE; @@ -2481,7 +2469,11 @@ TEST_F(RTCStatsCollectorTest, CollectRTCAudioPlayoutStats) { audio_device_stats.total_playout_delay_s = 5; pc_->SetAudioDeviceStats(audio_device_stats); - rtc::scoped_refptr report = stats_->GetStatsReport(); + pc_->AddVoiceChannel("AudioMid", "TransportName", {}); + stats_->SetupRemoteTrackAndReceiver( + webrtc::MediaType::AUDIO, "RemoteAudioTrackID", "RemoteStreamId", 1); + + scoped_refptr report = stats_->GetStatsReport(); auto stats_of_track_type = report->GetStatsOfType(); ASSERT_EQ(1U, stats_of_track_type.size()); @@ -2499,11 +2491,10 @@ TEST_F(RTCStatsCollectorTest, CollectRTCAudioPlayoutStats) { } TEST_F(RTCStatsCollectorTest, CollectGoogTimingFrameInfo) { - cricket::VideoMediaInfo video_media_info; + VideoMediaInfo video_media_info; - video_media_info.receivers.push_back(cricket::VideoReceiverInfo()); - video_media_info.receivers[0].local_stats.push_back( - cricket::SsrcReceiverInfo()); + video_media_info.receivers.push_back(VideoReceiverInfo()); + video_media_info.receivers[0].local_stats.push_back(SsrcReceiverInfo()); video_media_info.receivers[0].local_stats[0].ssrc = 1; TimingFrameInfo timing_frame_info; timing_frame_info.rtp_timestamp = 1; @@ -2524,21 +2515,21 @@ TEST_F(RTCStatsCollectorTest, CollectGoogTimingFrameInfo) { pc_->AddVideoChannel("Mid0", "Transport0", video_media_info); stats_->SetupRemoteTrackAndReceiver( - cricket::MEDIA_TYPE_VIDEO, "RemoteVideoTrackID", "RemoteStreamId", 1); + webrtc::MediaType::VIDEO, "RemoteVideoTrackID", "RemoteStreamId", 1); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); auto inbound_rtps = report->GetStatsOfType(); ASSERT_EQ(inbound_rtps.size(), 1u); - ASSERT_TRUE(inbound_rtps[0]->goog_timing_frame_info.is_defined()); + ASSERT_TRUE(inbound_rtps[0]->goog_timing_frame_info.has_value()); EXPECT_EQ(*inbound_rtps[0]->goog_timing_frame_info, "1,2,3,4,5,6,7,8,9,10,11,12,13,1,0"); } TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Audio) { - cricket::VoiceMediaInfo voice_media_info; + VoiceMediaInfo voice_media_info; - voice_media_info.senders.push_back(cricket::VoiceSenderInfo()); - voice_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo()); + voice_media_info.senders.push_back(VoiceSenderInfo()); + voice_media_info.senders[0].local_stats.push_back(SsrcSenderInfo()); voice_media_info.senders[0].local_stats[0].ssrc = 1; voice_media_info.senders[0].packets_sent = 2; voice_media_info.senders[0].total_packet_send_delay = TimeDelta::Seconds(1); @@ -2547,24 +2538,24 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Audio) { voice_media_info.senders[0].header_and_padding_bytes_sent = 12; voice_media_info.senders[0].retransmitted_bytes_sent = 30; voice_media_info.senders[0].nacks_received = 31; - voice_media_info.senders[0].target_bitrate = 32000; + voice_media_info.senders[0].target_bitrate = 
DataRate::BitsPerSec(32'000); voice_media_info.senders[0].codec_payload_type = 42; voice_media_info.senders[0].active = true; RtpCodecParameters codec_parameters; codec_parameters.payload_type = 42; - codec_parameters.kind = cricket::MEDIA_TYPE_AUDIO; + codec_parameters.kind = webrtc::MediaType::AUDIO; codec_parameters.name = "dummy"; codec_parameters.clock_rate = 0; voice_media_info.send_codecs.insert( std::make_pair(codec_parameters.payload_type, codec_parameters)); pc_->AddVoiceChannel("AudioMid", "TransportName", voice_media_info); - stats_->SetupLocalTrackAndSender(cricket::MEDIA_TYPE_AUDIO, + stats_->SetupLocalTrackAndSender(webrtc::MediaType::AUDIO, "LocalAudioTrackID", 1, true, /*attachment_id=*/50); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); RTCOutboundRtpStreamStats expected_audio("OTTransportName1A1", report->timestamp()); @@ -2599,11 +2590,13 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Audio) { } TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Video) { - cricket::VideoMediaInfo video_media_info; + VideoMediaInfo video_media_info; - video_media_info.senders.push_back(cricket::VideoSenderInfo()); - video_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo()); + video_media_info.senders.push_back(VideoSenderInfo()); + video_media_info.senders[0].local_stats.push_back(SsrcSenderInfo()); video_media_info.senders[0].local_stats[0].ssrc = 1; + video_media_info.senders[0].rid = "q"; + video_media_info.senders[0].encoding_index = 0; video_media_info.senders[0].firs_received = 2; video_media_info.senders[0].plis_received = 3; video_media_info.senders[0].nacks_received = 4; @@ -2617,16 +2610,16 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Video) { video_media_info.senders[0].key_frames_encoded = 3; video_media_info.senders[0].total_encode_time_ms = 9000; video_media_info.senders[0].total_encoded_bytes_target = 1234; - video_media_info.senders[0].total_packet_send_delay = - webrtc::TimeDelta::Seconds(10); + video_media_info.senders[0].total_packet_send_delay = TimeDelta::Seconds(10); video_media_info.senders[0].quality_limitation_reason = QualityLimitationReason::kBandwidth; - video_media_info.senders[0].quality_limitation_durations_ms - [webrtc::QualityLimitationReason::kBandwidth] = 300; + video_media_info.senders[0] + .quality_limitation_durations_ms[QualityLimitationReason::kBandwidth] = + 300; video_media_info.senders[0].quality_limitation_resolution_changes = 56u; - video_media_info.senders[0].qp_sum = absl::nullopt; + video_media_info.senders[0].qp_sum = std::nullopt; video_media_info.senders[0].content_type = VideoContentType::UNSPECIFIED; - video_media_info.senders[0].encoder_implementation_name = absl::nullopt; + video_media_info.senders[0].encoder_implementation_name = std::nullopt; video_media_info.senders[0].power_efficient_encoder = false; video_media_info.senders[0].send_frame_width = 200; video_media_info.senders[0].send_frame_height = 100; @@ -2636,32 +2629,47 @@ TEST_F(RTCStatsCollectorTest, CollectRTCOutboundRtpStreamStats_Video) { video_media_info.senders[0].active = false; video_media_info.senders[0].scalability_mode = ScalabilityMode::kL3T3_KEY; video_media_info.senders[0].ssrc_groups.push_back( - {cricket::kFidSsrcGroupSemantics, {1, 4404}}); + {kFidSsrcGroupSemantics, {1, 4404}}); video_media_info.aggregated_senders.push_back(video_media_info.senders[0]); RtpCodecParameters codec_parameters; codec_parameters.payload_type = 42; - 
codec_parameters.kind = cricket::MEDIA_TYPE_AUDIO; + codec_parameters.kind = webrtc::MediaType::AUDIO; codec_parameters.name = "dummy"; codec_parameters.clock_rate = 0; video_media_info.send_codecs.insert( std::make_pair(codec_parameters.payload_type, codec_parameters)); + // Copy the sender info into a second simulcast layer and update ssrc, rid and + // simulcast index to be unique. + video_media_info.senders.push_back(video_media_info.senders[0]); + video_media_info.senders[1].local_stats[0].ssrc = 2; + video_media_info.senders[1].rid = "h"; + video_media_info.senders[1].encoding_index = 1; auto video_media_channels = pc_->AddVideoChannel("VideoMid", "TransportName", video_media_info); - stats_->SetupLocalTrackAndSender(cricket::MEDIA_TYPE_VIDEO, + stats_->SetupLocalTrackAndSender(webrtc::MediaType::VIDEO, "LocalVideoTrackID", 1, true, /*attachment_id=*/50); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); auto stats_of_my_type = report->GetStatsOfType(); - ASSERT_EQ(1U, stats_of_my_type.size()); + ASSERT_EQ(2U, stats_of_my_type.size()); + std::string id_of_first_ssrc; + for (const auto* outbound_rtp : stats_of_my_type) { + if (outbound_rtp->ssrc.value_or(0) == 1) { + id_of_first_ssrc = outbound_rtp->id(); + break; + } + } - RTCOutboundRtpStreamStats expected_video(stats_of_my_type[0]->id(), + RTCOutboundRtpStreamStats expected_video(id_of_first_ssrc, report->timestamp()); expected_video.media_source_id = "SV50"; // `expected_video.remote_id` should be undefined. expected_video.mid = "VideoMid"; + expected_video.rid = "q"; + expected_video.encoding_index = 0; expected_video.ssrc = 1; expected_video.kind = "video"; expected_video.transport_id = "TTransportName1"; @@ -2730,32 +2738,30 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStats) { pc_->AddVoiceChannel("audio", kTransportName); - std::unique_ptr rtp_local_candidate = - CreateFakeCandidate("42.42.42.42", 42, "protocol", rtc::ADAPTER_TYPE_WIFI, - cricket::LOCAL_PORT_TYPE, 42); - std::unique_ptr rtp_remote_candidate = - CreateFakeCandidate("42.42.42.42", 42, "protocol", - rtc::ADAPTER_TYPE_UNKNOWN, cricket::LOCAL_PORT_TYPE, - 42); - std::unique_ptr rtcp_local_candidate = - CreateFakeCandidate("42.42.42.42", 42, "protocol", rtc::ADAPTER_TYPE_WIFI, - cricket::LOCAL_PORT_TYPE, 42); - std::unique_ptr rtcp_remote_candidate = - CreateFakeCandidate("42.42.42.42", 42, "protocol", - rtc::ADAPTER_TYPE_UNKNOWN, cricket::LOCAL_PORT_TYPE, - 42); - - cricket::ConnectionInfo rtp_connection_info; + std::unique_ptr rtp_local_candidate = + CreateFakeCandidate("42.42.42.42", 42, "protocol", ADAPTER_TYPE_WIFI, + IceCandidateType::kHost, 42); + std::unique_ptr rtp_remote_candidate = + CreateFakeCandidate("42.42.42.42", 42, "protocol", ADAPTER_TYPE_UNKNOWN, + IceCandidateType::kHost, 42); + std::unique_ptr rtcp_local_candidate = + CreateFakeCandidate("42.42.42.42", 42, "protocol", ADAPTER_TYPE_WIFI, + IceCandidateType::kHost, 42); + std::unique_ptr rtcp_remote_candidate = + CreateFakeCandidate("42.42.42.42", 42, "protocol", ADAPTER_TYPE_UNKNOWN, + IceCandidateType::kHost, 42); + + ConnectionInfo rtp_connection_info; rtp_connection_info.best_connection = false; - rtp_connection_info.local_candidate = *rtp_local_candidate.get(); - rtp_connection_info.remote_candidate = *rtp_remote_candidate.get(); + rtp_connection_info.local_candidate = *rtp_local_candidate; + rtp_connection_info.remote_candidate = *rtp_remote_candidate; rtp_connection_info.sent_total_bytes = 42; 
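The outbound-video hunks above extend the test to two simulcast layers: each sender info now carries a rid and an encoding_index, and the collector is expected to report one outbound-rtp per layer. A hedged sketch of that setup step with the same values; VideoMediaInfo, VideoSenderInfo and SsrcSenderInfo are assumed to live in the webrtc namespace after the rename, and the header path is an assumption.

#include "media/base/media_channel.h"  // media-info structs (assumed path)

webrtc::VideoMediaInfo TwoSimulcastLayers() {
  webrtc::VideoMediaInfo info;
  info.senders.push_back(webrtc::VideoSenderInfo());
  info.senders[0].local_stats.push_back(webrtc::SsrcSenderInfo());
  info.senders[0].local_stats[0].ssrc = 1;
  info.senders[0].rid = "q";
  info.senders[0].encoding_index = 0;
  // Copy layer 0, then make ssrc, rid and encoding index unique for layer 1.
  info.senders.push_back(info.senders[0]);
  info.senders[1].local_stats[0].ssrc = 2;
  info.senders[1].rid = "h";
  info.senders[1].encoding_index = 1;
  return info;
}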
rtp_connection_info.recv_total_bytes = 1337; rtp_connection_info.sent_total_packets = 3; rtp_connection_info.sent_discarded_packets = 2; rtp_connection_info.packets_received = 4; - cricket::TransportChannelStats rtp_transport_channel_stats; - rtp_transport_channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP; + TransportChannelStats rtp_transport_channel_stats; + rtp_transport_channel_stats.component = ICE_CANDIDATE_COMPONENT_RTP; rtp_transport_channel_stats.ice_transport_stats.connection_infos.push_back( rtp_connection_info); rtp_transport_channel_stats.dtls_state = DtlsTransportState::kNew; @@ -2770,10 +2776,10 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStats) { pc_->SetTransportStats(kTransportName, {rtp_transport_channel_stats}); // Get stats without RTCP, an active connection or certificates. - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); RTCTransportStats expected_rtp_transport( - "Ttransport" + rtc::ToString(cricket::ICE_CANDIDATE_COMPONENT_RTP), + "Ttransport" + absl::StrCat(ICE_CANDIDATE_COMPONENT_RTP), report->timestamp()); expected_rtp_transport.bytes_sent = 42; expected_rtp_transport.packets_sent = 1; @@ -2791,18 +2797,17 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStats) { expected_rtp_transport, report->Get(expected_rtp_transport.id())->cast_to()); - cricket::ConnectionInfo rtcp_connection_info; + ConnectionInfo rtcp_connection_info; rtcp_connection_info.best_connection = false; - rtcp_connection_info.local_candidate = *rtcp_local_candidate.get(); - rtcp_connection_info.remote_candidate = *rtcp_remote_candidate.get(); + rtcp_connection_info.local_candidate = *rtcp_local_candidate; + rtcp_connection_info.remote_candidate = *rtcp_remote_candidate; rtcp_connection_info.sent_total_bytes = 1337; rtcp_connection_info.recv_total_bytes = 42; rtcp_connection_info.sent_total_packets = 3; rtcp_connection_info.sent_discarded_packets = 2; rtcp_connection_info.packets_received = 4; - cricket::TransportChannelStats rtcp_transport_channel_stats; - rtcp_transport_channel_stats.component = - cricket::ICE_CANDIDATE_COMPONENT_RTCP; + TransportChannelStats rtcp_transport_channel_stats; + rtcp_transport_channel_stats.component = ICE_CANDIDATE_COMPONENT_RTCP; rtcp_transport_channel_stats.ice_transport_stats.connection_infos.push_back( rtcp_connection_info); rtcp_transport_channel_stats.dtls_state = DtlsTransportState::kConnecting; @@ -2821,7 +2826,7 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStats) { report = stats_->GetFreshStatsReport(); RTCTransportStats expected_rtcp_transport( - "Ttransport" + rtc::ToString(cricket::ICE_CANDIDATE_COMPONENT_RTCP), + "Ttransport" + absl::StrCat(ICE_CANDIDATE_COMPONENT_RTCP), report->timestamp()); expected_rtcp_transport.bytes_sent = 1337; expected_rtcp_transport.packets_sent = 1; @@ -2902,27 +2907,25 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStatsWithCrypto) { pc_->AddVoiceChannel("audio", kTransportName); - std::unique_ptr rtp_local_candidate = - CreateFakeCandidate("42.42.42.42", 42, "protocol", rtc::ADAPTER_TYPE_WIFI, - cricket::LOCAL_PORT_TYPE, 42); - std::unique_ptr rtp_remote_candidate = - CreateFakeCandidate("42.42.42.42", 42, "protocol", - rtc::ADAPTER_TYPE_UNKNOWN, cricket::LOCAL_PORT_TYPE, - 42); - std::unique_ptr rtcp_local_candidate = - CreateFakeCandidate("42.42.42.42", 42, "protocol", rtc::ADAPTER_TYPE_WIFI, - cricket::LOCAL_PORT_TYPE, 42); - std::unique_ptr rtcp_remote_candidate = - CreateFakeCandidate("42.42.42.42", 42, "protocol", - 
rtc::ADAPTER_TYPE_UNKNOWN, cricket::LOCAL_PORT_TYPE, - 42); - - cricket::ConnectionInfo rtp_connection_info; + std::unique_ptr rtp_local_candidate = + CreateFakeCandidate("42.42.42.42", 42, "protocol", ADAPTER_TYPE_WIFI, + IceCandidateType::kHost, 42); + std::unique_ptr rtp_remote_candidate = + CreateFakeCandidate("42.42.42.42", 42, "protocol", ADAPTER_TYPE_UNKNOWN, + IceCandidateType::kHost, 42); + std::unique_ptr rtcp_local_candidate = + CreateFakeCandidate("42.42.42.42", 42, "protocol", ADAPTER_TYPE_WIFI, + IceCandidateType::kHost, 42); + std::unique_ptr rtcp_remote_candidate = + CreateFakeCandidate("42.42.42.42", 42, "protocol", ADAPTER_TYPE_UNKNOWN, + IceCandidateType::kHost, 42); + + ConnectionInfo rtp_connection_info; rtp_connection_info.best_connection = false; - rtp_connection_info.local_candidate = *rtp_local_candidate.get(); - rtp_connection_info.remote_candidate = *rtp_remote_candidate.get(); - cricket::TransportChannelStats rtp_transport_channel_stats; - rtp_transport_channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP; + rtp_connection_info.local_candidate = *rtp_local_candidate; + rtp_connection_info.remote_candidate = *rtp_remote_candidate; + TransportChannelStats rtp_transport_channel_stats; + rtp_transport_channel_stats.component = ICE_CANDIDATE_COMPONENT_RTP; rtp_transport_channel_stats.ice_transport_stats.connection_infos.push_back( rtp_connection_info); // The state must be connected in order for crypto parameters to show up. @@ -2930,23 +2933,23 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStatsWithCrypto) { rtp_transport_channel_stats.ice_transport_stats .selected_candidate_pair_changes = 1; rtp_transport_channel_stats.ssl_version_bytes = 0x0203; - rtp_transport_channel_stats.dtls_role = rtc::SSL_CLIENT; + rtp_transport_channel_stats.dtls_role = SSL_CLIENT; rtp_transport_channel_stats.ice_transport_stats.ice_role = - cricket::ICEROLE_CONTROLLING; + ICEROLE_CONTROLLING; rtp_transport_channel_stats.ice_transport_stats.ice_local_username_fragment = "thelocalufrag"; rtp_transport_channel_stats.ice_transport_stats.ice_state = IceTransportState::kConnected; - // 0x2F is TLS_RSA_WITH_AES_128_CBC_SHA according to IANA - rtp_transport_channel_stats.ssl_cipher_suite = 0x2F; - rtp_transport_channel_stats.srtp_crypto_suite = rtc::kSrtpAes128CmSha1_80; + rtp_transport_channel_stats.tls_cipher_suite_name = + "TLS_RSA_WITH_AES_128_CBC_SHA"; + rtp_transport_channel_stats.srtp_crypto_suite = kSrtpAes128CmSha1_80; pc_->SetTransportStats(kTransportName, {rtp_transport_channel_stats}); // Get stats - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); RTCTransportStats expected_rtp_transport( - "Ttransport" + rtc::ToString(cricket::ICE_CANDIDATE_COMPONENT_RTP), + "Ttransport" + absl::StrCat(ICE_CANDIDATE_COMPONENT_RTP), report->timestamp()); expected_rtp_transport.dtls_state = "connected"; expected_rtp_transport.selected_candidate_pair_changes = 1; @@ -2971,10 +2974,10 @@ TEST_F(RTCStatsCollectorTest, CollectRTCTransportStatsWithCrypto) { } TEST_F(RTCStatsCollectorTest, CollectNoStreamRTCOutboundRtpStreamStats_Audio) { - cricket::VoiceMediaInfo voice_media_info; + VoiceMediaInfo voice_media_info; - voice_media_info.senders.push_back(cricket::VoiceSenderInfo()); - voice_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo()); + voice_media_info.senders.push_back(VoiceSenderInfo()); + voice_media_info.senders[0].local_stats.push_back(SsrcSenderInfo()); voice_media_info.senders[0].local_stats[0].ssrc = 1; 
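The transport hunks above change how the expected transport stats id is built (absl::StrCat instead of rtc::ToString) and report the negotiated TLS cipher by its IANA name rather than its numeric suite id. A sketch of the id construction that matches expectation strings such as "Ttransport1"; treating the ICE component as a plain int keeps the sketch self-contained.

#include <string>

#include "absl/strings/str_cat.h"

// "T" + transport name + component, e.g. TransportStatsId("transport", 1)
// yields "Ttransport1" for the RTP component.
std::string TransportStatsId(const std::string& transport_name,
                             int component) {
  return absl::StrCat("T", transport_name, component);
}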
voice_media_info.senders[0].packets_sent = 2; voice_media_info.senders[0].total_packet_send_delay = TimeDelta::Seconds(0.5); @@ -2988,7 +2991,7 @@ TEST_F(RTCStatsCollectorTest, CollectNoStreamRTCOutboundRtpStreamStats_Audio) { RtpCodecParameters codec_parameters; codec_parameters.payload_type = 42; - codec_parameters.kind = cricket::MEDIA_TYPE_AUDIO; + codec_parameters.kind = webrtc::MediaType::AUDIO; codec_parameters.name = "dummy"; codec_parameters.clock_rate = 0; voice_media_info.send_codecs.insert( @@ -2996,11 +2999,11 @@ TEST_F(RTCStatsCollectorTest, CollectNoStreamRTCOutboundRtpStreamStats_Audio) { // Emulates the case where AddTrack is used without an associated MediaStream pc_->AddVoiceChannel("AudioMid", "TransportName", voice_media_info); - stats_->SetupLocalTrackAndSender(cricket::MEDIA_TYPE_AUDIO, + stats_->SetupLocalTrackAndSender(webrtc::MediaType::AUDIO, "LocalAudioTrackID", 1, false, /*attachment_id=*/50); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); RTCOutboundRtpStreamStats expected_audio("OTTransportName1A1", report->timestamp()); @@ -3031,9 +3034,9 @@ TEST_F(RTCStatsCollectorTest, RTCAudioSourceStatsCollectedForSenderWithTrack) { const uint32_t kSsrc = 4; const int kAttachmentId = 42; - cricket::VoiceMediaInfo voice_media_info; - voice_media_info.senders.push_back(cricket::VoiceSenderInfo()); - voice_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo()); + VoiceMediaInfo voice_media_info; + voice_media_info.senders.push_back(VoiceSenderInfo()); + voice_media_info.senders[0].local_stats.push_back(SsrcSenderInfo()); voice_media_info.senders[0].local_stats[0].ssrc = kSsrc; voice_media_info.senders[0].audio_level = 32767; // [0,32767] voice_media_info.senders[0].total_input_energy = 2.0; @@ -3042,11 +3045,11 @@ TEST_F(RTCStatsCollectorTest, RTCAudioSourceStatsCollectedForSenderWithTrack) { voice_media_info.senders[0].apm_statistics.echo_return_loss_enhancement = 52.0; pc_->AddVoiceChannel("AudioMid", "TransportName", voice_media_info); - stats_->SetupLocalTrackAndSender(cricket::MEDIA_TYPE_AUDIO, + stats_->SetupLocalTrackAndSender(webrtc::MediaType::AUDIO, "LocalAudioTrackID", kSsrc, false, kAttachmentId); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); RTCAudioSourceStats expected_audio("SA42", report->timestamp()); expected_audio.track_identifier = "LocalAudioTrackID"; @@ -3068,14 +3071,14 @@ TEST_F(RTCStatsCollectorTest, RTCVideoSourceStatsCollectedForSenderWithTrack) { const int kVideoSourceWidth = 12; const int kVideoSourceHeight = 34; - cricket::VideoMediaInfo video_media_info; - video_media_info.aggregated_senders.push_back(cricket::VideoSenderInfo()); - video_media_info.senders.push_back(cricket::VideoSenderInfo()); - video_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo()); + VideoMediaInfo video_media_info; + video_media_info.aggregated_senders.push_back(VideoSenderInfo()); + video_media_info.senders.push_back(VideoSenderInfo()); + video_media_info.senders[0].local_stats.push_back(SsrcSenderInfo()); video_media_info.senders[0].local_stats[0].ssrc = kSsrc; video_media_info.senders[0].framerate_input = 29.0; video_media_info.aggregated_senders[0].local_stats.push_back( - cricket::SsrcSenderInfo()); + SsrcSenderInfo()); video_media_info.aggregated_senders[0].local_stats[0].ssrc = kSsrc; video_media_info.aggregated_senders[0].framerate_input = 29.0; video_media_info.aggregated_senders[0].frames = 10001; @@ 
-3085,13 +3088,14 @@ TEST_F(RTCStatsCollectorTest, RTCVideoSourceStatsCollectedForSenderWithTrack) { kVideoSourceHeight); auto video_track = FakeVideoTrackForStats::Create( "LocalVideoTrackID", MediaStreamTrackInterface::kLive, video_source); - rtc::scoped_refptr sender = CreateMockSender( - cricket::MEDIA_TYPE_VIDEO, video_track, kSsrc, kAttachmentId, {}); + scoped_refptr sender = CreateMockSender( + webrtc::MediaType::VIDEO, video_track, kSsrc, kAttachmentId, {}); EXPECT_CALL(*sender, Stop()); EXPECT_CALL(*sender, SetMediaChannel(_)); + EXPECT_CALL(*sender, SetSendCodecs(_)); pc_->AddSender(sender); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); RTCVideoSourceStats expected_video("SV42", report->timestamp()); expected_video.track_identifier = "LocalVideoTrackID"; @@ -3119,9 +3123,9 @@ TEST_F(RTCStatsCollectorTest, const int kVideoSourceWidth = 12; const int kVideoSourceHeight = 34; - cricket::VideoMediaInfo video_media_info; - video_media_info.senders.push_back(cricket::VideoSenderInfo()); - video_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo()); + VideoMediaInfo video_media_info; + video_media_info.senders.push_back(VideoSenderInfo()); + video_media_info.senders[0].local_stats.push_back(SsrcSenderInfo()); video_media_info.senders[0].framerate_input = 29.0; pc_->AddVideoChannel("VideoMid", "TransportName", video_media_info); @@ -3129,17 +3133,18 @@ TEST_F(RTCStatsCollectorTest, kVideoSourceHeight); auto video_track = FakeVideoTrackForStats::Create( "LocalVideoTrackID", MediaStreamTrackInterface::kLive, video_source); - rtc::scoped_refptr sender = CreateMockSender( - cricket::MEDIA_TYPE_VIDEO, video_track, kNoSsrc, kAttachmentId, {}); + scoped_refptr sender = CreateMockSender( + webrtc::MediaType::VIDEO, video_track, kNoSsrc, kAttachmentId, {}); EXPECT_CALL(*sender, Stop()); EXPECT_CALL(*sender, SetMediaChannel(_)); + EXPECT_CALL(*sender, SetSendCodecs(_)); pc_->AddSender(sender); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); ASSERT_TRUE(report->Get("SV42")); auto video_stats = report->Get("SV42")->cast_to(); - EXPECT_FALSE(video_stats.frames_per_second.is_defined()); - EXPECT_FALSE(video_stats.frames.is_defined()); + EXPECT_FALSE(video_stats.frames_per_second.has_value()); + EXPECT_FALSE(video_stats.frames.has_value()); } // The track not having a source is not expected to be true in practise, but @@ -3149,9 +3154,9 @@ TEST_F(RTCStatsCollectorTest, const uint32_t kSsrc = 4; const int kAttachmentId = 42; - cricket::VideoMediaInfo video_media_info; - video_media_info.senders.push_back(cricket::VideoSenderInfo()); - video_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo()); + VideoMediaInfo video_media_info; + video_media_info.senders.push_back(VideoSenderInfo()); + video_media_info.senders[0].local_stats.push_back(SsrcSenderInfo()); video_media_info.senders[0].local_stats[0].ssrc = kSsrc; video_media_info.senders[0].framerate_input = 29.0; pc_->AddVideoChannel("VideoMid", "TransportName", video_media_info); @@ -3159,17 +3164,18 @@ TEST_F(RTCStatsCollectorTest, auto video_track = FakeVideoTrackForStats::Create( "LocalVideoTrackID", MediaStreamTrackInterface::kLive, /*source=*/nullptr); - rtc::scoped_refptr sender = CreateMockSender( - cricket::MEDIA_TYPE_VIDEO, video_track, kSsrc, kAttachmentId, {}); + scoped_refptr sender = CreateMockSender( + webrtc::MediaType::VIDEO, video_track, kSsrc, kAttachmentId, {}); EXPECT_CALL(*sender, 
Stop()); EXPECT_CALL(*sender, SetMediaChannel(_)); + EXPECT_CALL(*sender, SetSendCodecs(_)); pc_->AddSender(sender); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); ASSERT_TRUE(report->Get("SV42")); auto video_stats = report->Get("SV42")->cast_to(); - EXPECT_FALSE(video_stats.width.is_defined()); - EXPECT_FALSE(video_stats.height.is_defined()); + EXPECT_FALSE(video_stats.width.has_value()); + EXPECT_FALSE(video_stats.height.has_value()); } TEST_F(RTCStatsCollectorTest, @@ -3177,39 +3183,39 @@ TEST_F(RTCStatsCollectorTest, const uint32_t kSsrc = 4; const int kAttachmentId = 42; - cricket::VoiceMediaInfo voice_media_info; - voice_media_info.senders.push_back(cricket::VoiceSenderInfo()); - voice_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo()); + VoiceMediaInfo voice_media_info; + voice_media_info.senders.push_back(VoiceSenderInfo()); + voice_media_info.senders[0].local_stats.push_back(SsrcSenderInfo()); voice_media_info.senders[0].local_stats[0].ssrc = kSsrc; pc_->AddVoiceChannel("AudioMid", "TransportName", voice_media_info); - rtc::scoped_refptr sender = CreateMockSender( - cricket::MEDIA_TYPE_AUDIO, /*track=*/nullptr, kSsrc, kAttachmentId, {}); + scoped_refptr sender = CreateMockSender( + webrtc::MediaType::AUDIO, /*track=*/nullptr, kSsrc, kAttachmentId, {}); EXPECT_CALL(*sender, Stop()); EXPECT_CALL(*sender, SetMediaChannel(_)); + EXPECT_CALL(*sender, SetSendCodecs(_)); pc_->AddSender(sender); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); EXPECT_FALSE(report->Get("SA42")); } -// Parameterized tests on cricket::MediaType (audio or video). +// Parameterized tests on webrtc::MediaType (audio or video). class RTCStatsCollectorTestWithParamKind : public RTCStatsCollectorTest, - public ::testing::WithParamInterface { + public ::testing::WithParamInterface { public: RTCStatsCollectorTestWithParamKind() : media_type_(GetParam()) { - RTC_DCHECK(media_type_ == cricket::MEDIA_TYPE_AUDIO || - media_type_ == cricket::MEDIA_TYPE_VIDEO); + RTC_DCHECK(media_type_ == webrtc::MediaType::AUDIO || + media_type_ == webrtc::MediaType::VIDEO); } std::string MediaTypeCharStr() const { switch (media_type_) { - case cricket::MEDIA_TYPE_AUDIO: + case webrtc::MediaType::AUDIO: return "A"; - case cricket::MEDIA_TYPE_VIDEO: + case webrtc::MediaType::VIDEO: return "V"; - case cricket::MEDIA_TYPE_DATA: - case cricket::MEDIA_TYPE_UNSUPPORTED: + default: RTC_DCHECK_NOTREACHED(); return "?"; } @@ -3217,12 +3223,11 @@ class RTCStatsCollectorTestWithParamKind std::string MediaTypeKind() const { switch (media_type_) { - case cricket::MEDIA_TYPE_AUDIO: + case webrtc::MediaType::AUDIO: return "audio"; - case cricket::MEDIA_TYPE_VIDEO: + case webrtc::MediaType::VIDEO: return "video"; - case cricket::MEDIA_TYPE_DATA: - case cricket::MEDIA_TYPE_UNSUPPORTED: + default: RTC_DCHECK_NOTREACHED(); return ""; } @@ -3233,13 +3238,13 @@ class RTCStatsCollectorTestWithParamKind void AddSenderInfoAndMediaChannel( std::string transport_name, const std::vector& report_block_datas, - absl::optional codec) { + std::optional codec) { switch (media_type_) { - case cricket::MEDIA_TYPE_AUDIO: { - cricket::VoiceMediaInfo voice_media_info; + case webrtc::MediaType::AUDIO: { + VoiceMediaInfo voice_media_info; for (const auto& report_block_data : report_block_datas) { - cricket::VoiceSenderInfo sender; - sender.local_stats.push_back(cricket::SsrcSenderInfo()); + VoiceSenderInfo sender; + 
sender.local_stats.push_back(SsrcSenderInfo()); sender.local_stats[0].ssrc = report_block_data.source_ssrc(); if (codec.has_value()) { sender.codec_payload_type = codec->payload_type; @@ -3252,11 +3257,11 @@ class RTCStatsCollectorTestWithParamKind pc_->AddVoiceChannel("mid", transport_name, voice_media_info); return; } - case cricket::MEDIA_TYPE_VIDEO: { - cricket::VideoMediaInfo video_media_info; + case webrtc::MediaType::VIDEO: { + VideoMediaInfo video_media_info; for (const auto& report_block_data : report_block_datas) { - cricket::VideoSenderInfo sender; - sender.local_stats.push_back(cricket::SsrcSenderInfo()); + VideoSenderInfo sender; + sender.local_stats.push_back(SsrcSenderInfo()); sender.local_stats[0].ssrc = report_block_data.source_ssrc(); if (codec.has_value()) { sender.codec_payload_type = codec->payload_type; @@ -3270,14 +3275,14 @@ class RTCStatsCollectorTestWithParamKind pc_->AddVideoChannel("mid", transport_name, video_media_info); return; } - case cricket::MEDIA_TYPE_DATA: - case cricket::MEDIA_TYPE_UNSUPPORTED: + case webrtc::MediaType::DATA: + default: RTC_DCHECK_NOTREACHED(); } } protected: - cricket::MediaType media_type_; + webrtc::MediaType media_type_; }; // Verifies RTCRemoteInboundRtpStreamStats members that don't require @@ -3285,6 +3290,7 @@ class RTCStatsCollectorTestWithParamKind TEST_P(RTCStatsCollectorTestWithParamKind, RTCRemoteInboundRtpStreamStatsCollectedFromReportBlock) { const Timestamp kReportBlockTimestampUtc = Timestamp::Micros(123456789); + const Timestamp kReportBlockTimestamp = Timestamp::Micros(12345678); const uint8_t kFractionLost = 12; const TimeDelta kRoundTripTimeSample1 = TimeDelta::Millis(1'234); const TimeDelta kRoundTripTimeSample2 = TimeDelta::Seconds(13); @@ -3302,7 +3308,8 @@ TEST_P(RTCStatsCollectorTestWithParamKind, report_block.SetCumulativeLost(7); report_block.SetFractionLost(kFractionLost); ReportBlockData report_block_data; - report_block_data.SetReportBlock(0, report_block, kReportBlockTimestampUtc); + report_block_data.SetReportBlock(0, report_block, kReportBlockTimestampUtc, + kReportBlockTimestamp); report_block_data.AddRoundTripTimeSample(kRoundTripTimeSample1); // Only the last sample should be exposed as the // `RTCRemoteInboundRtpStreamStats::round_trip_time`. @@ -3310,9 +3317,9 @@ TEST_P(RTCStatsCollectorTestWithParamKind, report_block_datas.push_back(report_block_data); } AddSenderInfoAndMediaChannel("TransportName", report_block_datas, - absl::nullopt); + std::nullopt); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); for (auto ssrc : ssrcs) { std::string stream_id = "" + std::to_string(ssrc); RTCRemoteInboundRtpStreamStats expected_remote_inbound_rtp( @@ -3352,32 +3359,35 @@ TEST_P(RTCStatsCollectorTestWithParamKind, TEST_P(RTCStatsCollectorTestWithParamKind, RTCRemoteInboundRtpStreamStatsRttMissingBeforeMeasurement) { constexpr Timestamp kReportBlockTimestampUtc = Timestamp::Micros(123456789); + const Timestamp kReportBlockTimestamp = Timestamp::Micros(12345678); rtcp::ReportBlock report_block; // The remote-inbound-rtp SSRC and the outbound-rtp SSRC is the same as the // `source_ssrc`, "SSRC of the RTP packet sender". report_block.SetMediaSsrc(12); ReportBlockData report_block_data; // AddRoundTripTimeSample() not called. 
- report_block_data.SetReportBlock(0, report_block, kReportBlockTimestampUtc); + report_block_data.SetReportBlock(0, report_block, kReportBlockTimestampUtc, + kReportBlockTimestamp); AddSenderInfoAndMediaChannel("TransportName", {report_block_data}, - absl::nullopt); + std::nullopt); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); std::string remote_inbound_rtp_id = "RI" + MediaTypeCharStr() + "12"; ASSERT_TRUE(report->Get(remote_inbound_rtp_id)); auto& remote_inbound_rtp = report->Get(remote_inbound_rtp_id) ->cast_to(); - EXPECT_TRUE(remote_inbound_rtp.round_trip_time_measurements.is_defined()); + EXPECT_TRUE(remote_inbound_rtp.round_trip_time_measurements.has_value()); EXPECT_EQ(0, *remote_inbound_rtp.round_trip_time_measurements); - EXPECT_FALSE(remote_inbound_rtp.round_trip_time.is_defined()); + EXPECT_FALSE(remote_inbound_rtp.round_trip_time.has_value()); } TEST_P(RTCStatsCollectorTestWithParamKind, RTCRemoteInboundRtpStreamStatsWithTimestampFromReportBlock) { const Timestamp kReportBlockTimestampUtc = Timestamp::Micros(123456789); + const Timestamp kReportBlockTimestamp = Timestamp::Micros(12345678); fake_clock_.SetTime(kReportBlockTimestampUtc); rtcp::ReportBlock report_block; @@ -3385,15 +3395,16 @@ TEST_P(RTCStatsCollectorTestWithParamKind, // `source_ssrc`, "SSRC of the RTP packet sender". report_block.SetMediaSsrc(12); ReportBlockData report_block_data; - report_block_data.SetReportBlock(0, report_block, kReportBlockTimestampUtc); + report_block_data.SetReportBlock(0, report_block, kReportBlockTimestampUtc, + kReportBlockTimestamp); AddSenderInfoAndMediaChannel("TransportName", {report_block_data}, - absl::nullopt); + std::nullopt); // Advance time, it should be OK to have fresher reports than report blocks. 
fake_clock_.AdvanceTime(TimeDelta::Micros(1234)); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); std::string remote_inbound_rtp_id = "RI" + MediaTypeCharStr() + "12"; ASSERT_TRUE(report->Get(remote_inbound_rtp_id)); @@ -3410,6 +3421,7 @@ TEST_P(RTCStatsCollectorTestWithParamKind, TEST_P(RTCStatsCollectorTestWithParamKind, RTCRemoteInboundRtpStreamStatsWithCodecBasedMembers) { const Timestamp kReportBlockTimestampUtc = Timestamp::Micros(123456789); + const Timestamp kReportBlockTimestamp = Timestamp::Micros(12345678); fake_clock_.SetTime(kReportBlockTimestampUtc); rtcp::ReportBlock report_block; @@ -3418,7 +3430,8 @@ TEST_P(RTCStatsCollectorTestWithParamKind, report_block.SetMediaSsrc(12); report_block.SetJitter(5000); ReportBlockData report_block_data; - report_block_data.SetReportBlock(0, report_block, kReportBlockTimestampUtc); + report_block_data.SetReportBlock(0, report_block, kReportBlockTimestampUtc, + kReportBlockTimestamp); RtpCodecParameters codec; codec.payload_type = 3; @@ -3427,17 +3440,17 @@ TEST_P(RTCStatsCollectorTestWithParamKind, AddSenderInfoAndMediaChannel("TransportName", {report_block_data}, codec); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); std::string remote_inbound_rtp_id = "RI" + MediaTypeCharStr() + "12"; ASSERT_TRUE(report->Get(remote_inbound_rtp_id)); auto& remote_inbound_rtp = report->Get(remote_inbound_rtp_id) ->cast_to(); - EXPECT_TRUE(remote_inbound_rtp.codec_id.is_defined()); + EXPECT_TRUE(remote_inbound_rtp.codec_id.has_value()); EXPECT_TRUE(report->Get(*remote_inbound_rtp.codec_id)); - EXPECT_TRUE(remote_inbound_rtp.jitter.is_defined()); + EXPECT_TRUE(remote_inbound_rtp.jitter.has_value()); // The jitter (in seconds) is the report block's jitter divided by the codec's // clock rate. EXPECT_EQ(5.0, *remote_inbound_rtp.jitter); @@ -3446,6 +3459,7 @@ TEST_P(RTCStatsCollectorTestWithParamKind, TEST_P(RTCStatsCollectorTestWithParamKind, RTCRemoteInboundRtpStreamStatsWithRtcpTransport) { const Timestamp kReportBlockTimestampUtc = Timestamp::Micros(123456789); + const Timestamp kReportBlockTimestamp = Timestamp::Micros(12345678); fake_clock_.SetTime(kReportBlockTimestampUtc); rtcp::ReportBlock report_block; @@ -3453,28 +3467,28 @@ TEST_P(RTCStatsCollectorTestWithParamKind, // `source_ssrc`, "SSRC of the RTP packet sender". 
report_block.SetMediaSsrc(12); ReportBlockData report_block_data; - report_block_data.SetReportBlock(0, report_block, kReportBlockTimestampUtc); + report_block_data.SetReportBlock(0, report_block, kReportBlockTimestampUtc, + kReportBlockTimestamp); - cricket::TransportChannelStats rtp_transport_channel_stats; - rtp_transport_channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP; + TransportChannelStats rtp_transport_channel_stats; + rtp_transport_channel_stats.component = ICE_CANDIDATE_COMPONENT_RTP; rtp_transport_channel_stats.dtls_state = DtlsTransportState::kNew; - cricket::TransportChannelStats rtcp_transport_channel_stats; - rtcp_transport_channel_stats.component = - cricket::ICE_CANDIDATE_COMPONENT_RTCP; + TransportChannelStats rtcp_transport_channel_stats; + rtcp_transport_channel_stats.component = ICE_CANDIDATE_COMPONENT_RTCP; rtcp_transport_channel_stats.dtls_state = DtlsTransportState::kNew; pc_->SetTransportStats("TransportName", {rtp_transport_channel_stats, rtcp_transport_channel_stats}); AddSenderInfoAndMediaChannel("TransportName", {report_block_data}, - absl::nullopt); + std::nullopt); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); std::string remote_inbound_rtp_id = "RI" + MediaTypeCharStr() + "12"; ASSERT_TRUE(report->Get(remote_inbound_rtp_id)); auto& remote_inbound_rtp = report->Get(remote_inbound_rtp_id) ->cast_to(); - EXPECT_TRUE(remote_inbound_rtp.transport_id.is_defined()); + EXPECT_TRUE(remote_inbound_rtp.transport_id.has_value()); EXPECT_EQ("TTransportName2", // 2 for RTCP *remote_inbound_rtp.transport_id); EXPECT_TRUE(report->Get(*remote_inbound_rtp.transport_id)); @@ -3482,8 +3496,8 @@ TEST_P(RTCStatsCollectorTestWithParamKind, INSTANTIATE_TEST_SUITE_P(All, RTCStatsCollectorTestWithParamKind, - ::testing::Values(cricket::MEDIA_TYPE_AUDIO, // "/0" - cricket::MEDIA_TYPE_VIDEO)); // "/1" + ::testing::Values(webrtc::MediaType::AUDIO, // "/0" + webrtc::MediaType::VIDEO)); // "/1" // Checks that no remote outbound stats are collected if not available in // `VoiceMediaInfo`. @@ -3494,7 +3508,7 @@ TEST_F(RTCStatsCollectorTest, EXPECT_FALSE(graph.full_report->Get(graph.remote_outbound_rtp_id)); // Also check that no other remote outbound report is created (in case the // expected ID is incorrect). 
- rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); ASSERT_NE(report->begin(), report->end()) << "No reports have been generated."; for (const auto& stats : *report) { @@ -3512,10 +3526,9 @@ TEST_F(RTCStatsCollectorTest, RTCRemoteOutboundRtpAudioStreamStatsCollected) { const auto& remote_outbound_rtp = graph.full_report->Get(graph.remote_outbound_rtp_id) ->cast_to(); - EXPECT_EQ(remote_outbound_rtp.timestamp(), - Timestamp::Millis(kRemoteOutboundStatsTimestampMs)); + EXPECT_EQ(remote_outbound_rtp.timestamp(), kRemoteOutboundStatsTimestamp); EXPECT_FLOAT_EQ(*remote_outbound_rtp.remote_timestamp, - static_cast(kRemoteOutboundStatsRemoteTimestampMs)); + kRemoteOutboundStatsRemoteTimestamp.ms()); EXPECT_EQ(*remote_outbound_rtp.packets_sent, kRemoteOutboundStatsPacketsSent); EXPECT_EQ(*remote_outbound_rtp.bytes_sent, kRemoteOutboundStatsBytesSent); EXPECT_EQ(*remote_outbound_rtp.reports_sent, @@ -3527,47 +3540,48 @@ TEST_F(RTCStatsCollectorTest, const uint32_t kSsrc = 4; const int kAttachmentId = 42; - cricket::VideoMediaInfo video_media_info; - video_media_info.senders.push_back(cricket::VideoSenderInfo()); - video_media_info.senders[0].local_stats.push_back(cricket::SsrcSenderInfo()); + VideoMediaInfo video_media_info; + video_media_info.senders.push_back(VideoSenderInfo()); + video_media_info.senders[0].local_stats.push_back(SsrcSenderInfo()); video_media_info.senders[0].local_stats[0].ssrc = kSsrc; video_media_info.senders[0].framerate_input = 29.0; pc_->AddVideoChannel("VideoMid", "TransportName", video_media_info); - rtc::scoped_refptr sender = CreateMockSender( - cricket::MEDIA_TYPE_VIDEO, /*track=*/nullptr, kSsrc, kAttachmentId, {}); + scoped_refptr sender = CreateMockSender( + webrtc::MediaType::VIDEO, /*track=*/nullptr, kSsrc, kAttachmentId, {}); EXPECT_CALL(*sender, Stop()); EXPECT_CALL(*sender, SetMediaChannel(_)); + EXPECT_CALL(*sender, SetSendCodecs(_)); pc_->AddSender(sender); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); EXPECT_FALSE(report->Get("SV42")); } // Test collecting echo return loss stats from the audio processor attached to // the track, rather than the voice sender info. 
TEST_F(RTCStatsCollectorTest, CollectEchoReturnLossFromTrackAudioProcessor) { - rtc::scoped_refptr local_stream = + scoped_refptr local_stream = MediaStream::Create("LocalStreamId"); pc_->mutable_local_streams()->AddStream(local_stream); // Local audio track - rtc::scoped_refptr local_audio_track = - CreateFakeTrack(cricket::MEDIA_TYPE_AUDIO, "LocalAudioTrackID", + scoped_refptr local_audio_track = + CreateFakeTrack(webrtc::MediaType::AUDIO, "LocalAudioTrackID", MediaStreamTrackInterface::kEnded, /*create_fake_audio_processor=*/true); - local_stream->AddTrack(rtc::scoped_refptr( + local_stream->AddTrack(scoped_refptr( static_cast(local_audio_track.get()))); - cricket::VoiceSenderInfo voice_sender_info_ssrc1; - voice_sender_info_ssrc1.local_stats.push_back(cricket::SsrcSenderInfo()); + VoiceSenderInfo voice_sender_info_ssrc1; + voice_sender_info_ssrc1.local_stats.push_back(SsrcSenderInfo()); voice_sender_info_ssrc1.local_stats[0].ssrc = 1; stats_->CreateMockRtpSendersReceiversAndChannels( {std::make_pair(local_audio_track.get(), voice_sender_info_ssrc1)}, {}, {}, {}, {local_stream->id()}, {}); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); RTCAudioSourceStats expected_audio("SA11", report->timestamp()); expected_audio.track_identifier = "LocalAudioTrackID"; @@ -3594,7 +3608,7 @@ TEST_F(RTCStatsCollectorTest, GetStatsWithSenderSelector) { // | | // v v // codec (send) transport - rtc::scoped_refptr sender_report = + scoped_refptr sender_report = stats_->GetStatsReportWithSenderSelector(graph.sender); EXPECT_TRUE(sender_report); EXPECT_EQ(sender_report->timestamp(), graph.full_report->timestamp()); @@ -3618,7 +3632,7 @@ TEST_F(RTCStatsCollectorTest, GetStatsWithReceiverSelector) { // | | // v v // transport codec (recv) - rtc::scoped_refptr receiver_report = + scoped_refptr receiver_report = stats_->GetStatsReportWithReceiverSelector(graph.receiver); EXPECT_TRUE(receiver_report); EXPECT_EQ(receiver_report->size(), 3u); @@ -3634,7 +3648,7 @@ TEST_F(RTCStatsCollectorTest, GetStatsWithReceiverSelector) { TEST_F(RTCStatsCollectorTest, GetStatsWithNullSenderSelector) { ExampleStatsGraph graph = SetupExampleStatsGraphForSelectorTests(); - rtc::scoped_refptr empty_report = + scoped_refptr empty_report = stats_->GetStatsReportWithSenderSelector(nullptr); EXPECT_TRUE(empty_report); EXPECT_EQ(empty_report->timestamp(), graph.full_report->timestamp()); @@ -3643,7 +3657,7 @@ TEST_F(RTCStatsCollectorTest, GetStatsWithNullSenderSelector) { TEST_F(RTCStatsCollectorTest, GetStatsWithNullReceiverSelector) { ExampleStatsGraph graph = SetupExampleStatsGraphForSelectorTests(); - rtc::scoped_refptr empty_report = + scoped_refptr empty_report = stats_->GetStatsReportWithReceiverSelector(nullptr); EXPECT_TRUE(empty_report); EXPECT_EQ(empty_report->timestamp(), graph.full_report->timestamp()); @@ -3653,15 +3667,15 @@ TEST_F(RTCStatsCollectorTest, GetStatsWithNullReceiverSelector) { // Before SetLocalDescription() senders don't have an SSRC. // To simulate this case we create a mock sender with SSRC=0. 
TEST_F(RTCStatsCollectorTest, RtpIsMissingWhileSsrcIsZero) { - rtc::scoped_refptr track = - CreateFakeTrack(cricket::MEDIA_TYPE_AUDIO, "audioTrack", - MediaStreamTrackInterface::kLive); - rtc::scoped_refptr sender = - CreateMockSender(cricket::MEDIA_TYPE_AUDIO, track, 0, 49, {}); + scoped_refptr track = CreateFakeTrack( + webrtc::MediaType::AUDIO, "audioTrack", MediaStreamTrackInterface::kLive); + scoped_refptr sender = + CreateMockSender(webrtc::MediaType::AUDIO, track, 0, 49, {}); EXPECT_CALL(*sender, Stop()); + EXPECT_CALL(*sender, SetSendCodecs(_)); pc_->AddSender(sender); - rtc::scoped_refptr report = stats_->GetStatsReport(); + scoped_refptr report = stats_->GetStatsReport(); auto outbound_rtps = report->GetStatsOfType(); EXPECT_TRUE(outbound_rtps.empty()); @@ -3670,17 +3684,16 @@ TEST_F(RTCStatsCollectorTest, RtpIsMissingWhileSsrcIsZero) { // We may also be in a case where the SSRC has been assigned but no // `voice_sender_info` stats exist yet. TEST_F(RTCStatsCollectorTest, DoNotCrashIfSsrcIsKnownButInfosAreStillMissing) { - rtc::scoped_refptr track = - CreateFakeTrack(cricket::MEDIA_TYPE_AUDIO, "audioTrack", - MediaStreamTrackInterface::kLive); - rtc::scoped_refptr sender = - CreateMockSender(cricket::MEDIA_TYPE_AUDIO, track, 4711, 49, {}); + scoped_refptr track = CreateFakeTrack( + webrtc::MediaType::AUDIO, "audioTrack", MediaStreamTrackInterface::kLive); + scoped_refptr sender = + CreateMockSender(webrtc::MediaType::AUDIO, track, 4711, 49, {}); EXPECT_CALL(*sender, Stop()); + EXPECT_CALL(*sender, SetSendCodecs(_)); pc_->AddSender(sender); // We do not generate any matching voice_sender_info stats. - rtc::scoped_refptr report = stats_->GetStatsReport(); - + scoped_refptr report = stats_->GetStatsReport(); auto outbound_rtps = report->GetStatsOfType(); EXPECT_TRUE(outbound_rtps.empty()); } @@ -3691,7 +3704,7 @@ class RecursiveCallback : public RTCStatsCollectorCallback { explicit RecursiveCallback(RTCStatsCollectorWrapper* stats) : stats_(stats) {} void OnStatsDelivered( - const rtc::scoped_refptr& report) override { + const scoped_refptr& report) override { stats_->GetStatsReport(); called_ = true; } @@ -3706,37 +3719,49 @@ class RecursiveCallback : public RTCStatsCollectorCallback { // Test that nothing bad happens if a callback causes GetStatsReport to be // called again recursively. Regression test for crbug.com/webrtc/8973. 
TEST_F(RTCStatsCollectorTest, DoNotCrashWhenGetStatsCalledDuringCallback) { - auto callback1 = rtc::make_ref_counted(stats_.get()); - auto callback2 = rtc::make_ref_counted(stats_.get()); + auto callback1 = make_ref_counted(stats_.get()); + auto callback2 = make_ref_counted(stats_.get()); stats_->stats_collector()->GetStatsReport(callback1); stats_->stats_collector()->GetStatsReport(callback2); - EXPECT_TRUE_WAIT(callback1->called(), kGetStatsReportTimeoutMs); - EXPECT_TRUE_WAIT(callback2->called(), kGetStatsReportTimeoutMs); + EXPECT_THAT( + WaitUntil( + [&] { return callback1->called(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kGetStatsReportTimeoutMs)}), + IsRtcOk()); + EXPECT_THAT( + WaitUntil( + [&] { return callback2->called(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kGetStatsReportTimeoutMs)}), + IsRtcOk()); } class RTCTestStats : public RTCStats { public: - WEBRTC_RTCSTATS_DECL(); + WEBRTC_RTCSTATS_DECL(RTCTestStats); RTCTestStats(const std::string& id, Timestamp timestamp) - : RTCStats(id, timestamp), dummy_stat("dummyStat") {} + : RTCStats(id, timestamp) {} - RTCStatsMember dummy_stat; + std::optional dummy_stat; }; -WEBRTC_RTCSTATS_IMPL(RTCTestStats, RTCStats, "test-stats", &dummy_stat) +WEBRTC_RTCSTATS_IMPL(RTCTestStats, + RTCStats, + "test-stats", + AttributeInit("dummyStat", &dummy_stat)) // Overrides the stats collection to verify thread usage and that the resulting // partial reports are merged. class FakeRTCStatsCollector : public RTCStatsCollector, public RTCStatsCollectorCallback { public: - static rtc::scoped_refptr Create( + static scoped_refptr Create( PeerConnectionInternal* pc, + const Environment& env, int64_t cache_lifetime_us) { - return rtc::scoped_refptr( - new rtc::RefCountedObject(pc, - cache_lifetime_us)); + return scoped_refptr( + new RefCountedObject(pc, env, + cache_lifetime_us)); } // Since FakeRTCStatsCollector inherits twice from RefCountInterface, once via @@ -3747,19 +3772,23 @@ class FakeRTCStatsCollector : public RTCStatsCollector, // Satisfying the implementation of these methods and associating them with a // reference counter, will be done by RefCountedObject. virtual void AddRef() const = 0; - virtual rtc::RefCountReleaseStatus Release() const = 0; + virtual RefCountReleaseStatus Release() const = 0; // RTCStatsCollectorCallback implementation. 
void OnStatsDelivered( - const rtc::scoped_refptr& report) override { + const scoped_refptr& report) override { EXPECT_TRUE(signaling_thread_->IsCurrent()); MutexLock lock(&lock_); delivered_report_ = report; } void VerifyThreadUsageAndResultsMerging() { - GetStatsReport(rtc::scoped_refptr(this)); - EXPECT_TRUE_WAIT(HasVerifiedResults(), kGetStatsReportTimeoutMs); + GetStatsReport(scoped_refptr(this)); + EXPECT_THAT( + WaitUntil( + [&] { return HasVerifiedResults(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kGetStatsReportTimeoutMs)}), + IsRtcOk()); } bool HasVerifiedResults() { @@ -3780,8 +3809,10 @@ class FakeRTCStatsCollector : public RTCStatsCollector, } protected: - FakeRTCStatsCollector(PeerConnectionInternal* pc, int64_t cache_lifetime) - : RTCStatsCollector(pc, cache_lifetime), + FakeRTCStatsCollector(PeerConnectionInternal* pc, + const Environment& env, + int64_t cache_lifetime) + : RTCStatsCollector(pc, env, cache_lifetime), signaling_thread_(pc->signaling_thread()), worker_thread_(pc->worker_thread()), network_thread_(pc->network_thread()) {} @@ -3801,8 +3832,7 @@ class FakeRTCStatsCollector : public RTCStatsCollector, } void ProducePartialResultsOnNetworkThreadImpl( Timestamp timestamp, - const std::map& - transport_stats_by_name, + const std::map& transport_stats_by_name, const std::map& transport_cert_stats, RTCStatsReport* partial_report) override { EXPECT_TRUE(network_thread_->IsCurrent()); @@ -3817,22 +3847,22 @@ class FakeRTCStatsCollector : public RTCStatsCollector, } private: - rtc::Thread* const signaling_thread_; - rtc::Thread* const worker_thread_; - rtc::Thread* const network_thread_; + Thread* const signaling_thread_; + Thread* const worker_thread_; + Thread* const network_thread_; Mutex lock_; - rtc::scoped_refptr delivered_report_; + scoped_refptr delivered_report_; int produced_on_signaling_thread_ = 0; int produced_on_network_thread_ = 0; }; TEST(RTCStatsCollectorTestWithFakeCollector, ThreadUsageAndResultsMerging) { - rtc::AutoThread main_thread_; - auto pc = rtc::make_ref_counted(); - rtc::scoped_refptr stats_collector( - FakeRTCStatsCollector::Create(pc.get(), - 50 * rtc::kNumMicrosecsPerMillisec)); + AutoThread main_thread_; + auto pc = make_ref_counted(); + scoped_refptr stats_collector( + FakeRTCStatsCollector::Create(pc.get(), CreateEnvironment(), + 50 * kNumMicrosecsPerMillisec)); stats_collector->VerifyThreadUsageAndResultsMerging(); } diff --git a/pc/rtc_stats_integrationtest.cc b/pc/rtc_stats_integrationtest.cc index 648efab69a..7562f803a1 100644 --- a/pc/rtc_stats_integrationtest.cc +++ b/pc/rtc_stats_integrationtest.cc @@ -9,9 +9,9 @@ */ #include -#include #include +#include #include #include #include @@ -22,24 +22,27 @@ #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/audio_options.h" #include "api/data_channel_interface.h" +#include "api/make_ref_counted.h" #include "api/peer_connection_interface.h" #include "api/rtp_receiver_interface.h" #include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" +#include "api/stats/attribute.h" #include "api/stats/rtc_stats.h" #include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" #include "pc/rtc_stats_traversal.h" #include "pc/test/peer_connection_test_wrapper.h" #include "pc/test/rtc_stats_obtainer.h" #include "rtc_base/checks.h" -#include "rtc_base/event_tracer.h" -#include "rtc_base/gunit.h" #include "rtc_base/thread.h" #include 
"rtc_base/trace_event.h" #include "rtc_base/virtual_socket_server.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" using ::testing::Contains; @@ -49,71 +52,18 @@ namespace { const int64_t kGetStatsTimeoutMs = 10000; -const unsigned char* GetCategoryEnabledHandler(const char* name) { - if (strcmp("webrtc_stats", name) != 0) { - return reinterpret_cast(""); - } - return reinterpret_cast(name); -} - -class RTCStatsReportTraceListener { - public: - static void SetUp() { - if (!traced_report_) - traced_report_ = new RTCStatsReportTraceListener(); - traced_report_->last_trace_ = ""; - SetupEventTracer(&GetCategoryEnabledHandler, - &RTCStatsReportTraceListener::AddTraceEventHandler); - } - - static const std::string& last_trace() { - RTC_DCHECK(traced_report_); - return traced_report_->last_trace_; - } - - private: - static void AddTraceEventHandler( - char phase, - const unsigned char* category_enabled, - const char* name, - unsigned long long id, // NOLINT(runtime/int) - int num_args, - const char** arg_names, - const unsigned char* arg_types, - const unsigned long long* arg_values, // NOLINT(runtime/int) - unsigned char flags) { - RTC_DCHECK(traced_report_); - EXPECT_STREQ("webrtc_stats", - reinterpret_cast(category_enabled)); - EXPECT_STREQ("webrtc_stats", name); - EXPECT_EQ(1, num_args); - EXPECT_STREQ("report", arg_names[0]); - EXPECT_EQ(TRACE_VALUE_TYPE_COPY_STRING, arg_types[0]); - - traced_report_->last_trace_ = reinterpret_cast(arg_values[0]); - } - - static RTCStatsReportTraceListener* traced_report_; - std::string last_trace_; -}; - -RTCStatsReportTraceListener* RTCStatsReportTraceListener::traced_report_ = - nullptr; - class RTCStatsIntegrationTest : public ::testing::Test { public: RTCStatsIntegrationTest() - : network_thread_(new rtc::Thread(&virtual_socket_server_)), - worker_thread_(rtc::Thread::Create()) { - RTCStatsReportTraceListener::SetUp(); - + : network_thread_(new Thread(&virtual_socket_server_)), + worker_thread_(Thread::Create()) { RTC_CHECK(network_thread_->Start()); RTC_CHECK(worker_thread_->Start()); - caller_ = rtc::make_ref_counted( + caller_ = make_ref_counted( "caller", &virtual_socket_server_, network_thread_.get(), worker_thread_.get()); - callee_ = rtc::make_ref_counted( + callee_ = make_ref_counted( "callee", &virtual_socket_server_, network_thread_.get(), worker_thread_.get()); } @@ -132,8 +82,8 @@ class RTCStatsIntegrationTest : public ::testing::Test { PeerConnectionTestWrapper::Connect(caller_.get(), callee_.get()); // Get user media for audio and video - caller_->GetAndAddUserMedia(true, cricket::AudioOptions(), true); - callee_->GetAndAddUserMedia(true, cricket::AudioOptions(), true); + caller_->GetAndAddUserMedia(true, AudioOptions(), true); + callee_->GetAndAddUserMedia(true, AudioOptions(), true); // Create data channels DataChannelInit init; @@ -146,58 +96,64 @@ class RTCStatsIntegrationTest : public ::testing::Test { callee_->WaitForCallEstablished(); } - rtc::scoped_refptr GetStatsFromCaller() { + scoped_refptr GetStatsFromCaller() { return GetStats(caller_->pc()); } - rtc::scoped_refptr GetStatsFromCaller( - rtc::scoped_refptr selector) { + scoped_refptr GetStatsFromCaller( + scoped_refptr selector) { return GetStats(caller_->pc(), selector); } - rtc::scoped_refptr GetStatsFromCaller( - rtc::scoped_refptr selector) { + scoped_refptr GetStatsFromCaller( + scoped_refptr selector) { return GetStats(caller_->pc(), selector); } - rtc::scoped_refptr GetStatsFromCallee() { + scoped_refptr GetStatsFromCallee() { return 
GetStats(callee_->pc()); } - rtc::scoped_refptr GetStatsFromCallee( - rtc::scoped_refptr selector) { + scoped_refptr GetStatsFromCallee( + scoped_refptr selector) { return GetStats(callee_->pc(), selector); } - rtc::scoped_refptr GetStatsFromCallee( - rtc::scoped_refptr selector) { + scoped_refptr GetStatsFromCallee( + scoped_refptr selector) { return GetStats(callee_->pc(), selector); } protected: - static rtc::scoped_refptr GetStats( + static scoped_refptr GetStats( PeerConnectionInterface* pc) { - rtc::scoped_refptr stats_obtainer = - RTCStatsObtainer::Create(); + scoped_refptr stats_obtainer = RTCStatsObtainer::Create(); pc->GetStats(stats_obtainer.get()); - EXPECT_TRUE_WAIT(stats_obtainer->report() != nullptr, kGetStatsTimeoutMs); + EXPECT_THAT( + WaitUntil([&] { return stats_obtainer->report() != nullptr; }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kGetStatsTimeoutMs)}), + IsRtcOk()); return stats_obtainer->report(); } template - static rtc::scoped_refptr GetStats( + static scoped_refptr GetStats( PeerConnectionInterface* pc, - rtc::scoped_refptr selector) { - rtc::scoped_refptr stats_obtainer = - RTCStatsObtainer::Create(); + scoped_refptr selector) { + scoped_refptr stats_obtainer = RTCStatsObtainer::Create(); pc->GetStats(selector, stats_obtainer); - EXPECT_TRUE_WAIT(stats_obtainer->report() != nullptr, kGetStatsTimeoutMs); + EXPECT_THAT( + WaitUntil([&] { return stats_obtainer->report() != nullptr; }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kGetStatsTimeoutMs)}), + IsRtcOk()); return stats_obtainer->report(); } // `network_thread_` uses `virtual_socket_server_` so they must be // constructed/destructed in the correct order. - rtc::VirtualSocketServer virtual_socket_server_; - std::unique_ptr network_thread_; - std::unique_ptr worker_thread_; - rtc::scoped_refptr caller_; - rtc::scoped_refptr callee_; + VirtualSocketServer virtual_socket_server_; + std::unique_ptr network_thread_; + std::unique_ptr worker_thread_; + scoped_refptr caller_; + scoped_refptr callee_; }; class RTCStatsVerifier { @@ -206,106 +162,112 @@ class RTCStatsVerifier { : report_(report), stats_(stats), all_tests_successful_(true) { RTC_CHECK(report_); RTC_CHECK(stats_); - for (const RTCStatsMemberInterface* member : stats_->Members()) { - untested_members_.insert(member); + for (const auto& attribute : stats_->Attributes()) { + untested_attribute_names_.insert(attribute.name()); } } - void MarkMemberTested(const RTCStatsMemberInterface& member, - bool test_successful) { - untested_members_.erase(&member); + template + void MarkAttributeTested(const std::optional& field, + bool test_successful) { + untested_attribute_names_.erase(stats_->GetAttribute(field).name()); all_tests_successful_ &= test_successful; } - void TestMemberIsDefined(const RTCStatsMemberInterface& member) { - EXPECT_TRUE(member.is_defined()) - << stats_->type() << "." << member.name() << "[" << stats_->id() - << "] was undefined."; - MarkMemberTested(member, member.is_defined()); + template + void TestAttributeIsDefined(const std::optional& field) { + EXPECT_TRUE(field.has_value()) + << stats_->type() << "." << stats_->GetAttribute(field).name() << "[" + << stats_->id() << "] was undefined."; + MarkAttributeTested(field, field.has_value()); } - void TestMemberIsUndefined(const RTCStatsMemberInterface& member) { - EXPECT_FALSE(member.is_defined()) - << stats_->type() << "." 
<< member.name() << "[" << stats_->id() - << "] was defined (" << member.ValueToString() << ")."; - MarkMemberTested(member, !member.is_defined()); + template + void TestAttributeIsUndefined(const std::optional& field) { + Attribute attribute = stats_->GetAttribute(field); + EXPECT_FALSE(field.has_value()) + << stats_->type() << "." << attribute.name() << "[" << stats_->id() + << "] was defined (" << attribute.ToString() << ")."; + MarkAttributeTested(field, !field.has_value()); } template - void TestMemberIsPositive(const RTCStatsMemberInterface& member) { - EXPECT_TRUE(member.is_defined()) - << stats_->type() << "." << member.name() << "[" << stats_->id() - << "] was undefined."; - if (!member.is_defined()) { - MarkMemberTested(member, false); + void TestAttributeIsPositive(const std::optional& field) { + Attribute attribute = stats_->GetAttribute(field); + EXPECT_TRUE(field.has_value()) << stats_->type() << "." << attribute.name() + << "[" << stats_->id() << "] was undefined."; + if (!field.has_value()) { + MarkAttributeTested(field, false); return; } - bool is_positive = *member.cast_to>() > T(0); + bool is_positive = field.value() > T(0); EXPECT_TRUE(is_positive) - << stats_->type() << "." << member.name() << "[" << stats_->id() - << "] was not positive (" << member.ValueToString() << ")."; - MarkMemberTested(member, is_positive); + << stats_->type() << "." << attribute.name() << "[" << stats_->id() + << "] was not positive (" << attribute.ToString() << ")."; + MarkAttributeTested(field, is_positive); } template - void TestMemberIsNonNegative(const RTCStatsMemberInterface& member) { - EXPECT_TRUE(member.is_defined()) - << stats_->type() << "." << member.name() << "[" << stats_->id() - << "] was undefined."; - if (!member.is_defined()) { - MarkMemberTested(member, false); + void TestAttributeIsNonNegative(const std::optional& field) { + Attribute attribute = stats_->GetAttribute(field); + EXPECT_TRUE(field.has_value()) << stats_->type() << "." << attribute.name() + << "[" << stats_->id() << "] was undefined."; + if (!field.has_value()) { + MarkAttributeTested(field, false); return; } - bool is_non_negative = *member.cast_to>() >= T(0); + bool is_non_negative = field.value() >= T(0); EXPECT_TRUE(is_non_negative) - << stats_->type() << "." << member.name() << "[" << stats_->id() - << "] was not non-negative (" << member.ValueToString() << ")."; - MarkMemberTested(member, is_non_negative); + << stats_->type() << "." 
<< attribute.name() << "[" << stats_->id() + << "] was not non-negative (" << attribute.ToString() << ")."; + MarkAttributeTested(field, is_non_negative); } - void TestMemberIsIDReference(const RTCStatsMemberInterface& member, - const char* expected_type) { - TestMemberIsIDReference(member, expected_type, false); + template + void TestAttributeIsIDReference(const std::optional& field, + const char* expected_type) { + TestAttributeIsIDReference(field, expected_type, false); } - void TestMemberIsOptionalIDReference(const RTCStatsMemberInterface& member, - const char* expected_type) { - TestMemberIsIDReference(member, expected_type, true); + template + void TestAttributeIsOptionalIDReference(const std::optional& field, + const char* expected_type) { + TestAttributeIsIDReference(field, expected_type, true); } - bool ExpectAllMembersSuccessfullyTested() { - if (untested_members_.empty()) + bool ExpectAllAttributesSuccessfullyTested() { + if (untested_attribute_names_.empty()) return all_tests_successful_; - for (const RTCStatsMemberInterface* member : untested_members_) { - EXPECT_TRUE(false) << stats_->type() << "." << member->name() << "[" - << stats_->id() << "] was not tested."; + for (const char* name : untested_attribute_names_) { + EXPECT_TRUE(false) << stats_->type() << "." << name << "[" << stats_->id() + << "] was not tested."; } return false; } private: - void TestMemberIsIDReference(const RTCStatsMemberInterface& member, - const char* expected_type, - bool optional) { - if (optional && !member.is_defined()) { - MarkMemberTested(member, true); + template + void TestAttributeIsIDReference(const std::optional& field, + const char* expected_type, + bool optional) { + if (optional && !field.has_value()) { + MarkAttributeTested(field, true); return; } + Attribute attribute = stats_->GetAttribute(field); bool valid_reference = false; - if (member.is_defined()) { - if (member.type() == RTCStatsMemberInterface::kString) { + if (attribute.has_value()) { + if (attribute.holds_alternative()) { // A single ID. - const RTCStatsMember& id = - member.cast_to>(); - const RTCStats* referenced_stats = report_->Get(*id); + const RTCStats* referenced_stats = + report_->Get(attribute.get()); valid_reference = referenced_stats && referenced_stats->type() == expected_type; - } else if (member.type() == RTCStatsMemberInterface::kSequenceString) { + } else if (attribute.holds_alternative>()) { // A vector of IDs. valid_reference = true; - const RTCStatsMember>& ids = - member.cast_to>>(); - for (const std::string& id : *ids) { + for (const std::string& id : + attribute.get>()) { const RTCStats* referenced_stats = report_->Get(id); if (!referenced_stats || referenced_stats->type() != expected_type) { valid_reference = false; @@ -315,17 +277,16 @@ class RTCStatsVerifier { } } EXPECT_TRUE(valid_reference) - << stats_->type() << "." << member.name() + << stats_->type() << "." << attribute.name() << " is not a reference to an " "existing dictionary of type " - << expected_type << " (value: " - << (member.is_defined() ? 
member.ValueToString() : "null") << ")."; - MarkMemberTested(member, valid_reference); + << expected_type << " (value: " << attribute.ToString() << ")."; + MarkAttributeTested(field, valid_reference); } - rtc::scoped_refptr report_; + scoped_refptr report_; const RTCStats* stats_; - std::set untested_members_; + std::set untested_attribute_names_; bool all_tests_successful_; }; @@ -429,122 +390,129 @@ class RTCStatsReportVerifier { bool VerifyRTCCertificateStats(const RTCCertificateStats& certificate) { RTCStatsVerifier verifier(report_.get(), &certificate); - verifier.TestMemberIsDefined(certificate.fingerprint); - verifier.TestMemberIsDefined(certificate.fingerprint_algorithm); - verifier.TestMemberIsDefined(certificate.base64_certificate); - verifier.TestMemberIsOptionalIDReference(certificate.issuer_certificate_id, - RTCCertificateStats::kType); - return verifier.ExpectAllMembersSuccessfullyTested(); + verifier.TestAttributeIsDefined(certificate.fingerprint); + verifier.TestAttributeIsDefined(certificate.fingerprint_algorithm); + verifier.TestAttributeIsDefined(certificate.base64_certificate); + verifier.TestAttributeIsOptionalIDReference( + certificate.issuer_certificate_id, RTCCertificateStats::kType); + return verifier.ExpectAllAttributesSuccessfullyTested(); } bool VerifyRTCCodecStats(const RTCCodecStats& codec) { RTCStatsVerifier verifier(report_.get(), &codec); - verifier.TestMemberIsIDReference(codec.transport_id, - RTCTransportStats::kType); - verifier.TestMemberIsDefined(codec.payload_type); - verifier.TestMemberIsDefined(codec.mime_type); - verifier.TestMemberIsPositive(codec.clock_rate); + verifier.TestAttributeIsIDReference(codec.transport_id, + RTCTransportStats::kType); + verifier.TestAttributeIsDefined(codec.payload_type); + verifier.TestAttributeIsDefined(codec.mime_type); + verifier.TestAttributeIsPositive(codec.clock_rate); if (codec.mime_type->rfind("audio", 0) == 0) - verifier.TestMemberIsPositive(codec.channels); + verifier.TestAttributeIsPositive(codec.channels); else - verifier.TestMemberIsUndefined(codec.channels); + verifier.TestAttributeIsUndefined(codec.channels); // sdp_fmtp_line is an optional field. 
- verifier.MarkMemberTested(codec.sdp_fmtp_line, true); - return verifier.ExpectAllMembersSuccessfullyTested(); + verifier.MarkAttributeTested(codec.sdp_fmtp_line, true); + return verifier.ExpectAllAttributesSuccessfullyTested(); } bool VerifyRTCDataChannelStats(const RTCDataChannelStats& data_channel) { RTCStatsVerifier verifier(report_.get(), &data_channel); - verifier.TestMemberIsDefined(data_channel.label); - verifier.TestMemberIsDefined(data_channel.protocol); - verifier.TestMemberIsDefined(data_channel.data_channel_identifier); - verifier.TestMemberIsDefined(data_channel.state); - verifier.TestMemberIsNonNegative(data_channel.messages_sent); - verifier.TestMemberIsNonNegative(data_channel.bytes_sent); - verifier.TestMemberIsNonNegative(data_channel.messages_received); - verifier.TestMemberIsNonNegative(data_channel.bytes_received); - return verifier.ExpectAllMembersSuccessfullyTested(); + verifier.TestAttributeIsDefined(data_channel.label); + verifier.TestAttributeIsDefined(data_channel.protocol); + verifier.TestAttributeIsDefined(data_channel.data_channel_identifier); + verifier.TestAttributeIsDefined(data_channel.state); + verifier.TestAttributeIsNonNegative(data_channel.messages_sent); + verifier.TestAttributeIsNonNegative(data_channel.bytes_sent); + verifier.TestAttributeIsNonNegative( + data_channel.messages_received); + verifier.TestAttributeIsNonNegative(data_channel.bytes_received); + return verifier.ExpectAllAttributesSuccessfullyTested(); } bool VerifyRTCIceCandidatePairStats( const RTCIceCandidatePairStats& candidate_pair, bool is_selected_pair) { RTCStatsVerifier verifier(report_.get(), &candidate_pair); - verifier.TestMemberIsIDReference(candidate_pair.transport_id, - RTCTransportStats::kType); - verifier.TestMemberIsIDReference(candidate_pair.local_candidate_id, - RTCLocalIceCandidateStats::kType); - verifier.TestMemberIsIDReference(candidate_pair.remote_candidate_id, - RTCRemoteIceCandidateStats::kType); - verifier.TestMemberIsDefined(candidate_pair.state); - verifier.TestMemberIsNonNegative(candidate_pair.priority); - verifier.TestMemberIsDefined(candidate_pair.nominated); - verifier.TestMemberIsDefined(candidate_pair.writable); - verifier.TestMemberIsNonNegative(candidate_pair.packets_sent); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsIDReference(candidate_pair.transport_id, + RTCTransportStats::kType); + verifier.TestAttributeIsIDReference(candidate_pair.local_candidate_id, + RTCLocalIceCandidateStats::kType); + verifier.TestAttributeIsIDReference(candidate_pair.remote_candidate_id, + RTCRemoteIceCandidateStats::kType); + verifier.TestAttributeIsDefined(candidate_pair.state); + verifier.TestAttributeIsNonNegative(candidate_pair.priority); + verifier.TestAttributeIsDefined(candidate_pair.nominated); + verifier.TestAttributeIsDefined(candidate_pair.writable); + verifier.TestAttributeIsNonNegative(candidate_pair.packets_sent); + verifier.TestAttributeIsNonNegative( candidate_pair.packets_discarded_on_send); - verifier.TestMemberIsNonNegative(candidate_pair.packets_received); - verifier.TestMemberIsNonNegative(candidate_pair.bytes_sent); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( + candidate_pair.packets_received); + verifier.TestAttributeIsNonNegative(candidate_pair.bytes_sent); + verifier.TestAttributeIsNonNegative( candidate_pair.bytes_discarded_on_send); - verifier.TestMemberIsNonNegative(candidate_pair.bytes_received); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( + 
candidate_pair.bytes_received); + verifier.TestAttributeIsNonNegative( candidate_pair.total_round_trip_time); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( candidate_pair.current_round_trip_time); if (is_selected_pair) { - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( candidate_pair.available_outgoing_bitrate); // A pair should be nominated in order to be selected. EXPECT_TRUE(*candidate_pair.nominated); } else { - verifier.TestMemberIsUndefined(candidate_pair.available_outgoing_bitrate); + verifier.TestAttributeIsUndefined( + candidate_pair.available_outgoing_bitrate); } - verifier.TestMemberIsUndefined(candidate_pair.available_incoming_bitrate); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsUndefined( + candidate_pair.available_incoming_bitrate); + verifier.TestAttributeIsNonNegative( candidate_pair.requests_received); - verifier.TestMemberIsNonNegative(candidate_pair.requests_sent); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative(candidate_pair.requests_sent); + verifier.TestAttributeIsNonNegative( candidate_pair.responses_received); - verifier.TestMemberIsNonNegative(candidate_pair.responses_sent); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( + candidate_pair.responses_sent); + verifier.TestAttributeIsNonNegative( candidate_pair.consent_requests_sent); - verifier.TestMemberIsDefined(candidate_pair.last_packet_received_timestamp); - verifier.TestMemberIsDefined(candidate_pair.last_packet_sent_timestamp); + verifier.TestAttributeIsDefined( + candidate_pair.last_packet_received_timestamp); + verifier.TestAttributeIsDefined(candidate_pair.last_packet_sent_timestamp); - return verifier.ExpectAllMembersSuccessfullyTested(); + return verifier.ExpectAllAttributesSuccessfullyTested(); } bool VerifyRTCIceCandidateStats(const RTCIceCandidateStats& candidate) { RTCStatsVerifier verifier(report_.get(), &candidate); - verifier.TestMemberIsIDReference(candidate.transport_id, - RTCTransportStats::kType); - verifier.TestMemberIsDefined(candidate.is_remote); + verifier.TestAttributeIsIDReference(candidate.transport_id, + RTCTransportStats::kType); + verifier.TestAttributeIsDefined(candidate.is_remote); if (*candidate.is_remote) { - verifier.TestMemberIsUndefined(candidate.network_type); - verifier.TestMemberIsUndefined(candidate.network_adapter_type); - verifier.TestMemberIsUndefined(candidate.vpn); + verifier.TestAttributeIsUndefined(candidate.network_type); + verifier.TestAttributeIsUndefined(candidate.network_adapter_type); + verifier.TestAttributeIsUndefined(candidate.vpn); } else { - verifier.TestMemberIsDefined(candidate.network_type); - verifier.TestMemberIsDefined(candidate.network_adapter_type); - verifier.TestMemberIsDefined(candidate.vpn); + verifier.TestAttributeIsDefined(candidate.network_type); + verifier.TestAttributeIsDefined(candidate.network_adapter_type); + verifier.TestAttributeIsDefined(candidate.vpn); } - verifier.TestMemberIsDefined(candidate.ip); - verifier.TestMemberIsDefined(candidate.address); - verifier.TestMemberIsNonNegative(candidate.port); - verifier.TestMemberIsDefined(candidate.protocol); - verifier.TestMemberIsDefined(candidate.candidate_type); - verifier.TestMemberIsNonNegative(candidate.priority); - verifier.TestMemberIsUndefined(candidate.url); - verifier.TestMemberIsUndefined(candidate.relay_protocol); - verifier.TestMemberIsDefined(candidate.foundation); - verifier.TestMemberIsUndefined(candidate.related_address); - 
verifier.TestMemberIsUndefined(candidate.related_port); - verifier.TestMemberIsDefined(candidate.username_fragment); - verifier.TestMemberIsUndefined(candidate.tcp_type); - return verifier.ExpectAllMembersSuccessfullyTested(); + verifier.TestAttributeIsDefined(candidate.ip); + verifier.TestAttributeIsDefined(candidate.address); + verifier.TestAttributeIsNonNegative(candidate.port); + verifier.TestAttributeIsDefined(candidate.protocol); + verifier.TestAttributeIsDefined(candidate.candidate_type); + verifier.TestAttributeIsNonNegative(candidate.priority); + verifier.TestAttributeIsUndefined(candidate.url); + verifier.TestAttributeIsUndefined(candidate.relay_protocol); + verifier.TestAttributeIsDefined(candidate.foundation); + verifier.TestAttributeIsUndefined(candidate.related_address); + verifier.TestAttributeIsUndefined(candidate.related_port); + verifier.TestAttributeIsDefined(candidate.username_fragment); + verifier.TestAttributeIsUndefined(candidate.tcp_type); + return verifier.ExpectAllAttributesSuccessfullyTested(); } bool VerifyRTCLocalIceCandidateStats( @@ -560,226 +528,242 @@ class RTCStatsReportVerifier { bool VerifyRTCPeerConnectionStats( const RTCPeerConnectionStats& peer_connection) { RTCStatsVerifier verifier(report_.get(), &peer_connection); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( peer_connection.data_channels_opened); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( peer_connection.data_channels_closed); - return verifier.ExpectAllMembersSuccessfullyTested(); + return verifier.ExpectAllAttributesSuccessfullyTested(); } void VerifyRTCRtpStreamStats(const RTCRtpStreamStats& stream, RTCStatsVerifier& verifier) { - verifier.TestMemberIsDefined(stream.ssrc); - verifier.TestMemberIsDefined(stream.kind); - verifier.TestMemberIsIDReference(stream.transport_id, - RTCTransportStats::kType); - verifier.TestMemberIsIDReference(stream.codec_id, RTCCodecStats::kType); + verifier.TestAttributeIsDefined(stream.ssrc); + verifier.TestAttributeIsDefined(stream.kind); + verifier.TestAttributeIsIDReference(stream.transport_id, + RTCTransportStats::kType); + verifier.TestAttributeIsIDReference(stream.codec_id, RTCCodecStats::kType); } void VerifyRTCSentRtpStreamStats(const RTCSentRtpStreamStats& sent_stream, RTCStatsVerifier& verifier) { VerifyRTCRtpStreamStats(sent_stream, verifier); - verifier.TestMemberIsNonNegative(sent_stream.packets_sent); - verifier.TestMemberIsNonNegative(sent_stream.bytes_sent); + verifier.TestAttributeIsNonNegative(sent_stream.packets_sent); + verifier.TestAttributeIsNonNegative(sent_stream.bytes_sent); } bool VerifyRTCInboundRtpStreamStats( const RTCInboundRtpStreamStats& inbound_stream) { RTCStatsVerifier verifier(report_.get(), &inbound_stream); VerifyRTCReceivedRtpStreamStats(inbound_stream, verifier); - verifier.TestMemberIsOptionalIDReference( + verifier.TestAttributeIsOptionalIDReference( inbound_stream.remote_id, RTCRemoteOutboundRtpStreamStats::kType); - verifier.TestMemberIsDefined(inbound_stream.mid); - verifier.TestMemberIsDefined(inbound_stream.track_identifier); - if (inbound_stream.kind.is_defined() && *inbound_stream.kind == "video") { - verifier.TestMemberIsNonNegative(inbound_stream.qp_sum); - verifier.TestMemberIsDefined(inbound_stream.decoder_implementation); - verifier.TestMemberIsDefined(inbound_stream.power_efficient_decoder); + verifier.TestAttributeIsDefined(inbound_stream.mid); + verifier.TestAttributeIsDefined(inbound_stream.track_identifier); + if (inbound_stream.kind.has_value() && 
*inbound_stream.kind == "video") { + verifier.TestAttributeIsNonNegative(inbound_stream.qp_sum); + verifier.TestAttributeIsDefined(inbound_stream.decoder_implementation); + verifier.TestAttributeIsDefined(inbound_stream.power_efficient_decoder); } else { - verifier.TestMemberIsUndefined(inbound_stream.qp_sum); - verifier.TestMemberIsUndefined(inbound_stream.decoder_implementation); - verifier.TestMemberIsUndefined(inbound_stream.power_efficient_decoder); + verifier.TestAttributeIsUndefined(inbound_stream.qp_sum); + verifier.TestAttributeIsUndefined(inbound_stream.decoder_implementation); + verifier.TestAttributeIsUndefined(inbound_stream.power_efficient_decoder); } - verifier.TestMemberIsNonNegative(inbound_stream.packets_received); - if (inbound_stream.kind.is_defined() && *inbound_stream.kind == "audio") { - verifier.TestMemberIsNonNegative( + // As long as the corruption detection RTP header extension is not activated + // it should not aggregate any corruption score. The tests where this header + // extension is enabled are located in pc/peer_connection_integrationtest.cc + verifier.TestAttributeIsUndefined( + inbound_stream.total_corruption_probability); + verifier.TestAttributeIsUndefined( + inbound_stream.total_squared_corruption_probability); + verifier.TestAttributeIsUndefined(inbound_stream.corruption_measurements); + verifier.TestAttributeIsNonNegative( + inbound_stream.packets_received); + if (inbound_stream.kind.has_value() && *inbound_stream.kind == "audio") { + verifier.TestAttributeIsNonNegative( inbound_stream.packets_discarded); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.fec_packets_received); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.fec_packets_discarded); - verifier.TestMemberIsUndefined(inbound_stream.fec_bytes_received); + verifier.TestAttributeIsUndefined(inbound_stream.fec_bytes_received); } else { - verifier.TestMemberIsUndefined(inbound_stream.packets_discarded); + verifier.TestAttributeIsUndefined(inbound_stream.packets_discarded); // FEC stats are only present when FlexFEC was negotiated which is guarded // by the WebRTC-FlexFEC-03-Advertised/Enabled/ field trial and off by // default. 
- verifier.TestMemberIsUndefined(inbound_stream.fec_bytes_received); - verifier.TestMemberIsUndefined(inbound_stream.fec_packets_received); - verifier.TestMemberIsUndefined(inbound_stream.fec_packets_discarded); - verifier.TestMemberIsUndefined(inbound_stream.fec_ssrc); + verifier.TestAttributeIsUndefined(inbound_stream.fec_bytes_received); + verifier.TestAttributeIsUndefined(inbound_stream.fec_packets_received); + verifier.TestAttributeIsUndefined(inbound_stream.fec_packets_discarded); + verifier.TestAttributeIsUndefined(inbound_stream.fec_ssrc); } - verifier.TestMemberIsNonNegative(inbound_stream.bytes_received); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( + inbound_stream.bytes_received); + verifier.TestAttributeIsNonNegative( inbound_stream.header_bytes_received); - verifier.TestMemberIsDefined(inbound_stream.last_packet_received_timestamp); - if (inbound_stream.frames_received.ValueOrDefault(0) > 0) { - verifier.TestMemberIsNonNegative(inbound_stream.frame_width); - verifier.TestMemberIsNonNegative(inbound_stream.frame_height); + verifier.TestAttributeIsDefined( + inbound_stream.last_packet_received_timestamp); + if (inbound_stream.frames_received.value_or(0) > 0) { + verifier.TestAttributeIsNonNegative(inbound_stream.frame_width); + verifier.TestAttributeIsNonNegative( + inbound_stream.frame_height); } else { - verifier.TestMemberIsUndefined(inbound_stream.frame_width); - verifier.TestMemberIsUndefined(inbound_stream.frame_height); + verifier.TestAttributeIsUndefined(inbound_stream.frame_width); + verifier.TestAttributeIsUndefined(inbound_stream.frame_height); } - if (inbound_stream.frames_per_second.is_defined()) { - verifier.TestMemberIsNonNegative( + if (inbound_stream.frames_per_second.has_value()) { + verifier.TestAttributeIsNonNegative( inbound_stream.frames_per_second); } else { - verifier.TestMemberIsUndefined(inbound_stream.frames_per_second); + verifier.TestAttributeIsUndefined(inbound_stream.frames_per_second); } - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.jitter_buffer_delay); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.jitter_buffer_emitted_count); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.jitter_buffer_target_delay); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.jitter_buffer_minimum_delay); - if (inbound_stream.kind.is_defined() && *inbound_stream.kind == "video") { - verifier.TestMemberIsUndefined(inbound_stream.total_samples_received); - verifier.TestMemberIsUndefined(inbound_stream.concealed_samples); - verifier.TestMemberIsUndefined(inbound_stream.silent_concealed_samples); - verifier.TestMemberIsUndefined(inbound_stream.concealment_events); - verifier.TestMemberIsUndefined( + verifier.TestAttributeIsNonNegative( + inbound_stream.total_processing_delay); + if (inbound_stream.kind.has_value() && *inbound_stream.kind == "video") { + verifier.TestAttributeIsUndefined(inbound_stream.total_samples_received); + verifier.TestAttributeIsUndefined(inbound_stream.concealed_samples); + verifier.TestAttributeIsUndefined( + inbound_stream.silent_concealed_samples); + verifier.TestAttributeIsUndefined(inbound_stream.concealment_events); + verifier.TestAttributeIsUndefined( inbound_stream.inserted_samples_for_deceleration); - verifier.TestMemberIsUndefined( + verifier.TestAttributeIsUndefined( inbound_stream.removed_samples_for_acceleration); - 
verifier.TestMemberIsUndefined(inbound_stream.audio_level); - verifier.TestMemberIsUndefined(inbound_stream.total_audio_energy); - verifier.TestMemberIsUndefined(inbound_stream.total_samples_duration); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsUndefined(inbound_stream.audio_level); + verifier.TestAttributeIsUndefined(inbound_stream.total_audio_energy); + verifier.TestAttributeIsUndefined(inbound_stream.total_samples_duration); + verifier.TestAttributeIsNonNegative( inbound_stream.frames_received); - verifier.TestMemberIsNonNegative(inbound_stream.fir_count); - verifier.TestMemberIsNonNegative(inbound_stream.pli_count); - verifier.TestMemberIsNonNegative(inbound_stream.nack_count); + verifier.TestAttributeIsNonNegative(inbound_stream.fir_count); + verifier.TestAttributeIsNonNegative(inbound_stream.pli_count); + verifier.TestAttributeIsNonNegative(inbound_stream.nack_count); } else { - verifier.TestMemberIsUndefined(inbound_stream.fir_count); - verifier.TestMemberIsUndefined(inbound_stream.pli_count); - verifier.TestMemberIsUndefined(inbound_stream.nack_count); - verifier.TestMemberIsPositive( + verifier.TestAttributeIsUndefined(inbound_stream.fir_count); + verifier.TestAttributeIsUndefined(inbound_stream.pli_count); + verifier.TestAttributeIsUndefined(inbound_stream.nack_count); + verifier.TestAttributeIsPositive( inbound_stream.total_samples_received); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.concealed_samples); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.silent_concealed_samples); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.concealment_events); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.inserted_samples_for_deceleration); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.removed_samples_for_acceleration); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.jitter_buffer_target_delay); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.jitter_buffer_minimum_delay); - verifier.TestMemberIsPositive(inbound_stream.audio_level); - verifier.TestMemberIsPositive(inbound_stream.total_audio_energy); - verifier.TestMemberIsPositive( + verifier.TestAttributeIsPositive(inbound_stream.audio_level); + verifier.TestAttributeIsPositive( + inbound_stream.total_audio_energy); + verifier.TestAttributeIsPositive( inbound_stream.total_samples_duration); - verifier.TestMemberIsUndefined(inbound_stream.frames_received); + verifier.TestAttributeIsUndefined(inbound_stream.frames_received); } // RTX stats are typically only defined for video where RTX is negotiated. 
- if (inbound_stream.kind.is_defined() && *inbound_stream.kind == "video") { - verifier.TestMemberIsNonNegative( + if (inbound_stream.kind.has_value() && *inbound_stream.kind == "video") { + verifier.TestAttributeIsNonNegative( inbound_stream.retransmitted_packets_received); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.retransmitted_bytes_received); - verifier.TestMemberIsNonNegative(inbound_stream.rtx_ssrc); + verifier.TestAttributeIsNonNegative(inbound_stream.rtx_ssrc); } else { - verifier.TestMemberIsUndefined( + verifier.TestAttributeIsUndefined( inbound_stream.retransmitted_packets_received); - verifier.TestMemberIsUndefined( + verifier.TestAttributeIsUndefined( inbound_stream.retransmitted_bytes_received); - verifier.TestMemberIsUndefined(inbound_stream.rtx_ssrc); - verifier.TestMemberIsUndefined(inbound_stream.fec_ssrc); + verifier.TestAttributeIsUndefined(inbound_stream.rtx_ssrc); + verifier.TestAttributeIsUndefined(inbound_stream.fec_ssrc); } // Test runtime too short to get an estimate (at least two RTCP sender // reports need to be received). - verifier.MarkMemberTested(inbound_stream.estimated_playout_timestamp, true); - if (inbound_stream.kind.is_defined() && *inbound_stream.kind == "video") { - verifier.TestMemberIsDefined(inbound_stream.frames_decoded); - verifier.TestMemberIsDefined(inbound_stream.key_frames_decoded); - verifier.TestMemberIsNonNegative(inbound_stream.frames_dropped); - verifier.TestMemberIsNonNegative( + verifier.MarkAttributeTested(inbound_stream.estimated_playout_timestamp, + true); + if (inbound_stream.kind.has_value() && *inbound_stream.kind == "video") { + verifier.TestAttributeIsDefined(inbound_stream.frames_decoded); + verifier.TestAttributeIsDefined(inbound_stream.key_frames_decoded); + verifier.TestAttributeIsNonNegative( + inbound_stream.frames_dropped); + verifier.TestAttributeIsNonNegative( inbound_stream.total_decode_time); - verifier.TestMemberIsNonNegative( - inbound_stream.total_processing_delay); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.total_assembly_time); - verifier.TestMemberIsDefined( + verifier.TestAttributeIsDefined( inbound_stream.frames_assembled_from_multiple_packets); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.total_inter_frame_delay); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.total_squared_inter_frame_delay); - verifier.TestMemberIsNonNegative(inbound_stream.pause_count); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative(inbound_stream.pause_count); + verifier.TestAttributeIsNonNegative( inbound_stream.total_pauses_duration); - verifier.TestMemberIsNonNegative(inbound_stream.freeze_count); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( + inbound_stream.freeze_count); + verifier.TestAttributeIsNonNegative( inbound_stream.total_freezes_duration); // The integration test is not set up to test screen share; don't require // this to be present. 
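// estimated_playout_timestamp (above) and content_type (next hunk) are only
// marked as tested rather than asserted on: the first needs at least two RTCP
// sender reports, and the second depends on screen-share content that this
// short integration run does not produce. Sketch of that pattern, assuming
// the RTCStatsVerifier helper defined earlier in this file; the free function
// itself is illustrative.
void AcceptRuntimeDependentInboundAttributes(
    const RTCInboundRtpStreamStats& inbound_stream,
    RTCStatsVerifier& verifier) {
  // Counted as covered so ExpectAllAttributesSuccessfullyTested() still
  // passes whether or not the attributes were populated during the run.
  verifier.MarkAttributeTested(inbound_stream.estimated_playout_timestamp,
                               true);
  verifier.MarkAttributeTested(inbound_stream.content_type, true);
}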
- verifier.MarkMemberTested(inbound_stream.content_type, true); - verifier.TestMemberIsUndefined(inbound_stream.jitter_buffer_flushes); - verifier.TestMemberIsUndefined( + verifier.MarkAttributeTested(inbound_stream.content_type, true); + verifier.TestAttributeIsUndefined(inbound_stream.jitter_buffer_flushes); + verifier.TestAttributeIsUndefined( inbound_stream.delayed_packet_outage_samples); - verifier.TestMemberIsUndefined( + verifier.TestAttributeIsUndefined( inbound_stream.relative_packet_arrival_delay); - verifier.TestMemberIsUndefined(inbound_stream.interruption_count); - verifier.TestMemberIsUndefined( + verifier.TestAttributeIsUndefined(inbound_stream.interruption_count); + verifier.TestAttributeIsUndefined( inbound_stream.total_interruption_duration); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.min_playout_delay); - verifier.TestMemberIsDefined(inbound_stream.goog_timing_frame_info); + verifier.TestAttributeIsDefined(inbound_stream.goog_timing_frame_info); } else { - verifier.TestMemberIsUndefined(inbound_stream.frames_decoded); - verifier.TestMemberIsUndefined(inbound_stream.key_frames_decoded); - verifier.TestMemberIsUndefined(inbound_stream.frames_dropped); - verifier.TestMemberIsUndefined(inbound_stream.total_decode_time); - verifier.TestMemberIsUndefined(inbound_stream.total_processing_delay); - verifier.TestMemberIsUndefined(inbound_stream.total_assembly_time); - verifier.TestMemberIsUndefined( + verifier.TestAttributeIsUndefined(inbound_stream.frames_decoded); + verifier.TestAttributeIsUndefined(inbound_stream.key_frames_decoded); + verifier.TestAttributeIsUndefined(inbound_stream.frames_dropped); + verifier.TestAttributeIsUndefined(inbound_stream.total_decode_time); + verifier.TestAttributeIsUndefined(inbound_stream.total_assembly_time); + verifier.TestAttributeIsUndefined( inbound_stream.frames_assembled_from_multiple_packets); - verifier.TestMemberIsUndefined(inbound_stream.total_inter_frame_delay); - verifier.TestMemberIsUndefined( + verifier.TestAttributeIsUndefined(inbound_stream.total_inter_frame_delay); + verifier.TestAttributeIsUndefined( inbound_stream.total_squared_inter_frame_delay); - verifier.TestMemberIsUndefined(inbound_stream.pause_count); - verifier.TestMemberIsUndefined(inbound_stream.total_pauses_duration); - verifier.TestMemberIsUndefined(inbound_stream.freeze_count); - verifier.TestMemberIsUndefined(inbound_stream.total_freezes_duration); - verifier.TestMemberIsUndefined(inbound_stream.content_type); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsUndefined(inbound_stream.pause_count); + verifier.TestAttributeIsUndefined(inbound_stream.total_pauses_duration); + verifier.TestAttributeIsUndefined(inbound_stream.freeze_count); + verifier.TestAttributeIsUndefined(inbound_stream.total_freezes_duration); + verifier.TestAttributeIsUndefined(inbound_stream.content_type); + verifier.TestAttributeIsNonNegative( inbound_stream.jitter_buffer_flushes); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.delayed_packet_outage_samples); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.relative_packet_arrival_delay); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.interruption_count); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( inbound_stream.total_interruption_duration); - verifier.TestMemberIsUndefined(inbound_stream.min_playout_delay); - 
verifier.TestMemberIsUndefined(inbound_stream.goog_timing_frame_info); + verifier.TestAttributeIsUndefined(inbound_stream.min_playout_delay); + verifier.TestAttributeIsUndefined(inbound_stream.goog_timing_frame_info); } - if (inbound_stream.kind.is_defined() && *inbound_stream.kind == "audio") { - verifier.TestMemberIsDefined(inbound_stream.playout_id); + if (inbound_stream.kind.has_value() && *inbound_stream.kind == "audio") { + verifier.TestAttributeIsDefined(inbound_stream.playout_id); } else { - verifier.TestMemberIsUndefined(inbound_stream.playout_id); + verifier.TestAttributeIsUndefined(inbound_stream.playout_id); } - return verifier.ExpectAllMembersSuccessfullyTested(); + return verifier.ExpectAllAttributesSuccessfullyTested(); } bool VerifyRTCOutboundRtpStreamStats( @@ -787,122 +771,133 @@ class RTCStatsReportVerifier { RTCStatsVerifier verifier(report_.get(), &outbound_stream); VerifyRTCSentRtpStreamStats(outbound_stream, verifier); - verifier.TestMemberIsDefined(outbound_stream.mid); - verifier.TestMemberIsDefined(outbound_stream.active); - if (outbound_stream.kind.is_defined() && *outbound_stream.kind == "video") { - verifier.TestMemberIsIDReference(outbound_stream.media_source_id, - RTCVideoSourceStats::kType); - verifier.TestMemberIsNonNegative(outbound_stream.fir_count); - verifier.TestMemberIsNonNegative(outbound_stream.pli_count); + verifier.TestAttributeIsDefined(outbound_stream.mid); + verifier.TestAttributeIsDefined(outbound_stream.active); + if (outbound_stream.kind.has_value() && *outbound_stream.kind == "video") { + verifier.TestAttributeIsIDReference(outbound_stream.media_source_id, + RTCVideoSourceStats::kType); + verifier.TestAttributeIsNonNegative(outbound_stream.fir_count); + verifier.TestAttributeIsNonNegative(outbound_stream.pli_count); if (*outbound_stream.frames_encoded > 0) { - verifier.TestMemberIsNonNegative(outbound_stream.qp_sum); + verifier.TestAttributeIsNonNegative(outbound_stream.qp_sum); } else { - verifier.TestMemberIsUndefined(outbound_stream.qp_sum); + verifier.TestAttributeIsUndefined(outbound_stream.qp_sum); } } else { - verifier.TestMemberIsUndefined(outbound_stream.fir_count); - verifier.TestMemberIsUndefined(outbound_stream.pli_count); - verifier.TestMemberIsIDReference(outbound_stream.media_source_id, - RTCAudioSourceStats::kType); - verifier.TestMemberIsUndefined(outbound_stream.qp_sum); + verifier.TestAttributeIsUndefined(outbound_stream.fir_count); + verifier.TestAttributeIsUndefined(outbound_stream.pli_count); + verifier.TestAttributeIsIDReference(outbound_stream.media_source_id, + RTCAudioSourceStats::kType); + verifier.TestAttributeIsUndefined(outbound_stream.qp_sum); } - verifier.TestMemberIsNonNegative(outbound_stream.nack_count); - verifier.TestMemberIsOptionalIDReference( + verifier.TestAttributeIsNonNegative(outbound_stream.nack_count); + verifier.TestAttributeIsOptionalIDReference( outbound_stream.remote_id, RTCRemoteInboundRtpStreamStats::kType); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( outbound_stream.total_packet_send_delay); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( outbound_stream.retransmitted_packets_sent); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( outbound_stream.header_bytes_sent); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( outbound_stream.retransmitted_bytes_sent); - verifier.TestMemberIsNonNegative(outbound_stream.target_bitrate); - if (outbound_stream.kind.is_defined() && 
*outbound_stream.kind == "video") { - verifier.TestMemberIsDefined(outbound_stream.frames_encoded); - verifier.TestMemberIsDefined(outbound_stream.key_frames_encoded); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative(outbound_stream.target_bitrate); + if (outbound_stream.kind.has_value() && *outbound_stream.kind == "video") { + verifier.TestAttributeIsDefined(outbound_stream.frames_encoded); + verifier.TestAttributeIsDefined(outbound_stream.key_frames_encoded); + verifier.TestAttributeIsNonNegative( outbound_stream.total_encode_time); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( outbound_stream.total_encoded_bytes_target); - verifier.TestMemberIsDefined(outbound_stream.quality_limitation_reason); - verifier.TestMemberIsDefined( + verifier.TestAttributeIsDefined( + outbound_stream.quality_limitation_reason); + verifier.TestAttributeIsDefined( outbound_stream.quality_limitation_durations); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( outbound_stream.quality_limitation_resolution_changes); // The integration test is not set up to test screen share; don't require // this to be present. - verifier.MarkMemberTested(outbound_stream.content_type, true); - verifier.TestMemberIsDefined(outbound_stream.encoder_implementation); - verifier.TestMemberIsDefined(outbound_stream.power_efficient_encoder); + verifier.MarkAttributeTested(outbound_stream.content_type, true); + verifier.TestAttributeIsDefined(outbound_stream.encoder_implementation); + verifier.TestAttributeIsDefined(outbound_stream.power_efficient_encoder); // Unless an implementation-specific amount of time has passed and at // least one frame has been encoded, undefined is reported. Because it // is hard to tell what is the case here, we treat FPS as optional. // TODO(hbos): Update the tests to run until all implemented metrics // should be populated. - if (outbound_stream.frames_per_second.is_defined()) { - verifier.TestMemberIsNonNegative( + if (outbound_stream.frames_per_second.has_value()) { + verifier.TestAttributeIsNonNegative( outbound_stream.frames_per_second); } else { - verifier.TestMemberIsUndefined(outbound_stream.frames_per_second); + verifier.TestAttributeIsUndefined(outbound_stream.frames_per_second); } - verifier.TestMemberIsNonNegative(outbound_stream.frame_height); - verifier.TestMemberIsNonNegative(outbound_stream.frame_width); - verifier.TestMemberIsNonNegative(outbound_stream.frames_sent); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( + outbound_stream.frame_height); + verifier.TestAttributeIsNonNegative( + outbound_stream.frame_width); + verifier.TestAttributeIsNonNegative( + outbound_stream.frames_sent); + verifier.TestAttributeIsNonNegative( outbound_stream.huge_frames_sent); - verifier.MarkMemberTested(outbound_stream.rid, true); - verifier.TestMemberIsDefined(outbound_stream.scalability_mode); - verifier.TestMemberIsNonNegative(outbound_stream.rtx_ssrc); + // RID is N/A because this test uses singlecast. + verifier.TestAttributeIsUndefined(outbound_stream.rid); + // In singlecast, the only encoding that exists has index 0. 
+ verifier.TestAttributeIsDefined(outbound_stream.encoding_index); + EXPECT_TRUE(outbound_stream.encoding_index.has_value() && + outbound_stream.encoding_index.value() == 0); + verifier.TestAttributeIsDefined(outbound_stream.scalability_mode); + verifier.TestAttributeIsNonNegative(outbound_stream.rtx_ssrc); } else { - verifier.TestMemberIsUndefined(outbound_stream.frames_encoded); - verifier.TestMemberIsUndefined(outbound_stream.key_frames_encoded); - verifier.TestMemberIsUndefined(outbound_stream.total_encode_time); - verifier.TestMemberIsUndefined( + verifier.TestAttributeIsUndefined(outbound_stream.frames_encoded); + verifier.TestAttributeIsUndefined(outbound_stream.key_frames_encoded); + verifier.TestAttributeIsUndefined(outbound_stream.total_encode_time); + verifier.TestAttributeIsUndefined( outbound_stream.total_encoded_bytes_target); - verifier.TestMemberIsUndefined(outbound_stream.quality_limitation_reason); - verifier.TestMemberIsUndefined( + verifier.TestAttributeIsUndefined( + outbound_stream.quality_limitation_reason); + verifier.TestAttributeIsUndefined( outbound_stream.quality_limitation_durations); - verifier.TestMemberIsUndefined( + verifier.TestAttributeIsUndefined( outbound_stream.quality_limitation_resolution_changes); - verifier.TestMemberIsUndefined(outbound_stream.content_type); - // TODO(hbos): Implement for audio as well. - verifier.TestMemberIsUndefined(outbound_stream.encoder_implementation); - verifier.TestMemberIsUndefined(outbound_stream.power_efficient_encoder); - verifier.TestMemberIsUndefined(outbound_stream.rid); - verifier.TestMemberIsUndefined(outbound_stream.frames_per_second); - verifier.TestMemberIsUndefined(outbound_stream.frame_height); - verifier.TestMemberIsUndefined(outbound_stream.frame_width); - verifier.TestMemberIsUndefined(outbound_stream.frames_sent); - verifier.TestMemberIsUndefined(outbound_stream.huge_frames_sent); - verifier.TestMemberIsUndefined(outbound_stream.scalability_mode); - verifier.TestMemberIsUndefined(outbound_stream.rtx_ssrc); + verifier.TestAttributeIsUndefined(outbound_stream.content_type); + verifier.TestAttributeIsUndefined(outbound_stream.encoder_implementation); + verifier.TestAttributeIsUndefined( + outbound_stream.power_efficient_encoder); + verifier.TestAttributeIsUndefined(outbound_stream.rid); + verifier.TestAttributeIsUndefined(outbound_stream.encoding_index); + verifier.TestAttributeIsUndefined(outbound_stream.frames_per_second); + verifier.TestAttributeIsUndefined(outbound_stream.frame_height); + verifier.TestAttributeIsUndefined(outbound_stream.frame_width); + verifier.TestAttributeIsUndefined(outbound_stream.frames_sent); + verifier.TestAttributeIsUndefined(outbound_stream.huge_frames_sent); + verifier.TestAttributeIsUndefined(outbound_stream.scalability_mode); + verifier.TestAttributeIsUndefined(outbound_stream.rtx_ssrc); } - return verifier.ExpectAllMembersSuccessfullyTested(); + return verifier.ExpectAllAttributesSuccessfullyTested(); } void VerifyRTCReceivedRtpStreamStats( const RTCReceivedRtpStreamStats& received_rtp, RTCStatsVerifier& verifier) { VerifyRTCRtpStreamStats(received_rtp, verifier); - verifier.TestMemberIsNonNegative(received_rtp.jitter); - verifier.TestMemberIsDefined(received_rtp.packets_lost); + verifier.TestAttributeIsNonNegative(received_rtp.jitter); + verifier.TestAttributeIsDefined(received_rtp.packets_lost); } bool VerifyRTCRemoteInboundRtpStreamStats( const RTCRemoteInboundRtpStreamStats& remote_inbound_stream) { RTCStatsVerifier verifier(report_.get(), &remote_inbound_stream); 
VerifyRTCReceivedRtpStreamStats(remote_inbound_stream, verifier); - verifier.TestMemberIsDefined(remote_inbound_stream.fraction_lost); - verifier.TestMemberIsIDReference(remote_inbound_stream.local_id, - RTCOutboundRtpStreamStats::kType); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsDefined(remote_inbound_stream.fraction_lost); + verifier.TestAttributeIsIDReference(remote_inbound_stream.local_id, + RTCOutboundRtpStreamStats::kType); + verifier.TestAttributeIsNonNegative( remote_inbound_stream.round_trip_time); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( remote_inbound_stream.total_round_trip_time); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( remote_inbound_stream.round_trip_time_measurements); - return verifier.ExpectAllMembersSuccessfullyTested(); + return verifier.ExpectAllAttributesSuccessfullyTested(); } bool VerifyRTCRemoteOutboundRtpStreamStats( @@ -910,19 +905,25 @@ class RTCStatsReportVerifier { RTCStatsVerifier verifier(report_.get(), &remote_outbound_stream); VerifyRTCRtpStreamStats(remote_outbound_stream, verifier); VerifyRTCSentRtpStreamStats(remote_outbound_stream, verifier); - verifier.TestMemberIsIDReference(remote_outbound_stream.local_id, - RTCOutboundRtpStreamStats::kType); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsIDReference(remote_outbound_stream.local_id, + RTCInboundRtpStreamStats::kType); + verifier.TestAttributeIsNonNegative( remote_outbound_stream.remote_timestamp); - verifier.TestMemberIsDefined(remote_outbound_stream.reports_sent); - return verifier.ExpectAllMembersSuccessfullyTested(); + verifier.TestAttributeIsDefined(remote_outbound_stream.reports_sent); + // RTT-related attributes need DLRR. + verifier.MarkAttributeTested(remote_outbound_stream.round_trip_time, true); + verifier.MarkAttributeTested( + remote_outbound_stream.round_trip_time_measurements, true); + verifier.MarkAttributeTested(remote_outbound_stream.total_round_trip_time, + true); + return verifier.ExpectAllAttributesSuccessfullyTested(); } void VerifyRTCMediaSourceStats(const RTCMediaSourceStats& media_source, RTCStatsVerifier* verifier) { - verifier->TestMemberIsDefined(media_source.track_identifier); - verifier->TestMemberIsDefined(media_source.kind); - if (media_source.kind.is_defined()) { + verifier->TestAttributeIsDefined(media_source.track_identifier); + verifier->TestAttributeIsDefined(media_source.kind); + if (media_source.kind.has_value()) { EXPECT_TRUE((*media_source.kind == "audio" && media_source.type() == RTCAudioSourceStats::kType) || (*media_source.kind == "video" && @@ -936,16 +937,18 @@ class RTCStatsReportVerifier { // Audio level, unlike audio energy, only gets updated at a certain // frequency, so we don't require that one to be positive to avoid a race // (https://crbug.com/webrtc/10962). - verifier.TestMemberIsNonNegative(audio_source.audio_level); - verifier.TestMemberIsPositive(audio_source.total_audio_energy); - verifier.TestMemberIsPositive(audio_source.total_samples_duration); + verifier.TestAttributeIsNonNegative(audio_source.audio_level); + verifier.TestAttributeIsPositive(audio_source.total_audio_energy); + verifier.TestAttributeIsPositive( + audio_source.total_samples_duration); // TODO(hbos): `echo_return_loss` and `echo_return_loss_enhancement` are // flaky on msan bot (sometimes defined, sometimes undefined). Should the // test run until available or is there a way to have it always be // defined? 
crbug.com/627816 - verifier.MarkMemberTested(audio_source.echo_return_loss, true); - verifier.MarkMemberTested(audio_source.echo_return_loss_enhancement, true); - return verifier.ExpectAllMembersSuccessfullyTested(); + verifier.MarkAttributeTested(audio_source.echo_return_loss, true); + verifier.MarkAttributeTested(audio_source.echo_return_loss_enhancement, + true); + return verifier.ExpectAllAttributesSuccessfullyTested(); } bool VerifyRTCVideoSourceStats(const RTCVideoSourceStats& video_source) { @@ -953,80 +956,77 @@ class RTCStatsReportVerifier { VerifyRTCMediaSourceStats(video_source, &verifier); // TODO(hbos): This integration test uses fakes that doesn't support // VideoTrackSourceInterface::Stats. When this is fixed we should - // TestMemberIsNonNegative() for `width` and `height` instead to - // reflect real code. - verifier.TestMemberIsUndefined(video_source.width); - verifier.TestMemberIsUndefined(video_source.height); - verifier.TestMemberIsNonNegative(video_source.frames); - verifier.TestMemberIsNonNegative(video_source.frames_per_second); - return verifier.ExpectAllMembersSuccessfullyTested(); + // TestAttributeIsNonNegative() for `width` and `height` instead + // to reflect real code. + verifier.TestAttributeIsUndefined(video_source.width); + verifier.TestAttributeIsUndefined(video_source.height); + verifier.TestAttributeIsNonNegative(video_source.frames); + verifier.TestAttributeIsNonNegative(video_source.frames_per_second); + return verifier.ExpectAllAttributesSuccessfullyTested(); } bool VerifyRTCTransportStats(const RTCTransportStats& transport) { RTCStatsVerifier verifier(report_.get(), &transport); - verifier.TestMemberIsNonNegative(transport.bytes_sent); - verifier.TestMemberIsNonNegative(transport.packets_sent); - verifier.TestMemberIsNonNegative(transport.bytes_received); - verifier.TestMemberIsNonNegative(transport.packets_received); - verifier.TestMemberIsOptionalIDReference(transport.rtcp_transport_stats_id, - RTCTransportStats::kType); - verifier.TestMemberIsDefined(transport.dtls_state); - verifier.TestMemberIsIDReference(transport.selected_candidate_pair_id, - RTCIceCandidatePairStats::kType); - verifier.TestMemberIsIDReference(transport.local_certificate_id, - RTCCertificateStats::kType); - verifier.TestMemberIsIDReference(transport.remote_certificate_id, - RTCCertificateStats::kType); - verifier.TestMemberIsDefined(transport.tls_version); - verifier.TestMemberIsDefined(transport.dtls_cipher); - verifier.TestMemberIsDefined(transport.dtls_role); - verifier.TestMemberIsDefined(transport.srtp_cipher); - verifier.TestMemberIsPositive( + verifier.TestAttributeIsNonNegative(transport.bytes_sent); + verifier.TestAttributeIsNonNegative(transport.packets_sent); + verifier.TestAttributeIsNonNegative(transport.bytes_received); + verifier.TestAttributeIsNonNegative(transport.packets_received); + verifier.TestAttributeIsOptionalIDReference( + transport.rtcp_transport_stats_id, RTCTransportStats::kType); + verifier.TestAttributeIsDefined(transport.dtls_state); + verifier.TestAttributeIsIDReference(transport.selected_candidate_pair_id, + RTCIceCandidatePairStats::kType); + verifier.TestAttributeIsIDReference(transport.local_certificate_id, + RTCCertificateStats::kType); + verifier.TestAttributeIsIDReference(transport.remote_certificate_id, + RTCCertificateStats::kType); + verifier.TestAttributeIsDefined(transport.tls_version); + verifier.TestAttributeIsDefined(transport.dtls_cipher); + verifier.TestAttributeIsDefined(transport.dtls_role); + 
verifier.TestAttributeIsDefined(transport.srtp_cipher); + verifier.TestAttributeIsPositive( transport.selected_candidate_pair_changes); - verifier.TestMemberIsDefined(transport.ice_role); - verifier.TestMemberIsDefined(transport.ice_local_username_fragment); - verifier.TestMemberIsDefined(transport.ice_state); - return verifier.ExpectAllMembersSuccessfullyTested(); + verifier.TestAttributeIsDefined(transport.ice_role); + verifier.TestAttributeIsDefined(transport.ice_local_username_fragment); + verifier.TestAttributeIsDefined(transport.ice_state); + return verifier.ExpectAllAttributesSuccessfullyTested(); } bool VerifyRTCAudioPlayoutStats(const RTCAudioPlayoutStats& audio_playout) { RTCStatsVerifier verifier(report_.get(), &audio_playout); - verifier.TestMemberIsDefined(audio_playout.kind); - if (audio_playout.kind.is_defined()) { + verifier.TestAttributeIsDefined(audio_playout.kind); + if (audio_playout.kind.has_value()) { EXPECT_EQ(*audio_playout.kind, "audio"); } - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( audio_playout.synthesized_samples_events); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( audio_playout.synthesized_samples_duration); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( audio_playout.total_samples_count); - verifier.TestMemberIsNonNegative( + verifier.TestAttributeIsNonNegative( audio_playout.total_samples_duration); - verifier.TestMemberIsNonNegative(audio_playout.total_playout_delay); - return verifier.ExpectAllMembersSuccessfullyTested(); + verifier.TestAttributeIsNonNegative( + audio_playout.total_playout_delay); + return verifier.ExpectAllAttributesSuccessfullyTested(); } private: - rtc::scoped_refptr report_; + scoped_refptr report_; }; #ifdef WEBRTC_HAVE_SCTP TEST_F(RTCStatsIntegrationTest, GetStatsFromCaller) { StartCall(); - rtc::scoped_refptr report = GetStatsFromCaller(); + scoped_refptr report = GetStatsFromCaller(); RTCStatsReportVerifier(report.get()).VerifyReport({}); - -#if RTC_TRACE_EVENTS_ENABLED - EXPECT_EQ(report->ToJson(), RTCStatsReportTraceListener::last_trace()); -#endif } TEST_F(RTCStatsIntegrationTest, GetStatsFromCallee) { StartCall(); - rtc::scoped_refptr report; + scoped_refptr report; // Wait for round trip time measurements to be defined. 
constexpr int kMaxWaitMs = 10000; auto GetStatsReportAndReturnTrueIfRttIsDefined = [&report, this] { @@ -1034,15 +1034,15 @@ TEST_F(RTCStatsIntegrationTest, GetStatsFromCallee) { auto inbound_stats = report->GetStatsOfType(); return !inbound_stats.empty() && - inbound_stats.front()->round_trip_time.is_defined() && - inbound_stats.front()->round_trip_time_measurements.is_defined(); + inbound_stats.front()->round_trip_time.has_value() && + inbound_stats.front()->round_trip_time_measurements.has_value(); }; - EXPECT_TRUE_WAIT(GetStatsReportAndReturnTrueIfRttIsDefined(), kMaxWaitMs); + EXPECT_THAT( + WaitUntil([&] { return GetStatsReportAndReturnTrueIfRttIsDefined(); }, + ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kMaxWaitMs)}), + IsRtcOk()); RTCStatsReportVerifier(report.get()).VerifyReport({}); - -#if RTC_TRACE_EVENTS_ENABLED - EXPECT_EQ(report->ToJson(), RTCStatsReportTraceListener::last_trace()); -#endif } // These tests exercise the integration of the stats selection algorithm inside @@ -1051,7 +1051,7 @@ TEST_F(RTCStatsIntegrationTest, GetStatsFromCallee) { TEST_F(RTCStatsIntegrationTest, GetStatsWithSenderSelector) { StartCall(); ASSERT_FALSE(caller_->pc()->GetSenders().empty()); - rtc::scoped_refptr report = + scoped_refptr report = GetStatsFromCaller(caller_->pc()->GetSenders()[0]); std::vector allowed_missing_stats = { // TODO(hbos): Include RTC[Audio/Video]ReceiverStats when implemented. @@ -1069,7 +1069,7 @@ TEST_F(RTCStatsIntegrationTest, GetStatsWithReceiverSelector) { StartCall(); ASSERT_FALSE(caller_->pc()->GetReceivers().empty()); - rtc::scoped_refptr report = + scoped_refptr report = GetStatsFromCaller(caller_->pc()->GetReceivers()[0]); std::vector allowed_missing_stats = { // TODO(hbos): Include RTC[Audio/Video]SenderStats when implemented. @@ -1089,7 +1089,7 @@ TEST_F(RTCStatsIntegrationTest, GetStatsWithInvalidSenderSelector) { ASSERT_FALSE(callee_->pc()->GetSenders().empty()); // The selector is invalid for the caller because it belongs to the callee. auto invalid_selector = callee_->pc()->GetSenders()[0]; - rtc::scoped_refptr report = + scoped_refptr report = GetStatsFromCaller(invalid_selector); EXPECT_FALSE(report->size()); } @@ -1100,7 +1100,7 @@ TEST_F(RTCStatsIntegrationTest, GetStatsWithInvalidReceiverSelector) { ASSERT_FALSE(callee_->pc()->GetReceivers().empty()); // The selector is invalid for the caller because it belongs to the callee. auto invalid_selector = callee_->pc()->GetReceivers()[0]; - rtc::scoped_refptr report = + scoped_refptr report = GetStatsFromCaller(invalid_selector); EXPECT_FALSE(report->size()); } @@ -1112,62 +1112,51 @@ TEST_F(RTCStatsIntegrationTest, DISABLED_GetStatsWhileDestroyingPeerConnection) { StartCall(); - rtc::scoped_refptr stats_obtainer = - RTCStatsObtainer::Create(); + scoped_refptr stats_obtainer = RTCStatsObtainer::Create(); caller_->pc()->GetStats(stats_obtainer.get()); // This will destroy the peer connection. caller_ = nullptr; // Any pending stats requests should have completed in the act of destroying // the peer connection. 
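// EXPECT_TRUE_WAIT is replaced above by the WaitUntil() helper: it repeatedly
// evaluates the lambda until the given matcher accepts the result or the
// timeout expires, and the returned status is then matched with IsRtcOk().
// A sketch of the same idiom with a simpler, illustrative predicate; it relies
// on the WaitUntil/IsRtcOk/TimeDelta facilities this file already pulls in for
// the hunk above, so no extra includes are shown.
void WaitForNonEmptyReport(scoped_refptr<const RTCStatsReport>& report) {
  EXPECT_THAT(
      WaitUntil([&] { return report.get() != nullptr && report->size() > 0u; },
                ::testing::IsTrue(),
                {.timeout = webrtc::TimeDelta::Millis(10000)}),
      IsRtcOk());
}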
ASSERT_TRUE(stats_obtainer->report()); -#if RTC_TRACE_EVENTS_ENABLED - EXPECT_EQ(stats_obtainer->report()->ToJson(), - RTCStatsReportTraceListener::last_trace()); -#endif } TEST_F(RTCStatsIntegrationTest, GetsStatsWhileClosingPeerConnection) { StartCall(); - rtc::scoped_refptr stats_obtainer = - RTCStatsObtainer::Create(); + scoped_refptr stats_obtainer = RTCStatsObtainer::Create(); caller_->pc()->GetStats(stats_obtainer.get()); caller_->pc()->Close(); ASSERT_TRUE(stats_obtainer->report()); -#if RTC_TRACE_EVENTS_ENABLED - EXPECT_EQ(stats_obtainer->report()->ToJson(), - RTCStatsReportTraceListener::last_trace()); -#endif } // GetStatsReferencedIds() is optimized to recognize what is or isn't a -// referenced ID based on dictionary type information and knowing what members -// are used as references, as opposed to iterating all members to find the ones -// with the "Id" or "Ids" suffix. As such, GetStatsReferencedIds() is tested as -// an integration test instead of a unit test in order to guard against adding -// new references and forgetting to update GetStatsReferencedIds(). +// referenced ID based on dictionary type information and knowing what +// attributes are used as references, as opposed to iterating all attributes to +// find the ones with the "Id" or "Ids" suffix. As such, GetStatsReferencedIds() +// is tested as an integration test instead of a unit test in order to guard +// against adding new references and forgetting to update +// GetStatsReferencedIds(). TEST_F(RTCStatsIntegrationTest, GetStatsReferencedIds) { StartCall(); - rtc::scoped_refptr report = GetStatsFromCallee(); + scoped_refptr report = GetStatsFromCallee(); for (const RTCStats& stats : *report) { - // Find all references by looking at all string members with the "Id" or + // Find all references by looking at all string attributes with the "Id" or // "Ids" suffix. 
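// The loop below re-derives the expected reference ids through the generic
// attribute API: stats.Attributes() yields variant-like attributes exposing
// name(), has_value(), holds_alternative<T>() and get<T>(). A compact sketch
// of the singular "*Id" case (the test below also handles "*Ids" sequences);
// the std::string instantiations are an assumption implied by the checks
// below, and the helper itself is illustrative.
#include <string>
#include <vector>

#include "absl/strings/match.h"
#include "api/stats/rtc_stats.h"

namespace webrtc {
std::vector<std::string> ReferencedIdsOf(const RTCStats& stats) {
  std::vector<std::string> ids;
  for (const auto& attribute : stats.Attributes()) {
    if (attribute.has_value() && attribute.holds_alternative<std::string>() &&
        absl::EndsWith(attribute.name(), "Id")) {
      ids.push_back(attribute.get<std::string>());
    }
  }
  return ids;
}
}  // namespace webrtc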
std::set expected_ids; - for (const auto* member : stats.Members()) { - if (!member->is_defined()) + for (const auto& attribute : stats.Attributes()) { + if (!attribute.has_value()) continue; - if (member->type() == RTCStatsMemberInterface::kString) { - if (absl::EndsWith(member->name(), "Id")) { - const auto& id = member->cast_to>(); - expected_ids.insert(&(*id)); + if (attribute.holds_alternative()) { + if (absl::EndsWith(attribute.name(), "Id")) { + expected_ids.insert(&attribute.get()); } - } else if (member->type() == RTCStatsMemberInterface::kSequenceString) { - if (absl::EndsWith(member->name(), "Ids")) { - const auto& ids = - member->cast_to>>(); - for (const std::string& id : *ids) + } else if (attribute.holds_alternative>()) { + if (absl::EndsWith(attribute.name(), "Ids")) { + for (const std::string& id : + attribute.get>()) expected_ids.insert(&id); } } @@ -1184,16 +1173,17 @@ TEST_F(RTCStatsIntegrationTest, GetStatsReferencedIds) { } } -TEST_F(RTCStatsIntegrationTest, GetStatsContainsNoDuplicateMembers) { +TEST_F(RTCStatsIntegrationTest, GetStatsContainsNoDuplicateAttributes) { StartCall(); - rtc::scoped_refptr report = GetStatsFromCallee(); + scoped_refptr report = GetStatsFromCallee(); for (const RTCStats& stats : *report) { - std::set member_names; - for (const auto* member : stats.Members()) { - EXPECT_TRUE(member_names.find(member->name()) == member_names.end()) - << member->name() << " is a duplicate!"; - member_names.insert(member->name()); + std::set attribute_names; + for (const auto& attribute : stats.Attributes()) { + EXPECT_TRUE(attribute_names.find(attribute.name()) == + attribute_names.end()) + << attribute.name() << " is a duplicate!"; + attribute_names.insert(attribute.name()); } } } diff --git a/pc/rtc_stats_traversal.cc b/pc/rtc_stats_traversal.cc index 04de55028c..a28d606a28 100644 --- a/pc/rtc_stats_traversal.cc +++ b/pc/rtc_stats_traversal.cc @@ -11,10 +11,14 @@ #include "pc/rtc_stats_traversal.h" #include +#include #include #include #include +#include "api/scoped_refptr.h" +#include "api/stats/rtc_stats.h" +#include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" #include "rtc_base/checks.h" @@ -42,18 +46,18 @@ void TraverseAndTakeVisitedStats(RTCStatsReport* report, } } -void AddIdIfDefined(const RTCStatsMember& id, +void AddIdIfDefined(const std::optional& id, std::vector* neighbor_ids) { - if (id.is_defined()) + if (id.has_value()) neighbor_ids->push_back(&(*id)); } } // namespace -rtc::scoped_refptr TakeReferencedStats( - rtc::scoped_refptr report, +scoped_refptr TakeReferencedStats( + scoped_refptr report, const std::vector& ids) { - rtc::scoped_refptr result = + scoped_refptr result = RTCStatsReport::Create(report->timestamp()); for (const auto& id : ids) { TraverseAndTakeVisitedStats(report.get(), result.get(), id); diff --git a/pc/rtc_stats_traversal.h b/pc/rtc_stats_traversal.h index ec4d51cc52..931614013d 100644 --- a/pc/rtc_stats_traversal.h +++ b/pc/rtc_stats_traversal.h @@ -25,8 +25,8 @@ namespace webrtc { // `ids`, returning them as a new stats report. // This is meant to be used to implement the stats selection algorithm. 
// https://w3c.github.io/webrtc-pc/#dfn-stats-selection-algorithm -rtc::scoped_refptr TakeReferencedStats( - rtc::scoped_refptr report, +scoped_refptr TakeReferencedStats( + scoped_refptr report, const std::vector& ids); // Gets pointers to the string values of any members in `stats` that are used as diff --git a/pc/rtc_stats_traversal_unittest.cc b/pc/rtc_stats_traversal_unittest.cc index 72ad255564..2964ead154 100644 --- a/pc/rtc_stats_traversal_unittest.cc +++ b/pc/rtc_stats_traversal_unittest.cc @@ -11,9 +11,14 @@ #include "pc/rtc_stats_traversal.h" #include +#include #include +#include "api/scoped_refptr.h" +#include "api/stats/rtc_stats.h" +#include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" +#include "api/units/timestamp.h" #include "test/gtest.h" // This file contains tests for TakeReferencedStats(). @@ -47,7 +52,7 @@ class RTCStatsTraversalTest : public ::testing::Test { for (const RTCStats* start_node : start_nodes) { start_ids.push_back(start_node->id()); } - result_ = webrtc::TakeReferencedStats(initial_report_, start_ids); + result_ = ::webrtc::TakeReferencedStats(initial_report_, start_ids); } void EXPECT_VISITED(const RTCStats* stats) { @@ -69,8 +74,8 @@ class RTCStatsTraversalTest : public ::testing::Test { } protected: - rtc::scoped_refptr initial_report_; - rtc::scoped_refptr result_; + scoped_refptr initial_report_; + scoped_refptr result_; // Raw pointers to stats owned by the reports. RTCTransportStats* transport_; RTCIceCandidatePairStats* candidate_pair_; diff --git a/pc/rtcp_mux_filter.cc b/pc/rtcp_mux_filter.cc index a8cf717b28..7680c8d193 100644 --- a/pc/rtcp_mux_filter.cc +++ b/pc/rtcp_mux_filter.cc @@ -10,9 +10,10 @@ #include "pc/rtcp_mux_filter.h" +#include "pc/session_description.h" #include "rtc_base/logging.h" -namespace cricket { +namespace webrtc { RtcpMuxFilter::RtcpMuxFilter() : state_(ST_INIT), offer_enable_(false) {} @@ -44,7 +45,7 @@ bool RtcpMuxFilter::SetOffer(bool offer_enable, ContentSource src) { } offer_enable_ = offer_enable; - state_ = (src == CS_LOCAL) ? ST_SENTOFFER : ST_RECEIVEDOFFER; + state_ = (src == webrtc::CS_LOCAL) ? ST_SENTOFFER : ST_RECEIVEDOFFER; return true; } @@ -62,7 +63,7 @@ bool RtcpMuxFilter::SetProvisionalAnswer(bool answer_enable, if (offer_enable_) { if (answer_enable) { - if (src == CS_REMOTE) + if (src == webrtc::CS_REMOTE) state_ = ST_RECEIVEDPRANSWER; else // CS_LOCAL state_ = ST_SENTPRANSWER; @@ -70,7 +71,7 @@ bool RtcpMuxFilter::SetProvisionalAnswer(bool answer_enable, // The provisional answer doesn't want to use RTCP mux. // Go back to the original state after the offer was set and wait for next // provisional or final answer. 
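// TakeReferencedStats(), declared in the rtc_stats_traversal.h hunk above and
// exercised by the traversal unittest, implements the stats selection
// algorithm: starting from the given ids it moves every stats object that is
// directly or indirectly referenced into a new report. Usage sketch; the
// helper and the start id are illustrative.
#include <string>
#include <vector>

#include "api/scoped_refptr.h"
#include "api/stats/rtc_stats_report.h"
#include "pc/rtc_stats_traversal.h"

namespace webrtc {
scoped_refptr<RTCStatsReport> SelectSubtree(
    scoped_refptr<RTCStatsReport> report, const std::string& start_id) {
  std::vector<std::string> start_ids = {start_id};
  return TakeReferencedStats(report, start_ids);
}
}  // namespace webrtc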
- if (src == CS_REMOTE) + if (src == webrtc::CS_REMOTE) state_ = ST_SENTOFFER; else // CS_LOCAL state_ = ST_RECEIVEDOFFER; @@ -110,15 +111,15 @@ bool RtcpMuxFilter::SetAnswer(bool answer_enable, ContentSource src) { bool RtcpMuxFilter::ExpectOffer(bool offer_enable, ContentSource source) { return ((state_ == ST_INIT) || (state_ == ST_ACTIVE && offer_enable == offer_enable_) || - (state_ == ST_SENTOFFER && source == CS_LOCAL) || - (state_ == ST_RECEIVEDOFFER && source == CS_REMOTE)); + (state_ == ST_SENTOFFER && source == webrtc::CS_LOCAL) || + (state_ == ST_RECEIVEDOFFER && source == webrtc::CS_REMOTE)); } bool RtcpMuxFilter::ExpectAnswer(ContentSource source) { - return ((state_ == ST_SENTOFFER && source == CS_REMOTE) || - (state_ == ST_RECEIVEDOFFER && source == CS_LOCAL) || - (state_ == ST_SENTPRANSWER && source == CS_LOCAL) || - (state_ == ST_RECEIVEDPRANSWER && source == CS_REMOTE)); + return ((state_ == ST_SENTOFFER && source == webrtc::CS_REMOTE) || + (state_ == ST_RECEIVEDOFFER && source == webrtc::CS_LOCAL) || + (state_ == ST_SENTPRANSWER && source == webrtc::CS_LOCAL) || + (state_ == ST_RECEIVEDPRANSWER && source == webrtc::CS_REMOTE)); } -} // namespace cricket +} // namespace webrtc diff --git a/pc/rtcp_mux_filter.h b/pc/rtcp_mux_filter.h index 48050de3d8..29838fc0f5 100644 --- a/pc/rtcp_mux_filter.h +++ b/pc/rtcp_mux_filter.h @@ -13,7 +13,7 @@ #include "pc/session_description.h" -namespace cricket { +namespace webrtc { // RTCP Muxer, as defined in RFC 5761 (http://tools.ietf.org/html/rfc5761) class RtcpMuxFilter { @@ -73,6 +73,14 @@ class RtcpMuxFilter { bool offer_enable_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::RtcpMuxFilter; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // PC_RTCP_MUX_FILTER_H_ diff --git a/pc/rtcp_mux_filter_unittest.cc b/pc/rtcp_mux_filter_unittest.cc index 586da23df8..d314b16192 100644 --- a/pc/rtcp_mux_filter_unittest.cc +++ b/pc/rtcp_mux_filter_unittest.cc @@ -10,21 +10,22 @@ #include "pc/rtcp_mux_filter.h" +#include "pc/session_description.h" #include "test/gtest.h" TEST(RtcpMuxFilterTest, IsActiveSender) { - cricket::RtcpMuxFilter filter; + webrtc::RtcpMuxFilter filter; // Init state - not active EXPECT_FALSE(filter.IsActive()); EXPECT_FALSE(filter.IsProvisionallyActive()); EXPECT_FALSE(filter.IsFullyActive()); // After sent offer, demux should not be active. - filter.SetOffer(true, cricket::CS_LOCAL); + filter.SetOffer(true, webrtc::CS_LOCAL); EXPECT_FALSE(filter.IsActive()); EXPECT_FALSE(filter.IsProvisionallyActive()); EXPECT_FALSE(filter.IsFullyActive()); // Remote accepted, filter is now active. - filter.SetAnswer(true, cricket::CS_REMOTE); + filter.SetAnswer(true, webrtc::CS_REMOTE); EXPECT_TRUE(filter.IsActive()); EXPECT_FALSE(filter.IsProvisionallyActive()); EXPECT_TRUE(filter.IsFullyActive()); @@ -32,40 +33,40 @@ TEST(RtcpMuxFilterTest, IsActiveSender) { // Test that we can receive provisional answer and final answer. TEST(RtcpMuxFilterTest, ReceivePrAnswer) { - cricket::RtcpMuxFilter filter; - filter.SetOffer(true, cricket::CS_LOCAL); + webrtc::RtcpMuxFilter filter; + filter.SetOffer(true, webrtc::CS_LOCAL); // Received provisional answer with mux enabled. 
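// The rtcp_mux_filter.h hunk above moves RtcpMuxFilter into namespace webrtc
// and, behind WEBRTC_ALLOW_DEPRECATED_NAMESPACES, keeps a using-declaration in
// namespace cricket so existing call sites keep compiling while they migrate.
// Sketch of a legacy caller under that define; the function is illustrative
// and the offer/answer sequence mirrors the unit tests in this file.
#include "pc/rtcp_mux_filter.h"
#include "pc/session_description.h"

#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES
void LegacyRtcpMuxUser() {
  cricket::RtcpMuxFilter filter;  // Alias for webrtc::RtcpMuxFilter.
  filter.SetOffer(true, webrtc::CS_LOCAL);
  filter.SetAnswer(true, webrtc::CS_REMOTE);
  // Both sides accepted RTCP mux, so the filter is now fully active (compare
  // the IsActiveSender test above).
  bool fully_active = filter.IsActive() && filter.IsFullyActive();
  (void)fully_active;
}
#endif  // WEBRTC_ALLOW_DEPRECATED_NAMESPACES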
- EXPECT_TRUE(filter.SetProvisionalAnswer(true, cricket::CS_REMOTE)); + EXPECT_TRUE(filter.SetProvisionalAnswer(true, webrtc::CS_REMOTE)); // We are now provisionally active since both sender and receiver support mux. EXPECT_TRUE(filter.IsActive()); EXPECT_TRUE(filter.IsProvisionallyActive()); EXPECT_FALSE(filter.IsFullyActive()); // Received provisional answer with mux disabled. - EXPECT_TRUE(filter.SetProvisionalAnswer(false, cricket::CS_REMOTE)); + EXPECT_TRUE(filter.SetProvisionalAnswer(false, webrtc::CS_REMOTE)); // We are now inactive since the receiver doesn't support mux. EXPECT_FALSE(filter.IsActive()); EXPECT_FALSE(filter.IsProvisionallyActive()); EXPECT_FALSE(filter.IsFullyActive()); // Received final answer with mux enabled. - EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_REMOTE)); + EXPECT_TRUE(filter.SetAnswer(true, webrtc::CS_REMOTE)); EXPECT_TRUE(filter.IsActive()); EXPECT_FALSE(filter.IsProvisionallyActive()); EXPECT_TRUE(filter.IsFullyActive()); } TEST(RtcpMuxFilterTest, IsActiveReceiver) { - cricket::RtcpMuxFilter filter; + webrtc::RtcpMuxFilter filter; // Init state - not active. EXPECT_FALSE(filter.IsActive()); EXPECT_FALSE(filter.IsProvisionallyActive()); EXPECT_FALSE(filter.IsFullyActive()); // After received offer, demux should not be active - filter.SetOffer(true, cricket::CS_REMOTE); + filter.SetOffer(true, webrtc::CS_REMOTE); EXPECT_FALSE(filter.IsActive()); EXPECT_FALSE(filter.IsProvisionallyActive()); EXPECT_FALSE(filter.IsFullyActive()); // We accept, filter is now active - filter.SetAnswer(true, cricket::CS_LOCAL); + filter.SetAnswer(true, webrtc::CS_LOCAL); EXPECT_TRUE(filter.IsActive()); EXPECT_FALSE(filter.IsProvisionallyActive()); EXPECT_TRUE(filter.IsFullyActive()); @@ -73,20 +74,20 @@ TEST(RtcpMuxFilterTest, IsActiveReceiver) { // Test that we can send provisional answer and final answer. TEST(RtcpMuxFilterTest, SendPrAnswer) { - cricket::RtcpMuxFilter filter; - filter.SetOffer(true, cricket::CS_REMOTE); + webrtc::RtcpMuxFilter filter; + filter.SetOffer(true, webrtc::CS_REMOTE); // Send provisional answer with mux enabled. - EXPECT_TRUE(filter.SetProvisionalAnswer(true, cricket::CS_LOCAL)); + EXPECT_TRUE(filter.SetProvisionalAnswer(true, webrtc::CS_LOCAL)); EXPECT_TRUE(filter.IsActive()); EXPECT_TRUE(filter.IsProvisionallyActive()); EXPECT_FALSE(filter.IsFullyActive()); // Received provisional answer with mux disabled. - EXPECT_TRUE(filter.SetProvisionalAnswer(false, cricket::CS_LOCAL)); + EXPECT_TRUE(filter.SetProvisionalAnswer(false, webrtc::CS_LOCAL)); EXPECT_FALSE(filter.IsActive()); EXPECT_FALSE(filter.IsProvisionallyActive()); EXPECT_FALSE(filter.IsFullyActive()); // Send final answer with mux enabled. - EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_LOCAL)); + EXPECT_TRUE(filter.SetAnswer(true, webrtc::CS_LOCAL)); EXPECT_TRUE(filter.IsActive()); EXPECT_FALSE(filter.IsProvisionallyActive()); EXPECT_TRUE(filter.IsFullyActive()); @@ -96,97 +97,97 @@ TEST(RtcpMuxFilterTest, SendPrAnswer) { // We can not disable the filter later since that would mean we need to // recreate a rtcp transport channel. 
TEST(RtcpMuxFilterTest, EnableFilterDuringUpdate) { - cricket::RtcpMuxFilter filter; + webrtc::RtcpMuxFilter filter; EXPECT_FALSE(filter.IsActive()); - EXPECT_TRUE(filter.SetOffer(false, cricket::CS_REMOTE)); - EXPECT_TRUE(filter.SetAnswer(false, cricket::CS_LOCAL)); + EXPECT_TRUE(filter.SetOffer(false, webrtc::CS_REMOTE)); + EXPECT_TRUE(filter.SetAnswer(false, webrtc::CS_LOCAL)); EXPECT_FALSE(filter.IsActive()); - EXPECT_TRUE(filter.SetOffer(true, cricket::CS_REMOTE)); - EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_LOCAL)); + EXPECT_TRUE(filter.SetOffer(true, webrtc::CS_REMOTE)); + EXPECT_TRUE(filter.SetAnswer(true, webrtc::CS_LOCAL)); EXPECT_TRUE(filter.IsActive()); - EXPECT_FALSE(filter.SetOffer(false, cricket::CS_REMOTE)); - EXPECT_FALSE(filter.SetAnswer(false, cricket::CS_LOCAL)); + EXPECT_FALSE(filter.SetOffer(false, webrtc::CS_REMOTE)); + EXPECT_FALSE(filter.SetAnswer(false, webrtc::CS_LOCAL)); EXPECT_TRUE(filter.IsActive()); } // Test that SetOffer can be called twice. TEST(RtcpMuxFilterTest, SetOfferTwice) { - cricket::RtcpMuxFilter filter; + webrtc::RtcpMuxFilter filter; - EXPECT_TRUE(filter.SetOffer(true, cricket::CS_REMOTE)); - EXPECT_TRUE(filter.SetOffer(true, cricket::CS_REMOTE)); - EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_LOCAL)); + EXPECT_TRUE(filter.SetOffer(true, webrtc::CS_REMOTE)); + EXPECT_TRUE(filter.SetOffer(true, webrtc::CS_REMOTE)); + EXPECT_TRUE(filter.SetAnswer(true, webrtc::CS_LOCAL)); EXPECT_TRUE(filter.IsActive()); - cricket::RtcpMuxFilter filter2; - EXPECT_TRUE(filter2.SetOffer(false, cricket::CS_LOCAL)); - EXPECT_TRUE(filter2.SetOffer(false, cricket::CS_LOCAL)); - EXPECT_TRUE(filter2.SetAnswer(false, cricket::CS_REMOTE)); + webrtc::RtcpMuxFilter filter2; + EXPECT_TRUE(filter2.SetOffer(false, webrtc::CS_LOCAL)); + EXPECT_TRUE(filter2.SetOffer(false, webrtc::CS_LOCAL)); + EXPECT_TRUE(filter2.SetAnswer(false, webrtc::CS_REMOTE)); EXPECT_FALSE(filter2.IsActive()); } // Test that the filter can be enabled twice. TEST(RtcpMuxFilterTest, EnableFilterTwiceDuringUpdate) { - cricket::RtcpMuxFilter filter; + webrtc::RtcpMuxFilter filter; - EXPECT_TRUE(filter.SetOffer(true, cricket::CS_REMOTE)); - EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_LOCAL)); + EXPECT_TRUE(filter.SetOffer(true, webrtc::CS_REMOTE)); + EXPECT_TRUE(filter.SetAnswer(true, webrtc::CS_LOCAL)); EXPECT_TRUE(filter.IsActive()); - EXPECT_TRUE(filter.SetOffer(true, cricket::CS_REMOTE)); - EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_LOCAL)); + EXPECT_TRUE(filter.SetOffer(true, webrtc::CS_REMOTE)); + EXPECT_TRUE(filter.SetAnswer(true, webrtc::CS_LOCAL)); EXPECT_TRUE(filter.IsActive()); } // Test that the filter can be kept disabled during updates. TEST(RtcpMuxFilterTest, KeepFilterDisabledDuringUpdate) { - cricket::RtcpMuxFilter filter; + webrtc::RtcpMuxFilter filter; - EXPECT_TRUE(filter.SetOffer(false, cricket::CS_REMOTE)); - EXPECT_TRUE(filter.SetAnswer(false, cricket::CS_LOCAL)); + EXPECT_TRUE(filter.SetOffer(false, webrtc::CS_REMOTE)); + EXPECT_TRUE(filter.SetAnswer(false, webrtc::CS_LOCAL)); EXPECT_FALSE(filter.IsActive()); - EXPECT_TRUE(filter.SetOffer(false, cricket::CS_REMOTE)); - EXPECT_TRUE(filter.SetAnswer(false, cricket::CS_LOCAL)); + EXPECT_TRUE(filter.SetOffer(false, webrtc::CS_REMOTE)); + EXPECT_TRUE(filter.SetAnswer(false, webrtc::CS_LOCAL)); EXPECT_FALSE(filter.IsActive()); } // Test that we can SetActive and then can't deactivate. 
TEST(RtcpMuxFilterTest, SetActiveCantDeactivate) { - cricket::RtcpMuxFilter filter; + webrtc::RtcpMuxFilter filter; filter.SetActive(); EXPECT_TRUE(filter.IsActive()); - EXPECT_FALSE(filter.SetOffer(false, cricket::CS_LOCAL)); + EXPECT_FALSE(filter.SetOffer(false, webrtc::CS_LOCAL)); EXPECT_TRUE(filter.IsActive()); - EXPECT_TRUE(filter.SetOffer(true, cricket::CS_LOCAL)); + EXPECT_TRUE(filter.SetOffer(true, webrtc::CS_LOCAL)); EXPECT_TRUE(filter.IsActive()); - EXPECT_FALSE(filter.SetProvisionalAnswer(false, cricket::CS_REMOTE)); + EXPECT_FALSE(filter.SetProvisionalAnswer(false, webrtc::CS_REMOTE)); EXPECT_TRUE(filter.IsActive()); - EXPECT_TRUE(filter.SetProvisionalAnswer(true, cricket::CS_REMOTE)); + EXPECT_TRUE(filter.SetProvisionalAnswer(true, webrtc::CS_REMOTE)); EXPECT_TRUE(filter.IsActive()); - EXPECT_FALSE(filter.SetAnswer(false, cricket::CS_REMOTE)); + EXPECT_FALSE(filter.SetAnswer(false, webrtc::CS_REMOTE)); EXPECT_TRUE(filter.IsActive()); - EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_REMOTE)); + EXPECT_TRUE(filter.SetAnswer(true, webrtc::CS_REMOTE)); EXPECT_TRUE(filter.IsActive()); - EXPECT_FALSE(filter.SetOffer(false, cricket::CS_REMOTE)); + EXPECT_FALSE(filter.SetOffer(false, webrtc::CS_REMOTE)); EXPECT_TRUE(filter.IsActive()); - EXPECT_TRUE(filter.SetOffer(true, cricket::CS_REMOTE)); + EXPECT_TRUE(filter.SetOffer(true, webrtc::CS_REMOTE)); EXPECT_TRUE(filter.IsActive()); - EXPECT_FALSE(filter.SetProvisionalAnswer(false, cricket::CS_LOCAL)); + EXPECT_FALSE(filter.SetProvisionalAnswer(false, webrtc::CS_LOCAL)); EXPECT_TRUE(filter.IsActive()); - EXPECT_TRUE(filter.SetProvisionalAnswer(true, cricket::CS_LOCAL)); + EXPECT_TRUE(filter.SetProvisionalAnswer(true, webrtc::CS_LOCAL)); EXPECT_TRUE(filter.IsActive()); - EXPECT_FALSE(filter.SetAnswer(false, cricket::CS_LOCAL)); + EXPECT_FALSE(filter.SetAnswer(false, webrtc::CS_LOCAL)); EXPECT_TRUE(filter.IsActive()); - EXPECT_TRUE(filter.SetAnswer(true, cricket::CS_LOCAL)); + EXPECT_TRUE(filter.SetAnswer(true, webrtc::CS_LOCAL)); EXPECT_TRUE(filter.IsActive()); } diff --git a/pc/rtp_media_utils.cc b/pc/rtp_media_utils.cc index 52c5bb0eac..4246f56e1c 100644 --- a/pc/rtp_media_utils.cc +++ b/pc/rtp_media_utils.cc @@ -10,6 +10,7 @@ #include "pc/rtp_media_utils.h" +#include "api/rtp_transceiver_direction.h" #include "rtc_base/checks.h" namespace webrtc { diff --git a/pc/rtp_media_utils.h b/pc/rtp_media_utils.h index 240274fe05..02a491aa86 100644 --- a/pc/rtp_media_utils.h +++ b/pc/rtp_media_utils.h @@ -11,8 +11,6 @@ #ifndef PC_RTP_MEDIA_UTILS_H_ #define PC_RTP_MEDIA_UTILS_H_ -#include // no-presubmit-check TODO(webrtc:8982) - #include "api/rtp_transceiver_direction.h" namespace webrtc { @@ -51,14 +49,6 @@ RtpTransceiverDirection RtpTransceiverDirectionIntersection( RtpTransceiverDirection lhs, RtpTransceiverDirection rhs); -#ifdef WEBRTC_UNIT_TEST -inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) - std::ostream& os, // no-presubmit-check TODO(webrtc:8982) - RtpTransceiverDirection direction) { - return os << RtpTransceiverDirectionToString(direction); -} -#endif // WEBRTC_UNIT_TEST - } // namespace webrtc #endif // PC_RTP_MEDIA_UTILS_H_ diff --git a/pc/rtp_media_utils_unittest.cc b/pc/rtp_media_utils_unittest.cc index 5ee49e356d..3ffebc352c 100644 --- a/pc/rtp_media_utils_unittest.cc +++ b/pc/rtp_media_utils_unittest.cc @@ -12,6 +12,7 @@ #include +#include "api/rtp_transceiver_direction.h" #include "test/gtest.h" namespace webrtc { diff --git a/pc/rtp_parameters_conversion.cc b/pc/rtp_parameters_conversion.cc index 
2463cefe58..75a69bcaa9 100644 --- a/pc/rtp_parameters_conversion.cc +++ b/pc/rtp_parameters_conversion.cc @@ -10,275 +10,91 @@ #include "pc/rtp_parameters_conversion.h" -#include -#include +#include #include -#include -#include +#include -#include "api/array_view.h" #include "api/media_types.h" -#include "api/rtc_error.h" +#include "api/rtp_parameters.h" #include "media/base/codec.h" #include "media/base/media_constants.h" -#include "media/base/rtp_utils.h" -#include "rtc_base/checks.h" +#include "pc/session_description.h" #include "rtc_base/logging.h" -#include "rtc_base/strings/string_builder.h" namespace webrtc { -RTCErrorOr ToCricketFeedbackParam( - const RtcpFeedback& feedback) { - switch (feedback.type) { - case RtcpFeedbackType::CCM: - if (!feedback.message_type) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Missing message type in CCM RtcpFeedback."); - } else if (*feedback.message_type != RtcpFeedbackMessageType::FIR) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Invalid message type in CCM RtcpFeedback."); - } - return cricket::FeedbackParam(cricket::kRtcpFbParamCcm, - cricket::kRtcpFbCcmParamFir); - case RtcpFeedbackType::LNTF: - if (feedback.message_type) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INVALID_PARAMETER, - "Didn't expect message type in LNTF RtcpFeedback."); - } - return cricket::FeedbackParam(cricket::kRtcpFbParamLntf); - case RtcpFeedbackType::NACK: - if (!feedback.message_type) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Missing message type in NACK RtcpFeedback."); - } - switch (*feedback.message_type) { - case RtcpFeedbackMessageType::GENERIC_NACK: - return cricket::FeedbackParam(cricket::kRtcpFbParamNack); - case RtcpFeedbackMessageType::PLI: - return cricket::FeedbackParam(cricket::kRtcpFbParamNack, - cricket::kRtcpFbNackParamPli); - default: - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Invalid message type in NACK RtcpFeedback."); - } - case RtcpFeedbackType::REMB: - if (feedback.message_type) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INVALID_PARAMETER, - "Didn't expect message type in REMB RtcpFeedback."); - } - return cricket::FeedbackParam(cricket::kRtcpFbParamRemb); - case RtcpFeedbackType::TRANSPORT_CC: - if (feedback.message_type) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INVALID_PARAMETER, - "Didn't expect message type in transport-cc RtcpFeedback."); - } - return cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc); - } - RTC_CHECK_NOTREACHED(); -} - -RTCErrorOr ToCricketCodec(const RtpCodecParameters& codec) { - switch (codec.kind) { - case cricket::MEDIA_TYPE_AUDIO: - if (codec.kind != cricket::MEDIA_TYPE_AUDIO) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INVALID_PARAMETER, - "Can't use video codec with audio sender or receiver."); - } - if (!codec.num_channels) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Missing number of channels for audio codec."); - } - if (*codec.num_channels <= 0) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_RANGE, - "Number of channels must be positive."); - } - if (!codec.clock_rate) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Missing codec clock rate."); - } - if (*codec.clock_rate <= 0) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_RANGE, - "Clock rate must be positive."); - } - break; - case cricket::MEDIA_TYPE_VIDEO: - if (codec.kind != cricket::MEDIA_TYPE_VIDEO) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INVALID_PARAMETER, - "Can't use audio codec with video sender or receiver."); - } - if (codec.num_channels) { - 
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Video codec shouldn't have num_channels."); - } - if (!codec.clock_rate) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Missing codec clock rate."); - } - if (*codec.clock_rate != cricket::kVideoCodecClockrate) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Video clock rate must be 90000."); - } - break; - default: - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Unknown codec type"); - } - - if (!cricket::IsValidRtpPayloadType(codec.payload_type)) { - char buf[40]; - rtc::SimpleStringBuilder sb(buf); - sb << "Invalid payload type: " << codec.payload_type; - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_RANGE, sb.str()); - } - - cricket::Codec cricket_codec = [&]() { - if (codec.kind == cricket::MEDIA_TYPE_AUDIO) { - return cricket::CreateAudioCodec(codec.payload_type, codec.name, - *codec.clock_rate, *codec.num_channels); - } - RTC_DCHECK(codec.kind == cricket::MEDIA_TYPE_VIDEO); - return cricket::CreateVideoCodec(codec.payload_type, codec.name); - }(); - - for (const RtcpFeedback& feedback : codec.rtcp_feedback) { - auto result = ToCricketFeedbackParam(feedback); - if (!result.ok()) { - return result.MoveError(); - } - cricket_codec.AddFeedbackParam(result.MoveValue()); - } - cricket_codec.params = codec.parameters; - return std::move(cricket_codec); -} - -RTCErrorOr> ToCricketCodecs( - const std::vector& codecs) { - std::vector cricket_codecs; - std::set seen_payload_types; - for (const RtpCodecParameters& codec : codecs) { - auto result = ToCricketCodec(codec); - if (!result.ok()) { - return result.MoveError(); - } - if (!seen_payload_types.insert(codec.payload_type).second) { - char buf[40]; - rtc::SimpleStringBuilder sb(buf); - sb << "Duplicate payload type: " << codec.payload_type; - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, sb.str()); - } - cricket_codecs.push_back(result.MoveValue()); - } - return std::move(cricket_codecs); -} - -RTCErrorOr ToCricketStreamParamsVec( - const std::vector& encodings) { - if (encodings.size() > 1u) { - LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_PARAMETER, - "ORTC API implementation doesn't currently " - "support simulcast or layered encodings."); - } else if (encodings.empty()) { - return cricket::StreamParamsVec(); - } - cricket::StreamParamsVec cricket_streams; - const RtpEncodingParameters& encoding = encodings[0]; - if (encoding.ssrc) { - cricket::StreamParams stream_params; - stream_params.add_ssrc(*encoding.ssrc); - cricket_streams.push_back(std::move(stream_params)); - } - return std::move(cricket_streams); -} - -absl::optional ToRtcpFeedback( - const cricket::FeedbackParam& cricket_feedback) { - if (cricket_feedback.id() == cricket::kRtcpFbParamCcm) { - if (cricket_feedback.param() == cricket::kRtcpFbCcmParamFir) { +std::optional ToRtcpFeedback( + const FeedbackParam& cricket_feedback) { + if (cricket_feedback.id() == kRtcpFbParamCcm) { + if (cricket_feedback.param() == kRtcpFbCcmParamFir) { return RtcpFeedback(RtcpFeedbackType::CCM, RtcpFeedbackMessageType::FIR); } else { RTC_LOG(LS_WARNING) << "Unsupported parameter for CCM RTCP feedback: " << cricket_feedback.param(); - return absl::nullopt; + return std::nullopt; } - } else if (cricket_feedback.id() == cricket::kRtcpFbParamLntf) { + } else if (cricket_feedback.id() == kRtcpFbParamLntf) { if (cricket_feedback.param().empty()) { return RtcpFeedback(RtcpFeedbackType::LNTF); } else { RTC_LOG(LS_WARNING) << "Unsupported parameter for LNTF RTCP feedback: " << cricket_feedback.param(); - 
return absl::nullopt; + return std::nullopt; } - } else if (cricket_feedback.id() == cricket::kRtcpFbParamNack) { + } else if (cricket_feedback.id() == kRtcpFbParamNack) { if (cricket_feedback.param().empty()) { return RtcpFeedback(RtcpFeedbackType::NACK, RtcpFeedbackMessageType::GENERIC_NACK); - } else if (cricket_feedback.param() == cricket::kRtcpFbNackParamPli) { + } else if (cricket_feedback.param() == kRtcpFbNackParamPli) { return RtcpFeedback(RtcpFeedbackType::NACK, RtcpFeedbackMessageType::PLI); } else { RTC_LOG(LS_WARNING) << "Unsupported parameter for NACK RTCP feedback: " << cricket_feedback.param(); - return absl::nullopt; + return std::nullopt; } - } else if (cricket_feedback.id() == cricket::kRtcpFbParamRemb) { + } else if (cricket_feedback.id() == kRtcpFbParamRemb) { if (!cricket_feedback.param().empty()) { RTC_LOG(LS_WARNING) << "Unsupported parameter for REMB RTCP feedback: " << cricket_feedback.param(); - return absl::nullopt; + return std::nullopt; } else { return RtcpFeedback(RtcpFeedbackType::REMB); } - } else if (cricket_feedback.id() == cricket::kRtcpFbParamTransportCc) { + } else if (cricket_feedback.id() == kRtcpFbParamTransportCc) { if (!cricket_feedback.param().empty()) { RTC_LOG(LS_WARNING) << "Unsupported parameter for transport-cc RTCP feedback: " << cricket_feedback.param(); - return absl::nullopt; + return std::nullopt; } else { return RtcpFeedback(RtcpFeedbackType::TRANSPORT_CC); } } RTC_LOG(LS_WARNING) << "Unsupported RTCP feedback type: " << cricket_feedback.id(); - return absl::nullopt; -} - -std::vector ToRtpEncodings( - const cricket::StreamParamsVec& stream_params) { - std::vector rtp_encodings; - for (const cricket::StreamParams& stream_param : stream_params) { - RtpEncodingParameters rtp_encoding; - rtp_encoding.ssrc.emplace(stream_param.first_ssrc()); - rtp_encodings.push_back(std::move(rtp_encoding)); - } - return rtp_encodings; + return std::nullopt; } -RtpCodecCapability ToRtpCodecCapability(const cricket::Codec& cricket_codec) { +RtpCodecCapability ToRtpCodecCapability(const Codec& cricket_codec) { RtpCodecCapability codec; codec.name = cricket_codec.name; - codec.kind = cricket_codec.type == cricket::Codec::Type::kAudio - ? cricket::MEDIA_TYPE_AUDIO - : cricket::MEDIA_TYPE_VIDEO; + codec.kind = cricket_codec.type == Codec::Type::kAudio + ? webrtc::MediaType::AUDIO + : webrtc::MediaType::VIDEO; codec.clock_rate.emplace(cricket_codec.clockrate); codec.preferred_payload_type.emplace(cricket_codec.id); - for (const cricket::FeedbackParam& cricket_feedback : + for (const FeedbackParam& cricket_feedback : cricket_codec.feedback_params.params()) { - absl::optional feedback = ToRtcpFeedback(cricket_feedback); + std::optional feedback = ToRtcpFeedback(cricket_feedback); if (feedback) { codec.rtcp_feedback.push_back(feedback.value()); } } switch (cricket_codec.type) { - case cricket::Codec::Type::kAudio: + case Codec::Type::kAudio: codec.num_channels = static_cast(cricket_codec.channels); break; - case cricket::Codec::Type::kVideo: + case Codec::Type::kVideo: codec.scalability_modes = cricket_codec.scalability_modes; break; } @@ -287,58 +103,38 @@ RtpCodecCapability ToRtpCodecCapability(const cricket::Codec& cricket_codec) { return codec; } -RtpCodecParameters ToRtpCodecParameters(const cricket::Codec& cricket_codec) { - RtpCodecParameters codec_param; - codec_param.name = cricket_codec.name; - codec_param.kind = cricket_codec.type == cricket::Codec::Type::kAudio - ? 
cricket::MEDIA_TYPE_AUDIO - : cricket::MEDIA_TYPE_VIDEO; - codec_param.clock_rate.emplace(cricket_codec.clockrate); - codec_param.payload_type = cricket_codec.id; - for (const cricket::FeedbackParam& cricket_feedback : - cricket_codec.feedback_params.params()) { - absl::optional feedback = ToRtcpFeedback(cricket_feedback); - if (feedback) { - codec_param.rtcp_feedback.push_back(feedback.value()); - } - } - switch (cricket_codec.type) { - case cricket::Codec::Type::kAudio: - codec_param.num_channels = static_cast(cricket_codec.channels); - break; - case cricket::Codec::Type::kVideo: - // Nothing to do. - break; - } - codec_param.parameters = cricket_codec.params; - return codec_param; -} - RtpCapabilities ToRtpCapabilities( - const std::vector& cricket_codecs, - const cricket::RtpHeaderExtensions& cricket_extensions) { + const std::vector& cricket_codecs, + const RtpHeaderExtensions& cricket_extensions) { RtpCapabilities capabilities; bool have_red = false; bool have_ulpfec = false; bool have_flexfec = false; bool have_rtx = false; - for (const cricket::Codec& cricket_codec : cricket_codecs) { - if (cricket_codec.name == cricket::kRedCodecName) { + for (const Codec& cricket_codec : cricket_codecs) { + if (cricket_codec.name == kRedCodecName) { + if (have_red) { + // There should only be one RED codec entry in caps. + continue; + } have_red = true; - } else if (cricket_codec.name == cricket::kUlpfecCodecName) { + } else if (cricket_codec.name == kUlpfecCodecName) { have_ulpfec = true; - } else if (cricket_codec.name == cricket::kFlexfecCodecName) { + } else if (cricket_codec.name == kFlexfecCodecName) { have_flexfec = true; - } else if (cricket_codec.name == cricket::kRtxCodecName) { + } else if (cricket_codec.name == kRtxCodecName) { if (have_rtx) { - // There should only be one RTX codec entry + // There should only be one RTX codec entry in caps. continue; } have_rtx = true; } auto codec_capability = ToRtpCodecCapability(cricket_codec); - if (cricket_codec.name == cricket::kRtxCodecName) { - // RTX codec should not have any parameter + if (cricket_codec.name == kRtxCodecName || + cricket_codec.name == kRedCodecName) { + // For RTX this removes the APT which points to a payload type. + // For RED this removes the redundancy spec which points to a payload + // type. 
codec_capability.parameters.clear(); } capabilities.codecs.push_back(codec_capability); @@ -359,20 +155,4 @@ RtpCapabilities ToRtpCapabilities( return capabilities; } -RtpParameters ToRtpParameters( - const std::vector& cricket_codecs, - const cricket::RtpHeaderExtensions& cricket_extensions, - const cricket::StreamParamsVec& stream_params) { - RtpParameters rtp_parameters; - for (const cricket::Codec& cricket_codec : cricket_codecs) { - rtp_parameters.codecs.push_back(ToRtpCodecParameters(cricket_codec)); - } - for (const RtpExtension& cricket_extension : cricket_extensions) { - rtp_parameters.header_extensions.emplace_back(cricket_extension.uri, - cricket_extension.id); - } - rtp_parameters.encodings = ToRtpEncodings(stream_params); - return rtp_parameters; -} - } // namespace webrtc diff --git a/pc/rtp_parameters_conversion.h b/pc/rtp_parameters_conversion.h index 2cc39dd0e6..e32bc2e03c 100644 --- a/pc/rtp_parameters_conversion.h +++ b/pc/rtp_parameters_conversion.h @@ -11,9 +11,9 @@ #ifndef PC_RTP_PARAMETERS_CONVERSION_H_ #define PC_RTP_PARAMETERS_CONVERSION_H_ +#include #include -#include "absl/types/optional.h" #include "api/rtc_error.h" #include "api/rtp_parameters.h" #include "media/base/codec.h" @@ -22,75 +22,26 @@ namespace webrtc { -// NOTE: Some functions are templated for convenience, such that template-based -// code dealing with AudioContentDescription and VideoContentDescription can -// use this easily. Such methods are usable with cricket::AudioCodec and -// cricket::VideoCodec. - -//*************************************************************************** -// Functions for converting from new webrtc:: structures to old cricket:: -// structures. -// -// As the return values imply, all of these functions do validation of the -// parameters and return an error if they're invalid. It's expected that any -// default values (such as video clock rate of 90000) have been filled by the -// time the webrtc:: structure is being converted to the cricket:: one. -// -// These are expected to be used when parameters are passed into an RtpSender -// or RtpReceiver, and need to be validated and converted so they can be -// applied to the media engine level. -//*************************************************************************** - -// Returns error on invalid input. Certain message types are only valid for -// certain feedback types. -RTCErrorOr ToCricketFeedbackParam( - const RtcpFeedback& feedback); - -// Verifies that the codec kind is correct, and it has mandatory parameters -// filled, with values in valid ranges. -RTCErrorOr ToCricketCodec(const RtpCodecParameters& codec); - -// Verifies that payload types aren't duplicated, in addition to normal -// validation. -RTCErrorOr> ToCricketCodecs( - const std::vector& codecs); - -// SSRCs are allowed to be ommitted. This may be used for receive parameters -// where SSRCs are unsignaled. -RTCErrorOr ToCricketStreamParamsVec( - const std::vector& encodings); - //***************************************************************************** -// Functions for converting from old cricket:: structures to new webrtc:: -// structures. Unlike the above functions, these are permissive with regards to +// Functions for converting from old webrtc:: structures to new webrtc:: +// structures. These are permissive with regards to // input validation; it's assumed that any necessary validation already // occurred. 
// -// These are expected to be used either to convert from audio/video engine -// capabilities to RtpCapabilities, or to convert from already-parsed SDP -// (in the form of cricket:: structures) to webrtc:: structures. The latter -// functionality is not yet implemented. +// These are expected to be used to convert from audio/video engine +// capabilities to RtpCapabilities. //***************************************************************************** // Returns empty value if `cricket_feedback` is a feedback type not // supported/recognized. -absl::optional ToRtcpFeedback( - const cricket::FeedbackParam& cricket_feedback); +std::optional ToRtcpFeedback( + const FeedbackParam& cricket_feedback); -std::vector ToRtpEncodings( - const cricket::StreamParamsVec& stream_params); - -RtpCodecParameters ToRtpCodecParameters(const cricket::Codec& cricket_codec); -RtpCodecCapability ToRtpCodecCapability(const cricket::Codec& cricket_codec); +RtpCodecCapability ToRtpCodecCapability(const Codec& cricket_codec); RtpCapabilities ToRtpCapabilities( - const std::vector& cricket_codecs, - const cricket::RtpHeaderExtensions& cricket_extensions); - -RtpParameters ToRtpParameters( - const std::vector& cricket_codecs, - const cricket::RtpHeaderExtensions& cricket_extensions, - const cricket::StreamParamsVec& stream_params); + const std::vector& cricket_codecs, + const RtpHeaderExtensions& cricket_extensions); } // namespace webrtc diff --git a/pc/rtp_parameters_conversion_unittest.cc b/pc/rtp_parameters_conversion_unittest.cc index 9b48bfacc9..407c535173 100644 --- a/pc/rtp_parameters_conversion_unittest.cc +++ b/pc/rtp_parameters_conversion_unittest.cc @@ -10,12 +10,15 @@ #include "pc/rtp_parameters_conversion.h" -#include #include +#include #include #include "api/media_types.h" +#include "api/rtp_parameters.h" #include "media/base/codec.h" +#include "media/base/media_constants.h" +#include "pc/session_description.h" #include "test/gmock.h" #include "test/gtest.h" @@ -23,335 +26,15 @@ using ::testing::UnorderedElementsAre; namespace webrtc { -TEST(RtpParametersConversionTest, ToCricketFeedbackParam) { - auto result = ToCricketFeedbackParam( - {RtcpFeedbackType::CCM, RtcpFeedbackMessageType::FIR}); - EXPECT_EQ(cricket::FeedbackParam("ccm", "fir"), result.value()); - - result = ToCricketFeedbackParam(RtcpFeedback(RtcpFeedbackType::LNTF)); - EXPECT_EQ(cricket::FeedbackParam("goog-lntf"), result.value()); - - result = ToCricketFeedbackParam( - {RtcpFeedbackType::NACK, RtcpFeedbackMessageType::GENERIC_NACK}); - EXPECT_EQ(cricket::FeedbackParam("nack"), result.value()); - - result = ToCricketFeedbackParam( - {RtcpFeedbackType::NACK, RtcpFeedbackMessageType::PLI}); - EXPECT_EQ(cricket::FeedbackParam("nack", "pli"), result.value()); - - result = ToCricketFeedbackParam(RtcpFeedback(RtcpFeedbackType::REMB)); - EXPECT_EQ(cricket::FeedbackParam("goog-remb"), result.value()); - - result = ToCricketFeedbackParam(RtcpFeedback(RtcpFeedbackType::TRANSPORT_CC)); - EXPECT_EQ(cricket::FeedbackParam("transport-cc"), result.value()); -} - -TEST(RtpParametersConversionTest, ToCricketFeedbackParamErrors) { - // CCM with missing or invalid message type. - auto result = ToCricketFeedbackParam(RtcpFeedback(RtcpFeedbackType::CCM)); - EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type()); - - result = ToCricketFeedbackParam( - {RtcpFeedbackType::CCM, RtcpFeedbackMessageType::PLI}); - EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type()); - - // LNTF with message type (should be left empty). 
- result = ToCricketFeedbackParam( - {RtcpFeedbackType::LNTF, RtcpFeedbackMessageType::GENERIC_NACK}); - EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type()); - - // NACK with missing or invalid message type. - result = ToCricketFeedbackParam(RtcpFeedback(RtcpFeedbackType::NACK)); - EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type()); - - result = ToCricketFeedbackParam( - {RtcpFeedbackType::NACK, RtcpFeedbackMessageType::FIR}); - EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type()); - - // REMB with message type (should be left empty). - result = ToCricketFeedbackParam( - {RtcpFeedbackType::REMB, RtcpFeedbackMessageType::GENERIC_NACK}); - EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type()); - - // TRANSPORT_CC with message type (should be left empty). - result = ToCricketFeedbackParam( - {RtcpFeedbackType::TRANSPORT_CC, RtcpFeedbackMessageType::FIR}); - EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type()); -} - -TEST(RtpParametersConversionTest, ToAudioCodec) { - RtpCodecParameters codec; - codec.name = "AuDiO"; - codec.kind = cricket::MEDIA_TYPE_AUDIO; - codec.payload_type = 120; - codec.clock_rate.emplace(36000); - codec.num_channels.emplace(6); - codec.parameters["foo"] = "bar"; - codec.rtcp_feedback.emplace_back(RtcpFeedbackType::TRANSPORT_CC); - auto result = ToCricketCodec(codec); - ASSERT_TRUE(result.ok()); - - EXPECT_EQ("AuDiO", result.value().name); - EXPECT_EQ(120, result.value().id); - EXPECT_EQ(36000, result.value().clockrate); - EXPECT_EQ(6u, result.value().channels); - ASSERT_EQ(1u, result.value().params.size()); - EXPECT_EQ("bar", result.value().params["foo"]); - EXPECT_EQ(1u, result.value().feedback_params.params().size()); - EXPECT_TRUE(result.value().feedback_params.Has( - cricket::FeedbackParam("transport-cc"))); -} - -TEST(RtpParametersConversionTest, ToVideoCodec) { - RtpCodecParameters codec; - codec.name = "coolcodec"; - codec.kind = cricket::MEDIA_TYPE_VIDEO; - codec.payload_type = 101; - codec.clock_rate.emplace(90000); - codec.parameters["foo"] = "bar"; - codec.parameters["PING"] = "PONG"; - codec.rtcp_feedback.emplace_back(RtcpFeedbackType::LNTF); - codec.rtcp_feedback.emplace_back(RtcpFeedbackType::TRANSPORT_CC); - codec.rtcp_feedback.emplace_back(RtcpFeedbackType::NACK, - RtcpFeedbackMessageType::PLI); - auto result = ToCricketCodec(codec); - ASSERT_TRUE(result.ok()); - - EXPECT_EQ("coolcodec", result.value().name); - EXPECT_EQ(101, result.value().id); - EXPECT_EQ(90000, result.value().clockrate); - ASSERT_EQ(2u, result.value().params.size()); - EXPECT_EQ("bar", result.value().params["foo"]); - EXPECT_EQ("PONG", result.value().params["PING"]); - EXPECT_EQ(3u, result.value().feedback_params.params().size()); - EXPECT_TRUE( - result.value().feedback_params.Has(cricket::FeedbackParam("goog-lntf"))); - EXPECT_TRUE(result.value().feedback_params.Has( - cricket::FeedbackParam("transport-cc"))); - EXPECT_TRUE(result.value().feedback_params.Has( - cricket::FeedbackParam("nack", "pli"))); -} - -// Trying to convert to an AudioCodec if the kind is "video" should fail. 
-TEST(RtpParametersConversionTest, ToCricketCodecInvalidKind) { - RtpCodecParameters audio_codec; - audio_codec.name = "opus"; - audio_codec.kind = cricket::MEDIA_TYPE_VIDEO; - audio_codec.payload_type = 111; - audio_codec.clock_rate.emplace(48000); - audio_codec.num_channels.emplace(2); - - RtpCodecParameters video_codec; - video_codec.name = "VP8"; - video_codec.kind = cricket::MEDIA_TYPE_AUDIO; - video_codec.payload_type = 102; - video_codec.clock_rate.emplace(90000); - - auto audio_result = ToCricketCodec(audio_codec); - EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, audio_result.error().type()); - - auto video_result = ToCricketCodec(video_codec); - EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, video_result.error().type()); - - // Sanity check that if the kind is correct, the conversion succeeds. - audio_codec.kind = cricket::MEDIA_TYPE_AUDIO; - video_codec.kind = cricket::MEDIA_TYPE_VIDEO; - audio_result = ToCricketCodec(audio_codec); - EXPECT_TRUE(audio_result.ok()); - video_result = ToCricketCodec(video_codec); - EXPECT_TRUE(video_result.ok()); -} - -TEST(RtpParametersConversionTest, ToAudioCodecInvalidParameters) { - // Missing channels. - RtpCodecParameters codec; - codec.name = "opus"; - codec.kind = cricket::MEDIA_TYPE_AUDIO; - codec.payload_type = 111; - codec.clock_rate.emplace(48000); - auto result = ToCricketCodec(codec); - EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type()); - - // Negative number of channels. - codec.num_channels.emplace(-1); - result = ToCricketCodec(codec); - EXPECT_EQ(RTCErrorType::INVALID_RANGE, result.error().type()); - - // Missing clock rate. - codec.num_channels.emplace(2); - codec.clock_rate.reset(); - result = ToCricketCodec(codec); - EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type()); - - // Negative clock rate. - codec.clock_rate.emplace(-48000); - result = ToCricketCodec(codec); - EXPECT_EQ(RTCErrorType::INVALID_RANGE, result.error().type()); - - // Sanity check that conversion succeeds if these errors are fixed. - codec.clock_rate.emplace(48000); - result = ToCricketCodec(codec); - EXPECT_TRUE(result.ok()); -} - -TEST(RtpParametersConversionTest, ToVideoCodecInvalidParameters) { - // Missing clock rate. - RtpCodecParameters codec; - codec.name = "VP8"; - codec.kind = cricket::MEDIA_TYPE_VIDEO; - codec.payload_type = 102; - auto result = ToCricketCodec(codec); - EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type()); - - // Invalid clock rate. - codec.clock_rate.emplace(48000); - result = ToCricketCodec(codec); - EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type()); - - // Channels set (should be unset). - codec.clock_rate.emplace(90000); - codec.num_channels.emplace(2); - result = ToCricketCodec(codec); - EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type()); - - // Sanity check that conversion succeeds if these errors are fixed. - codec.num_channels.reset(); - result = ToCricketCodec(codec); - EXPECT_TRUE(result.ok()); -} - -TEST(RtpParametersConversionTest, ToCricketCodecInvalidPayloadType) { - RtpCodecParameters codec; - codec.name = "VP8"; - codec.kind = cricket::MEDIA_TYPE_VIDEO; - codec.clock_rate.emplace(90000); - - codec.payload_type = -1000; - auto result = ToCricketCodec(codec); - EXPECT_EQ(RTCErrorType::INVALID_RANGE, result.error().type()); - - // Max payload type is 127. - codec.payload_type = 128; - result = ToCricketCodec(codec); - EXPECT_EQ(RTCErrorType::INVALID_RANGE, result.error().type()); - - // Sanity check that conversion succeeds with a valid payload type. 
- codec.payload_type = 127; - result = ToCricketCodec(codec); - EXPECT_TRUE(result.ok()); -} - -// There are already tests for ToCricketFeedbackParam, but ensure that those -// errors are propagated from ToCricketCodec. -TEST(RtpParametersConversionTest, ToCricketCodecInvalidRtcpFeedback) { - RtpCodecParameters codec; - codec.name = "VP8"; - codec.kind = cricket::MEDIA_TYPE_VIDEO; - codec.clock_rate.emplace(90000); - codec.payload_type = 99; - codec.rtcp_feedback.emplace_back(RtcpFeedbackType::CCM, - RtcpFeedbackMessageType::PLI); - - auto result = ToCricketCodec(codec); - EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type()); - - // Sanity check that conversion succeeds without invalid feedback. - codec.rtcp_feedback.clear(); - result = ToCricketCodec(codec); - EXPECT_TRUE(result.ok()); -} - -TEST(RtpParametersConversionTest, ToCricketCodecs) { - std::vector codecs; - RtpCodecParameters codec; - codec.name = "VP8"; - codec.kind = cricket::MEDIA_TYPE_VIDEO; - codec.clock_rate.emplace(90000); - codec.payload_type = 99; - codecs.push_back(codec); - - codec.name = "VP9"; - codec.payload_type = 100; - codecs.push_back(codec); - - auto result = ToCricketCodecs(codecs); - ASSERT_TRUE(result.ok()); - ASSERT_EQ(2u, result.value().size()); - EXPECT_EQ("VP8", result.value()[0].name); - EXPECT_EQ(99, result.value()[0].id); - EXPECT_EQ("VP9", result.value()[1].name); - EXPECT_EQ(100, result.value()[1].id); -} - -TEST(RtpParametersConversionTest, ToCricketCodecsDuplicatePayloadType) { - std::vector codecs; - RtpCodecParameters codec; - codec.name = "VP8"; - codec.kind = cricket::MEDIA_TYPE_VIDEO; - codec.clock_rate.emplace(90000); - codec.payload_type = 99; - codecs.push_back(codec); - - codec.name = "VP9"; - codec.payload_type = 99; - codecs.push_back(codec); - - auto result = ToCricketCodecs(codecs); - EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type()); - - // Sanity check that this succeeds without the duplicate payload type. - codecs[1].payload_type = 120; - result = ToCricketCodecs(codecs); - EXPECT_TRUE(result.ok()); -} - -TEST(RtpParametersConversionTest, ToCricketStreamParamsVecSimple) { - std::vector encodings; - RtpEncodingParameters encoding; - encoding.ssrc.emplace(0xbaadf00d); - encodings.push_back(encoding); - auto result = ToCricketStreamParamsVec(encodings); - ASSERT_TRUE(result.ok()); - ASSERT_EQ(1u, result.value().size()); - EXPECT_EQ(1u, result.value()[0].ssrcs.size()); - EXPECT_EQ(0xbaadf00d, result.value()[0].first_ssrc()); -} - -// No encodings should be accepted; an endpoint may want to prepare a -// decoder/encoder without having something to receive/send yet. -TEST(RtpParametersConversionTest, ToCricketStreamParamsVecNoEncodings) { - std::vector encodings; - auto result = ToCricketStreamParamsVec(encodings); - ASSERT_TRUE(result.ok()); - EXPECT_EQ(0u, result.value().size()); -} - -// An encoding without SSRCs should be accepted. This could be the case when -// SSRCs aren't signaled and payload-type based demuxing is used. -TEST(RtpParametersConversionTest, ToCricketStreamParamsVecMissingSsrcs) { - std::vector encodings = {{}}; - // Creates RtxParameters with empty SSRC. - auto result = ToCricketStreamParamsVec(encodings); - ASSERT_TRUE(result.ok()); - EXPECT_EQ(0u, result.value().size()); -} - -// TODO(deadbeef): Update this test when we support multiple encodings. 
-TEST(RtpParametersConversionTest, ToCricketStreamParamsVecMultipleEncodings) { - std::vector encodings = {{}, {}}; - auto result = ToCricketStreamParamsVec(encodings); - EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER, result.error().type()); -} - TEST(RtpParametersConversionTest, ToRtcpFeedback) { - absl::optional result = ToRtcpFeedback({"ccm", "fir"}); + std::optional result = ToRtcpFeedback({"ccm", "fir"}); EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::CCM, RtcpFeedbackMessageType::FIR), *result); - result = ToRtcpFeedback(cricket::FeedbackParam("goog-lntf")); + result = ToRtcpFeedback(FeedbackParam("goog-lntf")); EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::LNTF), *result); - result = ToRtcpFeedback(cricket::FeedbackParam("nack")); + result = ToRtcpFeedback(FeedbackParam("nack")); EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::NACK, RtcpFeedbackMessageType::GENERIC_NACK), *result); @@ -360,19 +43,19 @@ TEST(RtpParametersConversionTest, ToRtcpFeedback) { EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::NACK, RtcpFeedbackMessageType::PLI), *result); - result = ToRtcpFeedback(cricket::FeedbackParam("goog-remb")); + result = ToRtcpFeedback(FeedbackParam("goog-remb")); EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::REMB), *result); - result = ToRtcpFeedback(cricket::FeedbackParam("transport-cc")); + result = ToRtcpFeedback(FeedbackParam("transport-cc")); EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::TRANSPORT_CC), *result); } TEST(RtpParametersConversionTest, ToRtcpFeedbackErrors) { // CCM with missing or invalid message type. - absl::optional result = ToRtcpFeedback({"ccm", "pli"}); + std::optional result = ToRtcpFeedback({"ccm", "pli"}); EXPECT_FALSE(result); - result = ToRtcpFeedback(cricket::FeedbackParam("ccm")); + result = ToRtcpFeedback(FeedbackParam("ccm")); EXPECT_FALSE(result); // LNTF with message type (should be left empty). @@ -392,19 +75,18 @@ TEST(RtpParametersConversionTest, ToRtcpFeedbackErrors) { EXPECT_FALSE(result); // Unknown message type. 
- result = ToRtcpFeedback(cricket::FeedbackParam("foo")); + result = ToRtcpFeedback(FeedbackParam("foo")); EXPECT_FALSE(result); } TEST(RtpParametersConversionTest, ToAudioRtpCodecCapability) { - cricket::AudioCodec cricket_codec = - cricket::CreateAudioCodec(50, "foo", 22222, 4); + Codec cricket_codec = CreateAudioCodec(50, "foo", 22222, 4); cricket_codec.params["foo"] = "bar"; - cricket_codec.feedback_params.Add(cricket::FeedbackParam("transport-cc")); + cricket_codec.feedback_params.Add(FeedbackParam("transport-cc")); RtpCodecCapability codec = ToRtpCodecCapability(cricket_codec); EXPECT_EQ("foo", codec.name); - EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, codec.kind); + EXPECT_EQ(webrtc::MediaType::AUDIO, codec.kind); EXPECT_EQ(50, codec.preferred_payload_type); EXPECT_EQ(22222, codec.clock_rate); EXPECT_EQ(4, codec.num_channels); @@ -416,17 +98,17 @@ TEST(RtpParametersConversionTest, ToAudioRtpCodecCapability) { } TEST(RtpParametersConversionTest, ToVideoRtpCodecCapability) { - cricket::VideoCodec cricket_codec = cricket::CreateVideoCodec(101, "VID"); + Codec cricket_codec = CreateVideoCodec(101, "VID"); cricket_codec.clockrate = 80000; cricket_codec.params["foo"] = "bar"; cricket_codec.params["ANOTHER"] = "param"; - cricket_codec.feedback_params.Add(cricket::FeedbackParam("transport-cc")); - cricket_codec.feedback_params.Add(cricket::FeedbackParam("goog-lntf")); + cricket_codec.feedback_params.Add(FeedbackParam("transport-cc")); + cricket_codec.feedback_params.Add(FeedbackParam("goog-lntf")); cricket_codec.feedback_params.Add({"nack", "pli"}); RtpCodecCapability codec = ToRtpCodecCapability(cricket_codec); EXPECT_EQ("VID", codec.name); - EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, codec.kind); + EXPECT_EQ(webrtc::MediaType::VIDEO, codec.kind); EXPECT_EQ(101, codec.preferred_payload_type); EXPECT_EQ(80000, codec.clock_rate); ASSERT_EQ(2u, codec.parameters.size()); @@ -440,80 +122,12 @@ TEST(RtpParametersConversionTest, ToVideoRtpCodecCapability) { codec.rtcp_feedback[2]); } -TEST(RtpParametersConversionTest, ToRtpEncodingsWithEmptyStreamParamsVec) { - cricket::StreamParamsVec streams; - auto rtp_encodings = ToRtpEncodings(streams); - ASSERT_EQ(0u, rtp_encodings.size()); -} - -TEST(RtpParametersConversionTest, ToRtpEncodingsWithMultipleStreamParams) { - cricket::StreamParamsVec streams; - cricket::StreamParams stream1; - stream1.ssrcs.push_back(1111u); - - cricket::StreamParams stream2; - stream2.ssrcs.push_back(2222u); - - streams.push_back(stream1); - streams.push_back(stream2); - - auto rtp_encodings = ToRtpEncodings(streams); - ASSERT_EQ(2u, rtp_encodings.size()); - EXPECT_EQ(1111u, rtp_encodings[0].ssrc); - EXPECT_EQ(2222u, rtp_encodings[1].ssrc); -} - -TEST(RtpParametersConversionTest, ToAudioRtpCodecParameters) { - cricket::AudioCodec cricket_codec = - cricket::CreateAudioCodec(50, "foo", 22222, 4); - cricket_codec.params["foo"] = "bar"; - cricket_codec.feedback_params.Add(cricket::FeedbackParam("transport-cc")); - RtpCodecParameters codec = ToRtpCodecParameters(cricket_codec); - - EXPECT_EQ("foo", codec.name); - EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, codec.kind); - EXPECT_EQ(50, codec.payload_type); - EXPECT_EQ(22222, codec.clock_rate); - EXPECT_EQ(4, codec.num_channels); - ASSERT_EQ(1u, codec.parameters.size()); - EXPECT_EQ("bar", codec.parameters["foo"]); - EXPECT_EQ(1u, codec.rtcp_feedback.size()); - EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::TRANSPORT_CC), - codec.rtcp_feedback[0]); -} - -TEST(RtpParametersConversionTest, ToVideoRtpCodecParameters) { - cricket::VideoCodec cricket_codec = 
cricket::CreateVideoCodec(101, "VID"); - cricket_codec.clockrate = 80000; - cricket_codec.params["foo"] = "bar"; - cricket_codec.params["ANOTHER"] = "param"; - cricket_codec.feedback_params.Add(cricket::FeedbackParam("transport-cc")); - cricket_codec.feedback_params.Add(cricket::FeedbackParam("goog-lntf")); - cricket_codec.feedback_params.Add({"nack", "pli"}); - RtpCodecParameters codec = ToRtpCodecParameters(cricket_codec); - - EXPECT_EQ("VID", codec.name); - EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, codec.kind); - EXPECT_EQ(101, codec.payload_type); - EXPECT_EQ(80000, codec.clock_rate); - ASSERT_EQ(2u, codec.parameters.size()); - EXPECT_EQ("bar", codec.parameters["foo"]); - EXPECT_EQ("param", codec.parameters["ANOTHER"]); - EXPECT_EQ(3u, codec.rtcp_feedback.size()); - EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::TRANSPORT_CC), - codec.rtcp_feedback[0]); - EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::LNTF), codec.rtcp_feedback[1]); - EXPECT_EQ(RtcpFeedback(RtcpFeedbackType::NACK, RtcpFeedbackMessageType::PLI), - codec.rtcp_feedback[2]); -} - // An unknown feedback param should just be ignored. TEST(RtpParametersConversionTest, ToRtpCodecCapabilityUnknownFeedbackParam) { - cricket::AudioCodec cricket_codec = - cricket::CreateAudioCodec(50, "foo", 22222, 4); + Codec cricket_codec = CreateAudioCodec(50, "foo", 22222, 4); cricket_codec.params["foo"] = "bar"; cricket_codec.feedback_params.Add({"unknown", "param"}); - cricket_codec.feedback_params.Add(cricket::FeedbackParam("transport-cc")); + cricket_codec.feedback_params.Add(FeedbackParam("transport-cc")); RtpCodecCapability codec = ToRtpCodecCapability(cricket_codec); ASSERT_EQ(1u, codec.rtcp_feedback.size()); @@ -525,21 +139,17 @@ TEST(RtpParametersConversionTest, ToRtpCodecCapabilityUnknownFeedbackParam) { // test that the result of ToRtpCodecCapability ends up in the result, and that // the "fec" list is assembled correctly. TEST(RtpParametersConversionTest, ToRtpCapabilities) { - cricket::VideoCodec vp8 = cricket::CreateVideoCodec(101, "VP8"); - vp8.clockrate = 90000; - - cricket::VideoCodec red = cricket::CreateVideoCodec(102, "red"); - red.clockrate = 90000; + Codec vp8 = CreateVideoCodec(101, "VP8"); - cricket::VideoCodec ulpfec = cricket::CreateVideoCodec(103, "ulpfec"); - ulpfec.clockrate = 90000; + Codec red = CreateVideoCodec(102, "red"); + // Note: fmtp not usually done for video-red but we want it filtered. 
+ red.SetParam(kCodecParamNotInNameValueFormat, "101/101"); - cricket::VideoCodec flexfec = cricket::CreateVideoCodec(102, "flexfec-03"); - flexfec.clockrate = 90000; - - cricket::VideoCodec rtx = cricket::CreateVideoRtxCodec(014, 101); - - cricket::VideoCodec rtx2 = cricket::CreateVideoRtxCodec(105, 109); + Codec red2 = CreateVideoCodec(127, "red"); + Codec ulpfec = CreateVideoCodec(103, "ulpfec"); + Codec flexfec = CreateVideoCodec(102, "flexfec-03"); + Codec rtx = CreateVideoRtxCodec(014, 101); + Codec rtx2 = CreateVideoRtxCodec(105, 109); RtpCapabilities capabilities = ToRtpCapabilities({vp8, ulpfec, rtx, rtx2}, {{"uri", 1}, {"uri2", 3}}); @@ -555,48 +165,19 @@ TEST(RtpParametersConversionTest, ToRtpCapabilities) { EXPECT_EQ(3, capabilities.header_extensions[1].preferred_id); EXPECT_EQ(0u, capabilities.fec.size()); - capabilities = ToRtpCapabilities({vp8, red, ulpfec, rtx}, - cricket::RtpHeaderExtensions()); + capabilities = + ToRtpCapabilities({vp8, red, red2, ulpfec, rtx}, RtpHeaderExtensions()); EXPECT_EQ(4u, capabilities.codecs.size()); EXPECT_THAT( capabilities.fec, UnorderedElementsAre(FecMechanism::RED, FecMechanism::RED_AND_ULPFEC)); - capabilities = - ToRtpCapabilities({vp8, red, flexfec}, cricket::RtpHeaderExtensions()); + capabilities = ToRtpCapabilities({vp8, red, flexfec}, RtpHeaderExtensions()); EXPECT_EQ(3u, capabilities.codecs.size()); EXPECT_THAT(capabilities.fec, UnorderedElementsAre(FecMechanism::RED, FecMechanism::FLEXFEC)); -} - -TEST(RtpParametersConversionTest, ToRtpParameters) { - cricket::VideoCodec vp8 = cricket::CreateVideoCodec(101, "VP8"); - vp8.clockrate = 90000; - - cricket::VideoCodec red = cricket::CreateVideoCodec(102, "red"); - red.clockrate = 90000; - - cricket::VideoCodec ulpfec = cricket::CreateVideoCodec(103, "ulpfec"); - ulpfec.clockrate = 90000; - - cricket::StreamParamsVec streams; - cricket::StreamParams stream; - stream.ssrcs.push_back(1234u); - streams.push_back(stream); - - RtpParameters rtp_parameters = - ToRtpParameters({vp8, red, ulpfec}, {{"uri", 1}, {"uri2", 3}}, streams); - ASSERT_EQ(3u, rtp_parameters.codecs.size()); - EXPECT_EQ("VP8", rtp_parameters.codecs[0].name); - EXPECT_EQ("red", rtp_parameters.codecs[1].name); - EXPECT_EQ("ulpfec", rtp_parameters.codecs[2].name); - ASSERT_EQ(2u, rtp_parameters.header_extensions.size()); - EXPECT_EQ("uri", rtp_parameters.header_extensions[0].uri); - EXPECT_EQ(1, rtp_parameters.header_extensions[0].id); - EXPECT_EQ("uri2", rtp_parameters.header_extensions[1].uri); - EXPECT_EQ(3, rtp_parameters.header_extensions[1].id); - ASSERT_EQ(1u, rtp_parameters.encodings.size()); - EXPECT_EQ(1234u, rtp_parameters.encodings[0].ssrc); + EXPECT_EQ(capabilities.codecs[1].name, "red"); + EXPECT_TRUE(capabilities.codecs[1].parameters.empty()); } } // namespace webrtc diff --git a/pc/rtp_receiver.cc b/pc/rtp_receiver.cc index a2b3353c0e..e09c89301c 100644 --- a/pc/rtp_receiver.cc +++ b/pc/rtp_receiver.cc @@ -12,9 +12,13 @@ #include +#include +#include #include #include +#include "api/media_stream_interface.h" +#include "api/scoped_refptr.h" #include "pc/media_stream.h" #include "pc/media_stream_proxy.h" #include "rtc_base/thread.h" @@ -22,19 +26,20 @@ namespace webrtc { // This function is only expected to be called on the signalling thread. +// On the other hand, some test or even production setups may use +// several signaling threads. 
int RtpReceiverInternal::GenerateUniqueId() { - static int g_unique_id = 0; + static std::atomic g_unique_id{0}; return ++g_unique_id; } -std::vector> +std::vector> RtpReceiverInternal::CreateStreamsFromIds(std::vector stream_ids) { - std::vector> streams( - stream_ids.size()); + std::vector> streams(stream_ids.size()); for (size_t i = 0; i < stream_ids.size(); ++i) { streams[i] = MediaStreamProxy::Create( - rtc::Thread::Current(), MediaStream::Create(std::move(stream_ids[i]))); + Thread::Current(), MediaStream::Create(std::move(stream_ids[i]))); } return streams; } diff --git a/pc/rtp_receiver.h b/pc/rtp_receiver.h index 16ab011f14..db3ba3e0e0 100644 --- a/pc/rtp_receiver.h +++ b/pc/rtp_receiver.h @@ -10,31 +10,22 @@ // This file contains classes that implement RtpReceiverInterface. // An RtpReceiver associates a MediaStreamTrackInterface with an underlying -// transport (provided by cricket::VoiceChannel/cricket::VideoChannel) +// transport (provided by webrtc::VoiceChannel/webrtc::VideoChannel) #ifndef PC_RTP_RECEIVER_H_ #define PC_RTP_RECEIVER_H_ #include +#include #include #include -#include "absl/types/optional.h" -#include "api/crypto/frame_decryptor_interface.h" #include "api/dtls_transport_interface.h" #include "api/media_stream_interface.h" -#include "api/media_types.h" -#include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" #include "api/scoped_refptr.h" -#include "api/video/video_frame.h" -#include "api/video/video_sink_interface.h" -#include "api/video/video_source_interface.h" #include "media/base/media_channel.h" -#include "media/base/video_broadcaster.h" -#include "pc/video_track_source.h" -#include "rtc_base/thread.h" namespace webrtc { @@ -53,8 +44,7 @@ class RtpReceiverInternal : public RtpReceiverInterface { // * SetMediaChannel(nullptr) must be called before the media channel is // destroyed. // * This method must be invoked on the worker thread. - virtual void SetMediaChannel( - cricket::MediaReceiveChannelInterface* media_channel) = 0; + virtual void SetMediaChannel(MediaReceiveChannelInterface* media_channel) = 0; // Configures the RtpReceiver with the underlying media channel, with the // given SSRC as the stream identifier. @@ -65,10 +55,10 @@ class RtpReceiverInternal : public RtpReceiverInterface { virtual void SetupUnsignaledMediaChannel() = 0; virtual void set_transport( - rtc::scoped_refptr dtls_transport) = 0; + scoped_refptr dtls_transport) = 0; // This SSRC is used as an identifier for the receiver between the API layer // and the WebRtcVideoEngine, WebRtcVoiceEngine layer. - virtual absl::optional ssrc() const = 0; + virtual std::optional ssrc() const = 0; // Call this to notify the RtpReceiver when the first packet has been received // on the corresponding channel. @@ -82,7 +72,7 @@ class RtpReceiverInternal : public RtpReceiverInterface { // set_stream_ids() as soon as downstream projects are no longer dependent on // stream objects. virtual void SetStreams( - const std::vector>& streams) = 0; + const std::vector>& streams) = 0; // Returns an ID that changes if the attached track changes, but // otherwise remains constant. Used to generate IDs for stats. 
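// ---------------------------------------------------------------------------
// A minimal sketch of the GenerateUniqueId() change above: the plain static
// counter becomes an atomic one so that setups running more than one
// signaling thread cannot hand out the same ID twice. The <int>
// specialization and the standalone function name below are assumptions made
// for illustration.

#include <atomic>

int GenerateUniqueIdSketch() {
  // Pre-increment on std::atomic<int> is one atomic read-modify-write, so
  // concurrent callers always observe distinct values.
  static std::atomic<int> g_unique_id{0};
  return ++g_unique_id;
}
// ---------------------------------------------------------------------------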
@@ -92,8 +82,8 @@ class RtpReceiverInternal : public RtpReceiverInterface { protected: static int GenerateUniqueId(); - static std::vector> - CreateStreamsFromIds(std::vector stream_ids); + static std::vector> CreateStreamsFromIds( + std::vector stream_ids); }; } // namespace webrtc diff --git a/pc/rtp_receiver_proxy.h b/pc/rtp_receiver_proxy.h index d4114e0f0b..223b307030 100644 --- a/pc/rtp_receiver_proxy.h +++ b/pc/rtp_receiver_proxy.h @@ -11,10 +11,19 @@ #ifndef PC_RTP_RECEIVER_PROXY_H_ #define PC_RTP_RECEIVER_PROXY_H_ +#include #include #include +#include "api/crypto/frame_decryptor_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/frame_transformer_interface.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/transport/rtp/rtp_source.h" #include "pc/proxy.h" namespace webrtc { @@ -24,29 +33,28 @@ namespace webrtc { // an implementation detail. BEGIN_PROXY_MAP(RtpReceiver) PROXY_PRIMARY_THREAD_DESTRUCTOR() -BYPASS_PROXY_CONSTMETHOD0(rtc::scoped_refptr, track) -PROXY_CONSTMETHOD0(rtc::scoped_refptr, dtls_transport) +BYPASS_PROXY_CONSTMETHOD0(scoped_refptr, track) +PROXY_CONSTMETHOD0(scoped_refptr, dtls_transport) PROXY_CONSTMETHOD0(std::vector, stream_ids) -PROXY_CONSTMETHOD0(std::vector>, - streams) -BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type) +PROXY_CONSTMETHOD0(std::vector>, streams) +BYPASS_PROXY_CONSTMETHOD0(webrtc::MediaType, media_type) BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_SECONDARY_CONSTMETHOD0(RtpParameters, GetParameters) PROXY_METHOD1(void, SetObserver, RtpReceiverObserverInterface*) PROXY_SECONDARY_METHOD1(void, SetJitterBufferMinimumDelay, - absl::optional) + std::optional) PROXY_SECONDARY_CONSTMETHOD0(std::vector, GetSources) // TODO(bugs.webrtc.org/12772): Remove. PROXY_SECONDARY_METHOD1(void, SetFrameDecryptor, - rtc::scoped_refptr) + scoped_refptr) // TODO(bugs.webrtc.org/12772): Remove. 
-PROXY_SECONDARY_CONSTMETHOD0(rtc::scoped_refptr, +PROXY_SECONDARY_CONSTMETHOD0(scoped_refptr, GetFrameDecryptor) PROXY_SECONDARY_METHOD1(void, - SetDepacketizerToDecoderFrameTransformer, - rtc::scoped_refptr) + SetFrameTransformer, + scoped_refptr) END_PROXY_MAP(RtpReceiver) } // namespace webrtc diff --git a/pc/rtp_sender.cc b/pc/rtp_sender.cc index cdae1595b3..31bbba0493 100644 --- a/pc/rtp_sender.cc +++ b/pc/rtp_sender.cc @@ -12,20 +12,42 @@ #include #include +#include +#include +#include +#include +#include #include #include #include #include "absl/algorithm/container.h" #include "api/audio_options.h" +#include "api/crypto/frame_encryptor_interface.h" +#include "api/dtmf_sender_interface.h" +#include "api/environment/environment.h" +#include "api/frame_transformer_interface.h" +#include "api/make_ref_counted.h" #include "api/media_stream_interface.h" #include "api/priority.h" #include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "media/base/audio_source.h" +#include "media/base/codec.h" +#include "media/base/media_channel.h" #include "media/base/media_engine.h" +#include "pc/dtmf_sender.h" #include "pc/legacy_stats_collector_interface.h" #include "rtc_base/checks.h" -#include "rtc_base/helpers.h" +#include "rtc_base/crypto_random.h" +#include "rtc_base/event.h" #include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread.h" #include "rtc_base/trace_event.h" namespace webrtc { @@ -91,7 +113,7 @@ RtpParameters RestoreEncodingLayers( class SignalingThreadCallback { public: - SignalingThreadCallback(rtc::Thread* signaling_thread, + SignalingThreadCallback(Thread* signaling_thread, SetParametersCallback callback) : signaling_thread_(signaling_thread), callback_(std::move(callback)) {} SignalingThreadCallback(SignalingThreadCallback&& other) @@ -115,17 +137,17 @@ class SignalingThreadCallback { if (!signaling_thread_->IsCurrent()) { signaling_thread_->PostTask( [callback = std::move(callback_), error]() mutable { - webrtc::InvokeSetParametersCallback(callback, error); + InvokeSetParametersCallback(callback, error); }); callback_ = nullptr; return; } - webrtc::InvokeSetParametersCallback(callback_, error); + InvokeSetParametersCallback(callback_, error); callback_ = nullptr; } - rtc::Thread* signaling_thread_; + Thread* signaling_thread_; SetParametersCallback callback_; }; @@ -148,10 +170,12 @@ bool UnimplementedRtpParameterHasValue(const RtpParameters& parameters) { return false; } -RtpSenderBase::RtpSenderBase(rtc::Thread* worker_thread, +RtpSenderBase::RtpSenderBase(const Environment& env, + Thread* worker_thread, const std::string& id, SetStreamsObserver* set_streams_observer) - : signaling_thread_(rtc::Thread::Current()), + : env_(env), + signaling_thread_(Thread::Current()), worker_thread_(worker_thread), id_(id), set_streams_observer_(set_streams_observer) { @@ -160,7 +184,7 @@ RtpSenderBase::RtpSenderBase(rtc::Thread* worker_thread, } void RtpSenderBase::SetFrameEncryptor( - rtc::scoped_refptr frame_encryptor) { + scoped_refptr frame_encryptor) { RTC_DCHECK_RUN_ON(signaling_thread_); frame_encryptor_ = std::move(frame_encryptor); // Special Case: Set the frame encryptor to any value on any existing channel. 
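// ---------------------------------------------------------------------------
// A condensed sketch of the dispatch logic inside SignalingThreadCallback in
// the rtp_sender.cc hunk above: the SetParameters result is resolved on the
// signaling thread, posting a task only when the completion lands on another
// thread. The free-function form and its name are illustrative; it assumes
// the declarations rtp_sender.cc already pulls in (Thread,
// SetParametersCallback, InvokeSetParametersCallback, RTCError).

void ResolveOnSignalingThreadSketch(Thread* signaling_thread,
                                    SetParametersCallback callback,
                                    RTCError error) {
  if (!signaling_thread->IsCurrent()) {
    // Move the callback into the posted task so it is invoked exactly once,
    // and always on the signaling thread.
    signaling_thread->PostTask(
        [callback = std::move(callback), error]() mutable {
          InvokeSetParametersCallback(callback, error);
        });
    return;
  }
  InvokeSetParametersCallback(callback, error);
}
// ---------------------------------------------------------------------------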
@@ -187,8 +211,7 @@ void RtpSenderBase::SetEncoderSelectorOnChannel() { } } -void RtpSenderBase::SetMediaChannel( - cricket::MediaSendChannelInterface* media_channel) { +void RtpSenderBase::SetMediaChannel(MediaSendChannelInterface* media_channel) { RTC_DCHECK(media_channel == nullptr || media_channel->media_type() == media_type()); media_channel_ = media_channel; @@ -226,7 +249,7 @@ RtpParameters RtpSenderBase::GetParametersInternalWithAllLayers() const { RtpParameters RtpSenderBase::GetParameters() const { RTC_DCHECK_RUN_ON(signaling_thread_); RtpParameters result = GetParametersInternal(); - last_transaction_id_ = rtc::CreateRandomUuid(); + last_transaction_id_ = CreateRandomUuid(); result.transaction_id = last_transaction_id_.value(); return result; } @@ -243,16 +266,17 @@ void RtpSenderBase::SetParametersInternal(const RtpParameters& parameters, "Attempted to set an unimplemented parameter of RtpParameters."); RTC_LOG(LS_ERROR) << error.message() << " (" << ::webrtc::ToString(error.type()) << ")"; - webrtc::InvokeSetParametersCallback(callback, error); + InvokeSetParametersCallback(callback, error); return; } if (!media_channel_ || !ssrc_) { - auto result = cricket::CheckRtpParametersInvalidModificationAndValues( - init_parameters_, parameters, codec_preferences_, absl::nullopt); + auto result = CheckRtpParametersInvalidModificationAndValues( + init_parameters_, parameters, send_codecs_, std::nullopt, + env_.field_trials()); if (result.ok()) { init_parameters_ = parameters; } - webrtc::InvokeSetParametersCallback(callback, result); + InvokeSetParametersCallback(callback, result); return; } auto task = [&, callback = std::move(callback), @@ -265,16 +289,16 @@ void RtpSenderBase::SetParametersInternal(const RtpParameters& parameters, old_parameters.encodings); } - RTCError result = cricket::CheckRtpParametersInvalidModificationAndValues( - old_parameters, rtp_parameters); + RTCError result = CheckRtpParametersInvalidModificationAndValues( + old_parameters, rtp_parameters, env_.field_trials()); if (!result.ok()) { - webrtc::InvokeSetParametersCallback(callback, result); + InvokeSetParametersCallback(callback, result); return; } result = CheckCodecParameters(rtp_parameters); if (!result.ok()) { - webrtc::InvokeSetParametersCallback(callback, result); + InvokeSetParametersCallback(callback, result); return; } @@ -298,8 +322,9 @@ RTCError RtpSenderBase::SetParametersInternalWithAllLayers( "Attempted to set an unimplemented parameter of RtpParameters."); } if (!media_channel_ || !ssrc_) { - auto result = cricket::CheckRtpParametersInvalidModificationAndValues( - init_parameters_, parameters, codec_preferences_, absl::nullopt); + auto result = CheckRtpParametersInvalidModificationAndValues( + init_parameters_, parameters, send_codecs_, std::nullopt, + env_.field_trials()); if (result.ok()) { init_parameters_ = parameters; } @@ -339,23 +364,21 @@ RTCError RtpSenderBase::CheckSetParameters(const RtpParameters& parameters) { } RTCError RtpSenderBase::CheckCodecParameters(const RtpParameters& parameters) { - absl::optional send_codec = media_channel_->GetSendCodec(); + std::optional send_codec = media_channel_->GetSendCodec(); // Match the currently used codec against the codec preferences to gather // the SVC capabilities. 
- absl::optional send_codec_with_svc_info; - if (send_codec && send_codec->type == cricket::Codec::Type::kVideo) { - auto codec_match = - absl::c_find_if(codec_preferences_, [&](auto& codec_preference) { - return send_codec->Matches(codec_preference); - }); - if (codec_match != codec_preferences_.end()) { + std::optional send_codec_with_svc_info; + if (send_codec && send_codec->type == Codec::Type::kVideo) { + auto codec_match = absl::c_find_if( + send_codecs_, [&](auto& codec) { return send_codec->Matches(codec); }); + if (codec_match != send_codecs_.end()) { send_codec_with_svc_info = *codec_match; } } - return cricket::CheckScalabilityModeValues(parameters, codec_preferences_, - send_codec_with_svc_info); + return CheckScalabilityModeValues(parameters, send_codecs_, + send_codec_with_svc_info); } RTCError RtpSenderBase::SetParameters(const RtpParameters& parameters) { @@ -369,7 +392,7 @@ RTCError RtpSenderBase::SetParameters(const RtpParameters& parameters) { // blocking call is required to keep them working. The encoder configuration // also involves another thread with an asynchronous task, thus we still do // need to wait for the callback to be resolved this way. - std::unique_ptr done_event = std::make_unique(); + std::unique_ptr done_event = std::make_unique(); SetParametersInternal( parameters, [done = done_event.get(), &result](RTCError error) { @@ -377,7 +400,7 @@ RTCError RtpSenderBase::SetParameters(const RtpParameters& parameters) { done->Set(); }, true); - done_event->Wait(rtc::Event::kForever); + done_event->Wait(Event::kForever); last_transaction_id_.reset(); return result; } @@ -389,7 +412,7 @@ void RtpSenderBase::SetParametersAsync(const RtpParameters& parameters, TRACE_EVENT0("webrtc", "RtpSenderBase::SetParametersAsync"); RTCError result = CheckSetParameters(parameters); if (!result.ok()) { - webrtc::InvokeSetParametersCallback(callback, result); + InvokeSetParametersCallback(callback, result); return; } @@ -399,11 +422,28 @@ void RtpSenderBase::SetParametersAsync(const RtpParameters& parameters, signaling_thread_, [this, callback = std::move(callback)](RTCError error) mutable { last_transaction_id_.reset(); - webrtc::InvokeSetParametersCallback(callback, error); + InvokeSetParametersCallback(callback, error); }), false); } +void RtpSenderBase::SetObserver(RtpSenderObserverInterface* observer) { + RTC_DCHECK_RUN_ON(signaling_thread_); + observer_ = observer; + // Deliver any notifications the observer may have missed by being set late. + if (sent_first_packet_ && observer_) { + observer_->OnFirstPacketSent(media_type()); + } +} + +void RtpSenderBase::NotifyFirstPacketSent() { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (observer_) { + observer_->OnFirstPacketSent(media_type()); + } + sent_first_packet_ = true; +} + void RtpSenderBase::set_stream_ids(const std::vector& stream_ids) { stream_ids_.clear(); absl::c_copy_if(stream_ids, std::back_inserter(stream_ids_), @@ -442,7 +482,7 @@ bool RtpSenderBase::SetTrack(MediaStreamTrackInterface* track) { // Attach to new track. bool prev_can_send_track = can_send_track(); // Keep a reference to the old track to keep it alive until we call SetSend. 
- rtc::scoped_refptr old_track = track_; + scoped_refptr old_track = track_; track_ = track; if (track_) { track_->RegisterObserver(this); @@ -500,7 +540,7 @@ void RtpSenderBase::SetSsrc(uint32_t ssrc) { init_parameters_.degradation_preference; media_channel_->SetRtpSendParameters(ssrc_, current_parameters, nullptr); init_parameters_.encodings.clear(); - init_parameters_.degradation_preference = absl::nullopt; + init_parameters_.degradation_preference = std::nullopt; }); } // Attempt to attach the frame decryptor to the current media channel. @@ -508,7 +548,7 @@ void RtpSenderBase::SetSsrc(uint32_t ssrc) { SetFrameEncryptor(frame_encryptor_); } if (frame_transformer_) { - SetEncoderToPacketizerFrameTransformer(frame_transformer_); + SetFrameTransformer(frame_transformer_); } if (encoder_selector_) { SetEncoderSelectorOnChannel(); @@ -575,15 +615,21 @@ RTCError RtpSenderBase::DisableEncodingLayers( RTCError result = SetParametersInternalWithAllLayers(parameters); if (result.ok()) { - disabled_rids_.insert(disabled_rids_.end(), rids.begin(), rids.end()); + for (const auto& rid : rids) { + // Avoid inserting duplicates. + if (std::find(disabled_rids_.begin(), disabled_rids_.end(), rid) == + disabled_rids_.end()) { + disabled_rids_.push_back(rid); + } + } // Invalidate any transaction upon success. last_transaction_id_.reset(); } return result; } -void RtpSenderBase::SetEncoderToPacketizerFrameTransformer( - rtc::scoped_refptr frame_transformer) { +void RtpSenderBase::SetFrameTransformer( + scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(signaling_thread_); frame_transformer_ = std::move(frame_transformer); if (media_channel_ && ssrc_ && !stopped_) { @@ -608,7 +654,7 @@ void LocalAudioSinkAdapter::OnData( int sample_rate, size_t number_of_channels, size_t number_of_frames, - absl::optional absolute_capture_timestamp_ms) { + std::optional absolute_capture_timestamp_ms) { TRACE_EVENT2("webrtc", "LocalAudioSinkAdapter::OnData", "sample_rate", sample_rate, "number_of_frames", number_of_frames); MutexLock lock(&lock_); @@ -619,30 +665,32 @@ void LocalAudioSinkAdapter::OnData( } } -void LocalAudioSinkAdapter::SetSink(cricket::AudioSource::Sink* sink) { +void LocalAudioSinkAdapter::SetSink(AudioSource::Sink* sink) { MutexLock lock(&lock_); RTC_DCHECK(!sink || !sink_); sink_ = sink; } -rtc::scoped_refptr AudioRtpSender::Create( - rtc::Thread* worker_thread, +scoped_refptr AudioRtpSender::Create( + const webrtc::Environment& env, + Thread* worker_thread, const std::string& id, LegacyStatsCollectorInterface* stats, SetStreamsObserver* set_streams_observer) { - return rtc::make_ref_counted(worker_thread, id, stats, - set_streams_observer); + return make_ref_counted(env, worker_thread, id, stats, + set_streams_observer); } -AudioRtpSender::AudioRtpSender(rtc::Thread* worker_thread, +AudioRtpSender::AudioRtpSender(const webrtc::Environment& env, + Thread* worker_thread, const std::string& id, LegacyStatsCollectorInterface* legacy_stats, SetStreamsObserver* set_streams_observer) - : RtpSenderBase(worker_thread, id, set_streams_observer), + : RtpSenderBase(env, worker_thread, id, set_streams_observer), legacy_stats_(legacy_stats), - dtmf_sender_(DtmfSender::Create(rtc::Thread::Current(), this)), + dtmf_sender_(DtmfSender::Create(Thread::Current(), this)), dtmf_sender_proxy_( - DtmfSenderProxy::Create(rtc::Thread::Current(), dtmf_sender_)), + DtmfSenderProxy::Create(Thread::Current(), dtmf_sender_)), sink_adapter_(new LocalAudioSinkAdapter()) {} AudioRtpSender::~AudioRtpSender() { @@ -717,7 +765,7 @@ 
void AudioRtpSender::RemoveTrackFromStats() { } } -rtc::scoped_refptr AudioRtpSender::GetDtmfSender() const { +scoped_refptr AudioRtpSender::GetDtmfSender() const { RTC_DCHECK_RUN_ON(signaling_thread_); return dtmf_sender_proxy_; } @@ -738,7 +786,7 @@ void AudioRtpSender::SetSend() { RTC_LOG(LS_ERROR) << "SetAudioSend: No audio channel exists."; return; } - cricket::AudioOptions options; + AudioOptions options; #if !defined(WEBRTC_CHROMIUM_BUILD) && !defined(WEBRTC_WEBKIT_BUILD) // TODO(tommi): Remove this hack when we move CreateAudioSource out of // PeerConnection. This is a bit of a strange way to apply local audio @@ -769,7 +817,7 @@ void AudioRtpSender::ClearSend() { RTC_LOG(LS_WARNING) << "ClearAudioSend: No audio channel exists."; return; } - cricket::AudioOptions options; + AudioOptions options; bool success = worker_thread_->BlockingCall([&] { return voice_media_channel()->SetAudioSend(ssrc_, false, &options, nullptr); }); @@ -778,18 +826,20 @@ void AudioRtpSender::ClearSend() { } } -rtc::scoped_refptr VideoRtpSender::Create( - rtc::Thread* worker_thread, +scoped_refptr VideoRtpSender::Create( + const Environment& env, + Thread* worker_thread, const std::string& id, SetStreamsObserver* set_streams_observer) { - return rtc::make_ref_counted(worker_thread, id, - set_streams_observer); + return make_ref_counted(env, worker_thread, id, + set_streams_observer); } -VideoRtpSender::VideoRtpSender(rtc::Thread* worker_thread, +VideoRtpSender::VideoRtpSender(const Environment& env, + Thread* worker_thread, const std::string& id, SetStreamsObserver* set_streams_observer) - : RtpSenderBase(worker_thread, id, set_streams_observer) {} + : RtpSenderBase(env, worker_thread, id, set_streams_observer) {} VideoRtpSender::~VideoRtpSender() { Stop(); @@ -814,7 +864,7 @@ void VideoRtpSender::AttachTrack() { cached_track_content_hint_ = video_track()->content_hint(); } -rtc::scoped_refptr VideoRtpSender::GetDtmfSender() const { +scoped_refptr VideoRtpSender::GetDtmfSender() const { RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DLOG(LS_ERROR) << "Tried to get DTMF sender from video sender."; return nullptr; @@ -856,7 +906,7 @@ void VideoRtpSender::SetSend() { RTC_LOG(LS_ERROR) << "SetVideoSend: No video channel exists."; return; } - cricket::VideoOptions options; + VideoOptions options; VideoTrackSourceInterface* source = video_track()->GetSource(); if (source) { options.is_screencast = source->is_screencast(); diff --git a/pc/rtp_sender.h b/pc/rtp_sender.h index d29c3760e6..ef7310ff5e 100644 --- a/pc/rtp_sender.h +++ b/pc/rtp_sender.h @@ -19,13 +19,15 @@ #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/crypto/frame_encryptor_interface.h" #include "api/dtls_transport_interface.h" #include "api/dtmf_sender_interface.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" #include "api/frame_transformer_interface.h" #include "api/media_stream_interface.h" #include "api/media_types.h" @@ -54,8 +56,7 @@ class RtpSenderInternal : public RtpSenderInterface { // A VoiceMediaChannel should be used for audio RtpSenders and // a VideoMediaChannel should be used for video RtpSenders. // Must call SetMediaChannel(nullptr) before the media channel is destroyed. - virtual void SetMediaChannel( - cricket::MediaSendChannelInterface* media_channel) = 0; + virtual void SetMediaChannel(MediaSendChannelInterface* media_channel) = 0; // Used to set the SSRC of the sender, once a local description has been set. 
// If `ssrc` is 0, this indiates that the sender should disconnect from the @@ -67,7 +68,7 @@ class RtpSenderInternal : public RtpSenderInterface { virtual void set_init_send_encodings( const std::vector& init_send_encodings) = 0; virtual void set_transport( - rtc::scoped_refptr dtls_transport) = 0; + scoped_refptr dtls_transport) = 0; virtual void Stop() = 0; @@ -86,9 +87,7 @@ class RtpSenderInternal : public RtpSenderInterface { const RtpParameters& parameters) = 0; // Additional checks that are specific to the current codec settings - virtual RTCError CheckCodecParameters(const RtpParameters& parameters) { - return webrtc::RTCError::OK(); - } + virtual RTCError CheckCodecParameters(const RtpParameters& parameters) = 0; // Returns an ID that changes every time SetTrack() is called, but // otherwise remains constant. Used to generate IDs for stats. @@ -104,8 +103,10 @@ class RtpSenderInternal : public RtpSenderInterface { // Used by the owning transceiver to inform the sender on the currently // selected codecs. - virtual void SetCodecPreferences( - std::vector codec_preferences) = 0; + virtual void SetSendCodecs(std::vector send_codecs) = 0; + virtual std::vector GetSendCodecs() const = 0; + + virtual void NotifyFirstPacketSent() = 0; }; // Shared implementation for RtpSenderInternal interface. @@ -121,11 +122,10 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { // A VoiceMediaChannel should be used for audio RtpSenders and // a VideoMediaChannel should be used for video RtpSenders. // Must call SetMediaChannel(nullptr) before the media channel is destroyed. - void SetMediaChannel( - cricket::MediaSendChannelInterface* media_channel) override; + void SetMediaChannel(MediaSendChannelInterface* media_channel) override; bool SetTrack(MediaStreamTrackInterface* track) override; - rtc::scoped_refptr track() const override { + scoped_refptr track() const override { // This method is currently called from the worker thread by // RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n. // RTC_DCHECK_RUN_ON(signaling_thread_); @@ -182,19 +182,18 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { } void set_transport( - rtc::scoped_refptr dtls_transport) override { + scoped_refptr dtls_transport) override { dtls_transport_ = dtls_transport; } - rtc::scoped_refptr dtls_transport() const override { + scoped_refptr dtls_transport() const override { RTC_DCHECK_RUN_ON(signaling_thread_); return dtls_transport_; } void SetFrameEncryptor( - rtc::scoped_refptr frame_encryptor) override; + scoped_refptr frame_encryptor) override; - rtc::scoped_refptr GetFrameEncryptor() - const override { + scoped_refptr GetFrameEncryptor() const override { return frame_encryptor_; } @@ -209,8 +208,8 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { // If the specified list is empty, this is a no-op. 
RTCError DisableEncodingLayers(const std::vector& rid) override; - void SetEncoderToPacketizerFrameTransformer( - rtc::scoped_refptr frame_transformer) override; + void SetFrameTransformer( + scoped_refptr frame_transformer) override; void SetEncoderSelector( std::unique_ptr @@ -223,16 +222,20 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { is_transceiver_stopped_ = true; } - void SetCodecPreferences( - std::vector codec_preferences) override { - codec_preferences_ = codec_preferences; + void SetSendCodecs(std::vector send_codecs) override { + send_codecs_ = send_codecs; } + std::vector GetSendCodecs() const override { return send_codecs_; } + + void NotifyFirstPacketSent() override; + void SetObserver(RtpSenderObserverInterface* observer) override; protected: // If `set_streams_observer` is not null, it is invoked when SetStreams() // is called. `set_streams_observer` is not owned by this object. If not // null, it must be valid at least until this sender becomes stopped. - RtpSenderBase(rtc::Thread* worker_thread, + RtpSenderBase(const Environment& env, + Thread* worker_thread, const std::string& id, SetStreamsObserver* set_streams_observer); // TODO(bugs.webrtc.org/8694): Since SSRC == 0 is technically valid, figure @@ -253,8 +256,9 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { virtual void AddTrackToStats() {} virtual void RemoveTrackFromStats() {} - rtc::Thread* const signaling_thread_; - rtc::Thread* const worker_thread_; + const Environment env_; + Thread* const signaling_thread_; + Thread* const worker_thread_; uint32_t ssrc_ = 0; bool stopped_ RTC_GUARDED_BY(signaling_thread_) = false; bool is_transceiver_stopped_ RTC_GUARDED_BY(signaling_thread_) = false; @@ -263,29 +267,31 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { std::vector stream_ids_; RtpParameters init_parameters_; - std::vector codec_preferences_; + std::vector send_codecs_; // TODO(tommi): `media_channel_` and several other member variables in this // class (ssrc_, stopped_, etc) are accessed from more than one thread without // a guard or lock. Internally there are also several Invoke()s that we could // remove since the upstream code may already be performing several operations // on the worker thread. - cricket::MediaSendChannelInterface* media_channel_ = nullptr; - rtc::scoped_refptr track_; + MediaSendChannelInterface* media_channel_ = nullptr; + scoped_refptr track_; - rtc::scoped_refptr dtls_transport_; - rtc::scoped_refptr frame_encryptor_; + scoped_refptr dtls_transport_; + scoped_refptr frame_encryptor_; // `last_transaction_id_` is used to verify that `SetParameters` is receiving // the parameters object that was last returned from `GetParameters`. // As such, it is used for internal verification and is not observable by the // the client. It is marked as mutable to enable `GetParameters` to be a // const method. - mutable absl::optional last_transaction_id_; + mutable std::optional last_transaction_id_; std::vector disabled_rids_; SetStreamsObserver* set_streams_observer_ = nullptr; + RtpSenderObserverInterface* observer_ = nullptr; + bool sent_first_packet_ = false; - rtc::scoped_refptr frame_transformer_; + scoped_refptr frame_transformer_; std::unique_ptr encoder_selector_; @@ -295,7 +301,7 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { // LocalAudioSinkAdapter receives data callback as a sink to the local // AudioTrack, and passes the data to the sink of AudioSource. 
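The `last_transaction_id_` member above (now a std::optional<std::string>) exists to tie SetParameters to the object handed out by the most recent GetParameters call. A self-contained sketch of that handshake follows; the types and error values are simplified stand-ins, not the WebRTC classes.

// Illustrative sketch only; not part of the diff. Simplified stand-in types.
#include <optional>
#include <string>

enum class ErrorType { kNone, kInvalidState, kInvalidModification };

struct Parameters {
  std::string transaction_id;
  int max_bitrate_bps = -1;
};

class Sender {
 public:
  // Stamps the returned snapshot with a fresh transaction id and remembers it.
  Parameters GetParameters() {
    Parameters p = current_;
    p.transaction_id = "txn-" + std::to_string(++counter_);
    last_transaction_id_ = p.transaction_id;
    return p;
  }

  // Only accepts the snapshot produced by the latest GetParameters() call.
  ErrorType SetParameters(const Parameters& p) {
    if (!last_transaction_id_.has_value())
      return ErrorType::kInvalidState;  // GetParameters() not called first.
    if (p.transaction_id != *last_transaction_id_)
      return ErrorType::kInvalidModification;  // Stale or foreign snapshot.
    current_ = p;
    last_transaction_id_.reset();  // A snapshot can be applied only once.
    return ErrorType::kNone;
  }

 private:
  Parameters current_;
  int counter_ = 0;
  // Mirrors the mutable std::optional<std::string> member in RtpSenderBase.
  std::optional<std::string> last_transaction_id_;
};

This also matches the unit tests further down: calling SetParameters twice without an intervening GetParameters fails with an invalid-state error.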
class LocalAudioSinkAdapter : public AudioTrackSinkInterface, - public cricket::AudioSource { + public AudioSource { public: LocalAudioSinkAdapter(); virtual ~LocalAudioSinkAdapter(); @@ -307,7 +313,7 @@ class LocalAudioSinkAdapter : public AudioTrackSinkInterface, int sample_rate, size_t number_of_channels, size_t number_of_frames, - absl::optional absolute_capture_timestamp_ms) override; + std::optional absolute_capture_timestamp_ms) override; // AudioSinkInterface implementation. void OnData(const void* audio_data, @@ -317,16 +323,16 @@ class LocalAudioSinkAdapter : public AudioTrackSinkInterface, size_t number_of_frames) override { OnData(audio_data, bits_per_sample, sample_rate, number_of_channels, number_of_frames, - /*absolute_capture_timestamp_ms=*/absl::nullopt); + /*absolute_capture_timestamp_ms=*/std::nullopt); } // AudioSinkInterface implementation. int NumPreferredChannels() const override { return num_preferred_channels_; } - // cricket::AudioSource implementation. - void SetSink(cricket::AudioSource::Sink* sink) override; + // webrtc::AudioSource implementation. + void SetSink(AudioSource::Sink* sink) override; - cricket::AudioSource::Sink* sink_; + AudioSource::Sink* sink_; // Critical section protecting `sink_`. Mutex lock_; int num_preferred_channels_ = -1; @@ -341,8 +347,9 @@ class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { // If `set_streams_observer` is not null, it is invoked when SetStreams() // is called. `set_streams_observer` is not owned by this object. If not // null, it must be valid at least until this sender becomes stopped. - static rtc::scoped_refptr Create( - rtc::Thread* worker_thread, + static scoped_refptr Create( + const Environment& env, + Thread* worker_thread, const std::string& id, LegacyStatsCollectorInterface* stats, SetStreamsObserver* set_streams_observer); @@ -355,18 +362,19 @@ class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { // ObserverInterface implementation. void OnChanged() override; - cricket::MediaType media_type() const override { - return cricket::MEDIA_TYPE_AUDIO; + webrtc::MediaType media_type() const override { + return webrtc::MediaType::AUDIO; } std::string track_kind() const override { return MediaStreamTrackInterface::kAudioKind; } - rtc::scoped_refptr GetDtmfSender() const override; + scoped_refptr GetDtmfSender() const override; RTCError GenerateKeyFrame(const std::vector& rids) override; protected: - AudioRtpSender(rtc::Thread* worker_thread, + AudioRtpSender(const Environment& env, + Thread* worker_thread, const std::string& id, LegacyStatsCollectorInterface* legacy_stats, SetStreamsObserver* set_streams_observer); @@ -381,21 +389,21 @@ class AudioRtpSender : public DtmfProviderInterface, public RtpSenderBase { void RemoveTrackFromStats() override; private: - cricket::VoiceMediaSendChannelInterface* voice_media_channel() { + VoiceMediaSendChannelInterface* voice_media_channel() { return media_channel_->AsVoiceSendChannel(); } - rtc::scoped_refptr audio_track() const { - return rtc::scoped_refptr( + scoped_refptr audio_track() const { + return scoped_refptr( static_cast(track_.get())); } LegacyStatsCollectorInterface* legacy_stats_ = nullptr; - rtc::scoped_refptr dtmf_sender_; - rtc::scoped_refptr dtmf_sender_proxy_; + scoped_refptr dtmf_sender_; + scoped_refptr dtmf_sender_proxy_; bool cached_track_enabled_ = false; // Used to pass the data callback from the `track_` to the other end of - // cricket::AudioSource. + // webrtc::AudioSource. 
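LocalAudioSinkAdapter, declared next, sits between two interfaces: it is the AudioTrack's sink and forwards each OnData() callback to whatever AudioSource::Sink is currently attached, with a mutex guarding the downstream pointer. A reduced sketch of that adapter shape follows; the two interfaces are invented for illustration and are not the WebRTC definitions.

// Illustrative sketch only; not part of the diff. Interfaces are invented.
#include <cstddef>
#include <mutex>

// Upstream: what the track calls into.
class TrackSink {
 public:
  virtual ~TrackSink() = default;
  virtual void OnData(const void* audio, size_t frames) = 0;
};

// Downstream: what the sending side registers.
class SourceSink {
 public:
  virtual ~SourceSink() = default;
  virtual void OnData(const void* audio, size_t frames) = 0;
};

// Receives data as the track's sink and passes it on to the source's sink.
class SinkAdapter : public TrackSink {
 public:
  void OnData(const void* audio, size_t frames) override {
    std::lock_guard<std::mutex> lock(lock_);
    if (sink_ != nullptr) {
      sink_->OnData(audio, frames);  // Forward only while a sink is attached.
    }
  }

  // Called when the send path attaches or detaches (nullptr) its sink.
  void SetSink(SourceSink* sink) {
    std::lock_guard<std::mutex> lock(lock_);
    sink_ = sink;
  }

 private:
  std::mutex lock_;      // Guards `sink_`, as in the real adapter.
  SourceSink* sink_ = nullptr;
};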
std::unique_ptr sink_adapter_; }; @@ -406,8 +414,9 @@ class VideoRtpSender : public RtpSenderBase { // If `set_streams_observer` is not null, it is invoked when SetStreams() // is called. `set_streams_observer` is not owned by this object. If not // null, it must be valid at least until this sender becomes stopped. - static rtc::scoped_refptr Create( - rtc::Thread* worker_thread, + static scoped_refptr Create( + const Environment& env, + Thread* worker_thread, const std::string& id, SetStreamsObserver* set_streams_observer); virtual ~VideoRtpSender(); @@ -415,18 +424,19 @@ class VideoRtpSender : public RtpSenderBase { // ObserverInterface implementation void OnChanged() override; - cricket::MediaType media_type() const override { - return cricket::MEDIA_TYPE_VIDEO; + webrtc::MediaType media_type() const override { + return webrtc::MediaType::VIDEO; } std::string track_kind() const override { return MediaStreamTrackInterface::kVideoKind; } - rtc::scoped_refptr GetDtmfSender() const override; + scoped_refptr GetDtmfSender() const override; RTCError GenerateKeyFrame(const std::vector& rids) override; protected: - VideoRtpSender(rtc::Thread* worker_thread, + VideoRtpSender(const Environment& env, + Thread* worker_thread, const std::string& id, SetStreamsObserver* set_streams_observer); @@ -437,11 +447,11 @@ class VideoRtpSender : public RtpSenderBase { void AttachTrack() override; private: - cricket::VideoMediaSendChannelInterface* video_media_channel() { + VideoMediaSendChannelInterface* video_media_channel() { return media_channel_->AsVideoSendChannel(); } - rtc::scoped_refptr video_track() const { - return rtc::scoped_refptr( + scoped_refptr video_track() const { + return scoped_refptr( static_cast(track_.get())); } diff --git a/pc/rtp_sender_proxy.h b/pc/rtp_sender_proxy.h index 39862eb133..ec2f8a9053 100644 --- a/pc/rtp_sender_proxy.h +++ b/pc/rtp_sender_proxy.h @@ -26,10 +26,10 @@ namespace webrtc { BEGIN_PRIMARY_PROXY_MAP(RtpSender) PROXY_PRIMARY_THREAD_DESTRUCTOR() PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*) -PROXY_CONSTMETHOD0(rtc::scoped_refptr, track) -PROXY_CONSTMETHOD0(rtc::scoped_refptr, dtls_transport) +PROXY_CONSTMETHOD0(scoped_refptr, track) +PROXY_CONSTMETHOD0(scoped_refptr, dtls_transport) PROXY_CONSTMETHOD0(uint32_t, ssrc) -BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type) +BYPASS_PROXY_CONSTMETHOD0(webrtc::MediaType, media_type) BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_CONSTMETHOD0(std::vector, stream_ids) PROXY_CONSTMETHOD0(std::vector, init_send_encodings) @@ -39,16 +39,14 @@ PROXY_METHOD2(void, SetParametersAsync, const RtpParameters&, SetParametersCallback) -PROXY_CONSTMETHOD0(rtc::scoped_refptr, GetDtmfSender) -PROXY_METHOD1(void, - SetFrameEncryptor, - rtc::scoped_refptr) -PROXY_CONSTMETHOD0(rtc::scoped_refptr, - GetFrameEncryptor) +PROXY_CONSTMETHOD0(scoped_refptr, GetDtmfSender) +PROXY_METHOD1(void, SetFrameEncryptor, scoped_refptr) +PROXY_METHOD1(void, SetObserver, RtpSenderObserverInterface*) +PROXY_CONSTMETHOD0(scoped_refptr, GetFrameEncryptor) PROXY_METHOD1(void, SetStreams, const std::vector&) PROXY_METHOD1(void, - SetEncoderToPacketizerFrameTransformer, - rtc::scoped_refptr) + SetFrameTransformer, + scoped_refptr) PROXY_METHOD1(void, SetEncoderSelector, std::unique_ptr) diff --git a/pc/rtp_sender_receiver_unittest.cc b/pc/rtp_sender_receiver_unittest.cc index 3092e53c2d..4c5722bd64 100644 --- a/pc/rtp_sender_receiver_unittest.cc +++ b/pc/rtp_sender_receiver_unittest.cc @@ -13,26 +13,29 @@ #include #include #include +#include 
#include #include #include #include "absl/algorithm/container.h" -#include "absl/memory/memory.h" -#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_options.h" #include "api/crypto/crypto_options.h" #include "api/crypto/frame_decryptor_interface.h" #include "api/crypto/frame_encryptor_interface.h" #include "api/dtmf_sender_interface.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/make_ref_counted.h" #include "api/media_stream_interface.h" #include "api/rtc_error.h" -#include "api/rtc_event_log/rtc_event_log.h" #include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" #include "api/scoped_refptr.h" #include "api/test/fake_frame_decryptor.h" #include "api/test/fake_frame_encryptor.h" +#include "api/test/rtc_error_matchers.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" #include "api/video/video_bitrate_allocator_factory.h" #include "api/video/video_codec_constants.h" @@ -45,12 +48,11 @@ #include "media/base/stream_params.h" #include "media/base/test_utils.h" #include "media/engine/fake_webrtc_call.h" -#include "p2p/base/dtls_transport_internal.h" -#include "p2p/base/fake_dtls_transport.h" #include "p2p/base/p2p_constants.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "p2p/dtls/fake_dtls_transport.h" #include "pc/audio_rtp_receiver.h" #include "pc/audio_track.h" -#include "pc/channel.h" #include "pc/dtls_srtp_transport.h" #include "pc/local_audio_source.h" #include "pc/media_stream.h" @@ -60,19 +62,12 @@ #include "pc/video_rtp_receiver.h" #include "pc/video_track.h" #include "rtc_base/checks.h" -#include "rtc_base/gunit.h" #include "rtc_base/thread.h" +#include "rtc_base/unique_id_generator.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/run_loop.h" -#include "test/scoped_key_value_config.h" - -using ::testing::_; -using ::testing::ContainerEq; -using ::testing::Exactly; -using ::testing::InvokeWithoutArgs; -using ::testing::Return; -using RidList = std::vector; +#include "test/wait_until.h" namespace { @@ -85,7 +80,6 @@ static const uint32_t kAudioSsrc = 99; static const uint32_t kAudioSsrc2 = 101; static const uint32_t kVideoSsrcSimulcast = 102; static const uint32_t kVideoSimulcastLayerCount = 2; -static const int kDefaultTimeout = 10000; // 10 seconds. class MockSetStreamsObserver : public webrtc::RtpSenderBase::SetStreamsObserver { @@ -97,59 +91,61 @@ class MockSetStreamsObserver namespace webrtc { +using ::testing::ContainerEq; +using RidList = std::vector; + class RtpSenderReceiverTest : public ::testing::Test, public ::testing::WithParamInterface> { public: RtpSenderReceiverTest() - : network_thread_(rtc::Thread::Current()), - worker_thread_(rtc::Thread::Current()), + : network_thread_(Thread::Current()), + worker_thread_(Thread::Current()), video_bitrate_allocator_factory_( - webrtc::CreateBuiltinVideoBitrateAllocatorFactory()), + CreateBuiltinVideoBitrateAllocatorFactory()), // Create fake media engine/etc. so we can create channels to use to // test RtpSenders/RtpReceivers. 
- media_engine_(std::make_unique()), - fake_call_(worker_thread_, network_thread_), + media_engine_(std::make_unique()), + fake_call_(env_, worker_thread_, network_thread_), local_stream_(MediaStream::Create(kStreamId1)) { - rtp_dtls_transport_ = std::make_unique( - "fake_dtls_transport", cricket::ICE_CANDIDATE_COMPONENT_RTP); + rtp_dtls_transport_ = std::make_unique( + "fake_dtls_transport", ICE_CANDIDATE_COMPONENT_RTP); rtp_transport_ = CreateDtlsSrtpTransport(); // Create the channels, discard the result; we get them later. // Fake media channels are owned by the media engine. voice_media_send_channel_ = media_engine_->voice().CreateSendChannel( - &fake_call_, cricket::MediaConfig(), cricket::AudioOptions(), - webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); + &fake_call_, MediaConfig(), AudioOptions(), CryptoOptions(), + AudioCodecPairId::Create()); video_media_send_channel_ = media_engine_->video().CreateSendChannel( - &fake_call_, cricket::MediaConfig(), cricket::VideoOptions(), - webrtc::CryptoOptions(), video_bitrate_allocator_factory_.get()); + &fake_call_, MediaConfig(), VideoOptions(), CryptoOptions(), + video_bitrate_allocator_factory_.get()); voice_media_receive_channel_ = media_engine_->voice().CreateReceiveChannel( - &fake_call_, cricket::MediaConfig(), cricket::AudioOptions(), - webrtc::CryptoOptions(), webrtc::AudioCodecPairId::Create()); + &fake_call_, MediaConfig(), AudioOptions(), CryptoOptions(), + AudioCodecPairId::Create()); video_media_receive_channel_ = media_engine_->video().CreateReceiveChannel( - &fake_call_, cricket::MediaConfig(), cricket::VideoOptions(), - webrtc::CryptoOptions()); + &fake_call_, MediaConfig(), VideoOptions(), CryptoOptions()); // Create streams for predefined SSRCs. Streams need to exist in order // for the senders and receievers to apply parameters to them. // Normally these would be created by SetLocalDescription and // SetRemoteDescription. 
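The fixture pre-registers a StreamParams entry for every SSRC it will use because the fake channels, like the real ones, can only apply sender settings to a stream they already know about. A toy version of that contract is sketched below; FakeSendChannel and its methods are invented names, not the WebRTC fakes.

// Illustrative sketch only; not part of the diff. All names are invented.
#include <cstdint>
#include <map>

struct StreamSettings {
  bool sending = false;
  int max_bitrate_bps = -1;
};

class FakeSendChannel {
 public:
  // In the real stack this happens as a side effect of SetLocalDescription.
  void AddSendStream(uint32_t ssrc) { streams_[ssrc] = StreamSettings(); }

  // Applying settings fails unless the stream was registered first.
  bool SetSend(uint32_t ssrc, bool sending) {
    auto it = streams_.find(ssrc);
    if (it == streams_.end()) return false;
    it->second.sending = sending;
    return true;
  }

 private:
  std::map<uint32_t, StreamSettings> streams_;
};

// Usage analogous to the fixture:
//   FakeSendChannel channel;
//   channel.AddSendStream(99);            // cf. StreamParams::CreateLegacy(kAudioSsrc)
//   bool ok = channel.SetSend(99, true);  // succeeds only because 99 exists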
voice_media_send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kAudioSsrc)); + StreamParams::CreateLegacy(kAudioSsrc)); voice_media_receive_channel_->AddRecvStream( - cricket::StreamParams::CreateLegacy(kAudioSsrc)); + StreamParams::CreateLegacy(kAudioSsrc)); voice_media_send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kAudioSsrc2)); + StreamParams::CreateLegacy(kAudioSsrc2)); voice_media_receive_channel_->AddRecvStream( - cricket::StreamParams::CreateLegacy(kAudioSsrc2)); + StreamParams::CreateLegacy(kAudioSsrc2)); video_media_send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kVideoSsrc)); + StreamParams::CreateLegacy(kVideoSsrc)); video_media_receive_channel_->AddRecvStream( - cricket::StreamParams::CreateLegacy(kVideoSsrc)); + StreamParams::CreateLegacy(kVideoSsrc)); video_media_send_channel_->AddSendStream( - cricket::StreamParams::CreateLegacy(kVideoSsrc2)); + StreamParams::CreateLegacy(kVideoSsrc2)); video_media_receive_channel_->AddRecvStream( - cricket::StreamParams::CreateLegacy(kVideoSsrc2)); + StreamParams::CreateLegacy(kVideoSsrc2)); } ~RtpSenderReceiverTest() { @@ -162,9 +158,9 @@ class RtpSenderReceiverTest audio_track_ = nullptr; } - std::unique_ptr CreateDtlsSrtpTransport() { - auto dtls_srtp_transport = std::make_unique( - /*rtcp_mux_required=*/true, field_trials_); + std::unique_ptr CreateDtlsSrtpTransport() { + auto dtls_srtp_transport = std::make_unique( + /*rtcp_mux_required=*/true, env_.field_trials()); dtls_srtp_transport->SetDtlsTransports(rtp_dtls_transport_.get(), /*rtcp_dtls_transport=*/nullptr); return dtls_srtp_transport; @@ -172,9 +168,9 @@ class RtpSenderReceiverTest // Needed to use DTMF sender. void AddDtmfCodec() { - cricket::AudioSenderParameter params; - const cricket::AudioCodec kTelephoneEventCodec = - cricket::CreateAudioCodec(106, "telephone-event", 8000, 1); + AudioSenderParameter params; + const Codec kTelephoneEventCodec = + CreateAudioCodec(106, "telephone-event", 8000, 1); params.codecs.push_back(kTelephoneEventCodec); voice_media_send_channel()->SetSenderParameters(params); } @@ -182,24 +178,22 @@ class RtpSenderReceiverTest void AddVideoTrack() { AddVideoTrack(false); } void AddVideoTrack(bool is_screencast) { - rtc::scoped_refptr source( + scoped_refptr source( FakeVideoTrackSource::Create(is_screencast)); - video_track_ = - VideoTrack::Create(kVideoTrackId, source, rtc::Thread::Current()); + video_track_ = VideoTrack::Create(kVideoTrackId, source, Thread::Current()); EXPECT_TRUE(local_stream_->AddTrack(video_track_)); } void CreateAudioRtpSender() { CreateAudioRtpSender(nullptr); } - void CreateAudioRtpSender( - const rtc::scoped_refptr& source) { + void CreateAudioRtpSender(const scoped_refptr& source) { audio_track_ = AudioTrack::Create(kAudioTrackId, source); EXPECT_TRUE(local_stream_->AddTrack(audio_track_)); std::unique_ptr set_streams_observer = std::make_unique(); - audio_rtp_sender_ = - AudioRtpSender::Create(worker_thread_, audio_track_->id(), nullptr, - set_streams_observer.get()); + audio_rtp_sender_ = AudioRtpSender::Create( + CreateEnvironment(), worker_thread_, audio_track_->id(), nullptr, + set_streams_observer.get()); ASSERT_TRUE(audio_rtp_sender_->SetTrack(audio_track_.get())); EXPECT_CALL(*set_streams_observer, OnSetStreams()); audio_rtp_sender_->SetStreams({local_stream_->id()}); @@ -209,8 +203,8 @@ class RtpSenderReceiverTest } void CreateAudioRtpSenderWithNoTrack() { - audio_rtp_sender_ = - AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr, nullptr); + 
audio_rtp_sender_ = AudioRtpSender::Create( + CreateEnvironment(), worker_thread_, /*id=*/"", nullptr, nullptr); audio_rtp_sender_->SetMediaChannel(voice_media_send_channel_.get()); } @@ -220,16 +214,16 @@ class RtpSenderReceiverTest void CreateVideoRtpSender() { CreateVideoRtpSender(false); } - cricket::StreamParams CreateSimulcastStreamParams(int num_layers) { + StreamParams CreateSimulcastStreamParams(int num_layers) { std::vector ssrcs; ssrcs.reserve(num_layers); for (int i = 0; i < num_layers; ++i) { ssrcs.push_back(kVideoSsrcSimulcast + i); } - return cricket::CreateSimStreamParams("cname", ssrcs); + return CreateSimStreamParams("cname", ssrcs); } - uint32_t CreateVideoRtpSender(const cricket::StreamParams& stream_params) { + uint32_t CreateVideoRtpSender(const StreamParams& stream_params) { video_media_send_channel_->AddSendStream(stream_params); uint32_t primary_ssrc = stream_params.first_ssrc(); CreateVideoRtpSender(primary_ssrc); @@ -243,13 +237,12 @@ class RtpSenderReceiverTest uint32_t CreateVideoRtpSenderWithSimulcast( const std::vector& rids) { - cricket::StreamParams stream_params = - CreateSimulcastStreamParams(rids.size()); - std::vector rid_descriptions; - absl::c_transform( - rids, std::back_inserter(rid_descriptions), [](const std::string& rid) { - return cricket::RidDescription(rid, cricket::RidDirection::kSend); - }); + StreamParams stream_params = CreateSimulcastStreamParams(rids.size()); + std::vector rid_descriptions; + absl::c_transform(rids, std::back_inserter(rid_descriptions), + [](const std::string& rid) { + return RidDescription(rid, RidDirection::kSend); + }); stream_params.set_rids(rid_descriptions); return CreateVideoRtpSender(stream_params); } @@ -258,8 +251,9 @@ class RtpSenderReceiverTest AddVideoTrack(is_screencast); std::unique_ptr set_streams_observer = std::make_unique(); - video_rtp_sender_ = VideoRtpSender::Create( - worker_thread_, video_track_->id(), set_streams_observer.get()); + video_rtp_sender_ = + VideoRtpSender::Create(CreateEnvironment(), worker_thread_, + video_track_->id(), set_streams_observer.get()); ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get())); EXPECT_CALL(*set_streams_observer, OnSetStreams()); video_rtp_sender_->SetStreams({local_stream_->id()}); @@ -268,8 +262,8 @@ class RtpSenderReceiverTest VerifyVideoChannelInput(ssrc); } void CreateVideoRtpSenderWithNoTrack() { - video_rtp_sender_ = - VideoRtpSender::Create(worker_thread_, /*id=*/"", nullptr); + video_rtp_sender_ = VideoRtpSender::Create( + CreateEnvironment(), worker_thread_, /*id=*/"", nullptr); video_rtp_sender_->SetMediaChannel(video_media_send_channel()); } @@ -284,9 +278,9 @@ class RtpSenderReceiverTest } void CreateAudioRtpReceiver( - std::vector> streams = {}) { - audio_rtp_receiver_ = rtc::make_ref_counted( - rtc::Thread::Current(), kAudioTrackId, streams, + std::vector> streams = {}) { + audio_rtp_receiver_ = make_ref_counted( + Thread::Current(), kAudioTrackId, streams, /*is_unified_plan=*/true); audio_rtp_receiver_->SetMediaChannel(voice_media_receive_channel()); audio_rtp_receiver_->SetupMediaChannel(kAudioSsrc); @@ -295,9 +289,9 @@ class RtpSenderReceiverTest } void CreateVideoRtpReceiver( - std::vector> streams = {}) { - video_rtp_receiver_ = rtc::make_ref_counted( - rtc::Thread::Current(), kVideoTrackId, streams); + std::vector> streams = {}) { + video_rtp_receiver_ = make_ref_counted( + Thread::Current(), kVideoTrackId, streams); video_rtp_receiver_->SetMediaChannel(video_media_receive_channel()); 
video_rtp_receiver_->SetupMediaChannel(kVideoSsrc); video_track_ = video_rtp_receiver_->video_track(); @@ -305,19 +299,18 @@ class RtpSenderReceiverTest } void CreateVideoRtpReceiverWithSimulcast( - std::vector> streams = {}, + std::vector> streams = {}, int num_layers = kVideoSimulcastLayerCount) { std::vector ssrcs; ssrcs.reserve(num_layers); for (int i = 0; i < num_layers; ++i) ssrcs.push_back(kVideoSsrcSimulcast + i); - cricket::StreamParams stream_params = - cricket::CreateSimStreamParams("cname", ssrcs); + StreamParams stream_params = CreateSimStreamParams("cname", ssrcs); video_media_receive_channel_->AddRecvStream(stream_params); uint32_t primary_ssrc = stream_params.first_ssrc(); - video_rtp_receiver_ = rtc::make_ref_counted( - rtc::Thread::Current(), kVideoTrackId, streams); + video_rtp_receiver_ = make_ref_counted( + Thread::Current(), kVideoTrackId, streams); video_rtp_receiver_->SetMediaChannel(video_media_receive_channel()); video_rtp_receiver_->SetupMediaChannel(primary_ssrc); video_track_ = video_rtp_receiver_->video_track(); @@ -429,7 +422,7 @@ class RtpSenderReceiverTest // This test assumes that some layers have already been disabled. void RunSetLastLayerAsInactiveTest(VideoRtpSender* sender) { auto parameters = sender->GetParameters(); - if (parameters.encodings.size() == 0) { + if (parameters.encodings.empty()) { return; } @@ -450,7 +443,8 @@ class RtpSenderReceiverTest void RunDisableSimulcastLayersWithoutMediaEngineTest( const std::vector& all_layers, const std::vector& disabled_layers) { - auto sender = VideoRtpSender::Create(rtc::Thread::Current(), "1", nullptr); + auto sender = VideoRtpSender::Create(CreateEnvironment(), Thread::Current(), + "1", nullptr); RtpParameters parameters; parameters.encodings.resize(all_layers.size()); for (size_t i = 0; i < all_layers.size(); ++i) { @@ -485,62 +479,59 @@ class RtpSenderReceiverTest // Check that minimum Jitter Buffer delay is propagated to the underlying // `media_channel`. void VerifyRtpReceiverDelayBehaviour( - cricket::MediaReceiveChannelInterface* media_channel, + MediaReceiveChannelInterface* media_channel, RtpReceiverInterface* receiver, uint32_t ssrc) { receiver->SetJitterBufferMinimumDelay(/*delay_seconds=*/0.5); - absl::optional delay_ms = + std::optional delay_ms = media_channel->GetBaseMinimumPlayoutDelayMs(ssrc); // In milliseconds. 
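VerifyRtpReceiverDelayBehaviour, shown in this hunk, checks that the receiver's SetJitterBufferMinimumDelay(seconds) ends up as a per-SSRC base minimum playout delay in milliseconds on the media channel. The seconds-to-milliseconds conversion and the optional return are the interesting parts; a reduced sketch with stand-in types follows.

// Illustrative sketch only; not part of the diff. Simplified stand-in types.
#include <cstdint>
#include <map>
#include <optional>

class FakeReceiveChannel {
 public:
  void SetBaseMinimumPlayoutDelayMs(uint32_t ssrc, int delay_ms) {
    delay_ms_[ssrc] = delay_ms;
  }
  // std::nullopt when no delay has been configured for `ssrc`.
  std::optional<int> GetBaseMinimumPlayoutDelayMs(uint32_t ssrc) const {
    auto it = delay_ms_.find(ssrc);
    if (it == delay_ms_.end()) return std::nullopt;
    return it->second;
  }

 private:
  std::map<uint32_t, int> delay_ms_;
};

class Receiver {
 public:
  Receiver(FakeReceiveChannel* channel, uint32_t ssrc)
      : channel_(channel), ssrc_(ssrc) {}

  // Seconds in the API, milliseconds on the channel: 0.5 s -> 500 ms.
  void SetJitterBufferMinimumDelay(double delay_seconds) {
    channel_->SetBaseMinimumPlayoutDelayMs(
        ssrc_, static_cast<int>(delay_seconds * 1000.0 + 0.5));
  }

 private:
  FakeReceiveChannel* channel_;
  uint32_t ssrc_;
};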
EXPECT_DOUBLE_EQ(0.5, delay_ms.value_or(0) / 1000.0); } protected: - cricket::FakeVideoMediaSendChannel* video_media_send_channel() { - return static_cast( + FakeVideoMediaSendChannel* video_media_send_channel() { + return static_cast( video_media_send_channel_.get()); } - cricket::FakeVoiceMediaSendChannel* voice_media_send_channel() { - return static_cast( + FakeVoiceMediaSendChannel* voice_media_send_channel() { + return static_cast( voice_media_send_channel_.get()); } - cricket::FakeVideoMediaReceiveChannel* video_media_receive_channel() { - return static_cast( + FakeVideoMediaReceiveChannel* video_media_receive_channel() { + return static_cast( video_media_receive_channel_.get()); } - cricket::FakeVoiceMediaReceiveChannel* voice_media_receive_channel() { - return static_cast( + FakeVoiceMediaReceiveChannel* voice_media_receive_channel() { + return static_cast( voice_media_receive_channel_.get()); } test::RunLoop run_loop_; - rtc::Thread* const network_thread_; - rtc::Thread* const worker_thread_; - webrtc::RtcEventLogNull event_log_; + Thread* const network_thread_; + Thread* const worker_thread_; + const Environment env_ = CreateEnvironment(); // The `rtp_dtls_transport_` and `rtp_transport_` should be destroyed after // the `channel_manager`. - std::unique_ptr rtp_dtls_transport_; - std::unique_ptr rtp_transport_; - std::unique_ptr + std::unique_ptr rtp_dtls_transport_; + std::unique_ptr rtp_transport_; + std::unique_ptr video_bitrate_allocator_factory_; - std::unique_ptr media_engine_; - rtc::UniqueRandomIdGenerator ssrc_generator_; - cricket::FakeCall fake_call_; - std::unique_ptr - voice_media_send_channel_; - std::unique_ptr - video_media_send_channel_; - std::unique_ptr + std::unique_ptr media_engine_; + UniqueRandomIdGenerator ssrc_generator_; + FakeCall fake_call_; + std::unique_ptr voice_media_send_channel_; + std::unique_ptr video_media_send_channel_; + std::unique_ptr voice_media_receive_channel_; - std::unique_ptr + std::unique_ptr video_media_receive_channel_; - rtc::scoped_refptr audio_rtp_sender_; - rtc::scoped_refptr video_rtp_sender_; - rtc::scoped_refptr audio_rtp_receiver_; - rtc::scoped_refptr video_rtp_receiver_; - rtc::scoped_refptr local_stream_; - rtc::scoped_refptr video_track_; - rtc::scoped_refptr audio_track_; - webrtc::test::ScopedKeyValueConfig field_trials_; + scoped_refptr audio_rtp_sender_; + scoped_refptr video_rtp_sender_; + scoped_refptr audio_rtp_receiver_; + scoped_refptr video_rtp_receiver_; + scoped_refptr local_stream_; + scoped_refptr video_track_; + scoped_refptr audio_track_; }; // Test that `voice_channel_` is updated when an audio track is associated @@ -583,7 +574,7 @@ TEST_F(RtpSenderReceiverTest, AddAndDestroyVideoRtpReceiverWithStreams) { // Test that the AudioRtpSender applies options from the local audio source. 
TEST_F(RtpSenderReceiverTest, LocalAudioSourceOptionsApplied) { - cricket::AudioOptions options; + AudioOptions options; options.echo_cancellation = true; auto source = LocalAudioSource::Create(&options); CreateAudioRtpSender(source); @@ -651,15 +642,13 @@ TEST_F(RtpSenderReceiverTest, LocalVideoTrackDisable) { TEST_F(RtpSenderReceiverTest, RemoteVideoTrackState) { CreateVideoRtpReceiver(); - EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, video_track_->state()); - EXPECT_EQ(webrtc::MediaSourceInterface::kLive, - video_track_->GetSource()->state()); + EXPECT_EQ(MediaStreamTrackInterface::kLive, video_track_->state()); + EXPECT_EQ(MediaSourceInterface::kLive, video_track_->GetSource()->state()); DestroyVideoRtpReceiver(); - EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, video_track_->state()); - EXPECT_EQ(webrtc::MediaSourceInterface::kEnded, - video_track_->GetSource()->state()); + EXPECT_EQ(MediaStreamTrackInterface::kEnded, video_track_->state()); + EXPECT_EQ(MediaSourceInterface::kEnded, video_track_->GetSource()->state()); DestroyVideoRtpReceiver(); } @@ -733,7 +722,7 @@ TEST_F(RtpSenderReceiverTest, VideoRtpReceiverDelay) { // doesn't have both a track and SSRC. TEST_F(RtpSenderReceiverTest, AudioSenderWithoutTrackAndSsrc) { CreateAudioRtpSenderWithNoTrack(); - rtc::scoped_refptr track = + scoped_refptr track = AudioTrack::Create(kAudioTrackId, nullptr); // Track but no SSRC. @@ -765,7 +754,7 @@ TEST_F(RtpSenderReceiverTest, VideoSenderWithoutTrackAndSsrc) { // has a track and SSRC, when the SSRC is set first. TEST_F(RtpSenderReceiverTest, AudioSenderEarlyWarmupSsrcThenTrack) { CreateAudioRtpSenderWithNoTrack(); - rtc::scoped_refptr track = + scoped_refptr track = AudioTrack::Create(kAudioTrackId, nullptr); audio_rtp_sender_->SetSsrc(kAudioSsrc); audio_rtp_sender_->SetTrack(track.get()); @@ -778,7 +767,7 @@ TEST_F(RtpSenderReceiverTest, AudioSenderEarlyWarmupSsrcThenTrack) { // has a track and SSRC, when the SSRC is set last. 
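The "early warmup" tests around this point assert that a sender only starts sending once it has both a track and an SSRC, and that the order in which the two arrive does not matter. A minimal model of that state machine is sketched below; the class is invented for illustration.

// Illustrative sketch only; not part of the diff. Invented minimal type.
#include <cstdint>

class WarmupSender {
 public:
  void SetTrack(bool has_track) {
    has_track_ = has_track;
    UpdateSending();
  }
  void SetSsrc(uint32_t ssrc) {
    ssrc_ = ssrc;
    UpdateSending();
  }
  bool sending() const { return sending_; }

 private:
  // Sending starts only when both prerequisites are present, whichever of
  // SetTrack()/SetSsrc() happens to arrive last.
  void UpdateSending() { sending_ = has_track_ && ssrc_ != 0; }

  bool has_track_ = false;
  uint32_t ssrc_ = 0;  // 0 means "not yet assigned", as in RtpSenderBase.
  bool sending_ = false;
};

Either call order ends in the same state: SetSsrc then SetTrack, or SetTrack then SetSsrc, both leave sending() == true.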
TEST_F(RtpSenderReceiverTest, AudioSenderEarlyWarmupTrackThenSsrc) { CreateAudioRtpSenderWithNoTrack(); - rtc::scoped_refptr track = + scoped_refptr track = AudioTrack::Create(kAudioTrackId, nullptr); audio_rtp_sender_->SetTrack(track.get()); audio_rtp_sender_->SetSsrc(kAudioSsrc); @@ -888,9 +877,9 @@ TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParametersAsync) { RtpParameters params = audio_rtp_sender_->GetParameters(); EXPECT_EQ(1u, params.encodings.size()); - absl::optional result; + std::optional result; audio_rtp_sender_->SetParametersAsync( - params, [&result](webrtc::RTCError error) { result = error; }); + params, [&result](RTCError error) { result = error; }); run_loop_.Flush(); EXPECT_TRUE(result->ok()); @@ -898,8 +887,8 @@ TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParametersAsync) { } TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParametersBeforeNegotiation) { - audio_rtp_sender_ = - AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr, nullptr); + audio_rtp_sender_ = AudioRtpSender::Create( + CreateEnvironment(), worker_thread_, /*id=*/"", nullptr, nullptr); RtpParameters params = audio_rtp_sender_->GetParameters(); ASSERT_EQ(1u, params.encodings.size()); @@ -915,16 +904,16 @@ TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParametersBeforeNegotiation) { TEST_F(RtpSenderReceiverTest, AudioSenderCanSetParametersAsyncBeforeNegotiation) { - audio_rtp_sender_ = - AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr, nullptr); + audio_rtp_sender_ = AudioRtpSender::Create( + CreateEnvironment(), worker_thread_, /*id=*/"", nullptr, nullptr); - absl::optional result; + std::optional result; RtpParameters params = audio_rtp_sender_->GetParameters(); ASSERT_EQ(1u, params.encodings.size()); params.encodings[0].max_bitrate_bps = 90000; audio_rtp_sender_->SetParametersAsync( - params, [&result](webrtc::RTCError error) { result = error; }); + params, [&result](RTCError error) { result = error; }); run_loop_.Flush(); EXPECT_TRUE(result->ok()); @@ -932,7 +921,7 @@ TEST_F(RtpSenderReceiverTest, EXPECT_EQ(params.encodings[0].max_bitrate_bps, 90000); audio_rtp_sender_->SetParametersAsync( - params, [&result](webrtc::RTCError error) { result = error; }); + params, [&result](RTCError error) { result = error; }); run_loop_.Flush(); EXPECT_TRUE(result->ok()); @@ -946,7 +935,8 @@ TEST_F(RtpSenderReceiverTest, AudioSenderInitParametersMovedAfterNegotiation) { std::unique_ptr set_streams_observer = std::make_unique(); audio_rtp_sender_ = AudioRtpSender::Create( - worker_thread_, audio_track_->id(), nullptr, set_streams_observer.get()); + CreateEnvironment(), worker_thread_, audio_track_->id(), nullptr, + set_streams_observer.get()); ASSERT_TRUE(audio_rtp_sender_->SetTrack(audio_track_.get())); EXPECT_CALL(*set_streams_observer, OnSetStreams()); audio_rtp_sender_->SetStreams({local_stream_->id()}); @@ -961,8 +951,7 @@ TEST_F(RtpSenderReceiverTest, AudioSenderInitParametersMovedAfterNegotiation) { // Simulate the setLocalDescription call std::vector ssrcs(1, 1); - cricket::StreamParams stream_params = - cricket::CreateSimStreamParams("cname", ssrcs); + StreamParams stream_params = CreateSimStreamParams("cname", ssrcs); voice_media_send_channel()->AddSendStream(stream_params); audio_rtp_sender_->SetMediaChannel( voice_media_send_channel()->AsVoiceSendChannel()); @@ -977,8 +966,8 @@ TEST_F(RtpSenderReceiverTest, AudioSenderInitParametersMovedAfterNegotiation) { TEST_F(RtpSenderReceiverTest, AudioSenderMustCallGetParametersBeforeSetParametersBeforeNegotiation) { - audio_rtp_sender_ = - 
AudioRtpSender::Create(worker_thread_, /*id=*/"", nullptr, nullptr); + audio_rtp_sender_ = AudioRtpSender::Create( + CreateEnvironment(), worker_thread_, /*id=*/"", nullptr, nullptr); RtpParameters params; RTCError result = audio_rtp_sender_->SetParameters(params); @@ -1016,13 +1005,13 @@ TEST_F(RtpSenderReceiverTest, RtpParameters params = audio_rtp_sender_->GetParameters(); EXPECT_EQ(1u, params.encodings.size()); - absl::optional result; + std::optional result; audio_rtp_sender_->SetParametersAsync( - params, [&result](webrtc::RTCError error) { result = error; }); + params, [&result](RTCError error) { result = error; }); run_loop_.Flush(); EXPECT_TRUE(result->ok()); audio_rtp_sender_->SetParametersAsync( - params, [&result](webrtc::RTCError error) { result = error; }); + params, [&result](RTCError error) { result = error; }); run_loop_.Flush(); EXPECT_EQ(RTCErrorType::INVALID_STATE, result->type()); @@ -1081,7 +1070,7 @@ TEST_F(RtpSenderReceiverTest, SetAudioMaxSendBitrate) { CreateAudioRtpSender(); EXPECT_EQ(-1, voice_media_send_channel()->max_bps()); - webrtc::RtpParameters params = audio_rtp_sender_->GetParameters(); + RtpParameters params = audio_rtp_sender_->GetParameters(); EXPECT_EQ(1U, params.encodings.size()); EXPECT_FALSE(params.encodings[0].max_bitrate_bps); params.encodings[0].max_bitrate_bps = 1000; @@ -1106,10 +1095,9 @@ TEST_F(RtpSenderReceiverTest, SetAudioMaxSendBitrate) { TEST_F(RtpSenderReceiverTest, SetAudioBitratePriority) { CreateAudioRtpSender(); - webrtc::RtpParameters params = audio_rtp_sender_->GetParameters(); + RtpParameters params = audio_rtp_sender_->GetParameters(); EXPECT_EQ(1U, params.encodings.size()); - EXPECT_EQ(webrtc::kDefaultBitratePriority, - params.encodings[0].bitrate_priority); + EXPECT_EQ(kDefaultBitratePriority, params.encodings[0].bitrate_priority); double new_bitrate_priority = 2.0; params.encodings[0].bitrate_priority = new_bitrate_priority; EXPECT_TRUE(audio_rtp_sender_->SetParameters(params).ok()); @@ -1140,9 +1128,9 @@ TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParametersAsync) { RtpParameters params = video_rtp_sender_->GetParameters(); EXPECT_EQ(1u, params.encodings.size()); - absl::optional result; + std::optional result; video_rtp_sender_->SetParametersAsync( - params, [&result](webrtc::RTCError error) { result = error; }); + params, [&result](RTCError error) { result = error; }); run_loop_.Flush(); EXPECT_TRUE(result->ok()); @@ -1150,8 +1138,8 @@ TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParametersAsync) { } TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParametersBeforeNegotiation) { - video_rtp_sender_ = - VideoRtpSender::Create(worker_thread_, /*id=*/"", nullptr); + video_rtp_sender_ = VideoRtpSender::Create( + CreateEnvironment(), worker_thread_, /*id=*/"", nullptr); RtpParameters params = video_rtp_sender_->GetParameters(); ASSERT_EQ(1u, params.encodings.size()); @@ -1167,22 +1155,22 @@ TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParametersBeforeNegotiation) { TEST_F(RtpSenderReceiverTest, VideoSenderCanSetParametersAsyncBeforeNegotiation) { - video_rtp_sender_ = - VideoRtpSender::Create(worker_thread_, /*id=*/"", nullptr); + video_rtp_sender_ = VideoRtpSender::Create( + CreateEnvironment(), worker_thread_, /*id=*/"", nullptr); - absl::optional result; + std::optional result; RtpParameters params = video_rtp_sender_->GetParameters(); ASSERT_EQ(1u, params.encodings.size()); params.encodings[0].max_bitrate_bps = 90000; video_rtp_sender_->SetParametersAsync( - params, [&result](webrtc::RTCError error) { result = error; 
}); + params, [&result](RTCError error) { result = error; }); run_loop_.Flush(); EXPECT_TRUE(result->ok()); params = video_rtp_sender_->GetParameters(); EXPECT_EQ(params.encodings[0].max_bitrate_bps, 90000); video_rtp_sender_->SetParametersAsync( - params, [&result](webrtc::RTCError error) { result = error; }); + params, [&result](RTCError error) { result = error; }); run_loop_.Flush(); EXPECT_TRUE(result->ok()); @@ -1194,8 +1182,9 @@ TEST_F(RtpSenderReceiverTest, VideoSenderInitParametersMovedAfterNegotiation) { std::unique_ptr set_streams_observer = std::make_unique(); - video_rtp_sender_ = VideoRtpSender::Create(worker_thread_, video_track_->id(), - set_streams_observer.get()); + video_rtp_sender_ = + VideoRtpSender::Create(CreateEnvironment(), worker_thread_, + video_track_->id(), set_streams_observer.get()); ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get())); EXPECT_CALL(*set_streams_observer, OnSetStreams()); video_rtp_sender_->SetStreams({local_stream_->id()}); @@ -1215,8 +1204,7 @@ TEST_F(RtpSenderReceiverTest, VideoSenderInitParametersMovedAfterNegotiation) { ssrcs.reserve(2); for (int i = 0; i < 2; ++i) ssrcs.push_back(kVideoSsrcSimulcast + i); - cricket::StreamParams stream_params = - cricket::CreateSimStreamParams("cname", ssrcs); + StreamParams stream_params = CreateSimStreamParams("cname", ssrcs); video_media_send_channel()->AddSendStream(stream_params); video_rtp_sender_->SetMediaChannel( video_media_send_channel()->AsVideoSendChannel()); @@ -1236,8 +1224,9 @@ TEST_F(RtpSenderReceiverTest, std::unique_ptr set_streams_observer = std::make_unique(); - video_rtp_sender_ = VideoRtpSender::Create(worker_thread_, video_track_->id(), - set_streams_observer.get()); + video_rtp_sender_ = + VideoRtpSender::Create(CreateEnvironment(), worker_thread_, + video_track_->id(), set_streams_observer.get()); ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get())); EXPECT_CALL(*set_streams_observer, OnSetStreams()); video_rtp_sender_->SetStreams({local_stream_->id()}); @@ -1256,8 +1245,7 @@ TEST_F(RtpSenderReceiverTest, ssrcs.reserve(2); for (int i = 0; i < 2; ++i) ssrcs.push_back(kVideoSsrcSimulcast + i); - cricket::StreamParams stream_params = - cricket::CreateSimStreamParams("cname", ssrcs); + StreamParams stream_params = CreateSimStreamParams("cname", ssrcs); video_media_send_channel()->AddSendStream(stream_params); video_rtp_sender_->SetMediaChannel( video_media_send_channel()->AsVideoSendChannel()); @@ -1279,8 +1267,9 @@ TEST_F(RtpSenderReceiverDeathTest, std::unique_ptr set_streams_observer = std::make_unique(); - video_rtp_sender_ = VideoRtpSender::Create(worker_thread_, video_track_->id(), - set_streams_observer.get()); + video_rtp_sender_ = + VideoRtpSender::Create(CreateEnvironment(), worker_thread_, + video_track_->id(), set_streams_observer.get()); ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get())); EXPECT_CALL(*set_streams_observer, OnSetStreams()); video_rtp_sender_->SetStreams({local_stream_->id()}); @@ -1300,8 +1289,7 @@ TEST_F(RtpSenderReceiverDeathTest, ssrcs.reserve(2); for (int i = 0; i < 2; ++i) ssrcs.push_back(kVideoSsrcSimulcast + i); - cricket::StreamParams stream_params = - cricket::StreamParams::CreateLegacy(kVideoSsrc); + StreamParams stream_params = StreamParams::CreateLegacy(kVideoSsrc); video_media_send_channel()->AddSendStream(stream_params); video_rtp_sender_->SetMediaChannel( video_media_send_channel()->AsVideoSendChannel()); @@ -1311,8 +1299,8 @@ TEST_F(RtpSenderReceiverDeathTest, TEST_F(RtpSenderReceiverTest, 
VideoSenderMustCallGetParametersBeforeSetParametersBeforeNegotiation) { - video_rtp_sender_ = - VideoRtpSender::Create(worker_thread_, /*id=*/"", nullptr); + video_rtp_sender_ = VideoRtpSender::Create( + CreateEnvironment(), worker_thread_, /*id=*/"", nullptr); RtpParameters params; RTCError result = video_rtp_sender_->SetParameters(params); @@ -1350,13 +1338,13 @@ TEST_F(RtpSenderReceiverTest, RtpParameters params = video_rtp_sender_->GetParameters(); EXPECT_EQ(1u, params.encodings.size()); - absl::optional result; + std::optional result; video_rtp_sender_->SetParametersAsync( - params, [&result](webrtc::RTCError error) { result = error; }); + params, [&result](RTCError error) { result = error; }); run_loop_.Flush(); EXPECT_TRUE(result->ok()); video_rtp_sender_->SetParametersAsync( - params, [&result](webrtc::RTCError error) { result = error; }); + params, [&result](RTCError error) { result = error; }); run_loop_.Flush(); EXPECT_EQ(RTCErrorType::INVALID_STATE, result->type()); @@ -1453,7 +1441,7 @@ TEST_F(RtpSenderReceiverTest, VideoSenderDetectInvalidNumTemporalLayers) { CreateVideoRtpSender(); RtpParameters params = video_rtp_sender_->GetParameters(); - params.encodings[0].num_temporal_layers = webrtc::kMaxTemporalStreams + 1; + params.encodings[0].num_temporal_layers = kMaxTemporalStreams + 1; RTCError result = video_rtp_sender_->SetParameters(params); EXPECT_EQ(RTCErrorType::INVALID_RANGE, result.type()); @@ -1536,7 +1524,7 @@ TEST_F(RtpSenderReceiverTest, SetVideoMinMaxSendBitrate) { CreateVideoRtpSender(); EXPECT_EQ(-1, video_media_send_channel()->max_bps()); - webrtc::RtpParameters params = video_rtp_sender_->GetParameters(); + RtpParameters params = video_rtp_sender_->GetParameters(); EXPECT_EQ(1U, params.encodings.size()); EXPECT_FALSE(params.encodings[0].min_bitrate_bps); EXPECT_FALSE(params.encodings[0].max_bitrate_bps); @@ -1589,10 +1577,9 @@ TEST_F(RtpSenderReceiverTest, SetVideoMinMaxSendBitrateSimulcast) { TEST_F(RtpSenderReceiverTest, SetVideoBitratePriority) { CreateVideoRtpSender(); - webrtc::RtpParameters params = video_rtp_sender_->GetParameters(); + RtpParameters params = video_rtp_sender_->GetParameters(); EXPECT_EQ(1U, params.encodings.size()); - EXPECT_EQ(webrtc::kDefaultBitratePriority, - params.encodings[0].bitrate_priority); + EXPECT_EQ(kDefaultBitratePriority, params.encodings[0].bitrate_priority); double new_bitrate_priority = 2.0; params.encodings[0].bitrate_priority = new_bitrate_priority; EXPECT_TRUE(video_rtp_sender_->SetParameters(params).ok()); @@ -1715,8 +1702,9 @@ TEST_F(RtpSenderReceiverTest, // Setting detailed overrides the default non-screencast mode. This should be // applied even if the track is set on construction. video_track_->set_content_hint(VideoTrackInterface::ContentHint::kDetailed); - video_rtp_sender_ = VideoRtpSender::Create(worker_thread_, video_track_->id(), - set_streams_observer.get()); + video_rtp_sender_ = + VideoRtpSender::Create(CreateEnvironment(), worker_thread_, + video_track_->id(), set_streams_observer.get()); ASSERT_TRUE(video_rtp_sender_->SetTrack(video_track_.get())); EXPECT_CALL(*set_streams_observer, OnSetStreams()); video_rtp_sender_->SetStreams({local_stream_->id()}); @@ -1725,7 +1713,7 @@ TEST_F(RtpSenderReceiverTest, video_track_->set_enabled(true); // Sender is not ready to send (no SSRC) so no option should have been set. 
- EXPECT_EQ(absl::nullopt, video_media_send_channel()->options().is_screencast); + EXPECT_EQ(std::nullopt, video_media_send_channel()->options().is_screencast); // Verify that the content hint is accounted for when video_rtp_sender_ does // get enabled. @@ -1781,8 +1769,11 @@ TEST_F(RtpSenderReceiverTest, InsertDtmf) { dtmf_sender->InsertDtmf("012", expected_duration, 100); // Verify - ASSERT_EQ_WAIT(3U, voice_media_send_channel()->dtmf_info_queue().size(), - kDefaultTimeout); + ASSERT_THAT( + WaitUntil( + [&] { return voice_media_send_channel()->dtmf_info_queue().size(); }, + ::testing::Eq(3U)), + IsRtcOk()); const uint32_t send_ssrc = voice_media_send_channel()->send_streams()[0].first_ssrc(); EXPECT_TRUE(CompareDtmfInfo(voice_media_send_channel()->dtmf_info_queue()[0], @@ -1796,7 +1787,7 @@ TEST_F(RtpSenderReceiverTest, InsertDtmf) { // Validate that the default FrameEncryptor setting is nullptr. TEST_F(RtpSenderReceiverTest, AudioSenderCanSetFrameEncryptor) { CreateAudioRtpSender(); - rtc::scoped_refptr fake_frame_encryptor( + scoped_refptr fake_frame_encryptor( new FakeFrameEncryptor()); EXPECT_EQ(nullptr, audio_rtp_sender_->GetFrameEncryptor()); audio_rtp_sender_->SetFrameEncryptor(fake_frame_encryptor); @@ -1808,7 +1799,7 @@ TEST_F(RtpSenderReceiverTest, AudioSenderCanSetFrameEncryptor) { // nothing. TEST_F(RtpSenderReceiverTest, AudioSenderCannotSetFrameEncryptorAfterStop) { CreateAudioRtpSender(); - rtc::scoped_refptr fake_frame_encryptor( + scoped_refptr fake_frame_encryptor( new FakeFrameEncryptor()); EXPECT_EQ(nullptr, audio_rtp_sender_->GetFrameEncryptor()); audio_rtp_sender_->Stop(); @@ -1819,8 +1810,8 @@ TEST_F(RtpSenderReceiverTest, AudioSenderCannotSetFrameEncryptorAfterStop) { // Validate that the default FrameEncryptor setting is nullptr. TEST_F(RtpSenderReceiverTest, AudioReceiverCanSetFrameDecryptor) { CreateAudioRtpReceiver(); - rtc::scoped_refptr fake_frame_decryptor( - rtc::make_ref_counted()); + scoped_refptr fake_frame_decryptor( + make_ref_counted()); EXPECT_EQ(nullptr, audio_rtp_receiver_->GetFrameDecryptor()); audio_rtp_receiver_->SetFrameDecryptor(fake_frame_decryptor); EXPECT_EQ(fake_frame_decryptor.get(), @@ -1831,8 +1822,8 @@ TEST_F(RtpSenderReceiverTest, AudioReceiverCanSetFrameDecryptor) { // Validate that the default FrameEncryptor setting is nullptr. TEST_F(RtpSenderReceiverTest, AudioReceiverCannotSetFrameDecryptorAfterStop) { CreateAudioRtpReceiver(); - rtc::scoped_refptr fake_frame_decryptor( - rtc::make_ref_counted()); + scoped_refptr fake_frame_decryptor( + make_ref_counted()); EXPECT_EQ(nullptr, audio_rtp_receiver_->GetFrameDecryptor()); audio_rtp_receiver_->SetMediaChannel(nullptr); audio_rtp_receiver_->SetFrameDecryptor(fake_frame_decryptor); @@ -1843,7 +1834,7 @@ TEST_F(RtpSenderReceiverTest, AudioReceiverCannotSetFrameDecryptorAfterStop) { // Validate that the default FrameEncryptor setting is nullptr. TEST_F(RtpSenderReceiverTest, VideoSenderCanSetFrameEncryptor) { CreateVideoRtpSender(); - rtc::scoped_refptr fake_frame_encryptor( + scoped_refptr fake_frame_encryptor( new FakeFrameEncryptor()); EXPECT_EQ(nullptr, video_rtp_sender_->GetFrameEncryptor()); video_rtp_sender_->SetFrameEncryptor(fake_frame_encryptor); @@ -1855,7 +1846,7 @@ TEST_F(RtpSenderReceiverTest, VideoSenderCanSetFrameEncryptor) { // nothing. 
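The InsertDtmf test in the hunk above moves from ASSERT_EQ_WAIT with a fixed 10-second timeout to the WaitUntil helper plus a matcher. A bare-bones polling loop of the same shape, built only on the standard library, is sketched below; it is not the real webrtc::WaitUntil, and unlike the real test it simply sleeps instead of servicing a run loop or time controller.

// Illustrative sketch only; not part of the diff. Not the real WaitUntil.
#include <chrono>
#include <functional>
#include <thread>

// Polls `condition` until it returns true or `timeout` elapses.
// Returns true on success, false on timeout.
bool WaitUntilTrue(const std::function<bool()>& condition,
                   std::chrono::milliseconds timeout =
                       std::chrono::milliseconds(5000),
                   std::chrono::milliseconds poll_interval =
                       std::chrono::milliseconds(10)) {
  const auto deadline = std::chrono::steady_clock::now() + timeout;
  while (std::chrono::steady_clock::now() < deadline) {
    if (condition()) return true;
    std::this_thread::sleep_for(poll_interval);
  }
  return condition();  // One last check at the deadline.
}

// Roughly the shape of the assertion in the test:
//   bool ok = WaitUntilTrue([&] { return dtmf_queue.size() == 3u; });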
TEST_F(RtpSenderReceiverTest, VideoSenderCannotSetFrameEncryptorAfterStop) { CreateVideoRtpSender(); - rtc::scoped_refptr fake_frame_encryptor( + scoped_refptr fake_frame_encryptor( new FakeFrameEncryptor()); EXPECT_EQ(nullptr, video_rtp_sender_->GetFrameEncryptor()); video_rtp_sender_->Stop(); @@ -1866,8 +1857,8 @@ TEST_F(RtpSenderReceiverTest, VideoSenderCannotSetFrameEncryptorAfterStop) { // Validate that the default FrameEncryptor setting is nullptr. TEST_F(RtpSenderReceiverTest, VideoReceiverCanSetFrameDecryptor) { CreateVideoRtpReceiver(); - rtc::scoped_refptr fake_frame_decryptor( - rtc::make_ref_counted()); + scoped_refptr fake_frame_decryptor( + make_ref_counted()); EXPECT_EQ(nullptr, video_rtp_receiver_->GetFrameDecryptor()); video_rtp_receiver_->SetFrameDecryptor(fake_frame_decryptor); EXPECT_EQ(fake_frame_decryptor.get(), @@ -1878,8 +1869,8 @@ TEST_F(RtpSenderReceiverTest, VideoReceiverCanSetFrameDecryptor) { // Validate that the default FrameEncryptor setting is nullptr. TEST_F(RtpSenderReceiverTest, VideoReceiverCannotSetFrameDecryptorAfterStop) { CreateVideoRtpReceiver(); - rtc::scoped_refptr fake_frame_decryptor( - rtc::make_ref_counted()); + scoped_refptr fake_frame_decryptor( + make_ref_counted()); EXPECT_EQ(nullptr, video_rtp_receiver_->GetFrameDecryptor()); video_rtp_receiver_->SetMediaChannel(nullptr); video_rtp_receiver_->SetFrameDecryptor(fake_frame_decryptor); @@ -1905,8 +1896,8 @@ TEST_F(RtpSenderReceiverTest, // Checks that the senders SetStreams eliminates duplicate stream ids. TEST_F(RtpSenderReceiverTest, SenderSetStreamsEliminatesDuplicateIds) { AddVideoTrack(); - video_rtp_sender_ = - VideoRtpSender::Create(worker_thread_, video_track_->id(), nullptr); + video_rtp_sender_ = VideoRtpSender::Create( + CreateEnvironment(), worker_thread_, video_track_->id(), nullptr); video_rtp_sender_->SetStreams({"1", "2", "1"}); EXPECT_EQ(video_rtp_sender_->stream_ids().size(), 2u); } diff --git a/pc/rtp_transceiver.cc b/pc/rtp_transceiver.cc index 815ec9dece..6dae6e6146 100644 --- a/pc/rtp_transceiver.cc +++ b/pc/rtp_transceiver.cc @@ -12,25 +12,50 @@ #include +#include +#include #include +#include +#include #include #include #include #include #include "absl/algorithm/container.h" -#include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" #include "api/audio_codecs/audio_codec_pair_id.h" -#include "api/field_trials_view.h" +#include "api/audio_options.h" +#include "api/crypto/crypto_options.h" +#include "api/jsep.h" +#include "api/media_types.h" +#include "api/rtc_error.h" #include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_direction.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "api/video/video_bitrate_allocator_factory.h" +#include "api/video_codecs/scalability_mode.h" #include "media/base/codec.h" +#include "media/base/codec_comparators.h" #include "media/base/media_channel.h" -#include "media/base/media_channel_impl.h" -#include "media/base/media_constants.h" +#include "media/base/media_config.h" #include "media/base/media_engine.h" #include "pc/channel.h" +#include "pc/channel_interface.h" +#include "pc/codec_vendor.h" +#include "pc/connection_context.h" #include "pc/rtp_media_utils.h" +#include "pc/rtp_receiver.h" +#include "pc/rtp_receiver_proxy.h" +#include "pc/rtp_sender.h" +#include 
"pc/rtp_sender_proxy.h" +#include "pc/rtp_transport_internal.h" #include "pc/session_description.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -39,144 +64,124 @@ namespace webrtc { namespace { -RTCError VerifyCodecPreferences( - const std::vector& codecs, - const std::vector& send_codecs, - const std::vector& recv_codecs) { - // If the intersection between codecs and - // RTCRtpSender.getCapabilities(kind).codecs or the intersection between - // codecs and RTCRtpReceiver.getCapabilities(kind).codecs only contains RTX, - // RED or FEC codecs or is an empty set, throw InvalidModificationError. - // This ensures that we always have something to offer, regardless of - // transceiver.direction. - - if (!absl::c_any_of(codecs, [&recv_codecs](const RtpCodecCapability& codec) { - return codec.name != cricket::kRtxCodecName && - codec.name != cricket::kRedCodecName && - codec.name != cricket::kFlexfecCodecName && - absl::c_any_of(recv_codecs, - [&codec](const cricket::Codec& recv_codec) { - return recv_codec.MatchesRtpCodec(codec); - }); - })) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, - "Invalid codec preferences: Missing codec from recv " - "codec capabilities."); - } +bool HasAnyMediaCodec(const std::vector& codecs) { + return absl::c_any_of(codecs, [](const RtpCodecCapability& codec) { + return codec.IsMediaCodec(); + }); +} - if (!absl::c_any_of(codecs, [&send_codecs](const RtpCodecCapability& codec) { - return codec.name != cricket::kRtxCodecName && - codec.name != cricket::kRedCodecName && - codec.name != cricket::kFlexfecCodecName && - absl::c_any_of(send_codecs, - [&codec](const cricket::Codec& send_codec) { - return send_codec.MatchesRtpCodec(codec); +RTCError VerifyCodecPreferences(const std::vector& codecs, + const std::vector& send_codecs, + const std::vector& recv_codecs) { + // `codec_capabilities` is the union of `send_codecs` and `recv_codecs`. + std::vector codec_capabilities; + codec_capabilities.reserve(send_codecs.size() + recv_codecs.size()); + codec_capabilities.insert(codec_capabilities.end(), send_codecs.begin(), + send_codecs.end()); + codec_capabilities.insert(codec_capabilities.end(), recv_codecs.begin(), + recv_codecs.end()); + // If a media codec is not recognized from `codec_capabilities`, throw + // InvalidModificationError. + if (!absl::c_all_of(codecs, [&codec_capabilities]( + const RtpCodecCapability& codec) { + return !codec.IsMediaCodec() || + absl::c_any_of(codec_capabilities, + [&codec](const Codec& codec_capability) { + return IsSameRtpCodec(codec_capability, codec); }); })) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION, - "Invalid codec preferences: Missing codec from send " - "codec capabilities."); - } - - // Let codecCapabilities be the union of - // RTCRtpSender.getCapabilities(kind).codecs and - // RTCRtpReceiver.getCapabilities(kind).codecs. For each codec in codecs, If - // codec is not in codecCapabilities, throw InvalidModificationError. 
- for (const auto& codec_preference : codecs) { - bool is_recv_codec = absl::c_any_of( - recv_codecs, [&codec_preference](const cricket::Codec& codec) { - return codec.MatchesRtpCodec(codec_preference); - }); - - bool is_send_codec = absl::c_any_of( - send_codecs, [&codec_preference](const cricket::Codec& codec) { - return codec.MatchesRtpCodec(codec_preference); - }); - - if (!is_recv_codec && !is_send_codec) { - LOG_AND_RETURN_ERROR( - RTCErrorType::INVALID_MODIFICATION, - std::string("Invalid codec preferences: invalid codec with name \"") + - codec_preference.name + "\"."); - } + "Invalid codec preferences: Missing codec from codec " + "capabilities."); } - - // Check we have a real codec (not just rtx, red or fec) - if (absl::c_all_of(codecs, [](const RtpCodecCapability& codec) { - return codec.name == cricket::kRtxCodecName || - codec.name == cricket::kRedCodecName || - codec.name == cricket::kUlpfecCodecName; - })) { + // If `codecs` only contains entries for RTX, RED, FEC or Comfort Noise, throw + // InvalidModificationError. + if (!HasAnyMediaCodec(codecs)) { LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_MODIFICATION, "Invalid codec preferences: codec list must have a non " - "RTX, RED or FEC entry."); + "RTX, RED, FEC or Comfort Noise entry."); } - return RTCError::OK(); } -// Matches the list of codecs as capabilities (potentially without SVC related -// information) to the list of send codecs and returns the list of codecs with -// all the SVC related information. -std::vector MatchCodecPreferences( - const std::vector& codecs, - const std::vector& send_codecs) { - std::vector result; - - for (const auto& codec_preference : codecs) { - for (const cricket::VideoCodec& send_codec : send_codecs) { - if (send_codec.MatchesRtpCodec(codec_preference)) { - result.push_back(send_codec); - } - } - } - - return result; -} - TaskQueueBase* GetCurrentTaskQueueOrThread() { TaskQueueBase* current = TaskQueueBase::Current(); if (!current) - current = rtc::ThreadManager::Instance()->CurrentThread(); + current = ThreadManager::Instance()->CurrentThread(); return current; } } // namespace -RtpTransceiver::RtpTransceiver(cricket::MediaType media_type, - ConnectionContext* context) +RtpTransceiver::RtpTransceiver(webrtc::MediaType media_type, + ConnectionContext* context, + CodecLookupHelper* codec_lookup_helper) : thread_(GetCurrentTaskQueueOrThread()), unified_plan_(false), media_type_(media_type), - context_(context) { - RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO); + context_(context), + codec_lookup_helper_(codec_lookup_helper) { + RTC_DCHECK(media_type == webrtc::MediaType::AUDIO || + media_type == webrtc::MediaType::VIDEO); + RTC_DCHECK(context_); + RTC_DCHECK(codec_lookup_helper_); } RtpTransceiver::RtpTransceiver( - rtc::scoped_refptr> sender, - rtc::scoped_refptr> - receiver, + scoped_refptr> sender, + scoped_refptr> receiver, ConnectionContext* context, + CodecLookupHelper* codec_lookup_helper, std::vector header_extensions_to_negotiate, std::function on_negotiation_needed) : thread_(GetCurrentTaskQueueOrThread()), unified_plan_(true), media_type_(sender->media_type()), context_(context), + codec_lookup_helper_(codec_lookup_helper), header_extensions_to_negotiate_( std::move(header_extensions_to_negotiate)), on_negotiation_needed_(std::move(on_negotiation_needed)) { - RTC_DCHECK(media_type_ == cricket::MEDIA_TYPE_AUDIO || - media_type_ == cricket::MEDIA_TYPE_VIDEO); + RTC_DCHECK(context_); + RTC_DCHECK(media_type_ == 
webrtc::MediaType::AUDIO || + media_type_ == webrtc::MediaType::VIDEO); RTC_DCHECK_EQ(sender->media_type(), receiver->media_type()); - sender->internal()->SetCodecPreferences( - sender->media_type() == cricket::MEDIA_TYPE_VIDEO - ? media_engine()->video().send_codecs(false) - : media_engine()->voice().send_codecs()); + sender->internal()->SetSendCodecs( + sender->media_type() == webrtc::MediaType::VIDEO + ? codec_vendor().video_send_codecs().codecs() + : codec_vendor().audio_send_codecs().codecs()); senders_.push_back(sender); receivers_.push_back(receiver); + + // Set default header extensions depending on whether simulcast/SVC is used. + RtpParameters parameters = sender->internal()->GetParametersInternal(); + bool uses_simulcast = parameters.encodings.size() > 1; + bool uses_svc = !parameters.encodings.empty() && + parameters.encodings[0].scalability_mode.has_value() && + parameters.encodings[0].scalability_mode != + ScalabilityModeToString(ScalabilityMode::kL1T1); + if (uses_simulcast || uses_svc) { + // Enable the DD and VLA extensions; they can be deactivated via the API. + // Skip this if the GFD extension was enabled via field trial + // for backward compatibility reasons. + bool uses_gfd = + absl::c_find_if( + header_extensions_to_negotiate_, + [](const RtpHeaderExtensionCapability& ext) { + return ext.uri == RtpExtension::kGenericFrameDescriptorUri00 && + ext.direction != webrtc::RtpTransceiverDirection::kStopped; + }) != header_extensions_to_negotiate_.end(); + if (!uses_gfd) { + for (RtpHeaderExtensionCapability& ext : + header_extensions_to_negotiate_) { + if (ext.uri == RtpExtension::kVideoLayersAllocationUri || + ext.uri == RtpExtension::kDependencyDescriptorUri) { + ext.direction = RtpTransceiverDirection::kSendRecv; + } + } + } + } } RtpTransceiver::~RtpTransceiver() { @@ -194,26 +199,28 @@ RtpTransceiver::~RtpTransceiver() { RTCError RtpTransceiver::CreateChannel( absl::string_view mid, Call* call_ptr, - const cricket::MediaConfig& media_config, + const MediaConfig& media_config, bool srtp_required, CryptoOptions crypto_options, - const cricket::AudioOptions& audio_options, - const cricket::VideoOptions& video_options, + const AudioOptions& audio_options, + const VideoOptions& video_options, VideoBitrateAllocatorFactory* video_bitrate_allocator_factory, std::function transport_lookup) { RTC_DCHECK_RUN_ON(thread_); + RTC_DCHECK(!channel()); + if (!media_engine()) { // TODO(hta): Must be a better way return RTCError(RTCErrorType::INTERNAL_ERROR, "No media engine for mid=" + std::string(mid)); } - std::unique_ptr new_channel; - if (media_type() == cricket::MEDIA_TYPE_AUDIO) { + + std::unique_ptr new_channel; + if (media_type() == webrtc::MediaType::AUDIO) { // TODO(bugs.webrtc.org/11992): CreateVideoChannel internally switches to // the worker thread. We shouldn't be using the `call_ptr_` hack here but // simply be on the worker thread and use `call_` (update upstream code). RTC_DCHECK(call_ptr); - RTC_DCHECK(media_engine()); // TODO(bugs.webrtc.org/11992): Remove this workaround after updates in // PeerConnection and add the expectation that we're already on the right // thread.
@@ -222,21 +229,14 @@ RTCError RtpTransceiver::CreateChannel( AudioCodecPairId codec_pair_id = AudioCodecPairId::Create(); - std::unique_ptr - media_send_channel = media_engine()->voice().CreateSendChannel( + std::unique_ptr media_send_channel = + media_engine()->voice().CreateSendChannel( call_ptr, media_config, audio_options, crypto_options, codec_pair_id); - if (!media_send_channel) { - // TODO(bugs.webrtc.org/14912): Consider CHECK or reporting failure - return; - } - std::unique_ptr - media_receive_channel = media_engine()->voice().CreateReceiveChannel( + std::unique_ptr media_receive_channel = + media_engine()->voice().CreateReceiveChannel( call_ptr, media_config, audio_options, crypto_options, codec_pair_id); - if (!media_receive_channel) { - return; - } // Note that this is safe because both sending and // receiving channels will be deleted at the same time. media_send_channel->SetSsrcListChangedCallback( @@ -245,14 +245,14 @@ RTCError RtpTransceiver::CreateChannel( receive_channel->ChooseReceiverReportSsrc(choices); }); - new_channel = std::make_unique( + new_channel = std::make_unique( context()->worker_thread(), context()->network_thread(), context()->signaling_thread(), std::move(media_send_channel), std::move(media_receive_channel), mid, srtp_required, crypto_options, context()->ssrc_generator()); }); } else { - RTC_DCHECK_EQ(cricket::MEDIA_TYPE_VIDEO, media_type()); + RTC_DCHECK_EQ(webrtc::MediaType::VIDEO, media_type()); // TODO(bugs.webrtc.org/11992): CreateVideoChannel internally switches to // the worker thread. We shouldn't be using the `call_ptr_` hack here but @@ -260,20 +260,13 @@ RTCError RtpTransceiver::CreateChannel( context()->worker_thread()->BlockingCall([&] { RTC_DCHECK_RUN_ON(context()->worker_thread()); - std::unique_ptr - media_send_channel = media_engine()->video().CreateSendChannel( + std::unique_ptr media_send_channel = + media_engine()->video().CreateSendChannel( call_ptr, media_config, video_options, crypto_options, video_bitrate_allocator_factory); - if (!media_send_channel) { - return; - } - - std::unique_ptr - media_receive_channel = media_engine()->video().CreateReceiveChannel( + std::unique_ptr media_receive_channel = + media_engine()->video().CreateReceiveChannel( call_ptr, media_config, video_options, crypto_options); - if (!media_receive_channel) { - return; - } // Note that this is safe because both sending and // receiving channels will be deleted at the same time. 
media_send_channel->SetSsrcListChangedCallback( @@ -282,24 +275,19 @@ RTCError RtpTransceiver::CreateChannel( receive_channel->ChooseReceiverReportSsrc(choices); }); - new_channel = std::make_unique( + new_channel = std::make_unique( context()->worker_thread(), context()->network_thread(), context()->signaling_thread(), std::move(media_send_channel), std::move(media_receive_channel), mid, srtp_required, crypto_options, context()->ssrc_generator()); }); } - if (!new_channel) { - // TODO(hta): Must be a better way - return RTCError(RTCErrorType::INTERNAL_ERROR, - "Failed to create channel for mid=" + std::string(mid)); - } SetChannel(std::move(new_channel), transport_lookup); return RTCError::OK(); } void RtpTransceiver::SetChannel( - std::unique_ptr channel, + std::unique_ptr channel, std::function transport_lookup) { RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(channel); @@ -314,8 +302,7 @@ void RtpTransceiver::SetChannel( RTC_DCHECK_EQ(media_type(), channel->media_type()); signaling_thread_safety_ = PendingTaskSafetyFlag::Create(); - - std::unique_ptr channel_to_delete; + channel_ = std::move(channel); // An alternative to this, could be to require SetChannel to be called // on the network thread. The channel object operates for the most part @@ -327,22 +314,19 @@ void RtpTransceiver::SetChannel( // helps with keeping the channel implementation requirements being met and // avoids synchronization for accessing the pointer or network related state. context()->network_thread()->BlockingCall([&]() { - if (channel_) { - channel_->SetFirstPacketReceivedCallback(nullptr); - channel_->SetRtpTransport(nullptr); - channel_to_delete = std::move(channel_); - } - - channel_ = std::move(channel); - channel_->SetRtpTransport(transport_lookup(channel_->mid())); channel_->SetFirstPacketReceivedCallback( [thread = thread_, flag = signaling_thread_safety_, this]() mutable { thread->PostTask( SafeTask(std::move(flag), [this]() { OnFirstPacketReceived(); })); }); + channel_->SetFirstPacketSentCallback( + [thread = thread_, flag = signaling_thread_safety_, this]() mutable { + thread->PostTask( + SafeTask(std::move(flag), [this]() { OnFirstPacketSent(); })); + }); }); - PushNewMediaChannelAndDeleteChannel(nullptr); + PushNewMediaChannel(); RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(2); } @@ -356,57 +340,61 @@ void RtpTransceiver::ClearChannel() { RTC_LOG_THREAD_BLOCK_COUNT(); - if (channel_) { - signaling_thread_safety_->SetNotAlive(); - signaling_thread_safety_ = nullptr; - } - std::unique_ptr channel_to_delete; + signaling_thread_safety_->SetNotAlive(); + signaling_thread_safety_ = nullptr; context()->network_thread()->BlockingCall([&]() { - if (channel_) { - channel_->SetFirstPacketReceivedCallback(nullptr); - channel_->SetRtpTransport(nullptr); - channel_to_delete = std::move(channel_); - } + channel_->SetFirstPacketReceivedCallback(nullptr); + channel_->SetFirstPacketSentCallback(nullptr); + channel_->SetRtpTransport(nullptr); }); RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); - PushNewMediaChannelAndDeleteChannel(std::move(channel_to_delete)); + DeleteChannel(); RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(2); } -void RtpTransceiver::PushNewMediaChannelAndDeleteChannel( - std::unique_ptr channel_to_delete) { - // The clumsy combination of pushing down media channel and deleting - // the channel is due to the desire to do both things in one Invoke(). 
- if (!channel_to_delete && senders_.empty() && receivers_.empty()) { +void RtpTransceiver::PushNewMediaChannel() { + RTC_DCHECK(channel_); + if (senders_.empty() && receivers_.empty()) { return; } context()->worker_thread()->BlockingCall([&]() { - // Push down the new media_channel, if any, otherwise clear it. - auto* media_send_channel = - channel_ ? channel_->media_send_channel() : nullptr; + // Push down the new media_channel. + auto* media_send_channel = channel_->media_send_channel(); for (const auto& sender : senders_) { sender->internal()->SetMediaChannel(media_send_channel); } - auto* media_receive_channel = - channel_ ? channel_->media_receive_channel() : nullptr; + auto* media_receive_channel = channel_->media_receive_channel(); for (const auto& receiver : receivers_) { receiver->internal()->SetMediaChannel(media_receive_channel); } + }); +} - // Destroy the channel, if we had one, now _after_ updating the receivers - // who might have had references to the previous channel. - if (channel_to_delete) { - channel_to_delete.reset(nullptr); +void RtpTransceiver::DeleteChannel() { + RTC_DCHECK(channel_); + // Ensure that channel_ is not reachable via transceiver, but is deleted + // only after clearing the references in senders_ and receivers_. + context()->worker_thread()->BlockingCall([&]() { + auto channel_to_delete = std::move(channel_); + // Clear the media channel reference from senders and receivers. + for (const auto& sender : senders_) { + sender->internal()->SetMediaChannel(nullptr); } + for (const auto& receiver : receivers_) { + receiver->internal()->SetMediaChannel(nullptr); + } + // The channel is destroyed here, on the worker thread as it needs to + // be. + channel_to_delete.reset(); }); } void RtpTransceiver::AddSender( - rtc::scoped_refptr> sender) { + scoped_refptr> sender) { RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(!stopped_); RTC_DCHECK(!unified_plan_); @@ -414,14 +402,11 @@ void RtpTransceiver::AddSender( RTC_DCHECK_EQ(media_type(), sender->media_type()); RTC_DCHECK(!absl::c_linear_search(senders_, sender)); - std::vector send_codecs = - media_type() == cricket::MEDIA_TYPE_VIDEO - ? media_engine()->video().send_codecs(false) - : media_engine()->voice().send_codecs(); - sender->internal()->SetCodecPreferences( - codec_preferences_.empty() - ? send_codecs - : MatchCodecPreferences(codec_preferences_, send_codecs)); + std::vector send_codecs = + media_type() == webrtc::MediaType::VIDEO + ? 
codec_vendor().video_send_codecs().codecs() + : codec_vendor().audio_send_codecs().codecs(); + sender->internal()->SetSendCodecs(send_codecs); senders_.push_back(sender); } @@ -440,8 +425,7 @@ bool RtpTransceiver::RemoveSender(RtpSenderInterface* sender) { } void RtpTransceiver::AddReceiver( - rtc::scoped_refptr> - receiver) { + scoped_refptr> receiver) { RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(!stopped_); RTC_DCHECK(!unified_plan_); @@ -472,24 +456,23 @@ bool RtpTransceiver::RemoveReceiver(RtpReceiverInterface* receiver) { return true; } -rtc::scoped_refptr RtpTransceiver::sender_internal() const { +scoped_refptr RtpTransceiver::sender_internal() const { RTC_DCHECK(unified_plan_); RTC_CHECK_EQ(1u, senders_.size()); - return rtc::scoped_refptr(senders_[0]->internal()); + return scoped_refptr(senders_[0]->internal()); } -rtc::scoped_refptr RtpTransceiver::receiver_internal() - const { +scoped_refptr RtpTransceiver::receiver_internal() const { RTC_DCHECK(unified_plan_); RTC_CHECK_EQ(1u, receivers_.size()); - return rtc::scoped_refptr(receivers_[0]->internal()); + return scoped_refptr(receivers_[0]->internal()); } -cricket::MediaType RtpTransceiver::media_type() const { +webrtc::MediaType RtpTransceiver::media_type() const { return media_type_; } -absl::optional RtpTransceiver::mid() const { +std::optional RtpTransceiver::mid() const { return mid_; } @@ -499,13 +482,19 @@ void RtpTransceiver::OnFirstPacketReceived() { } } -rtc::scoped_refptr RtpTransceiver::sender() const { +void RtpTransceiver::OnFirstPacketSent() { + for (const auto& sender : senders_) { + sender->internal()->NotifyFirstPacketSent(); + } +} + +scoped_refptr RtpTransceiver::sender() const { RTC_DCHECK(unified_plan_); RTC_CHECK_EQ(1u, senders_.size()); return senders_[0]; } -rtc::scoped_refptr RtpTransceiver::receiver() const { +scoped_refptr RtpTransceiver::receiver() const { RTC_DCHECK(unified_plan_); RTC_CHECK_EQ(1u, receivers_.size()); return receivers_[0]; @@ -526,7 +515,7 @@ void RtpTransceiver::set_current_direction(RtpTransceiverDirection direction) { } void RtpTransceiver::set_fired_direction( - absl::optional direction) { + std::optional direction) { fired_direction_ = direction; } @@ -542,7 +531,7 @@ bool RtpTransceiver::stopping() const { RtpTransceiverDirection RtpTransceiver::direction() const { if (unified_plan_ && stopping()) - return webrtc::RtpTransceiverDirection::kStopped; + return RtpTransceiverDirection::kStopped; return direction_; } @@ -567,16 +556,15 @@ RTCError RtpTransceiver::SetDirectionWithError( return RTCError::OK(); } -absl::optional RtpTransceiver::current_direction() +std::optional RtpTransceiver::current_direction() const { if (unified_plan_ && stopped()) - return webrtc::RtpTransceiverDirection::kStopped; + return RtpTransceiverDirection::kStopped; return current_direction_; } -absl::optional RtpTransceiver::fired_direction() - const { +std::optional RtpTransceiver::fired_direction() const { return fired_direction_; } @@ -604,7 +592,7 @@ void RtpTransceiver::StopSendingAndReceiving() { }); stopping_ = true; - direction_ = webrtc::RtpTransceiverDirection::kInactive; + direction_ = RtpTransceiverDirection::kInactive; } RTCError RtpTransceiver::StopStandard() { @@ -660,49 +648,115 @@ void RtpTransceiver::StopTransceiverProcedure() { // 3. Set transceiver.[[Receptive]] to false. // 4. Set transceiver.[[CurrentDirection]] to null. 
- current_direction_ = absl::nullopt; + current_direction_ = std::nullopt; } RTCError RtpTransceiver::SetCodecPreferences( - rtc::ArrayView codec_capabilities) { + ArrayView codec_capabilities) { RTC_DCHECK(unified_plan_); // 3. If codecs is an empty list, set transceiver's [[PreferredCodecs]] slot // to codecs and abort these steps. if (codec_capabilities.empty()) { codec_preferences_.clear(); - senders_.front()->internal()->SetCodecPreferences( - media_type() == cricket::MEDIA_TYPE_VIDEO - ? media_engine()->video().send_codecs(false) - : media_engine()->voice().send_codecs()); + sendrecv_codec_preferences_.clear(); + sendonly_codec_preferences_.clear(); + recvonly_codec_preferences_.clear(); return RTCError::OK(); } - // 4. Remove any duplicate values in codecs. std::vector codecs; absl::c_remove_copy_if(codec_capabilities, std::back_inserter(codecs), [&codecs](const RtpCodecCapability& codec) { return absl::c_linear_search(codecs, codec); }); - - // 6. to 8. - RTCError result; - std::vector recv_codecs, send_codecs; - if (media_type_ == cricket::MEDIA_TYPE_AUDIO) { - send_codecs = media_engine()->voice().send_codecs(); - recv_codecs = media_engine()->voice().recv_codecs(); - } else if (media_type_ == cricket::MEDIA_TYPE_VIDEO) { - send_codecs = media_engine()->video().send_codecs(context()->use_rtx()); - recv_codecs = media_engine()->video().recv_codecs(context()->use_rtx()); + // TODO(https://crbug.com/webrtc/391530822): Move logic in + // MediaSessionDescriptionFactory to this level. + return UpdateCodecPreferencesCaches(codecs); +} + +RTCError RtpTransceiver::UpdateCodecPreferencesCaches( + const std::vector& codecs) { + // Get codec capabilities from media engine. + std::vector send_codecs, recv_codecs; + if (media_type_ == webrtc::MediaType::AUDIO) { + send_codecs = codec_vendor().audio_send_codecs().codecs(); + recv_codecs = codec_vendor().audio_recv_codecs().codecs(); + } else if (media_type_ == webrtc::MediaType::VIDEO) { + send_codecs = codec_vendor().video_send_codecs().codecs(); + recv_codecs = codec_vendor().video_recv_codecs().codecs(); } - result = VerifyCodecPreferences(codecs, send_codecs, recv_codecs); - - if (result.ok()) { - senders_.front()->internal()->SetCodecPreferences( - MatchCodecPreferences(codecs, send_codecs)); - codec_preferences_ = codecs; + RTCError error = VerifyCodecPreferences(codecs, send_codecs, recv_codecs); + if (!error.ok()) { + return error; + } + codec_preferences_ = codecs; + // Update the filtered views of `codec_preferences_` so that we don't have + // to query codec capabilities when calling filtered_codec_preferences() or + // every time the direction changes. + sendrecv_codec_preferences_.clear(); + sendonly_codec_preferences_.clear(); + recvonly_codec_preferences_.clear(); + for (const RtpCodecCapability& codec : codec_preferences_) { + if (!codec.IsMediaCodec()) { + // Non-media codecs don't need to be filtered at this level. + sendrecv_codec_preferences_.push_back(codec); + sendonly_codec_preferences_.push_back(codec); + recvonly_codec_preferences_.push_back(codec); + continue; + } + // Is this a send codec, receive codec or both? 
+ bool is_send_codec = + absl::c_any_of(send_codecs, [&codec](const Codec& send_codec) { + return IsSameRtpCodecIgnoringLevel(send_codec, codec); + }); + bool is_recv_codec = + absl::c_any_of(recv_codecs, [&codec](const Codec& recv_codec) { + return IsSameRtpCodecIgnoringLevel(recv_codec, codec); + }); + // The codec being neither for sending nor receiving is not possible because + of prior validation by VerifyCodecPreferences(). + RTC_CHECK(is_send_codec || is_recv_codec); + if (is_send_codec && is_recv_codec) { + sendrecv_codec_preferences_.push_back(codec); + } + if (is_send_codec) { + sendonly_codec_preferences_.push_back(codec); + } + if (is_recv_codec) { + recvonly_codec_preferences_.push_back(codec); + } + } + // If filtering results in an empty list, this is the same as not having any + preferences. + if (!HasAnyMediaCodec(sendrecv_codec_preferences_)) { + sendrecv_codec_preferences_.clear(); } + if (!HasAnyMediaCodec(sendonly_codec_preferences_)) { + sendonly_codec_preferences_.clear(); + } + if (!HasAnyMediaCodec(recvonly_codec_preferences_)) { + recvonly_codec_preferences_.clear(); + } + return RTCError::OK(); +} - return result; +std::vector<RtpCodecCapability> RtpTransceiver::codec_preferences() const { + return codec_preferences_; +} + +std::vector<RtpCodecCapability> RtpTransceiver::filtered_codec_preferences() + const { + switch (direction_) { + case RtpTransceiverDirection::kSendRecv: + case RtpTransceiverDirection::kInactive: + case RtpTransceiverDirection::kStopped: + return sendrecv_codec_preferences_; + case RtpTransceiverDirection::kSendOnly: + return sendonly_codec_preferences_; + case RtpTransceiverDirection::kRecvOnly: + return recvonly_codec_preferences_; + } + return codec_preferences_; } std::vector<RtpHeaderExtensionCapability> @@ -741,7 +795,7 @@ bool IsMandatoryHeaderExtension(const std::string& uri) { } RTCError RtpTransceiver::SetHeaderExtensionsToNegotiate( - rtc::ArrayView<const RtpHeaderExtensionCapability> header_extensions) { + ArrayView<const RtpHeaderExtensionCapability> header_extensions) { // https://w3c.github.io/webrtc-extensions/#dom-rtcrtptransceiver-setheaderextensionstonegotiate if (header_extensions.size() != header_extensions_to_negotiate_.size()) { return RTCError(RTCErrorType::INVALID_MODIFICATION, @@ -777,7 +831,7 @@ RTCError RtpTransceiver::SetHeaderExtensionsToNegotiate( void RtpTransceiver::OnNegotiationUpdate( SdpType sdp_type, - const cricket::MediaContentDescription* content) { + const MediaContentDescription* content) { RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(content); if (sdp_type == SdpType::kAnswer) diff --git a/pc/rtp_transceiver.h b/pc/rtp_transceiver.h index deda5d7d61..1002a8fe79 100644 --- a/pc/rtp_transceiver.h +++ b/pc/rtp_transceiver.h @@ -15,11 +15,11 @@ #include #include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/audio_options.h" #include "api/crypto/crypto_options.h" @@ -39,6 +39,7 @@ #include "media/base/media_config.h" #include "media/base/media_engine.h" #include "pc/channel_interface.h" +#include "pc/codec_vendor.h" #include "pc/connection_context.h" #include "pc/proxy.h" #include "pc/rtp_receiver.h" @@ -49,10 +50,6 @@ #include "pc/session_description.h" #include "rtc_base/thread_annotations.h" -namespace cricket { -class MediaEngineInterface; -} - namespace webrtc { class PeerConnectionSdpMethods; @@ -91,17 +88,19 @@ class RtpTransceiver : public RtpTransceiverInterface { // channel set. // `media_type` specifies the type of RtpTransceiver (and, by transitivity, // the type of senders, receivers, and channel). Can either by audio or video.
- RtpTransceiver(cricket::MediaType media_type, ConnectionContext* context); + RtpTransceiver(webrtc::MediaType media_type, + ConnectionContext* context, + CodecLookupHelper* codec_lookup_helper); // Construct a Unified Plan-style RtpTransceiver with the given sender and // receiver. The media type will be derived from the media types of the sender // and receiver. The sender and receiver should have the same media type. // `HeaderExtensionsToNegotiate` is used for initializing the return value of // HeaderExtensionsToNegotiate(). RtpTransceiver( - rtc::scoped_refptr> sender, - rtc::scoped_refptr> - receiver, + scoped_refptr> sender, + scoped_refptr> receiver, ConnectionContext* context, + CodecLookupHelper* codec_lookup_helper, std::vector HeaderExtensionsToNegotiate, std::function on_negotiation_needed); ~RtpTransceiver() override; @@ -114,17 +113,17 @@ class RtpTransceiver : public RtpTransceiverInterface { // Returns the Voice/VideoChannel set for this transceiver. May be null if // the transceiver is not in the currently set local/remote description. - cricket::ChannelInterface* channel() const { return channel_.get(); } + ChannelInterface* channel() const { return channel_.get(); } // Creates the Voice/VideoChannel and sets it. RTCError CreateChannel( absl::string_view mid, Call* call_ptr, - const cricket::MediaConfig& media_config, + const MediaConfig& media_config, bool srtp_required, CryptoOptions crypto_options, - const cricket::AudioOptions& audio_options, - const cricket::VideoOptions& video_options, + const AudioOptions& audio_options, + const VideoOptions& video_options, VideoBitrateAllocatorFactory* video_bitrate_allocator_factory, std::function transport_lookup); @@ -153,7 +152,7 @@ class RtpTransceiver : public RtpTransceiverInterface { // The callback allows us to combine the transport lookup with network // state initialization of the channel object. // ClearChannel() must be used before calling SetChannel() again. - void SetChannel(std::unique_ptr channel, + void SetChannel(std::unique_ptr channel, std::function transport_lookup); @@ -163,14 +162,14 @@ class RtpTransceiver : public RtpTransceiverInterface { // Adds an RtpSender of the appropriate type to be owned by this transceiver. // Must not be null. void AddSender( - rtc::scoped_refptr> sender); + scoped_refptr> sender); // Removes the given RtpSender. Returns false if the sender is not owned by // this transceiver. bool RemoveSender(RtpSenderInterface* sender); // Returns a vector of the senders owned by this transceiver. - std::vector>> + std::vector>> senders() const { return senders_; } @@ -178,40 +177,39 @@ class RtpTransceiver : public RtpTransceiverInterface { // Adds an RtpReceiver of the appropriate type to be owned by this // transceiver. Must not be null. void AddReceiver( - rtc::scoped_refptr> + scoped_refptr> receiver); - // Removes the given RtpReceiver. Returns false if the sender is not owned by - // this transceiver. + // Removes the given RtpReceiver. Returns false if the receiver is not owned + // by this transceiver. bool RemoveReceiver(RtpReceiverInterface* receiver); // Returns a vector of the receivers owned by this transceiver. - std::vector< - rtc::scoped_refptr>> + std::vector>> receivers() const { return receivers_; } // Returns the backing object for the transceiver's Unified Plan sender. - rtc::scoped_refptr sender_internal() const; + scoped_refptr sender_internal() const; // Returns the backing object for the transceiver's Unified Plan receiver. 
- rtc::scoped_refptr receiver_internal() const; + scoped_refptr receiver_internal() const; // RtpTransceivers are not associated until they have a corresponding media // section set in SetLocalDescription or SetRemoteDescription. Therefore, // when setting a local offer we need a way to remember which transceiver was // used to create which media section in the offer. Storing the mline index // in CreateOffer is specified in JSEP to allow us to do that. - absl::optional mline_index() const { return mline_index_; } - void set_mline_index(absl::optional mline_index) { + std::optional mline_index() const { return mline_index_; } + void set_mline_index(std::optional mline_index) { mline_index_ = mline_index; } // Sets the MID for this transceiver. If the MID is not null, then the // transceiver is considered "associated" with the media section that has the // same MID. - void set_mid(const absl::optional& mid) { mid_ = mid; } + void set_mid(const std::optional& mid) { mid_ = mid; } // Sets the intended direction for this transceiver. Intended to be used // internally over SetDirection since this does not trigger a negotiation @@ -228,7 +226,7 @@ class RtpTransceiver : public RtpTransceiverInterface { // Sets the fired direction for this transceiver. The fired direction is null // until SetRemoteDescription is called or an answer is set (either local or // remote) after which the only valid reason to go back to null is rollback. - void set_fired_direction(absl::optional direction); + void set_fired_direction(std::optional direction); // According to JSEP rules for SetRemoteDescription, RtpTransceivers can be // reused only if they were added by AddTrack. @@ -260,31 +258,33 @@ class RtpTransceiver : public RtpTransceiverInterface { void StopTransceiverProcedure(); // RtpTransceiverInterface implementation. - cricket::MediaType media_type() const override; - absl::optional mid() const override; - rtc::scoped_refptr sender() const override; - rtc::scoped_refptr receiver() const override; + webrtc::MediaType media_type() const override; + std::optional mid() const override; + scoped_refptr sender() const override; + scoped_refptr receiver() const override; bool stopped() const override; bool stopping() const override; RtpTransceiverDirection direction() const override; RTCError SetDirectionWithError( RtpTransceiverDirection new_direction) override; - absl::optional current_direction() const override; - absl::optional fired_direction() const override; + std::optional current_direction() const override; + std::optional fired_direction() const override; RTCError StopStandard() override; void StopInternal() override; - RTCError SetCodecPreferences( - rtc::ArrayView codecs) override; - std::vector codec_preferences() const override { - return codec_preferences_; - } + RTCError SetCodecPreferences(ArrayView codecs) override; + // TODO(https://crbug.com/webrtc/391275081): Delete codec_preferences() in + // favor of filtered_codec_preferences() because it's not used anywhere. + std::vector codec_preferences() const override; + // A direction()-filtered view of codec_preferences(). If this filtering + // results in not having any media codecs, an empty list is returned to mean + // "no preferences". 
+ std::vector filtered_codec_preferences() const; std::vector GetHeaderExtensionsToNegotiate() const override; std::vector GetNegotiatedHeaderExtensions() const override; RTCError SetHeaderExtensionsToNegotiate( - rtc::ArrayView header_extensions) - override; + ArrayView header_extensions) override; // Called on the signaling thread when the local or remote content description // is updated. Used to update the negotiated header extensions. @@ -294,39 +294,47 @@ class RtpTransceiver : public RtpTransceiverInterface { // method. This will happen with the ownership of the channel object being // moved into the transceiver. void OnNegotiationUpdate(SdpType sdp_type, - const cricket::MediaContentDescription* content); + const MediaContentDescription* content); private: - cricket::MediaEngineInterface* media_engine() const { + MediaEngineInterface* media_engine() const { return context_->media_engine(); } ConnectionContext* context() const { return context_; } + CodecVendor& codec_vendor() { + return *codec_lookup_helper_->GetCodecVendor(); + } void OnFirstPacketReceived(); + void OnFirstPacketSent(); void StopSendingAndReceiving(); - // Delete a channel, and ensure that references to its media channel + // Tell the senders and receivers about possibly-new media channels + // in a newly created `channel_`. + void PushNewMediaChannel(); + // Delete `channel_`, and ensure that references to its media channels // are updated before deleting it. - void PushNewMediaChannelAndDeleteChannel( - std::unique_ptr channel_to_delete); + void DeleteChannel(); + + RTCError UpdateCodecPreferencesCaches( + const std::vector& codecs); // Enforce that this object is created, used and destroyed on one thread. TaskQueueBase* const thread_; const bool unified_plan_; - const cricket::MediaType media_type_; - rtc::scoped_refptr signaling_thread_safety_; - std::vector>> + const webrtc::MediaType media_type_; + scoped_refptr signaling_thread_safety_; + std::vector>> senders_; - std::vector< - rtc::scoped_refptr>> + std::vector>> receivers_; bool stopped_ RTC_GUARDED_BY(thread_) = false; bool stopping_ RTC_GUARDED_BY(thread_) = false; bool is_pc_closed_ = false; RtpTransceiverDirection direction_ = RtpTransceiverDirection::kInactive; - absl::optional current_direction_; - absl::optional fired_direction_; - absl::optional mid_; - absl::optional mline_index_; + std::optional current_direction_; + std::optional fired_direction_; + std::optional mid_; + std::optional mline_index_; bool created_by_addtrack_ = false; bool reused_for_addtrack_ = false; bool has_ever_been_used_to_send_ = false; @@ -334,16 +342,19 @@ class RtpTransceiver : public RtpTransceiverInterface { // Accessed on both thread_ and the network thread. Considered safe // because all access on the network thread is within an invoke() // from thread_. - std::unique_ptr channel_ = nullptr; + std::unique_ptr channel_ = nullptr; ConnectionContext* const context_; + CodecLookupHelper* const codec_lookup_helper_; std::vector codec_preferences_; + std::vector sendrecv_codec_preferences_; + std::vector sendonly_codec_preferences_; + std::vector recvonly_codec_preferences_; std::vector header_extensions_to_negotiate_; // `negotiated_header_extensions_` is read and written to on the signaling // thread from the SdpOfferAnswerHandler class (e.g. // PushdownMediaDescription(). 
- cricket::RtpHeaderExtensions negotiated_header_extensions_ - RTC_GUARDED_BY(thread_); + RtpHeaderExtensions negotiated_header_extensions_ RTC_GUARDED_BY(thread_); const std::function on_negotiation_needed_; }; @@ -351,29 +362,27 @@ class RtpTransceiver : public RtpTransceiverInterface { BEGIN_PRIMARY_PROXY_MAP(RtpTransceiver) PROXY_PRIMARY_THREAD_DESTRUCTOR() -BYPASS_PROXY_CONSTMETHOD0(cricket::MediaType, media_type) -PROXY_CONSTMETHOD0(absl::optional, mid) -PROXY_CONSTMETHOD0(rtc::scoped_refptr, sender) -PROXY_CONSTMETHOD0(rtc::scoped_refptr, receiver) +BYPASS_PROXY_CONSTMETHOD0(webrtc::MediaType, media_type) +PROXY_CONSTMETHOD0(std::optional, mid) +PROXY_CONSTMETHOD0(scoped_refptr, sender) +PROXY_CONSTMETHOD0(scoped_refptr, receiver) PROXY_CONSTMETHOD0(bool, stopped) PROXY_CONSTMETHOD0(bool, stopping) PROXY_CONSTMETHOD0(RtpTransceiverDirection, direction) -PROXY_METHOD1(webrtc::RTCError, SetDirectionWithError, RtpTransceiverDirection) -PROXY_CONSTMETHOD0(absl::optional, current_direction) -PROXY_CONSTMETHOD0(absl::optional, fired_direction) -PROXY_METHOD0(webrtc::RTCError, StopStandard) +PROXY_METHOD1(RTCError, SetDirectionWithError, RtpTransceiverDirection) +PROXY_CONSTMETHOD0(std::optional, current_direction) +PROXY_CONSTMETHOD0(std::optional, fired_direction) +PROXY_METHOD0(RTCError, StopStandard) PROXY_METHOD0(void, StopInternal) -PROXY_METHOD1(webrtc::RTCError, - SetCodecPreferences, - rtc::ArrayView) +PROXY_METHOD1(RTCError, SetCodecPreferences, ArrayView) PROXY_CONSTMETHOD0(std::vector, codec_preferences) PROXY_CONSTMETHOD0(std::vector, GetHeaderExtensionsToNegotiate) PROXY_CONSTMETHOD0(std::vector, GetNegotiatedHeaderExtensions) -PROXY_METHOD1(webrtc::RTCError, +PROXY_METHOD1(RTCError, SetHeaderExtensionsToNegotiate, - rtc::ArrayView) + ArrayView) END_PROXY_MAP(RtpTransceiver) } // namespace webrtc diff --git a/pc/rtp_transceiver_unittest.cc b/pc/rtp_transceiver_unittest.cc index 8b4a2389ce..c9b42be8e0 100644 --- a/pc/rtp_transceiver_unittest.cc +++ b/pc/rtp_transceiver_unittest.cc @@ -13,14 +13,37 @@ #include "pc/rtp_transceiver.h" #include +#include +#include #include +#include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/environment/environment_factory.h" +#include "api/jsep.h" +#include "api/make_ref_counted.h" +#include "api/media_types.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" #include "api/rtp_parameters.h" +#include "api/rtp_transceiver_direction.h" +#include "api/scoped_refptr.h" +#include "api/test/rtc_error_matchers.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/sdp_video_format.h" +#include "media/base/codec.h" +#include "media/base/codec_comparators.h" #include "media/base/fake_media_engine.h" -#include "media/base/media_engine.h" +#include "pc/codec_vendor.h" +#include "pc/connection_context.h" +#include "pc/rtp_parameters_conversion.h" +#include "pc/rtp_receiver.h" +#include "pc/rtp_receiver_proxy.h" +#include "pc/rtp_sender.h" +#include "pc/rtp_sender_proxy.h" +#include "pc/session_description.h" +#include "pc/test/enable_fake_media.h" +#include "pc/test/fake_codec_lookup_helper.h" #include "pc/test/mock_channel_interface.h" #include "pc/test/mock_rtp_receiver_internal.h" #include "pc/test/mock_rtp_sender_internal.h" @@ -31,10 +54,12 @@ using ::testing::_; using ::testing::ElementsAre; using ::testing::Field; +using ::testing::NiceMock; using ::testing::Optional; using ::testing::Property; using ::testing::Return; using ::testing::ReturnRef; +using 
::testing::SizeIs; namespace webrtc { @@ -44,38 +69,47 @@ class RtpTransceiverTest : public testing::Test { public: RtpTransceiverTest() : dependencies_(MakeDependencies()), - context_(ConnectionContext::Create(&dependencies_)) {} + context_( + ConnectionContext::Create(CreateEnvironment(), &dependencies_)), + codec_lookup_helper_(context_.get()) {} protected: - cricket::MediaEngineInterface* media_engine() { - return context_->media_engine(); + FakeMediaEngine* media_engine() { + // We know this cast is safe because we supplied the fake implementation + // in MakeDependencies(). + return static_cast(context_->media_engine()); } ConnectionContext* context() { return context_.get(); } + CodecLookupHelper* codec_lookup_helper() { return &codec_lookup_helper_; } + FakeCodecLookupHelper* fake_codec_lookup_helper() { + return &codec_lookup_helper_; + } private: - rtc::AutoThread main_thread_; + AutoThread main_thread_; static PeerConnectionFactoryDependencies MakeDependencies() { PeerConnectionFactoryDependencies d; - d.network_thread = rtc::Thread::Current(); - d.worker_thread = rtc::Thread::Current(); - d.signaling_thread = rtc::Thread::Current(); - d.media_engine = std::make_unique(); + d.network_thread = Thread::Current(); + d.worker_thread = Thread::Current(); + d.signaling_thread = Thread::Current(); + EnableFakeMedia(d, std::make_unique()); return d; } PeerConnectionFactoryDependencies dependencies_; - rtc::scoped_refptr context_; + scoped_refptr context_; + FakeCodecLookupHelper codec_lookup_helper_; }; // Checks that a channel cannot be set on a stopped `RtpTransceiver`. TEST_F(RtpTransceiverTest, CannotSetChannelOnStoppedTransceiver) { const std::string content_name("my_mid"); - auto transceiver = rtc::make_ref_counted( - cricket::MediaType::MEDIA_TYPE_AUDIO, context()); - auto channel1 = std::make_unique(); + auto transceiver = make_ref_counted( + webrtc::MediaType::AUDIO, context(), codec_lookup_helper()); + auto channel1 = std::make_unique>(); EXPECT_CALL(*channel1, media_type()) - .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO)); + .WillRepeatedly(Return(webrtc::MediaType::AUDIO)); EXPECT_CALL(*channel1, mid()).WillRepeatedly(ReturnRef(content_name)); EXPECT_CALL(*channel1, SetFirstPacketReceivedCallback(_)); EXPECT_CALL(*channel1, SetRtpTransport(_)).WillRepeatedly(Return(true)); @@ -90,13 +124,14 @@ TEST_F(RtpTransceiverTest, CannotSetChannelOnStoppedTransceiver) { transceiver->StopInternal(); EXPECT_EQ(channel1_ptr, transceiver->channel()); - auto channel2 = std::make_unique(); + auto channel2 = std::make_unique>(); EXPECT_CALL(*channel2, media_type()) - .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO)); + .WillRepeatedly(Return(webrtc::MediaType::AUDIO)); // Clear the current channel - required to allow SetChannel() EXPECT_CALL(*channel1_ptr, SetFirstPacketReceivedCallback(_)); transceiver->ClearChannel(); + ASSERT_EQ(nullptr, transceiver->channel()); // Channel can no longer be set, so this call should be a no-op. 
transceiver->SetChannel(std::move(channel2), [](const std::string&) { return nullptr; }); @@ -106,11 +141,11 @@ TEST_F(RtpTransceiverTest, CannotSetChannelOnStoppedTransceiver) { // Checks that a channel can be unset on a stopped `RtpTransceiver` TEST_F(RtpTransceiverTest, CanUnsetChannelOnStoppedTransceiver) { const std::string content_name("my_mid"); - auto transceiver = rtc::make_ref_counted( - cricket::MediaType::MEDIA_TYPE_VIDEO, context()); - auto channel = std::make_unique(); + auto transceiver = make_ref_counted( + webrtc::MediaType::VIDEO, context(), codec_lookup_helper()); + auto channel = std::make_unique>(); EXPECT_CALL(*channel, media_type()) - .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_VIDEO)); + .WillRepeatedly(Return(webrtc::MediaType::VIDEO)); EXPECT_CALL(*channel, mid()).WillRepeatedly(ReturnRef(content_name)); EXPECT_CALL(*channel, SetFirstPacketReceivedCallback(_)) .WillRepeatedly(testing::Return()); @@ -134,59 +169,399 @@ TEST_F(RtpTransceiverTest, CanUnsetChannelOnStoppedTransceiver) { class RtpTransceiverUnifiedPlanTest : public RtpTransceiverTest { public: - RtpTransceiverUnifiedPlanTest() - : transceiver_(rtc::make_ref_counted( - RtpSenderProxyWithInternal::Create( - rtc::Thread::Current(), - sender_), - RtpReceiverProxyWithInternal::Create( - rtc::Thread::Current(), - rtc::Thread::Current(), - receiver_), - context(), - media_engine()->voice().GetRtpHeaderExtensions(), - /* on_negotiation_needed= */ [] {})) {} - - static rtc::scoped_refptr MockReceiver() { - auto receiver = rtc::make_ref_counted(); + static scoped_refptr MockReceiver( + webrtc::MediaType media_type) { + auto receiver = make_ref_counted>(); EXPECT_CALL(*receiver.get(), media_type()) - .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO)); + .WillRepeatedly(Return(media_type)); return receiver; } - static rtc::scoped_refptr MockSender() { - auto sender = rtc::make_ref_counted(); - EXPECT_CALL(*sender.get(), media_type()) - .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO)); + static scoped_refptr MockSender( + webrtc::MediaType media_type) { + auto sender = make_ref_counted>(); + EXPECT_CALL(*sender.get(), media_type()).WillRepeatedly(Return(media_type)); return sender; } - rtc::AutoThread main_thread_; - rtc::scoped_refptr receiver_ = MockReceiver(); - rtc::scoped_refptr sender_ = MockSender(); - rtc::scoped_refptr transceiver_; + scoped_refptr CreateTransceiver( + scoped_refptr sender, + scoped_refptr receiver) { + return make_ref_counted( + RtpSenderProxyWithInternal::Create( + Thread::Current(), std::move(sender)), + RtpReceiverProxyWithInternal::Create( + Thread::Current(), Thread::Current(), std::move(receiver)), + context(), codec_lookup_helper(), + media_engine()->voice().GetRtpHeaderExtensions(), + /* on_negotiation_needed= */ [] {}); + } + + protected: + AutoThread main_thread_; }; // Basic tests for Stop() TEST_F(RtpTransceiverUnifiedPlanTest, StopSetsDirection) { - EXPECT_CALL(*receiver_.get(), Stop()); - EXPECT_CALL(*receiver_.get(), SetMediaChannel(_)); - EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped()); - EXPECT_CALL(*sender_.get(), Stop()); - - EXPECT_EQ(RtpTransceiverDirection::kInactive, transceiver_->direction()); - EXPECT_FALSE(transceiver_->current_direction()); - transceiver_->StopStandard(); - EXPECT_EQ(RtpTransceiverDirection::kStopped, transceiver_->direction()); - EXPECT_FALSE(transceiver_->current_direction()); - transceiver_->StopTransceiverProcedure(); - EXPECT_TRUE(transceiver_->current_direction()); - 
EXPECT_EQ(RtpTransceiverDirection::kStopped, transceiver_->direction()); + scoped_refptr receiver = + MockReceiver(webrtc::MediaType::AUDIO); + scoped_refptr sender = + MockSender(webrtc::MediaType::AUDIO); + scoped_refptr transceiver = + CreateTransceiver(sender, receiver); + + EXPECT_CALL(*receiver.get(), Stop()); + EXPECT_CALL(*receiver.get(), SetMediaChannel(_)); + EXPECT_CALL(*sender.get(), SetTransceiverAsStopped()); + EXPECT_CALL(*sender.get(), Stop()); + + EXPECT_EQ(RtpTransceiverDirection::kInactive, transceiver->direction()); + EXPECT_FALSE(transceiver->current_direction()); + transceiver->StopStandard(); + EXPECT_EQ(RtpTransceiverDirection::kStopped, transceiver->direction()); + EXPECT_FALSE(transceiver->current_direction()); + transceiver->StopTransceiverProcedure(); + EXPECT_TRUE(transceiver->current_direction()); + EXPECT_EQ(RtpTransceiverDirection::kStopped, transceiver->direction()); EXPECT_EQ(RtpTransceiverDirection::kStopped, - *transceiver_->current_direction()); + *transceiver->current_direction()); +} + +class RtpTransceiverFilteredCodecPreferencesTest + : public RtpTransceiverUnifiedPlanTest { + public: + RtpTransceiverFilteredCodecPreferencesTest() + : transceiver_( + CreateTransceiver(MockSender(webrtc::MediaType::VIDEO), + MockReceiver(webrtc::MediaType::VIDEO))) {} + + struct H264CodecCapabilities { + Codec cricket_sendrecv_codec; + RtpCodecCapability sendrecv_codec; + Codec cricket_sendonly_codec; + RtpCodecCapability sendonly_codec; + Codec cricket_recvonly_codec; + RtpCodecCapability recvonly_codec; + Codec cricket_rtx_codec; + RtpCodecCapability rtx_codec; + }; + + // This function must be called after modifying the media factory's + // capabilities, since the transceiver picks up codecs from the factory + // at transceiver create time. + void RecreateTransceiver() { + fake_codec_lookup_helper()->Reset(); + transceiver_ = CreateTransceiver(MockSender(webrtc::MediaType::VIDEO), + MockReceiver(webrtc::MediaType::VIDEO)); + } + + // For H264, the profile and level IDs are entangled. This function uses + // profile-level-id values that are not equal even when levels are ignored. 
+ H264CodecCapabilities ConfigureH264CodecCapabilities() { + Codec cricket_sendrecv_codec = + CreateVideoCodec(SdpVideoFormat("H264", + {{"level-asymmetry-allowed", "1"}, + {"packetization-mode", "1"}, + {"profile-level-id", "42f00b"}}, + {ScalabilityMode::kL1T1})); + Codec cricket_sendonly_codec = + CreateVideoCodec(SdpVideoFormat("H264", + {{"level-asymmetry-allowed", "1"}, + {"packetization-mode", "1"}, + {"profile-level-id", "640034"}}, + {ScalabilityMode::kL1T1})); + Codec cricket_recvonly_codec = + CreateVideoCodec(SdpVideoFormat("H264", + {{"level-asymmetry-allowed", "1"}, + {"packetization-mode", "1"}, + {"profile-level-id", "f4001f"}}, + {ScalabilityMode::kL1T1})); + Codec cricket_rtx_codec = + CreateVideoRtxCodec(Codec::kIdNotSet, Codec::kIdNotSet); + media_engine()->SetVideoSendCodecs( + {cricket_sendrecv_codec, cricket_sendonly_codec, cricket_rtx_codec}); + media_engine()->SetVideoRecvCodecs( + {cricket_sendrecv_codec, cricket_recvonly_codec, cricket_rtx_codec}); + H264CodecCapabilities capabilities = { + .cricket_sendrecv_codec = cricket_sendrecv_codec, + .sendrecv_codec = ToRtpCodecCapability(cricket_sendrecv_codec), + .cricket_sendonly_codec = cricket_sendonly_codec, + .sendonly_codec = ToRtpCodecCapability(cricket_sendonly_codec), + .cricket_recvonly_codec = cricket_recvonly_codec, + .recvonly_codec = ToRtpCodecCapability(cricket_recvonly_codec), + .cricket_rtx_codec = cricket_rtx_codec, + .rtx_codec = ToRtpCodecCapability(cricket_rtx_codec), + }; + EXPECT_FALSE(IsSameRtpCodecIgnoringLevel( + capabilities.cricket_sendrecv_codec, capabilities.sendonly_codec)); + EXPECT_FALSE(IsSameRtpCodecIgnoringLevel( + capabilities.cricket_sendrecv_codec, capabilities.recvonly_codec)); + EXPECT_FALSE(IsSameRtpCodecIgnoringLevel( + capabilities.cricket_sendonly_codec, capabilities.recvonly_codec)); + // Because RtpTransceiver buffers codec information in a CodecVendor, + // we must recreate it after changing the supported codecs. + RecreateTransceiver(); + return capabilities; + } + +#ifdef RTC_ENABLE_H265 + struct H265CodecCapabilities { + // The level-id from sender getCapabilities() or receiver getCapabilities(). + static constexpr const char* kSendOnlyLevel = "180"; + static constexpr const char* kRecvOnlyLevel = "156"; + // A valid H265 level-id, but one not present in either getCapabilities(). + static constexpr const char* kLevelNotInCapabilities = "135"; + + Codec cricket_sendonly_codec; + RtpCodecCapability sendonly_codec; + Codec cricket_recvonly_codec; + RtpCodecCapability recvonly_codec; + }; + + // For H265, the profile and level IDs are separate and are ignored by + // IsSameRtpCodecIgnoringLevel(). + H265CodecCapabilities ConfigureH265CodecCapabilities() { + Codec cricket_sendonly_codec = CreateVideoCodec( + SdpVideoFormat("H265", + {{"profile-id", "1"}, + {"tier-flag", "0"}, + {"level-id", H265CodecCapabilities::kSendOnlyLevel}, + {"tx-mode", "SRST"}}, + {ScalabilityMode::kL1T1})); + Codec cricket_recvonly_codec = CreateVideoCodec( + SdpVideoFormat("H265", + {{"profile-id", "1"}, + {"tier-flag", "0"}, + {"level-id", H265CodecCapabilities::kRecvOnlyLevel}, + {"tx-mode", "SRST"}}, + {ScalabilityMode::kL1T1})); + media_engine()->SetVideoSendCodecs({cricket_sendonly_codec}); + media_engine()->SetVideoRecvCodecs({cricket_recvonly_codec}); + // Because RtpTransceiver buffers codec information in a CodecVendor, + // we must recreate it after changing the supported codecs. 
+ RecreateTransceiver(); + return { + .cricket_sendonly_codec = cricket_sendonly_codec, + .sendonly_codec = ToRtpCodecCapability(cricket_sendonly_codec), + .cricket_recvonly_codec = cricket_recvonly_codec, + .recvonly_codec = ToRtpCodecCapability(cricket_recvonly_codec), + }; + } +#endif // RTC_ENABLE_H265 + + protected: + scoped_refptr transceiver_; +}; + +TEST_F(RtpTransceiverFilteredCodecPreferencesTest, EmptyByDefault) { + ConfigureH264CodecCapabilities(); + + EXPECT_THAT( + transceiver_->SetDirectionWithError(RtpTransceiverDirection::kSendRecv), + IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), SizeIs(0)); + + EXPECT_THAT( + transceiver_->SetDirectionWithError(RtpTransceiverDirection::kSendOnly), + IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), SizeIs(0)); + + EXPECT_THAT( + transceiver_->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly), + IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), SizeIs(0)); + + EXPECT_THAT( + transceiver_->SetDirectionWithError(RtpTransceiverDirection::kInactive), + IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), SizeIs(0)); +} + +TEST_F(RtpTransceiverFilteredCodecPreferencesTest, OrderIsMaintained) { + const auto codecs = ConfigureH264CodecCapabilities(); + std::vector codec_capabilities = {codecs.sendrecv_codec, + codecs.rtx_codec}; + EXPECT_THAT(transceiver_->SetCodecPreferences(codec_capabilities), IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), + ElementsAre(codec_capabilities[0], codec_capabilities[1])); + // Reverse order. + codec_capabilities = {codecs.rtx_codec, codecs.sendrecv_codec}; + EXPECT_THAT(transceiver_->SetCodecPreferences(codec_capabilities), IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), + ElementsAre(codec_capabilities[0], codec_capabilities[1])); +} + +TEST_F(RtpTransceiverFilteredCodecPreferencesTest, + FiltersCodecsBasedOnDirection) { + const auto codecs = ConfigureH264CodecCapabilities(); + std::vector codec_capabilities = { + codecs.sendonly_codec, codecs.sendrecv_codec, codecs.recvonly_codec}; + EXPECT_THAT(transceiver_->SetCodecPreferences(codec_capabilities), IsRtcOk()); + + EXPECT_THAT( + transceiver_->SetDirectionWithError(RtpTransceiverDirection::kSendRecv), + IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), + ElementsAre(codecs.sendrecv_codec)); + + EXPECT_THAT( + transceiver_->SetDirectionWithError(RtpTransceiverDirection::kSendOnly), + IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), + ElementsAre(codecs.sendonly_codec, codecs.sendrecv_codec)); + + EXPECT_THAT( + transceiver_->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly), + IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), + ElementsAre(codecs.sendrecv_codec, codecs.recvonly_codec)); + + EXPECT_THAT( + transceiver_->SetDirectionWithError(RtpTransceiverDirection::kInactive), + IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), + ElementsAre(codecs.sendrecv_codec)); +} + +TEST_F(RtpTransceiverFilteredCodecPreferencesTest, + RtxIsIncludedAfterFiltering) { + const auto codecs = ConfigureH264CodecCapabilities(); + std::vector codec_capabilities = {codecs.recvonly_codec, + codecs.rtx_codec}; + EXPECT_THAT(transceiver_->SetCodecPreferences(codec_capabilities), IsRtcOk()); + + EXPECT_THAT( + transceiver_->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly), + IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), + 
ElementsAre(codecs.recvonly_codec, codecs.rtx_codec)); +} + +TEST_F(RtpTransceiverFilteredCodecPreferencesTest, + NoMediaIsTheSameAsNoPreference) { + const auto codecs = ConfigureH264CodecCapabilities(); + std::vector codec_capabilities = {codecs.recvonly_codec, + codecs.rtx_codec}; + EXPECT_THAT(transceiver_->SetCodecPreferences(codec_capabilities), IsRtcOk()); + + EXPECT_THAT( + transceiver_->SetDirectionWithError(RtpTransceiverDirection::kSendOnly), + IsRtcOk()); + // After filtering the only codec that remains is RTX which is not a media + // codec, this is the same as not having any preferences. + EXPECT_THAT(transceiver_->filtered_codec_preferences(), SizeIs(0)); + + // But the preferences are remembered in case the direction changes such that + // we do have a media codec. + EXPECT_THAT( + transceiver_->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly), + IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), + ElementsAre(codecs.recvonly_codec, codecs.rtx_codec)); +} + +TEST_F(RtpTransceiverFilteredCodecPreferencesTest, + H264LevelIdsIgnoredByFilter) { + // Baseline 3.1 and 5.2 are compatible when ignoring level IDs. + Codec baseline_3_1 = + CreateVideoCodec(SdpVideoFormat("H264", + {{"level-asymmetry-allowed", "1"}, + {"packetization-mode", "1"}, + {"profile-level-id", "42001f"}}, + {ScalabilityMode::kL1T1})); + Codec baseline_5_2 = + CreateVideoCodec(SdpVideoFormat("H264", + {{"level-asymmetry-allowed", "1"}, + {"packetization-mode", "1"}, + {"profile-level-id", "420034"}}, + {ScalabilityMode::kL1T1})); + // High is NOT compatible with baseline. + Codec high_3_1 = + CreateVideoCodec(SdpVideoFormat("H264", + {{"level-asymmetry-allowed", "1"}, + {"packetization-mode", "1"}, + {"profile-level-id", "64001f"}}, + {ScalabilityMode::kL1T1})); + // Configure being able to both send and receive Baseline but using different + // level IDs in either direction, while the High profile is "truly" recvonly. + media_engine()->SetVideoSendCodecs({baseline_3_1}); + media_engine()->SetVideoRecvCodecs({baseline_5_2, high_3_1}); + // Because RtpTransceiver buffers codec information in a CodecVendor, + // we must recreate it after changing the supported codecs. + RecreateTransceiver(); + + // Prefer to "sendrecv" Baseline 5.2. Even though we can only send 3.1 this + // codec is not filtered out due to 5.2 and 3.1 being compatible when ignoring + // level IDs. + std::vector codec_capabilities = { + ToRtpCodecCapability(baseline_5_2)}; + EXPECT_THAT(transceiver_->SetCodecPreferences(codec_capabilities), IsRtcOk()); + EXPECT_THAT( + transceiver_->SetDirectionWithError(RtpTransceiverDirection::kSendRecv), + IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), + ElementsAre(codec_capabilities[0])); + // Prefer to "sendrecv" High 3.1. This gets filtered out because we cannot + // send it (Baseline 3.1 is not compatible with it). + codec_capabilities = {ToRtpCodecCapability(high_3_1)}; + EXPECT_THAT(transceiver_->SetCodecPreferences(codec_capabilities), IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), SizeIs(0)); + // Change direction to "recvonly" to avoid High 3.1 being filtered out. 
+ EXPECT_THAT( + transceiver_->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly), + IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), + ElementsAre(codec_capabilities[0])); } -class RtpTransceiverTestForHeaderExtensions : public RtpTransceiverTest { +#ifdef RTC_ENABLE_H265 +TEST_F(RtpTransceiverFilteredCodecPreferencesTest, + H265LevelIdIsIgnoredByFilter) { + const auto codecs = ConfigureH265CodecCapabilities(); + std::vector codec_capabilities = {codecs.sendonly_codec, + codecs.recvonly_codec}; + EXPECT_THAT(transceiver_->SetCodecPreferences(codec_capabilities), IsRtcOk()); + // Regardless of direction, both codecs are preferred due to ignoring levels. + EXPECT_THAT( + transceiver_->SetDirectionWithError(RtpTransceiverDirection::kSendOnly), + IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), + ElementsAre(codec_capabilities[0], codec_capabilities[1])); + EXPECT_THAT( + transceiver_->SetDirectionWithError(RtpTransceiverDirection::kRecvOnly), + IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), + ElementsAre(codec_capabilities[0], codec_capabilities[1])); + EXPECT_THAT( + transceiver_->SetDirectionWithError(RtpTransceiverDirection::kSendRecv), + IsRtcOk()); + EXPECT_THAT(transceiver_->filtered_codec_preferences(), + ElementsAre(codec_capabilities[0], codec_capabilities[1])); +} + +TEST_F(RtpTransceiverFilteredCodecPreferencesTest, + H265LevelIdHasToBeFromSenderOrReceiverCapabilities) { + ConfigureH265CodecCapabilities(); + Codec cricket_codec = CreateVideoCodec(SdpVideoFormat( + "H265", + {{"profile-id", "1"}, + {"tier-flag", "0"}, + {"level-id", H265CodecCapabilities::kLevelNotInCapabilities}, + {"tx-mode", "SRST"}}, + {ScalabilityMode::kL1T1})); + + std::vector codec_capabilities = { + ToRtpCodecCapability(cricket_codec)}; + EXPECT_THAT(transceiver_->SetCodecPreferences(codec_capabilities), + IsRtcErrorWithTypeAndMessage( + RTCErrorType::INVALID_MODIFICATION, + "Invalid codec preferences: Missing codec from codec " + "capabilities.")); +} +#endif // RTC_ENABLE_H265 + +class RtpTransceiverTestForHeaderExtensions + : public RtpTransceiverUnifiedPlanTest { public: RtpTransceiverTestForHeaderExtensions() : extensions_( @@ -202,43 +577,31 @@ class RtpTransceiverTestForHeaderExtensions : public RtpTransceiverTest { RtpHeaderExtensionCapability(RtpExtension::kVideoRotationUri, 4, RtpTransceiverDirection::kSendRecv)}), - transceiver_(rtc::make_ref_counted( + transceiver_(make_ref_counted( RtpSenderProxyWithInternal::Create( - rtc::Thread::Current(), + Thread::Current(), sender_), RtpReceiverProxyWithInternal::Create( - rtc::Thread::Current(), - rtc::Thread::Current(), + Thread::Current(), + Thread::Current(), receiver_), context(), + codec_lookup_helper(), extensions_, /* on_negotiation_needed= */ [] {})) {} - static rtc::scoped_refptr MockReceiver() { - auto receiver = rtc::make_ref_counted(); - EXPECT_CALL(*receiver.get(), media_type()) - .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO)); - return receiver; - } - - static rtc::scoped_refptr MockSender() { - auto sender = rtc::make_ref_counted(); - EXPECT_CALL(*sender.get(), media_type()) - .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO)); - return sender; - } - void ClearChannel() { EXPECT_CALL(*sender_.get(), SetMediaChannel(_)); transceiver_->ClearChannel(); } - rtc::AutoThread main_thread_; - rtc::scoped_refptr receiver_ = MockReceiver(); - rtc::scoped_refptr sender_ = MockSender(); + scoped_refptr receiver_ = + MockReceiver(webrtc::MediaType::AUDIO); 
+ scoped_refptr sender_ = + MockSender(webrtc::MediaType::AUDIO); std::vector extensions_; - rtc::scoped_refptr transceiver_; + scoped_refptr transceiver_; }; TEST_F(RtpTransceiverTestForHeaderExtensions, OffersChannelManagerList) { @@ -377,11 +740,11 @@ TEST_F(RtpTransceiverTestForHeaderExtensions, EXPECT_CALL(*sender_.get(), SetMediaChannel(_)); EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped()); EXPECT_CALL(*sender_.get(), Stop()); - auto mock_channel = std::make_unique(); + auto mock_channel = std::make_unique>(); auto mock_channel_ptr = mock_channel.get(); EXPECT_CALL(*mock_channel, SetFirstPacketReceivedCallback(_)); EXPECT_CALL(*mock_channel, media_type()) - .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO)); + .WillRepeatedly(Return(webrtc::MediaType::AUDIO)); EXPECT_CALL(*mock_channel, voice_media_send_channel()) .WillRepeatedly(Return(nullptr)); EXPECT_CALL(*mock_channel, mid()).WillRepeatedly(ReturnRef(content_name)); @@ -410,19 +773,19 @@ TEST_F(RtpTransceiverTestForHeaderExtensions, ReturnsNegotiatedHdrExts) { EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped()); EXPECT_CALL(*sender_.get(), Stop()); - auto mock_channel = std::make_unique(); + auto mock_channel = std::make_unique>(); auto mock_channel_ptr = mock_channel.get(); EXPECT_CALL(*mock_channel, SetFirstPacketReceivedCallback(_)); EXPECT_CALL(*mock_channel, media_type()) - .WillRepeatedly(Return(cricket::MediaType::MEDIA_TYPE_AUDIO)); + .WillRepeatedly(Return(webrtc::MediaType::AUDIO)); EXPECT_CALL(*mock_channel, voice_media_send_channel()) .WillRepeatedly(Return(nullptr)); EXPECT_CALL(*mock_channel, mid()).WillRepeatedly(ReturnRef(content_name)); EXPECT_CALL(*mock_channel, SetRtpTransport(_)).WillRepeatedly(Return(true)); - cricket::RtpHeaderExtensions extensions = {webrtc::RtpExtension("uri1", 1), - webrtc::RtpExtension("uri2", 2)}; - cricket::AudioContentDescription description; + RtpHeaderExtensions extensions = {RtpExtension("uri1", 1), + RtpExtension("uri2", 2)}; + AudioContentDescription description; description.set_rtp_header_extensions(extensions); transceiver_->OnNegotiationUpdate(SdpType::kAnswer, &description); @@ -449,9 +812,9 @@ TEST_F(RtpTransceiverTestForHeaderExtensions, EXPECT_CALL(*sender_.get(), SetTransceiverAsStopped()); EXPECT_CALL(*sender_.get(), Stop()); - cricket::RtpHeaderExtensions extensions = {webrtc::RtpExtension("uri1", 1), - webrtc::RtpExtension("uri2", 2)}; - cricket::AudioContentDescription description; + RtpHeaderExtensions extensions = {RtpExtension("uri1", 1), + RtpExtension("uri2", 2)}; + AudioContentDescription description; description.set_rtp_header_extensions(extensions); transceiver_->OnNegotiationUpdate(SdpType::kAnswer, &description); @@ -464,8 +827,7 @@ TEST_F(RtpTransceiverTestForHeaderExtensions, RtpTransceiverDirection::kStopped), Field(&RtpHeaderExtensionCapability::direction, RtpTransceiverDirection::kStopped))); - extensions = {webrtc::RtpExtension("uri3", 4), - webrtc::RtpExtension("uri5", 6)}; + extensions = {RtpExtension("uri3", 4), RtpExtension("uri5", 6)}; description.set_rtp_header_extensions(extensions); transceiver_->OnNegotiationUpdate(SdpType::kAnswer, &description); @@ -480,6 +842,81 @@ TEST_F(RtpTransceiverTestForHeaderExtensions, RtpTransceiverDirection::kStopped))); } +TEST_F(RtpTransceiverTestForHeaderExtensions, + SimulcastOrSvcEnablesExtensionsByDefault) { + std::vector extensions = { + {RtpExtension::kDependencyDescriptorUri, 1, + RtpTransceiverDirection::kStopped}, + {RtpExtension::kVideoLayersAllocationUri, 2, + 
RtpTransceiverDirection::kStopped}, + }; + + // Default is stopped. + auto sender = make_ref_counted>(); + auto transceiver = make_ref_counted( + RtpSenderProxyWithInternal::Create(Thread::Current(), + sender), + RtpReceiverProxyWithInternal::Create( + Thread::Current(), Thread::Current(), receiver_), + context(), codec_lookup_helper(), extensions, + /* on_negotiation_needed= */ [] {}); + std::vector header_extensions = + transceiver->GetHeaderExtensionsToNegotiate(); + ASSERT_EQ(header_extensions.size(), 2u); + EXPECT_EQ(header_extensions[0].uri, RtpExtension::kDependencyDescriptorUri); + EXPECT_EQ(header_extensions[0].direction, RtpTransceiverDirection::kStopped); + EXPECT_EQ(header_extensions[1].uri, RtpExtension::kVideoLayersAllocationUri); + EXPECT_EQ(header_extensions[1].direction, RtpTransceiverDirection::kStopped); + + // Simulcast, i.e. more than one encoding. + RtpParameters simulcast_parameters; + simulcast_parameters.encodings.resize(2); + auto simulcast_sender = make_ref_counted>(); + EXPECT_CALL(*simulcast_sender, GetParametersInternal()) + .WillRepeatedly(Return(simulcast_parameters)); + auto simulcast_transceiver = make_ref_counted( + RtpSenderProxyWithInternal::Create(Thread::Current(), + simulcast_sender), + RtpReceiverProxyWithInternal::Create( + Thread::Current(), Thread::Current(), receiver_), + context(), codec_lookup_helper(), extensions, + /* on_negotiation_needed= */ [] {}); + auto simulcast_extensions = + simulcast_transceiver->GetHeaderExtensionsToNegotiate(); + ASSERT_EQ(simulcast_extensions.size(), 2u); + EXPECT_EQ(simulcast_extensions[0].uri, + RtpExtension::kDependencyDescriptorUri); + EXPECT_EQ(simulcast_extensions[0].direction, + RtpTransceiverDirection::kSendRecv); + EXPECT_EQ(simulcast_extensions[1].uri, + RtpExtension::kVideoLayersAllocationUri); + EXPECT_EQ(simulcast_extensions[1].direction, + RtpTransceiverDirection::kSendRecv); + + // SVC, a single encoding with a scalabilityMode other than L1T1. 
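  // (Aside, not part of the patch: "L3T3" below denotes an SVC mode with
  // three spatial and three temporal layers. The behaviour this test pins
  // down is that a sender counts as layered when either
  //   parameters.encodings.size() > 1                     // simulcast
  // or its single encoding carries a scalability_mode other than "L1T1";
  // in both cases kDependencyDescriptorUri and kVideoLayersAllocationUri
  // default to kSendRecv instead of kStopped.)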
+ webrtc::RtpParameters svc_parameters; + svc_parameters.encodings.resize(1); + svc_parameters.encodings[0].scalability_mode = "L3T3"; + + auto svc_sender = make_ref_counted>(); + EXPECT_CALL(*svc_sender, GetParametersInternal()) + .WillRepeatedly(Return(svc_parameters)); + auto svc_transceiver = make_ref_counted( + RtpSenderProxyWithInternal::Create(Thread::Current(), + svc_sender), + RtpReceiverProxyWithInternal::Create( + Thread::Current(), Thread::Current(), receiver_), + context(), codec_lookup_helper(), extensions, + /* on_negotiation_needed= */ [] {}); + std::vector svc_extensions = + svc_transceiver->GetHeaderExtensionsToNegotiate(); + ASSERT_EQ(svc_extensions.size(), 2u); + EXPECT_EQ(svc_extensions[0].uri, RtpExtension::kDependencyDescriptorUri); + EXPECT_EQ(svc_extensions[0].direction, RtpTransceiverDirection::kSendRecv); + EXPECT_EQ(svc_extensions[1].uri, RtpExtension::kVideoLayersAllocationUri); + EXPECT_EQ(svc_extensions[1].direction, RtpTransceiverDirection::kSendRecv); +} + } // namespace } // namespace webrtc diff --git a/pc/rtp_transmission_manager.cc b/pc/rtp_transmission_manager.cc index 96b308842b..8c6d2a8fa8 100644 --- a/pc/rtp_transmission_manager.cc +++ b/pc/rtp_transmission_manager.cc @@ -10,18 +10,41 @@ #include "pc/rtp_transmission_manager.h" -#include +#include +#include +#include +#include #include +#include -#include "absl/types/optional.h" +#include "api/environment/environment.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_direction.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "media/base/media_channel.h" +#include "media/base/media_engine.h" #include "pc/audio_rtp_receiver.h" #include "pc/channel_interface.h" +#include "pc/codec_vendor.h" +#include "pc/connection_context.h" #include "pc/legacy_stats_collector_interface.h" +#include "pc/rtp_receiver.h" +#include "pc/rtp_receiver_proxy.h" +#include "pc/rtp_sender.h" +#include "pc/rtp_sender_proxy.h" +#include "pc/rtp_transceiver.h" +#include "pc/usage_pattern.h" #include "pc/video_rtp_receiver.h" #include "rtc_base/checks.h" -#include "rtc_base/helpers.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/logging.h" namespace webrtc { @@ -34,14 +57,18 @@ static const char kDefaultVideoSenderId[] = "defaultv0"; } // namespace RtpTransmissionManager::RtpTransmissionManager( + const Environment& env, bool is_unified_plan, ConnectionContext* context, + CodecLookupHelper* codec_lookup_helper, UsagePattern* usage_pattern, PeerConnectionObserver* observer, LegacyStatsCollectorInterface* legacy_stats, std::function on_negotiation_needed) - : is_unified_plan_(is_unified_plan), + : env_(env), + is_unified_plan_(is_unified_plan), context_(context), + codec_lookup_helper_(codec_lookup_helper), usage_pattern_(usage_pattern), observer_(observer), legacy_stats_(legacy_stats), @@ -72,7 +99,7 @@ PeerConnectionObserver* RtpTransmissionManager::Observer() const { return observer_; } -cricket::VoiceMediaSendChannelInterface* +VoiceMediaSendChannelInterface* RtpTransmissionManager::voice_media_send_channel() const { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(!IsUnifiedPlan()); @@ -84,7 +111,7 @@ RtpTransmissionManager::voice_media_send_channel() const { } } -cricket::VideoMediaSendChannelInterface* 
+VideoMediaSendChannelInterface* RtpTransmissionManager::video_media_send_channel() const { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(!IsUnifiedPlan()); @@ -95,7 +122,7 @@ RtpTransmissionManager::video_media_send_channel() const { return nullptr; } } -cricket::VoiceMediaReceiveChannelInterface* +VoiceMediaReceiveChannelInterface* RtpTransmissionManager::voice_media_receive_channel() const { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(!IsUnifiedPlan()); @@ -107,7 +134,7 @@ RtpTransmissionManager::voice_media_receive_channel() const { } } -cricket::VideoMediaReceiveChannelInterface* +VideoMediaReceiveChannelInterface* RtpTransmissionManager::video_media_receive_channel() const { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(!IsUnifiedPlan()); @@ -119,9 +146,8 @@ RtpTransmissionManager::video_media_receive_channel() const { } } -RTCErrorOr> -RtpTransmissionManager::AddTrack( - rtc::scoped_refptr track, +RTCErrorOr> RtpTransmissionManager::AddTrack( + scoped_refptr track, const std::vector& stream_ids, const std::vector* init_send_encodings) { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -131,9 +157,9 @@ RtpTransmissionManager::AddTrack( : AddTrackPlanB(track, stream_ids, init_send_encodings)); } -RTCErrorOr> +RTCErrorOr> RtpTransmissionManager::AddTrackPlanB( - rtc::scoped_refptr track, + scoped_refptr track, const std::vector& stream_ids, const std::vector* init_send_encodings) { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -144,12 +170,12 @@ RtpTransmissionManager::AddTrackPlanB( } std::vector adjusted_stream_ids = stream_ids; if (adjusted_stream_ids.empty()) { - adjusted_stream_ids.push_back(rtc::CreateRandomUuid()); + adjusted_stream_ids.push_back(CreateRandomUuid()); } - cricket::MediaType media_type = + webrtc::MediaType media_type = (track->kind() == MediaStreamTrackInterface::kAudioKind - ? cricket::MEDIA_TYPE_AUDIO - : cricket::MEDIA_TYPE_VIDEO); + ? webrtc::MediaType::AUDIO + : webrtc::MediaType::VIDEO); auto new_sender = CreateSender( media_type, track->id(), track, adjusted_stream_ids, init_send_encodings @@ -175,19 +201,19 @@ RtpTransmissionManager::AddTrackPlanB( new_sender->internal()->SetSsrc(sender_info->first_ssrc); } } - return rtc::scoped_refptr(new_sender); + return scoped_refptr(new_sender); } -RTCErrorOr> +RTCErrorOr> RtpTransmissionManager::AddTrackUnifiedPlan( - rtc::scoped_refptr track, + scoped_refptr track, const std::vector& stream_ids, const std::vector* init_send_encodings) { auto transceiver = FindFirstTransceiverForAddedTrack(track, init_send_encodings); if (transceiver) { RTC_LOG(LS_INFO) << "Reusing an existing " - << cricket::MediaTypeToString(transceiver->media_type()) + << webrtc::MediaTypeToString(transceiver->media_type()) << " transceiver for AddTrack."; if (transceiver->stopping()) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, @@ -205,25 +231,25 @@ RtpTransmissionManager::AddTrackUnifiedPlan( transceiver->internal()->sender_internal()->set_stream_ids(stream_ids); transceiver->internal()->set_reused_for_addtrack(true); } else { - cricket::MediaType media_type = + webrtc::MediaType media_type = (track->kind() == MediaStreamTrackInterface::kAudioKind - ? cricket::MEDIA_TYPE_AUDIO - : cricket::MEDIA_TYPE_VIDEO); - RTC_LOG(LS_INFO) << "Adding " << cricket::MediaTypeToString(media_type) + ? 
webrtc::MediaType::AUDIO + : webrtc::MediaType::VIDEO); + RTC_LOG(LS_INFO) << "Adding " << webrtc::MediaTypeToString(media_type) << " transceiver in response to a call to AddTrack."; std::string sender_id = track->id(); // Avoid creating a sender with an existing ID by generating a random ID. // This can happen if this is the second time AddTrack has created a sender // for this track. if (FindSenderById(sender_id)) { - sender_id = rtc::CreateRandomUuid(); + sender_id = CreateRandomUuid(); } auto sender = CreateSender( media_type, sender_id, track, stream_ids, init_send_encodings ? *init_send_encodings : std::vector(1, RtpEncodingParameters{})); - auto receiver = CreateReceiver(media_type, rtc::CreateRandomUuid()); + auto receiver = CreateReceiver(media_type, CreateRandomUuid()); transceiver = CreateAndAddTransceiver(sender, receiver); transceiver->internal()->set_created_by_addtrack(true); transceiver->internal()->set_direction(RtpTransceiverDirection::kSendRecv); @@ -231,28 +257,29 @@ RtpTransmissionManager::AddTrackUnifiedPlan( return transceiver->sender(); } -rtc::scoped_refptr> +scoped_refptr> RtpTransmissionManager::CreateSender( - cricket::MediaType media_type, + webrtc::MediaType media_type, const std::string& id, - rtc::scoped_refptr track, + scoped_refptr track, const std::vector& stream_ids, const std::vector& send_encodings) { RTC_DCHECK_RUN_ON(signaling_thread()); - rtc::scoped_refptr> sender; - if (media_type == cricket::MEDIA_TYPE_AUDIO) { + scoped_refptr> sender; + if (media_type == webrtc::MediaType::AUDIO) { RTC_DCHECK(!track || (track->kind() == MediaStreamTrackInterface::kAudioKind)); sender = RtpSenderProxyWithInternal::Create( signaling_thread(), - AudioRtpSender::Create(worker_thread(), id, legacy_stats_, this)); + AudioRtpSender::Create(env_, worker_thread(), id, legacy_stats_, this)); NoteUsageEvent(UsageEvent::AUDIO_ADDED); } else { - RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO); + RTC_DCHECK_EQ(media_type, webrtc::MediaType::VIDEO); RTC_DCHECK(!track || (track->kind() == MediaStreamTrackInterface::kVideoKind)); sender = RtpSenderProxyWithInternal::Create( - signaling_thread(), VideoRtpSender::Create(worker_thread(), id, this)); + signaling_thread(), + VideoRtpSender::Create(env_, worker_thread(), id, this)); NoteUsageEvent(UsageEvent::VIDEO_ADDED); } bool set_track_succeeded = sender->SetTrack(track.get()); @@ -262,35 +289,33 @@ RtpTransmissionManager::CreateSender( return sender; } -rtc::scoped_refptr> -RtpTransmissionManager::CreateReceiver(cricket::MediaType media_type, +scoped_refptr> +RtpTransmissionManager::CreateReceiver(webrtc::MediaType media_type, const std::string& receiver_id) { RTC_DCHECK_RUN_ON(signaling_thread()); - rtc::scoped_refptr> - receiver; - if (media_type == cricket::MEDIA_TYPE_AUDIO) { + scoped_refptr> receiver; + if (media_type == webrtc::MediaType::AUDIO) { receiver = RtpReceiverProxyWithInternal::Create( signaling_thread(), worker_thread(), - rtc::make_ref_counted(worker_thread(), receiver_id, - std::vector({}), - IsUnifiedPlan())); + make_ref_counted(worker_thread(), receiver_id, + std::vector({}), + IsUnifiedPlan())); NoteUsageEvent(UsageEvent::AUDIO_ADDED); } else { - RTC_DCHECK_EQ(media_type, cricket::MEDIA_TYPE_VIDEO); + RTC_DCHECK_EQ(media_type, webrtc::MediaType::VIDEO); receiver = RtpReceiverProxyWithInternal::Create( signaling_thread(), worker_thread(), - rtc::make_ref_counted(worker_thread(), receiver_id, - std::vector({}))); + make_ref_counted(worker_thread(), receiver_id, + std::vector({}))); 
NoteUsageEvent(UsageEvent::VIDEO_ADDED); } return receiver; } -rtc::scoped_refptr> +scoped_refptr> RtpTransmissionManager::CreateAndAddTransceiver( - rtc::scoped_refptr> sender, - rtc::scoped_refptr> - receiver) { + scoped_refptr> sender, + scoped_refptr> receiver) { RTC_DCHECK_RUN_ON(signaling_thread()); // Ensure that the new sender does not have an ID that is already in use by // another sender. @@ -299,9 +324,9 @@ RtpTransmissionManager::CreateAndAddTransceiver( RTC_DCHECK(!FindSenderById(sender->id())); auto transceiver = RtpTransceiverProxyWithInternal::Create( signaling_thread(), - rtc::make_ref_counted( - sender, receiver, context_, - sender->media_type() == cricket::MEDIA_TYPE_AUDIO + make_ref_counted( + sender, receiver, context_, codec_lookup_helper_, + sender->media_type() == webrtc::MediaType::AUDIO ? media_engine()->voice().GetRtpHeaderExtensions() : media_engine()->video().GetRtpHeaderExtensions(), [this_weak_ptr = weak_ptr_factory_.GetWeakPtr()]() { @@ -313,9 +338,9 @@ RtpTransmissionManager::CreateAndAddTransceiver( return transceiver; } -rtc::scoped_refptr> +scoped_refptr> RtpTransmissionManager::FindFirstTransceiverForAddedTrack( - rtc::scoped_refptr track, + scoped_refptr track, const std::vector* init_send_encodings) { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(track); @@ -324,8 +349,7 @@ RtpTransmissionManager::FindFirstTransceiverForAddedTrack( } for (auto transceiver : transceivers()->List()) { if (!transceiver->sender()->track() && - cricket::MediaTypeToString(transceiver->media_type()) == - track->kind() && + webrtc::MediaTypeToString(transceiver->media_type()) == track->kind() && !transceiver->internal()->has_ever_been_used_to_send() && !transceiver->stopped()) { return transceiver; @@ -334,10 +358,10 @@ RtpTransmissionManager::FindFirstTransceiverForAddedTrack( return nullptr; } -std::vector>> +std::vector>> RtpTransmissionManager::GetSendersInternal() const { RTC_DCHECK_RUN_ON(signaling_thread()); - std::vector>> + std::vector>> all_senders; for (const auto& transceiver : transceivers_.List()) { if (IsUnifiedPlan() && transceiver->internal()->stopped()) @@ -349,12 +373,10 @@ RtpTransmissionManager::GetSendersInternal() const { return all_senders; } -std::vector< - rtc::scoped_refptr>> +std::vector>> RtpTransmissionManager::GetReceiversInternal() const { RTC_DCHECK_RUN_ON(signaling_thread()); - std::vector< - rtc::scoped_refptr>> + std::vector>> all_receivers; for (const auto& transceiver : transceivers_.List()) { if (IsUnifiedPlan() && transceiver->internal()->stopped()) @@ -367,14 +389,14 @@ RtpTransmissionManager::GetReceiversInternal() const { return all_receivers; } -rtc::scoped_refptr> +scoped_refptr> RtpTransmissionManager::GetAudioTransceiver() const { RTC_DCHECK_RUN_ON(signaling_thread()); // This method only works with Plan B SDP, where there is a single // audio/video transceiver. RTC_DCHECK(!IsUnifiedPlan()); for (auto transceiver : transceivers_.List()) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { + if (transceiver->media_type() == webrtc::MediaType::AUDIO) { return transceiver; } } @@ -382,14 +404,14 @@ RtpTransmissionManager::GetAudioTransceiver() const { return nullptr; } -rtc::scoped_refptr> +scoped_refptr> RtpTransmissionManager::GetVideoTransceiver() const { RTC_DCHECK_RUN_ON(signaling_thread()); // This method only works with Plan B SDP, where there is a single // audio/video transceiver. 
RTC_DCHECK(!IsUnifiedPlan()); for (auto transceiver : transceivers_.List()) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { + if (transceiver->media_type() == webrtc::MediaType::VIDEO) { return transceiver; } } @@ -411,8 +433,8 @@ void RtpTransmissionManager::AddAudioTrack(AudioTrackInterface* track, } // Normal case; we've never seen this track before. - auto new_sender = CreateSender(cricket::MEDIA_TYPE_AUDIO, track->id(), - rtc::scoped_refptr(track), + auto new_sender = CreateSender(webrtc::MediaType::AUDIO, track->id(), + scoped_refptr(track), {stream->id()}, {{}}); new_sender->internal()->SetMediaChannel(voice_media_send_channel()); GetAudioTransceiver()->internal()->AddSender(new_sender); @@ -458,8 +480,8 @@ void RtpTransmissionManager::AddVideoTrack(VideoTrackInterface* track, } // Normal case; we've never seen this track before. - auto new_sender = CreateSender(cricket::MEDIA_TYPE_VIDEO, track->id(), - rtc::scoped_refptr(track), + auto new_sender = CreateSender(webrtc::MediaType::VIDEO, track->id(), + scoped_refptr(track), {stream->id()}, {{}}); new_sender->internal()->SetMediaChannel(video_media_send_channel()); GetVideoTransceiver()->internal()->AddSender(new_sender); @@ -487,11 +509,11 @@ void RtpTransmissionManager::CreateAudioReceiver( MediaStreamInterface* stream, const RtpSenderInfo& remote_sender_info) { RTC_DCHECK(!closed_); - std::vector> streams; - streams.push_back(rtc::scoped_refptr(stream)); + std::vector> streams; + streams.push_back(scoped_refptr(stream)); // TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use // the constructor taking stream IDs instead. - auto audio_receiver = rtc::make_ref_counted( + auto audio_receiver = make_ref_counted( worker_thread(), remote_sender_info.sender_id, streams, IsUnifiedPlan(), voice_media_receive_channel()); if (remote_sender_info.sender_id == kDefaultAudioSenderId) { @@ -511,17 +533,17 @@ void RtpTransmissionManager::CreateVideoReceiver( MediaStreamInterface* stream, const RtpSenderInfo& remote_sender_info) { RTC_DCHECK(!closed_); - std::vector> streams; - streams.push_back(rtc::scoped_refptr(stream)); + std::vector> streams; + streams.push_back(scoped_refptr(stream)); // TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use // the constructor taking stream IDs instead. - auto video_receiver = rtc::make_ref_counted( + auto video_receiver = make_ref_counted( worker_thread(), remote_sender_info.sender_id, streams); video_receiver->SetupMediaChannel( remote_sender_info.sender_id == kDefaultVideoSenderId - ? absl::nullopt - : absl::optional(remote_sender_info.first_ssrc), + ? std::nullopt + : std::optional(remote_sender_info.first_ssrc), video_media_receive_channel()); auto receiver = RtpReceiverProxyWithInternal::Create( @@ -533,7 +555,7 @@ void RtpTransmissionManager::CreateVideoReceiver( // TODO(deadbeef): Keep RtpReceivers around even if track goes away in remote // description. 
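The hunks in this file follow the same mechanical renames applied throughout the patch: cricket::MEDIA_TYPE_AUDIO/VIDEO become the scoped webrtc::MediaType enum, the rtc:: prefix is dropped from scoped_refptr/make_ref_counted/Thread, and absl::optional becomes std::optional. A minimal sketch of how downstream code reads after the rename; the helper names below are illustrative and do not appear in the patch:

    #include <cstdint>
    #include <optional>
    #include <string>

    #include "api/media_types.h"

    // Post-rename spellings only; the logic mirrors the AddTrack and
    // CreateVideoReceiver hunks above.
    webrtc::MediaType KindToMediaType(const std::string& kind) {
      return kind == "audio" ? webrtc::MediaType::AUDIO
                             : webrtc::MediaType::VIDEO;
    }

    std::optional<uint32_t> MaybeSsrc(bool is_default_sender,
                                      uint32_t first_ssrc) {
      return is_default_sender ? std::nullopt
                               : std::optional<uint32_t>(first_ssrc);
    }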
-rtc::scoped_refptr +scoped_refptr RtpTransmissionManager::RemoveAndStopReceiver( const RtpSenderInfo& remote_sender_info) { auto receiver = FindReceiverById(remote_sender_info.sender_id); @@ -542,7 +564,7 @@ RtpTransmissionManager::RemoveAndStopReceiver( << remote_sender_info.sender_id << " doesn't exist."; return nullptr; } - if (receiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { + if (receiver->media_type() == webrtc::MediaType::AUDIO) { GetAudioTransceiver()->internal()->RemoveReceiver(receiver.get()); } else { GetVideoTransceiver()->internal()->RemoveReceiver(receiver.get()); @@ -553,15 +575,15 @@ RtpTransmissionManager::RemoveAndStopReceiver( void RtpTransmissionManager::OnRemoteSenderAdded( const RtpSenderInfo& sender_info, MediaStreamInterface* stream, - cricket::MediaType media_type) { + webrtc::MediaType media_type) { RTC_DCHECK_RUN_ON(signaling_thread()); - RTC_LOG(LS_INFO) << "Creating " << cricket::MediaTypeToString(media_type) + RTC_LOG(LS_INFO) << "Creating " << webrtc::MediaTypeToString(media_type) << " receiver for track_id=" << sender_info.sender_id << " and stream_id=" << sender_info.stream_id; - if (media_type == cricket::MEDIA_TYPE_AUDIO) { + if (media_type == webrtc::MediaType::AUDIO) { CreateAudioReceiver(stream, sender_info); - } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { + } else if (media_type == webrtc::MediaType::VIDEO) { CreateVideoReceiver(stream, sender_info); } else { RTC_DCHECK_NOTREACHED() << "Invalid media type"; @@ -571,27 +593,27 @@ void RtpTransmissionManager::OnRemoteSenderAdded( void RtpTransmissionManager::OnRemoteSenderRemoved( const RtpSenderInfo& sender_info, MediaStreamInterface* stream, - cricket::MediaType media_type) { + webrtc::MediaType media_type) { RTC_DCHECK_RUN_ON(signaling_thread()); - RTC_LOG(LS_INFO) << "Removing " << cricket::MediaTypeToString(media_type) + RTC_LOG(LS_INFO) << "Removing " << webrtc::MediaTypeToString(media_type) << " receiver for track_id=" << sender_info.sender_id << " and stream_id=" << sender_info.stream_id; - rtc::scoped_refptr receiver; - if (media_type == cricket::MEDIA_TYPE_AUDIO) { + scoped_refptr receiver; + if (media_type == webrtc::MediaType::AUDIO) { // When the MediaEngine audio channel is destroyed, the RemoteAudioSource // will be notified which will end the AudioRtpReceiver::track(). receiver = RemoveAndStopReceiver(sender_info); - rtc::scoped_refptr audio_track = + scoped_refptr audio_track = stream->FindAudioTrack(sender_info.sender_id); if (audio_track) { stream->RemoveTrack(audio_track); } - } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { + } else if (media_type == webrtc::MediaType::VIDEO) { // Stopping or destroying a VideoRtpReceiver will end the // VideoRtpReceiver::track(). receiver = RemoveAndStopReceiver(sender_info); - rtc::scoped_refptr video_track = + scoped_refptr video_track = stream->FindVideoTrack(sender_info.sender_id); if (video_track) { // There's no guarantee the track is still available, e.g. 
the track may @@ -609,7 +631,7 @@ void RtpTransmissionManager::OnRemoteSenderRemoved( void RtpTransmissionManager::OnLocalSenderAdded( const RtpSenderInfo& sender_info, - cricket::MediaType media_type) { + webrtc::MediaType media_type) { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(!IsUnifiedPlan()); auto sender = FindSenderById(sender_info.sender_id); @@ -632,7 +654,7 @@ void RtpTransmissionManager::OnLocalSenderAdded( void RtpTransmissionManager::OnLocalSenderRemoved( const RtpSenderInfo& sender_info, - cricket::MediaType media_type) { + webrtc::MediaType media_type) { RTC_DCHECK_RUN_ON(signaling_thread()); auto sender = FindSenderById(sender_info.sender_id); if (!sender) { @@ -654,20 +676,19 @@ void RtpTransmissionManager::OnLocalSenderRemoved( } std::vector* RtpTransmissionManager::GetRemoteSenderInfos( - cricket::MediaType media_type) { - RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO); - return (media_type == cricket::MEDIA_TYPE_AUDIO) - ? &remote_audio_sender_infos_ - : &remote_video_sender_infos_; + webrtc::MediaType media_type) { + RTC_DCHECK(media_type == webrtc::MediaType::AUDIO || + media_type == webrtc::MediaType::VIDEO); + return (media_type == webrtc::MediaType::AUDIO) ? &remote_audio_sender_infos_ + : &remote_video_sender_infos_; } std::vector* RtpTransmissionManager::GetLocalSenderInfos( - cricket::MediaType media_type) { - RTC_DCHECK(media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO); - return (media_type == cricket::MEDIA_TYPE_AUDIO) ? &local_audio_sender_infos_ - : &local_video_sender_infos_; + webrtc::MediaType media_type) { + RTC_DCHECK(media_type == webrtc::MediaType::AUDIO || + media_type == webrtc::MediaType::VIDEO); + return (media_type == webrtc::MediaType::AUDIO) ? 
&local_audio_sender_infos_ + : &local_video_sender_infos_; } const RtpSenderInfo* RtpTransmissionManager::FindSenderInfo( @@ -683,7 +704,7 @@ const RtpSenderInfo* RtpTransmissionManager::FindSenderInfo( return nullptr; } -rtc::scoped_refptr> +scoped_refptr> RtpTransmissionManager::FindSenderForTrack( MediaStreamTrackInterface* track) const { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -697,7 +718,7 @@ RtpTransmissionManager::FindSenderForTrack( return nullptr; } -rtc::scoped_refptr> +scoped_refptr> RtpTransmissionManager::FindSenderById(const std::string& sender_id) const { RTC_DCHECK_RUN_ON(signaling_thread()); for (const auto& transceiver : transceivers_.List()) { @@ -710,7 +731,7 @@ RtpTransmissionManager::FindSenderById(const std::string& sender_id) const { return nullptr; } -rtc::scoped_refptr> +scoped_refptr> RtpTransmissionManager::FindReceiverById(const std::string& receiver_id) const { RTC_DCHECK_RUN_ON(signaling_thread()); for (const auto& transceiver : transceivers_.List()) { @@ -723,7 +744,7 @@ RtpTransmissionManager::FindReceiverById(const std::string& receiver_id) const { return nullptr; } -cricket::MediaEngineInterface* RtpTransmissionManager::media_engine() const { +MediaEngineInterface* RtpTransmissionManager::media_engine() const { return context_->media_engine(); } diff --git a/pc/rtp_transmission_manager.h b/pc/rtp_transmission_manager.h index 5a4bf83526..6afb454ab3 100644 --- a/pc/rtp_transmission_manager.h +++ b/pc/rtp_transmission_manager.h @@ -17,6 +17,7 @@ #include #include +#include "api/environment/environment.h" #include "api/media_stream_interface.h" #include "api/media_types.h" #include "api/peer_connection_interface.h" @@ -27,6 +28,8 @@ #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "media/base/media_channel.h" +#include "pc/codec_vendor.h" +#include "pc/connection_context.h" #include "pc/legacy_stats_collector_interface.h" #include "pc/rtp_receiver.h" #include "pc/rtp_receiver_proxy.h" @@ -37,12 +40,9 @@ #include "pc/usage_pattern.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" +#include "rtc_base/unique_id_generator.h" #include "rtc_base/weak_ptr.h" -namespace rtc { -class Thread; -} - namespace webrtc { // This class contains information about @@ -70,8 +70,10 @@ struct RtpSenderInfo { // RtpTransceiver. class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { public: - RtpTransmissionManager(bool is_unified_plan, + RtpTransmissionManager(const Environment& env, + bool is_unified_plan, ConnectionContext* context, + CodecLookupHelper* codec_lookup_helper, UsagePattern* usage_pattern, PeerConnectionObserver* observer, LegacyStatsCollectorInterface* legacy_stats, @@ -88,53 +90,52 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { void OnSetStreams() override; // Add a new track, creating transceiver if required. - RTCErrorOr> AddTrack( - rtc::scoped_refptr track, + RTCErrorOr> AddTrack( + scoped_refptr track, const std::vector& stream_ids, const std::vector* init_send_encodings); // Create a new RTP sender. Does not associate with a transceiver. - rtc::scoped_refptr> - CreateSender(cricket::MediaType media_type, - const std::string& id, - rtc::scoped_refptr track, - const std::vector& stream_ids, - const std::vector& send_encodings); + scoped_refptr> CreateSender( + webrtc::MediaType media_type, + const std::string& id, + scoped_refptr track, + const std::vector& stream_ids, + const std::vector& send_encodings); // Create a new RTP receiver. 
Does not associate with a transceiver. - rtc::scoped_refptr> - CreateReceiver(cricket::MediaType media_type, const std::string& receiver_id); + scoped_refptr> + CreateReceiver(webrtc::MediaType media_type, const std::string& receiver_id); // Create a new RtpTransceiver of the given type and add it to the list of // registered transceivers. - rtc::scoped_refptr> + scoped_refptr> CreateAndAddTransceiver( - rtc::scoped_refptr> sender, - rtc::scoped_refptr> + scoped_refptr> sender, + scoped_refptr> receiver); // Returns the first RtpTransceiver suitable for a newly added track, if such // transceiver is available. - rtc::scoped_refptr> + scoped_refptr> FindFirstTransceiverForAddedTrack( - rtc::scoped_refptr track, + scoped_refptr track, const std::vector* init_send_encodings); // Returns the list of senders currently associated with some // registered transceiver - std::vector>> + std::vector>> GetSendersInternal() const; // Returns the list of receivers currently associated with a transceiver - std::vector< - rtc::scoped_refptr>> + std::vector>> GetReceiversInternal() const; // Plan B: Get the transceiver containing all audio senders and receivers - rtc::scoped_refptr> + scoped_refptr> GetAudioTransceiver() const; // Plan B: Get the transceiver containing all video senders and receivers - rtc::scoped_refptr> + scoped_refptr> GetVideoTransceiver() const; // Add an audio track, reusing or creating the sender. @@ -153,14 +154,14 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { // implementation and triggers CreateAudioReceiver or CreateVideoReceiver. void OnRemoteSenderAdded(const RtpSenderInfo& sender_info, MediaStreamInterface* stream, - cricket::MediaType media_type); + webrtc::MediaType media_type); // Triggered when a remote sender has been removed from a remote session // description. It removes the remote sender with id `sender_id` from a remote // MediaStream and triggers DestroyAudioReceiver or DestroyVideoReceiver. void OnRemoteSenderRemoved(const RtpSenderInfo& sender_info, MediaStreamInterface* stream, - cricket::MediaType media_type); + webrtc::MediaType media_type); // Triggered when a local sender has been seen for the first time in a local // session description. @@ -168,7 +169,7 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { // streams in the local SessionDescription can be mapped to a MediaStreamTrack // in a MediaStream in `local_streams_` void OnLocalSenderAdded(const RtpSenderInfo& sender_info, - cricket::MediaType media_type); + webrtc::MediaType media_type); // Triggered when a local sender has been removed from a local session // description. @@ -176,26 +177,25 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { // has been removed from the local SessionDescription and the stream can be // mapped to a MediaStreamTrack in a MediaStream in `local_streams_`. void OnLocalSenderRemoved(const RtpSenderInfo& sender_info, - cricket::MediaType media_type); + webrtc::MediaType media_type); std::vector* GetRemoteSenderInfos( - cricket::MediaType media_type); - std::vector* GetLocalSenderInfos( - cricket::MediaType media_type); + webrtc::MediaType media_type); + std::vector* GetLocalSenderInfos(webrtc::MediaType media_type); const RtpSenderInfo* FindSenderInfo(const std::vector& infos, const std::string& stream_id, const std::string& sender_id) const; // Return the RtpSender with the given track attached. 
- rtc::scoped_refptr> + scoped_refptr> FindSenderForTrack(MediaStreamTrackInterface* track) const; // Return the RtpSender with the given id, or null if none exists. - rtc::scoped_refptr> - FindSenderById(const std::string& sender_id) const; + scoped_refptr> FindSenderById( + const std::string& sender_id) const; // Return the RtpReceiver with the given id, or null if none exists. - rtc::scoped_refptr> + scoped_refptr> FindReceiverById(const std::string& receiver_id) const; TransceiverList* transceivers() { return &transceivers_; } @@ -203,29 +203,27 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { // Plan B helpers for getting the voice/video media channels for the single // audio/video transceiver, if it exists. - cricket::VoiceMediaSendChannelInterface* voice_media_send_channel() const; - cricket::VideoMediaSendChannelInterface* video_media_send_channel() const; - cricket::VoiceMediaReceiveChannelInterface* voice_media_receive_channel() - const; - cricket::VideoMediaReceiveChannelInterface* video_media_receive_channel() - const; + VoiceMediaSendChannelInterface* voice_media_send_channel() const; + VideoMediaSendChannelInterface* video_media_send_channel() const; + VoiceMediaReceiveChannelInterface* voice_media_receive_channel() const; + VideoMediaReceiveChannelInterface* video_media_receive_channel() const; private: - rtc::Thread* signaling_thread() const { return context_->signaling_thread(); } - rtc::Thread* worker_thread() const { return context_->worker_thread(); } + Thread* signaling_thread() const { return context_->signaling_thread(); } + Thread* worker_thread() const { return context_->worker_thread(); } bool IsUnifiedPlan() const { return is_unified_plan_; } void NoteUsageEvent(UsageEvent event) { usage_pattern_->NoteUsageEvent(event); } // AddTrack implementation when Unified Plan is specified. - RTCErrorOr> AddTrackUnifiedPlan( - rtc::scoped_refptr track, + RTCErrorOr> AddTrackUnifiedPlan( + scoped_refptr track, const std::vector& stream_ids, const std::vector* init_send_encodings); // AddTrack implementation when Plan B is specified. - RTCErrorOr> AddTrackPlanB( - rtc::scoped_refptr track, + RTCErrorOr> AddTrackPlanB( + scoped_refptr track, const std::vector& stream_ids, const std::vector* init_send_encodings); @@ -238,18 +236,19 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { void CreateVideoReceiver(MediaStreamInterface* stream, const RtpSenderInfo& remote_sender_info) RTC_RUN_ON(signaling_thread()); - rtc::scoped_refptr RemoveAndStopReceiver( + scoped_refptr RemoveAndStopReceiver( const RtpSenderInfo& remote_sender_info) RTC_RUN_ON(signaling_thread()); PeerConnectionObserver* Observer() const; void OnNegotiationNeeded(); - cricket::MediaEngineInterface* media_engine() const; + MediaEngineInterface* media_engine() const; - rtc::UniqueRandomIdGenerator* ssrc_generator() const { + UniqueRandomIdGenerator* ssrc_generator() const { return context_->ssrc_generator(); } + const Environment env_; TransceiverList transceivers_; // These lists store sender info seen in local/remote descriptions. 
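Since the constructor now takes an Environment and a CodecLookupHelper (stored in env_ and codec_lookup_helper_ above), call sites have to thread the two extra arguments through. A rough sketch of an updated call site under that assumption; none of the function or variable names below appear in the patch:

    #include <memory>

    #include "pc/rtp_transmission_manager.h"

    // Sketch only: the dependencies are assumed to be owned elsewhere
    // (typically by the PeerConnection that embeds the manager).
    std::unique_ptr<webrtc::RtpTransmissionManager> BuildManager(
        const webrtc::Environment& env,
        webrtc::ConnectionContext* context,
        webrtc::CodecLookupHelper* codec_lookup_helper,
        webrtc::UsagePattern* usage_pattern,
        webrtc::PeerConnectionObserver* observer,
        webrtc::LegacyStatsCollectorInterface* legacy_stats) {
      return std::make_unique<webrtc::RtpTransmissionManager>(
          env,                  // new parameter in this patch
          /*is_unified_plan=*/true, context,
          codec_lookup_helper,  // new parameter in this patch
          usage_pattern, observer, legacy_stats,
          /*on_negotiation_needed=*/[] {});
    }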
@@ -265,11 +264,12 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { bool closed_ = false; bool const is_unified_plan_; ConnectionContext* context_; + CodecLookupHelper* codec_lookup_helper_; UsagePattern* usage_pattern_; PeerConnectionObserver* observer_; LegacyStatsCollectorInterface* const legacy_stats_; std::function on_negotiation_needed_; - rtc::WeakPtrFactory weak_ptr_factory_ + WeakPtrFactory weak_ptr_factory_ RTC_GUARDED_BY(signaling_thread()); }; diff --git a/pc/rtp_transport.cc b/pc/rtp_transport.cc index 653b51fd9e..2ab4ef55a7 100644 --- a/pc/rtp_transport.cc +++ b/pc/rtp_transport.cc @@ -13,16 +13,29 @@ #include #include +#include +#include #include -#include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" #include "api/units/timestamp.h" +#include "call/rtp_demuxer.h" #include "media/base/rtp_utils.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "p2p/base/packet_transport_internal.h" +#include "pc/session_description.h" #include "rtc_base/checks.h" +#include "rtc_base/containers/flat_set.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" +#include "rtc_base/network/ecn_marking.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_route.h" +#include "rtc_base/socket.h" #include "rtc_base/trace_event.h" namespace webrtc { @@ -36,11 +49,11 @@ const std::string& RtpTransport::transport_name() const { return rtp_packet_transport_->transport_name(); } -int RtpTransport::SetRtpOption(rtc::Socket::Option opt, int value) { +int RtpTransport::SetRtpOption(Socket::Option opt, int value) { return rtp_packet_transport_->SetOption(opt, value); } -int RtpTransport::SetRtcpOption(rtc::Socket::Option opt, int value) { +int RtpTransport::SetRtcpOption(Socket::Option opt, int value) { if (rtcp_packet_transport_) { return rtcp_packet_transport_->SetOption(opt, value); } @@ -48,24 +61,27 @@ int RtpTransport::SetRtcpOption(rtc::Socket::Option opt, int value) { } void RtpTransport::SetRtpPacketTransport( - rtc::PacketTransportInternal* new_packet_transport) { + PacketTransportInternal* new_packet_transport) { if (new_packet_transport == rtp_packet_transport_) { return; } if (rtp_packet_transport_) { rtp_packet_transport_->SignalReadyToSend.disconnect(this); - rtp_packet_transport_->SignalReadPacket.disconnect(this); + rtp_packet_transport_->DeregisterReceivedPacketCallback(this); rtp_packet_transport_->SignalNetworkRouteChanged.disconnect(this); rtp_packet_transport_->SignalWritableState.disconnect(this); rtp_packet_transport_->SignalSentPacket.disconnect(this); // Reset the network route of the old transport. 
- SendNetworkRouteChanged(absl::optional()); + SendNetworkRouteChanged(std::optional()); } if (new_packet_transport) { new_packet_transport->SignalReadyToSend.connect( this, &RtpTransport::OnReadyToSend); - new_packet_transport->SignalReadPacket.connect(this, - &RtpTransport::OnReadPacket); + new_packet_transport->RegisterReceivedPacketCallback( + this, [&](PacketTransportInternal* transport, + const ReceivedIpPacket& packet) { + OnReadPacket(transport, packet); + }); new_packet_transport->SignalNetworkRouteChanged.connect( this, &RtpTransport::OnNetworkRouteChanged); new_packet_transport->SignalWritableState.connect( @@ -77,31 +93,32 @@ void RtpTransport::SetRtpPacketTransport( } rtp_packet_transport_ = new_packet_transport; - // Assumes the transport is ready to send if it is writable. If we are wrong, - // ready to send will be updated the next time we try to send. SetReadyToSend(false, rtp_packet_transport_ && rtp_packet_transport_->writable()); } void RtpTransport::SetRtcpPacketTransport( - rtc::PacketTransportInternal* new_packet_transport) { + PacketTransportInternal* new_packet_transport) { if (new_packet_transport == rtcp_packet_transport_) { return; } if (rtcp_packet_transport_) { rtcp_packet_transport_->SignalReadyToSend.disconnect(this); - rtcp_packet_transport_->SignalReadPacket.disconnect(this); + rtcp_packet_transport_->DeregisterReceivedPacketCallback(this); rtcp_packet_transport_->SignalNetworkRouteChanged.disconnect(this); rtcp_packet_transport_->SignalWritableState.disconnect(this); rtcp_packet_transport_->SignalSentPacket.disconnect(this); // Reset the network route of the old transport. - SendNetworkRouteChanged(absl::optional()); + SendNetworkRouteChanged(std::optional()); } if (new_packet_transport) { new_packet_transport->SignalReadyToSend.connect( this, &RtpTransport::OnReadyToSend); - new_packet_transport->SignalReadPacket.connect(this, - &RtpTransport::OnReadPacket); + new_packet_transport->RegisterReceivedPacketCallback( + this, [&](PacketTransportInternal* transport, + const ReceivedIpPacket& packet) { + OnReadPacket(transport, packet); + }); new_packet_transport->SignalNetworkRouteChanged.connect( this, &RtpTransport::OnNetworkRouteChanged); new_packet_transport->SignalWritableState.connect( @@ -113,44 +130,47 @@ void RtpTransport::SetRtcpPacketTransport( } rtcp_packet_transport_ = new_packet_transport; - // Assumes the transport is ready to send if it is writable. If we are wrong, - // ready to send will be updated the next time we try to send. + // Assumes the transport is ready to send if it is writable. SetReadyToSend(true, rtcp_packet_transport_ && rtcp_packet_transport_->writable()); } bool RtpTransport::IsWritable(bool rtcp) const { - rtc::PacketTransportInternal* transport = rtcp && !rtcp_mux_enabled_ - ? rtcp_packet_transport_ - : rtp_packet_transport_; + PacketTransportInternal* transport = rtcp && !rtcp_mux_enabled_ + ? 
rtcp_packet_transport_ + : rtp_packet_transport_; return transport && transport->writable(); } -bool RtpTransport::SendRtpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, +bool RtpTransport::SendRtpPacket(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options, int flags) { return SendPacket(false, packet, options, flags); } -bool RtpTransport::SendRtcpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, +bool RtpTransport::SendRtcpPacket(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options, int flags) { return SendPacket(true, packet, options, flags); } bool RtpTransport::SendPacket(bool rtcp, - rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, + CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options, int flags) { - rtc::PacketTransportInternal* transport = rtcp && !rtcp_mux_enabled_ - ? rtcp_packet_transport_ - : rtp_packet_transport_; + PacketTransportInternal* transport = rtcp && !rtcp_mux_enabled_ + ? rtcp_packet_transport_ + : rtp_packet_transport_; int ret = transport->SendPacket(packet->cdata(), packet->size(), options, flags); if (ret != static_cast(packet->size())) { - if (transport->GetError() == ENOTCONN) { - RTC_LOG(LS_WARNING) << "Got ENOTCONN from transport."; - SetReadyToSend(rtcp, false); + if (set_ready_to_send_false_if_send_fail_) { + // TODO: webrtc:361124449 - Remove SetReadyToSend if field trial + // WebRTC-SetReadyToSendFalseIfSendFail succeed 2024-12-01. + if (transport->GetError() == ENOTCONN) { + RTC_LOG(LS_WARNING) << "Got ENOTCONN from transport."; + SetReadyToSend(rtcp, false); + } } return false; } @@ -158,7 +178,7 @@ bool RtpTransport::SendPacket(bool rtcp, } void RtpTransport::UpdateRtpHeaderExtensionMap( - const cricket::RtpHeaderExtensions& header_extensions) { + const RtpHeaderExtensions& header_extensions) { header_extension_map_ = RtpHeaderExtensionMap(header_extensions); } @@ -180,12 +200,17 @@ bool RtpTransport::UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) { return true; } -void RtpTransport::DemuxPacket(rtc::CopyOnWriteBuffer packet, - int64_t packet_time_us) { - webrtc::RtpPacketReceived parsed_packet( - &header_extension_map_, packet_time_us == -1 - ? Timestamp::MinusInfinity() - : Timestamp::Micros(packet_time_us)); +flat_set RtpTransport::GetSsrcsForSink(RtpPacketSinkInterface* sink) { + return rtp_demuxer_.GetSsrcsForSink(sink); +} + +void RtpTransport::DemuxPacket(CopyOnWriteBuffer packet, + webrtc::Timestamp arrival_time, + EcnMarking ecn) { + RtpPacketReceived parsed_packet(&header_extension_map_); + parsed_packet.set_arrival_time(arrival_time); + parsed_packet.set_ecn(ecn); + if (!parsed_packet.Parse(std::move(packet))) { RTC_LOG(LS_ERROR) << "Failed to parse the incoming RTP packet before demuxing. 
Drop it."; @@ -206,68 +231,79 @@ bool RtpTransport::IsTransportWritable() { (!rtcp_packet_transport || rtcp_packet_transport->writable()); } -void RtpTransport::OnReadyToSend(rtc::PacketTransportInternal* transport) { +void RtpTransport::OnReadyToSend(PacketTransportInternal* transport) { SetReadyToSend(transport == rtcp_packet_transport_, true); } void RtpTransport::OnNetworkRouteChanged( - absl::optional network_route) { + std::optional network_route) { SendNetworkRouteChanged(network_route); } -void RtpTransport::OnWritableState( - rtc::PacketTransportInternal* packet_transport) { +void RtpTransport::OnWritableState(PacketTransportInternal* packet_transport) { RTC_DCHECK(packet_transport == rtp_packet_transport_ || packet_transport == rtcp_packet_transport_); SendWritableState(IsTransportWritable()); } -void RtpTransport::OnSentPacket(rtc::PacketTransportInternal* packet_transport, - const rtc::SentPacket& sent_packet) { +void RtpTransport::OnSentPacket(PacketTransportInternal* packet_transport, + const SentPacketInfo& sent_packet) { RTC_DCHECK(packet_transport == rtp_packet_transport_ || packet_transport == rtcp_packet_transport_); + if (processing_sent_packet_) { + TaskQueueBase::Current()->PostTask(SafeTask( + safety_.flag(), [this, sent_packet] { SendSentPacket(sent_packet); })); + return; + } + processing_sent_packet_ = true; SendSentPacket(sent_packet); + processing_sent_packet_ = false; } -void RtpTransport::OnRtpPacketReceived(rtc::CopyOnWriteBuffer packet, - int64_t packet_time_us) { - DemuxPacket(packet, packet_time_us); +void RtpTransport::OnRtpPacketReceived( + const ReceivedIpPacket& received_packet) { + CopyOnWriteBuffer payload(received_packet.payload()); + DemuxPacket( + payload, + received_packet.arrival_time().value_or(Timestamp::MinusInfinity()), + received_packet.ecn()); } -void RtpTransport::OnRtcpPacketReceived(rtc::CopyOnWriteBuffer packet, - int64_t packet_time_us) { - SendRtcpPacketReceived(&packet, packet_time_us); +void RtpTransport::OnRtcpPacketReceived( + const ReceivedIpPacket& received_packet) { + CopyOnWriteBuffer payload(received_packet.payload()); + // TODO(bugs.webrtc.org/15368): Propagate timestamp and maybe received packet + // further. + SendRtcpPacketReceived(&payload, received_packet.arrival_time() + ? received_packet.arrival_time()->us() + : -1); } -void RtpTransport::OnReadPacket(rtc::PacketTransportInternal* transport, - const char* data, - size_t len, - const int64_t& packet_time_us, - int flags) { +void RtpTransport::OnReadPacket(PacketTransportInternal* transport, + const ReceivedIpPacket& received_packet) { TRACE_EVENT0("webrtc", "RtpTransport::OnReadPacket"); // When using RTCP multiplexing we might get RTCP packets on the RTP // transport. We check the RTP payload type to determine if it is RTCP. - auto array_view = rtc::MakeArrayView(data, len); - cricket::RtpPacketType packet_type = cricket::InferRtpPacketType(array_view); + RtpPacketType packet_type = InferRtpPacketType(received_packet.payload()); // Filter out the packet that is neither RTP nor RTCP. - if (packet_type == cricket::RtpPacketType::kUnknown) { + if (packet_type == RtpPacketType::kUnknown) { return; } // Protect ourselves against crazy data. 
- if (!cricket::IsValidRtpPacketSize(packet_type, len)) { + if (!IsValidRtpPacketSize(packet_type, received_packet.payload().size())) { RTC_LOG(LS_ERROR) << "Dropping incoming " - << cricket::RtpPacketTypeToString(packet_type) - << " packet: wrong size=" << len; + << RtpPacketTypeToString(packet_type) + << " packet: wrong size=" + << received_packet.payload().size(); return; } - rtc::CopyOnWriteBuffer packet(data, len); - if (packet_type == cricket::RtpPacketType::kRtcp) { - OnRtcpPacketReceived(std::move(packet), packet_time_us); + if (packet_type == RtpPacketType::kRtcp) { + OnRtcpPacketReceived(received_packet); } else { - OnRtpPacketReceived(std::move(packet), packet_time_us); + OnRtpPacketReceived(received_packet); } } diff --git a/pc/rtp_transport.h b/pc/rtp_transport.h index 456c91c370..4394b16b3b 100644 --- a/pc/rtp_transport.h +++ b/pc/rtp_transport.h @@ -14,10 +14,12 @@ #include #include +#include #include -#include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/task_queue/pending_task_safety_flag.h" +#include "api/units/timestamp.h" #include "call/rtp_demuxer.h" #include "call/video_receive_stream.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" @@ -25,63 +27,62 @@ #include "pc/rtp_transport_internal.h" #include "pc/session_description.h" #include "rtc_base/async_packet_socket.h" +#include "rtc_base/containers/flat_set.h" #include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/network/ecn_marking.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" #include "rtc_base/socket.h" -namespace rtc { +namespace webrtc { class CopyOnWriteBuffer; -struct PacketOptions; -class PacketTransportInternal; - -} // namespace rtc - -namespace webrtc { class RtpTransport : public RtpTransportInternal { public: RtpTransport(const RtpTransport&) = delete; RtpTransport& operator=(const RtpTransport&) = delete; - explicit RtpTransport(bool rtcp_mux_enabled) - : rtcp_mux_enabled_(rtcp_mux_enabled) {} + RtpTransport(bool rtcp_mux_enabled, const FieldTrialsView& field_trials) + : set_ready_to_send_false_if_send_fail_( + field_trials.IsEnabled("WebRTC-SetReadyToSendFalseIfSendFail")), + rtcp_mux_enabled_(rtcp_mux_enabled) {} bool rtcp_mux_enabled() const override { return rtcp_mux_enabled_; } void SetRtcpMuxEnabled(bool enable) override; const std::string& transport_name() const override; - int SetRtpOption(rtc::Socket::Option opt, int value) override; - int SetRtcpOption(rtc::Socket::Option opt, int value) override; + int SetRtpOption(Socket::Option opt, int value) override; + int SetRtcpOption(Socket::Option opt, int value) override; - rtc::PacketTransportInternal* rtp_packet_transport() const { + PacketTransportInternal* rtp_packet_transport() const { return rtp_packet_transport_; } - void SetRtpPacketTransport(rtc::PacketTransportInternal* rtp); + void SetRtpPacketTransport(PacketTransportInternal* rtp); - rtc::PacketTransportInternal* rtcp_packet_transport() const { + PacketTransportInternal* rtcp_packet_transport() const { return rtcp_packet_transport_; } - void SetRtcpPacketTransport(rtc::PacketTransportInternal* rtcp); + void SetRtcpPacketTransport(PacketTransportInternal* rtcp); bool IsReadyToSend() const override { return ready_to_send_; } bool IsWritable(bool rtcp) const override; - bool SendRtpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, + bool SendRtpPacket(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options, int flags) 
override; - bool SendRtcpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, + bool SendRtcpPacket(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options, int flags) override; bool IsSrtpActive() const override { return false; } void UpdateRtpHeaderExtensionMap( - const cricket::RtpHeaderExtensions& header_extensions) override; + const RtpHeaderExtensions& header_extensions) override; bool RegisterRtpDemuxerSink(const RtpDemuxerCriteria& criteria, RtpPacketSinkInterface* sink) override; @@ -90,32 +91,29 @@ class RtpTransport : public RtpTransportInternal { protected: // These methods will be used in the subclasses. - void DemuxPacket(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us); + void DemuxPacket(CopyOnWriteBuffer packet, + Timestamp arrival_time, + EcnMarking ecn); bool SendPacket(bool rtcp, - rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, + CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options, int flags); + flat_set GetSsrcsForSink(RtpPacketSinkInterface* sink); // Overridden by SrtpTransport. - virtual void OnNetworkRouteChanged( - absl::optional network_route); - virtual void OnRtpPacketReceived(rtc::CopyOnWriteBuffer packet, - int64_t packet_time_us); - virtual void OnRtcpPacketReceived(rtc::CopyOnWriteBuffer packet, - int64_t packet_time_us); + virtual void OnNetworkRouteChanged(std::optional network_route); + virtual void OnRtpPacketReceived(const ReceivedIpPacket& packet); + virtual void OnRtcpPacketReceived(const ReceivedIpPacket& packet); // Overridden by SrtpTransport and DtlsSrtpTransport. - virtual void OnWritableState(rtc::PacketTransportInternal* packet_transport); + virtual void OnWritableState(PacketTransportInternal* packet_transport); private: - void OnReadyToSend(rtc::PacketTransportInternal* transport); - void OnSentPacket(rtc::PacketTransportInternal* packet_transport, - const rtc::SentPacket& sent_packet); - void OnReadPacket(rtc::PacketTransportInternal* transport, - const char* data, - size_t len, - const int64_t& packet_time_us, - int flags); + void OnReadyToSend(PacketTransportInternal* transport); + void OnSentPacket(PacketTransportInternal* packet_transport, + const SentPacketInfo& sent_packet); + void OnReadPacket(PacketTransportInternal* transport, + const ReceivedIpPacket& received_packet); // Updates "ready to send" for an individual channel and fires // SignalReadyToSend. 
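With the constructor change above, an RtpTransport can no longer be built from the rtcp-mux flag alone; a FieldTrialsView must be supplied, and the WebRTC-SetReadyToSendFalseIfSendFail trial decides whether a send failure with ENOTCONN still clears the ready-to-send flag (it does only while the trial is enabled). A small sketch of how a test could opt in, reusing the ExplicitKeyValueConfig helper the updated unit tests already include; the function name is illustrative:

    #include "pc/rtp_transport.h"
    #include "test/explicit_key_value_config.h"

    // Illustrative only: enables the kill-switch path checked in SendPacket().
    void MakeTransportWithKillSwitchEnabled() {
      webrtc::test::ExplicitKeyValueConfig field_trials(
          "WebRTC-SetReadyToSendFalseIfSendFail/Enabled/");
      webrtc::RtpTransport transport(/*rtcp_mux_enabled=*/true, field_trials);
      // ... attach packet transports and exercise SendRtpPacket() as usual.
    }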
@@ -125,10 +123,11 @@ class RtpTransport : public RtpTransportInternal { bool IsTransportWritable(); + const bool set_ready_to_send_false_if_send_fail_; bool rtcp_mux_enabled_; - rtc::PacketTransportInternal* rtp_packet_transport_ = nullptr; - rtc::PacketTransportInternal* rtcp_packet_transport_ = nullptr; + PacketTransportInternal* rtp_packet_transport_ = nullptr; + PacketTransportInternal* rtcp_packet_transport_ = nullptr; bool ready_to_send_ = false; bool rtp_ready_to_send_ = false; @@ -140,6 +139,7 @@ class RtpTransport : public RtpTransportInternal { RtpHeaderExtensionMap header_extension_map_; // Guard against recursive "ready to send" signals bool processing_ready_to_send_ = false; + bool processing_sent_packet_ = false; ScopedTaskSafety safety_; }; diff --git a/pc/rtp_transport_internal.h b/pc/rtp_transport_internal.h index 4114fa9340..faac226fda 100644 --- a/pc/rtp_transport_internal.h +++ b/pc/rtp_transport_internal.h @@ -11,23 +11,25 @@ #ifndef PC_RTP_TRANSPORT_INTERNAL_H_ #define PC_RTP_TRANSPORT_INTERNAL_H_ +#include +#include #include #include +#include "absl/functional/any_invocable.h" #include "call/rtp_demuxer.h" -#include "p2p/base/ice_transport_internal.h" #include "pc/session_description.h" #include "rtc_base/callback_list.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" -#include "rtc_base/ssl_stream_adapter.h" - -namespace rtc { -class CopyOnWriteBuffer; -struct PacketOptions; -} // namespace rtc +#include "rtc_base/socket.h" +#include "rtc_base/third_party/sigslot/sigslot.h" namespace webrtc { +class CopyOnWriteBuffer; + // This class is an internal interface; it is not accessible to API consumers // but is accessible to internal classes in order to send and receive RTP and // RTCP packets belonging to a single RTP session. Additional convenience and @@ -41,8 +43,8 @@ class RtpTransportInternal : public sigslot::has_slots<> { virtual const std::string& transport_name() const = 0; // Sets socket options on the underlying RTP or RTCP transports. - virtual int SetRtpOption(rtc::Socket::Option opt, int value) = 0; - virtual int SetRtcpOption(rtc::Socket::Option opt, int value) = 0; + virtual int SetRtpOption(Socket::Option opt, int value) = 0; + virtual int SetRtcpOption(Socket::Option opt, int value) = 0; virtual bool rtcp_mux_enabled() const = 0; @@ -64,7 +66,7 @@ class RtpTransportInternal : public sigslot::has_slots<> { // BaseChannel through the RtpDemuxer callback. void SubscribeRtcpPacketReceived( const void* tag, - absl::AnyInvocable callback) { + absl::AnyInvocable callback) { callback_list_rtcp_packet_received_.AddReceiver(tag, std::move(callback)); } // There doesn't seem to be a need to unsubscribe from this signal. @@ -72,7 +74,7 @@ class RtpTransportInternal : public sigslot::has_slots<> { // Called whenever a RTP packet that can not be demuxed by the transport is // received. void SetUnDemuxableRtpPacketReceivedHandler( - absl::AnyInvocable callback) { + absl::AnyInvocable callback) { callback_undemuxable_rtp_packet_received_ = std::move(callback); } @@ -80,7 +82,7 @@ class RtpTransportInternal : public sigslot::has_slots<> { // The argument is an optional network route. 
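  // For example, a subscriber now takes std::optional<NetworkRoute>, mirroring
  // the updated SignalObserver in pc/rtp_transport_unittest.cc (illustrative):
  //   transport->SubscribeNetworkRouteChanged(
  //       this, [this](std::optional<webrtc::NetworkRoute> route) {
  //         OnNetworkRouteChanged(route);
  //       });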
void SubscribeNetworkRouteChanged( const void* tag, - absl::AnyInvocable)> callback) { + absl::AnyInvocable)> callback) { callback_list_network_route_changed_.AddReceiver(tag, std::move(callback)); } void UnsubscribeNetworkRouteChanged(const void* tag) { @@ -98,7 +100,7 @@ class RtpTransportInternal : public sigslot::has_slots<> { } void SubscribeSentPacket( const void* tag, - absl::AnyInvocable callback) { + absl::AnyInvocable callback) { callback_list_sent_packet_.AddReceiver(tag, std::move(callback)); } void UnsubscribeSentPacket(const void* tag) { @@ -109,12 +111,12 @@ class RtpTransportInternal : public sigslot::has_slots<> { // TODO(zhihuang): Pass the `packet` by copy so that the original data // wouldn't be modified. - virtual bool SendRtpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, + virtual bool SendRtpPacket(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options, int flags) = 0; - virtual bool SendRtcpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, + virtual bool SendRtcpPacket(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options, int flags) = 0; // This method updates the RTP header extension map so that the RTP transport @@ -128,7 +130,7 @@ class RtpTransportInternal : public sigslot::has_slots<> { // UpdateRecvEncryptedHeaderExtensionIds, // CacheRtpAbsSendTimeHeaderExtension, virtual void UpdateRtpHeaderExtensionMap( - const cricket::RtpHeaderExtensions& header_extensions) = 0; + const RtpHeaderExtensions& header_extensions) = 0; virtual bool IsSrtpActive() const = 0; @@ -139,34 +141,33 @@ class RtpTransportInternal : public sigslot::has_slots<> { protected: void SendReadyToSend(bool arg) { callback_list_ready_to_send_.Send(arg); } - void SendRtcpPacketReceived(rtc::CopyOnWriteBuffer* buffer, + void SendRtcpPacketReceived(CopyOnWriteBuffer* buffer, int64_t packet_time_us) { callback_list_rtcp_packet_received_.Send(buffer, packet_time_us); } void NotifyUnDemuxableRtpPacketReceived(RtpPacketReceived& packet) { callback_undemuxable_rtp_packet_received_(packet); } - void SendNetworkRouteChanged(absl::optional route) { + void SendNetworkRouteChanged(std::optional route) { callback_list_network_route_changed_.Send(route); } void SendWritableState(bool state) { callback_list_writable_state_.Send(state); } - void SendSentPacket(const rtc::SentPacket& packet) { + void SendSentPacket(const SentPacketInfo& packet) { callback_list_sent_packet_.Send(packet); } private: CallbackList callback_list_ready_to_send_; - CallbackList - callback_list_rtcp_packet_received_; - absl::AnyInvocable + CallbackList callback_list_rtcp_packet_received_; + absl::AnyInvocable callback_undemuxable_rtp_packet_received_ = [](RtpPacketReceived& packet) {}; - CallbackList> + CallbackList> callback_list_network_route_changed_; CallbackList callback_list_writable_state_; - CallbackList callback_list_sent_packet_; + CallbackList callback_list_sent_packet_; }; } // namespace webrtc diff --git a/pc/rtp_transport_unittest.cc b/pc/rtp_transport_unittest.cc index 5b6a8309e0..c4d207d6c4 100644 --- a/pc/rtp_transport_unittest.cc +++ b/pc/rtp_transport_unittest.cc @@ -10,19 +10,33 @@ #include "pc/rtp_transport.h" -#include - -#include "p2p/base/fake_packet_transport.h" +#include +#include +#include + +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" +#include "call/rtp_demuxer.h" +#include "p2p/base/packet_transport_internal.h" +#include "p2p/test/fake_packet_transport.h" #include 
"pc/test/rtp_transport_test_util.h" #include "rtc_base/buffer.h" #include "rtc_base/containers/flat_set.h" -#include "rtc_base/gunit.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/network/ecn_marking.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_route.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "test/explicit_key_value_config.h" +#include "test/gmock.h" #include "test/gtest.h" #include "test/run_loop.h" +#include "test/wait_until.h" namespace webrtc { +using test::ExplicitKeyValueConfig; + constexpr bool kMuxDisabled = false; constexpr bool kMuxEnabled = true; constexpr uint16_t kLocalNetId = 1; @@ -37,7 +51,7 @@ class SignalObserver : public sigslot::has_slots<> { transport->SubscribeReadyToSend( this, [this](bool ready) { OnReadyToSend(ready); }); transport->SubscribeNetworkRouteChanged( - this, [this](absl::optional route) { + this, [this](std::optional route) { OnNetworkRouteChanged(route); }); if (transport->rtp_packet_transport()) { @@ -54,13 +68,13 @@ class SignalObserver : public sigslot::has_slots<> { bool ready() const { return ready_; } void OnReadyToSend(bool ready) { ready_ = ready; } - absl::optional network_route() { return network_route_; } - void OnNetworkRouteChanged(absl::optional network_route) { + std::optional network_route() { return network_route_; } + void OnNetworkRouteChanged(std::optional network_route) { network_route_ = network_route; } - void OnSentPacket(rtc::PacketTransportInternal* packet_transport, - const rtc::SentPacket& sent_packet) { + void OnSentPacket(PacketTransportInternal* packet_transport, + const SentPacketInfo& sent_packet) { if (packet_transport == transport_->rtp_packet_transport()) { rtp_transport_sent_count_++; } else { @@ -78,15 +92,16 @@ class SignalObserver : public sigslot::has_slots<> { int rtcp_transport_sent_count_ = 0; RtpTransport* transport_ = nullptr; bool ready_ = false; - absl::optional network_route_; + std::optional network_route_; }; TEST(RtpTransportTest, SettingRtcpAndRtpSignalsReady) { - RtpTransport transport(kMuxDisabled); + RtpTransport transport(kMuxDisabled, ExplicitKeyValueConfig("")); + SignalObserver observer(&transport); - rtc::FakePacketTransport fake_rtcp("fake_rtcp"); + FakePacketTransport fake_rtcp("fake_rtcp"); fake_rtcp.SetWritable(true); - rtc::FakePacketTransport fake_rtp("fake_rtp"); + FakePacketTransport fake_rtp("fake_rtp"); fake_rtp.SetWritable(true); transport.SetRtcpPacketTransport(&fake_rtcp); // rtcp ready @@ -96,11 +111,11 @@ TEST(RtpTransportTest, SettingRtcpAndRtpSignalsReady) { } TEST(RtpTransportTest, SettingRtpAndRtcpSignalsReady) { - RtpTransport transport(kMuxDisabled); + RtpTransport transport(kMuxDisabled, ExplicitKeyValueConfig("")); SignalObserver observer(&transport); - rtc::FakePacketTransport fake_rtcp("fake_rtcp"); + FakePacketTransport fake_rtcp("fake_rtcp"); fake_rtcp.SetWritable(true); - rtc::FakePacketTransport fake_rtp("fake_rtp"); + FakePacketTransport fake_rtp("fake_rtp"); fake_rtp.SetWritable(true); transport.SetRtpPacketTransport(&fake_rtp); // rtp ready @@ -110,9 +125,9 @@ TEST(RtpTransportTest, SettingRtpAndRtcpSignalsReady) { } TEST(RtpTransportTest, SettingRtpWithRtcpMuxEnabledSignalsReady) { - RtpTransport transport(kMuxEnabled); + RtpTransport transport(kMuxEnabled, ExplicitKeyValueConfig("")); SignalObserver observer(&transport); - rtc::FakePacketTransport fake_rtp("fake_rtp"); + FakePacketTransport fake_rtp("fake_rtp"); fake_rtp.SetWritable(true); transport.SetRtpPacketTransport(&fake_rtp); // rtp 
ready @@ -120,9 +135,9 @@ TEST(RtpTransportTest, SettingRtpWithRtcpMuxEnabledSignalsReady) { } TEST(RtpTransportTest, DisablingRtcpMuxSignalsNotReady) { - RtpTransport transport(kMuxEnabled); + RtpTransport transport(kMuxEnabled, ExplicitKeyValueConfig("")); SignalObserver observer(&transport); - rtc::FakePacketTransport fake_rtp("fake_rtp"); + FakePacketTransport fake_rtp("fake_rtp"); fake_rtp.SetWritable(true); transport.SetRtpPacketTransport(&fake_rtp); // rtp ready @@ -133,9 +148,9 @@ TEST(RtpTransportTest, DisablingRtcpMuxSignalsNotReady) { } TEST(RtpTransportTest, EnablingRtcpMuxSignalsReady) { - RtpTransport transport(kMuxDisabled); + RtpTransport transport(kMuxDisabled, ExplicitKeyValueConfig("")); SignalObserver observer(&transport); - rtc::FakePacketTransport fake_rtp("fake_rtp"); + FakePacketTransport fake_rtp("fake_rtp"); fake_rtp.SetWritable(true); transport.SetRtpPacketTransport(&fake_rtp); // rtp ready @@ -147,20 +162,20 @@ TEST(RtpTransportTest, EnablingRtcpMuxSignalsReady) { // Tests the SignalNetworkRoute is fired when setting a packet transport. TEST(RtpTransportTest, SetRtpTransportWithNetworkRouteChanged) { - RtpTransport transport(kMuxDisabled); + RtpTransport transport(kMuxDisabled, ExplicitKeyValueConfig("")); SignalObserver observer(&transport); - rtc::FakePacketTransport fake_rtp("fake_rtp"); + FakePacketTransport fake_rtp("fake_rtp"); EXPECT_FALSE(observer.network_route()); - rtc::NetworkRoute network_route; + NetworkRoute network_route; // Set a non-null RTP transport with a new network route. network_route.connected = true; - network_route.local = rtc::RouteEndpoint::CreateWithNetworkId(kLocalNetId); - network_route.remote = rtc::RouteEndpoint::CreateWithNetworkId(kRemoteNetId); + network_route.local = RouteEndpoint::CreateWithNetworkId(kLocalNetId); + network_route.remote = RouteEndpoint::CreateWithNetworkId(kRemoteNetId); network_route.last_sent_packet_id = kLastPacketId; network_route.packet_overhead = kTransportOverheadPerPacket; - fake_rtp.SetNetworkRoute(absl::optional(network_route)); + fake_rtp.SetNetworkRoute(std::optional(network_route)); transport.SetRtpPacketTransport(&fake_rtp); ASSERT_TRUE(observer.network_route()); EXPECT_TRUE(observer.network_route()->connected); @@ -176,20 +191,20 @@ TEST(RtpTransportTest, SetRtpTransportWithNetworkRouteChanged) { } TEST(RtpTransportTest, SetRtcpTransportWithNetworkRouteChanged) { - RtpTransport transport(kMuxDisabled); + RtpTransport transport(kMuxDisabled, ExplicitKeyValueConfig("")); SignalObserver observer(&transport); - rtc::FakePacketTransport fake_rtcp("fake_rtcp"); + FakePacketTransport fake_rtcp("fake_rtcp"); EXPECT_FALSE(observer.network_route()); - rtc::NetworkRoute network_route; + NetworkRoute network_route; // Set a non-null RTCP transport with a new network route. 
network_route.connected = true; - network_route.local = rtc::RouteEndpoint::CreateWithNetworkId(kLocalNetId); - network_route.remote = rtc::RouteEndpoint::CreateWithNetworkId(kRemoteNetId); + network_route.local = RouteEndpoint::CreateWithNetworkId(kLocalNetId); + network_route.remote = RouteEndpoint::CreateWithNetworkId(kRemoteNetId); network_route.last_sent_packet_id = kLastPacketId; network_route.packet_overhead = kTransportOverheadPerPacket; - fake_rtcp.SetNetworkRoute(absl::optional(network_route)); + fake_rtcp.SetNetworkRoute(std::optional(network_route)); transport.SetRtcpPacketTransport(&fake_rtcp); ASSERT_TRUE(observer.network_route()); EXPECT_TRUE(observer.network_route()->connected); @@ -209,9 +224,9 @@ TEST(RtpTransportTest, SetRtcpTransportWithNetworkRouteChanged) { TEST(RtpTransportTest, RtcpPacketSentOverCorrectTransport) { // If the RTCP-mux is not enabled, RTCP packets are expected to be sent over // the RtcpPacketTransport. - RtpTransport transport(kMuxDisabled); - rtc::FakePacketTransport fake_rtcp("fake_rtcp"); - rtc::FakePacketTransport fake_rtp("fake_rtp"); + RtpTransport transport(kMuxDisabled, ExplicitKeyValueConfig("")); + FakePacketTransport fake_rtcp("fake_rtcp"); + FakePacketTransport fake_rtp("fake_rtp"); transport.SetRtcpPacketTransport(&fake_rtcp); // rtcp ready transport.SetRtpPacketTransport(&fake_rtp); // rtp ready SignalObserver observer(&transport); @@ -219,21 +234,21 @@ TEST(RtpTransportTest, RtcpPacketSentOverCorrectTransport) { fake_rtp.SetDestination(&fake_rtp, true); fake_rtcp.SetDestination(&fake_rtcp, true); - rtc::CopyOnWriteBuffer packet; - EXPECT_TRUE(transport.SendRtcpPacket(&packet, rtc::PacketOptions(), 0)); + CopyOnWriteBuffer packet; + EXPECT_TRUE(transport.SendRtcpPacket(&packet, AsyncSocketPacketOptions(), 0)); EXPECT_EQ(1, observer.rtcp_transport_sent_count()); // The RTCP packets are expected to be sent over RtpPacketTransport if // RTCP-mux is enabled. transport.SetRtcpMuxEnabled(true); - EXPECT_TRUE(transport.SendRtcpPacket(&packet, rtc::PacketOptions(), 0)); + EXPECT_TRUE(transport.SendRtcpPacket(&packet, AsyncSocketPacketOptions(), 0)); EXPECT_EQ(1, observer.rtp_transport_sent_count()); } TEST(RtpTransportTest, ChangingReadyToSendStateOnlySignalsWhenChanged) { - RtpTransport transport(kMuxEnabled); + RtpTransport transport(kMuxEnabled, ExplicitKeyValueConfig("")); TransportObserver observer(&transport); - rtc::FakePacketTransport fake_rtp("fake_rtp"); + FakePacketTransport fake_rtp("fake_rtp"); fake_rtp.SetWritable(true); // State changes, so we should signal. @@ -256,8 +271,8 @@ TEST(RtpTransportTest, ChangingReadyToSendStateOnlySignalsWhenChanged) { // Test that SignalPacketReceived fires with rtcp=true when a RTCP packet is // received. TEST(RtpTransportTest, SignalDemuxedRtcp) { - RtpTransport transport(kMuxDisabled); - rtc::FakePacketTransport fake_rtp("fake_rtp"); + RtpTransport transport(kMuxDisabled, ExplicitKeyValueConfig("")); + FakePacketTransport fake_rtp("fake_rtp"); fake_rtp.SetDestination(&fake_rtp, true); transport.SetRtpPacketTransport(&fake_rtp); TransportObserver observer(&transport); @@ -265,7 +280,7 @@ TEST(RtpTransportTest, SignalDemuxedRtcp) { // An rtcp packet. 
const unsigned char data[] = {0x80, 73, 0, 0}; const int len = 4; - const rtc::PacketOptions options; + const AsyncSocketPacketOptions options; const int flags = 0; fake_rtp.SendPacket(reinterpret_cast(data), len, options, flags); EXPECT_EQ(0, observer.rtp_count()); @@ -279,8 +294,8 @@ static const int kRtpLen = 12; // Test that SignalPacketReceived fires with rtcp=false when a RTP packet with a // handled payload type is received. TEST(RtpTransportTest, SignalHandledRtpPayloadType) { - RtpTransport transport(kMuxDisabled); - rtc::FakePacketTransport fake_rtp("fake_rtp"); + RtpTransport transport(kMuxDisabled, ExplicitKeyValueConfig("")); + FakePacketTransport fake_rtp("fake_rtp"); fake_rtp.SetDestination(&fake_rtp, true); transport.SetRtpPacketTransport(&fake_rtp); TransportObserver observer(&transport); @@ -290,9 +305,9 @@ TEST(RtpTransportTest, SignalHandledRtpPayloadType) { transport.RegisterRtpDemuxerSink(demuxer_criteria, &observer); // An rtp packet. - const rtc::PacketOptions options; + const AsyncSocketPacketOptions options; const int flags = 0; - rtc::Buffer rtp_data(kRtpData, kRtpLen); + Buffer rtp_data(kRtpData, kRtpLen); fake_rtp.SendPacket(rtp_data.data(), kRtpLen, options, flags); EXPECT_EQ(1, observer.rtp_count()); EXPECT_EQ(0, observer.un_demuxable_rtp_count()); @@ -301,11 +316,34 @@ TEST(RtpTransportTest, SignalHandledRtpPayloadType) { transport.UnregisterRtpDemuxerSink(&observer); } +TEST(RtpTransportTest, ReceivedPacketEcnMarkingPropagatedToDemuxedPacket) { + RtpTransport transport(kMuxDisabled, ExplicitKeyValueConfig("")); + // Setup FakePacketTransport to send packets to itself. + FakePacketTransport fake_rtp("fake_rtp"); + fake_rtp.SetDestination(&fake_rtp, true); + transport.SetRtpPacketTransport(&fake_rtp); + TransportObserver observer(&transport); + RtpDemuxerCriteria demuxer_criteria; + // Add a payload type of kRtpData. + demuxer_criteria.payload_types().insert(0x11); + transport.RegisterRtpDemuxerSink(demuxer_criteria, &observer); + + AsyncSocketPacketOptions options; + options.ecn_1 = true; + const int flags = 0; + Buffer rtp_data(kRtpData, kRtpLen); + fake_rtp.SendPacket(rtp_data.data(), kRtpLen, options, flags); + ASSERT_EQ(observer.rtp_count(), 1); + EXPECT_EQ(observer.last_recv_rtp_packet().ecn(), EcnMarking::kEct1); + + transport.UnregisterRtpDemuxerSink(&observer); +} + // Test that SignalPacketReceived does not fire when a RTP packet with an // unhandled payload type is received. 
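// ---------------------------------------------------------------------------
// Illustrative aside (not part of the patch): the ECN test above checks that
// the marking set on the send options (`options.ecn_1 = true`) survives the
// receive path and shows up on the demuxed packet as `EcnMarking::kEct1`.
// The stand-alone types below (`Ecn`, `DemuxedPacket`, `Demux`) are
// hypothetical and only illustrate the idea of carrying the marking alongside
// the payload instead of dropping it at the transport boundary.
#include <cstdint>
#include <utility>
#include <vector>

enum class Ecn { kNotEct, kEct1, kEct0, kCe };

struct DemuxedPacket {
  std::vector<uint8_t> payload;
  int64_t arrival_time_us = 0;
  Ecn ecn = Ecn::kNotEct;  // Propagated with the packet, never re-derived.
};

DemuxedPacket Demux(std::vector<uint8_t> wire_bytes,
                    int64_t arrival_time_us,
                    Ecn ecn_from_socket) {
  // Each hop forwards the metadata it received; the test asserts exactly this
  // behavior for the real RtpTransport/RtpDemuxer pair.
  return DemuxedPacket{std::move(wire_bytes), arrival_time_us, ecn_from_socket};
}
// ---------------------------------------------------------------------------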
TEST(RtpTransportTest, DontSignalUnhandledRtpPayloadType) { - RtpTransport transport(kMuxDisabled); - rtc::FakePacketTransport fake_rtp("fake_rtp"); + RtpTransport transport(kMuxDisabled, ExplicitKeyValueConfig("")); + FakePacketTransport fake_rtp("fake_rtp"); fake_rtp.SetDestination(&fake_rtp, true); transport.SetRtpPacketTransport(&fake_rtp); TransportObserver observer(&transport); @@ -314,9 +352,9 @@ TEST(RtpTransportTest, DontSignalUnhandledRtpPayloadType) { demuxer_criteria.payload_types().insert(0x12); transport.RegisterRtpDemuxerSink(demuxer_criteria, &observer); - const rtc::PacketOptions options; + const AsyncSocketPacketOptions options; const int flags = 0; - rtc::Buffer rtp_data(kRtpData, kRtpLen); + Buffer rtp_data(kRtpData, kRtpLen); fake_rtp.SendPacket(rtp_data.data(), kRtpLen, options, flags); EXPECT_EQ(0, observer.rtp_count()); EXPECT_EQ(1, observer.un_demuxable_rtp_count()); @@ -325,17 +363,43 @@ TEST(RtpTransportTest, DontSignalUnhandledRtpPayloadType) { transport.UnregisterRtpDemuxerSink(&observer); } +TEST(RtpTransportTest, DontChangeReadyToSendStateOnSendFailure) { + // ReadyToSendState should only care about if transport is writable unless the + // field trial WebRTC-SetReadyToSendFalseIfSendFail/Enabled/ is set. + RtpTransport transport(kMuxEnabled, ExplicitKeyValueConfig("")); + TransportObserver observer(&transport); + + FakePacketTransport fake_rtp("fake_rtp"); + fake_rtp.SetDestination(&fake_rtp, true); + transport.SetRtpPacketTransport(&fake_rtp); + fake_rtp.SetWritable(true); + EXPECT_TRUE(observer.ready_to_send()); + EXPECT_EQ(observer.ready_to_send_signal_count(), 1); + CopyOnWriteBuffer packet; + EXPECT_TRUE(transport.SendRtpPacket(&packet, AsyncSocketPacketOptions(), 0)); + + // The fake RTP will return -1 due to ENOTCONN. + fake_rtp.SetError(ENOTCONN); + EXPECT_FALSE(transport.SendRtpPacket(&packet, AsyncSocketPacketOptions(), 0)); + // Ready to send state should not have changed. + EXPECT_TRUE(observer.ready_to_send()); + EXPECT_EQ(observer.ready_to_send_signal_count(), 1); +} + TEST(RtpTransportTest, RecursiveSetSendDoesNotCrash) { const int kShortTimeout = 100; test::RunLoop loop; - RtpTransport transport(kMuxEnabled); - rtc::FakePacketTransport fake_rtp("fake_rtp"); + + RtpTransport transport( + kMuxEnabled, + ExplicitKeyValueConfig("WebRTC-SetReadyToSendFalseIfSendFail/Enabled/")); + FakePacketTransport fake_rtp("fake_rtp"); transport.SetRtpPacketTransport(&fake_rtp); TransportObserver observer(&transport); observer.SetActionOnReadyToSend([&](bool ready) { - const rtc::PacketOptions options; + const AsyncSocketPacketOptions options; const int flags = 0; - rtc::CopyOnWriteBuffer rtp_data(kRtpData, kRtpLen); + CopyOnWriteBuffer rtp_data(kRtpData, kRtpLen); transport.SendRtpPacket(&rtp_data, options, flags); }); // The fake RTP will have no destination, so will return -1. @@ -345,8 +409,38 @@ TEST(RtpTransportTest, RecursiveSetSendDoesNotCrash) { EXPECT_TRUE(observer.ready_to_send()); EXPECT_EQ(observer.ready_to_send_signal_count(), 1); // After the wait, the ready-to-send false is observed. 
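// ---------------------------------------------------------------------------
// Illustrative aside (not part of the patch): the tests in this file construct
// RtpTransport with an ExplicitKeyValueConfig, and the ready-to-send change is
// gated on the "WebRTC-SetReadyToSendFalseIfSendFail" field trial. A common
// way to wire such a gate is to latch it into a const member at construction
// time, as the `set_ready_to_send_false_if_send_fail_` member earlier in this
// change suggests. The `TrialsLike` and `TransportSketch` types below are
// hypothetical stand-ins, not the real WebRTC classes.
#include <functional>
#include <string>
#include <utility>

class TrialsLike {
 public:
  explicit TrialsLike(std::function<std::string(const std::string&)> lookup)
      : lookup_(std::move(lookup)) {}
  bool IsEnabled(const std::string& key) const {
    // Treat any value starting with "Enabled" as an enabled trial.
    return lookup_(key).rfind("Enabled", 0) == 0;
  }

 private:
  std::function<std::string(const std::string&)> lookup_;
};

class TransportSketch {
 public:
  explicit TransportSketch(const TrialsLike& trials)
      : set_ready_to_send_false_if_send_fail_(
            trials.IsEnabled("WebRTC-SetReadyToSendFalseIfSendFail")) {}

  // Returns the ready-to-send state after observing a send result.
  bool OnSendResult(bool send_succeeded) {
    if (!send_succeeded && set_ready_to_send_false_if_send_fail_) {
      ready_to_send_ = false;
    }
    return ready_to_send_;
  }

 private:
  const bool set_ready_to_send_false_if_send_fail_;
  bool ready_to_send_ = true;
};
// ---------------------------------------------------------------------------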
- EXPECT_EQ_WAIT(observer.ready_to_send_signal_count(), 2, kShortTimeout); + EXPECT_THAT(WaitUntil([&] { return observer.ready_to_send_signal_count(); }, + ::testing::Eq(2), + {.timeout = webrtc::TimeDelta::Millis(kShortTimeout)}), + IsRtcOk()); EXPECT_FALSE(observer.ready_to_send()); } +TEST(RtpTransportTest, RecursiveOnSentPacketDoesNotCrash) { + const int kShortTimeout = 100; + test::RunLoop loop; + RtpTransport transport(kMuxDisabled, ExplicitKeyValueConfig("")); + FakePacketTransport fake_rtp("fake_rtp"); + transport.SetRtpPacketTransport(&fake_rtp); + fake_rtp.SetDestination(&fake_rtp, true); + TransportObserver observer(&transport); + const AsyncSocketPacketOptions options; + const int flags = 0; + + fake_rtp.SetWritable(true); + observer.SetActionOnSentPacket([&]() { + CopyOnWriteBuffer rtp_data(kRtpData, kRtpLen); + if (observer.sent_packet_count() < 2) { + transport.SendRtpPacket(&rtp_data, options, flags); + } + }); + CopyOnWriteBuffer rtp_data(kRtpData, kRtpLen); + transport.SendRtpPacket(&rtp_data, options, flags); + EXPECT_EQ(observer.sent_packet_count(), 1); + EXPECT_THAT( + WaitUntil([&] { return observer.sent_packet_count(); }, ::testing::Eq(2), + {.timeout = webrtc::TimeDelta::Millis(kShortTimeout)}), + IsRtcOk()); +} + } // namespace webrtc diff --git a/pc/scenario_tests/goog_cc_test.cc b/pc/scenario_tests/goog_cc_test.cc index ea96408ac7..2e8ff64e8f 100644 --- a/pc/scenario_tests/goog_cc_test.cc +++ b/pc/scenario_tests/goog_cc_test.cc @@ -10,6 +10,7 @@ #include "api/stats/rtc_stats_collector_callback.h" #include "api/stats/rtcstats_objects.h" +#include "api/units/data_rate.h" #include "pc/test/mock_peer_connection_observers.h" #include "test/field_trial.h" #include "test/gtest.h" @@ -42,7 +43,7 @@ TEST(GoogCcPeerScenarioTest, MAYBE_NoBweChangeFromVideoUnmute) { auto* callee = s.CreateClient(PeerScenarioClient::Config()); BuiltInNetworkBehaviorConfig net_conf; - net_conf.link_capacity_kbps = 350; + net_conf.link_capacity = DataRate::KilobitsPerSec(350); net_conf.queue_delay_ms = 50; auto send_node = s.net()->CreateEmulatedNode(net_conf); auto ret_node = s.net()->CreateEmulatedNode(net_conf); @@ -50,7 +51,7 @@ TEST(GoogCcPeerScenarioTest, MAYBE_NoBweChangeFromVideoUnmute) { PeerScenarioClient::VideoSendTrackConfig video_conf; video_conf.generator.squares_video->framerate = 15; auto video = caller->CreateVideo("VIDEO", video_conf); - auto audio = caller->CreateAudio("AUDIO", cricket::AudioOptions()); + auto audio = caller->CreateAudio("AUDIO", AudioOptions()); // Start ICE and exchange SDP. s.SimpleConnection(caller, callee, {send_node}, {ret_node}); @@ -73,8 +74,7 @@ TEST(GoogCcPeerScenarioTest, MAYBE_NoBweChangeFromVideoUnmute) { ASSERT_EQ(num_video_streams, 1); // Exactly 1 video stream. 
auto get_bwe = [&] { - auto callback = - rtc::make_ref_counted(); + auto callback = make_ref_counted(); caller->pc()->GetStats(callback.get()); s.net()->time_controller()->Wait([&] { return callback->called(); }); auto stats = diff --git a/pc/sctp_data_channel.cc b/pc/sctp_data_channel.cc index 8fdbf4cb92..0faa6bbb65 100644 --- a/pc/sctp_data_channel.cc +++ b/pc/sctp_data_channel.cc @@ -10,17 +10,36 @@ #include "pc/sctp_data_channel.h" +#include +#include +#include #include #include +#include #include #include +#include "absl/functional/any_invocable.h" +#include "api/data_channel_interface.h" +#include "api/make_ref_counted.h" +#include "api/priority.h" +#include "api/rtc_error.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/transport/data_channel_transport_interface.h" #include "media/sctp/sctp_transport_internal.h" +#include "pc/data_channel_utils.h" #include "pc/proxy.h" +#include "pc/sctp_utils.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" +#include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/system/unused.h" #include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" +#include "rtc_base/weak_ptr.h" namespace webrtc { @@ -42,15 +61,13 @@ BYPASS_PROXY_METHOD0(void, UnregisterObserver) BYPASS_PROXY_CONSTMETHOD0(std::string, label) BYPASS_PROXY_CONSTMETHOD0(bool, reliable) BYPASS_PROXY_CONSTMETHOD0(bool, ordered) -BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmitTime) -BYPASS_PROXY_CONSTMETHOD0(uint16_t, maxRetransmits) -BYPASS_PROXY_CONSTMETHOD0(absl::optional, maxRetransmitsOpt) -BYPASS_PROXY_CONSTMETHOD0(absl::optional, maxPacketLifeTime) +BYPASS_PROXY_CONSTMETHOD0(std::optional, maxRetransmitsOpt) +BYPASS_PROXY_CONSTMETHOD0(std::optional, maxPacketLifeTime) BYPASS_PROXY_CONSTMETHOD0(std::string, protocol) BYPASS_PROXY_CONSTMETHOD0(bool, negotiated) // Can't bypass the proxy since the id may change. PROXY_SECONDARY_CONSTMETHOD0(int, id) -BYPASS_PROXY_CONSTMETHOD0(Priority, priority) +BYPASS_PROXY_CONSTMETHOD0(PriorityValue, priority) BYPASS_PROXY_CONSTMETHOD0(DataState, state) BYPASS_PROXY_CONSTMETHOD0(RTCError, error) PROXY_SECONDARY_CONSTMETHOD0(uint32_t, messages_sent) @@ -84,7 +101,7 @@ InternalDataChannelInit::InternalDataChannelInit(const DataChannelInit& base) if (*maxRetransmits < 0) { RTC_LOG(LS_ERROR) << "Accepting maxRetransmits < 0 for backwards compatibility"; - maxRetransmits = absl::nullopt; + maxRetransmits = std::nullopt; } else if (*maxRetransmits > std::numeric_limits::max()) { maxRetransmits = std::numeric_limits::max(); } @@ -94,7 +111,7 @@ InternalDataChannelInit::InternalDataChannelInit(const DataChannelInit& base) if (*maxRetransmitTime < 0) { RTC_LOG(LS_ERROR) << "Accepting maxRetransmitTime < 0 for backwards compatibility"; - maxRetransmitTime = absl::nullopt; + maxRetransmitTime = std::nullopt; } else if (*maxRetransmitTime > std::numeric_limits::max()) { maxRetransmitTime = std::numeric_limits::max(); } @@ -118,23 +135,21 @@ bool InternalDataChannelInit::IsValid() const { return true; } -StreamId SctpSidAllocator::AllocateSid(rtc::SSLRole role) { +std::optional SctpSidAllocator::AllocateSid(SSLRole role) { RTC_DCHECK_RUN_ON(&sequence_checker_); - int potential_sid = (role == rtc::SSL_CLIENT) ? 0 : 1; - while (potential_sid <= static_cast(cricket::kMaxSctpSid)) { + int potential_sid = (role == SSL_CLIENT) ? 
0 : 1; + while (potential_sid <= static_cast(kMaxSctpSid)) { StreamId sid(potential_sid); if (used_sids_.insert(sid).second) return sid; potential_sid += 2; } RTC_LOG(LS_ERROR) << "SCTP sid allocation pool exhausted."; - return StreamId(); + return std::nullopt; } bool SctpSidAllocator::ReserveSid(StreamId sid) { RTC_DCHECK_RUN_ON(&sequence_checker_); - if (!sid.HasValue() || sid.stream_id_int() > cricket::kMaxSctpSid) - return false; return used_sids_.insert(sid).second; } @@ -163,7 +178,7 @@ class SctpDataChannel::ObserverAdapter : public DataChannelObserver { public: explicit ObserverAdapter( SctpDataChannel* channel, - rtc::scoped_refptr signaling_safety) + scoped_refptr signaling_safety) : channel_(channel), signaling_safety_(std::move(signaling_safety)) {} bool IsInsideCallback() const { @@ -267,37 +282,37 @@ class SctpDataChannel::ObserverAdapter : public DataChannelObserver { bool IsOkToCallOnTheNetworkThread() override { return true; } - rtc::Thread* signaling_thread() const { return signaling_thread_; } - rtc::Thread* network_thread() const { return channel_->network_thread_; } + Thread* signaling_thread() const { return signaling_thread_; } + Thread* network_thread() const { return channel_->network_thread_; } DataChannelObserver* delegate_ RTC_GUARDED_BY(signaling_thread()) = nullptr; SctpDataChannel* const channel_; // Make sure to keep our own signaling_thread_ pointer to avoid dereferencing // `channel_` in the `RTC_DCHECK_RUN_ON` checks on the signaling thread. - rtc::Thread* const signaling_thread_{channel_->signaling_thread_}; + Thread* const signaling_thread_{channel_->signaling_thread_}; ScopedTaskSafety safety_; - rtc::scoped_refptr signaling_safety_; + scoped_refptr signaling_safety_; CachedGetters* cached_getters_ RTC_GUARDED_BY(signaling_thread()) = nullptr; }; // static -rtc::scoped_refptr SctpDataChannel::Create( - rtc::WeakPtr controller, +scoped_refptr SctpDataChannel::Create( + WeakPtr controller, const std::string& label, bool connected_to_transport, const InternalDataChannelInit& config, - rtc::Thread* signaling_thread, - rtc::Thread* network_thread) { + Thread* signaling_thread, + Thread* network_thread) { RTC_DCHECK(config.IsValid()); - return rtc::make_ref_counted( - config, std::move(controller), label, connected_to_transport, - signaling_thread, network_thread); + return make_ref_counted(config, std::move(controller), label, + connected_to_transport, + signaling_thread, network_thread); } // static -rtc::scoped_refptr SctpDataChannel::CreateProxy( - rtc::scoped_refptr channel, - rtc::scoped_refptr signaling_safety) { +scoped_refptr SctpDataChannel::CreateProxy( + scoped_refptr channel, + scoped_refptr signaling_safety) { // Copy thread params to local variables before `std::move()`. auto* signaling_thread = channel->signaling_thread_; auto* network_thread = channel->network_thread_; @@ -309,14 +324,14 @@ rtc::scoped_refptr SctpDataChannel::CreateProxy( SctpDataChannel::SctpDataChannel( const InternalDataChannelInit& config, - rtc::WeakPtr controller, + WeakPtr controller, const std::string& label, bool connected_to_transport, - rtc::Thread* signaling_thread, - rtc::Thread* network_thread) + Thread* signaling_thread, + Thread* network_thread) : signaling_thread_(signaling_thread), network_thread_(network_thread), - id_n_(config.id), + id_n_(config.id == -1 ? 
std::nullopt : std::make_optional(config.id)), internal_id_(GenerateUniqueId()), label_(label), protocol_(config.protocol), @@ -362,7 +377,7 @@ void SctpDataChannel::RegisterObserver(DataChannelObserver* observer) { // Check if we should set up an observer adapter that will make sure that // callbacks are delivered on the signaling thread rather than directly // on the network thread. - const auto* current_thread = rtc::Thread::Current(); + const auto* current_thread = Thread::Current(); // TODO(webrtc:11547): Eventually all DataChannelObserver implementations // should be called on the network thread and IsOkToCallOnTheNetworkThread(). if (!observer->IsOkToCallOnTheNetworkThread()) { @@ -386,7 +401,7 @@ void SctpDataChannel::RegisterObserver(DataChannelObserver* observer) { // a reference to ourselves while the task is in flight. We can't use // `SafeTask(network_safety_, ...)` for this since we can't assume that we // have a transport (network_safety_ represents the transport connection). - rtc::scoped_refptr me(this); + scoped_refptr me(this); auto register_observer = [me = std::move(me), observer = observer] { RTC_DCHECK_RUN_ON(me->network_thread_); me->observer_ = observer; @@ -402,7 +417,7 @@ void SctpDataChannel::RegisterObserver(DataChannelObserver* observer) { void SctpDataChannel::UnregisterObserver() { // Note: As with `RegisterObserver`, the proxy is being bypassed. - const auto* current_thread = rtc::Thread::Current(); + const auto* current_thread = Thread::Current(); // Callers must not be invoking the unregistration from the network thread // (assuming a multi-threaded environment where we have a dedicated network // thread). That would indicate non-network related work happening on the @@ -451,20 +466,11 @@ bool SctpDataChannel::ordered() const { return ordered_; } -uint16_t SctpDataChannel::maxRetransmitTime() const { - return max_retransmit_time_ ? *max_retransmit_time_ - : static_cast(-1); -} - -uint16_t SctpDataChannel::maxRetransmits() const { - return max_retransmits_ ? *max_retransmits_ : static_cast(-1); -} - -absl::optional SctpDataChannel::maxPacketLifeTime() const { +std::optional SctpDataChannel::maxPacketLifeTime() const { return max_retransmit_time_; } -absl::optional SctpDataChannel::maxRetransmitsOpt() const { +std::optional SctpDataChannel::maxRetransmitsOpt() const { return max_retransmits_; } @@ -478,16 +484,19 @@ bool SctpDataChannel::negotiated() const { int SctpDataChannel::id() const { RTC_DCHECK_RUN_ON(network_thread_); - return id_n_.stream_id_int(); + return id_n_.has_value() ? id_n_->stream_id_int() : -1; } -Priority SctpDataChannel::priority() const { - return priority_ ? *priority_ : Priority::kLow; +PriorityValue SctpDataChannel::priority() const { + return priority_.value_or(PriorityValue(Priority::kLow)); } uint64_t SctpDataChannel::buffered_amount() const { RTC_DCHECK_RUN_ON(network_thread_); - return queued_send_data_.byte_count(); + if (controller_ != nullptr && id_n_.has_value()) { + return controller_->buffered_amount(*id_n_); + } + return 0u; } void SctpDataChannel::Close() { @@ -509,7 +518,7 @@ SctpDataChannel::DataState SctpDataChannel::state() const { // getting put behind other messages on the network thread and eventually // fetch a different state value (since pending messages might cause the // state to change in the meantime). 
- const auto* current_thread = rtc::Thread::Current(); + const auto* current_thread = Thread::Current(); if (current_thread == signaling_thread_ && observer_adapter_ && observer_adapter_->IsInsideCallback()) { return observer_adapter_->cached_state(); @@ -526,7 +535,7 @@ SctpDataChannel::DataState SctpDataChannel::state() const { } RTCError SctpDataChannel::error() const { - const auto* current_thread = rtc::Thread::Current(); + const auto* current_thread = Thread::Current(); if (current_thread == signaling_thread_ && observer_adapter_ && observer_adapter_->IsInsideCallback()) { return observer_adapter_->cached_error(); @@ -576,20 +585,14 @@ bool SctpDataChannel::Send(const DataBuffer& buffer) { // RTC_RUN_ON(network_thread_); RTCError SctpDataChannel::SendImpl(DataBuffer buffer) { + // The caller increases the cached `bufferedAmount` even if there are errors. + expected_buffer_amount_ += buffer.size(); + if (state_ != kOpen) { error_ = RTCError(RTCErrorType::INVALID_STATE); return error_; } - // If the queue is non-empty, we're waiting for SignalReadyToSend, - // so just add to the end of the queue and keep waiting. - if (!queued_send_data_.Empty()) { - error_ = QueueSendDataMessage(buffer) - ? RTCError::OK() - : RTCError(RTCErrorType::RESOURCE_EXHAUSTED); - return error_; - } - return SendDataMessage(buffer, true); } @@ -615,8 +618,7 @@ void SctpDataChannel::SendAsync( void SctpDataChannel::SetSctpSid_n(StreamId sid) { RTC_DCHECK_RUN_ON(network_thread_); - RTC_DCHECK(!id_n_.HasValue()); - RTC_DCHECK(sid.HasValue()); + RTC_DCHECK(!id_n_.has_value()); RTC_DCHECK_NE(handshake_state_, kHandshakeWaitingForAck); RTC_DCHECK_EQ(state_, kConnecting); id_n_ = sid; @@ -628,8 +630,11 @@ void SctpDataChannel::OnClosingProcedureStartedRemotely() { // Don't bother sending queued data since the side that initiated the // closure wouldn't receive it anyway. See crbug.com/559394 for a lengthy // discussion about this. - queued_send_data_.Clear(); - queued_control_data_.Clear(); + + // Note that this is handled by the SctpTransport, when an incoming stream + // reset notification comes in, the outgoing stream is closed, which + // discards data. + // Just need to change state to kClosing, SctpTransport will handle the // rest of the closing procedure and OnClosingProcedureComplete will be // called later. @@ -643,7 +648,9 @@ void SctpDataChannel::OnClosingProcedureComplete() { // If the closing procedure is complete, we should have finished sending // all pending data and transitioned to kClosing already. 
RTC_DCHECK_EQ(state_, kClosing); - RTC_DCHECK(queued_send_data_.Empty()); + if (controller_ && id_n_.has_value()) { + RTC_DCHECK_EQ(controller_->buffered_amount(*id_n_), 0); + } SetState(kClosed); } @@ -661,6 +668,17 @@ void SctpDataChannel::OnTransportChannelClosed(RTCError error) { CloseAbruptlyWithError(std::move(error)); } +void SctpDataChannel::OnBufferedAmountLow() { + RTC_DCHECK_RUN_ON(network_thread_); + MaybeSendOnBufferedAmountChanged(); + + if (state_ == DataChannelInterface::kClosing && !started_closing_procedure_ && + id_n_.has_value() && buffered_amount() == 0) { + started_closing_procedure_ = true; + controller_->RemoveSctpDataStream(*id_n_); + } +} + DataChannelStats SctpDataChannel::GetStats() const { RTC_DCHECK_RUN_ON(network_thread_); DataChannelStats stats{internal_id_, id(), label(), @@ -670,26 +688,27 @@ DataChannelStats SctpDataChannel::GetStats() const { } void SctpDataChannel::OnDataReceived(DataMessageType type, - const rtc::CopyOnWriteBuffer& payload) { + const CopyOnWriteBuffer& payload) { RTC_DCHECK_RUN_ON(network_thread_); + RTC_DCHECK(id_n_.has_value()); if (type == DataMessageType::kControl) { if (handshake_state_ != kHandshakeWaitingForAck) { // Ignore it if we are not expecting an ACK message. RTC_LOG(LS_WARNING) << "DataChannel received unexpected CONTROL message, sid = " - << id_n_.stream_id_int(); + << id_n_->stream_id_int(); return; } if (ParseDataChannelOpenAckMessage(payload)) { // We can send unordered as soon as we receive the ACK message. handshake_state_ = kHandshakeReady; RTC_LOG(LS_INFO) << "DataChannel received OPEN_ACK message, sid = " - << id_n_.stream_id_int(); + << id_n_->stream_id_int(); } else { RTC_LOG(LS_WARNING) << "DataChannel failed to parse OPEN_ACK message, sid = " - << id_n_.stream_id_int(); + << id_n_->stream_id_int(); } return; } @@ -698,7 +717,7 @@ void SctpDataChannel::OnDataReceived(DataMessageType type, type == DataMessageType::kText); RTC_DLOG(LS_VERBOSE) << "DataChannel received DATA message, sid = " - << id_n_.stream_id_int(); + << id_n_->stream_id_int(); // We can send unordered as soon as we receive any DATA message since the // remote side must have received the OPEN (and old clients do not send // OPEN_ACK). @@ -731,10 +750,7 @@ void SctpDataChannel::OnDataReceived(DataMessageType type, void SctpDataChannel::OnTransportReady() { RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(connected_to_transport()); - RTC_DCHECK(id_n_.HasValue()); - - SendQueuedControlMessages(); - SendQueuedDataMessages(); + RTC_DCHECK(id_n_.has_value()); UpdateState(); } @@ -748,10 +764,6 @@ void SctpDataChannel::CloseAbruptlyWithError(RTCError error) { network_safety_->SetNotAlive(); - // Closing abruptly means any queued data gets thrown away. - queued_send_data_.Clear(); - queued_control_data_.Clear(); - // Still go to "kClosing" before "kClosed", since observers may be expecting // that. 
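// ---------------------------------------------------------------------------
// Illustrative aside (not part of the patch): with the local send queue
// removed from SctpDataChannel, closing now waits until the transport reports
// that its buffered amount has drained to zero, and only then issues the
// stream reset, exactly once. A stand-alone sketch of that rule; `CloseSketch`
// and its members are hypothetical names.
#include <cstddef>
#include <functional>
#include <utility>

class CloseSketch {
 public:
  explicit CloseSketch(std::function<void()> issue_stream_reset)
      : issue_stream_reset_(std::move(issue_stream_reset)) {}

  // Corresponds to Close(): remember the intent, but do not reset the stream
  // while data is still buffered inside the SCTP transport.
  void RequestClose(size_t currently_buffered) {
    closing_ = true;
    MaybeStartReset(currently_buffered);
  }

  // Corresponds to OnBufferedAmountLow(): called when the transport's
  // buffered amount falls to or below the configured threshold.
  void OnBufferedAmountLow(size_t currently_buffered) {
    MaybeStartReset(currently_buffered);
  }

 private:
  void MaybeStartReset(size_t currently_buffered) {
    if (closing_ && !started_reset_ && currently_buffered == 0) {
      started_reset_ = true;  // The reset must be issued exactly once.
      issue_stream_reset_();
    }
  }

  bool closing_ = false;
  bool started_reset_ = false;
  std::function<void()> issue_stream_reset_;
};
// ---------------------------------------------------------------------------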
SetState(kClosing); @@ -778,13 +790,13 @@ void SctpDataChannel::UpdateState() { case kConnecting: { if (connected_to_transport() && controller_) { if (handshake_state_ == kHandshakeShouldSendOpen) { - rtc::CopyOnWriteBuffer payload; + CopyOnWriteBuffer payload; WriteDataChannelOpenMessage(label_, protocol_, priority_, ordered_, max_retransmits_, max_retransmit_time_, &payload); SendControlMessage(payload); } else if (handshake_state_ == kHandshakeShouldSendAck) { - rtc::CopyOnWriteBuffer payload; + CopyOnWriteBuffer payload; WriteDataChannelOpenAckMessage(&payload); SendControlMessage(payload); } @@ -796,7 +808,7 @@ DeliverQueuedReceivedData(); } } else { - RTC_DCHECK(!id_n_.HasValue()); + RTC_DCHECK(!id_n_.has_value()); } break; } @@ -804,24 +816,22 @@ break; } case kClosing: { - if (connected_to_transport() && controller_) { + if (connected_to_transport() && controller_ && id_n_.has_value()) { // Wait for all queued data to be sent before beginning the closing // procedure. - if (queued_send_data_.Empty() && queued_control_data_.Empty()) { + if (controller_->buffered_amount(*id_n_) == 0) { // For SCTP data channels, we need to wait for the closing procedure // to complete; after calling RemoveSctpDataStream, // OnClosingProcedureComplete will end up called asynchronously // afterwards. - if (!started_closing_procedure_ && id_n_.HasValue()) { + if (!started_closing_procedure_ && id_n_.has_value()) { started_closing_procedure_ = true; - controller_->RemoveSctpDataStream(id_n_); + controller_->RemoveSctpDataStream(*id_n_); } } } else { // When we're not connected to a transport, we'll transition // directly to the `kClosed` state from here. - queued_send_data_.Clear(); - queued_control_data_.Clear(); SetState(kClosed); } break; @@ -860,29 +870,57 @@ void SctpDataChannel::DeliverQueuedReceivedData() { } } -// RTC_RUN_ON(network_thread_). -void SctpDataChannel::SendQueuedDataMessages() { - if (queued_send_data_.Empty()) { +// RTC_RUN_ON(network_thread_) +void SctpDataChannel::MaybeSendOnBufferedAmountChanged() { + // The `buffered_amount` in the signaling thread (RTCDataChannel in Blink) + // has a cached variant of the SCTP socket's buffered_amount, which it + // increases on every send and decreases when `OnBufferedAmountChange` + // is sent. + // + // To keep it consistent, this object maintains its own view of that value + // and, if it changes by a large enough amount (100 KiB, or drops to zero), sends + // `OnBufferedAmountChange` to update the caller's cached variable. if (!controller_ || !id_n_.has_value() || !observer_) { return; } - RTC_DCHECK(state_ == kOpen || state_ == kClosing); + // This constant sets the resolution of how often the bufferedAmount is updated on + // the signaling thread and exists to avoid doing cross-thread communication + // too often. On benchmarks, Chrome handles around 300 Mbps, which with this + // size results in a rate of ~400 updates per second - a reasonable number. + static constexpr int64_t kMinBufferedAmountDiffToTriggerCallback = 100 * 1024; + size_t actual_buffer_amount = controller_->buffered_amount(*id_n_); + if (actual_buffer_amount > expected_buffer_amount_) { + RTC_DLOG(LS_ERROR) << "Actual buffer_amount larger than expected"; + return; + } - while (!queued_send_data_.Empty()) { - std::unique_ptr buffer = queued_send_data_.PopFront(); - if (!SendDataMessage(*buffer, false).ok()) { - // Return the message to the front of the queue if sending is aborted.
- queued_send_data_.PushFront(std::move(buffer)); - break; - } + // Fire OnBufferedAmountChange to decrease the cached view if it represents a + // big enough change (to reduce the frequency of cross-thread communication), + // or if it reaches zero. + if ((actual_buffer_amount == 0 && expected_buffer_amount_ != 0) || + (expected_buffer_amount_ - actual_buffer_amount > + kMinBufferedAmountDiffToTriggerCallback)) { + uint64_t diff = expected_buffer_amount_ - actual_buffer_amount; + expected_buffer_amount_ = actual_buffer_amount; + observer_->OnBufferedAmountChange(diff); } + + // The threshold is always updated to ensure it's lower than what it's now. + // This ensures that this function will be called again, until the channel is + // completely drained. + controller_->SetBufferedAmountLowThreshold( + *id_n_, + actual_buffer_amount > kMinBufferedAmountDiffToTriggerCallback + ? actual_buffer_amount - kMinBufferedAmountDiffToTriggerCallback + : 0); } // RTC_RUN_ON(network_thread_). RTCError SctpDataChannel::SendDataMessage(const DataBuffer& buffer, bool queue_if_blocked) { SendDataParams send_params; - if (!controller_) { + if (!controller_ || !id_n_.has_value()) { error_ = RTCError(RTCErrorType::INVALID_STATE); return error_; } @@ -901,26 +939,14 @@ RTCError SctpDataChannel::SendDataMessage(const DataBuffer& buffer, send_params.type = buffer.binary ? DataMessageType::kBinary : DataMessageType::kText; - error_ = controller_->SendData(id_n_, send_params, buffer.data); + error_ = controller_->SendData(*id_n_, send_params, buffer.data); + MaybeSendOnBufferedAmountChanged(); if (error_.ok()) { ++messages_sent_; bytes_sent_ += buffer.size(); - - if (observer_ && buffer.size() > 0) { - observer_->OnBufferedAmountChange(buffer.size()); - } return error_; } - if (error_.type() == RTCErrorType::RESOURCE_EXHAUSTED) { - if (!queue_if_blocked) - return error_; - - if (QueueSendDataMessage(buffer)) { - error_ = RTCError::OK(); - return error_; - } - } // Close the channel if the error is not SDR_BLOCK, or if queuing the // message failed. RTC_LOG(LS_ERROR) << "Closing the DataChannel due to a failure to send data, " @@ -933,33 +959,9 @@ RTCError SctpDataChannel::SendDataMessage(const DataBuffer& buffer, } // RTC_RUN_ON(network_thread_). -bool SctpDataChannel::QueueSendDataMessage(const DataBuffer& buffer) { - size_t start_buffered_amount = queued_send_data_.byte_count(); - if (start_buffered_amount + buffer.size() > - DataChannelInterface::MaxSendQueueSize()) { - RTC_LOG(LS_ERROR) << "Can't buffer any more data for the data channel."; - error_ = RTCError(RTCErrorType::RESOURCE_EXHAUSTED); - return false; - } - queued_send_data_.PushBack(std::make_unique(buffer)); - return true; -} - -// RTC_RUN_ON(network_thread_). -void SctpDataChannel::SendQueuedControlMessages() { - PacketQueue control_packets; - control_packets.Swap(&queued_control_data_); - - while (!control_packets.Empty()) { - std::unique_ptr buf = control_packets.PopFront(); - SendControlMessage(buf->data); - } -} - -// RTC_RUN_ON(network_thread_). 
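// ---------------------------------------------------------------------------
// Illustrative aside (not part of the patch): MaybeSendOnBufferedAmountChanged
// above keeps a local `expected_buffer_amount_` and only notifies the observer
// once the transport's real buffered amount has fallen far enough behind it
// (or has reached zero), so that cross-thread bufferedAmount updates stay
// infrequent. A stand-alone sketch of that batching rule, with hypothetical
// names (`BufferedAmountBatcher`, `OnSend`):
#include <cstdint>

class BufferedAmountBatcher {
 public:
  // Returns how many bytes the caller's cached bufferedAmount should be
  // decreased by, or 0 if no notification should be sent yet.
  uint64_t OnSend(uint64_t bytes_sent, uint64_t actual_buffered) {
    expected_ += bytes_sent;
    if (actual_buffered > expected_) {
      return 0;  // Should not happen; mirrors the early return in the patch.
    }
    const uint64_t diff = expected_ - actual_buffered;
    const bool reached_zero = (actual_buffered == 0 && expected_ != 0);
    if (!reached_zero && diff <= kMinDiff) {
      return 0;  // Not a big enough change to cross threads for.
    }
    expected_ = actual_buffered;
    return diff;
  }

 private:
  // Same 100 KiB resolution as the constant in the patch above.
  static constexpr uint64_t kMinDiff = 100 * 1024;
  uint64_t expected_ = 0;
};
// ---------------------------------------------------------------------------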
-bool SctpDataChannel::SendControlMessage(const rtc::CopyOnWriteBuffer& buffer) { +bool SctpDataChannel::SendControlMessage(const CopyOnWriteBuffer& buffer) { RTC_DCHECK(connected_to_transport()); - RTC_DCHECK(id_n_.HasValue()); + RTC_DCHECK(id_n_.has_value()); RTC_DCHECK(controller_); bool is_open_message = handshake_state_ == kHandshakeShouldSendOpen; @@ -972,18 +974,16 @@ bool SctpDataChannel::SendControlMessage(const rtc::CopyOnWriteBuffer& buffer) { send_params.ordered = ordered_ || is_open_message; send_params.type = DataMessageType::kControl; - RTCError err = controller_->SendData(id_n_, send_params, buffer); + RTCError err = controller_->SendData(*id_n_, send_params, buffer); if (err.ok()) { RTC_DLOG(LS_VERBOSE) << "Sent CONTROL message on channel " - << id_n_.stream_id_int(); + << id_n_->stream_id_int(); if (handshake_state_ == kHandshakeShouldSendAck) { handshake_state_ = kHandshakeReady; } else if (handshake_state_ == kHandshakeShouldSendOpen) { handshake_state_ = kHandshakeWaitingForAck; } - } else if (err.type() == RTCErrorType::RESOURCE_EXHAUSTED) { - queued_control_data_.PushBack(std::make_unique(buffer, true)); } else { RTC_LOG(LS_ERROR) << "Closing the DataChannel due to a failure to send" " the CONTROL message, send_result = " diff --git a/pc/sctp_data_channel.h b/pc/sctp_data_channel.h index 13bebd4612..71ccd2cac0 100644 --- a/pc/sctp_data_channel.h +++ b/pc/sctp_data_channel.h @@ -14,10 +14,10 @@ #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/data_channel_interface.h" #include "api/priority.h" #include "api/rtc_error.h" @@ -46,15 +46,18 @@ class SctpDataChannelControllerInterface { // Sends the data to the transport. virtual RTCError SendData(StreamId sid, const SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload) = 0; + const CopyOnWriteBuffer& payload) = 0; // Adds the data channel SID to the transport for SCTP. - virtual void AddSctpDataStream(StreamId sid) = 0; + virtual void AddSctpDataStream(StreamId sid, PriorityValue priority) = 0; // Begins the closing procedure by sending an outgoing stream reset. Still // need to wait for callbacks to tell when this completes. virtual void RemoveSctpDataStream(StreamId sid) = 0; // Notifies the controller of state changes. virtual void OnChannelStateChanged(SctpDataChannel* data_channel, DataChannelInterface::DataState state) = 0; + virtual size_t buffered_amount(StreamId sid) const = 0; + virtual size_t buffered_amount_low_threshold(StreamId sid) const = 0; + virtual void SetBufferedAmountLowThreshold(StreamId sid, size_t bytes) = 0; protected: virtual ~SctpDataChannelControllerInterface() {} @@ -75,7 +78,7 @@ struct InternalDataChannelInit : public DataChannelInit { // stream ids in situations where we cannot determine the SSL role from the // transport for purposes of generating a stream ID. // See: https://www.rfc-editor.org/rfc/rfc8832.html#name-protocol-overview - absl::optional fallback_ssl_role; + std::optional fallback_ssl_role; }; // Helper class to allocate unique IDs for SCTP DataChannels. @@ -85,8 +88,8 @@ class SctpSidAllocator { // Gets the first unused odd/even id based on the DTLS role. If `role` is // SSL_CLIENT, the allocated id starts from 0 and takes even numbers; // otherwise, the id starts from 1 and takes odd numbers. - // If a `StreamId` cannot be allocated, `StreamId::HasValue()` will be false. - StreamId AllocateSid(rtc::SSLRole role); + // If a `StreamId` cannot be allocated, `std::nullopt` is returned. 
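// ---------------------------------------------------------------------------
// Illustrative aside (not part of the patch): AllocateSid now returns
// std::nullopt instead of an invalid StreamId when the pool is exhausted. The
// even/odd split (the DTLS client takes even sids, the server takes odd ones)
// is easy to show in isolation; `AllocateEvenOdd`, `kMaxSid` and
// `is_dtls_client` below are hypothetical stand-ins for the real
// StreamId/SSLRole types, and the upper bound is an assumption of the sketch.
#include <optional>
#include <set>

std::optional<int> AllocateEvenOdd(std::set<int>& used, bool is_dtls_client) {
  constexpr int kMaxSid = 65534;  // Assumed upper bound for this sketch.
  for (int sid = is_dtls_client ? 0 : 1; sid <= kMaxSid; sid += 2) {
    if (used.insert(sid).second) {
      return sid;  // First id of the right parity not yet in use.
    }
  }
  return std::nullopt;  // Pool exhausted, mirroring the new return value.
}
// ---------------------------------------------------------------------------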
+ std::optional AllocateSid(SSLRole role); // Attempts to reserve a specific sid. Returns false if it's unavailable. bool ReserveSid(StreamId sid); @@ -126,13 +129,13 @@ class SctpSidAllocator { // OnClosingProcedureComplete callback and transition to kClosed. class SctpDataChannel : public DataChannelInterface { public: - static rtc::scoped_refptr Create( - rtc::WeakPtr controller, + static scoped_refptr Create( + WeakPtr controller, const std::string& label, bool connected_to_transport, const InternalDataChannelInit& config, - rtc::Thread* signaling_thread, - rtc::Thread* network_thread); + Thread* signaling_thread, + Thread* network_thread); // Instantiates an API proxy for a SctpDataChannel instance that will be // handed out to external callers. @@ -141,9 +144,9 @@ class SctpDataChannel : public DataChannelInterface { // callbacks after the peerconnection has been closed. The data controller // will update the flag when closed, which will cancel any pending event // notifications. - static rtc::scoped_refptr CreateProxy( - rtc::scoped_refptr channel, - rtc::scoped_refptr signaling_safety); + static scoped_refptr CreateProxy( + scoped_refptr channel, + scoped_refptr signaling_safety); void RegisterObserver(DataChannelObserver* observer) override; void UnregisterObserver() override; @@ -152,16 +155,12 @@ class SctpDataChannel : public DataChannelInterface { bool reliable() const override; bool ordered() const override; - // Backwards compatible accessors - uint16_t maxRetransmitTime() const override; - uint16_t maxRetransmits() const override; - - absl::optional maxPacketLifeTime() const override; - absl::optional maxRetransmitsOpt() const override; + std::optional maxPacketLifeTime() const override; + std::optional maxRetransmitsOpt() const override; std::string protocol() const override; bool negotiated() const override; int id() const override; - Priority priority() const override; + PriorityValue priority() const override; uint64_t buffered_amount() const override; void Close() override; @@ -187,8 +186,7 @@ class SctpDataChannel : public DataChannelInterface { // already finished. void OnTransportReady(); - void OnDataReceived(DataMessageType type, - const rtc::CopyOnWriteBuffer& payload); + void OnDataReceived(DataMessageType type, const CopyOnWriteBuffer& payload); // Sets the SCTP sid and adds to transport layer if not set yet. Should only // be called once. @@ -207,6 +205,9 @@ class SctpDataChannel : public DataChannelInterface { // This method makes sure the DataChannel is disconnected and changes state // to kClosed. void OnTransportChannelClosed(RTCError error); + // Called when the amount of data buffered to be sent falls to or below the + // threshold set when calling `SetBufferedAmountLowThreshold`. + void OnBufferedAmountLow(); DataChannelStats GetStats() const; @@ -215,7 +216,7 @@ class SctpDataChannel : public DataChannelInterface { // stats purposes (see also `GetStats()`). 
int internal_id() const { return internal_id_; } - StreamId sid_n() const { + std::optional sid_n() const { RTC_DCHECK_RUN_ON(network_thread_); return id_n_; } @@ -226,11 +227,11 @@ class SctpDataChannel : public DataChannelInterface { protected: SctpDataChannel(const InternalDataChannelInit& config, - rtc::WeakPtr controller, + WeakPtr controller, const std::string& label, bool connected_to_transport, - rtc::Thread* signaling_thread, - rtc::Thread* network_thread); + Thread* signaling_thread, + Thread* network_thread); ~SctpDataChannel() override; private: @@ -251,31 +252,30 @@ class SctpDataChannel : public DataChannelInterface { void DeliverQueuedReceivedData() RTC_RUN_ON(network_thread_); - void SendQueuedDataMessages() RTC_RUN_ON(network_thread_); RTCError SendDataMessage(const DataBuffer& buffer, bool queue_if_blocked) RTC_RUN_ON(network_thread_); - bool QueueSendDataMessage(const DataBuffer& buffer) - RTC_RUN_ON(network_thread_); - void SendQueuedControlMessages() RTC_RUN_ON(network_thread_); - bool SendControlMessage(const rtc::CopyOnWriteBuffer& buffer) + bool SendControlMessage(const CopyOnWriteBuffer& buffer) RTC_RUN_ON(network_thread_); bool connected_to_transport() const RTC_RUN_ON(network_thread_) { return network_safety_->alive(); } + void MaybeSendOnBufferedAmountChanged() RTC_RUN_ON(network_thread_); - rtc::Thread* const signaling_thread_; - rtc::Thread* const network_thread_; - StreamId id_n_ RTC_GUARDED_BY(network_thread_); + Thread* const signaling_thread_; + Thread* const network_thread_; + std::optional id_n_ RTC_GUARDED_BY(network_thread_) = std::nullopt; const int internal_id_; const std::string label_; const std::string protocol_; - const absl::optional max_retransmit_time_; - const absl::optional max_retransmits_; - const absl::optional priority_; + const std::optional max_retransmit_time_; + const std::optional max_retransmits_; + const std::optional priority_; const bool negotiated_; const bool ordered_; + // See the body of `MaybeSendOnBufferedAmountChanged`. + size_t expected_buffer_amount_ = 0; DataChannelObserver* observer_ RTC_GUARDED_BY(network_thread_) = nullptr; std::unique_ptr observer_adapter_; @@ -285,18 +285,14 @@ class SctpDataChannel : public DataChannelInterface { uint64_t bytes_sent_ RTC_GUARDED_BY(network_thread_) = 0; uint32_t messages_received_ RTC_GUARDED_BY(network_thread_) = 0; uint64_t bytes_received_ RTC_GUARDED_BY(network_thread_) = 0; - rtc::WeakPtr controller_ + WeakPtr controller_ RTC_GUARDED_BY(network_thread_); HandshakeState handshake_state_ RTC_GUARDED_BY(network_thread_) = kHandshakeInit; // Did we already start the graceful SCTP closing procedure? bool started_closing_procedure_ RTC_GUARDED_BY(network_thread_) = false; - // Control messages that always have to get sent out before any queued - // data. 
- PacketQueue queued_control_data_ RTC_GUARDED_BY(network_thread_); PacketQueue queued_received_data_ RTC_GUARDED_BY(network_thread_); - PacketQueue queued_send_data_ RTC_GUARDED_BY(network_thread_); - rtc::scoped_refptr network_safety_ = + scoped_refptr network_safety_ = PendingTaskSafetyFlag::CreateDetachedInactive(); }; diff --git a/pc/sctp_transport.cc b/pc/sctp_transport.cc index 7f55e39d9e..6f56026443 100644 --- a/pc/sctp_transport.cc +++ b/pc/sctp_transport.cc @@ -11,30 +11,48 @@ #include "pc/sctp_transport.h" #include +#include +#include +#include #include -#include "absl/types/optional.h" #include "api/dtls_transport_interface.h" +#include "api/priority.h" +#include "api/rtc_error.h" +#include "api/scoped_refptr.h" +#include "api/sctp_transport_interface.h" #include "api/sequence_checker.h" +#include "api/transport/data_channel_transport_interface.h" +#include "media/sctp/sctp_transport_internal.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "pc/dtls_transport.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" +#include "rtc_base/thread.h" namespace webrtc { -SctpTransport::SctpTransport( - std::unique_ptr internal) - : owner_thread_(rtc::Thread::Current()), - info_(SctpTransportState::kNew), - internal_sctp_transport_(std::move(internal)) { +SctpTransport::SctpTransport(std::unique_ptr internal, + scoped_refptr dtls_transport) + : owner_thread_(Thread::Current()), + info_(SctpTransportState::kConnecting, + dtls_transport, + /*max_message_size=*/std::nullopt, + /*max_channels=*/std::nullopt), + internal_sctp_transport_(std::move(internal)), + dtls_transport_(dtls_transport) { RTC_DCHECK(internal_sctp_transport_.get()); + RTC_DCHECK(dtls_transport_.get()); + + dtls_transport_->internal()->SubscribeDtlsTransportState( + [this](DtlsTransportInternal* transport, DtlsTransportState state) { + OnDtlsStateChange(transport, state); + }); + + internal_sctp_transport_->SetDtlsTransport(dtls_transport->internal()); internal_sctp_transport_->SetOnConnectedCallback( [this]() { OnAssociationChangeCommunicationUp(); }); - - if (dtls_transport_) { - UpdateInformation(SctpTransportState::kConnecting); - } else { - UpdateInformation(SctpTransportState::kNew); - } } SctpTransport::~SctpTransport() { @@ -67,16 +85,16 @@ void SctpTransport::UnregisterObserver() { observer_ = nullptr; } -RTCError SctpTransport::OpenChannel(int channel_id) { +RTCError SctpTransport::OpenChannel(int channel_id, PriorityValue priority) { RTC_DCHECK_RUN_ON(owner_thread_); RTC_DCHECK(internal_sctp_transport_); - internal_sctp_transport_->OpenStream(channel_id); + internal_sctp_transport_->OpenStream(channel_id, priority); return RTCError::OK(); } RTCError SctpTransport::SendData(int channel_id, const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) { + const CopyOnWriteBuffer& buffer) { RTC_DCHECK_RUN_ON(owner_thread_); return internal_sctp_transport_->SendData(channel_id, params, buffer); } @@ -100,8 +118,24 @@ bool SctpTransport::IsReadyToSend() const { return internal_sctp_transport_->ReadyToSendData(); } -rtc::scoped_refptr SctpTransport::dtls_transport() - const { +size_t SctpTransport::buffered_amount(int channel_id) const { + RTC_DCHECK_RUN_ON(owner_thread_); + RTC_DCHECK(internal_sctp_transport_); + return internal_sctp_transport_->buffered_amount(channel_id); +} + +size_t SctpTransport::buffered_amount_low_threshold(int channel_id) const { + RTC_DCHECK_RUN_ON(owner_thread_); + return 
internal_sctp_transport_->buffered_amount_low_threshold(channel_id); +} + +void SctpTransport::SetBufferedAmountLowThreshold(int channel_id, + size_t bytes) { + RTC_DCHECK_RUN_ON(owner_thread_); + internal_sctp_transport_->SetBufferedAmountLowThreshold(channel_id, bytes); +} + +scoped_refptr SctpTransport::dtls_transport() const { RTC_DCHECK_RUN_ON(owner_thread_); return dtls_transport_; } @@ -117,39 +151,13 @@ void SctpTransport::Clear() { UpdateInformation(SctpTransportState::kClosed); } -void SctpTransport::SetDtlsTransport( - rtc::scoped_refptr transport) { - RTC_DCHECK_RUN_ON(owner_thread_); - SctpTransportState next_state = info_.state(); - dtls_transport_ = transport; - if (internal_sctp_transport_) { - if (transport) { - internal_sctp_transport_->SetDtlsTransport(transport->internal()); - - transport->internal()->SubscribeDtlsTransportState( - [this](cricket::DtlsTransportInternal* transport, - DtlsTransportState state) { - OnDtlsStateChange(transport, state); - }); - if (info_.state() == SctpTransportState::kNew) { - next_state = SctpTransportState::kConnecting; - } - } else { - internal_sctp_transport_->SetDtlsTransport(nullptr); - } - } - - UpdateInformation(next_state); -} - -void SctpTransport::Start(int local_port, - int remote_port, - int max_message_size) { +void SctpTransport::Start(const SctpOptions& options) { RTC_DCHECK_RUN_ON(owner_thread_); - info_ = SctpTransportInformation(info_.state(), info_.dtls_transport(), - max_message_size, info_.MaxChannels()); + info_ = + SctpTransportInformation(info_.state(), info_.dtls_transport(), + options.max_message_size, info_.MaxChannels()); - if (!internal()->Start(local_port, remote_port, max_message_size)) { + if (!internal()->Start(options)) { RTC_LOG(LS_ERROR) << "Failed to push down SCTP parameters, closing."; UpdateInformation(SctpTransportState::kClosed); } @@ -189,7 +197,7 @@ void SctpTransport::OnAssociationChangeCommunicationUp() { UpdateInformation(SctpTransportState::kConnected); } -void SctpTransport::OnDtlsStateChange(cricket::DtlsTransportInternal* transport, +void SctpTransport::OnDtlsStateChange(DtlsTransportInternal* transport, DtlsTransportState state) { RTC_DCHECK_RUN_ON(owner_thread_); RTC_CHECK(transport == dtls_transport_->internal()); diff --git a/pc/sctp_transport.h b/pc/sctp_transport.h index 35e7656100..cc43aaafc4 100644 --- a/pc/sctp_transport.h +++ b/pc/sctp_transport.h @@ -11,64 +11,69 @@ #ifndef PC_SCTP_TRANSPORT_H_ #define PC_SCTP_TRANSPORT_H_ +#include #include #include "api/dtls_transport_interface.h" +#include "api/priority.h" +#include "api/rtc_error.h" #include "api/scoped_refptr.h" #include "api/sctp_transport_interface.h" #include "api/sequence_checker.h" #include "api/transport/data_channel_transport_interface.h" #include "media/sctp/sctp_transport_internal.h" -#include "p2p/base/dtls_transport_internal.h" +#include "p2p/dtls/dtls_transport_internal.h" #include "pc/dtls_transport.h" -#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" namespace webrtc { -// This implementation wraps a cricket::SctpTransport, and takes +// This implementation wraps a webrtc::SctpTransport, and takes // ownership of it. // This object must be constructed and updated on the networking thread, -// the same thread as the one the cricket::SctpTransportInternal object +// the same thread as the one the webrtc::SctpTransportInternal object // lives on. 
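// ---------------------------------------------------------------------------
// Illustrative aside (not part of the patch): SctpTransport now receives its
// DtlsTransport in the constructor, subscribes to its state changes there, and
// starts out in the kConnecting state, instead of supporting a later
// SetDtlsTransport() call. The shape of that change, sketched with
// hypothetical `Wrapper`/`Lower` types rather than the real classes:
#include <functional>
#include <memory>
#include <utility>

class Lower {  // Stands in for the wrapped DTLS transport.
 public:
  void SubscribeState(std::function<void(int)> cb) {
    state_cb_ = std::move(cb);
  }

 private:
  std::function<void(int)> state_cb_;
};

class Wrapper {
 public:
  // The dependency arrives fully formed; there is no "not yet attached" state
  // to model, so the object can begin in its connecting state immediately.
  explicit Wrapper(std::unique_ptr<Lower> lower) : lower_(std::move(lower)) {
    lower_->SubscribeState([this](int state) { OnLowerState(state); });
  }

 private:
  void OnLowerState(int /*state*/) { /* React to DTLS state changes here. */ }
  std::unique_ptr<Lower> lower_;
};
// ---------------------------------------------------------------------------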
class SctpTransport : public SctpTransportInterface, public DataChannelTransportInterface { public: - explicit SctpTransport( - std::unique_ptr internal); + SctpTransport(std::unique_ptr internal, + scoped_refptr dtls_transport); // SctpTransportInterface - rtc::scoped_refptr dtls_transport() const override; + scoped_refptr dtls_transport() const override; SctpTransportInformation Information() const override; void RegisterObserver(SctpTransportObserverInterface* observer) override; void UnregisterObserver() override; // DataChannelTransportInterface - RTCError OpenChannel(int channel_id) override; + RTCError OpenChannel(int channel_id, PriorityValue priority) override; RTCError SendData(int channel_id, const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) override; + const CopyOnWriteBuffer& buffer) override; RTCError CloseChannel(int channel_id) override; void SetDataSink(DataChannelSink* sink) override; bool IsReadyToSend() const override; + size_t buffered_amount(int channel_id) const override; + size_t buffered_amount_low_threshold(int channel_id) const override; + void SetBufferedAmountLowThreshold(int channel_id, size_t bytes) override; // Internal functions void Clear(); - void SetDtlsTransport(rtc::scoped_refptr); - // Initialize the cricket::SctpTransport. This can be called from + // Initialize the webrtc::SctpTransport. This can be called from // the signaling thread. - void Start(int local_port, int remote_port, int max_message_size); + void Start(const SctpOptions& options); // TODO(https://bugs.webrtc.org/10629): Move functions that need - // internal() to be functions on the webrtc::SctpTransport interface, + // internal() to be functions on the SctpTransport interface, // and make the internal() function private. - cricket::SctpTransportInternal* internal() { + SctpTransportInternal* internal() { RTC_DCHECK_RUN_ON(owner_thread_); return internal_sctp_transport_.get(); } - const cricket::SctpTransportInternal* internal() const { + const SctpTransportInternal* internal() const { RTC_DCHECK_RUN_ON(owner_thread_); return internal_sctp_transport_.get(); } @@ -82,19 +87,18 @@ class SctpTransport : public SctpTransportInterface, void OnAssociationChangeCommunicationUp(); void OnInternalClosingProcedureStartedRemotely(int sid); void OnInternalClosingProcedureComplete(int sid); - void OnDtlsStateChange(cricket::DtlsTransportInternal* transport, + void OnDtlsStateChange(DtlsTransportInternal* transport, DtlsTransportState state); // NOTE: `owner_thread_` is the thread that the SctpTransport object is // constructed on. In the context of PeerConnection, it's the network thread. 
- rtc::Thread* const owner_thread_; + Thread* const owner_thread_; SctpTransportInformation info_ RTC_GUARDED_BY(owner_thread_); - std::unique_ptr internal_sctp_transport_ + std::unique_ptr internal_sctp_transport_ RTC_GUARDED_BY(owner_thread_); SctpTransportObserverInterface* observer_ RTC_GUARDED_BY(owner_thread_) = nullptr; - rtc::scoped_refptr dtls_transport_ - RTC_GUARDED_BY(owner_thread_); + scoped_refptr dtls_transport_ RTC_GUARDED_BY(owner_thread_); }; } // namespace webrtc diff --git a/pc/sctp_transport_unittest.cc b/pc/sctp_transport_unittest.cc index d18543f20c..c47690c64c 100644 --- a/pc/sctp_transport_unittest.cc +++ b/pc/sctp_transport_unittest.cc @@ -10,59 +10,68 @@ #include "pc/sctp_transport.h" +#include +#include +#include +#include #include #include #include "absl/memory/memory.h" -#include "absl/types/optional.h" #include "api/dtls_transport_interface.h" +#include "api/make_ref_counted.h" +#include "api/priority.h" +#include "api/rtc_error.h" +#include "api/scoped_refptr.h" +#include "api/sctp_transport_interface.h" +#include "api/test/rtc_error_matchers.h" #include "api/transport/data_channel_transport_interface.h" -#include "media/base/media_channel.h" -#include "p2p/base/fake_dtls_transport.h" +#include "media/sctp/sctp_transport_internal.h" #include "p2p/base/p2p_constants.h" -#include "p2p/base/packet_transport_internal.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "p2p/dtls/fake_dtls_transport.h" #include "pc/dtls_transport.h" #include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/gunit.h" +#include "rtc_base/thread.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" + +namespace webrtc { -constexpr int kDefaultTimeout = 1000; // milliseconds constexpr int kTestMaxSctpStreams = 1234; -using cricket::FakeDtlsTransport; using ::testing::ElementsAre; -namespace webrtc { - namespace { -class FakeCricketSctpTransport : public cricket::SctpTransportInternal { +class FakeCricketSctpTransport : public SctpTransportInternal { public: void SetOnConnectedCallback(std::function callback) override { on_connected_callback_ = std::move(callback); } void SetDataChannelSink(DataChannelSink* sink) override {} - void SetDtlsTransport(rtc::PacketTransportInternal* transport) override {} - bool Start(int local_port, int remote_port, int max_message_size) override { - return true; - } - bool OpenStream(int sid) override { return true; } + void SetDtlsTransport(DtlsTransportInternal* transport) override {} + bool Start(const SctpOptions& options) override { return true; } + bool OpenStream(int sid, PriorityValue priority) override { return true; } bool ResetStream(int sid) override { return true; } RTCError SendData(int sid, const SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload) override { + const CopyOnWriteBuffer& payload) override { return RTCError::OK(); } bool ReadyToSendData() override { return true; } void set_debug_name_for_testing(const char* debug_name) override {} int max_message_size() const override { return 0; } - absl::optional max_outbound_streams() const override { + std::optional max_outbound_streams() const override { return max_outbound_streams_; } - absl::optional max_inbound_streams() const override { + std::optional max_inbound_streams() const override { return max_inbound_streams_; } + size_t buffered_amount(int sid) const override { return 0; } + size_t buffered_amount_low_threshold(int sid) const override { return 0; } + void SetBufferedAmountLowThreshold(int sid, size_t bytes) override {} void 
SendSignalAssociationChangeCommunicationUp() { ASSERT_TRUE(on_connected_callback_); @@ -75,8 +84,8 @@ class FakeCricketSctpTransport : public cricket::SctpTransportInternal { void set_max_inbound_streams(int streams) { max_inbound_streams_ = streams; } private: - absl::optional max_outbound_streams_; - absl::optional max_inbound_streams_; + std::optional max_outbound_streams_; + std::optional max_inbound_streams_; std::function on_connected_callback_; }; @@ -92,7 +101,7 @@ class TestSctpTransportObserver : public SctpTransportObserverInterface { } SctpTransportState State() { - if (states_.size() > 0) { + if (!states_.empty()) { return states_[states_.size() - 1]; } else { return SctpTransportState::kNew; @@ -101,7 +110,7 @@ class TestSctpTransportObserver : public SctpTransportObserverInterface { const std::vector& States() { return states_; } - const SctpTransportInformation LastReceivedInformation() { return info_; } + SctpTransportInformation LastReceivedInformation() { return info_; } private: std::vector states_; @@ -114,19 +123,16 @@ class SctpTransportTest : public ::testing::Test { SctpTransportObserverInterface* observer() { return &observer_; } void CreateTransport() { + std::unique_ptr cricket_transport = + std::make_unique("audio", + ICE_CANDIDATE_COMPONENT_RTP); + dtls_transport_ = + make_ref_counted(std::move(cricket_transport)); + auto cricket_sctp_transport = absl::WrapUnique(new FakeCricketSctpTransport()); - transport_ = - rtc::make_ref_counted(std::move(cricket_sctp_transport)); - } - - void AddDtlsTransport() { - std::unique_ptr cricket_transport = - std::make_unique( - "audio", cricket::ICE_CANDIDATE_COMPONENT_RTP); - dtls_transport_ = - rtc::make_ref_counted(std::move(cricket_transport)); - transport_->SetDtlsTransport(dtls_transport_); + transport_ = make_ref_counted( + std::move(cricket_sctp_transport), dtls_transport_); } void CompleteSctpHandshake() { @@ -141,21 +147,26 @@ class SctpTransportTest : public ::testing::Test { return static_cast(transport_->internal()); } - rtc::AutoThread main_thread_; - rtc::scoped_refptr transport_; - rtc::scoped_refptr dtls_transport_; + AutoThread main_thread_; + scoped_refptr transport_; + scoped_refptr dtls_transport_; TestSctpTransportObserver observer_; }; TEST(SctpTransportSimpleTest, CreateClearDelete) { - rtc::AutoThread main_thread; - std::unique_ptr fake_cricket_sctp_transport = + AutoThread main_thread; + std::unique_ptr cricket_transport = + std::make_unique("audio", ICE_CANDIDATE_COMPONENT_RTP); + scoped_refptr dtls_transport = + make_ref_counted(std::move(cricket_transport)); + + std::unique_ptr fake_cricket_sctp_transport = absl::WrapUnique(new FakeCricketSctpTransport()); - rtc::scoped_refptr sctp_transport = - rtc::make_ref_counted( - std::move(fake_cricket_sctp_transport)); + scoped_refptr sctp_transport = make_ref_counted( + std::move(fake_cricket_sctp_transport), dtls_transport); ASSERT_TRUE(sctp_transport->internal()); - ASSERT_EQ(SctpTransportState::kNew, sctp_transport->Information().state()); + ASSERT_EQ(SctpTransportState::kConnecting, + sctp_transport->Information().state()); sctp_transport->Clear(); ASSERT_FALSE(sctp_transport->internal()); ASSERT_EQ(SctpTransportState::kClosed, sctp_transport->Information().state()); @@ -164,35 +175,35 @@ TEST(SctpTransportSimpleTest, CreateClearDelete) { TEST_F(SctpTransportTest, EventsObservedWhenConnecting) { CreateTransport(); transport()->RegisterObserver(observer()); - AddDtlsTransport(); CompleteSctpHandshake(); - ASSERT_EQ_WAIT(SctpTransportState::kConnected, 
observer_.State(), - kDefaultTimeout); - EXPECT_THAT(observer_.States(), ElementsAre(SctpTransportState::kConnecting, - SctpTransportState::kConnected)); + ASSERT_THAT(WaitUntil([&] { return observer_.State(); }, + ::testing::Eq(SctpTransportState::kConnected)), + IsRtcOk()); + EXPECT_THAT(observer_.States(), ElementsAre(SctpTransportState::kConnected)); } TEST_F(SctpTransportTest, CloseWhenClearing) { CreateTransport(); transport()->RegisterObserver(observer()); - AddDtlsTransport(); CompleteSctpHandshake(); - ASSERT_EQ_WAIT(SctpTransportState::kConnected, observer_.State(), - kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return observer_.State(); }, + ::testing::Eq(SctpTransportState::kConnected)), + IsRtcOk()); transport()->Clear(); - ASSERT_EQ_WAIT(SctpTransportState::kClosed, observer_.State(), - kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return observer_.State(); }, + ::testing::Eq(SctpTransportState::kClosed)), + IsRtcOk()); } TEST_F(SctpTransportTest, MaxChannelsSignalled) { CreateTransport(); transport()->RegisterObserver(observer()); - AddDtlsTransport(); EXPECT_FALSE(transport()->Information().MaxChannels()); EXPECT_FALSE(observer_.LastReceivedInformation().MaxChannels()); CompleteSctpHandshake(); - ASSERT_EQ_WAIT(SctpTransportState::kConnected, observer_.State(), - kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return observer_.State(); }, + ::testing::Eq(SctpTransportState::kConnected)), + IsRtcOk()); EXPECT_TRUE(transport()->Information().MaxChannels()); EXPECT_EQ(kTestMaxSctpStreams, *(transport()->Information().MaxChannels())); EXPECT_TRUE(observer_.LastReceivedInformation().MaxChannels()); @@ -203,14 +214,14 @@ TEST_F(SctpTransportTest, MaxChannelsSignalled) { TEST_F(SctpTransportTest, CloseWhenTransportCloses) { CreateTransport(); transport()->RegisterObserver(observer()); - AddDtlsTransport(); CompleteSctpHandshake(); - ASSERT_EQ_WAIT(SctpTransportState::kConnected, observer_.State(), - kDefaultTimeout); - static_cast(dtls_transport_->internal()) + ASSERT_THAT(WaitUntil([&] { return observer_.State(); }, + ::testing::Eq(SctpTransportState::kConnected)), + IsRtcOk()); + static_cast(dtls_transport_->internal()) ->SetDtlsState(DtlsTransportState::kClosed); - ASSERT_EQ_WAIT(SctpTransportState::kClosed, observer_.State(), - kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return observer_.State(); }, + ::testing::Eq(SctpTransportState::kClosed)), + IsRtcOk()); } - } // namespace webrtc diff --git a/pc/sctp_utils.cc b/pc/sctp_utils.cc index 54742c27a7..bc248cd2c3 100644 --- a/pc/sctp_utils.cc +++ b/pc/sctp_utils.cc @@ -13,8 +13,9 @@ #include #include +#include +#include -#include "absl/types/optional.h" #include "api/priority.h" #include "rtc_base/byte_buffer.h" #include "rtc_base/copy_on_write_buffer.h" @@ -46,7 +47,7 @@ enum DataChannelPriority { DCO_PRIORITY_HIGH = 1024, }; -bool IsOpenMessage(const rtc::CopyOnWriteBuffer& payload) { +bool IsOpenMessage(const CopyOnWriteBuffer& payload) { // Format defined at // https://www.rfc-editor.org/rfc/rfc8832#section-5.1 if (payload.size() < 1) { @@ -58,13 +59,13 @@ bool IsOpenMessage(const rtc::CopyOnWriteBuffer& payload) { return message_type == DATA_CHANNEL_OPEN_MESSAGE_TYPE; } -bool ParseDataChannelOpenMessage(const rtc::CopyOnWriteBuffer& payload, +bool ParseDataChannelOpenMessage(const CopyOnWriteBuffer& payload, std::string* label, DataChannelInit* config) { // Format defined at // http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04 - rtc::ByteBufferReader buffer(payload.data(), payload.size()); + 
ByteBufferReader buffer(payload); uint8_t message_type; if (!buffer.ReadUInt8(&message_type)) { RTC_LOG(LS_WARNING) << "Could not read OPEN message type."; @@ -88,17 +89,7 @@ bool ParseDataChannelOpenMessage(const rtc::CopyOnWriteBuffer& payload, << "Could not read OPEN message reliabilility prioirty."; return false; } - // Parse priority as defined in - // https://w3c.github.io/webrtc-priority/#rtcdatachannel-processing-steps - if (priority <= DCO_PRIORITY_VERY_LOW) { - config->priority = Priority::kVeryLow; - } else if (priority <= DCO_PRIORITY_LOW) { - config->priority = Priority::kLow; - } else if (priority <= DCO_PRIORITY_MEDIUM) { - config->priority = Priority::kMedium; - } else { - config->priority = Priority::kHigh; - } + config->priority = PriorityValue(priority); uint32_t reliability_param; if (!buffer.ReadUInt32(&reliability_param)) { @@ -132,8 +123,8 @@ bool ParseDataChannelOpenMessage(const rtc::CopyOnWriteBuffer& payload, config->ordered = false; } - config->maxRetransmits = absl::nullopt; - config->maxRetransmitTime = absl::nullopt; + config->maxRetransmits = std::nullopt; + config->maxRetransmitTime = std::nullopt; switch (channel_type) { case DCOMCT_ORDERED_PARTIAL_RTXS: case DCOMCT_UNORDERED_PARTIAL_RTXS: @@ -147,7 +138,7 @@ bool ParseDataChannelOpenMessage(const rtc::CopyOnWriteBuffer& payload, return true; } -bool ParseDataChannelOpenAckMessage(const rtc::CopyOnWriteBuffer& payload) { +bool ParseDataChannelOpenAckMessage(const CopyOnWriteBuffer& payload) { if (payload.size() < 1) { RTC_LOG(LS_WARNING) << "Could not read OPEN_ACK message type."; return false; @@ -164,7 +155,7 @@ bool ParseDataChannelOpenAckMessage(const rtc::CopyOnWriteBuffer& payload) { bool WriteDataChannelOpenMessage(const std::string& label, const DataChannelInit& config, - rtc::CopyOnWriteBuffer* payload) { + CopyOnWriteBuffer* payload) { return WriteDataChannelOpenMessage(label, config.protocol, config.priority, config.ordered, config.maxRetransmits, config.maxRetransmitTime, payload); @@ -172,34 +163,18 @@ bool WriteDataChannelOpenMessage(const std::string& label, bool WriteDataChannelOpenMessage(const std::string& label, const std::string& protocol, - absl::optional opt_priority, + std::optional opt_priority, bool ordered, - absl::optional max_retransmits, - absl::optional max_retransmit_time, - rtc::CopyOnWriteBuffer* payload) { + std::optional max_retransmits, + std::optional max_retransmit_time, + CopyOnWriteBuffer* payload) { // Format defined at // http://tools.ietf.org/html/draft-ietf-rtcweb-data-protocol-09#section-5.1 uint8_t channel_type = 0; uint32_t reliability_param = 0; - uint16_t priority = 0; // Set priority according to // https://tools.ietf.org/html/draft-ietf-rtcweb-data-channel-12#section-6.4 - if (opt_priority) { - switch (*opt_priority) { - case Priority::kVeryLow: - priority = DCO_PRIORITY_VERY_LOW; - break; - case Priority::kLow: - priority = DCO_PRIORITY_LOW; - break; - case Priority::kMedium: - priority = DCO_PRIORITY_MEDIUM; - break; - case Priority::kHigh: - priority = DCO_PRIORITY_HIGH; - break; - } - } + PriorityValue priority = opt_priority.value_or(PriorityValue(Priority::kLow)); if (ordered) { if (max_retransmits) { channel_type = DCOMCT_ORDERED_PARTIAL_RTXS; @@ -222,11 +197,11 @@ bool WriteDataChannelOpenMessage(const std::string& label, } } - rtc::ByteBufferWriter buffer(NULL, 20 + label.length() + protocol.length()); + ByteBufferWriter buffer(NULL, 20 + label.length() + protocol.length()); // TODO(tommi): Add error handling and check resulting length. 
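  // For reference, the DATA_CHANNEL_OPEN layout produced by the writes below
  // (RFC 8832, section 5.1):
  //   1 byte   message type (0x03)
  //   1 byte   channel type
  //   2 bytes  priority
  //   4 bytes  reliability parameter
  //   2 bytes  label length
  //   2 bytes  protocol length
  //   variable label, then protocol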
buffer.WriteUInt8(DATA_CHANNEL_OPEN_MESSAGE_TYPE); buffer.WriteUInt8(channel_type); - buffer.WriteUInt16(priority); + buffer.WriteUInt16(priority.value()); buffer.WriteUInt32(reliability_param); buffer.WriteUInt16(static_cast(label.length())); buffer.WriteUInt16(static_cast(protocol.length())); @@ -236,7 +211,7 @@ bool WriteDataChannelOpenMessage(const std::string& label, return true; } -void WriteDataChannelOpenAckMessage(rtc::CopyOnWriteBuffer* payload) { +void WriteDataChannelOpenAckMessage(CopyOnWriteBuffer* payload) { uint8_t data = DATA_CHANNEL_OPEN_ACK_MESSAGE_TYPE; payload->SetData(&data, sizeof(data)); } diff --git a/pc/sctp_utils.h b/pc/sctp_utils.h index 868a8be826..098f6e9659 100644 --- a/pc/sctp_utils.h +++ b/pc/sctp_utils.h @@ -14,6 +14,7 @@ #include #include "api/data_channel_interface.h" +#include "api/priority.h" #include "api/transport/data_channel_transport_interface.h" #include "media/base/media_channel.h" #include "media/sctp/sctp_transport_internal.h" @@ -21,11 +22,8 @@ #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/ssl_stream_adapter.h" // For SSLRole -namespace rtc { -class CopyOnWriteBuffer; -} // namespace rtc - namespace webrtc { +class CopyOnWriteBuffer; struct DataChannelInit; // Wraps the `uint16_t` sctp data channel stream id value and does range @@ -36,58 +34,42 @@ struct DataChannelInit; class StreamId { public: StreamId() = default; - explicit StreamId(int id) - : id_(id >= cricket::kMinSctpSid && id <= cricket::kSpecMaxSctpSid - ? absl::optional(static_cast(id)) - : absl::nullopt) {} + explicit StreamId(uint16_t id) : id_(id) {} StreamId(const StreamId& sid) = default; StreamId& operator=(const StreamId& sid) = default; - - // Returns `true` if a valid stream id is contained, in the range of - // kMinSctpSid - kSpecMaxSctpSid ([0..0xffff]). Note that this - // is different than having `kMaxSctpSid` as the upper bound, which is - // the limit that is internally used by `SctpSidAllocator`. Sid values may - // be assigned to `StreamId` outside of `SctpSidAllocator` and have a higher - // id value than supplied by `SctpSidAllocator`, yet is still valid. - bool HasValue() const { return id_.has_value(); } - // Provided for compatibility with existing code that hasn't been updated // to use `StreamId` directly. New code should not use 'int' for the stream // id but rather `StreamId` directly. - int stream_id_int() const { - return id_.has_value() ? static_cast(id_.value().value()) : -1; - } - - void reset() { id_ = absl::nullopt; } + int stream_id_int() const { return static_cast(id_.value()); } bool operator==(const StreamId& sid) const { return id_ == sid.id_; } bool operator<(const StreamId& sid) const { return id_ < sid.id_; } bool operator!=(const StreamId& sid) const { return !(operator==(sid)); } private: - absl::optional id_; + dcsctp::StreamID id_; }; // Read the message type and return true if it's an OPEN message. 
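The helpers declared just below round-trip that message. A short sketch, mirroring pc/sctp_utils_unittest.cc later in this patch (the label, protocol and priority values are made up):

  webrtc::DataChannelInit config;
  config.protocol = "chat";
  config.priority = webrtc::PriorityValue(webrtc::Priority::kHigh);

  webrtc::CopyOnWriteBuffer packet;
  if (webrtc::WriteDataChannelOpenMessage("label", config, &packet) &&
      webrtc::IsOpenMessage(packet)) {
    std::string label;
    webrtc::DataChannelInit parsed;
    webrtc::ParseDataChannelOpenMessage(packet, &label, &parsed);
    // The raw 16-bit priority now survives the round trip instead of being
    // bucketed into kVeryLow/kLow/kMedium/kHigh as it was before this change.
    RTC_DCHECK_EQ(parsed.priority->value(), config.priority->value());
  }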
-bool IsOpenMessage(const rtc::CopyOnWriteBuffer& payload); +bool IsOpenMessage(const CopyOnWriteBuffer& payload); -bool ParseDataChannelOpenMessage(const rtc::CopyOnWriteBuffer& payload, +bool ParseDataChannelOpenMessage(const CopyOnWriteBuffer& payload, std::string* label, DataChannelInit* config); -bool ParseDataChannelOpenAckMessage(const rtc::CopyOnWriteBuffer& payload); +bool ParseDataChannelOpenAckMessage(const CopyOnWriteBuffer& payload); bool WriteDataChannelOpenMessage(const std::string& label, const std::string& protocol, - absl::optional priority, + std::optional priority, bool ordered, - absl::optional max_retransmits, - absl::optional max_retransmit_time, - rtc::CopyOnWriteBuffer* payload); + std::optional max_retransmits, + std::optional max_retransmit_time, + CopyOnWriteBuffer* payload); bool WriteDataChannelOpenMessage(const std::string& label, const DataChannelInit& config, - rtc::CopyOnWriteBuffer* payload); -void WriteDataChannelOpenAckMessage(rtc::CopyOnWriteBuffer* payload); + CopyOnWriteBuffer* payload); +void WriteDataChannelOpenAckMessage(CopyOnWriteBuffer* payload); } // namespace webrtc diff --git a/pc/sctp_utils_unittest.cc b/pc/sctp_utils_unittest.cc index 3e49824b45..5e7d099632 100644 --- a/pc/sctp_utils_unittest.cc +++ b/pc/sctp_utils_unittest.cc @@ -13,8 +13,10 @@ #include #include +#include +#include -#include "absl/types/optional.h" +#include "absl/strings/string_view.h" #include "api/priority.h" #include "media/sctp/sctp_transport_internal.h" #include "rtc_base/byte_buffer.h" @@ -25,7 +27,7 @@ using webrtc::StreamId; class SctpUtilsTest : public ::testing::Test { public: - void VerifyOpenMessageFormat(const rtc::CopyOnWriteBuffer& packet, + void VerifyOpenMessageFormat(const webrtc::CopyOnWriteBuffer& packet, const std::string& label, const webrtc::DataChannelInit& config) { uint8_t message_type; @@ -35,7 +37,7 @@ class SctpUtilsTest : public ::testing::Test { uint16_t label_length; uint16_t protocol_length; - rtc::ByteBufferReader buffer(packet.data(), packet.size()); + webrtc::ByteBufferReader buffer(packet); ASSERT_TRUE(buffer.ReadUInt8(&message_type)); EXPECT_EQ(0x03, message_type); @@ -55,9 +57,10 @@ class SctpUtilsTest : public ::testing::Test { if (config.priority) { // Exact values are checked by round-trip conversion, but // all values defined are greater than zero. 
- EXPECT_GT(priority, 0); + EXPECT_EQ(priority, config.priority->value()); } else { - EXPECT_EQ(priority, 0); + EXPECT_EQ(priority, + webrtc::PriorityValue(webrtc::Priority::kLow).value()); } ASSERT_TRUE(buffer.ReadUInt32(&reliability)); @@ -72,11 +75,11 @@ class SctpUtilsTest : public ::testing::Test { EXPECT_EQ(label.size(), label_length); EXPECT_EQ(config.protocol.size(), protocol_length); - std::string label_output; - ASSERT_TRUE(buffer.ReadString(&label_output, label_length)); + absl::string_view label_output; + ASSERT_TRUE(buffer.ReadStringView(&label_output, label_length)); EXPECT_EQ(label, label_output); - std::string protocol_output; - ASSERT_TRUE(buffer.ReadString(&protocol_output, protocol_length)); + absl::string_view protocol_output; + ASSERT_TRUE(buffer.ReadStringView(&protocol_output, protocol_length)); EXPECT_EQ(config.protocol, protocol_output); } }; @@ -86,7 +89,7 @@ TEST_F(SctpUtilsTest, WriteParseOpenMessageWithOrderedReliable) { std::string label = "abc"; config.protocol = "y"; - rtc::CopyOnWriteBuffer packet; + webrtc::CopyOnWriteBuffer packet; ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet)); VerifyOpenMessageFormat(packet, label, config); @@ -110,7 +113,7 @@ TEST_F(SctpUtilsTest, WriteParseOpenMessageWithMaxRetransmitTime) { config.maxRetransmitTime = 10; config.protocol = "y"; - rtc::CopyOnWriteBuffer packet; + webrtc::CopyOnWriteBuffer packet; ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet)); VerifyOpenMessageFormat(packet, label, config); @@ -133,7 +136,7 @@ TEST_F(SctpUtilsTest, WriteParseOpenMessageWithMaxRetransmits) { config.maxRetransmits = 10; config.protocol = "y"; - rtc::CopyOnWriteBuffer packet; + webrtc::CopyOnWriteBuffer packet; ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet)); VerifyOpenMessageFormat(packet, label, config); @@ -154,9 +157,9 @@ TEST_F(SctpUtilsTest, WriteParseOpenMessageWithPriority) { webrtc::DataChannelInit config; std::string label = "abc"; config.protocol = "y"; - config.priority = webrtc::Priority::kVeryLow; + config.priority = webrtc::PriorityValue(webrtc::Priority::kVeryLow); - rtc::CopyOnWriteBuffer packet; + webrtc::CopyOnWriteBuffer packet; ASSERT_TRUE(webrtc::WriteDataChannelOpenMessage(label, config, &packet)); VerifyOpenMessageFormat(packet, label, config); @@ -172,11 +175,11 @@ TEST_F(SctpUtilsTest, WriteParseOpenMessageWithPriority) { } TEST_F(SctpUtilsTest, WriteParseAckMessage) { - rtc::CopyOnWriteBuffer packet; + webrtc::CopyOnWriteBuffer packet; webrtc::WriteDataChannelOpenAckMessage(&packet); uint8_t message_type; - rtc::ByteBufferReader buffer(packet.data(), packet.size()); + webrtc::ByteBufferReader buffer(packet); ASSERT_TRUE(buffer.ReadUInt8(&message_type)); EXPECT_EQ(0x02, message_type); @@ -184,59 +187,27 @@ TEST_F(SctpUtilsTest, WriteParseAckMessage) { } TEST_F(SctpUtilsTest, TestIsOpenMessage) { - rtc::CopyOnWriteBuffer open(1); + webrtc::CopyOnWriteBuffer open(1); open.MutableData()[0] = 0x03; EXPECT_TRUE(webrtc::IsOpenMessage(open)); - rtc::CopyOnWriteBuffer openAck(1); + webrtc::CopyOnWriteBuffer openAck(1); openAck.MutableData()[0] = 0x02; EXPECT_FALSE(webrtc::IsOpenMessage(openAck)); - rtc::CopyOnWriteBuffer invalid(1); + webrtc::CopyOnWriteBuffer invalid(1); invalid.MutableData()[0] = 0x01; EXPECT_FALSE(webrtc::IsOpenMessage(invalid)); - rtc::CopyOnWriteBuffer empty; + webrtc::CopyOnWriteBuffer empty; EXPECT_FALSE(webrtc::IsOpenMessage(empty)); } TEST(SctpSidTest, Basics) { // These static asserts are mostly here to aid with 
readability (i.e. knowing // what these constants represent). - static_assert(cricket::kMinSctpSid == 0, "Min stream id should be 0"); - static_assert(cricket::kMaxSctpSid <= cricket::kSpecMaxSctpSid, ""); - static_assert( - cricket::kSpecMaxSctpSid == std::numeric_limits::max(), - "Max legal sctp stream value should be 0xffff"); - - // cricket::kMaxSctpSid is a chosen value in the webrtc implementation, - // the highest generated `sid` value chosen for resource reservation reasons. - // It's one less than kMaxSctpStreams (1024) or 1023 since sid values are - // zero based. - - EXPECT_TRUE(!StreamId(-1).HasValue()); - EXPECT_TRUE(!StreamId(-2).HasValue()); - EXPECT_TRUE(StreamId(cricket::kMinSctpSid).HasValue()); - EXPECT_TRUE(StreamId(cricket::kMinSctpSid + 1).HasValue()); - EXPECT_TRUE(StreamId(cricket::kSpecMaxSctpSid).HasValue()); - EXPECT_TRUE(StreamId(cricket::kMaxSctpSid).HasValue()); - - // Two illegal values are equal (both not valid). - EXPECT_EQ(StreamId(-1), StreamId(-2)); - // Two different, but legal, values, are not equal. - EXPECT_NE(StreamId(1), StreamId(2)); - // Test operator<() for container compatibility. - EXPECT_LT(StreamId(1), StreamId(2)); - - // Test assignment, value() and reset(). - StreamId sid1; - StreamId sid2(cricket::kMaxSctpSid); - EXPECT_NE(sid1, sid2); - sid1 = sid2; - EXPECT_EQ(sid1, sid2); - - EXPECT_EQ(sid1.stream_id_int(), cricket::kMaxSctpSid); - EXPECT_TRUE(sid1.HasValue()); - sid1.reset(); - EXPECT_FALSE(sid1.HasValue()); + static_assert(webrtc::kMinSctpSid == 0, "Min stream id should be 0"); + static_assert(webrtc::kMaxSctpSid <= webrtc::kSpecMaxSctpSid, ""); + static_assert(webrtc::kSpecMaxSctpSid == std::numeric_limits::max(), + "Max legal sctp stream value should be 0xffff"); } diff --git a/pc/sdp_munging_detector.cc b/pc/sdp_munging_detector.cc new file mode 100644 index 0000000000..2497a3066d --- /dev/null +++ b/pc/sdp_munging_detector.cc @@ -0,0 +1,538 @@ +/* + * Copyright 2025 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "pc/sdp_munging_detector.h" + +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "api/jsep.h" +#include "api/media_types.h" +#include "api/uma_metrics.h" +#include "media/base/codec.h" +#include "media/base/media_constants.h" +#include "media/base/stream_params.h" +#include "p2p/base/transport_description.h" +#include "p2p/base/transport_info.h" +#include "pc/session_description.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +namespace { + +SdpMungingType DetermineTransportModification( + const TransportInfos& last_created_transport_infos, + const TransportInfos& transport_infos_to_set) { + if (last_created_transport_infos.size() != transport_infos_to_set.size()) { + RTC_LOG(LS_WARNING) << "SDP munging: Number of transport-infos does not " + "match last created description."; + // Number of transports should always match number of contents so this + // should never happen. 
+ return SdpMungingType::kNumberOfContents; + } + for (size_t i = 0; i < last_created_transport_infos.size(); i++) { + if (last_created_transport_infos[i].description.ice_ufrag != + transport_infos_to_set[i].description.ice_ufrag) { + RTC_LOG(LS_WARNING) + << "SDP munging: ice-ufrag does not match last created description."; + return SdpMungingType::kIceUfrag; + } + if (last_created_transport_infos[i].description.ice_pwd != + transport_infos_to_set[i].description.ice_pwd) { + RTC_LOG(LS_WARNING) + << "SDP munging: ice-pwd does not match last created description."; + return SdpMungingType::kIcePwd; + } + if (last_created_transport_infos[i].description.ice_mode != + transport_infos_to_set[i].description.ice_mode) { + RTC_LOG(LS_WARNING) + << "SDP munging: ice mode does not match last created description."; + return SdpMungingType::kIceMode; + } + if (last_created_transport_infos[i].description.connection_role != + transport_infos_to_set[i].description.connection_role) { + RTC_LOG(LS_WARNING) + << "SDP munging: DTLS role does not match last created description."; + return SdpMungingType::kDtlsSetup; + } + if (last_created_transport_infos[i].description.transport_options != + transport_infos_to_set[i].description.transport_options) { + RTC_LOG(LS_WARNING) << "SDP munging: ice_options does not match last " + "created description."; + bool created_renomination = + absl::c_find( + last_created_transport_infos[i].description.transport_options, + ICE_OPTION_RENOMINATION) != + last_created_transport_infos[i].description.transport_options.end(); + bool set_renomination = + absl::c_find(transport_infos_to_set[i].description.transport_options, + ICE_OPTION_RENOMINATION) != + transport_infos_to_set[i].description.transport_options.end(); + if (!created_renomination && set_renomination) { + return SdpMungingType::kIceOptionsRenomination; + } + return SdpMungingType::kIceOptions; + } + } + return SdpMungingType::kNoModification; +} + +SdpMungingType DetermineAudioSdpMungingType( + const MediaContentDescription* last_created_media_description, + const MediaContentDescription* media_description_to_set) { + RTC_DCHECK(last_created_media_description); + RTC_DCHECK(media_description_to_set); + // Removing codecs should be done via setCodecPreferences or negotiation, not + // munging. + if (last_created_media_description->codecs().size() > + media_description_to_set->codecs().size()) { + RTC_LOG(LS_WARNING) << "SDP munging: audio codecs removed."; + return SdpMungingType::kAudioCodecsRemoved; + } + // Adding audio codecs is measured after the more specific multiopus and L16 + // checks. + + // Opus stereo modification required to enabled stereo playout for opus. + bool created_opus_stereo = + absl::c_find_if(last_created_media_description->codecs(), + [](const Codec codec) { + std::string value; + return codec.name == kOpusCodecName && + codec.GetParam(kCodecParamStereo, &value) && + value == kParamValueTrue; + }) != last_created_media_description->codecs().end(); + bool set_opus_stereo = + absl::c_find_if( + media_description_to_set->codecs(), [](const Codec codec) { + std::string value; + return codec.name == kOpusCodecName && + codec.GetParam(kCodecParamStereo, &value) && + value == kParamValueTrue; + }) != media_description_to_set->codecs().end(); + if (!created_opus_stereo && set_opus_stereo) { + RTC_LOG(LS_WARNING) << "SDP munging: Opus stereo enabled."; + return SdpMungingType::kAudioCodecsFmtpOpusStereo; + } + + // Nonstandard 5.1/7.1 opus variant. 
+ bool created_multiopus = + absl::c_find_if(last_created_media_description->codecs(), + [](const Codec codec) { + return codec.name == "multiopus"; + }) != last_created_media_description->codecs().end(); + bool set_multiopus = + absl::c_find_if(media_description_to_set->codecs(), + [](const Codec codec) { + return codec.name == "multiopus"; + }) != media_description_to_set->codecs().end(); + if (!created_multiopus && set_multiopus) { + RTC_LOG(LS_WARNING) << "SDP munging: multiopus enabled."; + return SdpMungingType::kAudioCodecsAddedMultiOpus; + } + + // L16. + bool created_l16 = + absl::c_find_if(last_created_media_description->codecs(), + [](const Codec codec) { + return codec.name == kL16CodecName; + }) != last_created_media_description->codecs().end(); + bool set_l16 = absl::c_find_if(media_description_to_set->codecs(), + [](const Codec codec) { + return codec.name == kL16CodecName; + }) != media_description_to_set->codecs().end(); + if (!created_l16 && set_l16) { + RTC_LOG(LS_WARNING) << "SDP munging: L16 enabled."; + return SdpMungingType::kAudioCodecsAddedL16; + } + + if (last_created_media_description->codecs().size() < + media_description_to_set->codecs().size()) { + RTC_LOG(LS_WARNING) << "SDP munging: audio codecs added."; + return SdpMungingType::kAudioCodecsAdded; + } + + // Audio NACK is not offered by default. + bool created_nack = + absl::c_find_if( + last_created_media_description->codecs(), [](const Codec codec) { + return codec.HasFeedbackParam(FeedbackParam(kRtcpFbParamNack)); + }) != last_created_media_description->codecs().end(); + bool set_nack = + absl::c_find_if( + media_description_to_set->codecs(), [](const Codec codec) { + return codec.HasFeedbackParam(FeedbackParam(kRtcpFbParamNack)); + }) != media_description_to_set->codecs().end(); + if (!created_nack && set_nack) { + RTC_LOG(LS_WARNING) << "SDP munging: audio nack enabled."; + return SdpMungingType::kAudioCodecsRtcpFbAudioNack; + } + + // RRTR is not offered by default. + bool created_rrtr = + absl::c_find_if( + last_created_media_description->codecs(), [](const Codec codec) { + return codec.HasFeedbackParam(FeedbackParam(kRtcpFbParamRrtr)); + }) != last_created_media_description->codecs().end(); + bool set_rrtr = + absl::c_find_if( + media_description_to_set->codecs(), [](const Codec codec) { + return codec.HasFeedbackParam(FeedbackParam(kRtcpFbParamRrtr)); + }) != media_description_to_set->codecs().end(); + if (!created_rrtr && set_rrtr) { + RTC_LOG(LS_WARNING) << "SDP munging: audio rrtr enabled."; + return SdpMungingType::kAudioCodecsRtcpFbRrtr; + } + + // Opus FEC is on by default. Should not be munged, can be controlled by + // the other side. + bool created_opus_fec = + absl::c_find_if(last_created_media_description->codecs(), + [](const Codec codec) { + std::string value; + return codec.name == kOpusCodecName && + codec.GetParam(kCodecParamUseInbandFec, + &value) && + value == kParamValueTrue; + }) != last_created_media_description->codecs().end(); + bool set_opus_fec = + absl::c_find_if( + media_description_to_set->codecs(), [](const Codec codec) { + std::string value; + return codec.name == kOpusCodecName && + codec.GetParam(kCodecParamUseInbandFec, &value) && + value == kParamValueTrue; + }) != media_description_to_set->codecs().end(); + if (created_opus_fec && !set_opus_fec) { + RTC_LOG(LS_WARNING) << "SDP munging: Opus FEC disabled."; + return SdpMungingType::kAudioCodecsFmtpOpusFec; + } + // Opus DTX is off by default. Should not be munged, can be controlled by + // the other side. 
+ bool created_opus_dtx = + absl::c_find_if(last_created_media_description->codecs(), + [](const Codec codec) { + std::string value; + return codec.name == kOpusCodecName && + codec.GetParam(kCodecParamUseDtx, &value) && + value == kParamValueTrue; + }) != last_created_media_description->codecs().end(); + bool set_opus_dtx = + absl::c_find_if( + media_description_to_set->codecs(), [](const Codec codec) { + std::string value; + return codec.name == kOpusCodecName && + codec.GetParam(kCodecParamUseDtx, &value) && + value == kParamValueTrue; + }) != media_description_to_set->codecs().end(); + if (!created_opus_dtx && set_opus_dtx) { + RTC_LOG(LS_WARNING) << "SDP munging: Opus DTX enabled."; + return SdpMungingType::kAudioCodecsFmtpOpusDtx; + } + + // Opus CBR is off by default. Should not be munged, can be controlled by + // the other side. + bool created_opus_cbr = + absl::c_find_if(last_created_media_description->codecs(), + [](const Codec codec) { + std::string value; + return codec.name == kOpusCodecName && + codec.GetParam(kCodecParamCbr, &value) && + value == kParamValueTrue; + }) != last_created_media_description->codecs().end(); + bool set_opus_cbr = + absl::c_find_if( + media_description_to_set->codecs(), [](const Codec codec) { + std::string value; + return codec.name == kOpusCodecName && + codec.GetParam(kCodecParamCbr, &value) && + value == kParamValueTrue; + }) != media_description_to_set->codecs().end(); + if (!created_opus_cbr && set_opus_cbr) { + RTC_LOG(LS_WARNING) << "SDP munging: Opus CBR enabled."; + return SdpMungingType::kAudioCodecsFmtpOpusCbr; + } + return SdpMungingType::kNoModification; +} + +SdpMungingType DetermineVideoSdpMungingType( + const MediaContentDescription* last_created_media_description, + const MediaContentDescription* media_description_to_set) { + RTC_DCHECK(last_created_media_description); + RTC_DCHECK(media_description_to_set); + // Removing codecs should be done via setCodecPreferences or negotiation, not + // munging. + if (last_created_media_description->codecs().size() > + media_description_to_set->codecs().size()) { + RTC_LOG(LS_WARNING) << "SDP munging: video codecs removed."; + return SdpMungingType::kVideoCodecsRemoved; + } + if (last_created_media_description->codecs().size() < + media_description_to_set->codecs().size()) { + RTC_LOG(LS_WARNING) << "SDP munging: video codecs added."; + return SdpMungingType::kVideoCodecsAdded; + } + + // Simulcast munging. + if (last_created_media_description->streams().size() == 1 && + media_description_to_set->streams().size() == 1) { + bool created_sim = + absl::c_find_if( + last_created_media_description->streams()[0].ssrc_groups, + [](const SsrcGroup group) { + return group.semantics == kSimSsrcGroupSemantics; + }) != + last_created_media_description->streams()[0].ssrc_groups.end(); + bool set_sim = + absl::c_find_if(media_description_to_set->streams()[0].ssrc_groups, + [](const SsrcGroup group) { + return group.semantics == kSimSsrcGroupSemantics; + }) != + media_description_to_set->streams()[0].ssrc_groups.end(); + if (!created_sim && set_sim) { + RTC_LOG(LS_WARNING) << "SDP munging: legacy simulcast group created."; + return SdpMungingType::kVideoCodecsLegacySimulcast; + } + } + + // sps-pps-idr-in-keyframe. 
+ bool created_sps_pps_idr_in_keyframe = + absl::c_find_if(last_created_media_description->codecs(), + [](const Codec codec) { + std::string value; + return codec.name == kH264CodecName && + codec.GetParam(kH264FmtpSpsPpsIdrInKeyframe, + &value) && + value == kParamValueTrue; + }) != last_created_media_description->codecs().end(); + bool set_sps_pps_idr_in_keyframe = + absl::c_find_if( + media_description_to_set->codecs(), [](const Codec codec) { + std::string value; + return codec.name == kH264CodecName && + codec.GetParam(kH264FmtpSpsPpsIdrInKeyframe, &value) && + value == kParamValueTrue; + }) != media_description_to_set->codecs().end(); + if (!created_sps_pps_idr_in_keyframe && set_sps_pps_idr_in_keyframe) { + RTC_LOG(LS_WARNING) << "SDP munging: sps-pps-idr-in-keyframe enabled."; + return SdpMungingType::kVideoCodecsFmtpH264SpsPpsIdrInKeyframe; + } + + return SdpMungingType::kNoModification; +} + +} // namespace + +// Determine if the SDP was modified between createOffer and +// setLocalDescription. +SdpMungingType DetermineSdpMungingType( + const SessionDescriptionInterface* sdesc, + const SessionDescriptionInterface* last_created_desc) { + if (!sdesc || !sdesc->description()) { + RTC_LOG(LS_WARNING) << "SDP munging: Failed to parse session description."; + return SdpMungingType::kUnknownModification; + } + + if (!last_created_desc || !last_created_desc->description()) { + RTC_LOG(LS_WARNING) << "SDP munging: SetLocalDescription called without " + "CreateOffer or CreateAnswer."; + if (sdesc->GetType() == SdpType::kOffer) { + return SdpMungingType::kWithoutCreateOffer; + } else { // answer or pranswer. + return SdpMungingType::kWithoutCreateAnswer; + } + } + + // TODO: crbug.com/40567530 - we currently allow answer->pranswer + // so can not check sdesc->GetType() == last_created_desc->GetType(). + + SdpMungingType type; + + // TODO: crbug.com/40567530 - change Chromium so that pointer comparison works + // at least for implicit local description. + if (sdesc->description() == last_created_desc->description()) { + return SdpMungingType::kNoModification; + } + + // Validate contents. + const auto& last_created_contents = + last_created_desc->description()->contents(); + const auto& contents_to_set = sdesc->description()->contents(); + if (last_created_contents.size() != contents_to_set.size()) { + RTC_LOG(LS_WARNING) << "SDP munging: Number of m= sections does not match " + "last created description."; + return SdpMungingType::kNumberOfContents; + } + for (size_t content_index = 0; content_index < last_created_contents.size(); + content_index++) { + // TODO: crbug.com/40567530 - more checks are needed here. + if (last_created_contents[content_index].mid() != + contents_to_set[content_index].mid()) { + RTC_LOG(LS_WARNING) << "SDP munging: mid does not match " + "last created description."; + return SdpMungingType::kMid; + } + + auto* last_created_media_description = + last_created_contents[content_index].media_description(); + auto* media_description_to_set = + contents_to_set[content_index].media_description(); + if (!(last_created_media_description && media_description_to_set)) { + continue; + } + // Validate video and audio contents. 
+ webrtc::MediaType media_type = last_created_media_description->type(); + if (media_type == webrtc::MediaType::VIDEO) { + type = DetermineVideoSdpMungingType(last_created_media_description, + media_description_to_set); + if (type != SdpMungingType::kNoModification) { + return type; + } + } else if (media_type == webrtc::MediaType::AUDIO) { + type = DetermineAudioSdpMungingType(last_created_media_description, + media_description_to_set); + if (type != SdpMungingType::kNoModification) { + return type; + } + } + + // Validate codecs. We should have bailed out earlier if codecs were added + // or removed. + auto last_created_codecs = last_created_media_description->codecs(); + auto codecs_to_set = media_description_to_set->codecs(); + if (last_created_codecs.size() == codecs_to_set.size()) { + for (size_t i = 0; i < last_created_codecs.size(); i++) { + if (last_created_codecs[i] == codecs_to_set[i]) { + continue; + } + // Codec position swapped. + for (size_t j = i + 1; j < last_created_codecs.size(); j++) { + if (last_created_codecs[i] == codecs_to_set[j]) { + return media_type == webrtc::MediaType::AUDIO + ? SdpMungingType::kAudioCodecsReordered + : SdpMungingType::kVideoCodecsReordered; + } + } + // Same codec but id changed. + if (last_created_codecs[i].name == codecs_to_set[i].name && + last_created_codecs[i].id != codecs_to_set[i].id) { + return SdpMungingType::kPayloadTypes; + } + if (last_created_codecs[i].params != codecs_to_set[i].params) { + return media_type == webrtc::MediaType::AUDIO + ? SdpMungingType::kAudioCodecsFmtp + : SdpMungingType::kVideoCodecsFmtp; + } + if (last_created_codecs[i].feedback_params != + codecs_to_set[i].feedback_params) { + return media_type == webrtc::MediaType::AUDIO + ? SdpMungingType::kAudioCodecsRtcpFb + : SdpMungingType::kVideoCodecsRtcpFb; + } + // At this point clockrate or channels changed. This should already be + // rejected later in the process so ignore for munging. + } + } + + // Validate media streams. + if (last_created_media_description->streams().size() != + media_description_to_set->streams().size()) { + RTC_LOG(LS_WARNING) << "SDP munging: streams size does not match last " + "created description."; + return SdpMungingType::kSsrcs; + } + for (size_t i = 0; i < last_created_media_description->streams().size(); + i++) { + if (last_created_media_description->streams()[i].ssrcs != + media_description_to_set->streams()[i].ssrcs) { + RTC_LOG(LS_WARNING) + << "SDP munging: SSRCs do not match last created description."; + return SdpMungingType::kSsrcs; + } + } + + // Validate RTP header extensions. + auto last_created_extensions = + last_created_media_description->rtp_header_extensions(); + auto extensions_to_set = media_description_to_set->rtp_header_extensions(); + if (last_created_extensions.size() < extensions_to_set.size()) { + RTC_LOG(LS_WARNING) << "SDP munging: RTP header extension added."; + return SdpMungingType::kRtpHeaderExtensionAdded; + } + if (last_created_extensions.size() > extensions_to_set.size()) { + RTC_LOG(LS_WARNING) << "SDP munging: RTP header extension removed."; + return SdpMungingType::kRtpHeaderExtensionRemoved; + } + for (size_t i = 0; i < last_created_extensions.size(); i++) { + if (!(last_created_extensions[i].id == extensions_to_set[i].id)) { + RTC_LOG(LS_WARNING) << "SDP munging: header extension modified."; + return SdpMungingType::kRtpHeaderExtensionModified; + } + } + } + + // Validate transport descriptions. 
+  type = DetermineTransportModification(
+      last_created_desc->description()->transport_infos(),
+      sdesc->description()->transport_infos());
+  if (type != SdpMungingType::kNoModification) {
+    return type;
+  }
+
+  // TODO: crbug.com/40567530 - this serializes the descriptions back to a SDP
+  // string which is very complex and we should not be forced to rely on
+  // string equality.
+  std::string serialized_description;
+  std::string serialized_last_description;
+  if (sdesc->ToString(&serialized_description) &&
+      last_created_desc->ToString(&serialized_last_description) &&
+      serialized_description == serialized_last_description) {
+    return SdpMungingType::kNoModification;
+  }
+  return SdpMungingType::kUnknownModification;
+}
+
+// Similar to DetermineSdpMungingType, but only checks whether the ICE ufrag or
+// pwd of the SDP has been modified between createOffer and setLocalDescription.
+bool HasUfragSdpMunging(const SessionDescriptionInterface* sdesc,
+                        const SessionDescriptionInterface* last_created_desc) {
+  if (!sdesc || !sdesc->description()) {
+    RTC_LOG(LS_WARNING) << "SDP munging: Failed to parse session description.";
+    return false;
+  }
+
+  if (!last_created_desc || !last_created_desc->description()) {
+    RTC_LOG(LS_WARNING) << "SDP munging: SetLocalDescription called without "
+                           "CreateOffer or CreateAnswer.";
+    return false;
+  }
+  TransportInfos last_created_transport_infos =
+      last_created_desc->description()->transport_infos();
+  TransportInfos transport_infos_to_set =
+      sdesc->description()->transport_infos();
+  for (size_t i = 0; i < std::min(last_created_transport_infos.size(),
+                                  transport_infos_to_set.size());
+       i++) {
+    if (last_created_transport_infos[i].description.ice_ufrag !=
+        transport_infos_to_set[i].description.ice_ufrag) {
+      return true;
+    }
+    if (last_created_transport_infos[i].description.ice_pwd !=
+        transport_infos_to_set[i].description.ice_pwd) {
+      return true;
+    }
+  }
+  return false;
+}
+
+}  // namespace webrtc
diff --git a/pc/sdp_munging_detector.h b/pc/sdp_munging_detector.h
new file mode 100644
index 0000000000..ec8af1a634
--- /dev/null
+++ b/pc/sdp_munging_detector.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2025 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef PC_SDP_MUNGING_DETECTOR_H_
+#define PC_SDP_MUNGING_DETECTOR_H_
+
+#include "api/jsep.h"
+#include "api/uma_metrics.h"
+
+namespace webrtc {
+// Determines if and how the SDP was modified.
+SdpMungingType DetermineSdpMungingType(
+    const SessionDescriptionInterface* sdesc,
+    const SessionDescriptionInterface* last_created_desc);
+
+// Determines if the ICE ufrag or pwd of the SDP were modified.
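A rough sketch of how a caller such as pc/sdp_offer_answer.cc (patched in the next hunk) can consume these two entry points; the `description_to_set` and `last_created_description` names are hypothetical, only the signatures come from this header:

  webrtc::SdpMungingType type = webrtc::DetermineSdpMungingType(
      description_to_set, last_created_description);
  if (type != webrtc::SdpMungingType::kNoModification) {
    RTC_LOG(LS_WARNING) << "SDP munging detected, type="
                        << static_cast<int>(type);
  }
  // Narrower check when only ICE credential edits matter:
  if (webrtc::HasUfragSdpMunging(description_to_set,
                                 last_created_description)) {
    RTC_LOG(LS_WARNING) << "ICE ufrag/pwd modified before SetLocalDescription.";
  }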
+bool HasUfragSdpMunging(const SessionDescriptionInterface* sdesc, + const SessionDescriptionInterface* last_created_desc); + +} // namespace webrtc + +#endif // PC_SDP_MUNGING_DETECTOR_H_ diff --git a/pc/sdp_offer_answer.cc b/pc/sdp_offer_answer.cc index 06303de441..2423fa8c33 100644 --- a/pc/sdp_offer_answer.cc +++ b/pc/sdp_offer_answer.cc @@ -12,81 +12,117 @@ #include #include +#include +#include #include #include #include +#include #include +#include #include #include +#include #include "absl/algorithm/container.h" #include "absl/memory/memory.h" #include "absl/strings/match.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/str_split.h" #include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/candidate.h" #include "api/crypto/crypto_options.h" -#include "api/dtls_transport_interface.h" -#include "api/field_trials_view.h" +#include "api/jsep.h" +#include "api/jsep_ice_candidate.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" #include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" #include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_direction.h" +#include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/set_local_description_observer_interface.h" +#include "api/set_remote_description_observer_interface.h" +#include "api/uma_metrics.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" +#include "api/video/video_codec_constants.h" +#include "call/payload_type.h" #include "media/base/codec.h" +#include "media/base/codec_comparators.h" +#include "media/base/media_constants.h" +#include "media/base/media_engine.h" #include "media/base/rid_description.h" +#include "media/base/stream_params.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/p2p_transport_channel.h" -#include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" #include "p2p/base/transport_description.h" #include "p2p/base/transport_description_factory.h" #include "p2p/base/transport_info.h" #include "pc/channel_interface.h" +#include "pc/codec_vendor.h" +#include "pc/connection_context.h" #include "pc/dtls_transport.h" +#include "pc/jsep_transport_controller.h" #include "pc/legacy_stats_collector.h" +#include "pc/media_options.h" +#include "pc/media_session.h" #include "pc/media_stream.h" +#include "pc/media_stream_observer.h" #include "pc/media_stream_proxy.h" #include "pc/peer_connection_internal.h" #include "pc/peer_connection_message_handler.h" #include "pc/rtp_media_utils.h" -#include "pc/rtp_receiver_proxy.h" +#include "pc/rtp_receiver.h" #include "pc/rtp_sender.h" #include "pc/rtp_sender_proxy.h" +#include "pc/rtp_transceiver.h" +#include "pc/rtp_transmission_manager.h" +#include "pc/sdp_munging_detector.h" +#include "pc/session_description.h" #include "pc/simulcast_description.h" +#include "pc/stream_collection.h" +#include "pc/transceiver_list.h" #include "pc/usage_pattern.h" #include "pc/webrtc_session_description_factory.h" -#include "rtc_base/helpers.h" +#include "rtc_base/checks.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/logging.h" +#include "rtc_base/operations_chain.h" #include "rtc_base/rtc_certificate.h" +#include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/string_encode.h" #include 
"rtc_base/strings/string_builder.h" +#include "rtc_base/thread.h" #include "rtc_base/trace_event.h" +#include "rtc_base/weak_ptr.h" #include "system_wrappers/include/metrics.h" -using cricket::ContentInfo; -using cricket::ContentInfos; -using cricket::MediaContentDescription; -using cricket::MediaProtocolType; -using cricket::RidDescription; -using cricket::RidDirection; -using cricket::SessionDescription; -using cricket::SimulcastDescription; -using cricket::SimulcastLayer; -using cricket::SimulcastLayerList; -using cricket::StreamParams; -using cricket::TransportInfo; - -using cricket::LOCAL_PORT_TYPE; -using cricket::PRFLX_PORT_TYPE; -using cricket::RELAY_PORT_TYPE; -using cricket::STUN_PORT_TYPE; +using ::webrtc::ContentInfo; +using ::webrtc::ContentInfos; +using webrtc::MediaContentDescription; +using ::webrtc::MediaProtocolType; +using ::webrtc::RidDescription; +using ::webrtc::RidDirection; +using ::webrtc::SessionDescription; +using ::webrtc::SimulcastDescription; +using ::webrtc::SimulcastLayer; +using ::webrtc::SimulcastLayerList; +using ::webrtc::StreamParams; +using ::webrtc::TransportInfo; namespace webrtc { namespace { -typedef webrtc::PeerConnectionInterface::RTCOfferAnswerOptions - RTCOfferAnswerOptions; +typedef PeerConnectionInterface::RTCOfferAnswerOptions RTCOfferAnswerOptions; // Error messages const char kInvalidSdp[] = "Invalid session description."; @@ -104,7 +140,7 @@ const char kSdpWithoutIceUfragPwd[] = "Called with SDP without ice-ufrag and ice-pwd."; const char kSdpWithoutDtlsFingerprint[] = "Called with SDP without DTLS fingerprint."; -const char kSdpWithoutSdesCrypto[] = "Called with SDP without SDES crypto."; +const char kSdpWithoutCrypto[] = "Called with SDP without crypto setup."; const char kSessionError[] = "Session error code: "; const char kSessionErrorDesc[] = "Session error description: "; @@ -125,12 +161,12 @@ void NoteAddIceCandidateResult(int result) { kAddIceCandidateMax); } -std::map GetBundleGroupsByMid( +std::map GetBundleGroupsByMid( const SessionDescription* desc) { - std::vector bundle_groups = - desc->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE); - std::map bundle_groups_by_mid; - for (const cricket::ContentGroup* bundle_group : bundle_groups) { + std::vector bundle_groups = + desc->GetGroupsByName(GROUP_TYPE_BUNDLE); + std::map bundle_groups_by_mid; + for (const ContentGroup* bundle_group : bundle_groups) { for (const std::string& content_name : bundle_group->content_names()) { bundle_groups_by_mid[content_name] = bundle_group; } @@ -152,15 +188,15 @@ bool CheckForRemoteIceRestart(const SessionDescriptionInterface* old_desc, return false; } // If the content isn't rejected, check if ufrag and password has changed. - const cricket::TransportDescription* new_transport_desc = + const TransportDescription* new_transport_desc = new_sd->GetTransportDescriptionByName(content_name); - const cricket::TransportDescription* old_transport_desc = + const TransportDescription* old_transport_desc = old_sd->GetTransportDescriptionByName(content_name); if (!new_transport_desc || !old_transport_desc) { // No transport description exists. This is not an ICE restart. 
return false; } - if (cricket::IceCredentialsChanged( + if (IceCredentialsChanged( old_transport_desc->ice_ufrag, old_transport_desc->ice_pwd, new_transport_desc->ice_ufrag, new_transport_desc->ice_pwd)) { RTC_LOG(LS_INFO) << "Remote peer requests ICE restart for " << content_name @@ -172,18 +208,18 @@ bool CheckForRemoteIceRestart(const SessionDescriptionInterface* old_desc, // Generates a string error message for SetLocalDescription/SetRemoteDescription // from an RTCError. -std::string GetSetDescriptionErrorMessage(cricket::ContentSource source, +std::string GetSetDescriptionErrorMessage(ContentSource source, SdpType type, const RTCError& error) { - rtc::StringBuilder oss; - oss << "Failed to set " << (source == cricket::CS_LOCAL ? "local" : "remote") - << " " << SdpTypeToString(type) << " sdp: "; + StringBuilder oss; + oss << "Failed to set " << (source == CS_LOCAL ? "local" : "remote") << " " + << SdpTypeToString(type) << " sdp: "; RTC_DCHECK(!absl::StartsWith(error.message(), oss.str())) << error.message(); oss << error.message(); return oss.Release(); } -std::string GetStreamIdsString(rtc::ArrayView stream_ids) { +std::string GetStreamIdsString(ArrayView stream_ids) { std::string output = "streams=["; const char* separator = ""; for (const auto& stream_id : stream_ids) { @@ -206,11 +242,11 @@ const ContentInfo* FindTransceiverMSection( // If the direction is "recvonly" or "inactive", treat the description // as containing no streams. // See: https://code.google.com/p/webrtc/issues/detail?id=5054 -std::vector GetActiveStreams( - const cricket::MediaContentDescription* desc) { +std::vector GetActiveStreams( + const MediaContentDescription* desc) { return RtpTransceiverDirectionHasSend(desc->direction()) ? desc->streams() - : std::vector(); + : std::vector(); } // Logic to decide if an m= section can be recycled. This means that the new @@ -246,7 +282,7 @@ bool MediaSectionsInSameOrder(const SessionDescription& current_desc, } for (size_t i = 0; i < current_desc.contents().size(); ++i) { - const cricket::ContentInfo* secondary_content_info = nullptr; + const ContentInfo* secondary_content_info = nullptr; if (secondary_current_desc && i < secondary_current_desc->contents().size()) { secondary_content_info = &secondary_current_desc->contents()[i]; @@ -258,7 +294,7 @@ bool MediaSectionsInSameOrder(const SessionDescription& current_desc, // valid for the MID and media type to change. continue; } - if (new_desc.contents()[i].name != current_desc.contents()[i].name) { + if (new_desc.contents()[i].mid() != current_desc.contents()[i].mid()) { return false; } const MediaContentDescription* new_desc_mdesc = @@ -276,26 +312,23 @@ bool MediaSectionsHaveSameCount(const SessionDescription& desc1, const SessionDescription& desc2) { return desc1.contents().size() == desc2.contents().size(); } -// Checks that each non-rejected content has SDES crypto keys or a DTLS +// Checks that each non-rejected content has a DTLS // fingerprint, unless it's in a BUNDLE group, in which case only the // BUNDLE-tag section (first media section/description in the BUNDLE group) // needs a ufrag and pwd. Mismatches, such as replying with a DTLS fingerprint // to SDES keys, will be caught in JsepTransport negotiation, and backstopped // by Channel's `srtp_required` check. 
-RTCError VerifyCrypto(const SessionDescription* desc, - bool dtls_enabled, - const std::map& - bundle_groups_by_mid) { - for (const cricket::ContentInfo& content_info : desc->contents()) { +RTCError VerifyCrypto( + const SessionDescription* desc, + bool dtls_enabled, + const std::map& bundle_groups_by_mid) { + for (const ContentInfo& content_info : desc->contents()) { if (content_info.rejected) { continue; } -#if !defined(WEBRTC_FUCHSIA) - RTC_CHECK(dtls_enabled) << "SDES protocol is only allowed in Fuchsia"; -#endif - const std::string& mid = content_info.name; + const auto mid = content_info.mid(); auto it = bundle_groups_by_mid.find(mid); - const cricket::ContentGroup* bundle = + const ContentGroup* bundle = it != bundle_groups_by_mid.end() ? it->second : nullptr; if (bundle && mid != *(bundle->FirstContentName())) { // This isn't the first media section in the BUNDLE group, so it's not @@ -318,10 +351,7 @@ RTCError VerifyCrypto(const SessionDescription* desc, kSdpWithoutDtlsFingerprint); } } else { - if (media->cryptos().empty()) { - LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - kSdpWithoutSdesCrypto); - } + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kSdpWithoutCrypto); } } return RTCError::OK(); @@ -332,15 +362,14 @@ RTCError VerifyCrypto(const SessionDescription* desc, // media section/description in the BUNDLE group) needs a ufrag and pwd. bool VerifyIceUfragPwdPresent( const SessionDescription* desc, - const std::map& - bundle_groups_by_mid) { - for (const cricket::ContentInfo& content_info : desc->contents()) { + const std::map& bundle_groups_by_mid) { + for (const ContentInfo& content_info : desc->contents()) { if (content_info.rejected) { continue; } - const std::string& mid = content_info.name; + const auto mid = content_info.mid(); auto it = bundle_groups_by_mid.find(mid); - const cricket::ContentGroup* bundle = + const ContentGroup* bundle = it != bundle_groups_by_mid.end() ? it->second : nullptr; if (bundle && mid != *(bundle->FirstContentName())) { // This isn't the first media section in the BUNDLE group, so it's not @@ -366,21 +395,21 @@ bool VerifyIceUfragPwdPresent( return true; } -RTCError ValidateMids(const cricket::SessionDescription& description) { +RTCError ValidateMids(const SessionDescription& description) { std::set mids; - for (const cricket::ContentInfo& content : description.contents()) { - if (content.name.empty()) { + for (const ContentInfo& content : description.contents()) { + if (content.mid().empty()) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "A media section is missing a MID attribute."); } - if (content.name.size() > kMidMaxSize) { + if (content.mid().size() > kMidMaxSize) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "The MID attribute exceeds the maximum supported " "length of 16 characters."); } - if (!mids.insert(content.name).second) { + if (!mids.insert(content.mid()).second) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, - "Duplicate a=mid value '" + content.name + "'."); + "Duplicate a=mid value '" + content.mid() + "'."); } } return RTCError::OK(); @@ -396,7 +425,7 @@ RTCError FindDuplicateCodecParameters( LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "A BUNDLE group contains a codec collision for " "payload_type='" + - rtc::ToString(codec_parameters.payload_type) + + absl::StrCat(codec_parameters.payload_type) + ". 
All codecs must share the same type, " "encoding name, clock rate and parameters."); } @@ -405,16 +434,15 @@ RTCError FindDuplicateCodecParameters( return RTCError::OK(); } -RTCError ValidateBundledPayloadTypes( - const cricket::SessionDescription& description) { +RTCError ValidateBundledPayloadTypes(const SessionDescription& description) { // https://www.rfc-editor.org/rfc/rfc8843#name-payload-type-pt-value-reuse // ... all codecs associated with the payload type number MUST share an // identical codec configuration. This means that the codecs MUST share // the same media type, encoding name, clock rate, and any parameter // that can affect the codec configuration and packetization. - std::vector bundle_groups = - description.GetGroupsByName(cricket::GROUP_TYPE_BUNDLE); - for (const cricket::ContentGroup* bundle_group : bundle_groups) { + std::vector bundle_groups = + description.GetGroupsByName(GROUP_TYPE_BUNDLE); + for (const ContentGroup* bundle_group : bundle_groups) { std::map payload_to_codec_parameters; for (const std::string& content_name : bundle_group->content_names()) { const ContentInfo* content_description = @@ -424,7 +452,7 @@ RTCError ValidateBundledPayloadTypes( "A BUNDLE group contains a MID='" + content_name + "' matching no m= section."); } - const cricket::MediaContentDescription* media_description = + const MediaContentDescription* media_description = content_description->media_description(); RTC_DCHECK(media_description); if (content_description->rejected || !media_description || @@ -432,18 +460,9 @@ RTCError ValidateBundledPayloadTypes( continue; } const auto type = media_description->type(); - if (type == cricket::MEDIA_TYPE_AUDIO) { - RTC_DCHECK(media_description->as_audio()); - for (const auto& c : media_description->as_audio()->codecs()) { - auto error = FindDuplicateCodecParameters( - c.ToCodecParameters(), payload_to_codec_parameters); - if (!error.ok()) { - return error; - } - } - } else if (type == cricket::MEDIA_TYPE_VIDEO) { - RTC_DCHECK(media_description->as_video()); - for (const auto& c : media_description->as_video()->codecs()) { + if (type == webrtc::MediaType::AUDIO || + type == webrtc::MediaType::VIDEO) { + for (const auto& c : media_description->codecs()) { auto error = FindDuplicateCodecParameters( c.ToCodecParameters(), payload_to_codec_parameters); if (!error.ok()) { @@ -467,7 +486,7 @@ RTCError FindDuplicateHeaderExtensionIds( RTCErrorType::INVALID_PARAMETER, "A BUNDLE group contains a codec collision for " "header extension id=" + - rtc::ToString(extension.id) + + absl::StrCat(extension.id) + ". The id must be the same across all bundled media descriptions"); } id_to_extension.insert(std::make_pair(extension.id, extension)); @@ -475,13 +494,13 @@ RTCError FindDuplicateHeaderExtensionIds( } RTCError ValidateBundledRtpHeaderExtensions( - const cricket::SessionDescription& description) { + const SessionDescription& description) { // https://www.rfc-editor.org/rfc/rfc8843#name-rtp-header-extensions-consi // ... the identifier used for a given extension MUST identify the same // extension across all the bundled media descriptions. 
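// Illustrative aside (not part of the patch): both ValidateBundledPayloadTypes
// above and ValidateBundledRtpHeaderExtensions below enforce the same RFC 8843
// shape of rule: within one BUNDLE group, an identifier may only ever map to a
// single configuration. A self-contained sketch for the header-extension case,
// with simplified stand-in types:
#include <map>
#include <string>
#include <vector>

struct ExtensionSketch {
  int id;
  std::string uri;
};

// `sections` holds the negotiated extensions of each bundled m= section.
// Returns false as soon as an id is reused with a different URI.
bool BundledExtensionsAreConsistent(
    const std::vector<std::vector<ExtensionSketch>>& sections) {
  std::map<int, std::string> uri_by_id;
  for (const auto& section : sections) {
    for (const auto& extension : section) {
      auto [it, inserted] = uri_by_id.emplace(extension.id, extension.uri);
      if (!inserted && it->second != extension.uri) {
        return false;  // Same id, different extension: collision.
      }
    }
  }
  return true;
}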
- std::vector bundle_groups = - description.GetGroupsByName(cricket::GROUP_TYPE_BUNDLE); - for (const cricket::ContentGroup* bundle_group : bundle_groups) { + std::vector bundle_groups = + description.GetGroupsByName(GROUP_TYPE_BUNDLE); + for (const ContentGroup* bundle_group : bundle_groups) { std::map id_to_extension; for (const std::string& content_name : bundle_group->content_names()) { const ContentInfo* content_description = @@ -491,7 +510,7 @@ RTCError ValidateBundledRtpHeaderExtensions( "A BUNDLE group contains a MID='" + content_name + "' matching no m= section."); } - const cricket::MediaContentDescription* media_description = + const MediaContentDescription* media_description = content_description->media_description(); RTC_DCHECK(media_description); if (content_description->rejected || !media_description || @@ -512,7 +531,7 @@ RTCError ValidateBundledRtpHeaderExtensions( } RTCError ValidateRtpHeaderExtensionsForSpecSimulcast( - const cricket::SessionDescription& description) { + const SessionDescription& description) { for (const ContentInfo& content : description.contents()) { if (content.type != MediaProtocolType::kRtp || content.rejected) { continue; @@ -535,20 +554,20 @@ RTCError ValidateRtpHeaderExtensionsForSpecSimulcast( return RTCError::OK(); } -RTCError ValidateSsrcGroups(const cricket::SessionDescription& description) { +RTCError ValidateSsrcGroups(const SessionDescription& description) { for (const ContentInfo& content : description.contents()) { if (content.type != MediaProtocolType::kRtp) { continue; } for (const StreamParams& stream : content.media_description()->streams()) { - for (const cricket::SsrcGroup& group : stream.ssrc_groups) { + for (const SsrcGroup& group : stream.ssrc_groups) { // Validate the number of SSRCs for standard SSRC group semantics such // as FID and FEC-FR and the non-standard SIM group. - if ((group.semantics == cricket::kFidSsrcGroupSemantics && + if ((group.semantics == kFidSsrcGroupSemantics && group.ssrcs.size() != 2) || - (group.semantics == cricket::kFecFrSsrcGroupSemantics && + (group.semantics == kFecFrSsrcGroupSemantics && group.ssrcs.size() != 2) || - (group.semantics == cricket::kSimSsrcGroupSemantics && + (group.semantics == kSimSsrcGroupSemantics && group.ssrcs.size() > kMaxSimulcastStreams)) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, "The media section with MID='" + content.mid() + @@ -562,6 +581,34 @@ RTCError ValidateSsrcGroups(const cricket::SessionDescription& description) { return RTCError::OK(); } +RTCError ValidatePayloadTypes(const SessionDescription& description) { + for (const ContentInfo& content : description.contents()) { + if (content.type != MediaProtocolType::kRtp) { + continue; + } + const auto media_description = content.media_description(); + RTC_DCHECK(media_description); + if (content.rejected || !media_description || + !media_description->has_codecs()) { + continue; + } + const auto type = media_description->type(); + if (type == webrtc::MediaType::AUDIO || type == webrtc::MediaType::VIDEO) { + for (const auto& codec : media_description->codecs()) { + if (!PayloadType::IsValid(codec.id, media_description->rtcp_mux())) { + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_PARAMETER, + "The media section with MID='" + content.mid() + + "' used an invalid payload type " + absl::StrCat(codec.id) + + " for codec '" + codec.name + ", rtcp-mux:" + + (media_description->rtcp_mux() ? 
"enabled" : "disabled")); + } + } + } + } + return RTCError::OK(); +} + bool IsValidOfferToReceiveMedia(int value) { typedef PeerConnectionInterface::RTCOfferAnswerOptions Options; return (value >= Options::kUndefined) && @@ -595,6 +642,14 @@ std::vector GetSendEncodingsFromRemoteDescription( RtpEncodingParameters parameters; parameters.rid = layer.rid; parameters.active = !layer.is_paused; + // If a payload type has been specified for this rid, set the codec + // corresponding to that payload type. + auto rid_desc = std::find_if( + desc.receive_rids().begin(), desc.receive_rids().end(), + [&layer](const RidDescription& rid) { return rid.rid == layer.rid; }); + if (rid_desc != desc.receive_rids().end() && !rid_desc->codecs.empty()) { + parameters.codec = rid_desc->codecs[0].ToCodecParameters(); + } result.push_back(parameters); } @@ -603,7 +658,7 @@ std::vector GetSendEncodingsFromRemoteDescription( RTCError UpdateSimulcastLayerStatusInSender( const std::vector& layers, - rtc::scoped_refptr sender) { + scoped_refptr sender) { RTC_DCHECK(sender); RtpParameters parameters = sender->GetParametersInternalWithAllLayers(); std::vector disabled_layers; @@ -647,8 +702,7 @@ bool SimulcastIsRejected(const ContentInfo* local_content, return simulcast_offered && (!simulcast_answered || !rids_supported); } -RTCError DisableSimulcastInSender( - rtc::scoped_refptr sender) { +RTCError DisableSimulcastInSender(scoped_refptr sender) { RTC_DCHECK(sender); RtpParameters parameters = sender->GetParametersInternalWithAllLayers(); if (parameters.encodings.size() <= 1) { @@ -665,16 +719,19 @@ RTCError DisableSimulcastInSender( // The SDP parser used to populate these values by default for the 'content // name' if an a=mid line was absent. -absl::string_view GetDefaultMidForPlanB(cricket::MediaType media_type) { +absl::string_view GetDefaultMidForPlanB(webrtc::MediaType media_type) { switch (media_type) { - case cricket::MEDIA_TYPE_AUDIO: - return cricket::CN_AUDIO; - case cricket::MEDIA_TYPE_VIDEO: - return cricket::CN_VIDEO; - case cricket::MEDIA_TYPE_DATA: - return cricket::CN_DATA; - case cricket::MEDIA_TYPE_UNSUPPORTED: + case webrtc::MediaType::AUDIO: + return CN_AUDIO; + case webrtc::MediaType::VIDEO: + return CN_VIDEO; + case webrtc::MediaType::DATA: + return CN_DATA; + case webrtc::MediaType::UNSUPPORTED: return "not supported"; + default: + // Fall through to RTC_CHECK_NOTREACHED + break; } RTC_DCHECK_NOTREACHED(); return ""; @@ -682,19 +739,19 @@ absl::string_view GetDefaultMidForPlanB(cricket::MediaType media_type) { // Add options to |[audio/video]_media_description_options| from `senders`. 
void AddPlanBRtpSenderOptions( - const std::vector>>& senders, - cricket::MediaDescriptionOptions* audio_media_description_options, - cricket::MediaDescriptionOptions* video_media_description_options, + const std::vector< + scoped_refptr>>& senders, + MediaDescriptionOptions* audio_media_description_options, + MediaDescriptionOptions* video_media_description_options, int num_sim_layers) { for (const auto& sender : senders) { - if (sender->media_type() == cricket::MEDIA_TYPE_AUDIO) { + if (sender->media_type() == webrtc::MediaType::AUDIO) { if (audio_media_description_options) { audio_media_description_options->AddAudioSender( sender->id(), sender->internal()->stream_ids()); } } else { - RTC_DCHECK(sender->media_type() == cricket::MEDIA_TYPE_VIDEO); + RTC_DCHECK(sender->media_type() == webrtc::MediaType::VIDEO); if (video_media_description_options) { video_media_description_options->AddVideoSender( sender->id(), sender->internal()->stream_ids(), {}, @@ -704,7 +761,7 @@ void AddPlanBRtpSenderOptions( } } -cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForTransceiver( +MediaDescriptionOptions GetMediaDescriptionOptionsForTransceiver( RtpTransceiver* transceiver, const std::string& mid, bool is_create_offer) { @@ -713,10 +770,10 @@ cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForTransceiver( // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-createoffer. bool stopped = is_create_offer ? transceiver->stopping() : transceiver->stopped(); - cricket::MediaDescriptionOptions media_description_options( + MediaDescriptionOptions media_description_options( transceiver->media_type(), mid, transceiver->direction(), stopped); media_description_options.codec_preferences = - transceiver->codec_preferences(); + transceiver->filtered_codec_preferences(); media_description_options.header_extensions = transceiver->GetHeaderExtensionsToNegotiate(); // This behavior is specified in JSEP. The gist is that: @@ -729,7 +786,7 @@ cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForTransceiver( return media_description_options; } - cricket::SenderOptions sender_options; + SenderOptions sender_options; sender_options.track_id = transceiver->sender()->id(); sender_options.stream_ids = transceiver->sender()->stream_ids(); @@ -749,7 +806,17 @@ cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForTransceiver( if (encoding.rid.empty()) { continue; } - send_rids.push_back(RidDescription(encoding.rid, RidDirection::kSend)); + auto send_rid = RidDescription(encoding.rid, RidDirection::kSend); + if (encoding.codec) { + auto send_codecs = transceiver->sender_internal()->GetSendCodecs(); + for (const Codec& codec : send_codecs) { + if (IsSameRtpCodecIgnoringLevel(codec, *encoding.codec)) { + send_rid.codecs.push_back(codec); + break; + } + } + } + send_rids.push_back(send_rid); send_layers.AddLayer(SimulcastLayer(encoding.rid, !encoding.active)); } @@ -781,7 +848,7 @@ const ContentInfo* GetContentByIndex(const SessionDescriptionInterface* sdesc, // m= sectionss (in other words, nothing that involves a map/array). 
void ExtractSharedMediaSessionOptions( const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options, - cricket::MediaSessionOptions* session_options) { + MediaSessionOptions* session_options) { session_options->vad_enabled = rtc_options.voice_activity_detection; session_options->bundle_enabled = rtc_options.use_rtp_mux; session_options->raw_packetization_for_video = @@ -791,7 +858,7 @@ void ExtractSharedMediaSessionOptions( // Generate a RTCP CNAME when a PeerConnection is created. std::string GenerateRtcpCname() { std::string cname; - if (!rtc::CreateRandomString(kRtcpCnameLength, &cname)) { + if (!CreateRandomString(kRtcpCnameLength, &cname)) { RTC_LOG(LS_ERROR) << "Failed to generate CNAME."; RTC_DCHECK_NOTREACHED(); } @@ -799,8 +866,8 @@ std::string GenerateRtcpCname() { } // Check if we can send `new_stream` on a PeerConnection. -bool CanAddLocalMediaStream(webrtc::StreamCollectionInterface* current_streams, - webrtc::MediaStreamInterface* new_stream) { +bool CanAddLocalMediaStream(StreamCollectionInterface* current_streams, + MediaStreamInterface* new_stream) { if (!new_stream || !current_streams) { return false; } @@ -812,8 +879,8 @@ bool CanAddLocalMediaStream(webrtc::StreamCollectionInterface* current_streams, return true; } -rtc::scoped_refptr LookupDtlsTransportByMid( - rtc::Thread* network_thread, +scoped_refptr LookupDtlsTransportByMid( + Thread* network_thread, JsepTransportController* controller, const std::string& mid) { // TODO(tommi): Can we post this (and associated operations where this @@ -826,7 +893,7 @@ rtc::scoped_refptr LookupDtlsTransportByMid( [controller, &mid] { return controller->LookupDtlsTransportByMid(mid); }); } -bool ContentHasHeaderExtension(const cricket::ContentInfo& content_info, +bool ContentHasHeaderExtension(const ContentInfo& content_info, absl::string_view header_extension_uri) { for (const RtpExtension& rtp_header_extension : content_info.media_description()->rtp_header_extensions()) { @@ -837,10 +904,25 @@ bool ContentHasHeaderExtension(const cricket::ContentInfo& content_info, return false; } +// Matches the given host with the wildcard host. A wildcard `*` matches the +// current character and anything that comes after it. For instance, 127.0.* +// matches any string that starts with `127.0.`. For IPv6 addresses, the +// wildcard host should also start with `[` and end with `]` or a wildcard. +bool WildcardHostPrefixMatch(absl::string_view host, + absl::string_view wildcard_host) { + for (size_t i = 0; i < std::min(host.size(), wildcard_host.size()); ++i) { + if (host[i] == wildcard_host[i]) { + continue; + } + return wildcard_host[i] == '*'; + } + return host.size() == wildcard_host.size(); +} + } // namespace void UpdateRtpHeaderExtensionPreferencesFromSdpMunging( - const cricket::SessionDescription* description, + const SessionDescription* description, TransceiverList* transceivers) { // This integrates the RTP Header Extension Control API and local SDP munging // for backward compability reasons. 
If something was enabled in the local @@ -849,7 +931,7 @@ void UpdateRtpHeaderExtensionPreferencesFromSdpMunging( RTC_DCHECK(description); RTC_DCHECK(transceivers); for (const auto& content : description->contents()) { - auto transceiver = transceivers->FindByMid(content.name); + auto transceiver = transceivers->FindByMid(content.mid()); if (!transceiver) { continue; } @@ -876,7 +958,7 @@ class SdpOfferAnswerHandler::RemoteDescriptionOperation { RemoteDescriptionOperation( SdpOfferAnswerHandler* handler, std::unique_ptr desc, - rtc::scoped_refptr observer, + scoped_refptr observer, std::function operations_chain_callback) : handler_(handler), desc_(std::move(desc)), @@ -907,7 +989,7 @@ class SdpOfferAnswerHandler::RemoteDescriptionOperation { if (!error_.ok() && type_ != static_cast(-1)) { std::string error_message = - GetSetDescriptionErrorMessage(cricket::CS_REMOTE, type_, error_); + GetSetDescriptionErrorMessage(CS_REMOTE, type_, error_); RTC_LOG(LS_ERROR) << error_message; error_.set_message(error_message); } @@ -967,8 +1049,8 @@ class SdpOfferAnswerHandler::RemoteDescriptionOperation { RTC_DCHECK(ok()); RTC_DCHECK(bundle_groups_by_mid_.empty()) << "Already called?"; bundle_groups_by_mid_ = GetBundleGroupsByMid(description()); - error_ = handler_->ValidateSessionDescription( - desc_.get(), cricket::CS_REMOTE, bundle_groups_by_mid_); + error_ = handler_->ValidateSessionDescription(desc_.get(), CS_REMOTE, + bundle_groups_by_mid_); return ok(); } @@ -1005,15 +1087,13 @@ class SdpOfferAnswerHandler::RemoteDescriptionOperation { const auto* remote_description = handler_->remote_description(); - const cricket::SessionDescription* session_desc = - remote_description->description(); + const SessionDescription* session_desc = remote_description->description(); // Transport and Media channels will be created only when offer is set. if (unified_plan_) { error_ = handler_->UpdateTransceiversAndDataChannels( - cricket::CS_REMOTE, *remote_description, - handler_->local_description(), old_remote_description(), - bundle_groups_by_mid_); + CS_REMOTE, *remote_description, handler_->local_description(), + old_remote_description(), bundle_groups_by_mid_); } else { // Media channels will be created only when offer is set. These may use // new transports just created by PushdownTransportDescription. @@ -1032,8 +1112,8 @@ class SdpOfferAnswerHandler::RemoteDescriptionOperation { bool UpdateSessionState() { RTC_DCHECK(ok()); error_ = handler_->UpdateSessionState( - type_, cricket::CS_REMOTE, - handler_->remote_description()->description(), bundle_groups_by_mid_); + type_, CS_REMOTE, handler_->remote_description()->description(), + bundle_groups_by_mid_); if (!ok()) SetAsSessionError(); return ok(); @@ -1051,7 +1131,7 @@ class SdpOfferAnswerHandler::RemoteDescriptionOperation { // Convenience getter for desc_->GetType(). SdpType type() const { return type_; } bool unified_plan() const { return unified_plan_; } - cricket::SessionDescription* description() { return desc_->description(); } + SessionDescription* description() { return desc_->description(); } const SessionDescriptionInterface* old_remote_description() const { RTC_DCHECK(!desc_) << "Called before replacing the remote description"; @@ -1065,8 +1145,8 @@ class SdpOfferAnswerHandler::RemoteDescriptionOperation { // Returns a reference to a cached map of bundle groups ordered by mid. // Note that this will only be valid after a successful call to // `IsDescriptionValid`. 
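// Illustrative aside (not part of the patch): the prefix-wildcard semantics
// described for WildcardHostPrefixMatch above ('*' matches the current
// character and everything after it; without a '*' the strings must match
// exactly) can be exercised in isolation with this standalone sketch:
#include <algorithm>
#include <cstddef>
#include <string_view>

bool WildcardPrefixMatchSketch(std::string_view host,
                               std::string_view pattern) {
  const size_t common = std::min(host.size(), pattern.size());
  for (size_t i = 0; i < common; ++i) {
    if (host[i] != pattern[i]) {
      return pattern[i] == '*';  // A mismatch is only allowed at a wildcard.
    }
  }
  return host.size() == pattern.size();  // Exact match when no '*' was hit.
}

// Examples:
//   WildcardPrefixMatchSketch("127.0.0.1", "127.0.*") -> true
//   WildcardPrefixMatchSketch("10.0.0.1",  "127.0.*") -> false
//   WildcardPrefixMatchSketch("127.0",     "127.0.*") -> false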
- const std::map& - bundle_groups_by_mid() const { + const std::map& bundle_groups_by_mid() + const { RTC_DCHECK(ok()); return bundle_groups_by_mid_; } @@ -1104,10 +1184,10 @@ class SdpOfferAnswerHandler::RemoteDescriptionOperation { // is taking place since methods that depend on `old_remote_description()` // for updating the state, need it. std::unique_ptr replaced_remote_description_; - rtc::scoped_refptr observer_; + scoped_refptr observer_; std::function operations_chain_callback_; RTCError error_ = RTCError::OK(); - std::map bundle_groups_by_mid_; + std::map bundle_groups_by_mid_; SdpType type_; const bool unified_plan_; }; @@ -1118,8 +1198,8 @@ class SdpOfferAnswerHandler::ImplicitCreateSessionDescriptionObserver : public CreateSessionDescriptionObserver { public: ImplicitCreateSessionDescriptionObserver( - rtc::WeakPtr sdp_handler, - rtc::scoped_refptr + WeakPtr sdp_handler, + scoped_refptr set_local_description_observer) : sdp_handler_(std::move(sdp_handler)), set_local_description_observer_( @@ -1164,8 +1244,8 @@ class SdpOfferAnswerHandler::ImplicitCreateSessionDescriptionObserver private: bool was_called_ = false; - rtc::WeakPtr sdp_handler_; - rtc::scoped_refptr + WeakPtr sdp_handler_; + scoped_refptr set_local_description_observer_; std::function operation_complete_callback_; }; @@ -1177,7 +1257,7 @@ class CreateSessionDescriptionObserverOperationWrapper : public CreateSessionDescriptionObserver { public: CreateSessionDescriptionObserverOperationWrapper( - rtc::scoped_refptr observer, + scoped_refptr observer, std::function operation_complete_callback) : observer_(std::move(observer)), operation_complete_callback_(std::move(operation_complete_callback)) { @@ -1213,10 +1293,35 @@ class CreateSessionDescriptionObserverOperationWrapper #if RTC_DCHECK_IS_ON bool was_called_ = false; #endif // RTC_DCHECK_IS_ON - rtc::scoped_refptr observer_; + scoped_refptr observer_; std::function operation_complete_callback_; }; +// Wraps a session description observer so a Clone of the last created +// offer/answer can be stored. +class CreateDescriptionObserverWrapperWithCreationCallback + : public CreateSessionDescriptionObserver { + public: + CreateDescriptionObserverWrapperWithCreationCallback( + std::function callback, + scoped_refptr observer) + : callback_(callback), observer_(observer) { + RTC_DCHECK(observer_); + } + void OnSuccess(SessionDescriptionInterface* desc) override { + callback_(desc); + observer_->OnSuccess(desc); + } + void OnFailure(RTCError error) override { + callback_(nullptr); + observer_->OnFailure(std::move(error)); + } + + private: + std::function callback_; + scoped_refptr observer_; +}; + // Wrapper for SetSessionDescriptionObserver that invokes the success or failure // callback in a posted message handled by the peer connection. 
This introduces // a delay that prevents recursive API calls by the observer, but this also @@ -1230,8 +1335,8 @@ class SdpOfferAnswerHandler::SetSessionDescriptionObserverAdapter public SetRemoteDescriptionObserverInterface { public: SetSessionDescriptionObserverAdapter( - rtc::WeakPtr handler, - rtc::scoped_refptr inner_observer) + WeakPtr handler, + scoped_refptr inner_observer) : handler_(std::move(handler)), inner_observer_(std::move(inner_observer)) {} @@ -1257,8 +1362,8 @@ class SdpOfferAnswerHandler::SetSessionDescriptionObserverAdapter } } - rtc::WeakPtr handler_; - rtc::scoped_refptr inner_observer_; + WeakPtr handler_; + scoped_refptr inner_observer_; }; class SdpOfferAnswerHandler::LocalIceCredentialsToReplace { @@ -1316,7 +1421,7 @@ SdpOfferAnswerHandler::SdpOfferAnswerHandler(PeerConnectionSdpMethods* pc, context_(context), local_streams_(StreamCollection::Create()), remote_streams_(StreamCollection::Create()), - operations_chain_(rtc::OperationsChain::Create()), + operations_chain_(OperationsChain::Create()), rtcp_cname_(GenerateRtcpCname()), local_ice_credentials_to_replace_(new LocalIceCredentialsToReplace()), weak_ptr_factory_(this) { @@ -1334,17 +1439,25 @@ SdpOfferAnswerHandler::~SdpOfferAnswerHandler() {} std::unique_ptr SdpOfferAnswerHandler::Create( PeerConnectionSdpMethods* pc, const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies& dependencies, - ConnectionContext* context) { + std::unique_ptr cert_generator, + std::unique_ptr + video_bitrate_allocator_factory, + ConnectionContext* context, + CodecLookupHelper* codec_lookup_helper) { auto handler = absl::WrapUnique(new SdpOfferAnswerHandler(pc, context)); - handler->Initialize(configuration, dependencies, context); + handler->Initialize(configuration, std::move(cert_generator), + std::move(video_bitrate_allocator_factory), context, + codec_lookup_helper); return handler; } void SdpOfferAnswerHandler::Initialize( const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies& dependencies, - ConnectionContext* context) { + std::unique_ptr cert_generator, + std::unique_ptr + video_bitrate_allocator_factory, + ConnectionContext* context, + CodecLookupHelper* codec_lookup_helper) { RTC_DCHECK_RUN_ON(signaling_thread()); // 100 kbps is used by default, but can be overriden by a non-standard // RTCConfiguration value (not available on Web). @@ -1361,7 +1474,7 @@ void SdpOfferAnswerHandler::Initialize( configuration.audio_jitter_buffer_min_delay_ms; // Obtain a certificate from RTCConfiguration if any were provided (optional). - rtc::scoped_refptr certificate; + scoped_refptr certificate; if (!configuration.certificates.empty()) { // TODO(hbos,torbjorng): Decide on certificate-selection strategy instead of // just picking the first one. 
The decision should be made based on the DTLS @@ -1372,33 +1485,34 @@ void SdpOfferAnswerHandler::Initialize( webrtc_session_desc_factory_ = std::make_unique( context, this, pc_->session_id(), pc_->dtls_enabled(), - std::move(dependencies.cert_generator), std::move(certificate), - [this](const rtc::scoped_refptr& certificate) { + std::move(cert_generator), std::move(certificate), + [this](const scoped_refptr& certificate) { RTC_DCHECK_RUN_ON(signaling_thread()); transport_controller_s()->SetLocalCertificate(certificate); }, - pc_->trials()); + codec_lookup_helper, pc_->trials()); if (pc_->options()->disable_encryption) { - webrtc_session_desc_factory_->SetSdesPolicy(cricket::SEC_DISABLED); + RTC_LOG(LS_INFO) + << "Disabling encryption. This should only be done in tests."; + webrtc_session_desc_factory_->SetInsecureForTesting(); } webrtc_session_desc_factory_->set_enable_encrypted_rtp_header_extensions( pc_->GetCryptoOptions().srtp.enable_encrypted_rtp_header_extensions); webrtc_session_desc_factory_->set_is_unified_plan(IsUnifiedPlan()); - if (dependencies.video_bitrate_allocator_factory) { - video_bitrate_allocator_factory_ = - std::move(dependencies.video_bitrate_allocator_factory); - } else { + video_bitrate_allocator_factory_ = std::move(video_bitrate_allocator_factory); + if (!video_bitrate_allocator_factory_) { video_bitrate_allocator_factory_ = CreateBuiltinVideoBitrateAllocatorFactory(); } + codec_lookup_helper_ = codec_lookup_helper; } // ================================================================== // Access to pc_ variables -cricket::MediaEngineInterface* SdpOfferAnswerHandler::media_engine() const { +MediaEngineInterface* SdpOfferAnswerHandler::media_engine() const { RTC_DCHECK(context_); return context_->media_engine(); } @@ -1437,10 +1551,10 @@ const DataChannelController* SdpOfferAnswerHandler::data_channel_controller() const { return pc_->data_channel_controller(); } -cricket::PortAllocator* SdpOfferAnswerHandler::port_allocator() { +PortAllocator* SdpOfferAnswerHandler::port_allocator() { return pc_->port_allocator(); } -const cricket::PortAllocator* SdpOfferAnswerHandler::port_allocator() const { +const PortAllocator* SdpOfferAnswerHandler::port_allocator() const { return pc_->port_allocator(); } RtpTransmissionManager* SdpOfferAnswerHandler::rtp_manager() { @@ -1468,11 +1582,11 @@ void SdpOfferAnswerHandler::RestartIce() { UpdateNegotiationNeeded(); } -rtc::Thread* SdpOfferAnswerHandler::signaling_thread() const { +Thread* SdpOfferAnswerHandler::signaling_thread() const { return context_->signaling_thread(); } -rtc::Thread* SdpOfferAnswerHandler::network_thread() const { +Thread* SdpOfferAnswerHandler::network_thread() const { return context_->network_thread(); } @@ -1486,7 +1600,7 @@ void SdpOfferAnswerHandler::CreateOffer( operations_chain_->ChainOperation( [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), observer_refptr = - rtc::scoped_refptr(observer), + scoped_refptr(observer), options](std::function operations_chain_callback) { // Abort early if `this_weak_ptr` is no longer valid. if (!this_weak_ptr) { @@ -1497,9 +1611,10 @@ void SdpOfferAnswerHandler::CreateOffer( return; } // The operation completes asynchronously when the wrapper is invoked. 
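// Illustrative aside (not part of the patch): CreateOffer above (like the
// other SetLocalDescription/SetRemoteDescription entry points) chains its work
// so that each queued operation only starts once the previous one has reported
// completion. A minimal sketch of that pattern; this is a simplified stand-in,
// not the real rtc::OperationsChain interface, and it assumes single-threaded
// use:
#include <functional>
#include <queue>
#include <utility>

class OperationsChainSketch {
 public:
  using Operation = std::function<void(std::function<void()> done)>;

  void Chain(Operation operation) {
    pending_.push(std::move(operation));
    if (!running_) {
      RunNext();
    }
  }

 private:
  void RunNext() {
    if (pending_.empty()) {
      running_ = false;
      return;
    }
    running_ = true;
    Operation operation = std::move(pending_.front());
    pending_.pop();
    // The operation may complete asynchronously; the chain only advances when
    // the operation invokes `done`.
    operation([this] { RunNext(); });
  }

  std::queue<Operation> pending_;
  bool running_ = false;
};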
- auto observer_wrapper = rtc::make_ref_counted< - CreateSessionDescriptionObserverOperationWrapper>( - std::move(observer_refptr), std::move(operations_chain_callback)); + auto observer_wrapper = + make_ref_counted( + std::move(observer_refptr), + std::move(operations_chain_callback)); this_weak_ptr->DoCreateOffer(options, observer_wrapper); }); } @@ -1513,8 +1628,7 @@ void SdpOfferAnswerHandler::SetLocalDescription( // lambda will execute immediately. operations_chain_->ChainOperation( [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), - observer_refptr = - rtc::scoped_refptr(observer), + observer_refptr = scoped_refptr(observer), desc = std::unique_ptr(desc_ptr)]( std::function operations_chain_callback) mutable { // Abort early if `this_weak_ptr` is no longer valid. @@ -1529,7 +1643,7 @@ void SdpOfferAnswerHandler::SetLocalDescription( // `observer_refptr` is invoked in a posted message. this_weak_ptr->DoSetLocalDescription( std::move(desc), - rtc::make_ref_counted( + make_ref_counted( this_weak_ptr, observer_refptr)); // For backwards-compatability reasons, we declare the operation as // completed here (rather than in a post), so that the operation chain @@ -1542,7 +1656,7 @@ void SdpOfferAnswerHandler::SetLocalDescription( void SdpOfferAnswerHandler::SetLocalDescription( std::unique_ptr desc, - rtc::scoped_refptr observer) { + scoped_refptr observer) { RTC_DCHECK_RUN_ON(signaling_thread()); // Chain this operation. If asynchronous operations are pending on the chain, // this operation will be queued to be invoked, otherwise the contents of the @@ -1570,19 +1684,18 @@ void SdpOfferAnswerHandler::SetLocalDescription( void SdpOfferAnswerHandler::SetLocalDescription( SetSessionDescriptionObserver* observer) { RTC_DCHECK_RUN_ON(signaling_thread()); - SetLocalDescription( - rtc::make_ref_counted( - weak_ptr_factory_.GetWeakPtr(), - rtc::scoped_refptr(observer))); + SetLocalDescription(make_ref_counted( + weak_ptr_factory_.GetWeakPtr(), + scoped_refptr(observer))); } void SdpOfferAnswerHandler::SetLocalDescription( - rtc::scoped_refptr observer) { + scoped_refptr observer) { RTC_DCHECK_RUN_ON(signaling_thread()); // The `create_sdp_observer` handles performing DoSetLocalDescription() with // the resulting description as well as completing the operation. auto create_sdp_observer = - rtc::make_ref_counted( + make_ref_counted( weak_ptr_factory_.GetWeakPtr(), observer); // Chain this operation. 
If asynchronous operations are pending on the chain, // this operation will be queued to be invoked, otherwise the contents of the @@ -1633,8 +1746,7 @@ void SdpOfferAnswerHandler::SetLocalDescription( RTCError SdpOfferAnswerHandler::ApplyLocalDescription( std::unique_ptr desc, - const std::map& - bundle_groups_by_mid) { + const std::map& bundle_groups_by_mid) { TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::ApplyLocalDescription"); RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(desc); @@ -1681,14 +1793,14 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( } } - RTCError error = PushdownTransportDescription(cricket::CS_LOCAL, type); + RTCError error = PushdownTransportDescription(CS_LOCAL, type); if (!error.ok()) { return error; } if (IsUnifiedPlan()) { error = UpdateTransceiversAndDataChannels( - cricket::CS_LOCAL, *local_description(), old_local_description, + CS_LOCAL, *local_description(), old_local_description, remote_description(), bundle_groups_by_mid); if (!error.ok()) { RTC_LOG(LS_ERROR) << error.message() << " (" << SdpTypeToString(type) @@ -1696,8 +1808,8 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( return error; } if (ConfiguredForMedia()) { - std::vector> remove_list; - std::vector> removed_streams; + std::vector> remove_list; + std::vector> removed_streams; for (const auto& transceiver_ext : transceivers()->List()) { auto transceiver = transceiver_ext->internal(); if (transceiver->stopped()) { @@ -1767,8 +1879,7 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( RemoveUnusedChannels(local_description()->description()); } - error = UpdateSessionState(type, cricket::CS_LOCAL, - local_description()->description(), + error = UpdateSessionState(type, CS_LOCAL, local_description()->description(), bundle_groups_by_mid); if (!error.ok()) { RTC_LOG(LS_ERROR) << error.message() << " (" << SdpTypeToString(type) @@ -1798,7 +1909,7 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( if (!result.second) { LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_PARAMETER, - "Duplicate ssrc " + rtc::ToString(ssrc) + " is not allowed"); + "Duplicate ssrc " + absl::StrCat(ssrc) + " is not allowed"); } } } @@ -1819,7 +1930,7 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( if (!content) { continue; } - cricket::ChannelInterface* channel = transceiver->channel(); + ChannelInterface* channel = transceiver->channel(); if (content->rejected || !channel || channel->local_streams().empty()) { // 0 is a special value meaning "this sender has no associated send // stream". Need to call this so the sender won't attempt to configure @@ -1847,26 +1958,26 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( // Update state and SSRC of local MediaStreams and DataChannels based on the // local session description. 
- const cricket::ContentInfo* audio_content = + const ContentInfo* audio_content = GetFirstAudioContent(local_description()->description()); if (audio_content) { if (audio_content->rejected) { - RemoveSenders(cricket::MEDIA_TYPE_AUDIO); + RemoveSenders(webrtc::MediaType::AUDIO); } else { - const cricket::AudioContentDescription* audio_desc = - audio_content->media_description()->as_audio(); + const MediaContentDescription* audio_desc = + audio_content->media_description(); UpdateLocalSenders(audio_desc->streams(), audio_desc->type()); } } - const cricket::ContentInfo* video_content = + const ContentInfo* video_content = GetFirstVideoContent(local_description()->description()); if (video_content) { if (video_content->rejected) { - RemoveSenders(cricket::MEDIA_TYPE_VIDEO); + RemoveSenders(webrtc::MediaType::VIDEO); } else { - const cricket::VideoContentDescription* video_desc = - video_content->media_description()->as_video(); + const MediaContentDescription* video_desc = + video_content->media_description(); UpdateLocalSenders(video_desc->streams(), video_desc->type()); } } @@ -1897,8 +2008,7 @@ void SdpOfferAnswerHandler::SetRemoteDescription( // lambda will execute immediately. operations_chain_->ChainOperation( [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), - observer_refptr = - rtc::scoped_refptr(observer), + observer_refptr = scoped_refptr(observer), desc = std::unique_ptr(desc_ptr)]( std::function operations_chain_callback) mutable { // Abort early if `this_weak_ptr` is no longer valid. @@ -1914,7 +2024,7 @@ void SdpOfferAnswerHandler::SetRemoteDescription( this_weak_ptr->DoSetRemoteDescription( std::make_unique( this_weak_ptr.get(), std::move(desc), - rtc::make_ref_counted( + make_ref_counted( this_weak_ptr, observer_refptr), std::move(operations_chain_callback))); }); @@ -1922,7 +2032,7 @@ void SdpOfferAnswerHandler::SetRemoteDescription( void SdpOfferAnswerHandler::SetRemoteDescription( std::unique_ptr desc, - rtc::scoped_refptr observer) { + scoped_refptr observer) { RTC_DCHECK_RUN_ON(signaling_thread()); // Chain this operation. If asynchronous operations are pending on the chain, // this operation will be queued to be invoked, otherwise the contents of the @@ -1972,11 +2082,13 @@ RTCError SdpOfferAnswerHandler::ReplaceRemoteDescription( // The session description to apply now must be accessed by // `remote_description()`. - const cricket::SessionDescription* session_desc = - remote_description()->description(); + const SessionDescription* session_desc = remote_description()->description(); + + const auto* local = local_description(); // NOTE: This will perform a BlockingCall() to the network thread. - return transport_controller_s()->SetRemoteDescription(sdp_type, session_desc); + return transport_controller_s()->SetRemoteDescription( + sdp_type, local ? local->description() : nullptr, session_desc); } void SdpOfferAnswerHandler::ApplyRemoteDescription( @@ -2005,7 +2117,7 @@ void SdpOfferAnswerHandler::ApplyRemoteDescription( return; if (operation->old_remote_description()) { - for (const cricket::ContentInfo& content : + for (const ContentInfo& content : operation->old_remote_description()->description()->contents()) { // Check if this new SessionDescription contains new ICE ufrag and // password that indicates the remote peer requests an ICE restart. @@ -2013,9 +2125,9 @@ void SdpOfferAnswerHandler::ApplyRemoteDescription( // remote description, this should reset pending_ice_restarts and compare // against the current description. 
if (CheckForRemoteIceRestart(operation->old_remote_description(), - remote_description(), content.name)) { + remote_description(), content.mid())) { if (operation->type() == SdpType::kOffer) { - pending_ice_restarts_.insert(content.name); + pending_ice_restarts_.insert(content.mid()); } } else { // We retain all received candidates only if ICE is not restarted. @@ -2026,7 +2138,7 @@ void SdpOfferAnswerHandler::ApplyRemoteDescription( // description plus any candidates added since then. We should remove // this once we're sure it won't break anything. WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription( - operation->old_remote_description(), content.name, + operation->old_remote_description(), content.mid(), mutable_remote_description()); } } @@ -2057,24 +2169,16 @@ void SdpOfferAnswerHandler::ApplyRemoteDescription( if (operation->unified_plan()) { ApplyRemoteDescriptionUpdateTransceiverState(operation->type()); } - - const cricket::AudioContentDescription* audio_desc = - GetFirstAudioContentDescription(remote_description()->description()); - const cricket::VideoContentDescription* video_desc = - GetFirstVideoContentDescription(remote_description()->description()); - - // Check if the descriptions include streams, just in case the peer supports - // MSID, but doesn't indicate so with "a=msid-semantic". - if (remote_description()->description()->msid_supported() || - (audio_desc && !audio_desc->streams().empty()) || - (video_desc && !video_desc->streams().empty())) { - remote_peer_supports_msid_ = true; - } + remote_peer_supports_msid_ = + remote_description()->description()->msid_signaling() != + kMsidSignalingNotUsed; if (!operation->unified_plan()) { PlanBUpdateSendersAndReceivers( - GetFirstAudioContent(remote_description()->description()), audio_desc, - GetFirstVideoContent(remote_description()->description()), video_desc); + GetFirstAudioContent(remote_description()->description()), + GetFirstAudioContentDescription(remote_description()->description()), + GetFirstVideoContent(remote_description()->description()), + GetFirstVideoContentDescription(remote_description()->description())); } if (operation->type() == SdpType::kAnswer) { @@ -2099,11 +2203,11 @@ void SdpOfferAnswerHandler::ApplyRemoteDescriptionUpdateTransceiverState( if (!ConfiguredForMedia()) { return; } - std::vector> + std::vector> now_receiving_transceivers; - std::vector> remove_list; - std::vector> added_streams; - std::vector> removed_streams; + std::vector> remove_list; + std::vector> added_streams; + std::vector> removed_streams; for (const auto& transceiver_ext : transceivers()->List()) { const auto transceiver = transceiver_ext->internal(); const ContentInfo* content = @@ -2132,7 +2236,7 @@ void SdpOfferAnswerHandler::ApplyRemoteDescriptionUpdateTransceiverState( stream_ids = media_desc->streams()[0].stream_ids(); } - RTC_LOG(LS_INFO) << "Processing the MSIDs for MID=" << content->name + RTC_LOG(LS_INFO) << "Processing the MSIDs for MID=" << content->mid() << " (" << GetStreamIdsString(stream_ids) << ")."; SetAssociatedRemoteStreams(transceiver->receiver_internal(), stream_ids, &added_streams, &removed_streams); @@ -2143,7 +2247,7 @@ void SdpOfferAnswerHandler::ApplyRemoteDescriptionUpdateTransceiverState( if (!transceiver->fired_direction() || !RtpTransceiverDirectionHasRecv(*transceiver->fired_direction())) { RTC_LOG(LS_INFO) << "Processing the addition of a remote track for MID=" - << content->name << "."; + << content->mid() << "."; // Since the transceiver is passed to the user in an // 
OnTrack event, we must use the proxied transceiver. now_receiving_transceivers.push_back(transceiver_ext); @@ -2187,7 +2291,7 @@ void SdpOfferAnswerHandler::ApplyRemoteDescriptionUpdateTransceiverState( // 2.2.8.1.12: If the media description is rejected, and transceiver is // not already stopped, stop the RTCRtpTransceiver transceiver. if (content->rejected && !transceiver->stopped()) { - RTC_LOG(LS_INFO) << "Stopping transceiver for MID=" << content->name + RTC_LOG(LS_INFO) << "Stopping transceiver for MID=" << content->mid() << " since the media section was rejected."; transceiver->StopTransceiverProcedure(); } @@ -2221,16 +2325,16 @@ void SdpOfferAnswerHandler::ApplyRemoteDescriptionUpdateTransceiverState( } void SdpOfferAnswerHandler::PlanBUpdateSendersAndReceivers( - const cricket::ContentInfo* audio_content, - const cricket::AudioContentDescription* audio_desc, - const cricket::ContentInfo* video_content, - const cricket::VideoContentDescription* video_desc) { + const ContentInfo* audio_content, + const AudioContentDescription* audio_desc, + const ContentInfo* video_content, + const VideoContentDescription* video_desc) { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(!IsUnifiedPlan()); // We wait to signal new streams until we finish processing the description, // since only at that point will new streams have all their tracks. - rtc::scoped_refptr new_streams(StreamCollection::Create()); + scoped_refptr new_streams(StreamCollection::Create()); // TODO(steveanton): When removing RTP senders/receivers in response to a // rejected media section, there is some cleanup logic that expects the @@ -2244,7 +2348,7 @@ void SdpOfferAnswerHandler::PlanBUpdateSendersAndReceivers( // and MediaStreams. if (audio_content) { if (audio_content->rejected) { - RemoveSenders(cricket::MEDIA_TYPE_AUDIO); + RemoveSenders(webrtc::MediaType::AUDIO); } else { bool default_audio_track_needed = !remote_peer_supports_msid_ && @@ -2259,7 +2363,7 @@ void SdpOfferAnswerHandler::PlanBUpdateSendersAndReceivers( // and MediaStreams. if (video_content) { if (video_content->rejected) { - RemoveSenders(cricket::MEDIA_TYPE_VIDEO); + RemoveSenders(webrtc::MediaType::VIDEO); } else { bool default_video_track_needed = !remote_peer_supports_msid_ && @@ -2275,15 +2379,42 @@ void SdpOfferAnswerHandler::PlanBUpdateSendersAndReceivers( for (size_t i = 0; i < new_streams->count(); ++i) { MediaStreamInterface* new_stream = new_streams->at(i); pc_->legacy_stats()->AddStream(new_stream); - observer->OnAddStream(rtc::scoped_refptr(new_stream)); + observer->OnAddStream(scoped_refptr(new_stream)); } UpdateEndedRemoteMediaStreams(); } +void SdpOfferAnswerHandler::ReportInitialSdpMunging(bool had_local_description, + SdpType type) { + // Report SDP munging of the initial call to setLocalDescription separately. + if (!had_local_description) { + switch (type) { + case SdpType::kOffer: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.PeerConnection.SdpMunging.Offer.Initial", + last_sdp_munging_type_, SdpMungingType::kMaxValue); + break; + case SdpType::kAnswer: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.PeerConnection.SdpMunging.Answer.Initial", + last_sdp_munging_type_, SdpMungingType::kMaxValue); + break; + case SdpType::kPrAnswer: + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.PeerConnection.SdpMunging.PrAnswer.Initial", + last_sdp_munging_type_, SdpMungingType::kMaxValue); + break; + case SdpType::kRollback: + // Rollback does not have SDP so can not be munged. 
+ break; + } + } +} + void SdpOfferAnswerHandler::DoSetLocalDescription( std::unique_ptr desc, - rtc::scoped_refptr observer) { + scoped_refptr observer) { RTC_DCHECK_RUN_ON(signaling_thread()); TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoSetLocalDescription"); @@ -2320,21 +2451,74 @@ void SdpOfferAnswerHandler::DoSetLocalDescription( return; } - std::map bundle_groups_by_mid = + std::map bundle_groups_by_mid = GetBundleGroupsByMid(desc->description()); - RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_LOCAL, - bundle_groups_by_mid); + RTCError error = + ValidateSessionDescription(desc.get(), CS_LOCAL, bundle_groups_by_mid); if (!error.ok()) { - std::string error_message = GetSetDescriptionErrorMessage( - cricket::CS_LOCAL, desc->GetType(), error); + std::string error_message = + GetSetDescriptionErrorMessage(CS_LOCAL, desc->GetType(), error); RTC_LOG(LS_ERROR) << error_message; observer->OnSetLocalDescriptionComplete( - RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); + RTCError(error.type(), std::move(error_message))); return; } - // Grab the description type before moving ownership to ApplyLocalDescription, - // which may destroy it before returning. + SessionDescriptionInterface* last_created_desc = + desc->GetType() == SdpType::kOffer ? last_created_offer_.get() + : last_created_answer_.get(); + + // Determine if SDP munging was done. This is not yet acted upon. + bool had_local_description = !!local_description(); + SdpMungingType sdp_munging_type = + DetermineSdpMungingType(desc.get(), last_created_desc); + + if (!disable_sdp_munging_checks_ && + HasUfragSdpMunging(desc.get(), last_created_desc)) { + has_sdp_munged_ufrag_ = true; + if (pc_->trials().IsEnabled("WebRTC-NoSdpMangleUfrag")) { + RTC_LOG(LS_ERROR) << "Rejecting SDP because of ufrag modification"; + observer->OnSetLocalDescriptionComplete( + RTCError(RTCErrorType::INVALID_MODIFICATION, + "SDP is modified in a non-acceptable way")); + last_sdp_munging_type_ = sdp_munging_type; + ReportInitialSdpMunging(had_local_description, desc->GetType()); + return; + } + } + + // Workaround for isses.webrtc.org/412904801 - detect if packetization:raw + // was added in SDP munging, and if so, push it back into the codec vendor. + std::vector> codecs_mangled_to_raw; + // Ignore if other mungings have been applied. + if (last_created_desc && + last_created_desc->description()->contents().size() == + desc->description()->contents().size()) { + for (size_t i = 0; i < desc->description()->contents().size(); ++i) { + auto last_created_codecs = last_created_desc->description() + ->contents()[i] + .media_description() + ->codecs(); + auto new_codecs = + desc->description()->contents()[i].media_description()->codecs(); + if (last_created_codecs.size() != new_codecs.size()) { + continue; // Ignore added codecs + } + for (size_t j = 0; j < new_codecs.size(); ++j) { + if (new_codecs[j].type == Codec::Type::kVideo && + new_codecs[j].name == last_created_codecs[j].name && + new_codecs[j].id == last_created_codecs[j].id && + !last_created_codecs[j].packetization && + new_codecs[j].packetization == kPacketizationParamRaw) { + codecs_mangled_to_raw.push_back( + std::pair(last_created_codecs[j], new_codecs[j])); + } + } + } + } + + // Grab the description type before moving ownership to + // ApplyLocalDescription, which may destroy it before returning. 
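// Illustrative aside (not part of the patch): the workaround above compares
// the codecs of the locally generated description, index by index, against the
// description being applied and collects only the codecs whose sole change is
// a newly added "packetization:raw" parameter. A standalone sketch of that
// detection with simplified stand-in types (the real code walks Codec objects
// per m= section):
#include <cstddef>
#include <optional>
#include <string>
#include <utility>
#include <vector>

struct CodecSketch {
  int id = 0;
  std::string name;
  std::optional<std::string> packetization;
};

std::vector<std::pair<CodecSketch, CodecSketch>> FindRawPacketizationMunging(
    const std::vector<CodecSketch>& generated,
    const std::vector<CodecSketch>& applied) {
  std::vector<std::pair<CodecSketch, CodecSketch>> munged;
  if (generated.size() != applied.size()) {
    return munged;  // Added or removed codecs are handled elsewhere.
  }
  for (size_t i = 0; i < generated.size(); ++i) {
    const CodecSketch& before = generated[i];
    const CodecSketch& after = applied[i];
    if (before.id == after.id && before.name == after.name &&
        !before.packetization && after.packetization == "raw") {
      munged.emplace_back(before, after);
    }
  }
  return munged;
}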
const SdpType type = desc->GetType(); error = ApplyLocalDescription(std::move(desc), bundle_groups_by_mid); @@ -2346,7 +2530,7 @@ void SdpOfferAnswerHandler::DoSetLocalDescription( // so that future calls to SetLocalDescription/SetRemoteDescription fail. SetSessionError(SessionError::kContent, error.message()); std::string error_message = - GetSetDescriptionErrorMessage(cricket::CS_LOCAL, type, error); + GetSetDescriptionErrorMessage(CS_LOCAL, type, error); RTC_LOG(LS_ERROR) << error_message; observer->OnSetLocalDescriptionComplete( RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); @@ -2354,6 +2538,13 @@ void SdpOfferAnswerHandler::DoSetLocalDescription( } RTC_DCHECK(local_description()); + // Workaround for issues.webrtc.org/412904801 part 2. + // If application was successful, we change the codec vendor's codec + // tables according to the mangle. + // Note that this depends on there being a single codec vendor for all MIDs. + codec_lookup_helper_->GetCodecVendor()->ModifyVideoCodecs( + codecs_mangled_to_raw); + if (local_description()->GetType() == SdpType::kAnswer) { RemoveStoppedTransceivers(); @@ -2363,12 +2554,21 @@ void SdpOfferAnswerHandler::DoSetLocalDescription( [this] { port_allocator()->DiscardCandidatePool(); }); } + // Clear last created offer/answer and update SDP munging type. + last_created_offer_.reset(nullptr); + last_created_answer_.reset(nullptr); + last_sdp_munging_type_ = sdp_munging_type; + + // Report SDP munging of the initial call to setLocalDescription separately. + ReportInitialSdpMunging(had_local_description, + local_description()->GetType()); + observer->OnSetLocalDescriptionComplete(RTCError::OK()); pc_->NoteUsageEvent(UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED); // Check if negotiation is needed. We must do this after informing the - // observer that SetLocalDescription() has completed to ensure negotiation is - // not needed prior to the promise resolving. + // observer that SetLocalDescription() has completed to ensure negotiation + // is not needed prior to the promise resolving. if (IsUnifiedPlan()) { bool was_negotiation_needed = is_negotiation_needed_; UpdateNegotiationNeeded(); @@ -2381,15 +2581,15 @@ void SdpOfferAnswerHandler::DoSetLocalDescription( } } - // MaybeStartGathering needs to be called after informing the observer so that - // we don't signal any candidates before signaling that SetLocalDescription - // completed. + // MaybeStartGathering needs to be called after informing the observer so + // that we don't signal any candidates before signaling that + // SetLocalDescription completed. 
transport_controller_s()->MaybeStartGathering(); } void SdpOfferAnswerHandler::DoCreateOffer( const PeerConnectionInterface::RTCOfferAnswerOptions& options, - rtc::scoped_refptr observer) { + scoped_refptr observer) { RTC_DCHECK_RUN_ON(signaling_thread()); TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoCreateOffer"); @@ -2438,9 +2638,20 @@ void SdpOfferAnswerHandler::DoCreateOffer( } } - cricket::MediaSessionOptions session_options; + MediaSessionOptions session_options; GetOptionsForOffer(options, &session_options); - webrtc_session_desc_factory_->CreateOffer(observer.get(), options, + auto observer_wrapper = + make_ref_counted( + [this](const SessionDescriptionInterface* desc) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (desc) { + last_created_offer_ = desc->Clone(); + } else { + last_created_offer_.reset(nullptr); + } + }, + std::move(observer)); + webrtc_session_desc_factory_->CreateOffer(observer_wrapper.get(), options, session_options); } @@ -2455,7 +2666,7 @@ void SdpOfferAnswerHandler::CreateAnswer( operations_chain_->ChainOperation( [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), observer_refptr = - rtc::scoped_refptr(observer), + scoped_refptr(observer), options](std::function operations_chain_callback) { // Abort early if `this_weak_ptr` is no longer valid. if (!this_weak_ptr) { @@ -2466,16 +2677,17 @@ void SdpOfferAnswerHandler::CreateAnswer( return; } // The operation completes asynchronously when the wrapper is invoked. - auto observer_wrapper = rtc::make_ref_counted< - CreateSessionDescriptionObserverOperationWrapper>( - std::move(observer_refptr), std::move(operations_chain_callback)); + auto observer_wrapper = + make_ref_counted( + std::move(observer_refptr), + std::move(operations_chain_callback)); this_weak_ptr->DoCreateAnswer(options, observer_wrapper); }); } void SdpOfferAnswerHandler::DoCreateAnswer( const PeerConnectionInterface::RTCOfferAnswerOptions& options, - rtc::scoped_refptr observer) { + scoped_refptr observer) { RTC_DCHECK_RUN_ON(signaling_thread()); TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoCreateAnswer"); if (!observer) { @@ -2524,9 +2736,21 @@ void SdpOfferAnswerHandler::DoCreateAnswer( } } - cricket::MediaSessionOptions session_options; + MediaSessionOptions session_options; GetOptionsForAnswer(options, &session_options); - webrtc_session_desc_factory_->CreateAnswer(observer.get(), session_options); + auto observer_wrapper = + make_ref_counted( + [this](const SessionDescriptionInterface* desc) { + RTC_DCHECK_RUN_ON(signaling_thread()); + if (desc) { + last_created_answer_ = desc->Clone(); + } else { + last_created_answer_.reset(nullptr); + } + }, + std::move(observer)); + webrtc_session_desc_factory_->CreateAnswer(observer_wrapper.get(), + session_options); } void SdpOfferAnswerHandler::DoSetRemoteDescription( @@ -2584,17 +2808,17 @@ void SdpOfferAnswerHandler::SetRemoteDescriptionPostProcess(bool was_answer) { } void SdpOfferAnswerHandler::SetAssociatedRemoteStreams( - rtc::scoped_refptr receiver, + scoped_refptr receiver, const std::vector& stream_ids, - std::vector>* added_streams, - std::vector>* removed_streams) { + std::vector>* added_streams, + std::vector>* removed_streams) { RTC_DCHECK_RUN_ON(signaling_thread()); - std::vector> media_streams; + std::vector> media_streams; for (const std::string& stream_id : stream_ids) { - rtc::scoped_refptr stream( + scoped_refptr stream( remote_streams_->find(stream_id)); if (!stream) { - stream = MediaStreamProxy::Create(rtc::Thread::Current(), + stream = 
MediaStreamProxy::Create(Thread::Current(), MediaStream::Create(stream_id)); remote_streams_->AddStream(stream); added_streams->push_back(stream); @@ -2604,15 +2828,15 @@ void SdpOfferAnswerHandler::SetAssociatedRemoteStreams( // Special case: "a=msid" missing, use random stream ID. if (media_streams.empty() && !(remote_description()->description()->msid_signaling() & - cricket::kMsidSignalingMediaSection)) { + kMsidSignalingMediaSection)) { if (!missing_msid_default_stream_) { missing_msid_default_stream_ = MediaStreamProxy::Create( - rtc::Thread::Current(), MediaStream::Create(rtc::CreateRandomUuid())); + Thread::Current(), MediaStream::Create(CreateRandomUuid())); added_streams->push_back(missing_msid_default_stream_); } media_streams.push_back(missing_msid_default_stream_); } - std::vector> previous_streams = + std::vector> previous_streams = receiver->streams(); // SetStreams() will add/remove the receiver's track to/from the streams. // This differs from the spec - the spec uses an "addList" and "removeList" @@ -2739,7 +2963,7 @@ void SdpOfferAnswerHandler::AddIceCandidate( } bool SdpOfferAnswerHandler::RemoveIceCandidates( - const std::vector& candidates) { + const std::vector& candidates) { TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::RemoveIceCandidates"); RTC_DCHECK_RUN_ON(signaling_thread()); if (pc_->IsClosed()) { @@ -2786,7 +3010,7 @@ void SdpOfferAnswerHandler::AddLocalIceCandidate( } void SdpOfferAnswerHandler::RemoveLocalIceCandidates( - const std::vector& candidates) { + const std::vector& candidates) { RTC_DCHECK_RUN_ON(signaling_thread()); if (local_description()) { mutable_local_description()->RemoveCandidates(candidates); @@ -2854,10 +3078,9 @@ void SdpOfferAnswerHandler::ChangeSignalingState( RTCError SdpOfferAnswerHandler::UpdateSessionState( SdpType type, - cricket::ContentSource source, - const cricket::SessionDescription* description, - const std::map& - bundle_groups_by_mid) { + ContentSource source, + const SessionDescription* description, + const std::map& bundle_groups_by_mid) { RTC_DCHECK_RUN_ON(signaling_thread()); // If there's already a pending error then no state transition should @@ -2872,11 +3095,11 @@ RTCError SdpOfferAnswerHandler::UpdateSessionState( // Update the signaling state according to the specified state machine (see // https://w3c.github.io/webrtc-pc/#rtcsignalingstate-enum). if (type == SdpType::kOffer) { - ChangeSignalingState(source == cricket::CS_LOCAL + ChangeSignalingState(source == CS_LOCAL ? PeerConnectionInterface::kHaveLocalOffer : PeerConnectionInterface::kHaveRemoteOffer); } else if (type == SdpType::kPrAnswer) { - ChangeSignalingState(source == cricket::CS_LOCAL + ChangeSignalingState(source == CS_LOCAL ? 
PeerConnectionInterface::kHaveLocalPrAnswer : PeerConnectionInterface::kHaveRemotePrAnswer); } else { @@ -2927,7 +3150,7 @@ bool SdpOfferAnswerHandler::ShouldFireNegotiationNeededEvent( return true; } -rtc::scoped_refptr +scoped_refptr SdpOfferAnswerHandler::local_streams() { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_CHECK(!IsUnifiedPlan()) << "local_streams is not available with Unified " @@ -2936,7 +3159,7 @@ SdpOfferAnswerHandler::local_streams() { return local_streams_; } -rtc::scoped_refptr +scoped_refptr SdpOfferAnswerHandler::remote_streams() { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_CHECK(!IsUnifiedPlan()) << "remote_streams is not available with Unified " @@ -2956,8 +3179,7 @@ bool SdpOfferAnswerHandler::AddStream(MediaStreamInterface* local_stream) { return false; } - local_streams_->AddStream( - rtc::scoped_refptr(local_stream)); + local_streams_->AddStream(scoped_refptr(local_stream)); auto observer = std::make_unique( local_stream, [this](AudioTrackInterface* audio_track, @@ -3066,50 +3288,52 @@ RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { state != PeerConnectionInterface::kHaveRemoteOffer) { LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_STATE, - (rtc::StringBuilder("Called in wrong signalingState: ") + (StringBuilder("Called in wrong signalingState: ") << (PeerConnectionInterface::AsString(signaling_state()))) .Release()); } RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(IsUnifiedPlan()); - std::vector> + std::vector> now_receiving_transceivers; - std::vector> all_added_streams; - std::vector> all_removed_streams; - std::vector> removed_receivers; + std::vector> all_added_streams; + std::vector> all_removed_streams; + std::vector> removed_receivers; for (auto&& transceivers_stable_state_pair : transceivers()->StableStates()) { auto transceiver = transceivers_stable_state_pair.first; - auto state = transceivers_stable_state_pair.second; + auto stable_state = transceivers_stable_state_pair.second; - if (state.did_set_fired_direction()) { + if (stable_state.did_set_fired_direction()) { // If this rollback triggers going from not receiving to receving again, // we need to fire "ontrack". 
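// A minimal standalone sketch of the rule in the comment above: on rollback,
// "ontrack" fires only for transceivers that move from a non-receiving fired
// direction back to a receiving one. The enum and helper are simplified
// stand-ins for RtpTransceiverDirection / RtpTransceiverDirectionHasRecv,
// not the WebRTC types.
#include <optional>

enum class Direction { kSendRecv, kSendOnly, kRecvOnly, kInactive };

constexpr bool HasRecv(Direction d) {
  return d == Direction::kSendRecv || d == Direction::kRecvOnly;
}

// `previously_fired` is the direction the transceiver last fired ontrack
// with; `restored_fired` is the direction captured in the stable state being
// rolled back to. An unset optional means "never fired".
bool ShouldFireOnTrackOnRollback(std::optional<Direction> previously_fired,
                                 std::optional<Direction> restored_fired) {
  const bool was_receiving = previously_fired && HasRecv(*previously_fired);
  const bool will_receive = restored_fired && HasRecv(*restored_fired);
  return !was_receiving && will_receive;
}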
bool previously_fired_direction_is_recv = transceiver->fired_direction().has_value() && RtpTransceiverDirectionHasRecv(*transceiver->fired_direction()); bool currently_fired_direction_is_recv = - state.fired_direction().has_value() && - RtpTransceiverDirectionHasRecv(state.fired_direction().value()); + stable_state.fired_direction().has_value() && + RtpTransceiverDirectionHasRecv( + stable_state.fired_direction().value()); if (!previously_fired_direction_is_recv && currently_fired_direction_is_recv) { now_receiving_transceivers.push_back(transceiver); } - transceiver->internal()->set_fired_direction(state.fired_direction()); + transceiver->internal()->set_fired_direction( + stable_state.fired_direction()); } - if (state.remote_stream_ids()) { - std::vector> added_streams; - std::vector> removed_streams; + if (stable_state.remote_stream_ids()) { + std::vector> added_streams; + std::vector> removed_streams; SetAssociatedRemoteStreams(transceiver->internal()->receiver_internal(), - state.remote_stream_ids().value(), + stable_state.remote_stream_ids().value(), &added_streams, &removed_streams); all_added_streams.insert(all_added_streams.end(), added_streams.begin(), added_streams.end()); all_removed_streams.insert(all_removed_streams.end(), removed_streams.begin(), removed_streams.end()); - if (!state.has_m_section() && !state.newly_created()) { + if (!stable_state.has_m_section() && !stable_state.newly_created()) { continue; } } @@ -3125,7 +3349,7 @@ RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { transceiver->receiver()) { removed_receivers.push_back(transceiver->receiver()); } - if (state.newly_created()) { + if (stable_state.newly_created()) { if (transceiver->internal()->reused_for_addtrack()) { transceiver->internal()->set_created_by_addtrack(true); } else { @@ -3133,15 +3357,15 @@ RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { transceivers()->Remove(transceiver); } } - if (state.init_send_encodings()) { + if (stable_state.init_send_encodings()) { transceiver->internal()->sender_internal()->set_init_send_encodings( - state.init_send_encodings().value()); + stable_state.init_send_encodings().value()); } transceiver->internal()->sender_internal()->set_transport(nullptr); transceiver->internal()->receiver_internal()->set_transport(nullptr); - if (state.has_m_section()) { - transceiver->internal()->set_mid(state.mid()); - transceiver->internal()->set_mline_index(state.mline_index()); + if (stable_state.has_m_section()) { + transceiver->internal()->set_mid(stable_state.mid()); + transceiver->internal()->set_mline_index(stable_state.mline_index()); } } RTCError e = transport_controller_s()->RollbackTransports(); @@ -3200,7 +3424,7 @@ void SdpOfferAnswerHandler::OnOperationsChainEmpty() { } } -absl::optional SdpOfferAnswerHandler::is_caller() const { +std::optional SdpOfferAnswerHandler::is_caller() const { RTC_DCHECK_RUN_ON(signaling_thread()); return is_caller_; } @@ -3222,7 +3446,7 @@ bool SdpOfferAnswerHandler::NeedsIceRestart( return pc_->NeedsIceRestart(content_name); } -absl::optional SdpOfferAnswerHandler::GetDtlsRole( +std::optional SdpOfferAnswerHandler::GetDtlsRole( const std::string& mid) const { RTC_DCHECK_RUN_ON(signaling_thread()); return transport_controller_s()->GetDtlsRole(mid); @@ -3300,11 +3524,11 @@ void SdpOfferAnswerHandler::AllocateSctpSids() { return; } - absl::optional guessed_role = GuessSslRole(); + std::optional guessed_role = GuessSslRole(); network_thread()->BlockingCall( [&, data_channel_controller = data_channel_controller()] { 
RTC_DCHECK_RUN_ON(network_thread()); - absl::optional role = pc_->GetSctpSslRole_n(); + std::optional role = pc_->GetSctpSslRole_n(); if (!role) role = guessed_role; if (role) @@ -3312,10 +3536,10 @@ void SdpOfferAnswerHandler::AllocateSctpSids() { }); } -absl::optional SdpOfferAnswerHandler::GuessSslRole() const { +std::optional SdpOfferAnswerHandler::GuessSslRole() const { RTC_DCHECK_RUN_ON(signaling_thread()); if (!pc_->sctp_mid()) - return absl::nullopt; + return std::nullopt; // TODO(bugs.webrtc.org/13668): This guesswork is guessing wrong (returning // SSL_CLIENT = ACTIVE) if remote offer has role ACTIVE, but we'll be able @@ -3352,7 +3576,7 @@ absl::optional SdpOfferAnswerHandler::GuessSslRole() const { // * "Guessing" should always be correct if we get an SCTP session and are not // the offerer. - return is_caller() ? rtc::SSL_SERVER : rtc::SSL_CLIENT; + return is_caller() ? SSL_SERVER : SSL_CLIENT; } bool SdpOfferAnswerHandler::CheckIfNegotiationIsNeeded() { @@ -3374,16 +3598,16 @@ bool SdpOfferAnswerHandler::CheckIfNegotiationIsNeeded() { // 4. If connection has created any RTCDataChannels, and no m= section in // description has been negotiated yet for data, return true. if (data_channel_controller()->HasUsedDataChannels()) { - const cricket::ContentInfo* data_content = - cricket::GetFirstDataContent(description->description()->contents()); + const ContentInfo* data_content = + GetFirstDataContent(description->description()->contents()); if (!data_content) { return true; } // The remote end might have rejected the data content. - const cricket::ContentInfo* remote_data_content = + const ContentInfo* remote_data_content = current_remote_description() ? current_remote_description()->description()->GetContentByName( - data_content->name) + data_content->mid()) : nullptr; if (remote_data_content && remote_data_content->rejected) { return true; @@ -3516,9 +3740,8 @@ void SdpOfferAnswerHandler::GenerateNegotiationNeededEvent() { RTCError SdpOfferAnswerHandler::ValidateSessionDescription( const SessionDescriptionInterface* sdesc, - cricket::ContentSource source, - const std::map& - bundle_groups_by_mid) { + ContentSource source, + const std::map& bundle_groups_by_mid) { // An assumption is that a check for session error is done at a higher level. RTC_DCHECK_EQ(SessionError::kNone, session_error()); @@ -3527,11 +3750,11 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( } SdpType type = sdesc->GetType(); - if ((source == cricket::CS_LOCAL && !ExpectSetLocalDescription(type)) || - (source == cricket::CS_REMOTE && !ExpectSetRemoteDescription(type))) { + if ((source == CS_LOCAL && !ExpectSetLocalDescription(type)) || + (source == CS_REMOTE && !ExpectSetRemoteDescription(type))) { LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_STATE, - (rtc::StringBuilder("Called in wrong state: ") + (StringBuilder("Called in wrong state: ") << PeerConnectionInterface::AsString(signaling_state())) .Release()); } @@ -3542,9 +3765,7 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( } // Verify crypto settings. - std::string crypto_error; - if (webrtc_session_desc_factory_->SdesPolicy() == cricket::SEC_REQUIRED || - pc_->dtls_enabled()) { + if (pc_->dtls_enabled()) { RTCError crypto_error = VerifyCrypto( sdesc->description(), pc_->dtls_enabled(), bundle_groups_by_mid); if (!crypto_error.ok()) { @@ -3566,18 +3787,13 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( // Validate that there are no collisions of bundled header extensions ids. 
error = ValidateBundledRtpHeaderExtensions(*sdesc->description()); - RTC_HISTOGRAM_BOOLEAN("WebRTC.PeerConnection.ValidBundledExtensionIds", - error.ok()); - // TODO(bugs.webrtc.org/14782): remove killswitch after rollout. - if (!error.ok() && !pc_->trials().IsDisabled( - "WebRTC-PreventBundleHeaderExtensionIdCollision")) { + if (!error.ok()) { return error; } - // TODO(crbug.com/1459124): remove killswitch after rollout. + // Validate the SSRC groups. error = ValidateSsrcGroups(*sdesc->description()); - if (!error.ok() && - !pc_->trials().IsDisabled("WebRTC-PreventSsrcGroupsWithUnexpectedSize")) { + if (!error.ok()) { return error; } @@ -3587,6 +3803,11 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( kBundleWithoutRtcpMux); } + error = ValidatePayloadTypes(*sdesc->description()); + if (!error.ok()) { + return error; + } + // TODO(skvlad): When the local rtcp-mux policy is Require, reject any // m-lines that do not rtcp-mux enabled. @@ -3594,9 +3815,9 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { // With an answer we want to compare the new answer session description // with the offer's session description from the current negotiation. - const cricket::SessionDescription* offer_desc = - (source == cricket::CS_LOCAL) ? remote_description()->description() - : local_description()->description(); + const SessionDescription* offer_desc = + (source == CS_LOCAL) ? remote_description()->description() + : local_description()->description(); if (!MediaSectionsHaveSameCount(*offer_desc, *sdesc->description()) || !MediaSectionsInSameOrder(*offer_desc, nullptr, *sdesc->description(), type)) { @@ -3612,8 +3833,8 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( // them has a 0 port for a media section, but the other does not. This is // important to check against in the case that we are recycling an m= // section. - const cricket::SessionDescription* current_desc = nullptr; - const cricket::SessionDescription* secondary_current_desc = nullptr; + const SessionDescription* current_desc = nullptr; + const SessionDescription* secondary_current_desc = nullptr; if (local_description()) { current_desc = local_description()->description(); if (remote_description()) { @@ -3638,8 +3859,8 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( // media section. 
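// A compact restatement of the answer check referenced above: an answer (or
// provisional answer) must carry the same number of m= sections as the offer
// it responds to, with matching media types in the same order. Simplified
// stand-ins only; the real MediaSectionsHaveSameCount /
// MediaSectionsInSameOrder helpers also account for recycled (rejected or
// zero-port) sections.
#include <cstddef>
#include <vector>

enum class Media { kAudio, kVideo, kData, kUnsupported };

bool AnswerMatchesOffer(const std::vector<Media>& offer_sections,
                        const std::vector<Media>& answer_sections) {
  if (offer_sections.size() != answer_sections.size()) {
    return false;  // Answers may not add or drop m= sections.
  }
  for (std::size_t i = 0; i < offer_sections.size(); ++i) {
    if (offer_sections[i] != answer_sections[i]) {
      return false;  // Each index must keep the same media type.
    }
  }
  return true;
}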
for (const ContentInfo& content : sdesc->description()->contents()) { const MediaContentDescription& desc = *content.media_description(); - if ((desc.type() == cricket::MEDIA_TYPE_AUDIO || - desc.type() == cricket::MEDIA_TYPE_VIDEO) && + if ((desc.type() == webrtc::MediaType::AUDIO || + desc.type() == webrtc::MediaType::VIDEO) && desc.streams().size() > 1u) { LOG_AND_RETURN_ERROR( RTCErrorType::INVALID_PARAMETER, @@ -3659,12 +3880,11 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( } RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( - cricket::ContentSource source, + ContentSource source, const SessionDescriptionInterface& new_session, const SessionDescriptionInterface* old_local_description, const SessionDescriptionInterface* old_remote_description, - const std::map& - bundle_groups_by_mid) { + const std::map& bundle_groups_by_mid) { TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels"); RTC_DCHECK_RUN_ON(signaling_thread()); @@ -3686,21 +3906,21 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( const ContentInfos& new_contents = new_session.description()->contents(); for (size_t i = 0; i < new_contents.size(); ++i) { - const cricket::ContentInfo& new_content = new_contents[i]; - cricket::MediaType media_type = new_content.media_description()->type(); - mid_generator_.AddKnownId(new_content.name); - auto it = bundle_groups_by_mid.find(new_content.name); - const cricket::ContentGroup* bundle_group = + const ContentInfo& new_content = new_contents[i]; + webrtc::MediaType media_type = new_content.media_description()->type(); + mid_generator_.AddKnownId(new_content.mid()); + auto it = bundle_groups_by_mid.find(new_content.mid()); + const ContentGroup* bundle_group = it != bundle_groups_by_mid.end() ? it->second : nullptr; - if (media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO) { - const cricket::ContentInfo* old_local_content = nullptr; + if (media_type == webrtc::MediaType::AUDIO || + media_type == webrtc::MediaType::VIDEO) { + const ContentInfo* old_local_content = nullptr; if (old_local_description && i < old_local_description->description()->contents().size()) { old_local_content = &old_local_description->description()->contents()[i]; } - const cricket::ContentInfo* old_remote_content = nullptr; + const ContentInfo* old_remote_content = nullptr; if (old_remote_description && i < old_remote_description->description()->contents().size()) { old_remote_content = @@ -3724,7 +3944,7 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( // Handle locally rejected content. This code path is only needed for apps // that SDP munge. Remote rejected content is handled in // ApplyRemoteDescriptionUpdateTransceiverState(). - if (source == cricket::ContentSource::CS_LOCAL && new_content.rejected) { + if (source == ContentSource::CS_LOCAL && new_content.rejected) { // Local offer. if (new_session.GetType() == SdpType::kOffer) { // If the RtpTransceiver API was used, it would already have made the @@ -3754,18 +3974,20 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( if (!error.ok()) { return error; } - } else if (media_type == cricket::MEDIA_TYPE_DATA) { - if (pc_->GetDataMid() && new_content.name != *(pc_->GetDataMid())) { + } else if (media_type == webrtc::MediaType::DATA) { + const auto data_mid = pc_->sctp_mid(); + if (data_mid && new_content.mid() != data_mid.value()) { // Ignore all but the first data section. 
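// A rough standalone illustration of the "first data section wins" rule in
// the branch above: once an SCTP mid is established (pc_->sctp_mid()), every
// other data m= section is ignored. A plain struct stands in for ContentInfo.
#include <optional>
#include <string>
#include <vector>

struct DataSection {
  std::string mid;
  bool is_data;
};

std::vector<std::string> IgnoredDataMids(
    const std::vector<DataSection>& sections,
    std::optional<std::string> established_sctp_mid) {
  std::optional<std::string> selected = established_sctp_mid;
  std::vector<std::string> ignored;
  for (const DataSection& s : sections) {
    if (!s.is_data) continue;
    if (!selected) {
      selected = s.mid;  // The first data section becomes the SCTP section.
    } else if (s.mid != *selected) {
      ignored.push_back(s.mid);  // Logged and skipped in the code above.
    }
  }
  return ignored;
}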
RTC_LOG(LS_INFO) << "Ignoring data media section with MID=" - << new_content.name; + << new_content.mid(); continue; } - RTCError error = UpdateDataChannel(source, new_content, bundle_group); + RTCError error = + UpdateDataChannelTransport(source, new_content, bundle_group); if (!error.ok()) { return error; } - } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) { + } else if (media_type == webrtc::MediaType::UNSUPPORTED) { RTC_LOG(LS_INFO) << "Ignoring unsupported media type"; } else { LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, @@ -3776,9 +3998,9 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( return RTCError::OK(); } -RTCErrorOr>> +RTCErrorOr>> SdpOfferAnswerHandler::AssociateTransceiver( - cricket::ContentSource source, + ContentSource source, SdpType type, size_t mline_index, const ContentInfo& content, @@ -3793,10 +4015,9 @@ SdpOfferAnswerHandler::AssociateTransceiver( // should have been removed by RemoveStoppedtransceivers()-> if (IsMediaSectionBeingRecycled(type, content, old_local_content, old_remote_content)) { - const std::string& old_mid = - (old_local_content && old_local_content->rejected) - ? old_local_content->name - : old_remote_content->name; + const auto old_mid = (old_local_content && old_local_content->rejected) + ? old_local_content->mid() + : old_remote_content->mid(); auto old_transceiver = transceivers()->FindByMid(old_mid); // The transceiver should be disassociated in RemoveStoppedTransceivers() RTC_DCHECK(!old_transceiver); @@ -3804,8 +4025,8 @@ SdpOfferAnswerHandler::AssociateTransceiver( #endif const MediaContentDescription* media_desc = content.media_description(); - auto transceiver = transceivers()->FindByMid(content.name); - if (source == cricket::CS_LOCAL) { + auto transceiver = transceivers()->FindByMid(content.mid()); + if (source == CS_LOCAL) { // Find the RtpTransceiver that corresponds to this m= section, using the // mapping between transceivers and m= section indices established when // creating the offer. @@ -3818,7 +4039,7 @@ SdpOfferAnswerHandler::AssociateTransceiver( "Transceiver not found based on m-line index"); } } else { - RTC_DCHECK_EQ(source, cricket::CS_REMOTE); + RTC_DCHECK_EQ(source, CS_REMOTE); // If the m= section is sendrecv or recvonly, and there are RtpTransceivers // of the same type... // When simulcast is requested, a transceiver cannot be associated because @@ -3832,11 +4053,11 @@ SdpOfferAnswerHandler::AssociateTransceiver( // recvonly direction. if (!transceiver) { RTC_LOG(LS_INFO) << "Adding " - << cricket::MediaTypeToString(media_desc->type()) - << " transceiver for MID=" << content.name + << webrtc::MediaTypeToString(media_desc->type()) + << " transceiver for MID=" << content.mid() << " at i=" << mline_index << " in response to the remote description."; - std::string sender_id = rtc::CreateRandomUuid(); + std::string sender_id = CreateRandomUuid(); std::vector send_encodings = GetSendEncodingsFromRemoteDescription(*media_desc); auto sender = rtp_manager()->CreateSender(media_desc->type(), sender_id, @@ -3845,7 +4066,7 @@ SdpOfferAnswerHandler::AssociateTransceiver( if (!media_desc->streams().empty()) { receiver_id = media_desc->streams()[0].id; } else { - receiver_id = rtc::CreateRandomUuid(); + receiver_id = CreateRandomUuid(); } auto receiver = rtp_manager()->CreateReceiver(media_desc->type(), receiver_id); @@ -3881,7 +4102,7 @@ SdpOfferAnswerHandler::AssociateTransceiver( if (media_desc->HasSimulcast()) { std::vector layers = - source == cricket::CS_LOCAL + source == CS_LOCAL ? 
media_desc->simulcast_description().send_layers().GetAllLayers() : media_desc->simulcast_description() .receive_layers() @@ -3894,7 +4115,7 @@ SdpOfferAnswerHandler::AssociateTransceiver( } } if (type == SdpType::kOffer) { - bool state_changes = transceiver->internal()->mid() != content.name || + bool state_changes = transceiver->internal()->mid() != content.mid() || transceiver->internal()->mline_index() != mline_index; if (state_changes) { transceivers() @@ -3907,20 +4128,19 @@ SdpOfferAnswerHandler::AssociateTransceiver( // setting the value of the RtpTransceiver's mid property to the MID of the m= // section, and establish a mapping between the transceiver and the index of // the m= section. - transceiver->internal()->set_mid(content.name); + transceiver->internal()->set_mid(content.mid()); transceiver->internal()->set_mline_index(mline_index); return std::move(transceiver); } RTCError SdpOfferAnswerHandler::UpdateTransceiverChannel( - rtc::scoped_refptr> - transceiver, - const cricket::ContentInfo& content, - const cricket::ContentGroup* bundle_group) { + scoped_refptr> transceiver, + const ContentInfo& content, + const ContentGroup* bundle_group) { TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::UpdateTransceiverChannel"); RTC_DCHECK(IsUnifiedPlan()); RTC_DCHECK(transceiver); - cricket::ChannelInterface* channel = transceiver->internal()->channel(); + ChannelInterface* channel = transceiver->internal()->channel(); if (content.rejected) { if (channel) { transceiver->internal()->ClearChannel(); @@ -3928,7 +4148,7 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiverChannel( } else { if (!channel) { auto error = transceiver->internal()->CreateChannel( - content.name, pc_->call_ptr(), pc_->configuration()->media_config, + content.mid(), pc_->call_ptr(), pc_->configuration()->media_config, pc_->SrtpRequired(), pc_->GetCryptoOptions(), audio_options(), video_options(), video_bitrate_allocator_factory_.get(), [&](absl::string_view mid) { @@ -3943,20 +4163,20 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiverChannel( return RTCError::OK(); } -RTCError SdpOfferAnswerHandler::UpdateDataChannel( - cricket::ContentSource source, - const cricket::ContentInfo& content, - const cricket::ContentGroup* bundle_group) { +RTCError SdpOfferAnswerHandler::UpdateDataChannelTransport( + ContentSource source, + const ContentInfo& content, + const ContentGroup* bundle_group) { if (content.rejected) { RTC_LOG(LS_INFO) << "Rejected data channel transport with mid=" << content.mid(); - rtc::StringBuilder sb; + StringBuilder sb; sb << "Rejected data channel transport with mid=" << content.mid(); RTCError error(RTCErrorType::OPERATION_ERROR_WITH_DATA, sb.Release()); error.set_error_detail(RTCErrorDetailType::DATA_CHANNEL_FAILURE); - DestroyDataChannelTransport(error); - } else if (!CreateDataChannel(content.name)) { + pc_->DestroyDataChannelTransport(error); + } else if (!pc_->CreateDataChannelTransport(content.mid())) { LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, "Failed to create data channel."); } @@ -3988,29 +4208,29 @@ bool SdpOfferAnswerHandler::ExpectSetRemoteDescription(SdpType type) { } void SdpOfferAnswerHandler::FillInMissingRemoteMids( - cricket::SessionDescription* new_remote_description) { + SessionDescription* new_remote_description) { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(new_remote_description); - const cricket::ContentInfos no_infos; - const cricket::ContentInfos& local_contents = + const ContentInfos no_infos; + const ContentInfos& local_contents = (local_description() ? 
local_description()->description()->contents() : no_infos); - const cricket::ContentInfos& remote_contents = + const ContentInfos& remote_contents = (remote_description() ? remote_description()->description()->contents() : no_infos); for (size_t i = 0; i < new_remote_description->contents().size(); ++i) { - cricket::ContentInfo& content = new_remote_description->contents()[i]; - if (!content.name.empty()) { + ContentInfo& content = new_remote_description->contents()[i]; + if (!content.mid().empty()) { continue; } std::string new_mid; absl::string_view source_explanation; if (IsUnifiedPlan()) { if (i < local_contents.size()) { - new_mid = local_contents[i].name; + new_mid = local_contents[i].mid(); source_explanation = "from the matching local media section"; } else if (i < remote_contents.size()) { - new_mid = remote_contents[i].name; + new_mid = remote_contents[i].mid(); source_explanation = "from the matching previous remote media section"; } else { new_mid = mid_generator_.GenerateString(); @@ -4022,7 +4242,7 @@ void SdpOfferAnswerHandler::FillInMissingRemoteMids( source_explanation = "to match pre-existing behavior"; } RTC_DCHECK(!new_mid.empty()); - content.name = new_mid; + content.set_mid(new_mid); new_remote_description->transport_infos()[i].content_name = new_mid; RTC_LOG(LS_INFO) << "SetRemoteDescription: Remote media section at i=" << i << " is missing an a=mid line. Filling in the value '" @@ -4030,9 +4250,9 @@ void SdpOfferAnswerHandler::FillInMissingRemoteMids( } } -rtc::scoped_refptr> +scoped_refptr> SdpOfferAnswerHandler::FindAvailableTransceiverToReceive( - cricket::MediaType media_type) const { + webrtc::MediaType media_type) const { RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(IsUnifiedPlan()); // From JSEP section 5.10 (Applying a Remote Description): @@ -4050,8 +4270,7 @@ SdpOfferAnswerHandler::FindAvailableTransceiverToReceive( return nullptr; } -const cricket::ContentInfo* -SdpOfferAnswerHandler::FindMediaSectionForTransceiver( +const ContentInfo* SdpOfferAnswerHandler::FindMediaSectionForTransceiver( const RtpTransceiver* transceiver, const SessionDescriptionInterface* sdesc) const { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -4066,14 +4285,14 @@ SdpOfferAnswerHandler::FindMediaSectionForTransceiver( } else { // Plan B only allows at most one audio and one video section, so use the // first media section of that type. 
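// Plan B (the comment above) allows at most one audio and one video m=
// section, so locating a transceiver's media section reduces to taking the
// first content of the matching type. A simplified stand-in for
// GetFirstMediaContent:
#include <string>
#include <vector>

enum class MediaKind { kAudio, kVideo, kData, kUnsupported };

struct SimpleContent {
  std::string mid;
  MediaKind kind;
};

const SimpleContent* FirstContentOfKind(
    const std::vector<SimpleContent>& contents, MediaKind kind) {
  for (const SimpleContent& content : contents) {
    if (content.kind == kind) {
      return &content;
    }
  }
  return nullptr;  // No m= section of this kind in the description.
}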
- return cricket::GetFirstMediaContent(sdesc->description()->contents(), - transceiver->media_type()); + return GetFirstMediaContent(sdesc->description()->contents(), + transceiver->media_type()); } } void SdpOfferAnswerHandler::GetOptionsForOffer( const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { + MediaSessionOptions* session_options) { RTC_DCHECK_RUN_ON(signaling_thread()); ExtractSharedMediaSessionOptions(offer_answer_options, session_options); @@ -4108,7 +4327,7 @@ void SdpOfferAnswerHandler::GetOptionsForOffer( void SdpOfferAnswerHandler::GetOptionsForPlanBOffer( const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { + MediaSessionOptions* session_options) { bool offer_new_data_description = data_channel_controller()->HasUsedDataChannels(); bool send_audio = false; @@ -4146,9 +4365,9 @@ void SdpOfferAnswerHandler::GetOptionsForPlanBOffer( (offer_answer_options.offer_to_receive_video > 0); } } - absl::optional audio_index; - absl::optional video_index; - absl::optional data_index; + std::optional audio_index; + std::optional video_index; + std::optional data_index; // If a current description exists, generate m= sections in the same order, // using the first audio/video/data section that appears and rejecting // extraneous ones. @@ -4163,8 +4382,8 @@ void SdpOfferAnswerHandler::GetOptionsForPlanBOffer( if (ConfiguredForMedia()) { // Add audio/video/data m= sections to the end if needed. if (!audio_index && offer_new_audio_description) { - cricket::MediaDescriptionOptions options( - cricket::MEDIA_TYPE_AUDIO, cricket::CN_AUDIO, + MediaDescriptionOptions options( + webrtc::MediaType::AUDIO, CN_AUDIO, RtpTransceiverDirectionFromSendRecv(send_audio, recv_audio), false); options.header_extensions = media_engine()->voice().GetRtpHeaderExtensions(); @@ -4172,19 +4391,19 @@ void SdpOfferAnswerHandler::GetOptionsForPlanBOffer( audio_index = session_options->media_description_options.size() - 1; } if (!video_index && offer_new_video_description) { - cricket::MediaDescriptionOptions options( - cricket::MEDIA_TYPE_VIDEO, cricket::CN_VIDEO, + MediaDescriptionOptions options( + webrtc::MediaType::VIDEO, CN_VIDEO, RtpTransceiverDirectionFromSendRecv(send_video, recv_video), false); options.header_extensions = media_engine()->video().GetRtpHeaderExtensions(); session_options->media_description_options.push_back(options); video_index = session_options->media_description_options.size() - 1; } - cricket::MediaDescriptionOptions* audio_media_description_options = + MediaDescriptionOptions* audio_media_description_options = !audio_index ? nullptr : &session_options->media_description_options[*audio_index]; - cricket::MediaDescriptionOptions* video_media_description_options = + MediaDescriptionOptions* video_media_description_options = !video_index ? 
nullptr : &session_options->media_description_options[*video_index]; @@ -4196,13 +4415,13 @@ void SdpOfferAnswerHandler::GetOptionsForPlanBOffer( } if (!data_index && offer_new_data_description) { session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForActiveData(cricket::CN_DATA)); + GetMediaDescriptionOptionsForActiveData(CN_DATA)); } } void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer( const RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { + MediaSessionOptions* session_options) { // Rules for generating an offer are dictated by JSEP sections 5.2.1 (Initial // Offers) and 5.2.2 (Subsequent Offers). RTC_DCHECK_EQ(session_options->media_description_options.size(), 0); @@ -4235,12 +4454,12 @@ void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer( (current_local_content && current_local_content->rejected) || (current_remote_content && current_remote_content->rejected); const std::string& mid = - (local_content ? local_content->name : remote_content->name); - cricket::MediaType media_type = + (local_content ? local_content->mid() : remote_content->mid()); + webrtc::MediaType media_type = (local_content ? local_content->media_description()->type() : remote_content->media_description()->type()); - if (media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO) { + if (media_type == webrtc::MediaType::AUDIO || + media_type == webrtc::MediaType::VIDEO) { // A media section is considered eligible for recycling if it is marked as // rejected in either the current local or current remote description. auto transceiver = transceivers()->FindByMid(mid); @@ -4248,19 +4467,18 @@ void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer( // No associated transceiver. The media section has been stopped. recycleable_mline_indices.push(i); session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions(media_type, mid, - RtpTransceiverDirection::kInactive, - /*stopped=*/true)); + MediaDescriptionOptions(media_type, mid, + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); } else { // NOTE: a stopping transceiver should be treated as a stopped one in // createOffer as specified in // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-createoffer. 
if (had_been_rejected && transceiver->stopping()) { session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions( - transceiver->media_type(), mid, - RtpTransceiverDirection::kInactive, - /*stopped=*/true)); + MediaDescriptionOptions(transceiver->media_type(), mid, + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); recycleable_mline_indices.push(i); } else { session_options->media_description_options.push_back( @@ -4275,23 +4493,26 @@ void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer( transceiver->internal()->set_mline_index(i); } } - } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) { + } else if (media_type == webrtc::MediaType::UNSUPPORTED) { RTC_DCHECK(local_content->rejected); session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions(media_type, mid, - RtpTransceiverDirection::kInactive, - /*stopped=*/true)); + MediaDescriptionOptions(media_type, mid, + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); } else { - RTC_CHECK_EQ(cricket::MEDIA_TYPE_DATA, media_type); + RTC_CHECK_EQ(webrtc::MediaType::DATA, media_type); if (had_been_rejected) { session_options->media_description_options.push_back( GetMediaDescriptionOptionsForRejectedData(mid)); } else { - RTC_CHECK(pc_->GetDataMid()); - if (mid == *(pc_->GetDataMid())) { + const auto data_mid = pc_->sctp_mid(); + if (data_mid.has_value() && mid == data_mid.value()) { session_options->media_description_options.push_back( GetMediaDescriptionOptionsForActiveData(mid)); } else { + if (!data_mid.has_value()) { + RTC_LOG(LS_ERROR) << "Datachannel transport not available: " << mid; + } session_options->media_description_options.push_back( GetMediaDescriptionOptionsForRejectedData(mid)); } @@ -4329,15 +4550,15 @@ void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer( } // Lastly, add a m-section if we have requested local data channels and an // m section does not already exist. - if (!pc_->GetDataMid() && data_channel_controller()->HasDataChannels()) { + if (!pc_->sctp_mid() && data_channel_controller()->HasDataChannels()) { // Attempt to recycle a stopped m-line. - // TODO(crbug.com/1442604): GetDataMid() should return the mid if one was + // TODO(crbug.com/1442604): sctp_mid() should return the mid if one was // ever created but rejected. 
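// A sketch of the recycling step that follows this TODO: when local data
// channels exist but no data m= section does, a previously stopped entry in
// the session options is reused in place (keeping its MID) before a
// brand-new section is appended. Simplified stand-ins for
// MediaDescriptionOptions; `new_mid` plays the role of a freshly generated
// MID.
#include <string>
#include <vector>

enum class MKind { kAudio, kVideo, kData };

struct MSectionOptions {
  MKind kind;
  std::string mid;
  bool stopped = false;
};

void AddOrRecycleDataSection(std::vector<MSectionOptions>& options,
                             const std::string& new_mid) {
  for (MSectionOptions& o : options) {
    if (o.kind == MKind::kData && o.stopped) {
      o.stopped = false;  // Recycle the stopped m-line, keeping its MID.
      return;
    }
  }
  options.push_back({MKind::kData, new_mid, /*stopped=*/false});
}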
bool recycled = false; for (size_t i = 0; i < session_options->media_description_options.size(); i++) { auto media_description = session_options->media_description_options[i]; - if (media_description.type == cricket::MEDIA_TYPE_DATA && + if (media_description.type == webrtc::MediaType::DATA && media_description.stopped) { session_options->media_description_options[i] = GetMediaDescriptionOptionsForActiveData(media_description.mid); @@ -4355,7 +4576,7 @@ void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer( void SdpOfferAnswerHandler::GetOptionsForAnswer( const RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { + MediaSessionOptions* session_options) { RTC_DCHECK_RUN_ON(signaling_thread()); ExtractSharedMediaSessionOptions(offer_answer_options, session_options); @@ -4380,7 +4601,7 @@ void SdpOfferAnswerHandler::GetOptionsForAnswer( void SdpOfferAnswerHandler::GetOptionsForPlanBAnswer( const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { + MediaSessionOptions* session_options) { bool send_audio = false; bool recv_audio = false; bool send_video = false; @@ -4409,9 +4630,9 @@ void SdpOfferAnswerHandler::GetOptionsForPlanBAnswer( } } - absl::optional audio_index; - absl::optional video_index; - absl::optional data_index; + std::optional audio_index; + std::optional video_index; + std::optional data_index; // Generate m= sections that match those in the offer. // Note that mediasession.cc will handle intersection our preferred @@ -4422,10 +4643,10 @@ void SdpOfferAnswerHandler::GetOptionsForPlanBAnswer( RtpTransceiverDirectionFromSendRecv(send_video, recv_video), &audio_index, &video_index, &data_index, session_options); - cricket::MediaDescriptionOptions* audio_media_description_options = + MediaDescriptionOptions* audio_media_description_options = !audio_index ? nullptr : &session_options->media_description_options[*audio_index]; - cricket::MediaDescriptionOptions* video_media_description_options = + MediaDescriptionOptions* video_media_description_options = !video_index ? nullptr : &session_options->media_description_options[*video_index]; @@ -4439,47 +4660,49 @@ void SdpOfferAnswerHandler::GetOptionsForPlanBAnswer( void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanAnswer( const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) { + MediaSessionOptions* session_options) { // Rules for generating an answer are dictated by JSEP sections 5.3.1 (Initial // Answers) and 5.3.2 (Subsequent Answers). RTC_DCHECK(remote_description()); RTC_DCHECK(remote_description()->GetType() == SdpType::kOffer); for (const ContentInfo& content : remote_description()->description()->contents()) { - cricket::MediaType media_type = content.media_description()->type(); - if (media_type == cricket::MEDIA_TYPE_AUDIO || - media_type == cricket::MEDIA_TYPE_VIDEO) { - auto transceiver = transceivers()->FindByMid(content.name); + webrtc::MediaType media_type = content.media_description()->type(); + if (media_type == webrtc::MediaType::AUDIO || + media_type == webrtc::MediaType::VIDEO) { + auto transceiver = transceivers()->FindByMid(content.mid()); if (transceiver) { session_options->media_description_options.push_back( GetMediaDescriptionOptionsForTransceiver( - transceiver->internal(), content.name, + transceiver->internal(), content.mid(), /*is_create_offer=*/false)); } else { // This should only happen with rejected transceivers. 
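// The answer-side loop above boils down to: for every m= section in the
// remote offer, answer with the associated transceiver's options when a
// transceiver exists for that MID, otherwise (only possible for rejected
// sections) answer inactive and stopped. A rough sketch; the map stands in
// for transceivers()->FindByMid() and the stored direction for
// GetMediaDescriptionOptionsForTransceiver().
#include <map>
#include <string>

enum class AnswerDir { kSendRecv, kSendOnly, kRecvOnly, kInactive };

struct AnswerSection {
  std::string mid;
  AnswerDir direction;
  bool stopped;
};

AnswerSection AnswerForRemoteSection(
    const std::string& mid,
    const std::map<std::string, AnswerDir>& transceiver_direction_by_mid) {
  auto it = transceiver_direction_by_mid.find(mid);
  if (it != transceiver_direction_by_mid.end()) {
    return {mid, it->second, /*stopped=*/false};
  }
  return {mid, AnswerDir::kInactive, /*stopped=*/true};
}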
RTC_DCHECK(content.rejected); session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions(media_type, content.name, - RtpTransceiverDirection::kInactive, - /*stopped=*/true)); + MediaDescriptionOptions(media_type, content.mid(), + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); } - } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) { + } else if (media_type == webrtc::MediaType::UNSUPPORTED) { RTC_DCHECK(content.rejected); session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions(media_type, content.name, - RtpTransceiverDirection::kInactive, - /*stopped=*/true)); + MediaDescriptionOptions(media_type, content.mid(), + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); } else { - RTC_CHECK_EQ(cricket::MEDIA_TYPE_DATA, media_type); + RTC_CHECK_EQ(webrtc::MediaType::DATA, media_type); // Reject all data sections if data channels are disabled. // Reject a data section if it has already been rejected. // Reject all data sections except for the first one. - if (content.rejected || content.name != *(pc_->GetDataMid())) { + auto sctp_mid = pc_->sctp_mid(); + if (content.rejected || + (sctp_mid.has_value() && content.mid() != *sctp_mid)) { session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForRejectedData(content.name)); + GetMediaDescriptionOptionsForRejectedData(content.mid())); } else { session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForActiveData(content.name)); + GetMediaDescriptionOptionsForActiveData(content.mid())); } } } @@ -4501,7 +4724,7 @@ const char* SdpOfferAnswerHandler::SessionErrorToString( std::string SdpOfferAnswerHandler::GetSessionErrorMsg() { RTC_DCHECK_RUN_ON(signaling_thread()); - rtc::StringBuilder desc; + StringBuilder desc; desc << kSessionError << SessionErrorToString(session_error()) << ". 
"; desc << kSessionErrorDesc << session_error_desc() << "."; return desc.Release(); @@ -4523,9 +4746,9 @@ RTCError SdpOfferAnswerHandler::HandleLegacyOfferOptions( if (options.offer_to_receive_audio == 0) { RemoveRecvDirectionFromReceivingTransceiversOfType( - cricket::MEDIA_TYPE_AUDIO); + webrtc::MediaType::AUDIO); } else if (options.offer_to_receive_audio == 1) { - AddUpToOneReceivingTransceiverOfType(cricket::MEDIA_TYPE_AUDIO); + AddUpToOneReceivingTransceiverOfType(webrtc::MediaType::AUDIO); } else if (options.offer_to_receive_audio > 1) { LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_PARAMETER, "offer_to_receive_audio > 1 is not supported."); @@ -4533,9 +4756,9 @@ RTCError SdpOfferAnswerHandler::HandleLegacyOfferOptions( if (options.offer_to_receive_video == 0) { RemoveRecvDirectionFromReceivingTransceiversOfType( - cricket::MEDIA_TYPE_VIDEO); + webrtc::MediaType::VIDEO); } else if (options.offer_to_receive_video == 1) { - AddUpToOneReceivingTransceiverOfType(cricket::MEDIA_TYPE_VIDEO); + AddUpToOneReceivingTransceiverOfType(webrtc::MediaType::VIDEO); } else if (options.offer_to_receive_video > 1) { LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_PARAMETER, "offer_to_receive_video > 1 is not supported."); @@ -4545,12 +4768,12 @@ RTCError SdpOfferAnswerHandler::HandleLegacyOfferOptions( } void SdpOfferAnswerHandler::RemoveRecvDirectionFromReceivingTransceiversOfType( - cricket::MediaType media_type) { + webrtc::MediaType media_type) { for (const auto& transceiver : GetReceivingTransceiversOfType(media_type)) { RtpTransceiverDirection new_direction = RtpTransceiverDirectionWithRecvSet(transceiver->direction(), false); if (new_direction != transceiver->direction()) { - RTC_LOG(LS_INFO) << "Changing " << cricket::MediaTypeToString(media_type) + RTC_LOG(LS_INFO) << "Changing " << webrtc::MediaTypeToString(media_type) << " transceiver (MID=" << transceiver->mid().value_or("") << ") from " << RtpTransceiverDirectionToString( @@ -4564,11 +4787,11 @@ void SdpOfferAnswerHandler::RemoveRecvDirectionFromReceivingTransceiversOfType( } void SdpOfferAnswerHandler::AddUpToOneReceivingTransceiverOfType( - cricket::MediaType media_type) { + webrtc::MediaType media_type) { RTC_DCHECK_RUN_ON(signaling_thread()); if (GetReceivingTransceiversOfType(media_type).empty()) { RTC_LOG(LS_INFO) - << "Adding one recvonly " << cricket::MediaTypeToString(media_type) + << "Adding one recvonly " << webrtc::MediaTypeToString(media_type) << " transceiver since CreateOffer specified offer_to_receive=1"; RtpTransceiverInit init; init.direction = RtpTransceiverDirection::kRecvOnly; @@ -4577,11 +4800,10 @@ void SdpOfferAnswerHandler::AddUpToOneReceivingTransceiverOfType( } } -std::vector>> +std::vector>> SdpOfferAnswerHandler::GetReceivingTransceiversOfType( - cricket::MediaType media_type) { - std::vector< - rtc::scoped_refptr>> + webrtc::MediaType media_type) { + std::vector>> receiving_transceivers; for (const auto& transceiver : transceivers()->List()) { if (!transceiver->stopped() && transceiver->media_type() == media_type && @@ -4593,14 +4815,13 @@ SdpOfferAnswerHandler::GetReceivingTransceiversOfType( } void SdpOfferAnswerHandler::ProcessRemovalOfRemoteTrack( - rtc::scoped_refptr> - transceiver, - std::vector>* remove_list, - std::vector>* removed_streams) { + scoped_refptr> transceiver, + std::vector>* remove_list, + std::vector>* removed_streams) { RTC_DCHECK(transceiver->mid()); RTC_LOG(LS_INFO) << "Processing the removal of a track for MID=" << *transceiver->mid(); - std::vector> previous_streams = + std::vector> 
previous_streams = transceiver->internal()->receiver_internal()->streams(); // This will remove the remote track from the streams. transceiver->internal()->receiver_internal()->set_stream_ids({}); @@ -4609,8 +4830,8 @@ void SdpOfferAnswerHandler::ProcessRemovalOfRemoteTrack( } void SdpOfferAnswerHandler::RemoveRemoteStreamsIfEmpty( - const std::vector>& remote_streams, - std::vector>* removed_streams) { + const std::vector>& remote_streams, + std::vector>* removed_streams) { RTC_DCHECK_RUN_ON(signaling_thread()); // TODO(https://crbug.com/webrtc/9480): When we use stream IDs instead of // streams, see if the stream was removed by checking if this was the last @@ -4624,16 +4845,16 @@ void SdpOfferAnswerHandler::RemoveRemoteStreamsIfEmpty( } } -void SdpOfferAnswerHandler::RemoveSenders(cricket::MediaType media_type) { +void SdpOfferAnswerHandler::RemoveSenders(webrtc::MediaType media_type) { RTC_DCHECK_RUN_ON(signaling_thread()); - UpdateLocalSenders(std::vector(), media_type); - UpdateRemoteSendersList(std::vector(), false, - media_type, nullptr); + UpdateLocalSenders(std::vector(), media_type); + UpdateRemoteSendersList(std::vector(), false, media_type, + nullptr); } void SdpOfferAnswerHandler::UpdateLocalSenders( - const std::vector& streams, - cricket::MediaType media_type) { + const std::vector& streams, + webrtc::MediaType media_type) { TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::UpdateLocalSenders"); RTC_DCHECK_RUN_ON(signaling_thread()); std::vector* current_senders = @@ -4645,8 +4866,7 @@ void SdpOfferAnswerHandler::UpdateLocalSenders( sender_it != current_senders->end(); /* incremented manually */) { const RtpSenderInfo& info = *sender_it; - const cricket::StreamParams* params = - cricket::GetStreamBySsrc(streams, info.first_ssrc); + const StreamParams* params = GetStreamBySsrc(streams, info.first_ssrc); if (!params || params->id != info.sender_id || params->first_stream_id() != info.stream_id) { rtp_manager()->OnLocalSenderRemoved(info, media_type); @@ -4657,7 +4877,7 @@ void SdpOfferAnswerHandler::UpdateLocalSenders( } // Find new and active senders. - for (const cricket::StreamParams& params : streams) { + for (const StreamParams& params : streams) { // The sync_label is the MediaStream label and the `stream.id` is the // sender id. const std::string& stream_id = params.first_stream_id(); @@ -4673,9 +4893,9 @@ void SdpOfferAnswerHandler::UpdateLocalSenders( } void SdpOfferAnswerHandler::UpdateRemoteSendersList( - const cricket::StreamParamsVec& streams, + const StreamParamsVec& streams, bool default_sender_needed, - cricket::MediaType media_type, + webrtc::MediaType media_type, StreamCollection* new_streams) { TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::UpdateRemoteSendersList"); RTC_DCHECK_RUN_ON(signaling_thread()); @@ -4690,8 +4910,7 @@ void SdpOfferAnswerHandler::UpdateRemoteSendersList( sender_it != current_senders->end(); /* incremented manually */) { const RtpSenderInfo& info = *sender_it; - const cricket::StreamParams* params = - cricket::GetStreamBySsrc(streams, info.first_ssrc); + const StreamParams* params = GetStreamBySsrc(streams, info.first_ssrc); std::string params_stream_id; if (params) { params_stream_id = @@ -4712,7 +4931,7 @@ void SdpOfferAnswerHandler::UpdateRemoteSendersList( } // Find new and active senders. - for (const cricket::StreamParams& params : streams) { + for (const StreamParams& params : streams) { if (!params.has_ssrcs()) { // The remote endpoint has streams, but didn't signal ssrcs. 
For an active // sender, this means it is coming from a Unified Plan endpoint,so we just @@ -4732,11 +4951,11 @@ void SdpOfferAnswerHandler::UpdateRemoteSendersList( const std::string& sender_id = params.id; uint32_t ssrc = params.first_ssrc(); - rtc::scoped_refptr stream( + scoped_refptr stream( remote_streams_->find(stream_id)); if (!stream) { // This is a new MediaStream. Create a new remote MediaStream. - stream = MediaStreamProxy::Create(rtc::Thread::Current(), + stream = MediaStreamProxy::Create(Thread::Current(), MediaStream::Create(stream_id)); remote_streams_->AddStream(stream); new_streams->AddStream(stream); @@ -4753,16 +4972,16 @@ void SdpOfferAnswerHandler::UpdateRemoteSendersList( // Add default sender if necessary. if (default_sender_needed) { - rtc::scoped_refptr default_stream( + scoped_refptr default_stream( remote_streams_->find(kDefaultStreamId)); if (!default_stream) { // Create the new default MediaStream. default_stream = MediaStreamProxy::Create( - rtc::Thread::Current(), MediaStream::Create(kDefaultStreamId)); + Thread::Current(), MediaStream::Create(kDefaultStreamId)); remote_streams_->AddStream(default_stream); new_streams->AddStream(default_stream); } - std::string default_sender_id = (media_type == cricket::MEDIA_TYPE_AUDIO) + std::string default_sender_id = (media_type == webrtc::MediaType::AUDIO) ? kDefaultAudioSenderId : kDefaultVideoSenderId; const RtpSenderInfo* default_sender_info = rtp_manager()->FindSenderInfo( @@ -4783,7 +5002,7 @@ void SdpOfferAnswerHandler::EnableSending() { return; } for (const auto& transceiver : transceivers()->ListInternal()) { - cricket::ChannelInterface* channel = transceiver->channel(); + ChannelInterface* channel = transceiver->channel(); if (channel) { channel->Enable(true); } @@ -4792,13 +5011,11 @@ void SdpOfferAnswerHandler::EnableSending() { RTCError SdpOfferAnswerHandler::PushdownMediaDescription( SdpType type, - cricket::ContentSource source, - const std::map& - bundle_groups_by_mid) { + ContentSource source, + const std::map& bundle_groups_by_mid) { TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::PushdownMediaDescription"); const SessionDescriptionInterface* sdesc = - (source == cricket::CS_LOCAL ? local_description() - : remote_description()); + (source == CS_LOCAL ? local_description() : remote_description()); RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(sdesc); @@ -4815,13 +5032,14 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription( // Push down the new SDP media section for each audio/video transceiver. auto rtp_transceivers = transceivers()->ListInternal(); - std::vector< - std::pair> + std::vector> channels; + bool use_ccfb = false; + bool seen_ccfb = false; for (const auto& transceiver : rtp_transceivers) { const ContentInfo* content_info = FindMediaSectionForTransceiver(transceiver, sdesc); - cricket::ChannelInterface* channel = transceiver->channel(); + ChannelInterface* channel = transceiver->channel(); if (!channel || !content_info || content_info->rejected) { continue; } @@ -4830,6 +5048,17 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription( if (!content_desc) { continue; } + // RFC 8888 says that the ccfb must be consistent across the description. 
+ if (seen_ccfb) { + if (use_ccfb != content_desc->rtcp_fb_ack_ccfb()) { + RTC_LOG(LS_ERROR) + << "Warning: Inconsistent CCFB flag - CCFB turned off"; + use_ccfb = false; + } + } else { + use_ccfb = content_desc->rtcp_fb_ack_ccfb(); + seen_ccfb = true; + } transceiver->OnNegotiationUpdate(type, content_desc); channels.push_back(std::make_pair(channel, content_desc)); @@ -4848,7 +5077,7 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription( for (const auto& entry : channels) { std::string error; bool success = context_->worker_thread()->BlockingCall([&]() { - return (source == cricket::CS_LOCAL) + return (source == CS_LOCAL) ? entry.first->SetLocalContent(entry.second, type, error) : entry.first->SetRemoteContent(entry.second, type, error); }); @@ -4856,14 +5085,25 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription( LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error); } } + // If local and remote are both set, we assume that it's safe to trigger + // CCFB. + if (context_->env().field_trials().IsEnabled( + "WebRTC-RFC8888CongestionControlFeedback")) { + if (use_ccfb && local_description() && remote_description()) { + // The call and the congestion controller live on the worker thread. + context_->worker_thread()->PostTask([call = pc_->call_ptr()] { + call->EnableSendCongestionControlFeedbackAccordingToRfc8888(); + }); + } + } } // Need complete offer/answer with an SCTP m= section before starting SCTP, // according to https://tools.ietf.org/html/draft-ietf-mmusic-sctp-sdp-19 if (pc_->sctp_mid() && local_description() && remote_description()) { - auto local_sctp_description = cricket::GetFirstSctpDataContentDescription( - local_description()->description()); - auto remote_sctp_description = cricket::GetFirstSctpDataContentDescription( - remote_description()->description()); + auto local_sctp_description = + GetFirstSctpDataContentDescription(local_description()->description()); + auto remote_sctp_description = + GetFirstSctpDataContentDescription(remote_description()->description()); if (local_sctp_description && remote_sctp_description) { int max_message_size; // A remote max message size of zero means "any size supported". @@ -4875,9 +5115,9 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription( std::min(local_sctp_description->max_message_size(), remote_sctp_description->max_message_size()); } - pc_->StartSctpTransport(local_sctp_description->port(), - remote_sctp_description->port(), - max_message_size); + pc_->StartSctpTransport({.local_port = local_sctp_description->port(), + .remote_port = remote_sctp_description->port(), + .max_message_size = max_message_size}); } } @@ -4885,21 +5125,23 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription( } RTCError SdpOfferAnswerHandler::PushdownTransportDescription( - cricket::ContentSource source, + ContentSource source, SdpType type) { TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::PushdownTransportDescription"); RTC_DCHECK_RUN_ON(signaling_thread()); - if (source == cricket::CS_LOCAL) { + if (source == CS_LOCAL) { const SessionDescriptionInterface* sdesc = local_description(); RTC_DCHECK(sdesc); - return transport_controller_s()->SetLocalDescription(type, - sdesc->description()); + const auto* remote = remote_description(); + return transport_controller_s()->SetLocalDescription( + type, sdesc->description(), remote ? 
remote->description() : nullptr); } else { const SessionDescriptionInterface* sdesc = remote_description(); RTC_DCHECK(sdesc); - return transport_controller_s()->SetRemoteDescription(type, - sdesc->description()); + const auto* local = local_description(); + return transport_controller_s()->SetRemoteDescription( + type, local ? local->description() : nullptr, sdesc->description()); } } @@ -4932,8 +5174,8 @@ void SdpOfferAnswerHandler::RemoveStoppedTransceivers() { (remote_content && remote_content->rejected)) { RTC_LOG(LS_INFO) << "Dissociating transceiver" " since the media section is being recycled."; - transceiver->internal()->set_mid(absl::nullopt); - transceiver->internal()->set_mline_index(absl::nullopt); + transceiver->internal()->set_mid(std::nullopt); + transceiver->internal()->set_mline_index(std::nullopt); } else if (!local_content && !remote_content) { // TODO(bugs.webrtc.org/11973): Consider if this should be removed already // See https://github.com/w3c/webrtc-pc/issues/2576 @@ -4950,42 +5192,39 @@ void SdpOfferAnswerHandler::RemoveUnusedChannels( if (ConfiguredForMedia()) { // Destroy video channel first since it may have a pointer to the // voice channel. - const cricket::ContentInfo* video_info = - cricket::GetFirstVideoContent(desc); + const ContentInfo* video_info = GetFirstVideoContent(desc); if (!video_info || video_info->rejected) { rtp_manager()->GetVideoTransceiver()->internal()->ClearChannel(); } - const cricket::ContentInfo* audio_info = - cricket::GetFirstAudioContent(desc); + const ContentInfo* audio_info = GetFirstAudioContent(desc); if (!audio_info || audio_info->rejected) { rtp_manager()->GetAudioTransceiver()->internal()->ClearChannel(); } } - const cricket::ContentInfo* data_info = cricket::GetFirstDataContent(desc); + const ContentInfo* data_info = GetFirstDataContent(desc); if (!data_info) { RTCError error(RTCErrorType::OPERATION_ERROR_WITH_DATA, "No data channel section in the description."); error.set_error_detail(RTCErrorDetailType::DATA_CHANNEL_FAILURE); - DestroyDataChannelTransport(error); + pc_->DestroyDataChannelTransport(error); } else if (data_info->rejected) { - rtc::StringBuilder sb; - sb << "Rejected data channel with mid=" << data_info->name << "."; + StringBuilder sb; + sb << "Rejected data channel with mid=" << data_info->mid() << "."; RTCError error(RTCErrorType::OPERATION_ERROR_WITH_DATA, sb.Release()); error.set_error_detail(RTCErrorDetailType::DATA_CHANNEL_FAILURE); - DestroyDataChannelTransport(error); + pc_->DestroyDataChannelTransport(error); } } void SdpOfferAnswerHandler::UpdateEndedRemoteMediaStreams() { RTC_DCHECK_RUN_ON(signaling_thread()); - std::vector> streams_to_remove; + std::vector> streams_to_remove; for (size_t i = 0; i < remote_streams_->count(); ++i) { MediaStreamInterface* stream = remote_streams_->at(i); if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) { - streams_to_remove.push_back( - rtc::scoped_refptr(stream)); + streams_to_remove.push_back(scoped_refptr(stream)); } } @@ -5029,21 +5268,21 @@ bool SdpOfferAnswerHandler::UseCandidate( const IceCandidateInterface* candidate) { RTC_DCHECK_RUN_ON(signaling_thread()); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; - RTCErrorOr result = + RTCErrorOr result = FindContentInfo(remote_description(), candidate); if (!result.ok()) return false; - const cricket::Candidate& c = candidate->candidate(); - RTCError error = cricket::VerifyCandidate(c); + const Candidate& c = 
candidate->candidate(); + RTCError error = VerifyCandidate(c); if (!error.ok()) { RTC_LOG(LS_WARNING) << "Invalid candidate: " << c.ToString(); return true; } - pc_->AddRemoteCandidate(result.value()->name, c); + pc_->AddRemoteCandidate(result.value()->mid(), c); return true; } @@ -5067,7 +5306,7 @@ bool SdpOfferAnswerHandler::ReadyToUseRemoteCandidate( return false; } - RTCErrorOr result = + RTCErrorOr result = FindContentInfo(current_remote_desc, candidate); if (!result.ok()) { RTC_LOG(LS_ERROR) << "ReadyToUseRemoteCandidate: Invalid candidate. " @@ -5077,16 +5316,52 @@ bool SdpOfferAnswerHandler::ReadyToUseRemoteCandidate( return false; } + if (has_sdp_munged_ufrag_) { + // The format is + // restricted_addresses::|: + // The host can contain wildcards that match anything that comes after it. + // The port should either be a wildcard or a specific port. + const std::string restricted_addresses = + pc_->trials().Lookup("WebRTC-NoSdpMangleUfragRestrictedAddresses"); + const std::string port = candidate->candidate().address().PortAsString(); + const std::string host = candidate->candidate().address().HostAsURIString(); + const std::vector restricted_address_list = + absl::StrSplit(restricted_addresses, '|'); + for (const absl::string_view restricted_address : restricted_address_list) { + const std::pair address = + absl::StrSplit(restricted_address, ':'); + if ((address.second == port || address.second == "*") && + WildcardHostPrefixMatch(host, address.first)) { + RTC_LOG(LS_ERROR) << "ReadyToUseRemoteCandidate: Candidate not valid " + "because of SDP munging."; + *valid = false; + // There might be other types of SDP munging, but here we're only + // interested in IceUfrag and IcePwd. + SdpMungingType sdp_munging_type = + last_sdp_munging_type_ == SdpMungingType::kIcePwd + ? SdpMungingType::kIcePwd + : SdpMungingType::kIceUfrag; + RTC_HISTOGRAM_ENUMERATION_SPARSE( + "WebRTC.PeerConnection.RestrictedCandidates.SdpMungingType", + sdp_munging_type, SdpMungingType::kMaxValue); + RTC_HISTOGRAM_ENUMERATION_SPARSE( + "WebRTC.PeerConnection.RestrictedCandidates.Port", + candidate->candidate().address().port(), 65536); + return false; + } + } + } + return true; } -RTCErrorOr SdpOfferAnswerHandler::FindContentInfo( +RTCErrorOr SdpOfferAnswerHandler::FindContentInfo( const SessionDescriptionInterface* description, const IceCandidateInterface* candidate) { if (!candidate->sdp_mid().empty()) { auto& contents = description->description()->contents(); - auto it = absl::c_find_if( - contents, [candidate](const cricket::ContentInfo& content_info) { + auto it = + absl::c_find_if(contents, [candidate](const ContentInfo& content_info) { return content_info.mid() == candidate->sdp_mid(); }); if (it == contents.end()) { @@ -5106,9 +5381,9 @@ RTCErrorOr SdpOfferAnswerHandler::FindContentInfo( } else { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_RANGE, "Media line index (" + - rtc::ToString(candidate->sdp_mline_index()) + + absl::StrCat(candidate->sdp_mline_index()) + ") out of range (number of mlines: " + - rtc::ToString(content_size) + ")."); + absl::StrCat(content_size) + ")."); } } @@ -5121,12 +5396,12 @@ RTCError SdpOfferAnswerHandler::CreateChannels(const SessionDescription& desc) { // Creating the media channels. Transports should already have been created // at this point. 
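// The WebRTC-NoSdpMangleUfragRestrictedAddresses handling above reads a
// field-trial string of the form "host:port|host:port|...", where the port
// may be "*" and the host may end in a wildcard matching any suffix. A
// self-contained sketch of that parsing and matching; it uses std::string
// helpers instead of absl::StrSplit, and the trailing-'*' prefix match is an
// assumption about WildcardHostPrefixMatch rather than its actual
// implementation.
#include <cstddef>
#include <string>
#include <vector>

namespace {

std::vector<std::string> Split(const std::string& s, char sep) {
  std::vector<std::string> out;
  std::size_t start = 0;
  while (true) {
    const std::size_t end = s.find(sep, start);
    out.push_back(s.substr(start, end - start));
    if (end == std::string::npos) break;
    start = end + 1;
  }
  return out;
}

bool HostMatches(const std::string& host, const std::string& pattern) {
  if (!pattern.empty() && pattern.back() == '*') {
    const std::string prefix = pattern.substr(0, pattern.size() - 1);
    return host.rfind(prefix, 0) == 0;  // host starts with prefix
  }
  return host == pattern;
}

}  // namespace

bool CandidateIsRestricted(const std::string& restricted_addresses,
                           const std::string& candidate_host,
                           const std::string& candidate_port) {
  for (const std::string& entry : Split(restricted_addresses, '|')) {
    const std::vector<std::string> parts = Split(entry, ':');
    if (parts.size() != 2) continue;  // Malformed entry, ignore.
    if ((parts[1] == candidate_port || parts[1] == "*") &&
        HostMatches(candidate_host, parts[0])) {
      return true;
    }
  }
  return false;
}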
RTC_DCHECK_RUN_ON(signaling_thread()); - const cricket::ContentInfo* voice = cricket::GetFirstAudioContent(&desc); + const ContentInfo* voice = GetFirstAudioContent(&desc); if (voice && !voice->rejected && !rtp_manager()->GetAudioTransceiver()->internal()->channel()) { auto error = rtp_manager()->GetAudioTransceiver()->internal()->CreateChannel( - voice->name, pc_->call_ptr(), pc_->configuration()->media_config, + voice->mid(), pc_->call_ptr(), pc_->configuration()->media_config, pc_->SrtpRequired(), pc_->GetCryptoOptions(), audio_options(), video_options(), video_bitrate_allocator_factory_.get(), [&](absl::string_view mid) { @@ -5138,12 +5413,12 @@ RTCError SdpOfferAnswerHandler::CreateChannels(const SessionDescription& desc) { } } - const cricket::ContentInfo* video = cricket::GetFirstVideoContent(&desc); + const ContentInfo* video = GetFirstVideoContent(&desc); if (video && !video->rejected && !rtp_manager()->GetVideoTransceiver()->internal()->channel()) { auto error = rtp_manager()->GetVideoTransceiver()->internal()->CreateChannel( - video->name, pc_->call_ptr(), pc_->configuration()->media_config, + video->mid(), pc_->call_ptr(), pc_->configuration()->media_config, pc_->SrtpRequired(), pc_->GetCryptoOptions(), audio_options(), video_options(), @@ -5156,8 +5431,9 @@ RTCError SdpOfferAnswerHandler::CreateChannels(const SessionDescription& desc) { } } - const cricket::ContentInfo* data = cricket::GetFirstDataContent(&desc); - if (data && !data->rejected && !CreateDataChannel(data->name)) { + const ContentInfo* data = GetFirstDataContent(&desc); + if (data && !data->rejected && + !pc_->CreateDataChannelTransport(data->mid())) { LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, "Failed to create data channel."); } @@ -5165,34 +5441,7 @@ RTCError SdpOfferAnswerHandler::CreateChannels(const SessionDescription& desc) { return RTCError::OK(); } -bool SdpOfferAnswerHandler::CreateDataChannel(const std::string& mid) { - RTC_DCHECK_RUN_ON(signaling_thread()); - RTC_DCHECK(!pc_->sctp_mid().has_value() || mid == pc_->sctp_mid().value()); - RTC_LOG(LS_INFO) << "Creating data channel, mid=" << mid; - - absl::optional transport_name = - context_->network_thread()->BlockingCall([&] { - RTC_DCHECK_RUN_ON(context_->network_thread()); - return pc_->SetupDataChannelTransport_n(mid); - }); - if (!transport_name) - return false; - - pc_->SetSctpDataInfo(mid, *transport_name); - return true; -} - -void SdpOfferAnswerHandler::DestroyDataChannelTransport(RTCError error) { - RTC_DCHECK_RUN_ON(signaling_thread()); - context_->network_thread()->BlockingCall( - [&, data_channel_controller = data_channel_controller()] { - RTC_DCHECK_RUN_ON(context_->network_thread()); - pc_->TeardownDataChannelTransport_n(error); - }); - pc_->ResetSctpDataInfo(); -} - -void SdpOfferAnswerHandler::DestroyAllChannels() { +void SdpOfferAnswerHandler::DestroyMediaChannels() { RTC_DCHECK_RUN_ON(signaling_thread()); if (!transceivers()) { return; @@ -5206,43 +5455,39 @@ void SdpOfferAnswerHandler::DestroyAllChannels() { RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0); for (const auto& transceiver : list) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { + if (transceiver->media_type() == webrtc::MediaType::VIDEO) { transceiver->internal()->ClearChannel(); } } for (const auto& transceiver : list) { - if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { + if (transceiver->media_type() == webrtc::MediaType::AUDIO) { transceiver->internal()->ClearChannel(); } } - - DestroyDataChannelTransport({}); } void 
SdpOfferAnswerHandler::GenerateMediaDescriptionOptions( const SessionDescriptionInterface* session_desc, RtpTransceiverDirection audio_direction, RtpTransceiverDirection video_direction, - absl::optional* audio_index, - absl::optional* video_index, - absl::optional* data_index, - cricket::MediaSessionOptions* session_options) { + std::optional* audio_index, + std::optional* video_index, + std::optional* data_index, + MediaSessionOptions* session_options) { RTC_DCHECK_RUN_ON(signaling_thread()); - for (const cricket::ContentInfo& content : - session_desc->description()->contents()) { + for (const ContentInfo& content : session_desc->description()->contents()) { if (IsAudioContent(&content)) { // If we already have an audio m= section, reject this extra one. if (*audio_index) { session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions( - cricket::MEDIA_TYPE_AUDIO, content.name, - RtpTransceiverDirection::kInactive, /*stopped=*/true)); + MediaDescriptionOptions(webrtc::MediaType::AUDIO, content.mid(), + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); } else { bool stopped = (audio_direction == RtpTransceiverDirection::kInactive); session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_AUDIO, - content.name, audio_direction, - stopped)); + MediaDescriptionOptions(webrtc::MediaType::AUDIO, content.mid(), + audio_direction, stopped)); *audio_index = session_options->media_description_options.size() - 1; } session_options->media_description_options.back().header_extensions = @@ -5251,66 +5496,63 @@ void SdpOfferAnswerHandler::GenerateMediaDescriptionOptions( // If we already have an video m= section, reject this extra one. if (*video_index) { session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions( - cricket::MEDIA_TYPE_VIDEO, content.name, - RtpTransceiverDirection::kInactive, /*stopped=*/true)); + MediaDescriptionOptions(webrtc::MediaType::VIDEO, content.mid(), + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); } else { bool stopped = (video_direction == RtpTransceiverDirection::kInactive); session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_VIDEO, - content.name, video_direction, - stopped)); + MediaDescriptionOptions(webrtc::MediaType::VIDEO, content.mid(), + video_direction, stopped)); *video_index = session_options->media_description_options.size() - 1; } session_options->media_description_options.back().header_extensions = media_engine()->video().GetRtpHeaderExtensions(); } else if (IsUnsupportedContent(&content)) { session_options->media_description_options.push_back( - cricket::MediaDescriptionOptions(cricket::MEDIA_TYPE_UNSUPPORTED, - content.name, - RtpTransceiverDirection::kInactive, - /*stopped=*/true)); + MediaDescriptionOptions(webrtc::MediaType::UNSUPPORTED, content.mid(), + RtpTransceiverDirection::kInactive, + /*stopped=*/true)); } else { RTC_DCHECK(IsDataContent(&content)); // If we already have an data m= section, reject this extra one. 
if (*data_index) { session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForRejectedData(content.name)); + GetMediaDescriptionOptionsForRejectedData(content.mid())); } else { session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForActiveData(content.name)); + GetMediaDescriptionOptionsForActiveData(content.mid())); *data_index = session_options->media_description_options.size() - 1; } } } } -cricket::MediaDescriptionOptions +MediaDescriptionOptions SdpOfferAnswerHandler::GetMediaDescriptionOptionsForActiveData( const std::string& mid) const { RTC_DCHECK_RUN_ON(signaling_thread()); // Direction for data sections is meaningless, but legacy endpoints might // expect sendrecv. - cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid, - RtpTransceiverDirection::kSendRecv, - /*stopped=*/false); + MediaDescriptionOptions options(webrtc::MediaType::DATA, mid, + RtpTransceiverDirection::kSendRecv, + /*stopped=*/false); return options; } -cricket::MediaDescriptionOptions +MediaDescriptionOptions SdpOfferAnswerHandler::GetMediaDescriptionOptionsForRejectedData( const std::string& mid) const { RTC_DCHECK_RUN_ON(signaling_thread()); - cricket::MediaDescriptionOptions options(cricket::MEDIA_TYPE_DATA, mid, - RtpTransceiverDirection::kInactive, - /*stopped=*/true); + MediaDescriptionOptions options(webrtc::MediaType::DATA, mid, + RtpTransceiverDirection::kInactive, + /*stopped=*/true); return options; } bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( - cricket::ContentSource source, - const std::map& - bundle_groups_by_mid) { + ContentSource source, + const std::map& bundle_groups_by_mid) { TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState"); RTC_DCHECK_RUN_ON(signaling_thread()); @@ -5323,21 +5565,20 @@ bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( // are using the same payload type(s). For more context // see https://bugs.chromium.org/p/webrtc/issues/detail?id=11477 const SessionDescriptionInterface* sdesc = - (source == cricket::CS_LOCAL ? local_description() - : remote_description()); + (source == CS_LOCAL ? local_description() : remote_description()); struct PayloadTypes { std::set audio_payload_types; std::set video_payload_types; bool pt_demuxing_possible_audio = true; bool pt_demuxing_possible_video = true; }; - std::map payload_types_by_bundle; + std::map payload_types_by_bundle; // If the MID is missing from *any* receiving m= section, this is set to true. bool mid_header_extension_missing_audio = false; bool mid_header_extension_missing_video = false; for (auto& content_info : sdesc->description()->contents()) { - auto it = bundle_groups_by_mid.find(content_info.name); - const cricket::ContentGroup* bundle_group = + auto it = bundle_groups_by_mid.find(content_info.mid()); + const ContentGroup* bundle_group = it != bundle_groups_by_mid.end() ? 
it->second : nullptr; // If this m= section isn't bundled, it's safe to demux by payload type // since other m= sections using the same payload type will also be using @@ -5347,53 +5588,49 @@ bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( } PayloadTypes* payload_types = &payload_types_by_bundle[bundle_group]; if (content_info.rejected || - (source == cricket::ContentSource::CS_LOCAL && + (source == ContentSource::CS_LOCAL && !RtpTransceiverDirectionHasRecv( content_info.media_description()->direction())) || - (source == cricket::ContentSource::CS_REMOTE && + (source == ContentSource::CS_REMOTE && !RtpTransceiverDirectionHasSend( content_info.media_description()->direction()))) { // Ignore transceivers that are not receiving. continue; } - switch (content_info.media_description()->type()) { - case cricket::MediaType::MEDIA_TYPE_AUDIO: { - if (!mid_header_extension_missing_audio) { - mid_header_extension_missing_audio = - !ContentHasHeaderExtension(content_info, RtpExtension::kMidUri); - } - const cricket::AudioContentDescription* audio_desc = - content_info.media_description()->as_audio(); - for (const cricket::AudioCodec& audio : audio_desc->codecs()) { - if (payload_types->audio_payload_types.count(audio.id)) { + const webrtc::MediaType media_type = + content_info.media_description()->type(); + if (media_type == webrtc::MediaType::AUDIO || + media_type == webrtc::MediaType::VIDEO) { + if (media_type == webrtc::MediaType::AUDIO && + !mid_header_extension_missing_audio) { + mid_header_extension_missing_audio = + !ContentHasHeaderExtension(content_info, RtpExtension::kMidUri); + } else if (media_type == webrtc::MediaType::VIDEO && + !mid_header_extension_missing_video) { + mid_header_extension_missing_video = + !ContentHasHeaderExtension(content_info, RtpExtension::kMidUri); + } + const MediaContentDescription* media_desc = + content_info.media_description(); + for (const Codec& codec : media_desc->codecs()) { + if (media_type == webrtc::MediaType::AUDIO) { + if (payload_types->audio_payload_types.count(codec.id)) { // Two m= sections are using the same payload type, thus demuxing // by payload type is not possible. - payload_types->pt_demuxing_possible_audio = false; + if (media_type == webrtc::MediaType::AUDIO) { + payload_types->pt_demuxing_possible_audio = false; + } } - payload_types->audio_payload_types.insert(audio.id); - } - break; - } - case cricket::MediaType::MEDIA_TYPE_VIDEO: { - if (!mid_header_extension_missing_video) { - mid_header_extension_missing_video = - !ContentHasHeaderExtension(content_info, RtpExtension::kMidUri); - } - const cricket::VideoContentDescription* video_desc = - content_info.media_description()->as_video(); - for (const cricket::VideoCodec& video : video_desc->codecs()) { - if (payload_types->video_payload_types.count(video.id)) { + payload_types->audio_payload_types.insert(codec.id); + } else if (media_type == webrtc::MediaType::VIDEO) { + if (payload_types->video_payload_types.count(codec.id)) { // Two m= sections are using the same payload type, thus demuxing // by payload type is not possible. payload_types->pt_demuxing_possible_video = false; } - payload_types->video_payload_types.insert(video.id); + payload_types->video_payload_types.insert(codec.id); } - break; } - default: - // Ignore data channels. - continue; } } @@ -5418,32 +5655,32 @@ bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( // Gather all updates ahead of time so that all channels can be updated in a // single BlockingCall; necessary due to thread guards. 
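
The merged audio/video loop earlier in this hunk applies one rule: within a BUNDLE group, demuxing incoming RTP by payload type stays possible for a media kind only while no payload type id is claimed by two receiving m= sections of that kind; otherwise the receiver must rely on the MID header extension. A simplified standalone sketch of that bookkeeping follows; the Section struct and PayloadTypeDemuxPossible() are hypothetical illustration-only types, and unlike the real code (which keys on ContentGroup* and tracks audio and video separately) this version ignores media kind.

#include <map>
#include <set>
#include <string>
#include <vector>

struct Section {
  std::string bundle_group;        // Empty means the m= section is not bundled.
  std::vector<int> payload_types;  // Receive payload types of this section.
};

// For each BUNDLE group, returns whether demuxing by payload type is still
// possible, i.e. no payload type appears in two sections of the group.
std::map<std::string, bool> PayloadTypeDemuxPossible(
    const std::vector<Section>& sections) {
  std::map<std::string, std::set<int>> seen;
  std::map<std::string, bool> possible;
  for (const Section& section : sections) {
    if (section.bundle_group.empty()) {
      continue;  // Unbundled sections can always demux by payload type.
    }
    possible.try_emplace(section.bundle_group, true);
    for (int pt : section.payload_types) {
      if (!seen[section.bundle_group].insert(pt).second) {
        // The same payload type appears twice within the group, so the
        // receiver has to fall back to the MID header extension.
        possible[section.bundle_group] = false;
      }
    }
  }
  return possible;
}
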
- std::vector> channels_to_update; + std::vector> channels_to_update; for (const auto& transceiver : transceivers()->ListInternal()) { - cricket::ChannelInterface* channel = transceiver->channel(); + ChannelInterface* channel = transceiver->channel(); const ContentInfo* content = FindMediaSectionForTransceiver(transceiver, sdesc); if (!channel || !content) { continue; } - const cricket::MediaType media_type = channel->media_type(); - if (media_type != cricket::MediaType::MEDIA_TYPE_AUDIO && - media_type != cricket::MediaType::MEDIA_TYPE_VIDEO) { + const webrtc::MediaType media_type = channel->media_type(); + if (media_type != webrtc::MediaType::AUDIO && + media_type != webrtc::MediaType::VIDEO) { continue; } RtpTransceiverDirection local_direction = content->media_description()->direction(); - if (source == cricket::CS_REMOTE) { + if (source == CS_REMOTE) { local_direction = RtpTransceiverDirectionReversed(local_direction); } auto bundle_it = bundle_groups_by_mid.find(channel->mid()); - const cricket::ContentGroup* bundle_group = + const ContentGroup* bundle_group = bundle_it != bundle_groups_by_mid.end() ? bundle_it->second : nullptr; bool pt_demux_enabled = RtpTransceiverDirectionHasRecv(local_direction); - if (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO) { + if (media_type == webrtc::MediaType::AUDIO) { pt_demux_enabled &= !bundle_group || (bundled_pt_demux_allowed_audio && @@ -5452,7 +5689,7 @@ bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( pt_demuxing_has_been_used_audio_ = true; } } else { - RTC_DCHECK_EQ(media_type, cricket::MediaType::MEDIA_TYPE_VIDEO); + RTC_DCHECK_EQ(media_type, webrtc::MediaType::VIDEO); pt_demux_enabled &= !bundle_group || (bundled_pt_demux_allowed_video && diff --git a/pc/sdp_offer_answer.h b/pc/sdp_offer_answer.h index 80a21391b9..e5604bf37d 100644 --- a/pc/sdp_offer_answer.h +++ b/pc/sdp_offer_answer.h @@ -17,11 +17,11 @@ #include #include #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/audio_options.h" #include "api/candidate.h" #include "api/jsep.h" @@ -39,14 +39,16 @@ #include "api/uma_metrics.h" #include "api/video/video_bitrate_allocator_factory.h" #include "media/base/media_channel.h" +#include "media/base/media_engine.h" #include "media/base/stream_params.h" #include "p2p/base/port_allocator.h" +#include "pc/codec_vendor.h" #include "pc/connection_context.h" #include "pc/data_channel_controller.h" #include "pc/jsep_transport_controller.h" +#include "pc/media_options.h" #include "pc/media_session.h" #include "pc/media_stream_observer.h" -#include "pc/peer_connection_internal.h" #include "pc/rtp_receiver.h" #include "pc/rtp_transceiver.h" #include "pc/rtp_transmission_manager.h" @@ -55,8 +57,8 @@ #include "pc/stream_collection.h" #include "pc/transceiver_list.h" #include "pc/webrtc_session_description_factory.h" -#include "rtc_base/checks.h" #include "rtc_base/operations_chain.h" +#include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" @@ -80,8 +82,11 @@ class SdpOfferAnswerHandler : public SdpStateProvider { static std::unique_ptr Create( PeerConnectionSdpMethods* pc, const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies& dependencies, - ConnectionContext* context); + std::unique_ptr cert_generator, + std::unique_ptr + video_bitrate_allocator_factory, + ConnectionContext* context, + CodecLookupHelper* codec_lookup_helper); void 
ResetSessionDescFactory() { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -112,8 +117,7 @@ class SdpOfferAnswerHandler : public SdpStateProvider { bool NeedsIceRestart(const std::string& content_name) const override; bool IceRestartPending(const std::string& content_name) const override; - absl::optional GetDtlsRole( - const std::string& mid) const override; + std::optional GetDtlsRole(const std::string& mid) const override; void RestartIce(); @@ -127,16 +131,16 @@ class SdpOfferAnswerHandler : public SdpStateProvider { void SetLocalDescription( std::unique_ptr desc, - rtc::scoped_refptr observer); + scoped_refptr observer); void SetLocalDescription( - rtc::scoped_refptr observer); + scoped_refptr observer); void SetLocalDescription(SetSessionDescriptionObserver* observer, SessionDescriptionInterface* desc); void SetLocalDescription(SetSessionDescriptionObserver* observer); void SetRemoteDescription( std::unique_ptr desc, - rtc::scoped_refptr observer); + scoped_refptr observer); void SetRemoteDescription(SetSessionDescriptionObserver* observer, SessionDescriptionInterface* desc); @@ -146,17 +150,16 @@ class SdpOfferAnswerHandler : public SdpStateProvider { bool AddIceCandidate(const IceCandidateInterface* candidate); void AddIceCandidate(std::unique_ptr candidate, std::function callback); - bool RemoveIceCandidates(const std::vector& candidates); + bool RemoveIceCandidates(const std::vector& candidates); // Adds a locally generated candidate to the local description. void AddLocalIceCandidate(const JsepIceCandidate* candidate); - void RemoveLocalIceCandidates( - const std::vector& candidates); + void RemoveLocalIceCandidates(const std::vector& candidates); bool ShouldFireNegotiationNeededEvent(uint32_t event_id); bool AddStream(MediaStreamInterface* local_stream); void RemoveStream(MediaStreamInterface* local_stream); - absl::optional is_caller() const; + std::optional is_caller() const; bool HasNewIceCredentials(); void UpdateNegotiationNeeded(); void AllocateSctpSids(); @@ -164,13 +167,13 @@ class SdpOfferAnswerHandler : public SdpStateProvider { // directly getting the information from the transport. // This is used for allocating stream ids for data channels. // See also `InternalDataChannelInit::fallback_ssl_role`. - absl::optional GuessSslRole() const; + std::optional GuessSslRole() const; - // Destroys all BaseChannels and destroys the SCTP data channel, if present. - void DestroyAllChannels(); + // Destroys all media BaseChannels. + void DestroyMediaChannels(); - rtc::scoped_refptr local_streams(); - rtc::scoped_refptr remote_streams(); + scoped_refptr local_streams(); + scoped_refptr remote_streams(); bool initial_offerer() { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -180,6 +183,11 @@ class SdpOfferAnswerHandler : public SdpStateProvider { return false; } + SdpMungingType sdp_munging_type() const { return last_sdp_munging_type_; } + void DisableSdpMungingChecksForTesting() { + disable_sdp_munging_checks_ = true; + } + private: class RemoteDescriptionOperation; class ImplicitCreateSessionDescriptionObserver; @@ -210,11 +218,14 @@ class SdpOfferAnswerHandler : public SdpStateProvider { // once. Modifies dependencies. 
void Initialize( const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies& dependencies, - ConnectionContext* context); - - rtc::Thread* signaling_thread() const; - rtc::Thread* network_thread() const; + std::unique_ptr cert_generator, + std::unique_ptr + video_bitrate_allocator_factory, + ConnectionContext* context, + CodecLookupHelper* codec_lookup_helper); + + Thread* signaling_thread() const; + Thread* network_thread() const; // Non-const versions of local_description()/remote_description(), for use // internally. SessionDescriptionInterface* mutable_local_description() @@ -232,8 +243,7 @@ class SdpOfferAnswerHandler : public SdpStateProvider { // that return an RTCError instead of invoking a callback. RTCError ApplyLocalDescription( std::unique_ptr desc, - const std::map& - bundle_groups_by_mid); + const std::map& bundle_groups_by_mid); void ApplyRemoteDescription( std::unique_ptr operation); @@ -248,23 +258,23 @@ class SdpOfferAnswerHandler : public SdpStateProvider { // Part of ApplyRemoteDescription steps specific to plan b. void PlanBUpdateSendersAndReceivers( - const cricket::ContentInfo* audio_content, - const cricket::AudioContentDescription* audio_desc, - const cricket::ContentInfo* video_content, - const cricket::VideoContentDescription* video_desc); + const ContentInfo* audio_content, + const AudioContentDescription* audio_desc, + const ContentInfo* video_content, + const VideoContentDescription* video_desc); // Implementation of the offer/answer exchange operations. These are chained // onto the `operations_chain_` when the public CreateOffer(), CreateAnswer(), // SetLocalDescription() and SetRemoteDescription() methods are invoked. void DoCreateOffer( const PeerConnectionInterface::RTCOfferAnswerOptions& options, - rtc::scoped_refptr observer); + scoped_refptr observer); void DoCreateAnswer( const PeerConnectionInterface::RTCOfferAnswerOptions& options, - rtc::scoped_refptr observer); + scoped_refptr observer); void DoSetLocalDescription( std::unique_ptr desc, - rtc::scoped_refptr observer); + scoped_refptr observer); void DoSetRemoteDescription( std::unique_ptr operation); @@ -278,12 +288,11 @@ class SdpOfferAnswerHandler : public SdpStateProvider { RTCError UpdateSessionState( SdpType type, - cricket::ContentSource source, - const cricket::SessionDescription* description, - const std::map& - bundle_groups_by_mid); + ContentSource source, + const SessionDescription* description, + const std::map& bundle_groups_by_mid); - bool IsUnifiedPlan() const RTC_RUN_ON(signaling_thread()); + bool IsUnifiedPlan() const; // Signals from MediaStreamObserver. void OnAudioTrackAdded(AudioTrackInterface* track, @@ -306,62 +315,60 @@ class SdpOfferAnswerHandler : public SdpStateProvider { // Runs the algorithm **set the associated remote streams** specified in // https://w3c.github.io/webrtc-pc/#set-associated-remote-streams. void SetAssociatedRemoteStreams( - rtc::scoped_refptr receiver, + scoped_refptr receiver, const std::vector& stream_ids, - std::vector>* added_streams, - std::vector>* removed_streams); + std::vector>* added_streams, + std::vector>* removed_streams); bool CheckIfNegotiationIsNeeded(); void GenerateNegotiationNeededEvent(); // Helper method which verifies SDP. 
RTCError ValidateSessionDescription( const SessionDescriptionInterface* sdesc, - cricket::ContentSource source, - const std::map& - bundle_groups_by_mid) RTC_RUN_ON(signaling_thread()); + ContentSource source, + const std::map& bundle_groups_by_mid) + RTC_RUN_ON(signaling_thread()); // Updates the local RtpTransceivers according to the JSEP rules. Called as // part of setting the local/remote description. RTCError UpdateTransceiversAndDataChannels( - cricket::ContentSource source, + ContentSource source, const SessionDescriptionInterface& new_session, const SessionDescriptionInterface* old_local_description, const SessionDescriptionInterface* old_remote_description, - const std::map& - bundle_groups_by_mid); + const std::map& bundle_groups_by_mid); // Associate the given transceiver according to the JSEP rules. - RTCErrorOr< - rtc::scoped_refptr>> - AssociateTransceiver(cricket::ContentSource source, + RTCErrorOr>> + AssociateTransceiver(ContentSource source, SdpType type, size_t mline_index, - const cricket::ContentInfo& content, - const cricket::ContentInfo* old_local_content, - const cricket::ContentInfo* old_remote_content) + const ContentInfo& content, + const ContentInfo* old_local_content, + const ContentInfo* old_remote_content) RTC_RUN_ON(signaling_thread()); // Returns the media section in the given session description that is // associated with the RtpTransceiver. Returns null if none found or this // RtpTransceiver is not associated. Logic varies depending on the // SdpSemantics specified in the configuration. - const cricket::ContentInfo* FindMediaSectionForTransceiver( + const ContentInfo* FindMediaSectionForTransceiver( const RtpTransceiver* transceiver, const SessionDescriptionInterface* sdesc) const; // Either creates or destroys the transceiver's BaseChannel according to the // given media section. RTCError UpdateTransceiverChannel( - rtc::scoped_refptr> + scoped_refptr> transceiver, - const cricket::ContentInfo& content, - const cricket::ContentGroup* bundle_group) RTC_RUN_ON(signaling_thread()); + const ContentInfo& content, + const ContentGroup* bundle_group) RTC_RUN_ON(signaling_thread()); // Either creates or destroys the local data channel according to the given // media section. - RTCError UpdateDataChannel(cricket::ContentSource source, - const cricket::ContentInfo& content, - const cricket::ContentGroup* bundle_group) + RTCError UpdateDataChannelTransport(ContentSource source, + const ContentInfo& content, + const ContentGroup* bundle_group) RTC_RUN_ON(signaling_thread()); // Check if a call to SetLocalDescription is acceptable with a session // description of the given type. @@ -374,44 +381,40 @@ class SdpOfferAnswerHandler : public SdpStateProvider { // unique. To support legacy end points that do not supply a=mid lines, this // method will modify the session description to add MIDs generated according // to the SDP semantics. - void FillInMissingRemoteMids(cricket::SessionDescription* remote_description); + void FillInMissingRemoteMids(SessionDescription* remote_description); // Returns an RtpTransceiver, if available, that can be used to receive the // given media type according to JSEP rules. - rtc::scoped_refptr> - FindAvailableTransceiverToReceive(cricket::MediaType media_type) const; + scoped_refptr> + FindAvailableTransceiverToReceive(webrtc::MediaType media_type) const; // Returns a MediaSessionOptions struct with options decided by `options`, // the local MediaStreams and DataChannels. 
void GetOptionsForOffer(const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options); + MediaSessionOptions* session_options); void GetOptionsForPlanBOffer( const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); + MediaSessionOptions* session_options) RTC_RUN_ON(signaling_thread()); void GetOptionsForUnifiedPlanOffer( const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); + MediaSessionOptions* session_options) RTC_RUN_ON(signaling_thread()); // Returns a MediaSessionOptions struct with options decided by // `constraints`, the local MediaStreams and DataChannels. void GetOptionsForAnswer(const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options); + MediaSessionOptions* session_options); void GetOptionsForPlanBAnswer( const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); + MediaSessionOptions* session_options) RTC_RUN_ON(signaling_thread()); void GetOptionsForUnifiedPlanAnswer( const PeerConnectionInterface::RTCOfferAnswerOptions& offer_answer_options, - cricket::MediaSessionOptions* session_options) - RTC_RUN_ON(signaling_thread()); + MediaSessionOptions* session_options) RTC_RUN_ON(signaling_thread()); const char* SessionErrorToString(SessionError error) const; std::string GetSessionErrorMsg(); @@ -425,12 +428,11 @@ class SdpOfferAnswerHandler : public SdpStateProvider { RTCError HandleLegacyOfferOptions( const PeerConnectionInterface::RTCOfferAnswerOptions& options); void RemoveRecvDirectionFromReceivingTransceiversOfType( - cricket::MediaType media_type) RTC_RUN_ON(signaling_thread()); - void AddUpToOneReceivingTransceiverOfType(cricket::MediaType media_type); + webrtc::MediaType media_type) RTC_RUN_ON(signaling_thread()); + void AddUpToOneReceivingTransceiverOfType(webrtc::MediaType media_type); - std::vector< - rtc::scoped_refptr>> - GetReceivingTransceiversOfType(cricket::MediaType media_type) + std::vector>> + GetReceivingTransceiversOfType(webrtc::MediaType media_type) RTC_RUN_ON(signaling_thread()); // Runs the algorithm specified in @@ -441,26 +443,25 @@ class SdpOfferAnswerHandler : public SdpStateProvider { // `removed_streams` is the list of streams which no longer have a receiving // track so should be removed. void ProcessRemovalOfRemoteTrack( - const rtc::scoped_refptr> + const scoped_refptr> transceiver, - std::vector>* remove_list, - std::vector>* removed_streams); + std::vector>* remove_list, + std::vector>* removed_streams); void RemoveRemoteStreamsIfEmpty( - const std::vector>& - remote_streams, - std::vector>* removed_streams); + const std::vector>& remote_streams, + std::vector>* removed_streams); // Remove all local and remote senders of type `media_type`. // Called when a media type is rejected (m-line set to port 0). - void RemoveSenders(cricket::MediaType media_type); + void RemoveSenders(webrtc::MediaType media_type); // Loops through the vector of `streams` and finds added and removed // StreamParams since last time this method was called. // For each new or removed StreamParam, OnLocalSenderSeen or // OnLocalSenderRemoved is invoked. 
- void UpdateLocalSenders(const std::vector& streams, - cricket::MediaType media_type); + void UpdateLocalSenders(const std::vector& streams, + webrtc::MediaType media_type); // Makes sure a MediaStreamTrack is created for each StreamParam in `streams`, // and existing MediaStreamTracks are removed if there is no corresponding @@ -468,11 +469,10 @@ class SdpOfferAnswerHandler : public SdpStateProvider { // is created if it doesn't exist; if false, it's removed if it exists. // `media_type` is the type of the `streams` and can be either audio or video. // If a new MediaStream is created it is added to `new_streams`. - void UpdateRemoteSendersList( - const std::vector& streams, - bool default_track_needed, - cricket::MediaType media_type, - StreamCollection* new_streams); + void UpdateRemoteSendersList(const std::vector& streams, + bool default_track_needed, + webrtc::MediaType media_type, + StreamCollection* new_streams); // Enables media channels to allow sending of media. // This enables media to flow on all configured audio/video channels. @@ -481,17 +481,15 @@ class SdpOfferAnswerHandler : public SdpStateProvider { // down to all of the channels, and start SCTP if needed. RTCError PushdownMediaDescription( SdpType type, - cricket::ContentSource source, - const std::map& - bundle_groups_by_mid); + ContentSource source, + const std::map& bundle_groups_by_mid); - RTCError PushdownTransportDescription(cricket::ContentSource source, - SdpType type); + RTCError PushdownTransportDescription(ContentSource source, SdpType type); // Helper function to remove stopped transceivers. void RemoveStoppedTransceivers(); // Deletes the corresponding channel of contents that don't exist in `desc`. // `desc` can be null. This means that all channels are deleted. - void RemoveUnusedChannels(const cricket::SessionDescription* desc); + void RemoveUnusedChannels(const SessionDescription* desc); // Finds remote MediaStreams without any tracks and removes them from // `remote_streams_` and notifies the observer that the MediaStreams no longer @@ -513,7 +511,7 @@ class SdpOfferAnswerHandler : public SdpStateProvider { const SessionDescriptionInterface* remote_desc, bool* valid); - RTCErrorOr FindContentInfo( + RTCErrorOr FindContentInfo( const SessionDescriptionInterface* description, const IceCandidateInterface* candidate) RTC_RUN_ON(signaling_thread()); @@ -524,13 +522,7 @@ class SdpOfferAnswerHandler : public SdpStateProvider { // Allocates media channels based on the `desc`. If `desc` doesn't have // the BUNDLE option, this method will disable BUNDLE in PortAllocator. // This method will also delete any existing media channels before creating. - RTCError CreateChannels(const cricket::SessionDescription& desc); - - bool CreateDataChannel(const std::string& mid); - - // Destroys the RTP data channel transport and/or the SCTP data channel - // transport and clears it. - void DestroyDataChannelTransport(RTCError error); + RTCError CreateChannels(const SessionDescription& desc); // Generates MediaDescriptionOptions for the `session_opts` based on existing // local description or remote description. 
@@ -538,27 +530,26 @@ class SdpOfferAnswerHandler : public SdpStateProvider { const SessionDescriptionInterface* session_desc, RtpTransceiverDirection audio_direction, RtpTransceiverDirection video_direction, - absl::optional* audio_index, - absl::optional* video_index, - absl::optional* data_index, - cricket::MediaSessionOptions* session_options); + std::optional* audio_index, + std::optional* video_index, + std::optional* data_index, + MediaSessionOptions* session_options); // Generates the active MediaDescriptionOptions for the local data channel // given the specified MID. - cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForActiveData( + MediaDescriptionOptions GetMediaDescriptionOptionsForActiveData( const std::string& mid) const; // Generates the rejected MediaDescriptionOptions for the local data channel // given the specified MID. - cricket::MediaDescriptionOptions GetMediaDescriptionOptionsForRejectedData( + MediaDescriptionOptions GetMediaDescriptionOptionsForRejectedData( const std::string& mid) const; // Based on number of transceivers per media type, enabled or disable // payload type based demuxing in the affected channels. bool UpdatePayloadTypeDemuxingState( - cricket::ContentSource source, - const std::map& - bundle_groups_by_mid); + ContentSource source, + const std::map& bundle_groups_by_mid); // Updates the error state, signaling if necessary. void SetSessionError(SessionError error, const std::string& error_desc); @@ -569,15 +560,17 @@ class SdpOfferAnswerHandler : public SdpStateProvider { AddIceCandidateResult AddIceCandidateInternal( const IceCandidateInterface* candidate); + void ReportInitialSdpMunging(bool had_local_description, SdpType type); + // ================================================================== // Access to pc_ variables - cricket::MediaEngineInterface* media_engine() const; + MediaEngineInterface* media_engine() const; TransceiverList* transceivers(); const TransceiverList* transceivers() const; DataChannelController* data_channel_controller(); const DataChannelController* data_channel_controller() const; - cricket::PortAllocator* port_allocator(); - const cricket::PortAllocator* port_allocator() const; + PortAllocator* port_allocator(); + const PortAllocator* port_allocator() const; RtpTransmissionManager* rtp_manager(); const RtpTransmissionManager* rtp_manager() const; JsepTransportController* transport_controller_s() @@ -589,8 +582,8 @@ class SdpOfferAnswerHandler : public SdpStateProvider { const JsepTransportController* transport_controller_n() const RTC_RUN_ON(network_thread()); // =================================================================== - const cricket::AudioOptions& audio_options() { return audio_options_; } - const cricket::VideoOptions& video_options() { return video_options_; } + const AudioOptions& audio_options() { return audio_options_; } + const VideoOptions& video_options() { return video_options_; } bool ConfiguredForMedia() const; PeerConnectionSdpMethods* const pc_; @@ -607,18 +600,23 @@ class SdpOfferAnswerHandler : public SdpStateProvider { RTC_GUARDED_BY(signaling_thread()); std::unique_ptr pending_remote_description_ RTC_GUARDED_BY(signaling_thread()); + std::unique_ptr last_created_offer_ + RTC_GUARDED_BY(signaling_thread()); + std::unique_ptr last_created_answer_ + RTC_GUARDED_BY(signaling_thread()); + SdpMungingType last_sdp_munging_type_ = SdpMungingType::kNoModification; PeerConnectionInterface::SignalingState signaling_state_ RTC_GUARDED_BY(signaling_thread()) = 
PeerConnectionInterface::kStable; // Whether this peer is the caller. Set when the local description is applied. - absl::optional is_caller_ RTC_GUARDED_BY(signaling_thread()); + std::optional is_caller_ RTC_GUARDED_BY(signaling_thread()); // Streams added via AddStream. - const rtc::scoped_refptr local_streams_ + const scoped_refptr local_streams_ RTC_GUARDED_BY(signaling_thread()); // Streams created as a result of SetRemoteDescription. - const rtc::scoped_refptr remote_streams_ + const scoped_refptr remote_streams_ RTC_GUARDED_BY(signaling_thread()); std::vector> stream_observers_ @@ -629,7 +627,7 @@ class SdpOfferAnswerHandler : public SdpStateProvider { // SetRemoteDescription() is invoked while CreateOffer() is still pending, the // SRD operation will not start until CreateOffer() has completed. See // https://w3c.github.io/webrtc-pc/#dfn-operations-chain. - rtc::scoped_refptr operations_chain_ + scoped_refptr operations_chain_ RTC_GUARDED_BY(signaling_thread()); // One PeerConnection has only one RTCP CNAME. @@ -638,7 +636,7 @@ class SdpOfferAnswerHandler : public SdpStateProvider { // MIDs will be generated using this generator which will keep track of // all the MIDs that have been seen over the life of the PeerConnection. - rtc::UniqueStringGenerator mid_generator_ RTC_GUARDED_BY(signaling_thread()); + UniqueStringGenerator mid_generator_ RTC_GUARDED_BY(signaling_thread()); // List of content names for which the remote side triggered an ICE restart. std::set pending_ice_restarts_ @@ -664,7 +662,7 @@ class SdpOfferAnswerHandler : public SdpStateProvider { // line we create and use a stream with a random ID for our receivers. This is // to support legacy endpoints that do not support the a=msid attribute (as // opposed to streamless tracks with "a=msid:-"). - rtc::scoped_refptr missing_msid_default_stream_ + scoped_refptr missing_msid_default_stream_ RTC_GUARDED_BY(signaling_thread()); SessionError session_error_ RTC_GUARDED_BY(signaling_thread()) = @@ -672,22 +670,31 @@ class SdpOfferAnswerHandler : public SdpStateProvider { std::string session_error_desc_ RTC_GUARDED_BY(signaling_thread()); // Member variables for caching global options. - cricket::AudioOptions audio_options_ RTC_GUARDED_BY(signaling_thread()); - cricket::VideoOptions video_options_ RTC_GUARDED_BY(signaling_thread()); + AudioOptions audio_options_ RTC_GUARDED_BY(signaling_thread()); + VideoOptions video_options_ RTC_GUARDED_BY(signaling_thread()); // A video bitrate allocator factory. // This can be injected using the PeerConnectionDependencies, // or else the CreateBuiltinVideoBitrateAllocatorFactory() will be called. // Note that one can still choose to override this in a MediaEngine // if one wants too. - std::unique_ptr - video_bitrate_allocator_factory_ RTC_GUARDED_BY(signaling_thread()); + std::unique_ptr video_bitrate_allocator_factory_ + RTC_GUARDED_BY(signaling_thread()); // Whether we are the initial offerer on the association. This // determines the SSL role. - absl::optional initial_offerer_ RTC_GUARDED_BY(signaling_thread()); + std::optional initial_offerer_ RTC_GUARDED_BY(signaling_thread()); + + // Whether SDP munging checks are enabled or not. + // Some tests will be detected as SDP munging, so offer the option + // to disable. + bool disable_sdp_munging_checks_ = false; + CodecLookupHelper* codec_lookup_helper_ = nullptr; + + // Whether the username fragment or the password of the SDP was munged. 
+ bool has_sdp_munged_ufrag_ = false; - rtc::WeakPtrFactory weak_ptr_factory_ + WeakPtrFactory weak_ptr_factory_ RTC_GUARDED_BY(signaling_thread()); }; diff --git a/pc/sdp_offer_answer_unittest.cc b/pc/sdp_offer_answer_unittest.cc index 52dd755752..6eeda1b195 100644 --- a/pc/sdp_offer_answer_unittest.cc +++ b/pc/sdp_offer_answer_unittest.cc @@ -8,19 +8,38 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include +#include #include +#include +#include #include #include +#include "absl/strings/match.h" +#include "absl/strings/numbers.h" +#include "absl/strings/str_cat.h" #include "absl/strings/str_replace.h" -#include "api/audio/audio_mixer.h" +#include "absl/strings/str_split.h" +#include "absl/strings/string_view.h" +#include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/create_peerconnection_factory.h" +#include "api/field_trials.h" +#include "api/field_trials_view.h" +#include "api/jsep.h" #include "api/media_types.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/rtp_transceiver_direction.h" #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" +#include "api/test/rtc_error_matchers.h" +#include "api/uma_metrics.h" +#include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_decoder_factory_template.h" #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h" @@ -31,17 +50,21 @@ #include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h" #include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "p2p/base/port_allocator.h" +#include "media/base/codec.h" +#include "media/base/media_constants.h" +#include "media/base/stream_params.h" +#include "p2p/base/transport_description.h" #include "pc/peer_connection_wrapper.h" -#include "pc/session_description.h" #include "pc/test/fake_audio_capture_module.h" +#include "pc/test/fake_rtc_certificate_generator.h" +#include "pc/test/integration_test_helpers.h" #include "pc/test/mock_peer_connection_observers.h" -#include "rtc_base/rtc_certificate_generator.h" +#include "rtc_base/strings/string_format.h" #include "rtc_base/thread.h" #include "system_wrappers/include/metrics.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" // This file contains unit tests that relate to the behavior of the // SdpOfferAnswer module. 
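
The test hunks that follow rework the fixture so each test can hand its own FieldTrials to PeerConnectionDependencies instead of depending on the global trial string. A minimal usage sketch, assuming the updated CreatePeerConnection overload and the helpers defined below; the test name here is invented for illustration.

TEST_F(SdpOfferAnswerTest, ExampleWithPerConnectionFieldTrials) {
  // Per-test trials: the feature is enabled only for this PeerConnection.
  auto pc = CreatePeerConnection(
      FieldTrials::CreateNoGlobal("WebRTC-MixedCodecSimulcast/Enabled/"));
  pc->AddAudioTrack("audio_track", {});
  EXPECT_TRUE(pc->CreateOfferAndSetAsLocal());
}
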
@@ -51,12 +74,17 @@ namespace webrtc { +using ::testing::Eq; +using ::testing::IsTrue; using RTCConfiguration = PeerConnectionInterface::RTCConfiguration; +using ::testing::ElementsAre; +using ::testing::Pair; +using ::testing::SizeIs; namespace { -std::unique_ptr CreateAndStartThread() { - auto thread = rtc::Thread::Create(); +std::unique_ptr CreateAndStartThread() { + auto thread = Thread::Create(); thread->Start(); return thread; } @@ -87,40 +115,58 @@ class SdpOfferAnswerTest : public ::testing::Test { OpenH264DecoderTemplateAdapter, Dav1dDecoderTemplateAdapter>>(), nullptr /* audio_mixer */, - nullptr /* audio_processing */)) { - webrtc::metrics::Reset(); + nullptr /* audio_processing */, + nullptr /* audio_frame_processor */)) { + metrics::Reset(); } - std::unique_ptr CreatePeerConnection() { + std::unique_ptr CreatePeerConnection( + std::unique_ptr field_trials = nullptr) { RTCConfiguration config; config.sdp_semantics = SdpSemantics::kUnifiedPlan; - return CreatePeerConnection(config); + return CreatePeerConnection(config, std::move(field_trials)); } std::unique_ptr CreatePeerConnection( - const RTCConfiguration& config) { + const RTCConfiguration& config, + std::unique_ptr field_trials) { auto observer = std::make_unique(); - auto result = pc_factory_->CreatePeerConnectionOrError( - config, PeerConnectionDependencies(observer.get())); + PeerConnectionDependencies pc_deps(observer.get()); + pc_deps.trials = std::move(field_trials); + auto result = + pc_factory_->CreatePeerConnectionOrError(config, std::move(pc_deps)); EXPECT_TRUE(result.ok()); observer->SetPeerConnectionInterface(result.value().get()); return std::make_unique( pc_factory_, result.MoveValue(), std::move(observer)); } + std::optional FindFirstSendCodecWithName( + webrtc::MediaType media_type, + const std::string& name) const { + std::vector codecs = + pc_factory_->GetRtpSenderCapabilities(media_type).codecs; + for (const auto& codec : codecs) { + if (absl::EqualsIgnoreCase(codec.name, name)) { + return codec; + } + } + return std::nullopt; + } + protected: - std::unique_ptr signaling_thread_; - rtc::scoped_refptr pc_factory_; + std::unique_ptr signaling_thread_; + scoped_refptr pc_factory_; private: - rtc::AutoThread main_thread_; + AutoThread main_thread_; }; TEST_F(SdpOfferAnswerTest, OnTrackReturnsProxiedObject) { auto caller = CreatePeerConnection(); auto callee = CreatePeerConnection(); - auto audio_transceiver = caller->AddTransceiver(cricket::MEDIA_TYPE_AUDIO); + auto audio_transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); // Verify that caller->observer->OnTrack() has been called with a @@ -167,9 +213,10 @@ TEST_F(SdpOfferAnswerTest, BundleRejectsCodecCollisionsAudioVideo) { pc->SetRemoteDescription(std::move(desc), &error); // There is no error yet but the metrics counter will increase. EXPECT_TRUE(error.ok()); + EXPECT_METRIC_EQ( - 1, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.ValidBundledPayloadTypes", false)); + 1, metrics::NumEvents("WebRTC.PeerConnection.ValidBundledPayloadTypes", + false)); // Tolerate codec collisions in rejected m-lines. 
pc = CreatePeerConnection(); @@ -178,9 +225,9 @@ TEST_F(SdpOfferAnswerTest, BundleRejectsCodecCollisionsAudioVideo) { absl::StrReplaceAll(sdp, {{"m=video 9 ", "m=video 0 "}})); pc->SetRemoteDescription(std::move(rejected_offer), &error); EXPECT_TRUE(error.ok()); - EXPECT_METRIC_EQ(1, - webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.ValidBundledPayloadTypes", true)); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.PeerConnection.ValidBundledPayloadTypes", + true)); } TEST_F(SdpOfferAnswerTest, BundleRejectsCodecCollisionsVideoFmtp) { @@ -221,8 +268,8 @@ TEST_F(SdpOfferAnswerTest, BundleRejectsCodecCollisionsVideoFmtp) { pc->SetRemoteDescription(std::move(desc), &error); EXPECT_TRUE(error.ok()); EXPECT_METRIC_EQ( - 1, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.ValidBundledPayloadTypes", false)); + 1, metrics::NumEvents("WebRTC.PeerConnection.ValidBundledPayloadTypes", + false)); } TEST_F(SdpOfferAnswerTest, BundleCodecCollisionInDifferentBundlesAllowed) { @@ -264,8 +311,8 @@ TEST_F(SdpOfferAnswerTest, BundleCodecCollisionInDifferentBundlesAllowed) { pc->SetRemoteDescription(std::move(desc), &error); EXPECT_TRUE(error.ok()); EXPECT_METRIC_EQ( - 0, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.ValidBundledPayloadTypes", false)); + 0, metrics::NumEvents("WebRTC.PeerConnection.ValidBundledPayloadTypes", + false)); } TEST_F(SdpOfferAnswerTest, BundleMeasuresHeaderExtensionIdCollision) { @@ -304,9 +351,6 @@ TEST_F(SdpOfferAnswerTest, BundleMeasuresHeaderExtensionIdCollision) { RTCError error; pc->SetRemoteDescription(std::move(desc), &error); EXPECT_TRUE(error.ok()); - EXPECT_METRIC_EQ(1, - webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.ValidBundledExtensionIds", true)); } // extmap:3 is used with two different URIs which is not allowed. @@ -345,9 +389,6 @@ TEST_F(SdpOfferAnswerTest, BundleRejectsHeaderExtensionIdCollision) { pc->SetRemoteDescription(std::move(desc), &error); EXPECT_FALSE(error.ok()); EXPECT_EQ(error.type(), RTCErrorType::INVALID_PARAMETER); - EXPECT_METRIC_EQ( - 1, webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.ValidBundledExtensionIds", false)); } // transport-wide cc is negotiated with two different ids 3 and 4. 
@@ -388,9 +429,6 @@ TEST_F(SdpOfferAnswerTest, BundleAcceptsDifferentIdsForSameExtension) { RTCError error; pc->SetRemoteDescription(std::move(desc), &error); EXPECT_TRUE(error.ok()); - EXPECT_METRIC_EQ(1, - webrtc::metrics::NumEvents( - "WebRTC.PeerConnection.ValidBundledExtensionIds", true)); } TEST_F(SdpOfferAnswerTest, LargeMidsAreRejected) { @@ -482,8 +520,6 @@ TEST_F(SdpOfferAnswerTest, RollbackPreservesAddTrackMid) { EXPECT_EQ(saved_mid, first_transceiver->mid()); } -<<<<<<< HEAD -======= #ifdef WEBRTC_HAVE_SCTP TEST_F(SdpOfferAnswerTest, RejectedDataChannelsDoNotGetReoffered) { @@ -574,7 +610,7 @@ TEST_F(SdpOfferAnswerTest, SimulcastAnswerWithNoRidsIsRejected) { rid2.rid = "2"; init.send_encodings.push_back(rid2); - auto transceiver = pc->AddTransceiver(cricket::MEDIA_TYPE_VIDEO, init); + auto transceiver = pc->AddTransceiver(webrtc::MediaType::VIDEO, init); EXPECT_TRUE(pc->CreateOfferAndSetAsLocal()); auto mid = pc->pc()->local_description()->description()->contents()[0].mid(); @@ -621,6 +657,108 @@ TEST_F(SdpOfferAnswerTest, SimulcastAnswerWithNoRidsIsRejected) { EXPECT_TRUE(pc->SetRemoteDescription(std::move(rejected_answer))); } +TEST_F(SdpOfferAnswerTest, SimulcastOfferWithMixedCodec) { + auto pc = CreatePeerConnection( + FieldTrials::CreateNoGlobal("WebRTC-MixedCodecSimulcast/Enabled/")); + + std::optional vp8_codec_capability = + FindFirstSendCodecWithName(webrtc::MediaType::VIDEO, kVp8CodecName); + ASSERT_TRUE(vp8_codec_capability); + std::optional vp9_codec_capability = + FindFirstSendCodecWithName(webrtc::MediaType::VIDEO, kVp9CodecName); + ASSERT_TRUE(vp9_codec_capability); + + RtpTransceiverInit init; + RtpEncodingParameters rid1; + rid1.rid = "1"; + rid1.codec = *vp8_codec_capability; + init.send_encodings.push_back(rid1); + RtpEncodingParameters rid2; + rid2.rid = "2"; + rid2.codec = *vp9_codec_capability; + init.send_encodings.push_back(rid2); + + auto transceiver = pc->AddTransceiver(webrtc::MediaType::VIDEO, init); + auto offer = pc->CreateOffer(); + auto& offer_contents = offer->description()->contents(); + auto send_codecs = offer_contents[0].media_description()->codecs(); + // Verify that the serialized SDP includes pt=. + std::string sdp; + offer->ToString(&sdp); + const Codec* vp8_send_codec = nullptr; + const Codec* vp9_send_codec = nullptr; + for (auto& codec : send_codecs) { + if (codec.name == vp8_codec_capability->name && !vp8_send_codec) { + vp8_send_codec = &codec; + } + if (codec.name == vp9_codec_capability->name && !vp9_send_codec) { + vp9_send_codec = &codec; + } + } + ASSERT_TRUE(vp8_send_codec); + ASSERT_TRUE(vp9_send_codec); + EXPECT_THAT(sdp, testing::HasSubstr("a=rid:1 send pt=" + + std::to_string(vp8_send_codec->id))); + EXPECT_THAT(sdp, testing::HasSubstr("a=rid:2 send pt=" + + std::to_string(vp9_send_codec->id))); + // Verify that SDP containing pt= can be parsed correctly. 
+ auto offer2 = CreateSessionDescription(SdpType::kOffer, sdp); + auto& offer_contents2 = offer2->description()->contents(); + auto send_rids2 = offer_contents2[0].media_description()->streams()[0].rids(); + EXPECT_EQ(send_rids2[0].codecs.size(), 1u); + EXPECT_EQ(send_rids2[0].codecs[0], *vp8_send_codec); + EXPECT_EQ(send_rids2[1].codecs.size(), 1u); + EXPECT_EQ(send_rids2[1].codecs[0], *vp9_send_codec); +} + +TEST_F(SdpOfferAnswerTest, SimulcastAnswerWithPayloadType) { + auto pc = CreatePeerConnection( + FieldTrials::CreateNoGlobal("WebRTC-MixedCodecSimulcast/Enabled/")); + + // A SDP offer with recv simulcast with payload type + std::string sdp = + "v=0\r\n" + "o=- 4131505339648218884 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=ice-ufrag:zGWFZ+fVXDeN6UoI/136\r\n" + "a=ice-pwd:9AUNgUqRNI5LSIrC1qFD2iTR\r\n" + "a=fingerprint:sha-256 " + "AD:52:52:E0:B1:37:34:21:0E:15:8E:B7:56:56:7B:B4:39:0E:6D:1C:F5:84:A7:EE:" + "B5:27:3E:30:B1:7D:69:42\r\n" + "a=setup:passive\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 96 97\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp:9 IN IP4 0.0.0.0\r\n" + "a=mid:0\r\n" + "a=extmap:9 urn:ietf:params:rtp-hdrext:sdes:mid\r\n" + "a=extmap:10 urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id\r\n" + "a=recvonly\r\n" + "a=rtcp-mux\r\n" + "a=rtcp-rsize\r\n" + "a=rtpmap:96 VP8/90000\r\n" + "a=rtpmap:97 VP9/90000\r\n" + "a=rid:1 recv pt=96\r\n" + "a=rid:2 recv pt=97\r\n" + "a=simulcast:recv 1;2\r\n"; + + auto offer = CreateSessionDescription(SdpType::kOffer, sdp); + EXPECT_TRUE(pc->SetRemoteDescription(std::move(offer))); + + auto transceiver = pc->pc()->GetTransceivers()[0]; + EXPECT_TRUE( + transceiver->SetDirectionWithError(RtpTransceiverDirection::kSendOnly) + .ok()); + + // Check the generated SDP. + auto answer = pc->CreateAnswer(); + answer->ToString(&sdp); + EXPECT_THAT(sdp, testing::HasSubstr("a=rid:1 send pt=96\r\n")); + EXPECT_THAT(sdp, testing::HasSubstr("a=rid:2 send pt=97\r\n")); + + EXPECT_TRUE(pc->SetLocalDescription(std::move(answer))); +} + TEST_F(SdpOfferAnswerTest, ExpectAllSsrcsSpecifiedInSsrcGroupFid) { auto pc = CreatePeerConnection(); std::string sdp = @@ -829,7 +967,7 @@ TEST_F(SdpOfferAnswerTest, ASSERT_EQ(video_stream.ssrc_groups.size(), 1u); video_stream.ssrcs[1] = audio_ssrc; video_stream.ssrc_groups[0].ssrcs[1] = audio_ssrc; - video_stream.ssrc_groups[0].semantics = cricket::kSimSsrcGroupSemantics; + video_stream.ssrc_groups[0].semantics = kSimSsrcGroupSemantics; std::string sdp; offer->ToString(&sdp); @@ -839,11 +977,11 @@ TEST_F(SdpOfferAnswerTest, size_t end = sdp.rfind("\r\n"); end = sdp.rfind("\r\n", end - 2); end = sdp.rfind("\r\n", end - 2); - EXPECT_EQ(sdp.substr(end + 2), "a=ssrc:" + rtc::ToString(audio_ssrc) + + EXPECT_EQ(sdp.substr(end + 2), "a=ssrc:" + absl::StrCat(audio_ssrc) + " cname:" + video_stream.cname + "\r\n" "a=ssrc:" + - rtc::ToString(audio_ssrc) + + absl::StrCat(audio_ssrc) + " msid:- video_track\r\n"); auto modified_offer = @@ -870,7 +1008,7 @@ TEST_F(SdpOfferAnswerTest, ASSERT_EQ(video_stream.ssrc_groups.size(), 1u); video_stream.ssrcs.push_back(audio_ssrc); video_stream.ssrc_groups[0].ssrcs.push_back(audio_ssrc); - video_stream.ssrc_groups[0].semantics = cricket::kSimSsrcGroupSemantics; + video_stream.ssrc_groups[0].semantics = kSimSsrcGroupSemantics; std::string sdp; offer->ToString(&sdp); @@ -880,11 +1018,11 @@ TEST_F(SdpOfferAnswerTest, size_t end = sdp.rfind("\r\n"); end = sdp.rfind("\r\n", end - 2); end = sdp.rfind("\r\n", end - 2); - EXPECT_EQ(sdp.substr(end + 2), "a=ssrc:" + rtc::ToString(audio_ssrc) + + 
EXPECT_EQ(sdp.substr(end + 2), "a=ssrc:" + absl::StrCat(audio_ssrc) + " cname:" + video_stream.cname + "\r\n" "a=ssrc:" + - rtc::ToString(audio_ssrc) + + absl::StrCat(audio_ssrc) + " msid:- video_track\r\n"); auto modified_offer = @@ -910,7 +1048,7 @@ TEST_F(SdpOfferAnswerTest, AllowOnlyOneSsrcGroupPerSemanticAndPrimarySsrc) { ASSERT_EQ(video_stream.ssrc_groups.size(), 1u); video_stream.ssrcs.push_back(audio_ssrc); video_stream.ssrc_groups.push_back( - {cricket::kFidSsrcGroupSemantics, {video_stream.ssrcs[0], audio_ssrc}}); + {kFidSsrcGroupSemantics, {video_stream.ssrcs[0], audio_ssrc}}); std::string sdp; offer->ToString(&sdp); @@ -920,11 +1058,11 @@ TEST_F(SdpOfferAnswerTest, AllowOnlyOneSsrcGroupPerSemanticAndPrimarySsrc) { size_t end = sdp.rfind("\r\n"); end = sdp.rfind("\r\n", end - 2); end = sdp.rfind("\r\n", end - 2); - EXPECT_EQ(sdp.substr(end + 2), "a=ssrc:" + rtc::ToString(audio_ssrc) + + EXPECT_EQ(sdp.substr(end + 2), "a=ssrc:" + absl::StrCat(audio_ssrc) + " cname:" + video_stream.cname + "\r\n" "a=ssrc:" + - rtc::ToString(audio_ssrc) + + absl::StrCat(audio_ssrc) + " msid:- video_track\r\n"); auto modified_offer = @@ -961,5 +1099,1605 @@ TEST_F(SdpOfferAnswerTest, OfferWithRtxAndNoMsidIsNotRejected) { EXPECT_TRUE(pc->SetRemoteDescription(std::move(offer))); } ->>>>>>> 5993_88 +TEST_F(SdpOfferAnswerTest, RejectsAnswerWithInvalidTransport) { + auto pc1 = CreatePeerConnection(); + pc1->AddAudioTrack("audio_track", {}); + auto pc2 = CreatePeerConnection(); + pc2->AddAudioTrack("anotheraudio_track", {}); + + auto initial_offer = pc1->CreateOfferAndSetAsLocal(); + ASSERT_EQ(initial_offer->description()->contents().size(), 1u); + auto mid = initial_offer->description()->contents()[0].mid(); + + EXPECT_TRUE(pc2->SetRemoteDescription(std::move(initial_offer))); + auto initial_answer = pc2->CreateAnswerAndSetAsLocal(); + + std::string sdp; + initial_answer->ToString(&sdp); + EXPECT_TRUE(pc1->SetRemoteDescription(std::move(initial_answer))); + + auto transceivers = pc1->pc()->GetTransceivers(); + ASSERT_EQ(transceivers.size(), 1u); + // This stops the only transport. + transceivers[0]->StopStandard(); + + auto subsequent_offer = pc1->CreateOfferAndSetAsLocal(); + // But the remote answers with a non-rejected m-line which is not valid. + auto bad_answer = CreateSessionDescription( + SdpType::kAnswer, + absl::StrReplaceAll(sdp, {{"a=group:BUNDLE " + mid + "\r\n", ""}})); + + RTCError error; + pc1->SetRemoteDescription(std::move(bad_answer), &error); + EXPECT_FALSE(error.ok()); + EXPECT_EQ(error.type(), RTCErrorType::INVALID_PARAMETER); +} + +TEST_F(SdpOfferAnswerTest, SdpMungingWithInvalidPayloadTypeIsRejected) { + auto pc = CreatePeerConnection(); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + ASSERT_EQ(offer->description()->contents().size(), 1u); + auto* audio = offer->description()->contents()[0].media_description(); + ASSERT_GT(audio->codecs().size(), 0u); + EXPECT_TRUE(audio->rtcp_mux()); + auto codecs = audio->codecs(); + for (int invalid_payload_type = 64; invalid_payload_type < 96; + invalid_payload_type++) { + codecs[0].id = + invalid_payload_type; // The range [64-95] is disallowed with rtcp_mux. + audio->set_codecs(codecs); + // ASSERT to avoid getting into a bad state. 
+ ASSERT_FALSE(pc->SetLocalDescription(offer->Clone())); + ASSERT_FALSE(pc->SetRemoteDescription(offer->Clone())); + } +} + +TEST_F(SdpOfferAnswerTest, MsidSignalingInSubsequentOfferAnswer) { + auto pc = CreatePeerConnection(); + pc->AddAudioTrack("audio_track", {}); + + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=msid-semantic: WMS\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=audio 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp:9 IN IP4 0.0.0.0\r\n" + "a=recvonly\r\n" + "a=rtcp-mux\r\n" + "a=rtpmap:111 opus/48000/2\r\n"; + + auto offer = CreateSessionDescription(SdpType::kOffer, sdp); + EXPECT_TRUE(pc->SetRemoteDescription(std::move(offer))); + + // Check the generated SDP. + auto answer = pc->CreateAnswer(); + answer->ToString(&sdp); + EXPECT_NE(std::string::npos, sdp.find("a=msid:- audio_track\r\n")); + + EXPECT_TRUE(pc->SetLocalDescription(std::move(answer))); + + // Check the local description object. + auto local_description = pc->pc()->local_description(); + ASSERT_EQ(local_description->description()->contents().size(), 1u); + auto streams = local_description->description() + ->contents()[0] + .media_description() + ->streams(); + ASSERT_EQ(streams.size(), 1u); + EXPECT_EQ(streams[0].id, "audio_track"); + + // Check the serialization of the local description. + local_description->ToString(&sdp); + EXPECT_NE(std::string::npos, sdp.find("a=msid:- audio_track\r\n")); +} + +// Regression test for crbug.com/328522463 +// where the stream parameters got recreated which changed the ssrc. +TEST_F(SdpOfferAnswerTest, MsidSignalingUnknownRespondsWithMsidAndKeepsSsrc) { + auto pc = CreatePeerConnection(); + pc->AddAudioTrack("audio_track", {"default"}); + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=group:BUNDLE 0\r\n" + // "a=msid-semantic: WMS *\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "m=audio 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp:9 IN IP4 0.0.0.0\r\n" + "a=recvonly\r\n" + "a=rtcp-mux\r\n" + "a=mid:0\r\n" + "a=rtpmap:111 opus/48000/2\r\n"; + + auto offer = CreateSessionDescription(SdpType::kOffer, sdp); + EXPECT_TRUE(pc->SetRemoteDescription(std::move(offer))); + auto first_transceiver = pc->pc()->GetTransceivers()[0]; + EXPECT_TRUE(first_transceiver + ->SetDirectionWithError(RtpTransceiverDirection::kSendOnly) + .ok()); + // Check the generated *serialized* SDP. 
+ auto answer = pc->CreateAnswer();
+ const auto& answer_contents = answer->description()->contents();
+ ASSERT_EQ(answer_contents.size(), 1u);
+ auto answer_streams = answer_contents[0].media_description()->streams();
+ ASSERT_EQ(answer_streams.size(), 1u);
+ std::string first_stream_serialized = answer_streams[0].ToString();
+ uint32_t first_ssrc = answer_contents[0].media_description()->first_ssrc();
+
+ answer->ToString(&sdp);
+ EXPECT_TRUE(
+ pc->SetLocalDescription(CreateSessionDescription(SdpType::kAnswer, sdp)));
+
+ auto reoffer = pc->CreateOffer();
+ const auto& offer_contents = reoffer->description()->contents();
+ ASSERT_EQ(offer_contents.size(), 1u);
+
+ auto offer_streams = offer_contents[0].media_description()->streams();
+ ASSERT_EQ(offer_streams.size(), 1u);
+ std::string second_stream_serialized = offer_streams[0].ToString();
+ uint32_t second_ssrc = offer_contents[0].media_description()->first_ssrc();
+
+ EXPECT_EQ(first_ssrc, second_ssrc);
+ EXPECT_EQ(first_stream_serialized, second_stream_serialized);
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(reoffer)));
+}
+
+// Runs for each payload type in the valid dynamic ranges.
+class SdpOfferAnswerWithPayloadTypeTest
+ : public SdpOfferAnswerTest,
+ public testing::WithParamInterface<int> {
+ public:
+ static std::vector<int> GetAllPayloadTypesInValidDynamicRange() {
+ std::vector<int> payload_types;
+ // The lower range is [35, 63].
+ for (int pt = 35; pt <= 63; ++pt) {
+ payload_types.push_back(pt);
+ }
+ // The upper range is [96, 127].
+ for (int pt = 96; pt <= 127; ++pt) {
+ payload_types.push_back(pt);
+ }
+ return payload_types;
+ }
+};
+
+TEST_P(SdpOfferAnswerWithPayloadTypeTest,
+ FollowUpOfferDoesNotRepurposePayloadType) {
+ int payload_type = GetParam();
+ std::string payload_type_str = absl::StrCat(payload_type);
+
+ auto pc = CreatePeerConnection();
+ std::string sdp =
+ "v=0\r\n"
+ "o=- 8506393630701383055 2 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "a=group:BUNDLE 0\r\n"
+ "a=extmap-allow-mixed\r\n"
+ "a=msid-semantic: WMS\r\n"
+ "m=video 9 UDP/TLS/RTP/SAVPF " +
+ payload_type_str +
+ "\r\n"
+ "c=IN IP4 0.0.0.0\r\n"
+ "a=rtcp:9 IN IP4 0.0.0.0\r\n"
+ "a=ice-ufrag:7ZPs\r\n"
+ "a=ice-pwd:3/ZaqZrZaVzg1Tfju5x3CGeJ\r\n"
+ "a=ice-options:trickle\r\n"
+ "a=fingerprint:sha-256 7D:29:C5:B8:D2:30:57:F3:0D:CA:0A:8E:4B:6A:AE:53:26"
+ ":9F:14:DF:47:8E:0C:A3:EC:8D:B1:71:B5:D5:5A:9C\r\n"
+ "a=setup:actpass\r\n"
+ "a=mid:0\r\n"
+ "a=extmap:9 urn:ietf:params:rtp-hdrext:sdes:mid\r\n"
+ "a=sendrecv\r\n"
+ "a=msid:- e2628265-b712-40de-81c9-76d49b7079a0\r\n"
+ "a=rtcp-mux\r\n"
+ "a=rtcp-rsize\r\n"
+ "a=rtpmap:" +
+ payload_type_str +
+ " VP9/90000\r\n"
+ "a=rtcp-fb:" +
+ payload_type_str +
+ " goog-remb\r\n"
+ "a=rtcp-fb:" +
+ payload_type_str +
+ " transport-cc\r\n"
+ "a=rtcp-fb:" +
+ payload_type_str +
+ " ccm fir\r\n"
+ "a=rtcp-fb:" +
+ payload_type_str +
+ " nack\r\n"
+ "a=rtcp-fb:" +
+ payload_type_str +
+ " nack pli\r\n"
+ "a=fmtp:" +
+ payload_type_str +
+ " profile-id=0\r\n"
+ "a=ssrc:2245042191 cname:A206VC6FXsn47EwJ\r\n"
+ "a=ssrc:2245042191 msid:- e2628265-b712-40de-81c9-76d49b7079a0\r\n";
+
+ // Set remote offer with given PT for VP9.
+ EXPECT_TRUE(
+ pc->SetRemoteDescription(CreateSessionDescription(SdpType::kOffer, sdp)));
+ // The answer should accept the PT for VP9.
+ auto answer = pc->CreateAnswer();
+ {
+ const auto* mid_0 = answer->description()->GetContentDescriptionByName("0");
+ ASSERT_TRUE(mid_0);
+ ASSERT_THAT(mid_0->codecs(), SizeIs(1));
+ const auto& codec = mid_0->codecs()[0];
+ EXPECT_EQ(codec.name, "VP9");
+ EXPECT_EQ(codec.id, payload_type);
+ std::string param;
+ EXPECT_TRUE(codec.GetParam("profile-id", &param));
+ EXPECT_EQ(param, "0");
+ }
+
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(answer)));
+ // The follow-up offer should continue to use the same PT for VP9.
+ auto offer = pc->CreateOffer();
+ {
+ const auto* mid_0 = offer->description()->GetContentDescriptionByName("0");
+ ASSERT_TRUE(mid_0);
+ // We should have more codecs to offer than the one previously negotiated.
+ const auto& codecs = mid_0->codecs();
+ ASSERT_GT(codecs.size(), 1u);
+ // The previously negotiated PT should still map to the same VP9 codec.
+ auto it = std::find_if(
+ codecs.begin(), codecs.end(),
+ [&](const Codec& codec) { return codec.id == payload_type; });
+ ASSERT_TRUE(it != codecs.end());
+ const auto& vp9_codec = *it;
+ EXPECT_EQ(vp9_codec.name, "VP9");
+ EXPECT_EQ(vp9_codec.id, payload_type);
+ std::string param;
+ EXPECT_TRUE(vp9_codec.GetParam("profile-id", &param));
+ EXPECT_EQ(param, "0");
+ // None of the other codecs should collide with our VP9 PT.
+ for (const auto& codec : codecs) {
+ if (codec == vp9_codec) {
+ continue;
+ }
+ EXPECT_NE(codec.id, vp9_codec.id);
+ }
+ }
+ // Last sanity check: it's always possible to set an unmunged local offer.
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(offer)));
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ SdpOfferAnswerWithPayloadTypeTest,
+ SdpOfferAnswerWithPayloadTypeTest,
+ ::testing::ValuesIn(SdpOfferAnswerWithPayloadTypeTest::
+ GetAllPayloadTypesInValidDynamicRange()),
+ ::testing::PrintToStringParamName());
+
+// Test variant with boolean order for audio-video and video-audio.
+class SdpOfferAnswerShuffleMediaTypes
+ : public SdpOfferAnswerTest,
+ public testing::WithParamInterface<bool> {
+ public:
+ SdpOfferAnswerShuffleMediaTypes() : SdpOfferAnswerTest() {}
+};
+
+TEST_P(SdpOfferAnswerShuffleMediaTypes,
+ RecyclingWithDifferentKindAndSameMidFailsAnswer) {
+ bool audio_first = GetParam();
+ auto pc1 = CreatePeerConnection();
+ auto pc2 = CreatePeerConnection();
+ if (audio_first) {
+ pc1->AddAudioTrack("audio_track", {});
+ pc2->AddVideoTrack("video_track", {});
+ } else {
+ pc2->AddAudioTrack("audio_track", {});
+ pc1->AddVideoTrack("video_track", {});
+ }
+
+ auto initial_offer = pc1->CreateOfferAndSetAsLocal();
+ ASSERT_EQ(initial_offer->description()->contents().size(), 1u);
+ auto mid1 = initial_offer->description()->contents()[0].mid();
+ std::string rejected_answer_sdp =
+ "v=0\r\n"
+ "o=- 8621259572628890423 2 IN IP4 127.0.0.1\r\n"
+ "s=-\r\n"
+ "t=0 0\r\n"
+ "m=" +
+ std::string(audio_first ? "audio" : "video") +
+ " 0 UDP/TLS/RTP/SAVPF 111\r\n"
+ "c=IN IP4 0.0.0.0\r\n";
+ auto rejected_answer =
+ CreateSessionDescription(SdpType::kAnswer, rejected_answer_sdp);
+ EXPECT_TRUE(pc1->SetRemoteDescription(std::move(rejected_answer)));
+
+ auto offer =
+ pc2->CreateOfferAndSetAsLocal(); // This will generate a mid=0 too
+ ASSERT_EQ(offer->description()->contents().size(), 1u);
+ auto mid2 = offer->description()->contents()[0].mid();
+ EXPECT_EQ(mid1, mid2); // Check that the mids collided.
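+ // The offer with the reused mid applies, but answering for a mid whose
+ // media type changed must fail.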
+ EXPECT_TRUE(pc1->SetRemoteDescription(std::move(offer))); + auto answer = pc1->CreateAnswer(); + EXPECT_FALSE(pc1->SetLocalDescription(std::move(answer))); +} + +// Similar to the previous test but with implicit rollback and creating +// an offer, triggering a different codepath. +TEST_P(SdpOfferAnswerShuffleMediaTypes, + RecyclingWithDifferentKindAndSameMidFailsOffer) { + bool audio_first = GetParam(); + auto pc1 = CreatePeerConnection(); + auto pc2 = CreatePeerConnection(); + if (audio_first) { + pc1->AddAudioTrack("audio_track", {}); + pc2->AddVideoTrack("video_track", {}); + } else { + pc2->AddAudioTrack("audio_track", {}); + pc1->AddVideoTrack("video_track", {}); + } + + auto initial_offer = pc1->CreateOfferAndSetAsLocal(); + ASSERT_EQ(initial_offer->description()->contents().size(), 1u); + auto mid1 = initial_offer->description()->contents()[0].mid(); + std::string rejected_answer_sdp = + "v=0\r\n" + "o=- 8621259572628890423 2 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=" + + std::string(audio_first ? "audio" : "video") + + " 0 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n"; + auto rejected_answer = + CreateSessionDescription(SdpType::kAnswer, rejected_answer_sdp); + EXPECT_TRUE(pc1->SetRemoteDescription(std::move(rejected_answer))); + + auto offer = + pc2->CreateOfferAndSetAsLocal(); // This will generate a mid=0 too + ASSERT_EQ(offer->description()->contents().size(), 1u); + auto mid2 = offer->description()->contents()[0].mid(); + EXPECT_EQ(mid1, mid2); // Check that the mids collided. + EXPECT_TRUE(pc1->SetRemoteDescription(std::move(offer))); + EXPECT_FALSE(pc1->CreateOffer()); +} + +INSTANTIATE_TEST_SUITE_P(SdpOfferAnswerShuffleMediaTypes, + SdpOfferAnswerShuffleMediaTypes, + ::testing::Values(true, false)); + +TEST_F(SdpOfferAnswerTest, OfferWithNoCompatibleCodecsIsRejectedInAnswer) { + auto pc = CreatePeerConnection(); + // An offer with no common codecs. This should reject both contents + // in the answer without throwing an error. + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=audio 9 RTP/SAVPF 97\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=sendrecv\r\n" + "a=rtpmap:97 x-unknown/90000\r\n" + "a=rtcp-mux\r\n" + "m=video 9 RTP/SAVPF 98\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=sendrecv\r\n" + "a=rtpmap:98 H263-1998/90000\r\n" + "a=fmtp:98 CIF=1;QCIF=1\r\n" + "a=rtcp-mux\r\n"; + + auto desc = CreateSessionDescription(SdpType::kOffer, sdp); + ASSERT_NE(desc, nullptr); + RTCError error; + pc->SetRemoteDescription(std::move(desc), &error); + EXPECT_TRUE(error.ok()); + + auto answer = pc->CreateAnswer(); + auto answer_contents = answer->description()->contents(); + ASSERT_EQ(answer_contents.size(), 2u); + EXPECT_EQ(answer_contents[0].rejected, true); + EXPECT_EQ(answer_contents[1].rejected, true); +} + +TEST_F(SdpOfferAnswerTest, OfferWithRejectedMlineWithoutFingerprintIsAccepted) { + auto pc = CreatePeerConnection(); + // A rejected m-line without fingerprint. + // The answer does not require one. 
+ std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=audio 0 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=sendrecv\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=rtcp-mux\r\n"; + auto desc = CreateSessionDescription(SdpType::kOffer, sdp); + ASSERT_NE(desc, nullptr); + RTCError error; + pc->SetRemoteDescription(std::move(desc), &error); + EXPECT_TRUE(error.ok()); + + auto answer = pc->CreateAnswer(); + EXPECT_TRUE(pc->SetLocalDescription(std::move(answer))); +} + +TEST_F(SdpOfferAnswerTest, MidBackfillAnswer) { + auto pc = CreatePeerConnection(); + // An offer without a mid backfills the mid. This is currently + // done with a per-peerconnection counter that starts from 0. + // JSEP says to only include the mid in the answer if it was in the offer + // but due to backfill it is always present. + // TODO: https://issues.webrtc.org/issues/338529222 - don't respond with mid. + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=sendrecv\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + // "a=mid:0\r\n" + "a=rtcp-mux\r\n"; + auto desc = CreateSessionDescription(SdpType::kOffer, sdp); + ASSERT_NE(desc, nullptr); + RTCError error; + pc->SetRemoteDescription(std::move(desc), &error); + EXPECT_TRUE(error.ok()); + auto offer_contents = + pc->pc()->remote_description()->description()->contents(); + ASSERT_EQ(offer_contents.size(), 1u); + EXPECT_EQ(offer_contents[0].mid(), "0"); + auto answer = pc->CreateAnswerAndSetAsLocal(); + auto answer_contents = answer->description()->contents(); + ASSERT_EQ(answer_contents.size(), 1u); + EXPECT_EQ(answer_contents[0].mid(), offer_contents[0].mid()); +} + +TEST_F(SdpOfferAnswerTest, MidBackfillDoesNotCheckAgainstBundleGroup) { + auto pc = CreatePeerConnection(); + // An offer with a BUNDLE group specifying a mid that is not present + // in the offer. This is not rejected due to the mid being backfilled + // starting at 0. + // TODO: https://issues.webrtc.org/issues/338528603 - reject this. 
+ std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=group:BUNDLE 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=sendrecv\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + // "a=mid:0\r\n" + "a=rtcp-mux\r\n"; + auto desc = CreateSessionDescription(SdpType::kOffer, sdp); + ASSERT_NE(desc, nullptr); + RTCError error; + pc->SetRemoteDescription(std::move(desc), &error); + EXPECT_TRUE(error.ok()); + EXPECT_TRUE(pc->CreateAnswerAndSetAsLocal()); +} + +TEST_F(SdpOfferAnswerTest, ReducedSizeNegotiated) { + auto caller = CreatePeerConnection(); + auto callee = CreatePeerConnection(); + + auto audio_transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); + auto video_transceiver = caller->AddTransceiver(webrtc::MediaType::VIDEO); + + ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); + auto receivers = callee->pc()->GetReceivers(); + ASSERT_EQ(receivers.size(), 2u); + auto audio_recv_param = receivers[0]->GetParameters(); + EXPECT_TRUE(audio_recv_param.rtcp.reduced_size); + auto video_recv_param = receivers[1]->GetParameters(); + EXPECT_TRUE(video_recv_param.rtcp.reduced_size); + + auto senders = caller->pc()->GetSenders(); + ASSERT_EQ(senders.size(), 2u); + auto audio_send_param = senders[0]->GetParameters(); + EXPECT_TRUE(audio_send_param.rtcp.reduced_size); + auto video_send_param = senders[1]->GetParameters(); + EXPECT_TRUE(video_send_param.rtcp.reduced_size); +} + +TEST_F(SdpOfferAnswerTest, ReducedSizeNotNegotiated) { + auto caller = CreatePeerConnection(); + auto callee = CreatePeerConnection(); + + auto audio_transceiver = caller->AddTransceiver(webrtc::MediaType::AUDIO); + auto video_transceiver = caller->AddTransceiver(webrtc::MediaType::VIDEO); + + auto offer = caller->CreateOfferAndSetAsLocal(); + ASSERT_NE(offer, nullptr); + std::string sdp; + offer->ToString(&sdp); + // Remove rtcp-rsize attribute. + auto modified_offer = CreateSessionDescription( + SdpType::kOffer, absl::StrReplaceAll(sdp, {{"a=rtcp-rsize\r\n", ""}})); + EXPECT_TRUE(callee->SetRemoteDescription(std::move(modified_offer))); + auto answer = callee->CreateAnswerAndSetAsLocal(); + EXPECT_TRUE(caller->SetRemoteDescription(std::move(answer))); + + auto receivers = callee->pc()->GetReceivers(); + ASSERT_EQ(receivers.size(), 2u); + auto audio_recv_param = receivers[0]->GetParameters(); + EXPECT_FALSE(audio_recv_param.rtcp.reduced_size); + auto video_recv_param = receivers[1]->GetParameters(); + EXPECT_FALSE(video_recv_param.rtcp.reduced_size); + + auto senders = caller->pc()->GetSenders(); + ASSERT_EQ(senders.size(), 2u); + auto audio_send_param = senders[0]->GetParameters(); + EXPECT_FALSE(audio_send_param.rtcp.reduced_size); + auto video_send_param = senders[1]->GetParameters(); + EXPECT_FALSE(video_send_param.rtcp.reduced_size); +} + +TEST_F(SdpOfferAnswerTest, PayloadTypeMatchingWithSubsequentOfferAnswer) { + auto caller = CreatePeerConnection(); + auto callee = CreatePeerConnection(); + + // 1. Restrict codecs and set a local description and remote description. + // with a different payload type. 
+ auto video_transceiver = caller->AddTransceiver(webrtc::MediaType::VIDEO);
+ std::vector<RtpCodecCapability> codec_caps =
+ pc_factory_->GetRtpReceiverCapabilities(webrtc::MediaType::VIDEO).codecs;
+ codec_caps.erase(std::remove_if(codec_caps.begin(), codec_caps.end(),
+ [](const RtpCodecCapability& codec) {
+ return !absl::EqualsIgnoreCase(codec.name,
+ "VP8");
+ }),
+ codec_caps.end());
+ EXPECT_TRUE(video_transceiver->SetCodecPreferences(codec_caps).ok());
+
+ auto offer1 = caller->CreateOfferAndSetAsLocal();
+
+ // 2. Add an additional supported but not offered codec before SRD.
+ auto& contents = offer1->description()->contents();
+ ASSERT_EQ(contents.size(), 1u);
+ auto* media_description = contents[0].media_description();
+ ASSERT_TRUE(media_description);
+ std::vector<Codec> codecs = media_description->codecs();
+ ASSERT_EQ(codecs.size(), 1u);
+ ASSERT_NE(codecs[0].id, 127);
+ auto av1 = CreateVideoCodec(SdpVideoFormat("AV1", {}));
+ av1.id = 127;
+ codecs.insert(codecs.begin(), av1);
+ media_description->set_codecs(codecs);
+ EXPECT_TRUE(callee->SetRemoteDescription(std::move(offer1)));
+
+ auto answer1 = callee->CreateAnswerAndSetAsLocal();
+ EXPECT_TRUE(caller->SetRemoteDescription(std::move(answer1)));
+
+ // 3. SetCodecPreferences to re-enable that codec. Payload type is not
+ // matched at this point.
+ codec_caps =
+ pc_factory_->GetRtpReceiverCapabilities(webrtc::MediaType::VIDEO).codecs;
+ codec_caps.erase(
+ std::remove_if(codec_caps.begin(), codec_caps.end(),
+ [](const RtpCodecCapability& codec) {
+ return !(absl::EqualsIgnoreCase(codec.name, "VP8") ||
+ absl::EqualsIgnoreCase(codec.name, "AV1"));
+ }),
+ codec_caps.end());
+ EXPECT_TRUE(video_transceiver->SetCodecPreferences(codec_caps).ok());
+ auto offer2 = caller->CreateOffer();
+ auto& contents2 = offer2->description()->contents();
+ ASSERT_EQ(contents2.size(), 1u);
+ auto* media_description2 = contents2[0].media_description();
+ codecs = media_description2->codecs();
+ ASSERT_EQ(codecs.size(), 2u);
+ EXPECT_EQ(codecs[1].name, av1.name);
+ // At this point, the value 127 may or may not have been chosen.
+
+ // 4. O/A triggered by remote. This "locks in" the payload type.
+ auto offer3 = callee->CreateOfferAndSetAsLocal();
+ EXPECT_TRUE(caller->SetRemoteDescription(std::move(offer3)));
+ EXPECT_TRUE(caller->CreateAnswerAndSetAsLocal());
+
+ // 5. Subsequent offer has the payload type.
+ auto offer4 = caller->CreateOfferAndSetAsLocal();
+ auto& contents4 = offer4->description()->contents();
+ ASSERT_EQ(contents4.size(), 1u);
+ auto* media_description4 = contents4[0].media_description();
+ ASSERT_TRUE(media_description4);
+ codecs = media_description4->codecs();
+ ASSERT_EQ(codecs.size(), 2u);
+ EXPECT_EQ(codecs[1].name, av1.name);
+ EXPECT_EQ(codecs[1].id, av1.id);
+}
+
+class SdpOfferAnswerMungingTest : public SdpOfferAnswerTest {
+ public:
+ SdpOfferAnswerMungingTest() : SdpOfferAnswerTest() { metrics::Reset(); }
+};
+
+TEST_F(SdpOfferAnswerMungingTest, DISABLED_ReportUMAMetricsWithNoMunging) {
+ auto caller = CreatePeerConnection();
+ auto callee = CreatePeerConnection();
+
+ caller->AddTransceiver(webrtc::MediaType::AUDIO);
+ caller->AddTransceiver(webrtc::MediaType::VIDEO);
+
+ // Negotiate, gather candidates, then exchange ICE candidates.
+ ASSERT_TRUE(caller->ExchangeOfferAnswerWith(callee.get())); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kNoModification, 1))); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Answer.Initial"), + ElementsAre(Pair(SdpMungingType::kNoModification, 1))); + + EXPECT_THAT(WaitUntil([&] { return caller->IsIceGatheringDone(); }, IsTrue(), + {.timeout = kDefaultTimeout}), + IsRtcOk()); + EXPECT_THAT(WaitUntil([&] { return callee->IsIceGatheringDone(); }, IsTrue(), + {.timeout = kDefaultTimeout}), + IsRtcOk()); + for (const auto& candidate : caller->observer()->GetAllCandidates()) { + callee->pc()->AddIceCandidate(candidate); + } + for (const auto& candidate : callee->observer()->GetAllCandidates()) { + caller->pc()->AddIceCandidate(candidate); + } + EXPECT_THAT( + WaitUntil([&] { return caller->pc()->peer_connection_state(); }, + Eq(PeerConnectionInterface::PeerConnectionState::kConnected), + {.timeout = kDefaultTimeout}), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return callee->pc()->peer_connection_state(); }, + Eq(PeerConnectionInterface::PeerConnectionState::kConnected), + {.timeout = kDefaultTimeout}), + IsRtcOk()); + + caller->pc()->Close(); + callee->pc()->Close(); + + EXPECT_THAT( + metrics::Samples( + "WebRTC.PeerConnection.SdpMunging.Offer.ConnectionEstablished"), + ElementsAre(Pair(SdpMungingType::kNoModification, 1))); + EXPECT_THAT( + metrics::Samples( + "WebRTC.PeerConnection.SdpMunging.Answer.ConnectionEstablished"), + ElementsAre(Pair(SdpMungingType::kNoModification, 1))); + + EXPECT_THAT(metrics::Samples( + "WebRTC.PeerConnection.SdpMunging.Offer.ConnectionClosed"), + ElementsAre(Pair(SdpMungingType::kNoModification, 1))); + EXPECT_THAT(metrics::Samples( + "WebRTC.PeerConnection.SdpMunging.Answer.ConnectionClosed"), + ElementsAre(Pair(SdpMungingType::kNoModification, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, + InitialSetLocalDescriptionWithoutCreateOffer) { + RTCConfiguration config; + config.certificates.push_back( + FakeRTCCertificateGenerator::GenerateCertificate()); + auto pc = CreatePeerConnection(config, nullptr); + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=fingerprint:sha-1 " + "D9:AB:00:AA:12:7B:62:54:CF:AD:3B:55:F7:60:BC:F3:40:A7:0B:5B\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n"; + auto offer = CreateSessionDescription(SdpType::kOffer, sdp); + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kWithoutCreateOffer, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, + InitialSetLocalDescriptionWithoutCreateAnswer) { + RTCConfiguration config; + config.certificates.push_back( + FakeRTCCertificateGenerator::GenerateCertificate()); + auto pc = CreatePeerConnection(config, nullptr); + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=fingerprint:sha-1 " + "D9:AB:00:AA:12:7B:62:54:CF:AD:3B:55:F7:60:BC:F3:40:A7:0B:5B\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=audio 9 UDP/TLS/RTP/SAVPF 111\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendrecv\r\n" + "a=mid:0\r\n" + "a=rtpmap:111 opus/48000/2\r\n"; + auto offer = CreateSessionDescription(SdpType::kOffer, sdp); + EXPECT_TRUE(pc->SetRemoteDescription(std::move(offer))); + + 
RTCError error; + auto answer = CreateSessionDescription(SdpType::kAnswer, sdp); + answer->description()->transport_infos()[0].description.connection_role = + CONNECTIONROLE_ACTIVE; + EXPECT_TRUE(pc->SetLocalDescription(std::move(answer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Answer.Initial"), + ElementsAre(Pair(SdpMungingType::kWithoutCreateAnswer, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, IceUfrag) { + auto pc = CreatePeerConnection( + FieldTrials::CreateNoGlobal("WebRTC-NoSdpMangleUfrag/Enabled/")); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& transport_infos = offer->description()->transport_infos(); + ASSERT_EQ(transport_infos.size(), 1u); + transport_infos[0].description.ice_ufrag = + "amungediceufragthisshouldberejected"; + RTCError error; + // Ufrag is rejected. + EXPECT_FALSE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kIceUfrag, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, IceUfragCheckDisabledByFieldTrial) { + auto pc = CreatePeerConnection( + FieldTrials::CreateNoGlobal("WebRTC-NoSdpMangleUfrag/Disabled/")); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& transport_infos = offer->description()->transport_infos(); + ASSERT_EQ(transport_infos.size(), 1u); + transport_infos[0].description.ice_ufrag = + "amungediceufragthisshouldberejected"; + RTCError error; + // Ufrag is not rejected. + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kIceUfrag, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, IceUfragWithCheckDisabledForTesting) { + auto pc = CreatePeerConnection(); + pc->GetInternalPeerConnection()->DisableSdpMungingChecksForTesting(); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& transport_infos = offer->description()->transport_infos(); + ASSERT_EQ(transport_infos.size(), 1u); + transport_infos[0].description.ice_ufrag = + "amungediceufragthisshouldberejected"; + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kIceUfrag, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, IcePwdCheckDisabledByFieldTrial) { + auto pc = CreatePeerConnection( + FieldTrials::CreateNoGlobal("WebRTC-NoSdpMangleUfrag/Disabled/")); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& transport_infos = offer->description()->transport_infos(); + ASSERT_EQ(transport_infos.size(), 1u); + transport_infos[0].description.ice_pwd = "amungedicepwdthisshouldberejected"; + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kIcePwd, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, IcePwd) { + auto pc = CreatePeerConnection( + FieldTrials::CreateNoGlobal("WebRTC-NoSdpMangleUfrag/Enabled/")); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& transport_infos = offer->description()->transport_infos(); + ASSERT_EQ(transport_infos.size(), 1u); + transport_infos[0].description.ice_pwd = "amungedicepwdthisshouldberejected"; + RTCError error; + 
EXPECT_FALSE(pc->SetLocalDescription(std::move(offer), &error));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"),
+ ElementsAre(Pair(SdpMungingType::kIcePwd, 1)));
+}
+
+TEST_F(SdpOfferAnswerMungingTest, IceUfragRestrictedAddresses) {
+ RTCConfiguration config;
+ config.certificates.push_back(
+ FakeRTCCertificateGenerator::GenerateCertificate());
+ auto caller = CreatePeerConnection(
+ config,
+ FieldTrials::CreateNoGlobal("WebRTC-NoSdpMangleUfragRestrictedAddresses/"
+ "127.0.0.1:12345|127.0.0.*:23456|*:34567/"));
+ auto callee = CreatePeerConnection();
+ caller->AddAudioTrack("audio_track", {});
+ auto offer = caller->CreateOffer();
+ auto& transport_infos = offer->description()->transport_infos();
+ ASSERT_EQ(transport_infos.size(), 1u);
+ transport_infos[0].description.ice_ufrag = "amungediceufrag";
+
+ EXPECT_TRUE(caller->SetLocalDescription(offer->Clone()));
+ EXPECT_TRUE(callee->SetRemoteDescription(std::move(offer)));
+
+ auto answer = callee->CreateAnswer();
+ EXPECT_TRUE(callee->SetLocalDescription(answer->Clone()));
+ EXPECT_TRUE(caller->SetRemoteDescription(std::move(answer)));
+
+ static constexpr const char tmpl[] =
+ "candidate:a0+B/1 1 udp 2130706432 %s typ host";
+
+ // Addresses to test. First field is the address in string format,
+ // second field is the expected outcome (success or failure).
+ const std::vector<std::pair<std::string, bool>> address_tests = {
+ {"127.0.0.1:12345", false}, {"127.0.0.2:23456", false},
+ {"8.8.8.8:34567", false}, {"127.0.0.2:12345", true},
+ {"127.0.1.1:23456", true}, {"8.8.8.8:3456", true},
+ };
+
+ int num_blocked = 0;
+ for (const auto& address_test : address_tests) {
+ std::optional<RTCError> result;
+ const std::string candidate = StringFormat(
+ tmpl, absl::StrReplaceAll(address_test.first, {{":", " "}}).c_str());
+ caller->pc()->AddIceCandidate(
+ std::unique_ptr<IceCandidateInterface>(
+ CreateIceCandidate("", 0, candidate, nullptr)),
+ [&result](RTCError error) { result = error; });
+
+ ASSERT_THAT(
+ WaitUntil([&] { return result.has_value(); }, ::testing::IsTrue()),
+ IsRtcOk());
+ if (address_test.second == true) {
+ EXPECT_TRUE(result.value().ok());
+ } else {
+ std::pair<std::string, std::string> host =
+ absl::StrSplit(address_test.first, ":");
+ int port;
+ ASSERT_TRUE(absl::SimpleAtoi(host.second, &port));
+ EXPECT_FALSE(result.value().ok());
+ EXPECT_EQ(result.value().type(), RTCErrorType::UNSUPPORTED_OPERATION);
+ num_blocked++;
+ EXPECT_THAT(
+ metrics::Samples(
+ "WebRTC.PeerConnection.RestrictedCandidates.SdpMungingType"),
+ ElementsAre(Pair(SdpMungingType::kIceUfrag, num_blocked)));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.RestrictedCandidates.Port"),
+ Contains(Pair(port, 1)));
+ }
+ }
+}
+
+TEST_F(SdpOfferAnswerMungingTest, IceUfragSdpRejectedAndRestrictedAddresses) {
+ RTCConfiguration config;
+ config.certificates.push_back(
+ FakeRTCCertificateGenerator::GenerateCertificate());
+ auto caller = CreatePeerConnection(
+ config,
+ FieldTrials::CreateNoGlobal("WebRTC-NoSdpMangleUfragRestrictedAddresses/"
+ "127.0.0.1:12345|127.0.0.*:23456|*:34567/"
+ "WebRTC-NoSdpMangleUfrag/Enabled/"));
+ auto callee = CreatePeerConnection();
+ caller->AddAudioTrack("audio_track", {});
+ auto offer = caller->CreateOffer();
+ auto& transport_infos = offer->description()->transport_infos();
+ ASSERT_EQ(transport_infos.size(), 1u);
+ transport_infos[0].description.ice_ufrag = "amungediceufrag";
+
+ EXPECT_FALSE(caller->SetLocalDescription(offer->Clone()));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"),
+
ElementsAre(Pair(SdpMungingType::kIceUfrag, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, IceMode) { + auto pc = CreatePeerConnection(); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& transport_infos = offer->description()->transport_infos(); + ASSERT_EQ(transport_infos.size(), 1u); + transport_infos[0].description.ice_mode = ICEMODE_LITE; + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kIceMode, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, IceOptions) { + auto pc = CreatePeerConnection(); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& transport_infos = offer->description()->transport_infos(); + ASSERT_EQ(transport_infos.size(), 1u); + transport_infos[0].description.transport_options.push_back( + "something-unsupported"); + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kIceOptions, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, IceOptionsRenomination) { + auto pc = CreatePeerConnection(); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& transport_infos = offer->description()->transport_infos(); + ASSERT_EQ(transport_infos.size(), 1u); + transport_infos[0].description.transport_options.push_back( + ICE_OPTION_RENOMINATION); + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kIceOptionsRenomination, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, DtlsRole) { + auto pc = CreatePeerConnection(); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& transport_infos = offer->description()->transport_infos(); + ASSERT_EQ(transport_infos.size(), 1u); + transport_infos[0].description.connection_role = CONNECTIONROLE_PASSIVE; + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kDtlsSetup, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, RemoveContent) { + auto pc = CreatePeerConnection(); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& contents = offer->description()->contents(); + ASSERT_EQ(contents.size(), 1u); + auto name = contents[0].mid(); + EXPECT_TRUE(offer->description()->RemoveContentByName(contents[0].mid())); + std::string sdp; + offer->ToString(&sdp); + auto modified_offer = CreateSessionDescription( + SdpType::kOffer, + absl::StrReplaceAll(sdp, {{"a=group:BUNDLE " + name, "a=group:BUNDLE"}})); + + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(modified_offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kNumberOfContents, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, Mid) { + auto pc = CreatePeerConnection(); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& contents = offer->description()->contents(); + ASSERT_EQ(contents.size(), 1u); + std::string name(contents[0].mid()); + contents[0].set_mid("amungedmid"); + + auto& transport_infos = offer->description()->transport_infos(); + 
ASSERT_EQ(transport_infos.size(), 1u);
+ transport_infos[0].content_name = "amungedmid";
+ std::string sdp;
+ offer->ToString(&sdp);
+ auto modified_offer = CreateSessionDescription(
+ SdpType::kOffer,
+ absl::StrReplaceAll(
+ sdp, {{"a=group:BUNDLE " + name, "a=group:BUNDLE amungedmid"}}));
+
+ RTCError error;
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(modified_offer), &error));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"),
+ ElementsAre(Pair(SdpMungingType::kMid, 1)));
+}
+
+TEST_F(SdpOfferAnswerMungingTest, LegacySimulcast) {
+ auto pc = CreatePeerConnection();
+ pc->AddVideoTrack("video_track", {});
+
+ auto offer = pc->CreateOffer();
+ auto& contents = offer->description()->contents();
+ ASSERT_EQ(contents.size(), 1u);
+ auto* media_description = contents[0].media_description();
+ ASSERT_TRUE(media_description);
+ uint32_t ssrc = media_description->first_ssrc();
+ ASSERT_EQ(media_description->streams().size(), 1u);
+ const std::string& cname = media_description->streams()[0].cname;
+
+ std::string sdp;
+ offer->ToString(&sdp);
+ sdp += "a=ssrc-group:SIM " + absl::StrCat(ssrc) + " " +
+ absl::StrCat(ssrc + 1) + "\r\n" + //
+ "a=ssrc-group:FID " + absl::StrCat(ssrc + 1) + " " +
+ absl::StrCat(ssrc + 2) + "\r\n" + //
+ "a=ssrc:" + absl::StrCat(ssrc + 1) + " msid:- video_track\r\n" + //
+ "a=ssrc:" + absl::StrCat(ssrc + 1) + " cname:" + cname + "\r\n" + //
+ "a=ssrc:" + absl::StrCat(ssrc + 2) + " msid:- video_track\r\n" + //
+ "a=ssrc:" + absl::StrCat(ssrc + 2) + " cname:" + cname + "\r\n";
+ auto modified_offer = CreateSessionDescription(SdpType::kOffer, sdp);
+ RTCError error;
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(modified_offer), &error));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"),
+ ElementsAre(Pair(SdpMungingType::kVideoCodecsLegacySimulcast, 1)));
+}
+
+#ifdef WEBRTC_USE_H264
+TEST_F(SdpOfferAnswerMungingTest, H264SpsPpsIdrInKeyFrame) {
+ auto pc = CreatePeerConnection();
+ pc->AddVideoTrack("video_track", {});
+
+ auto offer = pc->CreateOffer();
+ auto& contents = offer->description()->contents();
+ ASSERT_EQ(contents.size(), 1u);
+ auto* media_description = contents[0].media_description();
+ ASSERT_TRUE(media_description);
+ std::vector<Codec> codecs = media_description->codecs();
+ for (auto& codec : codecs) {
+ if (codec.name == webrtc::kH264CodecName) {
+ codec.SetParam(webrtc::kH264FmtpSpsPpsIdrInKeyframe,
+ webrtc::kParamValueTrue);
+ }
+ }
+ media_description->set_codecs(codecs);
+ RTCError error;
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"),
+ ElementsAre(
+ Pair(SdpMungingType::kVideoCodecsFmtpH264SpsPpsIdrInKeyframe, 1)));
+}
+#endif // WEBRTC_USE_H264
+
+TEST_F(SdpOfferAnswerMungingTest, OpusStereo) {
+ auto pc = CreatePeerConnection();
+ pc->AddAudioTrack("audio_track", {});
+
+ auto offer = pc->CreateOffer();
+ auto& contents = offer->description()->contents();
+ ASSERT_EQ(contents.size(), 1u);
+ auto* media_description = contents[0].media_description();
+ ASSERT_TRUE(media_description);
+ std::vector<Codec> codecs = media_description->codecs();
+ for (auto& codec : codecs) {
+ if (codec.name == kOpusCodecName) {
+ codec.SetParam(kCodecParamStereo, kParamValueTrue);
+ }
+ }
+ media_description->set_codecs(codecs);
+ RTCError error;
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"),
+ ElementsAre(Pair(SdpMungingType::kAudioCodecsFmtpOpusStereo, 1)));
+}
+
+TEST_F(SdpOfferAnswerMungingTest, OpusFec) {
+ auto pc = CreatePeerConnection();
+ pc->AddAudioTrack("audio_track", {});
+
+ auto offer = pc->CreateOffer();
+ auto& contents = offer->description()->contents();
+ ASSERT_EQ(contents.size(), 1u);
+ auto* media_description = contents[0].media_description();
+ ASSERT_TRUE(media_description);
+ std::vector<Codec> codecs = media_description->codecs();
+ for (auto& codec : codecs) {
+ if (codec.name == kOpusCodecName) {
+ // Enabled by default so we need to remove the parameter.
+ EXPECT_TRUE(codec.RemoveParam(kCodecParamUseInbandFec));
+ }
+ }
+ media_description->set_codecs(codecs);
+ RTCError error;
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"),
+ ElementsAre(Pair(SdpMungingType::kAudioCodecsFmtpOpusFec, 1)));
+}
+
+TEST_F(SdpOfferAnswerMungingTest, OpusDtx) {
+ auto pc = CreatePeerConnection();
+ pc->AddAudioTrack("audio_track", {});
+
+ auto offer = pc->CreateOffer();
+ auto& contents = offer->description()->contents();
+ ASSERT_EQ(contents.size(), 1u);
+ auto* media_description = contents[0].media_description();
+ ASSERT_TRUE(media_description);
+ std::vector<Codec> codecs = media_description->codecs();
+ for (auto& codec : codecs) {
+ if (codec.name == kOpusCodecName) {
+ codec.SetParam(kCodecParamUseDtx, kParamValueTrue);
+ }
+ }
+ media_description->set_codecs(codecs);
+ RTCError error;
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"),
+ ElementsAre(Pair(SdpMungingType::kAudioCodecsFmtpOpusDtx, 1)));
+}
+
+TEST_F(SdpOfferAnswerMungingTest, OpusCbr) {
+ auto pc = CreatePeerConnection();
+ pc->AddAudioTrack("audio_track", {});
+
+ auto offer = pc->CreateOffer();
+ auto& contents = offer->description()->contents();
+ ASSERT_EQ(contents.size(), 1u);
+ auto* media_description = contents[0].media_description();
+ ASSERT_TRUE(media_description);
+ std::vector<Codec> codecs = media_description->codecs();
+ for (auto& codec : codecs) {
+ if (codec.name == kOpusCodecName) {
+ codec.SetParam(kCodecParamCbr, kParamValueTrue);
+ }
+ }
+ media_description->set_codecs(codecs);
+ RTCError error;
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"),
+ ElementsAre(Pair(SdpMungingType::kAudioCodecsFmtpOpusCbr, 1)));
+}
+
+TEST_F(SdpOfferAnswerMungingTest, AudioCodecsRemoved) {
+ auto pc = CreatePeerConnection();
+ pc->AddAudioTrack("audio_track", {});
+
+ auto offer = pc->CreateOffer();
+ auto& contents = offer->description()->contents();
+ ASSERT_EQ(contents.size(), 1u);
+ auto* media_description = contents[0].media_description();
+ ASSERT_TRUE(media_description);
+ std::vector<Codec> codecs = media_description->codecs();
+ codecs.pop_back();
+ media_description->set_codecs(codecs);
+ RTCError error;
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"),
+ ElementsAre(Pair(SdpMungingType::kAudioCodecsRemoved, 1)));
+}
+
+TEST_F(SdpOfferAnswerMungingTest, AudioCodecsAdded) {
+ auto pc = CreatePeerConnection();
+ pc->AddAudioTrack("audio_track", {});
+
+ auto offer = pc->CreateOffer();
+ auto& contents = offer->description()->contents();
+ ASSERT_EQ(contents.size(), 1u);
+ auto* media_description = contents[0].media_description();
+ ASSERT_TRUE(media_description);
+ std::vector<Codec> codecs = media_description->codecs();
+ auto codec = CreateAudioCodec(SdpAudioFormat("pcmu", 8000, 1, {}));
+ codec.id = 19; // IANA reserved payload type, should not conflict.
+ codecs.push_back(codec);
+ media_description->set_codecs(codecs);
+ RTCError error;
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"),
+ ElementsAre(Pair(SdpMungingType::kAudioCodecsAdded, 1)));
+}
+
+TEST_F(SdpOfferAnswerMungingTest, VideoCodecsRemoved) {
+ auto pc = CreatePeerConnection();
+ pc->AddVideoTrack("video_track", {});
+
+ auto offer = pc->CreateOffer();
+ auto& contents = offer->description()->contents();
+ ASSERT_EQ(contents.size(), 1u);
+ auto* media_description = contents[0].media_description();
+ ASSERT_TRUE(media_description);
+ std::vector<Codec> codecs = media_description->codecs();
+ codecs.pop_back();
+ media_description->set_codecs(codecs);
+ RTCError error;
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"),
+ ElementsAre(Pair(SdpMungingType::kVideoCodecsRemoved, 1)));
+}
+
+TEST_F(SdpOfferAnswerMungingTest, VideoCodecsAdded) {
+ auto pc = CreatePeerConnection();
+ pc->AddVideoTrack("video_track", {});
+
+ auto offer = pc->CreateOffer();
+ auto& contents = offer->description()->contents();
+ ASSERT_EQ(contents.size(), 1u);
+ auto* media_description = contents[0].media_description();
+ ASSERT_TRUE(media_description);
+ std::vector<Codec> codecs = media_description->codecs();
+ auto codec = CreateVideoCodec(SdpVideoFormat("VP8", {}));
+ codec.id = 19; // IANA reserved payload type, should not conflict.
+ codecs.push_back(codec);
+ media_description->set_codecs(codecs);
+ RTCError error;
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"),
+ ElementsAre(Pair(SdpMungingType::kVideoCodecsAdded, 1)));
+}
+
+TEST_F(SdpOfferAnswerMungingTest, MultiOpus) {
+ auto pc = CreatePeerConnection();
+ pc->AddAudioTrack("audio_track", {});
+
+ auto offer = pc->CreateOffer();
+ auto& contents = offer->description()->contents();
+ ASSERT_EQ(contents.size(), 1u);
+ auto* media_description = contents[0].media_description();
+ ASSERT_TRUE(media_description);
+ std::vector<Codec> codecs = media_description->codecs();
+ auto multiopus =
+ CreateAudioCodec(SdpAudioFormat("multiopus", 48000, 4,
+ {{"channel_mapping", "0,1,2,3"},
+ {"coupled_streams", "2"},
+ {"num_streams", "2"}}));
+ multiopus.id = 19; // IANA reserved payload type, should not conflict.
+ codecs.push_back(multiopus);
+ media_description->set_codecs(codecs);
+ RTCError error;
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"),
+ ElementsAre(Pair(SdpMungingType::kAudioCodecsAddedMultiOpus, 1)));
+}
+
+TEST_F(SdpOfferAnswerMungingTest, L16) {
+ auto pc = CreatePeerConnection();
+ pc->AddAudioTrack("audio_track", {});
+
+ auto offer = pc->CreateOffer();
+ auto& contents = offer->description()->contents();
+ ASSERT_EQ(contents.size(), 1u);
+ auto* media_description = contents[0].media_description();
+ ASSERT_TRUE(media_description);
+ std::vector<Codec> codecs = media_description->codecs();
+ auto l16 = CreateAudioCodec(SdpAudioFormat("L16", 48000, 2, {}));
+ l16.id = 19; // IANA reserved payload type, should not conflict.
+ codecs.push_back(l16);
+ media_description->set_codecs(codecs);
+ RTCError error;
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"),
+ ElementsAre(Pair(SdpMungingType::kAudioCodecsAddedL16, 1)));
+}
+
+TEST_F(SdpOfferAnswerMungingTest, AudioSsrc) {
+ // Note: same applies to video but is harder to write since one needs to
+ // modify the ssrc-group too.
+ auto pc = CreatePeerConnection();
+ pc->AddAudioTrack("audio_track", {});
+
+ auto offer = pc->CreateOffer();
+ auto& contents = offer->description()->contents();
+ ASSERT_EQ(contents.size(), 1u);
+ auto* media_description = contents[0].media_description();
+ ASSERT_TRUE(media_description);
+ ASSERT_EQ(media_description->streams().size(), 1u);
+ media_description->mutable_streams()[0].ssrcs[0] = 4404;
+
+ RTCError error;
+ EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error));
+ EXPECT_THAT(
+ metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"),
+ ElementsAre(Pair(SdpMungingType::kSsrcs, 1)));
+}
+
+TEST_F(SdpOfferAnswerMungingTest, HeaderExtensionAdded) {
+ auto pc = CreatePeerConnection();
+ pc->AddVideoTrack("video_track", {});
+
+ auto offer = pc->CreateOffer();
+ auto& contents = offer->description()->contents();
+ ASSERT_EQ(contents.size(), 1u);
+ auto* media_description = contents[0].media_description();
+ ASSERT_TRUE(media_description);
+ // VLA is off by default, id=42 should be unused.
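+ // Adding an extension that CreateOffer did not include is detected as
+ // kRtpHeaderExtensionAdded.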
+ media_description->AddRtpHeaderExtension( + {RtpExtension::kVideoLayersAllocationUri, 42}); + + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kRtpHeaderExtensionAdded, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, HeaderExtensionRemoved) { + auto pc = CreatePeerConnection(); + pc->AddVideoTrack("video_track", {}); + + auto offer = pc->CreateOffer(); + auto& contents = offer->description()->contents(); + ASSERT_EQ(contents.size(), 1u); + auto* media_description = contents[0].media_description(); + ASSERT_TRUE(media_description); + media_description->ClearRtpHeaderExtensions(); + + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kRtpHeaderExtensionRemoved, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, HeaderExtensionModified) { + auto pc = CreatePeerConnection(); + pc->AddVideoTrack("video_track", {}); + + auto offer = pc->CreateOffer(); + auto& contents = offer->description()->contents(); + ASSERT_EQ(contents.size(), 1u); + auto* media_description = contents[0].media_description(); + ASSERT_TRUE(media_description); + auto extensions = media_description->rtp_header_extensions(); + ASSERT_GT(extensions.size(), 0u); + extensions[0].id = 42; // id=42 should be unused. + media_description->set_rtp_header_extensions(extensions); + + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kRtpHeaderExtensionModified, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, PayloadTypeChanged) { + auto pc = CreatePeerConnection(); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& contents = offer->description()->contents(); + ASSERT_EQ(contents.size(), 1u); + auto* media_description = contents[0].media_description(); + ASSERT_TRUE(media_description); + auto codecs = media_description->codecs(); + ASSERT_GT(codecs.size(), 0u); + codecs[0].id = 19; // IANA reserved payload type, should not conflict. 
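+ // Remapping an offered codec to a different payload type is detected as
+ // kPayloadTypes munging.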
+ media_description->set_codecs(codecs); + + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kPayloadTypes, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, AudioCodecsReordered) { + auto pc = CreatePeerConnection(); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& contents = offer->description()->contents(); + ASSERT_EQ(contents.size(), 1u); + auto* media_description = contents[0].media_description(); + ASSERT_TRUE(media_description); + auto codecs = media_description->codecs(); + ASSERT_GT(codecs.size(), 1u); + std::swap(codecs[0], codecs[1]); + media_description->set_codecs(codecs); + + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kAudioCodecsReordered, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, VideoCodecsReordered) { + auto pc = CreatePeerConnection(); + pc->AddVideoTrack("video_track", {}); + + auto offer = pc->CreateOffer(); + auto& contents = offer->description()->contents(); + ASSERT_EQ(contents.size(), 1u); + auto* media_description = contents[0].media_description(); + ASSERT_TRUE(media_description); + auto codecs = media_description->codecs(); + ASSERT_GT(codecs.size(), 1u); + std::swap(codecs[0], codecs[1]); + media_description->set_codecs(codecs); + + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kVideoCodecsReordered, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, AudioCodecsFmtp) { + auto pc = CreatePeerConnection(); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& contents = offer->description()->contents(); + ASSERT_EQ(contents.size(), 1u); + auto* media_description = contents[0].media_description(); + ASSERT_TRUE(media_description); + auto codecs = media_description->codecs(); + ASSERT_GT(codecs.size(), 0u); + codecs[0].params["dont"] = "munge"; + media_description->set_codecs(codecs); + + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kAudioCodecsFmtp, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, VideoCodecsFmtp) { + auto pc = CreatePeerConnection(); + pc->AddVideoTrack("video_track", {}); + + auto offer = pc->CreateOffer(); + auto& contents = offer->description()->contents(); + ASSERT_EQ(contents.size(), 1u); + auto* media_description = contents[0].media_description(); + ASSERT_TRUE(media_description); + auto codecs = media_description->codecs(); + ASSERT_GT(codecs.size(), 0u); + codecs[0].params["dont"] = "munge"; + media_description->set_codecs(codecs); + + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kVideoCodecsFmtp, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, AudioCodecsRtcpFb) { + auto pc = CreatePeerConnection(); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& contents = offer->description()->contents(); + ASSERT_EQ(contents.size(), 1u); + auto* media_description = 
contents[0].media_description(); + ASSERT_TRUE(media_description); + auto codecs = media_description->codecs(); + ASSERT_GT(codecs.size(), 0u); + codecs[0].feedback_params.Add({"dont", "munge"}); + media_description->set_codecs(codecs); + + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kAudioCodecsRtcpFb, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, AudioCodecsRtcpFbNack) { + auto pc = CreatePeerConnection(); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& contents = offer->description()->contents(); + ASSERT_EQ(contents.size(), 1u); + auto* media_description = contents[0].media_description(); + ASSERT_TRUE(media_description); + auto codecs = media_description->codecs(); + ASSERT_GT(codecs.size(), 0u); + codecs[0].feedback_params.Add(FeedbackParam("nack")); + media_description->set_codecs(codecs); + + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kAudioCodecsRtcpFbAudioNack, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, AudioCodecsRtcpFbRrtr) { + auto pc = CreatePeerConnection(); + pc->AddAudioTrack("audio_track", {}); + + auto offer = pc->CreateOffer(); + auto& contents = offer->description()->contents(); + ASSERT_EQ(contents.size(), 1u); + auto* media_description = contents[0].media_description(); + ASSERT_TRUE(media_description); + auto codecs = media_description->codecs(); + ASSERT_GT(codecs.size(), 0u); + codecs[0].feedback_params.Add(FeedbackParam("rrtr")); + media_description->set_codecs(codecs); + + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kAudioCodecsRtcpFbRrtr, 1))); +} + +TEST_F(SdpOfferAnswerMungingTest, VideoCodecsRtcpFb) { + auto pc = CreatePeerConnection(); + pc->AddVideoTrack("video_track", {}); + + auto offer = pc->CreateOffer(); + auto& contents = offer->description()->contents(); + ASSERT_EQ(contents.size(), 1u); + auto* media_description = contents[0].media_description(); + ASSERT_TRUE(media_description); + auto codecs = media_description->codecs(); + ASSERT_GT(codecs.size(), 0u); + codecs[0].feedback_params.Add({"dont", "munge"}); + media_description->set_codecs(codecs); + + RTCError error; + EXPECT_TRUE(pc->SetLocalDescription(std::move(offer), &error)); + EXPECT_THAT( + metrics::Samples("WebRTC.PeerConnection.SdpMunging.Offer.Initial"), + ElementsAre(Pair(SdpMungingType::kVideoCodecsRtcpFb, 1))); +} + } // namespace webrtc diff --git a/pc/sdp_state_provider.h b/pc/sdp_state_provider.h index 23ffc91bd9..85c2e19ab9 100644 --- a/pc/sdp_state_provider.h +++ b/pc/sdp_state_provider.h @@ -45,8 +45,7 @@ class SdpStateProvider { // Whether an ICE restart was indicated in the remote offer. // Used in CreateAnswer. 
virtual bool IceRestartPending(const std::string& content_name) const = 0; - virtual absl::optional GetDtlsRole( - const std::string& mid) const = 0; + virtual std::optional GetDtlsRole(const std::string& mid) const = 0; }; } // namespace webrtc diff --git a/pc/sdp_utils.cc b/pc/sdp_utils.cc index ca61f0013f..33281e5c71 100644 --- a/pc/sdp_utils.cc +++ b/pc/sdp_utils.cc @@ -10,10 +10,13 @@ #include "pc/sdp_utils.h" +#include #include -#include +#include "api/jsep.h" #include "api/jsep_session_description.h" +#include "p2p/base/transport_info.h" +#include "pc/session_description.h" #include "rtc_base/checks.h" namespace webrtc { @@ -39,11 +42,10 @@ std::unique_ptr CloneSessionDescriptionAsType( return std::move(clone); } -bool SdpContentsAll(SdpContentPredicate pred, - const cricket::SessionDescription* desc) { +bool SdpContentsAll(SdpContentPredicate pred, const SessionDescription* desc) { RTC_DCHECK(desc); for (const auto& content : desc->contents()) { - const auto* transport_info = desc->GetTransportInfoByName(content.name); + const auto* transport_info = desc->GetTransportInfoByName(content.mid()); if (!pred(&content, transport_info)) { return false; } @@ -51,21 +53,19 @@ bool SdpContentsAll(SdpContentPredicate pred, return true; } -bool SdpContentsNone(SdpContentPredicate pred, - const cricket::SessionDescription* desc) { +bool SdpContentsNone(SdpContentPredicate pred, const SessionDescription* desc) { return SdpContentsAll( - [pred](const cricket::ContentInfo* content_info, - const cricket::TransportInfo* transport_info) { + [pred](const ContentInfo* content_info, + const TransportInfo* transport_info) { return !pred(content_info, transport_info); }, desc); } -void SdpContentsForEach(SdpContentMutator fn, - cricket::SessionDescription* desc) { +void SdpContentsForEach(SdpContentMutator fn, SessionDescription* desc) { RTC_DCHECK(desc); for (auto& content : desc->contents()) { - auto* transport_info = desc->GetTransportInfoByName(content.name); + auto* transport_info = desc->GetTransportInfoByName(content.mid()); fn(&content, transport_info); } } diff --git a/pc/sdp_utils.h b/pc/sdp_utils.h index effd7cd034..14f5a1cc68 100644 --- a/pc/sdp_utils.h +++ b/pc/sdp_utils.h @@ -33,29 +33,26 @@ CloneSessionDescriptionAsType(const SessionDescriptionInterface* sdesc, // Function that takes a single session description content with its // corresponding transport and produces a boolean. -typedef std::function +typedef std::function SdpContentPredicate; // Returns true if the predicate returns true for all contents in the given // session description. -bool SdpContentsAll(SdpContentPredicate pred, - const cricket::SessionDescription* desc); +bool SdpContentsAll(SdpContentPredicate pred, const SessionDescription* desc); // Returns true if the predicate returns true for none of the contents in the // given session description. -bool SdpContentsNone(SdpContentPredicate pred, - const cricket::SessionDescription* desc); +bool SdpContentsNone(SdpContentPredicate pred, const SessionDescription* desc); // Function that takes a single session description content with its // corresponding transport and can mutate the content and/or the transport. -typedef std::function +typedef std::function SdpContentMutator; // Applies the mutator function over all contents in the given session // description. 
-void SdpContentsForEach(SdpContentMutator fn, - cricket::SessionDescription* desc); +void SdpContentsForEach(SdpContentMutator fn, SessionDescription* desc); } // namespace webrtc diff --git a/pc/session_description.cc b/pc/session_description.cc index e1152eb107..96ea3d9deb 100644 --- a/pc/session_description.cc +++ b/pc/session_description.cc @@ -10,32 +10,38 @@ #include "pc/session_description.h" +#include +#include +#include +#include +#include + #include "absl/algorithm/container.h" #include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "p2p/base/transport_info.h" #include "rtc_base/checks.h" #include "rtc_base/strings/string_builder.h" -namespace cricket { +namespace webrtc { namespace { ContentInfo* FindContentInfoByName(ContentInfos* contents, - const std::string& name) { + absl::string_view name) { RTC_DCHECK(contents); for (ContentInfo& content : *contents) { - if (content.name == name) { + if (content.mid() == name) { return &content; } } return nullptr; } -} // namespace - const ContentInfo* FindContentInfoByName(const ContentInfos& contents, - const std::string& name) { + absl::string_view name) { for (ContentInfos::const_iterator content = contents.begin(); content != contents.end(); ++content) { - if (content->name == name) { + if (content->mid() == name) { return &(*content); } } @@ -52,6 +58,8 @@ const ContentInfo* FindContentInfoByType(const ContentInfos& contents, return nullptr; } +} // namespace + ContentGroup::ContentGroup(const std::string& semantics) : semantics_(semantics) {} @@ -85,7 +93,7 @@ bool ContentGroup::RemoveContentName(absl::string_view content_name) { } std::string ContentGroup::ToString() const { - rtc::StringBuilder acc; + StringBuilder acc; acc << semantics_ << "("; if (!content_names_.empty()) { for (const auto& name : content_names_) { @@ -117,7 +125,7 @@ ContentInfo* SessionDescription::GetContentByName(const std::string& name) { } const MediaContentDescription* SessionDescription::GetContentDescriptionByName( - const std::string& name) const { + absl::string_view name) const { const ContentInfo* cinfo = FindContentInfoByName(contents_, name); if (cinfo == NULL) { return NULL; @@ -127,7 +135,7 @@ const MediaContentDescription* SessionDescription::GetContentDescriptionByName( } MediaContentDescription* SessionDescription::GetContentDescriptionByName( - const std::string& name) { + absl::string_view name) { ContentInfo* cinfo = FindContentInfoByName(&contents_, name); if (cinfo == NULL) { return NULL; @@ -149,10 +157,7 @@ void SessionDescription::AddContent( const std::string& name, MediaProtocolType type, std::unique_ptr description) { - ContentInfo content(type); - content.name = name; - content.set_media_description(std::move(description)); - AddContent(std::move(content)); + AddContent(ContentInfo(type, name, std::move(description))); } void SessionDescription::AddContent( @@ -160,11 +165,7 @@ void SessionDescription::AddContent( MediaProtocolType type, bool rejected, std::unique_ptr description) { - ContentInfo content(type); - content.name = name; - content.rejected = rejected; - content.set_media_description(std::move(description)); - AddContent(std::move(content)); + AddContent(ContentInfo(type, name, std::move(description), rejected)); } void SessionDescription::AddContent( @@ -173,12 +174,8 @@ void SessionDescription::AddContent( bool rejected, bool bundle_only, std::unique_ptr description) { - ContentInfo content(type); - content.name = name; - content.rejected = rejected; - content.bundle_only = bundle_only; - 
content.set_media_description(std::move(description)); - AddContent(std::move(content)); + AddContent( + ContentInfo(type, name, std::move(description), rejected, bundle_only)); } void SessionDescription::AddContent(ContentInfo&& content) { @@ -193,7 +190,7 @@ void SessionDescription::AddContent(ContentInfo&& content) { bool SessionDescription::RemoveContentByName(const std::string& name) { for (ContentInfos::iterator content = contents_.begin(); content != contents_.end(); ++content) { - if (content->name == name) { + if (content->mid() == name) { contents_.erase(content); return true; } @@ -285,21 +282,12 @@ ContentInfo::~ContentInfo() {} // Copy operator. ContentInfo::ContentInfo(const ContentInfo& o) - : name(o.name), - type(o.type), + : type(o.type), rejected(o.rejected), bundle_only(o.bundle_only), + mid_(o.mid_), description_(o.description_->Clone()) {} -ContentInfo& ContentInfo::operator=(const ContentInfo& o) { - name = o.name; - type = o.type; - rejected = o.rejected; - bundle_only = o.bundle_only; - description_ = o.description_->Clone(); - return *this; -} - const MediaContentDescription* ContentInfo::media_description() const { return description_.get(); } @@ -308,4 +296,4 @@ MediaContentDescription* ContentInfo::media_description() { return description_.get(); } -} // namespace cricket +} // namespace webrtc diff --git a/pc/session_description.h b/pc/session_description.h index 31992be083..b60acdd3b0 100644 --- a/pc/session_description.h +++ b/pc/session_description.h @@ -23,7 +23,6 @@ #include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "api/crypto_params.h" #include "api/media_types.h" #include "api/rtp_parameters.h" #include "api/rtp_transceiver_direction.h" @@ -41,12 +40,9 @@ #include "rtc_base/socket_address.h" #include "rtc_base/system/rtc_export.h" -namespace cricket { +namespace webrtc { -typedef std::vector AudioCodecs; -typedef std::vector VideoCodecs; -typedef std::vector CryptoParamsVec; -typedef std::vector RtpHeaderExtensions; +using RtpHeaderExtensions = std::vector; // Options to control how session descriptions are generated. const int kAutoBandwidth = -1; @@ -63,7 +59,7 @@ class MediaContentDescription { MediaContentDescription() = default; virtual ~MediaContentDescription() = default; - virtual MediaType type() const = 0; + virtual webrtc::MediaType type() const = 0; // Try to cast this media description to an AudioContentDescription. Returns // nullptr if the cast fails. @@ -83,8 +79,6 @@ class MediaContentDescription { return nullptr; } - virtual bool has_codecs() const = 0; - // Copy operator that returns an unique_ptr. // Not a virtual function. // If a type-specific variant of Clone() is desired, override it, or @@ -100,8 +94,8 @@ class MediaContentDescription { protocol_ = std::string(protocol); } - webrtc::RtpTransceiverDirection direction() const { return direction_; } - void set_direction(webrtc::RtpTransceiverDirection direction) { + RtpTransceiverDirection direction() const { return direction_; } + void set_direction(RtpTransceiverDirection direction) { direction_ = direction; } @@ -120,6 +114,13 @@ class MediaContentDescription { remote_estimate_ = remote_estimate; } + // Support of RFC 8888 feedback messages. + // This is a transport-wide property, but is signalled in SDP + // at the m-line level; its mux category is IDENTICAL-PER-PT, + // and only wildcard is allowed. RFC 8888 section 6. 
+ bool rtcp_fb_ack_ccfb() const { return rtcp_fb_ack_ccfb_; } + void set_rtcp_fb_ack_ccfb(bool enable) { rtcp_fb_ack_ccfb_ = enable; } + int bandwidth() const { return bandwidth_; } void set_bandwidth(int bandwidth) { bandwidth_ = bandwidth; } std::string bandwidth_type() const { return bandwidth_type_; } @@ -127,12 +128,6 @@ class MediaContentDescription { bandwidth_type_ = bandwidth_type; } - const std::vector& cryptos() const { return cryptos_; } - void AddCrypto(const CryptoParams& params) { cryptos_.push_back(params); } - void set_cryptos(const std::vector& cryptos) { - cryptos_ = cryptos; - } - // List of RTP header extensions. URIs are **NOT** guaranteed to be unique // as they can appear twice when both encrypted and non-encrypted extensions // are present. @@ -145,7 +140,7 @@ class MediaContentDescription { rtp_header_extensions_ = extensions; rtp_header_extensions_set_ = true; } - void AddRtpHeaderExtension(const webrtc::RtpExtension& ext) { + void AddRtpHeaderExtension(const RtpExtension& ext) { rtp_header_extensions_.push_back(ext); rtp_header_extensions_set_ = true; } @@ -195,10 +190,10 @@ class MediaContentDescription { // https://tools.ietf.org/html/rfc4566#section-5.7 // May be present at the media or session level of SDP. If present at both // levels, the media-level attribute overwrites the session-level one. - void set_connection_address(const rtc::SocketAddress& address) { + void set_connection_address(const SocketAddress& address) { connection_address_ = address; } - const rtc::SocketAddress& connection_address() const { + const SocketAddress& connection_address() const { return connection_address_; } @@ -234,54 +229,14 @@ class MediaContentDescription { receive_rids_ = rids; } - protected: - bool rtcp_mux_ = false; - bool rtcp_reduced_size_ = false; - bool remote_estimate_ = false; - int bandwidth_ = kAutoBandwidth; - std::string bandwidth_type_ = kApplicationSpecificBandwidth; - std::string protocol_; - std::vector cryptos_; - std::vector rtp_header_extensions_; - bool rtp_header_extensions_set_ = false; - StreamParamsVec send_streams_; - bool conference_mode_ = false; - webrtc::RtpTransceiverDirection direction_ = - webrtc::RtpTransceiverDirection::kSendRecv; - rtc::SocketAddress connection_address_; - ExtmapAllowMixed extmap_allow_mixed_enum_ = kMedia; - - SimulcastDescription simulcast_; - std::vector receive_rids_; - - private: - // Copy function that returns a raw pointer. Caller will assert ownership. - // Should only be called by the Clone() function. Must be implemented - // by each final subclass. - virtual MediaContentDescription* CloneInternal() const = 0; -}; - -template -class MediaContentDescriptionImpl : public MediaContentDescription { - public: - void set_protocol(absl::string_view protocol) override { - RTC_DCHECK(IsRtpProtocol(protocol)); - protocol_ = std::string(protocol); - } - // Codecs should be in preference order (most preferred codec first). 
const std::vector& codecs() const { return codecs_; } void set_codecs(const std::vector& codecs) { codecs_ = codecs; } - bool has_codecs() const override { return !codecs_.empty(); } + virtual bool has_codecs() const { return !codecs_.empty(); } bool HasCodec(int id) { - bool found = false; - for (auto it = codecs_.begin(); it != codecs_.end(); ++it) { - if (it->id == id) { - found = true; - break; - } - } - return found; + return absl::c_find_if(codecs_, [id](const Codec codec) { + return codec.id == id; + }) != codecs_.end(); } void AddCodec(const Codec& codec) { codecs_.push_back(codec); } void AddOrReplaceCodec(const Codec& codec) { @@ -299,32 +254,69 @@ class MediaContentDescriptionImpl : public MediaContentDescription { } } + protected: + // TODO(bugs.webrtc.org/15214): move all RTP related things to + // RtpMediaDescription that the SCTP content description does + // not inherit from. + std::string protocol_; + private: + bool rtcp_mux_ = false; + bool rtcp_reduced_size_ = false; + bool remote_estimate_ = false; + bool rtcp_fb_ack_ccfb_ = false; + int bandwidth_ = kAutoBandwidth; + std::string bandwidth_type_ = kApplicationSpecificBandwidth; + + std::vector rtp_header_extensions_; + bool rtp_header_extensions_set_ = false; + StreamParamsVec send_streams_; + bool conference_mode_ = false; + RtpTransceiverDirection direction_ = RtpTransceiverDirection::kSendRecv; + SocketAddress connection_address_; + ExtmapAllowMixed extmap_allow_mixed_enum_ = kMedia; + + SimulcastDescription simulcast_; + std::vector receive_rids_; + + // Copy function that returns a raw pointer. Caller will assert ownership. + // Should only be called by the Clone() function. Must be implemented + // by each final subclass. + virtual MediaContentDescription* CloneInternal() const = 0; + std::vector codecs_; }; -class AudioContentDescription : public MediaContentDescriptionImpl { - public: - AudioContentDescription() {} +class RtpMediaContentDescription : public MediaContentDescription {}; - virtual MediaType type() const { return MEDIA_TYPE_AUDIO; } - virtual AudioContentDescription* as_audio() { return this; } - virtual const AudioContentDescription* as_audio() const { return this; } +class AudioContentDescription : public RtpMediaContentDescription { + public: + void set_protocol(absl::string_view protocol) override { + RTC_DCHECK(IsRtpProtocol(protocol)); + protocol_ = std::string(protocol); + } + webrtc::MediaType type() const override { return webrtc::MediaType::AUDIO; } + AudioContentDescription* as_audio() override { return this; } + const AudioContentDescription* as_audio() const override { return this; } private: - virtual AudioContentDescription* CloneInternal() const { + AudioContentDescription* CloneInternal() const override { return new AudioContentDescription(*this); } }; -class VideoContentDescription : public MediaContentDescriptionImpl { +class VideoContentDescription : public RtpMediaContentDescription { public: - virtual MediaType type() const { return MEDIA_TYPE_VIDEO; } - virtual VideoContentDescription* as_video() { return this; } - virtual const VideoContentDescription* as_video() const { return this; } + void set_protocol(absl::string_view protocol) override { + RTC_DCHECK(IsRtpProtocol(protocol)); + protocol_ = std::string(protocol); + } + webrtc::MediaType type() const override { return webrtc::MediaType::VIDEO; } + VideoContentDescription* as_video() override { return this; } + const VideoContentDescription* as_video() const override { return this; } private: - virtual 
VideoContentDescription* CloneInternal() const { + VideoContentDescription* CloneInternal() const override { return new VideoContentDescription(*this); } }; @@ -337,7 +329,7 @@ class SctpDataContentDescription : public MediaContentDescription { use_sctpmap_(o.use_sctpmap_), port_(o.port_), max_message_size_(o.max_message_size_) {} - MediaType type() const override { return MEDIA_TYPE_DATA; } + webrtc::MediaType type() const override { return webrtc::MediaType::DATA; } SctpDataContentDescription* as_sctp() override { return this; } const SctpDataContentDescription* as_sctp() const override { return this; } @@ -371,7 +363,9 @@ class UnsupportedContentDescription : public MediaContentDescription { public: explicit UnsupportedContentDescription(absl::string_view media_type) : media_type_(media_type) {} - MediaType type() const override { return MEDIA_TYPE_UNSUPPORTED; } + webrtc::MediaType type() const override { + return webrtc::MediaType::UNSUPPORTED; + } UnsupportedContentDescription* as_unsupported() override { return this; } const UnsupportedContentDescription* as_unsupported() const override { @@ -406,32 +400,40 @@ enum class MediaProtocolType { class RTC_EXPORT ContentInfo { public: explicit ContentInfo(MediaProtocolType type) : type(type) {} + ContentInfo(MediaProtocolType type, + absl::string_view mid, + std::unique_ptr description, + bool rejected = false, + bool bundle_only = false) + : type(type), + rejected(rejected), + bundle_only(bundle_only), + mid_(mid), + description_(std::move(description)) {} ~ContentInfo(); - // Copy + + // Copy ctor and assignment will clone `description_`. ContentInfo(const ContentInfo& o); - ContentInfo& operator=(const ContentInfo& o); + // Const ref assignment operator removed. Instead, use the explicit ctor. + ContentInfo& operator=(const ContentInfo& o) = delete; + ContentInfo(ContentInfo&& o) = default; ContentInfo& operator=(ContentInfo&& o) = default; - // Alias for `name`. - std::string mid() const { return name; } - void set_mid(const std::string& mid) { this->name = mid; } + // TODO(tommi): change return type to string_view. + const std::string& mid() const { return mid_; } + void set_mid(absl::string_view mid) { mid_ = std::string(mid); } // Alias for `description`. MediaContentDescription* media_description(); const MediaContentDescription* media_description() const; - void set_media_description(std::unique_ptr desc) { - description_ = std::move(desc); - } - - // TODO(bugs.webrtc.org/8620): Rename this to mid. - std::string name; MediaProtocolType type; bool rejected = false; bool bundle_only = false; private: + std::string mid_; friend class SessionDescription; std::unique_ptr description_; }; @@ -469,18 +471,23 @@ class ContentGroup { typedef std::vector ContentInfos; typedef std::vector ContentGroups; -const ContentInfo* FindContentInfoByName(const ContentInfos& contents, - const std::string& name); -const ContentInfo* FindContentInfoByType(const ContentInfos& contents, - const std::string& type); - -// Determines how the MSID will be signaled in the SDP. These can be used as -// flags to indicate both or none. +// Determines how the MSID will be signaled in the SDP. +// These can be used as bit flags to indicate both or the special value none. enum MsidSignaling { - // Signal MSID with one a=msid line in the media section. + // MSID is not signaled. This is not a bit flag and must be compared for + // equality. + kMsidSignalingNotUsed = 0x0, + // Signal MSID with at least one a=msid line in the media section. 
+ // This requires unified plan. kMsidSignalingMediaSection = 0x1, // Signal MSID with a=ssrc: msid lines in the media section. - kMsidSignalingSsrcAttribute = 0x2 + // This should only be used with plan-b but is signalled in + // offers for backward compatibility reasons. + kMsidSignalingSsrcAttribute = 0x2, + // Signal MSID with a=msid-semantic: WMS in the session section. + // This is deprecated but signalled for backward compatibility reasons. + // It is typically combined with 0x1 or 0x2. + kMsidSignalingSemantic = 0x4 }; // Describes a collection of contents, each with its own name and @@ -499,8 +506,8 @@ class SessionDescription { const ContentInfo* GetContentByName(const std::string& name) const; ContentInfo* GetContentByName(const std::string& name); const MediaContentDescription* GetContentDescriptionByName( - const std::string& name) const; - MediaContentDescription* GetContentDescriptionByName(const std::string& name); + absl::string_view name) const; + MediaContentDescription* GetContentDescriptionByName(absl::string_view name); const ContentInfo* FirstContentByType(MediaProtocolType type) const; const ContentInfo* FirstContent() const; @@ -554,9 +561,6 @@ class SessionDescription { void RemoveGroupByName(const std::string& name); // Global attributes. - void set_msid_supported(bool supported) { msid_supported_ = supported; } - bool msid_supported() const { return msid_supported_; } - // Determines how the MSIDs were/will be signaled. Flag value composed of // MsidSignaling bits (see enum above). void set_msid_signaling(int msid_signaling) { @@ -588,10 +592,7 @@ class SessionDescription { ContentInfos contents_; TransportInfos transport_infos_; ContentGroups content_groups_; - bool msid_supported_ = true; - // Default to what Plan B would do. - // TODO(bugs.webrtc.org/8530): Change default to kMsidSignalingMediaSection. - int msid_signaling_ = kMsidSignalingSsrcAttribute; + int msid_signaling_ = kMsidSignalingMediaSection | kMsidSignalingSemantic; bool extmap_allow_mixed_ = true; }; @@ -599,6 +600,36 @@ class SessionDescription { // received from the remote client. enum ContentSource { CS_LOCAL, CS_REMOTE }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated.
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::AudioContentDescription; +using ::webrtc::ContentGroup; +using ::webrtc::ContentGroups; +using ::webrtc::ContentInfo; +using ::webrtc::ContentInfos; +using ::webrtc::ContentNames; +using ::webrtc::ContentSource; +using ::webrtc::CS_LOCAL; +using ::webrtc::CS_REMOTE; +using ::webrtc::kAutoBandwidth; +using ::webrtc::kMsidSignalingMediaSection; +using ::webrtc::kMsidSignalingNotUsed; +using ::webrtc::kMsidSignalingSemantic; +using ::webrtc::kMsidSignalingSsrcAttribute; +using ::webrtc::MediaContentDescription; +using ::webrtc::MediaProtocolType; +using ::webrtc::MsidSignaling; +using ::webrtc::RtpHeaderExtensions; +using ::webrtc::RtpMediaContentDescription; +using ::webrtc::SctpDataContentDescription; +using ::webrtc::SessionDescription; +using ::webrtc::UnsupportedContentDescription; +using ::webrtc::VideoContentDescription; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // PC_SESSION_DESCRIPTION_H_ diff --git a/pc/session_description_unittest.cc b/pc/session_description_unittest.cc index 4d0913bad2..4bde5711a8 100644 --- a/pc/session_description_unittest.cc +++ b/pc/session_description_unittest.cc @@ -9,9 +9,12 @@ */ #include "pc/session_description.h" +#include +#include + #include "test/gtest.h" -namespace cricket { +namespace webrtc { TEST(MediaContentDescriptionTest, ExtmapAllowMixedDefaultValue) { VideoContentDescription video_desc; @@ -130,4 +133,4 @@ TEST(SessionDescriptionTest, AddContentTransfersExtmapAllowMixedSetting) { ->extmap_allow_mixed_enum()); } -} // namespace cricket +} // namespace webrtc diff --git a/pc/simulcast_description.cc b/pc/simulcast_description.cc index ec87415677..87e4e2044c 100644 --- a/pc/simulcast_description.cc +++ b/pc/simulcast_description.cc @@ -10,9 +10,13 @@ #include "pc/simulcast_description.h" +#include +#include + +#include "absl/strings/string_view.h" #include "rtc_base/checks.h" -namespace cricket { +namespace webrtc { SimulcastLayer::SimulcastLayer(absl::string_view rid, bool is_paused) : rid{rid}, is_paused{is_paused} { @@ -54,4 +58,4 @@ std::vector SimulcastLayerList::GetAllLayers() const { return result; } -} // namespace cricket +} // namespace webrtc diff --git a/pc/simulcast_description.h b/pc/simulcast_description.h index 7caf164de5..3de057e152 100644 --- a/pc/simulcast_description.h +++ b/pc/simulcast_description.h @@ -18,7 +18,7 @@ #include "absl/strings/string_view.h" -namespace cricket { +namespace webrtc { // Describes a Simulcast Layer. // Each simulcast layer has a rid as the identifier and a paused flag. @@ -110,6 +110,16 @@ class SimulcastDescription final { SimulcastLayerList receive_layers_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::SimulcastDescription; +using ::webrtc::SimulcastLayer; +using ::webrtc::SimulcastLayerList; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // PC_SIMULCAST_DESCRIPTION_H_ diff --git a/pc/sdp_serializer.cc b/pc/simulcast_sdp_serializer.cc similarity index 73% rename from pc/sdp_serializer.cc rename to pc/simulcast_sdp_serializer.cc index 31c624b12c..ba90556d6c 100644 --- a/pc/sdp_serializer.cc +++ b/pc/simulcast_sdp_serializer.cc @@ -8,28 +8,36 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "pc/sdp_serializer.h" +#include "pc/simulcast_sdp_serializer.h" +#include #include +#include #include -#include #include #include #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "media/base/codec.h" +#include "media/base/codec_comparators.h" +#include "media/base/rid_description.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "pc/session_description.h" +#include "pc/simulcast_description.h" #include "rtc_base/checks.h" +#include "rtc_base/logging.h" #include "rtc_base/string_encode.h" #include "rtc_base/string_to_number.h" #include "rtc_base/strings/string_builder.h" -using cricket::RidDescription; -using cricket::RidDirection; -using cricket::SimulcastDescription; -using cricket::SimulcastLayer; -using cricket::SimulcastLayerList; +using ::webrtc::RidDescription; +using ::webrtc::RidDirection; +using ::webrtc::SimulcastDescription; +using ::webrtc::SimulcastLayer; +using ::webrtc::SimulcastLayerList; namespace webrtc { @@ -59,8 +67,8 @@ RTCError ParseError(absl::string_view message) { // These methods serialize simulcast according to the specification: // https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-13#section-5.1 -rtc::StringBuilder& operator<<(rtc::StringBuilder& builder, - const SimulcastLayer& simulcast_layer) { +StringBuilder& operator<<(StringBuilder& builder, + const SimulcastLayer& simulcast_layer) { if (simulcast_layer.is_paused) { builder << kSimulcastPausedStream; } @@ -68,8 +76,8 @@ rtc::StringBuilder& operator<<(rtc::StringBuilder& builder, return builder; } -rtc::StringBuilder& operator<<( - rtc::StringBuilder& builder, +StringBuilder& operator<<( + StringBuilder& builder, const std::vector& layer_alternatives) { bool first = true; for (const SimulcastLayer& rid : layer_alternatives) { @@ -82,8 +90,8 @@ rtc::StringBuilder& operator<<( return builder; } -rtc::StringBuilder& operator<<(rtc::StringBuilder& builder, - const SimulcastLayerList& simulcast_layers) { +StringBuilder& operator<<(StringBuilder& builder, + const SimulcastLayerList& simulcast_layers) { bool first = true; for (const auto& alternatives : simulcast_layers) { if (!first) { @@ -102,8 +110,7 @@ rtc::StringBuilder& operator<<(rtc::StringBuilder& builder, // sc-id = [sc-id-paused] rid-id // rid-id = 1*(alpha-numeric / "-" / "_") ; see: I-D.ietf-mmusic-rid RTCErrorOr ParseSimulcastLayerList(const std::string& str) { - std::vector tokens = - rtc::split(str, kDelimiterSemicolonChar); + std::vector tokens = split(str, kDelimiterSemicolonChar); if (tokens.empty()) { return ParseError("Layer list cannot be empty."); } @@ -115,7 +122,7 @@ RTCErrorOr ParseSimulcastLayerList(const std::string& str) { } std::vector rid_tokens = - rtc::split(token, kDelimiterCommaChar); + split(token, kDelimiterCommaChar); if (rid_tokens.empty()) { return ParseError("Simulcast alternative layer list is malformed."); @@ -139,12 +146,13 @@ RTCErrorOr ParseSimulcastLayerList(const std::string& str) { } webrtc::RTCError ParseRidPayloadList(const std::string& payload_list, - RidDescription* rid_description) { + RidDescription* rid_description, + std::vector* rid_payload_types) { RTC_DCHECK(rid_description); - std::vector& payload_types = rid_description->payload_types; + RTC_DCHECK(rid_payload_types); // Check that the description doesn't have any payload types or restrictions. // If the pt= field is specified, it must be first and must not repeat. 
- if (!payload_types.empty()) { + if (!rid_payload_types->empty()) { return ParseError("Multiple pt= found in RID Description."); } if (!rid_description->restrictions.empty()) { @@ -158,22 +166,22 @@ webrtc::RTCError ParseRidPayloadList(const std::string& payload_list, // Tokenize the ',' delimited list std::vector string_payloads; - rtc::tokenize(payload_list, kDelimiterCommaChar, &string_payloads); + tokenize(payload_list, kDelimiterCommaChar, &string_payloads); if (string_payloads.empty()) { return ParseError("Payload list must have at least one value."); } for (const std::string& payload_type : string_payloads) { - absl::optional value = rtc::StringToNumber(payload_type); + std::optional value = StringToNumber(payload_type); if (!value.has_value()) { return ParseError("Invalid payload type: " + payload_type); } // Check if the value already appears in the payload list. - if (absl::c_linear_search(payload_types, value.value())) { + if (absl::c_linear_search(*rid_payload_types, value.value())) { return ParseError("Duplicate payload type in list: " + payload_type); } - payload_types.push_back(value.value()); + rid_payload_types->push_back(value.value()); } return RTCError::OK(); @@ -181,9 +189,9 @@ webrtc::RTCError ParseRidPayloadList(const std::string& payload_list, } // namespace -std::string SdpSerializer::SerializeSimulcastDescription( - const cricket::SimulcastDescription& simulcast) const { - rtc::StringBuilder sb; +std::string SimulcastSdpSerializer::SerializeSimulcastDescription( + const SimulcastDescription& simulcast) const { + StringBuilder sb; std::string delimiter; if (!simulcast.send_layers().empty()) { @@ -196,7 +204,7 @@ std::string SdpSerializer::SerializeSimulcastDescription( << simulcast.receive_layers(); } - return sb.str(); + return sb.Release(); } // https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-13#section-5.1 @@ -210,10 +218,11 @@ std::string SdpSerializer::SerializeSimulcastDescription( // sc-id-paused = "~" // sc-id = [sc-id-paused] rid-id // rid-id = 1*(alpha-numeric / "-" / "_") ; see: I-D.ietf-mmusic-rid -RTCErrorOr SdpSerializer::DeserializeSimulcastDescription( +RTCErrorOr +SimulcastSdpSerializer::DeserializeSimulcastDescription( absl::string_view string) const { std::vector tokens; - rtc::tokenize(std::string(string), kDelimiterSpaceChar, &tokens); + tokenize(std::string(string), kDelimiterSpaceChar, &tokens); if (tokens.size() != 2 && tokens.size() != 4) { return ParseError("Must have one or two pairs."); @@ -264,19 +273,42 @@ RTCErrorOr SdpSerializer::DeserializeSimulcastDescription( return std::move(simulcast); } -std::string SdpSerializer::SerializeRidDescription( +std::string SimulcastSdpSerializer::SerializeRidDescription( + const MediaContentDescription& media_desc, const RidDescription& rid_description) const { RTC_DCHECK(!rid_description.rid.empty()); RTC_DCHECK(rid_description.direction == RidDirection::kSend || rid_description.direction == RidDirection::kReceive); - rtc::StringBuilder builder; + StringBuilder builder; builder << rid_description.rid << kDelimiterSpace << (rid_description.direction == RidDirection::kSend ? kSendDirection : kReceiveDirection); - const auto& payload_types = rid_description.payload_types; + // Convert `rid_descriptions.codecs` into a list of payload types based on + // looking up codecs from the media description, as opposed to trusting the + // `rid_descriptions.codecs[i].id` directly as these are typically wrong. 
+ std::vector payload_types; + for (const Codec& codec : rid_description.codecs) { + RtpCodec rtp_codec = codec.ToCodecParameters(); + const auto it = std::find_if( + media_desc.codecs().begin(), media_desc.codecs().end(), + [&rtp_codec](const Codec& m_section_codec) { + return IsSameRtpCodecIgnoringLevel(m_section_codec, rtp_codec); + }); + // The desired codec from setParameters() may not have been negotiated, e.g. + // if excluded with setCodecPreferences(). + if (it == media_desc.codecs().end()) { + break; + } + if (it->id == Codec::kIdNotSet) { + RTC_DCHECK_NOTREACHED(); + break; + } + payload_types.push_back(it->id); + } + const auto& restrictions = rid_description.restrictions; // First property is separated by ' ', the next ones by ';'. @@ -304,7 +336,7 @@ std::string SdpSerializer::SerializeRidDescription( propertyDelimiter = kDelimiterSemicolon; } - return builder.str(); + return builder.Release(); } // https://tools.ietf.org/html/draft-ietf-mmusic-rid-15#section-10 @@ -319,10 +351,11 @@ std::string SdpSerializer::SerializeRidDescription( // rid-param = 1*(alpha-numeric / "-") [ "=" param-val ] // param-val = *( %x20-58 / %x60-7E ) // ; Any printable character except semicolon -RTCErrorOr SdpSerializer::DeserializeRidDescription( +RTCErrorOr SimulcastSdpSerializer::DeserializeRidDescription( + const MediaContentDescription& media_desc, absl::string_view string) const { std::vector tokens; - rtc::tokenize(std::string(string), kDelimiterSpaceChar, &tokens); + tokenize(std::string(string), kDelimiterSpaceChar, &tokens); if (tokens.size() < 2) { return ParseError("RID Description must contain ."); @@ -344,11 +377,12 @@ RTCErrorOr SdpSerializer::DeserializeRidDescription( : RidDirection::kReceive; RidDescription rid_description(tokens[0], direction); + std::vector rid_payload_types; // If there is a third argument it is a payload list and/or restriction list. if (tokens.size() == 3) { std::vector restrictions; - rtc::tokenize(tokens[2], kDelimiterSemicolonChar, &restrictions); + tokenize(tokens[2], kDelimiterSemicolonChar, &restrictions); // Check for malformed restriction list, such as ';' or ';;;' etc. if (restrictions.empty()) { @@ -358,7 +392,7 @@ RTCErrorOr SdpSerializer::DeserializeRidDescription( // Parse the restrictions. The payload indicator (pt) can only appear first. for (const std::string& restriction : restrictions) { std::vector parts; - rtc::tokenize(restriction, kDelimiterEqualChar, &parts); + tokenize(restriction, kDelimiterEqualChar, &parts); if (parts.empty() || parts.size() > 2) { return ParseError("Invalid format for restriction: " + restriction); } @@ -368,8 +402,9 @@ RTCErrorOr SdpSerializer::DeserializeRidDescription( // unprintable characters, etc. which will not generate errors here but // will (most-likely) be ignored by components down stream. if (parts[0] == kPayloadType) { - RTCError error = ParseRidPayloadList( - parts.size() > 1 ? parts[1] : std::string(), &rid_description); + RTCError error = + ParseRidPayloadList(parts.size() > 1 ? parts[1] : std::string(), + &rid_description, &rid_payload_types); if (!error.ok()) { return std::move(error); } @@ -388,6 +423,25 @@ RTCErrorOr SdpSerializer::DeserializeRidDescription( } } + // Look up any referenced codecs from the media section and add them to + // `rid_description.codecs`. 
+ for (const int& payload_type : rid_payload_types) { + const auto it = + std::find_if(media_desc.codecs().begin(), media_desc.codecs().end(), + [&payload_type](const Codec& m_section_codec) { + return m_section_codec.id == payload_type; + }); + if (it == media_desc.codecs().end()) { + // This RID has a payload type that doesn't map to any known codec. While + // this is an error on the part of the entity that generated the SDP, this + // information falls into the "FYI" category and does not really change + // anything, so it's safe to ignore it. + RTC_LOG(LS_WARNING) << "A RID contains an unknown payload type."; + continue; + } + rid_description.codecs.push_back(*it); + } + return std::move(rid_description); } diff --git a/pc/sdp_serializer.h b/pc/simulcast_sdp_serializer.h similarity index 74% rename from pc/sdp_serializer.h rename to pc/simulcast_sdp_serializer.h index 559fac0e37..84d28ddbc0 100644 --- a/pc/sdp_serializer.h +++ b/pc/simulcast_sdp_serializer.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef PC_SDP_SERIALIZER_H_ -#define PC_SDP_SERIALIZER_H_ +#ifndef PC_SIMULCAST_SDP_SERIALIZER_H_ +#define PC_SIMULCAST_SDP_SERIALIZER_H_ #include @@ -21,41 +21,43 @@ namespace webrtc { -// This class should serialize components of the SDP (and not the SDP itself). +// This class serializes simulcast components of the SDP. // Example: // SimulcastDescription can be serialized and deserialized by this class. // The serializer will know how to translate the data to spec-compliant // format without knowing about the SDP attribute details (a=simulcast:) // Usage: // Consider the SDP attribute for simulcast a=simulcast:. -// The SDP serializtion code (webrtcsdp.h) should use `SdpSerializer` to +// The SDP serialization code (webrtc_sdp.h) should use `SimulcastSdpSerializer` to // serialize and deserialize the section. // This class will allow testing the serialization of components without // having to serialize the entire SDP while hiding implementation details -// from callers of sdp serialization (webrtcsdp.h).
+class SimulcastSdpSerializer { public: // Serialization for the Simulcast description according to // https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-13#section-5.1 std::string SerializeSimulcastDescription( - const cricket::SimulcastDescription& simulcast) const; + const SimulcastDescription& simulcast) const; // Deserialization for the SimulcastDescription according to // https://tools.ietf.org/html/draft-ietf-mmusic-sdp-simulcast-13#section-5.1 - RTCErrorOr DeserializeSimulcastDescription( + RTCErrorOr DeserializeSimulcastDescription( absl::string_view string) const; // Serialization for the RID description according to // https://tools.ietf.org/html/draft-ietf-mmusic-rid-15#section-10 std::string SerializeRidDescription( - const cricket::RidDescription& rid_description) const; + const MediaContentDescription& media_desc, + const RidDescription& rid_description) const; // Deserialization for the RidDescription according to // https://tools.ietf.org/html/draft-ietf-mmusic-rid-15#section-10 - RTCErrorOr DeserializeRidDescription( + RTCErrorOr DeserializeRidDescription( + const MediaContentDescription& media_desc, absl::string_view string) const; }; } // namespace webrtc -#endif // PC_SDP_SERIALIZER_H_ +#endif // PC_SIMULCAST_SDP_SERIALIZER_H_ diff --git a/pc/sdp_serializer_unittest.cc b/pc/simulcast_sdp_serializer_unittest.cc similarity index 79% rename from pc/sdp_serializer_unittest.cc rename to pc/simulcast_sdp_serializer_unittest.cc index c907ecbd6c..c92f08d638 100644 --- a/pc/sdp_serializer_unittest.cc +++ b/pc/simulcast_sdp_serializer_unittest.cc @@ -8,24 +8,27 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "pc/sdp_serializer.h" +#include "pc/simulcast_sdp_serializer.h" #include #include #include -#include #include +#include "media/base/codec.h" +#include "media/base/rid_description.h" +#include "pc/session_description.h" +#include "pc/simulcast_description.h" #include "test/gtest.h" -using cricket::RidDescription; -using cricket::RidDirection; -using cricket::SimulcastDescription; -using cricket::SimulcastLayer; -using cricket::SimulcastLayerList; using ::testing::TestWithParam; using ::testing::ValuesIn; +using ::webrtc::RidDescription; +using ::webrtc::RidDirection; +using ::webrtc::SimulcastDescription; +using ::webrtc::SimulcastLayer; +using ::webrtc::SimulcastLayerList; namespace webrtc { @@ -90,7 +93,7 @@ void ExpectEqual(const SimulcastDescription& expected, void ExpectEqual(const RidDescription& expected, const RidDescription& actual) { EXPECT_EQ(expected.rid, actual.rid); EXPECT_EQ(expected.direction, actual.direction); - ExpectEqual(expected.payload_types, actual.payload_types); + ExpectEqual(expected.codecs, actual.codecs); ExpectEqual(expected.restrictions, actual.restrictions); } } // namespace @@ -102,7 +105,7 @@ class SimulcastSdpSerializerTest : public TestWithParam { // `expected` - The expected output Simulcast to compare to. void TestDeserialization(const std::string& str, const SimulcastDescription& expected) const { - SdpSerializer deserializer; + SimulcastSdpSerializer deserializer; auto result = deserializer.DeserializeSimulcastDescription(str); EXPECT_TRUE(result.ok()); ExpectEqual(expected, result.value()); @@ -113,7 +116,7 @@ class SimulcastSdpSerializerTest : public TestWithParam { // `expected` - The expected output string to compare to. 
void TestSerialization(const SimulcastDescription& simulcast, const std::string& expected) const { - SdpSerializer serializer; + SimulcastSdpSerializer serializer; auto result = serializer.SerializeSimulcastDescription(simulcast); EXPECT_EQ(expected, result); } @@ -214,7 +217,7 @@ TEST_F(SimulcastSdpSerializerTest, Deserialize_PausedStreams) { // Parameterized negative test case for deserialization with invalid inputs. TEST_P(SimulcastSdpSerializerTest, SimulcastDeserializationFailed) { - SdpSerializer deserializer; + SimulcastSdpSerializer deserializer; auto result = deserializer.DeserializeSimulcastDescription(GetParam()); EXPECT_FALSE(result.ok()); } @@ -284,10 +287,11 @@ class RidDescriptionSdpSerializerTest : public TestWithParam { // Runs a test for deserializing Rid Descriptions. // `str` - The serialized Rid Description to parse. // `expected` - The expected output RidDescription to compare to. - void TestDeserialization(const std::string& str, + void TestDeserialization(const MediaContentDescription& media_desc, + const std::string& str, const RidDescription& expected) const { - SdpSerializer deserializer; - auto result = deserializer.DeserializeRidDescription(str); + SimulcastSdpSerializer deserializer; + auto result = deserializer.DeserializeRidDescription(media_desc, str); EXPECT_TRUE(result.ok()); ExpectEqual(expected, result.value()); } @@ -295,10 +299,12 @@ class RidDescriptionSdpSerializerTest : public TestWithParam { // Runs a test for serializing RidDescriptions. // `rid_description` - The RidDescription to serialize. // `expected` - The expected output string to compare to. - void TestSerialization(const RidDescription& rid_description, + void TestSerialization(const MediaContentDescription& media_desc, + const RidDescription& rid_description, const std::string& expected) const { - SdpSerializer serializer; - auto result = serializer.SerializeRidDescription(rid_description); + SimulcastSdpSerializer serializer; + auto result = + serializer.SerializeRidDescription(media_desc, rid_description); EXPECT_EQ(expected, result); } }; @@ -306,27 +312,36 @@ class RidDescriptionSdpSerializerTest : public TestWithParam { // Test serialization for RidDescription that only specifies send. TEST_F(RidDescriptionSdpSerializerTest, Serialize_OnlyDirectionSend) { RidDescription rid_description("1", RidDirection::kSend); - TestSerialization(rid_description, "1 send"); + TestSerialization(VideoContentDescription(), rid_description, "1 send"); } // Test serialization for RidDescription that only specifies receive. TEST_F(RidDescriptionSdpSerializerTest, Serialize_OnlyDirectionReceive) { RidDescription rid_description("2", RidDirection::kReceive); - TestSerialization(rid_description, "2 recv"); + TestSerialization(VideoContentDescription(), rid_description, "2 recv"); } // Test serialization for RidDescription with format list. TEST_F(RidDescriptionSdpSerializerTest, Serialize_FormatList) { + Codec vp8 = CreateVideoCodec(101, "VP8"); + Codec vp9 = CreateVideoCodec(102, "VP9"); + VideoContentDescription video_desc; + video_desc.set_codecs({vp8, vp9}); + RidDescription rid_description("3", RidDirection::kSend); - rid_description.payload_types = {102, 101}; - TestSerialization(rid_description, "3 send pt=102,101"); + rid_description.codecs = {vp9, vp8}; + TestSerialization(video_desc, rid_description, "3 send pt=102,101"); } // Test serialization for RidDescription with format list. 
TEST_F(RidDescriptionSdpSerializerTest, Serialize_FormatListSingleFormat) { + Codec vp8 = CreateVideoCodec(100, "VP8"); + VideoContentDescription video_desc; + video_desc.set_codecs({vp8}); + RidDescription rid_description("4", RidDirection::kReceive); - rid_description.payload_types = {100}; - TestSerialization(rid_description, "4 recv pt=100"); + rid_description.codecs = {vp8}; + TestSerialization(video_desc, rid_description, "4 recv pt=100"); } // Test serialization for RidDescription with restriction list. @@ -335,63 +350,97 @@ TEST_F(RidDescriptionSdpSerializerTest, Serialize_AttributeList) { RidDescription rid_description("5", RidDirection::kSend); rid_description.restrictions["max-width"] = "1280"; rid_description.restrictions["max-height"] = "720"; - TestSerialization(rid_description, "5 send max-height=720;max-width=1280"); + TestSerialization(VideoContentDescription(), rid_description, + "5 send max-height=720;max-width=1280"); } // Test serialization for RidDescription with format list and attribute list. // Note: restriction list will be sorted because it is stored in a map. TEST_F(RidDescriptionSdpSerializerTest, Serialize_FormatAndAttributeList) { + Codec vp8 = CreateVideoCodec(103, "VP8"); + Codec vp9 = CreateVideoCodec(104, "VP9"); + VideoContentDescription video_desc; + video_desc.set_codecs({vp8, vp9}); + RidDescription rid_description("6", RidDirection::kSend); - rid_description.payload_types = {103, 104}; + rid_description.codecs = {vp8, vp9}; rid_description.restrictions["max-mbps"] = "108000"; rid_description.restrictions["max-br"] = "64000"; - TestSerialization(rid_description, + TestSerialization(video_desc, rid_description, "6 send pt=103,104;max-br=64000;max-mbps=108000"); } // Test serialization for attribute list that has key with no value. // Note: restriction list will be sorted because it is stored in a map. TEST_F(RidDescriptionSdpSerializerTest, Serialize_RestrictionWithoutValue) { + Codec vp8 = CreateVideoCodec(103, "VP8"); + VideoContentDescription video_desc; + video_desc.set_codecs({vp8}); + RidDescription rid_description("7", RidDirection::kReceive); - rid_description.payload_types = {103}; + rid_description.codecs = {vp8}; rid_description.restrictions["max-width"] = "1280"; rid_description.restrictions["max-height"] = "720"; rid_description.restrictions["max-myval"] = ""; - TestSerialization(rid_description, + TestSerialization(video_desc, rid_description, "7 recv pt=103;max-height=720;max-myval;max-width=1280"); } // Test simulcast deserialization with simple send stream. TEST_F(RidDescriptionSdpSerializerTest, Deserialize_SimpleSendCase) { RidDescription rid_description("1", RidDirection::kSend); - TestDeserialization("1 send", rid_description); + TestDeserialization(VideoContentDescription(), "1 send", rid_description); } // Test simulcast deserialization with simple receive stream. TEST_F(RidDescriptionSdpSerializerTest, Deserialize_SimpleReceiveCase) { RidDescription rid_description("2", RidDirection::kReceive); - TestDeserialization("2 recv", rid_description); + TestDeserialization(VideoContentDescription(), "2 recv", rid_description); } // Test simulcast deserialization with single format. 
TEST_F(RidDescriptionSdpSerializerTest, Deserialize_WithFormat) { + Codec vp8 = CreateVideoCodec(101, "VP8"); + VideoContentDescription video_desc; + video_desc.set_codecs({vp8}); + + RidDescription rid_description("3", RidDirection::kSend); + rid_description.codecs = {vp8}; + TestDeserialization(video_desc, "3 send pt=101", rid_description); +} + +// Test simulcast deserialization with valid format syntax (`pt=97`) but using +// a payload type that does not refer to any codec in the media description. +TEST_F(RidDescriptionSdpSerializerTest, Deserialize_ReferencingUnknownCodec) { + Codec vp8 = CreateVideoCodec(101, "VP8"); + VideoContentDescription video_desc; + video_desc.set_codecs({vp8}); + RidDescription rid_description("3", RidDirection::kSend); - rid_description.payload_types = {101}; - TestDeserialization("3 send pt=101", rid_description); + rid_description.codecs = {}; // pt=97 is ignored resulting in an empty list. + TestDeserialization(video_desc, "3 send pt=97", rid_description); } // Test simulcast deserialization with multiple formats. TEST_F(RidDescriptionSdpSerializerTest, Deserialize_WithMultipleFormats) { + Codec vp8 = CreateVideoCodec(101, "VP8"); + Codec vp9 = CreateVideoCodec(102, "VP9"); + Codec av1 = CreateVideoCodec(103, "AV1"); + Codec h264 = CreateVideoCodec(104, "H264"); + VideoContentDescription video_desc; + video_desc.set_codecs({vp8, vp9, av1, h264}); + RidDescription rid_description("4", RidDirection::kSend); - rid_description.payload_types = {103, 104, 101, 102}; - TestDeserialization("4 send pt=103,104,101,102", rid_description); + rid_description.codecs = {av1, h264, vp8, vp9}; + TestDeserialization(video_desc, "4 send pt=103,104,101,102", rid_description); } // Test simulcast deserialization with restriction. TEST_F(RidDescriptionSdpSerializerTest, Deserialize_WithRestriction) { RidDescription rid_description("5", RidDirection::kReceive); rid_description.restrictions["max-height"] = "720"; - TestDeserialization("5 recv max-height=720", rid_description); + TestDeserialization(VideoContentDescription(), "5 recv max-height=720", + rid_description); } // Test simulcast deserialization with multiple restrictions. @@ -402,6 +451,7 @@ TEST_F(RidDescriptionSdpSerializerTest, Deserialize_WithMultipleRestrictions) { rid_description.restrictions["max-fr"] = "60"; rid_description.restrictions["max-bps"] = "14000"; TestDeserialization( + VideoContentDescription(), "6 recv max-height=720;max-width=1920;max-bps=14000;max-fr=60", rid_description); } @@ -411,26 +461,37 @@ TEST_F(RidDescriptionSdpSerializerTest, Deserialize_WithCustomRestrictions) { RidDescription rid_description("7", RidDirection::kSend); rid_description.restrictions["foo"] = "bar"; rid_description.restrictions["max-height"] = "720"; - TestDeserialization("7 send max-height=720;foo=bar", rid_description); + TestDeserialization(VideoContentDescription(), + "7 send max-height=720;foo=bar", rid_description); } // Test simulcast deserialization with multiple formats and restrictions. 
TEST_F(RidDescriptionSdpSerializerTest, Deserialize_WithFormatAndRestrictions) { + Codec av1 = CreateVideoCodec(103, "AV1"); + Codec h264 = CreateVideoCodec(104, "H264"); + VideoContentDescription video_desc; + video_desc.set_codecs({av1, h264}); + RidDescription rid_description("8", RidDirection::kSend); - rid_description.payload_types = {104, 103}; + rid_description.codecs = {h264, av1}; rid_description.restrictions["max-height"] = "720"; rid_description.restrictions["max-width"] = "1920"; - TestDeserialization("8 send pt=104,103;max-height=720;max-width=1920", + TestDeserialization(video_desc, + "8 send pt=104,103;max-height=720;max-width=1920", rid_description); } // Test simulcast deserialization with restriction that has no value. TEST_F(RidDescriptionSdpSerializerTest, Deserialize_RestrictionHasNoValue) { + Codec h264 = CreateVideoCodec(104, "H264"); + VideoContentDescription video_desc; + video_desc.set_codecs({h264}); + RidDescription rid_description("9", RidDirection::kReceive); - rid_description.payload_types = {104}; + rid_description.codecs = {h264}; rid_description.restrictions["max-height"]; rid_description.restrictions["max-width"] = "1920"; - TestDeserialization("9 recv pt=104;max-height;max-width=1920", + TestDeserialization(video_desc, "9 recv pt=104;max-height;max-width=1920", rid_description); } @@ -442,13 +503,18 @@ TEST_F(RidDescriptionSdpSerializerTest, Deserialize_RestrictionHasNoValue) { TEST_F(RidDescriptionSdpSerializerTest, Deserialize_AmbiguousCase) { RidDescription rid_description("1", RidDirection::kSend); rid_description.restrictions["recv"]; // No value. - TestDeserialization("1 send recv", rid_description); + TestDeserialization(VideoContentDescription(), "1 send recv", + rid_description); } // Parameterized negative test case for deserialization with invalid inputs. 
TEST_P(RidDescriptionSdpSerializerTest, RidDescriptionDeserializationFailed) { - SdpSerializer deserializer; - auto result = deserializer.DeserializeRidDescription(GetParam()); + VideoContentDescription video_desc; + video_desc.set_codecs( + {CreateVideoCodec(101, "VP8"), CreateVideoCodec(102, "VP9")}); + + SimulcastSdpSerializer deserializer; + auto result = deserializer.DeserializeRidDescription(video_desc, GetParam()); EXPECT_FALSE(result.ok()); } diff --git a/pc/slow_peer_connection_integration_test.cc b/pc/slow_peer_connection_integration_test.cc index fd9d3417df..d306989bc9 100644 --- a/pc/slow_peer_connection_integration_test.cc +++ b/pc/slow_peer_connection_integration_test.cc @@ -15,21 +15,22 @@ #include #include +#include #include #include #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/dtmf_sender_interface.h" #include "api/peer_connection_interface.h" #include "api/rtp_receiver_interface.h" #include "api/scoped_refptr.h" +#include "api/test/rtc_error_matchers.h" #include "api/units/time_delta.h" #include "p2p/base/port_allocator.h" #include "p2p/base/port_interface.h" -#include "p2p/base/stun_server.h" -#include "p2p/base/test_stun_server.h" +#include "p2p/test/stun_server.h" +#include "p2p/test/test_stun_server.h" #include "pc/test/integration_test_helpers.h" #include "pc/test/mock_peer_connection_observers.h" #include "rtc_base/fake_clock.h" @@ -38,10 +39,10 @@ #include "rtc_base/gunit.h" #include "rtc_base/logging.h" #include "rtc_base/socket_address.h" -#include "rtc_base/ssl_certificate.h" #include "rtc_base/test_certificate_verifier.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" namespace webrtc { @@ -61,13 +62,13 @@ class PeerConnectionIntegrationTest // where order of construction is finely controlled. // This also ensures peerconnection is closed before switching back to non-fake // clock, avoiding other races and DCHECK failures such as in rtp_sender.cc. -class FakeClockForTest : public rtc::ScopedFakeClock { +class FakeClockForTest : public ScopedFakeClock { protected: FakeClockForTest() { // Some things use a time of "0" as a special value, so we need to start out // the fake clock at a nonzero time. // TODO(deadbeef): Fix this. - AdvanceTime(webrtc::TimeDelta::Seconds(1000)); + AdvanceTime(TimeDelta::Seconds(1000)); } // Explicit handle. @@ -104,23 +105,33 @@ TEST_P(PeerConnectionIntegrationTest, callee()->AddAudioVideoTracks(); // Start offer/answer exchange and wait for it to complete. caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Should be one receiver each for audio/video. EXPECT_EQ(2U, caller()->rtp_receiver_observers().size()); EXPECT_EQ(2U, callee()->rtp_receiver_observers().size()); // Wait for all "first packet received" callbacks to be fired. 
- EXPECT_TRUE_WAIT( - absl::c_all_of(caller()->rtp_receiver_observers(), - [](const std::unique_ptr& o) { - return o->first_packet_received(); - }), - kMaxWaitForFramesMs); - EXPECT_TRUE_WAIT( - absl::c_all_of(callee()->rtp_receiver_observers(), - [](const std::unique_ptr& o) { - return o->first_packet_received(); - }), - kMaxWaitForFramesMs); + EXPECT_THAT(WaitUntil( + [&] { + return absl::c_all_of( + caller()->rtp_receiver_observers(), + [](const std::unique_ptr& o) { + return o->first_packet_received(); + }); + }, + ::testing::IsTrue(), {.timeout = kMaxWaitForFrames}), + IsRtcOk()); + EXPECT_THAT(WaitUntil( + [&] { + return absl::c_all_of( + callee()->rtp_receiver_observers(), + [](const std::unique_ptr& o) { + return o->first_packet_received(); + }); + }, + ::testing::IsTrue(), {.timeout = kMaxWaitForFrames}), + IsRtcOk()); // If new observers are set after the first packet was already received, the // callback should still be invoked. caller()->ResetRtpReceiverObservers(); @@ -161,45 +172,44 @@ class DummyDtmfObserver : public DtmfSenderObserverInterface { TEST_P(PeerConnectionIntegrationTest, SSLCertificateVerifierFailureUsedForTurnConnectionsFailsConnection) { - static const rtc::SocketAddress turn_server_internal_address{"88.88.88.0", - 3478}; - static const rtc::SocketAddress turn_server_external_address{"88.88.88.1", 0}; + static const SocketAddress turn_server_internal_address{"88.88.88.0", 3478}; + static const SocketAddress turn_server_external_address{"88.88.88.1", 0}; // Enable TCP-TLS for the fake turn server. We need to pass in 88.88.88.0 so // that host name verification passes on the fake certificate. CreateTurnServer(turn_server_internal_address, turn_server_external_address, - cricket::PROTO_TLS, "88.88.88.0"); + PROTO_TLS, "88.88.88.0"); - webrtc::PeerConnectionInterface::IceServer ice_server; + PeerConnectionInterface::IceServer ice_server; ice_server.urls.push_back("turns:88.88.88.0:3478?transport=tcp"); ice_server.username = "test"; ice_server.password = "test"; PeerConnectionInterface::RTCConfiguration client_1_config; client_1_config.servers.push_back(ice_server); - client_1_config.type = webrtc::PeerConnectionInterface::kRelay; + client_1_config.type = PeerConnectionInterface::kRelay; PeerConnectionInterface::RTCConfiguration client_2_config; client_2_config.servers.push_back(ice_server); // Setting the type to kRelay forces the connection to go through a TURN // server. - client_2_config.type = webrtc::PeerConnectionInterface::kRelay; + client_2_config.type = PeerConnectionInterface::kRelay; // Get a copy to the pointer so we can verify calls later. - rtc::TestCertificateVerifier* client_1_cert_verifier = - new rtc::TestCertificateVerifier(); + TestCertificateVerifier* client_1_cert_verifier = + new TestCertificateVerifier(); client_1_cert_verifier->verify_certificate_ = false; - rtc::TestCertificateVerifier* client_2_cert_verifier = - new rtc::TestCertificateVerifier(); + TestCertificateVerifier* client_2_cert_verifier = + new TestCertificateVerifier(); client_2_cert_verifier->verify_certificate_ = false; // Create the dependencies with the test certificate verifier. 
- webrtc::PeerConnectionDependencies client_1_deps(nullptr); + PeerConnectionDependencies client_1_deps(nullptr); client_1_deps.tls_cert_verifier = - std::unique_ptr(client_1_cert_verifier); - webrtc::PeerConnectionDependencies client_2_deps(nullptr); + std::unique_ptr(client_1_cert_verifier); + PeerConnectionDependencies client_2_deps(nullptr); client_2_deps.tls_cert_verifier = - std::unique_ptr(client_2_cert_verifier); + std::unique_ptr(client_2_cert_verifier); ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndDeps( client_1_config, std::move(client_1_deps), client_2_config, @@ -217,7 +227,7 @@ TEST_P(PeerConnectionIntegrationTest, // TODO(bugs.webrtc.org/9219): When IceConnectionState is implemented // properly, should be able to just wait for a state of "failed" instead of // waiting a fixed 10 seconds. - WAIT_(DtlsConnected(), kDefaultTimeout, wait_res); + WAIT_(DtlsConnected(), kDefaultTimeout.ms(), wait_res); ASSERT_FALSE(wait_res); EXPECT_GT(client_1_cert_verifier->call_count_, 0u); @@ -234,7 +244,9 @@ TEST_P(PeerConnectionIntegrationTest, GetCaptureStartNtpTimeWithOldStatsApi) { // Do offer/answer, wait for the callee to receive some frames. caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Get the remote audio track created on the receiver, so they can be used as // GetStats filters. @@ -244,9 +256,15 @@ TEST_P(PeerConnectionIntegrationTest, GetCaptureStartNtpTimeWithOldStatsApi) { // Get the audio output level stats. Note that the level is not available // until an RTCP packet has been received. - EXPECT_TRUE_WAIT(callee()->OldGetStatsForTrack(remote_audio_track.get()) - ->CaptureStartNtpTime() > 0, - 2 * kMaxWaitForFramesMs); + EXPECT_THAT( + WaitUntil( + [&] { + return callee() + ->OldGetStatsForTrack(remote_audio_track.get()) + ->CaptureStartNtpTime(); + }, + ::testing::Gt(0), {.timeout = 2 * kMaxWaitForFrames}), + IsRtcOk()); } // Test that firewalling the ICE connection causes the clients to identify the @@ -262,17 +280,12 @@ class PeerConnectionIntegrationIceStatesTest } void StartStunServer(const SocketAddress& server_address) { - stun_server_.reset( - cricket::TestStunServer::Create(firewall(), server_address)); + stun_server_ = + TestStunServer::Create(firewall(), server_address, *network_thread()); } bool TestIPv6() { - return (port_allocator_flags_ & cricket::PORTALLOCATOR_ENABLE_IPV6); - } - - void SetPortAllocatorFlags() { - PeerConnectionIntegrationBaseTest::SetPortAllocatorFlags( - port_allocator_flags_, port_allocator_flags_); + return (port_allocator_flags_ & PORTALLOCATOR_ENABLE_IPV6); } std::vector CallerAddresses() { @@ -307,9 +320,11 @@ class PeerConnectionIntegrationIceStatesTest } } + uint32_t port_allocator_flags() const { return port_allocator_flags_; } + private: uint32_t port_allocator_flags_; - std::unique_ptr stun_server_; + TestStunServer::StunServerPtr stun_server_; }; // Ensure FakeClockForTest is constructed first (see class for rationale). @@ -326,7 +341,7 @@ class PeerConnectionIntegrationIceStatesTestWithFakeClock // to time out. 
TEST_P(PeerConnectionIntegrationIceStatesTestWithFakeClock, VerifyIceStates) { const SocketAddress kStunServerAddress = - SocketAddress("99.99.99.1", cricket::STUN_SERVER_PORT); + SocketAddress("99.99.99.1", STUN_SERVER_PORT); StartStunServer(kStunServerAddress); PeerConnectionInterface::RTCConfiguration config; @@ -335,10 +350,10 @@ TEST_P(PeerConnectionIntegrationIceStatesTestWithFakeClock, VerifyIceStates) { "stun:" + kStunServerAddress.HostAsURIString() + ":" + kStunServerAddress.PortAsString()); config.servers.push_back(ice_stun_server); + config.port_allocator_config.flags = port_allocator_flags(); ASSERT_TRUE(CreatePeerConnectionWrappersWithConfig(config, config)); ConnectFakeSignaling(); - SetPortAllocatorFlags(); SetUpNetworkInterfaces(); caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); @@ -357,12 +372,16 @@ TEST_P(PeerConnectionIntegrationIceStatesTestWithFakeClock, VerifyIceStates) { // background. caller()->CreateAndSetAndSignalOffer(); - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kDefaultTimeout, - FakeClock()); - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionCompleted, - caller()->standardized_ice_connection_state(), - kDefaultTimeout, FakeClock()); + ASSERT_THAT( + WaitUntil([&] { return caller()->ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionCompleted), + {.clock = &FakeClock()}), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return caller()->standardized_ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionCompleted), + {.clock = &FakeClock()}), + IsRtcOk()); // Verify that the observer was notified of the intermediate transitions. EXPECT_THAT(caller()->ice_connection_state_history(), @@ -384,40 +403,55 @@ TEST_P(PeerConnectionIntegrationIceStatesTestWithFakeClock, VerifyIceStates) { // Block connections to/from the caller and wait for ICE to become // disconnected. for (const auto& caller_address : CallerAddresses()) { - firewall()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, caller_address); + firewall()->AddRule(false, FP_ANY, FD_ANY, caller_address); } RTC_LOG(LS_INFO) << "Firewall rules applied"; - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionDisconnected, - caller()->ice_connection_state(), kDefaultTimeout, - FakeClock()); - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionDisconnected, - caller()->standardized_ice_connection_state(), - kDefaultTimeout, FakeClock()); + ScopedFakeClock& fake_clock = FakeClock(); + ASSERT_THAT( + WaitUntil( + [&] { return caller()->ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionDisconnected), + {.timeout = TimeDelta::Seconds(10), .clock = &fake_clock}), + IsRtcOk()); + ASSERT_THAT( + WaitUntil( + [&] { return caller()->standardized_ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionDisconnected), + {.timeout = TimeDelta::Seconds(10), .clock = &fake_clock}), + IsRtcOk()); // Let ICE re-establish by removing the firewall rules. 
firewall()->ClearRules(); RTC_LOG(LS_INFO) << "Firewall rules cleared"; - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionCompleted, - caller()->ice_connection_state(), kDefaultTimeout, - FakeClock()); - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionCompleted, - caller()->standardized_ice_connection_state(), - kDefaultTimeout, FakeClock()); + ASSERT_THAT( + WaitUntil([&] { return caller()->ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionCompleted), + {.clock = &fake_clock}), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return caller()->standardized_ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionCompleted), + {.clock = &fake_clock}), + IsRtcOk()); // According to RFC7675, if there is no response within 30 seconds then the // peer should consider the other side to have rejected the connection. This // is signaled by the state transitioning to "failed". - constexpr int kConsentTimeout = 30000; + constexpr TimeDelta kConsentTimeout = TimeDelta::Millis(30000); for (const auto& caller_address : CallerAddresses()) { - firewall()->AddRule(false, rtc::FP_ANY, rtc::FD_ANY, caller_address); + firewall()->AddRule(false, FP_ANY, FD_ANY, caller_address); } RTC_LOG(LS_INFO) << "Firewall rules applied again"; - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionFailed, - caller()->ice_connection_state(), kConsentTimeout, - FakeClock()); - ASSERT_EQ_SIMULATED_WAIT(PeerConnectionInterface::kIceConnectionFailed, - caller()->standardized_ice_connection_state(), - kConsentTimeout, FakeClock()); + ASSERT_THAT( + WaitUntil([&] { return caller()->ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionFailed), + {.timeout = kConsentTimeout, .clock = &fake_clock}), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return caller()->standardized_ice_connection_state(); }, + ::testing::Eq(PeerConnectionInterface::kIceConnectionFailed), + {.timeout = kConsentTimeout, .clock = &fake_clock}), + IsRtcOk()); } #endif @@ -429,7 +463,9 @@ TEST_P(PeerConnectionIntegrationTest, CallTransferredForCallee) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Keep the original peer around which will still send packets to the // receiving client. These SRTP packets will be dropped. @@ -443,7 +479,9 @@ TEST_P(PeerConnectionIntegrationTest, CallTransferredForCallee) { ConnectFakeSignaling(); caller()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Wait for some additional frames to be transmitted end-to-end. MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudioAndVideo(); @@ -458,7 +496,9 @@ TEST_P(PeerConnectionIntegrationTest, CallTransferredForCaller) { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Keep the original peer around which will still send packets to the // receiving client. These SRTP packets will be dropped. 
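Note: in the VerifyIceStates changes above, the simulated-clock waits now pass both the timeout and the clock as WaitUntil options instead of going through ASSERT_EQ_SIMULATED_WAIT. A condensed sketch of the disconnect step is shown below, using only names that appear in this diff (the surrounding test fixture is assumed); the later consent-expiry step reuses the same shape with the 30 second kConsentTimeout that matches the RFC 7675 comment.

  // Block the caller's addresses on the virtual firewall, then advance the
  // simulated clock until ICE reports kIceConnectionDisconnected.
  for (const auto& caller_address : CallerAddresses()) {
    firewall()->AddRule(false, FP_ANY, FD_ANY, caller_address);
  }
  ASSERT_THAT(
      WaitUntil(
          [&] { return caller()->ice_connection_state(); },
          ::testing::Eq(PeerConnectionInterface::kIceConnectionDisconnected),
          {.timeout = TimeDelta::Seconds(10), .clock = &FakeClock()}),
      IsRtcOk());
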
@@ -473,7 +513,9 @@ TEST_P(PeerConnectionIntegrationTest, CallTransferredForCaller) { callee()->AddAudioVideoTracks(); caller()->SetOfferAnswerOptions(IceRestartOfferAnswerOptions()); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return SignalingStateStable(); }, ::testing::IsTrue()), + IsRtcOk()); // Wait for some additional frames to be transmitted end-to-end. MediaExpectations media_expectations; media_expectations.ExpectBidirectionalAudioAndVideo(); @@ -485,14 +527,14 @@ INSTANTIATE_TEST_SUITE_P(PeerConnectionIntegrationTest, Values(SdpSemantics::kPlanB_DEPRECATED, SdpSemantics::kUnifiedPlan)); -constexpr uint32_t kFlagsIPv4NoStun = cricket::PORTALLOCATOR_DISABLE_TCP | - cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_DISABLE_RELAY; +constexpr uint32_t kFlagsIPv4NoStun = PORTALLOCATOR_DISABLE_TCP | + PORTALLOCATOR_DISABLE_STUN | + PORTALLOCATOR_DISABLE_RELAY; constexpr uint32_t kFlagsIPv6NoStun = - cricket::PORTALLOCATOR_DISABLE_TCP | cricket::PORTALLOCATOR_DISABLE_STUN | - cricket::PORTALLOCATOR_ENABLE_IPV6 | cricket::PORTALLOCATOR_DISABLE_RELAY; + PORTALLOCATOR_DISABLE_TCP | PORTALLOCATOR_DISABLE_STUN | + PORTALLOCATOR_ENABLE_IPV6 | PORTALLOCATOR_DISABLE_RELAY; constexpr uint32_t kFlagsIPv4Stun = - cricket::PORTALLOCATOR_DISABLE_TCP | cricket::PORTALLOCATOR_DISABLE_RELAY; + PORTALLOCATOR_DISABLE_TCP | PORTALLOCATOR_DISABLE_RELAY; INSTANTIATE_TEST_SUITE_P( PeerConnectionIntegrationTest, diff --git a/pc/srtp_filter.cc b/pc/srtp_filter.cc deleted file mode 100644 index b8be63cd22..0000000000 --- a/pc/srtp_filter.cc +++ /dev/null @@ -1,280 +0,0 @@ -/* - * Copyright 2009 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "pc/srtp_filter.h" - -#include - -#include - -#include "absl/strings/match.h" -#include "rtc_base/logging.h" -#include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/third_party/base64/base64.h" -#include "rtc_base/zero_memory.h" - -namespace cricket { - -SrtpFilter::SrtpFilter() {} - -SrtpFilter::~SrtpFilter() {} - -bool SrtpFilter::IsActive() const { - return state_ >= ST_ACTIVE; -} - -bool SrtpFilter::Process(const std::vector& cryptos, - webrtc::SdpType type, - ContentSource source) { - bool ret = false; - switch (type) { - case webrtc::SdpType::kOffer: - ret = SetOffer(cryptos, source); - break; - case webrtc::SdpType::kPrAnswer: - ret = SetProvisionalAnswer(cryptos, source); - break; - case webrtc::SdpType::kAnswer: - ret = SetAnswer(cryptos, source); - break; - default: - break; - } - - if (!ret) { - return false; - } - - return true; -} - -bool SrtpFilter::SetOffer(const std::vector& offer_params, - ContentSource source) { - if (!ExpectOffer(source)) { - RTC_LOG(LS_ERROR) << "Wrong state to update SRTP offer"; - return false; - } - return StoreParams(offer_params, source); -} - -bool SrtpFilter::SetAnswer(const std::vector& answer_params, - ContentSource source) { - return DoSetAnswer(answer_params, source, true); -} - -bool SrtpFilter::SetProvisionalAnswer( - const std::vector& answer_params, - ContentSource source) { - return DoSetAnswer(answer_params, source, false); -} - -bool SrtpFilter::ExpectOffer(ContentSource source) { - return ((state_ == ST_INIT) || (state_ == ST_ACTIVE) || - (state_ == ST_SENTOFFER && source == CS_LOCAL) || - (state_ == ST_SENTUPDATEDOFFER && source == CS_LOCAL) || - (state_ == ST_RECEIVEDOFFER && source == CS_REMOTE) || - (state_ == ST_RECEIVEDUPDATEDOFFER && source == CS_REMOTE)); -} - -bool SrtpFilter::StoreParams(const std::vector& params, - ContentSource source) { - offer_params_ = params; - if (state_ == ST_INIT) { - state_ = (source == CS_LOCAL) ? ST_SENTOFFER : ST_RECEIVEDOFFER; - } else if (state_ == ST_ACTIVE) { - state_ = - (source == CS_LOCAL) ? ST_SENTUPDATEDOFFER : ST_RECEIVEDUPDATEDOFFER; - } - return true; -} - -bool SrtpFilter::ExpectAnswer(ContentSource source) { - return ((state_ == ST_SENTOFFER && source == CS_REMOTE) || - (state_ == ST_RECEIVEDOFFER && source == CS_LOCAL) || - (state_ == ST_SENTUPDATEDOFFER && source == CS_REMOTE) || - (state_ == ST_RECEIVEDUPDATEDOFFER && source == CS_LOCAL) || - (state_ == ST_SENTPRANSWER_NO_CRYPTO && source == CS_LOCAL) || - (state_ == ST_SENTPRANSWER && source == CS_LOCAL) || - (state_ == ST_RECEIVEDPRANSWER_NO_CRYPTO && source == CS_REMOTE) || - (state_ == ST_RECEIVEDPRANSWER && source == CS_REMOTE)); -} - -bool SrtpFilter::DoSetAnswer(const std::vector& answer_params, - ContentSource source, - bool final) { - if (!ExpectAnswer(source)) { - RTC_LOG(LS_ERROR) << "Invalid state for SRTP answer"; - return false; - } - - // If the answer doesn't requests crypto complete the negotiation of an - // unencrypted session. - // Otherwise, finalize the parameters and apply them. - if (answer_params.empty()) { - if (final) { - return ResetParams(); - } else { - // Need to wait for the final answer to decide if - // we should go to Active state. - state_ = (source == CS_LOCAL) ? ST_SENTPRANSWER_NO_CRYPTO - : ST_RECEIVEDPRANSWER_NO_CRYPTO; - return true; - } - } - CryptoParams selected_params; - if (!NegotiateParams(answer_params, &selected_params)) - return false; - - const CryptoParams& new_send_params = - (source == CS_REMOTE) ? 
selected_params : answer_params[0]; - const CryptoParams& new_recv_params = - (source == CS_REMOTE) ? answer_params[0] : selected_params; - if (!ApplySendParams(new_send_params) || !ApplyRecvParams(new_recv_params)) { - return false; - } - applied_send_params_ = new_send_params; - applied_recv_params_ = new_recv_params; - - if (final) { - offer_params_.clear(); - state_ = ST_ACTIVE; - } else { - state_ = (source == CS_LOCAL) ? ST_SENTPRANSWER : ST_RECEIVEDPRANSWER; - } - return true; -} - -bool SrtpFilter::NegotiateParams(const std::vector& answer_params, - CryptoParams* selected_params) { - // We're processing an accept. We should have exactly one set of params, - // unless the offer didn't mention crypto, in which case we shouldn't be here. - bool ret = (answer_params.size() == 1U && !offer_params_.empty()); - if (ret) { - // We should find a match between the answer params and the offered params. - std::vector::const_iterator it; - for (it = offer_params_.begin(); it != offer_params_.end(); ++it) { - if (answer_params[0].Matches(*it)) { - break; - } - } - - if (it != offer_params_.end()) { - *selected_params = *it; - } else { - ret = false; - } - } - - if (!ret) { - RTC_LOG(LS_WARNING) << "Invalid parameters in SRTP answer"; - } - return ret; -} - -bool SrtpFilter::ResetParams() { - offer_params_.clear(); - applied_send_params_ = CryptoParams(); - applied_recv_params_ = CryptoParams(); - send_crypto_suite_ = absl::nullopt; - recv_crypto_suite_ = absl::nullopt; - send_key_.Clear(); - recv_key_.Clear(); - state_ = ST_INIT; - return true; -} - -bool SrtpFilter::ApplySendParams(const CryptoParams& send_params) { - if (applied_send_params_.crypto_suite == send_params.crypto_suite && - applied_send_params_.key_params == send_params.key_params) { - RTC_LOG(LS_INFO) << "Applying the same SRTP send parameters again. No-op."; - - // We do not want to reset the ROC if the keys are the same. So just return. - return true; - } - - send_crypto_suite_ = rtc::SrtpCryptoSuiteFromName(send_params.crypto_suite); - if (send_crypto_suite_ == rtc::kSrtpInvalidCryptoSuite) { - RTC_LOG(LS_WARNING) << "Unknown crypto suite(s) received:" - " send crypto_suite " - << send_params.crypto_suite; - return false; - } - - int send_key_len, send_salt_len; - if (!rtc::GetSrtpKeyAndSaltLengths(*send_crypto_suite_, &send_key_len, - &send_salt_len)) { - RTC_LOG(LS_ERROR) << "Could not get lengths for crypto suite(s):" - " send crypto_suite " - << send_params.crypto_suite; - return false; - } - - send_key_ = rtc::ZeroOnFreeBuffer(send_key_len + send_salt_len); - return ParseKeyParams(send_params.key_params, send_key_.data(), - send_key_.size()); -} - -bool SrtpFilter::ApplyRecvParams(const CryptoParams& recv_params) { - if (applied_recv_params_.crypto_suite == recv_params.crypto_suite && - applied_recv_params_.key_params == recv_params.key_params) { - RTC_LOG(LS_INFO) << "Applying the same SRTP recv parameters again. No-op."; - - // We do not want to reset the ROC if the keys are the same. So just return. 
- return true; - } - - recv_crypto_suite_ = rtc::SrtpCryptoSuiteFromName(recv_params.crypto_suite); - if (recv_crypto_suite_ == rtc::kSrtpInvalidCryptoSuite) { - RTC_LOG(LS_WARNING) << "Unknown crypto suite(s) received:" - " recv crypto_suite " - << recv_params.crypto_suite; - return false; - } - - int recv_key_len, recv_salt_len; - if (!rtc::GetSrtpKeyAndSaltLengths(*recv_crypto_suite_, &recv_key_len, - &recv_salt_len)) { - RTC_LOG(LS_ERROR) << "Could not get lengths for crypto suite(s):" - " recv crypto_suite " - << recv_params.crypto_suite; - return false; - } - - recv_key_ = rtc::ZeroOnFreeBuffer(recv_key_len + recv_salt_len); - return ParseKeyParams(recv_params.key_params, recv_key_.data(), - recv_key_.size()); -} - -bool SrtpFilter::ParseKeyParams(const std::string& key_params, - uint8_t* key, - size_t len) { - // example key_params: "inline:YUJDZGVmZ2hpSktMbW9QUXJzVHVWd3l6MTIzNDU2" - - // Fail if key-method is wrong. - if (!absl::StartsWith(key_params, "inline:")) { - return false; - } - - // Fail if base64 decode fails, or the key is the wrong size. - std::string key_b64(key_params.substr(7)), key_str; - if (!rtc::Base64::Decode(key_b64, rtc::Base64::DO_STRICT, &key_str, - nullptr) || - key_str.size() != len) { - return false; - } - - memcpy(key, key_str.c_str(), len); - // TODO(bugs.webrtc.org/8905): Switch to ZeroOnFreeBuffer for storing - // sensitive data. - rtc::ExplicitZeroMemory(&key_str[0], key_str.size()); - return true; -} - -} // namespace cricket diff --git a/pc/srtp_filter.h b/pc/srtp_filter.h deleted file mode 100644 index 59c43f624b..0000000000 --- a/pc/srtp_filter.h +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright 2009 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef PC_SRTP_FILTER_H_ -#define PC_SRTP_FILTER_H_ - -#include -#include - -#include -#include -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/array_view.h" -#include "api/crypto_params.h" -#include "api/jsep.h" -#include "api/sequence_checker.h" -#include "pc/session_description.h" -#include "rtc_base/buffer.h" -#include "rtc_base/ssl_stream_adapter.h" - -// Forward declaration to avoid pulling in libsrtp headers here -struct srtp_event_data_t; -struct srtp_ctx_t_; - -namespace cricket { - -// A helper class used to negotiate SDES crypto params. -// TODO(zhihuang): Find a better name for this class, like "SdesNegotiator". -class SrtpFilter { - public: - enum Mode { PROTECT, UNPROTECT }; - enum Error { - ERROR_NONE, - ERROR_FAIL, - ERROR_AUTH, - ERROR_REPLAY, - }; - - SrtpFilter(); - ~SrtpFilter(); - - // Whether the filter is active (i.e. crypto has been properly negotiated). - bool IsActive() const; - - // Handle the offer/answer negotiation of the crypto parameters internally. - // TODO(zhihuang): Make SetOffer/ProvisionalAnswer/Answer private as helper - // methods once start using Process. - bool Process(const std::vector& cryptos, - webrtc::SdpType type, - ContentSource source); - - // Indicates which crypto algorithms and keys were contained in the offer. - // offer_params should contain a list of available parameters to use, or none, - // if crypto is not desired. This must be called before SetAnswer. 
- bool SetOffer(const std::vector& offer_params, - ContentSource source); - // Same as SetAnwer. But multiple calls are allowed to SetProvisionalAnswer - // after a call to SetOffer. - bool SetProvisionalAnswer(const std::vector& answer_params, - ContentSource source); - // Indicates which crypto algorithms and keys were contained in the answer. - // answer_params should contain the negotiated parameters, which may be none, - // if crypto was not desired or could not be negotiated (and not required). - // This must be called after SetOffer. If crypto negotiation completes - // successfully, this will advance the filter to the active state. - bool SetAnswer(const std::vector& answer_params, - ContentSource source); - - bool ResetParams(); - - static bool ParseKeyParams(const std::string& params, - uint8_t* key, - size_t len); - - absl::optional send_crypto_suite() { return send_crypto_suite_; } - absl::optional recv_crypto_suite() { return recv_crypto_suite_; } - - rtc::ArrayView send_key() { return send_key_; } - rtc::ArrayView recv_key() { return recv_key_; } - - protected: - bool ExpectOffer(ContentSource source); - - bool StoreParams(const std::vector& params, - ContentSource source); - - bool ExpectAnswer(ContentSource source); - - bool DoSetAnswer(const std::vector& answer_params, - ContentSource source, - bool final); - - bool NegotiateParams(const std::vector& answer_params, - CryptoParams* selected_params); - - private: - bool ApplySendParams(const CryptoParams& send_params); - - bool ApplyRecvParams(const CryptoParams& recv_params); - - enum State { - ST_INIT, // SRTP filter unused. - ST_SENTOFFER, // Offer with SRTP parameters sent. - ST_RECEIVEDOFFER, // Offer with SRTP parameters received. - ST_SENTPRANSWER_NO_CRYPTO, // Sent provisional answer without crypto. - // Received provisional answer without crypto. - ST_RECEIVEDPRANSWER_NO_CRYPTO, - ST_ACTIVE, // Offer and answer set. - // SRTP filter is active but new parameters are offered. - // When the answer is set, the state transitions to ST_ACTIVE or ST_INIT. - ST_SENTUPDATEDOFFER, - // SRTP filter is active but new parameters are received. - // When the answer is set, the state transitions back to ST_ACTIVE. - ST_RECEIVEDUPDATEDOFFER, - // SRTP filter is active but the sent answer is only provisional. - // When the final answer is set, the state transitions to ST_ACTIVE or - // ST_INIT. - ST_SENTPRANSWER, - // SRTP filter is active but the received answer is only provisional. - // When the final answer is set, the state transitions to ST_ACTIVE or - // ST_INIT. - ST_RECEIVEDPRANSWER - }; - State state_ = ST_INIT; - std::vector offer_params_; - CryptoParams applied_send_params_; - CryptoParams applied_recv_params_; - absl::optional send_crypto_suite_; - absl::optional recv_crypto_suite_; - rtc::ZeroOnFreeBuffer send_key_; - rtc::ZeroOnFreeBuffer recv_key_; -}; - -} // namespace cricket - -#endif // PC_SRTP_FILTER_H_ diff --git a/pc/srtp_filter_unittest.cc b/pc/srtp_filter_unittest.cc deleted file mode 100644 index fed023199f..0000000000 --- a/pc/srtp_filter_unittest.cc +++ /dev/null @@ -1,472 +0,0 @@ -/* - * Copyright 2004 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "pc/srtp_filter.h" - -#include - -#include "api/crypto_params.h" -#include "rtc_base/ssl_stream_adapter.h" -#include "test/gtest.h" - -using cricket::CryptoParams; -using cricket::CS_LOCAL; -using cricket::CS_REMOTE; - -namespace rtc { - -static const char kTestKeyParams1[] = - "inline:WVNfX19zZW1jdGwgKCkgewkyMjA7fQp9CnVubGVz"; -static const char kTestKeyParams2[] = - "inline:PS1uQCVeeCFCanVmcjkpPywjNWhcYD0mXXtxaVBR"; -static const char kTestKeyParams3[] = - "inline:1234X19zZW1jdGwgKCkgewkyMjA7fQp9CnVubGVz"; -static const char kTestKeyParams4[] = - "inline:4567QCVeeCFCanVmcjkpPywjNWhcYD0mXXtxaVBR"; -static const char kTestKeyParamsGcm1[] = - "inline:e166KFlKzJsGW0d5apX+rrI05vxbrvMJEzFI14aTDCa63IRTlLK4iH66uOI="; -static const char kTestKeyParamsGcm2[] = - "inline:6X0oCd55zfz4VgtOwsuqcFq61275PDYN5uwuu3p7ZUHbfUY2FMpdP4m2PEo="; -static const char kTestKeyParamsGcm3[] = - "inline:YKlABGZWMgX32xuMotrG0v0T7G83veegaVzubQ=="; -static const char kTestKeyParamsGcm4[] = - "inline:gJ6tWoUym2v+/F6xjr7xaxiS3QbJJozl3ZD/0A=="; -static const cricket::CryptoParams kTestCryptoParams1(1, - "AES_CM_128_HMAC_SHA1_80", - kTestKeyParams1, - ""); -static const cricket::CryptoParams kTestCryptoParams2(1, - "AES_CM_128_HMAC_SHA1_80", - kTestKeyParams2, - ""); -static const cricket::CryptoParams kTestCryptoParamsGcm1(1, - "AEAD_AES_256_GCM", - kTestKeyParamsGcm1, - ""); -static const cricket::CryptoParams kTestCryptoParamsGcm2(1, - "AEAD_AES_256_GCM", - kTestKeyParamsGcm2, - ""); -static const cricket::CryptoParams kTestCryptoParamsGcm3(1, - "AEAD_AES_128_GCM", - kTestKeyParamsGcm3, - ""); -static const cricket::CryptoParams kTestCryptoParamsGcm4(1, - "AEAD_AES_128_GCM", - kTestKeyParamsGcm4, - ""); - -class SrtpFilterTest : public ::testing::Test { - protected: - SrtpFilterTest() {} - static std::vector MakeVector(const CryptoParams& params) { - std::vector vec; - vec.push_back(params); - return vec; - } - - void TestSetParams(const std::vector& params1, - const std::vector& params2) { - EXPECT_TRUE(f1_.SetOffer(params1, CS_LOCAL)); - EXPECT_TRUE(f2_.SetOffer(params1, CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); - EXPECT_FALSE(f2_.IsActive()); - EXPECT_TRUE(f2_.SetAnswer(params2, CS_LOCAL)); - EXPECT_TRUE(f1_.SetAnswer(params2, CS_REMOTE)); - EXPECT_TRUE(f1_.IsActive()); - EXPECT_TRUE(f2_.IsActive()); - } - - void VerifyKeysAreEqual(ArrayView key1, - ArrayView key2) { - EXPECT_EQ(key1.size(), key2.size()); - EXPECT_EQ(0, memcmp(key1.data(), key2.data(), key1.size())); - } - - void VerifyCryptoParamsMatch(const std::string& cs1, const std::string& cs2) { - EXPECT_EQ(rtc::SrtpCryptoSuiteFromName(cs1), f1_.send_crypto_suite()); - EXPECT_EQ(rtc::SrtpCryptoSuiteFromName(cs2), f2_.send_crypto_suite()); - VerifyKeysAreEqual(f1_.send_key(), f2_.recv_key()); - VerifyKeysAreEqual(f2_.send_key(), f1_.recv_key()); - } - - cricket::SrtpFilter f1_; - cricket::SrtpFilter f2_; -}; - -// Test that we can set up the session and keys properly. 
-TEST_F(SrtpFilterTest, TestGoodSetupOneCryptoSuite) { - EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL)); - EXPECT_FALSE(f1_.IsActive()); - EXPECT_TRUE(f1_.SetAnswer(MakeVector(kTestCryptoParams2), CS_REMOTE)); - EXPECT_TRUE(f1_.IsActive()); -} - -TEST_F(SrtpFilterTest, TestGoodSetupOneCryptoSuiteGcm) { - EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParamsGcm1), CS_LOCAL)); - EXPECT_FALSE(f1_.IsActive()); - EXPECT_TRUE(f1_.SetAnswer(MakeVector(kTestCryptoParamsGcm2), CS_REMOTE)); - EXPECT_TRUE(f1_.IsActive()); -} - -// Test that we can set up things with multiple params. -TEST_F(SrtpFilterTest, TestGoodSetupMultipleCryptoSuites) { - std::vector offer(MakeVector(kTestCryptoParams1)); - std::vector answer(MakeVector(kTestCryptoParams2)); - offer.push_back(kTestCryptoParams1); - offer[1].tag = 2; - offer[1].crypto_suite = kCsAesCm128HmacSha1_32; - answer[0].tag = 2; - answer[0].crypto_suite = kCsAesCm128HmacSha1_32; - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_FALSE(f1_.IsActive()); - EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_TRUE(f1_.IsActive()); -} - -TEST_F(SrtpFilterTest, TestGoodSetupMultipleCryptoSuitesGcm) { - std::vector offer(MakeVector(kTestCryptoParamsGcm1)); - std::vector answer(MakeVector(kTestCryptoParamsGcm3)); - offer.push_back(kTestCryptoParamsGcm4); - offer[1].tag = 2; - answer[0].tag = 2; - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_FALSE(f1_.IsActive()); - EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_TRUE(f1_.IsActive()); -} - -// Test that we handle the cases where crypto is not desired. -TEST_F(SrtpFilterTest, TestGoodSetupNoCryptoSuites) { - std::vector offer, answer; - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); -} - -// Test that we handle the cases where crypto is not desired by the remote side. -TEST_F(SrtpFilterTest, TestGoodSetupNoAnswerCryptoSuites) { - std::vector answer; - EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL)); - EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); -} - -// Test that we fail if we call the functions the wrong way. -TEST_F(SrtpFilterTest, TestBadSetup) { - std::vector offer(MakeVector(kTestCryptoParams1)); - std::vector answer(MakeVector(kTestCryptoParams2)); - EXPECT_FALSE(f1_.SetAnswer(answer, CS_LOCAL)); - EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_FALSE(f1_.SetAnswer(answer, CS_LOCAL)); - EXPECT_FALSE(f1_.IsActive()); -} - -// Test that we can set offer multiple times from the same source. 
-TEST_F(SrtpFilterTest, TestGoodSetupMultipleOffers) { - EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL)); - EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams2), CS_LOCAL)); - EXPECT_FALSE(f1_.IsActive()); - EXPECT_TRUE(f1_.SetAnswer(MakeVector(kTestCryptoParams2), CS_REMOTE)); - EXPECT_TRUE(f1_.IsActive()); - EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL)); - EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams2), CS_LOCAL)); - EXPECT_TRUE(f1_.SetAnswer(MakeVector(kTestCryptoParams2), CS_REMOTE)); - - EXPECT_TRUE(f2_.SetOffer(MakeVector(kTestCryptoParams1), CS_REMOTE)); - EXPECT_TRUE(f2_.SetOffer(MakeVector(kTestCryptoParams2), CS_REMOTE)); - EXPECT_FALSE(f2_.IsActive()); - EXPECT_TRUE(f2_.SetAnswer(MakeVector(kTestCryptoParams2), CS_LOCAL)); - EXPECT_TRUE(f2_.IsActive()); - EXPECT_TRUE(f2_.SetOffer(MakeVector(kTestCryptoParams1), CS_REMOTE)); - EXPECT_TRUE(f2_.SetOffer(MakeVector(kTestCryptoParams2), CS_REMOTE)); - EXPECT_TRUE(f2_.SetAnswer(MakeVector(kTestCryptoParams2), CS_LOCAL)); -} -// Test that we can't set offer multiple times from different sources. -TEST_F(SrtpFilterTest, TestBadSetupMultipleOffers) { - EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL)); - EXPECT_FALSE(f1_.SetOffer(MakeVector(kTestCryptoParams2), CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); - EXPECT_TRUE(f1_.SetAnswer(MakeVector(kTestCryptoParams1), CS_REMOTE)); - EXPECT_TRUE(f1_.IsActive()); - EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams2), CS_LOCAL)); - EXPECT_FALSE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_REMOTE)); - EXPECT_TRUE(f1_.SetAnswer(MakeVector(kTestCryptoParams2), CS_REMOTE)); - - EXPECT_TRUE(f2_.SetOffer(MakeVector(kTestCryptoParams2), CS_REMOTE)); - EXPECT_FALSE(f2_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL)); - EXPECT_FALSE(f2_.IsActive()); - EXPECT_TRUE(f2_.SetAnswer(MakeVector(kTestCryptoParams2), CS_LOCAL)); - EXPECT_TRUE(f2_.IsActive()); - EXPECT_TRUE(f2_.SetOffer(MakeVector(kTestCryptoParams2), CS_REMOTE)); - EXPECT_FALSE(f2_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL)); - EXPECT_TRUE(f2_.SetAnswer(MakeVector(kTestCryptoParams2), CS_LOCAL)); -} - -// Test that we fail if we have params in the answer when none were offered. -TEST_F(SrtpFilterTest, TestNoAnswerCryptoSuites) { - std::vector offer; - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_FALSE(f1_.SetAnswer(MakeVector(kTestCryptoParams2), CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); -} - -// Test that we fail if we have too many params in our answer. -TEST_F(SrtpFilterTest, TestMultipleAnswerCryptoSuites) { - std::vector answer(MakeVector(kTestCryptoParams2)); - answer.push_back(kTestCryptoParams2); - answer[1].tag = 2; - answer[1].crypto_suite = kCsAesCm128HmacSha1_32; - EXPECT_TRUE(f1_.SetOffer(MakeVector(kTestCryptoParams1), CS_LOCAL)); - EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); -} - -// Test that we fail if we don't support the crypto suite. -TEST_F(SrtpFilterTest, TestInvalidCryptoSuite) { - std::vector offer(MakeVector(kTestCryptoParams1)); - std::vector answer(MakeVector(kTestCryptoParams2)); - offer[0].crypto_suite = answer[0].crypto_suite = "FOO"; - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); -} - -// Test that we fail if we can't agree on a tag. 
-TEST_F(SrtpFilterTest, TestNoMatchingTag) { - std::vector offer(MakeVector(kTestCryptoParams1)); - std::vector answer(MakeVector(kTestCryptoParams2)); - answer[0].tag = 99; - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); -} - -// Test that we fail if we can't agree on a crypto suite. -TEST_F(SrtpFilterTest, TestNoMatchingCryptoSuite) { - std::vector offer(MakeVector(kTestCryptoParams1)); - std::vector answer(MakeVector(kTestCryptoParams2)); - answer[0].tag = 2; - answer[0].crypto_suite = "FOO"; - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); -} - -// Test that we fail keys with bad base64 content. -TEST_F(SrtpFilterTest, TestInvalidKeyData) { - std::vector offer(MakeVector(kTestCryptoParams1)); - std::vector answer(MakeVector(kTestCryptoParams2)); - answer[0].key_params = "inline:!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"; - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); -} - -// Test that we fail keys with the wrong key-method. -TEST_F(SrtpFilterTest, TestWrongKeyMethod) { - std::vector offer(MakeVector(kTestCryptoParams1)); - std::vector answer(MakeVector(kTestCryptoParams2)); - answer[0].key_params = "outline:PS1uQCVeeCFCanVmcjkpPywjNWhcYD0mXXtxaVBR"; - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); -} - -// Test that we fail keys of the wrong length. -TEST_F(SrtpFilterTest, TestKeyTooShort) { - std::vector offer(MakeVector(kTestCryptoParams1)); - std::vector answer(MakeVector(kTestCryptoParams2)); - answer[0].key_params = "inline:PS1uQCVeeCFCanVmcjkpPywjNWhcYD0mXXtx"; - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); -} - -// Test that we fail keys of the wrong length. -TEST_F(SrtpFilterTest, TestKeyTooLong) { - std::vector offer(MakeVector(kTestCryptoParams1)); - std::vector answer(MakeVector(kTestCryptoParams2)); - answer[0].key_params = "inline:PS1uQCVeeCFCanVmcjkpPywjNWhcYD0mXXtxaVBRABCD"; - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); -} - -// Test that we fail keys with lifetime or MKI set (since we don't support) -TEST_F(SrtpFilterTest, TestUnsupportedOptions) { - std::vector offer(MakeVector(kTestCryptoParams1)); - std::vector answer(MakeVector(kTestCryptoParams2)); - answer[0].key_params = - "inline:PS1uQCVeeCFCanVmcjkpPywjNWhcYD0mXXtxaVBR|2^20|1:4"; - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_FALSE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); -} - -// Test that we can encrypt/decrypt after negotiating AES_CM_128_HMAC_SHA1_80. -TEST_F(SrtpFilterTest, TestProtect_AES_CM_128_HMAC_SHA1_80) { - std::vector offer(MakeVector(kTestCryptoParams1)); - std::vector answer(MakeVector(kTestCryptoParams2)); - offer.push_back(kTestCryptoParams1); - offer[1].tag = 2; - offer[1].crypto_suite = kCsAesCm128HmacSha1_32; - TestSetParams(offer, answer); - VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80); -} - -// Test that we can encrypt/decrypt after negotiating AES_CM_128_HMAC_SHA1_32. 
-TEST_F(SrtpFilterTest, TestProtect_AES_CM_128_HMAC_SHA1_32) { - std::vector offer(MakeVector(kTestCryptoParams1)); - std::vector answer(MakeVector(kTestCryptoParams2)); - offer.push_back(kTestCryptoParams1); - offer[1].tag = 2; - offer[1].crypto_suite = kCsAesCm128HmacSha1_32; - answer[0].tag = 2; - answer[0].crypto_suite = kCsAesCm128HmacSha1_32; - TestSetParams(offer, answer); - VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_32, kCsAesCm128HmacSha1_32); -} - -// Test that we can change encryption parameters. -TEST_F(SrtpFilterTest, TestChangeParameters) { - std::vector offer(MakeVector(kTestCryptoParams1)); - std::vector answer(MakeVector(kTestCryptoParams2)); - - TestSetParams(offer, answer); - VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80); - - // Change the key parameters and crypto_suite. - offer[0].key_params = kTestKeyParams3; - offer[0].crypto_suite = kCsAesCm128HmacSha1_32; - answer[0].key_params = kTestKeyParams4; - answer[0].crypto_suite = kCsAesCm128HmacSha1_32; - - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_TRUE(f2_.SetOffer(offer, CS_REMOTE)); - EXPECT_TRUE(f1_.IsActive()); - EXPECT_TRUE(f1_.IsActive()); - - // Test that the old keys are valid until the negotiation is complete. - VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80); - - // Complete the negotiation and test that we can still understand each other. - EXPECT_TRUE(f2_.SetAnswer(answer, CS_LOCAL)); - EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE)); - - VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_32, kCsAesCm128HmacSha1_32); -} - -// Test that we can send and receive provisional answers with crypto enabled. -// Also test that we can change the crypto. -TEST_F(SrtpFilterTest, TestProvisionalAnswer) { - std::vector offer(MakeVector(kTestCryptoParams1)); - offer.push_back(kTestCryptoParams1); - offer[1].tag = 2; - offer[1].crypto_suite = kCsAesCm128HmacSha1_32; - std::vector answer(MakeVector(kTestCryptoParams2)); - - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_TRUE(f2_.SetOffer(offer, CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); - EXPECT_FALSE(f2_.IsActive()); - EXPECT_TRUE(f2_.SetProvisionalAnswer(answer, CS_LOCAL)); - EXPECT_TRUE(f1_.SetProvisionalAnswer(answer, CS_REMOTE)); - EXPECT_TRUE(f1_.IsActive()); - EXPECT_TRUE(f2_.IsActive()); - VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80); - - answer[0].key_params = kTestKeyParams4; - answer[0].tag = 2; - answer[0].crypto_suite = kCsAesCm128HmacSha1_32; - EXPECT_TRUE(f2_.SetAnswer(answer, CS_LOCAL)); - EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_TRUE(f1_.IsActive()); - EXPECT_TRUE(f2_.IsActive()); - VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_32, kCsAesCm128HmacSha1_32); -} - -// Test that a provisional answer doesn't need to contain a crypto. 
-TEST_F(SrtpFilterTest, TestProvisionalAnswerWithoutCrypto) { - std::vector offer(MakeVector(kTestCryptoParams1)); - std::vector answer; - - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_TRUE(f2_.SetOffer(offer, CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); - EXPECT_FALSE(f2_.IsActive()); - EXPECT_TRUE(f2_.SetProvisionalAnswer(answer, CS_LOCAL)); - EXPECT_TRUE(f1_.SetProvisionalAnswer(answer, CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); - EXPECT_FALSE(f2_.IsActive()); - - answer.push_back(kTestCryptoParams2); - EXPECT_TRUE(f2_.SetAnswer(answer, CS_LOCAL)); - EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_TRUE(f1_.IsActive()); - EXPECT_TRUE(f2_.IsActive()); - VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80); -} - -// Test that if we get a new local offer after a provisional answer -// with no crypto, that we are in an inactive state. -TEST_F(SrtpFilterTest, TestLocalOfferAfterProvisionalAnswerWithoutCrypto) { - std::vector offer(MakeVector(kTestCryptoParams1)); - std::vector answer; - - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_TRUE(f2_.SetOffer(offer, CS_REMOTE)); - EXPECT_TRUE(f1_.SetProvisionalAnswer(answer, CS_REMOTE)); - EXPECT_TRUE(f2_.SetProvisionalAnswer(answer, CS_LOCAL)); - EXPECT_FALSE(f1_.IsActive()); - EXPECT_FALSE(f2_.IsActive()); - // The calls to set an offer after a provisional answer fail, so the - // state doesn't change. - EXPECT_FALSE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_FALSE(f2_.SetOffer(offer, CS_REMOTE)); - EXPECT_FALSE(f1_.IsActive()); - EXPECT_FALSE(f2_.IsActive()); - - answer.push_back(kTestCryptoParams2); - EXPECT_TRUE(f2_.SetAnswer(answer, CS_LOCAL)); - EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE)); - EXPECT_TRUE(f1_.IsActive()); - EXPECT_TRUE(f2_.IsActive()); - VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80); -} - -// Test that we can disable encryption. -TEST_F(SrtpFilterTest, TestDisableEncryption) { - std::vector offer(MakeVector(kTestCryptoParams1)); - std::vector answer(MakeVector(kTestCryptoParams2)); - - TestSetParams(offer, answer); - VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80); - - offer.clear(); - answer.clear(); - EXPECT_TRUE(f1_.SetOffer(offer, CS_LOCAL)); - EXPECT_TRUE(f2_.SetOffer(offer, CS_REMOTE)); - EXPECT_TRUE(f1_.IsActive()); - EXPECT_TRUE(f2_.IsActive()); - - // Test that the old keys are valid until the negotiation is complete. - VerifyCryptoParamsMatch(kCsAesCm128HmacSha1_80, kCsAesCm128HmacSha1_80); - - // Complete the negotiation. 
- EXPECT_TRUE(f2_.SetAnswer(answer, CS_LOCAL)); - EXPECT_TRUE(f1_.SetAnswer(answer, CS_REMOTE)); - - EXPECT_FALSE(f1_.IsActive()); - EXPECT_FALSE(f2_.IsActive()); -} - -} // namespace rtc diff --git a/pc/srtp_session.cc b/pc/srtp_session.cc index 5408d3e0da..7fe7e04f56 100644 --- a/pc/srtp_session.cc +++ b/pc/srtp_session.cc @@ -12,28 +12,36 @@ #include +#include +#include #include -#include +#include -#include "absl/base/attributes.h" -#include "absl/base/const_init.h" #include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/field_trials_view.h" #include "modules/rtp_rtcp/source/rtp_util.h" #include "pc/external_hmac.h" +#include "rtc_base/buffer.h" #include "rtc_base/byte_order.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/string_encode.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/metrics.h" #include "third_party/libsrtp/include/srtp.h" #include "third_party/libsrtp/include/srtp_priv.h" -namespace cricket { +#ifndef SRTP_SRCTP_INDEX_LEN +#define SRTP_SRCTP_INDEX_LEN 4 +#endif + +namespace webrtc { namespace { class LibSrtpInitializer { @@ -44,6 +52,12 @@ class LibSrtpInitializer { static LibSrtpInitializer* const instance = new LibSrtpInitializer(); return *instance; } + + // There is only one global log handler in libsrtp so we can not resolve this + // to a particular session. + static void LibSrtpLogHandler(srtp_log_level_t level, + const char* msg, + void* data); void ProhibitLibsrtpInitialization(); // These methods are responsible for initializing libsrtp (if the usage count @@ -52,35 +66,58 @@ class LibSrtpInitializer { // // Returns true if successful (will always be successful if already inited). 
bool IncrementLibsrtpUsageCountAndMaybeInit( - srtp_event_handler_func_t* handler); + srtp_event_handler_func_t* event_handler); void DecrementLibsrtpUsageCountAndMaybeDeinit(); private: LibSrtpInitializer() = default; - webrtc::Mutex mutex_; + Mutex mutex_; int usage_count_ RTC_GUARDED_BY(mutex_) = 0; }; +void LibSrtpInitializer::LibSrtpLogHandler(srtp_log_level_t level, + const char* msg, + void* data) { + RTC_DCHECK(data == nullptr); + if (level == srtp_log_level_error) { + RTC_LOG(LS_ERROR) << "SRTP log: " << msg; + } else if (level == srtp_log_level_warning) { + RTC_LOG(LS_WARNING) << "SRTP log: " << msg; + } else if (level == srtp_log_level_info) { + RTC_LOG(LS_INFO) << "SRTP log: " << msg; + } else if (level == srtp_log_level_debug) { + RTC_LOG(LS_VERBOSE) << "SRTP log: " << msg; + } +} + void LibSrtpInitializer::ProhibitLibsrtpInitialization() { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); ++usage_count_; } bool LibSrtpInitializer::IncrementLibsrtpUsageCountAndMaybeInit( - srtp_event_handler_func_t* handler) { - webrtc::MutexLock lock(&mutex_); + srtp_event_handler_func_t* event_handler) { + MutexLock lock(&mutex_); + RTC_DCHECK(event_handler); RTC_DCHECK_GE(usage_count_, 0); if (usage_count_ == 0) { int err; + + err = srtp_install_log_handler(&LibSrtpInitializer::LibSrtpLogHandler, + nullptr); + if (err != srtp_err_status_ok) { + RTC_LOG(LS_ERROR) << "Failed to install libsrtp log handler, err=" << err; + return false; + } err = srtp_init(); if (err != srtp_err_status_ok) { RTC_LOG(LS_ERROR) << "Failed to init SRTP, err=" << err; return false; } - err = srtp_install_event_handler(handler); + err = srtp_install_event_handler(event_handler); if (err != srtp_err_status_ok) { RTC_LOG(LS_ERROR) << "Failed to install SRTP event handler, err=" << err; return false; @@ -97,12 +134,17 @@ bool LibSrtpInitializer::IncrementLibsrtpUsageCountAndMaybeInit( } void LibSrtpInitializer::DecrementLibsrtpUsageCountAndMaybeDeinit() { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); RTC_DCHECK_GE(usage_count_, 1); if (--usage_count_ == 0) { - int err = srtp_shutdown(); - if (err) { + int err = srtp_install_log_handler(nullptr, nullptr); + if (err != srtp_err_status_ok) { + RTC_LOG(LS_ERROR) << "Failed to uninstall libsrtp log handler, err=" + << err; + } + err = srtp_shutdown(); + if (err != srtp_err_status_ok) { RTC_LOG(LS_ERROR) << "srtp_shutdown failed. err=" << err; } } @@ -110,8 +152,6 @@ void LibSrtpInitializer::DecrementLibsrtpUsageCountAndMaybeDeinit() { } // namespace -using ::webrtc::ParseRtpSequenceNumber; - // One more than the maximum libsrtp error code. Required by // RTC_HISTOGRAM_ENUMERATION. Keep this in sync with srtp_error_status_t defined // in srtp.h. 
@@ -119,7 +159,7 @@ constexpr int kSrtpErrorCodeBoundary = 28; SrtpSession::SrtpSession() {} -SrtpSession::SrtpSession(const webrtc::FieldTrialsView& field_trials) { +SrtpSession::SrtpSession(const FieldTrialsView& field_trials) { dump_plain_rtp_ = field_trials.IsEnabled("WebRTC-Debugging-RtpDump"); } @@ -134,31 +174,63 @@ SrtpSession::~SrtpSession() { } bool SrtpSession::SetSend(int crypto_suite, - const uint8_t* key, - size_t len, + const ZeroOnFreeBuffer& key, const std::vector& extension_ids) { - return SetKey(ssrc_any_outbound, crypto_suite, key, len, extension_ids); + return SetKey(ssrc_any_outbound, crypto_suite, key, extension_ids); } bool SrtpSession::UpdateSend(int crypto_suite, - const uint8_t* key, - size_t len, + const ZeroOnFreeBuffer& key, const std::vector& extension_ids) { - return UpdateKey(ssrc_any_outbound, crypto_suite, key, len, extension_ids); + return UpdateKey(ssrc_any_outbound, crypto_suite, key, extension_ids); } -bool SrtpSession::SetRecv(int crypto_suite, - const uint8_t* key, - size_t len, - const std::vector& extension_ids) { - return SetKey(ssrc_any_inbound, crypto_suite, key, len, extension_ids); +bool SrtpSession::SetReceive(int crypto_suite, + const ZeroOnFreeBuffer& key, + const std::vector& extension_ids) { + return SetKey(ssrc_any_inbound, crypto_suite, key, extension_ids); } -bool SrtpSession::UpdateRecv(int crypto_suite, - const uint8_t* key, - size_t len, - const std::vector& extension_ids) { - return UpdateKey(ssrc_any_inbound, crypto_suite, key, len, extension_ids); +bool SrtpSession::UpdateReceive(int crypto_suite, + const ZeroOnFreeBuffer& key, + const std::vector& extension_ids) { + return UpdateKey(ssrc_any_inbound, crypto_suite, key, extension_ids); +} + +bool SrtpSession::ProtectRtp(CopyOnWriteBuffer& buffer) { + RTC_DCHECK(thread_checker_.IsCurrent()); + if (!session_) { + RTC_LOG(LS_WARNING) << "Failed to protect SRTP packet: no SRTP Session"; + return false; + } + + // Note: the need_len differs from the libsrtp recommendatіon to ensure + // SRTP_MAX_TRAILER_LEN bytes of free space after the data. WebRTC + // never includes a MKI, therefore the amount of bytes added by the + // srtp_protect call is known in advance and depends on the cipher suite. 
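  // Note (illustration, not part of the change): the comment below also
  // implies a contract for callers of the new CopyOnWriteBuffer overloads.
  // ProtectRtp encrypts in place and refuses to reallocate, so a conforming
  // caller reserves room for the trailer up front, e.g.
  // (rtp_data, rtp_len and session are placeholder names):
  //   CopyOnWriteBuffer packet(rtp_data, rtp_len,
  //                            rtp_len + SRTP_MAX_TRAILER_LEN);
  //   session.ProtectRtp(packet);
  // SRTP_MAX_TRAILER_LEN is libsrtp's upper bound on the per-packet overhead;
  // the exact overhead depends on the negotiated cipher suite, as noted below.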
+ size_t need_len = buffer.size() + rtp_auth_tag_len_; // NOLINT + if (buffer.capacity() < need_len) { + RTC_LOG(LS_WARNING) << "Failed to protect SRTP packet: The buffer length " + << buffer.capacity() << " is less than the needed " + << need_len; + return false; + } + if (dump_plain_rtp_) { + DumpPacket(buffer, /*outbound=*/true); + } + + int out_len = buffer.size(); + int err = srtp_protect(session_, buffer.MutableData(), &out_len); + int seq_num = webrtc::ParseRtpSequenceNumber(buffer); + if (err != srtp_err_status_ok) { + RTC_LOG(LS_WARNING) << "Failed to protect SRTP packet, seqnum=" << seq_num + << ", err=" << err + << ", last seqnum=" << last_send_seq_num_; + return false; + } + buffer.SetSize(out_len); + last_send_seq_num_ = seq_num; + return true; } bool SrtpSession::ProtectRtp(void* p, int in_len, int max_len, int* out_len) { @@ -184,8 +256,8 @@ bool SrtpSession::ProtectRtp(void* p, int in_len, int max_len, int* out_len) { *out_len = in_len; int err = srtp_protect(session_, p, out_len); - int seq_num = ParseRtpSequenceNumber( - rtc::MakeArrayView(reinterpret_cast(p), in_len)); + int seq_num = webrtc::ParseRtpSequenceNumber( + MakeArrayView(reinterpret_cast(p), in_len)); if (err != srtp_err_status_ok) { RTC_LOG(LS_WARNING) << "Failed to protect SRTP packet, seqnum=" << seq_num << ", err=" << err @@ -196,15 +268,57 @@ bool SrtpSession::ProtectRtp(void* p, int in_len, int max_len, int* out_len) { return true; } -bool SrtpSession::ProtectRtp(void* p, +bool SrtpSession::ProtectRtp(CopyOnWriteBuffer& buffer, int64_t* index) { + if (!ProtectRtp(buffer)) { + return false; + } + return (index) ? GetSendStreamPacketIndex(buffer, index) : true; +} + +bool SrtpSession::ProtectRtp(void* data, int in_len, int max_len, int* out_len, int64_t* index) { - if (!ProtectRtp(p, in_len, max_len, out_len)) { + CopyOnWriteBuffer buffer(static_cast(data), in_len, max_len); + if (!ProtectRtp(buffer)) { return false; } - return (index) ? GetSendStreamPacketIndex(p, in_len, index) : true; + *out_len = buffer.size(); + return (index) ? GetSendStreamPacketIndex(buffer, index) : true; +} + +bool SrtpSession::ProtectRtcp(CopyOnWriteBuffer& buffer) { + RTC_DCHECK(thread_checker_.IsCurrent()); + if (!session_) { + RTC_LOG(LS_WARNING) << "Failed to protect SRTCP packet: no SRTP Session"; + return false; + } + + // Note: the need_len differs from the libsrtp recommendatіon to ensure + // SRTP_MAX_TRAILER_LEN bytes of free space after the data. WebRTC + // never includes a MKI, therefore the amount of bytes added by the + // srtp_protect_rtp call is known in advance and depends on the cipher suite. 
+ size_t need_len = + buffer.size() + sizeof(uint32_t) + rtcp_auth_tag_len_; // NOLINT + if (buffer.capacity() < need_len) { + RTC_LOG(LS_WARNING) + << "Failed to protect SRTCP packet: The buffer capacity " + << buffer.capacity() << " is less than the needed " << need_len; + return false; + } + if (dump_plain_rtp_) { + DumpPacket(buffer, /*outbound=*/true); + } + + int out_len = buffer.size(); + int err = srtp_protect_rtcp(session_, buffer.MutableData(), &out_len); + if (err != srtp_err_status_ok) { + RTC_LOG(LS_WARNING) << "Failed to protect SRTCP packet, err=" << err; + return false; + } + buffer.SetSize(out_len); + return true; } bool SrtpSession::ProtectRtcp(void* p, int in_len, int max_len, int* out_len) { @@ -237,6 +351,36 @@ bool SrtpSession::ProtectRtcp(void* p, int in_len, int max_len, int* out_len) { return true; } +bool SrtpSession::UnprotectRtp(CopyOnWriteBuffer& buffer) { + RTC_DCHECK(thread_checker_.IsCurrent()); + if (!session_) { + RTC_LOG(LS_WARNING) << "Failed to unprotect SRTP packet: no SRTP Session"; + return false; + } + int out_len = buffer.size(); + + int err = srtp_unprotect(session_, buffer.MutableData(), &out_len); + if (err != srtp_err_status_ok) { + // Limit the error logging to avoid excessive logs when there are lots of + // bad packets. + const int kFailureLogThrottleCount = 100; + if (decryption_failure_count_ % kFailureLogThrottleCount == 0) { + RTC_LOG(LS_WARNING) << "Failed to unprotect SRTP packet, err=" << err + << ", previous failure count: " + << decryption_failure_count_; + } + ++decryption_failure_count_; + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SrtpUnprotectError", + static_cast(err), kSrtpErrorCodeBoundary); + return false; + } + buffer.SetSize(out_len); + if (dump_plain_rtp_) { + DumpPacket(buffer, /*outbound=*/false); + } + return true; +} + bool SrtpSession::UnprotectRtp(void* p, int in_len, int* out_len) { RTC_DCHECK(thread_checker_.IsCurrent()); if (!session_) { @@ -266,6 +410,28 @@ bool SrtpSession::UnprotectRtp(void* p, int in_len, int* out_len) { return true; } +bool SrtpSession::UnprotectRtcp(CopyOnWriteBuffer& buffer) { + RTC_DCHECK(thread_checker_.IsCurrent()); + if (!session_) { + RTC_LOG(LS_WARNING) << "Failed to unprotect SRTCP packet: no SRTP Session"; + return false; + } + + int out_len = buffer.size(); + int err = srtp_unprotect_rtcp(session_, buffer.MutableData(), &out_len); + if (err != srtp_err_status_ok) { + RTC_LOG(LS_WARNING) << "Failed to unprotect SRTCP packet, err=" << err; + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.SrtcpUnprotectError", + static_cast(err), kSrtpErrorCodeBoundary); + return false; + } + buffer.SetSize(out_len); + if (dump_plain_rtp_) { + DumpPacket(buffer, /*outbound=*/false); + } + return true; +} + bool SrtpSession::UnprotectRtcp(void* p, int in_len, int* out_len) { RTC_DCHECK(thread_checker_.IsCurrent()); if (!session_) { @@ -332,26 +498,34 @@ bool SrtpSession::IsExternalAuthActive() const { return external_auth_active_; } -bool SrtpSession::GetSendStreamPacketIndex(void* p, - int in_len, +bool SrtpSession::RemoveSsrcFromSession(uint32_t ssrc) { + RTC_DCHECK(session_); + // libSRTP expects the SSRC to be in network byte order. 
+ return srtp_remove_stream(session_, htonl(ssrc)) == srtp_err_status_ok; +} + +bool SrtpSession::GetSendStreamPacketIndex(CopyOnWriteBuffer& buffer, int64_t* index) { RTC_DCHECK(thread_checker_.IsCurrent()); - srtp_hdr_t* hdr = reinterpret_cast(p); - srtp_stream_ctx_t* stream = srtp_get_stream(session_, hdr->ssrc); - if (!stream) { + + uint32_t ssrc = webrtc::ParseRtpSsrc(buffer); + uint32_t roc; + if (srtp_get_stream_roc(session_, ssrc, &roc) != srtp_err_status_ok) { return false; } + // Calculate the extended sequence number. + uint16_t seq_num = webrtc::ParseRtpSequenceNumber(buffer); + int64_t extended_seq_num = (roc << 16) + seq_num; - // Shift packet index, put into network byte order - *index = static_cast(rtc::NetworkToHost64( - srtp_rdbx_get_packet_index(&stream->rtp_rdbx) << 16)); + // Shift extended sequence number, put into network byte order + *index = + static_cast(webrtc::NetworkToHost64(extended_seq_num << 16)); return true; } bool SrtpSession::DoSetKey(int type, int crypto_suite, - const uint8_t* key, - size_t len, + const ZeroOnFreeBuffer& key, const std::vector& extension_ids) { RTC_DCHECK(thread_checker_.IsCurrent()); @@ -368,7 +542,7 @@ bool SrtpSession::DoSetKey(int type, return false; } - if (!key || len != static_cast(policy.rtp.cipher_key_len)) { + if (key.size() != static_cast(policy.rtp.cipher_key_len)) { RTC_LOG(LS_ERROR) << "Failed to " << (session_ ? "update" : "create") << " SRTP session: invalid key"; return false; @@ -376,7 +550,7 @@ bool SrtpSession::DoSetKey(int type, policy.ssrc.type = static_cast(type); policy.ssrc.value = 0; - policy.key = const_cast(key); + policy.key = const_cast(key.data()); // TODO(astor) parse window size from WSH session-param policy.window_size = 1024; policy.allow_repeat_tx = 1; @@ -387,7 +561,7 @@ bool SrtpSession::DoSetKey(int type, // Enable external HMAC authentication only for outgoing streams and only // for cipher suites that support it (i.e. only non-GCM cipher suites). if (type == ssrc_any_outbound && IsExternalAuthEnabled() && - !rtc::IsGcmCryptoSuite(crypto_suite)) { + !webrtc::IsGcmCryptoSuite(crypto_suite)) { policy.rtp.auth_type = EXTERNAL_HMAC_SHA1; } if (!extension_ids.empty()) { @@ -420,8 +594,7 @@ bool SrtpSession::DoSetKey(int type, bool SrtpSession::SetKey(int type, int crypto_suite, - const uint8_t* key, - size_t len, + const ZeroOnFreeBuffer& key, const std::vector& extension_ids) { RTC_DCHECK(thread_checker_.IsCurrent()); if (session_) { @@ -439,13 +612,12 @@ bool SrtpSession::SetKey(int type, return false; } - return DoSetKey(type, crypto_suite, key, len, extension_ids); + return DoSetKey(type, crypto_suite, key, extension_ids); } bool SrtpSession::UpdateKey(int type, int crypto_suite, - const uint8_t* key, - size_t len, + const ZeroOnFreeBuffer& key, const std::vector& extension_ids) { RTC_DCHECK(thread_checker_.IsCurrent()); if (!session_) { @@ -453,7 +625,7 @@ bool SrtpSession::UpdateKey(int type, return false; } - return DoSetKey(type, crypto_suite, key, len, extension_ids); + return DoSetKey(type, crypto_suite, key, extension_ids); } void ProhibitLibsrtpInitialization() { @@ -496,25 +668,29 @@ void SrtpSession::HandleEventThunk(srtp_event_data_t* ev) { // extracted by searching for RTP_DUMP // grep RTP_DUMP chrome_debug.log > in.txt // and converted to pcap using -// text2pcap -D -u 1000,2000 -t %H:%M:%S. 
in.txt out.pcap +// text2pcap -D -u 1000,2000 -t %H:%M:%S.%f in.txt out.pcap // The resulting file can be replayed using the WebRTC video_replay tool and // be inspected in Wireshark using the RTP, VP8 and H264 dissectors. -void SrtpSession::DumpPacket(const void* buf, int len, bool outbound) { - int64_t time_of_day = rtc::TimeUTCMillis() % (24 * 3600 * 1000); +void SrtpSession::DumpPacket(const CopyOnWriteBuffer& buffer, bool outbound) { + int64_t time_of_day = webrtc::TimeUTCMillis() % (24 * 3600 * 1000); int64_t hours = time_of_day / (3600 * 1000); int64_t minutes = (time_of_day / (60 * 1000)) % 60; int64_t seconds = (time_of_day / 1000) % 60; int64_t millis = time_of_day % 1000; - RTC_LOG(LS_VERBOSE) << "\n" - << (outbound ? "O" : "I") << " " << std::setfill('0') - << std::setw(2) << hours << ":" << std::setfill('0') - << std::setw(2) << minutes << ":" << std::setfill('0') - << std::setw(2) << seconds << "." << std::setfill('0') - << std::setw(3) << millis << " " - << "000000 " - << rtc::hex_encode_with_delimiter( - absl::string_view((const char*)buf, len), ' ') - << " # RTP_DUMP"; -} - -} // namespace cricket + RTC_LOG(LS_VERBOSE) + << "\n" + << (outbound ? "O" : "I") << " " << std::setfill('0') << std::setw(2) + << hours << ":" << std::setfill('0') << std::setw(2) << minutes << ":" + << std::setfill('0') << std::setw(2) << seconds << "." + << std::setfill('0') << std::setw(3) << millis << " " << "000000 " + << webrtc::hex_encode_with_delimiter( + absl::string_view(buffer.data(), buffer.size()), ' ') + << " # RTP_DUMP"; +} + +void SrtpSession::DumpPacket(const void* buf, int len, bool outbound) { + const CopyOnWriteBuffer buffer(static_cast(buf), len, len); + DumpPacket(buffer, outbound); +} + +} // namespace webrtc diff --git a/pc/srtp_session.h b/pc/srtp_session.h index 60f1860ada..1134e4393f 100644 --- a/pc/srtp_session.h +++ b/pc/srtp_session.h @@ -17,15 +17,15 @@ #include #include "api/field_trials_view.h" -#include "api/scoped_refptr.h" #include "api/sequence_checker.h" -#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/buffer.h" +#include "rtc_base/copy_on_write_buffer.h" // Forward declaration to avoid pulling in libsrtp headers here struct srtp_event_data_t; -struct srtp_ctx_t_; +struct srtp_ctx_t_; // Trailing _ is required. -namespace cricket { +namespace webrtc { // Prohibits webrtc from initializing libsrtp. This can be used if libsrtp is // initialized by another library or explicitly. Note that this must be called @@ -36,7 +36,7 @@ void ProhibitLibsrtpInitialization(); class SrtpSession { public: SrtpSession(); - explicit SrtpSession(const webrtc::FieldTrialsView& field_trials); + explicit SrtpSession(const FieldTrialsView& field_trials); ~SrtpSession(); SrtpSession(const SrtpSession&) = delete; @@ -45,39 +45,51 @@ class SrtpSession { // Configures the session for sending data using the specified // crypto suite and key. Receiving must be done by a separate session. bool SetSend(int crypto_suite, - const uint8_t* key, - size_t len, + const ZeroOnFreeBuffer& key, const std::vector& extension_ids); bool UpdateSend(int crypto_suite, - const uint8_t* key, - size_t len, + const ZeroOnFreeBuffer& key, const std::vector& extension_ids); // Configures the session for receiving data using the specified // crypto suite and key. Sending must be done by a separate session. 
- bool SetRecv(int crypto_suite, - const uint8_t* key, - size_t len, - const std::vector& extension_ids); - bool UpdateRecv(int crypto_suite, - const uint8_t* key, - size_t len, + bool SetReceive(int crypto_suite, + const ZeroOnFreeBuffer& key, const std::vector& extension_ids); + bool UpdateReceive(int crypto_suite, + const ZeroOnFreeBuffer& key, + const std::vector& extension_ids); // Encrypts/signs an individual RTP/RTCP packet, in-place. // If an HMAC is used, this will increase the packet size. - bool ProtectRtp(void* data, int in_len, int max_len, int* out_len); + [[deprecated("Pass CopyOnWriteBuffer")]] bool ProtectRtp(void* data, + int in_len, + int max_len, + int* out_len); + bool ProtectRtp(CopyOnWriteBuffer& buffer); // Overloaded version, outputs packet index. - bool ProtectRtp(void* data, - int in_len, - int max_len, - int* out_len, - int64_t* index); - bool ProtectRtcp(void* data, int in_len, int max_len, int* out_len); + [[deprecated("Pass CopyOnWriteBuffer")]] bool ProtectRtp(void* data, + int in_len, + int max_len, + int* out_len, + int64_t* index); + bool ProtectRtp(CopyOnWriteBuffer& buffer, int64_t* index); + + [[deprecated("Pass CopyOnWriteBuffer")]] bool ProtectRtcp(void* data, + int in_len, + int max_len, + int* out_len); + bool ProtectRtcp(CopyOnWriteBuffer& buffer); // Decrypts/verifies an invidiual RTP/RTCP packet. // If an HMAC is used, this will decrease the packet size. - bool UnprotectRtp(void* data, int in_len, int* out_len); - bool UnprotectRtcp(void* data, int in_len, int* out_len); + [[deprecated("Pass CopyOnWriteBuffer")]] bool UnprotectRtp(void* data, + int in_len, + int* out_len); + bool UnprotectRtp(CopyOnWriteBuffer& buffer); + [[deprecated("Pass CopyOnWriteBuffer")]] bool UnprotectRtcp(void* data, + int in_len, + int* out_len); + bool UnprotectRtcp(CopyOnWriteBuffer& buffer); // Helper method to get authentication params. bool GetRtpAuthParams(uint8_t** key, int* key_len, int* tag_len); @@ -97,33 +109,41 @@ class SrtpSession { // been set. bool IsExternalAuthActive() const; + // Removes a SSRC from the underlying libSRTP session. + // Note: this should only be done for SSRCs that are received. + // Removing SSRCs that were sent and then reusing them leads to + // cryptographic weaknesses described in + // https://www.rfc-editor.org/rfc/rfc3711#section-8 + // https://www.rfc-editor.org/rfc/rfc7714#section-8.4 + bool RemoveSsrcFromSession(uint32_t ssrc); + private: bool DoSetKey(int type, int crypto_suite, - const uint8_t* key, - size_t len, + const ZeroOnFreeBuffer& key, const std::vector& extension_ids); bool SetKey(int type, int crypto_suite, - const uint8_t* key, - size_t len, + const ZeroOnFreeBuffer& key, const std::vector& extension_ids); bool UpdateKey(int type, int crypto_suite, - const uint8_t* key, - size_t len, + const ZeroOnFreeBuffer& key, const std::vector& extension_ids); // Returns send stream current packet index from srtp db. - bool GetSendStreamPacketIndex(void* data, int in_len, int64_t* index); + bool GetSendStreamPacketIndex(CopyOnWriteBuffer& buffer, int64_t* index); // Writes unencrypted packets in text2pcap format to the log file // for debugging. 
- void DumpPacket(const void* buf, int len, bool outbound); + void DumpPacket(const CopyOnWriteBuffer& buffer, bool outbound); + [[deprecated("Pass CopyOnWriteBuffer")]] void DumpPacket(const void* buf, + int len, + bool outbound); void HandleEvent(const srtp_event_data_t* ev); static void HandleEventThunk(srtp_event_data_t* ev); - webrtc::SequenceChecker thread_checker_; + SequenceChecker thread_checker_; srtp_ctx_t_* session_ = nullptr; // Overhead of the SRTP auth tag for RTP and RTCP in bytes. @@ -141,6 +161,15 @@ class SrtpSession { bool dump_plain_rtp_ = false; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::ProhibitLibsrtpInitialization; +using ::webrtc::SrtpSession; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // PC_SRTP_SESSION_H_ diff --git a/pc/srtp_session_unittest.cc b/pc/srtp_session_unittest.cc index 16a840a307..fd58ce79b5 100644 --- a/pc/srtp_session_unittest.cc +++ b/pc/srtp_session_unittest.cc @@ -12,12 +12,17 @@ #include -#include +#include +#include +#include +#include #include "media/base/fake_rtp.h" #include "pc/test/srtp_test_util.h" +#include "rtc_base/buffer.h" #include "rtc_base/byte_order.h" -#include "rtc_base/ssl_stream_adapter.h" // For rtc::SRTP_* +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/ssl_stream_adapter.h" // For webrtc::SRTP_* #include "system_wrappers/include/metrics.h" #include "test/gmock.h" #include "test/gtest.h" @@ -27,7 +32,7 @@ using ::testing::ElementsAre; using ::testing::Pair; -namespace rtc { +namespace webrtc { std::vector kEncryptedHeaderExtensionIds; @@ -41,126 +46,130 @@ class SrtpSessionTest : public ::testing::Test { virtual void SetUp() { rtp_len_ = sizeof(kPcmuFrame); rtcp_len_ = sizeof(kRtcpReport); - memcpy(rtp_packet_, kPcmuFrame, rtp_len_); - memcpy(rtcp_packet_, kRtcpReport, rtcp_len_); + rtp_packet_.EnsureCapacity(rtp_len_ + 10); + rtp_packet_.SetData(kPcmuFrame, rtp_len_); + rtcp_packet_.EnsureCapacity(rtcp_len_ + 4 + 10); + rtcp_packet_.SetData(kRtcpReport, rtcp_len_); } - void TestProtectRtp(const std::string& cs) { - int out_len = 0; - EXPECT_TRUE( - s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_), &out_len)); - EXPECT_EQ(out_len, rtp_len_ + rtp_auth_tag_len(cs)); - EXPECT_NE(0, memcmp(rtp_packet_, kPcmuFrame, rtp_len_)); - rtp_len_ = out_len; + void TestProtectRtp(int crypto_suite) { + EXPECT_TRUE(s1_.ProtectRtp(rtp_packet_)); + EXPECT_EQ(rtp_packet_.size(), + rtp_len_ + webrtc::rtp_auth_tag_len(crypto_suite)); + // Check that Protect changed the content (up to the original length). + EXPECT_NE(0, std::memcmp(kPcmuFrame, rtp_packet_.data(), rtp_len_)); + rtp_len_ = rtp_packet_.size(); } - void TestProtectRtcp(const std::string& cs) { - int out_len = 0; - EXPECT_TRUE(s1_.ProtectRtcp(rtcp_packet_, rtcp_len_, sizeof(rtcp_packet_), - &out_len)); - EXPECT_EQ(out_len, rtcp_len_ + 4 + rtcp_auth_tag_len(cs)); // NOLINT - EXPECT_NE(0, memcmp(rtcp_packet_, kRtcpReport, rtcp_len_)); - rtcp_len_ = out_len; + void TestProtectRtcp(int crypto_suite) { + EXPECT_TRUE(s1_.ProtectRtcp(rtcp_packet_)); + EXPECT_EQ(rtcp_packet_.size(), + rtcp_len_ + 4 + webrtc::rtcp_auth_tag_len(crypto_suite)); + // Check that Protect changed the content (up to the original length). 
+ EXPECT_NE(0, std::memcmp(kRtcpReport, rtcp_packet_.data(), rtcp_len_)); + rtcp_len_ = rtcp_packet_.size(); } - void TestUnprotectRtp(const std::string& cs) { - int out_len = 0, expected_len = sizeof(kPcmuFrame); - EXPECT_TRUE(s2_.UnprotectRtp(rtp_packet_, rtp_len_, &out_len)); - EXPECT_EQ(expected_len, out_len); - EXPECT_EQ(0, memcmp(rtp_packet_, kPcmuFrame, out_len)); + void TestUnprotectRtp(int crypto_suite) { + EXPECT_TRUE(s2_.UnprotectRtp(rtp_packet_)); + EXPECT_EQ(rtp_packet_.size(), sizeof(kPcmuFrame)); + EXPECT_EQ(0, + std::memcmp(kPcmuFrame, rtp_packet_.data(), rtp_packet_.size())); } - void TestUnprotectRtcp(const std::string& cs) { - int out_len = 0, expected_len = sizeof(kRtcpReport); - EXPECT_TRUE(s2_.UnprotectRtcp(rtcp_packet_, rtcp_len_, &out_len)); - EXPECT_EQ(expected_len, out_len); - EXPECT_EQ(0, memcmp(rtcp_packet_, kRtcpReport, out_len)); + void TestUnprotectRtcp(int crypto_suite) { + EXPECT_TRUE(s2_.UnprotectRtcp(rtcp_packet_)); + EXPECT_EQ(rtcp_packet_.size(), sizeof(kRtcpReport)); + EXPECT_EQ( + 0, std::memcmp(kRtcpReport, rtcp_packet_.data(), rtcp_packet_.size())); } - webrtc::test::ScopedKeyValueConfig field_trials_; - cricket::SrtpSession s1_; - cricket::SrtpSession s2_; - char rtp_packet_[sizeof(kPcmuFrame) + 10]; - char rtcp_packet_[sizeof(kRtcpReport) + 4 + 10]; - int rtp_len_; - int rtcp_len_; + test::ScopedKeyValueConfig field_trials_; + SrtpSession s1_; + SrtpSession s2_; + CopyOnWriteBuffer rtp_packet_; + CopyOnWriteBuffer rtcp_packet_; + size_t rtp_len_; + size_t rtcp_len_; }; // Test that we can set up the session and keys properly. TEST_F(SrtpSessionTest, TestGoodSetup) { - EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen, - kEncryptedHeaderExtensionIds)); - EXPECT_TRUE(s2_.SetRecv(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen, + EXPECT_TRUE(s1_.SetSend(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1, kEncryptedHeaderExtensionIds)); + EXPECT_TRUE(s2_.SetReceive(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1, + kEncryptedHeaderExtensionIds)); } // Test that we can't change the keys once set. TEST_F(SrtpSessionTest, TestBadSetup) { - EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen, - kEncryptedHeaderExtensionIds)); - EXPECT_TRUE(s2_.SetRecv(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen, + EXPECT_TRUE(s1_.SetSend(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1, kEncryptedHeaderExtensionIds)); - EXPECT_FALSE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey2, kTestKeyLen, - kEncryptedHeaderExtensionIds)); - EXPECT_FALSE(s2_.SetRecv(kSrtpAes128CmSha1_80, kTestKey2, kTestKeyLen, + EXPECT_TRUE(s2_.SetReceive(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1, + kEncryptedHeaderExtensionIds)); + EXPECT_FALSE(s1_.SetSend(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey2, kEncryptedHeaderExtensionIds)); + EXPECT_FALSE(s2_.SetReceive(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey2, + kEncryptedHeaderExtensionIds)); } // Test that we fail keys of the wrong length. 
 TEST_F(SrtpSessionTest, TestKeysTooShort) {
-  EXPECT_FALSE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey1, 1,
-                           kEncryptedHeaderExtensionIds));
-  EXPECT_FALSE(s2_.SetRecv(kSrtpAes128CmSha1_80, kTestKey1, 1,
-                           kEncryptedHeaderExtensionIds));
+  EXPECT_FALSE(
+      s1_.SetSend(webrtc::kSrtpAes128CmSha1_80,
+                  ZeroOnFreeBuffer(webrtc::kTestKey1.data(), 1),
+                  kEncryptedHeaderExtensionIds));
+  EXPECT_FALSE(
+      s2_.SetReceive(webrtc::kSrtpAes128CmSha1_80,
+                     ZeroOnFreeBuffer(webrtc::kTestKey1.data(), 1),
+                     kEncryptedHeaderExtensionIds));
 }
 
 // Test that we can encrypt and decrypt RTP/RTCP using AES_CM_128_HMAC_SHA1_80.
 TEST_F(SrtpSessionTest, TestProtect_AES_CM_128_HMAC_SHA1_80) {
-  EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
-                          kEncryptedHeaderExtensionIds));
-  EXPECT_TRUE(s2_.SetRecv(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
+  EXPECT_TRUE(s1_.SetSend(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1,
                           kEncryptedHeaderExtensionIds));
-  TestProtectRtp(kCsAesCm128HmacSha1_80);
-  TestProtectRtcp(kCsAesCm128HmacSha1_80);
-  TestUnprotectRtp(kCsAesCm128HmacSha1_80);
-  TestUnprotectRtcp(kCsAesCm128HmacSha1_80);
+  EXPECT_TRUE(s2_.SetReceive(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1,
+                             kEncryptedHeaderExtensionIds));
+  TestProtectRtp(webrtc::kSrtpAes128CmSha1_80);
+  TestProtectRtcp(webrtc::kSrtpAes128CmSha1_80);
+  TestUnprotectRtp(webrtc::kSrtpAes128CmSha1_80);
+  TestUnprotectRtcp(webrtc::kSrtpAes128CmSha1_80);
 }
 
 // Test that we can encrypt and decrypt RTP/RTCP using AES_CM_128_HMAC_SHA1_32.
 TEST_F(SrtpSessionTest, TestProtect_AES_CM_128_HMAC_SHA1_32) {
-  EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_32, kTestKey1, kTestKeyLen,
+  EXPECT_TRUE(s1_.SetSend(webrtc::kSrtpAes128CmSha1_32, webrtc::kTestKey1,
                           kEncryptedHeaderExtensionIds));
-  EXPECT_TRUE(s2_.SetRecv(kSrtpAes128CmSha1_32, kTestKey1, kTestKeyLen,
-                          kEncryptedHeaderExtensionIds));
-  TestProtectRtp(kCsAesCm128HmacSha1_32);
-  TestProtectRtcp(kCsAesCm128HmacSha1_32);
-  TestUnprotectRtp(kCsAesCm128HmacSha1_32);
-  TestUnprotectRtcp(kCsAesCm128HmacSha1_32);
+  EXPECT_TRUE(s2_.SetReceive(webrtc::kSrtpAes128CmSha1_32, webrtc::kTestKey1,
+                             kEncryptedHeaderExtensionIds));
+  TestProtectRtp(webrtc::kSrtpAes128CmSha1_32);
+  TestProtectRtcp(webrtc::kSrtpAes128CmSha1_32);
+  TestUnprotectRtp(webrtc::kSrtpAes128CmSha1_32);
+  TestUnprotectRtcp(webrtc::kSrtpAes128CmSha1_32);
 }
 
 TEST_F(SrtpSessionTest, TestGetSendStreamPacketIndex) {
-  EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_32, kTestKey1, kTestKeyLen,
+  EXPECT_TRUE(s1_.SetSend(webrtc::kSrtpAes128CmSha1_32, webrtc::kTestKey1,
                           kEncryptedHeaderExtensionIds));
   int64_t index;
-  int out_len = 0;
-  EXPECT_TRUE(s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_),
-                             &out_len, &index));
+  EXPECT_TRUE(s1_.ProtectRtp(rtp_packet_, &index));
   // `index` will be shifted by 16.
-  int64_t be64_index = static_cast<int64_t>(NetworkToHost64(1 << 16));
+  int64_t be64_index = static_cast<int64_t>(webrtc::NetworkToHost64(1 << 16));
   EXPECT_EQ(be64_index, index);
 }
 
 // Test that we fail to unprotect if someone tampers with the RTP/RTCP payloads.
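// ---------------------------------------------------------------------------
// Editorial aside (illustration only, not part of this patch): the packet
// index checked above and in ProtectGetPacketIndex further down is the 48-bit
// SRTP index from RFC 3711, i.e. the roll-over counter (ROC) in the upper
// 32 bits and the RTP sequence number in the lower 16 bits; the session code
// then shifts it left by 16 and stores it in network byte order. A minimal
// sketch of the extended-sequence-number part (helper name is ours):
#include <cstdint>

constexpr uint64_t SrtpExtendedSequenceNumber(uint32_t roc, uint16_t seq) {
  return (static_cast<uint64_t>(roc) << 16) | seq;
}
// SN 1 with ROC 0 gives 0x1 (the 1 << 16 expectation above, once shifted and
// byte-swapped); SN 65535 with ROC 0 gives 0xFFFF and SN 1 with ROC 1 gives
// 0x10001, the values behind 0xffff00000000 and 0x10001000000 expected in
// ProtectGetPacketIndex.
static_assert(SrtpExtendedSequenceNumber(0, 65535) == 0xFFFF, "");
static_assert(SrtpExtendedSequenceNumber(1, 1) == 0x10001, "");
// ---------------------------------------------------------------------------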
TEST_F(SrtpSessionTest, TestTamperReject) { - int out_len; - EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen, - kEncryptedHeaderExtensionIds)); - EXPECT_TRUE(s2_.SetRecv(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen, + EXPECT_TRUE(s1_.SetSend(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1, kEncryptedHeaderExtensionIds)); - TestProtectRtp(kCsAesCm128HmacSha1_80); - TestProtectRtcp(kCsAesCm128HmacSha1_80); - rtp_packet_[0] = 0x12; - rtcp_packet_[1] = 0x34; - EXPECT_FALSE(s2_.UnprotectRtp(rtp_packet_, rtp_len_, &out_len)); + EXPECT_TRUE(s2_.SetReceive(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1, + kEncryptedHeaderExtensionIds)); + TestProtectRtp(webrtc::kSrtpAes128CmSha1_80); + rtp_packet_.MutableData()[0] = 0x12; + EXPECT_FALSE(s2_.UnprotectRtp(rtp_packet_)); EXPECT_METRIC_THAT( webrtc::metrics::Samples("WebRTC.PeerConnection.SrtpUnprotectError"), ElementsAre(Pair(srtp_err_status_bad_param, 1))); - EXPECT_FALSE(s2_.UnprotectRtcp(rtcp_packet_, rtcp_len_, &out_len)); + + TestProtectRtcp(webrtc::kSrtpAes128CmSha1_80); + rtcp_packet_.MutableData()[1] = 0x34; + EXPECT_FALSE(s2_.UnprotectRtcp(rtcp_packet_)); EXPECT_METRIC_THAT( webrtc::metrics::Samples("WebRTC.PeerConnection.SrtcpUnprotectError"), ElementsAre(Pair(srtp_err_status_auth_fail, 1))); @@ -168,16 +177,15 @@ TEST_F(SrtpSessionTest, TestTamperReject) { // Test that we fail to unprotect if the payloads are not authenticated. TEST_F(SrtpSessionTest, TestUnencryptReject) { - int out_len; - EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen, - kEncryptedHeaderExtensionIds)); - EXPECT_TRUE(s2_.SetRecv(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen, + EXPECT_TRUE(s1_.SetSend(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1, kEncryptedHeaderExtensionIds)); - EXPECT_FALSE(s2_.UnprotectRtp(rtp_packet_, rtp_len_, &out_len)); + EXPECT_TRUE(s2_.SetReceive(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1, + kEncryptedHeaderExtensionIds)); + EXPECT_FALSE(s2_.UnprotectRtp(rtp_packet_)); EXPECT_METRIC_THAT( webrtc::metrics::Samples("WebRTC.PeerConnection.SrtpUnprotectError"), ElementsAre(Pair(srtp_err_status_auth_fail, 1))); - EXPECT_FALSE(s2_.UnprotectRtcp(rtcp_packet_, rtcp_len_, &out_len)); + EXPECT_FALSE(s2_.UnprotectRtcp(rtcp_packet_)); EXPECT_METRIC_THAT( webrtc::metrics::Samples("WebRTC.PeerConnection.SrtcpUnprotectError"), ElementsAre(Pair(srtp_err_status_cant_check, 1))); @@ -185,60 +193,61 @@ TEST_F(SrtpSessionTest, TestUnencryptReject) { // Test that we fail when using buffers that are too small. TEST_F(SrtpSessionTest, TestBuffersTooSmall) { - int out_len; - EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen, + EXPECT_TRUE(s1_.SetSend(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1, kEncryptedHeaderExtensionIds)); - EXPECT_FALSE(s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_) - 10, - &out_len)); - EXPECT_FALSE(s1_.ProtectRtcp(rtcp_packet_, rtcp_len_, - sizeof(rtcp_packet_) - 14, &out_len)); + // This buffer does not have extra capacity which we treat as an error. + CopyOnWriteBuffer rtp_packet(rtp_packet_.data(), rtp_packet_.size(), + rtp_packet_.size()); + EXPECT_FALSE(s1_.ProtectRtp(rtp_packet)); + // This buffer does not have extra capacity which we treat as an error. 
+  CopyOnWriteBuffer rtcp_packet(rtcp_packet_.data(), rtcp_packet_.size(),
+                                rtcp_packet_.size());
+  EXPECT_FALSE(s1_.ProtectRtcp(rtcp_packet));
 }
 
 TEST_F(SrtpSessionTest, TestReplay) {
-  static const uint16_t kMaxSeqnum = static_cast<uint16_t>(-1);
+  static const uint16_t kMaxSeqnum = std::numeric_limits<uint16_t>::max() - 1;
   static const uint16_t seqnum_big = 62275;
   static const uint16_t seqnum_small = 10;
   static const uint16_t replay_window = 1024;
-  int out_len;
-  EXPECT_TRUE(s1_.SetSend(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
-                          kEncryptedHeaderExtensionIds));
-  EXPECT_TRUE(s2_.SetRecv(kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen,
+  EXPECT_TRUE(s1_.SetSend(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1,
                           kEncryptedHeaderExtensionIds));
+  EXPECT_TRUE(s2_.SetReceive(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1,
+                             kEncryptedHeaderExtensionIds));
 
   // Initial sequence number.
-  SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_) + 2, seqnum_big);
-  EXPECT_TRUE(
-      s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_), &out_len));
+  webrtc::SetBE16(rtp_packet_.MutableData() + 2, seqnum_big);
+  EXPECT_TRUE(s1_.ProtectRtp(rtp_packet_));
+  rtp_packet_.SetData(kPcmuFrame, sizeof(kPcmuFrame));
 
   // Replay within the 1024 window should succeed.
-  SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_) + 2,
-          seqnum_big - replay_window + 1);
-  EXPECT_TRUE(
-      s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_), &out_len));
+  webrtc::SetBE16(rtp_packet_.MutableData() + 2,
+                  seqnum_big - replay_window + 1);
+  EXPECT_TRUE(s1_.ProtectRtp(rtp_packet_));
+  rtp_packet_.SetData(kPcmuFrame, sizeof(kPcmuFrame));
 
   // Replay outside of the 1024 window should fail.
-  SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_) + 2,
-          seqnum_big - replay_window - 1);
-  EXPECT_FALSE(
-      s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_), &out_len));
+  webrtc::SetBE16(rtp_packet_.MutableData() + 2,
+                  seqnum_big - replay_window - 1);
+  EXPECT_FALSE(s1_.ProtectRtp(rtp_packet_));
+  rtp_packet_.SetData(kPcmuFrame, sizeof(kPcmuFrame));
 
   // Increment sequence number to a small number.
-  SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_) + 2, seqnum_small);
-  EXPECT_TRUE(
-      s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_), &out_len));
+  webrtc::SetBE16(rtp_packet_.MutableData() + 2, seqnum_small);
+  EXPECT_TRUE(s1_.ProtectRtp(rtp_packet_));
 
   // Replay around 0 but outside of the 1024 window should fail.
-  SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_) + 2,
-          kMaxSeqnum + seqnum_small - replay_window - 1);
-  EXPECT_FALSE(
-      s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_), &out_len));
+  webrtc::SetBE16(rtp_packet_.MutableData() + 2,
+                  kMaxSeqnum + seqnum_small - replay_window - 1);
+  EXPECT_FALSE(s1_.ProtectRtp(rtp_packet_));
+  rtp_packet_.SetData(kPcmuFrame, sizeof(kPcmuFrame));
 
   // Replay around 0 but within the 1024 window should succeed.
   for (uint16_t seqnum = 65000; seqnum < 65003; ++seqnum) {
-    SetBE16(reinterpret_cast<uint8_t*>(rtp_packet_) + 2, seqnum);
-    EXPECT_TRUE(
-        s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_), &out_len));
+    webrtc::SetBE16(rtp_packet_.MutableData() + 2, seqnum);
+    EXPECT_TRUE(s1_.ProtectRtp(rtp_packet_));
+    rtp_packet_.SetData(kPcmuFrame, sizeof(kPcmuFrame));
   }
 
   // Go back to normal sequence number.
@@ -246,9 +255,138 @@ TEST_F(SrtpSessionTest, TestReplay) {
   // without the fix, the loop above would keep incrementing local sequence
   // number in libsrtp, eventually the new sequence number would go outside
   // of the window.
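// ---------------------------------------------------------------------------
// Editorial aside (illustration only, not part of this patch, and not
// libsrtp's actual rdbx implementation): the accept/reject pattern exercised
// by this test can be modelled with 16-bit modular arithmetic. A sequence
// number that trails the most recently protected one by more than the replay
// window is rejected; newer sequence numbers simply advance the window.
#include <cstdint>

constexpr bool TrailsWithinReplayWindow(uint16_t latest,
                                        uint16_t candidate,
                                        uint16_t window) {
  // Unsigned subtraction wraps modulo 2^16, which is what makes the
  // around-zero cases work: with latest=10, candidate=65000 is only 546
  // behind.
  return static_cast<uint16_t>(latest - candidate) <= window;
}
// With a window of 1024: 61252 trails 62275 by 1023 (accepted above), 61250
// trails it by 1025 (rejected), and 65000 trails 10 by 546 (accepted). The
// final ProtectRtp() below moves back to seqnum_small + 1 to verify the same
// behaviour after the wrap.
static_assert(TrailsWithinReplayWindow(62275, 61252, 1024), "");
static_assert(!TrailsWithinReplayWindow(62275, 61250, 1024), "");
static_assert(TrailsWithinReplayWindow(10, 65000, 1024), "");
// ---------------------------------------------------------------------------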
- SetBE16(reinterpret_cast(rtp_packet_) + 2, seqnum_small + 1); - EXPECT_TRUE( - s1_.ProtectRtp(rtp_packet_, rtp_len_, sizeof(rtp_packet_), &out_len)); + webrtc::SetBE16(rtp_packet_.MutableData() + 2, seqnum_small + 1); + EXPECT_TRUE(s1_.ProtectRtp(rtp_packet_)); +} + +TEST_F(SrtpSessionTest, RemoveSsrc) { + EXPECT_TRUE(s1_.SetSend(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1, + kEncryptedHeaderExtensionIds)); + EXPECT_TRUE(s2_.SetReceive(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1, + kEncryptedHeaderExtensionIds)); + // Encrypt and decrypt the packet once. + EXPECT_TRUE(s1_.ProtectRtp(rtp_packet_)); + EXPECT_TRUE(s2_.UnprotectRtp(rtp_packet_)); + EXPECT_EQ(sizeof(kPcmuFrame), rtp_packet_.size()); + EXPECT_EQ(0, std::memcmp(kPcmuFrame, rtp_packet_.data(), rtp_packet_.size())); + + // Recreate the original packet and encrypt again. + rtp_packet_.SetData(kPcmuFrame, sizeof(kPcmuFrame)); + EXPECT_TRUE(s1_.ProtectRtp(rtp_packet_)); + // Attempting to decrypt will fail as a replay attack. + // (srtp_err_status_replay_fail) since the sequence number was already seen. + EXPECT_FALSE(s2_.UnprotectRtp(rtp_packet_)); + + // Remove the fake packet SSRC 1 from the session. + EXPECT_TRUE(s2_.RemoveSsrcFromSession(1)); + EXPECT_FALSE(s2_.RemoveSsrcFromSession(1)); + + // Since the SRTP state was discarded, this is no longer a replay attack. + EXPECT_TRUE(s2_.UnprotectRtp(rtp_packet_)); + EXPECT_EQ(sizeof(kPcmuFrame), rtp_packet_.size()); + EXPECT_EQ(0, std::memcmp(kPcmuFrame, rtp_packet_.data(), rtp_packet_.size())); + EXPECT_TRUE(s2_.RemoveSsrcFromSession(1)); +} + +TEST_F(SrtpSessionTest, ProtectUnprotectWrapAroundRocMismatch) { + // This unit tests demonstrates why you should be careful when + // choosing the initial RTP sequence number as there can be decryption + // failures when it wraps around with packet loss. Pick your starting + // sequence number in the lower half of the range for robustness reasons, + // see packet_sequencer.cc for the code doing so. + EXPECT_TRUE(s1_.SetSend(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1, + kEncryptedHeaderExtensionIds)); + EXPECT_TRUE(s2_.SetReceive(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1, + kEncryptedHeaderExtensionIds)); + // Buffers include enough room for the 10 byte SRTP auth tag so we can + // encrypt in place. + unsigned char kFrame1[] = { + // clang-format off + // PT=0, SN=65535, TS=0, SSRC=1 + 0x80, 0x00, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, + 0xBE, 0xEF, // data bytes + // Space for the SRTP auth tag + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + // clang-format on + }; + CopyOnWriteBuffer packet1(kFrame1, sizeof(kFrame1) - 10, sizeof(kFrame1)); + unsigned char kFrame2[] = { + // clang-format off + // PT=0, SN=1, TS=0, SSRC=1 + 0x80, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, + 0xBE, 0xEF, // data bytes + // Space for the SRTP auth tag + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + // clang-format on + }; + CopyOnWriteBuffer packet2(kFrame2, sizeof(kFrame2) - 10, sizeof(kFrame1)); + const unsigned char kPayload[] = {0xBE, 0xEF}; + + // Encrypt the frames in-order. There is a sequence number rollover from + // 65535 to 1 (skipping 0) and the second packet gets encrypted with a + // roll-over counter (ROC) of 1. 
See + // https://datatracker.ietf.org/doc/html/rfc3711#section-3.3.1 + EXPECT_TRUE(s1_.ProtectRtp(packet1)); + EXPECT_EQ(packet1.size(), 24u); + EXPECT_TRUE(s1_.ProtectRtp(packet2)); + EXPECT_EQ(packet2.size(), 24u); + + // If we decrypt frame 2 first it will have a ROC of 1 but the receiver + // does not know this is a rollover so will attempt with a ROC of 0. + // Note: If libsrtp is modified to attempt to decrypt with ROC=1 for this + // case, this test will fail and needs to be modified accordingly to unblock + // the roll. See https://issues.webrtc.org/353565743 for details. + EXPECT_FALSE(s2_.UnprotectRtp(packet2)); + // Decrypt frame 1. + EXPECT_TRUE(s2_.UnprotectRtp(packet1)); + ASSERT_EQ(packet1.size(), 14u); + EXPECT_EQ(0, std::memcmp(packet1.data() + 12, kPayload, sizeof(kPayload))); + // Now decrypt frame 2 again. A rollover is detected which increases + // the ROC to 1 so this succeeds. + EXPECT_TRUE(s2_.UnprotectRtp(packet2)); + ASSERT_EQ(packet2.size(), 14u); + EXPECT_EQ(0, std::memcmp(packet2.data() + 12, kPayload, sizeof(kPayload))); +} + +TEST_F(SrtpSessionTest, ProtectGetPacketIndex) { + EXPECT_TRUE(s1_.SetSend(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1, + kEncryptedHeaderExtensionIds)); + EXPECT_TRUE(s2_.SetReceive(webrtc::kSrtpAes128CmSha1_80, webrtc::kTestKey1, + kEncryptedHeaderExtensionIds)); + // Buffers include enough room for the 10 byte SRTP auth tag so we can + // encrypt in place. + unsigned char kFrame1[] = { + // clang-format off + // PT=0, SN=65535, TS=0, SSRC=1 + 0x80, 0x00, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, + 0xBE, 0xEF, // data bytes + // Space for the SRTP auth tag + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + // clang-format on + }; + CopyOnWriteBuffer packet1(kFrame1, sizeof(kFrame1) - 10, sizeof(kFrame1)); + unsigned char kFrame2[] = { + // clang-format off + // PT=0, SN=1, TS=0, SSRC=1 + 0x80, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, + 0xBE, 0xEF, // data bytes + // Space for the SRTP auth tag + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + // clang-format on + }; + CopyOnWriteBuffer packet2(kFrame2, sizeof(kFrame2) - 10, sizeof(kFrame1)); + + // Encrypt the frames in-order. There is a sequence number rollover from + // 65535 to 1 (skipping 0) and the second packet gets encrypted with a + // roll-over counter (ROC) of 1. 
See + // https://datatracker.ietf.org/doc/html/rfc3711#section-3.3.1 + int64_t index; + EXPECT_TRUE(s1_.ProtectRtp(packet1, &index)); + EXPECT_EQ(packet1.size(), 24u); + EXPECT_EQ(index, 0xffff00000000); // ntohl(65535 << 16) + EXPECT_TRUE(s1_.ProtectRtp(packet2, &index)); + EXPECT_EQ(packet2.size(), 24u); + EXPECT_EQ(index, 0x10001000000); // ntohl(65537 << 16) } -} // namespace rtc +} // namespace webrtc diff --git a/pc/srtp_transport.cc b/pc/srtp_transport.cc index cc20216672..62f4a1400e 100644 --- a/pc/srtp_transport.cc +++ b/pc/srtp_transport.cc @@ -10,140 +10,61 @@ #include "pc/srtp_transport.h" -#include - -#include +#include +#include #include #include -#include "absl/strings/match.h" +#include "api/field_trials_view.h" +#include "api/units/timestamp.h" +#include "call/rtp_demuxer.h" #include "media/base/rtp_utils.h" #include "modules/rtp_rtcp/source/rtp_util.h" +#include "p2p/base/packet_transport_internal.h" #include "pc/rtp_transport.h" #include "pc/srtp_session.h" #include "rtc_base/async_packet_socket.h" +#include "rtc_base/buffer.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/third_party/base64/base64.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network_route.h" #include "rtc_base/trace_event.h" -#include "rtc_base/zero_memory.h" namespace webrtc { SrtpTransport::SrtpTransport(bool rtcp_mux_enabled, const FieldTrialsView& field_trials) - : RtpTransport(rtcp_mux_enabled), field_trials_(field_trials) {} - -RTCError SrtpTransport::SetSrtpSendKey(const cricket::CryptoParams& params) { - if (send_params_) { - LOG_AND_RETURN_ERROR( - webrtc::RTCErrorType::UNSUPPORTED_OPERATION, - "Setting the SRTP send key twice is currently unsupported."); - } - if (recv_params_ && recv_params_->crypto_suite != params.crypto_suite) { - LOG_AND_RETURN_ERROR( - webrtc::RTCErrorType::UNSUPPORTED_OPERATION, - "The send key and receive key must have the same cipher suite."); - } - - send_crypto_suite_ = rtc::SrtpCryptoSuiteFromName(params.crypto_suite); - if (*send_crypto_suite_ == rtc::kSrtpInvalidCryptoSuite) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Invalid SRTP crypto suite"); - } + : RtpTransport(rtcp_mux_enabled, field_trials), + field_trials_(field_trials) {} - int send_key_len, send_salt_len; - if (!rtc::GetSrtpKeyAndSaltLengths(*send_crypto_suite_, &send_key_len, - &send_salt_len)) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Could not get lengths for crypto suite(s):" - " send crypto_suite "); - } - - send_key_ = rtc::ZeroOnFreeBuffer(send_key_len + send_salt_len); - if (!ParseKeyParams(params.key_params, send_key_.data(), send_key_.size())) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Failed to parse the crypto key params"); - } - - if (!MaybeSetKeyParams()) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Failed to set the crypto key params"); - } - send_params_ = params; - return RTCError::OK(); -} - -RTCError SrtpTransport::SetSrtpReceiveKey(const cricket::CryptoParams& params) { - if (recv_params_) { - LOG_AND_RETURN_ERROR( - webrtc::RTCErrorType::UNSUPPORTED_OPERATION, - "Setting the SRTP send key twice is currently unsupported."); - } - if (send_params_ && send_params_->crypto_suite != params.crypto_suite) { - LOG_AND_RETURN_ERROR( - webrtc::RTCErrorType::UNSUPPORTED_OPERATION, - "The send key and receive key must have the same cipher suite."); - } - - 
recv_crypto_suite_ = rtc::SrtpCryptoSuiteFromName(params.crypto_suite); - if (*recv_crypto_suite_ == rtc::kSrtpInvalidCryptoSuite) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Invalid SRTP crypto suite"); - } - - int recv_key_len, recv_salt_len; - if (!rtc::GetSrtpKeyAndSaltLengths(*recv_crypto_suite_, &recv_key_len, - &recv_salt_len)) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Could not get lengths for crypto suite(s):" - " recv crypto_suite "); - } - - recv_key_ = rtc::ZeroOnFreeBuffer(recv_key_len + recv_salt_len); - if (!ParseKeyParams(params.key_params, recv_key_.data(), recv_key_.size())) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Failed to parse the crypto key params"); - } - - if (!MaybeSetKeyParams()) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Failed to set the crypto key params"); - } - recv_params_ = params; - return RTCError::OK(); -} - -bool SrtpTransport::SendRtpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, +bool SrtpTransport::SendRtpPacket(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options, int flags) { + RTC_DCHECK(packet); if (!IsSrtpActive()) { RTC_LOG(LS_ERROR) << "Failed to send the packet because SRTP transport is inactive."; return false; } - rtc::PacketOptions updated_options = options; + AsyncSocketPacketOptions updated_options = options; TRACE_EVENT0("webrtc", "SRTP Encode"); + // If ENABLE_EXTERNAL_AUTH flag is on then packet authentication is not done + // inside libsrtp for a RTP packet. A external HMAC module will be writing + // a fake HMAC value. This is ONLY done for a RTP packet. + // Socket layer will update rtp sendtime extension header if present in + // packet with current time before updating the HMAC. bool res; - uint8_t* data = packet->MutableData(); - int len = rtc::checked_cast(packet->size()); -// If ENABLE_EXTERNAL_AUTH flag is on then packet authentication is not done -// inside libsrtp for a RTP packet. A external HMAC module will be writing -// a fake HMAC value. This is ONLY done for a RTP packet. -// Socket layer will update rtp sendtime extension header if present in -// packet with current time before updating the HMAC. #if !defined(ENABLE_EXTERNAL_AUTH) - res = ProtectRtp(data, len, static_cast(packet->capacity()), &len); + res = ProtectRtp(*packet); #else if (!IsExternalAuthActive()) { - res = ProtectRtp(data, len, static_cast(packet->capacity()), &len); + res = ProtectRtp(*packet); } else { updated_options.packet_time_params.rtp_sendtime_extension_id = rtp_abs_sendtime_extn_id_; - res = ProtectRtp(data, len, static_cast(packet->capacity()), &len, + res = ProtectRtp(*packet, &updated_options.packet_time_params.srtp_packet_index); // If protection succeeds, let's get auth params from srtp. if (res) { @@ -163,19 +84,18 @@ bool SrtpTransport::SendRtpPacket(rtc::CopyOnWriteBuffer* packet, if (!res) { uint16_t seq_num = ParseRtpSequenceNumber(*packet); uint32_t ssrc = ParseRtpSsrc(*packet); - RTC_LOG(LS_ERROR) << "Failed to protect RTP packet: size=" << len + RTC_LOG(LS_ERROR) << "Failed to protect RTP packet: size=" << packet->size() << ", seqnum=" << seq_num << ", SSRC=" << ssrc; return false; } - // Update the length of the packet now that we've added the auth tag. 
- packet->SetSize(len); return SendPacket(/*rtcp=*/false, packet, updated_options, flags); } -bool SrtpTransport::SendRtcpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, +bool SrtpTransport::SendRtcpPacket(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options, int flags) { + RTC_DCHECK(packet); if (!IsSrtpActive()) { RTC_LOG(LS_ERROR) << "Failed to send the packet because SRTP transport is inactive."; @@ -183,72 +103,67 @@ bool SrtpTransport::SendRtcpPacket(rtc::CopyOnWriteBuffer* packet, } TRACE_EVENT0("webrtc", "SRTP Encode"); - uint8_t* data = packet->MutableData(); - int len = rtc::checked_cast(packet->size()); - if (!ProtectRtcp(data, len, static_cast(packet->capacity()), &len)) { + if (!ProtectRtcp(*packet)) { int type = -1; - cricket::GetRtcpType(data, len, &type); - RTC_LOG(LS_ERROR) << "Failed to protect RTCP packet: size=" << len - << ", type=" << type; + GetRtcpType(packet->data(), packet->size(), &type); + RTC_LOG(LS_ERROR) << "Failed to protect RTCP packet: size=" + << packet->size() << ", type=" << type; return false; } - // Update the length of the packet now that we've added the auth tag. - packet->SetSize(len); return SendPacket(/*rtcp=*/true, packet, options, flags); } -void SrtpTransport::OnRtpPacketReceived(rtc::CopyOnWriteBuffer packet, - int64_t packet_time_us) { +void SrtpTransport::OnRtpPacketReceived(const ReceivedIpPacket& packet) { TRACE_EVENT0("webrtc", "SrtpTransport::OnRtpPacketReceived"); if (!IsSrtpActive()) { RTC_LOG(LS_WARNING) << "Inactive SRTP transport received an RTP packet. Drop it."; return; } - char* data = packet.MutableData(); - int len = rtc::checked_cast(packet.size()); - if (!UnprotectRtp(data, len, &len)) { + + CopyOnWriteBuffer payload(packet.payload()); + if (!UnprotectRtp(payload)) { // Limit the error logging to avoid excessive logs when there are lots of // bad packets. const int kFailureLogThrottleCount = 100; if (decryption_failure_count_ % kFailureLogThrottleCount == 0) { - RTC_LOG(LS_ERROR) << "Failed to unprotect RTP packet: size=" << len - << ", seqnum=" << ParseRtpSequenceNumber(packet) - << ", SSRC=" << ParseRtpSsrc(packet) + RTC_LOG(LS_ERROR) << "Failed to unprotect RTP packet: size=" + << payload.size() + << ", seqnum=" << ParseRtpSequenceNumber(payload) + << ", SSRC=" << ParseRtpSsrc(payload) << ", previous failure count: " << decryption_failure_count_; } ++decryption_failure_count_; return; } - packet.SetSize(len); - DemuxPacket(std::move(packet), packet_time_us); + DemuxPacket(std::move(payload), + packet.arrival_time().value_or(Timestamp::MinusInfinity()), + packet.ecn()); } -void SrtpTransport::OnRtcpPacketReceived(rtc::CopyOnWriteBuffer packet, - int64_t packet_time_us) { +void SrtpTransport::OnRtcpPacketReceived(const ReceivedIpPacket& packet) { TRACE_EVENT0("webrtc", "SrtpTransport::OnRtcpPacketReceived"); if (!IsSrtpActive()) { RTC_LOG(LS_WARNING) << "Inactive SRTP transport received an RTCP packet. 
Drop it."; return; } - char* data = packet.MutableData(); - int len = rtc::checked_cast(packet.size()); - if (!UnprotectRtcp(data, len, &len)) { + CopyOnWriteBuffer payload(packet.payload()); + if (!UnprotectRtcp(payload)) { int type = -1; - cricket::GetRtcpType(data, len, &type); - RTC_LOG(LS_ERROR) << "Failed to unprotect RTCP packet: size=" << len - << ", type=" << type; + GetRtcpType(payload.data(), payload.size(), &type); + RTC_LOG(LS_ERROR) << "Failed to unprotect RTCP packet: size=" + << payload.size() << ", type=" << type; return; } - packet.SetSize(len); - SendRtcpPacketReceived(&packet, packet_time_us); + SendRtcpPacketReceived( + &payload, packet.arrival_time() ? packet.arrival_time()->us() : -1); } void SrtpTransport::OnNetworkRouteChanged( - absl::optional network_route) { + std::optional network_route) { // Only append the SRTP overhead when there is a selected network route. if (network_route) { int srtp_overhead = 0; @@ -260,23 +175,20 @@ void SrtpTransport::OnNetworkRouteChanged( SendNetworkRouteChanged(network_route); } -void SrtpTransport::OnWritableState( - rtc::PacketTransportInternal* packet_transport) { +void SrtpTransport::OnWritableState(PacketTransportInternal* packet_transport) { SendWritableState(IsWritable(/*rtcp=*/false) && IsWritable(/*rtcp=*/true)); } bool SrtpTransport::SetRtpParams(int send_crypto_suite, - const uint8_t* send_key, - int send_key_len, + const ZeroOnFreeBuffer& send_key, const std::vector& send_extension_ids, int recv_crypto_suite, - const uint8_t* recv_key, - int recv_key_len, + const ZeroOnFreeBuffer& recv_key, const std::vector& recv_extension_ids) { // If parameters are being set for the first time, we should create new SRTP - // sessions and call "SetSend/SetRecv". Otherwise we should call - // "UpdateSend"/"UpdateRecv" on the existing sessions, which will internally - // call "srtp_update". + // sessions and call "SetSend/SetReceive". Otherwise we should call + // "UpdateSend"/"UpdateReceive" on the existing sessions, which will + // internally call "srtp_update". bool new_sessions = false; if (!send_session_) { RTC_DCHECK(!recv_session_); @@ -285,19 +197,18 @@ bool SrtpTransport::SetRtpParams(int send_crypto_suite, } bool ret = new_sessions ? send_session_->SetSend(send_crypto_suite, send_key, - send_key_len, send_extension_ids) + send_extension_ids) : send_session_->UpdateSend(send_crypto_suite, send_key, - send_key_len, send_extension_ids); + send_extension_ids); if (!ret) { ResetParams(); return false; } - ret = new_sessions - ? recv_session_->SetRecv(recv_crypto_suite, recv_key, recv_key_len, - recv_extension_ids) - : recv_session_->UpdateRecv(recv_crypto_suite, recv_key, - recv_key_len, recv_extension_ids); + ret = new_sessions ? 
recv_session_->SetReceive(recv_crypto_suite, recv_key, + recv_extension_ids) + : recv_session_->UpdateReceive(recv_crypto_suite, recv_key, + recv_extension_ids); if (!ret) { ResetParams(); return false; @@ -312,12 +223,10 @@ bool SrtpTransport::SetRtpParams(int send_crypto_suite, } bool SrtpTransport::SetRtcpParams(int send_crypto_suite, - const uint8_t* send_key, - int send_key_len, + const ZeroOnFreeBuffer& send_key, const std::vector& send_extension_ids, int recv_crypto_suite, - const uint8_t* recv_key, - int recv_key_len, + const ZeroOnFreeBuffer& recv_key, const std::vector& recv_extension_ids) { // This can only be called once, but can be safely called after // SetRtpParams @@ -326,15 +235,15 @@ bool SrtpTransport::SetRtcpParams(int send_crypto_suite, return false; } - send_rtcp_session_.reset(new cricket::SrtpSession(field_trials_)); - if (!send_rtcp_session_->SetSend(send_crypto_suite, send_key, send_key_len, + send_rtcp_session_.reset(new SrtpSession(field_trials_)); + if (!send_rtcp_session_->SetSend(send_crypto_suite, send_key, send_extension_ids)) { return false; } - recv_rtcp_session_.reset(new cricket::SrtpSession(field_trials_)); - if (!recv_rtcp_session_->SetRecv(recv_crypto_suite, recv_key, recv_key_len, - recv_extension_ids)) { + recv_rtcp_session_.reset(new SrtpSession(field_trials_)); + if (!recv_rtcp_session_->SetReceive(recv_crypto_suite, recv_key, + recv_extension_ids)) { return false; } @@ -364,70 +273,63 @@ void SrtpTransport::ResetParams() { } void SrtpTransport::CreateSrtpSessions() { - send_session_.reset(new cricket::SrtpSession(field_trials_)); - recv_session_.reset(new cricket::SrtpSession(field_trials_)); + send_session_.reset(new SrtpSession(field_trials_)); + recv_session_.reset(new SrtpSession(field_trials_)); if (external_auth_enabled_) { send_session_->EnableExternalAuth(); } } -bool SrtpTransport::ProtectRtp(void* p, int in_len, int max_len, int* out_len) { +bool SrtpTransport::ProtectRtp(CopyOnWriteBuffer& buffer) { if (!IsSrtpActive()) { RTC_LOG(LS_WARNING) << "Failed to ProtectRtp: SRTP not active"; return false; } RTC_CHECK(send_session_); - return send_session_->ProtectRtp(p, in_len, max_len, out_len); + return send_session_->ProtectRtp(buffer); } -bool SrtpTransport::ProtectRtp(void* p, - int in_len, - int max_len, - int* out_len, - int64_t* index) { +bool SrtpTransport::ProtectRtp(CopyOnWriteBuffer& buffer, int64_t* index) { if (!IsSrtpActive()) { RTC_LOG(LS_WARNING) << "Failed to ProtectRtp: SRTP not active"; return false; } RTC_CHECK(send_session_); - return send_session_->ProtectRtp(p, in_len, max_len, out_len, index); + return send_session_->ProtectRtp(buffer, index); } -bool SrtpTransport::ProtectRtcp(void* p, - int in_len, - int max_len, - int* out_len) { +bool SrtpTransport::ProtectRtcp(CopyOnWriteBuffer& buffer) { if (!IsSrtpActive()) { RTC_LOG(LS_WARNING) << "Failed to ProtectRtcp: SRTP not active"; return false; } if (send_rtcp_session_) { - return send_rtcp_session_->ProtectRtcp(p, in_len, max_len, out_len); + return send_rtcp_session_->ProtectRtcp(buffer); } else { RTC_CHECK(send_session_); - return send_session_->ProtectRtcp(p, in_len, max_len, out_len); + return send_session_->ProtectRtcp(buffer); } } -bool SrtpTransport::UnprotectRtp(void* p, int in_len, int* out_len) { +bool SrtpTransport::UnprotectRtp(CopyOnWriteBuffer& buffer) { if (!IsSrtpActive()) { RTC_LOG(LS_WARNING) << "Failed to UnprotectRtp: SRTP not active"; return false; } RTC_CHECK(recv_session_); - return recv_session_->UnprotectRtp(p, in_len, out_len); + return 
recv_session_->UnprotectRtp(buffer); } -bool SrtpTransport::UnprotectRtcp(void* p, int in_len, int* out_len) { +bool SrtpTransport::UnprotectRtcp(CopyOnWriteBuffer& buffer) { if (!IsSrtpActive()) { RTC_LOG(LS_WARNING) << "Failed to UnprotectRtcp: SRTP not active"; return false; } if (recv_rtcp_session_) { - return recv_rtcp_session_->UnprotectRtcp(p, in_len, out_len); + return recv_rtcp_session_->UnprotectRtcp(buffer); } else { RTC_CHECK(recv_session_); - return recv_session_->UnprotectRtcp(p, in_len, out_len); + return recv_session_->UnprotectRtcp(buffer); } } @@ -474,42 +376,6 @@ bool SrtpTransport::IsExternalAuthActive() const { return send_session_->IsExternalAuthActive(); } -bool SrtpTransport::MaybeSetKeyParams() { - if (!send_crypto_suite_ || !recv_crypto_suite_) { - return true; - } - - return SetRtpParams(*send_crypto_suite_, send_key_.data(), - static_cast(send_key_.size()), std::vector(), - *recv_crypto_suite_, recv_key_.data(), - static_cast(recv_key_.size()), std::vector()); -} - -bool SrtpTransport::ParseKeyParams(const std::string& key_params, - uint8_t* key, - size_t len) { - // example key_params: "inline:YUJDZGVmZ2hpSktMbW9QUXJzVHVWd3l6MTIzNDU2" - - // Fail if key-method is wrong. - if (!absl::StartsWith(key_params, "inline:")) { - return false; - } - - // Fail if base64 decode fails, or the key is the wrong size. - std::string key_b64(key_params.substr(7)), key_str; - if (!rtc::Base64::Decode(key_b64, rtc::Base64::DO_STRICT, &key_str, - nullptr) || - key_str.size() != len) { - return false; - } - - memcpy(key, key_str.c_str(), len); - // TODO(bugs.webrtc.org/8905): Switch to ZeroOnFreeBuffer for storing - // sensitive data. - rtc::ExplicitZeroMemory(&key_str[0], key_str.size()); - return true; -} - void SrtpTransport::MaybeUpdateWritableState() { bool writable = IsWritable(/*rtcp=*/true) && IsWritable(/*rtcp=*/false); // Only fire the signal if the writable state changes. @@ -519,4 +385,19 @@ void SrtpTransport::MaybeUpdateWritableState() { } } +bool SrtpTransport::UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) { + if (recv_session_ && + field_trials_.IsEnabled("WebRTC-SrtpRemoveReceiveStream")) { + // Remove the SSRCs explicitly registered with the demuxer + // (via SDP negotiation) from the SRTP session. 
+ for (const auto ssrc : GetSsrcsForSink(sink)) { + if (!recv_session_->RemoveSsrcFromSession(ssrc)) { + RTC_LOG(LS_WARNING) + << "Could not remove SSRC " << ssrc << " from SRTP session."; + } + } + } + return RtpTransport::UnregisterRtpDemuxerSink(sink); +} + } // namespace webrtc diff --git a/pc/srtp_transport.h b/pc/srtp_transport.h index 46c11ed56d..1dddf3eabb 100644 --- a/pc/srtp_transport.h +++ b/pc/srtp_transport.h @@ -15,19 +15,19 @@ #include #include +#include #include #include -#include "absl/types/optional.h" -#include "api/crypto_params.h" #include "api/field_trials_view.h" -#include "api/rtc_error.h" +#include "call/rtp_demuxer.h" #include "p2p/base/packet_transport_internal.h" #include "pc/rtp_transport.h" #include "pc/srtp_session.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/buffer.h" #include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/network_route.h" namespace webrtc { @@ -41,15 +41,12 @@ class SrtpTransport : public RtpTransport { virtual ~SrtpTransport() = default; - virtual RTCError SetSrtpSendKey(const cricket::CryptoParams& params); - virtual RTCError SetSrtpReceiveKey(const cricket::CryptoParams& params); - - bool SendRtpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, + bool SendRtpPacket(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options, int flags) override; - bool SendRtcpPacket(rtc::CopyOnWriteBuffer* packet, - const rtc::PacketOptions& options, + bool SendRtcpPacket(CopyOnWriteBuffer* packet, + const AsyncSocketPacketOptions& options, int flags) override; // The transport becomes active if the send_session_ and recv_session_ are @@ -62,24 +59,20 @@ class SrtpTransport : public RtpTransport { // packet encryption. The keys can either come from SDES negotiation or DTLS // handshake. bool SetRtpParams(int send_crypto_suite, - const uint8_t* send_key, - int send_key_len, + const ZeroOnFreeBuffer& send_key, const std::vector& send_extension_ids, int recv_crypto_suite, - const uint8_t* recv_key, - int recv_key_len, + const ZeroOnFreeBuffer& recv_key, const std::vector& recv_extension_ids); // Create new send/recv sessions and set the negotiated crypto keys for RTCP // packet encryption. The keys can either come from SDES negotiation or DTLS // handshake. bool SetRtcpParams(int send_crypto_suite, - const uint8_t* send_key, - int send_key_len, + const ZeroOnFreeBuffer& send_key, const std::vector& send_extension_ids, int recv_crypto_suite, - const uint8_t* recv_key, - int recv_key_len, + const ZeroOnFreeBuffer& recv_key, const std::vector& recv_extension_ids); void ResetParams(); @@ -109,6 +102,10 @@ class SrtpTransport : public RtpTransport { rtp_abs_sendtime_extn_id_ = rtp_abs_sendtime_extn_id; } + // In addition to unregistering the sink, the SRTP transport + // disassociates all SSRCs of the sink from libSRTP. + bool UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) override; + protected: // If the writable state changed, fire the SignalWritableState. 
void MaybeUpdateWritableState(); @@ -117,48 +114,35 @@ class SrtpTransport : public RtpTransport { void ConnectToRtpTransport(); void CreateSrtpSessions(); - void OnRtpPacketReceived(rtc::CopyOnWriteBuffer packet, - int64_t packet_time_us) override; - void OnRtcpPacketReceived(rtc::CopyOnWriteBuffer packet, - int64_t packet_time_us) override; + void OnRtpPacketReceived(const ReceivedIpPacket& packet) override; + void OnRtcpPacketReceived(const ReceivedIpPacket& packet) override; void OnNetworkRouteChanged( - absl::optional network_route) override; + std::optional network_route) override; // Override the RtpTransport::OnWritableState. - void OnWritableState(rtc::PacketTransportInternal* packet_transport) override; - - bool ProtectRtp(void* data, int in_len, int max_len, int* out_len); + void OnWritableState(PacketTransportInternal* packet_transport) override; + bool ProtectRtp(CopyOnWriteBuffer& buffer); // Overloaded version, outputs packet index. - bool ProtectRtp(void* data, - int in_len, - int max_len, - int* out_len, - int64_t* index); - bool ProtectRtcp(void* data, int in_len, int max_len, int* out_len); + bool ProtectRtp(CopyOnWriteBuffer& buffer, int64_t* index); + bool ProtectRtcp(CopyOnWriteBuffer& buffer); // Decrypts/verifies an invidiual RTP/RTCP packet. // If an HMAC is used, this will decrease the packet size. - bool UnprotectRtp(void* data, int in_len, int* out_len); - - bool UnprotectRtcp(void* data, int in_len, int* out_len); - - bool MaybeSetKeyParams(); - bool ParseKeyParams(const std::string& key_params, uint8_t* key, size_t len); + bool UnprotectRtp(CopyOnWriteBuffer& buffer); + bool UnprotectRtcp(CopyOnWriteBuffer& buffer); const std::string content_name_; - std::unique_ptr send_session_; - std::unique_ptr recv_session_; - std::unique_ptr send_rtcp_session_; - std::unique_ptr recv_rtcp_session_; - - absl::optional send_params_; - absl::optional recv_params_; - absl::optional send_crypto_suite_; - absl::optional recv_crypto_suite_; - rtc::ZeroOnFreeBuffer send_key_; - rtc::ZeroOnFreeBuffer recv_key_; + std::unique_ptr send_session_; + std::unique_ptr recv_session_; + std::unique_ptr send_rtcp_session_; + std::unique_ptr recv_rtcp_session_; + + std::optional send_crypto_suite_; + std::optional recv_crypto_suite_; + ZeroOnFreeBuffer send_key_; + ZeroOnFreeBuffer recv_key_; bool writable_ = false; diff --git a/pc/srtp_transport_unittest.cc b/pc/srtp_transport_unittest.cc index ac8be8762b..91558324fd 100644 --- a/pc/srtp_transport_unittest.cc +++ b/pc/srtp_transport_unittest.cc @@ -12,37 +12,42 @@ #include +#include +#include #include #include "call/rtp_demuxer.h" #include "media/base/fake_rtp.h" -#include "p2p/base/dtls_transport_internal.h" -#include "p2p/base/fake_packet_transport.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "p2p/test/fake_packet_transport.h" #include "pc/test/rtp_transport_test_util.h" #include "pc/test/srtp_test_util.h" #include "rtc_base/async_packet_socket.h" +#include "rtc_base/buffer.h" #include "rtc_base/byte_order.h" #include "rtc_base/checks.h" #include "rtc_base/containers/flat_set.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "test/gtest.h" #include "test/scoped_key_value_config.h" -using rtc::kSrtpAeadAes128Gcm; -using rtc::kTestKey1; -using rtc::kTestKey2; -using rtc::kTestKeyLen; +using ::webrtc::kSrtpAeadAes128Gcm; +using ::webrtc::kTestKey1; +using ::webrtc::kTestKey2; namespace webrtc { -static const uint8_t 
kTestKeyGcm128_1[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ12"; -static const uint8_t kTestKeyGcm128_2[] = "21ZYXWVUTSRQPONMLKJIHGFEDCBA"; -static const int kTestKeyGcm128Len = 28; // 128 bits key + 96 bits salt. -static const uint8_t kTestKeyGcm256_1[] = - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqr"; -static const uint8_t kTestKeyGcm256_2[] = - "rqponmlkjihgfedcbaZYXWVUTSRQPONMLKJIHGFEDCBA"; -static const int kTestKeyGcm256Len = 44; // 256 bits key + 96 bits salt. +// 128 bits key + 96 bits salt. +static const ZeroOnFreeBuffer kTestKeyGcm128_1{ + "ABCDEFGHIJKLMNOPQRSTUVWXYZ12", 28}; +static const ZeroOnFreeBuffer kTestKeyGcm128_2{ + "21ZYXWVUTSRQPONMLKJIHGFEDCBA", 28}; +// 256 bits key + 96 bits salt. +static const ZeroOnFreeBuffer kTestKeyGcm256_1{ + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqr", 44}; +static const ZeroOnFreeBuffer kTestKeyGcm256_2{ + "rqponmlkjihgfedcbaZYXWVUTSRQPONMLKJIHGFEDCBA", 44}; class SrtpTransportTest : public ::testing::Test, public sigslot::has_slots<> { protected: @@ -50,9 +55,9 @@ class SrtpTransportTest : public ::testing::Test, public sigslot::has_slots<> { bool rtcp_mux_enabled = true; rtp_packet_transport1_ = - std::make_unique("fake_packet_transport1"); + std::make_unique("fake_packet_transport1"); rtp_packet_transport2_ = - std::make_unique("fake_packet_transport2"); + std::make_unique("fake_packet_transport2"); bool asymmetric = false; rtp_packet_transport1_->SetDestination(rtp_packet_transport2_.get(), @@ -67,13 +72,11 @@ class SrtpTransportTest : public ::testing::Test, public sigslot::has_slots<> { srtp_transport2_->SetRtpPacketTransport(rtp_packet_transport2_.get()); srtp_transport1_->SubscribeRtcpPacketReceived( - &rtp_sink1_, - [this](rtc::CopyOnWriteBuffer* buffer, int64_t packet_time_ms) { + &rtp_sink1_, [this](CopyOnWriteBuffer* buffer, int64_t packet_time_ms) { rtp_sink1_.OnRtcpPacketReceived(buffer, packet_time_ms); }); srtp_transport2_->SubscribeRtcpPacketReceived( - &rtp_sink2_, - [this](rtc::CopyOnWriteBuffer* buffer, int64_t packet_time_ms) { + &rtp_sink2_, [this](CopyOnWriteBuffer* buffer, int64_t packet_time_ms) { rtp_sink2_.OnRtcpPacketReceived(buffer, packet_time_ms); }); @@ -98,14 +101,14 @@ class SrtpTransportTest : public ::testing::Test, public sigslot::has_slots<> { // unprotect would fail. Check accessing the information about the // tag instead, similar to what the actual code would do that relies // on external auth. - void TestRtpAuthParams(SrtpTransport* transport, const std::string& cs) { + void TestRtpAuthParams(SrtpTransport* transport, int crypto_suite) { int overhead; EXPECT_TRUE(transport->GetSrtpOverhead(&overhead)); - switch (rtc::SrtpCryptoSuiteFromName(cs)) { - case rtc::kSrtpAes128CmSha1_32: + switch (crypto_suite) { + case kSrtpAes128CmSha1_32: EXPECT_EQ(32 / 8, overhead); // 32-bit tag. break; - case rtc::kSrtpAes128CmSha1_80: + case kSrtpAes128CmSha1_80: EXPECT_EQ(80 / 8, overhead); // 80-bit tag. 
break; default: @@ -122,38 +125,36 @@ class SrtpTransportTest : public ::testing::Test, public sigslot::has_slots<> { EXPECT_EQ(overhead, tag_len); } - void TestSendRecvRtpPacket(const std::string& cipher_suite_name) { + void TestSendRecvRtpPacket(int crypto_suite) { size_t rtp_len = sizeof(kPcmuFrame); - size_t packet_size = rtp_len + rtc::rtp_auth_tag_len(cipher_suite_name); - rtc::Buffer rtp_packet_buffer(packet_size); + size_t packet_size = rtp_len + rtp_auth_tag_len(crypto_suite); + Buffer rtp_packet_buffer(packet_size); char* rtp_packet_data = rtp_packet_buffer.data(); memcpy(rtp_packet_data, kPcmuFrame, rtp_len); // In order to be able to run this test function multiple times we can not // use the same sequence number twice. Increase the sequence number by one. - rtc::SetBE16(reinterpret_cast(rtp_packet_data) + 2, - ++sequence_number_); - rtc::CopyOnWriteBuffer rtp_packet1to2(rtp_packet_data, rtp_len, - packet_size); - rtc::CopyOnWriteBuffer rtp_packet2to1(rtp_packet_data, rtp_len, - packet_size); + SetBE16(reinterpret_cast(rtp_packet_data) + 2, + ++sequence_number_); + CopyOnWriteBuffer rtp_packet1to2(rtp_packet_data, rtp_len, packet_size); + CopyOnWriteBuffer rtp_packet2to1(rtp_packet_data, rtp_len, packet_size); char original_rtp_data[sizeof(kPcmuFrame)]; memcpy(original_rtp_data, rtp_packet_data, rtp_len); - rtc::PacketOptions options; + AsyncSocketPacketOptions options; // Send a packet from `srtp_transport1_` to `srtp_transport2_` and verify // that the packet can be successfully received and decrypted. ASSERT_TRUE(srtp_transport1_->SendRtpPacket(&rtp_packet1to2, options, - cricket::PF_SRTP_BYPASS)); + PF_SRTP_BYPASS)); if (srtp_transport1_->IsExternalAuthActive()) { - TestRtpAuthParams(srtp_transport1_.get(), cipher_suite_name); + TestRtpAuthParams(srtp_transport1_.get(), crypto_suite); } else { ASSERT_TRUE(rtp_sink2_.last_recv_rtp_packet().data()); EXPECT_EQ(0, memcmp(rtp_sink2_.last_recv_rtp_packet().data(), original_rtp_data, rtp_len)); // Get the encrypted packet from underneath packet transport and verify // the data is actually encrypted. 
- auto fake_rtp_packet_transport = static_cast( + auto fake_rtp_packet_transport = static_cast( srtp_transport1_->rtp_packet_transport()); EXPECT_NE(0, memcmp(fake_rtp_packet_transport->last_sent_packet()->data(), original_rtp_data, rtp_len)); @@ -161,126 +162,117 @@ class SrtpTransportTest : public ::testing::Test, public sigslot::has_slots<> { // Do the same thing in the opposite direction; ASSERT_TRUE(srtp_transport2_->SendRtpPacket(&rtp_packet2to1, options, - cricket::PF_SRTP_BYPASS)); + PF_SRTP_BYPASS)); if (srtp_transport2_->IsExternalAuthActive()) { - TestRtpAuthParams(srtp_transport2_.get(), cipher_suite_name); + TestRtpAuthParams(srtp_transport2_.get(), crypto_suite); } else { ASSERT_TRUE(rtp_sink1_.last_recv_rtp_packet().data()); EXPECT_EQ(0, memcmp(rtp_sink1_.last_recv_rtp_packet().data(), original_rtp_data, rtp_len)); - auto fake_rtp_packet_transport = static_cast( + auto fake_rtp_packet_transport = static_cast( srtp_transport2_->rtp_packet_transport()); EXPECT_NE(0, memcmp(fake_rtp_packet_transport->last_sent_packet()->data(), original_rtp_data, rtp_len)); } } - void TestSendRecvRtcpPacket(const std::string& cipher_suite_name) { + void TestSendRecvRtcpPacket(int crypto_suite) { size_t rtcp_len = sizeof(::kRtcpReport); - size_t packet_size = - rtcp_len + 4 + rtc::rtcp_auth_tag_len(cipher_suite_name); - rtc::Buffer rtcp_packet_buffer(packet_size); + size_t packet_size = rtcp_len + 4 + rtcp_auth_tag_len(crypto_suite); + Buffer rtcp_packet_buffer(packet_size); char* rtcp_packet_data = rtcp_packet_buffer.data(); memcpy(rtcp_packet_data, ::kRtcpReport, rtcp_len); - rtc::CopyOnWriteBuffer rtcp_packet1to2(rtcp_packet_data, rtcp_len, - packet_size); - rtc::CopyOnWriteBuffer rtcp_packet2to1(rtcp_packet_data, rtcp_len, - packet_size); + CopyOnWriteBuffer rtcp_packet1to2(rtcp_packet_data, rtcp_len, packet_size); + CopyOnWriteBuffer rtcp_packet2to1(rtcp_packet_data, rtcp_len, packet_size); - rtc::PacketOptions options; + AsyncSocketPacketOptions options; // Send a packet from `srtp_transport1_` to `srtp_transport2_` and verify // that the packet can be successfully received and decrypted. ASSERT_TRUE(srtp_transport1_->SendRtcpPacket(&rtcp_packet1to2, options, - cricket::PF_SRTP_BYPASS)); + PF_SRTP_BYPASS)); ASSERT_TRUE(rtp_sink2_.last_recv_rtcp_packet().data()); EXPECT_EQ(0, memcmp(rtp_sink2_.last_recv_rtcp_packet().data(), rtcp_packet_data, rtcp_len)); // Get the encrypted packet from underneath packet transport and verify the // data is actually encrypted. 
- auto fake_rtp_packet_transport = static_cast( + auto fake_rtp_packet_transport = static_cast( srtp_transport1_->rtp_packet_transport()); EXPECT_NE(0, memcmp(fake_rtp_packet_transport->last_sent_packet()->data(), rtcp_packet_data, rtcp_len)); // Do the same thing in the opposite direction; ASSERT_TRUE(srtp_transport2_->SendRtcpPacket(&rtcp_packet2to1, options, - cricket::PF_SRTP_BYPASS)); + PF_SRTP_BYPASS)); ASSERT_TRUE(rtp_sink1_.last_recv_rtcp_packet().data()); EXPECT_EQ(0, memcmp(rtp_sink1_.last_recv_rtcp_packet().data(), rtcp_packet_data, rtcp_len)); - fake_rtp_packet_transport = static_cast( + fake_rtp_packet_transport = static_cast( srtp_transport2_->rtp_packet_transport()); EXPECT_NE(0, memcmp(fake_rtp_packet_transport->last_sent_packet()->data(), rtcp_packet_data, rtcp_len)); } void TestSendRecvPacket(bool enable_external_auth, - int cs, - const uint8_t* key1, - int key1_len, - const uint8_t* key2, - int key2_len, - const std::string& cipher_suite_name) { - EXPECT_EQ(key1_len, key2_len); - EXPECT_EQ(cipher_suite_name, rtc::SrtpCryptoSuiteToName(cs)); + int crypto_suite, + const ZeroOnFreeBuffer& key1, + const ZeroOnFreeBuffer& key2) { + EXPECT_EQ(key1.size(), key2.size()); if (enable_external_auth) { srtp_transport1_->EnableExternalAuth(); srtp_transport2_->EnableExternalAuth(); } std::vector extension_ids; EXPECT_TRUE(srtp_transport1_->SetRtpParams( - cs, key1, key1_len, extension_ids, cs, key2, key2_len, extension_ids)); + crypto_suite, key1, extension_ids, crypto_suite, key2, extension_ids)); EXPECT_TRUE(srtp_transport2_->SetRtpParams( - cs, key2, key2_len, extension_ids, cs, key1, key1_len, extension_ids)); + crypto_suite, key2, extension_ids, crypto_suite, key1, extension_ids)); EXPECT_TRUE(srtp_transport1_->SetRtcpParams( - cs, key1, key1_len, extension_ids, cs, key2, key2_len, extension_ids)); + crypto_suite, key1, extension_ids, crypto_suite, key2, extension_ids)); EXPECT_TRUE(srtp_transport2_->SetRtcpParams( - cs, key2, key2_len, extension_ids, cs, key1, key1_len, extension_ids)); + crypto_suite, key2, extension_ids, crypto_suite, key1, extension_ids)); EXPECT_TRUE(srtp_transport1_->IsSrtpActive()); EXPECT_TRUE(srtp_transport2_->IsSrtpActive()); - if (rtc::IsGcmCryptoSuite(cs)) { + if (IsGcmCryptoSuite(crypto_suite)) { EXPECT_FALSE(srtp_transport1_->IsExternalAuthActive()); EXPECT_FALSE(srtp_transport2_->IsExternalAuthActive()); } else if (enable_external_auth) { EXPECT_TRUE(srtp_transport1_->IsExternalAuthActive()); EXPECT_TRUE(srtp_transport2_->IsExternalAuthActive()); } - TestSendRecvRtpPacket(cipher_suite_name); - TestSendRecvRtcpPacket(cipher_suite_name); + TestSendRecvRtpPacket(crypto_suite); + TestSendRecvRtcpPacket(crypto_suite); } void TestSendRecvPacketWithEncryptedHeaderExtension( - const std::string& cs, + int crypto_suite, const std::vector& encrypted_header_ids) { size_t rtp_len = sizeof(kPcmuFrameWithExtensions); - size_t packet_size = rtp_len + rtc::rtp_auth_tag_len(cs); - rtc::Buffer rtp_packet_buffer(packet_size); + size_t packet_size = rtp_len + rtp_auth_tag_len(crypto_suite); + Buffer rtp_packet_buffer(packet_size); char* rtp_packet_data = rtp_packet_buffer.data(); memcpy(rtp_packet_data, kPcmuFrameWithExtensions, rtp_len); // In order to be able to run this test function multiple times we can not // use the same sequence number twice. Increase the sequence number by one. 
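The reworked TestSendRecvPacket above now receives its keys as ZeroOnFreeBuffer objects rather than raw pointer/length pairs, so the length travels with the key and the backing memory is wiped when the buffer goes away. A conceptual, stand-alone sketch of that idea, using a hypothetical ScrubbedKey type rather than the actual ZeroOnFreeBuffer implementation:

#include <cstddef>
#include <cstdint>
#include <vector>

// Hypothetical key holder illustrating the ZeroOnFreeBuffer idea: the size
// is carried with the data, and the bytes are zeroed before being freed.
class ScrubbedKey {
 public:
  ScrubbedKey(const uint8_t* data, size_t size) : bytes_(data, data + size) {}
  ~ScrubbedKey() {
    // Write through a volatile pointer so the scrub is not optimized away.
    volatile uint8_t* p = bytes_.data();
    for (size_t i = 0; i < bytes_.size(); ++i) p[i] = 0;
  }
  const uint8_t* data() const { return bytes_.data(); }
  size_t size() const { return bytes_.size(); }

 private:
  std::vector<uint8_t> bytes_;
};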
- rtc::SetBE16(reinterpret_cast(rtp_packet_data) + 2, - ++sequence_number_); - rtc::CopyOnWriteBuffer rtp_packet1to2(rtp_packet_data, rtp_len, - packet_size); - rtc::CopyOnWriteBuffer rtp_packet2to1(rtp_packet_data, rtp_len, - packet_size); + SetBE16(reinterpret_cast(rtp_packet_data) + 2, + ++sequence_number_); + CopyOnWriteBuffer rtp_packet1to2(rtp_packet_data, rtp_len, packet_size); + CopyOnWriteBuffer rtp_packet2to1(rtp_packet_data, rtp_len, packet_size); char original_rtp_data[sizeof(kPcmuFrameWithExtensions)]; memcpy(original_rtp_data, rtp_packet_data, rtp_len); - rtc::PacketOptions options; + AsyncSocketPacketOptions options; // Send a packet from `srtp_transport1_` to `srtp_transport2_` and verify // that the packet can be successfully received and decrypted. ASSERT_TRUE(srtp_transport1_->SendRtpPacket(&rtp_packet1to2, options, - cricket::PF_SRTP_BYPASS)); + PF_SRTP_BYPASS)); ASSERT_TRUE(rtp_sink2_.last_recv_rtp_packet().data()); EXPECT_EQ(0, memcmp(rtp_sink2_.last_recv_rtp_packet().data(), original_rtp_data, rtp_len)); // Get the encrypted packet from underneath packet transport and verify the // data and header extension are actually encrypted. - auto fake_rtp_packet_transport = static_cast( + auto fake_rtp_packet_transport = static_cast( srtp_transport1_->rtp_packet_transport()); EXPECT_NE(0, memcmp(fake_rtp_packet_transport->last_sent_packet()->data(), original_rtp_data, rtp_len)); @@ -292,11 +284,11 @@ class SrtpTransportTest : public ::testing::Test, public sigslot::has_slots<> { // Do the same thing in the opposite direction; ASSERT_TRUE(srtp_transport2_->SendRtpPacket(&rtp_packet2to1, options, - cricket::PF_SRTP_BYPASS)); + PF_SRTP_BYPASS)); ASSERT_TRUE(rtp_sink1_.last_recv_rtp_packet().data()); EXPECT_EQ(0, memcmp(rtp_sink1_.last_recv_rtp_packet().data(), original_rtp_data, rtp_len)); - fake_rtp_packet_transport = static_cast( + fake_rtp_packet_transport = static_cast( srtp_transport2_->rtp_packet_transport()); EXPECT_NE(0, memcmp(fake_rtp_packet_transport->last_sent_packet()->data(), original_rtp_data, rtp_len)); @@ -307,42 +299,40 @@ class SrtpTransportTest : public ::testing::Test, public sigslot::has_slots<> { original_rtp_data, rtp_len, encrypted_header_ids, false); } - void TestSendRecvEncryptedHeaderExtension(int cs, - const uint8_t* key1, - int key1_len, - const uint8_t* key2, - int key2_len, - const std::string& cs_name) { + void TestSendRecvEncryptedHeaderExtension( + int crypto_suite, + const ZeroOnFreeBuffer& key1, + const ZeroOnFreeBuffer& key2) { std::vector encrypted_headers; encrypted_headers.push_back(kHeaderExtensionIDs[0]); // Don't encrypt header ids 2 and 3. 
encrypted_headers.push_back(kHeaderExtensionIDs[1]); - EXPECT_EQ(key1_len, key2_len); - EXPECT_EQ(cs_name, rtc::SrtpCryptoSuiteToName(cs)); - EXPECT_TRUE(srtp_transport1_->SetRtpParams(cs, key1, key1_len, - encrypted_headers, cs, key2, - key2_len, encrypted_headers)); - EXPECT_TRUE(srtp_transport2_->SetRtpParams(cs, key2, key2_len, - encrypted_headers, cs, key1, - key1_len, encrypted_headers)); + EXPECT_EQ(key1.size(), key2.size()); + EXPECT_TRUE(srtp_transport1_->SetRtpParams(crypto_suite, key1, + encrypted_headers, crypto_suite, + key2, encrypted_headers)); + EXPECT_TRUE(srtp_transport2_->SetRtpParams(crypto_suite, key2, + encrypted_headers, crypto_suite, + key1, encrypted_headers)); EXPECT_TRUE(srtp_transport1_->IsSrtpActive()); EXPECT_TRUE(srtp_transport2_->IsSrtpActive()); EXPECT_FALSE(srtp_transport1_->IsExternalAuthActive()); EXPECT_FALSE(srtp_transport2_->IsExternalAuthActive()); - TestSendRecvPacketWithEncryptedHeaderExtension(cs_name, encrypted_headers); + TestSendRecvPacketWithEncryptedHeaderExtension(crypto_suite, + encrypted_headers); } std::unique_ptr srtp_transport1_; std::unique_ptr srtp_transport2_; - std::unique_ptr rtp_packet_transport1_; - std::unique_ptr rtp_packet_transport2_; + std::unique_ptr rtp_packet_transport1_; + std::unique_ptr rtp_packet_transport2_; TransportObserver rtp_sink1_; TransportObserver rtp_sink2_; int sequence_number_ = 0; - webrtc::test::ScopedKeyValueConfig field_trials_; + test::ScopedKeyValueConfig field_trials_; }; class SrtpTransportTestWithExternalAuth @@ -352,61 +342,53 @@ class SrtpTransportTestWithExternalAuth TEST_P(SrtpTransportTestWithExternalAuth, SendAndRecvPacket_AES_CM_128_HMAC_SHA1_80) { bool enable_external_auth = GetParam(); - TestSendRecvPacket(enable_external_auth, rtc::kSrtpAes128CmSha1_80, kTestKey1, - kTestKeyLen, kTestKey2, kTestKeyLen, - rtc::kCsAesCm128HmacSha1_80); + TestSendRecvPacket(enable_external_auth, kSrtpAes128CmSha1_80, kTestKey1, + kTestKey2); } TEST_F(SrtpTransportTest, SendAndRecvPacketWithHeaderExtension_AES_CM_128_HMAC_SHA1_80) { - TestSendRecvEncryptedHeaderExtension(rtc::kSrtpAes128CmSha1_80, kTestKey1, - kTestKeyLen, kTestKey2, kTestKeyLen, - rtc::kCsAesCm128HmacSha1_80); + TestSendRecvEncryptedHeaderExtension(kSrtpAes128CmSha1_80, kTestKey1, + kTestKey2); } TEST_P(SrtpTransportTestWithExternalAuth, SendAndRecvPacket_AES_CM_128_HMAC_SHA1_32) { bool enable_external_auth = GetParam(); - TestSendRecvPacket(enable_external_auth, rtc::kSrtpAes128CmSha1_32, kTestKey1, - kTestKeyLen, kTestKey2, kTestKeyLen, - rtc::kCsAesCm128HmacSha1_32); + TestSendRecvPacket(enable_external_auth, kSrtpAes128CmSha1_32, kTestKey1, + kTestKey2); } TEST_F(SrtpTransportTest, SendAndRecvPacketWithHeaderExtension_AES_CM_128_HMAC_SHA1_32) { - TestSendRecvEncryptedHeaderExtension(rtc::kSrtpAes128CmSha1_32, kTestKey1, - kTestKeyLen, kTestKey2, kTestKeyLen, - rtc::kCsAesCm128HmacSha1_32); + TestSendRecvEncryptedHeaderExtension(kSrtpAes128CmSha1_32, kTestKey1, + kTestKey2); } TEST_P(SrtpTransportTestWithExternalAuth, SendAndRecvPacket_kSrtpAeadAes128Gcm) { bool enable_external_auth = GetParam(); - TestSendRecvPacket(enable_external_auth, rtc::kSrtpAeadAes128Gcm, - kTestKeyGcm128_1, kTestKeyGcm128Len, kTestKeyGcm128_2, - kTestKeyGcm128Len, rtc::kCsAeadAes128Gcm); + TestSendRecvPacket(enable_external_auth, kSrtpAeadAes128Gcm, kTestKeyGcm128_1, + kTestKeyGcm128_2); } TEST_F(SrtpTransportTest, SendAndRecvPacketWithHeaderExtension_kSrtpAeadAes128Gcm) { - TestSendRecvEncryptedHeaderExtension( - rtc::kSrtpAeadAes128Gcm, kTestKeyGcm128_1, 
kTestKeyGcm128Len, - kTestKeyGcm128_2, kTestKeyGcm128Len, rtc::kCsAeadAes128Gcm); + TestSendRecvEncryptedHeaderExtension(kSrtpAeadAes128Gcm, kTestKeyGcm128_1, + kTestKeyGcm128_2); } TEST_P(SrtpTransportTestWithExternalAuth, SendAndRecvPacket_kSrtpAeadAes256Gcm) { bool enable_external_auth = GetParam(); - TestSendRecvPacket(enable_external_auth, rtc::kSrtpAeadAes256Gcm, - kTestKeyGcm256_1, kTestKeyGcm256Len, kTestKeyGcm256_2, - kTestKeyGcm256Len, rtc::kCsAeadAes256Gcm); + TestSendRecvPacket(enable_external_auth, kSrtpAeadAes256Gcm, kTestKeyGcm256_1, + kTestKeyGcm256_2); } TEST_F(SrtpTransportTest, SendAndRecvPacketWithHeaderExtension_kSrtpAeadAes256Gcm) { - TestSendRecvEncryptedHeaderExtension( - rtc::kSrtpAeadAes256Gcm, kTestKeyGcm256_1, kTestKeyGcm256Len, - kTestKeyGcm256_2, kTestKeyGcm256Len, rtc::kCsAeadAes256Gcm); + TestSendRecvEncryptedHeaderExtension(kSrtpAeadAes256Gcm, kTestKeyGcm256_1, + kTestKeyGcm256_2); } // Run all tests both with and without external auth enabled. @@ -418,11 +400,78 @@ INSTANTIATE_TEST_SUITE_P(ExternalAuth, TEST_F(SrtpTransportTest, TestSetParamsKeyTooShort) { std::vector extension_ids; EXPECT_FALSE(srtp_transport1_->SetRtpParams( - rtc::kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen - 1, extension_ids, - rtc::kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen - 1, extension_ids)); + kSrtpAes128CmSha1_80, + ZeroOnFreeBuffer(kTestKey1.data(), kTestKey1.size() - 1), + extension_ids, kSrtpAes128CmSha1_80, + ZeroOnFreeBuffer(kTestKey1.data(), kTestKey1.size() - 1), + extension_ids)); EXPECT_FALSE(srtp_transport1_->SetRtcpParams( - rtc::kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen - 1, extension_ids, - rtc::kSrtpAes128CmSha1_80, kTestKey1, kTestKeyLen - 1, extension_ids)); + kSrtpAes128CmSha1_80, + ZeroOnFreeBuffer(kTestKey1.data(), kTestKey1.size() - 1), + extension_ids, kSrtpAes128CmSha1_80, + ZeroOnFreeBuffer(kTestKey1.data(), kTestKey1.size() - 1), + extension_ids)); +} + +TEST_F(SrtpTransportTest, RemoveSrtpReceiveStream) { + test::ScopedKeyValueConfig field_trials( + "WebRTC-SrtpRemoveReceiveStream/Enabled/"); + auto srtp_transport = + std::make_unique(/*rtcp_mux_enabled=*/true, field_trials); + auto rtp_packet_transport = + std::make_unique("fake_packet_transport_loopback"); + + bool asymmetric = false; + rtp_packet_transport->SetDestination(rtp_packet_transport.get(), asymmetric); + srtp_transport->SetRtpPacketTransport(rtp_packet_transport.get()); + + TransportObserver rtp_sink; + + std::vector extension_ids; + EXPECT_TRUE(srtp_transport->SetRtpParams(kSrtpAeadAes128Gcm, kTestKeyGcm128_1, + extension_ids, kSrtpAeadAes128Gcm, + kTestKeyGcm128_1, extension_ids)); + + RtpDemuxerCriteria demuxer_criteria; + uint32_t ssrc = 0x1; // SSRC of kPcmuFrame + demuxer_criteria.ssrcs().insert(ssrc); + EXPECT_TRUE( + srtp_transport->RegisterRtpDemuxerSink(demuxer_criteria, &rtp_sink)); + + // Create a packet and try to send it three times. + size_t rtp_len = sizeof(kPcmuFrame); + size_t packet_size = rtp_len + rtp_auth_tag_len(kSrtpAeadAes128Gcm); + Buffer rtp_packet_buffer(packet_size); + char* rtp_packet_data = rtp_packet_buffer.data(); + memcpy(rtp_packet_data, kPcmuFrame, rtp_len); + + // First attempt will succeed. + CopyOnWriteBuffer first_try(rtp_packet_data, rtp_len, packet_size); + EXPECT_TRUE(srtp_transport->SendRtpPacket( + &first_try, AsyncSocketPacketOptions(), PF_SRTP_BYPASS)); + EXPECT_EQ(rtp_sink.rtp_count(), 1); + + // Second attempt will be rejected by libSRTP as a replay attack + // (srtp_err_status_replay_fail) since the sequence number was already seen. 
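The replay comment above is the crux of the RemoveSrtpReceiveStream test: once libSRTP has accepted a given sequence number for a stream, submitting it again fails with srtp_err_status_replay_fail until the stream's state is torn down and recreated. A toy, stand-alone sketch of that behaviour (libSRTP uses a sliding replay window, not a std::set):

#include <cstdint>
#include <set>

// Toy replay filter: a sequence number is accepted at most once until the
// stream state is reset, mirroring why the second attempt in the test is
// rejected while the third attempt, after the reset, is accepted.
class ReplayFilter {
 public:
  bool Accept(uint16_t sequence_number) {
    return seen_.insert(sequence_number).second;  // false == replay
  }
  void Reset() { seen_.clear(); }  // analogous to recreating the stream

 private:
  std::set<uint16_t> seen_;
};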
+ // Hence the packet never reaches the sink. + CopyOnWriteBuffer second_try(rtp_packet_data, rtp_len, packet_size); + EXPECT_TRUE(srtp_transport->SendRtpPacket( + &second_try, AsyncSocketPacketOptions(), PF_SRTP_BYPASS)); + EXPECT_EQ(rtp_sink.rtp_count(), 1); + + // Reset the sink. + EXPECT_TRUE(srtp_transport->UnregisterRtpDemuxerSink(&rtp_sink)); + EXPECT_TRUE( + srtp_transport->RegisterRtpDemuxerSink(demuxer_criteria, &rtp_sink)); + + // Third attempt will succeed again since libSRTP does not remember seeing + // the sequence number after the reset. + CopyOnWriteBuffer third_try(rtp_packet_data, rtp_len, packet_size); + EXPECT_TRUE(srtp_transport->SendRtpPacket( + &third_try, AsyncSocketPacketOptions(), PF_SRTP_BYPASS)); + EXPECT_EQ(rtp_sink.rtp_count(), 2); + // Clear the sink to clean up. + srtp_transport->UnregisterRtpDemuxerSink(&rtp_sink); } } // namespace webrtc diff --git a/pc/stream_collection.h b/pc/stream_collection.h index f0f3f07b4b..d417b77e91 100644 --- a/pc/stream_collection.h +++ b/pc/stream_collection.h @@ -22,13 +22,12 @@ namespace webrtc { // Implementation of StreamCollection. class StreamCollection : public StreamCollectionInterface { public: - static rtc::scoped_refptr Create() { - return rtc::make_ref_counted(); + static scoped_refptr Create() { + return make_ref_counted(); } - static rtc::scoped_refptr Create( - StreamCollection* streams) { - return rtc::make_ref_counted(streams); + static scoped_refptr Create(StreamCollection* streams) { + return make_ref_counted(streams); } virtual size_t count() { return media_streams_.size(); } @@ -69,7 +68,7 @@ class StreamCollection : public StreamCollectionInterface { return NULL; } - void AddStream(rtc::scoped_refptr stream) { + void AddStream(scoped_refptr stream) { for (StreamVector::iterator it = media_streams_.begin(); it != media_streams_.end(); ++it) { if ((*it)->id().compare(stream->id()) == 0) @@ -92,7 +91,7 @@ class StreamCollection : public StreamCollectionInterface { StreamCollection() {} explicit StreamCollection(StreamCollection* original) : media_streams_(original->media_streams_) {} - typedef std::vector > StreamVector; + typedef std::vector > StreamVector; StreamVector media_streams_; }; diff --git a/pc/test/android_test_initializer.cc b/pc/test/android_test_initializer.cc index 963544cb4b..db180acce2 100644 --- a/pc/test/android_test_initializer.cc +++ b/pc/test/android_test_initializer.cc @@ -37,9 +37,9 @@ void EnsureInitializedOnce() { JavaVM* jvm = NULL; RTC_CHECK_EQ(0, jni->GetJavaVM(&jvm)); - RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()"; + RTC_CHECK(webrtc::InitializeSSL()) << "Failed to InitializeSSL()"; - webrtc::JVM::Initialize(jvm); + JVM::Initialize(jvm); } } // anonymous namespace diff --git a/pc/test/enable_fake_media.cc b/pc/test/enable_fake_media.cc new file mode 100644 index 0000000000..3102e5faa1 --- /dev/null +++ b/pc/test/enable_fake_media.cc @@ -0,0 +1,61 @@ +/* + * Copyright 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "pc/test/enable_fake_media.h" + +#include +#include + +#include "absl/base/nullability.h" +#include "api/environment/environment.h" +#include "api/peer_connection_interface.h" +#include "call/call.h" +#include "call/call_config.h" +#include "media/base/fake_media_engine.h" +#include "media/base/media_engine.h" +#include "pc/media_factory.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +void EnableFakeMedia( + PeerConnectionFactoryDependencies& deps, + absl_nonnull std::unique_ptr fake_media_engine) { + class FakeMediaFactory : public MediaFactory { + public: + explicit FakeMediaFactory( + absl_nonnull std::unique_ptr fake) + : fake_(std::move(fake)) {} + + std::unique_ptr CreateCall(CallConfig config) override { + return Call::Create(std::move(config)); + } + + std::unique_ptr CreateMediaEngine( + const Environment& /*env*/, + PeerConnectionFactoryDependencies& /*dependencies*/) { + RTC_CHECK(fake_ != nullptr) + << "CreateMediaEngine can be called at most once."; + return std::move(fake_); + } + + private: + absl_nullable std::unique_ptr fake_; + }; + + deps.media_factory = + std::make_unique(std::move(fake_media_engine)); +} + +void EnableFakeMedia(PeerConnectionFactoryDependencies& deps) { + EnableFakeMedia(deps, std::make_unique()); +} + +} // namespace webrtc diff --git a/pc/test/enable_fake_media.h b/pc/test/enable_fake_media.h new file mode 100644 index 0000000000..d8b5bfdc71 --- /dev/null +++ b/pc/test/enable_fake_media.h @@ -0,0 +1,38 @@ +/* + * Copyright 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// Enables fake media support for PeerConnnectionFactory created from `deps` for +// testing purposes. Such fake media support ignores media dependencies in the +// `PeerConnectionFactoryDependencies`. Allows to test PeerConnection and +// PeerConnectionFactory in the presence of the media, but doesn't test media +// support itself. + +#ifndef PC_TEST_ENABLE_FAKE_MEDIA_H_ +#define PC_TEST_ENABLE_FAKE_MEDIA_H_ + +#include + +#include "absl/base/nullability.h" +#include "api/peer_connection_interface.h" +#include "media/base/fake_media_engine.h" + +namespace webrtc { + +// Enables media support backed by the 'fake_media_engine'. +void EnableFakeMedia( + PeerConnectionFactoryDependencies& deps, + absl_nonnull std::unique_ptr fake_media_engine); + +// Enables media support backed by unspecified lightweight fake implementation. 
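EnableFakeMedia() above works by replacing the media_factory dependency with one that hands out a pre-built FakeMediaEngine, which is why its CreateMediaEngine is only expected to be called once. A simplified, stand-alone sketch of that injection pattern, with Engine/EngineFactory standing in for the WebRTC media engine and MediaFactory types:

#include <memory>
#include <utility>

// Stand-in types illustrating the dependency-injection pattern behind
// EnableFakeMedia(); none of these are the actual WebRTC classes.
class Engine {
 public:
  virtual ~Engine() = default;
};
class FakeEngine : public Engine {};

class EngineFactory {
 public:
  virtual ~EngineFactory() = default;
  virtual std::unique_ptr<Engine> CreateEngine() = 0;
};

class FakeEngineFactory : public EngineFactory {
 public:
  explicit FakeEngineFactory(std::unique_ptr<Engine> fake)
      : fake_(std::move(fake)) {}
  std::unique_ptr<Engine> CreateEngine() override {
    // Like FakeMediaFactory::CreateMediaEngine, callable at most once
    // because the pre-built fake is moved out.
    return std::move(fake_);
  }

 private:
  std::unique_ptr<Engine> fake_;
};

struct Dependencies {
  std::unique_ptr<EngineFactory> engine_factory;
};

// Analogue of EnableFakeMedia(deps): swap in the fake-backed factory.
void EnableFakeEngine(Dependencies& deps) {
  deps.engine_factory =
      std::make_unique<FakeEngineFactory>(std::make_unique<FakeEngine>());
}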
+void EnableFakeMedia(PeerConnectionFactoryDependencies& deps); + +} // namespace webrtc + +#endif // PC_TEST_ENABLE_FAKE_MEDIA_H_ diff --git a/pc/test/fake_audio_capture_module.cc b/pc/test/fake_audio_capture_module.cc index 6ffa18c886..b9e429471b 100644 --- a/pc/test/fake_audio_capture_module.cc +++ b/pc/test/fake_audio_capture_module.cc @@ -12,9 +12,15 @@ #include +#include + +#include "api/audio/audio_device_defines.h" #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/units/time_delta.h" #include "rtc_base/checks.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" @@ -52,8 +58,8 @@ FakeAudioCaptureModule::~FakeAudioCaptureModule() { } } -rtc::scoped_refptr FakeAudioCaptureModule::Create() { - auto capture_module = rtc::make_ref_counted(); +webrtc::scoped_refptr FakeAudioCaptureModule::Create() { + auto capture_module = webrtc::make_ref_counted(); if (!capture_module->Initialize()) { return nullptr; } @@ -421,7 +427,7 @@ bool FakeAudioCaptureModule::ShouldStartProcessing() { void FakeAudioCaptureModule::UpdateProcessing(bool start) { if (start) { if (!process_thread_) { - process_thread_ = rtc::Thread::Create(); + process_thread_ = webrtc::Thread::Create(); process_thread_->Start(); } process_thread_->PostTask([this] { StartProcessP(); }); @@ -453,7 +459,7 @@ void FakeAudioCaptureModule::ProcessFrameP() { { webrtc::MutexLock lock(&mutex_); if (!started_) { - next_frame_time_ = rtc::TimeMillis(); + next_frame_time_ = webrtc::TimeMillis(); started_ = true; } @@ -467,7 +473,7 @@ void FakeAudioCaptureModule::ProcessFrameP() { } next_frame_time_ += kTimePerFrameMs; - const int64_t current_time = rtc::TimeMillis(); + const int64_t current_time = webrtc::TimeMillis(); const int64_t wait_time = (next_frame_time_ > current_time) ? next_frame_time_ - current_time : 0; process_thread_->PostDelayedTask([this] { ProcessFrameP(); }, diff --git a/pc/test/fake_audio_capture_module.h b/pc/test/fake_audio_capture_module.h index c04373cdfd..b5b1574818 100644 --- a/pc/test/fake_audio_capture_module.h +++ b/pc/test/fake_audio_capture_module.h @@ -24,18 +24,16 @@ #include #include +#include +#include "api/audio/audio_device.h" +#include "api/audio/audio_device_defines.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" -namespace rtc { -class Thread; -} // namespace rtc - class FakeAudioCaptureModule : public webrtc::AudioDeviceModule { public: typedef uint16_t Sample; @@ -46,7 +44,7 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule { static const size_t kNumberBytesPerSample = sizeof(Sample); // Creates a FakeAudioCaptureModule or returns NULL on failure. - static rtc::scoped_refptr Create(); + static webrtc::scoped_refptr Create(); // Returns the number of frames that have been successfully pulled by the // instance. 
Note that correctly detecting success can only be done if the @@ -137,7 +135,7 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule { int32_t GetPlayoutUnderrunCount() const override { return -1; } - absl::optional GetStats() const override { + std::optional GetStats() const override { return webrtc::AudioDeviceModule::Stats(); } #if defined(WEBRTC_IOS) @@ -214,7 +212,7 @@ class FakeAudioCaptureModule : public webrtc::AudioDeviceModule { bool started_ RTC_GUARDED_BY(mutex_); int64_t next_frame_time_ RTC_GUARDED_BY(process_thread_checker_); - std::unique_ptr process_thread_; + std::unique_ptr process_thread_; // Buffer for storing samples received from the webrtc::AudioTransport. char rec_buffer_[kNumberSamples * kNumberBytesPerSample]; diff --git a/pc/test/fake_audio_capture_module_unittest.cc b/pc/test/fake_audio_capture_module_unittest.cc index 64141b13a9..8a81e88bbc 100644 --- a/pc/test/fake_audio_capture_module_unittest.cc +++ b/pc/test/fake_audio_capture_module_unittest.cc @@ -13,11 +13,17 @@ #include #include +#include +#include "api/audio/audio_device_defines.h" #include "api/scoped_refptr.h" -#include "rtc_base/gunit.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" class FakeAdmTest : public ::testing::Test, public webrtc::AudioTransport { protected: @@ -99,7 +105,7 @@ class FakeAdmTest : public ::testing::Test, public webrtc::AudioTransport { return pull_iterations_; } - rtc::scoped_refptr fake_audio_capture_module_; + webrtc::scoped_refptr fake_audio_capture_module_; private: bool RecordedDataReceived() const { return rec_buffer_bytes_ != 0; } @@ -115,7 +121,7 @@ class FakeAdmTest : public ::testing::Test, public webrtc::AudioTransport { return min_buffer_size; } - rtc::AutoThread main_thread_; + webrtc::AutoThread main_thread_; mutable webrtc::Mutex mutex_; @@ -151,7 +157,10 @@ TEST_F(FakeAdmTest, PlayoutTest) { EXPECT_EQ(0, fake_audio_capture_module_->PlayoutDelay(&delay_ms)); EXPECT_EQ(0, delay_ms); - EXPECT_TRUE_WAIT(pull_iterations() > 0, kMsInSecond); + EXPECT_THAT( + webrtc::WaitUntil([&] { return pull_iterations(); }, ::testing::Gt(0), + {.timeout = webrtc::TimeDelta::Millis(kMsInSecond)}), + webrtc::IsRtcOk()); EXPECT_GE(0, push_iterations()); EXPECT_EQ(0, fake_audio_capture_module_->StopPlayout()); @@ -174,7 +183,10 @@ TEST_F(FakeAdmTest, RecordTest) { EXPECT_EQ(0, fake_audio_capture_module_->StartRecording()); EXPECT_TRUE(fake_audio_capture_module_->Recording()); - EXPECT_TRUE_WAIT(push_iterations() > 0, kMsInSecond); + EXPECT_THAT( + webrtc::WaitUntil([&] { return push_iterations(); }, ::testing::Gt(0), + {.timeout = webrtc::TimeDelta::Millis(kMsInSecond)}), + webrtc::IsRtcOk()); EXPECT_GE(0, pull_iterations()); EXPECT_EQ(0, fake_audio_capture_module_->StopRecording()); @@ -190,8 +202,14 @@ TEST_F(FakeAdmTest, DuplexTest) { EXPECT_EQ(0, fake_audio_capture_module_->InitRecording()); EXPECT_EQ(0, fake_audio_capture_module_->StartRecording()); - EXPECT_TRUE_WAIT(push_iterations() > 0, kMsInSecond); - EXPECT_TRUE_WAIT(pull_iterations() > 0, kMsInSecond); + EXPECT_THAT( + webrtc::WaitUntil([&] { return push_iterations(); }, ::testing::Gt(0), + {.timeout = webrtc::TimeDelta::Millis(kMsInSecond)}), + webrtc::IsRtcOk()); + EXPECT_THAT( + webrtc::WaitUntil([&] { return pull_iterations(); }, ::testing::Gt(0), + {.timeout = webrtc::TimeDelta::Millis(kMsInSecond)}), + 
webrtc::IsRtcOk()); EXPECT_EQ(0, fake_audio_capture_module_->StopPlayout()); EXPECT_EQ(0, fake_audio_capture_module_->StopRecording()); diff --git a/pc/test/fake_codec_lookup_helper.h b/pc/test/fake_codec_lookup_helper.h new file mode 100644 index 0000000000..1a793e6043 --- /dev/null +++ b/pc/test/fake_codec_lookup_helper.h @@ -0,0 +1,54 @@ +/* + * Copyright 2025 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_TEST_FAKE_CODEC_LOOKUP_HELPER_H_ +#define PC_TEST_FAKE_CODEC_LOOKUP_HELPER_H_ + +#include + +#include "call/payload_type.h" +#include "pc/codec_vendor.h" +#include "pc/connection_context.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +class FakeCodecLookupHelper : public CodecLookupHelper { + public: + explicit FakeCodecLookupHelper(ConnectionContext* context) + : context_(context), + codec_vendor_(std::make_unique<::webrtc::CodecVendor>( + context->media_engine(), + context->use_rtx(), + context->env().field_trials())) {} + webrtc::PayloadTypeSuggester* PayloadTypeSuggester() override { + // Not used in this test. + RTC_CHECK_NOTREACHED(); + return nullptr; + } + + CodecVendor* GetCodecVendor() override { return codec_vendor_.get(); } + // Recreate the codec vendor. + // Used by tests that manipulate the factory's codecs and expect the + // result to show up in the codec vendor's output. + void Reset() { + codec_vendor_ = std::make_unique<::webrtc::CodecVendor>( + context_->media_engine(), context_->use_rtx(), + context_->env().field_trials()); + } + + private: + ConnectionContext* context_; + std::unique_ptr<::webrtc::CodecVendor> codec_vendor_; +}; + +} // namespace webrtc + +#endif // PC_TEST_FAKE_CODEC_LOOKUP_HELPER_H_ diff --git a/pc/test/fake_data_channel_controller.h b/pc/test/fake_data_channel_controller.h index c489a34324..1fa865cdf7 100644 --- a/pc/test/fake_data_channel_controller.h +++ b/pc/test/fake_data_channel_controller.h @@ -11,19 +11,32 @@ #ifndef PC_TEST_FAKE_DATA_CHANNEL_CONTROLLER_H_ #define PC_TEST_FAKE_DATA_CHANNEL_CONTROLLER_H_ +#include #include #include #include +#include "absl/algorithm/container.h" +#include "absl/strings/string_view.h" +#include "api/data_channel_interface.h" +#include "api/priority.h" +#include "api/rtc_error.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/transport/data_channel_transport_interface.h" #include "pc/sctp_data_channel.h" +#include "pc/sctp_utils.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/weak_ptr.h" class FakeDataChannelController : public webrtc::SctpDataChannelControllerInterface { public: - explicit FakeDataChannelController(rtc::Thread* network_thread) - : signaling_thread_(rtc::Thread::Current()), + explicit FakeDataChannelController(webrtc::Thread* network_thread) + : signaling_thread_(webrtc::Thread::Current()), network_thread_(network_thread), send_blocked_(false), transport_available_(false), @@ -37,30 +50,30 @@ class FakeDataChannelController }); } - rtc::WeakPtr weak_ptr() { + webrtc::WeakPtr weak_ptr() { RTC_DCHECK_RUN_ON(network_thread_); return weak_factory_.GetWeakPtr(); } - rtc::scoped_refptr 
CreateDataChannel( + webrtc::scoped_refptr CreateDataChannel( absl::string_view label, webrtc::InternalDataChannelInit init) { - rtc::scoped_refptr channel = + webrtc::scoped_refptr channel = network_thread_->BlockingCall([&]() { RTC_DCHECK_RUN_ON(network_thread_); - rtc::WeakPtr my_weak_ptr = weak_ptr(); + webrtc::WeakPtr my_weak_ptr = weak_ptr(); // Explicitly associate the weak ptr instance with the current thread // to catch early any inappropriate referencing of it on the network // thread. RTC_CHECK(my_weak_ptr); - rtc::scoped_refptr channel = + webrtc::scoped_refptr channel = webrtc::SctpDataChannel::Create( std::move(my_weak_ptr), std::string(label), transport_available_, init, signaling_thread_, network_thread_); - if (transport_available_ && channel->sid_n().HasValue()) { - AddSctpDataStream(channel->sid_n()); + if (transport_available_ && channel->sid_n().has_value()) { + AddSctpDataStream(*channel->sid_n(), channel->priority()); } if (ready_to_send_) { network_thread_->PostTask([channel = channel] { @@ -78,7 +91,7 @@ class FakeDataChannelController webrtc::RTCError SendData(webrtc::StreamId sid, const webrtc::SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload) override { + const webrtc::CopyOnWriteBuffer& payload) override { RTC_DCHECK_RUN_ON(network_thread_); RTC_CHECK(ready_to_send_); RTC_CHECK(transport_available_); @@ -95,9 +108,9 @@ class FakeDataChannelController return webrtc::RTCError::OK(); } - void AddSctpDataStream(webrtc::StreamId sid) override { + void AddSctpDataStream(webrtc::StreamId sid, + webrtc::PriorityValue priority) override { RTC_DCHECK_RUN_ON(network_thread_); - RTC_CHECK(sid.HasValue()); if (!transport_available_) { return; } @@ -106,7 +119,6 @@ class FakeDataChannelController void RemoveSctpDataStream(webrtc::StreamId sid) override { RTC_DCHECK_RUN_ON(network_thread_); - RTC_CHECK(sid.HasValue()); known_stream_ids_.erase(sid); // Unlike the real SCTP transport, act like the closing procedure finished // instantly. @@ -130,6 +142,13 @@ class FakeDataChannelController } } + size_t buffered_amount(webrtc::StreamId sid) const override { return 0; } + size_t buffered_amount_low_threshold(webrtc::StreamId sid) const override { + return 0; + } + void SetBufferedAmountLowThreshold(webrtc::StreamId sid, + size_t bytes) override {} + // Set true to emulate the SCTP stream being blocked by congestion control. 
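On the congestion-control comment above: a test double typically emulates a blocked SCTP stream by refusing (or deferring) writes while a flag is set and letting the test flip it back. A stand-alone sketch of one such approach; this is an illustration only, not FakeDataChannelController's actual behaviour:

#include <cstddef>
#include <cstdint>
#include <vector>

// Illustrative sender: writes fail while "blocked", and the test controls
// the flag directly, mimicking set_send_blocked() in spirit only.
class BlockableSender {
 public:
  void set_send_blocked(bool blocked) { blocked_ = blocked; }
  bool Send(const std::vector<uint8_t>& payload) {
    if (blocked_) return false;  // caller retries once unblocked
    sent_.push_back(payload);
    return true;
  }
  size_t sent_count() const { return sent_.size(); }

 private:
  bool blocked_ = false;
  std::vector<std::vector<uint8_t>> sent_;
};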
void set_send_blocked(bool blocked) { network_thread_->BlockingCall([&]() { @@ -219,8 +238,8 @@ class FakeDataChannelController } private: - rtc::Thread* const signaling_thread_; - rtc::Thread* const network_thread_; + webrtc::Thread* const signaling_thread_; + webrtc::Thread* const network_thread_; webrtc::StreamId last_sid_ RTC_GUARDED_BY(network_thread_); webrtc::SendDataParams last_send_data_params_ RTC_GUARDED_BY(network_thread_); bool send_blocked_ RTC_GUARDED_BY(network_thread_); @@ -232,7 +251,7 @@ class FakeDataChannelController std::set connected_channels_ RTC_GUARDED_BY(network_thread_); std::set known_stream_ids_ RTC_GUARDED_BY(network_thread_); - rtc::WeakPtrFactory weak_factory_ + webrtc::WeakPtrFactory weak_factory_ RTC_GUARDED_BY(network_thread_){this}; }; #endif // PC_TEST_FAKE_DATA_CHANNEL_CONTROLLER_H_ diff --git a/pc/test/fake_peer_connection_base.h b/pc/test/fake_peer_connection_base.h index 743c18122a..10e54cab6b 100644 --- a/pc/test/fake_peer_connection_base.h +++ b/pc/test/fake_peer_connection_base.h @@ -11,16 +11,58 @@ #ifndef PC_TEST_FAKE_PEER_CONNECTION_BASE_H_ #define PC_TEST_FAKE_PEER_CONNECTION_BASE_H_ +#include #include #include +#include #include #include +#include #include -#include "absl/types/optional.h" +#include "absl/strings/string_view.h" +#include "api/adaptation/resource.h" +#include "api/audio/audio_device.h" +#include "api/candidate.h" +#include "api/crypto/crypto_options.h" +#include "api/data_channel_event_observer_interface.h" +#include "api/data_channel_interface.h" +#include "api/dtls_transport_interface.h" #include "api/field_trials_view.h" +#include "api/jsep.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtc_event_log_output.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" #include "api/sctp_transport_interface.h" +#include "api/set_remote_description_observer_interface.h" +#include "api/stats/rtc_stats_collector_callback.h" +#include "api/transport/bandwidth_estimation_settings.h" +#include "api/transport/bitrate_settings.h" +#include "api/transport/network_control.h" +#include "call/call.h" +#include "call/payload_type_picker.h" +#include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" +#include "pc/jsep_transport_controller.h" #include "pc/peer_connection_internal.h" +#include "pc/peer_connection_message_handler.h" +#include "pc/rtp_transceiver.h" +#include "pc/rtp_transmission_manager.h" +#include "pc/session_description.h" +#include "pc/transport_stats.h" +#include "pc/usage_pattern.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/thread.h" #include "test/scoped_key_value_config.h" namespace webrtc { @@ -33,11 +75,11 @@ class FakePeerConnectionBase : public PeerConnectionInternal { public: // PeerConnectionInterface implementation. 
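FakePeerConnectionBase exists so derived test fakes only override the methods they actually exercise; everything else gets a harmless default, and the static_assert added at the bottom of this header keeps the class non-abstract as PeerConnectionInterface grows. A stand-alone sketch of that pattern, with WidgetInterface as a stand-in interface:

#include <type_traits>

// Stand-in interface; imagine it growing new pure-virtual methods over
// time, as PeerConnectionInterface does.
class WidgetInterface {
 public:
  virtual ~WidgetInterface() = default;
  virtual int Count() const = 0;
  virtual bool Close() = 0;
};

// Fake base: stub every method with a benign default so test fakes can
// override only what they need.
class FakeWidgetBase : public WidgetInterface {
 public:
  int Count() const override { return 0; }
  bool Close() override { return false; }
};

// Fails to compile if a newly added pure-virtual method is left unstubbed,
// mirroring the static_assert guarding FakePeerConnectionBase.
static_assert(!std::is_abstract_v<FakeWidgetBase>, "stub every method");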
- rtc::scoped_refptr local_streams() override { + scoped_refptr local_streams() override { return nullptr; } - rtc::scoped_refptr remote_streams() override { + scoped_refptr remote_streams() override { return nullptr; } @@ -45,63 +87,62 @@ class FakePeerConnectionBase : public PeerConnectionInternal { void RemoveStream(MediaStreamInterface* stream) override {} - RTCErrorOr> AddTrack( - rtc::scoped_refptr track, + RTCErrorOr> AddTrack( + scoped_refptr track, const std::vector& stream_ids) override { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); } - RTCErrorOr> AddTrack( - rtc::scoped_refptr track, + RTCErrorOr> AddTrack( + scoped_refptr track, const std::vector& stream_ids, const std::vector& init_send_encodings) override { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); } RTCError RemoveTrackOrError( - rtc::scoped_refptr sender) override { + scoped_refptr sender) override { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION); } - RTCErrorOr> AddTransceiver( - rtc::scoped_refptr track) override { + RTCErrorOr> AddTransceiver( + scoped_refptr track) override { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); } - RTCErrorOr> AddTransceiver( - rtc::scoped_refptr track, + RTCErrorOr> AddTransceiver( + scoped_refptr track, const RtpTransceiverInit& init) override { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); } - RTCErrorOr> AddTransceiver( - cricket::MediaType media_type) override { + RTCErrorOr> AddTransceiver( + webrtc::MediaType media_type) override { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); } - RTCErrorOr> AddTransceiver( - cricket::MediaType media_type, + RTCErrorOr> AddTransceiver( + webrtc::MediaType media_type, const RtpTransceiverInit& init) override { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); } - rtc::scoped_refptr CreateSender( + scoped_refptr CreateSender( const std::string& kind, const std::string& stream_id) override { return nullptr; } - std::vector> GetSenders() - const override { + std::vector> GetSenders() const override { return {}; } - std::vector> GetReceivers() + std::vector> GetReceivers() const override { return {}; } - std::vector> GetTransceivers() + std::vector> GetTransceivers() const override { return {}; } @@ -113,20 +154,18 @@ class FakePeerConnectionBase : public PeerConnectionInternal { } void GetStats(RTCStatsCollectorCallback* callback) override {} - void GetStats( - rtc::scoped_refptr selector, - rtc::scoped_refptr callback) override {} - void GetStats( - rtc::scoped_refptr selector, - rtc::scoped_refptr callback) override {} + void GetStats(scoped_refptr selector, + scoped_refptr callback) override {} + void GetStats(scoped_refptr selector, + scoped_refptr callback) override {} void ClearStatsCache() override {} - rtc::scoped_refptr GetSctpTransport() const { + scoped_refptr GetSctpTransport() const { return nullptr; } - RTCErrorOr> CreateDataChannelOrError( + RTCErrorOr> CreateDataChannelOrError( const std::string& label, const DataChannelInit* config) override { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, @@ -174,8 +213,9 @@ class FakePeerConnectionBase : public PeerConnectionInternal { void SetRemoteDescription( std::unique_ptr desc, - rtc::scoped_refptr observer) - override {} + scoped_refptr observer) override {} + + bool ShouldFireNegotiationNeededEvent(uint32_t event_id) { return true; } RTCConfiguration GetConfiguration() override { return RTCConfiguration(); } @@ -188,8 
+228,7 @@ class FakePeerConnectionBase : public PeerConnectionInternal { return false; } - bool RemoveIceCandidates( - const std::vector& candidates) override { + bool RemoveIceCandidates(const std::vector& candidates) override { return false; } @@ -197,11 +236,14 @@ class FakePeerConnectionBase : public PeerConnectionInternal { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); } + void ReconfigureBandwidthEstimation( + const BandwidthEstimationSettings& settings) override {} + void SetAudioPlayout(bool playout) override {} void SetAudioRecording(bool recording) override {} - rtc::scoped_refptr LookupDtlsTransportByMid( + scoped_refptr LookupDtlsTransportByMid( const std::string& mid) { return nullptr; } @@ -224,7 +266,9 @@ class FakePeerConnectionBase : public PeerConnectionInternal { return IceGatheringState::kIceGatheringNew; } - absl::optional can_trickle_ice_candidates() { return absl::nullopt; } + std::optional can_trickle_ice_candidates() { return std::nullopt; } + + void AddAdaptationResource(scoped_refptr resource) {} bool StartRtcEventLog(std::unique_ptr output, int64_t output_period_ms) override { @@ -235,52 +279,52 @@ class FakePeerConnectionBase : public PeerConnectionInternal { return false; } + void SetDataChannelEventObserver( + std::unique_ptr observer) override {} + void StopRtcEventLog() override {} void Close() override {} // PeerConnectionInternal implementation. - rtc::Thread* network_thread() const override { return nullptr; } - rtc::Thread* worker_thread() const override { return nullptr; } - rtc::Thread* signaling_thread() const override { return nullptr; } + Thread* network_thread() const override { return nullptr; } + Thread* worker_thread() const override { return nullptr; } + Thread* signaling_thread() const override { return nullptr; } std::string session_id() const override { return ""; } bool initial_offerer() const override { return false; } - std::vector< - rtc::scoped_refptr>> + std::vector>> GetTransceiversInternal() const override { return {}; } - absl::optional sctp_transport_name() const override { - return absl::nullopt; + std::optional sctp_transport_name() const override { + return std::nullopt; } - absl::optional sctp_mid() const override { - return absl::nullopt; - } + std::optional sctp_mid() const override { return std::nullopt; } - std::map GetTransportStatsByNames( + std::map GetTransportStatsByNames( const std::set& transport_names) override { return {}; } Call::Stats GetCallStats() override { return Call::Stats(); } - absl::optional GetAudioDeviceStats() override { - return absl::nullopt; + std::optional GetAudioDeviceStats() override { + return std::nullopt; } bool GetLocalCertificate( const std::string& transport_name, - rtc::scoped_refptr* certificate) override { + scoped_refptr* certificate) override { return false; } - std::unique_ptr GetRemoteSSLCertChain( + std::unique_ptr GetRemoteSSLCertChain( const std::string& transport_name) override { return nullptr; } @@ -293,8 +337,7 @@ class FakePeerConnectionBase : public PeerConnectionInternal { return false; } - bool GetSslRole(const std::string& content_name, - rtc::SSLRole* role) override { + bool GetSslRole(const std::string& content_name, SSLRole* role) override { return false; } const PeerConnectionInterface::RTCConfiguration* configuration() @@ -317,12 +360,10 @@ class FakePeerConnectionBase : public PeerConnectionInternal { JsepTransportController* transport_controller_s() override { return nullptr; } JsepTransportController* transport_controller_n() override 
{ return nullptr; } DataChannelController* data_channel_controller() override { return nullptr; } - cricket::PortAllocator* port_allocator() override { return nullptr; } + PortAllocator* port_allocator() override { return nullptr; } LegacyStatsCollector* legacy_stats() override { return nullptr; } PeerConnectionObserver* Observer() const override { return nullptr; } - absl::optional GetSctpSslRole_n() override { - return absl::nullopt; - } + std::optional GetSctpSslRole_n() override { return std::nullopt; } PeerConnectionInterface::IceConnectionState ice_connection_state_internal() override { return PeerConnectionInterface::IceConnectionState::kIceConnectionNew; @@ -332,47 +373,54 @@ class FakePeerConnectionBase : public PeerConnectionInternal { void NoteUsageEvent(UsageEvent event) override {} bool IsClosed() const override { return false; } bool IsUnifiedPlan() const override { return true; } - bool ValidateBundleSettings( - const cricket::SessionDescription* desc, - const std::map& - bundle_groups_by_mid) override { + bool ValidateBundleSettings(const SessionDescription* desc, + const std::map& + bundle_groups_by_mid) override { return false; } - absl::optional GetDataMid() const override { - return absl::nullopt; - } - RTCErrorOr> AddTransceiver( - cricket::MediaType media_type, - rtc::scoped_refptr track, + RTCErrorOr> AddTransceiver( + webrtc::MediaType media_type, + scoped_refptr track, const RtpTransceiverInit& init, bool fire_callback = true) override { return RTCError(RTCErrorType::INTERNAL_ERROR, ""); } - void StartSctpTransport(int local_port, - int remote_port, - int max_message_size) override {} + RTCError StartSctpTransport(const SctpOptions& options) override { + return RTCError::OK(); + } - void AddRemoteCandidate(const std::string& mid, - const cricket::Candidate& candidate) override {} + void AddRemoteCandidate(absl::string_view mid, + const Candidate& candidate) override {} Call* call_ptr() override { return nullptr; } bool SrtpRequired() const override { return false; } - absl::optional SetupDataChannelTransport_n( - absl::string_view mid) override { - return absl::nullopt; + bool CreateDataChannelTransport(absl::string_view mid) override { + return false; } - void TeardownDataChannelTransport_n(RTCError error) override {} - void SetSctpDataInfo(absl::string_view mid, - absl::string_view transport_name) override {} - void ResetSctpDataInfo() override {} + void DestroyDataChannelTransport(RTCError error) override {} const FieldTrialsView& trials() const override { return field_trials_; } + NetworkControllerInterface* GetNetworkController() override { + return nullptr; + } + + PayloadTypePicker& payload_type_picker() override { + return payload_type_picker_; + } + + CandidateStatsList GetPooledCandidateStats() const override { return {}; } + protected: - webrtc::test::ScopedKeyValueConfig field_trials_; + test::ScopedKeyValueConfig field_trials_; + PayloadTypePicker payload_type_picker_; }; +static_assert( + !std::is_abstract_v>, + ""); + } // namespace webrtc #endif // PC_TEST_FAKE_PEER_CONNECTION_BASE_H_ diff --git a/pc/test/fake_peer_connection_for_stats.h b/pc/test/fake_peer_connection_for_stats.h index 7302182912..809b03efc1 100644 --- a/pc/test/fake_peer_connection_for_stats.h +++ b/pc/test/fake_peer_connection_for_stats.h @@ -13,36 +13,68 @@ #include #include +#include #include #include #include #include +#include "absl/strings/string_view.h" +#include "api/audio/audio_device.h" +#include "api/audio_options.h" +#include "api/crypto/crypto_options.h" +#include 
"api/environment/environment_factory.h" +#include "api/make_ref_counted.h" +#include "api/media_types.h" +#include "api/peer_connection_interface.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "call/call.h" +#include "call/payload_type_picker.h" #include "media/base/fake_media_engine.h" #include "media/base/media_channel.h" +#include "p2p/base/p2p_constants.h" +#include "p2p/base/port.h" #include "pc/channel.h" +#include "pc/connection_context.h" +#include "pc/data_channel_utils.h" +#include "pc/rtp_receiver.h" +#include "pc/rtp_receiver_proxy.h" +#include "pc/rtp_sender.h" +#include "pc/rtp_sender_proxy.h" +#include "pc/rtp_transceiver.h" +#include "pc/sctp_data_channel.h" #include "pc/stream_collection.h" +#include "pc/test/enable_fake_media.h" +#include "pc/test/fake_codec_lookup_helper.h" #include "pc/test/fake_data_channel_controller.h" #include "pc/test/fake_peer_connection_base.h" +#include "pc/transport_stats.h" +#include "rtc_base/checks.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/thread.h" +#include "rtc_base/unique_id_generator.h" namespace webrtc { // Fake VoiceMediaChannel where the result of GetStats can be configured. -class FakeVoiceMediaSendChannelForStats - : public cricket::FakeVoiceMediaSendChannel { +class FakeVoiceMediaSendChannelForStats : public FakeVoiceMediaSendChannel { public: explicit FakeVoiceMediaSendChannelForStats(TaskQueueBase* network_thread) - : cricket::FakeVoiceMediaSendChannel(cricket::AudioOptions(), - network_thread) {} + : FakeVoiceMediaSendChannel(AudioOptions(), network_thread) {} - void SetStats(const cricket::VoiceMediaInfo& voice_info) { - send_stats_ = cricket::VoiceMediaSendInfo(); + void SetStats(const VoiceMediaInfo& voice_info) { + send_stats_ = VoiceMediaSendInfo(); send_stats_->senders = voice_info.senders; send_stats_->send_codecs = voice_info.send_codecs; } // VoiceMediaChannel overrides. - bool GetStats(cricket::VoiceMediaSendInfo* info) override { + bool GetStats(VoiceMediaSendInfo* info) override { if (send_stats_) { *info = *send_stats_; return true; @@ -51,25 +83,24 @@ class FakeVoiceMediaSendChannelForStats } private: - absl::optional send_stats_; + std::optional send_stats_; }; class FakeVoiceMediaReceiveChannelForStats - : public cricket::FakeVoiceMediaReceiveChannel { + : public FakeVoiceMediaReceiveChannel { public: explicit FakeVoiceMediaReceiveChannelForStats(TaskQueueBase* network_thread) - : cricket::FakeVoiceMediaReceiveChannel(cricket::AudioOptions(), - network_thread) {} + : FakeVoiceMediaReceiveChannel(AudioOptions(), network_thread) {} - void SetStats(const cricket::VoiceMediaInfo& voice_info) { - receive_stats_ = cricket::VoiceMediaReceiveInfo(); + void SetStats(const VoiceMediaInfo& voice_info) { + receive_stats_ = VoiceMediaReceiveInfo(); receive_stats_->receivers = voice_info.receivers; receive_stats_->receive_codecs = voice_info.receive_codecs; receive_stats_->device_underrun_count = voice_info.device_underrun_count; } // VoiceMediaChannel overrides. 
- bool GetStats(cricket::VoiceMediaReceiveInfo* info, + bool GetStats(VoiceMediaReceiveInfo* info, bool get_and_clear_legacy_stats) override { if (receive_stats_) { *info = *receive_stats_; @@ -79,26 +110,24 @@ class FakeVoiceMediaReceiveChannelForStats } private: - absl::optional receive_stats_; + std::optional receive_stats_; }; // Fake VideoMediaChannel where the result of GetStats can be configured. -class FakeVideoMediaSendChannelForStats - : public cricket::FakeVideoMediaSendChannel { +class FakeVideoMediaSendChannelForStats : public FakeVideoMediaSendChannel { public: explicit FakeVideoMediaSendChannelForStats(TaskQueueBase* network_thread) - : cricket::FakeVideoMediaSendChannel(cricket::VideoOptions(), - network_thread) {} + : FakeVideoMediaSendChannel(VideoOptions(), network_thread) {} - void SetStats(const cricket::VideoMediaInfo& video_info) { - send_stats_ = cricket::VideoMediaSendInfo(); + void SetStats(const VideoMediaInfo& video_info) { + send_stats_ = VideoMediaSendInfo(); send_stats_->senders = video_info.senders; send_stats_->aggregated_senders = video_info.aggregated_senders; send_stats_->send_codecs = video_info.send_codecs; } // VideoMediaChannel overrides. - bool GetStats(cricket::VideoMediaSendInfo* info) override { + bool GetStats(VideoMediaSendInfo* info) override { if (send_stats_) { *info = *send_stats_; return true; @@ -107,24 +136,23 @@ class FakeVideoMediaSendChannelForStats } private: - absl::optional send_stats_; + std::optional send_stats_; }; class FakeVideoMediaReceiveChannelForStats - : public cricket::FakeVideoMediaReceiveChannel { + : public FakeVideoMediaReceiveChannel { public: explicit FakeVideoMediaReceiveChannelForStats(TaskQueueBase* network_thread) - : cricket::FakeVideoMediaReceiveChannel(cricket::VideoOptions(), - network_thread) {} + : FakeVideoMediaReceiveChannel(VideoOptions(), network_thread) {} - void SetStats(const cricket::VideoMediaInfo& video_info) { - receive_stats_ = cricket::VideoMediaReceiveInfo(); + void SetStats(const VideoMediaInfo& video_info) { + receive_stats_ = VideoMediaReceiveInfo(); receive_stats_->receivers = video_info.receivers; receive_stats_->receive_codecs = video_info.receive_codecs; } // VideoMediaChannel overrides. 
- bool GetStats(cricket::VideoMediaReceiveInfo* info) override { + bool GetStats(VideoMediaReceiveInfo* info) override { if (receive_stats_) { *info = *receive_stats_; return true; @@ -133,25 +161,24 @@ class FakeVideoMediaReceiveChannelForStats } private: - absl::optional receive_stats_; + std::optional receive_stats_; }; constexpr bool kDefaultRtcpMuxRequired = true; constexpr bool kDefaultSrtpRequired = true; -class VoiceChannelForTesting : public cricket::VoiceChannel { +class VoiceChannelForTesting : public VoiceChannel { public: VoiceChannelForTesting( - rtc::Thread* worker_thread, - rtc::Thread* network_thread, - rtc::Thread* signaling_thread, - std::unique_ptr send_channel, - std::unique_ptr - receive_channel, + Thread* worker_thread, + Thread* network_thread, + Thread* signaling_thread, + std::unique_ptr send_channel, + std::unique_ptr receive_channel, const std::string& content_name, bool srtp_required, - webrtc::CryptoOptions crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator, + CryptoOptions crypto_options, + UniqueRandomIdGenerator* ssrc_generator, std::string transport_name) : VoiceChannel(worker_thread, network_thread, @@ -172,19 +199,18 @@ class VoiceChannelForTesting : public cricket::VoiceChannel { const std::string test_transport_name_; }; -class VideoChannelForTesting : public cricket::VideoChannel { +class VideoChannelForTesting : public VideoChannel { public: VideoChannelForTesting( - rtc::Thread* worker_thread, - rtc::Thread* network_thread, - rtc::Thread* signaling_thread, - std::unique_ptr send_channel, - std::unique_ptr - receive_channel, + Thread* worker_thread, + Thread* network_thread, + Thread* signaling_thread, + std::unique_ptr send_channel, + std::unique_ptr receive_channel, const std::string& content_name, bool srtp_required, - webrtc::CryptoOptions crypto_options, - rtc::UniqueRandomIdGenerator* ssrc_generator, + CryptoOptions crypto_options, + UniqueRandomIdGenerator* ssrc_generator, std::string transport_name) : VideoChannel(worker_thread, network_thread, @@ -214,15 +240,17 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { // TODO(steveanton): Add support for specifying separate threads to test // multi-threading correctness. FakePeerConnectionForStats() - : network_thread_(rtc::Thread::Current()), - worker_thread_(rtc::Thread::Current()), - signaling_thread_(rtc::Thread::Current()), + : network_thread_(Thread::Current()), + worker_thread_(Thread::Current()), + signaling_thread_(Thread::Current()), // TODO(hta): remove separate thread variables and use context. 
dependencies_(MakeDependencies()), - context_(ConnectionContext::Create(&dependencies_)), + context_( + ConnectionContext::Create(CreateEnvironment(), &dependencies_)), local_streams_(StreamCollection::Create()), remote_streams_(StreamCollection::Create()), - data_channel_controller_(network_thread_) {} + data_channel_controller_(network_thread_), + codec_lookup_helper_(context_.get()) {} ~FakePeerConnectionForStats() { for (auto transceiver : transceivers_) { @@ -232,23 +260,23 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { static PeerConnectionFactoryDependencies MakeDependencies() { PeerConnectionFactoryDependencies dependencies; - dependencies.network_thread = rtc::Thread::Current(); - dependencies.worker_thread = rtc::Thread::Current(); - dependencies.signaling_thread = rtc::Thread::Current(); - dependencies.media_engine = std::make_unique(); + dependencies.network_thread = Thread::Current(); + dependencies.worker_thread = Thread::Current(); + dependencies.signaling_thread = Thread::Current(); + EnableFakeMedia(dependencies); return dependencies; } - rtc::scoped_refptr mutable_local_streams() { + scoped_refptr mutable_local_streams() { return local_streams_; } - rtc::scoped_refptr mutable_remote_streams() { + scoped_refptr mutable_remote_streams() { return remote_streams_; } - rtc::scoped_refptr AddSender( - rtc::scoped_refptr sender) { + scoped_refptr AddSender( + scoped_refptr sender) { // TODO(steveanton): Switch tests to use RtpTransceivers directly. auto sender_proxy = RtpSenderProxyWithInternal::Create( signaling_thread_, sender); @@ -258,14 +286,14 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { return sender_proxy; } - void RemoveSender(rtc::scoped_refptr sender) { + void RemoveSender(scoped_refptr sender) { GetOrCreateFirstTransceiverOfType(sender->media_type()) ->internal() ->RemoveSender(sender.get()); } - rtc::scoped_refptr AddReceiver( - rtc::scoped_refptr receiver) { + scoped_refptr AddReceiver( + scoped_refptr receiver) { // TODO(steveanton): Switch tests to use RtpTransceivers directly. 
auto receiver_proxy = RtpReceiverProxyWithInternal::Create( @@ -276,7 +304,7 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { return receiver_proxy; } - void RemoveReceiver(rtc::scoped_refptr receiver) { + void RemoveReceiver(scoped_refptr receiver) { GetOrCreateFirstTransceiverOfType(receiver->media_type()) ->internal() ->RemoveReceiver(receiver.get()); @@ -284,10 +312,9 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { std::pair - AddVoiceChannel( - const std::string& mid, - const std::string& transport_name, - cricket::VoiceMediaInfo initial_stats = cricket::VoiceMediaInfo()) { + AddVoiceChannel(const std::string& mid, + const std::string& transport_name, + VoiceMediaInfo initial_stats = VoiceMediaInfo()) { auto voice_media_send_channel = std::make_unique(network_thread_); auto voice_media_receive_channel = @@ -298,14 +325,13 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { worker_thread_, network_thread_, signaling_thread_, std::move(voice_media_send_channel), std::move(voice_media_receive_channel), mid, kDefaultSrtpRequired, - webrtc::CryptoOptions(), context_->ssrc_generator(), transport_name); + CryptoOptions(), context_->ssrc_generator(), transport_name); auto transceiver = - GetOrCreateFirstTransceiverOfType(cricket::MEDIA_TYPE_AUDIO) - ->internal(); + GetOrCreateFirstTransceiverOfType(webrtc::MediaType::AUDIO)->internal(); if (transceiver->channel()) { // This transceiver already has a channel, create a new one. transceiver = - CreateTransceiverOfType(cricket::MEDIA_TYPE_AUDIO)->internal(); + CreateTransceiverOfType(webrtc::MediaType::AUDIO)->internal(); } RTC_DCHECK(!transceiver->channel()); transceiver->SetChannel(std::move(voice_channel), @@ -318,10 +344,9 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { std::pair - AddVideoChannel( - const std::string& mid, - const std::string& transport_name, - cricket::VideoMediaInfo initial_stats = cricket::VideoMediaInfo()) { + AddVideoChannel(const std::string& mid, + const std::string& transport_name, + VideoMediaInfo initial_stats = VideoMediaInfo()) { auto video_media_send_channel = std::make_unique(network_thread_); auto video_media_receive_channel = @@ -332,14 +357,13 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { worker_thread_, network_thread_, signaling_thread_, std::move(video_media_send_channel), std::move(video_media_receive_channel), mid, kDefaultSrtpRequired, - webrtc::CryptoOptions(), context_->ssrc_generator(), transport_name); + CryptoOptions(), context_->ssrc_generator(), transport_name); auto transceiver = - GetOrCreateFirstTransceiverOfType(cricket::MEDIA_TYPE_VIDEO) - ->internal(); + GetOrCreateFirstTransceiverOfType(webrtc::MediaType::VIDEO)->internal(); if (transceiver->channel()) { // This transceiver already has a channel, create a new one. transceiver = - CreateTransceiverOfType(cricket::MEDIA_TYPE_VIDEO)->internal(); + CreateTransceiverOfType(webrtc::MediaType::VIDEO)->internal(); } RTC_DCHECK(!transceiver->channel()); transceiver->SetChannel(std::move(video_channel), @@ -359,24 +383,23 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { // TODO(bugs.webrtc.org/11547): Supply a separate network thread. 
AddSctpDataChannel(SctpDataChannel::Create( data_channel_controller_.weak_ptr(), label, false, init, - rtc::Thread::Current(), rtc::Thread::Current())); + Thread::Current(), Thread::Current())); } - void AddSctpDataChannel(rtc::scoped_refptr data_channel) { + void AddSctpDataChannel(scoped_refptr data_channel) { sctp_data_channels_.push_back(data_channel); } void SetTransportStats(const std::string& transport_name, - const cricket::TransportChannelStats& channel_stats) { - SetTransportStats( - transport_name, - std::vector{channel_stats}); + const TransportChannelStats& channel_stats) { + SetTransportStats(transport_name, + std::vector{channel_stats}); } void SetTransportStats( const std::string& transport_name, - const std::vector& channel_stats_list) { - cricket::TransportStats transport_stats; + const std::vector& channel_stats_list) { + TransportStats transport_stats; transport_stats.transport_name = transport_name; transport_stats.channel_stats = channel_stats_list; transport_stats_by_name_[transport_name] = transport_stats; @@ -385,34 +408,32 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { void SetCallStats(const Call::Stats& call_stats) { call_stats_ = call_stats; } void SetAudioDeviceStats( - absl::optional audio_device_stats) { + std::optional audio_device_stats) { audio_device_stats_ = audio_device_stats; } - void SetLocalCertificate( - const std::string& transport_name, - rtc::scoped_refptr certificate) { + void SetLocalCertificate(const std::string& transport_name, + scoped_refptr certificate) { local_certificates_by_transport_[transport_name] = certificate; } void SetRemoteCertChain(const std::string& transport_name, - std::unique_ptr chain) { + std::unique_ptr chain) { remote_cert_chains_by_transport_[transport_name] = std::move(chain); } // PeerConnectionInterface overrides. - rtc::scoped_refptr local_streams() override { + scoped_refptr local_streams() override { return local_streams_; } - rtc::scoped_refptr remote_streams() override { + scoped_refptr remote_streams() override { return remote_streams_; } - std::vector> GetSenders() - const override { - std::vector> senders; + std::vector> GetSenders() const override { + std::vector> senders; for (auto transceiver : transceivers_) { for (auto sender : transceiver->internal()->senders()) { senders.push_back(sender); @@ -421,9 +442,9 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { return senders; } - std::vector> GetReceivers() + std::vector> GetReceivers() const override { - std::vector> receivers; + std::vector> receivers; for (auto transceiver : transceivers_) { for (auto receiver : transceiver->internal()->receivers()) { receivers.push_back(receiver); @@ -434,14 +455,13 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { // PeerConnectionInternal overrides. 
- rtc::Thread* network_thread() const override { return network_thread_; } + Thread* network_thread() const override { return network_thread_; } - rtc::Thread* worker_thread() const override { return worker_thread_; } + Thread* worker_thread() const override { return worker_thread_; } - rtc::Thread* signaling_thread() const override { return signaling_thread_; } + Thread* signaling_thread() const override { return signaling_thread_; } - std::vector< - rtc::scoped_refptr>> + std::vector>> GetTransceiversInternal() const override { return transceivers_; } @@ -454,14 +474,12 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { return stats; } - cricket::CandidateStatsList GetPooledCandidateStats() const override { - return {}; - } + CandidateStatsList GetPooledCandidateStats() const override { return {}; } - std::map GetTransportStatsByNames( + std::map GetTransportStatsByNames( const std::set& transport_names) override { RTC_DCHECK_RUN_ON(network_thread_); - std::map transport_stats_by_name; + std::map transport_stats_by_name; for (const std::string& transport_name : transport_names) { transport_stats_by_name[transport_name] = GetTransportStatsByName(transport_name); @@ -471,13 +489,13 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { Call::Stats GetCallStats() override { return call_stats_; } - absl::optional GetAudioDeviceStats() override { + std::optional GetAudioDeviceStats() override { return audio_device_stats_; } bool GetLocalCertificate( const std::string& transport_name, - rtc::scoped_refptr* certificate) override { + scoped_refptr* certificate) override { auto it = local_certificates_by_transport_.find(transport_name); if (it != local_certificates_by_transport_.end()) { *certificate = it->second; @@ -487,7 +505,7 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { } } - std::unique_ptr GetRemoteSSLCertChain( + std::unique_ptr GetRemoteSSLCertChain( const std::string& transport_name) override { auto it = remote_cert_chains_by_transport_.find(transport_name); if (it != remote_cert_chains_by_transport_.end()) { @@ -496,26 +514,26 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { return nullptr; } } + PayloadTypePicker& payload_type_picker() { return payload_type_picker_; } private: - cricket::TransportStats GetTransportStatsByName( - const std::string& transport_name) { + TransportStats GetTransportStatsByName(const std::string& transport_name) { auto it = transport_stats_by_name_.find(transport_name); if (it != transport_stats_by_name_.end()) { // If specific transport stats have been specified, return those. return it->second; } // Otherwise, generate some dummy stats. 
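// Illustrative note, not part of the patch: the dummy fallback below is only
// hit when a test has not installed stats of its own. A test that wants
// deterministic transport stats installs them up front, using only members
// that appear in this file (real tests usually fill in more of
// TransportChannelStats):
//
//   TransportChannelStats channel_stats;
//   channel_stats.component = ICE_CANDIDATE_COMPONENT_RTP;
//   fake_pc.SetTransportStats("transport", channel_stats);
//
// after which GetTransportStatsByName("transport") returns the installed
// entry instead of the generated one.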
- cricket::TransportChannelStats channel_stats; - channel_stats.component = cricket::ICE_CANDIDATE_COMPONENT_RTP; - cricket::TransportStats transport_stats; + TransportChannelStats channel_stats; + channel_stats.component = ICE_CANDIDATE_COMPONENT_RTP; + TransportStats transport_stats; transport_stats.transport_name = transport_name; transport_stats.channel_stats.push_back(channel_stats); return transport_stats; } - rtc::scoped_refptr> - GetOrCreateFirstTransceiverOfType(cricket::MediaType media_type) { + scoped_refptr> + GetOrCreateFirstTransceiverOfType(webrtc::MediaType media_type) { for (auto transceiver : transceivers_) { if (transceiver->internal()->media_type() == media_type) { return transceiver; @@ -524,43 +542,45 @@ class FakePeerConnectionForStats : public FakePeerConnectionBase { return CreateTransceiverOfType(media_type); } - rtc::scoped_refptr> - CreateTransceiverOfType(cricket::MediaType media_type) { + scoped_refptr> + CreateTransceiverOfType(webrtc::MediaType media_type) { auto transceiver = RtpTransceiverProxyWithInternal::Create( signaling_thread_, - rtc::make_ref_counted(media_type, context_.get())); + make_ref_counted(media_type, context_.get(), + &codec_lookup_helper_)); transceivers_.push_back(transceiver); return transceiver; } - rtc::Thread* const network_thread_; - rtc::Thread* const worker_thread_; - rtc::Thread* const signaling_thread_; + Thread* const network_thread_; + Thread* const worker_thread_; + Thread* const signaling_thread_; PeerConnectionFactoryDependencies dependencies_; - rtc::scoped_refptr context_; + scoped_refptr context_; - rtc::scoped_refptr local_streams_; - rtc::scoped_refptr remote_streams_; + scoped_refptr local_streams_; + scoped_refptr remote_streams_; - std::vector< - rtc::scoped_refptr>> + std::vector>> transceivers_; FakeDataChannelController data_channel_controller_; - std::vector> sctp_data_channels_; + std::vector> sctp_data_channels_; - std::map transport_stats_by_name_; + std::map transport_stats_by_name_; Call::Stats call_stats_; - absl::optional audio_device_stats_; + std::optional audio_device_stats_; - std::map> + std::map> local_certificates_by_transport_; - std::map> + std::map> remote_cert_chains_by_transport_; + PayloadTypePicker payload_type_picker_; + FakeCodecLookupHelper codec_lookup_helper_; }; } // namespace webrtc diff --git a/pc/test/fake_periodic_video_source.h b/pc/test/fake_periodic_video_source.h index 452a8f6c30..3b8a0b3807 100644 --- a/pc/test/fake_periodic_video_source.h +++ b/pc/test/fake_periodic_video_source.h @@ -11,19 +11,27 @@ #ifndef PC_TEST_FAKE_PERIODIC_VIDEO_SOURCE_H_ #define PC_TEST_FAKE_PERIODIC_VIDEO_SOURCE_H_ +#include #include +#include "api/sequence_checker.h" +#include "api/units/time_delta.h" +#include "api/video/video_frame.h" +#include "api/video/video_rotation.h" +#include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "media/base/fake_frame_source.h" #include "media/base/video_broadcaster.h" +#include "rtc_base/checks.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/task_utils/repeating_task.h" +#include "rtc_base/thread_annotations.h" +#include "rtc_base/time_utils.h" namespace webrtc { -class FakePeriodicVideoSource final - : public rtc::VideoSourceInterface { +class FakePeriodicVideoSource final : public VideoSourceInterface { public: static constexpr int kDefaultFrameIntervalMs = 33; static constexpr int kDefaultWidth = 640; @@ -39,11 +47,10 @@ class FakePeriodicVideoSource final 
FakePeriodicVideoSource() : FakePeriodicVideoSource(Config()) {} explicit FakePeriodicVideoSource(Config config) - : frame_source_( - config.width, - config.height, - config.frame_interval_ms * rtc::kNumMicrosecsPerMillisec, - config.timestamp_offset_ms * rtc::kNumMicrosecsPerMillisec), + : frame_source_(config.width, + config.height, + config.frame_interval_ms * kNumMicrosecsPerMillisec, + config.timestamp_offset_ms * kNumMicrosecsPerMillisec), task_queue_(std::make_unique( "FakePeriodicVideoTrackSource")) { frame_source_.SetRotation(config.rotation); @@ -60,18 +67,18 @@ class FakePeriodicVideoSource final }); } - rtc::VideoSinkWants wants() const { + VideoSinkWants wants() const { MutexLock lock(&mutex_); return wants_; } - void RemoveSink(rtc::VideoSinkInterface* sink) override { + void RemoveSink(VideoSinkInterface* sink) override { RTC_DCHECK(thread_checker_.IsCurrent()); broadcaster_.RemoveSink(sink); } - void AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override { + void AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) override { RTC_DCHECK(thread_checker_.IsCurrent()); { MutexLock lock(&mutex_); @@ -89,10 +96,10 @@ class FakePeriodicVideoSource final private: SequenceChecker thread_checker_{SequenceChecker::kDetached}; - rtc::VideoBroadcaster broadcaster_; - cricket::FakeFrameSource frame_source_; + VideoBroadcaster broadcaster_; + FakeFrameSource frame_source_; mutable Mutex mutex_; - rtc::VideoSinkWants wants_ RTC_GUARDED_BY(&mutex_); + VideoSinkWants wants_ RTC_GUARDED_BY(&mutex_); std::unique_ptr task_queue_; RepeatingTaskHandle repeating_task_handle_; diff --git a/pc/test/fake_periodic_video_track_source.h b/pc/test/fake_periodic_video_track_source.h index f91144d1cc..54dbaaf0d6 100644 --- a/pc/test/fake_periodic_video_track_source.h +++ b/pc/test/fake_periodic_video_track_source.h @@ -11,6 +11,8 @@ #ifndef PC_TEST_FAKE_PERIODIC_VIDEO_TRACK_SOURCE_H_ #define PC_TEST_FAKE_PERIODIC_VIDEO_TRACK_SOURCE_H_ +#include "api/video/video_frame.h" +#include "api/video/video_source_interface.h" #include "pc/test/fake_periodic_video_source.h" #include "pc/video_track_source.h" @@ -35,7 +37,7 @@ class FakePeriodicVideoTrackSource : public VideoTrackSource { } protected: - rtc::VideoSourceInterface* source() override { return &source_; } + VideoSourceInterface* source() override { return &source_; } private: FakePeriodicVideoSource source_; diff --git a/pc/test/fake_rtc_certificate_generator.h b/pc/test/fake_rtc_certificate_generator.h index 61da26a12f..64879c188d 100644 --- a/pc/test/fake_rtc_certificate_generator.h +++ b/pc/test/fake_rtc_certificate_generator.h @@ -11,20 +11,24 @@ #ifndef PC_TEST_FAKE_RTC_CERTIFICATE_GENERATOR_H_ #define PC_TEST_FAKE_RTC_CERTIFICATE_GENERATOR_H_ +#include +#include #include #include -#include "absl/types/optional.h" -#include "api/peer_connection_interface.h" +#include "api/scoped_refptr.h" #include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" +#include "rtc_base/checks.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/rtc_certificate_generator.h" +#include "rtc_base/ssl_identity.h" // RSA with mod size 1024, pub exp 0x10001. -static const rtc::RTCCertificatePEM kRsaPems[] = { - rtc::RTCCertificatePEM( - "-----BEGIN RSA PRIVATE KEY-----\n" +static const webrtc::RTCCertificatePEM kRsaPems[] = { + webrtc::RTCCertificatePEM( + "-----BEGIN RSA PRI" // Linebreak to avoid detection of private + "VATE KEY-----\n" // keys by linters. 
"MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAMYRkbhmI7kVA/rM\n" "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n" "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n" @@ -51,8 +55,9 @@ static const rtc::RTCCertificatePEM kRsaPems[] = { "LJE/mGw3MyFHEqi81jh95J+ypl6xKW6Rm8jKLR87gUvCaVYn/Z4/P3AqcQTB7wOv\n" "UD0A8qfhfDM+LK6rPAnCsVN0NRDY3jvd6rzix9M=\n" "-----END CERTIFICATE-----\n"), - rtc::RTCCertificatePEM( - "-----BEGIN RSA PRIVATE KEY-----\n" + webrtc::RTCCertificatePEM( + "-----BEGIN RSA PRI" // Linebreak to avoid detection of private + "VATE KEY-----\n" // keys by linters. "MIICXQIBAAKBgQDeYqlyJ1wuiMsi905e3X81/WA/G3ym50PIDZBVtSwZi7JVQPgj\n" "Bl8CPZMvDh9EwB4Ji9ytA8dZZbQ4WbJWPr73zPpJSCvQqz6sOXSlenBRi72acNaQ\n" "sOR/qPvviJx5I6Hqo4qemfnjZhAW85a5BpgrAwKgMLIQTHCTLWwVSyrDrwIDAQAB\n" @@ -88,9 +93,10 @@ static const rtc::RTCCertificatePEM kRsaPems[] = { // `SSLIdentity::Create` and invoking `identity->PrivateKeyToPEMString()`, // `identity->PublicKeyToPEMString()` and // `identity->certificate().ToPEMString()`. -static const rtc::RTCCertificatePEM kEcdsaPems[] = { - rtc::RTCCertificatePEM( - "-----BEGIN PRIVATE KEY-----\n" +static const webrtc::RTCCertificatePEM kEcdsaPems[] = { + webrtc::RTCCertificatePEM( + "-----BEGIN PRI" // Linebreak to avoid detection of private + "VATE KEY-----\n" // keys by linters. "MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQg+qaRsR5uHtqG689M\n" "A3PHSJNeVpyi5wUKCft62h0UWy+hRANCAAS5Mjc85q9fVq4ln+zOPlaEC/Rzj5Pb\n" "MVZtf1x/8k2KsbmyZoAMDX2yer/atEuXmItMe3yd6/DXnvboU//D3Lyt\n" @@ -103,8 +109,9 @@ static const rtc::RTCCertificatePEM kEcdsaPems[] = { "vK0wCgYIKoZIzj0EAwIDSQAwRgIhAIIc3+CqfkZ9lLwTj1PvUtt3KhnqF2kD0War\n" "cCoTBbCxAiEAyp9Cn4vo2ZBhRIVDKyoxmwak8Z0PAVhJAQaWCgoY2D4=\n" "-----END CERTIFICATE-----\n"), - rtc::RTCCertificatePEM( - "-----BEGIN PRIVATE KEY-----\n" + webrtc::RTCCertificatePEM( + "-----BEGIN PRI" // Linebreak to avoid detection of private + "VATE KEY-----\n" // keys by linters. "MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQghL/G4JRYnuDNbQuh\n" "LqkytcE39Alsq6FItDVFgOesfCmhRANCAATd53FjPLyVUcwYguEPbSJM03fP6Rx5\n" "GY1dEZ00+ZykjJI83VfDAyvmpRuGahNtBH0hc+7xkDCbeo6TM0tN35xr\n" @@ -119,7 +126,7 @@ static const rtc::RTCCertificatePEM kEcdsaPems[] = { "-----END CERTIFICATE-----\n")}; class FakeRTCCertificateGenerator - : public rtc::RTCCertificateGeneratorInterface { + : public webrtc::RTCCertificateGeneratorInterface { public: FakeRTCCertificateGenerator() : should_fail_(false), should_wait_(false) {} @@ -135,34 +142,34 @@ class FakeRTCCertificateGenerator int generated_certificates() { return generated_certificates_; } int generated_failures() { return generated_failures_; } - void GenerateCertificateAsync(const rtc::KeyParams& key_params, - const absl::optional& expires_ms, + void GenerateCertificateAsync(const webrtc::KeyParams& key_params, + const std::optional& expires_ms, Callback callback) override { // The certificates are created from constant PEM strings and use its coded // expiration time, we do not support modifying it. RTC_DCHECK(!expires_ms); // Only supports RSA-1024-0x10001 and ECDSA-P256. 
- if (key_params.type() == rtc::KT_RSA) { + if (key_params.type() == webrtc::KT_RSA) { RTC_DCHECK_EQ(key_params.rsa_params().mod_size, 1024); RTC_DCHECK_EQ(key_params.rsa_params().pub_exp, 0x10001); } else { - RTC_DCHECK_EQ(key_params.type(), rtc::KT_ECDSA); - RTC_DCHECK_EQ(key_params.ec_curve(), rtc::EC_NIST_P256); + RTC_DCHECK_EQ(key_params.type(), webrtc::KT_ECDSA); + RTC_DCHECK_EQ(key_params.ec_curve(), webrtc::EC_NIST_P256); } - rtc::KeyType key_type = key_params.type(); + webrtc::KeyType key_type = key_params.type(); webrtc::TaskQueueBase::Current()->PostTask( [this, key_type, callback = std::move(callback)]() mutable { GenerateCertificate(key_type, std::move(callback)); }); } - static rtc::scoped_refptr GenerateCertificate() { - switch (rtc::KT_DEFAULT) { - case rtc::KT_RSA: - return rtc::RTCCertificate::FromPEM(kRsaPems[0]); - case rtc::KT_ECDSA: - return rtc::RTCCertificate::FromPEM(kEcdsaPems[0]); + static webrtc::scoped_refptr GenerateCertificate() { + switch (webrtc::KT_DEFAULT) { + case webrtc::KT_RSA: + return webrtc::RTCCertificate::FromPEM(kRsaPems[0]); + case webrtc::KT_ECDSA: + return webrtc::RTCCertificate::FromPEM(kEcdsaPems[0]); default: RTC_DCHECK_NOTREACHED(); return nullptr; @@ -170,25 +177,26 @@ class FakeRTCCertificateGenerator } private: - const rtc::RTCCertificatePEM& get_pem(const rtc::KeyType& key_type) const { + const webrtc::RTCCertificatePEM& get_pem( + const webrtc::KeyType& key_type) const { switch (key_type) { - case rtc::KT_RSA: + case webrtc::KT_RSA: return kRsaPems[key_index_]; - case rtc::KT_ECDSA: + case webrtc::KT_ECDSA: return kEcdsaPems[key_index_]; default: RTC_DCHECK_NOTREACHED(); return kEcdsaPems[key_index_]; } } - const std::string& get_key(const rtc::KeyType& key_type) const { + const std::string& get_key(const webrtc::KeyType& key_type) const { return get_pem(key_type).private_key(); } - const std::string& get_cert(const rtc::KeyType& key_type) const { + const std::string& get_cert(const webrtc::KeyType& key_type) const { return get_pem(key_type).certificate(); } - void GenerateCertificate(rtc::KeyType key_type, Callback callback) { + void GenerateCertificate(webrtc::KeyType key_type, Callback callback) { // If the certificate generation should be stalled, re-post this same // message to the queue with a small delay so as to wait in a loop until // set_should_wait(false) is called. 
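// Illustrative sketch, not part of the patch: the body that implements the
// "re-post with a small delay" loop described in the comment above is elided
// from this hunk; it presumably looks roughly like the following, using
// TaskQueueBase::PostDelayedTask to poll until set_should_wait(false):
//
//   if (should_wait_) {
//     webrtc::TaskQueueBase::Current()->PostDelayedTask(
//         [this, key_type, callback = std::move(callback)]() mutable {
//           GenerateCertificate(key_type, std::move(callback));
//         },
//         webrtc::TimeDelta::Millis(1));
//     return;
//   }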
@@ -204,8 +212,8 @@ class FakeRTCCertificateGenerator ++generated_failures_; std::move(callback)(nullptr); } else { - rtc::scoped_refptr certificate = - rtc::RTCCertificate::FromPEM(get_pem(key_type)); + webrtc::scoped_refptr certificate = + webrtc::RTCCertificate::FromPEM(get_pem(key_type)); RTC_DCHECK(certificate); ++generated_certificates_; std::move(callback)(std::move(certificate)); diff --git a/pc/test/fake_video_track_renderer.h b/pc/test/fake_video_track_renderer.h index f6e341b4b2..b45727b0cc 100644 --- a/pc/test/fake_video_track_renderer.h +++ b/pc/test/fake_video_track_renderer.h @@ -12,20 +12,22 @@ #define PC_TEST_FAKE_VIDEO_TRACK_RENDERER_H_ #include "api/media_stream_interface.h" +#include "api/scoped_refptr.h" +#include "api/video/video_source_interface.h" #include "media/base/fake_video_renderer.h" namespace webrtc { -class FakeVideoTrackRenderer : public cricket::FakeVideoRenderer { +class FakeVideoTrackRenderer : public FakeVideoRenderer { public: explicit FakeVideoTrackRenderer(VideoTrackInterface* video_track) : video_track_(video_track) { - video_track_->AddOrUpdateSink(this, rtc::VideoSinkWants()); + video_track_->AddOrUpdateSink(this, VideoSinkWants()); } ~FakeVideoTrackRenderer() { video_track_->RemoveSink(this); } private: - rtc::scoped_refptr video_track_; + scoped_refptr video_track_; }; } // namespace webrtc diff --git a/pc/test/fake_video_track_source.h b/pc/test/fake_video_track_source.h index 2042c39175..df56f625ca 100644 --- a/pc/test/fake_video_track_source.h +++ b/pc/test/fake_video_track_source.h @@ -11,7 +11,10 @@ #ifndef PC_TEST_FAKE_VIDEO_TRACK_SOURCE_H_ #define PC_TEST_FAKE_VIDEO_TRACK_SOURCE_H_ -#include "api/media_stream_interface.h" +#include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" +#include "api/video/video_frame.h" +#include "api/video/video_source_interface.h" #include "media/base/video_broadcaster.h" #include "pc/video_track_source.h" @@ -21,13 +24,11 @@ namespace webrtc { // injection of frames. 
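// Illustrative sketch, not part of the patch: driving the fake source defined
// below. The frame-injection method itself is elided from this hunk; it is
// assumed here to be named InjectFrame() and to forward to the internal
// VideoBroadcaster. I420Buffer and VideoFrame::Builder are the usual helpers
// from api/video.
void ExampleInjectFrame(webrtc::FakeVideoTrackSource& source) {
  webrtc::VideoFrame frame =
      webrtc::VideoFrame::Builder()
          .set_video_frame_buffer(webrtc::I420Buffer::Create(640, 480))
          .set_timestamp_us(webrtc::TimeMicros())
          .build();
  source.InjectFrame(frame);  // Fans out to every sink added via the track.
}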
class FakeVideoTrackSource : public VideoTrackSource { public: - static rtc::scoped_refptr Create(bool is_screencast) { - return rtc::make_ref_counted(is_screencast); + static scoped_refptr Create(bool is_screencast) { + return make_ref_counted(is_screencast); } - static rtc::scoped_refptr Create() { - return Create(false); - } + static scoped_refptr Create() { return Create(false); } bool is_screencast() const override { return is_screencast_; } @@ -40,13 +41,13 @@ class FakeVideoTrackSource : public VideoTrackSource { : VideoTrackSource(false /* remote */), is_screencast_(is_screencast) {} ~FakeVideoTrackSource() override = default; - rtc::VideoSourceInterface* source() override { + VideoSourceInterface* source() override { return &video_broadcaster_; } private: const bool is_screencast_; - rtc::VideoBroadcaster video_broadcaster_; + VideoBroadcaster video_broadcaster_; }; } // namespace webrtc diff --git a/pc/test/frame_generator_capturer_video_track_source.h b/pc/test/frame_generator_capturer_video_track_source.h index 79a5b3474a..4e9d7bec33 100644 --- a/pc/test/frame_generator_capturer_video_track_source.h +++ b/pc/test/frame_generator_capturer_video_track_source.h @@ -12,12 +12,17 @@ #define PC_TEST_FRAME_GENERATOR_CAPTURER_VIDEO_TRACK_SOURCE_H_ #include +#include #include +#include "api/media_stream_interface.h" #include "api/task_queue/default_task_queue_factory.h" #include "api/task_queue/task_queue_factory.h" #include "api/test/create_frame_generator.h" +#include "api/video/video_frame.h" +#include "api/video/video_source_interface.h" #include "pc/video_track_source.h" +#include "system_wrappers/include/clock.h" #include "test/frame_generator_capturer.h" namespace webrtc { @@ -49,7 +54,7 @@ class FrameGeneratorCapturerVideoTrackSource : public VideoTrackSource { video_capturer_ = std::make_unique( clock, test::CreateSquareFrameGenerator(config.width, config.height, - absl::nullopt, + std::nullopt, config.num_squares_generated), config.frames_per_second, *task_queue_factory_); video_capturer_->Init(); @@ -77,7 +82,7 @@ class FrameGeneratorCapturerVideoTrackSource : public VideoTrackSource { bool is_screencast() const override { return is_screencast_; } protected: - rtc::VideoSourceInterface* source() override { + VideoSourceInterface* source() override { return video_capturer_.get(); } diff --git a/pc/test/integration_test_helpers.cc b/pc/test/integration_test_helpers.cc index 3f07c9e826..23a712a3c8 100644 --- a/pc/test/integration_test_helpers.cc +++ b/pc/test/integration_test_helpers.cc @@ -10,6 +10,35 @@ #include "pc/test/integration_test_helpers.h" +#include +#include +#include +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "api/audio/builtin_audio_processing_builder.h" +#include "api/enable_media_with_defaults.h" +#include "api/field_trials_view.h" +#include "api/jsep.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_event_log/rtc_event_log_factory.h" +#include "api/sequence_checker.h" +#include "api/stats/rtcstats_objects.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "api/units/time_delta.h" +#include "logging/rtc_event_log/fake_rtc_event_log_factory.h" +#include "pc/peer_connection_factory.h" +#include "pc/test/fake_audio_capture_module.h" +#include "rtc_base/checks.h" +#include "rtc_base/fake_network.h" +#include "rtc_base/socket_server.h" +#include "rtc_base/thread.h" +#include "test/gtest.h" + 
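// Illustrative sketch, not part of the patch: the square-generator call used
// in the FrameGeneratorCapturerVideoTrackSource constructor above, shown
// standalone. With the absl::optional -> std::optional migration in this
// change, std::nullopt now selects the default output frame type; the other
// arguments mirror the Config fields (width, height, num_squares_generated).
auto MakeExampleSquareGenerator() {
  return webrtc::test::CreateSquareFrameGenerator(
      /*width=*/640, /*height=*/480,
      /*type=*/std::nullopt, /*num_squares=*/10);
}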
namespace webrtc { PeerConnectionInterface::RTCOfferAnswerOptions IceRestartOfferAnswerOptions() { @@ -18,16 +47,16 @@ PeerConnectionInterface::RTCOfferAnswerOptions IceRestartOfferAnswerOptions() { return options; } -void RemoveSsrcsAndMsids(cricket::SessionDescription* desc) { - for (ContentInfo& content : desc->contents()) { +void RemoveSsrcsAndMsids(std::unique_ptr& sdp) { + for (ContentInfo& content : sdp->description()->contents()) { content.media_description()->mutable_streams().clear(); } - desc->set_msid_supported(false); - desc->set_msid_signaling(0); + sdp->description()->set_msid_signaling(0); } -void RemoveSsrcsAndKeepMsids(cricket::SessionDescription* desc) { - for (ContentInfo& content : desc->contents()) { +void RemoveSsrcsAndKeepMsids( + std::unique_ptr& sdp) { + for (ContentInfo& content : sdp->description()->contents()) { std::string track_id; std::vector stream_ids; if (!content.media_description()->streams().empty()) { @@ -44,9 +73,16 @@ void RemoveSsrcsAndKeepMsids(cricket::SessionDescription* desc) { } } +void SetSdpType(std::unique_ptr& sdp, + SdpType sdpType) { + std::string str; + sdp->ToString(&str); + sdp = CreateSessionDescription(sdpType, str); +} + int FindFirstMediaStatsIndexByKind( const std::string& kind, - const std::vector& inbound_rtps) { + const std::vector& inbound_rtps) { for (size_t i = 0; i < inbound_rtps.size(); i++) { if (*inbound_rtps[i]->kind == kind) { return i; @@ -55,6 +91,13 @@ int FindFirstMediaStatsIndexByKind( return -1; } +void ReplaceFirstSsrc(StreamParams& stream, uint32_t ssrc) { + stream.ssrcs[0] = ssrc; + for (auto& group : stream.ssrc_groups) { + group.ssrcs[0] = ssrc; + } +} + TaskQueueMetronome::TaskQueueMetronome(TimeDelta tick_period) : tick_period_(tick_period) {} @@ -87,4 +130,168 @@ TimeDelta TaskQueueMetronome::TickPeriod() const { return tick_period_; } +// Implementation of PeerConnectionIntegrationWrapper functions +void PeerConnectionIntegrationWrapper::StartWatchingDelayStats() { + // Get the baseline numbers for audio_packets and audio_delay. + auto received_stats = NewGetStats(); + auto rtp_stats = + received_stats->GetStatsOfType()[0]; + ASSERT_TRUE(rtp_stats->relative_packet_arrival_delay.has_value()); + ASSERT_TRUE(rtp_stats->packets_received.has_value()); + rtp_stats_id_ = rtp_stats->id(); + audio_packets_stat_ = *rtp_stats->packets_received; + audio_delay_stat_ = *rtp_stats->relative_packet_arrival_delay; + audio_samples_stat_ = *rtp_stats->total_samples_received; + audio_concealed_stat_ = *rtp_stats->concealed_samples; +} + +void PeerConnectionIntegrationWrapper::UpdateDelayStats(std::string tag, + int desc_size) { + auto report = NewGetStats(); + auto rtp_stats = report->GetAs(rtp_stats_id_); + ASSERT_TRUE(rtp_stats); + auto delta_packets = *rtp_stats->packets_received - audio_packets_stat_; + auto delta_rpad = + *rtp_stats->relative_packet_arrival_delay - audio_delay_stat_; + auto recent_delay = delta_packets > 0 ? delta_rpad / delta_packets : -1; + // The purpose of these checks is to sound the alarm early if we introduce + // serious regressions. The numbers are not acceptable for production, but + // occur on slow bots. + // + // An average relative packet arrival delay over the renegotiation of + // > 100 ms indicates that something is dramatically wrong, and will impact + // quality for sure. 
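  // Worked example (illustrative, not part of the patch): if 400 packets
  // arrive during the renegotiation window and relative_packet_arrival_delay
  // grows by 20.0 s over the same window, then
  //   recent_delay = delta_rpad / delta_packets = 20.0 / 400 = 0.05 s (50 ms),
  // comfortably inside the 0.25 s debug and 0.1 s release bounds checked
  // below.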
+ // Worst bots: + // linux_x86_dbg at 0.206 +#if !defined(NDEBUG) + EXPECT_GT(0.25, recent_delay) << tag << " size " << desc_size; +#else + EXPECT_GT(0.1, recent_delay) << tag << " size " << desc_size; +#endif + auto delta_samples = *rtp_stats->total_samples_received - audio_samples_stat_; + auto delta_concealed = *rtp_stats->concealed_samples - audio_concealed_stat_; + // These limits should be adjusted down as we improve: + // + // Concealing more than 4000 samples during a renegotiation is unacceptable. + // But some bots are slow. + + // Worst bots: + // linux_more_configs bot at conceal count 5184 + // android_arm_rel at conceal count 9241 + // linux_x86_dbg at 15174 +#if !defined(NDEBUG) + EXPECT_GT(18000U, delta_concealed) << "Concealed " << delta_concealed + << " of " << delta_samples << " samples"; +#else + EXPECT_GT(15000U, delta_concealed) << "Concealed " << delta_concealed + << " of " << delta_samples << " samples"; +#endif + // Concealing more than 20% of samples during a renegotiation is + // unacceptable. + // Worst bots: + // Nondebug: Linux32 Release at conceal rate 0.606597 (CI run) + // Debug: linux_x86_dbg bot at conceal rate 0.854 + // internal bot at conceal rate 0.967 (b/294020344) + // TODO(https://crbug.com/webrtc/15393): Improve audio quality during + // renegotiation so that we can reduce these thresholds, 99% is not even + // close to the 20% deemed unacceptable above or the 0% that would be ideal. + if (delta_samples > 0) { +#if !defined(NDEBUG) + EXPECT_LT(1.0 * delta_concealed / delta_samples, 0.99) + << "Concealed " << delta_concealed << " of " << delta_samples + << " samples"; +#else + EXPECT_LT(1.0 * delta_concealed / delta_samples, 0.7) + << "Concealed " << delta_concealed << " of " << delta_samples + << " samples"; +#endif + } + // Increment trailing counters + audio_packets_stat_ = *rtp_stats->packets_received; + audio_delay_stat_ = *rtp_stats->relative_packet_arrival_delay; + audio_samples_stat_ = *rtp_stats->total_samples_received; + audio_concealed_stat_ = *rtp_stats->concealed_samples; +} + +bool PeerConnectionIntegrationWrapper::Init( + const PeerConnectionFactory::Options* options, + const PeerConnectionInterface::RTCConfiguration* config, + PeerConnectionDependencies dependencies, + SocketServer* socket_server, + Thread* network_thread, + Thread* worker_thread, + std::unique_ptr field_trials, + std::unique_ptr event_log_factory, + bool reset_encoder_factory, + bool reset_decoder_factory, + bool create_media_engine) { + // There's an error in this test code if Init ends up being called twice. 
+ RTC_DCHECK(!peer_connection_); + RTC_DCHECK(!peer_connection_factory_); + + auto network_manager = std::make_unique(); + fake_network_manager_ = network_manager.get(); + fake_network_manager_->AddInterface(kDefaultLocalAddress); + + network_thread_ = network_thread; + + fake_audio_capture_module_ = FakeAudioCaptureModule::Create(); + if (!fake_audio_capture_module_) { + return false; + } + Thread* const signaling_thread = Thread::Current(); + + PeerConnectionFactoryDependencies pc_factory_dependencies; + pc_factory_dependencies.network_thread = network_thread; + pc_factory_dependencies.worker_thread = worker_thread; + pc_factory_dependencies.signaling_thread = signaling_thread; + pc_factory_dependencies.socket_factory = socket_server; + pc_factory_dependencies.network_manager = std::move(network_manager); + pc_factory_dependencies.task_queue_factory = CreateDefaultTaskQueueFactory(); + pc_factory_dependencies.trials = std::move(field_trials); + pc_factory_dependencies.decode_metronome = + std::make_unique(TimeDelta::Millis(8)); + + pc_factory_dependencies.adm = fake_audio_capture_module_; + if (create_media_engine) { + // Standard creation method for APM may return a null pointer when + // AudioProcessing is disabled with a build flag. Bypass that flag by + // explicitly injecting the factory. + pc_factory_dependencies.audio_processing_builder = + std::make_unique(); + EnableMediaWithDefaults(pc_factory_dependencies); + } + + if (reset_encoder_factory) { + pc_factory_dependencies.video_encoder_factory.reset(); + } + if (reset_decoder_factory) { + pc_factory_dependencies.video_decoder_factory.reset(); + } + + if (event_log_factory) { + event_log_factory_ = event_log_factory.get(); + pc_factory_dependencies.event_log_factory = std::move(event_log_factory); + } else { + pc_factory_dependencies.event_log_factory = + std::make_unique(); + } + peer_connection_factory_ = + CreateModularPeerConnectionFactory(std::move(pc_factory_dependencies)); + + if (!peer_connection_factory_) { + fake_network_manager_ = nullptr; + return false; + } + if (options) { + peer_connection_factory_->SetOptions(*options); + } + if (config) { + sdp_semantics_ = config->sdp_semantics; + } + + peer_connection_ = CreatePeerConnection(config, std::move(dependencies)); + return peer_connection_.get() != nullptr; +} + } // namespace webrtc diff --git a/pc/test/integration_test_helpers.h b/pc/test/integration_test_helpers.h index e66b978b9c..63ba2108b4 100644 --- a/pc/test/integration_test_helpers.h +++ b/pc/test/integration_test_helpers.h @@ -17,79 +17,58 @@ #include #include -#include +#include #include #include +#include #include #include #include #include -#include "absl/algorithm/container.h" +#include "absl/functional/any_invocable.h" #include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/audio_options.h" -#include "api/call/call_factory_interface.h" #include "api/candidate.h" #include "api/crypto/crypto_options.h" #include "api/data_channel_interface.h" +#include "api/field_trials.h" #include "api/field_trials_view.h" #include "api/ice_transport_interface.h" #include "api/jsep.h" +#include "api/make_ref_counted.h" #include "api/media_stream_interface.h" #include "api/media_types.h" +#include "api/metronome/metronome.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" -#include "api/rtc_event_log/rtc_event_log_factory.h" -#include "api/rtc_event_log/rtc_event_log_factory_interface.h" #include "api/rtc_event_log_output.h" +#include 
"api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" #include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_direction.h" #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" -#include "api/stats/rtc_stats.h" +#include "api/sequence_checker.h" #include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" -#include "api/task_queue/default_task_queue_factory.h" #include "api/task_queue/pending_task_safety_flag.h" -#include "api/task_queue/task_queue_factory.h" #include "api/test/mock_async_dns_resolver.h" -#include "api/transport/field_trial_based_config.h" -#include "api/uma_metrics.h" +#include "api/test/rtc_error_matchers.h" #include "api/units/time_delta.h" #include "api/video/video_rotation.h" -#include "api/video_codecs/sdp_video_format.h" -#include "api/video_codecs/video_decoder_factory.h" -#include "api/video_codecs/video_encoder_factory.h" -#include "call/call.h" #include "logging/rtc_event_log/fake_rtc_event_log_factory.h" -#include "media/base/media_engine.h" #include "media/base/stream_params.h" -#include "media/engine/fake_webrtc_video_engine.h" -#include "media/engine/webrtc_media_engine.h" -#include "media/engine/webrtc_media_engine_defaults.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "modules/audio_processing/test/audio_processing_builder_for_testing.h" -#include "p2p/base/fake_ice_transport.h" #include "p2p/base/ice_transport_internal.h" -#include "p2p/base/mock_async_resolver.h" -#include "p2p/base/p2p_constants.h" #include "p2p/base/port.h" -#include "p2p/base/port_allocator.h" #include "p2p/base/port_interface.h" -#include "p2p/base/test_stun_server.h" -#include "p2p/base/test_turn_customizer.h" -#include "p2p/base/test_turn_server.h" -#include "p2p/client/basic_port_allocator.h" -#include "pc/dtmf_sender.h" -#include "pc/local_audio_source.h" -#include "pc/media_session.h" +#include "p2p/test/fake_ice_transport.h" +#include "p2p/test/test_turn_customizer.h" +#include "p2p/test/test_turn_server.h" #include "pc/peer_connection.h" #include "pc/peer_connection_factory.h" #include "pc/peer_connection_proxy.h" -#include "pc/rtp_media_utils.h" #include "pc/session_description.h" #include "pc/test/fake_audio_capture_module.h" #include "pc/test/fake_periodic_video_source.h" @@ -99,36 +78,27 @@ #include "pc/test/mock_peer_connection_observers.h" #include "pc/video_track_source.h" #include "rtc_base/checks.h" -#include "rtc_base/event.h" -#include "rtc_base/fake_clock.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/fake_mdns_responder.h" #include "rtc_base/fake_network.h" #include "rtc_base/firewall_socket_server.h" -#include "rtc_base/gunit.h" -#include "rtc_base/helpers.h" #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" -#include "rtc_base/mdns_responder_interface.h" -#include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/socket_address.h" +#include "rtc_base/socket_factory.h" +#include "rtc_base/socket_server.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/task_queue_for_test.h" -#include "rtc_base/task_utils/repeating_task.h" -#include "rtc_base/test_certificate_verifier.h" #include "rtc_base/thread.h" -#include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" #include "rtc_base/virtual_socket_server.h" #include "system_wrappers/include/metrics.h" #include "test/gmock.h" -#include "test/scoped_key_value_config.h" 
+#include "test/gtest.h" +#include "test/wait_until.h" namespace webrtc { -using ::cricket::ContentInfo; -using ::cricket::StreamParams; -using ::rtc::SocketAddress; using ::testing::_; using ::testing::Combine; using ::testing::Contains; @@ -142,10 +112,11 @@ using ::testing::UnorderedElementsAreArray; using ::testing::Values; using RTCConfiguration = PeerConnectionInterface::RTCConfiguration; -static const int kDefaultTimeout = 10000; -static const int kMaxWaitForStatsMs = 3000; -static const int kMaxWaitForActivationMs = 5000; -static const int kMaxWaitForFramesMs = 10000; +constexpr TimeDelta kDefaultTimeout = TimeDelta::Millis(10000); +constexpr TimeDelta kLongTimeout = TimeDelta::Millis(60000); +constexpr TimeDelta kMaxWaitForStats = TimeDelta::Millis(3000); +constexpr TimeDelta kMaxWaitForActivation = TimeDelta::Millis(5000); +constexpr TimeDelta kMaxWaitForFrames = TimeDelta::Millis(10000); // Default number of audio/video frames to wait for before considering a test // successful. static const int kDefaultExpectedAudioFrameCount = 3; @@ -155,8 +126,8 @@ static const char kDataChannelLabel[] = "data_channel"; // SRTP cipher name negotiated by the tests. This must be updated if the // default changes. -static const int kDefaultSrtpCryptoSuite = rtc::kSrtpAes128CmSha1_80; -static const int kDefaultSrtpCryptoSuiteGcm = rtc::kSrtpAeadAes256Gcm; +static const int kDefaultSrtpCryptoSuite = kSrtpAes128CmSha1_80; +static const int kDefaultSrtpCryptoSuiteGcm = kSrtpAeadAes256Gcm; static const SocketAddress kDefaultLocalAddress("192.168.1.1", 0); @@ -166,22 +137,31 @@ PeerConnectionInterface::RTCOfferAnswerOptions IceRestartOfferAnswerOptions(); // Remove all stream information (SSRCs, track IDs, etc.) and "msid-semantic" // attribute from received SDP, simulating a legacy endpoint. -void RemoveSsrcsAndMsids(cricket::SessionDescription* desc); +void RemoveSsrcsAndMsids(std::unique_ptr& desc); // Removes all stream information besides the stream ids, simulating an // endpoint that only signals a=msid lines to convey stream_ids. -void RemoveSsrcsAndKeepMsids(cricket::SessionDescription* desc); +void RemoveSsrcsAndKeepMsids( + std::unique_ptr& desc); + +// Set SdpType. +void SetSdpType(std::unique_ptr& sdp, + SdpType sdpType); + +// Replaces the stream's primary SSRC and updates the first SSRC of all +// ssrc-groups. +void ReplaceFirstSsrc(StreamParams& stream, uint32_t ssrc); int FindFirstMediaStatsIndexByKind( const std::string& kind, - const std::vector& inbound_rtps); + const std::vector& inbound_rtps); -class TaskQueueMetronome : public webrtc::Metronome { +class TaskQueueMetronome : public Metronome { public: explicit TaskQueueMetronome(TimeDelta tick_period); ~TaskQueueMetronome() override; - // webrtc::Metronome implementation. + // Metronome implementation. 
void RequestCallOnNextTick(absl::AnyInvocable callback) override; TimeDelta TickPeriod() const override; @@ -204,12 +184,12 @@ class SignalingMessageReceiver { virtual ~SignalingMessageReceiver() {} }; -class MockRtpReceiverObserver : public webrtc::RtpReceiverObserverInterface { +class MockRtpReceiverObserver : public RtpReceiverObserverInterface { public: - explicit MockRtpReceiverObserver(cricket::MediaType media_type) + explicit MockRtpReceiverObserver(webrtc::MediaType media_type) : expected_media_type_(media_type) {} - void OnFirstPacketReceived(cricket::MediaType media_type) override { + void OnFirstPacketReceived(webrtc::MediaType media_type) override { ASSERT_EQ(expected_media_type_, media_type); first_packet_received_ = true; } @@ -220,7 +200,26 @@ class MockRtpReceiverObserver : public webrtc::RtpReceiverObserverInterface { private: bool first_packet_received_ = false; - cricket::MediaType expected_media_type_; + webrtc::MediaType expected_media_type_; +}; + +class MockRtpSenderObserver : public RtpSenderObserverInterface { + public: + explicit MockRtpSenderObserver(webrtc::MediaType media_type) + : expected_media_type_(media_type) {} + + void OnFirstPacketSent(webrtc::MediaType media_type) override { + ASSERT_EQ(expected_media_type_, media_type); + first_packet_sent_ = true; + } + + bool first_packet_sent() const { return first_packet_sent_; } + + virtual ~MockRtpSenderObserver() {} + + private: + bool first_packet_sent_ = false; + webrtc::MediaType expected_media_type_; }; // Helper class that wraps a peer connection, observes it, and can accept @@ -231,14 +230,23 @@ class MockRtpReceiverObserver : public webrtc::RtpReceiverObserverInterface { // advertise support of any codecs. // TODO(steveanton): See how this could become a subclass of // PeerConnectionWrapper defined in peerconnectionwrapper.h. -class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, +class PeerConnectionIntegrationWrapper : public PeerConnectionObserver, public SignalingMessageReceiver { public: - webrtc::PeerConnectionFactoryInterface* pc_factory() const { + PeerConnectionFactoryInterface* pc_factory() const { return peer_connection_factory_.get(); } - webrtc::PeerConnectionInterface* pc() const { return peer_connection_.get(); } + PeerConnectionInterface* pc() const { return peer_connection_.get(); } + + // Return the PC implementation, so that non-public interfaces + // can be used in tests. + PeerConnection* pc_internal() const { + auto* pci = + static_cast*>( + pc()); + return static_cast(pci->internal()); + } // If a signaling message receiver is set (via ConnectFakeSignaling), this // will set the whole offer/answer exchange in motion. Just need to wait for @@ -262,14 +270,16 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, // used to test SDP being applied that a PeerConnection would normally not // generate, but a non-JSEP endpoint might. void SetReceivedSdpMunger( - std::function munger) { + std::function&)> + munger) { received_sdp_munger_ = std::move(munger); } // Similar to the above, but this is run on SDP immediately after it's // generated. 
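  // Illustrative usage, not part of the patch: with the new munger
  // signatures, a test simulating a legacy endpoint installs one of the free
  // functions declared in this header on the wrapper (here called `caller`):
  //
  //   caller->SetReceivedSdpMunger(RemoveSsrcsAndMsids);
  //
  // or supplies a lambda, e.g. to retype the blob before it is applied:
  //
  //   caller->SetReceivedSdpMunger(
  //       [](std::unique_ptr<SessionDescriptionInterface>& sdp) {
  //         SetSdpType(sdp, SdpType::kPrAnswer);
  //       });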
void SetGeneratedSdpMunger( - std::function munger) { + std::function&)> + munger) { generated_sdp_munger_ = std::move(munger); } @@ -311,8 +321,8 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, ice_gathering_state_history() const { return ice_gathering_state_history_; } - std::vector - ice_candidate_pair_change_history() const { + std::vector ice_candidate_pair_change_history() + const { return ice_candidate_pair_change_history_; } @@ -326,50 +336,50 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, void AddAudioVideoTracks() { AddAudioTrack(); AddVideoTrack(); + ResetRtpSenderObservers(); } - rtc::scoped_refptr AddAudioTrack() { + scoped_refptr AddAudioTrack() { return AddTrack(CreateLocalAudioTrack()); } - rtc::scoped_refptr AddVideoTrack() { + scoped_refptr AddVideoTrack() { return AddTrack(CreateLocalVideoTrack()); } - rtc::scoped_refptr CreateLocalAudioTrack() { - cricket::AudioOptions options; + scoped_refptr CreateLocalAudioTrack() { + AudioOptions options; // Disable highpass filter so that we can get all the test audio frames. options.highpass_filter = false; - rtc::scoped_refptr source = + scoped_refptr source = peer_connection_factory_->CreateAudioSource(options); // TODO(perkj): Test audio source when it is implemented. Currently audio // always use the default input. - return peer_connection_factory_->CreateAudioTrack(rtc::CreateRandomUuid(), + return peer_connection_factory_->CreateAudioTrack(CreateRandomUuid(), source.get()); } - rtc::scoped_refptr CreateLocalVideoTrack() { - webrtc::FakePeriodicVideoSource::Config config; - config.timestamp_offset_ms = rtc::TimeMillis(); + scoped_refptr CreateLocalVideoTrack() { + FakePeriodicVideoSource::Config config; + config.timestamp_offset_ms = TimeMillis(); return CreateLocalVideoTrackInternal(config); } - rtc::scoped_refptr - CreateLocalVideoTrackWithConfig( - webrtc::FakePeriodicVideoSource::Config config) { + scoped_refptr CreateLocalVideoTrackWithConfig( + FakePeriodicVideoSource::Config config) { return CreateLocalVideoTrackInternal(config); } - rtc::scoped_refptr - CreateLocalVideoTrackWithRotation(webrtc::VideoRotation rotation) { - webrtc::FakePeriodicVideoSource::Config config; + scoped_refptr CreateLocalVideoTrackWithRotation( + VideoRotation rotation) { + FakePeriodicVideoSource::Config config; config.rotation = rotation; - config.timestamp_offset_ms = rtc::TimeMillis(); + config.timestamp_offset_ms = TimeMillis(); return CreateLocalVideoTrackInternal(config); } - rtc::scoped_refptr AddTrack( - rtc::scoped_refptr track, + scoped_refptr AddTrack( + scoped_refptr track, const std::vector& stream_ids = {}) { EXPECT_TRUE(track); if (!track) { @@ -384,9 +394,9 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, } } - std::vector> GetReceiversOfType( - cricket::MediaType media_type) { - std::vector> receivers; + std::vector> GetReceiversOfType( + webrtc::MediaType media_type) { + std::vector> receivers; for (const auto& receiver : pc()->GetReceivers()) { if (receiver->media_type() == media_type) { receivers.push_back(receiver); @@ -395,8 +405,8 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, return receivers; } - rtc::scoped_refptr GetFirstTransceiverOfType( - cricket::MediaType media_type) { + scoped_refptr GetFirstTransceiverOfType( + webrtc::MediaType media_type) { for (auto transceiver : pc()->GetTransceivers()) { if (transceiver->receiver()->media_type() == media_type) { return transceiver; 
@@ -406,22 +416,22 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, } bool SignalingStateStable() { - return pc()->signaling_state() == webrtc::PeerConnectionInterface::kStable; + return pc()->signaling_state() == PeerConnectionInterface::kStable; } bool IceGatheringStateComplete() { return pc()->ice_gathering_state() == - webrtc::PeerConnectionInterface::kIceGatheringComplete; + PeerConnectionInterface::kIceGatheringComplete; } void CreateDataChannel() { CreateDataChannel(nullptr); } - void CreateDataChannel(const webrtc::DataChannelInit* init) { + void CreateDataChannel(const DataChannelInit* init) { CreateDataChannel(kDataChannelLabel, init); } void CreateDataChannel(const std::string& label, - const webrtc::DataChannelInit* init) { + const DataChannelInit* init) { auto data_channel_or_error = pc()->CreateDataChannelOrError(label, init); ASSERT_TRUE(data_channel_or_error.ok()); data_channels_.push_back(data_channel_or_error.MoveValue()); @@ -432,18 +442,18 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, // Return the last observed data channel. DataChannelInterface* data_channel() { - if (data_channels_.size() == 0) { + if (data_channels_.empty()) { return nullptr; } return data_channels_.back().get(); } // Return all data channels. - std::vector>& data_channels() { + std::vector>& data_channels() { return data_channels_; } - const MockDataChannelObserver* data_observer() const { - if (data_observers_.size() == 0) { + MockDataChannelObserver* data_observer() const { + if (data_observers_.empty()) { return nullptr; } return data_observers_.back().get(); @@ -453,6 +463,10 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, return data_observers_; } + std::unique_ptr CreateAnswerForTest() { + return CreateAnswer(); + } + int audio_frames_received() const { return fake_audio_capture_module_->frames_received(); } @@ -478,28 +492,31 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, // Returns a MockStatsObserver in a state after stats gathering finished, // which can be used to access the gathered stats. - rtc::scoped_refptr OldGetStatsForTrack( - webrtc::MediaStreamTrackInterface* track) { - auto observer = rtc::make_ref_counted(); + scoped_refptr OldGetStatsForTrack( + MediaStreamTrackInterface* track) { + auto observer = make_ref_counted(); EXPECT_TRUE(peer_connection_->GetStats( observer.get(), nullptr, PeerConnectionInterface::kStatsOutputLevelStandard)); - EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return observer->called(); }, ::testing::IsTrue()), + IsRtcOk()); return observer; } // Version that doesn't take a track "filter", and gathers all stats. - rtc::scoped_refptr OldGetStats() { + scoped_refptr OldGetStats() { return OldGetStatsForTrack(nullptr); } // Synchronously gets stats and returns them. If it times out, fails the test // and returns null. 
- rtc::scoped_refptr NewGetStats() { - auto callback = - rtc::make_ref_counted(); + scoped_refptr NewGetStats() { + auto callback = make_ref_counted(); peer_connection_->GetStats(callback.get()); - EXPECT_TRUE_WAIT(callback->called(), kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return callback->called(); }, ::testing::IsTrue()), + IsRtcOk()); return callback->report(); } @@ -524,10 +541,10 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, return static_cast(rendered_width()) / rendered_height(); } - webrtc::VideoRotation rendered_rotation() { + VideoRotation rendered_rotation() { EXPECT_FALSE(fake_video_renderers_.empty()); return fake_video_renderers_.empty() - ? webrtc::kVideoRotation_0 + ? kVideoRotation_0 : fake_video_renderers_.begin()->second->rotation(); } @@ -570,20 +587,20 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, return pc()->local_streams().get(); } - webrtc::PeerConnectionInterface::SignalingState signaling_state() { + PeerConnectionInterface::SignalingState signaling_state() { return pc()->signaling_state(); } - webrtc::PeerConnectionInterface::IceConnectionState ice_connection_state() { + PeerConnectionInterface::IceConnectionState ice_connection_state() { return pc()->ice_connection_state(); } - webrtc::PeerConnectionInterface::IceConnectionState + PeerConnectionInterface::IceConnectionState standardized_ice_connection_state() { return pc()->standardized_ice_connection_state(); } - webrtc::PeerConnectionInterface::IceGatheringState ice_gathering_state() { + PeerConnectionInterface::IceGatheringState ice_gathering_state() { return pc()->ice_gathering_state(); } @@ -598,7 +615,7 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, void ResetRtpReceiverObservers() { rtp_receiver_observers_.clear(); - for (const rtc::scoped_refptr& receiver : + for (const scoped_refptr& receiver : pc()->GetReceivers()) { std::unique_ptr observer( new MockRtpReceiverObserver(receiver->media_type())); @@ -607,26 +624,41 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, } } - rtc::FakeNetworkManager* network_manager() const { - return fake_network_manager_.get(); + const std::vector>& + rtp_sender_observers() { + return rtp_sender_observers_; } - cricket::PortAllocator* port_allocator() const { return port_allocator_; } - webrtc::FakeRtcEventLogFactory* event_log_factory() const { + void ResetRtpSenderObservers() { + rtp_sender_observers_.clear(); + for (const scoped_refptr& sender : pc()->GetSenders()) { + std::unique_ptr observer( + new MockRtpSenderObserver(sender->media_type())); + sender->SetObserver(observer.get()); + rtp_sender_observers_.push_back(std::move(observer)); + } + } + + FakeNetworkManager* network_manager() const { return fake_network_manager_; } + + FakeRtcEventLogFactory* event_log_factory() const { return event_log_factory_; } - const cricket::Candidate& last_candidate_gathered() const { - return last_candidate_gathered_; + Candidate last_candidate_gathered() const { + if (last_gathered_ice_candidate_) { + return last_gathered_ice_candidate_->candidate(); + } + return Candidate(); } - const cricket::IceCandidateErrorEvent& error_event() const { - return error_event_; + const IceCandidateInterface* last_gathered_ice_candidate() const { + return last_gathered_ice_candidate_.get(); } + const IceCandidateErrorEvent& error_event() const { return error_event_; } // Sets the mDNS responder for the owned fake network manager and keeps a // 
reference to the responder. - void SetMdnsResponder( - std::unique_ptr mdns_responder) { + void SetMdnsResponder(std::unique_ptr mdns_responder) { RTC_DCHECK(mdns_responder != nullptr); mdns_responder_ = mdns_responder.get(); network_manager()->set_mdns_responder(std::move(mdns_responder)); @@ -634,99 +666,118 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, // Returns null on failure. std::unique_ptr CreateOfferAndWait() { - auto observer = - rtc::make_ref_counted(); + auto observer = make_ref_counted(); pc()->CreateOffer(observer.get(), offer_answer_options_); return WaitForDescriptionFromObserver(observer.get()); } bool Rollback() { return SetRemoteDescription( - webrtc::CreateSessionDescription(SdpType::kRollback, "")); + CreateSessionDescription(SdpType::kRollback, "")); } // Functions for querying stats. - void StartWatchingDelayStats() { - // Get the baseline numbers for audio_packets and audio_delay. - auto received_stats = NewGetStats(); - auto rtp_stats = - received_stats->GetStatsOfType()[0]; - ASSERT_TRUE(rtp_stats->relative_packet_arrival_delay.is_defined()); - ASSERT_TRUE(rtp_stats->packets_received.is_defined()); - rtp_stats_id_ = rtp_stats->id(); - audio_packets_stat_ = *rtp_stats->packets_received; - audio_delay_stat_ = *rtp_stats->relative_packet_arrival_delay; - audio_samples_stat_ = *rtp_stats->total_samples_received; - audio_concealed_stat_ = *rtp_stats->concealed_samples; - } - - void UpdateDelayStats(std::string tag, int desc_size) { - auto report = NewGetStats(); - auto rtp_stats = - report->GetAs(rtp_stats_id_); - ASSERT_TRUE(rtp_stats); - auto delta_packets = *rtp_stats->packets_received - audio_packets_stat_; - auto delta_rpad = - *rtp_stats->relative_packet_arrival_delay - audio_delay_stat_; - auto recent_delay = delta_packets > 0 ? delta_rpad / delta_packets : -1; - // The purpose of these checks is to sound the alarm early if we introduce - // serious regressions. The numbers are not acceptable for production, but - // occur on slow bots. - // - // An average relative packet arrival delay over the renegotiation of - // > 100 ms indicates that something is dramatically wrong, and will impact - // quality for sure. - // Worst bots: - // linux_x86_dbg at 0.206 -#if !defined(NDEBUG) - EXPECT_GT(0.25, recent_delay) << tag << " size " << desc_size; -#else - EXPECT_GT(0.1, recent_delay) << tag << " size " << desc_size; -#endif - auto delta_samples = - *rtp_stats->total_samples_received - audio_samples_stat_; - auto delta_concealed = - *rtp_stats->concealed_samples - audio_concealed_stat_; - // These limits should be adjusted down as we improve: - // - // Concealing more than 4000 samples during a renegotiation is unacceptable. - // But some bots are slow. - - // Worst bots: - // linux_more_configs bot at conceal count 5184 - // android_arm_rel at conceal count 9241 - // linux_x86_dbg at 15174 -#if !defined(NDEBUG) - EXPECT_GT(18000U, delta_concealed) << "Concealed " << delta_concealed - << " of " << delta_samples << " samples"; -#else - EXPECT_GT(15000U, delta_concealed) << "Concealed " << delta_concealed - << " of " << delta_samples << " samples"; -#endif - // Concealing more than 20% of samples during a renegotiation is - // unacceptable. 
- // Worst bots: - // Nondebug: Linux32 Release at conceal rate 0.606597 (CI run) - // Debug: linux_x86_dbg bot at conceal rate 0.854 - // internal bot at conceal rate 0.967 (b/294020344) - // TODO(https://crbug.com/webrtc/15393): Improve audio quality during - // renegotiation so that we can reduce these thresholds, 99% is not even - // close to the 20% deemed unacceptable above or the 0% that would be ideal. - if (delta_samples > 0) { -#if !defined(NDEBUG) - EXPECT_LT(1.0 * delta_concealed / delta_samples, 0.99) - << "Concealed " << delta_concealed << " of " << delta_samples - << " samples"; -#else - EXPECT_LT(1.0 * delta_concealed / delta_samples, 0.7) - << "Concealed " << delta_concealed << " of " << delta_samples - << " samples"; -#endif + void StartWatchingDelayStats(); + + void UpdateDelayStats(std::string tag, int desc_size); + + // Sets number of candidates expected + void ExpectCandidates(int candidate_count) { + candidates_expected_ = candidate_count; + } + + bool SetRemoteDescription(std::unique_ptr desc) { + auto observer = make_ref_counted(); + std::string sdp; + EXPECT_TRUE(desc->ToString(&sdp)); + RTC_LOG(LS_INFO) << debug_name_ + << ": SetRemoteDescription SDP: type=" << desc->type() + << " contents=\n" + << sdp; + pc()->SetRemoteDescription(std::move(desc), observer); // desc.release()); + RemoveUnusedVideoRenderers(); + EXPECT_THAT( + WaitUntil([&] { return observer->called(); }, ::testing::IsTrue()), + IsRtcOk()); + auto err = observer->error(); + if (!err.ok()) { + RTC_LOG(LS_WARNING) << debug_name_ + << ": SetRemoteDescription error: " << err.message(); } - // Increment trailing counters - audio_packets_stat_ = *rtp_stats->packets_received; - audio_delay_stat_ = *rtp_stats->relative_packet_arrival_delay; - audio_samples_stat_ = *rtp_stats->total_samples_received; - audio_concealed_stat_ = *rtp_stats->concealed_samples; + return observer->error().ok(); + } + + void NegotiateCorruptionDetectionHeader() { + for (const auto& transceiver : pc()->GetTransceivers()) { + if (transceiver->media_type() != webrtc::MediaType::VIDEO) { + continue; + } + auto extensions = transceiver->GetHeaderExtensionsToNegotiate(); + for (auto& extension : extensions) { + if (extension.uri == RtpExtension::kCorruptionDetectionUri) { + extension.direction = RtpTransceiverDirection::kSendRecv; + } + } + transceiver->SetHeaderExtensionsToNegotiate(extensions); + } + } + + uint32_t GetCorruptionScoreCount() { + scoped_refptr report = NewGetStats(); + auto inbound_stream_stats = + report->GetStatsOfType(); + for (const auto& stat : inbound_stream_stats) { + if (*stat->kind == "video") { + return stat->corruption_measurements.value_or(0); + } + } + return 0; + } + + void set_connection_change_callback( + std::function func) { + connection_change_callback_ = std::move(func); + } + + std::optional tls_version() { + return network_thread_->BlockingCall([&] { + return pc() + ->GetSctpTransport() + ->dtls_transport() + ->Information() + .tls_version(); + }); + } + + std::optional dtls_transport_role() { + return network_thread_->BlockingCall([&] { + return pc()->GetSctpTransport()->dtls_transport()->Information().role(); + }); + } + + // Setting the local description and sending the SDP message over the fake + // signaling channel are combined into the same method because the SDP + // message needs to be sent as soon as SetLocalDescription finishes, without + // waiting for the observer to be called. This ensures that ICE candidates + // don't outrace the description. 
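  // Rationale sketch (illustrative, not part of the patch): if the SDP
  // message were only sent from the SetLocalDescription observer callback, a
  // candidate gathered in the meantime could reach the remote wrapper first,
  // and AddIceCandidate() on a peer with no remote description fails. Sending
  // in the order
  //
  //   pc()->SetLocalDescription(observer.get(), desc.release());
  //   SendSdpMessage(type, sdp);   // remote side applies the description
  //   // ...candidates gathered later are signaled after this point...
  //
  // guarantees the description always arrives ahead of its candidates over
  // the fake signaling channel.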
+ bool SetLocalDescriptionAndSendSdpMessage( + std::unique_ptr desc) { + auto observer = make_ref_counted(); + RTC_LOG(LS_INFO) << debug_name_ << ": SetLocalDescriptionAndSendSdpMessage"; + SdpType type = desc->GetType(); + std::string sdp; + EXPECT_TRUE(desc->ToString(&sdp)); + RTC_LOG(LS_INFO) << debug_name_ << ": local SDP type=" << desc->type() + << " contents=\n" + << sdp; + pc()->SetLocalDescription(observer.get(), desc.release()); + RemoveUnusedVideoRenderers(); + // As mentioned above, we need to send the message immediately after + // SetLocalDescription. + SendSdpMessage(type, sdp); + EXPECT_THAT( + WaitUntil([&] { return observer->called(); }, ::testing::IsTrue()), + IsRtcOk()); + return true; } private: @@ -736,97 +787,19 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, bool Init(const PeerConnectionFactory::Options* options, const PeerConnectionInterface::RTCConfiguration* config, - webrtc::PeerConnectionDependencies dependencies, - rtc::SocketServer* socket_server, - rtc::Thread* network_thread, - rtc::Thread* worker_thread, - std::unique_ptr event_log_factory, + PeerConnectionDependencies dependencies, + SocketServer* socket_server, + Thread* network_thread, + Thread* worker_thread, + std::unique_ptr field_trials, + std::unique_ptr event_log_factory, bool reset_encoder_factory, bool reset_decoder_factory, - bool create_media_engine) { - // There's an error in this test code if Init ends up being called twice. - RTC_DCHECK(!peer_connection_); - RTC_DCHECK(!peer_connection_factory_); - - fake_network_manager_.reset(new rtc::FakeNetworkManager()); - fake_network_manager_->AddInterface(kDefaultLocalAddress); - - socket_factory_.reset(new rtc::BasicPacketSocketFactory(socket_server)); - - std::unique_ptr port_allocator( - new cricket::BasicPortAllocator(fake_network_manager_.get(), - socket_factory_.get())); - port_allocator_ = port_allocator.get(); - fake_audio_capture_module_ = FakeAudioCaptureModule::Create(); - if (!fake_audio_capture_module_) { - return false; - } - rtc::Thread* const signaling_thread = rtc::Thread::Current(); - - webrtc::PeerConnectionFactoryDependencies pc_factory_dependencies; - pc_factory_dependencies.network_thread = network_thread; - pc_factory_dependencies.worker_thread = worker_thread; - pc_factory_dependencies.signaling_thread = signaling_thread; - pc_factory_dependencies.task_queue_factory = - webrtc::CreateDefaultTaskQueueFactory(); - pc_factory_dependencies.trials = std::make_unique(); - pc_factory_dependencies.metronome = - std::make_unique(TimeDelta::Millis(8)); - cricket::MediaEngineDependencies media_deps; - media_deps.task_queue_factory = - pc_factory_dependencies.task_queue_factory.get(); - media_deps.adm = fake_audio_capture_module_; - webrtc::SetMediaEngineDefaults(&media_deps); - - if (reset_encoder_factory) { - media_deps.video_encoder_factory.reset(); - } - if (reset_decoder_factory) { - media_deps.video_decoder_factory.reset(); - } - - if (!media_deps.audio_processing) { - // If the standard Creation method for APM returns a null pointer, instead - // use the builder for testing to create an APM object. 
- media_deps.audio_processing = AudioProcessingBuilderForTesting().Create(); - } - - media_deps.trials = pc_factory_dependencies.trials.get(); - - if (create_media_engine) { - pc_factory_dependencies.media_engine = - cricket::CreateMediaEngine(std::move(media_deps)); - } - pc_factory_dependencies.call_factory = webrtc::CreateCallFactory(); - if (event_log_factory) { - event_log_factory_ = event_log_factory.get(); - pc_factory_dependencies.event_log_factory = std::move(event_log_factory); - } else { - pc_factory_dependencies.event_log_factory = - std::make_unique( - pc_factory_dependencies.task_queue_factory.get()); - } - peer_connection_factory_ = webrtc::CreateModularPeerConnectionFactory( - std::move(pc_factory_dependencies)); - - if (!peer_connection_factory_) { - return false; - } - if (options) { - peer_connection_factory_->SetOptions(*options); - } - if (config) { - sdp_semantics_ = config->sdp_semantics; - } - - dependencies.allocator = std::move(port_allocator); - peer_connection_ = CreatePeerConnection(config, std::move(dependencies)); - return peer_connection_.get() != nullptr; - } + bool create_media_engine); - rtc::scoped_refptr CreatePeerConnection( + scoped_refptr CreatePeerConnection( const PeerConnectionInterface::RTCConfiguration* config, - webrtc::PeerConnectionDependencies dependencies) { + PeerConnectionDependencies dependencies) { PeerConnectionInterface::RTCConfiguration modified_config; modified_config.sdp_semantics = sdp_semantics_; // If `config` is null, this will result in a default configuration being @@ -848,32 +821,26 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, : nullptr; } - void set_signaling_message_receiver( - SignalingMessageReceiver* signaling_message_receiver) { - signaling_message_receiver_ = signaling_message_receiver; - } - void set_signaling_delay_ms(int delay_ms) { signaling_delay_ms_ = delay_ms; } void set_signal_ice_candidates(bool signal) { signal_ice_candidates_ = signal; } - rtc::scoped_refptr CreateLocalVideoTrackInternal( - webrtc::FakePeriodicVideoSource::Config config) { + scoped_refptr CreateLocalVideoTrackInternal( + FakePeriodicVideoSource::Config config) { // Set max frame rate to 10fps to reduce the risk of test flakiness. // TODO(deadbeef): Do something more robust. 
config.frame_interval_ms = 100; video_track_sources_.emplace_back( - rtc::make_ref_counted( - config, false /* remote */)); - rtc::scoped_refptr track = + make_ref_counted(config, + false /* remote */)); + scoped_refptr track = peer_connection_factory_->CreateVideoTrack(video_track_sources_.back(), - rtc::CreateRandomUuid()); + CreateRandomUuid()); if (!local_video_renderer_) { - local_video_renderer_.reset( - new webrtc::FakeVideoTrackRenderer(track.get())); + local_video_renderer_.reset(new FakeVideoTrackRenderer(track.get())); } return track; } @@ -881,9 +848,9 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, void HandleIncomingOffer(const std::string& msg) { RTC_LOG(LS_INFO) << debug_name_ << ": HandleIncomingOffer"; std::unique_ptr desc = - webrtc::CreateSessionDescription(SdpType::kOffer, msg); + CreateSessionDescription(SdpType::kOffer, msg); if (received_sdp_munger_) { - received_sdp_munger_(desc->description()); + received_sdp_munger_(desc); } EXPECT_TRUE(SetRemoteDescription(std::move(desc))); @@ -898,14 +865,19 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, EXPECT_TRUE(SetLocalDescriptionAndSendSdpMessage(std::move(answer))); } - void HandleIncomingAnswer(const std::string& msg) { - RTC_LOG(LS_INFO) << debug_name_ << ": HandleIncomingAnswer"; + void HandleIncomingAnswer(SdpType type, const std::string& msg) { + RTC_LOG(LS_INFO) << debug_name_ << ": HandleIncomingAnswer of type " + << SdpTypeToString(type); std::unique_ptr desc = - webrtc::CreateSessionDescription(SdpType::kAnswer, msg); + CreateSessionDescription(type, msg); if (received_sdp_munger_) { - received_sdp_munger_(desc->description()); + received_sdp_munger_(desc); + if (!desc) { + // The munger took ownership of the answer, presumably so that it can + // be applied later. + RTC_LOG(LS_INFO) << debug_name_ << ": answer NOT applied"; + return; + } } - EXPECT_TRUE(SetRemoteDescription(std::move(desc))); // Set the RtpReceiverObserver after receivers are created. ResetRtpReceiverObservers(); @@ -913,55 +885,26 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, // Returns null on failure. std::unique_ptr CreateAnswer() { - auto observer = - rtc::make_ref_counted(); + auto observer = make_ref_counted(); pc()->CreateAnswer(observer.get(), offer_answer_options_); return WaitForDescriptionFromObserver(observer.get()); } std::unique_ptr WaitForDescriptionFromObserver( MockCreateSessionDescriptionObserver* observer) { - EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return observer->called(); }, ::testing::IsTrue()), + IsRtcOk()); if (!observer->result()) { return nullptr; } auto description = observer->MoveDescription(); if (generated_sdp_munger_) { - generated_sdp_munger_(description->description()); + generated_sdp_munger_(description); } return description; } - // Setting the local description and sending the SDP message over the fake - // signaling channel are combined into the same method because the SDP - // message needs to be sent as soon as SetLocalDescription finishes, without - // waiting for the observer to be called. This ensures that ICE candidates - // don't outrace the description.
- bool SetLocalDescriptionAndSendSdpMessage( - std::unique_ptr desc) { - auto observer = rtc::make_ref_counted(); - RTC_LOG(LS_INFO) << debug_name_ << ": SetLocalDescriptionAndSendSdpMessage"; - SdpType type = desc->GetType(); - std::string sdp; - EXPECT_TRUE(desc->ToString(&sdp)); - RTC_LOG(LS_INFO) << debug_name_ << ": local SDP contents=\n" << sdp; - pc()->SetLocalDescription(observer.get(), desc.release()); - RemoveUnusedVideoRenderers(); - // As mentioned above, we need to send the message immediately after - // SetLocalDescription. - SendSdpMessage(type, sdp); - EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout); - return true; - } - - bool SetRemoteDescription(std::unique_ptr desc) { - auto observer = rtc::make_ref_counted(); - RTC_LOG(LS_INFO) << debug_name_ << ": SetRemoteDescription"; - pc()->SetRemoteDescription(observer.get(), desc.release()); - RemoveUnusedVideoRenderers(); - EXPECT_TRUE_WAIT(observer->called(), kDefaultTimeout); - return observer->result(); - } // This is a work around to remove unused fake_video_renderers from // transceivers that have either stopped or are no longer receiving. @@ -975,7 +918,7 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, // Note - we don't check for direction here. This function is called // before direction is set, and in that case, we should not remove // the renderer. - if (transceiver->receiver()->media_type() == cricket::MEDIA_TYPE_VIDEO) { + if (transceiver->receiver()->media_type() == webrtc::MediaType::VIDEO) { active_renderers.insert(transceiver->receiver()->track()->id()); } } @@ -996,7 +939,7 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, if (signaling_delay_ms_ == 0) { RelaySdpMessageIfReceiverExists(type, msg); } else { - rtc::Thread::Current()->PostDelayedTask( + Thread::Current()->PostDelayedTask( SafeTask(task_safety_.flag(), [this, type, msg] { RelaySdpMessageIfReceiverExists(type, msg); @@ -1019,7 +962,7 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, if (signaling_delay_ms_ == 0) { RelayIceMessageIfReceiverExists(sdp_mid, sdp_mline_index, msg); } else { - rtc::Thread::Current()->PostDelayedTask( + Thread::Current()->PostDelayedTask( SafeTask(task_safety_.flag(), [this, sdp_mid, sdp_mline_index, msg] { RelayIceMessageIfReceiverExists(sdp_mid, sdp_mline_index, @@ -1039,11 +982,17 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, } // SignalingMessageReceiver callbacks. 
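  // These callbacks are the receiving end of the fake signaling channel. A
  // minimal sketch of how a test fixture is assumed to cross-wire two
  // wrappers, using the caller()/callee() accessors defined further down:
  //
  //   caller()->set_signaling_message_receiver(callee());
  //   callee()->set_signaling_message_receiver(caller());
  //
  // From then on, every locally generated SDP message or ICE candidate is
  // relayed (after signaling_delay_ms_, if set) to the peer's
  // ReceiveSdpMessage()/ReceiveIceMessage() below.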
+ public: + void set_signaling_message_receiver( + SignalingMessageReceiver* signaling_message_receiver) { + signaling_message_receiver_ = signaling_message_receiver; + } + void ReceiveSdpMessage(SdpType type, const std::string& msg) override { if (type == SdpType::kOffer) { HandleIncomingOffer(msg); } else { - HandleIncomingAnswer(msg); + HandleIncomingAnswer(type, msg); } } @@ -1051,25 +1000,28 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, int sdp_mline_index, const std::string& msg) override { RTC_LOG(LS_INFO) << debug_name_ << ": ReceiveIceMessage"; - absl::optional result; - pc()->AddIceCandidate(absl::WrapUnique(webrtc::CreateIceCandidate( + std::optional result; + pc()->AddIceCandidate(absl::WrapUnique(CreateIceCandidate( sdp_mid, sdp_mline_index, msg, nullptr)), [&result](RTCError r) { result = r; }); - EXPECT_TRUE_WAIT(result.has_value(), kDefaultTimeout); + EXPECT_THAT( + WaitUntil([&] { return result.has_value(); }, ::testing::IsTrue()), + IsRtcOk()); EXPECT_TRUE(result.value().ok()); } + private: // PeerConnectionObserver callbacks. void OnSignalingChange( - webrtc::PeerConnectionInterface::SignalingState new_state) override { + PeerConnectionInterface::SignalingState new_state) override { EXPECT_EQ(pc()->signaling_state(), new_state); peer_connection_signaling_state_history_.push_back(new_state); } - void OnAddTrack(rtc::scoped_refptr receiver, - const std::vector>& + void OnAddTrack(scoped_refptr receiver, + const std::vector>& streams) override { - if (receiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { - rtc::scoped_refptr video_track( + if (receiver->media_type() == webrtc::MediaType::VIDEO) { + scoped_refptr video_track( static_cast(receiver->track().get())); ASSERT_TRUE(fake_video_renderers_.find(video_track->id()) == fake_video_renderers_.end()); @@ -1077,9 +1029,8 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, std::make_unique(video_track.get()); } } - void OnRemoveTrack( - rtc::scoped_refptr receiver) override { - if (receiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { + void OnRemoveTrack(scoped_refptr receiver) override { + if (receiver->media_type() == webrtc::MediaType::VIDEO) { auto it = fake_video_renderers_.find(receiver->track()->id()); if (it != fake_video_renderers_.end()) { fake_video_renderers_.erase(it); @@ -1090,44 +1041,48 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, } void OnRenegotiationNeeded() override {} void OnIceConnectionChange( - webrtc::PeerConnectionInterface::IceConnectionState new_state) override { + PeerConnectionInterface::IceConnectionState new_state) override { EXPECT_EQ(pc()->ice_connection_state(), new_state); ice_connection_state_history_.push_back(new_state); } void OnStandardizedIceConnectionChange( - webrtc::PeerConnectionInterface::IceConnectionState new_state) override { + PeerConnectionInterface::IceConnectionState new_state) override { standardized_ice_connection_state_history_.push_back(new_state); } + void OnConnectionChange( - webrtc::PeerConnectionInterface::PeerConnectionState new_state) override { + PeerConnectionInterface::PeerConnectionState new_state) override { peer_connection_state_history_.push_back(new_state); + if (connection_change_callback_) { + connection_change_callback_(new_state); + } } void OnIceGatheringChange( - webrtc::PeerConnectionInterface::IceGatheringState new_state) override { + PeerConnectionInterface::IceGatheringState new_state) override { EXPECT_EQ(pc()->ice_gathering_state(), 
new_state); ice_gathering_state_history_.push_back(new_state); } void OnIceSelectedCandidatePairChanged( - const cricket::CandidatePairChangeEvent& event) { + const CandidatePairChangeEvent& event) { ice_candidate_pair_change_history_.push_back(event); } - void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override { + void OnIceCandidate(const IceCandidateInterface* candidate) override { RTC_LOG(LS_INFO) << debug_name_ << ": OnIceCandidate"; if (remote_async_dns_resolver_) { const auto& local_candidate = candidate->candidate(); if (local_candidate.address().IsUnresolvedIP()) { - RTC_DCHECK(local_candidate.type() == cricket::LOCAL_PORT_TYPE); + RTC_DCHECK(local_candidate.is_local()); const auto resolved_ip = mdns_responder_->GetMappedAddressForName( local_candidate.address().hostname()); RTC_DCHECK(!resolved_ip.IsNil()); remote_async_dns_resolved_addr_ = local_candidate.address(); remote_async_dns_resolved_addr_.SetResolvedIP(resolved_ip); EXPECT_CALL(*remote_async_dns_resolver_, Start(_, _)) - .WillOnce([](const rtc::SocketAddress& addr, + .WillOnce([](const SocketAddress& addr, absl::AnyInvocable callback) { callback(); }); EXPECT_CALL(*remote_async_dns_resolver_, result()) .WillOnce(ReturnRef(remote_async_dns_resolver_result_)); @@ -1137,6 +1092,9 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, } } + // Check if we expected to have a candidate. + EXPECT_GT(candidates_expected_, 1); + candidates_expected_--; std::string ice_sdp; EXPECT_TRUE(candidate->ToString(&ice_sdp)); if (signaling_message_receiver_ == nullptr || !signal_ice_candidates_) { @@ -1144,74 +1102,87 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, return; } SendIceMessage(candidate->sdp_mid(), candidate->sdp_mline_index(), ice_sdp); - last_candidate_gathered_ = candidate->candidate(); + last_gathered_ice_candidate_ = + CreateIceCandidate(candidate->sdp_mid(), candidate->sdp_mline_index(), + candidate->candidate()); } + void OnIceCandidateError(const std::string& address, int port, const std::string& url, int error_code, const std::string& error_text) override { - error_event_ = cricket::IceCandidateErrorEvent(address, port, url, - error_code, error_text); + error_event_ = + IceCandidateErrorEvent(address, port, url, error_code, error_text); } void OnDataChannel( - rtc::scoped_refptr data_channel) override { + scoped_refptr data_channel) override { RTC_LOG(LS_INFO) << debug_name_ << ": OnDataChannel"; data_channels_.push_back(data_channel); data_observers_.push_back( std::make_unique(data_channel.get())); } + bool IdExists(const RtpHeaderExtensions& extensions, int id) { + for (const auto& extension : extensions) { + if (extension.id == id) { + return true; + } + } + return false; + } std::string debug_name_; - std::unique_ptr fake_network_manager_; - std::unique_ptr socket_factory_; + // Network manager is owned by the `peer_connection_factory_`. + FakeNetworkManager* fake_network_manager_ = nullptr; + Thread* network_thread_; + // Reference to the mDNS responder owned by `fake_network_manager_` after set. - webrtc::FakeMdnsResponder* mdns_responder_ = nullptr; + FakeMdnsResponder* mdns_responder_ = nullptr; - rtc::scoped_refptr peer_connection_; - rtc::scoped_refptr - peer_connection_factory_; + scoped_refptr peer_connection_; + scoped_refptr peer_connection_factory_; - cricket::PortAllocator* port_allocator_; // Needed to keep track of number of frames sent. 
- rtc::scoped_refptr fake_audio_capture_module_; + scoped_refptr fake_audio_capture_module_; // Needed to keep track of number of frames received. - std::map> + std::map> fake_video_renderers_; // Needed to ensure frames aren't received for removed tracks. - std::vector> + std::vector> removed_fake_video_renderers_; // For remote peer communication. SignalingMessageReceiver* signaling_message_receiver_ = nullptr; int signaling_delay_ms_ = 0; bool signal_ice_candidates_ = true; - cricket::Candidate last_candidate_gathered_; - cricket::IceCandidateErrorEvent error_event_; + std::unique_ptr last_gathered_ice_candidate_; + IceCandidateErrorEvent error_event_; // Store references to the video sources we've created, so that we can stop // them, if required. - std::vector> - video_track_sources_; + std::vector> video_track_sources_; // `local_video_renderer_` attached to the first created local video track. - std::unique_ptr local_video_renderer_; + std::unique_ptr local_video_renderer_; SdpSemantics sdp_semantics_; PeerConnectionInterface::RTCOfferAnswerOptions offer_answer_options_; - std::function received_sdp_munger_; - std::function generated_sdp_munger_; + std::function&)> + received_sdp_munger_; + std::function&)> + generated_sdp_munger_; std::function remote_offer_handler_; MockAsyncDnsResolver* remote_async_dns_resolver_ = nullptr; // Result variables for the mock DNS resolver NiceMock remote_async_dns_resolver_result_; - rtc::SocketAddress remote_async_dns_resolved_addr_; + SocketAddress remote_async_dns_resolved_addr_; // All data channels either created or observed on this peerconnection - std::vector> data_channels_; + std::vector> data_channels_; std::vector> data_observers_; std::vector> rtp_receiver_observers_; + std::vector> rtp_sender_observers_; std::vector ice_connection_state_history_; @@ -1221,11 +1192,13 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, peer_connection_state_history_; std::vector ice_gathering_state_history_; - std::vector - ice_candidate_pair_change_history_; + std::vector ice_candidate_pair_change_history_; std::vector peer_connection_signaling_state_history_; - webrtc::FakeRtcEventLogFactory* event_log_factory_; + FakeRtcEventLogFactory* event_log_factory_; + + // Number of ICE candidates expected. The default is no limit. 
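  // Tests can lower the budget with ExpectCandidates(); OnIceCandidate() above
  // then checks that the remaining count is still greater than one and
  // decrements it for every candidate that is actually gathered.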
+ int candidates_expected_ = std::numeric_limits::max(); // Variables for tracking delay stats on an audio track int audio_packets_stat_ = 0; @@ -1234,12 +1207,15 @@ class PeerConnectionIntegrationWrapper : public webrtc::PeerConnectionObserver, uint64_t audio_concealed_stat_ = 0; std::string rtp_stats_id_; + std::function + connection_change_callback_ = nullptr; + ScopedTaskSafety task_safety_; friend class PeerConnectionIntegrationBaseTest; }; -class MockRtcEventLogOutput : public webrtc::RtcEventLogOutput { +class MockRtcEventLogOutput : public RtcEventLogOutput { public: virtual ~MockRtcEventLogOutput() = default; MOCK_METHOD(bool, IsActive, (), (const, override)); @@ -1351,29 +1327,29 @@ class MediaExpectations { int callee_video_frames_expected_ = 0; }; -class MockIceTransport : public webrtc::IceTransportInterface { +class MockIceTransport : public IceTransportInterface { public: MockIceTransport(const std::string& name, int component) - : internal_(std::make_unique( - name, - component, - nullptr /* network_thread */)) {} + : internal_( + std::make_unique(name, + component, + nullptr /* network_thread */)) {} ~MockIceTransport() = default; - cricket::IceTransportInternal* internal() { return internal_.get(); } + IceTransportInternal* internal() { return internal_.get(); } private: - std::unique_ptr internal_; + std::unique_ptr internal_; }; class MockIceTransportFactory : public IceTransportFactory { public: ~MockIceTransportFactory() override = default; - rtc::scoped_refptr CreateIceTransport( + scoped_refptr CreateIceTransport( const std::string& transport_name, int component, IceTransportInit init) { RecordIceTransportCreated(); - return rtc::make_ref_counted(transport_name, component); + return make_ref_counted(transport_name, component); } MOCK_METHOD(void, RecordIceTransportCreated, ()); }; @@ -1384,22 +1360,20 @@ class MockIceTransportFactory : public IceTransportFactory { // of everything else (including "PeerConnectionFactory"s). class PeerConnectionIntegrationBaseTest : public ::testing::Test { public: - PeerConnectionIntegrationBaseTest( - SdpSemantics sdp_semantics, - absl::optional field_trials = absl::nullopt) + static constexpr char kCallerName[] = "Caller"; + static constexpr char kCalleeName[] = "Callee"; + + explicit PeerConnectionIntegrationBaseTest(SdpSemantics sdp_semantics) : sdp_semantics_(sdp_semantics), - ss_(new rtc::VirtualSocketServer()), - fss_(new rtc::FirewallSocketServer(ss_.get())), - network_thread_(new rtc::Thread(fss_.get())), - worker_thread_(rtc::Thread::Create()), - // TODO(bugs.webrtc.org/10335): Pass optional ScopedKeyValueConfig. - field_trials_(new test::ScopedKeyValueConfig( - field_trials.has_value() ? *field_trials : "")) { + ss_(new VirtualSocketServer()), + fss_(new FirewallSocketServer(ss_.get())), + network_thread_(new Thread(fss_.get())), + worker_thread_(Thread::Create()) { network_thread_->SetName("PCNetworkThread", this); worker_thread_->SetName("PCWorkerThread", this); RTC_CHECK(network_thread_->Start()); RTC_CHECK(worker_thread_->Start()); - webrtc::metrics::Reset(); + metrics::Reset(); } ~PeerConnectionIntegrationBaseTest() { @@ -1436,13 +1410,30 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { // are connected. This is an important distinction. Once we have separate // ICE and DTLS state, this check needs to use the DTLS state. 
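  // In the tests below this predicate is typically polled via WaitUntil, e.g.
  //   ASSERT_THAT(WaitUntil([&] { return DtlsConnected(); },
  //                         ::testing::IsTrue()),
  //               IsRtcOk());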
return (callee()->ice_connection_state() == - webrtc::PeerConnectionInterface::kIceConnectionConnected || + PeerConnectionInterface::kIceConnectionConnected || callee()->ice_connection_state() == - webrtc::PeerConnectionInterface::kIceConnectionCompleted) && + PeerConnectionInterface::kIceConnectionCompleted) && (caller()->ice_connection_state() == - webrtc::PeerConnectionInterface::kIceConnectionConnected || + PeerConnectionInterface::kIceConnectionConnected || caller()->ice_connection_state() == - webrtc::PeerConnectionInterface::kIceConnectionCompleted); + PeerConnectionInterface::kIceConnectionCompleted); + } + + // Sets field trials to pass to created PeerConnectionWrapper. + // Must be called before PeerConnectionWrappers are created. + void SetFieldTrials(absl::string_view field_trials) { + RTC_CHECK(caller_ == nullptr); + RTC_CHECK(callee_ == nullptr); + field_trials_ = std::string(field_trials); + } + + // Sets field trials to pass to created PeerConnectionWrapper key:ed on + // debug_name. Must be called before PeerConnectionWrappers are created. + void SetFieldTrials(absl::string_view debug_name, + absl::string_view field_trials) { + RTC_CHECK(caller_ == nullptr); + RTC_CHECK(callee_ == nullptr); + field_trials_overrides_[std::string(debug_name)] = field_trials; } // When `event_log_factory` is null, the default implementation of the event @@ -1451,8 +1442,8 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { const std::string& debug_name, const PeerConnectionFactory::Options* options, const RTCConfiguration* config, - webrtc::PeerConnectionDependencies dependencies, - std::unique_ptr event_log_factory, + PeerConnectionDependencies dependencies, + std::unique_ptr event_log_factory, bool reset_encoder_factory, bool reset_decoder_factory, bool create_media_engine = true) { @@ -1468,8 +1459,14 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { std::unique_ptr client( new PeerConnectionIntegrationWrapper(debug_name)); + std::string field_trials = field_trials_; + auto it = field_trials_overrides_.find(debug_name); + if (it != field_trials_overrides_.end()) { + field_trials = it->second; + } if (!client->Init(options, &modified_config, std::move(dependencies), fss_.get(), network_thread_.get(), worker_thread_.get(), + FieldTrials::CreateNoGlobal(field_trials), std::move(event_log_factory), reset_encoder_factory, reset_decoder_factory, create_media_engine)) { return nullptr; @@ -1482,10 +1479,10 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { const std::string& debug_name, const PeerConnectionFactory::Options* options, const RTCConfiguration* config, - webrtc::PeerConnectionDependencies dependencies) { + PeerConnectionDependencies dependencies) { return CreatePeerConnectionWrapper( debug_name, options, config, std::move(dependencies), - std::make_unique(), + std::make_unique(), /*reset_encoder_factory=*/false, /*reset_decoder_factory=*/false); } @@ -1506,49 +1503,50 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { // callee PeerConnections. 
SdpSemantics original_semantics = sdp_semantics_; sdp_semantics_ = caller_semantics; - caller_ = CreatePeerConnectionWrapper( - "Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr), - nullptr, - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); + caller_ = CreatePeerConnectionWrapper(kCallerName, nullptr, nullptr, + PeerConnectionDependencies(nullptr), + nullptr, + /*reset_encoder_factory=*/false, + /*reset_decoder_factory=*/false); sdp_semantics_ = callee_semantics; - callee_ = CreatePeerConnectionWrapper( - "Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr), - nullptr, - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); + callee_ = CreatePeerConnectionWrapper(kCalleeName, nullptr, nullptr, + PeerConnectionDependencies(nullptr), + nullptr, + /*reset_encoder_factory=*/false, + /*reset_decoder_factory=*/false); sdp_semantics_ = original_semantics; return caller_ && callee_; } bool CreatePeerConnectionWrappersWithConfig( const PeerConnectionInterface::RTCConfiguration& caller_config, - const PeerConnectionInterface::RTCConfiguration& callee_config) { + const PeerConnectionInterface::RTCConfiguration& callee_config, + bool create_media_engine = true) { caller_ = CreatePeerConnectionWrapper( - "Caller", nullptr, &caller_config, - webrtc::PeerConnectionDependencies(nullptr), nullptr, + kCallerName, nullptr, &caller_config, + PeerConnectionDependencies(nullptr), nullptr, /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); + /*reset_decoder_factory=*/false, create_media_engine); callee_ = CreatePeerConnectionWrapper( - "Callee", nullptr, &callee_config, - webrtc::PeerConnectionDependencies(nullptr), nullptr, + kCalleeName, nullptr, &callee_config, + PeerConnectionDependencies(nullptr), nullptr, /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); + /*reset_decoder_factory=*/false, create_media_engine); return caller_ && callee_; } bool CreatePeerConnectionWrappersWithConfigAndDeps( const PeerConnectionInterface::RTCConfiguration& caller_config, - webrtc::PeerConnectionDependencies caller_dependencies, + PeerConnectionDependencies caller_dependencies, const PeerConnectionInterface::RTCConfiguration& callee_config, - webrtc::PeerConnectionDependencies callee_dependencies) { + PeerConnectionDependencies callee_dependencies) { caller_ = - CreatePeerConnectionWrapper("Caller", nullptr, &caller_config, + CreatePeerConnectionWrapper(kCallerName, nullptr, &caller_config, std::move(caller_dependencies), nullptr, /*reset_encoder_factory=*/false, /*reset_decoder_factory=*/false); callee_ = - CreatePeerConnectionWrapper("Callee", nullptr, &callee_config, + CreatePeerConnectionWrapper(kCalleeName, nullptr, &callee_config, std::move(callee_dependencies), nullptr, /*reset_encoder_factory=*/false, /*reset_decoder_factory=*/false); @@ -1558,27 +1556,27 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { bool CreatePeerConnectionWrappersWithOptions( const PeerConnectionFactory::Options& caller_options, const PeerConnectionFactory::Options& callee_options) { - caller_ = CreatePeerConnectionWrapper( - "Caller", &caller_options, nullptr, - webrtc::PeerConnectionDependencies(nullptr), nullptr, - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); - callee_ = CreatePeerConnectionWrapper( - "Callee", &callee_options, nullptr, - webrtc::PeerConnectionDependencies(nullptr), nullptr, - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false); + caller_ = 
CreatePeerConnectionWrapper(kCallerName, &caller_options, nullptr, + PeerConnectionDependencies(nullptr), + nullptr, + /*reset_encoder_factory=*/false, + /*reset_decoder_factory=*/false); + callee_ = CreatePeerConnectionWrapper(kCalleeName, &callee_options, nullptr, + PeerConnectionDependencies(nullptr), + nullptr, + /*reset_encoder_factory=*/false, + /*reset_decoder_factory=*/false); return caller_ && callee_; } bool CreatePeerConnectionWrappersWithFakeRtcEventLog() { PeerConnectionInterface::RTCConfiguration default_config; caller_ = CreatePeerConnectionWrapperWithFakeRtcEventLog( - "Caller", nullptr, &default_config, - webrtc::PeerConnectionDependencies(nullptr)); + kCallerName, nullptr, &default_config, + PeerConnectionDependencies(nullptr)); callee_ = CreatePeerConnectionWrapperWithFakeRtcEventLog( - "Callee", nullptr, &default_config, - webrtc::PeerConnectionDependencies(nullptr)); + kCalleeName, nullptr, &default_config, + PeerConnectionDependencies(nullptr)); return caller_ && callee_; } @@ -1588,7 +1586,7 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { new FakeRTCCertificateGenerator()); cert_generator->use_alternate_key(); - webrtc::PeerConnectionDependencies dependencies(nullptr); + PeerConnectionDependencies dependencies(nullptr); dependencies.cert_generator = std::move(cert_generator); return CreatePeerConnectionWrapper("New Peer", nullptr, nullptr, std::move(dependencies), nullptr, @@ -1598,12 +1596,12 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { bool CreateOneDirectionalPeerConnectionWrappers(bool caller_to_callee) { caller_ = CreatePeerConnectionWrapper( - "Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr), + kCallerName, nullptr, nullptr, PeerConnectionDependencies(nullptr), nullptr, /*reset_encoder_factory=*/!caller_to_callee, /*reset_decoder_factory=*/caller_to_callee); callee_ = CreatePeerConnectionWrapper( - "Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr), + kCalleeName, nullptr, nullptr, PeerConnectionDependencies(nullptr), nullptr, /*reset_encoder_factory=*/caller_to_callee, /*reset_decoder_factory=*/!caller_to_callee); @@ -1611,31 +1609,31 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { } bool CreatePeerConnectionWrappersWithoutMediaEngine() { - caller_ = CreatePeerConnectionWrapper( - "Caller", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr), - nullptr, - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false, - /*create_media_engine=*/false); - callee_ = CreatePeerConnectionWrapper( - "Callee", nullptr, nullptr, webrtc::PeerConnectionDependencies(nullptr), - nullptr, - /*reset_encoder_factory=*/false, - /*reset_decoder_factory=*/false, - /*create_media_engine=*/false); + caller_ = CreatePeerConnectionWrapper(kCallerName, nullptr, nullptr, + PeerConnectionDependencies(nullptr), + nullptr, + /*reset_encoder_factory=*/false, + /*reset_decoder_factory=*/false, + /*create_media_engine=*/false); + callee_ = CreatePeerConnectionWrapper(kCalleeName, nullptr, nullptr, + PeerConnectionDependencies(nullptr), + nullptr, + /*reset_encoder_factory=*/false, + /*reset_decoder_factory=*/false, + /*create_media_engine=*/false); return caller_ && callee_; } - cricket::TestTurnServer* CreateTurnServer( - rtc::SocketAddress internal_address, - rtc::SocketAddress external_address, - cricket::ProtocolType type = cricket::ProtocolType::PROTO_UDP, + TestTurnServer* CreateTurnServer( + SocketAddress internal_address, + SocketAddress external_address, + 
ProtocolType type = ProtocolType::PROTO_UDP, const std::string& common_name = "test turn server") { - rtc::Thread* thread = network_thread(); - rtc::SocketFactory* socket_factory = fss_.get(); - std::unique_ptr turn_server; + Thread* thread = network_thread(); + SocketFactory* socket_factory = fss_.get(); + std::unique_ptr turn_server; SendTask(network_thread(), [&] { - turn_server = std::make_unique( + turn_server = std::make_unique( thread, socket_factory, internal_address, external_address, type, /*ignore_bad_certs=*/true, common_name); }); @@ -1644,11 +1642,10 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { return turn_servers_.back().get(); } - cricket::TestTurnCustomizer* CreateTurnCustomizer() { - std::unique_ptr turn_customizer; - SendTask(network_thread(), [&] { - turn_customizer = std::make_unique(); - }); + TestTurnCustomizer* CreateTurnCustomizer() { + std::unique_ptr turn_customizer; + SendTask(network_thread(), + [&] { turn_customizer = std::make_unique(); }); turn_customizers_.push_back(std::move(turn_customizer)); // Interactions with the turn customizer should be done on the network // thread. @@ -1658,7 +1655,7 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { // Checks that the function counters for a TestTurnCustomizer are greater than // 0. void ExpectTurnCustomizerCountersIncremented( - cricket::TestTurnCustomizer* turn_customizer) { + TestTurnCustomizer* turn_customizer) { SendTask(network_thread(), [turn_customizer] { EXPECT_GT(turn_customizer->allow_channel_data_cnt_, 0u); EXPECT_GT(turn_customizer->modify_cnt_, 0u); @@ -1692,7 +1689,7 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { // Messages may get lost on the unreliable DataChannel, so we send multiple // times to avoid test flakiness. - void SendRtpDataWithRetries(webrtc::DataChannelInterface* dc, + void SendRtpDataWithRetries(DataChannelInterface* dc, const std::string& data, int retries) { for (int i = 0; i < retries; ++i) { @@ -1700,9 +1697,9 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { } } - rtc::Thread* network_thread() { return network_thread_.get(); } + Thread* network_thread() { return network_thread_.get(); } - rtc::VirtualSocketServer* virtual_socket_server() { return ss_.get(); } + VirtualSocketServer* virtual_socket_server() { return ss_.get(); } PeerConnectionIntegrationWrapper* caller() { return caller_.get(); } @@ -1740,16 +1737,7 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { return old; } - void SetPortAllocatorFlags(uint32_t caller_flags, uint32_t callee_flags) { - SendTask(network_thread(), [this, caller_flags] { - caller()->port_allocator()->set_flags(caller_flags); - }); - SendTask(network_thread(), [this, callee_flags] { - callee()->port_allocator()->set_flags(callee_flags); - }); - } - - rtc::FirewallSocketServer* firewall() const { return fss_.get(); } + FirewallSocketServer* firewall() const { return fss_.get(); } // Expects the provided number of new frames to be received within // kMaxWaitForFramesMs. The new expected frames are specified in @@ -1789,15 +1777,19 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { } // Wait for the expected frames. 
- EXPECT_TRUE_WAIT(caller()->audio_frames_received() >= - total_caller_audio_frames_expected && - caller()->min_video_frames_received_per_track() >= - total_caller_video_frames_expected && - callee()->audio_frames_received() >= - total_callee_audio_frames_expected && - callee()->min_video_frames_received_per_track() >= - total_callee_video_frames_expected, - kMaxWaitForFramesMs); + EXPECT_THAT(WaitUntil( + [&] { + return caller()->audio_frames_received() >= + total_caller_audio_frames_expected && + caller()->min_video_frames_received_per_track() >= + total_caller_video_frames_expected && + callee()->audio_frames_received() >= + total_callee_audio_frames_expected && + callee()->min_video_frames_received_per_track() >= + total_callee_video_frames_expected; + }, + ::testing::IsTrue(), {.timeout = kMaxWaitForFrames}), + IsRtcOk()); bool expectations_correct = caller()->audio_frames_received() >= total_caller_audio_frames_expected && @@ -1876,9 +1868,12 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { caller()->AddAudioVideoTracks(); callee()->AddAudioVideoTracks(); caller()->CreateAndSetAndSignalOffer(); - ASSERT_TRUE_WAIT(DtlsConnected(), kDefaultTimeout); - EXPECT_EQ_WAIT(rtc::SrtpCryptoSuiteToName(expected_cipher_suite), - caller()->OldGetStats()->SrtpCipher(), kDefaultTimeout); + ASSERT_THAT(WaitUntil([&] { return DtlsConnected(); }, ::testing::IsTrue()), + IsRtcOk()); + EXPECT_THAT( + WaitUntil([&] { return caller()->OldGetStats()->SrtpCipher(); }, + ::testing::Eq(SrtpCryptoSuiteToName(expected_cipher_suite))), + IsRtcOk()); } void TestGcmNegotiationUsesCipherSuite(bool local_gcm_enabled, @@ -1899,29 +1894,28 @@ class PeerConnectionIntegrationBaseTest : public ::testing::Test { expected_cipher_suite); } - const FieldTrialsView& trials() const { return *field_trials_.get(); } - protected: SdpSemantics sdp_semantics_; private: - rtc::AutoThread main_thread_; // Used as the signal thread by most tests. + AutoThread main_thread_; // Used as the signal thread by most tests. // `ss_` is used by `network_thread_` so it must be destroyed later. - std::unique_ptr ss_; - std::unique_ptr fss_; + std::unique_ptr ss_; + std::unique_ptr fss_; // `network_thread_` and `worker_thread_` are used by both // `caller_` and `callee_` so they must be destroyed // later. - std::unique_ptr network_thread_; - std::unique_ptr worker_thread_; + std::unique_ptr network_thread_; + std::unique_ptr worker_thread_; // The turn servers and turn customizers should be accessed & deleted on the // network thread to avoid a race with the socket read/write that occurs // on the network thread. 
- std::vector> turn_servers_; - std::vector> turn_customizers_; + std::vector> turn_servers_; + std::vector> turn_customizers_; std::unique_ptr caller_; std::unique_ptr callee_; - std::unique_ptr field_trials_; + std::string field_trials_; + std::map field_trials_overrides_; }; } // namespace webrtc diff --git a/pc/test/mock_channel_interface.h b/pc/test/mock_channel_interface.h index 6b85ed8d11..b86f168433 100644 --- a/pc/test/mock_channel_interface.h +++ b/pc/test/mock_channel_interface.h @@ -11,21 +11,27 @@ #ifndef PC_TEST_MOCK_CHANNEL_INTERFACE_H_ #define PC_TEST_MOCK_CHANNEL_INTERFACE_H_ +#include #include #include +#include "absl/strings/string_view.h" +#include "api/jsep.h" +#include "api/media_types.h" #include "media/base/media_channel.h" +#include "media/base/stream_params.h" #include "pc/channel_interface.h" +#include "pc/rtp_transport_internal.h" #include "test/gmock.h" -namespace cricket { +namespace webrtc { // Mock class for BaseChannel. -// Use this class in unit tests to avoid dependecy on a specific +// Use this class in unit tests to avoid dependency on a specific // implementation of BaseChannel. -class MockChannelInterface : public cricket::ChannelInterface { +class MockChannelInterface : public ChannelInterface { public: - MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); + MOCK_METHOD(MediaType, media_type, (), (const, override)); MOCK_METHOD(VideoChannel*, AsVideoChannel, (), (override)); MOCK_METHOD(VoiceChannel*, AsVoiceChannel, (), (override)); MOCK_METHOD(MediaSendChannelInterface*, media_send_channel, (), (override)); @@ -56,17 +62,17 @@ class MockChannelInterface : public cricket::ChannelInterface { SetFirstPacketReceivedCallback, (std::function), (override)); + MOCK_METHOD(void, + SetFirstPacketSentCallback, + (std::function), + (override)); MOCK_METHOD(bool, SetLocalContent, - (const cricket::MediaContentDescription*, - webrtc::SdpType, - std::string&), + (const webrtc::MediaContentDescription*, SdpType, std::string&), (override)); MOCK_METHOD(bool, SetRemoteContent, - (const cricket::MediaContentDescription*, - webrtc::SdpType, - std::string&), + (const webrtc::MediaContentDescription*, SdpType, std::string&), (override)); MOCK_METHOD(bool, SetPayloadTypeDemuxingEnabled, (bool), (override)); MOCK_METHOD(const std::vector&, @@ -77,12 +83,17 @@ class MockChannelInterface : public cricket::ChannelInterface { remote_streams, (), (const, override)); - MOCK_METHOD(bool, - SetRtpTransport, - (webrtc::RtpTransportInternal*), - (override)); + MOCK_METHOD(bool, SetRtpTransport, (RtpTransportInternal*), (override)); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
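// Under WEBRTC_ALLOW_DEPRECATED_NAMESPACES the alias below is assumed to keep
// legacy call sites that still spell the type as cricket::MockChannelInterface
// compiling, while new code refers to webrtc::MockChannelInterface directly.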
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::MockChannelInterface; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // PC_TEST_MOCK_CHANNEL_INTERFACE_H_ diff --git a/pc/test/mock_data_channel.h b/pc/test/mock_data_channel.h index ef781fe8ae..f2822cb6e2 100644 --- a/pc/test/mock_data_channel.h +++ b/pc/test/mock_data_channel.h @@ -11,20 +11,22 @@ #ifndef PC_TEST_MOCK_DATA_CHANNEL_H_ #define PC_TEST_MOCK_DATA_CHANNEL_H_ +#include #include #include #include "pc/sctp_data_channel.h" +#include "rtc_base/thread.h" +#include "rtc_base/weak_ptr.h" #include "test/gmock.h" namespace webrtc { class MockSctpDataChannel : public SctpDataChannel { public: - MockSctpDataChannel( - rtc::WeakPtr controller, - int id, - DataState state) + MockSctpDataChannel(WeakPtr controller, + int id, + DataState state) : MockSctpDataChannel(std::move(controller), id, "MockSctpDataChannel", @@ -35,7 +37,7 @@ class MockSctpDataChannel : public SctpDataChannel { 0, 0) {} MockSctpDataChannel( - rtc::WeakPtr controller, + WeakPtr controller, int id, const std::string& label, DataState state, @@ -45,8 +47,8 @@ class MockSctpDataChannel : public SctpDataChannel { uint32_t messages_received, uint64_t bytes_received, const InternalDataChannelInit& config = InternalDataChannelInit(), - rtc::Thread* signaling_thread = rtc::Thread::Current(), - rtc::Thread* network_thread = rtc::Thread::Current()) + Thread* signaling_thread = Thread::Current(), + Thread* network_thread = Thread::Current()) : SctpDataChannel(config, std::move(controller), label, diff --git a/pc/test/mock_peer_connection_internal.h b/pc/test/mock_peer_connection_internal.h index 58d13ede2f..a52c9a4a18 100644 --- a/pc/test/mock_peer_connection_internal.h +++ b/pc/test/mock_peer_connection_internal.h @@ -11,14 +11,57 @@ #ifndef PC_TEST_MOCK_PEER_CONNECTION_INTERNAL_H_ #define PC_TEST_MOCK_PEER_CONNECTION_INTERNAL_H_ +#include #include #include +#include #include #include #include -#include "modules/audio_device/include/audio_device.h" +#include "absl/strings/string_view.h" +#include "api/adaptation/resource.h" +#include "api/audio/audio_device.h" +#include "api/candidate.h" +#include "api/crypto/crypto_options.h" +#include "api/data_channel_event_observer_interface.h" +#include "api/data_channel_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/field_trials_view.h" +#include "api/jsep.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtc_event_log_output.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/sctp_transport_interface.h" +#include "api/set_remote_description_observer_interface.h" +#include "api/stats/rtc_stats_collector_callback.h" +#include "api/transport/bandwidth_estimation_settings.h" +#include "api/transport/bitrate_settings.h" +#include "api/transport/network_control.h" +#include "call/call.h" +#include "call/payload_type_picker.h" +#include "p2p/base/port.h" +#include "p2p/base/port_allocator.h" +#include "pc/data_channel_utils.h" +#include "pc/jsep_transport_controller.h" #include "pc/peer_connection_internal.h" +#include "pc/peer_connection_message_handler.h" +#include "pc/rtp_transceiver.h" +#include "pc/rtp_transmission_manager.h" +#include "pc/session_description.h" +#include "pc/transport_stats.h" 
+#include "pc/usage_pattern.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/thread.h" #include "test/gmock.h" namespace webrtc { @@ -28,61 +71,61 @@ class MockPeerConnectionInternal : public PeerConnectionInternal { MockPeerConnectionInternal() {} ~MockPeerConnectionInternal() = default; // PeerConnectionInterface - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, local_streams, (), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, remote_streams, (), (override)); MOCK_METHOD(bool, AddStream, (MediaStreamInterface*), (override)); MOCK_METHOD(void, RemoveStream, (MediaStreamInterface*), (override)); - MOCK_METHOD(RTCErrorOr>, + MOCK_METHOD(RTCErrorOr>, AddTrack, - (rtc::scoped_refptr, + (webrtc::scoped_refptr, const std::vector&), (override)); - MOCK_METHOD(RTCErrorOr>, + MOCK_METHOD(RTCErrorOr>, AddTrack, - (rtc::scoped_refptr, + (webrtc::scoped_refptr, const std::vector&, const std::vector&), (override)); MOCK_METHOD(RTCError, RemoveTrackOrError, - (rtc::scoped_refptr), + (webrtc::scoped_refptr), (override)); - MOCK_METHOD(RTCErrorOr>, + MOCK_METHOD(RTCErrorOr>, AddTransceiver, - (rtc::scoped_refptr), + (webrtc::scoped_refptr), (override)); - MOCK_METHOD(RTCErrorOr>, + MOCK_METHOD(RTCErrorOr>, AddTransceiver, - (rtc::scoped_refptr, + (webrtc::scoped_refptr, const RtpTransceiverInit&), (override)); - MOCK_METHOD(RTCErrorOr>, + MOCK_METHOD(RTCErrorOr>, AddTransceiver, - (cricket::MediaType), + (webrtc::MediaType), (override)); - MOCK_METHOD(RTCErrorOr>, + MOCK_METHOD(RTCErrorOr>, AddTransceiver, - (cricket::MediaType, const RtpTransceiverInit&), + (webrtc::MediaType, const RtpTransceiverInit&), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, CreateSender, (const std::string&, const std::string&), (override)); - MOCK_METHOD(std::vector>, + MOCK_METHOD(std::vector>, GetSenders, (), (const, override)); - MOCK_METHOD(std::vector>, + MOCK_METHOD(std::vector>, GetReceivers, (), (const, override)); - MOCK_METHOD(std::vector>, + MOCK_METHOD(std::vector>, GetTransceivers, (), (const, override)); @@ -93,16 +136,16 @@ class MockPeerConnectionInternal : public PeerConnectionInternal { MOCK_METHOD(void, GetStats, (RTCStatsCollectorCallback*), (override)); MOCK_METHOD(void, GetStats, - (rtc::scoped_refptr, - rtc::scoped_refptr), + (webrtc::scoped_refptr, + webrtc::scoped_refptr), (override)); MOCK_METHOD(void, GetStats, - (rtc::scoped_refptr, - rtc::scoped_refptr), + (webrtc::scoped_refptr, + webrtc::scoped_refptr), (override)); MOCK_METHOD(void, ClearStatsCache, (), (override)); - MOCK_METHOD(RTCErrorOr>, + MOCK_METHOD(RTCErrorOr>, CreateDataChannelOrError, (const std::string&, const DataChannelInit*), (override)); @@ -151,7 +194,11 @@ class MockPeerConnectionInternal : public PeerConnectionInternal { MOCK_METHOD(void, SetRemoteDescription, (std::unique_ptr, - rtc::scoped_refptr), + webrtc::scoped_refptr), + (override)); + MOCK_METHOD(bool, + ShouldFireNegotiationNeededEvent, + (uint32_t event_id), (override)); MOCK_METHOD(PeerConnectionInterface::RTCConfiguration, GetConfiguration, @@ -167,16 +214,20 @@ class MockPeerConnectionInternal : public PeerConnectionInternal { (override)); MOCK_METHOD(bool, RemoveIceCandidates, - (const std::vector&), + (const std::vector&), (override)); MOCK_METHOD(RTCError, SetBitrate, (const BitrateSettings&), (override)); + MOCK_METHOD(void, + ReconfigureBandwidthEstimation, + (const BandwidthEstimationSettings&), 
+ (override)); MOCK_METHOD(void, SetAudioPlayout, (bool), (override)); MOCK_METHOD(void, SetAudioRecording, (bool), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, LookupDtlsTransportByMid, (const std::string&), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, GetSctpTransport, (), (const, override)); @@ -188,7 +239,11 @@ class MockPeerConnectionInternal : public PeerConnectionInternal { (override)); MOCK_METHOD(PeerConnectionState, peer_connection_state, (), (override)); MOCK_METHOD(IceGatheringState, ice_gathering_state, (), (override)); - MOCK_METHOD(absl::optional, can_trickle_ice_candidates, (), (override)); + MOCK_METHOD(void, + AddAdaptationResource, + (webrtc::scoped_refptr), + (override)); + MOCK_METHOD(std::optional, can_trickle_ice_candidates, (), (override)); MOCK_METHOD(bool, StartRtcEventLog, (std::unique_ptr, int64_t), @@ -197,14 +252,18 @@ class MockPeerConnectionInternal : public PeerConnectionInternal { StartRtcEventLog, (std::unique_ptr), (override)); + MOCK_METHOD(void, + SetDataChannelEventObserver, + (std::unique_ptr), + (override)); MOCK_METHOD(void, StopRtcEventLog, (), (override)); MOCK_METHOD(void, Close, (), (override)); - MOCK_METHOD(rtc::Thread*, signaling_thread, (), (const, override)); + MOCK_METHOD(Thread*, signaling_thread, (), (const, override)); // PeerConnectionSdpMethods MOCK_METHOD(std::string, session_id, (), (const, override)); MOCK_METHOD(bool, NeedsIceRestart, (const std::string&), (const, override)); - MOCK_METHOD(absl::optional, sctp_mid, (), (const, override)); + MOCK_METHOD(std::optional, sctp_mid, (), (const, override)); MOCK_METHOD(PeerConnectionInterface::RTCConfiguration*, configuration, (), @@ -228,10 +287,10 @@ class MockPeerConnectionInternal : public PeerConnectionInternal { MOCK_METHOD(JsepTransportController*, transport_controller_s, (), (override)); MOCK_METHOD(JsepTransportController*, transport_controller_n, (), (override)); MOCK_METHOD(DataChannelController*, data_channel_controller, (), (override)); - MOCK_METHOD(cricket::PortAllocator*, port_allocator, (), (override)); + MOCK_METHOD(PortAllocator*, port_allocator, (), (override)); MOCK_METHOD(LegacyStatsCollector*, legacy_stats, (), (override)); MOCK_METHOD(PeerConnectionObserver*, Observer, (), (const, override)); - MOCK_METHOD(absl::optional, GetSctpSslRole_n, (), (override)); + MOCK_METHOD(std::optional, GetSctpSslRole_n, (), (override)); MOCK_METHOD(PeerConnectionInterface::IceConnectionState, ice_connection_state_internal, (), @@ -245,43 +304,37 @@ class MockPeerConnectionInternal : public PeerConnectionInternal { MOCK_METHOD(bool, IsUnifiedPlan, (), (const, override)); MOCK_METHOD(bool, ValidateBundleSettings, - (const cricket::SessionDescription*, - (const std::map&)), + (const webrtc::SessionDescription*, + (const std::map&)), (override)); - MOCK_METHOD(absl::optional, GetDataMid, (), (const, override)); - MOCK_METHOD(RTCErrorOr>, + MOCK_METHOD(RTCErrorOr>, AddTransceiver, - (cricket::MediaType, - rtc::scoped_refptr, + (webrtc::MediaType, + webrtc::scoped_refptr, const RtpTransceiverInit&, bool), (override)); - MOCK_METHOD(void, StartSctpTransport, (int, int, int), (override)); + MOCK_METHOD(RTCError, StartSctpTransport, (const SctpOptions&), (override)); MOCK_METHOD(void, AddRemoteCandidate, - (const std::string&, const cricket::Candidate&), + (absl::string_view, const webrtc::Candidate&), (override)); MOCK_METHOD(Call*, call_ptr, (), (override)); MOCK_METHOD(bool, SrtpRequired, (), (const, override)); - 
MOCK_METHOD(absl::optional, - SetupDataChannelTransport_n, - (absl::string_view mid), - (override)); - MOCK_METHOD(void, TeardownDataChannelTransport_n, (RTCError), (override)); - MOCK_METHOD(void, - SetSctpDataInfo, - (absl::string_view, absl::string_view), + MOCK_METHOD(bool, + CreateDataChannelTransport, + (absl::string_view), (override)); - MOCK_METHOD(void, ResetSctpDataInfo, (), (override)); + MOCK_METHOD(void, DestroyDataChannelTransport, (RTCError error), (override)); MOCK_METHOD(const FieldTrialsView&, trials, (), (const, override)); // PeerConnectionInternal - MOCK_METHOD(rtc::Thread*, network_thread, (), (const, override)); - MOCK_METHOD(rtc::Thread*, worker_thread, (), (const, override)); + MOCK_METHOD(Thread*, network_thread, (), (const, override)); + MOCK_METHOD(Thread*, worker_thread, (), (const, override)); MOCK_METHOD(bool, initial_offerer, (), (const, override)); MOCK_METHOD( std::vector< - rtc::scoped_refptr>>, + scoped_refptr>>, GetTransceiversInternal, (), (const, override)); @@ -289,41 +342,47 @@ class MockPeerConnectionInternal : public PeerConnectionInternal { GetDataChannelStats, (), (const, override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, sctp_transport_name, (), (const, override)); - MOCK_METHOD(cricket::CandidateStatsList, + MOCK_METHOD(CandidateStatsList, GetPooledCandidateStats, (), (const, override)); - MOCK_METHOD((std::map), + MOCK_METHOD((std::map), GetTransportStatsByNames, (const std::set&), (override)); MOCK_METHOD(Call::Stats, GetCallStats, (), (override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, GetAudioDeviceStats, (), (override)); MOCK_METHOD(bool, GetLocalCertificate, - (const std::string&, rtc::scoped_refptr*), + (const std::string&, + webrtc::scoped_refptr*), (override)); - MOCK_METHOD(std::unique_ptr, + MOCK_METHOD(std::unique_ptr, GetRemoteSSLCertChain, (const std::string&), (override)); MOCK_METHOD(bool, IceRestartPending, (const std::string&), (const, override)); MOCK_METHOD(bool, GetSslRole, - (const std::string&, rtc::SSLRole*), + (const std::string&, webrtc::SSLRole*), (override)); MOCK_METHOD(void, NoteDataAddedEvent, (), (override)); MOCK_METHOD(void, OnSctpDataChannelStateChanged, (int channel_id, DataChannelInterface::DataState), (override)); + MOCK_METHOD(NetworkControllerInterface*, + GetNetworkController, + (), + (override)); + MOCK_METHOD(PayloadTypePicker&, payload_type_picker, (), (override)); }; } // namespace webrtc diff --git a/pc/test/mock_peer_connection_observers.h b/pc/test/mock_peer_connection_observers.h index e9d97a97f6..6bd77437b2 100644 --- a/pc/test/mock_peer_connection_observers.h +++ b/pc/test/mock_peer_connection_observers.h @@ -14,16 +14,37 @@ #ifndef PC_TEST_MOCK_PEER_CONNECTION_OBSERVERS_H_ #define PC_TEST_MOCK_PEER_CONNECTION_OBSERVERS_H_ +#include +#include +#include #include #include +#include #include #include #include +#include "api/candidate.h" #include "api/data_channel_interface.h" +#include "api/jsep.h" #include "api/jsep_ice_candidate.h" +#include "api/legacy_stats_types.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/set_local_description_observer_interface.h" +#include "api/set_remote_description_observer_interface.h" +#include "api/stats/rtc_stats_collector_callback.h" +#include "api/stats/rtc_stats_report.h" #include 
"pc/stream_collection.h" #include "rtc_base/checks.h" +#include "rtc_base/string_encode.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -31,12 +52,12 @@ class MockPeerConnectionObserver : public PeerConnectionObserver { public: struct AddTrackEvent { explicit AddTrackEvent( - rtc::scoped_refptr event_receiver, - std::vector> event_streams) + scoped_refptr event_receiver, + std::vector> event_streams) : receiver(std::move(event_receiver)), streams(std::move(event_streams)) { for (auto stream : streams) { - std::vector> tracks; + std::vector> tracks; for (auto audio_track : stream->GetAudioTracks()) { tracks.push_back(audio_track); } @@ -47,12 +68,12 @@ class MockPeerConnectionObserver : public PeerConnectionObserver { } } - rtc::scoped_refptr receiver; - std::vector> streams; + scoped_refptr receiver; + std::vector> streams; // This map records the tracks present in each stream at the time the // OnAddTrack callback was issued. - std::map, - std::vector>> + std::map, + std::vector>> snapshotted_stream_tracks; }; @@ -77,12 +98,11 @@ class MockPeerConnectionObserver : public PeerConnectionObserver { StreamCollectionInterface* remote_streams() const { return remote_streams_.get(); } - void OnAddStream(rtc::scoped_refptr stream) override { + void OnAddStream(scoped_refptr stream) override { last_added_stream_ = stream; remote_streams_->AddStream(stream); } - void OnRemoveStream( - rtc::scoped_refptr stream) override { + void OnRemoveStream(scoped_refptr stream) override { last_removed_stream_ = stream; remote_streams_->RemoveStream(stream.get()); } @@ -91,7 +111,7 @@ class MockPeerConnectionObserver : public PeerConnectionObserver { latest_negotiation_needed_event_ = event_id; } void OnDataChannel( - rtc::scoped_refptr data_channel) override { + scoped_refptr data_channel) override { last_datachannel_ = data_channel; } @@ -125,7 +145,7 @@ class MockPeerConnectionObserver : public PeerConnectionObserver { } void OnIceCandidatesRemoved( - const std::vector& candidates) override { + const std::vector& candidates) override { num_candidates_removed_++; callback_triggered_ = true; } @@ -134,8 +154,8 @@ class MockPeerConnectionObserver : public PeerConnectionObserver { callback_triggered_ = true; } - void OnAddTrack(rtc::scoped_refptr receiver, - const std::vector>& + void OnAddTrack(scoped_refptr receiver, + const std::vector>& streams) override { RTC_DCHECK(receiver); num_added_tracks_++; @@ -143,18 +163,16 @@ class MockPeerConnectionObserver : public PeerConnectionObserver { add_track_events_.push_back(AddTrackEvent(receiver, streams)); } - void OnTrack( - rtc::scoped_refptr transceiver) override { + void OnTrack(scoped_refptr transceiver) override { on_track_transceivers_.push_back(transceiver); } - void OnRemoveTrack( - rtc::scoped_refptr receiver) override { + void OnRemoveTrack(scoped_refptr receiver) override { remove_track_events_.push_back(receiver); } - std::vector> GetAddTrackReceivers() { - std::vector> receivers; + std::vector> GetAddTrackReceivers() { + std::vector> receivers; for (const AddTrackEvent& event : add_track_events_) { receivers.push_back(event.receiver); } @@ -227,34 +245,33 @@ class MockPeerConnectionObserver : public PeerConnectionObserver { return latest_negotiation_needed_event_.value_or(0u); } void clear_latest_negotiation_needed_event() { - latest_negotiation_needed_event_ = absl::nullopt; + latest_negotiation_needed_event_ = std::nullopt; } - rtc::scoped_refptr pc_; + scoped_refptr pc_; 
PeerConnectionInterface::SignalingState state_; std::vector> candidates_; - rtc::scoped_refptr last_datachannel_; - rtc::scoped_refptr remote_streams_; + scoped_refptr last_datachannel_; + scoped_refptr remote_streams_; bool renegotiation_needed_ = false; - absl::optional latest_negotiation_needed_event_; + std::optional latest_negotiation_needed_event_; bool ice_gathering_complete_ = false; bool ice_connected_ = false; bool callback_triggered_ = false; int num_added_tracks_ = 0; std::string last_added_track_label_; std::vector add_track_events_; - std::vector> remove_track_events_; - std::vector> - on_track_transceivers_; + std::vector> remove_track_events_; + std::vector> on_track_transceivers_; int num_candidates_removed_ = 0; private: - rtc::scoped_refptr last_added_stream_; - rtc::scoped_refptr last_removed_stream_; + scoped_refptr last_added_stream_; + scoped_refptr last_removed_stream_; }; class MockCreateSessionDescriptionObserver - : public webrtc::CreateSessionDescriptionObserver { + : public CreateSessionDescriptionObserver { public: MockCreateSessionDescriptionObserver() : called_(false), @@ -266,7 +283,7 @@ class MockCreateSessionDescriptionObserver error_ = ""; desc_.reset(desc); } - void OnFailure(webrtc::RTCError error) override { + void OnFailure(RTCError error) override { MutexLock lock(&mutex_); called_ = true; error_ = error.message(); @@ -295,11 +312,10 @@ class MockCreateSessionDescriptionObserver std::unique_ptr desc_ RTC_GUARDED_BY(mutex_); }; -class MockSetSessionDescriptionObserver - : public webrtc::SetSessionDescriptionObserver { +class MockSetSessionDescriptionObserver : public SetSessionDescriptionObserver { public: - static rtc::scoped_refptr Create() { - return rtc::make_ref_counted(); + static scoped_refptr Create() { + return make_ref_counted(); } MockSetSessionDescriptionObserver() @@ -312,7 +328,7 @@ class MockSetSessionDescriptionObserver called_ = true; error_ = ""; } - void OnFailure(webrtc::RTCError error) override { + void OnFailure(RTCError error) override { MutexLock lock(&mutex_); called_ = true; error_ = error.message(); @@ -353,7 +369,7 @@ class FakeSetLocalDescriptionObserver private: // Set on complete, on success this is set to an RTCError::OK() error. - absl::optional error_; + std::optional error_; }; class FakeSetRemoteDescriptionObserver @@ -372,17 +388,17 @@ class FakeSetRemoteDescriptionObserver private: // Set on complete, on success this is set to an RTCError::OK() error. 
- absl::optional error_; + std::optional error_; }; -class MockDataChannelObserver : public webrtc::DataChannelObserver { +class MockDataChannelObserver : public DataChannelObserver { public: struct Message { std::string data; bool binary; }; - explicit MockDataChannelObserver(webrtc::DataChannelInterface* channel) + explicit MockDataChannelObserver(DataChannelInterface* channel) : channel_(channel) { channel_->RegisterObserver(this); states_.push_back(channel_->state()); @@ -391,7 +407,13 @@ class MockDataChannelObserver : public webrtc::DataChannelObserver { void OnBufferedAmountChange(uint64_t previous_amount) override {} - void OnStateChange() override { states_.push_back(channel_->state()); } + void OnStateChange() override { + states_.push_back(channel_->state()); + if (state_change_callback_) { + state_change_callback_(states_.back()); + } + } + void OnMessage(const DataBuffer& buffer) override { messages_.push_back( {std::string(buffer.data.data(), buffer.data.size()), @@ -418,13 +440,19 @@ class MockDataChannelObserver : public webrtc::DataChannelObserver { return states_; } + void set_state_change_callback( + std::function func) { + state_change_callback_ = std::move(func); + } + private: - rtc::scoped_refptr channel_; + scoped_refptr channel_; std::vector states_; std::vector messages_; + std::function state_change_callback_; }; -class MockStatsObserver : public webrtc::StatsObserver { +class MockStatsObserver : public StatsObserver { public: MockStatsObserver() : called_(false), stats_() {} virtual ~MockStatsObserver() {} @@ -520,7 +548,7 @@ class MockStatsObserver : public webrtc::StatsObserver { const StatsReport::Value* v = report->FindValue(name); if (v) { // TODO(tommi): We should really just be using an int here :-/ - *value = rtc::FromString(v->ToString()); + *value = FromString(v->ToString()); } return v != nullptr; } @@ -531,7 +559,7 @@ class MockStatsObserver : public webrtc::StatsObserver { const StatsReport::Value* v = report->FindValue(name); if (v) { // TODO(tommi): We should really just be using an int here :-/ - *value = rtc::FromString(v->ToString()); + *value = FromString(v->ToString()); } return v != nullptr; } @@ -576,22 +604,22 @@ class MockStatsObserver : public webrtc::StatsObserver { }; // Helper class that just stores the report from the callback. 
-class MockRTCStatsCollectorCallback : public webrtc::RTCStatsCollectorCallback { +class MockRTCStatsCollectorCallback : public RTCStatsCollectorCallback { public: - rtc::scoped_refptr report() { return report_; } + scoped_refptr report() { return report_; } bool called() const { return called_; } protected: void OnStatsDelivered( - const rtc::scoped_refptr& report) override { + const scoped_refptr& report) override { report_ = report; called_ = true; } private: bool called_ = false; - rtc::scoped_refptr report_; + scoped_refptr report_; }; } // namespace webrtc diff --git a/pc/test/mock_rtp_receiver_internal.h b/pc/test/mock_rtp_receiver_internal.h index e76b56755d..046e7f7e88 100644 --- a/pc/test/mock_rtp_receiver_internal.h +++ b/pc/test/mock_rtp_receiver_internal.h @@ -11,10 +11,20 @@ #ifndef PC_TEST_MOCK_RTP_RECEIVER_INTERNAL_H_ #define PC_TEST_MOCK_RTP_RECEIVER_INTERNAL_H_ +#include +#include #include #include -#include "absl/types/optional.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/transport/rtp/rtp_source.h" +#include "media/base/media_channel.h" #include "pc/rtp_receiver.h" #include "test/gmock.h" @@ -24,33 +34,33 @@ namespace webrtc { class MockRtpReceiverInternal : public RtpReceiverInternal { public: // RtpReceiverInterface methods. - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, track, (), (const, override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, dtls_transport, (), (const, override)); MOCK_METHOD(std::vector, stream_ids, (), (const, override)); - MOCK_METHOD(std::vector>, + MOCK_METHOD(std::vector>, streams, (), (const, override)); - MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); + MOCK_METHOD(webrtc::MediaType, media_type, (), (const, override)); MOCK_METHOD(std::string, id, (), (const, override)); MOCK_METHOD(RtpParameters, GetParameters, (), (const, override)); MOCK_METHOD(void, SetObserver, (RtpReceiverObserverInterface*), (override)); MOCK_METHOD(void, SetJitterBufferMinimumDelay, - (absl::optional), + (std::optional), (override)); MOCK_METHOD(std::vector, GetSources, (), (const, override)); MOCK_METHOD(void, SetFrameDecryptor, - (rtc::scoped_refptr), + (webrtc::scoped_refptr), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, GetFrameDecryptor, (), (const, override)); @@ -59,20 +69,20 @@ class MockRtpReceiverInternal : public RtpReceiverInternal { MOCK_METHOD(void, Stop, (), (override)); MOCK_METHOD(void, SetMediaChannel, - (cricket::MediaReceiveChannelInterface*), + (webrtc::MediaReceiveChannelInterface*), (override)); MOCK_METHOD(void, SetupMediaChannel, (uint32_t), (override)); MOCK_METHOD(void, SetupUnsignaledMediaChannel, (), (override)); - MOCK_METHOD(absl::optional, ssrc, (), (const, override)); + MOCK_METHOD(std::optional, ssrc, (), (const, override)); MOCK_METHOD(void, NotifyFirstPacketReceived, (), (override)); MOCK_METHOD(void, set_stream_ids, (std::vector), (override)); MOCK_METHOD(void, set_transport, - (rtc::scoped_refptr), + (webrtc::scoped_refptr), (override)); MOCK_METHOD(void, SetStreams, - (const std::vector>&), + (const std::vector>&), (override)); MOCK_METHOD(int, AttachmentId, (), (const, override)); }; diff --git a/pc/test/mock_rtp_sender_internal.h b/pc/test/mock_rtp_sender_internal.h index 4cfb2cfeaf..4743d7f85d 100644 
--- a/pc/test/mock_rtp_sender_internal.h +++ b/pc/test/mock_rtp_sender_internal.h @@ -11,10 +11,24 @@ #ifndef PC_TEST_MOCK_RTP_SENDER_INTERNAL_H_ #define PC_TEST_MOCK_RTP_SENDER_INTERNAL_H_ +#include #include #include #include +#include "api/crypto/frame_encryptor_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/dtmf_sender_interface.h" +#include "api/frame_transformer_interface.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" +#include "api/scoped_refptr.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "media/base/codec.h" +#include "media/base/media_channel.h" #include "pc/rtp_sender.h" #include "test/gmock.h" @@ -25,16 +39,16 @@ class MockRtpSenderInternal : public RtpSenderInternal { public: // RtpSenderInterface methods. MOCK_METHOD(bool, SetTrack, (MediaStreamTrackInterface*), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, track, (), (const, override)); MOCK_METHOD(uint32_t, ssrc, (), (const, override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, dtls_transport, (), (const, override)); - MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); + MOCK_METHOD(webrtc::MediaType, media_type, (), (const, override)); MOCK_METHOD(std::string, id, (), (const, override)); MOCK_METHOD(std::vector, stream_ids, (), (const, override)); MOCK_METHOD(std::vector, @@ -43,7 +57,7 @@ class MockRtpSenderInternal : public RtpSenderInternal { (const, override)); MOCK_METHOD(void, set_transport, - (rtc::scoped_refptr), + (webrtc::scoped_refptr), (override)); MOCK_METHOD(RtpParameters, GetParameters, (), (const, override)); MOCK_METHOD(RtpParameters, GetParametersInternal, (), (const, override)); @@ -68,33 +82,32 @@ class MockRtpSenderInternal : public RtpSenderInternal { CheckCodecParameters, (const RtpParameters&), (override)); - MOCK_METHOD(void, - SetCodecPreferences, - (std::vector), - (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(void, SetSendCodecs, (std::vector), (override)); + MOCK_METHOD(std::vector, GetSendCodecs, (), (const, override)); + MOCK_METHOD(scoped_refptr, GetDtmfSender, (), (const, override)); MOCK_METHOD(void, SetFrameEncryptor, - (rtc::scoped_refptr), + (webrtc::scoped_refptr), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, GetFrameEncryptor, (), (const, override)); MOCK_METHOD(void, - SetEncoderToPacketizerFrameTransformer, - (rtc::scoped_refptr), + SetFrameTransformer, + (webrtc::scoped_refptr), (override)); MOCK_METHOD(void, SetEncoderSelector, (std::unique_ptr), (override)); + MOCK_METHOD(void, SetObserver, (RtpSenderObserverInterface*), (override)); // RtpSenderInternal methods. 
- MOCK_METHOD1(SetMediaChannel, void(cricket::MediaSendChannelInterface*)); + MOCK_METHOD1(SetMediaChannel, void(webrtc::MediaSendChannelInterface*)); MOCK_METHOD1(SetSsrc, void(uint32_t)); MOCK_METHOD1(set_stream_ids, void(const std::vector&)); MOCK_METHOD1(SetStreams, void(const std::vector&)); @@ -105,6 +118,7 @@ class MockRtpSenderInternal : public RtpSenderInternal { MOCK_METHOD1(DisableEncodingLayers, RTCError(const std::vector&)); MOCK_METHOD0(SetTransceiverAsStopped, void()); + MOCK_METHOD(void, NotifyFirstPacketSent, (), (override)); }; } // namespace webrtc diff --git a/pc/test/mock_voice_media_receive_channel_interface.h b/pc/test/mock_voice_media_receive_channel_interface.h index adb1201239..7eca40c94b 100644 --- a/pc/test/mock_voice_media_receive_channel_interface.h +++ b/pc/test/mock_voice_media_receive_channel_interface.h @@ -10,20 +10,27 @@ #ifndef PC_TEST_MOCK_VOICE_MEDIA_RECEIVE_CHANNEL_INTERFACE_H_ #define PC_TEST_MOCK_VOICE_MEDIA_RECEIVE_CHANNEL_INTERFACE_H_ +#include #include +#include #include -#include +#include #include #include "api/call/audio_sink.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/frame_transformer_interface.h" +#include "api/media_types.h" +#include "api/rtp_headers.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "api/transport/rtp/rtp_source.h" #include "media/base/media_channel.h" -#include "media/base/media_channel_impl.h" +#include "media/base/stream_params.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "rtc_base/gunit.h" #include "test/gmock.h" -#include "test/gtest.h" -namespace cricket { +namespace webrtc { class MockVoiceMediaReceiveChannelInterface : public VoiceMediaReceiveChannelInterface { @@ -35,17 +42,17 @@ class MockVoiceMediaReceiveChannelInterface // VoiceMediaReceiveChannelInterface MOCK_METHOD(bool, SetReceiverParameters, - (const AudioReceiverParameters& params), + (const webrtc::AudioReceiverParameters& params), (override)); - MOCK_METHOD(webrtc::RtpParameters, + MOCK_METHOD(RtpParameters, GetRtpReceiverParameters, (uint32_t ssrc), (const, override)); - MOCK_METHOD(std::vector, + MOCK_METHOD(std::vector, GetSources, (uint32_t ssrc), (const, override)); - MOCK_METHOD(webrtc::RtpParameters, + MOCK_METHOD(RtpParameters, GetDefaultRtpReceiveParameters, (), (const, override)); @@ -57,16 +64,18 @@ class MockVoiceMediaReceiveChannelInterface MOCK_METHOD(bool, SetDefaultOutputVolume, (double volume), (override)); MOCK_METHOD(void, SetRawAudioSink, - (uint32_t ssrc, std::unique_ptr sink), + (uint32_t ssrc, std::unique_ptr sink), (override)); MOCK_METHOD(void, SetDefaultRawAudioSink, - (std::unique_ptr sink), + (std::unique_ptr sink), (override)); MOCK_METHOD(bool, GetStats, - (VoiceMediaReceiveInfo * stats, bool reset_legacy), + (webrtc::VoiceMediaReceiveInfo * stats, bool reset_legacy), (override)); + MOCK_METHOD(::webrtc::RtcpMode, RtcpMode, (), (const, override)); + MOCK_METHOD(void, SetRtcpMode, (::webrtc::RtcpMode mode), (override)); MOCK_METHOD(void, SetReceiveNackEnabled, (bool enabled), (override)); MOCK_METHOD(void, SetReceiveNonSenderRttEnabled, (bool enabled), (override)); @@ -79,19 +88,22 @@ class MockVoiceMediaReceiveChannelInterface AsVoiceReceiveChannel, (), (override)); - MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); - MOCK_METHOD(bool, AddRecvStream, (const StreamParams& sp), (override)); + MOCK_METHOD(MediaType, media_type, (), (const, override)); + MOCK_METHOD(bool, + AddRecvStream, + (const webrtc::StreamParams& sp), + 
(override)); MOCK_METHOD(bool, RemoveRecvStream, (uint32_t ssrc), (override)); MOCK_METHOD(void, ResetUnsignaledRecvStream, (), (override)); MOCK_METHOD(void, SetInterface, - (MediaChannelNetworkInterface * iface), + (webrtc::MediaChannelNetworkInterface * iface), (override)); MOCK_METHOD(void, OnPacketReceived, - (const webrtc::RtpPacketReceived& packet), + (const RtpPacketReceived& packet), (override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, GetUnsignaledSsrc, (), (const, override)); @@ -101,23 +113,21 @@ class MockVoiceMediaReceiveChannelInterface (override)); MOCK_METHOD(void, OnDemuxerCriteriaUpdatePending, (), (override)); MOCK_METHOD(void, OnDemuxerCriteriaUpdateComplete, (), (override)); - MOCK_METHOD( - void, - SetFrameDecryptor, - (uint32_t ssrc, - rtc::scoped_refptr frame_decryptor), - (override)); - MOCK_METHOD( - void, - SetDepacketizerToDecoderFrameTransformer, - (uint32_t ssrc, - rtc::scoped_refptr frame_transformer), - (override)); + MOCK_METHOD(void, + SetFrameDecryptor, + (uint32_t ssrc, + scoped_refptr frame_decryptor), + (override)); + MOCK_METHOD(void, + SetDepacketizerToDecoderFrameTransformer, + (uint32_t ssrc, + scoped_refptr frame_transformer), + (override)); MOCK_METHOD(bool, SetBaseMinimumPlayoutDelayMs, (uint32_t ssrc, int delay_ms), (override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, GetBaseMinimumPlayoutDelayMs, (uint32_t ssrc), (const, override)); @@ -125,6 +135,14 @@ class MockVoiceMediaReceiveChannelInterface static_assert(!std::is_abstract_v, ""); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::MockVoiceMediaReceiveChannelInterface; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // PC_TEST_MOCK_VOICE_MEDIA_RECEIVE_CHANNEL_INTERFACE_H_ diff --git a/pc/test/peer_connection_test_wrapper.cc b/pc/test/peer_connection_test_wrapper.cc index 1a3dd3109a..94f9b6530e 100644 --- a/pc/test/peer_connection_test_wrapper.cc +++ b/pc/test/peer_connection_test_wrapper.cc @@ -13,22 +13,42 @@ #include #include +#include #include #include #include #include "absl/strings/match.h" -#include "absl/types/optional.h" -#include "api/audio/audio_mixer.h" +#include "api/audio/audio_device.h" +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/audio_options.h" #include "api/create_peerconnection_factory.h" +#include "api/data_channel_interface.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/field_trials_view.h" +#include "api/jsep.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" #include "api/media_types.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" +#include "api/video/resolution.h" +#include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_decoder_factory_template.h" #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h" #include 
"api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h" +#include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" #include "api/video_codecs/video_encoder_factory_template.h" #include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h" @@ -36,32 +56,37 @@ #include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" #include "media/engine/simulcast_encoder_adapter.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "p2p/base/fake_port_allocator.h" -#include "p2p/base/port_allocator.h" +#include "p2p/test/fake_port_allocator.h" +#include "pc/test/fake_audio_capture_module.h" #include "pc/test/fake_periodic_video_source.h" +#include "pc/test/fake_periodic_video_track_source.h" #include "pc/test/fake_rtc_certificate_generator.h" +#include "pc/test/fake_video_track_renderer.h" #include "pc/test/mock_peer_connection_observers.h" -#include "rtc_base/gunit.h" #include "rtc_base/logging.h" #include "rtc_base/rtc_certificate_generator.h" +#include "rtc_base/socket_server.h" #include "rtc_base/string_encode.h" #include "rtc_base/time_utils.h" +#include "test/gmock.h" #include "test/gtest.h" - -using webrtc::FakeVideoTrackRenderer; -using webrtc::IceCandidateInterface; -using webrtc::MediaStreamInterface; -using webrtc::MediaStreamTrackInterface; -using webrtc::MockSetSessionDescriptionObserver; -using webrtc::PeerConnectionInterface; -using webrtc::RtpReceiverInterface; -using webrtc::SdpType; -using webrtc::SessionDescriptionInterface; -using webrtc::VideoTrackInterface; +#include "test/wait_until.h" namespace { + +using ::webrtc::Environment; +using ::webrtc::FakeVideoTrackRenderer; +using ::webrtc::FieldTrialsView; +using ::webrtc::IceCandidateInterface; +using ::webrtc::MediaStreamInterface; +using ::webrtc::MediaStreamTrackInterface; +using ::webrtc::MockSetSessionDescriptionObserver; +using ::webrtc::PeerConnectionInterface; +using ::webrtc::RtpReceiverInterface; +using ::webrtc::SdpType; +using ::webrtc::SessionDescriptionInterface; +using ::webrtc::VideoTrackInterface; + const char kStreamIdBase[] = "stream_id"; const char kVideoTrackLabelBase[] = "video_track"; const char kAudioTrackLabelBase[] = "audio_track"; @@ -75,13 +100,14 @@ class FuzzyMatchedVideoEncoderFactory : public webrtc::VideoEncoderFactory { return factory_.GetSupportedFormats(); } - std::unique_ptr CreateVideoEncoder( + std::unique_ptr Create( + const Environment& env, const webrtc::SdpVideoFormat& format) override { - if (absl::optional original_format = + if (std::optional original_format = webrtc::FuzzyMatchSdpVideoFormat(factory_.GetSupportedFormats(), format)) { return std::make_unique( - &factory_, *original_format); + env, &factory_, nullptr, *original_format); } return nullptr; @@ -89,7 +115,7 @@ class FuzzyMatchedVideoEncoderFactory : public webrtc::VideoEncoderFactory { CodecSupport QueryCodecSupport( const webrtc::SdpVideoFormat& format, - absl::optional scalability_mode) const override { + std::optional scalability_mode) const override { return factory_.QueryCodecSupport(format, scalability_mode); } @@ -117,9 +143,9 @@ void PeerConnectionTestWrapper::Connect(PeerConnectionTestWrapper* caller, PeerConnectionTestWrapper::PeerConnectionTestWrapper( const std::string& name, - 
rtc::SocketServer* socket_server, - rtc::Thread* network_thread, - rtc::Thread* worker_thread) + webrtc::SocketServer* socket_server, + webrtc::Thread* network_thread, + webrtc::Thread* worker_thread) : name_(name), socket_server_(socket_server), network_thread_(network_thread), @@ -144,13 +170,13 @@ PeerConnectionTestWrapper::~PeerConnectionTestWrapper() { bool PeerConnectionTestWrapper::CreatePc( const webrtc::PeerConnectionInterface::RTCConfiguration& config, - rtc::scoped_refptr audio_encoder_factory, - rtc::scoped_refptr audio_decoder_factory) { - std::unique_ptr port_allocator( - new cricket::FakePortAllocator( - network_thread_, - std::make_unique(socket_server_), - &field_trials_)); + webrtc::scoped_refptr audio_encoder_factory, + webrtc::scoped_refptr audio_decoder_factory, + std::unique_ptr video_encoder_factory, + std::unique_ptr video_decoder_factory, + std::unique_ptr field_trials) { + auto port_allocator = std::make_unique( + CreateEnvironment(field_trials.get()), socket_server_, network_thread_); RTC_DCHECK_RUN_ON(&pc_thread_checker_); @@ -160,21 +186,18 @@ bool PeerConnectionTestWrapper::CreatePc( } peer_connection_factory_ = webrtc::CreatePeerConnectionFactory( - network_thread_, worker_thread_, rtc::Thread::Current(), - rtc::scoped_refptr(fake_audio_capture_module_), + network_thread_, worker_thread_, webrtc::Thread::Current(), + webrtc::scoped_refptr( + fake_audio_capture_module_), audio_encoder_factory, audio_decoder_factory, - std::make_unique(), - std::make_unique>(), - nullptr /* audio_mixer */, nullptr /* audio_processing */); + std::move(video_encoder_factory), std::move(video_decoder_factory), + nullptr /* audio_mixer */, nullptr /* audio_processing */, nullptr, + std::move(field_trials)); if (!peer_connection_factory_) { return false; } - std::unique_ptr cert_generator( + std::unique_ptr cert_generator( new FakeRTCCertificateGenerator()); webrtc::PeerConnectionDependencies deps(this); deps.allocator = std::move(port_allocator); @@ -189,7 +212,23 @@ bool PeerConnectionTestWrapper::CreatePc( } } -rtc::scoped_refptr +bool PeerConnectionTestWrapper::CreatePc( + const webrtc::PeerConnectionInterface::RTCConfiguration& config, + webrtc::scoped_refptr audio_encoder_factory, + webrtc::scoped_refptr audio_decoder_factory, + std::unique_ptr field_trials) { + return CreatePc(config, std::move(audio_encoder_factory), + std::move(audio_decoder_factory), + std::make_unique(), + std::make_unique>(), + std::move(field_trials)); +} + +webrtc::scoped_refptr PeerConnectionTestWrapper::CreateDataChannel( const std::string& label, const webrtc::DataChannelInit& init) { @@ -203,9 +242,9 @@ PeerConnectionTestWrapper::CreateDataChannel( return result.MoveValue(); } -absl::optional +std::optional PeerConnectionTestWrapper::FindFirstSendCodecWithName( - cricket::MediaType media_type, + webrtc::MediaType media_type, const std::string& name) const { std::vector codecs = peer_connection_factory_->GetRtpSenderCapabilities(media_type).codecs; @@ -214,11 +253,14 @@ PeerConnectionTestWrapper::FindFirstSendCodecWithName( return codec; } } - return absl::nullopt; + return std::nullopt; } void PeerConnectionTestWrapper::WaitForNegotiation() { - EXPECT_TRUE_WAIT(!pending_negotiation_, kMaxWait); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return !pending_negotiation_; }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kMaxWait)}), + webrtc::IsRtcOk()); } void PeerConnectionTestWrapper::OnSignalingChange( @@ -229,8 +271,8 @@ void PeerConnectionTestWrapper::OnSignalingChange( } void 
PeerConnectionTestWrapper::OnAddTrack( - rtc::scoped_refptr receiver, - const std::vector>& streams) { + webrtc::scoped_refptr receiver, + const std::vector>& streams) { RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_ << ": OnAddTrack"; if (receiver->track()->kind() == MediaStreamTrackInterface::kVideoKind) { auto* video_track = @@ -248,7 +290,7 @@ void PeerConnectionTestWrapper::OnIceCandidate( } void PeerConnectionTestWrapper::OnDataChannel( - rtc::scoped_refptr data_channel) { + webrtc::scoped_refptr data_channel) { SignalOnDataChannel(data_channel.get()); } @@ -297,7 +339,7 @@ void PeerConnectionTestWrapper::SetLocalDescription(SdpType type, << ": SetLocalDescription " << webrtc::SdpTypeToString(type) << " " << sdp; - auto observer = rtc::make_ref_counted(); + auto observer = webrtc::make_ref_counted(); peer_connection_->SetLocalDescription( observer.get(), webrtc::CreateSessionDescription(type, sdp).release()); } @@ -308,7 +350,7 @@ void PeerConnectionTestWrapper::SetRemoteDescription(SdpType type, << ": SetRemoteDescription " << webrtc::SdpTypeToString(type) << " " << sdp; - auto observer = rtc::make_ref_counted(); + auto observer = webrtc::make_ref_counted(); peer_connection_->SetRemoteDescription( observer.get(), webrtc::CreateSessionDescription(type, sdp).release()); } @@ -321,15 +363,26 @@ void PeerConnectionTestWrapper::AddIceCandidate(const std::string& sdp_mid, EXPECT_TRUE(peer_connection_->AddIceCandidate(owned_candidate.get())); } -void PeerConnectionTestWrapper::WaitForCallEstablished() { - WaitForConnection(); - WaitForAudio(); - WaitForVideo(); +bool PeerConnectionTestWrapper::WaitForCallEstablished() { + if (!WaitForConnection()) + return false; + if (!WaitForAudio()) + return false; + if (!WaitForVideo()) + return false; + return true; } -void PeerConnectionTestWrapper::WaitForConnection() { - EXPECT_TRUE_WAIT(CheckForConnection(), kMaxWait); +bool PeerConnectionTestWrapper::WaitForConnection() { + EXPECT_THAT(webrtc::WaitUntil( + [&] { return CheckForConnection(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kMaxWait)}), + webrtc::IsRtcOk()); + if (testing::Test::HasFailure()) { + return false; + } RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_ << ": Connected."; + return true; } bool PeerConnectionTestWrapper::CheckForConnection() { @@ -339,10 +392,17 @@ bool PeerConnectionTestWrapper::CheckForConnection() { PeerConnectionInterface::kIceConnectionCompleted); } -void PeerConnectionTestWrapper::WaitForAudio() { - EXPECT_TRUE_WAIT(CheckForAudio(), kMaxWait); +bool PeerConnectionTestWrapper::WaitForAudio() { + EXPECT_THAT( + webrtc::WaitUntil([&] { return CheckForAudio(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kMaxWait)}), + webrtc::IsRtcOk()); + if (testing::Test::HasFailure()) { + return false; + } RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_ << ": Got enough audio frames."; + return true; } bool PeerConnectionTestWrapper::CheckForAudio() { @@ -350,10 +410,17 @@ bool PeerConnectionTestWrapper::CheckForAudio() { kTestAudioFrameCount); } -void PeerConnectionTestWrapper::WaitForVideo() { - EXPECT_TRUE_WAIT(CheckForVideo(), kMaxWait); +bool PeerConnectionTestWrapper::WaitForVideo() { + EXPECT_THAT( + webrtc::WaitUntil([&] { return CheckForVideo(); }, ::testing::IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(kMaxWait)}), + webrtc::IsRtcOk()); + if (testing::Test::HasFailure()) { + return false; + } RTC_LOG(LS_INFO) << "PeerConnectionTestWrapper " << name_ << ": Got enough video frames."; + 
return true; } bool PeerConnectionTestWrapper::CheckForVideo() { @@ -365,9 +432,9 @@ bool PeerConnectionTestWrapper::CheckForVideo() { void PeerConnectionTestWrapper::GetAndAddUserMedia( bool audio, - const cricket::AudioOptions& audio_options, + const webrtc::AudioOptions& audio_options, bool video) { - rtc::scoped_refptr stream = + webrtc::scoped_refptr stream = GetUserMedia(audio, audio_options, video); for (const auto& audio_track : stream->GetAudioTracks()) { EXPECT_TRUE(peer_connection_->AddTrack(audio_track, {stream->id()}).ok()); @@ -377,24 +444,24 @@ void PeerConnectionTestWrapper::GetAndAddUserMedia( } } -rtc::scoped_refptr +webrtc::scoped_refptr PeerConnectionTestWrapper::GetUserMedia( bool audio, - const cricket::AudioOptions& audio_options, + const webrtc::AudioOptions& audio_options, bool video, webrtc::Resolution resolution) { std::string stream_id = - kStreamIdBase + rtc::ToString(num_get_user_media_calls_++); - rtc::scoped_refptr stream = + kStreamIdBase + absl::StrCat(num_get_user_media_calls_++); + webrtc::scoped_refptr stream = peer_connection_factory_->CreateLocalMediaStream(stream_id); if (audio) { - cricket::AudioOptions options = audio_options; + webrtc::AudioOptions options = audio_options; // Disable highpass filter so that we can get all the test audio frames. options.highpass_filter = false; - rtc::scoped_refptr source = + webrtc::scoped_refptr source = peer_connection_factory_->CreateAudioSource(options); - rtc::scoped_refptr audio_track( + webrtc::scoped_refptr audio_track( peer_connection_factory_->CreateAudioTrack(kAudioTrackLabelBase, source.get())); stream->AddTrack(audio_track); @@ -404,16 +471,17 @@ PeerConnectionTestWrapper::GetUserMedia( // Set max frame rate to 10fps to reduce the risk of the tests to be flaky. 
webrtc::FakePeriodicVideoSource::Config config; config.frame_interval_ms = 100; - config.timestamp_offset_ms = rtc::TimeMillis(); + config.timestamp_offset_ms = webrtc::TimeMillis(); config.width = resolution.width; config.height = resolution.height; - auto source = rtc::make_ref_counted( - config, /* remote */ false); + auto source = + webrtc::make_ref_counted( + config, /* remote */ false); fake_video_sources_.push_back(source); std::string videotrack_label = stream_id + kVideoTrackLabelBase; - rtc::scoped_refptr video_track( + webrtc::scoped_refptr video_track( peer_connection_factory_->CreateVideoTrack(source, videotrack_label)); stream->AddTrack(video_track); diff --git a/pc/test/peer_connection_test_wrapper.h b/pc/test/peer_connection_test_wrapper.h index 751c9462d3..30fa5c407f 100644 --- a/pc/test/peer_connection_test_wrapper.h +++ b/pc/test/peer_connection_test_wrapper.h @@ -12,6 +12,7 @@ #define PC_TEST_PEER_CONNECTION_TEST_WRAPPER_H_ #include +#include #include #include @@ -19,8 +20,10 @@ #include "api/audio_codecs/audio_encoder_factory.h" #include "api/audio_options.h" #include "api/data_channel_interface.h" +#include "api/field_trials_view.h" #include "api/jsep.h" #include "api/media_stream_interface.h" +#include "api/media_types.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" #include "api/rtp_parameters.h" @@ -28,13 +31,15 @@ #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/video/resolution.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" #include "pc/test/fake_audio_capture_module.h" #include "pc/test/fake_periodic_video_source.h" #include "pc/test/fake_periodic_video_track_source.h" #include "pc/test/fake_video_track_renderer.h" +#include "rtc_base/socket_server.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" -#include "test/scoped_key_value_config.h" class PeerConnectionTestWrapper : public webrtc::PeerConnectionObserver, @@ -45,28 +50,36 @@ class PeerConnectionTestWrapper PeerConnectionTestWrapper* callee); PeerConnectionTestWrapper(const std::string& name, - rtc::SocketServer* socket_server, - rtc::Thread* network_thread, - rtc::Thread* worker_thread); + webrtc::SocketServer* socket_server, + webrtc::Thread* network_thread, + webrtc::Thread* worker_thread); virtual ~PeerConnectionTestWrapper(); bool CreatePc( const webrtc::PeerConnectionInterface::RTCConfiguration& config, - rtc::scoped_refptr audio_encoder_factory, - rtc::scoped_refptr audio_decoder_factory); + webrtc::scoped_refptr audio_encoder_factory, + webrtc::scoped_refptr audio_decoder_factory, + std::unique_ptr field_trials = nullptr); + bool CreatePc( + const webrtc::PeerConnectionInterface::RTCConfiguration& config, + webrtc::scoped_refptr audio_encoder_factory, + webrtc::scoped_refptr audio_decoder_factory, + std::unique_ptr video_encoder_factory, + std::unique_ptr video_decoder_factory, + std::unique_ptr field_trials = nullptr); - rtc::scoped_refptr pc_factory() + webrtc::scoped_refptr pc_factory() const { return peer_connection_factory_; } webrtc::PeerConnectionInterface* pc() { return peer_connection_.get(); } - rtc::scoped_refptr CreateDataChannel( + webrtc::scoped_refptr CreateDataChannel( const std::string& label, const webrtc::DataChannelInit& init); - absl::optional FindFirstSendCodecWithName( - cricket::MediaType media_type, + std::optional FindFirstSendCodecWithName( + webrtc::MediaType media_type, const std::string& name) const; void 
WaitForNegotiation(); @@ -75,11 +88,11 @@ class PeerConnectionTestWrapper void OnSignalingChange( webrtc::PeerConnectionInterface::SignalingState new_state) override; void OnAddTrack( - rtc::scoped_refptr receiver, - const std::vector>& + webrtc::scoped_refptr receiver, + const std::vector>& streams) override; - void OnDataChannel( - rtc::scoped_refptr data_channel) override; + void OnDataChannel(webrtc::scoped_refptr + data_channel) override; void OnRenegotiationNeeded() override {} void OnIceConnectionChange( webrtc::PeerConnectionInterface::IceConnectionState new_state) override {} @@ -100,12 +113,12 @@ class PeerConnectionTestWrapper void AddIceCandidate(const std::string& sdp_mid, int sdp_mline_index, const std::string& candidate); - void WaitForCallEstablished(); - void WaitForConnection(); - void WaitForAudio(); - void WaitForVideo(); + bool WaitForCallEstablished(); + bool WaitForConnection(); + bool WaitForAudio(); + bool WaitForVideo(); void GetAndAddUserMedia(bool audio, - const cricket::AudioOptions& audio_options, + const webrtc::AudioOptions& audio_options, bool video); // sigslots @@ -114,9 +127,9 @@ class PeerConnectionTestWrapper sigslot::signal1 SignalOnSdpReady; sigslot::signal1 SignalOnDataChannel; - rtc::scoped_refptr GetUserMedia( + webrtc::scoped_refptr GetUserMedia( bool audio, - const cricket::AudioOptions& audio_options, + const webrtc::AudioOptions& audio_options, bool video, webrtc::Resolution resolution = { .width = webrtc::FakePeriodicVideoSource::kDefaultWidth, @@ -130,20 +143,19 @@ class PeerConnectionTestWrapper bool CheckForAudio(); bool CheckForVideo(); - webrtc::test::ScopedKeyValueConfig field_trials_; std::string name_; - rtc::SocketServer* const socket_server_; - rtc::Thread* const network_thread_; - rtc::Thread* const worker_thread_; + webrtc::SocketServer* const socket_server_; + webrtc::Thread* const network_thread_; + webrtc::Thread* const worker_thread_; webrtc::SequenceChecker pc_thread_checker_; - rtc::scoped_refptr peer_connection_; - rtc::scoped_refptr + webrtc::scoped_refptr peer_connection_; + webrtc::scoped_refptr peer_connection_factory_; - rtc::scoped_refptr fake_audio_capture_module_; + webrtc::scoped_refptr fake_audio_capture_module_; std::unique_ptr renderer_; int num_get_user_media_calls_ = 0; bool pending_negotiation_; - std::vector> + std::vector> fake_video_sources_; }; diff --git a/pc/test/rtc_stats_obtainer.h b/pc/test/rtc_stats_obtainer.h index b1cc701a06..2013539184 100644 --- a/pc/test/rtc_stats_obtainer.h +++ b/pc/test/rtc_stats_obtainer.h @@ -12,42 +12,42 @@ #define PC_TEST_RTC_STATS_OBTAINER_H_ #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/stats/rtc_stats_collector_callback.h" #include "api/stats/rtc_stats_report.h" -#include "rtc_base/gunit.h" +#include "test/gtest.h" namespace webrtc { class RTCStatsObtainer : public RTCStatsCollectorCallback { public: - static rtc::scoped_refptr Create( - rtc::scoped_refptr* report_ptr = nullptr) { - return rtc::make_ref_counted(report_ptr); + static scoped_refptr Create( + scoped_refptr* report_ptr = nullptr) { + return make_ref_counted(report_ptr); } void OnStatsDelivered( - const rtc::scoped_refptr& report) override { + const scoped_refptr& report) override { EXPECT_TRUE(thread_checker_.IsCurrent()); report_ = report; if (report_ptr_) *report_ptr_ = report_; } - rtc::scoped_refptr report() const { + scoped_refptr report() const { EXPECT_TRUE(thread_checker_.IsCurrent()); return report_; } protected: - explicit 
RTCStatsObtainer( - rtc::scoped_refptr* report_ptr) + explicit RTCStatsObtainer(scoped_refptr* report_ptr) : report_ptr_(report_ptr) {} private: SequenceChecker thread_checker_; - rtc::scoped_refptr report_; - rtc::scoped_refptr* report_ptr_; + scoped_refptr report_; + scoped_refptr* report_ptr_; }; } // namespace webrtc diff --git a/pc/test/rtp_transport_test_util.h b/pc/test/rtp_transport_test_util.h index 593ee002c9..82d2505168 100644 --- a/pc/test/rtp_transport_test_util.h +++ b/pc/test/rtp_transport_test_util.h @@ -11,11 +11,15 @@ #ifndef PC_TEST_RTP_TRANSPORT_TEST_UTIL_H_ #define PC_TEST_RTP_TRANSPORT_TEST_UTIL_H_ +#include #include +#include "absl/functional/any_invocable.h" #include "call/rtp_packet_sink_interface.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "pc/rtp_transport_internal.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/network/sent_packet.h" namespace webrtc { @@ -27,29 +31,33 @@ class TransportObserver : public RtpPacketSinkInterface { explicit TransportObserver(RtpTransportInternal* rtp_transport) { rtp_transport->SubscribeRtcpPacketReceived( - this, [this](rtc::CopyOnWriteBuffer* buffer, int64_t packet_time_ms) { + this, [this](CopyOnWriteBuffer* buffer, int64_t packet_time_ms) { OnRtcpPacketReceived(buffer, packet_time_ms); }); rtp_transport->SubscribeReadyToSend( this, [this](bool arg) { OnReadyToSend(arg); }); rtp_transport->SetUnDemuxableRtpPacketReceivedHandler( - [this](webrtc::RtpPacketReceived& packet) { - OnUndemuxableRtpPacket(packet); - }); + [this](RtpPacketReceived& packet) { OnUndemuxableRtpPacket(packet); }); + rtp_transport->SubscribeSentPacket(this, + [this](const SentPacketInfo& packet) { + sent_packet_count_++; + if (action_on_sent_packet_) { + action_on_sent_packet_(); + } + }); } // RtpPacketInterface override. 
void OnRtpPacket(const RtpPacketReceived& packet) override { rtp_count_++; - last_recv_rtp_packet_ = packet.Buffer(); + last_recv_rtp_packet_ = packet; } void OnUndemuxableRtpPacket(const RtpPacketReceived& packet) { un_demuxable_rtp_count_++; } - void OnRtcpPacketReceived(rtc::CopyOnWriteBuffer* packet, - int64_t packet_time_us) { + void OnRtcpPacketReceived(CopyOnWriteBuffer* packet, int64_t packet_time_us) { rtcp_count_++; last_recv_rtcp_packet_ = *packet; } @@ -57,14 +65,13 @@ class TransportObserver : public RtpPacketSinkInterface { int rtp_count() const { return rtp_count_; } int un_demuxable_rtp_count() const { return un_demuxable_rtp_count_; } int rtcp_count() const { return rtcp_count_; } + int sent_packet_count() const { return sent_packet_count_; } - rtc::CopyOnWriteBuffer last_recv_rtp_packet() { + const RtpPacketReceived& last_recv_rtp_packet() { return last_recv_rtp_packet_; } - rtc::CopyOnWriteBuffer last_recv_rtcp_packet() { - return last_recv_rtcp_packet_; - } + CopyOnWriteBuffer last_recv_rtcp_packet() { return last_recv_rtcp_packet_; } void OnReadyToSend(bool ready) { if (action_on_ready_to_send_) { @@ -81,16 +88,21 @@ class TransportObserver : public RtpPacketSinkInterface { void SetActionOnReadyToSend(absl::AnyInvocable action) { action_on_ready_to_send_ = std::move(action); } + void SetActionOnSentPacket(absl::AnyInvocable action) { + action_on_sent_packet_ = std::move(action); + } private: bool ready_to_send_ = false; int rtp_count_ = 0; int un_demuxable_rtp_count_ = 0; int rtcp_count_ = 0; + int sent_packet_count_ = 0; int ready_to_send_signal_count_ = 0; - rtc::CopyOnWriteBuffer last_recv_rtp_packet_; - rtc::CopyOnWriteBuffer last_recv_rtcp_packet_; + RtpPacketReceived last_recv_rtp_packet_; + CopyOnWriteBuffer last_recv_rtcp_packet_; absl::AnyInvocable action_on_ready_to_send_; + absl::AnyInvocable action_on_sent_packet_; }; } // namespace webrtc diff --git a/pc/test/simulcast_layer_util.cc b/pc/test/simulcast_layer_util.cc index 6ce09b5e9b..aebc71358f 100644 --- a/pc/test/simulcast_layer_util.cc +++ b/pc/test/simulcast_layer_util.cc @@ -10,33 +10,40 @@ #include "pc/test/simulcast_layer_util.h" +#include +#include +#include + #include "absl/algorithm/container.h" +#include "api/jsep.h" +#include "api/rtp_parameters.h" +#include "api/rtp_transceiver_interface.h" +#include "pc/session_description.h" +#include "pc/simulcast_description.h" #include "rtc_base/checks.h" namespace webrtc { -std::vector CreateLayers( - const std::vector& rids, - const std::vector& active) { +std::vector CreateLayers(const std::vector& rids, + const std::vector& active) { RTC_DCHECK_EQ(rids.size(), active.size()); - std::vector result; + std::vector result; absl::c_transform(rids, active, std::back_inserter(result), [](const std::string& rid, bool is_active) { - return cricket::SimulcastLayer(rid, !is_active); + return SimulcastLayer(rid, !is_active); }); return result; } -std::vector CreateLayers( - const std::vector& rids, - bool active) { +std::vector CreateLayers(const std::vector& rids, + bool active) { return CreateLayers(rids, std::vector(rids.size(), active)); } RtpTransceiverInit CreateTransceiverInit( - const std::vector& layers) { + const std::vector& layers) { RtpTransceiverInit init; - for (const cricket::SimulcastLayer& layer : layers) { + for (const SimulcastLayer& layer : layers) { RtpEncodingParameters encoding; encoding.rid = layer.rid; encoding.active = !layer.is_paused; @@ -45,10 +52,10 @@ RtpTransceiverInit CreateTransceiverInit( return init; } 
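// ----------------------------------------------------------------------------
// [Editor's illustration -- not part of the patch.] A minimal sketch of how the
// relocated simulcast helpers above might be exercised from a test once they
// live in the webrtc namespace, assuming the CreateLayers / CreateTransceiverInit
// / RemoveSimulcast signatures introduced in this change. The rid strings, the
// `pc` peer connection, and the `offer` description are hypothetical stand-ins
// for whatever fixture the caller already has.
//
//   #include "pc/test/simulcast_layer_util.h"
//
//   // Three simulcast layers ("q", "h", "f"), all created in the active state.
//   std::vector<webrtc::SimulcastLayer> layers =
//       webrtc::CreateLayers({"q", "h", "f"}, /*active=*/true);
//
//   // Turn the layers into an RtpTransceiverInit whose send encodings carry
//   // the rids, then add a video transceiver configured with it.
//   webrtc::RtpTransceiverInit init = webrtc::CreateTransceiverInit(layers);
//   auto result = pc->AddTransceiver(webrtc::MediaType::VIDEO, init);
//   ASSERT_TRUE(result.ok());
//
//   // Strip the simulcast description from the first m= section of an offer;
//   // the returned SimulcastDescription holds what was removed.
//   webrtc::SimulcastDescription simulcast =
//       webrtc::RemoveSimulcast(offer.get());
// ----------------------------------------------------------------------------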
-cricket::SimulcastDescription RemoveSimulcast(SessionDescriptionInterface* sd) { +SimulcastDescription RemoveSimulcast(SessionDescriptionInterface* sd) { auto mcd = sd->description()->contents()[0].media_description(); auto result = mcd->simulcast_description(); - mcd->set_simulcast_description(cricket::SimulcastDescription()); + mcd->set_simulcast_description(SimulcastDescription()); return result; } diff --git a/pc/test/simulcast_layer_util.h b/pc/test/simulcast_layer_util.h index 6822e3c9fd..c3918c2c99 100644 --- a/pc/test/simulcast_layer_util.h +++ b/pc/test/simulcast_layer_util.h @@ -16,23 +16,20 @@ #include "api/jsep.h" #include "api/rtp_transceiver_interface.h" -#include "pc/session_description.h" #include "pc/simulcast_description.h" namespace webrtc { -std::vector CreateLayers( - const std::vector& rids, - const std::vector& active); +std::vector CreateLayers(const std::vector& rids, + const std::vector& active); -std::vector CreateLayers( - const std::vector& rids, - bool active); +std::vector CreateLayers(const std::vector& rids, + bool active); RtpTransceiverInit CreateTransceiverInit( - const std::vector& layers); + const std::vector& layers); -cricket::SimulcastDescription RemoveSimulcast(SessionDescriptionInterface* sd); +SimulcastDescription RemoveSimulcast(SessionDescriptionInterface* sd); } // namespace webrtc diff --git a/pc/test/srtp_test_util.h b/pc/test/srtp_test_util.h index ae02310eba..e3118cd947 100644 --- a/pc/test/srtp_test_util.h +++ b/pc/test/srtp_test_util.h @@ -11,35 +11,57 @@ #ifndef PC_TEST_SRTP_TEST_UTIL_H_ #define PC_TEST_SRTP_TEST_UTIL_H_ -#include +#include -namespace rtc { +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/ssl_stream_adapter.h" + +namespace webrtc { + +static const ZeroOnFreeBuffer kTestKey1{ + "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234", 30}; +static const ZeroOnFreeBuffer kTestKey2{ + "4321ZYXWVUTSRQPONMLKJIHGFEDCBA", 30}; -extern const char kCsAesCm128HmacSha1_32[]; -extern const char kCsAeadAes128Gcm[]; -extern const char kCsAeadAes256Gcm[]; - -static const uint8_t kTestKey1[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234"; -static const uint8_t kTestKey2[] = "4321ZYXWVUTSRQPONMLKJIHGFEDCBA"; -static const int kTestKeyLen = 30; - -static int rtp_auth_tag_len(const std::string& cs) { - if (cs == kCsAesCm128HmacSha1_32) { - return 4; - } else if (cs == kCsAeadAes128Gcm || cs == kCsAeadAes256Gcm) { - return 16; - } else { - return 10; +static int rtp_auth_tag_len(int crypto_suite) { + switch (crypto_suite) { + case webrtc::kSrtpAes128CmSha1_32: + return 4; + case webrtc::kSrtpAes128CmSha1_80: + return 10; + case webrtc::kSrtpAeadAes128Gcm: + case webrtc::kSrtpAeadAes256Gcm: + return 16; + default: + RTC_CHECK_NOTREACHED(); } } -static int rtcp_auth_tag_len(const std::string& cs) { - if (cs == kCsAeadAes128Gcm || cs == kCsAeadAes256Gcm) { - return 16; - } else { - return 10; + +static int rtcp_auth_tag_len(int crypto_suite) { + switch (crypto_suite) { + case webrtc::kSrtpAes128CmSha1_32: + case webrtc::kSrtpAes128CmSha1_80: + return 10; + case webrtc::kSrtpAeadAes128Gcm: + case webrtc::kSrtpAeadAes256Gcm: + return 16; + default: + RTC_CHECK_NOTREACHED(); } } +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::kTestKey1; +using ::webrtc::kTestKey2; +using ::webrtc::rtcp_auth_tag_len; +using ::webrtc::rtp_auth_tag_len; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // PC_TEST_SRTP_TEST_UTIL_H_ diff --git a/pc/test/svc_e2e_tests.cc b/pc/test/svc_e2e_tests.cc index ae35c7f676..60469c76b5 100644 --- a/pc/test/svc_e2e_tests.cc +++ b/pc/test/svc_e2e_tests.cc @@ -7,17 +7,24 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ +#include +#include #include +#include #include +#include #include #include -#include "api/media_stream_interface.h" +#include "absl/strings/string_view.h" +#include "api/function_view.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" #include "api/test/create_network_emulation_manager.h" #include "api/test/create_peer_connection_quality_test_frame_generator.h" #include "api/test/create_peerconnection_quality_test_fixture.h" -#include "api/test/frame_generator_interface.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/network_emulation_manager.h" #include "api/test/pclf/media_configuration.h" @@ -26,26 +33,28 @@ #include "api/test/peerconnection_quality_test_fixture.h" #include "api/test/simulated_network.h" #include "api/test/time_controller.h" +#include "api/test/video_quality_analyzer_interface.h" +#include "api/units/time_delta.h" +#include "api/video/encoded_image.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/vp9_profile.h" -#include "call/simulated_network.h" -#include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "media/base/media_constants.h" #include "modules/video_coding/svc/scalability_mode_util.h" +#include "rtc_base/checks.h" #include "rtc_base/containers/flat_map.h" +#include "rtc_base/logging.h" +#include "system_wrappers/include/clock.h" #include "system_wrappers/include/field_trial.h" #include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h" -#include "test/pc/e2e/network_quality_metrics_reporter.h" +#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h" #include "test/testsupport/file_utils.h" namespace webrtc { namespace { -using ::cricket::kAv1CodecName; -using ::cricket::kH264CodecName; -using ::cricket::kVp8CodecName; -using ::cricket::kVp9CodecName; using ::testing::Combine; using ::testing::Optional; using ::testing::UnitTest; @@ -63,17 +72,15 @@ CreateTestFixture(absl::string_view test_case_name, TimeController& time_controller, std::pair network_links, - rtc::FunctionView alice_configurer, - rtc::FunctionView bob_configurer, + FunctionView alice_configurer, + FunctionView bob_configurer, std::unique_ptr video_quality_analyzer = nullptr) { auto fixture = webrtc_pc_e2e::CreatePeerConnectionE2EQualityTestFixture( std::string(test_case_name), time_controller, nullptr, std::move(video_quality_analyzer)); - auto alice = std::make_unique( - network_links.first->network_dependencies()); - auto bob = std::make_unique( - network_links.second->network_dependencies()); + auto alice = std::make_unique(*network_links.first); + auto bob = std::make_unique(*network_links.second); alice_configurer(alice.get()); bob_configurer(bob.get()); fixture->AddPeer(std::move(alice)); @@ -94,7 +101,7 @@ enum 
class UseDependencyDescriptor { struct SvcTestParameters { static SvcTestParameters Create(const std::string& codec_name, const std::string& scalability_mode_str) { - absl::optional scalability_mode = + std::optional scalability_mode = ScalabilityModeFromString(scalability_mode_str); RTC_CHECK(scalability_mode.has_value()) << "Unsupported scalability mode: " << scalability_mode_str; @@ -122,9 +129,9 @@ class SvcTest : public testing::TestWithParam< } static VideoCodecConfig ToVideoCodecConfig(absl::string_view codec) { - if (codec == cricket::kVp9CodecName) { + if (codec == kVp9CodecName) { return VideoCodecConfig( - cricket::kVp9CodecName, + kVp9CodecName, {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}}); } @@ -160,10 +167,9 @@ std::string SvcTestNameGenerator( // encoder and decoder level. class SvcVideoQualityAnalyzer : public DefaultVideoQualityAnalyzer { public: - using SpatialTemporalLayerCounts = - webrtc::flat_map>; + using SpatialTemporalLayerCounts = flat_map>; - explicit SvcVideoQualityAnalyzer(webrtc::Clock* clock) + explicit SvcVideoQualityAnalyzer(Clock* clock) : DefaultVideoQualityAnalyzer(clock, test::GetGlobalMetricsLogger(), DefaultVideoQualityAnalyzerOptions{ @@ -177,8 +183,8 @@ class SvcVideoQualityAnalyzer : public DefaultVideoQualityAnalyzer { const EncodedImage& encoded_image, const EncoderStats& stats, bool discarded) override { - absl::optional spatial_id = encoded_image.SpatialIndex(); - absl::optional temporal_id = encoded_image.TemporalIndex(); + std::optional spatial_id = encoded_image.SpatialIndex(); + std::optional temporal_id = encoded_image.TemporalIndex(); encoder_layers_seen_[spatial_id.value_or(0)][temporal_id.value_or(0)]++; DefaultVideoQualityAnalyzer::OnFrameEncoded( peer_name, frame_id, encoded_image, stats, discarded); @@ -187,8 +193,8 @@ class SvcVideoQualityAnalyzer : public DefaultVideoQualityAnalyzer { void OnFramePreDecode(absl::string_view peer_name, uint16_t frame_id, const EncodedImage& input_image) override { - absl::optional spatial_id = input_image.SpatialIndex(); - absl::optional temporal_id = input_image.TemporalIndex(); + std::optional spatial_id = input_image.SpatialIndex(); + std::optional temporal_id = input_image.TemporalIndex(); if (!spatial_id) { decoder_layers_seen_[0][temporal_id.value_or(0)]++; } else { @@ -207,11 +213,11 @@ class SvcVideoQualityAnalyzer : public DefaultVideoQualityAnalyzer { void OnStatsReports( absl::string_view pc_label, - const rtc::scoped_refptr& report) override { + const scoped_refptr& report) override { // Extract the scalability mode reported in the stats. 
auto outbound_stats = report->GetStatsOfType(); for (const auto& stat : outbound_stats) { - if (stat->scalability_mode.is_defined()) { + if (stat->scalability_mode.has_value()) { reported_scalability_mode_ = *stat->scalability_mode; } } @@ -223,14 +229,14 @@ class SvcVideoQualityAnalyzer : public DefaultVideoQualityAnalyzer { const SpatialTemporalLayerCounts& decoder_layers_seen() const { return decoder_layers_seen_; } - const absl::optional reported_scalability_mode() const { + const std::optional reported_scalability_mode() const { return reported_scalability_mode_; } private: SpatialTemporalLayerCounts encoder_layers_seen_; SpatialTemporalLayerCounts decoder_layers_seen_; - absl::optional reported_scalability_mode_; + std::optional reported_scalability_mode_; }; MATCHER_P2(HasSpatialAndTemporalLayers, @@ -315,9 +321,9 @@ TEST_P(SvcTest, ScalabilityModeSupported) { if (UseDependencyDescriptor()) { trials += "WebRTC-DependencyDescriptorAdvertised/Enabled/"; } - webrtc::test::ScopedFieldTrials override_trials(AppendFieldTrials(trials)); + test::ScopedFieldTrials override_trials(AppendFieldTrials(trials)); std::unique_ptr network_emulation_manager = - CreateNetworkEmulationManager(webrtc::TimeMode::kSimulated); + CreateNetworkEmulationManager({.time_mode = TimeMode::kSimulated}); auto analyzer = std::make_unique( network_emulation_manager->time_controller()->GetClock()); SvcVideoQualityAnalyzer* analyzer_ptr = analyzer.get(); @@ -337,10 +343,9 @@ TEST_P(SvcTest, ScalabilityModeSupported) { RtpEncodingParameters parameters; parameters.scalability_mode = SvcTestParameters().scalability_mode; video.encoding_params.push_back(parameters); - alice->AddVideoConfig( - std::move(video), - CreateScreenShareFrameGenerator( - video, ScreenShareConfig(TimeDelta::Seconds(5)))); + auto generator = CreateScreenShareFrameGenerator( + video, ScreenShareConfig(TimeDelta::Seconds(5))); + alice->AddVideoConfig(std::move(video), std::move(generator)); alice->SetVideoCodecs({video_codec_config}); }, [](PeerConfigurer* bob) {}, std::move(analyzer)); @@ -456,6 +461,7 @@ INSTANTIATE_TEST_SUITE_P( Values(UseDependencyDescriptor::Disabled, UseDependencyDescriptor::Enabled)), SvcTestNameGenerator); +#endif INSTANTIATE_TEST_SUITE_P( SvcTestAV1, @@ -475,16 +481,17 @@ INSTANTIATE_TEST_SUITE_P( SvcTestParameters::Create(kAv1CodecName, "L2T3h"), SvcTestParameters::Create(kAv1CodecName, "L2T3_KEY"), // SvcTestParameters::Create(kAv1CodecName, "L2T3_KEY_SHIFT"), - SvcTestParameters::Create(kAv1CodecName, "L3T1"), - SvcTestParameters::Create(kAv1CodecName, "L3T1h"), - SvcTestParameters::Create(kAv1CodecName, "L3T1_KEY"), - SvcTestParameters::Create(kAv1CodecName, "L3T2"), - SvcTestParameters::Create(kAv1CodecName, "L3T2h"), - SvcTestParameters::Create(kAv1CodecName, "L3T2_KEY"), + // TODO(bugs.webrtc.org/15666): Investigate and reenable AV1 + // L3 tests. 
SvcTestParameters::Create(kAv1CodecName, "L3T1"), + // SvcTestParameters::Create(kAv1CodecName, "L3T1h"), + // SvcTestParameters::Create(kAv1CodecName, "L3T1_KEY"), + // SvcTestParameters::Create(kAv1CodecName, "L3T2"), + // SvcTestParameters::Create(kAv1CodecName, "L3T2h"), + // SvcTestParameters::Create(kAv1CodecName, "L3T2_KEY"), // SvcTestParameters::Create(kAv1CodecName, "L3T2_KEY_SHIFT"), - SvcTestParameters::Create(kAv1CodecName, "L3T3"), - SvcTestParameters::Create(kAv1CodecName, "L3T3h"), - SvcTestParameters::Create(kAv1CodecName, "L3T3_KEY"), + // SvcTestParameters::Create(kAv1CodecName, "L3T3"), + // SvcTestParameters::Create(kAv1CodecName, "L3T3h"), + // SvcTestParameters::Create(kAv1CodecName, "L3T3_KEY"), // SvcTestParameters::Create(kAv1CodecName, "L3T3_KEY_SHIFT"), SvcTestParameters::Create(kAv1CodecName, "S2T1"), SvcTestParameters::Create(kAv1CodecName, "S2T1h"), @@ -492,16 +499,16 @@ INSTANTIATE_TEST_SUITE_P( SvcTestParameters::Create(kAv1CodecName, "S2T2h"), SvcTestParameters::Create(kAv1CodecName, "S2T3"), SvcTestParameters::Create(kAv1CodecName, "S2T3h"), - SvcTestParameters::Create(kAv1CodecName, "S3T1"), - SvcTestParameters::Create(kAv1CodecName, "S3T1h"), - SvcTestParameters::Create(kAv1CodecName, "S3T2"), - SvcTestParameters::Create(kAv1CodecName, "S3T2h"), - SvcTestParameters::Create(kAv1CodecName, "S3T3"), - SvcTestParameters::Create(kAv1CodecName, "S3T3h"), + // TODO(bugs.webrtc.org/15666): Investigate and reenable AV1 + // S3 tests. + // SvcTestParameters::Create(kAv1CodecName, "S3T1"), + // SvcTestParameters::Create(kAv1CodecName, "S3T1h"), + // SvcTestParameters::Create(kAv1CodecName, "S3T2"), + // SvcTestParameters::Create(kAv1CodecName, "S3T2h"), + // SvcTestParameters::Create(kAv1CodecName, "S3T3"), + // SvcTestParameters::Create(kAv1CodecName, "S3T3h"), }), Values(UseDependencyDescriptor::Enabled)), SvcTestNameGenerator); -#endif - } // namespace webrtc diff --git a/pc/track_media_info_map.cc b/pc/track_media_info_map.cc index ac24d07f61..a1eef98fe7 100644 --- a/pc/track_media_info_map.cc +++ b/pc/track_media_info_map.cc @@ -11,13 +11,20 @@ #include "pc/track_media_info_map.h" #include +#include +#include #include -#include #include +#include "api/array_view.h" +#include "api/media_stream_interface.h" #include "api/media_types.h" #include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "media/base/media_channel.h" #include "media/base/stream_params.h" +#include "pc/rtp_receiver.h" +#include "pc/rtp_sender.h" #include "rtc_base/checks.h" #include "rtc_base/thread.h" @@ -38,8 +45,8 @@ const V* FindAddressOrNull(const std::map& map, const K& key) { } void GetAudioAndVideoTrackBySsrc( - rtc::ArrayView> rtp_senders, - rtc::ArrayView> rtp_receivers, + ArrayView> rtp_senders, + ArrayView> rtp_receivers, std::map* local_audio_track_by_ssrc, std::map* local_video_track_by_ssrc, std::map* remote_audio_track_by_ssrc, @@ -51,7 +58,7 @@ void GetAudioAndVideoTrackBySsrc( RTC_DCHECK(remote_audio_track_by_ssrc->empty()); RTC_DCHECK(remote_video_track_by_ssrc->empty()); for (const auto& rtp_sender : rtp_senders) { - cricket::MediaType media_type = rtp_sender->media_type(); + webrtc::MediaType media_type = rtp_sender->media_type(); MediaStreamTrackInterface* track = rtp_sender->track().get(); if (!track) { continue; @@ -59,7 +66,7 @@ void GetAudioAndVideoTrackBySsrc( // TODO(deadbeef): `ssrc` should be removed in favor of `GetParameters`. 
uint32_t ssrc = rtp_sender->ssrc(); if (ssrc != 0) { - if (media_type == cricket::MEDIA_TYPE_AUDIO) { + if (media_type == webrtc::MediaType::AUDIO) { RTC_DCHECK(local_audio_track_by_ssrc->find(ssrc) == local_audio_track_by_ssrc->end()); (*local_audio_track_by_ssrc)[ssrc] = @@ -73,21 +80,21 @@ void GetAudioAndVideoTrackBySsrc( } } for (const auto& rtp_receiver : rtp_receivers) { - cricket::MediaType media_type = rtp_receiver->media_type(); + webrtc::MediaType media_type = rtp_receiver->media_type(); MediaStreamTrackInterface* track = rtp_receiver->track().get(); RTC_DCHECK(track); RtpParameters params = rtp_receiver->GetParameters(); for (const RtpEncodingParameters& encoding : params.encodings) { if (!encoding.ssrc) { - if (media_type == cricket::MEDIA_TYPE_AUDIO) { + if (media_type == webrtc::MediaType::AUDIO) { *unsignaled_audio_track = static_cast(track); } else { - RTC_DCHECK(media_type == cricket::MEDIA_TYPE_VIDEO); + RTC_DCHECK(media_type == webrtc::MediaType::VIDEO); *unsignaled_video_track = static_cast(track); } continue; } - if (media_type == cricket::MEDIA_TYPE_AUDIO) { + if (media_type == webrtc::MediaType::AUDIO) { RTC_DCHECK(remote_audio_track_by_ssrc->find(*encoding.ssrc) == remote_audio_track_by_ssrc->end()); (*remote_audio_track_by_ssrc)[*encoding.ssrc] = @@ -107,11 +114,11 @@ void GetAudioAndVideoTrackBySsrc( TrackMediaInfoMap::TrackMediaInfoMap() = default; void TrackMediaInfoMap::Initialize( - absl::optional voice_media_info, - absl::optional video_media_info, - rtc::ArrayView> rtp_senders, - rtc::ArrayView> rtp_receivers) { - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + std::optional voice_media_info, + std::optional video_media_info, + ArrayView> rtp_senders, + ArrayView> rtp_receivers) { + Thread::ScopedDisallowBlockingCalls no_blocking_calls; RTC_DCHECK(!is_initialized_); is_initialized_ = true; voice_media_info_ = std::move(voice_media_info); @@ -215,59 +222,59 @@ void TrackMediaInfoMap::Initialize( } } -const cricket::VoiceSenderInfo* TrackMediaInfoMap::GetVoiceSenderInfoBySsrc( +const VoiceSenderInfo* TrackMediaInfoMap::GetVoiceSenderInfoBySsrc( uint32_t ssrc) const { RTC_DCHECK(is_initialized_); return FindValueOrNull(voice_info_by_sender_ssrc_, ssrc); } -const cricket::VoiceReceiverInfo* TrackMediaInfoMap::GetVoiceReceiverInfoBySsrc( +const VoiceReceiverInfo* TrackMediaInfoMap::GetVoiceReceiverInfoBySsrc( uint32_t ssrc) const { RTC_DCHECK(is_initialized_); return FindValueOrNull(voice_info_by_receiver_ssrc_, ssrc); } -const cricket::VideoSenderInfo* TrackMediaInfoMap::GetVideoSenderInfoBySsrc( +const VideoSenderInfo* TrackMediaInfoMap::GetVideoSenderInfoBySsrc( uint32_t ssrc) const { RTC_DCHECK(is_initialized_); return FindValueOrNull(video_info_by_sender_ssrc_, ssrc); } -const cricket::VideoReceiverInfo* TrackMediaInfoMap::GetVideoReceiverInfoBySsrc( +const VideoReceiverInfo* TrackMediaInfoMap::GetVideoReceiverInfoBySsrc( uint32_t ssrc) const { RTC_DCHECK(is_initialized_); return FindValueOrNull(video_info_by_receiver_ssrc_, ssrc); } -rtc::scoped_refptr TrackMediaInfoMap::GetAudioTrack( - const cricket::VoiceSenderInfo& voice_sender_info) const { +scoped_refptr TrackMediaInfoMap::GetAudioTrack( + const VoiceSenderInfo& voice_sender_info) const { RTC_DCHECK(is_initialized_); return FindValueOrNull(audio_track_by_sender_info_, &voice_sender_info); } -rtc::scoped_refptr TrackMediaInfoMap::GetAudioTrack( - const cricket::VoiceReceiverInfo& voice_receiver_info) const { +scoped_refptr TrackMediaInfoMap::GetAudioTrack( + const VoiceReceiverInfo& 
voice_receiver_info) const { RTC_DCHECK(is_initialized_); return FindValueOrNull(audio_track_by_receiver_info_, &voice_receiver_info); } -rtc::scoped_refptr TrackMediaInfoMap::GetVideoTrack( - const cricket::VideoSenderInfo& video_sender_info) const { +scoped_refptr TrackMediaInfoMap::GetVideoTrack( + const VideoSenderInfo& video_sender_info) const { RTC_DCHECK(is_initialized_); return FindValueOrNull(video_track_by_sender_info_, &video_sender_info); } -rtc::scoped_refptr TrackMediaInfoMap::GetVideoTrack( - const cricket::VideoReceiverInfo& video_receiver_info) const { +scoped_refptr TrackMediaInfoMap::GetVideoTrack( + const VideoReceiverInfo& video_receiver_info) const { RTC_DCHECK(is_initialized_); return FindValueOrNull(video_track_by_receiver_info_, &video_receiver_info); } -absl::optional TrackMediaInfoMap::GetAttachmentIdByTrack( +std::optional TrackMediaInfoMap::GetAttachmentIdByTrack( const MediaStreamTrackInterface* track) const { RTC_DCHECK(is_initialized_); auto it = attachment_id_by_track_.find(track); - return it != attachment_id_by_track_.end() ? absl::optional(it->second) - : absl::nullopt; + return it != attachment_id_by_track_.end() ? std::optional(it->second) + : std::nullopt; } } // namespace webrtc diff --git a/pc/track_media_info_map.h b/pc/track_media_info_map.h index 98f8548a10..eb202553d0 100644 --- a/pc/track_media_info_map.h +++ b/pc/track_media_info_map.h @@ -15,10 +15,10 @@ #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/media_stream_interface.h" #include "api/scoped_refptr.h" @@ -40,63 +40,56 @@ class TrackMediaInfoMap { // Takes ownership of the "infos". Does not affect the lifetime of the senders // or receivers, but TrackMediaInfoMap will keep their associated tracks alive // through reference counting until the map is destroyed. 
- void Initialize( - absl::optional voice_media_info, - absl::optional video_media_info, - rtc::ArrayView> rtp_senders, - rtc::ArrayView> rtp_receivers); + void Initialize(std::optional voice_media_info, + std::optional video_media_info, + ArrayView> rtp_senders, + ArrayView> rtp_receivers); - const absl::optional& voice_media_info() const { + const std::optional& voice_media_info() const { RTC_DCHECK(is_initialized_); return voice_media_info_; } - const absl::optional& video_media_info() const { + const std::optional& video_media_info() const { RTC_DCHECK(is_initialized_); return video_media_info_; } - const cricket::VoiceSenderInfo* GetVoiceSenderInfoBySsrc(uint32_t ssrc) const; - const cricket::VoiceReceiverInfo* GetVoiceReceiverInfoBySsrc( - uint32_t ssrc) const; - const cricket::VideoSenderInfo* GetVideoSenderInfoBySsrc(uint32_t ssrc) const; - const cricket::VideoReceiverInfo* GetVideoReceiverInfoBySsrc( - uint32_t ssrc) const; + const VoiceSenderInfo* GetVoiceSenderInfoBySsrc(uint32_t ssrc) const; + const VoiceReceiverInfo* GetVoiceReceiverInfoBySsrc(uint32_t ssrc) const; + const VideoSenderInfo* GetVideoSenderInfoBySsrc(uint32_t ssrc) const; + const VideoReceiverInfo* GetVideoReceiverInfoBySsrc(uint32_t ssrc) const; - rtc::scoped_refptr GetAudioTrack( - const cricket::VoiceSenderInfo& voice_sender_info) const; - rtc::scoped_refptr GetAudioTrack( - const cricket::VoiceReceiverInfo& voice_receiver_info) const; - rtc::scoped_refptr GetVideoTrack( - const cricket::VideoSenderInfo& video_sender_info) const; - rtc::scoped_refptr GetVideoTrack( - const cricket::VideoReceiverInfo& video_receiver_info) const; + scoped_refptr GetAudioTrack( + const VoiceSenderInfo& voice_sender_info) const; + scoped_refptr GetAudioTrack( + const VoiceReceiverInfo& voice_receiver_info) const; + scoped_refptr GetVideoTrack( + const VideoSenderInfo& video_sender_info) const; + scoped_refptr GetVideoTrack( + const VideoReceiverInfo& video_receiver_info) const; // TODO(hta): Remove this function, and redesign the callers not to need it. // It is not going to work if a track is attached multiple times, and // it is not going to work if a received track is attached as a sending // track (loopback). - absl::optional GetAttachmentIdByTrack( + std::optional GetAttachmentIdByTrack( const MediaStreamTrackInterface* track) const; private: bool is_initialized_ = false; - absl::optional voice_media_info_; - absl::optional video_media_info_; + std::optional voice_media_info_; + std::optional video_media_info_; // These maps map info objects to their corresponding tracks. They are always // the inverse of the maps above. One info object always maps to only one // track. The use of scoped_refptr<> here ensures the tracks outlive // TrackMediaInfoMap. - std::map> + std::map> audio_track_by_sender_info_; - std::map> + std::map> audio_track_by_receiver_info_; - std::map> + std::map> video_track_by_sender_info_; - std::map> + std::map> video_track_by_receiver_info_; // Map of tracks to attachment IDs. // Necessary because senders and receivers live on the signaling thread, @@ -105,10 +98,10 @@ class TrackMediaInfoMap { // thread jumping. std::map attachment_id_by_track_; // These maps map SSRCs to the corresponding voice or video info objects. 
- std::map voice_info_by_sender_ssrc_; - std::map voice_info_by_receiver_ssrc_; - std::map video_info_by_sender_ssrc_; - std::map video_info_by_receiver_ssrc_; + std::map voice_info_by_sender_ssrc_; + std::map voice_info_by_receiver_ssrc_; + std::map video_info_by_sender_ssrc_; + std::map video_info_by_receiver_ssrc_; }; } // namespace webrtc diff --git a/pc/track_media_info_map_unittest.cc b/pc/track_media_info_map_unittest.cc index bffa3eb866..a9487cc507 100644 --- a/pc/track_media_info_map_unittest.cc +++ b/pc/track_media_info_map_unittest.cc @@ -14,21 +14,25 @@ #include #include +#include #include -#include #include #include +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" #include "api/media_types.h" #include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/test/mock_video_track.h" #include "media/base/media_channel.h" #include "pc/audio_track.h" +#include "pc/rtp_receiver.h" +#include "pc/rtp_sender.h" #include "pc/test/fake_video_track_source.h" #include "pc/test/mock_rtp_receiver_internal.h" #include "pc/test/mock_rtp_sender_internal.h" #include "pc/video_track.h" -#include "rtc_base/checks.h" #include "rtc_base/thread.h" #include "test/gmock.h" #include "test/gtest.h" @@ -50,17 +54,17 @@ RtpParameters CreateRtpParametersWithSsrcs( return params; } -rtc::scoped_refptr CreateMockRtpSender( - cricket::MediaType media_type, +scoped_refptr CreateMockRtpSender( + webrtc::MediaType media_type, std::initializer_list ssrcs, - rtc::scoped_refptr track) { + scoped_refptr track) { uint32_t first_ssrc; if (ssrcs.size()) { first_ssrc = *ssrcs.begin(); } else { first_ssrc = 0; } - auto sender = rtc::make_ref_counted(); + auto sender = make_ref_counted(); EXPECT_CALL(*sender, track()) .WillRepeatedly(::testing::Return(std::move(track))); EXPECT_CALL(*sender, ssrc()).WillRepeatedly(::testing::Return(first_ssrc)); @@ -72,11 +76,11 @@ rtc::scoped_refptr CreateMockRtpSender( return sender; } -rtc::scoped_refptr CreateMockRtpReceiver( - cricket::MediaType media_type, +scoped_refptr CreateMockRtpReceiver( + webrtc::MediaType media_type, std::initializer_list ssrcs, - rtc::scoped_refptr track) { - auto receiver = rtc::make_ref_counted(); + scoped_refptr track) { + auto receiver = make_ref_counted(); EXPECT_CALL(*receiver, track()) .WillRepeatedly(::testing::Return(std::move(track))); EXPECT_CALL(*receiver, media_type()) @@ -87,14 +91,12 @@ rtc::scoped_refptr CreateMockRtpReceiver( return receiver; } -rtc::scoped_refptr CreateVideoTrack( - const std::string& id) { +scoped_refptr CreateVideoTrack(const std::string& id) { return VideoTrack::Create(id, FakeVideoTrackSource::Create(false), - rtc::Thread::Current()); + Thread::Current()); } -rtc::scoped_refptr CreateMockVideoTrack( - const std::string& id) { +scoped_refptr CreateMockVideoTrack(const std::string& id) { auto track = MockVideoTrack::Create(); EXPECT_CALL(*track, kind()) .WillRepeatedly(::testing::Return(VideoTrack::kVideoKind)); @@ -117,26 +119,26 @@ class TrackMediaInfoMapTest : public ::testing::Test { void AddRtpSenderWithSsrcs(std::initializer_list ssrcs, MediaStreamTrackInterface* local_track) { - rtc::scoped_refptr rtp_sender = CreateMockRtpSender( + scoped_refptr rtp_sender = CreateMockRtpSender( local_track->kind() == MediaStreamTrackInterface::kAudioKind - ? cricket::MEDIA_TYPE_AUDIO - : cricket::MEDIA_TYPE_VIDEO, - ssrcs, rtc::scoped_refptr(local_track)); + ? 
webrtc::MediaType::AUDIO + : webrtc::MediaType::VIDEO, + ssrcs, scoped_refptr(local_track)); rtp_senders_.push_back(rtp_sender); if (local_track->kind() == MediaStreamTrackInterface::kAudioKind) { - cricket::VoiceSenderInfo voice_sender_info; + VoiceSenderInfo voice_sender_info; size_t i = 0; for (uint32_t ssrc : ssrcs) { - voice_sender_info.local_stats.push_back(cricket::SsrcSenderInfo()); + voice_sender_info.local_stats.push_back(SsrcSenderInfo()); voice_sender_info.local_stats[i++].ssrc = ssrc; } voice_media_info_.senders.push_back(voice_sender_info); } else { - cricket::VideoSenderInfo video_sender_info; + VideoSenderInfo video_sender_info; size_t i = 0; for (uint32_t ssrc : ssrcs) { - video_sender_info.local_stats.push_back(cricket::SsrcSenderInfo()); + video_sender_info.local_stats.push_back(SsrcSenderInfo()); video_sender_info.local_stats[i++].ssrc = ssrc; } video_media_info_.senders.push_back(video_sender_info); @@ -148,24 +150,24 @@ class TrackMediaInfoMapTest : public ::testing::Test { MediaStreamTrackInterface* remote_track) { auto rtp_receiver = CreateMockRtpReceiver( remote_track->kind() == MediaStreamTrackInterface::kAudioKind - ? cricket::MEDIA_TYPE_AUDIO - : cricket::MEDIA_TYPE_VIDEO, - ssrcs, rtc::scoped_refptr(remote_track)); + ? webrtc::MediaType::AUDIO + : webrtc::MediaType::VIDEO, + ssrcs, scoped_refptr(remote_track)); rtp_receivers_.push_back(rtp_receiver); if (remote_track->kind() == MediaStreamTrackInterface::kAudioKind) { - cricket::VoiceReceiverInfo voice_receiver_info; + VoiceReceiverInfo voice_receiver_info; size_t i = 0; for (uint32_t ssrc : ssrcs) { - voice_receiver_info.local_stats.push_back(cricket::SsrcReceiverInfo()); + voice_receiver_info.local_stats.push_back(SsrcReceiverInfo()); voice_receiver_info.local_stats[i++].ssrc = ssrc; } voice_media_info_.receivers.push_back(voice_receiver_info); } else { - cricket::VideoReceiverInfo video_receiver_info; + VideoReceiverInfo video_receiver_info; size_t i = 0; for (uint32_t ssrc : ssrcs) { - video_receiver_info.local_stats.push_back(cricket::SsrcReceiverInfo()); + video_receiver_info.local_stats.push_back(SsrcReceiverInfo()); video_receiver_info.local_stats[i++].ssrc = ssrc; } video_media_info_.receivers.push_back(video_receiver_info); @@ -180,18 +182,18 @@ class TrackMediaInfoMapTest : public ::testing::Test { } private: - rtc::AutoThread main_thread_; - cricket::VoiceMediaInfo voice_media_info_; - cricket::VideoMediaInfo video_media_info_; + AutoThread main_thread_; + VoiceMediaInfo voice_media_info_; + VideoMediaInfo video_media_info_; protected: - std::vector> rtp_senders_; - std::vector> rtp_receivers_; + std::vector> rtp_senders_; + std::vector> rtp_receivers_; TrackMediaInfoMap map_; - rtc::scoped_refptr local_audio_track_; - rtc::scoped_refptr remote_audio_track_; - rtc::scoped_refptr local_video_track_; - rtc::scoped_refptr remote_video_track_; + scoped_refptr local_audio_track_; + scoped_refptr remote_audio_track_; + scoped_refptr local_video_track_; + scoped_refptr remote_video_track_; }; } // namespace @@ -325,7 +327,7 @@ TEST_F(TrackMediaInfoMapTest, GetAttachmentIdByTrack) { InitializeMap(); EXPECT_EQ(rtp_senders_[0]->AttachmentId(), map_.GetAttachmentIdByTrack(local_audio_track_.get())); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, map_.GetAttachmentIdByTrack(local_video_track_.get())); } diff --git a/pc/transceiver_list.cc b/pc/transceiver_list.cc index 250dfbc9e2..4e7df5ec1a 100644 --- a/pc/transceiver_list.cc +++ b/pc/transceiver_list.cc @@ -10,8 +10,16 @@ #include 
"pc/transceiver_list.h" +#include +#include #include +#include +#include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "pc/rtp_transceiver.h" #include "rtc_base/checks.h" namespace webrtc { @@ -22,8 +30,8 @@ void TransceiverStableState::set_newly_created() { } void TransceiverStableState::SetMSectionIfUnset( - absl::optional mid, - absl::optional mline_index) { + std::optional mid, + std::optional mline_index) { if (!has_m_section_) { mid_ = mid; mline_index_ = mline_index; @@ -53,7 +61,7 @@ std::vector TransceiverList::ListInternal() const { } RtpTransceiverProxyRefPtr TransceiverList::FindBySender( - rtc::scoped_refptr sender) const { + scoped_refptr sender) const { RTC_DCHECK_RUN_ON(&sequence_checker_); for (auto transceiver : transceivers_) { if (transceiver->sender() == sender) { diff --git a/pc/transceiver_list.h b/pc/transceiver_list.h index 848ccc2c3b..1abb7d2a4f 100644 --- a/pc/transceiver_list.h +++ b/pc/transceiver_list.h @@ -15,10 +15,10 @@ #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/media_types.h" #include "api/rtc_error.h" #include "api/rtp_parameters.h" @@ -32,7 +32,7 @@ namespace webrtc { -typedef rtc::scoped_refptr> +typedef scoped_refptr> RtpTransceiverProxyRefPtr; // Captures partial state to be used for rollback. Applicable only in @@ -41,21 +41,21 @@ class TransceiverStableState { public: TransceiverStableState() {} void set_newly_created(); - void SetMSectionIfUnset(absl::optional mid, - absl::optional mline_index); + void SetMSectionIfUnset(std::optional mid, + std::optional mline_index); void SetRemoteStreamIds(const std::vector& ids); void SetInitSendEncodings( const std::vector& encodings); void SetFiredDirection( - absl::optional fired_direction) { + std::optional fired_direction) { fired_direction_ = fired_direction; } - absl::optional mid() const { return mid_; } - absl::optional mline_index() const { return mline_index_; } - absl::optional> remote_stream_ids() const { + std::optional mid() const { return mid_; } + std::optional mline_index() const { return mline_index_; } + std::optional> remote_stream_ids() const { return remote_stream_ids_; } - absl::optional> init_send_encodings() + std::optional> init_send_encodings() const { return init_send_encodings_; } @@ -64,16 +64,16 @@ class TransceiverStableState { bool did_set_fired_direction() const { return fired_direction_.has_value(); } // Because fired_direction() is nullable, did_set_fired_direction() is used to // distinguish beteen "no value" and "null value". - absl::optional fired_direction() const { + std::optional fired_direction() const { RTC_DCHECK(did_set_fired_direction()); return fired_direction_.value(); } private: - absl::optional mid_; - absl::optional mline_index_; - absl::optional> remote_stream_ids_; - absl::optional> init_send_encodings_; + std::optional mid_; + std::optional mline_index_; + std::optional> remote_stream_ids_; + std::optional> init_send_encodings_; // Indicates that mid value from stable state has been captured and // that rollback has to restore the transceiver. Also protects against // subsequent overwrites. @@ -84,7 +84,7 @@ class TransceiverStableState { bool newly_created_ = false; // `fired_direction_` is nullable, so an optional of an optional is used to // distinguish between null and not set (sorry if this hurts your eyes). 
- absl::optional> fired_direction_; + std::optional> fired_direction_; }; // This class encapsulates the active list of transceivers on a @@ -94,7 +94,7 @@ class TransceiverStableState { class TransceiverList { public: // Returns a copy of the currently active list of transceivers. The - // list consists of rtc::scoped_refptrs, which will keep the transceivers + // list consists of webrtc::scoped_refptrs, which will keep the transceivers // from being deallocated, even if they are removed from the TransceiverList. std::vector List() const { RTC_DCHECK_RUN_ON(&sequence_checker_); @@ -122,7 +122,7 @@ class TransceiverList { transceivers_.end()); } RtpTransceiverProxyRefPtr FindBySender( - rtc::scoped_refptr sender) const; + scoped_refptr sender) const; RtpTransceiverProxyRefPtr FindByMid(const std::string& mid) const; RtpTransceiverProxyRefPtr FindByMLineIndex(size_t mline_index) const; diff --git a/pc/transport_stats.cc b/pc/transport_stats.cc index 8049c07a77..983848d401 100644 --- a/pc/transport_stats.cc +++ b/pc/transport_stats.cc @@ -9,7 +9,7 @@ */ #include "pc/transport_stats.h" -namespace cricket { +namespace webrtc { TransportChannelStats::TransportChannelStats() = default; @@ -18,4 +18,4 @@ TransportChannelStats::TransportChannelStats(const TransportChannelStats&) = TransportChannelStats::~TransportChannelStats() = default; -} // namespace cricket +} // namespace webrtc diff --git a/pc/transport_stats.h b/pc/transport_stats.h index e554385954..1771d1fa8e 100644 --- a/pc/transport_stats.h +++ b/pc/transport_stats.h @@ -11,16 +11,17 @@ #ifndef PC_TRANSPORT_STATS_H_ #define PC_TRANSPORT_STATS_H_ +#include +#include #include #include +#include "absl/strings/string_view.h" #include "api/dtls_transport_interface.h" -#include "p2p/base/dtls_transport_internal.h" #include "p2p/base/ice_transport_internal.h" -#include "p2p/base/port.h" #include "rtc_base/ssl_stream_adapter.h" -namespace cricket { +namespace webrtc { struct TransportChannelStats { TransportChannelStats(); @@ -29,11 +30,13 @@ struct TransportChannelStats { int component = 0; int ssl_version_bytes = 0; - int srtp_crypto_suite = rtc::kSrtpInvalidCryptoSuite; - int ssl_cipher_suite = rtc::kTlsNullWithNullNull; - absl::optional dtls_role; - webrtc::DtlsTransportState dtls_state = webrtc::DtlsTransportState::kNew; + int srtp_crypto_suite = webrtc::kSrtpInvalidCryptoSuite; + int ssl_cipher_suite = webrtc::kTlsNullWithNullNull; + std::optional tls_cipher_suite_name; + std::optional dtls_role; + DtlsTransportState dtls_state = DtlsTransportState::kNew; IceTransportStats ice_transport_stats; + uint16_t ssl_peer_signature_algorithm = webrtc::kSslSignatureAlgorithmUnknown; }; // Information about all the channels of a transport. @@ -46,6 +49,16 @@ struct TransportStats { TransportChannelStatsList channel_stats; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::TransportChannelStats; +using ::webrtc::TransportChannelStatsList; +using ::webrtc::TransportStats; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // PC_TRANSPORT_STATS_H_ diff --git a/pc/typed_codec_vendor.cc b/pc/typed_codec_vendor.cc new file mode 100644 index 0000000000..486bd16657 --- /dev/null +++ b/pc/typed_codec_vendor.cc @@ -0,0 +1,156 @@ +/* + * Copyright 2025 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "pc/typed_codec_vendor.h" + +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/audio_codecs/audio_format.h" +#include "api/field_trials_view.h" +#include "api/media_types.h" +#include "media/base/codec.h" +#include "media/base/codec_list.h" +#include "media/base/media_constants.h" +#include "media/base/media_engine.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +namespace { + +// Create the voice codecs. Do not allocate payload types at this time. +std::vector CollectAudioCodecs( + const std::vector& specs) { + std::vector out; + + // Only generate CN payload types for these clockrates: + std::map> generate_cn = {{8000, false}}; + // Only generate telephone-event payload types for these clockrates: + std::map> generate_dtmf = {{8000, false}, + {48000, false}}; + + for (const auto& spec : specs) { + Codec codec = CreateAudioCodec(spec.format); + if (spec.info.supports_network_adaption) { + codec.AddFeedbackParam( + FeedbackParam(kRtcpFbParamTransportCc, kParamValueEmpty)); + } + + if (spec.info.allow_comfort_noise) { + // Generate a CN entry if the decoder allows it and we support the + // clockrate. + auto cn = generate_cn.find(spec.format.clockrate_hz); + if (cn != generate_cn.end()) { + cn->second = true; + } + } + + // Generate a telephone-event entry if we support the clockrate. + auto dtmf = generate_dtmf.find(spec.format.clockrate_hz); + if (dtmf != generate_dtmf.end()) { + dtmf->second = true; + } + + out.push_back(codec); + + // TODO(hta): Don't assign RED codecs until we know that the PT for Opus + // is final + if (codec.name == kOpusCodecName) { + // We don't know the PT to put into the RED fmtp parameter yet. + // Leave it out. + Codec red_codec = CreateAudioCodec({kRedCodecName, 48000, 2}); + out.push_back(red_codec); + } + } + + // Add CN codecs after "proper" audio codecs. + for (const auto& cn : generate_cn) { + if (cn.second) { + Codec cn_codec = CreateAudioCodec({kCnCodecName, cn.first, 1}); + out.push_back(cn_codec); + } + } + + // Add telephone-event codecs last. + for (const auto& dtmf : generate_dtmf) { + if (dtmf.second) { + Codec dtmf_codec = CreateAudioCodec({kDtmfCodecName, dtmf.first, 1}); + out.push_back(dtmf_codec); + } + } + return out; +} + +} // namespace + +TypedCodecVendor::TypedCodecVendor(MediaEngineInterface* media_engine, + MediaType type, + bool is_sender, + bool rtx_enabled, + const FieldTrialsView& trials) { + if (trials.IsEnabled("WebRTC-PayloadTypesInTransport")) { + // Get the capabilities from the factory and compute the codecs. + if (type == MediaType::AUDIO) { + if (is_sender) { + if (media_engine->voice().encoder_factory()) { + codecs_ = CodecList::CreateFromTrustedData(CollectAudioCodecs( + media_engine->voice().encoder_factory()->GetSupportedEncoders())); + } else { + RTC_LOG(LS_WARNING) + << "No voice encoder factory. Should only happen in test."; + } + } else { + if (media_engine->voice().decoder_factory()) { + codecs_ = CodecList::CreateFromTrustedData(CollectAudioCodecs( + media_engine->voice().decoder_factory()->GetSupportedDecoders())); + } else { + RTC_LOG(LS_WARNING) + << "No voice decoder factory. 
Should only happen in test."; + } + } + } else { + // Use legacy mechanisms for getting codecs from video engine. + // TODO: https://issues.webrtc.org/360058654 - apply late assign to video. + if (is_sender) { + codecs_ = CodecList::CreateFromTrustedData( + media_engine->video().LegacySendCodecs(rtx_enabled)); + } else { + codecs_ = CodecList::CreateFromTrustedData( + media_engine->video().LegacyRecvCodecs(rtx_enabled)); + } + } + } else { + // Use current mechanisms for getting codecs from media engine. + if (type == MediaType::AUDIO) { + if (is_sender) { + codecs_ = CodecList::CreateFromTrustedData( + media_engine->voice().LegacySendCodecs()); + } else { + codecs_ = CodecList::CreateFromTrustedData( + media_engine->voice().LegacyRecvCodecs()); + } + } else { + if (is_sender) { + codecs_ = CodecList::CreateFromTrustedData( + media_engine->video().LegacySendCodecs(rtx_enabled)); + } else { + codecs_ = CodecList::CreateFromTrustedData( + media_engine->video().LegacyRecvCodecs(rtx_enabled)); + } + } + } +} + +} // namespace webrtc diff --git a/pc/typed_codec_vendor.h b/pc/typed_codec_vendor.h new file mode 100644 index 0000000000..41165abfe2 --- /dev/null +++ b/pc/typed_codec_vendor.h @@ -0,0 +1,54 @@ +/* + * Copyright 2025 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef PC_TYPED_CODEC_VENDOR_H_ +#define PC_TYPED_CODEC_VENDOR_H_ + +#include "api/field_trials_view.h" +#include "api/media_types.h" +#include "media/base/codec_list.h" +#include "media/base/media_engine.h" + +namespace webrtc { + +// This class vends codecs of a specific type only. +// It is intended to eventually be owned by the RtpSender and RtpReceiver +// objects. +class TypedCodecVendor { + public: + // Constructor for the case where media engine is not provided. The resulting + // vendor will always return an empty codec list. + TypedCodecVendor() {} + TypedCodecVendor(MediaEngineInterface* media_engine, + MediaType type, + bool is_sender, + bool rtx_enabled, + const FieldTrialsView& trials); + const CodecList& codecs() const { return codecs_; } + void set_codecs(const CodecList& codecs) { codecs_ = codecs; } + // For easy initialization, copying is allowed. + TypedCodecVendor(const TypedCodecVendor& from) = default; + TypedCodecVendor& operator=(const TypedCodecVendor& from) = default; + + private: + CodecList codecs_; +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::TypedCodecVendor; +} // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // PC_TYPED_CODEC_VENDOR_H_ diff --git a/pc/usage_pattern.cc b/pc/usage_pattern.cc index 848472148f..e96a3943a5 100644 --- a/pc/usage_pattern.cc +++ b/pc/usage_pattern.cc @@ -25,24 +25,14 @@ void UsagePattern::ReportUsagePattern(PeerConnectionObserver* observer) const { RTC_HISTOGRAM_ENUMERATION_SPARSE("WebRTC.PeerConnection.UsagePattern", usage_event_accumulator_, static_cast(UsageEvent::MAX_VALUE)); - const int bad_bits = - static_cast(UsageEvent::SET_LOCAL_DESCRIPTION_SUCCEEDED) | - static_cast(UsageEvent::CANDIDATE_COLLECTED); - const int good_bits = - static_cast(UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED) | - static_cast(UsageEvent::REMOTE_CANDIDATE_ADDED) | - static_cast(UsageEvent::ICE_STATE_CONNECTED); - if ((usage_event_accumulator_ & bad_bits) == bad_bits && - (usage_event_accumulator_ & good_bits) == 0) { - // If called after close(), we can't report, because observer may have - // been deallocated, and therefore pointer is null. Write to log instead. - if (observer) { - observer->OnInterestingUsage(usage_event_accumulator_); - } else { - RTC_LOG(LS_INFO) << "Interesting usage signature " - << usage_event_accumulator_ - << " observed after observer shutdown"; - } + // If called after close(), we can't report, because observer may have + // been deallocated, and therefore pointer is null. Write to log instead. + if (observer) { + observer->OnInterestingUsage(usage_event_accumulator_); + } else { + RTC_LOG(LS_INFO) << "Interesting usage signature " + << usage_event_accumulator_ + << " observed after observer shutdown"; } } diff --git a/pc/used_ids.h b/pc/used_ids.h index 1236a786df..fe80531006 100644 --- a/pc/used_ids.h +++ b/pc/used_ids.h @@ -18,7 +18,7 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" -namespace cricket { +namespace webrtc { template class UsedIds { public: @@ -96,6 +96,16 @@ class UsedPayloadTypes : public UsedIds { : UsedIds(kFirstDynamicPayloadTypeLowerRange, kLastDynamicPayloadTypeUpperRange) {} + // Check if a payload type is valid. The range [64-95] is forbidden + // when rtcp-mux is used. + static bool IsIdValid(Codec codec, bool rtcp_mux) { + if (rtcp_mux && (codec.id > kLastDynamicPayloadTypeLowerRange && + codec.id < kFirstDynamicPayloadTypeUpperRange)) { + return false; + } + return codec.id >= 0 && codec.id <= kLastDynamicPayloadTypeUpperRange; + } + protected: bool IsIdUsed(int new_id) override { // Range marked for RTCP avoidance is "used". @@ -115,7 +125,7 @@ class UsedPayloadTypes : public UsedIds { // Helper class used for finding duplicate RTP Header extension ids among // audio and video extensions. -class UsedRtpHeaderExtensionIds : public UsedIds { +class UsedRtpHeaderExtensionIds : public UsedIds { public: enum class IdDomain { // Only allocate IDs that fit in one-byte header extensions. @@ -126,26 +136,23 @@ class UsedRtpHeaderExtensionIds : public UsedIds { }; explicit UsedRtpHeaderExtensionIds(IdDomain id_domain) - : UsedIds( - webrtc::RtpExtension::kMinId, - id_domain == IdDomain::kTwoByteAllowed - ? webrtc::RtpExtension::kMaxId - : webrtc::RtpExtension::kOneByteHeaderExtensionMaxId), + : UsedIds(RtpExtension::kMinId, + id_domain == IdDomain::kTwoByteAllowed + ? 
RtpExtension::kMaxId + : RtpExtension::kOneByteHeaderExtensionMaxId), id_domain_(id_domain), - next_extension_id_(webrtc::RtpExtension::kOneByteHeaderExtensionMaxId) { - } + next_extension_id_(RtpExtension::kOneByteHeaderExtensionMaxId) {} private: // Returns the first unused id in reverse order from the max id of one byte - // header extensions. This hopefully reduce the risk of more collisions. We + // header extensions. This hopefully reduces the risk of more collisions. We // want to change the default ids as little as possible. If no unused id is // found and two byte header extensions are enabled (i.e., - // `extmap_allow_mixed_` is true), search for unused ids from 15 to 255. + // `extmap_allow_mixed_` is true), search for unused ids from 16 to 255. int FindUnusedId() override { - if (next_extension_id_ <= - webrtc::RtpExtension::kOneByteHeaderExtensionMaxId) { + if (next_extension_id_ <= RtpExtension::kOneByteHeaderExtensionMaxId) { // First search in reverse order from the max id of one byte header - // extensions. + // extensions (14). while (IsIdUsed(next_extension_id_) && next_extension_id_ >= min_allowed_id_) { --next_extension_id_; @@ -155,13 +162,12 @@ class UsedRtpHeaderExtensionIds : public UsedIds { if (id_domain_ == IdDomain::kTwoByteAllowed) { if (next_extension_id_ < min_allowed_id_) { // We have searched among all one-byte IDs without finding an unused ID, - // continue at the first two-byte ID. - next_extension_id_ = - webrtc::RtpExtension::kOneByteHeaderExtensionMaxId + 1; + // continue at the first two-byte ID (16; avoid 15 since it is somewhat + // special per https://www.rfc-editor.org/rfc/rfc8285#section-4.2 + next_extension_id_ = RtpExtension::kOneByteHeaderExtensionMaxId + 2; } - if (next_extension_id_ > - webrtc::RtpExtension::kOneByteHeaderExtensionMaxId) { + if (next_extension_id_ > RtpExtension::kOneByteHeaderExtensionMaxId) { while (IsIdUsed(next_extension_id_) && next_extension_id_ <= max_allowed_id_) { ++next_extension_id_; @@ -177,6 +183,16 @@ class UsedRtpHeaderExtensionIds : public UsedIds { int next_extension_id_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::UsedIds; +using ::webrtc::UsedPayloadTypes; +using ::webrtc::UsedRtpHeaderExtensionIds; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // PC_USED_IDS_H_ diff --git a/pc/used_ids_unittest.cc b/pc/used_ids_unittest.cc index 6362f2773a..fd73501ab2 100644 --- a/pc/used_ids_unittest.cc +++ b/pc/used_ids_unittest.cc @@ -11,17 +11,19 @@ #include "pc/used_ids.h" #include "absl/strings/string_view.h" +#include "api/rtp_parameters.h" +#include "rtc_base/checks.h" #include "test/gtest.h" -using cricket::UsedIds; -using cricket::UsedRtpHeaderExtensionIds; +using ::webrtc::UsedIds; +using ::webrtc::UsedRtpHeaderExtensionIds; struct Foo { int id; }; TEST(UsedIdsTest, UniqueIdsAreUnchanged) { - UsedIds used_ids(1, 5); + webrtc::UsedIds used_ids(1, 5); for (int i = 1; i <= 5; ++i) { Foo id = {i}; used_ids.FindAndSetIdUsed(&id); @@ -30,7 +32,7 @@ TEST(UsedIdsTest, UniqueIdsAreUnchanged) { } TEST(UsedIdsTest, IdsOutsideRangeAreUnchanged) { - UsedIds used_ids(1, 5); + webrtc::UsedIds used_ids(1, 5); Foo id_11 = {11}; Foo id_12 = {12}; @@ -51,7 +53,7 @@ TEST(UsedIdsTest, IdsOutsideRangeAreUnchanged) { } TEST(UsedIdsTest, CollisionsAreReassignedIdsInReverseOrder) { - UsedIds used_ids(1, 10); + webrtc::UsedIds used_ids(1, 10); Foo id_1 = {1}; Foo id_2 = {2}; Foo id_2_collision = {2}; @@ -119,7 +121,8 @@ TEST_F(UsedRtpHeaderExtensionIdsTest, TwoByteIdsAllowed) { UsedRtpHeaderExtensionIds::IdDomain::kTwoByteAllowed); // Fill all one byte IDs. - for (int i = 1; i < 15; ++i) { + for (int i = 1; i <= webrtc::RtpExtension::kOneByteHeaderExtensionMaxId; + ++i) { webrtc::RtpExtension id("", i); used_ids.FindAndSetIdUsed(&id); } @@ -131,11 +134,11 @@ TEST_F(UsedRtpHeaderExtensionIdsTest, TwoByteIdsAllowed) { // Expect to reassign to two-byte header extension IDs. used_ids.FindAndSetIdUsed(&id1_collision); - EXPECT_EQ(id1_collision.id, 15); + EXPECT_EQ(id1_collision.id, 16); used_ids.FindAndSetIdUsed(&id2_collision); - EXPECT_EQ(id2_collision.id, 16); + EXPECT_EQ(id2_collision.id, 17); used_ids.FindAndSetIdUsed(&id3_collision); - EXPECT_EQ(id3_collision.id, 17); + EXPECT_EQ(id3_collision.id, 18); } // Death tests. 
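Editor's note on the pc/used_ids.h hunk above: when rtcp-mux is in use, dynamic RTP payload types that would collide with RTCP packet types must be avoided, which is why the new IsIdValid() rejects anything strictly between kLastDynamicPayloadTypeLowerRange and kFirstDynamicPayloadTypeUpperRange. The standalone sketch below mirrors that check with assumed numeric bounds (63/96/127, the usual RTP dynamic-range values); it is illustrative only and is not the WebRTC implementation.

// Sketch of the payload-type validity rule from the hunk above, with assumed
// constants. Payload types 64-95 overlap RTCP packet types when RTP and RTCP
// share a port (rtcp-mux), so they are rejected in that configuration.
#include <iostream>

namespace {
constexpr int kLastDynamicPayloadTypeLowerRange = 63;   // assumed value
constexpr int kFirstDynamicPayloadTypeUpperRange = 96;  // assumed value
constexpr int kLastDynamicPayloadTypeUpperRange = 127;  // assumed value

bool IsPayloadTypeValid(int id, bool rtcp_mux) {
  if (rtcp_mux && id > kLastDynamicPayloadTypeLowerRange &&
      id < kFirstDynamicPayloadTypeUpperRange) {
    return false;
  }
  return id >= 0 && id <= kLastDynamicPayloadTypeUpperRange;
}
}  // namespace

int main() {
  std::cout << IsPayloadTypeValid(72, /*rtcp_mux=*/true) << "\n";   // 0: rejected
  std::cout << IsPayloadTypeValid(72, /*rtcp_mux=*/false) << "\n";  // 1: allowed
  std::cout << IsPayloadTypeValid(111, /*rtcp_mux=*/true) << "\n";  // 1: allowed
}

The companion unittest change above (expected reassigned extension IDs move from 15/16/17 to 16/17/18) follows from the same patch: two-byte header-extension reassignment now starts at 16 because ID 15 is treated as special per RFC 8285 section 4.2, as noted in the updated comment.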
diff --git a/pc/video_rtp_receiver.cc b/pc/video_rtp_receiver.cc index 4432982027..0e18ade560 100644 --- a/pc/video_rtp_receiver.cc +++ b/pc/video_rtp_receiver.cc @@ -12,18 +12,36 @@ #include +#include +#include #include #include #include +#include "api/crypto/frame_decryptor_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/frame_transformer_interface.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/transport/rtp/rtp_source.h" #include "api/video/recordable_encoded_frame.h" +#include "api/video/video_frame.h" +#include "api/video/video_sink_interface.h" +#include "media/base/media_channel.h" +#include "pc/media_stream_track_proxy.h" +#include "pc/video_rtp_track_source.h" #include "pc/video_track.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/thread.h" namespace webrtc { -VideoRtpReceiver::VideoRtpReceiver(rtc::Thread* worker_thread, +VideoRtpReceiver::VideoRtpReceiver(Thread* worker_thread, std::string receiver_id, std::vector stream_ids) : VideoRtpReceiver(worker_thread, @@ -31,14 +49,14 @@ VideoRtpReceiver::VideoRtpReceiver(rtc::Thread* worker_thread, CreateStreamsFromIds(std::move(stream_ids))) {} VideoRtpReceiver::VideoRtpReceiver( - rtc::Thread* worker_thread, + Thread* worker_thread, const std::string& receiver_id, - const std::vector>& streams) + const std::vector>& streams) : worker_thread_(worker_thread), id_(receiver_id), - source_(rtc::make_ref_counted(&source_callback_)), + source_(make_ref_counted(&source_callback_)), track_(VideoTrackProxyWithInternal::Create( - rtc::Thread::Current(), + Thread::Current(), worker_thread, VideoTrack::Create(receiver_id, source_, worker_thread))), attachment_id_(GenerateUniqueId()) { @@ -60,14 +78,13 @@ std::vector VideoRtpReceiver::stream_ids() const { return stream_ids; } -rtc::scoped_refptr VideoRtpReceiver::dtls_transport() - const { +scoped_refptr VideoRtpReceiver::dtls_transport() const { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); return dtls_transport_; } -std::vector> -VideoRtpReceiver::streams() const { +std::vector> VideoRtpReceiver::streams() + const { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); return streams_; } @@ -83,7 +100,7 @@ RtpParameters VideoRtpReceiver::GetParameters() const { } void VideoRtpReceiver::SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) { + scoped_refptr frame_decryptor) { RTC_DCHECK_RUN_ON(worker_thread_); frame_decryptor_ = std::move(frame_decryptor); // Special Case: Set the frame decryptor to any value on any existing channel. 
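Editor's note: the SetFrameDecryptor() hunk above (and the SetFrameTransformer() hunk that follows) both cache the value in a worker-thread member and forward it to media_channel_ only when a channel is attached. The sketch below shows that shape in isolation; every name in it is an illustrative stand-in rather than the WebRTC API, and the replay-on-attach step is an assumption added for the demo, not something this hunk shows.

// Self-contained sketch of "cache it, forward it if a channel already exists".
#include <iostream>
#include <string>
#include <utility>

struct ChannelSketch {
  void SetFrameDecryptor(const std::string& d) {
    std::cout << "channel uses decryptor: " << d << "\n";
  }
};

class ReceiverSketch {
 public:
  void SetFrameDecryptor(std::string decryptor) {
    frame_decryptor_ = std::move(decryptor);  // always cache
    if (channel_ != nullptr) {
      channel_->SetFrameDecryptor(frame_decryptor_);  // forward if possible
    }
  }
  void SetMediaChannel(ChannelSketch* channel) {
    channel_ = channel;
    if (channel_ != nullptr && !frame_decryptor_.empty()) {
      channel_->SetFrameDecryptor(frame_decryptor_);  // assumed replay step
    }
  }

 private:
  ChannelSketch* channel_ = nullptr;
  std::string frame_decryptor_;
};

int main() {
  ReceiverSketch receiver;
  receiver.SetFrameDecryptor("keyring-a");  // no channel yet, value is cached
  ChannelSketch channel;
  receiver.SetMediaChannel(&channel);       // cached value applied on attach
}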
@@ -92,14 +109,14 @@ void VideoRtpReceiver::SetFrameDecryptor( } } -rtc::scoped_refptr -VideoRtpReceiver::GetFrameDecryptor() const { +scoped_refptr VideoRtpReceiver::GetFrameDecryptor() + const { RTC_DCHECK_RUN_ON(worker_thread_); return frame_decryptor_; } -void VideoRtpReceiver::SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) { +void VideoRtpReceiver::SetFrameTransformer( + scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(worker_thread_); frame_transformer_ = std::move(frame_transformer); if (media_channel_) { @@ -114,7 +131,7 @@ void VideoRtpReceiver::Stop() { track_->internal()->set_ended(); } -void VideoRtpReceiver::RestartMediaChannel(absl::optional ssrc) { +void VideoRtpReceiver::RestartMediaChannel(std::optional ssrc) { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); MediaSourceInterface::SourceState state = source_->state(); // TODO(tommi): Can we restart the media channel without blocking? @@ -126,7 +143,7 @@ void VideoRtpReceiver::RestartMediaChannel(absl::optional ssrc) { } void VideoRtpReceiver::RestartMediaChannel_w( - absl::optional ssrc, + std::optional ssrc, MediaSourceInterface::SourceState state) { RTC_DCHECK_RUN_ON(worker_thread_); if (!media_channel_) { @@ -168,7 +185,7 @@ void VideoRtpReceiver::RestartMediaChannel_w( } } -void VideoRtpReceiver::SetSink(rtc::VideoSinkInterface* sink) { +void VideoRtpReceiver::SetSink(VideoSinkInterface* sink) { RTC_DCHECK_RUN_ON(worker_thread_); if (signaled_ssrc_) { media_channel_->SetSink(*signaled_ssrc_, sink); @@ -184,10 +201,10 @@ void VideoRtpReceiver::SetupMediaChannel(uint32_t ssrc) { void VideoRtpReceiver::SetupUnsignaledMediaChannel() { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - RestartMediaChannel(absl::nullopt); + RestartMediaChannel(std::nullopt); } -absl::optional VideoRtpReceiver::ssrc() const { +std::optional VideoRtpReceiver::ssrc() const { RTC_DCHECK_RUN_ON(worker_thread_); if (!signaled_ssrc_.has_value() && media_channel_) { return media_channel_->GetUnsignaledSsrc(); @@ -201,13 +218,13 @@ void VideoRtpReceiver::set_stream_ids(std::vector stream_ids) { } void VideoRtpReceiver::set_transport( - rtc::scoped_refptr dtls_transport) { + scoped_refptr dtls_transport) { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); dtls_transport_ = std::move(dtls_transport); } void VideoRtpReceiver::SetStreams( - const std::vector>& streams) { + const std::vector>& streams) { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); // Remove remote track from any streams that are going away. 
for (const auto& existing_stream : streams_) { @@ -250,7 +267,7 @@ void VideoRtpReceiver::SetObserver(RtpReceiverObserverInterface* observer) { } void VideoRtpReceiver::SetJitterBufferMinimumDelay( - absl::optional delay_seconds) { + std::optional delay_seconds) { RTC_DCHECK_RUN_ON(worker_thread_); delay_.Set(delay_seconds); if (media_channel_ && signaled_ssrc_) @@ -259,7 +276,7 @@ void VideoRtpReceiver::SetJitterBufferMinimumDelay( } void VideoRtpReceiver::SetMediaChannel( - cricket::MediaReceiveChannelInterface* media_channel) { + MediaReceiveChannelInterface* media_channel) { RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(media_channel == nullptr || media_channel->media_type() == media_type()); @@ -268,7 +285,7 @@ void VideoRtpReceiver::SetMediaChannel( } void VideoRtpReceiver::SetMediaChannel_w( - cricket::MediaReceiveChannelInterface* media_channel) { + MediaReceiveChannelInterface* media_channel) { RTC_DCHECK_RUN_ON(worker_thread_); if (media_channel == media_channel_) return; @@ -326,8 +343,8 @@ std::vector VideoRtpReceiver::GetSources() const { } void VideoRtpReceiver::SetupMediaChannel( - absl::optional ssrc, - cricket::MediaReceiveChannelInterface* media_channel) { + std::optional ssrc, + MediaReceiveChannelInterface* media_channel) { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); RTC_DCHECK(media_channel); MediaSourceInterface::SourceState state = source_->state(); diff --git a/pc/video_rtp_receiver.h b/pc/video_rtp_receiver.h index ef88016052..5bd2e6a766 100644 --- a/pc/video_rtp_receiver.h +++ b/pc/video_rtp_receiver.h @@ -13,10 +13,10 @@ #include +#include #include #include -#include "absl/types/optional.h" #include "api/crypto/frame_decryptor_interface.h" #include "api/dtls_transport_interface.h" #include "api/frame_transformer_interface.h" @@ -46,30 +46,29 @@ class VideoRtpReceiver : public RtpReceiverInternal { public: // An SSRC of 0 will create a receiver that will match the first SSRC it // sees. Must be called on signaling thread. - VideoRtpReceiver(rtc::Thread* worker_thread, + VideoRtpReceiver(Thread* worker_thread, std::string receiver_id, std::vector streams_ids); // TODO(hbos): Remove this when streams() is removed. 
// https://crbug.com/webrtc/9480 VideoRtpReceiver( - rtc::Thread* worker_thread, + Thread* worker_thread, const std::string& receiver_id, - const std::vector>& streams); + const std::vector>& streams); virtual ~VideoRtpReceiver(); - rtc::scoped_refptr video_track() const { return track_; } + scoped_refptr video_track() const { return track_; } // RtpReceiverInterface implementation - rtc::scoped_refptr track() const override { + scoped_refptr track() const override { return track_; } - rtc::scoped_refptr dtls_transport() const override; + scoped_refptr dtls_transport() const override; std::vector stream_ids() const override; - std::vector> streams() - const override; - cricket::MediaType media_type() const override { - return cricket::MEDIA_TYPE_VIDEO; + std::vector> streams() const override; + webrtc::MediaType media_type() const override { + return webrtc::MediaType::VIDEO; } std::string id() const override { return id_; } @@ -77,33 +76,31 @@ class VideoRtpReceiver : public RtpReceiverInternal { RtpParameters GetParameters() const override; void SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) override; + scoped_refptr frame_decryptor) override; - rtc::scoped_refptr GetFrameDecryptor() - const override; + scoped_refptr GetFrameDecryptor() const override; - void SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) override; + void SetFrameTransformer( + scoped_refptr frame_transformer) override; // RtpReceiverInternal implementation. void Stop() override; void SetupMediaChannel(uint32_t ssrc) override; void SetupUnsignaledMediaChannel() override; - absl::optional ssrc() const override; + std::optional ssrc() const override; void NotifyFirstPacketReceived() override; void set_stream_ids(std::vector stream_ids) override; void set_transport( - rtc::scoped_refptr dtls_transport) override; - void SetStreams(const std::vector>& - streams) override; + scoped_refptr dtls_transport) override; + void SetStreams( + const std::vector>& streams) override; void SetObserver(RtpReceiverObserverInterface* observer) override; void SetJitterBufferMinimumDelay( - absl::optional delay_seconds) override; + std::optional delay_seconds) override; - void SetMediaChannel( - cricket::MediaReceiveChannelInterface* media_channel) override; + void SetMediaChannel(MediaReceiveChannelInterface* media_channel) override; int AttachmentId() const override { return attachment_id_; } @@ -111,18 +108,17 @@ class VideoRtpReceiver : public RtpReceiverInternal { // Combines SetMediaChannel, SetupMediaChannel and // SetupUnsignaledMediaChannel. 
- void SetupMediaChannel(absl::optional ssrc, - cricket::MediaReceiveChannelInterface* media_channel); + void SetupMediaChannel(std::optional ssrc, + MediaReceiveChannelInterface* media_channel); private: - void RestartMediaChannel(absl::optional ssrc) + void RestartMediaChannel(std::optional ssrc) RTC_RUN_ON(&signaling_thread_checker_); - void RestartMediaChannel_w(absl::optional ssrc, + void RestartMediaChannel_w(std::optional ssrc, MediaSourceInterface::SourceState state) RTC_RUN_ON(worker_thread_); - void SetSink(rtc::VideoSinkInterface* sink) - RTC_RUN_ON(worker_thread_); - void SetMediaChannel_w(cricket::MediaReceiveChannelInterface* media_channel) + void SetSink(VideoSinkInterface* sink) RTC_RUN_ON(worker_thread_); + void SetMediaChannel_w(MediaReceiveChannelInterface* media_channel) RTC_RUN_ON(worker_thread_); // VideoRtpTrackSource::Callback @@ -146,28 +142,28 @@ class VideoRtpReceiver : public RtpReceiverInternal { } source_callback_{this}; RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_checker_; - rtc::Thread* const worker_thread_; + Thread* const worker_thread_; const std::string id_; - cricket::VideoMediaReceiveChannelInterface* media_channel_ + VideoMediaReceiveChannelInterface* media_channel_ RTC_GUARDED_BY(worker_thread_) = nullptr; - absl::optional signaled_ssrc_ RTC_GUARDED_BY(worker_thread_); + std::optional signaled_ssrc_ RTC_GUARDED_BY(worker_thread_); // `source_` is held here to be able to change the state of the source when // the VideoRtpReceiver is stopped. - const rtc::scoped_refptr source_; - const rtc::scoped_refptr> track_; - std::vector> streams_ + const scoped_refptr source_; + const scoped_refptr> track_; + std::vector> streams_ RTC_GUARDED_BY(&signaling_thread_checker_); RtpReceiverObserverInterface* observer_ RTC_GUARDED_BY(&signaling_thread_checker_) = nullptr; bool received_first_packet_ RTC_GUARDED_BY(&signaling_thread_checker_) = false; const int attachment_id_; - rtc::scoped_refptr frame_decryptor_ + scoped_refptr frame_decryptor_ RTC_GUARDED_BY(worker_thread_); - rtc::scoped_refptr dtls_transport_ + scoped_refptr dtls_transport_ RTC_GUARDED_BY(&signaling_thread_checker_); - rtc::scoped_refptr frame_transformer_ + scoped_refptr frame_transformer_ RTC_GUARDED_BY(worker_thread_); // Stores the minimum jitter buffer delay. Handles caching cases // if `SetJitterBufferMinimumDelay` is called before start. 
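Editor's note on the pc/video_rtp_receiver.h changes above: the SSRC plumbing migrates from absl::optional to std::optional, where std::nullopt stands for the unsignaled case in which the receiver matches the first SSRC it sees (SetupUnsignaledMediaChannel() feeds std::nullopt into RestartMediaChannel()). The function below is a hypothetical, minimal sketch of that optional-based dispatch, not the WebRTC implementation.

// Illustrative sketch: std::nullopt models the "unsignaled SSRC" path.
#include <cstdint>
#include <iostream>
#include <optional>

void RestartSketch(std::optional<uint32_t> ssrc) {
  if (ssrc.has_value()) {
    std::cout << "demux on signaled ssrc " << *ssrc << "\n";
  } else {
    std::cout << "unsignaled: accept the first ssrc seen\n";
  }
}

int main() {
  RestartSketch(0x1234u);       // corresponds to the SetupMediaChannel(ssrc) path
  RestartSketch(std::nullopt);  // corresponds to SetupUnsignaledMediaChannel()
}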
diff --git a/pc/video_rtp_receiver_unittest.cc b/pc/video_rtp_receiver_unittest.cc index 5ff736084f..b5131ad1aa 100644 --- a/pc/video_rtp_receiver_unittest.cc +++ b/pc/video_rtp_receiver_unittest.cc @@ -10,15 +10,23 @@ #include "pc/video_rtp_receiver.h" +#include #include #include +#include +#include +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" +#include "api/scoped_refptr.h" #include "api/task_queue/task_queue_base.h" #include "api/video/recordable_encoded_frame.h" #include "api/video/test/mock_recordable_encoded_frame.h" +#include "api/video/video_sink_interface.h" #include "media/base/fake_media_engine.h" #include "media/base/media_channel.h" #include "rtc_base/task_queue_for_test.h" +#include "rtc_base/thread.h" #include "test/gmock.h" #include "test/gtest.h" @@ -35,11 +43,10 @@ namespace { class VideoRtpReceiverTest : public testing::Test { protected: - class MockVideoMediaSendChannel : public cricket::FakeVideoMediaSendChannel { + class MockVideoMediaSendChannel : public FakeVideoMediaSendChannel { public: - MockVideoMediaSendChannel( - const cricket::VideoOptions& options, - TaskQueueBase* network_thread = rtc::Thread::Current()) + MockVideoMediaSendChannel(const VideoOptions& options, + TaskQueueBase* network_thread = Thread::Current()) : FakeVideoMediaSendChannel(options, network_thread) {} MOCK_METHOD(void, GenerateSendKeyFrame, @@ -47,12 +54,11 @@ class VideoRtpReceiverTest : public testing::Test { (override)); }; - class MockVideoMediaReceiveChannel - : public cricket::FakeVideoMediaReceiveChannel { + class MockVideoMediaReceiveChannel : public FakeVideoMediaReceiveChannel { public: MockVideoMediaReceiveChannel( - const cricket::VideoOptions& options, - TaskQueueBase* network_thread = rtc::Thread::Current()) + const VideoOptions& options, + TaskQueueBase* network_thread = Thread::Current()) : FakeVideoMediaReceiveChannel(options, network_thread) {} MOCK_METHOD(void, SetRecordableEncodedFrameCallback, @@ -65,15 +71,15 @@ class VideoRtpReceiverTest : public testing::Test { MOCK_METHOD(void, RequestRecvKeyFrame, (uint32_t), (override)); }; - class MockVideoSink : public rtc::VideoSinkInterface { + class MockVideoSink : public VideoSinkInterface { public: MOCK_METHOD(void, OnFrame, (const RecordableEncodedFrame&), (override)); }; VideoRtpReceiverTest() - : worker_thread_(rtc::Thread::Create()), - channel_(cricket::VideoOptions()), - receiver_(rtc::make_ref_counted( + : worker_thread_(Thread::Create()), + channel_(VideoOptions()), + receiver_(make_ref_counted( worker_thread_.get(), std::string("receiver"), std::vector({"stream"}))) { @@ -89,19 +95,19 @@ class VideoRtpReceiverTest : public testing::Test { SetMediaChannel(nullptr); } - void SetMediaChannel(cricket::MediaReceiveChannelInterface* media_channel) { + void SetMediaChannel(MediaReceiveChannelInterface* media_channel) { SendTask(worker_thread_.get(), [&]() { receiver_->SetMediaChannel(media_channel); }); } - webrtc::VideoTrackSourceInterface* Source() { + VideoTrackSourceInterface* Source() { return receiver_->streams()[0]->FindVideoTrack("receiver")->GetSource(); } - rtc::AutoThread main_thread_; - std::unique_ptr worker_thread_; + AutoThread main_thread_; + std::unique_ptr worker_thread_; NiceMock channel_; - rtc::scoped_refptr receiver_; + scoped_refptr receiver_; }; TEST_F(VideoRtpReceiverTest, SupportsEncodedOutput) { @@ -117,7 +123,7 @@ TEST_F(VideoRtpReceiverTest, GenerateKeyFrameOnChannelSwitchUnlessGenerateKeyframeCalled) { // A channel switch without previous call to GenerateKeyFrame 
shouldn't // cause a call to happen on the new channel. - MockVideoMediaReceiveChannel channel2{cricket::VideoOptions()}; + MockVideoMediaReceiveChannel channel2{VideoOptions()}; EXPECT_CALL(channel_, RequestRecvKeyFrame).Times(0); EXPECT_CALL(channel2, RequestRecvKeyFrame).Times(0); SetMediaChannel(&channel2); @@ -127,12 +133,12 @@ TEST_F(VideoRtpReceiverTest, // re-generate it as we don't know if it was eventually received EXPECT_CALL(channel2, RequestRecvKeyFrame).Times(1); Source()->GenerateKeyFrame(); - MockVideoMediaReceiveChannel channel3{cricket::VideoOptions()}; + MockVideoMediaReceiveChannel channel3{VideoOptions()}; EXPECT_CALL(channel3, RequestRecvKeyFrame); SetMediaChannel(&channel3); // Switching to a new channel should now not cause calls to GenerateKeyFrame. - StrictMock channel4{cricket::VideoOptions()}; + StrictMock channel4{VideoOptions()}; SetMediaChannel(&channel4); // We must call SetMediaChannel(nullptr) here since the mock media channels @@ -160,7 +166,7 @@ TEST_F(VideoRtpReceiverTest, DisablesEnablesEncodedOutputOnChannelSwitch) { EXPECT_CALL(channel_, ClearRecordableEncodedFrameCallback); MockVideoSink sink; Source()->AddEncodedSink(&sink); - MockVideoMediaReceiveChannel channel2{cricket::VideoOptions()}; + MockVideoMediaReceiveChannel channel2{VideoOptions()}; EXPECT_CALL(channel2, SetRecordableEncodedFrameCallback); SetMediaChannel(&channel2); Mock::VerifyAndClearExpectations(&channel2); @@ -169,7 +175,7 @@ TEST_F(VideoRtpReceiverTest, DisablesEnablesEncodedOutputOnChannelSwitch) { // to NOT set the callback again. EXPECT_CALL(channel2, ClearRecordableEncodedFrameCallback); Source()->RemoveEncodedSink(&sink); - StrictMock channel3{cricket::VideoOptions()}; + StrictMock channel3{VideoOptions()}; SetMediaChannel(&channel3); // We must call SetMediaChannel(nullptr) here since the mock media channels diff --git a/pc/video_rtp_track_source.cc b/pc/video_rtp_track_source.cc index e4b333c7c2..7537de9aa7 100644 --- a/pc/video_rtp_track_source.cc +++ b/pc/video_rtp_track_source.cc @@ -14,7 +14,14 @@ #include +#include "api/sequence_checker.h" +#include "api/video/recordable_encoded_frame.h" +#include "api/video/video_frame.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" +#include "pc/video_track_source.h" #include "rtc_base/checks.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -26,17 +33,17 @@ void VideoRtpTrackSource::ClearCallback() { callback_ = nullptr; } -rtc::VideoSourceInterface* VideoRtpTrackSource::source() { +VideoSourceInterface* VideoRtpTrackSource::source() { return &broadcaster_; } -rtc::VideoSinkInterface* VideoRtpTrackSource::sink() { +VideoSinkInterface* VideoRtpTrackSource::sink() { return &broadcaster_; } void VideoRtpTrackSource::BroadcastRecordableEncodedFrame( const RecordableEncodedFrame& frame) const { MutexLock lock(&mu_); - for (rtc::VideoSinkInterface* sink : encoded_sinks_) { + for (VideoSinkInterface* sink : encoded_sinks_) { sink->OnFrame(frame); } } @@ -53,7 +60,7 @@ void VideoRtpTrackSource::GenerateKeyFrame() { } void VideoRtpTrackSource::AddEncodedSink( - rtc::VideoSinkInterface* sink) { + VideoSinkInterface* sink) { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); RTC_DCHECK(sink); size_t size = 0; @@ -70,7 +77,7 @@ void VideoRtpTrackSource::AddEncodedSink( } void VideoRtpTrackSource::RemoveEncodedSink( - rtc::VideoSinkInterface* sink) { + VideoSinkInterface* sink) { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); size_t size = 0; { diff --git a/pc/video_rtp_track_source.h 
b/pc/video_rtp_track_source.h index bf7da99f98..96051016b5 100644 --- a/pc/video_rtp_track_source.h +++ b/pc/video_rtp_track_source.h @@ -58,8 +58,8 @@ class VideoRtpTrackSource : public VideoTrackSource { const RecordableEncodedFrame& frame) const; // VideoTrackSource - rtc::VideoSourceInterface* source() override; - rtc::VideoSinkInterface* sink(); + VideoSourceInterface* source() override; + VideoSinkInterface* sink(); // Returns true. This method can be called on any thread. bool SupportsEncodedOutput() const override; @@ -69,11 +69,11 @@ class VideoRtpTrackSource : public VideoTrackSource { // Adds an encoded sink. Must be called on the worker thread. void AddEncodedSink( - rtc::VideoSinkInterface* sink) override; + VideoSinkInterface* sink) override; // Removes an encoded sink. Must be called on the worker thread. void RemoveEncodedSink( - rtc::VideoSinkInterface* sink) override; + VideoSinkInterface* sink) override; private: RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_sequence_checker_{ @@ -81,9 +81,9 @@ class VideoRtpTrackSource : public VideoTrackSource { // `broadcaster_` is needed since the decoder can only handle one sink. // It might be better if the decoder can handle multiple sinks and consider // the VideoSinkWants. - rtc::VideoBroadcaster broadcaster_; + VideoBroadcaster broadcaster_; mutable Mutex mu_; - std::vector*> encoded_sinks_ + std::vector*> encoded_sinks_ RTC_GUARDED_BY(mu_); Callback* callback_ RTC_GUARDED_BY(worker_sequence_checker_); }; diff --git a/pc/video_rtp_track_source_unittest.cc b/pc/video_rtp_track_source_unittest.cc index 13728c7eff..4aefb17380 100644 --- a/pc/video_rtp_track_source_unittest.cc +++ b/pc/video_rtp_track_source_unittest.cc @@ -10,12 +10,16 @@ #include "pc/video_rtp_track_source.h" -#include "absl/types/optional.h" +#include + +#include "api/make_ref_counted.h" #include "api/scoped_refptr.h" #include "api/units/timestamp.h" #include "api/video/color_space.h" #include "api/video/encoded_image.h" +#include "api/video/recordable_encoded_frame.h" #include "api/video/video_codec_type.h" +#include "api/video/video_sink_interface.h" #include "test/gmock.h" #include "test/gtest.h" @@ -28,14 +32,14 @@ class MockCallback : public VideoRtpTrackSource::Callback { MOCK_METHOD(void, OnEncodedSinkEnabled, (bool), (override)); }; -class MockSink : public rtc::VideoSinkInterface { +class MockSink : public VideoSinkInterface { public: MOCK_METHOD(void, OnFrame, (const RecordableEncodedFrame&), (override)); }; -rtc::scoped_refptr MakeSource( +scoped_refptr MakeSource( VideoRtpTrackSource::Callback* callback) { - return rtc::make_ref_counted(callback); + return make_ref_counted(callback); } TEST(VideoRtpTrackSourceTest, CreatesWithRemoteAtttributeSet) { @@ -109,12 +113,15 @@ TEST(VideoRtpTrackSourceTest, NoCallbacksAfterClearedCallback) { class TestFrame : public RecordableEncodedFrame { public: - rtc::scoped_refptr encoded_buffer() + scoped_refptr encoded_buffer() const override { return nullptr; } - absl::optional color_space() const override { - return absl::nullopt; + std::optional color_space() const override { + return std::nullopt; + } + std::optional video_rotation() const override { + return std::nullopt; } VideoCodecType codec() const override { return kVideoCodecGeneric; } bool is_key_frame() const override { return false; } diff --git a/pc/video_track.cc b/pc/video_track.cc index 0bf8687af3..ad2ce051cc 100644 --- a/pc/video_track.cc +++ b/pc/video_track.cc @@ -10,20 +10,30 @@ #include "pc/video_track.h" +#include #include -#include +#include 
"absl/strings/string_view.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" +#include "api/media_stream_track.h" #include "api/notifier.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" -#include "rtc_base/checks.h" +#include "api/video/video_frame.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" +#include "media/base/video_source_base.h" +#include "pc/video_track_source_proxy.h" +#include "rtc_base/thread.h" namespace webrtc { VideoTrack::VideoTrack( absl::string_view label, - rtc::scoped_refptr< - VideoTrackSourceProxyWithInternal> source, - rtc::Thread* worker_thread) + scoped_refptr> + source, + Thread* worker_thread) : MediaStreamTrack(label), worker_thread_(worker_thread), video_source_(std::move(source)), @@ -47,16 +57,16 @@ std::string VideoTrack::kind() const { // AddOrUpdateSink and RemoveSink should be called on the worker // thread. -void VideoTrack::AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { +void VideoTrack::AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) { RTC_DCHECK_RUN_ON(worker_thread_); VideoSourceBaseGuarded::AddOrUpdateSink(sink, wants); - rtc::VideoSinkWants modified_wants = wants; + VideoSinkWants modified_wants = wants; modified_wants.black_frames = !enabled_w_; video_source_->internal()->AddOrUpdateSink(sink, modified_wants); } -void VideoTrack::RemoveSink(rtc::VideoSinkInterface* sink) { +void VideoTrack::RemoveSink(VideoSinkInterface* sink) { RTC_DCHECK_RUN_ON(worker_thread_); VideoSourceBaseGuarded::RemoveSink(sink); video_source_->internal()->RemoveSink(sink); @@ -98,7 +108,7 @@ bool VideoTrack::set_enabled(bool enable) { RTC_DCHECK_RUN_ON(worker_thread_); enabled_w_ = enable; for (auto& sink_pair : sink_pairs()) { - rtc::VideoSinkWants modified_wants = sink_pair.wants; + VideoSinkWants modified_wants = sink_pair.wants; modified_wants.black_frames = !enable; video_source_->AddOrUpdateSink(sink_pair.sink, modified_wants); } @@ -123,22 +133,21 @@ MediaStreamTrackInterface::TrackState VideoTrack::state() const { void VideoTrack::OnChanged() { RTC_DCHECK_RUN_ON(&signaling_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + Thread::ScopedDisallowBlockingCalls no_blocking_calls; MediaSourceInterface::SourceState state = video_source_->state(); set_state(state == MediaSourceInterface::kEnded ? 
kEnded : kLive); } -rtc::scoped_refptr VideoTrack::Create( +scoped_refptr VideoTrack::Create( absl::string_view id, - rtc::scoped_refptr source, - rtc::Thread* worker_thread) { - rtc::scoped_refptr< - VideoTrackSourceProxyWithInternal> + scoped_refptr source, + Thread* worker_thread) { + scoped_refptr> source_proxy = VideoTrackSourceProxy::Create( - rtc::Thread::Current(), worker_thread, std::move(source)); + Thread::Current(), worker_thread, std::move(source)); - return rtc::make_ref_counted(id, std::move(source_proxy), - worker_thread); + return make_ref_counted(id, std::move(source_proxy), + worker_thread); } } // namespace webrtc diff --git a/pc/video_track.h b/pc/video_track.h index 13a51c454b..4c415505e4 100644 --- a/pc/video_track.h +++ b/pc/video_track.h @@ -11,9 +11,9 @@ #ifndef PC_VIDEO_TRACK_H_ #define PC_VIDEO_TRACK_H_ +#include #include -#include "absl/types/optional.h" #include "api/media_stream_interface.h" #include "api/media_stream_track.h" #include "api/scoped_refptr.h" @@ -34,17 +34,17 @@ namespace webrtc { // conflicting access, so we'd need to override those methods anyway in this // class in order to make sure things are correctly checked. class VideoTrack : public MediaStreamTrack, - public rtc::VideoSourceBaseGuarded, + public VideoSourceBaseGuarded, public ObserverInterface { public: - static rtc::scoped_refptr Create( + static scoped_refptr Create( absl::string_view label, - rtc::scoped_refptr source, - rtc::Thread* worker_thread); + scoped_refptr source, + Thread* worker_thread); - void AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override; - void RemoveSink(rtc::VideoSinkInterface* sink) override; + void AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) override; + void RemoveSink(VideoSinkInterface* sink) override; void RequestRefreshFrame() override; VideoTrackSourceInterface* GetSource() const override; @@ -61,18 +61,18 @@ class VideoTrack : public MediaStreamTrack, protected: VideoTrack( absl::string_view id, - rtc::scoped_refptr< + scoped_refptr< VideoTrackSourceProxyWithInternal> source, - rtc::Thread* worker_thread); + Thread* worker_thread); ~VideoTrack(); private: // Implements ObserverInterface. Observes `video_source_` state. 
void OnChanged() override; - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker signaling_thread_; - rtc::Thread* const worker_thread_; - const rtc::scoped_refptr< + RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_; + Thread* const worker_thread_; + const scoped_refptr< VideoTrackSourceProxyWithInternal> video_source_; ContentHint content_hint_ RTC_GUARDED_BY(&signaling_thread_); diff --git a/pc/video_track_source.cc b/pc/video_track_source.cc index d4b7f55055..d9b5f1937c 100644 --- a/pc/video_track_source.cc +++ b/pc/video_track_source.cc @@ -10,6 +10,11 @@ #include "pc/video_track_source.h" +#include "api/media_stream_interface.h" +#include "api/sequence_checker.h" +#include "api/video/video_frame.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" #include "rtc_base/checks.h" namespace webrtc { @@ -25,14 +30,13 @@ void VideoTrackSource::SetState(SourceState new_state) { } } -void VideoTrackSource::AddOrUpdateSink( - rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { +void VideoTrackSource::AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) { RTC_DCHECK(worker_thread_checker_.IsCurrent()); source()->AddOrUpdateSink(sink, wants); } -void VideoTrackSource::RemoveSink(rtc::VideoSinkInterface* sink) { +void VideoTrackSource::RemoveSink(VideoSinkInterface* sink) { RTC_DCHECK(worker_thread_checker_.IsCurrent()); source()->RemoveSink(sink); } diff --git a/pc/video_track_source.h b/pc/video_track_source.h index 6aae178f37..644c11f27e 100644 --- a/pc/video_track_source.h +++ b/pc/video_track_source.h @@ -11,7 +11,8 @@ #ifndef PC_VIDEO_TRACK_SOURCE_H_ #define PC_VIDEO_TRACK_SOURCE_H_ -#include "absl/types/optional.h" +#include + #include "api/media_stream_interface.h" #include "api/notifier.h" #include "api/sequence_checker.h" @@ -41,25 +42,23 @@ class RTC_EXPORT VideoTrackSource : public Notifier { bool remote() const override { return remote_; } bool is_screencast() const override { return false; } - absl::optional needs_denoising() const override { - return absl::nullopt; - } + std::optional needs_denoising() const override { return std::nullopt; } bool GetStats(Stats* stats) override { return false; } - void AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override; - void RemoveSink(rtc::VideoSinkInterface* sink) override; + void AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) override; + void RemoveSink(VideoSinkInterface* sink) override; bool SupportsEncodedOutput() const override { return false; } void GenerateKeyFrame() override {} void AddEncodedSink( - rtc::VideoSinkInterface* sink) override {} + VideoSinkInterface* sink) override {} void RemoveEncodedSink( - rtc::VideoSinkInterface* sink) override {} + VideoSinkInterface* sink) override {} protected: - virtual rtc::VideoSourceInterface* source() = 0; + virtual VideoSourceInterface* source() = 0; private: RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_thread_checker_{ diff --git a/pc/video_track_source_proxy.cc b/pc/video_track_source_proxy.cc index c3e95e23cc..d772f59f83 100644 --- a/pc/video_track_source_proxy.cc +++ b/pc/video_track_source_proxy.cc @@ -17,13 +17,13 @@ namespace webrtc { -rtc::scoped_refptr CreateVideoTrackSourceProxy( - rtc::Thread* signaling_thread, - rtc::Thread* worker_thread, +scoped_refptr CreateVideoTrackSourceProxy( + Thread* signaling_thread, + Thread* worker_thread, VideoTrackSourceInterface* source) { return VideoTrackSourceProxy::Create( signaling_thread, 
worker_thread, - rtc::scoped_refptr(source)); + scoped_refptr(source)); } } // namespace webrtc diff --git a/pc/video_track_source_proxy.h b/pc/video_track_source_proxy.h index 8500a98766..8b8e97116f 100644 --- a/pc/video_track_source_proxy.h +++ b/pc/video_track_source_proxy.h @@ -11,7 +11,8 @@ #ifndef PC_VIDEO_TRACK_SOURCE_PROXY_H_ #define PC_VIDEO_TRACK_SOURCE_PROXY_H_ -#include "absl/types/optional.h" +#include + #include "api/media_stream_interface.h" #include "api/video/recordable_encoded_frame.h" #include "api/video/video_frame.h" @@ -32,13 +33,13 @@ PROXY_PRIMARY_THREAD_DESTRUCTOR() PROXY_CONSTMETHOD0(SourceState, state) BYPASS_PROXY_CONSTMETHOD0(bool, remote) BYPASS_PROXY_CONSTMETHOD0(bool, is_screencast) -PROXY_CONSTMETHOD0(absl::optional, needs_denoising) +PROXY_CONSTMETHOD0(std::optional, needs_denoising) PROXY_METHOD1(bool, GetStats, Stats*) PROXY_SECONDARY_METHOD2(void, AddOrUpdateSink, - rtc::VideoSinkInterface*, - const rtc::VideoSinkWants&) -PROXY_SECONDARY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface*) + VideoSinkInterface*, + const VideoSinkWants&) +PROXY_SECONDARY_METHOD1(void, RemoveSink, VideoSinkInterface*) PROXY_SECONDARY_METHOD0(void, RequestRefreshFrame) PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) @@ -46,13 +47,13 @@ PROXY_CONSTMETHOD0(bool, SupportsEncodedOutput) PROXY_SECONDARY_METHOD0(void, GenerateKeyFrame) PROXY_SECONDARY_METHOD1(void, AddEncodedSink, - rtc::VideoSinkInterface*) + VideoSinkInterface*) PROXY_SECONDARY_METHOD1(void, RemoveEncodedSink, - rtc::VideoSinkInterface*) + VideoSinkInterface*) PROXY_SECONDARY_METHOD1(void, ProcessConstraints, - const webrtc::VideoTrackSourceConstraints&) + const VideoTrackSourceConstraints&) END_PROXY_MAP(VideoTrackSource) } // namespace webrtc diff --git a/pc/video_track_unittest.cc b/pc/video_track_unittest.cc index e75fd034b3..c003a51bb5 100644 --- a/pc/video_track_unittest.cc +++ b/pc/video_track_unittest.cc @@ -12,10 +12,14 @@ #include +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" +#include "api/scoped_refptr.h" #include "media/base/fake_frame_source.h" #include "pc/test/fake_video_track_renderer.h" #include "pc/test/fake_video_track_source.h" #include "pc/video_track_source.h" +#include "rtc_base/thread.h" #include "rtc_base/time_utils.h" #include "test/gtest.h" @@ -29,19 +33,19 @@ using webrtc::VideoTrackSource; class VideoTrackTest : public ::testing::Test { public: - VideoTrackTest() : frame_source_(640, 480, rtc::kNumMicrosecsPerSec / 30) { + VideoTrackTest() : frame_source_(640, 480, webrtc::kNumMicrosecsPerSec / 30) { static const char kVideoTrackId[] = "track_id"; - video_track_source_ = rtc::make_ref_counted( + video_track_source_ = webrtc::make_ref_counted( /*is_screencast=*/false); video_track_ = VideoTrack::Create(kVideoTrackId, video_track_source_, - rtc::Thread::Current()); + webrtc::Thread::Current()); } protected: - rtc::AutoThread main_thread_; - rtc::scoped_refptr video_track_source_; - rtc::scoped_refptr video_track_; - cricket::FakeFrameSource frame_source_; + webrtc::AutoThread main_thread_; + webrtc::scoped_refptr video_track_source_; + webrtc::scoped_refptr video_track_; + webrtc::FakeFrameSource frame_source_; }; // VideoTrack::Create will create an API proxy around the source object. 
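The renames running through the pc/video_track* files above follow a single pattern: symbols move from the `rtc::` and `cricket::` namespaces into `webrtc::`, and `absl::optional` becomes `std::optional`. A minimal sketch of a downstream sink written against the updated spellings; the FrameCounterSink class and its members are illustrative only and not part of this change:

#include <optional>

#include "api/video/video_frame.h"
#include "api/video/video_sink_interface.h"

namespace example {

// Counts delivered frames and remembers the last frame's pixel count.
class FrameCounterSink : public webrtc::VideoSinkInterface<webrtc::VideoFrame> {
 public:
  void OnFrame(const webrtc::VideoFrame& frame) override {
    ++frame_count_;
    last_pixel_count_ = frame.width() * frame.height();
  }

  int frame_count() const { return frame_count_; }
  std::optional<int> last_pixel_count() const { return last_pixel_count_; }

 private:
  int frame_count_ = 0;
  // std::optional replaces absl::optional, matching the patch above.
  std::optional<int> last_pixel_count_;
};

}  // namespace example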
diff --git a/pc/webrtc_sdp.cc b/pc/webrtc_sdp.cc index 71cd18cfb6..a87a82c6a9 100644 --- a/pc/webrtc_sdp.cc +++ b/pc/webrtc_sdp.cc @@ -18,9 +18,9 @@ #include #include #include +#include #include #include -#include #include #include #include @@ -28,14 +28,13 @@ #include "absl/algorithm/container.h" #include "absl/strings/ascii.h" #include "absl/strings/match.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/string_view.h" #include "api/candidate.h" -#include "api/crypto_params.h" +#include "api/jsep.h" #include "api/jsep_ice_candidate.h" #include "api/jsep_session_description.h" #include "api/media_types.h" -// for RtpExtension -#include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/rtc_error.h" #include "api/rtp_parameters.h" #include "api/rtp_transceiver_direction.h" @@ -45,7 +44,6 @@ #include "media/base/rtp_utils.h" #include "media/base/stream_params.h" #include "media/sctp/sctp_transport_internal.h" -#include "p2p/base/candidate_pair_interface.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/port.h" @@ -54,51 +52,49 @@ #include "p2p/base/transport_info.h" #include "pc/media_protocol_names.h" #include "pc/media_session.h" -#include "pc/sdp_serializer.h" #include "pc/session_description.h" #include "pc/simulcast_description.h" +#include "pc/simulcast_sdp_serializer.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -#include "rtc_base/helpers.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/net_helper.h" +#include "rtc_base/net_helpers.h" #include "rtc_base/network_constants.h" #include "rtc_base/socket_address.h" #include "rtc_base/ssl_fingerprint.h" #include "rtc_base/string_encode.h" -#include "rtc_base/string_utils.h" #include "rtc_base/strings/string_builder.h" -using cricket::AudioContentDescription; -using cricket::Candidate; -using cricket::Candidates; -using cricket::ContentInfo; -using cricket::CryptoParams; -using cricket::ICE_CANDIDATE_COMPONENT_RTCP; -using cricket::ICE_CANDIDATE_COMPONENT_RTP; -using cricket::kApplicationSpecificBandwidth; -using cricket::kCodecParamMaxPTime; -using cricket::kCodecParamMinPTime; -using cricket::kCodecParamPTime; -using cricket::kTransportSpecificBandwidth; -using cricket::MediaContentDescription; -using cricket::MediaProtocolType; -using cricket::MediaType; -using cricket::RidDescription; -using cricket::RtpHeaderExtensions; -using cricket::SctpDataContentDescription; -using cricket::SimulcastDescription; -using cricket::SimulcastLayer; -using cricket::SimulcastLayerList; -using cricket::SsrcGroup; -using cricket::StreamParams; -using cricket::StreamParamsVec; -using cricket::TransportDescription; -using cricket::TransportInfo; -using cricket::UnsupportedContentDescription; -using cricket::VideoContentDescription; -using rtc::SocketAddress; +using ::webrtc::AudioContentDescription; +using webrtc::Candidate; +using ::webrtc::Candidates; +using ::webrtc::ContentInfo; +using ::webrtc::ICE_CANDIDATE_COMPONENT_RTCP; +using ::webrtc::ICE_CANDIDATE_COMPONENT_RTP; +using ::webrtc::kApplicationSpecificBandwidth; +using ::webrtc::kCodecParamMaxPTime; +using ::webrtc::kCodecParamMinPTime; +using ::webrtc::kCodecParamPTime; +using ::webrtc::kTransportSpecificBandwidth; +using ::webrtc::MediaContentDescription; +using ::webrtc::MediaProtocolType; +using ::webrtc::RidDescription; +using ::webrtc::RtpHeaderExtensions; +using ::webrtc::SctpDataContentDescription; +using 
::webrtc::SimulcastDescription; +using ::webrtc::SimulcastLayer; +using ::webrtc::SimulcastLayerList; +using ::webrtc::SocketAddress; +using ::webrtc::SsrcGroup; +using ::webrtc::StreamParams; +using ::webrtc::StreamParamsVec; +using ::webrtc::TransportDescription; +using ::webrtc::TransportInfo; +using ::webrtc::UnsupportedContentDescription; +using ::webrtc::VideoContentDescription; // TODO(deadbeef): Switch to using anonymous namespace rather than declaring // everything "static". @@ -157,7 +153,6 @@ static const char kSsrcAttributeMsid[] = "msid"; static const char kDefaultMsid[] = "default"; static const char kNoStreamMsid[] = "-"; static const char kAttributeSsrcGroup[] = "ssrc-group"; -static const char kAttributeCrypto[] = "crypto"; static const char kAttributeCandidate[] = "candidate"; static const char kAttributeCandidateTyp[] = "typ"; static const char kAttributeCandidateRaddr[] = "raddr"; @@ -208,9 +203,10 @@ static const char kCandidatePrflx[] = "prflx"; static const char kCandidateRelay[] = "relay"; static const char kTcpCandidateType[] = "tcptype"; -// rtc::StringBuilder doesn't have a << overload for chars, while rtc::split and -// rtc::tokenize_first both take a char delimiter. To handle both cases these -// constants come in pairs of a chars and length-one strings. +// webrtc::StringBuilder doesn't have a << overload for chars, while +// webrtc::split and webrtc::tokenize_first both take a char delimiter. To +// handle both cases these constants come in pairs of a chars and length-one +// strings. static const char kSdpDelimiterEqual[] = "="; static const char kSdpDelimiterEqualChar = '='; static const char kSdpDelimiterSpace[] = " "; @@ -240,9 +236,9 @@ static const char kAttrGroup[] = "a=group:BUNDLE"; static const char kConnectionNettype[] = "IN"; static const char kConnectionIpv4Addrtype[] = "IP4"; static const char kConnectionIpv6Addrtype[] = "IP6"; -static const char kMediaTypeVideo[] = "video"; -static const char kMediaTypeAudio[] = "audio"; -static const char kMediaTypeData[] = "application"; +static const char kSdpMediaTypeVideo[] = "video"; +static const char kSdpMediaTypeAudio[] = "audio"; +static const char kSdpMediaTypeData[] = "application"; static const char kMediaPortRejected[] = "0"; // draft-ietf-mmusic-trickle-ice-01 // When no candidates have been gathered, set the connection @@ -267,29 +263,27 @@ struct SsrcInfo { std::string stream_id; std::string track_id; }; -typedef std::vector SsrcInfoVec; -typedef std::vector SsrcGroupVec; +using SsrcInfoVec = std::vector; +using SsrcGroupVec = std::vector; -template -static void AddFmtpLine(const T& codec, std::string* message); static void BuildMediaDescription(const ContentInfo* content_info, const TransportInfo* transport_info, - const cricket::MediaType media_type, + const webrtc::MediaType media_type, const std::vector& candidates, int msid_signaling, std::string* message); -static void BuildMediaLine(const cricket::MediaType media_type, +static void BuildMediaLine(const webrtc::MediaType media_type, const ContentInfo* content_info, const MediaContentDescription* media_desc, std::string* message); static void BuildRtpContentAttributes(const MediaContentDescription* media_desc, - const cricket::MediaType media_type, + const webrtc::MediaType media_type, int msid_signaling, std::string* message); static void BuildRtpHeaderExtensions(const RtpHeaderExtensions& extensions, std::string* message); static void BuildRtpmap(const MediaContentDescription* media_desc, - const cricket::MediaType media_type, + const 
webrtc::MediaType media_type, std::string* message); static void BuildCandidate(const std::vector& candidates, bool include_ufrag, @@ -306,21 +300,21 @@ static bool ParseSessionDescription(absl::string_view message, std::string* session_version, TransportDescription* session_td, RtpHeaderExtensions* session_extmaps, - rtc::SocketAddress* connection_addr, - cricket::SessionDescription* desc, + SocketAddress* connection_addr, + SessionDescription* desc, SdpParseError* error); static bool ParseMediaDescription( absl::string_view message, const TransportDescription& session_td, const RtpHeaderExtensions& session_extmaps, size_t* pos, - const rtc::SocketAddress& session_connection_addr, - cricket::SessionDescription* desc, + const SocketAddress& session_connection_addr, + SessionDescription* desc, std::vector>* candidates, SdpParseError* error); static bool ParseContent( absl::string_view message, - const cricket::MediaType media_type, + const webrtc::MediaType media_type, int mline_index, absl::string_view protocol, const std::vector& payload_types, @@ -333,7 +327,7 @@ static bool ParseContent( std::vector>* candidates, SdpParseError* error); static bool ParseGroupAttribute(absl::string_view line, - cricket::SessionDescription* desc, + SessionDescription* desc, SdpParseError* error); static bool ParseSsrcAttribute(absl::string_view line, SsrcInfoVec* ssrc_infos, @@ -342,16 +336,13 @@ static bool ParseSsrcAttribute(absl::string_view line, static bool ParseSsrcGroupAttribute(absl::string_view line, SsrcGroupVec* ssrc_groups, SdpParseError* error); -static bool ParseCryptoAttribute(absl::string_view line, - MediaContentDescription* media_desc, - SdpParseError* error); static bool ParseRtpmapAttribute(absl::string_view line, - const cricket::MediaType media_type, + const webrtc::MediaType media_type, const std::vector& payload_types, MediaContentDescription* media_desc, SdpParseError* error); static bool ParseFmtpAttributes(absl::string_view line, - const cricket::MediaType media_type, + const webrtc::MediaType media_type, MediaContentDescription* media_desc, SdpParseError* error); static bool ParseFmtpParam(absl::string_view line, @@ -359,11 +350,11 @@ static bool ParseFmtpParam(absl::string_view line, std::string* value, SdpParseError* error); static bool ParsePacketizationAttribute(absl::string_view line, - const cricket::MediaType media_type, + const webrtc::MediaType media_type, MediaContentDescription* media_desc, SdpParseError* error); static bool ParseRtcpFbAttribute(absl::string_view line, - const cricket::MediaType media_type, + const webrtc::MediaType media_type, MediaContentDescription* media_desc, SdpParseError* error); static bool ParseIceOptions(absl::string_view line, @@ -374,18 +365,19 @@ static bool ParseExtmap(absl::string_view line, SdpParseError* error); static bool ParseFingerprintAttribute( absl::string_view line, - std::unique_ptr* fingerprint, + std::unique_ptr* fingerprint, SdpParseError* error); static bool ParseDtlsSetup(absl::string_view line, - cricket::ConnectionRole* role, + ConnectionRole* role, SdpParseError* error); static bool ParseMsidAttribute(absl::string_view line, std::vector* stream_ids, std::string* track_id, SdpParseError* error); -static void RemoveInvalidRidDescriptions(const std::vector& payload_types, - std::vector* rids); +static void RemoveDuplicateRidDescriptions( + const std::vector& payload_types, + std::vector* rids); static SimulcastLayerList RemoveRidsFromSimulcastLayerList( const std::set& to_remove, @@ -452,7 +444,7 @@ static bool 
ParseFailed(std::string description, SdpParseError* error) { static bool ParseFailedExpectFieldNum(absl::string_view line, int expected_fields, SdpParseError* error) { - rtc::StringBuilder description; + StringBuilder description; description << "Expects " << expected_fields << " fields."; return ParseFailed(line, description.Release(), error); } @@ -462,7 +454,7 @@ static bool ParseFailedExpectFieldNum(absl::string_view line, static bool ParseFailedExpectMinFieldNum(absl::string_view line, int expected_min_fields, SdpParseError* error) { - rtc::StringBuilder description; + StringBuilder description; description << "Expects at least " << expected_min_fields << " fields."; return ParseFailed(line, description.Release(), error); } @@ -472,7 +464,7 @@ static bool ParseFailedExpectMinFieldNum(absl::string_view line, static bool ParseFailedGetValue(absl::string_view line, absl::string_view attribute, SdpParseError* error) { - rtc::StringBuilder description; + StringBuilder description; description << "Failed to get the value of attribute: " << attribute; return ParseFailed(line, description.Release(), error); } @@ -486,7 +478,7 @@ static bool ParseFailedExpectLine(absl::string_view message, const char line_type, absl::string_view line_value, SdpParseError* error) { - rtc::StringBuilder description; + StringBuilder description; description << "Expect line: " << std::string(1, line_type) << "=" << line_value; return ParseFailed(message, line_start, description.Release(), error); @@ -511,11 +503,11 @@ static absl::string_view TrimReturnChar(absl::string_view line) { // Gets line of `message` starting at `pos`, and checks overall SDP syntax. On // success, advances `pos` to the next line. -static absl::optional GetLine(absl::string_view message, - size_t* pos) { +static std::optional GetLine(absl::string_view message, + size_t* pos) { size_t line_end = message.find(kNewLineChar, *pos); if (line_end == absl::string_view::npos) { - return absl::nullopt; + return std::nullopt; } absl::string_view line = TrimReturnChar(message.substr(*pos, line_end - *pos)); @@ -536,7 +528,7 @@ static absl::optional GetLine(absl::string_view message, if (line.length() < 3 || !islower(static_cast(line[0])) || line[1] != kSdpDelimiterEqualChar || (line[0] != kLineTypeSessionName && line[2] == kSdpDelimiterSpaceChar)) { - return absl::nullopt; + return std::nullopt; } *pos = line_end + 1; return line; @@ -545,13 +537,13 @@ static absl::optional GetLine(absl::string_view message, // Init `os` to "`type`=`value`". static void InitLine(const char type, absl::string_view value, - rtc::StringBuilder* os) { + StringBuilder* os) { os->Clear(); *os << std::string(1, type) << kSdpDelimiterEqual << value; } // Init `os` to "a=`attribute`". 
-static void InitAttrLine(absl::string_view attribute, rtc::StringBuilder* os) { +static void InitAttrLine(absl::string_view attribute, StringBuilder* os) { InitLine(kLineTypeAttributes, attribute, os); } @@ -559,7 +551,7 @@ static void InitAttrLine(absl::string_view attribute, rtc::StringBuilder* os) { static void AddAttributeLine(absl::string_view attribute, int value, std::string* message) { - rtc::StringBuilder os; + StringBuilder os; InitAttrLine(attribute, &os); os << kSdpDelimiterColon << value; AddLine(os.str(), message); @@ -579,12 +571,12 @@ static bool IsLineType(absl::string_view line, const char type) { return IsLineType(line, type, 0); } -static absl::optional +static std::optional GetLineWithType(absl::string_view message, size_t* pos, const char type) { if (IsLineType(message, type, *pos)) { return GetLine(message, pos); } - return absl::nullopt; + return std::nullopt; } static bool HasAttribute(absl::string_view line, absl::string_view attribute) { @@ -609,7 +601,7 @@ static bool AddSsrcLine(uint32_t ssrc_id, std::string* message) { // RFC 5576 // a=ssrc: : - rtc::StringBuilder os; + StringBuilder os; InitAttrLine(kAttributeSsrc, &os); os << kSdpDelimiterColon << ssrc_id << kSdpDelimiterSpace << attribute << kSdpDelimiterColon << value; @@ -622,7 +614,7 @@ static bool GetValue(absl::string_view message, std::string* value, SdpParseError* error) { std::string leftpart; - if (!rtc::tokenize_first(message, kSdpDelimiterColonChar, &leftpart, value)) { + if (!tokenize_first(message, kSdpDelimiterColonChar, &leftpart, value)) { return ParseFailedGetValue(message, attribute, error); } // The left part should end with the expected attribute. @@ -644,7 +636,7 @@ static bool GetSingleTokenValue(absl::string_view message, return false; } if (!absl::c_all_of(absl::string_view(*value), IsTokenChar)) { - rtc::StringBuilder description; + StringBuilder description; description << "Illegal character found in the value of " << attribute; return ParseFailed(message, description.Release(), error); } @@ -662,8 +654,8 @@ static bool GetValueFromString(absl::string_view line, absl::string_view s, T* t, SdpParseError* error) { - if (!rtc::FromString(s, t)) { - rtc::StringBuilder description; + if (!FromString(s, t)) { + StringBuilder description; description << "Invalid value: " << s << "."; return ParseFailed(line, description.Release(), error); } @@ -675,7 +667,7 @@ static bool GetPayloadTypeFromString(absl::string_view line, int* payload_type, SdpParseError* error) { return GetValueFromString(line, s, payload_type, error) && - cricket::IsValidRtpPayloadType(*payload_type); + IsValidRtpPayloadType(*payload_type); } // Creates a StreamParams track in the case when no SSRC lines are signaled. @@ -716,15 +708,15 @@ void CreateTracksFromSsrcInfos(const SsrcInfoVec& ssrc_infos, } std::vector stream_ids; std::string track_id; - if (msid_signaling & cricket::kMsidSignalingMediaSection) { + if (msid_signaling & kMsidSignalingMediaSection) { // This is the case with Unified Plan SDP msid signaling. stream_ids = msid_stream_ids; track_id = std::string(msid_track_id); - } else if (msid_signaling & cricket::kMsidSignalingSsrcAttribute) { + } else if (msid_signaling & kMsidSignalingSsrcAttribute) { // This is the case with Plan B SDP msid signaling. stream_ids.push_back(ssrc_info.stream_id); track_id = ssrc_info.track_id; - } else { + } else if (msid_signaling == kMsidSignalingNotUsed) { // Since no media streams isn't supported with older SDP signaling, we // use a default stream id. 
stream_ids.push_back(kDefaultMsid); @@ -749,7 +741,7 @@ void CreateTracksFromSsrcInfos(const SsrcInfoVec& ssrc_infos, // msid attribute, use default/random values. This happens after // deduplication. if (stream.id.empty()) { - stream.id = rtc::CreateRandomString(8); + stream.id = CreateRandomString(8); } } } @@ -764,29 +756,6 @@ void GetMediaStreamIds(const ContentInfo* content, } } -// RFC 5245 -// It is RECOMMENDED that default candidates be chosen based on the -// likelihood of those candidates to work with the peer that is being -// contacted. It is RECOMMENDED that relayed > reflexive > host. -static const int kPreferenceUnknown = 0; -static const int kPreferenceHost = 1; -static const int kPreferenceReflexive = 2; -static const int kPreferenceRelayed = 3; - -static int GetCandidatePreferenceFromType(absl::string_view type) { - int preference = kPreferenceUnknown; - if (type == cricket::LOCAL_PORT_TYPE) { - preference = kPreferenceHost; - } else if (type == cricket::STUN_PORT_TYPE) { - preference = kPreferenceReflexive; - } else if (type == cricket::RELAY_PORT_TYPE) { - preference = kPreferenceRelayed; - } else { - RTC_DCHECK_NOTREACHED(); - } - return preference; -} - // Get ip and port of the default destination from the `candidates` with the // given value of `component_id`. The default candidate should be the one most // likely to work, typically IPv4 relay. @@ -802,17 +771,17 @@ static void GetDefaultDestination(const std::vector& candidates, *addr_type = kConnectionIpv4Addrtype; *port = kDummyPort; *ip = kDummyAddress; - int current_preference = kPreferenceUnknown; + int current_preference = 0; // Start with lowest preference int current_family = AF_UNSPEC; for (const Candidate& candidate : candidates) { if (candidate.component() != component_id) { continue; } // Default destination should be UDP only. - if (candidate.protocol() != cricket::UDP_PROTOCOL_NAME) { + if (candidate.protocol() != UDP_PROTOCOL_NAME) { continue; } - const int preference = GetCandidatePreferenceFromType(candidate.type()); + const int preference = candidate.type_preference(); const int family = candidate.address().ipaddr().family(); // See if this candidate is more preferable then the current one if it's the // same family. Or if the current family is IPv4 already so we could safely @@ -836,7 +805,7 @@ static void GetDefaultDestination(const std::vector& candidates, // Gets "a=rtcp" line if found default RTCP candidate from `candidates`. static std::string GetRtcpLine(const std::vector& candidates) { - std::string rtcp_line, rtcp_port, rtcp_ip, addr_type; + std::string rtcp_port, rtcp_ip, addr_type; GetDefaultDestination(candidates, ICE_CANDIDATE_COMPONENT_RTCP, &rtcp_port, &rtcp_ip, &addr_type); // Found default RTCP candidate. @@ -847,12 +816,11 @@ static std::string GetRtcpLine(const std::vector& candidates) { // RFC 3605 // rtcp-attribute = "a=rtcp:" port [nettype space addrtype space // connection-address] CRLF - rtc::StringBuilder os; + StringBuilder os; InitAttrLine(kAttributeRtcp, &os); os << kSdpDelimiterColon << rtcp_port << " " << kConnectionNettype << " " << addr_type << " " << rtcp_ip; - rtcp_line = os.str(); - return rtcp_line; + return os.Release(); } // Get candidates according to the mline index from SessionDescriptionInterface. 
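GetRtcpLine above emits the RFC 3605 rtcp attribute, `a=rtcp:<port> IN <addrtype> <address>`, for the default RTCP candidate picked by GetDefaultDestination. A standalone sketch of that formatting, independent of the WebRTC string helpers (the function name and signature below are illustrative):

#include <string>

// Builds an RFC 3605 rtcp attribute, e.g. "a=rtcp:53020 IN IP4 126.16.64.4".
// `addr_type` is "IP4" or "IP6"; the caller is assumed to have already chosen
// the default RTCP candidate.
std::string FormatRtcpAttribute(int port,
                                const std::string& addr_type,
                                const std::string& address) {
  return "a=rtcp:" + std::to_string(port) + " IN " + addr_type + " " + address;
}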
@@ -874,7 +842,7 @@ static bool IsValidPort(int port) { } std::string SdpSerialize(const JsepSessionDescription& jdesc) { - const cricket::SessionDescription* desc = jdesc.description(); + const SessionDescription* desc = jdesc.description(); if (!desc) { return ""; } @@ -887,7 +855,7 @@ std::string SdpSerialize(const JsepSessionDescription& jdesc) { // RFC 4566 // o= // - rtc::StringBuilder os; + StringBuilder os; InitLine(kLineTypeOrigin, kSessionOriginUsername, &os); const std::string& session_id = jdesc.session_id().empty() ? kSessionOriginSessionId : jdesc.session_id(); @@ -904,9 +872,9 @@ std::string SdpSerialize(const JsepSessionDescription& jdesc) { AddLine(kTimeDescription, &message); // BUNDLE Groups - std::vector groups = - desc->GetGroupsByName(cricket::GROUP_TYPE_BUNDLE); - for (const cricket::ContentGroup* group : groups) { + std::vector groups = + desc->GetGroupsByName(GROUP_TYPE_BUNDLE); + for (const ContentGroup* group : groups) { std::string group_line = kAttrGroup; RTC_DCHECK(group != NULL); for (const std::string& content_name : group->content_names()) { @@ -922,31 +890,39 @@ std::string SdpSerialize(const JsepSessionDescription& jdesc) { AddLine(os.str(), &message); } - // MediaStream semantics - InitAttrLine(kAttributeMsidSemantics, &os); - os << kSdpDelimiterColon << " " << kMediaStreamSemantic; + // MediaStream semantics. + // TODO(bugs.webrtc.org/10421): Change to & webrtc::kMsidSignalingSemantic + // when we think it's safe to do so, so that we gradually fade out this old + // line that was removed from the specification. + if (desc->msid_signaling() != kMsidSignalingNotUsed) { + InitAttrLine(kAttributeMsidSemantics, &os); + os << kSdpDelimiterColon << " " << kMediaStreamSemantic; - std::set media_stream_ids; - const ContentInfo* audio_content = GetFirstAudioContent(desc); - if (audio_content) - GetMediaStreamIds(audio_content, &media_stream_ids); + // TODO(bugs.webrtc.org/10421): this code only looks at the first + // audio/video content. Fixing that might result in much larger SDP and the + // msid-semantic line should eventually go away so this is not worth fixing. + std::set media_stream_ids; + const ContentInfo* audio_content = GetFirstAudioContent(desc); + if (audio_content) + GetMediaStreamIds(audio_content, &media_stream_ids); - const ContentInfo* video_content = GetFirstVideoContent(desc); - if (video_content) - GetMediaStreamIds(video_content, &media_stream_ids); + const ContentInfo* video_content = GetFirstVideoContent(desc); + if (video_content) + GetMediaStreamIds(video_content, &media_stream_ids); - for (const std::string& id : media_stream_ids) { - os << " " << id; + for (const std::string& id : media_stream_ids) { + os << " " << id; + } + AddLine(os.str(), &message); } - AddLine(os.str(), &message); // a=ice-lite // // TODO(deadbeef): It's weird that we need to iterate TransportInfos for // this, when it's a session-level attribute. It really should be moved to a // session-level structure like SessionDescription. 
- for (const cricket::TransportInfo& transport : desc->transport_infos()) { - if (transport.description.ice_mode == cricket::ICEMODE_LITE) { + for (const TransportInfo& transport : desc->transport_infos()) { + if (transport.description.ice_mode == ICEMODE_LITE) { InitAttrLine(kAttributeIceLite, &os); AddLine(os.str(), &message); break; @@ -958,7 +934,7 @@ std::string SdpSerialize(const JsepSessionDescription& jdesc) { for (const ContentInfo& content : desc->contents()) { std::vector candidates; GetCandidatesByMindex(jdesc, ++mline_index, &candidates); - BuildMediaDescription(&content, desc->GetTransportInfoByName(content.name), + BuildMediaDescription(&content, desc->GetTransportInfoByName(content.mid()), content.media_description()->type(), candidates, desc->msid_signaling(), &message); } @@ -972,9 +948,9 @@ std::string SdpSerializeCandidate(const IceCandidateInterface& candidate) { } // Serializes a cricket Candidate. -std::string SdpSerializeCandidate(const cricket::Candidate& candidate) { +std::string SdpSerializeCandidate(const Candidate& candidate) { std::string message; - std::vector candidates(1, candidate); + std::vector candidates(1, candidate); BuildCandidate(candidates, true, &message); // From WebRTC draft section 4.8.1.1 candidate-attribute will be // just candidate: not a=candidate:CRLF @@ -992,8 +968,8 @@ bool SdpDeserialize(absl::string_view message, std::string session_version; TransportDescription session_td("", ""); RtpHeaderExtensions session_extmaps; - rtc::SocketAddress session_connection_addr; - auto desc = std::make_unique(); + SocketAddress session_connection_addr; + auto desc = std::make_unique(); size_t current_pos = 0; // Session Description @@ -1033,7 +1009,7 @@ bool SdpDeserializeCandidate(absl::string_view message, bool SdpDeserializeCandidate(absl::string_view transport_name, absl::string_view message, - cricket::Candidate* candidate, + Candidate* candidate, SdpParseError* error) { RTC_DCHECK(candidate != nullptr); if (!ParseCandidate(message, candidate, error, true)) { @@ -1076,11 +1052,11 @@ bool ParseCandidate(absl::string_view message, std::string candidate_value; // `first_line` must be in the form of "candidate:". - if (!rtc::tokenize_first(first_line, kSdpDelimiterColonChar, - &attribute_candidate, &candidate_value) || + if (!tokenize_first(first_line, kSdpDelimiterColonChar, &attribute_candidate, + &candidate_value) || attribute_candidate != kAttributeCandidate) { if (is_raw) { - rtc::StringBuilder description; + StringBuilder description; description << "Expect line: " << kAttributeCandidate << ":" ""; @@ -1092,7 +1068,7 @@ bool ParseCandidate(absl::string_view message, } std::vector fields = - rtc::split(candidate_value, kSdpDelimiterSpaceChar); + split(candidate_value, kSdpDelimiterSpaceChar); // RFC 5245 // a=candidate: @@ -1125,34 +1101,33 @@ bool ParseCandidate(absl::string_view message, } SocketAddress address(connection_address, port); - absl::optional protocol = - cricket::StringToProto(transport); + std::optional protocol = StringToProto(transport); if (!protocol) { return ParseFailed(first_line, "Unsupported transport type.", error); } bool tcp_protocol = false; switch (*protocol) { // Supported protocols. 
- case cricket::PROTO_UDP: + case PROTO_UDP: break; - case cricket::PROTO_TCP: - case cricket::PROTO_SSLTCP: + case PROTO_TCP: + case PROTO_SSLTCP: tcp_protocol = true; break; default: return ParseFailed(first_line, "Unsupported transport type.", error); } - std::string candidate_type; + IceCandidateType candidate_type; const absl::string_view type = fields[7]; if (type == kCandidateHost) { - candidate_type = cricket::LOCAL_PORT_TYPE; + candidate_type = IceCandidateType::kHost; } else if (type == kCandidateSrflx) { - candidate_type = cricket::STUN_PORT_TYPE; + candidate_type = IceCandidateType::kSrflx; } else if (type == kCandidateRelay) { - candidate_type = cricket::RELAY_PORT_TYPE; + candidate_type = IceCandidateType::kRelay; } else if (type == kCandidatePrflx) { - candidate_type = cricket::PRFLX_PORT_TYPE; + candidate_type = IceCandidateType::kPrflx; } else { return ParseFailed(first_line, "Unsupported candidate type.", error); } @@ -1168,15 +1143,15 @@ bool ParseCandidate(absl::string_view message, } if (fields.size() >= (current_position + 2) && fields[current_position] == kAttributeCandidateRport) { - int port = 0; - if (!GetValueFromString(first_line, fields[++current_position], &port, - error)) { + int related_port = 0; + if (!GetValueFromString(first_line, fields[++current_position], + &related_port, error)) { return false; } - if (!IsValidPort(port)) { + if (!IsValidPort(related_port)) { return ParseFailed(first_line, "Invalid port number.", error); } - related_address.SetPort(port); + related_address.SetPort(related_port); ++current_position; } @@ -1188,9 +1163,8 @@ bool ParseCandidate(absl::string_view message, tcptype = fields[++current_position]; ++current_position; - if (tcptype != cricket::TCPTYPE_ACTIVE_STR && - tcptype != cricket::TCPTYPE_PASSIVE_STR && - tcptype != cricket::TCPTYPE_SIMOPEN_STR) { + if (tcptype != TCPTYPE_ACTIVE_STR && tcptype != TCPTYPE_PASSIVE_STR && + tcptype != TCPTYPE_SIMOPEN_STR) { return ParseFailed(first_line, "Invalid TCP candidate type.", error); } @@ -1201,7 +1175,7 @@ bool ParseCandidate(absl::string_view message, // We allow the tcptype to be missing, for backwards compatibility, // treating it as a passive candidate. // TODO(bugs.webrtc.org/11466): Treat a missing tcptype as an error? - tcptype = cricket::TCPTYPE_PASSIVE_STR; + tcptype = TCPTYPE_PASSIVE_STR; } // Extension @@ -1232,15 +1206,15 @@ bool ParseCandidate(absl::string_view message, if (!GetValueFromString(first_line, fields[++i], &network_cost, error)) { return false; } - network_cost = std::min(network_cost, rtc::kNetworkCostMax); + network_cost = std::min(network_cost, kNetworkCostMax); } else { // Skip the unknown extension. 
++i; } } - *candidate = Candidate(component_id, cricket::ProtoToString(*protocol), - address, priority, username, password, candidate_type, + *candidate = Candidate(component_id, ProtoToString(*protocol), address, + priority, username, password, candidate_type, generation, foundation, network_id, network_cost); candidate->set_related_address(related_address); candidate->set_tcptype(tcptype); @@ -1255,7 +1229,7 @@ bool ParseIceOptions(absl::string_view line, return false; } std::vector fields = - rtc::split(ice_options, kSdpDelimiterSpaceChar); + split(ice_options, kSdpDelimiterSpaceChar); for (size_t i = 0; i < fields.size(); ++i) { transport_options->emplace_back(fields[i]); } @@ -1269,14 +1243,14 @@ bool ParseSctpPort(absl::string_view line, // a=sctp-port const size_t expected_min_fields = 2; std::vector fields = - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterColonChar); + split(line.substr(kLinePrefixLength), kSdpDelimiterColonChar); if (fields.size() < expected_min_fields) { - fields = rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); + fields = split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); } if (fields.size() < expected_min_fields) { return ParseFailedExpectMinFieldNum(line, expected_min_fields, error); } - if (!rtc::FromString(fields[1], sctp_port)) { + if (!FromString(fields[1], sctp_port)) { return ParseFailed(line, "Invalid sctp port value.", error); } return true; @@ -1289,11 +1263,11 @@ bool ParseSctpMaxMessageSize(absl::string_view line, // a=max-message-size:199999 const size_t expected_min_fields = 2; std::vector fields = - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterColonChar); + split(line.substr(kLinePrefixLength), kSdpDelimiterColonChar); if (fields.size() < expected_min_fields) { return ParseFailedExpectMinFieldNum(line, expected_min_fields, error); } - if (!rtc::FromString(fields[1], max_message_size)) { + if (!FromString(fields[1], max_message_size)) { return ParseFailed(line, "Invalid SCTP max message size.", error); } return true; @@ -1305,7 +1279,7 @@ bool ParseExtmap(absl::string_view line, // RFC 5285 // a=extmap:["/"] std::vector fields = - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); + split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); const size_t expected_min_fields = 2; if (fields.size() < expected_min_fields) { return ParseFailedExpectMinFieldNum(line, expected_min_fields, error); @@ -1317,7 +1291,7 @@ bool ParseExtmap(absl::string_view line, return false; } std::vector sub_fields = - rtc::split(value_direction, kSdpDelimiterSlashChar); + split(value_direction, kSdpDelimiterSlashChar); int value = 0; if (!GetValueFromString(line, sub_fields[0], &value, error)) { return false; @@ -1347,16 +1321,14 @@ bool ParseExtmap(absl::string_view line, static void BuildSctpContentAttributes( std::string* message, - const cricket::SctpDataContentDescription* data_desc) { - rtc::StringBuilder os; + const SctpDataContentDescription* data_desc) { + StringBuilder os; if (data_desc->use_sctpmap()) { // draft-ietf-mmusic-sctp-sdp-04 // a=sctpmap:sctpmap-number protocol [streams] - rtc::StringBuilder os; InitAttrLine(kAttributeSctpmap, &os); os << kSdpDelimiterColon << data_desc->port() << kSdpDelimiterSpace - << kDefaultSctpmapProtocol << kSdpDelimiterSpace - << cricket::kMaxSctpStreams; + << kDefaultSctpmapProtocol << kSdpDelimiterSpace << kMaxSctpStreams; AddLine(os.str(), message); } else { // draft-ietf-mmusic-sctp-sdp-23 @@ -1376,7 +1348,7 @@ void BuildIceUfragPwd(const 
TransportInfo* transport_info, std::string* message) { RTC_DCHECK(transport_info); - rtc::StringBuilder os; + StringBuilder os; // RFC 5245 // ice-pwd-att = "ice-pwd" ":" password // ice-ufrag-att = "ice-ufrag" ":" ufrag @@ -1398,7 +1370,7 @@ void BuildDtlsFingerprintSetup(const TransportInfo* transport_info, std::string* message) { RTC_DCHECK(transport_info); - rtc::StringBuilder os; + StringBuilder os; // RFC 4572 // fingerprint-attribute = // "fingerprint" ":" hash-func SP fingerprint @@ -1414,12 +1386,11 @@ void BuildDtlsFingerprintSetup(const TransportInfo* transport_info, AddLine(os.str(), message); // Inserting setup attribute. - if (transport_info->description.connection_role != - cricket::CONNECTIONROLE_NONE) { + if (transport_info->description.connection_role != CONNECTIONROLE_NONE) { // Making sure we are not using "passive" mode. - cricket::ConnectionRole role = transport_info->description.connection_role; + ConnectionRole role = transport_info->description.connection_role; std::string dtls_role_str; - const bool success = cricket::ConnectionRoleToString(role, &dtls_role_str); + const bool success = ConnectionRoleToString(role, &dtls_role_str); RTC_DCHECK(success); InitAttrLine(kAttributeSetup, &os); os << kSdpDelimiterColon << dtls_role_str; @@ -1427,47 +1398,40 @@ void BuildDtlsFingerprintSetup(const TransportInfo* transport_info, } } -void BuildMediaLine(const cricket::MediaType media_type, +void BuildMediaLine(const webrtc::MediaType media_type, const ContentInfo* content_info, const MediaContentDescription* media_desc, std::string* message) { - rtc::StringBuilder os; + StringBuilder os; // RFC 4566 // m= // fmt is a list of payload type numbers that MAY be used in the session. std::string type; std::string fmt; - if (media_type == cricket::MEDIA_TYPE_VIDEO) { - type = kMediaTypeVideo; - const VideoContentDescription* video_desc = media_desc->as_video(); - for (const cricket::VideoCodec& codec : video_desc->codecs()) { + if (media_type == webrtc::MediaType::AUDIO || + media_type == webrtc::MediaType::VIDEO) { + type = media_type == webrtc::MediaType::AUDIO ? 
kSdpMediaTypeAudio + : kSdpMediaTypeVideo; + for (const Codec& codec : media_desc->codecs()) { fmt.append(" "); - fmt.append(rtc::ToString(codec.id)); + fmt.append(absl::StrCat(codec.id)); } - } else if (media_type == cricket::MEDIA_TYPE_AUDIO) { - type = kMediaTypeAudio; - const AudioContentDescription* audio_desc = media_desc->as_audio(); - for (const cricket::AudioCodec& codec : audio_desc->codecs()) { - fmt.append(" "); - fmt.append(rtc::ToString(codec.id)); - } - } else if (media_type == cricket::MEDIA_TYPE_DATA) { - type = kMediaTypeData; - const cricket::SctpDataContentDescription* sctp_data_desc = - media_desc->as_sctp(); + } else if (media_type == webrtc::MediaType::DATA) { + type = kSdpMediaTypeData; + const SctpDataContentDescription* sctp_data_desc = media_desc->as_sctp(); if (sctp_data_desc) { fmt.append(" "); if (sctp_data_desc->use_sctpmap()) { - fmt.append(rtc::ToString(sctp_data_desc->port())); + fmt.append(absl::StrCat(sctp_data_desc->port())); } else { fmt.append(kDefaultSctpmapProtocol); } } else { RTC_DCHECK_NOTREACHED() << "Data description without SCTP"; } - } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) { + } else if (media_type == webrtc::MediaType::UNSUPPORTED) { const UnsupportedContentDescription* unsupported_desc = media_desc->as_unsupported(); type = unsupported_desc->media_type(); @@ -1494,7 +1458,7 @@ void BuildMediaLine(const cricket::MediaType media_type, if (content_info->rejected || content_info->bundle_only) { port = kMediaPortRejected; } else if (!media_desc->connection_address().IsNil()) { - port = rtc::ToString(media_desc->connection_address().port()); + port = absl::StrCat(media_desc->connection_address().port()); } // Add the m and c lines. @@ -1505,7 +1469,7 @@ void BuildMediaLine(const cricket::MediaType media_type, void BuildMediaDescription(const ContentInfo* content_info, const TransportInfo* transport_info, - const cricket::MediaType media_type, + const webrtc::MediaType media_type, const std::vector& candidates, int msid_signaling, std::string* message) { @@ -1513,7 +1477,7 @@ void BuildMediaDescription(const ContentInfo* content_info, if (!content_info) { return; } - rtc::StringBuilder os; + StringBuilder os; const MediaContentDescription* media_desc = content_info->media_description(); RTC_DCHECK(media_desc); @@ -1557,7 +1521,7 @@ void BuildMediaDescription(const ContentInfo* content_info, } // Add the a=rtcp line. - if (cricket::IsRtpProtocol(media_desc->protocol())) { + if (IsRtpProtocol(media_desc->protocol())) { std::string rtcp_line = GetRtcpLine(candidates); if (!rtcp_line.empty()) { AddLine(rtcp_line, message); @@ -1585,24 +1549,23 @@ void BuildMediaDescription(const ContentInfo* content_info, // identification-tag = token // Use the content name as the mid identification-tag. 
InitAttrLine(kAttributeMid, &os); - os << kSdpDelimiterColon << content_info->name; + os << kSdpDelimiterColon << content_info->mid(); AddLine(os.str(), message); - if (cricket::IsDtlsSctp(media_desc->protocol())) { - const cricket::SctpDataContentDescription* data_desc = - media_desc->as_sctp(); + if (IsDtlsSctp(media_desc->protocol())) { + const SctpDataContentDescription* data_desc = media_desc->as_sctp(); BuildSctpContentAttributes(message, data_desc); - } else if (cricket::IsRtpProtocol(media_desc->protocol())) { + } else if (IsRtpProtocol(media_desc->protocol())) { BuildRtpContentAttributes(media_desc, media_type, msid_signaling, message); } } void BuildRtpContentAttributes(const MediaContentDescription* media_desc, - const cricket::MediaType media_type, + const webrtc::MediaType media_type, int msid_signaling, std::string* message) { - SdpSerializer serializer; - rtc::StringBuilder os; + SimulcastSdpSerializer serializer; + StringBuilder os; // RFC 8285 // a=extmap-allow-mixed // The attribute MUST be either on session level or media level. We support @@ -1646,7 +1609,7 @@ void BuildRtpContentAttributes(const MediaContentDescription* media_desc, // line for every media stream, with a special msid-id value of "-" // representing no streams. The value of "msid-appdata" MUST be identical for // all lines. - if (msid_signaling & cricket::kMsidSignalingMediaSection) { + if (msid_signaling & kMsidSignalingMediaSection) { const StreamParamsVec& streams = media_desc->streams(); if (streams.size() == 1u) { const StreamParams& track = streams[0]; @@ -1691,18 +1654,6 @@ void BuildRtpContentAttributes(const MediaContentDescription* media_desc, AddLine(os.str(), message); } - // RFC 4568 - // a=crypto: [] - for (const CryptoParams& crypto_params : media_desc->cryptos()) { - InitAttrLine(kAttributeCrypto, &os); - os << kSdpDelimiterColon << crypto_params.tag << " " - << crypto_params.crypto_suite << " " << crypto_params.key_params; - if (!crypto_params.session_params.empty()) { - os << " " << crypto_params.session_params; - } - AddLine(os.str(), message); - } - // RFC 4566 // a=rtpmap: / // [/] @@ -1719,7 +1670,7 @@ void BuildRtpContentAttributes(const MediaContentDescription* media_desc, InitAttrLine(kAttributeSsrcGroup, &os); os << kSdpDelimiterColon << ssrc_group.semantics; for (uint32_t ssrc : ssrc_group.ssrcs) { - os << kSdpDelimiterSpace << rtc::ToString(ssrc); + os << kSdpDelimiterSpace << absl::StrCat(ssrc); } AddLine(os.str(), message); } @@ -1729,7 +1680,7 @@ void BuildRtpContentAttributes(const MediaContentDescription* media_desc, // a=ssrc: cname: AddSsrcLine(ssrc, kSsrcAttributeCname, track.cname, message); - if (msid_signaling & cricket::kMsidSignalingSsrcAttribute) { + if (msid_signaling & kMsidSignalingSsrcAttribute) { // draft-alvestrand-mmusic-msid-00 // a=ssrc: msid:identifier [appdata] // The appdata consists of the "id" attribute of a MediaStreamTrack, @@ -1755,7 +1706,7 @@ void BuildRtpContentAttributes(const MediaContentDescription* media_desc, for (const RidDescription& rid_description : track.rids()) { InitAttrLine(kAttributeRid, &os); os << kSdpDelimiterColon - << serializer.SerializeRidDescription(rid_description); + << serializer.SerializeRidDescription(*media_desc, rid_description); AddLine(os.str(), message); } } @@ -1763,7 +1714,7 @@ void BuildRtpContentAttributes(const MediaContentDescription* media_desc, for (const RidDescription& rid_description : media_desc->receive_rids()) { InitAttrLine(kAttributeRid, &os); os << kSdpDelimiterColon - << 
serializer.SerializeRidDescription(rid_description); + << serializer.SerializeRidDescription(*media_desc, rid_description); AddLine(os.str(), message); } @@ -1780,7 +1731,7 @@ void BuildRtpContentAttributes(const MediaContentDescription* media_desc, void BuildRtpHeaderExtensions(const RtpHeaderExtensions& extensions, std::string* message) { - rtc::StringBuilder os; + StringBuilder os; // RFC 8285 // a=extmap:["/"] @@ -1797,7 +1748,7 @@ void BuildRtpHeaderExtensions(const RtpHeaderExtensions& extensions, } } -void WriteFmtpHeader(int payload_type, rtc::StringBuilder* os) { +void WriteFmtpHeader(int payload_type, StringBuilder* os) { // fmtp header: a=fmtp:`payload_type` // Add a=fmtp InitAttrLine(kAttributeFmtp, os); @@ -1805,7 +1756,7 @@ void WriteFmtpHeader(int payload_type, rtc::StringBuilder* os) { *os << kSdpDelimiterColon << payload_type; } -void WritePacketizationHeader(int payload_type, rtc::StringBuilder* os) { +void WritePacketizationHeader(int payload_type, StringBuilder* os) { // packetization header: a=packetization:`payload_type` // Add a=packetization InitAttrLine(kAttributePacketization, os); @@ -1813,7 +1764,7 @@ void WritePacketizationHeader(int payload_type, rtc::StringBuilder* os) { *os << kSdpDelimiterColon << payload_type; } -void WriteRtcpFbHeader(int payload_type, rtc::StringBuilder* os) { +void WriteRtcpFbHeader(int payload_type, StringBuilder* os) { // rtcp-fb header: a=rtcp-fb:`payload_type` // /> // Add a=rtcp-fb @@ -1829,7 +1780,7 @@ void WriteRtcpFbHeader(int payload_type, rtc::StringBuilder* os) { void WriteFmtpParameter(absl::string_view parameter_name, absl::string_view parameter_value, - rtc::StringBuilder* os) { + StringBuilder* os) { if (parameter_name.empty()) { // RFC 2198 and RFC 4733 don't use key-value pairs. *os << parameter_value; @@ -1847,8 +1798,8 @@ bool IsFmtpParam(absl::string_view name) { return name != kCodecParamPTime && name != kCodecParamMaxPTime; } -bool WriteFmtpParameters(const cricket::CodecParameterMap& parameters, - rtc::StringBuilder* os) { +bool WriteFmtpParameters(const webrtc::CodecParameterMap& parameters, + StringBuilder* os) { bool empty = true; const char* delimiter = ""; // No delimiter before first parameter. for (const auto& entry : parameters) { @@ -1867,9 +1818,8 @@ bool WriteFmtpParameters(const cricket::CodecParameterMap& parameters, return !empty; } -template -void AddFmtpLine(const T& codec, std::string* message) { - rtc::StringBuilder os; +void AddFmtpLine(const Codec& codec, std::string* message) { + StringBuilder os; WriteFmtpHeader(codec.id, &os); os << kSdpDelimiterSpace; // Create FMTP line and check that it's nonempty. 
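AddFmtpLine and WriteFmtpParameters above collapse a codec's parameter map into one `a=fmtp:` line; for example, Opus parameters minptime=10 and useinbandfec=1 on payload type 111 typically serialize as `a=fmtp:111 minptime=10;useinbandfec=1`. A standalone sketch of that shape (the parameter names and the helper below are illustrative):

#include <map>
#include <string>

// Joins key=value codec parameters into an "a=fmtp:" line, e.g.
// "a=fmtp:111 minptime=10;useinbandfec=1". Parameters with an empty name
// (RFC 2198 / RFC 4733 style) are written as bare values, mirroring the
// WriteFmtpParameter branch above.
std::string FormatFmtpLine(int payload_type,
                           const std::map<std::string, std::string>& params) {
  std::string line = "a=fmtp:" + std::to_string(payload_type) + " ";
  const char* delimiter = "";  // No delimiter before the first parameter.
  for (const auto& [name, value] : params) {
    line += delimiter;
    if (name.empty()) {
      line += value;
    } else {
      line += name + "=" + value;
    }
    delimiter = ";";
  }
  return line;
}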
@@ -1879,21 +1829,19 @@ void AddFmtpLine(const T& codec, std::string* message) { return; } -template -void AddPacketizationLine(const T& codec, std::string* message) { +void AddPacketizationLine(const Codec& codec, std::string* message) { if (!codec.packetization) { return; } - rtc::StringBuilder os; + StringBuilder os; WritePacketizationHeader(codec.id, &os); os << " " << *codec.packetization; AddLine(os.str(), message); } -template -void AddRtcpFbLines(const T& codec, std::string* message) { - for (const cricket::FeedbackParam& param : codec.feedback_params.params()) { - rtc::StringBuilder os; +void AddRtcpFbLines(const Codec& codec, std::string* message) { + for (const FeedbackParam& param : codec.feedback_params.params()) { + StringBuilder os; WriteRtcpFbHeader(codec.id, &os); os << " " << param.id(); if (!param.param().empty()) { @@ -1913,44 +1861,44 @@ bool GetMinValue(const std::vector& values, int* value) { } bool GetParameter(const std::string& name, - const cricket::CodecParameterMap& params, + const webrtc::CodecParameterMap& params, int* value) { std::map::const_iterator found = params.find(name); if (found == params.end()) { return false; } - if (!rtc::FromString(found->second, value)) { + if (!FromString(found->second, value)) { return false; } return true; } void BuildRtpmap(const MediaContentDescription* media_desc, - const cricket::MediaType media_type, + const webrtc::MediaType media_type, std::string* message) { RTC_DCHECK(message != NULL); RTC_DCHECK(media_desc != NULL); - rtc::StringBuilder os; - if (media_type == cricket::MEDIA_TYPE_VIDEO) { - for (const cricket::VideoCodec& codec : media_desc->as_video()->codecs()) { + StringBuilder os; + if (media_type == webrtc::MediaType::VIDEO) { + for (const Codec& codec : media_desc->codecs()) { // RFC 4566 // a=rtpmap: / // [/] if (codec.id != kWildcardPayloadType) { InitAttrLine(kAttributeRtpmap, &os); os << kSdpDelimiterColon << codec.id << " " << codec.name << "/" - << cricket::kVideoCodecClockrate; + << kVideoCodecClockrate; AddLine(os.str(), message); } AddPacketizationLine(codec, message); AddRtcpFbLines(codec, message); AddFmtpLine(codec, message); } - } else if (media_type == cricket::MEDIA_TYPE_AUDIO) { + } else if (media_type == webrtc::MediaType::AUDIO) { std::vector ptimes; std::vector maxptimes; int max_minptime = 0; - for (const cricket::AudioCodec& codec : media_desc->as_audio()->codecs()) { + for (const Codec& codec : media_desc->codecs()) { RTC_DCHECK(!codec.name.empty()); // RFC 4566 // a=rtpmap: / @@ -1993,12 +1941,19 @@ void BuildRtpmap(const MediaContentDescription* media_desc, AddAttributeLine(kCodecParamPTime, ptime, message); } } + if (media_desc->rtcp_fb_ack_ccfb()) { + // RFC 8888 section 6 + InitAttrLine(kAttributeRtcpFb, &os); + os << kSdpDelimiterColon; + os << "* ack ccfb"; + AddLine(os.str(), message); + } } void BuildCandidate(const std::vector& candidates, bool include_ufrag, std::string* message) { - rtc::StringBuilder os; + StringBuilder os; for (const Candidate& candidate : candidates) { // RFC 5245 @@ -2008,13 +1963,13 @@ void BuildCandidate(const std::vector& candidates, // *(SP extension-att-name SP extension-att-value) std::string type; // Map the cricket candidate type to "host" / "srflx" / "prflx" / "relay" - if (candidate.type() == cricket::LOCAL_PORT_TYPE) { + if (candidate.is_local()) { type = kCandidateHost; - } else if (candidate.type() == cricket::STUN_PORT_TYPE) { + } else if (candidate.is_stun()) { type = kCandidateSrflx; - } else if (candidate.type() == 
cricket::RELAY_PORT_TYPE) { + } else if (candidate.is_relay()) { type = kCandidateRelay; - } else if (candidate.type() == cricket::PRFLX_PORT_TYPE) { + } else if (candidate.is_prflx()) { type = kCandidatePrflx; // Peer reflexive candidate may be signaled for being removed. } else { @@ -2044,7 +1999,7 @@ void BuildCandidate(const std::vector& candidates, // Note that we allow the tcptype to be missing, for backwards // compatibility; the implementation treats this as a passive candidate. // TODO(bugs.webrtc.org/11466): Treat a missing tcptype as an error? - if (candidate.protocol() == cricket::TCP_PROTOCOL_NAME && + if (candidate.protocol() == TCP_PROTOCOL_NAME && !candidate.tcptype().empty()) { os << kTcpCandidateType << " " << candidate.tcptype() << " "; } @@ -2070,7 +2025,7 @@ void BuildCandidate(const std::vector& candidates, void BuildIceOptions(const std::vector& transport_options, std::string* message) { if (!transport_options.empty()) { - rtc::StringBuilder os; + StringBuilder os; InitAttrLine(kAttributeIceOption, &os); os << kSdpDelimiterColon << transport_options[0]; for (size_t i = 1; i < transport_options.size(); ++i) { @@ -2081,7 +2036,7 @@ void BuildIceOptions(const std::vector& transport_options, } bool ParseConnectionData(absl::string_view line, - rtc::SocketAddress* addr, + SocketAddress* addr, SdpParseError* error) { // Parse the line from left to right. std::string token; @@ -2089,13 +2044,12 @@ bool ParseConnectionData(absl::string_view line, // RFC 4566 // c= // Skip the "c=" - if (!rtc::tokenize_first(line, kSdpDelimiterEqualChar, &token, &rightpart)) { + if (!tokenize_first(line, kSdpDelimiterEqualChar, &token, &rightpart)) { return ParseFailed(line, "Failed to parse the network type.", error); } // Extract and verify the - if (!rtc::tokenize_first(rightpart, kSdpDelimiterSpaceChar, &token, - &rightpart) || + if (!tokenize_first(rightpart, kSdpDelimiterSpaceChar, &token, &rightpart) || token != kConnectionNettype) { return ParseFailed(line, "Failed to parse the connection data. The network type " @@ -2104,8 +2058,7 @@ bool ParseConnectionData(absl::string_view line, } // Extract the "" and "". 
- if (!rtc::tokenize_first(rightpart, kSdpDelimiterSpaceChar, &token, - &rightpart)) { + if (!tokenize_first(rightpart, kSdpDelimiterSpaceChar, &token, &rightpart)) { return ParseFailed(line, "Failed to parse the address type.", error); } @@ -2137,12 +2090,12 @@ bool ParseSessionDescription(absl::string_view message, std::string* session_version, TransportDescription* session_td, RtpHeaderExtensions* session_extmaps, - rtc::SocketAddress* connection_addr, - cricket::SessionDescription* desc, + SocketAddress* connection_addr, + SessionDescription* desc, SdpParseError* error) { - absl::optional line; + std::optional line; - desc->set_msid_supported(false); + desc->set_msid_signaling(kMsidSignalingNotUsed); desc->set_extmap_allow_mixed(false); // RFC 4566 // v= (protocol version) @@ -2160,7 +2113,7 @@ bool ParseSessionDescription(absl::string_view message, error); } std::vector fields = - rtc::split(line->substr(kLinePrefixLength), kSdpDelimiterSpaceChar); + split(line->substr(kLinePrefixLength), kSdpDelimiterSpaceChar); const size_t expected_fields = 6; if (fields.size() != expected_fields) { return ParseFailedExpectFieldNum(*line, expected_fields, error); @@ -2197,7 +2150,7 @@ bool ParseSessionDescription(absl::string_view message, // RFC 4566 // c=* (connection information -- not required if included in // all media) - if (absl::optional cline = + if (std::optional cline = GetLineWithType(message, pos, kLineTypeConnection); cline.has_value()) { if (!ParseConnectionData(*cline, connection_addr, error)) { @@ -2242,7 +2195,7 @@ bool ParseSessionDescription(absl::string_view message, // RFC 4566 // a=* (zero or more session attribute lines) - while (absl::optional aline = + while (std::optional aline = GetLineWithType(message, pos, kLineTypeAttributes)) { if (HasAttribute(*aline, kAttributeGroup)) { if (!ParseGroupAttribute(*aline, desc, error)) { @@ -2258,7 +2211,7 @@ bool ParseSessionDescription(absl::string_view message, return false; } } else if (HasAttribute(*aline, kAttributeIceLite)) { - session_td->ice_mode = cricket::ICEMODE_LITE; + session_td->ice_mode = ICEMODE_LITE; } else if (HasAttribute(*aline, kAttributeIceOption)) { if (!ParseIceOptions(*aline, &(session_td->transport_options), error)) { return false; @@ -2270,7 +2223,7 @@ bool ParseSessionDescription(absl::string_view message, "Can't have multiple fingerprint attributes at the same level.", error); } - std::unique_ptr fingerprint; + std::unique_ptr fingerprint; if (!ParseFingerprintAttribute(*aline, &fingerprint, error)) { return false; } @@ -2284,8 +2237,9 @@ bool ParseSessionDescription(absl::string_view message, if (!GetValue(*aline, kAttributeMsidSemantics, &semantics, error)) { return false; } - desc->set_msid_supported( - CaseInsensitiveFind(semantics, kMediaStreamSemantic)); + if (CaseInsensitiveFind(semantics, kMediaStreamSemantic)) { + desc->set_msid_signaling(kMsidSignalingSemantic); + } } else if (HasAttribute(*aline, kAttributeExtmapAllowMixed)) { desc->set_extmap_allow_mixed(true); } else if (HasAttribute(*aline, kAttributeExtmap)) { @@ -2300,19 +2254,19 @@ bool ParseSessionDescription(absl::string_view message, } bool ParseGroupAttribute(absl::string_view line, - cricket::SessionDescription* desc, + SessionDescription* desc, SdpParseError* error) { RTC_DCHECK(desc != NULL); // RFC 5888 and draft-holmberg-mmusic-sdp-bundle-negotiation-00 // a=group:BUNDLE video voice std::vector fields = - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); + split(line.substr(kLinePrefixLength), 
kSdpDelimiterSpaceChar); std::string semantics; if (!GetValue(fields[0], kAttributeGroup, &semantics, error)) { return false; } - cricket::ContentGroup group(semantics); + ContentGroup group(semantics); for (size_t i = 1; i < fields.size(); ++i) { group.AddContentName(fields[i]); } @@ -2322,10 +2276,10 @@ bool ParseGroupAttribute(absl::string_view line, static bool ParseFingerprintAttribute( absl::string_view line, - std::unique_ptr* fingerprint, + std::unique_ptr* fingerprint, SdpParseError* error) { std::vector fields = - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); + split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); const size_t expected_fields = 2; if (fields.size() != expected_fields) { return ParseFailedExpectFieldNum(line, expected_fields, error); @@ -2342,8 +2296,7 @@ static bool ParseFingerprintAttribute( absl::c_transform(algorithm, algorithm.begin(), ::tolower); // The second field is the digest value. De-hexify it. - *fingerprint = - rtc::SSLFingerprint::CreateUniqueFromRfc4572(algorithm, fields[1]); + *fingerprint = SSLFingerprint::CreateUniqueFromRfc4572(algorithm, fields[1]); if (!*fingerprint) { return ParseFailed(line, "Failed to create fingerprint from the digest.", error); @@ -2353,18 +2306,17 @@ static bool ParseFingerprintAttribute( } static bool ParseDtlsSetup(absl::string_view line, - cricket::ConnectionRole* role_ptr, + ConnectionRole* role_ptr, SdpParseError* error) { // setup-attr = "a=setup:" role // role = "active" / "passive" / "actpass" / "holdconn" std::vector fields = - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterColonChar); + split(line.substr(kLinePrefixLength), kSdpDelimiterColonChar); const size_t expected_fields = 2; if (fields.size() != expected_fields) { return ParseFailedExpectFieldNum(line, expected_fields, error); } - if (absl::optional role = - cricket::StringToConnectionRole(fields[1]); + if (std::optional role = StringToConnectionRole(fields[1]); role.has_value()) { *role_ptr = *role; return true; @@ -2384,9 +2336,10 @@ static bool ParseMsidAttribute(absl::string_view line, // Note that JSEP stipulates not sending msid-appdata so // a=msid: // is supported for backward compability reasons only. + // RFC 8830 section 2 states that duplicate a=msid:stream track is illegal. std::vector fields; - size_t num_fields = rtc::tokenize(line.substr(kLinePrefixLength), - kSdpDelimiterSpaceChar, &fields); + size_t num_fields = + tokenize(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar, &fields); if (num_fields < 1 || num_fields > 2) { return ParseFailed(line, "Expected a stream ID and optionally a track ID", error); @@ -2432,57 +2385,19 @@ static bool ParseMsidAttribute(absl::string_view line, return true; } -static void RemoveInvalidRidDescriptions(const std::vector& payload_types, - std::vector* rids) { +static void RemoveDuplicateRidDescriptions( + const std::vector& payload_types, + std::vector* rids) { RTC_DCHECK(rids); std::set to_remove; std::set unique_rids; - - // Check the rids to see which ones should be removed. + // Find duplicate RIDs to remove. for (RidDescription& rid : *rids) { - // In the case of a duplicate, the entire "a=rid" line, and all "a=rid" - // lines with rid-ids that duplicate this line, are discarded and MUST NOT - // be included in the SDP Answer. - auto pair = unique_rids.insert(rid.rid); - // Insert will "fail" if element already exists. 
- if (!pair.second) { + if (!unique_rids.insert(rid.rid).second) { to_remove.insert(rid.rid); continue; } - - // If the "a=rid" line contains a "pt=", the list of payload types - // is verified against the list of valid payload types for the media - // section (that is, those listed on the "m=" line). Any PT missing - // from the "m=" line is discarded from the set of values in the - // "pt=". If no values are left in the "pt=" parameter after this - // processing, then the "a=rid" line is discarded. - if (rid.payload_types.empty()) { - // If formats were not specified, rid should not be removed. - continue; - } - - // Note: Spec does not mention how to handle duplicate formats. - // Media section does not handle duplicates either. - std::set removed_formats; - for (int payload_type : rid.payload_types) { - if (!absl::c_linear_search(payload_types, payload_type)) { - removed_formats.insert(payload_type); - } - } - - rid.payload_types.erase( - std::remove_if(rid.payload_types.begin(), rid.payload_types.end(), - [&removed_formats](int format) { - return removed_formats.count(format) > 0; - }), - rid.payload_types.end()); - - // If all formats were removed then remove the rid alogether. - if (rid.payload_types.empty()) { - to_remove.insert(rid.rid); - } } - // Remove every rid description that appears in the to_remove list. if (!to_remove.empty()) { rids->erase(std::remove_if(rids->begin(), rids->end(), @@ -2546,7 +2461,7 @@ static void RemoveInvalidRidsFromSimulcast( for (const SimulcastLayer& send_layer : all_send_layers) { if (absl::c_none_of(valid_rids, [&send_layer](const RidDescription& rid) { return send_layer.rid == rid.rid && - rid.direction == cricket::RidDirection::kSend; + rid.direction == RidDirection::kSend; })) { to_remove.insert(send_layer.rid); } @@ -2554,11 +2469,11 @@ static void RemoveInvalidRidsFromSimulcast( // Add any rid that is not in the valid list to the remove set. 
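Aside: the surrounding hunk replaces the old payload-type pruning with plain duplicate removal, and the part that survives is the std::set::insert() trick. A minimal self-contained sketch of that pattern, with an invented RemoveDuplicateIds helper over plain strings (the simulcast-specific rid pruning continues right after this aside):

```cpp
// Sketch only: drop every occurrence of an id that appears more than once,
// matching the SDP rule that all "a=rid" lines sharing a rid-id are discarded.
#include <algorithm>
#include <set>
#include <string>
#include <vector>

void RemoveDuplicateIds(std::vector<std::string>& ids) {
  std::set<std::string> seen;
  std::set<std::string> duplicates;
  for (const std::string& id : ids) {
    if (!seen.insert(id).second) {  // insert() "fails" if id was already seen.
      duplicates.insert(id);
    }
  }
  ids.erase(std::remove_if(ids.begin(), ids.end(),
                           [&](const std::string& id) {
                             return duplicates.count(id) > 0;
                           }),
            ids.end());
}
```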
for (const SimulcastLayer& receive_layer : all_receive_layers) { - if (absl::c_none_of( - valid_rids, [&receive_layer](const RidDescription& rid) { - return receive_layer.rid == rid.rid && - rid.direction == cricket::RidDirection::kReceive; - })) { + if (absl::c_none_of(valid_rids, + [&receive_layer](const RidDescription& rid) { + return receive_layer.rid == rid.rid && + rid.direction == RidDirection::kReceive; + })) { to_remove.insert(receive_layer.rid); } } @@ -2607,7 +2522,7 @@ static const StaticPayloadAudioCodec kStaticPayloadAudioCodecs[] = { }; void MaybeCreateStaticPayloadAudioCodecs(const std::vector& fmts, - AudioContentDescription* media_desc) { + MediaContentDescription* media_desc) { if (!media_desc) { return; } @@ -2619,16 +2534,53 @@ void MaybeCreateStaticPayloadAudioCodecs(const std::vector& fmts, std::string encoding_name = kStaticPayloadAudioCodecs[payload_type].name; int clock_rate = kStaticPayloadAudioCodecs[payload_type].clockrate; size_t channels = kStaticPayloadAudioCodecs[payload_type].channels; - media_desc->AddCodec(cricket::CreateAudioCodec( - payload_type, encoding_name, clock_rate, channels)); + media_desc->AddCodec( + CreateAudioCodec(payload_type, encoding_name, clock_rate, channels)); } } } -template -static std::unique_ptr ParseContentDescription( +static void BackfillCodecParameters(std::vector& codecs) { + for (auto& codec : codecs) { + std::string unused_value; + if (absl::EqualsIgnoreCase(kVp9CodecName, codec.name)) { + // https://datatracker.ietf.org/doc/html/draft-ietf-payload-vp9#section-6 + // profile-id defaults to "0" + if (!codec.GetParam(kVP9ProfileId, &unused_value)) { + codec.SetParam(kVP9ProfileId, "0"); + } + } else if (absl::EqualsIgnoreCase(kH264CodecName, codec.name)) { + // https://www.rfc-editor.org/rfc/rfc6184#section-6.2 + // packetization-mode defaults to "0" + if (!codec.GetParam(kH264FmtpPacketizationMode, &unused_value)) { + codec.SetParam(kH264FmtpPacketizationMode, "0"); + } + } else if (absl::EqualsIgnoreCase(kAv1CodecName, codec.name)) { + // https://aomediacodec.github.io/av1-rtp-spec/#72-sdp-parameters + if (!codec.GetParam(kAv1FmtpProfile, &unused_value)) { + codec.SetParam(kAv1FmtpProfile, "0"); + } + if (!codec.GetParam(kAv1FmtpLevelIdx, &unused_value)) { + codec.SetParam(kAv1FmtpLevelIdx, "5"); + } + if (!codec.GetParam(kAv1FmtpTier, &unused_value)) { + codec.SetParam(kAv1FmtpTier, "0"); + } + } else if (absl::EqualsIgnoreCase(kH265CodecName, codec.name)) { + // https://datatracker.ietf.org/doc/html/draft-aboba-avtcore-hevc-webrtc + if (!codec.GetParam(kH265FmtpLevelId, &unused_value)) { + codec.SetParam(kH265FmtpLevelId, "93"); + } + if (!codec.GetParam(kH265FmtpTxMode, &unused_value)) { + codec.SetParam(kH265FmtpTxMode, "SRST"); + } + } + } +} + +static std::unique_ptr ParseContentDescription( absl::string_view message, - const cricket::MediaType media_type, + const webrtc::MediaType media_type, int mline_index, absl::string_view protocol, const std::vector& payload_types, @@ -2638,8 +2590,17 @@ static std::unique_ptr ParseContentDescription( int* msid_signaling, TransportDescription* transport, std::vector>* candidates, - webrtc::SdpParseError* error) { - auto media_desc = std::make_unique(); + SdpParseError* error) { + std::unique_ptr media_desc; + if (media_type == webrtc::MediaType::AUDIO) { + media_desc = std::make_unique(); + } else if (media_type == webrtc::MediaType::VIDEO) { + media_desc = std::make_unique(); + } else { + RTC_DCHECK_NOTREACHED(); + return nullptr; + } + 
media_desc->set_extmap_allow_mixed_enum(MediaContentDescription::kNo); if (!ParseContent(message, media_type, mline_index, protocol, payload_types, pos, content_name, bundle_only, msid_signaling, @@ -2655,37 +2616,54 @@ static std::unique_ptr ParseContentDescription( for (int pt : payload_types) { payload_type_preferences[pt] = preference--; } - std::vector codecs = media_desc->codecs(); - absl::c_sort(codecs, [&payload_type_preferences](const cricket::Codec& a, - const cricket::Codec& b) { - return payload_type_preferences[a.id] > payload_type_preferences[b.id]; - }); + std::vector codecs = media_desc->codecs(); + absl::c_sort( + codecs, [&payload_type_preferences](const Codec& a, const Codec& b) { + return payload_type_preferences[a.id] > payload_type_preferences[b.id]; + }); + // Backfill any default parameters. + BackfillCodecParameters(codecs); + media_desc->set_codecs(codecs); return media_desc; } +bool HasDuplicateMsidLines(SessionDescription* desc) { + std::set> seen_msids; + for (const ContentInfo& content : desc->contents()) { + for (const StreamParams& stream : content.media_description()->streams()) { + auto msid = std::pair(stream.first_stream_id(), stream.id); + if (seen_msids.find(msid) != seen_msids.end()) { + return true; + } + seen_msids.insert(std::move(msid)); + } + } + return false; +} + bool ParseMediaDescription( absl::string_view message, const TransportDescription& session_td, const RtpHeaderExtensions& session_extmaps, size_t* pos, - const rtc::SocketAddress& session_connection_addr, - cricket::SessionDescription* desc, + const SocketAddress& session_connection_addr, + SessionDescription* desc, std::vector>* candidates, SdpParseError* error) { RTC_DCHECK(desc != NULL); int mline_index = -1; - int msid_signaling = 0; + int msid_signaling = desc->msid_signaling(); // Zero or more media descriptions // RFC 4566 // m= - while (absl::optional mline = + while (std::optional mline = GetLineWithType(message, pos, kLineTypeMedia)) { ++mline_index; std::vector fields = - rtc::split(mline->substr(kLinePrefixLength), kSdpDelimiterSpaceChar); + split(mline->substr(kLinePrefixLength), kSdpDelimiterSpaceChar); const size_t expected_min_fields = 4; if (fields.size() < expected_min_fields) { @@ -2700,14 +2678,14 @@ bool ParseMediaDescription( } int port = 0; - if (!rtc::FromString(fields[1], &port) || !IsValidPort(port)) { + if (!FromString(fields[1], &port) || !IsValidPort(port)) { return ParseFailed(*mline, "The port number is invalid", error); } absl::string_view protocol = fields[2]; // std::vector payload_types; - if (cricket::IsRtpProtocol(protocol)) { + if (IsRtpProtocol(protocol)) { for (size_t j = 3; j < fields.size(); ++j) { int pl = 0; if (!GetPayloadTypeFromString(*mline, fields[j], &pl, error)) { @@ -2727,23 +2705,24 @@ bool ParseMediaDescription( std::unique_ptr content; std::string content_name; bool bundle_only = false; - int section_msid_signaling = 0; + int section_msid_signaling = kMsidSignalingNotUsed; absl::string_view media_type = fields[0]; - if ((media_type == kMediaTypeVideo || media_type == kMediaTypeAudio) && - !cricket::IsRtpProtocol(protocol)) { + if ((media_type == kSdpMediaTypeVideo || + media_type == kSdpMediaTypeAudio) && + !IsRtpProtocol(protocol)) { return ParseFailed(*mline, "Unsupported protocol for media type", error); } - if (media_type == kMediaTypeVideo) { - content = ParseContentDescription( - message, cricket::MEDIA_TYPE_VIDEO, mline_index, protocol, + if (media_type == kSdpMediaTypeVideo) { + content = ParseContentDescription( + message, 
webrtc::MediaType::VIDEO, mline_index, protocol, payload_types, pos, &content_name, &bundle_only, §ion_msid_signaling, &transport, candidates, error); - } else if (media_type == kMediaTypeAudio) { - content = ParseContentDescription( - message, cricket::MEDIA_TYPE_AUDIO, mline_index, protocol, + } else if (media_type == kSdpMediaTypeAudio) { + content = ParseContentDescription( + message, webrtc::MediaType::AUDIO, mline_index, protocol, payload_types, pos, &content_name, &bundle_only, §ion_msid_signaling, &transport, candidates, error); - } else if (media_type == kMediaTypeData && cricket::IsDtlsSctp(protocol)) { + } else if (media_type == kSdpMediaTypeData && IsDtlsSctp(protocol)) { // The draft-03 format is: // m=application DTLS/SCTP ... // use_sctpmap should be false. @@ -2755,15 +2734,15 @@ bool ParseMediaDescription( // according to draft-ietf-mmusic-sctp-sdp-26 data_desc->set_max_message_size(kDefaultSctpMaxMessageSize); int p; - if (rtc::FromString(fields[3], &p)) { + if (FromString(fields[3], &p)) { data_desc->set_port(p); } else if (fields[3] == kDefaultSctpmapProtocol) { data_desc->set_use_sctpmap(false); } - if (!ParseContent(message, cricket::MEDIA_TYPE_DATA, mline_index, - protocol, payload_types, pos, &content_name, - &bundle_only, §ion_msid_signaling, data_desc.get(), - &transport, candidates, error)) { + if (!ParseContent(message, webrtc::MediaType::DATA, mline_index, protocol, + payload_types, pos, &content_name, &bundle_only, + §ion_msid_signaling, data_desc.get(), &transport, + candidates, error)) { return false; } data_desc->set_protocol(protocol); @@ -2772,7 +2751,7 @@ bool ParseMediaDescription( RTC_LOG(LS_WARNING) << "Unsupported media type: " << *mline; auto unsupported_desc = std::make_unique(media_type); - if (!ParseContent(message, cricket::MEDIA_TYPE_UNSUPPORTED, mline_index, + if (!ParseContent(message, webrtc::MediaType::UNSUPPORTED, mline_index, protocol, payload_types, pos, &content_name, &bundle_only, §ion_msid_signaling, unsupported_desc.get(), &transport, candidates, @@ -2810,7 +2789,7 @@ bool ParseMediaDescription( if (content->as_unsupported()) { content_rejected = true; - } else if (cricket::IsRtpProtocol(protocol) && !content->as_sctp()) { + } else if (IsRtpProtocol(protocol) && !content->as_sctp()) { content->set_protocol(std::string(protocol)); // Set the extmap. if (!session_extmaps.empty() && @@ -2832,7 +2811,7 @@ bool ParseMediaDescription( // Use the session level connection address if the media level addresses are // not specified. - rtc::SocketAddress address; + SocketAddress address; address = content->connection_address().IsNil() ? session_connection_addr : content->connection_address(); @@ -2840,12 +2819,17 @@ bool ParseMediaDescription( content->set_connection_address(address); desc->AddContent(content_name, - cricket::IsDtlsSctp(protocol) ? MediaProtocolType::kSctp - : MediaProtocolType::kRtp, + IsDtlsSctp(protocol) ? MediaProtocolType::kSctp + : MediaProtocolType::kRtp, content_rejected, bundle_only, std::move(content)); // Create TransportInfo with the media level "ice-pwd" and "ice-ufrag". 
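Aside on BackfillCodecParameters(), the helper introduced above and called at the end of ParseContentDescription(): it only writes a parameter when the fmtp did not carry one, so explicitly signaled values always win. A minimal sketch of that pattern on a plain parameter map; the helper name SetDefaultIfMissing is invented for this example, while the default values listed are the ones from the patch.

```cpp
#include <map>
#include <string>

// Sketch only: insert a spec default unless the fmtp already carries a value.
void SetDefaultIfMissing(std::map<std::string, std::string>& params,
                         const std::string& key,
                         const std::string& default_value) {
  // emplace() does nothing when the key already exists, so an explicitly
  // signaled value is never overwritten by the default.
  params.emplace(key, default_value);
}

// Defaults applied by BackfillCodecParameters(), per the specs cited there:
//   VP9:   profile-id = "0"
//   H.264: packetization-mode = "0"
//   AV1:   profile = "0", level-idx = "5", tier = "0"
//   H.265: level-id = "93", tx-mode = "SRST"
```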
desc->AddTransportInfo(TransportInfo(content_name, transport)); } + // Apply whole-description sanity checks + if (HasDuplicateMsidLines(desc)) { + ParseFailed(message, *pos, "Duplicate a=msid lines detected", error); + return false; + } desc->set_msid_signaling(msid_signaling); @@ -2857,23 +2841,7 @@ bool ParseMediaDescription( return true; } -bool VerifyCodec(const cricket::Codec& codec) { - // Codec has not been populated correctly unless the name has been set. This - // can happen if an SDP has an fmtp or rtcp-fb with a payload type but doesn't - // have a corresponding "rtpmap" line. - return !codec.name.empty(); -} - -bool VerifyAudioCodecs(const AudioContentDescription* audio_desc) { - return absl::c_all_of(audio_desc->codecs(), &VerifyCodec); -} - -bool VerifyVideoCodecs(const VideoContentDescription* video_desc) { - return absl::c_all_of(video_desc->codecs(), &VerifyCodec); -} - -void AddParameters(const cricket::CodecParameterMap& parameters, - cricket::Codec* codec) { +void AddParameters(const webrtc::CodecParameterMap& parameters, Codec* codec) { for (const auto& entry : parameters) { const std::string& key = entry.first; const std::string& value = entry.second; @@ -2881,14 +2849,13 @@ void AddParameters(const cricket::CodecParameterMap& parameters, } } -void AddFeedbackParameter(const cricket::FeedbackParam& feedback_param, - cricket::Codec* codec) { +void AddFeedbackParameter(const FeedbackParam& feedback_param, Codec* codec) { codec->AddFeedbackParam(feedback_param); } -void AddFeedbackParameters(const cricket::FeedbackParams& feedback_params, - cricket::Codec* codec) { - for (const cricket::FeedbackParam& param : feedback_params.params()) { +void AddFeedbackParameters(const FeedbackParams& feedback_params, + Codec* codec) { + for (const FeedbackParam& param : feedback_params.params()) { codec->AddFeedbackParam(param); } } @@ -2896,28 +2863,26 @@ void AddFeedbackParameters(const cricket::FeedbackParams& feedback_params, // Gets the current codec setting associated with `payload_type`. If there // is no Codec associated with that payload type it returns an empty codec // with that payload type. -template -T GetCodecWithPayloadType(cricket::MediaType type, - const std::vector& codecs, - int payload_type) { - const T* codec = FindCodecById(codecs, payload_type); +Codec GetCodecWithPayloadType(webrtc::MediaType type, + const std::vector& codecs, + int payload_type) { + const Codec* codec = FindCodecById(codecs, payload_type); if (codec) return *codec; // Return empty codec with `payload_type`. - if (type == cricket::MEDIA_TYPE_AUDIO) { - return cricket::CreateAudioCodec(payload_type, "", 0, 0); + if (type == webrtc::MediaType::AUDIO) { + return CreateAudioCodec(payload_type, "", 0, 0); } else { - return cricket::CreateVideoCodec(payload_type, ""); + return CreateVideoCodec(payload_type, ""); } } // Updates or creates a new codec entry in the media description. -template -void AddOrReplaceCodec(MediaContentDescription* content_desc, const U& codec) { - T* desc = static_cast(content_desc); - std::vector codecs = desc->codecs(); +void AddOrReplaceCodec(MediaContentDescription* content_desc, + const Codec& codec) { + std::vector codecs = content_desc->codecs(); bool found = false; - for (U& existing_codec : codecs) { + for (Codec& existing_codec : codecs) { if (codec.id == existing_codec.id) { // Overwrite existing codec with the new codec. 
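Aside: the new whole-description sanity check above rejects SDP in which the same a=msid stream/track pair is declared more than once. A condensed sketch of the detection logic in HasDuplicateMsidLines(), here over plain string pairs and with an invented HasDuplicatePairs name:

```cpp
#include <set>
#include <string>
#include <utility>
#include <vector>

// Sketch only: true if any (stream id, track id) pair occurs twice.
bool HasDuplicatePairs(
    const std::vector<std::pair<std::string, std::string>>& msids) {
  std::set<std::pair<std::string, std::string>> seen;
  for (const auto& msid : msids) {
    if (!seen.insert(msid).second) {
      return true;  // insert() fails when the pair was already seen.
    }
  }
  return false;
}
```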
existing_codec = codec; @@ -2926,98 +2891,98 @@ void AddOrReplaceCodec(MediaContentDescription* content_desc, const U& codec) { } } if (!found) { - desc->AddCodec(codec); + content_desc->AddCodec(codec); return; } - desc->set_codecs(codecs); + content_desc->set_codecs(codecs); } // Adds or updates existing codec corresponding to `payload_type` according // to `parameters`. -template void UpdateCodec(MediaContentDescription* content_desc, int payload_type, - const cricket::CodecParameterMap& parameters) { + const webrtc::CodecParameterMap& parameters) { // Codec might already have been populated (from rtpmap). - U new_codec = GetCodecWithPayloadType(content_desc->type(), - static_cast(content_desc)->codecs(), - payload_type); + Codec new_codec = GetCodecWithPayloadType( + content_desc->type(), content_desc->codecs(), payload_type); AddParameters(parameters, &new_codec); - AddOrReplaceCodec(content_desc, new_codec); + AddOrReplaceCodec(content_desc, new_codec); } // Adds or updates existing codec corresponding to `payload_type` according // to `feedback_param`. -template void UpdateCodec(MediaContentDescription* content_desc, int payload_type, - const cricket::FeedbackParam& feedback_param) { + const FeedbackParam& feedback_param) { // Codec might already have been populated (from rtpmap). - U new_codec = GetCodecWithPayloadType(content_desc->type(), - static_cast(content_desc)->codecs(), - payload_type); + Codec new_codec = GetCodecWithPayloadType( + content_desc->type(), content_desc->codecs(), payload_type); AddFeedbackParameter(feedback_param, &new_codec); - AddOrReplaceCodec(content_desc, new_codec); + AddOrReplaceCodec(content_desc, new_codec); } // Adds or updates existing video codec corresponding to `payload_type` // according to `packetization`. -void UpdateVideoCodecPacketization(VideoContentDescription* video_desc, +void UpdateVideoCodecPacketization(MediaContentDescription* desc, int payload_type, absl::string_view packetization) { - if (packetization != cricket::kPacketizationParamRaw) { + if (packetization != kPacketizationParamRaw) { // Ignore unsupported packetization attribute. return; } // Codec might already have been populated (from rtpmap). - cricket::VideoCodec codec = GetCodecWithPayloadType( - video_desc->type(), video_desc->codecs(), payload_type); + Codec codec = + GetCodecWithPayloadType(desc->type(), desc->codecs(), payload_type); codec.packetization = std::string(packetization); - AddOrReplaceCodec(video_desc, - codec); + AddOrReplaceCodec(desc, codec); } -template -absl::optional PopWildcardCodec(std::vector* codecs) { +std::optional PopWildcardCodec(std::vector* codecs) { + RTC_DCHECK(codecs); for (auto iter = codecs->begin(); iter != codecs->end(); ++iter) { if (iter->id == kWildcardPayloadType) { - T wildcard_codec = *iter; + Codec wildcard_codec = *iter; codecs->erase(iter); return wildcard_codec; } } - return absl::nullopt; + return std::nullopt; } -template -void UpdateFromWildcardCodecs(cricket::MediaContentDescriptionImpl* desc) { +void UpdateFromWildcardCodecs(MediaContentDescription* desc) { + RTC_DCHECK(desc); auto codecs = desc->codecs(); - absl::optional wildcard_codec = PopWildcardCodec(&codecs); + std::optional wildcard_codec = PopWildcardCodec(&codecs); if (!wildcard_codec) { return; } for (auto& codec : codecs) { AddFeedbackParameters(wildcard_codec->feedback_params, &codec); } + // Special treatment for transport-wide feedback params. 
+ if (wildcard_codec->feedback_params.Has({"ack", "ccfb"})) { + desc->set_rtcp_fb_ack_ccfb(true); + } desc->set_codecs(codecs); } void AddAudioAttribute(const std::string& name, absl::string_view value, - AudioContentDescription* audio_desc) { + MediaContentDescription* desc) { + RTC_DCHECK(desc); if (value.empty()) { return; } - std::vector codecs = audio_desc->codecs(); - for (cricket::AudioCodec& codec : codecs) { + std::vector codecs = desc->codecs(); + for (Codec& codec : codecs) { codec.params[name] = std::string(value); } - audio_desc->set_codecs(codecs); + desc->set_codecs(codecs); } bool ParseContent(absl::string_view message, - const cricket::MediaType media_type, + const webrtc::MediaType media_type, int mline_index, absl::string_view protocol, const std::vector& payload_types, @@ -3033,8 +2998,8 @@ bool ParseContent(absl::string_view message, RTC_DCHECK(content_name != NULL); RTC_DCHECK(transport != NULL); - if (media_type == cricket::MEDIA_TYPE_AUDIO) { - MaybeCreateStaticPayloadAudioCodecs(payload_types, media_desc->as_audio()); + if (media_type == webrtc::MediaType::AUDIO) { + MaybeCreateStaticPayloadAudioCodecs(payload_types, media_desc); } // The media level "ice-ufrag" and "ice-pwd". @@ -3049,13 +3014,13 @@ bool ParseContent(absl::string_view message, std::string ptime_as_string; std::vector stream_ids; std::string track_id; - SdpSerializer deserializer; + SimulcastSdpSerializer deserializer; std::vector rids; SimulcastDescription simulcast; // Loop until the next m line while (!IsLineType(message, kLineTypeMedia, *pos)) { - absl::optional line = GetLine(message, pos); + std::optional line = GetLine(message, pos); if (!line.has_value()) { if (*pos >= message.size()) { break; // Done parsing @@ -3069,9 +3034,9 @@ bool ParseContent(absl::string_view message, if (IsLineType(*line, kLineTypeSessionBandwidth)) { std::string bandwidth; std::string bandwidth_type; - if (!rtc::tokenize_first(line->substr(kLinePrefixLength), - kSdpDelimiterColonChar, &bandwidth_type, - &bandwidth)) { + if (!tokenize_first(line->substr(kLinePrefixLength), + kSdpDelimiterColonChar, &bandwidth_type, + &bandwidth)) { return ParseFailed( *line, "b= syntax error, does not match b=:.", @@ -3113,7 +3078,7 @@ bool ParseContent(absl::string_view message, // Parse the media level connection data. 
if (IsLineType(*line, kLineTypeConnection)) { - rtc::SocketAddress addr; + SocketAddress addr; if (!ParseConnectionData(*line, &addr, error)) { return false; } @@ -3168,7 +3133,7 @@ bool ParseContent(absl::string_view message, return false; } } else if (HasAttribute(*line, kAttributeFingerprint)) { - std::unique_ptr fingerprint; + std::unique_ptr fingerprint; if (!ParseFingerprintAttribute(*line, &fingerprint, error)) { return false; } @@ -3177,8 +3142,7 @@ bool ParseContent(absl::string_view message, if (!ParseDtlsSetup(*line, &(transport->connection_role), error)) { return false; } - } else if (cricket::IsDtlsSctp(protocol) && - media_type == cricket::MEDIA_TYPE_DATA) { + } else if (IsDtlsSctp(protocol) && media_type == webrtc::MediaType::DATA) { // // SCTP specific attributes // @@ -3202,7 +3166,7 @@ bool ParseContent(absl::string_view message, // Ignore a=sctpmap: from early versions of draft-ietf-mmusic-sctp-sdp continue; } - } else if (cricket::IsRtpProtocol(protocol)) { + } else if (IsRtpProtocol(protocol)) { // // RTP specific attributes // @@ -3220,10 +3184,6 @@ bool ParseContent(absl::string_view message, if (!ParseSsrcAttribute(*line, &ssrc_infos, msid_signaling, error)) { return false; } - } else if (HasAttribute(*line, kAttributeCrypto)) { - if (!ParseCryptoAttribute(*line, media_desc, error)) { - return false; - } } else if (HasAttribute(*line, kAttributeRtpmap)) { if (!ParseRtpmapAttribute(*line, media_type, payload_types, media_desc, error)) { @@ -3277,7 +3237,7 @@ bool ParseContent(absl::string_view message, if (!ParseMsidAttribute(*line, &stream_ids, &track_id, error)) { return false; } - *msid_signaling |= cricket::kMsidSignalingMediaSection; + *msid_signaling |= kMsidSignalingMediaSection; } else if (HasAttribute(*line, kAttributeRid)) { const size_t kRidPrefixLength = kLinePrefixLength + arraysize(kAttributeRid); @@ -3287,7 +3247,7 @@ bool ParseContent(absl::string_view message, } RTCErrorOr error_or_rid_description = deserializer.DeserializeRidDescription( - line->substr(kRidPrefixLength)); + *media_desc, line->substr(kRidPrefixLength)); // Malformed a=rid lines are discarded. if (!error_or_rid_description.ok()) { @@ -3337,8 +3297,8 @@ bool ParseContent(absl::string_view message, } } - // Remove duplicate or inconsistent rids. - RemoveInvalidRidDescriptions(payload_types, &rids); + // Remove duplicate rids. + RemoveDuplicateRidDescriptions(payload_types, &rids); // If simulcast is specifed, split the rids into send and receive. // Rids that do not appear in simulcast attribute will be removed. @@ -3384,15 +3344,15 @@ bool ParseContent(absl::string_view message, if (!ssrc_infos.empty()) { CreateTracksFromSsrcInfos(ssrc_infos, stream_ids, track_id, &tracks, *msid_signaling); - } else if (media_type != cricket::MEDIA_TYPE_DATA && - (*msid_signaling & cricket::kMsidSignalingMediaSection)) { + } else if (media_type != webrtc::MediaType::DATA && + (*msid_signaling & kMsidSignalingMediaSection)) { // If the stream_ids/track_id was signaled but SSRCs were unsignaled we // still create a track. This isn't done for data media types because // StreamParams aren't used for SCTP streams, and RTP data channels don't // support unsignaled SSRCs. // If track id was not specified, create a random one. 
if (track_id.empty()) { - track_id = rtc::CreateRandomString(8); + track_id = CreateRandomString(8); } CreateTrackWithNoSsrcs(stream_ids, track_id, send_rids, &tracks); } @@ -3415,27 +3375,17 @@ bool ParseContent(absl::string_view message, media_desc->AddStream(track); } - if (media_type == cricket::MEDIA_TYPE_AUDIO) { - AudioContentDescription* audio_desc = media_desc->as_audio(); - UpdateFromWildcardCodecs(audio_desc); - - // Verify audio codec ensures that no audio codec has been populated with - // only fmtp. - if (!VerifyAudioCodecs(audio_desc)) { - return ParseFailed("Failed to parse audio codecs correctly.", error); - } - AddAudioAttribute(kCodecParamMaxPTime, maxptime_as_string, audio_desc); - AddAudioAttribute(kCodecParamPTime, ptime_as_string, audio_desc); + UpdateFromWildcardCodecs(media_desc); + // Codec has not been populated correctly unless the name has been set. This + // can happen if an SDP has an fmtp or rtcp-fb with a payload type but doesn't + // have a corresponding "rtpmap" line. This should lead to a parse error. + if (!absl::c_all_of(media_desc->codecs(), + [](const Codec codec) { return !codec.name.empty(); })) { + return ParseFailed("Failed to parse codecs correctly.", error); } - - if (media_type == cricket::MEDIA_TYPE_VIDEO) { - VideoContentDescription* video_desc = media_desc->as_video(); - UpdateFromWildcardCodecs(video_desc); - // Verify video codec ensures that no video codec has been populated with - // only rtcp-fb. - if (!VerifyVideoCodecs(video_desc)) { - return ParseFailed("Failed to parse video codecs correctly.", error); - } + if (media_type == webrtc::MediaType::AUDIO) { + AddAudioAttribute(kCodecParamMaxPTime, maxptime_as_string, media_desc); + AddAudioAttribute(kCodecParamPTime, ptime_as_string, media_desc); } // RFC 5245 @@ -3462,8 +3412,8 @@ bool ParseSsrcAttribute(absl::string_view line, // a=ssrc: // a=ssrc: : std::string field1, field2; - if (!rtc::tokenize_first(line.substr(kLinePrefixLength), - kSdpDelimiterSpaceChar, &field1, &field2)) { + if (!tokenize_first(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar, + &field1, &field2)) { const size_t expected_fields = 2; return ParseFailedExpectFieldNum(line, expected_fields, error); } @@ -3480,9 +3430,8 @@ bool ParseSsrcAttribute(absl::string_view line, std::string attribute; std::string value; - if (!rtc::tokenize_first(field2, kSdpDelimiterColonChar, &attribute, - &value)) { - rtc::StringBuilder description; + if (!tokenize_first(field2, kSdpDelimiterColonChar, &attribute, &value)) { + StringBuilder description; description << "Failed to get the ssrc attribute value from " << field2 << ". Expected format :."; return ParseFailed(line, description.Release(), error); @@ -3511,7 +3460,7 @@ bool ParseSsrcAttribute(absl::string_view line, // draft-alvestrand-mmusic-msid-00 // msid:identifier [appdata] std::vector fields = - rtc::split(value, kSdpDelimiterSpaceChar); + split(value, kSdpDelimiterSpaceChar); if (fields.size() < 1 || fields.size() > 2) { return ParseFailed( line, "Expected format \"msid:[ ]\".", error); @@ -3520,7 +3469,7 @@ bool ParseSsrcAttribute(absl::string_view line, if (fields.size() == 2) { ssrc_info.track_id = std::string(fields[1]); } - *msid_signaling |= cricket::kMsidSignalingSsrcAttribute; + *msid_signaling |= kMsidSignalingSsrcAttribute; } else { RTC_LOG(LS_INFO) << "Ignored unknown ssrc-specific attribute: " << line; } @@ -3534,7 +3483,7 @@ bool ParseSsrcGroupAttribute(absl::string_view line, // RFC 5576 // a=ssrc-group: ... 
std::vector fields = - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); + split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); const size_t expected_min_fields = 2; if (fields.size() < expected_min_fields) { return ParseFailedExpectMinFieldNum(line, expected_min_fields, error); @@ -3560,37 +3509,6 @@ bool ParseSsrcGroupAttribute(absl::string_view line, return true; } -bool ParseCryptoAttribute(absl::string_view line, - MediaContentDescription* media_desc, - SdpParseError* error) { - std::vector fields = - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); - // RFC 4568 - // a=crypto: [] - const size_t expected_min_fields = 3; - if (fields.size() < expected_min_fields) { - return ParseFailedExpectMinFieldNum(line, expected_min_fields, error); - } - std::string tag_value; - if (!GetValue(fields[0], kAttributeCrypto, &tag_value, error)) { - return false; - } - int tag = 0; - if (!GetValueFromString(line, tag_value, &tag, error)) { - return false; - } - const absl::string_view crypto_suite = fields[1]; - const absl::string_view key_params = fields[2]; - absl::string_view session_params; - if (fields.size() > 3) { - session_params = fields[3]; - } - - media_desc->AddCrypto( - CryptoParams(tag, crypto_suite, key_params, session_params)); - return true; -} - // Updates or creates a new codec entry in the audio description with according // to `name`, `clockrate`, `bitrate`, and `channels`. void UpdateCodec(int payload_type, @@ -3598,41 +3516,39 @@ void UpdateCodec(int payload_type, int clockrate, int bitrate, size_t channels, - AudioContentDescription* audio_desc) { + MediaContentDescription* desc) { // Codec may already be populated with (only) optional parameters // (from an fmtp). - cricket::AudioCodec codec = GetCodecWithPayloadType( - audio_desc->type(), audio_desc->codecs(), payload_type); + Codec codec = + GetCodecWithPayloadType(desc->type(), desc->codecs(), payload_type); codec.name = std::string(name); codec.clockrate = clockrate; codec.bitrate = bitrate; codec.channels = channels; - AddOrReplaceCodec(audio_desc, - codec); + AddOrReplaceCodec(desc, codec); } // Updates or creates a new codec entry in the video description according to // `name`, `width`, `height`, and `framerate`. void UpdateCodec(int payload_type, absl::string_view name, - VideoContentDescription* video_desc) { + MediaContentDescription* desc) { // Codec may already be populated with (only) optional parameters // (from an fmtp). 
- cricket::VideoCodec codec = GetCodecWithPayloadType( - video_desc->type(), video_desc->codecs(), payload_type); + Codec codec = + GetCodecWithPayloadType(desc->type(), desc->codecs(), payload_type); codec.name = std::string(name); - AddOrReplaceCodec(video_desc, - codec); + AddOrReplaceCodec(desc, codec); } bool ParseRtpmapAttribute(absl::string_view line, - const cricket::MediaType media_type, + const webrtc::MediaType media_type, const std::vector& payload_types, MediaContentDescription* media_desc, SdpParseError* error) { static const int kFirstDynamicPayloadTypeLowerRange = 35; std::vector fields = - rtc::split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); + split(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar); // RFC 4566 // a=rtpmap: /[/] const size_t expected_min_fields = 2; @@ -3655,7 +3571,7 @@ bool ParseRtpmapAttribute(absl::string_view line, << line; return true; } - std::vector codec_params = rtc::split(fields[1], '/'); + std::vector codec_params = split(fields[1], '/'); // /[/] // 2 mandatory fields if (codec_params.size() < 2 || codec_params.size() > 3) { @@ -3670,13 +3586,12 @@ bool ParseRtpmapAttribute(absl::string_view line, return false; } - if (media_type == cricket::MEDIA_TYPE_VIDEO) { - VideoContentDescription* video_desc = media_desc->as_video(); - for (const cricket::VideoCodec& existing_codec : video_desc->codecs()) { + if (media_type == webrtc::MediaType::VIDEO) { + for (const Codec& existing_codec : media_desc->codecs()) { if (!existing_codec.name.empty() && payload_type == existing_codec.id && (!absl::EqualsIgnoreCase(encoding_name, existing_codec.name) || clock_rate != existing_codec.clockrate)) { - rtc::StringBuilder description; + StringBuilder description; description << "Duplicate " << (payload_type < kFirstDynamicPayloadTypeLowerRange @@ -3686,8 +3601,8 @@ bool ParseRtpmapAttribute(absl::string_view line, return ParseFailed(line, description.Release(), error); } } - UpdateCodec(payload_type, encoding_name, video_desc); - } else if (media_type == cricket::MEDIA_TYPE_AUDIO) { + UpdateCodec(payload_type, encoding_name, media_desc); + } else if (media_type == webrtc::MediaType::AUDIO) { // RFC 4566 // For audio streams, indicates the number // of audio channels. This parameter is OPTIONAL and may be @@ -3703,13 +3618,12 @@ bool ParseRtpmapAttribute(absl::string_view line, return ParseFailed(line, "At most 24 channels are supported.", error); } - AudioContentDescription* audio_desc = media_desc->as_audio(); - for (const cricket::AudioCodec& existing_codec : audio_desc->codecs()) { + for (const Codec& existing_codec : media_desc->codecs()) { // TODO(crbug.com/1338902) re-add checks for clockrate and number of // channels. if (!existing_codec.name.empty() && payload_type == existing_codec.id && (!absl::EqualsIgnoreCase(encoding_name, existing_codec.name))) { - rtc::StringBuilder description; + StringBuilder description; description << "Duplicate " << (payload_type < kFirstDynamicPayloadTypeLowerRange @@ -3720,7 +3634,7 @@ bool ParseRtpmapAttribute(absl::string_view line, } } UpdateCodec(payload_type, encoding_name, clock_rate, 0, channels, - audio_desc); + media_desc); } return true; } @@ -3729,7 +3643,7 @@ bool ParseFmtpParam(absl::string_view line, std::string* parameter, std::string* value, SdpParseError* error) { - if (!rtc::tokenize_first(line, kSdpDelimiterEqualChar, parameter, value)) { + if (!tokenize_first(line, kSdpDelimiterEqualChar, parameter, value)) { // Support for non-key-value lines like RFC 2198 or RFC 4733. 
*parameter = ""; *value = std::string(line); @@ -3739,12 +3653,33 @@ bool ParseFmtpParam(absl::string_view line, return true; } +bool ParseFmtpParameterSet(absl::string_view line_params, + webrtc::CodecParameterMap& codec_params, + SdpParseError* error) { + // Parse out format specific parameters. + for (absl::string_view param : + split(line_params, kSdpDelimiterSemicolonChar)) { + std::string name; + std::string value; + if (!ParseFmtpParam(absl::StripAsciiWhitespace(param), &name, &value, + error)) { + return false; + } + if (codec_params.find(name) != codec_params.end()) { + RTC_LOG(LS_INFO) << "Overwriting duplicate fmtp parameter with key \"" + << name << "\"."; + } + codec_params[name] = value; + } + return true; +} + bool ParseFmtpAttributes(absl::string_view line, - const cricket::MediaType media_type, + const webrtc::MediaType media_type, MediaContentDescription* media_desc, SdpParseError* error) { - if (media_type != cricket::MEDIA_TYPE_AUDIO && - media_type != cricket::MEDIA_TYPE_VIDEO) { + if (media_type != webrtc::MediaType::AUDIO && + media_type != webrtc::MediaType::VIDEO) { return true; } @@ -3755,9 +3690,8 @@ bool ParseFmtpAttributes(absl::string_view line, // a=fmtp: // At least two fields, whereas the second one is any of the optional // parameters. - if (!rtc::tokenize_first(line.substr(kLinePrefixLength), - kSdpDelimiterSpaceChar, &line_payload, - &line_params)) { + if (!tokenize_first(line.substr(kLinePrefixLength), kSdpDelimiterSpaceChar, + &line_payload, &line_params)) { ParseFailedExpectMinFieldNum(line, 2, error); return false; } @@ -3775,41 +3709,27 @@ bool ParseFmtpAttributes(absl::string_view line, } // Parse out format specific parameters. - cricket::CodecParameterMap codec_params; - for (absl::string_view param : - rtc::split(line_params, kSdpDelimiterSemicolonChar)) { - std::string name; - std::string value; - if (!ParseFmtpParam(absl::StripAsciiWhitespace(param), &name, &value, - error)) { - return false; - } - if (codec_params.find(name) != codec_params.end()) { - RTC_LOG(LS_INFO) << "Overwriting duplicate fmtp parameter with key \"" - << name << "\"."; - } - codec_params[name] = value; + webrtc::CodecParameterMap codec_params; + if (!ParseFmtpParameterSet(line_params, codec_params, error)) { + return false; } - if (media_type == cricket::MEDIA_TYPE_AUDIO) { - UpdateCodec( - media_desc, payload_type, codec_params); - } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { - UpdateCodec( - media_desc, payload_type, codec_params); + if (media_type == webrtc::MediaType::AUDIO || + media_type == webrtc::MediaType::VIDEO) { + UpdateCodec(media_desc, payload_type, codec_params); } return true; } bool ParsePacketizationAttribute(absl::string_view line, - const cricket::MediaType media_type, + const webrtc::MediaType media_type, MediaContentDescription* media_desc, SdpParseError* error) { - if (media_type != cricket::MEDIA_TYPE_VIDEO) { + if (media_type != webrtc::MediaType::VIDEO) { return true; } std::vector packetization_fields = - rtc::split(line, kSdpDelimiterSpaceChar); + split(line, kSdpDelimiterSpaceChar); if (packetization_fields.size() < 2) { return ParseFailedGetValue(line, kAttributePacketization, error); } @@ -3824,21 +3744,20 @@ bool ParsePacketizationAttribute(absl::string_view line, return false; } absl::string_view packetization = packetization_fields[1]; - UpdateVideoCodecPacketization(media_desc->as_video(), payload_type, - packetization); + UpdateVideoCodecPacketization(media_desc, payload_type, packetization); return true; } bool 
ParseRtcpFbAttribute(absl::string_view line, - const cricket::MediaType media_type, + const webrtc::MediaType media_type, MediaContentDescription* media_desc, SdpParseError* error) { - if (media_type != cricket::MEDIA_TYPE_AUDIO && - media_type != cricket::MEDIA_TYPE_VIDEO) { + if (media_type != webrtc::MediaType::AUDIO && + media_type != webrtc::MediaType::VIDEO) { return true; } std::vector rtcp_fb_fields = - rtc::split(line, kSdpDelimiterSpaceChar); + split(line, kSdpDelimiterSpaceChar); if (rtcp_fb_fields.size() < 2) { return ParseFailedGetValue(line, kAttributeRtcpFb, error); } @@ -3860,14 +3779,11 @@ bool ParseRtcpFbAttribute(absl::string_view line, ++iter) { param.append(iter->data(), iter->length()); } - const cricket::FeedbackParam feedback_param(id, param); + const FeedbackParam feedback_param(id, param); - if (media_type == cricket::MEDIA_TYPE_AUDIO) { - UpdateCodec( - media_desc, payload_type, feedback_param); - } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { - UpdateCodec( - media_desc, payload_type, feedback_param); + if (media_type == webrtc::MediaType::AUDIO || + media_type == webrtc::MediaType::VIDEO) { + UpdateCodec(media_desc, payload_type, feedback_param); } return true; } diff --git a/pc/webrtc_sdp.h b/pc/webrtc_sdp.h index f7759bd139..884b45414a 100644 --- a/pc/webrtc_sdp.h +++ b/pc/webrtc_sdp.h @@ -31,14 +31,6 @@ #include "rtc_base/strings/string_builder.h" #include "rtc_base/system/rtc_export.h" -namespace cricket { -class Candidate; -} // namespace cricket - -namespace rtc { -class StringBuilder; -} // namespace rtc - namespace webrtc { class IceCandidateInterface; class JsepIceCandidate; @@ -58,8 +50,7 @@ std::string SdpSerializeCandidate(const IceCandidateInterface& candidate); // Serializes a cricket Candidate. // candidate - The candidate to be serialized. -RTC_EXPORT std::string SdpSerializeCandidate( - const cricket::Candidate& candidate); +RTC_EXPORT std::string SdpSerializeCandidate(const Candidate& candidate); // Deserializes the passed in SDP string to a JsepSessionDescription. // message - SDP string to be Deserialized. @@ -91,7 +82,7 @@ RTC_EXPORT bool SdpDeserializeCandidate(absl::string_view message, // return - true on success, false on failure. RTC_EXPORT bool SdpDeserializeCandidate(absl::string_view transport_name, absl::string_view message, - cricket::Candidate* candidate, + Candidate* candidate, SdpParseError* error); // Parses `message` according to the grammar defined in RFC 5245, Section 15.1 @@ -101,7 +92,7 @@ RTC_EXPORT bool SdpDeserializeCandidate(absl::string_view transport_name, // If `is_raw` is false, `message` is expected to be prefixed with "a=". // If `is_raw` is true, no prefix is expected in `messaage`. RTC_EXPORT bool ParseCandidate(absl::string_view message, - cricket::Candidate* candidate, + Candidate* candidate, SdpParseError* error, bool is_raw); @@ -109,8 +100,13 @@ RTC_EXPORT bool ParseCandidate(absl::string_view message, // parameters are not considered to be part of the FMTP line, see the function // IsFmtpParam(). Returns true if the set of FMTP parameters is nonempty, false // otherwise. -bool WriteFmtpParameters(const cricket::CodecParameterMap& parameters, - rtc::StringBuilder* os); +bool WriteFmtpParameters(const webrtc::CodecParameterMap& parameters, + StringBuilder* os); + +// Parses a string into an FMTP parameter set, in key-value format. 
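Aside: since ParseFmtpParameterSet() is now exposed from pc/webrtc_sdp.h (its new declaration continues immediately below this comment), code outside webrtc_sdp.cc can reuse it. A hedged usage sketch; the include set, the headers assumed to provide webrtc::CodecParameterMap and webrtc::SdpParseError, and the example function name are assumptions based on this diff, not verified build instructions.

```cpp
#include "absl/strings/string_view.h"
#include "api/jsep.h"            // webrtc::SdpParseError (assumed location).
#include "api/rtp_parameters.h"  // webrtc::CodecParameterMap (assumed location).
#include "pc/webrtc_sdp.h"

// Sketch only: parse the parameter part of "a=fmtp:98 profile-id=2;max-fr=30".
bool ParseVp9FmtpExample() {
  webrtc::CodecParameterMap params;
  webrtc::SdpParseError error;
  if (!webrtc::ParseFmtpParameterSet("profile-id=2;max-fr=30", params,
                                     &error)) {
    return false;  // error.description holds the reason on failure.
  }
  // On success the map holds "profile-id" -> "2" and "max-fr" -> "30".
  return params["profile-id"] == "2" && params["max-fr"] == "30";
}
```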
+bool ParseFmtpParameterSet(absl::string_view line_params, + webrtc::CodecParameterMap& codec_params, + SdpParseError* error); } // namespace webrtc diff --git a/pc/webrtc_sdp_unittest.cc b/pc/webrtc_sdp_unittest.cc index 1c0c4dde94..ba696d4b41 100644 --- a/pc/webrtc_sdp_unittest.cc +++ b/pc/webrtc_sdp_unittest.cc @@ -14,6 +14,7 @@ #include #include #include +#include #include #include #include @@ -21,11 +22,12 @@ #include "absl/algorithm/container.h" #include "absl/memory/memory.h" +#include "absl/strings/str_cat.h" #include "absl/strings/str_replace.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" -#include "api/crypto_params.h" +#include "api/candidate.h" +#include "api/jsep.h" #include "api/jsep_session_description.h" #include "api/media_types.h" #include "api/rtp_parameters.h" @@ -46,7 +48,6 @@ #include "rtc_base/message_digest.h" #include "rtc_base/socket_address.h" #include "rtc_base/ssl_fingerprint.h" -#include "rtc_base/string_encode.h" #include "test/gmock.h" #include "test/gtest.h" @@ -55,44 +56,37 @@ #endif #include "pc/webrtc_sdp.h" -using cricket::AudioCodec; -using cricket::AudioContentDescription; -using cricket::Candidate; -using cricket::ContentGroup; -using cricket::ContentInfo; -using cricket::CryptoParams; -using cricket::ICE_CANDIDATE_COMPONENT_RTCP; -using cricket::ICE_CANDIDATE_COMPONENT_RTP; -using cricket::kFecSsrcGroupSemantics; -using cricket::LOCAL_PORT_TYPE; -using cricket::MediaProtocolType; -using cricket::RELAY_PORT_TYPE; -using cricket::RidDescription; -using cricket::RidDirection; -using cricket::SctpDataContentDescription; -using cricket::SessionDescription; -using cricket::SimulcastDescription; -using cricket::SimulcastLayer; -using cricket::StreamParams; -using cricket::STUN_PORT_TYPE; -using cricket::TransportDescription; -using cricket::TransportInfo; -using cricket::VideoCodec; -using cricket::VideoContentDescription; using ::testing::ElementsAre; using ::testing::Field; +using ::testing::Property; +using ::webrtc::AudioContentDescription; +using webrtc::Candidate; +using ::webrtc::ContentGroup; +using ::webrtc::ContentInfo; +using ::webrtc::ICE_CANDIDATE_COMPONENT_RTCP; +using ::webrtc::ICE_CANDIDATE_COMPONENT_RTP; using webrtc::IceCandidateCollection; using webrtc::IceCandidateInterface; +using webrtc::IceCandidateType; using webrtc::JsepIceCandidate; using webrtc::JsepSessionDescription; +using ::webrtc::kFecSsrcGroupSemantics; +using ::webrtc::MediaProtocolType; +using ::webrtc::RidDescription; +using ::webrtc::RidDirection; using webrtc::RtpExtension; using webrtc::RtpTransceiverDirection; +using ::webrtc::SctpDataContentDescription; using webrtc::SdpParseError; using webrtc::SdpType; +using ::webrtc::SessionDescription; using webrtc::SessionDescriptionInterface; - -typedef std::vector AudioCodecs; -typedef std::vector Candidates; +using ::webrtc::SimulcastDescription; +using ::webrtc::SimulcastLayer; +using ::webrtc::StreamParams; +using ::webrtc::TransportDescription; +using ::webrtc::TransportInfo; +using ::webrtc::VideoContentDescription; static const uint32_t kDefaultSctpPort = 5000; static const uint16_t kUnusualSctpPort = 9556; @@ -107,13 +101,6 @@ static const char kCandidateFoundation1[] = "a0+B/1"; static const char kCandidateFoundation2[] = "a0+B/2"; static const char kCandidateFoundation3[] = "a0+B/3"; static const char kCandidateFoundation4[] = "a0+B/4"; -static const char kAttributeCryptoVoice[] = - "a=crypto:1 AES_CM_128_HMAC_SHA1_32 " - 
"inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 " - "dummy_session_params\r\n"; -static const char kAttributeCryptoVideo[] = - "a=crypto:1 AES_CM_128_HMAC_SHA1_80 " - "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n"; static const char kFingerprint[] = "a=fingerprint:sha-1 " "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n"; @@ -146,10 +133,6 @@ struct CodecParams { int maxaveragebitrate; }; -// TODO(deadbeef): In these reference strings, use "a=fingerprint" by default -// instead of "a=crypto", and have an explicit test for adding "a=crypto". -// Currently it's the other way around. - // Reference sdp string static const char kSdpFullString[] = "v=0\r\n" @@ -176,18 +159,17 @@ static const char kSdpFullString[] = "raddr 192.168.1.5 rport 2348 " "generation 2\r\n" "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" "a=mid:audio_content_name\r\n" "a=sendrecv\r\n" + "a=msid:local_stream_1 audio_track_id_1\r\n" "a=rtcp-mux\r\n" "a=rtcp-rsize\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_32 " - "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 " - "dummy_session_params\r\n" "a=rtpmap:111 opus/48000/2\r\n" "a=rtpmap:103 ISAC/16000\r\n" "a=rtpmap:104 ISAC/32000\r\n" "a=ssrc:1 cname:stream_1_cname\r\n" - "a=ssrc:1 msid:local_stream_1 audio_track_id_1\r\n" "m=video 3457 RTP/SAVPF 120\r\n" "c=IN IP4 74.125.224.39\r\n" "a=rtcp:3456 IN IP4 74.125.224.39\r\n" @@ -204,16 +186,15 @@ static const char kSdpFullString[] = "a=candidate:a0+B/4 1 udp 2130706432 74.125.224.39 3457 typ relay " "generation 2\r\n" "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" "a=mid:video_content_name\r\n" "a=sendrecv\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_80 " - "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n" + "a=msid:local_stream_1 video_track_id_1\r\n" "a=rtpmap:120 VP8/90000\r\n" "a=ssrc-group:FEC 2 3\r\n" "a=ssrc:2 cname:stream_1_cname\r\n" - "a=ssrc:2 msid:local_stream_1 video_track_id_1\r\n" - "a=ssrc:3 cname:stream_1_cname\r\n" - "a=ssrc:3 msid:local_stream_1 video_track_id_1\r\n"; + "a=ssrc:3 cname:stream_1_cname\r\n"; // SDP reference string without the candidates. 
static const char kSdpString[] = @@ -227,32 +208,32 @@ static const char kSdpString[] = "c=IN IP4 0.0.0.0\r\n" "a=rtcp:9 IN IP4 0.0.0.0\r\n" "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:audio_content_name\r\n" "a=sendrecv\r\n" + "a=msid:local_stream_1 audio_track_id_1\r\n" "a=rtcp-mux\r\n" "a=rtcp-rsize\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_32 " - "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 " - "dummy_session_params\r\n" "a=rtpmap:111 opus/48000/2\r\n" "a=rtpmap:103 ISAC/16000\r\n" "a=rtpmap:104 ISAC/32000\r\n" "a=ssrc:1 cname:stream_1_cname\r\n" - "a=ssrc:1 msid:local_stream_1 audio_track_id_1\r\n" "m=video 9 RTP/SAVPF 120\r\n" "c=IN IP4 0.0.0.0\r\n" "a=rtcp:9 IN IP4 0.0.0.0\r\n" "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:video_content_name\r\n" "a=sendrecv\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_80 " - "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n" + "a=msid:local_stream_1 video_track_id_1\r\n" "a=rtpmap:120 VP8/90000\r\n" "a=ssrc-group:FEC 2 3\r\n" "a=ssrc:2 cname:stream_1_cname\r\n" - "a=ssrc:2 msid:local_stream_1 video_track_id_1\r\n" - "a=ssrc:3 cname:stream_1_cname\r\n" - "a=ssrc:3 msid:local_stream_1 video_track_id_1\r\n"; + "a=ssrc:3 cname:stream_1_cname\r\n"; // draft-ietf-mmusic-sctp-sdp-03 static const char kSdpSctpDataChannelString[] = @@ -260,6 +241,9 @@ static const char kSdpSctpDataChannelString[] = "c=IN IP4 0.0.0.0\r\n" "a=ice-ufrag:ufrag_data\r\n" "a=ice-pwd:pwd_data\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:data_content_name\r\n" "a=sctpmap:5000 webrtc-datachannel 1024\r\n"; @@ -272,6 +256,9 @@ static const char kSdpSctpDataChannelStringWithSctpPort[] = "c=IN IP4 0.0.0.0\r\n" "a=ice-ufrag:ufrag_data\r\n" "a=ice-pwd:pwd_data\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:data_content_name\r\n"; // draft-ietf-mmusic-sctp-sdp-26 @@ -281,6 +268,9 @@ static const char kSdpSctpDataChannelStringWithSctpColonPort[] = "c=IN IP4 0.0.0.0\r\n" "a=ice-ufrag:ufrag_data\r\n" "a=ice-pwd:pwd_data\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:data_content_name\r\n"; static const char kSdpSctpDataChannelWithCandidatesString[] = @@ -295,6 +285,9 @@ static const char kSdpSctpDataChannelWithCandidatesString[] = "generation 2\r\n" "a=ice-ufrag:ufrag_data\r\n" "a=ice-pwd:pwd_data\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:data_content_name\r\n" "a=sctpmap:5000 webrtc-datachannel 1024\r\n"; @@ -323,6 +316,9 @@ static const char kSdpAudioString[] = "c=IN IP4 0.0.0.0\r\n" "a=rtcp:9 IN IP4 0.0.0.0\r\n" "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:audio_content_name\r\n" "a=sendrecv\r\n" "a=rtpmap:111 opus/48000/2\r\n" @@ -334,6 +330,9 @@ static const char kSdpVideoString[] = "c=IN IP4 0.0.0.0\r\n" "a=rtcp:9 IN IP4 0.0.0.0\r\n" "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:video_content_name\r\n" "a=sendrecv\r\n" "a=rtpmap:120 VP8/90000\r\n" @@ -367,32 +366,31 @@ static const char 
kBundleOnlySdpFullString[] = "raddr 192.168.1.5 rport 2348 " "generation 2\r\n" "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:audio_content_name\r\n" + "a=msid:local_stream_1 audio_track_id_1\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" "a=rtcp-rsize\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_32 " - "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 " - "dummy_session_params\r\n" "a=rtpmap:111 opus/48000/2\r\n" "a=rtpmap:103 ISAC/16000\r\n" "a=rtpmap:104 ISAC/32000\r\n" "a=ssrc:1 cname:stream_1_cname\r\n" - "a=ssrc:1 msid:local_stream_1 audio_track_id_1\r\n" "m=video 0 RTP/SAVPF 120\r\n" "c=IN IP4 0.0.0.0\r\n" "a=rtcp:9 IN IP4 0.0.0.0\r\n" "a=bundle-only\r\n" "a=mid:video_content_name\r\n" + "a=msid:local_stream_1 video_track_id_1\r\n" "a=sendrecv\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_80 " - "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n" "a=rtpmap:120 VP8/90000\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" "a=ssrc-group:FEC 2 3\r\n" "a=ssrc:2 cname:stream_1_cname\r\n" - "a=ssrc:2 msid:local_stream_1 video_track_id_1\r\n" - "a=ssrc:3 cname:stream_1_cname\r\n" - "a=ssrc:3 msid:local_stream_1 video_track_id_1\r\n"; + "a=ssrc:3 cname:stream_1_cname\r\n"; // Plan B SDP reference string, with 2 streams, 2 audio tracks and 3 video // tracks. @@ -421,13 +419,13 @@ static const char kPlanBSdpFullString[] = "raddr 192.168.1.5 rport 2348 " "generation 2\r\n" "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:audio_content_name\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" "a=rtcp-rsize\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_32 " - "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 " - "dummy_session_params\r\n" "a=rtpmap:111 opus/48000/2\r\n" "a=rtpmap:103 ISAC/16000\r\n" "a=rtpmap:104 ISAC/32000\r\n" @@ -451,10 +449,11 @@ static const char kPlanBSdpFullString[] = "a=candidate:a0+B/4 1 udp 2130706432 74.125.224.39 3457 typ relay " "generation 2\r\n" "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:video_content_name\r\n" "a=sendrecv\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_80 " - "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n" "a=rtpmap:120 VP8/90000\r\n" "a=ssrc-group:FEC 2 3\r\n" "a=ssrc:2 cname:stream_1_cname\r\n" @@ -494,14 +493,14 @@ static const char kUnifiedPlanSdpFullString[] = "raddr 192.168.1.5 rport 2348 " "generation 2\r\n" "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:audio_content_name\r\n" "a=msid:local_stream_1 audio_track_id_1\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" "a=rtcp-rsize\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_32 " - "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 " - "dummy_session_params\r\n" "a=rtpmap:111 opus/48000/2\r\n" "a=rtpmap:103 ISAC/16000\r\n" "a=rtpmap:104 ISAC/32000\r\n" @@ -523,11 +522,12 @@ static const char kUnifiedPlanSdpFullString[] = "a=candidate:a0+B/4 1 udp 2130706432 74.125.224.39 3457 typ relay " "generation 2\r\n" "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:video_content_name\r\n" "a=msid:local_stream_1 video_track_id_1\r\n" 
"a=sendrecv\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_80 " - "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n" "a=rtpmap:120 VP8/90000\r\n" "a=ssrc-group:FEC 2 3\r\n" "a=ssrc:2 cname:stream_1_cname\r\n" @@ -537,14 +537,14 @@ static const char kUnifiedPlanSdpFullString[] = "c=IN IP4 0.0.0.0\r\n" "a=rtcp:9 IN IP4 0.0.0.0\r\n" "a=ice-ufrag:ufrag_voice_2\r\na=ice-pwd:pwd_voice_2\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:audio_content_name_2\r\n" "a=msid:local_stream_2 audio_track_id_2\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" "a=rtcp-rsize\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_32 " - "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 " - "dummy_session_params\r\n" "a=rtpmap:111 opus/48000/2\r\n" "a=rtpmap:103 ISAC/16000\r\n" "a=rtpmap:104 ISAC/32000\r\n" @@ -554,11 +554,12 @@ static const char kUnifiedPlanSdpFullString[] = "c=IN IP4 0.0.0.0\r\n" "a=rtcp:9 IN IP4 0.0.0.0\r\n" "a=ice-ufrag:ufrag_video_2\r\na=ice-pwd:pwd_video_2\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:video_content_name_2\r\n" "a=msid:local_stream_2 video_track_id_2\r\n" "a=sendrecv\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_80 " - "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n" "a=rtpmap:120 VP8/90000\r\n" "a=ssrc:5 cname:stream_2_cname\r\n" // Video track 3, stream 2. @@ -566,11 +567,12 @@ static const char kUnifiedPlanSdpFullString[] = "c=IN IP4 0.0.0.0\r\n" "a=rtcp:9 IN IP4 0.0.0.0\r\n" "a=ice-ufrag:ufrag_video_3\r\na=ice-pwd:pwd_video_3\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:video_content_name_3\r\n" "a=msid:local_stream_2 video_track_id_3\r\n" "a=sendrecv\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_80 " - "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n" "a=rtpmap:120 VP8/90000\r\n" "a=ssrc:6 cname:stream_2_cname\r\n"; @@ -607,14 +609,14 @@ static const char kUnifiedPlanSdpFullStringWithSpecialMsid[] = "raddr 192.168.1.5 rport 2348 " "generation 2\r\n" "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:audio_content_name\r\n" "a=sendrecv\r\n" "a=msid:local_stream_1 audio_track_id_1\r\n" "a=rtcp-mux\r\n" "a=rtcp-rsize\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_32 " - "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 " - "dummy_session_params\r\n" "a=rtpmap:111 opus/48000/2\r\n" "a=rtpmap:103 ISAC/16000\r\n" "a=rtpmap:104 ISAC/32000\r\n" @@ -625,15 +627,15 @@ static const char kUnifiedPlanSdpFullStringWithSpecialMsid[] = "c=IN IP4 0.0.0.0\r\n" "a=rtcp:9 IN IP4 0.0.0.0\r\n" "a=ice-ufrag:ufrag_voice_2\r\na=ice-pwd:pwd_voice_2\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:audio_content_name_2\r\n" "a=sendrecv\r\n" "a=msid:local_stream_1 audio_track_id_2\r\n" "a=msid:local_stream_2 audio_track_id_2\r\n" "a=rtcp-mux\r\n" "a=rtcp-rsize\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_32 " - "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 " - "dummy_session_params\r\n" "a=rtpmap:111 opus/48000/2\r\n" "a=rtpmap:103 ISAC/16000\r\n" "a=rtpmap:104 ISAC/32000\r\n" @@ -646,14 +648,14 @@ static const char kUnifiedPlanSdpFullStringWithSpecialMsid[] = "c=IN IP4 0.0.0.0\r\n" "a=rtcp:9 IN IP4 0.0.0.0\r\n" "a=ice-ufrag:ufrag_voice_3\r\na=ice-pwd:pwd_voice_3\r\n" + "a=fingerprint:sha-1 " + 
"4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:audio_content_name_3\r\n" "a=sendrecv\r\n" "a=msid:- audio_track_id_3\r\n" "a=rtcp-mux\r\n" "a=rtcp-rsize\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_32 " - "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 " - "dummy_session_params\r\n" "a=rtpmap:111 opus/48000/2\r\n" "a=rtpmap:103 ISAC/16000\r\n" "a=rtpmap:104 ISAC/32000\r\n" @@ -686,14 +688,14 @@ static const char kUnifiedPlanSdpFullStringNoSsrc[] = "raddr 192.168.1.5 rport 2348 " "generation 2\r\n" "a=ice-ufrag:ufrag_voice\r\na=ice-pwd:pwd_voice\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:audio_content_name\r\n" "a=msid:local_stream_1 audio_track_id_1\r\n" "a=sendrecv\r\n" "a=rtcp-mux\r\n" "a=rtcp-rsize\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_32 " - "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 " - "dummy_session_params\r\n" "a=rtpmap:111 opus/48000/2\r\n" "a=rtpmap:103 ISAC/16000\r\n" "a=rtpmap:104 ISAC/32000\r\n" @@ -714,11 +716,12 @@ static const char kUnifiedPlanSdpFullStringNoSsrc[] = "a=candidate:a0+B/4 1 udp 2130706432 74.125.224.39 3457 typ relay " "generation 2\r\n" "a=ice-ufrag:ufrag_video\r\na=ice-pwd:pwd_video\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=mid:video_content_name\r\n" "a=msid:local_stream_1 video_track_id_1\r\n" "a=sendrecv\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_80 " - "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n" "a=rtpmap:120 VP8/90000\r\n" // Audio track 2, stream 2. "m=audio 9 RTP/SAVPF 111 103 104\r\n" @@ -730,9 +733,6 @@ static const char kUnifiedPlanSdpFullStringNoSsrc[] = "a=sendrecv\r\n" "a=rtcp-mux\r\n" "a=rtcp-rsize\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_32 " - "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 " - "dummy_session_params\r\n" "a=rtpmap:111 opus/48000/2\r\n" "a=rtpmap:103 ISAC/16000\r\n" "a=rtpmap:104 ISAC/32000\r\n" @@ -744,8 +744,6 @@ static const char kUnifiedPlanSdpFullStringNoSsrc[] = "a=mid:video_content_name_2\r\n" "a=msid:local_stream_2 video_track_id_2\r\n" "a=sendrecv\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_80 " - "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n" "a=rtpmap:120 VP8/90000\r\n" // Video track 3, stream 2. "m=video 9 RTP/SAVPF 120\r\n" @@ -755,8 +753,6 @@ static const char kUnifiedPlanSdpFullStringNoSsrc[] = "a=mid:video_content_name_3\r\n" "a=msid:local_stream_2 video_track_id_3\r\n" "a=sendrecv\r\n" - "a=crypto:1 AES_CM_128_HMAC_SHA1_80 " - "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32\r\n" "a=rtpmap:120 VP8/90000\r\n"; // One candidate reference string as per W3c spec. 
@@ -953,6 +949,14 @@ static void ReplaceRejected(bool audio_rejected, } } +static TransportDescription MakeTransportDescription(std::string ufrag, + std::string pwd) { + webrtc::SSLFingerprint fingerprint(webrtc::DIGEST_SHA_1, kIdentityDigest); + return TransportDescription(std::vector(), ufrag, pwd, + webrtc::ICEMODE_FULL, webrtc::CONNECTIONROLE_NONE, + &fingerprint); +} + // WebRtcSdpTest class WebRtcSdpTest : public ::testing::Test { @@ -969,7 +973,7 @@ class WebRtcSdpTest : public ::testing::Test { audio_stream.set_stream_ids({kStreamId1}); audio_stream.ssrcs.push_back(kAudioTrack1Ssrc); audio_desc_->AddStream(audio_stream); - rtc::SocketAddress audio_addr("74.125.127.126", 2345); + webrtc::SocketAddress audio_addr("74.125.127.126", 2345); audio_desc_->set_connection_address(audio_addr); desc_.AddContent(kAudioContentName, MediaProtocolType::kRtp, absl::WrapUnique(audio_desc_)); @@ -982,91 +986,93 @@ class WebRtcSdpTest : public ::testing::Test { video_stream.set_stream_ids({kStreamId1}); video_stream.ssrcs.push_back(kVideoTrack1Ssrc1); video_stream.ssrcs.push_back(kVideoTrack1Ssrc2); - cricket::SsrcGroup ssrc_group(kFecSsrcGroupSemantics, video_stream.ssrcs); + webrtc::SsrcGroup ssrc_group(webrtc::kFecSsrcGroupSemantics, + video_stream.ssrcs); video_stream.ssrc_groups.push_back(ssrc_group); video_desc_->AddStream(video_stream); - rtc::SocketAddress video_addr("74.125.224.39", 3457); + webrtc::SocketAddress video_addr("74.125.224.39", 3457); video_desc_->set_connection_address(video_addr); desc_.AddContent(kVideoContentName, MediaProtocolType::kRtp, absl::WrapUnique(video_desc_)); - // TransportInfo + // TransportInfo, with fingerprint + webrtc::SSLFingerprint fingerprint(webrtc::DIGEST_SHA_1, kIdentityDigest); desc_.AddTransportInfo(TransportInfo( - kAudioContentName, TransportDescription(kUfragVoice, kPwdVoice))); + kAudioContentName, MakeTransportDescription(kUfragVoice, kPwdVoice))); desc_.AddTransportInfo(TransportInfo( - kVideoContentName, TransportDescription(kUfragVideo, kPwdVideo))); + kVideoContentName, MakeTransportDescription(kUfragVideo, kPwdVideo))); // v4 host int port = 1234; - rtc::SocketAddress address("192.168.1.5", port++); - Candidate candidate1(ICE_CANDIDATE_COMPONENT_RTP, "udp", address, - kCandidatePriority, "", "", LOCAL_PORT_TYPE, + webrtc::SocketAddress address("192.168.1.5", port++); + Candidate candidate1(webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", address, + kCandidatePriority, "", "", IceCandidateType::kHost, kCandidateGeneration, kCandidateFoundation1); address.SetPort(port++); - Candidate candidate2(ICE_CANDIDATE_COMPONENT_RTCP, "udp", address, - kCandidatePriority, "", "", LOCAL_PORT_TYPE, + Candidate candidate2(webrtc::ICE_CANDIDATE_COMPONENT_RTCP, "udp", address, + kCandidatePriority, "", "", IceCandidateType::kHost, kCandidateGeneration, kCandidateFoundation1); address.SetPort(port++); - Candidate candidate3(ICE_CANDIDATE_COMPONENT_RTCP, "udp", address, - kCandidatePriority, "", "", LOCAL_PORT_TYPE, + Candidate candidate3(webrtc::ICE_CANDIDATE_COMPONENT_RTCP, "udp", address, + kCandidatePriority, "", "", IceCandidateType::kHost, kCandidateGeneration, kCandidateFoundation1); address.SetPort(port++); - Candidate candidate4(ICE_CANDIDATE_COMPONENT_RTP, "udp", address, - kCandidatePriority, "", "", LOCAL_PORT_TYPE, + Candidate candidate4(webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", address, + kCandidatePriority, "", "", IceCandidateType::kHost, kCandidateGeneration, kCandidateFoundation1); // v6 host - rtc::SocketAddress v6_address("::1", port++); - 
cricket::Candidate candidate5(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp", - v6_address, kCandidatePriority, "", "", - cricket::LOCAL_PORT_TYPE, - kCandidateGeneration, kCandidateFoundation2); + webrtc::SocketAddress v6_address("::1", port++); + webrtc::Candidate candidate5(webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", + v6_address, kCandidatePriority, "", "", + IceCandidateType::kHost, kCandidateGeneration, + kCandidateFoundation2); v6_address.SetPort(port++); - cricket::Candidate candidate6(cricket::ICE_CANDIDATE_COMPONENT_RTCP, "udp", - v6_address, kCandidatePriority, "", "", - cricket::LOCAL_PORT_TYPE, - kCandidateGeneration, kCandidateFoundation2); + webrtc::Candidate candidate6(webrtc::ICE_CANDIDATE_COMPONENT_RTCP, "udp", + v6_address, kCandidatePriority, "", "", + IceCandidateType::kHost, kCandidateGeneration, + kCandidateFoundation2); v6_address.SetPort(port++); - cricket::Candidate candidate7(cricket::ICE_CANDIDATE_COMPONENT_RTCP, "udp", - v6_address, kCandidatePriority, "", "", - cricket::LOCAL_PORT_TYPE, - kCandidateGeneration, kCandidateFoundation2); + webrtc::Candidate candidate7(webrtc::ICE_CANDIDATE_COMPONENT_RTCP, "udp", + v6_address, kCandidatePriority, "", "", + IceCandidateType::kHost, kCandidateGeneration, + kCandidateFoundation2); v6_address.SetPort(port++); - cricket::Candidate candidate8(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp", - v6_address, kCandidatePriority, "", "", - cricket::LOCAL_PORT_TYPE, - kCandidateGeneration, kCandidateFoundation2); + webrtc::Candidate candidate8(webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", + v6_address, kCandidatePriority, "", "", + IceCandidateType::kHost, kCandidateGeneration, + kCandidateFoundation2); // stun int port_stun = 2345; - rtc::SocketAddress address_stun("74.125.127.126", port_stun++); - rtc::SocketAddress rel_address_stun("192.168.1.5", port_stun++); - cricket::Candidate candidate9(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp", - address_stun, kCandidatePriority, "", "", - STUN_PORT_TYPE, kCandidateGeneration, - kCandidateFoundation3); + webrtc::SocketAddress address_stun("74.125.127.126", port_stun++); + webrtc::SocketAddress rel_address_stun("192.168.1.5", port_stun++); + webrtc::Candidate candidate9(webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", + address_stun, kCandidatePriority, "", "", + IceCandidateType::kSrflx, kCandidateGeneration, + kCandidateFoundation3); candidate9.set_related_address(rel_address_stun); address_stun.SetPort(port_stun++); rel_address_stun.SetPort(port_stun++); - cricket::Candidate candidate10(cricket::ICE_CANDIDATE_COMPONENT_RTCP, "udp", - address_stun, kCandidatePriority, "", "", - STUN_PORT_TYPE, kCandidateGeneration, - kCandidateFoundation3); + webrtc::Candidate candidate10(webrtc::ICE_CANDIDATE_COMPONENT_RTCP, "udp", + address_stun, kCandidatePriority, "", "", + IceCandidateType::kSrflx, + kCandidateGeneration, kCandidateFoundation3); candidate10.set_related_address(rel_address_stun); // relay int port_relay = 3456; - rtc::SocketAddress address_relay("74.125.224.39", port_relay++); - cricket::Candidate candidate11(cricket::ICE_CANDIDATE_COMPONENT_RTCP, "udp", - address_relay, kCandidatePriority, "", "", - cricket::RELAY_PORT_TYPE, - kCandidateGeneration, kCandidateFoundation4); + webrtc::SocketAddress address_relay("74.125.224.39", port_relay++); + webrtc::Candidate candidate11(webrtc::ICE_CANDIDATE_COMPONENT_RTCP, "udp", + address_relay, kCandidatePriority, "", "", + IceCandidateType::kRelay, + kCandidateGeneration, kCandidateFoundation4); address_relay.SetPort(port_relay++); - cricket::Candidate 
candidate12(cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp", - address_relay, kCandidatePriority, "", "", - RELAY_PORT_TYPE, kCandidateGeneration, - kCandidateFoundation4); + webrtc::Candidate candidate12(webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", + address_relay, kCandidatePriority, "", "", + IceCandidateType::kRelay, + kCandidateGeneration, kCandidateFoundation4); // voice candidates_.push_back(candidate1); @@ -1106,9 +1112,9 @@ class WebRtcSdpTest : public ::testing::Test { const IceCandidateCollection* video_candidates_collection = jdesc_.candidates(1); ASSERT_NE(nullptr, video_candidates_collection); - std::vector video_candidates; + std::vector video_candidates; for (size_t i = 0; i < video_candidates_collection->count(); ++i) { - cricket::Candidate c = video_candidates_collection->at(i)->candidate(); + webrtc::Candidate c = video_candidates_collection->at(i)->candidate(); c.set_transport_name("video_content_name"); video_candidates.push_back(c); } @@ -1125,13 +1131,13 @@ class WebRtcSdpTest : public ::testing::Test { desc_.transport_infos()[1].description.ice_ufrag.clear(); desc_.transport_infos()[1].description.ice_pwd.clear(); desc_.transport_infos()[1].description.connection_role = - cricket::CONNECTIONROLE_NONE; + webrtc::CONNECTIONROLE_NONE; // Set bundle-only flag. desc_.contents()[1].bundle_only = true; // Add BUNDLE group. - ContentGroup group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup group(webrtc::GROUP_TYPE_BUNDLE); group.AddContentName(kAudioContentName); group.AddContentName(kVideoContentName); desc_.AddGroup(group); @@ -1173,7 +1179,8 @@ class WebRtcSdpTest : public ::testing::Test { absl::WrapUnique(audio_desc_)); desc_.AddContent(kVideoContentName, MediaProtocolType::kRtp, absl::WrapUnique(video_desc_)); - + desc_.set_msid_signaling(webrtc::kMsidSignalingSsrcAttribute | + webrtc::kMsidSignalingSemantic); ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), jdesc_.session_version())); } @@ -1193,8 +1200,9 @@ class WebRtcSdpTest : public ::testing::Test { audio_desc_2->AddStream(audio_track_2); desc_.AddContent(kAudioContentName2, MediaProtocolType::kRtp, absl::WrapUnique(audio_desc_2)); - desc_.AddTransportInfo(TransportInfo( - kAudioContentName2, TransportDescription(kUfragVoice2, kPwdVoice2))); + desc_.AddTransportInfo( + TransportInfo(kAudioContentName2, + MakeTransportDescription(kUfragVoice2, kPwdVoice2))); // Video track 2, in stream 2. VideoContentDescription* video_desc_2 = CreateVideoContentDescription(); StreamParams video_track_2; @@ -1207,8 +1215,9 @@ class WebRtcSdpTest : public ::testing::Test { video_desc_2->AddStream(video_track_2); desc_.AddContent(kVideoContentName2, MediaProtocolType::kRtp, absl::WrapUnique(video_desc_2)); - desc_.AddTransportInfo(TransportInfo( - kVideoContentName2, TransportDescription(kUfragVideo2, kPwdVideo2))); + desc_.AddTransportInfo( + TransportInfo(kVideoContentName2, + MakeTransportDescription(kUfragVideo2, kPwdVideo2))); // Video track 3, in stream 2. 
VideoContentDescription* video_desc_3 = CreateVideoContentDescription(); @@ -1222,9 +1231,11 @@ class WebRtcSdpTest : public ::testing::Test { video_desc_3->AddStream(video_track_3); desc_.AddContent(kVideoContentName3, MediaProtocolType::kRtp, absl::WrapUnique(video_desc_3)); - desc_.AddTransportInfo(TransportInfo( - kVideoContentName3, TransportDescription(kUfragVideo3, kPwdVideo3))); - desc_.set_msid_signaling(cricket::kMsidSignalingMediaSection); + desc_.AddTransportInfo( + TransportInfo(kVideoContentName3, + MakeTransportDescription(kUfragVideo3, kPwdVideo3))); + desc_.set_msid_signaling(webrtc::kMsidSignalingMediaSection | + webrtc::kMsidSignalingSemantic); ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), jdesc_.session_version())); @@ -1236,14 +1247,10 @@ class WebRtcSdpTest : public ::testing::Test { AudioContentDescription* audio = new AudioContentDescription(); audio->set_rtcp_mux(true); audio->set_rtcp_reduced_size(true); - audio->AddCrypto(CryptoParams( - 1, "AES_CM_128_HMAC_SHA1_32", - "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32", - "dummy_session_params")); - audio->set_protocol(cricket::kMediaProtocolSavpf); - audio->AddCodec(cricket::CreateAudioCodec(111, "opus", 48000, 2)); - audio->AddCodec(cricket::CreateAudioCodec(103, "ISAC", 16000, 1)); - audio->AddCodec(cricket::CreateAudioCodec(104, "ISAC", 32000, 1)); + audio->set_protocol(webrtc::kMediaProtocolSavpf); + audio->AddCodec(webrtc::CreateAudioCodec(111, "opus", 48000, 2)); + audio->AddCodec(webrtc::CreateAudioCodec(103, "ISAC", 16000, 1)); + audio->AddCodec(webrtc::CreateAudioCodec(104, "ISAC", 32000, 1)); return audio; } @@ -1266,8 +1273,9 @@ class WebRtcSdpTest : public ::testing::Test { audio_desc_2->AddStream(audio_track_2); desc_.AddContent(kAudioContentName2, MediaProtocolType::kRtp, absl::WrapUnique(audio_desc_2)); - desc_.AddTransportInfo(TransportInfo( - kAudioContentName2, TransportDescription(kUfragVoice2, kPwdVoice2))); + desc_.AddTransportInfo( + TransportInfo(kAudioContentName2, + MakeTransportDescription(kUfragVoice2, kPwdVoice2))); // Audio track 3 has no stream ids. AudioContentDescription* audio_desc_3 = CreateAudioContentDescription(); @@ -1279,8 +1287,9 @@ class WebRtcSdpTest : public ::testing::Test { audio_desc_3->AddStream(audio_track_3); desc_.AddContent(kAudioContentName3, MediaProtocolType::kRtp, absl::WrapUnique(audio_desc_3)); - desc_.AddTransportInfo(TransportInfo( - kAudioContentName3, TransportDescription(kUfragVoice3, kPwdVoice3))); + desc_.AddTransportInfo( + TransportInfo(kAudioContentName3, + MakeTransportDescription(kUfragVoice3, kPwdVoice3))); desc_.set_msid_signaling(msid_signaling); ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), jdesc_.session_version())); @@ -1304,7 +1313,8 @@ class WebRtcSdpTest : public ::testing::Test { absl::WrapUnique(audio_desc)); // Enable signaling a=msid lines. - desc_.set_msid_signaling(cricket::kMsidSignalingMediaSection); + desc_.set_msid_signaling(webrtc::kMsidSignalingMediaSection | + webrtc::kMsidSignalingSemantic); ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), jdesc_.session_version())); } @@ -1313,16 +1323,14 @@ class WebRtcSdpTest : public ::testing::Test { // configuration. 
VideoContentDescription* CreateVideoContentDescription() { VideoContentDescription* video = new VideoContentDescription(); - video->AddCrypto(CryptoParams( - 1, "AES_CM_128_HMAC_SHA1_80", - "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32", "")); - video->set_protocol(cricket::kMediaProtocolSavpf); - video->AddCodec(cricket::CreateVideoCodec(120, "VP8")); + video->set_protocol(webrtc::kMediaProtocolSavpf); + video->AddCodec(webrtc::CreateVideoCodec(120, "VP8")); return video; } - template - void CompareMediaContentDescription(const MCD* cd1, const MCD* cd2) { + void CompareMediaContentDescription( + const webrtc::MediaContentDescription* cd1, + const webrtc::MediaContentDescription* cd2) { // type EXPECT_EQ(cd1->type(), cd2->type()); @@ -1335,30 +1343,16 @@ class WebRtcSdpTest : public ::testing::Test { // rtcp_reduced_size EXPECT_EQ(cd1->rtcp_reduced_size(), cd2->rtcp_reduced_size()); - // cryptos - EXPECT_EQ(cd1->cryptos().size(), cd2->cryptos().size()); - if (cd1->cryptos().size() != cd2->cryptos().size()) { - ADD_FAILURE(); - return; - } - for (size_t i = 0; i < cd1->cryptos().size(); ++i) { - const CryptoParams c1 = cd1->cryptos().at(i); - const CryptoParams c2 = cd2->cryptos().at(i); - EXPECT_TRUE(c1.Matches(c2)); - EXPECT_EQ(c1.key_params, c2.key_params); - EXPECT_EQ(c1.session_params, c2.session_params); - } - // protocol // Use an equivalence class here, for old and new versions of the // protocol description. - if (cd1->protocol() == cricket::kMediaProtocolDtlsSctp || - cd1->protocol() == cricket::kMediaProtocolUdpDtlsSctp || - cd1->protocol() == cricket::kMediaProtocolTcpDtlsSctp) { + if (cd1->protocol() == webrtc::kMediaProtocolDtlsSctp || + cd1->protocol() == webrtc::kMediaProtocolUdpDtlsSctp || + cd1->protocol() == webrtc::kMediaProtocolTcpDtlsSctp) { const bool cd2_is_also_dtls_sctp = - cd2->protocol() == cricket::kMediaProtocolDtlsSctp || - cd2->protocol() == cricket::kMediaProtocolUdpDtlsSctp || - cd2->protocol() == cricket::kMediaProtocolTcpDtlsSctp; + cd2->protocol() == webrtc::kMediaProtocolDtlsSctp || + cd2->protocol() == webrtc::kMediaProtocolUdpDtlsSctp || + cd2->protocol() == webrtc::kMediaProtocolTcpDtlsSctp; EXPECT_TRUE(cd2_is_also_dtls_sctp); } else { EXPECT_EQ(cd1->protocol(), cd2->protocol()); @@ -1422,33 +1416,27 @@ class WebRtcSdpTest : public ::testing::Test { return; } for (size_t i = 0; i < desc1.contents().size(); ++i) { - const cricket::ContentInfo& c1 = desc1.contents().at(i); - const cricket::ContentInfo& c2 = desc2.contents().at(i); + const webrtc::ContentInfo& c1 = desc1.contents().at(i); + const webrtc::ContentInfo& c2 = desc2.contents().at(i); // ContentInfo properties. 
- EXPECT_EQ(c1.name, c2.name); + EXPECT_EQ(c1.mid(), c2.mid()); EXPECT_EQ(c1.type, c2.type); EXPECT_EQ(c1.rejected, c2.rejected); EXPECT_EQ(c1.bundle_only, c2.bundle_only); - ASSERT_EQ(IsAudioContent(&c1), IsAudioContent(&c2)); - if (IsAudioContent(&c1)) { - const AudioContentDescription* acd1 = - c1.media_description()->as_audio(); - const AudioContentDescription* acd2 = - c2.media_description()->as_audio(); - CompareMediaContentDescription(acd1, acd2); + ASSERT_EQ(webrtc::IsAudioContent(&c1), webrtc::IsAudioContent(&c2)); + if (webrtc::IsAudioContent(&c1)) { + CompareMediaContentDescription(c1.media_description(), + c2.media_description()); } - ASSERT_EQ(IsVideoContent(&c1), IsVideoContent(&c2)); - if (IsVideoContent(&c1)) { - const VideoContentDescription* vcd1 = - c1.media_description()->as_video(); - const VideoContentDescription* vcd2 = - c2.media_description()->as_video(); - CompareMediaContentDescription(vcd1, vcd2); + ASSERT_EQ(webrtc::IsVideoContent(&c1), webrtc::IsVideoContent(&c2)); + if (webrtc::IsVideoContent(&c1)) { + CompareMediaContentDescription(c1.media_description(), + c2.media_description()); } - ASSERT_EQ(IsDataContent(&c1), IsDataContent(&c2)); + ASSERT_EQ(webrtc::IsDataContent(&c1), webrtc::IsDataContent(&c2)); if (c1.media_description()->as_sctp()) { ASSERT_TRUE(c2.media_description()->as_sctp()); const SctpDataContentDescription* scd1 = @@ -1464,42 +1452,42 @@ class WebRtcSdpTest : public ::testing::Test { } // group - const cricket::ContentGroups groups1 = desc1.groups(); - const cricket::ContentGroups groups2 = desc2.groups(); + const webrtc::ContentGroups groups1 = desc1.groups(); + const webrtc::ContentGroups groups2 = desc2.groups(); EXPECT_EQ(groups1.size(), groups1.size()); if (groups1.size() != groups2.size()) { ADD_FAILURE(); return; } for (size_t i = 0; i < groups1.size(); ++i) { - const cricket::ContentGroup group1 = groups1.at(i); - const cricket::ContentGroup group2 = groups2.at(i); + const webrtc::ContentGroup group1 = groups1.at(i); + const webrtc::ContentGroup group2 = groups2.at(i); EXPECT_EQ(group1.semantics(), group2.semantics()); - const cricket::ContentNames names1 = group1.content_names(); - const cricket::ContentNames names2 = group2.content_names(); + const webrtc::ContentNames names1 = group1.content_names(); + const webrtc::ContentNames names2 = group2.content_names(); EXPECT_EQ(names1.size(), names2.size()); if (names1.size() != names2.size()) { ADD_FAILURE(); return; } - cricket::ContentNames::const_iterator iter1 = names1.begin(); - cricket::ContentNames::const_iterator iter2 = names2.begin(); + webrtc::ContentNames::const_iterator iter1 = names1.begin(); + webrtc::ContentNames::const_iterator iter2 = names2.begin(); while (iter1 != names1.end()) { EXPECT_EQ(*iter1++, *iter2++); } } // transport info - const cricket::TransportInfos transports1 = desc1.transport_infos(); - const cricket::TransportInfos transports2 = desc2.transport_infos(); + const webrtc::TransportInfos transports1 = desc1.transport_infos(); + const webrtc::TransportInfos transports2 = desc2.transport_infos(); EXPECT_EQ(transports1.size(), transports2.size()); if (transports1.size() != transports2.size()) { ADD_FAILURE(); return; } for (size_t i = 0; i < transports1.size(); ++i) { - const cricket::TransportInfo transport1 = transports1.at(i); - const cricket::TransportInfo transport2 = transports2.at(i); + const webrtc::TransportInfo transport1 = transports1.at(i); + const webrtc::TransportInfo transport2 = transports2.at(i); EXPECT_EQ(transport1.content_name, 
transport2.content_name); EXPECT_EQ(transport1.description.ice_ufrag, transport2.description.ice_ufrag); @@ -1507,8 +1495,14 @@ class WebRtcSdpTest : public ::testing::Test { EXPECT_EQ(transport1.description.ice_mode, transport2.description.ice_mode); if (transport1.description.identity_fingerprint) { - EXPECT_EQ(*transport1.description.identity_fingerprint, - *transport2.description.identity_fingerprint); + if (!transport2.description.identity_fingerprint) { + ADD_FAILURE() << "transport[" << i + << "]: left transport has fingerprint, right transport " + "does not have it"; + } else { + EXPECT_EQ(*transport1.description.identity_fingerprint, + *transport2.description.identity_fingerprint); + } } else { EXPECT_EQ(transport1.description.identity_fingerprint.get(), transport2.description.identity_fingerprint.get()); @@ -1518,7 +1512,7 @@ class WebRtcSdpTest : public ::testing::Test { } // global attributes - EXPECT_EQ(desc1.msid_supported(), desc2.msid_supported()); + EXPECT_EQ(desc1.msid_signaling(), desc2.msid_signaling()); EXPECT_EQ(desc1.extmap_allow_mixed(), desc2.extmap_allow_mixed()); } @@ -1569,7 +1563,7 @@ class WebRtcSdpTest : public ::testing::Test { RTC_DCHECK_NOTREACHED(); } TransportInfo transport_info(content_name, - TransportDescription(ufrag, pwd)); + MakeTransportDescription(ufrag, pwd)); SessionDescription* desc = const_cast(jdesc->description()); desc->RemoveTransportInfoByName(content_name); @@ -1589,7 +1583,7 @@ class WebRtcSdpTest : public ::testing::Test { void AddIceOptions(const std::string& content_name, const std::vector& transport_options) { ASSERT_TRUE(desc_.GetTransportInfoByName(content_name) != NULL); - cricket::TransportInfo transport_info = + webrtc::TransportInfo transport_info = *(desc_.GetTransportInfoByName(content_name)); desc_.RemoveTransportInfoByName(content_name); transport_info.description.transport_options = transport_options; @@ -1600,7 +1594,7 @@ class WebRtcSdpTest : public ::testing::Test { const std::string& ice_ufrag, const std::string& ice_pwd) { ASSERT_TRUE(desc_.GetTransportInfoByName(content_name) != NULL); - cricket::TransportInfo transport_info = + webrtc::TransportInfo transport_info = *(desc_.GetTransportInfoByName(content_name)); desc_.RemoveTransportInfoByName(content_name); transport_info.description.ice_ufrag = ice_ufrag; @@ -1608,22 +1602,6 @@ class WebRtcSdpTest : public ::testing::Test { desc_.AddTransportInfo(transport_info); } - void AddFingerprint() { - desc_.RemoveTransportInfoByName(kAudioContentName); - desc_.RemoveTransportInfoByName(kVideoContentName); - rtc::SSLFingerprint fingerprint(rtc::DIGEST_SHA_1, kIdentityDigest); - desc_.AddTransportInfo(TransportInfo( - kAudioContentName, - TransportDescription(std::vector(), kUfragVoice, kPwdVoice, - cricket::ICEMODE_FULL, - cricket::CONNECTIONROLE_NONE, &fingerprint))); - desc_.AddTransportInfo(TransportInfo( - kVideoContentName, - TransportDescription(std::vector(), kUfragVideo, kPwdVideo, - cricket::ICEMODE_FULL, - cricket::CONNECTIONROLE_NONE, &fingerprint))); - } - void AddExtmap(bool encrypted) { audio_desc_ = new AudioContentDescription(*audio_desc_); video_desc_ = new VideoContentDescription(*video_desc_); @@ -1639,16 +1617,10 @@ class WebRtcSdpTest : public ::testing::Test { absl::WrapUnique(video_desc_)); } - void RemoveCryptos() { - audio_desc_->set_cryptos(std::vector()); - video_desc_->set_cryptos(std::vector()); - } - // Removes everything in StreamParams from the session description that is // used for a=ssrc lines. 
void RemoveSsrcSignalingFromStreamParams() { - for (cricket::ContentInfo& content_info : - jdesc_.description()->contents()) { + for (webrtc::ContentInfo& content_info : jdesc_.description()->contents()) { // With Unified Plan there should be one StreamParams per m= section. StreamParams& stream = content_info.media_description()->mutable_streams()[0]; @@ -1735,12 +1707,12 @@ class WebRtcSdpTest : public ::testing::Test { new SctpDataContentDescription()); sctp_desc_ = data.get(); sctp_desc_->set_use_sctpmap(use_sctpmap); - sctp_desc_->set_protocol(cricket::kMediaProtocolUdpDtlsSctp); + sctp_desc_->set_protocol(webrtc::kMediaProtocolUdpDtlsSctp); sctp_desc_->set_port(kDefaultSctpPort); desc_.AddContent(kDataContentName, MediaProtocolType::kSctp, std::move(data)); desc_.AddTransportInfo(TransportInfo( - kDataContentName, TransportDescription(kUfragData, kPwdData))); + kDataContentName, MakeTransportDescription(kUfragData, kPwdData))); } bool TestDeserializeDirection(RtpTransceiverDirection direction) { @@ -1825,12 +1797,12 @@ class WebRtcSdpTest : public ::testing::Test { } } - void VerifyCodecParameter(const cricket::CodecParameterMap& params, + void VerifyCodecParameter(const webrtc::CodecParameterMap& params, const std::string& name, int expected_value) { - cricket::CodecParameterMap::const_iterator found = params.find(name); + webrtc::CodecParameterMap::const_iterator found = params.find(name); ASSERT_TRUE(found != params.end()); - EXPECT_EQ(found->second, rtc::ToString(expected_value)); + EXPECT_EQ(found->second, absl::StrCat(expected_value)); } void TestDeserializeCodecParams(const CodecParams& params, @@ -1882,10 +1854,10 @@ class WebRtcSdpTest : public ::testing::Test { EXPECT_TRUE(webrtc::SdpDeserialize(sdp, jdesc_output, &error)); const AudioContentDescription* acd = - GetFirstAudioContentDescription(jdesc_output->description()); + webrtc::GetFirstAudioContentDescription(jdesc_output->description()); ASSERT_TRUE(acd); ASSERT_FALSE(acd->codecs().empty()); - cricket::AudioCodec opus = acd->codecs()[0]; + webrtc::Codec opus = acd->codecs()[0]; EXPECT_EQ("opus", opus.name); EXPECT_EQ(111, opus.id); VerifyCodecParameter(opus.params, "minptime", params.min_ptime); @@ -1894,13 +1866,12 @@ class WebRtcSdpTest : public ::testing::Test { VerifyCodecParameter(opus.params, "useinbandfec", params.useinband); VerifyCodecParameter(opus.params, "maxaveragebitrate", params.maxaveragebitrate); - for (size_t i = 0; i < acd->codecs().size(); ++i) { - cricket::AudioCodec codec = acd->codecs()[i]; + for (const auto& codec : acd->codecs()) { VerifyCodecParameter(codec.params, "ptime", params.ptime); VerifyCodecParameter(codec.params, "maxptime", params.max_ptime); } - cricket::AudioCodec dtmf = acd->codecs()[3]; + webrtc::Codec dtmf = acd->codecs()[3]; EXPECT_EQ("telephone-event", dtmf.name); EXPECT_EQ(105, dtmf.id); EXPECT_EQ(3u, @@ -1909,18 +1880,18 @@ class WebRtcSdpTest : public ::testing::Test { EXPECT_EQ(dtmf.params.begin()->second, "0-15,66,70"); const VideoContentDescription* vcd = - GetFirstVideoContentDescription(jdesc_output->description()); + webrtc::GetFirstVideoContentDescription(jdesc_output->description()); ASSERT_TRUE(vcd); ASSERT_FALSE(vcd->codecs().empty()); - cricket::VideoCodec vp8 = vcd->codecs()[0]; + webrtc::Codec vp8 = vcd->codecs()[0]; EXPECT_EQ("VP8", vp8.name); EXPECT_EQ(99, vp8.id); - cricket::VideoCodec rtx = vcd->codecs()[1]; + webrtc::Codec rtx = vcd->codecs()[1]; EXPECT_EQ("RTX", rtx.name); EXPECT_EQ(95, rtx.id); VerifyCodecParameter(rtx.params, "apt", vp8.id); // VP9 is 
listed last in the m= line so should come after VP8 and RTX. - cricket::VideoCodec vp9 = vcd->codecs()[2]; + webrtc::Codec vp9 = vcd->codecs()[2]; EXPECT_EQ("VP9", vp9.name); EXPECT_EQ(96, vp9.id); } @@ -1955,31 +1926,31 @@ class WebRtcSdpTest : public ::testing::Test { SdpParseError error; EXPECT_TRUE(webrtc::SdpDeserialize(sdp, jdesc_output, &error)); const AudioContentDescription* acd = - GetFirstAudioContentDescription(jdesc_output->description()); + webrtc::GetFirstAudioContentDescription(jdesc_output->description()); ASSERT_TRUE(acd); ASSERT_FALSE(acd->codecs().empty()); - cricket::AudioCodec opus = acd->codecs()[0]; + webrtc::Codec opus = acd->codecs()[0]; EXPECT_EQ(111, opus.id); - EXPECT_TRUE(opus.HasFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamNack, cricket::kParamValueEmpty))); + EXPECT_TRUE(opus.HasFeedbackParam(webrtc::FeedbackParam( + webrtc::kRtcpFbParamNack, webrtc::kParamValueEmpty))); const VideoContentDescription* vcd = - GetFirstVideoContentDescription(jdesc_output->description()); + webrtc::GetFirstVideoContentDescription(jdesc_output->description()); ASSERT_TRUE(vcd); ASSERT_FALSE(vcd->codecs().empty()); - cricket::VideoCodec vp8 = vcd->codecs()[0]; + webrtc::Codec vp8 = vcd->codecs()[0]; EXPECT_EQ(vp8.name, "VP8"); EXPECT_EQ(101, vp8.id); - EXPECT_TRUE(vp8.HasFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamLntf, cricket::kParamValueEmpty))); - EXPECT_TRUE(vp8.HasFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamNack, cricket::kParamValueEmpty))); - EXPECT_TRUE(vp8.HasFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamNack, cricket::kRtcpFbNackParamPli))); - EXPECT_TRUE(vp8.HasFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamRemb, cricket::kParamValueEmpty))); - EXPECT_TRUE(vp8.HasFeedbackParam(cricket::FeedbackParam( - cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir))); + EXPECT_TRUE(vp8.HasFeedbackParam(webrtc::FeedbackParam( + webrtc::kRtcpFbParamLntf, webrtc::kParamValueEmpty))); + EXPECT_TRUE(vp8.HasFeedbackParam(webrtc::FeedbackParam( + webrtc::kRtcpFbParamNack, webrtc::kParamValueEmpty))); + EXPECT_TRUE(vp8.HasFeedbackParam(webrtc::FeedbackParam( + webrtc::kRtcpFbParamNack, webrtc::kRtcpFbNackParamPli))); + EXPECT_TRUE(vp8.HasFeedbackParam(webrtc::FeedbackParam( + webrtc::kRtcpFbParamRemb, webrtc::kParamValueEmpty))); + EXPECT_TRUE(vp8.HasFeedbackParam(webrtc::FeedbackParam( + webrtc::kRtcpFbParamCcm, webrtc::kRtcpFbCcmParamFir))); } // Two SDP messages can mean the same thing but be different strings, e.g. @@ -2001,8 +1972,8 @@ class WebRtcSdpTest : public ::testing::Test { // 'connection address' field, previously set from the candidates, must also // be reset. 
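// Editorial sketch, not part of the patch: the rtc::ToString() ->
// absl::StrCat() swap above relies on both producing the same decimal string
// for an int, so lookups like the one in VerifyCodecParameter() keep comparing
// equal. A standalone illustration of that pattern; the helper name and the
// header hosting webrtc::CodecParameterMap are assumptions.
#include <string>
#include "absl/strings/str_cat.h"
#include "api/rtp_parameters.h"  // webrtc::CodecParameterMap (assumed).

bool CodecParamEqualsSketch(const webrtc::CodecParameterMap& params,
                            const std::string& name, int expected_value) {
  auto found = params.find(name);
  return found != params.end() &&
         found->second == absl::StrCat(expected_value);
}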
void MakeDescriptionWithoutCandidates(JsepSessionDescription* jdesc) { - rtc::SocketAddress audio_addr("0.0.0.0", 9); - rtc::SocketAddress video_addr("0.0.0.0", 9); + webrtc::SocketAddress audio_addr("0.0.0.0", 9); + webrtc::SocketAddress video_addr("0.0.0.0", 9); audio_desc_->set_connection_address(audio_addr); video_desc_->set_connection_address(video_addr); ASSERT_TRUE(jdesc->Initialize(desc_.Clone(), kSessionId, kSessionVersion)); @@ -2013,7 +1984,7 @@ class WebRtcSdpTest : public ::testing::Test { AudioContentDescription* audio_desc_; VideoContentDescription* video_desc_; SctpDataContentDescription* sctp_desc_; - Candidates candidates_; + std::vector candidates_; std::unique_ptr jcandidate_; JsepSessionDescription jdesc_; }; @@ -2046,39 +2017,6 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionEmpty) { EXPECT_EQ("", webrtc::SdpSerialize(jdesc_empty)); } -// This tests serialization of SDP with a=crypto and a=fingerprint, as would be -// the case in a DTLS offer. -TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithFingerprint) { - AddFingerprint(); - JsepSessionDescription jdesc_with_fingerprint(kDummyType); - MakeDescriptionWithoutCandidates(&jdesc_with_fingerprint); - std::string message = webrtc::SdpSerialize(jdesc_with_fingerprint); - - std::string sdp_with_fingerprint = kSdpString; - InjectAfter(kAttributeIcePwdVoice, kFingerprint, &sdp_with_fingerprint); - InjectAfter(kAttributeIcePwdVideo, kFingerprint, &sdp_with_fingerprint); - - EXPECT_EQ(sdp_with_fingerprint, message); -} - -// This tests serialization of SDP with a=fingerprint with no a=crypto, as would -// be the case in a DTLS answer. -TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithFingerprintNoCryptos) { - AddFingerprint(); - RemoveCryptos(); - JsepSessionDescription jdesc_with_fingerprint(kDummyType); - MakeDescriptionWithoutCandidates(&jdesc_with_fingerprint); - std::string message = webrtc::SdpSerialize(jdesc_with_fingerprint); - - std::string sdp_with_fingerprint = kSdpString; - Replace(kAttributeCryptoVoice, "", &sdp_with_fingerprint); - Replace(kAttributeCryptoVideo, "", &sdp_with_fingerprint); - InjectAfter(kAttributeIcePwdVoice, kFingerprint, &sdp_with_fingerprint); - InjectAfter(kAttributeIcePwdVideo, kFingerprint, &sdp_with_fingerprint); - - EXPECT_EQ(sdp_with_fingerprint, message); -} - TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithoutCandidates) { // JsepSessionDescription with desc but without candidates. JsepSessionDescription jdesc_no_candidates(kDummyType); @@ -2088,11 +2026,11 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithoutCandidates) { } TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBundles) { - ContentGroup group1(cricket::GROUP_TYPE_BUNDLE); + ContentGroup group1(webrtc::GROUP_TYPE_BUNDLE); group1.AddContentName(kAudioContentName); group1.AddContentName(kVideoContentName); desc_.AddGroup(group1); - ContentGroup group2(cricket::GROUP_TYPE_BUNDLE); + ContentGroup group2(webrtc::GROUP_TYPE_BUNDLE); group2.AddContentName(kAudioContentName2); desc_.AddGroup(group2); ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), @@ -2107,10 +2045,12 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBundles) { } TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBandwidth) { - VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_); + VideoContentDescription* vcd = + webrtc::GetFirstVideoContentDescription(&desc_); vcd->set_bandwidth(100 * 1000 + 755); // Integer division will drop the 755. 
vcd->set_bandwidth_type("AS"); - AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_); + AudioContentDescription* acd = + webrtc::GetFirstAudioContentDescription(&desc_); acd->set_bandwidth(555); acd->set_bandwidth_type("TIAS"); ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), @@ -2126,7 +2066,8 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithBandwidth) { // Should default to b=AS if bandwidth_type isn't set. TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithMissingBandwidthType) { - VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_); + VideoContentDescription* vcd = + webrtc::GetFirstVideoContentDescription(&desc_); vcd->set_bandwidth(100 * 1000); ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), jdesc_.session_version())); @@ -2198,7 +2139,7 @@ void MutateJsepSctpPort(JsepSessionDescription* jdesc, const SessionDescription& desc, int port) { // Take our pre-built session description and change the SCTP port. - std::unique_ptr mutant = desc.Clone(); + std::unique_ptr mutant = desc.Clone(); SctpDataContentDescription* dcdesc = mutant->GetContentDescriptionByName(kDataContentName)->as_sctp(); dcdesc->set_port(port); @@ -2221,7 +2162,7 @@ TEST_F(WebRtcSdpTest, SerializeWithSctpDataChannelAndNewPort) { expected_sdp.append(kSdpSctpDataChannelString); absl::StrReplaceAll( - {{rtc::ToString(kDefaultSctpPort), rtc::ToString(kNewPort)}}, + {{absl::StrCat(kDefaultSctpPort), absl::StrCat(kNewPort)}}, &expected_sdp); EXPECT_EQ(expected_sdp, message); @@ -2233,16 +2174,16 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithExtmapAllowMixed) { } TEST_F(WebRtcSdpTest, SerializeMediaContentDescriptionWithExtmapAllowMixed) { - cricket::MediaContentDescription* video_desc = + webrtc::MediaContentDescription* video_desc = jdesc_.description()->GetContentDescriptionByName(kVideoContentName); ASSERT_TRUE(video_desc); - cricket::MediaContentDescription* audio_desc = + webrtc::MediaContentDescription* audio_desc = jdesc_.description()->GetContentDescriptionByName(kAudioContentName); ASSERT_TRUE(audio_desc); video_desc->set_extmap_allow_mixed_enum( - cricket::MediaContentDescription::kMedia); + webrtc::MediaContentDescription::kMedia); audio_desc->set_extmap_allow_mixed_enum( - cricket::MediaContentDescription::kMedia); + webrtc::MediaContentDescription::kMedia); TestSerialize(jdesc_); } @@ -2295,21 +2236,22 @@ TEST_F(WebRtcSdpTest, SerializeCandidates) { } TEST_F(WebRtcSdpTest, SerializeHostnameCandidate) { - rtc::SocketAddress address("a.test", 1234); - cricket::Candidate candidate( - cricket::ICE_CANDIDATE_COMPONENT_RTP, "udp", address, kCandidatePriority, - "", "", LOCAL_PORT_TYPE, kCandidateGeneration, kCandidateFoundation1); + webrtc::SocketAddress address("a.test", 1234); + webrtc::Candidate candidate(webrtc::ICE_CANDIDATE_COMPONENT_RTP, "udp", + address, kCandidatePriority, "", "", + IceCandidateType::kHost, kCandidateGeneration, + kCandidateFoundation1); JsepIceCandidate jcandidate(std::string("audio_content_name"), 0, candidate); std::string message = webrtc::SdpSerializeCandidate(jcandidate); EXPECT_EQ(std::string(kRawHostnameCandidate), message); } TEST_F(WebRtcSdpTest, SerializeTcpCandidates) { - Candidate candidate(ICE_CANDIDATE_COMPONENT_RTP, "tcp", - rtc::SocketAddress("192.168.1.5", 9), kCandidatePriority, - "", "", LOCAL_PORT_TYPE, kCandidateGeneration, - kCandidateFoundation1); - candidate.set_tcptype(cricket::TCPTYPE_ACTIVE_STR); + Candidate candidate(webrtc::ICE_CANDIDATE_COMPONENT_RTP, "tcp", + 
webrtc::SocketAddress("192.168.1.5", 9), + kCandidatePriority, "", "", IceCandidateType::kHost, + kCandidateGeneration, kCandidateFoundation1); + candidate.set_tcptype(webrtc::TCPTYPE_ACTIVE_STR); std::unique_ptr jcandidate( new JsepIceCandidate(std::string("audio_content_name"), 0, candidate)); @@ -2329,7 +2271,7 @@ TEST_F(WebRtcSdpTest, ParseTcpCandidateWithoutTcptype) { JsepIceCandidate jcandidate(kDummyMid, kDummyIndex); EXPECT_TRUE(SdpDeserializeCandidate(missing_tcptype, &jcandidate)); - EXPECT_EQ(std::string(cricket::TCPTYPE_PASSIVE_STR), + EXPECT_EQ(std::string(webrtc::TCPTYPE_PASSIVE_STR), jcandidate.candidate().tcptype()); } @@ -2344,7 +2286,9 @@ TEST_F(WebRtcSdpTest, ParseSslTcpCandidate) { } TEST_F(WebRtcSdpTest, SerializeSessionDescriptionWithH264) { - cricket::VideoCodec h264_codec = cricket::CreateVideoCodec("H264"); + webrtc::Codec h264_codec = webrtc::CreateVideoCodec("H264"); + // Id must be valid, but value doesn't matter. + h264_codec.id = 123; h264_codec.SetParam("profile-level-id", "42e01f"); h264_codec.SetParam("level-asymmetry-allowed", "1"); h264_codec.SetParam("packetization-mode", "1"); @@ -2430,14 +2374,14 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutRtpmap) { JsepSessionDescription jdesc(kDummyType); EXPECT_TRUE(SdpDeserialize(kSdpNoRtpmapString, &jdesc)); - cricket::AudioContentDescription* audio = - cricket::GetFirstAudioContentDescription(jdesc.description()); - AudioCodecs ref_codecs; + webrtc::AudioContentDescription* audio = + webrtc::GetFirstAudioContentDescription(jdesc.description()); + webrtc::Codecs ref_codecs; // The codecs in the AudioContentDescription should be in the same order as // the payload types (s) on the m= line. - ref_codecs.push_back(cricket::CreateAudioCodec(0, "PCMU", 8000, 1)); - ref_codecs.push_back(cricket::CreateAudioCodec(18, "G729", 8000, 1)); - ref_codecs.push_back(cricket::CreateAudioCodec(103, "ISAC", 16000, 1)); + ref_codecs.push_back(webrtc::CreateAudioCodec(0, "PCMU", 8000, 1)); + ref_codecs.push_back(webrtc::CreateAudioCodec(18, "G729", 8000, 1)); + ref_codecs.push_back(webrtc::CreateAudioCodec(103, "ISAC", 16000, 1)); EXPECT_EQ(ref_codecs, audio->codecs()); } @@ -2453,18 +2397,18 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutRtpmapButWithFmtp) { JsepSessionDescription jdesc(kDummyType); EXPECT_TRUE(SdpDeserialize(kSdpNoRtpmapString, &jdesc)); - cricket::AudioContentDescription* audio = - cricket::GetFirstAudioContentDescription(jdesc.description()); + webrtc::AudioContentDescription* audio = + webrtc::GetFirstAudioContentDescription(jdesc.description()); - cricket::AudioCodec g729 = audio->codecs()[0]; + webrtc::Codec g729 = audio->codecs()[0]; EXPECT_EQ("G729", g729.name); EXPECT_EQ(8000, g729.clockrate); EXPECT_EQ(18, g729.id); - cricket::CodecParameterMap::iterator found = g729.params.find("annexb"); + webrtc::CodecParameterMap::iterator found = g729.params.find("annexb"); ASSERT_TRUE(found != g729.params.end()); EXPECT_EQ(found->second, "yes"); - cricket::AudioCodec isac = audio->codecs()[1]; + webrtc::Codec isac = audio->codecs()[1]; EXPECT_EQ("ISAC", isac.name); EXPECT_EQ(103, isac.id); EXPECT_EQ(16000, isac.clockrate); @@ -2472,8 +2416,6 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutRtpmapButWithFmtp) { // Ensure that we can deserialize SDP with a=fingerprint properly. TEST_F(WebRtcSdpTest, DeserializeJsepSessionDescriptionWithFingerprint) { - // Add a DTLS a=fingerprint attribute to our session description. 
- AddFingerprint(); JsepSessionDescription new_jdesc(kDummyType); ASSERT_TRUE(new_jdesc.Initialize(desc_.Clone(), jdesc_.session_id(), jdesc_.session_version())); @@ -2493,7 +2435,7 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithBundle) { "a=group:BUNDLE audio_content_name video_content_name\r\n", &sdp_with_bundle); EXPECT_TRUE(SdpDeserialize(sdp_with_bundle, &jdesc_with_bundle)); - ContentGroup group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup group(webrtc::GROUP_TYPE_BUNDLE); group.AddContentName(kAudioContentName); group.AddContentName(kVideoContentName); desc_.AddGroup(group); @@ -2510,9 +2452,11 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithBandwidth) { InjectAfter("a=mid:audio_content_name\r\na=sendrecv\r\n", "b=AS:50\r\n", &sdp_with_bandwidth); EXPECT_TRUE(SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth)); - VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_); + VideoContentDescription* vcd = + webrtc::GetFirstVideoContentDescription(&desc_); vcd->set_bandwidth(100 * 1000); - AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_); + AudioContentDescription* acd = + webrtc::GetFirstAudioContentDescription(&desc_); acd->set_bandwidth(50 * 1000); ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), jdesc_.session_version())); @@ -2527,9 +2471,11 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithTiasBandwidth) { InjectAfter("a=mid:audio_content_name\r\na=sendrecv\r\n", "b=TIAS:50000\r\n", &sdp_with_bandwidth); EXPECT_TRUE(SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth)); - VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_); + VideoContentDescription* vcd = + webrtc::GetFirstVideoContentDescription(&desc_); vcd->set_bandwidth(100 * 1000); - AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_); + AudioContentDescription* acd = + webrtc::GetFirstAudioContentDescription(&desc_); acd->set_bandwidth(50 * 1000); ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), jdesc_.session_version())); @@ -2545,9 +2491,11 @@ TEST_F(WebRtcSdpTest, InjectAfter("a=mid:audio_content_name\r\na=sendrecv\r\n", "b=unknown:50000\r\n", &sdp_with_bandwidth); EXPECT_TRUE(SdpDeserialize(sdp_with_bandwidth, &jdesc_with_bandwidth)); - VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_); + VideoContentDescription* vcd = + webrtc::GetFirstVideoContentDescription(&desc_); vcd->set_bandwidth(-1); - AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_); + AudioContentDescription* acd = + webrtc::GetFirstAudioContentDescription(&desc_); acd->set_bandwidth(-1); ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), jdesc_.session_version())); @@ -2648,16 +2596,16 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutExtmapAllowMixed) { } TEST_F(WebRtcSdpTest, DeserializeMediaContentDescriptionWithExtmapAllowMixed) { - cricket::MediaContentDescription* video_desc = + webrtc::MediaContentDescription* video_desc = jdesc_.description()->GetContentDescriptionByName(kVideoContentName); ASSERT_TRUE(video_desc); - cricket::MediaContentDescription* audio_desc = + webrtc::MediaContentDescription* audio_desc = jdesc_.description()->GetContentDescriptionByName(kAudioContentName); ASSERT_TRUE(audio_desc); video_desc->set_extmap_allow_mixed_enum( - cricket::MediaContentDescription::kMedia); + webrtc::MediaContentDescription::kMedia); audio_desc->set_extmap_allow_mixed_enum( - cricket::MediaContentDescription::kMedia); + 
webrtc::MediaContentDescription::kMedia); std::string sdp_with_extmap_allow_mixed = kSdpFullString; InjectAfter("a=mid:audio_content_name\r\n", kExtmapAllowMixed, @@ -2710,11 +2658,11 @@ TEST_F(WebRtcSdpTest, DeserializeCandidate) { sdp = kSdpTcpActiveCandidate; EXPECT_TRUE(SdpDeserializeCandidate(sdp, &jcandidate)); - // Make a cricket::Candidate equivalent to kSdpTcpCandidate string. - Candidate candidate(ICE_CANDIDATE_COMPONENT_RTP, "tcp", - rtc::SocketAddress("192.168.1.5", 9), kCandidatePriority, - "", "", LOCAL_PORT_TYPE, kCandidateGeneration, - kCandidateFoundation1); + // Make a webrtc::Candidate equivalent to kSdpTcpCandidate string. + Candidate candidate(webrtc::ICE_CANDIDATE_COMPONENT_RTP, "tcp", + webrtc::SocketAddress("192.168.1.5", 9), + kCandidatePriority, "", "", IceCandidateType::kHost, + kCandidateGeneration, kCandidateFoundation1); std::unique_ptr jcandidate_template( new JsepIceCandidate(std::string("audio_content_name"), 0, candidate)); EXPECT_TRUE( @@ -2861,7 +2809,7 @@ TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsButWrongMediaType) { void MutateJsepSctpMaxMessageSize(const SessionDescription& desc, int new_value, JsepSessionDescription* jdesc) { - std::unique_ptr mutant = desc.Clone(); + std::unique_ptr mutant = desc.Clone(); SctpDataContentDescription* dcdesc = mutant->GetContentDescriptionByName(kDataContentName)->as_sctp(); dcdesc->set_max_message_size(new_value); @@ -2965,7 +2913,7 @@ TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelAndUnusualPort) { std::string sdp_with_data = kSdpString; sdp_with_data.append(kSdpSctpDataChannelString); absl::StrReplaceAll( - {{rtc::ToString(kDefaultSctpPort), rtc::ToString(kUnusualSctpPort)}}, + {{absl::StrCat(kDefaultSctpPort), absl::StrCat(kUnusualSctpPort)}}, &sdp_with_data); JsepSessionDescription jdesc_output(kDummyType); @@ -2988,7 +2936,7 @@ TEST_F(WebRtcSdpTest, std::string sdp_with_data = kSdpString; sdp_with_data.append(kSdpSctpDataChannelStringWithSctpPort); absl::StrReplaceAll( - {{rtc::ToString(kDefaultSctpPort), rtc::ToString(kUnusualSctpPort)}}, + {{absl::StrCat(kDefaultSctpPort), absl::StrCat(kUnusualSctpPort)}}, &sdp_with_data); JsepSessionDescription jdesc_output(kDummyType); @@ -3000,7 +2948,8 @@ TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelsAndBandwidth) { bool use_sctpmap = true; AddSctpDataChannel(use_sctpmap); JsepSessionDescription jdesc(kDummyType); - SctpDataContentDescription* dcd = GetFirstSctpDataContentDescription(&desc_); + SctpDataContentDescription* dcd = + webrtc::GetFirstSctpDataContentDescription(&desc_); dcd->set_bandwidth(100 * 1000); ASSERT_TRUE(jdesc.Initialize(desc_.Clone(), kSessionId, kSessionVersion)); @@ -3046,7 +2995,7 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutEndLineBreak) { // Deserialize SdpParseError error; EXPECT_FALSE(webrtc::SdpDeserialize(sdp, &jdesc, &error)); - const std::string lastline = "a=ssrc:3 msid:local_stream_1 video_track_id_1"; + const std::string lastline = "a=ssrc:3 cname:stream_1_cname"; EXPECT_EQ(lastline, error.line); EXPECT_EQ("Invalid SDP line.", error.description); } @@ -3083,12 +3032,12 @@ TEST_F(WebRtcSdpTest, DeserializeSdpWithConferenceFlag) { EXPECT_TRUE(SdpDeserialize(kSdpConferenceString, &jdesc)); // Verify - cricket::AudioContentDescription* audio = - cricket::GetFirstAudioContentDescription(jdesc.description()); + webrtc::AudioContentDescription* audio = + webrtc::GetFirstAudioContentDescription(jdesc.description()); EXPECT_TRUE(audio->conference_mode()); - cricket::VideoContentDescription* video = - 
cricket::GetFirstVideoContentDescription(jdesc.description()); + webrtc::VideoContentDescription* video = + webrtc::GetFirstVideoContentDescription(jdesc.description()); EXPECT_TRUE(video->conference_mode()); } @@ -3102,12 +3051,12 @@ TEST_F(WebRtcSdpTest, SerializeSdpWithConferenceFlag) { EXPECT_TRUE(SdpDeserialize(reserialized, &jdesc)); // Verify. - cricket::AudioContentDescription* audio = - cricket::GetFirstAudioContentDescription(jdesc.description()); + webrtc::AudioContentDescription* audio = + webrtc::GetFirstAudioContentDescription(jdesc.description()); EXPECT_TRUE(audio->conference_mode()); - cricket::VideoContentDescription* video = - cricket::GetFirstVideoContentDescription(jdesc.description()); + webrtc::VideoContentDescription* video = + webrtc::GetFirstVideoContentDescription(jdesc.description()); EXPECT_TRUE(video->conference_mode()); } @@ -3116,16 +3065,16 @@ TEST_F(WebRtcSdpTest, SerializeAndDeserializeRemoteNetEstimate) { // By default remote estimates are disabled. JsepSessionDescription dst(kDummyType); SdpDeserialize(webrtc::SdpSerialize(jdesc_), &dst); - EXPECT_FALSE(cricket::GetFirstVideoContentDescription(dst.description()) + EXPECT_FALSE(webrtc::GetFirstVideoContentDescription(dst.description()) ->remote_estimate()); } { // When remote estimate is enabled, the setting is propagated via SDP. - cricket::GetFirstVideoContentDescription(jdesc_.description()) + webrtc::GetFirstVideoContentDescription(jdesc_.description()) ->set_remote_estimate(true); JsepSessionDescription dst(kDummyType); SdpDeserialize(webrtc::SdpSerialize(jdesc_), &dst); - EXPECT_TRUE(cricket::GetFirstVideoContentDescription(dst.description()) + EXPECT_TRUE(webrtc::GetFirstVideoContentDescription(dst.description()) ->remote_estimate()); } } @@ -3188,8 +3137,6 @@ TEST_F(WebRtcSdpTest, DeserializeSdpWithInvalidAttributeValue) { // ssrc ExpectParseFailure("a=ssrc:1", "a=ssrc:badvalue"); ExpectParseFailure("a=ssrc-group:FEC 2 3", "a=ssrc-group:FEC badvalue 3"); - // crypto - ExpectParseFailure("a=crypto:1 ", "a=crypto:badvalue "); // rtpmap ExpectParseFailure("a=rtpmap:111 ", "a=rtpmap:badvalue "); ExpectParseFailure("opus/48000/2", "opus/badvalue/2"); @@ -3243,7 +3190,7 @@ TEST_F(WebRtcSdpTest, DeserializeSdpWithReorderedPltypes) { EXPECT_TRUE(SdpDeserialize(kSdpWithReorderedPlTypesString, &jdesc_output)); const AudioContentDescription* acd = - GetFirstAudioContentDescription(jdesc_output.description()); + webrtc::GetFirstAudioContentDescription(jdesc_output.description()); ASSERT_TRUE(acd); ASSERT_FALSE(acd->codecs().empty()); EXPECT_EQ("ISAC", acd->codecs()[0].name); @@ -3297,13 +3244,13 @@ TEST_F(WebRtcSdpTest, DeserializeVideoFmtp) { webrtc::SdpDeserialize(kSdpWithFmtpString, &jdesc_output, &error)); const VideoContentDescription* vcd = - GetFirstVideoContentDescription(jdesc_output.description()); + webrtc::GetFirstVideoContentDescription(jdesc_output.description()); ASSERT_TRUE(vcd); ASSERT_FALSE(vcd->codecs().empty()); - cricket::VideoCodec vp8 = vcd->codecs()[0]; + webrtc::Codec vp8 = vcd->codecs()[0]; EXPECT_EQ("VP8", vp8.name); EXPECT_EQ(120, vp8.id); - cricket::CodecParameterMap::iterator found = + webrtc::CodecParameterMap::iterator found = vp8.params.find("x-google-min-bitrate"); ASSERT_TRUE(found != vp8.params.end()); EXPECT_EQ(found->second, "10"); @@ -3331,13 +3278,13 @@ TEST_F(WebRtcSdpTest, DeserializeVideoFmtpWithSprops) { webrtc::SdpDeserialize(kSdpWithFmtpString, &jdesc_output, &error)); const VideoContentDescription* vcd = - 
GetFirstVideoContentDescription(jdesc_output.description()); + webrtc::GetFirstVideoContentDescription(jdesc_output.description()); ASSERT_TRUE(vcd); ASSERT_FALSE(vcd->codecs().empty()); - cricket::VideoCodec h264 = vcd->codecs()[0]; + webrtc::Codec h264 = vcd->codecs()[0]; EXPECT_EQ("H264", h264.name); EXPECT_EQ(98, h264.id); - cricket::CodecParameterMap::const_iterator found = + webrtc::CodecParameterMap::const_iterator found = h264.params.find("profile-level-id"); ASSERT_TRUE(found != h264.params.end()); EXPECT_EQ(found->second, "42A01E"); @@ -3364,13 +3311,13 @@ TEST_F(WebRtcSdpTest, DeserializeVideoFmtpWithSpace) { webrtc::SdpDeserialize(kSdpWithFmtpString, &jdesc_output, &error)); const VideoContentDescription* vcd = - GetFirstVideoContentDescription(jdesc_output.description()); + webrtc::GetFirstVideoContentDescription(jdesc_output.description()); ASSERT_TRUE(vcd); ASSERT_FALSE(vcd->codecs().empty()); - cricket::VideoCodec vp8 = vcd->codecs()[0]; + webrtc::Codec vp8 = vcd->codecs()[0]; EXPECT_EQ("VP8", vp8.name); EXPECT_EQ(120, vp8.id); - cricket::CodecParameterMap::iterator found = + webrtc::CodecParameterMap::iterator found = vp8.params.find("x-google-min-bitrate"); ASSERT_TRUE(found != vp8.params.end()); EXPECT_EQ(found->second, "10"); @@ -3402,35 +3349,36 @@ TEST_F(WebRtcSdpTest, DeserializePacketizationAttributeWithIllegalValue) { &error)); AudioContentDescription* acd = - GetFirstAudioContentDescription(jdesc_output.description()); + webrtc::GetFirstAudioContentDescription(jdesc_output.description()); ASSERT_TRUE(acd); ASSERT_THAT(acd->codecs(), testing::SizeIs(1)); - cricket::AudioCodec opus = acd->codecs()[0]; + webrtc::Codec opus = acd->codecs()[0]; EXPECT_EQ(opus.name, "opus"); EXPECT_EQ(opus.id, 111); const VideoContentDescription* vcd = - GetFirstVideoContentDescription(jdesc_output.description()); + webrtc::GetFirstVideoContentDescription(jdesc_output.description()); ASSERT_TRUE(vcd); ASSERT_THAT(vcd->codecs(), testing::SizeIs(3)); - cricket::VideoCodec vp8 = vcd->codecs()[0]; + webrtc::Codec vp8 = vcd->codecs()[0]; EXPECT_EQ(vp8.name, "VP8"); EXPECT_EQ(vp8.id, 120); EXPECT_EQ(vp8.packetization, "raw"); - cricket::VideoCodec vp9 = vcd->codecs()[1]; + webrtc::Codec vp9 = vcd->codecs()[1]; EXPECT_EQ(vp9.name, "VP9"); EXPECT_EQ(vp9.id, 121); - EXPECT_EQ(vp9.packetization, absl::nullopt); - cricket::VideoCodec h264 = vcd->codecs()[2]; + EXPECT_EQ(vp9.packetization, std::nullopt); + webrtc::Codec h264 = vcd->codecs()[2]; EXPECT_EQ(h264.name, "H264"); EXPECT_EQ(h264.id, 122); - EXPECT_EQ(h264.packetization, absl::nullopt); + EXPECT_EQ(h264.packetization, std::nullopt); } TEST_F(WebRtcSdpTest, SerializeAudioFmtpWithUnknownParameter) { - AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_); + AudioContentDescription* acd = + webrtc::GetFirstAudioContentDescription(&desc_); - cricket::AudioCodecs codecs = acd->codecs(); + webrtc::Codecs codecs = acd->codecs(); codecs[0].params["unknown-future-parameter"] = "SomeFutureValue"; acd->set_codecs(codecs); @@ -3445,9 +3393,10 @@ TEST_F(WebRtcSdpTest, SerializeAudioFmtpWithUnknownParameter) { } TEST_F(WebRtcSdpTest, SerializeAudioFmtpWithKnownFmtpParameter) { - AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_); + AudioContentDescription* acd = + webrtc::GetFirstAudioContentDescription(&desc_); - cricket::AudioCodecs codecs = acd->codecs(); + webrtc::Codecs codecs = acd->codecs(); codecs[0].params["stereo"] = "1"; acd->set_codecs(codecs); @@ -3461,9 +3410,10 @@ TEST_F(WebRtcSdpTest, 
SerializeAudioFmtpWithKnownFmtpParameter) { } TEST_F(WebRtcSdpTest, SerializeAudioFmtpWithPTimeAndMaxPTime) { - AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_); + AudioContentDescription* acd = + webrtc::GetFirstAudioContentDescription(&desc_); - cricket::AudioCodecs codecs = acd->codecs(); + webrtc::Codecs codecs = acd->codecs(); codecs[0].params["ptime"] = "20"; codecs[0].params["maxptime"] = "120"; acd->set_codecs(codecs); @@ -3480,11 +3430,12 @@ TEST_F(WebRtcSdpTest, SerializeAudioFmtpWithPTimeAndMaxPTime) { } TEST_F(WebRtcSdpTest, SerializeAudioFmtpWithTelephoneEvent) { - AudioContentDescription* acd = GetFirstAudioContentDescription(&desc_); + AudioContentDescription* acd = + webrtc::GetFirstAudioContentDescription(&desc_); - cricket::AudioCodecs codecs = acd->codecs(); - cricket::AudioCodec dtmf = - cricket::CreateAudioCodec(105, "telephone-event", 8000, 1); + webrtc::Codecs codecs = acd->codecs(); + webrtc::Codec dtmf = + webrtc::CreateAudioCodec(105, "telephone-event", 8000, 1); dtmf.params[""] = "0-15"; codecs.push_back(dtmf); acd->set_codecs(codecs); @@ -3503,9 +3454,10 @@ TEST_F(WebRtcSdpTest, SerializeAudioFmtpWithTelephoneEvent) { } TEST_F(WebRtcSdpTest, SerializeVideoFmtp) { - VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_); + VideoContentDescription* vcd = + webrtc::GetFirstVideoContentDescription(&desc_); - cricket::VideoCodecs codecs = vcd->codecs(); + webrtc::Codecs codecs = vcd->codecs(); codecs[0].params["x-google-min-bitrate"] = "10"; vcd->set_codecs(codecs); @@ -3519,9 +3471,10 @@ TEST_F(WebRtcSdpTest, SerializeVideoFmtp) { } TEST_F(WebRtcSdpTest, SerializeVideoPacketizationAttribute) { - VideoContentDescription* vcd = GetFirstVideoContentDescription(&desc_); + VideoContentDescription* vcd = + webrtc::GetFirstVideoContentDescription(&desc_); - cricket::VideoCodecs codecs = vcd->codecs(); + webrtc::Codecs codecs = vcd->codecs(); codecs[0].packetization = "raw"; vcd->set_codecs(codecs); @@ -3539,24 +3492,24 @@ TEST_F(WebRtcSdpTest, DeserializeAndSerializeSdpWithIceLite) { JsepSessionDescription jdesc_with_icelite(kDummyType); std::string sdp_with_icelite = kSdpFullString; EXPECT_TRUE(SdpDeserialize(sdp_with_icelite, &jdesc_with_icelite)); - cricket::SessionDescription* desc = jdesc_with_icelite.description(); - const cricket::TransportInfo* tinfo1 = + webrtc::SessionDescription* desc = jdesc_with_icelite.description(); + const webrtc::TransportInfo* tinfo1 = desc->GetTransportInfoByName("audio_content_name"); - EXPECT_EQ(cricket::ICEMODE_FULL, tinfo1->description.ice_mode); - const cricket::TransportInfo* tinfo2 = + EXPECT_EQ(webrtc::ICEMODE_FULL, tinfo1->description.ice_mode); + const webrtc::TransportInfo* tinfo2 = desc->GetTransportInfoByName("video_content_name"); - EXPECT_EQ(cricket::ICEMODE_FULL, tinfo2->description.ice_mode); + EXPECT_EQ(webrtc::ICEMODE_FULL, tinfo2->description.ice_mode); // Add "a=ice-lite" and deserialize, making sure it's ICE lite. 
InjectAfter(kSessionTime, "a=ice-lite\r\n", &sdp_with_icelite); EXPECT_TRUE(SdpDeserialize(sdp_with_icelite, &jdesc_with_icelite)); desc = jdesc_with_icelite.description(); - const cricket::TransportInfo* atinfo = + const webrtc::TransportInfo* atinfo = desc->GetTransportInfoByName("audio_content_name"); - EXPECT_EQ(cricket::ICEMODE_LITE, atinfo->description.ice_mode); - const cricket::TransportInfo* vtinfo = + EXPECT_EQ(webrtc::ICEMODE_LITE, atinfo->description.ice_mode); + const webrtc::TransportInfo* vtinfo = desc->GetTransportInfoByName("video_content_name"); - EXPECT_EQ(cricket::ICEMODE_LITE, vtinfo->description.ice_mode); + EXPECT_EQ(webrtc::ICEMODE_LITE, vtinfo->description.ice_mode); // Now that we know deserialization works, we can use TestSerialize to test // serialization. @@ -3575,20 +3528,19 @@ TEST_F(WebRtcSdpTest, RoundTripSdpWithSctpDataChannelsWithCandidates) { } TEST_F(WebRtcSdpTest, SerializeDtlsSetupAttribute) { - AddFingerprint(); TransportInfo audio_transport_info = *(desc_.GetTransportInfoByName(kAudioContentName)); - EXPECT_EQ(cricket::CONNECTIONROLE_NONE, + EXPECT_EQ(webrtc::CONNECTIONROLE_NONE, audio_transport_info.description.connection_role); audio_transport_info.description.connection_role = - cricket::CONNECTIONROLE_ACTIVE; + webrtc::CONNECTIONROLE_ACTIVE; TransportInfo video_transport_info = *(desc_.GetTransportInfoByName(kVideoContentName)); - EXPECT_EQ(cricket::CONNECTIONROLE_NONE, + EXPECT_EQ(webrtc::CONNECTIONROLE_NONE, video_transport_info.description.connection_role); video_transport_info.description.connection_role = - cricket::CONNECTIONROLE_ACTIVE; + webrtc::CONNECTIONROLE_ACTIVE; desc_.RemoveTransportInfoByName(kAudioContentName); desc_.RemoveTransportInfoByName(kVideoContentName); @@ -3601,9 +3553,6 @@ TEST_F(WebRtcSdpTest, SerializeDtlsSetupAttribute) { std::string message = webrtc::SdpSerialize(jdesc_); std::string sdp_with_dtlssetup = kSdpFullString; - // Fingerprint attribute is necessary to add DTLS setup attribute. - InjectAfter(kAttributeIcePwdVoice, kFingerprint, &sdp_with_dtlssetup); - InjectAfter(kAttributeIcePwdVideo, kFingerprint, &sdp_with_dtlssetup); // Now adding `setup` attribute. 
InjectAfter(kFingerprint, "a=setup:active\r\n", &sdp_with_dtlssetup); EXPECT_EQ(sdp_with_dtlssetup, message); @@ -3614,14 +3563,14 @@ TEST_F(WebRtcSdpTest, DeserializeDtlsSetupAttributeActpass) { std::string sdp_with_dtlssetup = kSdpFullString; InjectAfter(kSessionTime, "a=setup:actpass\r\n", &sdp_with_dtlssetup); EXPECT_TRUE(SdpDeserialize(sdp_with_dtlssetup, &jdesc_with_dtlssetup)); - cricket::SessionDescription* desc = jdesc_with_dtlssetup.description(); - const cricket::TransportInfo* atinfo = + webrtc::SessionDescription* desc = jdesc_with_dtlssetup.description(); + const webrtc::TransportInfo* atinfo = desc->GetTransportInfoByName("audio_content_name"); - EXPECT_EQ(cricket::CONNECTIONROLE_ACTPASS, + EXPECT_EQ(webrtc::CONNECTIONROLE_ACTPASS, atinfo->description.connection_role); - const cricket::TransportInfo* vtinfo = + const webrtc::TransportInfo* vtinfo = desc->GetTransportInfoByName("video_content_name"); - EXPECT_EQ(cricket::CONNECTIONROLE_ACTPASS, + EXPECT_EQ(webrtc::CONNECTIONROLE_ACTPASS, vtinfo->description.connection_role); } @@ -3630,29 +3579,27 @@ TEST_F(WebRtcSdpTest, DeserializeDtlsSetupAttributeActive) { std::string sdp_with_dtlssetup = kSdpFullString; InjectAfter(kSessionTime, "a=setup:active\r\n", &sdp_with_dtlssetup); EXPECT_TRUE(SdpDeserialize(sdp_with_dtlssetup, &jdesc_with_dtlssetup)); - cricket::SessionDescription* desc = jdesc_with_dtlssetup.description(); - const cricket::TransportInfo* atinfo = + webrtc::SessionDescription* desc = jdesc_with_dtlssetup.description(); + const webrtc::TransportInfo* atinfo = desc->GetTransportInfoByName("audio_content_name"); - EXPECT_EQ(cricket::CONNECTIONROLE_ACTIVE, - atinfo->description.connection_role); - const cricket::TransportInfo* vtinfo = + EXPECT_EQ(webrtc::CONNECTIONROLE_ACTIVE, atinfo->description.connection_role); + const webrtc::TransportInfo* vtinfo = desc->GetTransportInfoByName("video_content_name"); - EXPECT_EQ(cricket::CONNECTIONROLE_ACTIVE, - vtinfo->description.connection_role); + EXPECT_EQ(webrtc::CONNECTIONROLE_ACTIVE, vtinfo->description.connection_role); } TEST_F(WebRtcSdpTest, DeserializeDtlsSetupAttributePassive) { JsepSessionDescription jdesc_with_dtlssetup(kDummyType); std::string sdp_with_dtlssetup = kSdpFullString; InjectAfter(kSessionTime, "a=setup:passive\r\n", &sdp_with_dtlssetup); EXPECT_TRUE(SdpDeserialize(sdp_with_dtlssetup, &jdesc_with_dtlssetup)); - cricket::SessionDescription* desc = jdesc_with_dtlssetup.description(); - const cricket::TransportInfo* atinfo = + webrtc::SessionDescription* desc = jdesc_with_dtlssetup.description(); + const webrtc::TransportInfo* atinfo = desc->GetTransportInfoByName("audio_content_name"); - EXPECT_EQ(cricket::CONNECTIONROLE_PASSIVE, + EXPECT_EQ(webrtc::CONNECTIONROLE_PASSIVE, atinfo->description.connection_role); - const cricket::TransportInfo* vtinfo = + const webrtc::TransportInfo* vtinfo = desc->GetTransportInfoByName("video_content_name"); - EXPECT_EQ(cricket::CONNECTIONROLE_PASSIVE, + EXPECT_EQ(webrtc::CONNECTIONROLE_PASSIVE, vtinfo->description.connection_role); } @@ -3662,9 +3609,9 @@ TEST_F(WebRtcSdpTest, MediaContentOrderMaintainedRoundTrip) { JsepSessionDescription jdesc(kDummyType); const std::string media_content_sdps[3] = {kSdpAudioString, kSdpVideoString, kSdpSctpDataChannelString}; - const cricket::MediaType media_types[3] = {cricket::MEDIA_TYPE_AUDIO, - cricket::MEDIA_TYPE_VIDEO, - cricket::MEDIA_TYPE_DATA}; + const webrtc::MediaType media_types[3] = {webrtc::MediaType::AUDIO, + webrtc::MediaType::VIDEO, + webrtc::MediaType::DATA}; // 
Verifies all 6 permutations. for (size_t i = 0; i < 6; ++i) { @@ -3677,17 +3624,17 @@ TEST_F(WebRtcSdpTest, MediaContentOrderMaintainedRoundTrip) { media_content_in_sdp[2] = (media_content_in_sdp[0] + (i + 1) % 2 + 1) % 3; std::string sdp_string = kSdpSessionString; - for (size_t i = 0; i < 3; ++i) - sdp_string += media_content_sdps[media_content_in_sdp[i]]; + for (size_t j = 0; j < 3; ++j) + sdp_string += media_content_sdps[media_content_in_sdp[j]]; EXPECT_TRUE(SdpDeserialize(sdp_string, &jdesc)); - cricket::SessionDescription* desc = jdesc.description(); + webrtc::SessionDescription* desc = jdesc.description(); EXPECT_EQ(3u, desc->contents().size()); - for (size_t i = 0; i < 3; ++i) { - const cricket::MediaContentDescription* mdesc = - desc->contents()[i].media_description(); - EXPECT_EQ(media_types[media_content_in_sdp[i]], mdesc->type()); + for (size_t j = 0; j < 3; ++j) { + const webrtc::MediaContentDescription* mdesc = + desc->contents()[j].media_description(); + EXPECT_EQ(media_types[media_content_in_sdp[j]], mdesc->type()); } std::string serialized_sdp = webrtc::SdpSerialize(jdesc); @@ -3761,16 +3708,17 @@ TEST_F(WebRtcSdpTest, SerializeUnifiedPlanSessionDescription) { TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionSpecialMsid) { // Create both msid lines for Plan B and Unified Plan support. MakeUnifiedPlanDescriptionMultipleStreamIds( - cricket::kMsidSignalingMediaSection | - cricket::kMsidSignalingSsrcAttribute); + webrtc::kMsidSignalingMediaSection | webrtc::kMsidSignalingSsrcAttribute | + webrtc::kMsidSignalingSemantic); JsepSessionDescription deserialized_description(kDummyType); EXPECT_TRUE(SdpDeserialize(kUnifiedPlanSdpFullStringWithSpecialMsid, &deserialized_description)); EXPECT_TRUE(CompareSessionDescription(jdesc_, deserialized_description)); - EXPECT_EQ(cricket::kMsidSignalingMediaSection | - cricket::kMsidSignalingSsrcAttribute, + EXPECT_EQ(webrtc::kMsidSignalingMediaSection | + webrtc::kMsidSignalingSsrcAttribute | + webrtc::kMsidSignalingSemantic, deserialized_description.description()->msid_signaling()); } @@ -3781,8 +3729,8 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionSpecialMsid) { TEST_F(WebRtcSdpTest, SerializeSessionDescriptionSpecialMsid) { // Create both msid lines for Plan B and Unified Plan support. MakeUnifiedPlanDescriptionMultipleStreamIds( - cricket::kMsidSignalingMediaSection | - cricket::kMsidSignalingSsrcAttribute); + webrtc::kMsidSignalingMediaSection | webrtc::kMsidSignalingSsrcAttribute | + webrtc::kMsidSignalingSemantic); std::string serialized_sdp = webrtc::SdpSerialize(jdesc_); // We explicitly test that the serialized SDP string is equal to the hard // coded SDP string. This is necessary, because in the parser "a=msid" lines @@ -3798,7 +3746,7 @@ TEST_F(WebRtcSdpTest, SerializeSessionDescriptionSpecialMsid) { TEST_F(WebRtcSdpTest, UnifiedPlanDeserializeSessionDescriptionSpecialMsid) { // Only create a=msid lines for strictly Unified Plan stream ID support. MakeUnifiedPlanDescriptionMultipleStreamIds( - cricket::kMsidSignalingMediaSection); + webrtc::kMsidSignalingMediaSection | webrtc::kMsidSignalingSemantic); JsepSessionDescription deserialized_description(kDummyType); std::string unified_plan_sdp_string = @@ -3816,7 +3764,7 @@ TEST_F(WebRtcSdpTest, UnifiedPlanDeserializeSessionDescriptionSpecialMsid) { TEST_F(WebRtcSdpTest, UnifiedPlanSerializeSessionDescriptionSpecialMsid) { // Only create a=msid lines for strictly Unified Plan stream ID support. 
MakeUnifiedPlanDescriptionMultipleStreamIds( - cricket::kMsidSignalingMediaSection); + webrtc::kMsidSignalingMediaSection | webrtc::kMsidSignalingSemantic); TestSerialize(jdesc_); } @@ -3848,7 +3796,8 @@ TEST_F(WebRtcSdpTest, SerializeUnifiedPlanSessionDescriptionNoSsrcSignaling) { TEST_F(WebRtcSdpTest, EmptyDescriptionHasNoMsidSignaling) { JsepSessionDescription jsep_desc(kDummyType); ASSERT_TRUE(SdpDeserialize(kSdpSessionString, &jsep_desc)); - EXPECT_EQ(0, jsep_desc.description()->msid_signaling()); + EXPECT_EQ(webrtc::kMsidSignalingSemantic, + jsep_desc.description()->msid_signaling()); } TEST_F(WebRtcSdpTest, DataChannelOnlyHasNoMsidSignaling) { @@ -3856,20 +3805,22 @@ TEST_F(WebRtcSdpTest, DataChannelOnlyHasNoMsidSignaling) { std::string sdp = kSdpSessionString; sdp += kSdpSctpDataChannelString; ASSERT_TRUE(SdpDeserialize(sdp, &jsep_desc)); - EXPECT_EQ(0, jsep_desc.description()->msid_signaling()); + EXPECT_EQ(webrtc::kMsidSignalingSemantic, + jsep_desc.description()->msid_signaling()); } TEST_F(WebRtcSdpTest, PlanBHasSsrcAttributeMsidSignaling) { JsepSessionDescription jsep_desc(kDummyType); ASSERT_TRUE(SdpDeserialize(kPlanBSdpFullString, &jsep_desc)); - EXPECT_EQ(cricket::kMsidSignalingSsrcAttribute, - jsep_desc.description()->msid_signaling()); + EXPECT_EQ( + webrtc::kMsidSignalingSsrcAttribute | webrtc::kMsidSignalingSemantic, + jsep_desc.description()->msid_signaling()); } TEST_F(WebRtcSdpTest, UnifiedPlanHasMediaSectionMsidSignaling) { JsepSessionDescription jsep_desc(kDummyType); ASSERT_TRUE(SdpDeserialize(kUnifiedPlanSdpFullString, &jsep_desc)); - EXPECT_EQ(cricket::kMsidSignalingMediaSection, + EXPECT_EQ(webrtc::kMsidSignalingMediaSection | webrtc::kMsidSignalingSemantic, jsep_desc.description()->msid_signaling()); } @@ -3878,7 +3829,7 @@ const char kSsrcAttributeMsidLine[] = "a=ssrc:1 msid:local_stream_1 audio_track_id_1"; TEST_F(WebRtcSdpTest, SerializeOnlyMediaSectionMsid) { - jdesc_.description()->set_msid_signaling(cricket::kMsidSignalingMediaSection); + jdesc_.description()->set_msid_signaling(webrtc::kMsidSignalingMediaSection); std::string sdp = webrtc::SdpSerialize(jdesc_); EXPECT_NE(std::string::npos, sdp.find(kMediaSectionMsidLine)); @@ -3886,8 +3837,7 @@ TEST_F(WebRtcSdpTest, SerializeOnlyMediaSectionMsid) { } TEST_F(WebRtcSdpTest, SerializeOnlySsrcAttributeMsid) { - jdesc_.description()->set_msid_signaling( - cricket::kMsidSignalingSsrcAttribute); + jdesc_.description()->set_msid_signaling(webrtc::kMsidSignalingSsrcAttribute); std::string sdp = webrtc::SdpSerialize(jdesc_); EXPECT_EQ(std::string::npos, sdp.find(kMediaSectionMsidLine)); @@ -3895,15 +3845,21 @@ TEST_F(WebRtcSdpTest, SerializeOnlySsrcAttributeMsid) { } TEST_F(WebRtcSdpTest, SerializeBothMediaSectionAndSsrcAttributeMsid) { - jdesc_.description()->set_msid_signaling( - cricket::kMsidSignalingMediaSection | - cricket::kMsidSignalingSsrcAttribute); + jdesc_.description()->set_msid_signaling(webrtc::kMsidSignalingMediaSection | + webrtc::kMsidSignalingSsrcAttribute); std::string sdp = webrtc::SdpSerialize(jdesc_); EXPECT_NE(std::string::npos, sdp.find(kMediaSectionMsidLine)); EXPECT_NE(std::string::npos, sdp.find(kSsrcAttributeMsidLine)); } +TEST_F(WebRtcSdpTest, SerializeWithoutMsidSemantics) { + jdesc_.description()->set_msid_signaling(webrtc::kMsidSignalingNotUsed); + std::string sdp = webrtc::SdpSerialize(jdesc_); + + EXPECT_EQ(std::string::npos, sdp.find("a=msid-semantic:")); +} + // Regression test for integer overflow bug: // https://bugs.chromium.org/p/chromium/issues/detail?id=648071 
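// Editorial sketch, not part of the patch: how the msid-signaling bit flags
// exercised by the tests above are expected to combine after this change.
// The names mirror the webrtc::kMsidSignaling* constants referenced in the
// diff; the numeric values below are placeholders for illustration only,
// not the real definitions.
namespace msid_sketch {

constexpr int kMsidSignalingNotUsed = 0;        // placeholder value
constexpr int kMsidSignalingMediaSection = 1;   // placeholder value
constexpr int kMsidSignalingSsrcAttribute = 2;  // placeholder value
constexpr int kMsidSignalingSemantic = 4;       // placeholder value

// In the deserialize expectations above, kMsidSignalingSemantic is set
// alongside a bit for each msid mechanism actually present in the SDP
// (a=msid in the media section for Unified Plan, a=ssrc ... msid for Plan B).
int ExpectedMsidSignaling(bool has_media_section_msid, bool has_ssrc_msid) {
  int flags = kMsidSignalingSemantic;
  if (has_media_section_msid) {
    flags |= kMsidSignalingMediaSection;
  }
  if (has_ssrc_msid) {
    flags |= kMsidSignalingSsrcAttribute;
  }
  return flags;
}

// Matching SerializeWithoutMsidSemantics above: the a=msid-semantic: line is
// only written when the signaling flags are not kMsidSignalingNotUsed.
bool EmitsMsidSemanticLine(int msid_signaling) {
  return msid_signaling != kMsidSignalingNotUsed;
}

}  // namespace msid_sketch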
TEST_F(WebRtcSdpTest, DeserializeLargeBandwidthLimit) { @@ -3953,9 +3909,9 @@ TEST_F(WebRtcSdpTest, BandwidthLimitOfNegativeOneIgnored) { JsepSessionDescription jdesc_output(kDummyType); EXPECT_TRUE(SdpDeserialize(kSdpWithBandwidthOfNegativeOne, &jdesc_output)); const VideoContentDescription* vcd = - GetFirstVideoContentDescription(jdesc_output.description()); + webrtc::GetFirstVideoContentDescription(jdesc_output.description()); ASSERT_TRUE(vcd); - EXPECT_EQ(cricket::kAutoBandwidth, vcd->bandwidth()); + EXPECT_EQ(webrtc::kAutoBandwidth, vcd->bandwidth()); } // Test that "ufrag"/"pwd" in the candidate line itself are ignored, and only @@ -3983,7 +3939,7 @@ TEST_F(WebRtcSdpTest, IceCredentialsInCandidateStringIgnored) { const IceCandidateCollection* candidates = jdesc_output.candidates(0); ASSERT_NE(nullptr, candidates); ASSERT_EQ(1U, candidates->count()); - cricket::Candidate c = candidates->at(0)->candidate(); + webrtc::Candidate c = candidates->at(0)->candidate(); EXPECT_EQ("ufrag_voice", c.username()); EXPECT_EQ("pwd_voice", c.password()); } @@ -4012,7 +3968,7 @@ TEST_F(WebRtcSdpTest, AttributeWithPartialMatchingNameIsIgnored) { const IceCandidateCollection* candidates = jdesc_output.candidates(0); ASSERT_NE(nullptr, candidates); ASSERT_EQ(1U, candidates->count()); - cricket::Candidate c = candidates->at(0)->candidate(); + webrtc::Candidate c = candidates->at(0)->candidate(); EXPECT_EQ("ufrag_voice", c.username()); EXPECT_EQ("pwd_voice", c.password()); } @@ -4330,6 +4286,27 @@ TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithMissingStreamId) { EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output)); } +TEST_F(WebRtcSdpTest, DeserializeMsidAttributeWithDuplicateStreamIdAndTrackId) { + std::string sdp = + "v=0\r\n" + "o=- 18446744069414584320 18446462598732840960 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "a=mid:0\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id track_id\r\n" + "m=audio 9 RTP/SAVPF 111\r\n" + "a=mid:1\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtpmap:111 opus/48000/2\r\n" + "a=msid:stream_id track_id\r\n"; + + JsepSessionDescription jdesc_output(kDummyType); + EXPECT_FALSE(SdpDeserialize(sdp, &jdesc_output)); +} + // Tests that if both session-level address and media-level address exist, use // the media-level address. TEST_F(WebRtcSdpTest, ParseConnectionData) { @@ -4470,16 +4447,15 @@ TEST_F(WebRtcSdpTest, DeserializeEmptySessionName) { // Simulcast malformed input test for invalid format. TEST_F(WebRtcSdpTest, DeserializeSimulcastNegative_EmptyAttribute) { - ExpectParseFailureWithNewLines( - "a=ssrc:3 msid:local_stream_1 video_track_id_1\r\n", "a=simulcast:\r\n", - "a=simulcast:"); + ExpectParseFailureWithNewLines("a=ssrc:3 cname:stream_1_cname\r\n", + "a=simulcast:\r\n", "a=simulcast:"); } // Tests that duplicate simulcast entries in the SDP triggers a parse failure. 
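// Editorial sketch, not part of the patch: the b= line handling that the
// bandwidth tests earlier in this file exercise. The function name and the
// kAutoBandwidthSketch constant are stand-ins invented for this sketch; per
// the tests, "b=AS:" values are kilobits per second (stored as bps),
// "b=TIAS:" values are already bits per second, and negative or unknown
// values fall back to the automatic setting.
#include <string>

namespace bandwidth_sketch {

constexpr int kAutoBandwidthSketch = -1;  // stand-in for webrtc::kAutoBandwidth

int BandwidthFromBLine(const std::string& modifier, int value) {
  if (value < 0) {
    return kAutoBandwidthSketch;  // e.g. "b=AS:-1" is ignored.
  }
  if (modifier == "AS") {
    return value * 1000;  // "b=AS:50" -> 50000 bps.
  }
  if (modifier == "TIAS") {
    return value;  // "b=TIAS:50000" -> 50000 bps.
  }
  return kAutoBandwidthSketch;  // Unknown modifiers are ignored.
}

}  // namespace bandwidth_sketch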
TEST_F(WebRtcSdpTest, DeserializeSimulcastNegative_DuplicateAttribute) { - ExpectParseFailureWithNewLines( - "a=ssrc:3 msid:local_stream_1 video_track_id_1\r\n", - "a=simulcast:send 1\r\na=simulcast:recv 2\r\n", "a=simulcast:"); + ExpectParseFailureWithNewLines("a=ssrc:3 cname:stream_1_cname\r\n", + "a=simulcast:send 1\r\na=simulcast:recv 2\r\n", + "a=simulcast:"); } // Validates that deserialization uses the a=simulcast: attribute @@ -4495,8 +4471,8 @@ TEST_F(WebRtcSdpTest, TestDeserializeSimulcastAttribute) { JsepSessionDescription output(kDummyType); SdpParseError error; EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error)); - const cricket::ContentInfos& contents = output.description()->contents(); - const cricket::MediaContentDescription* media = + const webrtc::ContentInfos& contents = output.description()->contents(); + const webrtc::MediaContentDescription* media = contents.back().media_description(); EXPECT_TRUE(media->HasSimulcast()); EXPECT_EQ(2ul, media->simulcast_description().send_layers().size()); @@ -4516,8 +4492,8 @@ TEST_F(WebRtcSdpTest, TestDeserializeSimulcastAttributeRemovesUnknownRids) { JsepSessionDescription output(kDummyType); SdpParseError error; EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error)); - const cricket::ContentInfos& contents = output.description()->contents(); - const cricket::MediaContentDescription* media = + const webrtc::ContentInfos& contents = output.description()->contents(); + const webrtc::MediaContentDescription* media = contents.back().media_description(); EXPECT_TRUE(media->HasSimulcast()); const SimulcastDescription& simulcast = media->simulcast_description(); @@ -4554,8 +4530,8 @@ TEST_F(WebRtcSdpTest, JsepSessionDescription output(kDummyType); SdpParseError error; EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error)); - const cricket::ContentInfos& contents = output.description()->contents(); - const cricket::MediaContentDescription* media = + const webrtc::ContentInfos& contents = output.description()->contents(); + const webrtc::MediaContentDescription* media = contents.back().media_description(); EXPECT_TRUE(media->HasSimulcast()); const SimulcastDescription& simulcast = media->simulcast_description(); @@ -4580,8 +4556,8 @@ TEST_F(WebRtcSdpTest, TestDeserializeIgnoresEmptyRidLines) { JsepSessionDescription output(kDummyType); SdpParseError error; EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error)); - const cricket::ContentInfos& contents = output.description()->contents(); - const cricket::MediaContentDescription* media = + const webrtc::ContentInfos& contents = output.description()->contents(); + const webrtc::MediaContentDescription* media = contents.back().media_description(); EXPECT_TRUE(media->HasSimulcast()); const SimulcastDescription& simulcast = media->simulcast_description(); @@ -4606,8 +4582,8 @@ TEST_F(WebRtcSdpTest, TestDeserializeIgnoresMalformedRidLines) { JsepSessionDescription output(kDummyType); SdpParseError error; EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error)); - const cricket::ContentInfos& contents = output.description()->contents(); - const cricket::MediaContentDescription* media = + const webrtc::ContentInfos& contents = output.description()->contents(); + const webrtc::MediaContentDescription* media = contents.back().media_description(); EXPECT_TRUE(media->HasSimulcast()); const SimulcastDescription& simulcast = media->simulcast_description(); @@ -4620,30 +4596,32 @@ TEST_F(WebRtcSdpTest, TestDeserializeIgnoresMalformedRidLines) { CompareRidDescriptionIds(rids, {"5"}); } -// 
Removes RIDs that specify a different format than the m= section. -TEST_F(WebRtcSdpTest, TestDeserializeRemovesRidsWithInvalidCodec) { +// Ignores codecs from RIDs where the PTs are missing from the m= section. +TEST_F(WebRtcSdpTest, TestDeserializeIgnoresInvalidPayloadTypesInRid) { std::string sdp = kUnifiedPlanSdpFullStringNoSsrc; - sdp += "a=rid:1 send pt=121,120\r\n"; // Should remove 121 and keep RID. - sdp += "a=rid:2 send pt=121\r\n"; // Should remove RID altogether. + sdp += "a=rid:1 send pt=121,120\r\n"; // Should remove 121 and keep 120. + sdp += "a=rid:2 send pt=121\r\n"; // Should remove 121. sdp += "a=simulcast:send 1;2\r\n"; JsepSessionDescription output(kDummyType); SdpParseError error; EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error)); - const cricket::ContentInfos& contents = output.description()->contents(); - const cricket::MediaContentDescription* media = + const webrtc::ContentInfos& contents = output.description()->contents(); + const webrtc::MediaContentDescription* media = contents.back().media_description(); EXPECT_TRUE(media->HasSimulcast()); const SimulcastDescription& simulcast = media->simulcast_description(); EXPECT_TRUE(simulcast.receive_layers().empty()); - EXPECT_EQ(1ul, simulcast.send_layers().size()); - EXPECT_EQ(1ul, simulcast.send_layers().GetAllLayers().size()); + EXPECT_EQ(2ul, simulcast.send_layers().size()); + EXPECT_EQ(2ul, simulcast.send_layers().GetAllLayers().size()); EXPECT_EQ("1", simulcast.send_layers()[0][0].rid); EXPECT_EQ(1ul, media->streams().size()); const std::vector& rids = media->streams()[0].rids(); - EXPECT_EQ(1ul, rids.size()); + EXPECT_EQ(2ul, rids.size()); EXPECT_EQ("1", rids[0].rid); - EXPECT_EQ(1ul, rids[0].payload_types.size()); - EXPECT_EQ(120, rids[0].payload_types[0]); + EXPECT_EQ(1ul, rids[0].codecs.size()); + EXPECT_EQ(120, rids[0].codecs[0].id); + EXPECT_EQ("2", rids[1].rid); + EXPECT_EQ(0ul, rids[1].codecs.size()); } // Ignores duplicate rid lines @@ -4658,8 +4636,8 @@ TEST_F(WebRtcSdpTest, TestDeserializeIgnoresDuplicateRidLines) { JsepSessionDescription output(kDummyType); SdpParseError error; EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error)); - const cricket::ContentInfos& contents = output.description()->contents(); - const cricket::MediaContentDescription* media = + const webrtc::ContentInfos& contents = output.description()->contents(); + const webrtc::MediaContentDescription* media = contents.back().media_description(); EXPECT_TRUE(media->HasSimulcast()); const SimulcastDescription& simulcast = media->simulcast_description(); @@ -4681,8 +4659,8 @@ TEST_F(WebRtcSdpTest, TestDeserializeRidSendDirection) { JsepSessionDescription output(kDummyType); SdpParseError error; EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error)); - const cricket::ContentInfos& contents = output.description()->contents(); - const cricket::MediaContentDescription* media = + const webrtc::ContentInfos& contents = output.description()->contents(); + const webrtc::MediaContentDescription* media = contents.back().media_description(); EXPECT_FALSE(media->HasSimulcast()); } @@ -4695,8 +4673,8 @@ TEST_F(WebRtcSdpTest, TestDeserializeRidRecvDirection) { JsepSessionDescription output(kDummyType); SdpParseError error; EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error)); - const cricket::ContentInfos& contents = output.description()->contents(); - const cricket::MediaContentDescription* media = + const webrtc::ContentInfos& contents = output.description()->contents(); + const webrtc::MediaContentDescription* media = 
contents.back().media_description(); EXPECT_FALSE(media->HasSimulcast()); } @@ -4713,8 +4691,8 @@ TEST_F(WebRtcSdpTest, TestDeserializeIgnoresWrongRidDirectionLines) { JsepSessionDescription output(kDummyType); SdpParseError error; EXPECT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error)); - const cricket::ContentInfos& contents = output.description()->contents(); - const cricket::MediaContentDescription* media = + const webrtc::ContentInfos& contents = output.description()->contents(); + const webrtc::MediaContentDescription* media = contents.back().media_description(); EXPECT_TRUE(media->HasSimulcast()); const SimulcastDescription& simulcast = media->simulcast_description(); @@ -4774,8 +4752,8 @@ TEST_F(WebRtcSdpTest, ParseNoMid) { ASSERT_TRUE(webrtc::SdpDeserialize(sdp, &output, &error)); EXPECT_THAT(output.description()->contents(), - ElementsAre(Field("name", &cricket::ContentInfo::name, ""), - Field("name", &cricket::ContentInfo::name, ""))); + ElementsAre(Property("name", &webrtc::ContentInfo::mid, ""), + Property("name", &webrtc::ContentInfo::mid, ""))); } TEST_F(WebRtcSdpTest, SerializeWithDefaultSctpProtocol) { @@ -4783,15 +4761,14 @@ TEST_F(WebRtcSdpTest, SerializeWithDefaultSctpProtocol) { JsepSessionDescription jsep_desc(kDummyType); MakeDescriptionWithoutCandidates(&jsep_desc); std::string message = webrtc::SdpSerialize(jsep_desc); - EXPECT_NE(std::string::npos, - message.find(cricket::kMediaProtocolUdpDtlsSctp)); + EXPECT_NE(std::string::npos, message.find(webrtc::kMediaProtocolUdpDtlsSctp)); } TEST_F(WebRtcSdpTest, DeserializeWithAllSctpProtocols) { AddSctpDataChannel(false); - std::string protocols[] = {cricket::kMediaProtocolDtlsSctp, - cricket::kMediaProtocolUdpDtlsSctp, - cricket::kMediaProtocolTcpDtlsSctp}; + std::string protocols[] = {webrtc::kMediaProtocolDtlsSctp, + webrtc::kMediaProtocolUdpDtlsSctp, + webrtc::kMediaProtocolTcpDtlsSctp}; for (const auto& protocol : protocols) { sctp_desc_->set_protocol(protocol); JsepSessionDescription jsep_desc(kDummyType); @@ -4813,6 +4790,7 @@ TEST_F(WebRtcSdpTest, DeserializeSessionDescriptionWithoutCname) { EXPECT_TRUE(SdpDeserialize(sdp_without_cname, &new_jdesc)); audio_desc_->mutable_streams()[0].cname = ""; + audio_desc_->mutable_streams()[0].ssrcs = {}; ASSERT_TRUE(jdesc_.Initialize(desc_.Clone(), jdesc_.session_id(), jdesc_.session_version())); EXPECT_TRUE(CompareSessionDescription(jdesc_, new_jdesc)); @@ -4878,8 +4856,8 @@ TEST_F(WebRtcSdpTest, DeserializeSdpWithUnsupportedMediaType) { EXPECT_TRUE(jdesc_output.description()->contents()[0].rejected); EXPECT_TRUE(jdesc_output.description()->contents()[1].rejected); - EXPECT_EQ(jdesc_output.description()->contents()[0].name, "bogusmid"); - EXPECT_EQ(jdesc_output.description()->contents()[1].name, "somethingmid"); + EXPECT_EQ(jdesc_output.description()->contents()[0].mid(), "bogusmid"); + EXPECT_EQ(jdesc_output.description()->contents()[1].mid(), "somethingmid"); } TEST_F(WebRtcSdpTest, MediaTypeProtocolMismatch) { @@ -5021,7 +4999,7 @@ TEST_F(WebRtcSdpTest, ParseSessionLevelExtmapAttributes) { EXPECT_TRUE(SdpDeserialize(sdp, &jdesc)); ASSERT_EQ(1u, jdesc.description()->contents().size()); const auto content = jdesc.description()->contents()[0]; - const auto* audio_description = content.media_description()->as_audio(); + const auto* audio_description = content.media_description(); ASSERT_NE(audio_description, nullptr); const auto& extensions = audio_description->rtp_header_extensions(); ASSERT_EQ(1u, extensions.size()); @@ -5083,3 +5061,103 @@ TEST_F(WebRtcSdpTest, 
RejectDuplicateSsrcInSsrcGroup) { JsepSessionDescription jdesc(kDummyType); EXPECT_FALSE(SdpDeserialize(sdp, &jdesc)); } + +TEST_F(WebRtcSdpTest, ExpectsTLineBeforeAttributeLine) { + // https://www.rfc-editor.org/rfc/rfc4566#page-9 + // says a= attributes must come last. + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "a=thisisnottherightplace\r\n" + "t=0 0\r\n"; + JsepSessionDescription jdesc(kDummyType); + EXPECT_FALSE(SdpDeserialize(sdp, &jdesc)); +} + +TEST_F(WebRtcSdpTest, IgnoresUnknownAttributeLines) { + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=somethingthatisnotunderstood\r\n"; + JsepSessionDescription jdesc(kDummyType); + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc)); +} + +TEST_F(WebRtcSdpTest, BackfillsDefaultFmtpValues) { + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=group:BUNDLE 0\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 96 97 98 99\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:0\r\n" + "a=rtpmap:96 H264/90000\r\n" + "a=rtpmap:97 VP9/90000\r\n" + "a=rtpmap:98 AV1/90000\r\n" + "a=rtpmap:99 H265/90000\r\n" + "a=ssrc:1234 cname:test\r\n"; + JsepSessionDescription jdesc(kDummyType); + EXPECT_TRUE(SdpDeserialize(sdp, &jdesc)); + ASSERT_EQ(1u, jdesc.description()->contents().size()); + const auto content = jdesc.description()->contents()[0]; + const auto* description = content.media_description(); + ASSERT_NE(description, nullptr); + const std::vector codecs = description->codecs(); + ASSERT_EQ(codecs.size(), 4u); + std::string value; + + EXPECT_EQ(codecs[0].name, "H264"); + EXPECT_TRUE(codecs[0].GetParam("packetization-mode", &value)); + EXPECT_EQ(value, "0"); + + EXPECT_EQ(codecs[1].name, "VP9"); + EXPECT_TRUE(codecs[1].GetParam("profile-id", &value)); + EXPECT_EQ(value, "0"); + + EXPECT_EQ(codecs[2].name, "AV1"); + EXPECT_TRUE(codecs[2].GetParam("profile", &value)); + EXPECT_EQ(value, "0"); + EXPECT_TRUE(codecs[2].GetParam("level-idx", &value)); + EXPECT_EQ(value, "5"); + EXPECT_TRUE(codecs[2].GetParam("tier", &value)); + EXPECT_EQ(value, "0"); + + EXPECT_EQ(codecs[3].name, "H265"); + EXPECT_TRUE(codecs[3].GetParam("level-id", &value)); + EXPECT_EQ(value, "93"); + EXPECT_TRUE(codecs[3].GetParam("tx-mode", &value)); + EXPECT_EQ(value, "SRST"); +} + +TEST_F(WebRtcSdpTest, ParsesKeyValueFmtpParameterSet) { + std::string params = "key1=value1;key2=value2"; + webrtc::CodecParameterMap codec_params; + SdpParseError error; + + ASSERT_TRUE(webrtc::ParseFmtpParameterSet(params, codec_params, &error)); + EXPECT_EQ(2U, codec_params.size()); + EXPECT_EQ(codec_params["key1"], "value1"); + EXPECT_EQ(codec_params["key2"], "value2"); +} + +TEST_F(WebRtcSdpTest, ParsesNonKeyValueFmtpParameterSet) { + std::string params = "not-in-key-value-format"; + webrtc::CodecParameterMap codec_params; + SdpParseError error; + + ASSERT_TRUE(webrtc::ParseFmtpParameterSet(params, codec_params, &error)); + EXPECT_EQ(1U, codec_params.size()); + EXPECT_EQ(codec_params[""], "not-in-key-value-format"); +} diff --git a/pc/webrtc_session_description_factory.cc b/pc/webrtc_session_description_factory.cc index 3d398e3b6e..0ca42e4802 100644 --- a/pc/webrtc_session_description_factory.cc +++ b/pc/webrtc_session_description_factory.cc @@ -12,30 +12,41 @@ #include +#include +#include 
+#include +#include #include #include -#include #include #include #include "absl/algorithm/container.h" -#include "absl/types/optional.h" +#include "absl/functional/any_invocable.h" +#include "absl/strings/str_cat.h" +#include "api/field_trials_view.h" #include "api/jsep.h" #include "api/jsep_session_description.h" +#include "api/peer_connection_interface.h" #include "api/rtc_error.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "pc/codec_vendor.h" #include "pc/connection_context.h" +#include "pc/media_options.h" +#include "pc/media_session.h" #include "pc/sdp_state_provider.h" #include "pc/session_description.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/string_encode.h" #include "rtc_base/unique_id_generator.h" -using cricket::MediaSessionOptions; -using rtc::UniqueRandomIdGenerator; +using ::webrtc::MediaSessionOptions; +using webrtc::UniqueRandomIdGenerator; namespace webrtc { namespace { @@ -48,23 +59,23 @@ static const uint64_t kInitSessionVersion = 2; // Check that each sender has a unique ID. static bool ValidMediaSessionOptions( - const cricket::MediaSessionOptions& session_options) { - std::vector sorted_senders; - for (const cricket::MediaDescriptionOptions& media_description_options : + const MediaSessionOptions& session_options) { + std::vector sorted_senders; + for (const MediaDescriptionOptions& media_description_options : session_options.media_description_options) { sorted_senders.insert(sorted_senders.end(), media_description_options.sender_options.begin(), media_description_options.sender_options.end()); } - absl::c_sort(sorted_senders, [](const cricket::SenderOptions& sender1, - const cricket::SenderOptions& sender2) { - return sender1.track_id < sender2.track_id; - }); - return absl::c_adjacent_find(sorted_senders, - [](const cricket::SenderOptions& sender1, - const cricket::SenderOptions& sender2) { - return sender1.track_id == sender2.track_id; - }) == sorted_senders.end(); + absl::c_sort(sorted_senders, + [](const SenderOptions& sender1, const SenderOptions& sender2) { + return sender1.track_id < sender2.track_id; + }); + return absl::c_adjacent_find( + sorted_senders, + [](const SenderOptions& sender1, const SenderOptions& sender2) { + return sender1.track_id == sender2.track_id; + }) == sorted_senders.end(); } } // namespace @@ -76,9 +87,8 @@ void WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription( if (!source_desc) { return; } - const cricket::ContentInfos& contents = - source_desc->description()->contents(); - const cricket::ContentInfo* cinfo = + const ContentInfos& contents = source_desc->description()->contents(); + const ContentInfo* cinfo = source_desc->description()->GetContentByName(content_name); if (!cinfo) { return; @@ -104,17 +114,19 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory( const SdpStateProvider* sdp_info, const std::string& session_id, bool dtls_enabled, - std::unique_ptr cert_generator, - rtc::scoped_refptr certificate, - std::function&)> + std::unique_ptr cert_generator, + scoped_refptr certificate, + std::function&)> on_certificate_ready, + CodecLookupHelper* codec_lookup_helper, const FieldTrialsView& field_trials) : signaling_thread_(context->signaling_thread()), transport_desc_factory_(field_trials), session_desc_factory_(context->media_engine(), context->use_rtx(), 
context->ssrc_generator(), - &transport_desc_factory_), + &transport_desc_factory_, + codec_lookup_helper), // RFC 4566 suggested a Network Time Protocol (NTP) format timestamp // as the session id and session version. To simplify, it should be fine // to just use a random number as session id and start version from @@ -128,13 +140,10 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory( RTC_DCHECK(signaling_thread_); if (!dtls_enabled) { - SetSdesPolicy(cricket::SEC_REQUIRED); - RTC_LOG(LS_VERBOSE) << "DTLS-SRTP disabled."; + RTC_LOG(LS_INFO) << "DTLS-SRTP disabled"; + transport_desc_factory_.SetInsecureForTesting(); return; } - - // SRTP-SDES is disabled if DTLS is on. - SetSdesPolicy(cricket::SEC_DISABLED); if (certificate) { // Use `certificate`. certificate_request_state_ = CERTIFICATE_WAITING; @@ -142,31 +151,32 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory( RTC_LOG(LS_VERBOSE) << "DTLS-SRTP enabled; has certificate parameter."; RTC_LOG(LS_INFO) << "Using certificate supplied to the constructor."; SetCertificate(certificate); - } else { - // Generate certificate. - certificate_request_state_ = CERTIFICATE_WAITING; - - auto callback = [weak_ptr = weak_factory_.GetWeakPtr()]( - rtc::scoped_refptr certificate) { - if (!weak_ptr) { - return; - } - if (certificate) { - weak_ptr->SetCertificate(std::move(certificate)); - } else { - weak_ptr->OnCertificateRequestFailed(); - } - }; + return; + } + // Generate certificate. + RTC_DCHECK(cert_generator_); + certificate_request_state_ = CERTIFICATE_WAITING; + + auto callback = [weak_ptr = weak_factory_.GetWeakPtr()]( + scoped_refptr certificate) { + if (!weak_ptr) { + return; + } + if (certificate) { + weak_ptr->SetCertificate(std::move(certificate)); + } else { + weak_ptr->OnCertificateRequestFailed(); + } + }; - rtc::KeyParams key_params = rtc::KeyParams(); - RTC_LOG(LS_VERBOSE) - << "DTLS-SRTP enabled; sending DTLS identity request (key type: " - << key_params.type() << ")."; + KeyParams key_params = KeyParams(); + RTC_LOG(LS_VERBOSE) + << "DTLS-SRTP enabled; sending DTLS identity request (key type: " + << key_params.type() << ")."; - // Request certificate. This happens asynchronously on a different thread. - cert_generator_->GenerateCertificateAsync(key_params, absl::nullopt, - std::move(callback)); - } + // Request certificate. This happens asynchronously on a different thread. 
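// Editorial sketch, not part of the patch: the guard used by the certificate
// callback above, shown with std::weak_ptr instead of webrtc's
// WeakPtrFactory. Factory and Certificate are invented for the sketch and
// Factory is assumed to be owned by a std::shared_ptr; the point is that the
// asynchronous callback checks whether its owner still exists before
// delivering the result.
#include <functional>
#include <memory>
#include <utility>

namespace cert_sketch {

struct Certificate {};

class Factory : public std::enable_shared_from_this<Factory> {
 public:
  std::function<void(std::shared_ptr<Certificate>)> MakeCertificateCallback() {
    return [weak = weak_from_this()](std::shared_ptr<Certificate> certificate) {
      std::shared_ptr<Factory> self = weak.lock();
      if (!self) {
        return;  // The factory was destroyed while the request was in flight.
      }
      if (certificate) {
        self->SetCertificate(std::move(certificate));
      } else {
        self->OnCertificateRequestFailed();
      }
    };
  }

 private:
  void SetCertificate(std::shared_ptr<Certificate> /*certificate*/) {}
  void OnCertificateRequestFailed() {}
};

}  // namespace cert_sketch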
+ cert_generator_->GenerateCertificateAsync(key_params, std::nullopt, + std::move(callback)); } WebRtcSessionDescriptionFactory::~WebRtcSessionDescriptionFactory() { @@ -189,20 +199,20 @@ WebRtcSessionDescriptionFactory::~WebRtcSessionDescriptionFactory() { void WebRtcSessionDescriptionFactory::CreateOffer( CreateSessionDescriptionObserver* observer, const PeerConnectionInterface::RTCOfferAnswerOptions& options, - const cricket::MediaSessionOptions& session_options) { + const MediaSessionOptions& session_options) { RTC_DCHECK_RUN_ON(signaling_thread_); std::string error = "CreateOffer"; if (certificate_request_state_ == CERTIFICATE_FAILED) { error += kFailedDueToIdentityFailed; - RTC_LOG(LS_ERROR) << error; - PostCreateSessionDescriptionFailed(observer, error); + PostCreateSessionDescriptionFailed( + observer, RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error))); return; } if (!ValidMediaSessionOptions(session_options)) { error += " called with invalid session options"; - RTC_LOG(LS_ERROR) << error; - PostCreateSessionDescriptionFailed(observer, error); + PostCreateSessionDescriptionFailed( + observer, RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error))); return; } @@ -219,31 +229,31 @@ void WebRtcSessionDescriptionFactory::CreateOffer( void WebRtcSessionDescriptionFactory::CreateAnswer( CreateSessionDescriptionObserver* observer, - const cricket::MediaSessionOptions& session_options) { + const MediaSessionOptions& session_options) { std::string error = "CreateAnswer"; if (certificate_request_state_ == CERTIFICATE_FAILED) { error += kFailedDueToIdentityFailed; - RTC_LOG(LS_ERROR) << error; - PostCreateSessionDescriptionFailed(observer, error); + PostCreateSessionDescriptionFailed( + observer, RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error))); return; } if (!sdp_info_->remote_description()) { error += " can't be called before SetRemoteDescription."; - RTC_LOG(LS_ERROR) << error; - PostCreateSessionDescriptionFailed(observer, error); + PostCreateSessionDescriptionFailed( + observer, RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error))); return; } if (sdp_info_->remote_description()->GetType() != SdpType::kOffer) { error += " failed because remote_description is not an offer."; - RTC_LOG(LS_ERROR) << error; - PostCreateSessionDescriptionFailed(observer, error); + PostCreateSessionDescriptionFailed( + observer, RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error))); return; } if (!ValidMediaSessionOptions(session_options)) { error += " called with invalid session options."; - RTC_LOG(LS_ERROR) << error; - PostCreateSessionDescriptionFailed(observer, error); + PostCreateSessionDescriptionFailed( + observer, RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error))); return; } @@ -258,21 +268,12 @@ void WebRtcSessionDescriptionFactory::CreateAnswer( } } -void WebRtcSessionDescriptionFactory::SetSdesPolicy( - cricket::SecurePolicy secure_policy) { - session_desc_factory_.set_secure(secure_policy); -} - -cricket::SecurePolicy WebRtcSessionDescriptionFactory::SdesPolicy() const { - return session_desc_factory_.secure(); -} - void WebRtcSessionDescriptionFactory::InternalCreateOffer( CreateSessionDescriptionRequest request) { if (sdp_info_->local_description()) { // If the needs-ice-restart flag is set as described by JSEP, we should // generate an offer with a new ufrag/password to trigger an ICE restart. 
- for (cricket::MediaDescriptionOptions& options : + for (MediaDescriptionOptions& options : request.options.media_description_options) { if (sdp_info_->NeedsIceRestart(options.mid)) { options.transport_options.ice_restart = true; @@ -280,16 +281,16 @@ void WebRtcSessionDescriptionFactory::InternalCreateOffer( } } - std::unique_ptr<cricket::SessionDescription> desc = - session_desc_factory_.CreateOffer( - request.options, sdp_info_->local_description() - ? sdp_info_->local_description()->description() - : nullptr); - if (!desc) { - PostCreateSessionDescriptionFailed(request.observer.get(), - "Failed to initialize the offer."); + auto result = session_desc_factory_.CreateOfferOrError( + request.options, sdp_info_->local_description() + ? sdp_info_->local_description()->description() + : nullptr); + if (!result.ok()) { + PostCreateSessionDescriptionFailed(request.observer.get(), result.error()); return; } + std::unique_ptr<SessionDescription> desc = std::move(result.value()); + RTC_CHECK(desc); // RFC 3264 // When issuing an offer that modifies the session, @@ -303,9 +304,9 @@ void WebRtcSessionDescriptionFactory::InternalCreateOffer( RTC_DCHECK(session_version_ + 1 > session_version_); auto offer = std::make_unique<JsepSessionDescription>( SdpType::kOffer, std::move(desc), session_id_, - rtc::ToString(session_version_++)); + absl::StrCat(session_version_++)); if (sdp_info_->local_description()) { - for (const cricket::MediaDescriptionOptions& options : + for (const MediaDescriptionOptions& options : request.options.media_description_options) { if (!options.transport_options.ice_restart) { CopyCandidatesFromSessionDescription(sdp_info_->local_description(), @@ -320,7 +321,7 @@ void WebRtcSessionDescriptionFactory::InternalCreateOffer( void WebRtcSessionDescriptionFactory::InternalCreateAnswer( CreateSessionDescriptionRequest request) { if (sdp_info_->remote_description()) { - for (cricket::MediaDescriptionOptions& options : + for (MediaDescriptionOptions& options : request.options.media_description_options) { // According to http://tools.ietf.org/html/rfc5245#section-9.2.1.1 // an answer should also contain new ICE ufrag and password if an offer @@ -329,29 +330,28 @@ void WebRtcSessionDescriptionFactory::InternalCreateAnswer( sdp_info_->IceRestartPending(options.mid); // We should pass the current DTLS role to the transport description // factory, if there is already an existing ongoing session. - absl::optional<rtc::SSLRole> dtls_role = - sdp_info_->GetDtlsRole(options.mid); + std::optional<SSLRole> dtls_role = sdp_info_->GetDtlsRole(options.mid); if (dtls_role) { options.transport_options.prefer_passive_role = - (rtc::SSL_SERVER == *dtls_role); + (SSL_SERVER == *dtls_role); } } } - std::unique_ptr<cricket::SessionDescription> desc = - session_desc_factory_.CreateAnswer( - sdp_info_->remote_description() - ? sdp_info_->remote_description()->description() - : nullptr, - request.options, - sdp_info_->local_description() - ? sdp_info_->local_description()->description() - : nullptr); - if (!desc) { - PostCreateSessionDescriptionFailed(request.observer.get(), - "Failed to initialize the answer."); + auto result = session_desc_factory_.CreateAnswerOrError( + sdp_info_->remote_description() + ? sdp_info_->remote_description()->description() + : nullptr, + request.options, + sdp_info_->local_description() + ?
sdp_info_->local_description()->description() + : nullptr); + if (!result.ok()) { + PostCreateSessionDescriptionFailed(request.observer.get(), result.error()); return; } + std::unique_ptr<SessionDescription> desc = std::move(result.value()); + RTC_CHECK(desc); // RFC 3264 // If the answer is different from the offer in any way (different IP @@ -363,11 +363,11 @@ void WebRtcSessionDescriptionFactory::InternalCreateAnswer( RTC_DCHECK(session_version_ + 1 > session_version_); auto answer = std::make_unique<JsepSessionDescription>( SdpType::kAnswer, std::move(desc), session_id_, - rtc::ToString(session_version_++)); + absl::StrCat(session_version_++)); if (sdp_info_->local_description()) { // Include all local ICE candidates in the SessionDescription unless // the remote peer has requested an ICE restart. - for (const cricket::MediaDescriptionOptions& options : + for (const MediaDescriptionOptions& options : request.options.media_description_options) { if (!options.transport_options.ice_restart) { CopyCandidatesFromSessionDescription(sdp_info_->local_description(), @@ -387,31 +387,27 @@ void WebRtcSessionDescriptionFactory::FailPendingRequests( create_session_description_requests_.front(); PostCreateSessionDescriptionFailed( request.observer.get(), - ((request.type == CreateSessionDescriptionRequest::kOffer) - ? "CreateOffer" - : "CreateAnswer") + - reason); + RTCError(RTCErrorType::INTERNAL_ERROR, + ((request.type == CreateSessionDescriptionRequest::kOffer) + ? "CreateOffer" + : "CreateAnswer") + + reason)); create_session_description_requests_.pop(); } } void WebRtcSessionDescriptionFactory::PostCreateSessionDescriptionFailed( CreateSessionDescriptionObserver* observer, - const std::string& error) { - Post([observer = - rtc::scoped_refptr<CreateSessionDescriptionObserver>(observer), - error]() mutable { - observer->OnFailure( - RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error))); - }); - RTC_LOG(LS_ERROR) << "Create SDP failed: " << error; + RTCError error) { + Post([observer = scoped_refptr<CreateSessionDescriptionObserver>(observer), + error]() mutable { observer->OnFailure(error); }); + RTC_LOG(LS_ERROR) << "CreateSessionDescription failed: " << error.message(); } void WebRtcSessionDescriptionFactory::PostCreateSessionDescriptionSucceeded( CreateSessionDescriptionObserver* observer, std::unique_ptr<SessionDescriptionInterface> description) { - Post([observer = - rtc::scoped_refptr<CreateSessionDescriptionObserver>(observer), + Post([observer = scoped_refptr<CreateSessionDescriptionObserver>(observer), description = std::move(description)]() mutable { observer->OnSuccess(description.release()); }); @@ -443,7 +439,7 @@ void WebRtcSessionDescriptionFactory::OnCertificateRequestFailed() { } void WebRtcSessionDescriptionFactory::SetCertificate( - rtc::scoped_refptr<rtc::RTCCertificate> certificate) { + scoped_refptr<RTCCertificate> certificate) { RTC_DCHECK(certificate); RTC_LOG(LS_VERBOSE) << "Setting new certificate."; @@ -452,7 +448,6 @@ void WebRtcSessionDescriptionFactory::SetCertificate( on_certificate_ready_(certificate); transport_desc_factory_.set_certificate(std::move(certificate)); - transport_desc_factory_.set_secure(cricket::SEC_ENABLED); while (!create_session_description_requests_.empty()) { if (create_session_description_requests_.front().type == @@ -464,4 +459,5 @@ void WebRtcSessionDescriptionFactory::SetCertificate( create_session_description_requests_.pop(); } } + } // namespace webrtc diff --git a/pc/webrtc_session_description_factory.h b/pc/webrtc_session_description_factory.h index 122a720162..3f3e6b62aa 100644 --- a/pc/webrtc_session_description_factory.h +++ b/pc/webrtc_session_description_factory.h @@ -19,17 +19,19 @@ #include <string> #include "absl/functional/any_invocable.h" +#include
"api/field_trials_view.h" #include "api/jsep.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" #include "api/scoped_refptr.h" #include "api/task_queue/task_queue_base.h" -#include "p2p/base/transport_description.h" #include "p2p/base/transport_description_factory.h" +#include "pc/codec_vendor.h" +#include "pc/media_options.h" #include "pc/media_session.h" #include "pc/sdp_state_provider.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/rtc_certificate_generator.h" -#include "rtc_base/unique_id_generator.h" #include "rtc_base/weak_ptr.h" namespace webrtc { @@ -49,10 +51,11 @@ class WebRtcSessionDescriptionFactory { const SdpStateProvider* sdp_info, const std::string& session_id, bool dtls_enabled, - std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator, - rtc::scoped_refptr<rtc::RTCCertificate> certificate, - std::function<void(const rtc::scoped_refptr<rtc::RTCCertificate>&)> + std::unique_ptr<RTCCertificateGeneratorInterface> cert_generator, + scoped_refptr<RTCCertificate> certificate, + std::function<void(const scoped_refptr<RTCCertificate>&)> on_certificate_ready, + CodecLookupHelper* codec_lookup_helper, const FieldTrialsView& field_trials); ~WebRtcSessionDescriptionFactory(); @@ -69,12 +72,9 @@ class WebRtcSessionDescriptionFactory { void CreateOffer( CreateSessionDescriptionObserver* observer, const PeerConnectionInterface::RTCOfferAnswerOptions& options, - const cricket::MediaSessionOptions& session_options); + const MediaSessionOptions& session_options); void CreateAnswer(CreateSessionDescriptionObserver* observer, - const cricket::MediaSessionOptions& session_options); - - void SetSdesPolicy(cricket::SecurePolicy secure_policy); - cricket::SecurePolicy SdesPolicy() const; + const MediaSessionOptions& session_options); void set_enable_encrypted_rtp_header_extensions(bool enable) { session_desc_factory_.set_enable_encrypted_rtp_header_extensions(enable); @@ -88,6 +88,9 @@ class WebRtcSessionDescriptionFactory { bool waiting_for_certificate_for_testing() const { return certificate_request_state_ == CERTIFICATE_WAITING; } + void SetInsecureForTesting() { + transport_desc_factory_.SetInsecureForTesting(); + } private: enum CertificateRequestState { @@ -105,12 +108,12 @@ class WebRtcSessionDescriptionFactory { CreateSessionDescriptionRequest(Type type, CreateSessionDescriptionObserver* observer, - const cricket::MediaSessionOptions& options) + const MediaSessionOptions& options) : type(type), observer(observer), options(options) {} Type type; - rtc::scoped_refptr<CreateSessionDescriptionObserver> observer; - cricket::MediaSessionOptions options; + scoped_refptr<CreateSessionDescriptionObserver> observer; + MediaSessionOptions options; }; void InternalCreateOffer(CreateSessionDescriptionRequest request); @@ -119,7 +122,7 @@ class WebRtcSessionDescriptionFactory { void FailPendingRequests(const std::string& reason); void PostCreateSessionDescriptionFailed( CreateSessionDescriptionObserver* observer, - const std::string& error); + RTCError error); void PostCreateSessionDescriptionSucceeded( CreateSessionDescriptionObserver* observer, std::unique_ptr<SessionDescriptionInterface> description); @@ -128,23 +131,24 @@ class WebRtcSessionDescriptionFactory { void Post(absl::AnyInvocable<void() &&> callback); void OnCertificateRequestFailed(); - void SetCertificate(rtc::scoped_refptr<rtc::RTCCertificate> certificate); + void SetCertificate(scoped_refptr<RTCCertificate> certificate); std::queue<CreateSessionDescriptionRequest> create_session_description_requests_; TaskQueueBase* const signaling_thread_; - cricket::TransportDescriptionFactory transport_desc_factory_; - cricket::MediaSessionDescriptionFactory session_desc_factory_; + TransportDescriptionFactory transport_desc_factory_; + MediaSessionDescriptionFactory session_desc_factory_; uint64_t session_version_; - const std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator_; + const
std::unique_ptr<RTCCertificateGeneratorInterface> cert_generator_; const SdpStateProvider* sdp_info_; const std::string session_id_; CertificateRequestState certificate_request_state_; std::queue<absl::AnyInvocable<void() &&>> callbacks_; - std::function<void(const rtc::scoped_refptr<rtc::RTCCertificate>&)> + std::function<void(const scoped_refptr<RTCCertificate>&)> on_certificate_ready_; - rtc::WeakPtrFactory<WebRtcSessionDescriptionFactory> weak_factory_{this}; + + WeakPtrFactory<WebRtcSessionDescriptionFactory> weak_factory_{this}; }; } // namespace webrtc diff --git a/pylintrc b/pylintrc index 852445a1ce..5f82b2c9ac 100644 --- a/pylintrc +++ b/pylintrc @@ -93,13 +93,13 @@ ignore-docstrings=yes [FORMAT] # Maximum number of characters on a single line. -max-line-length=80 +max-line-length=79 # Maximum number of lines in a module max-module-lines=1000 -# We use two spaces for indents, instead of the usual four spaces or tab. -indent-string='  ' +# Use four spaces for indents. +indent-string='    ' [BASIC] # List of builtins function names that should not be used, separated by a comma bad-functions=map,filter,apply,input -# Regular expression which should only match correct module names -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Regular expression which should only match correct module level names -# (CAPS_WITH_UNDER) -const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Regular expression which should only match correct class names -# (CapWords) -class-rgx=[A-Z_][a-zA-Z0-9]+$ - -# Regular expression which should only match correct function names -# The Chromium standard is different than PEP-8, so we need to redefine this to -# only allow: -# - CapWords -# - main: Standard for main function. -function-rgx=([A-Z_][a-zA-Z0-9]{2,60}|main)$ - -# Regular expression which should only match correct method names -# The Chromium standard is different than PEP-8, so we need to redefine this to -# only allow: -# - CapWords, starting with a capital letter. No underscores in function -# names. Can also have a "_" prefix (private method) or a "test" prefix -# (unit test). -# - Methods that look like __xyz__, which are used to do things like -# __init__, __del__, etc. -# - setUp, tearDown: For unit tests. -method-rgx=((_|test)?[A-Z][a-zA-Z0-9]{2,60}|__[a-z]+__|setUp|tearDown)$ - -# Regular expression which should only match correct instance attribute names -attr-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct argument names -argument-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct variable names -variable-rgx=[a-z_][a-z0-9_]{0,30}$ +# Naming style matching correct module names. +module-naming-style=snake_case + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct method names. +method-rgx=([a-z_][a-z0-9_]{2,}|setUp|tearDown)$ + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Naming style matching correct variable names. +variable-naming-style=snake_case # Regular expression which should only match correct list comprehension / # generator expression variable names diff --git a/pylintrc_old_style b/pylintrc_old_style new file mode 100644 index 0000000000..69b599b2f6 --- /dev/null +++ b/pylintrc_old_style @@ -0,0 +1,216 @@ +# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +# This file is mostly based on the contents of +# https://cs.chromium.org/chromium/tools/depot_tools/pylintrc +# and (since the above doesn't properly support naming style checks) +# https://cs.chromium.org/chromium/src/third_party/chromite/pylintrc + +[MESSAGES CONTROL] + +# Disable the message, report, category or checker with the given id(s). +# TODO(kjellander): Reduce this list to as small as possible. +disable= + E0611, + I0010, + I0011, + W0232, + C0413, + bad-continuation, + broad-except, + duplicate-code, + eval-used, + exec-used, + fixme, + import-error, + import-outside-toplevel, + missing-docstring, + no-init, + no-member, + too-few-public-methods, + too-many-ancestors, + too-many-arguments, + too-many-branches, + too-many-function-args, + too-many-instance-attributes, + too-many-lines, + too-many-locals, + too-many-public-methods, + too-many-return-statements, + too-many-statements, + + +[REPORTS] + +# Don't write out full reports, just messages. +reports=no + + +[VARIABLES] + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# A regular expression matching the beginning of the name of dummy variables +# (i.e. not used). +dummy-variables-rgx=_|dummy + + +[TYPECHECK] + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# List of classes names for which member attributes should not be checked +# (useful for classes with attributes dynamically set). +ignored-classes=hashlib,numpy + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,XXX,TODO + + +[SIMILARITIES] + +# Minimum lines number of a similarity. +min-similarity-lines=4 + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + + +[FORMAT] + +# Maximum number of characters on a single line. +max-line-length=80 + +# Maximum number of lines in a module +max-module-lines=1000 + +# Use four spaces for indents. +# indent-string=' ' + + +[BASIC] + +# List of builtins function names that should not be used, separated by a comma +bad-functions=map,filter,apply,input + +# Regular expression which should only match correct module names +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Regular expression which should only match correct module level names +# (CAPS_WITH_UNDER) +const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Regular expression which should only match correct class names +# (CapWords) +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression which should only match correct function names +# The Chromium standard is different than PEP-8, so we need to redefine this to +# only allow: +# - CapWords +# - main: Standard for main function. +function-rgx=([A-Z_][a-zA-Z0-9]{2,60}|main)$ + +# Regular expression which should only match correct method names +# The Chromium standard is different than PEP-8, so we need to redefine this to +# only allow: +# - CapWords, starting with a capital letter. No underscores in function +# names. 
Can also have a "_" prefix (private method) or a "test" prefix +# (unit test). +# - Methods that look like __xyz__, which are used to do things like +# __init__, __del__, etc. +# - setUp, tearDown: For unit tests. +method-rgx=((_|test)?[A-Z][a-zA-Z0-9]{2,60}|__[a-z]+__|setUp|tearDown)$ + +# Regular expression which should only match correct instance attribute names +attr-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct argument names +argument-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct variable names +variable-rgx=[a-z_][a-z0-9_]{0,30}$ + +# Regular expression which should only match correct list comprehension / +# generator expression variable names +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Good variable names which should always be accepted, separated by a comma +good-names=i,j,k,ex,Run,_ + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# Regular expression which should only match functions or classes name which do +# not require a docstring +no-docstring-rgx=__.*__ + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=5 + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.* + +# Maximum number of locals for function / method body +max-locals=15 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of branch for function / method body +max-branchs=12 + +# Maximum number of statements in function / method body +max-statements=50 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__,__new__,setUp + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + + +[IMPORTS] + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=regsub,TERMIOS,Bastion,rexec + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "Exception" +overgeneral-exceptions=Exception diff --git a/resources/.gitignore b/resources/.gitignore index 1f729ae457..80580705f9 100644 --- a/resources/.gitignore +++ b/resources/.gitignore @@ -1,5 +1,6 @@ **/*.aecdump **/*.bin +**/*.binarypb **/*.byte **/*.chn **/*.dat diff --git a/resources/BUILD.gn b/resources/BUILD.gn new file mode 100644 index 0000000000..6d6beab721 --- /dev/null +++ b/resources/BUILD.gn @@ -0,0 +1,180 @@ +# Copyright (c) 2025 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../webrtc.gni") + +modules_tests_resources = [ + "audio_coding/testfile16kHz.pcm", + "audio_coding/testfile32kHz.pcm", + "audio_coding/teststereo32kHz.pcm", + "foreman_cif.yuv", +] +group("modules_tests_data") { + data = modules_tests_resources +} +if (is_ios) { + bundle_data("modules_tests_bundle_data") { + testonly = true + sources = modules_tests_resources + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + } +} + +modules_unittests_resources = [ + "audio_coding/neteq_opus.rtp", + "audio_coding/neteq_opus_dtx.rtp", + "audio_coding/neteq_universal_new.rtp", + "audio_coding/speech_4_channels_48k_one_second.wav", + "audio_coding/speech_mono_16kHz.pcm", + "audio_coding/speech_mono_32_48kHz.pcm", + "audio_coding/testfile16kHz.pcm", + "audio_coding/testfile32kHz.pcm", + "audio_coding/testfile_fake_stereo_32kHz.pcm", + "audio_coding/teststereo32kHz.pcm", + "audio_device/audio_short16.pcm", + "audio_device/audio_short44.pcm", + "audio_device/audio_short48.pcm", + "audio_processing/agc/agc_audio.pcm", + "audio_processing/agc/agc_no_circular_buffer.dat", + "audio_processing/agc/agc_pitch_gain.dat", + "audio_processing/agc/agc_pitch_lag.dat", + "audio_processing/agc/agc_spectral_peak.dat", + "audio_processing/agc/agc_vad.dat", + "audio_processing/agc/agc_voicing_prob.dat", + "audio_processing/agc/agc_with_circular_buffer.dat", + "audio_processing/output_data_fixed.pb", + "audio_processing/output_data_float.pb", + "audio_processing/output_data_float_avx2.pb", + "audio_processing/output_data_mac.pb", + "audio_processing/transient/ajm-macbook-1-spke16m.pcm", + "audio_processing/transient/audio16kHz.pcm", + "audio_processing/transient/audio32kHz.pcm", + "audio_processing/transient/audio48kHz.pcm", + "audio_processing/transient/audio8kHz.pcm", + "audio_processing/transient/detect16kHz.dat", + "audio_processing/transient/detect32kHz.dat", + "audio_processing/transient/detect48kHz.dat", + "audio_processing/transient/detect8kHz.dat", + "audio_processing/transient/double-utils.dat", + "audio_processing/transient/float-utils.dat", + "audio_processing/transient/suppressed16kHz.pcm", + "audio_processing/transient/suppressed32kHz.pcm", + "audio_processing/transient/suppressed8kHz.pcm", + "audio_processing/transient/wpd0.dat", + "audio_processing/transient/wpd1.dat", + "audio_processing/transient/wpd2.dat", + "audio_processing/transient/wpd3.dat", + "audio_processing/transient/wpd4.dat", + "audio_processing/transient/wpd5.dat", + "audio_processing/transient/wpd6.dat", + "audio_processing/transient/wpd7.dat", + "far16_stereo.pcm", + "far176_stereo.pcm", + "far192_stereo.pcm", + "far22_stereo.pcm", + "far32_stereo.pcm", + "far44_stereo.pcm", + "far48_stereo.pcm", + "far88_stereo.pcm", + "far8_stereo.pcm", + "far96_stereo.pcm", + "foreman_cif.yuv", + "foreman_cif_short.yuv", + "near16_stereo.pcm", + "near176_stereo.pcm", + "near192_stereo.pcm", + "near22_stereo.pcm", + "near32_stereo.pcm", + "near44_stereo.pcm", + "near48_mono.pcm", + "near48_stereo.pcm", + "near88_stereo.pcm", + "near8_stereo.pcm", + "near96_stereo.pcm", + "ref03.aecdump", + "remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_0_AST.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_0_TOF.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_1_AST.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_1_TOF.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_0_AST.bin", + 
"remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_0_TOF.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_1_AST.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_1_TOF.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingDelay1_0_AST.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingDelay1_0_TOF.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingLoss1_0_AST.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingLoss1_0_TOF.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_Multi1_1_AST.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_Multi1_1_TOF.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_0_AST.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_0_TOF.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_1_AST.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_1_TOF.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyDelay_0_AST.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyDelay_0_TOF.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyLoss_0_AST.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyLoss_0_TOF.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_UnlimitedSpeed_0_AST.bin", + "remote_bitrate_estimator/VideoSendersTest_BweTest_UnlimitedSpeed_0_TOF.bin", + "short_mixed_mono_48.dat", + "short_mixed_mono_48.pcm", + "short_mixed_mono_48_arm.dat", + "short_mixed_stereo_48.dat", + "short_mixed_stereo_48.pcm", + "voice_engine/audio_tiny48.wav", +] +group("modules_unittests_data") { + data = modules_unittests_resources +} +if (is_ios) { + bundle_data("modules_unittests_bundle_data") { + testonly = true + sources = modules_unittests_resources + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + } +} + +webrtc_perf_tests_resources = [ + "ConferenceMotion_1280_720_50.yuv", + "audio_coding/speech_mono_16kHz.pcm", + "audio_coding/speech_mono_32_48kHz.pcm", + "audio_coding/testfile32kHz.pcm", + "difficult_photo_1850_1110.yuv", + "foreman_cif.yuv", + "paris_qcif.yuv", + "photo_1850_1110.yuv", + "presentation_1850_1110.yuv", + "voice_engine/audio_long16.pcm", + "web_screenshot_1850_1110.yuv", +] +group("webrtc_perf_tests_data") { + data = webrtc_perf_tests_resources +} +if (is_ios) { + bundle_data("webrtc_perf_tests_bundle_data") { + testonly = true + sources = webrtc_perf_tests_resources + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + } +} + +video_engine_tests_resources = [ + "ConferenceMotion_1280_720_50.yuv", + "foreman_cif_short.yuv", + "voice_engine/audio_long16.pcm", +] +group("video_engine_tests_data") { + data = video_engine_tests_resources +} +if (is_ios) { + bundle_data("video_engine_tests_bundle_data") { + testonly = true + sources = video_engine_tests_resources + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + } +} diff --git a/resources/audio_coding/F00.BIT20.sha1 b/resources/audio_coding/F00.BIT20.sha1 deleted file mode 100644 index dcbaa09516..0000000000 --- a/resources/audio_coding/F00.BIT20.sha1 +++ /dev/null @@ -1 +0,0 @@ -27e30ed5e0d449db1cd729c382e18455ad3834fe \ No newline at end of file diff --git a/resources/audio_coding/F00.BIT30.sha1 b/resources/audio_coding/F00.BIT30.sha1 deleted file mode 100644 index 874ce9db19..0000000000 --- a/resources/audio_coding/F00.BIT30.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-6bafbd8667365939c327eef627fa0f94a75e5fa9 \ No newline at end of file diff --git a/resources/audio_coding/F00.INP.sha1 b/resources/audio_coding/F00.INP.sha1 deleted file mode 100644 index 3166f99eae..0000000000 --- a/resources/audio_coding/F00.INP.sha1 +++ /dev/null @@ -1 +0,0 @@ -44bb846907928e958531e805758767bf9353e03c \ No newline at end of file diff --git a/resources/audio_coding/F00.OUT20.sha1 b/resources/audio_coding/F00.OUT20.sha1 deleted file mode 100644 index 9b9f4bdd46..0000000000 --- a/resources/audio_coding/F00.OUT20.sha1 +++ /dev/null @@ -1 +0,0 @@ -bb80d3c48c42bb14c8e613a8576cb77283cda3a7 \ No newline at end of file diff --git a/resources/audio_coding/F00.OUT30.sha1 b/resources/audio_coding/F00.OUT30.sha1 deleted file mode 100644 index cd62333757..0000000000 --- a/resources/audio_coding/F00.OUT30.sha1 +++ /dev/null @@ -1 +0,0 @@ -450b538234feff02f35b3e57fbeea5d93a41abe0 \ No newline at end of file diff --git a/resources/audio_coding/F01.BIT20.sha1 b/resources/audio_coding/F01.BIT20.sha1 deleted file mode 100644 index 7a267240c7..0000000000 --- a/resources/audio_coding/F01.BIT20.sha1 +++ /dev/null @@ -1 +0,0 @@ -a6230cfa6786ba3d81510aabcaa68bd9b1610d1a \ No newline at end of file diff --git a/resources/audio_coding/F01.BIT30.sha1 b/resources/audio_coding/F01.BIT30.sha1 deleted file mode 100644 index e5a0e5a356..0000000000 --- a/resources/audio_coding/F01.BIT30.sha1 +++ /dev/null @@ -1 +0,0 @@ -4f706d94506e0a4dc56f63940caa588e2531a455 \ No newline at end of file diff --git a/resources/audio_coding/F01.INP.sha1 b/resources/audio_coding/F01.INP.sha1 deleted file mode 100644 index a1c1b207bc..0000000000 --- a/resources/audio_coding/F01.INP.sha1 +++ /dev/null @@ -1 +0,0 @@ -23780ea4c8893813787f7ab1dc7ab68075ad89c0 \ No newline at end of file diff --git a/resources/audio_coding/F01.OUT20.sha1 b/resources/audio_coding/F01.OUT20.sha1 deleted file mode 100644 index db6100bc73..0000000000 --- a/resources/audio_coding/F01.OUT20.sha1 +++ /dev/null @@ -1 +0,0 @@ -1ffcead0db4293f5f36abfdc7cd959d6733358d4 \ No newline at end of file diff --git a/resources/audio_coding/F01.OUT30.sha1 b/resources/audio_coding/F01.OUT30.sha1 deleted file mode 100644 index 2c8e141a06..0000000000 --- a/resources/audio_coding/F01.OUT30.sha1 +++ /dev/null @@ -1 +0,0 @@ -37b5e29f294c3612d4425775b422d7dc9b346965 \ No newline at end of file diff --git a/resources/audio_coding/F02.BIT20.sha1 b/resources/audio_coding/F02.BIT20.sha1 deleted file mode 100644 index 98f5fc5d13..0000000000 --- a/resources/audio_coding/F02.BIT20.sha1 +++ /dev/null @@ -1 +0,0 @@ -ea44732065b09eec558af1957da21c9061c19c08 \ No newline at end of file diff --git a/resources/audio_coding/F02.BIT30.sha1 b/resources/audio_coding/F02.BIT30.sha1 deleted file mode 100644 index 6e645690f3..0000000000 --- a/resources/audio_coding/F02.BIT30.sha1 +++ /dev/null @@ -1 +0,0 @@ -40a5e2850e9e838429911cc3dbb1ff6c9bf38387 \ No newline at end of file diff --git a/resources/audio_coding/F02.INP.sha1 b/resources/audio_coding/F02.INP.sha1 deleted file mode 100644 index d6862202dd..0000000000 --- a/resources/audio_coding/F02.INP.sha1 +++ /dev/null @@ -1 +0,0 @@ -57bf2aaa27527d51085e040ccead4b99f160bb2f \ No newline at end of file diff --git a/resources/audio_coding/F02.OUT20.sha1 b/resources/audio_coding/F02.OUT20.sha1 deleted file mode 100644 index 8f9fb5a334..0000000000 --- a/resources/audio_coding/F02.OUT20.sha1 +++ /dev/null @@ -1 +0,0 @@ -13bb10aca91ddd8af71751944e18243735bdda98 \ No newline at end of file diff --git a/resources/audio_coding/F02.OUT30.sha1 
b/resources/audio_coding/F02.OUT30.sha1 deleted file mode 100644 index 5427662ae7..0000000000 --- a/resources/audio_coding/F02.OUT30.sha1 +++ /dev/null @@ -1 +0,0 @@ -21764cf8bb5e635e842eaf92403b22ce04732805 \ No newline at end of file diff --git a/resources/audio_coding/F03.BIT20.sha1 b/resources/audio_coding/F03.BIT20.sha1 deleted file mode 100644 index 0ccdc7d90f..0000000000 --- a/resources/audio_coding/F03.BIT20.sha1 +++ /dev/null @@ -1 +0,0 @@ -c145a1a211103145fbb21cf1c78ab90abce83228 \ No newline at end of file diff --git a/resources/audio_coding/F03.BIT30.sha1 b/resources/audio_coding/F03.BIT30.sha1 deleted file mode 100644 index 565c4c67c1..0000000000 --- a/resources/audio_coding/F03.BIT30.sha1 +++ /dev/null @@ -1 +0,0 @@ -2ccd6fe240e41906c9f06e05cbde486c1f2bebf7 \ No newline at end of file diff --git a/resources/audio_coding/F03.INP.sha1 b/resources/audio_coding/F03.INP.sha1 deleted file mode 100644 index 80dbb9ca25..0000000000 --- a/resources/audio_coding/F03.INP.sha1 +++ /dev/null @@ -1 +0,0 @@ -fb7ad10e20f2de4334f8c122c8582f18b91f0552 \ No newline at end of file diff --git a/resources/audio_coding/F03.OUT20.sha1 b/resources/audio_coding/F03.OUT20.sha1 deleted file mode 100644 index 210e4f69a0..0000000000 --- a/resources/audio_coding/F03.OUT20.sha1 +++ /dev/null @@ -1 +0,0 @@ -db4e9d9e09b55c3fe5536628c1f2f07f4bad068b \ No newline at end of file diff --git a/resources/audio_coding/F03.OUT30.sha1 b/resources/audio_coding/F03.OUT30.sha1 deleted file mode 100644 index c8a733ce68..0000000000 --- a/resources/audio_coding/F03.OUT30.sha1 +++ /dev/null @@ -1 +0,0 @@ -f48b05dce8cb0370cda4b93da45b4edc3fdd42d0 \ No newline at end of file diff --git a/resources/audio_coding/READ.ME b/resources/audio_coding/READ.ME index 4a021a6a3f..0a19b38416 100644 --- a/resources/audio_coding/READ.ME +++ b/resources/audio_coding/READ.ME @@ -2,17 +2,3 @@ Test files for Audio Coding Module testfile32kHz.pcm - mono speech file samples at 32 kHz teststereo32kHz.pcm - stereo speech file samples at 32 kHz - -Test and reference vectors to verify correct execution of PacketCable -iLBC Fixed Point Reference Code - -Version 1.0.6 -Format: all .INP and .OUT files contain 16 bit sampled data using the -Intel (PC) format. The .BIT files are stored in the appropriate byte -sequence (big-endian format). 
- -*.INP - input files -*.BIT20 - bit stream files 20 ms mode -*.OUT20 - output files 20 ms mode (on a channel without packet loss) -*.BIT30 - bit stream files 30 ms mode -*.OUT30 - output files 30 ms mode (on a channel without packet loss) diff --git a/resources/audio_processing/output_data_float.pb.sha1 b/resources/audio_processing/output_data_float.pb.sha1 index 85e628cb76..5f828c5fb9 100644 --- a/resources/audio_processing/output_data_float.pb.sha1 +++ b/resources/audio_processing/output_data_float.pb.sha1 @@ -1 +1 @@ -d7dadc14736be65465a79054ce29413cd4cc1ccd \ No newline at end of file +8d8c2cbec8325f4d323030eba4dbab1c4aad79e9 \ No newline at end of file diff --git a/resources/audio_processing/output_data_float_avx2.pb.sha1 b/resources/audio_processing/output_data_float_avx2.pb.sha1 index 63f9887265..0e7ca37448 100644 --- a/resources/audio_processing/output_data_float_avx2.pb.sha1 +++ b/resources/audio_processing/output_data_float_avx2.pb.sha1 @@ -1 +1 @@ -40df0b4e636bb59fe258b93f8aab7fd2d3f6440d \ No newline at end of file +b1755a6258421b38d35fe8767ada159833aeee1c \ No newline at end of file diff --git a/resources/deflicker_before_cif_short.yuv.sha1 b/resources/deflicker_before_cif_short.yuv.sha1 deleted file mode 100644 index d2f5a9364f..0000000000 --- a/resources/deflicker_before_cif_short.yuv.sha1 +++ /dev/null @@ -1 +0,0 @@ -849f88896b1d00c2625c247e9e06a19d2ae0175c \ No newline at end of file diff --git a/resources/foremanColorEnhanced_cif_short.yuv.sha1 b/resources/foremanColorEnhanced_cif_short.yuv.sha1 deleted file mode 100644 index f2514ae6ad..0000000000 --- a/resources/foremanColorEnhanced_cif_short.yuv.sha1 +++ /dev/null @@ -1 +0,0 @@ -ce229fea854fbce532fe430b5b5a8c9b5db65d94 \ No newline at end of file diff --git a/resources/near48_mono.pcm.sha1 b/resources/near48_mono.pcm.sha1 new file mode 100644 index 0000000000..f9254c7436 --- /dev/null +++ b/resources/near48_mono.pcm.sha1 @@ -0,0 +1 @@ +2b752cdcb86095a0c405724aa1ce4ef910e06d10 \ No newline at end of file diff --git a/resources/rtc_event_log/rtc_event_log_500kbps.binarypb.sha1 b/resources/rtc_event_log/rtc_event_log_500kbps.binarypb.sha1 new file mode 100644 index 0000000000..b190e48733 --- /dev/null +++ b/resources/rtc_event_log/rtc_event_log_500kbps.binarypb.sha1 @@ -0,0 +1 @@ +56e4ff9ea2b0fb92e88c7126f1df1283787ae0e5 \ No newline at end of file diff --git a/rtc_base/BUILD.gn b/rtc_base/BUILD.gn index 80ab5a6fe9..88164115c4 100644 --- a/rtc_base/BUILD.gn +++ b/rtc_base/BUILD.gn @@ -13,14 +13,6 @@ if (is_android) { import("//build/config/android/rules.gni") } -if (!rtc_build_ssl) { - config("external_ssl_library") { - assert(rtc_ssl_root != "", - "You must specify rtc_ssl_root when rtc_build_ssl==0.") - include_dirs = [ rtc_ssl_root ] - } -} - rtc_source_set("protobuf_utils") { sources = [ "protobuf_utils.h" ] if (rtc_enable_protobuf) { @@ -38,11 +30,9 @@ rtc_source_set("bitstream_reader") { ":checks", ":safe_conversions", "../api:array_view", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/numeric:bits", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -82,8 +72,8 @@ rtc_source_set("buffer") { ":type_traits", ":zero_memory", "../api:array_view", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_source_set("byte_order") { @@ -102,7 +92,6 @@ rtc_source_set("moving_max_counter") { visibility = [ "*" ] sources = [ 
"numerics/moving_max_counter.h" ] deps = [ ":checks" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("one_time_event") { @@ -119,8 +108,10 @@ rtc_source_set("strong_alias") { rtc_source_set("swap_queue") { visibility = [ "*" ] sources = [ "swap_queue.h" ] - deps = [ ":checks" ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ] + deps = [ + ":checks", + "//third_party/abseil-cpp/absl/base:core_headers", + ] } rtc_source_set("macromagic") { @@ -140,10 +131,8 @@ rtc_library("bit_buffer") { deps = [ ":checks", "../api/units:data_size", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/numeric:bits", - "//third_party/abseil-cpp/absl/strings:strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -156,8 +145,10 @@ rtc_library("byte_buffer") { deps = [ ":buffer", ":byte_order", + "../api:array_view", + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("buffer_queue") { @@ -187,8 +178,8 @@ rtc_library("copy_on_write_buffer") { ":type_traits", "../api:scoped_refptr", "system:rtc_export", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("event_tracer") { @@ -196,6 +187,8 @@ rtc_library("event_tracer") { sources = [ "event_tracer.cc", "event_tracer.h", + "trace_categories.cc", + "trace_categories.h", "trace_event.h", ] deps = [ @@ -206,11 +199,12 @@ rtc_library("event_tracer") { ":platform_thread_types", ":rtc_event", ":timeutils", + "..:tracing", "../api:sequence_checker", "synchronization:mutex", "system:rtc_export", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("histogram_percentile_counter") { @@ -220,7 +214,6 @@ rtc_library("histogram_percentile_counter") { "numerics/histogram_percentile_counter.h", ] deps = [ ":checks" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("race_checker") { @@ -262,7 +255,6 @@ rtc_library("bitrate_tracker") { "../api/units:timestamp", "system:rtc_export", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("frequency_tracker") { @@ -278,7 +270,6 @@ rtc_library("frequency_tracker") { "../api/units:timestamp", "system:rtc_export", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("rate_statistics") { @@ -295,7 +286,6 @@ rtc_library("rate_statistics") { ":safe_conversions", "system:rtc_export", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("rate_tracker") { @@ -320,7 +310,6 @@ rtc_library("sample_counter") { ":checks", ":safe_conversions", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("timestamp_aligner") { @@ -369,6 +358,7 @@ rtc_source_set("refcount") { ] deps = [ ":macromagic", + "../api:ref_count", "../api:scoped_refptr", ] } @@ -399,11 +389,7 @@ rtc_library("platform_thread") { ":rtc_event", ":timeutils", "../api:sequence_checker", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -416,7 +402,7 @@ rtc_library("rtc_event") { deps = [ ":checks", "../api/units:time_delta", - "system:rtc_export", # Only Chromium's rtc::Event use RTC_EXPORT. 
+ "system:rtc_export", # Only Chromium's webrtc::Event use RTC_EXPORT. "//base", # Dependency on chromium's waitable_event. ] } else { @@ -431,7 +417,6 @@ rtc_library("rtc_event") { "synchronization:yield_policy", "system:warn_current_thread_is_deadlocked", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } } @@ -448,14 +433,12 @@ rtc_library("logging") { ":platform_thread_types", ":stringutils", ":timeutils", + ":type_traits", "../api/units:timestamp", "synchronization:mutex", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/meta:type_traits", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (build_with_chromium) { @@ -499,18 +482,17 @@ rtc_library("checks") { ] deps = [ ":safe_compare", + ":type_traits", "../api:scoped_refptr", "system:inline", "system:rtc_export", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (build_with_chromium) { sources += [ "../../webrtc_overrides/rtc_base/checks_overrides.cc" ] deps += [ "//base" ] } - absl_deps = [ - "//third_party/abseil-cpp/absl/meta:type_traits", - "//third_party/abseil-cpp/absl/strings", - ] if (is_android) { libs += [ "log" ] } @@ -525,15 +507,13 @@ rtc_library("rate_limiter") { ":macromagic", ":rate_statistics", "../system_wrappers", - "../system_wrappers:field_trial", "synchronization:mutex", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("sanitizer") { sources = [ "sanitizer.h" ] - absl_deps = [ "//third_party/abseil-cpp/absl/meta:type_traits" ] + deps = [] } rtc_source_set("bounded_inline_vector") { @@ -606,6 +586,7 @@ rtc_library("stringutils") { "string_to_number.h", "string_utils.cc", "string_utils.h", + "strings/str_join.h", "strings/string_builder.cc", "strings/string_builder.h", "strings/string_format.cc", @@ -616,21 +597,9 @@ rtc_library("stringutils") { ":macromagic", ":safe_minmax", "../api:array_view", - ] - absl_deps = [ + "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - -rtc_library("audio_format_to_string") { - sources = [ - "strings/audio_format_to_string.cc", - "strings/audio_format_to_string.h", - ] - deps = [ - ":stringutils", - "../api/audio_codecs:audio_codecs_api", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -638,23 +607,6 @@ rtc_source_set("type_traits") { sources = [ "type_traits.h" ] } -rtc_library("rtc_task_queue") { - visibility = [ "*" ] - sources = [ - "task_queue.cc", - "task_queue.h", - ] - deps = [ - ":macromagic", - "../api/task_queue", - "system:rtc_export", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/memory", - ] -} - rtc_source_set("rtc_operations_chain") { visibility = [ "*" ] sources = [ @@ -671,37 +623,6 @@ rtc_source_set("rtc_operations_chain") { "../api:sequence_checker", "system:no_unique_address", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] -} - -if (rtc_enable_libevent) { - rtc_library("rtc_task_queue_libevent") { - visibility = [ "../api/task_queue:default_task_queue_factory" ] - sources = [ - "task_queue_libevent.cc", - "task_queue_libevent.h", - ] - deps = [ - ":checks", - ":logging", - ":macromagic", - ":platform_thread", - ":platform_thread_types", - ":safe_conversions", - ":timeutils", - "../api/task_queue", - 
"../api/units:time_delta", - "synchronization:mutex", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/strings", - ] - if (rtc_build_libevent) { - deps += [ "//third_party/libevent" ] - } - } } if (is_mac || is_ios) { @@ -719,10 +640,8 @@ if (is_mac || is_ios) { "../api/units:time_delta", "synchronization:mutex", "system:gcd_helpers", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } } @@ -746,11 +665,8 @@ if (is_win) { "../api/units:time_delta", "../api/units:timestamp", "synchronization:mutex", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } } @@ -772,10 +688,8 @@ rtc_library("rtc_task_queue_stdlib") { "../api/task_queue", "../api/units:time_delta", "synchronization:mutex", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -786,8 +700,13 @@ if (rtc_include_tests) { sources = [ "task_queue_stdlib_unittest.cc" ] deps = [ ":gunit_helpers", + ":logging", + ":rtc_event", ":rtc_task_queue_stdlib", + "../api/task_queue", "../api/task_queue:task_queue_test", + "../api/units:time_delta", + "../system_wrappers", "../test:test_main", "../test:test_support", ] @@ -800,6 +719,8 @@ rtc_library("weak_ptr") { "weak_ptr.h", ] deps = [ + ":checks", + ":macromagic", ":refcount", "../api:scoped_refptr", "../api:sequence_checker", @@ -818,6 +739,7 @@ rtc_library("rtc_numerics") { "numerics/moving_average.h", "numerics/moving_percentile_filter.h", "numerics/percentile_filter.h", + "numerics/rational.h", "numerics/running_statistics.h", "numerics/sequence_number_unwrapper.h", "numerics/sequence_number_util.h", @@ -826,7 +748,6 @@ rtc_library("rtc_numerics") { ":checks", ":mod_ops", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("rtc_stats_counters") { @@ -837,12 +758,12 @@ rtc_library("rtc_stats_counters") { "numerics/sample_stats.h", ] deps = [ + ":checks", "../api/numerics", "../api/units:data_rate", "../api/units:time_delta", "../api/units:timestamp", ] - absl_deps = [] } config("rtc_json_suppressions") { @@ -856,21 +777,23 @@ config("rtc_json_suppressions") { } rtc_library("rtc_json") { + testonly = true public_configs = [ ":rtc_json_suppressions" ] - poisonous = [ "rtc_json" ] defines = [] sources = [ "strings/json.cc", "strings/json.h", ] - deps = [ ":stringutils" ] + deps = [ + ":stringutils", + "//third_party/abseil-cpp/absl/strings:string_view", + ] all_dependent_configs = [ "//third_party/jsoncpp:jsoncpp_config" ] if (rtc_build_json) { deps += [ "//third_party/jsoncpp" ] } else { include_dirs = [ "$rtc_jsoncpp_root" ] } - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("net_helpers") { @@ -879,7 +802,10 @@ rtc_library("net_helpers") { "net_helpers.cc", "net_helpers.h", ] - deps = [ "system:rtc_export" ] + deps = [ + "system:rtc_export", + "//third_party/abseil-cpp/absl/strings:string_view", + ] if (is_android) { deps += [ ":ifaddrs_android" ] } @@ -889,7 +815,6 @@ rtc_library("net_helpers") { "win:windows_version", ] } - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } 
rtc_library("net_test_helpers") { @@ -900,7 +825,10 @@ rtc_library("net_test_helpers") { "net_test_helpers.cc", "net_test_helpers.h", ] - deps = [ "system:rtc_export" ] + deps = [ + ":net_helpers", + "system:rtc_export", + ] if (is_android) { deps += [ ":ifaddrs_android" ] } @@ -912,20 +840,6 @@ rtc_library("net_test_helpers") { } } -rtc_library("async_resolver_interface") { - visibility = [ "*" ] - sources = [ - "async_resolver_interface.cc", - "async_resolver_interface.h", - ] - deps = [ - ":checks", - ":socket_address", - "system:rtc_export", - "third_party/sigslot", - ] -} - rtc_library("async_dns_resolver") { sources = [ "async_dns_resolver.cc", @@ -940,6 +854,7 @@ rtc_library("async_dns_resolver") { "../api:make_ref_counted", "../api:sequence_checker", "../api/task_queue:pending_task_safety_flag", + "system:rtc_export", ] } @@ -949,8 +864,15 @@ rtc_library("async_dns_resolver_unittests") { deps = [ ":async_dns_resolver", ":gunit_helpers", + ":logging", + ":net_helpers", + ":socket_address", + ":threading", + "../api:rtc_error_matchers", + "../api/units:time_delta", "../test:run_loop", "../test:test_support", + "../test:wait_until", ] } @@ -965,11 +887,11 @@ rtc_library("ip_address") { ":net_helpers", ":stringutils", "system:rtc_export", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (is_win) { deps += [ ":win32" ] } - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("socket_address") { @@ -987,11 +909,11 @@ rtc_library("socket_address") { ":safe_conversions", ":stringutils", "system:rtc_export", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (is_win) { deps += [ ":win32" ] } - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("null_socket_server") { @@ -1023,8 +945,6 @@ rtc_library("threading") { visibility = [ "*" ] sources = [ - "async_resolver.cc", - "async_resolver.h", "internal/default_socket_server.cc", "internal/default_socket_server.h", "network_monitor.cc", @@ -1036,16 +956,8 @@ rtc_library("threading") { "thread.cc", "thread.h", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/cleanup", - "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - ] deps = [ - ":async_resolver_interface", + ":async_dns_resolver", ":byte_order", ":checks", ":criticalsection", @@ -1059,10 +971,11 @@ rtc_library("threading") { ":platform_thread_types", ":refcount", ":rtc_event", - ":rtc_task_queue", + ":socket", ":socket_address", ":socket_server", ":timeutils", + "../api:async_dns_resolver", "../api:function_view", "../api:location", "../api:refcountedbase", @@ -1070,12 +983,20 @@ rtc_library("threading") { "../api:sequence_checker", "../api/task_queue", "../api/task_queue:pending_task_safety_flag", + "../api/transport:ecn_marking", "../api/units:time_delta", + "../api/units:timestamp", "../system_wrappers:field_trial", + "./network:ecn_marking", "synchronization:mutex", "system:no_unique_address", "system:rtc_export", "third_party/sigslot", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/cleanup", + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (is_android) { deps += [ ":ifaddrs_android" ] @@ -1103,8 +1024,8 @@ rtc_library("async_socket") { ":socket", ":socket_address", 
"third_party/sigslot", + "//third_party/abseil-cpp/absl/memory", ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] } rtc_library("socket") { @@ -1113,8 +1034,15 @@ rtc_library("socket") { "socket.h", ] deps = [ + ":buffer", + ":checks", + ":ip_address", ":macromagic", + ":net_helpers", ":socket_address", + "../api/units:timestamp", + "./network:ecn_marking", + "system:rtc_export", "third_party/sigslot", ] if (is_win) { @@ -1141,7 +1069,7 @@ if (is_android) { "log", "GLESv2", ] - absl_deps = [ "//third_party/abseil-cpp/absl/cleanup" ] + deps = [ "//third_party/abseil-cpp/absl/cleanup" ] } } @@ -1219,8 +1147,8 @@ if (!build_with_chromium) { ":checks", ":file_rotating_stream", ":logging", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } } @@ -1237,26 +1165,29 @@ rtc_library("network") { ":logging", ":macromagic", ":mdns_responder_interface", + ":net_helpers", + ":network_constants", ":socket", + ":socket_address", ":socket_factory", ":stringutils", ":threading", "../api:array_view", "../api:field_trials_view", + "../api:scoped_refptr", "../api:sequence_checker", + "../api/environment", "../api/task_queue:pending_task_safety_flag", - "../api/transport:field_trial_based_config", "../api/units:time_delta", "experiments:field_trial_parser", - "memory:always_valid_pointer", "system:rtc_export", "third_party/sigslot", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/base:nullability", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (is_win) { @@ -1278,8 +1209,10 @@ rtc_library("net_helper") { "net_helper.cc", "net_helper.h", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] - deps = [ "system:rtc_export" ] + deps = [ + "system:rtc_export", + "//third_party/abseil-cpp/absl/strings:string_view", + ] } rtc_library("socket_adapters") { @@ -1293,14 +1226,15 @@ rtc_library("socket_adapters") { ":buffer", ":byte_buffer", ":checks", - ":crypt_string", - ":http_common", ":logging", + ":socket", + ":socket_address", ":stringutils", ":zero_memory", "../api:array_view", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("network_route") { @@ -1342,6 +1276,7 @@ rtc_library("async_udp_socket") { ] deps = [ ":async_packet_socket", + ":buffer", ":checks", ":logging", ":macromagic", @@ -1350,11 +1285,13 @@ rtc_library("async_udp_socket") { ":socket_factory", ":timeutils", "../api:sequence_checker", + "../api/units:time_delta", + "../api/units:timestamp", "../system_wrappers:field_trial", + "network:received_packet", "network:sent_packet", "system:no_unique_address", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("async_packet_socket") { @@ -1365,39 +1302,68 @@ rtc_library("async_packet_socket") { ] deps = [ ":callback_list", + ":checks", ":dscp", + ":macromagic", ":socket", + ":socket_address", ":timeutils", "../api:sequence_checker", + "network:received_packet", "network:sent_packet", "system:no_unique_address", "system:rtc_export", "third_party/sigslot", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] } +if (rtc_include_tests) { + rtc_library("async_packet_socket_unittest") { + testonly = true + visibility = [ "*" ] + sources = [ "async_packet_socket_unittest.cc" ] 
+ deps = [ + ":async_packet_socket", + ":gunit_helpers", + ":socket_address", + "../test:test_support", + "network:received_packet", + "third_party/sigslot", + ] + } + + rtc_library("async_udp_socket_unittest") { + testonly = true + visibility = [ "*" ] + sources = [ "async_udp_socket_unittest.cc" ] + deps = [ + ":async_packet_socket", + ":async_udp_socket", + ":gunit_helpers", + ":rtc_base_tests_utils", + ":socket", + ":socket_address", + "../test:test_support", + "network:received_packet", + "third_party/sigslot", + "//third_party/abseil-cpp/absl/memory", + ] + } +} + rtc_library("mdns_responder_interface") { sources = [ "mdns_responder_interface.h" ] - deps = [ ":ip_address" ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + deps = [ + ":ip_address", + "//third_party/abseil-cpp/absl/strings:string_view", + ] } rtc_library("dscp") { sources = [ "dscp.h" ] } -rtc_library("proxy_info") { - visibility = [ "*" ] - sources = [ - "proxy_info.cc", - "proxy_info.h", - ] - deps = [ - ":crypt_string", - ":socket_address", - ] -} - rtc_library("file_rotating_stream") { sources = [ "file_rotating_stream.cc", @@ -1408,11 +1374,9 @@ rtc_library("file_rotating_stream") { ":logging", ":stringutils", "system:file_wrapper", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -1430,14 +1394,14 @@ rtc_library("unique_id_generator") { "unique_id_generator.h", ] deps = [ - ":ssl", + ":crypto_random", ":stringutils", "../api:array_view", "../api:sequence_checker", "synchronization:mutex", "system:no_unique_address", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("crc32") { @@ -1445,8 +1409,10 @@ rtc_library("crc32") { "crc32.cc", "crc32.h", ] - deps = [ ":macromagic" ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + deps = [ + ":macromagic", + "//third_party/abseil-cpp/absl/strings:string_view", + ] } rtc_library("stream") { @@ -1458,10 +1424,15 @@ rtc_library("stream") { deps = [ ":buffer", ":checks", + ":logging", + ":macromagic", ":threading", "../api:array_view", + "../api:sequence_checker", + "system:no_unique_address", "system:rtc_export", "third_party/sigslot", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] } @@ -1477,81 +1448,101 @@ rtc_library("rtc_certificate_generator") { ":threading", "../api:scoped_refptr", "system:rtc_export", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/types:optional", ] } -rtc_library("ssl") { +rtc_source_set("ssl_header") { + visibility = [ "*" ] + sources = [ "openssl.h" ] + deps = [] + if (is_win) { + deps += [ ":win32" ] + } +} + +rtc_source_set("digest") { visibility = [ "*" ] sources = [ - "helpers.cc", - "helpers.h", "message_digest.cc", "message_digest.h", - "openssl.h", - "openssl_adapter.cc", - "openssl_adapter.h", "openssl_digest.cc", "openssl_digest.h", + ] + deps = [ + ":checks", + ":ssl_header", + ":stringutils", + "//third_party/abseil-cpp/absl/strings:string_view", + ] + + # If we are building the SSL library ourselves, we know it's BoringSSL. 
+ if (rtc_build_ssl) { + deps += [ "//third_party/boringssl" ] + } else { + configs += [ "..:external_ssl_library" ] + } +} + +rtc_library("crypto_random") { + visibility = [ "*" ] + sources = [ + "crypto_random.cc", + "crypto_random.h", + ] + deps = [ + ":checks", + ":logging", + "synchronization:mutex", + "system:rtc_export", + "//third_party/abseil-cpp/absl/strings:string_view", + ] + + # If we are building the SSL library ourselves, we know it's BoringSSL. + if (rtc_build_ssl) { + deps += [ "//third_party/boringssl" ] + } else { + configs += [ "..:external_ssl_library" ] + } +} + +rtc_library("ssl") { + visibility = [ "*" ] + sources = [ "openssl_key_pair.cc", "openssl_key_pair.h", - "openssl_session_cache.cc", - "openssl_session_cache.h", - "openssl_stream_adapter.cc", - "openssl_stream_adapter.h", "openssl_utility.cc", "openssl_utility.h", "rtc_certificate.cc", "rtc_certificate.h", - "ssl_adapter.cc", - "ssl_adapter.h", "ssl_certificate.cc", "ssl_certificate.h", "ssl_fingerprint.cc", "ssl_fingerprint.h", "ssl_identity.cc", "ssl_identity.h", - "ssl_stream_adapter.cc", - "ssl_stream_adapter.h", ] deps = [ - ":async_socket", + ":base64", ":buffer", ":checks", ":copy_on_write_buffer", + ":digest", ":logging", ":macromagic", ":safe_conversions", - ":socket", - ":socket_address", - ":stream", + ":ssl_header", ":stringutils", - ":threading", ":timeutils", "../api:array_view", "../api:refcountedbase", "../api:scoped_refptr", - "../api/task_queue:pending_task_safety_flag", - "../api/units:time_delta", - "../system_wrappers:field_trial", - "synchronization:mutex", "system:rtc_export", - "task_utils:repeating_task", - "third_party/base64", - "third_party/sigslot", - ] - - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] # If we are building the SSL library ourselves, we know it's BoringSSL. 
@@ -1563,7 +1554,10 @@ rtc_library("ssl") { "boringssl_identity.h", ] - deps += [ "//third_party/boringssl" ] + deps += [ + ":crypto_random", + "//third_party/boringssl", + ] } else { sources += [ "openssl_certificate.cc", @@ -1571,8 +1565,9 @@ rtc_library("ssl") { "openssl_identity.cc", "openssl_identity.h", ] + deps += [ ":crypto_random" ] - configs += [ ":external_ssl_library" ] + configs += [ "..:external_ssl_library" ] } if (build_with_chromium) { @@ -1586,29 +1581,54 @@ rtc_library("ssl") { } } -rtc_library("crypt_string") { +rtc_library("ssl_adapter") { + visibility = [ "*" ] sources = [ - "crypt_string.cc", - "crypt_string.h", + "openssl_adapter.cc", + "openssl_adapter.h", + "openssl_session_cache.cc", + "openssl_session_cache.h", + "openssl_stream_adapter.cc", + "openssl_stream_adapter.h", + "ssl_adapter.cc", + "ssl_adapter.h", + "ssl_stream_adapter.cc", + "ssl_stream_adapter.h", ] -} -rtc_library("http_common") { - sources = [ - "http_common.cc", - "http_common.h", - ] deps = [ - ":crypt_string", + ":async_socket", + ":buffer", + ":checks", + ":digest", ":logging", + ":safe_conversions", + ":socket", ":socket_address", ":ssl", + ":ssl_header", + ":stream", ":stringutils", - ":zero_memory", - "third_party/base64", + ":threading", + ":timeutils", + "../api:array_view", + "../api:field_trials_view", + "../api:sequence_checker", + "../api/task_queue:pending_task_safety_flag", + "../api/units:time_delta", + "system:rtc_export", + "task_utils:repeating_task", + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] + # If we are building the SSL library ourselves, we know it's BoringSSL. + if (rtc_build_ssl) { + deps += [ "//third_party/boringssl" ] + } else { + configs += [ "..:external_ssl_library" ] + } } rtc_source_set("gtest_prod") { @@ -1617,18 +1637,15 @@ rtc_source_set("gtest_prod") { rtc_library("gunit_helpers") { testonly = true - sources = [ - "gunit.cc", - "gunit.h", - ] + sources = [ "gunit.h" ] deps = [ ":logging", ":rtc_base_tests_utils", ":stringutils", ":threading", "../test:test_support", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("testclient") { @@ -1638,11 +1655,18 @@ rtc_library("testclient") { "test_client.h", ] deps = [ + ":async_packet_socket", ":async_udp_socket", + ":buffer", ":gunit_helpers", ":rtc_base_tests_utils", + ":socket", + ":socket_address", ":threading", ":timeutils", + "../api/units:time_delta", + "../api/units:timestamp", + "network:received_packet", "synchronization:mutex", ] } @@ -1676,20 +1700,10 @@ rtc_library("rtc_base_tests_utils") { "memory_stream.h", "memory_usage.cc", "memory_usage.h", - "nat_server.cc", - "nat_server.h", - "nat_socket_factory.cc", - "nat_socket_factory.h", - "nat_types.cc", - "nat_types.h", "proxy_server.cc", "proxy_server.h", "server_socket_adapters.cc", "server_socket_adapters.h", - "sigslot_tester.h", - "socket_stream.cc", - "socket_stream.h", - "test_base64.h", "test_certificate_verifier.h", "test_echo_server.cc", "test_echo_server.h", @@ -1703,13 +1717,17 @@ rtc_library("rtc_base_tests_utils") { ":async_socket", ":async_tcp_socket", ":async_udp_socket", + ":buffer", ":byte_buffer", ":checks", + ":digest", ":ip_address", ":logging", ":macromagic", ":mdns_responder_interface", + ":net_helpers", ":network", + ":network_constants", ":rtc_event", ":socket", ":socket_adapters", @@ -1722,23 
+1740,24 @@ rtc_library("rtc_base_tests_utils") { ":stringutils", ":threading", ":timeutils", + "../api:array_view", "../api:make_ref_counted", "../api:refcountedbase", "../api:scoped_refptr", + "../api:sequence_checker", "../api/task_queue", "../api/units:time_delta", "../api/units:timestamp", "../test:scoped_key_value_config", "memory:always_valid_pointer", "memory:fifo_buffer", + "network:received_packet", "synchronization:mutex", "third_party/sigslot", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (is_fuchsia) { deps += [ "//third_party/fuchsia-sdk/sdk/pkg/zx" ] @@ -1754,20 +1773,40 @@ rtc_library("task_queue_for_test") { ] deps = [ ":checks", - ":macromagic", ":rtc_event", - ":rtc_task_queue", "../api:function_view", "../api/task_queue", "../api/task_queue:default_task_queue_factory", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/cleanup", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("base64") { + sources = [ + "base64.cc", + "base64.h", + ] + deps = [ + "../api:array_view", + "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } if (rtc_include_tests) { + if (rtc_enable_google_benchmarks) { + rtc_test("base64_benchmark") { + sources = [ "base64_benchmark.cc" ] + deps = [ + ":base64", + "../test:benchmark_main", + "//third_party/google_benchmark", + ] + } + } + rtc_library("sigslot_unittest") { testonly = true sources = [ "sigslot_unittest.cc" ] @@ -1794,11 +1833,16 @@ if (rtc_include_tests) { sources = [ "operations_chain_unittest.cc" ] deps = [ + ":checks", ":gunit_helpers", ":rtc_event", ":rtc_operations_chain", ":threading", + "../api:rtc_error_matchers", + "../api:scoped_refptr", + "../api/units:time_delta", "../test:test_support", + "../test:wait_until", ] } @@ -1832,22 +1876,27 @@ if (rtc_include_tests) { ":rtc_base_tests_utils", ":socket", ":socket_address", + ":socket_factory", ":socket_server", ":testclient", ":threading", ":timeutils", + "../api:rtc_error_matchers", + "../api/transport:ecn_marking", "../api/units:time_delta", + "../api/units:timestamp", "../system_wrappers", "../test:field_trial", "../test:fileutils", "../test:test_main", "../test:test_support", + "../test:wait_until", + "network:ecn_marking", + "system:file_wrapper", "third_party/sigslot", "//testing/gtest", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -1888,6 +1937,7 @@ if (rtc_include_tests) { "string_encode_unittest.cc", "string_to_number_unittest.cc", "string_utils_unittest.cc", + "strings/str_join_unittest.cc", "strings/string_builder_unittest.cc", "strings/string_format_unittest.cc", "strong_alias_unittest.cc", @@ -1901,6 +1951,7 @@ if (rtc_include_tests) { deps = [ ":async_packet_socket", ":async_udp_socket", + ":base64", ":bit_buffer", ":bitrate_tracker", ":bitstream_reader", @@ -1912,6 +1963,7 @@ if (rtc_include_tests) { ":checks", ":copy_on_write_buffer", ":criticalsection", + ":crypto_random", ":divide_round", ":event_tracer", ":frequency_tracker", @@ -1922,6 +1974,7 @@ if (rtc_include_tests) { ":macromagic", ":mod_ops", ":moving_max_counter", + ":net_helpers", 
":null_socket_server", ":one_time_event", ":platform_thread", @@ -1933,7 +1986,6 @@ if (rtc_include_tests) { ":rtc_base_tests_utils", ":rtc_event", ":rtc_numerics", - ":rtc_task_queue", ":safe_compare", ":safe_minmax", ":sample_counter", @@ -1952,6 +2004,7 @@ if (rtc_include_tests) { ":zero_memory", "../api:array_view", "../api:make_ref_counted", + "../api:ref_count", "../api:scoped_refptr", "../api/numerics", "../api/units:data_rate", @@ -1960,24 +2013,20 @@ if (rtc_include_tests) { "../api/units:time_delta", "../api/units:timestamp", "../system_wrappers", - "../test:field_trial", "../test:fileutils", "../test:test_main", "../test:test_support", "containers:flat_map", "containers:unittests", "memory:unittests", + "network:received_packet", "synchronization:mutex", "task_utils:repeating_task", - "third_party/base64", "third_party/sigslot", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/numeric:bits", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (is_win) { @@ -1993,14 +2042,13 @@ if (rtc_include_tests) { ":gunit_helpers", ":rtc_base_tests_utils", ":rtc_event", - ":rtc_task_queue", ":task_queue_for_test", ":timeutils", "../api/units:time_delta", "../test:test_main", "../test:test_support", + "//third_party/abseil-cpp/absl/memory", ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] } rtc_library("weak_ptr_unittests") { @@ -2032,12 +2080,13 @@ if (rtc_include_tests) { "numerics/sequence_number_util_unittest.cc", ] deps = [ + ":mod_ops", ":rtc_numerics", ":timeutils", "../test:test_main", "../test:test_support", + "//third_party/abseil-cpp/absl/algorithm:container", ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } rtc_library("rtc_json_unittests") { @@ -2059,20 +2108,17 @@ if (rtc_include_tests) { sources = [ "crc32_unittest.cc", + "crypto_random_unittest.cc", "data_rate_limiter_unittest.cc", "fake_clock_unittest.cc", - "helpers_unittest.cc", "ip_address_unittest.cc", "memory_usage_unittest.cc", "message_digest_unittest.cc", - "nat_unittest.cc", "network_route_unittest.cc", "network_unittest.cc", - "proxy_unittest.cc", "rolling_accumulator_unittest.cc", "rtc_certificate_generator_unittest.cc", "rtc_certificate_unittest.cc", - "sigslot_tester_unittest.cc", "test_client_unittest.cc", "thread_unittest.cc", "unique_id_generator_unittest.cc", @@ -2085,7 +2131,9 @@ if (rtc_include_tests) { ":buffer_queue", ":checks", ":crc32", + ":crypto_random", ":data_rate_limiter", + ":digest", ":gunit_helpers", ":ifaddrs_converter", ":ip_address", @@ -2094,6 +2142,7 @@ if (rtc_include_tests) { ":net_helpers", ":net_test_helpers", ":network", + ":network_constants", ":network_route", ":null_socket_server", ":refcount", @@ -2108,6 +2157,8 @@ if (rtc_include_tests) { ":socket_factory", ":socket_server", ":ssl", + ":ssl_adapter", + ":ssl_header", ":stream", ":stringutils", ":testclient", @@ -2117,6 +2168,11 @@ if (rtc_include_tests) { "../api:array_view", "../api:field_trials_view", "../api:make_ref_counted", + "../api:rtc_error_matchers", + "../api:scoped_refptr", + "../api:sequence_checker", + "../api/environment", + "../api/environment:environment_factory", "../api/task_queue", "../api/task_queue:pending_task_safety_flag", "../api/task_queue:task_queue_test", @@ -2127,9 +2183,17 @@ if (rtc_include_tests) { "../test:scoped_key_value_config", "../test:test_main", 
"../test:test_support", + "../test:wait_until", "memory:fifo_buffer", + "network:received_packet", "synchronization:mutex", "third_party/sigslot", + "//testing/gtest", + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/functional:any_invocable", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (rtc_enable_google_benchmarks) { deps += [ "synchronization:synchronization_unittests" ] @@ -2138,7 +2202,7 @@ if (rtc_include_tests) { sources += [ "win32_unittest.cc" ] deps += [ ":win32" ] } - if (is_posix || is_fuchsia) { + if (is_posix || is_fuchsia || is_win) { sources += [ "openssl_adapter_unittest.cc", "openssl_session_cache_unittest.cc", @@ -2147,14 +2211,8 @@ if (rtc_include_tests) { "ssl_identity_unittest.cc", "ssl_stream_adapter_unittest.cc", ] + deps += [ ":callback_list" ] } - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] if (build_with_chromium) { include_dirs = [ "../../boringssl/src/include" ] @@ -2162,7 +2220,7 @@ if (rtc_include_tests) { if (rtc_build_ssl) { deps += [ "//third_party/boringssl" ] } else { - configs += [ ":external_ssl_library" ] + configs += [ "..:external_ssl_library" ] } } } diff --git a/rtc_base/DEPS b/rtc_base/DEPS index 3a77b5502a..0e7237d329 100644 --- a/rtc_base/DEPS +++ b/rtc_base/DEPS @@ -2,14 +2,43 @@ include_rules = [ "+json", "+system_wrappers", "+third_party/jsoncpp", - "+third_party/libevent", ] specific_include_rules = { + "checks.h": [ + "+absl/strings/has_absl_stringify.h", + ], "protobuf_utils.h": [ "+third_party/protobuf", ], "gunit\.h": [ "+testing/base/public/gunit.h" ], + "trace_categories\.h": [ + "+third_party/perfetto", + ], + "event_tracer\.cc": [ + "+third_party/perfetto", + ], + "logging.h": [ + "+absl/strings/has_absl_stringify.h", + ], + "trace_event\.h": [ + "+third_party/perfetto", + ], + "openssl_adapter.cc": [ + "+openssl", + ], + "openssl_stream_adapter.cc": [ + "+openssl", + ], + "openssl_stream_adapter.h": [ + "+openssl", + ], + "openssl_session_cache\.h": [ + "+openssl", + ], + "base64_benchmark\.cc": [ + "+benchmark", + ], } diff --git a/rtc_base/async_dns_resolver.cc b/rtc_base/async_dns_resolver.cc index 8cbd21cb6d..625721a2d9 100644 --- a/rtc_base/async_dns_resolver.cc +++ b/rtc_base/async_dns_resolver.cc @@ -30,7 +30,7 @@ namespace { #ifdef __native_client__ int ResolveHostname(absl::string_view hostname, int family, - std::vector* addresses) { + std::vector* addresses) { RTC_DCHECK_NOTREACHED(); RTC_LOG(LS_WARNING) << "ResolveHostname() is not implemented for NaCl"; return -1; @@ -38,7 +38,7 @@ int ResolveHostname(absl::string_view hostname, #else // notdef(__native_client__) int ResolveHostname(absl::string_view hostname, int family, - std::vector& addresses) { + std::vector& addresses) { addresses.clear(); struct addrinfo* result = nullptr; struct addrinfo hints = {0}; @@ -69,7 +69,7 @@ int ResolveHostname(absl::string_view hostname, struct addrinfo* cursor = result; for (; cursor; cursor = cursor->ai_next) { if (family == AF_UNSPEC || cursor->ai_family == family) { - rtc::IPAddress ip; + IPAddress ip; if (IPFromAddrInfo(cursor, &ip)) { addresses.push_back(ip); } @@ -99,15 +99,15 @@ void PostTaskToGlobalQueue( } // namespace -class AsyncDnsResolver::State : public 
rtc::RefCountedBase { +class AsyncDnsResolver::State : public RefCountedBase { public: enum class Status { kActive, // Running request, or able to be passed one kFinished, // Request has finished processing kDead // The owning AsyncDnsResolver has been deleted }; - static rtc::scoped_refptr Create() { - return rtc::make_ref_counted(); + static scoped_refptr Create() { + return make_ref_counted(); } // Execute the passed function if the state is Active. @@ -135,13 +135,13 @@ AsyncDnsResolver::~AsyncDnsResolver() { state_->Kill(); } -void AsyncDnsResolver::Start(const rtc::SocketAddress& addr, +void AsyncDnsResolver::Start(const SocketAddress& addr, absl::AnyInvocable callback) { Start(addr, addr.family(), std::move(callback)); } // Start address resolution of the hostname in `addr` matching `family`. -void AsyncDnsResolver::Start(const rtc::SocketAddress& addr, +void AsyncDnsResolver::Start(const SocketAddress& addr, int family, absl::AnyInvocable callback) { RTC_DCHECK_RUN_ON(&result_.sequence_checker_); @@ -150,14 +150,14 @@ void AsyncDnsResolver::Start(const rtc::SocketAddress& addr, auto thread_function = [this, addr, family, flag = safety_.flag(), caller_task_queue = webrtc::TaskQueueBase::Current(), state = state_] { - std::vector addresses; + std::vector addresses; int error = ResolveHostname(addr.hostname(), family, addresses); // We assume that the caller task queue is still around if the // AsyncDnsResolver has not been destroyed. state->Finish([this, error, flag, caller_task_queue, - addresses = std::move(addresses)]() { + addresses = std::move(addresses)]() mutable { caller_task_queue->PostTask( - SafeTask(flag, [this, error, addresses = std::move(addresses)] { + SafeTask(flag, [this, error, addresses = std::move(addresses)]() { RTC_DCHECK_RUN_ON(&result_.sequence_checker_); result_.addresses_ = addresses; result_.error_ = error; @@ -169,8 +169,7 @@ void AsyncDnsResolver::Start(const rtc::SocketAddress& addr, PostTaskToGlobalQueue( std::make_unique>(thread_function)); #else - rtc::PlatformThread::SpawnDetached(std::move(thread_function), - "AsyncResolver"); + PlatformThread::SpawnDetached(std::move(thread_function), "AsyncResolver"); #endif } @@ -178,9 +177,8 @@ const AsyncDnsResolverResult& AsyncDnsResolver::result() const { return result_; } -bool AsyncDnsResolverResultImpl::GetResolvedAddress( - int family, - rtc::SocketAddress* addr) const { +bool AsyncDnsResolverResultImpl::GetResolvedAddress(int family, + SocketAddress* addr) const { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DCHECK(addr); if (error_ != 0 || addresses_.empty()) diff --git a/rtc_base/async_dns_resolver.h b/rtc_base/async_dns_resolver.h index c15af7a1cb..674e140892 100644 --- a/rtc_base/async_dns_resolver.h +++ b/rtc_base/async_dns_resolver.h @@ -16,6 +16,7 @@ #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "rtc_base/ref_counted_object.h" +#include "rtc_base/system/rtc_export.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -25,27 +26,27 @@ namespace webrtc { class AsyncDnsResolverResultImpl : public AsyncDnsResolverResult { public: - bool GetResolvedAddress(int family, rtc::SocketAddress* addr) const override; + bool GetResolvedAddress(int family, SocketAddress* addr) const override; // Returns error from resolver. 
int GetError() const override; private: friend class AsyncDnsResolver; RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker sequence_checker_; - rtc::SocketAddress addr_ RTC_GUARDED_BY(sequence_checker_); - std::vector addresses_ RTC_GUARDED_BY(sequence_checker_); + SocketAddress addr_ RTC_GUARDED_BY(sequence_checker_); + std::vector addresses_ RTC_GUARDED_BY(sequence_checker_); int error_ RTC_GUARDED_BY(sequence_checker_); }; -class AsyncDnsResolver : public AsyncDnsResolverInterface { +class RTC_EXPORT AsyncDnsResolver : public AsyncDnsResolverInterface { public: AsyncDnsResolver(); ~AsyncDnsResolver(); // Start address resolution of the hostname in `addr`. - void Start(const rtc::SocketAddress& addr, + void Start(const SocketAddress& addr, absl::AnyInvocable callback) override; // Start address resolution of the hostname in `addr` matching `family`. - void Start(const rtc::SocketAddress& addr, + void Start(const SocketAddress& addr, int family, absl::AnyInvocable callback) override; const AsyncDnsResolverResult& result() const override; @@ -53,7 +54,7 @@ class AsyncDnsResolver : public AsyncDnsResolverInterface { private: class State; ScopedTaskSafety safety_; // To check for client going away - rtc::scoped_refptr state_; // To check for "this" going away + scoped_refptr state_; // To check for "this" going away AsyncDnsResolverResultImpl result_; absl::AnyInvocable callback_; }; diff --git a/rtc_base/async_dns_resolver_unittest.cc b/rtc_base/async_dns_resolver_unittest.cc index 11f8b1b6f4..e9f1d2b463 100644 --- a/rtc_base/async_dns_resolver_unittest.cc +++ b/rtc_base/async_dns_resolver_unittest.cc @@ -10,13 +10,25 @@ #include "rtc_base/async_dns_resolver.h" -#include "rtc_base/gunit.h" +#include + +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" +#include "rtc_base/logging.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/thread.h" +#include "test/gmock.h" #include "test/gtest.h" #include "test/run_loop.h" +#include "test/wait_until.h" namespace webrtc { namespace { -const int kDefaultTimeout = 1000; + +using ::testing::IsTrue; + +const webrtc::TimeDelta kDefaultTimeout = webrtc::TimeDelta::Millis(1000); const int kPortNumber = 3027; TEST(AsyncDnsResolver, ConstructorWorks) { @@ -26,15 +38,17 @@ TEST(AsyncDnsResolver, ConstructorWorks) { TEST(AsyncDnsResolver, ResolvingLocalhostWorks) { test::RunLoop loop; // Ensure that posting back to main thread works AsyncDnsResolver resolver; - rtc::SocketAddress address("localhost", - kPortNumber); // Port number does not matter - rtc::SocketAddress resolved_address; + SocketAddress address("localhost", + kPortNumber); // Port number does not matter + SocketAddress resolved_address; bool done = false; resolver.Start(address, [&done] { done = true; }); - ASSERT_TRUE_WAIT(done, kDefaultTimeout); + ASSERT_THAT( + WaitUntil([&] { return done; }, IsTrue(), {.timeout = kDefaultTimeout}), + IsRtcOk()); EXPECT_EQ(resolver.result().GetError(), 0); if (resolver.result().GetResolvedAddress(AF_INET, &resolved_address)) { - EXPECT_EQ(resolved_address, rtc::SocketAddress("127.0.0.1", kPortNumber)); + EXPECT_EQ(resolved_address, SocketAddress("127.0.0.1", kPortNumber)); } else { RTC_LOG(LS_INFO) << "Resolution gave no address, skipping test"; } @@ -44,13 +58,13 @@ TEST(AsyncDnsResolver, ResolveAfterDeleteDoesNotReturn) { test::RunLoop loop; std::unique_ptr resolver = std::make_unique(); - rtc::SocketAddress address("localhost", - kPortNumber); // Port number does not matter - rtc::SocketAddress 
resolved_address; + SocketAddress address("localhost", + kPortNumber); // Port number does not matter + SocketAddress resolved_address; bool done = false; resolver->Start(address, [&done] { done = true; }); resolver.reset(); // Deletes resolver. - rtc::Thread::Current()->SleepMs(1); // Allows callback to execute + Thread::Current()->SleepMs(1); // Allows callback to execute EXPECT_FALSE(done); // Expect no result. } diff --git a/rtc_base/async_packet_socket.cc b/rtc_base/async_packet_socket.cc index f50138cb62..2ead920705 100644 --- a/rtc_base/async_packet_socket.cc +++ b/rtc_base/async_packet_socket.cc @@ -10,7 +10,25 @@ #include "rtc_base/async_packet_socket.h" -namespace rtc { +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "api/sequence_checker.h" +#include "rtc_base/checks.h" +#include "rtc_base/dscp.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/network/sent_packet.h" + +namespace webrtc { + +AsyncSocketPacketOptions::AsyncSocketPacketOptions() = default; +AsyncSocketPacketOptions::AsyncSocketPacketOptions(DiffServCodePoint dscp) + : dscp(dscp) {} +AsyncSocketPacketOptions::AsyncSocketPacketOptions( + const AsyncSocketPacketOptions& other) = default; +AsyncSocketPacketOptions::~AsyncSocketPacketOptions() = default; PacketTimeUpdateParams::PacketTimeUpdateParams() = default; @@ -19,16 +37,11 @@ PacketTimeUpdateParams::PacketTimeUpdateParams( PacketTimeUpdateParams::~PacketTimeUpdateParams() = default; -PacketOptions::PacketOptions() = default; -PacketOptions::PacketOptions(DiffServCodePoint dscp) : dscp(dscp) {} -PacketOptions::PacketOptions(const PacketOptions& other) = default; -PacketOptions::~PacketOptions() = default; - AsyncPacketSocket::~AsyncPacketSocket() = default; void AsyncPacketSocket::SubscribeCloseEvent( const void* removal_tag, - std::function callback) { + std::function callback) { RTC_DCHECK_RUN_ON(&network_checker_); on_close_.AddReceiver(removal_tag, std::move(callback)); } @@ -38,12 +51,33 @@ void AsyncPacketSocket::UnsubscribeCloseEvent(const void* removal_tag) { on_close_.RemoveReceivers(removal_tag); } +void AsyncPacketSocket::RegisterReceivedPacketCallback( + absl::AnyInvocable + received_packet_callback) { + RTC_DCHECK_RUN_ON(&network_checker_); + RTC_CHECK(!received_packet_callback_); + received_packet_callback_ = std::move(received_packet_callback); +} + +void AsyncPacketSocket::DeregisterReceivedPacketCallback() { + RTC_DCHECK_RUN_ON(&network_checker_); + received_packet_callback_ = nullptr; +} + +void AsyncPacketSocket::NotifyPacketReceived(const ReceivedIpPacket& packet) { + RTC_DCHECK_RUN_ON(&network_checker_); + if (received_packet_callback_) { + received_packet_callback_(this, packet); + return; + } +} + void CopySocketInformationToPacketInfo(size_t packet_size_bytes, const AsyncPacketSocket& socket_from, - bool is_connectionless, - rtc::PacketInfo* info) { + PacketInfo* info) { info->packet_size_bytes = packet_size_bytes; info->ip_overhead_bytes = socket_from.GetLocalAddress().ipaddr().overhead(); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/async_packet_socket.h b/rtc_base/async_packet_socket.h index 0d3ceb94e7..e06de43d52 100644 --- a/rtc_base/async_packet_socket.h +++ b/rtc_base/async_packet_socket.h @@ -11,19 +11,26 @@ #ifndef RTC_BASE_ASYNC_PACKET_SOCKET_H_ #define RTC_BASE_ASYNC_PACKET_SOCKET_H_ +#include +#include +#include #include +#include "absl/functional/any_invocable.h" #include "api/sequence_checker.h" #include "rtc_base/callback_list.h" +#include 
"rtc_base/checks.h" #include "rtc_base/dscp.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/time_utils.h" +#include "rtc_base/thread_annotations.h" -namespace rtc { +namespace webrtc { // This structure holds the info needed to update the packet send time header // extension, including the information needed to update the authentication tag @@ -41,17 +48,23 @@ struct PacketTimeUpdateParams { // This structure holds meta information for the packet which is about to send // over network. -struct RTC_EXPORT PacketOptions { - PacketOptions(); - explicit PacketOptions(DiffServCodePoint dscp); - PacketOptions(const PacketOptions& other); - ~PacketOptions(); +struct RTC_EXPORT AsyncSocketPacketOptions { + AsyncSocketPacketOptions(); + explicit AsyncSocketPacketOptions(DiffServCodePoint dscp); + AsyncSocketPacketOptions(const AsyncSocketPacketOptions& other); + ~AsyncSocketPacketOptions(); DiffServCodePoint dscp = DSCP_NO_CHANGE; + + // Packet will be sent with ECN(1), RFC-3168, Section 5. + // Intended to be used with L4S + // https://www.rfc-editor.org/rfc/rfc9331.html + bool ecn_1 = false; + // When used with RTP packets (for example, webrtc::PacketOptions), the value // should be 16 bits. A value of -1 represents "not set". int64_t packet_id = -1; - PacketTimeUpdateParams packet_time_params; + webrtc::PacketTimeUpdateParams packet_time_params; // PacketInfo is passed to SentPacket when signaling this packet is sent. PacketInfo info_signaled_after_sent; // True if this is a batchable packet. Batchable packets are collected at low @@ -88,11 +101,13 @@ class RTC_EXPORT AsyncPacketSocket : public sigslot::has_slots<> { virtual SocketAddress GetRemoteAddress() const = 0; // Send a packet. - virtual int Send(const void* pv, size_t cb, const PacketOptions& options) = 0; + virtual int Send(const void* pv, + size_t cb, + const AsyncSocketPacketOptions& options) = 0; virtual int SendTo(const void* pv, size_t cb, const SocketAddress& addr, - const PacketOptions& options) = 0; + const AsyncSocketPacketOptions& options) = 0; // Close the socket. virtual int Close() = 0; @@ -112,22 +127,17 @@ class RTC_EXPORT AsyncPacketSocket : public sigslot::has_slots<> { // Register a callback to be called when the socket is closed. void SubscribeCloseEvent( const void* removal_tag, - std::function callback); + std::function callback); void UnsubscribeCloseEvent(const void* removal_tag); - // Emitted each time a packet is read. Used only for UDP and - // connected TCP sockets. - sigslot::signal5 - SignalReadPacket; + void RegisterReceivedPacketCallback( + absl::AnyInvocable + received_packet_callback); + void DeregisterReceivedPacketCallback(); // Emitted each time a packet is sent. - sigslot::signal2 SignalSentPacket; + sigslot::signal2 SignalSentPacket; // Emitted when the socket is currently able to send. 
sigslot::signal1 SignalReadyToSend; @@ -155,12 +165,17 @@ class RTC_EXPORT AsyncPacketSocket : public sigslot::has_slots<> { on_close_.Send(this, err); } - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker network_checker_{ - webrtc::SequenceChecker::kDetached}; + void NotifyPacketReceived(const ReceivedIpPacket& packet); + + RTC_NO_UNIQUE_ADDRESS SequenceChecker network_checker_{ + SequenceChecker::kDetached}; private: - webrtc::CallbackList on_close_ + CallbackList on_close_ RTC_GUARDED_BY(&network_checker_); + absl::AnyInvocable + received_packet_callback_ RTC_GUARDED_BY(&network_checker_); }; // Listen socket, producing an AsyncPacketSocket when a peer connects. @@ -183,9 +198,20 @@ class RTC_EXPORT AsyncListenSocket : public sigslot::has_slots<> { void CopySocketInformationToPacketInfo(size_t packet_size_bytes, const AsyncPacketSocket& socket_from, - bool is_connectionless, - rtc::PacketInfo* info); + PacketInfo* info); + +} // namespace webrtc +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::AsyncListenSocket; +using ::webrtc::AsyncPacketSocket; +using ::webrtc::CopySocketInformationToPacketInfo; +using ::webrtc::PacketTimeUpdateParams; +using PacketOptions = ::webrtc::AsyncSocketPacketOptions; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_ASYNC_PACKET_SOCKET_H_ diff --git a/rtc_base/async_packet_socket_unittest.cc b/rtc_base/async_packet_socket_unittest.cc new file mode 100644 index 0000000000..9ed491efb5 --- /dev/null +++ b/rtc_base/async_packet_socket_unittest.cc @@ -0,0 +1,65 @@ +/* + * Copyright 2023 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "rtc_base/async_packet_socket.h" + +#include "rtc_base/socket_address.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::MockFunction; + +class MockAsyncPacketSocket : public AsyncPacketSocket { + public: + ~MockAsyncPacketSocket() = default; + + MOCK_METHOD(SocketAddress, GetLocalAddress, (), (const, override)); + MOCK_METHOD(SocketAddress, GetRemoteAddress, (), (const, override)); + MOCK_METHOD(int, + Send, + (const void* pv, + size_t cb, + const webrtc::AsyncSocketPacketOptions& options), + (override)); + + MOCK_METHOD(int, + SendTo, + (const void* pv, + size_t cb, + const SocketAddress& addr, + const webrtc::AsyncSocketPacketOptions& options), + (override)); + MOCK_METHOD(int, Close, (), (override)); + MOCK_METHOD(State, GetState, (), (const, override)); + MOCK_METHOD(int, GetOption, (Socket::Option opt, int* value), (override)); + MOCK_METHOD(int, SetOption, (Socket::Option opt, int value), (override)); + MOCK_METHOD(int, GetError, (), (const, override)); + MOCK_METHOD(void, SetError, (int error), (override)); + + using AsyncPacketSocket::NotifyPacketReceived; +}; + +TEST(AsyncPacketSocket, RegisteredCallbackReceivePacketsFromNotify) { + MockAsyncPacketSocket mock_socket; + MockFunction + received_packet; + + EXPECT_CALL(received_packet, Call); + mock_socket.RegisterReceivedPacketCallback(received_packet.AsStdFunction()); + mock_socket.NotifyPacketReceived(ReceivedIpPacket({}, SocketAddress())); +} + +} // namespace +} // namespace webrtc diff --git a/rtc_base/async_resolver.cc b/rtc_base/async_resolver.cc deleted file mode 100644 index f79200bc05..0000000000 --- a/rtc_base/async_resolver.cc +++ /dev/null @@ -1,239 +0,0 @@ -/* - * Copyright 2008 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/async_resolver.h" - -#include -#include -#include - -#include "absl/strings/string_view.h" -#include "api/ref_counted_base.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_annotations.h" - -#if defined(WEBRTC_WIN) -#include -#include - -#include "rtc_base/win32.h" -#endif -#if defined(WEBRTC_POSIX) && !defined(__native_client__) -#if defined(WEBRTC_ANDROID) -#include "rtc_base/ifaddrs_android.h" -#else -#include -#endif -#endif // defined(WEBRTC_POSIX) && !defined(__native_client__) - -#include "api/task_queue/task_queue_base.h" -#include "rtc_base/ip_address.h" -#include "rtc_base/logging.h" -#include "rtc_base/platform_thread.h" -#include "rtc_base/task_queue.h" -#include "rtc_base/third_party/sigslot/sigslot.h" // for signal_with_thread... - -#if defined(WEBRTC_MAC) || defined(WEBRTC_IOS) -#include -#endif - -namespace rtc { - -#if defined(WEBRTC_MAC) || defined(WEBRTC_IOS) -namespace { - -void GlobalGcdRunTask(void* context) { - std::unique_ptr> task( - static_cast*>(context)); - std::move (*task)(); -} - -// Post a task into the system-defined global concurrent queue. 
-void PostTaskToGlobalQueue( - std::unique_ptr> task) { - dispatch_async_f( - dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), - task.release(), &GlobalGcdRunTask); -} - -} // namespace -#endif - -int ResolveHostname(absl::string_view hostname, - int family, - std::vector* addresses) { -#ifdef __native_client__ - RTC_DCHECK_NOTREACHED(); - RTC_LOG(LS_WARNING) << "ResolveHostname() is not implemented for NaCl"; - return -1; -#else // __native_client__ - if (!addresses) { - return -1; - } - addresses->clear(); - struct addrinfo* result = nullptr; - struct addrinfo hints = {0}; - hints.ai_family = family; - // `family` here will almost always be AF_UNSPEC, because `family` comes from - // AsyncResolver::addr_.family(), which comes from a SocketAddress constructed - // with a hostname. When a SocketAddress is constructed with a hostname, its - // family is AF_UNSPEC. However, if someday in the future we construct - // a SocketAddress with both a hostname and a family other than AF_UNSPEC, - // then it would be possible to get a specific family value here. - - // The behavior of AF_UNSPEC is roughly "get both ipv4 and ipv6", as - // documented by the various operating systems: - // Linux: http://man7.org/linux/man-pages/man3/getaddrinfo.3.html - // Windows: https://msdn.microsoft.com/en-us/library/windows/desktop/ - // ms738520(v=vs.85).aspx - // Mac: https://developer.apple.com/legacy/library/documentation/Darwin/ - // Reference/ManPages/man3/getaddrinfo.3.html - // Android (source code, not documentation): - // https://android.googlesource.com/platform/bionic/+/ - // 7e0bfb511e85834d7c6cb9631206b62f82701d60/libc/netbsd/net/getaddrinfo.c#1657 - hints.ai_flags = AI_ADDRCONFIG; - int ret = - getaddrinfo(std::string(hostname).c_str(), nullptr, &hints, &result); - if (ret != 0) { - return ret; - } - struct addrinfo* cursor = result; - for (; cursor; cursor = cursor->ai_next) { - if (family == AF_UNSPEC || cursor->ai_family == family) { - IPAddress ip; - if (IPFromAddrInfo(cursor, &ip)) { - addresses->push_back(ip); - } - } - } - freeaddrinfo(result); - return 0; -#endif // !__native_client__ -} - -struct AsyncResolver::State : public RefCountedBase { - webrtc::Mutex mutex; - enum class Status { - kLive, - kDead - } status RTC_GUARDED_BY(mutex) = Status::kLive; -}; - -AsyncResolver::AsyncResolver() : error_(-1), state_(new State) {} - -AsyncResolver::~AsyncResolver() { - RTC_DCHECK_RUN_ON(&sequence_checker_); - - // Ensure the thread isn't using a stale reference to the current task queue, - // or calling into ResolveDone post destruction. 
- webrtc::MutexLock lock(&state_->mutex); - state_->status = State::Status::kDead; -} - -void RunResolution(void* obj) { - std::function* function_ptr = - static_cast*>(obj); - (*function_ptr)(); - delete function_ptr; -} - -void AsyncResolver::Start(const SocketAddress& addr) { - Start(addr, addr.family()); -} - -void AsyncResolver::Start(const SocketAddress& addr, int family) { - RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK(!destroy_called_); - addr_ = addr; - auto thread_function = [this, addr, family, - caller_task_queue = webrtc::TaskQueueBase::Current(), - state = state_] { - std::vector addresses; - int error = ResolveHostname(addr.hostname(), family, &addresses); - webrtc::MutexLock lock(&state->mutex); - if (state->status == State::Status::kLive) { - caller_task_queue->PostTask( - [this, error, addresses = std::move(addresses), state] { - bool live; - { - // ResolveDone can lead to instance destruction, so make sure - // we don't deadlock. - webrtc::MutexLock lock(&state->mutex); - live = state->status == State::Status::kLive; - } - if (live) { - RTC_DCHECK_RUN_ON(&sequence_checker_); - ResolveDone(std::move(addresses), error); - } - }); - } - }; -#if defined(WEBRTC_MAC) || defined(WEBRTC_IOS) - PostTaskToGlobalQueue( - std::make_unique>(thread_function)); -#else - PlatformThread::SpawnDetached(std::move(thread_function), "AsyncResolver"); -#endif -} - -bool AsyncResolver::GetResolvedAddress(int family, SocketAddress* addr) const { - RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK(!destroy_called_); - if (error_ != 0 || addresses_.empty()) - return false; - - *addr = addr_; - for (size_t i = 0; i < addresses_.size(); ++i) { - if (family == addresses_[i].family()) { - addr->SetResolvedIP(addresses_[i]); - return true; - } - } - return false; -} - -int AsyncResolver::GetError() const { - RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK(!destroy_called_); - return error_; -} - -void AsyncResolver::Destroy(bool wait) { - // Some callers have trouble guaranteeing that Destroy is called on the - // sequence guarded by `sequence_checker_`. - // RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK(!destroy_called_); - destroy_called_ = true; - MaybeSelfDestruct(); -} - -const std::vector& AsyncResolver::addresses() const { - RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK(!destroy_called_); - return addresses_; -} - -void AsyncResolver::ResolveDone(std::vector addresses, int error) { - addresses_ = addresses; - error_ = error; - recursion_check_ = true; - SignalDone(this); - MaybeSelfDestruct(); -} - -void AsyncResolver::MaybeSelfDestruct() { - if (!recursion_check_) { - delete this; - } else { - recursion_check_ = false; - } -} - -} // namespace rtc diff --git a/rtc_base/async_resolver.h b/rtc_base/async_resolver.h deleted file mode 100644 index 46be43860e..0000000000 --- a/rtc_base/async_resolver.h +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2008 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef RTC_BASE_ASYNC_RESOLVER_H_ -#define RTC_BASE_ASYNC_RESOLVER_H_ - -#if defined(WEBRTC_POSIX) -#include -#elif WEBRTC_WIN -#include // NOLINT -#endif - -#include - -#include "api/sequence_checker.h" -#include "api/task_queue/pending_task_safety_flag.h" -#include "rtc_base/async_resolver_interface.h" -#include "rtc_base/event.h" -#include "rtc_base/ip_address.h" -#include "rtc_base/socket_address.h" -#include "rtc_base/system/no_unique_address.h" -#include "rtc_base/system/rtc_export.h" -#include "rtc_base/thread.h" -#include "rtc_base/thread_annotations.h" - -namespace rtc { - -// AsyncResolver will perform async DNS resolution, signaling the result on -// the SignalDone from AsyncResolverInterface when the operation completes. -// -// This class is thread-compatible, and all methods and destruction needs to -// happen from the same rtc::Thread, except for Destroy which is allowed to -// happen on another context provided it's not happening concurrently to another -// public API call, and is the last access to the object. -class RTC_EXPORT AsyncResolver : public AsyncResolverInterface { - public: - AsyncResolver(); - ~AsyncResolver() override; - - void Start(const SocketAddress& addr) override; - void Start(const SocketAddress& addr, int family) override; - bool GetResolvedAddress(int family, SocketAddress* addr) const override; - int GetError() const override; - void Destroy(bool wait) override; - - const std::vector& addresses() const; - - private: - // Fwd decl. - struct State; - - void ResolveDone(std::vector addresses, int error) - RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_); - void MaybeSelfDestruct(); - - SocketAddress addr_ RTC_GUARDED_BY(sequence_checker_); - std::vector addresses_ RTC_GUARDED_BY(sequence_checker_); - int error_ RTC_GUARDED_BY(sequence_checker_); - bool recursion_check_ = - false; // Protects against SignalDone calling into Destroy. - bool destroy_called_ = false; - scoped_refptr state_; - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker sequence_checker_; -}; - -} // namespace rtc - -#endif // RTC_BASE_ASYNC_RESOLVER_H_ diff --git a/rtc_base/async_resolver_interface.cc b/rtc_base/async_resolver_interface.cc deleted file mode 100644 index ff8c87e346..0000000000 --- a/rtc_base/async_resolver_interface.cc +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright 2015 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/async_resolver_interface.h" - -namespace rtc { - -AsyncResolverInterface::AsyncResolverInterface() {} - -AsyncResolverInterface::~AsyncResolverInterface() = default; - -} // namespace rtc diff --git a/rtc_base/async_resolver_interface.h b/rtc_base/async_resolver_interface.h deleted file mode 100644 index 851fa38ce1..0000000000 --- a/rtc_base/async_resolver_interface.h +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2013 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_ASYNC_RESOLVER_INTERFACE_H_ -#define RTC_BASE_ASYNC_RESOLVER_INTERFACE_H_ - -#include "rtc_base/checks.h" -#include "rtc_base/socket_address.h" -#include "rtc_base/system/rtc_export.h" -#include "rtc_base/third_party/sigslot/sigslot.h" - -namespace rtc { - -// This interface defines the methods to resolve the address asynchronously. -// TODO(bugs.webrtc.org/12598): Deprecate and remove. -class RTC_EXPORT AsyncResolverInterface { - public: - AsyncResolverInterface(); - virtual ~AsyncResolverInterface(); - - // Start address resolution of the hostname in `addr`. - virtual void Start(const SocketAddress& addr) = 0; - // Start address resolution of the hostname in `addr` matching `family`. - virtual void Start(const SocketAddress& addr, int family) = 0; - // Returns true iff the address from `Start` was successfully resolved. - // If the address was successfully resolved, sets `addr` to a copy of the - // address from `Start` with the IP address set to the top most resolved - // address of `family` (`addr` will have both hostname and the resolved ip). - virtual bool GetResolvedAddress(int family, SocketAddress* addr) const = 0; - // Returns error from resolver. - virtual int GetError() const = 0; - // Delete the resolver. - virtual void Destroy(bool wait) = 0; - // Returns top most resolved IPv4 address if address is resolved successfully. - // Otherwise returns address set in SetAddress. - SocketAddress address() const { - SocketAddress addr; - GetResolvedAddress(AF_INET, &addr); - return addr; - } - - // This signal is fired when address resolve process is completed. - sigslot::signal1 SignalDone; -}; - -} // namespace rtc - -#endif diff --git a/rtc_base/async_socket.cc b/rtc_base/async_socket.cc index 7289b5c959..fe6c0ef2f3 100644 --- a/rtc_base/async_socket.cc +++ b/rtc_base/async_socket.cc @@ -13,7 +13,7 @@ #include "absl/memory/memory.h" #include "rtc_base/checks.h" -namespace rtc { +namespace webrtc { AsyncSocketAdapter::AsyncSocketAdapter(Socket* socket) : socket_(absl::WrapUnique(socket)) { @@ -110,4 +110,4 @@ void AsyncSocketAdapter::OnCloseEvent(Socket* socket, int err) { SignalCloseEvent(this, err); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/async_socket.h b/rtc_base/async_socket.h index 0772cb8d97..efd0ced760 100644 --- a/rtc_base/async_socket.h +++ b/rtc_base/async_socket.h @@ -20,7 +20,7 @@ #include "rtc_base/socket_address.h" #include "rtc_base/third_party/sigslot/sigslot.h" -namespace rtc { +namespace webrtc { class AsyncSocketAdapter : public Socket, public sigslot::has_slots<> { public: @@ -60,6 +60,14 @@ class AsyncSocketAdapter : public Socket, public sigslot::has_slots<> { const std::unique_ptr socket_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::AsyncSocketAdapter; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_ASYNC_SOCKET_H_ diff --git a/rtc_base/async_tcp_socket.cc b/rtc_base/async_tcp_socket.cc index 367c5b04e7..420d3595ca 100644 --- a/rtc_base/async_tcp_socket.cc +++ b/rtc_base/async_tcp_socket.cc @@ -14,6 +14,8 @@ #include #include +#include +#include #include #include "api/array_view.h" @@ -27,7 +29,7 @@ #include #endif // WEBRTC_POSIX -namespace rtc { +namespace webrtc { static const size_t kMaxPacketSize = 64 * 1024; @@ -44,11 +46,10 @@ static const size_t kMinimumRecvSize = 128; static const int kListenBacklog = 5; // Binds and connects `socket` -Socket* AsyncTCPSocketBase::ConnectSocket( - rtc::Socket* socket, - const rtc::SocketAddress& bind_address, - const rtc::SocketAddress& remote_address) { - std::unique_ptr owned_socket(socket); +Socket* AsyncTCPSocketBase::ConnectSocket(Socket* socket, + const SocketAddress& bind_address, + const SocketAddress& remote_address) { + std::unique_ptr owned_socket(socket); if (socket->Bind(bind_address) < 0) { RTC_LOG(LS_ERROR) << "Bind() failed with error " << socket->GetError(); return nullptr; @@ -120,7 +121,7 @@ void AsyncTCPSocketBase::SetError(int error) { int AsyncTCPSocketBase::SendTo(const void* pv, size_t cb, const SocketAddress& addr, - const rtc::PacketOptions& options) { + const AsyncSocketPacketOptions& options) { const SocketAddress& remote_address = GetRemoteAddress(); if (addr == remote_address) return Send(pv, cb, options); @@ -132,7 +133,7 @@ int AsyncTCPSocketBase::SendTo(const void* pv, int AsyncTCPSocketBase::FlushOutBuffer() { RTC_DCHECK_GT(outbuf_.size(), 0); - rtc::ArrayView view = outbuf_; + ArrayView view = outbuf_; int res; while (view.size() > 0) { res = socket_->Send(view.data(), view.size()); @@ -209,15 +210,17 @@ void AsyncTCPSocketBase::OnReadEvent(Socket* socket) { return; } - size_t size = inbuf_.size(); - ProcessInput(inbuf_.data(), &size); - - if (size > inbuf_.size()) { + size_t processed = ProcessInput(inbuf_); + size_t bytes_remaining = inbuf_.size() - processed; + if (processed > inbuf_.size()) { RTC_LOG(LS_ERROR) << "input buffer overflow"; RTC_DCHECK_NOTREACHED(); inbuf_.Clear(); } else { - inbuf_.SetSize(size); + if (bytes_remaining > 0) { + memmove(inbuf_.data(), inbuf_.data() + processed, bytes_remaining); + } + inbuf_.SetSize(bytes_remaining); } } @@ -253,7 +256,7 @@ AsyncTCPSocket::AsyncTCPSocket(Socket* socket) int AsyncTCPSocket::Send(const void* pv, size_t cb, - const rtc::PacketOptions& options) { + const AsyncSocketPacketOptions& options) { if (cb > kBufSize) { SetError(EMSGSIZE); return -1; @@ -263,7 +266,7 @@ int AsyncTCPSocket::Send(const void* pv, if (!IsOutBufferEmpty()) return static_cast(cb); - PacketLength pkt_len = HostToNetwork16(static_cast(cb)); + PacketLength pkt_len = webrtc::HostToNetwork16(static_cast(cb)); AppendToOutBuffer(&pkt_len, kPacketLenSize); AppendToOutBuffer(pv, cb); @@ -274,33 +277,33 @@ int AsyncTCPSocket::Send(const void* pv, return res; } - rtc::SentPacket sent_packet(options.packet_id, rtc::TimeMillis(), - options.info_signaled_after_sent); - CopySocketInformationToPacketInfo(cb, *this, false, &sent_packet.info); + SentPacketInfo sent_packet(options.packet_id, TimeMillis(), + options.info_signaled_after_sent); + CopySocketInformationToPacketInfo(cb, *this, &sent_packet.info); SignalSentPacket(this, sent_packet); // We claim to have sent the whole thing, even if we only sent partial return 
static_cast(cb); } -void AsyncTCPSocket::ProcessInput(char* data, size_t* len) { +size_t AsyncTCPSocket::ProcessInput(ArrayView data) { SocketAddress remote_addr(GetRemoteAddress()); + size_t processed_bytes = 0; while (true) { - if (*len < kPacketLenSize) - return; - - PacketLength pkt_len = rtc::GetBE16(data); - if (*len < kPacketLenSize + pkt_len) - return; - - SignalReadPacket(this, data + kPacketLenSize, pkt_len, remote_addr, - TimeMicros()); - - *len -= kPacketLenSize + pkt_len; - if (*len > 0) { - memmove(data, data + kPacketLenSize + pkt_len, *len); - } + size_t bytes_left = data.size() - processed_bytes; + if (bytes_left < kPacketLenSize) + return processed_bytes; + + PacketLength pkt_len = webrtc::GetBE16(data.data() + processed_bytes); + if (bytes_left < kPacketLenSize + pkt_len) + return processed_bytes; + + ReceivedIpPacket received_packet( + data.subview(processed_bytes + kPacketLenSize, pkt_len), remote_addr, + Timestamp::Micros(TimeMicros())); + NotifyPacketReceived(received_packet); + processed_bytes += kPacketLenSize + pkt_len; } } @@ -332,8 +335,8 @@ SocketAddress AsyncTcpListenSocket::GetLocalAddress() const { void AsyncTcpListenSocket::OnReadEvent(Socket* socket) { RTC_DCHECK(socket_.get() == socket); - rtc::SocketAddress address; - rtc::Socket* new_socket = socket->Accept(&address); + SocketAddress address; + Socket* new_socket = socket->Accept(&address); if (!new_socket) { // TODO(stefan): Do something better like forwarding the error // to the user. @@ -351,4 +354,4 @@ void AsyncTcpListenSocket::HandleIncomingConnection(Socket* socket) { SignalNewConnection(this, new AsyncTCPSocket(socket)); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/async_tcp_socket.h b/rtc_base/async_tcp_socket.h index 541080fba7..daf890cdaa 100644 --- a/rtc_base/async_tcp_socket.h +++ b/rtc_base/async_tcp_socket.h @@ -13,14 +13,16 @@ #include +#include #include +#include "api/array_view.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/buffer.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" -namespace rtc { +namespace webrtc { // Simulates UDP semantics over TCP. Send and Recv packet sizes // are preserved, and drops packets silently on Send, rather than @@ -36,15 +38,16 @@ class AsyncTCPSocketBase : public AsyncPacketSocket { // Pure virtual methods to send and recv data. int Send(const void* pv, size_t cb, - const rtc::PacketOptions& options) override = 0; - virtual void ProcessInput(char* data, size_t* len) = 0; + const AsyncSocketPacketOptions& options) override = 0; + // Must return the number of bytes processed. 
+ virtual size_t ProcessInput(ArrayView data) = 0; SocketAddress GetLocalAddress() const override; SocketAddress GetRemoteAddress() const override; int SendTo(const void* pv, size_t cb, const SocketAddress& addr, - const rtc::PacketOptions& options) override; + const AsyncSocketPacketOptions& options) override; int Close() override; State GetState() const override; @@ -98,8 +101,8 @@ class AsyncTCPSocket : public AsyncTCPSocketBase { int Send(const void* pv, size_t cb, - const rtc::PacketOptions& options) override; - void ProcessInput(char* data, size_t* len) override; + const AsyncSocketPacketOptions& options) override; + size_t ProcessInput(ArrayView) override; }; class AsyncTcpListenSocket : public AsyncListenSocket { @@ -109,7 +112,7 @@ class AsyncTcpListenSocket : public AsyncListenSocket { State GetState() const override; SocketAddress GetLocalAddress() const override; - virtual void HandleIncomingConnection(rtc::Socket* socket); + virtual void HandleIncomingConnection(Socket* socket); private: // Called by the underlying socket @@ -118,6 +121,16 @@ class AsyncTcpListenSocket : public AsyncListenSocket { std::unique_ptr socket_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::AsyncTcpListenSocket; +using ::webrtc::AsyncTCPSocket; +using ::webrtc::AsyncTCPSocketBase; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_ASYNC_TCP_SOCKET_H_ diff --git a/rtc_base/async_tcp_socket_unittest.cc b/rtc_base/async_tcp_socket_unittest.cc index af82238a37..149780de9e 100644 --- a/rtc_base/async_tcp_socket_unittest.cc +++ b/rtc_base/async_tcp_socket_unittest.cc @@ -16,12 +16,12 @@ #include "rtc_base/gunit.h" #include "rtc_base/virtual_socket_server.h" -namespace rtc { +namespace webrtc { class AsyncTCPSocketTest : public ::testing::Test, public sigslot::has_slots<> { public: AsyncTCPSocketTest() - : vss_(new rtc::VirtualSocketServer()), + : vss_(new webrtc::VirtualSocketServer()), socket_(vss_->CreateSocket(SOCK_STREAM)), tcp_socket_(new AsyncTCPSocket(socket_, true)), ready_to_send_(false) { @@ -29,7 +29,9 @@ class AsyncTCPSocketTest : public ::testing::Test, public sigslot::has_slots<> { &AsyncTCPSocketTest::OnReadyToSend); } - void OnReadyToSend(rtc::AsyncPacketSocket* socket) { ready_to_send_ = true; } + void OnReadyToSend(webrtc::AsyncPacketSocket* socket) { + ready_to_send_ = true; + } protected: std::unique_ptr vss_; @@ -44,4 +46,4 @@ TEST_F(AsyncTCPSocketTest, OnWriteEvent) { EXPECT_TRUE(ready_to_send_); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/async_udp_socket.cc b/rtc_base/async_udp_socket.cc index af7ae56fb6..f0846875f9 100644 --- a/rtc_base/async_udp_socket.cc +++ b/rtc_base/async_udp_socket.cc @@ -10,20 +10,24 @@ #include "rtc_base/async_udp_socket.h" - +#include +#include +#include + +#include "api/sequence_checker.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/async_packet_socket.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/network/sent_packet.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/socket_factory.h" #include "rtc_base/time_utils.h" -#include "system_wrappers/include/field_trial.h" - -namespace rtc { -// Returns true if the experiement 
"WebRTC-SCM-Timestamp" is explicitly -// disabled. -static bool IsScmTimeStampExperimentDisabled() { - return webrtc::field_trial::IsDisabled("WebRTC-SCM-Timestamp"); -} +namespace webrtc { AsyncUDPSocket* AsyncUDPSocket::Create(Socket* socket, const SocketAddress& bind_address) { @@ -60,10 +64,10 @@ SocketAddress AsyncUDPSocket::GetRemoteAddress() const { int AsyncUDPSocket::Send(const void* pv, size_t cb, - const rtc::PacketOptions& options) { - rtc::SentPacket sent_packet(options.packet_id, rtc::TimeMillis(), - options.info_signaled_after_sent); - CopySocketInformationToPacketInfo(cb, *this, false, &sent_packet.info); + const AsyncSocketPacketOptions& options) { + SentPacketInfo sent_packet(options.packet_id, TimeMillis(), + options.info_signaled_after_sent); + webrtc::CopySocketInformationToPacketInfo(cb, *this, &sent_packet.info); int ret = socket_->Send(pv, cb); SignalSentPacket(this, sent_packet); return ret; @@ -72,10 +76,19 @@ int AsyncUDPSocket::Send(const void* pv, int AsyncUDPSocket::SendTo(const void* pv, size_t cb, const SocketAddress& addr, - const rtc::PacketOptions& options) { - rtc::SentPacket sent_packet(options.packet_id, rtc::TimeMillis(), - options.info_signaled_after_sent); - CopySocketInformationToPacketInfo(cb, *this, true, &sent_packet.info); + const AsyncSocketPacketOptions& options) { + SentPacketInfo sent_packet(options.packet_id, TimeMillis(), + options.info_signaled_after_sent); + webrtc::CopySocketInformationToPacketInfo(cb, *this, &sent_packet.info); + if (has_set_ect1_options_ != options.ecn_1) { + // It is unclear what is most efficient, setting options on every sent + // packet or when changed. Potentially, can separate send sockets be used? + // This is the easier implementation. + if (socket_->SetOption(Socket::Option::OPT_SEND_ECN, + options.ecn_1 ? 1 : 0) == 0) { + has_set_ect1_options_ = options.ecn_1; + } + } int ret = socket_->SendTo(pv, cb, addr); SignalSentPacket(this, sent_packet); return ret; @@ -109,10 +122,8 @@ void AsyncUDPSocket::OnReadEvent(Socket* socket) { RTC_DCHECK(socket_.get() == socket); RTC_DCHECK_RUN_ON(&sequence_checker_); - SocketAddress remote_addr; - int64_t timestamp = -1; - int len = socket_->RecvFrom(buf_, BUF_SIZE, &remote_addr, ×tamp); - + Socket::ReceiveBuffer receive_buffer(buffer_); + int len = socket_->RecvFrom(receive_buffer); if (len < 0) { // An error here typically means we got an ICMP error in response to our // send datagram, indicating the remote address was unreachable. @@ -123,25 +134,29 @@ void AsyncUDPSocket::OnReadEvent(Socket* socket) { << "] receive failed with error " << socket_->GetError(); return; } - if (timestamp == -1) { + if (len == 0) { + // Spurios wakeup. + return; + } + + if (!receive_buffer.arrival_time) { // Timestamp from socket is not available. - timestamp = TimeMicros(); + receive_buffer.arrival_time = Timestamp::Micros(TimeMicros()); } else { if (!socket_time_offset_) { + // Estimate timestamp offset from first packet arrival time. socket_time_offset_ = - !IsScmTimeStampExperimentDisabled() ? TimeMicros() - timestamp : 0; + Timestamp::Micros(TimeMicros()) - *receive_buffer.arrival_time; } - timestamp += *socket_time_offset_; + *receive_buffer.arrival_time += *socket_time_offset_; } - - // TODO: Make sure that we got all of the packet. - // If we did not, then we should resize our buffer to be large enough. 
- SignalReadPacket(this, buf_, static_cast(len), remote_addr, - timestamp); + NotifyPacketReceived( + ReceivedIpPacket(receive_buffer.payload, receive_buffer.source_address, + receive_buffer.arrival_time, receive_buffer.ecn)); } void AsyncUDPSocket::OnWriteEvent(Socket* socket) { SignalReadyToSend(this); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/async_udp_socket.h b/rtc_base/async_udp_socket.h index 4198b25c4d..8f30ce7fa0 100644 --- a/rtc_base/async_udp_socket.h +++ b/rtc_base/async_udp_socket.h @@ -13,19 +13,20 @@ #include -#include #include +#include -#include "absl/types/optional.h" #include "api/sequence_checker.h" +#include "api/units/time_delta.h" #include "rtc_base/async_packet_socket.h" +#include "rtc_base/buffer.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" #include "rtc_base/socket_factory.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" -namespace rtc { +namespace webrtc { // Provides the ability to receive packets asynchronously. Sends are not // buffered since it is acceptable to drop packets under high load. @@ -47,11 +48,11 @@ class AsyncUDPSocket : public AsyncPacketSocket { SocketAddress GetRemoteAddress() const override; int Send(const void* pv, size_t cb, - const rtc::PacketOptions& options) override; + const AsyncSocketPacketOptions& options) override; int SendTo(const void* pv, size_t cb, const SocketAddress& addr, - const rtc::PacketOptions& options) override; + const AsyncSocketPacketOptions& options) override; int Close() override; State GetState() const override; @@ -66,13 +67,22 @@ class AsyncUDPSocket : public AsyncPacketSocket { // Called when the underlying socket is ready to send. void OnWriteEvent(Socket* socket); - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; std::unique_ptr socket_; - static constexpr int BUF_SIZE = 64 * 1024; - char buf_[BUF_SIZE] RTC_GUARDED_BY(sequence_checker_); - absl::optional socket_time_offset_ RTC_GUARDED_BY(sequence_checker_); + bool has_set_ect1_options_ = false; + Buffer buffer_ RTC_GUARDED_BY(sequence_checker_); + std::optional socket_time_offset_ + RTC_GUARDED_BY(sequence_checker_); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES
+namespace rtc {
+using ::webrtc::AsyncUDPSocket;
} // namespace rtc
+#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES
#endif // RTC_BASE_ASYNC_UDP_SOCKET_H_
diff --git a/rtc_base/async_udp_socket_unittest.cc b/rtc_base/async_udp_socket_unittest.cc
index 7df1ca0a46..cf895dede7 100644
--- a/rtc_base/async_udp_socket_unittest.cc
+++ b/rtc_base/async_udp_socket_unittest.cc
@@ -10,41 +10,46 @@
#include "rtc_base/async_udp_socket.h"
+#include
#include
-#include
-#include "rtc_base/gunit.h"
-#include "rtc_base/physical_socket_server.h"
+#include "absl/memory/memory.h"
+#include "rtc_base/async_packet_socket.h"
+#include "rtc_base/socket.h"
+#include "rtc_base/socket_address.h"
#include "rtc_base/virtual_socket_server.h"
-
-namespace rtc {
-
-class AsyncUdpSocketTest : public ::testing::Test, public sigslot::has_slots<> {
- public:
- AsyncUdpSocketTest()
- : pss_(new rtc::PhysicalSocketServer),
- vss_(new rtc::VirtualSocketServer(pss_.get())),
- socket_(vss_->CreateSocket(SOCK_DGRAM)),
- udp_socket_(new AsyncUDPSocket(socket_)),
- ready_to_send_(false) {
- udp_socket_->SignalReadyToSend.connect(this,
- &AsyncUdpSocketTest::OnReadyToSend);
- }
-
- void OnReadyToSend(rtc::AsyncPacketSocket* socket) { ready_to_send_ = true; }
-
- protected:
- std::unique_ptr pss_;
- std::unique_ptr vss_;
- Socket* socket_;
- std::unique_ptr udp_socket_;
- bool ready_to_send_;
-};
-
-TEST_F(AsyncUdpSocketTest, OnWriteEvent) {
- EXPECT_FALSE(ready_to_send_);
- socket_->SignalWriteEvent(socket_);
- EXPECT_TRUE(ready_to_send_);
+#include "test/gtest.h"
+
+namespace webrtc {
+
+static const SocketAddress kAddr("22.22.22.22", 0);
+
+TEST(AsyncUDPSocketTest, SetSocketOptionIfEctChange) {
+ VirtualSocketServer socket_server;
+ Socket* socket = socket_server.CreateSocket(kAddr.family(), SOCK_DGRAM);
+ std::unique_ptr udp_socket =
+ absl::WrapUnique(AsyncUDPSocket::Create(socket, kAddr));
+
+ int ect = 0;
+ socket->GetOption(Socket::OPT_SEND_ECN, &ect);
+ ASSERT_EQ(ect, 0);
+
+ uint8_t buffer[] = "hello";
+ AsyncSocketPacketOptions packet_options;
+ packet_options.ecn_1 = false;
+ udp_socket->SendTo(buffer, 5, kAddr, packet_options);
+ socket->GetOption(Socket::OPT_SEND_ECN, &ect);
+ EXPECT_EQ(ect, 0);
+
+ packet_options.ecn_1 = true;
+ udp_socket->SendTo(buffer, 5, kAddr, packet_options);
+ socket->GetOption(Socket::OPT_SEND_ECN, &ect);
+ EXPECT_EQ(ect, 1);
+
+ packet_options.ecn_1 = false;
+ udp_socket->SendTo(buffer, 5, kAddr, packet_options);
+ socket->GetOption(Socket::OPT_SEND_ECN, &ect);
+ EXPECT_EQ(ect, 0);
}
-} // namespace rtc
+} // namespace webrtc
diff --git a/rtc_base/base64.cc b/rtc_base/base64.cc
new file mode 100644
index 0000000000..eaa4d2bfd6
--- /dev/null
+++ b/rtc_base/base64.cc
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2025 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */ + +#include "rtc_base/base64.h" + +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/strings/ascii.h" +#include "absl/strings/escaping.h" +#include "absl/strings/string_view.h" + +namespace webrtc { + +namespace { + +bool IsStrictBase64(absl::string_view data) { + // Strict base64 must be a multiple of 4 bytes and have no whitespace. + return data.size() % 4 == 0 && absl::c_none_of(data, absl::ascii_isspace); +} +} // namespace + +std::optional Base64Decode(absl::string_view data, + Base64DecodeOptions options) { + // absl::Base64Unescape is forgiving. Return nullopt if the input is not + // strict. + if (options == Base64DecodeOptions::kStrict && !IsStrictBase64(data)) { + return std::nullopt; + } + + std::string dest; + return absl::Base64Unescape(data, &dest) ? std::make_optional(std::move(dest)) + : std::nullopt; +} + +} // namespace webrtc diff --git a/rtc_base/base64.h b/rtc_base/base64.h new file mode 100644 index 0000000000..24c1d97e0c --- /dev/null +++ b/rtc_base/base64.h @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2025 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_BASE64_H_ +#define RTC_BASE_BASE64_H_ + +#include +#include +#include + +#include "absl/strings/escaping.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" + +namespace webrtc { + +inline std::string Base64Encode(ArrayView data) { + return absl::Base64Escape(absl::string_view( + reinterpret_cast(data.data()), data.size())); +} + +inline std::string Base64Encode(absl::string_view data) { + return absl::Base64Escape(data); +} + +enum class Base64DecodeOptions { + kStrict, + // Matches https://infra.spec.whatwg.org/#forgiving-base64-decode. + kForgiving, +}; + +// Returns the decoded data if successful, or std::nullopt if the decoding +// failed. +std::optional Base64Decode( + absl::string_view data, + Base64DecodeOptions options = Base64DecodeOptions::kStrict); + +} // namespace webrtc + +#endif // RTC_BASE_BASE64_H_ diff --git a/rtc_base/base64_benchmark.cc b/rtc_base/base64_benchmark.cc new file mode 100644 index 0000000000..0c434d5bc3 --- /dev/null +++ b/rtc_base/base64_benchmark.cc @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2025 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
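
As a quick reference, a minimal sketch, not part of the diff, of the API declared in rtc_base/base64.h above; the encoded and decoded literals follow from ordinary base64 rules:

    absl::string_view data = "abcd";
    std::string encoded = webrtc::Base64Encode(data);  // "YWJjZA=="
    // Strict decoding (the default) rejects whitespace and any length that is
    // not a multiple of four, so the trailing newline makes it fail.
    std::optional<std::string> strict =
        webrtc::Base64Decode("YWJjZA==\n");  // std::nullopt
    // Forgiving decoding tolerates the whitespace and yields "abcd".
    std::optional<std::string> forgiving = webrtc::Base64Decode(
        "YWJjZA==\n", webrtc::Base64DecodeOptions::kForgiving);

The DecodeCertificate test later in this change relies on the same distinction for PEM-style input.
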
+ */ + +#include +#include +#include +#include + +#include "benchmark/benchmark.h" +#include "rtc_base/base64.h" + +namespace webrtc { +namespace { + +void BM_Base64Encode(benchmark::State& state) { + std::vector data(state.range(0)); + for (auto _ : state) { + Base64Encode(data); + } +} + +void BM_Base64Decode(benchmark::State& state) { + std::vector data(state.range(0)); + std::string encoded = Base64Encode(data); + for (auto _ : state) { + Base64Decode(encoded); + } +} + +void BM_Base64DecodeForgiving(benchmark::State& state) { + std::vector data(state.range(0)); + std::string encoded = Base64Encode(data); + // Add a newline every 64 chars. + for (size_t i = 0; i < encoded.size(); i += 64) { + encoded.insert(i, "\n"); + } + for (auto _ : state) { + Base64Decode(encoded, Base64DecodeOptions::kForgiving); + } +} + +BENCHMARK(BM_Base64Encode)->Range(64, 8 << 20); +BENCHMARK(BM_Base64Decode)->Range(64, 8 << 20); +BENCHMARK(BM_Base64DecodeForgiving)->Range(64, 8 << 20); + +} // namespace +} // namespace webrtc diff --git a/rtc_base/base64_unittest.cc b/rtc_base/base64_unittest.cc index 6722a1f5d9..51c90d5570 100644 --- a/rtc_base/base64_unittest.cc +++ b/rtc_base/base64_unittest.cc @@ -1,5 +1,5 @@ /* - * Copyright 2011 The WebRTC Project Authors. All rights reserved. + * Copyright (c) 2025 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,1447 +8,134 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "rtc_base/third_party/base64/base64.h" +#include "rtc_base/base64.h" -#include -#include +#include +#include -#include - -#include "rtc_base/logging.h" -#include "rtc_base/test_base64.h" +#include "absl/strings/string_view.h" +#include "test/gmock.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { -static struct { - size_t plain_length; - const char* plaintext; - const char* cyphertext; -} base64_tests[] = { - - // Basic bit patterns; - // values obtained with "echo -n '...' 
| uuencode -m test" - - {1, "\000", "AA=="}, - {1, "\001", "AQ=="}, - {1, "\002", "Ag=="}, - {1, "\004", "BA=="}, - {1, "\010", "CA=="}, - {1, "\020", "EA=="}, - {1, "\040", "IA=="}, - {1, "\100", "QA=="}, - {1, "\200", "gA=="}, - - {1, "\377", "/w=="}, - {1, "\376", "/g=="}, - {1, "\375", "/Q=="}, - {1, "\373", "+w=="}, - {1, "\367", "9w=="}, - {1, "\357", "7w=="}, - {1, "\337", "3w=="}, - {1, "\277", "vw=="}, - {1, "\177", "fw=="}, - {2, "\000\000", "AAA="}, - {2, "\000\001", "AAE="}, - {2, "\000\002", "AAI="}, - {2, "\000\004", "AAQ="}, - {2, "\000\010", "AAg="}, - {2, "\000\020", "ABA="}, - {2, "\000\040", "ACA="}, - {2, "\000\100", "AEA="}, - {2, "\000\200", "AIA="}, - {2, "\001\000", "AQA="}, - {2, "\002\000", "AgA="}, - {2, "\004\000", "BAA="}, - {2, "\010\000", "CAA="}, - {2, "\020\000", "EAA="}, - {2, "\040\000", "IAA="}, - {2, "\100\000", "QAA="}, - {2, "\200\000", "gAA="}, - - {2, "\377\377", "//8="}, - {2, "\377\376", "//4="}, - {2, "\377\375", "//0="}, - {2, "\377\373", "//s="}, - {2, "\377\367", "//c="}, - {2, "\377\357", "/+8="}, - {2, "\377\337", "/98="}, - {2, "\377\277", "/78="}, - {2, "\377\177", "/38="}, - {2, "\376\377", "/v8="}, - {2, "\375\377", "/f8="}, - {2, "\373\377", "+/8="}, - {2, "\367\377", "9/8="}, - {2, "\357\377", "7/8="}, - {2, "\337\377", "3/8="}, - {2, "\277\377", "v/8="}, - {2, "\177\377", "f/8="}, - - {3, "\000\000\000", "AAAA"}, - {3, "\000\000\001", "AAAB"}, - {3, "\000\000\002", "AAAC"}, - {3, "\000\000\004", "AAAE"}, - {3, "\000\000\010", "AAAI"}, - {3, "\000\000\020", "AAAQ"}, - {3, "\000\000\040", "AAAg"}, - {3, "\000\000\100", "AABA"}, - {3, "\000\000\200", "AACA"}, - {3, "\000\001\000", "AAEA"}, - {3, "\000\002\000", "AAIA"}, - {3, "\000\004\000", "AAQA"}, - {3, "\000\010\000", "AAgA"}, - {3, "\000\020\000", "ABAA"}, - {3, "\000\040\000", "ACAA"}, - {3, "\000\100\000", "AEAA"}, - {3, "\000\200\000", "AIAA"}, - {3, "\001\000\000", "AQAA"}, - {3, "\002\000\000", "AgAA"}, - {3, "\004\000\000", "BAAA"}, - {3, "\010\000\000", "CAAA"}, - {3, "\020\000\000", "EAAA"}, - {3, "\040\000\000", "IAAA"}, - {3, "\100\000\000", "QAAA"}, - {3, "\200\000\000", "gAAA"}, - - {3, "\377\377\377", "////"}, - {3, "\377\377\376", "///+"}, - {3, "\377\377\375", "///9"}, - {3, "\377\377\373", "///7"}, - {3, "\377\377\367", "///3"}, - {3, "\377\377\357", "///v"}, - {3, "\377\377\337", "///f"}, - {3, "\377\377\277", "//+/"}, - {3, "\377\377\177", "//9/"}, - {3, "\377\376\377", "//7/"}, - {3, "\377\375\377", "//3/"}, - {3, "\377\373\377", "//v/"}, - {3, "\377\367\377", "//f/"}, - {3, "\377\357\377", "/+//"}, - {3, "\377\337\377", "/9//"}, - {3, "\377\277\377", "/7//"}, - {3, "\377\177\377", "/3//"}, - {3, "\376\377\377", "/v//"}, - {3, "\375\377\377", "/f//"}, - {3, "\373\377\377", "+///"}, - {3, "\367\377\377", "9///"}, - {3, "\357\377\377", "7///"}, - {3, "\337\377\377", "3///"}, - {3, "\277\377\377", "v///"}, - {3, "\177\377\377", "f///"}, - - // Random numbers: values obtained with - // - // #! 
/bin/bash - // dd bs=$1 count=1 if=/dev/random of=/tmp/bar.random - // od -N $1 -t o1 /tmp/bar.random - // uuencode -m test < /tmp/bar.random - // - // where $1 is the number of bytes (2, 3) - - {2, "\243\361", "o/E="}, - {2, "\024\167", "FHc="}, - {2, "\313\252", "y6o="}, - {2, "\046\041", "JiE="}, - {2, "\145\236", "ZZ4="}, - {2, "\254\325", "rNU="}, - {2, "\061\330", "Mdg="}, - {2, "\245\032", "pRo="}, - {2, "\006\000", "BgA="}, - {2, "\375\131", "/Vk="}, - {2, "\303\210", "w4g="}, - {2, "\040\037", "IB8="}, - {2, "\261\372", "sfo="}, - {2, "\335\014", "3Qw="}, - {2, "\233\217", "m48="}, - {2, "\373\056", "+y4="}, - {2, "\247\232", "p5o="}, - {2, "\107\053", "Rys="}, - {2, "\204\077", "hD8="}, - {2, "\276\211", "vok="}, - {2, "\313\110", "y0g="}, - {2, "\363\376", "8/4="}, - {2, "\251\234", "qZw="}, - {2, "\103\262", "Q7I="}, - {2, "\142\312", "Yso="}, - {2, "\067\211", "N4k="}, - {2, "\220\001", "kAE="}, - {2, "\152\240", "aqA="}, - {2, "\367\061", "9zE="}, - {2, "\133\255", "W60="}, - {2, "\176\035", "fh0="}, - {2, "\032\231", "Gpk="}, - - {3, "\013\007\144", "Cwdk"}, - {3, "\030\112\106", "GEpG"}, - {3, "\047\325\046", "J9Um"}, - {3, "\310\160\022", "yHAS"}, - {3, "\131\100\237", "WUCf"}, - {3, "\064\342\134", "NOJc"}, - {3, "\010\177\004", "CH8E"}, - {3, "\345\147\205", "5WeF"}, - {3, "\300\343\360", "wOPw"}, - {3, "\061\240\201", "MaCB"}, - {3, "\225\333\044", "ldsk"}, - {3, "\215\137\352", "jV/q"}, - {3, "\371\147\160", "+Wdw"}, - {3, "\030\320\051", "GNAp"}, - {3, "\044\174\241", "JHyh"}, - {3, "\260\127\037", "sFcf"}, - {3, "\111\045\033", "SSUb"}, - {3, "\202\114\107", "gkxH"}, - {3, "\057\371\042", "L/ki"}, - {3, "\223\247\244", "k6ek"}, - {3, "\047\216\144", "J45k"}, - {3, "\203\070\327", "gzjX"}, - {3, "\247\140\072", "p2A6"}, - {3, "\124\115\116", "VE1O"}, - {3, "\157\162\050", "b3Io"}, - {3, "\357\223\004", "75ME"}, - {3, "\052\117\156", "Kk9u"}, - {3, "\347\154\000", "52wA"}, - {3, "\303\012\142", "wwpi"}, - {3, "\060\035\362", "MB3y"}, - {3, "\130\226\361", "WJbx"}, - {3, "\173\013\071", "ews5"}, - {3, "\336\004\027", "3gQX"}, - {3, "\357\366\234", "7/ac"}, - {3, "\353\304\111", "68RJ"}, - {3, "\024\264\131", "FLRZ"}, - {3, "\075\114\251", "PUyp"}, - {3, "\315\031\225", "zRmV"}, - {3, "\154\201\276", "bIG+"}, - {3, "\200\066\072", "gDY6"}, - {3, "\142\350\267", "Yui3"}, - {3, "\033\000\166", "GwB2"}, - {3, "\210\055\077", "iC0/"}, - {3, "\341\037\124", "4R9U"}, - {3, "\161\103\152", "cUNq"}, - {3, "\270\142\131", "uGJZ"}, - {3, "\337\076\074", "3z48"}, - {3, "\375\106\362", "/Uby"}, - {3, "\227\301\127", "l8FX"}, - {3, "\340\002\234", "4AKc"}, - {3, "\121\064\033", "UTQb"}, - {3, "\157\134\143", "b1xj"}, - {3, "\247\055\327", "py3X"}, - {3, "\340\142\005", "4GIF"}, - {3, "\060\260\143", "MLBj"}, - {3, "\075\203\170", "PYN4"}, - {3, "\143\160\016", "Y3AO"}, - {3, "\313\013\063", "ywsz"}, - {3, "\174\236\135", "fJ5d"}, - {3, "\103\047\026", "QycW"}, - {3, "\365\005\343", "9QXj"}, - {3, "\271\160\223", "uXCT"}, - {3, "\362\255\172", "8q16"}, - {3, "\113\012\015", "SwoN"}, - - // various lengths, generated by this python script: - // - // from string import lowercase as lc - // for i in range(27): - // print '{ %2d, "%s",%s "%s" },' % (i, lc[:i], ' ' * (26-i), - // lc[:i].encode('base64').strip()) - - {0, "abcdefghijklmnopqrstuvwxyz", ""}, - {1, "abcdefghijklmnopqrstuvwxyz", "YQ=="}, - {2, "abcdefghijklmnopqrstuvwxyz", "YWI="}, - {3, "abcdefghijklmnopqrstuvwxyz", "YWJj"}, - {4, "abcdefghijklmnopqrstuvwxyz", "YWJjZA=="}, - {5, "abcdefghijklmnopqrstuvwxyz", "YWJjZGU="}, - {6, 
"abcdefghijklmnopqrstuvwxyz", "YWJjZGVm"}, - {7, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZw=="}, - {8, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2g="}, - {9, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hp"}, - {10, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpag=="}, - {11, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpams="}, - {12, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamts"}, - {13, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbQ=="}, - {14, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW4="}, - {15, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5v"}, - {16, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcA=="}, - {17, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHE="}, - {18, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFy"}, - {19, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFycw=="}, - {20, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3Q="}, - {21, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1"}, - {22, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dg=="}, - {23, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnc="}, - {24, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4"}, - {25, "abcdefghijklmnopqrstuvwxy", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eQ=="}, - {26, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXo="}, -}; -#if 0 -static struct { - const char* plaintext; - const char* cyphertext; -} base64_strings[] = { - - // The first few Google quotes - // Cyphertext created with "uuencode - GNU sharutils 4.2.1" - { - "Everyone! We're teetering on the brink of disaster." - " - Sergey Brin, 6/24/99, regarding the company's state " - "after the unleashing of Netscape/Google search", - - "RXZlcnlvbmUhICBXZSdyZSB0ZWV0ZXJpbmcgb24gdGhlIGJyaW5rIG9mIGRp" - "c2FzdGVyLiAtIFNlcmdleSBCcmluLCA2LzI0Lzk5LCByZWdhcmRpbmcgdGhl" - "IGNvbXBhbnkncyBzdGF0ZSBhZnRlciB0aGUgdW5sZWFzaGluZyBvZiBOZXRz" - "Y2FwZS9Hb29nbGUgc2VhcmNo" }, - - { - "I'm not sure why we're still alive, but we seem to be." - " - Larry Page, 6/24/99, while hiding in the kitchenette " - "during the Netscape traffic overflow", - - "SSdtIG5vdCBzdXJlIHdoeSB3ZSdyZSBzdGlsbCBhbGl2ZSwgYnV0IHdlIHNl" - "ZW0gdG8gYmUuIC0gTGFycnkgUGFnZSwgNi8yNC85OSwgd2hpbGUgaGlkaW5n" - "IGluIHRoZSBraXRjaGVuZXR0ZSBkdXJpbmcgdGhlIE5ldHNjYXBlIHRyYWZm" - "aWMgb3ZlcmZsb3c" }, - - { - "I think kids want porn." - " - Sergey Brin, 6/99, on why Google shouldn't prioritize a " - "filtered search for children and families", - - "SSB0aGluayBraWRzIHdhbnQgcG9ybi4gLSBTZXJnZXkgQnJpbiwgNi85OSwg" - "b24gd2h5IEdvb2dsZSBzaG91bGRuJ3QgcHJpb3JpdGl6ZSBhIGZpbHRlcmVk" - "IHNlYXJjaCBmb3IgY2hpbGRyZW4gYW5kIGZhbWlsaWVz" }, -}; -#endif -// Compare bytes 0..len-1 of x and y. If not equal, abort with verbose error -// message showing position and numeric value that differed. -// Handles embedded nulls just like any other byte. -// Only added because string.compare() in gcc-3.3.3 seems to misbehave with -// embedded nulls. 
-// TODO: switch back to string.compare() if/when gcc is fixed -#define EXPECT_EQ_ARRAY(len, x, y, msg) \ - for (size_t j = 0; j < len; ++j) { \ - if (x[j] != y[j]) { \ - RTC_LOG(LS_ERROR) << "" #x << " != " #y << " byte " << j \ - << " msg: " << msg; \ - } \ - } - -size_t Base64Escape(const unsigned char* src, - size_t szsrc, - char* dest, - size_t szdest) { - std::string escaped; - Base64::EncodeFromArray((const char*)src, szsrc, &escaped); - memcpy(dest, escaped.data(), std::min(escaped.size(), szdest)); - return escaped.size(); -} - -size_t Base64Unescape(const char* src, - size_t szsrc, - char* dest, - size_t szdest) { - std::string unescaped; - EXPECT_TRUE( - Base64::DecodeFromArray(src, szsrc, Base64::DO_LAX, &unescaped, nullptr)); - memcpy(dest, unescaped.data(), std::min(unescaped.size(), szdest)); - return unescaped.size(); -} - -size_t Base64Unescape(const char* src, size_t szsrc, std::string* s) { - EXPECT_TRUE(Base64::DecodeFromArray(src, szsrc, Base64::DO_LAX, s, nullptr)); - return s->size(); -} - -TEST(Base64, EncodeDecodeBattery) { - RTC_LOG(LS_VERBOSE) << "Testing base-64"; +using ::testing::Eq; +using ::testing::Optional; +using ::testing::SizeIs; +using ::testing::TestWithParam; - size_t i; - - // Check the short strings; this tests the math (and boundaries) - for (i = 0; i < sizeof(base64_tests) / sizeof(base64_tests[0]); ++i) { - char encode_buffer[100]; - size_t encode_length; - char decode_buffer[100]; - size_t decode_length; - size_t cypher_length; - - RTC_LOG(LS_VERBOSE) << "B64: " << base64_tests[i].cyphertext; - - const unsigned char* unsigned_plaintext = - reinterpret_cast(base64_tests[i].plaintext); - - cypher_length = strlen(base64_tests[i].cyphertext); - - // The basic escape function: - memset(encode_buffer, 0, sizeof(encode_buffer)); - encode_length = - Base64Escape(unsigned_plaintext, base64_tests[i].plain_length, - encode_buffer, sizeof(encode_buffer)); - // Is it of the expected length? - EXPECT_EQ(encode_length, cypher_length); - - // Is it the expected encoded value? - EXPECT_STREQ(encode_buffer, base64_tests[i].cyphertext); - - // If we encode it into a buffer of exactly the right length... - memset(encode_buffer, 0, sizeof(encode_buffer)); - encode_length = - Base64Escape(unsigned_plaintext, base64_tests[i].plain_length, - encode_buffer, cypher_length); - // Is it still of the expected length? - EXPECT_EQ(encode_length, cypher_length); - - // And is the value still correct? (i.e., not losing the last byte) - EXPECT_STREQ(encode_buffer, base64_tests[i].cyphertext); - - // If we decode it back: - memset(decode_buffer, 0, sizeof(decode_buffer)); - decode_length = Base64Unescape(encode_buffer, cypher_length, decode_buffer, - sizeof(decode_buffer)); - - // Is it of the expected length? - EXPECT_EQ(decode_length, base64_tests[i].plain_length); - - // Is it the expected decoded value? - EXPECT_EQ(0, - memcmp(decode_buffer, base64_tests[i].plaintext, decode_length)); - - // Our decoder treats the padding '=' characters at the end as - // optional. If encode_buffer has any, run some additional - // tests that fiddle with them. - char* first_equals = strchr(encode_buffer, '='); - if (first_equals) { - // How many equals signs does the string start with? - int equals = (*(first_equals + 1) == '=') ? 2 : 1; - - // Try chopping off the equals sign(s) entirely. The decoder - // should still be okay with this. 
- std::string decoded2("this junk should also be ignored"); - *first_equals = '\0'; - EXPECT_NE(0U, Base64Unescape(encode_buffer, first_equals - encode_buffer, - &decoded2)); - EXPECT_EQ(decoded2.size(), base64_tests[i].plain_length); - EXPECT_EQ_ARRAY(decoded2.size(), decoded2.data(), - base64_tests[i].plaintext, i); - - size_t len; - - // try putting some extra stuff after the equals signs, or in between them - if (equals == 2) { - snprintf(first_equals, 6, " = = "); - len = first_equals - encode_buffer + 5; - } else { - snprintf(first_equals, 6, " = "); - len = first_equals - encode_buffer + 3; - } - decoded2.assign("this junk should be ignored"); - EXPECT_NE(0U, Base64Unescape(encode_buffer, len, &decoded2)); - EXPECT_EQ(decoded2.size(), base64_tests[i].plain_length); - EXPECT_EQ_ARRAY(decoded2.size(), decoded2, base64_tests[i].plaintext, i); - } - } +TEST(Base64Test, Encode) { + std::string data{0x64, 0x65, 0x66}; + EXPECT_THAT(Base64Encode(data), Eq("ZGVm")); } -// here's a weird case: a giant base64 encoded stream which broke our base64 -// decoding. Let's test it explicitly. -const char SpecificTest[] = - "/9j/4AAQSkZJRgABAgEASABIAAD/" - "4Q0HRXhpZgAATU0AKgAAAAgADAEOAAIAAAAgAAAAngEPAAI\n" - "AAAAFAAAAvgEQAAIAAAAJAAAAwwESAAMAAAABAAEAAAEaAAUAAAABAAAAzAEbAAUAAAABAAAA1" - "A\n" - "EoAAMAAAABAAIAAAExAAIAAAAUAAAA3AEyAAIAAAAUAAAA8AE8AAIAAAAQAAABBAITAAMAAAAB" - "A\n" - "AIAAIdpAAQAAAABAAABFAAAAsQgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgAFNPTlk" - "A\n" - "RFNDLVAyMDAAAAAASAAAAAEAAABIAAAAAUFkb2JlIFBob3Rvc2hvcCA3LjAAMjAwNzowMTozMC" - "A\n" - "yMzoxMDowNABNYWMgT1MgWCAxMC40LjgAAByCmgAFAAAAAQAAAmqCnQAFAAAAAQAAAnKIIgADA" - "A\n" - "AAAQACAACIJwADAAAAAQBkAACQAAAHAAAABDAyMjCQAwACAAAAFAAAAnqQBAACAAAAFAAAAo6R" - "A\n" - "QAHAAAABAECAwCRAgAFAAAAAQAAAqKSBAAKAAAAAQAAAqqSBQAFAAAAAQAAArKSBwADAAAAAQA" - "F\n" - "AACSCAADAAAAAQAAAACSCQADAAAAAQAPAACSCgAFAAAAAQAAArqgAAAHAAAABDAxMDCgAQADAA" - "A\n" - "AAf//" - "AACgAgAEAAAAAQAAAGSgAwAEAAAAAQAAAGSjAAAHAAAAAQMAAACjAQAHAAAAAQEAAACkAQ\n" - "ADAAAAAQAAAACkAgADAAAAAQAAAACkAwADAAAAAQAAAACkBgADAAAAAQAAAACkCAADAAAAAQAA" - "A\n" - "ACkCQADAAAAAQAAAACkCgADAAAAAQAAAAAAAAAAAAAACgAAAZAAAAAcAAAACjIwMDc6MDE6MjA" - "g\n" - "MjM6MDU6NTIAMjAwNzowMToyMCAyMzowNTo1MgAAAAAIAAAAAQAAAAAAAAAKAAAAMAAAABAAAA" - "B\n" - "PAAAACgAAAAYBAwADAAAAAQAGAAABGgAFAAAAAQAAAxIBGwAFAAAAAQAAAxoBKAADAAAAAQACA" - "A\n" - "ACAQAEAAAAAQAAAyICAgAEAAAAAQAACd0AAAAAAAAASAAAAAEAAABIAAAAAf/Y/" - "+AAEEpGSUYAA\n" - "QIBAEgASAAA/+0ADEFkb2JlX0NNAAL/7gAOQWRvYmUAZIAAAAAB/" - "9sAhAAMCAgICQgMCQkMEQsK\n" - "CxEVDwwMDxUYExMVExMYEQwMDAwMDBEMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAQ0LCw" - "0\n" - "ODRAODhAUDg4OFBQODg4OFBEMDAwMDBERDAwMDAwMEQwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMD" - "A\n" - "wMDAz/wAARCABkAGQDASIAAhEBAxEB/90ABAAH/" - "8QBPwAAAQUBAQEBAQEAAAAAAAAAAwABAgQFB\n" - "gcICQoLAQABBQEBAQEBAQAAAAAAAAABAAIDBAUGBwgJCgsQAAEEAQMCBAIFBwYIBQMMMwEAAhE" - "D\n" - "BCESMQVBUWETInGBMgYUkaGxQiMkFVLBYjM0coLRQwclklPw4fFjczUWorKDJkSTVGRFwqN0Nh" - "f\n" - "SVeJl8rOEw9N14/" - "NGJ5SkhbSVxNTk9KW1xdXl9VZmdoaWprbG1ub2N0dXZ3eHl6e3x9fn9xEAAg\n" - "IBAgQEAwQFBgcHBgU1AQACEQMhMRIEQVFhcSITBTKBkRShsUIjwVLR8DMkYuFygpJDUxVjczTx" - "J\n" - "QYWorKDByY1wtJEk1SjF2RFVTZ0ZeLys4TD03Xj80aUpIW0lcTU5PSltcXV5fVWZnaGlqa2xtb" - "m\n" - "9ic3R1dnd4eXp7fH/" - "9oADAMBAAIRAxEAPwDy7bKNTUXNLz9EaJPDWMjxH4ozhtpYwaACT8ShaaW\n" - "bW0uEc9/JFfjj0Q4Hk/PRDxwX7y47W9z/" - "AN9Cv4+O3ILK2DcRqT2CaSvEbcl1Jbz37KG1dBldLo\n" - "qaS4l9xGjG9v6yoDAdYIaIjUk+AREgo4y5sapirb8Yl0NHHdKvBNm4yA1o5Pc+" - "SPEFvCWqB3HZF\n" - "Hj2SbWQ/" - 
"afGFP0bHP8ATY0uc4w1o1JPkkimGiS2KvqlnmBkOZQTyydzgPMM9v8A0lp4v1Nx9gF1\n" - "tpdqJaGtH/S3I0i3lISXW/8AMqnd/O2bfg2eUkqVYf/" - "Q8zuncO4Bj7lZ+n7f5Mj5KsJcY8NUZ4d\n" - "uEDVo1HkeU0rg3Om4H2rabCWUN7DQuK1n5FWKW4uCwG92gDRJBS6exhxmMboQI+" - "Cv4WFTQ42Bs2\n" - "fvnkkqEmy2YxoMMbpVzaz6jt+RbpHZs8lzkHqrasKkYOKP0jgDfZ4N/" - "wDM1tNrcWfSPmRyq9uNV\n" - "DnFg2s97i7UkjxKVrq0eVz3spZsja+ASDzwsh9jnOk/" - "JFzb3XZD3v1c4yT8UACTCniKDUnKz5Nj\n" - "G33XV1DV73BrT8dF23SejV4zg9g33cOsPb+SxVvqv9ViwNy8vS0iWs/" - "daf8A0Y5dpTi1sADGxCR\n" - "K1o0YBEmInlXWYbDBcDLdPJXa8f71Yrx2jnUoAqLnfZK5hJaW2vdwEk5a/wD/0fN6Ia/" - "e76IiVf\n" - "xavUL7CPpnT4LNbYXAVjuQt/AqDmNYO/" - "Kjnoy4hr5J8SwMhrRMaeSvbsxrfUazcOw4UX0Cisem2\n" - "SBoD4+" - "Kz8nC6llbSLCRrubJA8kwUWbUDa29X1PMa7aQWjuDC0MXMdbDbhI7eazBiUfZ6GOYRe1s\n" - "WvGgJ8Vbw2+m4Bx9s6JpNHuuGo1FF53r/" - "SHYua61gLse0lzXeBP5rkvqx0o5vVWz7WY49QkiQSP\n" - "oN/tLoevW/ogxv0HA7tJ0AnhT+pdDGYVl/wCdcTPkGn2NU0JWNWvlgAbHV6fEqdu2gR/" - "r2WlWwt\n" - "AA5VXAEsLXTqJafArQY5rRr9LiPBJiZsZCI1pJjxCi0j4oncSICSkWwzwkjeaSch//" - "0vO7sP7Lm\n" - "enO9ogtd5FbPT3Q5pCpZVc4ld3Lmn3O8j9EI2BYdunKjOobMQIyI+rusc2wx4d0eutwGnHh/" - "uQc\n" - "Ha7ladj6mVANGvcqOgz0Go7HJ12/GEHcwvB/dPY6ImbbaMaASGuIBjkN7qofs9Ubg9g7OI9p/" - "t/\n" - "RTSmhTHr0v6eSz6UgCPP2/wAVu9Ex2V49dVY2iACB4BZeVXQ/" - "AJ3gzGnnOi2+kACpru8flUsNmt\n" - "zHRf6xfWCnoeAfTh2ZaQKazx/" - "Ke7+QxcKz61fWA2uuObaC4zGhaPJrXBL64ZFmR124O09ENraPK\n" - "N3/AH5GqxIrZVUyp2K2vfdkENsDnxuex9m4Ox9n82xSgNd9D+p/XR1npgseR9ppOy4Dx/NfH/" - "CL\n" - "oQJGunmvMv8AFq3KHVcq3HkYQbD2nuSf0I/rMavSg6TLjLigQhJ7Z58v9QkmlsTOqSCn/" - "9PzL7R\n" - "d6Qq3n0wZ2zotXpT9xLfFYvkr/S7jXeB8E0jRkhKpC3q8LcJ/kmCrTnkuAPCq4do9Q/" - "ytVbuAeY\n" - "Gg5lQybQK+" - "82GBqEQUA1kOHPYf3LLsoyN36G5w8iUfHxepbXE2l0cApALgLHzBq9UxhTXU5hMC1\n" - "ktnSCup6S4Ctk+C5XqVGcaHPfuiuHkeTTuWz0+9zaKiH6CC0/yXBSQ2a/" - "MxojV57634rq+v2PLY\n" - "be1r2nsYG13/" - "AFKxbfCBMcr0brGAzrGEwCG31ncx0SfBzf7S4+zoHUWWsJq3hz9oLfcBH77R9H+\n" - "0pA13u/qPgDp/Q6ri39JlfpXkDx+h/" - "msWn1L6wdO6bSbcrIbU2Q0xLnSe21kuVejJspbVS5+4bd\n" - "ocBAkD/orG+tP1ar67Wy7GtZTm1SCXfRsb+a18fRe38x6SG3/44H1Z3f0y2I+l6DoSXD/" - "8xPrDs\n" - "3enVu3bdnqN3R+//USSVo//" - "1PLohhce+gRWS0Nsby3lRgFkKxQyW7SgUh3em5Tbq2uB9wWw1wey\n" - "J1XGV2XYdm5k7e4WzidXY9oMwo5RZ4T6Hd1ixwfp96PWbAJBVTHzK7O6Ky5oJB1HZMqmUEFlkG" - "y\n" - "xpa4zI1Hkq31dy7bMN9BAc3HeWAnnbyxEycmuup1jiAGglZ31PyrmZ9tQg1WtNj54EHR3/" - "S2qTH\n" - "1Yc5GgD1FFtzPdWGkd2AyflogZmRmsz6PSrbXbdo+" - "txOrP337f3fzVo15DK2uyrTtqpBOnBKx6b\n" - "7MjJsz7tHWOAYP3WD6LU6cqGjFCNl1MmvLcxv6YtDTLSAqP27LrdtYHXFnJZI+" - "Tp3MWg68OpDPv\n" - "UMUM2lkQBoouKQ6swjE9Nml+1sz1PW+z6xt27zuj+skrX2ZvqR5z8kkuOfdPt43/1fMm/" - "grFG6f\n" - "Lss9JA7JG7tnZs/SfJUrfS3foJ9TvHCopJsV8nWx/t24bJn8Fo/5TjWJXMJIS+i+G36TsZ/" - "7Q9P\n" - "8ATfzfeOFofVSZv2/zvt+O3X/v65dJPjt/BiyfN1/wn0zre79nVej/ADG8ep4x2/" - "6Srjd6TdviF\n" - "52ko8m6/Ht9X1KnftEo+POwxzK8mSTF46vrH6T1/OEl5Okkl//Z/" - "+0uHFBob3Rvc2hvcCAzLjAA\n" - "OEJJTQQEAAAAAAArHAIAAAIAAhwCeAAfICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA" - "A\n" - "4QklNBCUAAAAAABD7Caa9B0wqNp2P4sxXqayFOEJJTQPqAAAAAB2wPD94bWwgdmVyc2lvbj0iM" - "S\n" - "4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPCFET0NUWVBFIHBsaXN0IFBVQkxJQyAiLS8vQXBwbGUg" - "Q\n" - "29tcHV0ZXIvL0RURCBQTElTVCAxLjAvL0VOIiAiaHR0cDovL3d3dy5hcHBsZS5jb20vRFREcy9" - "Q\n" - "cm9wZXJ0eUxpc3QtMS4wLmR0ZCI+" - "CjxwbGlzdCB2ZXJzaW9uPSIxLjAiPgo8ZGljdD4KCTxrZXk\n" - "+Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQuUE1Ib3Jpem9udGFsUmVzPC9rZXk+" - "Cgk8ZGljdD\n" - "4KCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuY3JlYXRvcjwva2V5PgoJCTxzdHJpbmc+" - "Y\n" - "29tLmFwcGxlLnByaW50aW5nbWFuYWdlcjwvc3RyaW5nPgoJCTxrZXk+" - "Y29tLmFwcGxlLnByaW50\n" - 
"LnRpY2tldC5pdGVtQXJyYXk8L2tleT4KCQk8YXJyYXk+" - "CgkJCTxkaWN0PgoJCQkJPGtleT5jb20\n" - "uYXBwbGUucHJpbnQuUGFnZUZvcm1hdC5QTUhvcml6b250YWxSZXM8L2tleT4KCQkJCTxyZWFsP" - "j\n" - "cyPC9yZWFsPgoJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNsaWVudDwva2V5PgoJ" - "C\n" - "QkJPHN0cmluZz5jb20uYXBwbGUucHJpbnRpbmdtYW5hZ2VyPC9zdHJpbmc+" - "CgkJCQk8a2V5PmNv\n" - "bS5hcHBsZS5wcmludC50aWNrZXQubW9kRGF0ZTwva2V5PgoJCQkJPGRhdGU+" - "MjAwNy0wMS0zMFQ\n" - "yMjowODo0MVo8L2RhdGU+" - "CgkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuc3RhdGVGbG\n" - "FnPC9rZXk+CgkJCQk8aW50ZWdlcj4wPC9pbnRlZ2VyPgoJCQk8L2RpY3Q+" - "CgkJPC9hcnJheT4KC\n" - "TwvZGljdD4KCTxrZXk+" - "Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQuUE1PcmllbnRhdGlvbjwv\n" - "a2V5PgoJPGRpY3Q+" - "CgkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNyZWF0b3I8L2tleT4\n" - "KCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbmFnZXI8L3N0cmluZz4KCQk8a2V5PmNvb" - "S\n" - "5hcHBsZS5wcmludC50aWNrZXQuaXRlbUFycmF5PC9rZXk+" - "CgkJPGFycmF5PgoJCQk8ZGljdD4KC\n" - "QkJCTxrZXk+" - "Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQuUE1PcmllbnRhdGlvbjwva2V5PgoJ\n" - "CQkJPGludGVnZXI+MTwvaW50ZWdlcj4KCQkJCTxrZXk+" - "Y29tLmFwcGxlLnByaW50LnRpY2tldC5\n" - "jbGllbnQ8L2tleT4KCQkJCTxzdHJpbmc+" - "Y29tLmFwcGxlLnByaW50aW5nbWFuYWdlcjwvc3RyaW\n" - "5nPgoJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0Lm1vZERhdGU8L2tleT4KCQkJCTxk" - "Y\n" - "XRlPjIwMDctMDEtMzBUMjI6MDg6NDFaPC9kYXRlPgoJCQkJPGtleT5jb20uYXBwbGUucHJpbnQ" - "u\n" - "dGlja2V0LnN0YXRlRmxhZzwva2V5PgoJCQkJPGludGVnZXI+" - "MDwvaW50ZWdlcj4KCQkJPC9kaWN\n" - "0PgoJCTwvYXJyYXk+Cgk8L2RpY3Q+" - "Cgk8a2V5PmNvbS5hcHBsZS5wcmludC5QYWdlRm9ybWF0Ll\n" - "BNU2NhbGluZzwva2V5PgoJPGRpY3Q+" - "CgkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNyZ\n" - "WF0b3I8L2tleT4KCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbmFnZXI8L3N0cmluZz4" - "K\n" - "CQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuaXRlbUFycmF5PC9rZXk+" - "CgkJPGFycmF5Pgo\n" - "JCQk8ZGljdD4KCQkJCTxrZXk+" - "Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQuUE1TY2FsaW5nPC\n" - "9rZXk+" - "CgkJCQk8cmVhbD4xPC9yZWFsPgoJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0L\n" - "mNsaWVudDwva2V5PgoJCQkJPHN0cmluZz5jb20uYXBwbGUucHJpbnRpbmdtYW5hZ2VyPC9zdHJ" - "p\n" - "bmc+" - "CgkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQubW9kRGF0ZTwva2V5PgoJCQkJPGR\n" - "hdGU+MjAwNy0wMS0zMFQyMjowODo0MVo8L2RhdGU+" - "CgkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC\n" - "50aWNrZXQuc3RhdGVGbGFnPC9rZXk+" - "CgkJCQk8aW50ZWdlcj4wPC9pbnRlZ2VyPgoJCQk8L2RpY\n" - "3Q+CgkJPC9hcnJheT4KCTwvZGljdD4KCTxrZXk+" - "Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQu\n" - "UE1WZXJ0aWNhbFJlczwva2V5PgoJPGRpY3Q+" - "CgkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V\n" - "0LmNyZWF0b3I8L2tleT4KCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbmFnZXI8L3N0c" - "m\n" - "luZz4KCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuaXRlbUFycmF5PC9rZXk+" - "CgkJPGFyc\n" - "mF5PgoJCQk8ZGljdD4KCQkJCTxrZXk+" - "Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQuUE1WZXJ0\n" - "aWNhbFJlczwva2V5PgoJCQkJPHJlYWw+NzI8L3JlYWw+" - "CgkJCQk8a2V5PmNvbS5hcHBsZS5wcml\n" - "udC50aWNrZXQuY2xpZW50PC9rZXk+" - "CgkJCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbm\n" - "FnZXI8L3N0cmluZz4KCQkJCTxrZXk+" - "Y29tLmFwcGxlLnByaW50LnRpY2tldC5tb2REYXRlPC9rZ\n" - "Xk+CgkJCQk8ZGF0ZT4yMDA3LTAxLTMwVDIyOjA4OjQxWjwvZGF0ZT4KCQkJCTxrZXk+" - "Y29tLmFw\n" - "cGxlLnByaW50LnRpY2tldC5zdGF0ZUZsYWc8L2tleT4KCQkJCTxpbnRlZ2VyPjA8L2ludGVnZX" - "I\n" - "+CgkJCTwvZGljdD4KCQk8L2FycmF5PgoJPC9kaWN0PgoJPGtleT5jb20uYXBwbGUucHJpbnQuU" - "G\n" - "FnZUZvcm1hdC5QTVZlcnRpY2FsU2NhbGluZzwva2V5PgoJPGRpY3Q+" - "CgkJPGtleT5jb20uYXBwb\n" - "GUucHJpbnQudGlja2V0LmNyZWF0b3I8L2tleT4KCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGl" - "u\n" - 
"Z21hbmFnZXI8L3N0cmluZz4KCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuaXRlbUFycm" - "F\n" - "5PC9rZXk+CgkJPGFycmF5PgoJCQk8ZGljdD4KCQkJCTxrZXk+" - "Y29tLmFwcGxlLnByaW50LlBhZ2\n" - "VGb3JtYXQuUE1WZXJ0aWNhbFNjYWxpbmc8L2tleT4KCQkJCTxyZWFsPjE8L3JlYWw+" - "CgkJCQk8a\n" - "2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuY2xpZW50PC9rZXk+" - "CgkJCQk8c3RyaW5nPmNvbS5h\n" - "cHBsZS5wcmludGluZ21hbmFnZXI8L3N0cmluZz4KCQkJCTxrZXk+" - "Y29tLmFwcGxlLnByaW50LnR\n" - "pY2tldC5tb2REYXRlPC9rZXk+" - "CgkJCQk8ZGF0ZT4yMDA3LTAxLTMwVDIyOjA4OjQxWjwvZGF0ZT\n" - "4KCQkJCTxrZXk+" - "Y29tLmFwcGxlLnByaW50LnRpY2tldC5zdGF0ZUZsYWc8L2tleT4KCQkJCTxpb\n" - "nRlZ2VyPjA8L2ludGVnZXI+" - "CgkJCTwvZGljdD4KCQk8L2FycmF5PgoJPC9kaWN0PgoJPGtleT5j\n" - "b20uYXBwbGUucHJpbnQuc3ViVGlja2V0LnBhcGVyX2luZm9fdGlja2V0PC9rZXk+" - "Cgk8ZGljdD4\n" - "KCQk8a2V5PmNvbS5hcHBsZS5wcmludC5QYWdlRm9ybWF0LlBNQWRqdXN0ZWRQYWdlUmVjdDwva" - "2\n" - "V5PgoJCTxkaWN0PgoJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuY3JlYXRvcjwva2V5" - "P\n" - "goJCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbmFnZXI8L3N0cmluZz4KCQkJPGtleT5" - "j\n" - "b20uYXBwbGUucHJpbnQudGlja2V0Lml0ZW1BcnJheTwva2V5PgoJCQk8YXJyYXk+" - "CgkJCQk8ZGl\n" - "jdD4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC5QYWdlRm9ybWF0LlBNQWRqdXN0ZWRQYWdlU" - "m\n" - "VjdDwva2V5PgoJCQkJCTxhcnJheT4KCQkJCQkJPHJlYWw+" - "MC4wPC9yZWFsPgoJCQkJCQk8cmVhb\n" - "D4wLjA8L3JlYWw+CgkJCQkJCTxyZWFsPjczNDwvcmVhbD4KCQkJCQkJPHJlYWw+" - "NTc2PC9yZWFs\n" - "PgoJCQkJCTwvYXJyYXk+" - "CgkJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNsaWVudDw\n" - "va2V5PgoJCQkJCTxzdHJpbmc+" - "Y29tLmFwcGxlLnByaW50aW5nbWFuYWdlcjwvc3RyaW5nPgoJCQ\n" - "kJCTxrZXk+Y29tLmFwcGxlLnByaW50LnRpY2tldC5tb2REYXRlPC9rZXk+CgkJCQkJPGRhdGU+" - "M\n" - "jAwNy0wMS0zMFQyMjowODo0MVo8L2RhdGU+" - "CgkJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlj\n" - "a2V0LnN0YXRlRmxhZzwva2V5PgoJCQkJCTxpbnRlZ2VyPjA8L2ludGVnZXI+" - "CgkJCQk8L2RpY3Q\n" - "+CgkJCTwvYXJyYXk+CgkJPC9kaWN0PgoJCTxrZXk+" - "Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYX\n" - "QuUE1BZGp1c3RlZFBhcGVyUmVjdDwva2V5PgoJCTxkaWN0PgoJCQk8a2V5PmNvbS5hcHBsZS5w" - "c\n" - "mludC50aWNrZXQuY3JlYXRvcjwva2V5PgoJCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21" - "h\n" - "bmFnZXI8L3N0cmluZz4KCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0Lml0ZW1BcnJheT" - "w\n" - "va2V5PgoJCQk8YXJyYXk+" - "CgkJCQk8ZGljdD4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC5QYW\n" - "dlRm9ybWF0LlBNQWRqdXN0ZWRQYXBlclJlY3Q8L2tleT4KCQkJCQk8YXJyYXk+" - "CgkJCQkJCTxyZ\n" - "WFsPi0xODwvcmVhbD4KCQkJCQkJPHJlYWw+" - "LTE4PC9yZWFsPgoJCQkJCQk8cmVhbD43NzQ8L3Jl\n" - "YWw+CgkJCQkJCTxyZWFsPjU5NDwvcmVhbD4KCQkJCQk8L2FycmF5PgoJCQkJCTxrZXk+" - "Y29tLmF\n" - "wcGxlLnByaW50LnRpY2tldC5jbGllbnQ8L2tleT4KCQkJCQk8c3RyaW5nPmNvbS5hcHBsZS5wc" - "m\n" - "ludGluZ21hbmFnZXI8L3N0cmluZz4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQu" - "b\n" - "W9kRGF0ZTwva2V5PgoJCQkJCTxkYXRlPjIwMDctMDEtMzBUMjI6MDg6NDFaPC9kYXRlPgoJCQk" - "J\n" - "CTxrZXk+" - "Y29tLmFwcGxlLnByaW50LnRpY2tldC5zdGF0ZUZsYWc8L2tleT4KCQkJCQk8aW50ZWd\n" - "lcj4wPC9pbnRlZ2VyPgoJCQkJPC9kaWN0PgoJCQk8L2FycmF5PgoJCTwvZGljdD4KCQk8a2V5P" - "m\n" - "NvbS5hcHBsZS5wcmludC5QYXBlckluZm8uUE1QYXBlck5hbWU8L2tleT4KCQk8ZGljdD4KCQkJ" - "P\n" - "GtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNyZWF0b3I8L2tleT4KCQkJPHN0cmluZz5jb20" - "u\n" - "YXBwbGUucHJpbnQucG0uUG9zdFNjcmlwdDwvc3RyaW5nPgoJCQk8a2V5PmNvbS5hcHBsZS5wcm" - "l\n" - "udC50aWNrZXQuaXRlbUFycmF5PC9rZXk+" - "CgkJCTxhcnJheT4KCQkJCTxkaWN0PgoJCQkJCTxrZX\n" - "k+" - "Y29tLmFwcGxlLnByaW50LlBhcGVySW5mby5QTVBhcGVyTmFtZTwva2V5PgoJCQkJCTxzdHJpb" - "\n" - "mc+bmEtbGV0dGVyPC9zdHJpbmc+" - "CgkJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNs\n" - "aWVudDwva2V5PgoJCQkJCTxzdHJpbmc+" - 
"Y29tLmFwcGxlLnByaW50LnBtLlBvc3RTY3JpcHQ8L3N\n" - "0cmluZz4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQubW9kRGF0ZTwva2V5PgoJC" - "Q\n" - "kJCTxkYXRlPjIwMDMtMDctMDFUMTc6NDk6MzZaPC9kYXRlPgoJCQkJCTxrZXk+" - "Y29tLmFwcGxlL\n" - "nByaW50LnRpY2tldC5zdGF0ZUZsYWc8L2tleT4KCQkJCQk8aW50ZWdlcj4xPC9pbnRlZ2VyPgo" - "J\n" - "CQkJPC9kaWN0PgoJCQk8L2FycmF5PgoJCTwvZGljdD4KCQk8a2V5PmNvbS5hcHBsZS5wcmludC" - "5\n" - "QYXBlckluZm8uUE1VbmFkanVzdGVkUGFnZVJlY3Q8L2tleT4KCQk8ZGljdD4KCQkJPGtleT5jb" - "2\n" - "0uYXBwbGUucHJpbnQudGlja2V0LmNyZWF0b3I8L2tleT4KCQkJPHN0cmluZz5jb20uYXBwbGUu" - "c\n" - "HJpbnQucG0uUG9zdFNjcmlwdDwvc3RyaW5nPgoJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWN" - "r\n" - "ZXQuaXRlbUFycmF5PC9rZXk+CgkJCTxhcnJheT4KCQkJCTxkaWN0PgoJCQkJCTxrZXk+" - "Y29tLmF\n" - "wcGxlLnByaW50LlBhcGVySW5mby5QTVVuYWRqdXN0ZWRQYWdlUmVjdDwva2V5PgoJCQkJCTxhc" - "n\n" - "JheT4KCQkJCQkJPHJlYWw+MC4wPC9yZWFsPgoJCQkJCQk8cmVhbD4wLjA8L3JlYWw+" - "CgkJCQkJC\n" - "TxyZWFsPjczNDwvcmVhbD4KCQkJCQkJPHJlYWw+NTc2PC9yZWFsPgoJCQkJCTwvYXJyYXk+" - "CgkJ\n" - "CQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNsaWVudDwva2V5PgoJCQkJCTxzdHJpbm" - "c\n" - "+Y29tLmFwcGxlLnByaW50aW5nbWFuYWdlcjwvc3RyaW5nPgoJCQkJCTxrZXk+" - "Y29tLmFwcGxlLn\n" - "ByaW50LnRpY2tldC5tb2REYXRlPC9rZXk+CgkJCQkJPGRhdGU+" - "MjAwNy0wMS0zMFQyMjowODo0M\n" - "Vo8L2RhdGU+" - "CgkJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LnN0YXRlRmxhZzwva2V5\n" - "PgoJCQkJCTxpbnRlZ2VyPjA8L2ludGVnZXI+CgkJCQk8L2RpY3Q+CgkJCTwvYXJyYXk+" - "CgkJPC9\n" - "kaWN0PgoJCTxrZXk+" - "Y29tLmFwcGxlLnByaW50LlBhcGVySW5mby5QTVVuYWRqdXN0ZWRQYXBlcl\n" - "JlY3Q8L2tleT4KCQk8ZGljdD4KCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNyZWF0" - "b\n" - "3I8L2tleT4KCQkJPHN0cmluZz5jb20uYXBwbGUucHJpbnQucG0uUG9zdFNjcmlwdDwvc3RyaW5" - "n\n" - "PgoJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuaXRlbUFycmF5PC9rZXk+" - "CgkJCTxhcnJ\n" - "heT4KCQkJCTxkaWN0PgoJCQkJCTxrZXk+" - "Y29tLmFwcGxlLnByaW50LlBhcGVySW5mby5QTVVuYW\n" - "RqdXN0ZWRQYXBlclJlY3Q8L2tleT4KCQkJCQk8YXJyYXk+" - "CgkJCQkJCTxyZWFsPi0xODwvcmVhb\n" - "D4KCQkJCQkJPHJlYWw+LTE4PC9yZWFsPgoJCQkJCQk8cmVhbD43NzQ8L3JlYWw+" - "CgkJCQkJCTxy\n" - "ZWFsPjU5NDwvcmVhbD4KCQkJCQk8L2FycmF5PgoJCQkJCTxrZXk+" - "Y29tLmFwcGxlLnByaW50LnR\n" - "pY2tldC5jbGllbnQ8L2tleT4KCQkJCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbmFnZ" - "X\n" - "I8L3N0cmluZz4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQubW9kRGF0ZTwva2V5" - "P\n" - "goJCQkJCTxkYXRlPjIwMDctMDEtMzBUMjI6MDg6NDFaPC9kYXRlPgoJCQkJCTxrZXk+" - "Y29tLmFw\n" - "cGxlLnByaW50LnRpY2tldC5zdGF0ZUZsYWc8L2tleT4KCQkJCQk8aW50ZWdlcj4wPC9pbnRlZ2" - "V\n" - "yPgoJCQkJPC9kaWN0PgoJCQk8L2FycmF5PgoJCTwvZGljdD4KCQk8a2V5PmNvbS5hcHBsZS5wc" - "m\n" - "ludC5QYXBlckluZm8ucHBkLlBNUGFwZXJOYW1lPC9rZXk+CgkJPGRpY3Q+CgkJCTxrZXk+" - "Y29tL\n" - "mFwcGxlLnByaW50LnRpY2tldC5jcmVhdG9yPC9rZXk+CgkJCTxzdHJpbmc+" - "Y29tLmFwcGxlLnBy\n" - "aW50LnBtLlBvc3RTY3JpcHQ8L3N0cmluZz4KCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2" - "V\n" - "0Lml0ZW1BcnJheTwva2V5PgoJCQk8YXJyYXk+" - "CgkJCQk8ZGljdD4KCQkJCQk8a2V5PmNvbS5hcH\n" - "BsZS5wcmludC5QYXBlckluZm8ucHBkLlBNUGFwZXJOYW1lPC9rZXk+" - "CgkJCQkJPHN0cmluZz5VU\n" - "yBMZXR0ZXI8L3N0cmluZz4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuY2xpZW5" - "0\n" - "PC9rZXk+" - "CgkJCQkJPHN0cmluZz5jb20uYXBwbGUucHJpbnQucG0uUG9zdFNjcmlwdDwvc3RyaW5\n" - "nPgoJCQkJCTxrZXk+Y29tLmFwcGxlLnByaW50LnRpY2tldC5tb2REYXRlPC9rZXk+" - "CgkJCQkJPG\n" - "RhdGU+MjAwMy0wNy0wMVQxNzo0OTozNlo8L2RhdGU+" - "CgkJCQkJPGtleT5jb20uYXBwbGUucHJpb\n" - "nQudGlja2V0LnN0YXRlRmxhZzwva2V5PgoJCQkJCTxpbnRlZ2VyPjE8L2ludGVnZXI+" - "CgkJCQk8\n" - "L2RpY3Q+CgkJCTwvYXJyYXk+CgkJPC9kaWN0PgoJCTxrZXk+" - "Y29tLmFwcGxlLnByaW50LnRpY2t\n" - 
"ldC5BUElWZXJzaW9uPC9rZXk+CgkJPHN0cmluZz4wMC4yMDwvc3RyaW5nPgoJCTxrZXk+" - "Y29tLm\n" - "FwcGxlLnByaW50LnRpY2tldC5wcml2YXRlTG9jazwva2V5PgoJCTxmYWxzZS8+" - "CgkJPGtleT5jb\n" - "20uYXBwbGUucHJpbnQudGlja2V0LnR5cGU8L2tleT4KCQk8c3RyaW5nPmNvbS5hcHBsZS5wcml" - "u\n" - "dC5QYXBlckluZm9UaWNrZXQ8L3N0cmluZz4KCTwvZGljdD4KCTxrZXk+" - "Y29tLmFwcGxlLnByaW5\n" - "0LnRpY2tldC5BUElWZXJzaW9uPC9rZXk+Cgk8c3RyaW5nPjAwLjIwPC9zdHJpbmc+" - "Cgk8a2V5Pm\n" - "NvbS5hcHBsZS5wcmludC50aWNrZXQucHJpdmF0ZUxvY2s8L2tleT4KCTxmYWxzZS8+" - "Cgk8a2V5P\n" - "mNvbS5hcHBsZS5wcmludC50aWNrZXQudHlwZTwva2V5PgoJPHN0cmluZz5jb20uYXBwbGUucHJ" - "p\n" - "bnQuUGFnZUZvcm1hdFRpY2tldDwvc3RyaW5nPgo8L2RpY3Q+CjwvcGxpc3Q+" - "CjhCSU0D6QAAAAA\n" - "AeAADAAAASABIAAAAAALeAkD/7v/uAwYCUgNnBSgD/" - "AACAAAASABIAAAAAALYAigAAQAAAGQAAA\n" - "ABAAMDAwAAAAF//" - "wABAAEAAAAAAAAAAAAAAABoCAAZAZAAAAAAACAAAAAAAAAAAAAAAAAAAAAAA\n" - "AAAAAAAAAAAADhCSU0D7QAAAAAAEABIAAAAAQABAEgAAAABAAE4QklNBCYAAAAAAA4AAAAAAAA" - "A\n" - "AAAAP4AAADhCSU0EDQAAAAAABAAAAB44QklNBBkAAAAAAAQAAAAeOEJJTQPzAAAAAAAJAAAAAA" - "A\n" - "AAAABADhCSU0ECgAAAAAAAQAAOEJJTScQAAAAAAAKAAEAAAAAAAAAAThCSU0D9QAAAAAASAAvZ" - "m\n" - "YAAQBsZmYABgAAAAAAAQAvZmYAAQChmZoABgAAAAAAAQAyAAAAAQBaAAAABgAAAAAAAQA1AAAA" - "A\n" - "QAtAAAABgAAAAAAAThCSU0D+AAAAAAAcAAA/////////////////////////////wPoAAAAAP/" - "/\n" - "//////////////////////////8D6AAAAAD/////////////////////////////A+gAAAAA//" - "/\n" - "//////////////////////////" - "wPoAAA4QklNBAgAAAAAABAAAAABAAACQAAAAkAAAAAAOEJJTQ\n" - "QeAAAAAAAEAAAAADhCSU0EGgAAAAADRQAAAAYAAAAAAAAAAAAAAGQAAABkAAAACABEAFMAQwAw" - "A\n" - "DIAMwAyADUAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAGQAAABkAAAAAAAAAAA" - "A\n" - "AAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAEAAAAAAABudWxsAAAAAgAAAAZib3VuZH" - "N\n" - "PYmpjAAAAAQAAAAAAAFJjdDEAAAAEAAAAAFRvcCBsb25nAAAAAAAAAABMZWZ0bG9uZwAAAAAAA" - "A\n" - "AAQnRvbWxvbmcAAABkAAAAAFJnaHRsb25nAAAAZAAAAAZzbGljZXNWbExzAAAAAU9iamMAAAAB" - "A\n" - "AAAAAAFc2xpY2UAAAASAAAAB3NsaWNlSURsb25nAAAAAAAAAAdncm91cElEbG9uZwAAAAAAAAA" - "G\n" - "b3JpZ2luZW51bQAAAAxFU2xpY2VPcmlnaW4AAAANYXV0b0dlbmVyYXRlZAAAAABUeXBlZW51bQ" - "A\n" - "AAApFU2xpY2VUeXBlAAAAAEltZyAAAAAGYm91bmRzT2JqYwAAAAEAAAAAAABSY3QxAAAABAAAA" - "A\n" - "BUb3AgbG9uZwAAAAAAAAAATGVmdGxvbmcAAAAAAAAAAEJ0b21sb25nAAAAZAAAAABSZ2h0bG9u" - "Z\n" - "wAAAGQAAAADdXJsVEVYVAAAAAEAAAAAAABudWxsVEVYVAAAAAEAAAAAAABNc2dlVEVYVAAAAAE" - "A\n" - "AAAAAAZhbHRUYWdURVhUAAAAAQAAAAAADmNlbGxUZXh0SXNIVE1MYm9vbAEAAAAIY2VsbFRleH" - "R\n" - "URVhUAAAAAQAAAAAACWhvcnpBbGlnbmVudW0AAAAPRVNsaWNlSG9yekFsaWduAAAAB2RlZmF1b" - "H\n" - "QAAAAJdmVydEFsaWduZW51bQAAAA9FU2xpY2VWZXJ0QWxpZ24AAAAHZGVmYXVsdAAAAAtiZ0Nv" - "b\n" - "G9yVHlwZWVudW0AAAARRVNsaWNlQkdDb2xvclR5cGUAAAAATm9uZQAAAAl0b3BPdXRzZXRsb25" - "n\n" - "AAAAAAAAAApsZWZ0T3V0c2V0bG9uZwAAAAAAAAAMYm90dG9tT3V0c2V0bG9uZwAAAAAAAAALcm" - "l\n" - "naHRPdXRzZXRsb25nAAAAAAA4QklNBBEAAAAAAAEBADhCSU0EFAAAAAAABAAAAAE4QklNBAwAA" - "A\n" - "AACfkAAAABAAAAZAAAAGQAAAEsAAB1MAAACd0AGAAB/9j/4AAQSkZJRgABAgEASABIAAD/" - "7QAMQ\n" - "WRvYmVfQ00AAv/uAA5BZG9iZQBkgAAAAAH/" - "2wCEAAwICAgJCAwJCQwRCwoLERUPDAwPFRgTExUT\n" - "ExgRDAwMDAwMEQwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwBDQsLDQ4NEA4OEBQODg4UFA" - "4\n" - "ODg4UEQwMDAwMEREMDAwMDAwRDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDP/" - "AABEIAGQAZA\n" - "MBIgACEQEDEQH/3QAEAAf/xAE/" - "AAABBQEBAQEBAQAAAAAAAAADAAECBAUGBwgJCgsBAAEFAQEBA\n" - "QEBAAAAAAAAAAEAAgMEBQYHCAkKCxAAAQQBAwIEAgUHBggFAwwzAQACEQMEIRIxBUFRYRMicYE" - "y\n" - "BhSRobFCIyQVUsFiMzRygtFDByWSU/" - "Dh8WNzNRaisoMmRJNUZEXCo3Q2F9JV4mXys4TD03Xj80Y\n" - "nlKSFtJXE1OT0pbXF1eX1VmZ2hpamtsbW5vY3R1dnd4eXp7fH1+" - 
"f3EQACAgECBAQDBAUGBwcGBT\n" - "UBAAIRAyExEgRBUWFxIhMFMoGRFKGxQiPBUtHwMyRi4XKCkkNTFWNzNPElBhaisoMHJjXC0kST" - "V\n" - "KMXZEVVNnRl4vKzhMPTdePzRpSkhbSVxNTk9KW1xdXl9VZmdoaWprbG1ub2JzdHV2d3h5ent8f" - "/\n" - "2gAMAwEAAhEDEQA/" - "APLtso1NRc0vP0Rok8NYyPEfijOG2ljBoAJPxKFppZtbS4Rz38kV+OPRDge\n" - "T89EPHBfvLjtb3P8A30K/j47cgsrYNxGpPYJpK8RtyXUlvPfsobV0GV0uippLiX3EaMb2/" - "rKgMB\n" - "1ghoiNST4BESCjjLmxqmKtvxiXQ0cd0q8E2bjIDWjk9z5I8QW8JaoHcdkUePZJtZD9p8YU/" - "Rsc/\n" - "wBNjS5zjDWjUk+SSKYaJLYq+qWeYGQ5lBPLJ3OA8wz2/wDSWni/" - "U3H2AXW2l2oloa0f9LcjSLeU\n" - "hJdb/wAyqd387Zt+DZ5SSpVh/9DzO6dw7gGPuVn6ft/" - "kyPkqwlxjw1Rnh24QNWjUeR5TSuDc6bg\n" - "fatpsJZQ3sNC4rWfkVYpbi4LAb3aANEkFLp7GHGYxuhAj4K/" - "hYVNDjYGzZ++eSSoSbLZjGgwxul\n" - "XNrPqO35FukdmzyXOQeqtqwqRg4o/SOAN9ng3/" - "AMzW02txZ9I+ZHKr241UOcWDaz3uLtSSPEpWu\n" - "rR5XPeylmyNr4BIPPCyH2Oc6T8kXNvddkPe/" - "VzjJPxQAJMKeIoNScrPk2MbfddXUNXvcGtPx0Xb\n" - "dJ6NXjOD2Dfdw6w9v5LFW+q/1WLA3Ly9LSJaz91p/" - "wDRjl2lOLWwAMbEJErWjRgESYieVdZhsMF\n" - "wMt08ldrx/vVivHaOdSgCoud9krmElpba93ASTlr/AP/R83ohr97voiJV/" - "Fq9QvsI+mdPgs1thc\n" - "BWO5C38CoOY1g78qOejLiGvknxLAyGtExp5K9uzGt9RrNw7DhRfQKKx6bZIGgPj4rPycLqWVtI" - "s\n" - "JGu5skDyTBRZtQNrb1fU8xrtpBaO4MLQxcx1sNuEjt5rMGJR9noY5hF7Wxa8aAnxVvDb6bgHH2" - "z\n" - "omk0e64ajUUXnev9Idi5rrWAux7SXNd4E/muS+rHSjm9VbPtZjj1CSJBI+g3+0uh69b+iDG/" - "QcD\n" - "u0nQCeFP6l0MZhWX/" - "AJ1xM+QafY1TQlY1a+WABsdXp8Sp27aBH+vZaVbC0ADlVcASwtdOolp8Ct\n" - "BjmtGv0uI8EmJmxkIjWkmPEKLSPiidxIgJKRbDPCSN5pJyH//S87uw/" - "suZ6c72iC13kVs9PdDmk\n" - "KllVziV3cuafc7yP0QjYFh26cqM6hsxAjIj6u6xzbDHh3R663AaceH+" - "5BwdruVp2PqZUA0a9yo6\n" - "DPQajscnXb8YQdzC8H909joiZttoxoBIa4gGOQ3uqh+z1RuD2Ds4j2n+39FNKaFMevS/" - "p5LPpSA\n" - "I8/b/ABW70THZXj11VjaIAIHgFl5VdD8AneDMaec6Lb6QAKmu7x+VSw2a3MdF/" - "rF9YKeh4B9OHZ\n" - "lpAprPH8p7v5DFwrPrV9YDa645toLjMaFo8mtcEvrhkWZHXbg7T0Q2to8o3f8AfkarEitlVTKn" - "Y\n" - "ra992QQ2wOfG57H2bg7H2fzbFKA130P6n9dHWemCx5H2mk7LgPH818f8IuhAka6ea8y/" - "wAWrcod\n" - "VyrceRhBsPae5J/Qj+sxq9KDpMuMuKBCEntnny/1CSaWxM6pIKf/0/" - "MvtF3pCrefTBnbOi1elP3\n" - "Et8Vi+Sv9LuNd4HwTSNGSEqkLerwtwn+SYKtOeS4A8Krh2j1D/" - "K1Vu4B5gaDmVDJtAr7zYYGoRB\n" - "QDWQ4c9h/" - "csuyjI3fobnDyJR8fF6ltcTaXRwCkAuAsfMGr1TGFNdTmEwLWS2dIK6npLgK2T4Lle\n" - "pUZxoc9+6K4eR5NO5bPT73NoqIfoILT/JcFJDZr8zGiNXnvrfiur6/" - "Y8tht7WvaexgbXf8AUrFt\n" - "8IExyvRusYDOsYTAIbfWdzHRJ8HN/" - "tLj7OgdRZawmreHP2gt9wEfvtH0f7SkDXe7+o+AOn9DquL\n" - "f0mV+leQPH6H+axafUvrB07ptJtyshtTZDTEudJ7bWS5V6MmyltVLn7ht2hwECQP+isb60/" - "Vqvr\n" - "tbLsa1lObVIJd9Gxv5rXx9F7fzHpIbf/jgfVnd/TLYj6XoOhJcP/zE+sOzd6dW7dt2eo3dH7/" - "9R\n" - "JJWj//" - "U8uiGFx76BFZLQ2xvLeVGAWQrFDJbtKBSHd6blNura4H3BbDXB7InVcZXZdh2bmTt7hbO\n" - "J1dj2gzCjlFnhPod3WLHB+" - "n3o9ZsAkFVMfMrs7orLmgkHUdkyqZQQWWQbLGlrjMjUeSrfV3Ltsw\n" - "30EBzcd5YCedvLETJya66nWOIAaCVnfU/" - "KuZn21CDVa02PngQdHf9LapMfVhzkaAPUUW3M91YaR\n" - "3YDJ+WiBmZGazPo9Kttdt2j63E6s/fft/d/NWjXkMra7KtO2qkE6cErHpvsyMmzPu0dY4Bg/" - "dYP\n" - "otTpyoaMUI2XUya8tzG/pi0NMtICo/" - "bsut21gdcWclkj5OncxaDrw6kM+9QxQzaWRAGii4pDqzC\n" - "MT02aX7WzPU9b7PrG3bvO6P6yStfZm+pHnPySS4590+3jf/" - "V8yb+CsUbp8uyz0kDskbu2dmz9J8\n" - "lSt9Ld+gn1O8cKikmxXydbH+3bhsmfwWj/lONYlcwkhL6L4bfpOxn/tD0/wBN/N944Wh9VJm/" - "b/\n" - "O+347df+/rl0k+O38GLJ83X/CfTOt7v2dV6P8AMbx6njHb/" - "pKuN3pN2+IXnaSjybr8e31fUqd+0\n" - "Sj487DHMryZJMXjq+sfpPX84SXk6SSX/" - "9kAOEJJTQQhAAAAAABVAAAAAQEAAAAPAEEAZABvAGIA\n" - "ZQAgAFAAaABvAHQAbwBzAGgAbwBwAAAAEwBBAGQAbwBiAGUAIABQAGgAbwB0AG8AcwBoAG8AcA" - "A\n" - "gADcALgAwAAAAAQA4QklNBAYAAAAAAAcABQAAAAEBAP/" - "hFWdodHRwOi8vbnMuYWRvYmUuY29tL3\n" - "hhcC8xLjAvADw/eHBhY2tldCBiZWdpbj0n77u/" - 
"JyBpZD0nVzVNME1wQ2VoaUh6cmVTek5UY3prY\n" - "zlkJz8+Cjw/YWRvYmUteGFwLWZpbHRlcnMgZXNjPSJDUiI/" - "Pgo8eDp4YXBtZXRhIHhtbG5zOng9\n" - "J2Fkb2JlOm5zOm1ldGEvJyB4OnhhcHRrPSdYTVAgdG9vbGtpdCAyLjguMi0zMywgZnJhbWV3b3" - "J\n" - "rIDEuNSc+" - "CjxyZGY6UkRGIHhtbG5zOnJkZj0naHR0cDovL3d3dy53My5vcmcvMTk5OS8wMi8yMi\n" - "1yZGYtc3ludGF4LW5zIycgeG1sbnM6aVg9J2h0dHA6Ly9ucy5hZG9iZS5jb20vaVgvMS4wLyc+" - "C\n" - "gogPHJkZjpEZXNjcmlwdGlvbiBhYm91dD0ndXVpZDoyMmQwMmIwYS1iMjQ5LTExZGItOGFmOC0" - "5\n" - "MWQ1NDAzZjkyZjknCiAgeG1sbnM6cGRmPSdodHRwOi8vbnMuYWRvYmUuY29tL3BkZi8xLjMvJz" - "4\n" - "KICA8IS0tIHBkZjpTdWJqZWN0IGlzIGFsaWFzZWQgLS0+" - "CiA8L3JkZjpEZXNjcmlwdGlvbj4KCi\n" - "A8cmRmOkRlc2NyaXB0aW9uIGFib3V0PSd1dWlkOjIyZDAyYjBhLWIyNDktMTFkYi04YWY4LTkx" - "Z\n" - "DU0MDNmOTJmOScKICB4bWxuczpwaG90b3Nob3A9J2h0dHA6Ly9ucy5hZG9iZS5jb20vcGhvdG9" - "z\n" - "aG9wLzEuMC8nPgogIDwhLS0gcGhvdG9zaG9wOkNhcHRpb24gaXMgYWxpYXNlZCAtLT4KIDwvcm" - "R\n" - "mOkRlc2NyaXB0aW9uPgoKIDxyZGY6RGVzY3JpcHRpb24gYWJvdXQ9J3V1aWQ6MjJkMDJiMGEtY" - "j\n" - "I0OS0xMWRiLThhZjgtOTFkNTQwM2Y5MmY5JwogIHhtbG5zOnhhcD0naHR0cDovL25zLmFkb2Jl" - "L\n" - "mNvbS94YXAvMS4wLyc+" - "CiAgPCEtLSB4YXA6RGVzY3JpcHRpb24gaXMgYWxpYXNlZCAtLT4KIDwv\n" - "cmRmOkRlc2NyaXB0aW9uPgoKIDxyZGY6RGVzY3JpcHRpb24gYWJvdXQ9J3V1aWQ6MjJkMDJiMG" - "E\n" - "tYjI0OS0xMWRiLThhZjgtOTFkNTQwM2Y5MmY5JwogIHhtbG5zOnhhcE1NPSdodHRwOi8vbnMuY" - "W\n" - "RvYmUuY29tL3hhcC8xLjAvbW0vJz4KICA8eGFwTU06RG9jdW1lbnRJRD5hZG9iZTpkb2NpZDpw" - "a\n" - "G90b3Nob3A6MjJkMDJiMDYtYjI0OS0xMWRiLThhZjgtOTFkNTQwM2Y5MmY5PC94YXBNTTpEb2N" - "1\n" - "bWVudElEPgogPC9yZGY6RGVzY3JpcHRpb24+" - "CgogPHJkZjpEZXNjcmlwdGlvbiBhYm91dD0ndXV\n" - "pZDoyMmQwMmIwYS1iMjQ5LTExZGItOGFmOC05MWQ1NDAzZjkyZjknCiAgeG1sbnM6ZGM9J2h0d" - "H\n" - "A6Ly9wdXJsLm9yZy9kYy9lbGVtZW50cy8xLjEvJz4KICA8ZGM6ZGVzY3JpcHRpb24+" - "CiAgIDxyZ\n" - "GY6QWx0PgogICAgPHJkZjpsaSB4bWw6bGFuZz0neC1kZWZhdWx0Jz4gICAgICAgICAgICAgICA" - "g\n" - "ICAgICAgICAgICAgICAgPC9yZGY6bGk+" - "CiAgIDwvcmRmOkFsdD4KICA8L2RjOmRlc2NyaXB0aW9\n" - "uPgogPC9yZGY6RGVzY3JpcHRpb24+Cgo8L3JkZjpSREY+" - "CjwveDp4YXBtZXRhPgogICAgICAgIC\n" - "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" - "I\n" - "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICA" - "g\n" - "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" - "A\n" - "gICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" - "C\n" - "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" - "I\n" - "CAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" - "g\n" - "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" - "A\n" - "gCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" - "C\n" - "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAg" - "I\n" - "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" - "g\n" - "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgIC" - "A\n" - "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" - "C\n" - "AgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" - "I\n" - "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" - "g\n" - "ICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" - "A\n" - "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" - "A\n" - "ogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" - "I\n" - 
"CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICA" - "g\n" - "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" - "A\n" - "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgI" - "C\n" - "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" - "I\n" - "CAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" - "g\n" - "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" - "A\n" - "gICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" - "C\n" - "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAK" - "I\n" - "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" - "g\n" - "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgIC" - "A\n" - "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" - "C\n" - "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAg" - "I\n" - "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" - "g\n" - "ICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" - "A\n" - "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" - "C\n" - "AgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" - "I\n" - "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiA" - "g\n" - "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" - "A\n" - "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgI" - "C\n" - "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" - "I\n" - "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICA" - "g\n" - "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" - "A\n" - "gICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" - "C\n" - "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" - "I\n" - "CAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" - "g\n" - "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogIC" - "A\n" - "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" - "C\n" - "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAg" - "I\n" - "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" - "g\n" - "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgIC" - "A\n" - "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" - "C\n" - "AgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" - "I\n" - "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" - "g\n" - "ICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" - "A\n" - "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgI" - "C\n" - "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" - "I\n" - "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICA" - "g\n" - "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" - "A\n" - "gICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgI" - "C\n" - "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" - "I\n" - 
"CAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" - "g\n" - "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" - "A\n" - "gICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" - "C\n" - "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAg" - "I\n" - "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" - "g\n" - "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgIC" - "A\n" - "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" - "C\n" - "AgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAg" - "I\n" - "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" - "g\n" - "ICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" - "A\n" - "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" - "C\n" - "AgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" - "I\n" - "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICA" - "g\n" - "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" - "A\n" - "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgI" - "C\n" - "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKPD94cGFja2V0IGVuZD0ndyc/" - "P\n" - "v/uAA5BZG9iZQBkQAAAAAH/" - "2wCEAAQDAwMDAwQDAwQGBAMEBgcFBAQFBwgGBgcGBggKCAkJCQkI\n" - "CgoMDAwMDAoMDAwMDAwMDAwMDAwMDAwMDAwMDAwBBAUFCAcIDwoKDxQODg4UFA4ODg4UEQwMDA" - "w\n" - "MEREMDAwMDAwRDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDP/" - "AABEIAGQAZAMBEQACEQEDEQ\n" - "H/3QAEAA3/" - "xAGiAAAABwEBAQEBAAAAAAAAAAAEBQMCBgEABwgJCgsBAAICAwEBAQEBAAAAAAAAA\n" - "AEAAgMEBQYHCAkKCxAAAgEDAwIEAgYHAwQCBgJzAQIDEQQABSESMUFRBhNhInGBFDKRoQcVsUI" - "j\n" - "wVLR4TMWYvAkcoLxJUM0U5KismNzwjVEJ5OjszYXVGR0w9LiCCaDCQoYGYSURUaktFbTVSga8u" - "P\n" - "zxNTk9GV1hZWltcXV5fVmdoaWprbG1ub2N0dXZ3eHl6e3x9fn9zhIWGh4iJiouMjY6PgpOUlZa" - "X\n" - "mJmam5ydnp+" - "So6SlpqeoqaqrrK2ur6EQACAgECAwUFBAUGBAgDA20BAAIRAwQhEjFBBVETYSIGc\n" - "YGRMqGx8BTB0eEjQhVSYnLxMyQ0Q4IWklMlomOywgdz0jXiRIMXVJMICQoYGSY2RRonZHRVN/" - "Kj\n" - "s8MoKdPj84SUpLTE1OT0ZXWFlaW1xdXl9UZWZnaGlqa2xtbm9kdXZ3eHl6e3x9fn9zhIWGh4iJ" - "i\n" - "ouMjY6Pg5SVlpeYmZqbnJ2en5KjpKWmp6ipqqusra6vr/2gAMAwEAAhEDEQA/" - "APBnplwPAdR+GB\n" - "KY6dYtNG1w39yh4+xb+zIksgEfFaRSSoIx8f7RPRRkSWQimM+lRmwWVXFWYigHxUUVoMiJM+" - "Fj0\n" - "tg0RBegLE0Wu+3c+GTBazFCGI7HtSp9slbFYYzyoBsegw2hY1Afl3wqqRqahk+" - "0tDgKpgu4DAUU\n" - "+HY+GRS2ePiMKtUB3G+KGuONq//" - "Q8OzpFbW5WnxMop4k9crG5ZnZNJkEOn21utVRYw7HxZtz+OR\n" - "vdsrZ2lRtci4aVxFEQA0neg/" - "ZXxJpTITNNuOFss0vSotYNvZ2qGRkPKSTqiU8Sdqk5SZU5Ix8XJ\n" - "NNZ8k6bp8TtM73OputUtYq0Unux/" - "hkRkJOzZLCAN2KR+VpbtSkCBaDnIzdlWu59u+XeJTjeASk8\n" - "+juZOESEAVqx8BvU/" - "PJibScTrTy09560hkWOGFd2YgFnPQKD19zhOSkxw2l8Vm6XAiYb8gg+k5O\n" - "9mnhoon9H3cs5s7WF5pp29OGGMFndyaAKBuTiEEPQLD8h/" - "NDmNdYlttNkYjlbFjcXCr3LLH8II8\n" - "C2WUGviZvon/OPWkm3RNSv72SYllMkKxQRV67CQMSKYQAxMkR/" - "wBC56d61P0heel4cYuVOXWvTp\n" - "h4Qjjf/9Hw5qBYyISaqjBV+QpvkAzKcki4HomnIxck/" - "wBhtlR2bhunvlDywddMUl4zW+kQ9FQ8X\n" - "nfuSewrtmPkycPvc/" - "DhMhvyegXOrWWhmLQPKlsj6xIAiLCoZkY96nv7npmJvI2XOjQFMl0fyRqM\n" - "NoxvZvrGt33wlATwiMnVnY1LEdSfuyXF3KIDmUu88w2XlnTl8raAlb2ZFfVL0jdYRtQnxc7BfD" - "C\n" - "OaJR7nm3me5tdOtjbMvp3ZRXkV6chVQRX79hmVjgZG+" - "jgZ5jHGhzecXF5LPL6jEjstSSaDM51Ka\n" - "6MZ9S1C0sEBe8uZo4YCBXdjxGw60wEWyEqfUHkT8vLXRJFuLdTcaqfhlvWUErtukZ3ABPUjIXT" - "E\n" - "m3rGmeV2Tk5UKz/AG/E/wAcgZKya20C3b02kjYtH8AqCygbkUH0nLYlgUb+gbWtPbpXt/" - 
"n2ybB/\n" - "/9Lw4oaVxGd+PxH3qBkGaY3KyiSP01IkiUclH8sg+LKydm6INvZvKsFu+" - "kWtvD8LRoFNRup6moO\n" - "aqd277HsGW+XPLmn6XM17FF6l7vW4fd2Zuu+" - "RFls2tmUNrLJb7TSBertGQGqetDkxE0na0pvtHs\n" - "QkszWyiGAG5laYlnkeMVHJj8sA5rPk+SvMepTalqlxd3B5zTOXdj/" - "MxqafLpm5xioh5nPK5kpRG\n" - "pkcKAST0A6k5NpfUP5K/ki1ssHmHzF+71KRQ8Nud/Qibb/kYw6/" - "yjbrXISlSH07YaHbWyxx2kXE\n" - "KACB2zHJtLI7XSelBRvH2xCpvaaTDHXkOTVBPcUG2479RlsdmJVPRtvV+ylenQ0y62FP/" - "9PxRpo\n" - "WG5FxKKxKFDA+GVS5NsebLdFsRePc3siVW4f4QR0QVAGYeSXR2unhtZ6s60K6jt+MMSFwtF2+" - "xX\n" - "wr7eGUGLlRPQMsE2vxQm7itxKg3VCfT2+" - "nb8cDYaCDtfOXmCCcROrQrUhkkCHYn6emRMqZxjbLd\n" - "F1+W/" - "4xajHzjNCtQKMffETWUdngX5p+QZ9A8xS6hbo0ui37NNDPT7DOalHpsCD08Rmyw5ARTpdV\n" - "gIPEF35MeRn80ed4S5EdrpKm9kZ15K0iH92hB7Me/tmS60vt/" - "QrCYyekiBdgSTXcjqV9q9MokFD\n" - "N7S3aFVVR8RoK9zldqndvAY6nffr/AGYQqLhjdpCoIAZW22HavU/LJBUP9WblX0xTw7fOmWsX/" - "9\n" - "Tw7FdvMqWkQ3Z1qfED+mQIbI77PX/" - "LFis9vBajZm2Y+x65rMh3t30Bsze400aVaIbSLk6r8CMRT\n" - "l/" - "NmOcllnGDD9Y8uecNfEEiXrMgDGWAyGOOu5WlB+" - "vMrHODTlxZCdjsyFdB006VpVtLasurQxBL\n" - "64WiLI4/" - "aFT1ANOXemV5piR2b9NiljB4yyHy9CLOVI5GJhB+CvXY9R8xmINzs5HNZ+Z96BZpbxA\n" - "fVJo39UFefwopYgL4nMiMd2qZoIn/AJx00u3t/" - "Lt7qpp9Yv5GLf5MUTERqfbvmzBeezjd9H+VlL\n" - "wSQzBqsvOGQD7L12rXsemPNxmXQSxxIPU2nFV4HYqR1xEUWj4ZAxBryr2G+" - "J2VGDZlLrxUH6KZA\n" - "Fkqb15VFelfwy+2FP8A/" - "9Xxlf6AdA182Yk9eFeLxSjoVfcfSMo4uIOfkweFOnpvlWYrLEwNFAA+\n" - "nMOYdrhFvQLeSO7coBXiK8iKiv07Zj8Ac4QtNrW1njUcKcT+yAR/" - "xGmR4WcsStLpTuPU9IFaEsV\n" - "BP3k4m2AgBzSwyQNcIwNTE1aI3wnam9O2Ug7s5Ckk/" - "NDndeVXa2H78MqqV6jmeBp9+ZWKXqDjZ4\n" - "+gvVvy30qCy0qzsLRBCnBI2VdgUTqPvOZ7y+Q7pz+bn5q6d+VflZxZlJ/" - "NN4ypptk5qtB9qRwDX\n" - "gn/AAx2y2ItpfKFv+eH5qNeTajJ5ovVaVywSqvEtTUKqupAA6D2y0BNPtv/AJx//" - "M5PzL8mJeXT\n" - "L+ndPf6rqarSpkAqsnEAAeoN6DpkJRYci9lROSgSUUH9o9K5Tw0ztfSHnXkOtK9q+PHwydq//" - "9b\n" - "yxrVoZNBtNSA5zRMPXmH8j0CLXuBmHE+" - "qneamHpEuqYeV7pzFVTRgQK5XMNmnlb1vyyY5QA1OwJ\n" - "+eUF2seTOLu5s7azVIVAkpVn/" - "hhnIALG73Yz5jvb1dICqzpDNIqyFD8SxH7R28cxibZCiWOsdJs\n" - "PTM6XNstPhnkjIhcHuJBVfvOCiUSn0TfWrTTLjyw8guA/PifTO3xcxxA8a5ZAbimvJP0m3p/" - "kFF\n" - "WxhmpWQJ9NW3zZPHz5vlb/nIDVbrWfzO1RJhxGnpDaRL/" - "khA1T7ktmSOTAJhZaAUtLawsbayl8v\n" - "xWi3Gpay0cF3HPcFRJJHJMXVrcJ8UaAFG5LWjF8tAYW9H/wCcOo9bTzxrt/" - "owkTyksZW5gkIKvI\n" - "7k26nvyReRJHyyBWT7dWQyOWlbnK2526e1O1MqIUFE84uPLkOdK9RXI0E2/wD/1/DA1bURZLY/" - "W\n" - "ZDZqwb0eXw7dMgIi7bjllVXsz7yNcfWC0Vd3Ip92Y2UOz0cnsPlwyx8xQ/" - "u24sMxCadoJp9LOXk\n" - "VX/" - "uwRUE0BI8cokbLMyoKouHu2MaKGXw7fLDwgoGSkbHpaNZyLLHRSKcFFQQRvUdMlwUFOQyLzr\n" - "ztpCaba6fPau4ijv4OURY8AjVFKV7ZZiO+7Vnh6XvXkSWNbW2WTb92KDxIFMzwHlZc3zX+" - "fuizW\n" - "f5p3ty8XGDU4YLmCQiisyII3+4rvl8UB5ffEghRGvOm7AbnvWvjk1fen/" - "ONPldPKP5aWOpPCfr2\n" - "uE31y6q2wbaMEn+VAMDSdyzrzj+avlHyTp0l/r2rxWFuHWJuIeacu4qFCRgsajfBwsty89/" - "6Gr/\n" - "ACa9an+JL/hSnrfoubhXwpXpjwhaL//" - "Q8E1AqtcAZMs8l6i1nqMa1oSVP0VynKLDmaWdSfQXl69\n" - "jF1Jv8MhDb5rpB3AO7INRRLhhGp4R05FgaGvTMU8200xS70zVDMRp2pTIOvBmB3PgQP15kxIcn" - "D\n" - "LH/" - "EEz0rRvOJhldr9pQtCqyd6VrShGTqw5d4ARv9jHfOGl+ZJNMluLkyenaFbiRdqFYW5nrWuwO\n" - "MKB5MdSMRxnhlu9N8p6lLFpti63FUjCtFJTrDKvse2bEDZ4XJ9RZB+YPli2/" - "Mjy5bxoUi1a0YS2\n" - "85UOwIXiy9jRu+TBppfOF1+V3m22vrdpNPM8cs/oo0VJlUqQPjValR3+IZNNvtLS9Yu9Mi0/" - "TJr\n" - "kyp6QhWVVCIWRATsKBemwwFrDzT87fybs/" - "wA1bW21PRb+DTvNlgGSRp6iC8i3KJJx+y6n7D0Pwm\n" - "hxBZXT55/6Fi/Nf0PW+qWXq+t6X1X67F6vD/ftK04V/wBl344U8b//" - "0fBapxheVh9ocV+nviqY2\n" - "/qQJDew/" - "bioWHiuQ8m0bbvaPKGtQ6jaxSo9JloCK75gZI0Xb4sgkHo8MouoAvP94BsRmGY7uWJU\n" - 
"gzbypOQpNOvIdK4Nw2WCE2tXulTkjEEbdafgclxMhFBas93dwyQzsWDghlJFONKHJCZtjOFBJf" - "y\n" - "j1y9vPL9zpbIs0WkXL2sUjA8hDXlGCRXtt07ZuYvL5KJeo6bfajbkzWkcToR8dqshZ6in2fhNK" - "/\n" - "PDTUlXmHVvMdr5o0v9H2kdrqGpfu7m0nkY87Uf7tkKAU4/" - "s03ynLkEBbfihx7dGT6va67LbRMNR\n" - "aKOBuUTKgIBXoK1BOYR1M3aQ0mOt9yxUeZNdtJhFapLqMluSXkg5oxJrUMW5KevQ9MmNXXNqOi" - "H\n" - "Rr/Hmv8A1r9I/oj95w+r+j9Yf1+NP5+nXtTD+dF8tkfkOlv/0vC3ph7f0/" - "alcVTbS4A8QibuKb5\n" - "RI05EBYRFpdX3ly79a2qYCavH/" - "EY7TCYyMD5PSdD8+wXUSn1ArDqOhBzFlipz4ZwWbaV5htbsgF\n" - "qg9crMXKErGyYwajFGzxyHlGSePbbwyqg5UZlCaxrFpaWU95LIqrEjMAT4Dp9OShGy1ZslBhv/" - "A\n" - "Dj9rd/a+aL+xUK+m38L3d0HrxRo2HFtu5D8c27y8t30raarbWkU+u6g4gsNORn+EcUaSh2Pc0/" - "4\n" - "lgtAjezzbT9SutY1i782al8Nxdyotqh6xWybIg+jc5q8s+I27bFDgFPQp9RE+nrag70+" - "L6crrZu\n" - "4jajokdv6LW/Dii1Wo61PXKQN3KPK0L+h4/rnD/K5V78a5LhXxd3/0/" - "DMXXtwxVNtL9Xkaf3f7N\n" - "etfbKMjdjtkZ9D6ufrlK0+HpX8coF9HJ26sXvfqXrf7i/U+uften/d/" - "wCyrmQL6uOav0pvpP8Ai\n" - "b1F+rV59+vH6a5XLhcjH4nRmY/xpxHP0/UptWvT6Mx/RbmjxWK+aP8AFf1M/" - "pCv1Kvxen9inavf\n" - "MrFwXtzcLUeLXq5Mv/I3nz1b0v8AjofuKVry9KrUpTanOlf9jmQ68va/zH9b/COn/o7/" - "AI431mP\n" - "65SvLh+zWvbl9rMfNfC34K4kmj9T6lD6FKclp/DNYXZx5srsPrHor6nXvkgxTPS/" - "U+rv6dPU5mt\n" - "fngFN5ulv+l/pL/Lp/scerHo//2Q==\n"; - -static std::string gCommandLine; - -TEST(Base64, LargeSample) { - RTC_LOG(LS_VERBOSE) << "Testing specific base64 file"; - - char unescaped[64 * 1024]; - - // unescape that massive blob above - size_t size = Base64Unescape(SpecificTest, sizeof(SpecificTest), unescaped, - sizeof(unescaped)); - - EXPECT_EQ(size, sizeof(testbase64)); - EXPECT_EQ(0, memcmp(testbase64, unescaped, sizeof(testbase64))); +TEST(Base64Test, EncodeDecode) { + std::string data{0x01, 0x02, 0x03, 0x04, 0x05}; + EXPECT_THAT(Base64Decode(Base64Encode(data)), Optional(Eq(data))); } -bool DecodeTest(const char* encoded, - size_t expect_unparsed, - const char* decoded, - Base64::DecodeFlags flags) { - std::string result; - size_t consumed = 0, encoded_len = strlen(encoded); - bool success = - Base64::DecodeFromArray(encoded, encoded_len, flags, &result, &consumed); - size_t unparsed = encoded_len - consumed; - EXPECT_EQ(expect_unparsed, unparsed) - << "\"" << encoded << "\" -> \"" << decoded << "\""; - EXPECT_STREQ(decoded, result.c_str()); - return success; +TEST(Base64Test, DecodeCertificate) { + // Certificate data often contains newlines, which are not valid base64 + // characters but parsable using the forgiving option. 
+ constexpr absl::string_view kExampleCertificateData = + "MIIB6TCCAVICAQYwDQYJKoZIhvcNAQEEBQAwWzELMAkGA1UEBhMCQVUxEzARBgNV\n" + "BAgTClF1ZWVuc2xhbmQxGjAYBgNVBAoTEUNyeXB0U29mdCBQdHkgTHRkMRswGQYD\n" + "VQQDExJUZXN0IENBICgxMDI0IGJpdCkwHhcNMDAxMDE2MjIzMTAzWhcNMDMwMTE0\n" + "MjIzMTAzWjBjMQswCQYDVQQGEwJBVTETMBEGA1UECBMKUXVlZW5zbGFuZDEaMBgG\n" + "A1UEChMRQ3J5cHRTb2Z0IFB0eSBMdGQxIzAhBgNVBAMTGlNlcnZlciB0ZXN0IGNl\n" + "cnQgKDUxMiBiaXQpMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAJ+zw4Qnlf8SMVIP\n" + "Fe9GEcStgOY2Ww/dgNdhjeD8ckUJNP5VZkVDTGiXav6ooKXfX3j/7tdkuD8Ey2//\n" + "Kv7+ue0CAwEAATANBgkqhkiG9w0BAQQFAAOBgQCT0grFQeZaqYb5EYfk20XixZV4\n" + "GmyAbXMftG1Eo7qGiMhYzRwGNWxEYojf5PZkYZXvSqZ/ZXHXa4g59jK/rJNnaVGM\n" + "k+xIX8mxQvlV0n5O9PIha5BX5teZnkHKgL8aKKLKW1BK7YTngsfSzzaeame5iKfz\n" + "itAE+OjGF+PFKbwX8Q==\n"; + + EXPECT_THAT( + Base64Decode(kExampleCertificateData, Base64DecodeOptions::kForgiving), + Optional(SizeIs(493))); + EXPECT_THAT( + Base64Decode(kExampleCertificateData, Base64DecodeOptions::kStrict), + Eq(std::nullopt)); } -#define Flags(x, y, z) \ - Base64::DO_PARSE_##x | Base64::DO_PAD_##y | Base64::DO_TERM_##z - -TEST(Base64, DecodeParseOptions) { - // Trailing whitespace - EXPECT_TRUE(DecodeTest("YWJjZA== ", 1, "abcd", Flags(STRICT, YES, CHAR))); - EXPECT_TRUE(DecodeTest("YWJjZA== ", 0, "abcd", Flags(WHITE, YES, CHAR))); - EXPECT_TRUE(DecodeTest("YWJjZA== ", 0, "abcd", Flags(ANY, YES, CHAR))); - - // Embedded whitespace - EXPECT_FALSE(DecodeTest("YWJjZA= =", 3, "abcd", Flags(STRICT, YES, CHAR))); - EXPECT_TRUE(DecodeTest("YWJjZA= =", 0, "abcd", Flags(WHITE, YES, CHAR))); - EXPECT_TRUE(DecodeTest("YWJjZA= =", 0, "abcd", Flags(ANY, YES, CHAR))); +struct Base64DecodeTestCase { + std::string name; + std::string data; + std::optional result; +}; - // Embedded non-base64 characters - EXPECT_FALSE(DecodeTest("YWJjZA=*=", 3, "abcd", Flags(STRICT, YES, CHAR))); - EXPECT_FALSE(DecodeTest("YWJjZA=*=", 3, "abcd", Flags(WHITE, YES, CHAR))); - EXPECT_TRUE(DecodeTest("YWJjZA=*=", 0, "abcd", Flags(ANY, YES, CHAR))); +const Base64DecodeTestCase kBase64DecodeTestCases[] = { + {"InvalidCharacters", "invalid;;;", std::nullopt}, + {"InvalidLength", "abcde", std::nullopt}, + {"ValidInput", "abcd", "i\xB7\x1D"}, + {"ValidInputPadding", "abc=", "i\xB7"}, + {"EmptyInput", "", ""}, +}; - // Unexpected padding characters - EXPECT_FALSE(DecodeTest("YW=JjZA==", 7, "a", Flags(STRICT, YES, CHAR))); - EXPECT_FALSE(DecodeTest("YW=JjZA==", 7, "a", Flags(WHITE, YES, CHAR))); - EXPECT_TRUE(DecodeTest("YW=JjZA==", 0, "abcd", Flags(ANY, YES, CHAR))); +using Base64DecodeTest = TestWithParam; +INSTANTIATE_TEST_SUITE_P( + Base64DecodeTest, + Base64DecodeTest, + testing::ValuesIn(kBase64DecodeTestCases), + [](const auto& info) { return info.param.name; }); + +TEST_P(Base64DecodeTest, TestDecodeStrict) { + absl::string_view data = GetParam().data; + EXPECT_THAT(Base64Decode(data, Base64DecodeOptions::kStrict), + Eq(GetParam().result)); } -TEST(Base64, DecodePadOptions) { - // Padding - EXPECT_TRUE(DecodeTest("YWJjZA==", 0, "abcd", Flags(STRICT, YES, CHAR))); - EXPECT_TRUE(DecodeTest("YWJjZA==", 0, "abcd", Flags(STRICT, ANY, CHAR))); - EXPECT_TRUE(DecodeTest("YWJjZA==", 2, "abcd", Flags(STRICT, NO, CHAR))); - - // Incomplete padding - EXPECT_FALSE(DecodeTest("YWJjZA=", 1, "abcd", Flags(STRICT, YES, CHAR))); - EXPECT_TRUE(DecodeTest("YWJjZA=", 1, "abcd", Flags(STRICT, ANY, CHAR))); - EXPECT_TRUE(DecodeTest("YWJjZA=", 1, "abcd", Flags(STRICT, NO, CHAR))); - - // No padding - EXPECT_FALSE(DecodeTest("YWJjZA", 0, "abcd", Flags(STRICT, YES, CHAR))); - 
EXPECT_TRUE(DecodeTest("YWJjZA", 0, "abcd", Flags(STRICT, ANY, CHAR))); - EXPECT_TRUE(DecodeTest("YWJjZA", 0, "abcd", Flags(STRICT, NO, CHAR))); +TEST_P(Base64DecodeTest, TestDecodeForgiving) { + // Test default value is strict. + EXPECT_THAT(Base64Decode(GetParam().data), Eq(GetParam().result)); } -TEST(Base64, DecodeTerminateOptions) { - // Complete quantum - EXPECT_TRUE(DecodeTest("YWJj", 0, "abc", Flags(STRICT, NO, BUFFER))); - EXPECT_TRUE(DecodeTest("YWJj", 0, "abc", Flags(STRICT, NO, CHAR))); - EXPECT_TRUE(DecodeTest("YWJj", 0, "abc", Flags(STRICT, NO, ANY))); +const Base64DecodeTestCase kBase64DecodeForgivingTestCases[] = { + { + "ForgivingPadding", + "abc", + "i\xB7", + }, + { + "WhitespaceForgivenTab", + "ab\tcd", + "i\xB7\x1D", + }, + { + "WhitespaceForgivenSpace", + "a bc d", + "i\xB7\x1D", + }, + { + "WhitespaceForgivenNewline", + "a\nbc\nd", + "i\xB7\x1D", + }, + { + "WhitespaceForgivenCarriageReturn", + "a\r\nbc\rd", + "i\xB7\x1D", + }, + {"WhitespaceForgivenLineFeed", "a\fbcd", "i\xB7\x1D"}, +}; - // Complete quantum with trailing data - EXPECT_FALSE(DecodeTest("YWJj*", 1, "abc", Flags(STRICT, NO, BUFFER))); - EXPECT_TRUE(DecodeTest("YWJj*", 1, "abc", Flags(STRICT, NO, CHAR))); - EXPECT_TRUE(DecodeTest("YWJj*", 1, "abc", Flags(STRICT, NO, ANY))); +using Base64DecodeForgivingTest = TestWithParam; +INSTANTIATE_TEST_SUITE_P( + Base64DecodeTest, + Base64DecodeForgivingTest, + testing::ValuesIn(kBase64DecodeForgivingTestCases), + [](const auto& info) { return info.param.name; }); - // Incomplete quantum - EXPECT_FALSE(DecodeTest("YWJ", 0, "ab", Flags(STRICT, NO, BUFFER))); - EXPECT_FALSE(DecodeTest("YWJ", 0, "ab", Flags(STRICT, NO, CHAR))); - EXPECT_TRUE(DecodeTest("YWJ", 0, "ab", Flags(STRICT, NO, ANY))); +TEST_P(Base64DecodeForgivingTest, TestDecodeForgiving) { + EXPECT_THAT(Base64Decode(GetParam().data, Base64DecodeOptions::kForgiving), + Eq(GetParam().result)); } -TEST(Base64, GetNextBase64Char) { - // The table looks like this: - // "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/" - char next_char; - EXPECT_TRUE(Base64::GetNextBase64Char('A', &next_char)); - EXPECT_EQ('B', next_char); - EXPECT_TRUE(Base64::GetNextBase64Char('Z', &next_char)); - EXPECT_EQ('a', next_char); - EXPECT_TRUE(Base64::GetNextBase64Char('/', &next_char)); - EXPECT_EQ('A', next_char); - EXPECT_FALSE(Base64::GetNextBase64Char('&', &next_char)); - EXPECT_FALSE(Base64::GetNextBase64Char('Z', nullptr)); +TEST_P(Base64DecodeForgivingTest, TestDecodeStrictFails) { + // Test default value is strict. + EXPECT_THAT(Base64Decode(GetParam().data), Eq(std::nullopt)); } } // namespace -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/bit_buffer.cc b/rtc_base/bit_buffer.cc index fd57e136b4..e94a3c06eb 100644 --- a/rtc_base/bit_buffer.cc +++ b/rtc_base/bit_buffer.cc @@ -48,7 +48,7 @@ uint8_t WritePartialByte(uint8_t source, } // namespace -namespace rtc { +namespace webrtc { BitBufferWriter::BitBufferWriter(uint8_t* bytes, size_t byte_count) : writable_bytes_(bytes), @@ -227,4 +227,4 @@ bool BitBufferWriter::WriteString(absl::string_view data) { return success; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/bit_buffer.h b/rtc_base/bit_buffer.h index fe50b2b76e..17084a4063 100644 --- a/rtc_base/bit_buffer.h +++ b/rtc_base/bit_buffer.h @@ -17,7 +17,7 @@ #include "absl/strings/string_view.h" #include "api/units/data_size.h" -namespace rtc { +namespace webrtc { // A BitBuffer API for write operations. 
Supports symmetric write APIs to the // reading APIs of BitstreamReader. @@ -25,8 +25,7 @@ namespace rtc { // Byte order is assumed big-endian/network. class BitBufferWriter { public: - static constexpr webrtc::DataSize kMaxLeb128Length = - webrtc::DataSize::Bytes(10); + static constexpr DataSize kMaxLeb128Length = webrtc::DataSize::Bytes(10); // Constructs a bit buffer for the writable buffer of `bytes`. BitBufferWriter(uint8_t* bytes, size_t byte_count); @@ -95,6 +94,14 @@ class BitBufferWriter { size_t bit_offset_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::BitBufferWriter; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_BIT_BUFFER_H_ diff --git a/rtc_base/bit_buffer_unittest.cc b/rtc_base/bit_buffer_unittest.cc index 7dfe0c808d..923a3df30a 100644 --- a/rtc_base/bit_buffer_unittest.cc +++ b/rtc_base/bit_buffer_unittest.cc @@ -19,7 +19,7 @@ #include "test/gmock.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { using ::testing::ElementsAre; using ::webrtc::BitstreamReader; @@ -165,7 +165,7 @@ TEST(BitBufferWriterTest, SymmetricReadWrite) { // That should be all that fits in the buffer. EXPECT_FALSE(buffer.WriteBits(1, 1)); - BitstreamReader reader(rtc::MakeArrayView(bytes, 4)); + BitstreamReader reader(MakeArrayView(bytes, 4)); EXPECT_EQ(reader.ReadBits(3), 0x2u); EXPECT_EQ(reader.ReadBits(2), 0x1u); EXPECT_EQ(reader.ReadBits(7), 0x53u); @@ -253,4 +253,4 @@ TEST(BitBufferWriterTest, WriteStringTooSmallBuffer) { EXPECT_FALSE(writer.WriteString("abc")); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/bitrate_tracker.cc b/rtc_base/bitrate_tracker.cc index 340e444f24..0146d3c564 100644 --- a/rtc_base/bitrate_tracker.cc +++ b/rtc_base/bitrate_tracker.cc @@ -10,7 +10,8 @@ #include "rtc_base/bitrate_tracker.h" -#include "absl/types/optional.h" +#include + #include "api/units/data_rate.h" #include "api/units/timestamp.h" #include "rtc_base/rate_statistics.h" @@ -20,11 +21,11 @@ namespace webrtc { BitrateTracker::BitrateTracker(TimeDelta max_window_size) : impl_(max_window_size.ms(), RateStatistics::kBpsScale) {} -absl::optional BitrateTracker::Rate(Timestamp now) const { - if (absl::optional rate = impl_.Rate(now.ms())) { +std::optional BitrateTracker::Rate(Timestamp now) const { + if (std::optional rate = impl_.Rate(now.ms())) { return DataRate::BitsPerSec(*rate); } - return absl::nullopt; + return std::nullopt; } bool BitrateTracker::SetWindowSize(TimeDelta window_size, Timestamp now) { diff --git a/rtc_base/bitrate_tracker.h b/rtc_base/bitrate_tracker.h index a54bd9a561..9570cd66fe 100644 --- a/rtc_base/bitrate_tracker.h +++ b/rtc_base/bitrate_tracker.h @@ -14,7 +14,8 @@ #include #include -#include "absl/types/optional.h" +#include + #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" @@ -50,7 +51,7 @@ class RTC_EXPORT BitrateTracker { // Returns bitrate, moving averaging window as needed. // Returns nullopt when bitrate can't be measured. - absl::optional Rate(Timestamp now) const; + std::optional Rate(Timestamp now) const; // Update the size of the averaging window. The maximum allowed value for // `window_size` is `max_window_size` as supplied in the constructor. 
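For downstream code, the absl::optional to std::optional migration in BitrateTracker above is mostly mechanical. Below is a minimal sketch of how a caller might consume the new return type; the MeasuredRateOrZero helper and its namespace are illustrative only, not part of this patch.

#include <optional>

#include "api/units/data_rate.h"
#include "api/units/timestamp.h"
#include "rtc_base/bitrate_tracker.h"

namespace example {

// Returns the measured bitrate, or zero while the tracker has too few
// samples in its averaging window to report one.
webrtc::DataRate MeasuredRateOrZero(const webrtc::BitrateTracker& tracker,
                                    webrtc::Timestamp now) {
  // Rate() now returns std::optional<DataRate> instead of
  // absl::optional<DataRate>; std::nullopt means "not measurable yet".
  if (std::optional<webrtc::DataRate> rate = tracker.Rate(now)) {
    return *rate;
  }
  return webrtc::DataRate::Zero();
}

}  // namespace example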
diff --git a/rtc_base/bitrate_tracker_unittest.cc b/rtc_base/bitrate_tracker_unittest.cc index 2129aebfdd..61f569d633 100644 --- a/rtc_base/bitrate_tracker_unittest.cc +++ b/rtc_base/bitrate_tracker_unittest.cc @@ -12,8 +12,8 @@ #include #include +#include -#include "absl/types/optional.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -34,7 +34,7 @@ TEST(BitrateTrackerTest, ReturnsNulloptInitially) { Timestamp now = Timestamp::Seconds(12'345); BitrateTracker stats(kWindow); - EXPECT_EQ(stats.Rate(now), absl::nullopt); + EXPECT_EQ(stats.Rate(now), std::nullopt); } TEST(BitrateTrackerTest, ReturnsNulloptAfterSingleDataPoint) { @@ -44,7 +44,7 @@ TEST(BitrateTrackerTest, ReturnsNulloptAfterSingleDataPoint) { stats.Update(1'500, now); now += TimeDelta::Millis(10); - EXPECT_EQ(stats.Rate(now), absl::nullopt); + EXPECT_EQ(stats.Rate(now), std::nullopt); } TEST(BitrateTrackerTest, ReturnsRateAfterTwoMeasurements) { @@ -75,14 +75,14 @@ TEST(BitrateTrackerTest, MeasuresConstantRate) { DataSize total_size = kPacketSize; DataRate last_error = DataRate::PlusInfinity(); for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kInterval) { - SCOPED_TRACE(i); + SCOPED_TRACE(ToString(i)); now += kInterval; total_size += kPacketSize; stats.Update(kPacketSize, now); // Until window is full, bitrate is measured over a smaller window and might // look larger than the constant rate. - absl::optional bitrate = stats.Rate(now); + std::optional bitrate = stats.Rate(now); ASSERT_THAT(bitrate, AllOf(Ge(kConstantRate), Le(total_size / (now - start)))); @@ -95,7 +95,7 @@ TEST(BitrateTrackerTest, MeasuresConstantRate) { // Once window is full, bitrate measurment should be stable. for (TimeDelta i = TimeDelta::Zero(); i < kInterval; i += TimeDelta::Millis(1)) { - SCOPED_TRACE(i); + SCOPED_TRACE(ToString(i)); EXPECT_EQ(stats.Rate(now + i), kConstantRate); } } @@ -111,20 +111,20 @@ TEST(BitrateTrackerTest, IncreasingThenDecreasingBitrate) { stats.Update(kLargePacketSize, now); for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kLargeInterval) { - SCOPED_TRACE(i); + SCOPED_TRACE(ToString(i)); now += kLargeInterval; stats.Update(kLargePacketSize, now); } - absl::optional last_bitrate = stats.Rate(now); + std::optional last_bitrate = stats.Rate(now); EXPECT_EQ(last_bitrate, kLargePacketSize / kLargeInterval); // Decrease bitrate with smaller measurments. for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kLargeInterval) { - SCOPED_TRACE(i); + SCOPED_TRACE(ToString(i)); now += kLargeInterval; stats.Update(kSmallPacketSize, now); - absl::optional bitrate = stats.Rate(now); + std::optional bitrate = stats.Rate(now); EXPECT_LT(bitrate, last_bitrate); last_bitrate = bitrate; @@ -133,11 +133,11 @@ TEST(BitrateTrackerTest, IncreasingThenDecreasingBitrate) { // Increase bitrate with more frequent measurments. for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kSmallInterval) { - SCOPED_TRACE(i); + SCOPED_TRACE(ToString(i)); now += kSmallInterval; stats.Update(kSmallPacketSize, now); - absl::optional bitrate = stats.Rate(now); + std::optional bitrate = stats.Rate(now); EXPECT_GE(bitrate, last_bitrate); last_bitrate = bitrate; @@ -162,17 +162,17 @@ TEST(BitrateTrackerTest, ResetAfterSilence) { now += kWindow + kEpsilon; // Silence over window size should trigger auto reset for coming sample. 
- EXPECT_EQ(stats.Rate(now), absl::nullopt); + EXPECT_EQ(stats.Rate(now), std::nullopt); stats.Update(kPacketSize, now); // Single measurment after reset is not enough to estimate the rate. - EXPECT_EQ(stats.Rate(now), absl::nullopt); + EXPECT_EQ(stats.Rate(now), std::nullopt); // Manual reset, add the same check again. stats.Reset(); - EXPECT_EQ(stats.Rate(now), absl::nullopt); + EXPECT_EQ(stats.Rate(now), std::nullopt); now += kInterval; stats.Update(kPacketSize, now); - EXPECT_EQ(stats.Rate(now), absl::nullopt); + EXPECT_EQ(stats.Rate(now), std::nullopt); } TEST(BitrateTrackerTest, HandlesChangingWindowSize) { @@ -223,17 +223,17 @@ TEST(BitrateTrackerTest, HandlesZeroCounts) { BitrateTracker stats(kWindow); stats.Update(kPacketSize, now); - ASSERT_EQ(stats.Rate(now), absl::nullopt); + ASSERT_EQ(stats.Rate(now), std::nullopt); now += kInterval; stats.Update(0, now); - absl::optional last_bitrate = stats.Rate(now); + std::optional last_bitrate = stats.Rate(now); EXPECT_GT(last_bitrate, DataRate::Zero()); now += kInterval; while (now < start + kWindow) { - SCOPED_TRACE(now - start); + SCOPED_TRACE(ToString(now - start)); stats.Update(0, now); - absl::optional bitrate = stats.Rate(now); + std::optional bitrate = stats.Rate(now); EXPECT_GT(bitrate, DataRate::Zero()); // As window expands, average bitrate decreases. EXPECT_LT(bitrate, last_bitrate); @@ -260,7 +260,7 @@ TEST(BitrateTrackerTest, ReturnsNulloptWhenOverflows) { now += kEpsilon; stats.Update(very_large_number, now); - EXPECT_EQ(stats.Rate(now), absl::nullopt); + EXPECT_EQ(stats.Rate(now), std::nullopt); } } // namespace diff --git a/rtc_base/bitstream_reader.cc b/rtc_base/bitstream_reader.cc index 3e1b94d8d4..94be1f0d8e 100644 --- a/rtc_base/bitstream_reader.cc +++ b/rtc_base/bitstream_reader.cc @@ -26,7 +26,7 @@ uint64_t BitstreamReader::ReadBits(int bits) { set_last_read_is_verified(false); if (remaining_bits_ < bits) { - remaining_bits_ -= bits; + Invalidate(); return 0; } @@ -64,10 +64,11 @@ uint64_t BitstreamReader::ReadBits(int bits) { int BitstreamReader::ReadBit() { set_last_read_is_verified(false); - --remaining_bits_; - if (remaining_bits_ < 0) { + if (remaining_bits_ <= 0) { + Invalidate(); return 0; } + --remaining_bits_; int bit_position = remaining_bits_ % 8; if (bit_position == 0) { @@ -120,7 +121,7 @@ uint32_t BitstreamReader::ReadExponentialGolomb() { // The bit count of the value is the number of zeros + 1. // However the first '1' was already read above. return (uint32_t{1} << zero_bit_count) + - rtc::dchecked_cast(ReadBits(zero_bit_count)) - 1; + dchecked_cast(ReadBits(zero_bit_count)) - 1; } int BitstreamReader::ReadSignedExponentialGolomb() { diff --git a/rtc_base/bitstream_reader.h b/rtc_base/bitstream_reader.h index c367b9dc9f..43d7a4c8ed 100644 --- a/rtc_base/bitstream_reader.h +++ b/rtc_base/bitstream_reader.h @@ -30,7 +30,7 @@ namespace webrtc { class BitstreamReader { public: explicit BitstreamReader( - rtc::ArrayView bytes ABSL_ATTRIBUTE_LIFETIME_BOUND); + ArrayView bytes ABSL_ATTRIBUTE_LIFETIME_BOUND); explicit BitstreamReader( absl::string_view bytes ABSL_ATTRIBUTE_LIFETIME_BOUND); BitstreamReader(const BitstreamReader&) = default; @@ -64,7 +64,7 @@ class BitstreamReader { !std::is_same::value && sizeof(T) <= 8>::type* = nullptr> ABSL_MUST_USE_RESULT T Read() { - return rtc::dchecked_cast(ReadBits(sizeof(T) * 8)); + return dchecked_cast(ReadBits(sizeof(T) * 8)); } // Reads single bit as boolean. 
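The ReadBits()/ReadBit() fixes in bitstream_reader.cc above make an over-read invalidate the reader and return 0, rather than driving the remaining bit count negative. A short sketch of the intended calling pattern follows, assuming ArrayView is reachable as webrtc::ArrayView as the rest of this patch does; the ParseTwoFields helper is illustrative only.

#include <cstdint>

#include "api/array_view.h"
#include "rtc_base/bitstream_reader.h"

// Parses a 3-bit field followed by an exponential-Golomb value. Reads are
// performed optimistically; a single Ok() check at the end reports whether
// any of them ran past the end of `payload`.
bool ParseTwoFields(webrtc::ArrayView<const uint8_t> payload,
                    uint32_t& first,
                    uint32_t& second) {
  webrtc::BitstreamReader reader(payload);
  first = static_cast<uint32_t>(reader.ReadBits(3));
  second = reader.ReadExponentialGolomb();
  // Ok() also marks the last read as verified, which the debug-only check in
  // the destructor requires.
  return reader.Ok();
}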
@@ -123,12 +123,13 @@ class BitstreamReader { mutable bool last_read_is_verified_ = true; }; -inline BitstreamReader::BitstreamReader(rtc::ArrayView bytes) - : bytes_(bytes.data()), remaining_bits_(bytes.size() * 8) {} +inline BitstreamReader::BitstreamReader(ArrayView bytes) + : bytes_(bytes.data()), + remaining_bits_(checked_cast(bytes.size() * 8)) {} inline BitstreamReader::BitstreamReader(absl::string_view bytes) : bytes_(reinterpret_cast(bytes.data())), - remaining_bits_(bytes.size() * 8) {} + remaining_bits_(checked_cast(bytes.size() * 8)) {} inline BitstreamReader::~BitstreamReader() { RTC_DCHECK(last_read_is_verified_) << "Latest calls to Read or ConsumeBit " diff --git a/rtc_base/bitstream_reader_unittest.cc b/rtc_base/bitstream_reader_unittest.cc index 46309b2a13..334218e383 100644 --- a/rtc_base/bitstream_reader_unittest.cc +++ b/rtc_base/bitstream_reader_unittest.cc @@ -15,9 +15,9 @@ #include #include +#include #include "absl/numeric/bits.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "rtc_base/checks.h" #include "test/gmock.h" @@ -28,26 +28,26 @@ namespace { TEST(BitstreamReaderTest, InDebugModeRequiresToCheckOkStatusBeforeDestruction) { const uint8_t bytes[32] = {}; - absl::optional reader(absl::in_place, bytes); + std::optional reader(std::in_place, bytes); EXPECT_GE(reader->ReadBits(7), 0u); #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(OS_ANDROID) - EXPECT_DEATH(reader = absl::nullopt, ""); + EXPECT_DEATH(reader = std::nullopt, ""); #endif EXPECT_TRUE(reader->Ok()); - reader = absl::nullopt; + reader = std::nullopt; } TEST(BitstreamReaderTest, InDebugModeMayCheckRemainingBitsInsteadOfOkStatus) { const uint8_t bytes[32] = {}; - absl::optional reader(absl::in_place, bytes); + std::optional reader(std::in_place, bytes); EXPECT_GE(reader->ReadBit(), 0); #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(OS_ANDROID) - EXPECT_DEATH(reader = absl::nullopt, ""); + EXPECT_DEATH(reader = std::nullopt, ""); #endif EXPECT_GE(reader->RemainingBitCount(), 0); - reader = absl::nullopt; + reader = std::nullopt; } TEST(BitstreamReaderTest, ConsumeBits) { @@ -198,14 +198,14 @@ TEST(BitstreamReaderTest, ReadBits) { } TEST(BitstreamReaderTest, ReadZeroBits) { - BitstreamReader reader(rtc::ArrayView(nullptr, 0)); + BitstreamReader reader(ArrayView(nullptr, 0)); EXPECT_EQ(reader.ReadBits(0), 0u); EXPECT_TRUE(reader.Ok()); } TEST(BitstreamReaderTest, ReadBitFromEmptyArray) { - BitstreamReader reader(rtc::ArrayView(nullptr, 0)); + BitstreamReader reader(ArrayView(nullptr, 0)); // Trying to read from the empty array shouldn't dereference the pointer, // i.e. shouldn't crash. @@ -214,7 +214,7 @@ TEST(BitstreamReaderTest, ReadBitFromEmptyArray) { } TEST(BitstreamReaderTest, ReadBitsFromEmptyArray) { - BitstreamReader reader(rtc::ArrayView(nullptr, 0)); + BitstreamReader reader(ArrayView(nullptr, 0)); // Trying to read from the empty array shouldn't dereference the pointer, // i.e. shouldn't crash. @@ -325,12 +325,12 @@ TEST(BitstreamReaderTest, NoGolombOverread) { const uint8_t bytes[] = {0x00, 0xFF, 0xFF}; // Make sure the bit buffer correctly enforces byte length on golomb reads. // If it didn't, the above buffer would be valid at 3 bytes. - BitstreamReader reader1(rtc::MakeArrayView(bytes, 1)); + BitstreamReader reader1(MakeArrayView(bytes, 1)); // When parse fails, `ReadExponentialGolomb` may return any number. 
reader1.ReadExponentialGolomb(); EXPECT_FALSE(reader1.Ok()); - BitstreamReader reader2(rtc::MakeArrayView(bytes, 2)); + BitstreamReader reader2(MakeArrayView(bytes, 2)); reader2.ReadExponentialGolomb(); EXPECT_FALSE(reader2.Ok()); diff --git a/rtc_base/boringssl_certificate.cc b/rtc_base/boringssl_certificate.cc index a866224496..016b4dabba 100644 --- a/rtc_base/boringssl_certificate.cc +++ b/rtc_base/boringssl_certificate.cc @@ -10,7 +10,11 @@ #include "rtc_base/boringssl_certificate.h" -#include "absl/strings/string_view.h" +#include +#include +#include +#include +#include #if defined(WEBRTC_WIN) // Must be included first before openssl headers. @@ -18,6 +22,7 @@ #endif // WEBRTC_WIN #include +#include #include #include #include @@ -26,20 +31,19 @@ #include #include -#include -#include -#include -#include - +#include "absl/strings/string_view.h" +#include "rtc_base/buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/helpers.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/logging.h" #include "rtc_base/message_digest.h" #include "rtc_base/openssl_digest.h" #include "rtc_base/openssl_key_pair.h" #include "rtc_base/openssl_utility.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_identity.h" -namespace rtc { +namespace webrtc { namespace { // List of OIDs of signature algorithms accepted by WebRTC. @@ -261,7 +265,7 @@ std::unique_ptr BoringSSLCertificate::Generate( SSLIdentityParams actual_params(params); if (actual_params.common_name.empty()) { // Use a random string, arbitrarily 8 chars long. - actual_params.common_name = CreateRandomString(8); + actual_params.common_name = webrtc::CreateRandomString(8); } bssl::UniquePtr cert_buffer = MakeCertificate(key_pair->pkey(), actual_params); @@ -343,30 +347,23 @@ bool BoringSSLCertificate::GetSignatureDigestAlgorithm( } bool BoringSSLCertificate::ComputeDigest(absl::string_view algorithm, - unsigned char* digest, - size_t size, - size_t* length) const { - return ComputeDigest(cert_buffer_.get(), algorithm, digest, size, length); -} + Buffer& digest) const { + RTC_DCHECK_GT(digest.capacity(), 0); -bool BoringSSLCertificate::ComputeDigest(const CRYPTO_BUFFER* cert_buffer, - absl::string_view algorithm, - unsigned char* digest, - size_t size, - size_t* length) { const EVP_MD* md = nullptr; unsigned int n = 0; if (!OpenSSLDigest::GetDigestEVP(algorithm, &md)) { return false; } - if (size < static_cast(EVP_MD_size(md))) { + if (digest.capacity() < static_cast(EVP_MD_size(md))) { return false; } - if (!EVP_Digest(CRYPTO_BUFFER_data(cert_buffer), - CRYPTO_BUFFER_len(cert_buffer), digest, &n, md, nullptr)) { + if (!EVP_Digest(CRYPTO_BUFFER_data(cert_buffer_.get()), + CRYPTO_BUFFER_len(cert_buffer_.get()), digest.data(), &n, md, + nullptr)) { return false; } - *length = n; + digest.SetSize(n); return true; } @@ -409,4 +406,4 @@ int64_t BoringSSLCertificate::CertificateExpirationTime() const { return ret; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/boringssl_certificate.h b/rtc_base/boringssl_certificate.h index bd331686b7..926dd332ec 100644 --- a/rtc_base/boringssl_certificate.h +++ b/rtc_base/boringssl_certificate.h @@ -20,12 +20,12 @@ #include "absl/strings/string_view.h" #include "rtc_base/buffer.h" +#include "rtc_base/openssl.h" +#include "rtc_base/openssl_key_pair.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" -namespace rtc { - -class OpenSSLKeyPair; +namespace webrtc { // BoringSSLCertificate encapsulates a BoringSSL CRYPTO_BUFFER object holding a // certificate, 
which is also reference counted inside the BoringSSL library. @@ -57,16 +57,7 @@ class BoringSSLCertificate final : public SSLCertificate { // Compute the digest of the certificate given `algorithm`. bool ComputeDigest(absl::string_view algorithm, - unsigned char* digest, - size_t size, - size_t* length) const override; - - // Compute the digest of a certificate as a CRYPTO_BUFFER. - static bool ComputeDigest(const CRYPTO_BUFFER* cert_buffer, - absl::string_view algorithm, - unsigned char* digest, - size_t size, - size_t* length); + Buffer& digest) const override; bool GetSignatureDigestAlgorithm(std::string* algorithm) const override; @@ -77,6 +68,14 @@ class BoringSSLCertificate final : public SSLCertificate { bssl::UniquePtr cert_buffer_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::BoringSSLCertificate; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_BORINGSSL_CERTIFICATE_H_ diff --git a/rtc_base/boringssl_identity.cc b/rtc_base/boringssl_identity.cc index a61524a679..5c8f60e362 100644 --- a/rtc_base/boringssl_identity.cc +++ b/rtc_base/boringssl_identity.cc @@ -17,19 +17,24 @@ #include #include +#include #include +#include #include #include #include "absl/memory/memory.h" #include "absl/strings/string_view.h" +#include "rtc_base/boringssl_certificate.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/openssl.h" +#include "rtc_base/openssl_key_pair.h" #include "rtc_base/openssl_utility.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_identity.h" -namespace rtc { +namespace webrtc { BoringSSLIdentity::BoringSSLIdentity( std::unique_ptr key_pair, @@ -113,7 +118,7 @@ std::unique_ptr BoringSSLIdentity::CreateFromPEMChainStrings( absl::string_view certificate_chain) { bssl::UniquePtr bio( BIO_new_mem_buf(certificate_chain.data(), - rtc::dchecked_cast(certificate_chain.size()))); + webrtc::dchecked_cast(certificate_chain.size()))); if (!bio) { return nullptr; } @@ -129,6 +134,7 @@ std::unique_ptr BoringSSLIdentity::CreateFromPEMChainStrings( uint32_t err = ERR_peek_error(); if (ERR_GET_LIB(err) == ERR_LIB_PEM && ERR_GET_REASON(err) == PEM_R_NO_START_LINE) { + err = ERR_get_error(); break; } RTC_LOG(LS_ERROR) << "Failed to parse certificate from PEM string."; @@ -213,4 +219,4 @@ bool BoringSSLIdentity::operator!=(const BoringSSLIdentity& other) const { return !(*this == other); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/boringssl_identity.h b/rtc_base/boringssl_identity.h index ffc8812af2..1c6f5ab377 100644 --- a/rtc_base/boringssl_identity.h +++ b/rtc_base/boringssl_identity.h @@ -23,7 +23,7 @@ #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" -namespace rtc { +namespace webrtc { // Holds a keypair and certificate together, and a method to generate them // consistently. Uses CRYPTO_BUFFER instead of X509, which offers binary size @@ -72,6 +72,14 @@ class BoringSSLIdentity final : public SSLIdentity { std::unique_ptr cert_chain_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::BoringSSLIdentity; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_BORINGSSL_IDENTITY_H_ diff --git a/rtc_base/bounded_inline_vector_impl.h b/rtc_base/bounded_inline_vector_impl.h index 3539ace5bc..82b11b8db6 100644 --- a/rtc_base/bounded_inline_vector_impl.h +++ b/rtc_base/bounded_inline_vector_impl.h @@ -46,13 +46,9 @@ void InitializeElements(T* data, U&& element, Us&&... elements) { } // Default initializes uninitialized array elements. -// TODO(kwiberg): Replace with std::uninitialized_default_construct_n() (C++17). template void DefaultInitializeElements(T* data, int size) { - for (int i = 0; i < size; ++i) { - // Placement new, because we construct a new object in uninitialized memory. - ::new (&data[i]) T; - } + std::uninitialized_default_construct_n(data, size); } // Copies from source to uninitialized destination. Caller is responsible for @@ -74,12 +70,7 @@ void MoveElements(T* src_data, int src_size, T* dst_data, int* dst_size) { if /*constexpr*/ (std::is_trivially_move_constructible::value) { std::memcpy(dst_data, src_data, src_size * sizeof(T)); } else { - // TODO(kwiberg): Use std::uninitialized_move_n() instead (C++17). - for (int i = 0; i < src_size; ++i) { - // Placement new, because we create a new object in uninitialized - // memory. - ::new (&dst_data[i]) T(std::move(src_data[i])); - } + std::uninitialized_move_n(src_data, src_size, dst_data); } *dst_size = src_size; } diff --git a/rtc_base/buffer.h b/rtc_base/buffer.h index 6663c687b8..95e8857ed4 100644 --- a/rtc_base/buffer.h +++ b/rtc_base/buffer.h @@ -25,7 +25,7 @@ #include "rtc_base/type_traits.h" #include "rtc_base/zero_memory.h" -namespace rtc { +namespace webrtc { namespace internal { @@ -323,7 +323,7 @@ class BufferT { const size_t old_size = size_; SetSize(old_size + max_elements); U* base_ptr = data() + old_size; - size_t written_elements = setter(rtc::ArrayView(base_ptr, max_elements)); + size_t written_elements = setter(ArrayView(base_ptr, max_elements)); RTC_CHECK_LE(written_elements, max_elements); size_ = old_size + written_elements; @@ -447,6 +447,18 @@ using Buffer = BufferT; template using ZeroOnFreeBuffer = BufferT; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +template +using BufferT = ::webrtc::BufferT; +using ::webrtc::Buffer; +template +using ZeroOnFreeBuffer = ::webrtc::ZeroOnFreeBuffer; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_BUFFER_H_ diff --git a/rtc_base/buffer_queue.cc b/rtc_base/buffer_queue.cc index 7879e933c7..c23365c130 100644 --- a/rtc_base/buffer_queue.cc +++ b/rtc_base/buffer_queue.cc @@ -15,7 +15,10 @@ #include -namespace rtc { +#include "api/sequence_checker.h" +#include "rtc_base/buffer.h" + +namespace webrtc { BufferQueue::BufferQueue(size_t capacity, size_t default_size) : capacity_(capacity), default_size_(default_size) {} @@ -82,4 +85,4 @@ bool BufferQueue::WriteBack(const void* buffer, return true; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/buffer_queue.h b/rtc_base/buffer_queue.h index b018e160a1..fdcc2ba893 100644 --- a/rtc_base/buffer_queue.h +++ b/rtc_base/buffer_queue.h @@ -21,7 +21,7 @@ #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" -namespace rtc { +namespace webrtc { class BufferQueue final { public: @@ -58,13 +58,21 @@ class BufferQueue final { } private: - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; const size_t capacity_; const size_t default_size_; std::deque queue_ RTC_GUARDED_BY(sequence_checker_); std::vector free_list_ RTC_GUARDED_BY(sequence_checker_); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::BufferQueue; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_BUFFER_QUEUE_H_ diff --git a/rtc_base/buffer_queue_unittest.cc b/rtc_base/buffer_queue_unittest.cc index 2a2f8cc4db..b0ebdd782d 100644 --- a/rtc_base/buffer_queue_unittest.cc +++ b/rtc_base/buffer_queue_unittest.cc @@ -14,7 +14,7 @@ #include "test/gtest.h" -namespace rtc { +namespace webrtc { TEST(BufferQueueTest, TestAll) { const size_t kSize = 16; @@ -86,4 +86,4 @@ TEST(BufferQueueTest, TestAll) { EXPECT_EQ(0u, queue2.size()); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/buffer_unittest.cc b/rtc_base/buffer_unittest.cc index b56118afde..e6f5d8eb92 100644 --- a/rtc_base/buffer_unittest.cc +++ b/rtc_base/buffer_unittest.cc @@ -18,7 +18,7 @@ #include "test/gmock.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { @@ -102,7 +102,7 @@ TEST(BufferTest, TestAppendData) { EXPECT_EQ(buf, Buffer(exp)); Buffer buf2; buf2.AppendData(buf); - buf2.AppendData(rtc::ArrayView(buf)); + buf2.AppendData(ArrayView(buf)); const int8_t exp2[] = {0x4, 0x5, 0x6, 0xa, 0xb, 0x4, 0x5, 0x6, 0xa, 0xb}; EXPECT_EQ(buf2, Buffer(exp2)); } @@ -242,7 +242,7 @@ TEST(BufferTest, TestClear) { } TEST(BufferTest, TestLambdaSetAppend) { - auto setter = [](rtc::ArrayView av) { + auto setter = [](ArrayView av) { for (int i = 0; i != 15; ++i) av[i] = kTestData[i]; return 15; @@ -262,7 +262,7 @@ TEST(BufferTest, TestLambdaSetAppend) { } TEST(BufferTest, TestLambdaSetAppendSigned) { - auto setter = [](rtc::ArrayView av) { + auto setter = [](ArrayView av) { for (int i = 0; i != 15; ++i) av[i] = kTestData[i]; return 15; @@ -282,7 +282,7 @@ TEST(BufferTest, TestLambdaSetAppendSigned) { } TEST(BufferTest, TestLambdaAppendEmpty) { - auto setter = [](rtc::ArrayView av) 
{ + auto setter = [](ArrayView av) { for (int i = 0; i != 15; ++i) av[i] = kTestData[i]; return 15; @@ -300,7 +300,7 @@ TEST(BufferTest, TestLambdaAppendEmpty) { } TEST(BufferTest, TestLambdaAppendPartial) { - auto setter = [](rtc::ArrayView av) { + auto setter = [](ArrayView av) { for (int i = 0; i != 7; ++i) av[i] = kTestData[i]; return 7; @@ -316,7 +316,7 @@ TEST(BufferTest, TestLambdaAppendPartial) { TEST(BufferTest, TestMutableLambdaSetAppend) { uint8_t magic_number = 17; - auto setter = [magic_number](rtc::ArrayView av) mutable { + auto setter = [magic_number](ArrayView av) mutable { for (int i = 0; i != 15; ++i) { av[i] = magic_number; ++magic_number; @@ -489,7 +489,7 @@ TEST(ZeroOnFreeBufferTest, TestZeroOnSetData) { TEST(ZeroOnFreeBufferTest, TestZeroOnSetDataFromSetter) { static constexpr size_t offset = 1; - const auto setter = [](rtc::ArrayView av) { + const auto setter = [](ArrayView av) { for (int i = 0; i != 2; ++i) av[i] = kTestData[offset + i]; return 2; @@ -545,4 +545,4 @@ TEST(ZeroOnFreeBufferTest, TestZeroOnClear) { } } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/byte_buffer.cc b/rtc_base/byte_buffer.cc index e1278337a4..0f5ea525f6 100644 --- a/rtc_base/byte_buffer.cc +++ b/rtc_base/byte_buffer.cc @@ -12,30 +12,29 @@ #include -namespace rtc { +#include +#include -ByteBufferWriter::ByteBufferWriter() : ByteBufferWriterT() {} +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "rtc_base/byte_order.h" -ByteBufferWriter::ByteBufferWriter(const char* bytes, size_t len) - : ByteBufferWriterT(bytes, len) {} +namespace webrtc { -ByteBufferReader::ByteBufferReader(const char* bytes, size_t len) { - Construct(bytes, len); -} +ByteBufferWriter::ByteBufferWriter() : ByteBufferWriterT() {} -ByteBufferReader::ByteBufferReader(const char* bytes) { - Construct(bytes, strlen(bytes)); -} +ByteBufferWriter::ByteBufferWriter(const uint8_t* bytes, size_t len) + : ByteBufferWriterT(bytes, len) {} -ByteBufferReader::ByteBufferReader(const Buffer& buf) { - Construct(buf.data(), buf.size()); +ByteBufferReader::ByteBufferReader(ArrayView bytes) { + Construct(bytes.data(), bytes.size()); } ByteBufferReader::ByteBufferReader(const ByteBufferWriter& buf) { - Construct(buf.Data(), buf.Length()); + Construct(reinterpret_cast(buf.Data()), buf.Length()); } -void ByteBufferReader::Construct(const char* bytes, size_t len) { +void ByteBufferReader::Construct(const uint8_t* bytes, size_t len) { bytes_ = bytes; size_ = len; start_ = 0; @@ -46,7 +45,7 @@ bool ByteBufferReader::ReadUInt8(uint8_t* val) { if (!val) return false; - return ReadBytes(reinterpret_cast(val), 1); + return ReadBytes(val, 1); } bool ByteBufferReader::ReadUInt16(uint16_t* val) { @@ -54,10 +53,10 @@ bool ByteBufferReader::ReadUInt16(uint16_t* val) { return false; uint16_t v; - if (!ReadBytes(reinterpret_cast(&v), 2)) { + if (!ReadBytes(reinterpret_cast(&v), 2)) { return false; } else { - *val = NetworkToHost16(v); + *val = webrtc::NetworkToHost16(v); return true; } } @@ -67,13 +66,13 @@ bool ByteBufferReader::ReadUInt24(uint32_t* val) { return false; uint32_t v = 0; - char* read_into = reinterpret_cast(&v); + uint8_t* read_into = reinterpret_cast(&v); ++read_into; if (!ReadBytes(read_into, 3)) { return false; } else { - *val = NetworkToHost32(v); + *val = webrtc::NetworkToHost32(v); return true; } } @@ -83,10 +82,10 @@ bool ByteBufferReader::ReadUInt32(uint32_t* val) { return false; uint32_t v; - if (!ReadBytes(reinterpret_cast(&v), 4)) { + if (!ReadBytes(reinterpret_cast(&v), 4)) { return 
false; } else { - *val = NetworkToHost32(v); + *val = webrtc::NetworkToHost32(v); return true; } } @@ -96,10 +95,10 @@ bool ByteBufferReader::ReadUInt64(uint64_t* val) { return false; uint64_t v; - if (!ReadBytes(reinterpret_cast(&v), 8)) { + if (!ReadBytes(reinterpret_cast(&v), 8)) { return false; } else { - *val = NetworkToHost64(v); + *val = webrtc::NetworkToHost64(v); return true; } } @@ -112,14 +111,14 @@ bool ByteBufferReader::ReadUVarint(uint64_t* val) { // continuation byte (msb=1) if there are more bytes to be read. uint64_t v = 0; for (int i = 0; i < 64; i += 7) { - char byte; + uint8_t byte; if (!ReadBytes(&byte, 1)) { return false; } // Read the first 7 bits of the byte, then offset by bits read so far. v |= (static_cast(byte) & 0x7F) << i; - // True if the msb is not a continuation byte. - if (static_cast(byte) < 0x80) { + // Return if the msb is not a continuation byte. + if (byte < 0x80) { *val = v; return true; } @@ -134,20 +133,38 @@ bool ByteBufferReader::ReadString(std::string* val, size_t len) { if (len > Length()) { return false; } else { - val->append(bytes_ + start_, len); + val->append(reinterpret_cast(bytes_ + start_), len); start_ += len; return true; } } -bool ByteBufferReader::ReadBytes(char* val, size_t len) { +bool ByteBufferReader::ReadStringView(absl::string_view* val, size_t len) { + if (!val || len > Length()) + return false; + *val = absl::string_view(reinterpret_cast(bytes_ + start_), len); + start_ += len; + return true; +} + +bool ByteBufferReader::ReadBytes(ArrayView val) { + if (val.size() == 0) { + return true; + } + return ReadBytes(val.data(), val.size()); +} + +// Private function supporting the other Read* functions. +bool ByteBufferReader::ReadBytes(uint8_t* val, size_t len) { if (len > Length()) { return false; - } else { - memcpy(val, bytes_ + start_, len); - start_ += len; + } + if (len == 0) { return true; } + memcpy(val, bytes_ + start_, len); + start_ += len; + return true; } bool ByteBufferReader::Consume(size_t size) { @@ -157,4 +174,4 @@ bool ByteBufferReader::Consume(size_t size) { return true; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/byte_buffer.h b/rtc_base/byte_buffer.h index 9bcbb838aa..b962f4574d 100644 --- a/rtc_base/byte_buffer.h +++ b/rtc_base/byte_buffer.h @@ -16,48 +16,67 @@ #include +#include "absl/base/attributes.h" #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "rtc_base/buffer.h" #include "rtc_base/byte_order.h" // Reads/Writes from/to buffer using network byte order (big endian) -namespace rtc { +namespace webrtc { template class ByteBufferWriterT { + using value_type = typename BufferClassT::value_type; + public: ByteBufferWriterT() { Construct(nullptr, kDefaultCapacity); } - ByteBufferWriterT(const char* bytes, size_t len) { Construct(bytes, len); } + ByteBufferWriterT(const value_type* bytes, size_t len) { + Construct(bytes, len); + } ByteBufferWriterT(const ByteBufferWriterT&) = delete; ByteBufferWriterT& operator=(const ByteBufferWriterT&) = delete; - const char* Data() const { return buffer_.data(); } + const value_type* Data() const { return buffer_.data(); } size_t Length() const { return buffer_.size(); } size_t Capacity() const { return buffer_.capacity(); } + ArrayView DataView() const { + return MakeArrayView(Data(), Length()); + } + // Accessor that returns a string_view, independent of underlying type. + // Intended to provide access for existing users that expect char* + // when the underlying type changes to uint8_t. 
+ // TODO(bugs.webrtc.org/15665): Delete when users are converted. + absl::string_view DataAsStringView() const { + return absl::string_view(reinterpret_cast(Data()), Length()); + } + const char* DataAsCharPointer() const { + return reinterpret_cast(Data()); + } // Write value to the buffer. Resizes the buffer when it is // neccessary. void WriteUInt8(uint8_t val) { - WriteBytes(reinterpret_cast(&val), 1); + WriteBytesInternal(reinterpret_cast(&val), 1); } void WriteUInt16(uint16_t val) { - uint16_t v = HostToNetwork16(val); - WriteBytes(reinterpret_cast(&v), 2); + uint16_t v = webrtc::HostToNetwork16(val); + WriteBytesInternal(reinterpret_cast(&v), 2); } void WriteUInt24(uint32_t val) { - uint32_t v = HostToNetwork32(val); - char* start = reinterpret_cast(&v); + uint32_t v = webrtc::HostToNetwork32(val); + value_type* start = reinterpret_cast(&v); ++start; - WriteBytes(start, 3); + WriteBytesInternal(start, 3); } void WriteUInt32(uint32_t val) { - uint32_t v = HostToNetwork32(val); - WriteBytes(reinterpret_cast(&v), 4); + uint32_t v = webrtc::HostToNetwork32(val); + WriteBytesInternal(reinterpret_cast(&v), 4); } void WriteUInt64(uint64_t val) { - uint64_t v = HostToNetwork64(val); - WriteBytes(reinterpret_cast(&v), 8); + uint64_t v = webrtc::HostToNetwork64(val); + WriteBytesInternal(reinterpret_cast(&v), 8); } // Serializes an unsigned varint in the format described by // https://developers.google.com/protocol-buffers/docs/encoding#varints @@ -66,22 +85,31 @@ class ByteBufferWriterT { while (val >= 0x80) { // Write 7 bits at a time, then set the msb to a continuation byte // (msb=1). - char byte = static_cast(val) | 0x80; - WriteBytes(&byte, 1); + value_type byte = static_cast(val) | 0x80; + WriteBytesInternal(&byte, 1); val >>= 7; } - char last_byte = static_cast(val); - WriteBytes(&last_byte, 1); + value_type last_byte = static_cast(val); + WriteBytesInternal(&last_byte, 1); } void WriteString(absl::string_view val) { - WriteBytes(val.data(), val.size()); + WriteBytesInternal(reinterpret_cast(val.data()), + val.size()); + } + // Write an array of bytes (uint8_t) + [[deprecated("issues.webrtc.org/4225170 - use Write(ArrayView)")]] + void WriteBytes(const uint8_t* val, size_t len) { + WriteBytesInternal(reinterpret_cast(val), len); } - void WriteBytes(const char* val, size_t len) { buffer_.AppendData(val, len); } - // Reserves the given number of bytes and returns a char* that can be written - // into. Useful for functions that require a char* buffer and not a - // ByteBufferWriter. - char* ReserveWriteBuffer(size_t len) { + void Write(ArrayView data) { + WriteBytesInternal(data.data(), data.size()); + } + + // Reserves the given number of bytes and returns a value_type* that can be + // written into. Useful for functions that require a value_type* buffer and + // not a ByteBufferWriter. + value_type* ReserveWriteBuffer(size_t len) { buffer_.SetSize(buffer_.size() + len); return buffer_.data(); } @@ -92,10 +120,12 @@ class ByteBufferWriterT { // Clears the contents of the buffer. After this, Length() will be 0. 
void Clear() { buffer_.Clear(); } + BufferClassT Extract() && { return std::move(buffer_); } + private: static constexpr size_t kDefaultCapacity = 4096; - void Construct(const char* bytes, size_t size) { + void Construct(const value_type* bytes, size_t size) { if (bytes) { buffer_.AppendData(bytes, size); } else { @@ -103,16 +133,20 @@ class ByteBufferWriterT { } } + void WriteBytesInternal(const value_type* val, size_t len) { + buffer_.AppendData(val, len); + } + BufferClassT buffer_; // There are sensible ways to define these, but they aren't needed in our code // base. }; -class ByteBufferWriter : public ByteBufferWriterT> { +class ByteBufferWriter : public ByteBufferWriterT> { public: ByteBufferWriter(); - ByteBufferWriter(const char* bytes, size_t len); + ByteBufferWriter(const uint8_t* bytes, size_t len); ByteBufferWriter(const ByteBufferWriter&) = delete; ByteBufferWriter& operator=(const ByteBufferWriter&) = delete; @@ -122,22 +156,21 @@ class ByteBufferWriter : public ByteBufferWriterT> { // valid during the lifetime of the reader. class ByteBufferReader { public: - ByteBufferReader(const char* bytes, size_t len); - - // Initializes buffer from a zero-terminated string. - explicit ByteBufferReader(const char* bytes); - - explicit ByteBufferReader(const Buffer& buf); + explicit ByteBufferReader( + ArrayView bytes ABSL_ATTRIBUTE_LIFETIME_BOUND); explicit ByteBufferReader(const ByteBufferWriter& buf); ByteBufferReader(const ByteBufferReader&) = delete; ByteBufferReader& operator=(const ByteBufferReader&) = delete; - // Returns start of unprocessed data. - const char* Data() const { return bytes_ + start_; } + const uint8_t* Data() const { return bytes_ + start_; } // Returns number of unprocessed bytes. size_t Length() const { return end_ - start_; } + // Returns a view of the unprocessed data. Does not move current position. + ArrayView DataView() const { + return ArrayView(bytes_ + start_, end_ - start_); + } // Read a next value from the buffer. Return false if there isn't // enough data left for the specified type. @@ -147,11 +180,14 @@ class ByteBufferReader { bool ReadUInt32(uint32_t* val); bool ReadUInt64(uint64_t* val); bool ReadUVarint(uint64_t* val); - bool ReadBytes(char* val, size_t len); - + // Copies the val.size() next bytes into val.data(). + bool ReadBytes(ArrayView val); // Appends next `len` bytes from the buffer to `val`. Returns false // if there is less than `len` bytes left. bool ReadString(std::string* val, size_t len); + // Same as `ReadString` except that the returned string_view will point into + // the internal buffer (no additional buffer allocation). + bool ReadStringView(absl::string_view* val, size_t len); // Moves current position `size` bytes forward. Returns false if // there is less than `size` bytes left in the buffer. Consume doesn't @@ -159,15 +195,26 @@ class ByteBufferReader { // after this call. bool Consume(size_t size); - protected: - void Construct(const char* bytes, size_t size); + private: + void Construct(const uint8_t* bytes, size_t size); + bool ReadBytes(uint8_t* val, size_t len); - const char* bytes_; + const uint8_t* bytes_; size_t size_; size_t start_; size_t end_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::ByteBufferReader; +using ::webrtc::ByteBufferWriter; +using ::webrtc::ByteBufferWriterT; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_BYTE_BUFFER_H_ diff --git a/rtc_base/byte_buffer_unittest.cc b/rtc_base/byte_buffer_unittest.cc index 4f8043c98f..6bf532deb9 100644 --- a/rtc_base/byte_buffer_unittest.cc +++ b/rtc_base/byte_buffer_unittest.cc @@ -12,46 +12,69 @@ #include -#include "rtc_base/arraysize.h" +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "rtc_base/buffer.h" #include "rtc_base/byte_order.h" +#include "test/gmock.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { + +using ::testing::ElementsAre; +using ::testing::ElementsAreArray; + +TEST(ByteBufferTest, WriterAccessors) { + // To be changed into ByteBufferWriter when base type is converted. + ByteBufferWriterT> buffer; + buffer.WriteString("abc"); + EXPECT_EQ(buffer.Length(), 3U); + EXPECT_THAT(buffer.DataView(), ElementsAre('a', 'b', 'c')); + EXPECT_EQ(absl::string_view("abc"), buffer.DataAsStringView()); + + buffer.WriteUInt8(0); + EXPECT_STREQ(buffer.DataAsCharPointer(), "abc"); + EXPECT_STREQ(reinterpret_cast(buffer.Data()), "abc"); +} TEST(ByteBufferTest, TestByteOrder) { uint16_t n16 = 1; uint32_t n32 = 1; uint64_t n64 = 1; - EXPECT_EQ(n16, NetworkToHost16(HostToNetwork16(n16))); - EXPECT_EQ(n32, NetworkToHost32(HostToNetwork32(n32))); - EXPECT_EQ(n64, NetworkToHost64(HostToNetwork64(n64))); + EXPECT_EQ(n16, webrtc::NetworkToHost16(webrtc::HostToNetwork16(n16))); + EXPECT_EQ(n32, webrtc::NetworkToHost32(webrtc::HostToNetwork32(n32))); + EXPECT_EQ(n64, webrtc::NetworkToHost64(webrtc::HostToNetwork64(n64))); - if (IsHostBigEndian()) { + if (webrtc::IsHostBigEndian()) { // The host is the network (big) endian. - EXPECT_EQ(n16, HostToNetwork16(n16)); - EXPECT_EQ(n32, HostToNetwork32(n32)); - EXPECT_EQ(n64, HostToNetwork64(n64)); + EXPECT_EQ(n16, webrtc::HostToNetwork16(n16)); + EXPECT_EQ(n32, webrtc::HostToNetwork32(n32)); + EXPECT_EQ(n64, webrtc::HostToNetwork64(n64)); // GetBE converts big endian to little endian here. - EXPECT_EQ(n16 >> 8, GetBE16(&n16)); - EXPECT_EQ(n32 >> 24, GetBE32(&n32)); - EXPECT_EQ(n64 >> 56, GetBE64(&n64)); + EXPECT_EQ(n16 >> 8, webrtc::GetBE16(&n16)); + EXPECT_EQ(n32 >> 24, webrtc::GetBE32(&n32)); + EXPECT_EQ(n64 >> 56, webrtc::GetBE64(&n64)); } else { // The host is little endian. - EXPECT_NE(n16, HostToNetwork16(n16)); - EXPECT_NE(n32, HostToNetwork32(n32)); - EXPECT_NE(n64, HostToNetwork64(n64)); + EXPECT_NE(n16, webrtc::HostToNetwork16(n16)); + EXPECT_NE(n32, webrtc::HostToNetwork32(n32)); + EXPECT_NE(n64, webrtc::HostToNetwork64(n64)); // GetBE converts little endian to big endian here. - EXPECT_EQ(GetBE16(&n16), HostToNetwork16(n16)); - EXPECT_EQ(GetBE32(&n32), HostToNetwork32(n32)); - EXPECT_EQ(GetBE64(&n64), HostToNetwork64(n64)); + EXPECT_EQ(webrtc::GetBE16(&n16), webrtc::HostToNetwork16(n16)); + EXPECT_EQ(webrtc::GetBE32(&n32), webrtc::HostToNetwork32(n32)); + EXPECT_EQ(webrtc::GetBE64(&n64), webrtc::HostToNetwork64(n64)); // GetBE converts little endian to big endian here. 
- EXPECT_EQ(n16 << 8, GetBE16(&n16)); - EXPECT_EQ(n32 << 24, GetBE32(&n32)); - EXPECT_EQ(n64 << 56, GetBE64(&n64)); + EXPECT_EQ(n16 << 8, webrtc::GetBE16(&n16)); + EXPECT_EQ(n32 << 24, webrtc::GetBE32(&n32)); + EXPECT_EQ(n64 << 56, webrtc::GetBE64(&n64)); } } @@ -83,92 +106,100 @@ TEST(ByteBufferTest, TestBufferLength) { TEST(ByteBufferTest, TestReadWriteBuffer) { ByteBufferWriter buffer; - ByteBufferReader read_buf(nullptr, 0); + ByteBufferReader read_buf(ArrayView(nullptr, 0)); uint8_t ru8; EXPECT_FALSE(read_buf.ReadUInt8(&ru8)); // Write and read uint8_t. uint8_t wu8 = 1; buffer.WriteUInt8(wu8); - ByteBufferReader read_buf1(buffer.Data(), buffer.Length()); + ByteBufferReader read_buf1(buffer); EXPECT_TRUE(read_buf1.ReadUInt8(&ru8)); EXPECT_EQ(wu8, ru8); - EXPECT_EQ(0U, read_buf1.Length()); + EXPECT_EQ(read_buf1.Length(), 0U); buffer.Clear(); // Write and read uint16_t. uint16_t wu16 = (1 << 8) + 1; buffer.WriteUInt16(wu16); - ByteBufferReader read_buf2(buffer.Data(), buffer.Length()); + ByteBufferReader read_buf2(buffer); uint16_t ru16; EXPECT_TRUE(read_buf2.ReadUInt16(&ru16)); EXPECT_EQ(wu16, ru16); - EXPECT_EQ(0U, read_buf2.Length()); + EXPECT_EQ(read_buf2.Length(), 0U); buffer.Clear(); // Write and read uint24. uint32_t wu24 = (3 << 16) + (2 << 8) + 1; buffer.WriteUInt24(wu24); - ByteBufferReader read_buf3(buffer.Data(), buffer.Length()); + ByteBufferReader read_buf3(buffer); uint32_t ru24; EXPECT_TRUE(read_buf3.ReadUInt24(&ru24)); EXPECT_EQ(wu24, ru24); - EXPECT_EQ(0U, read_buf3.Length()); + EXPECT_EQ(read_buf3.Length(), 0U); buffer.Clear(); // Write and read uint32_t. uint32_t wu32 = (4 << 24) + (3 << 16) + (2 << 8) + 1; buffer.WriteUInt32(wu32); - ByteBufferReader read_buf4(buffer.Data(), buffer.Length()); + ByteBufferReader read_buf4(buffer); uint32_t ru32; EXPECT_TRUE(read_buf4.ReadUInt32(&ru32)); EXPECT_EQ(wu32, ru32); - EXPECT_EQ(0U, read_buf3.Length()); + EXPECT_EQ(read_buf3.Length(), 0U); buffer.Clear(); // Write and read uint64_t. uint32_t another32 = (8 << 24) + (7 << 16) + (6 << 8) + 5; uint64_t wu64 = (static_cast(another32) << 32) + wu32; buffer.WriteUInt64(wu64); - ByteBufferReader read_buf5(buffer.Data(), buffer.Length()); + ByteBufferReader read_buf5(buffer); uint64_t ru64; EXPECT_TRUE(read_buf5.ReadUInt64(&ru64)); EXPECT_EQ(wu64, ru64); - EXPECT_EQ(0U, read_buf5.Length()); + EXPECT_EQ(read_buf5.Length(), 0U); buffer.Clear(); // Write and read string. 
std::string write_string("hello"); buffer.WriteString(write_string); - ByteBufferReader read_buf6(buffer.Data(), buffer.Length()); + ByteBufferReader read_buf6(buffer); std::string read_string; EXPECT_TRUE(read_buf6.ReadString(&read_string, write_string.size())); EXPECT_EQ(write_string, read_string); - EXPECT_EQ(0U, read_buf6.Length()); + EXPECT_EQ(read_buf6.Length(), 0U); buffer.Clear(); // Write and read bytes - char write_bytes[] = "foo"; + uint8_t write_bytes[] = {3, 2, 1}; + buffer.Write(ArrayView(write_bytes, 3)); + ByteBufferReader read_buf7(buffer); + uint8_t read_bytes[3]; + EXPECT_TRUE(read_buf7.ReadBytes(read_bytes)); + EXPECT_THAT(read_bytes, ElementsAreArray(write_bytes)); + EXPECT_EQ(read_buf7.Length(), 0U); + buffer.Clear(); + + // Write and read bytes with deprecated function + // TODO: issues.webrtc.org/42225170 - delete +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" buffer.WriteBytes(write_bytes, 3); - ByteBufferReader read_buf7(buffer.Data(), buffer.Length()); - char read_bytes[3]; - EXPECT_TRUE(read_buf7.ReadBytes(read_bytes, 3)); - for (int i = 0; i < 3; ++i) { - EXPECT_EQ(write_bytes[i], read_bytes[i]); - } - EXPECT_EQ(0U, read_buf7.Length()); +#pragma clang diagnostic pop + ByteBufferReader read_buf75(buffer); + EXPECT_TRUE(read_buf75.ReadBytes(read_bytes)); + EXPECT_THAT(read_bytes, ElementsAreArray(write_bytes)); + EXPECT_EQ(read_buf75.Length(), 0U); buffer.Clear(); // Write and read reserved buffer space - char* write_dst = buffer.ReserveWriteBuffer(3); + uint8_t* write_dst = buffer.ReserveWriteBuffer(3); memcpy(write_dst, write_bytes, 3); - ByteBufferReader read_buf8(buffer.Data(), buffer.Length()); + ByteBufferReader read_buf8(buffer); memset(read_bytes, 0, 3); - EXPECT_TRUE(read_buf8.ReadBytes(read_bytes, 3)); - for (int i = 0; i < 3; ++i) { - EXPECT_EQ(write_bytes[i], read_bytes[i]); - } - EXPECT_EQ(0U, read_buf8.Length()); + EXPECT_TRUE(read_buf8.ReadBytes(read_bytes)); + EXPECT_THAT(read_bytes, ElementsAreArray(write_dst, 3)); + EXPECT_EQ(read_buf8.Length(), 0U); buffer.Clear(); // Write and read in order. 
@@ -177,7 +208,7 @@ TEST(ByteBufferTest, TestReadWriteBuffer) { buffer.WriteUInt24(wu24); buffer.WriteUInt32(wu32); buffer.WriteUInt64(wu64); - ByteBufferReader read_buf9(buffer.Data(), buffer.Length()); + ByteBufferReader read_buf9(buffer); EXPECT_TRUE(read_buf9.ReadUInt8(&ru8)); EXPECT_EQ(wu8, ru8); EXPECT_TRUE(read_buf9.ReadUInt16(&ru16)); @@ -188,10 +219,80 @@ TEST(ByteBufferTest, TestReadWriteBuffer) { EXPECT_EQ(wu32, ru32); EXPECT_TRUE(read_buf9.ReadUInt64(&ru64)); EXPECT_EQ(wu64, ru64); - EXPECT_EQ(0U, read_buf9.Length()); + EXPECT_EQ(read_buf9.Length(), 0U); buffer.Clear(); } +TEST(ByteBufferTest, TestWriteCArray) { + // Write and read data + const uint8_t write_data[3] = {3, 2, 1}; + ByteBufferWriter buffer; + buffer.Write(write_data); + EXPECT_EQ(buffer.Length(), 3U); + ByteBufferReader read_buf10(buffer); + uint8_t read_bytes[3] = {}; + EXPECT_TRUE(read_buf10.ReadBytes(read_bytes)); + EXPECT_THAT(read_bytes, ElementsAreArray(write_data)); + EXPECT_EQ(read_buf10.Length(), 0U); +} + +TEST(ByteBufferTest, TestWriteBuffer) { + const uint8_t write_data[3] = {3, 2, 1}; + // Write and read buffer + Buffer write_buffer(write_data); + ByteBufferWriter buffer; + buffer.Write(write_buffer); + ByteBufferReader read_buf11(buffer); + uint8_t read_bytes[3] = {}; + EXPECT_TRUE(read_buf11.ReadBytes(read_bytes)); + EXPECT_THAT(read_bytes, ElementsAreArray(write_buffer)); + EXPECT_EQ(read_buf11.Length(), 0U); +} + +TEST(ByteBufferTest, TestWriteArrayView) { + const uint8_t write_data[3] = {3, 2, 1}; + // Write and read arrayview + ArrayView write_view(write_data); + ByteBufferWriter buffer; + buffer.Write(write_view); + ByteBufferReader read_buf12(buffer); + uint8_t read_bytes[3] = {}; + EXPECT_TRUE(read_buf12.ReadBytes(read_bytes)); + EXPECT_THAT(read_bytes, ElementsAreArray(write_view)); + EXPECT_EQ(read_buf12.Length(), 0U); +} + +TEST(ByteBufferTest, TestWriteConsume) { + ByteBufferWriter writer; + // Write and read uint8_t. + uint8_t wu8 = 1; + writer.WriteUInt8(wu8); + Buffer consumed = std::move(writer).Extract(); + EXPECT_THAT(consumed, ElementsAre(wu8)); +} + +TEST(ByteBufferTest, TestReadStringView) { + const absl::string_view tests[] = {"hello", " ", "string_view"}; + std::string buffer; + for (const auto& test : tests) + buffer += test; + + ArrayView bytes(reinterpret_cast(&buffer[0]), + buffer.size()); + + ByteBufferReader read_buf(bytes); + size_t consumed = 0; + for (const auto& test : tests) { + absl::string_view sv; + EXPECT_TRUE(read_buf.ReadStringView(&sv, test.length())); + EXPECT_EQ(sv.compare(test), 0); + // The returned string view should point directly into the original + // string. 
+ EXPECT_EQ(&sv[0], &buffer[0 + consumed]); + consumed += sv.size(); + } +} + TEST(ByteBufferTest, TestReadWriteUVarint) { ByteBufferWriter write_buffer; size_t size = 0; @@ -217,34 +318,63 @@ TEST(ByteBufferTest, TestReadWriteUVarint) { size += 6; EXPECT_EQ(size, write_buffer.Length()); - ByteBufferReader read_buffer(write_buffer.Data(), write_buffer.Length()); + ByteBufferReader read_buffer(write_buffer); EXPECT_EQ(size, read_buffer.Length()); uint64_t val1, val2, val3, val4, val5; ASSERT_TRUE(read_buffer.ReadUVarint(&val1)); - EXPECT_EQ(1u, val1); + EXPECT_EQ(val1, 1U); --size; EXPECT_EQ(size, read_buffer.Length()); ASSERT_TRUE(read_buffer.ReadUVarint(&val2)); - EXPECT_EQ(2u, val2); + EXPECT_EQ(val2, 2U); --size; EXPECT_EQ(size, read_buffer.Length()); ASSERT_TRUE(read_buffer.ReadUVarint(&val3)); - EXPECT_EQ(27u, val3); + EXPECT_EQ(val3, 27U); --size; EXPECT_EQ(size, read_buffer.Length()); ASSERT_TRUE(read_buffer.ReadUVarint(&val4)); - EXPECT_EQ(149u, val4); + EXPECT_EQ(val4, 149U); size -= 2; EXPECT_EQ(size, read_buffer.Length()); ASSERT_TRUE(read_buffer.ReadUVarint(&val5)); - EXPECT_EQ(68719476736u, val5); + EXPECT_EQ(val5, 68719476736U); size -= 6; EXPECT_EQ(size, read_buffer.Length()); } -} // namespace rtc +TEST(ByteBufferTest, ReadFromArrayView) { + const uint8_t buf[] = {'a', 'b', 'c'}; + ArrayView view(buf, 3); + + ByteBufferReader read_buffer(view); + uint8_t val; + EXPECT_TRUE(read_buffer.ReadUInt8(&val)); + EXPECT_EQ(val, 'a'); + EXPECT_TRUE(read_buffer.ReadUInt8(&val)); + EXPECT_EQ(val, 'b'); + EXPECT_TRUE(read_buffer.ReadUInt8(&val)); + EXPECT_EQ(val, 'c'); + EXPECT_FALSE(read_buffer.ReadUInt8(&val)); +} + +TEST(ByteBufferTest, ReadToArrayView) { + const uint8_t buf[] = {'a', 'b', 'c'}; + ArrayView stored_view(buf, 3); + ByteBufferReader read_buffer(stored_view); + uint8_t result[] = {'1', '2', '3'}; + EXPECT_TRUE(read_buffer.ReadBytes(MakeArrayView(result, 2))); + EXPECT_EQ(result[0], 'a'); + EXPECT_EQ(result[1], 'b'); + EXPECT_EQ(result[2], '3'); + EXPECT_TRUE(read_buffer.ReadBytes(MakeArrayView(&result[2], 1))); + EXPECT_EQ(result[2], 'c'); + EXPECT_FALSE(read_buffer.ReadBytes(MakeArrayView(result, 1))); +} + +} // namespace webrtc diff --git a/rtc_base/byte_order.h b/rtc_base/byte_order.h index b8f8ae9f7a..95c8ebc864 100644 --- a/rtc_base/byte_order.h +++ b/rtc_base/byte_order.h @@ -44,8 +44,8 @@ #include #include #else -#include -#endif // defined(WEBRTC_WIN) +#include // no-presubmit-check +#endif // defined(WEBRTC_WIN) #if defined(WEBRTC_ARCH_LITTLE_ENDIAN) #define htobe16(v) htons(v) @@ -96,7 +96,7 @@ #error "Missing byte order functions for this arch." #endif // defined(WEBRTC_MAC) -namespace rtc { +namespace webrtc { // Reading and writing of little and big-endian numbers from memory @@ -207,6 +207,34 @@ inline uint64_t NetworkToHost64(uint64_t n) { return be64toh(n); } +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
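// Effect of the compatibility shim added here: existing rtc:: call sites keep
// compiling while new code spells out webrtc::. Illustrative sketch only;
// whether WEBRTC_ALLOW_DEPRECATED_NAMESPACES is defined is a build-level
// decision, not something this header controls.
#include <cstdint>
#include "rtc_base/byte_order.h"

void ByteOrderShimSketch() {
  uint8_t buf[2];
  webrtc::SetBE16(buf, 0x1234);         // preferred spelling after this move
  uint16_t legacy = rtc::GetBE16(buf);  // still resolves via the using-decls
  (void)legacy;
}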
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::Get8; +using ::webrtc::GetBE16; +using ::webrtc::GetBE32; +using ::webrtc::GetBE64; +using ::webrtc::GetLE16; +using ::webrtc::GetLE32; +using ::webrtc::GetLE64; +using ::webrtc::HostToNetwork16; +using ::webrtc::HostToNetwork32; +using ::webrtc::HostToNetwork64; +using ::webrtc::IsHostBigEndian; +using ::webrtc::NetworkToHost16; +using ::webrtc::NetworkToHost32; +using ::webrtc::NetworkToHost64; +using ::webrtc::Set8; +using ::webrtc::SetBE16; +using ::webrtc::SetBE32; +using ::webrtc::SetBE64; +using ::webrtc::SetLE16; +using ::webrtc::SetLE32; +using ::webrtc::SetLE64; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_BYTE_ORDER_H_ diff --git a/rtc_base/byte_order_unittest.cc b/rtc_base/byte_order_unittest.cc index 0db230f3ec..da31631452 100644 --- a/rtc_base/byte_order_unittest.cc +++ b/rtc_base/byte_order_unittest.cc @@ -14,7 +14,7 @@ #include "test/gtest.h" -namespace rtc { +namespace webrtc { // Test memory set functions put values into memory in expected order. TEST(ByteOrderTest, TestSet) { @@ -80,4 +80,4 @@ TEST(ByteOrderTest, TestGet) { EXPECT_EQ(UINT64_C(0xefcdab8967452301), GetLE64(buf)); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/callback_list.cc b/rtc_base/callback_list.cc index c452c79b38..e938f96eee 100644 --- a/rtc_base/callback_list.cc +++ b/rtc_base/callback_list.cc @@ -67,8 +67,7 @@ void CallbackListReceivers::RemoveReceivers(const void* removal_tag) { } } -void CallbackListReceivers::Foreach( - rtc::FunctionView fv) { +void CallbackListReceivers::Foreach(FunctionView fv) { RTC_CHECK(!send_in_progress_); bool removals_detected = false; send_in_progress_ = true; diff --git a/rtc_base/callback_list.h b/rtc_base/callback_list.h index a9d71a6562..73a6974a84 100644 --- a/rtc_base/callback_list.h +++ b/rtc_base/callback_list.h @@ -49,7 +49,7 @@ class RTC_EXPORT CallbackListReceivers { void RemoveReceivers(const void* removal_tag); - void Foreach(rtc::FunctionView fv); + void Foreach(FunctionView fv); private: // Special protected pointer value that's used as a removal_tag for diff --git a/rtc_base/checks.cc b/rtc_base/checks.cc index e732a2659d..6fa0514c36 100644 --- a/rtc_base/checks.cc +++ b/rtc_base/checks.cc @@ -59,7 +59,7 @@ void AppendFormat(std::string* s, const char* fmt, ...) { } } // namespace -namespace rtc { +namespace webrtc { namespace webrtc_checks_impl { #if !defined(WEBRTC_CHROMIUM_BUILD) @@ -78,8 +78,8 @@ RTC_NORETURN void WriteFatalLog(absl::string_view output) { abort(); } -RTC_NORETURN void WriteFatalLog(const char* file, - int line, +RTC_NORETURN void WriteFatalLog(const char* /* file */, + int /* line */, absl::string_view output) { WriteFatalLog(output); } @@ -224,17 +224,17 @@ RTC_NORETURN void UnreachableCodeReached() { #endif // !RTC_DCHECK_IS_ON } // namespace webrtc_checks_impl -} // namespace rtc +} // namespace webrtc // Function to call from the C version of the RTC_CHECK and RTC_DCHECK macros. 
RTC_NORETURN void rtc_FatalMessage(const char* file, int line, const char* msg) { #if RTC_CHECK_MSG_ENABLED - static constexpr rtc::webrtc_checks_impl::CheckArgType t[] = { - rtc::webrtc_checks_impl::CheckArgType::kEnd}; - rtc::webrtc_checks_impl::FatalLog(file, line, msg, t); + static constexpr webrtc::webrtc_checks_impl::CheckArgType t[] = { + webrtc::webrtc_checks_impl::CheckArgType::kEnd}; + webrtc::webrtc_checks_impl::FatalLog(file, line, msg, t); #else - rtc::webrtc_checks_impl::FatalLog(file, line); + webrtc::webrtc_checks_impl::FatalLog(file, line); #endif } diff --git a/rtc_base/checks.h b/rtc_base/checks.h index 99fee97d0a..91414807f6 100644 --- a/rtc_base/checks.h +++ b/rtc_base/checks.h @@ -11,7 +11,7 @@ #ifndef RTC_BASE_CHECKS_H_ #define RTC_BASE_CHECKS_H_ -// If you for some reson need to know if DCHECKs are on, test the value of +// If you for some reason need to know if DCHECKs are on, test the value of // RTC_DCHECK_IS_ON. (Test its value, not if it's defined; it'll always be // defined, to either a true or a false value.) #if !defined(NDEBUG) || defined(DCHECK_ALWAYS_ON) @@ -52,14 +52,18 @@ RTC_NORETURN void rtc_FatalMessage(const char* file, int line, const char* msg); #ifdef __cplusplus // C++ version. +#include #include +#include -#include "absl/meta/type_traits.h" +#include "absl/strings/has_absl_stringify.h" +#include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" #include "api/scoped_refptr.h" #include "rtc_base/numerics/safe_compare.h" #include "rtc_base/system/inline.h" #include "rtc_base/system/rtc_export.h" +#include "rtc_base/type_traits.h" // The macros here print a message to stderr and abort under various // conditions. All will accept additional stream messages. For example: @@ -98,7 +102,10 @@ RTC_NORETURN void rtc_FatalMessage(const char* file, int line, const char* msg); // // - RTC_FATAL() aborts unconditionally. -namespace rtc { +// TODO(bugs.webrtc.org/42232595): Remove this macro once Chrome has migrated. +#define RTC_CHECKS_IN_WEBRTC_NAMESPACE 1 + +namespace webrtc { namespace webrtc_checks_impl { enum class CheckArgType : int8_t { kEnd = 0, @@ -202,22 +209,24 @@ inline Val MakeVal(const void* x) { template inline Val MakeVal( - const rtc::scoped_refptr& p) { + const webrtc::scoped_refptr& p) { return {p.get()}; } // The enum class types are not implicitly convertible to arithmetic types. template ::value && - !std::is_arithmetic::value>* = nullptr> -inline decltype(MakeVal(std::declval>())) MakeVal( + std::enable_if_t::value && + !absl::HasAbslStringify::value && + !std::is_arithmetic::value>* = nullptr> +inline decltype(MakeVal(std::declval>())) MakeVal( T x) { - return {static_cast>(x)}; + return {static_cast>(x)}; } -template ()))* = nullptr> +template ::value>* = nullptr> ToStringVal MakeVal(const T& x) { - return {ToLogString(x)}; + return {absl::StrCat(x)}; } // Ephemeral type that represents the result of the logging << operator. 
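// With the switch above from the ToLogString hook to absl::HasAbslStringify /
// absl::StrCat, any type providing an AbslStringify extension can be streamed
// into RTC_CHECK / RTC_DCHECK messages. Sketch with a made-up type, mirroring
// the pattern exercised in checks_unittest.cc further down:
struct ConnectionTagSketch {
  template <typename Sink>
  friend void AbslStringify(Sink& sink, const ConnectionTagSketch& /*tag*/) {
    sink.Append("connection-tag");
  }
};
// Usage: RTC_CHECK(connection_ok) << ConnectionTagSketch();
// The fatal message then contains "connection-tag".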
@@ -230,16 +239,16 @@ class LogStreamer<> final { public: template ())), - absl::enable_if_t::value || - std::is_enum::value>* = nullptr> + std::enable_if_t::value || + std::is_enum::value>* = nullptr> RTC_FORCE_INLINE LogStreamer operator<<(U arg) const { return LogStreamer(MakeVal(arg), this); } template ())), - absl::enable_if_t::value && - !std::is_enum::value>* = nullptr> + std::enable_if_t::value && + !std::is_enum::value>* = nullptr> RTC_FORCE_INLINE LogStreamer operator<<(const U& arg) const { return LogStreamer(MakeVal(arg), this); } @@ -282,16 +291,16 @@ class LogStreamer final { template ())), - absl::enable_if_t::value || - std::is_enum::value>* = nullptr> + std::enable_if_t::value || + std::is_enum::value>* = nullptr> RTC_FORCE_INLINE LogStreamer operator<<(U arg) const { return LogStreamer(MakeVal(arg), this); } template ())), - absl::enable_if_t::value && - !std::is_enum::value>* = nullptr> + std::enable_if_t::value && + !std::is_enum::value>* = nullptr> RTC_FORCE_INLINE LogStreamer operator<<(const U& arg) const { return LogStreamer(MakeVal(arg), this); } @@ -375,17 +384,17 @@ RTC_NORETURN RTC_EXPORT void UnreachableCodeReached(); // in a particularly convoluted way with an extra ?: because that appears to be // the simplest construct that keeps Visual Studio from complaining about // condition being unused). -#define RTC_EAT_STREAM_PARAMETERS(ignored) \ - (true ? true : ((void)(ignored), true)) \ - ? static_cast(0) \ - : ::rtc::webrtc_checks_impl::FatalLogCall("", 0, "") & \ - ::rtc::webrtc_checks_impl::LogStreamer<>() +#define RTC_EAT_STREAM_PARAMETERS(ignored) \ + (true ? true : ((void)(ignored), true)) \ + ? static_cast(0) \ + : ::webrtc::webrtc_checks_impl::FatalLogCall("", 0, "") & \ + ::webrtc::webrtc_checks_impl::LogStreamer<>() // Call RTC_EAT_STREAM_PARAMETERS with an argument that fails to compile if // values of the same types as `a` and `b` can't be compared with the given // operation, and that would evaluate `a` and `b` if evaluated. #define RTC_EAT_STREAM_PARAMETERS_OP(op, a, b) \ - RTC_EAT_STREAM_PARAMETERS(((void)::rtc::Safe##op(a, b))) + RTC_EAT_STREAM_PARAMETERS(((void)::webrtc::Safe##op(a, b))) // RTC_CHECK dies with a fatal error if condition is not true. It is *not* // controlled by NDEBUG or anything else, so the check will be executed @@ -397,34 +406,34 @@ RTC_NORETURN RTC_EXPORT void UnreachableCodeReached(); // RTC_CHECK_OP is a helper macro for binary operators. // Don't use this macro directly in your code, use RTC_CHECK_EQ et al below. #if RTC_CHECK_MSG_ENABLED -#define RTC_CHECK(condition) \ - (condition) ? static_cast(0) \ - : ::rtc::webrtc_checks_impl::FatalLogCall( \ - __FILE__, __LINE__, #condition) & \ - ::rtc::webrtc_checks_impl::LogStreamer<>() +#define RTC_CHECK(condition) \ + (condition) ? static_cast(0) \ + : ::webrtc::webrtc_checks_impl::FatalLogCall( \ + __FILE__, __LINE__, #condition) & \ + ::webrtc::webrtc_checks_impl::LogStreamer<>() #define RTC_CHECK_OP(name, op, val1, val2) \ - ::rtc::Safe##name((val1), (val2)) \ + ::webrtc::Safe##name((val1), (val2)) \ ? static_cast(0) \ - : ::rtc::webrtc_checks_impl::FatalLogCall( \ + : ::webrtc::webrtc_checks_impl::FatalLogCall( \ __FILE__, __LINE__, #val1 " " #op " " #val2) & \ - ::rtc::webrtc_checks_impl::LogStreamer<>() << (val1) << (val2) + ::webrtc::webrtc_checks_impl::LogStreamer<>() << (val1) << (val2) #else -#define RTC_CHECK(condition) \ - (condition) ? static_cast(0) \ - : true ? 
::rtc::webrtc_checks_impl::FatalLogCall(__FILE__, __LINE__, \ - "") & \ - ::rtc::webrtc_checks_impl::LogStreamer<>() \ - : ::rtc::webrtc_checks_impl::FatalLogCall("", 0, "") & \ - ::rtc::webrtc_checks_impl::LogStreamer<>() - -#define RTC_CHECK_OP(name, op, val1, val2) \ - ::rtc::Safe##name((val1), (val2)) ? static_cast(0) \ - : true ? ::rtc::webrtc_checks_impl::FatalLogCall(__FILE__, __LINE__, \ - "") & \ - ::rtc::webrtc_checks_impl::LogStreamer<>() \ - : ::rtc::webrtc_checks_impl::FatalLogCall("", 0, "") & \ - ::rtc::webrtc_checks_impl::LogStreamer<>() +#define RTC_CHECK(condition) \ + (condition) ? static_cast(0) \ + : true ? ::webrtc::webrtc_checks_impl::FatalLogCall(__FILE__, \ + __LINE__, "") & \ + ::webrtc::webrtc_checks_impl::LogStreamer<>() \ + : ::webrtc::webrtc_checks_impl::FatalLogCall("", 0, "") & \ + ::webrtc::webrtc_checks_impl::LogStreamer<>() + +#define RTC_CHECK_OP(name, op, val1, val2) \ + ::webrtc::Safe##name((val1), (val2)) ? static_cast(0) \ + : true ? ::webrtc::webrtc_checks_impl::FatalLogCall(__FILE__, \ + __LINE__, "") & \ + ::webrtc::webrtc_checks_impl::LogStreamer<>() \ + : ::webrtc::webrtc_checks_impl::FatalLogCall("", 0, "") & \ + ::webrtc::webrtc_checks_impl::LogStreamer<>() #endif #define RTC_CHECK_EQ(val1, val2) RTC_CHECK_OP(Eq, ==, val1, val2) @@ -460,16 +469,16 @@ RTC_NORETURN RTC_EXPORT void UnreachableCodeReached(); // Kills the process with an error message. Never returns. Use when you wish to // assert that a point in the code is never reached. -#define RTC_CHECK_NOTREACHED() \ - do { \ - ::rtc::webrtc_checks_impl::UnreachableCodeReached( \ - RTC_UNREACHABLE_FILE_AND_LINE_CALL_ARGS); \ +#define RTC_CHECK_NOTREACHED() \ + do { \ + ::webrtc::webrtc_checks_impl::UnreachableCodeReached( \ + RTC_UNREACHABLE_FILE_AND_LINE_CALL_ARGS); \ } while (0) -#define RTC_FATAL() \ - ::rtc::webrtc_checks_impl::FatalLogCall(__FILE__, __LINE__, \ - "FATAL()") & \ - ::rtc::webrtc_checks_impl::LogStreamer<>() +#define RTC_FATAL() \ + ::webrtc::webrtc_checks_impl::FatalLogCall(__FILE__, __LINE__, \ + "FATAL()") & \ + ::webrtc::webrtc_checks_impl::LogStreamer<>() // Performs the integer division a/b and returns the result. CHECKs that the // remainder is zero. @@ -479,7 +488,15 @@ inline T CheckedDivExact(T a, T b) { return a / b; } +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::CheckedDivExact; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #else // __cplusplus not defined // C version. 
Lacks many features compared to the C++ version, but usage diff --git a/rtc_base/checks_unittest.cc b/rtc_base/checks_unittest.cc index 95deba9f1c..4a3dfa422b 100644 --- a/rtc_base/checks_unittest.cc +++ b/rtc_base/checks_unittest.cc @@ -10,8 +10,14 @@ #include "rtc_base/checks.h" +#include "test/gmock.h" #include "test/gtest.h" +namespace { + +using ::testing::HasSubstr; +using ::testing::Not; + TEST(ChecksTest, ExpressionNotEvaluatedWhenCheckPassing) { int i = 0; RTC_CHECK(true) << "i=" << ++i; @@ -19,6 +25,14 @@ TEST(ChecksTest, ExpressionNotEvaluatedWhenCheckPassing) { } #if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) + +struct StructWithStringfy { + template + friend void AbslStringify(Sink& sink, const StructWithStringfy& /*self*/) { + sink.Append("absl-stringify"); + } +}; + TEST(ChecksDeathTest, Checks) { #if RTC_CHECK_MSG_ENABLED EXPECT_DEATH(RTC_FATAL() << "message", @@ -44,6 +58,9 @@ TEST(ChecksDeathTest, Checks) { "# last system error: \\w+\n" "# Check failed: false\n" "# Hi there!"); + + StructWithStringfy t; + EXPECT_DEATH(RTC_CHECK(false) << t, HasSubstr("absl-stringify")); #else EXPECT_DEATH(RTC_FATAL() << "message", "\n\n#\n" @@ -68,6 +85,12 @@ TEST(ChecksDeathTest, Checks) { "# last system error: \\w+\n" "# Check failed.\n" "# "); + + // Should compile, but shouldn't try to stringify 't' + StructWithStringfy t; + EXPECT_DEATH(RTC_CHECK(false) << t, Not(HasSubstr("absl-stringify"))); #endif // RTC_CHECK_MSG_ENABLED } #endif // GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) + +} // namespace diff --git a/rtc_base/containers/BUILD.gn b/rtc_base/containers/BUILD.gn index 621b6122a3..a2a21157b7 100644 --- a/rtc_base/containers/BUILD.gn +++ b/rtc_base/containers/BUILD.gn @@ -19,8 +19,8 @@ rtc_library("flat_containers_internal") { deps = [ "..:checks", "../system:no_unique_address", + "//third_party/abseil-cpp/absl/algorithm:container", ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] visibility = [ ":*" ] } @@ -52,5 +52,4 @@ rtc_library("unittests") { "//testing/gmock:gmock", "//testing/gtest:gtest", ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } diff --git a/rtc_base/copy_on_write_buffer.cc b/rtc_base/copy_on_write_buffer.cc index d8ab53cb24..51bdb17f07 100644 --- a/rtc_base/copy_on_write_buffer.cc +++ b/rtc_base/copy_on_write_buffer.cc @@ -14,7 +14,7 @@ #include "absl/strings/string_view.h" -namespace rtc { +namespace webrtc { CopyOnWriteBuffer::CopyOnWriteBuffer() : offset_(0), size_(0) { RTC_DCHECK(IsConsistent()); @@ -124,4 +124,4 @@ void CopyOnWriteBuffer::UnshareAndEnsureCapacity(size_t new_capacity) { RTC_DCHECK(IsConsistent()); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/copy_on_write_buffer.h b/rtc_base/copy_on_write_buffer.h index 8332ee6f62..6dfa289147 100644 --- a/rtc_base/copy_on_write_buffer.h +++ b/rtc_base/copy_on_write_buffer.h @@ -27,7 +27,7 @@ #include "rtc_base/system/rtc_export.h" #include "rtc_base/type_traits.h" -namespace rtc { +namespace webrtc { class RTC_EXPORT CopyOnWriteBuffer { public: @@ -132,6 +132,8 @@ class RTC_EXPORT CopyOnWriteBuffer { return buffer_->data() + offset_; } + bool empty() const { return size_ == 0; } + size_t size() const { RTC_DCHECK(IsConsistent()); return size_; @@ -142,6 +144,9 @@ class RTC_EXPORT CopyOnWriteBuffer { return buffer_ ? 
buffer_->capacity() - offset_ : 0; } + const uint8_t* begin() const { return data(); } + const uint8_t* end() const { return data() + size_; } + CopyOnWriteBuffer& operator=(const CopyOnWriteBuffer& buf) { RTC_DCHECK(IsConsistent()); RTC_DCHECK(buf.IsConsistent()); @@ -302,7 +307,7 @@ class RTC_EXPORT CopyOnWriteBuffer { } } - // buffer_ is either null, or points to an rtc::Buffer with capacity > 0. + // buffer_ is either null, or points to an webrtc::Buffer with capacity > 0. scoped_refptr buffer_; // This buffer may represent a slice of a original data. size_t offset_; // Offset of a current slice in the original data in buffer_. @@ -311,6 +316,14 @@ class RTC_EXPORT CopyOnWriteBuffer { // Should be 0 if the buffer_ is empty. }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::CopyOnWriteBuffer; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_COPY_ON_WRITE_BUFFER_H_ diff --git a/rtc_base/copy_on_write_buffer_unittest.cc b/rtc_base/copy_on_write_buffer_unittest.cc index 8a9fc4e2f4..25d6d9bcba 100644 --- a/rtc_base/copy_on_write_buffer_unittest.cc +++ b/rtc_base/copy_on_write_buffer_unittest.cc @@ -14,7 +14,7 @@ #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { @@ -46,11 +46,20 @@ void EnsureBuffersDontShareData(const CopyOnWriteBuffer& buf1, TEST(CopyOnWriteBufferTest, TestCreateEmptyData) { CopyOnWriteBuffer buf(static_cast(nullptr), 0); + EXPECT_TRUE(buf.empty()); EXPECT_EQ(buf.size(), 0u); EXPECT_EQ(buf.capacity(), 0u); EXPECT_EQ(buf.data(), nullptr); } +TEST(CopyOnWriteBufferTest, CreateEmptyDataWithCapacity) { + CopyOnWriteBuffer buf(0, 16); + EXPECT_TRUE(buf.empty()); + EXPECT_EQ(buf.size(), 0u); + EXPECT_EQ(buf.capacity(), 16u); + EXPECT_NE(buf.MutableData(), nullptr); +} + TEST(CopyOnWriteBufferTest, TestMoveConstruct) { EXPECT_TRUE(std::is_nothrow_move_constructible_v); @@ -60,9 +69,11 @@ TEST(CopyOnWriteBufferTest, TestMoveConstruct) { const uint8_t* buf1_data = buf1.cdata(); CopyOnWriteBuffer buf2(std::move(buf1)); + EXPECT_TRUE(buf1.empty()); EXPECT_EQ(buf1.size(), 0u); EXPECT_EQ(buf1.capacity(), 0u); EXPECT_EQ(buf1.data(), nullptr); + EXPECT_FALSE(buf2.empty()); EXPECT_EQ(buf2.size(), buf1_size); EXPECT_EQ(buf2.capacity(), buf1_capacity); EXPECT_EQ(buf2.data(), buf1_data); @@ -127,6 +138,7 @@ TEST(CopyOnWriteBufferTest, SetEmptyData) { buf.SetData(nullptr, 0); EXPECT_EQ(0u, buf.size()); + EXPECT_TRUE(buf.empty()); } TEST(CopyOnWriteBufferTest, SetDataNoMoreThanCapacityDoesntCauseReallocation) { @@ -358,8 +370,8 @@ TEST(CopyOnWriteBufferTest, SlicesAreIndependent) { TEST(CopyOnWriteBufferTest, AcceptsVectorLikeTypes) { std::vector a = {1, 2}; std::vector b = {3, 4}; - rtc::ArrayView c(a); - rtc::ArrayView d(b); + ArrayView c(a); + ArrayView d(b); CopyOnWriteBuffer a_buf(a); CopyOnWriteBuffer b_buf(b); @@ -375,4 +387,4 @@ TEST(CopyOnWriteBufferTest, AcceptsVectorLikeTypes) { EXPECT_EQ(all.size(), 8U); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/cpu_time.cc b/rtc_base/cpu_time.cc index d3fee50c49..b0d187c411 100644 --- a/rtc_base/cpu_time.cc +++ b/rtc_base/cpu_time.cc @@ -10,6 +10,8 @@ #include "rtc_base/cpu_time.h" +#include + #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" @@ -39,7 +41,7 @@ const int64_t kNanosecsPerFiletime = 100; } // namespace #endif -namespace rtc { +namespace 
webrtc { int64_t GetProcessCpuTimeNanos() { #if defined(WEBRTC_FUCHSIA) @@ -56,7 +58,7 @@ int64_t GetProcessCpuTimeNanos() { #elif defined(WEBRTC_LINUX) struct timespec ts; if (clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &ts) == 0) { - return ts.tv_sec * kNumNanosecsPerSec + ts.tv_nsec; + return ts.tv_sec * webrtc::kNumNanosecsPerSec + ts.tv_nsec; } else { RTC_LOG_ERR(LS_ERROR) << "clock_gettime() failed."; } @@ -104,7 +106,7 @@ int64_t GetThreadCpuTimeNanos() { #elif defined(WEBRTC_LINUX) struct timespec ts; if (clock_gettime(CLOCK_THREAD_CPUTIME_ID, &ts) == 0) { - return ts.tv_sec * kNumNanosecsPerSec + ts.tv_nsec; + return ts.tv_sec * webrtc::kNumNanosecsPerSec + ts.tv_nsec; } else { RTC_LOG_ERR(LS_ERROR) << "clock_gettime() failed."; } @@ -142,4 +144,4 @@ int64_t GetThreadCpuTimeNanos() { return -1; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/cpu_time.h b/rtc_base/cpu_time.h index f712f623a0..870a5a5ffa 100644 --- a/rtc_base/cpu_time.h +++ b/rtc_base/cpu_time.h @@ -13,7 +13,7 @@ #include -namespace rtc { +namespace webrtc { // Returns total CPU time of a current process in nanoseconds. // Time base is unknown, therefore use only to calculate deltas. @@ -23,6 +23,15 @@ int64_t GetProcessCpuTimeNanos(); // Time base is unknown, therefore use only to calculate deltas. int64_t GetThreadCpuTimeNanos(); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::GetProcessCpuTimeNanos; +using ::webrtc::GetThreadCpuTimeNanos; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_CPU_TIME_H_ diff --git a/rtc_base/cpu_time_unittest.cc b/rtc_base/cpu_time_unittest.cc index 94f82f4306..fbdd5faa39 100644 --- a/rtc_base/cpu_time_unittest.cc +++ b/rtc_base/cpu_time_unittest.cc @@ -10,6 +10,8 @@ #include "rtc_base/cpu_time.h" +#include + #include "rtc_base/platform_thread.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/sleep.h" @@ -33,15 +35,15 @@ const int kWorkingThreads = 2; void WorkingFunction(int64_t* counter) { *counter = 0; int64_t stop_cpu_time = - rtc::GetThreadCpuTimeNanos() + - kProcessingTimeMillisecs * rtc::kNumNanosecsPerMillisec; - while (rtc::GetThreadCpuTimeNanos() < stop_cpu_time) { + webrtc::GetThreadCpuTimeNanos() + + kProcessingTimeMillisecs * webrtc::kNumNanosecsPerMillisec; + while (webrtc::GetThreadCpuTimeNanos() < stop_cpu_time) { (*counter)++; } } } // namespace -namespace rtc { +namespace webrtc { // A minimal test which can be run on instrumented builds, so that they're at // least exercising the code to check for memory leaks/etc. @@ -78,13 +80,13 @@ TEST(CpuTimeTest, MAYBE_TEST(TwoThreads)) { // Therefore GetThreadCpuTime is not a wall clock. EXPECT_LE(thread_duration_nanos, (kProcessingTimeMillisecs - kAllowedErrorMillisecs) * - kNumNanosecsPerMillisec); + webrtc::kNumNanosecsPerMillisec); // Total process time is at least twice working threads' CPU time. // Therefore process and thread times are correctly related. EXPECT_GE(process_duration_nanos, kWorkingThreads * (kProcessingTimeMillisecs - kAllowedErrorMillisecs) * - kNumNanosecsPerMillisec); + webrtc::kNumNanosecsPerMillisec); } TEST(CpuTimeTest, MAYBE_TEST(Sleeping)) { @@ -96,7 +98,7 @@ TEST(CpuTimeTest, MAYBE_TEST(Sleeping)) { // Therefore GetProcessCpuTime is not a wall clock. 
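// GetProcessCpuTimeNanos() and GetThreadCpuTimeNanos() have an unspecified
// time base, so only deltas are meaningful, which is what these tests rely on.
// Sketch (DoWork is a placeholder workload, not a real function):
#include <cstdint>
#include "rtc_base/cpu_time.h"
#include "rtc_base/logging.h"

void DoWork();  // placeholder

void MeasureCpuSketch() {
  int64_t start_ns = webrtc::GetThreadCpuTimeNanos();
  DoWork();
  int64_t used_ns = webrtc::GetThreadCpuTimeNanos() - start_ns;
  RTC_LOG(LS_INFO) << "CPU used: " << used_ns << " ns";
}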
EXPECT_LE(process_duration_nanos, (kProcessingTimeMillisecs - kAllowedErrorMillisecs) * - kNumNanosecsPerMillisec); + webrtc::kNumNanosecsPerMillisec); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/crc32.cc b/rtc_base/crc32.cc index 42f86cb268..700a0f4952 100644 --- a/rtc_base/crc32.cc +++ b/rtc_base/crc32.cc @@ -12,7 +12,7 @@ #include "rtc_base/arraysize.h" -namespace rtc { +namespace webrtc { // This implementation is based on the sample implementation in RFC 1952. @@ -47,4 +47,4 @@ uint32_t UpdateCrc32(uint32_t start, const void* buf, size_t len) { return c ^ 0xFFFFFFFF; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/crc32.h b/rtc_base/crc32.h index 93376a5a12..6d208b9664 100644 --- a/rtc_base/crc32.h +++ b/rtc_base/crc32.h @@ -18,7 +18,7 @@ #include "absl/strings/string_view.h" -namespace rtc { +namespace webrtc { // Updates a CRC32 checksum with `len` bytes from `buf`. `initial` holds the // checksum result from the previous update; for the first call, it should be 0. @@ -32,6 +32,15 @@ inline uint32_t ComputeCrc32(absl::string_view str) { return ComputeCrc32(str.data(), str.size()); } +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::ComputeCrc32; +using ::webrtc::UpdateCrc32; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_CRC32_H_ diff --git a/rtc_base/crc32_unittest.cc b/rtc_base/crc32_unittest.cc index 60997b27ae..36314f78c9 100644 --- a/rtc_base/crc32_unittest.cc +++ b/rtc_base/crc32_unittest.cc @@ -14,7 +14,7 @@ #include "test/gtest.h" -namespace rtc { +namespace webrtc { TEST(Crc32Test, TestBasic) { EXPECT_EQ(0U, ComputeCrc32("")); @@ -34,4 +34,4 @@ TEST(Crc32Test, TestMultipleUpdates) { EXPECT_EQ(0x171A3F5FU, c); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/crypt_string.cc b/rtc_base/crypt_string.cc deleted file mode 100644 index cf38a5bbaa..0000000000 --- a/rtc_base/crypt_string.cc +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2015 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "rtc_base/crypt_string.h" - -namespace rtc { - -size_t EmptyCryptStringImpl::GetLength() const { - return 0; -} - -void EmptyCryptStringImpl::CopyTo(char* dest, bool nullterminate) const { - if (nullterminate) { - *dest = '\0'; - } -} - -std::string EmptyCryptStringImpl::UrlEncode() const { - return ""; -} - -CryptStringImpl* EmptyCryptStringImpl::Copy() const { - return new EmptyCryptStringImpl(); -} - -void EmptyCryptStringImpl::CopyRawTo(std::vector* dest) const { - dest->clear(); -} - -CryptString::CryptString() : impl_(new EmptyCryptStringImpl()) {} - -CryptString::CryptString(const CryptString& other) - : impl_(other.impl_->Copy()) {} - -CryptString::CryptString(const CryptStringImpl& impl) : impl_(impl.Copy()) {} - -CryptString::~CryptString() = default; - -} // namespace rtc diff --git a/rtc_base/crypt_string.h b/rtc_base/crypt_string.h deleted file mode 100644 index 470bd00c42..0000000000 --- a/rtc_base/crypt_string.h +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2004 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_CRYPT_STRING_H_ -#define RTC_BASE_CRYPT_STRING_H_ - -#include - -#include -#include -#include - -namespace rtc { - -class CryptStringImpl { - public: - virtual ~CryptStringImpl() {} - virtual size_t GetLength() const = 0; - virtual void CopyTo(char* dest, bool nullterminate) const = 0; - virtual std::string UrlEncode() const = 0; - virtual CryptStringImpl* Copy() const = 0; - virtual void CopyRawTo(std::vector* dest) const = 0; -}; - -class EmptyCryptStringImpl : public CryptStringImpl { - public: - ~EmptyCryptStringImpl() override {} - size_t GetLength() const override; - void CopyTo(char* dest, bool nullterminate) const override; - std::string UrlEncode() const override; - CryptStringImpl* Copy() const override; - void CopyRawTo(std::vector* dest) const override; -}; - -class CryptString { - public: - CryptString(); - size_t GetLength() const { return impl_->GetLength(); } - void CopyTo(char* dest, bool nullterminate) const { - impl_->CopyTo(dest, nullterminate); - } - CryptString(const CryptString& other); - explicit CryptString(const CryptStringImpl& impl); - ~CryptString(); - CryptString& operator=(const CryptString& other) { - if (this != &other) { - impl_.reset(other.impl_->Copy()); - } - return *this; - } - void Clear() { impl_.reset(new EmptyCryptStringImpl()); } - std::string UrlEncode() const { return impl_->UrlEncode(); } - void CopyRawTo(std::vector* dest) const { - return impl_->CopyRawTo(dest); - } - - private: - std::unique_ptr impl_; -}; - -} // namespace rtc - -#endif // RTC_BASE_CRYPT_STRING_H_ diff --git a/rtc_base/helpers.cc b/rtc_base/crypto_random.cc similarity index 91% rename from rtc_base/helpers.cc rename to rtc_base/crypto_random.cc index 84cbe5fba1..c14315cfe9 100644 --- a/rtc_base/helpers.cc +++ b/rtc_base/crypto_random.cc @@ -8,13 +8,14 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "rtc_base/helpers.h" +#include "rtc_base/crypto_random.h" #include #include #include #include +#include #include "absl/strings/string_view.h" #include "rtc_base/checks.h" @@ -24,7 +25,7 @@ // Protect against max macro inclusion. 
#undef max -namespace rtc { +namespace webrtc { namespace { @@ -33,7 +34,7 @@ class SecureRandomGenerator : public RandomGenerator { public: SecureRandomGenerator() {} ~SecureRandomGenerator() override {} - bool Init(const void* seed, size_t len) override { return true; } + bool Init(const void* /* seed */, size_t /* len */) override { return true; } bool Generate(void* buf, size_t len) override { return (RAND_bytes(reinterpret_cast(buf), len) > 0); } @@ -44,7 +45,7 @@ class TestRandomGenerator : public RandomGenerator { public: TestRandomGenerator() : seed_(7) {} ~TestRandomGenerator() override {} - bool Init(const void* seed, size_t len) override { return true; } + bool Init(const void* /* seed */, size_t /* len */) override { return true; } bool Generate(void* buf, size_t len) override { for (size_t i = 0; i < len; ++i) { static_cast(buf)[i] = static_cast(GetRandom()); @@ -74,8 +75,8 @@ static const char kUuidDigit17[4] = {'8', '9', 'a', 'b'}; // Lock for the global random generator, only needed to serialize changing the // generator. -webrtc::Mutex& GetRandomGeneratorLock() { - static webrtc::Mutex& mutex = *new webrtc::Mutex(); +Mutex& GetRandomGeneratorLock() { + static Mutex& mutex = *new Mutex(); return mutex; } @@ -95,17 +96,17 @@ RandomGenerator& Rng() { } // namespace void SetDefaultRandomGenerator() { - webrtc::MutexLock lock(&GetRandomGeneratorLock()); + MutexLock lock(&GetRandomGeneratorLock()); GetGlobalRng().reset(new SecureRandomGenerator()); } void SetRandomGenerator(std::unique_ptr generator) { - webrtc::MutexLock lock(&GetRandomGeneratorLock()); + MutexLock lock(&GetRandomGeneratorLock()); GetGlobalRng() = std::move(generator); } void SetRandomTestMode(bool test) { - webrtc::MutexLock lock(&GetRandomGeneratorLock()); + MutexLock lock(&GetRandomGeneratorLock()); if (!test) { GetGlobalRng().reset(new SecureRandomGenerator()); } else { @@ -224,8 +225,4 @@ double CreateRandomDouble() { std::numeric_limits::epsilon()); } -double GetNextMovingAverage(double prev_average, double cur, double ratio) { - return (ratio * prev_average + cur) / (ratio + 1); -} - -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/helpers.h b/rtc_base/crypto_random.h similarity index 78% rename from rtc_base/helpers.h rename to rtc_base/crypto_random.h index 51ca672ab5..aaa180356f 100644 --- a/rtc_base/helpers.h +++ b/rtc_base/crypto_random.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef RTC_BASE_HELPERS_H_ -#define RTC_BASE_HELPERS_H_ +#ifndef RTC_BASE_CRYPTO_RANDOM_H_ +#define RTC_BASE_CRYPTO_RANDOM_H_ #include #include @@ -20,7 +20,7 @@ #include "absl/strings/string_view.h" #include "rtc_base/system/rtc_export.h" -namespace rtc { +namespace webrtc { // Interface for RNG implementations. class RandomGenerator { @@ -83,10 +83,25 @@ uint32_t CreateRandomNonZeroId(); // Generates a random double between 0.0 (inclusive) and 1.0 (exclusive). double CreateRandomDouble(); -// Compute moving average with the given ratio between the previous average -// value and the current value. -double GetNextMovingAverage(double prev_average, double cur, double ratio); +} // namespace webrtc +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
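// Callers that used to include rtc_base/helpers.h now include
// rtc_base/crypto_random.h; apart from the namespace move the functions are
// unchanged (only GetNextMovingAverage no longer lives in this header).
// Sketch, not part of this patch:
#include <string>
#include "rtc_base/crypto_random.h"

std::string MakeIdentifiersSketch() {
  std::string token = webrtc::CreateRandomString(16);
  std::string uuid = webrtc::CreateRandomUuid();
  return token + ":" + uuid;
}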
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::CreateRandomData; +using ::webrtc::CreateRandomDouble; +using ::webrtc::CreateRandomId; +using ::webrtc::CreateRandomId64; +using ::webrtc::CreateRandomNonZeroId; +using ::webrtc::CreateRandomString; +using ::webrtc::CreateRandomUuid; +using ::webrtc::InitRandom; +using ::webrtc::RandomGenerator; +using ::webrtc::SetDefaultRandomGenerator; +using ::webrtc::SetRandomGenerator; +using ::webrtc::SetRandomTestMode; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES -#endif // RTC_BASE_HELPERS_H_ +#endif // RTC_BASE_CRYPTO_RANDOM_H_ diff --git a/rtc_base/helpers_unittest.cc b/rtc_base/crypto_random_unittest.cc similarity index 98% rename from rtc_base/helpers_unittest.cc rename to rtc_base/crypto_random_unittest.cc index 015b4d0a7c..f3e80ce1ba 100644 --- a/rtc_base/helpers_unittest.cc +++ b/rtc_base/crypto_random_unittest.cc @@ -8,18 +8,19 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "rtc_base/helpers.h" +#include "rtc_base/crypto_random.h" #include #include #include +#include #include "rtc_base/buffer.h" #include "test/gmock.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { using ::testing::_; @@ -168,4 +169,4 @@ TEST(RandomTest, TestSetRandomGenerator) { } } // namespace -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/data_rate_limiter.cc b/rtc_base/data_rate_limiter.cc index 7288257d76..387f925eb6 100644 --- a/rtc_base/data_rate_limiter.cc +++ b/rtc_base/data_rate_limiter.cc @@ -10,7 +10,9 @@ #include "rtc_base/data_rate_limiter.h" -namespace rtc { +#include + +namespace webrtc { bool DataRateLimiter::CanUse(size_t desired, double time) { return ((time > period_end_ && desired <= max_per_period_) || @@ -26,4 +28,4 @@ void DataRateLimiter::Use(size_t used, double time) { used_in_period_ += used; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/data_rate_limiter.h b/rtc_base/data_rate_limiter.h index 502a2f74b2..088cb63755 100644 --- a/rtc_base/data_rate_limiter.h +++ b/rtc_base/data_rate_limiter.h @@ -15,7 +15,7 @@ #include "rtc_base/system/rtc_export.h" -namespace rtc { +namespace webrtc { // Limits the rate of use to a certain maximum quantity per period of // time. Use, for example, for simple bandwidth throttling. @@ -53,6 +53,14 @@ class RTC_EXPORT DataRateLimiter { double period_start_; double period_end_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::DataRateLimiter; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_DATA_RATE_LIMITER_H_ diff --git a/rtc_base/data_rate_limiter_unittest.cc b/rtc_base/data_rate_limiter_unittest.cc index efd472889c..3b5a48f930 100644 --- a/rtc_base/data_rate_limiter_unittest.cc +++ b/rtc_base/data_rate_limiter_unittest.cc @@ -12,7 +12,7 @@ #include "test/gtest.h" -namespace rtc { +namespace webrtc { TEST(RateLimiterTest, TestCanUse) { // Diet: Can eat 2,000 calories per day. 
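// DataRateLimiter tracks how much of a fixed quota has been consumed in the
// current period: CanUse() asks whether a further amount still fits, Use()
// records consumption. Sketch, not part of this patch:
#include <cstddef>
#include "rtc_base/data_rate_limiter.h"

void SendIfAllowedSketch(webrtc::DataRateLimiter& limiter,
                         size_t bytes,
                         double now_seconds) {
  if (limiter.CanUse(bytes, now_seconds)) {
    limiter.Use(bytes, now_seconds);
    // ... actually transmit `bytes` here ...
  }
}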
@@ -57,4 +57,4 @@ TEST(RateLimiterTest, TestCanUse) { EXPECT_FALSE(limiter.CanUse(1001, tuesday)); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/deprecated/recursive_critical_section.cc b/rtc_base/deprecated/recursive_critical_section.cc index 540819888e..d9c033c834 100644 --- a/rtc_base/deprecated/recursive_critical_section.cc +++ b/rtc_base/deprecated/recursive_critical_section.cc @@ -10,12 +10,10 @@ #include "rtc_base/deprecated/recursive_critical_section.h" -#include - #include "rtc_base/checks.h" #include "rtc_base/platform_thread_types.h" -#include "rtc_base/synchronization/yield.h" #include "rtc_base/system/unused.h" +#include "rtc_base/thread_annotations.h" #if RTC_DCHECK_IS_ON #define RTC_CS_DEBUG_CODE(x) x @@ -23,7 +21,7 @@ #define RTC_CS_DEBUG_CODE(x) #endif // !RTC_DCHECK_IS_ON -namespace rtc { +namespace webrtc { RecursiveCriticalSection::RecursiveCriticalSection() { #if defined(WEBRTC_WIN) @@ -215,4 +213,4 @@ CritScope::~CritScope() { cs_->Leave(); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/deprecated/recursive_critical_section.h b/rtc_base/deprecated/recursive_critical_section.h index da1e92b9b0..e347e483e1 100644 --- a/rtc_base/deprecated/recursive_critical_section.h +++ b/rtc_base/deprecated/recursive_critical_section.h @@ -11,11 +11,10 @@ #ifndef RTC_BASE_DEPRECATED_RECURSIVE_CRITICAL_SECTION_H_ #define RTC_BASE_DEPRECATED_RECURSIVE_CRITICAL_SECTION_H_ -#include - #include "rtc_base/platform_thread_types.h" #include "rtc_base/thread_annotations.h" +// IWYU pragma: begin_keep #if defined(WEBRTC_WIN) // clang-format off // clang formating would change include order. @@ -39,8 +38,9 @@ #if defined(WEBRTC_MAC) && !RTC_USE_NATIVE_MUTEX_ON_MAC #include #endif +// IWYU pragma: end_keep -namespace rtc { +namespace webrtc { // NOTE: This class is deprecated. Please use webrtc::Mutex instead! // Search using https://www.google.com/?q=recursive+lock+considered+harmful @@ -102,6 +102,15 @@ class RTC_SCOPED_LOCKABLE CritScope { const RecursiveCriticalSection* const cs_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
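// As the header comment notes, RecursiveCriticalSection is deprecated in
// favour of webrtc::Mutex, which is not reentrant. Sketch of the preferred
// pattern (class and member names are illustrative):
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"

class CounterSketch {
 public:
  void Add(int delta) {
    webrtc::MutexLock lock(&mutex_);
    value_ += delta;
  }

 private:
  webrtc::Mutex mutex_;
  int value_ RTC_GUARDED_BY(mutex_) = 0;
};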
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::CritScope; +using ::webrtc::RecursiveCriticalSection; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_DEPRECATED_RECURSIVE_CRITICAL_SECTION_H_ diff --git a/rtc_base/deprecated/recursive_critical_section_unittest.cc b/rtc_base/deprecated/recursive_critical_section_unittest.cc index 38f003d555..4090eb16a7 100644 --- a/rtc_base/deprecated/recursive_critical_section_unittest.cc +++ b/rtc_base/deprecated/recursive_critical_section_unittest.cc @@ -13,25 +13,26 @@ #include #include +#include #include #include -#include #include #include -#include "absl/base/attributes.h" +#include "api/units/time_delta.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/platform_thread.h" #include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { -constexpr webrtc::TimeDelta kLongTime = webrtc::TimeDelta::Seconds(10); +constexpr TimeDelta kLongTime = TimeDelta::Seconds(10); constexpr int kNumThreads = 16; constexpr int kOperationsToRun = 1000; @@ -189,7 +190,7 @@ class PerfTestData { ~PerfTestData() {} void AddToCounter(int add) { - rtc::CritScope cs(&lock_); + CritScope cs(&lock_); my_counter_ += add; if (my_counter_ == expected_count_) event_->Set(); @@ -285,4 +286,4 @@ TEST(RecursiveCriticalSectionTest, DISABLED_Performance) { t.Stop(); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/dscp.h b/rtc_base/dscp.h index 3c39ca66da..db3eb74cb5 100644 --- a/rtc_base/dscp.h +++ b/rtc_base/dscp.h @@ -11,7 +11,7 @@ #ifndef RTC_BASE_DSCP_H_ #define RTC_BASE_DSCP_H_ -namespace rtc { +namespace webrtc { // Differentiated Services Code Point. // See http://tools.ietf.org/html/rfc2474 for details. enum DiffServCodePoint { @@ -40,6 +40,37 @@ enum DiffServCodePoint { DSCP_CS7 = 56, // Control messages }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::DiffServCodePoint; +using ::webrtc::DSCP_AF11; +using ::webrtc::DSCP_AF12; +using ::webrtc::DSCP_AF13; +using ::webrtc::DSCP_AF21; +using ::webrtc::DSCP_AF22; +using ::webrtc::DSCP_AF23; +using ::webrtc::DSCP_AF31; +using ::webrtc::DSCP_AF32; +using ::webrtc::DSCP_AF33; +using ::webrtc::DSCP_AF41; +using ::webrtc::DSCP_AF42; +using ::webrtc::DSCP_AF43; +using ::webrtc::DSCP_CS0; +using ::webrtc::DSCP_CS1; +using ::webrtc::DSCP_CS2; +using ::webrtc::DSCP_CS3; +using ::webrtc::DSCP_CS4; +using ::webrtc::DSCP_CS5; +using ::webrtc::DSCP_CS6; +using ::webrtc::DSCP_CS7; +using ::webrtc::DSCP_DEFAULT; +using ::webrtc::DSCP_EF; +using ::webrtc::DSCP_NO_CHANGE; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_DSCP_H_ diff --git a/rtc_base/event.cc b/rtc_base/event.cc index c2f6f8abab..b78e249cc0 100644 --- a/rtc_base/event.cc +++ b/rtc_base/event.cc @@ -21,13 +21,14 @@ #error "Must define either WEBRTC_WIN or WEBRTC_POSIX." 
#endif -#include "absl/types/optional.h" +#include + #include "rtc_base/checks.h" #include "rtc_base/synchronization/yield_policy.h" #include "rtc_base/system/warn_current_thread_is_deadlocked.h" #include "rtc_base/time_utils.h" -namespace rtc { +namespace webrtc { using ::webrtc::TimeDelta; @@ -124,7 +125,7 @@ timespec GetTimespec(TimeDelta duration_from_now) { timeval tv; gettimeofday(&tv, nullptr); ts.tv_sec = tv.tv_sec; - ts.tv_nsec = tv.tv_usec * kNumNanosecsPerMicrosec; + ts.tv_nsec = tv.tv_usec * webrtc::kNumNanosecsPerMicrosec; #endif // Add the specified number of milliseconds to it. @@ -148,27 +149,26 @@ bool Event::Wait(TimeDelta give_up_after, TimeDelta warn_after) { // Instant when we'll log a warning message (because we've been waiting so // long it might be a bug), but not yet give up waiting. nullopt if we // shouldn't log a warning. - const absl::optional warn_ts = - warn_after >= give_up_after - ? absl::nullopt - : absl::make_optional(GetTimespec(warn_after)); + const std::optional warn_ts = + warn_after >= give_up_after ? std::nullopt + : std::make_optional(GetTimespec(warn_after)); // Instant when we'll stop waiting and return an error. nullopt if we should // never give up. - const absl::optional give_up_ts = + const std::optional give_up_ts = give_up_after.IsPlusInfinity() - ? absl::nullopt - : absl::make_optional(GetTimespec(give_up_after)); + ? std::nullopt + : std::make_optional(GetTimespec(give_up_after)); ScopedYieldPolicy::YieldExecution(); pthread_mutex_lock(&event_mutex_); // Wait for `event_cond_` to trigger and `event_status_` to be set, with the // given timeout (or without a timeout if none is given). - const auto wait = [&](const absl::optional timeout_ts) { + const auto wait = [&](const std::optional timeout_ts) { int error = 0; while (!event_status_ && error == 0) { - if (timeout_ts == absl::nullopt) { + if (timeout_ts == std::nullopt) { error = pthread_cond_wait(&event_cond_, &event_mutex_); } else { #if USE_PTHREAD_COND_TIMEDWAIT_MONOTONIC_NP @@ -184,7 +184,7 @@ bool Event::Wait(TimeDelta give_up_after, TimeDelta warn_after) { }; int error; - if (warn_ts == absl::nullopt) { + if (warn_ts == std::nullopt) { error = wait(give_up_ts); } else { error = wait(warn_ts); @@ -207,4 +207,4 @@ bool Event::Wait(TimeDelta give_up_after, TimeDelta warn_after) { #endif -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/event.h b/rtc_base/event.h index 12f6a7dca2..0f5527e411 100644 --- a/rtc_base/event.h +++ b/rtc_base/event.h @@ -23,12 +23,12 @@ #include "rtc_base/synchronization/yield_policy.h" -namespace rtc { +namespace webrtc { // RTC_DISALLOW_WAIT() utility // -// Sets a stack-scoped flag that disallows use of `rtc::Event::Wait` by means -// of raising a DCHECK when a call to `rtc::Event::Wait()` is made.. +// Sets a stack-scoped flag that disallows use of `webrtc::Event::Wait` by means +// of raising a DCHECK when a call to `webrtc::Event::Wait()` is made.. // This is useful to guard synchronization-free scopes against regressions. // // Example of what this would catch (`ScopeToProtect` calls `Foo`): @@ -56,8 +56,8 @@ namespace rtc { class Event { public: // TODO(bugs.webrtc.org/14366): Consider removing this redundant alias. 
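// Event::Wait() takes TimeDelta directly: a finite duration to give up after,
// kForever to block indefinitely, plus an optional separate "warn after"
// duration (see kDefaultWarnDuration just below). Sketch, not part of this
// patch:
#include "api/units/time_delta.h"
#include "rtc_base/event.h"

void WaitForSignalSketch(webrtc::Event& done) {
  // Bounded wait: returns false on timeout.
  if (!done.Wait(webrtc::TimeDelta::Seconds(1))) {
    // Unbounded wait: blocks until another thread calls done.Set().
    done.Wait(webrtc::Event::kForever);
  }
}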
- static constexpr webrtc::TimeDelta kForever = - webrtc::TimeDelta::PlusInfinity(); + static constexpr TimeDelta kForever = TimeDelta::PlusInfinity(); + static constexpr TimeDelta kDefaultWarnDuration = TimeDelta::Seconds(3); Event(); Event(bool manual_reset, bool initially_signaled); @@ -78,12 +78,12 @@ class Event { // // Returns true if the event was signaled, false if there was a timeout or // some other error. - bool Wait(webrtc::TimeDelta give_up_after, webrtc::TimeDelta warn_after); + bool Wait(TimeDelta give_up_after, TimeDelta warn_after); // Waits with the given timeout and a reasonable default warning timeout. - bool Wait(webrtc::TimeDelta give_up_after) { + bool Wait(TimeDelta give_up_after) { return Wait(give_up_after, give_up_after.IsPlusInfinity() - ? webrtc::TimeDelta::Seconds(3) + ? kDefaultWarnDuration : kForever); } @@ -99,7 +99,7 @@ class Event { }; // These classes are provided for compatibility with Chromium. -// The rtc::Event implementation is overriden inside of Chromium for the +// The webrtc::Event implementation is overriden inside of Chromium for the // purposes of detecting when threads are blocked that shouldn't be as well as // to use the more accurate event implementation that's there than is provided // by default on some platforms (e.g. Windows). @@ -128,10 +128,20 @@ class ScopedDisallowWait { public: void YieldExecution() override { RTC_DCHECK_NOTREACHED(); } } handler_; - rtc::ScopedYieldPolicy policy{&handler_}; + webrtc::ScopedYieldPolicy policy{&handler_}; }; #endif +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::Event; +using ::webrtc::ScopedAllowBaseSyncPrimitives; +using ::webrtc::ScopedAllowBaseSyncPrimitivesForTesting; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_EVENT_H_ diff --git a/rtc_base/event_tracer.cc b/rtc_base/event_tracer.cc index 992a2b5e08..f42ee2cebf 100644 --- a/rtc_base/event_tracer.cc +++ b/rtc_base/event_tracer.cc @@ -7,11 +7,19 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ + #include "rtc_base/event_tracer.h" +#include + +#include "rtc_base/trace_event.h" + +#if defined(RTC_USE_PERFETTO) +#include "rtc_base/trace_categories.h" +#include "third_party/perfetto/include/perfetto/tracing/tracing.h" +#else #include #include -#include #include #include @@ -28,21 +36,27 @@ #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" -#include "rtc_base/trace_event.h" - -// This is a guesstimate that should be enough in most cases. 
-static const size_t kEventLoggerArgsStrBufferInitialSize = 256; -static const size_t kTraceArgBufferLength = 32; +#endif namespace webrtc { namespace { +#if !defined(RTC_USE_PERFETTO) GetCategoryEnabledPtr g_get_category_enabled_ptr = nullptr; AddTraceEventPtr g_add_trace_event_ptr = nullptr; +#endif } // namespace +#if defined(RTC_USE_PERFETTO) +void RegisterPerfettoTrackEvents() { + if (perfetto::Tracing::IsInitialized()) { + TrackEvent::Register(); + } +} +#else + void SetupEventTracer(GetCategoryEnabledPtr get_category_enabled_ptr, AddTraceEventPtr add_trace_event_ptr) { g_get_category_enabled_ptr = get_category_enabled_ptr; @@ -73,10 +87,26 @@ void EventTracer::AddTraceEvent(char phase, arg_names, arg_types, arg_values, flags); } } +#endif -} // namespace webrtc +#if defined(RTC_USE_PERFETTO) +// TODO(bugs.webrtc.org/15917): Implement for perfetto. +namespace tracing { +void SetupInternalTracer(bool enable_all_categories) {} +bool StartInternalCapture(absl::string_view filename) { + return false; +} +void StartInternalCaptureToFile(FILE* file) {} +void StopInternalCapture() {} +void ShutdownInternalTracer() {} + +} // namespace tracing +#else + +// This is a guesstimate that should be enough in most cases. +static const size_t kEventLoggerArgsStrBufferInitialSize = 256; +static const size_t kTraceArgBufferLength = 32; -namespace rtc { namespace tracing { namespace { @@ -96,8 +126,8 @@ class EventLogger final { const unsigned char* arg_types, const unsigned long long* arg_values, uint64_t timestamp, - int pid, - rtc::PlatformThreadId thread_id) { + int /* pid */, + PlatformThreadId thread_id) { std::vector args(num_args); for (int i = 0; i < num_args; ++i) { TraceArg& arg = args[i]; @@ -114,7 +144,7 @@ class EventLogger final { arg.value.as_string = str_copy; } } - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); trace_events_.push_back( {name, category_enabled, phase, args, timestamp, 1, thread_id}); } @@ -123,15 +153,14 @@ class EventLogger final { // https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview void Log() { RTC_DCHECK(output_file_); - static constexpr webrtc::TimeDelta kLoggingInterval = - webrtc::TimeDelta::Millis(100); + static constexpr TimeDelta kLoggingInterval = TimeDelta::Millis(100); fprintf(output_file_, "{ \"traceEvents\": [\n"); bool has_logged_event = false; while (true) { bool shutting_down = shutdown_event_.Wait(kLoggingInterval); std::vector events; { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); trace_events_.swap(events); } std::string args_str; @@ -191,7 +220,7 @@ class EventLogger final { output_file_ = file; output_file_owned_ = owned; { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); // Since the atomic fast-path for adding events to the queue can be // bypassed while the logging thread is shutting down there may be some // stale events in the queue, hence the vector needs to be cleared to not @@ -206,12 +235,14 @@ class EventLogger final { // Finally start, everything should be set up now. logging_thread_ = PlatformThread::SpawnJoinable([this] { Log(); }, "EventTracingThread"); - TRACE_EVENT_INSTANT0("webrtc", "EventLogger::Start"); + TRACE_EVENT_INSTANT0("webrtc", "EventLogger::Start", + TRACE_EVENT_SCOPE_GLOBAL); } void Stop() { RTC_DCHECK(thread_checker_.IsCurrent()); - TRACE_EVENT_INSTANT0("webrtc", "EventLogger::Stop"); + TRACE_EVENT_INSTANT0("webrtc", "EventLogger::Stop", + TRACE_EVENT_SCOPE_GLOBAL); // Try to stop. Abort if we're not currently logging. 
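// As in the calls just above, TRACE_EVENT_INSTANT0 now takes an explicit scope
// argument, while scoped duration events keep their two-argument form. Sketch:
#include "rtc_base/trace_event.h"

void TraceSketch() {
  TRACE_EVENT0("webrtc", "TraceSketch");  // spans this function's scope
  TRACE_EVENT_INSTANT0("webrtc", "checkpoint", TRACE_EVENT_SCOPE_GLOBAL);
}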
int one = 1; if (g_event_logging_active.compare_exchange_strong(one, 0)) @@ -251,7 +282,7 @@ class EventLogger final { std::vector args; uint64_t timestamp; int pid; - rtc::PlatformThreadId tid; + PlatformThreadId tid; }; static std::string TraceArgValueAsString(TraceArg arg) { @@ -314,11 +345,11 @@ class EventLogger final { return output; } - webrtc::Mutex mutex_; + Mutex mutex_; std::vector trace_events_ RTC_GUARDED_BY(mutex_); - rtc::PlatformThread logging_thread_; - rtc::Event shutdown_event_; - webrtc::SequenceChecker thread_checker_; + PlatformThread logging_thread_; + Event shutdown_event_; + SequenceChecker thread_checker_; FILE* output_file_ = nullptr; bool output_file_owned_ = false; }; @@ -344,19 +375,19 @@ const unsigned char* InternalEnableAllCategories(const char* name) { void InternalAddTraceEvent(char phase, const unsigned char* category_enabled, const char* name, - unsigned long long id, + unsigned long long /* id */, int num_args, const char** arg_names, const unsigned char* arg_types, const unsigned long long* arg_values, - unsigned char flags) { + unsigned char /* flags */) { // Fast path for when event tracing is inactive. if (g_event_logging_active.load() == 0) return; - g_event_logger.load()->AddTraceEvent( - name, category_enabled, phase, num_args, arg_names, arg_types, arg_values, - rtc::TimeMicros(), 1, rtc::CurrentThreadId()); + g_event_logger.load()->AddTraceEvent(name, category_enabled, phase, num_args, + arg_names, arg_types, arg_values, + TimeMicros(), 1, CurrentThreadId()); } } // namespace @@ -365,9 +396,9 @@ void SetupInternalTracer(bool enable_all_categories) { EventLogger* null_logger = nullptr; RTC_CHECK( g_event_logger.compare_exchange_strong(null_logger, new EventLogger())); - webrtc::SetupEventTracer(enable_all_categories ? InternalEnableAllCategories - : InternalGetCategoryEnabled, - InternalAddTraceEvent); + SetupEventTracer(enable_all_categories ? InternalEnableAllCategories + : InternalGetCategoryEnabled, + InternalAddTraceEvent); } void StartInternalCaptureToFile(FILE* file) { @@ -405,8 +436,11 @@ void ShutdownInternalTracer() { RTC_DCHECK(old_logger); RTC_CHECK(g_event_logger.compare_exchange_strong(old_logger, nullptr)); delete old_logger; - webrtc::SetupEventTracer(nullptr, nullptr); + SetupEventTracer(nullptr, nullptr); } } // namespace tracing -} // namespace rtc + +#endif // defined(RTC_USE_PERFETTO) + +} // namespace webrtc diff --git a/rtc_base/event_tracer.h b/rtc_base/event_tracer.h index dc2eaed669..941c44c0f7 100644 --- a/rtc_base/event_tracer.h +++ b/rtc_base/event_tracer.h @@ -8,6 +8,9 @@ * be found in the AUTHORS file in the root of the source tree. */ +#ifndef RTC_BASE_EVENT_TRACER_H_ +#define RTC_BASE_EVENT_TRACER_H_ + // This file defines the interface for event tracing in WebRTC. // // Event log handlers are set through SetupEventTracer(). User of this API will @@ -23,9 +26,6 @@ // // Parameters for the above two functions are described in trace_event.h. 
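// Typical lifecycle of the internal tracer, whose entry points now live in
// webrtc::tracing (the rtc::tracing aliases below remain for old callers).
// Sketch only; the output path is made up, and under RTC_USE_PERFETTO these
// calls are currently stubs (StartInternalCapture returns false).
#include "rtc_base/event_tracer.h"

void CaptureTraceSketch() {
  webrtc::tracing::SetupInternalTracer();
  webrtc::tracing::StartInternalCapture("/tmp/webrtc_trace.json");
  // ... exercise the code under test ...
  webrtc::tracing::StopInternalCapture();
  webrtc::tracing::ShutdownInternalTracer();
}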
-#ifndef RTC_BASE_EVENT_TRACER_H_ -#define RTC_BASE_EVENT_TRACER_H_ - #include #include "absl/strings/string_view.h" @@ -33,6 +33,9 @@ namespace webrtc { +#if defined(RTC_USE_PERFETTO) +void RegisterPerfettoTrackEvents(); +#else typedef const unsigned char* (*GetCategoryEnabledPtr)(const char* name); typedef void (*AddTraceEventPtr)(char phase, const unsigned char* category_enabled, @@ -67,12 +70,11 @@ class EventTracer { const unsigned long long* arg_values, unsigned char flags); }; +#endif -} // namespace webrtc - -namespace rtc { namespace tracing { // Set up internal event tracer. +// TODO(webrtc:15917): Implement for perfetto. RTC_EXPORT void SetupInternalTracer(bool enable_all_categories = true); RTC_EXPORT bool StartInternalCapture(absl::string_view filename); RTC_EXPORT void StartInternalCaptureToFile(FILE* file); @@ -80,6 +82,21 @@ RTC_EXPORT void StopInternalCapture(); // Make sure we run this, this will tear down the internal tracing. RTC_EXPORT void ShutdownInternalTracer(); } // namespace tracing + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +namespace tracing { +using ::webrtc::tracing::SetupInternalTracer; +using ::webrtc::tracing::ShutdownInternalTracer; +using ::webrtc::tracing::StartInternalCapture; +using ::webrtc::tracing::StartInternalCaptureToFile; +using ::webrtc::tracing::StopInternalCapture; +} // namespace tracing } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_EVENT_TRACER_H_ diff --git a/rtc_base/event_tracer_unittest.cc b/rtc_base/event_tracer_unittest.cc index d0783c3f2d..eae19a2588 100644 --- a/rtc_base/event_tracer_unittest.cc +++ b/rtc_base/event_tracer_unittest.cc @@ -50,16 +50,16 @@ class TestStatistics { namespace webrtc { TEST(EventTracerTest, EventTracerDisabled) { - { TRACE_EVENT0("test", "EventTracerDisabled"); } + { TRACE_EVENT0("webrtc-test", "EventTracerDisabled"); } EXPECT_FALSE(TestStatistics::Get()->Count()); TestStatistics::Get()->Reset(); } -#if RTC_TRACE_EVENTS_ENABLED +#if RTC_TRACE_EVENTS_ENABLED && !defined(RTC_USE_PERFETTO) TEST(EventTracerTest, ScopedTraceEvent) { SetupEventTracer( [](const char* /*name*/) { - return reinterpret_cast("test"); + return reinterpret_cast("webrtc-test"); }, [](char /*phase*/, const unsigned char* /*category_enabled*/, const char* /*name*/, unsigned long long /*id*/, int /*num_args*/, diff --git a/rtc_base/event_unittest.cc b/rtc_base/event_unittest.cc index 17f50dc2d1..44a829f7bd 100644 --- a/rtc_base/event_unittest.cc +++ b/rtc_base/event_unittest.cc @@ -15,32 +15,32 @@ #include "system_wrappers/include/clock.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { TEST(EventTest, InitiallySignaled) { Event event(false, true); - ASSERT_TRUE(event.Wait(webrtc::TimeDelta::Zero())); + ASSERT_TRUE(event.Wait(TimeDelta::Zero())); } TEST(EventTest, ManualReset) { Event event(true, false); - ASSERT_FALSE(event.Wait(webrtc::TimeDelta::Zero())); + ASSERT_FALSE(event.Wait(TimeDelta::Zero())); event.Set(); - ASSERT_TRUE(event.Wait(webrtc::TimeDelta::Zero())); - ASSERT_TRUE(event.Wait(webrtc::TimeDelta::Zero())); + ASSERT_TRUE(event.Wait(TimeDelta::Zero())); + ASSERT_TRUE(event.Wait(TimeDelta::Zero())); event.Reset(); - ASSERT_FALSE(event.Wait(webrtc::TimeDelta::Zero())); + ASSERT_FALSE(event.Wait(TimeDelta::Zero())); } TEST(EventTest, AutoReset) { Event event; - 
ASSERT_FALSE(event.Wait(webrtc::TimeDelta::Zero())); + ASSERT_FALSE(event.Wait(TimeDelta::Zero())); event.Set(); - ASSERT_TRUE(event.Wait(webrtc::TimeDelta::Zero())); - ASSERT_FALSE(event.Wait(webrtc::TimeDelta::Zero())); + ASSERT_TRUE(event.Wait(TimeDelta::Zero())); + ASSERT_FALSE(event.Wait(TimeDelta::Zero())); } class SignalerThread { @@ -50,7 +50,7 @@ class SignalerThread { reader_ = reader; thread_ = PlatformThread::SpawnJoinable( [this] { - while (!stop_event_.Wait(webrtc::TimeDelta::Zero())) { + while (!stop_event_.Wait(TimeDelta::Zero())) { writer_->Set(); reader_->Wait(Event::kForever); } @@ -68,12 +68,11 @@ class SignalerThread { }; TEST(EventTest, UnsignaledWaitDoesNotReturnBeforeTimeout) { - constexpr webrtc::TimeDelta kDuration = webrtc::TimeDelta::Micros(10'499); + constexpr TimeDelta kDuration = TimeDelta::Micros(10'499); Event event; - auto begin = webrtc::Clock::GetRealTimeClock()->CurrentTime(); + auto begin = Clock::GetRealTimeClock()->CurrentTime(); EXPECT_FALSE(event.Wait(kDuration)); - EXPECT_GE(webrtc::Clock::GetRealTimeClock()->CurrentTime(), - begin + kDuration); + EXPECT_GE(Clock::GetRealTimeClock()->CurrentTime(), begin + kDuration); } // These tests are disabled by default and only intended to be run manually. @@ -82,7 +81,7 @@ TEST(EventTest, DISABLED_PerformanceSingleThread) { Event event; for (int i = 0; i < kNumIterations; ++i) { event.Set(); - event.Wait(webrtc::TimeDelta::Zero()); + event.Wait(TimeDelta::Zero()); } } @@ -103,7 +102,7 @@ TEST(EventTest, DISABLED_PerformanceMultiThread) { } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -// Tests that we crash if we attempt to call rtc::Event::Wait while we're +// Tests that we crash if we attempt to call webrtc::Event::Wait while we're // not allowed to (as per `RTC_DISALLOW_WAIT()`). 
TEST(EventTestDeathTest, DisallowEventWait) { Event event; @@ -112,4 +111,4 @@ TEST(EventTestDeathTest, DisallowEventWait) { } #endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/experiments/BUILD.gn b/rtc_base/experiments/BUILD.gn index ac542cc301..9f5573c0a1 100644 --- a/rtc_base/experiments/BUILD.gn +++ b/rtc_base/experiments/BUILD.gn @@ -16,11 +16,7 @@ rtc_library("alr_experiment") { deps = [ "..:logging", "../../api:field_trials_view", - "../../api/transport:field_trial_based_config", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -43,27 +39,9 @@ rtc_library("field_trial_parser") { "../../rtc_base:logging", "../../rtc_base:safe_conversions", "../../rtc_base:stringutils", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - -rtc_library("quality_rampup_experiment") { - sources = [ - "quality_rampup_experiment.cc", - "quality_rampup_experiment.h", - ] - deps = [ - ":field_trial_parser", - "..:logging", - "../../api:field_trials_view", - "../../api/transport:field_trial_based_config", - "../../system_wrappers:field_trial", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("quality_scaler_settings") { @@ -75,25 +53,7 @@ rtc_library("quality_scaler_settings") { ":field_trial_parser", "..:logging", "../../api:field_trials_view", - "../../api/transport:field_trial_based_config", - "../../system_wrappers:field_trial", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] -} - -rtc_library("bandwidth_quality_scaler_settings") { - sources = [ - "bandwidth_quality_scaler_settings.cc", - "bandwidth_quality_scaler_settings.h", - ] - deps = [ - ":field_trial_parser", - "..:logging", - "../../api:field_trials_view", - "../../api/transport:field_trial_based_config", - "../../system_wrappers:field_trial", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("quality_scaling_experiment") { @@ -103,10 +63,12 @@ rtc_library("quality_scaling_experiment") { ] deps = [ "..:logging", + "../../api:field_trials_view", + "../../api/transport:field_trial_based_config", "../../api/video_codecs:video_codecs_api", - "../../system_wrappers:field_trial", + "../../rtc_base/experiments:field_trial_parser", + "//third_party/abseil-cpp/absl/strings", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("normalize_simulcast_size_experiment") { @@ -116,9 +78,8 @@ rtc_library("normalize_simulcast_size_experiment") { ] deps = [ "..:logging", - "../../system_wrappers:field_trial", + "../../api:field_trials_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("balanced_degradation_settings") { @@ -133,20 +94,6 @@ rtc_library("balanced_degradation_settings") { "../../api/video_codecs:video_codecs_api", "../../system_wrappers:field_trial", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] -} - -rtc_library("cpu_speed_experiment") { - sources = [ - "cpu_speed_experiment.cc", - "cpu_speed_experiment.h", - ] - deps = [ - ":field_trial_parser", - "..:logging", - "../../system_wrappers:field_trial", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } 
rtc_library("encoder_info_settings") { @@ -157,27 +104,12 @@ rtc_library("encoder_info_settings") { deps = [ ":field_trial_parser", "..:logging", + "../../api:field_trials_view", "../../api/video_codecs:video_codecs_api", - "../../system_wrappers:field_trial", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } -rtc_library("rtt_mult_experiment") { - sources = [ - "rtt_mult_experiment.cc", - "rtt_mult_experiment.h", - ] - deps = [ - "..:logging", - "../../system_wrappers:field_trial", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] -} - rtc_library("rate_control_settings") { sources = [ "rate_control_settings.cc", @@ -188,15 +120,10 @@ rtc_library("rate_control_settings") { "..:logging", "..:safe_conversions", "../../api:field_trials_view", - "../../api/transport:field_trial_based_config", "../../api/units:data_size", "../../api/video_codecs:video_codecs_api", - "../../system_wrappers:field_trial", "../../video/config:encoder_config", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -208,9 +135,7 @@ rtc_library("keyframe_interval_settings_experiment") { deps = [ ":field_trial_parser", "../../api:field_trials_view", - "../../api/transport:field_trial_based_config", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("stable_target_rate_experiment") { @@ -221,9 +146,7 @@ rtc_library("stable_target_rate_experiment") { deps = [ ":field_trial_parser", "../../api:field_trials_view", - "../../api/transport:field_trial_based_config", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("min_video_bitrate_experiment") { @@ -233,13 +156,12 @@ rtc_library("min_video_bitrate_experiment") { ] deps = [ ":field_trial_parser", + "../../api:field_trials_view", "../../api/units:data_rate", "../../api/video:video_frame", "../../rtc_base:checks", "../../rtc_base:logging", - "../../system_wrappers:field_trial", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } if (rtc_include_tests && !build_with_chromium) { @@ -248,8 +170,6 @@ if (rtc_include_tests && !build_with_chromium) { sources = [ "balanced_degradation_settings_unittest.cc", - "bandwidth_quality_scaler_settings_unittest.cc", - "cpu_speed_experiment_unittest.cc", "encoder_info_settings_unittest.cc", "field_trial_list_unittest.cc", "field_trial_parser_unittest.cc", @@ -257,28 +177,22 @@ if (rtc_include_tests && !build_with_chromium) { "keyframe_interval_settings_unittest.cc", "min_video_bitrate_experiment_unittest.cc", "normalize_simulcast_size_experiment_unittest.cc", - "quality_rampup_experiment_unittest.cc", "quality_scaler_settings_unittest.cc", "quality_scaling_experiment_unittest.cc", "rate_control_settings_unittest.cc", - "rtt_mult_experiment_unittest.cc", "stable_target_rate_experiment_unittest.cc", "struct_parameters_parser_unittest.cc", ] deps = [ ":balanced_degradation_settings", - ":bandwidth_quality_scaler_settings", - ":cpu_speed_experiment", ":encoder_info_settings", ":field_trial_parser", ":keyframe_interval_settings_experiment", ":min_video_bitrate_experiment", ":normalize_simulcast_size_experiment", - ":quality_rampup_experiment", ":quality_scaler_settings", ":quality_scaling_experiment", ":rate_control_settings", - ":rtt_mult_experiment", ":stable_target_rate_experiment", "..:gunit_helpers", "../:rtc_base_tests_utils", @@ -286,15 +200,13 @@ if 
(rtc_include_tests && !build_with_chromium) { "../../api/video:video_frame", "../../api/video_codecs:video_codecs_api", "../../system_wrappers:field_trial", + "../../test:explicit_key_value_config", "../../test:field_trial", "../../test:scoped_key_value_config", "../../test:test_main", "../../test:test_support", "../../video/config:encoder_config", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } } diff --git a/rtc_base/experiments/OWNERS b/rtc_base/experiments/OWNERS index 0a3b89533d..27d5a0f8f8 100644 --- a/rtc_base/experiments/OWNERS +++ b/rtc_base/experiments/OWNERS @@ -4,10 +4,8 @@ srte@webrtc.org per-file audio_allocation_settings*=srte@webrtc.org per-file congestion_controller_experiment*=srte@webrtc.org -per-file cpu_speed_experiment*=asapersson@webrtc.org per-file field_trial*=srte@webrtc.org per-file keyframe_interval_settings*=brandtr@webrtc.org per-file normalize_simulcast_size_experiment*=asapersson@webrtc.org per-file quality_scaling_experiment*=asapersson@webrtc.org -per-file rtt_mult_experiment*=mhoro@webrtc.org per-file rate_control_settings*=srte@webrtc.org diff --git a/rtc_base/experiments/alr_experiment.cc b/rtc_base/experiments/alr_experiment.cc index f5d36f6867..1c5ca14abb 100644 --- a/rtc_base/experiments/alr_experiment.cc +++ b/rtc_base/experiments/alr_experiment.cc @@ -16,21 +16,16 @@ #include #include "absl/strings/string_view.h" -#include "api/transport/field_trial_based_config.h" +#include "api/field_trials_view.h" #include "rtc_base/logging.h" namespace webrtc { +namespace { -const char AlrExperimentSettings::kScreenshareProbingBweExperimentName[] = - "WebRTC-ProbingScreenshareBwe"; -const char AlrExperimentSettings::kStrictPacingAndProbingExperimentName[] = - "WebRTC-StrictPacingAndProbing"; -const char kDefaultProbingScreenshareBweSettings[] = "1.0,2875,80,40,-60,3"; +constexpr absl::string_view kDefaultProbingScreenshareBweSettings = + "1.0,2875,80,40,-60,3"; -bool AlrExperimentSettings::MaxOneFieldTrialEnabled() { - return AlrExperimentSettings::MaxOneFieldTrialEnabled( - FieldTrialBasedConfig()); -} +} // namespace bool AlrExperimentSettings::MaxOneFieldTrialEnabled( const FieldTrialsView& key_value_config) { @@ -39,17 +34,11 @@ bool AlrExperimentSettings::MaxOneFieldTrialEnabled( key_value_config.Lookup(kScreenshareProbingBweExperimentName).empty(); } -absl::optional -AlrExperimentSettings::CreateFromFieldTrial(absl::string_view experiment_name) { - return AlrExperimentSettings::CreateFromFieldTrial(FieldTrialBasedConfig(), - experiment_name); -} - -absl::optional +std::optional AlrExperimentSettings::CreateFromFieldTrial( const FieldTrialsView& key_value_config, absl::string_view experiment_name) { - absl::optional ret; + std::optional ret; std::string group_name = key_value_config.Lookup(experiment_name); const std::string kIgnoredSuffix = "_Dogfood"; diff --git a/rtc_base/experiments/alr_experiment.h b/rtc_base/experiments/alr_experiment.h index 048fd90cab..bc4514e010 100644 --- a/rtc_base/experiments/alr_experiment.h +++ b/rtc_base/experiments/alr_experiment.h @@ -13,8 +13,9 @@ #include +#include + #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/field_trials_view.h" namespace webrtc { @@ -30,14 +31,14 @@ struct AlrExperimentSettings { // reserved value to indicate absence of experiment. 
int group_id; - static const char kScreenshareProbingBweExperimentName[]; - static const char kStrictPacingAndProbingExperimentName[]; - static absl::optional CreateFromFieldTrial( - absl::string_view experiment_name); - static absl::optional CreateFromFieldTrial( + static constexpr absl::string_view kScreenshareProbingBweExperimentName = + "WebRTC-ProbingScreenshareBwe"; + static constexpr absl::string_view kStrictPacingAndProbingExperimentName = + "WebRTC-StrictPacingAndProbing"; + + static std::optional CreateFromFieldTrial( const FieldTrialsView& key_value_config, absl::string_view experiment_name); - static bool MaxOneFieldTrialEnabled(); static bool MaxOneFieldTrialEnabled(const FieldTrialsView& key_value_config); private: diff --git a/rtc_base/experiments/balanced_degradation_settings.cc b/rtc_base/experiments/balanced_degradation_settings.cc index 1652e31704..8e0b513ddd 100644 --- a/rtc_base/experiments/balanced_degradation_settings.cc +++ b/rtc_base/experiments/balanced_degradation_settings.cc @@ -144,11 +144,11 @@ std::vector GetValidOrDefault( return DefaultConfigs(); } -absl::optional GetThresholds( +std::optional GetThresholds( VideoCodecType type, const BalancedDegradationSettings::Config& config) { - absl::optional low; - absl::optional high; + std::optional low; + std::optional high; switch (type) { case kVideoCodecVP8: @@ -159,6 +159,8 @@ absl::optional GetThresholds( low = config.vp9.GetQpLow(); high = config.vp9.GetQpHigh(); break; + case kVideoCodecH265: + // TODO(bugs.webrtc.org/13485): Use H264 QP thresholds for now. case kVideoCodecH264: low = config.h264.GetQpLow(); high = config.h264.GetQpHigh(); @@ -177,23 +179,25 @@ absl::optional GetThresholds( if (low && high) { RTC_LOG(LS_INFO) << "QP thresholds: low: " << *low << ", high: " << *high; - return absl::optional( + return std::optional( VideoEncoder::QpThresholds(*low, *high)); } - return absl::nullopt; + return std::nullopt; } int GetFps(VideoCodecType type, - const absl::optional& config) { + const std::optional& config) { if (!config.has_value()) { return std::numeric_limits::max(); } - absl::optional fps; + std::optional fps; switch (type) { case kVideoCodecVP8: fps = config->vp8.GetFps(); break; + case kVideoCodecH265: + // TODO(bugs.webrtc.org/13485): Use VP9 bitrate limits for now. case kVideoCodecVP9: fps = config->vp9.GetFps(); break; @@ -215,17 +219,19 @@ int GetFps(VideoCodecType type, return (framerate == kMaxFps) ? std::numeric_limits::max() : framerate; } -absl::optional GetKbps( +std::optional GetKbps( VideoCodecType type, - const absl::optional& config) { + const std::optional& config) { if (!config.has_value()) - return absl::nullopt; + return std::nullopt; - absl::optional kbps; + std::optional kbps; switch (type) { case kVideoCodecVP8: kbps = config->vp8.GetKbps(); break; + case kVideoCodecH265: + // TODO(bugs.webrtc.org/13485): Use VP9 bitrate limits for now. case kVideoCodecVP9: kbps = config->vp9.GetKbps(); break; @@ -245,20 +251,22 @@ absl::optional GetKbps( if (kbps.has_value()) return kbps; - return config->kbps > 0 ? absl::optional(config->kbps) : absl::nullopt; + return config->kbps > 0 ? 
std::optional(config->kbps) : std::nullopt; } -absl::optional GetKbpsRes( +std::optional GetKbpsRes( VideoCodecType type, - const absl::optional& config) { + const std::optional& config) { if (!config.has_value()) - return absl::nullopt; + return std::nullopt; - absl::optional kbps_res; + std::optional kbps_res; switch (type) { case kVideoCodecVP8: kbps_res = config->vp8.GetKbpsRes(); break; + case kVideoCodecH265: + // TODO(bugs.webrtc.org/13485): Use VP9 bitrate limits for now. case kVideoCodecVP9: kbps_res = config->vp9.GetKbpsRes(); break; @@ -278,34 +286,34 @@ absl::optional GetKbpsRes( if (kbps_res.has_value()) return kbps_res; - return config->kbps_res > 0 ? absl::optional(config->kbps_res) - : absl::nullopt; + return config->kbps_res > 0 ? std::optional(config->kbps_res) + : std::nullopt; } } // namespace -absl::optional BalancedDegradationSettings::CodecTypeSpecific::GetQpLow() +std::optional BalancedDegradationSettings::CodecTypeSpecific::GetQpLow() const { - return (qp_low > 0) ? absl::optional(qp_low) : absl::nullopt; + return (qp_low > 0) ? std::optional(qp_low) : std::nullopt; } -absl::optional BalancedDegradationSettings::CodecTypeSpecific::GetQpHigh() +std::optional BalancedDegradationSettings::CodecTypeSpecific::GetQpHigh() const { - return (qp_high > 0) ? absl::optional(qp_high) : absl::nullopt; + return (qp_high > 0) ? std::optional(qp_high) : std::nullopt; } -absl::optional BalancedDegradationSettings::CodecTypeSpecific::GetFps() +std::optional BalancedDegradationSettings::CodecTypeSpecific::GetFps() const { - return (fps > 0) ? absl::optional(fps) : absl::nullopt; + return (fps > 0) ? std::optional(fps) : std::nullopt; } -absl::optional BalancedDegradationSettings::CodecTypeSpecific::GetKbps() +std::optional BalancedDegradationSettings::CodecTypeSpecific::GetKbps() const { - return (kbps > 0) ? absl::optional(kbps) : absl::nullopt; + return (kbps > 0) ? std::optional(kbps) : std::nullopt; } -absl::optional BalancedDegradationSettings::CodecTypeSpecific::GetKbpsRes() +std::optional BalancedDegradationSettings::CodecTypeSpecific::GetKbpsRes() const { - return (kbps_res > 0) ? absl::optional(kbps_res) : absl::nullopt; + return (kbps_res > 0) ? std::optional(kbps_res) : std::nullopt; } BalancedDegradationSettings::Config::Config() = default; @@ -407,32 +415,32 @@ int BalancedDegradationSettings::MinFps(VideoCodecType type, int pixels) const { return GetFps(type, GetMinFpsConfig(pixels)); } -absl::optional +std::optional BalancedDegradationSettings::GetMinFpsConfig(int pixels) const { for (const auto& config : configs_) { if (pixels <= config.pixels) return config; } - return absl::nullopt; + return std::nullopt; } int BalancedDegradationSettings::MaxFps(VideoCodecType type, int pixels) const { return GetFps(type, GetMaxFpsConfig(pixels)); } -absl::optional +std::optional BalancedDegradationSettings::GetMaxFpsConfig(int pixels) const { for (size_t i = 0; i < configs_.size() - 1; ++i) { if (pixels <= configs_[i].pixels) return configs_[i + 1]; } - return absl::nullopt; + return std::nullopt; } bool BalancedDegradationSettings::CanAdaptUp(VideoCodecType type, int pixels, uint32_t bitrate_bps) const { - absl::optional min_kbps = GetKbps(type, GetMaxFpsConfig(pixels)); + std::optional min_kbps = GetKbps(type, GetMaxFpsConfig(pixels)); if (!min_kbps.has_value() || bitrate_bps == 0) { return true; // No limit configured or bitrate provided. 
} @@ -443,25 +451,25 @@ bool BalancedDegradationSettings::CanAdaptUpResolution( VideoCodecType type, int pixels, uint32_t bitrate_bps) const { - absl::optional min_kbps = GetKbpsRes(type, GetMaxFpsConfig(pixels)); + std::optional min_kbps = GetKbpsRes(type, GetMaxFpsConfig(pixels)); if (!min_kbps.has_value() || bitrate_bps == 0) { return true; // No limit configured or bitrate provided. } return bitrate_bps >= static_cast(min_kbps.value() * 1000); } -absl::optional BalancedDegradationSettings::MinFpsDiff(int pixels) const { +std::optional BalancedDegradationSettings::MinFpsDiff(int pixels) const { for (const auto& config : configs_) { if (pixels <= config.pixels) { return (config.fps_diff > kNoFpsDiff) - ? absl::optional(config.fps_diff) - : absl::nullopt; + ? std::optional(config.fps_diff) + : std::nullopt; } } - return absl::nullopt; + return std::nullopt; } -absl::optional +std::optional BalancedDegradationSettings::GetQpThresholds(VideoCodecType type, int pixels) const { return GetThresholds(type, GetConfig(pixels)); diff --git a/rtc_base/experiments/balanced_degradation_settings.h b/rtc_base/experiments/balanced_degradation_settings.h index 2bca73dfb9..6993681f5a 100644 --- a/rtc_base/experiments/balanced_degradation_settings.h +++ b/rtc_base/experiments/balanced_degradation_settings.h @@ -11,9 +11,9 @@ #ifndef RTC_BASE_EXPERIMENTS_BALANCED_DEGRADATION_SETTINGS_H_ #define RTC_BASE_EXPERIMENTS_BALANCED_DEGRADATION_SETTINGS_H_ +#include #include -#include "absl/types/optional.h" #include "api/field_trials_view.h" #include "api/video_codecs/video_encoder.h" @@ -40,11 +40,11 @@ class BalancedDegradationSettings { kbps == o.kbps && kbps_res == o.kbps_res; } - absl::optional GetQpLow() const; - absl::optional GetQpHigh() const; - absl::optional GetFps() const; - absl::optional GetKbps() const; - absl::optional GetKbpsRes() const; + std::optional GetQpLow() const; + std::optional GetQpHigh() const; + std::optional GetFps() const; + std::optional GetKbps() const; + std::optional GetKbpsRes() const; // Optional settings. int qp_low = 0; @@ -123,16 +123,15 @@ class BalancedDegradationSettings { uint32_t bitrate_bps) const; // Gets the min framerate diff from `configs_` based on `pixels`. - absl::optional MinFpsDiff(int pixels) const; + std::optional MinFpsDiff(int pixels) const; // Gets QpThresholds for the codec `type` based on `pixels`. 
- absl::optional GetQpThresholds( - VideoCodecType type, - int pixels) const; + std::optional GetQpThresholds(VideoCodecType type, + int pixels) const; private: - absl::optional GetMinFpsConfig(int pixels) const; - absl::optional GetMaxFpsConfig(int pixels) const; + std::optional GetMinFpsConfig(int pixels) const; + std::optional GetMaxFpsConfig(int pixels) const; Config GetConfig(int pixels) const; std::vector configs_; diff --git a/rtc_base/experiments/balanced_degradation_settings_unittest.cc b/rtc_base/experiments/balanced_degradation_settings_unittest.cc index 996c06f8f2..a9ad43398a 100644 --- a/rtc_base/experiments/balanced_degradation_settings_unittest.cc +++ b/rtc_base/experiments/balanced_degradation_settings_unittest.cc @@ -70,7 +70,6 @@ TEST(BalancedDegradationSettings, GetsDefaultConfigIfNoList) { EXPECT_FALSE(settings.GetQpThresholds(kVideoCodecH264, 1)); EXPECT_FALSE(settings.GetQpThresholds(kVideoCodecAV1, 1)); EXPECT_FALSE(settings.GetQpThresholds(kVideoCodecGeneric, 1)); - EXPECT_FALSE(settings.GetQpThresholds(kVideoCodecMultiplex, 1)); } TEST(BalancedDegradationSettings, GetsConfig) { @@ -399,7 +398,6 @@ TEST(BalancedDegradationSettings, CanAdaptUpWithCodecType) { EXPECT_TRUE(s.CanAdaptUp(kVideoCodecAV1, 1000, 77000)); EXPECT_FALSE(s.CanAdaptUp(kVideoCodecGeneric, 1000, 24000)); EXPECT_TRUE(s.CanAdaptUp(kVideoCodecGeneric, 1000, 25000)); - EXPECT_TRUE(s.CanAdaptUp(kVideoCodecMultiplex, 1000, 1)); // Not configured. } TEST(BalancedDegradationSettings, CanAdaptUpResolution) { @@ -435,8 +433,6 @@ TEST(BalancedDegradationSettings, CanAdaptUpResolutionWithCodecType) { EXPECT_TRUE(s.CanAdaptUpResolution(kVideoCodecAV1, 1000, 77000)); EXPECT_FALSE(s.CanAdaptUpResolution(kVideoCodecGeneric, 1000, 24000)); EXPECT_TRUE(s.CanAdaptUpResolution(kVideoCodecGeneric, 1000, 25000)); - EXPECT_TRUE(s.CanAdaptUpResolution(kVideoCodecMultiplex, 1000, - 1)); // Not configured. } TEST(BalancedDegradationSettings, GetsFpsDiff) { diff --git a/rtc_base/experiments/bandwidth_quality_scaler_settings.cc b/rtc_base/experiments/bandwidth_quality_scaler_settings.cc deleted file mode 100644 index 0a9df493ed..0000000000 --- a/rtc_base/experiments/bandwidth_quality_scaler_settings.cc +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "rtc_base/experiments/bandwidth_quality_scaler_settings.h" - -#include "api/transport/field_trial_based_config.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -BandwidthQualityScalerSettings::BandwidthQualityScalerSettings( - const FieldTrialsView* const key_value_config) - : bitrate_state_update_interval_s_("bitrate_state_update_interval_s_") { - ParseFieldTrial( - {&bitrate_state_update_interval_s_}, - key_value_config->Lookup("WebRTC-Video-BandwidthQualityScalerSettings")); -} - -BandwidthQualityScalerSettings -BandwidthQualityScalerSettings::ParseFromFieldTrials() { - FieldTrialBasedConfig field_trial_config; - return BandwidthQualityScalerSettings(&field_trial_config); -} - -absl::optional -BandwidthQualityScalerSettings::BitrateStateUpdateInterval() const { - if (bitrate_state_update_interval_s_ && - bitrate_state_update_interval_s_.Value() <= 0) { - RTC_LOG(LS_WARNING) - << "Unsupported bitrate_state_update_interval_s_ value, ignored."; - return absl::nullopt; - } - return bitrate_state_update_interval_s_.GetOptional(); -} - -} // namespace webrtc diff --git a/rtc_base/experiments/bandwidth_quality_scaler_settings.h b/rtc_base/experiments/bandwidth_quality_scaler_settings.h deleted file mode 100644 index 21e115df01..0000000000 --- a/rtc_base/experiments/bandwidth_quality_scaler_settings.h +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_EXPERIMENTS_BANDWIDTH_QUALITY_SCALER_SETTINGS_H_ -#define RTC_BASE_EXPERIMENTS_BANDWIDTH_QUALITY_SCALER_SETTINGS_H_ - -#include "absl/types/optional.h" -#include "api/field_trials_view.h" -#include "rtc_base/experiments/field_trial_parser.h" - -namespace webrtc { - -class BandwidthQualityScalerSettings final { - public: - static BandwidthQualityScalerSettings ParseFromFieldTrials(); - - absl::optional BitrateStateUpdateInterval() const; - - private: - explicit BandwidthQualityScalerSettings( - const FieldTrialsView* const key_value_config); - - FieldTrialOptional bitrate_state_update_interval_s_; -}; - -} // namespace webrtc - -#endif // RTC_BASE_EXPERIMENTS_BANDWIDTH_QUALITY_SCALER_SETTINGS_H_ diff --git a/rtc_base/experiments/bandwidth_quality_scaler_settings_unittest.cc b/rtc_base/experiments/bandwidth_quality_scaler_settings_unittest.cc deleted file mode 100644 index fab22cede0..0000000000 --- a/rtc_base/experiments/bandwidth_quality_scaler_settings_unittest.cc +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "rtc_base/experiments/bandwidth_quality_scaler_settings.h" - -#include "test/field_trial.h" -#include "test/gtest.h" - -namespace webrtc { -namespace { - -TEST(BandwidthQualityScalerSettingsTest, ValuesNotSetByDefault) { - const auto settings = BandwidthQualityScalerSettings::ParseFromFieldTrials(); - EXPECT_FALSE(settings.BitrateStateUpdateInterval()); -} - -TEST(BandwidthQualityScalerSettingsTest, ParseBitrateStateUpdateInterval) { - test::ScopedFieldTrials field_trials( - "WebRTC-Video-BandwidthQualityScalerSettings/" - "bitrate_state_update_interval_s_:100/"); - EXPECT_EQ(100u, BandwidthQualityScalerSettings::ParseFromFieldTrials() - .BitrateStateUpdateInterval()); -} - -TEST(BandwidthQualityScalerSettingsTest, ParseAll) { - test::ScopedFieldTrials field_trials( - "WebRTC-Video-BandwidthQualityScalerSettings/" - "bitrate_state_update_interval_s_:100/"); - EXPECT_EQ(100u, BandwidthQualityScalerSettings::ParseFromFieldTrials() - .BitrateStateUpdateInterval()); -} - -TEST(BandwidthQualityScalerSettingsTest, DoesNotParseIncorrectValue) { - test::ScopedFieldTrials field_trials( - "WebRTC-Video-BandwidthQualityScalerSettings/" - "bitrate_state_update_interval_s_:??/"); - const auto settings = BandwidthQualityScalerSettings::ParseFromFieldTrials(); - EXPECT_FALSE(settings.BitrateStateUpdateInterval()); -} - -} // namespace -} // namespace webrtc diff --git a/rtc_base/experiments/cpu_speed_experiment.cc b/rtc_base/experiments/cpu_speed_experiment.cc deleted file mode 100644 index 7e61255260..0000000000 --- a/rtc_base/experiments/cpu_speed_experiment.cc +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "rtc_base/experiments/cpu_speed_experiment.h" - -#include - -#include "rtc_base/experiments/field_trial_list.h" -#include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" - -namespace webrtc { -namespace { -constexpr char kFieldTrial[] = "WebRTC-VP8-CpuSpeed-Arm"; -constexpr int kMinSetting = -16; -constexpr int kMaxSetting = -1; - -std::vector GetValidOrEmpty( - const std::vector& configs) { - if (configs.empty()) { - return {}; - } - - for (const auto& config : configs) { - if (config.cpu_speed < kMinSetting || config.cpu_speed > kMaxSetting) { - RTC_LOG(LS_WARNING) << "Unsupported cpu speed setting, value ignored."; - return {}; - } - } - - for (size_t i = 1; i < configs.size(); ++i) { - if (configs[i].pixels < configs[i - 1].pixels || - configs[i].cpu_speed > configs[i - 1].cpu_speed) { - RTC_LOG(LS_WARNING) << "Invalid parameter value provided."; - return {}; - } - } - - return configs; -} - -bool HasLeCores(const std::vector& configs) { - for (const auto& config : configs) { - if (config.cpu_speed_le_cores == 0) - return false; - } - return true; -} -} // namespace - -CpuSpeedExperiment::CpuSpeedExperiment() : cores_("cores") { - FieldTrialStructList configs( - {FieldTrialStructMember("pixels", [](Config* c) { return &c->pixels; }), - FieldTrialStructMember("cpu_speed", - [](Config* c) { return &c->cpu_speed; }), - FieldTrialStructMember( - "cpu_speed_le_cores", - [](Config* c) { return &c->cpu_speed_le_cores; })}, - {}); - ParseFieldTrial({&configs, &cores_}, field_trial::FindFullName(kFieldTrial)); - - configs_ = GetValidOrEmpty(configs.Get()); -} - -CpuSpeedExperiment::~CpuSpeedExperiment() {} - -absl::optional CpuSpeedExperiment::GetValue(int pixels, - int num_cores) const { - if (configs_.empty()) - return absl::nullopt; - - bool use_le = HasLeCores(configs_) && cores_ && num_cores <= cores_.Value(); - - for (const auto& config : configs_) { - if (pixels <= config.pixels) - return use_le ? absl::optional(config.cpu_speed_le_cores) - : absl::optional(config.cpu_speed); - } - return absl::optional(kMinSetting); -} - -} // namespace webrtc diff --git a/rtc_base/experiments/cpu_speed_experiment.h b/rtc_base/experiments/cpu_speed_experiment.h deleted file mode 100644 index 24ec0c0ec6..0000000000 --- a/rtc_base/experiments/cpu_speed_experiment.h +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef RTC_BASE_EXPERIMENTS_CPU_SPEED_EXPERIMENT_H_ -#define RTC_BASE_EXPERIMENTS_CPU_SPEED_EXPERIMENT_H_ - -#include - -#include "absl/types/optional.h" -#include "rtc_base/experiments/field_trial_parser.h" - -namespace webrtc { - -class CpuSpeedExperiment { - public: - CpuSpeedExperiment(); - ~CpuSpeedExperiment(); - - // Example: - // WebRTC-VP8-CpuSpeed-Arm/pixels:100|200|300,cpu_speed:-1|-2|-3/ - // pixels <= 100 -> cpu speed: -1 - // pixels <= 200 -> cpu speed: -2 - // pixels <= 300 -> cpu speed: -3 - - // WebRTC-VP8-CpuSpeed-Arm/pixels:100|200|300,cpu_speed:-1|-2|-3/, - // cpu_speed_le_cores:-4|-5|-6,cores:3/ - // If `num_cores` > 3 - // pixels <= 100 -> cpu speed: -1 - // pixels <= 200 -> cpu speed: -2 - // pixels <= 300 -> cpu speed: -3 - // else - // pixels <= 100 -> cpu speed: -4 - // pixels <= 200 -> cpu speed: -5 - // pixels <= 300 -> cpu speed: -6 - - struct Config { - int pixels = 0; // The video frame size. - int cpu_speed = 0; // The `cpu_speed` to be used if the frame size is less - // than or equal to `pixels`. - // Optional. - int cpu_speed_le_cores = 0; // Same as `cpu_speed` above but only used if - // `num_cores` <= `cores_`. - }; - - // Gets the cpu speed based on `pixels` and `num_cores`. - absl::optional GetValue(int pixels, int num_cores) const; - - private: - std::vector configs_; - - // Threshold for when to use `cpu_speed_le_cores`. - FieldTrialOptional cores_; -}; - -} // namespace webrtc - -#endif // RTC_BASE_EXPERIMENTS_CPU_SPEED_EXPERIMENT_H_ diff --git a/rtc_base/experiments/cpu_speed_experiment_unittest.cc b/rtc_base/experiments/cpu_speed_experiment_unittest.cc deleted file mode 100644 index 2105da3818..0000000000 --- a/rtc_base/experiments/cpu_speed_experiment_unittest.cc +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "rtc_base/experiments/cpu_speed_experiment.h" - -#include "rtc_base/gunit.h" -#include "test/field_trial.h" -#include "test/gmock.h" - -namespace webrtc { - -TEST(CpuSpeedExperimentTest, NoValueIfNotEnabled) { - CpuSpeedExperiment cpu_speed_config; - EXPECT_FALSE(cpu_speed_config.GetValue(1, /*num_cores=*/1)); -} - -TEST(CpuSpeedExperimentTest, GetValue) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-VP8-CpuSpeed-Arm/pixels:1000,cpu_speed:-12,cores:4/"); - - CpuSpeedExperiment cpu_speed_config; - EXPECT_EQ(-12, cpu_speed_config.GetValue(1, /*num_cores=*/1)); - EXPECT_EQ(-12, cpu_speed_config.GetValue(1000, /*num_cores=*/1)); - EXPECT_EQ(-16, cpu_speed_config.GetValue(1001, /*num_cores=*/1)); -} - -TEST(CpuSpeedExperimentTest, GetValueWithList) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-VP8-CpuSpeed-Arm/pixels:1000|2000|3000,cpu_speed:-1|-10|-16/"); - - CpuSpeedExperiment cpu_speed_config; - EXPECT_EQ(-1, cpu_speed_config.GetValue(1, /*num_cores=*/1)); - EXPECT_EQ(-1, cpu_speed_config.GetValue(1000, /*num_cores=*/1)); - EXPECT_EQ(-10, cpu_speed_config.GetValue(1001, /*num_cores=*/1)); - EXPECT_EQ(-10, cpu_speed_config.GetValue(2000, /*num_cores=*/1)); - EXPECT_EQ(-16, cpu_speed_config.GetValue(2001, /*num_cores=*/1)); - EXPECT_EQ(-16, cpu_speed_config.GetValue(3000, /*num_cores=*/1)); - EXPECT_EQ(-16, cpu_speed_config.GetValue(3001, /*num_cores=*/1)); -} - -TEST(CpuSpeedExperimentTest, GetValueWithCores) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-VP8-CpuSpeed-Arm/" - "pixels:1000|2000|3000,cpu_speed:-1|-10|-16," - "cpu_speed_le_cores:-5|-11|-16,cores:2/"); - - CpuSpeedExperiment cpu_speed_config; - EXPECT_EQ(-5, cpu_speed_config.GetValue(1000, /*num_cores=*/1)); - EXPECT_EQ(-11, cpu_speed_config.GetValue(2000, /*num_cores=*/2)); - EXPECT_EQ(-1, cpu_speed_config.GetValue(1000, /*num_cores=*/3)); - EXPECT_EQ(-10, cpu_speed_config.GetValue(2000, /*num_cores=*/4)); -} - -TEST(CpuSpeedExperimentTest, GetValueWithCoresUnconfigured) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-VP8-CpuSpeed-Arm/" - "pixels:1000|2000|3000,cpu_speed:-1|-10|-16," - "cpu_speed_le_cores:-5|-11|-16/"); - - CpuSpeedExperiment cpu_speed_config; - EXPECT_EQ(-1, cpu_speed_config.GetValue(1000, /*num_cores=*/1)); - EXPECT_EQ(-10, cpu_speed_config.GetValue(2000, /*num_cores=*/2)); -} - -TEST(CpuSpeedExperimentTest, GetValueFailsForTooSmallValue) { - // Supported range: [-16, -1]. - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-VP8-CpuSpeed-Arm/pixels:1000|2000|3000,cpu_speed:-1|-10|-17/"); - - CpuSpeedExperiment cpu_speed_config; - EXPECT_FALSE(cpu_speed_config.GetValue(1, /*num_cores=*/1)); -} - -TEST(CpuSpeedExperimentTest, GetValueFailsForTooLargeValue) { - // Supported range: [-16, -1]. 
- webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-VP8-CpuSpeed-Arm/pixels:1000|2000|3000,cpu_speed:0|-10|-16/"); - - CpuSpeedExperiment cpu_speed_config; - EXPECT_FALSE(cpu_speed_config.GetValue(1, /*num_cores=*/1)); -} - -TEST(CpuSpeedExperimentTest, GetValueFailsIfPixelsDecreases) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-VP8-CpuSpeed-Arm/pixels:1000|999|3000,cpu_speed:-5|-10|-16/"); - - CpuSpeedExperiment cpu_speed_config; - EXPECT_FALSE(cpu_speed_config.GetValue(1, /*num_cores=*/1)); -} - -TEST(CpuSpeedExperimentTest, GetValueFailsIfCpuSpeedIncreases) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-VP8-CpuSpeed-Arm/pixels:1000|2000|3000,cpu_speed:-5|-4|-16/"); - - CpuSpeedExperiment cpu_speed_config; - EXPECT_FALSE(cpu_speed_config.GetValue(1, /*num_cores=*/1)); -} - -} // namespace webrtc diff --git a/rtc_base/experiments/encoder_info_settings.cc b/rtc_base/experiments/encoder_info_settings.cc index 00974838ae..c5f544ee07 100644 --- a/rtc_base/experiments/encoder_info_settings.cc +++ b/rtc_base/experiments/encoder_info_settings.cc @@ -13,9 +13,9 @@ #include #include "absl/strings/string_view.h" +#include "api/field_trials_view.h" #include "rtc_base/experiments/field_trial_list.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { @@ -38,24 +38,45 @@ constexpr float kDefaultMinBitratebps = 30000; std::vector EncoderInfoSettings::GetDefaultSinglecastBitrateLimits( VideoCodecType codec_type) { - // Specific limits for VP9. Determining specific limits for AV1 via - // field trial experiment is a work in progress. Other codecs use VP8 limits. - if (codec_type == kVideoCodecVP9) { + if (codec_type == kVideoCodecAV1) { + // AV1 singlecast max bitrate limits are higher than AV1 SVC max limits. + // This is because in singlecast we normally have just one receiver, BWE is + // known end-to-end and the encode target bitrate guarantees delivery of + // video. + // The min bitrate limits are not used in singlecast (used in SVC/simulcast + // to de-/activate spatial layers) and are set to zero. Send resolution in + // singlecast is assumed to be regulated by QP-based quality scaler. + return { + {320 * 180, 0, 0, 256000}, {480 * 270, 176000, 0, 384000}, + {640 * 360, 256000, 0, 512000}, {960 * 540, 384000, 0, 1024000}, + {1280 * 720, 576000, 0, 1536000}, {1920 * 1080, 1000000, 0, 3700000}}; + } + + if (codec_type == kVideoCodecVP9 || codec_type == kVideoCodecH265) { + // VP9 singlecast bitrate limits are derived ~directly from VP9 SVC bitrate + // limits. The current max limits are unnecessarily too strict for + // singlecast, where BWE is known end-to-end, especially for low + // resolutions. + // TODO(crbugs.com/39206082): Consider fine-tuning H.265 to have its own + // bitrate settings separate from VP9. return {{320 * 180, 0, 30000, 150000}, {480 * 270, 120000, 30000, 300000}, {640 * 360, 190000, 30000, 420000}, {960 * 540, 350000, 30000, 1000000}, - {1280 * 720, 480000, 30000, 1500000}}; + {1280 * 720, 480000, 30000, 1500000}, + {1920 * 1080, 1000000, 30000, 3700000}}; } + // VP8 and other codecs. 
return {{320 * 180, 0, 30000, 300000}, {480 * 270, 200000, 30000, 500000}, {640 * 360, 300000, 30000, 800000}, {960 * 540, 500000, 30000, 1500000}, - {1280 * 720, 900000, 30000, 2500000}}; + {1280 * 720, 900000, 30000, 2500000}, + {1920 * 1080, 2000000, 30000, 5000000}}; } -absl::optional +std::optional EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( VideoCodecType codec_type, int frame_size_pixels) { @@ -67,29 +88,39 @@ EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( // Return the suitable bitrate limits for specified resolution when qp is // untrusted, they are experimental values. -// TODO(bugs.webrtc.org/12942): Maybe we need to add other codecs(VP8/VP9) -// experimental values. std::vector -EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted() { - // Specific limits for H264/AVC - return {{0 * 0, 0, 0, 0}, - {320 * 180, 0, 30000, 300000}, - {480 * 270, 300000, 30000, 500000}, - {640 * 360, 500000, 30000, 800000}, - {960 * 540, 800000, 30000, 1500000}, - {1280 * 720, 1500000, 30000, 2500000}, - {1920 * 1080, 2500000, 30000, 4000000}}; +EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted( + VideoCodecType codec_type) { + if (codec_type == kVideoCodecH265) { + // Similar settings from the simulcast bitate limits for H.265. + return {{0 * 0, 0, 0, 0}, + {320 * 180, 0, 30000, 150000}, + {480 * 270, 150000, 30000, 300000}, + {640 * 360, 300000, 30000, 420000}, + {960 * 540, 420000, 30000, 1000000}, + {1280 * 720, 1000000, 30000, 1500000}, + {1920 * 1080, 1500000, 30000, 3300000}}; + } else { + // Settings for H.264. Other codecs will not work in QP-untrusted mode. + return {{0 * 0, 0, 0, 0}, + {320 * 180, 0, 30000, 300000}, + {480 * 270, 300000, 30000, 500000}, + {640 * 360, 500000, 30000, 800000}, + {960 * 540, 800000, 30000, 1500000}, + {1280 * 720, 1500000, 30000, 2500000}, + {1920 * 1080, 2500000, 30000, 4000000}}; + } } // Through linear interpolation, return the bitrate limit corresponding to the // specified |frame_size_pixels|. -absl::optional +std::optional EncoderInfoSettings::GetSinglecastBitrateLimitForResolutionWhenQpIsUntrusted( - absl::optional frame_size_pixels, + std::optional frame_size_pixels, const std::vector& resolution_bitrate_limits) { if (!frame_size_pixels.has_value() || frame_size_pixels.value() <= 0) { - return absl::nullopt; + return std::nullopt; } std::vector bitrate_limits = @@ -103,7 +134,7 @@ EncoderInfoSettings::GetSinglecastBitrateLimitForResolutionWhenQpIsUntrusted( }); if (bitrate_limits.empty()) { - return absl::nullopt; + return std::nullopt; } int interpolation_index = -1; @@ -126,6 +157,12 @@ EncoderInfoSettings::GetSinglecastBitrateLimitForResolutionWhenQpIsUntrusted( return bitrate_limits[interpolation_index]; } + // 0 means our resolution is smaller than the smallest resolution in the list, + // we will select smallest data as the return result. + if (interpolation_index == 0) { + return *bitrate_limits.begin(); + } + // No matching resolution, do a linear interpolate. 
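  // Example (illustrative numbers): with the H.264 limits above,
  // 1280*720 -> max 2.5 Mbps and 1920*1080 -> max 4.0 Mbps, a 1600*900 frame
  // (1,440,000 pixels) lies 45% of the way between the two pixel counts, so
  // the interpolated max bitrate comes out at roughly
  // 2.5 + 0.45 * (4.0 - 2.5) ~= 3.2 Mbps.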
int lower_pixel_count = bitrate_limits[interpolation_index - 1].frame_size_pixels; @@ -153,11 +190,12 @@ EncoderInfoSettings::GetSinglecastBitrateLimitForResolutionWhenQpIsUntrusted( << " min_start_bitrate_bps = " << min_start_bitrate_bps << " min_bitrate_bps = " << kDefaultMinBitratebps << " max_bitrate_bps = " << max_bitrate_bps; - return absl::nullopt; + return std::nullopt; } } -EncoderInfoSettings::EncoderInfoSettings(absl::string_view name) +EncoderInfoSettings::EncoderInfoSettings(const FieldTrialsView& field_trials, + absl::string_view name) : requested_resolution_alignment_("requested_resolution_alignment"), apply_alignment_to_all_simulcast_layers_( "apply_alignment_to_all_simulcast_layers") { @@ -176,25 +214,25 @@ EncoderInfoSettings::EncoderInfoSettings(absl::string_view name) [](BitrateLimit* b) { return &b->max_bitrate_bps; })}, {}); - std::string name_str(name); - if (field_trial::FindFullName(name_str).empty()) { + std::string experiment_string = field_trials.Lookup(name); + if (experiment_string.empty()) { // Encoder name not found, use common string applying to all encoders. - name_str = "WebRTC-GetEncoderInfoOverride"; + experiment_string = field_trials.Lookup("WebRTC-GetEncoderInfoOverride"); } ParseFieldTrial({&bitrate_limits, &requested_resolution_alignment_, &apply_alignment_to_all_simulcast_layers_}, - field_trial::FindFullName(name_str)); + experiment_string); resolution_bitrate_limits_ = ToResolutionBitrateLimits(bitrate_limits.Get()); } -absl::optional EncoderInfoSettings::requested_resolution_alignment() +std::optional EncoderInfoSettings::requested_resolution_alignment() const { if (requested_resolution_alignment_ && requested_resolution_alignment_.Value() < 1) { RTC_LOG(LS_WARNING) << "Unsupported alignment value, ignored."; - return absl::nullopt; + return std::nullopt; } return requested_resolution_alignment_.GetOptional(); } @@ -202,17 +240,22 @@ absl::optional EncoderInfoSettings::requested_resolution_alignment() EncoderInfoSettings::~EncoderInfoSettings() {} SimulcastEncoderAdapterEncoderInfoSettings:: - SimulcastEncoderAdapterEncoderInfoSettings() + SimulcastEncoderAdapterEncoderInfoSettings( + const FieldTrialsView& field_trials) : EncoderInfoSettings( + field_trials, "WebRTC-SimulcastEncoderAdapter-GetEncoderInfoOverride") {} -LibvpxVp8EncoderInfoSettings::LibvpxVp8EncoderInfoSettings() - : EncoderInfoSettings("WebRTC-VP8-GetEncoderInfoOverride") {} +LibvpxVp8EncoderInfoSettings::LibvpxVp8EncoderInfoSettings( + const FieldTrialsView& field_trials) + : EncoderInfoSettings(field_trials, "WebRTC-VP8-GetEncoderInfoOverride") {} -LibvpxVp9EncoderInfoSettings::LibvpxVp9EncoderInfoSettings() - : EncoderInfoSettings("WebRTC-VP9-GetEncoderInfoOverride") {} +LibvpxVp9EncoderInfoSettings::LibvpxVp9EncoderInfoSettings( + const FieldTrialsView& field_trials) + : EncoderInfoSettings(field_trials, "WebRTC-VP9-GetEncoderInfoOverride") {} -LibaomAv1EncoderInfoSettings::LibaomAv1EncoderInfoSettings() - : EncoderInfoSettings("WebRTC-Av1-GetEncoderInfoOverride") {} +LibaomAv1EncoderInfoSettings::LibaomAv1EncoderInfoSettings( + const FieldTrialsView& field_trials) + : EncoderInfoSettings(field_trials, "WebRTC-Av1-GetEncoderInfoOverride") {} } // namespace webrtc diff --git a/rtc_base/experiments/encoder_info_settings.h b/rtc_base/experiments/encoder_info_settings.h index 416bf6be7a..a8d00bebd1 100644 --- a/rtc_base/experiments/encoder_info_settings.h +++ b/rtc_base/experiments/encoder_info_settings.h @@ -11,11 +11,12 @@ #ifndef RTC_BASE_EXPERIMENTS_ENCODER_INFO_SETTINGS_H_ 
#define RTC_BASE_EXPERIMENTS_ENCODER_INFO_SETTINGS_H_ +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/video_codecs/video_encoder.h" #include "rtc_base/experiments/field_trial_parser.h" @@ -33,7 +34,7 @@ class EncoderInfoSettings { int max_bitrate_bps = 0; // The maximum bitrate. }; - absl::optional requested_resolution_alignment() const; + std::optional requested_resolution_alignment() const; bool apply_alignment_to_all_simulcast_layers() const { return apply_alignment_to_all_simulcast_layers_.Get(); } @@ -45,21 +46,22 @@ class EncoderInfoSettings { static std::vector GetDefaultSinglecastBitrateLimits(VideoCodecType codec_type); - static absl::optional + static std::optional GetDefaultSinglecastBitrateLimitsForResolution(VideoCodecType codec_type, int frame_size_pixels); static std::vector - GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted(); + GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted(VideoCodecType codec_type); - static absl::optional + static std::optional GetSinglecastBitrateLimitForResolutionWhenQpIsUntrusted( - absl::optional frame_size_pixels, + std::optional frame_size_pixels, const std::vector& resolution_bitrate_limits); protected: - explicit EncoderInfoSettings(absl::string_view name); + EncoderInfoSettings(const FieldTrialsView& field_trials, + absl::string_view name); private: FieldTrialOptional requested_resolution_alignment_; @@ -70,28 +72,29 @@ class EncoderInfoSettings { // EncoderInfo settings for SimulcastEncoderAdapter. class SimulcastEncoderAdapterEncoderInfoSettings : public EncoderInfoSettings { public: - SimulcastEncoderAdapterEncoderInfoSettings(); + explicit SimulcastEncoderAdapterEncoderInfoSettings( + const FieldTrialsView& field_trials); ~SimulcastEncoderAdapterEncoderInfoSettings() override {} }; // EncoderInfo settings for LibvpxVp8Encoder. class LibvpxVp8EncoderInfoSettings : public EncoderInfoSettings { public: - LibvpxVp8EncoderInfoSettings(); + explicit LibvpxVp8EncoderInfoSettings(const FieldTrialsView& field_trials); ~LibvpxVp8EncoderInfoSettings() override {} }; // EncoderInfo settings for LibvpxVp9Encoder. class LibvpxVp9EncoderInfoSettings : public EncoderInfoSettings { public: - LibvpxVp9EncoderInfoSettings(); + explicit LibvpxVp9EncoderInfoSettings(const FieldTrialsView& field_trials); ~LibvpxVp9EncoderInfoSettings() override {} }; // EncoderInfo settings for LibaomAv1Encoder. 
class LibaomAv1EncoderInfoSettings : public EncoderInfoSettings { public: - LibaomAv1EncoderInfoSettings(); + explicit LibaomAv1EncoderInfoSettings(const FieldTrialsView& field_trials); ~LibaomAv1EncoderInfoSettings() override {} }; diff --git a/rtc_base/experiments/encoder_info_settings_unittest.cc b/rtc_base/experiments/encoder_info_settings_unittest.cc index 929c777821..70c0b7f0ba 100644 --- a/rtc_base/experiments/encoder_info_settings_unittest.cc +++ b/rtc_base/experiments/encoder_info_settings_unittest.cc @@ -11,60 +11,64 @@ #include "rtc_base/experiments/encoder_info_settings.h" #include "rtc_base/gunit.h" -#include "test/field_trial.h" +#include "test/explicit_key_value_config.h" #include "test/gmock.h" namespace webrtc { +using test::ExplicitKeyValueConfig; + TEST(SimulcastEncoderAdapterSettingsTest, NoValuesWithoutFieldTrial) { - SimulcastEncoderAdapterEncoderInfoSettings settings; - EXPECT_EQ(absl::nullopt, settings.requested_resolution_alignment()); + ExplicitKeyValueConfig field_trials(""); + + SimulcastEncoderAdapterEncoderInfoSettings settings(field_trials); + EXPECT_EQ(std::nullopt, settings.requested_resolution_alignment()); EXPECT_FALSE(settings.apply_alignment_to_all_simulcast_layers()); EXPECT_TRUE(settings.resolution_bitrate_limits().empty()); } TEST(SimulcastEncoderAdapterSettingsTest, NoValueForInvalidAlignment) { - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-SimulcastEncoderAdapter-GetEncoderInfoOverride/" "requested_resolution_alignment:0/"); - SimulcastEncoderAdapterEncoderInfoSettings settings; - EXPECT_EQ(absl::nullopt, settings.requested_resolution_alignment()); + SimulcastEncoderAdapterEncoderInfoSettings settings(field_trials); + EXPECT_EQ(std::nullopt, settings.requested_resolution_alignment()); } TEST(SimulcastEncoderAdapterSettingsTest, GetResolutionAlignment) { - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-SimulcastEncoderAdapter-GetEncoderInfoOverride/" "requested_resolution_alignment:2/"); - SimulcastEncoderAdapterEncoderInfoSettings settings; + SimulcastEncoderAdapterEncoderInfoSettings settings(field_trials); EXPECT_EQ(2u, settings.requested_resolution_alignment()); EXPECT_FALSE(settings.apply_alignment_to_all_simulcast_layers()); EXPECT_TRUE(settings.resolution_bitrate_limits().empty()); } TEST(SimulcastEncoderAdapterSettingsTest, GetApplyAlignment) { - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-SimulcastEncoderAdapter-GetEncoderInfoOverride/" "requested_resolution_alignment:3," "apply_alignment_to_all_simulcast_layers/"); - SimulcastEncoderAdapterEncoderInfoSettings settings; + SimulcastEncoderAdapterEncoderInfoSettings settings(field_trials); EXPECT_EQ(3u, settings.requested_resolution_alignment()); EXPECT_TRUE(settings.apply_alignment_to_all_simulcast_layers()); EXPECT_TRUE(settings.resolution_bitrate_limits().empty()); } TEST(SimulcastEncoderAdapterSettingsTest, GetResolutionBitrateLimits) { - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-SimulcastEncoderAdapter-GetEncoderInfoOverride/" "frame_size_pixels:123," "min_start_bitrate_bps:11000," "min_bitrate_bps:44000," "max_bitrate_bps:77000/"); - SimulcastEncoderAdapterEncoderInfoSettings settings; - EXPECT_EQ(absl::nullopt, settings.requested_resolution_alignment()); + SimulcastEncoderAdapterEncoderInfoSettings settings(field_trials); + EXPECT_EQ(std::nullopt, settings.requested_resolution_alignment()); 
EXPECT_FALSE(settings.apply_alignment_to_all_simulcast_layers()); EXPECT_THAT(settings.resolution_bitrate_limits(), ::testing::ElementsAre(VideoEncoder::ResolutionBitrateLimits{ @@ -72,14 +76,14 @@ TEST(SimulcastEncoderAdapterSettingsTest, GetResolutionBitrateLimits) { } TEST(SimulcastEncoderAdapterSettingsTest, GetResolutionBitrateLimitsWithList) { - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-SimulcastEncoderAdapter-GetEncoderInfoOverride/" "frame_size_pixels:123|456|789," "min_start_bitrate_bps:11000|22000|33000," "min_bitrate_bps:44000|55000|66000," "max_bitrate_bps:77000|88000|99000/"); - SimulcastEncoderAdapterEncoderInfoSettings settings; + SimulcastEncoderAdapterEncoderInfoSettings settings(field_trials); EXPECT_THAT( settings.resolution_bitrate_limits(), ::testing::ElementsAre( @@ -89,14 +93,59 @@ TEST(SimulcastEncoderAdapterSettingsTest, GetResolutionBitrateLimitsWithList) { } TEST(EncoderSettingsTest, CommonSettingsUsedIfEncoderNameUnspecified) { - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-VP8-GetEncoderInfoOverride/requested_resolution_alignment:2/" "WebRTC-GetEncoderInfoOverride/requested_resolution_alignment:3/"); - LibvpxVp8EncoderInfoSettings vp8_settings; + LibvpxVp8EncoderInfoSettings vp8_settings(field_trials); EXPECT_EQ(2u, vp8_settings.requested_resolution_alignment()); - LibvpxVp9EncoderInfoSettings vp9_settings; + LibvpxVp9EncoderInfoSettings vp9_settings(field_trials); EXPECT_EQ(3u, vp9_settings.requested_resolution_alignment()); } +TEST(GetSinglecastBitrateLimitForResolutionWhenQpIsUntrustedTests, + LinearInterpolationUnderflow) { + std::optional frame_size_pixels = 480 * 360; + std::vector resolution_bitrate_limits( + {{1280 * 720, 1500000, 30000, 2500000}, + {1920 * 1080, 2500000, 30000, 4000000}}); + + const auto resolutionBitrateLimit = EncoderInfoSettings:: + GetSinglecastBitrateLimitForResolutionWhenQpIsUntrusted( + frame_size_pixels, resolution_bitrate_limits); + EXPECT_TRUE(resolutionBitrateLimit.has_value()); + EXPECT_EQ(resolutionBitrateLimit.value(), resolution_bitrate_limits.front()); +} + +TEST(GetSinglecastBitrateLimitForResolutionWhenQpIsUntrustedTests, + LinearInterpolationOverflow) { + std::optional frame_size_pixels = 4096 * 2160; + std::vector resolution_bitrate_limits( + {{1280 * 720, 1500000, 30000, 2500000}, + {1920 * 1080, 2500000, 30000, 4000000}}); + + const auto resolutionBitrateLimit = EncoderInfoSettings:: + GetSinglecastBitrateLimitForResolutionWhenQpIsUntrusted( + frame_size_pixels, resolution_bitrate_limits); + EXPECT_TRUE(resolutionBitrateLimit.has_value()); + EXPECT_EQ(resolutionBitrateLimit.value(), resolution_bitrate_limits.back()); +} + +TEST(GetSinglecastBitrateLimitForResolutionWhenQpIsUntrustedTests, + LinearInterpolationExactMatch) { + std::optional frame_size_pixels = 1920 * 1080; + VideoEncoder::ResolutionBitrateLimits expected_match{1920 * 1080, 2500000, + 30000, 4000000}; + std::vector resolution_bitrate_limits( + {{1280 * 720, 1500000, 30000, 2500000}, + expected_match, + {4096 * 2160, 4000000, 30000, 8000000}}); + + const auto resolutionBitrateLimit = EncoderInfoSettings:: + GetSinglecastBitrateLimitForResolutionWhenQpIsUntrusted( + frame_size_pixels, resolution_bitrate_limits); + EXPECT_TRUE(resolutionBitrateLimit.has_value()); + EXPECT_EQ(resolutionBitrateLimit.value(), expected_match); +} + } // namespace webrtc diff --git a/rtc_base/experiments/field_trial_list.cc b/rtc_base/experiments/field_trial_list.cc index 
72cd79f2d2..26c17b50b9 100644 --- a/rtc_base/experiments/field_trial_list.cc +++ b/rtc_base/experiments/field_trial_list.cc @@ -35,7 +35,7 @@ bool FieldTrialListWrapper::Used() { return GetList()->Used(); } -bool FieldTrialStructListBase::Parse(absl::optional str_value) { +bool FieldTrialStructListBase::Parse(std::optional str_value) { RTC_DCHECK_NOTREACHED(); return true; } diff --git a/rtc_base/experiments/field_trial_list.h b/rtc_base/experiments/field_trial_list.h index 63403cc51d..910f25402e 100644 --- a/rtc_base/experiments/field_trial_list.h +++ b/rtc_base/experiments/field_trial_list.h @@ -65,7 +65,7 @@ class FieldTrialList : public FieldTrialListBase { const std::vector* operator->() const { return &values_; } protected: - bool Parse(absl::optional str_value) override { + bool Parse(std::optional str_value) override { parse_got_called_ = true; if (!str_value) { @@ -75,8 +75,8 @@ class FieldTrialList : public FieldTrialListBase { std::vector new_values_; - for (const absl::string_view token : rtc::split(str_value.value(), '|')) { - absl::optional value = ParseTypedParameter(token); + for (const absl::string_view token : split(str_value.value(), '|')) { + std::optional value = ParseTypedParameter(token); if (value) { new_values_.push_back(*value); } else { @@ -180,7 +180,7 @@ class FieldTrialStructListBase : public FieldTrialParameterInterface { // user-supplied values, we return -1. int ValidateAndGetLength(); - bool Parse(absl::optional str_value) override; + bool Parse(std::optional str_value) override; std::vector> sub_lists_; }; diff --git a/rtc_base/experiments/field_trial_parser.cc b/rtc_base/experiments/field_trial_parser.cc index 78d5489f5e..1288c72c78 100644 --- a/rtc_base/experiments/field_trial_parser.cc +++ b/rtc_base/experiments/field_trial_parser.cc @@ -60,7 +60,7 @@ void ParseFieldTrial( while (!tail.empty()) { size_t key_end = tail.find_first_of(",:"); absl::string_view key = tail.substr(0, key_end); - absl::optional opt_value; + std::optional opt_value; if (key_end == absl::string_view::npos) { tail = ""; } else if (tail[key_end] == ':') { @@ -112,17 +112,17 @@ void ParseFieldTrial( } template <> -absl::optional ParseTypedParameter(absl::string_view str) { +std::optional ParseTypedParameter(absl::string_view str) { if (str == "true" || str == "1") { return true; } else if (str == "false" || str == "0") { return false; } - return absl::nullopt; + return std::nullopt; } template <> -absl::optional ParseTypedParameter(absl::string_view str) { +std::optional ParseTypedParameter(absl::string_view str) { double value; char unit[2]{0, 0}; if (sscanf(std::string(str).c_str(), "%lf%1s", &value, unit) >= 1) { @@ -130,56 +130,56 @@ absl::optional ParseTypedParameter(absl::string_view str) { return value / 100; return value; } else { - return absl::nullopt; + return std::nullopt; } } template <> -absl::optional ParseTypedParameter(absl::string_view str) { +std::optional ParseTypedParameter(absl::string_view str) { int64_t value; if (sscanf(std::string(str).c_str(), "%" SCNd64, &value) == 1) { - if (rtc::IsValueInRangeForNumericType(value)) { + if (IsValueInRangeForNumericType(value)) { return static_cast(value); } } - return absl::nullopt; + return std::nullopt; } template <> -absl::optional ParseTypedParameter(absl::string_view str) { +std::optional ParseTypedParameter(absl::string_view str) { int64_t value; if (sscanf(std::string(str).c_str(), "%" SCNd64, &value) == 1) { - if (rtc::IsValueInRangeForNumericType(value)) { + if (IsValueInRangeForNumericType(value)) { return 
static_cast(value); } } - return absl::nullopt; + return std::nullopt; } template <> -absl::optional ParseTypedParameter( +std::optional ParseTypedParameter( absl::string_view str) { return std::string(str); } template <> -absl::optional> ParseTypedParameter>( +std::optional> ParseTypedParameter>( absl::string_view str) { return ParseOptionalParameter(str); } template <> -absl::optional> ParseTypedParameter>( +std::optional> ParseTypedParameter>( absl::string_view str) { return ParseOptionalParameter(str); } template <> -absl::optional> -ParseTypedParameter>(absl::string_view str) { +std::optional> +ParseTypedParameter>(absl::string_view str) { return ParseOptionalParameter(str); } template <> -absl::optional> -ParseTypedParameter>(absl::string_view str) { +std::optional> ParseTypedParameter>( + absl::string_view str) { return ParseOptionalParameter(str); } @@ -197,10 +197,10 @@ webrtc::FieldTrialFlag::operator bool() const { return value_; } -bool FieldTrialFlag::Parse(absl::optional str_value) { +bool FieldTrialFlag::Parse(std::optional str_value) { // Only set the flag if there is no argument provided. if (str_value) { - absl::optional opt_value = ParseTypedParameter(*str_value); + std::optional opt_value = ParseTypedParameter(*str_value); if (!opt_value) return false; value_ = *opt_value; @@ -224,14 +224,14 @@ AbstractFieldTrialEnum::AbstractFieldTrialEnum(const AbstractFieldTrialEnum&) = default; AbstractFieldTrialEnum::~AbstractFieldTrialEnum() = default; -bool AbstractFieldTrialEnum::Parse(absl::optional str_value) { +bool AbstractFieldTrialEnum::Parse(std::optional str_value) { if (str_value) { auto it = enum_mapping_.find(*str_value); if (it != enum_mapping_.end()) { value_ = it->second; return true; } - absl::optional value = ParseTypedParameter(*str_value); + std::optional value = ParseTypedParameter(*str_value); if (value.has_value() && (valid_values_.find(*value) != valid_values_.end())) { value_ = *value; diff --git a/rtc_base/experiments/field_trial_parser.h b/rtc_base/experiments/field_trial_parser.h index 822895e70b..890da8052f 100644 --- a/rtc_base/experiments/field_trial_parser.h +++ b/rtc_base/experiments/field_trial_parser.h @@ -14,12 +14,12 @@ #include #include +#include #include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" // Field trial parser functionality. Provides funcitonality to parse field trial // argument strings in key:value format. Each parameter is described using @@ -51,7 +51,7 @@ class FieldTrialParameterInterface { std::initializer_list fields, absl::string_view trial_string); void MarkAsUsed() { used_ = true; } - virtual bool Parse(absl::optional str_value) = 0; + virtual bool Parse(std::optional str_value) = 0; virtual void ParseDone() {} @@ -68,10 +68,10 @@ void ParseFieldTrial( std::initializer_list fields, absl::string_view trial_string); -// Specialize this in code file for custom types. Should return absl::nullopt if +// Specialize this in code file for custom types. Should return std::nullopt if // the given string cannot be properly parsed. template -absl::optional ParseTypedParameter(absl::string_view); +std::optional ParseTypedParameter(absl::string_view); // This class uses the ParseTypedParameter function to implement a parameter // implementation with an enforced default value. 
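The comment above asks custom types to specialize ParseTypedParameter and to return std::nullopt when the string cannot be parsed. A minimal sketch of such a specialization, assuming a made-up value type Ratio that is not part of WebRTC (the real specializations for bool, double, int and the unit types live in field_trial_parser.cc and field_trial_units.cc):

#include <cstdio>
#include <optional>
#include <string>

#include "absl/strings/string_view.h"
#include "rtc_base/experiments/field_trial_parser.h"

namespace webrtc {

// Hypothetical value type, used here only to illustrate the extension point.
struct Ratio {
  int num = 0;
  int den = 1;
};

// The specialization lives in the same namespace as the primary template.
template <>
std::optional<Ratio> ParseTypedParameter<Ratio>(absl::string_view str) {
  Ratio r;
  if (sscanf(std::string(str).c_str(), "%d/%d", &r.num, &r.den) == 2 &&
      r.den != 0) {
    return r;
  }
  return std::nullopt;  // Unparsable input maps to std::nullopt, as required.
}

}  // namespace webrtc

With this in place a FieldTrialParameter<Ratio> or FieldTrialOptional<Ratio> could be parsed from a trial string such as "ratio:16/9".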
@@ -87,9 +87,9 @@ class FieldTrialParameter : public FieldTrialParameterInterface { void SetForTest(T value) { value_ = value; } protected: - bool Parse(absl::optional str_value) override { + bool Parse(std::optional str_value) override { if (str_value) { - absl::optional value = ParseTypedParameter(*str_value); + std::optional value = ParseTypedParameter(*str_value); if (value.has_value()) { value_ = value.value(); return true; @@ -110,8 +110,8 @@ class FieldTrialConstrained : public FieldTrialParameterInterface { public: FieldTrialConstrained(absl::string_view key, T default_value, - absl::optional lower_limit, - absl::optional upper_limit) + std::optional lower_limit, + std::optional upper_limit) : FieldTrialParameterInterface(key), value_(default_value), lower_limit_(lower_limit), @@ -121,9 +121,9 @@ class FieldTrialConstrained : public FieldTrialParameterInterface { const T* operator->() const { return &value_; } protected: - bool Parse(absl::optional str_value) override { + bool Parse(std::optional str_value) override { if (str_value) { - absl::optional value = ParseTypedParameter(*str_value); + std::optional value = ParseTypedParameter(*str_value); if (value && (!lower_limit_ || *value >= *lower_limit_) && (!upper_limit_ || *value <= *upper_limit_)) { value_ = *value; @@ -135,8 +135,8 @@ class FieldTrialConstrained : public FieldTrialParameterInterface { private: T value_; - absl::optional lower_limit_; - absl::optional upper_limit_; + std::optional lower_limit_; + std::optional upper_limit_; }; class AbstractFieldTrialEnum : public FieldTrialParameterInterface { @@ -148,7 +148,7 @@ class AbstractFieldTrialEnum : public FieldTrialParameterInterface { AbstractFieldTrialEnum(const AbstractFieldTrialEnum&); protected: - bool Parse(absl::optional str_value) override; + bool Parse(std::optional str_value) override; protected: int value_; @@ -181,35 +181,35 @@ class FieldTrialEnum : public AbstractFieldTrialEnum { }; // This class uses the ParseTypedParameter function to implement an optional -// parameter implementation that can default to absl::nullopt. +// parameter implementation that can default to std::nullopt. 
template class FieldTrialOptional : public FieldTrialParameterInterface { public: explicit FieldTrialOptional(absl::string_view key) : FieldTrialParameterInterface(key) {} - FieldTrialOptional(absl::string_view key, absl::optional default_value) + FieldTrialOptional(absl::string_view key, std::optional default_value) : FieldTrialParameterInterface(key), value_(default_value) {} - absl::optional GetOptional() const { return value_; } + std::optional GetOptional() const { return value_; } const T& Value() const { return value_.value(); } const T& operator*() const { return value_.value(); } const T* operator->() const { return &value_.value(); } explicit operator bool() const { return value_.has_value(); } protected: - bool Parse(absl::optional str_value) override { + bool Parse(std::optional str_value) override { if (str_value) { - absl::optional value = ParseTypedParameter(*str_value); + std::optional value = ParseTypedParameter(*str_value); if (!value.has_value()) return false; value_ = value.value(); } else { - value_ = absl::nullopt; + value_ = std::nullopt; } return true; } private: - absl::optional value_; + std::optional value_; }; // Equivalent to a FieldTrialParameter in the case that both key and value @@ -223,47 +223,46 @@ class FieldTrialFlag : public FieldTrialParameterInterface { explicit operator bool() const; protected: - bool Parse(absl::optional str_value) override; + bool Parse(std::optional str_value) override; private: bool value_; }; template -absl::optional> ParseOptionalParameter( - absl::string_view str) { +std::optional> ParseOptionalParameter(absl::string_view str) { if (str.empty()) - return absl::optional(); + return std::optional(); auto parsed = ParseTypedParameter(str); if (parsed.has_value()) return parsed; - return absl::nullopt; + return std::nullopt; } template <> -absl::optional ParseTypedParameter(absl::string_view str); +std::optional ParseTypedParameter(absl::string_view str); template <> -absl::optional ParseTypedParameter(absl::string_view str); +std::optional ParseTypedParameter(absl::string_view str); template <> -absl::optional ParseTypedParameter(absl::string_view str); +std::optional ParseTypedParameter(absl::string_view str); template <> -absl::optional ParseTypedParameter(absl::string_view str); +std::optional ParseTypedParameter(absl::string_view str); template <> -absl::optional ParseTypedParameter( +std::optional ParseTypedParameter( absl::string_view str); template <> -absl::optional> ParseTypedParameter>( +std::optional> ParseTypedParameter>( absl::string_view str); template <> -absl::optional> ParseTypedParameter>( +std::optional> ParseTypedParameter>( absl::string_view str); template <> -absl::optional> -ParseTypedParameter>(absl::string_view str); +std::optional> +ParseTypedParameter>(absl::string_view str); template <> -absl::optional> -ParseTypedParameter>(absl::string_view str); +std::optional> ParseTypedParameter>( + absl::string_view str); // Accepts true, false, else parsed with sscanf %i, true if != 0. 
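A short usage sketch tying the wrapper types above together, using the key:value trial-string format the parser unit tests that follow exercise; the free function name is illustrative only:

#include <optional>

#include "rtc_base/experiments/field_trial_parser.h"

void SketchParseFieldTrial() {
  webrtc::FieldTrialParameter<int> frames("frames", /*default_value=*/30);
  webrtc::FieldTrialConstrained<int> low("low", 10, std::nullopt, 100);
  webrtc::FieldTrialOptional<double> factor("factor");
  webrtc::FieldTrialFlag enabled("Enabled");

  webrtc::ParseFieldTrial({&enabled, &frames, &low, &factor},
                          "Enabled,frames:60,low:1000,factor:0.5");

  // frames.Get() == 60, factor.GetOptional() holds 0.5, bool(enabled) is true,
  // and low.Get() stays at its default of 10 because 1000 exceeds the upper
  // limit of 100.
}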
extern template class FieldTrialParameter; diff --git a/rtc_base/experiments/field_trial_parser_unittest.cc b/rtc_base/experiments/field_trial_parser_unittest.cc index 73d115357f..99683507c0 100644 --- a/rtc_base/experiments/field_trial_parser_unittest.cc +++ b/rtc_base/experiments/field_trial_parser_unittest.cc @@ -116,8 +116,8 @@ TEST(FieldTrialParserTest, IgnoresInvalid) { EXPECT_EQ(exp.hash.Get(), "a80"); } TEST(FieldTrialParserTest, IgnoresOutOfRange) { - FieldTrialConstrained low("low", 10, absl::nullopt, 100); - FieldTrialConstrained high("high", 10, 5, absl::nullopt); + FieldTrialConstrained low("low", 10, std::nullopt, 100); + FieldTrialConstrained high("high", 10, 5, std::nullopt); ParseFieldTrial({&low, &high}, "low:1000,high:0"); EXPECT_EQ(low.Get(), 10); EXPECT_EQ(high.Get(), 10); @@ -141,7 +141,7 @@ TEST(FieldTrialParserTest, ReadsValuesFromFieldWithoutKey) { EXPECT_EQ(req.Get(), 30); } TEST(FieldTrialParserTest, ParsesOptionalParameters) { - FieldTrialOptional max_count("c", absl::nullopt); + FieldTrialOptional max_count("c", std::nullopt); ParseFieldTrial({&max_count}, ""); EXPECT_FALSE(max_count.GetOptional().has_value()); ParseFieldTrial({&max_count}, "c:10"); @@ -153,7 +153,7 @@ TEST(FieldTrialParserTest, ParsesOptionalParameters) { ParseFieldTrial({&max_count}, "c:"); EXPECT_EQ(max_count.GetOptional().value(), 20); - FieldTrialOptional max_size("c", absl::nullopt); + FieldTrialOptional max_size("c", std::nullopt); ParseFieldTrial({&max_size}, ""); EXPECT_FALSE(max_size.GetOptional().has_value()); ParseFieldTrial({&max_size}, "c:10"); diff --git a/rtc_base/experiments/field_trial_units.cc b/rtc_base/experiments/field_trial_units.cc index 92af46a9e3..ddb954a28d 100644 --- a/rtc_base/experiments/field_trial_units.cc +++ b/rtc_base/experiments/field_trial_units.cc @@ -12,10 +12,10 @@ #include #include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" // Large enough to fit "seconds", the longest supported unit name. 
#define RTC_TRIAL_UNIT_LENGTH_STR "7" @@ -29,7 +29,7 @@ struct ValueWithUnit { std::string unit; }; -absl::optional ParseValueWithUnit(absl::string_view str) { +std::optional ParseValueWithUnit(absl::string_view str) { if (str == "inf") { return ValueWithUnit{std::numeric_limits::infinity(), ""}; } else if (str == "-inf") { @@ -43,13 +43,13 @@ absl::optional ParseValueWithUnit(absl::string_view str) { return ValueWithUnit{double_val, unit_char}; } } - return absl::nullopt; + return std::nullopt; } } // namespace template <> -absl::optional ParseTypedParameter(absl::string_view str) { - absl::optional result = ParseValueWithUnit(str); +std::optional ParseTypedParameter(absl::string_view str) { + std::optional result = ParseValueWithUnit(str); if (result) { if (result->unit.empty() || result->unit == "kbps") { return DataRate::KilobitsPerSec(result->value); @@ -57,23 +57,22 @@ absl::optional ParseTypedParameter(absl::string_view str) { return DataRate::BitsPerSec(result->value); } } - return absl::nullopt; + return std::nullopt; } template <> -absl::optional ParseTypedParameter(absl::string_view str) { - absl::optional result = ParseValueWithUnit(str); +std::optional ParseTypedParameter(absl::string_view str) { + std::optional result = ParseValueWithUnit(str); if (result) { if (result->unit.empty() || result->unit == "bytes") return DataSize::Bytes(result->value); } - return absl::nullopt; + return std::nullopt; } template <> -absl::optional ParseTypedParameter( - absl::string_view str) { - absl::optional result = ParseValueWithUnit(str); +std::optional ParseTypedParameter(absl::string_view str) { + std::optional result = ParseValueWithUnit(str); if (result) { if (result->unit == "s" || result->unit == "seconds") { return TimeDelta::Seconds(result->value); @@ -83,22 +82,22 @@ absl::optional ParseTypedParameter( return TimeDelta::Millis(result->value); } } - return absl::nullopt; + return std::nullopt; } template <> -absl::optional> -ParseTypedParameter>(absl::string_view str) { +std::optional> +ParseTypedParameter>(absl::string_view str) { return ParseOptionalParameter(str); } template <> -absl::optional> -ParseTypedParameter>(absl::string_view str) { +std::optional> +ParseTypedParameter>(absl::string_view str) { return ParseOptionalParameter(str); } template <> -absl::optional> -ParseTypedParameter>(absl::string_view str) { +std::optional> +ParseTypedParameter>(absl::string_view str) { return ParseOptionalParameter(str); } diff --git a/rtc_base/experiments/field_trial_units.h b/rtc_base/experiments/field_trial_units.h index 408367c031..0bab5feeb6 100644 --- a/rtc_base/experiments/field_trial_units.h +++ b/rtc_base/experiments/field_trial_units.h @@ -19,11 +19,11 @@ namespace webrtc { template <> -absl::optional ParseTypedParameter(absl::string_view str); +std::optional ParseTypedParameter(absl::string_view str); template <> -absl::optional ParseTypedParameter(absl::string_view str); +std::optional ParseTypedParameter(absl::string_view str); template <> -absl::optional ParseTypedParameter(absl::string_view str); +std::optional ParseTypedParameter(absl::string_view str); extern template class FieldTrialParameter; extern template class FieldTrialParameter; diff --git a/rtc_base/experiments/field_trial_units_unittest.cc b/rtc_base/experiments/field_trial_units_unittest.cc index 8996663d8e..82ef580017 100644 --- a/rtc_base/experiments/field_trial_units_unittest.cc +++ b/rtc_base/experiments/field_trial_units_unittest.cc @@ -9,10 +9,10 @@ */ #include "rtc_base/experiments/field_trial_units.h" 
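The unit handling above boils down to: DataRate strings default to kbps and also accept a "bps" suffix, TimeDelta strings default to milliseconds and also accept "s"/"seconds" or "ms", and "inf"/"-inf" map to infinities. A sketch using only headers this patch already touches:

#include <optional>

#include "api/units/data_rate.h"
#include "api/units/time_delta.h"
#include "rtc_base/experiments/field_trial_parser.h"
#include "rtc_base/experiments/field_trial_units.h"

void SketchUnitParsing() {
  // "128kbps" parses to DataRate::KilobitsPerSec(128); a bare "128" is kbps too.
  std::optional<webrtc::DataRate> rate =
      webrtc::ParseTypedParameter<webrtc::DataRate>("128kbps");
  // "2s" parses to TimeDelta::Seconds(2); a bare "250" would be 250 ms.
  std::optional<webrtc::TimeDelta> period =
      webrtc::ParseTypedParameter<webrtc::TimeDelta>("2s");
  // Anything unparsable (e.g. "fast") yields std::nullopt.
  (void)rate;
  (void)period;
}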
+#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "rtc_base/experiments/field_trial_parser.h" #include "test/gtest.h" @@ -24,7 +24,7 @@ struct DummyExperiment { FieldTrialParameter period = FieldTrialParameter("p", TimeDelta::Millis(100)); FieldTrialOptional max_buffer = - FieldTrialOptional("b", absl::nullopt); + FieldTrialOptional("b", std::nullopt); explicit DummyExperiment(absl::string_view field_trial) { ParseFieldTrial({&target_rate, &max_buffer, &period}, field_trial); diff --git a/rtc_base/experiments/keyframe_interval_settings.cc b/rtc_base/experiments/keyframe_interval_settings.cc index 413e2a91d5..760870bc79 100644 --- a/rtc_base/experiments/keyframe_interval_settings.cc +++ b/rtc_base/experiments/keyframe_interval_settings.cc @@ -10,7 +10,7 @@ #include "rtc_base/experiments/keyframe_interval_settings.h" -#include "api/transport/field_trial_based_config.h" +#include "api/field_trials_view.h" namespace webrtc { @@ -21,19 +21,13 @@ constexpr char kFieldTrialName[] = "WebRTC-KeyframeInterval"; } // namespace KeyframeIntervalSettings::KeyframeIntervalSettings( - const FieldTrialsView* const key_value_config) + const FieldTrialsView& key_value_config) : min_keyframe_send_interval_ms_("min_keyframe_send_interval_ms") { ParseFieldTrial({&min_keyframe_send_interval_ms_}, - key_value_config->Lookup(kFieldTrialName)); + key_value_config.Lookup(kFieldTrialName)); } -KeyframeIntervalSettings KeyframeIntervalSettings::ParseFromFieldTrials() { - FieldTrialBasedConfig field_trial_config; - return KeyframeIntervalSettings(&field_trial_config); -} - -absl::optional KeyframeIntervalSettings::MinKeyframeSendIntervalMs() - const { +std::optional KeyframeIntervalSettings::MinKeyframeSendIntervalMs() const { return min_keyframe_send_interval_ms_.GetOptional(); } } // namespace webrtc diff --git a/rtc_base/experiments/keyframe_interval_settings.h b/rtc_base/experiments/keyframe_interval_settings.h index aff7854516..a9c1971ac2 100644 --- a/rtc_base/experiments/keyframe_interval_settings.h +++ b/rtc_base/experiments/keyframe_interval_settings.h @@ -11,26 +11,25 @@ #ifndef RTC_BASE_EXPERIMENTS_KEYFRAME_INTERVAL_SETTINGS_H_ #define RTC_BASE_EXPERIMENTS_KEYFRAME_INTERVAL_SETTINGS_H_ -#include "absl/types/optional.h" +#include + #include "api/field_trials_view.h" #include "rtc_base/experiments/field_trial_parser.h" namespace webrtc { -// TODO(bugs.webrtc.org/10427): Remove and replace with proper configuration +// TODO: bugs.webrtc.org/42220470 - Remove and replace with proper configuration // parameter, or move to using FIR if intent is to avoid triggering multiple // times to PLIs corresponding to the same request when RTT is large. class KeyframeIntervalSettings final { public: - static KeyframeIntervalSettings ParseFromFieldTrials(); + explicit KeyframeIntervalSettings(const FieldTrialsView& key_value_config); // Sender side. // The encoded keyframe send rate is <= 1/MinKeyframeSendIntervalMs(). 
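For the sender-side relation stated above: with min_keyframe_send_interval_ms:100 the encoded keyframe send rate is capped at one keyframe per 100 ms, i.e. at most ten per second. A brief usage sketch of the new constructor-injected form, with ExplicitKeyValueConfig standing in for whichever FieldTrialsView the embedder provides:

#include <optional>

#include "rtc_base/experiments/keyframe_interval_settings.h"
#include "test/explicit_key_value_config.h"

void SketchKeyframeInterval() {
  webrtc::test::ExplicitKeyValueConfig field_trials(
      "WebRTC-KeyframeInterval/min_keyframe_send_interval_ms:100/");
  webrtc::KeyframeIntervalSettings settings(field_trials);
  std::optional<int> min_interval_ms = settings.MinKeyframeSendIntervalMs();
  (void)min_interval_ms;  // Holds 100 for the trial string above.
}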
- absl::optional MinKeyframeSendIntervalMs() const; + std::optional MinKeyframeSendIntervalMs() const; private: - explicit KeyframeIntervalSettings(const FieldTrialsView* key_value_config); - FieldTrialOptional min_keyframe_send_interval_ms_; }; diff --git a/rtc_base/experiments/keyframe_interval_settings_unittest.cc b/rtc_base/experiments/keyframe_interval_settings_unittest.cc index 25cebbcd70..6aca483df0 100644 --- a/rtc_base/experiments/keyframe_interval_settings_unittest.cc +++ b/rtc_base/experiments/keyframe_interval_settings_unittest.cc @@ -10,33 +10,29 @@ #include "rtc_base/experiments/keyframe_interval_settings.h" -#include "test/field_trial.h" +#include "test/explicit_key_value_config.h" #include "test/gtest.h" namespace webrtc { namespace { +using test::ExplicitKeyValueConfig; + TEST(KeyframeIntervalSettingsTest, ParsesMinKeyframeSendIntervalMs) { - EXPECT_FALSE(KeyframeIntervalSettings::ParseFromFieldTrials() + EXPECT_FALSE(KeyframeIntervalSettings(ExplicitKeyValueConfig("")) .MinKeyframeSendIntervalMs()); - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-KeyframeInterval/min_keyframe_send_interval_ms:100/"); - EXPECT_EQ(KeyframeIntervalSettings::ParseFromFieldTrials() - .MinKeyframeSendIntervalMs(), + EXPECT_EQ(KeyframeIntervalSettings(field_trials).MinKeyframeSendIntervalMs(), 100); } TEST(KeyframeIntervalSettingsTest, DoesNotParseIncorrectValues) { - EXPECT_FALSE(KeyframeIntervalSettings::ParseFromFieldTrials() - .MinKeyframeSendIntervalMs()); - - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-KeyframeInterval/min_keyframe_send_interval_ms:a/"); - EXPECT_FALSE(KeyframeIntervalSettings::ParseFromFieldTrials() - .MinKeyframeSendIntervalMs()); - EXPECT_FALSE(KeyframeIntervalSettings::ParseFromFieldTrials() - .MinKeyframeSendIntervalMs()); + EXPECT_FALSE( + KeyframeIntervalSettings(field_trials).MinKeyframeSendIntervalMs()); } } // namespace diff --git a/rtc_base/experiments/min_video_bitrate_experiment.cc b/rtc_base/experiments/min_video_bitrate_experiment.cc index f37c4e9c76..dd216b8b27 100644 --- a/rtc_base/experiments/min_video_bitrate_experiment.cc +++ b/rtc_base/experiments/min_video_bitrate_experiment.cc @@ -12,10 +12,10 @@ #include +#include "api/field_trials_view.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { @@ -26,19 +26,20 @@ const char kForcedFallbackFieldTrial[] = "WebRTC-VP8-Forced-Fallback-Encoder-v2"; const char kMinVideoBitrateExperiment[] = "WebRTC-Video-MinVideoBitrate"; -absl::optional GetFallbackMinBpsFromFieldTrial(VideoCodecType type) { +std::optional GetFallbackMinBpsFromFieldTrial( + const FieldTrialsView& field_trials, + VideoCodecType type) { if (type != kVideoCodecVP8) { - return absl::nullopt; + return std::nullopt; } - if (!webrtc::field_trial::IsEnabled(kForcedFallbackFieldTrial)) { - return absl::nullopt; + if (!field_trials.IsEnabled(kForcedFallbackFieldTrial)) { + return std::nullopt; } - const std::string group = - webrtc::field_trial::FindFullName(kForcedFallbackFieldTrial); + const std::string group = field_trials.Lookup(kForcedFallbackFieldTrial); if (group.empty()) { - return absl::nullopt; + return std::nullopt; } int min_pixels; // Ignored. 
@@ -46,25 +47,27 @@ absl::optional GetFallbackMinBpsFromFieldTrial(VideoCodecType type) { int min_bps; if (sscanf(group.c_str(), "Enabled-%d,%d,%d", &min_pixels, &max_pixels, &min_bps) != 3) { - return absl::nullopt; + return std::nullopt; } if (min_bps <= 0) { - return absl::nullopt; + return std::nullopt; } return min_bps; } } // namespace -absl::optional GetExperimentalMinVideoBitrate(VideoCodecType type) { - const absl::optional fallback_min_bitrate_bps = - GetFallbackMinBpsFromFieldTrial(type); +std::optional GetExperimentalMinVideoBitrate( + const FieldTrialsView& field_trials, + VideoCodecType type) { + const std::optional fallback_min_bitrate_bps = + GetFallbackMinBpsFromFieldTrial(field_trials, type); if (fallback_min_bitrate_bps) { return DataRate::BitsPerSec(*fallback_min_bitrate_bps); } - if (webrtc::field_trial::IsEnabled(kMinVideoBitrateExperiment)) { + if (field_trials.IsEnabled(kMinVideoBitrateExperiment)) { webrtc::FieldTrialFlag enabled("Enabled"); // Backwards-compatibility with an old experiment - a generic minimum which, @@ -80,7 +83,7 @@ absl::optional GetExperimentalMinVideoBitrate(VideoCodecType type) { webrtc::ParseFieldTrial( {&enabled, &min_video_bitrate, &min_bitrate_vp8, &min_bitrate_vp9, &min_bitrate_av1, &min_bitrate_h264}, - webrtc::field_trial::FindFullName(kMinVideoBitrateExperiment)); + field_trials.Lookup(kMinVideoBitrateExperiment)); if (min_video_bitrate) { if (min_bitrate_vp8 || min_bitrate_vp9 || min_bitrate_av1 || @@ -94,6 +97,8 @@ absl::optional GetExperimentalMinVideoBitrate(VideoCodecType type) { switch (type) { case kVideoCodecVP8: return min_bitrate_vp8.GetOptional(); + case kVideoCodecH265: + // TODO(bugs.webrtc.org/13485): Use VP9 bitrate limits for now. case kVideoCodecVP9: return min_bitrate_vp9.GetOptional(); case kVideoCodecAV1: @@ -101,14 +106,13 @@ absl::optional GetExperimentalMinVideoBitrate(VideoCodecType type) { case kVideoCodecH264: return min_bitrate_h264.GetOptional(); case kVideoCodecGeneric: - case kVideoCodecMultiplex: - return absl::nullopt; + return std::nullopt; } RTC_DCHECK_NOTREACHED(); } - return absl::nullopt; + return std::nullopt; } } // namespace webrtc diff --git a/rtc_base/experiments/min_video_bitrate_experiment.h b/rtc_base/experiments/min_video_bitrate_experiment.h index 9ea8783894..af6acb3cf6 100644 --- a/rtc_base/experiments/min_video_bitrate_experiment.h +++ b/rtc_base/experiments/min_video_bitrate_experiment.h @@ -11,7 +11,9 @@ #ifndef RTC_BASE_EXPERIMENTS_MIN_VIDEO_BITRATE_EXPERIMENT_H_ #define RTC_BASE_EXPERIMENTS_MIN_VIDEO_BITRATE_EXPERIMENT_H_ -#include "absl/types/optional.h" +#include + +#include "api/field_trials_view.h" #include "api/units/data_rate.h" #include "api/video/video_codec_type.h" @@ -21,7 +23,9 @@ extern const int kDefaultMinVideoBitrateBps; // Return the experiment-driven minimum video bitrate. // If no experiment is effective, returns nullopt. 
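The precedence implemented above is easiest to see with the same trial strings the unit tests below use: for VP8 a forced-fallback minimum (the third value of "Enabled-...", in bps) wins over anything in WebRTC-Video-MinVideoBitrate; other codecs take their per-codec values, and a generic br: value, when present, trumps those. A sketch under those assumptions:

#include <optional>

#include "api/units/data_rate.h"
#include "api/video/video_codec_type.h"
#include "rtc_base/experiments/min_video_bitrate_experiment.h"
#include "test/explicit_key_value_config.h"

void SketchMinVideoBitrate() {
  webrtc::test::ExplicitKeyValueConfig field_trials(
      "WebRTC-Video-MinVideoBitrate/"
      "Enabled,vp8_br:100kbps,vp9_br:200kbps,h264_br:300kbps/"
      "WebRTC-VP8-Forced-Fallback-Encoder-v2/"
      "Enabled-444444,555555,666666/");
  // VP8: the forced-fallback minimum wins, i.e. DataRate::BitsPerSec(666666).
  std::optional<webrtc::DataRate> vp8_min =
      webrtc::GetExperimentalMinVideoBitrate(field_trials,
                                             webrtc::kVideoCodecVP8);
  // VP9: the per-codec value from the MinVideoBitrate trial, i.e. 200 kbps.
  std::optional<webrtc::DataRate> vp9_min =
      webrtc::GetExperimentalMinVideoBitrate(field_trials,
                                             webrtc::kVideoCodecVP9);
  (void)vp8_min;
  (void)vp9_min;
}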
-absl::optional GetExperimentalMinVideoBitrate(VideoCodecType type); +std::optional GetExperimentalMinVideoBitrate( + const FieldTrialsView& field_trials, + VideoCodecType type); } // namespace webrtc diff --git a/rtc_base/experiments/min_video_bitrate_experiment_unittest.cc b/rtc_base/experiments/min_video_bitrate_experiment_unittest.cc index 9792bad61c..182669dc02 100644 --- a/rtc_base/experiments/min_video_bitrate_experiment_unittest.cc +++ b/rtc_base/experiments/min_video_bitrate_experiment_unittest.cc @@ -10,151 +10,133 @@ #include "rtc_base/experiments/min_video_bitrate_experiment.h" -#include "absl/types/optional.h" +#include + #include "api/units/data_rate.h" #include "api/video/video_codec_type.h" -#include "test/field_trial.h" +#include "test/explicit_key_value_config.h" #include "test/gtest.h" namespace webrtc { namespace { +using test::ExplicitKeyValueConfig; + TEST(GetExperimentalMinVideoBitrateTest, NulloptForAllCodecsIfFieldTrialUndefined) { - test::ScopedFieldTrials field_trials(""); - - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecGeneric), - absl::nullopt); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP8), - absl::nullopt); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP9), - absl::nullopt); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecH264), - absl::nullopt); - EXPECT_EQ( - GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecMultiplex), - absl::nullopt); + ExplicitKeyValueConfig field_trials(""); + + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecGeneric), + std::nullopt); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecVP8), + std::nullopt); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecVP9), + std::nullopt); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecH264), + std::nullopt); } TEST(GetExperimentalMinVideoBitrateTest, NulloptForAllCodecsIfFieldTrialDisabled) { - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Video-MinVideoBitrate/Disabled,br:123kbps/"); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecGeneric), - absl::nullopt); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP8), - absl::nullopt); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP9), - absl::nullopt); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecH264), - absl::nullopt); - EXPECT_EQ( - GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecMultiplex), - absl::nullopt); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecGeneric), + std::nullopt); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecVP8), + std::nullopt); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecVP9), + std::nullopt); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecH264), + std::nullopt); } TEST(GetExperimentalMinVideoBitrateTest, BrForAllCodecsIfDefined) { - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Video-MinVideoBitrate/Enabled,br:123kbps/"); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecGeneric), - absl::make_optional(DataRate::KilobitsPerSec(123))); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP8), - absl::make_optional(DataRate::KilobitsPerSec(123))); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP9), - 
absl::make_optional(DataRate::KilobitsPerSec(123))); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecH264), - absl::make_optional(DataRate::KilobitsPerSec(123))); - EXPECT_EQ( - GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecMultiplex), - absl::make_optional(DataRate::KilobitsPerSec(123))); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecGeneric), + DataRate::KilobitsPerSec(123)); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecVP8), + DataRate::KilobitsPerSec(123)); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecVP9), + DataRate::KilobitsPerSec(123)); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecH264), + DataRate::KilobitsPerSec(123)); } TEST(GetExperimentalMinVideoBitrateTest, BrTrumpsSpecificCodecConfigs) { - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Video-MinVideoBitrate/" "Enabled,br:123kbps,vp8_br:100kbps,vp9_br:200kbps,h264_br:300kbps/"); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecGeneric), - absl::make_optional(DataRate::KilobitsPerSec(123))); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP8), - absl::make_optional(DataRate::KilobitsPerSec(123))); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP9), - absl::make_optional(DataRate::KilobitsPerSec(123))); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecH264), - absl::make_optional(DataRate::KilobitsPerSec(123))); - EXPECT_EQ( - GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecMultiplex), - absl::make_optional(DataRate::KilobitsPerSec(123))); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecGeneric), + DataRate::KilobitsPerSec(123)); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecVP8), + DataRate::KilobitsPerSec(123)); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecVP9), + DataRate::KilobitsPerSec(123)); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecH264), + DataRate::KilobitsPerSec(123)); } TEST(GetExperimentalMinVideoBitrateTest, SpecificCodecConfigsIgnoredIfExpDisabled) { - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Video-MinVideoBitrate/" "Disabled,vp8_br:100kbps,vp9_br:200kbps,h264_br:300kbps/"); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecGeneric), - absl::nullopt); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP8), - absl::nullopt); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP9), - absl::nullopt); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecH264), - absl::nullopt); - EXPECT_EQ( - GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecMultiplex), - absl::nullopt); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecGeneric), + std::nullopt); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecVP8), + std::nullopt); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecVP9), + std::nullopt); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecH264), + std::nullopt); } TEST(GetExperimentalMinVideoBitrateTest, SpecificCodecConfigsUsedIfExpEnabled) { - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Video-MinVideoBitrate/" "Enabled,vp8_br:100kbps,vp9_br:200kbps,h264_br:300kbps/"); - 
EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecGeneric), - absl::nullopt); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP8), - absl::make_optional(DataRate::KilobitsPerSec(100))); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP9), - absl::make_optional(DataRate::KilobitsPerSec(200))); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecH264), - absl::make_optional(DataRate::KilobitsPerSec(300))); - EXPECT_EQ( - GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecMultiplex), - absl::nullopt); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecGeneric), + std::nullopt); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecVP8), + DataRate::KilobitsPerSec(100)); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecVP9), + DataRate::KilobitsPerSec(200)); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecH264), + DataRate::KilobitsPerSec(300)); } TEST(GetExperimentalMinVideoBitrateTest, Vp8BitrateValueTakenFromFallbackIfAvailable) { - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Video-MinVideoBitrate/" "Enabled,vp8_br:100kbps,vp9_br:200kbps,h264_br:300kbps/" "WebRTC-VP8-Forced-Fallback-Encoder-v2/" "Enabled-444444,555555,666666/"); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP8), - absl::make_optional(DataRate::BitsPerSec(666666))); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecVP8), + DataRate::BitsPerSec(666666)); } TEST(GetExperimentalMinVideoBitrateTest, NonVp8BitrateValuesTakenFromMinVideoBitrate) { - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-Video-MinVideoBitrate/" "Enabled,vp8_br:100kbps,vp9_br:200kbps,h264_br:300kbps/" "WebRTC-VP8-Forced-Fallback-Encoder-v2/" "Enabled-444444,555555,666666/"); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecGeneric), - absl::nullopt); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecVP9), - absl::make_optional(DataRate::KilobitsPerSec(200))); - EXPECT_EQ(GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecH264), - absl::make_optional(DataRate::KilobitsPerSec(300))); - EXPECT_EQ( - GetExperimentalMinVideoBitrate(VideoCodecType::kVideoCodecMultiplex), - absl::nullopt); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecGeneric), + std::nullopt); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecVP9), + DataRate::KilobitsPerSec(200)); + EXPECT_EQ(GetExperimentalMinVideoBitrate(field_trials, kVideoCodecH264), + DataRate::KilobitsPerSec(300)); } } // namespace diff --git a/rtc_base/experiments/normalize_simulcast_size_experiment.cc b/rtc_base/experiments/normalize_simulcast_size_experiment.cc index ce817a0797..0e54357176 100644 --- a/rtc_base/experiments/normalize_simulcast_size_experiment.cc +++ b/rtc_base/experiments/normalize_simulcast_size_experiment.cc @@ -14,8 +14,8 @@ #include +#include "api/field_trials_view.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { @@ -24,26 +24,27 @@ constexpr int kMinSetting = 0; constexpr int kMaxSetting = 5; } // namespace -absl::optional NormalizeSimulcastSizeExperiment::GetBase2Exponent() { - if (!webrtc::field_trial::IsEnabled(kFieldTrial)) - return absl::nullopt; +std::optional NormalizeSimulcastSizeExperiment::GetBase2Exponent( + const FieldTrialsView& field_trials) { + if 
(!field_trials.IsEnabled(kFieldTrial)) + return std::nullopt; - const std::string group = webrtc::field_trial::FindFullName(kFieldTrial); + const std::string group = field_trials.Lookup(kFieldTrial); if (group.empty()) - return absl::nullopt; + return std::nullopt; int exponent; if (sscanf(group.c_str(), "Enabled-%d", &exponent) != 1) { RTC_LOG(LS_WARNING) << "No parameter provided."; - return absl::nullopt; + return std::nullopt; } if (exponent < kMinSetting || exponent > kMaxSetting) { RTC_LOG(LS_WARNING) << "Unsupported exp value provided, value ignored."; - return absl::nullopt; + return std::nullopt; } - return absl::optional(exponent); + return std::optional(exponent); } } // namespace webrtc diff --git a/rtc_base/experiments/normalize_simulcast_size_experiment.h b/rtc_base/experiments/normalize_simulcast_size_experiment.h index 6b358202b2..79eada103e 100644 --- a/rtc_base/experiments/normalize_simulcast_size_experiment.h +++ b/rtc_base/experiments/normalize_simulcast_size_experiment.h @@ -11,13 +11,16 @@ #ifndef RTC_BASE_EXPERIMENTS_NORMALIZE_SIMULCAST_SIZE_EXPERIMENT_H_ #define RTC_BASE_EXPERIMENTS_NORMALIZE_SIMULCAST_SIZE_EXPERIMENT_H_ -#include "absl/types/optional.h" +#include + +#include "api/field_trials_view.h" namespace webrtc { class NormalizeSimulcastSizeExperiment { public: // Returns the base two exponent from field trial. - static absl::optional GetBase2Exponent(); + static std::optional GetBase2Exponent( + const FieldTrialsView& field_trials); }; } // namespace webrtc diff --git a/rtc_base/experiments/normalize_simulcast_size_experiment_unittest.cc b/rtc_base/experiments/normalize_simulcast_size_experiment_unittest.cc index 34e07427cf..afafa01176 100644 --- a/rtc_base/experiments/normalize_simulcast_size_experiment_unittest.cc +++ b/rtc_base/experiments/normalize_simulcast_size_experiment_unittest.cc @@ -10,50 +10,58 @@ #include "rtc_base/experiments/normalize_simulcast_size_experiment.h" -#include "test/field_trial.h" +#include "test/explicit_key_value_config.h" #include "test/gtest.h" namespace webrtc { +using test::ExplicitKeyValueConfig; + TEST(NormalizeSimulcastSizeExperimentTest, GetExponent) { - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-NormalizeSimulcastResolution/Enabled-2/"); - EXPECT_EQ(2, NormalizeSimulcastSizeExperiment::GetBase2Exponent()); + EXPECT_EQ(2, + NormalizeSimulcastSizeExperiment::GetBase2Exponent(field_trials)); } TEST(NormalizeSimulcastSizeExperimentTest, GetExponentWithTwoParameters) { - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-NormalizeSimulcastResolution/Enabled-3-4/"); - EXPECT_EQ(3, NormalizeSimulcastSizeExperiment::GetBase2Exponent()); + EXPECT_EQ(3, + NormalizeSimulcastSizeExperiment::GetBase2Exponent(field_trials)); } TEST(NormalizeSimulcastSizeExperimentTest, GetExponentFailsIfNotEnabled) { - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-NormalizeSimulcastResolution/Disabled/"); - EXPECT_FALSE(NormalizeSimulcastSizeExperiment::GetBase2Exponent()); + EXPECT_FALSE( + NormalizeSimulcastSizeExperiment::GetBase2Exponent(field_trials)); } TEST(NormalizeSimulcastSizeExperimentTest, GetExponentFailsForInvalidFieldTrial) { - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-NormalizeSimulcastResolution/Enabled-invalid/"); - EXPECT_FALSE(NormalizeSimulcastSizeExperiment::GetBase2Exponent()); + EXPECT_FALSE( + 
NormalizeSimulcastSizeExperiment::GetBase2Exponent(field_trials)); } TEST(NormalizeSimulcastSizeExperimentTest, GetExponentFailsForNegativeOutOfBoundValue) { // Supported range: [0, 5]. - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-NormalizeSimulcastResolution/Enabled--1/"); - EXPECT_FALSE(NormalizeSimulcastSizeExperiment::GetBase2Exponent()); + EXPECT_FALSE( + NormalizeSimulcastSizeExperiment::GetBase2Exponent(field_trials)); } TEST(NormalizeSimulcastSizeExperimentTest, GetExponentFailsForPositiveOutOfBoundValue) { // Supported range: [0, 5]. - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-NormalizeSimulcastResolution/Enabled-6/"); - EXPECT_FALSE(NormalizeSimulcastSizeExperiment::GetBase2Exponent()); + EXPECT_FALSE( + NormalizeSimulcastSizeExperiment::GetBase2Exponent(field_trials)); } } // namespace webrtc diff --git a/rtc_base/experiments/quality_rampup_experiment.cc b/rtc_base/experiments/quality_rampup_experiment.cc deleted file mode 100644 index 509ba91dc3..0000000000 --- a/rtc_base/experiments/quality_rampup_experiment.cc +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/experiments/quality_rampup_experiment.h" - -#include - -#include "api/transport/field_trial_based_config.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -QualityRampupExperiment::QualityRampupExperiment( - const FieldTrialsView* const key_value_config) - : min_pixels_("min_pixels"), - min_duration_ms_("min_duration_ms"), - max_bitrate_factor_("max_bitrate_factor") { - ParseFieldTrial( - {&min_pixels_, &min_duration_ms_, &max_bitrate_factor_}, - key_value_config->Lookup("WebRTC-Video-QualityRampupSettings")); -} - -QualityRampupExperiment QualityRampupExperiment::ParseSettings() { - FieldTrialBasedConfig field_trial_config; - return QualityRampupExperiment(&field_trial_config); -} - -absl::optional QualityRampupExperiment::MinPixels() const { - return min_pixels_.GetOptional(); -} - -absl::optional QualityRampupExperiment::MinDurationMs() const { - return min_duration_ms_.GetOptional(); -} - -absl::optional QualityRampupExperiment::MaxBitrateFactor() const { - return max_bitrate_factor_.GetOptional(); -} - -void QualityRampupExperiment::SetMaxBitrate(int pixels, - uint32_t max_bitrate_kbps) { - if (!min_pixels_ || pixels < min_pixels_.Value() || max_bitrate_kbps == 0) { - return; - } - max_bitrate_kbps_ = std::max(max_bitrate_kbps_.value_or(0), max_bitrate_kbps); -} - -bool QualityRampupExperiment::BwHigh(int64_t now_ms, - uint32_t available_bw_kbps) { - if (!min_pixels_ || !min_duration_ms_ || !max_bitrate_kbps_) { - return false; - } - - if (available_bw_kbps < - max_bitrate_kbps_.value() * MaxBitrateFactor().value_or(1)) { - start_ms_.reset(); - return false; - } - - if (!start_ms_) - start_ms_ = now_ms; - - return (now_ms - *start_ms_) >= min_duration_ms_.Value(); -} - -void QualityRampupExperiment::Reset() { - start_ms_.reset(); - max_bitrate_kbps_.reset(); -} - -bool QualityRampupExperiment::Enabled() const { - return min_pixels_ && min_duration_ms_; -} - -} // namespace webrtc diff --git 
a/rtc_base/experiments/quality_rampup_experiment.h b/rtc_base/experiments/quality_rampup_experiment.h deleted file mode 100644 index e8048a3c1c..0000000000 --- a/rtc_base/experiments/quality_rampup_experiment.h +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_EXPERIMENTS_QUALITY_RAMPUP_EXPERIMENT_H_ -#define RTC_BASE_EXPERIMENTS_QUALITY_RAMPUP_EXPERIMENT_H_ - -#include "absl/types/optional.h" -#include "api/field_trials_view.h" -#include "rtc_base/experiments/field_trial_parser.h" - -namespace webrtc { - -class QualityRampupExperiment final { - public: - static QualityRampupExperiment ParseSettings(); - - absl::optional MinPixels() const; - absl::optional MinDurationMs() const; - absl::optional MaxBitrateFactor() const; - - // Sets the max bitrate and the frame size. - // The call has no effect if the frame size is less than `min_pixels_`. - void SetMaxBitrate(int pixels, uint32_t max_bitrate_kbps); - - // Returns true if the available bandwidth is a certain percentage - // (max_bitrate_factor_) above `max_bitrate_kbps_` for `min_duration_ms_`. - bool BwHigh(int64_t now_ms, uint32_t available_bw_kbps); - - void Reset(); - bool Enabled() const; - - private: - explicit QualityRampupExperiment( - const FieldTrialsView* const key_value_config); - - FieldTrialOptional min_pixels_; - FieldTrialOptional min_duration_ms_; - FieldTrialOptional max_bitrate_factor_; - - absl::optional start_ms_; - absl::optional max_bitrate_kbps_; -}; - -} // namespace webrtc - -#endif // RTC_BASE_EXPERIMENTS_QUALITY_RAMPUP_EXPERIMENT_H_ diff --git a/rtc_base/experiments/quality_rampup_experiment_unittest.cc b/rtc_base/experiments/quality_rampup_experiment_unittest.cc deleted file mode 100644 index b0ede34791..0000000000 --- a/rtc_base/experiments/quality_rampup_experiment_unittest.cc +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "rtc_base/experiments/quality_rampup_experiment.h" - -#include "test/field_trial.h" -#include "test/gtest.h" - -namespace webrtc { -namespace { - -class QualityRampupExperimentTest : public ::testing::Test { - protected: - int64_t NowMs() const { return current_ms_; } - int64_t AdvanceMs(int64_t delta_ms) { - current_ms_ += delta_ms; - return current_ms_; - } - int64_t current_ms_ = 2345; -}; - -TEST_F(QualityRampupExperimentTest, ValuesNotSetByDefault) { - const auto settings = QualityRampupExperiment::ParseSettings(); - EXPECT_FALSE(settings.MinPixels()); - EXPECT_FALSE(settings.MinDurationMs()); - EXPECT_FALSE(settings.MaxBitrateFactor()); -} - -TEST_F(QualityRampupExperimentTest, ParseMinPixels) { - test::ScopedFieldTrials field_trials( - "WebRTC-Video-QualityRampupSettings/min_pixels:10000/"); - EXPECT_EQ(10000, QualityRampupExperiment::ParseSettings().MinPixels()); -} - -TEST_F(QualityRampupExperimentTest, ParseMinDuration) { - test::ScopedFieldTrials field_trials( - "WebRTC-Video-QualityRampupSettings/min_duration_ms:987/"); - EXPECT_EQ(987, QualityRampupExperiment::ParseSettings().MinDurationMs()); -} - -TEST_F(QualityRampupExperimentTest, ParseMaxBitrateFactor) { - test::ScopedFieldTrials field_trials( - "WebRTC-Video-QualityRampupSettings/max_bitrate_factor:1.23/"); - EXPECT_EQ(1.23, QualityRampupExperiment::ParseSettings().MaxBitrateFactor()); -} - -TEST_F(QualityRampupExperimentTest, ReportsBwHighWhenDurationPassed) { - test::ScopedFieldTrials field_trials( - "WebRTC-Video-QualityRampupSettings/" - "min_pixels:10000,min_duration_ms:2000/"); - auto exp = QualityRampupExperiment::ParseSettings(); - EXPECT_EQ(10000, exp.MinPixels()); - EXPECT_EQ(2000, exp.MinDurationMs()); - - const uint32_t kMaxKbps = 800; - exp.SetMaxBitrate(/*pixels*/ 10000, kMaxKbps); - - const uint32_t kAvailableKbps = kMaxKbps; - EXPECT_FALSE(exp.BwHigh(NowMs(), kAvailableKbps)); - EXPECT_FALSE(exp.BwHigh(AdvanceMs(2000 - 1), kAvailableKbps)); - EXPECT_TRUE(exp.BwHigh(AdvanceMs(1), kAvailableKbps)); -} - -TEST_F(QualityRampupExperimentTest, UsesMaxSetBitrate) { - test::ScopedFieldTrials field_trials( - "WebRTC-Video-QualityRampupSettings/" - "min_pixels:10000,min_duration_ms:2000/"); - auto exp = QualityRampupExperiment::ParseSettings(); - - const uint32_t kMaxKbps = 800; - exp.SetMaxBitrate(/*pixels*/ 10000, kMaxKbps); - exp.SetMaxBitrate(/*pixels*/ 10000, kMaxKbps - 1); - - EXPECT_FALSE(exp.BwHigh(NowMs(), kMaxKbps - 1)); - EXPECT_FALSE(exp.BwHigh(AdvanceMs(2000), kMaxKbps - 1)); - EXPECT_FALSE(exp.BwHigh(AdvanceMs(1), kMaxKbps)); - EXPECT_TRUE(exp.BwHigh(AdvanceMs(2000), kMaxKbps)); -} - -TEST_F(QualityRampupExperimentTest, DoesNotReportBwHighIfBelowMinPixels) { - test::ScopedFieldTrials field_trials( - "WebRTC-Video-QualityRampupSettings/" - "min_pixels:10000,min_duration_ms:2000/"); - auto exp = QualityRampupExperiment::ParseSettings(); - - const uint32_t kMaxKbps = 800; - exp.SetMaxBitrate(/*pixels*/ 9999, kMaxKbps); - - const uint32_t kAvailableKbps = kMaxKbps; - EXPECT_FALSE(exp.BwHigh(NowMs(), kAvailableKbps)); - EXPECT_FALSE(exp.BwHigh(AdvanceMs(2000), kAvailableKbps)); -} - -TEST_F(QualityRampupExperimentTest, ReportsBwHighWithMaxBitrateFactor) { - test::ScopedFieldTrials field_trials( - "WebRTC-Video-QualityRampupSettings/" - "min_pixels:10000,min_duration_ms:2000,max_bitrate_factor:1.5/"); - auto exp = QualityRampupExperiment::ParseSettings(); - EXPECT_EQ(10000, exp.MinPixels()); - EXPECT_EQ(2000, exp.MinDurationMs()); - EXPECT_EQ(1.5, exp.MaxBitrateFactor()); - - const uint32_t 
kMaxKbps = 800; - exp.SetMaxBitrate(/*pixels*/ 10000, kMaxKbps); - - const uint32_t kAvailableKbps = kMaxKbps * 1.5; - EXPECT_FALSE(exp.BwHigh(NowMs(), kAvailableKbps - 1)); - EXPECT_FALSE(exp.BwHigh(AdvanceMs(2000), kAvailableKbps - 1)); - EXPECT_FALSE(exp.BwHigh(AdvanceMs(1), kAvailableKbps)); - EXPECT_TRUE(exp.BwHigh(AdvanceMs(2000), kAvailableKbps)); -} - -TEST_F(QualityRampupExperimentTest, ReportsBwHigh) { - test::ScopedFieldTrials field_trials( - "WebRTC-Video-QualityRampupSettings/" - "min_pixels:10000,min_duration_ms:2000/"); - auto exp = QualityRampupExperiment::ParseSettings(); - - const uint32_t kMaxKbps = 800; - exp.SetMaxBitrate(/*pixels*/ 10000, kMaxKbps); - - const uint32_t kAvailableKbps = kMaxKbps; - EXPECT_FALSE(exp.BwHigh(NowMs(), kAvailableKbps)); - EXPECT_FALSE(exp.BwHigh(AdvanceMs(2000 - 1), kAvailableKbps)); - EXPECT_FALSE(exp.BwHigh(AdvanceMs(1), kAvailableKbps - 1)); // Below, reset. - EXPECT_FALSE(exp.BwHigh(AdvanceMs(1), kAvailableKbps)); - EXPECT_FALSE(exp.BwHigh(AdvanceMs(2000 - 1), kAvailableKbps)); - EXPECT_TRUE(exp.BwHigh(AdvanceMs(1), kAvailableKbps)); -} - -} // namespace -} // namespace webrtc diff --git a/rtc_base/experiments/quality_scaler_settings.cc b/rtc_base/experiments/quality_scaler_settings.cc index 85c99255ab..0193a699ef 100644 --- a/rtc_base/experiments/quality_scaler_settings.cc +++ b/rtc_base/experiments/quality_scaler_settings.cc @@ -10,7 +10,7 @@ #include "rtc_base/experiments/quality_scaler_settings.h" -#include "api/transport/field_trial_based_config.h" +#include "api/field_trials_view.h" #include "rtc_base/logging.h" namespace webrtc { @@ -20,7 +20,7 @@ const double kMinScaleFactor = 0.01; } // namespace QualityScalerSettings::QualityScalerSettings( - const FieldTrialsView* const key_value_config) + const FieldTrialsView& field_trials) : sampling_period_ms_("sampling_period_ms"), average_qp_window_("average_qp_window"), min_frames_("min_frames"), @@ -28,73 +28,67 @@ QualityScalerSettings::QualityScalerSettings( scale_factor_("scale_factor"), initial_bitrate_interval_ms_("initial_bitrate_interval_ms"), initial_bitrate_factor_("initial_bitrate_factor") { - ParseFieldTrial( - {&sampling_period_ms_, &average_qp_window_, &min_frames_, - &initial_scale_factor_, &scale_factor_, &initial_bitrate_interval_ms_, - &initial_bitrate_factor_}, - key_value_config->Lookup("WebRTC-Video-QualityScalerSettings")); + ParseFieldTrial({&sampling_period_ms_, &average_qp_window_, &min_frames_, + &initial_scale_factor_, &scale_factor_, + &initial_bitrate_interval_ms_, &initial_bitrate_factor_}, + field_trials.Lookup("WebRTC-Video-QualityScalerSettings")); } -QualityScalerSettings QualityScalerSettings::ParseFromFieldTrials() { - FieldTrialBasedConfig field_trial_config; - return QualityScalerSettings(&field_trial_config); -} - -absl::optional QualityScalerSettings::SamplingPeriodMs() const { +std::optional QualityScalerSettings::SamplingPeriodMs() const { if (sampling_period_ms_ && sampling_period_ms_.Value() <= 0) { RTC_LOG(LS_WARNING) << "Unsupported sampling_period_ms value, ignored."; - return absl::nullopt; + return std::nullopt; } return sampling_period_ms_.GetOptional(); } -absl::optional QualityScalerSettings::AverageQpWindow() const { +std::optional QualityScalerSettings::AverageQpWindow() const { if (average_qp_window_ && average_qp_window_.Value() <= 0) { RTC_LOG(LS_WARNING) << "Unsupported average_qp_window value, ignored."; - return absl::nullopt; + return std::nullopt; } return average_qp_window_.GetOptional(); } -absl::optional 
QualityScalerSettings::MinFrames() const { +std::optional QualityScalerSettings::MinFrames() const { if (min_frames_ && min_frames_.Value() < kMinFrames) { RTC_LOG(LS_WARNING) << "Unsupported min_frames value, ignored."; - return absl::nullopt; + return std::nullopt; } return min_frames_.GetOptional(); } -absl::optional QualityScalerSettings::InitialScaleFactor() const { +std::optional QualityScalerSettings::InitialScaleFactor() const { if (initial_scale_factor_ && initial_scale_factor_.Value() < kMinScaleFactor) { RTC_LOG(LS_WARNING) << "Unsupported initial_scale_factor value, ignored."; - return absl::nullopt; + return std::nullopt; } return initial_scale_factor_.GetOptional(); } -absl::optional QualityScalerSettings::ScaleFactor() const { +std::optional QualityScalerSettings::ScaleFactor() const { if (scale_factor_ && scale_factor_.Value() < kMinScaleFactor) { RTC_LOG(LS_WARNING) << "Unsupported scale_factor value, ignored."; - return absl::nullopt; + return std::nullopt; } return scale_factor_.GetOptional(); } -absl::optional QualityScalerSettings::InitialBitrateIntervalMs() const { +std::optional QualityScalerSettings::InitialBitrateIntervalMs() const { if (initial_bitrate_interval_ms_ && initial_bitrate_interval_ms_.Value() < 0) { RTC_LOG(LS_WARNING) << "Unsupported bitrate_interval value, ignored."; - return absl::nullopt; + return std::nullopt; } return initial_bitrate_interval_ms_.GetOptional(); } -absl::optional QualityScalerSettings::InitialBitrateFactor() const { +std::optional QualityScalerSettings::InitialBitrateFactor() const { if (initial_bitrate_factor_ && initial_bitrate_factor_.Value() < kMinScaleFactor) { RTC_LOG(LS_WARNING) << "Unsupported initial_bitrate_factor value, ignored."; - return absl::nullopt; + return std::nullopt; } return initial_bitrate_factor_.GetOptional(); } diff --git a/rtc_base/experiments/quality_scaler_settings.h b/rtc_base/experiments/quality_scaler_settings.h index 99827aac6b..428f18f0dc 100644 --- a/rtc_base/experiments/quality_scaler_settings.h +++ b/rtc_base/experiments/quality_scaler_settings.h @@ -11,7 +11,8 @@ #ifndef RTC_BASE_EXPERIMENTS_QUALITY_SCALER_SETTINGS_H_ #define RTC_BASE_EXPERIMENTS_QUALITY_SCALER_SETTINGS_H_ -#include "absl/types/optional.h" +#include + #include "api/field_trials_view.h" #include "rtc_base/experiments/field_trial_parser.h" @@ -19,19 +20,17 @@ namespace webrtc { class QualityScalerSettings final { public: - static QualityScalerSettings ParseFromFieldTrials(); + explicit QualityScalerSettings(const FieldTrialsView& field_trials); - absl::optional SamplingPeriodMs() const; - absl::optional AverageQpWindow() const; - absl::optional MinFrames() const; - absl::optional InitialScaleFactor() const; - absl::optional ScaleFactor() const; - absl::optional InitialBitrateIntervalMs() const; - absl::optional InitialBitrateFactor() const; + std::optional SamplingPeriodMs() const; + std::optional AverageQpWindow() const; + std::optional MinFrames() const; + std::optional InitialScaleFactor() const; + std::optional ScaleFactor() const; + std::optional InitialBitrateIntervalMs() const; + std::optional InitialBitrateFactor() const; private: - explicit QualityScalerSettings(const FieldTrialsView* const key_value_config); - FieldTrialOptional sampling_period_ms_; FieldTrialOptional average_qp_window_; FieldTrialOptional min_frames_; diff --git a/rtc_base/experiments/quality_scaler_settings_unittest.cc b/rtc_base/experiments/quality_scaler_settings_unittest.cc index 9da770c1b5..578fe97b03 100644 --- 
a/rtc_base/experiments/quality_scaler_settings_unittest.cc +++ b/rtc_base/experiments/quality_scaler_settings_unittest.cc @@ -10,14 +10,15 @@ #include "rtc_base/experiments/quality_scaler_settings.h" -#include "test/field_trial.h" #include "test/gtest.h" +#include "test/scoped_key_value_config.h" namespace webrtc { namespace { TEST(QualityScalerSettingsTest, ValuesNotSetByDefault) { - const auto settings = QualityScalerSettings::ParseFromFieldTrials(); + webrtc::test::ScopedKeyValueConfig field_trials(""); + const auto settings = QualityScalerSettings(field_trials); EXPECT_FALSE(settings.MinFrames()); EXPECT_FALSE(settings.InitialScaleFactor()); EXPECT_FALSE(settings.ScaleFactor()); @@ -26,46 +27,42 @@ TEST(QualityScalerSettingsTest, ValuesNotSetByDefault) { } TEST(QualityScalerSettingsTest, ParseMinFrames) { - test::ScopedFieldTrials field_trials( + test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScalerSettings/min_frames:100/"); - EXPECT_EQ(100, QualityScalerSettings::ParseFromFieldTrials().MinFrames()); + EXPECT_EQ(100, QualityScalerSettings(field_trials).MinFrames()); } TEST(QualityScalerSettingsTest, ParseInitialScaleFactor) { - test::ScopedFieldTrials field_trials( + test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScalerSettings/initial_scale_factor:1.5/"); - EXPECT_EQ(1.5, - QualityScalerSettings::ParseFromFieldTrials().InitialScaleFactor()); + EXPECT_EQ(1.5, QualityScalerSettings(field_trials).InitialScaleFactor()); } TEST(QualityScalerSettingsTest, ParseScaleFactor) { - test::ScopedFieldTrials field_trials( + test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScalerSettings/scale_factor:1.1/"); - EXPECT_EQ(1.1, QualityScalerSettings::ParseFromFieldTrials().ScaleFactor()); + EXPECT_EQ(1.1, QualityScalerSettings(field_trials).ScaleFactor()); } TEST(QualityScalerSettingsTest, ParseInitialBitrateInterval) { - test::ScopedFieldTrials field_trials( + test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScalerSettings/initial_bitrate_interval_ms:1000/"); - EXPECT_EQ( - 1000, - QualityScalerSettings::ParseFromFieldTrials().InitialBitrateIntervalMs()); + EXPECT_EQ(1000, + QualityScalerSettings(field_trials).InitialBitrateIntervalMs()); } TEST(QualityScalerSettingsTest, ParseInitialBitrateFactor) { - test::ScopedFieldTrials field_trials( + test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScalerSettings/initial_bitrate_factor:0.75/"); - EXPECT_EQ( - 0.75, - QualityScalerSettings::ParseFromFieldTrials().InitialBitrateFactor()); + EXPECT_EQ(0.75, QualityScalerSettings(field_trials).InitialBitrateFactor()); } TEST(QualityScalerSettingsTest, ParseAll) { - test::ScopedFieldTrials field_trials( + test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScalerSettings/" "min_frames:100,initial_scale_factor:1.5,scale_factor:0.9," "initial_bitrate_interval_ms:5500,initial_bitrate_factor:0.7/"); - const auto settings = QualityScalerSettings::ParseFromFieldTrials(); + const auto settings = QualityScalerSettings(field_trials); EXPECT_EQ(100, settings.MinFrames()); EXPECT_EQ(1.5, settings.InitialScaleFactor()); EXPECT_EQ(0.9, settings.ScaleFactor()); @@ -74,11 +71,11 @@ TEST(QualityScalerSettingsTest, ParseAll) { } TEST(QualityScalerSettingsTest, DoesNotParseIncorrectValue) { - test::ScopedFieldTrials field_trials( + test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScalerSettings/" "min_frames:a,initial_scale_factor:b,scale_factor:c," "initial_bitrate_interval_ms:d,initial_bitrate_factor:e/"); - const auto settings = 
QualityScalerSettings::ParseFromFieldTrials(); + const auto settings = QualityScalerSettings(field_trials); EXPECT_FALSE(settings.MinFrames()); EXPECT_FALSE(settings.InitialScaleFactor()); EXPECT_FALSE(settings.ScaleFactor()); @@ -87,11 +84,11 @@ TEST(QualityScalerSettingsTest, DoesNotParseIncorrectValue) { } TEST(QualityScalerSettingsTest, DoesNotReturnTooSmallValue) { - test::ScopedFieldTrials field_trials( + test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScalerSettings/" "min_frames:0,initial_scale_factor:0.0,scale_factor:0.0," "initial_bitrate_interval_ms:-1,initial_bitrate_factor:0.0/"); - const auto settings = QualityScalerSettings::ParseFromFieldTrials(); + const auto settings = QualityScalerSettings(field_trials); EXPECT_FALSE(settings.MinFrames()); EXPECT_FALSE(settings.InitialScaleFactor()); EXPECT_FALSE(settings.ScaleFactor()); diff --git a/rtc_base/experiments/quality_scaling_experiment.cc b/rtc_base/experiments/quality_scaling_experiment.cc index 7d5722bbe3..be563639e0 100644 --- a/rtc_base/experiments/quality_scaling_experiment.cc +++ b/rtc_base/experiments/quality_scaling_experiment.cc @@ -13,11 +13,17 @@ #include +#include "absl/strings/match.h" +#include "api/field_trials_view.h" +#include "api/transport/field_trial_based_config.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { + +// This experiment controls QP thresholds for VP8, VP9, H264 and Generic codecs. +// Generic includes H265X but not standard H265. constexpr char kFieldTrial[] = "WebRTC-Video-QualityScaling"; constexpr int kMinQp = 1; constexpr int kMaxVp8Qp = 127; @@ -26,33 +32,68 @@ constexpr int kMaxH264Qp = 51; constexpr int kMaxGenericQp = 255; #if !defined(WEBRTC_IOS) +// On non-iOS, this default string is used unless explicitly overriden. +// TODO(https://crbug.com/400338987): For use cases that does not explicitly +// turn the QP experiment on (e.g. Chrome), it does not make sense for this QP +// threshold to override the QP thresholds provided by the encoder +// implementation - we should trust that an encoder implementation that reports +// its own QP thresholds would know best, and only use these as a fallback for +// when the encoder does not specify any. constexpr char kDefaultQualityScalingSetttings[] = "Enabled-29,95,149,205,24,37,26,36,0.9995,0.9999,1"; #endif -absl::optional GetThresholds(int low, - int high, - int max) { +std::optional GetThresholds(int low, + int high, + int max) { if (low < kMinQp || high > max || high < low) - return absl::nullopt; + return std::nullopt; RTC_LOG(LS_INFO) << "QP thresholds: low: " << low << ", high: " << high; - return absl::optional( + return std::optional( VideoEncoder::QpThresholds(low, high)); } + +// This experiment controls QP thresholds for standard H265 (not H265X). +// - Only for debugging/experimentation. Once QP thresholds have been determined +// it is up to the encoder implementation to provide +// VideoEncoder::EncoderInfo::scaling_settings. 
+// +// Example usage: +// --force-fieldtrials=WebRTC-H265-QualityScaling/low_qp:27,high_qp:35/ +struct WebRTCH265QualityScaling { + static constexpr char kFieldTrialName[] = "WebRTC-H265-QualityScaling"; + + WebRTCH265QualityScaling(const FieldTrialsView& field_trials) + : low_qp("low_qp"), high_qp("high_qp") { + ParseFieldTrial({&low_qp, &high_qp}, field_trials.Lookup(kFieldTrialName)); + } + + bool IsEnabled() const { return low_qp && high_qp; } + VideoEncoder::QpThresholds ToQpThresholds() const { + RTC_DCHECK(IsEnabled()); + return VideoEncoder::QpThresholds(*low_qp, *high_qp); + } + + FieldTrialOptional low_qp; + FieldTrialOptional high_qp; +}; } // namespace -bool QualityScalingExperiment::Enabled() { +bool QualityScalingExperiment::Enabled(const FieldTrialsView& field_trials) { + WebRTCH265QualityScaling h265_quality_scaling(field_trials); + return #if defined(WEBRTC_IOS) - return webrtc::field_trial::IsEnabled(kFieldTrial); + absl::StartsWith(field_trials.Lookup(kFieldTrial), "Enabled") || #else - return !webrtc::field_trial::IsDisabled(kFieldTrial); + !absl::StartsWith(field_trials.Lookup(kFieldTrial), "Disabled") || #endif + h265_quality_scaling.IsEnabled(); } -absl::optional -QualityScalingExperiment::ParseSettings() { - std::string group = webrtc::field_trial::FindFullName(kFieldTrial); +std::optional +QualityScalingExperiment::ParseSettings(const FieldTrialsView& field_trials) { + std::string group = field_trials.Lookup(kFieldTrial); // TODO(http://crbug.com/webrtc/12401): Completely remove the experiment code // after few releases. #if !defined(WEBRTC_IOS) @@ -65,16 +106,23 @@ QualityScalingExperiment::ParseSettings() { &s.h264_high, &s.generic_low, &s.generic_high, &s.alpha_high, &s.alpha_low, &s.drop) != 11) { RTC_LOG(LS_WARNING) << "Invalid number of parameters provided."; - return absl::nullopt; + return std::nullopt; } return s; } -absl::optional -QualityScalingExperiment::GetQpThresholds(VideoCodecType codec_type) { - const auto settings = ParseSettings(); +std::optional +QualityScalingExperiment::GetQpThresholds(VideoCodecType codec_type, + const FieldTrialsView& field_trials) { + if (codec_type == kVideoCodecH265) { + WebRTCH265QualityScaling h265_quality_scaling(field_trials); + if (h265_quality_scaling.IsEnabled()) { + return h265_quality_scaling.ToQpThresholds(); + } + } + const auto settings = ParseSettings(field_trials); if (!settings) - return absl::nullopt; + return std::nullopt; switch (codec_type) { case kVideoCodecVP8: @@ -87,12 +135,13 @@ QualityScalingExperiment::GetQpThresholds(VideoCodecType codec_type) { return GetThresholds(settings->generic_low, settings->generic_high, kMaxGenericQp); default: - return absl::nullopt; + return std::nullopt; } } -QualityScalingExperiment::Config QualityScalingExperiment::GetConfig() { - const auto settings = ParseSettings(); +QualityScalingExperiment::Config QualityScalingExperiment::GetConfig( + const FieldTrialsView& field_trials) { + const auto settings = ParseSettings(field_trials); if (!settings) return Config(); diff --git a/rtc_base/experiments/quality_scaling_experiment.h b/rtc_base/experiments/quality_scaling_experiment.h index 31d8292b5c..0d197c14fe 100644 --- a/rtc_base/experiments/quality_scaling_experiment.h +++ b/rtc_base/experiments/quality_scaling_experiment.h @@ -10,7 +10,9 @@ #ifndef RTC_BASE_EXPERIMENTS_QUALITY_SCALING_EXPERIMENT_H_ #define RTC_BASE_EXPERIMENTS_QUALITY_SCALING_EXPERIMENT_H_ -#include "absl/types/optional.h" +#include + +#include "api/field_trials_view.h" #include 
"api/video_codecs/video_encoder.h" namespace webrtc { @@ -40,17 +42,19 @@ class QualityScalingExperiment { }; // Returns true if the experiment is enabled. - static bool Enabled(); + static bool Enabled(const FieldTrialsView& field_trials); // Returns settings from field trial. - static absl::optional ParseSettings(); + static std::optional ParseSettings( + const FieldTrialsView& field_trials); // Returns QpThresholds for the `codec_type`. - static absl::optional GetQpThresholds( - VideoCodecType codec_type); + static std::optional GetQpThresholds( + VideoCodecType codec_type, + const FieldTrialsView& field_trials); // Returns parsed values. If the parsing fails, default values are returned. - static Config GetConfig(); + static Config GetConfig(const FieldTrialsView& field_trials); }; } // namespace webrtc diff --git a/rtc_base/experiments/quality_scaling_experiment_unittest.cc b/rtc_base/experiments/quality_scaling_experiment_unittest.cc index 4507f1514f..0c1450557a 100644 --- a/rtc_base/experiments/quality_scaling_experiment_unittest.cc +++ b/rtc_base/experiments/quality_scaling_experiment_unittest.cc @@ -10,8 +10,8 @@ #include "rtc_base/experiments/quality_scaling_experiment.h" -#include "test/field_trial.h" #include "test/gtest.h" +#include "test/scoped_key_value_config.h" namespace webrtc { namespace { @@ -41,28 +41,28 @@ void ExpectEqualConfig(QualityScalingExperiment::Config a, #if !defined(WEBRTC_IOS) // TODO(bugs.webrtc.org/12401): investigate why QualityScaler kicks in on iOS. TEST(QualityScalingExperimentTest, DefaultEnabledWithoutFieldTrial) { - webrtc::test::ScopedFieldTrials field_trials(""); - EXPECT_TRUE(QualityScalingExperiment::Enabled()); + webrtc::test::ScopedKeyValueConfig field_trials(""); + EXPECT_TRUE(QualityScalingExperiment::Enabled(field_trials)); } #else TEST(QualityScalingExperimentTest, DefaultDisabledWithoutFieldTrialIOS) { - webrtc::test::ScopedFieldTrials field_trials(""); - EXPECT_FALSE(QualityScalingExperiment::Enabled()); + webrtc::test::ScopedKeyValueConfig field_trials(""); + EXPECT_FALSE(QualityScalingExperiment::Enabled(field_trials)); } #endif TEST(QualityScalingExperimentTest, EnabledWithFieldTrial) { - webrtc::test::ScopedFieldTrials field_trials( + webrtc::test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScaling/Enabled/"); - EXPECT_TRUE(QualityScalingExperiment::Enabled()); + EXPECT_TRUE(QualityScalingExperiment::Enabled(field_trials)); } TEST(QualityScalingExperimentTest, ParseSettings) { const QualityScalingExperiment::Settings kExpected = {1, 2, 3, 4, 5, 6, 7, 8, 0.9f, 0.99f, 1}; - webrtc::test::ScopedFieldTrials field_trials( + webrtc::test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScaling/Enabled-1,2,3,4,5,6,7,8,0.9,0.99,1/"); - const auto settings = QualityScalingExperiment::ParseSettings(); + const auto settings = QualityScalingExperiment::ParseSettings(field_trials); EXPECT_TRUE(settings); ExpectEqualSettings(kExpected, *settings); } @@ -70,117 +70,117 @@ TEST(QualityScalingExperimentTest, ParseSettings) { #if !defined(WEBRTC_IOS) // TODO(bugs.webrtc.org/12401): investigate why QualityScaler kicks in on iOS. TEST(QualityScalingExperimentTest, ParseSettingsUsesDefaultsWithoutFieldTrial) { - webrtc::test::ScopedFieldTrials field_trials(""); + webrtc::test::ScopedKeyValueConfig field_trials(""); // Uses some default hard coded values. 
- EXPECT_TRUE(QualityScalingExperiment::ParseSettings()); + EXPECT_TRUE(QualityScalingExperiment::ParseSettings(field_trials)); } #else TEST(QualityScalingExperimentTest, ParseSettingsFailsWithoutFieldTrial) { - webrtc::test::ScopedFieldTrials field_trials(""); - EXPECT_FALSE(QualityScalingExperiment::ParseSettings()); + webrtc::test::ScopedKeyValueConfig field_trials(""); + EXPECT_FALSE(QualityScalingExperiment::ParseSettings(field_trials)); } #endif TEST(QualityScalingExperimentTest, ParseSettingsFailsWithInvalidFieldTrial) { - webrtc::test::ScopedFieldTrials field_trials( + webrtc::test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScaling/Enabled-invalid/"); - EXPECT_FALSE(QualityScalingExperiment::ParseSettings()); + EXPECT_FALSE(QualityScalingExperiment::ParseSettings(field_trials)); } TEST(QualityScalingExperimentTest, GetConfig) { - webrtc::test::ScopedFieldTrials field_trials( + webrtc::test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScaling/Enabled-1,2,3,4,5,6,7,8,0.9,0.99,0/"); - const auto config = QualityScalingExperiment::GetConfig(); + const auto config = QualityScalingExperiment::GetConfig(field_trials); EXPECT_EQ(0.9f, config.alpha_high); EXPECT_EQ(0.99f, config.alpha_low); EXPECT_FALSE(config.use_all_drop_reasons); } TEST(QualityScalingExperimentTest, GetsDefaultConfigForInvalidFieldTrial) { - webrtc::test::ScopedFieldTrials field_trials( + webrtc::test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScaling/Enabled-invalid/"); - const auto config = QualityScalingExperiment::GetConfig(); + const auto config = QualityScalingExperiment::GetConfig(field_trials); ExpectEqualConfig(config, QualityScalingExperiment::Config()); } TEST(QualityScalingExperimentTest, GetsDefaultAlphaForInvalidValue) { QualityScalingExperiment::Config expected_config; expected_config.use_all_drop_reasons = true; - webrtc::test::ScopedFieldTrials field_trials( + webrtc::test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScaling/Enabled-1,2,3,4,5,6,7,8,0.99,0.9,1/"); - const auto config = QualityScalingExperiment::GetConfig(); + const auto config = QualityScalingExperiment::GetConfig(field_trials); ExpectEqualConfig(config, expected_config); } TEST(QualityScalingExperimentTest, GetVp8Thresholds) { - webrtc::test::ScopedFieldTrials field_trials( + webrtc::test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScaling/Enabled-1,2,3,4,5,6,0,0,0.9,0.99,1/"); const auto thresholds = - QualityScalingExperiment::GetQpThresholds(kVideoCodecVP8); + QualityScalingExperiment::GetQpThresholds(kVideoCodecVP8, field_trials); EXPECT_TRUE(thresholds); EXPECT_EQ(1, thresholds->low); EXPECT_EQ(2, thresholds->high); } TEST(QualityScalingExperimentTest, GetThresholdsFailsForInvalidVp8Value) { - webrtc::test::ScopedFieldTrials field_trials( + webrtc::test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScaling/Enabled-0,0,3,4,5,6,7,8,0.9,0.99,1/"); const auto thresholds = - QualityScalingExperiment::GetQpThresholds(kVideoCodecVP8); + QualityScalingExperiment::GetQpThresholds(kVideoCodecVP8, field_trials); EXPECT_FALSE(thresholds); } TEST(QualityScalingExperimentTest, GetVp9Thresholds) { - webrtc::test::ScopedFieldTrials field_trials( + webrtc::test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScaling/Enabled-1,2,3,4,5,6,0,0,0.9,0.99,1/"); const auto thresholds = - QualityScalingExperiment::GetQpThresholds(kVideoCodecVP9); + QualityScalingExperiment::GetQpThresholds(kVideoCodecVP9, field_trials); EXPECT_TRUE(thresholds); EXPECT_EQ(3, thresholds->low); 
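For reference, the comma-separated payload behind "Enabled-" is positional: vp8 low/high, vp9 low/high, h264 low/high, generic low/high, then alpha_high, alpha_low, drop, exactly as the surrounding tests exercise. A small sketch tying the positions to the per-codec thresholds, again with ExplicitKeyValueConfig as a stand-in FieldTrialsView:

#include "api/video/video_codec_type.h"
#include "rtc_base/experiments/quality_scaling_experiment.h"
#include "test/explicit_key_value_config.h"

void QualityScalingFormatSketch() {
  webrtc::test::ExplicitKeyValueConfig field_trials(
      "WebRTC-Video-QualityScaling/Enabled-1,2,3,4,5,6,7,8,0.9,0.99,1/");
  // Positions 1-2 feed VP8, 3-4 feed VP9, 5-6 feed H264, 7-8 feed Generic.
  auto vp8 = webrtc::QualityScalingExperiment::GetQpThresholds(
      webrtc::kVideoCodecVP8, field_trials);  // low 1, high 2
  auto vp9 = webrtc::QualityScalingExperiment::GetQpThresholds(
      webrtc::kVideoCodecVP9, field_trials);  // low 3, high 4
  auto h264 = webrtc::QualityScalingExperiment::GetQpThresholds(
      webrtc::kVideoCodecH264, field_trials);  // low 5, high 6
  (void)vp8;
  (void)vp9;
  (void)h264;
}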
EXPECT_EQ(4, thresholds->high); } TEST(QualityScalingExperimentTest, GetThresholdsFailsForInvalidVp9Value) { - webrtc::test::ScopedFieldTrials field_trials( + webrtc::test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScaling/Enabled-1,2,0,0,5,6,7,8,0.9,0.99,1/"); const auto thresholds = - QualityScalingExperiment::GetQpThresholds(kVideoCodecVP9); + QualityScalingExperiment::GetQpThresholds(kVideoCodecVP9, field_trials); EXPECT_FALSE(thresholds); } TEST(QualityScalingExperimentTest, GetH264Thresholds) { - webrtc::test::ScopedFieldTrials field_trials( + webrtc::test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScaling/Enabled-1,2,3,4,5,6,0,0,0.9,0.99,1/"); const auto thresholds = - QualityScalingExperiment::GetQpThresholds(kVideoCodecH264); + QualityScalingExperiment::GetQpThresholds(kVideoCodecH264, field_trials); EXPECT_TRUE(thresholds); EXPECT_EQ(5, thresholds->low); EXPECT_EQ(6, thresholds->high); } TEST(QualityScalingExperimentTest, GetThresholdsFailsForInvalidH264Value) { - webrtc::test::ScopedFieldTrials field_trials( + webrtc::test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScaling/Enabled-1,2,3,4,0,0,7,8,0.9,0.99,1/"); const auto thresholds = - QualityScalingExperiment::GetQpThresholds(kVideoCodecH264); + QualityScalingExperiment::GetQpThresholds(kVideoCodecH264, field_trials); EXPECT_FALSE(thresholds); } TEST(QualityScalingExperimentTest, GetGenericThresholds) { - webrtc::test::ScopedFieldTrials field_trials( + webrtc::test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScaling/Enabled-1,2,3,4,0,0,7,8,0.9,0.99,1/"); - const auto thresholds = - QualityScalingExperiment::GetQpThresholds(kVideoCodecGeneric); + const auto thresholds = QualityScalingExperiment::GetQpThresholds( + kVideoCodecGeneric, field_trials); EXPECT_TRUE(thresholds); EXPECT_EQ(7, thresholds->low); EXPECT_EQ(8, thresholds->high); } TEST(QualityScalingExperimentTest, GetThresholdsFailsForInvalidGenericValue) { - webrtc::test::ScopedFieldTrials field_trials( + webrtc::test::ScopedKeyValueConfig field_trials( "WebRTC-Video-QualityScaling/Enabled-1,2,3,4,5,6,0,0,0.9,0.99,1/"); - const auto thresholds = - QualityScalingExperiment::GetQpThresholds(kVideoCodecGeneric); + const auto thresholds = QualityScalingExperiment::GetQpThresholds( + kVideoCodecGeneric, field_trials); EXPECT_FALSE(thresholds); } } // namespace webrtc diff --git a/rtc_base/experiments/rate_control_settings.cc b/rtc_base/experiments/rate_control_settings.cc index 84e7b1bcc1..2aaf0e1aca 100644 --- a/rtc_base/experiments/rate_control_settings.cc +++ b/rtc_base/experiments/rate_control_settings.cc @@ -16,7 +16,6 @@ #include #include "absl/strings/match.h" -#include "api/transport/field_trial_based_config.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -34,11 +33,6 @@ const char kCongestionWindowDefaultFieldTrialString[] = const char kUseBaseHeavyVp8Tl3RateAllocationFieldTrialName[] = "WebRTC-UseBaseHeavyVP8TL3RateAllocation"; -bool IsEnabled(const FieldTrialsView* const key_value_config, - absl::string_view key) { - return absl::StartsWith(key_value_config->Lookup(key), "Enabled"); -} - } // namespace constexpr char CongestionWindowConfig::kKey[]; @@ -75,34 +69,23 @@ std::unique_ptr VideoRateControlConfig::Parser() { } RateControlSettings::RateControlSettings( - const FieldTrialsView* const key_value_config) { + const FieldTrialsView& key_value_config) { std::string congestion_window_config = - key_value_config->Lookup(CongestionWindowConfig::kKey).empty() - ? 
kCongestionWindowDefaultFieldTrialString - : key_value_config->Lookup(CongestionWindowConfig::kKey); + key_value_config.Lookup(CongestionWindowConfig::kKey); + if (congestion_window_config.empty()) { + congestion_window_config = kCongestionWindowDefaultFieldTrialString; + } congestion_window_config_ = CongestionWindowConfig::Parse(congestion_window_config); - video_config_.vp8_base_heavy_tl3_alloc = IsEnabled( - key_value_config, kUseBaseHeavyVp8Tl3RateAllocationFieldTrialName); + video_config_.vp8_base_heavy_tl3_alloc = key_value_config.IsEnabled( + kUseBaseHeavyVp8Tl3RateAllocationFieldTrialName); video_config_.Parser()->Parse( - key_value_config->Lookup(VideoRateControlConfig::kKey)); + key_value_config.Lookup(VideoRateControlConfig::kKey)); } RateControlSettings::~RateControlSettings() = default; RateControlSettings::RateControlSettings(RateControlSettings&&) = default; -RateControlSettings RateControlSettings::ParseFromFieldTrials() { - FieldTrialBasedConfig field_trial_config; - return RateControlSettings(&field_trial_config); -} - -RateControlSettings RateControlSettings::ParseFromKeyValueConfig( - const FieldTrialsView* const key_value_config) { - FieldTrialBasedConfig field_trial_config; - return RateControlSettings(key_value_config ? key_value_config - : &field_trial_config); -} - bool RateControlSettings::UseCongestionWindow() const { return static_cast(congestion_window_config_.queue_size_ms); } @@ -127,12 +110,12 @@ uint32_t RateControlSettings::CongestionWindowMinPushbackTargetBitrateBps() kDefaultMinPushbackTargetBitrateBps); } -absl::optional -RateControlSettings::CongestionWindowInitialDataWindow() const { +std::optional RateControlSettings::CongestionWindowInitialDataWindow() + const { return congestion_window_config_.initial_data_window; } -absl::optional RateControlSettings::GetPacingFactor() const { +std::optional RateControlSettings::GetPacingFactor() const { return video_config_.pacing_factor; } @@ -140,18 +123,18 @@ bool RateControlSettings::UseAlrProbing() const { return video_config_.alr_probing; } -absl::optional RateControlSettings::LibvpxVp8QpMax() const { +std::optional RateControlSettings::LibvpxVp8QpMax() const { if (video_config_.vp8_qp_max && (*video_config_.vp8_qp_max < 0 || *video_config_.vp8_qp_max > 63)) { RTC_LOG(LS_WARNING) << "Unsupported vp8_qp_max_ value, ignored."; - return absl::nullopt; + return std::nullopt; } return video_config_.vp8_qp_max; } -absl::optional RateControlSettings::LibvpxVp8MinPixels() const { +std::optional RateControlSettings::LibvpxVp8MinPixels() const { if (video_config_.vp8_min_pixels && *video_config_.vp8_min_pixels < 1) { - return absl::nullopt; + return std::nullopt; } return video_config_.vp8_min_pixels; } diff --git a/rtc_base/experiments/rate_control_settings.h b/rtc_base/experiments/rate_control_settings.h index 05e942d39f..d48da1c45a 100644 --- a/rtc_base/experiments/rate_control_settings.h +++ b/rtc_base/experiments/rate_control_settings.h @@ -11,7 +11,8 @@ #ifndef RTC_BASE_EXPERIMENTS_RATE_CONTROL_SETTINGS_H_ #define RTC_BASE_EXPERIMENTS_RATE_CONTROL_SETTINGS_H_ -#include "absl/types/optional.h" +#include + #include "api/field_trials_view.h" #include "api/units/data_size.h" #include "api/video_codecs/video_codec.h" @@ -22,9 +23,9 @@ namespace webrtc { struct CongestionWindowConfig { static constexpr char kKey[] = "WebRTC-CongestionWindow"; - absl::optional queue_size_ms; - absl::optional min_bitrate_bps; - absl::optional initial_data_window; + std::optional queue_size_ms; + std::optional min_bitrate_bps; + 
std::optional initial_data_window; bool drop_frame_only = false; std::unique_ptr Parser(); static CongestionWindowConfig Parse(absl::string_view config); @@ -32,10 +33,10 @@ struct CongestionWindowConfig { struct VideoRateControlConfig { static constexpr char kKey[] = "WebRTC-VideoRateControl"; - absl::optional pacing_factor; + std::optional pacing_factor; bool alr_probing = false; - absl::optional vp8_qp_max; - absl::optional vp8_min_pixels; + std::optional vp8_qp_max; + std::optional vp8_min_pixels; bool trust_vp8 = true; bool trust_vp9 = true; bool bitrate_adjuster = true; @@ -48,12 +49,9 @@ struct VideoRateControlConfig { class RateControlSettings final { public: - ~RateControlSettings(); + explicit RateControlSettings(const FieldTrialsView& key_value_config); RateControlSettings(RateControlSettings&&); - - static RateControlSettings ParseFromFieldTrials(); - static RateControlSettings ParseFromKeyValueConfig( - const FieldTrialsView* const key_value_config); + ~RateControlSettings(); // When CongestionWindowPushback is enabled, the pacer is oblivious to // the congestion window. The relation between outstanding data and @@ -63,13 +61,13 @@ class RateControlSettings final { bool UseCongestionWindowPushback() const; bool UseCongestionWindowDropFrameOnly() const; uint32_t CongestionWindowMinPushbackTargetBitrateBps() const; - absl::optional CongestionWindowInitialDataWindow() const; + std::optional CongestionWindowInitialDataWindow() const; - absl::optional GetPacingFactor() const; + std::optional GetPacingFactor() const; bool UseAlrProbing() const; - absl::optional LibvpxVp8QpMax() const; - absl::optional LibvpxVp8MinPixels() const; + std::optional LibvpxVp8QpMax() const; + std::optional LibvpxVp8MinPixels() const; bool LibvpxVp8TrustedRateController() const; bool Vp8BoostBaseLayerQuality() const; bool Vp8DynamicRateSettings() const; @@ -82,8 +80,6 @@ class RateControlSettings final { bool BitrateAdjusterCanUseNetworkHeadroom() const; private: - explicit RateControlSettings(const FieldTrialsView* const key_value_config); - CongestionWindowConfig congestion_window_config_; VideoRateControlConfig video_config_; }; diff --git a/rtc_base/experiments/rate_control_settings_unittest.cc b/rtc_base/experiments/rate_control_settings_unittest.cc index 91ebf531bd..72470cbd7f 100644 --- a/rtc_base/experiments/rate_control_settings_unittest.cc +++ b/rtc_base/experiments/rate_control_settings_unittest.cc @@ -11,7 +11,8 @@ #include "rtc_base/experiments/rate_control_settings.h" #include "api/video_codecs/video_codec.h" -#include "test/field_trial.h" +#include "test/explicit_key_value_config.h" +#include "test/gmock.h" #include "test/gtest.h" #include "video/config/video_encoder_config.h" @@ -19,171 +20,130 @@ namespace webrtc { namespace { +using test::ExplicitKeyValueConfig; +using ::testing::DoubleEq; +using ::testing::Optional; + +RateControlSettings ParseFrom(absl::string_view field_trials) { + return RateControlSettings(ExplicitKeyValueConfig(field_trials)); +} + TEST(RateControlSettingsTest, CongestionWindow) { - EXPECT_TRUE( - RateControlSettings::ParseFromFieldTrials().UseCongestionWindow()); + EXPECT_TRUE(ParseFrom("").UseCongestionWindow()); - test::ScopedFieldTrials field_trials( - "WebRTC-CongestionWindow/QueueSize:100/"); - const RateControlSettings settings_after = - RateControlSettings::ParseFromFieldTrials(); - EXPECT_TRUE(settings_after.UseCongestionWindow()); - EXPECT_EQ(settings_after.GetCongestionWindowAdditionalTimeMs(), 100); + const RateControlSettings settings = + 
ParseFrom("WebRTC-CongestionWindow/QueueSize:100/"); + EXPECT_TRUE(settings.UseCongestionWindow()); + EXPECT_EQ(settings.GetCongestionWindowAdditionalTimeMs(), 100); } TEST(RateControlSettingsTest, CongestionWindowPushback) { - EXPECT_TRUE(RateControlSettings::ParseFromFieldTrials() - .UseCongestionWindowPushback()); + EXPECT_TRUE(ParseFrom("").UseCongestionWindowPushback()); - test::ScopedFieldTrials field_trials( - "WebRTC-CongestionWindow/QueueSize:100,MinBitrate:100000/"); - const RateControlSettings settings_after = - RateControlSettings::ParseFromFieldTrials(); - EXPECT_TRUE(settings_after.UseCongestionWindowPushback()); - EXPECT_EQ(settings_after.CongestionWindowMinPushbackTargetBitrateBps(), - 100000u); + const RateControlSettings settings = + ParseFrom("WebRTC-CongestionWindow/QueueSize:100,MinBitrate:100000/"); + EXPECT_TRUE(settings.UseCongestionWindowPushback()); + EXPECT_EQ(settings.CongestionWindowMinPushbackTargetBitrateBps(), 100000u); } TEST(RateControlSettingsTest, CongestionWindowPushbackDropframe) { - EXPECT_TRUE(RateControlSettings::ParseFromFieldTrials() - .UseCongestionWindowPushback()); + EXPECT_TRUE(ParseFrom("").UseCongestionWindowPushback()); - test::ScopedFieldTrials field_trials( + const RateControlSettings settings = ParseFrom( "WebRTC-CongestionWindow/" "QueueSize:100,MinBitrate:100000,DropFrame:true/"); - const RateControlSettings settings_after = - RateControlSettings::ParseFromFieldTrials(); - EXPECT_TRUE(settings_after.UseCongestionWindowPushback()); - EXPECT_EQ(settings_after.CongestionWindowMinPushbackTargetBitrateBps(), - 100000u); - EXPECT_TRUE(settings_after.UseCongestionWindowDropFrameOnly()); + EXPECT_TRUE(settings.UseCongestionWindowPushback()); + EXPECT_EQ(settings.CongestionWindowMinPushbackTargetBitrateBps(), 100000u); + EXPECT_TRUE(settings.UseCongestionWindowDropFrameOnly()); } TEST(RateControlSettingsTest, CongestionWindowPushbackDefaultConfig) { - const RateControlSettings settings = - RateControlSettings::ParseFromFieldTrials(); + const RateControlSettings settings = ParseFrom(""); EXPECT_TRUE(settings.UseCongestionWindowPushback()); EXPECT_EQ(settings.CongestionWindowMinPushbackTargetBitrateBps(), 30000u); EXPECT_TRUE(settings.UseCongestionWindowDropFrameOnly()); } TEST(RateControlSettingsTest, PacingFactor) { - EXPECT_FALSE(RateControlSettings::ParseFromFieldTrials().GetPacingFactor()); + EXPECT_FALSE(ParseFrom("").GetPacingFactor()); - test::ScopedFieldTrials field_trials( - "WebRTC-VideoRateControl/pacing_factor:1.2/"); - const RateControlSettings settings_after = - RateControlSettings::ParseFromFieldTrials(); - // Need to explicitly dereference the absl::optional - // for the EXPECT_DOUBLE_EQ to compile. 
- ASSERT_TRUE(settings_after.GetPacingFactor()); - EXPECT_DOUBLE_EQ(*settings_after.GetPacingFactor(), 1.2); + EXPECT_THAT( + ParseFrom("WebRTC-VideoRateControl/pacing_factor:1.2/").GetPacingFactor(), + Optional(DoubleEq(1.2))); } TEST(RateControlSettingsTest, AlrProbing) { - EXPECT_FALSE(RateControlSettings::ParseFromFieldTrials().UseAlrProbing()); + EXPECT_FALSE(ParseFrom("").UseAlrProbing()); - test::ScopedFieldTrials field_trials( - "WebRTC-VideoRateControl/alr_probing:1/"); - EXPECT_TRUE(RateControlSettings::ParseFromFieldTrials().UseAlrProbing()); + EXPECT_TRUE( + ParseFrom("WebRTC-VideoRateControl/alr_probing:1/").UseAlrProbing()); } TEST(RateControlSettingsTest, LibvpxVp8QpMax) { - EXPECT_FALSE(RateControlSettings::ParseFromFieldTrials().LibvpxVp8QpMax()); + EXPECT_FALSE(ParseFrom("").LibvpxVp8QpMax()); - test::ScopedFieldTrials field_trials( - "WebRTC-VideoRateControl/vp8_qp_max:50/"); - EXPECT_EQ(RateControlSettings::ParseFromFieldTrials().LibvpxVp8QpMax(), 50); + EXPECT_EQ( + ParseFrom("WebRTC-VideoRateControl/vp8_qp_max:50/").LibvpxVp8QpMax(), 50); } TEST(RateControlSettingsTest, DoesNotGetTooLargeLibvpxVp8QpMaxValue) { - test::ScopedFieldTrials field_trials( - "WebRTC-VideoRateControl/vp8_qp_max:70/"); - EXPECT_FALSE(RateControlSettings::ParseFromFieldTrials().LibvpxVp8QpMax()); + EXPECT_FALSE( + ParseFrom("WebRTC-VideoRateControl/vp8_qp_max:70/").LibvpxVp8QpMax()); } TEST(RateControlSettingsTest, LibvpxVp8MinPixels) { - EXPECT_FALSE( - RateControlSettings::ParseFromFieldTrials().LibvpxVp8MinPixels()); + EXPECT_FALSE(ParseFrom("").LibvpxVp8MinPixels()); - test::ScopedFieldTrials field_trials( - "WebRTC-VideoRateControl/vp8_min_pixels:50000/"); - EXPECT_EQ(RateControlSettings::ParseFromFieldTrials().LibvpxVp8MinPixels(), + EXPECT_EQ(ParseFrom("WebRTC-VideoRateControl/vp8_min_pixels:50000/") + .LibvpxVp8MinPixels(), 50000); } TEST(RateControlSettingsTest, DoesNotGetTooSmallLibvpxVp8MinPixelValue) { - test::ScopedFieldTrials field_trials( - "WebRTC-VideoRateControl/vp8_min_pixels:0/"); - EXPECT_FALSE( - RateControlSettings::ParseFromFieldTrials().LibvpxVp8MinPixels()); + EXPECT_FALSE(ParseFrom("WebRTC-VideoRateControl/vp8_min_pixels:0/") + .LibvpxVp8MinPixels()); } TEST(RateControlSettingsTest, LibvpxTrustedRateController) { - const RateControlSettings settings_before = - RateControlSettings::ParseFromFieldTrials(); - EXPECT_TRUE(settings_before.LibvpxVp8TrustedRateController()); - EXPECT_TRUE(settings_before.LibvpxVp9TrustedRateController()); + const RateControlSettings default_settings = ParseFrom(""); + EXPECT_TRUE(default_settings.LibvpxVp8TrustedRateController()); + EXPECT_TRUE(default_settings.LibvpxVp9TrustedRateController()); - test::ScopedFieldTrials field_trials( - "WebRTC-VideoRateControl/trust_vp8:0,trust_vp9:0/"); - const RateControlSettings settings_after = - RateControlSettings::ParseFromFieldTrials(); - EXPECT_FALSE(settings_after.LibvpxVp8TrustedRateController()); - EXPECT_FALSE(settings_after.LibvpxVp9TrustedRateController()); + const RateControlSettings settings = + ParseFrom("WebRTC-VideoRateControl/trust_vp8:0,trust_vp9:0/"); + EXPECT_FALSE(settings.LibvpxVp8TrustedRateController()); + EXPECT_FALSE(settings.LibvpxVp9TrustedRateController()); } TEST(RateControlSettingsTest, Vp8BaseHeavyTl3RateAllocationLegacyKey) { - const RateControlSettings settings_before = - RateControlSettings::ParseFromFieldTrials(); - EXPECT_FALSE(settings_before.Vp8BaseHeavyTl3RateAllocation()); + EXPECT_FALSE(ParseFrom("").Vp8BaseHeavyTl3RateAllocation()); - test::ScopedFieldTrials 
field_trials( - "WebRTC-UseBaseHeavyVP8TL3RateAllocation/Enabled/"); - const RateControlSettings settings_after = - RateControlSettings::ParseFromFieldTrials(); - EXPECT_TRUE(settings_after.Vp8BaseHeavyTl3RateAllocation()); + EXPECT_TRUE(ParseFrom("WebRTC-UseBaseHeavyVP8TL3RateAllocation/Enabled/") + .Vp8BaseHeavyTl3RateAllocation()); } TEST(RateControlSettingsTest, Vp8BaseHeavyTl3RateAllocationVideoRateControlKey) { - const RateControlSettings settings_before = - RateControlSettings::ParseFromFieldTrials(); - EXPECT_FALSE(settings_before.Vp8BaseHeavyTl3RateAllocation()); + EXPECT_FALSE(ParseFrom("").Vp8BaseHeavyTl3RateAllocation()); - test::ScopedFieldTrials field_trials( - "WebRTC-VideoRateControl/vp8_base_heavy_tl3_alloc:1/"); - const RateControlSettings settings_after = - RateControlSettings::ParseFromFieldTrials(); - EXPECT_TRUE(settings_after.Vp8BaseHeavyTl3RateAllocation()); + EXPECT_TRUE(ParseFrom("WebRTC-VideoRateControl/vp8_base_heavy_tl3_alloc:1/") + .Vp8BaseHeavyTl3RateAllocation()); } TEST(RateControlSettingsTest, Vp8BaseHeavyTl3RateAllocationVideoRateControlKeyOverridesLegacyKey) { - const RateControlSettings settings_before = - RateControlSettings::ParseFromFieldTrials(); - EXPECT_FALSE(settings_before.Vp8BaseHeavyTl3RateAllocation()); + EXPECT_FALSE(ParseFrom("").Vp8BaseHeavyTl3RateAllocation()); - test::ScopedFieldTrials field_trials( - "WebRTC-UseBaseHeavyVP8TL3RateAllocation/Enabled/WebRTC-VideoRateControl/" - "vp8_base_heavy_tl3_alloc:0/"); - const RateControlSettings settings_after = - RateControlSettings::ParseFromFieldTrials(); - EXPECT_FALSE(settings_after.Vp8BaseHeavyTl3RateAllocation()); + EXPECT_FALSE(ParseFrom("WebRTC-UseBaseHeavyVP8TL3RateAllocation/Enabled/" + "WebRTC-VideoRateControl/vp8_base_heavy_tl3_alloc:0/") + .Vp8BaseHeavyTl3RateAllocation()); } TEST(RateControlSettingsTest, UseEncoderBitrateAdjuster) { - // Should be on by default. - EXPECT_TRUE( - RateControlSettings::ParseFromFieldTrials().UseEncoderBitrateAdjuster()); - - { - // Can be turned off via field trial. - test::ScopedFieldTrials field_trials( - "WebRTC-VideoRateControl/bitrate_adjuster:false/"); - EXPECT_FALSE(RateControlSettings::ParseFromFieldTrials() - .UseEncoderBitrateAdjuster()); - } + EXPECT_TRUE(ParseFrom("").UseEncoderBitrateAdjuster()); + + EXPECT_FALSE(ParseFrom("WebRTC-VideoRateControl/bitrate_adjuster:false/") + .UseEncoderBitrateAdjuster()); } } // namespace diff --git a/rtc_base/experiments/rtt_mult_experiment.cc b/rtc_base/experiments/rtt_mult_experiment.cc deleted file mode 100644 index e15b928937..0000000000 --- a/rtc_base/experiments/rtt_mult_experiment.cc +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "rtc_base/experiments/rtt_mult_experiment.h" - -#include - -#include -#include - -#include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" - -namespace webrtc { - -namespace { -const char kRttMultExperiment[] = "WebRTC-RttMult"; -} // namespace - -bool RttMultExperiment::RttMultEnabled() { - return !field_trial::IsDisabled(kRttMultExperiment); -} - -absl::optional -RttMultExperiment::GetRttMultValue() { - if (!RttMultExperiment::RttMultEnabled()) { - return absl::nullopt; - } - return RttMultExperiment::Settings{.rtt_mult_setting = 0.9, - .rtt_mult_add_cap_ms = 200.0}; -} - -} // namespace webrtc diff --git a/rtc_base/experiments/rtt_mult_experiment.h b/rtc_base/experiments/rtt_mult_experiment.h deleted file mode 100644 index a868e63e8d..0000000000 --- a/rtc_base/experiments/rtt_mult_experiment.h +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef RTC_BASE_EXPERIMENTS_RTT_MULT_EXPERIMENT_H_ -#define RTC_BASE_EXPERIMENTS_RTT_MULT_EXPERIMENT_H_ - -#include "absl/types/optional.h" - -namespace webrtc { - -class RttMultExperiment { - public: - struct Settings { - float rtt_mult_setting; // Jitter buffer size is increased by this factor - // times the estimated RTT. - float rtt_mult_add_cap_ms; // Jitter buffer size increase is capped by this - // value. - }; - - // Returns true if the experiment is enabled. - static bool RttMultEnabled(); - - // Returns rtt_mult value and rtt_mult addition cap value from field trial. - static absl::optional GetRttMultValue(); -}; - -} // namespace webrtc - -#endif // RTC_BASE_EXPERIMENTS_RTT_MULT_EXPERIMENT_H_ diff --git a/rtc_base/experiments/rtt_mult_experiment_unittest.cc b/rtc_base/experiments/rtt_mult_experiment_unittest.cc deleted file mode 100644 index a6798a1411..0000000000 --- a/rtc_base/experiments/rtt_mult_experiment_unittest.cc +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "rtc_base/experiments/rtt_mult_experiment.h" - -#include "test/field_trial.h" -#include "test/gtest.h" - -namespace webrtc { - -TEST(RttMultExperimentTest, RttMultEnabledByDefault) { - EXPECT_TRUE(RttMultExperiment::RttMultEnabled()); - ASSERT_TRUE(RttMultExperiment::GetRttMultValue()); - EXPECT_EQ(0.9f, RttMultExperiment::GetRttMultValue()->rtt_mult_setting); - EXPECT_EQ(200.0f, RttMultExperiment::GetRttMultValue()->rtt_mult_add_cap_ms); -} - -TEST(RttMultExperimentTest, RttMultDisabledByFieldTrial) { - webrtc::test::ScopedFieldTrials field_trials("WebRTC-RttMult/Disabled/"); - EXPECT_FALSE(RttMultExperiment::RttMultEnabled()); - EXPECT_FALSE(RttMultExperiment::GetRttMultValue()); -} - -} // namespace webrtc diff --git a/rtc_base/experiments/stable_target_rate_experiment.cc b/rtc_base/experiments/stable_target_rate_experiment.cc index fa04fa35b4..d554b5d355 100644 --- a/rtc_base/experiments/stable_target_rate_experiment.cc +++ b/rtc_base/experiments/stable_target_rate_experiment.cc @@ -10,7 +10,8 @@ #include "rtc_base/experiments/stable_target_rate_experiment.h" -#include "api/transport/field_trial_based_config.h" +#include "api/field_trials_view.h" +#include "rtc_base/experiments/field_trial_parser.h" namespace webrtc { namespace { @@ -18,17 +19,15 @@ constexpr char kFieldTrialName[] = "WebRTC-StableTargetRate"; } // namespace StableTargetRateExperiment::StableTargetRateExperiment( - const FieldTrialsView* const key_value_config, - double default_video_hysteresis, - double default_screenshare_hysteresis) + const FieldTrialsView& key_value_config) : enabled_("enabled", false), video_hysteresis_factor_("video_hysteresis_factor", - default_video_hysteresis), + /*default_value=*/1.2), screenshare_hysteresis_factor_("screenshare_hysteresis_factor", - default_screenshare_hysteresis) { + /*default_value=*/1.35) { ParseFieldTrial( {&enabled_, &video_hysteresis_factor_, &screenshare_hysteresis_factor_}, - key_value_config->Lookup(kFieldTrialName)); + key_value_config.Lookup(kFieldTrialName)); } StableTargetRateExperiment::StableTargetRateExperiment( @@ -36,18 +35,6 @@ StableTargetRateExperiment::StableTargetRateExperiment( StableTargetRateExperiment::StableTargetRateExperiment( StableTargetRateExperiment&&) = default; -StableTargetRateExperiment StableTargetRateExperiment::ParseFromFieldTrials() { - FieldTrialBasedConfig config; - return ParseFromKeyValueConfig(&config); -} - -StableTargetRateExperiment StableTargetRateExperiment::ParseFromKeyValueConfig( - const FieldTrialsView* const key_value_config) { - return StableTargetRateExperiment(key_value_config, - /*default_video_hysteresis=*/1.2, - /*default_screenshare_hysteresis=*/1.35); -} - bool StableTargetRateExperiment::IsEnabled() const { return enabled_.Get(); } diff --git a/rtc_base/experiments/stable_target_rate_experiment.h b/rtc_base/experiments/stable_target_rate_experiment.h index be0f9da129..ced3eb8b81 100644 --- a/rtc_base/experiments/stable_target_rate_experiment.h +++ b/rtc_base/experiments/stable_target_rate_experiment.h @@ -18,22 +18,15 @@ namespace webrtc { class StableTargetRateExperiment { public: + explicit StableTargetRateExperiment(const FieldTrialsView& field_trials); StableTargetRateExperiment(const StableTargetRateExperiment&); StableTargetRateExperiment(StableTargetRateExperiment&&); - static StableTargetRateExperiment ParseFromFieldTrials(); - static StableTargetRateExperiment ParseFromKeyValueConfig( - const FieldTrialsView* const key_value_config); bool IsEnabled() const; double 
GetVideoHysteresisFactor() const; double GetScreenshareHysteresisFactor() const; private: - explicit StableTargetRateExperiment( - const FieldTrialsView* const key_value_config, - double default_video_hysteresis, - double default_screenshare_hysteresis); - FieldTrialParameter enabled_; FieldTrialParameter video_hysteresis_factor_; FieldTrialParameter screenshare_hysteresis_factor_; diff --git a/rtc_base/experiments/stable_target_rate_experiment_unittest.cc b/rtc_base/experiments/stable_target_rate_experiment_unittest.cc index 854398e910..f4b63c30b6 100644 --- a/rtc_base/experiments/stable_target_rate_experiment_unittest.cc +++ b/rtc_base/experiments/stable_target_rate_experiment_unittest.cc @@ -10,46 +10,45 @@ #include "rtc_base/experiments/stable_target_rate_experiment.h" -#include "test/field_trial.h" +#include "test/explicit_key_value_config.h" #include "test/gtest.h" namespace webrtc { +using test::ExplicitKeyValueConfig; + TEST(StableBweExperimentTest, Default) { - StableTargetRateExperiment config = - StableTargetRateExperiment::ParseFromFieldTrials(); + ExplicitKeyValueConfig field_trials(""); + StableTargetRateExperiment config(field_trials); EXPECT_FALSE(config.IsEnabled()); EXPECT_EQ(config.GetVideoHysteresisFactor(), 1.2); EXPECT_EQ(config.GetScreenshareHysteresisFactor(), 1.35); } TEST(StableBweExperimentTest, EnabledNoHysteresis) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-StableTargetRate/enabled:true/"); + ExplicitKeyValueConfig field_trials("WebRTC-StableTargetRate/enabled:true/"); - StableTargetRateExperiment config = - StableTargetRateExperiment::ParseFromFieldTrials(); + StableTargetRateExperiment config(field_trials); EXPECT_TRUE(config.IsEnabled()); EXPECT_EQ(config.GetVideoHysteresisFactor(), 1.2); EXPECT_EQ(config.GetScreenshareHysteresisFactor(), 1.35); } TEST(StableBweExperimentTest, EnabledWithHysteresis) { - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-StableTargetRate/" "enabled:true," "video_hysteresis_factor:1.1," "screenshare_hysteresis_factor:1.2/"); - StableTargetRateExperiment config = - StableTargetRateExperiment::ParseFromFieldTrials(); + StableTargetRateExperiment config(field_trials); EXPECT_TRUE(config.IsEnabled()); EXPECT_EQ(config.GetVideoHysteresisFactor(), 1.1); EXPECT_EQ(config.GetScreenshareHysteresisFactor(), 1.2); } TEST(StableBweExperimentTest, HysteresisOverrideVideoRateHystersis) { - webrtc::test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig field_trials( "WebRTC-StableTargetRate/" "enabled:true," "video_hysteresis_factor:1.1," @@ -57,8 +56,7 @@ TEST(StableBweExperimentTest, HysteresisOverrideVideoRateHystersis) { "WebRTC-VideoRateControl/video_hysteresis:1.3," "screenshare_hysteresis:1.4/"); - StableTargetRateExperiment config = - StableTargetRateExperiment::ParseFromFieldTrials(); + StableTargetRateExperiment config(field_trials); EXPECT_TRUE(config.IsEnabled()); EXPECT_EQ(config.GetVideoHysteresisFactor(), 1.1); EXPECT_EQ(config.GetScreenshareHysteresisFactor(), 1.2); diff --git a/rtc_base/experiments/struct_parameters_parser.cc b/rtc_base/experiments/struct_parameters_parser.cc index 011df3eaba..bb46453a87 100644 --- a/rtc_base/experiments/struct_parameters_parser.cc +++ b/rtc_base/experiments/struct_parameters_parser.cc @@ -26,16 +26,16 @@ size_t FindOrEnd(absl::string_view str, size_t start, char delimiter) { namespace struct_parser_impl { namespace { inline void StringEncode(std::string* target, bool val) { - *target += rtc::ToString(val); + *target += 
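The StableTargetRateExperiment change above follows the same pattern; the hysteresis defaults (1.2 for video, 1.35 for screenshare) now live in the constructor rather than being passed through the removed factories. A minimal sketch of the new call site, with ExplicitKeyValueConfig as a placeholder FieldTrialsView:

#include "rtc_base/experiments/stable_target_rate_experiment.h"
#include "test/explicit_key_value_config.h"

void StableTargetRateSketch() {
  webrtc::test::ExplicitKeyValueConfig field_trials(
      "WebRTC-StableTargetRate/enabled:true,video_hysteresis_factor:1.1/");
  webrtc::StableTargetRateExperiment experiment(field_trials);

  bool enabled = experiment.IsEnabled();                 // true
  double video = experiment.GetVideoHysteresisFactor();  // 1.1 (overridden)
  double share = experiment.GetScreenshareHysteresisFactor();  // 1.35 (default)
  (void)enabled;
  (void)video;
  (void)share;
}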
BoolToString(val); } inline void StringEncode(std::string* target, double val) { - *target += rtc::ToString(val); + *target += absl::StrCat(val); } inline void StringEncode(std::string* target, int val) { - *target += rtc::ToString(val); + *target += absl::StrCat(val); } inline void StringEncode(std::string* target, unsigned val) { - *target += rtc::ToString(val); + *target += absl::StrCat(val); } inline void StringEncode(std::string* target, DataRate val) { *target += webrtc::ToString(val); @@ -48,7 +48,7 @@ inline void StringEncode(std::string* target, TimeDelta val) { } template -inline void StringEncode(std::string* sb, absl::optional val) { +inline void StringEncode(std::string* sb, std::optional val) { if (val) StringEncode(sb, *val); } @@ -69,16 +69,16 @@ template class TypedParser; template class TypedParser; template class TypedParser; template class TypedParser; -template class TypedParser>; -template class TypedParser>; -template class TypedParser>; +template class TypedParser>; +template class TypedParser>; +template class TypedParser>; template class TypedParser; template class TypedParser; template class TypedParser; -template class TypedParser>; -template class TypedParser>; -template class TypedParser>; +template class TypedParser>; +template class TypedParser>; +template class TypedParser>; } // namespace struct_parser_impl StructParametersParser::StructParametersParser( diff --git a/rtc_base/experiments/struct_parameters_parser.h b/rtc_base/experiments/struct_parameters_parser.h index f5f8340209..3d0b05bdc2 100644 --- a/rtc_base/experiments/struct_parameters_parser.h +++ b/rtc_base/experiments/struct_parameters_parser.h @@ -13,13 +13,13 @@ #include #include #include +#include #include #include #include #include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/field_trial_units.h" #include "rtc_base/string_encode.h" @@ -54,16 +54,16 @@ extern template class TypedParser; extern template class TypedParser; extern template class TypedParser; extern template class TypedParser; -extern template class TypedParser>; -extern template class TypedParser>; -extern template class TypedParser>; +extern template class TypedParser>; +extern template class TypedParser>; +extern template class TypedParser>; extern template class TypedParser; extern template class TypedParser; extern template class TypedParser; -extern template class TypedParser>; -extern template class TypedParser>; -extern template class TypedParser>; +extern template class TypedParser>; +extern template class TypedParser>; +extern template class TypedParser>; template void AddMembers(MemberParameter* out, const char* key, T* member) { diff --git a/rtc_base/experiments/struct_parameters_parser_unittest.cc b/rtc_base/experiments/struct_parameters_parser_unittest.cc index 0824bd3b27..918bcb6e28 100644 --- a/rtc_base/experiments/struct_parameters_parser_unittest.cc +++ b/rtc_base/experiments/struct_parameters_parser_unittest.cc @@ -19,8 +19,8 @@ struct DummyConfig { int retries = 5; unsigned size = 3; bool ping = 0; - absl::optional duration; - absl::optional latency = TimeDelta::Millis(100); + std::optional duration; + std::optional latency = TimeDelta::Millis(100); std::unique_ptr Parser(); }; diff --git a/rtc_base/fake_clock.cc b/rtc_base/fake_clock.cc index 652a5afa3a..0ab8b52e94 100644 --- a/rtc_base/fake_clock.cc +++ b/rtc_base/fake_clock.cc @@ -13,32 +13,32 @@ #include "rtc_base/checks.h" 
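StructParametersParser keeps working unchanged with std::optional members after the instantiations above were switched over. A sketch of a small config struct wired through the Create() factory; the struct, keys and values here are made up for illustration:

#include <memory>
#include <optional>

#include "api/units/time_delta.h"
#include "rtc_base/experiments/struct_parameters_parser.h"

// Hypothetical config, analogous to DummyConfig in the unit test above.
struct ExampleConfig {
  int retries = 5;
  bool ping = false;
  std::optional<webrtc::TimeDelta> latency = webrtc::TimeDelta::Millis(100);

  std::unique_ptr<webrtc::StructParametersParser> Parser() {
    return webrtc::StructParametersParser::Create(  //
        "r", &retries,                              //
        "p", &ping,                                 //
        "l", &latency);
  }
};

void StructParametersParserSketch() {
  ExampleConfig config;
  // Listed keys update the bound members; unknown keys are ignored.
  config.Parser()->Parse("r:3,p:1,l:250ms");
  // config.retries == 3, config.ping == true, config.latency == 250 ms.
}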
#include "rtc_base/thread.h" -namespace rtc { +namespace webrtc { int64_t FakeClock::TimeNanos() const { - webrtc::MutexLock lock(&lock_); + MutexLock lock(&lock_); return time_ns_; } -void FakeClock::SetTime(webrtc::Timestamp new_time) { - webrtc::MutexLock lock(&lock_); +void FakeClock::SetTime(Timestamp new_time) { + MutexLock lock(&lock_); RTC_DCHECK(new_time.us() * 1000 >= time_ns_); time_ns_ = new_time.us() * 1000; } -void FakeClock::AdvanceTime(webrtc::TimeDelta delta) { - webrtc::MutexLock lock(&lock_); +void FakeClock::AdvanceTime(TimeDelta delta) { + MutexLock lock(&lock_); time_ns_ += delta.ns(); } -void ThreadProcessingFakeClock::SetTime(webrtc::Timestamp time) { +void ThreadProcessingFakeClock::SetTime(Timestamp time) { clock_.SetTime(time); // If message queues are waiting in a socket select() with a timeout provided // by the OS, they should wake up and dispatch all messages that are ready. ThreadManager::ProcessAllMessageQueuesForTesting(); } -void ThreadProcessingFakeClock::AdvanceTime(webrtc::TimeDelta delta) { +void ThreadProcessingFakeClock::AdvanceTime(TimeDelta delta) { clock_.AdvanceTime(delta); ThreadManager::ProcessAllMessageQueuesForTesting(); } @@ -59,4 +59,4 @@ ScopedFakeClock::~ScopedFakeClock() { SetClockForTesting(prev_clock_); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/fake_clock.h b/rtc_base/fake_clock.h index edb507becb..66050c766c 100644 --- a/rtc_base/fake_clock.h +++ b/rtc_base/fake_clock.h @@ -19,7 +19,7 @@ #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" -namespace rtc { +namespace webrtc { // Fake clock for use with unit tests, which does not tick on its own. // Starts at time 0. @@ -38,20 +38,20 @@ class FakeClock : public ClockInterface { // Methods that can be used by the test to control the time. // Should only be used to set a time in the future. - void SetTime(webrtc::Timestamp new_time); + void SetTime(Timestamp new_time); - void AdvanceTime(webrtc::TimeDelta delta); + void AdvanceTime(TimeDelta delta); private: - mutable webrtc::Mutex lock_; + mutable Mutex lock_; int64_t time_ns_ RTC_GUARDED_BY(lock_) = 0; }; class ThreadProcessingFakeClock : public ClockInterface { public: int64_t TimeNanos() const override { return clock_.TimeNanos(); } - void SetTime(webrtc::Timestamp time); - void AdvanceTime(webrtc::TimeDelta delta); + void SetTime(Timestamp time); + void AdvanceTime(TimeDelta delta); private: FakeClock clock_; @@ -78,6 +78,17 @@ class ScopedFakeClock : public ThreadProcessingFakeClock { ClockInterface* prev_clock_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::FakeClock; +using ::webrtc::ScopedBaseFakeClock; +using ::webrtc::ScopedFakeClock; +using ::webrtc::ThreadProcessingFakeClock; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_FAKE_CLOCK_H_ diff --git a/rtc_base/fake_clock_unittest.cc b/rtc_base/fake_clock_unittest.cc index fc1d5100e6..e138b74c75 100644 --- a/rtc_base/fake_clock_unittest.cc +++ b/rtc_base/fake_clock_unittest.cc @@ -12,26 +12,26 @@ #include "test/gtest.h" -namespace rtc { +namespace webrtc { TEST(ScopedFakeClockTest, OverridesGlobalClock) { const int64_t kFixedTimeUs = 100000; - int64_t real_time_us = rtc::TimeMicros(); + int64_t real_time_us = TimeMicros(); EXPECT_NE(real_time_us, 0); { ScopedFakeClock scoped; - EXPECT_EQ(rtc::TimeMicros(), 0); + EXPECT_EQ(TimeMicros(), 0); - scoped.AdvanceTime(webrtc::TimeDelta::Millis(1)); - EXPECT_EQ(rtc::TimeMicros(), 1000); + scoped.AdvanceTime(TimeDelta::Millis(1)); + EXPECT_EQ(TimeMicros(), 1000); - scoped.SetTime(webrtc::Timestamp::Micros(kFixedTimeUs)); - EXPECT_EQ(rtc::TimeMicros(), kFixedTimeUs); + scoped.SetTime(Timestamp::Micros(kFixedTimeUs)); + EXPECT_EQ(TimeMicros(), kFixedTimeUs); - scoped.AdvanceTime(webrtc::TimeDelta::Millis(1)); - EXPECT_EQ(rtc::TimeMicros(), kFixedTimeUs + 1000); + scoped.AdvanceTime(TimeDelta::Millis(1)); + EXPECT_EQ(TimeMicros(), kFixedTimeUs + 1000); } - EXPECT_NE(rtc::TimeMicros(), kFixedTimeUs + 1000); - EXPECT_GE(rtc::TimeMicros(), real_time_us); + EXPECT_NE(TimeMicros(), kFixedTimeUs + 1000); + EXPECT_GE(TimeMicros(), real_time_us); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/fake_mdns_responder.h b/rtc_base/fake_mdns_responder.h index 706c11b913..ee9517b457 100644 --- a/rtc_base/fake_mdns_responder.h +++ b/rtc_base/fake_mdns_responder.h @@ -27,10 +27,10 @@ namespace webrtc { // depends on, e.g., using WeakPtrFactory or PendingTaskSafetyFlag. 
class FakeMdnsResponder : public MdnsResponderInterface { public: - explicit FakeMdnsResponder(rtc::Thread* thread) : thread_(thread) {} + explicit FakeMdnsResponder(Thread* thread) : thread_(thread) {} ~FakeMdnsResponder() = default; - void CreateNameForAddress(const rtc::IPAddress& addr, + void CreateNameForAddress(const IPAddress& addr, NameCreatedCallback callback) override { std::string name; if (addr_name_map_.find(addr) != addr_name_map_.end()) { @@ -41,7 +41,7 @@ class FakeMdnsResponder : public MdnsResponderInterface { } thread_->PostTask([callback, addr, name]() { callback(addr, name); }); } - void RemoveNameForAddress(const rtc::IPAddress& addr, + void RemoveNameForAddress(const IPAddress& addr, NameRemovedCallback callback) override { auto it = addr_name_map_.find(addr); if (it != addr_name_map_.end()) { @@ -51,19 +51,19 @@ class FakeMdnsResponder : public MdnsResponderInterface { thread_->PostTask([callback, result]() { callback(result); }); } - rtc::IPAddress GetMappedAddressForName(absl::string_view name) const { + IPAddress GetMappedAddressForName(absl::string_view name) const { for (const auto& addr_name_pair : addr_name_map_) { if (addr_name_pair.second == name) { return addr_name_pair.first; } } - return rtc::IPAddress(); + return IPAddress(); } private: uint32_t next_available_id_ = 0; - std::map addr_name_map_; - rtc::Thread* const thread_; + std::map addr_name_map_; + Thread* const thread_; }; } // namespace webrtc diff --git a/rtc_base/fake_network.h b/rtc_base/fake_network.h index bc0330606f..6e1521a54c 100644 --- a/rtc_base/fake_network.h +++ b/rtc_base/fake_network.h @@ -12,18 +12,22 @@ #define RTC_BASE_FAKE_NETWORK_H_ #include +#include #include #include #include -#include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "rtc_base/ip_address.h" #include "rtc_base/mdns_responder_interface.h" +#include "rtc_base/net_helpers.h" #include "rtc_base/network.h" +#include "rtc_base/network_constants.h" #include "rtc_base/socket_address.h" #include "rtc_base/string_encode.h" #include "rtc_base/thread.h" -namespace rtc { +namespace webrtc { const int kFakeIPv4NetworkPrefixLength = 24; const int kFakeIPv6NetworkPrefixLength = 64; @@ -36,13 +40,13 @@ class FakeNetworkManager : public NetworkManagerBase { struct Iface { SocketAddress socket_address; AdapterType adapter_type; - absl::optional underlying_vpn_adapter_type; + std::optional underlying_vpn_adapter_type; }; typedef std::vector IfaceList; void AddInterface(const SocketAddress& iface) { // Ensure a unique name for the interface if its name is not given. - AddInterface(iface, "test" + rtc::ToString(next_index_++)); + AddInterface(iface, "test" + absl::StrCat(next_index_++)); } void AddInterface(const SocketAddress& iface, absl::string_view if_name) { @@ -53,7 +57,7 @@ class FakeNetworkManager : public NetworkManagerBase { const SocketAddress& iface, absl::string_view if_name, AdapterType type, - absl::optional underlying_vpn_adapter_type = absl::nullopt) { + std::optional underlying_vpn_adapter_type = std::nullopt) { SocketAddress address(if_name, 0); address.SetResolvedIP(iface.ipaddr()); ifaces_.push_back({address, type, underlying_vpn_adapter_type}); @@ -85,13 +89,13 @@ class FakeNetworkManager : public NetworkManagerBase { using NetworkManagerBase::set_default_local_addresses; using NetworkManagerBase::set_enumeration_permission; - // rtc::NetworkManager override. - webrtc::MdnsResponderInterface* GetMdnsResponder() const override { + // webrtc::NetworkManager override. 
+ MdnsResponderInterface* GetMdnsResponder() const override { return mdns_responder_.get(); } void set_mdns_responder( - std::unique_ptr mdns_responder) { + std::unique_ptr mdns_responder) { mdns_responder_ = std::move(mdns_responder); } @@ -107,7 +111,8 @@ class FakeNetworkManager : public NetworkManagerBase { } else if (it->socket_address.ipaddr().family() == AF_INET6) { prefix_length = kFakeIPv6NetworkPrefixLength; } - IPAddress prefix = TruncateIP(it->socket_address.ipaddr(), prefix_length); + IPAddress prefix = + webrtc::TruncateIP(it->socket_address.ipaddr(), prefix_length); auto net = std::make_unique( it->socket_address.hostname(), it->socket_address.hostname(), prefix, prefix_length, it->adapter_type); @@ -131,9 +136,19 @@ class FakeNetworkManager : public NetworkManagerBase { int start_count_ = 0; bool sent_first_update_ = false; - std::unique_ptr mdns_responder_; + std::unique_ptr mdns_responder_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::FakeNetworkManager; +using ::webrtc::kFakeIPv4NetworkPrefixLength; +using ::webrtc::kFakeIPv6NetworkPrefixLength; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_FAKE_NETWORK_H_ diff --git a/rtc_base/fake_ssl_identity.cc b/rtc_base/fake_ssl_identity.cc index 73c843a2e7..31ce4400b4 100644 --- a/rtc_base/fake_ssl_identity.cc +++ b/rtc_base/fake_ssl_identity.cc @@ -10,15 +10,21 @@ #include "rtc_base/fake_ssl_identity.h" +#include +#include #include #include #include +#include #include "absl/strings/string_view.h" +#include "rtc_base/buffer.h" #include "rtc_base/checks.h" #include "rtc_base/message_digest.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_identity.h" -namespace rtc { +namespace webrtc { FakeSSLCertificate::FakeSSLCertificate(absl::string_view pem_string) : pem_string_(pem_string), @@ -63,12 +69,12 @@ bool FakeSSLCertificate::GetSignatureDigestAlgorithm( } bool FakeSSLCertificate::ComputeDigest(absl::string_view algorithm, - unsigned char* digest, - size_t size, - size_t* length) const { - *length = rtc::ComputeDigest(algorithm, pem_string_.c_str(), - pem_string_.size(), digest, size); - return (*length != 0); + Buffer& digest) const { + size_t length = ::webrtc::ComputeDigest(algorithm, pem_string_.c_str(), + pem_string_.size(), digest.data(), + digest.capacity()); + digest.SetSize(length); + return length != 0; } FakeSSLIdentity::FakeSSLIdentity(absl::string_view pem_string) @@ -118,4 +124,4 @@ bool FakeSSLIdentity::operator==(const SSLIdentity& other) const { return false; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/fake_ssl_identity.h b/rtc_base/fake_ssl_identity.h index 2b4ae2e57a..24c8ad6aaf 100644 --- a/rtc_base/fake_ssl_identity.h +++ b/rtc_base/fake_ssl_identity.h @@ -11,14 +11,18 @@ #ifndef RTC_BASE_FAKE_SSL_IDENTITY_H_ #define RTC_BASE_FAKE_SSL_IDENTITY_H_ +#include +#include #include +#include #include #include "absl/strings/string_view.h" +#include "rtc_base/buffer.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" -namespace rtc { +namespace webrtc { class FakeSSLCertificate : public SSLCertificate { public: @@ -36,9 +40,7 @@ class FakeSSLCertificate : public SSLCertificate { int64_t CertificateExpirationTime() const override; bool GetSignatureDigestAlgorithm(std::string* algorithm) const override; bool 
ComputeDigest(absl::string_view algorithm, - unsigned char* digest, - size_t size, - size_t* length) const override; + Buffer& digest) const override; void SetCertificateExpirationTime(int64_t expiration_time); @@ -78,6 +80,15 @@ class FakeSSLIdentity : public SSLIdentity { std::unique_ptr cert_chain_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::FakeSSLCertificate; +using ::webrtc::FakeSSLIdentity; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_FAKE_SSL_IDENTITY_H_ diff --git a/rtc_base/file_rotating_stream.cc b/rtc_base/file_rotating_stream.cc index c56396f157..cf7d1ad78b 100644 --- a/rtc_base/file_rotating_stream.cc +++ b/rtc_base/file_rotating_stream.cc @@ -10,33 +10,38 @@ #include "rtc_base/file_rotating_stream.h" +#include #include +#include #include -#include +#include #include "absl/strings/string_view.h" +#include "rtc_base/system/file_wrapper.h" +// IWYU pragma: begin_keep #if defined(WEBRTC_WIN) #include #include "rtc_base/string_utils.h" +#include "rtc_base/strings/string_builder.h" #else #include #include #include #endif // WEBRTC_WIN +// IWYU pragma: end_keep + +#include #include "absl/algorithm/container.h" #include "absl/strings/match.h" -#include "absl/types/optional.h" #include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/strings/string_builder.h" // Note: We use fprintf for logging in the write paths of this stream to avoid // infinite loops when logging. -namespace rtc { +namespace webrtc { namespace { @@ -52,7 +57,7 @@ bool DeleteFile(absl::string_view file); bool MoveFile(absl::string_view old_file, absl::string_view new_file); bool IsFile(absl::string_view file); bool IsFolder(absl::string_view file); -absl::optional GetFileSize(absl::string_view file); +std::optional GetFileSize(absl::string_view file); #if defined(WEBRTC_WIN) @@ -110,11 +115,11 @@ bool IsFolder(absl::string_view file) { FILE_ATTRIBUTE_DIRECTORY; } -absl::optional GetFileSize(absl::string_view file) { +std::optional GetFileSize(absl::string_view file) { WIN32_FILE_ATTRIBUTE_DATA data = {0}; if (::GetFileAttributesExW(ToUtf16(file).c_str(), GetFileExInfoStandard, &data) == 0) - return absl::nullopt; + return std::nullopt; return data.nFileSizeLow; } @@ -168,10 +173,10 @@ bool IsFolder(absl::string_view file) { return res == 0 && S_ISDIR(st.st_mode); } -absl::optional GetFileSize(absl::string_view file) { +std::optional GetFileSize(absl::string_view file) { struct stat st; if (::stat(std::string(file).c_str(), &st) != 0) - return absl::nullopt; + return std::nullopt; return st.st_size; } @@ -281,7 +286,7 @@ bool FileRotatingStream::OpenCurrentFile() { // We should always be writing to the zero-th file. 
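The ComputeDigest() change in the fake_ssl_identity hunks above replaces the raw digest pointer, capacity and out-length triple with a single webrtc::Buffer that carries capacity in and size out. A sketch of the caller side under that contract; the PEM string is a hypothetical test value and "sha-256" is the algorithm name understood by rtc_base/message_digest:

#include "absl/strings/string_view.h"
#include "rtc_base/buffer.h"
#include "rtc_base/fake_ssl_identity.h"

bool HashFakeCert(absl::string_view pem) {
  webrtc::FakeSSLCertificate cert(pem);
  webrtc::Buffer digest(/*size=*/0, /*capacity=*/64);
  if (!cert.ComputeDigest("sha-256", digest)) {
    return false;
  }
  // On success the certificate has called digest.SetSize() with the hash
  // length (32 bytes for SHA-256), so data()/size() describe the result.
  return digest.size() == 32;
}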
RTC_DCHECK_EQ(current_file_index_, 0); int error; - file_ = webrtc::FileWrapper::OpenWriteOnly(file_path, &error); + file_ = FileWrapper::OpenWriteOnly(file_path, &error); if (!file_.is_open()) { std::fprintf(stderr, "Failed to open: %s Error: %d\n", file_path.c_str(), error); @@ -409,7 +414,7 @@ size_t FileRotatingStreamReader::ReadAll(void* buffer, size_t size) const { size_t done = 0; for (const auto& file_name : file_names_) { if (done < size) { - webrtc::FileWrapper f = webrtc::FileWrapper::OpenReadOnly(file_name); + FileWrapper f = FileWrapper::OpenReadOnly(file_name); if (!f.is_open()) { break; } @@ -425,4 +430,4 @@ CallSessionFileRotatingStreamReader::CallSessionFileRotatingStreamReader( absl::string_view dir_path) : FileRotatingStreamReader(dir_path, kCallSessionLogPrefix) {} -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/file_rotating_stream.h b/rtc_base/file_rotating_stream.h index 6ae2753098..de122efcd1 100644 --- a/rtc_base/file_rotating_stream.h +++ b/rtc_base/file_rotating_stream.h @@ -13,14 +13,13 @@ #include -#include #include #include #include "absl/strings/string_view.h" #include "rtc_base/system/file_wrapper.h" -namespace rtc { +namespace webrtc { // FileRotatingStream writes to a file in the directory specified in the // constructor. It rotates the files once the current file is full. The @@ -91,7 +90,7 @@ class FileRotatingStream { const std::string file_prefix_; // File we're currently writing to. - webrtc::FileWrapper file_; + FileWrapper file_; // Convenience storage for file names so we don't generate them over and over. std::vector file_names_; size_t max_file_size_; @@ -168,6 +167,17 @@ class CallSessionFileRotatingStreamReader : public FileRotatingStreamReader { CallSessionFileRotatingStreamReader(absl::string_view dir_path); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
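A short usage sketch for the reader side of the file rotation API shown above; dir_path is a hypothetical directory that a CallSessionFileRotatingStream previously wrote its rotated log files into:

#include <cstddef>

#include "absl/strings/string_view.h"
#include "rtc_base/file_rotating_stream.h"

size_t DumpRotatedLog(absl::string_view dir_path, void* buffer, size_t size) {
  webrtc::CallSessionFileRotatingStreamReader reader(dir_path);
  // ReadAll() walks the rotated files in order and concatenates their
  // contents into the caller-provided buffer, up to size bytes.
  return reader.ReadAll(buffer, size);
}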
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::CallSessionFileRotatingStream; +using ::webrtc::CallSessionFileRotatingStreamReader; +using ::webrtc::FileRotatingStream; +using ::webrtc::FileRotatingStreamReader; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_FILE_ROTATING_STREAM_H_ diff --git a/rtc_base/file_rotating_stream_unittest.cc b/rtc_base/file_rotating_stream_unittest.cc index 1d1e5b62cb..1422fd1db1 100644 --- a/rtc_base/file_rotating_stream_unittest.cc +++ b/rtc_base/file_rotating_stream_unittest.cc @@ -12,15 +12,18 @@ #include +#include #include #include +#include #include "absl/strings/string_view.h" #include "rtc_base/arraysize.h" +#include "rtc_base/system/file_wrapper.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" -namespace rtc { +namespace webrtc { namespace { @@ -96,7 +99,7 @@ class MAYBE_FileRotatingStreamTest : public ::testing::Test { absl::string_view file_path) { size_t expected_length = expected_contents.size(); std::unique_ptr buffer(new uint8_t[expected_length + 1]); - webrtc::FileWrapper f = webrtc::FileWrapper::OpenReadOnly(file_path); + FileWrapper f = FileWrapper::OpenReadOnly(file_path); ASSERT_TRUE(f.is_open()); size_t size_read = f.Read(buffer.get(), expected_length + 1); EXPECT_EQ(size_read, expected_length); @@ -131,7 +134,7 @@ TEST_F(MAYBE_FileRotatingStreamTest, EmptyWrite) { WriteAndFlush("a", 0); std::string logfile_path = stream_->GetFilePath(0); - webrtc::FileWrapper f = webrtc::FileWrapper::OpenReadOnly(logfile_path); + FileWrapper f = FileWrapper::OpenReadOnly(logfile_path); ASSERT_TRUE(f.is_open()); char buf[1]; EXPECT_EQ(0u, f.Read(buf, sizeof(buf))); @@ -391,4 +394,4 @@ TEST_F(MAYBE_CallSessionFileRotatingStreamTest, WriteAndReadFirstHalf) { } } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/firewall_socket_server.cc b/rtc_base/firewall_socket_server.cc index db88d19a15..1301081bdf 100644 --- a/rtc_base/firewall_socket_server.cc +++ b/rtc_base/firewall_socket_server.cc @@ -21,7 +21,7 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" -namespace rtc { +namespace webrtc { class FirewallSocket : public AsyncSocketAdapter { public: @@ -163,19 +163,19 @@ void FirewallSocketServer::AddRule(bool allow, r.p = p; r.src = src; r.dst = dst; - webrtc::MutexLock scope(&mutex_); + MutexLock scope(&mutex_); rules_.push_back(r); } void FirewallSocketServer::ClearRules() { - webrtc::MutexLock scope(&mutex_); + MutexLock scope(&mutex_); rules_.clear(); } bool FirewallSocketServer::Check(FirewallProtocol p, const SocketAddress& src, const SocketAddress& dst) { - webrtc::MutexLock scope(&mutex_); + MutexLock scope(&mutex_); for (size_t i = 0; i < rules_.size(); ++i) { const Rule& r = rules_[i]; if ((r.p != p) && (r.p != FP_ANY)) @@ -194,11 +194,11 @@ bool FirewallSocketServer::Check(FirewallProtocol p, } void FirewallSocketServer::SetUnbindableIps( - const std::vector& unbindable_ips) { + const std::vector& unbindable_ips) { unbindable_ips_ = unbindable_ips; } -bool FirewallSocketServer::IsBindableIp(const rtc::IPAddress& ip) { +bool FirewallSocketServer::IsBindableIp(const IPAddress& ip) { return !absl::c_linear_search(unbindable_ips_, ip); } @@ -210,8 +210,7 @@ void FirewallSocketServer::SetMessageQueue(Thread* queue) { server_->SetMessageQueue(queue); } -bool FirewallSocketServer::Wait(webrtc::TimeDelta max_wait_duration, - bool process_io) { +bool FirewallSocketServer::Wait(TimeDelta max_wait_duration, bool process_io) { return 
server_->Wait(max_wait_duration, process_io); } @@ -236,12 +235,12 @@ FirewallManager::~FirewallManager() { } void FirewallManager::AddServer(FirewallSocketServer* server) { - webrtc::MutexLock scope(&mutex_); + MutexLock scope(&mutex_); servers_.push_back(server); } void FirewallManager::RemoveServer(FirewallSocketServer* server) { - webrtc::MutexLock scope(&mutex_); + MutexLock scope(&mutex_); servers_.erase(std::remove(servers_.begin(), servers_.end(), server), servers_.end()); } @@ -250,7 +249,7 @@ void FirewallManager::AddRule(bool allow, FirewallProtocol p, FirewallDirection d, const SocketAddress& addr) { - webrtc::MutexLock scope(&mutex_); + MutexLock scope(&mutex_); for (std::vector::const_iterator it = servers_.begin(); it != servers_.end(); ++it) { (*it)->AddRule(allow, p, d, addr); @@ -258,11 +257,11 @@ void FirewallManager::AddRule(bool allow, } void FirewallManager::ClearRules() { - webrtc::MutexLock scope(&mutex_); + MutexLock scope(&mutex_); for (std::vector::const_iterator it = servers_.begin(); it != servers_.end(); ++it) { (*it)->ClearRules(); } } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/firewall_socket_server.h b/rtc_base/firewall_socket_server.h index 63f9e1ac6c..1aa613598a 100644 --- a/rtc_base/firewall_socket_server.h +++ b/rtc_base/firewall_socket_server.h @@ -19,7 +19,7 @@ #include "rtc_base/socket_server.h" #include "rtc_base/synchronization/mutex.h" -namespace rtc { +namespace webrtc { class FirewallManager; @@ -73,13 +73,13 @@ class FirewallSocketServer : public SocketServer { // No matter how many addresses are added (including INADDR_ANY), the server // will still allow creating outgoing TCP connections, since they don't // require explicitly binding a socket. - void SetUnbindableIps(const std::vector& unbindable_ips); - bool IsBindableIp(const rtc::IPAddress& ip); + void SetUnbindableIps(const std::vector& unbindable_ips); + bool IsBindableIp(const IPAddress& ip); Socket* CreateSocket(int family, int type) override; void SetMessageQueue(Thread* queue) override; - bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) override; + bool Wait(TimeDelta max_wait_duration, bool process_io) override; void WakeUp() override; Socket* WrapSocket(Socket* sock, int type); @@ -87,7 +87,7 @@ class FirewallSocketServer : public SocketServer { private: SocketServer* server_; FirewallManager* manager_; - webrtc::Mutex mutex_; + Mutex mutex_; struct Rule { bool allow; FirewallProtocol p; @@ -96,7 +96,7 @@ class FirewallSocketServer : public SocketServer { SocketAddress dst; }; std::vector rules_; - std::vector unbindable_ips_; + std::vector unbindable_ips_; bool should_delete_server_; bool udp_sockets_enabled_; bool tcp_sockets_enabled_; @@ -120,10 +120,27 @@ class FirewallManager { void ClearRules(); private: - webrtc::Mutex mutex_; + Mutex mutex_; std::vector servers_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
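A sketch of the manager-level rule API shown above: a rule added through FirewallManager fans out to every FirewallSocketServer registered with AddServer(). The default-constructed SocketAddress is used here as a match-anything placeholder, which is an assumption rather than something spelled out in these hunks:

#include "rtc_base/firewall_socket_server.h"
#include "rtc_base/socket_address.h"

void BlockAllUdp(webrtc::FirewallManager& manager) {
  // FP_UDP/FD_ANY are the protocol and direction enums re-exported below;
  // the empty SocketAddress stands in for "any address" (assumption).
  manager.AddRule(/*allow=*/false, webrtc::FP_UDP, webrtc::FD_ANY,
                  webrtc::SocketAddress());
}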
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::FD_ANY; +using ::webrtc::FD_IN; +using ::webrtc::FD_OUT; +using ::webrtc::FirewallDirection; +using ::webrtc::FirewallManager; +using ::webrtc::FirewallProtocol; +using ::webrtc::FirewallSocketServer; +using ::webrtc::FP_ANY; +using ::webrtc::FP_TCP; +using ::webrtc::FP_UDP; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_FIREWALL_SOCKET_SERVER_H_ diff --git a/rtc_base/frequency_tracker.cc b/rtc_base/frequency_tracker.cc index c3be30e3b8..d99c67b907 100644 --- a/rtc_base/frequency_tracker.cc +++ b/rtc_base/frequency_tracker.cc @@ -10,7 +10,8 @@ #include "rtc_base/frequency_tracker.h" -#include "absl/types/optional.h" +#include + #include "api/units/frequency.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -21,11 +22,11 @@ namespace webrtc { FrequencyTracker::FrequencyTracker(TimeDelta max_window_size) : impl_(max_window_size.ms(), 1'000'000) {} -absl::optional FrequencyTracker::Rate(Timestamp now) const { - if (absl::optional rate = impl_.Rate(now.ms())) { +std::optional FrequencyTracker::Rate(Timestamp now) const { + if (std::optional rate = impl_.Rate(now.ms())) { return Frequency::MilliHertz(*rate); } - return absl::nullopt; + return std::nullopt; } void FrequencyTracker::Update(int64_t count, Timestamp now) { diff --git a/rtc_base/frequency_tracker.h b/rtc_base/frequency_tracker.h index 3ee2ab0e1a..6039c53abb 100644 --- a/rtc_base/frequency_tracker.h +++ b/rtc_base/frequency_tracker.h @@ -14,7 +14,8 @@ #include #include -#include "absl/types/optional.h" +#include + #include "api/units/frequency.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -46,7 +47,7 @@ class RTC_EXPORT FrequencyTracker { // Returns rate, moving averaging window as needed. // Returns nullopt when rate can't be measured. - absl::optional Rate(Timestamp now) const; + std::optional Rate(Timestamp now) const; private: RateStatistics impl_; diff --git a/rtc_base/frequency_tracker_unittest.cc b/rtc_base/frequency_tracker_unittest.cc index 00788c3ee8..af9eeffa19 100644 --- a/rtc_base/frequency_tracker_unittest.cc +++ b/rtc_base/frequency_tracker_unittest.cc @@ -12,8 +12,8 @@ #include #include +#include -#include "absl/types/optional.h" #include "api/units/frequency.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -34,7 +34,7 @@ TEST(FrequencyTrackerTest, ReturnsNulloptInitially) { Timestamp now = Timestamp::Seconds(12'345); FrequencyTracker stats(kWindow); - EXPECT_EQ(stats.Rate(now), absl::nullopt); + EXPECT_EQ(stats.Rate(now), std::nullopt); } TEST(FrequencyTrackerTest, ReturnsNulloptAfterSingleDataPoint) { @@ -44,7 +44,7 @@ TEST(FrequencyTrackerTest, ReturnsNulloptAfterSingleDataPoint) { stats.Update(now); now += TimeDelta::Millis(10); - EXPECT_EQ(stats.Rate(now), absl::nullopt); + EXPECT_EQ(stats.Rate(now), std::nullopt); } TEST(FrequencyTrackerTest, ReturnsRateAfterTwoMeasurements) { @@ -70,13 +70,13 @@ TEST(FrequencyTrackerTest, MeasuresConstantRate) { stats.Update(now); Frequency last_error = Frequency::PlusInfinity(); for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kInterval) { - SCOPED_TRACE(i); + SCOPED_TRACE(ToString(i)); now += kInterval; stats.Update(now); // Until window is full, rate is measured over a smaller window and might // look larger than the constant rate. 
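A sketch of the FrequencyTracker API after the absl::optional to std::optional migration above; the values are arbitrary and mirror the unit tests that follow:

#include <optional>

#include "api/units/frequency.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "rtc_base/frequency_tracker.h"

std::optional<webrtc::Frequency> MeasureRate() {
  webrtc::FrequencyTracker tracker(
      /*max_window_size=*/webrtc::TimeDelta::Seconds(1));
  webrtc::Timestamp now = webrtc::Timestamp::Seconds(12'345);
  tracker.Update(/*count=*/1, now);
  now += webrtc::TimeDelta::Millis(10);
  tracker.Update(/*count=*/1, now);
  // Rate() returns std::nullopt until enough samples are available; with two
  // samples 10 ms apart it yields a finite Frequency.
  return tracker.Rate(now);
}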
- absl::optional rate = stats.Rate(now); + std::optional rate = stats.Rate(now); ASSERT_GE(rate, kConstantRate); // Expect the estimation error to decrease as the window is extended. @@ -88,7 +88,7 @@ TEST(FrequencyTrackerTest, MeasuresConstantRate) { // Once window is full, rate measurment should be stable. for (TimeDelta i = TimeDelta::Zero(); i < kInterval; i += TimeDelta::Millis(1)) { - SCOPED_TRACE(i); + SCOPED_TRACE(ToString(i)); EXPECT_EQ(stats.Rate(now + i), kConstantRate); } } @@ -123,20 +123,20 @@ TEST(FrequencyTrackerTest, IncreasingThenDecreasingRate) { stats.Update(kLargeSize, now); for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kLargeInterval) { - SCOPED_TRACE(i); + SCOPED_TRACE(ToString(i)); now += kLargeInterval; stats.Update(kLargeSize, now); } - absl::optional last_rate = stats.Rate(now); + std::optional last_rate = stats.Rate(now); EXPECT_EQ(last_rate, kLargeSize / kLargeInterval); // Decrease rate with smaller measurments. for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kLargeInterval) { - SCOPED_TRACE(i); + SCOPED_TRACE(ToString(i)); now += kLargeInterval; stats.Update(kSmallSize, now); - absl::optional rate = stats.Rate(now); + std::optional rate = stats.Rate(now); EXPECT_LT(rate, last_rate); last_rate = rate; @@ -145,11 +145,11 @@ TEST(FrequencyTrackerTest, IncreasingThenDecreasingRate) { // Increase rate with more frequent measurments. for (TimeDelta i = TimeDelta::Zero(); i < kWindow; i += kSmallInterval) { - SCOPED_TRACE(i); + SCOPED_TRACE(ToString(i)); now += kSmallInterval; stats.Update(kSmallSize, now); - absl::optional rate = stats.Rate(now); + std::optional rate = stats.Rate(now); EXPECT_GE(rate, last_rate); last_rate = rate; @@ -174,17 +174,17 @@ TEST(FrequencyTrackerTest, ResetAfterSilence) { now += kWindow + kEpsilon; // Silence over window size should trigger auto reset for coming sample. - EXPECT_EQ(pixel_rate.Rate(now), absl::nullopt); + EXPECT_EQ(pixel_rate.Rate(now), std::nullopt); pixel_rate.Update(kPixels, now); // Single measurment after reset is not enough to estimate the rate. - EXPECT_EQ(pixel_rate.Rate(now), absl::nullopt); + EXPECT_EQ(pixel_rate.Rate(now), std::nullopt); // Manual reset, add the same check again. pixel_rate.Reset(); - EXPECT_EQ(pixel_rate.Rate(now), absl::nullopt); + EXPECT_EQ(pixel_rate.Rate(now), std::nullopt); now += kInterval; pixel_rate.Update(kPixels, now); - EXPECT_EQ(pixel_rate.Rate(now), absl::nullopt); + EXPECT_EQ(pixel_rate.Rate(now), std::nullopt); } TEST(FrequencyTrackerTest, ReturnsNulloptWhenOverflows) { @@ -196,7 +196,7 @@ TEST(FrequencyTrackerTest, ReturnsNulloptWhenOverflows) { now += kEpsilon; stats.Update(very_large_number, now); - EXPECT_EQ(stats.Rate(now), absl::nullopt); + EXPECT_EQ(stats.Rate(now), std::nullopt); } } // namespace diff --git a/rtc_base/gunit.cc b/rtc_base/gunit.cc deleted file mode 100644 index 7cd60fe9ee..0000000000 --- a/rtc_base/gunit.cc +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "rtc_base/gunit.h" - -#include - -#include "absl/strings/match.h" -#include "absl/strings/string_view.h" - -::testing::AssertionResult AssertStartsWith(const char* text_expr, - const char* prefix_expr, - absl::string_view text, - absl::string_view prefix) { - if (absl::StartsWith(text, prefix)) { - return ::testing::AssertionSuccess(); - } else { - return ::testing::AssertionFailure() - << text_expr << "\nwhich is\n\"" << text - << "\"\ndoes not start with\n" - << prefix_expr << "\nwhich is\n\"" << prefix << "\""; - } -} - -::testing::AssertionResult AssertStringContains(const char* str_expr, - const char* substr_expr, - absl::string_view str, - absl::string_view substr) { - if (str.find(substr) != absl::string_view::npos) { - return ::testing::AssertionSuccess(); - } else { - return ::testing::AssertionFailure() - << str_expr << "\nwhich is\n\"" << str << "\"\ndoes not contain\n" - << substr_expr << "\nwhich is\n\"" << substr << "\""; - } -} diff --git a/rtc_base/gunit.h b/rtc_base/gunit.h index 6bc1419729..5d259b39fc 100644 --- a/rtc_base/gunit.h +++ b/rtc_base/gunit.h @@ -18,152 +18,34 @@ #include "test/gtest.h" // Wait until "ex" is true, or "timeout" expires. -#define WAIT(ex, timeout) \ - for (int64_t start = rtc::SystemTimeMillis(); \ - !(ex) && rtc::SystemTimeMillis() < start + (timeout);) { \ - rtc::Thread::Current()->ProcessMessages(0); \ - rtc::Thread::Current()->SleepMs(1); \ +#define WAIT(ex, timeout) \ + for (int64_t wait_start = ::webrtc::SystemTimeMillis(); \ + !(ex) && ::webrtc::SystemTimeMillis() < wait_start + (timeout);) { \ + ::webrtc::Thread::Current()->ProcessMessages(0); \ + ::webrtc::Thread::Current()->SleepMs(1); \ } // This returns the result of the test in res, so that we don't re-evaluate // the expression in the XXXX_WAIT macros below, since that causes problems // when the expression is only true the first time you check it. -#define WAIT_(ex, timeout, res) \ - do { \ - int64_t start = rtc::SystemTimeMillis(); \ - res = (ex) && true; \ - while (!res && rtc::SystemTimeMillis() < start + (timeout)) { \ - rtc::Thread::Current()->ProcessMessages(0); \ - rtc::Thread::Current()->SleepMs(1); \ - res = (ex) && true; \ - } \ +#define WAIT_(ex, timeout, res) \ + do { \ + int64_t wait_start = ::webrtc::SystemTimeMillis(); \ + res = (ex) && true; \ + while (!res && ::webrtc::SystemTimeMillis() < wait_start + (timeout)) { \ + ::webrtc::Thread::Current()->ProcessMessages(0); \ + ::webrtc::Thread::Current()->SleepMs(1); \ + res = (ex) && true; \ + } \ } while (0) -// The typical EXPECT_XXXX and ASSERT_XXXXs, but done until true or a timeout. -// One can add failure message by appending "<< msg". 
-#define EXPECT_TRUE_WAIT(ex, timeout) \ - GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ - if (bool res = true) { \ - WAIT_(ex, timeout, res); \ - if (!res) \ - goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__); \ - } else \ - GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : EXPECT_TRUE(ex) - -#define EXPECT_EQ_WAIT(v1, v2, timeout) \ - GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ - if (bool res = true) { \ - WAIT_(v1 == v2, timeout, res); \ - if (!res) \ - goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__); \ - } else \ - GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : EXPECT_EQ(v1, v2) - -#define ASSERT_TRUE_WAIT(ex, timeout) \ - GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ - if (bool res = true) { \ - WAIT_(ex, timeout, res); \ - if (!res) \ - goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__); \ - } else \ - GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : ASSERT_TRUE(ex) - -#define ASSERT_EQ_WAIT(v1, v2, timeout) \ - GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ - if (bool res = true) { \ - WAIT_(v1 == v2, timeout, res); \ - if (!res) \ - goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__); \ - } else \ - GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : ASSERT_EQ(v1, v2) - -// Version with a "soft" timeout and a margin. This logs if the timeout is -// exceeded, but it only fails if the expression still isn't true after the -// margin time passes. -#define EXPECT_TRUE_WAIT_MARGIN(ex, timeout, margin) \ - GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ - if (bool res = true) { \ - WAIT_(ex, timeout, res); \ - if (res) \ - break; \ - RTC_LOG(LS_WARNING) << "Expression " << #ex << " still not true after " \ - << (timeout) << "ms; waiting an additional " << margin \ - << "ms"; \ - WAIT_(ex, margin, res); \ - if (!res) \ - goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__); \ - } else \ - GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : EXPECT_TRUE(ex) - // Wait until "ex" is true, or "timeout" expires, using fake clock where // messages are processed every millisecond. // TODO(pthatcher): Allow tests to control how many milliseconds to advance. -#define SIMULATED_WAIT(ex, timeout, clock) \ - for (int64_t start = rtc::TimeMillis(); \ - !(ex) && rtc::TimeMillis() < start + (timeout);) { \ - (clock).AdvanceTime(webrtc::TimeDelta::Millis(1)); \ +#define SIMULATED_WAIT(ex, timeout, clock) \ + for (int64_t wait_start = ::webrtc::TimeMillis(); \ + !(ex) && ::webrtc::TimeMillis() < wait_start + (timeout);) { \ + (clock).AdvanceTime(webrtc::TimeDelta::Millis(1)); \ } -// This returns the result of the test in res, so that we don't re-evaluate -// the expression in the XXXX_WAIT macros below, since that causes problems -// when the expression is only true the first time you check it. -#define SIMULATED_WAIT_(ex, timeout, res, clock) \ - do { \ - int64_t start = rtc::TimeMillis(); \ - res = (ex); \ - while (!res && rtc::TimeMillis() < start + (timeout)) { \ - (clock).AdvanceTime(webrtc::TimeDelta::Millis(1)); \ - res = (ex); \ - } \ - } while (0) - -// The typical EXPECT_XXXX, but done until true or a timeout with a fake clock. 
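Of the macros in rtc_base/gunit.h, only WAIT, WAIT_ and SIMULATED_WAIT survive these hunks; the EXPECT_*_WAIT and ASSERT_*_WAIT family is removed. A small sketch of the surviving pair, where done is a hypothetical condition flipped elsewhere on the current thread:

#include "rtc_base/fake_clock.h"
#include "rtc_base/gunit.h"
#include "rtc_base/thread.h"

void WaitExamples(bool& done) {
  // Pumps webrtc::Thread::Current() for up to one second of wall-clock time.
  WAIT(done, /*timeout=*/1000);

  // Same idea against a fake clock: time advances 1 ms per loop iteration
  // instead of sleeping.
  webrtc::ScopedFakeClock fake_clock;
  SIMULATED_WAIT(done, /*timeout=*/1000, fake_clock);
}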
-#define EXPECT_TRUE_SIMULATED_WAIT(ex, timeout, clock) \ - do { \ - bool res; \ - SIMULATED_WAIT_(ex, timeout, res, clock); \ - if (!res) { \ - EXPECT_TRUE(ex); \ - } \ - } while (0) - -#define EXPECT_EQ_SIMULATED_WAIT(v1, v2, timeout, clock) \ - GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ - if (bool res = true) { \ - SIMULATED_WAIT_(v1 == v2, timeout, res, clock); \ - if (!res) \ - goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__); \ - } else \ - GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : EXPECT_EQ(v1, v2) - -#define ASSERT_TRUE_SIMULATED_WAIT(ex, timeout, clock) \ - GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ - if (bool res = true) { \ - SIMULATED_WAIT_(ex, timeout, res, clock); \ - if (!res) \ - goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__); \ - } else \ - GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : ASSERT_TRUE(ex) - -#define ASSERT_EQ_SIMULATED_WAIT(v1, v2, timeout, clock) \ - GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ - if (bool res = true) { \ - SIMULATED_WAIT_(v1 == v2, timeout, res, clock); \ - if (!res) \ - goto GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__); \ - } else \ - GTEST_CONCAT_TOKEN_(gunit_label_, __LINE__) : ASSERT_EQ(v1, v2) - -// Usage: EXPECT_PRED_FORMAT2(AssertStartsWith, text, "prefix"); -testing::AssertionResult AssertStartsWith(const char* text_expr, - const char* prefix_expr, - absl::string_view text, - absl::string_view prefix); - -// Usage: EXPECT_PRED_FORMAT2(AssertStringContains, str, "substring"); -testing::AssertionResult AssertStringContains(const char* str_expr, - const char* substr_expr, - absl::string_view str, - absl::string_view substr); - #endif // RTC_BASE_GUNIT_H_ diff --git a/rtc_base/http_common.cc b/rtc_base/http_common.cc deleted file mode 100644 index 621b854b53..0000000000 --- a/rtc_base/http_common.cc +++ /dev/null @@ -1,555 +0,0 @@ -/* - * Copyright 2004 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include "absl/strings/string_view.h" - -#if defined(WEBRTC_WIN) -#include -#include -#include - -#define SECURITY_WIN32 -#include -#endif - -#include // for isspace -#include // for sprintf - -#include // for pair -#include - -#include "absl/strings/match.h" -#include "rtc_base/crypt_string.h" // for CryptString -#include "rtc_base/http_common.h" -#include "rtc_base/logging.h" -#include "rtc_base/message_digest.h" -#include "rtc_base/socket_address.h" -#include "rtc_base/string_utils.h" -#include "rtc_base/strings/string_builder.h" -#include "rtc_base/third_party/base64/base64.h" // for Base64 -#include "rtc_base/zero_memory.h" // for ExplicitZeroMemory - -namespace rtc { -namespace { -#if defined(WEBRTC_WIN) && !defined(WINUWP) -/////////////////////////////////////////////////////////////////////////////// -// ConstantToLabel can be used to easily generate string names from constant -// values. This can be useful for logging descriptive names of error messages. -// Usage: -// const ConstantToLabel LIBRARY_ERRORS[] = { -// KLABEL(SOME_ERROR), -// KLABEL(SOME_OTHER_ERROR), -// ... 
-// LASTLABEL -// } -// -// int err = LibraryFunc(); -// LOG(LS_ERROR) << "LibraryFunc returned: " -// << GetErrorName(err, LIBRARY_ERRORS); -struct ConstantToLabel { - int value; - const char* label; -}; - -const char* LookupLabel(int value, const ConstantToLabel entries[]) { - for (int i = 0; entries[i].label; ++i) { - if (value == entries[i].value) { - return entries[i].label; - } - } - return 0; -} - -std::string GetErrorName(int err, const ConstantToLabel* err_table) { - if (err == 0) - return "No error"; - - if (err_table != 0) { - if (const char* value = LookupLabel(err, err_table)) - return value; - } - - char buffer[16]; - snprintf(buffer, sizeof(buffer), "0x%08x", err); - return buffer; -} - -#define KLABEL(x) \ - { x, #x } -#define LASTLABEL \ - { 0, 0 } - -const ConstantToLabel SECURITY_ERRORS[] = { - KLABEL(SEC_I_COMPLETE_AND_CONTINUE), - KLABEL(SEC_I_COMPLETE_NEEDED), - KLABEL(SEC_I_CONTEXT_EXPIRED), - KLABEL(SEC_I_CONTINUE_NEEDED), - KLABEL(SEC_I_INCOMPLETE_CREDENTIALS), - KLABEL(SEC_I_RENEGOTIATE), - KLABEL(SEC_E_CERT_EXPIRED), - KLABEL(SEC_E_INCOMPLETE_MESSAGE), - KLABEL(SEC_E_INSUFFICIENT_MEMORY), - KLABEL(SEC_E_INTERNAL_ERROR), - KLABEL(SEC_E_INVALID_HANDLE), - KLABEL(SEC_E_INVALID_TOKEN), - KLABEL(SEC_E_LOGON_DENIED), - KLABEL(SEC_E_NO_AUTHENTICATING_AUTHORITY), - KLABEL(SEC_E_NO_CREDENTIALS), - KLABEL(SEC_E_NOT_OWNER), - KLABEL(SEC_E_OK), - KLABEL(SEC_E_SECPKG_NOT_FOUND), - KLABEL(SEC_E_TARGET_UNKNOWN), - KLABEL(SEC_E_UNKNOWN_CREDENTIALS), - KLABEL(SEC_E_UNSUPPORTED_FUNCTION), - KLABEL(SEC_E_UNTRUSTED_ROOT), - KLABEL(SEC_E_WRONG_PRINCIPAL), - LASTLABEL}; -#undef KLABEL -#undef LASTLABEL -#endif // defined(WEBRTC_WIN) && !defined(WINUWP) - -typedef std::pair HttpAttribute; -typedef std::vector HttpAttributeList; - -inline bool IsEndOfAttributeName(size_t pos, absl::string_view data) { - if (pos >= data.size()) - return true; - if (isspace(static_cast(data[pos]))) - return true; - // The reason for this complexity is that some attributes may contain trailing - // equal signs (like base64 tokens in Negotiate auth headers) - if ((pos + 1 < data.size()) && (data[pos] == '=') && - !isspace(static_cast(data[pos + 1])) && - (data[pos + 1] != '=')) { - return true; - } - return false; -} - -void HttpParseAttributes(absl::string_view data, - HttpAttributeList& attributes) { - size_t pos = 0; - const size_t len = data.size(); - while (true) { - // Skip leading whitespace - while ((pos < len) && isspace(static_cast(data[pos]))) { - ++pos; - } - - // End of attributes? - if (pos >= len) - return; - - // Find end of attribute name - size_t start = pos; - while (!IsEndOfAttributeName(pos, data)) { - ++pos; - } - - HttpAttribute attribute; - attribute.first.assign(data.data() + start, data.data() + pos); - - // Attribute has value? 
- if ((pos < len) && (data[pos] == '=')) { - ++pos; // Skip '=' - // Check if quoted value - if ((pos < len) && (data[pos] == '"')) { - while (++pos < len) { - if (data[pos] == '"') { - ++pos; - break; - } - if ((data[pos] == '\\') && (pos + 1 < len)) - ++pos; - attribute.second.append(1, data[pos]); - } - } else { - while ((pos < len) && !isspace(static_cast(data[pos])) && - (data[pos] != ',')) { - attribute.second.append(1, data[pos++]); - } - } - } - - attributes.push_back(attribute); - if ((pos < len) && (data[pos] == ',')) - ++pos; // Skip ',' - } -} - -bool HttpHasAttribute(const HttpAttributeList& attributes, - absl::string_view name, - std::string* value) { - for (HttpAttributeList::const_iterator it = attributes.begin(); - it != attributes.end(); ++it) { - if (it->first == name) { - if (value) { - *value = it->second; - } - return true; - } - } - return false; -} - -bool HttpHasNthAttribute(HttpAttributeList& attributes, - size_t index, - std::string* name, - std::string* value) { - if (index >= attributes.size()) - return false; - - if (name) - *name = attributes[index].first; - if (value) - *value = attributes[index].second; - return true; -} - -std::string quote(absl::string_view str) { - std::string result; - result.push_back('"'); - for (size_t i = 0; i < str.size(); ++i) { - if ((str[i] == '"') || (str[i] == '\\')) - result.push_back('\\'); - result.push_back(str[i]); - } - result.push_back('"'); - return result; -} - -#if defined(WEBRTC_WIN) && !defined(WINUWP) -struct NegotiateAuthContext : public HttpAuthContext { - CredHandle cred; - CtxtHandle ctx; - size_t steps; - bool specified_credentials; - - NegotiateAuthContext(absl::string_view auth, CredHandle c1, CtxtHandle c2) - : HttpAuthContext(auth), - cred(c1), - ctx(c2), - steps(0), - specified_credentials(false) {} - - ~NegotiateAuthContext() override { - DeleteSecurityContext(&ctx); - FreeCredentialsHandle(&cred); - } -}; -#endif // defined(WEBRTC_WIN) && !defined(WINUWP) - -} // anonymous namespace - -HttpAuthResult HttpAuthenticate(absl::string_view challenge, - const SocketAddress& server, - absl::string_view method, - absl::string_view uri, - absl::string_view username, - const CryptString& password, - HttpAuthContext*& context, - std::string& response, - std::string& auth_method) { - HttpAttributeList args; - HttpParseAttributes(challenge, args); - HttpHasNthAttribute(args, 0, &auth_method, nullptr); - - if (context && (context->auth_method != auth_method)) - return HAR_IGNORE; - - // BASIC - if (absl::EqualsIgnoreCase(auth_method, "basic")) { - if (context) - return HAR_CREDENTIALS; // Bad credentials - if (username.empty()) - return HAR_CREDENTIALS; // Missing credentials - - context = new HttpAuthContext(auth_method); - - // TODO(bugs.webrtc.org/8905): Convert sensitive to a CryptString and also - // return response as CryptString so contents get securely deleted - // automatically. 
- // std::string decoded = username + ":" + password; - size_t len = username.size() + password.GetLength() + 2; - char* sensitive = new char[len]; - size_t pos = strcpyn(sensitive, len, username); - pos += strcpyn(sensitive + pos, len - pos, ":"); - password.CopyTo(sensitive + pos, true); - - response = auth_method; - response.append(" "); - // TODO: create a sensitive-source version of Base64::encode - response.append(Base64::Encode(sensitive)); - ExplicitZeroMemory(sensitive, len); - delete[] sensitive; - return HAR_RESPONSE; - } - - // DIGEST - if (absl::EqualsIgnoreCase(auth_method, "digest")) { - if (context) - return HAR_CREDENTIALS; // Bad credentials - if (username.empty()) - return HAR_CREDENTIALS; // Missing credentials - - context = new HttpAuthContext(auth_method); - - std::string cnonce, ncount; - char buffer[256]; - snprintf(buffer, sizeof(buffer), "%d", static_cast(time(0))); - cnonce = MD5(buffer); - ncount = "00000001"; - - std::string realm, nonce, qop, opaque; - HttpHasAttribute(args, "realm", &realm); - HttpHasAttribute(args, "nonce", &nonce); - bool has_qop = HttpHasAttribute(args, "qop", &qop); - bool has_opaque = HttpHasAttribute(args, "opaque", &opaque); - - // TODO(bugs.webrtc.org/8905): Convert sensitive to a CryptString and also - // return response as CryptString so contents get securely deleted - // automatically. - // std::string A1 = username + ":" + realm + ":" + password; - size_t len = username.size() + realm.size() + password.GetLength() + 3; - char* sensitive = new char[len]; // A1 - size_t pos = strcpyn(sensitive, len, username); - pos += strcpyn(sensitive + pos, len - pos, ":"); - pos += strcpyn(sensitive + pos, len - pos, realm); - pos += strcpyn(sensitive + pos, len - pos, ":"); - password.CopyTo(sensitive + pos, true); - - std::string A2 = std::string(method) + ":" + std::string(uri); - std::string middle; - if (has_qop) { - qop = "auth"; - middle = nonce + ":" + ncount + ":" + cnonce + ":" + qop; - } else { - middle = nonce; - } - std::string HA1 = MD5(sensitive); - ExplicitZeroMemory(sensitive, len); - delete[] sensitive; - std::string HA2 = MD5(A2); - std::string dig_response = MD5(HA1 + ":" + middle + ":" + HA2); - - rtc::StringBuilder ss; - ss << auth_method; - ss << " username=" << quote(username); - ss << ", realm=" << quote(realm); - ss << ", nonce=" << quote(nonce); - ss << ", uri=" << quote(uri); - if (has_qop) { - ss << ", qop=" << qop; - ss << ", nc=" << ncount; - ss << ", cnonce=" << quote(cnonce); - } - ss << ", response=\"" << dig_response << "\""; - if (has_opaque) { - ss << ", opaque=" << quote(opaque); - } - response = ss.str(); - return HAR_RESPONSE; - } - -#if defined(WEBRTC_WIN) && !defined(WINUWP) -#if 1 - bool want_negotiate = absl::EqualsIgnoreCase(auth_method, "negotiate"); - bool want_ntlm = absl::EqualsIgnoreCase(auth_method, "ntlm"); - // SPNEGO & NTLM - if (want_negotiate || want_ntlm) { - const size_t MAX_MESSAGE = 12000, MAX_SPN = 256; - char out_buf[MAX_MESSAGE], spn[MAX_SPN]; - -#if 0 // Requires funky windows versions - DWORD len = MAX_SPN; - if (DsMakeSpn("HTTP", server.HostAsURIString().c_str(), nullptr, - server.port(), - 0, &len, spn) != ERROR_SUCCESS) { - RTC_LOG_F(LS_WARNING) << "(Negotiate) - DsMakeSpn failed"; - return HAR_IGNORE; - } -#else - snprintf(spn, MAX_SPN, "HTTP/%s", server.ToString().c_str()); -#endif - - SecBuffer out_sec; - out_sec.pvBuffer = out_buf; - out_sec.cbBuffer = sizeof(out_buf); - out_sec.BufferType = SECBUFFER_TOKEN; - - SecBufferDesc out_buf_desc; - out_buf_desc.ulVersion = 0; - 
out_buf_desc.cBuffers = 1; - out_buf_desc.pBuffers = &out_sec; - - const ULONG NEG_FLAGS_DEFAULT = - // ISC_REQ_ALLOCATE_MEMORY - ISC_REQ_CONFIDENTIALITY - //| ISC_REQ_EXTENDED_ERROR - //| ISC_REQ_INTEGRITY - | ISC_REQ_REPLAY_DETECT | ISC_REQ_SEQUENCE_DETECT - //| ISC_REQ_STREAM - //| ISC_REQ_USE_SUPPLIED_CREDS - ; - - ::TimeStamp lifetime; - SECURITY_STATUS ret = S_OK; - ULONG ret_flags = 0, flags = NEG_FLAGS_DEFAULT; - - bool specify_credentials = !username.empty(); - size_t steps = 0; - - // uint32_t now = Time(); - - NegotiateAuthContext* neg = static_cast(context); - if (neg) { - const size_t max_steps = 10; - if (++neg->steps >= max_steps) { - RTC_LOG(LS_WARNING) << "AsyncHttpsProxySocket::Authenticate(Negotiate) " - "too many retries"; - return HAR_ERROR; - } - steps = neg->steps; - - std::string challenge, decoded_challenge; - if (HttpHasNthAttribute(args, 1, &challenge, nullptr) && - Base64::Decode(challenge, Base64::DO_STRICT, &decoded_challenge, - nullptr)) { - SecBuffer in_sec; - in_sec.pvBuffer = const_cast(decoded_challenge.data()); - in_sec.cbBuffer = static_cast(decoded_challenge.size()); - in_sec.BufferType = SECBUFFER_TOKEN; - - SecBufferDesc in_buf_desc; - in_buf_desc.ulVersion = 0; - in_buf_desc.cBuffers = 1; - in_buf_desc.pBuffers = &in_sec; - - ret = InitializeSecurityContextA( - &neg->cred, &neg->ctx, spn, flags, 0, SECURITY_NATIVE_DREP, - &in_buf_desc, 0, &neg->ctx, &out_buf_desc, &ret_flags, &lifetime); - if (FAILED(ret)) { - RTC_LOG(LS_ERROR) << "InitializeSecurityContext returned: " - << GetErrorName(ret, SECURITY_ERRORS); - return HAR_ERROR; - } - } else if (neg->specified_credentials) { - // Try again with default credentials - specify_credentials = false; - delete context; - context = neg = 0; - } else { - return HAR_CREDENTIALS; - } - } - - if (!neg) { - unsigned char userbuf[256], passbuf[256], domainbuf[16]; - SEC_WINNT_AUTH_IDENTITY_A auth_id, *pauth_id = 0; - if (specify_credentials) { - memset(&auth_id, 0, sizeof(auth_id)); - size_t len = password.GetLength() + 1; - char* sensitive = new char[len]; - password.CopyTo(sensitive, true); - absl::string_view::size_type pos = username.find('\\'); - if (pos == absl::string_view::npos) { - auth_id.UserLength = static_cast( - std::min(sizeof(userbuf) - 1, username.size())); - memcpy(userbuf, username.data(), auth_id.UserLength); - userbuf[auth_id.UserLength] = 0; - auth_id.DomainLength = 0; - domainbuf[auth_id.DomainLength] = 0; - auth_id.PasswordLength = static_cast( - std::min(sizeof(passbuf) - 1, password.GetLength())); - memcpy(passbuf, sensitive, auth_id.PasswordLength); - passbuf[auth_id.PasswordLength] = 0; - } else { - auth_id.UserLength = static_cast( - std::min(sizeof(userbuf) - 1, username.size() - pos - 1)); - memcpy(userbuf, username.data() + pos + 1, auth_id.UserLength); - userbuf[auth_id.UserLength] = 0; - auth_id.DomainLength = - static_cast(std::min(sizeof(domainbuf) - 1, pos)); - memcpy(domainbuf, username.data(), auth_id.DomainLength); - domainbuf[auth_id.DomainLength] = 0; - auth_id.PasswordLength = static_cast( - std::min(sizeof(passbuf) - 1, password.GetLength())); - memcpy(passbuf, sensitive, auth_id.PasswordLength); - passbuf[auth_id.PasswordLength] = 0; - } - ExplicitZeroMemory(sensitive, len); - delete[] sensitive; - auth_id.User = userbuf; - auth_id.Domain = domainbuf; - auth_id.Password = passbuf; - auth_id.Flags = SEC_WINNT_AUTH_IDENTITY_ANSI; - pauth_id = &auth_id; - RTC_LOG(LS_VERBOSE) - << "Negotiate protocol: Using specified credentials"; - } else { - RTC_LOG(LS_VERBOSE) << 
"Negotiate protocol: Using default credentials"; - } - - CredHandle cred; - ret = AcquireCredentialsHandleA( - 0, const_cast(want_negotiate ? NEGOSSP_NAME_A : NTLMSP_NAME_A), - SECPKG_CRED_OUTBOUND, 0, pauth_id, 0, 0, &cred, &lifetime); - if (ret != SEC_E_OK) { - RTC_LOG(LS_ERROR) << "AcquireCredentialsHandle error: " - << GetErrorName(ret, SECURITY_ERRORS); - return HAR_IGNORE; - } - - // CSecBufferBundle<5, CSecBufferBase::FreeSSPI> sb_out; - - CtxtHandle ctx; - ret = InitializeSecurityContextA(&cred, 0, spn, flags, 0, - SECURITY_NATIVE_DREP, 0, 0, &ctx, - &out_buf_desc, &ret_flags, &lifetime); - if (FAILED(ret)) { - RTC_LOG(LS_ERROR) << "InitializeSecurityContext returned: " - << GetErrorName(ret, SECURITY_ERRORS); - FreeCredentialsHandle(&cred); - return HAR_IGNORE; - } - - RTC_DCHECK(!context); - context = neg = new NegotiateAuthContext(auth_method, cred, ctx); - neg->specified_credentials = specify_credentials; - neg->steps = steps; - } - - if ((ret == SEC_I_COMPLETE_NEEDED) || - (ret == SEC_I_COMPLETE_AND_CONTINUE)) { - ret = CompleteAuthToken(&neg->ctx, &out_buf_desc); - RTC_LOG(LS_VERBOSE) << "CompleteAuthToken returned: " - << GetErrorName(ret, SECURITY_ERRORS); - if (FAILED(ret)) { - return HAR_ERROR; - } - } - - std::string decoded(out_buf, out_buf + out_sec.cbBuffer); - response = auth_method; - response.append(" "); - response.append(Base64::Encode(decoded)); - return HAR_RESPONSE; - } -#endif -#endif // defined(WEBRTC_WIN) && !defined(WINUWP) - - return HAR_IGNORE; -} - -////////////////////////////////////////////////////////////////////// - -} // namespace rtc diff --git a/rtc_base/http_common.h b/rtc_base/http_common.h deleted file mode 100644 index 06e42c6703..0000000000 --- a/rtc_base/http_common.h +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2004 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_HTTP_COMMON_H_ -#define RTC_BASE_HTTP_COMMON_H_ - -#include - -#include "absl/strings/string_view.h" - -namespace rtc { - -class CryptString; -class SocketAddress; - -////////////////////////////////////////////////////////////////////// -// Http Authentication -////////////////////////////////////////////////////////////////////// - -struct HttpAuthContext { - std::string auth_method; - HttpAuthContext(absl::string_view auth) : auth_method(auth) {} - virtual ~HttpAuthContext() {} -}; - -enum HttpAuthResult { HAR_RESPONSE, HAR_IGNORE, HAR_CREDENTIALS, HAR_ERROR }; - -// 'context' is used by this function to record information between calls. -// Start by passing a null pointer, then pass the same pointer each additional -// call. When the authentication attempt is finished, delete the context. -// TODO(bugs.webrtc.org/8905): Change "response" to "ZeroOnFreeBuffer". 
-HttpAuthResult HttpAuthenticate(absl::string_view challenge, - const SocketAddress& server, - absl::string_view method, - absl::string_view uri, - absl::string_view username, - const CryptString& password, - HttpAuthContext*& context, - std::string& response, - std::string& auth_method); - -////////////////////////////////////////////////////////////////////// - -} // namespace rtc - -#endif // RTC_BASE_HTTP_COMMON_H_ diff --git a/rtc_base/ifaddrs_android.cc b/rtc_base/ifaddrs_android.cc index 6474fb7244..8388bfec71 100644 --- a/rtc_base/ifaddrs_android.cc +++ b/rtc_base/ifaddrs_android.cc @@ -15,11 +15,11 @@ #include #include #include -#include +#include // no-presubmit-check #include #include #include -#include +#include // no-presubmit-check #include #include #include @@ -37,7 +37,7 @@ const int kMaxReadSize = 4096; } // namespace -namespace rtc { +namespace webrtc { int set_ifname(struct ifaddrs* ifaddr, int interface) { char buf[IFNAMSIZ] = {0}; @@ -223,5 +223,5 @@ void freeifaddrs(struct ifaddrs* addrs) { } } -} // namespace rtc +} // namespace webrtc #endif // defined(WEBRTC_ANDROID) diff --git a/rtc_base/ifaddrs_android.h b/rtc_base/ifaddrs_android.h index 1a5b662f06..82d7369a6f 100644 --- a/rtc_base/ifaddrs_android.h +++ b/rtc_base/ifaddrs_android.h @@ -12,7 +12,7 @@ #define RTC_BASE_IFADDRS_ANDROID_H_ #include -#include +#include // no-presubmit-check // Implementation of getifaddrs for Android. // Fills out a list of ifaddr structs (see below) which contain information @@ -28,11 +28,11 @@ struct ifaddrs { // We don't need them (yet?). }; -namespace rtc { +namespace webrtc { int getifaddrs(struct ifaddrs** result); void freeifaddrs(struct ifaddrs* addrs); -} // namespace rtc +} // namespace webrtc #endif // RTC_BASE_IFADDRS_ANDROID_H_ diff --git a/rtc_base/ifaddrs_converter.cc b/rtc_base/ifaddrs_converter.cc index d963efd6ef..de949aa575 100644 --- a/rtc_base/ifaddrs_converter.cc +++ b/rtc_base/ifaddrs_converter.cc @@ -10,7 +10,9 @@ #include "rtc_base/ifaddrs_converter.h" -namespace rtc { +#include "rtc_base/ip_address.h" + +namespace webrtc { IfAddrsConverter::IfAddrsConverter() {} @@ -29,7 +31,7 @@ bool IfAddrsConverter::ConvertIfAddrsToIPAddress( return true; } case AF_INET6: { - int ip_attributes = IPV6_ADDRESS_FLAG_NONE; + int ip_attributes = webrtc::IPV6_ADDRESS_FLAG_NONE; if (!ConvertNativeAttributesToIPAttributes(interface, &ip_attributes)) { return false; } @@ -47,9 +49,9 @@ bool IfAddrsConverter::ConvertIfAddrsToIPAddress( } bool IfAddrsConverter::ConvertNativeAttributesToIPAttributes( - const struct ifaddrs* interface, + const struct ifaddrs* /* interface */, int* ip_attributes) { - *ip_attributes = IPV6_ADDRESS_FLAG_NONE; + *ip_attributes = webrtc::IPV6_ADDRESS_FLAG_NONE; return true; } @@ -59,4 +61,4 @@ IfAddrsConverter* CreateIfAddrsConverter() { return new IfAddrsConverter(); } #endif -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/ifaddrs_converter.h b/rtc_base/ifaddrs_converter.h index bd6957d01e..6e8cd63c85 100644 --- a/rtc_base/ifaddrs_converter.h +++ b/rtc_base/ifaddrs_converter.h @@ -11,15 +11,17 @@ #ifndef RTC_BASE_IFADDRS_CONVERTER_H_ #define RTC_BASE_IFADDRS_CONVERTER_H_ +// IWYU pragma: begin_exports #if defined(WEBRTC_ANDROID) #include "rtc_base/ifaddrs_android.h" -#else +#elif defined(WEBRTC_POSIX) #include #endif // WEBRTC_ANDROID +// IWYU pragma: end_exports #include "rtc_base/ip_address.h" -namespace rtc { +namespace webrtc { // This class converts native interface addresses to our internal IPAddress // class. 
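The Android getifaddrs()/freeifaddrs() pair declared above mirrors the POSIX contract, so the usual iterate-then-free pattern applies. A minimal sketch, meaningful only in builds where this Android shim is compiled in:

#include "rtc_base/ifaddrs_android.h"

void EnumerateInterfaces() {
  struct ifaddrs* addrs = nullptr;
  if (webrtc::getifaddrs(&addrs) != 0) {
    return;
  }
  for (struct ifaddrs* cur = addrs; cur != nullptr; cur = cur->ifa_next) {
    // cur->ifa_name, cur->ifa_addr and cur->ifa_netmask are filled in just
    // as they would be by the libc implementation.
  }
  webrtc::freeifaddrs(addrs);
}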
Subclasses should override ConvertNativeToIPAttributes to implement @@ -40,6 +42,15 @@ class IfAddrsConverter { IfAddrsConverter* CreateIfAddrsConverter(); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::CreateIfAddrsConverter; +using ::webrtc::IfAddrsConverter; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_IFADDRS_CONVERTER_H_ diff --git a/rtc_base/internal/default_socket_server.cc b/rtc_base/internal/default_socket_server.cc index 5632b989fc..043c2ce850 100644 --- a/rtc_base/internal/default_socket_server.cc +++ b/rtc_base/internal/default_socket_server.cc @@ -20,14 +20,14 @@ #include "rtc_base/physical_socket_server.h" #endif -namespace rtc { +namespace webrtc { std::unique_ptr CreateDefaultSocketServer() { #if defined(__native_client__) - return std::unique_ptr(new rtc::NullSocketServer); + return std::unique_ptr(new webrtc::NullSocketServer); #else - return std::unique_ptr(new rtc::PhysicalSocketServer); + return std::unique_ptr(new PhysicalSocketServer); #endif } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/internal/default_socket_server.h b/rtc_base/internal/default_socket_server.h index 5b3489f613..173db0b250 100644 --- a/rtc_base/internal/default_socket_server.h +++ b/rtc_base/internal/default_socket_server.h @@ -15,10 +15,18 @@ #include "rtc_base/socket_server.h" -namespace rtc { +namespace webrtc { std::unique_ptr CreateDefaultSocketServer(); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::CreateDefaultSocketServer; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_INTERNAL_DEFAULT_SOCKET_SERVER_H_ diff --git a/rtc_base/ip_address.cc b/rtc_base/ip_address.cc index 9e436e3c78..92be158e70 100644 --- a/rtc_base/ip_address.cc +++ b/rtc_base/ip_address.cc @@ -8,30 +8,27 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include +#include +#include +#include #if defined(WEBRTC_POSIX) -#include -#include - -#include "absl/strings/string_view.h" #ifdef OPENBSD #include #endif #ifndef __native_client__ -#include #endif #include #endif +#include "absl/strings/string_view.h" #include "rtc_base/byte_order.h" #include "rtc_base/ip_address.h" #include "rtc_base/net_helpers.h" #include "rtc_base/string_utils.h" -#if defined(WEBRTC_WIN) -#include "rtc_base/win32.h" -#endif // WEBRTC_WIN - -namespace rtc { +namespace webrtc { // Prefixes used for categorizing IPv6 addresses. static const in6_addr kV4MappedPrefix = { @@ -49,7 +46,7 @@ static in_addr ExtractMappedAddress(const in6_addr& addr); uint32_t IPAddress::v4AddressAsHostOrderInteger() const { if (family_ == AF_INET) { - return NetworkToHost32(u_.ip4.s_addr); + return webrtc::NetworkToHost32(u_.ip4.s_addr); } else { return 0; } @@ -115,8 +112,8 @@ bool IPAddress::operator<(const IPAddress& other) const { // Comparing addresses of the same family. 
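CreateDefaultSocketServer(), moved to webrtc:: above, is typically used to back a Thread; it returns a NullSocketServer on __native_client__ and a PhysicalSocketServer everywhere else. A brief sketch of that common pairing:

#include <memory>

#include "rtc_base/internal/default_socket_server.h"
#include "rtc_base/socket_server.h"
#include "rtc_base/thread.h"

void RunWithDefaultSocketServer() {
  std::unique_ptr<webrtc::SocketServer> ss = webrtc::CreateDefaultSocketServer();
  webrtc::Thread thread(ss.get());
  thread.Start();
  // ... post work to the thread here ...
  thread.Stop();
}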
switch (family_) { case AF_INET: { - return NetworkToHost32(u_.ip4.s_addr) < - NetworkToHost32(other.u_.ip4.s_addr); + return webrtc::NetworkToHost32(u_.ip4.s_addr) < + webrtc::NetworkToHost32(other.u_.ip4.s_addr); } case AF_INET6: { return memcmp(&u_.ip6.s6_addr, &other.u_.ip6.s6_addr, 16) < 0; @@ -143,7 +140,7 @@ std::string IPAddress::ToString() const { if (family_ == AF_INET6) { src = &u_.ip6; } - if (!rtc::inet_ntop(family_, src, buf, sizeof(buf))) { + if (!inet_ntop(family_, src, buf, sizeof(buf))) { return std::string(); } return std::string(buf); @@ -215,7 +212,7 @@ std::string InterfaceAddress::ToString() const { std::string result = IPAddress::ToString(); if (family() == AF_INET6) - result += "|flags:0x" + rtc::ToHex(ipv6_flags()); + result += "|flags:0x" + ToHex(ipv6_flags()); return result; } @@ -282,9 +279,9 @@ bool IPFromString(absl::string_view str, IPAddress* out) { return false; } in_addr addr; - if (rtc::inet_pton(AF_INET, str, &addr) == 0) { + if (inet_pton(AF_INET, str, &addr) == 0) { in6_addr addr6; - if (rtc::inet_pton(AF_INET6, str, &addr6) == 0) { + if (inet_pton(AF_INET6, str, &addr6) == 0) { *out = IPAddress(); return false; } @@ -374,9 +371,9 @@ IPAddress TruncateIP(const IPAddress& ip, int length) { return IPAddress(INADDR_ANY); } int mask = (0xFFFFFFFF << (32 - length)); - uint32_t host_order_ip = NetworkToHost32(ip.ipv4_address().s_addr); + uint32_t host_order_ip = webrtc::NetworkToHost32(ip.ipv4_address().s_addr); in_addr masked; - masked.s_addr = HostToNetwork32(host_order_ip & mask); + masked.s_addr = webrtc::HostToNetwork32(host_order_ip & mask); return IPAddress(masked); } else if (ip.family() == AF_INET6) { if (length > 127) { @@ -393,8 +390,8 @@ IPAddress TruncateIP(const IPAddress& ip, int length) { uint32_t* v6_as_ints = reinterpret_cast(&v6addr.s6_addr); for (int i = 0; i < 4; ++i) { if (i == position) { - uint32_t host_order_inner = NetworkToHost32(v6_as_ints[i]); - v6_as_ints[i] = HostToNetwork32(host_order_inner & inner_mask); + uint32_t host_order_inner = webrtc::NetworkToHost32(v6_as_ints[i]); + v6_as_ints[i] = webrtc::HostToNetwork32(host_order_inner & inner_mask); } else if (i > position) { v6_as_ints[i] = 0; } @@ -409,7 +406,7 @@ int CountIPMaskBits(const IPAddress& mask) { int bits = 0; switch (mask.family()) { case AF_INET: { - word_to_count = NetworkToHost32(mask.ipv4_address().s_addr); + word_to_count = webrtc::NetworkToHost32(mask.ipv4_address().s_addr); break; } case AF_INET6: { @@ -423,7 +420,7 @@ int CountIPMaskBits(const IPAddress& mask) { } } if (i < 4) { - word_to_count = NetworkToHost32(v6_as_ints[i]); + word_to_count = webrtc::NetworkToHost32(v6_as_ints[i]); } bits = (i * 32); break; @@ -557,22 +554,22 @@ int IPAddressPrecedence(const IPAddress& ip) { IPAddress GetLoopbackIP(int family) { if (family == AF_INET) { - return rtc::IPAddress(INADDR_LOOPBACK); + return IPAddress(INADDR_LOOPBACK); } if (family == AF_INET6) { - return rtc::IPAddress(in6addr_loopback); + return IPAddress(in6addr_loopback); } - return rtc::IPAddress(); + return IPAddress(); } IPAddress GetAnyIP(int family) { if (family == AF_INET) { - return rtc::IPAddress(INADDR_ANY); + return IPAddress(INADDR_ANY); } if (family == AF_INET6) { - return rtc::IPAddress(in6addr_any); + return IPAddress(in6addr_any); } - return rtc::IPAddress(); + return IPAddress(); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/ip_address.h b/rtc_base/ip_address.h index 897e165565..83074cc5a2 100644 --- a/rtc_base/ip_address.h +++ b/rtc_base/ip_address.h @@ -11,16 +11,15 
@@ #ifndef RTC_BASE_IP_ADDRESS_H_ #define RTC_BASE_IP_ADDRESS_H_ +#include #if defined(WEBRTC_POSIX) #include #include -#include -#include +#include // IWYU pragma: export #include "absl/strings/string_view.h" #endif #if defined(WEBRTC_WIN) -#include #include #endif #include @@ -32,8 +31,9 @@ #include "rtc_base/win32.h" #endif #include "absl/strings/string_view.h" +#include "rtc_base/net_helpers.h" #include "rtc_base/system/rtc_export.h" -namespace rtc { +namespace webrtc { enum IPv6AddressFlag { IPV6_ADDRESS_FLAG_NONE = 0x00, @@ -62,7 +62,7 @@ class RTC_EXPORT IPAddress { explicit IPAddress(uint32_t ip_in_host_byte_order) : family_(AF_INET) { memset(&u_, 0, sizeof(u_)); - u_.ip4.s_addr = HostToNetwork32(ip_in_host_byte_order); + u_.ip4.s_addr = webrtc::HostToNetwork32(ip_in_host_byte_order); } IPAddress(const IPAddress& other) : family_(other.family_) { @@ -82,13 +82,6 @@ class RTC_EXPORT IPAddress { bool operator<(const IPAddress& other) const; bool operator>(const IPAddress& other) const; -#ifdef WEBRTC_UNIT_TEST - inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) - std::ostream& os) { // no-presubmit-check TODO(webrtc:8982) - return os << ToString(); - } -#endif // WEBRTC_UNIT_TEST - int family() const { return family_; } in_addr ipv4_address() const; in6_addr ipv6_address() const; @@ -162,8 +155,8 @@ RTC_EXPORT bool IPFromString(absl::string_view str, int flags, InterfaceAddress* out); bool IPIsAny(const IPAddress& ip); -bool IPIsLoopback(const IPAddress& ip); -bool IPIsLinkLocal(const IPAddress& ip); +RTC_EXPORT bool IPIsLoopback(const IPAddress& ip); +RTC_EXPORT bool IPIsLinkLocal(const IPAddress& ip); // Identify a private network address like "192.168.111.222" // (see https://en.wikipedia.org/wiki/Private_network ) bool IPIsPrivateNetwork(const IPAddress& ip); @@ -201,6 +194,42 @@ IPAddress GetAnyIP(int family); // counted. int CountIPMaskBits(const IPAddress& mask); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::CountIPMaskBits; +using ::webrtc::GetAnyIP; +using ::webrtc::GetLoopbackIP; +using ::webrtc::HashIP; +using ::webrtc::InterfaceAddress; +using ::webrtc::IPAddress; +using ::webrtc::IPAddressPrecedence; +using ::webrtc::IPFromAddrInfo; +using ::webrtc::IPFromString; +using ::webrtc::IPIs6Bone; +using ::webrtc::IPIs6To4; +using ::webrtc::IPIsAny; +using ::webrtc::IPIsLinkLocal; +using ::webrtc::IPIsLoopback; +using ::webrtc::IPIsMacBased; +using ::webrtc::IPIsPrivate; +using ::webrtc::IPIsPrivateNetwork; +using ::webrtc::IPIsSharedNetwork; +using ::webrtc::IPIsSiteLocal; +using ::webrtc::IPIsTeredo; +using ::webrtc::IPIsULA; +using ::webrtc::IPIsUnspec; +using ::webrtc::IPIsV4Compatibility; +using ::webrtc::IPIsV4Mapped; +using ::webrtc::IPV6_ADDRESS_FLAG_DEPRECATED; +using ::webrtc::IPV6_ADDRESS_FLAG_NONE; +using ::webrtc::IPV6_ADDRESS_FLAG_TEMPORARY; +using ::webrtc::IPv6AddressFlag; +using ::webrtc::TruncateIP; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_IP_ADDRESS_H_ diff --git a/rtc_base/ip_address_unittest.cc b/rtc_base/ip_address_unittest.cc index 9ca05c95fe..5f5988d260 100644 --- a/rtc_base/ip_address_unittest.cc +++ b/rtc_base/ip_address_unittest.cc @@ -13,7 +13,7 @@ #include "absl/strings/string_view.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { static const unsigned int kIPv4AddrSize = 4; static const unsigned int kIPv6AddrSize = 16; @@ -61,10 +61,6 @@ static const std::string kIPv6PublicAddrAnonymizedString = "2401:fa00:4:x:x:x:x:x"; static const std::string kIPv6PublicAddr2AnonymizedString = "2401:0:0:x:x:x:x:x"; -static const std::string kIPv4MappedAnyAddrString = "::ffff:0:0"; -static const std::string kIPv4MappedRFC1918AddrString = "::ffff:c0a8:701"; -static const std::string kIPv4MappedLoopbackAddrString = "::ffff:7f00:1"; -static const std::string kIPv4MappedPublicAddrString = "::ffff:102:0304"; static const std::string kIPv4MappedV4StyleAddrString = "::ffff:192.168.7.1"; static const std::string kIPv4BrokenString1 = "192.168.7."; @@ -526,7 +522,7 @@ TEST(IPAddressTest, TestIPFromAddrInfo) { test_info.ai_next = &next_info; // Check that we can get an IPv4 address out. test_info.ai_addr = reinterpret_cast(&expected4); - expected4.sin_addr.s_addr = HostToNetwork32(kIPv4PublicAddr); + expected4.sin_addr.s_addr = webrtc::HostToNetwork32(kIPv4PublicAddr); expected4.sin_family = AF_INET; IPAddress expected(kIPv4PublicAddr); IPAddress addr; @@ -978,4 +974,4 @@ TEST(IPAddressTest, TestInterfaceAddress) { EXPECT_NE(addr1, addr5); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/java/src/org/webrtc/Logging.java b/rtc_base/java/src/org/webrtc/Logging.java index e7a9921f4d..fd070dfe6c 100644 --- a/rtc_base/java/src/org/webrtc/Logging.java +++ b/rtc_base/java/src/org/webrtc/Logging.java @@ -24,8 +24,8 @@ * log messages will then be redirected to the injected Loggable, except those with a severity lower * than the specified severity, which will be discarded. 
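On the native side, the same severity threshold is applied by the webrtc::LogMessage machinery patched in rtc_base/logging.* later in this change; a minimal C++ sketch (the function name is illustrative, the LogMessage and RTC_LOG calls are from this patch):

#include "rtc_base/logging.h"

void ConfigureNativeLogging() {
  // Messages below LS_WARNING are discarded before they reach the platform
  // debug output (Logcat on Android), mirroring the severity filtering
  // described above.
  webrtc::LogMessage::LogToDebug(webrtc::LS_WARNING);
  RTC_LOG(LS_INFO) << "discarded";
  RTC_LOG(LS_WARNING) << "emitted";
}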
* - * It is also possible to switch to native logging (rtc::LogMessage) if one of the following static - * functions are called from the app: + * It is also possible to switch to native logging (webrtc::LogMessage) if one + * of the following static functions are called from the app: * - Logging.enableLogThreads * - Logging.enableLogTimeStamps * - Logging.enableLogToDebugOutput @@ -62,31 +62,6 @@ static void deleteInjectedLoggable() { loggable = null; } - // TODO(solenberg): Remove once dependent projects updated. - @Deprecated - public enum TraceLevel { - TRACE_NONE(0x0000), - TRACE_STATEINFO(0x0001), - TRACE_WARNING(0x0002), - TRACE_ERROR(0x0004), - TRACE_CRITICAL(0x0008), - TRACE_APICALL(0x0010), - TRACE_DEFAULT(0x00ff), - TRACE_MODULECALL(0x0020), - TRACE_MEMORY(0x0100), - TRACE_TIMER(0x0200), - TRACE_STREAM(0x0400), - TRACE_DEBUG(0x0800), - TRACE_INFO(0x1000), - TRACE_TERSEINFO(0x2000), - TRACE_ALL(0xffff); - - public final int level; - TraceLevel(int level) { - this.level = level; - } - } - // Keep in sync with webrtc/rtc_base/logging.h:LoggingSeverity. public enum Severity { LS_VERBOSE, LS_INFO, LS_WARNING, LS_ERROR, LS_NONE } @@ -98,15 +73,11 @@ public static void enableLogTimeStamps() { nativeEnableLogTimeStamps(); } - // TODO(solenberg): Remove once dependent projects updated. - @Deprecated - public static void enableTracing(String path, EnumSet levels) {} - // Enable diagnostic logging for messages of `severity` to the platform debug // output. On Android, the output will be directed to Logcat. // Note: this function starts collecting the output of the RTC_LOG() macros. // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. - @SuppressWarnings("NoSynchronizedMethodCheck") + @SuppressWarnings({"EnumOrdinal", "NoSynchronizedMethodCheck"}) public static synchronized void enableLogToDebugOutput(Severity severity) { if (loggable != null) { throw new IllegalStateException( @@ -117,6 +88,7 @@ public static synchronized void enableLogToDebugOutput(Severity severity) { loggingEnabled = true; } + @SuppressWarnings("EnumOrdinal") public static void log(Severity severity, String tag, String message) { if (tag == null || message == null) { throw new IllegalArgumentException("Logging tag or message may not be null."); diff --git a/rtc_base/log_sinks.cc b/rtc_base/log_sinks.cc index f511948ed3..7f5b3835a7 100644 --- a/rtc_base/log_sinks.cc +++ b/rtc_base/log_sinks.cc @@ -17,8 +17,10 @@ #include "absl/strings/string_view.h" #include "rtc_base/checks.h" +#include "rtc_base/file_rotating_stream.h" +#include "rtc_base/logging.h" -namespace rtc { +namespace webrtc { FileRotatingLogSink::FileRotatingLogSink(absl::string_view log_dir_path, absl::string_view log_prefix, @@ -83,4 +85,4 @@ CallSessionFileRotatingLogSink::CallSessionFileRotatingLogSink( CallSessionFileRotatingLogSink::~CallSessionFileRotatingLogSink() {} -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/log_sinks.h b/rtc_base/log_sinks.h index 62a93b85a8..a1b4b872f1 100644 --- a/rtc_base/log_sinks.h +++ b/rtc_base/log_sinks.h @@ -20,7 +20,7 @@ #include "rtc_base/file_rotating_stream.h" #include "rtc_base/logging.h" -namespace rtc { +namespace webrtc { // Log sink that uses a FileRotatingStream to write to disk. // Init() must be called before adding this sink. @@ -75,6 +75,15 @@ class CallSessionFileRotatingLogSink : public FileRotatingLogSink { const CallSessionFileRotatingLogSink&) = delete; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. 
+// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::CallSessionFileRotatingLogSink; +using ::webrtc::FileRotatingLogSink; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_LOG_SINKS_H_ diff --git a/rtc_base/logging.cc b/rtc_base/logging.cc index 61a3c667ba..1a07e9641e 100644 --- a/rtc_base/logging.cc +++ b/rtc_base/logging.cc @@ -12,6 +12,10 @@ #include +#include +#include +#include + #if RTC_LOG_ENABLED() #if defined(WEBRTC_WIN) @@ -53,7 +57,7 @@ static const int kMaxLogLineSize = 1024 - 60; #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" -namespace rtc { +namespace webrtc { namespace { // By default, release builds don't log, debug builds at info level @@ -78,16 +82,16 @@ const char* FilenameFromPath(const char* file) { } // Global lock for log subsystem, only needed to serialize access to streams_. -webrtc::Mutex& GetLoggingLock() { - static webrtc::Mutex& mutex = *new webrtc::Mutex(); +Mutex& GetLoggingLock() { + static Mutex& mutex = *new Mutex(); return mutex; } } // namespace std::string LogLineRef::DefaultLogLine() const { - rtc::StringBuilder log_output; - if (timestamp_ != webrtc::Timestamp::MinusInfinity()) { + StringBuilder log_output; + if (timestamp_ != Timestamp::MinusInfinity()) { // TODO(kwiberg): Switch to absl::StrFormat, if binary size is ok. char timestamp[50]; // Maximum string length of an int64_t is 20. int len = @@ -145,7 +149,7 @@ LogMessage::LogMessage(const char* file, // Also ensure WallClockStartTime is initialized, so that it matches // LogStartTime. WallClockStartTime(); - log_line_.set_timestamp(webrtc::Timestamp::Millis(time)); + log_line_.set_timestamp(Timestamp::Millis(time)); } if (log_thread_) { @@ -212,7 +216,7 @@ LogMessage::~LogMessage() { OutputToDebug(log_line_); } - webrtc::MutexLock lock(&GetLoggingLock()); + MutexLock lock(&GetLoggingLock()); for (LogSink* entry = streams_; entry != nullptr; entry = entry->next_) { if (log_line_.severity() >= entry->min_severity_) { entry->OnLogMessage(log_line_); @@ -220,13 +224,13 @@ LogMessage::~LogMessage() { } } -void LogMessage::AddTag(const char* tag) { +void LogMessage::AddTag([[maybe_unused]] const char* tag) { #ifdef WEBRTC_ANDROID log_line_.set_tag(tag); #endif } -rtc::StringBuilder& LogMessage::stream() { +StringBuilder& LogMessage::stream() { return print_stream_; } @@ -257,7 +261,7 @@ void LogMessage::LogTimestamps(bool on) { void LogMessage::LogToDebug(LoggingSeverity min_sev) { g_dbg_sev = min_sev; - webrtc::MutexLock lock(&GetLoggingLock()); + MutexLock lock(&GetLoggingLock()); UpdateMinLogSeverity(); } @@ -266,7 +270,7 @@ void LogMessage::SetLogToStderr(bool log_to_stderr) { } int LogMessage::GetLogToStream(LogSink* stream) { - webrtc::MutexLock lock(&GetLoggingLock()); + MutexLock lock(&GetLoggingLock()); LoggingSeverity sev = LS_NONE; for (LogSink* entry = streams_; entry != nullptr; entry = entry->next_) { if (stream == nullptr || stream == entry) { @@ -277,7 +281,7 @@ int LogMessage::GetLogToStream(LogSink* stream) { } void LogMessage::AddLogToStream(LogSink* stream, LoggingSeverity min_sev) { - webrtc::MutexLock lock(&GetLoggingLock()); + MutexLock lock(&GetLoggingLock()); stream->min_severity_ = min_sev; stream->next_ = streams_; streams_ = stream; @@ -286,7 +290,7 @@ void LogMessage::AddLogToStream(LogSink* stream, LoggingSeverity min_sev) { } void LogMessage::RemoveLogToStream(LogSink* stream) { - webrtc::MutexLock 
lock(&GetLoggingLock()); + MutexLock lock(&GetLoggingLock()); for (LogSink** entry = &streams_; *entry != nullptr; entry = &(*entry)->next_) { if (*entry == stream) { @@ -513,9 +517,6 @@ void Log(const LogArgType* fmt, ...) { case LogArgType::kDouble: log_message.stream() << va_arg(args, double); break; - case LogArgType::kLongDouble: - log_message.stream() << va_arg(args, long double); - break; case LogArgType::kCharP: { const char* s = va_arg(args, const char*); log_message.stream() << (s ? s : "(null)"); @@ -528,8 +529,8 @@ void Log(const LogArgType* fmt, ...) { log_message.stream() << *va_arg(args, const absl::string_view*); break; case LogArgType::kVoidP: - log_message.stream() << rtc::ToHex( - reinterpret_cast(va_arg(args, const void*))); + log_message.stream() + << ToHex(reinterpret_cast(va_arg(args, const void*))); break; default: RTC_DCHECK_NOTREACHED(); @@ -542,10 +543,10 @@ void Log(const LogArgType* fmt, ...) { } } // namespace webrtc_logging_impl -} // namespace rtc +} // namespace webrtc #endif -namespace rtc { +namespace webrtc { // Default implementation, override is recomended. void LogSink::OnLogMessage(const LogLineRef& log_line) { #if defined(WEBRTC_ANDROID) @@ -583,4 +584,4 @@ void LogSink::OnLogMessage(absl::string_view msg, void LogSink::OnLogMessage(absl::string_view msg) { OnLogMessage(std::string(msg)); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/logging.h b/rtc_base/logging.h index b171cfe11e..b011acbefa 100644 --- a/rtc_base/logging.h +++ b/rtc_base/logging.h @@ -21,7 +21,7 @@ // RTC_LOG(sev) logs the given stream at severity "sev", which must be a // compile-time constant of the LoggingSeverity type, without the namespace // prefix. -// RTC_LOG_IF(sev, condition) logs the given stream at severitye "sev" if +// RTC_LOG_IF(sev, condition) logs the given stream at severity "sev" if // "condition" is true. // RTC_LOG_V(sev) Like RTC_LOG(), but sev is a run-time variable of the // LoggingSeverity type (basically, it just doesn't prepend the namespace). 
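A short usage sketch of the macros described above (the surrounding function and its parameters are illustrative only):

#include "rtc_base/logging.h"

void LoggingExamples(bool verbose, int frames_dropped) {
  // Compile-time severity, streamed like an std::ostream.
  RTC_LOG(LS_INFO) << "dropped " << frames_dropped << " frames";
  // Emitted only when the condition holds.
  RTC_LOG_IF(LS_WARNING, frames_dropped > 0) << "dropping frames";
  // Run-time severity: pass a LoggingSeverity value instead of a name.
  webrtc::LoggingSeverity sev =
      verbose ? webrtc::LS_VERBOSE : webrtc::LS_INFO;
  RTC_LOG_V(sev) << "negotiation done";
}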
@@ -51,19 +51,21 @@ #include #include +#include #include // no-presubmit-check TODO(webrtc:8982) #include #include #include #include "absl/base/attributes.h" -#include "absl/meta/type_traits.h" +#include "absl/strings/has_absl_stringify.h" +#include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/units/timestamp.h" #include "rtc_base/platform_thread_types.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/system/inline.h" +#include "rtc_base/type_traits.h" #if !defined(NDEBUG) || defined(DLOG_ALWAYS_ON) #define RTC_DLOG_IS_ON 1 @@ -77,7 +79,7 @@ #define RTC_LOG_ENABLED() 1 #endif -namespace rtc { +namespace webrtc { ////////////////////////////////////////////////////////////////////// // The meanings of the levels are: @@ -118,8 +120,8 @@ class LogLineRef { absl::string_view message() const { return message_; } absl::string_view filename() const { return filename_; } int line() const { return line_; } - absl::optional thread_id() const { return thread_id_; } - webrtc::Timestamp timestamp() const { return timestamp_; } + std::optional thread_id() const { return thread_id_; } + Timestamp timestamp() const { return timestamp_; } absl::string_view tag() const { return tag_; } LoggingSeverity severity() const { return severity_; } @@ -134,18 +136,18 @@ class LogLineRef { void set_message(std::string message) { message_ = std::move(message); } void set_filename(absl::string_view filename) { filename_ = filename; } void set_line(int line) { line_ = line; } - void set_thread_id(absl::optional thread_id) { + void set_thread_id(std::optional thread_id) { thread_id_ = thread_id; } - void set_timestamp(webrtc::Timestamp timestamp) { timestamp_ = timestamp; } + void set_timestamp(Timestamp timestamp) { timestamp_ = timestamp; } void set_tag(absl::string_view tag) { tag_ = tag; } void set_severity(LoggingSeverity severity) { severity_ = severity; } std::string message_; absl::string_view filename_; int line_ = 0; - absl::optional thread_id_; - webrtc::Timestamp timestamp_ = webrtc::Timestamp::MinusInfinity(); + std::optional thread_id_; + Timestamp timestamp_ = Timestamp::MinusInfinity(); // The default Android debug output tag. absl::string_view tag_ = "libjingle"; // The severity level of this message @@ -173,7 +175,7 @@ class LogSink { virtual void OnLogMessage(const LogLineRef& line); private: - friend class ::rtc::LogMessage; + friend class LogMessage; #if RTC_LOG_ENABLED() // Members for LogMessage class to keep linked list of the registered sinks. LogSink* next_ = nullptr; @@ -282,6 +284,7 @@ inline Val MakeVal( inline Val MakeVal(double x) { return {x}; } + inline Val MakeVal(long double x) { return {x}; } @@ -313,11 +316,12 @@ inline Val MakeVal( // The enum class types are not implicitly convertible to arithmetic types. 
template ::value && - !std::is_arithmetic::value>* = nullptr> -inline decltype(MakeVal(std::declval>())) MakeVal( + std::enable_if_t::value && + !absl::HasAbslStringify::value && + !std::is_arithmetic::value>* = nullptr> +inline decltype(MakeVal(std::declval>())) MakeVal( T x) { - return {static_cast>(x)}; + return {static_cast>(x)}; } #ifdef WEBRTC_ANDROID @@ -327,33 +331,25 @@ inline Val MakeVal( } #endif -template -struct has_to_log_string : std::false_type {}; -template -struct has_to_log_string())), - std::string>::value>> : std::true_type {}; - -template ::value>* = nullptr> +template ::value>* = nullptr> ToStringVal MakeVal(const T& x) { - return {ToLogString(x)}; + return {absl::StrCat(x)}; } // Handle arbitrary types other than the above by falling back to stringstream. // TODO(bugs.webrtc.org/9278): Get rid of this overload when callers don't need // it anymore. No in-tree caller does, but some external callers still do. -template < - typename T, - typename T1 = absl::decay_t, - absl::enable_if_t::value && - !std::is_same::value && - !std::is_same::value && - !has_to_log_string::value && +template , + std::enable_if_t::value && // + !std::is_same::value && // + !std::is_same::value && // + !absl::HasAbslStringify::value && #ifdef WEBRTC_ANDROID - !std::is_same::value && + !std::is_same::value && // #endif - !std::is_same::value>* = nullptr> + !std::is_same::value>* = nullptr> ToStringVal MakeVal(const T& x) { std::ostringstream os; // no-presubmit-check TODO(webrtc:8982) os << x; @@ -376,18 +372,7 @@ class LogStreamer; template <> class LogStreamer<> final { public: - template ())), - absl::enable_if_t::value || - std::is_enum::value>* = nullptr> - RTC_FORCE_INLINE LogStreamer operator<<(U arg) const { - return LogStreamer(MakeVal(arg), this); - } - - template ())), - absl::enable_if_t::value && - !std::is_enum::value>* = nullptr> + template ()))> RTC_FORCE_INLINE LogStreamer operator<<(const U& arg) const { return LogStreamer(MakeVal(arg), this); } @@ -407,18 +392,7 @@ class LogStreamer final { RTC_FORCE_INLINE LogStreamer(T arg, const LogStreamer* prior) : arg_(arg), prior_(prior) {} - template ())), - absl::enable_if_t::value || - std::is_enum::value>* = nullptr> - RTC_FORCE_INLINE LogStreamer operator<<(U arg) const { - return LogStreamer(MakeVal(arg), this); - } - - template ())), - absl::enable_if_t::value && - !std::is_enum::value>* = nullptr> + template ()))> RTC_FORCE_INLINE LogStreamer operator<<(const U& arg) const { return LogStreamer(MakeVal(arg), this); } @@ -457,7 +431,7 @@ class LogMessageVoidify { // This has to be an operator with a precedence lower than << but // higher than ?: template - void operator&(LogStreamer&& streamer) {} + void operator&(LogStreamer&& /* streamer */) {} }; } // namespace webrtc_logging_impl @@ -493,7 +467,7 @@ class LogMessage { LogMessage& operator=(const LogMessage&) = delete; void AddTag(const char* tag); - rtc::StringBuilder& stream(); + StringBuilder& stream(); // Returns the time at which this function was called for the first time. // The time will be used as the logging start time. 
// If this is not called externally, the LogMessage ctor also calls it, in @@ -559,7 +533,7 @@ class LogMessage { ~LogMessage() = default; inline void AddTag(const char* tag) {} - inline rtc::StringBuilder& stream() { return print_stream_; } + inline StringBuilder& stream() { return print_stream_; } inline static int64_t LogStartTime() { return 0; } inline static uint32_t WallClockStartTime() { return 0; } inline static void LogThreads(bool on = true) {} @@ -635,29 +609,30 @@ class LogMessage { #endif // RTC_LOG_ENABLED() // The stringbuilder that buffers the formatted message before output - rtc::StringBuilder print_stream_; + StringBuilder print_stream_; }; ////////////////////////////////////////////////////////////////////// // Logging Helpers ////////////////////////////////////////////////////////////////////// -#define RTC_LOG_FILE_LINE(sev, file, line) \ - ::rtc::webrtc_logging_impl::LogCall() & \ - ::rtc::webrtc_logging_impl::LogStreamer<>() \ - << ::rtc::webrtc_logging_impl::LogMetadata(file, line, sev) +#define RTC_LOG_FILE_LINE(sev, file, line) \ + ::webrtc::webrtc_logging_impl::LogCall() & \ + ::webrtc::webrtc_logging_impl::LogStreamer<>() \ + << ::webrtc::webrtc_logging_impl::LogMetadata(file, line, sev) -#define RTC_LOG(sev) \ - !::rtc::LogMessage::IsNoop<::rtc::sev>() && \ - RTC_LOG_FILE_LINE(::rtc::sev, __FILE__, __LINE__) +#define RTC_LOG(sev) \ + !::webrtc::LogMessage::IsNoop<::webrtc::sev>() && \ + RTC_LOG_FILE_LINE(::webrtc::sev, __FILE__, __LINE__) -#define RTC_LOG_IF(sev, condition) \ - !::rtc::LogMessage::IsNoop<::rtc::sev>() && (condition) && \ - RTC_LOG_FILE_LINE(::rtc::sev, __FILE__, __LINE__) +#define RTC_LOG_IF(sev, condition) \ + !::webrtc::LogMessage::IsNoop<::webrtc::sev>() && (condition) && \ + RTC_LOG_FILE_LINE(::webrtc::sev, __FILE__, __LINE__) // The _V version is for when a variable is passed in. -#define RTC_LOG_V(sev) \ - !::rtc::LogMessage::IsNoop(sev) && RTC_LOG_FILE_LINE(sev, __FILE__, __LINE__) +#define RTC_LOG_V(sev) \ + !::webrtc::LogMessage::IsNoop(sev) && \ + RTC_LOG_FILE_LINE(sev, __FILE__, __LINE__) // The _F version prefixes the message with the current function name. 
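For example, a minimal sketch using the _F variants defined just below (the Widget type is hypothetical):

#include "rtc_base/logging.h"

struct Widget {  // hypothetical type, only to give `this` a meaning
  void Close() {
    // Prefixes the message with the enclosing function name.
    RTC_LOG_F(LS_INFO) << "shutting down";
    // Additionally prefixes `this`, which helps when several instances log
    // interleaved.
    RTC_LOG_T_F(LS_VERBOSE) << "done";
  }
};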
#if (defined(__GNUC__) && !defined(NDEBUG)) || defined(WANT_PRETTY_LOG_F) @@ -673,19 +648,19 @@ class LogMessage { #define RTC_LOG_T_F(sev) RTC_LOG(sev) << this << ": " << __FUNCTION__ << ": " #endif -#define RTC_LOG_CHECK_LEVEL(sev) ::rtc::LogCheckLevel(::rtc::sev) -#define RTC_LOG_CHECK_LEVEL_V(sev) ::rtc::LogCheckLevel(sev) +#define RTC_LOG_CHECK_LEVEL(sev) ::webrtc::LogCheckLevel(::webrtc::sev) +#define RTC_LOG_CHECK_LEVEL_V(sev) ::webrtc::LogCheckLevel(sev) inline bool LogCheckLevel(LoggingSeverity sev) { return (LogMessage::GetMinLogSeverity() <= sev); } -#define RTC_LOG_E(sev, ctx, err) \ - !::rtc::LogMessage::IsNoop<::rtc::sev>() && \ - ::rtc::webrtc_logging_impl::LogCall() & \ - ::rtc::webrtc_logging_impl::LogStreamer<>() \ - << ::rtc::webrtc_logging_impl::LogMetadataErr { \ - {__FILE__, __LINE__, ::rtc::sev}, ::rtc::ERRCTX_##ctx, (err) \ +#define RTC_LOG_E(sev, ctx, err) \ + !::webrtc::LogMessage::IsNoop<::webrtc::sev>() && \ + ::webrtc::webrtc_logging_impl::LogCall() & \ + ::webrtc::webrtc_logging_impl::LogStreamer<>() \ + << ::webrtc::webrtc_logging_impl::LogMetadataErr { \ + {__FILE__, __LINE__, ::webrtc::sev}, ::webrtc::ERRCTX_##ctx, (err) \ } #define RTC_LOG_T(sev) RTC_LOG(sev) << this << ": " @@ -718,12 +693,12 @@ inline const char* AdaptString(const std::string& str) { } } // namespace webrtc_logging_impl -#define RTC_LOG_TAG(sev, tag) \ - !::rtc::LogMessage::IsNoop(sev) && \ - ::rtc::webrtc_logging_impl::LogCall() & \ - ::rtc::webrtc_logging_impl::LogStreamer<>() \ - << ::rtc::webrtc_logging_impl::LogMetadataTag { \ - sev, ::rtc::webrtc_logging_impl::AdaptString(tag) \ +#define RTC_LOG_TAG(sev, tag) \ + !::webrtc::LogMessage::IsNoop(sev) && \ + ::webrtc::webrtc_logging_impl::LogCall() & \ + ::webrtc::webrtc_logging_impl::LogStreamer<>() \ + << ::webrtc::webrtc_logging_impl::LogMetadataTag { \ + sev, ::webrtc::webrtc_logging_impl::AdaptString(tag) \ } #else @@ -742,10 +717,10 @@ inline const char* AdaptString(const std::string& str) { #define RTC_DLOG_F(sev) RTC_LOG_F(sev) #define RTC_DLOG_IF_F(sev, condition) RTC_LOG_IF_F(sev, condition) #else -#define RTC_DLOG_EAT_STREAM_PARAMS() \ - while (false) \ - ::rtc::webrtc_logging_impl::LogMessageVoidify() & \ - (::rtc::webrtc_logging_impl::LogStreamer<>()) +#define RTC_DLOG_EAT_STREAM_PARAMS() \ + while (false) \ + ::webrtc::webrtc_logging_impl::LogMessageVoidify() & \ + (::webrtc::webrtc_logging_impl::LogStreamer<>()) #define RTC_DLOG(sev) RTC_DLOG_EAT_STREAM_PARAMS() #define RTC_DLOG_IF(sev, condition) RTC_DLOG_EAT_STREAM_PARAMS() #define RTC_DLOG_V(sev) RTC_DLOG_EAT_STREAM_PARAMS() @@ -753,6 +728,22 @@ inline const char* AdaptString(const std::string& str) { #define RTC_DLOG_IF_F(sev, condition) RTC_DLOG_EAT_STREAM_PARAMS() #endif +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
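A sketch of what the aliases below buy downstream code, assuming the embedder's build defines WEBRTC_ALLOW_DEPRECATED_NAMESPACES (the function name is illustrative):

#include "rtc_base/logging.h"

void Spellings() {
#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES
  rtc::LogMessage::LogTimestamps(true);     // legacy spelling, via the aliases
#endif
  webrtc::LogMessage::LogTimestamps(true);  // preferred spelling
}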
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::LoggingSeverity; +using ::webrtc::LogLineRef; +using ::webrtc::LogMessage; +using ::webrtc::LogSink; +using ::webrtc::LS_ERROR; +using ::webrtc::LS_INFO; +using ::webrtc::LS_NONE; +using ::webrtc::LS_VERBOSE; +using ::webrtc::LS_WARNING; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_LOGGING_H_ diff --git a/rtc_base/logging_unittest.cc b/rtc_base/logging_unittest.cc index b05907e74a..de250403c9 100644 --- a/rtc_base/logging_unittest.cc +++ b/rtc_base/logging_unittest.cc @@ -12,23 +12,22 @@ #if RTC_LOG_ENABLED() -#include - -#include +#include #include "absl/strings/string_view.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -#include "rtc_base/event.h" #include "rtc_base/platform_thread.h" #include "rtc_base/time_utils.h" #include "test/gmock.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { +using ::testing::HasSubstr; + #if defined(WEBRTC_WIN) constexpr char kFakeFilePath[] = "some\\path\\myfile.cc"; #else @@ -95,8 +94,8 @@ TEST(LogTest, SingleStream) { RTC_LOG(LS_INFO) << "INFO"; RTC_LOG(LS_VERBOSE) << "VERBOSE"; - EXPECT_NE(std::string::npos, str.find("INFO")); - EXPECT_EQ(std::string::npos, str.find("VERBOSE")); + EXPECT_THAT(str, HasSubstr("INFO")); + EXPECT_THAT(str, Not(HasSubstr("VERBOSE"))); int i = 1; long l = 2l; @@ -112,32 +111,32 @@ TEST(LogTest, SingleStream) { const char* null_string = nullptr; void* p = reinterpret_cast(0xabcd); - // Log all suported types(except doubles/floats) as a sanity-check. + // Log all supported types(except doubles/floats) as a sanity-check. RTC_LOG(LS_INFO) << "|" << i << "|" << l << "|" << ll << "|" << u << "|" << ul << "|" << ull << "|" << s1.c_str() << "|" << s2 << "|" << absl::string_view(s3) << "|" << p << "|" << null_string << "|"; // Signed integers - EXPECT_NE(std::string::npos, str.find("|1|")); - EXPECT_NE(std::string::npos, str.find("|2|")); - EXPECT_NE(std::string::npos, str.find("|3|")); + EXPECT_THAT(str, HasSubstr("|1|")); + EXPECT_THAT(str, HasSubstr("|2|")); + EXPECT_THAT(str, HasSubstr("|3|")); // Unsigned integers - EXPECT_NE(std::string::npos, str.find("|4|")); - EXPECT_NE(std::string::npos, str.find("|5|")); - EXPECT_NE(std::string::npos, str.find("|6|")); + EXPECT_THAT(str, HasSubstr("|4|")); + EXPECT_THAT(str, HasSubstr("|5|")); + EXPECT_THAT(str, HasSubstr("|6|")); // Strings - EXPECT_NE(std::string::npos, str.find("|char*|")); - EXPECT_NE(std::string::npos, str.find("|std::string|")); - EXPECT_NE(std::string::npos, str.find("|absl::stringview|")); + EXPECT_THAT(str, HasSubstr("|char*|")); + EXPECT_THAT(str, HasSubstr("|std::string|")); + EXPECT_THAT(str, HasSubstr("|absl::stringview|")); // void* - EXPECT_NE(std::string::npos, str.find("|abcd|")); + EXPECT_THAT(str, HasSubstr("|abcd|")); // null char* - EXPECT_NE(std::string::npos, str.find("|(null)|")); + EXPECT_THAT(str, HasSubstr("|(null)|")); LogMessage::RemoveLogToStream(&stream); EXPECT_EQ(LS_NONE, LogMessage::GetLogToStream(&stream)); @@ -150,7 +149,7 @@ TEST(LogTest, LogIfLogIfConditionIsTrue) { LogMessage::AddLogToStream(&stream, LS_INFO); RTC_LOG_IF(LS_INFO, true) << "Hello"; - EXPECT_NE(std::string::npos, str.find("Hello")); + EXPECT_THAT(str, HasSubstr("Hello")); LogMessage::RemoveLogToStream(&stream); } @@ -161,7 +160,7 @@ TEST(LogTest, LogIfDontLogIfConditionIsFalse) { LogMessage::AddLogToStream(&stream, LS_INFO); RTC_LOG_IF(LS_INFO, false) << "Hello"; - EXPECT_EQ(std::string::npos, str.find("Hello")); + 
EXPECT_THAT(str, Not(HasSubstr("Hello"))); LogMessage::RemoveLogToStream(&stream); } @@ -172,8 +171,8 @@ TEST(LogTest, LogIfFLogIfConditionIsTrue) { LogMessage::AddLogToStream(&stream, LS_INFO); RTC_LOG_IF_F(LS_INFO, true) << "Hello"; - EXPECT_NE(std::string::npos, str.find(__FUNCTION__)); - EXPECT_NE(std::string::npos, str.find("Hello")); + EXPECT_THAT(str, HasSubstr(__FUNCTION__)); + EXPECT_THAT(str, HasSubstr("Hello")); LogMessage::RemoveLogToStream(&stream); } @@ -184,8 +183,8 @@ TEST(LogTest, LogIfFDontLogIfConditionIsFalse) { LogMessage::AddLogToStream(&stream, LS_INFO); RTC_LOG_IF_F(LS_INFO, false) << "Not"; - EXPECT_EQ(std::string::npos, str.find(__FUNCTION__)); - EXPECT_EQ(std::string::npos, str.find("Not")); + EXPECT_THAT(str, Not(HasSubstr(__FUNCTION__))); + EXPECT_THAT(str, Not(HasSubstr("Not"))); LogMessage::RemoveLogToStream(&stream); } @@ -289,9 +288,9 @@ TEST(LogTest, CheckFilePathParsed) { #if defined(WEBRTC_ANDROID) EXPECT_NE(nullptr, strstr(tag, "myfile.cc")); - EXPECT_NE(std::string::npos, str.find("100")); + EXPECT_THAT(str, HasSubstr("100")); #else - EXPECT_NE(std::string::npos, str.find("(myfile.cc:100)")); + EXPECT_THAT(str, HasSubstr("(myfile.cc:100)")); #endif LogMessage::RemoveLogToStream(&stream); } @@ -304,8 +303,8 @@ TEST(LogTest, CheckTagAddedToStringInDefaultOnLogMessageAndroid) { EXPECT_EQ(LS_INFO, LogMessage::GetLogToStream(&stream)); RTC_LOG_TAG(LS_INFO, "my_tag") << "INFO"; - EXPECT_NE(std::string::npos, str.find("INFO")); - EXPECT_NE(std::string::npos, str.find("my_tag")); + EXPECT_THAT(str, HasSubstr("INFO")); + EXPECT_THAT(str, HasSubstr("my_tag")); } #endif @@ -316,7 +315,9 @@ TEST(LogTest, Perf) { LogMessage::AddLogToStream(&stream, LS_VERBOSE); const std::string message(80, 'X'); - { LogMessageForTesting sanity_check_msg(__FILE__, __LINE__, LS_VERBOSE); } + { + LogMessageForTesting sanity_check_msg(__FILE__, __LINE__, LS_VERBOSE); + } // We now know how many bytes the logging framework will tag onto every msg. 
const size_t logging_overhead = str.size(); @@ -346,10 +347,10 @@ TEST(LogTest, EnumsAreSupported) { LogSinkImpl stream(&str); LogMessage::AddLogToStream(&stream, LS_INFO); RTC_LOG(LS_INFO) << "[" << TestEnum::kValue0 << "]"; - EXPECT_NE(std::string::npos, str.find("[0]")); - EXPECT_EQ(std::string::npos, str.find("[1]")); + EXPECT_THAT(str, HasSubstr("[0]")); + EXPECT_THAT(str, Not(HasSubstr("[1]"))); RTC_LOG(LS_INFO) << "[" << TestEnum::kValue1 << "]"; - EXPECT_NE(std::string::npos, str.find("[1]")); + EXPECT_THAT(str, HasSubstr("[1]")); LogMessage::RemoveLogToStream(&stream); } @@ -367,20 +368,51 @@ TEST(LogTest, NoopSeverityDoesNotRunStringFormatting) { EXPECT_FALSE(was_called); } -struct TestStruct {}; -std::string ToLogString(TestStruct foo) { - return "bar"; -} +struct StructWithStringfy { + template + friend void AbslStringify(Sink& sink, const StructWithStringfy& /*self*/) { + sink.Append("absl-stringify"); + } +}; -TEST(LogTest, ToLogStringUsedForUnknownTypes) { +TEST(LogTest, UseAbslStringForCustomTypes) { std::string str; LogSinkImpl stream(&str); LogMessage::AddLogToStream(&stream, LS_INFO); - TestStruct t; + StructWithStringfy t; + RTC_LOG(LS_INFO) << t; - EXPECT_THAT(str, ::testing::HasSubstr("bar")); + + EXPECT_THAT(str, HasSubstr("absl-stringify")); + + LogMessage::RemoveLogToStream(&stream); +} + +enum class TestEnumStringify { kValue0 = 0, kValue1 = 1 }; + +template +void AbslStringify(Sink& sink, TestEnumStringify value) { + switch (value) { + case TestEnumStringify::kValue0: + sink.Append("kValue0"); + break; + case TestEnumStringify::kValue1: + sink.Append("kValue1"); + break; + } +} + +TEST(LogTest, EnumSupportsAbslStringify) { + std::string str; + LogSinkImpl stream(&str); + LogMessage::AddLogToStream(&stream, LS_INFO); + RTC_LOG(LS_INFO) << "[" << TestEnumStringify::kValue0 << "]"; + EXPECT_THAT(str, HasSubstr("[kValue0]")); + EXPECT_THAT(str, Not(HasSubstr("[kValue1]"))); + RTC_LOG(LS_INFO) << "[" << TestEnumStringify::kValue1 << "]"; + EXPECT_THAT(str, HasSubstr("[kValue1]")); LogMessage::RemoveLogToStream(&stream); } -} // namespace rtc +} // namespace webrtc #endif // RTC_LOG_ENABLED() diff --git a/rtc_base/mac_ifaddrs_converter.cc b/rtc_base/mac_ifaddrs_converter.cc index eb82cf2024..dcf0daeefa 100644 --- a/rtc_base/mac_ifaddrs_converter.cc +++ b/rtc_base/mac_ifaddrs_converter.cc @@ -170,7 +170,7 @@ struct in6_ifreq { #endif // WEBRTC_IOS -namespace rtc { +namespace webrtc { namespace { @@ -279,4 +279,4 @@ IfAddrsConverter* CreateIfAddrsConverter() { return new MacIfAddrsConverter(); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/mdns_responder_interface.h b/rtc_base/mdns_responder_interface.h index 14ef9a202d..fb4c1c94cc 100644 --- a/rtc_base/mdns_responder_interface.h +++ b/rtc_base/mdns_responder_interface.h @@ -24,7 +24,7 @@ namespace webrtc { class MdnsResponderInterface { public: using NameCreatedCallback = - std::function; + std::function; using NameRemovedCallback = std::function; MdnsResponderInterface() = default; @@ -34,7 +34,7 @@ class MdnsResponderInterface { // there is no name mapped to it by this responder, and initializes the // reference count of this name to one. Otherwise the existing name mapped to // `addr` is returned and its reference count is incremented by one. 
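  // A hypothetical caller sketch (not part of this interface), assuming
  // NameCreatedCallback receives the address and the created name:
  //   responder->CreateNameForAddress(
  //       addr, [](const IPAddress& addr, absl::string_view name) {
  //         // Advertise `name` (e.g. "<uuid>.local") in place of `addr`.
  //       });
  // The mapping is later released with RemoveNameForAddress(addr, callback),
  // whose callback reports whether the last reference was dropped.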
- virtual void CreateNameForAddress(const rtc::IPAddress& addr, + virtual void CreateNameForAddress(const IPAddress& addr, NameCreatedCallback callback) = 0; // Decrements the reference count of the mapped name of `addr`, if // there is a map created previously via CreateNameForAddress; asynchronously @@ -42,7 +42,7 @@ class MdnsResponderInterface { // true via `callback` if the decremented reference count reaches zero. // Otherwise no operation is done and false is returned via `callback` // asynchronously. - virtual void RemoveNameForAddress(const rtc::IPAddress& addr, + virtual void RemoveNameForAddress(const IPAddress& addr, NameRemovedCallback callback) = 0; }; diff --git a/rtc_base/memory/BUILD.gn b/rtc_base/memory/BUILD.gn index ee3baa40fb..9a55af3ea9 100644 --- a/rtc_base/memory/BUILD.gn +++ b/rtc_base/memory/BUILD.gn @@ -33,10 +33,13 @@ rtc_library("fifo_buffer") { "fifo_buffer.h", ] deps = [ + "..:checks", + "..:macromagic", "..:stream", "..:threading", + "../../api:array_view", + "../../api:sequence_checker", "../../api/task_queue:pending_task_safety_flag", - "../synchronization:mutex", ] } @@ -51,6 +54,9 @@ rtc_library("unittests") { ":aligned_malloc", ":always_valid_pointer", ":fifo_buffer", + "..:stream", + "..:threading", + "../../api:array_view", "../../test:test_support", ] } diff --git a/rtc_base/memory/fifo_buffer.cc b/rtc_base/memory/fifo_buffer.cc index c159bc979f..8c102001e3 100644 --- a/rtc_base/memory/fifo_buffer.cc +++ b/rtc_base/memory/fifo_buffer.cc @@ -11,13 +11,20 @@ #include "rtc_base/memory/fifo_buffer.h" #include - +#include +#include +#include + +#include "api/array_view.h" +#include "api/sequence_checker.h" +#include "rtc_base/checks.h" +#include "rtc_base/stream.h" #include "rtc_base/thread.h" -namespace rtc { +namespace webrtc { FifoBuffer::FifoBuffer(size_t size) - : state_(SS_OPEN), + : state_(webrtc::SS_OPEN), buffer_(new char[size]), buffer_length_(size), data_length_(0), @@ -27,7 +34,7 @@ FifoBuffer::FifoBuffer(size_t size) } FifoBuffer::FifoBuffer(size_t size, Thread* owner) - : state_(SS_OPEN), + : state_(webrtc::SS_OPEN), buffer_(new char[size]), buffer_length_(size), data_length_(0), @@ -39,25 +46,25 @@ FifoBuffer::FifoBuffer(size_t size, Thread* owner) FifoBuffer::~FifoBuffer() {} bool FifoBuffer::GetBuffered(size_t* size) const { - webrtc::MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&callback_sequence_); *size = data_length_; return true; } StreamState FifoBuffer::GetState() const { - webrtc::MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&callback_sequence_); return state_; } -StreamResult FifoBuffer::Read(rtc::ArrayView buffer, +StreamResult FifoBuffer::Read(ArrayView buffer, size_t& bytes_read, int& error) { - webrtc::MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&callback_sequence_); const bool was_writable = data_length_ < buffer_length_; size_t copy = 0; StreamResult result = ReadLocked(buffer.data(), buffer.size(), ©); - if (result == SR_SUCCESS) { + if (result == webrtc::SR_SUCCESS) { // If read was successful then adjust the read position and number of // bytes buffered. 
read_position_ = (read_position_ + copy) % buffer_length_; @@ -66,40 +73,40 @@ StreamResult FifoBuffer::Read(rtc::ArrayView buffer, // if we were full before, and now we're not, post an event if (!was_writable && copy > 0) { - PostEvent(SE_WRITE, 0); + PostEvent(webrtc::SE_WRITE, 0); } } return result; } -StreamResult FifoBuffer::Write(rtc::ArrayView buffer, +StreamResult FifoBuffer::Write(ArrayView buffer, size_t& bytes_written, int& error) { - webrtc::MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&callback_sequence_); const bool was_readable = (data_length_ > 0); size_t copy = 0; StreamResult result = WriteLocked(buffer.data(), buffer.size(), ©); - if (result == SR_SUCCESS) { + if (result == webrtc::SR_SUCCESS) { // If write was successful then adjust the number of readable bytes. data_length_ += copy; bytes_written = copy; // if we didn't have any data to read before, and now we do, post an event if (!was_readable && copy > 0) { - PostEvent(SE_READ, 0); + PostEvent(webrtc::SE_READ, 0); } } return result; } void FifoBuffer::Close() { - webrtc::MutexLock lock(&mutex_); - state_ = SS_CLOSED; + RTC_DCHECK_RUN_ON(&callback_sequence_); + state_ = webrtc::SS_CLOSED; } const void* FifoBuffer::GetReadData(size_t* size) { - webrtc::MutexLock lock(&mutex_); + RTC_DCHECK_RUN_ON(&callback_sequence_); *size = (read_position_ + data_length_ <= buffer_length_) ? data_length_ : buffer_length_ - read_position_; @@ -107,19 +114,20 @@ const void* FifoBuffer::GetReadData(size_t* size) { } void FifoBuffer::ConsumeReadData(size_t size) { - webrtc::MutexLock lock(&mutex_); - RTC_DCHECK(size <= data_length_); + RTC_DCHECK_RUN_ON(&callback_sequence_); + RTC_DCHECK_LE(size, data_length_); const bool was_writable = data_length_ < buffer_length_; read_position_ = (read_position_ + size) % buffer_length_; data_length_ -= size; if (!was_writable && size > 0) { - PostEvent(SE_WRITE, 0); + PostEvent(webrtc::SE_WRITE, 0); } } void* FifoBuffer::GetWriteBuffer(size_t* size) { - webrtc::MutexLock lock(&mutex_); - if (state_ == SS_CLOSED) { + RTC_DCHECK_RUN_ON(&callback_sequence_); + + if (state_ == webrtc::SS_CLOSED) { return nullptr; } @@ -138,12 +146,12 @@ void* FifoBuffer::GetWriteBuffer(size_t* size) { } void FifoBuffer::ConsumeWriteBuffer(size_t size) { - webrtc::MutexLock lock(&mutex_); - RTC_DCHECK(size <= buffer_length_ - data_length_); + RTC_DCHECK_RUN_ON(&callback_sequence_); + RTC_DCHECK_LE(size, buffer_length_ - data_length_); const bool was_readable = (data_length_ > 0); data_length_ += size; if (!was_readable && size > 0) { - PostEvent(SE_READ, 0); + PostEvent(webrtc::SE_READ, 0); } } @@ -151,7 +159,7 @@ StreamResult FifoBuffer::ReadLocked(void* buffer, size_t bytes, size_t* bytes_read) { if (data_length_ == 0) { - return (state_ != SS_CLOSED) ? SR_BLOCK : SR_EOS; + return (state_ != webrtc::SS_CLOSED) ? 
webrtc::SR_BLOCK : webrtc::SR_EOS; } const size_t available = data_length_; @@ -165,18 +173,18 @@ StreamResult FifoBuffer::ReadLocked(void* buffer, if (bytes_read) { *bytes_read = copy; } - return SR_SUCCESS; + return webrtc::SR_SUCCESS; } StreamResult FifoBuffer::WriteLocked(const void* buffer, size_t bytes, size_t* bytes_written) { - if (state_ == SS_CLOSED) { - return SR_EOS; + if (state_ == webrtc::SS_CLOSED) { + return webrtc::SR_EOS; } if (data_length_ >= buffer_length_) { - return SR_BLOCK; + return webrtc::SR_BLOCK; } const size_t available = buffer_length_ - data_length_; @@ -191,7 +199,7 @@ StreamResult FifoBuffer::WriteLocked(const void* buffer, if (bytes_written) { *bytes_written = copy; } - return SR_SUCCESS; + return webrtc::SR_SUCCESS; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/memory/fifo_buffer.h b/rtc_base/memory/fifo_buffer.h index a225c688ac..a36717d1cb 100644 --- a/rtc_base/memory/fifo_buffer.h +++ b/rtc_base/memory/fifo_buffer.h @@ -11,13 +11,18 @@ #ifndef RTC_BASE_MEMORY_FIFO_BUFFER_H_ #define RTC_BASE_MEMORY_FIFO_BUFFER_H_ +#include +#include #include +#include "api/array_view.h" +#include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "rtc_base/stream.h" -#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" -namespace rtc { +namespace webrtc { // FifoBuffer allows for efficient, thread-safe buffering of data between // writer and reader. @@ -37,10 +42,10 @@ class FifoBuffer final : public StreamInterface { // StreamInterface methods StreamState GetState() const override; - StreamResult Read(rtc::ArrayView buffer, + StreamResult Read(ArrayView buffer, size_t& bytes_read, int& error) override; - StreamResult Write(rtc::ArrayView buffer, + StreamResult Write(ArrayView buffer, size_t& bytes_written, int& error) override; void Close() override; @@ -78,41 +83,50 @@ class FifoBuffer final : public StreamInterface { private: void PostEvent(int events, int err) { - owner_->PostTask(webrtc::SafeTask( - task_safety_.flag(), - [this, events, err]() { SignalEvent(this, events, err); })); + RTC_DCHECK_RUN_ON(owner_); + owner_->PostTask( + webrtc::SafeTask(task_safety_.flag(), [this, events, err]() { + RTC_DCHECK_RUN_ON(&callback_sequence_); + FireEvent(events, err); + })); } // Helper method that implements Read. Caller must acquire a lock // when calling this method. StreamResult ReadLocked(void* buffer, size_t bytes, size_t* bytes_read) - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(callback_sequence_); // Helper method that implements Write. Caller must acquire a lock // when calling this method. 
StreamResult WriteLocked(const void* buffer, size_t bytes, size_t* bytes_written) - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(callback_sequence_); - webrtc::ScopedTaskSafety task_safety_; + ScopedTaskSafety task_safety_; // keeps the opened/closed state of the stream - StreamState state_ RTC_GUARDED_BY(mutex_); + StreamState state_ RTC_GUARDED_BY(callback_sequence_); // the allocated buffer - std::unique_ptr buffer_ RTC_GUARDED_BY(mutex_); + std::unique_ptr buffer_ RTC_GUARDED_BY(callback_sequence_); // size of the allocated buffer const size_t buffer_length_; // amount of readable data in the buffer - size_t data_length_ RTC_GUARDED_BY(mutex_); + size_t data_length_ RTC_GUARDED_BY(callback_sequence_); // offset to the readable data - size_t read_position_ RTC_GUARDED_BY(mutex_); + size_t read_position_ RTC_GUARDED_BY(callback_sequence_); // stream callbacks are dispatched on this thread Thread* const owner_; - // object lock - mutable webrtc::Mutex mutex_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::FifoBuffer; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_MEMORY_FIFO_BUFFER_H_ diff --git a/rtc_base/memory/fifo_buffer_unittest.cc b/rtc_base/memory/fifo_buffer_unittest.cc index 27eb8d8b45..c78cf90cd5 100644 --- a/rtc_base/memory/fifo_buffer_unittest.cc +++ b/rtc_base/memory/fifo_buffer_unittest.cc @@ -12,12 +12,17 @@ #include +#include + +#include "api/array_view.h" +#include "rtc_base/stream.h" +#include "rtc_base/thread.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { TEST(FifoBufferTest, TestAll) { - rtc::AutoThread main_thread; + AutoThread main_thread; const size_t kSize = 16; const uint8_t in[kSize * 2 + 1] = "0123456789ABCDEFGHIJKLMNOPQRSTUV"; uint8_t out[kSize * 2]; @@ -29,57 +34,49 @@ TEST(FifoBufferTest, TestAll) { // Test assumptions about base state EXPECT_EQ(SS_OPEN, buf.GetState()); int error; - EXPECT_EQ(SR_BLOCK, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); + EXPECT_EQ(SR_BLOCK, buf.Read(MakeArrayView(out, kSize), bytes, error)); EXPECT_TRUE(nullptr != buf.GetWriteBuffer(&bytes)); EXPECT_EQ(kSize, bytes); buf.ConsumeWriteBuffer(0); // Try a full write - EXPECT_EQ(SR_SUCCESS, buf.Write(rtc::MakeArrayView(in, kSize), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Write(MakeArrayView(in, kSize), bytes, error)); EXPECT_EQ(kSize, bytes); // Try a write that should block - EXPECT_EQ(SR_BLOCK, buf.Write(rtc::MakeArrayView(in, kSize), bytes, error)); + EXPECT_EQ(SR_BLOCK, buf.Write(MakeArrayView(in, kSize), bytes, error)); // Try a full read - EXPECT_EQ(SR_SUCCESS, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Read(MakeArrayView(out, kSize), bytes, error)); EXPECT_EQ(kSize, bytes); EXPECT_EQ(0, memcmp(in, out, kSize)); // Try a read that should block - EXPECT_EQ(SR_BLOCK, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); + EXPECT_EQ(SR_BLOCK, buf.Read(MakeArrayView(out, kSize), bytes, error)); // Try a too-big write - EXPECT_EQ(SR_SUCCESS, - buf.Write(rtc::MakeArrayView(in, kSize * 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Write(MakeArrayView(in, kSize * 2), bytes, error)); EXPECT_EQ(bytes, kSize); // Try a too-big read - EXPECT_EQ(SR_SUCCESS, - buf.Read(rtc::MakeArrayView(out, kSize * 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, 
buf.Read(MakeArrayView(out, kSize * 2), bytes, error)); EXPECT_EQ(kSize, bytes); EXPECT_EQ(0, memcmp(in, out, kSize)); // Try some small writes and reads - EXPECT_EQ(SR_SUCCESS, - buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Write(MakeArrayView(in, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); - EXPECT_EQ(SR_SUCCESS, - buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Read(MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); - EXPECT_EQ(SR_SUCCESS, - buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Write(MakeArrayView(in, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); - EXPECT_EQ(SR_SUCCESS, - buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Write(MakeArrayView(in, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); - EXPECT_EQ(SR_SUCCESS, - buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Read(MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); - EXPECT_EQ(SR_SUCCESS, - buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Read(MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); @@ -92,28 +89,22 @@ TEST(FifoBufferTest, TestAll) { // RRRRXXXXXXXXRRRR ....01234567.... // ....RRRRRRRR.... ................ EXPECT_EQ(SR_SUCCESS, - buf.Write(rtc::MakeArrayView(in, kSize * 3 / 4), bytes, error)); + buf.Write(MakeArrayView(in, kSize * 3 / 4), bytes, error)); EXPECT_EQ(kSize * 3 / 4, bytes); - EXPECT_EQ(SR_SUCCESS, - buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Read(MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); - EXPECT_EQ(SR_SUCCESS, - buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Write(MakeArrayView(in, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); - EXPECT_EQ(SR_SUCCESS, - buf.Read(rtc::MakeArrayView(out, kSize / 4), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Read(MakeArrayView(out, kSize / 4), bytes, error)); EXPECT_EQ(kSize / 4, bytes); EXPECT_EQ(0, memcmp(in + kSize / 2, out, kSize / 4)); - EXPECT_EQ(SR_SUCCESS, - buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Write(MakeArrayView(in, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); - EXPECT_EQ(SR_SUCCESS, - buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Read(MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); - EXPECT_EQ(SR_SUCCESS, - buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Read(MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); @@ -122,16 +113,16 @@ TEST(FifoBufferTest, TestAll) { buf.ConsumeWriteBuffer(0); // Try using GetReadData to do a full read - EXPECT_EQ(SR_SUCCESS, buf.Write(rtc::MakeArrayView(in, kSize), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Write(MakeArrayView(in, kSize), bytes, error)); q = buf.GetReadData(&bytes); EXPECT_TRUE(nullptr != q); EXPECT_EQ(kSize, bytes); EXPECT_EQ(0, memcmp(q, in, kSize)); buf.ConsumeReadData(kSize); - 
EXPECT_EQ(SR_BLOCK, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); + EXPECT_EQ(SR_BLOCK, buf.Read(MakeArrayView(out, kSize), bytes, error)); // Try using GetReadData to do some small reads - EXPECT_EQ(SR_SUCCESS, buf.Write(rtc::MakeArrayView(in, kSize), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Write(MakeArrayView(in, kSize), bytes, error)); q = buf.GetReadData(&bytes); EXPECT_TRUE(nullptr != q); EXPECT_EQ(kSize, bytes); @@ -142,7 +133,7 @@ TEST(FifoBufferTest, TestAll) { EXPECT_EQ(kSize / 2, bytes); EXPECT_EQ(0, memcmp(q, in + kSize / 2, kSize / 2)); buf.ConsumeReadData(kSize / 2); - EXPECT_EQ(SR_BLOCK, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); + EXPECT_EQ(SR_BLOCK, buf.Read(MakeArrayView(out, kSize), bytes, error)); // Try using GetReadData in a wraparound case // WWWWWWWWWWWWWWWW 0123456789ABCDEF @@ -150,11 +141,10 @@ TEST(FifoBufferTest, TestAll) { // WWWWWWWW....XXXX 01234567....CDEF // ............RRRR 01234567........ // RRRRRRRR........ ................ - EXPECT_EQ(SR_SUCCESS, buf.Write(rtc::MakeArrayView(in, kSize), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Write(MakeArrayView(in, kSize), bytes, error)); EXPECT_EQ(SR_SUCCESS, - buf.Read(rtc::MakeArrayView(out, kSize * 3 / 4), bytes, error)); - EXPECT_EQ(SR_SUCCESS, - buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); + buf.Read(MakeArrayView(out, kSize * 3 / 4), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Write(MakeArrayView(in, kSize / 2), bytes, error)); q = buf.GetReadData(&bytes); EXPECT_TRUE(nullptr != q); EXPECT_EQ(kSize / 4, bytes); @@ -176,7 +166,7 @@ TEST(FifoBufferTest, TestAll) { EXPECT_EQ(kSize, bytes); memcpy(p, in, kSize); buf.ConsumeWriteBuffer(kSize); - EXPECT_EQ(SR_SUCCESS, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Read(MakeArrayView(out, kSize), bytes, error)); EXPECT_EQ(kSize, bytes); EXPECT_EQ(0, memcmp(in, out, kSize)); @@ -191,7 +181,7 @@ TEST(FifoBufferTest, TestAll) { EXPECT_EQ(kSize / 2, bytes); memcpy(p, in + kSize / 2, kSize / 2); buf.ConsumeWriteBuffer(kSize / 2); - EXPECT_EQ(SR_SUCCESS, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Read(MakeArrayView(out, kSize), bytes, error)); EXPECT_EQ(kSize, bytes); EXPECT_EQ(0, memcmp(in, out, kSize)); @@ -202,9 +192,8 @@ TEST(FifoBufferTest, TestAll) { // WWWW....XXXXXXXX 4567....89AB0123 // RRRR....RRRRRRRR ................ 
EXPECT_EQ(SR_SUCCESS, - buf.Write(rtc::MakeArrayView(in, kSize * 3 / 4), bytes, error)); - EXPECT_EQ(SR_SUCCESS, - buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); + buf.Write(MakeArrayView(in, kSize * 3 / 4), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Read(MakeArrayView(out, kSize / 2), bytes, error)); p = buf.GetWriteBuffer(&bytes); EXPECT_TRUE(nullptr != p); EXPECT_EQ(kSize / 4, bytes); @@ -216,28 +205,26 @@ TEST(FifoBufferTest, TestAll) { memcpy(p, in + kSize / 4, kSize / 4); buf.ConsumeWriteBuffer(kSize / 4); EXPECT_EQ(SR_SUCCESS, - buf.Read(rtc::MakeArrayView(out, kSize * 3 / 4), bytes, error)); + buf.Read(MakeArrayView(out, kSize * 3 / 4), bytes, error)); EXPECT_EQ(kSize * 3 / 4, bytes); EXPECT_EQ(0, memcmp(in + kSize / 2, out, kSize / 4)); EXPECT_EQ(0, memcmp(in, out + kSize / 4, kSize / 4)); // Check that the stream is now empty - EXPECT_EQ(SR_BLOCK, buf.Read(rtc::MakeArrayView(out, kSize), bytes, error)); + EXPECT_EQ(SR_BLOCK, buf.Read(MakeArrayView(out, kSize), bytes, error)); // Write to the stream, close it, read the remaining bytes - EXPECT_EQ(SR_SUCCESS, - buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Write(MakeArrayView(in, kSize / 2), bytes, error)); buf.Close(); EXPECT_EQ(SS_CLOSED, buf.GetState()); - EXPECT_EQ(SR_EOS, buf.Write(rtc::MakeArrayView(in, kSize / 2), bytes, error)); - EXPECT_EQ(SR_SUCCESS, - buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); + EXPECT_EQ(SR_EOS, buf.Write(MakeArrayView(in, kSize / 2), bytes, error)); + EXPECT_EQ(SR_SUCCESS, buf.Read(MakeArrayView(out, kSize / 2), bytes, error)); EXPECT_EQ(0, memcmp(in, out, kSize / 2)); - EXPECT_EQ(SR_EOS, buf.Read(rtc::MakeArrayView(out, kSize / 2), bytes, error)); + EXPECT_EQ(SR_EOS, buf.Read(MakeArrayView(out, kSize / 2), bytes, error)); } TEST(FifoBufferTest, FullBufferCheck) { - rtc::AutoThread main_thread; + AutoThread main_thread; FifoBuffer buff(10); buff.ConsumeWriteBuffer(10); @@ -246,4 +233,4 @@ TEST(FifoBufferTest, FullBufferCheck) { EXPECT_EQ(0U, free); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/memory_stream.cc b/rtc_base/memory_stream.cc index 8ceab7aa9b..ad6793e07c 100644 --- a/rtc_base/memory_stream.cc +++ b/rtc_base/memory_stream.cc @@ -14,20 +14,23 @@ #include #include +#include +#include "api/array_view.h" #include "rtc_base/checks.h" +#include "rtc_base/stream.h" -namespace rtc { +namespace webrtc { StreamState MemoryStream::GetState() const { - return SS_OPEN; + return webrtc::SS_OPEN; } -StreamResult MemoryStream::Read(rtc::ArrayView buffer, +StreamResult MemoryStream::Read(ArrayView buffer, size_t& bytes_read, int& error) { if (seek_position_ >= data_length_) { - return SR_EOS; + return webrtc::SR_EOS; } size_t available = data_length_ - seek_position_; size_t bytes; @@ -40,10 +43,10 @@ StreamResult MemoryStream::Read(rtc::ArrayView buffer, memcpy(buffer.data(), &buffer_[seek_position_], bytes); seek_position_ += bytes; bytes_read = bytes; - return SR_SUCCESS; + return webrtc::SR_SUCCESS; } -StreamResult MemoryStream::Write(rtc::ArrayView buffer, +StreamResult MemoryStream::Write(ArrayView buffer, size_t& bytes_written, int& error) { size_t available = buffer_length_ - seek_position_; @@ -54,7 +57,7 @@ StreamResult MemoryStream::Write(rtc::ArrayView buffer, size_t new_buffer_length = std::max( ((seek_position_ + buffer.size()) | 0xFF) + 1, buffer_length_ * 2); StreamResult result = DoReserve(new_buffer_length, &error); - if (SR_SUCCESS != result) { + if (webrtc::SR_SUCCESS != result) { return 
result; } RTC_DCHECK(buffer_length_ >= new_buffer_length); @@ -71,7 +74,7 @@ StreamResult MemoryStream::Write(rtc::ArrayView buffer, data_length_ = seek_position_; } bytes_written = bytes; - return SR_SUCCESS; + return webrtc::SR_SUCCESS; } void MemoryStream::Close() { @@ -102,7 +105,7 @@ bool MemoryStream::GetSize(size_t* size) const { } bool MemoryStream::ReserveSize(size_t size) { - return (SR_SUCCESS == DoReserve(size, nullptr)); + return (webrtc::SR_SUCCESS == DoReserve(size, nullptr)); } /////////////////////////////////////////////////////////////////////////////// @@ -123,7 +126,7 @@ void MemoryStream::SetData(const void* data, size_t length) { StreamResult MemoryStream::DoReserve(size_t size, int* error) { if (buffer_length_ >= size) - return SR_SUCCESS; + return webrtc::SR_SUCCESS; if (char* new_buffer = new char[size]) { if (buffer_ != nullptr && data_length_ > 0) { @@ -132,13 +135,13 @@ StreamResult MemoryStream::DoReserve(size_t size, int* error) { delete[] buffer_; buffer_ = new_buffer; buffer_length_ = size; - return SR_SUCCESS; + return webrtc::SR_SUCCESS; } if (error) { *error = ENOMEM; } - return SR_ERROR; + return webrtc::SR_ERROR; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/memory_stream.h b/rtc_base/memory_stream.h index 07e07f0694..c01d590286 100644 --- a/rtc_base/memory_stream.h +++ b/rtc_base/memory_stream.h @@ -13,9 +13,12 @@ #include +#include + +#include "api/array_view.h" #include "rtc_base/stream.h" -namespace rtc { +namespace webrtc { // MemoryStream dynamically resizes to accomodate written data. @@ -25,10 +28,10 @@ class MemoryStream final : public StreamInterface { ~MemoryStream() override; StreamState GetState() const override; - StreamResult Read(rtc::ArrayView buffer, + StreamResult Read(ArrayView buffer, size_t& bytes_read, int& error) override; - StreamResult Write(rtc::ArrayView buffer, + StreamResult Write(ArrayView buffer, size_t& bytes_written, int& error) override; void Close() override; @@ -54,6 +57,14 @@ class MemoryStream final : public StreamInterface { size_t seek_position_ = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::MemoryStream; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_MEMORY_STREAM_H_ diff --git a/rtc_base/memory_usage.cc b/rtc_base/memory_usage.cc index 2d33d98eed..0adb56ed09 100644 --- a/rtc_base/memory_usage.cc +++ b/rtc_base/memory_usage.cc @@ -10,6 +10,8 @@ #include "rtc_base/memory_usage.h" +#include + #if defined(WEBRTC_LINUX) #include @@ -29,7 +31,7 @@ #include "rtc_base/logging.h" -namespace rtc { +namespace webrtc { int64_t GetProcessResidentSizeBytes() { #if defined(WEBRTC_LINUX) @@ -82,4 +84,4 @@ int64_t GetProcessResidentSizeBytes() { #endif } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/memory_usage.h b/rtc_base/memory_usage.h index c49323c825..bd1b5ea49b 100644 --- a/rtc_base/memory_usage.h +++ b/rtc_base/memory_usage.h @@ -12,13 +12,21 @@ #include -namespace rtc { +namespace webrtc { // Returns current memory used by the process in bytes (working set size on // Windows and resident set size on other platforms). // Returns -1 on failure. int64_t GetProcessResidentSizeBytes(); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. 
+// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::GetProcessResidentSizeBytes; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_MEMORY_USAGE_H_ diff --git a/rtc_base/memory_usage_unittest.cc b/rtc_base/memory_usage_unittest.cc index a928efbb61..72ae0b3e53 100644 --- a/rtc_base/memory_usage_unittest.cc +++ b/rtc_base/memory_usage_unittest.cc @@ -10,13 +10,15 @@ #include "rtc_base/memory_usage.h" +#include + #include "test/gtest.h" -namespace rtc { +namespace webrtc { TEST(GetMemoryUsage, SimpleTest) { int64_t used_bytes = GetProcessResidentSizeBytes(); EXPECT_GE(used_bytes, 0); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/message_digest.cc b/rtc_base/message_digest.cc index 56abcd2c7b..d25ab403d5 100644 --- a/rtc_base/message_digest.cc +++ b/rtc_base/message_digest.cc @@ -14,12 +14,13 @@ #include #include +#include #include "absl/strings/string_view.h" #include "rtc_base/openssl_digest.h" #include "rtc_base/string_encode.h" -namespace rtc { +namespace webrtc { // From RFC 4572. const char DIGEST_MD5[] = "md5"; @@ -180,4 +181,4 @@ std::string ComputeHmac(absl::string_view alg, return output; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/message_digest.h b/rtc_base/message_digest.h index 632b9af075..22a86c7af4 100644 --- a/rtc_base/message_digest.h +++ b/rtc_base/message_digest.h @@ -17,7 +17,7 @@ #include "absl/strings/string_view.h" -namespace rtc { +namespace webrtc { // Definitions for the digest algorithms. extern const char DIGEST_MD5[]; @@ -128,6 +128,25 @@ bool ComputeHmac(absl::string_view alg, absl::string_view input, std::string* output); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::ComputeDigest; +using ::webrtc::ComputeHmac; +using ::webrtc::DIGEST_MD5; +using ::webrtc::DIGEST_SHA_1; +using ::webrtc::DIGEST_SHA_224; +using ::webrtc::DIGEST_SHA_256; +using ::webrtc::DIGEST_SHA_384; +using ::webrtc::DIGEST_SHA_512; +using ::webrtc::IsFips180DigestAlgorithm; +using ::webrtc::MD5; +using ::webrtc::MessageDigest; +using ::webrtc::MessageDigestFactory; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_MESSAGE_DIGEST_H_ diff --git a/rtc_base/message_digest_unittest.cc b/rtc_base/message_digest_unittest.cc index b296783d4e..78962607d2 100644 --- a/rtc_base/message_digest_unittest.cc +++ b/rtc_base/message_digest_unittest.cc @@ -10,11 +10,14 @@ #include "rtc_base/message_digest.h" +#include +#include + #include "absl/strings/string_view.h" #include "rtc_base/string_encode.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { // Test vectors from RFC 1321. TEST(MessageDigestTest, TestMd5Digest) { @@ -48,13 +51,13 @@ TEST(MessageDigestTest, TestSha1Digest) { "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq")); // Test the raw buffer versions of the APIs; also check output buffer size. 
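// The digest helpers exercised by these tests come in two flavours: string
// overloads that return the digest as a lowercase hex string, and raw-buffer
// overloads that return the number of bytes written (0 if the output buffer
// is too small). A minimal usage sketch, assuming only the overloads shown in
// these tests; the SHA-1 value is the standard "abc" test vector:
//
//   #include "rtc_base/message_digest.h"
//
//   std::string hex = webrtc::ComputeDigest(webrtc::DIGEST_SHA_1, "abc");
//   // hex == "a9993e364706816aba3e25717850c26c9cd0d89d"
//
//   // Keyed HMAC over strings; an unrecognized algorithm name yields "".
//   std::string mac = webrtc::ComputeHmac(webrtc::DIGEST_SHA_1, "key", "abc");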
- char output[20]; - EXPECT_EQ(sizeof(output), - ComputeDigest(DIGEST_SHA_1, "abc", 3, output, sizeof(output))); + char output[EVP_MAX_MD_SIZE]; + EXPECT_EQ(static_cast(SHA_DIGEST_LENGTH), + ComputeDigest(DIGEST_SHA_1, "abc", 3, output, SHA_DIGEST_LENGTH)); EXPECT_EQ("a9993e364706816aba3e25717850c26c9cd0d89d", - hex_encode(absl::string_view(output, sizeof(output)))); - EXPECT_EQ(0U, - ComputeDigest(DIGEST_SHA_1, "abc", 3, output, sizeof(output) - 1)); + hex_encode(absl::string_view(output, SHA_DIGEST_LENGTH))); + EXPECT_EQ( + 0U, ComputeDigest(DIGEST_SHA_1, "abc", 3, output, SHA_DIGEST_LENGTH - 1)); } // Test that we fail properly if a bad digest algorithm is specified. @@ -136,15 +139,57 @@ TEST(MessageDigestTest, TestSha1Hmac) { // Test the raw buffer versions of the APIs; also check output buffer size. std::string key(20, '\x0b'); std::string input("Hi There"); - char output[20]; - EXPECT_EQ(sizeof(output), + char output[EVP_MAX_MD_SIZE]; + EXPECT_EQ(static_cast(SHA_DIGEST_LENGTH), ComputeHmac(DIGEST_SHA_1, key.c_str(), key.size(), input.c_str(), - input.size(), output, sizeof(output))); + input.size(), output, SHA_DIGEST_LENGTH)); EXPECT_EQ("b617318655057264e28bc0b6fb378c8ef146be00", - hex_encode(absl::string_view(output, sizeof(output)))); + hex_encode(absl::string_view(output, SHA_DIGEST_LENGTH))); EXPECT_EQ(0U, ComputeHmac(DIGEST_SHA_1, key.c_str(), key.size(), input.c_str(), - input.size(), output, sizeof(output) - 1)); + input.size(), output, SHA_DIGEST_LENGTH - 1)); +} + +// Test vectors from RFC 4231. +// https://datatracker.ietf.org/doc/html/rfc4231#section-4.2 +TEST(MessageDigestTest, TestSha2Hmac) { + std::string key(20, '\x0b'); + std::string input("Hi There"); + char output[EVP_MAX_MD_SIZE]; + + EXPECT_EQ( + "896fb1128abbdf196832107cd49df33f" + "47b4b1169912ba4f53684b22", + ComputeHmac(DIGEST_SHA_224, key, input)); + EXPECT_EQ( + "b0344c61d8db38535ca8afceaf0bf12b" + "881dc200c9833da726e9376c2e32cff7", + ComputeHmac(DIGEST_SHA_256, key, input)); + + // Test the raw buffer versions of the APIs; also check output buffer size. + // SHA-224 + EXPECT_EQ(static_cast(SHA224_DIGEST_LENGTH), + ComputeHmac(DIGEST_SHA_224, key.c_str(), key.size(), input.c_str(), + input.size(), output, SHA224_DIGEST_LENGTH)); + EXPECT_EQ( + "896fb1128abbdf196832107cd49df33f" + "47b4b1169912ba4f53684b22", + hex_encode(absl::string_view(output, SHA224_DIGEST_LENGTH))); + EXPECT_EQ(0U, + ComputeHmac(DIGEST_SHA_224, key.c_str(), key.size(), input.c_str(), + input.size(), output, SHA224_DIGEST_LENGTH - 1)); + + // SHA-256 + EXPECT_EQ(static_cast(SHA256_DIGEST_LENGTH), + ComputeHmac(DIGEST_SHA_256, key.c_str(), key.size(), input.c_str(), + input.size(), output, SHA256_DIGEST_LENGTH)); + EXPECT_EQ( + "b0344c61d8db38535ca8afceaf0bf12b" + "881dc200c9833da726e9376c2e32cff7", + hex_encode(absl::string_view(output, SHA256_DIGEST_LENGTH))); + EXPECT_EQ(0U, + ComputeHmac(DIGEST_SHA_256, key.c_str(), key.size(), input.c_str(), + input.size(), output, SHA256_DIGEST_LENGTH - 1)); } TEST(MessageDigestTest, TestBadHmac) { @@ -153,4 +198,4 @@ TEST(MessageDigestTest, TestBadHmac) { EXPECT_EQ("", ComputeHmac("sha-9000", "key", "abc")); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/nat_unittest.cc b/rtc_base/nat_unittest.cc deleted file mode 100644 index 19e53543ba..0000000000 --- a/rtc_base/nat_unittest.cc +++ /dev/null @@ -1,408 +0,0 @@ -/* - * Copyright 2004 The WebRTC Project Authors. All rights reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include -#include -#include -#include - -#include "absl/memory/memory.h" -#include "rtc_base/async_packet_socket.h" -#include "rtc_base/async_tcp_socket.h" -#include "rtc_base/async_udp_socket.h" -#include "rtc_base/gunit.h" -#include "rtc_base/ip_address.h" -#include "rtc_base/logging.h" -#include "rtc_base/nat_server.h" -#include "rtc_base/nat_socket_factory.h" -#include "rtc_base/nat_types.h" -#include "rtc_base/net_helpers.h" -#include "rtc_base/net_test_helpers.h" -#include "rtc_base/network.h" -#include "rtc_base/physical_socket_server.h" -#include "rtc_base/socket.h" -#include "rtc_base/socket_address.h" -#include "rtc_base/socket_factory.h" -#include "rtc_base/socket_server.h" -#include "rtc_base/test_client.h" -#include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/thread.h" -#include "rtc_base/virtual_socket_server.h" -#include "test/gtest.h" -#include "test/scoped_key_value_config.h" - -namespace rtc { -namespace { - -bool CheckReceive(TestClient* client, - bool should_receive, - const char* buf, - size_t size) { - return (should_receive) ? client->CheckNextPacket(buf, size, 0) - : client->CheckNoPacket(); -} - -TestClient* CreateTestClient(SocketFactory* factory, - const SocketAddress& local_addr) { - return new TestClient( - absl::WrapUnique(AsyncUDPSocket::Create(factory, local_addr))); -} - -TestClient* CreateTCPTestClient(Socket* socket) { - return new TestClient(std::make_unique(socket)); -} - -// Tests that when sending from internal_addr to external_addrs through the -// NAT type specified by nat_type, all external addrs receive the sent packet -// and, if exp_same is true, all use the same mapped-address on the NAT. 
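-// For reference, the NAT behaviours these test helpers encode, as fixed by
-// the TestBindings and TestFilters call sites further down in this file:
-//
-//   NAT type             reuses one mapping    drops unknown src IP   drops unknown src port
-//   NAT_OPEN_CONE        yes                   no                     no
-//   NAT_ADDR_RESTRICTED  yes                   yes                    no
-//   NAT_PORT_RESTRICTED  yes                   yes                    yes
-//   NAT_SYMMETRIC        no (per destination)  yes                    yes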
-void TestSend(SocketServer* internal, - const SocketAddress& internal_addr, - SocketServer* external, - const SocketAddress external_addrs[4], - NATType nat_type, - bool exp_same) { - Thread th_int(internal); - Thread th_ext(external); - - SocketAddress server_addr = internal_addr; - server_addr.SetPort(0); // Auto-select a port - NATServer* nat = new NATServer(nat_type, internal, server_addr, server_addr, - external, external_addrs[0]); - NATSocketFactory* natsf = new NATSocketFactory( - internal, nat->internal_udp_address(), nat->internal_tcp_address()); - - TestClient* in = CreateTestClient(natsf, internal_addr); - TestClient* out[4]; - for (int i = 0; i < 4; i++) - out[i] = CreateTestClient(external, external_addrs[i]); - - th_int.Start(); - th_ext.Start(); - - const char* buf = "filter_test"; - size_t len = strlen(buf); - - in->SendTo(buf, len, out[0]->address()); - SocketAddress trans_addr; - EXPECT_TRUE(out[0]->CheckNextPacket(buf, len, &trans_addr)); - - for (int i = 1; i < 4; i++) { - in->SendTo(buf, len, out[i]->address()); - SocketAddress trans_addr2; - EXPECT_TRUE(out[i]->CheckNextPacket(buf, len, &trans_addr2)); - bool are_same = (trans_addr == trans_addr2); - ASSERT_EQ(are_same, exp_same) << "same translated address"; - ASSERT_NE(AF_UNSPEC, trans_addr.family()); - ASSERT_NE(AF_UNSPEC, trans_addr2.family()); - } - - th_int.Stop(); - th_ext.Stop(); - - delete nat; - delete natsf; - delete in; - for (int i = 0; i < 4; i++) - delete out[i]; -} - -// Tests that when sending from external_addrs to internal_addr, the packet -// is delivered according to the specified filter_ip and filter_port rules. -void TestRecv(SocketServer* internal, - const SocketAddress& internal_addr, - SocketServer* external, - const SocketAddress external_addrs[4], - NATType nat_type, - bool filter_ip, - bool filter_port) { - Thread th_int(internal); - Thread th_ext(external); - - SocketAddress server_addr = internal_addr; - server_addr.SetPort(0); // Auto-select a port - NATServer* nat = new NATServer(nat_type, internal, server_addr, server_addr, - external, external_addrs[0]); - NATSocketFactory* natsf = new NATSocketFactory( - internal, nat->internal_udp_address(), nat->internal_tcp_address()); - - TestClient* in = CreateTestClient(natsf, internal_addr); - TestClient* out[4]; - for (int i = 0; i < 4; i++) - out[i] = CreateTestClient(external, external_addrs[i]); - - th_int.Start(); - th_ext.Start(); - - const char* buf = "filter_test"; - size_t len = strlen(buf); - - in->SendTo(buf, len, out[0]->address()); - SocketAddress trans_addr; - EXPECT_TRUE(out[0]->CheckNextPacket(buf, len, &trans_addr)); - - out[1]->SendTo(buf, len, trans_addr); - EXPECT_TRUE(CheckReceive(in, !filter_ip, buf, len)); - - out[2]->SendTo(buf, len, trans_addr); - EXPECT_TRUE(CheckReceive(in, !filter_port, buf, len)); - - out[3]->SendTo(buf, len, trans_addr); - EXPECT_TRUE(CheckReceive(in, !filter_ip && !filter_port, buf, len)); - - th_int.Stop(); - th_ext.Stop(); - - delete nat; - delete natsf; - delete in; - for (int i = 0; i < 4; i++) - delete out[i]; -} - -// Tests that NATServer allocates bindings properly. 
-void TestBindings(SocketServer* internal, - const SocketAddress& internal_addr, - SocketServer* external, - const SocketAddress external_addrs[4]) { - TestSend(internal, internal_addr, external, external_addrs, NAT_OPEN_CONE, - true); - TestSend(internal, internal_addr, external, external_addrs, - NAT_ADDR_RESTRICTED, true); - TestSend(internal, internal_addr, external, external_addrs, - NAT_PORT_RESTRICTED, true); - TestSend(internal, internal_addr, external, external_addrs, NAT_SYMMETRIC, - false); -} - -// Tests that NATServer filters packets properly. -void TestFilters(SocketServer* internal, - const SocketAddress& internal_addr, - SocketServer* external, - const SocketAddress external_addrs[4]) { - TestRecv(internal, internal_addr, external, external_addrs, NAT_OPEN_CONE, - false, false); - TestRecv(internal, internal_addr, external, external_addrs, - NAT_ADDR_RESTRICTED, true, false); - TestRecv(internal, internal_addr, external, external_addrs, - NAT_PORT_RESTRICTED, true, true); - TestRecv(internal, internal_addr, external, external_addrs, NAT_SYMMETRIC, - true, true); -} - -bool TestConnectivity(const SocketAddress& src, const IPAddress& dst) { - // The physical NAT tests require connectivity to the selected ip from the - // internal address used for the NAT. Things like firewalls can break that, so - // check to see if it's worth even trying with this ip. - std::unique_ptr pss(new PhysicalSocketServer()); - std::unique_ptr client(pss->CreateSocket(src.family(), SOCK_DGRAM)); - std::unique_ptr server(pss->CreateSocket(src.family(), SOCK_DGRAM)); - if (client->Bind(SocketAddress(src.ipaddr(), 0)) != 0 || - server->Bind(SocketAddress(dst, 0)) != 0) { - return false; - } - const char* buf = "hello other socket"; - size_t len = strlen(buf); - int sent = client->SendTo(buf, len, server->GetLocalAddress()); - SocketAddress addr; - const size_t kRecvBufSize = 64; - char recvbuf[kRecvBufSize]; - Thread::Current()->SleepMs(100); - int received = server->RecvFrom(recvbuf, kRecvBufSize, &addr, nullptr); - return received == sent && ::memcmp(buf, recvbuf, len) == 0; -} - -void TestPhysicalInternal(const SocketAddress& int_addr) { - webrtc::test::ScopedKeyValueConfig field_trials; - rtc::AutoThread main_thread; - PhysicalSocketServer socket_server; - BasicNetworkManager network_manager(nullptr, &socket_server, &field_trials); - network_manager.StartUpdating(); - // Process pending messages so the network list is updated. - Thread::Current()->ProcessMessages(0); - - std::vector networks = network_manager.GetNetworks(); - networks.erase(std::remove_if(networks.begin(), networks.end(), - [](const rtc::Network* network) { - return rtc::kDefaultNetworkIgnoreMask & - network->type(); - }), - networks.end()); - if (networks.empty()) { - RTC_LOG(LS_WARNING) << "Not enough network adapters for test."; - return; - } - - SocketAddress ext_addr1(int_addr); - SocketAddress ext_addr2; - // Find an available IP with matching family. The test breaks if int_addr - // can't talk to ip, so check for connectivity as well. 
- for (const Network* const network : networks) { - const IPAddress& ip = network->GetBestIP(); - if (ip.family() == int_addr.family() && TestConnectivity(int_addr, ip)) { - ext_addr2.SetIP(ip); - break; - } - } - if (ext_addr2.IsNil()) { - RTC_LOG(LS_WARNING) << "No available IP of same family as " - << int_addr.ToString(); - return; - } - - RTC_LOG(LS_INFO) << "selected ip " << ext_addr2.ipaddr().ToString(); - - SocketAddress ext_addrs[4] = { - SocketAddress(ext_addr1), SocketAddress(ext_addr2), - SocketAddress(ext_addr1), SocketAddress(ext_addr2)}; - - std::unique_ptr int_pss(new PhysicalSocketServer()); - std::unique_ptr ext_pss(new PhysicalSocketServer()); - - TestBindings(int_pss.get(), int_addr, ext_pss.get(), ext_addrs); - TestFilters(int_pss.get(), int_addr, ext_pss.get(), ext_addrs); -} - -TEST(NatTest, TestPhysicalIPv4) { - TestPhysicalInternal(SocketAddress("127.0.0.1", 0)); -} - -TEST(NatTest, TestPhysicalIPv6) { - if (HasIPv6Enabled()) { - TestPhysicalInternal(SocketAddress("::1", 0)); - } else { - RTC_LOG(LS_WARNING) << "No IPv6, skipping"; - } -} - -namespace { - -class TestVirtualSocketServer : public VirtualSocketServer { - public: - // Expose this publicly - IPAddress GetNextIP(int af) { return VirtualSocketServer::GetNextIP(af); } -}; - -} // namespace - -void TestVirtualInternal(int family) { - rtc::AutoThread main_thread; - std::unique_ptr int_vss( - new TestVirtualSocketServer()); - std::unique_ptr ext_vss( - new TestVirtualSocketServer()); - - SocketAddress int_addr; - SocketAddress ext_addrs[4]; - int_addr.SetIP(int_vss->GetNextIP(family)); - ext_addrs[0].SetIP(ext_vss->GetNextIP(int_addr.family())); - ext_addrs[1].SetIP(ext_vss->GetNextIP(int_addr.family())); - ext_addrs[2].SetIP(ext_addrs[0].ipaddr()); - ext_addrs[3].SetIP(ext_addrs[1].ipaddr()); - - TestBindings(int_vss.get(), int_addr, ext_vss.get(), ext_addrs); - TestFilters(int_vss.get(), int_addr, ext_vss.get(), ext_addrs); -} - -TEST(NatTest, TestVirtualIPv4) { - TestVirtualInternal(AF_INET); -} - -TEST(NatTest, TestVirtualIPv6) { - if (HasIPv6Enabled()) { - TestVirtualInternal(AF_INET6); - } else { - RTC_LOG(LS_WARNING) << "No IPv6, skipping"; - } -} - -class NatTcpTest : public ::testing::Test, public sigslot::has_slots<> { - public: - NatTcpTest() - : int_addr_("192.168.0.1", 0), - ext_addr_("10.0.0.1", 0), - connected_(false), - int_vss_(new TestVirtualSocketServer()), - ext_vss_(new TestVirtualSocketServer()), - int_thread_(new Thread(int_vss_.get())), - ext_thread_(new Thread(ext_vss_.get())), - nat_(new NATServer(NAT_OPEN_CONE, - int_vss_.get(), - int_addr_, - int_addr_, - ext_vss_.get(), - ext_addr_)), - natsf_(new NATSocketFactory(int_vss_.get(), - nat_->internal_udp_address(), - nat_->internal_tcp_address())) { - int_thread_->Start(); - ext_thread_->Start(); - } - - void OnConnectEvent(Socket* socket) { connected_ = true; } - - void OnAcceptEvent(Socket* socket) { - accepted_.reset(server_->Accept(nullptr)); - } - - void OnCloseEvent(Socket* socket, int error) {} - - void ConnectEvents() { - server_->SignalReadEvent.connect(this, &NatTcpTest::OnAcceptEvent); - client_->SignalConnectEvent.connect(this, &NatTcpTest::OnConnectEvent); - } - - SocketAddress int_addr_; - SocketAddress ext_addr_; - bool connected_; - std::unique_ptr int_vss_; - std::unique_ptr ext_vss_; - std::unique_ptr int_thread_; - std::unique_ptr ext_thread_; - std::unique_ptr nat_; - std::unique_ptr natsf_; - std::unique_ptr client_; - std::unique_ptr server_; - std::unique_ptr accepted_; -}; - -TEST_F(NatTcpTest, 
DISABLED_TestConnectOut) { - server_.reset(ext_vss_->CreateSocket(AF_INET, SOCK_STREAM)); - server_->Bind(ext_addr_); - server_->Listen(5); - - client_.reset(natsf_->CreateSocket(AF_INET, SOCK_STREAM)); - EXPECT_GE(0, client_->Bind(int_addr_)); - EXPECT_GE(0, client_->Connect(server_->GetLocalAddress())); - - ConnectEvents(); - - EXPECT_TRUE_WAIT(connected_, 1000); - EXPECT_EQ(client_->GetRemoteAddress(), server_->GetLocalAddress()); - EXPECT_EQ(accepted_->GetRemoteAddress().ipaddr(), ext_addr_.ipaddr()); - - std::unique_ptr in(CreateTCPTestClient(client_.release())); - std::unique_ptr out( - CreateTCPTestClient(accepted_.release())); - - const char* buf = "test_packet"; - size_t len = strlen(buf); - - in->Send(buf, len); - SocketAddress trans_addr; - EXPECT_TRUE(out->CheckNextPacket(buf, len, &trans_addr)); - - out->Send(buf, len); - EXPECT_TRUE(in->CheckNextPacket(buf, len, &trans_addr)); -} - -} // namespace -} // namespace rtc diff --git a/rtc_base/net_helper.cc b/rtc_base/net_helper.cc index 4afee7bfb0..eca36e68ca 100644 --- a/rtc_base/net_helper.cc +++ b/rtc_base/net_helper.cc @@ -12,7 +12,7 @@ #include "absl/strings/string_view.h" -namespace cricket { +namespace webrtc { const char UDP_PROTOCOL_NAME[] = "udp"; const char TCP_PROTOCOL_NAME[] = "tcp"; @@ -30,4 +30,4 @@ int GetProtocolOverhead(absl::string_view protocol) { } } -} // namespace cricket +} // namespace webrtc diff --git a/rtc_base/net_helper.h b/rtc_base/net_helper.h index 2dce91196c..95d22603ef 100644 --- a/rtc_base/net_helper.h +++ b/rtc_base/net_helper.h @@ -10,14 +10,13 @@ #ifndef RTC_BASE_NET_HELPER_H_ #define RTC_BASE_NET_HELPER_H_ -#include #include "absl/strings/string_view.h" #include "rtc_base/system/rtc_export.h" // This header contains helper functions and constants used by different types // of transports. -namespace cricket { +namespace webrtc { RTC_EXPORT extern const char UDP_PROTOCOL_NAME[]; RTC_EXPORT extern const char TCP_PROTOCOL_NAME[]; @@ -30,6 +29,20 @@ constexpr int kUdpHeaderSize = 8; // Get the transport layer overhead per packet based on the protocol. int GetProtocolOverhead(absl::string_view protocol); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::GetProtocolOverhead; +using ::webrtc::kTcpHeaderSize; +using ::webrtc::kUdpHeaderSize; +using ::webrtc::SSLTCP_PROTOCOL_NAME; +using ::webrtc::TCP_PROTOCOL_NAME; +using ::webrtc::TLS_PROTOCOL_NAME; +using ::webrtc::UDP_PROTOCOL_NAME; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NET_HELPER_H_ diff --git a/rtc_base/net_helpers.cc b/rtc_base/net_helpers.cc index 00cd434a58..51db569a88 100644 --- a/rtc_base/net_helpers.cc +++ b/rtc_base/net_helpers.cc @@ -10,7 +10,6 @@ #include "rtc_base/net_helpers.h" -#include #include #include "absl/strings/string_view.h" @@ -24,7 +23,7 @@ #include #endif // defined(WEBRTC_POSIX) && !defined(__native_client__) -namespace rtc { +namespace webrtc { const char* inet_ntop(int af, const void* src, char* dst, socklen_t size) { #if defined(WEBRTC_WIN) @@ -42,4 +41,4 @@ int inet_pton(int af, absl::string_view src, void* dst) { return ::inet_pton(af, src_str.c_str(), dst); #endif } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/net_helpers.h b/rtc_base/net_helpers.h index a529f4f03f..d27c263e93 100644 --- a/rtc_base/net_helpers.h +++ b/rtc_base/net_helpers.h @@ -12,7 +12,7 @@ #define RTC_BASE_NET_HELPERS_H_ #if defined(WEBRTC_POSIX) -#include +#include // IWYU pragma: export #elif WEBRTC_WIN #include // NOLINT @@ -20,15 +20,23 @@ #endif #include "absl/strings/string_view.h" -#include "rtc_base/system/rtc_export.h" -namespace rtc { +namespace webrtc { // rtc namespaced wrappers for inet_ntop and inet_pton so we can avoid // the windows-native versions of these. const char* inet_ntop(int af, const void* src, char* dst, socklen_t size); int inet_pton(int af, absl::string_view src, void* dst); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::inet_ntop; +using ::webrtc::inet_pton; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NET_HELPERS_H_ diff --git a/rtc_base/net_test_helpers.cc b/rtc_base/net_test_helpers.cc index 806d7dee60..f21a0ebb04 100644 --- a/rtc_base/net_test_helpers.cc +++ b/rtc_base/net_test_helpers.cc @@ -10,8 +10,7 @@ #include "rtc_base/net_test_helpers.h" -#include -#include +#include "rtc_base/net_helpers.h" #if defined(WEBRTC_WIN) #include @@ -20,7 +19,6 @@ #include "rtc_base/win/windows_version.h" #endif #if defined(WEBRTC_POSIX) && !defined(__native_client__) -#include #if defined(WEBRTC_ANDROID) #include "rtc_base/ifaddrs_android.h" #else @@ -28,7 +26,7 @@ #endif #endif // defined(WEBRTC_POSIX) && !defined(__native_client__) -namespace rtc { +namespace webrtc { bool HasIPv4Enabled() { #if defined(WEBRTC_POSIX) && !defined(__native_client__) @@ -55,10 +53,10 @@ bool HasIPv6Enabled() { // WinUWP always has IPv6 capability. 
return true; #elif defined(WEBRTC_WIN) - if (rtc::rtc_win::GetVersion() >= rtc::rtc_win::Version::VERSION_VISTA) { + if (rtc_win::GetVersion() >= rtc_win::Version::VERSION_VISTA) { return true; } - if (rtc::rtc_win::GetVersion() < rtc::rtc_win::Version::VERSION_XP) { + if (rtc_win::GetVersion() < rtc_win::Version::VERSION_XP) { return false; } DWORD protbuff_size = 4096; @@ -108,4 +106,4 @@ bool HasIPv6Enabled() { return true; #endif } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/net_test_helpers.h b/rtc_base/net_test_helpers.h index 2e8320c4d8..3fb2a15ceb 100644 --- a/rtc_base/net_test_helpers.h +++ b/rtc_base/net_test_helpers.h @@ -13,11 +13,18 @@ #include "rtc_base/system/rtc_export.h" -namespace rtc { +namespace webrtc { RTC_EXPORT bool HasIPv4Enabled(); RTC_EXPORT bool HasIPv6Enabled(); -} // namespace rtc +} // namespace webrtc + +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using webrtc::HasIPv4Enabled; +using webrtc::HasIPv6Enabled; +} +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NET_TEST_HELPERS_H_ diff --git a/rtc_base/network.cc b/rtc_base/network.cc index 4be6eed759..19741366f8 100644 --- a/rtc_base/network.cc +++ b/rtc_base/network.cc @@ -10,43 +10,61 @@ #include "rtc_base/network.h" -#include "absl/strings/string_view.h" -#include "rtc_base/experiments/field_trial_parser.h" - -#if defined(WEBRTC_POSIX) -#include -#endif // WEBRTC_POSIX - -#if defined(WEBRTC_WIN) -#include - -#include "rtc_base/win32.h" -#elif !defined(__native_client__) -#include "rtc_base/ifaddrs_converter.h" -#endif - +#include +#include +#include +#include +#include #include +#include +#include +#include #include "absl/algorithm/container.h" -#include "absl/memory/memory.h" +#include "absl/base/nullability.h" #include "absl/strings/match.h" #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" +#include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" -#include "api/transport/field_trial_based_config.h" #include "api/units/time_delta.h" #include "rtc_base/checks.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" -#include "rtc_base/memory/always_valid_pointer.h" +#include "rtc_base/mdns_responder_interface.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/network_constants.h" #include "rtc_base/network_monitor.h" +#include "rtc_base/network_monitor_factory.h" #include "rtc_base/socket.h" // includes something that makes windows happy -#include "rtc_base/string_encode.h" -#include "rtc_base/string_utils.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/socket_factory.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/thread.h" -namespace rtc { +// IWYU pragma: begin_keep +#if defined(WEBRTC_POSIX) +#include +#endif // WEBRTC_POSIX + +#if defined(WEBRTC_WIN) +#include + +#include "rtc_base/experiments/field_trial_parser.h" +#include "rtc_base/string_utils.h" +#include "rtc_base/win32.h" +#elif !defined(__native_client__) +#include "rtc_base/ifaddrs_converter.h" +#endif +// IWYU pragma: end_keep + +namespace webrtc { namespace { +using ::webrtc::Environment; using ::webrtc::SafeTask; +using ::webrtc::SocketFactory; using ::webrtc::TimeDelta; // List of MAC addresses of known VPN (for windows). 
@@ -83,8 +101,9 @@ bool SortNetworks(const Network* a, const Network* b) { // After type, networks are sorted by IP address precedence values // from RFC 3484-bis - if (IPAddressPrecedence(ip_a) != IPAddressPrecedence(ip_b)) { - return IPAddressPrecedence(ip_a) > IPAddressPrecedence(ip_b); + if (webrtc::IPAddressPrecedence(ip_a) != webrtc::IPAddressPrecedence(ip_b)) { + return webrtc::IPAddressPrecedence(ip_a) > + webrtc::IPAddressPrecedence(ip_b); } // TODO(mallinath) - Add VPN and Link speed conditions while sorting. @@ -99,32 +118,37 @@ uint16_t ComputeNetworkCostByType(int type, bool add_network_cost_to_vpn) { // TODO(jonaso) : Rollout support for cellular network cost using A/B // experiment to make sure it does not introduce regressions. - int vpnCost = (is_vpn && add_network_cost_to_vpn) ? kNetworkCostVpn : 0; + int vpnCost = + (is_vpn && add_network_cost_to_vpn) ? webrtc::kNetworkCostVpn : 0; switch (type) { - case rtc::ADAPTER_TYPE_ETHERNET: - case rtc::ADAPTER_TYPE_LOOPBACK: - return kNetworkCostMin + vpnCost; - case rtc::ADAPTER_TYPE_WIFI: - return kNetworkCostLow + vpnCost; - case rtc::ADAPTER_TYPE_CELLULAR: - return kNetworkCostCellular + vpnCost; - case rtc::ADAPTER_TYPE_CELLULAR_2G: - return (use_differentiated_cellular_costs ? kNetworkCostCellular2G - : kNetworkCostCellular) + + case webrtc::ADAPTER_TYPE_ETHERNET: + case webrtc::ADAPTER_TYPE_LOOPBACK: + return webrtc::kNetworkCostMin + vpnCost; + case webrtc::ADAPTER_TYPE_WIFI: + return webrtc::kNetworkCostLow + vpnCost; + case webrtc::ADAPTER_TYPE_CELLULAR: + return webrtc::kNetworkCostCellular + vpnCost; + case webrtc::ADAPTER_TYPE_CELLULAR_2G: + return (use_differentiated_cellular_costs + ? webrtc::kNetworkCostCellular2G + : webrtc::kNetworkCostCellular) + vpnCost; - case rtc::ADAPTER_TYPE_CELLULAR_3G: - return (use_differentiated_cellular_costs ? kNetworkCostCellular3G - : kNetworkCostCellular) + + case webrtc::ADAPTER_TYPE_CELLULAR_3G: + return (use_differentiated_cellular_costs + ? webrtc::kNetworkCostCellular3G + : webrtc::kNetworkCostCellular) + vpnCost; - case rtc::ADAPTER_TYPE_CELLULAR_4G: - return (use_differentiated_cellular_costs ? kNetworkCostCellular4G - : kNetworkCostCellular) + + case webrtc::ADAPTER_TYPE_CELLULAR_4G: + return (use_differentiated_cellular_costs + ? webrtc::kNetworkCostCellular4G + : webrtc::kNetworkCostCellular) + vpnCost; - case rtc::ADAPTER_TYPE_CELLULAR_5G: - return (use_differentiated_cellular_costs ? kNetworkCostCellular5G - : kNetworkCostCellular) + + case webrtc::ADAPTER_TYPE_CELLULAR_5G: + return (use_differentiated_cellular_costs + ? webrtc::kNetworkCostCellular5G + : webrtc::kNetworkCostCellular) + vpnCost; - case rtc::ADAPTER_TYPE_ANY: + case webrtc::ADAPTER_TYPE_ANY: // Candidates gathered from the any-address/wildcard ports, as backups, // are given the maximum cost so that if there are other candidates with // known interface types, we would not select candidate pairs using these @@ -134,13 +158,13 @@ uint16_t ComputeNetworkCostByType(int type, // ADAPTER_TYPE_CELLULAR would then have a higher cost. See // P2PTransportChannel::SortConnectionsAndUpdateState for how we rank and // select candidate pairs, where the network cost is among the criteria. - return kNetworkCostMax + vpnCost; - case rtc::ADAPTER_TYPE_VPN: + return webrtc::kNetworkCostMax + vpnCost; + case webrtc::ADAPTER_TYPE_VPN: // The cost of a VPN should be computed using its underlying network type. 
RTC_DCHECK_NOTREACHED(); - return kNetworkCostUnknown; + return webrtc::kNetworkCostUnknown; default: - return kNetworkCostUnknown + vpnCost; + return webrtc::kNetworkCostUnknown + vpnCost; } } @@ -153,19 +177,19 @@ bool IsIgnoredIPv6(bool allow_mac_based_ipv6, const InterfaceAddress& ip) { // Link-local addresses require scope id to be bound successfully. // However, our IPAddress structure doesn't carry that so the // information is lost and causes binding failure. - if (IPIsLinkLocal(ip)) { + if (webrtc::IPIsLinkLocal(ip)) { RTC_LOG(LS_VERBOSE) << "Ignore link local IP:" << ip.ToSensitiveString(); return true; } // Any MAC based IPv6 should be avoided to prevent the MAC tracking. - if (IPIsMacBased(ip) && !allow_mac_based_ipv6) { + if (webrtc::IPIsMacBased(ip) && !allow_mac_based_ipv6) { RTC_LOG(LS_INFO) << "Ignore Mac based IP:" << ip.ToSensitiveString(); return true; } // Ignore deprecated IPv6. - if (ip.ipv6_flags() & IPV6_ADDRESS_FLAG_DEPRECATED) { + if (ip.ipv6_flags() & webrtc::IPV6_ADDRESS_FLAG_DEPRECATED) { RTC_LOG(LS_INFO) << "Ignore deprecated IP:" << ip.ToSensitiveString(); return true; } @@ -177,8 +201,8 @@ bool IsIgnoredIPv6(bool allow_mac_based_ipv6, const InterfaceAddress& ip) { // Note: consider changing to const Network* as arguments // if/when considering other changes that should not trigger // OnNetworksChanged. -bool ShouldAdapterChangeTriggerNetworkChange(rtc::AdapterType old_type, - rtc::AdapterType new_type) { +bool ShouldAdapterChangeTriggerNetworkChange(AdapterType old_type, + AdapterType new_type) { // skip triggering OnNetworksChanged if // changing from one cellular to another. if (Network::IsCellular(old_type) && Network::IsCellular(new_type)) @@ -226,7 +250,7 @@ bool CompareNetworks(const std::unique_ptr& a, std::string MakeNetworkKey(absl::string_view name, const IPAddress& prefix, int prefix_length) { - rtc::StringBuilder ost; + StringBuilder ost; ost << name << "%" << prefix.ToString() << "/" << prefix_length; return ost.Release(); } @@ -251,23 +275,23 @@ AdapterType GetAdapterTypeFromName(absl::string_view network_name) { // Note that we have a more robust way to determine if a network interface // is a loopback interface by checking the flag IFF_LOOPBACK in ifa_flags of // an ifaddr struct. See ConvertIfAddrs in this file. - return ADAPTER_TYPE_LOOPBACK; + return webrtc::ADAPTER_TYPE_LOOPBACK; } if (MatchTypeNameWithIndexPattern(network_name, "eth")) { - return ADAPTER_TYPE_ETHERNET; + return webrtc::ADAPTER_TYPE_ETHERNET; } if (MatchTypeNameWithIndexPattern(network_name, "wlan") || MatchTypeNameWithIndexPattern(network_name, "v4-wlan")) { - return ADAPTER_TYPE_WIFI; + return webrtc::ADAPTER_TYPE_WIFI; } if (MatchTypeNameWithIndexPattern(network_name, "ipsec") || MatchTypeNameWithIndexPattern(network_name, "tun") || MatchTypeNameWithIndexPattern(network_name, "utun") || MatchTypeNameWithIndexPattern(network_name, "tap")) { - return ADAPTER_TYPE_VPN; + return webrtc::ADAPTER_TYPE_VPN; } #if defined(WEBRTC_IOS) // Cell networks are pdp_ipN on iOS. 
@@ -292,7 +316,7 @@ AdapterType GetAdapterTypeFromName(absl::string_view network_name) { } #endif - return ADAPTER_TYPE_UNKNOWN; + return webrtc::ADAPTER_TYPE_UNKNOWN; } NetworkManager::EnumerationPermission NetworkManager::enumeration_permission() @@ -300,22 +324,17 @@ NetworkManager::EnumerationPermission NetworkManager::enumeration_permission() return ENUMERATION_ALLOWED; } -bool NetworkManager::GetDefaultLocalAddress(int family, IPAddress* addr) const { +bool NetworkManager::GetDefaultLocalAddress(int /* family */, + IPAddress* /* addr */) const { return false; } -webrtc::MdnsResponderInterface* NetworkManager::GetMdnsResponder() const { +MdnsResponderInterface* NetworkManager::GetMdnsResponder() const { return nullptr; } -NetworkManagerBase::NetworkManagerBase( - const webrtc::FieldTrialsView* field_trials) - : field_trials_(field_trials), - enumeration_permission_(NetworkManager::ENUMERATION_ALLOWED), - signal_network_preference_change_( - field_trials - ? field_trials->IsEnabled("WebRTC-SignalNetworkPreferenceChange") - : false) {} +NetworkManagerBase::NetworkManagerBase() + : enumeration_permission_(NetworkManager::ENUMERATION_ALLOWED) {} NetworkManager::EnumerationPermission NetworkManagerBase::enumeration_permission() const { @@ -335,9 +354,9 @@ std::unique_ptr NetworkManagerBase::CreateNetwork( std::vector NetworkManagerBase::GetAnyAddressNetworks() { std::vector networks; if (!ipv4_any_address_network_) { - const rtc::IPAddress ipv4_any_address(INADDR_ANY); - ipv4_any_address_network_ = - CreateNetwork("any", "any", ipv4_any_address, 0, ADAPTER_TYPE_ANY); + const IPAddress ipv4_any_address(INADDR_ANY); + ipv4_any_address_network_ = CreateNetwork("any", "any", ipv4_any_address, 0, + webrtc::ADAPTER_TYPE_ANY); ipv4_any_address_network_->set_default_local_address_provider(this); ipv4_any_address_network_->set_mdns_responder_provider(this); ipv4_any_address_network_->AddIP(ipv4_any_address); @@ -345,9 +364,9 @@ std::vector NetworkManagerBase::GetAnyAddressNetworks() { networks.push_back(ipv4_any_address_network_.get()); if (!ipv6_any_address_network_) { - const rtc::IPAddress ipv6_any_address(in6addr_any); - ipv6_any_address_network_ = - CreateNetwork("any", "any", ipv6_any_address, 0, ADAPTER_TYPE_ANY); + const IPAddress ipv6_any_address(in6addr_any); + ipv6_any_address_network_ = CreateNetwork("any", "any", ipv6_any_address, 0, + webrtc::ADAPTER_TYPE_ANY); ipv6_any_address_network_->set_default_local_address_provider(this); ipv6_any_address_network_->set_mdns_responder_provider(this); ipv6_any_address_network_->AddIP(ipv6_any_address); @@ -377,7 +396,7 @@ void NetworkManagerBase::MergeNetworkList( // AddressList in this map will track IP addresses for all Networks // with the same key. std::map consolidated_address_list; - absl::c_sort(new_networks, rtc::webrtc_network_internal::CompareNetworks); + absl::c_sort(new_networks, webrtc_network_internal::CompareNetworks); // First, build a set of network-keys to the ipaddresses. 
for (auto& network : new_networks) { bool might_add_to_merged_list = false; @@ -427,7 +446,7 @@ void NetworkManagerBase::MergeNetworkList( Network* existing_net = existing->second.get(); *changed = existing_net->SetIPs(kv.second.ips, *changed); merged_list.push_back(existing_net); - if (net->type() != ADAPTER_TYPE_UNKNOWN && + if (net->type() != webrtc::ADAPTER_TYPE_UNKNOWN && net->type() != existing_net->type()) { if (ShouldAdapterChangeTriggerNetworkChange(existing_net->type(), net->type())) { @@ -441,9 +460,6 @@ void NetworkManagerBase::MergeNetworkList( } if (net->network_preference() != existing_net->network_preference()) { existing_net->set_network_preference(net->network_preference()); - if (signal_network_preference_change_) { - *changed = true; - } } RTC_DCHECK(net->active()); } @@ -517,12 +533,11 @@ bool NetworkManagerBase::GetDefaultLocalAddress(int family, return false; } -Network* NetworkManagerBase::GetNetworkFromAddress( - const rtc::IPAddress& ip) const { +Network* NetworkManagerBase::GetNetworkFromAddress(const IPAddress& ip) const { for (Network* network : networks_) { const auto& ips = network->GetIPs(); if (absl::c_any_of(ips, [&](const InterfaceAddress& existing_ip) { - return ip == static_cast(existing_ip); + return ip == static_cast(existing_ip); })) { return network; } @@ -530,8 +545,7 @@ Network* NetworkManagerBase::GetNetworkFromAddress( return nullptr; } -bool NetworkManagerBase::IsVpnMacAddress( - rtc::ArrayView address) { +bool NetworkManagerBase::IsVpnMacAddress(ArrayView address) { if (address.data() == nullptr && address.size() == 0) { return false; } @@ -545,17 +559,16 @@ bool NetworkManagerBase::IsVpnMacAddress( } BasicNetworkManager::BasicNetworkManager( - NetworkMonitorFactory* network_monitor_factory, - SocketFactory* socket_factory, - const webrtc::FieldTrialsView* field_trials_view) - : NetworkManagerBase(field_trials_view), - field_trials_(field_trials_view), + const Environment& env, + SocketFactory* absl_nonnull socket_factory, + NetworkMonitorFactory* absl_nullable network_monitor_factory) + : env_(env), network_monitor_factory_(network_monitor_factory), socket_factory_(socket_factory), allow_mac_based_ipv6_( - field_trials()->IsEnabled("WebRTC-AllowMACBasedIPv6")), + env_.field_trials().IsEnabled("WebRTC-AllowMACBasedIPv6")), bind_using_ifname_( - !field_trials()->IsDisabled("WebRTC-BindUsingInterfaceName")) { + !env_.field_trials().IsDisabled("WebRTC-BindUsingInterfaceName")) { RTC_DCHECK(socket_factory_); } @@ -586,8 +599,8 @@ NetworkMonitorInterface::InterfaceInfo BasicNetworkManager::GetInterfaceInfo( struct ifaddrs* cursor) const { if (cursor->ifa_flags & IFF_LOOPBACK) { return { - .adapter_type = ADAPTER_TYPE_LOOPBACK, - .underlying_type_for_vpn = ADAPTER_TYPE_UNKNOWN, + .adapter_type = webrtc::ADAPTER_TYPE_LOOPBACK, + .underlying_type_for_vpn = webrtc::ADAPTER_TYPE_UNKNOWN, .network_preference = NetworkPreference::NEUTRAL, .available = true, }; @@ -595,7 +608,7 @@ NetworkMonitorInterface::InterfaceInfo BasicNetworkManager::GetInterfaceInfo( return network_monitor_->GetInterfaceInfo(cursor->ifa_name); } else { return {.adapter_type = GetAdapterTypeFromName(cursor->ifa_name), - .underlying_type_for_vpn = ADAPTER_TYPE_UNKNOWN, + .underlying_type_for_vpn = webrtc::ADAPTER_TYPE_UNKNOWN, .network_preference = NetworkPreference::NEUTRAL, .available = true}; } @@ -625,7 +638,7 @@ void BasicNetworkManager::ConvertIfAddrs( continue; } // Convert to InterfaceAddress. - // TODO(webrtc:13114): Convert ConvertIfAddrs to use rtc::Netmask. 
+ // TODO(webrtc:13114): Convert ConvertIfAddrs to use webrtc::Netmask. if (!ifaddrs_converter->ConvertIfAddrsToIPAddress(cursor, &ip, &mask)) { continue; } @@ -645,8 +658,8 @@ void BasicNetworkManager::ConvertIfAddrs( reinterpret_cast(cursor->ifa_addr)->sin6_scope_id; } - int prefix_length = CountIPMaskBits(mask); - prefix = TruncateIP(ip, prefix_length); + int prefix_length = webrtc::CountIPMaskBits(mask); + prefix = webrtc::TruncateIP(ip, prefix_length); std::string key = MakeNetworkKey(std::string(cursor->ifa_name), prefix, prefix_length); @@ -681,10 +694,10 @@ void BasicNetworkManager::ConvertIfAddrs( NetworkMonitorInterface::InterfaceInfo if_info = GetInterfaceInfo(cursor); // Check manually configured VPN override. - if (if_info.adapter_type != ADAPTER_TYPE_VPN && + if (if_info.adapter_type != webrtc::ADAPTER_TYPE_VPN && IsConfiguredVpn(prefix, prefix_length)) { if_info.underlying_type_for_vpn = if_info.adapter_type; - if_info.adapter_type = ADAPTER_TYPE_VPN; + if_info.adapter_type = webrtc::ADAPTER_TYPE_VPN; } auto network = CreateNetwork(cursor->ifa_name, cursor->ifa_name, prefix, @@ -799,7 +812,7 @@ bool BasicNetworkManager::CreateNetworks( wcslen(adapter_addrs->Description)); for (; address; address = address->Next) { - std::string name = rtc::ToString(count); + std::string name = absl::StrCat(count); #if !defined(NDEBUG) name = ToUtf8(adapter_addrs->FriendlyName, wcslen(adapter_addrs->FriendlyName)); @@ -825,7 +838,7 @@ bool BasicNetworkManager::CreateNetworks( // PrefixOrigin is equal to IpPrefixOriginRouterAdvertisement and // SuffixOrigin equal to IpSuffixOriginRandom. int ip_address_attributes = IPV6_ADDRESS_FLAG_NONE; - if (IpAddressAttributesEnabled(field_trials_.get())) { + if (IpAddressAttributesEnabled(&env_.field_trials())) { if (address->PrefixOrigin == IpPrefixOriginRouterAdvertisement && address->SuffixOrigin == IpSuffixOriginRandom) { ip_address_attributes |= IPV6_ADDRESS_FLAG_TEMPORARY; @@ -884,7 +897,7 @@ bool BasicNetworkManager::CreateNetworks( adapter_type = ADAPTER_TYPE_VPN; } if (adapter_type != ADAPTER_TYPE_VPN && - IsVpnMacAddress(rtc::ArrayView( + IsVpnMacAddress(webrtc::ArrayView( reinterpret_cast( adapter_addrs->PhysicalAddress), adapter_addrs->PhysicalAddressLength))) { @@ -970,7 +983,7 @@ void BasicNetworkManager::StartUpdating() { })); } else { RTC_DCHECK(task_safety_flag_ == nullptr); - task_safety_flag_ = webrtc::PendingTaskSafetyFlag::Create(); + task_safety_flag_ = PendingTaskSafetyFlag::Create(); thread_->PostTask(SafeTask(task_safety_flag_, [this] { RTC_DCHECK_RUN_ON(thread_); UpdateNetworksContinually(); @@ -1000,7 +1013,7 @@ void BasicNetworkManager::StartNetworkMonitor() { } if (!network_monitor_) { network_monitor_.reset( - network_monitor_factory_->CreateNetworkMonitor(*field_trials())); + network_monitor_factory_->CreateNetworkMonitor(env_.field_trials())); if (!network_monitor_) { return; } @@ -1164,32 +1177,32 @@ IPAddress Network::GetBestIP() const { for (const InterfaceAddress& ip : ips_) { // Ignore any address which has been deprecated already. - if (ip.ipv6_flags() & IPV6_ADDRESS_FLAG_DEPRECATED) + if (ip.ipv6_flags() & webrtc::IPV6_ADDRESS_FLAG_DEPRECATED) continue; - if (IPIsLinkLocal(ip)) { + if (webrtc::IPIsLinkLocal(ip)) { link_local_ip = ip; continue; } // ULA address should only be returned when we have no other // global IP. - if (IPIsULA(static_cast(ip))) { + if (webrtc::IPIsULA(static_cast(ip))) { ula_ip = ip; continue; } selected_ip = ip; // Search could stop once a temporary non-deprecated one is found. 
- if (ip.ipv6_flags() & IPV6_ADDRESS_FLAG_TEMPORARY) + if (ip.ipv6_flags() & webrtc::IPV6_ADDRESS_FLAG_TEMPORARY) break; } - if (IPIsUnspec(selected_ip)) { - if (!IPIsUnspec(link_local_ip)) { + if (webrtc::IPIsUnspec(selected_ip)) { + if (!webrtc::IPIsUnspec(link_local_ip)) { // No proper global IPv6 address found, use link local address instead. selected_ip = link_local_ip; - } else if (!IPIsUnspec(ula_ip)) { + } else if (!webrtc::IPIsUnspec(ula_ip)) { // No proper global and link local address found, use ULA instead. selected_ip = ula_ip; } @@ -1198,20 +1211,14 @@ IPAddress Network::GetBestIP() const { return static_cast(selected_ip); } -webrtc::MdnsResponderInterface* Network::GetMdnsResponder() const { +MdnsResponderInterface* Network::GetMdnsResponder() const { if (mdns_responder_provider_ == nullptr) { return nullptr; } return mdns_responder_provider_->GetMdnsResponder(); } -uint16_t Network::GetCost(const webrtc::FieldTrialsView* field_trials) const { - return GetCost( - *webrtc::AlwaysValidPointer(field_trials)); -} - -uint16_t Network::GetCost(const webrtc::FieldTrialsView& field_trials) const { +uint16_t Network::GetCost(const FieldTrialsView& field_trials) const { AdapterType type = IsVpn() ? underlying_type_for_vpn_ : type_; const bool use_differentiated_cellular_costs = field_trials.IsEnabled("WebRTC-UseDifferentiatedCellularCosts"); @@ -1223,59 +1230,59 @@ uint16_t Network::GetCost(const webrtc::FieldTrialsView& field_trials) const { } // This is the inverse of ComputeNetworkCostByType(). -std::pair -Network::GuessAdapterFromNetworkCost(int network_cost) { +std::pair Network::GuessAdapterFromNetworkCost( + int network_cost) { switch (network_cost) { - case kNetworkCostMin: - return {rtc::ADAPTER_TYPE_ETHERNET, false}; - case kNetworkCostMin + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_ETHERNET, true}; - case kNetworkCostLow: - return {rtc::ADAPTER_TYPE_WIFI, false}; - case kNetworkCostLow + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_WIFI, true}; - case kNetworkCostCellular: - return {rtc::ADAPTER_TYPE_CELLULAR, false}; - case kNetworkCostCellular + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_CELLULAR, true}; - case kNetworkCostCellular2G: - return {rtc::ADAPTER_TYPE_CELLULAR_2G, false}; - case kNetworkCostCellular2G + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_CELLULAR_2G, true}; - case kNetworkCostCellular3G: - return {rtc::ADAPTER_TYPE_CELLULAR_3G, false}; - case kNetworkCostCellular3G + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_CELLULAR_3G, true}; - case kNetworkCostCellular4G: - return {rtc::ADAPTER_TYPE_CELLULAR_4G, false}; - case kNetworkCostCellular4G + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_CELLULAR_4G, true}; - case kNetworkCostCellular5G: - return {rtc::ADAPTER_TYPE_CELLULAR_5G, false}; - case kNetworkCostCellular5G + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_CELLULAR_5G, true}; - case kNetworkCostUnknown: - return {rtc::ADAPTER_TYPE_UNKNOWN, false}; - case kNetworkCostUnknown + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_UNKNOWN, true}; - case kNetworkCostMax: - return {rtc::ADAPTER_TYPE_ANY, false}; - case kNetworkCostMax + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_ANY, true}; + case webrtc::kNetworkCostMin: + return {webrtc::ADAPTER_TYPE_ETHERNET, false}; + case webrtc::kNetworkCostMin + webrtc::kNetworkCostVpn: + return {webrtc::ADAPTER_TYPE_ETHERNET, true}; + case webrtc::kNetworkCostLow: + return {webrtc::ADAPTER_TYPE_WIFI, false}; + case webrtc::kNetworkCostLow + webrtc::kNetworkCostVpn: + return {webrtc::ADAPTER_TYPE_WIFI, true}; + 
case webrtc::kNetworkCostCellular: + return {webrtc::ADAPTER_TYPE_CELLULAR, false}; + case webrtc::kNetworkCostCellular + webrtc::kNetworkCostVpn: + return {webrtc::ADAPTER_TYPE_CELLULAR, true}; + case webrtc::kNetworkCostCellular2G: + return {webrtc::ADAPTER_TYPE_CELLULAR_2G, false}; + case webrtc::kNetworkCostCellular2G + webrtc::kNetworkCostVpn: + return {webrtc::ADAPTER_TYPE_CELLULAR_2G, true}; + case webrtc::kNetworkCostCellular3G: + return {webrtc::ADAPTER_TYPE_CELLULAR_3G, false}; + case webrtc::kNetworkCostCellular3G + webrtc::kNetworkCostVpn: + return {webrtc::ADAPTER_TYPE_CELLULAR_3G, true}; + case webrtc::kNetworkCostCellular4G: + return {webrtc::ADAPTER_TYPE_CELLULAR_4G, false}; + case webrtc::kNetworkCostCellular4G + webrtc::kNetworkCostVpn: + return {webrtc::ADAPTER_TYPE_CELLULAR_4G, true}; + case webrtc::kNetworkCostCellular5G: + return {webrtc::ADAPTER_TYPE_CELLULAR_5G, false}; + case webrtc::kNetworkCostCellular5G + webrtc::kNetworkCostVpn: + return {webrtc::ADAPTER_TYPE_CELLULAR_5G, true}; + case webrtc::kNetworkCostUnknown: + return {webrtc::ADAPTER_TYPE_UNKNOWN, false}; + case webrtc::kNetworkCostUnknown + webrtc::kNetworkCostVpn: + return {webrtc::ADAPTER_TYPE_UNKNOWN, true}; + case webrtc::kNetworkCostMax: + return {webrtc::ADAPTER_TYPE_ANY, false}; + case webrtc::kNetworkCostMax + webrtc::kNetworkCostVpn: + return {webrtc::ADAPTER_TYPE_ANY, true}; } RTC_LOG(LS_VERBOSE) << "Unknown network cost: " << network_cost; - return {rtc::ADAPTER_TYPE_UNKNOWN, false}; + return {webrtc::ADAPTER_TYPE_UNKNOWN, false}; } std::string Network::ToString() const { - rtc::StringBuilder ss; + StringBuilder ss; // Print out the first space-terminated token of the network desc, plus // the IP address. ss << "Net[" << description_.substr(0, description_.find(' ')) << ":" << prefix_.ToSensitiveString() << "/" << prefix_length_ << ":" - << AdapterTypeToString(type_); + << webrtc::AdapterTypeToString(type_); if (IsVpn()) { - ss << "/" << AdapterTypeToString(underlying_type_for_vpn_); + ss << "/" << webrtc::AdapterTypeToString(underlying_type_for_vpn_); } ss << ":id=" << id_ << "]"; return ss.Release(); @@ -1294,7 +1301,7 @@ bool BasicNetworkManager::IsConfiguredVpn(IPAddress prefix, RTC_DCHECK_RUN_ON(thread_); for (const auto& vpn : vpn_) { if (prefix_length >= vpn.prefix_length()) { - auto copy = TruncateIP(prefix, vpn.prefix_length()); + auto copy = webrtc::TruncateIP(prefix, vpn.prefix_length()); if (copy == vpn.address()) { return true; } @@ -1303,4 +1310,4 @@ bool BasicNetworkManager::IsConfiguredVpn(IPAddress prefix, return false; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/network.h b/rtc_base/network.h index 4a97a45d4d..f8ccfc754a 100644 --- a/rtc_base/network.h +++ b/rtc_base/network.h @@ -13,45 +13,46 @@ #include -#include #include #include #include +#include #include -#include "absl/base/attributes.h" +#include "absl/base/nullability.h" #include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/environment/environment.h" #include "api/field_trials_view.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" -#include "api/transport/field_trial_based_config.h" +#include "rtc_base/checks.h" #include "rtc_base/ip_address.h" #include "rtc_base/mdns_responder_interface.h" -#include "rtc_base/memory/always_valid_pointer.h" +#include "rtc_base/network_constants.h" #include "rtc_base/network_monitor.h" #include "rtc_base/network_monitor_factory.h" #include 
"rtc_base/socket_factory.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" #if defined(WEBRTC_POSIX) +#include "rtc_base/ifaddrs_converter.h" struct ifaddrs; #endif // defined(WEBRTC_POSIX) -namespace rtc { +namespace webrtc { extern const char kPublicIPv4Host[]; extern const char kPublicIPv6Host[]; -class IfAddrsConverter; class Network; -class NetworkMonitorInterface; -class Thread; // By default, ignore loopback interfaces on the host. -const int kDefaultNetworkIgnoreMask = ADAPTER_TYPE_LOOPBACK; +const int kDefaultNetworkIgnoreMask = webrtc::ADAPTER_TYPE_LOOPBACK; namespace webrtc_network_internal { bool CompareNetworks(const std::unique_ptr& a, @@ -69,6 +70,7 @@ std::string MakeNetworkKey(absl::string_view name, // name (e.g., "wlan0"). Can be used by NetworkManager subclasses when other // mechanisms fail to determine the type. RTC_EXPORT AdapterType GetAdapterTypeFromName(absl::string_view network_name); +RTC_EXPORT AdapterType GetAdapterTypeFromName(absl::string_view network_name); class DefaultLocalAddressProvider { public: @@ -88,7 +90,7 @@ class MdnsResponderProvider { // addresses of ICE host candidates by mDNS hostnames. // // The provider MUST outlive the mDNS responder. - virtual webrtc::MdnsResponderInterface* GetMdnsResponder() const = 0; + virtual MdnsResponderInterface* GetMdnsResponder() const = 0; }; // Network/mask in CIDR representation. @@ -183,197 +185,9 @@ class RTC_EXPORT NetworkManager : public DefaultLocalAddressProvider, }; // MdnsResponderProvider interface. - webrtc::MdnsResponderInterface* GetMdnsResponder() const override; - - virtual void set_vpn_list(const std::vector& vpn) {} -}; - -// Base class for NetworkManager implementations. -class RTC_EXPORT NetworkManagerBase : public NetworkManager { - public: - NetworkManagerBase(const webrtc::FieldTrialsView* field_trials = nullptr); - - std::vector GetNetworks() const override; - std::vector GetAnyAddressNetworks() override; - - EnumerationPermission enumeration_permission() const override; - - bool GetDefaultLocalAddress(int family, IPAddress* ipaddr) const override; - - // Check if MAC address in |bytes| is one of the pre-defined - // MAC addresses for know VPNs. - static bool IsVpnMacAddress(rtc::ArrayView address); - - protected: - // Updates `networks_` with the networks listed in `list`. If - // `networks_map_` already has a Network object for a network listed - // in the `list` then it is reused. Accept ownership of the Network - // objects in the `list`. `changed` will be set to true if there is - // any change in the network list. - void MergeNetworkList(std::vector> list, - bool* changed); - - // `stats` will be populated even if |*changed| is false. - void MergeNetworkList(std::vector> list, - bool* changed, - NetworkManager::Stats* stats); - - void set_enumeration_permission(EnumerationPermission state) { - enumeration_permission_ = state; - } - - void set_default_local_addresses(const IPAddress& ipv4, - const IPAddress& ipv6); - - Network* GetNetworkFromAddress(const rtc::IPAddress& ip) const; + MdnsResponderInterface* GetMdnsResponder() const override; - // To enable subclasses to get the networks list, without interfering with - // refactoring of the interface GetNetworks method. 
- const std::vector& GetNetworksInternal() const { return networks_; } - - std::unique_ptr CreateNetwork(absl::string_view name, - absl::string_view description, - const IPAddress& prefix, - int prefix_length, - AdapterType type) const; - - const webrtc::FieldTrialsView* field_trials() const { - return field_trials_.get(); - } - - private: - friend class NetworkTest; - webrtc::AlwaysValidPointer - field_trials_; - EnumerationPermission enumeration_permission_; - - std::vector networks_; - - std::map> networks_map_; - - std::unique_ptr ipv4_any_address_network_; - std::unique_ptr ipv6_any_address_network_; - - IPAddress default_local_ipv4_address_; - IPAddress default_local_ipv6_address_; - // We use 16 bits to save the bandwidth consumption when sending the network - // id over the Internet. It is OK that the 16-bit integer overflows to get a - // network id 0 because we only compare the network ids in the old and the new - // best connections in the transport channel. - uint16_t next_available_network_id_ = 1; - - // True if calling network_preference() with a changed value - // should result in firing the SignalNetworkChanged signal. - bool signal_network_preference_change_ = false; -}; - -// Basic implementation of the NetworkManager interface that gets list -// of networks using OS APIs. -class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, - public NetworkBinderInterface, - public sigslot::has_slots<> { - public: - // This is used by lots of downstream code. - BasicNetworkManager(SocketFactory* socket_factory, - const webrtc::FieldTrialsView* field_trials = nullptr) - : BasicNetworkManager(/* network_monitor_factory= */ nullptr, - socket_factory, - field_trials) {} - - BasicNetworkManager(NetworkMonitorFactory* network_monitor_factory, - SocketFactory* socket_factory, - const webrtc::FieldTrialsView* field_trials = nullptr); - ~BasicNetworkManager() override; - - void StartUpdating() override; - void StopUpdating() override; - - void DumpNetworks() override; - - bool started() { return start_count_ > 0; } - - // Sets the network ignore list, which is empty by default. Any network on the - // ignore list will be filtered from network enumeration results. - // Should be called only before initialization. - void set_network_ignore_list(const std::vector& list) { - RTC_DCHECK(thread_ == nullptr); - network_ignore_list_ = list; - } - - // Set a list of manually configured VPN's. - void set_vpn_list(const std::vector& vpn) override; - - // Check if |prefix| is configured as VPN. - bool IsConfiguredVpn(IPAddress prefix, int prefix_length) const; - - // Bind a socket to interface that ip address belong to. - // Implementation look up interface name and calls - // BindSocketToNetwork on NetworkMonitor. - // The interface name is needed as e.g ipv4 over ipv6 addresses - // are not exposed using Android functions, but it is possible - // bind an ipv4 address to the interface. - NetworkBindingResult BindSocketToNetwork(int socket_fd, - const IPAddress& address) override; - - protected: -#if defined(WEBRTC_POSIX) - // Separated from CreateNetworks for tests. - void ConvertIfAddrs(ifaddrs* interfaces, - IfAddrsConverter* converter, - bool include_ignored, - std::vector>* networks) const - RTC_RUN_ON(thread_); - NetworkMonitorInterface::InterfaceInfo GetInterfaceInfo( - struct ifaddrs* cursor) const RTC_RUN_ON(thread_); -#endif // defined(WEBRTC_POSIX) - - // Creates a network object for each network available on the machine. 
- bool CreateNetworks(bool include_ignored, - std::vector>* networks) const - RTC_RUN_ON(thread_); - - // Determines if a network should be ignored. This should only be determined - // based on the network's property instead of any individual IP. - bool IsIgnoredNetwork(const Network& network) const RTC_RUN_ON(thread_); - - // This function connects a UDP socket to a public address and returns the - // local address associated it. Since it binds to the "any" address - // internally, it returns the default local address on a multi-homed endpoint. - IPAddress QueryDefaultLocalAddress(int family) const RTC_RUN_ON(thread_); - - private: - friend class NetworkTest; - - // Creates a network monitor and listens for network updates. - void StartNetworkMonitor() RTC_RUN_ON(thread_); - // Stops and removes the network monitor. - void StopNetworkMonitor() RTC_RUN_ON(thread_); - // Called when it receives updates from the network monitor. - void OnNetworksChanged(); - - // Updates the networks and reschedules the next update. - void UpdateNetworksContinually() RTC_RUN_ON(thread_); - // Only updates the networks; does not reschedule the next update. - void UpdateNetworksOnce() RTC_RUN_ON(thread_); - - Thread* thread_ = nullptr; - bool sent_first_update_ = true; - int start_count_ = 0; - - webrtc::AlwaysValidPointer - field_trials_; - std::vector network_ignore_list_; - NetworkMonitorFactory* const network_monitor_factory_; - SocketFactory* const socket_factory_; - std::unique_ptr network_monitor_ - RTC_GUARDED_BY(thread_); - bool allow_mac_based_ipv6_ RTC_GUARDED_BY(thread_) = false; - bool bind_using_ifname_ RTC_GUARDED_BY(thread_) = false; - - std::vector vpn_; - rtc::scoped_refptr task_safety_flag_; + virtual void set_vpn_list(const std::vector& /* vpn */) {} }; // Represents a Unix-type network interface, with a name and single address. @@ -387,7 +201,7 @@ class RTC_EXPORT Network { description, prefix, prefix_length, - rtc::ADAPTER_TYPE_UNKNOWN) {} + webrtc::ADAPTER_TYPE_UNKNOWN) {} Network(absl::string_view name, absl::string_view description, @@ -399,7 +213,7 @@ class RTC_EXPORT Network { ~Network(); // This signal is fired whenever type() or underlying_type_for_vpn() changes. - // Mutable, to support connecting on the const Network passed to cricket::Port + // Mutable, to support connecting on the const Network passed to webrtc::Port // constructor. mutable sigslot::signal1 SignalTypeChanged; @@ -460,7 +274,7 @@ class RTC_EXPORT Network { // Adds an active IP address to this network. Does not check for duplicates. void AddIP(const InterfaceAddress& ip) { ips_.push_back(ip); } - void AddIP(const IPAddress& ip) { ips_.push_back(rtc::InterfaceAddress(ip)); } + void AddIP(const IPAddress& ip) { ips_.push_back(InterfaceAddress(ip)); } // Sets the network's IP address list. Returns true if new IP addresses were // detected. Passing true to already_changed skips this check. @@ -473,7 +287,7 @@ class RTC_EXPORT Network { // addresses of host candidates by mDNS names in ICE gathering. After a // name-address mapping is created by the mDNS responder, queries for the // created name will be resolved by the responder. - webrtc::MdnsResponderInterface* GetMdnsResponder() const; + MdnsResponderInterface* GetMdnsResponder() const; // Returns the scope-id of the network's address. // Should only be relevant for link-local IPv6 addresses. 
@@ -500,8 +314,8 @@ class RTC_EXPORT Network { return; } type_ = type; - if (type != ADAPTER_TYPE_VPN) { - underlying_type_for_vpn_ = ADAPTER_TYPE_UNKNOWN; + if (type != webrtc::ADAPTER_TYPE_VPN) { + underlying_type_for_vpn_ = webrtc::ADAPTER_TYPE_UNKNOWN; } SignalTypeChanged(this); } @@ -514,17 +328,17 @@ class RTC_EXPORT Network { SignalTypeChanged(this); } - bool IsVpn() const { return type_ == ADAPTER_TYPE_VPN; } + bool IsVpn() const { return type_ == webrtc::ADAPTER_TYPE_VPN; } bool IsCellular() const { return IsCellular(type_); } static bool IsCellular(AdapterType type) { switch (type) { - case ADAPTER_TYPE_CELLULAR: - case ADAPTER_TYPE_CELLULAR_2G: - case ADAPTER_TYPE_CELLULAR_3G: - case ADAPTER_TYPE_CELLULAR_4G: - case ADAPTER_TYPE_CELLULAR_5G: + case webrtc::ADAPTER_TYPE_CELLULAR: + case webrtc::ADAPTER_TYPE_CELLULAR_2G: + case webrtc::ADAPTER_TYPE_CELLULAR_3G: + case webrtc::ADAPTER_TYPE_CELLULAR_4G: + case webrtc::ADAPTER_TYPE_CELLULAR_5G: return true; default: return false; @@ -535,10 +349,7 @@ class RTC_EXPORT Network { // Twice per Network in BasicPortAllocator if // PORTALLOCATOR_DISABLE_COSTLY_NETWORKS. Once in Port::Construct() (and when // Port::OnNetworkTypeChanged is called). - ABSL_DEPRECATED( - "Use the version with field trials, see bugs.webrtc.org/webrtc:10335") - uint16_t GetCost(const webrtc::FieldTrialsView* field_trials = nullptr) const; - uint16_t GetCost(const webrtc::FieldTrialsView& field_trials) const; + uint16_t GetCost(const FieldTrialsView& field_trials) const; // A unique id assigned by the network manager, which may be signaled // to the remote side in the candidate. @@ -569,8 +380,8 @@ class RTC_EXPORT Network { SignalNetworkPreferenceChanged(this); } - static std::pair - GuessAdapterFromNetworkCost(int network_cost); + static std::pair GuessAdapterFromNetworkCost( + int network_cost); // Debugging description of this network std::string ToString() const; @@ -587,7 +398,7 @@ class RTC_EXPORT Network { int scope_id_; bool ignored_; AdapterType type_; - AdapterType underlying_type_for_vpn_ = ADAPTER_TYPE_UNKNOWN; + AdapterType underlying_type_for_vpn_ = webrtc::ADAPTER_TYPE_UNKNOWN; int preference_; bool active_ = true; uint16_t id_ = 0; @@ -596,6 +407,194 @@ class RTC_EXPORT Network { friend class NetworkManager; }; +// Base class for NetworkManager implementations. +class RTC_EXPORT NetworkManagerBase : public NetworkManager { + public: + NetworkManagerBase(); + + std::vector GetNetworks() const override; + std::vector GetAnyAddressNetworks() override; + + EnumerationPermission enumeration_permission() const override; + + bool GetDefaultLocalAddress(int family, IPAddress* ipaddr) const override; + + // Check if MAC address in |bytes| is one of the pre-defined + // MAC addresses for know VPNs. + static bool IsVpnMacAddress(ArrayView address); + + protected: + // Updates `networks_` with the networks listed in `list`. If + // `networks_map_` already has a Network object for a network listed + // in the `list` then it is reused. Accept ownership of the Network + // objects in the `list`. `changed` will be set to true if there is + // any change in the network list. + void MergeNetworkList(std::vector> list, + bool* changed); + + // `stats` will be populated even if |*changed| is false. 
+ void MergeNetworkList(std::vector> list, + bool* changed, + NetworkManager::Stats* stats); + + void set_enumeration_permission(EnumerationPermission state) { + enumeration_permission_ = state; + } + + void set_default_local_addresses(const IPAddress& ipv4, + const IPAddress& ipv6); + + Network* GetNetworkFromAddress(const IPAddress& ip) const; + + // To enable subclasses to get the networks list, without interfering with + // refactoring of the interface GetNetworks method. + const std::vector& GetNetworksInternal() const { return networks_; } + + std::unique_ptr CreateNetwork(absl::string_view name, + absl::string_view description, + const IPAddress& prefix, + int prefix_length, + AdapterType type) const; + + private: + friend class NetworkTest; + EnumerationPermission enumeration_permission_; + + std::vector networks_; + + std::map> networks_map_; + + std::unique_ptr ipv4_any_address_network_; + std::unique_ptr ipv6_any_address_network_; + + IPAddress default_local_ipv4_address_; + IPAddress default_local_ipv6_address_; + // We use 16 bits to save the bandwidth consumption when sending the network + // id over the Internet. It is OK that the 16-bit integer overflows to get a + // network id 0 because we only compare the network ids in the old and the new + // best connections in the transport channel. + uint16_t next_available_network_id_ = 1; +}; + +// Basic implementation of the NetworkManager interface that gets list +// of networks using OS APIs. +class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, + public NetworkBinderInterface, + public sigslot::has_slots<> { + public: + BasicNetworkManager( + const Environment& env, + SocketFactory* absl_nonnull socket_factory, + NetworkMonitorFactory* absl_nullable network_monitor_factory = nullptr); + + ~BasicNetworkManager() override; + + void StartUpdating() override; + void StopUpdating() override; + + void DumpNetworks() override; + + bool started() { return start_count_ > 0; } + + // Sets the network ignore list, which is empty by default. Any network on the + // ignore list will be filtered from network enumeration results. + // Should be called only before initialization. + void set_network_ignore_list(const std::vector& list) { + RTC_DCHECK(thread_ == nullptr); + network_ignore_list_ = list; + } + + // Set a list of manually configured VPN's. + void set_vpn_list(const std::vector& vpn) override; + + // Check if |prefix| is configured as VPN. + bool IsConfiguredVpn(IPAddress prefix, int prefix_length) const; + + // Bind a socket to interface that ip address belong to. + // Implementation look up interface name and calls + // BindSocketToNetwork on NetworkMonitor. + // The interface name is needed as e.g ipv4 over ipv6 addresses + // are not exposed using Android functions, but it is possible + // bind an ipv4 address to the interface. + NetworkBindingResult BindSocketToNetwork(int socket_fd, + const IPAddress& address) override; + + protected: +#if defined(WEBRTC_POSIX) + // Separated from CreateNetworks for tests. + void ConvertIfAddrs(ifaddrs* interfaces, + IfAddrsConverter* converter, + bool include_ignored, + std::vector>* networks) const + RTC_RUN_ON(thread_); + NetworkMonitorInterface::InterfaceInfo GetInterfaceInfo( + struct ifaddrs* cursor) const RTC_RUN_ON(thread_); +#endif // defined(WEBRTC_POSIX) + + // Creates a network object for each network available on the machine. 
+ bool CreateNetworks(bool include_ignored, + std::vector>* networks) const + RTC_RUN_ON(thread_); + + // Determines if a network should be ignored. This should only be determined + // based on the network's property instead of any individual IP. + bool IsIgnoredNetwork(const Network& network) const RTC_RUN_ON(thread_); + + // This function connects a UDP socket to a public address and returns the + // local address associated it. Since it binds to the "any" address + // internally, it returns the default local address on a multi-homed endpoint. + IPAddress QueryDefaultLocalAddress(int family) const RTC_RUN_ON(thread_); + + private: + friend class NetworkTest; + + // Creates a network monitor and listens for network updates. + void StartNetworkMonitor() RTC_RUN_ON(thread_); + // Stops and removes the network monitor. + void StopNetworkMonitor() RTC_RUN_ON(thread_); + // Called when it receives updates from the network monitor. + void OnNetworksChanged(); + + // Updates the networks and reschedules the next update. + void UpdateNetworksContinually() RTC_RUN_ON(thread_); + // Only updates the networks; does not reschedule the next update. + void UpdateNetworksOnce() RTC_RUN_ON(thread_); + + const Environment env_; + Thread* thread_ = nullptr; + bool sent_first_update_ = true; + int start_count_ = 0; + std::vector network_ignore_list_; + NetworkMonitorFactory* absl_nullable const network_monitor_factory_; + SocketFactory* absl_nonnull const socket_factory_; + std::unique_ptr network_monitor_ + RTC_GUARDED_BY(thread_); + bool allow_mac_based_ipv6_ RTC_GUARDED_BY(thread_) = false; + bool bind_using_ifname_ RTC_GUARDED_BY(thread_) = false; + + std::vector vpn_; + scoped_refptr task_safety_flag_; +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::BasicNetworkManager; +using ::webrtc::DefaultLocalAddressProvider; +using ::webrtc::GetAdapterTypeFromName; +using ::webrtc::kDefaultNetworkIgnoreMask; +using ::webrtc::kPublicIPv4Host; +using ::webrtc::kPublicIPv6Host; +using ::webrtc::MakeNetworkKey; +using ::webrtc::MdnsResponderProvider; +using ::webrtc::Network; +using ::webrtc::NetworkManager; +using ::webrtc::NetworkManagerBase; +using ::webrtc::NetworkMask; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NETWORK_H_ diff --git a/rtc_base/network/BUILD.gn b/rtc_base/network/BUILD.gn index 35ae3d45f7..8d18cbc560 100644 --- a/rtc_base/network/BUILD.gn +++ b/rtc_base/network/BUILD.gn @@ -14,5 +14,27 @@ rtc_library("sent_packet") { "sent_packet.h", ] deps = [ "../system:rtc_export" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_source_set("ecn_marking") { + visibility = [ "*" ] + sources = [ "ecn_marking.h" ] + deps = [ "../../api/transport:ecn_marking" ] +} + +rtc_library("received_packet") { + visibility = [ "*" ] + sources = [ + "received_packet.cc", + "received_packet.h", + ] + deps = [ + ":ecn_marking", + "..:checks", + "..:socket_address", + "../../api:array_view", + "../../api/transport:ecn_marking", + "../../api/units:timestamp", + "../system:rtc_export", + ] } diff --git a/rtc_base/network/ecn_marking.h b/rtc_base/network/ecn_marking.h new file mode 100644 index 0000000000..d181eb7ddd --- /dev/null +++ b/rtc_base/network/ecn_marking.h @@ -0,0 +1,22 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. 
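For downstream callers, the practical change in the BasicNetworkManager reshuffle above is the new Environment-based constructor. The sketch below is not part of the patch; it mirrors the unit-test call sites further down and assumes PhysicalSocketServer and AutoThread have likewise moved to the webrtc namespace as part of this migration.

#include "api/environment/environment_factory.h"
#include "rtc_base/logging.h"
#include "rtc_base/network.h"
#include "rtc_base/physical_socket_server.h"
#include "rtc_base/thread.h"

void ExampleEnumerateNetworks() {
  webrtc::AutoThread main_thread;  // StartUpdating() needs a current thread.
  webrtc::PhysicalSocketServer socket_server;
  webrtc::BasicNetworkManager manager(webrtc::CreateEnvironment(),
                                      &socket_server,
                                      /*network_monitor_factory=*/nullptr);
  manager.StartUpdating();
  // The list is empty until the first enumeration completes; real callers
  // wait for SignalNetworksChanged before reading it.
  for (const webrtc::Network* network : manager.GetNetworks()) {
    RTC_LOG(LS_INFO) << network->ToString();
  }
  manager.StopUpdating();
}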
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef RTC_BASE_NETWORK_ECN_MARKING_H_ +#define RTC_BASE_NETWORK_ECN_MARKING_H_ + +// // TODO: bugs.webrtc.org/42225697 - delete this file. +#include "api/transport/ecn_marking.h" + +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::EcnMarking; +} // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // RTC_BASE_NETWORK_ECN_MARKING_H_ diff --git a/rtc_base/network/received_packet.cc b/rtc_base/network/received_packet.cc new file mode 100644 index 0000000000..3f08e2d099 --- /dev/null +++ b/rtc_base/network/received_packet.cc @@ -0,0 +1,57 @@ +/* + * Copyright 2023 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/network/received_packet.h" + +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/transport/ecn_marking.h" +#include "api/units/timestamp.h" +#include "rtc_base/checks.h" +#include "rtc_base/socket_address.h" + +namespace webrtc { + +ReceivedIpPacket::ReceivedIpPacket(ArrayView payload, + const SocketAddress& source_address, + std::optional arrival_time, + EcnMarking ecn, + DecryptionInfo decryption) + : payload_(payload), + arrival_time_(std::move(arrival_time)), + source_address_(source_address), + ecn_(ecn), + decryption_info_(decryption) {} + +ReceivedIpPacket ReceivedIpPacket::CopyAndSet( + DecryptionInfo decryption_info) const { + return ReceivedIpPacket(payload_, source_address_, arrival_time_, ecn_, + decryption_info); +} + +// static +ReceivedIpPacket ReceivedIpPacket::CreateFromLegacy( + const uint8_t* data, + size_t size, + int64_t packet_time_us, + const SocketAddress& source_address) { + RTC_DCHECK(packet_time_us == -1 || packet_time_us >= 0); + return ReceivedIpPacket( + MakeArrayView(data, size), source_address, + (packet_time_us >= 0) + ? std::optional(Timestamp::Micros(packet_time_us)) + : std::nullopt); +} + +} // namespace webrtc diff --git a/rtc_base/network/received_packet.h b/rtc_base/network/received_packet.h new file mode 100644 index 0000000000..5de54cd1c2 --- /dev/null +++ b/rtc_base/network/received_packet.h @@ -0,0 +1,98 @@ +/* + * Copyright 2023 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef RTC_BASE_NETWORK_RECEIVED_PACKET_H_ +#define RTC_BASE_NETWORK_RECEIVED_PACKET_H_ + +#include +#include +#include + +#include "api/array_view.h" +#include "api/transport/ecn_marking.h" +#include "api/units/timestamp.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// ReceivedPacket represent a received IP packet. +// It contains a payload and metadata. +// ReceivedPacket itself does not put constraints on what payload contains. For +// example it may contains STUN, SCTP, SRTP, RTP, RTCP.... etc. +class RTC_EXPORT ReceivedIpPacket { + public: + enum DecryptionInfo { + kNotDecrypted, // Payload has not yet been decrypted or encryption is not + // used. + kDtlsDecrypted, // Payload has been Dtls decrypted + kSrtpEncrypted // Payload is SRTP encrypted. + }; + + // Caller must keep memory pointed to by payload and address valid for the + // lifetime of this ReceivedPacket. + ReceivedIpPacket(ArrayView payload, + const webrtc::SocketAddress& source_address, + std::optional arrival_time = std::nullopt, + EcnMarking ecn = EcnMarking::kNotEct, + DecryptionInfo decryption = kNotDecrypted); + + ReceivedIpPacket CopyAndSet(DecryptionInfo decryption_info) const; + + // Address/port of the packet sender. + const webrtc::SocketAddress& source_address() const { + return source_address_; + } + ArrayView payload() const { return payload_; } + + // Timestamp when this packet was received. Not available on all socket + // implementations. + std::optional arrival_time() const { + return arrival_time_; + } + + // L4S Explicit Congestion Notification. + EcnMarking ecn() const { return ecn_; } + + const DecryptionInfo& decryption_info() const { return decryption_info_; } + + static ReceivedIpPacket CreateFromLegacy( + const char* data, + size_t size, + int64_t packet_time_us, + const webrtc::SocketAddress& addr = webrtc::SocketAddress()) { + return CreateFromLegacy(reinterpret_cast(data), size, + packet_time_us, addr); + } + + static ReceivedIpPacket CreateFromLegacy( + const uint8_t* data, + size_t size, + int64_t packet_time_us, + const webrtc::SocketAddress& = webrtc::SocketAddress()); + + private: + ArrayView payload_; + std::optional arrival_time_; + const webrtc::SocketAddress& source_address_; + EcnMarking ecn_; + DecryptionInfo decryption_info_; +}; + +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
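A usage sketch for the new ReceivedIpPacket (not part of the patch): the type is a non-owning view, so the payload buffer and the SocketAddress must outlive the packet; the sender address and timestamp handling here are illustrative and simply follow the constructor defaults above.

#include <cstddef>
#include <cstdint>

#include "api/array_view.h"
#include "api/units/timestamp.h"
#include "rtc_base/checks.h"
#include "rtc_base/network/received_packet.h"
#include "rtc_base/socket_address.h"

void ExampleOnDatagram(const uint8_t* data,
                       size_t size,
                       const webrtc::SocketAddress& source,
                       int64_t arrival_time_us) {
  webrtc::ReceivedIpPacket packet(webrtc::MakeArrayView(data, size), source,
                                  webrtc::Timestamp::Micros(arrival_time_us));
  // Call sites still on the (data, size, packet_time_us) convention can go
  // through the compatibility helper instead.
  webrtc::ReceivedIpPacket legacy = webrtc::ReceivedIpPacket::CreateFromLegacy(
      data, size, arrival_time_us, source);
  RTC_DCHECK_EQ(packet.payload().size(), legacy.payload().size());
  // Hand the packet to the demuxer; payload(), ecn() and arrival_time()
  // expose the data and its metadata.
}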
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ReceivedPacket = ::webrtc::ReceivedIpPacket; +} // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#endif // RTC_BASE_NETWORK_RECEIVED_PACKET_H_ diff --git a/rtc_base/network/sent_packet.cc b/rtc_base/network/sent_packet.cc index 8cc49737ef..893accf8d4 100644 --- a/rtc_base/network/sent_packet.cc +++ b/rtc_base/network/sent_packet.cc @@ -10,18 +10,20 @@ #include "rtc_base/network/sent_packet.h" -namespace rtc { +#include + +namespace webrtc { PacketInfo::PacketInfo() = default; PacketInfo::PacketInfo(const PacketInfo& info) = default; PacketInfo::~PacketInfo() = default; -SentPacket::SentPacket() = default; -SentPacket::SentPacket(int64_t packet_id, int64_t send_time_ms) +SentPacketInfo::SentPacketInfo() = default; +SentPacketInfo::SentPacketInfo(int64_t packet_id, int64_t send_time_ms) : packet_id(packet_id), send_time_ms(send_time_ms) {} -SentPacket::SentPacket(int64_t packet_id, - int64_t send_time_ms, - const rtc::PacketInfo& info) +SentPacketInfo::SentPacketInfo(int64_t packet_id, + int64_t send_time_ms, + const PacketInfo& info) : packet_id(packet_id), send_time_ms(send_time_ms), info(info) {} -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/network/sent_packet.h b/rtc_base/network/sent_packet.h index 457fb969cf..805b442d75 100644 --- a/rtc_base/network/sent_packet.h +++ b/rtc_base/network/sent_packet.h @@ -14,10 +14,11 @@ #include #include -#include "absl/types/optional.h" +#include + #include "rtc_base/system/rtc_export.h" -namespace rtc { +namespace webrtc { enum class PacketType { kUnknown, @@ -43,27 +44,41 @@ struct RTC_EXPORT PacketInfo { bool included_in_feedback = false; bool included_in_allocation = false; + // `is_media` is true if this is an audio or video packet, excluding + // retransmissions. + bool is_media = false; PacketType packet_type = PacketType::kUnknown; PacketInfoProtocolType protocol = PacketInfoProtocolType::kUnknown; - // A unique id assigned by the network manager, and absl::nullopt if not set. - absl::optional network_id; + // A unique id assigned by the network manager, and std::nullopt if not set. + std::optional network_id; size_t packet_size_bytes = 0; size_t turn_overhead_bytes = 0; size_t ip_overhead_bytes = 0; }; -struct RTC_EXPORT SentPacket { - SentPacket(); - SentPacket(int64_t packet_id, int64_t send_time_ms); - SentPacket(int64_t packet_id, - int64_t send_time_ms, - const rtc::PacketInfo& info); +struct RTC_EXPORT SentPacketInfo { + SentPacketInfo(); + SentPacketInfo(int64_t packet_id, int64_t send_time_ms); + SentPacketInfo(int64_t packet_id, + int64_t send_time_ms, + const PacketInfo& info); int64_t packet_id = -1; int64_t send_time_ms = -1; - rtc::PacketInfo info; + PacketInfo info; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
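SentPacket is renamed to SentPacketInfo but is populated exactly as before; unmigrated code can keep spelling it rtc::SentPacket while WEBRTC_ALLOW_DEPRECATED_NAMESPACES is defined. A sketch with illustrative field values (not part of the patch):

#include <cstddef>
#include <cstdint>

#include "rtc_base/network/sent_packet.h"

webrtc::SentPacketInfo MakeSentPacketInfo(int64_t packet_id,
                                          int64_t send_time_ms,
                                          size_t payload_size) {
  webrtc::PacketInfo info;
  info.included_in_feedback = true;
  info.is_media = true;  // New field introduced by this patch.
  info.packet_size_bytes = payload_size;
  return webrtc::SentPacketInfo(packet_id, send_time_ms, info);
}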
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::PacketInfo; +using SentPacket = ::webrtc::SentPacketInfo; +using ::webrtc::PacketInfoProtocolType; +using ::webrtc::PacketType; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NETWORK_SENT_PACKET_H_ diff --git a/rtc_base/network_constants.cc b/rtc_base/network_constants.cc index 9e6ab520e5..7f81951122 100644 --- a/rtc_base/network_constants.cc +++ b/rtc_base/network_constants.cc @@ -12,7 +12,7 @@ #include "rtc_base/checks.h" -namespace rtc { +namespace webrtc { std::string AdapterTypeToString(AdapterType type) { switch (type) { @@ -44,4 +44,4 @@ std::string AdapterTypeToString(AdapterType type) { } } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/network_constants.h b/rtc_base/network_constants.h index 578b9710d0..884141c4b3 100644 --- a/rtc_base/network_constants.h +++ b/rtc_base/network_constants.h @@ -15,7 +15,7 @@ #include -namespace rtc { +namespace webrtc { constexpr uint16_t kNetworkCostMax = 999; constexpr uint16_t kNetworkCostCellular2G = 980; @@ -67,6 +67,38 @@ constexpr AdapterType kAllAdapterTypes[] = { ADAPTER_TYPE_CELLULAR_5G, }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::ADAPTER_TYPE_ANY; +using ::webrtc::ADAPTER_TYPE_CELLULAR; +using ::webrtc::ADAPTER_TYPE_CELLULAR_2G; +using ::webrtc::ADAPTER_TYPE_CELLULAR_3G; +using ::webrtc::ADAPTER_TYPE_CELLULAR_4G; +using ::webrtc::ADAPTER_TYPE_CELLULAR_5G; +using ::webrtc::ADAPTER_TYPE_ETHERNET; +using ::webrtc::ADAPTER_TYPE_LOOPBACK; +using ::webrtc::ADAPTER_TYPE_UNKNOWN; +using ::webrtc::ADAPTER_TYPE_VPN; +using ::webrtc::ADAPTER_TYPE_WIFI; +using ::webrtc::AdapterType; +using ::webrtc::AdapterTypeToString; +using ::webrtc::kAllAdapterTypes; +using ::webrtc::kNetworkCostCellular; +using ::webrtc::kNetworkCostCellular2G; +using ::webrtc::kNetworkCostCellular3G; +using ::webrtc::kNetworkCostCellular4G; +using ::webrtc::kNetworkCostCellular5G; +using ::webrtc::kNetworkCostHigh; +using ::webrtc::kNetworkCostLow; +using ::webrtc::kNetworkCostMax; +using ::webrtc::kNetworkCostMin; +using ::webrtc::kNetworkCostUnknown; +using ::webrtc::kNetworkCostVpn; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NETWORK_CONSTANTS_H_ diff --git a/rtc_base/network_monitor.cc b/rtc_base/network_monitor.cc index 70c2ad5020..0733d8eea6 100644 --- a/rtc_base/network_monitor.cc +++ b/rtc_base/network_monitor.cc @@ -12,7 +12,7 @@ #include "rtc_base/checks.h" -namespace rtc { +namespace webrtc { const char* NetworkPreferenceToString(NetworkPreference preference) { switch (preference) { @@ -27,4 +27,4 @@ const char* NetworkPreferenceToString(NetworkPreference preference) { NetworkMonitorInterface::NetworkMonitorInterface() {} NetworkMonitorInterface::~NetworkMonitorInterface() {} -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/network_monitor.h b/rtc_base/network_monitor.h index 605854f6ea..f954a4db22 100644 --- a/rtc_base/network_monitor.h +++ b/rtc_base/network_monitor.h @@ -15,11 +15,10 @@ #include #include "absl/strings/string_view.h" +#include "rtc_base/ip_address.h" #include "rtc_base/network_constants.h" -namespace rtc { - -class IPAddress; +namespace webrtc { enum class NetworkBindingResult { SUCCESS = 0, // No error @@ -78,7 +77,7 @@ class NetworkMonitorInterface { 
AdapterType adapter_type; // Is ADAPTER_TYPE_UNKNOWN unless adapter_type == ADAPTER_TYPE_VPN. - AdapterType underlying_type_for_vpn = ADAPTER_TYPE_UNKNOWN; + AdapterType underlying_type_for_vpn = webrtc::ADAPTER_TYPE_UNKNOWN; // The OS/firmware specific preference of this interface. NetworkPreference network_preference = NetworkPreference::NEUTRAL; @@ -113,9 +112,9 @@ class NetworkMonitorInterface { // Bind a socket to an interface specified by ip address and/or interface // name. Only implemented on Android. virtual NetworkBindingResult BindSocketToNetwork( - int socket_fd, - const IPAddress& address, - absl::string_view interface_name) { + int /* socket_fd */, + const IPAddress& /* address */, + absl::string_view /* interface_name */) { return NetworkBindingResult::NOT_IMPLEMENTED; } @@ -134,6 +133,18 @@ class NetworkMonitorInterface { std::function networks_changed_callback_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::NetworkBinderInterface; +using ::webrtc::NetworkBindingResult; +using ::webrtc::NetworkMonitorInterface; +using ::webrtc::NetworkPreference; +using ::webrtc::NetworkPreferenceToString; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NETWORK_MONITOR_H_ diff --git a/rtc_base/network_monitor_factory.cc b/rtc_base/network_monitor_factory.cc index 9fac4d95a0..0b75e29a1b 100644 --- a/rtc_base/network_monitor_factory.cc +++ b/rtc_base/network_monitor_factory.cc @@ -10,9 +10,9 @@ #include "rtc_base/network_monitor_factory.h" -namespace rtc { +namespace webrtc { NetworkMonitorFactory::NetworkMonitorFactory() {} NetworkMonitorFactory::~NetworkMonitorFactory() {} -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/network_monitor_factory.h b/rtc_base/network_monitor_factory.h index c76ed97d8c..8cdcdb7b93 100644 --- a/rtc_base/network_monitor_factory.h +++ b/rtc_base/network_monitor_factory.h @@ -11,15 +11,15 @@ #ifndef RTC_BASE_NETWORK_MONITOR_FACTORY_H_ #define RTC_BASE_NETWORK_MONITOR_FACTORY_H_ +#include "rtc_base/network_monitor.h" namespace webrtc { class FieldTrialsView; } // namespace webrtc -namespace rtc { +namespace webrtc { // Forward declaring this so it's not part of the API surface; it's only // expected to be used by Android/iOS SDK code. -class NetworkMonitorInterface; /* * NetworkMonitorFactory creates NetworkMonitors. @@ -29,7 +29,7 @@ class NetworkMonitorInterface; class NetworkMonitorFactory { public: virtual NetworkMonitorInterface* CreateNetworkMonitor( - const webrtc::FieldTrialsView& field_trials) = 0; + const FieldTrialsView& field_trials) = 0; virtual ~NetworkMonitorFactory(); @@ -37,6 +37,14 @@ class NetworkMonitorFactory { NetworkMonitorFactory(); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
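Platform integrations now implement NetworkMonitorFactory and NetworkMonitorInterface directly in the webrtc namespace; the rtc:: spellings survive only as aliases. A minimal no-op sketch (not part of the patch), patterned on the FakeNetworkMonitor used by the unit tests further down:

#include "absl/strings/string_view.h"
#include "api/field_trials_view.h"
#include "rtc_base/network_constants.h"
#include "rtc_base/network_monitor.h"
#include "rtc_base/network_monitor_factory.h"

class NullNetworkMonitor : public webrtc::NetworkMonitorInterface {
 public:
  void Start() override {}
  void Stop() override {}
  InterfaceInfo GetInterfaceInfo(absl::string_view /* if_name */) override {
    // Report every interface as available but of unknown adapter type.
    return {.adapter_type = webrtc::ADAPTER_TYPE_UNKNOWN, .available = true};
  }
};

class NullNetworkMonitorFactory : public webrtc::NetworkMonitorFactory {
 public:
  webrtc::NetworkMonitorInterface* CreateNetworkMonitor(
      const webrtc::FieldTrialsView& /* field_trials */) override {
    return new NullNetworkMonitor();
  }
};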
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::NetworkMonitorFactory; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NETWORK_MONITOR_FACTORY_H_ diff --git a/rtc_base/network_route.cc b/rtc_base/network_route.cc index 9762dc2eb7..1581e466bf 100644 --- a/rtc_base/network_route.cc +++ b/rtc_base/network_route.cc @@ -10,7 +10,7 @@ #include "rtc_base/network_route.h" -namespace rtc { +namespace webrtc { bool RouteEndpoint::operator==(const RouteEndpoint& other) const { return adapter_type_ == other.adapter_type_ && @@ -24,4 +24,4 @@ bool NetworkRoute::operator==(const NetworkRoute& other) const { last_sent_packet_id == other.last_sent_packet_id; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/network_route.h b/rtc_base/network_route.h index 17b43e5b69..7f437cfbcb 100644 --- a/rtc_base/network_route.h +++ b/rtc_base/network_route.h @@ -23,7 +23,7 @@ // the media code can rely on and the network code can implement, and both can // depend on that, but not depend on each other. Then, move this file to that // directory. -namespace rtc { +namespace webrtc { class RouteEndpoint { public: @@ -42,7 +42,7 @@ class RouteEndpoint { // Used by tests. static RouteEndpoint CreateWithNetworkId(uint16_t network_id) { - return RouteEndpoint(ADAPTER_TYPE_UNKNOWN, + return RouteEndpoint(webrtc::ADAPTER_TYPE_UNKNOWN, /* adapter_id = */ 0, network_id, /* uses_turn = */ false); } @@ -58,7 +58,7 @@ class RouteEndpoint { bool operator==(const RouteEndpoint& other) const; private: - AdapterType adapter_type_ = ADAPTER_TYPE_UNKNOWN; + AdapterType adapter_type_ = webrtc::ADAPTER_TYPE_UNKNOWN; uint16_t adapter_id_ = 0; uint16_t network_id_ = 0; bool uses_turn_ = false; @@ -75,21 +75,31 @@ struct NetworkRoute { int packet_overhead = 0; RTC_NO_INLINE inline std::string DebugString() const { - rtc::StringBuilder oss; + StringBuilder oss; oss << "[ connected: " << connected << " local: [ " << local.adapter_id() << "/" << local.network_id() << " " - << AdapterTypeToString(local.adapter_type()) + << webrtc::AdapterTypeToString(local.adapter_type()) << " turn: " << local.uses_turn() << " ] remote: [ " << remote.adapter_id() << "/" << remote.network_id() << " " - << AdapterTypeToString(remote.adapter_type()) + << webrtc::AdapterTypeToString(remote.adapter_type()) << " turn: " << remote.uses_turn() << " ] packet_overhead_bytes: " << packet_overhead << " ]"; return oss.Release(); } bool operator==(const NetworkRoute& other) const; + bool operator!=(const NetworkRoute& other) { return !operator==(other); } }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
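NetworkRoute and RouteEndpoint move wholesale as well, and NetworkRoute gains an operator!=. A population sketch with illustrative values (not part of the patch); CreateWithNetworkId is the test-only helper shown above, so production code would build the endpoints from the selected candidate pair instead:

#include <cstdint>

#include "rtc_base/logging.h"
#include "rtc_base/network_route.h"

webrtc::NetworkRoute MakeExampleRoute(uint16_t local_network_id,
                                      uint16_t remote_network_id) {
  webrtc::NetworkRoute route;
  route.connected = true;
  route.local = webrtc::RouteEndpoint::CreateWithNetworkId(local_network_id);
  route.remote = webrtc::RouteEndpoint::CreateWithNetworkId(remote_network_id);
  route.packet_overhead = 28;  // Illustrative: IPv4 (20) + UDP (8) headers.
  RTC_LOG(LS_INFO) << route.DebugString();
  return route;
}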
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::NetworkRoute; +using ::webrtc::RouteEndpoint; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NETWORK_ROUTE_H_ diff --git a/rtc_base/network_route_unittest.cc b/rtc_base/network_route_unittest.cc index 485683b71f..0f39a48009 100644 --- a/rtc_base/network_route_unittest.cc +++ b/rtc_base/network_route_unittest.cc @@ -10,10 +10,9 @@ #include "rtc_base/network_route.h" -#include "rtc_base/gunit.h" -#include "test/gmock.h" +#include "test/gtest.h" -namespace rtc { +namespace webrtc { TEST(NetworkRoute, Equals) { NetworkRoute r1; @@ -21,4 +20,4 @@ TEST(NetworkRoute, Equals) { EXPECT_TRUE(r1 == r2); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/network_unittest.cc b/rtc_base/network_unittest.cc index d1999f349c..02c6ee0013 100644 --- a/rtc_base/network_unittest.cc +++ b/rtc_base/network_unittest.cc @@ -12,51 +12,74 @@ #include -#include +#include +#include #include +#include +#include #include #include "absl/algorithm/container.h" #include "absl/strings/match.h" #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/sequence_checker.h" +#include "api/test/rtc_error_matchers.h" #include "rtc_base/checks.h" +#include "rtc_base/ip_address.h" #include "rtc_base/net_helpers.h" #include "rtc_base/net_test_helpers.h" +#include "rtc_base/network_constants.h" #include "rtc_base/network_monitor.h" #include "rtc_base/network_monitor_factory.h" #include "rtc_base/physical_socket_server.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "test/gtest.h" +#include "test/wait_until.h" + +// IWYU pragma: begin_keep #if defined(WEBRTC_POSIX) #include #include +#include #include "rtc_base/ifaddrs_converter.h" #endif // defined(WEBRTC_POSIX) -#include "rtc_base/gunit.h" #include "test/gmock.h" #if defined(WEBRTC_WIN) #include "rtc_base/logging.h" // For RTC_LOG_GLE #endif +// IWYU pragma: end_keep #include "test/field_trial.h" #include "test/scoped_key_value_config.h" using ::testing::Contains; +using ::testing::IsTrue; using ::testing::Not; using ::testing::UnorderedElementsAre; using ::testing::UnorderedElementsAreArray; - -namespace rtc { +using ::webrtc::CreateEnvironment; +using ::webrtc::Environment; +using ::webrtc::test::ScopedFieldTrials; +using ::webrtc::test::ScopedKeyValueConfig; #define MAYBE_SKIP_IPV4 \ - if (!HasIPv4Enabled()) { \ + if (!::webrtc::HasIPv4Enabled()) { \ RTC_LOG(LS_INFO) << "No IPv4... 
skipping"; \ return; \ } +namespace webrtc { + namespace { IPAddress IPFromString(absl::string_view str) { IPAddress ip; - RTC_CHECK(IPFromString(str, &ip)); + RTC_CHECK(webrtc::IPFromString(str, &ip)); return ip; } @@ -67,13 +90,13 @@ class FakeNetworkMonitor : public NetworkMonitorInterface { bool started() { return started_; } InterfaceInfo GetInterfaceInfo(absl::string_view if_name) override { InterfaceInfo if_info = { - .adapter_type = ADAPTER_TYPE_UNKNOWN, + .adapter_type = webrtc::ADAPTER_TYPE_UNKNOWN, .available = absl::c_count(unavailable_adapters_, if_name) == 0, }; if (absl::StartsWith(if_name, "wifi")) { - if_info.adapter_type = ADAPTER_TYPE_WIFI; + if_info.adapter_type = webrtc::ADAPTER_TYPE_WIFI; } else if (absl::StartsWith(if_name, "cellular")) { - if_info.adapter_type = ADAPTER_TYPE_CELLULAR; + if_info.adapter_type = webrtc::ADAPTER_TYPE_CELLULAR; } return if_info; } @@ -93,7 +116,7 @@ class FakeNetworkMonitor : public NetworkMonitorInterface { } for (auto const& iter : adapters_) { - if (if_name.find(iter) != absl::string_view::npos) { + if (absl::StrContains(if_name, iter)) { return NetworkBindingResult::SUCCESS; } } @@ -121,12 +144,12 @@ class FakeNetworkMonitorFactory : public NetworkMonitorFactory { public: FakeNetworkMonitorFactory() {} NetworkMonitorInterface* CreateNetworkMonitor( - const webrtc::FieldTrialsView& field_trials) override { + const FieldTrialsView& field_trials) override { return new FakeNetworkMonitor(); } }; -bool SameNameAndPrefix(const rtc::Network& a, const rtc::Network& b) { +bool SameNameAndPrefix(const Network& a, const Network& b) { if (a.name() != b.name()) { RTC_LOG(LS_INFO) << "Different interface names."; return false; @@ -224,7 +247,7 @@ class NetworkTest : public ::testing::Test, public sigslot::has_slots<> { ipv6_addr->sin6_family = AF_INET6; ipv6_addr->sin6_scope_id = scope_id; IPAddress ip; - IPFromString(ip_string, &ip); + webrtc::IPFromString(ip_string, &ip); ipv6_addr->sin6_addr = ip.ipv6_address(); return ipv6_addr; } @@ -267,7 +290,7 @@ class NetworkTest : public ::testing::Test, public sigslot::has_slots<> { memset(ipv4_addr, 0, sizeof(struct sockaddr_in)); ipv4_addr->sin_family = AF_INET; IPAddress ip; - IPFromString(ip_string, &ip); + webrtc::IPFromString(ip_string, &ip); ipv4_addr->sin_addr = ip.ipv4_address(); return ipv4_addr; } @@ -316,19 +339,14 @@ class NetworkTest : public ::testing::Test, public sigslot::has_slots<> { #endif // defined(WEBRTC_POSIX) protected: - webrtc::test::ScopedKeyValueConfig field_trials_; - rtc::AutoThread main_thread_; + ScopedKeyValueConfig field_trials_; + AutoThread main_thread_; bool callback_called_; }; class TestBasicNetworkManager : public BasicNetworkManager { public: - TestBasicNetworkManager(NetworkMonitorFactory* network_monitor_factory, - SocketFactory* socket_factory, - const webrtc::FieldTrialsView& field_trials) - : BasicNetworkManager(network_monitor_factory, - socket_factory, - &field_trials) {} + using BasicNetworkManager::BasicNetworkManager; using BasicNetworkManager::QueryDefaultLocalAddress; using BasicNetworkManager::set_default_local_addresses; }; @@ -347,11 +365,13 @@ TEST_F(NetworkTest, TestNetworkConstruct) { TEST_F(NetworkTest, TestIsIgnoredNetworkIgnoresIPsStartingWith0) { Network ipv4_network1("test_eth0", "Test Network Adapter 1", - IPAddress(0x12345600U), 24, ADAPTER_TYPE_ETHERNET); + IPAddress(0x12345600U), 24, + webrtc::ADAPTER_TYPE_ETHERNET); Network ipv4_network2("test_eth1", "Test Network Adapter 2", - IPAddress(0x010000U), 24, ADAPTER_TYPE_ETHERNET); + 
IPAddress(0x010000U), 24, + webrtc::ADAPTER_TYPE_ETHERNET); PhysicalSocketServer socket_server; - BasicNetworkManager network_manager(&socket_server); + BasicNetworkManager network_manager(CreateEnvironment(), &socket_server); network_manager.StartUpdating(); EXPECT_FALSE(IsIgnoredNetwork(network_manager, ipv4_network1)); EXPECT_TRUE(IsIgnoredNetwork(network_manager, ipv4_network2)); @@ -363,13 +383,14 @@ TEST_F(NetworkTest, TestIgnoreList) { 24); Network include_me("include_me", "Include me please!", IPAddress(0x12345600U), 24); + const Environment env = CreateEnvironment(); PhysicalSocketServer socket_server; - BasicNetworkManager default_network_manager(&socket_server); + BasicNetworkManager default_network_manager(env, &socket_server); default_network_manager.StartUpdating(); EXPECT_FALSE(IsIgnoredNetwork(default_network_manager, ignore_me)); EXPECT_FALSE(IsIgnoredNetwork(default_network_manager, include_me)); - BasicNetworkManager ignoring_network_manager(&socket_server); + BasicNetworkManager ignoring_network_manager(env, &socket_server); std::vector ignore_list; ignore_list.push_back("ignore_me"); ignoring_network_manager.set_network_ignore_list(ignore_list); @@ -381,7 +402,7 @@ TEST_F(NetworkTest, TestIgnoreList) { // Test is failing on Windows opt: b/11288214 TEST_F(NetworkTest, DISABLED_TestCreateNetworks) { PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); std::vector> result = GetNetworks(manager, true); // We should be able to bind to any addresses we find. for (auto it = result.begin(); it != result.end(); ++it) { @@ -390,7 +411,7 @@ TEST_F(NetworkTest, DISABLED_TestCreateNetworks) { IPAddress ip = (*it)->GetBestIP(); SocketAddress bindaddress(ip, 0); bindaddress.SetScopeID((*it)->scope_id()); - // TODO(thaloun): Use rtc::Socket once it supports IPv6. + // TODO(thaloun): Use webrtc::Socket once it supports IPv6. int fd = static_cast(socket(ip.family(), SOCK_STREAM, IPPROTO_TCP)); if (fd > 0) { size_t ipsize = bindaddress.ToSockAddrStorage(&storage); @@ -415,7 +436,7 @@ TEST_F(NetworkTest, DISABLED_TestCreateNetworks) { // ALLOWED. TEST_F(NetworkTest, TestUpdateNetworks) { PhysicalSocketServer socket_server; - BasicNetworkManager manager(nullptr, &socket_server, &field_trials_); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); manager.SignalNetworksChanged.connect(static_cast(this), &NetworkTest::OnNetworksChanged); EXPECT_EQ(NetworkManager::ENUMERATION_ALLOWED, @@ -455,7 +476,7 @@ TEST_F(NetworkTest, TestBasicMergeNetworkList) { ipv4_network1.AddIP(IPAddress(0x12345678)); ipv4_network2.AddIP(IPAddress(0x00010004)); PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); // Add ipv4_network1 to the list of networks. std::vector> list; @@ -468,7 +489,7 @@ TEST_F(NetworkTest, TestBasicMergeNetworkList) { EXPECT_EQ(stats.ipv4_network_count, 1); list.clear(); // It is fine to call .clear() on a moved-from vector. 
- std::vector current = manager.GetNetworks(); + std::vector current = manager.GetNetworks(); EXPECT_EQ(1U, current.size()); EXPECT_TRUE(SameNameAndPrefix(ipv4_network1, *current[0])); const Network* net1 = current[0]; @@ -532,24 +553,25 @@ TEST_F(NetworkTest, TestBasicMergeNetworkList) { void SetupNetworks(std::vector>* list) { IPAddress ip; IPAddress prefix; - EXPECT_TRUE(IPFromString("abcd::1234:5678:abcd:ef12", &ip)); - EXPECT_TRUE(IPFromString("abcd::", &prefix)); + EXPECT_TRUE(webrtc::IPFromString("abcd::1234:5678:abcd:ef12", &ip)); + EXPECT_TRUE(webrtc::IPFromString("abcd::", &prefix)); // First, fake link-locals. Network ipv6_eth0_linklocalnetwork("test_eth0", "Test NetworkAdapter 1", prefix, 64); ipv6_eth0_linklocalnetwork.AddIP(ip); - EXPECT_TRUE(IPFromString("abcd::5678:abcd:ef12:3456", &ip)); + EXPECT_TRUE(webrtc::IPFromString("abcd::5678:abcd:ef12:3456", &ip)); Network ipv6_eth1_linklocalnetwork("test_eth1", "Test NetworkAdapter 2", prefix, 64); ipv6_eth1_linklocalnetwork.AddIP(ip); // Public networks: - EXPECT_TRUE(IPFromString("2401:fa00:4:1000:be30:5bff:fee5:c3", &ip)); - prefix = TruncateIP(ip, 64); + EXPECT_TRUE(webrtc::IPFromString("2401:fa00:4:1000:be30:5bff:fee5:c3", &ip)); + prefix = webrtc::TruncateIP(ip, 64); Network ipv6_eth0_publicnetwork1_ip1("test_eth0", "Test NetworkAdapter 1", prefix, 64); ipv6_eth0_publicnetwork1_ip1.AddIP(ip); - EXPECT_TRUE(IPFromString("2400:4030:1:2c00:be30:abcd:efab:cdef", &ip)); - prefix = TruncateIP(ip, 64); + EXPECT_TRUE( + webrtc::IPFromString("2400:4030:1:2c00:be30:abcd:efab:cdef", &ip)); + prefix = webrtc::TruncateIP(ip, 64); Network ipv6_eth1_publicnetwork1_ip1("test_eth1", "Test NetworkAdapter 1", prefix, 64); ipv6_eth1_publicnetwork1_ip1.AddIP(ip); @@ -562,7 +584,7 @@ void SetupNetworks(std::vector>* list) { // Test that the basic network merging case works. TEST_F(NetworkTest, TestIPv6MergeNetworkList) { PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); manager.SignalNetworksChanged.connect(static_cast(this), &NetworkTest::OnNetworksChanged); std::vector> networks; @@ -584,7 +606,7 @@ TEST_F(NetworkTest, TestIPv6MergeNetworkList) { // objects remain in the result list. TEST_F(NetworkTest, TestNoChangeMerge) { PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); manager.SignalNetworksChanged.connect(static_cast(this), &NetworkTest::OnNetworksChanged); std::vector> networks; @@ -615,22 +637,23 @@ TEST_F(NetworkTest, TestNoChangeMerge) { // IP changed. TEST_F(NetworkTest, MergeWithChangedIP) { PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); manager.SignalNetworksChanged.connect(static_cast(this), &NetworkTest::OnNetworksChanged); std::vector> original_list; SetupNetworks(&original_list); // Make a network that we're going to change. 
IPAddress ip; - EXPECT_TRUE(IPFromString("2401:fa01:4:1000:be30:faa:fee:faa", &ip)); - IPAddress prefix = TruncateIP(ip, 64); + EXPECT_TRUE(webrtc::IPFromString("2401:fa01:4:1000:be30:faa:fee:faa", &ip)); + IPAddress prefix = webrtc::TruncateIP(ip, 64); std::unique_ptr network_to_change = std::make_unique( "test_eth0", "Test Network Adapter 1", prefix, 64); std::unique_ptr changed_network = std::make_unique(*network_to_change); network_to_change->AddIP(ip); IPAddress changed_ip; - EXPECT_TRUE(IPFromString("2401:fa01:4:1000:be30:f00:f00:f00", &changed_ip)); + EXPECT_TRUE( + webrtc::IPFromString("2401:fa01:4:1000:be30:f00:f00:f00", &changed_ip)); changed_network->AddIP(changed_ip); const Network* const network_to_change_ptr = network_to_change.get(); original_list.push_back(std::move(network_to_change)); @@ -652,7 +675,7 @@ TEST_F(NetworkTest, MergeWithChangedIP) { TEST_F(NetworkTest, TestMultipleIPMergeNetworkList) { PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); manager.SignalNetworksChanged.connect(static_cast(this), &NetworkTest::OnNetworksChanged); std::vector> original_list; @@ -665,12 +688,13 @@ TEST_F(NetworkTest, TestMultipleIPMergeNetworkList) { IPAddress check_ip; IPAddress prefix; // Add a second IP to the public network on eth0 (2401:fa00:4:1000/64). - EXPECT_TRUE(IPFromString("2401:fa00:4:1000:be30:5bff:fee5:c6", &ip)); - prefix = TruncateIP(ip, 64); + EXPECT_TRUE(webrtc::IPFromString("2401:fa00:4:1000:be30:5bff:fee5:c6", &ip)); + prefix = webrtc::TruncateIP(ip, 64); Network ipv6_eth0_publicnetwork1_ip2("test_eth0", "Test NetworkAdapter 1", prefix, 64); // This is the IP that already existed in the public network on eth0. - EXPECT_TRUE(IPFromString("2401:fa00:4:1000:be30:5bff:fee5:c3", &check_ip)); + EXPECT_TRUE( + webrtc::IPFromString("2401:fa00:4:1000:be30:5bff:fee5:c3", &check_ip)); ipv6_eth0_publicnetwork1_ip2.AddIP(ip); std::vector> second_list; @@ -706,7 +730,7 @@ TEST_F(NetworkTest, TestMultipleIPMergeNetworkList) { // Test that merge correctly distinguishes multiple networks on an interface. TEST_F(NetworkTest, TestMultiplePublicNetworksOnOneInterfaceMerge) { PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); manager.SignalNetworksChanged.connect(static_cast(this), &NetworkTest::OnNetworksChanged); std::vector> original_list; @@ -717,8 +741,8 @@ TEST_F(NetworkTest, TestMultiplePublicNetworksOnOneInterfaceMerge) { IPAddress ip; IPAddress prefix; // A second network for eth0. - EXPECT_TRUE(IPFromString("2400:4030:1:2c00:be30:5bff:fee5:c3", &ip)); - prefix = TruncateIP(ip, 64); + EXPECT_TRUE(webrtc::IPFromString("2400:4030:1:2c00:be30:5bff:fee5:c3", &ip)); + prefix = webrtc::TruncateIP(ip, 64); Network ipv6_eth0_publicnetwork2_ip1("test_eth0", "Test NetworkAdapter 1", prefix, 64); ipv6_eth0_publicnetwork2_ip1.AddIP(ip); @@ -749,7 +773,7 @@ TEST_F(NetworkTest, TestMultiplePublicNetworksOnOneInterfaceMerge) { // Test that DumpNetworks does not crash. 
TEST_F(NetworkTest, TestCreateAndDumpNetworks) { PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); manager.StartUpdating(); std::vector> list = GetNetworks(manager, true); bool changed; @@ -759,7 +783,7 @@ TEST_F(NetworkTest, TestCreateAndDumpNetworks) { TEST_F(NetworkTest, TestIPv6Toggle) { PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); manager.StartUpdating(); bool ipv6_found = false; for (const auto& network : GetNetworks(manager, true)) { @@ -775,15 +799,16 @@ TEST_F(NetworkTest, TestIPv6Toggle) { // IPv6 comes first. TEST_F(NetworkTest, IPv6NetworksPreferredOverIPv4) { PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); Network ipv4_network1("test_eth0", "Test Network Adapter 1", IPAddress(0x12345600U), 24); ipv4_network1.AddIP(IPAddress(0x12345600U)); IPAddress ip; IPAddress prefix; - EXPECT_TRUE(IPFromString("2400:4030:1:2c00:be30:abcd:efab:cdef", &ip)); - prefix = TruncateIP(ip, 64); + EXPECT_TRUE( + webrtc::IPFromString("2400:4030:1:2c00:be30:abcd:efab:cdef", &ip)); + prefix = webrtc::TruncateIP(ip, 64); Network ipv6_eth1_publicnetwork1_ip1("test_eth1", "Test NetworkAdapter 2", prefix, 64); ipv6_eth1_publicnetwork1_ip1.AddIP(ip); @@ -805,7 +830,7 @@ TEST_F(NetworkTest, IPv6NetworksPreferredOverIPv4) { // to be preference-ordered by name. For example, "eth0" before "eth1". TEST_F(NetworkTest, NetworksSortedByInterfaceName) { PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server, &field_trials_); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); auto eth0 = std::make_unique("test_eth0", "Test Network Adapter 1", IPAddress(0x65432100U), 24); eth0->AddIP(IPAddress(0x65432100U)); @@ -829,20 +854,20 @@ TEST_F(NetworkTest, NetworksSortedByInterfaceName) { TEST_F(NetworkTest, TestNetworkAdapterTypes) { Network wifi("wlan0", "Wireless Adapter", IPAddress(0x12345600U), 24, - ADAPTER_TYPE_WIFI); - EXPECT_EQ(ADAPTER_TYPE_WIFI, wifi.type()); + webrtc::ADAPTER_TYPE_WIFI); + EXPECT_EQ(webrtc::ADAPTER_TYPE_WIFI, wifi.type()); Network ethernet("eth0", "Ethernet", IPAddress(0x12345600U), 24, - ADAPTER_TYPE_ETHERNET); - EXPECT_EQ(ADAPTER_TYPE_ETHERNET, ethernet.type()); + webrtc::ADAPTER_TYPE_ETHERNET); + EXPECT_EQ(webrtc::ADAPTER_TYPE_ETHERNET, ethernet.type()); Network cellular("test_cell", "Cellular Adapter", IPAddress(0x12345600U), 24, - ADAPTER_TYPE_CELLULAR); - EXPECT_EQ(ADAPTER_TYPE_CELLULAR, cellular.type()); + webrtc::ADAPTER_TYPE_CELLULAR); + EXPECT_EQ(webrtc::ADAPTER_TYPE_CELLULAR, cellular.type()); Network vpn("bridge_test", "VPN Adapter", IPAddress(0x12345600U), 24, - ADAPTER_TYPE_VPN); - EXPECT_EQ(ADAPTER_TYPE_VPN, vpn.type()); + webrtc::ADAPTER_TYPE_VPN); + EXPECT_EQ(webrtc::ADAPTER_TYPE_VPN, vpn.type()); Network unknown("test", "Test Adapter", IPAddress(0x12345600U), 24, - ADAPTER_TYPE_UNKNOWN); - EXPECT_EQ(ADAPTER_TYPE_UNKNOWN, unknown.type()); + webrtc::ADAPTER_TYPE_UNKNOWN); + EXPECT_EQ(webrtc::ADAPTER_TYPE_UNKNOWN, unknown.type()); } #if defined(WEBRTC_POSIX) @@ -854,7 +879,7 @@ TEST_F(NetworkTest, TestConvertIfAddrsNoAddress) { std::vector> result; PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); manager.StartUpdating(); 
CallConvertIfAddrs(manager, &list, true, &result); EXPECT_TRUE(result.empty()); @@ -871,7 +896,7 @@ TEST_F(NetworkTest, TestConvertIfAddrsMultiAddressesOnOneInterface) { "FFFF:FFFF:FFFF:FFFF::", 0); std::vector> result; PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); manager.StartUpdating(); CallConvertIfAddrs(manager, list, true, &result); EXPECT_EQ(1U, result.size()); @@ -893,7 +918,7 @@ TEST_F(NetworkTest, TestConvertIfAddrsNotRunning) { std::vector> result; PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); manager.StartUpdating(); CallConvertIfAddrs(manager, &list, true, &result); EXPECT_TRUE(result.empty()); @@ -908,7 +933,7 @@ TEST_F(NetworkTest, TestConvertIfAddrsGetsNullAddr) { std::vector> result; PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); manager.StartUpdating(); CallConvertIfAddrs(manager, &list, true, &result); EXPECT_TRUE(result.empty()); @@ -920,26 +945,27 @@ TEST_F(NetworkTest, TestGetAdapterTypeFromNetworkMonitor) { char if_name[20] = "wifi0"; std::string ipv6_address = "1000:2000:3000:4000:0:0:0:1"; std::string ipv6_mask = "FFFF:FFFF:FFFF:FFFF::"; + const Environment env = CreateEnvironment(); PhysicalSocketServer socket_server; - BasicNetworkManager manager_without_monitor(nullptr, &socket_server, - &field_trials_); + BasicNetworkManager manager_without_monitor( + env, &socket_server, /*network_monitor_factory=*/nullptr); manager_without_monitor.StartUpdating(); // A network created without a network monitor will get UNKNOWN type. ifaddrs* addr_list = InstallIpv6Network(if_name, ipv6_address, ipv6_mask, manager_without_monitor); - EXPECT_EQ(ADAPTER_TYPE_UNKNOWN, GetAdapterType(manager_without_monitor)); + EXPECT_EQ(webrtc::ADAPTER_TYPE_UNKNOWN, + GetAdapterType(manager_without_monitor)); ReleaseIfAddrs(addr_list); // With the fake network monitor the type should be correctly determined. FakeNetworkMonitorFactory factory; - BasicNetworkManager manager_with_monitor(&factory, &socket_server, - &field_trials_); + BasicNetworkManager manager_with_monitor(env, &socket_server, &factory); manager_with_monitor.StartUpdating(); // Add the same ipv6 address as before but it has the right network type // detected by the network monitor now. addr_list = InstallIpv6Network(if_name, ipv6_address, ipv6_mask, manager_with_monitor); - EXPECT_EQ(ADAPTER_TYPE_WIFI, GetAdapterType(manager_with_monitor)); + EXPECT_EQ(webrtc::ADAPTER_TYPE_WIFI, GetAdapterType(manager_with_monitor)); ReleaseIfAddrs(addr_list); } @@ -953,32 +979,32 @@ TEST_F(NetworkTest, TestGetAdapterTypeFromNameMatching) { std::string ipv6_address2 = "1000:2000:3000:8000:0:0:0:1"; std::string ipv6_mask = "FFFF:FFFF:FFFF:FFFF::"; PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); manager.StartUpdating(); // IPSec interface; name is in form "ipsec". 
char if_name[20] = "ipsec11"; ifaddrs* addr_list = InstallIpv6Network(if_name, ipv6_address1, ipv6_mask, manager); - EXPECT_EQ(ADAPTER_TYPE_VPN, GetAdapterType(manager)); + EXPECT_EQ(webrtc::ADAPTER_TYPE_VPN, GetAdapterType(manager)); ClearNetworks(manager); ReleaseIfAddrs(addr_list); strcpy(if_name, "lo0"); addr_list = InstallIpv6Network(if_name, ipv6_address1, ipv6_mask, manager); - EXPECT_EQ(ADAPTER_TYPE_LOOPBACK, GetAdapterType(manager)); + EXPECT_EQ(webrtc::ADAPTER_TYPE_LOOPBACK, GetAdapterType(manager)); ClearNetworks(manager); ReleaseIfAddrs(addr_list); strcpy(if_name, "eth0"); addr_list = InstallIpv4Network(if_name, ipv4_address1, ipv4_mask, manager); - EXPECT_EQ(ADAPTER_TYPE_ETHERNET, GetAdapterType(manager)); + EXPECT_EQ(webrtc::ADAPTER_TYPE_ETHERNET, GetAdapterType(manager)); ClearNetworks(manager); ReleaseIfAddrs(addr_list); strcpy(if_name, "wlan0"); addr_list = InstallIpv6Network(if_name, ipv6_address1, ipv6_mask, manager); - EXPECT_EQ(ADAPTER_TYPE_WIFI, GetAdapterType(manager)); + EXPECT_EQ(webrtc::ADAPTER_TYPE_WIFI, GetAdapterType(manager)); ClearNetworks(manager); ReleaseIfAddrs(addr_list); @@ -1031,7 +1057,7 @@ TEST_F(NetworkTest, TestNetworkMonitorIsAdapterAvailable) { // Sanity check that both interfaces are included by default. FakeNetworkMonitorFactory factory; PhysicalSocketServer socket_server; - BasicNetworkManager manager(&factory, &socket_server, &field_trials_); + BasicNetworkManager manager(CreateEnvironment(), &socket_server, &factory); manager.StartUpdating(); CallConvertIfAddrs(manager, list, /*include_ignored=*/false, &result); EXPECT_EQ(2u, result.size()); @@ -1057,17 +1083,19 @@ TEST_F(NetworkTest, TestNetworkMonitorIsAdapterAvailable) { // prefix/length into a single Network. TEST_F(NetworkTest, TestMergeNetworkList) { PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); std::vector> list; // Create 2 IPAddress classes with only last digit different. IPAddress ip1, ip2; - EXPECT_TRUE(IPFromString("2400:4030:1:2c00:be30:0:0:1", &ip1)); - EXPECT_TRUE(IPFromString("2400:4030:1:2c00:be30:0:0:2", &ip2)); + EXPECT_TRUE(webrtc::IPFromString("2400:4030:1:2c00:be30:0:0:1", &ip1)); + EXPECT_TRUE(webrtc::IPFromString("2400:4030:1:2c00:be30:0:0:2", &ip2)); // Create 2 networks with the same prefix and length. - auto net1 = std::make_unique("em1", "em1", TruncateIP(ip1, 64), 64); - auto net2 = std::make_unique("em1", "em1", TruncateIP(ip1, 64), 64); + auto net1 = + std::make_unique("em1", "em1", webrtc::TruncateIP(ip1, 64), 64); + auto net2 = + std::make_unique("em1", "em1", webrtc::TruncateIP(ip1, 64), 64); // Add different IP into each. net1->AddIP(ip1); @@ -1093,7 +1121,7 @@ TEST_F(NetworkTest, TestMergeNetworkList) { // a network becomes inactive and then active again. TEST_F(NetworkTest, TestMergeNetworkListWithInactiveNetworks) { PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); Network network1("test_wifi", "Test Network Adapter 1", IPAddress(0x12345600U), 24); Network network2("test_eth0", "Test Network Adapter 2", @@ -1142,17 +1170,18 @@ TEST_F(NetworkTest, TestIPv6Selection) { std::string ipstr; ipstr = "2401:fa00:4:1000:be30:5bff:fee5:c3"; - ASSERT_TRUE(IPFromString(ipstr, IPV6_ADDRESS_FLAG_DEPRECATED, &ip)); + ASSERT_TRUE( + webrtc::IPFromString(ipstr, webrtc::IPV6_ADDRESS_FLAG_DEPRECATED, &ip)); // Create a network with this prefix. 
- Network ipv6_network("test_eth0", "Test NetworkAdapter", TruncateIP(ip, 64), - 64); + Network ipv6_network("test_eth0", "Test NetworkAdapter", + webrtc::TruncateIP(ip, 64), 64); EXPECT_EQ(AF_INET6, ipv6_network.family()); // When there is no address added, it should return an unspecified // address. EXPECT_EQ(ipv6_network.GetBestIP(), IPAddress()); - EXPECT_TRUE(IPIsUnspec(ipv6_network.GetBestIP())); + EXPECT_TRUE(webrtc::IPIsUnspec(ipv6_network.GetBestIP())); // Deprecated one should not be returned. ipv6_network.AddIP(ip); @@ -1161,19 +1190,20 @@ TEST_F(NetworkTest, TestIPv6Selection) { // Add ULA one. ULA is unique local address which is starting either // with 0xfc or 0xfd. ipstr = "fd00:fa00:4:1000:be30:5bff:fee5:c4"; - ASSERT_TRUE(IPFromString(ipstr, IPV6_ADDRESS_FLAG_NONE, &ip)); + ASSERT_TRUE(webrtc::IPFromString(ipstr, webrtc::IPV6_ADDRESS_FLAG_NONE, &ip)); ipv6_network.AddIP(ip); EXPECT_EQ(ipv6_network.GetBestIP(), static_cast(ip)); // Add global one. ipstr = "2401:fa00:4:1000:be30:5bff:fee5:c5"; - ASSERT_TRUE(IPFromString(ipstr, IPV6_ADDRESS_FLAG_NONE, &ip)); + ASSERT_TRUE(webrtc::IPFromString(ipstr, webrtc::IPV6_ADDRESS_FLAG_NONE, &ip)); ipv6_network.AddIP(ip); EXPECT_EQ(ipv6_network.GetBestIP(), static_cast(ip)); // Add global dynamic temporary one. ipstr = "2401:fa00:4:1000:be30:5bff:fee5:c6"; - ASSERT_TRUE(IPFromString(ipstr, IPV6_ADDRESS_FLAG_TEMPORARY, &ip)); + ASSERT_TRUE( + webrtc::IPFromString(ipstr, webrtc::IPV6_ADDRESS_FLAG_TEMPORARY, &ip)); ipv6_network.AddIP(ip); EXPECT_EQ(ipv6_network.GetBestIP(), static_cast(ip)); } @@ -1184,16 +1214,18 @@ TEST_F(NetworkTest, TestGetBestIPWithPreferGlobalIPv6ToLinkLocalEnabled) { std::string ipstr; ipstr = "2401:fa00:4:1000:be30:5bff:fee5:c3"; - ASSERT_TRUE(IPFromString(ipstr, IPV6_ADDRESS_FLAG_DEPRECATED, &ip)); + ASSERT_TRUE( + webrtc::IPFromString(ipstr, webrtc::IPV6_ADDRESS_FLAG_DEPRECATED, &ip)); // Create a network with this prefix. - Network ipv6_network("test_eth0", "Test NetworkAdapter", TruncateIP(ip, 64), - 64, ADAPTER_TYPE_UNKNOWN); + Network ipv6_network("test_eth0", "Test NetworkAdapter", + webrtc::TruncateIP(ip, 64), 64, + webrtc::ADAPTER_TYPE_UNKNOWN); // When there is no address added, it should return an unspecified // address. EXPECT_EQ(ipv6_network.GetBestIP(), IPAddress()); - EXPECT_TRUE(IPIsUnspec(ipv6_network.GetBestIP())); + EXPECT_TRUE(webrtc::IPIsUnspec(ipv6_network.GetBestIP())); // Deprecated one should not be returned. ipv6_network.AddIP(ip); @@ -1202,39 +1234,43 @@ TEST_F(NetworkTest, TestGetBestIPWithPreferGlobalIPv6ToLinkLocalEnabled) { // Add ULA one. ULA is unique local address which is starting either // with 0xfc or 0xfd. ipstr = "fd00:fa00:4:1000:be30:5bff:fee5:c4"; - ASSERT_TRUE(IPFromString(ipstr, IPV6_ADDRESS_FLAG_NONE, &ip)); + ASSERT_TRUE(webrtc::IPFromString(ipstr, webrtc::IPV6_ADDRESS_FLAG_NONE, &ip)); ipv6_network.AddIP(ip); EXPECT_EQ(ipv6_network.GetBestIP(), static_cast(ip)); // Add link local one. ipstr = "fe80::aabb:ccff:fedd:eeff"; - ASSERT_TRUE(IPFromString(ipstr, IPV6_ADDRESS_FLAG_NONE, &link_local)); + ASSERT_TRUE( + webrtc::IPFromString(ipstr, webrtc::IPV6_ADDRESS_FLAG_NONE, &link_local)); ipv6_network.AddIP(link_local); EXPECT_EQ(ipv6_network.GetBestIP(), static_cast(link_local)); // Add global one. 
ipstr = "2401:fa00:4:1000:be30:5bff:fee5:c5"; - ASSERT_TRUE(IPFromString(ipstr, IPV6_ADDRESS_FLAG_NONE, &ip)); + ASSERT_TRUE(webrtc::IPFromString(ipstr, webrtc::IPV6_ADDRESS_FLAG_NONE, &ip)); ipv6_network.AddIP(ip); EXPECT_EQ(ipv6_network.GetBestIP(), static_cast(ip)); // Add another link local address, then the compatible address is still global // one. ipstr = "fe80::aabb:ccff:fedd:eedd"; - ASSERT_TRUE(IPFromString(ipstr, IPV6_ADDRESS_FLAG_NONE, &link_local)); + ASSERT_TRUE( + webrtc::IPFromString(ipstr, webrtc::IPV6_ADDRESS_FLAG_NONE, &link_local)); ipv6_network.AddIP(link_local); EXPECT_EQ(ipv6_network.GetBestIP(), static_cast(ip)); // Add global dynamic temporary one. ipstr = "2401:fa00:4:1000:be30:5bff:fee5:c6"; - ASSERT_TRUE(IPFromString(ipstr, IPV6_ADDRESS_FLAG_TEMPORARY, &ip)); + ASSERT_TRUE( + webrtc::IPFromString(ipstr, webrtc::IPV6_ADDRESS_FLAG_TEMPORARY, &ip)); ipv6_network.AddIP(ip); EXPECT_EQ(ipv6_network.GetBestIP(), static_cast(ip)); // Add another link local address, then the compatible address is still global // dynamic one. ipstr = "fe80::aabb:ccff:fedd:eedd"; - ASSERT_TRUE(IPFromString(ipstr, IPV6_ADDRESS_FLAG_NONE, &link_local)); + ASSERT_TRUE( + webrtc::IPFromString(ipstr, webrtc::IPV6_ADDRESS_FLAG_NONE, &link_local)); ipv6_network.AddIP(link_local); EXPECT_EQ(ipv6_network.GetBestIP(), static_cast(ip)); } @@ -1242,13 +1278,14 @@ TEST_F(NetworkTest, TestGetBestIPWithPreferGlobalIPv6ToLinkLocalEnabled) { TEST_F(NetworkTest, TestNetworkMonitoring) { FakeNetworkMonitorFactory factory; PhysicalSocketServer socket_server; - BasicNetworkManager manager(&factory, &socket_server, &field_trials_); + BasicNetworkManager manager(CreateEnvironment(), &socket_server, &factory); manager.SignalNetworksChanged.connect(static_cast(this), &NetworkTest::OnNetworksChanged); manager.StartUpdating(); FakeNetworkMonitor* network_monitor = GetNetworkMonitor(manager); EXPECT_TRUE(network_monitor && network_monitor->started()); - EXPECT_TRUE_WAIT(callback_called_, 1000); + EXPECT_THAT(webrtc::WaitUntil([&] { return callback_called_; }, IsTrue()), + webrtc::IsRtcOk()); callback_called_ = false; // Clear the networks so that there will be network changes below. @@ -1256,7 +1293,8 @@ TEST_F(NetworkTest, TestNetworkMonitoring) { // Network manager is started, so the callback is called when the network // monitor fires the network-change event. network_monitor->InovkeNetworksChangedCallbackForTesting(); - EXPECT_TRUE_WAIT(callback_called_, 1000); + EXPECT_THAT(webrtc::WaitUntil([&] { return callback_called_; }, IsTrue()), + webrtc::IsRtcOk()); // Network manager is stopped. manager.StopUpdating(); @@ -1274,11 +1312,13 @@ TEST_F(NetworkTest, MAYBE_DefaultLocalAddress) { IPAddress ip; FakeNetworkMonitorFactory factory; PhysicalSocketServer socket_server; - TestBasicNetworkManager manager(&factory, &socket_server, field_trials_); + TestBasicNetworkManager manager(CreateEnvironment(), &socket_server, + &factory); manager.SignalNetworksChanged.connect(static_cast(this), &NetworkTest::OnNetworksChanged); manager.StartUpdating(); - EXPECT_TRUE_WAIT(callback_called_, 1000); + EXPECT_THAT(webrtc::WaitUntil([&] { return callback_called_; }, IsTrue()), + webrtc::IsRtcOk()); // Make sure we can query default local address when an address for such // address family exists. 
@@ -1288,7 +1328,7 @@ TEST_F(NetworkTest, MAYBE_DefaultLocalAddress) { if (network->GetBestIP().family() == AF_INET) { EXPECT_TRUE(QueryDefaultLocalAddress(manager, AF_INET) != IPAddress()); } else if (network->GetBestIP().family() == AF_INET6 && - !IPIsLoopback(network->GetBestIP())) { + !webrtc::IPIsLoopback(network->GetBestIP())) { // Existence of an IPv6 loopback address doesn't mean it has IPv6 network // enabled. EXPECT_TRUE(QueryDefaultLocalAddress(manager, AF_INET6) != IPAddress()); @@ -1296,23 +1336,23 @@ TEST_F(NetworkTest, MAYBE_DefaultLocalAddress) { } // GetDefaultLocalAddress should return the valid default address after set. - manager.set_default_local_addresses(GetLoopbackIP(AF_INET), - GetLoopbackIP(AF_INET6)); + manager.set_default_local_addresses(webrtc::GetLoopbackIP(AF_INET), + webrtc::GetLoopbackIP(AF_INET6)); EXPECT_TRUE(manager.GetDefaultLocalAddress(AF_INET, &ip)); - EXPECT_EQ(ip, GetLoopbackIP(AF_INET)); + EXPECT_EQ(ip, webrtc::GetLoopbackIP(AF_INET)); EXPECT_TRUE(manager.GetDefaultLocalAddress(AF_INET6, &ip)); - EXPECT_EQ(ip, GetLoopbackIP(AF_INET6)); + EXPECT_EQ(ip, webrtc::GetLoopbackIP(AF_INET6)); // More tests on GetDefaultLocalAddress with ipv6 addresses where the set // default address may be different from the best IP address of any network. InterfaceAddress ip1; - EXPECT_TRUE(IPFromString("abcd::1234:5678:abcd:1111", - IPV6_ADDRESS_FLAG_TEMPORARY, &ip1)); + EXPECT_TRUE(webrtc::IPFromString("abcd::1234:5678:abcd:1111", + webrtc::IPV6_ADDRESS_FLAG_TEMPORARY, &ip1)); // Create a network with a prefix of ip1. - Network ipv6_network("test_eth0", "Test NetworkAdapter", TruncateIP(ip1, 64), - 64); + Network ipv6_network("test_eth0", "Test NetworkAdapter", + webrtc::TruncateIP(ip1, 64), 64); IPAddress ip2; - EXPECT_TRUE(IPFromString("abcd::1234:5678:abcd:2222", &ip2)); + EXPECT_TRUE(webrtc::IPFromString("abcd::1234:5678:abcd:2222", &ip2)); ipv6_network.AddIP(ip1); ipv6_network.AddIP(ip2); std::vector> list; @@ -1322,13 +1362,13 @@ TEST_F(NetworkTest, MAYBE_DefaultLocalAddress) { // If the set default address is not in any network, GetDefaultLocalAddress // should return it. IPAddress ip3; - EXPECT_TRUE(IPFromString("abcd::1234:5678:abcd:3333", &ip3)); - manager.set_default_local_addresses(GetLoopbackIP(AF_INET), ip3); + EXPECT_TRUE(webrtc::IPFromString("abcd::1234:5678:abcd:3333", &ip3)); + manager.set_default_local_addresses(webrtc::GetLoopbackIP(AF_INET), ip3); EXPECT_TRUE(manager.GetDefaultLocalAddress(AF_INET6, &ip)); EXPECT_EQ(ip3, ip); // If the set default address is in a network, GetDefaultLocalAddress will // return the best IP in that network. - manager.set_default_local_addresses(GetLoopbackIP(AF_INET), ip2); + manager.set_default_local_addresses(webrtc::GetLoopbackIP(AF_INET), ip2); EXPECT_TRUE(manager.GetDefaultLocalAddress(AF_INET6, &ip)); EXPECT_EQ(static_cast(ip1), ip); @@ -1339,12 +1379,13 @@ TEST_F(NetworkTest, MAYBE_DefaultLocalAddress) { // when changing from cellular_X to cellular_Y. 
TEST_F(NetworkTest, TestWhenNetworkListChangeReturnsChangedFlag) { PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); IPAddress ip1; - EXPECT_TRUE(IPFromString("2400:4030:1:2c00:be30:0:0:1", &ip1)); - auto net1 = std::make_unique("em1", "em1", TruncateIP(ip1, 64), 64); - net1->set_type(ADAPTER_TYPE_CELLULAR_3G); + EXPECT_TRUE(webrtc::IPFromString("2400:4030:1:2c00:be30:0:0:1", &ip1)); + auto net1 = + std::make_unique("em1", "em1", webrtc::TruncateIP(ip1, 64), 64); + net1->set_type(webrtc::ADAPTER_TYPE_CELLULAR_3G); net1->AddIP(ip1); std::vector> list; list.push_back(std::move(net1)); @@ -1355,14 +1396,14 @@ TEST_F(NetworkTest, TestWhenNetworkListChangeReturnsChangedFlag) { EXPECT_TRUE(changed); std::vector list2 = manager.GetNetworks(); EXPECT_EQ(list2.size(), 1uL); - EXPECT_EQ(ADAPTER_TYPE_CELLULAR_3G, list2[0]->type()); + EXPECT_EQ(webrtc::ADAPTER_TYPE_CELLULAR_3G, list2[0]->type()); } // Modify net1 from 3G to 4G { - auto net2 = - std::make_unique("em1", "em1", TruncateIP(ip1, 64), 64); - net2->set_type(ADAPTER_TYPE_CELLULAR_4G); + auto net2 = std::make_unique("em1", "em1", + webrtc::TruncateIP(ip1, 64), 64); + net2->set_type(webrtc::ADAPTER_TYPE_CELLULAR_4G); net2->AddIP(ip1); list.clear(); list.push_back(std::move(net2)); @@ -1374,14 +1415,14 @@ TEST_F(NetworkTest, TestWhenNetworkListChangeReturnsChangedFlag) { EXPECT_FALSE(changed); std::vector list2 = manager.GetNetworks(); ASSERT_EQ(list2.size(), 1uL); - EXPECT_EQ(ADAPTER_TYPE_CELLULAR_4G, list2[0]->type()); + EXPECT_EQ(webrtc::ADAPTER_TYPE_CELLULAR_4G, list2[0]->type()); } // Don't modify. { - auto net2 = - std::make_unique("em1", "em1", TruncateIP(ip1, 64), 64); - net2->set_type(ADAPTER_TYPE_CELLULAR_4G); + auto net2 = std::make_unique("em1", "em1", + webrtc::TruncateIP(ip1, 64), 64); + net2->set_type(webrtc::ADAPTER_TYPE_CELLULAR_4G); net2->AddIP(ip1); list.clear(); list.push_back(std::move(net2)); @@ -1392,7 +1433,7 @@ TEST_F(NetworkTest, TestWhenNetworkListChangeReturnsChangedFlag) { EXPECT_FALSE(changed); std::vector list2 = manager.GetNetworks(); ASSERT_EQ(list2.size(), 1uL); - EXPECT_EQ(ADAPTER_TYPE_CELLULAR_4G, list2[0]->type()); + EXPECT_EQ(webrtc::ADAPTER_TYPE_CELLULAR_4G, list2[0]->type()); } } @@ -1401,7 +1442,7 @@ TEST_F(NetworkTest, IgnoresMACBasedIPv6Address) { std::string ipv6_address = "2607:fc20:f340:1dc8:214:22ff:fe01:2345"; std::string ipv6_mask = "FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF"; PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); manager.StartUpdating(); // IPSec interface; name is in form "ipsec". @@ -1415,12 +1456,11 @@ TEST_F(NetworkTest, IgnoresMACBasedIPv6Address) { } TEST_F(NetworkTest, WebRTC_AllowMACBasedIPv6Address) { - webrtc::test::ScopedFieldTrials field_trials( - "WebRTC-AllowMACBasedIPv6/Enabled/"); + ScopedFieldTrials field_trials("WebRTC-AllowMACBasedIPv6/Enabled/"); std::string ipv6_address = "2607:fc20:f340:1dc8:214:22ff:fe01:2345"; std::string ipv6_mask = "FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF"; PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); manager.StartUpdating(); // IPSec interface; name is in form "ipsec". @@ -1447,7 +1487,7 @@ TEST_F(NetworkTest, WebRTC_BindUsingInterfaceName) { // Sanity check that both interfaces are included by default. 
FakeNetworkMonitorFactory factory; PhysicalSocketServer socket_server; - BasicNetworkManager manager(&factory, &socket_server, &field_trials_); + BasicNetworkManager manager(CreateEnvironment(), &socket_server, &factory); manager.StartUpdating(); CallConvertIfAddrs(manager, list, /*include_ignored=*/false, &result); EXPECT_EQ(2u, result.size()); @@ -1460,9 +1500,9 @@ TEST_F(NetworkTest, WebRTC_BindUsingInterfaceName) { FakeNetworkMonitor* network_monitor = GetNetworkMonitor(manager); IPAddress ipv6; - EXPECT_TRUE(IPFromString("1000:2000:3000:4000:0:0:0:1", &ipv6)); + EXPECT_TRUE(webrtc::IPFromString("1000:2000:3000:4000:0:0:0:1", &ipv6)); IPAddress ipv4; - EXPECT_TRUE(IPFromString("192.168.0.2", &ipv4)); + EXPECT_TRUE(webrtc::IPFromString("192.168.0.2", &ipv4)); // The network monitor only knwos about the ipv6 address, interface. network_monitor->set_adapters({"wlan0"}); @@ -1478,15 +1518,15 @@ TEST_F(NetworkTest, WebRTC_BindUsingInterfaceName) { TEST_F(NetworkTest, NetworkCostVpn_Default) { IPAddress ip1; - EXPECT_TRUE(IPFromString("2400:4030:1:2c00:be30:0:0:1", &ip1)); - webrtc::test::ScopedKeyValueConfig field_trials; + EXPECT_TRUE(webrtc::IPFromString("2400:4030:1:2c00:be30:0:0:1", &ip1)); + ScopedKeyValueConfig field_trials; - Network* net1 = new Network("em1", "em1", TruncateIP(ip1, 64), 64); - net1->set_type(ADAPTER_TYPE_VPN); - net1->set_underlying_type_for_vpn(ADAPTER_TYPE_ETHERNET); + Network* net1 = new Network("em1", "em1", webrtc::TruncateIP(ip1, 64), 64); + net1->set_type(webrtc::ADAPTER_TYPE_VPN); + net1->set_underlying_type_for_vpn(webrtc::ADAPTER_TYPE_ETHERNET); - Network* net2 = new Network("em1", "em1", TruncateIP(ip1, 64), 64); - net2->set_type(ADAPTER_TYPE_ETHERNET); + Network* net2 = new Network("em1", "em1", webrtc::TruncateIP(ip1, 64), 64); + net2->set_type(webrtc::ADAPTER_TYPE_ETHERNET); EXPECT_EQ(net1->GetCost(field_trials), net2->GetCost(field_trials)); delete net1; @@ -1494,18 +1534,17 @@ TEST_F(NetworkTest, NetworkCostVpn_Default) { } TEST_F(NetworkTest, NetworkCostVpn_VpnMoreExpensive) { - webrtc::test::ScopedKeyValueConfig field_trials( - "WebRTC-AddNetworkCostToVpn/Enabled/"); + ScopedKeyValueConfig field_trials("WebRTC-AddNetworkCostToVpn/Enabled/"); IPAddress ip1; - EXPECT_TRUE(IPFromString("2400:4030:1:2c00:be30:0:0:1", &ip1)); + EXPECT_TRUE(webrtc::IPFromString("2400:4030:1:2c00:be30:0:0:1", &ip1)); - Network* net1 = new Network("em1", "em1", TruncateIP(ip1, 64), 64); - net1->set_type(ADAPTER_TYPE_VPN); - net1->set_underlying_type_for_vpn(ADAPTER_TYPE_ETHERNET); + Network* net1 = new Network("em1", "em1", webrtc::TruncateIP(ip1, 64), 64); + net1->set_type(webrtc::ADAPTER_TYPE_VPN); + net1->set_underlying_type_for_vpn(webrtc::ADAPTER_TYPE_ETHERNET); - Network* net2 = new Network("em1", "em1", TruncateIP(ip1, 64), 64); - net2->set_type(ADAPTER_TYPE_ETHERNET); + Network* net2 = new Network("em1", "em1", webrtc::TruncateIP(ip1, 64), 64); + net2->set_type(webrtc::ADAPTER_TYPE_ETHERNET); EXPECT_GT(net1->GetCost(field_trials), net2->GetCost(field_trials)); delete net1; @@ -1513,40 +1552,40 @@ TEST_F(NetworkTest, NetworkCostVpn_VpnMoreExpensive) { } TEST_F(NetworkTest, GuessAdapterFromNetworkCost) { - webrtc::test::ScopedKeyValueConfig field_trials( + ScopedKeyValueConfig field_trials( "WebRTC-AddNetworkCostToVpn/Enabled/" "WebRTC-UseDifferentiatedCellularCosts/Enabled/"); IPAddress ip1; - EXPECT_TRUE(IPFromString("2400:4030:1:2c00:be30:0:0:1", &ip1)); + EXPECT_TRUE(webrtc::IPFromString("2400:4030:1:2c00:be30:0:0:1", &ip1)); - for (auto type : kAllAdapterTypes) 
{ - if (type == rtc::ADAPTER_TYPE_VPN) + for (auto type : webrtc::kAllAdapterTypes) { + if (type == webrtc::ADAPTER_TYPE_VPN) continue; - Network net1("em1", "em1", TruncateIP(ip1, 64), 64); + Network net1("em1", "em1", webrtc::TruncateIP(ip1, 64), 64); net1.set_type(type); auto [guess, vpn] = Network::GuessAdapterFromNetworkCost(net1.GetCost(field_trials)); EXPECT_FALSE(vpn); - if (type == rtc::ADAPTER_TYPE_LOOPBACK) { - EXPECT_EQ(guess, rtc::ADAPTER_TYPE_ETHERNET); + if (type == webrtc::ADAPTER_TYPE_LOOPBACK) { + EXPECT_EQ(guess, webrtc::ADAPTER_TYPE_ETHERNET); } else { EXPECT_EQ(type, guess); } } // VPN - for (auto type : kAllAdapterTypes) { - if (type == rtc::ADAPTER_TYPE_VPN) + for (auto type : webrtc::kAllAdapterTypes) { + if (type == webrtc::ADAPTER_TYPE_VPN) continue; - Network net1("em1", "em1", TruncateIP(ip1, 64), 64); - net1.set_type(rtc::ADAPTER_TYPE_VPN); + Network net1("em1", "em1", webrtc::TruncateIP(ip1, 64), 64); + net1.set_type(webrtc::ADAPTER_TYPE_VPN); net1.set_underlying_type_for_vpn(type); auto [guess, vpn] = Network::GuessAdapterFromNetworkCost(net1.GetCost(field_trials)); EXPECT_TRUE(vpn); - if (type == rtc::ADAPTER_TYPE_LOOPBACK) { - EXPECT_EQ(guess, rtc::ADAPTER_TYPE_ETHERNET); + if (type == webrtc::ADAPTER_TYPE_LOOPBACK) { + EXPECT_EQ(guess, webrtc::ADAPTER_TYPE_ETHERNET); } else { EXPECT_EQ(type, guess); } @@ -1554,9 +1593,10 @@ TEST_F(NetworkTest, GuessAdapterFromNetworkCost) { } TEST_F(NetworkTest, VpnList) { + const Environment env = CreateEnvironment(); PhysicalSocketServer socket_server; { - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(env, &socket_server); manager.set_vpn_list({NetworkMask(IPFromString("192.168.0.0"), 16)}); manager.StartUpdating(); EXPECT_TRUE(manager.IsConfiguredVpn(IPFromString("192.168.1.1"), 32)); @@ -1568,7 +1608,7 @@ TEST_F(NetworkTest, VpnList) { EXPECT_FALSE(manager.IsConfiguredVpn(IPFromString("192.168.0.0"), 15)); } { - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(env, &socket_server); manager.set_vpn_list({NetworkMask(IPFromString("192.168.0.0"), 24)}); manager.StartUpdating(); EXPECT_FALSE(manager.IsConfiguredVpn(IPFromString("192.168.1.1"), 32)); @@ -1580,7 +1620,7 @@ TEST_F(NetworkTest, VpnList) { // TODO(webrtc:13114): Implement the InstallIpv4Network for windows. 
TEST_F(NetworkTest, VpnListOverrideAdapterType) { PhysicalSocketServer socket_server; - BasicNetworkManager manager(&socket_server); + BasicNetworkManager manager(CreateEnvironment(), &socket_server); manager.set_vpn_list({NetworkMask(IPFromString("192.168.0.0"), 16)}); manager.StartUpdating(); @@ -1590,8 +1630,8 @@ TEST_F(NetworkTest, VpnListOverrideAdapterType) { std::vector list = manager.GetNetworks(); ASSERT_EQ(1u, list.size()); - EXPECT_EQ(ADAPTER_TYPE_VPN, list[0]->type()); - EXPECT_EQ(ADAPTER_TYPE_ETHERNET, list[0]->underlying_type_for_vpn()); + EXPECT_EQ(webrtc::ADAPTER_TYPE_VPN, list[0]->type()); + EXPECT_EQ(webrtc::ADAPTER_TYPE_ETHERNET, list[0]->underlying_type_for_vpn()); ClearNetworks(manager); ReleaseIfAddrs(addr_list); } @@ -1605,8 +1645,8 @@ TEST_F(NetworkTest, HardcodedVpn) { EXPECT_TRUE(NetworkManagerBase::IsVpnMacAddress(cisco)); EXPECT_TRUE(NetworkManagerBase::IsVpnMacAddress(global)); - EXPECT_FALSE(NetworkManagerBase::IsVpnMacAddress( - rtc::ArrayView(cisco, 5))); + EXPECT_FALSE( + NetworkManagerBase::IsVpnMacAddress(ArrayView(cisco, 5))); EXPECT_FALSE(NetworkManagerBase::IsVpnMacAddress(five_bytes)); EXPECT_FALSE(NetworkManagerBase::IsVpnMacAddress(unknown)); EXPECT_FALSE(NetworkManagerBase::IsVpnMacAddress(nullptr)); @@ -1701,4 +1741,4 @@ TEST(CompareNetworks, TransitivityOfIncomparabilityTest) { EXPECT_FALSE(webrtc_network_internal::CompareNetworks(network_f, network_d)); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/null_socket_server.cc b/rtc_base/null_socket_server.cc index 366349db3a..47896a4213 100644 --- a/rtc_base/null_socket_server.cc +++ b/rtc_base/null_socket_server.cc @@ -15,13 +15,13 @@ #include "rtc_base/event.h" #include "rtc_base/socket_server.h" -namespace rtc { +namespace webrtc { NullSocketServer::NullSocketServer() = default; NullSocketServer::~NullSocketServer() {} -bool NullSocketServer::Wait(webrtc::TimeDelta max_wait_duration, - bool process_io) { +bool NullSocketServer::Wait(TimeDelta max_wait_duration, + bool /* process_io */) { // Wait with the given timeout. Do not log a warning if we end up waiting for // a long time; that just means no one has any work for us, which is perfectly // legitimate. @@ -33,9 +33,9 @@ void NullSocketServer::WakeUp() { event_.Set(); } -rtc::Socket* NullSocketServer::CreateSocket(int /* family */, int /* type */) { +Socket* NullSocketServer::CreateSocket(int /* family */, int /* type */) { RTC_DCHECK_NOTREACHED(); return nullptr; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/null_socket_server.h b/rtc_base/null_socket_server.h index 87f49f436e..747c6d01d0 100644 --- a/rtc_base/null_socket_server.h +++ b/rtc_base/null_socket_server.h @@ -16,14 +16,14 @@ #include "rtc_base/socket_server.h" #include "rtc_base/system/rtc_export.h" -namespace rtc { +namespace webrtc { class RTC_EXPORT NullSocketServer : public SocketServer { public: NullSocketServer(); ~NullSocketServer() override; - bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) override; + bool Wait(TimeDelta max_wait_duration, bool process_io) override; void WakeUp() override; Socket* CreateSocket(int family, int type) override; @@ -32,6 +32,14 @@ class RTC_EXPORT NullSocketServer : public SocketServer { Event event_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::NullSocketServer; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NULL_SOCKET_SERVER_H_ diff --git a/rtc_base/null_socket_server_unittest.cc b/rtc_base/null_socket_server_unittest.cc index 58a6211aba..b89a2415e6 100644 --- a/rtc_base/null_socket_server_unittest.cc +++ b/rtc_base/null_socket_server_unittest.cc @@ -14,31 +14,38 @@ #include +#include "api/test/rtc_error_matchers.h" #include "api/units/time_delta.h" -#include "rtc_base/gunit.h" +#include "rtc_base/socket_server.h" #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" -namespace rtc { +namespace webrtc { TEST(NullSocketServerTest, WaitAndSet) { + AutoThread main_thread; NullSocketServer ss; auto thread = Thread::Create(); EXPECT_TRUE(thread->Start()); thread->PostTask([&ss] { ss.WakeUp(); }); // The process_io will be ignored. const bool process_io = true; - EXPECT_TRUE_WAIT(ss.Wait(SocketServer::kForever, process_io), 5'000); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return ss.Wait(SocketServer::kForever, process_io); }, + ::testing::IsTrue(), {.timeout = TimeDelta::Millis(5'000)}), + webrtc::IsRtcOk()); } TEST(NullSocketServerTest, TestWait) { NullSocketServer ss; int64_t start = TimeMillis(); - ss.Wait(webrtc::TimeDelta::Millis(200), true); + ss.Wait(TimeDelta::Millis(200), true); // The actual wait time is dependent on the resolution of the timer used by // the Event class. Allow for the event to signal ~20ms early. EXPECT_GE(TimeSince(start), 180); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/numerics/divide_round.h b/rtc_base/numerics/divide_round.h index 90c67fca3c..4143902b44 100644 --- a/rtc_base/numerics/divide_round.h +++ b/rtc_base/numerics/divide_round.h @@ -40,7 +40,7 @@ inline auto constexpr DivideRoundToNearest(Dividend dividend, Divisor divisor) { auto half_of_divisor = divisor / 2; auto quotient = dividend / divisor; auto remainder = dividend % divisor; - if (rtc::SafeGt(-remainder, half_of_divisor)) { + if (SafeGt(-remainder, half_of_divisor)) { --quotient; } return quotient; @@ -49,7 +49,7 @@ inline auto constexpr DivideRoundToNearest(Dividend dividend, Divisor divisor) { auto half_of_divisor = (divisor - 1) / 2; auto quotient = dividend / divisor; auto remainder = dividend % divisor; - if (rtc::SafeGt(remainder, half_of_divisor)) { + if (SafeGt(remainder, half_of_divisor)) { ++quotient; } return quotient; diff --git a/rtc_base/numerics/divide_round_unittest.cc b/rtc_base/numerics/divide_round_unittest.cc index 00548e1cb2..c0934e64af 100644 --- a/rtc_base/numerics/divide_round_unittest.cc +++ b/rtc_base/numerics/divide_round_unittest.cc @@ -10,7 +10,9 @@ #include "rtc_base/numerics/divide_round.h" +#include #include +#include #include "test/gtest.h" diff --git a/rtc_base/numerics/event_based_exponential_moving_average.cc b/rtc_base/numerics/event_based_exponential_moving_average.cc index b426fdeed7..27b2066a97 100644 --- a/rtc_base/numerics/event_based_exponential_moving_average.cc +++ b/rtc_base/numerics/event_based_exponential_moving_average.cc @@ -11,6 +11,8 @@ #include "rtc_base/numerics/event_based_exponential_moving_average.h" #include +#include +#include #include "rtc_base/checks.h" @@ -22,7 +24,7 @@ constexpr double ninetyfive_percent_confidence = 1.96; } // namespace -namespace rtc { +namespace webrtc { // `half_time` specifies how much weight will be given to old samples, // a 
sample gets exponentially less weight so that it's 50% @@ -79,4 +81,4 @@ double EventBasedExponentialMovingAverage::GetConfidenceInterval() const { sqrt(sample_variance_ * estimator_variance_); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/numerics/event_based_exponential_moving_average.h b/rtc_base/numerics/event_based_exponential_moving_average.h index 69f4e614cb..478263c2c2 100644 --- a/rtc_base/numerics/event_based_exponential_moving_average.h +++ b/rtc_base/numerics/event_based_exponential_moving_average.h @@ -14,10 +14,9 @@ #include #include #include +#include -#include "absl/types/optional.h" - -namespace rtc { +namespace webrtc { /** * This class implements exponential moving average for time series @@ -63,9 +62,17 @@ class EventBasedExponentialMovingAverage { double sample_variance_ = std::numeric_limits::infinity(); // This is the ratio between variance of the estimate and variance of samples. double estimator_variance_ = 1; - absl::optional last_observation_timestamp_; + std::optional last_observation_timestamp_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::EventBasedExponentialMovingAverage; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NUMERICS_EVENT_BASED_EXPONENTIAL_MOVING_AVERAGE_H_ diff --git a/rtc_base/numerics/event_based_exponential_moving_average_unittest.cc b/rtc_base/numerics/event_based_exponential_moving_average_unittest.cc index 967be41213..14ef9cb5e7 100644 --- a/rtc_base/numerics/event_based_exponential_moving_average_unittest.cc +++ b/rtc_base/numerics/event_based_exponential_moving_average_unittest.cc @@ -11,6 +11,8 @@ #include "rtc_base/numerics/event_based_exponential_moving_average.h" #include +#include +#include #include "test/gtest.h" @@ -21,7 +23,7 @@ constexpr double kError = 0.1; } // namespace -namespace rtc { +namespace webrtc { TEST(EventBasedExponentialMovingAverageTest, NoValue) { EventBasedExponentialMovingAverage average(kHalfTime); @@ -224,4 +226,4 @@ TEST(EventBasedExponentialMovingAverageTest, SimultaneousSamples) { average.AddSample(time, value); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/numerics/event_rate_counter.cc b/rtc_base/numerics/event_rate_counter.cc index d7b7293918..52c9de64fc 100644 --- a/rtc_base/numerics/event_rate_counter.cc +++ b/rtc_base/numerics/event_rate_counter.cc @@ -10,6 +10,10 @@ #include "rtc_base/numerics/event_rate_counter.h" #include +#include + +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" namespace webrtc { diff --git a/rtc_base/numerics/event_rate_counter.h b/rtc_base/numerics/event_rate_counter.h index 60ec3ba416..ef107ba514 100644 --- a/rtc_base/numerics/event_rate_counter.h +++ b/rtc_base/numerics/event_rate_counter.h @@ -10,6 +10,10 @@ #ifndef RTC_BASE_NUMERICS_EVENT_RATE_COUNTER_H_ #define RTC_BASE_NUMERICS_EVENT_RATE_COUNTER_H_ +#include + +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "rtc_base/numerics/sample_stats.h" namespace webrtc { diff --git a/rtc_base/numerics/exp_filter.cc b/rtc_base/numerics/exp_filter.cc index a58250abc4..5dbca9bbe2 100644 --- a/rtc_base/numerics/exp_filter.cc +++ b/rtc_base/numerics/exp_filter.cc @@ -12,7 +12,7 @@ #include -namespace rtc { +namespace webrtc { const float ExpFilter::kValueUndefined = -1.0f; @@ -40,4 +40,4 @@ float 
ExpFilter::Apply(float exp, float sample) { void ExpFilter::UpdateBase(float alpha) { alpha_ = alpha; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/numerics/exp_filter.h b/rtc_base/numerics/exp_filter.h index 6bded80d02..fe411c489c 100644 --- a/rtc_base/numerics/exp_filter.h +++ b/rtc_base/numerics/exp_filter.h @@ -11,7 +11,7 @@ #ifndef RTC_BASE_NUMERICS_EXP_FILTER_H_ #define RTC_BASE_NUMERICS_EXP_FILTER_H_ -namespace rtc { +namespace webrtc { // This class can be used, for example, for smoothing the result of bandwidth // estimation and packet loss estimation. @@ -43,6 +43,14 @@ class ExpFilter { float filtered_; // Current filter output. const float max_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::ExpFilter; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NUMERICS_EXP_FILTER_H_ diff --git a/rtc_base/numerics/exp_filter_unittest.cc b/rtc_base/numerics/exp_filter_unittest.cc index f5b436f1b9..b3de3a3569 100644 --- a/rtc_base/numerics/exp_filter_unittest.cc +++ b/rtc_base/numerics/exp_filter_unittest.cc @@ -14,7 +14,7 @@ #include "test/gtest.h" -namespace rtc { +namespace webrtc { TEST(ExpFilterTest, FirstTimeOutputEqualInput) { // No max value defined. @@ -69,4 +69,4 @@ TEST(ExpfilterTest, OutputLimitedByMax) { EXPECT_FLOAT_EQ(value, filter.filtered()); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/numerics/histogram_percentile_counter.cc b/rtc_base/numerics/histogram_percentile_counter.cc index 29d2341c85..d93d318a59 100644 --- a/rtc_base/numerics/histogram_percentile_counter.cc +++ b/rtc_base/numerics/histogram_percentile_counter.cc @@ -12,10 +12,13 @@ #include #include +#include +#include +#include #include "rtc_base/checks.h" -namespace rtc { +namespace webrtc { HistogramPercentileCounter::HistogramPercentileCounter( uint32_t long_tail_boundary) : histogram_low_(size_t{long_tail_boundary}), @@ -48,12 +51,12 @@ void HistogramPercentileCounter::Add(uint32_t value) { Add(value, 1); } -absl::optional HistogramPercentileCounter::GetPercentile( +std::optional HistogramPercentileCounter::GetPercentile( float fraction) { RTC_CHECK_LE(fraction, 1.0); RTC_CHECK_GE(fraction, 0.0); if (total_elements_ == 0) - return absl::nullopt; + return std::nullopt; size_t elements_to_skip = static_cast( std::max(0.0f, std::ceil(total_elements_ * fraction) - 1)); if (elements_to_skip >= total_elements_) @@ -73,7 +76,7 @@ absl::optional HistogramPercentileCounter::GetPercentile( } } RTC_DCHECK_NOTREACHED(); - return absl::nullopt; + return std::nullopt; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/numerics/histogram_percentile_counter.h b/rtc_base/numerics/histogram_percentile_counter.h index 4787f2ef98..f1cb02e3da 100644 --- a/rtc_base/numerics/histogram_percentile_counter.h +++ b/rtc_base/numerics/histogram_percentile_counter.h @@ -15,11 +15,10 @@ #include #include +#include #include -#include "absl/types/optional.h" - -namespace rtc { +namespace webrtc { // Calculates percentiles on the stream of data. Use `Add` methods to add new // values. Use `GetPercentile` to get percentile of the currently added values. 
class HistogramPercentileCounter { @@ -32,7 +31,7 @@ class HistogramPercentileCounter { void Add(uint32_t value, size_t count); void Add(const HistogramPercentileCounter& other); // Argument should be from 0 to 1. - absl::optional GetPercentile(float fraction); + std::optional GetPercentile(float fraction); private: std::vector histogram_low_; @@ -41,5 +40,13 @@ class HistogramPercentileCounter { size_t total_elements_; size_t total_elements_low_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::HistogramPercentileCounter; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NUMERICS_HISTOGRAM_PERCENTILE_COUNTER_H_ diff --git a/rtc_base/numerics/histogram_percentile_counter_unittest.cc b/rtc_base/numerics/histogram_percentile_counter_unittest.cc index fc36b59208..54ffa9b34c 100644 --- a/rtc_base/numerics/histogram_percentile_counter_unittest.cc +++ b/rtc_base/numerics/histogram_percentile_counter_unittest.cc @@ -17,7 +17,7 @@ #include "test/gtest.h" TEST(HistogramPercentileCounterTest, ReturnsCorrectPercentiles) { - rtc::HistogramPercentileCounter counter(10); + webrtc::HistogramPercentileCounter counter(10); const std::vector kTestValues = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20}; @@ -37,7 +37,7 @@ TEST(HistogramPercentileCounterTest, ReturnsCorrectPercentiles) { } TEST(HistogramPercentileCounterTest, HandlesEmptySequence) { - rtc::HistogramPercentileCounter counter(10); + webrtc::HistogramPercentileCounter counter(10); EXPECT_FALSE(counter.GetPercentile(0.5f)); counter.Add(1u); EXPECT_TRUE(counter.GetPercentile(0.5f)); diff --git a/rtc_base/numerics/moving_average.cc b/rtc_base/numerics/moving_average.cc index c825839227..17c5d9cd9a 100644 --- a/rtc_base/numerics/moving_average.cc +++ b/rtc_base/numerics/moving_average.cc @@ -11,10 +11,13 @@ #include "rtc_base/numerics/moving_average.h" #include +#include +#include +#include #include "rtc_base/checks.h" -namespace rtc { +namespace webrtc { MovingAverage::MovingAverage(size_t window_size) : history_(window_size, 0) { // Limit window size to avoid overflow. 
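The moving_average.cc and moving_average.h hunks that follow switch MovingAverage from absl::optional to std::optional return values. A minimal usage sketch (illustrative, not part of the diff), mirroring the 1, 2, 2 samples used in the unit test further down:

#include <optional>

#include "rtc_base/numerics/moving_average.h"

void MovingAverageSketch() {
  webrtc::MovingAverage average(/*window_size=*/3);
  // With no samples added yet, every getter returns std::nullopt.
  std::optional<int> empty = average.GetAverageRoundedDown();
  average.AddSample(1);
  average.AddSample(2);
  average.AddSample(2);
  std::optional<int> down = average.GetAverageRoundedDown();          // 5 / 3 == 1
  std::optional<int> nearest = average.GetAverageRoundedToClosest();  // 2
  std::optional<double> exact = average.GetUnroundedAverage();        // ~1.67
}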
@@ -31,21 +34,21 @@ void MovingAverage::AddSample(int sample) { history_[index] = sample; } -absl::optional<int> MovingAverage::GetAverageRoundedDown() const { +std::optional<int> MovingAverage::GetAverageRoundedDown() const { if (count_ == 0) - return absl::nullopt; + return std::nullopt; return sum_ / Size(); } -absl::optional<int> MovingAverage::GetAverageRoundedToClosest() const { +std::optional<int> MovingAverage::GetAverageRoundedToClosest() const { if (count_ == 0) - return absl::nullopt; + return std::nullopt; return (sum_ + Size() / 2) / Size(); } -absl::optional<double> MovingAverage::GetUnroundedAverage() const { +std::optional<double> MovingAverage::GetUnroundedAverage() const { if (count_ == 0) - return absl::nullopt; + return std::nullopt; return sum_ / static_cast<double>(Size()); } @@ -57,4 +60,4 @@ void MovingAverage::Reset() { size_t MovingAverage::Size() const { return std::min(count_, history_.size()); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/numerics/moving_average.h b/rtc_base/numerics/moving_average.h index 41ce60348e..6011d0c23f 100644 --- a/rtc_base/numerics/moving_average.h +++ b/rtc_base/numerics/moving_average.h @@ -14,11 +14,10 @@ #include #include +#include <optional> #include -#include "absl/types/optional.h" - -namespace rtc { +namespace webrtc { // Calculates average over fixed size window. If there are less than window // size elements, calculates average of all inserted so far elements. @@ -38,13 +37,13 @@ class MovingAverage { // Returns rounded down average of last `window_size` elements or all // elements if there are not enough of them. Returns nullopt if there were // no elements added. - absl::optional<int> GetAverageRoundedDown() const; + std::optional<int> GetAverageRoundedDown() const; // Same as above but rounded to the closest integer. - absl::optional<int> GetAverageRoundedToClosest() const; + std::optional<int> GetAverageRoundedToClosest() const; // Returns unrounded average over the window. - absl::optional<double> GetUnroundedAverage() const; + std::optional<double> GetUnroundedAverage() const; // Resets to the initial state before any elements were added. void Reset(); @@ -62,5 +61,13 @@ class MovingAverage { std::vector<int> history_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::MovingAverage; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NUMERICS_MOVING_AVERAGE_H_ diff --git a/rtc_base/numerics/moving_average_unittest.cc b/rtc_base/numerics/moving_average_unittest.cc index 9bc9a1aef8..5b691e7476 100644 --- a/rtc_base/numerics/moving_average_unittest.cc +++ b/rtc_base/numerics/moving_average_unittest.cc @@ -10,26 +10,28 @@ #include "rtc_base/numerics/moving_average.h" +#include <optional> + #include "test/gtest.h" namespace test { TEST(MovingAverageTest, EmptyAverage) { - rtc::MovingAverage moving_average(1); + webrtc::MovingAverage moving_average(1); EXPECT_EQ(0u, moving_average.Size()); - EXPECT_EQ(absl::nullopt, moving_average.GetAverageRoundedDown()); + EXPECT_EQ(std::nullopt, moving_average.GetAverageRoundedDown()); } // Test single value.
TEST(MovingAverageTest, OneElement) { - rtc::MovingAverage moving_average(1); + webrtc::MovingAverage moving_average(1); moving_average.AddSample(3); EXPECT_EQ(1u, moving_average.Size()); EXPECT_EQ(3, *moving_average.GetAverageRoundedDown()); } TEST(MovingAverageTest, GetAverage) { - rtc::MovingAverage moving_average(1024); + webrtc::MovingAverage moving_average(1024); moving_average.AddSample(1); moving_average.AddSample(1); moving_average.AddSample(3); @@ -39,7 +41,7 @@ TEST(MovingAverageTest, GetAverage) { } TEST(MovingAverageTest, GetAverageRoundedDownRounds) { - rtc::MovingAverage moving_average(1024); + webrtc::MovingAverage moving_average(1024); moving_average.AddSample(1); moving_average.AddSample(2); moving_average.AddSample(2); @@ -48,7 +50,7 @@ TEST(MovingAverageTest, GetAverageRoundedDownRounds) { } TEST(MovingAverageTest, GetAverageRoundedToClosestRounds) { - rtc::MovingAverage moving_average(1024); + webrtc::MovingAverage moving_average(1024); moving_average.AddSample(1); moving_average.AddSample(2); moving_average.AddSample(2); @@ -57,7 +59,7 @@ TEST(MovingAverageTest, GetAverageRoundedToClosestRounds) { } TEST(MovingAverageTest, Reset) { - rtc::MovingAverage moving_average(5); + webrtc::MovingAverage moving_average(5); moving_average.AddSample(1); EXPECT_EQ(1, *moving_average.GetAverageRoundedDown()); EXPECT_EQ(1, *moving_average.GetAverageRoundedToClosest()); @@ -71,7 +73,7 @@ TEST(MovingAverageTest, Reset) { } TEST(MovingAverageTest, ManySamples) { - rtc::MovingAverage moving_average(10); + webrtc::MovingAverage moving_average(10); for (int i = 1; i < 11; i++) { moving_average.AddSample(i); } diff --git a/rtc_base/numerics/moving_max_counter.h b/rtc_base/numerics/moving_max_counter.h index 5eb45d392b..88557a191b 100644 --- a/rtc_base/numerics/moving_max_counter.h +++ b/rtc_base/numerics/moving_max_counter.h @@ -15,12 +15,12 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "rtc_base/checks.h" -namespace rtc { +namespace webrtc { // Implements moving max: can add samples to it and calculate maximum over some // fixed moving window. @@ -43,7 +43,7 @@ class MovingMaxCounter { // Advances the current time, and returns the maximum sample in the time // window ending at the current time. The new current time must be at least as // large as the old current time. - absl::optional Max(int64_t current_time_ms); + std::optional Max(int64_t current_time_ms); void Reset(); private: @@ -85,9 +85,9 @@ void MovingMaxCounter::Add(const T& sample, int64_t current_time_ms) { } template -absl::optional MovingMaxCounter::Max(int64_t current_time_ms) { +std::optional MovingMaxCounter::Max(int64_t current_time_ms) { RollWindow(current_time_ms); - absl::optional res; + std::optional res; if (!samples_.empty()) { res.emplace(samples_.front().second); } @@ -113,6 +113,14 @@ void MovingMaxCounter::RollWindow(int64_t new_time_ms) { samples_.erase(samples_.begin(), it); } +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::MovingMaxCounter; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NUMERICS_MOVING_MAX_COUNTER_H_ diff --git a/rtc_base/numerics/moving_max_counter_unittest.cc b/rtc_base/numerics/moving_max_counter_unittest.cc index 0e3195f467..f8f84c126a 100644 --- a/rtc_base/numerics/moving_max_counter_unittest.cc +++ b/rtc_base/numerics/moving_max_counter_unittest.cc @@ -13,7 +13,7 @@ #include "test/gtest.h" TEST(MovingMaxCounter, ReportsMaximumInTheWindow) { - rtc::MovingMaxCounter counter(100); + webrtc::MovingMaxCounter counter(100); counter.Add(1, 1); EXPECT_EQ(counter.Max(1), 1); counter.Add(2, 30); @@ -27,7 +27,7 @@ TEST(MovingMaxCounter, ReportsMaximumInTheWindow) { } TEST(MovingMaxCounter, IgnoresOldElements) { - rtc::MovingMaxCounter counter(100); + webrtc::MovingMaxCounter counter(100); counter.Add(1, 1); counter.Add(2, 30); counter.Add(100, 60); @@ -39,14 +39,14 @@ TEST(MovingMaxCounter, IgnoresOldElements) { } TEST(MovingMaxCounter, HandlesEmptyWindow) { - rtc::MovingMaxCounter counter(100); + webrtc::MovingMaxCounter counter(100); counter.Add(123, 1); EXPECT_TRUE(counter.Max(101).has_value()); EXPECT_FALSE(counter.Max(102).has_value()); } TEST(MovingMaxCounter, HandlesSamplesWithEqualTimestamps) { - rtc::MovingMaxCounter counter(100); + webrtc::MovingMaxCounter counter(100); counter.Add(2, 30); EXPECT_EQ(counter.Max(30), 2); counter.Add(5, 30); diff --git a/rtc_base/numerics/moving_percentile_filter_unittest.cc b/rtc_base/numerics/moving_percentile_filter_unittest.cc index 30c0ebb23d..f456108973 100644 --- a/rtc_base/numerics/moving_percentile_filter_unittest.cc +++ b/rtc_base/numerics/moving_percentile_filter_unittest.cc @@ -13,6 +13,7 @@ #include #include +#include #include "test/gtest.h" diff --git a/rtc_base/numerics/percentile_filter.h b/rtc_base/numerics/percentile_filter.h index 2a18c1aa73..86b5fe10d9 100644 --- a/rtc_base/numerics/percentile_filter.h +++ b/rtc_base/numerics/percentile_filter.h @@ -110,7 +110,7 @@ void PercentileFilter::UpdatePercentileIterator() { template T PercentileFilter::GetPercentileValue() const { - return set_.empty() ? 0 : *percentile_it_; + return set_.empty() ? T() : *percentile_it_; } template diff --git a/rtc_base/numerics/rational.h b/rtc_base/numerics/rational.h new file mode 100644 index 0000000000..32f0cb1597 --- /dev/null +++ b/rtc_base/numerics/rational.h @@ -0,0 +1,28 @@ +/* + * Copyright 2024 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_NUMERICS_RATIONAL_H_ +#define RTC_BASE_NUMERICS_RATIONAL_H_ + +namespace webrtc { + +// This is the worst implementation of a rational... 
+struct Rational { + int numerator; + int denominator; + + bool operator==(const Rational& other) const { + return numerator == other.numerator && denominator == other.denominator; + } +}; + +} // namespace webrtc + +#endif // RTC_BASE_NUMERICS_RATIONAL_H_ diff --git a/rtc_base/numerics/running_statistics.h b/rtc_base/numerics/running_statistics.h index fe991b043f..c747c79365 100644 --- a/rtc_base/numerics/running_statistics.h +++ b/rtc_base/numerics/running_statistics.h @@ -13,9 +13,9 @@ #include #include -#include +#include +#include -#include "absl/types/optional.h" #include "rtc_base/checks.h" #include "rtc_base/numerics/math_utils.h" @@ -33,7 +33,7 @@ namespace webrtc_impl { // If you want a full-fledged moving window over N last samples, // please use webrtc::RollingAccumulator. // -// The measures return absl::nullopt if no samples were fed (Size() == 0), +// The measures return std::nullopt if no samples were fed (Size() == 0), // otherwise the returned optional is guaranteed to contain a value. // // [1] @@ -108,50 +108,50 @@ class RunningStatistics { // Returns minimum among all seen samples, in O(1) time. // This isn't affected by RemoveSample(). - absl::optional GetMin() const { + std::optional GetMin() const { if (size_ == 0) { - return absl::nullopt; + return std::nullopt; } return min_; } // Returns maximum among all seen samples, in O(1) time. // This isn't affected by RemoveSample(). - absl::optional GetMax() const { + std::optional GetMax() const { if (size_ == 0) { - return absl::nullopt; + return std::nullopt; } return max_; } // Returns sum in O(1) time. - absl::optional GetSum() const { + std::optional GetSum() const { if (size_ == 0) { - return absl::nullopt; + return std::nullopt; } return sum_; } // Returns mean in O(1) time. - absl::optional GetMean() const { + std::optional GetMean() const { if (size_ == 0) { - return absl::nullopt; + return std::nullopt; } return mean_; } // Returns unbiased sample variance in O(1) time. - absl::optional GetVariance() const { + std::optional GetVariance() const { if (size_ == 0) { - return absl::nullopt; + return std::nullopt; } return cumul_ / size_; } // Returns unbiased standard deviation in O(1) time. - absl::optional GetStandardDeviation() const { + std::optional GetStandardDeviation() const { if (size_ == 0) { - return absl::nullopt; + return std::nullopt; } return std::sqrt(*GetVariance()); } diff --git a/rtc_base/numerics/safe_compare.h b/rtc_base/numerics/safe_compare.h index 85f0a30e83..ef8d384d9b 100644 --- a/rtc_base/numerics/safe_compare.h +++ b/rtc_base/numerics/safe_compare.h @@ -10,12 +10,12 @@ // This file defines six constexpr functions: // -// rtc::SafeEq // == -// rtc::SafeNe // != -// rtc::SafeLt // < -// rtc::SafeLe // <= -// rtc::SafeGt // > -// rtc::SafeGe // >= +// webrtc::SafeEq // == +// webrtc::SafeNe // != +// webrtc::SafeLt // < +// webrtc::SafeLe // <= +// webrtc::SafeGt // > +// webrtc::SafeGe // >= // // They each accept two arguments of arbitrary types, and in almost all cases, // they simply call the appropriate comparison operator. However, if both @@ -35,11 +35,10 @@ #include #include -#include #include "rtc_base/type_traits.h" -namespace rtc { +namespace webrtc { namespace safe_cmp_impl { @@ -171,6 +170,19 @@ RTC_SAFECMP_MAKE_FUN(Gt) RTC_SAFECMP_MAKE_FUN(Ge) #undef RTC_SAFECMP_MAKE_FUN +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::SafeEq; +using ::webrtc::SafeGe; +using ::webrtc::SafeGt; +using ::webrtc::SafeLe; +using ::webrtc::SafeLt; +using ::webrtc::SafeNe; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NUMERICS_SAFE_COMPARE_H_ diff --git a/rtc_base/numerics/safe_compare_unittest.cc b/rtc_base/numerics/safe_compare_unittest.cc index 92bde686ba..8541ed418c 100644 --- a/rtc_base/numerics/safe_compare_unittest.cc +++ b/rtc_base/numerics/safe_compare_unittest.cc @@ -10,11 +10,13 @@ #include "rtc_base/numerics/safe_compare.h" +#include #include +#include #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { @@ -392,4 +394,4 @@ TEST(SafeCmpTest, Enum) { static_assert(SafeEq(13u, e4), ""); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/numerics/safe_conversions.h b/rtc_base/numerics/safe_conversions.h index e00219cbd7..8b981ac9d1 100644 --- a/rtc_base/numerics/safe_conversions.h +++ b/rtc_base/numerics/safe_conversions.h @@ -18,7 +18,7 @@ #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions_impl.h" -namespace rtc { +namespace webrtc { // Convenience function that returns true if the supplied value is in range // for the destination type. @@ -69,6 +69,17 @@ inline constexpr Dst saturated_cast(Src value) { RTC_CHECK_NOTREACHED(); } +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::checked_cast; +using ::webrtc::dchecked_cast; +using ::webrtc::IsValueInRangeForNumericType; +using ::webrtc::saturated_cast; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NUMERICS_SAFE_CONVERSIONS_H_ diff --git a/rtc_base/numerics/safe_conversions_impl.h b/rtc_base/numerics/safe_conversions_impl.h index e924ce3256..99a3bc8306 100644 --- a/rtc_base/numerics/safe_conversions_impl.h +++ b/rtc_base/numerics/safe_conversions_impl.h @@ -13,9 +13,10 @@ #ifndef RTC_BASE_NUMERICS_SAFE_CONVERSIONS_IMPL_H_ #define RTC_BASE_NUMERICS_SAFE_CONVERSIONS_IMPL_H_ +#include #include -namespace rtc { +namespace webrtc { namespace internal { enum DstSign { DST_UNSIGNED, DST_SIGNED }; @@ -101,7 +102,9 @@ struct RangeCheckImpl {}; // Dst range always contains the result: nothing to check. template struct RangeCheckImpl { - static constexpr RangeCheckResult Check(Src value) { return TYPE_VALID; } + static constexpr RangeCheckResult Check(Src /* value */) { + return TYPE_VALID; + } }; // Signed to signed narrowing. @@ -172,6 +175,6 @@ inline constexpr RangeCheckResult RangeCheck(Src value) { } } // namespace internal -} // namespace rtc +} // namespace webrtc #endif // RTC_BASE_NUMERICS_SAFE_CONVERSIONS_IMPL_H_ diff --git a/rtc_base/numerics/safe_minmax.h b/rtc_base/numerics/safe_minmax.h index 8356536dbc..959937967c 100644 --- a/rtc_base/numerics/safe_minmax.h +++ b/rtc_base/numerics/safe_minmax.h @@ -11,8 +11,8 @@ // Minimum and maximum // =================== // -// rtc::SafeMin(x, y) -// rtc::SafeMax(x, y) +// webrtc::SafeMin(x, y) +// webrtc::SafeMax(x, y) // // (These are both constexpr.) // @@ -30,7 +30,7 @@ // Clamp (a.k.a. 
constrain to a given interval) // ============================================ // -// rtc::SafeClamp(x, a, b) +// webrtc::SafeClamp(x, a, b) // // Accepts three arguments of any mix of integral types or any mix of // floating-point types, and returns the value in the closed interval [a, b] @@ -67,7 +67,7 @@ // All three functions allow callers to explicitly specify the return type as a // template parameter, overriding the default return type. E.g. // -// rtc::SafeMin(x, y) // returns an int +// webrtc::SafeMin(x, y) // returns an int // // If the requested type is statically guaranteed to be able to represent the // result, then everything's fine, and the return type is as requested. But if @@ -76,6 +76,7 @@ #ifndef RTC_BASE_NUMERICS_SAFE_MINMAX_H_ #define RTC_BASE_NUMERICS_SAFE_MINMAX_H_ +#include #include #include @@ -83,7 +84,7 @@ #include "rtc_base/numerics/safe_compare.h" #include "rtc_base/type_traits.h" -namespace rtc { +namespace webrtc { namespace safe_minmax_impl { @@ -330,6 +331,16 @@ R2 SafeClamp(T x, L min, H max) { : static_cast(x); } +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::SafeClamp; +using ::webrtc::SafeMax; +using ::webrtc::SafeMin; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NUMERICS_SAFE_MINMAX_H_ diff --git a/rtc_base/numerics/safe_minmax_unittest.cc b/rtc_base/numerics/safe_minmax_unittest.cc index c52b3f93dc..e267077310 100644 --- a/rtc_base/numerics/safe_minmax_unittest.cc +++ b/rtc_base/numerics/safe_minmax_unittest.cc @@ -11,11 +11,13 @@ #include "rtc_base/numerics/safe_minmax.h" #include +#include #include +#include #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { @@ -342,4 +344,4 @@ uint32_t TestClampSafe(uint32_t x, uint32_t a, uint32_t b) { return SafeClamp(x, a, b); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/numerics/sample_counter.cc b/rtc_base/numerics/sample_counter.cc index 16a8e25098..822378fef5 100644 --- a/rtc_base/numerics/sample_counter.cc +++ b/rtc_base/numerics/sample_counter.cc @@ -10,12 +10,14 @@ #include "rtc_base/numerics/sample_counter.h" +#include #include +#include #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" -namespace rtc { +namespace webrtc { SampleCounter::SampleCounter() = default; SampleCounter::~SampleCounter() = default; @@ -31,6 +33,9 @@ void SampleCounter::Add(int sample) { if (!max_ || sample > *max_) { max_ = sample; } + if (!min_ || sample < *min_) { + min_ = sample; + } } void SampleCounter::Add(const SampleCounter& other) { @@ -45,23 +50,29 @@ void SampleCounter::Add(const SampleCounter& other) { num_samples_ += other.num_samples_; if (other.max_ && (!max_ || *max_ < *other.max_)) max_ = other.max_; + if (other.min_ && (!min_ || *min_ > *other.min_)) + min_ = other.min_; } -absl::optional SampleCounter::Avg(int64_t min_required_samples) const { +std::optional SampleCounter::Avg(int64_t min_required_samples) const { RTC_DCHECK_GT(min_required_samples, 0); if (num_samples_ < min_required_samples) - return absl::nullopt; - return rtc::dchecked_cast(sum_ / num_samples_); + return std::nullopt; + return dchecked_cast(sum_ / num_samples_); } -absl::optional SampleCounter::Max() const { +std::optional SampleCounter::Max() const { return max_; } -absl::optional SampleCounter::Sum(int64_t 
min_required_samples) const { +std::optional SampleCounter::Min() const { + return min_; +} + +std::optional SampleCounter::Sum(int64_t min_required_samples) const { RTC_DCHECK_GT(min_required_samples, 0); if (num_samples_ < min_required_samples) - return absl::nullopt; + return std::nullopt; return sum_; } @@ -76,11 +87,11 @@ void SampleCounter::Reset() { SampleCounterWithVariance::SampleCounterWithVariance() = default; SampleCounterWithVariance::~SampleCounterWithVariance() = default; -absl::optional SampleCounterWithVariance::Variance( +std::optional SampleCounterWithVariance::Variance( int64_t min_required_samples) const { RTC_DCHECK_GT(min_required_samples, 0); if (num_samples_ < min_required_samples) - return absl::nullopt; + return std::nullopt; // E[(x-mean)^2] = E[x^2] - mean^2 int64_t mean = sum_ / num_samples_; return sum_squared_ / num_samples_ - mean * mean; @@ -106,4 +117,4 @@ void SampleCounterWithVariance::Reset() { *this = {}; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/numerics/sample_counter.h b/rtc_base/numerics/sample_counter.h index 717a1afbcf..86a1691c91 100644 --- a/rtc_base/numerics/sample_counter.h +++ b/rtc_base/numerics/sample_counter.h @@ -13,9 +13,9 @@ #include -#include "absl/types/optional.h" +#include -namespace rtc { +namespace webrtc { // Simple utility class for counting basic statistics (max./avg./variance) on // stream of samples. @@ -24,9 +24,10 @@ class SampleCounter { SampleCounter(); ~SampleCounter(); void Add(int sample); - absl::optional Avg(int64_t min_required_samples) const; - absl::optional Max() const; - absl::optional Sum(int64_t min_required_samples) const; + std::optional Avg(int64_t min_required_samples) const; + std::optional Max() const; + std::optional Min() const; + std::optional Sum(int64_t min_required_samples) const; int64_t NumSamples() const; void Reset(); // Adds all the samples from the `other` SampleCounter as if they were all @@ -36,7 +37,8 @@ class SampleCounter { protected: int64_t sum_ = 0; int64_t num_samples_ = 0; - absl::optional max_; + std::optional max_; + std::optional min_; }; class SampleCounterWithVariance : public SampleCounter { @@ -44,7 +46,7 @@ class SampleCounterWithVariance : public SampleCounter { SampleCounterWithVariance(); ~SampleCounterWithVariance(); void Add(int sample); - absl::optional Variance(int64_t min_required_samples) const; + std::optional Variance(int64_t min_required_samples) const; void Reset(); // Adds all the samples from the `other` SampleCounter as if they were all // individually added using `Add(int)` method. @@ -54,5 +56,14 @@ class SampleCounterWithVariance : public SampleCounter { int64_t sum_squared_ = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::SampleCounter; +using ::webrtc::SampleCounterWithVariance; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_NUMERICS_SAMPLE_COUNTER_H_ diff --git a/rtc_base/numerics/sample_counter_unittest.cc b/rtc_base/numerics/sample_counter_unittest.cc index 14b0573de9..64ef6cad56 100644 --- a/rtc_base/numerics/sample_counter_unittest.cc +++ b/rtc_base/numerics/sample_counter_unittest.cc @@ -11,19 +11,21 @@ #include "rtc_base/numerics/sample_counter.h" #include +#include #include "test/gmock.h" #include "test/gtest.h" using ::testing::Eq; -namespace rtc { +namespace webrtc { TEST(SampleCounterTest, ProcessesNoSamples) { constexpr int kMinSamples = 1; SampleCounter counter; - EXPECT_THAT(counter.Avg(kMinSamples), Eq(absl::nullopt)); - EXPECT_THAT(counter.Max(), Eq(absl::nullopt)); + EXPECT_THAT(counter.Avg(kMinSamples), Eq(std::nullopt)); + EXPECT_THAT(counter.Max(), Eq(std::nullopt)); + EXPECT_THAT(counter.Min(), Eq(std::nullopt)); } TEST(SampleCounterTest, NotEnoughSamples) { @@ -32,9 +34,10 @@ TEST(SampleCounterTest, NotEnoughSamples) { for (int value : {1, 2, 3, 4, 5}) { counter.Add(value); } - EXPECT_THAT(counter.Avg(kMinSamples), Eq(absl::nullopt)); - EXPECT_THAT(counter.Sum(kMinSamples), Eq(absl::nullopt)); + EXPECT_THAT(counter.Avg(kMinSamples), Eq(std::nullopt)); + EXPECT_THAT(counter.Sum(kMinSamples), Eq(std::nullopt)); EXPECT_THAT(counter.Max(), Eq(5)); + EXPECT_THAT(counter.Min(), Eq(1)); } TEST(SampleCounterTest, EnoughSamples) { @@ -46,6 +49,7 @@ TEST(SampleCounterTest, EnoughSamples) { EXPECT_THAT(counter.Avg(kMinSamples), Eq(3)); EXPECT_THAT(counter.Sum(kMinSamples), Eq(15)); EXPECT_THAT(counter.Max(), Eq(5)); + EXPECT_THAT(counter.Min(), Eq(1)); } TEST(SampleCounterTest, ComputesVariance) { @@ -68,13 +72,14 @@ TEST(SampleCounterTest, AggregatesTwoCounters) { counter2.Add(value); } // Before aggregation there is not enough samples. - EXPECT_THAT(counter1.Avg(kMinSamples), Eq(absl::nullopt)); - EXPECT_THAT(counter1.Variance(kMinSamples), Eq(absl::nullopt)); + EXPECT_THAT(counter1.Avg(kMinSamples), Eq(std::nullopt)); + EXPECT_THAT(counter1.Variance(kMinSamples), Eq(std::nullopt)); // Aggregate counter2 in counter1. 
counter1.Add(counter2); EXPECT_THAT(counter1.Avg(kMinSamples), Eq(3)); EXPECT_THAT(counter1.Max(), Eq(5)); + EXPECT_THAT(counter1.Min(), Eq(1)); EXPECT_THAT(counter1.Variance(kMinSamples), Eq(2)); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/numerics/sample_stats.cc b/rtc_base/numerics/sample_stats.cc index 6000b2b88f..606b1b6247 100644 --- a/rtc_base/numerics/sample_stats.cc +++ b/rtc_base/numerics/sample_stats.cc @@ -9,6 +9,12 @@ */ #include "rtc_base/numerics/sample_stats.h" +#include + +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "rtc_base/checks.h" + namespace webrtc { double SampleStats::Max() { diff --git a/rtc_base/numerics/sample_stats.h b/rtc_base/numerics/sample_stats.h index 39af1c6a37..11a884ba32 100644 --- a/rtc_base/numerics/sample_stats.h +++ b/rtc_base/numerics/sample_stats.h @@ -13,7 +13,6 @@ #include "api/numerics/samples_stats_counter.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" -#include "api/units/timestamp.h" namespace webrtc { template diff --git a/rtc_base/numerics/sequence_number_unwrapper.h b/rtc_base/numerics/sequence_number_unwrapper.h index d741b5c910..1def581c02 100644 --- a/rtc_base/numerics/sequence_number_unwrapper.h +++ b/rtc_base/numerics/sequence_number_unwrapper.h @@ -14,8 +14,9 @@ #include #include +#include +#include -#include "absl/types/optional.h" #include "rtc_base/numerics/sequence_number_util.h" namespace webrtc { @@ -69,7 +70,7 @@ class SeqNumUnwrapper { } int64_t last_unwrapped_ = 0; - absl::optional last_value_; + std::optional last_value_; }; using RtpTimestampUnwrapper = SeqNumUnwrapper; diff --git a/rtc_base/numerics/sequence_number_unwrapper_unittest.cc b/rtc_base/numerics/sequence_number_unwrapper_unittest.cc index fcd903bab4..10ffecb311 100644 --- a/rtc_base/numerics/sequence_number_unwrapper_unittest.cc +++ b/rtc_base/numerics/sequence_number_unwrapper_unittest.cc @@ -11,6 +11,7 @@ #include "rtc_base/numerics/sequence_number_unwrapper.h" #include +#include #include "test/gtest.h" diff --git a/rtc_base/numerics/sequence_number_util.h b/rtc_base/numerics/sequence_number_util.h index 702b82fa2b..5aeaf552ca 100644 --- a/rtc_base/numerics/sequence_number_util.h +++ b/rtc_base/numerics/sequence_number_util.h @@ -11,8 +11,6 @@ #ifndef RTC_BASE_NUMERICS_SEQUENCE_NUMBER_UTIL_H_ #define RTC_BASE_NUMERICS_SEQUENCE_NUMBER_UTIL_H_ -#include - #include #include diff --git a/rtc_base/numerics/sequence_number_util_unittest.cc b/rtc_base/numerics/sequence_number_util_unittest.cc index d44127bfa5..f23c1065b0 100644 --- a/rtc_base/numerics/sequence_number_util_unittest.cc +++ b/rtc_base/numerics/sequence_number_util_unittest.cc @@ -14,6 +14,7 @@ #include #include +#include "rtc_base/numerics/mod_ops.h" #include "test/gtest.h" namespace webrtc { diff --git a/rtc_base/openssl.h b/rtc_base/openssl.h index eeed373c43..17af84d0e4 100644 --- a/rtc_base/openssl.h +++ b/rtc_base/openssl.h @@ -16,7 +16,7 @@ #include "rtc_base/win32.h" // NOLINT #endif // WEBRTC_WIN -#include +#include // IWYU pragma: export #if (OPENSSL_VERSION_NUMBER < 0x10100000L) #error OpenSSL is older than 1.1.0, which is the minimum supported version. 
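Usage note (illustrative, not part of the patch): the numerics changes above are mostly mechanical — rtc:: becomes webrtc::, absl::optional becomes std::optional, and SampleCounter gains a Min() accessor mirroring Max(). A minimal caller sketch, assuming the accessors return std::optional<int> as in the upstream header; the function and variable names are invented for illustration.

#include <cstddef>
#include <optional>

#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_minmax.h"
#include "rtc_base/numerics/sample_counter.h"

// Illustrative caller, not part of the patch: exercises the migrated
// webrtc::SafeClamp() and the std::optional-based SampleCounter accessors,
// including the newly added Min().
void LogClampedSamples(const int* samples, size_t count) {
  webrtc::SampleCounter counter;
  for (size_t i = 0; i < count; ++i) {
    // SafeClamp accepts any mix of integral types and returns a value
    // guaranteed to lie in the closed interval [0, 255].
    counter.Add(webrtc::SafeClamp(samples[i], 0, 255));
  }
  constexpr int kMinSamples = 1;
  std::optional<int> avg = counter.Avg(kMinSamples);
  if (avg.has_value()) {
    RTC_LOG(LS_INFO) << "avg=" << *avg << " min=" << counter.Min().value_or(0)
                     << " max=" << counter.Max().value_or(0);
  }
}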
diff --git a/rtc_base/openssl_adapter.cc b/rtc_base/openssl_adapter.cc index c68eb22f5c..cb5dfc6a85 100644 --- a/rtc_base/openssl_adapter.cc +++ b/rtc_base/openssl_adapter.cc @@ -13,12 +13,31 @@ #include #include #include +#include +#include + +#include +#include +#include +#include #include "absl/strings/string_view.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "rtc_base/async_socket.h" +#include "rtc_base/openssl_session_cache.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/ssl_adapter.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_identity.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/strings/string_builder.h" #ifdef OPENSSL_IS_BORINGSSL #include + +#include "rtc_base/boringssl_certificate.h" +#include "rtc_base/openssl.h" #endif -#include #include #include #include @@ -37,14 +56,13 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/openssl.h" #ifdef OPENSSL_IS_BORINGSSL #include "rtc_base/boringssl_identity.h" #else #include "rtc_base/openssl_identity.h" #endif #include "rtc_base/openssl_utility.h" -#include "rtc_base/strings/string_builder.h" +#include "rtc_base/strings/str_join.h" #include "rtc_base/thread.h" ////////////////////////////////////////////////////////////////////// @@ -72,7 +90,7 @@ static BIO_METHOD* BIO_socket_method() { return methods; } -static BIO* BIO_new_socket(rtc::Socket* socket) { +static BIO* BIO_new_socket(webrtc::Socket* socket) { BIO* ret = BIO_new(BIO_socket_method()); if (ret == nullptr) { return nullptr; @@ -97,7 +115,7 @@ static int socket_free(BIO* b) { static int socket_read(BIO* b, char* out, int outl) { if (!out) return -1; - rtc::Socket* socket = static_cast(BIO_get_data(b)); + webrtc::Socket* socket = static_cast(BIO_get_data(b)); BIO_clear_retry_flags(b); int result = socket->Recv(out, outl, nullptr); if (result > 0) { @@ -111,7 +129,7 @@ static int socket_read(BIO* b, char* out, int outl) { static int socket_write(BIO* b, const char* in, int inl) { if (!in) return -1; - rtc::Socket* socket = static_cast(BIO_get_data(b)); + webrtc::Socket* socket = static_cast(BIO_get_data(b)); BIO_clear_retry_flags(b); int result = socket->Send(in, inl); if (result > 0) { @@ -123,7 +141,7 @@ static int socket_write(BIO* b, const char* in, int inl) { } static int socket_puts(BIO* b, const char* str) { - return socket_write(b, str, rtc::checked_cast(strlen(str))); + return socket_write(b, str, webrtc::checked_cast(strlen(str))); } static long socket_ctrl(BIO* b, int cmd, long num, void* ptr) { // NOLINT @@ -131,9 +149,9 @@ static long socket_ctrl(BIO* b, int cmd, long num, void* ptr) { // NOLINT case BIO_CTRL_RESET: return 0; case BIO_CTRL_EOF: { - rtc::Socket* socket = static_cast(ptr); + webrtc::Socket* socket = static_cast(ptr); // 1 means socket closed. - return (socket->GetState() == rtc::Socket::CS_CLOSED) ? 1 : 0; + return (socket->GetState() == webrtc::Socket::CS_CLOSED) ? 1 : 0; } case BIO_CTRL_WPENDING: case BIO_CTRL_PENDING: @@ -164,38 +182,17 @@ static void LogSslError() { // OpenSSLAdapter ///////////////////////////////////////////////////////////////////////////// -namespace rtc { +namespace webrtc { using ::webrtc::TimeDelta; -namespace webrtc_openssl_adapter_internal { - -// Simple O(n^2) implementation is sufficient for current use case. 
-std::string StrJoin(const std::vector& list, char delimiter) { - RTC_CHECK(!list.empty()); - StringBuilder sb; - sb << list[0]; - for (size_t i = 1; i < list.size(); i++) { - sb.AppendFormat("%c", delimiter); - sb << list[i]; - } - return sb.Release(); -} -} // namespace webrtc_openssl_adapter_internal - -using webrtc_openssl_adapter_internal::StrJoin; - bool OpenSSLAdapter::InitializeSSL() { - if (!SSL_library_init()) - return false; -#if !defined(ADDRESS_SANITIZER) || !defined(WEBRTC_MAC) || defined(WEBRTC_IOS) - // Loading the error strings crashes mac_asan. Omit this debugging aid there. - SSL_load_error_strings(); -#endif - ERR_load_BIO_strings(); - OpenSSL_add_all_algorithms(); - RAND_poll(); - return true; + // TODO: https://issues.webrtc.org/issues/339300437 - remove once + // BoringSSL no longer requires this after + // https://bugs.chromium.org/p/boringssl/issues/detail?id=35 + // In OpenSSL it is supposed to be a no-op as of 1.1: + // https://www.openssl.org/docs/man1.1.1/man3/OPENSSL_init_ssl.html + return OPENSSL_init_ssl(0, nullptr); } bool OpenSSLAdapter::CleanupSSL() { @@ -209,12 +206,12 @@ OpenSSLAdapter::OpenSSLAdapter(Socket* socket, ssl_session_cache_(ssl_session_cache), ssl_cert_verifier_(ssl_cert_verifier), state_(SSL_NONE), - role_(SSL_CLIENT), + role_(webrtc::SSL_CLIENT), ssl_read_needs_write_(false), ssl_write_needs_read_(false), ssl_(nullptr), ssl_ctx_(nullptr), - ssl_mode_(SSL_MODE_TLS), + ssl_mode_(webrtc::SSL_MODE_TLS), ignore_bad_cert_(false), custom_cert_verifier_status_(false) { // If a factory is used, take a reference on the factory's SSL_CTX. @@ -262,8 +259,8 @@ void OpenSSLAdapter::SetIdentity(std::unique_ptr identity) { identity_ = absl::WrapUnique(static_cast(identity.release())); #else - identity_ = - absl::WrapUnique(static_cast(identity.release())); + identity_ = absl::WrapUnique( + static_cast(identity.release())); #endif } @@ -303,7 +300,7 @@ int OpenSSLAdapter::BeginSSL() { // need to create one, and specify `false` to disable session caching. if (ssl_session_cache_ == nullptr) { RTC_DCHECK(!ssl_ctx_); - ssl_ctx_ = CreateContext(ssl_mode_, false); + ssl_ctx_ = CreateContext(ssl_mode_, /* enable_cache= */ false); } if (!ssl_ctx_) { @@ -368,12 +365,12 @@ int OpenSSLAdapter::BeginSSL() { if (!tls_alpn_string.empty()) { SSL_set_alpn_protos( ssl_, reinterpret_cast(tls_alpn_string.data()), - rtc::dchecked_cast(tls_alpn_string.size())); + webrtc::dchecked_cast(tls_alpn_string.size())); } } if (!elliptic_curves_.empty()) { - SSL_set1_curves_list(ssl_, StrJoin(elliptic_curves_, ':').c_str()); + SSL_set1_curves_list(ssl_, webrtc::StrJoin(elliptic_curves_, ":").c_str()); } // Now that the initial config is done, transfer ownership of `bio` to the @@ -396,7 +393,8 @@ int OpenSSLAdapter::ContinueSSL() { // Clear the DTLS timer timer_.reset(); - int code = (role_ == SSL_CLIENT) ? SSL_connect(ssl_) : SSL_accept(ssl_); + int code = + (role_ == webrtc::SSL_CLIENT) ? 
SSL_connect(ssl_) : SSL_accept(ssl_); switch (SSL_get_error(ssl_, code)) { case SSL_ERROR_NONE: if (!SSLPostConnectionCheck(ssl_, ssl_host_name_)) { @@ -482,7 +480,7 @@ int OpenSSLAdapter::DoSslWrite(const void* pv, size_t cb, int* error) { RTC_DCHECK(error != nullptr); ssl_write_needs_read_ = false; - int ret = SSL_write(ssl_, pv, checked_cast(cb)); + int ret = SSL_write(ssl_, pv, webrtc::checked_cast(cb)); *error = SSL_get_error(ssl_, ret); switch (*error) { case SSL_ERROR_NONE: @@ -576,7 +574,7 @@ int OpenSSLAdapter::Send(const void* pv, size_t cb) { pending_data_.SetData(static_cast(pv), cb); // Since we're taking responsibility for sending this data, return its full // size. The user of this class can consider it sent. - return rtc::dchecked_cast(cb); + return webrtc::dchecked_cast(cb); } return ret; } @@ -614,7 +612,7 @@ int OpenSSLAdapter::Recv(void* pv, size_t cb, int64_t* timestamp) { } ssl_read_needs_write_ = false; - int code = SSL_read(ssl_, pv, checked_cast(cb)); + int code = SSL_read(ssl_, pv, webrtc::checked_cast(cb)); int error = SSL_get_error(ssl_, code); switch (error) { @@ -777,65 +775,34 @@ bool OpenSSLAdapter::SSLPostConnectionCheck(SSL* ssl, absl::string_view host) { return is_valid_cert_name; } -void OpenSSLAdapter::SSLInfoCallback(const SSL* s, int where, int value) { - std::string type; - bool info_log = false; - bool alert_log = false; +void OpenSSLAdapter::SSLInfoCallback(const SSL* ssl, int where, int ret) { switch (where) { - case SSL_CB_EXIT: - info_log = true; - type = "exit"; - break; - case SSL_CB_ALERT: - alert_log = true; - type = "alert"; - break; - case SSL_CB_READ_ALERT: - alert_log = true; - type = "read_alert"; - break; - case SSL_CB_WRITE_ALERT: - alert_log = true; - type = "write_alert"; - break; - case SSL_CB_ACCEPT_LOOP: - info_log = true; - type = "accept_loop"; - break; - case SSL_CB_ACCEPT_EXIT: - info_log = true; - type = "accept_exit"; - break; - case SSL_CB_CONNECT_LOOP: - info_log = true; - type = "connect_loop"; - break; - case SSL_CB_CONNECT_EXIT: - info_log = true; - type = "connect_exit"; - break; - case SSL_CB_HANDSHAKE_START: - info_log = true; - type = "handshake_start"; - break; - case SSL_CB_HANDSHAKE_DONE: - info_log = true; - type = "handshake_done"; - break; case SSL_CB_LOOP: case SSL_CB_READ: case SSL_CB_WRITE: + return; default: break; } - - if (info_log) { - RTC_LOG(LS_INFO) << type << " " << SSL_state_string_long(s); + char buf[1024]; + SimpleStringBuilder ss(buf); + ss << SSL_state_string_long(ssl); + if (ret == 0) { + RTC_LOG(LS_ERROR) << "Error during " << ss.str() << "\n"; + return; } - if (alert_log) { - RTC_LOG(LS_WARNING) << type << " " << SSL_alert_type_string_long(value) - << " " << SSL_alert_desc_string_long(value) << " " - << SSL_state_string_long(s); + // See SSL_alert_type_string_long. 
+ int severity_class = where >> 8; + switch (severity_class) { + case SSL3_AL_WARNING: + case SSL3_AL_FATAL: + ss << " " << SSL_alert_type_string_long(ret); + ss << " " << SSL_alert_desc_string_long(ret); + RTC_LOG(LS_WARNING) << ss.str(); + break; + default: + RTC_LOG(LS_INFO) << ss.str(); + break; } } @@ -947,7 +914,7 @@ int OpenSSLAdapter::SSLVerifyInternal(int previous_status, } const BoringSSLCertificate cert(std::move(crypto_buffer)); #else - const OpenSSLCertificate cert(X509_STORE_CTX_get_current_cert(store)); + const webrtc::OpenSSLCertificate cert(X509_STORE_CTX_get_current_cert(store)); #endif if (!ssl_cert_verifier_->Verify(cert)) { RTC_LOG(LS_INFO) << "Failed to verify certificate using custom callback"; @@ -977,7 +944,7 @@ SSL_CTX* OpenSSLAdapter::CreateContext(SSLMode mode, bool enable_cache) { : TLS_with_buffers_method()); #else SSL_CTX* ctx = - SSL_CTX_new(mode == SSL_MODE_DTLS ? DTLS_method() : TLS_method()); + SSL_CTX_new(mode == webrtc::SSL_MODE_DTLS ? DTLS_method() : TLS_method()); #endif if (ctx == nullptr) { unsigned long error = ERR_get_error(); // NOLINT: type used by OpenSSL. @@ -1022,15 +989,14 @@ SSL_CTX* OpenSSLAdapter::CreateContext(SSLMode mode, bool enable_cache) { SSL_CTX_set_cipher_list( ctx, "ALL:!SHA256:!SHA384:!aPSK:!ECDSA+SHA1:!ADH:!LOW:!EXP:!MD5:!3DES"); - if (mode == SSL_MODE_DTLS) { - SSL_CTX_set_read_ahead(ctx, 1); - } - if (enable_cache) { SSL_CTX_set_session_cache_mode(ctx, SSL_SESS_CACHE_CLIENT); SSL_CTX_sess_set_new_cb(ctx, &OpenSSLAdapter::NewSSLSessionCallback); } +#ifdef OPENSSL_IS_BORINGSSL + SSL_CTX_set_permute_extensions(ctx, true); +#endif return ctx; } @@ -1123,4 +1089,4 @@ OpenSSLAdapter::EarlyExitCatcher::~EarlyExitCatcher() { } } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/openssl_adapter.h b/rtc_base/openssl_adapter.h index 558a04077a..479b1a679c 100644 --- a/rtc_base/openssl_adapter.h +++ b/rtc_base/openssl_adapter.h @@ -22,6 +22,7 @@ #include "absl/strings/string_view.h" #include "api/task_queue/pending_task_safety_flag.h" #include "rtc_base/buffer.h" +#include "rtc_base/openssl_stream_adapter.h" #ifdef OPENSSL_IS_BORINGSSL #include "rtc_base/boringssl_identity.h" #else @@ -35,15 +36,7 @@ #include "rtc_base/ssl_identity.h" #include "rtc_base/ssl_stream_adapter.h" -namespace rtc { - -namespace webrtc_openssl_adapter_internal { - -// Local definition, since absl::StrJoin is not allow-listed. Declared in header -// file only for unittests. -std::string StrJoin(const std::vector& list, char delimiter); - -} // namespace webrtc_openssl_adapter_internal +namespace webrtc { class OpenSSLAdapter final : public SSLAdapter { public: @@ -64,7 +57,7 @@ class OpenSSLAdapter final : public SSLAdapter { void SetIgnoreBadCert(bool ignore) override; void SetAlpnProtocols(const std::vector& protos) override; void SetEllipticCurves(const std::vector& curves) override; - void SetMode(SSLMode mode) override; + [[deprecated]] void SetMode(SSLMode mode) override; void SetCertVerifier(SSLCertificateVerifier* ssl_cert_verifier) override; void SetIdentity(std::unique_ptr identity) override; void SetRole(SSLRole role) override; @@ -155,7 +148,7 @@ class OpenSSLAdapter final : public SSLAdapter { #ifdef OPENSSL_IS_BORINGSSL std::unique_ptr identity_; #else - std::unique_ptr identity_; + std::unique_ptr identity_; #endif // Indicates whethere this is a client or a server. 
SSLRole role_; @@ -181,7 +174,7 @@ class OpenSSLAdapter final : public SSLAdapter { // Holds the result of the call to run of the ssl_cert_verify_->Verify() bool custom_cert_verifier_status_; // Flag to cancel pending timeout task. - webrtc::ScopedTaskSafety timer_; + ScopedTaskSafety timer_; }; // The OpenSSLAdapterFactory is responsbile for creating multiple new @@ -218,8 +211,8 @@ class OpenSSLAdapterFactory : public SSLAdapterFactory { private: // Holds the SSLMode (DTLS,TLS) that will be used to set the session cache. - SSLMode ssl_mode_ = SSL_MODE_TLS; - SSLRole ssl_role_ = SSL_CLIENT; + SSLMode ssl_mode_ = webrtc::SSL_MODE_TLS; + SSLRole ssl_role_ = webrtc::SSL_CLIENT; bool ignore_bad_cert_ = false; std::unique_ptr identity_; @@ -227,7 +220,7 @@ class OpenSSLAdapterFactory : public SSLAdapterFactory { // Holds a cache of existing SSL Sessions. std::unique_ptr ssl_session_cache_; // Provides an optional custom callback for verifying SSL certificates, this - // in currently only used for TLS-TURN connections. + // in currently only used for TURN/TLS connections. SSLCertificateVerifier* ssl_cert_verifier_ = nullptr; // TODO(benwright): Remove this when context is moved to OpenSSLCommon. // Hold a friend class to the OpenSSLAdapter to retrieve the context. @@ -240,6 +233,16 @@ class OpenSSLAdapterFactory : public SSLAdapterFactory { std::string TransformAlpnProtocols(const std::vector& protos); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::OpenSSLAdapter; +using ::webrtc::OpenSSLAdapterFactory; +using ::webrtc::TransformAlpnProtocols; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_OPENSSL_ADAPTER_H_ diff --git a/rtc_base/openssl_adapter_unittest.cc b/rtc_base/openssl_adapter_unittest.cc index ce351dc98e..d30969dee5 100644 --- a/rtc_base/openssl_adapter_unittest.cc +++ b/rtc_base/openssl_adapter_unittest.cc @@ -10,16 +10,21 @@ #include "rtc_base/openssl_adapter.h" +#include +#include +#include #include #include #include -#include "absl/memory/memory.h" -#include "rtc_base/gunit.h" #include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/thread.h" #include "test/gmock.h" +#include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { class MockAsyncSocket : public Socket { @@ -84,7 +89,7 @@ TEST(OpenSSLAdapterTest, TestTransformAlpnProtocols) { // Verifies that SSLStart works when OpenSSLAdapter is started in standalone // mode. TEST(OpenSSLAdapterTest, TestBeginSSLBeforeConnection) { - rtc::AutoThread main_thread; + AutoThread main_thread; Socket* async_socket = new MockAsyncSocket(); OpenSSLAdapter adapter(async_socket); EXPECT_EQ(adapter.StartSSL("webrtc.org"), 0); @@ -92,7 +97,7 @@ TEST(OpenSSLAdapterTest, TestBeginSSLBeforeConnection) { // Verifies that the adapter factory can create new adapters. TEST(OpenSSLAdapterFactoryTest, CreateSingleOpenSSLAdapter) { - rtc::AutoThread main_thread; + AutoThread main_thread; OpenSSLAdapterFactory adapter_factory; Socket* async_socket = new MockAsyncSocket(); auto simple_adapter = std::unique_ptr( @@ -103,7 +108,7 @@ TEST(OpenSSLAdapterFactoryTest, CreateSingleOpenSSLAdapter) { // Verifies that setting a custom verifier still allows for adapters to be // created. 
TEST(OpenSSLAdapterFactoryTest, CreateWorksWithCustomVerifier) { - rtc::AutoThread main_thread; + AutoThread main_thread; MockCertVerifier* mock_verifier = new MockCertVerifier(); EXPECT_CALL(*mock_verifier, Verify(_)).WillRepeatedly(Return(true)); auto cert_verifier = std::unique_ptr(mock_verifier); @@ -116,19 +121,4 @@ TEST(OpenSSLAdapterFactoryTest, CreateWorksWithCustomVerifier) { EXPECT_NE(simple_adapter, nullptr); } -TEST(StrJoinTest, SingleElement) { - EXPECT_EQ(webrtc_openssl_adapter_internal::StrJoin({"a"}, ','), "a"); -} - -TEST(StrJoinTest, TwoElements) { - EXPECT_EQ(webrtc_openssl_adapter_internal::StrJoin({"first", "second"}, ':'), - "first:second"); -} - -TEST(StrJoinTest, WithEmptyElement) { - EXPECT_EQ( - webrtc_openssl_adapter_internal::StrJoin({"first", "", "second"}, ':'), - "first::second"); -} - -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/openssl_certificate.cc b/rtc_base/openssl_certificate.cc index faed72b4db..64730432e3 100644 --- a/rtc_base/openssl_certificate.cc +++ b/rtc_base/openssl_certificate.cc @@ -10,6 +10,14 @@ #include "rtc_base/openssl_certificate.h" +#include +#include + +#include "absl/strings/string_view.h" +#include "rtc_base/buffer.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_identity.h" + #if defined(WEBRTC_WIN) // Must be included first before openssl headers. #include "rtc_base/win32.h" // NOLINT @@ -23,14 +31,14 @@ #include #include "rtc_base/checks.h" -#include "rtc_base/helpers.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/logging.h" #include "rtc_base/message_digest.h" #include "rtc_base/openssl_digest.h" #include "rtc_base/openssl_identity.h" #include "rtc_base/openssl_utility.h" -namespace rtc { +namespace webrtc { namespace { // Random bits for certificate serial number @@ -56,6 +64,7 @@ static void PrintCert(X509* x509) { // Generate a self-signed certificate, with the public key from the // given key pair. Caller is responsible for freeing the returned object. 
static X509* MakeCertificate(EVP_PKEY* pkey, const SSLIdentityParams& params) { + RTC_DCHECK(pkey != nullptr); RTC_LOG(LS_INFO) << "Making certificate for " << params.common_name; ASN1_INTEGER* asn1_serial_number = nullptr; @@ -95,8 +104,8 @@ static X509* MakeCertificate(EVP_PKEY* pkey, const SSLIdentityParams& params) { name.reset(X509_NAME_new()); if (name == nullptr || !X509_NAME_add_entry_by_NID(name.get(), NID_commonName, MBSTRING_UTF8, - (unsigned char*)params.common_name.c_str(), - -1, -1, 0) || + (unsigned char*)params.common_name.data(), -1, + -1, 0) || !X509_set_subject_name(x509.get(), name.get()) || !X509_set_issuer_name(x509.get(), name.get())) { return nullptr; @@ -209,27 +218,18 @@ bool OpenSSLCertificate::GetSignatureDigestAlgorithm( } bool OpenSSLCertificate::ComputeDigest(absl::string_view algorithm, - unsigned char* digest, - size_t size, - size_t* length) const { - return ComputeDigest(x509_, algorithm, digest, size, length); -} - -bool OpenSSLCertificate::ComputeDigest(const X509* x509, - absl::string_view algorithm, - unsigned char* digest, - size_t size, - size_t* length) { + Buffer& digest) const { + RTC_DCHECK_GT(digest.capacity(), 0); const EVP_MD* md = nullptr; unsigned int n = 0; if (!OpenSSLDigest::GetDigestEVP(algorithm, &md)) { return false; } - if (size < static_cast(EVP_MD_size(md))) { + if (digest.capacity() < static_cast(EVP_MD_size(md))) { return false; } - X509_digest(x509, md, digest, &n); - *length = n; + X509_digest(x509_, md, digest.data(), &n); + digest.SetSize(n); return true; } @@ -287,4 +287,4 @@ int64_t OpenSSLCertificate::CertificateExpirationTime() const { return ASN1TimeToSec(expire_time->data, expire_time->length, long_format); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/openssl_certificate.h b/rtc_base/openssl_certificate.h index 3f1b8c82f9..044edca3f1 100644 --- a/rtc_base/openssl_certificate.h +++ b/rtc_base/openssl_certificate.h @@ -15,15 +15,16 @@ #include #include +#include #include +#include "absl/strings/string_view.h" #include "rtc_base/buffer.h" +#include "rtc_base/openssl_key_pair.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" -namespace rtc { - -class OpenSSLKeyPair; +namespace webrtc { // OpenSSLCertificate encapsulates an OpenSSL X509* certificate object, // which is also reference counted inside the OpenSSL library. @@ -55,16 +56,7 @@ class OpenSSLCertificate final : public SSLCertificate { // Compute the digest of the certificate given algorithm bool ComputeDigest(absl::string_view algorithm, - unsigned char* digest, - size_t size, - size_t* length) const override; - - // Compute the digest of a certificate as an X509 * - static bool ComputeDigest(const X509* x509, - absl::string_view algorithm, - unsigned char* digest, - size_t size, - size_t* length); + Buffer& digest) const override; bool GetSignatureDigestAlgorithm(std::string* algorithm) const override; @@ -74,6 +66,16 @@ class OpenSSLCertificate final : public SSLCertificate { X509* x509_; // NOT OWNED }; -} // namespace rtc +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { + +using ::webrtc::OpenSSLCertificate; + +} +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_OPENSSL_CERTIFICATE_H_ diff --git a/rtc_base/openssl_digest.cc b/rtc_base/openssl_digest.cc index bbf39570f6..96cac43031 100644 --- a/rtc_base/openssl_digest.cc +++ b/rtc_base/openssl_digest.cc @@ -14,7 +14,7 @@ #include "rtc_base/checks.h" // RTC_DCHECK, RTC_CHECK #include "rtc_base/openssl.h" -namespace rtc { +namespace webrtc { OpenSSLDigest::OpenSSLDigest(absl::string_view algorithm) { ctx_ = EVP_MD_CTX_new(); @@ -59,17 +59,17 @@ size_t OpenSSLDigest::Finish(void* buf, size_t len) { bool OpenSSLDigest::GetDigestEVP(absl::string_view algorithm, const EVP_MD** mdp) { const EVP_MD* md; - if (algorithm == DIGEST_MD5) { + if (algorithm == webrtc::DIGEST_MD5) { md = EVP_md5(); - } else if (algorithm == DIGEST_SHA_1) { + } else if (algorithm == webrtc::DIGEST_SHA_1) { md = EVP_sha1(); - } else if (algorithm == DIGEST_SHA_224) { + } else if (algorithm == webrtc::DIGEST_SHA_224) { md = EVP_sha224(); - } else if (algorithm == DIGEST_SHA_256) { + } else if (algorithm == webrtc::DIGEST_SHA_256) { md = EVP_sha256(); - } else if (algorithm == DIGEST_SHA_384) { + } else if (algorithm == webrtc::DIGEST_SHA_384) { md = EVP_sha384(); - } else if (algorithm == DIGEST_SHA_512) { + } else if (algorithm == webrtc::DIGEST_SHA_512) { md = EVP_sha512(); } else { return false; @@ -87,17 +87,17 @@ bool OpenSSLDigest::GetDigestName(const EVP_MD* md, std::string* algorithm) { int md_type = EVP_MD_type(md); if (md_type == NID_md5) { - *algorithm = DIGEST_MD5; + *algorithm = webrtc::DIGEST_MD5; } else if (md_type == NID_sha1) { - *algorithm = DIGEST_SHA_1; + *algorithm = webrtc::DIGEST_SHA_1; } else if (md_type == NID_sha224) { - *algorithm = DIGEST_SHA_224; + *algorithm = webrtc::DIGEST_SHA_224; } else if (md_type == NID_sha256) { - *algorithm = DIGEST_SHA_256; + *algorithm = webrtc::DIGEST_SHA_256; } else if (md_type == NID_sha384) { - *algorithm = DIGEST_SHA_384; + *algorithm = webrtc::DIGEST_SHA_384; } else if (md_type == NID_sha512) { - *algorithm = DIGEST_SHA_512; + *algorithm = webrtc::DIGEST_SHA_512; } else { algorithm->clear(); return false; @@ -115,4 +115,4 @@ bool OpenSSLDigest::GetDigestSize(absl::string_view algorithm, size_t* length) { return true; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/openssl_digest.h b/rtc_base/openssl_digest.h index c6cc3bb86d..345fe4a9bc 100644 --- a/rtc_base/openssl_digest.h +++ b/rtc_base/openssl_digest.h @@ -11,15 +11,15 @@ #ifndef RTC_BASE_OPENSSL_DIGEST_H_ #define RTC_BASE_OPENSSL_DIGEST_H_ -#include #include #include #include "absl/strings/string_view.h" #include "rtc_base/message_digest.h" +#include "rtc_base/openssl.h" -namespace rtc { +namespace webrtc { // An implementation of the digest class that uses OpenSSL. class OpenSSLDigest final : public MessageDigest { @@ -46,6 +46,14 @@ class OpenSSLDigest final : public MessageDigest { const EVP_MD* md_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::OpenSSLDigest; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_OPENSSL_DIGEST_H_ diff --git a/rtc_base/openssl_identity.cc b/rtc_base/openssl_identity.cc index 186497836d..32b8b3b2e9 100644 --- a/rtc_base/openssl_identity.cc +++ b/rtc_base/openssl_identity.cc @@ -31,7 +31,7 @@ #include "rtc_base/openssl.h" #include "rtc_base/openssl_utility.h" -namespace rtc { +namespace webrtc { OpenSSLIdentity::OpenSSLIdentity( std::unique_ptr key_pair, @@ -79,8 +79,11 @@ std::unique_ptr OpenSSLIdentity::CreateWithExpiration( time_t now = time(nullptr); params.not_before = now + kCertificateWindowInSeconds; params.not_after = now + certificate_lifetime; - if (params.not_before > params.not_after) + if (params.not_before > params.not_after) { + RTC_LOG(LS_ERROR) + << "Іdentity generated failed, not_before is after not_after."; return nullptr; + } return CreateInternal(params); } @@ -112,8 +115,9 @@ std::unique_ptr OpenSSLIdentity::CreateFromPEMStrings( std::unique_ptr OpenSSLIdentity::CreateFromPEMChainStrings( absl::string_view private_key, absl::string_view certificate_chain) { - BIO* bio = BIO_new_mem_buf(certificate_chain.data(), - rtc::dchecked_cast(certificate_chain.size())); + BIO* bio = + BIO_new_mem_buf(certificate_chain.data(), + webrtc::dchecked_cast(certificate_chain.size())); if (!bio) return nullptr; BIO_set_mem_eof_return(bio, 0); @@ -125,9 +129,11 @@ std::unique_ptr OpenSSLIdentity::CreateFromPEMChainStrings( uint32_t err = ERR_peek_error(); if (ERR_GET_LIB(err) == ERR_LIB_PEM && ERR_GET_REASON(err) == PEM_R_NO_START_LINE) { + err = ERR_get_error(); break; } - RTC_LOG(LS_ERROR) << "Failed to parse certificate from PEM string."; + RTC_LOG(LS_ERROR) << "Failed to parse certificate from PEM string: " + << ERR_reason_error_string(err); BIO_free(bio); return nullptr; } @@ -202,4 +208,4 @@ bool OpenSSLIdentity::operator!=(const OpenSSLIdentity& other) const { return !(*this == other); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/openssl_identity.h b/rtc_base/openssl_identity.h index a7372109c3..2b17ae0875 100644 --- a/rtc_base/openssl_identity.h +++ b/rtc_base/openssl_identity.h @@ -22,7 +22,7 @@ #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" -namespace rtc { +namespace webrtc { // Holds a keypair and certificate together, and a method to generate // them consistently. @@ -70,6 +70,14 @@ class OpenSSLIdentity final : public SSLIdentity { std::unique_ptr cert_chain_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::OpenSSLIdentity; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_OPENSSL_IDENTITY_H_ diff --git a/rtc_base/openssl_key_pair.cc b/rtc_base/openssl_key_pair.cc index 4c474f2d54..e3da69f22e 100644 --- a/rtc_base/openssl_key_pair.cc +++ b/rtc_base/openssl_key_pair.cc @@ -30,7 +30,7 @@ #include "rtc_base/openssl.h" #include "rtc_base/openssl_utility.h" -namespace rtc { +namespace webrtc { // We could have exposed a myriad of parameters for the crypto stuff, // but keeping it simple seems best. 
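Usage note (illustrative, not part of the patch): OpenSSLCertificate::ComputeDigest() now fills a caller-supplied Buffer instead of taking a raw pointer, a size, and an out-parameter length; the caller reserves capacity up front and the method records the actual digest length via SetSize(). The algorithm names are the webrtc::DIGEST_* constants that OpenSSLDigest::GetDigestEVP() maps to EVP methods above. A hypothetical helper sketch, assuming Buffer is reachable as webrtc::Buffer after the namespace migration; the helper name and the 64-byte reservation are our choices, not anything mandated by the patch.

#include "rtc_base/buffer.h"
#include "rtc_base/message_digest.h"  // webrtc::DIGEST_SHA_256
#include "rtc_base/openssl_certificate.h"

// Hypothetical helper: reserve enough capacity up front; ComputeDigest()
// checks digest.capacity() and sets the final length via digest.SetSize().
bool GetSha256Fingerprint(const webrtc::OpenSSLCertificate& cert,
                          webrtc::Buffer& fingerprint) {
  // 64 bytes covers every EVP digest (EVP_MAX_MD_SIZE); SHA-256 needs 32.
  fingerprint.EnsureCapacity(64);
  return cert.ComputeDigest(webrtc::DIGEST_SHA_256, fingerprint);
}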
@@ -191,4 +191,4 @@ bool OpenSSLKeyPair::operator!=(const OpenSSLKeyPair& other) const { return !(*this == other); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/openssl_key_pair.h b/rtc_base/openssl_key_pair.h index d09bdb0d5e..a8a3aa0d9a 100644 --- a/rtc_base/openssl_key_pair.h +++ b/rtc_base/openssl_key_pair.h @@ -20,7 +20,7 @@ #include "rtc_base/checks.h" #include "rtc_base/ssl_identity.h" -namespace rtc { +namespace webrtc { // OpenSSLKeyPair encapsulates an OpenSSL EVP_PKEY* keypair object, // which is reference counted inside the OpenSSL library. @@ -56,6 +56,14 @@ class OpenSSLKeyPair final { EVP_PKEY* pkey_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::OpenSSLKeyPair; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_OPENSSL_KEY_PAIR_H_ diff --git a/rtc_base/openssl_session_cache.cc b/rtc_base/openssl_session_cache.cc index d63724242a..6279a9235f 100644 --- a/rtc_base/openssl_session_cache.cc +++ b/rtc_base/openssl_session_cache.cc @@ -10,11 +10,16 @@ #include "rtc_base/openssl_session_cache.h" +#include + +#include + #include "absl/strings/string_view.h" #include "rtc_base/checks.h" #include "rtc_base/openssl.h" +#include "rtc_base/ssl_stream_adapter.h" -namespace rtc { +namespace webrtc { OpenSSLSessionCache::OpenSSLSessionCache(SSLMode ssl_mode, SSL_CTX* ssl_ctx) : ssl_mode_(ssl_mode), ssl_ctx_(ssl_ctx) { @@ -51,4 +56,4 @@ SSLMode OpenSSLSessionCache::GetSSLMode() const { return ssl_mode_; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/openssl_session_cache.h b/rtc_base/openssl_session_cache.h index 75d8d9a0cf..0bcbad0e34 100644 --- a/rtc_base/openssl_session_cache.h +++ b/rtc_base/openssl_session_cache.h @@ -24,7 +24,7 @@ typedef struct ssl_session_st SSL_SESSION; #endif -namespace rtc { +namespace webrtc { // The OpenSSLSessionCache maps hostnames to SSL_SESSIONS. This cache is // owned by the OpenSSLAdapterFactory and is passed down to each OpenSSLAdapter @@ -63,10 +63,18 @@ class OpenSSLSessionCache final { // Map of hostnames to SSL_SESSIONs; holds references to the SSL_SESSIONs, // which are cleaned up when the factory is destroyed. // TODO(juberti): Add LRU eviction to keep the cache from growing forever. - std::map sessions_; + std::map sessions_; // The cache should never be copied or assigned directly. }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::OpenSSLSessionCache; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_OPENSSL_SESSION_CACHE_H_ diff --git a/rtc_base/openssl_session_cache_unittest.cc b/rtc_base/openssl_session_cache_unittest.cc index 0441d5c012..1d4f18145a 100644 --- a/rtc_base/openssl_session_cache_unittest.cc +++ b/rtc_base/openssl_session_cache_unittest.cc @@ -13,11 +13,8 @@ #include #include -#include -#include - -#include "rtc_base/gunit.h" -#include "rtc_base/openssl.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "test/gtest.h" namespace { // Use methods that avoid X509 objects if possible. 
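Usage note (illustrative, not part of the patch): every header above ends with the same compatibility shim — the class moves into namespace webrtc and, behind WEBRTC_ALLOW_DEPRECATED_NAMESPACES, a using-declaration keeps the old rtc:: spelling compiling until bugs.webrtc.org/4222596 is closed. A sketch of what that means for downstream code, assuming ssl_identity.h (outside this excerpt) carries the matching aliases for KeyParams.

#include <memory>

#include "rtc_base/openssl_key_pair.h"
#include "rtc_base/ssl_identity.h"  // KeyParams

// Pre-migration spelling: compiles only when WEBRTC_ALLOW_DEPRECATED_NAMESPACES
// is defined, since rtc::OpenSSLKeyPair is now just an alias of the webrtc type.
std::unique_ptr<rtc::OpenSSLKeyPair> GenerateLegacyKeyPair() {
  return rtc::OpenSSLKeyPair::Generate(rtc::KeyParams::ECDSA());
}

// Post-migration spelling, using the canonical namespace.
std::unique_ptr<webrtc::OpenSSLKeyPair> GenerateKeyPair() {
  return webrtc::OpenSSLKeyPair::Generate(webrtc::KeyParams::ECDSA());
}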
@@ -35,9 +32,18 @@ SSL_CTX* NewTlsContext() { return SSL_CTX_new(TLS_method()); #endif } + +SSL_SESSION* NewSslSession(SSL_CTX* ssl_ctx) { +#ifdef OPENSSL_IS_BORINGSSL + return SSL_SESSION_new(ssl_ctx); +#else + return SSL_SESSION_new(); +#endif +} + } // namespace -namespace rtc { +namespace webrtc { TEST(OpenSSLSessionCache, DTLSModeSetCorrectly) { SSL_CTX* ssl_ctx = NewDtlsContext(); @@ -79,7 +85,7 @@ TEST(OpenSSLSessionCache, InvalidLookupReturnsNullptr) { TEST(OpenSSLSessionCache, SimpleValidSessionLookup) { SSL_CTX* ssl_ctx = NewDtlsContext(); - SSL_SESSION* ssl_session = SSL_SESSION_new(ssl_ctx); + SSL_SESSION* ssl_session = NewSslSession(ssl_ctx); OpenSSLSessionCache session_cache(SSL_MODE_DTLS, ssl_ctx); session_cache.AddSession("webrtc.org", ssl_session); @@ -90,8 +96,8 @@ TEST(OpenSSLSessionCache, SimpleValidSessionLookup) { TEST(OpenSSLSessionCache, AddToExistingReplacesPrevious) { SSL_CTX* ssl_ctx = NewDtlsContext(); - SSL_SESSION* ssl_session_1 = SSL_SESSION_new(ssl_ctx); - SSL_SESSION* ssl_session_2 = SSL_SESSION_new(ssl_ctx); + SSL_SESSION* ssl_session_1 = NewSslSession(ssl_ctx); + SSL_SESSION* ssl_session_2 = NewSslSession(ssl_ctx); OpenSSLSessionCache session_cache(SSL_MODE_DTLS, ssl_ctx); session_cache.AddSession("webrtc.org", ssl_session_1); @@ -101,4 +107,4 @@ TEST(OpenSSLSessionCache, AddToExistingReplacesPrevious) { SSL_CTX_free(ssl_ctx); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/openssl_stream_adapter.cc b/rtc_base/openssl_stream_adapter.cc index d462f77ce4..b46faff911 100644 --- a/rtc_base/openssl_stream_adapter.cc +++ b/rtc_base/openssl_stream_adapter.cc @@ -11,32 +11,45 @@ #include "rtc_base/openssl_stream_adapter.h" #include -#include #include -#include -#include -#include - -#include "absl/strings/string_view.h" -#ifndef OPENSSL_IS_BORINGSSL -#include #include -#endif +#include +#include -#include +#include +#include +#include #include +#include +#include #include #include +#include "absl/functional/any_invocable.h" +#include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/field_trials_view.h" +#include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/units/time_delta.h" +#include "rtc_base/buffer.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/openssl.h" #include "rtc_base/openssl_adapter.h" #include "rtc_base/openssl_digest.h" +#include "rtc_base/ssl_identity.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/task_utils/repeating_task.h" #ifdef OPENSSL_IS_BORINGSSL +#include +#include +#include +#include + +#include "rtc_base/boringssl_certificate.h" #include "rtc_base/boringssl_identity.h" +#include "rtc_base/openssl.h" #else #include "rtc_base/openssl_identity.h" #endif @@ -46,19 +59,18 @@ #include "rtc_base/string_encode.h" #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" -#include "system_wrappers/include/field_trial.h" #if (OPENSSL_VERSION_NUMBER < 0x10100000L) #error "webrtc requires at least OpenSSL version 1.1.0, to support DTLS-SRTP" #endif -// Defines for the TLS Cipher Suite Map. -#define DEFINE_CIPHER_ENTRY_SSL3(name) \ - { SSL3_CK_##name, "TLS_" #name } -#define DEFINE_CIPHER_ENTRY_TLS1(name) \ - { TLS1_CK_##name, "TLS_" #name } +namespace { +// Value specified in RFC 5764. 
+static constexpr absl::string_view kDtlsSrtpExporterLabel = + "EXTRACTOR-dtls_srtp"; +} // namespace -namespace rtc { +namespace webrtc { namespace { using ::webrtc::SafeTask; // SRTP cipher suite table. `internal_name` is used to construct a @@ -69,80 +81,12 @@ struct SrtpCipherMapEntry { const int id; }; -// Cipher name table. Maps internal OpenSSL cipher ids to the RFC name. -struct SslCipherMapEntry { - uint32_t openssl_id; - const char* rfc_name; -}; - // This isn't elegant, but it's better than an external reference constexpr SrtpCipherMapEntry kSrtpCipherMap[] = { - {"SRTP_AES128_CM_SHA1_80", kSrtpAes128CmSha1_80}, - {"SRTP_AES128_CM_SHA1_32", kSrtpAes128CmSha1_32}, - {"SRTP_AEAD_AES_128_GCM", kSrtpAeadAes128Gcm}, - {"SRTP_AEAD_AES_256_GCM", kSrtpAeadAes256Gcm}}; - -#ifndef OPENSSL_IS_BORINGSSL -// The "SSL_CIPHER_standard_name" function is only available in OpenSSL when -// compiled with tracing, so we need to define the mapping manually here. -constexpr SslCipherMapEntry kSslCipherMap[] = { - // TLS v1.0 ciphersuites from RFC2246. - DEFINE_CIPHER_ENTRY_SSL3(RSA_RC4_128_SHA), - {SSL3_CK_RSA_DES_192_CBC3_SHA, "TLS_RSA_WITH_3DES_EDE_CBC_SHA"}, - - // AES ciphersuites from RFC3268. - {TLS1_CK_RSA_WITH_AES_128_SHA, "TLS_RSA_WITH_AES_128_CBC_SHA"}, - {TLS1_CK_DHE_RSA_WITH_AES_128_SHA, "TLS_DHE_RSA_WITH_AES_128_CBC_SHA"}, - {TLS1_CK_RSA_WITH_AES_256_SHA, "TLS_RSA_WITH_AES_256_CBC_SHA"}, - {TLS1_CK_DHE_RSA_WITH_AES_256_SHA, "TLS_DHE_RSA_WITH_AES_256_CBC_SHA"}, - - // ECC ciphersuites from RFC4492. - DEFINE_CIPHER_ENTRY_TLS1(ECDHE_ECDSA_WITH_RC4_128_SHA), - {TLS1_CK_ECDHE_ECDSA_WITH_DES_192_CBC3_SHA, - "TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA"}, - DEFINE_CIPHER_ENTRY_TLS1(ECDHE_ECDSA_WITH_AES_128_CBC_SHA), - DEFINE_CIPHER_ENTRY_TLS1(ECDHE_ECDSA_WITH_AES_256_CBC_SHA), - - DEFINE_CIPHER_ENTRY_TLS1(ECDHE_RSA_WITH_RC4_128_SHA), - {TLS1_CK_ECDHE_RSA_WITH_DES_192_CBC3_SHA, - "TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA"}, - DEFINE_CIPHER_ENTRY_TLS1(ECDHE_RSA_WITH_AES_128_CBC_SHA), - DEFINE_CIPHER_ENTRY_TLS1(ECDHE_RSA_WITH_AES_256_CBC_SHA), - - // TLS v1.2 ciphersuites. - {TLS1_CK_RSA_WITH_AES_128_SHA256, "TLS_RSA_WITH_AES_128_CBC_SHA256"}, - {TLS1_CK_RSA_WITH_AES_256_SHA256, "TLS_RSA_WITH_AES_256_CBC_SHA256"}, - {TLS1_CK_DHE_RSA_WITH_AES_128_SHA256, - "TLS_DHE_RSA_WITH_AES_128_CBC_SHA256"}, - {TLS1_CK_DHE_RSA_WITH_AES_256_SHA256, - "TLS_DHE_RSA_WITH_AES_256_CBC_SHA256"}, - - // TLS v1.2 GCM ciphersuites from RFC5288. - DEFINE_CIPHER_ENTRY_TLS1(RSA_WITH_AES_128_GCM_SHA256), - DEFINE_CIPHER_ENTRY_TLS1(RSA_WITH_AES_256_GCM_SHA384), - DEFINE_CIPHER_ENTRY_TLS1(DHE_RSA_WITH_AES_128_GCM_SHA256), - DEFINE_CIPHER_ENTRY_TLS1(DHE_RSA_WITH_AES_256_GCM_SHA384), - DEFINE_CIPHER_ENTRY_TLS1(DH_RSA_WITH_AES_128_GCM_SHA256), - DEFINE_CIPHER_ENTRY_TLS1(DH_RSA_WITH_AES_256_GCM_SHA384), - - // ECDH HMAC based ciphersuites from RFC5289. - {TLS1_CK_ECDHE_ECDSA_WITH_AES_128_SHA256, - "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256"}, - {TLS1_CK_ECDHE_ECDSA_WITH_AES_256_SHA384, - "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384"}, - {TLS1_CK_ECDHE_RSA_WITH_AES_128_SHA256, - "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256"}, - {TLS1_CK_ECDHE_RSA_WITH_AES_256_SHA384, - "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384"}, - - // ECDH GCM based ciphersuites from RFC5289. 
- DEFINE_CIPHER_ENTRY_TLS1(ECDHE_ECDSA_WITH_AES_128_GCM_SHA256), - DEFINE_CIPHER_ENTRY_TLS1(ECDHE_ECDSA_WITH_AES_256_GCM_SHA384), - DEFINE_CIPHER_ENTRY_TLS1(ECDHE_RSA_WITH_AES_128_GCM_SHA256), - DEFINE_CIPHER_ENTRY_TLS1(ECDHE_RSA_WITH_AES_256_GCM_SHA384), - - {0, nullptr}}; -#endif // #ifndef OPENSSL_IS_BORINGSSL + {"SRTP_AES128_CM_SHA1_80", webrtc::kSrtpAes128CmSha1_80}, + {"SRTP_AES128_CM_SHA1_32", webrtc::kSrtpAes128CmSha1_32}, + {"SRTP_AEAD_AES_128_GCM", webrtc::kSrtpAeadAes128Gcm}, + {"SRTP_AEAD_AES_256_GCM", webrtc::kSrtpAeadAes256Gcm}}; #ifdef OPENSSL_IS_BORINGSSL // Enabled by EnableTimeCallbackForTesting. Should never be set in production @@ -150,12 +94,69 @@ constexpr SslCipherMapEntry kSslCipherMap[] = { bool g_use_time_callback_for_testing = false; // Not used in production code. Actual time should be relative to Jan 1, 1970. void TimeCallbackForTesting(const SSL* ssl, struct timeval* out_clock) { - int64_t time = TimeNanos(); - out_clock->tv_sec = time / kNumNanosecsPerSec; - out_clock->tv_usec = (time % kNumNanosecsPerSec) / kNumNanosecsPerMicrosec; + int64_t time = webrtc::TimeNanos(); + out_clock->tv_sec = time / webrtc::kNumNanosecsPerSec; + out_clock->tv_usec = + (time % webrtc::kNumNanosecsPerSec) / webrtc::kNumNanosecsPerMicrosec; +} +#endif + +uint16_t GetMaxVersion(SSLMode ssl_mode, SSLProtocolVersion version) { + switch (ssl_mode) { + case webrtc::SSL_MODE_TLS: + switch (version) { + default: + case webrtc::SSL_PROTOCOL_NOT_GIVEN: + case webrtc::SSL_PROTOCOL_TLS_10: + case webrtc::SSL_PROTOCOL_TLS_11: + case webrtc::SSL_PROTOCOL_TLS_12: + return TLS1_2_VERSION; + case webrtc::SSL_PROTOCOL_TLS_13: +#ifdef TLS1_3_VERSION + return TLS1_3_VERSION; +#else + return TLS1_2_VERSION; +#endif + } + case webrtc::SSL_MODE_DTLS: + switch (version) { + default: + case webrtc::SSL_PROTOCOL_NOT_GIVEN: + case webrtc::SSL_PROTOCOL_DTLS_10: + case webrtc::SSL_PROTOCOL_DTLS_12: + return DTLS1_2_VERSION; + case webrtc::SSL_PROTOCOL_DTLS_13: +#ifdef DTLS1_3_VERSION + return DTLS1_3_VERSION; +#else + return DTLS1_2_VERSION; +#endif + } + } } + +constexpr int kForceDtls13Off = 0; +#ifdef DTLS1_3_VERSION +constexpr int kForceDtls13Enabled = 1; +constexpr int kForceDtls13Only = 2; #endif +int GetForceDtls13(const FieldTrialsView* field_trials) { + if (field_trials == nullptr) { + return kForceDtls13Off; + } +#ifdef DTLS1_3_VERSION + auto mode = field_trials->Lookup("WebRTC-ForceDtls13"); + RTC_LOG(LS_WARNING) << "WebRTC-ForceDtls13: " << mode; + if (mode == "Enabled") { + return kForceDtls13Enabled; + } else if (mode == "Only") { + return kForceDtls13Only; + } +#endif + return kForceDtls13Off; +} + } // namespace ////////////////////////////////////////////////////////////////////// @@ -217,10 +218,10 @@ static int stream_read(BIO* b, char* out, int outl) { size_t read; int error; StreamResult result = stream->Read( - rtc::MakeArrayView(reinterpret_cast(out), outl), read, error); - if (result == SR_SUCCESS) { - return checked_cast(read); - } else if (result == SR_BLOCK) { + MakeArrayView(reinterpret_cast(out), outl), read, error); + if (result == webrtc::SR_SUCCESS) { + return webrtc::checked_cast(read); + } else if (result == webrtc::SR_BLOCK) { BIO_set_retry_read(b); } return -1; @@ -235,18 +236,17 @@ static int stream_write(BIO* b, const char* in, int inl) { size_t written; int error; StreamResult result = stream->Write( - rtc::MakeArrayView(reinterpret_cast(in), inl), written, - error); - if (result == SR_SUCCESS) { - return checked_cast(written); - } else if (result == SR_BLOCK) { + 
MakeArrayView(reinterpret_cast(in), inl), written, error); + if (result == webrtc::SR_SUCCESS) { + return webrtc::checked_cast(written); + } else if (result == webrtc::SR_BLOCK) { BIO_set_retry_write(b); } return -1; } static int stream_puts(BIO* b, const char* str) { - return stream_write(b, str, checked_cast(strlen(str))); + return stream_write(b, str, webrtc::checked_cast(strlen(str))); } static long stream_ctrl(BIO* b, int cmd, long num, void* ptr) { @@ -256,18 +256,27 @@ static long stream_ctrl(BIO* b, int cmd, long num, void* ptr) { case BIO_CTRL_EOF: { StreamInterface* stream = static_cast(ptr); // 1 means end-of-stream. - return (stream->GetState() == SS_CLOSED) ? 1 : 0; + return (stream->GetState() == webrtc::SS_CLOSED) ? 1 : 0; } case BIO_CTRL_WPENDING: case BIO_CTRL_PENDING: return 0; - case BIO_CTRL_FLUSH: + case BIO_CTRL_FLUSH: { + StreamInterface* stream = static_cast(BIO_get_data(b)); + RTC_DCHECK(stream); + if (stream->Flush()) { + RTC_LOG(LS_WARNING) << "Failed to flush stream"; + return 0; + } return 1; + } case BIO_CTRL_DGRAM_QUERY_MTU: // openssl defaults to mtu=256 unless we return something here. // The handshake doesn't actually need to send packets above 1k, // so this seems like a sensible value that should work in most cases. // Webrtc uses the same value for video packets. + RTC_DCHECK_NOTREACHED() + << "We should be using SSL_set_mtu instead of this!"; return 1200; default: return 0; @@ -278,39 +287,26 @@ static long stream_ctrl(BIO* b, int cmd, long num, void* ptr) { // OpenSSLStreamAdapter ///////////////////////////////////////////////////////////////////////////// -static std::atomic g_use_legacy_tls_protocols_override(false); -static std::atomic g_allow_legacy_tls_protocols(false); - -void SetAllowLegacyTLSProtocols(const absl::optional& allow) { - g_use_legacy_tls_protocols_override.store(allow.has_value()); - if (allow.has_value()) - g_allow_legacy_tls_protocols.store(allow.value()); -} - -bool ShouldAllowLegacyTLSProtocols() { - return g_use_legacy_tls_protocols_override.load() - ? g_allow_legacy_tls_protocols.load() - : webrtc::field_trial::IsEnabled("WebRTC-LegacyTlsProtocols"); -} - OpenSSLStreamAdapter::OpenSSLStreamAdapter( std::unique_ptr stream, - absl::AnyInvocable handshake_error) + absl::AnyInvocable handshake_error, + const FieldTrialsView* field_trials) : stream_(std::move(stream)), handshake_error_(std::move(handshake_error)), - owner_(rtc::Thread::Current()), + owner_(Thread::Current()), state_(SSL_NONE), - role_(SSL_CLIENT), + role_(webrtc::SSL_CLIENT), ssl_read_needs_write_(false), ssl_write_needs_read_(false), ssl_(nullptr), ssl_ctx_(nullptr), - ssl_mode_(SSL_MODE_TLS), - ssl_max_version_(SSL_PROTOCOL_TLS_12), - // Default is to support legacy TLS protocols. - // This will be changed to default non-support in M82 or M83. 
- support_legacy_tls_protocols_flag_(ShouldAllowLegacyTLSProtocols()) { - stream_->SignalEvent.connect(this, &OpenSSLStreamAdapter::OnEvent); + ssl_mode_(webrtc::SSL_MODE_DTLS), + ssl_max_version_(webrtc::SSL_PROTOCOL_DTLS_12), + force_dtls_13_(GetForceDtls13(field_trials)), + enable_dtls_pqc_(field_trials && + field_trials->IsEnabled("WebRTC-EnableDtlsPqc")) { + stream_->SetEventCallback( + [this](int events, int err) { OnEvent(events, err); }); } OpenSSLStreamAdapter::~OpenSSLStreamAdapter() { @@ -323,7 +319,7 @@ void OpenSSLStreamAdapter::SetIdentity(std::unique_ptr identity) { #ifdef OPENSSL_IS_BORINGSSL identity_.reset(static_cast(identity.release())); #else - identity_.reset(static_cast(identity.release())); + identity_.reset(static_cast(identity.release())); #endif } @@ -335,79 +331,55 @@ void OpenSSLStreamAdapter::SetServerRole(SSLRole role) { role_ = role; } -bool OpenSSLStreamAdapter::SetPeerCertificateDigest( +SSLPeerCertificateDigestError OpenSSLStreamAdapter::SetPeerCertificateDigest( absl::string_view digest_alg, - const unsigned char* digest_val, - size_t digest_len, - SSLPeerCertificateDigestError* error) { + ArrayView digest_val) { RTC_DCHECK(!peer_certificate_verified_); RTC_DCHECK(!HasPeerCertificateDigest()); size_t expected_len; - if (error) { - *error = SSLPeerCertificateDigestError::NONE; - } if (!OpenSSLDigest::GetDigestSize(digest_alg, &expected_len)) { RTC_LOG(LS_WARNING) << "Unknown digest algorithm: " << digest_alg; - if (error) { - *error = SSLPeerCertificateDigestError::UNKNOWN_ALGORITHM; - } - return false; + return SSLPeerCertificateDigestError::UNKNOWN_ALGORITHM; } - if (expected_len != digest_len) { - if (error) { - *error = SSLPeerCertificateDigestError::INVALID_LENGTH; - } - return false; + if (expected_len != digest_val.size()) { + return SSLPeerCertificateDigestError::INVALID_LENGTH; } - peer_certificate_digest_value_.SetData(digest_val, digest_len); + peer_certificate_digest_value_.SetData(digest_val); peer_certificate_digest_algorithm_ = std::string(digest_alg); if (!peer_cert_chain_) { // Normal case, where the digest is set before we obtain the certificate // from the handshake. - return true; + return SSLPeerCertificateDigestError::NONE; } if (!VerifyPeerCertificate()) { Error("SetPeerCertificateDigest", -1, SSL_AD_BAD_CERTIFICATE, false); - if (error) { - *error = SSLPeerCertificateDigestError::VERIFICATION_FAILED; - } - return false; + return SSLPeerCertificateDigestError::VERIFICATION_FAILED; } if (state_ == SSL_CONNECTED) { // Post the event asynchronously to unwind the stack. The caller // of ContinueSSL may be the same object listening for these // events and may not be prepared for reentrancy. 
- PostEvent(SE_OPEN | SE_READ | SE_WRITE, 0); + PostEvent(webrtc::SE_OPEN | webrtc::SE_READ | webrtc::SE_WRITE, 0); } - - return true; + return SSLPeerCertificateDigestError::NONE; } -std::string OpenSSLStreamAdapter::SslCipherSuiteToName(int cipher_suite) { -#ifdef OPENSSL_IS_BORINGSSL - const SSL_CIPHER* ssl_cipher = SSL_get_cipher_by_value(cipher_suite); - if (!ssl_cipher) { - return std::string(); - } - return SSL_CIPHER_standard_name(ssl_cipher); -#else - const int openssl_cipher_id = 0x03000000L | cipher_suite; - for (const SslCipherMapEntry* entry = kSslCipherMap; entry->rfc_name; - ++entry) { - if (openssl_cipher_id == static_cast(entry->openssl_id)) { - return entry->rfc_name; - } +std::optional OpenSSLStreamAdapter::GetTlsCipherSuiteName() + const { + if (state_ != SSL_CONNECTED) { + return std::nullopt; } - return std::string(); -#endif + + const SSL_CIPHER* current_cipher = SSL_get_current_cipher(ssl_); + return SSL_CIPHER_standard_name(current_cipher); } -bool OpenSSLStreamAdapter::GetSslCipherSuite(int* cipher_suite) { +bool OpenSSLStreamAdapter::GetSslCipherSuite(int* cipher_suite) const { if (state_ != SSL_CONNECTED) { return false; } @@ -423,27 +395,37 @@ bool OpenSSLStreamAdapter::GetSslCipherSuite(int* cipher_suite) { SSLProtocolVersion OpenSSLStreamAdapter::GetSslVersion() const { if (state_ != SSL_CONNECTED) { - return SSL_PROTOCOL_NOT_GIVEN; + return webrtc::SSL_PROTOCOL_NOT_GIVEN; } int ssl_version = SSL_version(ssl_); - if (ssl_mode_ == SSL_MODE_DTLS) { + if (ssl_mode_ == webrtc::SSL_MODE_DTLS) { if (ssl_version == DTLS1_VERSION) { - return SSL_PROTOCOL_DTLS_10; + return webrtc::SSL_PROTOCOL_DTLS_10; } else if (ssl_version == DTLS1_2_VERSION) { - return SSL_PROTOCOL_DTLS_12; + return webrtc::SSL_PROTOCOL_DTLS_12; } +#ifdef DTLS1_3_VERSION + if (ssl_version == DTLS1_3_VERSION) { + return webrtc::SSL_PROTOCOL_DTLS_13; + } +#endif } else { if (ssl_version == TLS1_VERSION) { - return SSL_PROTOCOL_TLS_10; + return webrtc::SSL_PROTOCOL_TLS_10; } else if (ssl_version == TLS1_1_VERSION) { - return SSL_PROTOCOL_TLS_11; + return webrtc::SSL_PROTOCOL_TLS_11; } else if (ssl_version == TLS1_2_VERSION) { - return SSL_PROTOCOL_TLS_12; + return webrtc::SSL_PROTOCOL_TLS_12; } +#ifdef TLS1_3_VERSION + if (ssl_version == TLS1_3_VERSION) { + return webrtc::SSL_PROTOCOL_TLS_13; + } +#endif } - return SSL_PROTOCOL_NOT_GIVEN; + return webrtc::SSL_PROTOCOL_NOT_GIVEN; } bool OpenSSLStreamAdapter::GetSslVersionBytes(int* version) const { @@ -454,21 +436,46 @@ bool OpenSSLStreamAdapter::GetSslVersionBytes(int* version) const { return true; } -// Key Extractor interface -bool OpenSSLStreamAdapter::ExportKeyingMaterial(absl::string_view label, - const uint8_t* context, - size_t context_len, - bool use_context, - uint8_t* result, - size_t result_len) { - if (SSL_export_keying_material(ssl_, result, result_len, label.data(), - label.length(), context, context_len, - use_context) != 1) { +uint16_t OpenSSLStreamAdapter::GetSslGroupIdForTesting() const { + if (state_ != SSL_CONNECTED) { + return 0; + } +#ifdef OPENSSL_IS_BORINGSSL + return SSL_get_group_id(ssl_); +#else + return 0; +#endif +} + +bool OpenSSLStreamAdapter::ExportSrtpKeyingMaterial( + ZeroOnFreeBuffer& keying_material) { + // Arguments are: + // keying material/len -- a buffer to hold the keying material. + // label -- the exporter label. + // part of the RFC defining each exporter + // usage. We only use RFC 5764 for DTLS-SRTP. 
+ // context/context_len -- a context to bind to for this connection; + // use_context optional, can be null, 0 (IN). Not used by WebRTC. + if (SSL_export_keying_material( + ssl_, keying_material.data(), keying_material.size(), + kDtlsSrtpExporterLabel.data(), kDtlsSrtpExporterLabel.size(), nullptr, + 0, false) != 1) { return false; } return true; } +uint16_t OpenSSLStreamAdapter::GetPeerSignatureAlgorithm() const { + if (state_ != SSL_CONNECTED) { + return 0; + } +#ifdef OPENSSL_IS_BORINGSSL + return SSL_get_peer_signature_algorithm(ssl_); +#else + return kSslSignatureAlgorithmUnknown; +#endif +} + bool OpenSSLStreamAdapter::SetDtlsSrtpCryptoSuites( const std::vector& ciphers) { if (state_ != SSL_NONE) { @@ -503,7 +510,7 @@ bool OpenSSLStreamAdapter::SetDtlsSrtpCryptoSuites( return true; } -bool OpenSSLStreamAdapter::GetDtlsSrtpCryptoSuite(int* crypto_suite) { +bool OpenSSLStreamAdapter::GetDtlsSrtpCryptoSuite(int* crypto_suite) const { RTC_DCHECK(state_ == SSL_CONNECTED); if (state_ != SSL_CONNECTED) { return false; @@ -517,7 +524,7 @@ bool OpenSSLStreamAdapter::GetDtlsSrtpCryptoSuite(int* crypto_suite) { } *crypto_suite = srtp_profile->id; - RTC_DCHECK(!SrtpCryptoSuiteToName(*crypto_suite).empty()); + RTC_DCHECK(!webrtc::SrtpCryptoSuiteToName(*crypto_suite).empty()); return true; } @@ -531,7 +538,7 @@ int OpenSSLStreamAdapter::StartSSL() { return -1; } - if (stream_->GetState() != SS_OPEN) { + if (stream_->GetState() != webrtc::SS_OPEN) { state_ = SSL_WAIT; return 0; } @@ -556,14 +563,27 @@ void OpenSSLStreamAdapter::SetMaxProtocolVersion(SSLProtocolVersion version) { } void OpenSSLStreamAdapter::SetInitialRetransmissionTimeout(int timeout_ms) { - RTC_DCHECK(ssl_ctx_ == nullptr); dtls_handshake_timeout_ms_ = timeout_ms; +#ifdef OPENSSL_IS_BORINGSSL + if (ssl_ctx_ != nullptr && ssl_mode_ == webrtc::SSL_MODE_DTLS) { + // TODO (jonaso, webrtc:367395350): Switch to upcoming + // DTLSv1_set_timeout_duration. 
+ DTLSv1_set_initial_timeout_duration(ssl_, dtls_handshake_timeout_ms_); + } +#endif +} + +void OpenSSLStreamAdapter::SetMTU(int mtu) { + dtls_mtu_ = mtu; + if (ssl_) { + RTC_CHECK(SSL_set_mtu(ssl_, dtls_mtu_)) << "Call to SSL_set_mtu failed."; + } } // // StreamInterface Implementation // -StreamResult OpenSSLStreamAdapter::Write(rtc::ArrayView data, +StreamResult OpenSSLStreamAdapter::Write(ArrayView data, size_t& written, int& error) { RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::Write(" << data.size() << ")"; @@ -572,33 +592,31 @@ StreamResult OpenSSLStreamAdapter::Write(rtc::ArrayView data, case SSL_NONE: // pass-through in clear text return stream_->Write(data, written, error); - case SSL_WAIT: case SSL_CONNECTING: - return SR_BLOCK; - + return webrtc::SR_BLOCK; case SSL_CONNECTED: if (WaitingToVerifyPeerCertificate()) { - return SR_BLOCK; + return webrtc::SR_BLOCK; } break; - case SSL_ERROR: case SSL_CLOSED: default: error = ssl_error_code_; - return SR_ERROR; + return webrtc::SR_ERROR; } // OpenSSL will return an error if we try to write zero bytes if (data.size() == 0) { written = 0; - return SR_SUCCESS; + return webrtc::SR_SUCCESS; } ssl_write_needs_read_ = false; - int code = SSL_write(ssl_, data.data(), checked_cast(data.size())); + int code = + SSL_write(ssl_, data.data(), webrtc::checked_cast(data.size())); int ssl_error = SSL_get_error(ssl_, code); switch (ssl_error) { case SSL_ERROR_NONE: @@ -606,25 +624,24 @@ StreamResult OpenSSLStreamAdapter::Write(rtc::ArrayView data, RTC_DCHECK_GT(code, 0); RTC_DCHECK_LE(code, data.size()); written = code; - return SR_SUCCESS; + return webrtc::SR_SUCCESS; case SSL_ERROR_WANT_READ: RTC_DLOG(LS_VERBOSE) << " -- error want read"; ssl_write_needs_read_ = true; - return SR_BLOCK; + return webrtc::SR_BLOCK; case SSL_ERROR_WANT_WRITE: RTC_DLOG(LS_VERBOSE) << " -- error want write"; - return SR_BLOCK; - + return webrtc::SR_BLOCK; case SSL_ERROR_ZERO_RETURN: default: Error("SSL_write", (ssl_error ? ssl_error : -1), 0, false); error = ssl_error_code_; - return SR_ERROR; + return webrtc::SR_ERROR; } // not reached } -StreamResult OpenSSLStreamAdapter::Read(rtc::ArrayView data, +StreamResult OpenSSLStreamAdapter::Read(ArrayView data, size_t& read, int& error) { RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::Read(" << data.size() << ")"; @@ -634,29 +651,30 @@ StreamResult OpenSSLStreamAdapter::Read(rtc::ArrayView data, return stream_->Read(data, read, error); case SSL_WAIT: case SSL_CONNECTING: - return SR_BLOCK; + return webrtc::SR_BLOCK; case SSL_CONNECTED: if (WaitingToVerifyPeerCertificate()) { - return SR_BLOCK; + return webrtc::SR_BLOCK; } break; case SSL_CLOSED: - return SR_EOS; + return webrtc::SR_EOS; case SSL_ERROR: default: error = ssl_error_code_; - return SR_ERROR; + return webrtc::SR_ERROR; } // Don't trust OpenSSL with zero byte reads if (data.size() == 0) { read = 0; - return SR_SUCCESS; + return webrtc::SR_SUCCESS; } ssl_read_needs_write_ = false; - const int code = SSL_read(ssl_, data.data(), checked_cast(data.size())); + const int code = + SSL_read(ssl_, data.data(), webrtc::checked_cast(data.size())); const int ssl_error = SSL_get_error(ssl_, code); switch (ssl_error) { @@ -666,33 +684,33 @@ StreamResult OpenSSLStreamAdapter::Read(rtc::ArrayView data, RTC_DCHECK_LE(code, data.size()); read = code; - if (ssl_mode_ == SSL_MODE_DTLS) { + if (ssl_mode_ == webrtc::SSL_MODE_DTLS) { // Enforce atomic reads -- this is a short read unsigned int pending = SSL_pending(ssl_); if (pending) { RTC_DLOG(LS_INFO) << " -- short DTLS read. 
flushing"; FlushInput(pending); - error = SSE_MSG_TRUNC; - return SR_ERROR; + error = webrtc::SSE_MSG_TRUNC; + return webrtc::SR_ERROR; } } - return SR_SUCCESS; + return webrtc::SR_SUCCESS; case SSL_ERROR_WANT_READ: RTC_DLOG(LS_VERBOSE) << " -- error want read"; - return SR_BLOCK; + return webrtc::SR_BLOCK; case SSL_ERROR_WANT_WRITE: RTC_DLOG(LS_VERBOSE) << " -- error want write"; ssl_read_needs_write_ = true; - return SR_BLOCK; + return webrtc::SR_BLOCK; case SSL_ERROR_ZERO_RETURN: RTC_DLOG(LS_VERBOSE) << " -- remote side closed"; Close(); - return SR_EOS; + return webrtc::SR_EOS; default: Error("SSL_read", (ssl_error ? ssl_error : -1), 0, false); error = ssl_error_code_; - return SR_ERROR; + return webrtc::SR_ERROR; } // not reached } @@ -732,69 +750,67 @@ StreamState OpenSSLStreamAdapter::GetState() const { switch (state_) { case SSL_WAIT: case SSL_CONNECTING: - return SS_OPENING; + return webrtc::SS_OPENING; case SSL_CONNECTED: if (WaitingToVerifyPeerCertificate()) { - return SS_OPENING; + return webrtc::SS_OPENING; } - return SS_OPEN; + return webrtc::SS_OPEN; default: - return SS_CLOSED; + return webrtc::SS_CLOSED; } // not reached } -void OpenSSLStreamAdapter::OnEvent(StreamInterface* stream, - int events, - int err) { +void OpenSSLStreamAdapter::OnEvent(int events, int err) { + RTC_DCHECK_RUN_ON(&callback_sequence_); int events_to_signal = 0; int signal_error = 0; - RTC_DCHECK(stream == stream_.get()); - if ((events & SE_OPEN)) { + if ((events & webrtc::SE_OPEN)) { RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent SE_OPEN"; if (state_ != SSL_WAIT) { RTC_DCHECK(state_ == SSL_NONE); - events_to_signal |= SE_OPEN; + events_to_signal |= webrtc::SE_OPEN; } else { state_ = SSL_CONNECTING; - if (int err = BeginSSL()) { - Error("BeginSSL", err, 0, true); + if (int error = BeginSSL()) { + Error("BeginSSL", error, 0, true); return; } } } - if ((events & (SE_READ | SE_WRITE))) { + if ((events & (webrtc::SE_READ | webrtc::SE_WRITE))) { RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent" - << ((events & SE_READ) ? " SE_READ" : "") - << ((events & SE_WRITE) ? " SE_WRITE" : ""); + << ((events & webrtc::SE_READ) ? " SE_READ" : "") + << ((events & webrtc::SE_WRITE) ? " SE_WRITE" : ""); if (state_ == SSL_NONE) { - events_to_signal |= events & (SE_READ | SE_WRITE); + events_to_signal |= events & (webrtc::SE_READ | webrtc::SE_WRITE); } else if (state_ == SSL_CONNECTING) { - if (int err = ContinueSSL()) { - Error("ContinueSSL", err, 0, true); + if (int error = ContinueSSL()) { + Error("ContinueSSL", error, 0, true); return; } } else if (state_ == SSL_CONNECTED) { - if (((events & SE_READ) && ssl_write_needs_read_) || - (events & SE_WRITE)) { + if (((events & webrtc::SE_READ) && ssl_write_needs_read_) || + (events & webrtc::SE_WRITE)) { RTC_DLOG(LS_VERBOSE) << " -- onStreamWriteable"; - events_to_signal |= SE_WRITE; + events_to_signal |= webrtc::SE_WRITE; } - if (((events & SE_WRITE) && ssl_read_needs_write_) || - (events & SE_READ)) { + if (((events & webrtc::SE_WRITE) && ssl_read_needs_write_) || + (events & webrtc::SE_READ)) { RTC_DLOG(LS_VERBOSE) << " -- onStreamReadable"; - events_to_signal |= SE_READ; + events_to_signal |= webrtc::SE_READ; } } } - if ((events & SE_CLOSE)) { + if ((events & webrtc::SE_CLOSE)) { RTC_DLOG(LS_VERBOSE) << "OpenSSLStreamAdapter::OnEvent(SE_CLOSE, " << err << ")"; Cleanup(0); - events_to_signal |= SE_CLOSE; + events_to_signal |= webrtc::SE_CLOSE; // SE_CLOSE is the only event that uses the final parameter to OnEvent(). 
RTC_DCHECK(signal_error == 0); signal_error = err; @@ -803,13 +819,14 @@ void OpenSSLStreamAdapter::OnEvent(StreamInterface* stream, if (events_to_signal) { // Note that the adapter presents itself as the origin of the stream events, // since users of the adapter may not recognize the adapted object. - SignalEvent(this, events_to_signal, signal_error); + FireEvent(events_to_signal, signal_error); } } void OpenSSLStreamAdapter::PostEvent(int events, int err) { owner_->PostTask(SafeTask(task_safety_.flag(), [this, events, err]() { - SignalEvent(this, events, err); + RTC_DCHECK_RUN_ON(&callback_sequence_); + FireEvent(events, err); })); } @@ -819,26 +836,31 @@ void OpenSSLStreamAdapter::SetTimeout(int delay_ms) { RTC_DCHECK_GE(delay_ms, 0); RTC_DCHECK(!timeout_task_.Running()); - timeout_task_ = webrtc::RepeatingTaskHandle::DelayedStart( - owner_, webrtc::TimeDelta::Millis(delay_ms), + timeout_task_ = RepeatingTaskHandle::DelayedStart( + owner_, TimeDelta::Millis(delay_ms), [flag = task_safety_.flag(), this]() { if (flag->alive()) { RTC_DLOG(LS_INFO) << "DTLS timeout expired"; timeout_task_.Stop(); int res = DTLSv1_handle_timeout(ssl_); if (res > 0) { + retransmission_count_++; RTC_LOG(LS_INFO) << "DTLS retransmission"; } else if (res < 0) { RTC_LOG(LS_INFO) << "DTLSv1_handle_timeout() return -1"; Error("DTLSv1_handle_timeout", res, -1, true); - return webrtc::TimeDelta::PlusInfinity(); + return TimeDelta::PlusInfinity(); + } + // We check the timer even after SSL_CONNECTED, + // but ContinueSSL() is only needed when SSL_CONNECTING + if (state_ == SSL_CONNECTING) { + ContinueSSL(); } - ContinueSSL(); } else { RTC_DCHECK_NOTREACHED(); } // This callback will never run again (stopped above). - return webrtc::TimeDelta::PlusInfinity(); + return TimeDelta::PlusInfinity(); }); } @@ -870,16 +892,28 @@ int OpenSSLStreamAdapter::BeginSSL() { SSL_set_app_data(ssl_, this); SSL_set_bio(ssl_, bio, bio); // the SSL object owns the bio now. - if (ssl_mode_ == SSL_MODE_DTLS) { + + // Use SSL_set_mtu to configure MTU insted of + // BIO_CTRL_DGRAM_QUERY_MTU + SSL_set_options(ssl_, SSL_OP_NO_QUERY_MTU); + SSL_set_mtu(ssl_, dtls_mtu_); + #ifdef OPENSSL_IS_BORINGSSL + if (ssl_mode_ == webrtc::SSL_MODE_DTLS) { DTLSv1_set_initial_timeout_duration(ssl_, dtls_handshake_timeout_ms_); -#else - // Enable read-ahead for DTLS so whole packets are read from internal BIO - // before parsing. This is done internally by BoringSSL for DTLS. - SSL_set_read_ahead(ssl_, 1); -#endif } + // Experimental code guarded by WebRTC-EnableDtlsPqc. + if (enable_dtls_pqc_) { + const uint16_t kGroups[] = {SSL_GROUP_X25519_MLKEM768, SSL_GROUP_X25519, + SSL_GROUP_SECP256R1, SSL_GROUP_SECP384R1}; + if (!SSL_set1_group_ids(ssl_, kGroups, std::size(kGroups))) { + RTC_LOG(LS_WARNING) << "Failed to call SSL_set1_group_ids."; + return -1; + } + } +#endif + SSL_set_mode(ssl_, SSL_MODE_ENABLE_PARTIAL_WRITE | SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER); @@ -888,18 +922,20 @@ int OpenSSLStreamAdapter::BeginSSL() { } int OpenSSLStreamAdapter::ContinueSSL() { + RTC_DCHECK_RUN_ON(&callback_sequence_); RTC_DLOG(LS_VERBOSE) << "ContinueSSL"; - RTC_DCHECK(state_ == SSL_CONNECTING); + RTC_DCHECK_EQ(state_, SSL_CONNECTING); // Clear the DTLS timer timeout_task_.Stop(); - const int code = (role_ == SSL_CLIENT) ? SSL_connect(ssl_) : SSL_accept(ssl_); + const int code = + (role_ == webrtc::SSL_CLIENT) ? 
SSL_connect(ssl_) : SSL_accept(ssl_); const int ssl_error = SSL_get_error(ssl_, code); switch (ssl_error) { case SSL_ERROR_NONE: - RTC_DLOG(LS_VERBOSE) << " -- success"; + RTC_DLOG(LS_INFO) << " -- success"; // By this point, OpenSSL should have given us a certificate, or errored // out if one was missing. RTC_DCHECK(peer_cert_chain_ || !GetClientAuthEnabled()); @@ -914,25 +950,17 @@ int OpenSSLStreamAdapter::ContinueSSL() { // The caller of ContinueSSL may be the same object listening for these // events and may not be prepared for reentrancy. // PostEvent(SE_OPEN | SE_READ | SE_WRITE, 0); - SignalEvent(this, SE_OPEN | SE_READ | SE_WRITE, 0); + FireEvent(webrtc::SE_OPEN | webrtc::SE_READ | webrtc::SE_WRITE, 0); } break; - - case SSL_ERROR_WANT_READ: { - RTC_DLOG(LS_VERBOSE) << " -- error want read"; - struct timeval timeout; - if (DTLSv1_get_timeout(ssl_, &timeout)) { - int delay = timeout.tv_sec * 1000 + timeout.tv_usec / 1000; - SetTimeout(delay); - } - } break; - + case SSL_ERROR_WANT_READ: + RTC_DLOG(LS_INFO) << " -- error when we want to read"; + break; case SSL_ERROR_WANT_WRITE: - RTC_DLOG(LS_VERBOSE) << " -- error want write"; + RTC_DLOG(LS_INFO) << " -- error when we want to write"; break; - case SSL_ERROR_ZERO_RETURN: - default: + default: { SSLHandshakeError ssl_handshake_err = SSLHandshakeError::UNKNOWN; int err_code = ERR_peek_last_error(); if (err_code != 0 && ERR_GET_REASON(err_code) == SSL_R_NO_SHARED_CIPHER) { @@ -944,6 +972,15 @@ int OpenSSLStreamAdapter::ContinueSSL() { handshake_error_(ssl_handshake_err); } return (ssl_error != 0) ? ssl_error : -1; + } + } + + if (ssl_ != nullptr) { + struct timeval timeout; + if (DTLSv1_get_timeout(ssl_, &timeout)) { + int delay = timeout.tv_sec * 1000 + timeout.tv_usec / 1000; + SetTimeout(delay); + } } return 0; @@ -953,13 +990,14 @@ void OpenSSLStreamAdapter::Error(absl::string_view context, int err, uint8_t alert, bool signal) { + RTC_DCHECK_RUN_ON(&callback_sequence_); RTC_LOG(LS_WARNING) << "OpenSSLStreamAdapter::Error(" << context << ", " << err << ", " << static_cast(alert) << ")"; state_ = SSL_ERROR; ssl_error_code_ = err; Cleanup(alert); if (signal) { - SignalEvent(this, SE_CLOSE, err); + FireEvent(webrtc::SE_CLOSE, err); } } @@ -1009,9 +1047,9 @@ SSL_CTX* OpenSSLStreamAdapter::SetupSSLContext() { #ifdef OPENSSL_IS_BORINGSSL // If X509 objects aren't used, we can use these methods to avoid // linking the sizable crypto/x509 code, using CRYPTO_BUFFER instead. - SSL_CTX* ctx = - SSL_CTX_new(ssl_mode_ == SSL_MODE_DTLS ? DTLS_with_buffers_method() - : TLS_with_buffers_method()); + SSL_CTX* ctx = SSL_CTX_new(ssl_mode_ == webrtc::SSL_MODE_DTLS + ? DTLS_with_buffers_method() + : TLS_with_buffers_method()); #else SSL_CTX* ctx = SSL_CTX_new(ssl_mode_ == SSL_MODE_DTLS ? DTLS_method() : TLS_method()); @@ -1020,33 +1058,20 @@ SSL_CTX* OpenSSLStreamAdapter::SetupSSLContext() { return nullptr; } - if (support_legacy_tls_protocols_flag_) { - // TODO(https://bugs.webrtc.org/10261): Completely remove this branch in - // M84. - SSL_CTX_set_min_proto_version( - ctx, ssl_mode_ == SSL_MODE_DTLS ? DTLS1_VERSION : TLS1_VERSION); - switch (ssl_max_version_) { - case SSL_PROTOCOL_TLS_10: - SSL_CTX_set_max_proto_version( - ctx, ssl_mode_ == SSL_MODE_DTLS ? DTLS1_VERSION : TLS1_VERSION); - break; - case SSL_PROTOCOL_TLS_11: - SSL_CTX_set_max_proto_version( - ctx, ssl_mode_ == SSL_MODE_DTLS ? DTLS1_VERSION : TLS1_1_VERSION); - break; - case SSL_PROTOCOL_TLS_12: - default: - SSL_CTX_set_max_proto_version( - ctx, ssl_mode_ == SSL_MODE_DTLS ? 
DTLS1_2_VERSION : TLS1_2_VERSION); - break; - } - } else { - // TODO(https://bugs.webrtc.org/10261): Make this the default in M84. - SSL_CTX_set_min_proto_version( - ctx, ssl_mode_ == SSL_MODE_DTLS ? DTLS1_2_VERSION : TLS1_2_VERSION); - SSL_CTX_set_max_proto_version( - ctx, ssl_mode_ == SSL_MODE_DTLS ? DTLS1_2_VERSION : TLS1_2_VERSION); + auto min_version = + ssl_mode_ == webrtc::SSL_MODE_DTLS ? DTLS1_2_VERSION : TLS1_2_VERSION; + auto max_version = GetMaxVersion(ssl_mode_, ssl_max_version_); +#ifdef DTLS1_3_VERSION + if (force_dtls_13_ == kForceDtls13Enabled) { + max_version = DTLS1_3_VERSION; + } else if (force_dtls_13_ == kForceDtls13Only) { + min_version = DTLS1_3_VERSION; + max_version = DTLS1_3_VERSION; } +#endif + + SSL_CTX_set_min_proto_version(ctx, min_version); + SSL_CTX_set_max_proto_version(ctx, max_version); #ifdef OPENSSL_IS_BORINGSSL // SSL_CTX_set_current_time_cb is only supported in BoringSSL. @@ -1061,6 +1086,7 @@ SSL_CTX* OpenSSLStreamAdapter::SetupSSLContext() { return nullptr; } + // TODO(bugs.webrtc.org/339300437): Remove dependency. SSL_CTX_set_info_callback(ctx, OpenSSLAdapter::SSLInfoCallback); int mode = SSL_VERIFY_PEER; @@ -1098,6 +1124,14 @@ SSL_CTX* OpenSSLStreamAdapter::SetupSSLContext() { } } +#ifdef OPENSSL_IS_BORINGSSL + SSL_CTX_set_permute_extensions(ctx, true); +#endif + +#if defined(OPENSSL_IS_BORINGSSL) || (OPENSSL_VERSION_NUMBER >= 0x30000000L) + SSL_CTX_set_options(ctx, SSL_OP_NO_TICKET); +#endif + return ctx; } @@ -1108,22 +1142,19 @@ bool OpenSSLStreamAdapter::VerifyPeerCertificate() { return false; } - unsigned char digest[EVP_MAX_MD_SIZE]; - size_t digest_length; + Buffer computed_digest(0, EVP_MAX_MD_SIZE); if (!peer_cert_chain_->Get(0).ComputeDigest( - peer_certificate_digest_algorithm_, digest, sizeof(digest), - &digest_length)) { + peer_certificate_digest_algorithm_, computed_digest)) { RTC_LOG(LS_WARNING) << "Failed to compute peer cert digest."; return false; } - Buffer computed_digest(digest, digest_length); if (computed_digest != peer_certificate_digest_value_) { RTC_LOG(LS_WARNING) << "Rejected peer certificate due to mismatched digest using " << peer_certificate_digest_algorithm_ << ". Expected " - << rtc::hex_encode_with_delimiter(peer_certificate_digest_value_, ':') - << " got " << rtc::hex_encode_with_delimiter(computed_digest, ':'); + << hex_encode_with_delimiter(peer_certificate_digest_value_, ':') + << " got " << hex_encode_with_delimiter(computed_digest, ':'); return false; } // Ignore any verification error if the digest matches, since there is no @@ -1179,7 +1210,7 @@ int OpenSSLStreamAdapter::SSLVerifyCallback(X509_STORE_CTX* store, void* arg) { // Record the peer's certificate. X509* cert = X509_STORE_CTX_get0_cert(store); stream->peer_cert_chain_.reset( - new SSLCertChain(std::make_unique(cert))); + new SSLCertChain(std::make_unique(cert))); // If the peer certificate digest isn't known yet, we'll wait to verify // until it's known, and for now just return a success status. @@ -1221,8 +1252,10 @@ static const cipher_list OK_RSA_ciphers[] = { #ifdef TLS1_CK_ECDHE_RSA_WITH_AES_256_GCM_SHA256 CDEF(ECDHE_RSA_WITH_AES_256_GCM_SHA256), #endif -#ifdef TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 +#ifdef TLS1_CK_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 // BoringSSL. CDEF(ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256), +#elif defined(TLS1_RFC_ECDHE_ECDSA_WITH_CHACHA20_POLY1305) // OpenSSL. 
+ CDEF(ECDHE_RSA_WITH_CHACHA20_POLY1305), #endif }; @@ -1233,12 +1266,29 @@ static const cipher_list OK_ECDSA_ciphers[] = { #ifdef TLS1_CK_ECDHE_ECDSA_WITH_AES_256_GCM_SHA256 CDEF(ECDHE_ECDSA_WITH_AES_256_GCM_SHA256), #endif -#ifdef TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 +#ifdef TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 // BoringSSL. CDEF(ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256), +#elif defined(TLS1_CK_ECDHE_ECDSA_WITH_CHACHA20_POLY1305) // OpenSSL. + CDEF(ECDHE_ECDSA_WITH_CHACHA20_POLY1305), #endif }; #undef CDEF +static const cipher_list OK_DTLS13_ciphers[] = { +#ifdef TLS1_3_CK_AES_128_GCM_SHA256 // BoringSSL TLS 1.3 + {static_cast(TLS1_3_CK_AES_128_GCM_SHA256 & 0xffff), + "TLS_AES_128_GCM_SHA256"}, +#endif +#ifdef TLS1_3_CK_AES_256_GCM_SHA256 // BoringSSL TLS 1.3 + {static_cast(TLS1_3_CK_AES_256_GCM_SHA256 & 0xffff), + "TLS_AES_256_GCM_SHA256"}, +#endif +#ifdef TLS1_3_CK_CHACHA20_POLY1305_SHA256 // BoringSSL TLS 1.3 + {static_cast(TLS1_3_CK_CHACHA20_POLY1305_SHA256 & 0xffff), + "TLS_CHACHA20_POLY1305_SHA256"}, +#endif +}; + bool OpenSSLStreamAdapter::IsAcceptableCipher(int cipher, KeyType key_type) { if (key_type == KT_RSA) { for (const cipher_list& c : OK_RSA_ciphers) { @@ -1256,6 +1306,12 @@ bool OpenSSLStreamAdapter::IsAcceptableCipher(int cipher, KeyType key_type) { } } + for (const cipher_list& c : OK_DTLS13_ciphers) { + if (cipher == c.cipher) { + return true; + } + } + return false; } @@ -1277,6 +1333,12 @@ bool OpenSSLStreamAdapter::IsAcceptableCipher(absl::string_view cipher, } } + for (const cipher_list& c : OK_DTLS13_ciphers) { + if (cipher == c.cipher_str) { + return true; + } + } + return false; } @@ -1286,4 +1348,12 @@ void OpenSSLStreamAdapter::EnableTimeCallbackForTesting() { #endif } -} // namespace rtc +SSLProtocolVersion OpenSSLStreamAdapter::GetMaxSupportedDTLSProtocolVersion() { +#if defined(OPENSSL_IS_BORINGSSL) && defined(DTLS1_3_VERSION) + return webrtc::SSL_PROTOCOL_DTLS_13; +#else + return SSL_PROTOCOL_DTLS_12; +#endif +} + +} // namespace webrtc diff --git a/rtc_base/openssl_stream_adapter.h b/rtc_base/openssl_stream_adapter.h index 579ca2a1f8..4259f8c1b5 100644 --- a/rtc_base/openssl_stream_adapter.h +++ b/rtc_base/openssl_stream_adapter.h @@ -16,27 +16,30 @@ #include #include +#include #include #include #include "absl/functional/any_invocable.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/array_view.h" #include "rtc_base/buffer.h" +#include "rtc_base/ssl_certificate.h" #ifdef OPENSSL_IS_BORINGSSL #include "rtc_base/boringssl_identity.h" +#include "rtc_base/openssl.h" #else #include "rtc_base/openssl_identity.h" #endif +#include "api/field_trials_view.h" #include "api/task_queue/pending_task_safety_flag.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/stream.h" -#include "rtc_base/system/rtc_export.h" #include "rtc_base/task_utils/repeating_task.h" -#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" -namespace rtc { +namespace webrtc { // This class was written with OpenSSLAdapter (a socket adapter) as a // starting point. It has similar structure and functionality, but uses a @@ -60,73 +63,59 @@ namespace rtc { // and it has an explicit SSL_CLOSED state. It should not be possible to send // any data in clear after one of the StartSSL methods has been called. -// Look in sslstreamadapter.h for documentation of the methods. - -class SSLCertChain; +// Look in ssl_stream_adapter.h for documentation of the methods. 
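Illustrative sketch (not part of the change itself): with DTLS 1.3 support now conditional on the BoringSSL headers, callers can feature-detect it through the new static helper before the handshake starts; the adapter reference is an assumption made only for this example.

#include "rtc_base/openssl_stream_adapter.h"

void ConfigureMaxDtlsVersion(webrtc::OpenSSLStreamAdapter& adapter) {
  // Configure before StartSSL(), while the SSL context has not been created.
  if (webrtc::OpenSSLStreamAdapter::GetMaxSupportedDTLSProtocolVersion() ==
      webrtc::SSL_PROTOCOL_DTLS_13) {
    adapter.SetMaxProtocolVersion(webrtc::SSL_PROTOCOL_DTLS_13);
  } else {
    adapter.SetMaxProtocolVersion(webrtc::SSL_PROTOCOL_DTLS_12);
  }
}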
/////////////////////////////////////////////////////////////////////////////// -// If `allow` has a value, its value determines if legacy TLS protocols are -// allowed, overriding the default configuration. -// If `allow` has no value, any previous override is removed and the default -// configuration is restored. -RTC_EXPORT void SetAllowLegacyTLSProtocols(const absl::optional& allow); - -class OpenSSLStreamAdapter final : public SSLStreamAdapter, - public sigslot::has_slots<> { +class OpenSSLStreamAdapter final : public SSLStreamAdapter { public: OpenSSLStreamAdapter( std::unique_ptr stream, - absl::AnyInvocable handshake_error); + absl::AnyInvocable handshake_error, + const FieldTrialsView* field_trials = nullptr); ~OpenSSLStreamAdapter() override; void SetIdentity(std::unique_ptr identity) override; SSLIdentity* GetIdentityForTesting() const override; // Default argument is for compatibility - void SetServerRole(SSLRole role = SSL_SERVER) override; - bool SetPeerCertificateDigest( + void SetServerRole(SSLRole role = webrtc::SSL_SERVER) override; + SSLPeerCertificateDigestError SetPeerCertificateDigest( absl::string_view digest_alg, - const unsigned char* digest_val, - size_t digest_len, - SSLPeerCertificateDigestError* error = nullptr) override; + ArrayView digest_val) override; std::unique_ptr GetPeerSSLCertChain() const override; // Goes from state SSL_NONE to either SSL_CONNECTING or SSL_WAIT, depending // on whether the underlying stream is already open or not. int StartSSL() override; - void SetMode(SSLMode mode) override; + [[deprecated]] void SetMode(SSLMode mode) override; void SetMaxProtocolVersion(SSLProtocolVersion version) override; void SetInitialRetransmissionTimeout(int timeout_ms) override; + void SetMTU(int mtu) override; - StreamResult Read(rtc::ArrayView data, - size_t& read, - int& error) override; - StreamResult Write(rtc::ArrayView data, + StreamResult Read(ArrayView data, size_t& read, int& error) override; + StreamResult Write(ArrayView data, size_t& written, int& error) override; void Close() override; StreamState GetState() const override; - // TODO(guoweis): Move this away from a static class method. - static std::string SslCipherSuiteToName(int crypto_suite); - - bool GetSslCipherSuite(int* cipher) override; + std::optional GetTlsCipherSuiteName() const override; - SSLProtocolVersion GetSslVersion() const override; + bool GetSslCipherSuite(int* cipher) const override; + [[deprecated("Use GetSslVersionBytes")]] SSLProtocolVersion GetSslVersion() + const override; bool GetSslVersionBytes(int* version) const override; // Key Extractor interface - bool ExportKeyingMaterial(absl::string_view label, - const uint8_t* context, - size_t context_len, - bool use_context, - uint8_t* result, - size_t result_len) override; + bool ExportSrtpKeyingMaterial( + ZeroOnFreeBuffer& keying_material) override; + + uint16_t GetPeerSignatureAlgorithm() const override; // DTLS-SRTP interface bool SetDtlsSrtpCryptoSuites(const std::vector& crypto_suites) override; - bool GetDtlsSrtpCryptoSuite(int* crypto_suite) override; + bool GetDtlsSrtpCryptoSuite(int* crypto_suite) const override; bool IsTlsConnected() override; @@ -140,6 +129,17 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter, // using a fake clock. static void EnableTimeCallbackForTesting(); + // Return max DTLS SSLProtocolVersion supported by implementation. + static SSLProtocolVersion GetMaxSupportedDTLSProtocolVersion(); + + // Return number of times DTLS retransmission has been triggered. 
+ // Used for testing (and maybe put into stats?). + int GetRetransmissionCount() const override { return retransmission_count_; } + + // Return the the ID of the group used by the adapters most recently + // completed handshake, or 0 if not applicable (e.g. before the handshake). + uint16_t GetSslGroupIdForTesting() const override; + private: enum SSLState { // Before calling one of the StartSSL methods, data flows @@ -152,7 +152,7 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter, SSL_CLOSED // Clean close }; - void OnEvent(StreamInterface* stream, int events, int err); + void OnEvent(int events, int err); void PostEvent(int events, int err); void SetTimeout(int delay_ms); @@ -209,9 +209,9 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter, const std::unique_ptr stream_; absl::AnyInvocable handshake_error_; - rtc::Thread* const owner_; - webrtc::ScopedTaskSafety task_safety_; - webrtc::RepeatingTaskHandle timeout_task_; + Thread* const owner_; + ScopedTaskSafety task_safety_; + RepeatingTaskHandle timeout_task_; SSLState state_; SSLRole role_; @@ -228,7 +228,7 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter, #ifdef OPENSSL_IS_BORINGSSL std::unique_ptr identity_; #else - std::unique_ptr identity_; + std::unique_ptr identity_; #endif // The certificate chain that the peer presented. Initially null, until the // connection is established. @@ -251,12 +251,30 @@ class OpenSSLStreamAdapter final : public SSLStreamAdapter, // be too aggressive for low bandwidth links. int dtls_handshake_timeout_ms_ = 50; - // TODO(https://bugs.webrtc.org/10261): Completely remove this option in M84. - const bool support_legacy_tls_protocols_flag_; + // MTU configured for dtls. + int dtls_mtu_ = 1200; + + // 0 == Disabled + // 1 == Max + // 2 == Enabled (both min and max) + const int force_dtls_13_ = 0; + + int retransmission_count_ = 0; + + // Experimental flag to enable Post-Quantum Cryptography TLS. + const bool enable_dtls_pqc_ = false; }; ///////////////////////////////////////////////////////////////////////////// +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::OpenSSLStreamAdapter; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_OPENSSL_STREAM_ADAPTER_H_ diff --git a/rtc_base/openssl_utility.cc b/rtc_base/openssl_utility.cc index eba3788a94..906127f932 100644 --- a/rtc_base/openssl_utility.cc +++ b/rtc_base/openssl_utility.cc @@ -33,7 +33,7 @@ #include "rtc_base/ssl_roots.h" #endif // WEBRTC_EXCLUDE_BUILT_IN_SSL_ROOT_CERTS -namespace rtc { +namespace webrtc { namespace openssl { // Holds various helper methods. @@ -42,7 +42,8 @@ namespace { // TODO(crbug.com/webrtc/11710): When OS certificate verification is available, // and we don't need VerifyPeerCertMatchesHost, don't compile this in order to // avoid a dependency on OpenSSL X509 objects (see crbug.com/webrtc/11410). -void LogCertificates(SSL* ssl, X509* certificate) { +void LogCertificates([[maybe_unused]] SSL* ssl, + [[maybe_unused]] X509* certificate) { // Logging certificates is extremely verbose. So it is disabled by default. 
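Illustrative sketch (not part of the change itself): the alias block added at the end of the header keeps legacy rtc:: spellings compiling during the namespace migration. This only holds when the build defines WEBRTC_ALLOW_DEPRECATED_NAMESPACES; the static_assert below is just one way to show that both names refer to the same type.

#include <type_traits>

#include "rtc_base/openssl_stream_adapter.h"

static_assert(std::is_same<rtc::OpenSSLStreamAdapter,
                           webrtc::OpenSSLStreamAdapter>::value,
              "rtc::OpenSSLStreamAdapter is only an alias for the webrtc:: "
              "class, not a separate type");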
#ifdef LOG_CERTIFICATES BIO* mem = BIO_new(BIO_s_mem()); @@ -151,8 +152,8 @@ bool ParseCertificate(CRYPTO_BUFFER* cert_buffer, return false; } if (expiration_time) { - *expiration_time = - ASN1TimeToSec(CBS_data(¬_after), CBS_len(¬_after), long_format); + *expiration_time = webrtc::ASN1TimeToSec(CBS_data(¬_after), + CBS_len(¬_after), long_format); } // subject Name, if (!CBS_get_asn1_element(&tbs_certificate, nullptr, CBS_ASN1_SEQUENCE)) { @@ -247,8 +248,9 @@ bool LoadBuiltinSSLRootCertificates(SSL_CTX* ctx) { for (size_t i = 0; i < arraysize(kSSLCertCertificateList); i++) { const unsigned char* cert_buffer = kSSLCertCertificateList[i]; size_t cert_buffer_len = kSSLCertCertificateSizeList[i]; - X509* cert = d2i_X509(nullptr, &cert_buffer, - checked_cast(cert_buffer_len)); // NOLINT + X509* cert = + d2i_X509(nullptr, &cert_buffer, + webrtc::checked_cast(cert_buffer_len)); // NOLINT if (cert) { int return_value = X509_STORE_add_cert(SSL_CTX_get_cert_store(ctx), cert); if (return_value == 0) { @@ -271,4 +273,4 @@ CRYPTO_BUFFER_POOL* GetBufferPool() { #endif } // namespace openssl -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/openssl_utility.h b/rtc_base/openssl_utility.h index dd183c283a..d3f5b0e049 100644 --- a/rtc_base/openssl_utility.h +++ b/rtc_base/openssl_utility.h @@ -17,7 +17,7 @@ #include "absl/strings/string_view.h" -namespace rtc { +namespace webrtc { // The openssl namespace holds static helper methods. All methods related // to OpenSSL that are commonly used and don't require global state should be // placed here. @@ -54,7 +54,29 @@ bool LoadBuiltinSSLRootCertificates(SSL_CTX* ssl_ctx); CRYPTO_BUFFER_POOL* GetBufferPool(); #endif +} // namespace openssl +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +namespace openssl { + +#ifndef WEBRTC_EXCLUDE_BUILT_IN_SSL_ROOT_CERTS +using ::webrtc::openssl::LoadBuiltinSSLRootCertificates; +#endif + +using ::webrtc::openssl::LogSSLErrors; +using ::webrtc::openssl::VerifyPeerCertMatchesHost; + +#ifdef OPENSSL_IS_BORINGSSL +using ::webrtc::openssl::GetBufferPool; +using ::webrtc::openssl::ParseCertificate; +#endif + } // namespace openssl } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_OPENSSL_UTILITY_H_ diff --git a/rtc_base/openssl_utility_unittest.cc b/rtc_base/openssl_utility_unittest.cc index 3302490bd0..1f0fcd9af0 100644 --- a/rtc_base/openssl_utility_unittest.cc +++ b/rtc_base/openssl_utility_unittest.cc @@ -40,7 +40,7 @@ #include "rtc_base/ssl_roots.h" #include "test/gmock.h" -namespace rtc { +namespace webrtc { namespace { // Fake P-256 key for use with the test certificates below. 
const unsigned char kFakeSSLPrivateKey[] = { @@ -187,7 +187,7 @@ SSL* CreateSSLWithPeerCertificate(const unsigned char* cert, size_t cert_len) { const unsigned char* key_ptr = kFakeSSLPrivateKey; EVP_PKEY* key = d2i_PrivateKey( EVP_PKEY_EC, nullptr, &key_ptr, - checked_cast(arraysize(kFakeSSLPrivateKey))); // NOLINT + webrtc::checked_cast(arraysize(kFakeSSLPrivateKey))); // NOLINT RTC_CHECK(key); #ifdef OPENSSL_IS_BORINGSSL @@ -305,4 +305,4 @@ TEST(OpenSSLUtilityTest, VerifyPeerCertMatchesHostLegacy) { SSL_free(ssl); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/operations_chain.cc b/rtc_base/operations_chain.cc index 4398bb16c1..b73189bc51 100644 --- a/rtc_base/operations_chain.cc +++ b/rtc_base/operations_chain.cc @@ -10,10 +10,15 @@ #include "rtc_base/operations_chain.h" +#include +#include + #include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "rtc_base/checks.h" -namespace rtc { +namespace webrtc { OperationsChain::CallbackHandle::CallbackHandle( scoped_refptr operations_chain) @@ -39,7 +44,7 @@ void OperationsChain::CallbackHandle::OnOperationComplete() { // static scoped_refptr OperationsChain::Create() { // Explicit new, to access private constructor. - return rtc::scoped_refptr(new OperationsChain()); + return scoped_refptr(new OperationsChain()); } OperationsChain::OperationsChain() { @@ -65,8 +70,8 @@ bool OperationsChain::IsEmpty() const { } std::function OperationsChain::CreateOperationsChainCallback() { - return [handle = rtc::make_ref_counted( - rtc::scoped_refptr(this))]() { + return [handle = make_ref_counted( + scoped_refptr(this))]() { handle->OnOperationComplete(); }; } @@ -85,4 +90,4 @@ void OperationsChain::OnOperationComplete() { } } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/operations_chain.h b/rtc_base/operations_chain.h index 0e8c0681ba..7c83b0a006 100644 --- a/rtc_base/operations_chain.h +++ b/rtc_base/operations_chain.h @@ -13,21 +13,19 @@ #include #include +#include #include -#include #include #include -#include "absl/types/optional.h" #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "rtc_base/checks.h" -#include "rtc_base/ref_count.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" -namespace rtc { +namespace webrtc { namespace rtc_operations_chain_internal { @@ -188,16 +186,24 @@ class OperationsChain final : public RefCountedNonVirtual { std::function CreateOperationsChainCallback(); void OnOperationComplete(); - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker sequence_checker_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; // FIFO-list of operations that are chained. An operation that is executing // remains on this list until it has completed by invoking the callback passed // to it. std::queue> chained_operations_ RTC_GUARDED_BY(sequence_checker_); - absl::optional> on_chain_empty_callback_ + std::optional> on_chain_empty_callback_ RTC_GUARDED_BY(sequence_checker_); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
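Illustrative sketch (not part of the change itself): OperationsChain executes queued operations strictly in order; each operation is handed a completion callback, and the next operation does not start until that callback has run. The task_queue used for the asynchronous step is an assumption made only for this example.

#include <functional>
#include <utility>

#include "api/task_queue/task_queue_base.h"
#include "rtc_base/operations_chain.h"

void QueueWork(webrtc::TaskQueueBase* task_queue) {
  auto chain = webrtc::OperationsChain::Create();
  // A synchronous operation completes by invoking the callback inline.
  chain->ChainOperation([](std::function<void()> done) { done(); });
  // An asynchronous operation keeps the chain blocked until `done` runs.
  chain->ChainOperation([task_queue](std::function<void()> done) {
    task_queue->PostTask([done = std::move(done)]() { done(); });
  });
}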
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::OperationsChain; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_OPERATIONS_CHAIN_H_ diff --git a/rtc_base/operations_chain_unittest.cc b/rtc_base/operations_chain_unittest.cc index 4f44423b19..ed5cd40e18 100644 --- a/rtc_base/operations_chain_unittest.cc +++ b/rtc_base/operations_chain_unittest.cc @@ -11,24 +11,29 @@ #include "rtc_base/operations_chain.h" #include +#include #include #include #include #include +#include "api/scoped_refptr.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" +#include "rtc_base/checks.h" #include "rtc_base/event.h" -#include "rtc_base/gunit.h" #include "rtc_base/thread.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" -namespace rtc { +namespace webrtc { using ::testing::ElementsAre; namespace { -constexpr int kDefaultTimeout = 3000; +constexpr TimeDelta kDefaultTimeout = TimeDelta::Millis(3000); } // namespace @@ -246,8 +251,7 @@ TEST(OperationsChainTest, AsynchronousOperation) { operation_tracker_proxy.PostAsynchronousOperation( &unblock_async_operation_event); // This should not be signaled until we unblock the operation. - EXPECT_FALSE( - async_operation_completed_event->Wait(webrtc::TimeDelta::Zero())); + EXPECT_FALSE(async_operation_completed_event->Wait(TimeDelta::Zero())); // Unblock the operation and wait for it to complete. unblock_async_operation_event.Set(); async_operation_completed_event->Wait(Event::kForever); @@ -265,13 +269,13 @@ TEST(OperationsChainTest, operation_tracker.BindSynchronousOperation(&event0)); // This should already be signaled. (If it wasn't, waiting wouldn't help, // because we'd be blocking the only thread that exists.) - EXPECT_TRUE(event0.Wait(webrtc::TimeDelta::Zero())); + EXPECT_TRUE(event0.Wait(TimeDelta::Zero())); // Chaining another operation should also execute immediately because the // chain should already be empty. Event event1; operations_chain->ChainOperation( operation_tracker.BindSynchronousOperation(&event1)); - EXPECT_TRUE(event1.Wait(webrtc::TimeDelta::Zero())); + EXPECT_TRUE(event1.Wait(TimeDelta::Zero())); } TEST(OperationsChainTest, AsynchronousOperationBlocksSynchronousOperation) { @@ -291,7 +295,7 @@ TEST(OperationsChainTest, AsynchronousOperationBlocksSynchronousOperation) { sync_operation_completed_event->Wait(Event::kForever); // The asynchronous avent should have blocked the synchronous event, meaning // this should already be signaled. - EXPECT_TRUE(async_operation_completed_event->Wait(webrtc::TimeDelta::Zero())); + EXPECT_TRUE(async_operation_completed_event->Wait(TimeDelta::Zero())); } TEST(OperationsChainTest, OperationsAreExecutedInOrder) { @@ -386,14 +390,14 @@ TEST(OperationsChainTest, IsEmpty) { unblock_async_operation_event1.Set(); async_operation_completed_event1->Wait(Event::kForever); EXPECT_FALSE(operation_tracker_proxy.IsEmpty()); - // Completing the last evenet empties the chain. + // Completing the last event empties the chain. 
unblock_async_operation_event2.Set(); async_operation_completed_event2->Wait(Event::kForever); EXPECT_TRUE(operation_tracker_proxy.IsEmpty()); } TEST(OperationsChainTest, OnChainEmptyCallback) { - rtc::AutoThread main_thread; + AutoThread main_thread; OperationTrackerProxy operation_tracker_proxy; operation_tracker_proxy.Initialize()->Wait(Event::kForever); @@ -411,7 +415,10 @@ TEST(OperationsChainTest, OnChainEmptyCallback) { // Completing the operation empties the chain, invoking the callback. unblock_async_operation_event0.Set(); async_operation_completed_event0->Wait(Event::kForever); - EXPECT_TRUE_WAIT(1u == on_empty_callback_counter, kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return on_empty_callback_counter == 1u; }, + ::testing::IsTrue(), {.timeout = kDefaultTimeout}), + webrtc::IsRtcOk()); // Chain multiple events. Event unblock_async_operation_event1; @@ -423,16 +430,25 @@ TEST(OperationsChainTest, OnChainEmptyCallback) { operation_tracker_proxy.PostAsynchronousOperation( &unblock_async_operation_event2); // Again, the callback is not invoked until the operation has completed. - EXPECT_TRUE_WAIT(1u == on_empty_callback_counter, kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return on_empty_callback_counter == 1u; }, + ::testing::IsTrue(), {.timeout = kDefaultTimeout}), + webrtc::IsRtcOk()); // Upon completing the first event, the chain is still not empty, so the // callback must not be invoked yet. unblock_async_operation_event1.Set(); async_operation_completed_event1->Wait(Event::kForever); - EXPECT_TRUE_WAIT(1u == on_empty_callback_counter, kDefaultTimeout); - // Completing the last evenet empties the chain, invoking the callback. + EXPECT_THAT( + webrtc::WaitUntil([&] { return on_empty_callback_counter == 1u; }, + ::testing::IsTrue(), {.timeout = kDefaultTimeout}), + webrtc::IsRtcOk()); + // Completing the last event empties the chain, invoking the callback. unblock_async_operation_event2.Set(); async_operation_completed_event2->Wait(Event::kForever); - EXPECT_TRUE_WAIT(2u == on_empty_callback_counter, kDefaultTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return on_empty_callback_counter == 2u; }, + ::testing::IsTrue(), {.timeout = kDefaultTimeout}), + webrtc::IsRtcOk()); } TEST(OperationsChainTest, @@ -499,4 +515,4 @@ TEST(OperationsChainDeathTest, #endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/physical_socket_server.cc b/rtc_base/physical_socket_server.cc index d42168249a..e69311b790 100644 --- a/rtc_base/physical_socket_server.cc +++ b/rtc_base/physical_socket_server.cc @@ -9,7 +9,20 @@ */ #include "rtc_base/physical_socket_server.h" +#include #include +#include +#include +#include + +#include "api/async_dns_resolver.h" +#include "api/transport/ecn_marking.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/deprecated/recursive_critical_section.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/thread_annotations.h" #if defined(_MSC_VER) && _MSC_VER < 1300 #pragma warning(disable : 4786) @@ -21,7 +34,6 @@ #if defined(WEBRTC_POSIX) #include -#include #if defined(WEBRTC_USE_EPOLL) // "poll" will be used to wait for the signal dispatcher. 
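Illustrative sketch (not part of the change itself): the WaitUntil/IsRtcOk pattern used in the updated tests above replaces the legacy EXPECT_TRUE_WAIT macro; WaitUntil polls the lambda until the matcher is satisfied or the timeout elapses. The counter below is a stand-in for whatever condition a real test waits on.

#include "api/test/rtc_error_matchers.h"
#include "api/units/time_delta.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/wait_until.h"

TEST(WaitUntilExampleTest, PollsUntilConditionHolds) {
  int counter = 0;
  // ... something asynchronous is expected to set `counter` to 1 ...
  counter = 1;
  EXPECT_THAT(webrtc::WaitUntil([&] { return counter == 1; },
                                ::testing::IsTrue(),
                                {.timeout = webrtc::TimeDelta::Millis(3000)}),
              webrtc::IsRtcOk());
}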
#include @@ -30,7 +42,6 @@ #endif #include #include -#include #include #endif @@ -38,23 +49,21 @@ #include #include #include + #undef SetPort #endif #include -#include -#include - -#include "rtc_base/arraysize.h" -#include "rtc_base/byte_order.h" +#include "rtc_base/async_dns_resolver.h" #include "rtc_base/checks.h" +#include "rtc_base/event.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" +#include "rtc_base/network/ecn_marking.h" #include "rtc_base/network_monitor.h" -#include "rtc_base/null_socket_server.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" -#include "system_wrappers/include/field_trial.h" #if defined(WEBRTC_LINUX) #include @@ -70,6 +79,7 @@ #if defined(WEBRTC_POSIX) #include // for TCP_NODELAY + #define IP_MTU 14 // Until this is integrated from linux/in.h to netinet/in.h typedef void* SockOptArg; @@ -83,14 +93,14 @@ int64_t GetSocketRecvTimestamp(int socket) { if (ret != 0) return -1; int64_t timestamp = - rtc::kNumMicrosecsPerSec * static_cast(tv_ioctl.tv_sec) + + webrtc::kNumMicrosecsPerSec * static_cast(tv_ioctl.tv_sec) + static_cast(tv_ioctl.tv_usec); return timestamp; } #else -int64_t GetSocketRecvTimestamp(int socket) { +int64_t GetSocketRecvTimestamp(int /* socket */) { return -1; } #endif @@ -99,17 +109,44 @@ int64_t GetSocketRecvTimestamp(int socket) { typedef char* SockOptArg; #endif -#if defined(WEBRTC_USE_EPOLL) +#if defined(WEBRTC_LINUX) // POLLRDHUP / EPOLLRDHUP are only defined starting with Linux 2.6.17. #if !defined(POLLRDHUP) #define POLLRDHUP 0x2000 -#endif +#endif // !defined(POLLRDHUP) #if !defined(EPOLLRDHUP) #define EPOLLRDHUP 0x2000 -#endif -#endif +#endif // !defined(EPOLLRDHUP) +#endif // defined(WEBRTC_LINUX) namespace { + +// RFC-3168, Section 5. ECN is the two least significant bits. +static constexpr uint8_t kEcnMask = 0x03; + +#if defined(WEBRTC_POSIX) + +webrtc::EcnMarking EcnFromDs(uint8_t ds) { + // RFC-3168, Section 5. + constexpr uint8_t ECN_ECT1 = 0x01; + constexpr uint8_t ECN_ECT0 = 0x02; + constexpr uint8_t ECN_CE = 0x03; + const uint8_t ecn = ds & kEcnMask; + + if (ecn == ECN_ECT1) { + return webrtc::EcnMarking::kEct1; + } + if (ecn == ECN_ECT0) { + return webrtc::EcnMarking::kEct0; + } + if (ecn == ECN_CE) { + return webrtc::EcnMarking::kCe; + } + return webrtc::EcnMarking::kNotEct; +} + +#endif + class ScopedSetTrue { public: ScopedSetTrue(bool* value) : value_(value) { @@ -122,22 +159,16 @@ class ScopedSetTrue { bool* value_; }; -// Returns true if the experiement "WebRTC-SCM-Timestamp" is explicitly -// disabled. -bool IsScmTimeStampExperimentDisabled() { - return webrtc::field_trial::IsDisabled("WebRTC-SCM-Timestamp"); -} } // namespace -namespace rtc { +namespace webrtc { PhysicalSocket::PhysicalSocket(PhysicalSocketServer* ss, SOCKET s) : ss_(ss), s_(s), error_(0), state_((s == INVALID_SOCKET) ? 
CS_CLOSED : CS_CONNECTED), - resolver_(nullptr), - read_scm_timestamp_experiment_(!IsScmTimeStampExperimentDisabled()) { + resolver_(nullptr) { if (s_ != INVALID_SOCKET) { SetEnabledEvents(DE_READ | DE_WRITE); @@ -173,7 +204,7 @@ SocketAddress PhysicalSocket::GetLocalAddress() const { int result = ::getsockname(s_, addr, &addrlen); SocketAddress address; if (result >= 0) { - SocketAddressFromSockAddrStorage(addr_storage, &address); + webrtc::SocketAddressFromSockAddrStorage(addr_storage, &address); } else { RTC_LOG(LS_WARNING) << "GetLocalAddress: unable to get local addr, socket=" << s_; @@ -188,7 +219,7 @@ SocketAddress PhysicalSocket::GetRemoteAddress() const { int result = ::getpeername(s_, addr, &addrlen); SocketAddress address; if (result >= 0) { - SocketAddressFromSockAddrStorage(addr_storage, &address); + webrtc::SocketAddressFromSockAddrStorage(addr_storage, &address); } else { RTC_LOG(LS_WARNING) << "GetRemoteAddress: unable to get remote addr, socket=" << s_; @@ -208,7 +239,8 @@ int PhysicalSocket::Bind(const SocketAddress& bind_addr) { // Since the network binder handled binding the socket to the desired // network interface, we don't need to (and shouldn't) include an IP in // the bind() call; bind() just needs to assign a port. - copied_bind_addr.SetIP(GetAnyIP(copied_bind_addr.ipaddr().family())); + copied_bind_addr.SetIP( + webrtc::GetAnyIP(copied_bind_addr.ipaddr().family())); } else if (result == NetworkBindingResult::NOT_IMPLEMENTED) { RTC_LOG(LS_INFO) << "Can't bind socket to network because " "network binding is not implemented for this OS."; @@ -252,9 +284,8 @@ int PhysicalSocket::Connect(const SocketAddress& addr) { } if (addr.IsUnresolvedIP()) { RTC_LOG(LS_VERBOSE) << "Resolving addr in PhysicalSocket::Connect"; - resolver_ = new AsyncResolver(); - resolver_->SignalDone.connect(this, &PhysicalSocket::OnResolveResult); - resolver_->Start(addr); + resolver_ = std::make_unique(); + resolver_->Start(addr, [this] { OnResolveResult(resolver_->result()); }); state_ = CS_CONNECTING; return 0; } @@ -274,7 +305,7 @@ int PhysicalSocket::DoConnect(const SocketAddress& connect_addr) { uint8_t events = DE_READ | DE_WRITE; if (err == 0) { state_ = CS_CONNECTED; - } else if (IsBlockingError(GetError())) { + } else if (webrtc::IsBlockingError(GetError())) { state_ = CS_CONNECTING; events |= DE_CONNECT; } else { @@ -286,12 +317,12 @@ int PhysicalSocket::DoConnect(const SocketAddress& connect_addr) { } int PhysicalSocket::GetError() const { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return error_; } void PhysicalSocket::SetError(int error) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); error_ = error; } @@ -317,8 +348,19 @@ int PhysicalSocket::GetOption(Option opt, int* value) { #if defined(WEBRTC_POSIX) // unshift DSCP value to get six most significant bits of IP DiffServ field *value >>= 2; +#endif + } else if (opt == OPT_SEND_ECN) { +#if defined(WEBRTC_POSIX) + // Least 2 significant bits. + *value = *value & kEcnMask; +#endif + } else if (opt == OPT_RECV_ECN) { +#if defined(WEBRTC_POSIX) + // Least 2 significant bits. + *value = *value & kEcnMask; #endif } + return ret; } @@ -332,10 +374,13 @@ int PhysicalSocket::SetOption(Option opt, int value) { value = (value) ? IP_PMTUDISC_DO : IP_PMTUDISC_DONT; #endif } else if (opt == OPT_DSCP) { -#if defined(WEBRTC_POSIX) - // shift DSCP value to fit six most significant bits of IP DiffServ field - value <<= 2; -#endif + // IP DiffServ consists of DSCP 6 most significant, ECN 2 least + // significant. 
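Illustrative sketch (not part of the change itself): the value written to IP_TOS / IPV6_TCLASS is the DiffServ byte, i.e. the 6-bit DSCP codepoint shifted left by two with the 2-bit ECN codepoint in the low bits, exactly as the dscp_/ecn_ bookkeeping above combines them. A small worked example, with DSCP EF and ECT(0) chosen only for illustration:

#include <cstdint>

constexpr uint8_t kDscpEf = 46;     // DSCP Expedited Forwarding.
constexpr uint8_t kEcnEct0 = 0x02;  // RFC 3168 ECT(0) codepoint.
constexpr uint8_t kTosByte = (kDscpEf << 2) | kEcnEct0;
static_assert(kTosByte == 0xBA, "46 << 2 = 0xB8; OR-ing ECT(0) gives 0xBA");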
+ dscp_ = value << 2; + value = dscp_ + (ecn_ & kEcnMask); + } else if (opt == OPT_SEND_ECN) { + ecn_ = value; + value = dscp_ + (ecn_ & kEcnMask); } #if defined(WEBRTC_POSIX) if (sopt == IPV6_TCLASS) { @@ -372,7 +417,7 @@ int PhysicalSocket::Send(const void* pv, size_t cb) { // We have seen minidumps where this may be false. RTC_DCHECK(sent <= static_cast(cb)); if ((sent > 0 && sent < static_cast(cb)) || - (sent < 0 && IsBlockingError(GetError()))) { + (sent < 0 && webrtc::IsBlockingError(GetError()))) { EnableEvents(DE_WRITE); } return sent; @@ -397,15 +442,15 @@ int PhysicalSocket::SendTo(const void* buffer, // We have seen minidumps where this may be false. RTC_DCHECK(sent <= static_cast(length)); if ((sent > 0 && sent < static_cast(length)) || - (sent < 0 && IsBlockingError(GetError()))) { + (sent < 0 && webrtc::IsBlockingError(GetError()))) { EnableEvents(DE_WRITE); } return sent; } int PhysicalSocket::Recv(void* buffer, size_t length, int64_t* timestamp) { - int received = - DoReadFromSocket(buffer, length, /*out_addr*/ nullptr, timestamp); + int received = DoReadFromSocket(buffer, length, /*out_addr*/ nullptr, + timestamp, /*ecn=*/nullptr); if ((received == 0) && (length != 0)) { // Note: on graceful shutdown, recv can return 0. In this case, we // pretend it is blocking, and then signal close, so that simplifying @@ -420,7 +465,7 @@ int PhysicalSocket::Recv(void* buffer, size_t length, int64_t* timestamp) { UpdateLastError(); int error = GetError(); - bool success = (received >= 0) || IsBlockingError(error); + bool success = (received >= 0) || webrtc::IsBlockingError(error); if (udp_ || success) { EnableEvents(DE_READ); } @@ -434,10 +479,35 @@ int PhysicalSocket::RecvFrom(void* buffer, size_t length, SocketAddress* out_addr, int64_t* timestamp) { - int received = DoReadFromSocket(buffer, length, out_addr, timestamp); + int received = DoReadFromSocket(buffer, length, out_addr, timestamp, nullptr); + UpdateLastError(); int error = GetError(); - bool success = (received >= 0) || IsBlockingError(error); + bool success = (received >= 0) || webrtc::IsBlockingError(error); + if (udp_ || success) { + EnableEvents(DE_READ); + } + if (!success) { + RTC_LOG_F(LS_VERBOSE) << "Error = " << error; + } + return received; +} + +int PhysicalSocket::RecvFrom(ReceiveBuffer& buffer) { + int64_t timestamp = -1; + static constexpr int BUF_SIZE = 64 * 1024; + buffer.payload.EnsureCapacity(BUF_SIZE); + + int received = DoReadFromSocket( + buffer.payload.data(), buffer.payload.capacity(), &buffer.source_address, + ×tamp, ecn_ ? &buffer.ecn : nullptr); + buffer.payload.SetSize(received > 0 ? 
received : 0); + if (received > 0 && timestamp != -1) { + buffer.arrival_time = Timestamp::Micros(timestamp); + } + UpdateLastError(); + int error = GetError(); + bool success = (received >= 0) || webrtc::IsBlockingError(error); if (udp_ || success) { EnableEvents(DE_READ); } @@ -450,62 +520,57 @@ int PhysicalSocket::RecvFrom(void* buffer, int PhysicalSocket::DoReadFromSocket(void* buffer, size_t length, SocketAddress* out_addr, - int64_t* timestamp) { + int64_t* timestamp, + EcnMarking* ecn) { sockaddr_storage addr_storage; socklen_t addr_len = sizeof(addr_storage); sockaddr* addr = reinterpret_cast(&addr_storage); #if defined(WEBRTC_POSIX) int received = 0; - if (read_scm_timestamp_experiment_) { - iovec iov = {.iov_base = buffer, .iov_len = length}; - msghdr msg = {.msg_iov = &iov, .msg_iovlen = 1}; - if (out_addr) { - out_addr->Clear(); - msg.msg_name = addr; - msg.msg_namelen = addr_len; - } - char control[CMSG_SPACE(sizeof(struct timeval))] = {}; - if (timestamp) { - *timestamp = -1; - msg.msg_control = &control; - msg.msg_controllen = sizeof(control); - } - received = ::recvmsg(s_, &msg, 0); - if (received <= 0) { - // An error occured or shut down. - return received; - } - if (timestamp) { - struct cmsghdr* cmsg; - for (cmsg = CMSG_FIRSTHDR(&msg); cmsg; cmsg = CMSG_NXTHDR(&msg, cmsg)) { - if (cmsg->cmsg_level != SOL_SOCKET) - continue; - if (cmsg->cmsg_type == SCM_TIMESTAMP) { - timeval* ts = reinterpret_cast(CMSG_DATA(cmsg)); - *timestamp = - rtc::kNumMicrosecsPerSec * static_cast(ts->tv_sec) + - static_cast(ts->tv_usec); - break; + iovec iov = {.iov_base = buffer, .iov_len = length}; + msghdr msg = {.msg_iov = &iov, .msg_iovlen = 1}; + if (out_addr) { + out_addr->Clear(); + msg.msg_name = addr; + msg.msg_namelen = addr_len; + } + // TODO(bugs.webrtc.org/15368): What size is needed? IPV6_TCLASS is supposed + // to be an int. Why is a larger size needed? + char control[CMSG_SPACE(sizeof(struct timeval) + 5 * sizeof(int))] = {}; + if (timestamp || ecn) { + *timestamp = -1; + msg.msg_control = &control; + msg.msg_controllen = sizeof(control); + } + received = ::recvmsg(s_, &msg, 0); + if (received <= 0) { + // An error occured or shut down. 
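Illustrative sketch (not part of the change itself): the ReceiveBuffer overload added above delivers payload, source address and ECN marking in a single call instead of the raw-pointer RecvFrom. The ReceiveBuffer construction (wrapping a caller-owned Buffer) and the `socket` variable are assumptions made only for this example; the actual declaration lives in rtc_base/socket.h, which is not part of this hunk.

webrtc::Buffer payload;
webrtc::Socket::ReceiveBuffer receive_buffer(payload);  // Assumed constructor.
int received = socket->RecvFrom(receive_buffer);
if (received > 0) {
  const bool ce_marked = receive_buffer.ecn == webrtc::EcnMarking::kCe;
  RTC_LOG(LS_VERBOSE) << "Received " << receive_buffer.payload.size()
                      << " bytes from "
                      << receive_buffer.source_address.ToString()
                      << (ce_marked ? " (CE marked)" : "");
}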
+ return received; + } + if (timestamp || ecn) { + struct cmsghdr* cmsg; + for (cmsg = CMSG_FIRSTHDR(&msg); cmsg; cmsg = CMSG_NXTHDR(&msg, cmsg)) { + if (ecn) { + if ((cmsg->cmsg_type == IPV6_TCLASS && + cmsg->cmsg_level == IPPROTO_IPV6) || + (cmsg->cmsg_type == IP_TOS && cmsg->cmsg_level == IPPROTO_IP)) { + *ecn = EcnFromDs(CMSG_DATA(cmsg)[0]); } } - } - if (out_addr) { - SocketAddressFromSockAddrStorage(addr_storage, out_addr); - } - } else { // !read_scm_timestamp_experiment_ - if (out_addr) { - received = ::recvfrom(s_, static_cast(buffer), - static_cast(length), 0, addr, &addr_len); - SocketAddressFromSockAddrStorage(addr_storage, out_addr); - } else { - received = - ::recv(s_, static_cast(buffer), static_cast(length), 0); - } - if (timestamp) { - *timestamp = GetSocketRecvTimestamp(s_); + if (cmsg->cmsg_level != SOL_SOCKET) + continue; + if (timestamp && cmsg->cmsg_type == SCM_TIMESTAMP) { + timeval ts; + std::memcpy(static_cast(&ts), CMSG_DATA(cmsg), sizeof(ts)); + *timestamp = kNumMicrosecsPerSec * static_cast(ts.tv_sec) + + static_cast(ts.tv_usec); + } } } + if (out_addr) { + webrtc::SocketAddressFromSockAddrStorage(addr_storage, out_addr); + } return received; #else @@ -551,7 +616,7 @@ Socket* PhysicalSocket::Accept(SocketAddress* out_addr) { if (s == INVALID_SOCKET) return nullptr; if (out_addr != nullptr) - SocketAddressFromSockAddrStorage(addr_storage, out_addr); + webrtc::SocketAddressFromSockAddrStorage(addr_storage, out_addr); return ss_->WrapSocket(s); } @@ -564,8 +629,7 @@ int PhysicalSocket::Close() { state_ = CS_CLOSED; SetEnabledEvents(0); if (resolver_) { - resolver_->Destroy(false); - resolver_ = nullptr; + resolver_.reset(); } return err; } @@ -589,14 +653,15 @@ int PhysicalSocket::DoSendTo(SOCKET socket, return ::sendto(socket, buf, len, flags, dest_addr, addrlen); } -void PhysicalSocket::OnResolveResult(AsyncResolverInterface* resolver) { - if (resolver != resolver_) { - return; - } - - int error = resolver_->GetError(); +void PhysicalSocket::OnResolveResult(const AsyncDnsResolverResult& result) { + int error = result.GetError(); if (error == 0) { - error = DoConnect(resolver_->address()); + SocketAddress address; + if (result.GetResolvedAddress(AF_INET, &address)) { + error = DoConnect(address); + } else { + Close(); + } } else { Close(); } @@ -676,9 +741,66 @@ int PhysicalSocket::TranslateOption(Option opt, int* slevel, int* sopt) { #else RTC_LOG(LS_WARNING) << "Socket::OPT_DSCP not supported."; return -1; +#endif + case OPT_SEND_ECN: +#if defined(WEBRTC_POSIX) + if (family_ == AF_INET6) { + *slevel = IPPROTO_IPV6; + *sopt = IPV6_TCLASS; + } else { + *slevel = IPPROTO_IP; + *sopt = IP_TOS; + } + break; +#else + RTC_LOG(LS_WARNING) << "Socket::OPT_SEND_ESN not supported."; + return -1; +#endif + case OPT_RECV_ECN: +#if defined(WEBRTC_POSIX) + if (family_ == AF_INET6) { + *slevel = IPPROTO_IPV6; + *sopt = IPV6_RECVTCLASS; + } else { + *slevel = IPPROTO_IP; + *sopt = IP_RECVTOS; + } + break; +#else + RTC_LOG(LS_WARNING) << "Socket::OPT_RECV_ECN not supported."; + return -1; #endif case OPT_RTP_SENDTIME_EXTN_ID: return -1; // No logging is necessary as this not a OS socket option. 
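Illustrative sketch (not part of the change itself): with the option translations added above, callers can request ECN reporting on a UDP socket and keepalive tuning on a TCP socket through the generic Socket::SetOption() interface. The socket pointers and the concrete values are assumptions made only for this example.

#include "rtc_base/socket.h"

void TuneSockets(webrtc::Socket* udp_socket, webrtc::Socket* tcp_socket) {
  // Ask the kernel to report ECN codepoints on received datagrams.
  udp_socket->SetOption(webrtc::Socket::OPT_RECV_ECN, 1);
  // Enable TCP keepalive with a 30 s idle time, 10 s probe interval and up to
  // 3 unanswered probes (TCP_KEEPIDLE/TCP_KEEPINTVL are in seconds).
  tcp_socket->SetOption(webrtc::Socket::OPT_KEEPALIVE, 1);
  tcp_socket->SetOption(webrtc::Socket::OPT_TCP_KEEPIDLE, 30);
  tcp_socket->SetOption(webrtc::Socket::OPT_TCP_KEEPINTVL, 10);
  tcp_socket->SetOption(webrtc::Socket::OPT_TCP_KEEPCNT, 3);
}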
+ case OPT_KEEPALIVE: + *slevel = SOL_SOCKET; + *sopt = SO_KEEPALIVE; + break; + case OPT_TCP_KEEPCNT: + *slevel = IPPROTO_TCP; + *sopt = TCP_KEEPCNT; + break; + case OPT_TCP_KEEPIDLE: + *slevel = IPPROTO_TCP; +#if !defined(WEBRTC_MAC) + *sopt = TCP_KEEPIDLE; +#else + *sopt = TCP_KEEPALIVE; +#endif + break; + case OPT_TCP_KEEPINTVL: + *slevel = IPPROTO_TCP; + *sopt = TCP_KEEPINTVL; + break; + case OPT_TCP_USER_TIMEOUT: +#if defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID) + *slevel = IPPROTO_TCP; + *sopt = TCP_USER_TIMEOUT; + break; +#else + RTC_LOG(LS_WARNING) << "Socket::OPT_TCP_USER_TIMEOUT not supported."; + return -1; +#endif default: RTC_DCHECK_NOTREACHED(); return -1; @@ -720,13 +842,10 @@ bool SocketDispatcher::Initialize() { ioctlsocket(s_, FIONBIO, &argp); #elif defined(WEBRTC_POSIX) fcntl(s_, F_SETFL, fcntl(s_, F_GETFL, 0) | O_NONBLOCK); - if (!IsScmTimeStampExperimentDisabled()) { - int value = 1; - // Attempt to get receive packet timestamp from the socket. - if (::setsockopt(s_, SOL_SOCKET, SO_TIMESTAMP, &value, sizeof(value)) != - 0) { - RTC_DLOG(LS_ERROR) << "::setsockopt failed. errno: " << LAST_SYSTEM_ERROR; - } + int value = 1; + // Attempt to get receive packet timestamp from the socket. + if (::setsockopt(s_, SOL_SOCKET, SO_TIMESTAMP, &value, sizeof(value)) != 0) { + RTC_DLOG(LS_ERROR) << "::setsockopt failed. errno: " << LAST_SYSTEM_ERROR; } #endif @@ -736,10 +855,11 @@ bool SocketDispatcher::Initialize() { // we attempt to write to such a socket, SIGPIPE will be raised, which by // default will terminate the process, which we don't want. By specifying // this socket option, SIGPIPE will be disabled for the socket. - int value = 1; + value = 1; if (::setsockopt(s_, SOL_SOCKET, SO_NOSIGPIPE, &value, sizeof(value)) != 0) { RTC_DLOG(LS_ERROR) << "::setsockopt failed. errno: " << LAST_SYSTEM_ERROR; } + #endif ss_->Add(this); return true; @@ -1045,7 +1165,7 @@ class Signaler : public Dispatcher { } virtual void Signal() { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); if (!fSignaled_) { const uint8_t b[1] = {0}; const ssize_t res = write(afd_[1], b, sizeof(b)); @@ -1056,11 +1176,11 @@ class Signaler : public Dispatcher { uint32_t GetRequestedEvents() override { return DE_READ; } - void OnEvent(uint32_t ff, int err) override { + void OnEvent(uint32_t /* ff */, int /* err */) override { // It is not possible to perfectly emulate an auto-resetting event with // pipes. This simulates it by resetting before the event is handled. - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); if (fSignaled_) { uint8_t b[4]; // Allow for reading more than 1 byte, but expect 1. const ssize_t res = read(afd_[0], b, sizeof(b)); @@ -1078,7 +1198,7 @@ class Signaler : public Dispatcher { PhysicalSocketServer* const ss_; const std::array afd_; bool fSignaled_ RTC_GUARDED_BY(mutex_); - webrtc::Mutex mutex_; + Mutex mutex_; bool& flag_to_clear_; }; @@ -1238,7 +1358,7 @@ void PhysicalSocketServer::Remove(Dispatcher* pdispatcher) { #endif // WEBRTC_USE_EPOLL } -void PhysicalSocketServer::Update(Dispatcher* pdispatcher) { +void PhysicalSocketServer::Update([[maybe_unused]] Dispatcher* pdispatcher) { #if defined(WEBRTC_USE_EPOLL) if (epoll_fd_ == INVALID_SOCKET) { return; @@ -1254,16 +1374,15 @@ void PhysicalSocketServer::Update(Dispatcher* pdispatcher) { #endif } -int PhysicalSocketServer::ToCmsWait(webrtc::TimeDelta max_wait_duration) { +int PhysicalSocketServer::ToCmsWait(TimeDelta max_wait_duration) { return max_wait_duration == Event::kForever ? 
kForeverMs - : max_wait_duration.RoundUpTo(webrtc::TimeDelta::Millis(1)).ms(); + : max_wait_duration.RoundUpTo(TimeDelta::Millis(1)).ms(); } #if defined(WEBRTC_POSIX) -bool PhysicalSocketServer::Wait(webrtc::TimeDelta max_wait_duration, - bool process_io) { +bool PhysicalSocketServer::Wait(TimeDelta max_wait_duration, bool process_io) { // We don't support reentrant waiting. RTC_DCHECK(!waiting_); ScopedSetTrue s(&waiting_); @@ -1357,7 +1476,15 @@ static void ProcessEvents(Dispatcher* dispatcher, static void ProcessPollEvents(Dispatcher* dispatcher, const pollfd& pfd) { bool readable = (pfd.revents & (POLLIN | POLLPRI)); bool writable = (pfd.revents & POLLOUT); - bool error = (pfd.revents & (POLLRDHUP | POLLERR | POLLHUP)); + + // Linux and Fuchsia define POLLRDHUP, which is set when the peer has + // disconnected. On other platforms, we only check for POLLHUP. +#if defined(WEBRTC_LINUX) || defined(WEBRTC_FUCHSIA) + constexpr short kEvents = POLLRDHUP | POLLERR | POLLHUP; +#else + constexpr short kEvents = POLLERR | POLLHUP; +#endif + bool error = (pfd.revents & kEvents); ProcessEvents(dispatcher, readable, writable, error, error); } @@ -1394,7 +1521,7 @@ bool PhysicalSocketServer::WaitSelect(int cmsWait, bool process_io) { ptvWait = &tvWait; // Calculate when to return - stop_us = rtc::TimeMicros() + cmsWait * 1000; + stop_us = TimeMicros() + cmsWait * 1000; } fd_set fdsRead; @@ -1493,10 +1620,10 @@ bool PhysicalSocketServer::WaitSelect(int cmsWait, bool process_io) { if (ptvWait) { ptvWait->tv_sec = 0; ptvWait->tv_usec = 0; - int64_t time_left_us = stop_us - rtc::TimeMicros(); + int64_t time_left_us = stop_us - TimeMicros(); if (time_left_us > 0) { - ptvWait->tv_sec = time_left_us / rtc::kNumMicrosecsPerSec; - ptvWait->tv_usec = time_left_us % rtc::kNumMicrosecsPerSec; + ptvWait->tv_sec = time_left_us / kNumMicrosecsPerSec; + ptvWait->tv_usec = time_left_us % kNumMicrosecsPerSec; } } } @@ -1699,7 +1826,7 @@ bool PhysicalSocketServer::WaitPoll(int cmsWait, bool process_io) { int64_t msStop = -1; if (cmsWait != kForeverMs) { msWait = cmsWait; - msStop = TimeAfter(cmsWait); + msStop = webrtc::TimeAfter(cmsWait); } std::vector pollfds; @@ -1707,7 +1834,7 @@ bool PhysicalSocketServer::WaitPoll(int cmsWait, bool process_io) { while (fWait_) { { - CritScope cr(&crit_); + webrtc::CritScope cr(&crit_); current_dispatcher_keys_.clear(); pollfds.clear(); pollfds.reserve(dispatcher_by_key_.size()); @@ -1741,7 +1868,7 @@ bool PhysicalSocketServer::WaitPoll(int cmsWait, bool process_io) { return true; } else { // We have signaled descriptors - CritScope cr(&crit_); + webrtc::CritScope cr(&crit_); // Iterate only on the dispatchers whose file descriptors were passed into // poll; this avoids the ABA problem (a socket being destroyed and a new // one created with the same file descriptor). @@ -1754,7 +1881,7 @@ bool PhysicalSocketServer::WaitPoll(int cmsWait, bool process_io) { } if (cmsWait != kForeverMs) { - msWait = TimeDiff(msStop, TimeMillis()); + msWait = webrtc::TimeDiff(msStop, webrtc::TimeMillis()); if (msWait < 0) { // Return success on timeout. 
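ProcessPollEvents above folds revents into a readable/writable/error triple, consulting POLLRDHUP only where the platform defines it. A standalone sketch of the same classification around a single poll() call, kept separate from the server's dispatch machinery:

// Sketch: classify poll() results the same way ProcessPollEvents does.
#include <poll.h>

struct FdState {
  bool readable = false;
  bool writable = false;
  bool error = false;
};

FdState WaitOnce(int fd, int timeout_ms) {
  pollfd pfd = {fd, POLLIN | POLLPRI | POLLOUT, 0};
  FdState state;
  if (::poll(&pfd, 1, timeout_ms) <= 0)
    return state;  // Timed out, or poll() itself failed.
  state.readable = (pfd.revents & (POLLIN | POLLPRI)) != 0;
  state.writable = (pfd.revents & POLLOUT) != 0;
#if defined(POLLRDHUP)
  // Peer shutdown is reported separately where POLLRDHUP exists.
  state.error = (pfd.revents & (POLLRDHUP | POLLERR | POLLHUP)) != 0;
#else
  state.error = (pfd.revents & (POLLERR | POLLHUP)) != 0;
#endif
  return state;
}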
return true; @@ -1779,7 +1906,7 @@ bool PhysicalSocketServer::Wait(webrtc::TimeDelta max_wait_duration, int cmsWait = ToCmsWait(max_wait_duration); int64_t cmsTotal = cmsWait; int64_t cmsElapsed = 0; - int64_t msStart = Time(); + int64_t msStart = webrtc::Time(); fWait_ = true; while (fWait_) { @@ -1789,7 +1916,7 @@ bool PhysicalSocketServer::Wait(webrtc::TimeDelta max_wait_duration, events.push_back(socket_ev_); { - CritScope cr(&crit_); + webrtc::CritScope cr(&crit_); // Get a snapshot of all current dispatchers; this is used to avoid the // ABA problem (see later comment) and avoids the dispatcher_by_key_ // iterator being invalidated by calling CheckSignalClose, which may @@ -1845,7 +1972,7 @@ bool PhysicalSocketServer::Wait(webrtc::TimeDelta max_wait_duration, return true; } else { // Figure out which one it is and call it - CritScope cr(&crit_); + webrtc::CritScope cr(&crit_); int index = dw - WSA_WAIT_EVENT_0; if (index > 0) { --index; // The first event is the socket event @@ -1938,7 +2065,7 @@ bool PhysicalSocketServer::Wait(webrtc::TimeDelta max_wait_duration, // Break? if (!fWait_) break; - cmsElapsed = TimeSince(msStart); + cmsElapsed = webrtc::TimeSince(msStart); if ((cmsWait != kForeverMs) && (cmsElapsed >= cmsWait)) { break; } @@ -1949,4 +2076,4 @@ bool PhysicalSocketServer::Wait(webrtc::TimeDelta max_wait_duration, } #endif // WEBRTC_WIN -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/physical_socket_server.h b/rtc_base/physical_socket_server.h index 650db80931..3c9eca5561 100644 --- a/rtc_base/physical_socket_server.h +++ b/rtc_base/physical_socket_server.h @@ -11,30 +11,39 @@ #ifndef RTC_BASE_PHYSICAL_SOCKET_SERVER_H_ #define RTC_BASE_PHYSICAL_SOCKET_SERVER_H_ +#include + +#include "api/async_dns_resolver.h" +#include "api/transport/ecn_marking.h" #include "api/units/time_delta.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/third_party/sigslot/sigslot.h" #if defined(WEBRTC_POSIX) #if defined(WEBRTC_LINUX) // On Linux, use epoll. #include + #define WEBRTC_USE_EPOLL 1 -#elif defined(WEBRTC_FUCHSIA) +#elif defined(WEBRTC_FUCHSIA) || defined(WEBRTC_MAC) // Fuchsia implements select and poll but not epoll, and testing shows that poll // is faster than select. #include + #define WEBRTC_USE_POLL 1 #else // On other POSIX systems, use select by default. -#endif // WEBRTC_LINUX, WEBRTC_FUCHSIA +#endif // WEBRTC_LINUX, WEBRTC_FUCHSIA, WEBRTC_MAC #endif // WEBRTC_POSIX #include +#include #include +#include #include #include -#include "rtc_base/async_resolver.h" -#include "rtc_base/async_resolver_interface.h" #include "rtc_base/deprecated/recursive_critical_section.h" #include "rtc_base/socket_server.h" #include "rtc_base/synchronization/mutex.h" @@ -45,7 +54,9 @@ typedef int SOCKET; #endif // WEBRTC_POSIX -namespace rtc { +namespace webrtc { + +class Signaler; // Event constants for the Dispatcher class. 
enum DispatcherEvent { @@ -56,8 +67,6 @@ enum DispatcherEvent { DE_ACCEPT = 0x0010, }; -class Signaler; - class Dispatcher { public: virtual ~Dispatcher() {} @@ -86,7 +95,7 @@ class RTC_EXPORT PhysicalSocketServer : public SocketServer { virtual Socket* WrapSocket(SOCKET s); // SocketServer: - bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) override; + bool Wait(TimeDelta max_wait_duration, bool process_io) override; void WakeUp() override; void Add(Dispatcher* dispatcher); @@ -99,7 +108,7 @@ class RTC_EXPORT PhysicalSocketServer : public SocketServer { // A local historical definition of "foreverness", in milliseconds. static constexpr int kForeverMs = -1; - static int ToCmsWait(webrtc::TimeDelta max_wait_duration); + static int ToCmsWait(TimeDelta max_wait_duration); #if defined(WEBRTC_POSIX) bool WaitSelect(int cmsWait, bool process_io); @@ -119,9 +128,6 @@ class RTC_EXPORT PhysicalSocketServer : public SocketServer { const int epoll_fd_ = INVALID_SOCKET; #elif defined(WEBRTC_USE_POLL) - void AddPoll(Dispatcher* dispatcher, uint64_t key); - void RemovePoll(Dispatcher* dispatcher); - void UpdatePoll(Dispatcher* dispatcher, uint64_t key); bool WaitPoll(int cmsWait, bool process_io); #endif // WEBRTC_USE_EPOLL, WEBRTC_USE_POLL @@ -182,10 +188,12 @@ class PhysicalSocket : public Socket, public sigslot::has_slots<> { const SocketAddress& addr) override; int Recv(void* buffer, size_t length, int64_t* timestamp) override; + // TODO(webrtc:15368): Deprecate and remove. int RecvFrom(void* buffer, size_t length, SocketAddress* out_addr, int64_t* timestamp) override; + int RecvFrom(ReceiveBuffer& buffer) override; int Listen(int backlog) override; Socket* Accept(SocketAddress* out_addr) override; @@ -216,9 +224,10 @@ class PhysicalSocket : public Socket, public sigslot::has_slots<> { int DoReadFromSocket(void* buffer, size_t length, SocketAddress* out_addr, - int64_t* timestamp); + int64_t* timestamp, + EcnMarking* ecn); - void OnResolveResult(AsyncResolverInterface* resolver); + void OnResolveResult(const AsyncDnsResolverResult& resolver); void UpdateLastError(); void MaybeRemapSendError(); @@ -234,17 +243,18 @@ class PhysicalSocket : public Socket, public sigslot::has_slots<> { SOCKET s_; bool udp_; int family_ = 0; - mutable webrtc::Mutex mutex_; + mutable Mutex mutex_; int error_ RTC_GUARDED_BY(mutex_); ConnState state_; - AsyncResolver* resolver_; + std::unique_ptr resolver_; + uint8_t dscp_ = 0; // 6bit. + uint8_t ecn_ = 0; // 2bits. #if !defined(NDEBUG) std::string dbg_addr_; #endif private: - const bool read_scm_timestamp_experiment_; uint8_t enabled_events_ = 0; }; @@ -297,6 +307,23 @@ class SocketDispatcher : public Dispatcher, public PhysicalSocket { #endif }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
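This trailer is the transition pattern applied to most headers in this patch: definitions move into namespace webrtc, and a guarded block re-exports them into rtc for callers that have not migrated yet. A stripped-down sketch of such a header, with ExampleThing as a made-up placeholder type:

// Sketch of the transition-header pattern used throughout this patch.
// `ExampleThing` is a placeholder name, not a real WebRTC type.
#ifndef EXAMPLE_THING_H_
#define EXAMPLE_THING_H_

namespace webrtc {

class ExampleThing {
 public:
  int Value() const { return 42; }
};

}  // namespace webrtc

// Re-export into the old namespace for backwards compatibility.
#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES
namespace rtc {
using ::webrtc::ExampleThing;
}  // namespace rtc
#endif  // WEBRTC_ALLOW_DEPRECATED_NAMESPACES

#endif  // EXAMPLE_THING_H_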
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::DE_ACCEPT; +using ::webrtc::DE_CLOSE; +using ::webrtc::DE_CONNECT; +using ::webrtc::DE_READ; +using ::webrtc::DE_WRITE; +using ::webrtc::Dispatcher; +using ::webrtc::DispatcherEvent; +using ::webrtc::PhysicalSocket; +using ::webrtc::PhysicalSocketServer; +using ::webrtc::SocketDispatcher; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_PHYSICAL_SOCKET_SERVER_H_ diff --git a/rtc_base/physical_socket_server_unittest.cc b/rtc_base/physical_socket_server_unittest.cc index de64a31812..ac1743e0f9 100644 --- a/rtc_base/physical_socket_server_unittest.cc +++ b/rtc_base/physical_socket_server_unittest.cc @@ -10,37 +10,39 @@ #include "rtc_base/physical_socket_server.h" -#include - #include +#include #include -#include "rtc_base/gunit.h" +#include "api/test/rtc_error_matchers.h" #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/net_helpers.h" #include "rtc_base/net_test_helpers.h" #include "rtc_base/network_monitor.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" #include "rtc_base/socket_unittest.h" #include "rtc_base/test_utils.h" #include "rtc_base/thread.h" -#include "test/field_trial.h" +#include "test/gmock.h" #include "test/gtest.h" - -namespace rtc { +#include "test/wait_until.h" #define MAYBE_SKIP_IPV4 \ - if (!HasIPv4Enabled()) { \ + if (!::webrtc::HasIPv4Enabled()) { \ RTC_LOG(LS_INFO) << "No IPv4... skipping"; \ return; \ } #define MAYBE_SKIP_IPV6 \ - if (!HasIPv6Enabled()) { \ + if (!::webrtc::HasIPv6Enabled()) { \ RTC_LOG(LS_INFO) << "No IPv6... skipping"; \ return; \ } +namespace webrtc { + class PhysicalSocketTest; class FakeSocketDispatcher : public SocketDispatcher { @@ -128,7 +130,7 @@ class PhysicalSocketTest : public SocketTest { void WritableAfterPartialWrite(const IPAddress& loopback); FakePhysicalSocketServer server_; - rtc::AutoSocketServerThread thread_; + AutoSocketServerThread thread_; bool fail_accept_; int max_send_size_; }; @@ -198,7 +200,7 @@ TEST_F(PhysicalSocketTest, TestConnectFailIPv4) { } void PhysicalSocketTest::ConnectInternalAcceptError(const IPAddress& loopback) { - webrtc::testing::StreamSink sink; + testing::StreamSink sink; SocketAddress accept_addr; // Create two clients. @@ -206,13 +208,13 @@ void PhysicalSocketTest::ConnectInternalAcceptError(const IPAddress& loopback) { server_.CreateSocket(loopback.family(), SOCK_STREAM)); sink.Monitor(client1.get()); EXPECT_EQ(Socket::CS_CLOSED, client1->GetState()); - EXPECT_TRUE(IsUnspecOrEmptyIP(client1->GetLocalAddress().ipaddr())); + EXPECT_TRUE(webrtc::IsUnspecOrEmptyIP(client1->GetLocalAddress().ipaddr())); std::unique_ptr client2( server_.CreateSocket(loopback.family(), SOCK_STREAM)); sink.Monitor(client2.get()); EXPECT_EQ(Socket::CS_CLOSED, client2->GetState()); - EXPECT_TRUE(IsUnspecOrEmptyIP(client2->GetLocalAddress().ipaddr())); + EXPECT_TRUE(webrtc::IsUnspecOrEmptyIP(client2->GetLocalAddress().ipaddr())); // Create server and listen. std::unique_ptr server( @@ -238,8 +240,11 @@ void PhysicalSocketTest::ConnectInternalAcceptError(const IPAddress& loopback) { EXPECT_FALSE(sink.Check(client1.get(), webrtc::testing::SSE_CLOSE)); // Server has pending connection, try to accept it (will fail). 
- EXPECT_TRUE_WAIT((sink.Check(server.get(), webrtc::testing::SSE_READ)), - kTimeout); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return (sink.Check(server.get(), webrtc::testing::SSE_READ)); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); // Simulate "::accept" returning an error. SetFailAccept(true); std::unique_ptr accepted(server->Accept(&accept_addr)); @@ -262,8 +267,11 @@ void PhysicalSocketTest::ConnectInternalAcceptError(const IPAddress& loopback) { EXPECT_FALSE(sink.Check(client2.get(), webrtc::testing::SSE_CLOSE)); // Server has pending connection, try to accept it (will succeed). - EXPECT_TRUE_WAIT((sink.Check(server.get(), webrtc::testing::SSE_READ)), - kTimeout); + EXPECT_THAT( + webrtc::WaitUntil( + [&] { return (sink.Check(server.get(), webrtc::testing::SSE_READ)); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); SetFailAccept(false); std::unique_ptr accepted2(server->Accept(&accept_addr)); ASSERT_TRUE(accepted2); @@ -471,18 +479,18 @@ TEST_F(PhysicalSocketTest, TestSocketRecvTimestampIPv6) { SocketTest::TestSocketRecvTimestampIPv6(); } -#if !defined(WEBRTC_MAC) -TEST_F(PhysicalSocketTest, TestSocketRecvTimestampIPv4ScmExperimentDisabled) { - MAYBE_SKIP_IPV4; - webrtc::test::ScopedFieldTrials trial("WebRTC-SCM-Timestamp/Disabled/"); - SocketTest::TestSocketRecvTimestampIPv4(); +#if !defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) +// TODO(bugs.webrtc.org/15368): IpV4 fails on IOS and MAC. IPV6 works. +TEST_F(PhysicalSocketTest, TestSocketSendRecvWithEcnIPv4) { + MAYBE_SKIP_IPV6; + SocketTest::TestSocketSendRecvWithEcnIPV4(); } +#endif -TEST_F(PhysicalSocketTest, TestSocketRecvTimestampIPv6ScmExperimentDisabled) { - webrtc::test::ScopedFieldTrials trial("WebRTC-SCM-Timestamp/Disabled/"); - SocketTest::TestSocketRecvTimestampIPv6(); +TEST_F(PhysicalSocketTest, TestSocketSendRecvWithEcnIPv6) { + MAYBE_SKIP_IPV6; + SocketTest::TestSocketSendRecvWithEcnIPV6(); } -#endif // Verify that if the socket was unable to be bound to a real network interface // (not loopback), Bind will return an error. @@ -532,4 +540,4 @@ TEST_F(PhysicalSocketTest, UdpSocketRecvTimestampUseRtcEpochIPv6) { SocketTest::TestUdpSocketRecvTimestampUseRtcEpochIPv6(); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/platform_thread.cc b/rtc_base/platform_thread.cc index 6d369d747e..53381a4d4e 100644 --- a/rtc_base/platform_thread.cc +++ b/rtc_base/platform_thread.cc @@ -11,7 +11,13 @@ #include "rtc_base/platform_thread.h" #include -#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "rtc_base/platform_thread_types.h" #if !defined(WEBRTC_WIN) #include @@ -19,7 +25,7 @@ #include "rtc_base/checks.h" -namespace rtc { +namespace webrtc { namespace { #if defined(WEBRTC_WIN) @@ -41,8 +47,10 @@ bool SetPriority(ThreadPriority priority) { #if defined(WEBRTC_WIN) return SetThreadPriority(GetCurrentThread(), Win32PriorityFromThreadPriority(priority)) != FALSE; -#elif defined(__native_client__) || defined(WEBRTC_FUCHSIA) - // Setting thread priorities is not supported in NaCl or Fuchsia. +#elif defined(__native_client__) || defined(WEBRTC_FUCHSIA) || \ + (defined(__EMSCRIPTEN__) && !defined(__EMSCRIPTEN_PTHREADS__)) + // Setting thread priorities is not supported in NaCl, Fuchsia or Emscripten + // without pthreads. 
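The test migrations above swap the EXPECT_TRUE_WAIT macro for webrtc::WaitUntil plus gmock matchers. A condensed sketch of that pattern, assuming only the call shape visible in these hunks (a polling predicate, a matcher, and an optional {.timeout} settings struct):

// Sketch of the WaitUntil-based polling pattern used in these tests.
#include "api/test/rtc_error_matchers.h"
#include "api/units/time_delta.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/wait_until.h"

TEST(WaitUntilSketch, PollsUntilConditionHolds) {
  int attempts = 0;
  EXPECT_THAT(webrtc::WaitUntil([&] { return ++attempts >= 3; },
                                ::testing::IsTrue(),
                                {.timeout = webrtc::TimeDelta::Seconds(5)}),
              webrtc::IsRtcOk());
}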
return true; #elif defined(WEBRTC_CHROMIUM_BUILD) && defined(WEBRTC_LINUX) // TODO(tommi): Switch to the same mechanism as Chromium uses for changing @@ -111,14 +119,14 @@ PlatformThread::PlatformThread(Handle handle, bool joinable) PlatformThread::PlatformThread(PlatformThread&& rhs) : handle_(rhs.handle_), joinable_(rhs.joinable_) { - rhs.handle_ = absl::nullopt; + rhs.handle_ = std::nullopt; } PlatformThread& PlatformThread::operator=(PlatformThread&& rhs) { Finalize(); handle_ = rhs.handle_; joinable_ = rhs.joinable_; - rhs.handle_ = absl::nullopt; + rhs.handle_ = std::nullopt; return *this; } @@ -142,7 +150,7 @@ PlatformThread PlatformThread::SpawnDetached( /*joinable=*/false); } -absl::optional PlatformThread::GetHandle() const { +std::optional PlatformThread::GetHandle() const { return handle_; } @@ -165,7 +173,7 @@ void PlatformThread::Finalize() { if (joinable_) RTC_CHECK_EQ(0, pthread_join(*handle_, nullptr)); #endif - handle_ = absl::nullopt; + handle_ = std::nullopt; } PlatformThread PlatformThread::SpawnThread( @@ -180,7 +188,7 @@ PlatformThread PlatformThread::SpawnThread( auto start_thread_function_ptr = new std::function([thread_function = std::move(thread_function), name = std::string(name), attributes] { - rtc::SetCurrentThreadName(name.c_str()); + SetCurrentThreadName(name.c_str()); SetPriority(attributes.priority); thread_function(); }); @@ -208,4 +216,4 @@ PlatformThread PlatformThread::SpawnThread( return PlatformThread(handle, joinable); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/platform_thread.h b/rtc_base/platform_thread.h index befd61849d..df7ca46fda 100644 --- a/rtc_base/platform_thread.h +++ b/rtc_base/platform_thread.h @@ -17,11 +17,12 @@ #include #endif +#include + #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "rtc_base/platform_thread_types.h" -namespace rtc { +namespace webrtc { enum class ThreadPriority { kLow = 1, @@ -70,7 +71,7 @@ class PlatformThread final { // For a PlatformThread that's been spawned joinable, the destructor suspends // the calling thread until the created thread exits unless the thread has // already exited. - virtual ~PlatformThread(); + ~PlatformThread(); // Finalizes any allocated resources. // For a PlatformThread that's been spawned joinable, Finalize() suspends @@ -97,7 +98,7 @@ class PlatformThread final { ThreadAttributes attributes = ThreadAttributes()); // Returns the base platform thread handle of this thread. - absl::optional GetHandle() const; + std::optional GetHandle() const; #if defined(WEBRTC_WIN) // Queue a Windows APC function that runs when the thread is alertable. @@ -111,10 +112,20 @@ class PlatformThread final { ThreadAttributes attributes, bool joinable); - absl::optional handle_; + std::optional handle_; bool joinable_ = false; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
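Beyond the absl::optional to std::optional and namespace changes, these PlatformThread hunks exercise the two spawning modes. A small usage sketch mirroring the semantics shown in the unit tests below:

// Sketch: the two PlatformThread spawning modes exercised in the tests below.
#include "api/units/time_delta.h"
#include "rtc_base/event.h"
#include "rtc_base/platform_thread.h"

void SpawnSketch() {
  // Joinable: Finalize() (or the destructor) blocks until the lambda returns.
  auto worker =
      webrtc::PlatformThread::SpawnJoinable([] { /* do work */ }, "worker");
  worker.Finalize();

  // Detached: nothing joins it, so use an Event to observe completion.
  webrtc::Event done;
  webrtc::PlatformThread::SpawnDetached([&] { done.Set(); }, "detached");
  done.Wait(webrtc::TimeDelta::Seconds(30));
}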
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::PlatformThread; +using ::webrtc::ThreadAttributes; +using ::webrtc::ThreadPriority; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_PLATFORM_THREAD_H_ diff --git a/rtc_base/platform_thread_types.cc b/rtc_base/platform_thread_types.cc index d64ea689bb..9b6e7a13d8 100644 --- a/rtc_base/platform_thread_types.cc +++ b/rtc_base/platform_thread_types.cc @@ -10,6 +10,7 @@ #include "rtc_base/platform_thread_types.h" +// IWYU pragma: begin_keep #if defined(WEBRTC_LINUX) #include #include @@ -31,8 +32,9 @@ typedef HRESULT(WINAPI* RTC_SetThreadDescription)(HANDLE hThread, #include "rtc_base/checks.h" #endif +// IWYU pragma: end_keep -namespace rtc { +namespace webrtc { PlatformThreadId CurrentThreadId() { #if defined(WEBRTC_WIN) @@ -123,4 +125,4 @@ void SetCurrentThreadName(const char* name) { #endif } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/platform_thread_types.h b/rtc_base/platform_thread_types.h index 6b9101eec0..603e46ff08 100644 --- a/rtc_base/platform_thread_types.h +++ b/rtc_base/platform_thread_types.h @@ -11,6 +11,7 @@ #ifndef RTC_BASE_PLATFORM_THREAD_TYPES_H_ #define RTC_BASE_PLATFORM_THREAD_TYPES_H_ +// IWYU pragma: begin_exports // clang-format off // clang formating would change include order. #if defined(WEBRTC_WIN) @@ -30,8 +31,9 @@ #endif #endif // clang-format on +// IWYU pragma: end_exports -namespace rtc { +namespace webrtc { #if defined(WEBRTC_WIN) typedef DWORD PlatformThreadId; typedef DWORD PlatformThreadRef; @@ -57,6 +59,19 @@ bool IsThreadRefEqual(const PlatformThreadRef& a, const PlatformThreadRef& b); // Sets the current thread name. void SetCurrentThreadName(const char* name); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::CurrentThreadId; +using ::webrtc::CurrentThreadRef; +using ::webrtc::IsThreadRefEqual; +using ::webrtc::PlatformThreadId; +using ::webrtc::PlatformThreadRef; +using ::webrtc::SetCurrentThreadName; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_PLATFORM_THREAD_TYPES_H_ diff --git a/rtc_base/platform_thread_unittest.cc b/rtc_base/platform_thread_unittest.cc index 97b25e02e2..b25e06a67f 100644 --- a/rtc_base/platform_thread_unittest.cc +++ b/rtc_base/platform_thread_unittest.cc @@ -10,31 +10,32 @@ #include "rtc_base/platform_thread.h" -#include "absl/types/optional.h" +#include + #include "rtc_base/event.h" #include "system_wrappers/include/sleep.h" #include "test/gmock.h" -namespace rtc { +namespace webrtc { TEST(PlatformThreadTest, DefaultConstructedIsEmpty) { PlatformThread thread; - EXPECT_EQ(thread.GetHandle(), absl::nullopt); + EXPECT_EQ(thread.GetHandle(), std::nullopt); EXPECT_TRUE(thread.empty()); } TEST(PlatformThreadTest, StartFinalize) { PlatformThread thread = PlatformThread::SpawnJoinable([] {}, "1"); - EXPECT_NE(thread.GetHandle(), absl::nullopt); + EXPECT_NE(thread.GetHandle(), std::nullopt); EXPECT_FALSE(thread.empty()); thread.Finalize(); EXPECT_TRUE(thread.empty()); - rtc::Event done; + Event done; thread = PlatformThread::SpawnDetached([&] { done.Set(); }, "2"); EXPECT_FALSE(thread.empty()); thread.Finalize(); EXPECT_TRUE(thread.empty()); - done.Wait(webrtc::TimeDelta::Seconds(30)); + done.Wait(TimeDelta::Seconds(30)); } TEST(PlatformThreadTest, MovesEmpty) { @@ -49,12 +50,12 @@ TEST(PlatformThreadTest, MovesHandles) { PlatformThread thread2 = std::move(thread1); EXPECT_TRUE(thread1.empty()); EXPECT_FALSE(thread2.empty()); - rtc::Event done; + Event done; thread1 = PlatformThread::SpawnDetached([&] { done.Set(); }, "2"); thread2 = std::move(thread1); EXPECT_TRUE(thread1.empty()); EXPECT_FALSE(thread2.empty()); - done.Wait(webrtc::TimeDelta::Seconds(30)); + done.Wait(TimeDelta::Seconds(30)); } TEST(PlatformThreadTest, @@ -79,18 +80,18 @@ TEST(PlatformThreadTest, RunFunctionIsCalled) { TEST(PlatformThreadTest, JoinsThread) { // This test flakes if there are problems with the join implementation. - rtc::Event event; + Event event; PlatformThread::SpawnJoinable([&] { event.Set(); }, "T"); - EXPECT_TRUE(event.Wait(/*give_up_after=*/webrtc::TimeDelta::Zero())); + EXPECT_TRUE(event.Wait(/*give_up_after=*/TimeDelta::Zero())); } TEST(PlatformThreadTest, StopsBeforeDetachedThreadExits) { // This test flakes if there are problems with the detached thread // implementation. bool flag = false; - rtc::Event thread_started; - rtc::Event thread_continue; - rtc::Event thread_exiting; + Event thread_started; + Event thread_continue; + Event thread_exiting; PlatformThread::SpawnDetached( [&] { thread_started.Set(); @@ -106,4 +107,4 @@ TEST(PlatformThreadTest, StopsBeforeDetachedThreadExits) { EXPECT_TRUE(flag); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/proxy_info.cc b/rtc_base/proxy_info.cc deleted file mode 100644 index 23d60afa74..0000000000 --- a/rtc_base/proxy_info.cc +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright 2004 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/proxy_info.h" - -namespace rtc { - -const char* ProxyToString(ProxyType proxy) { - const char* const PROXY_NAMES[] = {"none", "https", "socks5", "unknown"}; - return PROXY_NAMES[proxy]; -} - -ProxyInfo::ProxyInfo() : type(PROXY_NONE), autodetect(false) {} -ProxyInfo::~ProxyInfo() = default; - -} // namespace rtc diff --git a/rtc_base/proxy_info.h b/rtc_base/proxy_info.h deleted file mode 100644 index e614692025..0000000000 --- a/rtc_base/proxy_info.h +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2004 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_PROXY_INFO_H_ -#define RTC_BASE_PROXY_INFO_H_ - -#include - -#include "rtc_base/crypt_string.h" -#include "rtc_base/socket_address.h" - -namespace rtc { - -enum ProxyType { PROXY_NONE, PROXY_HTTPS, PROXY_SOCKS5, PROXY_UNKNOWN }; -const char* ProxyToString(ProxyType proxy); - -struct ProxyInfo { - ProxyType type; - SocketAddress address; - std::string autoconfig_url; - bool autodetect; - std::string bypass_list; - std::string username; - CryptString password; - - ProxyInfo(); - ~ProxyInfo(); -}; - -} // namespace rtc - -#endif // RTC_BASE_PROXY_INFO_H_ diff --git a/rtc_base/proxy_server.cc b/rtc_base/proxy_server.cc index 84c96213c3..d9b94c5c25 100644 --- a/rtc_base/proxy_server.cc +++ b/rtc_base/proxy_server.cc @@ -12,13 +12,19 @@ #include +#include #include #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/memory/fifo_buffer.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/server_socket_adapters.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" #include "rtc_base/socket_factory.h" -namespace rtc { +namespace webrtc { // ProxyServer ProxyServer::ProxyServer(SocketFactory* int_factory, @@ -149,8 +155,4 @@ void ProxyBinding::Destroy() { SignalDestroyed(this); } -AsyncProxyServerSocket* SocksProxyServer::WrapSocket(Socket* socket) { - return new AsyncSocksProxyServerSocket(socket); -} - -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/proxy_server.h b/rtc_base/proxy_server.h index 0b9b655a5e..ded2478da8 100644 --- a/rtc_base/proxy_server.h +++ b/rtc_base/proxy_server.h @@ -14,15 +14,14 @@ #include #include -#include "absl/memory/memory.h" #include "rtc_base/memory/fifo_buffer.h" #include "rtc_base/server_socket_adapters.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" +#include "rtc_base/socket_factory.h" +#include "rtc_base/third_party/sigslot/sigslot.h" -namespace rtc { - -class SocketFactory; +namespace webrtc { // ProxyServer is a base class that allows for easy construction of proxy // servers. With its helper class ProxyBinding, it contains all the necessary @@ -89,22 +88,15 @@ class ProxyServer : public sigslot::has_slots<> { std::vector> bindings_; }; -// SocksProxyServer is a simple extension of ProxyServer to implement SOCKS. 
-class SocksProxyServer : public ProxyServer { - public: - SocksProxyServer(SocketFactory* int_factory, - const SocketAddress& int_addr, - SocketFactory* ext_factory, - const SocketAddress& ext_ip) - : ProxyServer(int_factory, int_addr, ext_factory, ext_ip) {} - - SocksProxyServer(const SocksProxyServer&) = delete; - SocksProxyServer& operator=(const SocksProxyServer&) = delete; - - protected: - AsyncProxyServerSocket* WrapSocket(Socket* socket) override; -}; +} // namespace webrtc +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::ProxyBinding; +using ::webrtc::ProxyServer; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_PROXY_SERVER_H_ diff --git a/rtc_base/proxy_unittest.cc b/rtc_base/proxy_unittest.cc deleted file mode 100644 index 9e3898e430..0000000000 --- a/rtc_base/proxy_unittest.cc +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2009 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include -#include - -#include "rtc_base/gunit.h" -#include "rtc_base/proxy_server.h" -#include "rtc_base/socket_adapters.h" -#include "rtc_base/test_client.h" -#include "rtc_base/test_echo_server.h" -#include "rtc_base/virtual_socket_server.h" - -using rtc::Socket; -using rtc::SocketAddress; - -static const SocketAddress kSocksProxyIntAddr("1.2.3.4", 1080); -static const SocketAddress kSocksProxyExtAddr("1.2.3.5", 0); -static const SocketAddress kBogusProxyIntAddr("1.2.3.4", 999); - -// Sets up a virtual socket server and a SOCKS5 proxy server. -class ProxyTest : public ::testing::Test { - public: - ProxyTest() : ss_(new rtc::VirtualSocketServer()), thread_(ss_.get()) { - socks_.reset(new rtc::SocksProxyServer(ss_.get(), kSocksProxyIntAddr, - ss_.get(), kSocksProxyExtAddr)); - } - - rtc::SocketServer* ss() { return ss_.get(); } - - private: - std::unique_ptr ss_; - rtc::AutoSocketServerThread thread_; - std::unique_ptr socks_; -}; - -// Tests whether we can use a SOCKS5 proxy to connect to a server. -TEST_F(ProxyTest, TestSocks5Connect) { - rtc::Socket* socket = - ss()->CreateSocket(kSocksProxyIntAddr.family(), SOCK_STREAM); - rtc::AsyncSocksProxySocket* proxy_socket = new rtc::AsyncSocksProxySocket( - socket, kSocksProxyIntAddr, "", rtc::CryptString()); - // TODO: IPv6-ize these tests when proxy supports IPv6. 
- - rtc::TestEchoServer server(rtc::Thread::Current(), - SocketAddress(INADDR_ANY, 0)); - - std::unique_ptr packet_socket( - rtc::AsyncTCPSocket::Create(proxy_socket, SocketAddress(INADDR_ANY, 0), - server.address())); - EXPECT_TRUE(packet_socket != nullptr); - rtc::TestClient client(std::move(packet_socket)); - - EXPECT_EQ(Socket::CS_CONNECTING, proxy_socket->GetState()); - EXPECT_TRUE(client.CheckConnected()); - EXPECT_EQ(Socket::CS_CONNECTED, proxy_socket->GetState()); - EXPECT_EQ(server.address(), client.remote_address()); - client.Send("foo", 3); - EXPECT_TRUE(client.CheckNextPacket("foo", 3, nullptr)); - EXPECT_TRUE(client.CheckNoPacket()); -} diff --git a/rtc_base/race_checker.cc b/rtc_base/race_checker.cc index f0d4e868c2..e7359cc37e 100644 --- a/rtc_base/race_checker.cc +++ b/rtc_base/race_checker.cc @@ -10,7 +10,9 @@ #include "rtc_base/race_checker.h" -namespace rtc { +#include "rtc_base/platform_thread_types.h" + +namespace webrtc { RaceChecker::RaceChecker() {} @@ -53,4 +55,4 @@ RaceCheckerScope::~RaceCheckerScope() { } } // namespace internal -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/race_checker.h b/rtc_base/race_checker.h index 4d574601eb..4a0c594fea 100644 --- a/rtc_base/race_checker.h +++ b/rtc_base/race_checker.h @@ -15,7 +15,7 @@ #include "rtc_base/platform_thread_types.h" #include "rtc_base/thread_annotations.h" -namespace rtc { +namespace webrtc { namespace internal { class RaceCheckerScope; @@ -60,19 +60,32 @@ class RTC_SCOPED_LOCKABLE RaceCheckerScopeDoNothing { }; } // namespace internal +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::RaceChecker; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES + +#define RTC_CHECK_RUNS_SERIALIZED(x) RTC_CHECK_RUNS_SERIALIZED_NEXT(x, __LINE__) + +#define RTC_CHECK_RUNS_SERIALIZED_NEXT(x, suffix) \ + RTC_CHECK_RUNS_SERIALIZED_IMPL(x, suffix) -#define RTC_CHECK_RUNS_SERIALIZED(x) \ - rtc::internal::RaceCheckerScope race_checker(x); \ - RTC_CHECK(!race_checker.RaceDetected()) +#define RTC_CHECK_RUNS_SERIALIZED_IMPL(x, suffix) \ + webrtc::internal::RaceCheckerScope race_checker##suffix(x); \ + RTC_CHECK(!race_checker##suffix.RaceDetected()) #if RTC_DCHECK_IS_ON -#define RTC_DCHECK_RUNS_SERIALIZED(x) \ - rtc::internal::RaceCheckerScope race_checker(x); \ +#define RTC_DCHECK_RUNS_SERIALIZED(x) \ + webrtc::internal::RaceCheckerScope race_checker(x); \ RTC_DCHECK(!race_checker.RaceDetected()) #else #define RTC_DCHECK_RUNS_SERIALIZED(x) \ - rtc::internal::RaceCheckerScopeDoNothing race_checker(x) + webrtc::internal::RaceCheckerScopeDoNothing race_checker(x) #endif #endif // RTC_BASE_RACE_CHECKER_H_ diff --git a/rtc_base/random.cc b/rtc_base/random.cc index 5206b817f3..55b8749cb7 100644 --- a/rtc_base/random.cc +++ b/rtc_base/random.cc @@ -42,8 +42,8 @@ uint32_t Random::Rand(uint32_t low, uint32_t high) { int32_t Random::Rand(int32_t low, int32_t high) { RTC_DCHECK(low <= high); const int64_t low_i64{low}; - return rtc::dchecked_cast( - Rand(rtc::dchecked_cast(high - low_i64)) + low_i64); + return dchecked_cast(Rand(dchecked_cast(high - low_i64)) + + low_i64); } template <> diff --git a/rtc_base/rate_limiter.cc b/rtc_base/rate_limiter.cc index 4740b26f81..26e1c30225 100644 --- a/rtc_base/rate_limiter.cc +++ b/rtc_base/rate_limiter.cc @@ -11,10 +11,9 @@ #include "rtc_base/rate_limiter.h" #include 
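The RTC_CHECK_RUNS_SERIALIZED rewrite above adds an extra expansion step so that __LINE__ is expanded before token pasting, which lets the macro appear more than once in a scope without the generated race_checker locals colliding. The underlying idiom, shown here with placeholder macro names rather than the WebRTC ones:

// Sketch of the two-step token-pasting idiom used by
// RTC_CHECK_RUNS_SERIALIZED_NEXT / _IMPL above.
#define SKETCH_CONCAT_IMPL(a, b) a##b
#define SKETCH_CONCAT(a, b) SKETCH_CONCAT_IMPL(a, b)

// Pasting directly with a##b would yield `prefix__LINE__`, because arguments
// of ## are not macro-expanded; the extra hop expands __LINE__ first.
#define SKETCH_UNIQUE_NAME(prefix) SKETCH_CONCAT(prefix, __LINE__)

int SKETCH_UNIQUE_NAME(counter) = 0;  // Expands to e.g. `int counter12 = 0;`
int SKETCH_UNIQUE_NAME(counter) = 1;  // A different name on a different line.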
+#include -#include "absl/types/optional.h" #include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { @@ -34,9 +33,8 @@ RateLimiter::~RateLimiter() {} bool RateLimiter::TryUseRate(size_t packet_size_bytes) { MutexLock lock(&lock_); int64_t now_ms = clock_->TimeInMilliseconds(); - absl::optional current_rate = current_rate_.Rate(now_ms); - if (!webrtc::field_trial::IsEnabled("WebRTC-DisableRtxRateLimiter") && - current_rate) { + std::optional current_rate = current_rate_.Rate(now_ms); + if (current_rate) { // If there is a current rate, check if adding bytes would cause maximum // bitrate target to be exceeded. If there is NOT a valid current rate, // allow allocating rate even if target is exceeded. This prevents diff --git a/rtc_base/rate_limiter_unittest.cc b/rtc_base/rate_limiter_unittest.cc index a830446d60..a118e46948 100644 --- a/rtc_base/rate_limiter_unittest.cc +++ b/rtc_base/rate_limiter_unittest.cc @@ -15,7 +15,6 @@ #include "rtc_base/event.h" #include "rtc_base/platform_thread.h" #include "system_wrappers/include/clock.h" -#include "test/field_trial.h" #include "test/gtest.h" namespace webrtc { @@ -107,19 +106,6 @@ TEST_F(RateLimitTest, WindowSizeLimits) { EXPECT_FALSE(rate_limiter->SetWindowSize(kWindowSizeMs + 1)); } -TEST_F(RateLimitTest, DiablesRtxRateLimiterByFieldTrial) { - webrtc::test::ScopedFieldTrials trial( - "WebRTC-DisableRtxRateLimiter/Enabled/"); - - // Fill rate, extend window to full size. - EXPECT_TRUE(rate_limiter->TryUseRate(kRateFillingBytes / 2)); - clock_.AdvanceTimeMilliseconds(kWindowSizeMs - 1); - EXPECT_TRUE(rate_limiter->TryUseRate(kRateFillingBytes / 2)); - - // Does not limit rate even when all rate consumed. - EXPECT_TRUE(rate_limiter->TryUseRate(1)); -} - static constexpr TimeDelta kMaxTimeout = TimeDelta::Seconds(30); class ThreadTask { @@ -137,8 +123,8 @@ class ThreadTask { virtual void DoRun() = 0; RateLimiter* const rate_limiter_; - rtc::Event start_signal_; - rtc::Event end_signal_; + Event start_signal_; + Event end_signal_; }; TEST_F(RateLimitTest, MultiThreadedUsage) { @@ -159,7 +145,7 @@ TEST_F(RateLimitTest, MultiThreadedUsage) { EXPECT_TRUE(rate_limiter_->SetWindowSize(kWindowSizeMs / 2)); } } set_window_size_task(rate_limiter.get()); - auto thread1 = rtc::PlatformThread::SpawnJoinable( + auto thread1 = PlatformThread::SpawnJoinable( [&set_window_size_task] { set_window_size_task.Run(); }, "Thread1"); class SetMaxRateTask : public ThreadTask { @@ -170,7 +156,7 @@ TEST_F(RateLimitTest, MultiThreadedUsage) { void DoRun() override { rate_limiter_->SetMaxRate(kMaxRateBps * 2); } } set_max_rate_task(rate_limiter.get()); - auto thread2 = rtc::PlatformThread::SpawnJoinable( + auto thread2 = PlatformThread::SpawnJoinable( [&set_max_rate_task] { set_max_rate_task.Run(); }, "Thread2"); class UseRateTask : public ThreadTask { @@ -187,7 +173,7 @@ TEST_F(RateLimitTest, MultiThreadedUsage) { SimulatedClock* const clock_; } use_rate_task(rate_limiter.get(), &clock_); - auto thread3 = rtc::PlatformThread::SpawnJoinable( + auto thread3 = PlatformThread::SpawnJoinable( [&use_rate_task] { use_rate_task.Run(); }, "Thread3"); set_window_size_task.start_signal_.Set(); diff --git a/rtc_base/rate_statistics.cc b/rtc_base/rate_statistics.cc index 5c83796471..5870ccada3 100644 --- a/rtc_base/rate_statistics.cc +++ b/rtc_base/rate_statistics.cc @@ -57,8 +57,17 @@ void RateStatistics::Reset() { void RateStatistics::Update(int64_t count, int64_t now_ms) { RTC_DCHECK_GE(count, 0); + // Don't reset 
`first_timestamp_` if the last sample removed by EraseOld() was + // recent. This ensures that the window maintains its intended duration even + // when samples are received near the boundary. Use a margin of 50% of the + // current window size. + const int64_t recent_sample_time_margin = 1.5 * current_window_size_ms_; + bool last_sample_is_recent = + !buckets_.empty() && + buckets_.back().timestamp > now_ms - recent_sample_time_margin; + EraseOld(now_ms); - if (first_timestamp_ == -1 || num_samples_ == 0) { + if (first_timestamp_ == -1 || (num_samples_ == 0 && !last_sample_is_recent)) { first_timestamp_ = now_ms; } @@ -84,7 +93,7 @@ void RateStatistics::Update(int64_t count, int64_t now_ms) { ++num_samples_; } -absl::optional RateStatistics::Rate(int64_t now_ms) const { +std::optional RateStatistics::Rate(int64_t now_ms) const { // Yeah, this const_cast ain't pretty, but the alternative is to declare most // of the members as mutable... const_cast(this)->EraseOld(now_ms); @@ -107,9 +116,9 @@ absl::optional RateStatistics::Rate(int64_t now_ms) const { // overflowed, treat this as rate unavailable. if (num_samples_ == 0 || active_window_size <= 1 || (num_samples_ <= 1 && - rtc::SafeLt(active_window_size, current_window_size_ms_)) || + SafeLt(active_window_size, current_window_size_ms_)) || overflow_) { - return absl::nullopt; + return std::nullopt; } float scale = static_cast(scale_) / active_window_size; @@ -117,9 +126,9 @@ absl::optional RateStatistics::Rate(int64_t now_ms) const { // Better return unavailable rate than garbage value (undefined behavior). if (result > static_cast(std::numeric_limits::max())) { - return absl::nullopt; + return std::nullopt; } - return rtc::dchecked_cast(result); + return dchecked_cast(result); } void RateStatistics::EraseOld(int64_t now_ms) { diff --git a/rtc_base/rate_statistics.h b/rtc_base/rate_statistics.h index e7ce8ad5ed..7bcb0012fd 100644 --- a/rtc_base/rate_statistics.h +++ b/rtc_base/rate_statistics.h @@ -16,8 +16,8 @@ #include #include +#include -#include "absl/types/optional.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -62,7 +62,7 @@ class RTC_EXPORT RateStatistics { // from a monotonic clock. Ie, it doesn't matter if this call moves the // window, since any subsequent call to Update or Rate would still have moved // the window as much or more. - absl::optional Rate(int64_t now_ms) const; + std::optional Rate(int64_t now_ms) const; // Update the size of the averaging window. The maximum allowed value for // window_size_ms is max_window_size_ms as supplied in the constructor. diff --git a/rtc_base/rate_statistics_unittest.cc b/rtc_base/rate_statistics_unittest.cc index 8f1a8384cf..ce982b7cdf 100644 --- a/rtc_base/rate_statistics_unittest.cc +++ b/rtc_base/rate_statistics_unittest.cc @@ -54,7 +54,7 @@ TEST_F(RateStatisticsTest, TestStrictMode) { // Approximately 1200 kbps expected. Not exact since when packets // are removed we will jump 10 ms to the next packet. if (i > kInterval) { - absl::optional rate = stats_.Rate(now_ms); + std::optional rate = stats_.Rate(now_ms); EXPECT_TRUE(static_cast(rate)); uint32_t samples = i / kInterval + 1; uint64_t total_bits = samples * kPacketSize * 8; @@ -79,7 +79,7 @@ TEST_F(RateStatisticsTest, IncreasingThenDecreasingBitrate) { const uint32_t kExpectedBitrate = 8000000; // 1000 bytes per millisecond until plateau is reached. 
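The margin added to RateStatistics::Update above keeps first_timestamp_ when the sample just evicted by EraseOld() was recent. Concretely, with a 1000 ms window and one sample per second, each update lands exactly as the previous sample leaves the window; before this change first_timestamp_ was reset on every update, the active window collapsed to a single millisecond, and Rate() reported no value. The sketch below mirrors the HandlesLowFps test added further down; the description of the pre-fix behavior is inferred from the surrounding code, not stated elsewhere in the patch.

// Sketch: one sample per second against a 1000 ms window. With the margin
// fix the rate settles at 1/s; without it, Rate() stayed empty.
#include <cstdint>
#include <optional>

#include "rtc_base/rate_statistics.h"

std::optional<int64_t> OneFramePerSecondRate() {
  webrtc::RateStatistics fps(/*window_size_ms=*/1000, /*scale=*/1000.0f);
  for (int64_t now_ms = 0; now_ms <= 9000; now_ms += 1000) {
    fps.Update(1, now_ms);  // One "frame" per second for ten seconds.
  }
  return fps.Rate(/*now_ms=*/9000);
}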
int prev_error = kExpectedBitrate; - absl::optional bitrate; + std::optional bitrate; while (++now_ms < 10000) { stats_.Update(1000, now_ms); bitrate = stats_.Rate(now_ms); @@ -103,7 +103,7 @@ TEST_F(RateStatisticsTest, IncreasingThenDecreasingBitrate) { // Zero bytes per millisecond until 0 is reached. while (++now_ms < 20000) { stats_.Update(0, now_ms); - absl::optional new_bitrate = stats_.Rate(now_ms); + std::optional new_bitrate = stats_.Rate(now_ms); if (static_cast(new_bitrate) && *new_bitrate != *bitrate) { // New bitrate must be lower than previous one. EXPECT_LT(*new_bitrate, *bitrate); @@ -131,7 +131,7 @@ TEST_F(RateStatisticsTest, ResetAfterSilence) { const uint32_t kExpectedBitrate = 8000000; // 1000 bytes per millisecond until the window has been filled. int prev_error = kExpectedBitrate; - absl::optional bitrate; + std::optional bitrate; while (++now_ms < 10000) { stats_.Update(1000, now_ms); bitrate = stats_.Rate(now_ms); @@ -215,7 +215,7 @@ TEST_F(RateStatisticsTest, RespectsWindowSizeEdges) { // Window size should be full, and the single data point should be accepted. ++now_ms; - absl::optional bitrate = stats_.Rate(now_ms); + std::optional bitrate = stats_.Rate(now_ms); EXPECT_TRUE(static_cast(bitrate)); EXPECT_EQ(1000 * 8u, *bitrate); @@ -241,7 +241,7 @@ TEST_F(RateStatisticsTest, HandlesZeroCounts) { stats_.Update(kWindowMs, now_ms); now_ms += kWindowMs - 1; stats_.Update(0, now_ms); - absl::optional bitrate = stats_.Rate(now_ms); + std::optional bitrate = stats_.Rate(now_ms); EXPECT_TRUE(static_cast(bitrate)); EXPECT_EQ(1000 * 8u, *bitrate); @@ -264,7 +264,7 @@ TEST_F(RateStatisticsTest, HandlesQuietPeriods) { stats_.Update(0, now_ms); now_ms += kWindowMs - 1; - absl::optional bitrate = stats_.Rate(now_ms); + std::optional bitrate = stats_.Rate(now_ms); EXPECT_TRUE(static_cast(bitrate)); EXPECT_EQ(0u, *bitrate); @@ -315,4 +315,47 @@ TEST_F(RateStatisticsTest, HandlesSomewhatLargeNumbers) { EXPECT_FALSE(stats_.Rate(now_ms)); } +TEST_F(RateStatisticsTest, HandlesLowFps) { + RateStatistics fps_stats(/*window_size_ms=*/1000, /*scale=*/1000); + + const int64_t kExpectedFps = 1; + constexpr int64_t kTimeDelta = 1000 / kExpectedFps; + + int64_t now_ms = 0; + EXPECT_FALSE(stats_.Rate(now_ms)); + // Fill 1 s window. + while (now_ms < 1000) { + fps_stats.Update(1, now_ms); + now_ms += kTimeDelta; + } + + // Simulate 1 fps stream for 10 seconds. + while (now_ms < 10000) { + fps_stats.Update(1, now_ms); + EXPECT_EQ(kExpectedFps, fps_stats.Rate(now_ms)); + now_ms += kTimeDelta; + } +} + +TEST_F(RateStatisticsTest, Handles25Fps) { + RateStatistics fps_stats(/*window_size_ms=*/1000, /*scale=*/1000); + + constexpr int64_t kExpectedFps = 25; + constexpr int64_t kTimeDelta = 1000 / kExpectedFps; + + int64_t now_ms = 0; + EXPECT_FALSE(stats_.Rate(now_ms)); + // Fill 1 s window. + while (now_ms < 1000) { + fps_stats.Update(1, now_ms); + now_ms += kTimeDelta; + } + // Simulate 25 fps stream for 10 seconds. 
+ while (now_ms < 10000) { + fps_stats.Update(1, now_ms); + EXPECT_EQ(kExpectedFps, fps_stats.Rate(now_ms)); + now_ms += kTimeDelta; + } +} + } // namespace diff --git a/rtc_base/rate_tracker.cc b/rtc_base/rate_tracker.cc index e39dadb988..83481c196d 100644 --- a/rtc_base/rate_tracker.cc +++ b/rtc_base/rate_tracker.cc @@ -11,11 +11,13 @@ #include "rtc_base/rate_tracker.h" #include +#include +#include #include "rtc_base/checks.h" #include "rtc_base/time_utils.h" -namespace rtc { +namespace webrtc { static const int64_t kTimeUnset = -1; @@ -62,7 +64,7 @@ double RateTracker::ComputeRateForInterval( buckets_to_skip = bucket_count_ - current_bucket_; milliseconds_to_skip = 0; available_interval_milliseconds = - TimeDiff(current_time, initialization_time_milliseconds_); + webrtc::TimeDiff(current_time, initialization_time_milliseconds_); // Let one bucket interval pass after initialization before reporting. if (available_interval_milliseconds < bucket_milliseconds_) { return 0.0; @@ -100,7 +102,7 @@ double RateTracker::ComputeTotalRate() const { } return static_cast(total_sample_count_ * 1000) / static_cast( - TimeDiff(current_time, initialization_time_milliseconds_)); + webrtc::TimeDiff(current_time, initialization_time_milliseconds_)); } int64_t RateTracker::TotalSampleCount() const { @@ -137,7 +139,7 @@ void RateTracker::AddSamplesAtTime(int64_t current_time_ms, } int64_t RateTracker::Time() const { - return rtc::TimeMillis(); + return webrtc::TimeMillis(); } void RateTracker::EnsureInitialized() { @@ -155,4 +157,4 @@ size_t RateTracker::NextBucketIndex(size_t bucket_index) const { return (bucket_index + 1u) % (bucket_count_ + 1u); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/rate_tracker.h b/rtc_base/rate_tracker.h index d706569ecd..37155878c4 100644 --- a/rtc_base/rate_tracker.h +++ b/rtc_base/rate_tracker.h @@ -14,7 +14,7 @@ #include #include -namespace rtc { +namespace webrtc { // Computes units per second over a given interval by tracking the units over // each bucket of a given size and calculating the instantaneous rate assuming @@ -67,6 +67,14 @@ class RateTracker { int64_t initialization_time_milliseconds_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::RateTracker; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_RATE_TRACKER_H_ diff --git a/rtc_base/rate_tracker_unittest.cc b/rtc_base/rate_tracker_unittest.cc index 22ae2c07e7..7eed504f93 100644 --- a/rtc_base/rate_tracker_unittest.cc +++ b/rtc_base/rate_tracker_unittest.cc @@ -10,9 +10,11 @@ #include "rtc_base/rate_tracker.h" +#include + #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { const uint32_t kBucketIntervalMs = 100; } // namespace @@ -175,4 +177,4 @@ TEST(RateTrackerTest, TestLargeNumbers) { EXPECT_DOUBLE_EQ(large_number * 2, tracker.ComputeRate()); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/ref_count.h b/rtc_base/ref_count.h index d8d652abd8..d1ef143360 100644 --- a/rtc_base/ref_count.h +++ b/rtc_base/ref_count.h @@ -10,58 +10,22 @@ #ifndef RTC_BASE_REF_COUNT_H_ #define RTC_BASE_REF_COUNT_H_ -namespace rtc { - -// Refcounted objects should implement the following informal interface: -// -// void AddRef() const ; -// RefCountReleaseStatus Release() const; -// -// You may access members of a reference-counted object, including the AddRef() -// and Release() methods, only if you already own a reference to it, or if -// you're borrowing someone else's reference. (A newly created object is a -// special case: the reference count is zero on construction, and the code that -// creates the object should immediately call AddRef(), bringing the reference -// count from zero to one, e.g., by constructing an rtc::scoped_refptr). -// -// AddRef() creates a new reference to the object. -// -// Release() releases a reference to the object; the caller now has one less -// reference than before the call. Returns kDroppedLastRef if the number of -// references dropped to zero because of this (in which case the object destroys -// itself). Otherwise, returns kOtherRefsRemained, to signal that at the precise -// time the caller's reference was dropped, other references still remained (but -// if other threads own references, this may of course have changed by the time -// Release() returns). -// -// The caller of Release() must treat it in the same way as a delete operation: -// Regardless of the return value from Release(), the caller mustn't access the -// object. The object might still be alive, due to references held by other -// users of the object, but the object can go away at any time, e.g., as the -// result of another thread calling Release(). -// -// Calling AddRef() and Release() manually is discouraged. It's recommended to -// use rtc::scoped_refptr to manage all pointers to reference counted objects. -// Note that rtc::scoped_refptr depends on compile-time duck-typing; formally -// implementing the below RefCountInterface is not required. +// Transition file for backwards compatibility with source code +// that includes the non-API file. -enum class RefCountReleaseStatus { kDroppedLastRef, kOtherRefsRemained }; +#include "api/ref_count.h" -// Interfaces where refcounting is part of the public api should -// inherit this abstract interface. The implementation of these -// methods is usually provided by the RefCountedObject template class, -// applied as a leaf in the inheritance tree. 
-class RefCountInterface { - public: - virtual void AddRef() const = 0; - virtual RefCountReleaseStatus Release() const = 0; +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { - // Non-public destructor, because Release() has exclusive responsibility for - // destroying the object. - protected: - virtual ~RefCountInterface() {} -}; +// TODO(bugs.webrtc.org/15622): Deprecate and remove these aliases. +using RefCountInterface [[deprecated("Use webrtc::RefCountInterface")]] = + webrtc::RefCountInterface; +using RefCountReleaseStatus + [[deprecated("Use webrtc::RefCountReleaseStatus")]] = + webrtc::RefCountReleaseStatus; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_REF_COUNT_H_ diff --git a/rtc_base/ref_counted_object.h b/rtc_base/ref_counted_object.h index 418c3d80cc..032e9ad163 100644 --- a/rtc_base/ref_counted_object.h +++ b/rtc_base/ref_counted_object.h @@ -14,7 +14,7 @@ #include "rtc_base/ref_count.h" #include "rtc_base/ref_counter.h" -namespace rtc { +namespace webrtc { template class RefCountedObject : public T { @@ -84,6 +84,15 @@ class FinalRefCountedObject final : public T { mutable webrtc::webrtc_impl::RefCounter ref_count_{0}; }; +} // namespace webrtc + +// Backwards compatibe aliases. +// TODO: https://issues.webrtc.org/42225969 - deprecate and remove. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::FinalRefCountedObject; +using ::webrtc::RefCountedObject; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_REF_COUNTED_OBJECT_H_ diff --git a/rtc_base/ref_counted_object_unittest.cc b/rtc_base/ref_counted_object_unittest.cc index abeb1e9e67..fff301cac2 100644 --- a/rtc_base/ref_counted_object_unittest.cc +++ b/rtc_base/ref_counted_object_unittest.cc @@ -21,7 +21,7 @@ #include "rtc_base/ref_count.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { @@ -172,4 +172,4 @@ TEST(RefCounted, SmartPointers) { } } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/ref_counter.h b/rtc_base/ref_counter.h index 6ffeda8ced..93ae3d21f2 100644 --- a/rtc_base/ref_counter.h +++ b/rtc_base/ref_counter.h @@ -34,7 +34,7 @@ class RefCounter { // Otherwise, returns kOtherRefsRemained (note that in case of multithreading, // some other caller may have dropped the last reference by the time this call // returns; all we know is that we didn't do it). - rtc::RefCountReleaseStatus DecRef() { + RefCountReleaseStatus DecRef() { // Use release-acquire barrier to ensure all actions on the protected // resource are finished before the resource can be freed. // When ref_count_after_subtract > 0, this function require @@ -47,8 +47,8 @@ class RefCounter { int ref_count_after_subtract = ref_count_.fetch_sub(1, std::memory_order_acq_rel) - 1; return ref_count_after_subtract == 0 - ? rtc::RefCountReleaseStatus::kDroppedLastRef - : rtc::RefCountReleaseStatus::kOtherRefsRemained; + ? RefCountReleaseStatus::kDroppedLastRef + : RefCountReleaseStatus::kOtherRefsRemained; } // Return whether the reference count is one. If the reference count is used diff --git a/rtc_base/rolling_accumulator.h b/rtc_base/rolling_accumulator.h index 84d791edd1..8a46d5fe2c 100644 --- a/rtc_base/rolling_accumulator.h +++ b/rtc_base/rolling_accumulator.h @@ -19,7 +19,7 @@ #include "rtc_base/checks.h" #include "rtc_base/numerics/running_statistics.h" -namespace rtc { +namespace webrtc { // RollingAccumulator stores and reports statistics // over N most recent samples. 
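The RefCounter::DecRef hunk above keeps its release-acquire fetch_sub and only touches the namespace of the returned enum. The ordering it relies on is the standard atomic-refcount idiom; a stripped-down sketch, unrelated to the WebRTC classes:

// Sketch of the acq_rel refcount-decrement idiom referenced above.
#include <atomic>

class SketchRefCounted {
 public:
  void AddRef() const { ref_count_.fetch_add(1, std::memory_order_relaxed); }

  // Returns true if this call dropped the last reference. The release side of
  // acq_rel makes all prior writes visible; the acquire side makes them
  // visible to the deleting thread before the object is destroyed.
  bool Release() const {
    if (ref_count_.fetch_sub(1, std::memory_order_acq_rel) == 1) {
      delete this;
      return true;
    }
    return false;
  }

 protected:
  virtual ~SketchRefCounted() = default;

 private:
  mutable std::atomic<int> ref_count_{1};  // The creator owns one reference.
};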
@@ -42,7 +42,7 @@ class RollingAccumulator { size_t count() const { return static_cast(stats_.Size()); } void Reset() { - stats_ = webrtc::webrtc_impl::RunningStatistics(); + stats_ = webrtc_impl::RunningStatistics(); next_index_ = 0U; max_ = T(); max_stale_ = false; @@ -131,7 +131,7 @@ class RollingAccumulator { double ComputeVariance() const { return stats_.GetVariance().value_or(0); } private: - webrtc::webrtc_impl::RunningStatistics stats_; + webrtc_impl::RunningStatistics stats_; size_t next_index_; mutable T max_; mutable bool max_stale_; @@ -140,6 +140,14 @@ class RollingAccumulator { std::vector samples_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::RollingAccumulator; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_ROLLING_ACCUMULATOR_H_ diff --git a/rtc_base/rolling_accumulator_unittest.cc b/rtc_base/rolling_accumulator_unittest.cc index 3a26f9e42f..b32aa8c67d 100644 --- a/rtc_base/rolling_accumulator_unittest.cc +++ b/rtc_base/rolling_accumulator_unittest.cc @@ -14,7 +14,7 @@ #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { @@ -147,4 +147,4 @@ TEST(RollingAccumulatorTest, NumericStabilityForVariance) { EXPECT_NEAR(stats.ComputeVariance(), 1. / 12, 1e-3); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/rtc_certificate.cc b/rtc_base/rtc_certificate.cc index e0b6b3258e..1b89b76b0e 100644 --- a/rtc_base/rtc_certificate.cc +++ b/rtc_base/rtc_certificate.cc @@ -10,20 +10,22 @@ #include "rtc_base/rtc_certificate.h" +#include #include +#include +#include "api/scoped_refptr.h" #include "rtc_base/checks.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/time_utils.h" -namespace rtc { +namespace webrtc { scoped_refptr RTCCertificate::Create( std::unique_ptr identity) { - // Explicit new to access proteced constructor. - return rtc::scoped_refptr( - new RTCCertificate(identity.release())); + // Explicit new to access protected constructor. + return scoped_refptr(new RTCCertificate(identity.release())); } RTCCertificate::RTCCertificate(SSLIdentity* identity) : identity_(identity) { @@ -74,4 +76,4 @@ bool RTCCertificate::operator!=(const RTCCertificate& certificate) const { return !(*this == certificate); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/rtc_certificate.h b/rtc_base/rtc_certificate.h index 67c5c29a89..7d47656521 100644 --- a/rtc_base/rtc_certificate.h +++ b/rtc_base/rtc_certificate.h @@ -16,17 +16,14 @@ #include #include -#include "absl/base/attributes.h" #include "absl/strings/string_view.h" #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_identity.h" #include "rtc_base/system/rtc_export.h" -namespace rtc { - -class SSLCertChain; -class SSLCertificate; -class SSLIdentity; +namespace webrtc { // This class contains PEM strings of an RTCCertificate's private key and // certificate and acts as a text representation of RTCCertificate. Certificates @@ -92,6 +89,15 @@ class RTC_EXPORT RTCCertificate final const std::unique_ptr identity_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::RTCCertificate; +using ::webrtc::RTCCertificatePEM; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_RTC_CERTIFICATE_H_ diff --git a/rtc_base/rtc_certificate_generator.cc b/rtc_base/rtc_certificate_generator.cc index ffc51aa8da..c3a316e9b8 100644 --- a/rtc_base/rtc_certificate_generator.cc +++ b/rtc_base/rtc_certificate_generator.cc @@ -19,7 +19,7 @@ #include "rtc_base/checks.h" #include "rtc_base/ssl_identity.h" -namespace rtc { +namespace webrtc { namespace { @@ -32,7 +32,7 @@ const uint64_t kYearInSeconds = 365 * 24 * 60 * 60; // static scoped_refptr RTCCertificateGenerator::GenerateCertificate( const KeyParams& key_params, - const absl::optional& expires_ms) { + const std::optional& expires_ms) { if (!key_params.IsValid()) { return nullptr; } @@ -68,7 +68,7 @@ RTCCertificateGenerator::RTCCertificateGenerator(Thread* signaling_thread, void RTCCertificateGenerator::GenerateCertificateAsync( const KeyParams& key_params, - const absl::optional& expires_ms, + const std::optional& expires_ms, RTCCertificateGenerator::Callback callback) { RTC_DCHECK(signaling_thread_->IsCurrent()); RTC_DCHECK(callback); @@ -85,4 +85,4 @@ void RTCCertificateGenerator::GenerateCertificateAsync( }); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/rtc_certificate_generator.h b/rtc_base/rtc_certificate_generator.h index a881f1a369..ca5d419ce1 100644 --- a/rtc_base/rtc_certificate_generator.h +++ b/rtc_base/rtc_certificate_generator.h @@ -13,15 +13,16 @@ #include +#include + #include "absl/functional/any_invocable.h" -#include "absl/types/optional.h" #include "api/scoped_refptr.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/thread.h" -namespace rtc { +namespace webrtc { // Generates `RTCCertificate`s. // See `RTCCertificateGenerator` for the WebRTC repo's implementation. @@ -29,7 +30,8 @@ class RTCCertificateGeneratorInterface { public: // Functor that will be called when certificate is generated asynchroniosly. // Called with nullptr as the parameter on failure. - using Callback = absl::AnyInvocable) &&>; + using Callback = + absl::AnyInvocable) &&>; virtual ~RTCCertificateGeneratorInterface() = default; @@ -40,7 +42,7 @@ class RTCCertificateGeneratorInterface { // its own restrictions on the expiration time. virtual void GenerateCertificateAsync( const KeyParams& key_params, - const absl::optional& expires_ms, + const std::optional& expires_ms, Callback callback) = 0; }; @@ -58,7 +60,7 @@ class RTC_EXPORT RTCCertificateGenerator // specified, a default expiration time is used. static scoped_refptr GenerateCertificate( const KeyParams& key_params, - const absl::optional& expires_ms); + const std::optional& expires_ms); RTCCertificateGenerator(Thread* signaling_thread, Thread* worker_thread); ~RTCCertificateGenerator() override {} @@ -69,7 +71,7 @@ class RTC_EXPORT RTCCertificateGenerator // larger value than that is clamped down to a year. If `expires_ms` is not // specified, a default expiration time is used. void GenerateCertificateAsync(const KeyParams& key_params, - const absl::optional& expires_ms, + const std::optional& expires_ms, Callback callback) override; private: @@ -77,6 +79,15 @@ class RTC_EXPORT RTCCertificateGenerator Thread* const worker_thread_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. 
+// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::RTCCertificateGenerator; +using ::webrtc::RTCCertificateGeneratorInterface; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_RTC_CERTIFICATE_GENERATOR_H_ diff --git a/rtc_base/rtc_certificate_generator_unittest.cc b/rtc_base/rtc_certificate_generator_unittest.cc index fb7ec913e5..a4388f566f 100644 --- a/rtc_base/rtc_certificate_generator_unittest.cc +++ b/rtc_base/rtc_certificate_generator_unittest.cc @@ -10,16 +10,23 @@ #include "rtc_base/rtc_certificate_generator.h" +#include #include +#include +#include -#include "absl/types/optional.h" -#include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" #include "rtc_base/checks.h" -#include "rtc_base/gunit.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_identity.h" #include "rtc_base/thread.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "test/wait_until.h" -namespace rtc { +namespace webrtc { class RTCCertificateGeneratorFixture { public: @@ -65,32 +72,35 @@ class RTCCertificateGeneratorFixture { class RTCCertificateGeneratorTest : public ::testing::Test { public: protected: - static constexpr int kGenerationTimeoutMs = 10000; + static constexpr TimeDelta kGenerationTimeoutMs = TimeDelta::Millis(10000); - rtc::AutoThread main_thread_; + AutoThread main_thread_; RTCCertificateGeneratorFixture fixture_; }; TEST_F(RTCCertificateGeneratorTest, GenerateECDSA) { EXPECT_TRUE(RTCCertificateGenerator::GenerateCertificate(KeyParams::ECDSA(), - absl::nullopt)); + std::nullopt)); } TEST_F(RTCCertificateGeneratorTest, GenerateRSA) { EXPECT_TRUE(RTCCertificateGenerator::GenerateCertificate(KeyParams::RSA(), - absl::nullopt)); + std::nullopt)); } TEST_F(RTCCertificateGeneratorTest, GenerateAsyncECDSA) { EXPECT_FALSE(fixture_.certificate()); fixture_.generator()->GenerateCertificateAsync( - KeyParams::ECDSA(), absl::nullopt, fixture_.OnGenerated()); + KeyParams::ECDSA(), std::nullopt, fixture_.OnGenerated()); // Until generation has completed, the certificate is null. Since this is an // async call, generation must not have completed until we process messages // posted to this thread (which is done by `EXPECT_TRUE_WAIT`). 
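The unit-test hunks that follow replace the old `EXPECT_TRUE_WAIT` macro with the `webrtc::WaitUntil` helper checked through `IsRtcOk()`, and move the expiration parameter from `absl::optional` to `std::optional`. A condensed sketch of both idioms as they appear in these tests; the completion predicate and the 10-second budget are illustrative:

#include <functional>
#include <optional>

#include "api/test/rtc_error_matchers.h"
#include "api/units/time_delta.h"
#include "rtc_base/rtc_certificate_generator.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/wait_until.h"

// Helper meant to run inside a gtest test body.
void WaitUntilSketch(std::function<bool()> async_work_completed) {
  // Synchronous generation: std::nullopt now selects the default expiration.
  auto certificate = webrtc::RTCCertificateGenerator::GenerateCertificate(
      webrtc::KeyParams::ECDSA(), std::nullopt);
  EXPECT_TRUE(certificate);

  // Asynchronous completion is asserted by polling a predicate until the
  // matcher accepts it or the timeout elapses, then checking the returned
  // status, instead of the old EXPECT_TRUE_WAIT macro.
  EXPECT_THAT(
      webrtc::WaitUntil(std::move(async_work_completed), ::testing::IsTrue(),
                        {.timeout = webrtc::TimeDelta::Millis(10000)}),
      webrtc::IsRtcOk());
}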
EXPECT_FALSE(fixture_.GenerateAsyncCompleted()); EXPECT_FALSE(fixture_.certificate()); - EXPECT_TRUE_WAIT(fixture_.GenerateAsyncCompleted(), kGenerationTimeoutMs); + EXPECT_THAT( + webrtc::WaitUntil([&] { return fixture_.GenerateAsyncCompleted(); }, + ::testing::IsTrue(), {.timeout = kGenerationTimeoutMs}), + webrtc::IsRtcOk()); EXPECT_TRUE(fixture_.certificate()); } @@ -118,7 +128,7 @@ TEST_F(RTCCertificateGeneratorTest, GenerateWithExpires) { EXPECT_GT(cert_b->Expires(), cert_a->Expires()); uint64_t expires_diff = cert_b->Expires() - cert_a->Expires(); EXPECT_GE(expires_diff, kExpiresMs); - EXPECT_LE(expires_diff, kExpiresMs + 2 * kGenerationTimeoutMs + 1000); + EXPECT_LE(expires_diff, kExpiresMs + 2 * kGenerationTimeoutMs.ms() + 1000); } TEST_F(RTCCertificateGeneratorTest, GenerateWithInvalidParamsShouldFail) { @@ -126,12 +136,15 @@ TEST_F(RTCCertificateGeneratorTest, GenerateWithInvalidParamsShouldFail) { EXPECT_FALSE(invalid_params.IsValid()); EXPECT_FALSE(RTCCertificateGenerator::GenerateCertificate(invalid_params, - absl::nullopt)); + std::nullopt)); - fixture_.generator()->GenerateCertificateAsync(invalid_params, absl::nullopt, + fixture_.generator()->GenerateCertificateAsync(invalid_params, std::nullopt, fixture_.OnGenerated()); - EXPECT_TRUE_WAIT(fixture_.GenerateAsyncCompleted(), kGenerationTimeoutMs); + EXPECT_THAT( + webrtc::WaitUntil([&] { return fixture_.GenerateAsyncCompleted(); }, + ::testing::IsTrue(), {.timeout = kGenerationTimeoutMs}), + webrtc::IsRtcOk()); EXPECT_FALSE(fixture_.certificate()); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/rtc_certificate_unittest.cc b/rtc_base/rtc_certificate_unittest.cc index 63183883b3..67f7e2cb3a 100644 --- a/rtc_base/rtc_certificate_unittest.cc +++ b/rtc_base/rtc_certificate_unittest.cc @@ -21,7 +21,7 @@ #include "rtc_base/time_utils.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { @@ -68,7 +68,7 @@ class RTCCertificateTest : public ::testing::Test { // or later. scoped_refptr GenerateCertificateWithExpires( uint64_t expires_s) const { - RTC_CHECK(IsValueInRangeForNumericType(expires_s)); + RTC_CHECK(webrtc::IsValueInRangeForNumericType(expires_s)); SSLIdentityParams params; params.common_name = kTestCertCommonName; @@ -137,4 +137,4 @@ TEST_F(RTCCertificateTest, FromPEMWithInvalidPEM) { EXPECT_FALSE(certificate); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/sanitizer.h b/rtc_base/sanitizer.h index 8af0824b67..2f5045d18e 100644 --- a/rtc_base/sanitizer.h +++ b/rtc_base/sanitizer.h @@ -14,7 +14,7 @@ #include // For size_t. #ifdef __cplusplus -#include "absl/meta/type_traits.h" +#include #endif #if defined(__has_feature) @@ -56,6 +56,14 @@ static inline void rtc_AsanPoison(const volatile void* ptr, size_t num_elements) { #if RTC_HAS_ASAN ASAN_POISON_MEMORY_REGION(ptr, element_size * num_elements); +#else + // This is to prevent from the compiler raising a warning/error over unused + // variables. 
We cannot use clang's annotation (`[[maybe_unused]]`) because + // this file is also included from c files which doesn't support the + // annotation till we switch to C23 + (void)ptr; + (void)element_size; + (void)num_elements; #endif } @@ -67,6 +75,10 @@ static inline void rtc_AsanUnpoison(const volatile void* ptr, size_t num_elements) { #if RTC_HAS_ASAN ASAN_UNPOISON_MEMORY_REGION(ptr, element_size * num_elements); +#else + (void)ptr; + (void)element_size; + (void)num_elements; #endif } @@ -77,6 +89,10 @@ static inline void rtc_MsanMarkUninitialized(const volatile void* ptr, size_t num_elements) { #if RTC_HAS_MSAN __msan_poison(ptr, element_size * num_elements); +#else + (void)ptr; + (void)element_size; + (void)num_elements; #endif } @@ -88,20 +104,24 @@ static inline void rtc_MsanCheckInitialized(const volatile void* ptr, size_t num_elements) { #if RTC_HAS_MSAN __msan_check_mem_is_initialized(ptr, element_size * num_elements); +#else + (void)ptr; + (void)element_size; + (void)num_elements; #endif } #ifdef __cplusplus -namespace rtc { +namespace webrtc { namespace sanitizer_impl { template constexpr bool IsTriviallyCopyable() { - return static_cast(absl::is_trivially_copy_constructible::value && - (absl::is_trivially_copy_assignable::value || + return static_cast(std::is_trivially_copy_constructible::value && + (std::is_trivially_copy_assignable::value || !std::is_copy_assignable::value) && - absl::is_trivially_destructible::value); + std::is_trivially_destructible::value); } } // namespace sanitizer_impl @@ -137,7 +157,19 @@ inline void MsanCheckInitialized(const T& mem) { rtc_MsanCheckInitialized(mem.data(), sizeof(mem.data()[0]), mem.size()); } +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::AsanPoison; +using ::webrtc::AsanUnpoison; +using ::webrtc::MsanCheckInitialized; +using ::webrtc::MsanMarkUninitialized; +using ::webrtc::MsanUninitialized; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // __cplusplus diff --git a/rtc_base/sanitizer_unittest.cc b/rtc_base/sanitizer_unittest.cc index ffbfed7d22..e293a43c23 100644 --- a/rtc_base/sanitizer_unittest.cc +++ b/rtc_base/sanitizer_unittest.cc @@ -19,7 +19,7 @@ #include #endif -namespace rtc { +namespace webrtc { namespace { // Test sanitizer_impl::IsTriviallyCopyable (at compile time). 
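The C++ wrappers kept in sanitizer.h forward a container's `data()`, element size and `size()` to the C helpers above, so annotating a buffer is a one-liner; in builds without MSan/ASan they collapse to the no-op branches with their `(void)` casts. A small usage sketch, with an illustrative vector, assuming `MsanMarkUninitialized` mirrors the `MsanCheckInitialized` wrapper shown in this hunk:

#include <vector>

#include "rtc_base/sanitizer.h"

void SanitizerAnnotationSketch() {
  std::vector<int> samples(16);

  // Tell MemorySanitizer to treat the contents as uninitialized, e.g. before
  // handing the buffer to code that is expected to overwrite all of it.
  webrtc::MsanMarkUninitialized(samples);

  samples.assign(samples.size(), 0);  // Writing re-initializes the memory.

  // Ask MemorySanitizer to verify every byte is initialized; a no-op unless
  // the build actually has MSan enabled.
  webrtc::MsanCheckInitialized(samples);
}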
@@ -149,4 +149,4 @@ TEST(SanitizerTest, MsanUninitialized) { RTC_LOG(LS_INFO) << "read with no init passed"; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/server_socket_adapters.cc b/rtc_base/server_socket_adapters.cc index 673083c7cd..e7e0e6670f 100644 --- a/rtc_base/server_socket_adapters.cc +++ b/rtc_base/server_socket_adapters.cc @@ -14,7 +14,7 @@ #include "rtc_base/byte_buffer.h" -namespace rtc { +namespace webrtc { AsyncProxyServerSocket::AsyncProxyServerSocket(Socket* socket, size_t buffer_size) @@ -55,132 +55,4 @@ void AsyncSSLServerSocket::ProcessInput(char* data, size_t* len) { BufferInput(false); } -AsyncSocksProxyServerSocket::AsyncSocksProxyServerSocket(Socket* socket) - : AsyncProxyServerSocket(socket, kBufferSize), state_(SS_HELLO) { - BufferInput(true); -} - -void AsyncSocksProxyServerSocket::ProcessInput(char* data, size_t* len) { - RTC_DCHECK(state_ < SS_CONNECT_PENDING); - - ByteBufferReader response(data, *len); - if (state_ == SS_HELLO) { - HandleHello(&response); - } else if (state_ == SS_AUTH) { - HandleAuth(&response); - } else if (state_ == SS_CONNECT) { - HandleConnect(&response); - } - - // Consume parsed data - *len = response.Length(); - memmove(data, response.Data(), *len); -} - -void AsyncSocksProxyServerSocket::DirectSend(const ByteBufferWriter& buf) { - BufferedReadAdapter::DirectSend(buf.Data(), buf.Length()); -} - -void AsyncSocksProxyServerSocket::HandleHello(ByteBufferReader* request) { - uint8_t ver, num_methods; - if (!request->ReadUInt8(&ver) || !request->ReadUInt8(&num_methods)) { - Error(0); - return; - } - - if (ver != 5) { - Error(0); - return; - } - - // Handle either no-auth (0) or user/pass auth (2) - uint8_t method = 0xFF; - if (num_methods > 0 && !request->ReadUInt8(&method)) { - Error(0); - return; - } - - SendHelloReply(method); - if (method == 0) { - state_ = SS_CONNECT; - } else if (method == 2) { - state_ = SS_AUTH; - } else { - state_ = SS_ERROR; - } -} - -void AsyncSocksProxyServerSocket::SendHelloReply(uint8_t method) { - ByteBufferWriter response; - response.WriteUInt8(5); // Socks Version - response.WriteUInt8(method); // Auth method - DirectSend(response); -} - -void AsyncSocksProxyServerSocket::HandleAuth(ByteBufferReader* request) { - uint8_t ver, user_len, pass_len; - std::string user, pass; - if (!request->ReadUInt8(&ver) || !request->ReadUInt8(&user_len) || - !request->ReadString(&user, user_len) || !request->ReadUInt8(&pass_len) || - !request->ReadString(&pass, pass_len)) { - Error(0); - return; - } - - SendAuthReply(0); - state_ = SS_CONNECT; -} - -void AsyncSocksProxyServerSocket::SendAuthReply(uint8_t result) { - ByteBufferWriter response; - response.WriteUInt8(1); // Negotiation Version - response.WriteUInt8(result); - DirectSend(response); -} - -void AsyncSocksProxyServerSocket::HandleConnect(ByteBufferReader* request) { - uint8_t ver, command, reserved, addr_type; - uint32_t ip; - uint16_t port; - if (!request->ReadUInt8(&ver) || !request->ReadUInt8(&command) || - !request->ReadUInt8(&reserved) || !request->ReadUInt8(&addr_type) || - !request->ReadUInt32(&ip) || !request->ReadUInt16(&port)) { - Error(0); - return; - } - - if (ver != 5 || command != 1 || reserved != 0 || addr_type != 1) { - Error(0); - return; - } - - SignalConnectRequest(this, SocketAddress(ip, port)); - state_ = SS_CONNECT_PENDING; -} - -void AsyncSocksProxyServerSocket::SendConnectResult(int result, - const SocketAddress& addr) { - if (state_ != SS_CONNECT_PENDING) - return; - - ByteBufferWriter response; - 
response.WriteUInt8(5); // Socks version - response.WriteUInt8((result != 0)); // 0x01 is generic error - response.WriteUInt8(0); // reserved - response.WriteUInt8(1); // IPv4 address - response.WriteUInt32(addr.ip()); - response.WriteUInt16(addr.port()); - DirectSend(response); - BufferInput(false); - state_ = SS_TUNNEL; -} - -void AsyncSocksProxyServerSocket::Error(int error) { - state_ = SS_ERROR; - BufferInput(false); - Close(); - SetError(SOCKET_EACCES); - SignalCloseEvent(this, error); -} - -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/server_socket_adapters.h b/rtc_base/server_socket_adapters.h index b18c7a6a65..f9eeb1f963 100644 --- a/rtc_base/server_socket_adapters.h +++ b/rtc_base/server_socket_adapters.h @@ -13,7 +13,7 @@ #include "rtc_base/socket_adapters.h" -namespace rtc { +namespace webrtc { // Interface for implementing proxy server sockets. class AsyncProxyServerSocket : public BufferedReadAdapter { @@ -38,40 +38,15 @@ class AsyncSSLServerSocket : public BufferedReadAdapter { void ProcessInput(char* data, size_t* len) override; }; -// Implements a proxy server socket for the SOCKS protocol. -class AsyncSocksProxyServerSocket : public AsyncProxyServerSocket { - public: - explicit AsyncSocksProxyServerSocket(Socket* socket); - - AsyncSocksProxyServerSocket(const AsyncSocksProxyServerSocket&) = delete; - AsyncSocksProxyServerSocket& operator=(const AsyncSocksProxyServerSocket&) = - delete; - - private: - void ProcessInput(char* data, size_t* len) override; - void DirectSend(const ByteBufferWriter& buf); - - void HandleHello(ByteBufferReader* request); - void SendHelloReply(uint8_t method); - void HandleAuth(ByteBufferReader* request); - void SendAuthReply(uint8_t result); - void HandleConnect(ByteBufferReader* request); - void SendConnectResult(int result, const SocketAddress& addr) override; - - void Error(int error); - - static const int kBufferSize = 1024; - enum State { - SS_HELLO, - SS_AUTH, - SS_CONNECT, - SS_CONNECT_PENDING, - SS_TUNNEL, - SS_ERROR - }; - State state_; -}; +} // namespace webrtc +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::AsyncProxyServerSocket; +using ::webrtc::AsyncSSLServerSocket; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_SERVER_SOCKET_ADAPTERS_H_ diff --git a/rtc_base/sigslot_tester.h b/rtc_base/sigslot_tester.h deleted file mode 100644 index 92483c0b8d..0000000000 --- a/rtc_base/sigslot_tester.h +++ /dev/null @@ -1,245 +0,0 @@ -// This file was GENERATED by command: -// pump.py sigslottester.h.pump -// DO NOT EDIT BY HAND!!! - -/* - * Copyright 2014 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_SIGSLOT_TESTER_H_ -#define RTC_BASE_SIGSLOT_TESTER_H_ - -// To generate sigslottester.h from sigslottester.h.pump, execute: -// /home/build/google3/third_party/gtest/scripts/pump.py sigslottester.h.pump - -// SigslotTester(s) are utility classes to check if signals owned by an -// object are being invoked at the right time and with the right arguments. 
-// They are meant to be used in tests. Tests must provide "capture" pointers -// (i.e. address of variables) where the arguments from the signal callback -// can be stored. -// -// Example: -// /* Some signal */ -// sigslot::signal1 foo; -// -// /* We want to monitor foo in some test. Note how signal argument is -// const std::string&, but capture-type is std::string. Capture type -// must be type that can be assigned to. */ -// std::string capture; -// SigslotTester1 slot(&foo, &capture); -// foo.emit("hello"); -// EXPECT_EQ(1, slot.callback_count()); -// EXPECT_EQ("hello", capture); -// /* See unit-tests for more examples */ - -#include "rtc_base/third_party/sigslot/sigslot.h" - -namespace rtc { - -// Base version for testing signals that passes no arguments. -class SigslotTester0 : public sigslot::has_slots<> { - public: - explicit SigslotTester0(sigslot::signal0<>* signal) : callback_count_(0) { - signal->connect(this, &SigslotTester0::OnSignalCallback); - } - - SigslotTester0(const SigslotTester0&) = delete; - SigslotTester0& operator=(const SigslotTester0&) = delete; - - int callback_count() const { return callback_count_; } - - private: - void OnSignalCallback() { callback_count_++; } - int callback_count_; -}; - -// Versions below are for testing signals that pass arguments. For all the -// templates below: -// - A1-A5 is the type of the argument i in the callback. Signals may and often -// do use const-references here for efficiency. -// - C1-C5 is the type of the variable to capture argument i. These should be -// non-const value types suitable for use as lvalues. - -template -class SigslotTester1 : public sigslot::has_slots<> { - public: - SigslotTester1(sigslot::signal1* signal, C1* capture1) - : callback_count_(0), capture1_(capture1) { - signal->connect(this, &SigslotTester1::OnSignalCallback); - } - - SigslotTester1(const SigslotTester1&) = delete; - SigslotTester1& operator=(const SigslotTester1&) = delete; - - int callback_count() const { return callback_count_; } - - private: - void OnSignalCallback(A1 arg1) { - callback_count_++; - *capture1_ = arg1; - } - - int callback_count_; - C1* capture1_; -}; - -template -class SigslotTester2 : public sigslot::has_slots<> { - public: - SigslotTester2(sigslot::signal2* signal, C1* capture1, C2* capture2) - : callback_count_(0), capture1_(capture1), capture2_(capture2) { - signal->connect(this, &SigslotTester2::OnSignalCallback); - } - - SigslotTester2(const SigslotTester2&) = delete; - SigslotTester2& operator=(const SigslotTester2&) = delete; - - int callback_count() const { return callback_count_; } - - private: - void OnSignalCallback(A1 arg1, A2 arg2) { - callback_count_++; - *capture1_ = arg1; - *capture2_ = arg2; - } - - int callback_count_; - C1* capture1_; - C2* capture2_; -}; - -template -class SigslotTester3 : public sigslot::has_slots<> { - public: - SigslotTester3(sigslot::signal3* signal, - C1* capture1, - C2* capture2, - C3* capture3) - : callback_count_(0), - capture1_(capture1), - capture2_(capture2), - capture3_(capture3) { - signal->connect(this, &SigslotTester3::OnSignalCallback); - } - - SigslotTester3(const SigslotTester3&) = delete; - SigslotTester3& operator=(const SigslotTester3&) = delete; - - int callback_count() const { return callback_count_; } - - private: - void OnSignalCallback(A1 arg1, A2 arg2, A3 arg3) { - callback_count_++; - *capture1_ = arg1; - *capture2_ = arg2; - *capture3_ = arg3; - } - - int callback_count_; - C1* capture1_; - C2* capture2_; - C3* capture3_; -}; - -template -class 
SigslotTester4 : public sigslot::has_slots<> { - public: - SigslotTester4(sigslot::signal4* signal, - C1* capture1, - C2* capture2, - C3* capture3, - C4* capture4) - : callback_count_(0), - capture1_(capture1), - capture2_(capture2), - capture3_(capture3), - capture4_(capture4) { - signal->connect(this, &SigslotTester4::OnSignalCallback); - } - - SigslotTester4(const SigslotTester4&) = delete; - SigslotTester4& operator=(const SigslotTester4&) = delete; - - int callback_count() const { return callback_count_; } - - private: - void OnSignalCallback(A1 arg1, A2 arg2, A3 arg3, A4 arg4) { - callback_count_++; - *capture1_ = arg1; - *capture2_ = arg2; - *capture3_ = arg3; - *capture4_ = arg4; - } - - int callback_count_; - C1* capture1_; - C2* capture2_; - C3* capture3_; - C4* capture4_; -}; - -template -class SigslotTester5 : public sigslot::has_slots<> { - public: - SigslotTester5(sigslot::signal5* signal, - C1* capture1, - C2* capture2, - C3* capture3, - C4* capture4, - C5* capture5) - : callback_count_(0), - capture1_(capture1), - capture2_(capture2), - capture3_(capture3), - capture4_(capture4), - capture5_(capture5) { - signal->connect(this, &SigslotTester5::OnSignalCallback); - } - - SigslotTester5(const SigslotTester5&) = delete; - SigslotTester5& operator=(const SigslotTester5&) = delete; - - int callback_count() const { return callback_count_; } - - private: - void OnSignalCallback(A1 arg1, A2 arg2, A3 arg3, A4 arg4, A5 arg5) { - callback_count_++; - *capture1_ = arg1; - *capture2_ = arg2; - *capture3_ = arg3; - *capture4_ = arg4; - *capture5_ = arg5; - } - - int callback_count_; - C1* capture1_; - C2* capture2_; - C3* capture3_; - C4* capture4_; - C5* capture5_; -}; -} // namespace rtc - -#endif // RTC_BASE_SIGSLOT_TESTER_H_ diff --git a/rtc_base/sigslot_tester_unittest.cc b/rtc_base/sigslot_tester_unittest.cc deleted file mode 100644 index 6c5b12ec5f..0000000000 --- a/rtc_base/sigslot_tester_unittest.cc +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright 2014 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/sigslot_tester.h" - -#include - -#include "rtc_base/third_party/sigslot/sigslot.h" -#include "test/gtest.h" - -namespace rtc { - -TEST(SigslotTester, TestSignal1Arg) { - sigslot::signal1 source1; - int capture1; - SigslotTester1 slot1(&source1, &capture1); - EXPECT_EQ(0, slot1.callback_count()); - - source1.emit(10); - EXPECT_EQ(1, slot1.callback_count()); - EXPECT_EQ(10, capture1); - - source1.emit(20); - EXPECT_EQ(2, slot1.callback_count()); - EXPECT_EQ(20, capture1); -} - -TEST(SigslotTester, TestSignal2Args) { - sigslot::signal2 source2; - int capture1; - char capture2; - SigslotTester2 slot2(&source2, &capture1, &capture2); - EXPECT_EQ(0, slot2.callback_count()); - - source2.emit(10, 'x'); - EXPECT_EQ(1, slot2.callback_count()); - EXPECT_EQ(10, capture1); - EXPECT_EQ('x', capture2); - - source2.emit(20, 'y'); - EXPECT_EQ(2, slot2.callback_count()); - EXPECT_EQ(20, capture1); - EXPECT_EQ('y', capture2); -} - -// Since it applies for 1 and 2 args, we assume it will work for up to 5 args. 
- -TEST(SigslotTester, TestSignalWithConstReferenceArgs) { - sigslot::signal1 source1; - std::string capture1; - SigslotTester1 slot1(&source1, &capture1); - EXPECT_EQ(0, slot1.callback_count()); - source1.emit("hello"); - EXPECT_EQ(1, slot1.callback_count()); - EXPECT_EQ("hello", capture1); -} - -TEST(SigslotTester, TestSignalWithPointerToConstArgs) { - sigslot::signal1 source1; - const std::string* capture1; - SigslotTester1 slot1(&source1, - &capture1); - EXPECT_EQ(0, slot1.callback_count()); - source1.emit(nullptr); - EXPECT_EQ(1, slot1.callback_count()); - EXPECT_EQ(nullptr, capture1); -} - -TEST(SigslotTester, TestSignalWithConstPointerArgs) { - sigslot::signal1 source1; - std::string* capture1; - SigslotTester1 slot1(&source1, &capture1); - EXPECT_EQ(0, slot1.callback_count()); - source1.emit(nullptr); - EXPECT_EQ(1, slot1.callback_count()); - EXPECT_EQ(nullptr, capture1); -} - -} // namespace rtc diff --git a/rtc_base/sigslottester.h.pump b/rtc_base/sigslottester.h.pump deleted file mode 100755 index c3d2d6e99b..0000000000 --- a/rtc_base/sigslottester.h.pump +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2014 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_SIGSLOTTESTER_H_ -#define RTC_BASE_SIGSLOTTESTER_H_ - -// To generate sigslottester.h from sigslottester.h.pump, execute: -// /home/build/google3/third_party/gtest/scripts/pump.py sigslottester.h.pump - - -// SigslotTester(s) are utility classes to check if signals owned by an -// object are being invoked at the right time and with the right arguments. -// They are meant to be used in tests. Tests must provide "capture" pointers -// (i.e. address of variables) where the arguments from the signal callback -// can be stored. -// -// Example: -// /* Some signal */ -// sigslot::signal1 foo; -// -// /* We want to monitor foo in some test. Note how signal argument is -// const std::string&, but capture-type is std::string. Capture type -// must be type that can be assigned to. */ -// std::string capture; -// SigslotTester1 slot(&foo, &capture); -// foo.emit("hello"); -// EXPECT_EQ(1, slot.callback_count()); -// EXPECT_EQ("hello", capture); -// /* See unit-tests for more examples */ - -#include "rtc_base/third_party/sigslot/sigslot.h" - -namespace rtc { - -// Base version for testing signals that passes no arguments. -class SigslotTester0 : public sigslot::has_slots<> { - public: - explicit SigslotTester0(sigslot::signal0<>* signal) : callback_count_(0) { - signal->connect(this, &SigslotTester0::OnSignalCallback); - } - - SigslotTester0(const SigslotTester0&) = delete; - SigslotTester0& operator=(const SigslotTester0&) = delete; - - int callback_count() const { return callback_count_; } - - private: - void OnSignalCallback() { callback_count_++; } - int callback_count_; -}; - -// Versions below are for testing signals that pass arguments. For all the -// templates below: -// - A1-A5 is the type of the argument i in the callback. Signals may and often -// do use const-references here for efficiency. -// - C1-C5 is the type of the variable to capture argument i. These should be -// non-const value types suitable for use as lvalues. 
- -$var n = 5 -$range i 1..n -$for i [[ -$range j 1..i - -template <$for j , [[class A$j]], $for j , [[class C$j]]> -class SigslotTester$i : public sigslot::has_slots<> { - public: - SigslotTester$i(sigslot::signal$i<$for j , [[A$j]]>* signal, - $for j , [[C$j* capture$j]]) - : callback_count_(0), - $for j , [[capture$j[[]]_(capture$j)]] { - signal->connect(this, &SigslotTester$i::OnSignalCallback); - } - - SigslotTester$i(const SigslotTester$i&) = delete; - SigslotTester$i& operator=(const SigslotTester$i&) = delete; - - int callback_count() const { return callback_count_; } - - private: - void OnSignalCallback($for j , [[A$j arg$j]]) { - callback_count_++;$for j [[ - - *capture$j[[]]_ = arg$j;]] - - } - - int callback_count_;$for j [[ - - C$j* capture$j[[]]_;]] -}; - -]] -} // namespace rtc - -#endif // RTC_BASE_SIGSLOTTESTER_H_ diff --git a/rtc_base/socket.cc b/rtc_base/socket.cc index bcd62ad2a4..26a5081886 100644 --- a/rtc_base/socket.cc +++ b/rtc_base/socket.cc @@ -10,4 +10,24 @@ #include "rtc_base/socket.h" -namespace rtc {} // namespace rtc +#include + +#include "rtc_base/buffer.h" + +namespace webrtc { + +int Socket::RecvFrom(ReceiveBuffer& buffer) { + static constexpr int BUF_SIZE = 64 * 1024; + int64_t timestamp = -1; + buffer.payload.EnsureCapacity(BUF_SIZE); + int len = RecvFrom(buffer.payload.data(), buffer.payload.capacity(), + &buffer.source_address, ×tamp); + buffer.payload.SetSize(len > 0 ? len : 0); + if (len > 0 && timestamp != -1) { + buffer.arrival_time = Timestamp::Micros(timestamp); + } + + return len; +} + +} // namespace webrtc diff --git a/rtc_base/socket.h b/rtc_base/socket.h index 0ed3a7fa6a..ac220cb0e5 100644 --- a/rtc_base/socket.h +++ b/rtc_base/socket.h @@ -13,19 +13,26 @@ #include +#include +#include +#include + +// IWYU pragma: begin_exports #if defined(WEBRTC_POSIX) #include -#include -#include #include #define SOCKET_EACCES EACCES #endif - -#if defined(WEBRTC_WIN) -#include "rtc_base/win32.h" -#endif - +// IWYU pragma: end_exports + +#include "api/units/timestamp.h" +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/network/ecn_marking.h" #include "rtc_base/socket_address.h" +#include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" // Rather than converting errors into a private namespace, @@ -70,7 +77,7 @@ #define closesocket(s) close(s) #endif // WEBRTC_POSIX -namespace rtc { +namespace webrtc { inline bool IsBlockingError(int e) { return (e == EWOULDBLOCK) || (e == EAGAIN) || (e == EINPROGRESS); @@ -78,8 +85,16 @@ inline bool IsBlockingError(int e) { // General interface for the socket implementations of various networks. The // methods match those of normal UNIX sockets very closely. -class Socket { +class RTC_EXPORT Socket { public: + struct ReceiveBuffer { + ReceiveBuffer(Buffer& payload) : payload(payload) {} + + std::optional arrival_time; + SocketAddress source_address; + EcnMarking ecn = EcnMarking::kNotEct; + Buffer& payload; + }; virtual ~Socket() {} Socket(const Socket&) = delete; @@ -99,10 +114,18 @@ class Socket { virtual int SendTo(const void* pv, size_t cb, const SocketAddress& addr) = 0; // `timestamp` is in units of microseconds. virtual int Recv(void* pv, size_t cb, int64_t* timestamp) = 0; - virtual int RecvFrom(void* pv, - size_t cb, - SocketAddress* paddr, - int64_t* timestamp) = 0; + // TODO(webrtc:15368): Deprecate and remove. 
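The pointer-based `RecvFrom` overload that follows is kept only until callers migrate; new code is expected to use the `ReceiveBuffer` overload whose default implementation appears in socket.cc above. A usage sketch against that new signature; the socket reference stands in for whatever concrete implementation the caller already has:

#include "rtc_base/buffer.h"
#include "rtc_base/socket.h"

void ReadOneDatagram(webrtc::Socket& socket) {
  webrtc::Buffer payload;
  webrtc::Socket::ReceiveBuffer receive_buffer(payload);

  // Returns the number of bytes received, or a negative value on error.
  int len = socket.RecvFrom(receive_buffer);
  if (len <= 0) {
    return;
  }

  // On success the payload has been resized to `len` bytes, the sender is in
  // source_address, and arrival_time is set when the platform reported a
  // receive timestamp.
  const webrtc::SocketAddress& sender = receive_buffer.source_address;
  if (receive_buffer.arrival_time.has_value()) {
    // e.g. feed *receive_buffer.arrival_time into jitter or bandwidth
    // estimation bookkeeping.
  }
  (void)sender;
}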
+ virtual int RecvFrom(void* /* pv */, + size_t /* cb */, + SocketAddress* /* paddr */, + int64_t* /* timestamp */) { + // Not implemented. Use RecvFrom(ReceiveBuffer& buffer). + RTC_CHECK_NOTREACHED(); + } + // Intended to replace RecvFrom(void* ...). + // Default implementation calls RecvFrom(void* ...) with 64Kbyte buffer. + // Returns number of bytes received or a negative value on error. + virtual int RecvFrom(ReceiveBuffer& buffer); virtual int Listen(int backlog) = 0; virtual Socket* Accept(SocketAddress* paddr) = 0; virtual int Close() = 0; @@ -123,6 +146,13 @@ class Socket { OPT_RTP_SENDTIME_EXTN_ID, // This is a non-traditional socket option param. // This is specific to libjingle and will be used // if SendTime option is needed at socket level. + OPT_SEND_ECN, // 2-bit ECN + OPT_RECV_ECN, + OPT_KEEPALIVE, // Enable socket keep alive + OPT_TCP_KEEPCNT, // Set TCP keep alive count + OPT_TCP_KEEPIDLE, // Set TCP keep alive idle time in seconds + OPT_TCP_KEEPINTVL, // Set TCP keep alive interval in seconds + OPT_TCP_USER_TIMEOUT, // Set TCP user timeout }; virtual int GetOption(Option opt, int* value) = 0; virtual int SetOption(Option opt, int value) = 0; @@ -142,6 +172,15 @@ class Socket { Socket() {} }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::IsBlockingError; +using ::webrtc::Socket; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_SOCKET_H_ diff --git a/rtc_base/socket_adapters.cc b/rtc_base/socket_adapters.cc index 4ec93ae3e9..3cd523b5b8 100644 --- a/rtc_base/socket_adapters.cc +++ b/rtc_base/socket_adapters.cc @@ -21,12 +21,11 @@ #include "rtc_base/buffer.h" #include "rtc_base/byte_buffer.h" #include "rtc_base/checks.h" -#include "rtc_base/http_common.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/zero_memory.h" -namespace rtc { +namespace webrtc { BufferedReadAdapter::BufferedReadAdapter(Socket* socket, size_t size) : AsyncSocketAdapter(socket), @@ -216,450 +215,4 @@ void AsyncSSLSocket::ProcessInput(char* data, size_t* len) { SignalReadEvent(this); } -/////////////////////////////////////////////////////////////////////////////// - -AsyncHttpsProxySocket::AsyncHttpsProxySocket(Socket* socket, - absl::string_view user_agent, - const SocketAddress& proxy, - absl::string_view username, - const CryptString& password) - : BufferedReadAdapter(socket, 1024), - proxy_(proxy), - agent_(user_agent), - user_(username), - pass_(password), - force_connect_(false), - state_(PS_ERROR), - context_(0) {} - -AsyncHttpsProxySocket::~AsyncHttpsProxySocket() { - delete context_; -} - -int AsyncHttpsProxySocket::Connect(const SocketAddress& addr) { - int ret; - RTC_LOG(LS_VERBOSE) << "AsyncHttpsProxySocket::Connect(" - << proxy_.ToSensitiveString() << ")"; - dest_ = addr; - state_ = PS_INIT; - if (ShouldIssueConnect()) { - BufferInput(true); - } - ret = BufferedReadAdapter::Connect(proxy_); - // TODO: Set state_ appropriately if Connect fails. 
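The option enum above gains ECN and TCP keep-alive entries; whether a given option is honoured still depends on the concrete Socket implementation and the platform. A sketch of configuring them through the existing SetOption() interface, with illustrative values rather than recommendations, and assuming the usual convention that a non-zero return means the option was not applied:

#include "rtc_base/socket.h"

void ConfigureTcpKeepAlive(webrtc::Socket& socket) {
  socket.SetOption(webrtc::Socket::OPT_KEEPALIVE, 1);       // enable keep-alive
  socket.SetOption(webrtc::Socket::OPT_TCP_KEEPIDLE, 30);   // seconds idle before probing
  socket.SetOption(webrtc::Socket::OPT_TCP_KEEPINTVL, 10);  // seconds between probes
  socket.SetOption(webrtc::Socket::OPT_TCP_KEEPCNT, 3);     // probes before the connection drops
}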
- return ret; -} - -SocketAddress AsyncHttpsProxySocket::GetRemoteAddress() const { - return dest_; -} - -int AsyncHttpsProxySocket::Close() { - headers_.clear(); - state_ = PS_ERROR; - dest_.Clear(); - delete context_; - context_ = nullptr; - return BufferedReadAdapter::Close(); -} - -Socket::ConnState AsyncHttpsProxySocket::GetState() const { - if (state_ < PS_TUNNEL) { - return CS_CONNECTING; - } else if (state_ == PS_TUNNEL) { - return CS_CONNECTED; - } else { - return CS_CLOSED; - } -} - -void AsyncHttpsProxySocket::OnConnectEvent(Socket* socket) { - RTC_LOG(LS_VERBOSE) << "AsyncHttpsProxySocket::OnConnectEvent"; - if (!ShouldIssueConnect()) { - state_ = PS_TUNNEL; - BufferedReadAdapter::OnConnectEvent(socket); - return; - } - SendRequest(); -} - -void AsyncHttpsProxySocket::OnCloseEvent(Socket* socket, int err) { - RTC_LOG(LS_VERBOSE) << "AsyncHttpsProxySocket::OnCloseEvent(" << err << ")"; - if ((state_ == PS_WAIT_CLOSE) && (err == 0)) { - state_ = PS_ERROR; - Connect(dest_); - } else { - BufferedReadAdapter::OnCloseEvent(socket, err); - } -} - -void AsyncHttpsProxySocket::ProcessInput(char* data, size_t* len) { - size_t start = 0; - for (size_t pos = start; state_ < PS_TUNNEL && pos < *len;) { - if (state_ == PS_SKIP_BODY) { - size_t consume = std::min(*len - pos, content_length_); - pos += consume; - start = pos; - content_length_ -= consume; - if (content_length_ == 0) { - EndResponse(); - } - continue; - } - - if (data[pos++] != '\n') - continue; - - size_t length = pos - start - 1; - if ((length > 0) && (data[start + length - 1] == '\r')) - --length; - - data[start + length] = 0; - ProcessLine(data + start, length); - start = pos; - } - - *len -= start; - if (*len > 0) { - memmove(data, data + start, *len); - } - - if (state_ != PS_TUNNEL) - return; - - bool remainder = (*len > 0); - BufferInput(false); - SignalConnectEvent(this); - - // FIX: if SignalConnect causes the socket to be destroyed, we are in trouble - if (remainder) - SignalReadEvent(this); // TODO: signal this?? -} - -bool AsyncHttpsProxySocket::ShouldIssueConnect() const { - // TODO: Think about whether a more sophisticated test - // than dest port == 80 is needed. 
- return force_connect_ || (dest_.port() != 80); -} - -void AsyncHttpsProxySocket::SendRequest() { - rtc::StringBuilder ss; - ss << "CONNECT " << dest_.ToString() << " HTTP/1.0\r\n"; - ss << "User-Agent: " << agent_ << "\r\n"; - ss << "Host: " << dest_.HostAsURIString() << "\r\n"; - ss << "Content-Length: 0\r\n"; - ss << "Proxy-Connection: Keep-Alive\r\n"; - ss << headers_; - ss << "\r\n"; - std::string str = ss.str(); - DirectSend(str.c_str(), str.size()); - state_ = PS_LEADER; - expect_close_ = true; - content_length_ = 0; - headers_.clear(); - - RTC_LOG(LS_VERBOSE) << "AsyncHttpsProxySocket >> " << str; -} - -void AsyncHttpsProxySocket::ProcessLine(char* data, size_t len) { - RTC_LOG(LS_VERBOSE) << "AsyncHttpsProxySocket << " << data; - - if (len == 0) { - if (state_ == PS_TUNNEL_HEADERS) { - state_ = PS_TUNNEL; - } else if (state_ == PS_ERROR_HEADERS) { - Error(defer_error_); - return; - } else if (state_ == PS_SKIP_HEADERS) { - if (content_length_) { - state_ = PS_SKIP_BODY; - } else { - EndResponse(); - return; - } - } else { - if (!unknown_mechanisms_.empty()) { - RTC_LOG(LS_ERROR) << "Unsupported authentication methods: " - << unknown_mechanisms_; - } - // Unexpected end of headers - Error(0); - return; - } - } else if (state_ == PS_LEADER) { - unsigned int code; - if (sscanf(data, "HTTP/%*u.%*u %u", &code) != 1) { - Error(0); - return; - } - switch (code) { - case 200: - // connection good! - state_ = PS_TUNNEL_HEADERS; - return; -#if defined(HTTP_STATUS_PROXY_AUTH_REQ) && (HTTP_STATUS_PROXY_AUTH_REQ != 407) -#error Wrong code for HTTP_STATUS_PROXY_AUTH_REQ -#endif - case 407: // HTTP_STATUS_PROXY_AUTH_REQ - state_ = PS_AUTHENTICATE; - return; - default: - defer_error_ = 0; - state_ = PS_ERROR_HEADERS; - return; - } - } else if ((state_ == PS_AUTHENTICATE) && - absl::StartsWithIgnoreCase(data, "Proxy-Authenticate:")) { - std::string response, auth_method; - switch (HttpAuthenticate(absl::string_view(data + 19, len - 19), proxy_, - "CONNECT", "/", user_, pass_, context_, response, - auth_method)) { - case HAR_IGNORE: - RTC_LOG(LS_VERBOSE) << "Ignoring Proxy-Authenticate: " << auth_method; - if (!unknown_mechanisms_.empty()) - unknown_mechanisms_.append(", "); - unknown_mechanisms_.append(auth_method); - break; - case HAR_RESPONSE: - headers_ = "Proxy-Authorization: "; - headers_.append(response); - headers_.append("\r\n"); - state_ = PS_SKIP_HEADERS; - unknown_mechanisms_.clear(); - break; - case HAR_CREDENTIALS: - defer_error_ = SOCKET_EACCES; - state_ = PS_ERROR_HEADERS; - unknown_mechanisms_.clear(); - break; - case HAR_ERROR: - defer_error_ = 0; - state_ = PS_ERROR_HEADERS; - unknown_mechanisms_.clear(); - break; - } - } else if (absl::StartsWithIgnoreCase(data, "Content-Length:")) { - content_length_ = strtoul(data + 15, 0, 0); - } else if (absl::StartsWithIgnoreCase(data, "Proxy-Connection: Keep-Alive")) { - expect_close_ = false; - /* - } else if (absl::StartsWithIgnoreCase(data, "Connection: close") { - expect_close_ = true; - */ - } -} - -void AsyncHttpsProxySocket::EndResponse() { - if (!expect_close_) { - SendRequest(); - return; - } - - // No point in waiting for the server to close... 
let's close now - // TODO: Refactor out PS_WAIT_CLOSE - state_ = PS_WAIT_CLOSE; - BufferedReadAdapter::Close(); - OnCloseEvent(this, 0); -} - -void AsyncHttpsProxySocket::Error(int error) { - BufferInput(false); - Close(); - SetError(error); - SignalCloseEvent(this, error); -} - -/////////////////////////////////////////////////////////////////////////////// - -AsyncSocksProxySocket::AsyncSocksProxySocket(Socket* socket, - const SocketAddress& proxy, - absl::string_view username, - const CryptString& password) - : BufferedReadAdapter(socket, 1024), - state_(SS_ERROR), - proxy_(proxy), - user_(username), - pass_(password) {} - -AsyncSocksProxySocket::~AsyncSocksProxySocket() = default; - -int AsyncSocksProxySocket::Connect(const SocketAddress& addr) { - int ret; - dest_ = addr; - state_ = SS_INIT; - BufferInput(true); - ret = BufferedReadAdapter::Connect(proxy_); - // TODO: Set state_ appropriately if Connect fails. - return ret; -} - -SocketAddress AsyncSocksProxySocket::GetRemoteAddress() const { - return dest_; -} - -int AsyncSocksProxySocket::Close() { - state_ = SS_ERROR; - dest_.Clear(); - return BufferedReadAdapter::Close(); -} - -Socket::ConnState AsyncSocksProxySocket::GetState() const { - if (state_ < SS_TUNNEL) { - return CS_CONNECTING; - } else if (state_ == SS_TUNNEL) { - return CS_CONNECTED; - } else { - return CS_CLOSED; - } -} - -void AsyncSocksProxySocket::OnConnectEvent(Socket* socket) { - SendHello(); -} - -void AsyncSocksProxySocket::ProcessInput(char* data, size_t* len) { - RTC_DCHECK(state_ < SS_TUNNEL); - - ByteBufferReader response(data, *len); - - if (state_ == SS_HELLO) { - uint8_t ver, method; - if (!response.ReadUInt8(&ver) || !response.ReadUInt8(&method)) - return; - - if (ver != 5) { - Error(0); - return; - } - - if (method == 0) { - SendConnect(); - } else if (method == 2) { - SendAuth(); - } else { - Error(0); - return; - } - } else if (state_ == SS_AUTH) { - uint8_t ver, status; - if (!response.ReadUInt8(&ver) || !response.ReadUInt8(&status)) - return; - - if ((ver != 1) || (status != 0)) { - Error(SOCKET_EACCES); - return; - } - - SendConnect(); - } else if (state_ == SS_CONNECT) { - uint8_t ver, rep, rsv, atyp; - if (!response.ReadUInt8(&ver) || !response.ReadUInt8(&rep) || - !response.ReadUInt8(&rsv) || !response.ReadUInt8(&atyp)) - return; - - if ((ver != 5) || (rep != 0)) { - Error(0); - return; - } - - uint16_t port; - if (atyp == 1) { - uint32_t addr; - if (!response.ReadUInt32(&addr) || !response.ReadUInt16(&port)) - return; - RTC_LOG(LS_VERBOSE) << "Bound on " << addr << ":" << port; - } else if (atyp == 3) { - uint8_t length; - std::string addr; - if (!response.ReadUInt8(&length) || !response.ReadString(&addr, length) || - !response.ReadUInt16(&port)) - return; - RTC_LOG(LS_VERBOSE) << "Bound on " << addr << ":" << port; - } else if (atyp == 4) { - std::string addr; - if (!response.ReadString(&addr, 16) || !response.ReadUInt16(&port)) - return; - RTC_LOG(LS_VERBOSE) << "Bound on :" << port; - } else { - Error(0); - return; - } - - state_ = SS_TUNNEL; - } - - // Consume parsed data - *len = response.Length(); - memmove(data, response.Data(), *len); - - if (state_ != SS_TUNNEL) - return; - - bool remainder = (*len > 0); - BufferInput(false); - SignalConnectEvent(this); - - // FIX: if SignalConnect causes the socket to be destroyed, we are in trouble - if (remainder) - SignalReadEvent(this); // TODO: signal this?? 
-} - -void AsyncSocksProxySocket::SendHello() { - ByteBufferWriter request; - request.WriteUInt8(5); // Socks Version - if (user_.empty()) { - request.WriteUInt8(1); // Authentication Mechanisms - request.WriteUInt8(0); // No authentication - } else { - request.WriteUInt8(2); // Authentication Mechanisms - request.WriteUInt8(0); // No authentication - request.WriteUInt8(2); // Username/Password - } - DirectSend(request.Data(), request.Length()); - state_ = SS_HELLO; -} - -void AsyncSocksProxySocket::SendAuth() { - ByteBufferWriterT> request; - request.WriteUInt8(1); // Negotiation Version - request.WriteUInt8(static_cast(user_.size())); - request.WriteString(user_); // Username - request.WriteUInt8(static_cast(pass_.GetLength())); - size_t len = pass_.GetLength() + 1; - char* sensitive = new char[len]; - pass_.CopyTo(sensitive, true); - request.WriteBytes(sensitive, pass_.GetLength()); // Password - ExplicitZeroMemory(sensitive, len); - delete[] sensitive; - DirectSend(request.Data(), request.Length()); - state_ = SS_AUTH; -} - -void AsyncSocksProxySocket::SendConnect() { - ByteBufferWriter request; - request.WriteUInt8(5); // Socks Version - request.WriteUInt8(1); // CONNECT - request.WriteUInt8(0); // Reserved - if (dest_.IsUnresolvedIP()) { - std::string hostname = dest_.hostname(); - request.WriteUInt8(3); // DOMAINNAME - request.WriteUInt8(static_cast(hostname.size())); - request.WriteString(hostname); // Destination Hostname - } else { - request.WriteUInt8(1); // IPV4 - request.WriteUInt32(dest_.ip()); // Destination IP - } - request.WriteUInt16(dest_.port()); // Destination Port - DirectSend(request.Data(), request.Length()); - state_ = SS_CONNECT; -} - -void AsyncSocksProxySocket::Error(int error) { - state_ = SS_ERROR; - BufferInput(false); - Close(); - SetError(SOCKET_EACCES); - SignalCloseEvent(this, error); -} - -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/socket_adapters.h b/rtc_base/socket_adapters.h index e78ee18a27..2e0731e6b4 100644 --- a/rtc_base/socket_adapters.h +++ b/rtc_base/socket_adapters.h @@ -11,18 +11,15 @@ #ifndef RTC_BASE_SOCKET_ADAPTERS_H_ #define RTC_BASE_SOCKET_ADAPTERS_H_ -#include +#include +#include -#include "absl/strings/string_view.h" #include "api/array_view.h" #include "rtc_base/async_socket.h" -#include "rtc_base/crypt_string.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" -namespace rtc { - -struct HttpAuthContext; -class ByteBufferReader; -class ByteBufferWriter; +namespace webrtc { /////////////////////////////////////////////////////////////////////////////// @@ -77,102 +74,15 @@ class AsyncSSLSocket : public BufferedReadAdapter { void ProcessInput(char* data, size_t* len) override; }; -/////////////////////////////////////////////////////////////////////////////// - -// Implements a socket adapter that speaks the HTTP/S proxy protocol. -class AsyncHttpsProxySocket : public BufferedReadAdapter { - public: - AsyncHttpsProxySocket(Socket* socket, - absl::string_view user_agent, - const SocketAddress& proxy, - absl::string_view username, - const CryptString& password); - ~AsyncHttpsProxySocket() override; - - AsyncHttpsProxySocket(const AsyncHttpsProxySocket&) = delete; - AsyncHttpsProxySocket& operator=(const AsyncHttpsProxySocket&) = delete; - - // If connect is forced, the adapter will always issue an HTTP CONNECT to the - // target address. Otherwise, it will connect only if the destination port - // is not port 80. 
- void SetForceConnect(bool force) { force_connect_ = force; } - - int Connect(const SocketAddress& addr) override; - SocketAddress GetRemoteAddress() const override; - int Close() override; - ConnState GetState() const override; - - protected: - void OnConnectEvent(Socket* socket) override; - void OnCloseEvent(Socket* socket, int err) override; - void ProcessInput(char* data, size_t* len) override; - - bool ShouldIssueConnect() const; - void SendRequest(); - void ProcessLine(char* data, size_t len); - void EndResponse(); - void Error(int error); - - private: - SocketAddress proxy_, dest_; - std::string agent_, user_, headers_; - CryptString pass_; - bool force_connect_; - size_t content_length_; - int defer_error_; - bool expect_close_; - enum ProxyState { - PS_INIT, - PS_LEADER, - PS_AUTHENTICATE, - PS_SKIP_HEADERS, - PS_ERROR_HEADERS, - PS_TUNNEL_HEADERS, - PS_SKIP_BODY, - PS_TUNNEL, - PS_WAIT_CLOSE, - PS_ERROR - } state_; - HttpAuthContext* context_; - std::string unknown_mechanisms_; -}; - -/////////////////////////////////////////////////////////////////////////////// - -// Implements a socket adapter that speaks the SOCKS proxy protocol. -class AsyncSocksProxySocket : public BufferedReadAdapter { - public: - AsyncSocksProxySocket(Socket* socket, - const SocketAddress& proxy, - absl::string_view username, - const CryptString& password); - ~AsyncSocksProxySocket() override; - - AsyncSocksProxySocket(const AsyncSocksProxySocket&) = delete; - AsyncSocksProxySocket& operator=(const AsyncSocksProxySocket&) = delete; - - int Connect(const SocketAddress& addr) override; - SocketAddress GetRemoteAddress() const override; - int Close() override; - ConnState GetState() const override; - - protected: - void OnConnectEvent(Socket* socket) override; - void ProcessInput(char* data, size_t* len) override; - - void SendHello(); - void SendConnect(); - void SendAuth(); - void Error(int error); - - private: - enum State { SS_INIT, SS_HELLO, SS_AUTH, SS_CONNECT, SS_TUNNEL, SS_ERROR }; - State state_; - SocketAddress proxy_, dest_; - std::string user_; - CryptString pass_; -}; +} // namespace webrtc +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
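With the proxy sockets gone, socket_adapters.h keeps only `BufferedReadAdapter` and `AsyncSSLSocket`, and the re-export block below covers just those two. For reference, a minimal sketch of the subclassing pattern the remaining adapter supports, in the style of the classes in this file; the newline framing rule is purely illustrative:

#include <cstring>

#include "rtc_base/socket_adapters.h"

class ExampleFramingAdapter : public webrtc::BufferedReadAdapter {
 public:
  explicit ExampleFramingAdapter(webrtc::Socket* socket)
      : BufferedReadAdapter(socket, /*buffer_size=*/1024) {
    BufferInput(true);  // Start intercepting reads into the internal buffer.
  }

 protected:
  void ProcessInput(char* data, size_t* len) override {
    // Look for a complete, newline-terminated line in the buffered bytes.
    char* newline = static_cast<char*>(std::memchr(data, '\n', *len));
    if (newline == nullptr) {
      return;  // Incomplete; keep buffering.
    }
    size_t consumed = static_cast<size_t>(newline - data) + 1;
    // ... handle the line in [data, data + consumed) ...
    // Leave unconsumed bytes at the front and report how many remain, which
    // is the contract the adapters in this file follow.
    *len -= consumed;
    std::memmove(data, data + consumed, *len);
  }
};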
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::AsyncSSLSocket; +using ::webrtc::BufferedReadAdapter; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_SOCKET_ADAPTERS_H_ diff --git a/rtc_base/socket_address.cc b/rtc_base/socket_address.cc index 8601fc9040..1e7d9418b8 100644 --- a/rtc_base/socket_address.cc +++ b/rtc_base/socket_address.cc @@ -10,35 +10,28 @@ #include "rtc_base/socket_address.h" -#include "absl/strings/string_view.h" -#include "rtc_base/numerics/safe_conversions.h" +#include +#include +#include +#include +#include #if defined(WEBRTC_POSIX) -#include -#include -#include #if defined(OPENBSD) #include #endif #if !defined(__native_client__) -#include #endif -#include -#include -#include #endif +#include "absl/strings/string_view.h" #include "rtc_base/byte_order.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" +#include "rtc_base/ip_address.h" #include "rtc_base/net_helpers.h" +#include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/strings/string_builder.h" -#if defined(WEBRTC_WIN) -#include "rtc_base/win32.h" -#endif - -namespace rtc { +namespace webrtc { SocketAddress::SocketAddress() { Clear(); @@ -72,11 +65,11 @@ void SocketAddress::Clear() { } bool SocketAddress::IsNil() const { - return hostname_.empty() && IPIsUnspec(ip_) && 0 == port_; + return hostname_.empty() && webrtc::IPIsUnspec(ip_) && 0 == port_; } bool SocketAddress::IsComplete() const { - return (!IPIsAny(ip_)) && (0 != port_); + return (!webrtc::IPIsAny(ip_)) && (0 != port_); } SocketAddress& SocketAddress::operator=(const SocketAddress& addr) { @@ -104,7 +97,7 @@ void SocketAddress::SetIP(const IPAddress& ip) { void SocketAddress::SetIP(absl::string_view hostname) { hostname_ = std::string(hostname); - literal_ = IPFromString(hostname, &ip_); + literal_ = webrtc::IPFromString(hostname, &ip_); if (!literal_) { ip_ = IPAddress(); } @@ -122,7 +115,7 @@ void SocketAddress::SetResolvedIP(const IPAddress& ip) { } void SocketAddress::SetPort(int port) { - port_ = rtc::dchecked_cast(port); + port_ = webrtc::dchecked_cast(port); } uint32_t SocketAddress::ip() const { @@ -167,14 +160,14 @@ std::string SocketAddress::PortAsString() const { std::string SocketAddress::ToString() const { char buf[1024]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); sb << HostAsURIString() << ":" << port(); return sb.str(); } std::string SocketAddress::ToSensitiveString() const { char buf[1024]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); sb << HostAsSensitiveURIString() << ":" << port(); return sb.str(); } @@ -184,7 +177,7 @@ std::string SocketAddress::ToSensitiveNameAndAddressString() const { return ToSensitiveString(); } char buf[1024]; - rtc::SimpleStringBuilder sb(buf); + SimpleStringBuilder sb(buf); sb << HostAsSensitiveURIString() << ":" << port(); sb << " ("; if (ip_.family() == AF_INET6) { @@ -221,20 +214,20 @@ bool SocketAddress::FromString(absl::string_view str) { } bool SocketAddress::IsAnyIP() const { - return IPIsAny(ip_); + return webrtc::IPIsAny(ip_); } bool SocketAddress::IsLoopbackIP() const { - return IPIsLoopback(ip_) || - (IPIsAny(ip_) && 0 == strcmp(hostname_.c_str(), "localhost")); + return webrtc::IPIsLoopback(ip_) || + (webrtc::IPIsAny(ip_) && 0 == strcmp(hostname_.c_str(), "localhost")); } bool SocketAddress::IsPrivateIP() const { - return IPIsPrivate(ip_); + return webrtc::IPIsPrivate(ip_); } bool SocketAddress::IsUnresolvedIP() const { - return IPIsUnspec(ip_) && !literal_ && 
!hostname_.empty(); + return webrtc::IPIsUnspec(ip_) && !literal_ && !hostname_.empty(); } bool SocketAddress::operator==(const SocketAddress& addr) const { @@ -247,7 +240,8 @@ bool SocketAddress::operator<(const SocketAddress& addr) const { // We only check hostnames if both IPs are ANY or unspecified. This matches // EqualIPs(). - if ((IPIsAny(ip_) || IPIsUnspec(ip_)) && hostname_ != addr.hostname_) + if ((webrtc::IPIsAny(ip_) || webrtc::IPIsUnspec(ip_)) && + hostname_ != addr.hostname_) return hostname_ < addr.hostname_; return port_ < addr.port_; @@ -255,7 +249,8 @@ bool SocketAddress::operator<(const SocketAddress& addr) const { bool SocketAddress::EqualIPs(const SocketAddress& addr) const { return (ip_ == addr.ip_) && - ((!IPIsAny(ip_) && !IPIsUnspec(ip_)) || (hostname_ == addr.hostname_)); + ((!webrtc::IPIsAny(ip_) && !webrtc::IPIsUnspec(ip_)) || + (hostname_ == addr.hostname_)); } bool SocketAddress::EqualPorts(const SocketAddress& addr) const { @@ -264,7 +259,7 @@ bool SocketAddress::EqualPorts(const SocketAddress& addr) const { size_t SocketAddress::Hash() const { size_t h = 0; - h ^= HashIP(ip_); + h ^= webrtc::HashIP(ip_); h ^= port_ | (port_ << 16); return h; } @@ -276,8 +271,8 @@ void SocketAddress::ToSockAddr(sockaddr_in* saddr) const { return; } saddr->sin_family = AF_INET; - saddr->sin_port = HostToNetwork16(port_); - if (IPIsAny(ip_)) { + saddr->sin_port = webrtc::HostToNetwork16(port_); + if (webrtc::IPIsAny(ip_)) { saddr->sin_addr.s_addr = INADDR_ANY; } else { saddr->sin_addr = ip_.ipv4_address(); @@ -287,8 +282,8 @@ void SocketAddress::ToSockAddr(sockaddr_in* saddr) const { bool SocketAddress::FromSockAddr(const sockaddr_in& saddr) { if (saddr.sin_family != AF_INET) return false; - SetIP(NetworkToHost32(saddr.sin_addr.s_addr)); - SetPort(NetworkToHost16(saddr.sin_port)); + SetIP(webrtc::NetworkToHost32(saddr.sin_addr.s_addr)); + SetPort(webrtc::NetworkToHost16(saddr.sin_port)); literal_ = false; return true; } @@ -302,13 +297,13 @@ static size_t ToSockAddrStorageHelper(sockaddr_storage* addr, if (addr->ss_family == AF_INET6) { sockaddr_in6* saddr = reinterpret_cast(addr); saddr->sin6_addr = ip.ipv6_address(); - saddr->sin6_port = HostToNetwork16(port); + saddr->sin6_port = webrtc::HostToNetwork16(port); saddr->sin6_scope_id = scope_id; return sizeof(sockaddr_in6); } else if (addr->ss_family == AF_INET) { sockaddr_in* saddr = reinterpret_cast(addr); saddr->sin_addr = ip.ipv4_address(); - saddr->sin_port = HostToNetwork16(port); + saddr->sin_port = webrtc::HostToNetwork16(port); return sizeof(sockaddr_in); } return 0; @@ -330,12 +325,12 @@ bool SocketAddressFromSockAddrStorage(const sockaddr_storage& addr, if (addr.ss_family == AF_INET) { const sockaddr_in* saddr = reinterpret_cast(&addr); *out = SocketAddress(IPAddress(saddr->sin_addr), - NetworkToHost16(saddr->sin_port)); + webrtc::NetworkToHost16(saddr->sin_port)); return true; } else if (addr.ss_family == AF_INET6) { const sockaddr_in6* saddr = reinterpret_cast(&addr); *out = SocketAddress(IPAddress(saddr->sin6_addr), - NetworkToHost16(saddr->sin6_port)); + webrtc::NetworkToHost16(saddr->sin6_port)); out->SetScopeID(saddr->sin6_scope_id); return true; } @@ -351,4 +346,4 @@ SocketAddress EmptySocketAddressWithFamily(int family) { return SocketAddress(); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/socket_address.h b/rtc_base/socket_address.h index b58a6db3b7..99fd6a6e81 100644 --- a/rtc_base/socket_address.h +++ b/rtc_base/socket_address.h @@ -14,9 +14,6 @@ #include #include 
"absl/strings/string_view.h" -#ifdef WEBRTC_UNIT_TEST -#include // no-presubmit-check TODO(webrtc:8982) -#endif // WEBRTC_UNIT_TEST #include "rtc_base/ip_address.h" #include "rtc_base/system/rtc_export.h" @@ -25,7 +22,7 @@ struct sockaddr_in; struct sockaddr_storage; -namespace rtc { +namespace webrtc { // Records an IP address and port. class RTC_EXPORT SocketAddress { @@ -133,13 +130,6 @@ class RTC_EXPORT SocketAddress { // Parses hostname:port and [hostname]:port. bool FromString(absl::string_view str); -#ifdef WEBRTC_UNIT_TEST - inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) - std::ostream& os) { // no-presubmit-check TODO(webrtc:8982) - return os << HostAsURIString() << ":" << port(); - } -#endif // WEBRTC_UNIT_TEST - // Determines whether this represents a missing / any IP address. // That is, 0.0.0.0 or ::. // Hostname and/or port may be set. @@ -204,6 +194,16 @@ RTC_EXPORT bool SocketAddressFromSockAddrStorage(const sockaddr_storage& saddr, SocketAddress* out); SocketAddress EmptySocketAddressWithFamily(int family); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::EmptySocketAddressWithFamily; +using ::webrtc::SocketAddress; +using ::webrtc::SocketAddressFromSockAddrStorage; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_SOCKET_ADDRESS_H_ diff --git a/rtc_base/socket_address_pair.cc b/rtc_base/socket_address_pair.cc index 54f70ff17a..b97c086f02 100644 --- a/rtc_base/socket_address_pair.cc +++ b/rtc_base/socket_address_pair.cc @@ -10,7 +10,7 @@ #include "rtc_base/socket_address_pair.h" -namespace rtc { +namespace webrtc { SocketAddressPair::SocketAddressPair(const SocketAddress& src, const SocketAddress& dest) @@ -36,4 +36,4 @@ size_t SocketAddressPair::Hash() const { return src_.Hash() ^ dest_.Hash(); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/socket_address_pair.h b/rtc_base/socket_address_pair.h index f315e6460a..1da190c8c0 100644 --- a/rtc_base/socket_address_pair.h +++ b/rtc_base/socket_address_pair.h @@ -15,7 +15,7 @@ #include "rtc_base/socket_address.h" -namespace rtc { +namespace webrtc { // Records a pair (source,destination) of socket addresses. The two addresses // identify a connection between two machines. (For UDP, this "connection" is @@ -38,6 +38,14 @@ class SocketAddressPair { SocketAddress dest_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::SocketAddressPair; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_SOCKET_ADDRESS_PAIR_H_ diff --git a/rtc_base/socket_address_unittest.cc b/rtc_base/socket_address_unittest.cc index f10e43f148..e81fb6daa0 100644 --- a/rtc_base/socket_address_unittest.cc +++ b/rtc_base/socket_address_unittest.cc @@ -8,17 +8,17 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#if defined(WEBRTC_POSIX) -#include // for sockaddr_in -#endif +#include "rtc_base/socket_address.h" #include +#include + #include "rtc_base/ip_address.h" -#include "rtc_base/socket_address.h" +#include "rtc_base/net_helpers.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { const in6_addr kTestV6Addr = { {{0x20, 0x01, 0x0d, 0xb8, 0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x70, 0x80, @@ -363,4 +363,4 @@ TEST(SocketAddressTest, TestToSensitiveNameAndAddressString) { hostAndIpv6.ToSensitiveNameAndAddressString()); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/socket_factory.h b/rtc_base/socket_factory.h index 882b44faf2..2aa8acc7df 100644 --- a/rtc_base/socket_factory.h +++ b/rtc_base/socket_factory.h @@ -13,7 +13,7 @@ #include "rtc_base/socket.h" -namespace rtc { +namespace webrtc { class SocketFactory { public: @@ -23,6 +23,14 @@ class SocketFactory { virtual Socket* CreateSocket(int family, int type) = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::SocketFactory; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_SOCKET_FACTORY_H_ diff --git a/rtc_base/socket_server.h b/rtc_base/socket_server.h index bf1326dad9..a655271cdf 100644 --- a/rtc_base/socket_server.h +++ b/rtc_base/socket_server.h @@ -17,13 +17,15 @@ #include "rtc_base/event.h" #include "rtc_base/socket_factory.h" -namespace rtc { +namespace webrtc { +class NetworkBinderInterface; class Thread; +class ThreadManager; + // Needs to be forward declared because there's a circular dependency between // NetworkMonitor and Thread. // TODO(deadbeef): Fix this. -class NetworkBinderInterface; // Provides the ability to wait for activity on a set of sockets. The Thread // class provides a nice wrapper on a socket server. @@ -32,21 +34,21 @@ class NetworkBinderInterface; // notified of asynchronous I/O from this server's Wait method. class SocketServer : public SocketFactory { public: - static constexpr webrtc::TimeDelta kForever = rtc::Event::kForever; + static constexpr TimeDelta kForever = Event::kForever; static std::unique_ptr CreateDefault(); // When the socket server is installed into a Thread, this function is called // to allow the socket server to use the thread's message queue for any // messaging that it might need to perform. It is also called with a null // argument before the thread is destroyed. - virtual void SetMessageQueue(Thread* queue) {} + virtual void SetMessageQueue(Thread* /* queue */) {} // Sleeps until: // 1) `max_wait_duration` has elapsed (unless `max_wait_duration` == // `kForever`) // 2) WakeUp() is called // While sleeping, I/O is performed if process_io is true. - virtual bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) = 0; + virtual bool Wait(TimeDelta max_wait_duration, bool process_io) = 0; // Causes the current wait (if one is in progress) to wake up. virtual void WakeUp() = 0; @@ -62,6 +64,14 @@ class SocketServer : public SocketFactory { NetworkBinderInterface* network_binder_ = nullptr; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
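With SocketServer now living in namespace webrtc, kForever and TimeDelta no longer need qualification inside the class, as the hunk above shows. A purely illustrative subclass sketch of the Wait()/WakeUp() contract, assuming webrtc::Event is available as the kForever line implies; a real server would also create sockets and perform I/O during Wait().

  #include "rtc_base/event.h"
  #include "rtc_base/socket_server.h"

  // A do-nothing socket server: Wait() just blocks on an event until WakeUp()
  // fires or the deadline passes. Sketch only.
  class IdleSocketServer : public webrtc::SocketServer {
   public:
    webrtc::Socket* CreateSocket(int /*family*/, int /*type*/) override {
      return nullptr;  // Stub for the sketch; real servers create sockets here.
    }
    bool Wait(webrtc::TimeDelta max_wait_duration, bool /*process_io*/) override {
      // kForever is Event::kForever, so the same TimeDelta flows through.
      return wakeup_.Wait(max_wait_duration);
    }
    void WakeUp() override { wakeup_.Set(); }

   private:
    webrtc::Event wakeup_;
  };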
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::SocketServer; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_SOCKET_SERVER_H_ diff --git a/rtc_base/socket_stream.cc b/rtc_base/socket_stream.cc deleted file mode 100644 index 5c993ea233..0000000000 --- a/rtc_base/socket_stream.cc +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright 2010 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/socket_stream.h" - -#include "rtc_base/checks.h" -#include "rtc_base/socket.h" - -namespace rtc { - -SocketStream::SocketStream(Socket* socket) : socket_(nullptr) { - Attach(socket); -} - -SocketStream::~SocketStream() { - delete socket_; -} - -void SocketStream::Attach(Socket* socket) { - if (socket_) - delete socket_; - socket_ = socket; - if (socket_) { - socket_->SignalConnectEvent.connect(this, &SocketStream::OnConnectEvent); - socket_->SignalReadEvent.connect(this, &SocketStream::OnReadEvent); - socket_->SignalWriteEvent.connect(this, &SocketStream::OnWriteEvent); - socket_->SignalCloseEvent.connect(this, &SocketStream::OnCloseEvent); - } -} - -Socket* SocketStream::Detach() { - Socket* socket = socket_; - if (socket_) { - socket_->SignalConnectEvent.disconnect(this); - socket_->SignalReadEvent.disconnect(this); - socket_->SignalWriteEvent.disconnect(this); - socket_->SignalCloseEvent.disconnect(this); - socket_ = nullptr; - } - return socket; -} - -StreamState SocketStream::GetState() const { - RTC_DCHECK(socket_ != nullptr); - switch (socket_->GetState()) { - case Socket::CS_CONNECTED: - return SS_OPEN; - case Socket::CS_CONNECTING: - return SS_OPENING; - case Socket::CS_CLOSED: - default: - return SS_CLOSED; - } -} - -StreamResult SocketStream::Read(rtc::ArrayView buffer, - size_t& read, - int& error) { - RTC_DCHECK(socket_ != nullptr); - int result = socket_->Recv(buffer.data(), buffer.size(), nullptr); - if (result < 0) { - if (socket_->IsBlocking()) - return SR_BLOCK; - error = socket_->GetError(); - return SR_ERROR; - } - if ((result > 0) || (buffer.size() == 0)) { - read = result; - return SR_SUCCESS; - } - return SR_EOS; -} - -StreamResult SocketStream::Write(rtc::ArrayView data, - size_t& written, - int& error) { - RTC_DCHECK(socket_ != nullptr); - int result = socket_->Send(data.data(), data.size()); - if (result < 0) { - if (socket_->IsBlocking()) - return SR_BLOCK; - error = socket_->GetError(); - return SR_ERROR; - } - written = result; - return SR_SUCCESS; -} - -void SocketStream::Close() { - RTC_DCHECK(socket_ != nullptr); - socket_->Close(); -} - -void SocketStream::OnConnectEvent(Socket* socket) { - RTC_DCHECK(socket == socket_); - SignalEvent(this, SE_OPEN | SE_READ | SE_WRITE, 0); -} - -void SocketStream::OnReadEvent(Socket* socket) { - RTC_DCHECK(socket == socket_); - SignalEvent(this, SE_READ, 0); -} - -void SocketStream::OnWriteEvent(Socket* socket) { - RTC_DCHECK(socket == socket_); - SignalEvent(this, SE_WRITE, 0); -} - -void SocketStream::OnCloseEvent(Socket* socket, int err) { - RTC_DCHECK(socket == socket_); - SignalEvent(this, SE_CLOSE, err); -} - -} // namespace rtc diff --git a/rtc_base/socket_stream.h b/rtc_base/socket_stream.h deleted file mode 100644 index 
492cc42e96..0000000000 --- a/rtc_base/socket_stream.h +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2005 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_SOCKET_STREAM_H_ -#define RTC_BASE_SOCKET_STREAM_H_ - -#include - -#include "rtc_base/socket.h" -#include "rtc_base/stream.h" -#include "rtc_base/third_party/sigslot/sigslot.h" - -namespace rtc { - -/////////////////////////////////////////////////////////////////////////////// - -class SocketStream : public StreamInterface, public sigslot::has_slots<> { - public: - explicit SocketStream(Socket* socket); - ~SocketStream() override; - - SocketStream(const SocketStream&) = delete; - SocketStream& operator=(const SocketStream&) = delete; - - void Attach(Socket* socket); - Socket* Detach(); - - Socket* GetSocket() { return socket_; } - - StreamState GetState() const override; - - StreamResult Read(rtc::ArrayView buffer, - size_t& read, - int& error) override; - - StreamResult Write(rtc::ArrayView data, - size_t& written, - int& error) override; - - void Close() override; - - private: - void OnConnectEvent(Socket* socket); - void OnReadEvent(Socket* socket); - void OnWriteEvent(Socket* socket); - void OnCloseEvent(Socket* socket, int err); - - Socket* socket_; -}; - -/////////////////////////////////////////////////////////////////////////////// - -} // namespace rtc - -#endif // RTC_BASE_SOCKET_STREAM_H_ diff --git a/rtc_base/socket_unittest.cc b/rtc_base/socket_unittest.cc index 0a41a776ac..c7fd137505 100644 --- a/rtc_base/socket_unittest.cc +++ b/rtc_base/socket_unittest.cc @@ -17,14 +17,18 @@ #include #include #include +#include +#include #include "absl/memory/memory.h" #include "absl/strings/string_view.h" +#include "api/test/rtc_error_matchers.h" +#include "api/transport/ecn_marking.h" #include "rtc_base/arraysize.h" -#include "rtc_base/async_packet_socket.h" #include "rtc_base/async_udp_socket.h" #include "rtc_base/buffer.h" #include "rtc_base/gunit.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" #include "rtc_base/net_helpers.h" #include "rtc_base/net_test_helpers.h" @@ -36,8 +40,17 @@ #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/wait_until.h" -namespace rtc { +#define MAYBE_SKIP_IPV6 \ + if (!::webrtc::HasIPv6Enabled()) { \ + RTC_LOG(LS_INFO) << "No IPv6... skipping"; \ + return; \ + } + +namespace webrtc { using webrtc::testing::SSE_CLOSE; using webrtc::testing::SSE_ERROR; @@ -46,12 +59,6 @@ using webrtc::testing::SSE_READ; using webrtc::testing::SSE_WRITE; using webrtc::testing::StreamSink; -#define MAYBE_SKIP_IPV6 \ - if (!HasIPv6Enabled()) { \ - RTC_LOG(LS_INFO) << "No IPv6... skipping"; \ - return; \ - } - // Data size to be used in TcpInternal tests. 
static const size_t kTcpInternalDataSize = 1024 * 1024; // bytes @@ -233,11 +240,20 @@ void SocketTest::TestUdpSocketRecvTimestampUseRtcEpochIPv6() { UdpSocketRecvTimestampUseRtcEpoch(kIPv6Loopback); } +void SocketTest::TestSocketSendRecvWithEcnIPV4() { + SocketSendRecvWithEcn(kIPv4Loopback); +} + +void SocketTest::TestSocketSendRecvWithEcnIPV6() { + MAYBE_SKIP_IPV6; + SocketSendRecvWithEcn(kIPv6Loopback); +} + // For unbound sockets, GetLocalAddress / GetRemoteAddress return AF_UNSPEC // values on Windows, but an empty address of the same family on Linux/MacOS X. bool IsUnspecOrEmptyIP(const IPAddress& address) { #if !defined(WEBRTC_WIN) - return IPIsAny(address); + return webrtc::IPIsAny(address); #else return address.family() == AF_UNSPEC; #endif @@ -278,7 +294,10 @@ void SocketTest::ConnectInternal(const IPAddress& loopback) { EXPECT_FALSE(sink.Check(client.get(), SSE_CLOSE)); // Server has pending connection, accept it. - EXPECT_TRUE_WAIT((sink.Check(server.get(), SSE_READ)), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return (sink.Check(server.get(), SSE_READ)); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); std::unique_ptr accepted(server->Accept(&accept_addr)); ASSERT_TRUE(accepted); EXPECT_FALSE(accept_addr.IsNil()); @@ -290,7 +309,9 @@ void SocketTest::ConnectInternal(const IPAddress& loopback) { EXPECT_EQ(client->GetLocalAddress(), accepted->GetRemoteAddress()); // Connected from client perspective, check the addresses are correct. - EXPECT_EQ_WAIT(Socket::CS_CONNECTED, client->GetState(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return client->GetState(); }, + ::testing::Eq(Socket::CS_CONNECTED)), + webrtc::IsRtcOk()); EXPECT_TRUE(sink.Check(client.get(), SSE_OPEN)); EXPECT_FALSE(sink.Check(client.get(), SSE_CLOSE)); EXPECT_EQ(client->GetRemoteAddress(), server->GetLocalAddress()); @@ -327,7 +348,10 @@ void SocketTest::ConnectWithDnsLookupInternal(const IPAddress& loopback, EXPECT_FALSE(sink.Check(client.get(), SSE_CLOSE)); // Server has pending connection, accept it. - EXPECT_TRUE_WAIT((sink.Check(server.get(), SSE_READ)), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return (sink.Check(server.get(), SSE_READ)); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); std::unique_ptr accepted(server->Accept(&accept_addr)); ASSERT_TRUE(accepted); EXPECT_FALSE(accept_addr.IsNil()); @@ -339,7 +363,9 @@ void SocketTest::ConnectWithDnsLookupInternal(const IPAddress& loopback, EXPECT_EQ(client->GetLocalAddress(), accepted->GetRemoteAddress()); // Connected from client perspective, check the addresses are correct. - EXPECT_EQ_WAIT(Socket::CS_CONNECTED, client->GetState(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return client->GetState(); }, + ::testing::Eq(Socket::CS_CONNECTED)), + webrtc::IsRtcOk()); EXPECT_TRUE(sink.Check(client.get(), SSE_OPEN)); EXPECT_FALSE(sink.Check(client.get(), SSE_CLOSE)); EXPECT_EQ(client->GetRemoteAddress(), server->GetLocalAddress()); @@ -368,7 +394,9 @@ void SocketTest::ConnectFailInternal(const IPAddress& loopback) { EXPECT_EQ(0, client->Connect(bogus_addr)); // Wait for connection to fail (ECONNREFUSED). 
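The test hunks above and below all apply one mechanical rewrite: the EXPECT_*_WAIT macros from rtc_base/gunit.h become webrtc::WaitUntil plus a gMock matcher, checked with webrtc::IsRtcOk(). A condensed before/after copied from the pattern in the diff; the 5000 ms literal stands in for the removed kTimeout constant, and .timeout is the only WaitUntil option this section relies on.

  // Inside a test body; requires "test/wait_until.h", "test/gmock.h" and
  // "api/test/rtc_error_matchers.h".

  // Before:
  //   EXPECT_TRUE_WAIT(sink.Check(server.get(), SSE_READ), kTimeout);
  //   EXPECT_EQ_WAIT(Socket::CS_CONNECTED, client->GetState(), kTimeout);

  // After:
  EXPECT_THAT(
      webrtc::WaitUntil([&] { return sink.Check(server.get(), SSE_READ); },
                        ::testing::IsTrue()),
      webrtc::IsRtcOk());
  EXPECT_THAT(
      webrtc::WaitUntil([&] { return client->GetState(); },
                        ::testing::Eq(Socket::CS_CONNECTED),
                        {.timeout = webrtc::TimeDelta::Millis(5000)}),
      webrtc::IsRtcOk());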
- EXPECT_EQ_WAIT(Socket::CS_CLOSED, client->GetState(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return client->GetState(); }, + ::testing::Eq(Socket::CS_CLOSED)), + webrtc::IsRtcOk()); EXPECT_FALSE(sink.Check(client.get(), SSE_OPEN)); EXPECT_TRUE(sink.Check(client.get(), SSE_ERROR)); EXPECT_TRUE(client->GetRemoteAddress().IsNil()); @@ -402,14 +430,16 @@ void SocketTest::ConnectWithDnsLookupFailInternal(const IPAddress& loopback) { // Wait for connection to fail (EHOSTNOTFOUND). bool dns_lookup_finished = false; - WAIT_(client->GetState() == Socket::CS_CLOSED, kTimeout, dns_lookup_finished); + WAIT_(client->GetState() == Socket::CS_CLOSED, 5000, dns_lookup_finished); if (!dns_lookup_finished) { RTC_LOG(LS_WARNING) << "Skipping test; DNS resolution took longer than 5 " "seconds."; return; } - EXPECT_EQ_WAIT(Socket::CS_CLOSED, client->GetState(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return client->GetState(); }, + ::testing::Eq(Socket::CS_CLOSED)), + webrtc::IsRtcOk()); EXPECT_FALSE(sink.Check(client.get(), SSE_OPEN)); EXPECT_TRUE(sink.Check(client.get(), SSE_ERROR)); EXPECT_TRUE(client->GetRemoteAddress().IsNil()); @@ -456,7 +486,10 @@ void SocketTest::ConnectWhileNotClosedInternal(const IPAddress& loopback) { // Accept the original connection. SocketAddress accept_addr; - EXPECT_TRUE_WAIT((sink.Check(server.get(), SSE_READ)), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return (sink.Check(server.get(), SSE_READ)); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); std::unique_ptr accepted(server->Accept(&accept_addr)); ASSERT_TRUE(accepted); EXPECT_FALSE(accept_addr.IsNil()); @@ -465,7 +498,9 @@ void SocketTest::ConnectWhileNotClosedInternal(const IPAddress& loopback) { EXPECT_EQ(Socket::CS_CONNECTED, accepted->GetState()); EXPECT_EQ(server->GetLocalAddress(), accepted->GetLocalAddress()); EXPECT_EQ(client->GetLocalAddress(), accepted->GetRemoteAddress()); - EXPECT_EQ_WAIT(Socket::CS_CONNECTED, client->GetState(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return client->GetState(); }, + ::testing::Eq(Socket::CS_CONNECTED)), + webrtc::IsRtcOk()); EXPECT_EQ(client->GetRemoteAddress(), server->GetLocalAddress()); EXPECT_EQ(client->GetRemoteAddress(), accepted->GetLocalAddress()); @@ -498,11 +533,16 @@ void SocketTest::ServerCloseDuringConnectInternal(const IPAddress& loopback) { EXPECT_EQ(0, client->Connect(server->GetLocalAddress())); // Close down the server while the socket is in the accept queue. - EXPECT_TRUE_WAIT(sink.Check(server.get(), SSE_READ), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return sink.Check(server.get(), SSE_READ); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); server->Close(); // This should fail the connection for the client. Clean up. - EXPECT_EQ_WAIT(Socket::CS_CLOSED, client->GetState(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return client->GetState(); }, + ::testing::Eq(Socket::CS_CLOSED)), + webrtc::IsRtcOk()); EXPECT_TRUE(sink.Check(client.get(), SSE_ERROR)); client->Close(); } @@ -527,7 +567,10 @@ void SocketTest::ClientCloseDuringConnectInternal(const IPAddress& loopback) { EXPECT_EQ(0, client->Connect(server->GetLocalAddress())); // Close down the client while the socket is in the accept queue. - EXPECT_TRUE_WAIT(sink.Check(server.get(), SSE_READ), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return sink.Check(server.get(), SSE_READ); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); client->Close(); // The connection should still be able to be accepted. 
@@ -537,7 +580,9 @@ void SocketTest::ClientCloseDuringConnectInternal(const IPAddress& loopback) { EXPECT_EQ(Socket::CS_CONNECTED, accepted->GetState()); // The accepted socket should then close (possibly with err, timing-related) - EXPECT_EQ_WAIT(Socket::CS_CLOSED, accepted->GetState(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return accepted->GetState(); }, + ::testing::Eq(Socket::CS_CLOSED)), + webrtc::IsRtcOk()); EXPECT_TRUE(sink.Check(accepted.get(), SSE_CLOSE) || sink.Check(accepted.get(), SSE_ERROR)); @@ -565,13 +610,18 @@ void SocketTest::ServerCloseInternal(const IPAddress& loopback) { EXPECT_EQ(0, client->Connect(server->GetLocalAddress())); // Accept connection. - EXPECT_TRUE_WAIT((sink.Check(server.get(), SSE_READ)), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return (sink.Check(server.get(), SSE_READ)); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); std::unique_ptr accepted(server->Accept(&accept_addr)); ASSERT_TRUE(accepted); sink.Monitor(accepted.get()); // Both sides are now connected. - EXPECT_EQ_WAIT(Socket::CS_CONNECTED, client->GetState(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return client->GetState(); }, + ::testing::Eq(Socket::CS_CONNECTED)), + webrtc::IsRtcOk()); EXPECT_TRUE(sink.Check(client.get(), SSE_OPEN)); EXPECT_EQ(client->GetRemoteAddress(), accepted->GetLocalAddress()); EXPECT_EQ(accepted->GetRemoteAddress(), client->GetLocalAddress()); @@ -582,7 +632,10 @@ void SocketTest::ServerCloseInternal(const IPAddress& loopback) { EXPECT_EQ(Socket::CS_CLOSED, accepted->GetState()); // Expect that the client is notified, and has not yet closed. - EXPECT_TRUE_WAIT(sink.Check(client.get(), SSE_READ), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return sink.Check(client.get(), SSE_READ); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); EXPECT_FALSE(sink.Check(client.get(), SSE_CLOSE)); EXPECT_EQ(Socket::CS_CONNECTED, client->GetState()); @@ -592,7 +645,9 @@ void SocketTest::ServerCloseInternal(const IPAddress& loopback) { EXPECT_EQ('a', buffer[0]); // Now we should close, but the remote address will remain. - EXPECT_EQ_WAIT(Socket::CS_CLOSED, client->GetState(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return client->GetState(); }, + ::testing::Eq(Socket::CS_CLOSED)), + webrtc::IsRtcOk()); EXPECT_TRUE(sink.Check(client.get(), SSE_CLOSE)); EXPECT_FALSE(client->GetRemoteAddress().IsAnyIP()); @@ -640,13 +695,18 @@ void SocketTest::CloseInClosedCallbackInternal(const IPAddress& loopback) { EXPECT_EQ(0, client->Connect(server->GetLocalAddress())); // Accept connection. - EXPECT_TRUE_WAIT((sink.Check(server.get(), SSE_READ)), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return (sink.Check(server.get(), SSE_READ)); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); std::unique_ptr accepted(server->Accept(&accept_addr)); ASSERT_TRUE(accepted); sink.Monitor(accepted.get()); // Both sides are now connected. 
- EXPECT_EQ_WAIT(Socket::CS_CONNECTED, client->GetState(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return client->GetState(); }, + ::testing::Eq(Socket::CS_CONNECTED)), + webrtc::IsRtcOk()); EXPECT_TRUE(sink.Check(client.get(), SSE_OPEN)); EXPECT_EQ(client->GetRemoteAddress(), accepted->GetLocalAddress()); EXPECT_EQ(accepted->GetRemoteAddress(), client->GetLocalAddress()); @@ -660,7 +720,9 @@ void SocketTest::CloseInClosedCallbackInternal(const IPAddress& loopback) { EXPECT_EQ(Socket::CS_CONNECTED, client->GetState()); // Now we should be closed and invalidated - EXPECT_EQ_WAIT(Socket::CS_CLOSED, client->GetState(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return client->GetState(); }, + ::testing::Eq(Socket::CS_CLOSED)), + webrtc::IsRtcOk()); EXPECT_TRUE(sink.Check(client.get(), SSE_CLOSE)); EXPECT_TRUE(Socket::CS_CLOSED == client->GetState()); } @@ -698,7 +760,9 @@ void SocketTest::DeleteInReadCallbackInternal(const IPAddress& loopback) { // event. SocketDeleter deleter(std::move(socket2)); socket1->SignalReadEvent.connect(&deleter, &SocketDeleter::Delete); - EXPECT_TRUE_WAIT(deleter.deleted(), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return deleter.deleted(); }, ::testing::IsTrue()), + webrtc::IsRtcOk()); } void SocketTest::SocketServerWaitInternal(const IPAddress& loopback) { @@ -716,7 +780,10 @@ void SocketTest::SocketServerWaitInternal(const IPAddress& loopback) { EXPECT_EQ(0, server->Listen(5)); EXPECT_EQ(0, client->Connect(server->GetLocalAddress())); - EXPECT_TRUE_WAIT((sink.Check(server.get(), SSE_READ)), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return (sink.Check(server.get(), SSE_READ)); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); std::unique_ptr accepted(server->Accept(&accept_addr)); ASSERT_TRUE(accepted); @@ -725,7 +792,9 @@ void SocketTest::SocketServerWaitInternal(const IPAddress& loopback) { EXPECT_EQ(server->GetLocalAddress(), accepted->GetLocalAddress()); EXPECT_EQ(client->GetLocalAddress(), accepted->GetRemoteAddress()); - EXPECT_EQ_WAIT(Socket::CS_CONNECTED, client->GetState(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return client->GetState(); }, + ::testing::Eq(Socket::CS_CONNECTED)), + webrtc::IsRtcOk()); EXPECT_TRUE(sink.Check(client.get(), SSE_OPEN)); EXPECT_FALSE(sink.Check(client.get(), SSE_CLOSE)); EXPECT_EQ(client->GetRemoteAddress(), server->GetLocalAddress()); @@ -745,7 +814,10 @@ void SocketTest::SocketServerWaitInternal(const IPAddress& loopback) { EXPECT_FALSE(sink.Check(accepted.get(), SSE_READ)); // But should signal when process_io is true. - EXPECT_TRUE_WAIT((sink.Check(accepted.get(), SSE_READ)), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return (sink.Check(accepted.get(), SSE_READ)); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); EXPECT_LT(0, accepted->Recv(buf, 1024, nullptr)); } @@ -771,25 +843,30 @@ void SocketTest::TcpInternal(const IPAddress& loopback, EXPECT_EQ(0, receiver->Connect(server->GetLocalAddress())); // Accept connection which will be used for sending. - EXPECT_TRUE_WAIT((sink.Check(server.get(), SSE_READ)), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return (sink.Check(server.get(), SSE_READ)); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); std::unique_ptr sender(server->Accept(&accept_addr)); ASSERT_TRUE(sender); sink.Monitor(sender.get()); // Both sides are now connected. 
- EXPECT_EQ_WAIT(Socket::CS_CONNECTED, receiver->GetState(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return receiver->GetState(); }, + ::testing::Eq(Socket::CS_CONNECTED)), + webrtc::IsRtcOk()); EXPECT_TRUE(sink.Check(receiver.get(), SSE_OPEN)); EXPECT_EQ(receiver->GetRemoteAddress(), sender->GetLocalAddress()); EXPECT_EQ(sender->GetRemoteAddress(), receiver->GetLocalAddress()); // Create test data. - rtc::Buffer send_buffer(0, data_size); - rtc::Buffer recv_buffer(0, data_size); + Buffer send_buffer(0, data_size); + Buffer recv_buffer(0, data_size); for (size_t i = 0; i < data_size; ++i) { char ch = static_cast(i % 256); send_buffer.AppendData(&ch, sizeof(ch)); } - rtc::Buffer recved_data(0, data_size); + Buffer recved_data(0, data_size); // Send and receive a bunch of data. size_t sent_size = 0; @@ -830,7 +907,10 @@ void SocketTest::TcpInternal(const IPAddress& loopback, while (recv_buffer.size() < sent_size) { if (!readable) { // Wait until data is available. - EXPECT_TRUE_WAIT(sink.Check(receiver.get(), SSE_READ), kTimeout); + EXPECT_THAT(webrtc::WaitUntil( + [&] { return sink.Check(receiver.get(), SSE_READ); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); readable = true; recv_called = false; } @@ -859,7 +939,10 @@ void SocketTest::TcpInternal(const IPAddress& loopback, // Once all that we've sent has been received, expect to be able to send // again. if (!writable) { - ASSERT_TRUE_WAIT(sink.Check(sender.get(), SSE_WRITE), kTimeout); + ASSERT_THAT( + webrtc::WaitUntil([&] { return sink.Check(sender.get(), SSE_WRITE); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); writable = true; send_called = false; } @@ -872,7 +955,9 @@ void SocketTest::TcpInternal(const IPAddress& loopback, // Close down. sender->Close(); - EXPECT_EQ_WAIT(Socket::CS_CLOSED, receiver->GetState(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return receiver->GetState(); }, + ::testing::Eq(Socket::CS_CLOSED)), + webrtc::IsRtcOk()); EXPECT_TRUE(sink.Check(receiver.get(), SSE_CLOSE)); receiver->Close(); } @@ -897,19 +982,27 @@ void SocketTest::SingleFlowControlCallbackInternal(const IPAddress& loopback) { EXPECT_EQ(0, client->Connect(server->GetLocalAddress())); // Accept connection. - EXPECT_TRUE_WAIT((sink.Check(server.get(), SSE_READ)), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return (sink.Check(server.get(), SSE_READ)); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); std::unique_ptr accepted(server->Accept(&accept_addr)); ASSERT_TRUE(accepted); sink.Monitor(accepted.get()); // Both sides are now connected. - EXPECT_EQ_WAIT(Socket::CS_CONNECTED, client->GetState(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return client->GetState(); }, + ::testing::Eq(Socket::CS_CONNECTED)), + webrtc::IsRtcOk()); EXPECT_TRUE(sink.Check(client.get(), SSE_OPEN)); EXPECT_EQ(client->GetRemoteAddress(), accepted->GetLocalAddress()); EXPECT_EQ(accepted->GetRemoteAddress(), client->GetLocalAddress()); // Expect a writable callback from the connect. - EXPECT_TRUE_WAIT(sink.Check(accepted.get(), SSE_WRITE), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return sink.Check(accepted.get(), SSE_WRITE); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); // Fill the socket buffer. char buf[1024 * 16] = {0}; @@ -919,7 +1012,10 @@ void SocketTest::SingleFlowControlCallbackInternal(const IPAddress& loopback) { EXPECT_TRUE(accepted->IsBlocking()); // Wait until data is available. 
- EXPECT_TRUE_WAIT(sink.Check(client.get(), SSE_READ), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return sink.Check(client.get(), SSE_READ); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); // Pull data. for (int i = 0; i < sends; ++i) { @@ -927,14 +1023,17 @@ void SocketTest::SingleFlowControlCallbackInternal(const IPAddress& loopback) { } // Expect at least one additional writable callback. - EXPECT_TRUE_WAIT(sink.Check(accepted.get(), SSE_WRITE), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return sink.Check(accepted.get(), SSE_WRITE); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); // Adding data in response to the writeable callback shouldn't cause infinite // callbacks. int extras = 0; for (int i = 0; i < 100; ++i) { accepted->Send(&buf, arraysize(buf)); - rtc::Thread::Current()->ProcessMessages(1); + Thread::Current()->ProcessMessages(1); if (sink.Check(accepted.get(), SSE_WRITE)) { extras++; } @@ -947,7 +1046,7 @@ void SocketTest::SingleFlowControlCallbackInternal(const IPAddress& loopback) { } void SocketTest::UdpInternal(const IPAddress& loopback) { - SocketAddress empty = EmptySocketAddressWithFamily(loopback.family()); + SocketAddress empty = webrtc::EmptySocketAddressWithFamily(loopback.family()); // Test basic bind and connect behavior. Socket* socket = socket_factory_->CreateSocket(loopback.family(), SOCK_DGRAM); EXPECT_EQ(Socket::CS_CLOSED, socket->GetState()); @@ -993,7 +1092,7 @@ void SocketTest::UdpInternal(const IPAddress& loopback) { } void SocketTest::UdpReadyToSend(const IPAddress& loopback) { - SocketAddress empty = EmptySocketAddressWithFamily(loopback.family()); + SocketAddress empty = webrtc::EmptySocketAddressWithFamily(loopback.family()); // RFC 5737 - The blocks 192.0.2.0/24 (TEST-NET-1) ... are provided for use in // documentation. // RFC 3849 - 2001:DB8::/32 as a documentation-only prefix. 
@@ -1014,13 +1113,13 @@ void SocketTest::UdpReadyToSend(const IPAddress& loopback) { #if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID) send_buffer_size /= 2; #endif - client->SetOption(rtc::Socket::OPT_SNDBUF, send_buffer_size); + client->SetOption(Socket::OPT_SNDBUF, send_buffer_size); int error = 0; uint32_t start_ms = Time(); int sent_packet_num = 0; int expected_error = EWOULDBLOCK; - while (start_ms + kTimeout > Time()) { + while (start_ms + 5000 > Time()) { int ret = client->SendTo(test_packet.get(), test_packet_size, test_addr); ++sent_packet_num; if (ret != test_packet_size) { @@ -1034,7 +1133,9 @@ void SocketTest::UdpReadyToSend(const IPAddress& loopback) { } EXPECT_EQ(expected_error, error); EXPECT_FALSE(client->ready_to_send()); - EXPECT_TRUE_WAIT(client->ready_to_send(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return client->ready_to_send(); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); RTC_LOG(LS_INFO) << "Got SignalReadyToSend"; } @@ -1078,6 +1179,56 @@ void SocketTest::GetSetOptionsInternal(const IPAddress& loopback) { ASSERT_NE(-1, socket->SetOption(Socket::OPT_DSCP, desired_dscp)); ASSERT_NE(-1, socket->GetOption(Socket::OPT_DSCP, ¤t_dscp)); ASSERT_EQ(desired_dscp, current_dscp); + + int current_send_esn, desired_send_esn = 1; + ASSERT_NE(-1, socket->GetOption(Socket::OPT_SEND_ECN, ¤t_send_esn)); + ASSERT_NE(-1, socket->SetOption(Socket::OPT_SEND_ECN, desired_send_esn)); + ASSERT_NE(-1, socket->GetOption(Socket::OPT_SEND_ECN, ¤t_send_esn)); + ASSERT_EQ(current_send_esn, desired_send_esn); + + int current_recv_esn, desired_recv_esn = 1; + ASSERT_NE(-1, socket->GetOption(Socket::OPT_RECV_ECN, ¤t_recv_esn)); + ASSERT_NE(-1, socket->SetOption(Socket::OPT_RECV_ECN, desired_recv_esn)); + ASSERT_NE(-1, socket->GetOption(Socket::OPT_RECV_ECN, ¤t_recv_esn)); + ASSERT_EQ(current_recv_esn, desired_recv_esn); +#endif + + // Prepare on TCP specific options. + socket.reset(socket_factory_->CreateSocket(loopback.family(), SOCK_STREAM)); + socket->Bind(SocketAddress(loopback, 0)); + + // Check that we can set NODELAY on a TCP socket. + ASSERT_NE(-1, socket->SetOption(Socket::OPT_NODELAY, desired_nd)); + ASSERT_NE(-1, socket->GetOption(Socket::OPT_NODELAY, ¤t_nd)); + ASSERT_NE(0, current_nd); + + // Check TCP Keep Alive settings. 
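The new assertions above exercise the OPT_SEND_ECN / OPT_RECV_ECN options end to end. As a hedged sketch, this is how a caller would switch a UDP socket into ECN mode with the same values the test uses (on the send side, 1 = ECT(1), 2 = ECT(0), 3 = CE); error handling is elided and the helper name is illustrative.

  #include "rtc_base/socket.h"

  // `socket` is a UDP webrtc::Socket created through a SocketFactory.
  void EnableEcn(webrtc::Socket& socket) {
    // Mark outgoing datagrams as ECT(1).
    socket.SetOption(webrtc::Socket::OPT_SEND_ECN, 1);
    // Report the ECN field of incoming datagrams via Socket::ReceiveBuffer::ecn.
    socket.SetOption(webrtc::Socket::OPT_RECV_ECN, 1);
  }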
+ int current_kl, desired_kl = 1; + ASSERT_NE(-1, socket->SetOption(Socket::OPT_KEEPALIVE, desired_kl)); + ASSERT_NE(-1, socket->GetOption(Socket::OPT_KEEPALIVE, ¤t_kl)); + ASSERT_NE(0, current_kl); + + int current_kl_cnt, desired_kl_cnt = 3; + ASSERT_NE(-1, socket->SetOption(Socket::OPT_TCP_KEEPCNT, desired_kl_cnt)); + ASSERT_NE(-1, socket->GetOption(Socket::OPT_TCP_KEEPCNT, ¤t_kl_cnt)); + ASSERT_EQ(desired_kl_cnt, current_kl_cnt); + + int current_kl_idle, desired_kl_idle = 2; + ASSERT_NE(-1, socket->SetOption(Socket::OPT_TCP_KEEPIDLE, desired_kl_idle)); + ASSERT_NE(-1, socket->GetOption(Socket::OPT_TCP_KEEPIDLE, ¤t_kl_idle)); + ASSERT_EQ(desired_kl_idle, current_kl_idle); + + int current_kl_intvl, desired_kl_intvl = 2; + ASSERT_NE(-1, socket->SetOption(Socket::OPT_TCP_KEEPINTVL, desired_kl_intvl)); + ASSERT_NE(-1, + socket->GetOption(Socket::OPT_TCP_KEEPINTVL, ¤t_kl_intvl)); + ASSERT_EQ(desired_kl_intvl, current_kl_intvl); + +#if defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID) + int current_ut, desired_ut = 10; + ASSERT_NE(-1, socket->SetOption(Socket::OPT_TCP_USER_TIMEOUT, desired_ut)); + ASSERT_NE(-1, socket->GetOption(Socket::OPT_TCP_USER_TIMEOUT, ¤t_ut)); + ASSERT_EQ(desired_ut, current_ut); #endif } @@ -1092,11 +1243,14 @@ void SocketTest::SocketRecvTimestamp(const IPAddress& loopback) { int64_t send_time_1 = TimeMicros(); socket->SendTo("foo", 3, address); - int64_t recv_timestamp_1; // Wait until data is available. - EXPECT_TRUE_WAIT(sink.Check(socket.get(), SSE_READ), kTimeout); - char buffer[3]; - ASSERT_GT(socket->RecvFrom(buffer, 3, nullptr, &recv_timestamp_1), 0); + EXPECT_THAT( + webrtc::WaitUntil([&] { return sink.Check(socket.get(), SSE_READ); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); + Buffer buffer; + Socket::ReceiveBuffer receive_buffer_1(buffer); + ASSERT_GT(socket->RecvFrom(receive_buffer_1), 0); const int64_t kTimeBetweenPacketsMs = 100; Thread::SleepMs(kTimeBetweenPacketsMs); @@ -1104,19 +1258,23 @@ void SocketTest::SocketRecvTimestamp(const IPAddress& loopback) { int64_t send_time_2 = TimeMicros(); socket->SendTo("bar", 3, address); // Wait until data is available. - EXPECT_TRUE_WAIT(sink.Check(socket.get(), SSE_READ), kTimeout); - int64_t recv_timestamp_2; - ASSERT_GT(socket->RecvFrom(buffer, 3, nullptr, &recv_timestamp_2), 0); + EXPECT_THAT( + webrtc::WaitUntil([&] { return sink.Check(socket.get(), SSE_READ); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); + Socket::ReceiveBuffer receive_buffer_2(buffer); + ASSERT_GT(socket->RecvFrom(receive_buffer_2), 0); int64_t system_time_diff = send_time_2 - send_time_1; - int64_t recv_timestamp_diff = recv_timestamp_2 - recv_timestamp_1; + int64_t recv_timestamp_diff = + receive_buffer_2.arrival_time->us() - receive_buffer_1.arrival_time->us(); // Compare against the system time at the point of sending, because // SleepMs may not sleep for exactly the requested time. 
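The SocketRecvTimestamp rewrite above replaces the old four-argument RecvFrom() with Socket::ReceiveBuffer, which bundles the payload, arrival time and ECN marking. A small helper sketch under that API; treating arrival_time as an optional that may be unset is an assumption based on the ->us() dereference in the test.

  #include <cstdint>

  #include "rtc_base/buffer.h"
  #include "rtc_base/socket.h"

  // Receives one datagram into `payload` and returns its arrival time in
  // microseconds, or -1 if nothing was read or no timestamp was reported.
  int64_t RecvWithArrivalTime(webrtc::Socket& socket, webrtc::Buffer& payload) {
    webrtc::Socket::ReceiveBuffer receive_buffer(payload);
    if (socket.RecvFrom(receive_buffer) <= 0) {
      return -1;
    }
    return receive_buffer.arrival_time ? receive_buffer.arrival_time->us() : -1;
  }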
EXPECT_NEAR(system_time_diff, recv_timestamp_diff, 10000); } void SocketTest::UdpSocketRecvTimestampUseRtcEpoch(const IPAddress& loopback) { - SocketAddress empty = EmptySocketAddressWithFamily(loopback.family()); + SocketAddress empty = webrtc::EmptySocketAddressWithFamily(loopback.family()); std::unique_ptr socket( socket_factory_->CreateSocket(loopback.family(), SOCK_DGRAM)); ASSERT_EQ(socket->Bind(SocketAddress(loopback, 0)), 0); @@ -1132,13 +1290,62 @@ void SocketTest::UdpSocketRecvTimestampUseRtcEpoch(const IPAddress& loopback) { client2->SendTo("foo", 3, address); std::unique_ptr packet_1 = client1->NextPacket(10000); ASSERT_TRUE(packet_1 != nullptr); - EXPECT_NEAR(packet_1->packet_time_us, rtc::TimeMicros(), 1000'000); + EXPECT_NEAR(packet_1->packet_time->us(), TimeMicros(), 1000'000); Thread::SleepMs(100); client2->SendTo("bar", 3, address); std::unique_ptr packet_2 = client1->NextPacket(10000); ASSERT_TRUE(packet_2 != nullptr); - EXPECT_GT(packet_2->packet_time_us, packet_1->packet_time_us); - EXPECT_NEAR(packet_2->packet_time_us, rtc::TimeMicros(), 1000'000); + EXPECT_GT(packet_2->packet_time->us(), packet_1->packet_time->us()); + EXPECT_NEAR(packet_2->packet_time->us(), TimeMicros(), 1000'000); +} + +void SocketTest::SocketSendRecvWithEcn(const IPAddress& loopback) { + StreamSink sink; + std::unique_ptr socket( + socket_factory_->CreateSocket(loopback.family(), SOCK_DGRAM)); + EXPECT_EQ(0, socket->Bind(SocketAddress(loopback, 0))); + SocketAddress address = socket->GetLocalAddress(); + sink.Monitor(socket.get()); + Buffer buffer; + Socket::ReceiveBuffer receive_buffer(buffer); + + socket->SendTo("foo", 3, address); + EXPECT_THAT( + webrtc::WaitUntil([&] { return sink.Check(socket.get(), SSE_READ); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); + ASSERT_GT(socket->RecvFrom(receive_buffer), 0); + EXPECT_EQ(receive_buffer.ecn, EcnMarking::kNotEct); + + socket->SetOption(Socket::OPT_SEND_ECN, 1); // Ect(1) + socket->SetOption(Socket::OPT_RECV_ECN, 1); + + socket->SendTo("bar", 3, address); + EXPECT_THAT( + webrtc::WaitUntil([&] { return sink.Check(socket.get(), SSE_READ); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); + ASSERT_GT(socket->RecvFrom(receive_buffer), 0); + EXPECT_EQ(receive_buffer.ecn, EcnMarking::kEct1); + + socket->SetOption(Socket::OPT_SEND_ECN, 2); // Ect(0) + socket->SendTo("bar", 3, address); + EXPECT_THAT( + webrtc::WaitUntil([&] { return sink.Check(socket.get(), SSE_READ); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); + ASSERT_GT(socket->RecvFrom(receive_buffer), 0); + EXPECT_EQ(receive_buffer.ecn, EcnMarking::kEct0); + + socket->SetOption(Socket::OPT_SEND_ECN, 3); // Ce + socket->SendTo("bar", 3, address); + EXPECT_THAT( + webrtc::WaitUntil([&] { return sink.Check(socket.get(), SSE_READ); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); + ASSERT_GT(socket->RecvFrom(receive_buffer), 0); + EXPECT_EQ(receive_buffer.ecn, EcnMarking::kCe); } -} // namespace rtc + +} // namespace webrtc diff --git a/rtc_base/socket_unittest.h b/rtc_base/socket_unittest.h index db79be2eb8..48c7ee11d4 100644 --- a/rtc_base/socket_unittest.h +++ b/rtc_base/socket_unittest.h @@ -11,18 +11,21 @@ #ifndef RTC_BASE_SOCKET_UNITTEST_H_ #define RTC_BASE_SOCKET_UNITTEST_H_ +#include + #include "absl/strings/string_view.h" -#include "rtc_base/gunit.h" -#include "rtc_base/thread.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/socket_factory.h" +#include "test/gtest.h" -namespace rtc { +namespace webrtc { -// Generic socket tests, to be used when testing individual socketservers. 
+// Generic socket tests, to be used when testing individual socket servers. // Derive your specific test class from SocketTest, install your // socketserver, and call the SocketTest test methods. class SocketTest : public ::testing::Test { protected: - explicit SocketTest(rtc::SocketFactory* socket_factory) + explicit SocketTest(SocketFactory* socket_factory) : kIPv4Loopback(INADDR_LOOPBACK), kIPv6Loopback(in6addr_loopback), socket_factory_(socket_factory) {} @@ -64,8 +67,9 @@ class SocketTest : public ::testing::Test { void TestSocketRecvTimestampIPv6(); void TestUdpSocketRecvTimestampUseRtcEpochIPv4(); void TestUdpSocketRecvTimestampUseRtcEpochIPv6(); + void TestSocketSendRecvWithEcnIPV4(); + void TestSocketSendRecvWithEcnIPV6(); - static const int kTimeout = 5000; // ms const IPAddress kIPv4Loopback; const IPAddress kIPv6Loopback; @@ -95,6 +99,7 @@ class SocketTest : public ::testing::Test { void GetSetOptionsInternal(const IPAddress& loopback); void SocketRecvTimestamp(const IPAddress& loopback); void UdpSocketRecvTimestampUseRtcEpoch(const IPAddress& loopback); + void SocketSendRecvWithEcn(const IPAddress& loopback); SocketFactory* socket_factory_; }; @@ -103,6 +108,15 @@ class SocketTest : public ::testing::Test { // values on Windows, but an empty address of the same family on Linux/MacOS X. bool IsUnspecOrEmptyIP(const IPAddress& address); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::IsUnspecOrEmptyIP; +using ::webrtc::SocketTest; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_SOCKET_UNITTEST_H_ diff --git a/rtc_base/ssl_adapter.cc b/rtc_base/ssl_adapter.cc index ff936a79fb..098556da62 100644 --- a/rtc_base/ssl_adapter.cc +++ b/rtc_base/ssl_adapter.cc @@ -10,11 +10,14 @@ #include "rtc_base/ssl_adapter.h" +#include + #include "rtc_base/openssl_adapter.h" +#include "rtc_base/socket.h" /////////////////////////////////////////////////////////////////////////////// -namespace rtc { +namespace webrtc { std::unique_ptr SSLAdapterFactory::Create() { return std::make_unique(); @@ -36,4 +39,4 @@ bool CleanupSSL() { /////////////////////////////////////////////////////////////////////////////// -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/ssl_adapter.h b/rtc_base/ssl_adapter.h index 4b8b9c74e0..084e2be78f 100644 --- a/rtc_base/ssl_adapter.h +++ b/rtc_base/ssl_adapter.h @@ -11,17 +11,21 @@ #ifndef RTC_BASE_SSL_ADAPTER_H_ #define RTC_BASE_SSL_ADAPTER_H_ +#include #include #include #include "absl/strings/string_view.h" #include "rtc_base/async_socket.h" +#include "rtc_base/checks.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/system/rtc_export.h" -namespace rtc { +namespace webrtc { class SSLAdapter; @@ -75,8 +79,8 @@ class SSLAdapter : public AsyncSocketAdapter { virtual void SetAlpnProtocols(const std::vector& protos) = 0; virtual void SetEllipticCurves(const std::vector& curves) = 0; - // Do DTLS or TLS (default is TLS, if unspecified) - virtual void SetMode(SSLMode mode) = 0; + [[deprecated("Only TLS is supported by the adapter")]] virtual void SetMode( + SSLMode mode) = 0; // Specify a custom certificate verifier for SSL. 
virtual void SetCertVerifier(SSLCertificateVerifier* ssl_cert_verifier) = 0; @@ -119,6 +123,17 @@ RTC_EXPORT bool InitializeSSL(); // Call to cleanup additional threads, and also the main thread. RTC_EXPORT bool CleanupSSL(); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::CleanupSSL; +using ::webrtc::InitializeSSL; +using ::webrtc::SSLAdapter; +using ::webrtc::SSLAdapterFactory; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_SSL_ADAPTER_H_ diff --git a/rtc_base/ssl_adapter_unittest.cc b/rtc_base/ssl_adapter_unittest.cc index 2da59ddbb2..0861e9685e 100644 --- a/rtc_base/ssl_adapter_unittest.cc +++ b/rtc_base/ssl_adapter_unittest.cc @@ -13,58 +13,58 @@ #include #include #include +#include -#include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "rtc_base/gunit.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" #include "rtc_base/ip_address.h" -#include "rtc_base/message_digest.h" -#include "rtc_base/socket_stream.h" +#include "rtc_base/logging.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/stream.h" #include "rtc_base/string_encode.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" #include "rtc_base/virtual_socket_server.h" #include "test/gmock.h" +#include "test/gtest.h" +#include "test/wait_until.h" using ::testing::_; using ::testing::Return; -static const int kTimeout = 5000; +static const webrtc::TimeDelta kTimeout = webrtc::TimeDelta::Millis(5000); -static rtc::Socket* CreateSocket(const rtc::SSLMode& ssl_mode) { - rtc::SocketAddress address(rtc::IPAddress(INADDR_ANY), 0); +static webrtc::Socket* CreateSocket() { + webrtc::SocketAddress address(webrtc::IPAddress(INADDR_ANY), 0); - rtc::Socket* socket = rtc::Thread::Current()->socketserver()->CreateSocket( - address.family(), - (ssl_mode == rtc::SSL_MODE_DTLS) ? SOCK_DGRAM : SOCK_STREAM); + webrtc::Socket* socket = + webrtc::Thread::Current()->socketserver()->CreateSocket(address.family(), + SOCK_STREAM); socket->Bind(address); return socket; } -static std::string GetSSLProtocolName(const rtc::SSLMode& ssl_mode) { - return (ssl_mode == rtc::SSL_MODE_DTLS) ? "DTLS" : "TLS"; -} - // Simple mock for the certificate verifier. -class MockCertVerifier : public rtc::SSLCertificateVerifier { +class MockCertVerifier : public webrtc::SSLCertificateVerifier { public: virtual ~MockCertVerifier() = default; - MOCK_METHOD(bool, Verify, (const rtc::SSLCertificate&), (override)); + MOCK_METHOD(bool, Verify, (const webrtc::SSLCertificate&), (override)); }; // TODO(benwright) - Move to using INSTANTIATE_TEST_SUITE_P instead of using // duplicate test cases for simple parameter changes. 
-class SSLAdapterTestDummyClient : public sigslot::has_slots<> { +class SSLAdapterTestDummy : public sigslot::has_slots<> { public: - explicit SSLAdapterTestDummyClient(const rtc::SSLMode& ssl_mode) - : ssl_mode_(ssl_mode) { - rtc::Socket* socket = CreateSocket(ssl_mode_); + explicit SSLAdapterTestDummy() : socket_(CreateSocket()) {} + virtual ~SSLAdapterTestDummy() = default; - ssl_adapter_.reset(rtc::SSLAdapter::Create(socket)); - - ssl_adapter_->SetMode(ssl_mode_); + void CreateSSLAdapter(webrtc::Socket* socket, webrtc::SSLRole role) { + ssl_adapter_.reset(webrtc::SSLAdapter::Create(socket)); // Ignore any certificate errors for the purpose of testing. // Note: We do this only because we don't have a real certificate. @@ -72,16 +72,17 @@ class SSLAdapterTestDummyClient : public sigslot::has_slots<> { ssl_adapter_->SetIgnoreBadCert(true); ssl_adapter_->SignalReadEvent.connect( - this, &SSLAdapterTestDummyClient::OnSSLAdapterReadEvent); + this, &SSLAdapterTestDummy::OnSSLAdapterReadEvent); ssl_adapter_->SignalCloseEvent.connect( - this, &SSLAdapterTestDummyClient::OnSSLAdapterCloseEvent); + this, &SSLAdapterTestDummy::OnSSLAdapterCloseEvent); + ssl_adapter_->SetRole(role); } void SetIgnoreBadCert(bool ignore_bad_cert) { ssl_adapter_->SetIgnoreBadCert(ignore_bad_cert); } - void SetCertVerifier(rtc::SSLCertificateVerifier* ssl_cert_verifier) { + void SetCertVerifier(webrtc::SSLCertificateVerifier* ssl_cert_verifier) { ssl_adapter_->SetCertVerifier(ssl_cert_verifier); } @@ -93,40 +94,25 @@ class SSLAdapterTestDummyClient : public sigslot::has_slots<> { ssl_adapter_->SetEllipticCurves(curves); } - rtc::SocketAddress GetAddress() const { + webrtc::SocketAddress GetAddress() const { return ssl_adapter_->GetLocalAddress(); } - rtc::Socket::ConnState GetState() const { return ssl_adapter_->GetState(); } + webrtc::Socket::ConnState GetState() const { + return ssl_adapter_->GetState(); + } const std::string& GetReceivedData() const { return data_; } - int Connect(absl::string_view hostname, const rtc::SocketAddress& address) { - RTC_LOG(LS_INFO) << "Initiating connection with " << address.ToString(); - - int rv = ssl_adapter_->Connect(address); - - if (rv == 0) { - RTC_LOG(LS_INFO) << "Starting " << GetSSLProtocolName(ssl_mode_) - << " handshake with " << hostname; - - if (ssl_adapter_->StartSSL(hostname) != 0) { - return -1; - } - } - - return rv; - } - int Close() { return ssl_adapter_->Close(); } int Send(absl::string_view message) { - RTC_LOG(LS_INFO) << "Client sending '" << message << "'"; + RTC_LOG(LS_INFO) << "Sending '" << message << "'"; return ssl_adapter_->Send(message.data(), message.length()); } - void OnSSLAdapterReadEvent(rtc::Socket* socket) { + void OnSSLAdapterReadEvent(webrtc::Socket* socket) { char buffer[4096] = ""; // Read data received from the server and store it in our internal buffer. @@ -134,53 +120,67 @@ class SSLAdapterTestDummyClient : public sigslot::has_slots<> { if (read != -1) { buffer[read] = '\0'; - RTC_LOG(LS_INFO) << "Client received '" << buffer << "'"; + RTC_LOG(LS_INFO) << "Received '" << buffer << "'"; data_ += buffer; } } - void OnSSLAdapterCloseEvent(rtc::Socket* socket, int error) { + void OnSSLAdapterCloseEvent(webrtc::Socket* socket, int error) { // OpenSSLAdapter signals handshake failure with a close event, but without // closing the socket! Let's close the socket here. This way GetState() can // return CS_CLOSED after failure. 
- if (socket->GetState() != rtc::Socket::CS_CLOSED) { + if (socket->GetState() != webrtc::Socket::CS_CLOSED) { socket->Close(); } } - private: - const rtc::SSLMode ssl_mode_; - - std::unique_ptr ssl_adapter_; + protected: + std::unique_ptr ssl_adapter_; + std::unique_ptr socket_; + private: std::string data_; }; -class SSLAdapterTestDummyServer : public sigslot::has_slots<> { +class SSLAdapterTestDummyClient : public SSLAdapterTestDummy { public: - explicit SSLAdapterTestDummyServer(const rtc::SSLMode& ssl_mode, - const rtc::KeyParams& key_params) - : ssl_mode_(ssl_mode) { - // Generate a key pair and a certificate for this host. - ssl_identity_ = rtc::SSLIdentity::Create(GetHostname(), key_params); + explicit SSLAdapterTestDummyClient() : SSLAdapterTestDummy() { + CreateSSLAdapter(socket_.release(), webrtc::SSL_CLIENT); + } - server_socket_.reset(CreateSocket(ssl_mode_)); + int Connect(absl::string_view hostname, + const webrtc::SocketAddress& address) { + RTC_LOG(LS_INFO) << "Initiating connection with " << address.ToString(); + int rv = ssl_adapter_->Connect(address); - if (ssl_mode_ == rtc::SSL_MODE_TLS) { - server_socket_->SignalReadEvent.connect( - this, &SSLAdapterTestDummyServer::OnServerSocketReadEvent); + if (rv == 0) { + RTC_LOG(LS_INFO) << "Starting TLS handshake with " << hostname; - server_socket_->Listen(1); + if (ssl_adapter_->StartSSL(hostname) != 0) { + return -1; + } } - RTC_LOG(LS_INFO) << ((ssl_mode_ == rtc::SSL_MODE_DTLS) ? "UDP" : "TCP") - << " server listening on " - << server_socket_->GetLocalAddress().ToString(); + return rv; + } +}; + +class SSLAdapterTestDummyServer : public SSLAdapterTestDummy { + public: + explicit SSLAdapterTestDummyServer(const webrtc::KeyParams& key_params) + : SSLAdapterTestDummy(), + ssl_identity_(webrtc::SSLIdentity::Create(GetHostname(), key_params)) { + socket_->Listen(1); + socket_->SignalReadEvent.connect(this, + &SSLAdapterTestDummyServer::OnReadEvent); + + RTC_LOG(LS_INFO) << "TCP server listening on " + << socket_->GetLocalAddress().ToString(); } - rtc::SocketAddress GetAddress() const { - return server_socket_->GetLocalAddress(); + webrtc::SocketAddress GetAddress() const { + return socket_->GetLocalAddress(); } std::string GetHostname() const { @@ -189,129 +189,37 @@ class SSLAdapterTestDummyServer : public sigslot::has_slots<> { return "example.com"; } - const std::string& GetReceivedData() const { return data_; } - - int Send(absl::string_view message) { - if (ssl_stream_adapter_ == nullptr || - ssl_stream_adapter_->GetState() != rtc::SS_OPEN) { - // No connection yet. - return -1; - } - - RTC_LOG(LS_INFO) << "Server sending '" << message << "'"; - - size_t written; - int error; - - rtc::StreamResult r = ssl_stream_adapter_->Write( - rtc::MakeArrayView(reinterpret_cast(message.data()), - message.size()), - written, error); - if (r == rtc::SR_SUCCESS) { - return written; - } else { - return -1; - } - } - - void AcceptConnection(const rtc::SocketAddress& address) { - // Only a single connection is supported. - ASSERT_TRUE(ssl_stream_adapter_ == nullptr); - - // This is only for DTLS. - ASSERT_EQ(rtc::SSL_MODE_DTLS, ssl_mode_); - - // Transfer ownership of the socket to the SSLStreamAdapter object. - rtc::Socket* socket = server_socket_.release(); - - socket->Connect(address); - - DoHandshake(socket); - } - - void OnServerSocketReadEvent(rtc::Socket* socket) { - // Only a single connection is supported. 
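The refactor above folds the old client and server dummies into a shared SSLAdapterTestDummy base, but the client-side flow is still the same two-step dance: plain connect, then start the TLS handshake against the expected hostname. A condensed sketch of that flow as implemented in the new SSLAdapterTestDummyClient; the free-function framing is illustrative.

  #include "absl/strings/string_view.h"
  #include "rtc_base/socket_address.h"
  #include "rtc_base/ssl_adapter.h"

  int ConnectTls(webrtc::SSLAdapter& adapter,
                 absl::string_view hostname,
                 const webrtc::SocketAddress& address) {
    // 1) Plain TCP connect.
    int rv = adapter.Connect(address);
    if (rv != 0) {
      return rv;
    }
    // 2) Upgrade the connection: start the TLS handshake against `hostname`.
    return adapter.StartSSL(hostname) == 0 ? 0 : -1;
  }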
- ASSERT_TRUE(ssl_stream_adapter_ == nullptr); - - DoHandshake(server_socket_->Accept(nullptr)); - } - - void OnSSLStreamAdapterEvent(rtc::StreamInterface* stream, int sig, int err) { - if (sig & rtc::SE_READ) { - uint8_t buffer[4096] = ""; - size_t read; - int error; - - // Read data received from the client and store it in our internal - // buffer. - rtc::StreamResult r = stream->Read(buffer, read, error); - if (r == rtc::SR_SUCCESS) { - buffer[read] = '\0'; - // Here we assume that the buffer is interpretable as string. - char* buffer_as_char = reinterpret_cast(buffer); - RTC_LOG(LS_INFO) << "Server received '" << buffer_as_char << "'"; - data_ += buffer_as_char; - } + protected: + void OnReadEvent(webrtc::Socket* socket) { + CreateSSLAdapter(socket_->Accept(nullptr), webrtc::SSL_SERVER); + ssl_adapter_->SetIdentity(ssl_identity_->Clone()); + if (ssl_adapter_->StartSSL(GetHostname()) != 0) { + RTC_LOG(LS_ERROR) << "Starting SSL from server failed."; } } private: - void DoHandshake(rtc::Socket* socket) { - ssl_stream_adapter_ = rtc::SSLStreamAdapter::Create( - std::make_unique(socket)); - - ssl_stream_adapter_->SetMode(ssl_mode_); - ssl_stream_adapter_->SetServerRole(); - - // SSLStreamAdapter is normally used for peer-to-peer communication, but - // here we're testing communication between a client and a server - // (e.g. a WebRTC-based application and an RFC 5766 TURN server), where - // clients are not required to provide a certificate during handshake. - // Accordingly, we must disable client authentication here. - ssl_stream_adapter_->SetClientAuthEnabledForTesting(false); - - ssl_stream_adapter_->SetIdentity(ssl_identity_->Clone()); - - // Set a bogus peer certificate digest. - unsigned char digest[20]; - size_t digest_len = sizeof(digest); - ssl_stream_adapter_->SetPeerCertificateDigest(rtc::DIGEST_SHA_1, digest, - digest_len); - - ssl_stream_adapter_->StartSSL(); - - ssl_stream_adapter_->SignalEvent.connect( - this, &SSLAdapterTestDummyServer::OnSSLStreamAdapterEvent); - } - - const rtc::SSLMode ssl_mode_; - - std::unique_ptr server_socket_; - std::unique_ptr ssl_stream_adapter_; - - std::unique_ptr ssl_identity_; - - std::string data_; + std::unique_ptr ssl_identity_; }; class SSLAdapterTestBase : public ::testing::Test, public sigslot::has_slots<> { public: - explicit SSLAdapterTestBase(const rtc::SSLMode& ssl_mode, - const rtc::KeyParams& key_params) - : ssl_mode_(ssl_mode), - vss_(new rtc::VirtualSocketServer()), + explicit SSLAdapterTestBase(const webrtc::KeyParams& key_params) + : vss_(new webrtc::VirtualSocketServer()), thread_(vss_.get()), - server_(new SSLAdapterTestDummyServer(ssl_mode_, key_params)), - client_(new SSLAdapterTestDummyClient(ssl_mode_)), - handshake_wait_(kTimeout) {} + server_(new SSLAdapterTestDummyServer(key_params)), + client_(new SSLAdapterTestDummyClient()), + handshake_wait_(webrtc::TimeDelta::Millis(kTimeout.ms())) {} - void SetHandshakeWait(int wait) { handshake_wait_ = wait; } + void SetHandshakeWait(int wait) { + handshake_wait_ = webrtc::TimeDelta::Millis(wait); + } void SetIgnoreBadCert(bool ignore_bad_cert) { client_->SetIgnoreBadCert(ignore_bad_cert); } - void SetCertVerifier(rtc::SSLCertificateVerifier* ssl_cert_verifier) { + void SetCertVerifier(webrtc::SSLCertificateVerifier* ssl_cert_verifier) { client_->SetCertVerifier(ssl_cert_verifier); } @@ -326,8 +234,8 @@ class SSLAdapterTestBase : public ::testing::Test, public sigslot::has_slots<> { void SetMockCertVerifier(bool return_value) { auto mock_verifier = std::make_unique(); 
EXPECT_CALL(*mock_verifier, Verify(_)).WillRepeatedly(Return(return_value)); - cert_verifier_ = - std::unique_ptr(std::move(mock_verifier)); + cert_verifier_ = std::unique_ptr( + std::move(mock_verifier)); SetIgnoreBadCert(false); SetCertVerifier(cert_verifier_.get()); @@ -337,34 +245,32 @@ class SSLAdapterTestBase : public ::testing::Test, public sigslot::has_slots<> { int rv; // The initial state is CS_CLOSED - ASSERT_EQ(rtc::Socket::CS_CLOSED, client_->GetState()); + ASSERT_EQ(webrtc::Socket::CS_CLOSED, client_->GetState()); rv = client_->Connect(server_->GetHostname(), server_->GetAddress()); ASSERT_EQ(0, rv); // Now the state should be CS_CONNECTING - ASSERT_EQ(rtc::Socket::CS_CONNECTING, client_->GetState()); - - if (ssl_mode_ == rtc::SSL_MODE_DTLS) { - // For DTLS, call AcceptConnection() with the client's address. - server_->AcceptConnection(client_->GetAddress()); - } + ASSERT_EQ(webrtc::Socket::CS_CONNECTING, client_->GetState()); if (expect_success) { // If expecting success, the client should end up in the CS_CONNECTED // state after handshake. - EXPECT_EQ_WAIT(rtc::Socket::CS_CONNECTED, client_->GetState(), - handshake_wait_); + EXPECT_THAT(webrtc::WaitUntil([&] { return client_->GetState(); }, + ::testing::Eq(webrtc::Socket::CS_CONNECTED), + {.timeout = handshake_wait_}), + webrtc::IsRtcOk()); - RTC_LOG(LS_INFO) << GetSSLProtocolName(ssl_mode_) - << " handshake complete."; + RTC_LOG(LS_INFO) << "TLS handshake complete."; } else { // On handshake failure the client should end up in the CS_CLOSED state. - EXPECT_EQ_WAIT(rtc::Socket::CS_CLOSED, client_->GetState(), - handshake_wait_); + EXPECT_THAT(webrtc::WaitUntil([&] { return client_->GetState(); }, + ::testing::Eq(webrtc::Socket::CS_CLOSED), + {.timeout = handshake_wait_}), + webrtc::IsRtcOk()); - RTC_LOG(LS_INFO) << GetSSLProtocolName(ssl_mode_) << " handshake failed."; + RTC_LOG(LS_INFO) << "TLS handshake failed."; } } @@ -375,55 +281,43 @@ class SSLAdapterTestBase : public ::testing::Test, public sigslot::has_slots<> { ASSERT_EQ(static_cast(message.length()), rv); // The server should have received the client's message. - EXPECT_EQ_WAIT(message, server_->GetReceivedData(), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return server_->GetReceivedData(); }, + ::testing::Eq(message), {.timeout = kTimeout}), + webrtc::IsRtcOk()); rv = server_->Send(message); ASSERT_EQ(static_cast(message.length()), rv); // The client should have received the server's message. 
- EXPECT_EQ_WAIT(message, client_->GetReceivedData(), kTimeout); + EXPECT_THAT( + webrtc::WaitUntil([&] { return client_->GetReceivedData(); }, + ::testing::Eq(message), {.timeout = kTimeout}), + webrtc::IsRtcOk()); RTC_LOG(LS_INFO) << "Transfer complete."; } protected: - const rtc::SSLMode ssl_mode_; - - std::unique_ptr vss_; - rtc::AutoSocketServerThread thread_; + std::unique_ptr vss_; + webrtc::AutoSocketServerThread thread_; std::unique_ptr server_; std::unique_ptr client_; - std::unique_ptr cert_verifier_; + std::unique_ptr cert_verifier_; - int handshake_wait_; + webrtc::TimeDelta handshake_wait_; }; class SSLAdapterTestTLS_RSA : public SSLAdapterTestBase { public: - SSLAdapterTestTLS_RSA() - : SSLAdapterTestBase(rtc::SSL_MODE_TLS, rtc::KeyParams::RSA()) {} + SSLAdapterTestTLS_RSA() : SSLAdapterTestBase(webrtc::KeyParams::RSA()) {} }; class SSLAdapterTestTLS_ECDSA : public SSLAdapterTestBase { public: - SSLAdapterTestTLS_ECDSA() - : SSLAdapterTestBase(rtc::SSL_MODE_TLS, rtc::KeyParams::ECDSA()) {} + SSLAdapterTestTLS_ECDSA() : SSLAdapterTestBase(webrtc::KeyParams::ECDSA()) {} }; -class SSLAdapterTestDTLS_RSA : public SSLAdapterTestBase { - public: - SSLAdapterTestDTLS_RSA() - : SSLAdapterTestBase(rtc::SSL_MODE_DTLS, rtc::KeyParams::RSA()) {} -}; - -class SSLAdapterTestDTLS_ECDSA : public SSLAdapterTestBase { - public: - SSLAdapterTestDTLS_ECDSA() - : SSLAdapterTestBase(rtc::SSL_MODE_DTLS, rtc::KeyParams::ECDSA()) {} -}; - -// Basic tests: TLS - // Test that handshake works, using RSA TEST_F(SSLAdapterTestTLS_RSA, TestTLSConnect) { TestHandshake(true); @@ -484,7 +378,7 @@ TEST_F(SSLAdapterTestTLS_RSA, TestTLSTransferWithBlockedSocket) { // Note that this may not occur immediately since there may be some amount of // intermediate buffering (either in our code or in BoringSSL). for (int i = 0; i < 1024; ++i) { - std::string message = "Hello, world: " + rtc::ToString(i); + std::string message = "Hello, world: " + absl::StrCat(i); rv = client_->Send(message); if (rv != static_cast(message.size())) { // This test assumes either the whole message or none of it is sent. @@ -503,14 +397,18 @@ TEST_F(SSLAdapterTestTLS_RSA, TestTLSTransferWithBlockedSocket) { // Unblock the underlying socket. All of the buffered messages should be sent // without any further action. vss_->SetSendingBlocked(false); - EXPECT_EQ_WAIT(expected, server_->GetReceivedData(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return server_->GetReceivedData(); }, + ::testing::Eq(expected), {.timeout = kTimeout}), + webrtc::IsRtcOk()); // Send another message. This previously wasn't working std::string final_message = "Fin."; expected += final_message; EXPECT_EQ(static_cast(final_message.size()), client_->Send(final_message)); - EXPECT_EQ_WAIT(expected, server_->GetReceivedData(), kTimeout); + EXPECT_THAT(webrtc::WaitUntil([&] { return server_->GetReceivedData(); }, + ::testing::Eq(expected), {.timeout = kTimeout}), + webrtc::IsRtcOk()); } // Test transfer between client and server, using ECDSA @@ -542,69 +440,3 @@ TEST_F(SSLAdapterTestTLS_ECDSA, TestTLSEllipticCurves) { TestHandshake(true); TestTransfer("Hello, world!"); } - -// Basic tests: DTLS - -// Test that handshake works, using RSA -TEST_F(SSLAdapterTestDTLS_RSA, TestDTLSConnect) { - TestHandshake(true); -} - -// Test that handshake works with a custom verifier that returns true. DTLS_RSA. 
-TEST_F(SSLAdapterTestDTLS_RSA, TestDTLSConnectCustomCertVerifierSucceeds) { - SetMockCertVerifier(/*return_value=*/true); - TestHandshake(/*expect_success=*/true); -} - -// Test that handshake fails with a custom verifier that returns false. -// DTLS_RSA. -TEST_F(SSLAdapterTestDTLS_RSA, TestTLSConnectCustomCertVerifierFails) { - SetMockCertVerifier(/*return_value=*/false); - TestHandshake(/*expect_success=*/false); -} - -// Test that handshake works, using ECDSA -TEST_F(SSLAdapterTestDTLS_ECDSA, TestDTLSConnect) { - TestHandshake(true); -} - -// Test that handshake works with a custom verifier that returns true. -// DTLS_ECDSA. -TEST_F(SSLAdapterTestDTLS_ECDSA, TestDTLSConnectCustomCertVerifierSucceeds) { - SetMockCertVerifier(/*return_value=*/true); - TestHandshake(/*expect_success=*/true); -} - -// Test that handshake fails with a custom verifier that returns false. -// DTLS_ECDSA. -TEST_F(SSLAdapterTestDTLS_ECDSA, TestTLSConnectCustomCertVerifierFails) { - SetMockCertVerifier(/*return_value=*/false); - TestHandshake(/*expect_success=*/false); -} - -// Test transfer between client and server, using RSA -TEST_F(SSLAdapterTestDTLS_RSA, TestDTLSTransfer) { - TestHandshake(true); - TestTransfer("Hello, world!"); -} - -// Test transfer between client and server, using RSA with custom cert verifier. -TEST_F(SSLAdapterTestDTLS_RSA, TestDTLSTransferCustomCertVerifier) { - SetMockCertVerifier(/*return_value=*/true); - TestHandshake(/*expect_success=*/true); - TestTransfer("Hello, world!"); -} - -// Test transfer between client and server, using ECDSA -TEST_F(SSLAdapterTestDTLS_ECDSA, TestDTLSTransfer) { - TestHandshake(true); - TestTransfer("Hello, world!"); -} - -// Test transfer between client and server, using ECDSA with custom cert -// verifier. -TEST_F(SSLAdapterTestDTLS_ECDSA, TestDTLSTransferCustomCertVerifier) { - SetMockCertVerifier(/*return_value=*/true); - TestHandshake(/*expect_success=*/true); - TestTransfer("Hello, world!"); -} diff --git a/rtc_base/ssl_certificate.cc b/rtc_base/ssl_certificate.cc index d1fd57fca5..7db106cd6b 100644 --- a/rtc_base/ssl_certificate.cc +++ b/rtc_base/ssl_certificate.cc @@ -10,23 +10,29 @@ #include "rtc_base/ssl_certificate.h" +#include +#include #include #include #include +#include #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" -#include "rtc_base/checks.h" +#include "api/array_view.h" +#include "rtc_base/buffer.h" #include "rtc_base/openssl.h" +// IWYU pragma: begin_keep #ifdef OPENSSL_IS_BORINGSSL #include "rtc_base/boringssl_identity.h" #else #include "rtc_base/openssl_identity.h" #endif +// IWYU pragma: end_keep +#include "rtc_base/base64.h" #include "rtc_base/ssl_fingerprint.h" -#include "rtc_base/third_party/base64/base64.h" -namespace rtc { +namespace webrtc { ////////////////////////////////////////////////////////////////////// // SSLCertificateStats @@ -74,8 +80,8 @@ std::unique_ptr SSLCertificate::GetStats() const { Buffer der_buffer; ToDER(&der_buffer); - std::string der_base64; - Base64::EncodeFromArray(der_buffer.data(), der_buffer.size(), &der_base64); + ArrayView der_view(der_buffer); + std::string der_base64 = Base64Encode(der_view); return std::make_unique(std::move(fingerprint), std::move(digest_algorithm), @@ -132,8 +138,8 @@ std::unique_ptr SSLCertificate::FromPEMString( #ifdef OPENSSL_IS_BORINGSSL return BoringSSLCertificate::FromPEMString(pem_string); #else - return OpenSSLCertificate::FromPEMString(pem_string); + return webrtc::OpenSSLCertificate::FromPEMString(pem_string); #endif } -} // namespace 
rtc +} // namespace webrtc diff --git a/rtc_base/ssl_certificate.h b/rtc_base/ssl_certificate.h index 2e198800c4..825953b470 100644 --- a/rtc_base/ssl_certificate.h +++ b/rtc_base/ssl_certificate.h @@ -26,7 +26,7 @@ #include "rtc_base/buffer.h" #include "rtc_base/system/rtc_export.h" -namespace rtc { +namespace webrtc { struct RTC_EXPORT SSLCertificateStats { SSLCertificateStats(std::string&& fingerprint, @@ -77,9 +77,7 @@ class RTC_EXPORT SSLCertificate { // Compute the digest of the certificate given algorithm virtual bool ComputeDigest(absl::string_view algorithm, - unsigned char* digest, - size_t size, - size_t* length) const = 0; + Buffer& digest) const = 0; // Returns the time in seconds relative to epoch, 1970-01-01T00:00:00Z (UTC), // or -1 if an expiration time could not be retrieved. @@ -137,6 +135,17 @@ class SSLCertificateVerifier { virtual bool Verify(const SSLCertificate& certificate) = 0; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::SSLCertChain; +using ::webrtc::SSLCertificate; +using ::webrtc::SSLCertificateStats; +using ::webrtc::SSLCertificateVerifier; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_SSL_CERTIFICATE_H_ diff --git a/rtc_base/ssl_fingerprint.cc b/rtc_base/ssl_fingerprint.cc index a43bb159c3..daca55873d 100644 --- a/rtc_base/ssl_fingerprint.cc +++ b/rtc_base/ssl_fingerprint.cc @@ -12,6 +12,7 @@ #include +#include #include #include #include @@ -19,6 +20,7 @@ #include "absl/algorithm/container.h" #include "absl/strings/string_view.h" #include "api/array_view.h" +#include "rtc_base/buffer.h" #include "rtc_base/logging.h" #include "rtc_base/message_digest.h" #include "rtc_base/rtc_certificate.h" @@ -26,31 +28,28 @@ #include "rtc_base/ssl_identity.h" #include "rtc_base/string_encode.h" -namespace rtc { +namespace webrtc { SSLFingerprint* SSLFingerprint::Create(absl::string_view algorithm, - const rtc::SSLIdentity* identity) { + const SSLIdentity* identity) { return CreateUnique(algorithm, *identity).release(); } std::unique_ptr SSLFingerprint::CreateUnique( absl::string_view algorithm, - const rtc::SSLIdentity& identity) { + const SSLIdentity& identity) { return Create(algorithm, identity.certificate()); } std::unique_ptr SSLFingerprint::Create( absl::string_view algorithm, - const rtc::SSLCertificate& cert) { - uint8_t digest_val[64]; - size_t digest_len; - bool ret = cert.ComputeDigest(algorithm, digest_val, sizeof(digest_val), - &digest_len); + const SSLCertificate& cert) { + Buffer digest(0, MessageDigest::kMaxSize); + bool ret = cert.ComputeDigest(algorithm, digest); if (!ret) { return nullptr; } - return std::make_unique( - algorithm, ArrayView(digest_val, digest_len)); + return std::make_unique(algorithm, digest); } SSLFingerprint* SSLFingerprint::CreateFromRfc4572( @@ -62,15 +61,15 @@ SSLFingerprint* SSLFingerprint::CreateFromRfc4572( std::unique_ptr SSLFingerprint::CreateUniqueFromRfc4572( absl::string_view algorithm, absl::string_view fingerprint) { - if (algorithm.empty() || !rtc::IsFips180DigestAlgorithm(algorithm)) + if (algorithm.empty() || !webrtc::IsFips180DigestAlgorithm(algorithm)) return nullptr; if (fingerprint.empty()) return nullptr; - char value[rtc::MessageDigest::kMaxSize]; + char value[MessageDigest::kMaxSize]; size_t value_len = - rtc::hex_decode_with_delimiter(ArrayView(value), fingerprint, 
':'); + hex_decode_with_delimiter(ArrayView(value), fingerprint, ':'); if (!value_len) return nullptr; @@ -111,7 +110,7 @@ bool SSLFingerprint::operator==(const SSLFingerprint& other) const { } std::string SSLFingerprint::GetRfc4572Fingerprint() const { - std::string fingerprint = rtc::hex_encode_with_delimiter( + std::string fingerprint = hex_encode_with_delimiter( absl::string_view(digest.data(), digest.size()), ':'); absl::c_transform(fingerprint, fingerprint.begin(), ::toupper); return fingerprint; @@ -124,4 +123,4 @@ std::string SSLFingerprint::ToString() const { return fp_str; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/ssl_fingerprint.h b/rtc_base/ssl_fingerprint.h index cfa26dd433..9c9524f1f9 100644 --- a/rtc_base/ssl_fingerprint.h +++ b/rtc_base/ssl_fingerprint.h @@ -14,30 +14,30 @@ #include #include +#include #include #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_identity.h" #include "rtc_base/system/rtc_export.h" -namespace rtc { - -class RTCCertificate; -class SSLCertificate; -class SSLIdentity; +namespace webrtc { struct RTC_EXPORT SSLFingerprint { // TODO(steveanton): Remove once downstream projects have moved off of this. static SSLFingerprint* Create(absl::string_view algorithm, - const rtc::SSLIdentity* identity); + const SSLIdentity* identity); // TODO(steveanton): Rename to Create once projects have migrated. static std::unique_ptr CreateUnique( absl::string_view algorithm, - const rtc::SSLIdentity& identity); + const SSLIdentity& identity); - static std::unique_ptr Create( - absl::string_view algorithm, - const rtc::SSLCertificate& cert); + static std::unique_ptr Create(absl::string_view algorithm, + const SSLCertificate& cert); // TODO(steveanton): Remove once downstream projects have moved off of this. static SSLFingerprint* CreateFromRfc4572(absl::string_view algorithm, @@ -69,9 +69,17 @@ struct RTC_EXPORT SSLFingerprint { std::string ToString() const; std::string algorithm; - rtc::CopyOnWriteBuffer digest; + CopyOnWriteBuffer digest; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
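The fingerprint code above now sizes a webrtc::Buffer to MessageDigest::kMaxSize and lets the Buffer-based ComputeDigest() report the actual digest length. A small sketch of how a caller might use the new shape; the helper name is an assumption of this sketch.

#include <memory>
#include <string>

#include "absl/strings/string_view.h"
#include "rtc_base/buffer.h"
#include "rtc_base/message_digest.h"
#include "rtc_base/ssl_certificate.h"
#include "rtc_base/ssl_fingerprint.h"

// Sketch: derive an RFC 4572 style fingerprint string from a certificate.
std::string Rfc4572FingerprintOrEmpty(const webrtc::SSLCertificate& cert,
                                      absl::string_view algorithm) {
  // Direct digest computation; the Buffer is resized to the digest length.
  webrtc::Buffer digest(0, webrtc::MessageDigest::kMaxSize);
  if (!cert.ComputeDigest(algorithm, digest)) {
    return std::string();
  }
  // Or go through SSLFingerprint, which wraps the same call.
  std::unique_ptr<webrtc::SSLFingerprint> fp =
      webrtc::SSLFingerprint::Create(algorithm, cert);
  return fp ? fp->GetRfc4572Fingerprint() : std::string();  // "AB:CD:..."
}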
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::SSLFingerprint; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_SSL_FINGERPRINT_H_ diff --git a/rtc_base/ssl_identity.cc b/rtc_base/ssl_identity.cc index 3b4232b06b..9e16cb1ac9 100644 --- a/rtc_base/ssl_identity.cc +++ b/rtc_base/ssl_identity.cc @@ -15,19 +15,25 @@ #include #include +#include +#include +#include +#include +#include + #include "absl/strings/string_view.h" +#include "api/array_view.h" #include "rtc_base/checks.h" #ifdef OPENSSL_IS_BORINGSSL #include "rtc_base/boringssl_identity.h" #else #include "rtc_base/openssl_identity.h" #endif -#include "rtc_base/ssl_certificate.h" +#include "rtc_base/base64.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/third_party/base64/base64.h" #include "rtc_base/time_utils.h" -namespace rtc { +namespace webrtc { ////////////////////////////////////////////////////////////////////// // Helper Functions @@ -102,7 +108,7 @@ int64_t ASN1TimeToSec(const unsigned char* s, size_t length, bool long_format) { if (bytes_left != 1) { return -1; } - return TmToSeconds(tm); + return webrtc::TmToSeconds(tm); } ////////////////////////////////////////////////////////////////////// @@ -162,10 +168,6 @@ ECCurve KeyParams::ec_curve() const { return params_.curve; } -KeyType IntKeyTypeFamilyToKeyType(int key_type_family) { - return static_cast(key_type_family); -} - ////////////////////////////////////////////////////////////////////// // SSLIdentity ////////////////////////////////////////////////////////////////////// @@ -189,19 +191,23 @@ bool SSLIdentity::PemToDer(absl::string_view pem_type, return false; } std::string inner(pem_string.substr(body + 1, trailer - (body + 1))); - *der = Base64::Decode(inner, Base64::DO_PARSE_WHITE | Base64::DO_PAD_ANY | - Base64::DO_TERM_BUFFER); + std::optional decoded = + Base64Decode(inner, Base64DecodeOptions::kForgiving); + if (!decoded.has_value()) { + return false; + } + *der = std::move(*decoded); return true; } std::string SSLIdentity::DerToPem(absl::string_view pem_type, const unsigned char* data, size_t length) { - rtc::StringBuilder result; + StringBuilder result; result << "-----BEGIN " << pem_type << "-----\n"; - std::string b64_encoded; - Base64::EncodeFromArray(data, length, &b64_encoded); + ArrayView data_view(data, length); + std::string b64_encoded = Base64Encode(data_view); // Divide the Base-64 encoded data into 64-character chunks, as per 4.3.2.4 // of RFC 1421. static const size_t kChunkSize = 64; @@ -290,4 +296,4 @@ bool operator!=(const SSLIdentity& a, const SSLIdentity& b) { return !(a == b); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/ssl_identity.h b/rtc_base/ssl_identity.h index a0119bb1c4..2d426e6986 100644 --- a/rtc_base/ssl_identity.h +++ b/rtc_base/ssl_identity.h @@ -20,20 +20,19 @@ #include #include "absl/strings/string_view.h" +#include "rtc_base/ssl_certificate.h" #include "rtc_base/system/rtc_export.h" -namespace rtc { - -class SSLCertChain; -class SSLCertificate; +namespace webrtc { // KT_LAST is intended for vector declarations and loops over all key types; // it does not represent any key type in itself. // KT_DEFAULT is used as the default KeyType for KeyParams. 
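PemToDer() above now goes through the std::optional-returning Base64Decode() with forgiving parsing, while DerToPem() keeps the 64-character line wrapping from RFC 1421 on top of Base64Encode(). A round-trip sketch using the static helpers and the kPemTypeCertificate constant declared in ssl_identity.h; the function name is illustrative.

#include <string>
#include <vector>

#include "rtc_base/ssl_identity.h"

// Sketch: encode a DER blob as PEM and decode it back again.
bool RoundTripsThroughPem(const std::vector<unsigned char>& der) {
  std::string pem = webrtc::SSLIdentity::DerToPem(
      webrtc::kPemTypeCertificate, der.data(), der.size());
  std::string der_again;
  if (!webrtc::SSLIdentity::PemToDer(webrtc::kPemTypeCertificate, pem,
                                     &der_again)) {
    return false;
  }
  return der_again == std::string(der.begin(), der.end());
}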
enum KeyType { KT_RSA, KT_ECDSA, KT_LAST, KT_DEFAULT = KT_ECDSA }; -static const int kRsaDefaultModSize = 1024; +static const int kRsaDefaultModSize = 2048; static const int kRsaDefaultExponent = 0x10001; // = 2^16+1 = 65537 +// TODO(bugs.webrtc.org/364338811): raise the bar to 2048 bits. static const int kRsaMinModSize = 1024; static const int kRsaMaxModSize = 8192; @@ -82,11 +81,6 @@ class RTC_EXPORT KeyParams { } params_; }; -// TODO(hbos): Remove once rtc::KeyType (to be modified) and -// blink::WebRTCKeyType (to be landed) match. By using this function in Chromium -// appropriately we can change KeyType enum -> class without breaking Chromium. -KeyType IntKeyTypeFamilyToKeyType(int key_type_family); - // Parameters for generating a certificate. If `common_name` is non-empty, it // will be used for the certificate's subject and issuer name, otherwise a // random string will be used. @@ -169,6 +163,35 @@ extern const char kPemTypeCertificate[]; extern const char kPemTypeRsaPrivateKey[]; extern const char kPemTypeEcPrivateKey[]; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::ASN1TimeToSec; +using ::webrtc::EC_LAST; +using ::webrtc::EC_NIST_P256; +using ::webrtc::ECCurve; +using ::webrtc::kCertificateWindowInSeconds; +using ::webrtc::kDefaultCertificateLifetimeInSeconds; +using ::webrtc::KeyParams; +using ::webrtc::KeyType; +using ::webrtc::kPemTypeCertificate; +using ::webrtc::kPemTypeEcPrivateKey; +using ::webrtc::kPemTypeRsaPrivateKey; +using ::webrtc::kRsaDefaultExponent; +using ::webrtc::kRsaDefaultModSize; +using ::webrtc::kRsaMaxModSize; +using ::webrtc::kRsaMinModSize; +using ::webrtc::KT_DEFAULT; +using ::webrtc::KT_ECDSA; +using ::webrtc::KT_LAST; +using ::webrtc::KT_RSA; +using ::webrtc::RSAParams; +using ::webrtc::SSLIdentity; +using ::webrtc::SSLIdentityParams; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_SSL_IDENTITY_H_ diff --git a/rtc_base/ssl_identity_unittest.cc b/rtc_base/ssl_identity_unittest.cc index 1f0278ac71..3fd8ac7fe0 100644 --- a/rtc_base/ssl_identity_unittest.cc +++ b/rtc_base/ssl_identity_unittest.cc @@ -10,23 +10,36 @@ #include "rtc_base/ssl_identity.h" +#ifdef OPENSSL_IS_BORINGSSL +#include +#else +#include // IWYU pragma: keep +#endif +#include #include +#include +#include +#include +#include #include #include #include #include "absl/strings/str_replace.h" #include "absl/strings/string_view.h" +#include "rtc_base/buffer.h" #include "rtc_base/checks.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/fake_ssl_identity.h" -#include "rtc_base/helpers.h" #include "rtc_base/logging.h" #include "rtc_base/message_digest.h" +#include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_fingerprint.h" #include "test/gtest.h" -using rtc::SSLIdentity; +namespace webrtc { +namespace { const char kTestCertificate[] = "-----BEGIN CERTIFICATE-----\n" @@ -75,7 +88,8 @@ const unsigned char kTestCertSha512[] = { // updated too. The fingerprint, fingerprint algorithm and base64 certificate // were created by calling `identity->certificate().GetStats()`. static const char kRSA_PRIVATE_KEY_PEM[] = - "-----BEGIN PRIVATE KEY-----\n" + "-----BEGIN PRI" // Linebreak to avoid detection of private + "VATE KEY-----\n" // keys by linters. 
"MIICdQIBADANBgkqhkiG9w0BAQEFAASCAl8wggJbAgEAAoGBAMQPqDStRlYeDpkX\n" "erRmv+a1naM8vSVSY0gG2plnrnofViWRW3MRqWC+020MsIj3hPZeSAnt/y/FL/nr\n" "4Ea7NXcwdRo1/1xEK7U/f/cjSg1aunyvHCHwcFcMr31HLFvHr0ZgcFwbgIuFLNEl\n" @@ -125,7 +139,8 @@ static const char kRSA_BASE64_CERTIFICATE[] = "qNHm3g/VxG4NUC1Y+w29ai0/Rgh+VvgbDwK+Q="; static const char kECDSA_PRIVATE_KEY_PEM[] = - "-----BEGIN PRIVATE KEY-----\n" + "-----BEGIN PRI" // Linebreak to avoid detection of private + "VATE KEY-----\n" // keys by linters. "MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQg/AkEA2hklq7dQ2rN\n" "ZxYL6hOUACL4pn7P4FYlA3ZQhIChRANCAAR7YgdO3utP/8IqVRq8G4VZKreMAxeN\n" "rUa12twthv4uFjuHAHa9D9oyAjncmn+xvZZRyVmKrA56jRzENcEEHoAg\n" @@ -157,7 +172,7 @@ static const char kECDSA_BASE64_CERTIFICATE[] = "kekw=="; struct IdentityAndInfo { - std::unique_ptr identity; + std::unique_ptr identity; std::vector ders; std::vector pems; std::vector fingerprints; @@ -169,11 +184,11 @@ IdentityAndInfo CreateFakeIdentityAndInfoFromDers( IdentityAndInfo info; info.ders = ders; for (const std::string& der : ders) { - info.pems.push_back(rtc::SSLIdentity::DerToPem( + info.pems.push_back(SSLIdentity::DerToPem( "CERTIFICATE", reinterpret_cast(der.c_str()), der.length())); } - info.identity.reset(new rtc::FakeSSLIdentity(info.pems)); + info.identity.reset(new FakeSSLIdentity(info.pems)); // Strip header/footer and newline characters of PEM strings. for (size_t i = 0; i < info.pems.size(); ++i) { absl::StrReplaceAll({{"-----BEGIN CERTIFICATE-----", ""}, @@ -183,10 +198,10 @@ IdentityAndInfo CreateFakeIdentityAndInfoFromDers( } // Fingerprints for the whole certificate chain, starting with leaf // certificate. - const rtc::SSLCertChain& chain = info.identity->cert_chain(); - std::unique_ptr fp; + const SSLCertChain& chain = info.identity->cert_chain(); + std::unique_ptr fp; for (size_t i = 0; i < chain.GetSize(); i++) { - fp = rtc::SSLFingerprint::Create("sha-1", chain.Get(i)); + fp = SSLFingerprint::Create("sha-1", chain.Get(i)); EXPECT_TRUE(fp); info.fingerprints.push_back(fp->GetRfc4572Fingerprint()); } @@ -197,17 +212,17 @@ IdentityAndInfo CreateFakeIdentityAndInfoFromDers( class SSLIdentityTest : public ::testing::Test { public: void SetUp() override { - identity_rsa1_ = SSLIdentity::Create("test1", rtc::KT_RSA); - identity_rsa2_ = SSLIdentity::Create("test2", rtc::KT_RSA); - identity_ecdsa1_ = SSLIdentity::Create("test3", rtc::KT_ECDSA); - identity_ecdsa2_ = SSLIdentity::Create("test4", rtc::KT_ECDSA); + identity_rsa1_ = SSLIdentity::Create("test1", KT_RSA); + identity_rsa2_ = SSLIdentity::Create("test2", KT_RSA); + identity_ecdsa1_ = SSLIdentity::Create("test3", KT_ECDSA); + identity_ecdsa2_ = SSLIdentity::Create("test4", KT_ECDSA); ASSERT_TRUE(identity_rsa1_); ASSERT_TRUE(identity_rsa2_); ASSERT_TRUE(identity_ecdsa1_); ASSERT_TRUE(identity_ecdsa2_); - test_cert_ = rtc::SSLCertificate::FromPEMString(kTestCertificate); + test_cert_ = SSLCertificate::FromPEMString(kTestCertificate); ASSERT_TRUE(test_cert_); } @@ -216,70 +231,60 @@ class SSLIdentityTest : public ::testing::Test { ASSERT_TRUE(identity_rsa1_->certificate().GetSignatureDigestAlgorithm( &digest_algorithm)); - ASSERT_EQ(rtc::DIGEST_SHA_256, digest_algorithm); + ASSERT_EQ(DIGEST_SHA_256, digest_algorithm); ASSERT_TRUE(identity_rsa2_->certificate().GetSignatureDigestAlgorithm( &digest_algorithm)); - ASSERT_EQ(rtc::DIGEST_SHA_256, digest_algorithm); + ASSERT_EQ(DIGEST_SHA_256, digest_algorithm); ASSERT_TRUE(identity_ecdsa1_->certificate().GetSignatureDigestAlgorithm( &digest_algorithm)); - 
ASSERT_EQ(rtc::DIGEST_SHA_256, digest_algorithm); + ASSERT_EQ(DIGEST_SHA_256, digest_algorithm); ASSERT_TRUE(identity_ecdsa2_->certificate().GetSignatureDigestAlgorithm( &digest_algorithm)); - ASSERT_EQ(rtc::DIGEST_SHA_256, digest_algorithm); + ASSERT_EQ(DIGEST_SHA_256, digest_algorithm); // The test certificate has an MD5-based signature. ASSERT_TRUE(test_cert_->GetSignatureDigestAlgorithm(&digest_algorithm)); - ASSERT_EQ(rtc::DIGEST_MD5, digest_algorithm); + ASSERT_EQ(DIGEST_MD5, digest_algorithm); } - typedef unsigned char DigestType[rtc::MessageDigest::kMaxSize]; - - void TestDigestHelper(DigestType digest, + void TestDigestHelper(Buffer& digest, const SSLIdentity* identity, absl::string_view algorithm, size_t expected_len) { - DigestType digest1; - size_t digest_len; - bool rv; - - memset(digest, 0, expected_len); - rv = identity->certificate().ComputeDigest(algorithm, digest, - sizeof(DigestType), &digest_len); - EXPECT_TRUE(rv); - EXPECT_EQ(expected_len, digest_len); + digest.EnsureCapacity(expected_len); + digest.Clear(); + EXPECT_TRUE(identity->certificate().ComputeDigest(algorithm, digest)); + EXPECT_EQ(expected_len, digest.size()); // Repeat digest computation for the identity as a sanity check. - memset(digest1, 0xff, expected_len); - rv = identity->certificate().ComputeDigest(algorithm, digest1, - sizeof(DigestType), &digest_len); - EXPECT_TRUE(rv); - EXPECT_EQ(expected_len, digest_len); + Buffer digest1(0, MessageDigest::kMaxSize); + std::memset(digest1.data(), 0xff, expected_len); + EXPECT_TRUE(identity->certificate().ComputeDigest(algorithm, digest1)); + EXPECT_EQ(expected_len, digest1.size()); - EXPECT_EQ(0, memcmp(digest, digest1, expected_len)); + EXPECT_EQ(digest, digest1); } void TestDigestForGeneratedCert(absl::string_view algorithm, size_t expected_len) { - DigestType digest[4]; + std::array digests; - ASSERT_TRUE(expected_len <= sizeof(DigestType)); - - TestDigestHelper(digest[0], identity_rsa1_.get(), algorithm, expected_len); - TestDigestHelper(digest[1], identity_rsa2_.get(), algorithm, expected_len); - TestDigestHelper(digest[2], identity_ecdsa1_.get(), algorithm, + TestDigestHelper(digests[0], identity_rsa1_.get(), algorithm, expected_len); + TestDigestHelper(digests[1], identity_rsa2_.get(), algorithm, expected_len); + TestDigestHelper(digests[2], identity_ecdsa1_.get(), algorithm, expected_len); - TestDigestHelper(digest[3], identity_ecdsa2_.get(), algorithm, + TestDigestHelper(digests[3], identity_ecdsa2_.get(), algorithm, expected_len); // Sanity check that all four digests are unique. This could theoretically // fail, since cryptographic hash collisions have a non-zero probability. 
- for (int i = 0; i < 4; i++) { - for (int j = 0; j < 4; j++) { + for (size_t i = 0; i < digests.size(); i++) { + for (size_t j = 0; j < digests.size(); j++) { if (i != j) - EXPECT_NE(0, memcmp(digest[i], digest[j], expected_len)); + EXPECT_NE(digests[i], digests[j]); } } } @@ -287,17 +292,13 @@ class SSLIdentityTest : public ::testing::Test { void TestDigestForFixedCert(absl::string_view algorithm, size_t expected_len, const unsigned char* expected_digest) { - bool rv; - DigestType digest; - size_t digest_len; + Buffer digest(0, MessageDigest::kMaxSize); - ASSERT_TRUE(expected_len <= sizeof(DigestType)); + ASSERT_TRUE(expected_len <= digest.capacity()); - rv = test_cert_->ComputeDigest(algorithm, digest, sizeof(digest), - &digest_len); - EXPECT_TRUE(rv); - EXPECT_EQ(expected_len, digest_len); - EXPECT_EQ(0, memcmp(digest, expected_digest, expected_len)); + EXPECT_TRUE(test_cert_->ComputeDigest(algorithm, digest)); + EXPECT_EQ(expected_len, digest.size()); + EXPECT_EQ(0, memcmp(digest.data(), expected_digest, expected_len)); } void TestCloningIdentity(const SSLIdentity& identity) { @@ -336,45 +337,45 @@ class SSLIdentityTest : public ::testing::Test { std::unique_ptr identity_rsa2_; std::unique_ptr identity_ecdsa1_; std::unique_ptr identity_ecdsa2_; - std::unique_ptr test_cert_; + std::unique_ptr test_cert_; }; TEST_F(SSLIdentityTest, FixedDigestSHA1) { - TestDigestForFixedCert(rtc::DIGEST_SHA_1, 20, kTestCertSha1); + TestDigestForFixedCert(DIGEST_SHA_1, SHA_DIGEST_LENGTH, kTestCertSha1); } // HASH_AlgSHA224 is not supported in the chromium linux build. TEST_F(SSLIdentityTest, FixedDigestSHA224) { - TestDigestForFixedCert(rtc::DIGEST_SHA_224, 28, kTestCertSha224); + TestDigestForFixedCert(DIGEST_SHA_224, SHA224_DIGEST_LENGTH, kTestCertSha224); } TEST_F(SSLIdentityTest, FixedDigestSHA256) { - TestDigestForFixedCert(rtc::DIGEST_SHA_256, 32, kTestCertSha256); + TestDigestForFixedCert(DIGEST_SHA_256, SHA256_DIGEST_LENGTH, kTestCertSha256); } TEST_F(SSLIdentityTest, FixedDigestSHA384) { - TestDigestForFixedCert(rtc::DIGEST_SHA_384, 48, kTestCertSha384); + TestDigestForFixedCert(DIGEST_SHA_384, SHA384_DIGEST_LENGTH, kTestCertSha384); } TEST_F(SSLIdentityTest, FixedDigestSHA512) { - TestDigestForFixedCert(rtc::DIGEST_SHA_512, 64, kTestCertSha512); + TestDigestForFixedCert(DIGEST_SHA_512, SHA512_DIGEST_LENGTH, kTestCertSha512); } // HASH_AlgSHA224 is not supported in the chromium linux build. 
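The fixed-digest tests above replace the literal lengths 20/28/32/48/64 with the SHA*_DIGEST_LENGTH macros from <openssl/sha.h> (BoringSSL defines the same names), so the substitution is value-preserving. A compile-time sketch of that equivalence:

#include <openssl/sha.h>

// Compile-time check only; the values match the literals the old tests used.
static_assert(SHA_DIGEST_LENGTH == 20, "SHA-1 digest is 20 bytes");
static_assert(SHA224_DIGEST_LENGTH == 28, "SHA-224 digest is 28 bytes");
static_assert(SHA256_DIGEST_LENGTH == 32, "SHA-256 digest is 32 bytes");
static_assert(SHA384_DIGEST_LENGTH == 48, "SHA-384 digest is 48 bytes");
static_assert(SHA512_DIGEST_LENGTH == 64, "SHA-512 digest is 64 bytes");

The generated-certificate digest tests below rely on the same macros.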
TEST_F(SSLIdentityTest, DigestSHA224) { - TestDigestForGeneratedCert(rtc::DIGEST_SHA_224, 28); + TestDigestForGeneratedCert(DIGEST_SHA_224, SHA224_DIGEST_LENGTH); } TEST_F(SSLIdentityTest, DigestSHA256) { - TestDigestForGeneratedCert(rtc::DIGEST_SHA_256, 32); + TestDigestForGeneratedCert(DIGEST_SHA_256, SHA256_DIGEST_LENGTH); } TEST_F(SSLIdentityTest, DigestSHA384) { - TestDigestForGeneratedCert(rtc::DIGEST_SHA_384, 48); + TestDigestForGeneratedCert(DIGEST_SHA_384, SHA384_DIGEST_LENGTH); } TEST_F(SSLIdentityTest, DigestSHA512) { - TestDigestForGeneratedCert(rtc::DIGEST_SHA_512, 64); + TestDigestForGeneratedCert(DIGEST_SHA_512, SHA512_DIGEST_LENGTH); } TEST_F(SSLIdentityTest, IdentityComparison) { @@ -452,8 +453,9 @@ TEST_F(SSLIdentityTest, GetSignatureDigestAlgorithm) { TEST_F(SSLIdentityTest, SSLCertificateGetStatsRSA) { std::unique_ptr identity( SSLIdentity::CreateFromPEMStrings(kRSA_PRIVATE_KEY_PEM, kRSA_CERT_PEM)); - std::unique_ptr stats = + std::unique_ptr stats = identity->certificate().GetStats(); + ASSERT_TRUE(stats); EXPECT_EQ(stats->fingerprint, kRSA_FINGERPRINT); EXPECT_EQ(stats->fingerprint_algorithm, kRSA_FINGERPRINT_ALGORITHM); EXPECT_EQ(stats->base64_certificate, kRSA_BASE64_CERTIFICATE); @@ -463,8 +465,9 @@ TEST_F(SSLIdentityTest, SSLCertificateGetStatsRSA) { TEST_F(SSLIdentityTest, SSLCertificateGetStatsECDSA) { std::unique_ptr identity(SSLIdentity::CreateFromPEMStrings( kECDSA_PRIVATE_KEY_PEM, kECDSA_CERT_PEM)); - std::unique_ptr stats = + std::unique_ptr stats = identity->certificate().GetStats(); + ASSERT_TRUE(stats); EXPECT_EQ(stats->fingerprint, kECDSA_FINGERPRINT); EXPECT_EQ(stats->fingerprint_algorithm, kECDSA_FINGERPRINT_ALGORITHM); EXPECT_EQ(stats->base64_certificate, kECDSA_BASE64_CERTIFICATE); @@ -482,9 +485,9 @@ TEST_F(SSLIdentityTest, SSLCertificateGetStatsWithChain) { EXPECT_EQ(info.pems.size(), info.ders.size()); EXPECT_EQ(info.fingerprints.size(), info.ders.size()); - std::unique_ptr first_stats = + std::unique_ptr first_stats = info.identity->cert_chain().GetStats(); - rtc::SSLCertificateStats* cert_stats = first_stats.get(); + SSLCertificateStats* cert_stats = first_stats.get(); for (size_t i = 0; i < info.ders.size(); ++i) { EXPECT_EQ(cert_stats->fingerprint, info.fingerprints[i]); EXPECT_EQ(cert_stats->fingerprint_algorithm, "sha-1"); @@ -498,11 +501,11 @@ class SSLIdentityExpirationTest : public ::testing::Test { public: SSLIdentityExpirationTest() { // Set use of the test RNG to get deterministic expiration timestamp. - rtc::SetRandomTestMode(true); + SetRandomTestMode(true); } ~SSLIdentityExpirationTest() override { // Put it back for the next test. - rtc::SetRandomTestMode(false); + SetRandomTestMode(false); } void TestASN1TimeToSec() { @@ -566,14 +569,14 @@ class SSLIdentityExpirationTest : public ::testing::Test { // clang-format off }; - unsigned char buf[20]; + unsigned char buf[EVP_MAX_MD_SIZE]; // Run all examples and check for the expected result. for (const auto& entry : data) { size_t length = strlen(entry.string); memcpy(buf, entry.string, length); // Copy the ASN1 string... - buf[length] = rtc::CreateRandomId(); // ...and terminate it with junk. - int64_t res = rtc::ASN1TimeToSec(buf, length, entry.long_format); + buf[length] = CreateRandomId(); // ...and terminate it with junk. 
+ int64_t res = ASN1TimeToSec(buf, length, entry.long_format); RTC_LOG(LS_VERBOSE) << entry.string; ASSERT_EQ(entry.want, res); } @@ -581,8 +584,8 @@ class SSLIdentityExpirationTest : public ::testing::Test { for (const auto& entry : data) { size_t length = strlen(entry.string); memcpy(buf, entry.string, length); // Copy the ASN1 string... - buf[length] = rtc::CreateRandomId(); // ...and terminate it with junk. - int64_t res = rtc::ASN1TimeToSec(buf, length - 1, entry.long_format); + buf[length] = CreateRandomId(); // ...and terminate it with junk. + int64_t res = ASN1TimeToSec(buf, length - 1, entry.long_format); RTC_LOG(LS_VERBOSE) << entry.string; ASSERT_EQ(-1, res); } @@ -596,10 +599,10 @@ class SSLIdentityExpirationTest : public ::testing::Test { // we hit time offset limitations in OpenSSL on some 32-bit systems. time_t time_before_generation = time(nullptr); time_t lifetime = - rtc::CreateRandomId() % (0x80000000 - time_before_generation); - rtc::KeyParams key_params = rtc::KeyParams::ECDSA(rtc::EC_NIST_P256); + CreateRandomId() % (0x80000000 - time_before_generation); + KeyParams key_params = KeyParams::ECDSA(EC_NIST_P256); auto identity = - rtc::SSLIdentity::Create("", key_params, lifetime); + SSLIdentity::Create("", key_params, lifetime); time_t time_after_generation = time(nullptr); EXPECT_LE(time_before_generation + lifetime, identity->certificate().CertificateExpirationTime()); @@ -616,3 +619,6 @@ TEST_F(SSLIdentityExpirationTest, TestASN1TimeToSec) { TEST_F(SSLIdentityExpirationTest, TestExpireTime) { TestExpireTime(500); } + +} // namespace +} // namespace webrtc diff --git a/rtc_base/ssl_stream_adapter.cc b/rtc_base/ssl_stream_adapter.cc index 931d0bf0b6..4f74d87274 100644 --- a/rtc_base/ssl_stream_adapter.cc +++ b/rtc_base/ssl_stream_adapter.cc @@ -10,16 +10,23 @@ #include "rtc_base/ssl_stream_adapter.h" -#include "absl/memory/memory.h" +#include +#include +#include +#include +#include + +#include "absl/functional/any_invocable.h" #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/field_trials_view.h" #include "rtc_base/openssl_stream_adapter.h" +#include "rtc_base/ssl_identity.h" +#include "rtc_base/stream.h" -/////////////////////////////////////////////////////////////////////////////// - -namespace rtc { +namespace webrtc { -// TODO(guoweis): Move this to SDP layer and use int form internally. -// webrtc:5043. +// Deprecated, prefer SrtpCryptoSuiteToName. 
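ASN1TimeToSec(), exercised in the expiration tests above, takes the raw ASN.1 time string, its length, and a long_format flag selecting the four-digit-year GeneralizedTime form. A sketch of a direct call on the one input whose result is easy to verify by hand, the Unix epoch; the test name is illustrative.

#include <cstring>

#include "rtc_base/ssl_identity.h"
#include "test/gtest.h"

// Sketch: 1970-01-01T00:00:00Z in GeneralizedTime form maps to zero seconds
// since the epoch.
TEST(Asn1TimeSketch, EpochParsesToZero) {
  const char kEpoch[] = "19700101000000Z";
  EXPECT_EQ(0, webrtc::ASN1TimeToSec(
                   reinterpret_cast<const unsigned char*>(kEpoch),
                   std::strlen(kEpoch), /*long_format=*/true));
}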
const char kCsAesCm128HmacSha1_80[] = "AES_CM_128_HMAC_SHA1_80"; const char kCsAesCm128HmacSha1_32[] = "AES_CM_128_HMAC_SHA1_32"; const char kCsAeadAes128Gcm[] = "AEAD_AES_128_GCM"; @@ -27,31 +34,19 @@ const char kCsAeadAes256Gcm[] = "AEAD_AES_256_GCM"; std::string SrtpCryptoSuiteToName(int crypto_suite) { switch (crypto_suite) { - case kSrtpAes128CmSha1_32: - return kCsAesCm128HmacSha1_32; case kSrtpAes128CmSha1_80: - return kCsAesCm128HmacSha1_80; + return "AES_CM_128_HMAC_SHA1_80"; + case kSrtpAes128CmSha1_32: + return "AES_CM_128_HMAC_SHA1_32"; case kSrtpAeadAes128Gcm: - return kCsAeadAes128Gcm; + return "AEAD_AES_128_GCM"; case kSrtpAeadAes256Gcm: - return kCsAeadAes256Gcm; + return "AEAD_AES_256_GCM"; default: return std::string(); } } -int SrtpCryptoSuiteFromName(absl::string_view crypto_suite) { - if (crypto_suite == kCsAesCm128HmacSha1_32) - return kSrtpAes128CmSha1_32; - if (crypto_suite == kCsAesCm128HmacSha1_80) - return kSrtpAes128CmSha1_80; - if (crypto_suite == kCsAeadAes128Gcm) - return kSrtpAeadAes128Gcm; - if (crypto_suite == kCsAeadAes256Gcm) - return kSrtpAeadAes256Gcm; - return kSrtpInvalidCryptoSuite; -} - bool GetSrtpKeyAndSaltLengths(int crypto_suite, int* key_length, int* salt_length) { @@ -86,37 +81,12 @@ bool IsGcmCryptoSuite(int crypto_suite) { crypto_suite == kSrtpAeadAes128Gcm); } -bool IsGcmCryptoSuiteName(absl::string_view crypto_suite) { - return (crypto_suite == kCsAeadAes256Gcm || crypto_suite == kCsAeadAes128Gcm); -} - std::unique_ptr SSLStreamAdapter::Create( std::unique_ptr stream, - absl::AnyInvocable handshake_error) { - return std::make_unique(std::move(stream), - std::move(handshake_error)); -} - -bool SSLStreamAdapter::GetSslCipherSuite(int* cipher_suite) { - return false; -} - -bool SSLStreamAdapter::ExportKeyingMaterial(absl::string_view label, - const uint8_t* context, - size_t context_len, - bool use_context, - uint8_t* result, - size_t result_len) { - return false; // Default is unsupported -} - -bool SSLStreamAdapter::SetDtlsSrtpCryptoSuites( - const std::vector& crypto_suites) { - return false; -} - -bool SSLStreamAdapter::GetDtlsSrtpCryptoSuite(int* crypto_suite) { - return false; + absl::AnyInvocable handshake_error, + const FieldTrialsView* field_trials) { + return std::make_unique( + std::move(stream), std::move(handshake_error), field_trials); } bool SSLStreamAdapter::IsBoringSsl() { @@ -129,8 +99,19 @@ bool SSLStreamAdapter::IsAcceptableCipher(absl::string_view cipher, KeyType key_type) { return OpenSSLStreamAdapter::IsAcceptableCipher(cipher, key_type); } -std::string SSLStreamAdapter::SslCipherSuiteToName(int cipher_suite) { - return OpenSSLStreamAdapter::SslCipherSuiteToName(cipher_suite); + +// Default shim for backward compat. 
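SrtpCryptoSuiteToName() above now returns the IANA profile names directly, and GetSrtpKeyAndSaltLengths() remains the lookup callers use to size keying material. A usage sketch combining the two; the function name is illustrative.

#include "rtc_base/logging.h"
#include "rtc_base/ssl_stream_adapter.h"

// Sketch: resolve a negotiated DTLS-SRTP profile id into its IANA name and
// the key/salt lengths a caller would use to size the keying material.
void LogSrtpSuite(int crypto_suite) {
  int key_len = 0;
  int salt_len = 0;
  if (!webrtc::GetSrtpKeyAndSaltLengths(crypto_suite, &key_len, &salt_len)) {
    RTC_LOG(LS_WARNING) << "Unknown SRTP crypto suite " << crypto_suite;
    return;
  }
  RTC_LOG(LS_INFO) << webrtc::SrtpCryptoSuiteToName(crypto_suite)
                   << " key=" << key_len << " salt=" << salt_len
                   << " gcm=" << webrtc::IsGcmCryptoSuite(crypto_suite);
}

The default shim that follows maps the old pointer-based SetPeerCertificateDigest() call onto the new ArrayView overload and folds the error code back into a bool.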
+bool SSLStreamAdapter::SetPeerCertificateDigest( + absl::string_view digest_alg, + const unsigned char* digest_val, + size_t digest_len, + SSLPeerCertificateDigestError* error) { + unsigned char* nonconst_val = const_cast(digest_val); + SSLPeerCertificateDigestError ret = SetPeerCertificateDigest( + digest_alg, ArrayView(nonconst_val, digest_len)); + if (error) + *error = ret; + return ret == SSLPeerCertificateDigestError::NONE; } /////////////////////////////////////////////////////////////////////////////// @@ -141,6 +122,10 @@ void SSLStreamAdapter::EnableTimeCallbackForTesting() { OpenSSLStreamAdapter::EnableTimeCallbackForTesting(); } +SSLProtocolVersion SSLStreamAdapter::GetMaxSupportedDTLSProtocolVersion() { + return OpenSSLStreamAdapter::GetMaxSupportedDTLSProtocolVersion(); +} + /////////////////////////////////////////////////////////////////////////////// -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/ssl_stream_adapter.h b/rtc_base/ssl_stream_adapter.h index d8b66f11e8..bc62f8eacb 100644 --- a/rtc_base/ssl_stream_adapter.h +++ b/rtc_base/ssl_stream_adapter.h @@ -15,17 +15,20 @@ #include #include +#include #include #include #include "absl/functional/any_invocable.h" -#include "absl/memory/memory.h" #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/field_trials_view.h" +#include "rtc_base/buffer.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/stream.h" -namespace rtc { +namespace webrtc { // Constants for SSL profile. constexpr int kTlsNullWithNullNull = 0; @@ -39,6 +42,10 @@ constexpr int kSrtpAeadAes128Gcm = 0x0007; constexpr int kSrtpAeadAes256Gcm = 0x0008; constexpr int kSrtpCryptoSuiteMaxValue = 0xFFFF; +// Constants for SSL signature algorithms. +constexpr int kSslSignatureAlgorithmUnknown = 0; +constexpr int kSslSignatureAlgorithmMaxValue = 0xFFFF; + // Names of SRTP profiles listed above. // 128-bit AES with 80-bit SHA-1 HMAC. extern const char kCsAesCm128HmacSha1_80[]; @@ -54,9 +61,6 @@ extern const char kCsAeadAes256Gcm[]; // name, as defined in https://tools.ietf.org/html/rfc5764#section-4.1.2. std::string SrtpCryptoSuiteToName(int crypto_suite); -// The reverse of above conversion. -int SrtpCryptoSuiteFromName(absl::string_view crypto_suite); - // Get key length and salt length for given crypto suite. Returns true for // valid suites, otherwise false. bool GetSrtpKeyAndSaltLengths(int crypto_suite, @@ -66,9 +70,6 @@ bool GetSrtpKeyAndSaltLengths(int crypto_suite, // Returns true if the given crypto suite id uses a GCM cipher. bool IsGcmCryptoSuite(int crypto_suite); -// Returns true if the given crypto suite name uses a GCM cipher. -bool IsGcmCryptoSuiteName(absl::string_view crypto_suite); - // SSLStreamAdapter : A StreamInterfaceAdapter that does SSL/TLS. // After SSL has been started, the stream will only open on successful // SSL verification of certificates, and the communication is @@ -86,19 +87,23 @@ bool IsGcmCryptoSuiteName(absl::string_view crypto_suite); enum SSLRole { SSL_CLIENT, SSL_SERVER }; enum SSLMode { SSL_MODE_TLS, SSL_MODE_DTLS }; -// Note: TLS_10, TLS_11, and DTLS_10 will all be ignored, and only DTLS1_2 will -// be accepted unless the trial flag WebRTC-LegacyTlsProtocols/Enabled/ is -// passed in or an explicit override is used. Support for the legacy protocol -// versions will be completely removed in the future. -// See https://bugs.webrtc.org/10261. +// TODO bugs.webrtc.org/40644300 remove unused legacy constants. 
enum SSLProtocolVersion { SSL_PROTOCOL_NOT_GIVEN = -1, - SSL_PROTOCOL_TLS_10 = 0, - SSL_PROTOCOL_TLS_11, - SSL_PROTOCOL_TLS_12, - SSL_PROTOCOL_DTLS_10 = SSL_PROTOCOL_TLS_11, + SSL_PROTOCOL_TLS_10 = 0, // Deprecated and no longer supported. + SSL_PROTOCOL_TLS_11 = 1, // Deprecated and no longer supported. + SSL_PROTOCOL_TLS_12 = 2, + SSL_PROTOCOL_TLS_13 = 3, + SSL_PROTOCOL_DTLS_10 = 1, // Deprecated and no longer supported. SSL_PROTOCOL_DTLS_12 = SSL_PROTOCOL_TLS_12, + SSL_PROTOCOL_DTLS_13 = SSL_PROTOCOL_TLS_13, }; + +// Versions returned from BoringSSL. +const uint16_t kDtls10VersionBytes = 0xfeff; +const uint16_t kDtls12VersionBytes = 0xfefd; +const uint16_t kDtls13VersionBytes = 0xfefc; + enum class SSLPeerCertificateDigestError { NONE, UNKNOWN_ALGORITHM, @@ -119,7 +124,9 @@ class SSLStreamAdapter : public StreamInterface { // Caller is responsible for freeing the returned object. static std::unique_ptr Create( std::unique_ptr stream, - absl::AnyInvocable handshake_error = nullptr); + absl::AnyInvocable handshake_error = + nullptr, + const FieldTrialsView* field_trials = nullptr); SSLStreamAdapter() = default; ~SSLStreamAdapter() override = default; @@ -136,8 +143,8 @@ class SSLStreamAdapter : public StreamInterface { // TODO(ekr@rtfm.com): rename this SetRole to reflect its new function virtual void SetServerRole(SSLRole role = SSL_SERVER) = 0; - // Do DTLS or TLS. - virtual void SetMode(SSLMode mode) = 0; + [[deprecated("Only DTLS is supported by the stream adapter")]] virtual void + SetMode(SSLMode mode) = 0; // Set maximum supported protocol version. The highest version supported by // both ends will be used for the connection, i.e. if one party supports @@ -152,6 +159,9 @@ class SSLStreamAdapter : public StreamInterface { // This should only be called before StartSSL(). virtual void SetInitialRetransmissionTimeout(int timeout_ms) = 0; + // Set MTU to be used for next handshake flight. + virtual void SetMTU(int mtu) = 0; + // StartSSL starts negotiation with a peer, whose certificate is verified // using the certificate digest. Generally, SetIdentity() and possibly // SetServerRole() should have been called before this. @@ -176,13 +186,16 @@ class SSLStreamAdapter : public StreamInterface { // channel (such as the signaling channel). This must specify the terminal // certificate, not just a CA. SSLStream makes a copy of the digest value. // - // Returns true if successful. - // `error` is optional and provides more information about the failure. - virtual bool SetPeerCertificateDigest( + // Returns SSLPeerCertificateDigestError::NONE if successful. + virtual SSLPeerCertificateDigestError SetPeerCertificateDigest( absl::string_view digest_alg, - const unsigned char* digest_val, - size_t digest_len, - SSLPeerCertificateDigestError* error = nullptr) = 0; + ArrayView digest_val) = 0; + [[deprecated( + "Use SetPeerCertificateDigest with ArrayView instead")]] virtual bool + SetPeerCertificateDigest(absl::string_view digest_alg, + const unsigned char* digest_val, + size_t digest_len, + SSLPeerCertificateDigestError* error = nullptr); // Retrieves the peer's certificate chain including leaf certificate, if a // connection has been established. @@ -190,37 +203,30 @@ class SSLStreamAdapter : public StreamInterface { // Retrieves the IANA registration id of the cipher suite used for the // connection (e.g. 0x2F for "TLS_RSA_WITH_AES_128_CBC_SHA"). 
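The header now exposes SetPeerCertificateDigest() taking an ArrayView and returning SSLPeerCertificateDigestError, with the old bool/pointer signature kept only as a deprecated shim. A sketch of a caller pairing the new overload with the Buffer-based ComputeDigest(); the helper name is an assumption of this sketch.

#include "absl/strings/string_view.h"
#include "rtc_base/buffer.h"
#include "rtc_base/message_digest.h"
#include "rtc_base/ssl_certificate.h"
#include "rtc_base/ssl_stream_adapter.h"

// Sketch: feed a remote certificate digest to the new ArrayView-based
// SetPeerCertificateDigest() and report success as a bool, roughly what the
// deprecated overload's shim does.
bool ApplyPeerDigest(webrtc::SSLStreamAdapter& ssl,
                     const webrtc::SSLCertificate& remote_cert,
                     absl::string_view digest_alg) {
  webrtc::Buffer digest(0, webrtc::MessageDigest::kMaxSize);
  if (!remote_cert.ComputeDigest(digest_alg, digest)) {
    return false;
  }
  return ssl.SetPeerCertificateDigest(digest_alg, digest) ==
         webrtc::SSLPeerCertificateDigestError::NONE;
}

The cipher-suite and DTLS-SRTP accessors that follow become pure virtual alongside this change.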
- virtual bool GetSslCipherSuite(int* cipher_suite); + virtual bool GetSslCipherSuite(int* cipher_suite) const = 0; + // Returns the name of the cipher suite used for the DTLS transport, + // as defined in the "Description" column of the IANA cipher suite registry. + virtual std::optional GetTlsCipherSuiteName() const = 0; // Retrieves the enum value for SSL version. // Will return -1 until the version has been negotiated. - virtual SSLProtocolVersion GetSslVersion() const = 0; + [[deprecated("Use GetSslVersionBytes")]] virtual SSLProtocolVersion + GetSslVersion() const = 0; // Retrieves the 2-byte version from the TLS protocol. // Will return false until the version has been negotiated. virtual bool GetSslVersionBytes(int* version) const = 0; // Key Exporter interface from RFC 5705 - // Arguments are: - // label -- the exporter label. - // part of the RFC defining each exporter - // usage (IN) - // context/context_len -- a context to bind to for this connection; - // optional, can be null, 0 (IN) - // use_context -- whether to use the context value - // (needed to distinguish no context from - // zero-length ones). - // result -- where to put the computed value - // result_len -- the length of the computed value - virtual bool ExportKeyingMaterial(absl::string_view label, - const uint8_t* context, - size_t context_len, - bool use_context, - uint8_t* result, - size_t result_len); + virtual bool ExportSrtpKeyingMaterial( + ZeroOnFreeBuffer& keying_material) = 0; + + // Returns the signature algorithm or 0 if not applicable. + virtual uint16_t GetPeerSignatureAlgorithm() const = 0; // DTLS-SRTP interface - virtual bool SetDtlsSrtpCryptoSuites(const std::vector& crypto_suites); - virtual bool GetDtlsSrtpCryptoSuite(int* crypto_suite); + virtual bool SetDtlsSrtpCryptoSuites( + const std::vector& crypto_suites) = 0; + virtual bool GetDtlsSrtpCryptoSuite(int* crypto_suite) const = 0; // Returns true if a TLS connection has been established. // The only difference between this and "GetState() == SE_OPEN" is that if @@ -238,11 +244,6 @@ class SSLStreamAdapter : public StreamInterface { static bool IsAcceptableCipher(int cipher, KeyType key_type); static bool IsAcceptableCipher(absl::string_view cipher, KeyType key_type); - // TODO(guoweis): Move this away from a static class method. Currently this is - // introduced such that any caller could depend on sslstreamadapter.h without - // depending on specific SSL implementation. - static std::string SslCipherSuiteToName(int cipher_suite); - //////////////////////////////////////////////////////////////////////////// // Testing only member functions //////////////////////////////////////////////////////////////////////////// @@ -251,6 +252,9 @@ class SSLStreamAdapter : public StreamInterface { // using a fake clock. static void EnableTimeCallbackForTesting(); + // Return max DTLS SSLProtocolVersion supported by implementation. + static SSLProtocolVersion GetMaxSupportedDTLSProtocolVersion(); + // Deprecated. Do not use this API outside of testing. // Do not set this to false outside of testing. void SetClientAuthEnabledForTesting(bool enabled) { @@ -262,6 +266,14 @@ class SSLStreamAdapter : public StreamInterface { // authentication. bool GetClientAuthEnabled() const { return client_auth_enabled_; } + // Return number of times DTLS retransmission has been triggered. + // Used for testing (and maybe put into stats?). 
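ExportSrtpKeyingMaterial() replaces the label/context exporter with a single call that fills a ZeroOnFreeBuffer. A sketch of how a caller might size and use it, assuming the RFC 5764 convention of exporting a client and a server key+salt pair and that the caller pre-sizes the buffer; the helper name and sizing are assumptions of this sketch.

#include <cstdint>
#include <optional>

#include "rtc_base/buffer.h"
#include "rtc_base/ssl_stream_adapter.h"

// Sketch: after the DTLS handshake, look up the negotiated SRTP suite and
// export keying material sized for it.
std::optional<webrtc::ZeroOnFreeBuffer<uint8_t>> ExportDtlsSrtpKeys(
    webrtc::SSLStreamAdapter& ssl) {
  int crypto_suite = 0;
  int key_len = 0;
  int salt_len = 0;
  if (!ssl.GetDtlsSrtpCryptoSuite(&crypto_suite) ||
      !webrtc::GetSrtpKeyAndSaltLengths(crypto_suite, &key_len, &salt_len)) {
    return std::nullopt;
  }
  // Client and server each get a key+salt pair, hence 2 * (key + salt).
  webrtc::ZeroOnFreeBuffer<uint8_t> keying_material(2 * (key_len + salt_len));
  if (!ssl.ExportSrtpKeyingMaterial(keying_material)) {
    return std::nullopt;
  }
  return keying_material;
}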
+ virtual int GetRetransmissionCount() const = 0; + + // Return the the ID of the group used by the adapters most recently + // completed handshake, or 0 if not applicable (e.g. before the handshake). + virtual uint16_t GetSslGroupIdForTesting() const = 0; + private: // If true (default), the client is required to provide a certificate during // handshake. If no certificate is given, handshake fails. This applies to @@ -269,6 +281,52 @@ class SSLStreamAdapter : public StreamInterface { bool client_auth_enabled_ = true; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::GetSrtpKeyAndSaltLengths; +using ::webrtc::IsGcmCryptoSuite; +using ::webrtc::kCsAeadAes128Gcm; +using ::webrtc::kCsAeadAes256Gcm; +using ::webrtc::kCsAesCm128HmacSha1_32; +using ::webrtc::kCsAesCm128HmacSha1_80; +using ::webrtc::kDtls10VersionBytes; +using ::webrtc::kDtls12VersionBytes; +using ::webrtc::kDtls13VersionBytes; +using ::webrtc::kSrtpAeadAes128Gcm; +using ::webrtc::kSrtpAeadAes256Gcm; +using ::webrtc::kSrtpAes128CmSha1_32; +using ::webrtc::kSrtpAes128CmSha1_80; +using ::webrtc::kSrtpCryptoSuiteMaxValue; +using ::webrtc::kSrtpInvalidCryptoSuite; +using ::webrtc::kSslCipherSuiteMaxValue; +using ::webrtc::kSslSignatureAlgorithmMaxValue; +using ::webrtc::kSslSignatureAlgorithmUnknown; +using ::webrtc::kTlsNullWithNullNull; +using ::webrtc::SrtpCryptoSuiteToName; +using ::webrtc::SSE_MSG_TRUNC; +using ::webrtc::SSL_CLIENT; +using ::webrtc::SSL_MODE_DTLS; +using ::webrtc::SSL_MODE_TLS; +using ::webrtc::SSL_PROTOCOL_DTLS_10; +using ::webrtc::SSL_PROTOCOL_DTLS_12; +using ::webrtc::SSL_PROTOCOL_DTLS_13; +using ::webrtc::SSL_PROTOCOL_NOT_GIVEN; +using ::webrtc::SSL_PROTOCOL_TLS_10; +using ::webrtc::SSL_PROTOCOL_TLS_11; +using ::webrtc::SSL_PROTOCOL_TLS_12; +using ::webrtc::SSL_PROTOCOL_TLS_13; +using ::webrtc::SSL_SERVER; +using ::webrtc::SSLHandshakeError; +using ::webrtc::SSLMode; +using ::webrtc::SSLPeerCertificateDigestError; +using ::webrtc::SSLProtocolVersion; +using ::webrtc::SSLRole; +using ::webrtc::SSLStreamAdapter; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_SSL_STREAM_ADAPTER_H_ diff --git a/rtc_base/ssl_stream_adapter_unittest.cc b/rtc_base/ssl_stream_adapter_unittest.cc index 8417314a3a..8334934b06 100644 --- a/rtc_base/ssl_stream_adapter_unittest.cc +++ b/rtc_base/ssl_stream_adapter_unittest.cc @@ -10,206 +10,314 @@ #include "rtc_base/ssl_stream_adapter.h" +#ifdef OPENSSL_IS_BORINGSSL +#include +#else +#include // IWYU pragma: keep +#endif +#include +#include + #include +#include +#include +#include +#include #include #include #include +#include +#include +#include #include "absl/memory/memory.h" #include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" +#include "api/test/rtc_error_matchers.h" +#include "api/units/time_delta.h" +#include "rtc_base/buffer.h" #include "rtc_base/buffer_queue.h" +#include "rtc_base/callback_list.h" #include "rtc_base/checks.h" -#include "rtc_base/gunit.h" -#include "rtc_base/helpers.h" -#include "rtc_base/memory/fifo_buffer.h" -#include "rtc_base/memory_stream.h" +#include "rtc_base/crypto_random.h" +#include "rtc_base/fake_clock.h" +#include "rtc_base/logging.h" #include "rtc_base/message_digest.h" -#include 
"rtc_base/openssl_stream_adapter.h" -#include "rtc_base/ssl_adapter.h" +#include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/stream.h" -#include "test/field_trial.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/time_utils.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/scoped_key_value_config.h" +#include "test/wait_until.h" using ::testing::Combine; +using ::testing::NotNull; using ::testing::tuple; using ::testing::Values; using ::testing::WithParamInterface; using ::webrtc::SafeTask; -static const int kBlockSize = 4096; -static const char kExporterLabel[] = "label"; -static const unsigned char kExporterContext[] = "context"; -static int kExporterContextLen = sizeof(kExporterContext); - -// A private key used for testing, broken into pieces in order to avoid -// issues with Git's checks for private keys in repos. -#define RSA_PRIVATE_KEY_HEADER "-----BEGIN RSA PRIVATE KEY-----\n" - -static const char kRSA_PRIVATE_KEY_PEM[] = RSA_PRIVATE_KEY_HEADER - "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAMYRkbhmI7kVA/rM\n" - "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n" - "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n" - "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAECgYAvgOs4FJcgvp+TuREx7YtiYVsH\n" - "mwQPTum2z/8VzWGwR8BBHBvIpVe1MbD/Y4seyI2aco/7UaisatSgJhsU46/9Y4fq\n" - "2TwXH9QANf4at4d9n/R6rzwpAJOpgwZgKvdQjkfrKTtgLV+/dawvpxUYkRH4JZM1\n" - "CVGukMfKNrSVH4Ap4QJBAOJmGV1ASPnB4r4nc99at7JuIJmd7fmuVUwUgYi4XgaR\n" - "WhScBsgYwZ/JoywdyZJgnbcrTDuVcWG56B3vXbhdpMsCQQDf9zeJrjnPZ3Cqm79y\n" - "kdqANep0uwZciiNiWxsQrCHztywOvbFhdp8iYVFG9EK8DMY41Y5TxUwsHD+67zao\n" - "ZNqJAkEA1suLUP/GvL8IwuRneQd2tWDqqRQ/Td3qq03hP7e77XtF/buya3Ghclo5\n" - "54czUR89QyVfJEC6278nzA7n2h1uVQJAcG6mztNL6ja/dKZjYZye2CY44QjSlLo0\n" - "MTgTSjdfg/28fFn2Jjtqf9Pi/X+50LWI/RcYMC2no606wRk9kyOuIQJBAK6VSAim\n" - "1pOEjsYQn0X5KEIrz1G3bfCbB848Ime3U2/FWlCHMr6ch8kCZ5d1WUeJD3LbwMNG\n" - "UCXiYxSsu20QNVw=\n" +// Generated using `openssl genrsa -out key.pem 2048` +static const char kRSA_PRIVATE_KEY_PEM[] = + "-----BEGIN RSA PRI" // Linebreak to avoid detection of private + "VATE KEY-----\n" // keys by linters. 
+ "MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC4XOJ6agj673j+\n" + "O8sEnPmhVkjDOd858shAa07kVdeRePlE+wU4GUTY0i5JdXF8cUQLTSdKfqsR7f8L\n" + "jtxhehZk7+OQs5P1VsSQeotr2L0WFBNQZ+cSswLBHt4DjG9vyDJMELwPYkLO/EZw\n" + "Q1HBgrSSHUHE9mRak2JQzxEqdnj2ssUs+K9kTkYLnzq86dMRGc+TA4TiVA4U065M\n" + "lwSe95QMJ5OqYBwbNsVF6BTvdnkkNyizunfoGWB8m9gqYIdlmo3uT21OEnF40Pei\n" + "K5CjvB29IpO6cPmNDR7+vwCy/IeGkXwzvICq/ZrocFNBR5Z4tSm003HX6BbIHtnj\n" + "tvxVaIeFAgMBAAECggEADxQ3yOPh0qZiCsc4smqlZzr/rgoOdjajhtNQC1BzFnii\n" + "yK/QTDeS4DoGo6b5roA0HMmFcGweUVPaM6eOYmGiMcTGI9hwPlWHs7p2K065nnPr\n" + "ZXzuEyM1kzaTWY5zsdyZsot+2jJC/Rt4pmd3KSDn5HiEn9e4OwlJdgsNoB+7ApBW\n" + "G8UmI9IUYic+xgS0IADJIYFx99bVmjLi7zshQAHVemn15v9GcBTCA7uojxX+FLmR\n" + "i8nuqUcTqGemE6PaQiX9MahgHU7NJ/gLs9dEeX4tD+8KVkrH/RRbg43eEATkRo8D\n" + "bO3JZ6MBwVNL6BU4hr+BViXEkHqBa9adoImIWHaLGQKBgQC4zlmHrDm9Ftb6fgsc\n" + "KXbEphPF/fuw4FJrPXP+0kRvF8AGbGqesBksX/JJCo46jfehNNGHmKFZ7oKMsHbS\n" + "yZp1/YZlg020ZLJkJz4GGPF1HgaxdV1L6TvIlofKWKKUEyi3RpMhq6w8hb/+mz/C\n" + "KverTah0EkZjZWwSZa4lQjwCaQKBgQD/YtL6WXiduF94pfVz7MmEoBa00C0rPFaC\n" + "5TOMVH+W2RbcGyVoPoLmwf1H2lN9v+wzaTRaPeHWs5MwQ4HDUbACXtGQ+I+6VNvo\n" + "iEo23jIK0hYzFgRGSMK7E0Uj8oBuPdJjkpCM4qqr0p8UHrktUOD8kB3DjdJrbqLm\n" + "q+9qAWzAvQKBgQCGR5EwDojphuXvnpPuA4bDvjSR4Uj3LRdVypI07o1A903UnQQf\n" + "h67S2mhOgDf1/d+XJ6yzTMi4cqAzH6lG4au03eDAc9aLI7unIAhmH8uaIJYWbUO7\n" + "+50v04iZEywWUZF9Ee+oQHfmhfyKQD3klJnew4+Jvxmb8T7EY1NUyTqXOQKBgQDM\n" + "EpsGZBJm7dqUXQE7Zh5NtWMPjz5YyzlSFXbQjwD5eHW04phMqY8OeDs9fG+1D3Te\n" + "TBYCemqJlytpqLf7bL4Z1szdbFHlkkO7l5S+LWWNkf0dS12VEDVTKf3Y0MHh1dLV\n" + "sFuDyOiaro5hlH9if7uY9kxiZGSdZmYTr5Z7fbH6fQKBgF+NKzivaJKz0a7ZCFhR\n" + "UfjvWrldeRzvyOiq+6nohTy3WNUZ+jSjwXZ7B4HGbHeaTBbsaNeO7aPGNe+Rt3Sr\n" + "rj6EzpBKk60ukkg49c+X/Rski/RmRosovJv4YUHtafafjAzeMhfU/tdKvjM00p9x\n" + "yf5MmWCNPsPfGsRZJpnYGvg3\n" "-----END RSA PRIVATE KEY-----\n"; - #undef RSA_PRIVATE_KEY_HEADER +// Generated using +// `openssl req -new -x509 -key key.pem -out cert.pem -days 365` +// after setting the machine date to something that will ensure the +// certificate is expired. 
static const char kCERT_PEM[] = "-----BEGIN CERTIFICATE-----\n" - "MIIBmTCCAQKgAwIBAgIEbzBSAjANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDEwZX\n" - "ZWJSVEMwHhcNMTQwMTAyMTgyNDQ3WhcNMTQwMjAxMTgyNDQ3WjARMQ8wDQYDVQQD\n" - "EwZXZWJSVEMwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMYRkbhmI7kVA/rM\n" - "czsZ+6JDhDvnkF+vn6yCAGuRPV03zuRqZtDy4N4to7PZu9PjqrRl7nDMXrG3YG9y\n" - "rlIAZ72KjcKKFAJxQyAKLCIdawKRyp8RdK3LEySWEZb0AV58IadqPZDTNHHRX8dz\n" - "5aTSMsbbkZ+C/OzTnbiMqLL/vg6jAgMBAAEwDQYJKoZIhvcNAQELBQADgYEAUflI\n" - "VUe5Krqf5RVa5C3u/UTAOAUJBiDS3VANTCLBxjuMsvqOG0WvaYWP3HYPgrz0jXK2\n" - "LJE/mGw3MyFHEqi81jh95J+ypl6xKW6Rm8jKLR87gUvCaVYn/Z4/P3AqcQTB7wOv\n" - "UD0A8qfhfDM+LK6rPAnCsVN0NRDY3jvd6rzix9M=\n" + "MIIDjTCCAnWgAwIBAgIUTkCy4o8+4W/86RYmgWc8FEhWTzYwDQYJKoZIhvcNAQEL\n" + "BQAwVjELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM\n" + "GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEPMA0GA1UEAwwGV2ViUlRDMB4XDTI0\n" + "MDkwMzAwNTk0NloXDTI1MDkwMzAwNTk0NlowVjELMAkGA1UEBhMCQVUxEzARBgNV\n" + "BAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0IFdpZGdpdHMgUHR5IEx0\n" + "ZDEPMA0GA1UEAwwGV2ViUlRDMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC\n" + "AQEAuFziemoI+u94/jvLBJz5oVZIwznfOfLIQGtO5FXXkXj5RPsFOBlE2NIuSXVx\n" + "fHFEC00nSn6rEe3/C47cYXoWZO/jkLOT9VbEkHqLa9i9FhQTUGfnErMCwR7eA4xv\n" + "b8gyTBC8D2JCzvxGcENRwYK0kh1BxPZkWpNiUM8RKnZ49rLFLPivZE5GC586vOnT\n" + "ERnPkwOE4lQOFNOuTJcEnveUDCeTqmAcGzbFRegU73Z5JDcos7p36BlgfJvYKmCH\n" + "ZZqN7k9tThJxeND3oiuQo7wdvSKTunD5jQ0e/r8AsvyHhpF8M7yAqv2a6HBTQUeW\n" + "eLUptNNx1+gWyB7Z47b8VWiHhQIDAQABo1MwUTAdBgNVHQ4EFgQUlZmkvo2n5ZEa\n" + "B/GCnl8SMQr8G04wHwYDVR0jBBgwFoAUlZmkvo2n5ZEaB/GCnl8SMQr8G04wDwYD\n" + "VR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAnHDEEEOdPaujj3jVWDnk\n" + "bxQYQXuymHr5oqIbGSNZaDiK1ZDwui6fywiUjQUgFipC4Gt3EvpEv8b/M9G4Kr3d\n" + "ET1loApfl6dMRyRym8HydsF4rWs/KmUMpHEcgQzz6ehsX5kqQtStdsAxtTE2QkoY\n" + "5YbQgTKQ0yrwsagKX8pWv0UmXQASJUa26h5H9YpNNfwHy5PZvQya0719qFd8r2EH\n" + "JW67EJElwG5qE2N8DStPUjvVsydfbJflvRBjnf9IRuY9rGogeIOTkkkHAOyNWj3V\n" + "3tZ0r8lKDpUSH6Z5fALuwfEQsWj1qZkZn2ysv1GzEJS2jhS/xPfzOqs8eLVi91lx\n" + "1A==\n" "-----END CERTIFICATE-----\n"; +// Google GTS CA 1C3 certificate. 
Obtained from https://www.webrtc.org static const char kIntCert1[] = "-----BEGIN CERTIFICATE-----\n" - "MIIEUjCCAjqgAwIBAgIBAjANBgkqhkiG9w0BAQsFADCBljELMAkGA1UEBhMCVVMx\n" - "EzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDU1vdW50YWluIFZpZXcxFDAS\n" - "BgNVBAoMC0dvb2dsZSwgSW5jMQwwCgYDVQQLDANHVFAxFzAVBgNVBAMMDnRlbGVw\n" - "aG9ueS5nb29nMR0wGwYJKoZIhvcNAQkBFg5ndHBAZ29vZ2xlLmNvbTAeFw0xNzA5\n" - "MjYwNDA5MDNaFw0yMDA2MjIwNDA5MDNaMGQxCzAJBgNVBAYTAlVTMQswCQYDVQQI\n" - "DAJDQTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEXMBUGA1UECgwOdGVsZXBob255\n" - "Lmdvb2cxFzAVBgNVBAMMDnRlbGVwaG9ueS5nb29nMIGfMA0GCSqGSIb3DQEBAQUA\n" - "A4GNADCBiQKBgQDJXWeeU1v1+wlqkVobzI3aN7Uh2iVQA9YCdq5suuabtiD/qoOD\n" - "NKpmQqsx7WZGGWSZTDFEBaUpvIK7Hb+nzRqk6iioPCFOFuarm6GxO1xVneImMuE6\n" - "tuWb3YZPr+ikChJbl11y5UcSbg0QsbeUc+jHl5umNvrL85Y+z8SP0rxbBwIDAQAB\n" - "o2AwXjAdBgNVHQ4EFgQU7tdZobqlN8R8V72FQnRxmqq8tKswHwYDVR0jBBgwFoAU\n" - "5GgKMUtcxkQ2dJrtNR5YOlIAPDswDwYDVR0TAQH/BAUwAwEB/zALBgNVHQ8EBAMC\n" - "AQYwDQYJKoZIhvcNAQELBQADggIBADObh9Z+z14FmP9zSenhFtq7hFnmNrSkklk8\n" - "eyYWXKfOuIriEQQBZsz76ZcnzStih8Rj+yQ0AXydk4fJ5LOwC2cUqQBar17g6Pd2\n" - "8g4SIL4azR9WvtiSvpuGlwp25b+yunaacDne6ebnf/MUiiKT5w61Xo3cEPVfl38e\n" - "/Up2l0bioid5enUTmg6LY6RxDO6tnZQkz3XD+nNSwT4ehtkqFpHYWjErj0BbkDM2\n" - "hiVc/JsYOZn3DmuOlHVHU6sKwqh3JEyvHO/d7DGzMGWHpHwv2mCTJq6l/sR95Tc2\n" - "GaQZgGDVNs9pdEouJCDm9e/PbQWRYhnat82PTkXx/6mDAAwdZlIi/pACzq8K4p7e\n" - "6hF0t8uKGnXJubHPXxlnJU6yxZ0yWmivAGjwWK4ur832gKlho4jeMDhiI/T3QPpl\n" - "iMNsIvxRhdD+GxJkQP1ezayw8s+Uc9KwKglrkBSRRDLCJUfPOvMmXLUDSTMX7kp4\n" - "/Ak1CA8dVLJIlfEjLBUuvAttlP7+7lsKNgxAjCxZkWLXIyGULzNPQwVWkGfCbrQs\n" - "XyMvSbFsSIb7blV7eLlmf9a+2RprUUkc2ALXLLCI9YQXmxm2beBfMyNmmebwBJzT\n" - "B0OR+5pFFNTJPoNlqpdrDsGrDu7JlUtk0ZLZzYyKXbgy2qXxfd4OWzXXjxpLMszZ\n" - "LDIpOAkj\n" + "MIIFljCCA36gAwIBAgINAgO8U1lrNMcY9QFQZjANBgkqhkiG9w0BAQsFADBHMQsw\n" + "CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU\n" + "MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMjAwODEzMDAwMDQyWhcNMjcwOTMwMDAw\n" + "MDQyWjBGMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp\n" + "Y2VzIExMQzETMBEGA1UEAxMKR1RTIENBIDFDMzCCASIwDQYJKoZIhvcNAQEBBQAD\n" + "ggEPADCCAQoCggEBAPWI3+dijB43+DdCkH9sh9D7ZYIl/ejLa6T/belaI+KZ9hzp\n" + "kgOZE3wJCor6QtZeViSqejOEH9Hpabu5dOxXTGZok3c3VVP+ORBNtzS7XyV3NzsX\n" + "lOo85Z3VvMO0Q+sup0fvsEQRY9i0QYXdQTBIkxu/t/bgRQIh4JZCF8/ZK2VWNAcm\n" + "BA2o/X3KLu/qSHw3TT8An4Pf73WELnlXXPxXbhqW//yMmqaZviXZf5YsBvcRKgKA\n" + "gOtjGDxQSYflispfGStZloEAoPtR28p3CwvJlk/vcEnHXG0g/Zm0tOLKLnf9LdwL\n" + "tmsTDIwZKxeWmLnwi/agJ7u2441Rj72ux5uxiZ0CAwEAAaOCAYAwggF8MA4GA1Ud\n" + "DwEB/wQEAwIBhjAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwEgYDVR0T\n" + "AQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUinR/r4XN7pXNPZzQ4kYU83E1HScwHwYD\n" + "VR0jBBgwFoAU5K8rJnEaK0gnhS9SZizv8IkTcT4waAYIKwYBBQUHAQEEXDBaMCYG\n" + "CCsGAQUFBzABhhpodHRwOi8vb2NzcC5wa2kuZ29vZy9ndHNyMTAwBggrBgEFBQcw\n" + "AoYkaHR0cDovL3BraS5nb29nL3JlcG8vY2VydHMvZ3RzcjEuZGVyMDQGA1UdHwQt\n" + "MCswKaAnoCWGI2h0dHA6Ly9jcmwucGtpLmdvb2cvZ3RzcjEvZ3RzcjEuY3JsMFcG\n" + "A1UdIARQME4wOAYKKwYBBAHWeQIFAzAqMCgGCCsGAQUFBwIBFhxodHRwczovL3Br\n" + "aS5nb29nL3JlcG9zaXRvcnkvMAgGBmeBDAECATAIBgZngQwBAgIwDQYJKoZIhvcN\n" + "AQELBQADggIBAIl9rCBcDDy+mqhXlRu0rvqrpXJxtDaV/d9AEQNMwkYUuxQkq/BQ\n" + "cSLbrcRuf8/xam/IgxvYzolfh2yHuKkMo5uhYpSTld9brmYZCwKWnvy15xBpPnrL\n" + "RklfRuFBsdeYTWU0AIAaP0+fbH9JAIFTQaSSIYKCGvGjRFsqUBITTcFTNvNCCK9U\n" + "+o53UxtkOCcXCb1YyRt8OS1b887U7ZfbFAO/CVMkH8IMBHmYJvJh8VNS/UKMG2Yr\n" + "PxWhu//2m+OBmgEGcYk1KCTd4b3rGS3hSMs9WYNRtHTGnXzGsYZbr8w0xNPM1IER\n" + "lQCh9BIiAfq0g3GvjLeMcySsN1PCAJA/Ef5c7TaUEDu9Ka7ixzpiO2xj2YC/WXGs\n" + 
"Yye5TBeg2vZzFb8q3o/zpWwygTMD0IZRcZk0upONXbVRWPeyk+gB9lm+cZv9TSjO\n" + "z23HFtz30dZGm6fKa+l3D/2gthsjgx0QGtkJAITgRNOidSOzNIb2ILCkXhAd4FJG\n" + "AJ2xDx8hcFH1mt0G/FX0Kw4zd8NLQsLxdxP8c4CU6x+7Nz/OAipmsHMdMqUybDKw\n" + "juDEI/9bfU1lcKwrmz3O2+BtjjKAvpafkmO8l7tdufThcV4q5O8DIrGKZTqPwJNl\n" + "1IXNDw9bg1kWRxYtnCQ6yICmJhSFm/Y3m6xv+cXDBlHz4n/FsRC6UfTd\n" "-----END CERTIFICATE-----\n"; +// Google GTS Root R1 certificate. Obtained from https://www.webrtc.org static const char kCACert[] = "-----BEGIN CERTIFICATE-----\n" - "MIIGETCCA/mgAwIBAgIJAKN9r/BdbGUJMA0GCSqGSIb3DQEBCwUAMIGWMQswCQYD\n" - "VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g\n" - "VmlldzEUMBIGA1UECgwLR29vZ2xlLCBJbmMxDDAKBgNVBAsMA0dUUDEXMBUGA1UE\n" - "AwwOdGVsZXBob255Lmdvb2cxHTAbBgkqhkiG9w0BCQEWDmd0cEBnb29nbGUuY29t\n" - "MB4XDTE3MDcyNzIzMDE0NVoXDTE3MDgyNjIzMDE0NVowgZYxCzAJBgNVBAYTAlVT\n" - "MRMwEQYDVQQIDApDYWxpZm9ybmlhMRYwFAYDVQQHDA1Nb3VudGFpbiBWaWV3MRQw\n" - "EgYDVQQKDAtHb29nbGUsIEluYzEMMAoGA1UECwwDR1RQMRcwFQYDVQQDDA50ZWxl\n" - "cGhvbnkuZ29vZzEdMBsGCSqGSIb3DQEJARYOZ3RwQGdvb2dsZS5jb20wggIiMA0G\n" - "CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCfvpF7aBV5Hp1EHsWoIlL3GeHwh8dS\n" - "lv9VQCegN9rD06Ny7MgcED5AiK2vqXmUmOVS+7NbATkdVYN/eozDhKtN3Q3n87kJ\n" - "Nt/TD/TcZZHOZIGsRPbrf2URK26E/5KzTzbzXVBOA1e+gSj+EBbltGqb01ZO5ErF\n" - "iPGViPM/HpYKdq6mfz2bS5PhU67XZMM2zvToyReQ/Fjm/6PJhwKSRXSgZF5djPhk\n" - "2LfOKMLS0AeZtd2C4DFsCU41lfLUkybioDgFuzTQ3TFi1K8A07KYTMmLY/yQppnf\n" - "SpNX58shlVhM+Ed37K1Z0rU0OfVCZ5P+KKaSSfMranjlU7zeUIhZYjqq/EYrEhbS\n" - "dLnNHwgJrqxzId3kq8uuLM6+VB7JZKnZLfT90GdAbX4+tutNe21smmogF9f80vEy\n" - "gM4tOp9rXrvz9vCwWHXVY9kdKemdLAsREoO6MS9k2ctK4jj80o2dROuFC6Q3e7mz\n" - "RjvZr5Tvi464c2o9o/jNlJ0O6q7V2eQzohD+7VnV5QPpRGXxlIeqpR2zoAg+WtRS\n" - "4OgHOVYiD3M6uAlggJA5pcDjMfkEZ+pkhtVcT4qMCEoruk6GbyPxS565oSHu16bH\n" - "EjeCqbZOVND5T3oA7nz6aQSs8sJabt0jmxUkGVnE+4ZDIuuRtkRma+0P/96Mtqor\n" - "OlpNWY1OBDY64QIDAQABo2AwXjAdBgNVHQ4EFgQU5GgKMUtcxkQ2dJrtNR5YOlIA\n" - "PDswHwYDVR0jBBgwFoAU5GgKMUtcxkQ2dJrtNR5YOlIAPDswDwYDVR0TAQH/BAUw\n" - "AwEB/zALBgNVHQ8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAARQly5/bB6VUL2C\n" - "ykDYgWt48go407pAra6tL2kjpdfxV5PdL7iMZRkeht00vj+BVahIqZKrNOa/f5Fx\n" - "vlpahZFu0PDN436aQwRZ9qWut2qDOK0/z9Hhj6NWybquRFwMwqkPG/ivLMDU8Dmj\n" - "CIplpngPYNwXCs0KzdjSXYxqxJbwMjQXELD+/RcurY0oTtJMM1/2vKQMzw24UJqe\n" - "XLJAlsnd2AnWzWNUEviDZY89j9NdkHerBmV2gGzcU+X5lgOO5M8odBv0ZC9D+a6Z\n" - "QPZAOfdGVw60hhGvTW5s/s0dHwCpegRidhs0MD0fTmwwjYFBSmUx3Gztr4JTzOOr\n" - "7e5daJuak2ujQ5DqcGBvt1gePjSudb5brS7JQtN8tI/FyrnR4q/OuOwv1EvlC5RG\n" - "hLX+TXaWqFxB1Hd8ebKRR40mboFG6KcUI3lLBthDvQE7jnq48QfZMjlMQK0ZF1l7\n" - "SrlwRXWA74bU8CLJvnZKKo9p4TsTiDYGSYC6tNHKj5s3TGWL46oqGyZ0KdGNhrtC\n" - "rIGenMhth1vPYjyy0XuGBndXT85yi+IM2l8g8oU845+plxIhgpSI8bbC0oLwnhQ5\n" - "ARfsiYLkXDE7imSS0CSUmye76372mlzAIB1is4bBB/SzpPQtBuB9LDKtONgpSGHn\n" - "dGaXBy+qbVXVyGXaeEbIRjtJ6m92\n" + "MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH\n" + "MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM\n" + "QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy\n" + "MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl\n" + "cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB\n" + "AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM\n" + "f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX\n" + "mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7\n" + "zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P\n" + "fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc\n" + 
"vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4\n" + "Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp\n" + "zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO\n" + "Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW\n" + "k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+\n" + "DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF\n" + "lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV\n" + "HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW\n" + "Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1\n" + "d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z\n" + "XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR\n" + "gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3\n" + "d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv\n" + "J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg\n" + "DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM\n" + "+SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy\n" + "F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9\n" + "SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws\n" + "E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl\n" "-----END CERTIFICATE-----\n"; class SSLStreamAdapterTestBase; -class SSLDummyStreamBase : public rtc::StreamInterface, - public sigslot::has_slots<> { +// StreamWrapper is a middle layer between `stream`, which supports a single +// event callback, and test classes in this file that need that event forwarded +// to them. I.e. this class wraps a `stream` object that it delegates all calls +// to, but for the event callback, `StreamWrapper` additionally provides support +// for forwarding event notifications to test classes that call +// `SubscribeStreamEvent()`. +// +// This is needed because in this file, tests connect both client and server +// streams (SSLDummyStream) to the same underlying `stream` objects +// (see CreateClientStream() and CreateServerStream()). 
+class StreamWrapper : public webrtc::StreamInterface { + public: + explicit StreamWrapper(std::unique_ptr<webrtc::StreamInterface> stream) + : stream_(std::move(stream)) { + stream_->SetEventCallback([this](int events, int err) { + RTC_DCHECK_RUN_ON(&callback_sequence_); + callbacks_.Send(events, err); + FireEvent(events, err); + }); + } + + template <typename F> + void SubscribeStreamEvent(const void* removal_tag, F&& callback) { + callbacks_.AddReceiver(removal_tag, std::forward<F>(callback)); + } + + void UnsubscribeStreamEvent(const void* removal_tag) { + callbacks_.RemoveReceivers(removal_tag); + } + + webrtc::StreamState GetState() const override { return stream_->GetState(); } + + void Close() override { stream_->Close(); } + + webrtc::StreamResult Read(webrtc::ArrayView<uint8_t> buffer, + size_t& read, + int& error) override { + return stream_->Read(buffer, read, error); + } + + webrtc::StreamResult Write(webrtc::ArrayView<const uint8_t> data, + size_t& written, + int& error) override { + return stream_->Write(data, written, error); + } + + private: + const std::unique_ptr<webrtc::StreamInterface> stream_; + webrtc::CallbackList<int, int> callbacks_; +}; + +class SSLDummyStream final : public webrtc::StreamInterface { public: - SSLDummyStreamBase(SSLStreamAdapterTestBase* test, - absl::string_view side, - rtc::StreamInterface* in, - rtc::StreamInterface* out) + SSLDummyStream(SSLStreamAdapterTestBase* test, + absl::string_view side, + StreamWrapper* in, + StreamWrapper* out) : test_base_(test), side_(side), in_(in), out_(out), first_packet_(true) { - in_->SignalEvent.connect(this, &SSLDummyStreamBase::OnEventIn); - out_->SignalEvent.connect(this, &SSLDummyStreamBase::OnEventOut); + RTC_CHECK(thread_); + RTC_CHECK_NE(in, out); + in_->SubscribeStreamEvent( + this, [this](int events, int err) { OnEventIn(events, err); }); + out_->SubscribeStreamEvent( + this, [this](int events, int err) { OnEventOut(events, err); }); + } + + ~SSLDummyStream() override { + in_->UnsubscribeStreamEvent(this); + out_->UnsubscribeStreamEvent(this); } - rtc::StreamState GetState() const override { return rtc::SS_OPEN; } + webrtc::StreamState GetState() const override { return webrtc::SS_OPEN; } - rtc::StreamResult Read(rtc::ArrayView<uint8_t> buffer, - size_t& read, - int& error) override { - rtc::StreamResult r; + webrtc::StreamResult Read(webrtc::ArrayView<uint8_t> buffer, + size_t& read, + int& error) override { + webrtc::StreamResult r; r = in_->Read(buffer, read, error); - if (r == rtc::SR_BLOCK) - return rtc::SR_BLOCK; - if (r == rtc::SR_EOS) - return rtc::SR_EOS; + if (r == webrtc::SR_BLOCK) + return webrtc::SR_BLOCK; + if (r == webrtc::SR_EOS) + return webrtc::SR_EOS; - if (r != rtc::SR_SUCCESS) { + if (r != webrtc::SR_SUCCESS) { ADD_FAILURE(); - return rtc::SR_ERROR; + return webrtc::SR_ERROR; } - return rtc::SR_SUCCESS; + return webrtc::SR_SUCCESS; } // Catch readability events on in and pass them up. - void OnEventIn(rtc::StreamInterface* stream, int sig, int err) { - int mask = (rtc::SE_READ | rtc::SE_CLOSE); + void OnEventIn(int sig, int err) { + int mask = (webrtc::SE_READ | webrtc::SE_CLOSE); if (sig & mask) { - RTC_LOG(LS_VERBOSE) << "SSLDummyStreamBase::OnEvent side=" << side_ + RTC_LOG(LS_VERBOSE) << "SSLDummyStream::OnEventIn side=" << side_ << " sig=" << sig << " forwarding upward"; PostEvent(sig & mask, 0); } } // Catch writeability events on out and pass them up. 
- void OnEventOut(rtc::StreamInterface* stream, int sig, int err) { - if (sig & rtc::SE_WRITE) { - RTC_LOG(LS_VERBOSE) << "SSLDummyStreamBase::OnEvent side=" << side_ + void OnEventOut(int sig, int err) { + if (sig & webrtc::SE_WRITE) { + RTC_LOG(LS_VERBOSE) << "SSLDummyStream::OnEventOut side=" << side_ << " sig=" << sig << " forwarding upward"; - PostEvent(sig & rtc::SE_WRITE, 0); + PostEvent(sig & webrtc::SE_WRITE, 0); } } // Write to the outgoing FifoBuffer - rtc::StreamResult WriteData(rtc::ArrayView data, - size_t& written, - int& error) { + webrtc::StreamResult WriteData(webrtc::ArrayView data, + size_t& written, + int& error) { return out_->Write(data, written, error); } - rtc::StreamResult Write(rtc::ArrayView data, - size_t& written, - int& error) override; + webrtc::StreamResult Write(webrtc::ArrayView data, + size_t& written, + int& error) override; void Close() override { RTC_LOG(LS_INFO) << "Closing outbound stream"; @@ -219,29 +327,21 @@ class SSLDummyStreamBase : public rtc::StreamInterface, private: void PostEvent(int events, int err) { thread_->PostTask(SafeTask(task_safety_.flag(), [this, events, err]() { - SignalEvent(this, events, err); + RTC_DCHECK_RUN_ON(&callback_sequence_); + FireEvent(events, err); })); } webrtc::ScopedTaskSafety task_safety_; - rtc::Thread* const thread_ = rtc::Thread::Current(); + webrtc::Thread* const thread_ = webrtc::Thread::Current(); SSLStreamAdapterTestBase* test_base_; const std::string side_; - rtc::StreamInterface* in_; - rtc::StreamInterface* out_; + StreamWrapper* const in_; + StreamWrapper* const out_; bool first_packet_; }; -class SSLDummyStreamTLS : public SSLDummyStreamBase { - public: - SSLDummyStreamTLS(SSLStreamAdapterTestBase* test, - absl::string_view side, - rtc::FifoBuffer* in, - rtc::FifoBuffer* out) - : SSLDummyStreamBase(test, side, in, out) {} -}; - -class BufferQueueStream : public rtc::StreamInterface { +class BufferQueueStream : public webrtc::StreamInterface { public: BufferQueueStream(size_t capacity, size_t default_size) : buffer_(capacity, default_size) {} @@ -249,65 +349,56 @@ class BufferQueueStream : public rtc::StreamInterface { // Implementation of abstract StreamInterface methods. // A buffer queue stream is always "open". - rtc::StreamState GetState() const override { return rtc::SS_OPEN; } + webrtc::StreamState GetState() const override { return webrtc::SS_OPEN; } // Reading a buffer queue stream will either succeed or block. - rtc::StreamResult Read(rtc::ArrayView buffer, - size_t& read, - int& error) override { + webrtc::StreamResult Read(webrtc::ArrayView buffer, + size_t& read, + int& error) override { const bool was_writable = buffer_.is_writable(); if (!buffer_.ReadFront(buffer.data(), buffer.size(), &read)) - return rtc::SR_BLOCK; + return webrtc::SR_BLOCK; if (!was_writable) NotifyWritableForTest(); - return rtc::SR_SUCCESS; + return webrtc::SR_SUCCESS; } // Writing to a buffer queue stream will either succeed or block. - rtc::StreamResult Write(rtc::ArrayView data, - size_t& written, - int& error) override { + webrtc::StreamResult Write(webrtc::ArrayView data, + size_t& written, + int& error) override { const bool was_readable = buffer_.is_readable(); if (!buffer_.WriteBack(data.data(), data.size(), &written)) - return rtc::SR_BLOCK; + return webrtc::SR_BLOCK; if (!was_readable) NotifyReadableForTest(); - return rtc::SR_SUCCESS; + return webrtc::SR_SUCCESS; } // A buffer queue stream can not be closed. 
void Close() override {} protected: - void NotifyReadableForTest() { PostEvent(rtc::SE_READ, 0); } - void NotifyWritableForTest() { PostEvent(rtc::SE_WRITE, 0); } + void NotifyReadableForTest() { PostEvent(webrtc::SE_READ, 0); } + void NotifyWritableForTest() { PostEvent(webrtc::SE_WRITE, 0); } private: void PostEvent(int events, int err) { thread_->PostTask(SafeTask(task_safety_.flag(), [this, events, err]() { - SignalEvent(this, events, err); + RTC_DCHECK_RUN_ON(&callback_sequence_); + FireEvent(events, err); })); } - rtc::Thread* const thread_ = rtc::Thread::Current(); + webrtc::Thread* const thread_ = webrtc::Thread::Current(); webrtc::ScopedTaskSafety task_safety_; - rtc::BufferQueue buffer_; + webrtc::BufferQueue buffer_; }; -class SSLDummyStreamDTLS : public SSLDummyStreamBase { - public: - SSLDummyStreamDTLS(SSLStreamAdapterTestBase* test, - absl::string_view side, - BufferQueueStream* in, - BufferQueueStream* out) - : SSLDummyStreamBase(test, side, in, out) {} -}; - -static const int kFifoBufferSize = 4096; static const int kBufferCapacity = 1; static const size_t kDefaultBufferSize = 2048; @@ -318,51 +409,45 @@ class SSLStreamAdapterTestBase : public ::testing::Test, absl::string_view client_cert_pem, absl::string_view client_private_key_pem, bool dtls, - rtc::KeyParams client_key_type = rtc::KeyParams(rtc::KT_DEFAULT), - rtc::KeyParams server_key_type = rtc::KeyParams(rtc::KT_DEFAULT)) + webrtc::KeyParams client_key_type = webrtc::KeyParams(webrtc::KT_DEFAULT), + webrtc::KeyParams server_key_type = webrtc::KeyParams(webrtc::KT_DEFAULT), + std::pair digest = + std::make_pair(webrtc::DIGEST_SHA_256, SHA256_DIGEST_LENGTH)) : client_cert_pem_(client_cert_pem), client_private_key_pem_(client_private_key_pem), client_key_type_(client_key_type), server_key_type_(server_key_type), - client_stream_(nullptr), - server_stream_(nullptr), + digest_algorithm_(digest.first), + digest_length_(digest.second), delay_(0), mtu_(1460), loss_(0), lose_first_packet_(false), damage_(false), dtls_(dtls), - handshake_wait_(5000), + handshake_wait_(webrtc::TimeDelta::Millis(5000)), identities_set_(false) { // Set use of the test RNG to get predictable loss patterns. - rtc::SetRandomTestMode(true); + webrtc::SetRandomTestMode(true); } ~SSLStreamAdapterTestBase() override { // Put it back for the next test. 
- rtc::SetRandomTestMode(false); + webrtc::SetRandomTestMode(false); } void SetUp() override { - CreateStreams(); - - client_ssl_ = - rtc::SSLStreamAdapter::Create(absl::WrapUnique(client_stream_)); - server_ssl_ = - rtc::SSLStreamAdapter::Create(absl::WrapUnique(server_stream_)); - - // Set up the slots - client_ssl_->SignalEvent.connect(this, &SSLStreamAdapterTestBase::OnEvent); - server_ssl_->SignalEvent.connect(this, &SSLStreamAdapterTestBase::OnEvent); + InitializeClientAndServerStreams(); - std::unique_ptr client_identity; + std::unique_ptr client_identity; if (!client_cert_pem_.empty() && !client_private_key_pem_.empty()) { - client_identity = rtc::SSLIdentity::CreateFromPEMStrings( + client_identity = webrtc::SSLIdentity::CreateFromPEMStrings( client_private_key_pem_, client_cert_pem_); } else { - client_identity = rtc::SSLIdentity::Create("client", client_key_type_); + client_identity = webrtc::SSLIdentity::Create("client", client_key_type_); } - auto server_identity = rtc::SSLIdentity::Create("server", server_key_type_); + auto server_identity = + webrtc::SSLIdentity::Create("server", server_key_type_); client_ssl_->SetIdentity(std::move(client_identity)); server_ssl_->SetIdentity(std::move(server_identity)); @@ -373,106 +458,108 @@ class SSLStreamAdapterTestBase : public ::testing::Test, server_ssl_.reset(nullptr); } - virtual void CreateStreams() = 0; + virtual std::unique_ptr CreateClientStream() = 0; + virtual std::unique_ptr CreateServerStream() = 0; + + void InitializeClientAndServerStreams( + absl::string_view client_experiment = "", + absl::string_view server_experiment = "") { + // Note: `client_ssl_` and `server_ssl_` may be non-nullptr. + + // The field trials are read when the OpenSSLStreamAdapter is initialized. + using webrtc::test::ScopedKeyValueConfig; + { + std::unique_ptr trial( + client_experiment.empty() + ? nullptr + : new ScopedKeyValueConfig(client_experiment)); + client_ssl_ = webrtc::SSLStreamAdapter::Create(CreateClientStream(), + nullptr, trial.get()); + } + { + std::unique_ptr trial( + server_experiment.empty() + ? nullptr + : new ScopedKeyValueConfig(server_experiment)); + server_ssl_ = webrtc::SSLStreamAdapter::Create(CreateServerStream(), + nullptr, trial.get()); + } + client_ssl_->SetEventCallback( + [this](int events, int err) { OnClientEvent(events, err); }); + server_ssl_->SetEventCallback( + [this](int events, int err) { OnServerEvent(events, err); }); + } // Recreate the client/server identities with the specified validity period. // `not_before` and `not_after` are offsets from the current time in number // of seconds. 
void ResetIdentitiesWithValidity(int not_before, int not_after) { - CreateStreams(); - - client_ssl_ = - rtc::SSLStreamAdapter::Create(absl::WrapUnique(client_stream_)); - server_ssl_ = - rtc::SSLStreamAdapter::Create(absl::WrapUnique(server_stream_)); - - client_ssl_->SignalEvent.connect(this, &SSLStreamAdapterTestBase::OnEvent); - server_ssl_->SignalEvent.connect(this, &SSLStreamAdapterTestBase::OnEvent); + InitializeClientAndServerStreams(); time_t now = time(nullptr); - rtc::SSLIdentityParams client_params; - client_params.key_params = rtc::KeyParams(rtc::KT_DEFAULT); + webrtc::SSLIdentityParams client_params; + client_params.key_params = webrtc::KeyParams(webrtc::KT_DEFAULT); client_params.common_name = "client"; client_params.not_before = now + not_before; client_params.not_after = now + not_after; - auto client_identity = rtc::SSLIdentity::CreateForTest(client_params); + auto client_identity = webrtc::SSLIdentity::CreateForTest(client_params); - rtc::SSLIdentityParams server_params; - server_params.key_params = rtc::KeyParams(rtc::KT_DEFAULT); + webrtc::SSLIdentityParams server_params; + server_params.key_params = webrtc::KeyParams(webrtc::KT_DEFAULT); server_params.common_name = "server"; server_params.not_before = now + not_before; server_params.not_after = now + not_after; - auto server_identity = rtc::SSLIdentity::CreateForTest(server_params); + auto server_identity = webrtc::SSLIdentity::CreateForTest(server_params); client_ssl_->SetIdentity(std::move(client_identity)); server_ssl_->SetIdentity(std::move(server_identity)); } - virtual void OnEvent(rtc::StreamInterface* stream, int sig, int err) { - RTC_LOG(LS_VERBOSE) << "SSLStreamAdapterTestBase::OnEvent sig=" << sig; - - if (sig & rtc::SE_READ) { - ReadData(stream); - } - - if ((stream == client_ssl_.get()) && (sig & rtc::SE_WRITE)) { - WriteData(); - } - } - void SetPeerIdentitiesByDigest(bool correct, bool expect_success) { - unsigned char server_digest[20]; - size_t server_digest_len; - unsigned char client_digest[20]; - size_t client_digest_len; - bool rv; - rtc::SSLPeerCertificateDigestError err; - rtc::SSLPeerCertificateDigestError expected_err = + webrtc::Buffer server_digest(0, EVP_MAX_MD_SIZE); + webrtc::Buffer client_digest(0, EVP_MAX_MD_SIZE); + webrtc::SSLPeerCertificateDigestError err; + webrtc::SSLPeerCertificateDigestError expected_err = expect_success - ? rtc::SSLPeerCertificateDigestError::NONE - : rtc::SSLPeerCertificateDigestError::VERIFICATION_FAILED; + ? 
webrtc::SSLPeerCertificateDigestError::NONE + : webrtc::SSLPeerCertificateDigestError::VERIFICATION_FAILED; RTC_LOG(LS_INFO) << "Setting peer identities by digest"; + RTC_DCHECK(server_identity()); + RTC_DCHECK(client_identity()); - rv = server_identity()->certificate().ComputeDigest( - rtc::DIGEST_SHA_1, server_digest, 20, &server_digest_len); - ASSERT_TRUE(rv); - rv = client_identity()->certificate().ComputeDigest( - rtc::DIGEST_SHA_1, client_digest, 20, &client_digest_len); - ASSERT_TRUE(rv); + ASSERT_TRUE(server_identity()->certificate().ComputeDigest( + digest_algorithm_, server_digest)); + ASSERT_TRUE(client_identity()->certificate().ComputeDigest( + digest_algorithm_, client_digest)); if (!correct) { RTC_LOG(LS_INFO) << "Setting bogus digest for server cert"; server_digest[0]++; } - rv = client_ssl_->SetPeerCertificateDigest(rtc::DIGEST_SHA_1, server_digest, - server_digest_len, &err); + err = + client_ssl_->SetPeerCertificateDigest(digest_algorithm_, server_digest); EXPECT_EQ(expected_err, err); - EXPECT_EQ(expect_success, rv); if (!correct) { RTC_LOG(LS_INFO) << "Setting bogus digest for client cert"; client_digest[0]++; } - rv = server_ssl_->SetPeerCertificateDigest(rtc::DIGEST_SHA_1, client_digest, - client_digest_len, &err); + err = + server_ssl_->SetPeerCertificateDigest(digest_algorithm_, client_digest); EXPECT_EQ(expected_err, err); - EXPECT_EQ(expect_success, rv); identities_set_ = true; } - void SetupProtocolVersions(rtc::SSLProtocolVersion server_version, - rtc::SSLProtocolVersion client_version) { + void SetupProtocolVersions(webrtc::SSLProtocolVersion server_version, + webrtc::SSLProtocolVersion client_version) { server_ssl_->SetMaxProtocolVersion(server_version); client_ssl_->SetMaxProtocolVersion(client_version); } void TestHandshake(bool expect_success = true) { - server_ssl_->SetMode(dtls_ ? rtc::SSL_MODE_DTLS : rtc::SSL_MODE_TLS); - client_ssl_->SetMode(dtls_ ? rtc::SSL_MODE_DTLS : rtc::SSL_MODE_TLS); - if (!dtls_) { // Make sure we simulate a reliable network for TLS. // This is just a check to make sure that people don't write wrong @@ -497,22 +584,28 @@ class SSLStreamAdapterTestBase : public ::testing::Test, // Now run the handshake if (expect_success) { - EXPECT_TRUE_WAIT((client_ssl_->GetState() == rtc::SS_OPEN) && - (server_ssl_->GetState() == rtc::SS_OPEN), - handshake_wait_); + EXPECT_THAT(webrtc::WaitUntil( + [&] { + return (client_ssl_->GetState() == webrtc::SS_OPEN) && + (server_ssl_->GetState() == webrtc::SS_OPEN); + }, + ::testing::IsTrue(), + {.timeout = handshake_wait_, .clock = &clock_}), + webrtc::IsRtcOk()); } else { - EXPECT_TRUE_WAIT(client_ssl_->GetState() == rtc::SS_CLOSED, - handshake_wait_); + EXPECT_THAT( + webrtc::WaitUntil([&] { return client_ssl_->GetState(); }, + ::testing::Eq(webrtc::SS_CLOSED), + {.timeout = handshake_wait_, .clock = &clock_}), + webrtc::IsRtcOk()); } } // This tests that we give up after 12 DTLS resends. + // Only works for BoringSSL which allows advancing the fake clock. void TestHandshakeTimeout() { - rtc::ScopedFakeClock clock; - int64_t time_start = clock.TimeNanos(); + int64_t time_start = clock_.TimeNanos(); webrtc::TimeDelta time_increment = webrtc::TimeDelta::Millis(1000); - server_ssl_->SetMode(dtls_ ? rtc::SSL_MODE_DTLS : rtc::SSL_MODE_TLS); - client_ssl_->SetMode(dtls_ ? rtc::SSL_MODE_DTLS : rtc::SSL_MODE_TLS); if (!dtls_) { // Make sure we simulate a reliable network for TLS. 
@@ -538,24 +631,25 @@ class SSLStreamAdapterTestBase : public ::testing::Test, // Now wait for the handshake to timeout (or fail after an hour of simulated // time). - while (client_ssl_->GetState() == rtc::SS_OPENING && - (rtc::TimeDiff(clock.TimeNanos(), time_start) < - 3600 * rtc::kNumNanosecsPerSec)) { - EXPECT_TRUE_WAIT(!((client_ssl_->GetState() == rtc::SS_OPEN) && - (server_ssl_->GetState() == rtc::SS_OPEN)), - 1000); - clock.AdvanceTime(time_increment); + while (client_ssl_->GetState() == webrtc::SS_OPENING && + (webrtc::TimeDiff(clock_.TimeNanos(), time_start) < + 3600 * webrtc::kNumNanosecsPerSec)) { + EXPECT_THAT(webrtc::WaitUntil( + [&] { + return !((client_ssl_->GetState() == webrtc::SS_OPEN) && + (server_ssl_->GetState() == webrtc::SS_OPEN)); + }, + ::testing::IsTrue(), {.clock = &clock_}), + webrtc::IsRtcOk()); + clock_.AdvanceTime(time_increment); } - RTC_CHECK_EQ(client_ssl_->GetState(), rtc::SS_CLOSED); + EXPECT_EQ(client_ssl_->GetState(), webrtc::SS_CLOSED); } // This tests that the handshake can complete before the identity is verified, // and the identity will be verified after the fact. It also verifies that // packets can't be read or written before the identity has been verified. void TestHandshakeWithDelayedIdentity(bool valid_identity) { - server_ssl_->SetMode(dtls_ ? rtc::SSL_MODE_DTLS : rtc::SSL_MODE_TLS); - client_ssl_->SetMode(dtls_ ? rtc::SSL_MODE_DTLS : rtc::SSL_MODE_TLS); - if (!dtls_) { // Make sure we simulate a reliable network for TLS. // This is just a check to make sure that people don't write wrong @@ -571,34 +665,38 @@ class SSLStreamAdapterTestBase : public ::testing::Test, ASSERT_EQ(0, client_ssl_->StartSSL()); // Now run the handshake. - EXPECT_TRUE_WAIT( - client_ssl_->IsTlsConnected() && server_ssl_->IsTlsConnected(), - handshake_wait_); + EXPECT_THAT(webrtc::WaitUntil( + [&] { + return client_ssl_->IsTlsConnected() && + server_ssl_->IsTlsConnected(); + }, + ::testing::IsTrue(), + {.timeout = handshake_wait_, .clock = &clock_}), + webrtc::IsRtcOk()); // Until the identity has been verified, the state should still be // SS_OPENING and writes should return SR_BLOCK. - EXPECT_EQ(rtc::SS_OPENING, client_ssl_->GetState()); - EXPECT_EQ(rtc::SS_OPENING, server_ssl_->GetState()); - uint8_t packet[1]; + EXPECT_EQ(webrtc::SS_OPENING, client_ssl_->GetState()); + EXPECT_EQ(webrtc::SS_OPENING, server_ssl_->GetState()); + uint8_t packet[1] = {0}; size_t sent; + size_t read; int error; - EXPECT_EQ(rtc::SR_BLOCK, client_ssl_->Write(packet, sent, error)); - EXPECT_EQ(rtc::SR_BLOCK, server_ssl_->Write(packet, sent, error)); + EXPECT_EQ(webrtc::SR_BLOCK, client_ssl_->Write(packet, sent, error)); + EXPECT_EQ(webrtc::SR_BLOCK, server_ssl_->Write(packet, sent, error)); // Collect both of the certificate digests; needs to be done before calling // SetPeerCertificateDigest as that may reset the identity. 
- unsigned char server_digest[20]; - size_t server_digest_len; - unsigned char client_digest[20]; - size_t client_digest_len; - bool rv; - - rv = server_identity()->certificate().ComputeDigest( - rtc::DIGEST_SHA_1, server_digest, 20, &server_digest_len); - ASSERT_TRUE(rv); - rv = client_identity()->certificate().ComputeDigest( - rtc::DIGEST_SHA_1, client_digest, 20, &client_digest_len); - ASSERT_TRUE(rv); + webrtc::Buffer server_digest(0, EVP_MAX_MD_SIZE); + webrtc::Buffer client_digest(0, EVP_MAX_MD_SIZE); + + ASSERT_THAT(server_identity(), NotNull()); + ASSERT_TRUE(server_identity()->certificate().ComputeDigest( + digest_algorithm_, server_digest)); + + ASSERT_THAT(client_identity(), NotNull()); + ASSERT_TRUE(client_identity()->certificate().ComputeDigest( + digest_algorithm_, client_digest)); if (!valid_identity) { RTC_LOG(LS_INFO) << "Setting bogus digest for client/server certs"; @@ -607,74 +705,70 @@ class SSLStreamAdapterTestBase : public ::testing::Test, } // Set the peer certificate digest for the client. - rtc::SSLPeerCertificateDigestError err; - rtc::SSLPeerCertificateDigestError expected_err = + webrtc::SSLPeerCertificateDigestError err; + webrtc::SSLPeerCertificateDigestError expected_err = valid_identity - ? rtc::SSLPeerCertificateDigestError::NONE - : rtc::SSLPeerCertificateDigestError::VERIFICATION_FAILED; - rv = client_ssl_->SetPeerCertificateDigest(rtc::DIGEST_SHA_1, server_digest, - server_digest_len, &err); + ? webrtc::SSLPeerCertificateDigestError::NONE + : webrtc::SSLPeerCertificateDigestError::VERIFICATION_FAILED; + err = + client_ssl_->SetPeerCertificateDigest(digest_algorithm_, server_digest); EXPECT_EQ(expected_err, err); - EXPECT_EQ(valid_identity, rv); // State should then transition to SS_OPEN or SS_CLOSED based on validation // of the identity. if (valid_identity) { - EXPECT_EQ(rtc::SS_OPEN, client_ssl_->GetState()); + EXPECT_EQ(webrtc::SS_OPEN, client_ssl_->GetState()); // If the client sends a packet while the server still hasn't verified the // client identity, the server should continue to return SR_BLOCK. - int error; - EXPECT_EQ(rtc::SR_SUCCESS, client_ssl_->Write(packet, sent, error)); - size_t read; - EXPECT_EQ(rtc::SR_BLOCK, server_ssl_->Read(packet, read, error)); + EXPECT_EQ(webrtc::SR_SUCCESS, client_ssl_->Write(packet, sent, error)); + EXPECT_EQ(webrtc::SR_BLOCK, server_ssl_->Read(packet, read, error)); } else { - EXPECT_EQ(rtc::SS_CLOSED, client_ssl_->GetState()); + EXPECT_EQ(webrtc::SS_CLOSED, client_ssl_->GetState()); } // Set the peer certificate digest for the server. 
- rv = server_ssl_->SetPeerCertificateDigest(rtc::DIGEST_SHA_1, client_digest, - client_digest_len, &err); + err = + server_ssl_->SetPeerCertificateDigest(digest_algorithm_, client_digest); EXPECT_EQ(expected_err, err); - EXPECT_EQ(valid_identity, rv); if (valid_identity) { - EXPECT_EQ(rtc::SS_OPEN, server_ssl_->GetState()); + EXPECT_EQ(webrtc::SS_OPEN, server_ssl_->GetState()); } else { - EXPECT_EQ(rtc::SS_CLOSED, server_ssl_->GetState()); + EXPECT_EQ(webrtc::SS_CLOSED, server_ssl_->GetState()); } } - rtc::StreamResult DataWritten(SSLDummyStreamBase* from, - const void* data, - size_t data_len, - size_t& written, - int& error) { + webrtc::StreamResult DataWritten(SSLDummyStream* from, + const void* data, + size_t data_len, + size_t& written, + int& error) { // Randomly drop loss_ percent of packets - if (rtc::CreateRandomId() % 100 < static_cast(loss_)) { + if (webrtc::CreateRandomId() % 100 < static_cast(loss_)) { RTC_LOG(LS_VERBOSE) << "Randomly dropping packet, size=" << data_len; written = data_len; - return rtc::SR_SUCCESS; + return webrtc::SR_SUCCESS; } if (dtls_ && (data_len > mtu_)) { RTC_LOG(LS_VERBOSE) << "Dropping packet > mtu, size=" << data_len; written = data_len; - return rtc::SR_SUCCESS; + return webrtc::SR_SUCCESS; } + max_seen_mtu_ = std::max(max_seen_mtu_, data_len); // Optionally damage application data (type 23). Note that we don't damage // handshake packets and we damage the last byte to keep the header // intact but break the MAC. - if (damage_ && (*static_cast(data) == 23)) { + uint8_t data0 = static_cast(data)[0]; + if (damage_ && (data0 == 23 || data0 == 47)) { std::vector buf(data_len); - RTC_LOG(LS_VERBOSE) << "Damaging packet"; - memcpy(&buf[0], data, data_len); buf[data_len - 1]++; - return from->WriteData(rtc::MakeArrayView(&buf[0], data_len), written, + return from->WriteData(webrtc::MakeArrayView(&buf[0], data_len), written, error); } return from->WriteData( - rtc::MakeArrayView(reinterpret_cast(data), data_len), + webrtc::MakeArrayView(reinterpret_cast(data), data_len), written, error); } @@ -689,8 +783,11 @@ class SSLStreamAdapterTestBase : public ::testing::Test, void SetDamage() { damage_ = true; } void SetMtu(size_t mtu) { mtu_ = mtu; } + size_t GetMaxSeenMtu() const { return max_seen_mtu_; } - void SetHandshakeWait(int wait) { handshake_wait_ = wait; } + void SetHandshakeWait(int wait) { + handshake_wait_ = webrtc::TimeDelta::Millis(wait); + } void SetDtlsSrtpCryptoSuites(const std::vector& ciphers, bool client) { if (client) @@ -706,8 +803,8 @@ class SSLStreamAdapterTestBase : public ::testing::Test, return server_ssl_->GetDtlsSrtpCryptoSuite(retval); } - std::unique_ptr GetPeerCertificate(bool client) { - std::unique_ptr chain; + std::unique_ptr GetPeerCertificate(bool client) { + std::unique_ptr chain; if (client) chain = client_ssl_->GetPeerSSLCertChain(); else @@ -722,195 +819,81 @@ class SSLStreamAdapterTestBase : public ::testing::Test, return server_ssl_->GetSslCipherSuite(retval); } - int GetSslVersion(bool client) { + bool GetSslVersionBytes(bool client, int* version) { if (client) - return client_ssl_->GetSslVersion(); + return client_ssl_->GetSslVersionBytes(version); else - return server_ssl_->GetSslVersion(); - } - - bool ExportKeyingMaterial(absl::string_view label, - const unsigned char* context, - size_t context_len, - bool use_context, - bool client, - unsigned char* result, - size_t result_len) { - if (client) - return client_ssl_->ExportKeyingMaterial(label, context, context_len, - use_context, result, result_len); - else - 
return server_ssl_->ExportKeyingMaterial(label, context, context_len, - use_context, result, result_len); + return server_ssl_->GetSslVersionBytes(version); } // To be implemented by subclasses. virtual void WriteData() = 0; - virtual void ReadData(rtc::StreamInterface* stream) = 0; + virtual void ReadData(webrtc::StreamInterface* stream) = 0; virtual void TestTransfer(int size) = 0; + private: + void OnClientEvent(int sig, int err) { + RTC_LOG(LS_VERBOSE) << "SSLStreamAdapterTestBase::OnClientEvent sig=" + << sig; + + if (sig & webrtc::SE_READ) { + ReadData(client_ssl_.get()); + } + + if (sig & webrtc::SE_WRITE) { + WriteData(); + } + } + + void OnServerEvent(int sig, int err) { + RTC_LOG(LS_VERBOSE) << "SSLStreamAdapterTestBase::OnServerEvent sig=" + << sig; + if (sig & webrtc::SE_READ) { + ReadData(server_ssl_.get()); + } + } + protected: - rtc::SSLIdentity* client_identity() const { + webrtc::SSLIdentity* client_identity() const { if (!client_ssl_) { return nullptr; } return client_ssl_->GetIdentityForTesting(); } - rtc::SSLIdentity* server_identity() const { + webrtc::SSLIdentity* server_identity() const { if (!server_ssl_) { return nullptr; } return server_ssl_->GetIdentityForTesting(); } - rtc::AutoThread main_thread_; + webrtc::AutoThread main_thread_; + webrtc::ScopedFakeClock clock_; std::string client_cert_pem_; std::string client_private_key_pem_; - rtc::KeyParams client_key_type_; - rtc::KeyParams server_key_type_; - SSLDummyStreamBase* client_stream_; // freed by client_ssl_ destructor - SSLDummyStreamBase* server_stream_; // freed by server_ssl_ destructor - std::unique_ptr client_ssl_; - std::unique_ptr server_ssl_; + webrtc::KeyParams client_key_type_; + webrtc::KeyParams server_key_type_; + std::string digest_algorithm_; + size_t digest_length_; + std::unique_ptr client_ssl_; + std::unique_ptr server_ssl_; int delay_; size_t mtu_; + size_t max_seen_mtu_ = 0; int loss_; bool lose_first_packet_; bool damage_; bool dtls_; - int handshake_wait_; + webrtc::TimeDelta handshake_wait_; bool identities_set_; }; -class SSLStreamAdapterTestTLS - : public SSLStreamAdapterTestBase, - public WithParamInterface> { - public: - SSLStreamAdapterTestTLS() - : SSLStreamAdapterTestBase("", - "", - false, - ::testing::get<0>(GetParam()), - ::testing::get<1>(GetParam())), - client_buffer_(kFifoBufferSize), - server_buffer_(kFifoBufferSize) {} - - void CreateStreams() override { - client_stream_ = - new SSLDummyStreamTLS(this, "c2s", &client_buffer_, &server_buffer_); - server_stream_ = - new SSLDummyStreamTLS(this, "s2c", &server_buffer_, &client_buffer_); - } - - // Test data transfer for TLS - void TestTransfer(int size) override { - RTC_LOG(LS_INFO) << "Starting transfer test with " << size << " bytes"; - // Create some dummy data to send. - size_t received; - - send_stream_.ReserveSize(size); - for (int i = 0; i < size; ++i) { - uint8_t ch = static_cast(i); - size_t written; - int error; - send_stream_.Write(rtc::MakeArrayView(&ch, 1), written, error); - } - send_stream_.Rewind(); - - // Prepare the receive stream. 
- recv_stream_.ReserveSize(size); - - // Start sending - WriteData(); - - // Wait for the client to close - EXPECT_TRUE_WAIT(server_ssl_->GetState() == rtc::SS_CLOSED, 10000); - - // Now check the data - recv_stream_.GetSize(&received); - - EXPECT_EQ(static_cast(size), received); - EXPECT_EQ(0, - memcmp(send_stream_.GetBuffer(), recv_stream_.GetBuffer(), size)); - } - - void WriteData() override { - size_t position, tosend, size; - rtc::StreamResult rv; - size_t sent; - uint8_t block[kBlockSize]; - - send_stream_.GetSize(&size); - if (!size) - return; - - for (;;) { - send_stream_.GetPosition(&position); - int dummy_error; - if (send_stream_.Read(block, tosend, dummy_error) != rtc::SR_EOS) { - int error; - rv = client_ssl_->Write(rtc::MakeArrayView(block, tosend), sent, error); - - if (rv == rtc::SR_SUCCESS) { - send_stream_.SetPosition(position + sent); - RTC_LOG(LS_VERBOSE) << "Sent: " << position + sent; - } else if (rv == rtc::SR_BLOCK) { - RTC_LOG(LS_VERBOSE) << "Blocked..."; - send_stream_.SetPosition(position); - break; - } else { - ADD_FAILURE(); - break; - } - } else { - // Now close - RTC_LOG(LS_INFO) << "Wrote " << position << " bytes. Closing"; - client_ssl_->Close(); - break; - } - } - } - - void ReadData(rtc::StreamInterface* stream) override { - uint8_t buffer[1600]; - size_t bread; - int err2; - rtc::StreamResult r; - - for (;;) { - r = stream->Read(buffer, bread, err2); - - if (r == rtc::SR_ERROR || r == rtc::SR_EOS) { - // Unfortunately, errors are the way that the stream adapter - // signals close in OpenSSL. - stream->Close(); - return; - } - - if (r == rtc::SR_BLOCK) - break; - - ASSERT_EQ(rtc::SR_SUCCESS, r); - RTC_LOG(LS_VERBOSE) << "Read " << bread; - size_t written; - int error; - recv_stream_.Write(rtc::MakeArrayView(buffer, bread), written, error); - } - } - - private: - rtc::FifoBuffer client_buffer_; - rtc::FifoBuffer server_buffer_; - rtc::MemoryStream send_stream_; - rtc::MemoryStream recv_stream_; -}; - class SSLStreamAdapterTestDTLSBase : public SSLStreamAdapterTestBase { public: - SSLStreamAdapterTestDTLSBase(rtc::KeyParams param1, rtc::KeyParams param2) - : SSLStreamAdapterTestBase("", "", true, param1, param2), - client_buffer_(kBufferCapacity, kDefaultBufferSize), - server_buffer_(kBufferCapacity, kDefaultBufferSize), + SSLStreamAdapterTestDTLSBase(webrtc::KeyParams param1, + webrtc::KeyParams param2, + std::pair digest) + : SSLStreamAdapterTestBase("", "", true, param1, param2, digest), packet_size_(1000), count_(0), sent_(0) {} @@ -918,17 +901,18 @@ class SSLStreamAdapterTestDTLSBase : public SSLStreamAdapterTestBase { SSLStreamAdapterTestDTLSBase(absl::string_view cert_pem, absl::string_view private_key_pem) : SSLStreamAdapterTestBase(cert_pem, private_key_pem, true), - client_buffer_(kBufferCapacity, kDefaultBufferSize), - server_buffer_(kBufferCapacity, kDefaultBufferSize), packet_size_(1000), count_(0), sent_(0) {} - void CreateStreams() override { - client_stream_ = - new SSLDummyStreamDTLS(this, "c2s", &client_buffer_, &server_buffer_); - server_stream_ = - new SSLDummyStreamDTLS(this, "s2c", &server_buffer_, &client_buffer_); + std::unique_ptr CreateClientStream() override final { + return absl::WrapUnique( + new SSLDummyStream(this, "c2s", &client_buffer_, &server_buffer_)); + } + + std::unique_ptr CreateServerStream() override final { + return absl::WrapUnique( + new SSLDummyStream(this, "s2c", &server_buffer_, &client_buffer_)); } void WriteData() override { @@ -945,12 +929,12 @@ class SSLStreamAdapterTestDTLSBase : public 
SSLStreamAdapterTestBase { size_t sent; int error; - rtc::StreamResult rv = client_ssl_->Write( - rtc::MakeArrayView(packet, packet_size_), sent, error); - if (rv == rtc::SR_SUCCESS) { + webrtc::StreamResult rv = client_ssl_->Write( + webrtc::MakeArrayView(packet, packet_size_), sent, error); + if (rv == webrtc::SR_SUCCESS) { RTC_LOG(LS_VERBOSE) << "Sent: " << sent_; sent_++; - } else if (rv == rtc::SR_BLOCK) { + } else if (rv == webrtc::SR_BLOCK) { RTC_LOG(LS_VERBOSE) << "Blocked..."; break; } else { @@ -962,26 +946,26 @@ class SSLStreamAdapterTestDTLSBase : public SSLStreamAdapterTestBase { delete[] packet; } - void ReadData(rtc::StreamInterface* stream) override { + void ReadData(webrtc::StreamInterface* stream) override final { uint8_t buffer[2000]; size_t bread; int err2; - rtc::StreamResult r; + webrtc::StreamResult r; for (;;) { r = stream->Read(buffer, bread, err2); - if (r == rtc::SR_ERROR) { + if (r == webrtc::SR_ERROR) { // Unfortunately, errors are the way that the stream adapter // signals close right now stream->Close(); return; } - if (r == rtc::SR_BLOCK) + if (r == webrtc::SR_BLOCK) break; - ASSERT_EQ(rtc::SR_SUCCESS, r); + ASSERT_EQ(webrtc::SR_SUCCESS, r); RTC_LOG(LS_VERBOSE) << "Read " << bread; // Now parse the datagram @@ -1003,14 +987,20 @@ class SSLStreamAdapterTestDTLSBase : public SSLStreamAdapterTestBase { WriteData(); - EXPECT_TRUE_WAIT(sent_ == count_, 10000); + EXPECT_THAT(webrtc::WaitUntil([&] { return sent_; }, ::testing::Eq(count_), + {.timeout = webrtc::TimeDelta::Millis(10000), + .clock = &clock_}), + webrtc::IsRtcOk()); RTC_LOG(LS_INFO) << "sent_ == " << sent_; if (damage_) { - WAIT(false, 2000); + clock_.AdvanceTime(webrtc::TimeDelta::Millis(2000)); EXPECT_EQ(0U, received_.size()); } else if (loss_ == 0) { - EXPECT_EQ_WAIT(static_cast(sent_), received_.size(), 1000); + EXPECT_THAT(webrtc::WaitUntil([&] { return received_.size(); }, + ::testing::Eq(static_cast(sent_)), + {.clock = &clock_}), + webrtc::IsRtcOk()); } else { RTC_LOG(LS_INFO) << "Sent " << sent_ << " packets; received " << received_.size(); @@ -1018,8 +1008,10 @@ class SSLStreamAdapterTestDTLSBase : public SSLStreamAdapterTestBase { } protected: - BufferQueueStream client_buffer_; - BufferQueueStream server_buffer_; + StreamWrapper client_buffer_{ + std::make_unique(kBufferCapacity, kDefaultBufferSize)}; + StreamWrapper server_buffer_{ + std::make_unique(kBufferCapacity, kDefaultBufferSize)}; private: size_t packet_size_; @@ -1028,22 +1020,10 @@ class SSLStreamAdapterTestDTLSBase : public SSLStreamAdapterTestBase { std::set received_; }; -class SSLStreamAdapterTestDTLS - : public SSLStreamAdapterTestDTLSBase, - public WithParamInterface> { - public: - SSLStreamAdapterTestDTLS() - : SSLStreamAdapterTestDTLSBase(::testing::get<0>(GetParam()), - ::testing::get<1>(GetParam())) {} - - SSLStreamAdapterTestDTLS(absl::string_view cert_pem, - absl::string_view private_key_pem) - : SSLStreamAdapterTestDTLSBase(cert_pem, private_key_pem) {} -}; - -rtc::StreamResult SSLDummyStreamBase::Write(rtc::ArrayView data, - size_t& written, - int& error) { +webrtc::StreamResult SSLDummyStream::Write( + webrtc::ArrayView data, + size_t& written, + int& error) { RTC_LOG(LS_VERBOSE) << "Writing to loopback " << data.size(); if (first_packet_) { @@ -1051,7 +1031,7 @@ rtc::StreamResult SSLDummyStreamBase::Write(rtc::ArrayView data, if (test_base_->GetLoseFirstPacket()) { RTC_LOG(LS_INFO) << "Losing initial packet of length " << data.size(); written = data.size(); // Fake successful writing also to writer. 
- return rtc::SR_SUCCESS; + return webrtc::SR_SUCCESS; } } @@ -1059,68 +1039,37 @@ rtc::StreamResult SSLDummyStreamBase::Write(rtc::ArrayView data, error); } -class SSLStreamAdapterTestDTLSFromPEMStrings : public SSLStreamAdapterTestDTLS { - public: - SSLStreamAdapterTestDTLSFromPEMStrings() - : SSLStreamAdapterTestDTLS(kCERT_PEM, kRSA_PRIVATE_KEY_PEM) {} -}; - // Test fixture for certificate chaining. Server will push more than one -// certificate. -class SSLStreamAdapterTestDTLSCertChain : public SSLStreamAdapterTestDTLS { +// certificate. Note: these tests use RSA keys and SHA1 digests. +class SSLStreamAdapterTestDTLSCertChain : public SSLStreamAdapterTestDTLSBase { public: - SSLStreamAdapterTestDTLSCertChain() : SSLStreamAdapterTestDTLS("", "") {} + SSLStreamAdapterTestDTLSCertChain() : SSLStreamAdapterTestDTLSBase("", "") {} void SetUp() override { - CreateStreams(); - - client_ssl_ = - rtc::SSLStreamAdapter::Create(absl::WrapUnique(client_stream_)); - server_ssl_ = - rtc::SSLStreamAdapter::Create(absl::WrapUnique(server_stream_)); - - // Set up the slots - client_ssl_->SignalEvent.connect( - reinterpret_cast(this), - &SSLStreamAdapterTestBase::OnEvent); - server_ssl_->SignalEvent.connect( - reinterpret_cast(this), - &SSLStreamAdapterTestBase::OnEvent); - - std::unique_ptr client_identity; + InitializeClientAndServerStreams(); + // These tests apparently need a longer DTLS timeout due to the larger + // handshake. If the client triggers a resend before the handshake is + // complete, the handshake fails. + client_ssl_->SetInitialRetransmissionTimeout(/*timeout_ms=*/1000); + server_ssl_->SetInitialRetransmissionTimeout(/*timeout_ms=*/1000); + + std::unique_ptr client_identity; if (!client_cert_pem_.empty() && !client_private_key_pem_.empty()) { - client_identity = rtc::SSLIdentity::CreateFromPEMStrings( + client_identity = webrtc::SSLIdentity::CreateFromPEMStrings( client_private_key_pem_, client_cert_pem_); } else { - client_identity = rtc::SSLIdentity::Create("client", client_key_type_); + client_identity = webrtc::SSLIdentity::Create("client", client_key_type_); } client_ssl_->SetIdentity(std::move(client_identity)); } }; -// Basic tests: TLS - -// Test that we can make a handshake work -TEST_P(SSLStreamAdapterTestTLS, TestTLSConnect) { - TestHandshake(); -} - -TEST_P(SSLStreamAdapterTestTLS, GetPeerCertChainWithOneCertificate) { - TestHandshake(); - std::unique_ptr cert_chain = - client_ssl_->GetPeerSSLCertChain(); - ASSERT_NE(nullptr, cert_chain); - EXPECT_EQ(1u, cert_chain->GetSize()); - EXPECT_EQ(cert_chain->Get(0).ToPEMString(), - server_identity()->certificate().ToPEMString()); -} - TEST_F(SSLStreamAdapterTestDTLSCertChain, TwoCertHandshake) { - auto server_identity = rtc::SSLIdentity::CreateFromPEMChainStrings( + auto server_identity = webrtc::SSLIdentity::CreateFromPEMChainStrings( kRSA_PRIVATE_KEY_PEM, std::string(kCERT_PEM) + kCACert); server_ssl_->SetIdentity(std::move(server_identity)); TestHandshake(); - std::unique_ptr peer_cert_chain = + std::unique_ptr peer_cert_chain = client_ssl_->GetPeerSSLCertChain(); ASSERT_NE(nullptr, peer_cert_chain); EXPECT_EQ(kCERT_PEM, peer_cert_chain->Get(0).ToPEMString()); @@ -1133,10 +1082,10 @@ TEST_F(SSLStreamAdapterTestDTLSCertChain, TwoCertHandshake) { } TEST_F(SSLStreamAdapterTestDTLSCertChain, TwoCertHandshakeWithCopy) { - server_ssl_->SetIdentity(rtc::SSLIdentity::CreateFromPEMChainStrings( + server_ssl_->SetIdentity(webrtc::SSLIdentity::CreateFromPEMChainStrings( kRSA_PRIVATE_KEY_PEM, std::string(kCERT_PEM) + kCACert)); 
TestHandshake(); - std::unique_ptr peer_cert_chain = + std::unique_ptr peer_cert_chain = client_ssl_->GetPeerSSLCertChain(); ASSERT_NE(nullptr, peer_cert_chain); EXPECT_EQ(kCERT_PEM, peer_cert_chain->Get(0).ToPEMString()); @@ -1149,10 +1098,10 @@ TEST_F(SSLStreamAdapterTestDTLSCertChain, TwoCertHandshakeWithCopy) { } TEST_F(SSLStreamAdapterTestDTLSCertChain, ThreeCertHandshake) { - server_ssl_->SetIdentity(rtc::SSLIdentity::CreateFromPEMChainStrings( + server_ssl_->SetIdentity(webrtc::SSLIdentity::CreateFromPEMChainStrings( kRSA_PRIVATE_KEY_PEM, std::string(kCERT_PEM) + kIntCert1 + kCACert)); TestHandshake(); - std::unique_ptr peer_cert_chain = + std::unique_ptr peer_cert_chain = client_ssl_->GetPeerSSLCertChain(); ASSERT_NE(nullptr, peer_cert_chain); EXPECT_EQ(kCERT_PEM, peer_cert_chain->Get(0).ToPEMString()); @@ -1165,108 +1114,105 @@ TEST_F(SSLStreamAdapterTestDTLSCertChain, ThreeCertHandshake) { #endif } -// Test that closing the connection on one side updates the other side. -TEST_P(SSLStreamAdapterTestTLS, TestTLSClose) { - TestHandshake(); - client_ssl_->Close(); - EXPECT_EQ_WAIT(rtc::SS_CLOSED, server_ssl_->GetState(), handshake_wait_); -} +class SSLStreamAdapterTestDTLSHandshake + : public SSLStreamAdapterTestDTLSBase, + public WithParamInterface>> { + public: + SSLStreamAdapterTestDTLSHandshake() + : SSLStreamAdapterTestDTLSBase(::testing::get<0>(GetParam()), + ::testing::get<1>(GetParam()), + ::testing::get<2>(GetParam())) {} +}; -// Test transfer -- trivial -TEST_P(SSLStreamAdapterTestTLS, TestTLSTransfer) { +// Test that we can make a handshake work with different parameters. +TEST_P(SSLStreamAdapterTestDTLSHandshake, TestDTLSConnect) { TestHandshake(); - TestTransfer(100000); } -// Test read-write after close. -TEST_P(SSLStreamAdapterTestTLS, ReadWriteAfterClose) { +// Test getting the used DTLS ciphers. +// DTLS 1.2 has different cipher suite than 1.3. +TEST_P(SSLStreamAdapterTestDTLSHandshake, TestGetSslCipherSuite) { + SetupProtocolVersions(webrtc::SSL_PROTOCOL_DTLS_12, + webrtc::SSL_PROTOCOL_DTLS_12); TestHandshake(); - TestTransfer(100000); - client_ssl_->Close(); - - rtc::StreamResult rv; - uint8_t block[kBlockSize]; - size_t dummy; - int error; - - // It's an error to write after closed. - rv = client_ssl_->Write(block, dummy, error); - ASSERT_EQ(rtc::SR_ERROR, rv); - - // But after closed read gives you EOS. - rv = client_ssl_->Read(block, dummy, error); - ASSERT_EQ(rtc::SR_EOS, rv); -} - -// Test a handshake with a bogus peer digest -TEST_P(SSLStreamAdapterTestTLS, TestTLSBogusDigest) { - SetPeerIdentitiesByDigest(false, true); - TestHandshake(false); -} - -TEST_P(SSLStreamAdapterTestTLS, TestTLSDelayedIdentity) { - TestHandshakeWithDelayedIdentity(true); -} - -TEST_P(SSLStreamAdapterTestTLS, TestTLSDelayedIdentityWithBogusDigest) { - TestHandshakeWithDelayedIdentity(false); -} -// Test that the correct error is returned when SetPeerCertificateDigest is -// called with an unknown algorithm. 
-TEST_P(SSLStreamAdapterTestTLS, - TestSetPeerCertificateDigestWithUnknownAlgorithm) { - unsigned char server_digest[20]; - size_t server_digest_len; - bool rv; - rtc::SSLPeerCertificateDigestError err; - - rv = server_identity()->certificate().ComputeDigest( - rtc::DIGEST_SHA_1, server_digest, 20, &server_digest_len); - ASSERT_TRUE(rv); - - rv = client_ssl_->SetPeerCertificateDigest("unknown algorithm", server_digest, - server_digest_len, &err); - EXPECT_EQ(rtc::SSLPeerCertificateDigestError::UNKNOWN_ALGORITHM, err); - EXPECT_FALSE(rv); -} + int client_cipher; + ASSERT_TRUE(GetSslCipherSuite(true, &client_cipher)); + int server_cipher; + ASSERT_TRUE(GetSslCipherSuite(false, &server_cipher)); -// Test that the correct error is returned when SetPeerCertificateDigest is -// called with an invalid digest length. -TEST_P(SSLStreamAdapterTestTLS, TestSetPeerCertificateDigestWithInvalidLength) { - unsigned char server_digest[20]; - size_t server_digest_len; - bool rv; - rtc::SSLPeerCertificateDigestError err; - - rv = server_identity()->certificate().ComputeDigest( - rtc::DIGEST_SHA_1, server_digest, 20, &server_digest_len); - ASSERT_TRUE(rv); - - rv = client_ssl_->SetPeerCertificateDigest(rtc::DIGEST_SHA_1, server_digest, - server_digest_len - 1, &err); - EXPECT_EQ(rtc::SSLPeerCertificateDigestError::INVALID_LENGTH, err); - EXPECT_FALSE(rv); + ASSERT_EQ(client_cipher, server_cipher); + ASSERT_TRUE(webrtc::SSLStreamAdapter::IsAcceptableCipher( + server_cipher, ::testing::get<1>(GetParam()).type())); } -// Test moving a bunch of data +// Test different key sizes with SHA-256, then different signature algorithms +// with ECDSA. Two different RSA sizes are tested on the client and server. +// TODO: bugs.webrtc.org/375552698 - these tests are slow in debug builds +// and have caused flakiness in the past with a key size of 2048. +INSTANTIATE_TEST_SUITE_P( + SSLStreamAdapterTestDTLSHandshakeKeyParameters, + SSLStreamAdapterTestDTLSHandshake, + Values(std::make_tuple(webrtc::KeyParams::ECDSA(webrtc::EC_NIST_P256), + webrtc::KeyParams::RSA(webrtc::kRsaDefaultModSize, + webrtc::kRsaDefaultExponent), + std::make_pair(webrtc::DIGEST_SHA_256, + SHA256_DIGEST_LENGTH)), + std::make_tuple( + webrtc::KeyParams::RSA(1152, webrtc::kRsaDefaultExponent), + webrtc::KeyParams::ECDSA(webrtc::EC_NIST_P256), + std::make_pair(webrtc::DIGEST_SHA_256, SHA256_DIGEST_LENGTH)))); -// Basic tests: DTLS -// Test that we can make a handshake work -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSConnect) { - TestHandshake(); -} +INSTANTIATE_TEST_SUITE_P( + SSLStreamAdapterTestDTLSHandshakeSignatureAlgorithms, + SSLStreamAdapterTestDTLSHandshake, + Combine(Values(webrtc::KeyParams::ECDSA(webrtc::EC_NIST_P256)), + Values(webrtc::KeyParams::ECDSA(webrtc::EC_NIST_P256)), + Values(std::make_pair(webrtc::DIGEST_SHA_1, SHA_DIGEST_LENGTH), + std::make_pair(webrtc::DIGEST_SHA_224, SHA224_DIGEST_LENGTH), + std::make_pair(webrtc::DIGEST_SHA_256, SHA256_DIGEST_LENGTH), + std::make_pair(webrtc::DIGEST_SHA_384, SHA384_DIGEST_LENGTH), + std::make_pair(webrtc::DIGEST_SHA_512, + SHA512_DIGEST_LENGTH)))); + +// Basic tests done with ECDSA certificates and SHA-256. 
+class SSLStreamAdapterTestDTLS : public SSLStreamAdapterTestDTLSBase { + public: + SSLStreamAdapterTestDTLS() + : SSLStreamAdapterTestDTLSBase( + webrtc::KeyParams::ECDSA(webrtc::EC_NIST_P256), + webrtc::KeyParams::ECDSA(webrtc::EC_NIST_P256), + std::make_pair(webrtc::DIGEST_SHA_256, SHA256_DIGEST_LENGTH)) {} +}; +#ifdef OPENSSL_IS_BORINGSSL +#define MAYBE_TestDTLSConnectWithLostFirstPacketNoDelay \ + TestDTLSConnectWithLostFirstPacketNoDelay +#else +#define MAYBE_TestDTLSConnectWithLostFirstPacketNoDelay \ + DISABLED_TestDTLSConnectWithLostFirstPacketNoDelay +#endif // Test that we can make a handshake work if the first packet in // each direction is lost. This gives us predictable loss // rather than having to tune random -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSConnectWithLostFirstPacket) { +TEST_F(SSLStreamAdapterTestDTLS, + MAYBE_TestDTLSConnectWithLostFirstPacketNoDelay) { SetLoseFirstPacket(true); TestHandshake(); } +#ifdef OPENSSL_IS_BORINGSSL +#define MAYBE_TestDTLSConnectWithLostFirstPacketDelay2s \ + TestDTLSConnectWithLostFirstPacketDelay2s +#else +#define MAYBE_TestDTLSConnectWithLostFirstPacketDelay2s \ + DISABLED_TestDTLSConnectWithLostFirstPacketDelay2s +#endif // Test a handshake with loss and delay -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSConnectWithLostFirstPacketDelay2s) { +TEST_F(SSLStreamAdapterTestDTLS, + MAYBE_TestDTLSConnectWithLostFirstPacketDelay2s) { SetLoseFirstPacket(true); SetDelay(2000); SetHandshakeWait(20000); @@ -1275,51 +1221,64 @@ TEST_P(SSLStreamAdapterTestDTLS, TestDTLSConnectWithLostFirstPacketDelay2s) { // Test a handshake with small MTU // Disabled due to https://code.google.com/p/webrtc/issues/detail?id=3910 -TEST_P(SSLStreamAdapterTestDTLS, DISABLED_TestDTLSConnectWithSmallMtu) { +TEST_F(SSLStreamAdapterTestDTLS, DISABLED_TestDTLSConnectWithSmallMtu) { SetMtu(700); SetHandshakeWait(20000); TestHandshake(); } // Test a handshake with total loss and timing out. -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSConnectTimeout) { +// Only works in BoringSSL. +#ifdef OPENSSL_IS_BORINGSSL +#define MAYBE_TestDTLSConnectTimeout TestDTLSConnectTimeout +#else +#define MAYBE_TestDTLSConnectTimeout DISABLED_TestDTLSConnectTimeout +#endif +TEST_F(SSLStreamAdapterTestDTLS, MAYBE_TestDTLSConnectTimeout) { SetLoss(100); TestHandshakeTimeout(); } // Test transfer -- trivial -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSTransfer) { +TEST_F(SSLStreamAdapterTestDTLS, TestDTLSTransfer) { TestHandshake(); TestTransfer(100); } -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSTransferWithLoss) { +TEST_F(SSLStreamAdapterTestDTLS, TestSetMTU) { + SetMtu(400); + client_ssl_->SetMTU(300); + server_ssl_->SetMTU(300); + TestHandshake(); + EXPECT_LE(GetMaxSeenMtu(), 300u); +} + +TEST_F(SSLStreamAdapterTestDTLS, TestDTLSTransferWithLoss) { TestHandshake(); SetLoss(10); TestTransfer(100); } -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSTransferWithDamage) { +TEST_F(SSLStreamAdapterTestDTLS, TestDTLSTransferWithDamage) { SetDamage(); // Must be called first because first packet // write happens at end of handshake. 
TestHandshake(); TestTransfer(100); } -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSDelayedIdentity) { +TEST_F(SSLStreamAdapterTestDTLS, TestDTLSDelayedIdentity) { TestHandshakeWithDelayedIdentity(true); } -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSDelayedIdentityWithBogusDigest) { +TEST_F(SSLStreamAdapterTestDTLS, TestDTLSDelayedIdentityWithBogusDigest) { TestHandshakeWithDelayedIdentity(false); } -// Test DTLS-SRTP with all high ciphers -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpHigh) { - std::vector high; - high.push_back(rtc::kSrtpAes128CmSha1_80); - SetDtlsSrtpCryptoSuites(high, true); - SetDtlsSrtpCryptoSuites(high, false); +// Test DTLS-SRTP with SrtpAes128CmSha1_80 +TEST_F(SSLStreamAdapterTestDTLS, TestDTLSSrtpAes128CmSha1_80) { + const std::vector crypto_suites = {webrtc::kSrtpAes128CmSha1_80}; + SetDtlsSrtpCryptoSuites(crypto_suites, true); + SetDtlsSrtpCryptoSuites(crypto_suites, false); TestHandshake(); int client_cipher; @@ -1328,15 +1287,14 @@ TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpHigh) { ASSERT_TRUE(GetDtlsSrtpCryptoSuite(false, &server_cipher)); ASSERT_EQ(client_cipher, server_cipher); - ASSERT_EQ(client_cipher, rtc::kSrtpAes128CmSha1_80); + ASSERT_EQ(client_cipher, webrtc::kSrtpAes128CmSha1_80); } -// Test DTLS-SRTP with all low ciphers -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpLow) { - std::vector low; - low.push_back(rtc::kSrtpAes128CmSha1_32); - SetDtlsSrtpCryptoSuites(low, true); - SetDtlsSrtpCryptoSuites(low, false); +// Test DTLS-SRTP with SrtpAes128CmSha1_32 +TEST_F(SSLStreamAdapterTestDTLS, TestDTLSSrtpAes128CmSha1_32) { + const std::vector crypto_suites = {webrtc::kSrtpAes128CmSha1_32}; + SetDtlsSrtpCryptoSuites(crypto_suites, true); + SetDtlsSrtpCryptoSuites(crypto_suites, false); TestHandshake(); int client_cipher; @@ -1345,17 +1303,13 @@ TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpLow) { ASSERT_TRUE(GetDtlsSrtpCryptoSuite(false, &server_cipher)); ASSERT_EQ(client_cipher, server_cipher); - ASSERT_EQ(client_cipher, rtc::kSrtpAes128CmSha1_32); + ASSERT_EQ(client_cipher, webrtc::kSrtpAes128CmSha1_32); } -// Test DTLS-SRTP with a mismatch -- should not converge -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpHighLow) { - std::vector high; - high.push_back(rtc::kSrtpAes128CmSha1_80); - std::vector low; - low.push_back(rtc::kSrtpAes128CmSha1_32); - SetDtlsSrtpCryptoSuites(high, true); - SetDtlsSrtpCryptoSuites(low, false); +// Test DTLS-SRTP with incompatible cipher suites -- should not converge. +TEST_F(SSLStreamAdapterTestDTLS, TestDTLSSrtpIncompatibleCipherSuites) { + SetDtlsSrtpCryptoSuites({webrtc::kSrtpAes128CmSha1_80}, true); + SetDtlsSrtpCryptoSuites({webrtc::kSrtpAes128CmSha1_32}, false); TestHandshake(); int client_cipher; @@ -1364,13 +1318,13 @@ TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpHighLow) { ASSERT_FALSE(GetDtlsSrtpCryptoSuite(false, &server_cipher)); } -// Test DTLS-SRTP with each side being mixed -- should select high -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpMixed) { - std::vector mixed; - mixed.push_back(rtc::kSrtpAes128CmSha1_80); - mixed.push_back(rtc::kSrtpAes128CmSha1_32); - SetDtlsSrtpCryptoSuites(mixed, true); - SetDtlsSrtpCryptoSuites(mixed, false); +// Test DTLS-SRTP with each side being mixed -- should select the stronger +// cipher. 
+TEST_F(SSLStreamAdapterTestDTLS, TestDTLSSrtpMixed) { + const std::vector crypto_suites = {webrtc::kSrtpAes128CmSha1_80, + webrtc::kSrtpAes128CmSha1_32}; + SetDtlsSrtpCryptoSuites(crypto_suites, true); + SetDtlsSrtpCryptoSuites(crypto_suites, false); TestHandshake(); int client_cipher; @@ -1379,15 +1333,14 @@ TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpMixed) { ASSERT_TRUE(GetDtlsSrtpCryptoSuite(false, &server_cipher)); ASSERT_EQ(client_cipher, server_cipher); - ASSERT_EQ(client_cipher, rtc::kSrtpAes128CmSha1_80); + ASSERT_EQ(client_cipher, webrtc::kSrtpAes128CmSha1_80); } -// Test DTLS-SRTP with all GCM-128 ciphers. -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpGCM128) { - std::vector gcm128; - gcm128.push_back(rtc::kSrtpAeadAes128Gcm); - SetDtlsSrtpCryptoSuites(gcm128, true); - SetDtlsSrtpCryptoSuites(gcm128, false); +// Test DTLS-SRTP with SrtpAeadAes128Gcm. +TEST_F(SSLStreamAdapterTestDTLS, TestDTLSSrtpAeadAes128Gcm) { + std::vector crypto_suites = {webrtc::kSrtpAeadAes128Gcm}; + SetDtlsSrtpCryptoSuites(crypto_suites, true); + SetDtlsSrtpCryptoSuites(crypto_suites, false); TestHandshake(); int client_cipher; @@ -1396,15 +1349,14 @@ TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpGCM128) { ASSERT_TRUE(GetDtlsSrtpCryptoSuite(false, &server_cipher)); ASSERT_EQ(client_cipher, server_cipher); - ASSERT_EQ(client_cipher, rtc::kSrtpAeadAes128Gcm); + ASSERT_EQ(client_cipher, webrtc::kSrtpAeadAes128Gcm); } // Test DTLS-SRTP with all GCM-256 ciphers. -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpGCM256) { - std::vector gcm256; - gcm256.push_back(rtc::kSrtpAeadAes256Gcm); - SetDtlsSrtpCryptoSuites(gcm256, true); - SetDtlsSrtpCryptoSuites(gcm256, false); +TEST_F(SSLStreamAdapterTestDTLS, TestDTLSSrtpGCM256) { + std::vector crypto_suites = {webrtc::kSrtpAeadAes256Gcm}; + SetDtlsSrtpCryptoSuites(crypto_suites, true); + SetDtlsSrtpCryptoSuites(crypto_suites, false); TestHandshake(); int client_cipher; @@ -1413,17 +1365,13 @@ TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpGCM256) { ASSERT_TRUE(GetDtlsSrtpCryptoSuite(false, &server_cipher)); ASSERT_EQ(client_cipher, server_cipher); - ASSERT_EQ(client_cipher, rtc::kSrtpAeadAes256Gcm); + ASSERT_EQ(client_cipher, webrtc::kSrtpAeadAes256Gcm); } -// Test DTLS-SRTP with mixed GCM-128/-256 ciphers -- should not converge. -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpGCMMismatch) { - std::vector gcm128; - gcm128.push_back(rtc::kSrtpAeadAes128Gcm); - std::vector gcm256; - gcm256.push_back(rtc::kSrtpAeadAes256Gcm); - SetDtlsSrtpCryptoSuites(gcm128, true); - SetDtlsSrtpCryptoSuites(gcm256, false); +// Test DTLS-SRTP with incompatible GCM-128/-256 ciphers -- should not converge. +TEST_F(SSLStreamAdapterTestDTLS, TestDTLSSrtpIncompatibleGcmCipherSuites) { + SetDtlsSrtpCryptoSuites({webrtc::kSrtpAeadAes128Gcm}, true); + SetDtlsSrtpCryptoSuites({webrtc::kSrtpAeadAes256Gcm}, false); TestHandshake(); int client_cipher; @@ -1433,12 +1381,11 @@ TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpGCMMismatch) { ASSERT_FALSE(GetDtlsSrtpCryptoSuite(false, &server_cipher)); } // Test DTLS-SRTP with both GCM-128/-256 ciphers -- should select GCM-256.
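The mixed-suite cases here rely on the negotiated SRTP protection profile being the strongest one both peers offered. As an illustrative model only (not necessarily BoringSSL's exact selection code), picking the first mutually offered suite from a strongest-first preference list reproduces every outcome asserted in these tests, including the empty result for the incompatible-suite cases:

#include <algorithm>
#include <optional>
#include <vector>

// Illustrative helper: returns the first suite in `preference` (assumed to be
// ordered strongest-first) that the peer also offered, or nullopt when the
// two lists share nothing, which is the "should not converge" case above.
std::optional<int> SelectSrtpSuite(const std::vector<int>& preference,
                                   const std::vector<int>& peer_offer) {
  for (int suite : preference) {
    if (std::find(peer_offer.begin(), peer_offer.end(), suite) !=
        peer_offer.end()) {
      return suite;
    }
  }
  return std::nullopt;
}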
-TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpGCMMixed) { - std::vector gcmBoth; - gcmBoth.push_back(rtc::kSrtpAeadAes256Gcm); - gcmBoth.push_back(rtc::kSrtpAeadAes128Gcm); - SetDtlsSrtpCryptoSuites(gcmBoth, true); - SetDtlsSrtpCryptoSuites(gcmBoth, false); +TEST_F(SSLStreamAdapterTestDTLS, TestDTLSSrtpGCMMixed) { + std::vector crypto_suites = {webrtc::kSrtpAeadAes256Gcm, + webrtc::kSrtpAeadAes128Gcm}; + SetDtlsSrtpCryptoSuites(crypto_suites, true); + SetDtlsSrtpCryptoSuites(crypto_suites, false); TestHandshake(); int client_cipher; @@ -1447,60 +1394,61 @@ TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpGCMMixed) { ASSERT_TRUE(GetDtlsSrtpCryptoSuite(false, &server_cipher)); ASSERT_EQ(client_cipher, server_cipher); - ASSERT_EQ(client_cipher, rtc::kSrtpAeadAes256Gcm); + ASSERT_EQ(client_cipher, webrtc::kSrtpAeadAes256Gcm); } // Test SRTP cipher suite lengths. -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSSrtpKeyAndSaltLengths) { +TEST_F(SSLStreamAdapterTestDTLS, TestDTLSSrtpKeyAndSaltLengths) { int key_len; int salt_len; - ASSERT_FALSE(rtc::GetSrtpKeyAndSaltLengths(rtc::kSrtpInvalidCryptoSuite, - &key_len, &salt_len)); + ASSERT_FALSE(webrtc::GetSrtpKeyAndSaltLengths(webrtc::kSrtpInvalidCryptoSuite, + &key_len, &salt_len)); - ASSERT_TRUE(rtc::GetSrtpKeyAndSaltLengths(rtc::kSrtpAes128CmSha1_32, &key_len, - &salt_len)); + ASSERT_TRUE(webrtc::GetSrtpKeyAndSaltLengths(webrtc::kSrtpAes128CmSha1_32, + &key_len, &salt_len)); ASSERT_EQ(128 / 8, key_len); ASSERT_EQ(112 / 8, salt_len); - ASSERT_TRUE(rtc::GetSrtpKeyAndSaltLengths(rtc::kSrtpAes128CmSha1_80, &key_len, - &salt_len)); + ASSERT_TRUE(webrtc::GetSrtpKeyAndSaltLengths(webrtc::kSrtpAes128CmSha1_80, + &key_len, &salt_len)); ASSERT_EQ(128 / 8, key_len); ASSERT_EQ(112 / 8, salt_len); - ASSERT_TRUE(rtc::GetSrtpKeyAndSaltLengths(rtc::kSrtpAeadAes128Gcm, &key_len, - &salt_len)); + ASSERT_TRUE(webrtc::GetSrtpKeyAndSaltLengths(webrtc::kSrtpAeadAes128Gcm, + &key_len, &salt_len)); ASSERT_EQ(128 / 8, key_len); ASSERT_EQ(96 / 8, salt_len); - ASSERT_TRUE(rtc::GetSrtpKeyAndSaltLengths(rtc::kSrtpAeadAes256Gcm, &key_len, - &salt_len)); + ASSERT_TRUE(webrtc::GetSrtpKeyAndSaltLengths(webrtc::kSrtpAeadAes256Gcm, + &key_len, &salt_len)); ASSERT_EQ(256 / 8, key_len); ASSERT_EQ(96 / 8, salt_len); } -// Test an exporter -TEST_P(SSLStreamAdapterTestDTLS, TestDTLSExporter) { - TestHandshake(); - unsigned char client_out[20]; - unsigned char server_out[20]; - - bool result; - result = ExportKeyingMaterial(kExporterLabel, kExporterContext, - kExporterContextLen, true, true, client_out, - sizeof(client_out)); - ASSERT_TRUE(result); +// Test the DTLS-SRTP key exporter +TEST_F(SSLStreamAdapterTestDTLS, TestDTLSSrtpExporter) { + const std::vector crypto_suites = {webrtc::kSrtpAes128CmSha1_80}; + SetDtlsSrtpCryptoSuites(crypto_suites, true); + SetDtlsSrtpCryptoSuites(crypto_suites, false); - result = ExportKeyingMaterial(kExporterLabel, kExporterContext, - kExporterContextLen, true, false, server_out, - sizeof(server_out)); - ASSERT_TRUE(result); - - ASSERT_TRUE(!memcmp(client_out, server_out, sizeof(client_out))); + TestHandshake(); + int selected_crypto_suite; + EXPECT_TRUE(GetDtlsSrtpCryptoSuite(/*client=*/false, &selected_crypto_suite)); + int key_len; + int salt_len; + ASSERT_TRUE(webrtc::GetSrtpKeyAndSaltLengths(selected_crypto_suite, &key_len, + &salt_len)); + webrtc::ZeroOnFreeBuffer client_out(2 * (key_len + salt_len)); + webrtc::ZeroOnFreeBuffer server_out(2 * (key_len + salt_len)); + + EXPECT_TRUE(client_ssl_->ExportSrtpKeyingMaterial(client_out)); + 
EXPECT_TRUE(server_ssl_->ExportSrtpKeyingMaterial(server_out)); + EXPECT_EQ(client_out, server_out); } // Test not yet valid certificates are not rejected. -TEST_P(SSLStreamAdapterTestDTLS, TestCertNotYetValid) { +TEST_F(SSLStreamAdapterTestDTLS, TestCertNotYetValid) { long one_day = 60 * 60 * 24; // Make the certificates not valid until one day later. ResetIdentitiesWithValidity(one_day, one_day); @@ -1508,13 +1456,20 @@ TEST_P(SSLStreamAdapterTestDTLS, TestCertNotYetValid) { } // Test expired certificates are not rejected. -TEST_P(SSLStreamAdapterTestDTLS, TestCertExpired) { +TEST_F(SSLStreamAdapterTestDTLS, TestCertExpired) { long one_day = 60 * 60 * 24; // Make the certificates already expired. ResetIdentitiesWithValidity(-one_day, -one_day); TestHandshake(); } +class SSLStreamAdapterTestDTLSFromPEMStrings + : public SSLStreamAdapterTestDTLSBase { + public: + SSLStreamAdapterTestDTLSFromPEMStrings() + : SSLStreamAdapterTestDTLSBase(kCERT_PEM, kRSA_PRIVATE_KEY_PEM) {} +}; + // Test data transfer using certs created from strings. TEST_F(SSLStreamAdapterTestDTLSFromPEMStrings, TestTransfer) { TestHandshake(); @@ -1530,7 +1485,7 @@ TEST_F(SSLStreamAdapterTestDTLSFromPEMStrings, TestDTLSGetPeerCertificate) { TestHandshake(); // The client should have a peer certificate after the handshake. - std::unique_ptr client_peer_cert = + std::unique_ptr client_peer_cert = GetPeerCertificate(true); ASSERT_TRUE(client_peer_cert); @@ -1539,7 +1494,7 @@ TEST_F(SSLStreamAdapterTestDTLSFromPEMStrings, TestDTLSGetPeerCertificate) { ASSERT_NE(kCERT_PEM, client_peer_string); // The server should have a peer certificate after the handshake. - std::unique_ptr server_peer_cert = + std::unique_ptr server_peer_cert = GetPeerCertificate(false); ASSERT_TRUE(server_peer_cert); @@ -1547,154 +1502,80 @@ TEST_F(SSLStreamAdapterTestDTLSFromPEMStrings, TestDTLSGetPeerCertificate) { ASSERT_EQ(kCERT_PEM, server_peer_cert->ToPEMString()); } -// Test getting the used DTLS 1.2 ciphers. -// DTLS 1.2 enabled for client and server -> DTLS 1.2 will be used. -TEST_P(SSLStreamAdapterTestDTLS, TestGetSslCipherSuiteDtls12Both) { - SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_12, rtc::SSL_PROTOCOL_DTLS_12); - TestHandshake(); - - int client_cipher; - ASSERT_TRUE(GetSslCipherSuite(true, &client_cipher)); - int server_cipher; - ASSERT_TRUE(GetSslCipherSuite(false, &server_cipher)); - - ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_12, GetSslVersion(true)); - ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_12, GetSslVersion(false)); - - ASSERT_EQ(client_cipher, server_cipher); - ASSERT_TRUE(rtc::SSLStreamAdapter::IsAcceptableCipher( - server_cipher, ::testing::get<1>(GetParam()).type())); +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" +TEST_F(SSLStreamAdapterTestDTLSFromPEMStrings, + DeprecatedSetPeerCertificateDigest) { + webrtc::SSLPeerCertificateDigestError error; + // Pass in a wrong length to trigger an error. + bool ret = client_ssl_->SetPeerCertificateDigest(webrtc::DIGEST_SHA_256, {}, + /*length=*/0, &error); + EXPECT_FALSE(ret); + EXPECT_EQ(error, webrtc::SSLPeerCertificateDigestError::INVALID_LENGTH); } - -// Test getting the used DTLS ciphers. -// DTLS 1.2 is max version for client and server. 
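The exporter test above sizes each buffer as 2 * (key_len + salt_len) because DTLS-SRTP derives one key and one salt per direction from a single block of exported keying material; RFC 5764 (section 4.2) lays the block out as client key, server key, client salt, server salt. A sketch of slicing such a block (the struct and helper are illustrative, not WebRTC API):

#include <cstddef>
#include <cstdint>
#include <vector>

struct SrtpKeyingMaterial {
  std::vector<uint8_t> client_key, server_key, client_salt, server_salt;
};

// Splits exported keying material per RFC 5764, section 4.2:
// client_write_key | server_write_key | client_write_salt | server_write_salt.
SrtpKeyingMaterial SplitKeyingMaterial(const std::vector<uint8_t>& km,
                                       size_t key_len,
                                       size_t salt_len) {
  const uint8_t* p = km.data();
  SrtpKeyingMaterial out;
  out.client_key.assign(p, p + key_len);
  p += key_len;
  out.server_key.assign(p, p + key_len);
  p += key_len;
  out.client_salt.assign(p, p + salt_len);
  p += salt_len;
  out.server_salt.assign(p, p + salt_len);
  return out;
}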
-TEST_P(SSLStreamAdapterTestDTLS, TestGetSslCipherSuite) { - SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_12, rtc::SSL_PROTOCOL_DTLS_12); - TestHandshake(); - - int client_cipher; - ASSERT_TRUE(GetSslCipherSuite(true, &client_cipher)); - int server_cipher; - ASSERT_TRUE(GetSslCipherSuite(false, &server_cipher)); - - ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_12, GetSslVersion(true)); - ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_12, GetSslVersion(false)); - - ASSERT_EQ(client_cipher, server_cipher); - ASSERT_TRUE(rtc::SSLStreamAdapter::IsAcceptableCipher( - server_cipher, ::testing::get<1>(GetParam()).type())); -} - -// The RSA keysizes here might look strange, why not include the RFC's size -// 2048?. The reason is test case slowness; testing two sizes to exercise -// parametrization is sufficient. -INSTANTIATE_TEST_SUITE_P( - SSLStreamAdapterTestsTLS, - SSLStreamAdapterTestTLS, - Combine(Values(rtc::KeyParams::RSA(1024, 65537), - rtc::KeyParams::RSA(1152, 65537), - rtc::KeyParams::ECDSA(rtc::EC_NIST_P256)), - Values(rtc::KeyParams::RSA(1024, 65537), - rtc::KeyParams::RSA(1152, 65537), - rtc::KeyParams::ECDSA(rtc::EC_NIST_P256)))); -INSTANTIATE_TEST_SUITE_P( - SSLStreamAdapterTestsDTLS, - SSLStreamAdapterTestDTLS, - Combine(Values(rtc::KeyParams::RSA(1024, 65537), - rtc::KeyParams::RSA(1152, 65537), - rtc::KeyParams::ECDSA(rtc::EC_NIST_P256)), - Values(rtc::KeyParams::RSA(1024, 65537), - rtc::KeyParams::RSA(1152, 65537), - rtc::KeyParams::ECDSA(rtc::EC_NIST_P256)))); - -// Tests for enabling / disabling legacy TLS protocols in DTLS. -class SSLStreamAdapterTestDTLSLegacyProtocols - : public SSLStreamAdapterTestDTLSBase { - public: - SSLStreamAdapterTestDTLSLegacyProtocols() - : SSLStreamAdapterTestDTLSBase(rtc::KeyParams::ECDSA(rtc::EC_NIST_P256), - rtc::KeyParams::ECDSA(rtc::EC_NIST_P256)) { - } - - // Do not use the SetUp version from the parent class. - void SetUp() override {} - - // The legacy TLS protocols flag is read when the OpenSSLStreamAdapter is - // initialized, so we set the experiment while creationg client_ssl_ - // and server_ssl_. 
- - void ConfigureClient(absl::string_view experiment) { - webrtc::test::ScopedFieldTrials trial{std::string(experiment)}; - client_stream_ = - new SSLDummyStreamDTLS(this, "c2s", &client_buffer_, &server_buffer_); - client_ssl_ = - rtc::SSLStreamAdapter::Create(absl::WrapUnique(client_stream_)); - client_ssl_->SignalEvent.connect( - static_cast(this), - &SSLStreamAdapterTestBase::OnEvent); - auto client_identity = rtc::SSLIdentity::Create("client", client_key_type_); - client_ssl_->SetIdentity(std::move(client_identity)); +#pragma clang diagnostic pop + +struct SSLStreamAdapterTestDTLSHandshakeVersion + : public SSLStreamAdapterTestDTLS, + public WithParamInterface> { + webrtc::SSLProtocolVersion GetMin( + const std::vector& array) { + webrtc::SSLProtocolVersion min = array[0]; + for (const auto& e : array) { + if (static_cast(e) < static_cast(min)) { + min = e; + } + } + return min; } - - void ConfigureServer(absl::string_view experiment) { - webrtc::test::ScopedFieldTrials trial{std::string(experiment)}; - server_stream_ = - new SSLDummyStreamDTLS(this, "s2c", &server_buffer_, &client_buffer_); - server_ssl_ = - rtc::SSLStreamAdapter::Create(absl::WrapUnique(server_stream_)); - server_ssl_->SignalEvent.connect( - static_cast(this), - &SSLStreamAdapterTestBase::OnEvent); - server_ssl_->SetIdentity( - rtc::SSLIdentity::Create("server", server_key_type_)); + uint16_t AsDtlsVersionBytes(webrtc::SSLProtocolVersion version) { + switch (version) { + case webrtc::SSL_PROTOCOL_DTLS_10: + return webrtc::kDtls10VersionBytes; + case webrtc::SSL_PROTOCOL_DTLS_12: + return webrtc::kDtls12VersionBytes; + case webrtc::SSL_PROTOCOL_DTLS_13: + return webrtc::kDtls13VersionBytes; + default: + break; + } + RTC_CHECK(false) << "Unknown version: " << static_cast(version); } }; -// Test getting the used DTLS ciphers. -// DTLS 1.2 enabled for neither client nor server -> DTLS 1.0 will be used. -TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, TestGetSslCipherSuite) { - ConfigureClient("WebRTC-LegacyTlsProtocols/Enabled/"); - ConfigureServer("WebRTC-LegacyTlsProtocols/Enabled/"); - SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_10, rtc::SSL_PROTOCOL_DTLS_10); - TestHandshake(); - - int client_cipher; - ASSERT_TRUE(GetSslCipherSuite(true, &client_cipher)); - int server_cipher; - ASSERT_TRUE(GetSslCipherSuite(false, &server_cipher)); - - ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_10, GetSslVersion(true)); - ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_10, GetSslVersion(false)); - - ASSERT_EQ(client_cipher, server_cipher); -} - -// Test getting the used DTLS 1.2 ciphers. -// DTLS 1.2 enabled for client and server -> DTLS 1.2 will be used. 
-TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, - TestGetSslCipherSuiteDtls12Both) { - ConfigureClient(""); - ConfigureServer(""); - SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_12, rtc::SSL_PROTOCOL_DTLS_12); +INSTANTIATE_TEST_SUITE_P(SSLStreamAdapterTestDTLSHandshakeVersion, + SSLStreamAdapterTestDTLSHandshakeVersion, + Combine(Values(webrtc::SSL_PROTOCOL_DTLS_12, + webrtc::SSL_PROTOCOL_DTLS_13), + Values(webrtc::SSL_PROTOCOL_DTLS_12, + webrtc::SSL_PROTOCOL_DTLS_13))); + +TEST_P(SSLStreamAdapterTestDTLSHandshakeVersion, TestGetSslVersionBytes) { + auto client = ::testing::get<0>(GetParam()); + auto server = ::testing::get<1>(GetParam()); + SetupProtocolVersions(client, server); TestHandshake(); - int client_cipher; - ASSERT_TRUE(GetSslCipherSuite(true, &client_cipher)); - int server_cipher; - ASSERT_TRUE(GetSslCipherSuite(false, &server_cipher)); + int client_version; + int server_version; + ASSERT_TRUE(GetSslVersionBytes(true, &client_version)); + ASSERT_TRUE(GetSslVersionBytes(false, &server_version)); - ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_12, GetSslVersion(true)); - ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_12, GetSslVersion(false)); + webrtc::SSLProtocolVersion expect = + GetMin({client, server, + webrtc::SSLStreamAdapter::GetMaxSupportedDTLSProtocolVersion()}); - ASSERT_EQ(client_cipher, server_cipher); + auto expect_bytes = AsDtlsVersionBytes(expect); + EXPECT_EQ(client_version, expect_bytes); + EXPECT_EQ(server_version, expect_bytes); } -// DTLS 1.2 enabled for client only -> DTLS 1.0 will be used. -TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, - TestGetSslCipherSuiteDtls12Client) { - ConfigureClient("WebRTC-LegacyTlsProtocols/Enabled/"); - ConfigureServer("WebRTC-LegacyTlsProtocols/Enabled/"); - SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_10, rtc::SSL_PROTOCOL_DTLS_12); +TEST_P(SSLStreamAdapterTestDTLSHandshakeVersion, TestGetSslCipherSuite) { + auto client = ::testing::get<0>(GetParam()); + auto server = ::testing::get<1>(GetParam()); + SetupProtocolVersions(client, server); TestHandshake(); int client_cipher; @@ -1702,109 +1583,41 @@ TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, int server_cipher; ASSERT_TRUE(GetSslCipherSuite(false, &server_cipher)); - ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_10, GetSslVersion(true)); - ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_10, GetSslVersion(false)); - ASSERT_EQ(client_cipher, server_cipher); + ASSERT_TRUE(webrtc::SSLStreamAdapter::IsAcceptableCipher(server_cipher, + webrtc::KT_DEFAULT)); } -// DTLS 1.2 enabled for server only -> DTLS 1.0 will be used. 
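TestGetSslVersionBytes above expects both sides to settle on the lowest of the two configured caps, further limited by what the library itself supports, and checks the result through the raw record-layer version. DTLS encodes its version as the one's complement of the version number, so DTLS 1.0, 1.2 and 1.3 appear on the wire as 0xfeff, 0xfefd and 0xfefc (RFC 4347, RFC 6347, RFC 9147); the kDtls*VersionBytes constants referenced above are, presumably, WebRTC's names for these wire values. A compact restatement of the expectation (a sketch; the numeric comparison mirrors GetMin above):

#include <algorithm>
#include <cstdint>

// DTLS wire versions (one's complement encoding of the version number).
constexpr uint16_t kDtls10Wire = 0xfeff;  // RFC 4347
constexpr uint16_t kDtls12Wire = 0xfefd;  // RFC 6347
constexpr uint16_t kDtls13Wire = 0xfefc;  // RFC 9147

// Model of the expectation in TestGetSslVersionBytes: the handshake lands on
// the lowest cap among client, server and the library itself.
constexpr int ExpectedNegotiatedVersion(int client_cap,
                                        int server_cap,
                                        int library_max) {
  return std::min({client_cap, server_cap, library_max});
}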
-TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, - TestGetSslCipherSuiteDtls12Server) { - ConfigureClient("WebRTC-LegacyTlsProtocols/Enabled/"); - ConfigureServer("WebRTC-LegacyTlsProtocols/Enabled/"); - SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_12, rtc::SSL_PROTOCOL_DTLS_10); - TestHandshake(); +#ifdef OPENSSL_IS_BORINGSSL +class SSLStreamAdapterTestDTLSPqc : public SSLStreamAdapterTestDTLSBase { + public: + SSLStreamAdapterTestDTLSPqc() : SSLStreamAdapterTestDTLSBase("", "") {} - int client_cipher; - ASSERT_TRUE(GetSslCipherSuite(true, &client_cipher)); - int server_cipher; - ASSERT_TRUE(GetSslCipherSuite(false, &server_cipher)); + protected: + void SetUp() override { + std::string pqc_trial = "WebRTC-EnableDtlsPqc/Enabled/"; - ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_10, GetSslVersion(true)); - ASSERT_EQ(rtc::SSL_PROTOCOL_DTLS_10, GetSslVersion(false)); + InitializeClientAndServerStreams(pqc_trial, pqc_trial); - ASSERT_EQ(client_cipher, server_cipher); -} + auto client_identity = + webrtc::SSLIdentity::Create("client", client_key_type_); + auto server_identity = + webrtc::SSLIdentity::Create("server", server_key_type_); -// Client has legacy TLS versions disabled, server has DTLS 1.0 only. -// This is meant to cause a failure. -TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, - TestGetSslVersionLegacyDisabledServer10) { - ConfigureClient(""); - ConfigureServer("WebRTC-LegacyTlsProtocols/Enabled/"); - SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_10, rtc::SSL_PROTOCOL_DTLS_12); - // Handshake should fail. - TestHandshake(false); -} - -// Both client and server have legacy TLS versions disabled and support -// DTLS 1.2. This should work. -TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, - TestGetSslVersionLegacyDisabledServer12) { - ConfigureClient(""); - ConfigureServer(""); - SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_12, rtc::SSL_PROTOCOL_DTLS_12); - TestHandshake(); -} + client_ssl_->SetIdentity(std::move(client_identity)); + server_ssl_->SetIdentity(std::move(server_identity)); + } +}; -// Both client and server have legacy TLS versions enabled and support DTLS 1.0. -// This should work. -TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, - TestGetSslVersionLegacyEnabledClient10Server10) { - ConfigureClient("WebRTC-LegacyTlsProtocols/Enabled/"); - ConfigureServer("WebRTC-LegacyTlsProtocols/Enabled/"); - SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_10, rtc::SSL_PROTOCOL_DTLS_10); - TestHandshake(); -} +TEST_F(SSLStreamAdapterTestDTLSPqc, TestGetSslGroupId) { + EXPECT_EQ(client_ssl_->GetSslGroupIdForTesting(), 0); + EXPECT_EQ(server_ssl_->GetSslGroupIdForTesting(), 0); -// Legacy protocols are disabled in the client, max TLS version is 1.0 -// This should be a configuration error, and handshake should fail. -TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, - TestGetSslVersionLegacyDisabledClient10Server10) { - ConfigureClient(""); - ConfigureServer("WebRTC-LegacyTlsProtocols/Enabled/"); - SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_10, rtc::SSL_PROTOCOL_DTLS_10); - TestHandshake(false); -} + SetupProtocolVersions(webrtc::SSL_PROTOCOL_DTLS_13, + webrtc::SSL_PROTOCOL_DTLS_13); -// Both client and server have legacy TLS versions enabled and support DTLS 1.0. -// This should work. -TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, - TestGetSslVersionLegacyOverrideEnabledClient10Server10) { - rtc::SetAllowLegacyTLSProtocols(true); - ConfigureClient(""); - ConfigureServer(""); - // Remove override. 
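SSL_GROUP_X25519_MLKEM768 is BoringSSL's group ID for the X25519 + ML-KEM-768 hybrid key exchange, which the field trial in the PQC fixture above enables and which requires DTLS 1.3. For experimentation directly against BoringSSL, outside these fixtures, configuration would look roughly like the sketch below; the group-list name and SSL_get_group_id are assumptions about a sufficiently recent BoringSSL, not something this patch exercises:

#include <cstdint>

#include <openssl/ssl.h>

// Assumption: a BoringSSL revision recent enough to accept the
// "X25519MLKEM768" group name in its group-list parser.
bool PreferHybridPqcGroup(SSL_CTX* ctx) {
  // Prefer the hybrid group, falling back to plain X25519.
  return SSL_CTX_set1_groups_list(ctx, "X25519MLKEM768:X25519") == 1;
}

// Assumption: SSL_get_group_id reports the group negotiated by the most
// recent handshake (0 if none), which is what GetSslGroupIdForTesting
// surfaces in the test above.
uint16_t NegotiatedGroupId(const SSL* ssl) {
  return SSL_get_group_id(ssl);
}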
- rtc::SetAllowLegacyTLSProtocols(absl::nullopt); - SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_10, rtc::SSL_PROTOCOL_DTLS_10); TestHandshake(); + EXPECT_EQ(client_ssl_->GetSslGroupIdForTesting(), SSL_GROUP_X25519_MLKEM768); + EXPECT_EQ(server_ssl_->GetSslGroupIdForTesting(), SSL_GROUP_X25519_MLKEM768); } - -// Client has legacy TLS disabled and server has legacy TLS enabled via -// override. Handshake for DTLS 1.0 should fail. -TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, - TestGetSslVersionLegacyOverrideDisabledClient10EnabledServer10) { - rtc::SetAllowLegacyTLSProtocols(false); - ConfigureClient(""); - rtc::SetAllowLegacyTLSProtocols(true); - ConfigureServer(""); - // Remove override. - rtc::SetAllowLegacyTLSProtocols(absl::nullopt); - SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_10, rtc::SSL_PROTOCOL_DTLS_10); - TestHandshake(false); -} - -// Client has legacy TLS enabled and server has legacy TLS disabled via -// override. Handshake for DTLS 1.0 should fail. -TEST_F(SSLStreamAdapterTestDTLSLegacyProtocols, - TestGetSslVersionLegacyOverrideEnabledClient10DisabledServer10) { - rtc::SetAllowLegacyTLSProtocols(true); - ConfigureClient(""); - rtc::SetAllowLegacyTLSProtocols(false); - ConfigureServer(""); - // Remove override. - rtc::SetAllowLegacyTLSProtocols(absl::nullopt); - SetupProtocolVersions(rtc::SSL_PROTOCOL_DTLS_10, rtc::SSL_PROTOCOL_DTLS_10); - TestHandshake(false); -} +#endif diff --git a/rtc_base/stream.cc b/rtc_base/stream.cc index e6b74b49ac..696f1ad24d 100644 --- a/rtc_base/stream.cc +++ b/rtc_base/stream.cc @@ -9,39 +9,32 @@ */ #include "rtc_base/stream.h" -#include #include -#include -#include +#include #include "api/array_view.h" -#include "rtc_base/checks.h" -#include "rtc_base/thread.h" -namespace rtc { +namespace webrtc { /////////////////////////////////////////////////////////////////////////////// // StreamInterface /////////////////////////////////////////////////////////////////////////////// -StreamResult StreamInterface::WriteAll(const void* data, - size_t data_len, - size_t* written, - int* error) { +StreamResult StreamInterface::WriteAll(ArrayView data, + size_t& written, + int& error) { StreamResult result = SR_SUCCESS; size_t total_written = 0, current_written; - while (total_written < data_len) { - result = Write(ArrayView( - reinterpret_cast(data) + total_written, - data_len - total_written), - current_written, *error); + while (total_written < data.size()) { + ArrayView this_slice = + data.subview(total_written, data.size() - total_written); + result = Write(this_slice, current_written, error); if (result != SR_SUCCESS) break; total_written += current_written; } - if (written) - *written = total_written; + written = total_written; return result; } @@ -51,4 +44,4 @@ bool StreamInterface::Flush() { StreamInterface::StreamInterface() {} -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/stream.h b/rtc_base/stream.h index e02349aed3..9bee2be056 100644 --- a/rtc_base/stream.h +++ b/rtc_base/stream.h @@ -11,15 +11,20 @@ #ifndef RTC_BASE_STREAM_H_ #define RTC_BASE_STREAM_H_ -#include +#include +#include +#include +#include "absl/functional/any_invocable.h" #include "api/array_view.h" -#include "rtc_base/buffer.h" +#include "api/sequence_checker.h" +#include "rtc_base/checks.h" +#include "rtc_base/system/no_unique_address.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" -#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" -namespace rtc { +namespace webrtc { 
/////////////////////////////////////////////////////////////////////////////// // StreamInterface is a generic asynchronous stream interface, supporting read, @@ -70,10 +75,10 @@ class RTC_EXPORT StreamInterface { // SR_EOS: the end-of-stream has been reached, or the stream is in the // SS_CLOSED state. - virtual StreamResult Read(rtc::ArrayView buffer, + virtual StreamResult Read(ArrayView buffer, size_t& read, int& error) = 0; - virtual StreamResult Write(rtc::ArrayView data, + virtual StreamResult Write(ArrayView data, size_t& written, int& error) = 0; @@ -81,15 +86,24 @@ class RTC_EXPORT StreamInterface { // signalled as a result of this call. virtual void Close() = 0; - // Streams may signal one or more StreamEvents to indicate state changes. - // The first argument identifies the stream on which the state change occured. - // The second argument is a bit-wise combination of StreamEvents. - // If SE_CLOSE is signalled, then the third argument is the associated error - // code. Otherwise, the value is undefined. - // Note: Not all streams will support asynchronous event signalling. However, - // SS_OPENING and SR_BLOCK returned from stream member functions imply that - // certain events will be raised in the future. - sigslot::signal3 SignalEvent; + // Streams may issue one or more events to indicate state changes to a + // provided callback. + // The first argument is a bit-wise combination of `StreamEvent` flags. + // If SE_CLOSE is set, then the second argument is the associated error code. + // Otherwise, the value of the second parameter is undefined and should be + // set to 0. + // Note: Not all streams support callbacks. However, SS_OPENING and + // SR_BLOCK returned from member functions imply that certain callbacks will + // be made in the future. + void SetEventCallback(absl::AnyInvocable callback) { + RTC_DCHECK_RUN_ON(&callback_sequence_); + RTC_DCHECK(!callback_ || !callback); + callback_ = std::move(callback); + } + + // TODO(bugs.webrtc.org/11943): Remove after updating downstream code. + sigslot::signal3 SignalEvent + [[deprecated("Use SetEventCallback instead")]]; // Return true if flush is successful. virtual bool Flush(); @@ -105,24 +119,56 @@ class RTC_EXPORT StreamInterface { // unlike Write, the argument 'written' is always set, and may be non-zero // on results other than SR_SUCCESS. The remaining arguments have the // same semantics as Write. - [[deprecated("Use version with ArrayView")]] StreamResult - WriteAll(const void* data, size_t data_len, size_t* written, int* error); - -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wdeprecated-declarations" - // TODO(bugs.webrc.org/14632): Remove pragmas and change underlying - // implementation when downstream code is converted. StreamResult WriteAll(ArrayView data, size_t& written, - int& error) { - return WriteAll(data.data(), data.size(), &written, &error); - } -#pragma clang diagnostic pop + int& error); protected: StreamInterface(); + + // Utility function for derived classes. + void FireEvent(int stream_events, int err) RTC_RUN_ON(&callback_sequence_) { + if (callback_) { + callback_(stream_events, err); + } +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + // TODO(tommi): This is for backwards compatibility only while `SignalEvent` + // is being replaced by `SetEventCallback`. 
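A usage sketch for the SetEventCallback API above: a consumer registers a single callback, on the sequence that owns the stream, instead of connecting to the deprecated SignalEvent. The helper name is illustrative; any webrtc::StreamInterface implementation is wired the same way:

#include "rtc_base/logging.h"
#include "rtc_base/stream.h"

void AttachEventLogging(webrtc::StreamInterface& stream) {
  stream.SetEventCallback([](int events, int error) {
    // `events` is a bit-wise combination of StreamEvent flags; `error` is
    // only meaningful when SE_CLOSE is set.
    if (events & webrtc::SE_READ) {
      RTC_LOG(LS_VERBOSE) << "stream readable";
    }
    if (events & webrtc::SE_CLOSE) {
      RTC_LOG(LS_INFO) << "stream closed, error=" << error;
    }
  });
}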
+ SignalEvent(this, stream_events, err); +#pragma clang diagnostic pop + } + + RTC_NO_UNIQUE_ADDRESS SequenceChecker callback_sequence_{ + webrtc::SequenceChecker::kDetached}; + + private: + absl::AnyInvocable callback_ + RTC_GUARDED_BY(&callback_sequence_) = nullptr; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::SE_CLOSE; +using ::webrtc::SE_OPEN; +using ::webrtc::SE_READ; +using ::webrtc::SE_WRITE; +using ::webrtc::SR_BLOCK; +using ::webrtc::SR_EOS; +using ::webrtc::SR_ERROR; +using ::webrtc::SR_SUCCESS; +using ::webrtc::SS_CLOSED; +using ::webrtc::SS_OPEN; +using ::webrtc::SS_OPENING; +using ::webrtc::StreamEvent; +using ::webrtc::StreamInterface; +using ::webrtc::StreamResult; +using ::webrtc::StreamState; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_STREAM_H_ diff --git a/rtc_base/string_encode.cc b/rtc_base/string_encode.cc index 434d1e6139..452b7d6255 100644 --- a/rtc_base/string_encode.cc +++ b/rtc_base/string_encode.cc @@ -17,7 +17,7 @@ #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" -namespace rtc { +namespace webrtc { ///////////////////////////////////////////////////////////////////////////// // String Encoding Utilities @@ -66,7 +66,7 @@ void hex_encode_with_delimiter(char* buffer, while (srcpos < srclen) { unsigned char ch = bsource[srcpos++]; buffer[bufpos] = hex_encode((ch >> 4) & 0xF); - buffer[bufpos + 1] = hex_encode((ch)&0xF); + buffer[bufpos + 1] = hex_encode((ch) & 0xF); bufpos += 2; // Don't write a delimiter after the last byte. @@ -187,88 +187,6 @@ std::vector split(absl::string_view source, char delimiter) { return fields; } -std::string ToString(const bool b) { - return b ? 
"true" : "false"; -} - -std::string ToString(absl::string_view s) { - return std::string(s); -} - -std::string ToString(const char* s) { - return std::string(s); -} - -std::string ToString(const short s) { - char buf[32]; - const int len = std::snprintf(&buf[0], arraysize(buf), "%hd", s); - RTC_DCHECK_LE(len, arraysize(buf)); - return std::string(&buf[0], len); -} -std::string ToString(const unsigned short s) { - char buf[32]; - const int len = std::snprintf(&buf[0], arraysize(buf), "%hu", s); - RTC_DCHECK_LE(len, arraysize(buf)); - return std::string(&buf[0], len); -} -std::string ToString(const int s) { - char buf[32]; - const int len = std::snprintf(&buf[0], arraysize(buf), "%d", s); - RTC_DCHECK_LE(len, arraysize(buf)); - return std::string(&buf[0], len); -} -std::string ToString(const unsigned int s) { - char buf[32]; - const int len = std::snprintf(&buf[0], arraysize(buf), "%u", s); - RTC_DCHECK_LE(len, arraysize(buf)); - return std::string(&buf[0], len); -} -std::string ToString(const long int s) { - char buf[32]; - const int len = std::snprintf(&buf[0], arraysize(buf), "%ld", s); - RTC_DCHECK_LE(len, arraysize(buf)); - return std::string(&buf[0], len); -} -std::string ToString(const unsigned long int s) { - char buf[32]; - const int len = std::snprintf(&buf[0], arraysize(buf), "%lu", s); - RTC_DCHECK_LE(len, arraysize(buf)); - return std::string(&buf[0], len); -} -std::string ToString(const long long int s) { - char buf[32]; - const int len = std::snprintf(&buf[0], arraysize(buf), "%lld", s); - RTC_DCHECK_LE(len, arraysize(buf)); - return std::string(&buf[0], len); -} -std::string ToString(const unsigned long long int s) { - char buf[32]; - const int len = std::snprintf(&buf[0], arraysize(buf), "%llu", s); - RTC_DCHECK_LE(len, arraysize(buf)); - return std::string(&buf[0], len); -} - -std::string ToString(const double d) { - char buf[32]; - const int len = std::snprintf(&buf[0], arraysize(buf), "%g", d); - RTC_DCHECK_LE(len, arraysize(buf)); - return std::string(&buf[0], len); -} - -std::string ToString(const long double d) { - char buf[32]; - const int len = std::snprintf(&buf[0], arraysize(buf), "%Lg", d); - RTC_DCHECK_LE(len, arraysize(buf)); - return std::string(&buf[0], len); -} - -std::string ToString(const void* const p) { - char buf[32]; - const int len = std::snprintf(&buf[0], arraysize(buf), "%p", p); - RTC_DCHECK_LE(len, arraysize(buf)); - return std::string(&buf[0], len); -} - bool FromString(absl::string_view s, bool* b) { if (s == "false") { *b = false; @@ -281,4 +199,4 @@ bool FromString(absl::string_view s, bool* b) { return false; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/string_encode.h b/rtc_base/string_encode.h index 82a9dfdb62..4d6fc0c032 100644 --- a/rtc_base/string_encode.h +++ b/rtc_base/string_encode.h @@ -13,21 +13,24 @@ #include +#include #include #include #include +#include "absl/base/macros.h" +#include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "rtc_base/checks.h" #include "rtc_base/string_to_number.h" +#include "rtc_base/strings/string_format.h" -namespace rtc { +namespace webrtc { -////////////////////////////////////////////////////////////////////// -// String Encoding Utilities -////////////////////////////////////////////////////////////////////// +inline std::string BoolToString(bool b) { + return b ? 
"true" : "false"; +} std::string hex_encode(absl::string_view str); std::string hex_encode_with_delimiter(absl::string_view source, char delimiter); @@ -62,28 +65,9 @@ bool tokenize_first(absl::string_view source, std::string* token, std::string* rest); -// Convert arbitrary values to/from a string. -// TODO(jonasolsson): Remove these when absl::StrCat becomes available. -std::string ToString(bool b); - -std::string ToString(absl::string_view s); -// The const char* overload is needed for correct overload resolution because of -// the const void* version of ToString() below. -std::string ToString(const char* s); - -std::string ToString(short s); -std::string ToString(unsigned short s); -std::string ToString(int s); -std::string ToString(unsigned int s); -std::string ToString(long int s); -std::string ToString(unsigned long int s); -std::string ToString(long long int s); -std::string ToString(unsigned long long int s); +// Versions that behave differently from StrCat -std::string ToString(double t); -std::string ToString(long double t); - -std::string ToString(const void* p); +// Versions not supported by StrCat: template ::value && @@ -91,7 +75,7 @@ template ::type = 0> static bool FromString(absl::string_view s, T* t) { RTC_DCHECK(t); - absl::optional result = StringToNumber(s); + std::optional result = webrtc::StringToNumber(s); if (result) *t = *result; @@ -110,6 +94,58 @@ static inline T FromString(absl::string_view str) { ////////////////////////////////////////////////////////////////////// +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::FromString; +using ::webrtc::hex_decode; +using ::webrtc::hex_decode_with_delimiter; +using ::webrtc::hex_encode; +using ::webrtc::hex_encode_with_delimiter; +using ::webrtc::split; +using ::webrtc::tokenize; +using ::webrtc::tokenize_first; + +namespace internal { +template +struct is_absl_strcat_callable : std::false_type {}; + +template +struct is_absl_strcat_callable< + T, + std::void_t()))>> : std::true_type {}; +} // namespace internal + +template +ABSL_DEPRECATE_AND_INLINE() +inline auto ToString(T value) -> + typename std::enable_if && + internal::is_absl_strcat_callable::value, + std::string>::type { + return absl::StrCat(value); +} + +template +ABSL_DEPRECATE_AND_INLINE() +inline auto ToString(T p) -> + typename std::enable_if::value && + std::is_pointer::value, + std::string>::type { + return webrtc::StringFormat("%p", p); +} + +template +ABSL_DEPRECATE_AND_INLINE() +inline auto ToString(T value) -> + typename std::enable_if && std::is_same_v, + std::string>::type { + return webrtc::BoolToString(value); +} + } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_STRING_ENCODE_H__ diff --git a/rtc_base/string_encode_unittest.cc b/rtc_base/string_encode_unittest.cc index f277d3a856..d950f5774a 100644 --- a/rtc_base/string_encode_unittest.cc +++ b/rtc_base/string_encode_unittest.cc @@ -12,12 +12,10 @@ #include -#include // no-presubmit-check TODO(webrtc:8982) - #include "api/array_view.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { class HexEncodeTest : public ::testing::Test { public: @@ -257,32 +255,6 @@ TEST(SplitTest, EmptyTokens) { EXPECT_TRUE(fields[0].empty()); } -TEST(ToString, SanityCheck) { - EXPECT_EQ(ToString(true), "true"); - EXPECT_EQ(ToString(false), "false"); - - const char* c = 
"message"; - EXPECT_EQ(ToString(c), c); - EXPECT_EQ(ToString(std::string(c)), c); - - EXPECT_EQ(ToString(short{-123}), "-123"); - EXPECT_EQ(ToString((unsigned short)123), "123"); - EXPECT_EQ(ToString(int{-123}), "-123"); - EXPECT_EQ(ToString((unsigned int)123), "123"); - EXPECT_EQ(ToString((long int)-123), "-123"); - EXPECT_EQ(ToString((unsigned long int)123), "123"); - EXPECT_EQ(ToString((long long int)-123), "-123"); - EXPECT_EQ(ToString((unsigned long long int)123), "123"); - - int i = 10; - int* p = &i; - std::ostringstream s; // no-presubmit-check TODO(webrtc:8982) - s << p; - EXPECT_EQ(s.str(), ToString(p)); - - EXPECT_EQ(ToString(0.5), "0.5"); -} - template void ParsesTo(std::string s, T t) { T value; @@ -316,20 +288,4 @@ TEST(FromString, DecodeInvalid) { FailsToParse("1 2"); } -template -void RoundTrip(T t) { - std::string s = ToString(t); - T value; - EXPECT_TRUE(FromString(s, &value)); - EXPECT_EQ(value, t); -} - -TEST(FromString, RoundTrip) { - RoundTrip(123); - RoundTrip(false); - RoundTrip(true); - RoundTrip(0.5); - RoundTrip(-15l); -} - -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/string_to_number.cc b/rtc_base/string_to_number.cc index 1209eced44..e88c97a739 100644 --- a/rtc_base/string_to_number.cc +++ b/rtc_base/string_to_number.cc @@ -17,12 +17,12 @@ #include "rtc_base/checks.h" -namespace rtc { +namespace webrtc { namespace string_to_number_internal { -absl::optional ParseSigned(absl::string_view str, int base) { +std::optional ParseSigned(absl::string_view str, int base) { if (str.empty()) - return absl::nullopt; + return std::nullopt; if (isdigit(static_cast(str[0])) || str[0] == '-') { std::string str_str(str); @@ -35,12 +35,12 @@ absl::optional ParseSigned(absl::string_view str, int base) { return value; } } - return absl::nullopt; + return std::nullopt; } -absl::optional ParseUnsigned(absl::string_view str, int base) { +std::optional ParseUnsigned(absl::string_view str, int base) { if (str.empty()) - return absl::nullopt; + return std::nullopt; if (isdigit(static_cast(str[0])) || str[0] == '-') { std::string str_str(str); @@ -58,7 +58,7 @@ absl::optional ParseUnsigned(absl::string_view str, int base) { return value; } } - return absl::nullopt; + return std::nullopt; } template @@ -80,12 +80,12 @@ inline long double StrToT(const char* str, char** str_end) { } template -absl::optional ParseFloatingPoint(absl::string_view str) { +std::optional ParseFloatingPoint(absl::string_view str) { if (str.empty()) - return absl::nullopt; + return std::nullopt; if (str[0] == '\0') - return absl::nullopt; + return std::nullopt; std::string str_str(str); char* end = nullptr; errno = 0; @@ -93,12 +93,12 @@ absl::optional ParseFloatingPoint(absl::string_view str) { if (end == str_str.c_str() + str_str.size() && errno == 0) { return value; } - return absl::nullopt; + return std::nullopt; } -template absl::optional ParseFloatingPoint(absl::string_view str); -template absl::optional ParseFloatingPoint(absl::string_view str); -template absl::optional ParseFloatingPoint(absl::string_view str); +template std::optional ParseFloatingPoint(absl::string_view str); +template std::optional ParseFloatingPoint(absl::string_view str); +template std::optional ParseFloatingPoint(absl::string_view str); } // namespace string_to_number_internal -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/string_to_number.h b/rtc_base/string_to_number.h index 1d704ee464..a66473b862 100644 --- a/rtc_base/string_to_number.h +++ b/rtc_base/string_to_number.h @@ -12,13 +12,13 @@ 
#define RTC_BASE_STRING_TO_NUMBER_H_ #include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" -namespace rtc { +namespace webrtc { // This file declares a family of functions to parse integers from strings. // The standard C library functions either fail to indicate errors (atoi, etc.) @@ -27,7 +27,7 @@ namespace rtc { // are disabled in WebRTC. // // Integers are parsed using: -// absl::optional StringToNumber(absl::string_view str, +// std::optional StringToNumber(absl::string_view str, // int base = 10); // // These functions parse a value from the beginning of a string into one of the @@ -44,16 +44,16 @@ namespace string_to_number_internal { using unsigned_type = unsigned long long; // NOLINT(runtime/int) using signed_type = long long; // NOLINT(runtime/int) -absl::optional ParseSigned(absl::string_view str, int base); -absl::optional ParseUnsigned(absl::string_view str, int base); +std::optional ParseSigned(absl::string_view str, int base); +std::optional ParseUnsigned(absl::string_view str, int base); template -absl::optional ParseFloatingPoint(absl::string_view str); +std::optional ParseFloatingPoint(absl::string_view str); } // namespace string_to_number_internal template typename std::enable_if::value && std::is_signed::value, - absl::optional>::type + std::optional>::type StringToNumber(absl::string_view str, int base = 10) { using string_to_number_internal::signed_type; static_assert( @@ -62,37 +62,37 @@ StringToNumber(absl::string_view str, int base = 10) { std::numeric_limits::lowest() >= std::numeric_limits::lowest(), "StringToNumber only supports signed integers as large as long long int"); - absl::optional value = + std::optional value = string_to_number_internal::ParseSigned(str, base); if (value && *value >= std::numeric_limits::lowest() && *value <= std::numeric_limits::max()) { return static_cast(*value); } - return absl::nullopt; + return std::nullopt; } template typename std::enable_if::value && std::is_unsigned::value, - absl::optional>::type + std::optional>::type StringToNumber(absl::string_view str, int base = 10) { using string_to_number_internal::unsigned_type; static_assert(std::numeric_limits::max() <= std::numeric_limits::max(), "StringToNumber only supports unsigned integers as large as " "unsigned long long int"); - absl::optional value = + std::optional value = string_to_number_internal::ParseUnsigned(str, base); if (value && *value <= std::numeric_limits::max()) { return static_cast(*value); } - return absl::nullopt; + return std::nullopt; } template typename std::enable_if::value, - absl::optional>::type -StringToNumber(absl::string_view str, int base = 10) { + std::optional>::type +StringToNumber(absl::string_view str, int /* base */ = 10) { static_assert( std::numeric_limits::max() <= std::numeric_limits::max(), "StringToNumber only supports floating-point numbers as large " @@ -100,6 +100,14 @@ StringToNumber(absl::string_view str, int base = 10) { return string_to_number_internal::ParseFloatingPoint(str); } +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
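StringToNumber keeps its strict parsing contract across the absl::optional to std::optional switch: the entire string must be a number, and the value must fit the requested type, otherwise the result is empty. A usage sketch (helper name and fallback semantics are illustrative):

#include <cstdint>
#include <optional>

#include "absl/strings/string_view.h"
#include "rtc_base/string_to_number.h"

// Returns the parsed port, or `fallback` if `text` is not a clean base-10
// number in range: "  80", "80x", "-1" and "70000" all yield the fallback.
uint16_t ParsePortOr(absl::string_view text, uint16_t fallback) {
  std::optional<uint16_t> port = webrtc::StringToNumber<uint16_t>(text);
  return port.value_or(fallback);
}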
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::StringToNumber; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_STRING_TO_NUMBER_H_ diff --git a/rtc_base/string_to_number_unittest.cc b/rtc_base/string_to_number_unittest.cc index edfdbf69ff..77b1437375 100644 --- a/rtc_base/string_to_number_unittest.cc +++ b/rtc_base/string_to_number_unittest.cc @@ -13,13 +13,13 @@ #include #include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { // clang-format off @@ -71,10 +71,10 @@ TYPED_TEST_P(BasicNumberTest, TestInvalidNumbers) { (min_value == 0) ? "-2" : (std::to_string(min_value) + "1"); // Make the large value approximately ten times larger than the maximum. const std::string too_large_string = std::to_string(max_value) + "1"; - EXPECT_EQ(absl::nullopt, StringToNumber(too_low_string)); - EXPECT_EQ(absl::nullopt, StringToNumber(too_low_string.c_str())); - EXPECT_EQ(absl::nullopt, StringToNumber(too_large_string)); - EXPECT_EQ(absl::nullopt, StringToNumber(too_large_string.c_str())); + EXPECT_EQ(std::nullopt, StringToNumber(too_low_string)); + EXPECT_EQ(std::nullopt, StringToNumber(too_low_string.c_str())); + EXPECT_EQ(std::nullopt, StringToNumber(too_large_string)); + EXPECT_EQ(std::nullopt, StringToNumber(too_large_string.c_str())); } TYPED_TEST_P(BasicNumberTest, TestInvalidInputs) { @@ -86,33 +86,33 @@ TYPED_TEST_P(BasicNumberTest, TestInvalidInputs) { const char kBeginningEmbeddedNul[] = {'\0', '1', '2', '3', '4'}; const char kTrailingEmbeddedNul[] = {'1', '2', '3', '4', '\0'}; - EXPECT_EQ(absl::nullopt, StringToNumber(kInvalidCharArray)); - EXPECT_EQ(absl::nullopt, StringToNumber(std::string(kInvalidCharArray))); - EXPECT_EQ(absl::nullopt, StringToNumber(kPlusMinusCharArray)); - EXPECT_EQ(absl::nullopt, StringToNumber(std::string(kPlusMinusCharArray))); - EXPECT_EQ(absl::nullopt, StringToNumber(kNumberFollowedByCruft)); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, StringToNumber(kInvalidCharArray)); + EXPECT_EQ(std::nullopt, StringToNumber(std::string(kInvalidCharArray))); + EXPECT_EQ(std::nullopt, StringToNumber(kPlusMinusCharArray)); + EXPECT_EQ(std::nullopt, StringToNumber(std::string(kPlusMinusCharArray))); + EXPECT_EQ(std::nullopt, StringToNumber(kNumberFollowedByCruft)); + EXPECT_EQ(std::nullopt, StringToNumber(std::string(kNumberFollowedByCruft))); - EXPECT_EQ(absl::nullopt, StringToNumber(" 5")); - EXPECT_EQ(absl::nullopt, StringToNumber(" - 5")); - EXPECT_EQ(absl::nullopt, StringToNumber("- 5")); - EXPECT_EQ(absl::nullopt, StringToNumber(" -5")); - EXPECT_EQ(absl::nullopt, StringToNumber("5 ")); + EXPECT_EQ(std::nullopt, StringToNumber(" 5")); + EXPECT_EQ(std::nullopt, StringToNumber(" - 5")); + EXPECT_EQ(std::nullopt, StringToNumber("- 5")); + EXPECT_EQ(std::nullopt, StringToNumber(" -5")); + EXPECT_EQ(std::nullopt, StringToNumber("5 ")); // Test various types of empty inputs - EXPECT_EQ(absl::nullopt, StringToNumber({nullptr, 0})); - EXPECT_EQ(absl::nullopt, StringToNumber("")); - EXPECT_EQ(absl::nullopt, StringToNumber(std::string())); - EXPECT_EQ(absl::nullopt, StringToNumber(std::string(""))); - EXPECT_EQ(absl::nullopt, StringToNumber(absl::string_view())); - EXPECT_EQ(absl::nullopt, StringToNumber(absl::string_view(nullptr, 0))); - EXPECT_EQ(absl::nullopt, StringToNumber(absl::string_view(""))); + EXPECT_EQ(std::nullopt, StringToNumber({nullptr, 0})); + EXPECT_EQ(std::nullopt, 
StringToNumber("")); + EXPECT_EQ(std::nullopt, StringToNumber(std::string())); + EXPECT_EQ(std::nullopt, StringToNumber(std::string(""))); + EXPECT_EQ(std::nullopt, StringToNumber(absl::string_view())); + EXPECT_EQ(std::nullopt, StringToNumber(absl::string_view(nullptr, 0))); + EXPECT_EQ(std::nullopt, StringToNumber(absl::string_view(""))); // Test strings with embedded nuls. - EXPECT_EQ(absl::nullopt, StringToNumber(absl::string_view( - kEmbeddedNul, sizeof(kEmbeddedNul)))); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, StringToNumber(absl::string_view( + kEmbeddedNul, sizeof(kEmbeddedNul)))); + EXPECT_EQ(std::nullopt, StringToNumber(absl::string_view( kBeginningEmbeddedNul, sizeof(kBeginningEmbeddedNul)))); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, StringToNumber(absl::string_view(kTrailingEmbeddedNul, sizeof(kTrailingEmbeddedNul)))); } @@ -129,10 +129,10 @@ INSTANTIATE_TYPED_TEST_SUITE_P(StringToNumberTest_Integers, IntegerTypes); TEST(StringToNumberTest, TestSpecificValues) { - EXPECT_EQ(absl::nullopt, StringToNumber("256")); - EXPECT_EQ(absl::nullopt, StringToNumber("-256")); - EXPECT_EQ(absl::nullopt, StringToNumber("256")); - EXPECT_EQ(absl::nullopt, StringToNumber("-256")); + EXPECT_EQ(std::nullopt, StringToNumber("256")); + EXPECT_EQ(std::nullopt, StringToNumber("-256")); + EXPECT_EQ(std::nullopt, StringToNumber("256")); + EXPECT_EQ(std::nullopt, StringToNumber("-256")); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/string_utils.cc b/rtc_base/string_utils.cc index b93e615705..9e6baffcb5 100644 --- a/rtc_base/string_utils.cc +++ b/rtc_base/string_utils.cc @@ -10,9 +10,14 @@ #include "rtc_base/string_utils.h" +#include +#include +#include +#include + #include "absl/strings/string_view.h" -namespace rtc { +namespace webrtc { size_t strcpyn(char* buffer, size_t buflen, absl::string_view source) { if (buflen <= 0) @@ -34,4 +39,4 @@ std::string ToHex(const int i) { return std::string(buffer); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/string_utils.h b/rtc_base/string_utils.h index 9534d59e04..ca2607b186 100644 --- a/rtc_base/string_utils.h +++ b/rtc_base/string_utils.h @@ -25,14 +25,11 @@ #if defined(WEBRTC_POSIX) #include -#include #endif // WEBRTC_POSIX #include -#include "absl/strings/string_view.h" - -namespace rtc { +namespace webrtc { const size_t SIZE_UNKNOWN = static_cast(-1); @@ -40,7 +37,7 @@ const size_t SIZE_UNKNOWN = static_cast(-1); // std::map that support heterogenous lookup. // // Example usage: -// std::map my_map; +// std::map my_map; struct AbslStringViewCmp { using is_transparent = void; bool operator()(absl::string_view a, absl::string_view b) const { @@ -133,6 +130,18 @@ constexpr auto MakeCompileTimeString(const char (&a)[N]) { return rtc_base_string_utils_internal::CompileTimeString(a); } +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
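string_utils.h also provides AbslStringViewCmp, the transparent comparator mentioned in the header comment above; it lets string-keyed std::map and std::set containers be queried with an absl::string_view without first materialising a std::string. A usage sketch (the table contents are invented for illustration):

#include <map>
#include <string>

#include "absl/strings/string_view.h"
#include "rtc_base/string_utils.h"

int LookupCodecPayloadType(absl::string_view name) {
  // `is_transparent` on the comparator enables find() with a string_view key.
  static const std::map<std::string, int, webrtc::AbslStringViewCmp> kTypes = {
      {"opus", 111}, {"VP8", 96}};
  auto it = kTypes.find(name);
  return it == kTypes.end() ? -1 : it->second;
}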
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::AbslStringViewCmp; +using ::webrtc::MakeCompileTimeString; +using ::webrtc::SIZE_UNKNOWN; +using ::webrtc::strcpyn; +using ::webrtc::ToHex; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_STRING_UTILS_H_ diff --git a/rtc_base/string_utils_unittest.cc b/rtc_base/string_utils_unittest.cc index 4e4bebdda7..130ef4add3 100644 --- a/rtc_base/string_utils_unittest.cc +++ b/rtc_base/string_utils_unittest.cc @@ -10,9 +10,11 @@ #include "rtc_base/string_utils.h" +#include + #include "test/gtest.h" -namespace rtc { +namespace webrtc { TEST(string_toHexTest, ToHex) { EXPECT_EQ(ToHex(0), "0"); @@ -56,4 +58,4 @@ static_assert(detail::StringEquals(MakeCompileTimeString("abc123").Concat( "abc123def456ghi"), "Strings should concatenate."); -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/strings/audio_format_to_string.cc b/rtc_base/strings/audio_format_to_string.cc deleted file mode 100644 index 7e91c3b49d..0000000000 --- a/rtc_base/strings/audio_format_to_string.cc +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/strings/audio_format_to_string.h" - -#include - -#include "rtc_base/strings/string_builder.h" - -namespace rtc { -std::string ToString(const webrtc::SdpAudioFormat& saf) { - char sb_buf[1024]; - rtc::SimpleStringBuilder sb(sb_buf); - sb << "{name: " << saf.name; - sb << ", clockrate_hz: " << saf.clockrate_hz; - sb << ", num_channels: " << saf.num_channels; - sb << ", parameters: {"; - const char* sep = ""; - for (const auto& kv : saf.parameters) { - sb << sep << kv.first << ": " << kv.second; - sep = ", "; - } - sb << "}}"; - return sb.str(); -} -std::string ToString(const webrtc::AudioCodecInfo& aci) { - char sb_buf[1024]; - rtc::SimpleStringBuilder sb(sb_buf); - sb << "{sample_rate_hz: " << aci.sample_rate_hz; - sb << ", num_channels: " << aci.num_channels; - sb << ", default_bitrate_bps: " << aci.default_bitrate_bps; - sb << ", min_bitrate_bps: " << aci.min_bitrate_bps; - sb << ", max_bitrate_bps: " << aci.max_bitrate_bps; - sb << ", allow_comfort_noise: " << aci.allow_comfort_noise; - sb << ", supports_network_adaption: " << aci.supports_network_adaption; - sb << "}"; - return sb.str(); -} -std::string ToString(const webrtc::AudioCodecSpec& acs) { - char sb_buf[1024]; - rtc::SimpleStringBuilder sb(sb_buf); - sb << "{format: " << ToString(acs.format); - sb << ", info: " << ToString(acs.info); - sb << "}"; - return sb.str(); -} -} // namespace rtc diff --git a/rtc_base/strings/audio_format_to_string.h b/rtc_base/strings/audio_format_to_string.h deleted file mode 100644 index 6a4535c939..0000000000 --- a/rtc_base/strings/audio_format_to_string.h +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_STRINGS_AUDIO_FORMAT_TO_STRING_H_ -#define RTC_BASE_STRINGS_AUDIO_FORMAT_TO_STRING_H_ - -#include - -#include "api/audio_codecs/audio_format.h" - -namespace rtc { -std::string ToString(const webrtc::SdpAudioFormat& saf); -std::string ToString(const webrtc::AudioCodecInfo& saf); -std::string ToString(const webrtc::AudioCodecSpec& acs); -} // namespace rtc - -#endif // RTC_BASE_STRINGS_AUDIO_FORMAT_TO_STRING_H_ diff --git a/rtc_base/strings/json.cc b/rtc_base/strings/json.cc index 5cf153c926..a7cbfeb3a3 100644 --- a/rtc_base/strings/json.cc +++ b/rtc_base/strings/json.cc @@ -17,18 +17,18 @@ #include "absl/strings/string_view.h" #include "rtc_base/string_encode.h" -namespace rtc { +namespace webrtc { bool GetStringFromJson(const Json::Value& in, std::string* out) { if (!in.isString()) { if (in.isBool()) { - *out = rtc::ToString(in.asBool()); + *out = BoolToString(in.asBool()); } else if (in.isInt()) { - *out = rtc::ToString(in.asInt()); + *out = absl::StrCat(in.asInt()); } else if (in.isUInt()) { - *out = rtc::ToString(in.asUInt()); + *out = absl::StrCat(in.asUInt()); } else if (in.isDouble()) { - *out = rtc::ToString(in.asDouble()); + *out = absl::StrCat(in.asDouble()); } else { return false; } @@ -293,4 +293,4 @@ std::string JsonValueToString(const Json::Value& json) { return output.substr(0, output.size() - 1); // trim trailing newline } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/strings/json.h b/rtc_base/strings/json.h index 3e4bac6d89..90e9080d4e 100644 --- a/rtc_base/strings/json.h +++ b/rtc_base/strings/json.h @@ -11,13 +11,16 @@ #ifndef RTC_BASE_STRINGS_JSON_H_ #define RTC_BASE_STRINGS_JSON_H_ +#include #include #include #include "absl/strings/string_view.h" -#include "json/json.h" +#include "json/json.h" // IWYU pragma: export +#include "json/reader.h" // IWYU pragma: export +#include "json/value.h" // IWYU pragma: export -namespace rtc { +namespace webrtc { /////////////////////////////////////////////////////////////////////////////// // JSON Helpers @@ -78,6 +81,43 @@ bool GetDoubleFromJsonObject(const Json::Value& in, // Writes out a Json value as a string. std::string JsonValueToString(const Json::Value& json); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
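In GetStringFromJson above, the bool branch keeps BoolToString rather than absl::StrCat because StrCat would render a bool as "1"/"0" through integral promotion, while the numeric branches can use StrCat directly. A small sketch of the resulting coercion behaviour (the helper name is illustrative):

#include <string>

#include "rtc_base/strings/json.h"

// Mirrors the coercion in GetStringFromJson: booleans keep the "true"/"false"
// spelling, numeric values go through absl::StrCat, strings pass through
// unchanged, and arrays/objects/null make the call return false.
std::string JsonScalarToString(const Json::Value& value) {
  std::string out;
  if (!webrtc::GetStringFromJson(value, &out)) {
    return "<not a scalar>";
  }
  return out;
}
// JsonScalarToString(Json::Value(true))  -> "true"
// JsonScalarToString(Json::Value(42))    -> "42"
// JsonScalarToString(Json::Value())      -> "<not a scalar>"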
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::BoolVectorToJsonArray; +using ::webrtc::DoubleVectorToJsonArray; +using ::webrtc::GetBoolFromJson; +using ::webrtc::GetBoolFromJsonArray; +using ::webrtc::GetBoolFromJsonObject; +using ::webrtc::GetDoubleFromJson; +using ::webrtc::GetDoubleFromJsonArray; +using ::webrtc::GetDoubleFromJsonObject; +using ::webrtc::GetIntFromJson; +using ::webrtc::GetIntFromJsonArray; +using ::webrtc::GetIntFromJsonObject; +using ::webrtc::GetStringFromJson; +using ::webrtc::GetStringFromJsonArray; +using ::webrtc::GetStringFromJsonObject; +using ::webrtc::GetUIntFromJson; +using ::webrtc::GetUIntFromJsonArray; +using ::webrtc::GetUIntFromJsonObject; +using ::webrtc::GetValueFromJsonArray; +using ::webrtc::GetValueFromJsonObject; +using ::webrtc::IntVectorToJsonArray; +using ::webrtc::JsonArrayToBoolVector; +using ::webrtc::JsonArrayToDoubleVector; +using ::webrtc::JsonArrayToIntVector; +using ::webrtc::JsonArrayToStringVector; +using ::webrtc::JsonArrayToUIntVector; +using ::webrtc::JsonArrayToValueVector; +using ::webrtc::JsonValueToString; +using ::webrtc::StringVectorToJsonArray; +using ::webrtc::UIntVectorToJsonArray; +using ::webrtc::ValueVectorToJsonArray; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_STRINGS_JSON_H_ diff --git a/rtc_base/strings/json_unittest.cc b/rtc_base/strings/json_unittest.cc index 82d26f179e..b0865492d8 100644 --- a/rtc_base/strings/json_unittest.cc +++ b/rtc_base/strings/json_unittest.cc @@ -14,7 +14,7 @@ #include "test/gtest.h" -namespace rtc { +namespace webrtc { static Json::Value in_s("foo"); static Json::Value in_sn("99"); @@ -280,4 +280,4 @@ TEST(JsonTest, DoubleVectorToFromArray) { } } -} // namespace rtc +} // namespace webrtc diff --git a/net/dcsctp/common/str_join.h b/rtc_base/strings/str_join.h similarity index 84% rename from net/dcsctp/common/str_join.h rename to rtc_base/strings/str_join.h index 04517827b7..779cded4b0 100644 --- a/net/dcsctp/common/str_join.h +++ b/rtc_base/strings/str_join.h @@ -7,19 +7,19 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#ifndef NET_DCSCTP_COMMON_STR_JOIN_H_ -#define NET_DCSCTP_COMMON_STR_JOIN_H_ +#ifndef RTC_BASE_STRINGS_STR_JOIN_H_ +#define RTC_BASE_STRINGS_STR_JOIN_H_ #include #include "absl/strings/string_view.h" #include "rtc_base/strings/string_builder.h" -namespace dcsctp { +namespace webrtc { template std::string StrJoin(const Range& seq, absl::string_view delimiter) { - rtc::StringBuilder sb; + StringBuilder sb; int idx = 0; for (const typename Range::value_type& elem : seq) { @@ -37,7 +37,7 @@ template std::string StrJoin(const Range& seq, absl::string_view delimiter, const Functor& fn) { - rtc::StringBuilder sb; + StringBuilder sb; int idx = 0; for (const typename Range::value_type& elem : seq) { @@ -51,6 +51,6 @@ std::string StrJoin(const Range& seq, return sb.Release(); } -} // namespace dcsctp +} // namespace webrtc -#endif // NET_DCSCTP_COMMON_STR_JOIN_H_ +#endif // RTC_BASE_STRINGS_STR_JOIN_H_ diff --git a/net/dcsctp/common/str_join_test.cc b/rtc_base/strings/str_join_unittest.cc similarity index 74% rename from net/dcsctp/common/str_join_test.cc rename to rtc_base/strings/str_join_unittest.cc index dbfd92c1cf..305bf08c6a 100644 --- a/net/dcsctp/common/str_join_test.cc +++ b/rtc_base/strings/str_join_unittest.cc @@ -7,15 +7,15 @@ * in the file PATENTS. 
All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ -#include "net/dcsctp/common/str_join.h" +#include "rtc_base/strings/str_join.h" #include #include #include -#include "test/gmock.h" +#include "test/gtest.h" -namespace dcsctp { +namespace webrtc { namespace { TEST(StrJoinTest, CanJoinStringsFromVector) { @@ -33,13 +33,13 @@ TEST(StrJoinTest, CanJoinNumbersFromArray) { TEST(StrJoinTest, CanFormatElementsWhileJoining) { std::vector> pairs = { {"hello", "world"}, {"foo", "bar"}, {"fum", "gazonk"}}; - std::string s = StrJoin(pairs, ",", - [&](rtc::StringBuilder& sb, - const std::pair& p) { - sb << p.first << "=" << p.second; - }); + std::string s = StrJoin( + pairs, ",", + [&](StringBuilder& sb, const std::pair& p) { + sb << p.first << "=" << p.second; + }); EXPECT_EQ(s, "hello=world,foo=bar,fum=gazonk"); } } // namespace -} // namespace dcsctp +} // namespace webrtc diff --git a/rtc_base/strings/string_builder.cc b/rtc_base/strings/string_builder.cc index a419b0b3cc..fb568fbd8c 100644 --- a/rtc_base/strings/string_builder.cc +++ b/rtc_base/strings/string_builder.cc @@ -19,9 +19,9 @@ #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_minmax.h" -namespace rtc { +namespace webrtc { -SimpleStringBuilder::SimpleStringBuilder(rtc::ArrayView buffer) +SimpleStringBuilder::SimpleStringBuilder(ArrayView buffer) : buffer_(buffer) { buffer_[0] = '\0'; RTC_DCHECK(IsConsistent()); @@ -34,8 +34,7 @@ SimpleStringBuilder& SimpleStringBuilder::operator<<(char ch) { SimpleStringBuilder& SimpleStringBuilder::operator<<(absl::string_view str) { RTC_DCHECK_LT(size_ + str.length(), buffer_.size()) << "Buffer size was insufficient"; - const size_t chars_added = - rtc::SafeMin(str.length(), buffer_.size() - size_ - 1); + const size_t chars_added = SafeMin(str.length(), buffer_.size() - size_ - 1); memcpy(&buffer_[size_], str.data(), chars_added); size_ += chars_added; buffer_[size_] = '\0'; @@ -97,7 +96,7 @@ SimpleStringBuilder& SimpleStringBuilder::AppendFormat(const char* fmt, ...) { const int len = std::vsnprintf(&buffer_[size_], buffer_.size() - size_, fmt, args); if (len >= 0) { - const size_t chars_added = rtc::SafeMin(len, buffer_.size() - 1 - size_); + const size_t chars_added = SafeMin(len, buffer_.size() - 1 - size_); size_ += chars_added; RTC_DCHECK_EQ(len, chars_added) << "Buffer size was insufficient"; } else { @@ -131,4 +130,4 @@ StringBuilder& StringBuilder::AppendFormat(const char* fmt, ...) { return *this; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/strings/string_builder.h b/rtc_base/strings/string_builder.h index 00986371d3..55d0b061ef 100644 --- a/rtc_base/strings/string_builder.h +++ b/rtc_base/strings/string_builder.h @@ -15,11 +15,12 @@ #include #include +#include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" #include "api/array_view.h" #include "rtc_base/string_encode.h" -namespace rtc { +namespace webrtc { // This is a minimalistic string builder class meant to cover the most cases of // when you might otherwise be tempted to use a stringstream (discouraged for @@ -28,7 +29,7 @@ namespace rtc { // read via `str()`. 
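Before the declaration below, a short usage sketch mirroring the unit tests later in this patch: the builder writes into a caller-provided stack buffer, so formatting stays allocation-free, and the result is read back with str().

#include "rtc_base/strings/string_builder.h"

void SketchSimpleStringBuilder() {
  char buf[64];
  webrtc::SimpleStringBuilder sb(buf);  // writes into the stack buffer
  sb << "bitrate=" << 1200 << "kbps";   // streaming append, no heap use
  sb.AppendFormat(" (seq %d)", 7);      // printf-style append
  const char* text = sb.str();          // NUL-terminated, points into buf
  (void)text;
}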
class SimpleStringBuilder { public: - explicit SimpleStringBuilder(rtc::ArrayView buffer); + explicit SimpleStringBuilder(ArrayView buffer); SimpleStringBuilder(const SimpleStringBuilder&) = delete; SimpleStringBuilder& operator=(const SimpleStringBuilder&) = delete; @@ -69,7 +70,7 @@ class SimpleStringBuilder { // size allows the buffer to be stack allocated, which helps performance. // Having a fixed size is furthermore useful to avoid unnecessary resizing // while building it. - const rtc::ArrayView buffer_; + const ArrayView buffer_; // Represents the number of characters written to the buffer. // This does not include the terminating '\0'. @@ -98,47 +99,42 @@ class StringBuilder { StringBuilder& operator<<(char c) = delete; StringBuilder& operator<<(int i) { - str_ += rtc::ToString(i); + str_ += absl::StrCat(i); return *this; } StringBuilder& operator<<(unsigned i) { - str_ += rtc::ToString(i); + str_ += absl::StrCat(i); return *this; } StringBuilder& operator<<(long i) { // NOLINT - str_ += rtc::ToString(i); + str_ += absl::StrCat(i); return *this; } StringBuilder& operator<<(long long i) { // NOLINT - str_ += rtc::ToString(i); + str_ += absl::StrCat(i); return *this; } StringBuilder& operator<<(unsigned long i) { // NOLINT - str_ += rtc::ToString(i); + str_ += absl::StrCat(i); return *this; } StringBuilder& operator<<(unsigned long long i) { // NOLINT - str_ += rtc::ToString(i); + str_ += absl::StrCat(i); return *this; } StringBuilder& operator<<(float f) { - str_ += rtc::ToString(f); + str_ += absl::StrCat(f); return *this; } StringBuilder& operator<<(double f) { - str_ += rtc::ToString(f); - return *this; - } - - StringBuilder& operator<<(long double f) { - str_ += rtc::ToString(f); + str_ += absl::StrCat(f); return *this; } @@ -165,6 +161,15 @@ class StringBuilder { std::string str_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
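A small sketch of the absl::StrCat calls that the StringBuilder operators above now delegate to, for orientation (behavior assumed from Abseil's documented numeric-to-text conversion; names here are illustrative):

#include <string>

#include "absl/strings/str_cat.h"

void SketchStrCat() {
  std::string s;
  s += absl::StrCat(42);             // "42"
  s += absl::StrCat(2.5);            // "2.5"
  s += absl::StrCat(78187493520ll);  // "78187493520"
}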
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::SimpleStringBuilder; +using ::webrtc::StringBuilder; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_STRINGS_STRING_BUILDER_H_ diff --git a/rtc_base/strings/string_builder_unittest.cc b/rtc_base/strings/string_builder_unittest.cc index 99dfd86292..44c6c4b633 100644 --- a/rtc_base/strings/string_builder_unittest.cc +++ b/rtc_base/strings/string_builder_unittest.cc @@ -16,11 +16,11 @@ #include "test/gmock.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { TEST(SimpleStringBuilder, Limit) { char sb_buf[10]; - SimpleStringBuilder sb(sb_buf); + webrtc::SimpleStringBuilder sb(sb_buf); EXPECT_EQ(0u, strlen(sb.str())); // Test that for a SSB with a buffer size of 10, that we can write 9 chars @@ -31,7 +31,7 @@ TEST(SimpleStringBuilder, Limit) { TEST(SimpleStringBuilder, NumbersAndChars) { char sb_buf[100]; - SimpleStringBuilder sb(sb_buf); + webrtc::SimpleStringBuilder sb(sb_buf); sb << 1 << ':' << 2.1 << ":" << 2.2f << ':' << 78187493520ll << ':' << 78187493520ul; EXPECT_EQ(0, strcmp(sb.str(), "1:2.1:2.2:78187493520:78187493520")); @@ -39,7 +39,7 @@ TEST(SimpleStringBuilder, NumbersAndChars) { TEST(SimpleStringBuilder, Format) { char sb_buf[100]; - SimpleStringBuilder sb(sb_buf); + webrtc::SimpleStringBuilder sb(sb_buf); sb << "Here we go - "; sb.AppendFormat("This is a hex formatted value: 0x%08llx", 3735928559ULL); EXPECT_EQ(0, @@ -49,7 +49,7 @@ TEST(SimpleStringBuilder, Format) { TEST(SimpleStringBuilder, StdString) { char sb_buf[100]; - SimpleStringBuilder sb(sb_buf); + webrtc::SimpleStringBuilder sb(sb_buf); std::string str = "does this work?"; sb << str; EXPECT_EQ(str, sb.str()); @@ -61,7 +61,7 @@ TEST(SimpleStringBuilder, StdString) { TEST(SimpleStringBuilderDeathTest, BufferOverrunConstCharP) { char sb_buf[4]; - SimpleStringBuilder sb(sb_buf); + webrtc::SimpleStringBuilder sb(sb_buf); const char* const msg = "This is just too much"; #if RTC_DCHECK_IS_ON EXPECT_DEATH(sb << msg, ""); @@ -73,7 +73,7 @@ TEST(SimpleStringBuilderDeathTest, BufferOverrunConstCharP) { TEST(SimpleStringBuilderDeathTest, BufferOverrunStdString) { char sb_buf[4]; - SimpleStringBuilder sb(sb_buf); + webrtc::SimpleStringBuilder sb(sb_buf); sb << 12; const std::string msg = "Aw, come on!"; #if RTC_DCHECK_IS_ON @@ -86,7 +86,7 @@ TEST(SimpleStringBuilderDeathTest, BufferOverrunStdString) { TEST(SimpleStringBuilderDeathTest, BufferOverrunInt) { char sb_buf[4]; - SimpleStringBuilder sb(sb_buf); + webrtc::SimpleStringBuilder sb(sb_buf); constexpr int num = -12345; #if RTC_DCHECK_IS_ON EXPECT_DEATH(sb << num, ""); @@ -102,7 +102,7 @@ TEST(SimpleStringBuilderDeathTest, BufferOverrunInt) { TEST(SimpleStringBuilderDeathTest, BufferOverrunDouble) { char sb_buf[5]; - SimpleStringBuilder sb(sb_buf); + webrtc::SimpleStringBuilder sb(sb_buf); constexpr double num = 123.456; #if RTC_DCHECK_IS_ON EXPECT_DEATH(sb << num, ""); @@ -115,7 +115,7 @@ TEST(SimpleStringBuilderDeathTest, BufferOverrunDouble) { TEST(SimpleStringBuilderDeathTest, BufferOverrunConstCharPAlreadyFull) { char sb_buf[4]; - SimpleStringBuilder sb(sb_buf); + webrtc::SimpleStringBuilder sb(sb_buf); sb << 123; const char* const msg = "This is just too much"; #if RTC_DCHECK_IS_ON @@ -128,7 +128,7 @@ TEST(SimpleStringBuilderDeathTest, BufferOverrunConstCharPAlreadyFull) { TEST(SimpleStringBuilderDeathTest, BufferOverrunIntAlreadyFull) { char sb_buf[4]; - SimpleStringBuilder sb(sb_buf); + webrtc::SimpleStringBuilder sb(sb_buf); sb << "xyz"; constexpr 
int num = -12345; #if RTC_DCHECK_IS_ON @@ -145,7 +145,7 @@ TEST(SimpleStringBuilderDeathTest, BufferOverrunIntAlreadyFull) { // StringBuilder. TEST(StringBuilder, Limit) { - StringBuilder sb; + webrtc::StringBuilder sb; EXPECT_EQ(0u, sb.str().size()); sb << "012345678"; @@ -153,7 +153,7 @@ TEST(StringBuilder, Limit) { } TEST(StringBuilder, NumbersAndChars) { - StringBuilder sb; + webrtc::StringBuilder sb; sb << 1 << ":" << 2.1 << ":" << 2.2f << ":" << 78187493520ll << ":" << 78187493520ul; EXPECT_THAT(sb.str(), @@ -161,21 +161,21 @@ TEST(StringBuilder, NumbersAndChars) { } TEST(StringBuilder, Format) { - StringBuilder sb; + webrtc::StringBuilder sb; sb << "Here we go - "; sb.AppendFormat("This is a hex formatted value: 0x%08llx", 3735928559ULL); EXPECT_EQ(sb.str(), "Here we go - This is a hex formatted value: 0xdeadbeef"); } TEST(StringBuilder, StdString) { - StringBuilder sb; + webrtc::StringBuilder sb; std::string str = "does this work?"; sb << str; EXPECT_EQ(str, sb.str()); } TEST(StringBuilder, Release) { - StringBuilder sb; + webrtc::StringBuilder sb; std::string str = "This string has to be of a moderate length, or we might " "run into problems with small object optimizations."; @@ -190,7 +190,7 @@ TEST(StringBuilder, Release) { } TEST(StringBuilder, Reset) { - StringBuilder sb("abc"); + webrtc::StringBuilder sb("abc"); sb << "def"; EXPECT_EQ("abcdef", sb.str()); sb.Clear(); @@ -199,4 +199,4 @@ TEST(StringBuilder, Reset) { EXPECT_EQ("123!", sb.str()); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/strings/string_format.cc b/rtc_base/strings/string_format.cc index e69fb6193d..35661531ec 100644 --- a/rtc_base/strings/string_format.cc +++ b/rtc_base/strings/string_format.cc @@ -14,7 +14,7 @@ #include "rtc_base/checks.h" -namespace rtc { +namespace webrtc { namespace { @@ -38,4 +38,4 @@ std::string StringFormat(const char* fmt, ...) { return std::string(buffer); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/strings/string_format.h b/rtc_base/strings/string_format.h index 13124d2925..707cab0ed6 100644 --- a/rtc_base/strings/string_format.h +++ b/rtc_base/strings/string_format.h @@ -13,7 +13,7 @@ #include -namespace rtc { +namespace webrtc { #if defined(__GNUC__) #define RTC_PRINTF_FORMAT(format_param, dots_param) \ @@ -26,6 +26,14 @@ namespace rtc { // Based on base::StringPrintf() in Chrome but without its fancy dynamic memory // allocation for any size of the input buffer. std::string StringFormat(const char* fmt, ...) RTC_PRINTF_FORMAT(1, 2); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::StringFormat; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_STRINGS_STRING_FORMAT_H_ diff --git a/rtc_base/strings/string_format_unittest.cc b/rtc_base/strings/string_format_unittest.cc index 5531001979..73a04fa183 100644 --- a/rtc_base/strings/string_format_unittest.cc +++ b/rtc_base/strings/string_format_unittest.cc @@ -17,7 +17,7 @@ #include "rtc_base/string_encode.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { TEST(StringFormatTest, Empty) { EXPECT_EQ("", StringFormat("%s", "")); @@ -40,7 +40,7 @@ TEST(StringFormatTest, MaxSizeShouldWork) { // whe using `%.*s`. 
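The test that follows exercises `%.*s`; a minimal sketch of that pattern with the StringFormat declared in this header, assuming an absl::string_view input (the cast to int is needed because `*` consumes an int argument):

#include <string>

#include "absl/strings/string_view.h"
#include "rtc_base/strings/string_format.h"

std::string SketchFormatStringView(absl::string_view sv) {
  // `%.*s` prints exactly sv.length() characters starting at sv.data(),
  // so a view that is not NUL-terminated is safe to pass.
  return webrtc::StringFormat("We have a %.*s.", static_cast<int>(sv.length()),
                              sv.data());
}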
TEST(StringFormatTest, FormatStringView) { const std::string main_string("This is a substring test."); - std::vector string_views = rtc::split(main_string, ' '); + std::vector string_views = split(main_string, ' '); ASSERT_EQ(string_views.size(), 5u); const absl::string_view& sv = string_views[3]; @@ -49,4 +49,4 @@ TEST(StringFormatTest, FormatStringView) { EXPECT_EQ(formatted.compare("We have a substring."), 0); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/synchronization/BUILD.gn b/rtc_base/synchronization/BUILD.gn index 5cab524540..6b05f5ce1d 100644 --- a/rtc_base/synchronization/BUILD.gn +++ b/rtc_base/synchronization/BUILD.gn @@ -36,15 +36,18 @@ rtc_source_set("mutex") { "..:macromagic", "..:platform_thread_types", "../system:no_unique_address", + "//third_party/abseil-cpp/absl/base:core_headers", ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ] if (rtc_use_absl_mutex) { - absl_deps += [ "//third_party/abseil-cpp/absl/synchronization" ] + deps += [ "//third_party/abseil-cpp/absl/synchronization" ] } } rtc_library("sequence_checker_internal") { - visibility = [ "../../api:sequence_checker" ] + visibility = [ + "../../api:rtc_api_unittests", + "../../api:sequence_checker", + ] sources = [ "sequence_checker_internal.cc", "sequence_checker_internal.h", @@ -65,8 +68,8 @@ rtc_library("yield_policy") { "yield_policy.cc", "yield_policy.h", ] - deps = [ "..:checks" ] - absl_deps = [ + deps = [ + "..:checks", "//third_party/abseil-cpp/absl/base:config", "//third_party/abseil-cpp/absl/base:core_headers", ] diff --git a/rtc_base/synchronization/mutex.h b/rtc_base/synchronization/mutex.h index 104f4fd3e1..1144b4b8e6 100644 --- a/rtc_base/synchronization/mutex.h +++ b/rtc_base/synchronization/mutex.h @@ -14,7 +14,6 @@ #include #include "absl/base/attributes.h" -#include "absl/base/const_init.h" #include "rtc_base/checks.h" #include "rtc_base/thread_annotations.h" diff --git a/rtc_base/synchronization/mutex_pthread.h b/rtc_base/synchronization/mutex_pthread.h index c749a208aa..a532bd229a 100644 --- a/rtc_base/synchronization/mutex_pthread.h +++ b/rtc_base/synchronization/mutex_pthread.h @@ -76,8 +76,8 @@ class RTC_LOCKABLE MutexImpl final { } private: - // Use two separate primitive types, rather than absl::optional, since the - // data race described below might invalidate absl::optional invariants. + // Use two separate primitive types, rather than std::optional, since the + // data race described below might invalidate std::optional invariants. 
bool is_owned_ = false; pthread_t latest_owner_ = pthread_self(); #endif diff --git a/rtc_base/synchronization/mutex_unittest.cc b/rtc_base/synchronization/mutex_unittest.cc index a5ebc5f7d4..06938a21f2 100644 --- a/rtc_base/synchronization/mutex_unittest.cc +++ b/rtc_base/synchronization/mutex_unittest.cc @@ -30,8 +30,8 @@ namespace webrtc { namespace { -using ::rtc::Event; -using ::rtc::Thread; +using ::webrtc::Event; +using ::webrtc::Thread; constexpr int kNumThreads = 16; diff --git a/rtc_base/synchronization/sequence_checker_internal.cc b/rtc_base/synchronization/sequence_checker_internal.cc index 3e205b91d5..007acacd14 100644 --- a/rtc_base/synchronization/sequence_checker_internal.cc +++ b/rtc_base/synchronization/sequence_checker_internal.cc @@ -11,20 +11,28 @@ #include +#include "api/task_queue/task_queue_base.h" #include "rtc_base/checks.h" +#include "rtc_base/platform_thread_types.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { namespace webrtc_sequence_checker_internal { SequenceCheckerImpl::SequenceCheckerImpl(bool attach_to_current_thread) : attached_(attach_to_current_thread), - valid_thread_(rtc::CurrentThreadRef()), + valid_thread_(CurrentThreadRef()), valid_queue_(TaskQueueBase::Current()) {} +SequenceCheckerImpl::SequenceCheckerImpl(TaskQueueBase* attached_queue) + : attached_(attached_queue != nullptr), + valid_thread_(PlatformThreadRef()), + valid_queue_(attached_queue) {} + bool SequenceCheckerImpl::IsCurrent() const { const TaskQueueBase* const current_queue = TaskQueueBase::Current(); - const rtc::PlatformThreadRef current_thread = rtc::CurrentThreadRef(); + const PlatformThreadRef current_thread = CurrentThreadRef(); MutexLock scoped_lock(&lock_); if (!attached_) { // Previously detached. 
attached_ = true; @@ -35,7 +43,7 @@ bool SequenceCheckerImpl::IsCurrent() const { if (valid_queue_) { return valid_queue_ == current_queue; } - return rtc::IsThreadRefEqual(valid_thread_, current_thread); + return IsThreadRefEqual(valid_thread_, current_thread); } void SequenceCheckerImpl::Detach() { @@ -48,7 +56,7 @@ void SequenceCheckerImpl::Detach() { #if RTC_DCHECK_IS_ON std::string SequenceCheckerImpl::ExpectationToString() const { const TaskQueueBase* const current_queue = TaskQueueBase::Current(); - const rtc::PlatformThreadRef current_thread = rtc::CurrentThreadRef(); + const webrtc::PlatformThreadRef current_thread = webrtc::CurrentThreadRef(); MutexLock scoped_lock(&lock_); if (!attached_) return "Checker currently not attached."; @@ -60,7 +68,7 @@ std::string SequenceCheckerImpl::ExpectationToString() const { // # Actual: TQ: 0x7fa8f0604190 SysQ: 0x7fa8f0604a30 Thread: 0x700006f1a000 // TaskQueue doesn't match - rtc::StringBuilder message; + webrtc::StringBuilder message; message.AppendFormat( "# Expected: TQ: %p Thread: %p\n" "# Actual: TQ: %p Thread: %p\n", @@ -69,7 +77,7 @@ std::string SequenceCheckerImpl::ExpectationToString() const { if ((valid_queue_ || current_queue) && valid_queue_ != current_queue) { message << "TaskQueue doesn't match\n"; - } else if (!rtc::IsThreadRefEqual(valid_thread_, current_thread)) { + } else if (!webrtc::IsThreadRefEqual(valid_thread_, current_thread)) { message << "Threads don't match\n"; } diff --git a/rtc_base/synchronization/sequence_checker_internal.h b/rtc_base/synchronization/sequence_checker_internal.h index 22503027a5..ea801497b4 100644 --- a/rtc_base/synchronization/sequence_checker_internal.h +++ b/rtc_base/synchronization/sequence_checker_internal.h @@ -31,6 +31,7 @@ namespace webrtc_sequence_checker_internal { class RTC_EXPORT SequenceCheckerImpl { public: explicit SequenceCheckerImpl(bool attach_to_current_thread); + explicit SequenceCheckerImpl(TaskQueueBase* attached_queue); ~SequenceCheckerImpl() = default; bool IsCurrent() const; @@ -48,7 +49,7 @@ class RTC_EXPORT SequenceCheckerImpl { mutable Mutex lock_; // These are mutable so that IsCurrent can set them. mutable bool attached_ RTC_GUARDED_BY(lock_); - mutable rtc::PlatformThreadRef valid_thread_ RTC_GUARDED_BY(lock_); + mutable PlatformThreadRef valid_thread_ RTC_GUARDED_BY(lock_); mutable const TaskQueueBase* valid_queue_ RTC_GUARDED_BY(lock_); }; @@ -58,7 +59,8 @@ class RTC_EXPORT SequenceCheckerImpl { // right version for your build configuration. 
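A hedged sketch of what the new TaskQueueBase* constructor above enables: binding the checker to a known queue at construction instead of lazily attaching to whichever context calls IsCurrent() first. The class and member names are illustrative only; production code would normally go through webrtc::SequenceChecker rather than the internal type.

#include "api/task_queue/task_queue_base.h"
#include "rtc_base/checks.h"
#include "rtc_base/synchronization/sequence_checker_internal.h"

class SketchCounter {
 public:
  explicit SketchCounter(webrtc::TaskQueueBase* queue) : checker_(queue) {}

  void Increment() {
    // Holds only on `queue`, even before the first call.
    RTC_DCHECK(checker_.IsCurrent());
    ++count_;
  }

 private:
  webrtc::webrtc_sequence_checker_internal::SequenceCheckerImpl checker_;
  int count_ = 0;
};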
class SequenceCheckerDoNothing { public: - explicit SequenceCheckerDoNothing(bool attach_to_current_thread) {} + explicit SequenceCheckerDoNothing(bool /* attach_to_current_thread */) {} + explicit SequenceCheckerDoNothing(TaskQueueBase* /* attached_queue */) {} bool IsCurrent() const { return true; } void Detach() {} }; @@ -66,7 +68,7 @@ class SequenceCheckerDoNothing { template std::enable_if_t, std::string> -ExpectationToString(const ThreadLikeObject* checker) { +ExpectationToString([[maybe_unused]] const ThreadLikeObject* checker) { #if RTC_DCHECK_IS_ON return checker->ExpectationToString(); #else diff --git a/rtc_base/synchronization/yield_policy.cc b/rtc_base/synchronization/yield_policy.cc index d883d42bf4..c8ba9f22c4 100644 --- a/rtc_base/synchronization/yield_policy.cc +++ b/rtc_base/synchronization/yield_policy.cc @@ -16,7 +16,7 @@ #include #endif -namespace rtc { +namespace webrtc { namespace { #if defined(ABSL_HAVE_THREAD_LOCAL) @@ -79,4 +79,4 @@ void ScopedYieldPolicy::YieldExecution() { current->YieldExecution(); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/synchronization/yield_policy.h b/rtc_base/synchronization/yield_policy.h index 5def6b7397..1756cf2105 100644 --- a/rtc_base/synchronization/yield_policy.h +++ b/rtc_base/synchronization/yield_policy.h @@ -10,7 +10,7 @@ #ifndef RTC_BASE_SYNCHRONIZATION_YIELD_POLICY_H_ #define RTC_BASE_SYNCHRONIZATION_YIELD_POLICY_H_ -namespace rtc { +namespace webrtc { class YieldInterface { public: virtual ~YieldInterface() = default; @@ -33,6 +33,15 @@ class ScopedYieldPolicy final { YieldInterface* const previous_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::ScopedYieldPolicy; +using ::webrtc::YieldInterface; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_SYNCHRONIZATION_YIELD_POLICY_H_ diff --git a/rtc_base/synchronization/yield_policy_unittest.cc b/rtc_base/synchronization/yield_policy_unittest.cc index 0bf38f4537..1eb9353ba7 100644 --- a/rtc_base/synchronization/yield_policy_unittest.cc +++ b/rtc_base/synchronization/yield_policy_unittest.cc @@ -16,7 +16,7 @@ #include "test/gmock.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { class MockYieldHandler : public YieldInterface { public: @@ -65,4 +65,4 @@ TEST(YieldPolicyTest, IsThreadLocal) { events[2].Wait(Event::kForever); other_thread.join(); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/system/BUILD.gn b/rtc_base/system/BUILD.gn index 77f5139a2f..b18114f107 100644 --- a/rtc_base/system/BUILD.gn +++ b/rtc_base/system/BUILD.gn @@ -29,8 +29,8 @@ rtc_library("file_wrapper") { "..:checks", "..:criticalsection", "..:safe_conversions", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } if (rtc_include_tests) { diff --git a/rtc_base/system/cocoa_threading.mm b/rtc_base/system/cocoa_threading.mm index c09862e7e5..6dbfccf5ca 100644 --- a/rtc_base/system/cocoa_threading.mm +++ b/rtc_base/system/cocoa_threading.mm @@ -17,7 +17,9 @@ void InitCocoaMultiThreading() { static BOOL is_cocoa_multithreaded = [NSThread isMultiThreaded]; if (!is_cocoa_multithreaded) { // +[NSObject class] is idempotent. 
- [NSThread detachNewThreadSelector:@selector(class) toTarget:[NSObject class] withObject:nil]; + [NSThread detachNewThreadSelector:@selector(class) + toTarget:[NSObject class] + withObject:nil]; is_cocoa_multithreaded = YES; RTC_DCHECK([NSThread isMultiThreaded]); } diff --git a/rtc_base/system/file_wrapper.cc b/rtc_base/system/file_wrapper.cc index f7befc6dc5..ed4ac3fcdb 100644 --- a/rtc_base/system/file_wrapper.cc +++ b/rtc_base/system/file_wrapper.cc @@ -10,15 +10,20 @@ #include "rtc_base/system/file_wrapper.h" +#include + #include +#include +#include +#include #include "absl/strings/string_view.h" +#include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" #ifdef _WIN32 #include #else -#include #endif #include @@ -69,28 +74,28 @@ FileWrapper& FileWrapper::operator=(FileWrapper&& other) { bool FileWrapper::SeekRelative(int64_t offset) { RTC_DCHECK(file_); - return fseek(file_, rtc::checked_cast(offset), SEEK_CUR) == 0; + return fseek(file_, checked_cast(offset), SEEK_CUR) == 0; } bool FileWrapper::SeekTo(int64_t position) { RTC_DCHECK(file_); - return fseek(file_, rtc::checked_cast(position), SEEK_SET) == 0; + return fseek(file_, checked_cast(position), SEEK_SET) == 0; } -long FileWrapper::FileSize() { +std::optional FileWrapper::FileSize() { if (file_ == nullptr) - return -1; + return std::nullopt; long original_position = ftell(file_); if (original_position < 0) - return -1; + return std::nullopt; int seek_error = fseek(file_, 0, SEEK_END); if (seek_error) - return -1; + return std::nullopt; long file_size = ftell(file_); seek_error = fseek(file_, original_position, SEEK_SET); if (seek_error) - return -1; - return file_size; + return std::nullopt; + return checked_cast(file_size); } bool FileWrapper::Flush() { diff --git a/rtc_base/system/file_wrapper.h b/rtc_base/system/file_wrapper.h index 5e1e3d6a16..eafc685e27 100644 --- a/rtc_base/system/file_wrapper.h +++ b/rtc_base/system/file_wrapper.h @@ -12,8 +12,10 @@ #define RTC_BASE_SYSTEM_FILE_WRAPPER_H_ #include +#include #include +#include #include #include "absl/strings/string_view.h" @@ -85,10 +87,10 @@ class FileWrapper final { // Seek to given position. bool SeekTo(int64_t position); - // Returns the file size or -1 if a size could not be determined. + // Returns the file size or std::nullopt if the size could not be determined. // (A file size might not exists for non-seekable files or file-like // objects, for example /dev/tty on unix.) - long FileSize(); + std::optional FileSize(); // Returns number of bytes read. Short count indicates EOF or error. size_t Read(void* buf, size_t length); diff --git a/rtc_base/system/file_wrapper_unittest.cc b/rtc_base/system/file_wrapper_unittest.cc index 980b565c73..b83c1df933 100644 --- a/rtc_base/system/file_wrapper_unittest.cc +++ b/rtc_base/system/file_wrapper_unittest.cc @@ -21,7 +21,8 @@ TEST(FileWrapper, FileSize) { std::string test_name = std::string(test_info->test_case_name()) + "_" + test_info->name(); std::replace(test_name.begin(), test_name.end(), '/', '_'); - const std::string temp_filename = test::OutputPath() + test_name; + const std::string temp_filename = + test::OutputPathWithRandomDirectory() + test_name; // Write { diff --git a/rtc_base/system_time.cc b/rtc_base/system_time.cc index 058e6c2990..c6a9b7b766 100644 --- a/rtc_base/system_time.cc +++ b/rtc_base/system_time.cc @@ -9,7 +9,7 @@ */ // If WEBRTC_EXCLUDE_SYSTEM_TIME is set, an implementation of -// rtc::SystemTimeNanos() must be provided externally. 
+// webrtc::SystemTimeNanos() must be provided externally. #ifndef WEBRTC_EXCLUDE_SYSTEM_TIME #include @@ -38,7 +38,7 @@ #include "rtc_base/system_time.h" #include "rtc_base/time_utils.h" -namespace rtc { +namespace webrtc { int64_t SystemTimeNanos() { int64_t ticks; @@ -56,7 +56,7 @@ int64_t SystemTimeNanos() { RTC_DCHECK_NE(b, 0); RTC_DCHECK_LE(a, std::numeric_limits::max() / b) << "The multiplication " << a << " * " << b << " overflows"; - return rtc::dchecked_cast(a * b); + return webrtc::dchecked_cast(a * b); }; ticks = mul(mach_absolute_time(), timebase.numer) / timebase.denom; #elif defined(WEBRTC_POSIX) @@ -90,7 +90,7 @@ int64_t SystemTimeNanos() { ticks = now + (num_wrap_timegettime << 32); // TODO(deadbeef): Calculate with nanosecond precision. Otherwise, we're // just wasting a multiply and divide when doing Time() on Windows. - ticks = ticks * kNumNanosecsPerMillisec; + ticks = ticks * webrtc::kNumNanosecsPerMillisec; #pragma clang diagnostic pop #else #error Unsupported platform. @@ -98,5 +98,5 @@ int64_t SystemTimeNanos() { return ticks; } -} // namespace rtc +} // namespace webrtc #endif // WEBRTC_EXCLUDE_SYSTEM_TIME diff --git a/rtc_base/system_time.h b/rtc_base/system_time.h index c0ebc2a217..2cae4286af 100644 --- a/rtc_base/system_time.h +++ b/rtc_base/system_time.h @@ -13,12 +13,23 @@ #include -namespace rtc { +namespace webrtc { // Returns the actual system time, even if a clock is set for testing. // Useful for timeouts while using a test clock, or for logging. int64_t SystemTimeNanos(); -} // namespace rtc +} // namespace webrtc + +// TODO(bugs.webrtc.org/4222596): Remove once Chrome has migrated. +#define RTC_SYSTEM_TIME_IN_WEBRTC_NAMESPACE 1 + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::SystemTimeNanos; +} +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_SYSTEM_TIME_H_ diff --git a/rtc_base/task_queue.cc b/rtc_base/task_queue.cc deleted file mode 100644 index 7c972ed152..0000000000 --- a/rtc_base/task_queue.cc +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "rtc_base/task_queue.h" - -#include "api/task_queue/task_queue_base.h" - -namespace rtc { - -TaskQueue::TaskQueue( - std::unique_ptr task_queue) - : impl_(task_queue.release()) {} - -TaskQueue::~TaskQueue() { - // There might running task that tries to rescheduler itself to the TaskQueue - // and not yet aware TaskQueue destructor is called. - // Calling back to TaskQueue::PostTask need impl_ pointer still be valid, so - // do not invalidate impl_ pointer until Delete returns. - impl_->Delete(); -} - -bool TaskQueue::IsCurrent() const { - return impl_->IsCurrent(); -} - -} // namespace rtc diff --git a/rtc_base/task_queue.h b/rtc_base/task_queue.h deleted file mode 100644 index e52c49cf00..0000000000 --- a/rtc_base/task_queue.h +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. 
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_TASK_QUEUE_H_ -#define RTC_BASE_TASK_QUEUE_H_ - -#include - -#include -#include - -#include "absl/functional/any_invocable.h" -#include "absl/memory/memory.h" -#include "api/task_queue/task_queue_base.h" -#include "api/task_queue/task_queue_factory.h" -#include "rtc_base/system/rtc_export.h" -#include "rtc_base/thread_annotations.h" - -namespace rtc { -// Implements a task queue that asynchronously executes tasks in a way that -// guarantees that they're executed in FIFO order and that tasks never overlap. -// Tasks may always execute on the same worker thread and they may not. -// To DCHECK that tasks are executing on a known task queue, use IsCurrent(). -// -// Here are some usage examples: -// -// 1) Asynchronously running a lambda: -// -// class MyClass { -// ... -// TaskQueue queue_("MyQueue"); -// }; -// -// void MyClass::StartWork() { -// queue_.PostTask([]() { Work(); }); -// ... -// -// 2) Posting a custom task on a timer. The task posts itself again after -// every running: -// -// class TimerTask : public QueuedTask { -// public: -// TimerTask() {} -// private: -// bool Run() override { -// ++count_; -// TaskQueueBase::Current()->PostDelayedTask( -// absl::WrapUnique(this), 1000); -// // Ownership has been transferred to the next occurance, -// // so return false to prevent from being deleted now. -// return false; -// } -// int count_ = 0; -// }; -// ... -// queue_.PostDelayedTask(std::make_unique(), 1000); -// -// For more examples, see task_queue_unittests.cc. -// -// A note on destruction: -// -// When a TaskQueue is deleted, pending tasks will not be executed but they will -// be deleted. The deletion of tasks may happen asynchronously after the -// TaskQueue itself has been deleted or it may happen synchronously while the -// TaskQueue instance is being deleted. This may vary from one OS to the next -// so assumptions about lifetimes of pending tasks should not be made. -class RTC_LOCKABLE RTC_EXPORT TaskQueue { - public: - // TaskQueue priority levels. On some platforms these will map to thread - // priorities, on others such as Mac and iOS, GCD queue priorities. - using Priority = ::webrtc::TaskQueueFactory::Priority; - - explicit TaskQueue(std::unique_ptr task_queue); - ~TaskQueue(); - - TaskQueue(const TaskQueue&) = delete; - TaskQueue& operator=(const TaskQueue&) = delete; - - // Used for DCHECKing the current queue. - bool IsCurrent() const; - - // Returns non-owning pointer to the task queue implementation. 
- webrtc::TaskQueueBase* Get() { return impl_; } - - void PostTask( - absl::AnyInvocable task, - const webrtc::Location& location = webrtc::Location::Current()) { - impl_->PostTask(std::move(task), location); - } - void PostDelayedTask( - absl::AnyInvocable task, - webrtc::TimeDelta delay, - const webrtc::Location& location = webrtc::Location::Current()) { - impl_->PostDelayedTask(std::move(task), delay, location); - } - void PostDelayedHighPrecisionTask( - absl::AnyInvocable task, - webrtc::TimeDelta delay, - const webrtc::Location& location = webrtc::Location::Current()) { - impl_->PostDelayedHighPrecisionTask(std::move(task), delay, location); - } - - private: - webrtc::TaskQueueBase* const impl_; -}; - -} // namespace rtc - -#endif // RTC_BASE_TASK_QUEUE_H_ diff --git a/rtc_base/task_queue_for_test.cc b/rtc_base/task_queue_for_test.cc index cb6b23ceae..e8993edcd1 100644 --- a/rtc_base/task_queue_for_test.cc +++ b/rtc_base/task_queue_for_test.cc @@ -10,12 +10,28 @@ #include "rtc_base/task_queue_for_test.h" +#include +#include + #include "api/task_queue/default_task_queue_factory.h" +#include "api/task_queue/task_queue_base.h" namespace webrtc { -TaskQueueForTest::TaskQueueForTest(absl::string_view name, Priority priority) - : TaskQueue( - CreateDefaultTaskQueueFactory()->CreateTaskQueue(name, priority)) {} +TaskQueueForTest::TaskQueueForTest( + std::unique_ptr task_queue) + : impl_(std::move(task_queue)) {} + +TaskQueueForTest::TaskQueueForTest(absl::string_view name, + TaskQueueFactory::Priority priority) + : impl_(CreateDefaultTaskQueueFactory()->CreateTaskQueue(name, priority)) {} + +TaskQueueForTest::~TaskQueueForTest() { + // Stop the TaskQueue before invalidating impl_ pointer so that tasks that + // race with the TaskQueueForTest destructor could still use TaskQueueForTest + // functions like 'IsCurrent'. 
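With rtc::TaskQueue deleted above, owning code holds the TaskQueueBase directly, which is also what TaskQueueForTest now does with its impl_ member. A rough sketch of the replacement pattern, assuming the default factory; the unique_ptr's TaskQueueDeleter calls TaskQueueBase::Delete(), the same call the destructor below makes by hand before release() so the smart pointer does not delete twice.

#include <memory>

#include "api/task_queue/default_task_queue_factory.h"
#include "api/task_queue/task_queue_base.h"
#include "api/task_queue/task_queue_factory.h"

void SketchOwnedTaskQueue() {
  std::unique_ptr<webrtc::TaskQueueBase, webrtc::TaskQueueDeleter> queue =
      webrtc::CreateDefaultTaskQueueFactory()->CreateTaskQueue(
          "sketch", webrtc::TaskQueueFactory::Priority::NORMAL);
  queue->PostTask([] { /* runs on the queue's thread, in FIFO order */ });
  // Destroying `queue` invokes TaskQueueDeleter, i.e. TaskQueueBase::Delete().
}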
+ impl_.get_deleter()(impl_.get()); + impl_.release(); +} } // namespace webrtc diff --git a/rtc_base/task_queue_for_test.h b/rtc_base/task_queue_for_test.h index 4c7f842abe..2f1ca08da4 100644 --- a/rtc_base/task_queue_for_test.h +++ b/rtc_base/task_queue_for_test.h @@ -17,41 +17,62 @@ #include "absl/strings/string_view.h" #include "api/function_view.h" #include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" -#include "rtc_base/task_queue.h" -#include "rtc_base/thread_annotations.h" namespace webrtc { -inline void SendTask(TaskQueueBase* task_queue, - rtc::FunctionView task) { +inline void SendTask(TaskQueueBase* task_queue, FunctionView task) { if (task_queue->IsCurrent()) { task(); return; } - rtc::Event event; + Event event; absl::Cleanup cleanup = [&event] { event.Set(); }; task_queue->PostTask([task, cleanup = std::move(cleanup)] { task(); }); - RTC_CHECK(event.Wait(/*give_up_after=*/rtc::Event::kForever, + RTC_CHECK(event.Wait(/*give_up_after=*/Event::kForever, /*warn_after=*/TimeDelta::Seconds(10))); } -class RTC_LOCKABLE TaskQueueForTest : public rtc::TaskQueue { +class TaskQueueForTest { public: - using rtc::TaskQueue::TaskQueue; - explicit TaskQueueForTest(absl::string_view name = "TestQueue", - Priority priority = Priority::NORMAL); + explicit TaskQueueForTest( + std::unique_ptr task_queue); + explicit TaskQueueForTest( + absl::string_view name = "TestQueue", + TaskQueueFactory::Priority priority = TaskQueueFactory::Priority::NORMAL); TaskQueueForTest(const TaskQueueForTest&) = delete; TaskQueueForTest& operator=(const TaskQueueForTest&) = delete; - ~TaskQueueForTest() = default; + ~TaskQueueForTest(); + + bool IsCurrent() const { return impl_->IsCurrent(); } + + // Returns non-owning pointer to the task queue implementation. + TaskQueueBase* Get() { return impl_.get(); } + + void PostTask( + absl::AnyInvocable task, + const webrtc::Location& location = webrtc::Location::Current()) { + impl_->PostTask(std::move(task), location); + } + void PostDelayedTask( + absl::AnyInvocable task, + webrtc::TimeDelta delay, + const webrtc::Location& location = webrtc::Location::Current()) { + impl_->PostDelayedTask(std::move(task), delay, location); + } + void PostDelayedHighPrecisionTask( + absl::AnyInvocable task, + webrtc::TimeDelta delay, + const webrtc::Location& location = webrtc::Location::Current()) { + impl_->PostDelayedHighPrecisionTask(std::move(task), delay, location); + } // A convenience, test-only method that blocks the current thread while // a task executes on the task queue. - void SendTask(rtc::FunctionView task) { - ::webrtc::SendTask(Get(), task); - } + void SendTask(FunctionView task) { ::webrtc::SendTask(Get(), task); } // Wait for the completion of all tasks posted prior to the // WaitForPreviouslyPostedTasks() call. @@ -61,6 +82,9 @@ class RTC_LOCKABLE TaskQueueForTest : public rtc::TaskQueue { // that all already posted tasks on the queue get executed. SendTask([]() {}); } + + private: + std::unique_ptr impl_; }; } // namespace webrtc diff --git a/rtc_base/task_queue_libevent.cc b/rtc_base/task_queue_libevent.cc deleted file mode 100644 index e27c6eff9f..0000000000 --- a/rtc_base/task_queue_libevent.cc +++ /dev/null @@ -1,336 +0,0 @@ -/* - * Copyright 2016 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/task_queue_libevent.h" - -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include - -#include "absl/container/inlined_vector.h" -#include "absl/functional/any_invocable.h" -#include "absl/strings/string_view.h" -#include "api/task_queue/task_queue_base.h" -#include "api/units/time_delta.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_conversions.h" -#include "rtc_base/platform_thread.h" -#include "rtc_base/platform_thread_types.h" -#include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_annotations.h" -#include "rtc_base/time_utils.h" -#include "third_party/libevent/event.h" - -namespace webrtc { -namespace { -constexpr char kQuit = 1; -constexpr char kRunTasks = 2; - -using Priority = TaskQueueFactory::Priority; - -// This ignores the SIGPIPE signal on the calling thread. -// This signal can be fired when trying to write() to a pipe that's being -// closed or while closing a pipe that's being written to. -// We can run into that situation so we ignore this signal and continue as -// normal. -// As a side note for this implementation, it would be great if we could safely -// restore the sigmask, but unfortunately the operation of restoring it, can -// itself actually cause SIGPIPE to be signaled :-| (e.g. on MacOS) -// The SIGPIPE signal by default causes the process to be terminated, so we -// don't want to risk that. -// An alternative to this approach is to ignore the signal for the whole -// process: -// signal(SIGPIPE, SIG_IGN); -void IgnoreSigPipeSignalOnCurrentThread() { - sigset_t sigpipe_mask; - sigemptyset(&sigpipe_mask); - sigaddset(&sigpipe_mask, SIGPIPE); - pthread_sigmask(SIG_BLOCK, &sigpipe_mask, nullptr); -} - -bool SetNonBlocking(int fd) { - const int flags = fcntl(fd, F_GETFL); - RTC_CHECK(flags != -1); - return (flags & O_NONBLOCK) || fcntl(fd, F_SETFL, flags | O_NONBLOCK) != -1; -} - -// TODO(tommi): This is a hack to support two versions of libevent that we're -// compatible with. The method we really want to call is event_assign(), -// since event_set() has been marked as deprecated (and doesn't accept -// passing event_base__ as a parameter). However, the version of libevent -// that we have in Chromium, doesn't have event_assign(), so we need to call -// event_set() there. 
-void EventAssign(struct event* ev, - struct event_base* base, - int fd, - short events, - void (*callback)(int, short, void*), - void* arg) { -#if defined(_EVENT2_EVENT_H_) - RTC_CHECK_EQ(0, event_assign(ev, base, fd, events, callback, arg)); -#else - event_set(ev, fd, events, callback, arg); - RTC_CHECK_EQ(0, event_base_set(base, ev)); -#endif -} - -rtc::ThreadPriority TaskQueuePriorityToThreadPriority(Priority priority) { - switch (priority) { - case Priority::HIGH: - return rtc::ThreadPriority::kRealtime; - case Priority::LOW: - return rtc::ThreadPriority::kLow; - case Priority::NORMAL: - return rtc::ThreadPriority::kNormal; - } -} - -class TaskQueueLibevent final : public TaskQueueBase { - public: - TaskQueueLibevent(absl::string_view queue_name, rtc::ThreadPriority priority); - - void Delete() override; - - protected: - void PostTaskImpl(absl::AnyInvocable task, - const PostTaskTraits& traits, - const Location& location) override; - void PostDelayedTaskImpl(absl::AnyInvocable task, - TimeDelta delay, - const PostDelayedTaskTraits& traits, - const Location& location) override; - - private: - struct TimerEvent; - - void PostDelayedTaskOnTaskQueue(absl::AnyInvocable task, - TimeDelta delay); - - ~TaskQueueLibevent() override = default; - - static void OnWakeup(int socket, short flags, void* context); // NOLINT - static void RunTimer(int fd, short flags, void* context); // NOLINT - - bool is_active_ = true; - int wakeup_pipe_in_ = -1; - int wakeup_pipe_out_ = -1; - event_base* event_base_; - event wakeup_event_; - rtc::PlatformThread thread_; - Mutex pending_lock_; - absl::InlinedVector, 4> pending_ - RTC_GUARDED_BY(pending_lock_); - // Holds a list of events pending timers for cleanup when the loop exits. - std::list pending_timers_; -}; - -struct TaskQueueLibevent::TimerEvent { - TimerEvent(TaskQueueLibevent* task_queue, absl::AnyInvocable task) - : task_queue(task_queue), task(std::move(task)) {} - ~TimerEvent() { event_del(&ev); } - - event ev; - TaskQueueLibevent* task_queue; - absl::AnyInvocable task; -}; - -TaskQueueLibevent::TaskQueueLibevent(absl::string_view queue_name, - rtc::ThreadPriority priority) - : event_base_(event_base_new()) { - int fds[2]; - RTC_CHECK(pipe(fds) == 0); - SetNonBlocking(fds[0]); - SetNonBlocking(fds[1]); - wakeup_pipe_out_ = fds[0]; - wakeup_pipe_in_ = fds[1]; - - EventAssign(&wakeup_event_, event_base_, wakeup_pipe_out_, - EV_READ | EV_PERSIST, OnWakeup, this); - event_add(&wakeup_event_, 0); - thread_ = rtc::PlatformThread::SpawnJoinable( - [this] { - { - CurrentTaskQueueSetter set_current(this); - while (is_active_) - event_base_loop(event_base_, 0); - - // Ensure remaining deleted tasks are destroyed with Current() set up - // to this task queue. - absl::InlinedVector, 4> pending; - MutexLock lock(&pending_lock_); - pending_.swap(pending); - } - for (TimerEvent* timer : pending_timers_) - delete timer; - -#if RTC_DCHECK_IS_ON - MutexLock lock(&pending_lock_); - RTC_DCHECK(pending_.empty()); -#endif - }, - queue_name, rtc::ThreadAttributes().SetPriority(priority)); -} - -void TaskQueueLibevent::Delete() { - RTC_DCHECK(!IsCurrent()); - struct timespec ts; - char message = kQuit; - while (write(wakeup_pipe_in_, &message, sizeof(message)) != sizeof(message)) { - // The queue is full, so we have no choice but to wait and retry. 
- RTC_CHECK_EQ(EAGAIN, errno); - ts.tv_sec = 0; - ts.tv_nsec = 1000000; - nanosleep(&ts, nullptr); - } - - thread_.Finalize(); - - event_del(&wakeup_event_); - - IgnoreSigPipeSignalOnCurrentThread(); - - close(wakeup_pipe_in_); - close(wakeup_pipe_out_); - wakeup_pipe_in_ = -1; - wakeup_pipe_out_ = -1; - - event_base_free(event_base_); - delete this; -} - -void TaskQueueLibevent::PostTaskImpl(absl::AnyInvocable task, - const PostTaskTraits& traits, - const Location& location) { - { - MutexLock lock(&pending_lock_); - bool had_pending_tasks = !pending_.empty(); - pending_.push_back(std::move(task)); - - // Only write to the pipe if there were no pending tasks before this one - // since the thread could be sleeping. If there were already pending tasks - // then we know there's either a pending write in the pipe or the thread has - // not yet processed the pending tasks. In either case, the thread will - // eventually wake up and process all pending tasks including this one. - if (had_pending_tasks) { - return; - } - } - - // Note: This behvior outlined above ensures we never fill up the pipe write - // buffer since there will only ever be 1 byte pending. - char message = kRunTasks; - RTC_CHECK_EQ(write(wakeup_pipe_in_, &message, sizeof(message)), - sizeof(message)); -} - -void TaskQueueLibevent::PostDelayedTaskOnTaskQueue( - absl::AnyInvocable task, - TimeDelta delay) { - // libevent api is not thread safe by default, thus event_add need to be - // called on the `thread_`. - RTC_DCHECK(IsCurrent()); - - TimerEvent* timer = new TimerEvent(this, std::move(task)); - EventAssign(&timer->ev, event_base_, -1, 0, &TaskQueueLibevent::RunTimer, - timer); - pending_timers_.push_back(timer); - timeval tv = {.tv_sec = rtc::dchecked_cast(delay.us() / 1'000'000), - .tv_usec = rtc::dchecked_cast(delay.us() % 1'000'000)}; - event_add(&timer->ev, &tv); -} - -void TaskQueueLibevent::PostDelayedTaskImpl(absl::AnyInvocable task, - TimeDelta delay, - const PostDelayedTaskTraits& traits, - const Location& location) { - if (IsCurrent()) { - PostDelayedTaskOnTaskQueue(std::move(task), delay); - } else { - int64_t posted_us = rtc::TimeMicros(); - PostTask([posted_us, delay, task = std::move(task), this]() mutable { - // Compensate for the time that has passed since the posting. - TimeDelta post_time = TimeDelta::Micros(rtc::TimeMicros() - posted_us); - PostDelayedTaskOnTaskQueue( - std::move(task), std::max(delay - post_time, TimeDelta::Zero())); - }); - } -} - -// static -void TaskQueueLibevent::OnWakeup(int socket, - short flags, // NOLINT - void* context) { - TaskQueueLibevent* me = static_cast(context); - RTC_DCHECK(me->wakeup_pipe_out_ == socket); - char buf; - RTC_CHECK(sizeof(buf) == read(socket, &buf, sizeof(buf))); - switch (buf) { - case kQuit: - me->is_active_ = false; - event_base_loopbreak(me->event_base_); - break; - case kRunTasks: { - absl::InlinedVector, 4> tasks; - { - MutexLock lock(&me->pending_lock_); - tasks.swap(me->pending_); - } - RTC_DCHECK(!tasks.empty()); - for (auto& task : tasks) { - std::move(task)(); - // Prefer to delete the `task` before running the next one. 
- task = nullptr; - } - break; - } - default: - RTC_DCHECK_NOTREACHED(); - break; - } -} - -// static -void TaskQueueLibevent::RunTimer(int fd, - short flags, // NOLINT - void* context) { - TimerEvent* timer = static_cast(context); - std::move(timer->task)(); - timer->task_queue->pending_timers_.remove(timer); - delete timer; -} - -class TaskQueueLibeventFactory final : public TaskQueueFactory { - public: - std::unique_ptr CreateTaskQueue( - absl::string_view name, - Priority priority) const override { - return std::unique_ptr( - new TaskQueueLibevent(name, - TaskQueuePriorityToThreadPriority(priority))); - } -}; - -} // namespace - -std::unique_ptr CreateTaskQueueLibeventFactory() { - return std::make_unique(); -} - -} // namespace webrtc diff --git a/rtc_base/task_queue_stdlib.cc b/rtc_base/task_queue_stdlib.cc index 1ac01e1830..0895eb18cc 100644 --- a/rtc_base/task_queue_stdlib.cc +++ b/rtc_base/task_queue_stdlib.cc @@ -16,6 +16,7 @@ #include #include #include +#include #include #include "absl/functional/any_invocable.h" @@ -34,21 +35,21 @@ namespace webrtc { namespace { -rtc::ThreadPriority TaskQueuePriorityToThreadPriority( +ThreadPriority TaskQueuePriorityToThreadPriority( TaskQueueFactory::Priority priority) { switch (priority) { case TaskQueueFactory::Priority::HIGH: - return rtc::ThreadPriority::kRealtime; + return ThreadPriority::kRealtime; case TaskQueueFactory::Priority::LOW: - return rtc::ThreadPriority::kLow; + return ThreadPriority::kLow; case TaskQueueFactory::Priority::NORMAL: - return rtc::ThreadPriority::kNormal; + return ThreadPriority::kNormal; } } class TaskQueueStdlib final : public TaskQueueBase { public: - TaskQueueStdlib(absl::string_view queue_name, rtc::ThreadPriority priority); + TaskQueueStdlib(absl::string_view queue_name, ThreadPriority priority); ~TaskQueueStdlib() override = default; void Delete() override; @@ -79,12 +80,12 @@ class TaskQueueStdlib final : public TaskQueueBase { struct NextTask { bool final_task = false; absl::AnyInvocable run_task; - TimeDelta sleep_time = rtc::Event::kForever; + TimeDelta sleep_time = Event::kForever; }; - static rtc::PlatformThread InitializeThread(TaskQueueStdlib* me, - absl::string_view queue_name, - rtc::ThreadPriority priority); + static PlatformThread InitializeThread(TaskQueueStdlib* me, + absl::string_view queue_name, + ThreadPriority priority); NextTask GetNextTask(); @@ -93,7 +94,7 @@ class TaskQueueStdlib final : public TaskQueueBase { void NotifyWake(); // Signaled whenever a new task is pending. - rtc::Event flag_notify_; + Event flag_notify_; Mutex pending_lock_; @@ -122,28 +123,27 @@ class TaskQueueStdlib final : public TaskQueueBase { // tasks (including delayed tasks). // Placing this last ensures the thread doesn't touch uninitialized attributes // throughout it's lifetime. 
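Related to the flag_notify_.Wait(task.sleep_time, task.sleep_time) change above and the AvoidsSpammingLogOnInactivity test added below: a small sketch of the two-argument Wait, whose second parameter is the warn-after threshold (parameter names as used by SendTask in task_queue_for_test.h). Passing the full sleep interval means an intentionally idle queue thread no longer triggers the long-wait warning log.

#include "api/units/time_delta.h"
#include "rtc_base/event.h"

void SketchQuietWait(webrtc::Event& flag_notify, webrtc::TimeDelta sleep_time) {
  // Give up and warn after the same duration, so a deliberately long idle
  // wait does not emit periodic "waited too long" style log lines.
  flag_notify.Wait(/*give_up_after=*/sleep_time, /*warn_after=*/sleep_time);
}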
- rtc::PlatformThread thread_; + PlatformThread thread_; }; TaskQueueStdlib::TaskQueueStdlib(absl::string_view queue_name, - rtc::ThreadPriority priority) + ThreadPriority priority) : flag_notify_(/*manual_reset=*/false, /*initially_signaled=*/false), thread_(InitializeThread(this, queue_name, priority)) {} // static -rtc::PlatformThread TaskQueueStdlib::InitializeThread( - TaskQueueStdlib* me, - absl::string_view queue_name, - rtc::ThreadPriority priority) { - rtc::Event started; - auto thread = rtc::PlatformThread::SpawnJoinable( +PlatformThread TaskQueueStdlib::InitializeThread(TaskQueueStdlib* me, + absl::string_view queue_name, + ThreadPriority priority) { + Event started; + auto thread = PlatformThread::SpawnJoinable( [&started, me] { CurrentTaskQueueSetter set_current(me); started.Set(); me->ProcessTasks(); }, - queue_name, rtc::ThreadAttributes().SetPriority(priority)); - started.Wait(rtc::Event::kForever); + queue_name, ThreadAttributes().SetPriority(priority)); + started.Wait(Event::kForever); return thread; } @@ -177,7 +177,7 @@ void TaskQueueStdlib::PostDelayedTaskImpl(absl::AnyInvocable task, const PostDelayedTaskTraits& traits, const Location& location) { DelayedEntryTimeout delayed_entry; - delayed_entry.next_fire_at_us = rtc::TimeMicros() + delay.us(); + delayed_entry.next_fire_at_us = TimeMicros() + delay.us(); { MutexLock lock(&pending_lock_); @@ -191,7 +191,7 @@ void TaskQueueStdlib::PostDelayedTaskImpl(absl::AnyInvocable task, TaskQueueStdlib::NextTask TaskQueueStdlib::GetNextTask() { NextTask result; - const int64_t tick_us = rtc::TimeMicros(); + const int64_t tick_us = TimeMicros(); MutexLock lock(&pending_lock_); @@ -200,12 +200,12 @@ TaskQueueStdlib::NextTask TaskQueueStdlib::GetNextTask() { return result; } - if (delayed_queue_.size() > 0) { + if (!delayed_queue_.empty()) { auto delayed_entry = delayed_queue_.begin(); const auto& delay_info = delayed_entry->first; auto& delay_run = delayed_entry->second; if (tick_us >= delay_info.next_fire_at_us) { - if (pending_queue_.size() > 0) { + if (!pending_queue_.empty()) { auto& entry = pending_queue_.front(); auto& entry_order = entry.first; auto& entry_run = entry.second; @@ -225,7 +225,7 @@ TaskQueueStdlib::NextTask TaskQueueStdlib::GetNextTask() { DivideRoundUp(delay_info.next_fire_at_us - tick_us, 1'000)); } - if (pending_queue_.size() > 0) { + if (!pending_queue_.empty()) { auto& entry = pending_queue_.front(); result.run_task = std::move(entry.second); pending_queue_.pop(); @@ -249,7 +249,7 @@ void TaskQueueStdlib::ProcessTasks() { continue; } - flag_notify_.Wait(task.sleep_time); + flag_notify_.Wait(task.sleep_time, task.sleep_time); } // Ensure remaining deleted tasks are destroyed with Current() set up to this diff --git a/rtc_base/task_queue_stdlib_unittest.cc b/rtc_base/task_queue_stdlib_unittest.cc index 0654e9719c..c090b5e497 100644 --- a/rtc_base/task_queue_stdlib_unittest.cc +++ b/rtc_base/task_queue_stdlib_unittest.cc @@ -10,7 +10,15 @@ #include "rtc_base/task_queue_stdlib.h" +#include +#include + +#include "api/task_queue/task_queue_factory.h" #include "api/task_queue/task_queue_test.h" +#include "api/units/time_delta.h" +#include "rtc_base/event.h" +#include "rtc_base/logging.h" +#include "system_wrappers/include/sleep.h" #include "test/gtest.h" namespace webrtc { @@ -25,5 +33,32 @@ INSTANTIATE_TEST_SUITE_P(TaskQueueStdlib, TaskQueueTest, ::testing::Values(CreateTaskQueueFactory)); +class StringPtrLogSink : public LogSink { + public: + explicit StringPtrLogSink(std::string* log_data) : 
log_data_(log_data) {} + + private: + void OnLogMessage(const std::string& message) override { + OnLogMessage(absl::string_view(message)); + } + void OnLogMessage(absl::string_view message) override { + log_data_->append(message.begin(), message.end()); + } + std::string* const log_data_; +}; + +TEST(TaskQueueStdlib, AvoidsSpammingLogOnInactivity) { + std::string log_output; + StringPtrLogSink stream(&log_output); + LogMessage::AddLogToStream(&stream, LS_VERBOSE); + auto task_queue = CreateTaskQueueStdlibFactory()->CreateTaskQueue( + "test", TaskQueueFactory::Priority::NORMAL); + auto wait_duration = Event::kDefaultWarnDuration + TimeDelta::Seconds(1); + SleepMs(wait_duration.ms()); + EXPECT_EQ(log_output.length(), 0u); + task_queue = nullptr; + LogMessage::RemoveLogToStream(&stream); +} + } // namespace } // namespace webrtc diff --git a/rtc_base/task_queue_unittest.cc b/rtc_base/task_queue_unittest.cc index 579dc3cced..6bd99d3cce 100644 --- a/rtc_base/task_queue_unittest.cc +++ b/rtc_base/task_queue_unittest.cc @@ -28,10 +28,10 @@ #include "rtc_base/time_utils.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { -using ::webrtc::TimeDelta; + // Noop on all platforms except Windows, where it turns on high precision // multimedia timers which increases the precision of TimeMillis() while in // scope. @@ -51,12 +51,6 @@ class EnableHighResTimers { #endif }; -void CheckCurrent(Event* signal, TaskQueue* queue) { - EXPECT_TRUE(queue->IsCurrent()); - if (signal) - signal->Set(); -} - } // namespace // This task needs to be run manually due to the slowness of some of our bots. @@ -66,12 +60,16 @@ TEST(TaskQueueTest, DISABLED_PostDelayedHighRes) { static const char kQueueName[] = "PostDelayedHighRes"; Event event; - webrtc::TaskQueueForTest queue(kQueueName, TaskQueue::Priority::HIGH); + TaskQueueForTest queue(kQueueName, TaskQueueFactory::Priority::HIGH); - uint32_t start = Time(); - queue.PostDelayedTask([&event, &queue] { CheckCurrent(&event, &queue); }, - TimeDelta::Millis(3)); - EXPECT_TRUE(event.Wait(webrtc::TimeDelta::Seconds(1))); + uint32_t start = TimeMillis(); + queue.PostDelayedTask( + [&event, &queue] { + EXPECT_TRUE(queue.IsCurrent()); + event.Set(); + }, + TimeDelta::Millis(3)); + EXPECT_TRUE(event.Wait(TimeDelta::Seconds(1))); uint32_t end = TimeMillis(); // These tests are a little relaxed due to how "powerful" our test bots can // be. 
Most recently we've seen windows bots fire the callback after 94-99ms, @@ -80,4 +78,4 @@ TEST(TaskQueueTest, DISABLED_PostDelayedHighRes) { EXPECT_NEAR(end - start, 3, 3u); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/task_queue_win.cc b/rtc_base/task_queue_win.cc index 7e46d58e27..56d079b186 100644 --- a/rtc_base/task_queue_win.cc +++ b/rtc_base/task_queue_win.cc @@ -26,12 +26,12 @@ #include #include #include +#include #include #include #include "absl/functional/any_invocable.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -51,26 +51,26 @@ namespace { void CALLBACK InitializeQueueThread(ULONG_PTR param) { MSG msg; ::PeekMessage(&msg, nullptr, WM_USER, WM_USER, PM_NOREMOVE); - rtc::Event* data = reinterpret_cast(param); + webrtc::Event* data = reinterpret_cast(param); data->Set(); } -rtc::ThreadPriority TaskQueuePriorityToThreadPriority( +ThreadPriority TaskQueuePriorityToThreadPriority( TaskQueueFactory::Priority priority) { switch (priority) { case TaskQueueFactory::Priority::HIGH: - return rtc::ThreadPriority::kRealtime; + return ThreadPriority::kRealtime; case TaskQueueFactory::Priority::LOW: - return rtc::ThreadPriority::kLow; + return ThreadPriority::kLow; case TaskQueueFactory::Priority::NORMAL: - return rtc::ThreadPriority::kNormal; + return ThreadPriority::kNormal; } } Timestamp CurrentTime() { static const UINT kPeriod = 1; bool high_res = (timeBeginPeriod(kPeriod) == TIMERR_NOERROR); - Timestamp ret = Timestamp::Micros(rtc::TimeMicros()); + Timestamp ret = Timestamp::Micros(TimeMicros()); if (high_res) timeEndPeriod(kPeriod); return ret; @@ -156,7 +156,7 @@ class MultimediaTimer { class TaskQueueWin : public TaskQueueBase { public: - TaskQueueWin(absl::string_view queue_name, rtc::ThreadPriority priority); + TaskQueueWin(absl::string_view queue_name, ThreadPriority priority); ~TaskQueueWin() override = default; void Delete() override; @@ -187,7 +187,7 @@ class TaskQueueWin : public TaskQueueBase { std::greater> timer_tasks_; UINT_PTR timer_id_ = 0; - rtc::PlatformThread thread_; + PlatformThread thread_; Mutex pending_lock_; std::queue> pending_ RTC_GUARDED_BY(pending_lock_); @@ -195,22 +195,22 @@ class TaskQueueWin : public TaskQueueBase { }; TaskQueueWin::TaskQueueWin(absl::string_view queue_name, - rtc::ThreadPriority priority) + ThreadPriority priority) : in_queue_(::CreateEvent(nullptr, true, false, nullptr)) { RTC_DCHECK(in_queue_); - thread_ = rtc::PlatformThread::SpawnJoinable( + thread_ = webrtc::PlatformThread::SpawnJoinable( [this] { RunThreadMain(); }, queue_name, - rtc::ThreadAttributes().SetPriority(priority)); + webrtc::ThreadAttributes().SetPriority(priority)); - rtc::Event event(false, false); + Event event(false, false); RTC_CHECK(thread_.QueueAPC(&InitializeQueueThread, reinterpret_cast(&event))); - event.Wait(rtc::Event::kForever); + event.Wait(Event::kForever); } void TaskQueueWin::Delete() { RTC_DCHECK(!IsCurrent()); - RTC_CHECK(thread_.GetHandle() != absl::nullopt); + RTC_CHECK(thread_.GetHandle() != std::nullopt); while ( !::PostThreadMessage(GetThreadId(*thread_.GetHandle()), WM_QUIT, 0, 0)) { RTC_CHECK_EQ(ERROR_NOT_ENOUGH_QUOTA, ::GetLastError()); @@ -239,7 +239,7 @@ void TaskQueueWin::PostDelayedTaskImpl(absl::AnyInvocable task, } auto* task_info = new DelayedTaskInfo(delay, std::move(task)); - RTC_CHECK(thread_.GetHandle() != absl::nullopt); + RTC_CHECK(thread_.GetHandle() != std::nullopt); 
if (!::PostThreadMessage(GetThreadId(*thread_.GetHandle()), WM_QUEUE_DELAYED_TASK, 0, reinterpret_cast(task_info))) { diff --git a/rtc_base/task_utils/BUILD.gn b/rtc_base/task_utils/BUILD.gn index 5fcf25ef0b..ee6abc3758 100644 --- a/rtc_base/task_utils/BUILD.gn +++ b/rtc_base/task_utils/BUILD.gn @@ -22,8 +22,8 @@ rtc_library("repeating_task") { "../../api/units:time_delta", "../../api/units:timestamp", "../../system_wrappers:system_wrappers", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable" ] } if (rtc_include_tests) { @@ -33,7 +33,6 @@ if (rtc_include_tests) { deps = [ ":repeating_task", "..:rtc_event", - "..:rtc_task_queue", "..:task_queue_for_test", "../../api/task_queue", "../../api/task_queue/test:mock_task_queue_base", @@ -41,7 +40,7 @@ if (rtc_include_tests) { "../../api/units:timestamp", "../../system_wrappers:system_wrappers", "../../test:test_support", + "//third_party/abseil-cpp/absl/functional:any_invocable", ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable" ] } } diff --git a/rtc_base/task_utils/repeating_task.cc b/rtc_base/task_utils/repeating_task.cc index 6b76d7673a..284bfa2474 100644 --- a/rtc_base/task_utils/repeating_task.cc +++ b/rtc_base/task_utils/repeating_task.cc @@ -24,7 +24,7 @@ class RepeatingTask { TimeDelta first_delay, absl::AnyInvocable task, Clock* clock, - rtc::scoped_refptr alive_flag, + scoped_refptr alive_flag, const Location& location); RepeatingTask(RepeatingTask&&) = default; RepeatingTask& operator=(RepeatingTask&&) = delete; @@ -40,18 +40,16 @@ class RepeatingTask { absl::AnyInvocable task_; // This is always finite. Timestamp next_run_time_ RTC_GUARDED_BY(task_queue_); - rtc::scoped_refptr alive_flag_ - RTC_GUARDED_BY(task_queue_); + scoped_refptr alive_flag_ RTC_GUARDED_BY(task_queue_); }; -RepeatingTask::RepeatingTask( - TaskQueueBase* task_queue, - TaskQueueBase::DelayPrecision precision, - TimeDelta first_delay, - absl::AnyInvocable task, - Clock* clock, - rtc::scoped_refptr alive_flag, - const Location& location) +RepeatingTask::RepeatingTask(TaskQueueBase* task_queue, + TaskQueueBase::DelayPrecision precision, + TimeDelta first_delay, + absl::AnyInvocable task, + Clock* clock, + scoped_refptr alive_flag, + const Location& location) : task_queue_(task_queue), precision_(precision), clock_(clock), diff --git a/rtc_base/task_utils/repeating_task.h b/rtc_base/task_utils/repeating_task.h index c45de95ecc..5779fc1a8b 100644 --- a/rtc_base/task_utils/repeating_task.h +++ b/rtc_base/task_utils/repeating_task.h @@ -82,10 +82,9 @@ class RepeatingTaskHandle { bool Running() const; private: - explicit RepeatingTaskHandle( - rtc::scoped_refptr alive_flag) + explicit RepeatingTaskHandle(scoped_refptr alive_flag) : repeating_task_(std::move(alive_flag)) {} - rtc::scoped_refptr repeating_task_; + scoped_refptr repeating_task_; }; } // namespace webrtc diff --git a/rtc_base/task_utils/repeating_task_unittest.cc b/rtc_base/task_utils/repeating_task_unittest.cc index 2c269b43bc..f85078e7d3 100644 --- a/rtc_base/task_utils/repeating_task_unittest.cc +++ b/rtc_base/task_utils/repeating_task_unittest.cc @@ -12,6 +12,7 @@ #include #include +#include #include "absl/functional/any_invocable.h" #include "api/task_queue/task_queue_base.h" @@ -62,7 +63,7 @@ class FakeTaskQueue : public TaskQueueBase { const PostTaskTraits& /*traits*/, const Location& /*location*/) override { last_task_ = std::move(task); - last_precision_ = absl::nullopt; + last_precision_ = 
std::nullopt; last_delay_ = TimeDelta::Zero(); } @@ -94,7 +95,7 @@ class FakeTaskQueue : public TaskQueueBase { return last_delay_; } - absl::optional last_precision() const { + std::optional last_precision() const { return last_precision_; } @@ -103,7 +104,7 @@ class FakeTaskQueue : public TaskQueueBase { SimulatedClock* clock_; absl::AnyInvocable last_task_; TimeDelta last_delay_ = TimeDelta::MinusInfinity(); - absl::optional last_precision_; + std::optional last_precision_; }; // NOTE: Since this utility class holds a raw pointer to a variable that likely @@ -207,7 +208,7 @@ TEST(RepeatingTaskTest, CompensatesForShortRunTime) { } TEST(RepeatingTaskTest, CancelDelayedTaskBeforeItRuns) { - rtc::Event done; + Event done; MockClosure mock; EXPECT_CALL(mock, Call).Times(0); EXPECT_CALL(mock, Delete).WillOnce(Invoke([&done] { done.Set(); })); @@ -220,7 +221,7 @@ TEST(RepeatingTaskTest, CancelDelayedTaskBeforeItRuns) { } TEST(RepeatingTaskTest, CancelTaskAfterItRuns) { - rtc::Event done; + Event done; MockClosure mock; EXPECT_CALL(mock, Call).WillOnce(Return(TimeDelta::Millis(100))); EXPECT_CALL(mock, Delete).WillOnce(Invoke([&done] { done.Set(); })); @@ -263,7 +264,7 @@ TEST(RepeatingTaskTest, TaskCanStopItselfByReturningInfinity) { TEST(RepeatingTaskTest, ZeroReturnValueRepostsTheTask) { NiceMock closure; - rtc::Event done; + Event done; EXPECT_CALL(closure, Call()) .WillOnce(Return(TimeDelta::Zero())) .WillOnce(Invoke([&] { @@ -277,7 +278,7 @@ TEST(RepeatingTaskTest, ZeroReturnValueRepostsTheTask) { TEST(RepeatingTaskTest, StartPeriodicTask) { MockFunction closure; - rtc::Event done; + Event done; EXPECT_CALL(closure, Call()) .WillOnce(Return(TimeDelta::Millis(20))) .WillOnce(Return(TimeDelta::Millis(20))) @@ -330,7 +331,7 @@ TEST(RepeatingTaskTest, ClockIntegration) { NiceMock task_queue; ON_CALL(task_queue, PostDelayedTaskImpl) - .WillByDefault([&](absl::AnyInvocable task, TimeDelta delay, + .WillByDefault([&](absl::AnyInvocable task, TimeDelta delay, const MockTaskQueue::PostDelayedTaskTraits&, const Location&) { EXPECT_EQ(delay, expected_delay); @@ -349,7 +350,7 @@ TEST(RepeatingTaskTest, ClockIntegration) { TaskQueueBase::DelayPrecision::kLow, &clock); clock.AdvanceTimeMilliseconds(100); - absl::AnyInvocable task_to_run = std::move(delayed_task); + absl::AnyInvocable task_to_run = std::move(delayed_task); expected_delay = TimeDelta::Millis(90); std::move(task_to_run)(); EXPECT_NE(delayed_task, nullptr); @@ -361,7 +362,7 @@ TEST(RepeatingTaskTest, CanBeStoppedAfterTaskQueueDeletedTheRepeatingTask) { MockTaskQueue task_queue; EXPECT_CALL(task_queue, PostDelayedTaskImpl) - .WillOnce(WithArg<0>([&](absl::AnyInvocable task) { + .WillOnce(WithArg<0>([&](absl::AnyInvocable task) { repeating_task = std::move(task); })); diff --git a/rtc_base/test_certificate_verifier.h b/rtc_base/test_certificate_verifier.h index 9775615b5d..1252a3c0ee 100644 --- a/rtc_base/test_certificate_verifier.h +++ b/rtc_base/test_certificate_verifier.h @@ -13,7 +13,7 @@ #include "rtc_base/ssl_certificate.h" -namespace rtc { +namespace webrtc { class TestCertificateVerifier : public SSLCertificateVerifier { public: @@ -29,6 +29,14 @@ class TestCertificateVerifier : public SSLCertificateVerifier { bool verify_certificate_ = true; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::TestCertificateVerifier; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_TEST_CERTIFICATE_VERIFIER_H_ diff --git a/rtc_base/test_client.cc b/rtc_base/test_client.cc index f23ac2aec0..3798c595d8 100644 --- a/rtc_base/test_client.cc +++ b/rtc_base/test_client.cc @@ -12,14 +12,23 @@ #include +#include #include +#include #include -#include "rtc_base/gunit.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/fake_clock.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" -namespace rtc { +namespace webrtc { // DESIGN: Each packet received is put it into a list of packets. // Callers can retrieve received packets from any thread by calling @@ -30,10 +39,11 @@ TestClient::TestClient(std::unique_ptr socket) TestClient::TestClient(std::unique_ptr socket, ThreadProcessingFakeClock* fake_clock) - : fake_clock_(fake_clock), - socket_(std::move(socket)), - prev_packet_timestamp_(-1) { - socket_->SignalReadPacket.connect(this, &TestClient::OnPacket); + : fake_clock_(fake_clock), socket_(std::move(socket)) { + socket_->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + OnPacket(socket, packet); + }); socket_->SignalReadyToSend.connect(this, &TestClient::OnReadyToSend); } @@ -49,14 +59,14 @@ bool TestClient::CheckConnState(AsyncPacketSocket::State state) { } int TestClient::Send(const char* buf, size_t size) { - rtc::PacketOptions options; + AsyncSocketPacketOptions options; return socket_->Send(buf, size, options); } int TestClient::SendTo(const char* buf, size_t size, const SocketAddress& dest) { - rtc::PacketOptions options; + AsyncSocketPacketOptions options; return socket_->SendTo(buf, size, dest, options); } @@ -75,8 +85,8 @@ std::unique_ptr TestClient::NextPacket(int timeout_ms) { int64_t end = TimeAfter(timeout_ms); while (TimeUntil(end) > 0) { { - webrtc::MutexLock lock(&mutex_); - if (packets_.size() != 0) { + MutexLock lock(&mutex_); + if (!packets_.empty()) { break; } } @@ -85,8 +95,8 @@ std::unique_ptr TestClient::NextPacket(int timeout_ms) { // Return the first packet placed in the queue. 
std::unique_ptr packet; - webrtc::MutexLock lock(&mutex_); - if (packets_.size() > 0) { + MutexLock lock(&mutex_); + if (!packets_.empty()) { packet = std::move(packets_.front()); packets_.erase(packets_.begin()); } @@ -100,20 +110,21 @@ bool TestClient::CheckNextPacket(const char* buf, bool res = false; std::unique_ptr packet = NextPacket(kTimeoutMs); if (packet) { - res = (packet->size == size && memcmp(packet->buf, buf, size) == 0 && - CheckTimestamp(packet->packet_time_us)); + res = (packet->buf.size() == size && + memcmp(packet->buf.data(), buf, size) == 0 && + CheckTimestamp(packet->packet_time)); if (addr) *addr = packet->addr; } return res; } -bool TestClient::CheckTimestamp(int64_t packet_timestamp) { +bool TestClient::CheckTimestamp(std::optional packet_timestamp) { bool res = true; - if (packet_timestamp == -1) { + if (!packet_timestamp) { res = false; } - if (prev_packet_timestamp_ != -1) { + if (prev_packet_timestamp_) { if (packet_timestamp < prev_packet_timestamp_) { res = false; } @@ -126,7 +137,9 @@ void TestClient::AdvanceTime(int ms) { // If the test is using a fake clock, we must advance the fake clock to // advance time. Otherwise, ProcessMessages will work. if (fake_clock_) { - SIMULATED_WAIT(false, ms, *fake_clock_); + for (int64_t start = TimeMillis(); TimeMillis() < start + ms;) { + fake_clock_->AdvanceTime(webrtc::TimeDelta::Millis(1)); + }; } else { Thread::Current()->ProcessMessages(1); } @@ -145,36 +158,24 @@ int TestClient::SetOption(Socket::Option opt, int value) { } void TestClient::OnPacket(AsyncPacketSocket* socket, - const char* buf, - size_t size, - const SocketAddress& remote_addr, - const int64_t& packet_time_us) { - webrtc::MutexLock lock(&mutex_); - packets_.push_back( - std::make_unique(remote_addr, buf, size, packet_time_us)); + const ReceivedIpPacket& received_packet) { + MutexLock lock(&mutex_); + packets_.push_back(std::make_unique(received_packet)); } void TestClient::OnReadyToSend(AsyncPacketSocket* socket) { ++ready_to_send_count_; } -TestClient::Packet::Packet(const SocketAddress& a, - const char* b, - size_t s, - int64_t packet_time_us) - : addr(a), buf(0), size(s), packet_time_us(packet_time_us) { - buf = new char[size]; - memcpy(buf, b, size); -} +TestClient::Packet::Packet(const ReceivedIpPacket& received_packet) + : addr(received_packet.source_address()), + // Copy received_packet payload to a buffer owned by Packet. + buf(received_packet.payload().data(), received_packet.payload().size()), + packet_time(received_packet.arrival_time()) {} TestClient::Packet::Packet(const Packet& p) - : addr(p.addr), buf(0), size(p.size), packet_time_us(p.packet_time_us) { - buf = new char[size]; - memcpy(buf, p.buf, size); -} - -TestClient::Packet::~Packet() { - delete[] buf; -} + : addr(p.addr), + buf(p.buf.data(), p.buf.size()), + packet_time(p.packet_time) {} -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/test_client.h b/rtc_base/test_client.h index dd91d37ab9..1be26c8fff 100644 --- a/rtc_base/test_client.h +++ b/rtc_base/test_client.h @@ -14,11 +14,14 @@ #include #include +#include "api/units/timestamp.h" #include "rtc_base/async_udp_socket.h" +#include "rtc_base/buffer.h" #include "rtc_base/fake_clock.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/synchronization/mutex.h" -namespace rtc { +namespace webrtc { // A simple client that can send TCP or UDP data and check that it receives // what it expects to receive. Useful for testing server functionality. 
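(Editorial sketch, not part of the patch: the test_client.cc hunk above replaces the SignalReadPacket sigslot with a lambda registered through RegisterReceivedPacketCallback, and the stored packet is now built from ReceivedIpPacket accessors. The snippet below uses only the API surface visible in this diff; the helper name is hypothetical:

  #include "api/units/timestamp.h"
  #include "rtc_base/async_packet_socket.h"
  #include "rtc_base/buffer.h"
  #include "rtc_base/network/received_packet.h"

  // Hypothetical helper: subscribe to incoming packets and copy the payload,
  // since ReceivedIpPacket only exposes a view of the received bytes.
  void SubscribeForTest(webrtc::AsyncPacketSocket& socket) {
    socket.RegisterReceivedPacketCallback(
        [](webrtc::AsyncPacketSocket* /*socket*/,
           const webrtc::ReceivedIpPacket& packet) {
          // Copy the payload into a buffer owned by the test.
          webrtc::Buffer copy(packet.payload().data(), packet.payload().size());
          // arrival_time() is what feeds the std::optional<Timestamp>
          // packet_time field used by TestClient::CheckTimestamp above.
          auto arrival = packet.arrival_time();
          (void)copy;
          (void)arrival;
        });
  }
)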
@@ -26,17 +29,12 @@ class TestClient : public sigslot::has_slots<> { public: // Records the contents of a packet that was received. struct Packet { - Packet(const SocketAddress& a, - const char* b, - size_t s, - int64_t packet_time_us); + Packet(const ReceivedIpPacket& received_packet); Packet(const Packet& p); - virtual ~Packet(); SocketAddress addr; - char* buf; - size_t size; - int64_t packet_time_us; + Buffer buf; + std::optional packet_time; }; // Default timeout for NextPacket reads. @@ -96,24 +94,29 @@ class TestClient : public sigslot::has_slots<> { static const int kNoPacketTimeoutMs = 1000; // Workaround for the fact that AsyncPacketSocket::GetConnState doesn't exist. Socket::ConnState GetState(); - // Slot for packets read on the socket. + void OnPacket(AsyncPacketSocket* socket, - const char* buf, - size_t len, - const SocketAddress& remote_addr, - const int64_t& packet_time_us); + const ReceivedIpPacket& received_packet); void OnReadyToSend(AsyncPacketSocket* socket); - bool CheckTimestamp(int64_t packet_timestamp); + bool CheckTimestamp(std::optional packet_timestamp); void AdvanceTime(int ms); ThreadProcessingFakeClock* fake_clock_ = nullptr; - webrtc::Mutex mutex_; + Mutex mutex_; std::unique_ptr socket_; std::vector> packets_; int ready_to_send_count_ = 0; - int64_t prev_packet_timestamp_; + std::optional prev_packet_timestamp_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::TestClient; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_TEST_CLIENT_H_ diff --git a/rtc_base/test_client_unittest.cc b/rtc_base/test_client_unittest.cc index b2866a2d34..c585a8e532 100644 --- a/rtc_base/test_client_unittest.cc +++ b/rtc_base/test_client_unittest.cc @@ -10,38 +10,39 @@ #include "rtc_base/test_client.h" +#include #include #include "absl/memory/memory.h" #include "rtc_base/async_tcp_socket.h" #include "rtc_base/async_udp_socket.h" #include "rtc_base/logging.h" -#include "rtc_base/net_helpers.h" #include "rtc_base/net_test_helpers.h" #include "rtc_base/physical_socket_server.h" #include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" #include "rtc_base/test_echo_server.h" #include "rtc_base/thread.h" #include "test/gtest.h" -namespace rtc { -namespace { - #define MAYBE_SKIP_IPV4 \ - if (!HasIPv4Enabled()) { \ + if (!::webrtc::HasIPv4Enabled()) { \ RTC_LOG(LS_INFO) << "No IPv4... skipping"; \ return; \ } #define MAYBE_SKIP_IPV6 \ - if (!HasIPv6Enabled()) { \ + if (!::webrtc::HasIPv6Enabled()) { \ RTC_LOG(LS_INFO) << "No IPv6... 
skipping"; \ return; \ } +namespace webrtc { +namespace { + void TestUdpInternal(const SocketAddress& loopback) { - rtc::PhysicalSocketServer socket_server; - rtc::AutoSocketServerThread main_thread(&socket_server); + PhysicalSocketServer socket_server; + AutoSocketServerThread main_thread(&socket_server); Socket* socket = socket_server.CreateSocket(loopback.family(), SOCK_DGRAM); socket->Bind(loopback); @@ -54,9 +55,9 @@ void TestUdpInternal(const SocketAddress& loopback) { } void TestTcpInternal(const SocketAddress& loopback) { - rtc::PhysicalSocketServer socket_server; - rtc::AutoSocketServerThread main_thread(&socket_server); - TestEchoServer server(&main_thread, loopback); + PhysicalSocketServer socket_server; + AutoSocketServerThread main_thread(&socket_server); + webrtc::TestEchoServer server(&main_thread, loopback); Socket* socket = socket_server.CreateSocket(loopback.family(), SOCK_STREAM); std::unique_ptr tcp_socket = absl::WrapUnique( @@ -105,4 +106,4 @@ TEST(TestClientTest, MAYBE_TestTcpIPv6) { } } // namespace -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/test_echo_server.cc b/rtc_base/test_echo_server.cc index feda4cd8ae..484ed848b7 100644 --- a/rtc_base/test_echo_server.cc +++ b/rtc_base/test_echo_server.cc @@ -12,7 +12,7 @@ #include "rtc_base/socket_server.h" -namespace rtc { +namespace webrtc { TestEchoServer::TestEchoServer(Thread* thread, const SocketAddress& addr) : server_socket_( @@ -29,4 +29,4 @@ TestEchoServer::~TestEchoServer() { } } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/test_echo_server.h b/rtc_base/test_echo_server.h index 82817624a5..758218a309 100644 --- a/rtc_base/test_echo_server.h +++ b/rtc_base/test_echo_server.h @@ -21,12 +21,13 @@ #include "absl/memory/memory.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/async_tcp_socket.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" -namespace rtc { +namespace webrtc { // A test echo server, echoes back any packets sent to it. // Useful for unit tests. @@ -45,19 +46,18 @@ class TestEchoServer : public sigslot::has_slots<> { Socket* raw_socket = socket->Accept(nullptr); if (raw_socket) { AsyncTCPSocket* packet_socket = new AsyncTCPSocket(raw_socket); - packet_socket->SignalReadPacket.connect(this, &TestEchoServer::OnPacket); + packet_socket->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + OnPacket(socket, packet); + }); packet_socket->SubscribeCloseEvent( this, [this](AsyncPacketSocket* s, int err) { OnClose(s, err); }); client_sockets_.push_back(packet_socket); } } - void OnPacket(AsyncPacketSocket* socket, - const char* buf, - size_t size, - const SocketAddress& remote_addr, - const int64_t& /* packet_time_us */) { - rtc::PacketOptions options; - socket->Send(buf, size, options); + void OnPacket(AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + AsyncSocketPacketOptions options; + socket->Send(packet.payload().data(), packet.payload().size(), options); } void OnClose(AsyncPacketSocket* socket, int err) { ClientList::iterator it = absl::c_find(client_sockets_, socket); @@ -72,6 +72,14 @@ class TestEchoServer : public sigslot::has_slots<> { ClientList client_sockets_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::TestEchoServer; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_TEST_ECHO_SERVER_H_ diff --git a/rtc_base/test_utils.h b/rtc_base/test_utils.h index 3f877fc9af..53a7a032ef 100644 --- a/rtc_base/test_utils.h +++ b/rtc_base/test_utils.h @@ -42,7 +42,7 @@ class StreamSink : public sigslot::has_slots<> { StreamSink(); ~StreamSink() override; - void Monitor(rtc::Socket* socket) { + void Monitor(Socket* socket) { socket->SignalConnectEvent.connect(this, &StreamSink::OnConnectEvent); socket->SignalReadEvent.connect(this, &StreamSink::OnReadEvent); socket->SignalWriteEvent.connect(this, &StreamSink::OnWriteEvent); @@ -50,28 +50,28 @@ class StreamSink : public sigslot::has_slots<> { // In case you forgot to unmonitor a previous object with this address events_.erase(socket); } - void Unmonitor(rtc::Socket* socket) { + void Unmonitor(Socket* socket) { socket->SignalConnectEvent.disconnect(this); socket->SignalReadEvent.disconnect(this); socket->SignalWriteEvent.disconnect(this); socket->SignalCloseEvent.disconnect(this); events_.erase(socket); } - bool Check(rtc::Socket* socket, StreamSinkEvent event, bool reset = true) { + bool Check(Socket* socket, StreamSinkEvent event, bool reset = true) { return DoCheck(socket, event, reset); } private: - typedef std::map EventMap; + typedef std::map EventMap; - void OnConnectEvent(rtc::Socket* socket) { AddEvents(socket, SSE_OPEN); } - void OnReadEvent(rtc::Socket* socket) { AddEvents(socket, SSE_READ); } - void OnWriteEvent(rtc::Socket* socket) { AddEvents(socket, SSE_WRITE); } - void OnCloseEvent(rtc::Socket* socket, int error) { + void OnConnectEvent(Socket* socket) { AddEvents(socket, SSE_OPEN); } + void OnReadEvent(Socket* socket) { AddEvents(socket, SSE_READ); } + void OnWriteEvent(Socket* socket) { AddEvents(socket, SSE_WRITE); } + void OnCloseEvent(Socket* socket, int error) { AddEvents(socket, (0 == error) ? 
SSE_CLOSE : SSE_ERROR); } - void AddEvents(rtc::Socket* obj, int events) { + void AddEvents(Socket* obj, int events) { EventMap::iterator it = events_.find(obj); if (events_.end() == it) { events_.insert(EventMap::value_type(obj, events)); @@ -79,7 +79,7 @@ class StreamSink : public sigslot::has_slots<> { it->second |= events; } } - bool DoCheck(rtc::Socket* obj, StreamSinkEvent event, bool reset) { + bool DoCheck(Socket* obj, StreamSinkEvent event, bool reset) { EventMap::iterator it = events_.find(obj); if ((events_.end() == it) || (0 == (it->second & event))) { return false; diff --git a/rtc_base/third_party/base64/BUILD.gn b/rtc_base/third_party/base64/BUILD.gn index d28338c6a0..fac16ff881 100644 --- a/rtc_base/third_party/base64/BUILD.gn +++ b/rtc_base/third_party/base64/BUILD.gn @@ -16,6 +16,23 @@ rtc_library("base64") { deps = [ "../..:checks", "../../system:rtc_export", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] +} + +if (rtc_include_tests) { + rtc_library("base64_unittest") { + testonly = true + sources = [ + "base64_unittest.cc", + "test_base64.h", + ] + deps = [ + ":base64", + "../..:logging", + "../../../test:test_main", + "../../../test:test_support", + "//third_party/abseil-cpp/absl/strings:string_view", + ] + } } diff --git a/rtc_base/third_party/base64/base64.cc b/rtc_base/third_party/base64/base64.cc index 9dc961ae52..cc579ade91 100644 --- a/rtc_base/third_party/base64/base64.cc +++ b/rtc_base/third_party/base64/base64.cc @@ -23,7 +23,7 @@ using std::vector; -namespace rtc { +namespace webrtc { static const char kPad = '='; static const unsigned char pd = 0xFD; // Padding @@ -276,4 +276,4 @@ bool Base64::DecodeFromArrayTemplate(const char* data, return success; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/third_party/base64/base64.h b/rtc_base/third_party/base64/base64.h index 5d42513323..7a4b1e6209 100644 --- a/rtc_base/third_party/base64/base64.h +++ b/rtc_base/third_party/base64/base64.h @@ -18,9 +18,8 @@ #include #include "absl/strings/string_view.h" -#include "rtc_base/system/rtc_export.h" -namespace rtc { +namespace webrtc { class Base64 { public: @@ -61,14 +60,14 @@ class Base64 { // encoded characters. static bool IsBase64Encoded(absl::string_view str); - RTC_EXPORT static void EncodeFromArray(const void* data, - size_t len, - std::string* result); - RTC_EXPORT static bool DecodeFromArray(const char* data, - size_t len, - DecodeFlags flags, - std::string* result, - size_t* data_used); + static void EncodeFromArray(const void* data, + size_t len, + std::string* result); + static bool DecodeFromArray(const char* data, + size_t len, + DecodeFlags flags, + std::string* result, + size_t* data_used); static bool DecodeFromArray(const char* data, size_t len, DecodeFlags flags, @@ -123,6 +122,14 @@ class Base64 { size_t* data_used); }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
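(Editorial sketch, not part of the patch: the re-export block that follows, like the matching ones added to test_certificate_verifier.h, test_client.h and test_echo_server.h earlier in this diff, keeps legacy rtc:: spellings compiling while the implementation moves to namespace webrtc. Assuming the build defines WEBRTC_ALLOW_DEPRECATED_NAMESPACES, a downstream caller can use either spelling:

  #include <string>

  #include "rtc_base/third_party/base64/base64.h"

  void EncodeBothSpellings() {
    std::string out;
    webrtc::Base64::EncodeFromArray("abc", 3, &out);  // new spelling
  #ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES
    // Still resolves through the using-declaration in the rtc namespace.
    rtc::Base64::EncodeFromArray("abc", 3, &out);
  #endif
  }
)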
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::Base64; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif /* RTC_BASE_THIRD_PARTY_BASE64_BASE64_H_ */ diff --git a/rtc_base/third_party/base64/base64_unittest.cc b/rtc_base/third_party/base64/base64_unittest.cc new file mode 100644 index 0000000000..f753bf689f --- /dev/null +++ b/rtc_base/third_party/base64/base64_unittest.cc @@ -0,0 +1,1459 @@ +/* + * Copyright 2011 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "rtc_base/third_party/base64/base64.h" + +#include +#include + +#include +#include + +#include "rtc_base/logging.h" +#include "rtc_base/third_party/base64/test_base64.h" +#include "test/gtest.h" + +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +namespace { + +static struct { + size_t plain_length; + const char* plaintext; + const char* cyphertext; +} base64_tests[] = { + + // Basic bit patterns; + // values obtained with "echo -n '...' | uuencode -m test" + + {1, "\000", "AA=="}, + {1, "\001", "AQ=="}, + {1, "\002", "Ag=="}, + {1, "\004", "BA=="}, + {1, "\010", "CA=="}, + {1, "\020", "EA=="}, + {1, "\040", "IA=="}, + {1, "\100", "QA=="}, + {1, "\200", "gA=="}, + + {1, "\377", "/w=="}, + {1, "\376", "/g=="}, + {1, "\375", "/Q=="}, + {1, "\373", "+w=="}, + {1, "\367", "9w=="}, + {1, "\357", "7w=="}, + {1, "\337", "3w=="}, + {1, "\277", "vw=="}, + {1, "\177", "fw=="}, + {2, "\000\000", "AAA="}, + {2, "\000\001", "AAE="}, + {2, "\000\002", "AAI="}, + {2, "\000\004", "AAQ="}, + {2, "\000\010", "AAg="}, + {2, "\000\020", "ABA="}, + {2, "\000\040", "ACA="}, + {2, "\000\100", "AEA="}, + {2, "\000\200", "AIA="}, + {2, "\001\000", "AQA="}, + {2, "\002\000", "AgA="}, + {2, "\004\000", "BAA="}, + {2, "\010\000", "CAA="}, + {2, "\020\000", "EAA="}, + {2, "\040\000", "IAA="}, + {2, "\100\000", "QAA="}, + {2, "\200\000", "gAA="}, + + {2, "\377\377", "//8="}, + {2, "\377\376", "//4="}, + {2, "\377\375", "//0="}, + {2, "\377\373", "//s="}, + {2, "\377\367", "//c="}, + {2, "\377\357", "/+8="}, + {2, "\377\337", "/98="}, + {2, "\377\277", "/78="}, + {2, "\377\177", "/38="}, + {2, "\376\377", "/v8="}, + {2, "\375\377", "/f8="}, + {2, "\373\377", "+/8="}, + {2, "\367\377", "9/8="}, + {2, "\357\377", "7/8="}, + {2, "\337\377", "3/8="}, + {2, "\277\377", "v/8="}, + {2, "\177\377", "f/8="}, + + {3, "\000\000\000", "AAAA"}, + {3, "\000\000\001", "AAAB"}, + {3, "\000\000\002", "AAAC"}, + {3, "\000\000\004", "AAAE"}, + {3, "\000\000\010", "AAAI"}, + {3, "\000\000\020", "AAAQ"}, + {3, "\000\000\040", "AAAg"}, + {3, "\000\000\100", "AABA"}, + {3, "\000\000\200", "AACA"}, + {3, "\000\001\000", "AAEA"}, + {3, "\000\002\000", "AAIA"}, + {3, "\000\004\000", "AAQA"}, + {3, "\000\010\000", "AAgA"}, + {3, "\000\020\000", "ABAA"}, + {3, "\000\040\000", "ACAA"}, + {3, "\000\100\000", "AEAA"}, + {3, "\000\200\000", "AIAA"}, + {3, "\001\000\000", "AQAA"}, + {3, "\002\000\000", "AgAA"}, + {3, "\004\000\000", "BAAA"}, + {3, "\010\000\000", "CAAA"}, + {3, "\020\000\000", "EAAA"}, + {3, "\040\000\000", "IAAA"}, + {3, "\100\000\000", "QAAA"}, + {3, "\200\000\000", "gAAA"}, + + {3, "\377\377\377", "////"}, + {3, "\377\377\376", "///+"}, + {3, "\377\377\375", "///9"}, + 
{3, "\377\377\373", "///7"}, + {3, "\377\377\367", "///3"}, + {3, "\377\377\357", "///v"}, + {3, "\377\377\337", "///f"}, + {3, "\377\377\277", "//+/"}, + {3, "\377\377\177", "//9/"}, + {3, "\377\376\377", "//7/"}, + {3, "\377\375\377", "//3/"}, + {3, "\377\373\377", "//v/"}, + {3, "\377\367\377", "//f/"}, + {3, "\377\357\377", "/+//"}, + {3, "\377\337\377", "/9//"}, + {3, "\377\277\377", "/7//"}, + {3, "\377\177\377", "/3//"}, + {3, "\376\377\377", "/v//"}, + {3, "\375\377\377", "/f//"}, + {3, "\373\377\377", "+///"}, + {3, "\367\377\377", "9///"}, + {3, "\357\377\377", "7///"}, + {3, "\337\377\377", "3///"}, + {3, "\277\377\377", "v///"}, + {3, "\177\377\377", "f///"}, + + // Random numbers: values obtained with + // + // #! /bin/bash + // dd bs=$1 count=1 if=/dev/random of=/tmp/bar.random + // od -N $1 -t o1 /tmp/bar.random + // uuencode -m test < /tmp/bar.random + // + // where $1 is the number of bytes (2, 3) + + {2, "\243\361", "o/E="}, + {2, "\024\167", "FHc="}, + {2, "\313\252", "y6o="}, + {2, "\046\041", "JiE="}, + {2, "\145\236", "ZZ4="}, + {2, "\254\325", "rNU="}, + {2, "\061\330", "Mdg="}, + {2, "\245\032", "pRo="}, + {2, "\006\000", "BgA="}, + {2, "\375\131", "/Vk="}, + {2, "\303\210", "w4g="}, + {2, "\040\037", "IB8="}, + {2, "\261\372", "sfo="}, + {2, "\335\014", "3Qw="}, + {2, "\233\217", "m48="}, + {2, "\373\056", "+y4="}, + {2, "\247\232", "p5o="}, + {2, "\107\053", "Rys="}, + {2, "\204\077", "hD8="}, + {2, "\276\211", "vok="}, + {2, "\313\110", "y0g="}, + {2, "\363\376", "8/4="}, + {2, "\251\234", "qZw="}, + {2, "\103\262", "Q7I="}, + {2, "\142\312", "Yso="}, + {2, "\067\211", "N4k="}, + {2, "\220\001", "kAE="}, + {2, "\152\240", "aqA="}, + {2, "\367\061", "9zE="}, + {2, "\133\255", "W60="}, + {2, "\176\035", "fh0="}, + {2, "\032\231", "Gpk="}, + + {3, "\013\007\144", "Cwdk"}, + {3, "\030\112\106", "GEpG"}, + {3, "\047\325\046", "J9Um"}, + {3, "\310\160\022", "yHAS"}, + {3, "\131\100\237", "WUCf"}, + {3, "\064\342\134", "NOJc"}, + {3, "\010\177\004", "CH8E"}, + {3, "\345\147\205", "5WeF"}, + {3, "\300\343\360", "wOPw"}, + {3, "\061\240\201", "MaCB"}, + {3, "\225\333\044", "ldsk"}, + {3, "\215\137\352", "jV/q"}, + {3, "\371\147\160", "+Wdw"}, + {3, "\030\320\051", "GNAp"}, + {3, "\044\174\241", "JHyh"}, + {3, "\260\127\037", "sFcf"}, + {3, "\111\045\033", "SSUb"}, + {3, "\202\114\107", "gkxH"}, + {3, "\057\371\042", "L/ki"}, + {3, "\223\247\244", "k6ek"}, + {3, "\047\216\144", "J45k"}, + {3, "\203\070\327", "gzjX"}, + {3, "\247\140\072", "p2A6"}, + {3, "\124\115\116", "VE1O"}, + {3, "\157\162\050", "b3Io"}, + {3, "\357\223\004", "75ME"}, + {3, "\052\117\156", "Kk9u"}, + {3, "\347\154\000", "52wA"}, + {3, "\303\012\142", "wwpi"}, + {3, "\060\035\362", "MB3y"}, + {3, "\130\226\361", "WJbx"}, + {3, "\173\013\071", "ews5"}, + {3, "\336\004\027", "3gQX"}, + {3, "\357\366\234", "7/ac"}, + {3, "\353\304\111", "68RJ"}, + {3, "\024\264\131", "FLRZ"}, + {3, "\075\114\251", "PUyp"}, + {3, "\315\031\225", "zRmV"}, + {3, "\154\201\276", "bIG+"}, + {3, "\200\066\072", "gDY6"}, + {3, "\142\350\267", "Yui3"}, + {3, "\033\000\166", "GwB2"}, + {3, "\210\055\077", "iC0/"}, + {3, "\341\037\124", "4R9U"}, + {3, "\161\103\152", "cUNq"}, + {3, "\270\142\131", "uGJZ"}, + {3, "\337\076\074", "3z48"}, + {3, "\375\106\362", "/Uby"}, + {3, "\227\301\127", "l8FX"}, + {3, "\340\002\234", "4AKc"}, + {3, "\121\064\033", "UTQb"}, + {3, "\157\134\143", "b1xj"}, + {3, "\247\055\327", "py3X"}, + {3, "\340\142\005", "4GIF"}, + {3, "\060\260\143", "MLBj"}, + {3, "\075\203\170", "PYN4"}, + {3, "\143\160\016", 
"Y3AO"}, + {3, "\313\013\063", "ywsz"}, + {3, "\174\236\135", "fJ5d"}, + {3, "\103\047\026", "QycW"}, + {3, "\365\005\343", "9QXj"}, + {3, "\271\160\223", "uXCT"}, + {3, "\362\255\172", "8q16"}, + {3, "\113\012\015", "SwoN"}, + + // various lengths, generated by this python script: + // + // from string import lowercase as lc + // for i in range(27): + // print '{ %2d, "%s",%s "%s" },' % (i, lc[:i], ' ' * (26-i), + // lc[:i].encode('base64').strip()) + + {0, "abcdefghijklmnopqrstuvwxyz", ""}, + {1, "abcdefghijklmnopqrstuvwxyz", "YQ=="}, + {2, "abcdefghijklmnopqrstuvwxyz", "YWI="}, + {3, "abcdefghijklmnopqrstuvwxyz", "YWJj"}, + {4, "abcdefghijklmnopqrstuvwxyz", "YWJjZA=="}, + {5, "abcdefghijklmnopqrstuvwxyz", "YWJjZGU="}, + {6, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVm"}, + {7, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZw=="}, + {8, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2g="}, + {9, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hp"}, + {10, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpag=="}, + {11, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpams="}, + {12, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamts"}, + {13, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbQ=="}, + {14, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW4="}, + {15, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5v"}, + {16, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcA=="}, + {17, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHE="}, + {18, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFy"}, + {19, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFycw=="}, + {20, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3Q="}, + {21, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1"}, + {22, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dg=="}, + {23, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnc="}, + {24, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4"}, + {25, "abcdefghijklmnopqrstuvwxy", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eQ=="}, + {26, "abcdefghijklmnopqrstuvwxyz", "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXo="}, +}; +#if 0 +static struct { + const char* plaintext; + const char* cyphertext; +} base64_strings[] = { + + // The first few Google quotes + // Cyphertext created with "uuencode - GNU sharutils 4.2.1" + { + "Everyone! We're teetering on the brink of disaster." + " - Sergey Brin, 6/24/99, regarding the company's state " + "after the unleashing of Netscape/Google search", + + "RXZlcnlvbmUhICBXZSdyZSB0ZWV0ZXJpbmcgb24gdGhlIGJyaW5rIG9mIGRp" + "c2FzdGVyLiAtIFNlcmdleSBCcmluLCA2LzI0Lzk5LCByZWdhcmRpbmcgdGhl" + "IGNvbXBhbnkncyBzdGF0ZSBhZnRlciB0aGUgdW5sZWFzaGluZyBvZiBOZXRz" + "Y2FwZS9Hb29nbGUgc2VhcmNo" }, + + { + "I'm not sure why we're still alive, but we seem to be." + " - Larry Page, 6/24/99, while hiding in the kitchenette " + "during the Netscape traffic overflow", + + "SSdtIG5vdCBzdXJlIHdoeSB3ZSdyZSBzdGlsbCBhbGl2ZSwgYnV0IHdlIHNl" + "ZW0gdG8gYmUuIC0gTGFycnkgUGFnZSwgNi8yNC85OSwgd2hpbGUgaGlkaW5n" + "IGluIHRoZSBraXRjaGVuZXR0ZSBkdXJpbmcgdGhlIE5ldHNjYXBlIHRyYWZm" + "aWMgb3ZlcmZsb3c" }, + + { + "I think kids want porn." + " - Sergey Brin, 6/99, on why Google shouldn't prioritize a " + "filtered search for children and families", + + "SSB0aGluayBraWRzIHdhbnQgcG9ybi4gLSBTZXJnZXkgQnJpbiwgNi85OSwg" + "b24gd2h5IEdvb2dsZSBzaG91bGRuJ3QgcHJpb3JpdGl6ZSBhIGZpbHRlcmVk" + "IHNlYXJjaCBmb3IgY2hpbGRyZW4gYW5kIGZhbWlsaWVz" }, +}; +#endif +// Compare bytes 0..len-1 of x and y. If not equal, abort with verbose error +// message showing position and numeric value that differed. 
+// Handles embedded nulls just like any other byte. +// Only added because string.compare() in gcc-3.3.3 seems to misbehave with +// embedded nulls. +// TODO: switch back to string.compare() if/when gcc is fixed +#define EXPECT_EQ_ARRAY(len, x, y, msg) \ + for (size_t j = 0; j < len; ++j) { \ + if (x[j] != y[j]) { \ + RTC_LOG(LS_ERROR) << "" #x << " != " #y << " byte " << j \ + << " msg: " << msg; \ + } \ + } + +size_t Base64Escape(const unsigned char* src, + size_t szsrc, + char* dest, + size_t szdest) { + std::string escaped; + webrtc::Base64::EncodeFromArray((const char*)src, szsrc, &escaped); + memcpy(dest, escaped.data(), std::min(escaped.size(), szdest)); + return escaped.size(); +} + +size_t Base64Unescape(const char* src, + size_t szsrc, + char* dest, + size_t szdest) { + std::string unescaped; + EXPECT_TRUE(webrtc::Base64::DecodeFromArray( + src, szsrc, webrtc::Base64::DO_LAX, &unescaped, nullptr)); + memcpy(dest, unescaped.data(), std::min(unescaped.size(), szdest)); + return unescaped.size(); +} + +size_t Base64Unescape(const char* src, size_t szsrc, std::string* s) { + EXPECT_TRUE(webrtc::Base64::DecodeFromArray( + src, szsrc, webrtc::Base64::DO_LAX, s, nullptr)); + return s->size(); +} + +TEST(Base64, EncodeDecodeBattery) { + RTC_LOG(LS_VERBOSE) << "Testing base-64"; + + size_t i; + + // Check the short strings; this tests the math (and boundaries) + for (i = 0; i < sizeof(base64_tests) / sizeof(base64_tests[0]); ++i) { + char encode_buffer[100]; + size_t encode_length; + char decode_buffer[100]; + size_t decode_length; + size_t cypher_length; + + RTC_LOG(LS_VERBOSE) << "B64: " << base64_tests[i].cyphertext; + + const unsigned char* unsigned_plaintext = + reinterpret_cast(base64_tests[i].plaintext); + + cypher_length = strlen(base64_tests[i].cyphertext); + + // The basic escape function: + memset(encode_buffer, 0, sizeof(encode_buffer)); + encode_length = + Base64Escape(unsigned_plaintext, base64_tests[i].plain_length, + encode_buffer, sizeof(encode_buffer)); + // Is it of the expected length? + EXPECT_EQ(encode_length, cypher_length); + + // Is it the expected encoded value? + EXPECT_STREQ(encode_buffer, base64_tests[i].cyphertext); + + // If we encode it into a buffer of exactly the right length... + memset(encode_buffer, 0, sizeof(encode_buffer)); + encode_length = + Base64Escape(unsigned_plaintext, base64_tests[i].plain_length, + encode_buffer, cypher_length); + // Is it still of the expected length? + EXPECT_EQ(encode_length, cypher_length); + + // And is the value still correct? (i.e., not losing the last byte) + EXPECT_STREQ(encode_buffer, base64_tests[i].cyphertext); + + // If we decode it back: + memset(decode_buffer, 0, sizeof(decode_buffer)); + decode_length = Base64Unescape(encode_buffer, cypher_length, decode_buffer, + sizeof(decode_buffer)); + + // Is it of the expected length? + EXPECT_EQ(decode_length, base64_tests[i].plain_length); + + // Is it the expected decoded value? + EXPECT_EQ(0, + memcmp(decode_buffer, base64_tests[i].plaintext, decode_length)); + + // Our decoder treats the padding '=' characters at the end as + // optional. If encode_buffer has any, run some additional + // tests that fiddle with them. + char* first_equals = strchr(encode_buffer, '='); + if (first_equals) { + // How many equals signs does the string start with? + int equals = (*(first_equals + 1) == '=') ? 2 : 1; + + // Try chopping off the equals sign(s) entirely. The decoder + // should still be okay with this. 
+ std::string decoded2("this junk should also be ignored"); + *first_equals = '\0'; + EXPECT_NE(0U, Base64Unescape(encode_buffer, first_equals - encode_buffer, + &decoded2)); + EXPECT_EQ(decoded2.size(), base64_tests[i].plain_length); + EXPECT_EQ_ARRAY(decoded2.size(), decoded2.data(), + base64_tests[i].plaintext, i); + + size_t len; + + // try putting some extra stuff after the equals signs, or in between them + if (equals == 2) { + snprintf(first_equals, 6, " = = "); + len = first_equals - encode_buffer + 5; + } else { + snprintf(first_equals, 6, " = "); + len = first_equals - encode_buffer + 3; + } + decoded2.assign("this junk should be ignored"); + EXPECT_NE(0U, Base64Unescape(encode_buffer, len, &decoded2)); + EXPECT_EQ(decoded2.size(), base64_tests[i].plain_length); + EXPECT_EQ_ARRAY(decoded2.size(), decoded2, base64_tests[i].plaintext, i); + } + } +} + +// here's a weird case: a giant base64 encoded stream which broke our base64 +// decoding. Let's test it explicitly. +const char SpecificTest[] = + "/9j/4AAQSkZJRgABAgEASABIAAD/" + "4Q0HRXhpZgAATU0AKgAAAAgADAEOAAIAAAAgAAAAngEPAAI\n" + "AAAAFAAAAvgEQAAIAAAAJAAAAwwESAAMAAAABAAEAAAEaAAUAAAABAAAAzAEbAAUAAAABAAAA1" + "A\n" + "EoAAMAAAABAAIAAAExAAIAAAAUAAAA3AEyAAIAAAAUAAAA8AE8AAIAAAAQAAABBAITAAMAAAAB" + "A\n" + "AIAAIdpAAQAAAABAAABFAAAAsQgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgAFNPTlk" + "A\n" + "RFNDLVAyMDAAAAAASAAAAAEAAABIAAAAAUFkb2JlIFBob3Rvc2hvcCA3LjAAMjAwNzowMTozMC" + "A\n" + "yMzoxMDowNABNYWMgT1MgWCAxMC40LjgAAByCmgAFAAAAAQAAAmqCnQAFAAAAAQAAAnKIIgADA" + "A\n" + "AAAQACAACIJwADAAAAAQBkAACQAAAHAAAABDAyMjCQAwACAAAAFAAAAnqQBAACAAAAFAAAAo6R" + "A\n" + "QAHAAAABAECAwCRAgAFAAAAAQAAAqKSBAAKAAAAAQAAAqqSBQAFAAAAAQAAArKSBwADAAAAAQA" + "F\n" + "AACSCAADAAAAAQAAAACSCQADAAAAAQAPAACSCgAFAAAAAQAAArqgAAAHAAAABDAxMDCgAQADAA" + "A\n" + "AAf//" + "AACgAgAEAAAAAQAAAGSgAwAEAAAAAQAAAGSjAAAHAAAAAQMAAACjAQAHAAAAAQEAAACkAQ\n" + "ADAAAAAQAAAACkAgADAAAAAQAAAACkAwADAAAAAQAAAACkBgADAAAAAQAAAACkCAADAAAAAQAA" + "A\n" + "ACkCQADAAAAAQAAAACkCgADAAAAAQAAAAAAAAAAAAAACgAAAZAAAAAcAAAACjIwMDc6MDE6MjA" + "g\n" + "MjM6MDU6NTIAMjAwNzowMToyMCAyMzowNTo1MgAAAAAIAAAAAQAAAAAAAAAKAAAAMAAAABAAAA" + "B\n" + "PAAAACgAAAAYBAwADAAAAAQAGAAABGgAFAAAAAQAAAxIBGwAFAAAAAQAAAxoBKAADAAAAAQACA" + "A\n" + "ACAQAEAAAAAQAAAyICAgAEAAAAAQAACd0AAAAAAAAASAAAAAEAAABIAAAAAf/Y/" + "+AAEEpGSUYAA\n" + "QIBAEgASAAA/+0ADEFkb2JlX0NNAAL/7gAOQWRvYmUAZIAAAAAB/" + "9sAhAAMCAgICQgMCQkMEQsK\n" + "CxEVDwwMDxUYExMVExMYEQwMDAwMDBEMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAQ0LCw" + "0\n" + "ODRAODhAUDg4OFBQODg4OFBEMDAwMDBERDAwMDAwMEQwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMD" + "A\n" + "wMDAz/wAARCABkAGQDASIAAhEBAxEB/90ABAAH/" + "8QBPwAAAQUBAQEBAQEAAAAAAAAAAwABAgQFB\n" + "gcICQoLAQABBQEBAQEBAQAAAAAAAAABAAIDBAUGBwgJCgsQAAEEAQMCBAIFBwYIBQMMMwEAAhE" + "D\n" + "BCESMQVBUWETInGBMgYUkaGxQiMkFVLBYjM0coLRQwclklPw4fFjczUWorKDJkSTVGRFwqN0Nh" + "f\n" + "SVeJl8rOEw9N14/" + "NGJ5SkhbSVxNTk9KW1xdXl9VZmdoaWprbG1ub2N0dXZ3eHl6e3x9fn9xEAAg\n" + "IBAgQEAwQFBgcHBgU1AQACEQMhMRIEQVFhcSITBTKBkRShsUIjwVLR8DMkYuFygpJDUxVjczTx" + "J\n" + "QYWorKDByY1wtJEk1SjF2RFVTZ0ZeLys4TD03Xj80aUpIW0lcTU5PSltcXV5fVWZnaGlqa2xtb" + "m\n" + "9ic3R1dnd4eXp7fH/" + "9oADAMBAAIRAxEAPwDy7bKNTUXNLz9EaJPDWMjxH4ozhtpYwaACT8ShaaW\n" + "bW0uEc9/JFfjj0Q4Hk/PRDxwX7y47W9z/" + "AN9Cv4+O3ILK2DcRqT2CaSvEbcl1Jbz37KG1dBldLo\n" + "qaS4l9xGjG9v6yoDAdYIaIjUk+AREgo4y5sapirb8Yl0NHHdKvBNm4yA1o5Pc+" + "SPEFvCWqB3HZF\n" + "Hj2SbWQ/" + "afGFP0bHP8ATY0uc4w1o1JPkkimGiS2KvqlnmBkOZQTyydzgPMM9v8A0lp4v1Nx9gF1\n" + "tpdqJaGtH/S3I0i3lISXW/8AMqnd/O2bfg2eUkqVYf/" + "Q8zuncO4Bj7lZ+n7f5Mj5KsJcY8NUZ4d\n" + 
"uEDVo1HkeU0rg3Om4H2rabCWUN7DQuK1n5FWKW4uCwG92gDRJBS6exhxmMboQI+" + "Cv4WFTQ42Bs2\n" + "fvnkkqEmy2YxoMMbpVzaz6jt+RbpHZs8lzkHqrasKkYOKP0jgDfZ4N/" + "wDM1tNrcWfSPmRyq9uNV\n" + "DnFg2s97i7UkjxKVrq0eVz3spZsja+ASDzwsh9jnOk/" + "JFzb3XZD3v1c4yT8UACTCniKDUnKz5Nj\n" + "G33XV1DV73BrT8dF23SejV4zg9g33cOsPb+SxVvqv9ViwNy8vS0iWs/" + "daf8A0Y5dpTi1sADGxCR\n" + "K1o0YBEmInlXWYbDBcDLdPJXa8f71Yrx2jnUoAqLnfZK5hJaW2vdwEk5a/wD/0fN6Ia/" + "e76IiVf\n" + "xavUL7CPpnT4LNbYXAVjuQt/AqDmNYO/" + "Kjnoy4hr5J8SwMhrRMaeSvbsxrfUazcOw4UX0Cisem2\n" + "SBoD4+" + "Kz8nC6llbSLCRrubJA8kwUWbUDa29X1PMa7aQWjuDC0MXMdbDbhI7eazBiUfZ6GOYRe1s\n" + "WvGgJ8Vbw2+m4Bx9s6JpNHuuGo1FF53r/" + "SHYua61gLse0lzXeBP5rkvqx0o5vVWz7WY49QkiQSP\n" + "oN/tLoevW/ogxv0HA7tJ0AnhT+pdDGYVl/wCdcTPkGn2NU0JWNWvlgAbHV6fEqdu2gR/" + "r2WlWwt\n" + "AA5VXAEsLXTqJafArQY5rRr9LiPBJiZsZCI1pJjxCi0j4oncSICSkWwzwkjeaSch//" + "0vO7sP7Lm\n" + "enO9ogtd5FbPT3Q5pCpZVc4ld3Lmn3O8j9EI2BYdunKjOobMQIyI+rusc2wx4d0eutwGnHh/" + "uQc\n" + "Ha7ladj6mVANGvcqOgz0Go7HJ12/GEHcwvB/dPY6ImbbaMaASGuIBjkN7qofs9Ubg9g7OI9p/" + "t/\n" + "RTSmhTHr0v6eSz6UgCPP2/wAVu9Ex2V49dVY2iACB4BZeVXQ/" + "AJ3gzGnnOi2+kACpru8flUsNmt\n" + "zHRf6xfWCnoeAfTh2ZaQKazx/" + "Ke7+QxcKz61fWA2uuObaC4zGhaPJrXBL64ZFmR124O09ENraPK\n" + "N3/AH5GqxIrZVUyp2K2vfdkENsDnxuex9m4Ox9n82xSgNd9D+p/XR1npgseR9ppOy4Dx/NfH/" + "CL\n" + "oQJGunmvMv8AFq3KHVcq3HkYQbD2nuSf0I/rMavSg6TLjLigQhJ7Z58v9QkmlsTOqSCn/" + "9PzL7R\n" + "d6Qq3n0wZ2zotXpT9xLfFYvkr/S7jXeB8E0jRkhKpC3q8LcJ/kmCrTnkuAPCq4do9Q/" + "ytVbuAeY\n" + "Gg5lQybQK+" + "82GBqEQUA1kOHPYf3LLsoyN36G5w8iUfHxepbXE2l0cApALgLHzBq9UxhTXU5hMC1\n" + "ktnSCup6S4Ctk+C5XqVGcaHPfuiuHkeTTuWz0+9zaKiH6CC0/yXBSQ2a/" + "MxojV57634rq+v2PLY\n" + "be1r2nsYG13/" + "AFKxbfCBMcr0brGAzrGEwCG31ncx0SfBzf7S4+zoHUWWsJq3hz9oLfcBH77R9H+\n" + "0pA13u/qPgDp/Q6ri39JlfpXkDx+h/" + "msWn1L6wdO6bSbcrIbU2Q0xLnSe21kuVejJspbVS5+4bd\n" + "ocBAkD/orG+tP1ar67Wy7GtZTm1SCXfRsb+a18fRe38x6SG3/44H1Z3f0y2I+l6DoSXD/" + "8xPrDs\n" + "3enVu3bdnqN3R+//USSVo//" + "1PLohhce+gRWS0Nsby3lRgFkKxQyW7SgUh3em5Tbq2uB9wWw1wey\n" + "J1XGV2XYdm5k7e4WzidXY9oMwo5RZ4T6Hd1ixwfp96PWbAJBVTHzK7O6Ky5oJB1HZMqmUEFlkG" + "y\n" + "xpa4zI1Hkq31dy7bMN9BAc3HeWAnnbyxEycmuup1jiAGglZ31PyrmZ9tQg1WtNj54EHR3/" + "S2qTH\n" + "1Yc5GgD1FFtzPdWGkd2AyflogZmRmsz6PSrbXbdo+" + "txOrP337f3fzVo15DK2uyrTtqpBOnBKx6b\n" + "7MjJsz7tHWOAYP3WD6LU6cqGjFCNl1MmvLcxv6YtDTLSAqP27LrdtYHXFnJZI+" + "Tp3MWg68OpDPv\n" + "UMUM2lkQBoouKQ6swjE9Nml+1sz1PW+z6xt27zuj+skrX2ZvqR5z8kkuOfdPt43/1fMm/" + "grFG6f\n" + "Lss9JA7JG7tnZs/SfJUrfS3foJ9TvHCopJsV8nWx/t24bJn8Fo/5TjWJXMJIS+i+G36TsZ/" + "7Q9P\n" + "8ATfzfeOFofVSZv2/zvt+O3X/v65dJPjt/BiyfN1/wn0zre79nVej/ADG8ep4x2/" + "6Srjd6TdviF\n" + "52ko8m6/Ht9X1KnftEo+POwxzK8mSTF46vrH6T1/OEl5Okkl//Z/" + "+0uHFBob3Rvc2hvcCAzLjAA\n" + "OEJJTQQEAAAAAAArHAIAAAIAAhwCeAAfICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA" + "A\n" + "4QklNBCUAAAAAABD7Caa9B0wqNp2P4sxXqayFOEJJTQPqAAAAAB2wPD94bWwgdmVyc2lvbj0iM" + "S\n" + "4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPCFET0NUWVBFIHBsaXN0IFBVQkxJQyAiLS8vQXBwbGUg" + "Q\n" + "29tcHV0ZXIvL0RURCBQTElTVCAxLjAvL0VOIiAiaHR0cDovL3d3dy5hcHBsZS5jb20vRFREcy9" + "Q\n" + "cm9wZXJ0eUxpc3QtMS4wLmR0ZCI+" + "CjxwbGlzdCB2ZXJzaW9uPSIxLjAiPgo8ZGljdD4KCTxrZXk\n" + "+Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQuUE1Ib3Jpem9udGFsUmVzPC9rZXk+" + "Cgk8ZGljdD\n" + "4KCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuY3JlYXRvcjwva2V5PgoJCTxzdHJpbmc+" + "Y\n" + "29tLmFwcGxlLnByaW50aW5nbWFuYWdlcjwvc3RyaW5nPgoJCTxrZXk+" + "Y29tLmFwcGxlLnByaW50\n" + "LnRpY2tldC5pdGVtQXJyYXk8L2tleT4KCQk8YXJyYXk+" + "CgkJCTxkaWN0PgoJCQkJPGtleT5jb20\n" + 
"uYXBwbGUucHJpbnQuUGFnZUZvcm1hdC5QTUhvcml6b250YWxSZXM8L2tleT4KCQkJCTxyZWFsP" + "j\n" + "cyPC9yZWFsPgoJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNsaWVudDwva2V5PgoJ" + "C\n" + "QkJPHN0cmluZz5jb20uYXBwbGUucHJpbnRpbmdtYW5hZ2VyPC9zdHJpbmc+" + "CgkJCQk8a2V5PmNv\n" + "bS5hcHBsZS5wcmludC50aWNrZXQubW9kRGF0ZTwva2V5PgoJCQkJPGRhdGU+" + "MjAwNy0wMS0zMFQ\n" + "yMjowODo0MVo8L2RhdGU+" + "CgkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuc3RhdGVGbG\n" + "FnPC9rZXk+CgkJCQk8aW50ZWdlcj4wPC9pbnRlZ2VyPgoJCQk8L2RpY3Q+" + "CgkJPC9hcnJheT4KC\n" + "TwvZGljdD4KCTxrZXk+" + "Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQuUE1PcmllbnRhdGlvbjwv\n" + "a2V5PgoJPGRpY3Q+" + "CgkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNyZWF0b3I8L2tleT4\n" + "KCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbmFnZXI8L3N0cmluZz4KCQk8a2V5PmNvb" + "S\n" + "5hcHBsZS5wcmludC50aWNrZXQuaXRlbUFycmF5PC9rZXk+" + "CgkJPGFycmF5PgoJCQk8ZGljdD4KC\n" + "QkJCTxrZXk+" + "Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQuUE1PcmllbnRhdGlvbjwva2V5PgoJ\n" + "CQkJPGludGVnZXI+MTwvaW50ZWdlcj4KCQkJCTxrZXk+" + "Y29tLmFwcGxlLnByaW50LnRpY2tldC5\n" + "jbGllbnQ8L2tleT4KCQkJCTxzdHJpbmc+" + "Y29tLmFwcGxlLnByaW50aW5nbWFuYWdlcjwvc3RyaW\n" + "5nPgoJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0Lm1vZERhdGU8L2tleT4KCQkJCTxk" + "Y\n" + "XRlPjIwMDctMDEtMzBUMjI6MDg6NDFaPC9kYXRlPgoJCQkJPGtleT5jb20uYXBwbGUucHJpbnQ" + "u\n" + "dGlja2V0LnN0YXRlRmxhZzwva2V5PgoJCQkJPGludGVnZXI+" + "MDwvaW50ZWdlcj4KCQkJPC9kaWN\n" + "0PgoJCTwvYXJyYXk+Cgk8L2RpY3Q+" + "Cgk8a2V5PmNvbS5hcHBsZS5wcmludC5QYWdlRm9ybWF0Ll\n" + "BNU2NhbGluZzwva2V5PgoJPGRpY3Q+" + "CgkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNyZ\n" + "WF0b3I8L2tleT4KCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbmFnZXI8L3N0cmluZz4" + "K\n" + "CQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuaXRlbUFycmF5PC9rZXk+" + "CgkJPGFycmF5Pgo\n" + "JCQk8ZGljdD4KCQkJCTxrZXk+" + "Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQuUE1TY2FsaW5nPC\n" + "9rZXk+" + "CgkJCQk8cmVhbD4xPC9yZWFsPgoJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0L\n" + "mNsaWVudDwva2V5PgoJCQkJPHN0cmluZz5jb20uYXBwbGUucHJpbnRpbmdtYW5hZ2VyPC9zdHJ" + "p\n" + "bmc+" + "CgkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQubW9kRGF0ZTwva2V5PgoJCQkJPGR\n" + "hdGU+MjAwNy0wMS0zMFQyMjowODo0MVo8L2RhdGU+" + "CgkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC\n" + "50aWNrZXQuc3RhdGVGbGFnPC9rZXk+" + "CgkJCQk8aW50ZWdlcj4wPC9pbnRlZ2VyPgoJCQk8L2RpY\n" + "3Q+CgkJPC9hcnJheT4KCTwvZGljdD4KCTxrZXk+" + "Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQu\n" + "UE1WZXJ0aWNhbFJlczwva2V5PgoJPGRpY3Q+" + "CgkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V\n" + "0LmNyZWF0b3I8L2tleT4KCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbmFnZXI8L3N0c" + "m\n" + "luZz4KCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuaXRlbUFycmF5PC9rZXk+" + "CgkJPGFyc\n" + "mF5PgoJCQk8ZGljdD4KCQkJCTxrZXk+" + "Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYXQuUE1WZXJ0\n" + "aWNhbFJlczwva2V5PgoJCQkJPHJlYWw+NzI8L3JlYWw+" + "CgkJCQk8a2V5PmNvbS5hcHBsZS5wcml\n" + "udC50aWNrZXQuY2xpZW50PC9rZXk+" + "CgkJCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbm\n" + "FnZXI8L3N0cmluZz4KCQkJCTxrZXk+" + "Y29tLmFwcGxlLnByaW50LnRpY2tldC5tb2REYXRlPC9rZ\n" + "Xk+CgkJCQk8ZGF0ZT4yMDA3LTAxLTMwVDIyOjA4OjQxWjwvZGF0ZT4KCQkJCTxrZXk+" + "Y29tLmFw\n" + "cGxlLnByaW50LnRpY2tldC5zdGF0ZUZsYWc8L2tleT4KCQkJCTxpbnRlZ2VyPjA8L2ludGVnZX" + "I\n" + "+CgkJCTwvZGljdD4KCQk8L2FycmF5PgoJPC9kaWN0PgoJPGtleT5jb20uYXBwbGUucHJpbnQuU" + "G\n" + "FnZUZvcm1hdC5QTVZlcnRpY2FsU2NhbGluZzwva2V5PgoJPGRpY3Q+" + "CgkJPGtleT5jb20uYXBwb\n" + "GUucHJpbnQudGlja2V0LmNyZWF0b3I8L2tleT4KCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGl" + "u\n" + "Z21hbmFnZXI8L3N0cmluZz4KCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuaXRlbUFycm" + "F\n" + "5PC9rZXk+CgkJPGFycmF5PgoJCQk8ZGljdD4KCQkJCTxrZXk+" + 
"Y29tLmFwcGxlLnByaW50LlBhZ2\n" + "VGb3JtYXQuUE1WZXJ0aWNhbFNjYWxpbmc8L2tleT4KCQkJCTxyZWFsPjE8L3JlYWw+" + "CgkJCQk8a\n" + "2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuY2xpZW50PC9rZXk+" + "CgkJCQk8c3RyaW5nPmNvbS5h\n" + "cHBsZS5wcmludGluZ21hbmFnZXI8L3N0cmluZz4KCQkJCTxrZXk+" + "Y29tLmFwcGxlLnByaW50LnR\n" + "pY2tldC5tb2REYXRlPC9rZXk+" + "CgkJCQk8ZGF0ZT4yMDA3LTAxLTMwVDIyOjA4OjQxWjwvZGF0ZT\n" + "4KCQkJCTxrZXk+" + "Y29tLmFwcGxlLnByaW50LnRpY2tldC5zdGF0ZUZsYWc8L2tleT4KCQkJCTxpb\n" + "nRlZ2VyPjA8L2ludGVnZXI+" + "CgkJCTwvZGljdD4KCQk8L2FycmF5PgoJPC9kaWN0PgoJPGtleT5j\n" + "b20uYXBwbGUucHJpbnQuc3ViVGlja2V0LnBhcGVyX2luZm9fdGlja2V0PC9rZXk+" + "Cgk8ZGljdD4\n" + "KCQk8a2V5PmNvbS5hcHBsZS5wcmludC5QYWdlRm9ybWF0LlBNQWRqdXN0ZWRQYWdlUmVjdDwva" + "2\n" + "V5PgoJCTxkaWN0PgoJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuY3JlYXRvcjwva2V5" + "P\n" + "goJCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbmFnZXI8L3N0cmluZz4KCQkJPGtleT5" + "j\n" + "b20uYXBwbGUucHJpbnQudGlja2V0Lml0ZW1BcnJheTwva2V5PgoJCQk8YXJyYXk+" + "CgkJCQk8ZGl\n" + "jdD4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC5QYWdlRm9ybWF0LlBNQWRqdXN0ZWRQYWdlU" + "m\n" + "VjdDwva2V5PgoJCQkJCTxhcnJheT4KCQkJCQkJPHJlYWw+" + "MC4wPC9yZWFsPgoJCQkJCQk8cmVhb\n" + "D4wLjA8L3JlYWw+CgkJCQkJCTxyZWFsPjczNDwvcmVhbD4KCQkJCQkJPHJlYWw+" + "NTc2PC9yZWFs\n" + "PgoJCQkJCTwvYXJyYXk+" + "CgkJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNsaWVudDw\n" + "va2V5PgoJCQkJCTxzdHJpbmc+" + "Y29tLmFwcGxlLnByaW50aW5nbWFuYWdlcjwvc3RyaW5nPgoJCQ\n" + "kJCTxrZXk+Y29tLmFwcGxlLnByaW50LnRpY2tldC5tb2REYXRlPC9rZXk+CgkJCQkJPGRhdGU+" + "M\n" + "jAwNy0wMS0zMFQyMjowODo0MVo8L2RhdGU+" + "CgkJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlj\n" + "a2V0LnN0YXRlRmxhZzwva2V5PgoJCQkJCTxpbnRlZ2VyPjA8L2ludGVnZXI+" + "CgkJCQk8L2RpY3Q\n" + "+CgkJCTwvYXJyYXk+CgkJPC9kaWN0PgoJCTxrZXk+" + "Y29tLmFwcGxlLnByaW50LlBhZ2VGb3JtYX\n" + "QuUE1BZGp1c3RlZFBhcGVyUmVjdDwva2V5PgoJCTxkaWN0PgoJCQk8a2V5PmNvbS5hcHBsZS5w" + "c\n" + "mludC50aWNrZXQuY3JlYXRvcjwva2V5PgoJCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21" + "h\n" + "bmFnZXI8L3N0cmluZz4KCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0Lml0ZW1BcnJheT" + "w\n" + "va2V5PgoJCQk8YXJyYXk+" + "CgkJCQk8ZGljdD4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC5QYW\n" + "dlRm9ybWF0LlBNQWRqdXN0ZWRQYXBlclJlY3Q8L2tleT4KCQkJCQk8YXJyYXk+" + "CgkJCQkJCTxyZ\n" + "WFsPi0xODwvcmVhbD4KCQkJCQkJPHJlYWw+" + "LTE4PC9yZWFsPgoJCQkJCQk8cmVhbD43NzQ8L3Jl\n" + "YWw+CgkJCQkJCTxyZWFsPjU5NDwvcmVhbD4KCQkJCQk8L2FycmF5PgoJCQkJCTxrZXk+" + "Y29tLmF\n" + "wcGxlLnByaW50LnRpY2tldC5jbGllbnQ8L2tleT4KCQkJCQk8c3RyaW5nPmNvbS5hcHBsZS5wc" + "m\n" + "ludGluZ21hbmFnZXI8L3N0cmluZz4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQu" + "b\n" + "W9kRGF0ZTwva2V5PgoJCQkJCTxkYXRlPjIwMDctMDEtMzBUMjI6MDg6NDFaPC9kYXRlPgoJCQk" + "J\n" + "CTxrZXk+" + "Y29tLmFwcGxlLnByaW50LnRpY2tldC5zdGF0ZUZsYWc8L2tleT4KCQkJCQk8aW50ZWd\n" + "lcj4wPC9pbnRlZ2VyPgoJCQkJPC9kaWN0PgoJCQk8L2FycmF5PgoJCTwvZGljdD4KCQk8a2V5P" + "m\n" + "NvbS5hcHBsZS5wcmludC5QYXBlckluZm8uUE1QYXBlck5hbWU8L2tleT4KCQk8ZGljdD4KCQkJ" + "P\n" + "GtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNyZWF0b3I8L2tleT4KCQkJPHN0cmluZz5jb20" + "u\n" + "YXBwbGUucHJpbnQucG0uUG9zdFNjcmlwdDwvc3RyaW5nPgoJCQk8a2V5PmNvbS5hcHBsZS5wcm" + "l\n" + "udC50aWNrZXQuaXRlbUFycmF5PC9rZXk+" + "CgkJCTxhcnJheT4KCQkJCTxkaWN0PgoJCQkJCTxrZX\n" + "k+" + "Y29tLmFwcGxlLnByaW50LlBhcGVySW5mby5QTVBhcGVyTmFtZTwva2V5PgoJCQkJCTxzdHJpb" + "\n" + "mc+bmEtbGV0dGVyPC9zdHJpbmc+" + "CgkJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNs\n" + "aWVudDwva2V5PgoJCQkJCTxzdHJpbmc+" + "Y29tLmFwcGxlLnByaW50LnBtLlBvc3RTY3JpcHQ8L3N\n" + "0cmluZz4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQubW9kRGF0ZTwva2V5PgoJC" + "Q\n" + 
"kJCTxkYXRlPjIwMDMtMDctMDFUMTc6NDk6MzZaPC9kYXRlPgoJCQkJCTxrZXk+" + "Y29tLmFwcGxlL\n" + "nByaW50LnRpY2tldC5zdGF0ZUZsYWc8L2tleT4KCQkJCQk8aW50ZWdlcj4xPC9pbnRlZ2VyPgo" + "J\n" + "CQkJPC9kaWN0PgoJCQk8L2FycmF5PgoJCTwvZGljdD4KCQk8a2V5PmNvbS5hcHBsZS5wcmludC" + "5\n" + "QYXBlckluZm8uUE1VbmFkanVzdGVkUGFnZVJlY3Q8L2tleT4KCQk8ZGljdD4KCQkJPGtleT5jb" + "2\n" + "0uYXBwbGUucHJpbnQudGlja2V0LmNyZWF0b3I8L2tleT4KCQkJPHN0cmluZz5jb20uYXBwbGUu" + "c\n" + "HJpbnQucG0uUG9zdFNjcmlwdDwvc3RyaW5nPgoJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWN" + "r\n" + "ZXQuaXRlbUFycmF5PC9rZXk+CgkJCTxhcnJheT4KCQkJCTxkaWN0PgoJCQkJCTxrZXk+" + "Y29tLmF\n" + "wcGxlLnByaW50LlBhcGVySW5mby5QTVVuYWRqdXN0ZWRQYWdlUmVjdDwva2V5PgoJCQkJCTxhc" + "n\n" + "JheT4KCQkJCQkJPHJlYWw+MC4wPC9yZWFsPgoJCQkJCQk8cmVhbD4wLjA8L3JlYWw+" + "CgkJCQkJC\n" + "TxyZWFsPjczNDwvcmVhbD4KCQkJCQkJPHJlYWw+NTc2PC9yZWFsPgoJCQkJCTwvYXJyYXk+" + "CgkJ\n" + "CQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNsaWVudDwva2V5PgoJCQkJCTxzdHJpbm" + "c\n" + "+Y29tLmFwcGxlLnByaW50aW5nbWFuYWdlcjwvc3RyaW5nPgoJCQkJCTxrZXk+" + "Y29tLmFwcGxlLn\n" + "ByaW50LnRpY2tldC5tb2REYXRlPC9rZXk+CgkJCQkJPGRhdGU+" + "MjAwNy0wMS0zMFQyMjowODo0M\n" + "Vo8L2RhdGU+" + "CgkJCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LnN0YXRlRmxhZzwva2V5\n" + "PgoJCQkJCTxpbnRlZ2VyPjA8L2ludGVnZXI+CgkJCQk8L2RpY3Q+CgkJCTwvYXJyYXk+" + "CgkJPC9\n" + "kaWN0PgoJCTxrZXk+" + "Y29tLmFwcGxlLnByaW50LlBhcGVySW5mby5QTVVuYWRqdXN0ZWRQYXBlcl\n" + "JlY3Q8L2tleT4KCQk8ZGljdD4KCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2V0LmNyZWF0" + "b\n" + "3I8L2tleT4KCQkJPHN0cmluZz5jb20uYXBwbGUucHJpbnQucG0uUG9zdFNjcmlwdDwvc3RyaW5" + "n\n" + "PgoJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuaXRlbUFycmF5PC9rZXk+" + "CgkJCTxhcnJ\n" + "heT4KCQkJCTxkaWN0PgoJCQkJCTxrZXk+" + "Y29tLmFwcGxlLnByaW50LlBhcGVySW5mby5QTVVuYW\n" + "RqdXN0ZWRQYXBlclJlY3Q8L2tleT4KCQkJCQk8YXJyYXk+" + "CgkJCQkJCTxyZWFsPi0xODwvcmVhb\n" + "D4KCQkJCQkJPHJlYWw+LTE4PC9yZWFsPgoJCQkJCQk8cmVhbD43NzQ8L3JlYWw+" + "CgkJCQkJCTxy\n" + "ZWFsPjU5NDwvcmVhbD4KCQkJCQk8L2FycmF5PgoJCQkJCTxrZXk+" + "Y29tLmFwcGxlLnByaW50LnR\n" + "pY2tldC5jbGllbnQ8L2tleT4KCQkJCQk8c3RyaW5nPmNvbS5hcHBsZS5wcmludGluZ21hbmFnZ" + "X\n" + "I8L3N0cmluZz4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQubW9kRGF0ZTwva2V5" + "P\n" + "goJCQkJCTxkYXRlPjIwMDctMDEtMzBUMjI6MDg6NDFaPC9kYXRlPgoJCQkJCTxrZXk+" + "Y29tLmFw\n" + "cGxlLnByaW50LnRpY2tldC5zdGF0ZUZsYWc8L2tleT4KCQkJCQk8aW50ZWdlcj4wPC9pbnRlZ2" + "V\n" + "yPgoJCQkJPC9kaWN0PgoJCQk8L2FycmF5PgoJCTwvZGljdD4KCQk8a2V5PmNvbS5hcHBsZS5wc" + "m\n" + "ludC5QYXBlckluZm8ucHBkLlBNUGFwZXJOYW1lPC9rZXk+CgkJPGRpY3Q+CgkJCTxrZXk+" + "Y29tL\n" + "mFwcGxlLnByaW50LnRpY2tldC5jcmVhdG9yPC9rZXk+CgkJCTxzdHJpbmc+" + "Y29tLmFwcGxlLnBy\n" + "aW50LnBtLlBvc3RTY3JpcHQ8L3N0cmluZz4KCQkJPGtleT5jb20uYXBwbGUucHJpbnQudGlja2" + "V\n" + "0Lml0ZW1BcnJheTwva2V5PgoJCQk8YXJyYXk+" + "CgkJCQk8ZGljdD4KCQkJCQk8a2V5PmNvbS5hcH\n" + "BsZS5wcmludC5QYXBlckluZm8ucHBkLlBNUGFwZXJOYW1lPC9rZXk+" + "CgkJCQkJPHN0cmluZz5VU\n" + "yBMZXR0ZXI8L3N0cmluZz4KCQkJCQk8a2V5PmNvbS5hcHBsZS5wcmludC50aWNrZXQuY2xpZW5" + "0\n" + "PC9rZXk+" + "CgkJCQkJPHN0cmluZz5jb20uYXBwbGUucHJpbnQucG0uUG9zdFNjcmlwdDwvc3RyaW5\n" + "nPgoJCQkJCTxrZXk+Y29tLmFwcGxlLnByaW50LnRpY2tldC5tb2REYXRlPC9rZXk+" + "CgkJCQkJPG\n" + "RhdGU+MjAwMy0wNy0wMVQxNzo0OTozNlo8L2RhdGU+" + "CgkJCQkJPGtleT5jb20uYXBwbGUucHJpb\n" + "nQudGlja2V0LnN0YXRlRmxhZzwva2V5PgoJCQkJCTxpbnRlZ2VyPjE8L2ludGVnZXI+" + "CgkJCQk8\n" + "L2RpY3Q+CgkJCTwvYXJyYXk+CgkJPC9kaWN0PgoJCTxrZXk+" + "Y29tLmFwcGxlLnByaW50LnRpY2t\n" + "ldC5BUElWZXJzaW9uPC9rZXk+CgkJPHN0cmluZz4wMC4yMDwvc3RyaW5nPgoJCTxrZXk+" + "Y29tLm\n" + "FwcGxlLnByaW50LnRpY2tldC5wcml2YXRlTG9jazwva2V5PgoJCTxmYWxzZS8+" + 
"CgkJPGtleT5jb\n" + "20uYXBwbGUucHJpbnQudGlja2V0LnR5cGU8L2tleT4KCQk8c3RyaW5nPmNvbS5hcHBsZS5wcml" + "u\n" + "dC5QYXBlckluZm9UaWNrZXQ8L3N0cmluZz4KCTwvZGljdD4KCTxrZXk+" + "Y29tLmFwcGxlLnByaW5\n" + "0LnRpY2tldC5BUElWZXJzaW9uPC9rZXk+Cgk8c3RyaW5nPjAwLjIwPC9zdHJpbmc+" + "Cgk8a2V5Pm\n" + "NvbS5hcHBsZS5wcmludC50aWNrZXQucHJpdmF0ZUxvY2s8L2tleT4KCTxmYWxzZS8+" + "Cgk8a2V5P\n" + "mNvbS5hcHBsZS5wcmludC50aWNrZXQudHlwZTwva2V5PgoJPHN0cmluZz5jb20uYXBwbGUucHJ" + "p\n" + "bnQuUGFnZUZvcm1hdFRpY2tldDwvc3RyaW5nPgo8L2RpY3Q+CjwvcGxpc3Q+" + "CjhCSU0D6QAAAAA\n" + "AeAADAAAASABIAAAAAALeAkD/7v/uAwYCUgNnBSgD/" + "AACAAAASABIAAAAAALYAigAAQAAAGQAAA\n" + "ABAAMDAwAAAAF//" + "wABAAEAAAAAAAAAAAAAAABoCAAZAZAAAAAAACAAAAAAAAAAAAAAAAAAAAAAA\n" + "AAAAAAAAAAAADhCSU0D7QAAAAAAEABIAAAAAQABAEgAAAABAAE4QklNBCYAAAAAAA4AAAAAAAA" + "A\n" + "AAAAP4AAADhCSU0EDQAAAAAABAAAAB44QklNBBkAAAAAAAQAAAAeOEJJTQPzAAAAAAAJAAAAAA" + "A\n" + "AAAABADhCSU0ECgAAAAAAAQAAOEJJTScQAAAAAAAKAAEAAAAAAAAAAThCSU0D9QAAAAAASAAvZ" + "m\n" + "YAAQBsZmYABgAAAAAAAQAvZmYAAQChmZoABgAAAAAAAQAyAAAAAQBaAAAABgAAAAAAAQA1AAAA" + "A\n" + "QAtAAAABgAAAAAAAThCSU0D+AAAAAAAcAAA/////////////////////////////wPoAAAAAP/" + "/\n" + "//////////////////////////8D6AAAAAD/////////////////////////////A+gAAAAA//" + "/\n" + "//////////////////////////" + "wPoAAA4QklNBAgAAAAAABAAAAABAAACQAAAAkAAAAAAOEJJTQ\n" + "QeAAAAAAAEAAAAADhCSU0EGgAAAAADRQAAAAYAAAAAAAAAAAAAAGQAAABkAAAACABEAFMAQwAw" + "A\n" + "DIAMwAyADUAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAGQAAABkAAAAAAAAAAA" + "A\n" + "AAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAEAAAAAAABudWxsAAAAAgAAAAZib3VuZH" + "N\n" + "PYmpjAAAAAQAAAAAAAFJjdDEAAAAEAAAAAFRvcCBsb25nAAAAAAAAAABMZWZ0bG9uZwAAAAAAA" + "A\n" + "AAQnRvbWxvbmcAAABkAAAAAFJnaHRsb25nAAAAZAAAAAZzbGljZXNWbExzAAAAAU9iamMAAAAB" + "A\n" + "AAAAAAFc2xpY2UAAAASAAAAB3NsaWNlSURsb25nAAAAAAAAAAdncm91cElEbG9uZwAAAAAAAAA" + "G\n" + "b3JpZ2luZW51bQAAAAxFU2xpY2VPcmlnaW4AAAANYXV0b0dlbmVyYXRlZAAAAABUeXBlZW51bQ" + "A\n" + "AAApFU2xpY2VUeXBlAAAAAEltZyAAAAAGYm91bmRzT2JqYwAAAAEAAAAAAABSY3QxAAAABAAAA" + "A\n" + "BUb3AgbG9uZwAAAAAAAAAATGVmdGxvbmcAAAAAAAAAAEJ0b21sb25nAAAAZAAAAABSZ2h0bG9u" + "Z\n" + "wAAAGQAAAADdXJsVEVYVAAAAAEAAAAAAABudWxsVEVYVAAAAAEAAAAAAABNc2dlVEVYVAAAAAE" + "A\n" + "AAAAAAZhbHRUYWdURVhUAAAAAQAAAAAADmNlbGxUZXh0SXNIVE1MYm9vbAEAAAAIY2VsbFRleH" + "R\n" + "URVhUAAAAAQAAAAAACWhvcnpBbGlnbmVudW0AAAAPRVNsaWNlSG9yekFsaWduAAAAB2RlZmF1b" + "H\n" + "QAAAAJdmVydEFsaWduZW51bQAAAA9FU2xpY2VWZXJ0QWxpZ24AAAAHZGVmYXVsdAAAAAtiZ0Nv" + "b\n" + "G9yVHlwZWVudW0AAAARRVNsaWNlQkdDb2xvclR5cGUAAAAATm9uZQAAAAl0b3BPdXRzZXRsb25" + "n\n" + "AAAAAAAAAApsZWZ0T3V0c2V0bG9uZwAAAAAAAAAMYm90dG9tT3V0c2V0bG9uZwAAAAAAAAALcm" + "l\n" + "naHRPdXRzZXRsb25nAAAAAAA4QklNBBEAAAAAAAEBADhCSU0EFAAAAAAABAAAAAE4QklNBAwAA" + "A\n" + "AACfkAAAABAAAAZAAAAGQAAAEsAAB1MAAACd0AGAAB/9j/4AAQSkZJRgABAgEASABIAAD/" + "7QAMQ\n" + "WRvYmVfQ00AAv/uAA5BZG9iZQBkgAAAAAH/" + "2wCEAAwICAgJCAwJCQwRCwoLERUPDAwPFRgTExUT\n" + "ExgRDAwMDAwMEQwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwBDQsLDQ4NEA4OEBQODg4UFA" + "4\n" + "ODg4UEQwMDAwMEREMDAwMDAwRDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDP/" + "AABEIAGQAZA\n" + "MBIgACEQEDEQH/3QAEAAf/xAE/" + "AAABBQEBAQEBAQAAAAAAAAADAAECBAUGBwgJCgsBAAEFAQEBA\n" + "QEBAAAAAAAAAAEAAgMEBQYHCAkKCxAAAQQBAwIEAgUHBggFAwwzAQACEQMEIRIxBUFRYRMicYE" + "y\n" + "BhSRobFCIyQVUsFiMzRygtFDByWSU/" + "Dh8WNzNRaisoMmRJNUZEXCo3Q2F9JV4mXys4TD03Xj80Y\n" + "nlKSFtJXE1OT0pbXF1eX1VmZ2hpamtsbW5vY3R1dnd4eXp7fH1+" + "f3EQACAgECBAQDBAUGBwcGBT\n" + "UBAAIRAyExEgRBUWFxIhMFMoGRFKGxQiPBUtHwMyRi4XKCkkNTFWNzNPElBhaisoMHJjXC0kST" + "V\n" + 
"KMXZEVVNnRl4vKzhMPTdePzRpSkhbSVxNTk9KW1xdXl9VZmdoaWprbG1ub2JzdHV2d3h5ent8f" + "/\n" + "2gAMAwEAAhEDEQA/" + "APLtso1NRc0vP0Rok8NYyPEfijOG2ljBoAJPxKFppZtbS4Rz38kV+OPRDge\n" + "T89EPHBfvLjtb3P8A30K/j47cgsrYNxGpPYJpK8RtyXUlvPfsobV0GV0uippLiX3EaMb2/" + "rKgMB\n" + "1ghoiNST4BESCjjLmxqmKtvxiXQ0cd0q8E2bjIDWjk9z5I8QW8JaoHcdkUePZJtZD9p8YU/" + "Rsc/\n" + "wBNjS5zjDWjUk+SSKYaJLYq+qWeYGQ5lBPLJ3OA8wz2/wDSWni/" + "U3H2AXW2l2oloa0f9LcjSLeU\n" + "hJdb/wAyqd387Zt+DZ5SSpVh/9DzO6dw7gGPuVn6ft/" + "kyPkqwlxjw1Rnh24QNWjUeR5TSuDc6bg\n" + "fatpsJZQ3sNC4rWfkVYpbi4LAb3aANEkFLp7GHGYxuhAj4K/" + "hYVNDjYGzZ++eSSoSbLZjGgwxul\n" + "XNrPqO35FukdmzyXOQeqtqwqRg4o/SOAN9ng3/" + "AMzW02txZ9I+ZHKr241UOcWDaz3uLtSSPEpWu\n" + "rR5XPeylmyNr4BIPPCyH2Oc6T8kXNvddkPe/" + "VzjJPxQAJMKeIoNScrPk2MbfddXUNXvcGtPx0Xb\n" + "dJ6NXjOD2Dfdw6w9v5LFW+q/1WLA3Ly9LSJaz91p/" + "wDRjl2lOLWwAMbEJErWjRgESYieVdZhsMF\n" + "wMt08ldrx/vVivHaOdSgCoud9krmElpba93ASTlr/AP/R83ohr97voiJV/" + "Fq9QvsI+mdPgs1thc\n" + "BWO5C38CoOY1g78qOejLiGvknxLAyGtExp5K9uzGt9RrNw7DhRfQKKx6bZIGgPj4rPycLqWVtI" + "s\n" + "JGu5skDyTBRZtQNrb1fU8xrtpBaO4MLQxcx1sNuEjt5rMGJR9noY5hF7Wxa8aAnxVvDb6bgHH2" + "z\n" + "omk0e64ajUUXnev9Idi5rrWAux7SXNd4E/muS+rHSjm9VbPtZjj1CSJBI+g3+0uh69b+iDG/" + "QcD\n" + "u0nQCeFP6l0MZhWX/" + "AJ1xM+QafY1TQlY1a+WABsdXp8Sp27aBH+vZaVbC0ADlVcASwtdOolp8Ct\n" + "BjmtGv0uI8EmJmxkIjWkmPEKLSPiidxIgJKRbDPCSN5pJyH//S87uw/" + "suZ6c72iC13kVs9PdDmk\n" + "KllVziV3cuafc7yP0QjYFh26cqM6hsxAjIj6u6xzbDHh3R663AaceH+" + "5BwdruVp2PqZUA0a9yo6\n" + "DPQajscnXb8YQdzC8H909joiZttoxoBIa4gGOQ3uqh+z1RuD2Ds4j2n+39FNKaFMevS/" + "p5LPpSA\n" + "I8/b/ABW70THZXj11VjaIAIHgFl5VdD8AneDMaec6Lb6QAKmu7x+VSw2a3MdF/" + "rF9YKeh4B9OHZ\n" + "lpAprPH8p7v5DFwrPrV9YDa645toLjMaFo8mtcEvrhkWZHXbg7T0Q2to8o3f8AfkarEitlVTKn" + "Y\n" + "ra992QQ2wOfG57H2bg7H2fzbFKA130P6n9dHWemCx5H2mk7LgPH818f8IuhAka6ea8y/" + "wAWrcod\n" + "VyrceRhBsPae5J/Qj+sxq9KDpMuMuKBCEntnny/1CSaWxM6pIKf/0/" + "MvtF3pCrefTBnbOi1elP3\n" + "Et8Vi+Sv9LuNd4HwTSNGSEqkLerwtwn+SYKtOeS4A8Krh2j1D/" + "K1Vu4B5gaDmVDJtAr7zYYGoRB\n" + "QDWQ4c9h/" + "csuyjI3fobnDyJR8fF6ltcTaXRwCkAuAsfMGr1TGFNdTmEwLWS2dIK6npLgK2T4Lle\n" + "pUZxoc9+6K4eR5NO5bPT73NoqIfoILT/JcFJDZr8zGiNXnvrfiur6/" + "Y8tht7WvaexgbXf8AUrFt\n" + "8IExyvRusYDOsYTAIbfWdzHRJ8HN/" + "tLj7OgdRZawmreHP2gt9wEfvtH0f7SkDXe7+o+AOn9DquL\n" + "f0mV+leQPH6H+axafUvrB07ptJtyshtTZDTEudJ7bWS5V6MmyltVLn7ht2hwECQP+isb60/" + "Vqvr\n" + "tbLsa1lObVIJd9Gxv5rXx9F7fzHpIbf/jgfVnd/TLYj6XoOhJcP/zE+sOzd6dW7dt2eo3dH7/" + "9R\n" + "JJWj//" + "U8uiGFx76BFZLQ2xvLeVGAWQrFDJbtKBSHd6blNura4H3BbDXB7InVcZXZdh2bmTt7hbO\n" + "J1dj2gzCjlFnhPod3WLHB+" + "n3o9ZsAkFVMfMrs7orLmgkHUdkyqZQQWWQbLGlrjMjUeSrfV3Ltsw\n" + "30EBzcd5YCedvLETJya66nWOIAaCVnfU/" + "KuZn21CDVa02PngQdHf9LapMfVhzkaAPUUW3M91YaR\n" + "3YDJ+WiBmZGazPo9Kttdt2j63E6s/fft/d/NWjXkMra7KtO2qkE6cErHpvsyMmzPu0dY4Bg/" + "dYP\n" + "otTpyoaMUI2XUya8tzG/pi0NMtICo/" + "bsut21gdcWclkj5OncxaDrw6kM+9QxQzaWRAGii4pDqzC\n" + "MT02aX7WzPU9b7PrG3bvO6P6yStfZm+pHnPySS4590+3jf/" + "V8yb+CsUbp8uyz0kDskbu2dmz9J8\n" + "lSt9Ld+gn1O8cKikmxXydbH+3bhsmfwWj/lONYlcwkhL6L4bfpOxn/tD0/wBN/N944Wh9VJm/" + "b/\n" + "O+347df+/rl0k+O38GLJ83X/CfTOt7v2dV6P8AMbx6njHb/" + "pKuN3pN2+IXnaSjybr8e31fUqd+0\n" + "Sj487DHMryZJMXjq+sfpPX84SXk6SSX/" + "9kAOEJJTQQhAAAAAABVAAAAAQEAAAAPAEEAZABvAGIA\n" + "ZQAgAFAAaABvAHQAbwBzAGgAbwBwAAAAEwBBAGQAbwBiAGUAIABQAGgAbwB0AG8AcwBoAG8AcA" + "A\n" + "gADcALgAwAAAAAQA4QklNBAYAAAAAAAcABQAAAAEBAP/" + "hFWdodHRwOi8vbnMuYWRvYmUuY29tL3\n" + "hhcC8xLjAvADw/eHBhY2tldCBiZWdpbj0n77u/" + "JyBpZD0nVzVNME1wQ2VoaUh6cmVTek5UY3prY\n" + "zlkJz8+Cjw/YWRvYmUteGFwLWZpbHRlcnMgZXNjPSJDUiI/" + 
"Pgo8eDp4YXBtZXRhIHhtbG5zOng9\n" + "J2Fkb2JlOm5zOm1ldGEvJyB4OnhhcHRrPSdYTVAgdG9vbGtpdCAyLjguMi0zMywgZnJhbWV3b3" + "J\n" + "rIDEuNSc+" + "CjxyZGY6UkRGIHhtbG5zOnJkZj0naHR0cDovL3d3dy53My5vcmcvMTk5OS8wMi8yMi\n" + "1yZGYtc3ludGF4LW5zIycgeG1sbnM6aVg9J2h0dHA6Ly9ucy5hZG9iZS5jb20vaVgvMS4wLyc+" + "C\n" + "gogPHJkZjpEZXNjcmlwdGlvbiBhYm91dD0ndXVpZDoyMmQwMmIwYS1iMjQ5LTExZGItOGFmOC0" + "5\n" + "MWQ1NDAzZjkyZjknCiAgeG1sbnM6cGRmPSdodHRwOi8vbnMuYWRvYmUuY29tL3BkZi8xLjMvJz" + "4\n" + "KICA8IS0tIHBkZjpTdWJqZWN0IGlzIGFsaWFzZWQgLS0+" + "CiA8L3JkZjpEZXNjcmlwdGlvbj4KCi\n" + "A8cmRmOkRlc2NyaXB0aW9uIGFib3V0PSd1dWlkOjIyZDAyYjBhLWIyNDktMTFkYi04YWY4LTkx" + "Z\n" + "DU0MDNmOTJmOScKICB4bWxuczpwaG90b3Nob3A9J2h0dHA6Ly9ucy5hZG9iZS5jb20vcGhvdG9" + "z\n" + "aG9wLzEuMC8nPgogIDwhLS0gcGhvdG9zaG9wOkNhcHRpb24gaXMgYWxpYXNlZCAtLT4KIDwvcm" + "R\n" + "mOkRlc2NyaXB0aW9uPgoKIDxyZGY6RGVzY3JpcHRpb24gYWJvdXQ9J3V1aWQ6MjJkMDJiMGEtY" + "j\n" + "I0OS0xMWRiLThhZjgtOTFkNTQwM2Y5MmY5JwogIHhtbG5zOnhhcD0naHR0cDovL25zLmFkb2Jl" + "L\n" + "mNvbS94YXAvMS4wLyc+" + "CiAgPCEtLSB4YXA6RGVzY3JpcHRpb24gaXMgYWxpYXNlZCAtLT4KIDwv\n" + "cmRmOkRlc2NyaXB0aW9uPgoKIDxyZGY6RGVzY3JpcHRpb24gYWJvdXQ9J3V1aWQ6MjJkMDJiMG" + "E\n" + "tYjI0OS0xMWRiLThhZjgtOTFkNTQwM2Y5MmY5JwogIHhtbG5zOnhhcE1NPSdodHRwOi8vbnMuY" + "W\n" + "RvYmUuY29tL3hhcC8xLjAvbW0vJz4KICA8eGFwTU06RG9jdW1lbnRJRD5hZG9iZTpkb2NpZDpw" + "a\n" + "G90b3Nob3A6MjJkMDJiMDYtYjI0OS0xMWRiLThhZjgtOTFkNTQwM2Y5MmY5PC94YXBNTTpEb2N" + "1\n" + "bWVudElEPgogPC9yZGY6RGVzY3JpcHRpb24+" + "CgogPHJkZjpEZXNjcmlwdGlvbiBhYm91dD0ndXV\n" + "pZDoyMmQwMmIwYS1iMjQ5LTExZGItOGFmOC05MWQ1NDAzZjkyZjknCiAgeG1sbnM6ZGM9J2h0d" + "H\n" + "A6Ly9wdXJsLm9yZy9kYy9lbGVtZW50cy8xLjEvJz4KICA8ZGM6ZGVzY3JpcHRpb24+" + "CiAgIDxyZ\n" + "GY6QWx0PgogICAgPHJkZjpsaSB4bWw6bGFuZz0neC1kZWZhdWx0Jz4gICAgICAgICAgICAgICA" + "g\n" + "ICAgICAgICAgICAgICAgPC9yZGY6bGk+" + "CiAgIDwvcmRmOkFsdD4KICA8L2RjOmRlc2NyaXB0aW9\n" + "uPgogPC9yZGY6RGVzY3JpcHRpb24+Cgo8L3JkZjpSREY+" + "CjwveDp4YXBtZXRhPgogICAgICAgIC\n" + "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" + "I\n" + "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICA" + "g\n" + "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" + "A\n" + "gICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" + "C\n" + "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" + "I\n" + "CAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" + "g\n" + "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" + "A\n" + "gCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" + "C\n" + "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAg" + "I\n" + "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" + "g\n" + "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgIC" + "A\n" + "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" + "C\n" + "AgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" + "I\n" + "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" + "g\n" + "ICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" + "A\n" + "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" + "A\n" + "ogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" + "I\n" + "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICA" + "g\n" + 
"ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" + "A\n" + "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgI" + "C\n" + "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" + "I\n" + "CAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" + "g\n" + "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" + "A\n" + "gICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" + "C\n" + "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAK" + "I\n" + "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" + "g\n" + "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgIC" + "A\n" + "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" + "C\n" + "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAg" + "I\n" + "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" + "g\n" + "ICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" + "A\n" + "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" + "C\n" + "AgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" + "I\n" + "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiA" + "g\n" + "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" + "A\n" + "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgI" + "C\n" + "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" + "I\n" + "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICA" + "g\n" + "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" + "A\n" + "gICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" + "C\n" + "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" + "I\n" + "CAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" + "g\n" + "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogIC" + "A\n" + "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" + "C\n" + "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAg" + "I\n" + "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" + "g\n" + "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgIC" + "A\n" + "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" + "C\n" + "AgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" + "I\n" + "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" + "g\n" + "ICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" + "A\n" + "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgI" + "C\n" + "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" + "I\n" + "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICA" + "g\n" + "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" + "A\n" + "gICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgI" + "C\n" + "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" + "I\n" + "CAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" + "g\n" + 
"ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" + "A\n" + "gICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" + "C\n" + "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAg" + "I\n" + "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" + "g\n" + "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgIC" + "A\n" + "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" + "C\n" + "AgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAg" + "I\n" + "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA" + "g\n" + "ICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" + "A\n" + "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI" + "C\n" + "AgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg" + "I\n" + "CAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICA" + "g\n" + "ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIC" + "A\n" + "gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAgICAgI" + "C\n" + "AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKPD94cGFja2V0IGVuZD0ndyc/" + "P\n" + "v/uAA5BZG9iZQBkQAAAAAH/" + "2wCEAAQDAwMDAwQDAwQGBAMEBgcFBAQFBwgGBgcGBggKCAkJCQkI\n" + "CgoMDAwMDAoMDAwMDAwMDAwMDAwMDAwMDAwMDAwBBAUFCAcIDwoKDxQODg4UFA4ODg4UEQwMDA" + "w\n" + "MEREMDAwMDAwRDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDP/" + "AABEIAGQAZAMBEQACEQEDEQ\n" + "H/3QAEAA3/" + "xAGiAAAABwEBAQEBAAAAAAAAAAAEBQMCBgEABwgJCgsBAAICAwEBAQEBAAAAAAAAA\n" + "AEAAgMEBQYHCAkKCxAAAgEDAwIEAgYHAwQCBgJzAQIDEQQABSESMUFRBhNhInGBFDKRoQcVsUI" + "j\n" + "wVLR4TMWYvAkcoLxJUM0U5KismNzwjVEJ5OjszYXVGR0w9LiCCaDCQoYGYSURUaktFbTVSga8u" + "P\n" + "zxNTk9GV1hZWltcXV5fVmdoaWprbG1ub2N0dXZ3eHl6e3x9fn9zhIWGh4iJiouMjY6PgpOUlZa" + "X\n" + "mJmam5ydnp+" + "So6SlpqeoqaqrrK2ur6EQACAgECAwUFBAUGBAgDA20BAAIRAwQhEjFBBVETYSIGc\n" + "YGRMqGx8BTB0eEjQhVSYnLxMyQ0Q4IWklMlomOywgdz0jXiRIMXVJMICQoYGSY2RRonZHRVN/" + "Kj\n" + "s8MoKdPj84SUpLTE1OT0ZXWFlaW1xdXl9UZWZnaGlqa2xtbm9kdXZ3eHl6e3x9fn9zhIWGh4iJ" + "i\n" + "ouMjY6Pg5SVlpeYmZqbnJ2en5KjpKWmp6ipqqusra6vr/2gAMAwEAAhEDEQA/" + "APBnplwPAdR+GB\n" + "KY6dYtNG1w39yh4+xb+zIksgEfFaRSSoIx8f7RPRRkSWQimM+lRmwWVXFWYigHxUUVoMiJM+" + "Fj0\n" + "tg0RBegLE0Wu+3c+GTBazFCGI7HtSp9slbFYYzyoBsegw2hY1Afl3wqqRqahk+" + "0tDgKpgu4DAUU\n" + "+HY+GRS2ePiMKtUB3G+KGuONq//" + "Q8OzpFbW5WnxMop4k9crG5ZnZNJkEOn21utVRYw7HxZtz+OR\n" + "vdsrZ2lRtci4aVxFEQA0neg/" + "ZXxJpTITNNuOFss0vSotYNvZ2qGRkPKSTqiU8Sdqk5SZU5Ix8XJ\n" + "NNZ8k6bp8TtM73OputUtYq0Unux/" + "hkRkJOzZLCAN2KR+VpbtSkCBaDnIzdlWu59u+XeJTjeASk8\n" + "+juZOESEAVqx8BvU/" + "PJibScTrTy09560hkWOGFd2YgFnPQKD19zhOSkxw2l8Vm6XAiYb8gg+k5O\n" + "9mnhoon9H3cs5s7WF5pp29OGGMFndyaAKBuTiEEPQLD8h/" + "NDmNdYlttNkYjlbFjcXCr3LLH8II8\n" + "C2WUGviZvon/OPWkm3RNSv72SYllMkKxQRV67CQMSKYQAxMkR/" + "wBC56d61P0heel4cYuVOXWvTp\n" + "h4Qjjf/9Hw5qBYyISaqjBV+QpvkAzKcki4HomnIxck/" + "wBhtlR2bhunvlDywddMUl4zW+kQ9FQ8X\n" + "nfuSewrtmPkycPvc/" + "DhMhvyegXOrWWhmLQPKlsj6xIAiLCoZkY96nv7npmJvI2XOjQFMl0fyRqM\n" + "NoxvZvrGt33wlATwiMnVnY1LEdSfuyXF3KIDmUu88w2XlnTl8raAlb2ZFfVL0jdYRtQnxc7BfD" + "C\n" + "OaJR7nm3me5tdOtjbMvp3ZRXkV6chVQRX79hmVjgZG+" + "jgZ5jHGhzecXF5LPL6jEjstSSaDM51Ka\n" + "6MZ9S1C0sEBe8uZo4YCBXdjxGw60wEWyEqfUHkT8vLXRJFuLdTcaqfhlvWUErtukZ3ABPUjIXT" + "E\n" + "m3rGmeV2Tk5UKz/AG/E/wAcgZKya20C3b02kjYtH8AqCygbkUH0nLYlgUb+gbWtPbpXt/" + "n2ybB/\n" + "/9Lw4oaVxGd+PxH3qBkGaY3KyiSP01IkiUclH8sg+LKydm6INvZvKsFu+" + 
"kWtvD8LRoFNRup6moO\n" + "aqd277HsGW+XPLmn6XM17FF6l7vW4fd2Zuu+" + "RFls2tmUNrLJb7TSBertGQGqetDkxE0na0pvtHs\n" + "QkszWyiGAG5laYlnkeMVHJj8sA5rPk+SvMepTalqlxd3B5zTOXdj/" + "MxqafLpm5xioh5nPK5kpRG\n" + "pkcKAST0A6k5NpfUP5K/ki1ssHmHzF+71KRQ8Nud/Qibb/kYw6/" + "yjbrXISlSH07YaHbWyxx2kXE\n" + "KACB2zHJtLI7XSelBRvH2xCpvaaTDHXkOTVBPcUG2479RlsdmJVPRtvV+ylenQ0y62FP/" + "9PxRpo\n" + "WG5FxKKxKFDA+GVS5NsebLdFsRePc3siVW4f4QR0QVAGYeSXR2unhtZ6s60K6jt+MMSFwtF2+" + "xX\n" + "wr7eGUGLlRPQMsE2vxQm7itxKg3VCfT2+" + "nb8cDYaCDtfOXmCCcROrQrUhkkCHYn6emRMqZxjbLd\n" + "F1+W/" + "4xajHzjNCtQKMffETWUdngX5p+QZ9A8xS6hbo0ui37NNDPT7DOalHpsCD08Rmyw5ARTpdV\n" + "gIPEF35MeRn80ed4S5EdrpKm9kZ15K0iH92hB7Me/tmS60vt/" + "QrCYyekiBdgSTXcjqV9q9MokFD\n" + "N7S3aFVVR8RoK9zldqndvAY6nffr/AGYQqLhjdpCoIAZW22HavU/LJBUP9WblX0xTw7fOmWsX/" + "9\n" + "Tw7FdvMqWkQ3Z1qfED+mQIbI77PX/" + "LFis9vBajZm2Y+x65rMh3t30Bsze400aVaIbSLk6r8CMRT\n" + "l/" + "NmOcllnGDD9Y8uecNfEEiXrMgDGWAyGOOu5WlB+" + "vMrHODTlxZCdjsyFdB006VpVtLasurQxBL\n" + "64WiLI4/" + "aFT1ANOXemV5piR2b9NiljB4yyHy9CLOVI5GJhB+CvXY9R8xmINzs5HNZ+Z96BZpbxA\n" + "fVJo39UFefwopYgL4nMiMd2qZoIn/AJx00u3t/" + "Lt7qpp9Yv5GLf5MUTERqfbvmzBeezjd9H+VlL\n" + "wSQzBqsvOGQD7L12rXsemPNxmXQSxxIPU2nFV4HYqR1xEUWj4ZAxBryr2G+" + "J2VGDZlLrxUH6KZA\n" + "Fkqb15VFelfwy+2FP8A/" + "9Xxlf6AdA182Yk9eFeLxSjoVfcfSMo4uIOfkweFOnpvlWYrLEwNFAA+\n" + "nMOYdrhFvQLeSO7coBXiK8iKiv07Zj8Ac4QtNrW1njUcKcT+yAR/" + "xGmR4WcsStLpTuPU9IFaEsV\n" + "BP3k4m2AgBzSwyQNcIwNTE1aI3wnam9O2Ug7s5Ckk/" + "NDndeVXa2H78MqqV6jmeBp9+ZWKXqDjZ4\n" + "+gvVvy30qCy0qzsLRBCnBI2VdgUTqPvOZ7y+Q7pz+bn5q6d+VflZxZlJ/" + "NN4ypptk5qtB9qRwDX\n" + "gn/AAx2y2ItpfKFv+eH5qNeTajJ5ovVaVywSqvEtTUKqupAA6D2y0BNPtv/AJx//" + "M5PzL8mJeXT\n" + "L+ndPf6rqarSpkAqsnEAAeoN6DpkJRYci9lROSgSUUH9o9K5Tw0ztfSHnXkOtK9q+PHwydq//" + "9b\n" + "yxrVoZNBtNSA5zRMPXmH8j0CLXuBmHE+" + "qneamHpEuqYeV7pzFVTRgQK5XMNmnlb1vyyY5QA1OwJ\n" + "+eUF2seTOLu5s7azVIVAkpVn/" + "hhnIALG73Yz5jvb1dICqzpDNIqyFD8SxH7R28cxibZCiWOsdJs\n" + "PTM6XNstPhnkjIhcHuJBVfvOCiUSn0TfWrTTLjyw8guA/PifTO3xcxxA8a5ZAbimvJP0m3p/" + "kFF\n" + "WxhmpWQJ9NW3zZPHz5vlb/nIDVbrWfzO1RJhxGnpDaRL/" + "khA1T7ktmSOTAJhZaAUtLawsbayl8v\n" + "xWi3Gpay0cF3HPcFRJJHJMXVrcJ8UaAFG5LWjF8tAYW9H/wCcOo9bTzxrt/" + "owkTyksZW5gkIKvI\n" + "7k26nvyReRJHyyBWT7dWQyOWlbnK2526e1O1MqIUFE84uPLkOdK9RXI0E2/wD/1/DA1bURZLY/" + "W\n" + "ZDZqwb0eXw7dMgIi7bjllVXsz7yNcfWC0Vd3Ip92Y2UOz0cnsPlwyx8xQ/" + "u24sMxCadoJp9LOXk\n" + "VX/" + "uwRUE0BI8cokbLMyoKouHu2MaKGXw7fLDwgoGSkbHpaNZyLLHRSKcFFQQRvUdMlwUFOQyLzr\n" + "ztpCaba6fPau4ijv4OURY8AjVFKV7ZZiO+7Vnh6XvXkSWNbW2WTb92KDxIFMzwHlZc3zX+" + "fuizW\n" + "f5p3ty8XGDU4YLmCQiisyII3+4rvl8UB5ffEghRGvOm7AbnvWvjk1fen/" + "ONPldPKP5aWOpPCfr2\n" + "uE31y6q2wbaMEn+VAMDSdyzrzj+avlHyTp0l/r2rxWFuHWJuIeacu4qFCRgsajfBwsty89/" + "6Gr/\n" + "ACa9an+JL/hSnrfoubhXwpXpjwhaL//" + "Q8E1AqtcAZMs8l6i1nqMa1oSVP0VynKLDmaWdSfQXl69\n" + "jF1Jv8MhDb5rpB3AO7INRRLhhGp4R05FgaGvTMU8200xS70zVDMRp2pTIOvBmB3PgQP15kxIcn" + "D\n" + "LH/" + "EEz0rRvOJhldr9pQtCqyd6VrShGTqw5d4ARv9jHfOGl+ZJNMluLkyenaFbiRdqFYW5nrWuwO\n" + "MKB5MdSMRxnhlu9N8p6lLFpti63FUjCtFJTrDKvse2bEDZ4XJ9RZB+YPli2/" + "Mjy5bxoUi1a0YS2\n" + "85UOwIXiy9jRu+TBppfOF1+V3m22vrdpNPM8cs/oo0VJlUqQPjValR3+IZNNvtLS9Yu9Mi0/" + "TJr\n" + "kyp6QhWVVCIWRATsKBemwwFrDzT87fybs/" + "wA1bW21PRb+DTvNlgGSRp6iC8i3KJJx+y6n7D0Pwm\n" + "hxBZXT55/6Fi/Nf0PW+qWXq+t6X1X67F6vD/ftK04V/wBl344U8b//" + "0fBapxheVh9ocV+nviqY2\n" + "/qQJDew/" + "bioWHiuQ8m0bbvaPKGtQ6jaxSo9JloCK75gZI0Xb4sgkHo8MouoAvP94BsRmGY7uWJU\n" + "gzbypOQpNOvIdK4Nw2WCE2tXulTkjEEbdafgclxMhFBas93dwyQzsWDghlJFONKHJCZtjOFBJf" + "y\n" + 
"j1y9vPL9zpbIs0WkXL2sUjA8hDXlGCRXtt07ZuYvL5KJeo6bfajbkzWkcToR8dqshZ6in2fhNK" + "/\n" + "PDTUlXmHVvMdr5o0v9H2kdrqGpfu7m0nkY87Uf7tkKAU4/" + "s03ynLkEBbfihx7dGT6va67LbRMNR\n" + "aKOBuUTKgIBXoK1BOYR1M3aQ0mOt9yxUeZNdtJhFapLqMluSXkg5oxJrUMW5KevQ9MmNXXNqOi" + "H\n" + "Rr/Hmv8A1r9I/oj95w+r+j9Yf1+NP5+nXtTD+dF8tkfkOlv/0vC3ph7f0/" + "alcVTbS4A8QibuKb5\n" + "RI05EBYRFpdX3ly79a2qYCavH/" + "EY7TCYyMD5PSdD8+wXUSn1ArDqOhBzFlipz4ZwWbaV5htbsgF\n" + "qg9crMXKErGyYwajFGzxyHlGSePbbwyqg5UZlCaxrFpaWU95LIqrEjMAT4Dp9OShGy1ZslBhv/" + "A\n" + "Dj9rd/a+aL+xUK+m38L3d0HrxRo2HFtu5D8c27y8t30raarbWkU+u6g4gsNORn+EcUaSh2Pc0/" + "4\n" + "lgtAjezzbT9SutY1i782al8Nxdyotqh6xWybIg+jc5q8s+I27bFDgFPQp9RE+nrag70+" + "L6crrZu\n" + "4jajokdv6LW/Dii1Wo61PXKQN3KPK0L+h4/rnD/K5V78a5LhXxd3/0/" + "DMXXtwxVNtL9Xkaf3f7N\n" + "etfbKMjdjtkZ9D6ufrlK0+HpX8coF9HJ26sXvfqXrf7i/U+uften/d/" + "wCyrmQL6uOav0pvpP8Ai\n" + "b1F+rV59+vH6a5XLhcjH4nRmY/xpxHP0/UptWvT6Mx/RbmjxWK+aP8AFf1M/" + "pCv1Kvxen9inavf\n" + "MrFwXtzcLUeLXq5Mv/I3nz1b0v8AjofuKVry9KrUpTanOlf9jmQ68va/zH9b/COn/o7/" + "AI431mP\n" + "65SvLh+zWvbl9rMfNfC34K4kmj9T6lD6FKclp/DNYXZx5srsPrHor6nXvkgxTPS/" + "U+rv6dPU5mt\n" + "fngFN5ulv+l/pL/Lp/scerHo//2Q==\n"; + +static std::string gCommandLine; + +TEST(Base64, LargeSample) { + RTC_LOG(LS_VERBOSE) << "Testing specific base64 file"; + + char unescaped[64 * 1024]; + + // unescape that massive blob above + size_t size = Base64Unescape(SpecificTest, sizeof(SpecificTest), unescaped, + sizeof(unescaped)); + + EXPECT_EQ(size, sizeof(testbase64)); + EXPECT_EQ(0, memcmp(testbase64, unescaped, sizeof(testbase64))); +} + +bool DecodeTest(const char* encoded, + size_t expect_unparsed, + const char* decoded, + webrtc::Base64::DecodeFlags flags) { + std::string result; + size_t consumed = 0, encoded_len = strlen(encoded); + bool success = webrtc::Base64::DecodeFromArray(encoded, encoded_len, flags, + &result, &consumed); + size_t unparsed = encoded_len - consumed; + EXPECT_EQ(expect_unparsed, unparsed) + << "\"" << encoded << "\" -> \"" << decoded << "\""; + EXPECT_STREQ(decoded, result.c_str()); + return success; +} + +#define Flags(x, y, z) \ + webrtc::Base64::DO_PARSE_##x | webrtc::Base64::DO_PAD_##y | \ + webrtc::Base64::DO_TERM_##z + +TEST(Base64, DecodeParseOptions) { + // Trailing whitespace + EXPECT_TRUE(DecodeTest("YWJjZA== ", 1, "abcd", Flags(STRICT, YES, CHAR))); + EXPECT_TRUE(DecodeTest("YWJjZA== ", 0, "abcd", Flags(WHITE, YES, CHAR))); + EXPECT_TRUE(DecodeTest("YWJjZA== ", 0, "abcd", Flags(ANY, YES, CHAR))); + + // Embedded whitespace + EXPECT_FALSE(DecodeTest("YWJjZA= =", 3, "abcd", Flags(STRICT, YES, CHAR))); + EXPECT_TRUE(DecodeTest("YWJjZA= =", 0, "abcd", Flags(WHITE, YES, CHAR))); + EXPECT_TRUE(DecodeTest("YWJjZA= =", 0, "abcd", Flags(ANY, YES, CHAR))); + + // Embedded non-base64 characters + EXPECT_FALSE(DecodeTest("YWJjZA=*=", 3, "abcd", Flags(STRICT, YES, CHAR))); + EXPECT_FALSE(DecodeTest("YWJjZA=*=", 3, "abcd", Flags(WHITE, YES, CHAR))); + EXPECT_TRUE(DecodeTest("YWJjZA=*=", 0, "abcd", Flags(ANY, YES, CHAR))); + + // Unexpected padding characters + EXPECT_FALSE(DecodeTest("YW=JjZA==", 7, "a", Flags(STRICT, YES, CHAR))); + EXPECT_FALSE(DecodeTest("YW=JjZA==", 7, "a", Flags(WHITE, YES, CHAR))); + EXPECT_TRUE(DecodeTest("YW=JjZA==", 0, "abcd", Flags(ANY, YES, CHAR))); +} + +TEST(Base64, DecodePadOptions) { + // Padding + EXPECT_TRUE(DecodeTest("YWJjZA==", 0, "abcd", Flags(STRICT, YES, CHAR))); + EXPECT_TRUE(DecodeTest("YWJjZA==", 0, "abcd", Flags(STRICT, ANY, CHAR))); + EXPECT_TRUE(DecodeTest("YWJjZA==", 2, "abcd", Flags(STRICT, NO, CHAR))); + + 
// Incomplete padding + EXPECT_FALSE(DecodeTest("YWJjZA=", 1, "abcd", Flags(STRICT, YES, CHAR))); + EXPECT_TRUE(DecodeTest("YWJjZA=", 1, "abcd", Flags(STRICT, ANY, CHAR))); + EXPECT_TRUE(DecodeTest("YWJjZA=", 1, "abcd", Flags(STRICT, NO, CHAR))); + + // No padding + EXPECT_FALSE(DecodeTest("YWJjZA", 0, "abcd", Flags(STRICT, YES, CHAR))); + EXPECT_TRUE(DecodeTest("YWJjZA", 0, "abcd", Flags(STRICT, ANY, CHAR))); + EXPECT_TRUE(DecodeTest("YWJjZA", 0, "abcd", Flags(STRICT, NO, CHAR))); +} + +TEST(Base64, DecodeTerminateOptions) { + // Complete quantum + EXPECT_TRUE(DecodeTest("YWJj", 0, "abc", Flags(STRICT, NO, BUFFER))); + EXPECT_TRUE(DecodeTest("YWJj", 0, "abc", Flags(STRICT, NO, CHAR))); + EXPECT_TRUE(DecodeTest("YWJj", 0, "abc", Flags(STRICT, NO, ANY))); + + // Complete quantum with trailing data + EXPECT_FALSE(DecodeTest("YWJj*", 1, "abc", Flags(STRICT, NO, BUFFER))); + EXPECT_TRUE(DecodeTest("YWJj*", 1, "abc", Flags(STRICT, NO, CHAR))); + EXPECT_TRUE(DecodeTest("YWJj*", 1, "abc", Flags(STRICT, NO, ANY))); + + // Incomplete quantum + EXPECT_FALSE(DecodeTest("YWJ", 0, "ab", Flags(STRICT, NO, BUFFER))); + EXPECT_FALSE(DecodeTest("YWJ", 0, "ab", Flags(STRICT, NO, CHAR))); + EXPECT_TRUE(DecodeTest("YWJ", 0, "ab", Flags(STRICT, NO, ANY))); +} + +TEST(Base64, GetNextBase64Char) { + // The table looks like this: + // "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/" + char next_char; + EXPECT_TRUE(webrtc::Base64::GetNextBase64Char('A', &next_char)); + EXPECT_EQ('B', next_char); + EXPECT_TRUE(webrtc::Base64::GetNextBase64Char('Z', &next_char)); + EXPECT_EQ('a', next_char); + EXPECT_TRUE(webrtc::Base64::GetNextBase64Char('/', &next_char)); + EXPECT_EQ('A', next_char); + EXPECT_FALSE(webrtc::Base64::GetNextBase64Char('&', &next_char)); + EXPECT_FALSE(webrtc::Base64::GetNextBase64Char('Z', nullptr)); +} + +} // namespace +} // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES diff --git a/rtc_base/test_base64.h b/rtc_base/third_party/base64/test_base64.h similarity index 100% rename from rtc_base/test_base64.h rename to rtc_base/third_party/base64/test_base64.h diff --git a/rtc_base/third_party/sigslot/README.chromium b/rtc_base/third_party/sigslot/README.chromium index 9867dd0320..50116c5e20 100644 --- a/rtc_base/third_party/sigslot/README.chromium +++ b/rtc_base/third_party/sigslot/README.chromium @@ -1,9 +1,9 @@ Name: C++ Signal/Slot Library Short Name: sigslot URL: http://sigslot.sourceforge.net/ -Version: 0 +Version: N/A Date: 2018-07-09 -License: Custom license +License: Ignorable License File: LICENSE Security Critical: yes Shipped: yes @@ -19,3 +19,7 @@ If has_slots is single threaded the user must ensure that it is not trying to connect or disconnect to signalx concurrently or data race may occur. If signalx is single threaded the user must ensure that disconnect, connect or signal is not happening concurrently or data race may occur. + +note: sigslot at sourceforge has only one released version - version 1.0.0 +released on 2002-03-31. There is a read-only CVS repository from which the +dated version (2018-07-09) can be retrieved if necessary. 
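As a rough illustration of the sigslot threading note above, here is a minimal sketch (not part of the patch), assuming the classic sigslot API shipped in this copy (signal1<>, has_slots<>, connect(), emission via operator(), and the single_threaded policy class): with the single_threaded policy there is no internal locking, so every connect, disconnect, and emission must stay on one thread or be serialized by the caller.

  #include "rtc_base/third_party/sigslot/sigslot.h"

  class Receiver : public sigslot::has_slots<sigslot::single_threaded> {
   public:
    void OnValue(int value) { last_value_ = value; }
    int last_value_ = 0;
  };

  void SingleThreadedUsage() {
    // No internal locking with the single_threaded policy: all of the calls
    // below must happen on the same thread (or be externally serialized),
    // otherwise the README's data-race warning applies.
    sigslot::signal1<int, sigslot::single_threaded> signal_value;
    Receiver receiver;
    signal_value.connect(&receiver, &Receiver::OnValue);
    signal_value(42);  // Emit; invokes Receiver::OnValue synchronously.
    signal_value.disconnect(&receiver);
  }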
diff --git a/rtc_base/thread.cc b/rtc_base/thread.cc index 6f101ac8f4..2491335c96 100644 --- a/rtc_base/thread.cc +++ b/rtc_base/thread.cc @@ -10,14 +10,28 @@ #include "rtc_base/thread.h" +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/functional/any_invocable.h" #include "absl/strings/string_view.h" +#include "api/function_view.h" +#include "api/location.h" #include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" +#include "rtc_base/platform_thread_types.h" #include "rtc_base/socket_server.h" #if defined(WEBRTC_WIN) #include #elif defined(WEBRTC_POSIX) +#include #include #else #error "Either WEBRTC_WIN or WEBRTC_POSIX needs to be defined." @@ -72,7 +86,7 @@ class ScopedAutoReleasePool { } // namespace #endif -namespace rtc { +namespace webrtc { using ::webrtc::MutexLock; using ::webrtc::TimeDelta; @@ -179,7 +193,7 @@ void ThreadManager::ProcessAllMessageQueuesInternal() { } } - rtc::Thread* current = rtc::Thread::Current(); + Thread* current = Thread::Current(); // Note: One of the message queues may have been on this thread, which is // why we can't synchronously wait for queues_not_done to go to 0; we need // to process messages as well. @@ -251,7 +265,7 @@ void ThreadManager::SetCurrentThread(Thread* thread) { SetCurrentThreadInternal(thread); } -void rtc::ThreadManager::ChangeCurrentThreadForTest(rtc::Thread* thread) { +void ThreadManager::ChangeCurrentThreadForTest(Thread* thread) { SetCurrentThreadInternal(thread); } @@ -415,7 +429,7 @@ absl::AnyInvocable Thread::Get(int cmsWait) { } // Pull a message off the message queue, if available. if (!messages_.empty()) { - absl::AnyInvocable task = std::move(messages_.front()); + absl::AnyInvocable task = std::move(messages_.front()); messages_.pop(); return task; } @@ -438,7 +452,7 @@ absl::AnyInvocable Thread::Get(int cmsWait) { { // Wait and multiplex in the meantime if (!ss_->Wait(cmsNext == kForever ? 
SocketServer::kForever - : webrtc::TimeDelta::Millis(cmsNext), + : TimeDelta::Millis(cmsNext), /*process_io=*/true)) return nullptr; } @@ -456,8 +470,8 @@ absl::AnyInvocable Thread::Get(int cmsWait) { } void Thread::PostTaskImpl(absl::AnyInvocable task, - const PostTaskTraits& traits, - const webrtc::Location& location) { + const PostTaskTraits& /* traits */, + const Location& /* location */) { if (IsQuitting()) { return; } @@ -474,9 +488,9 @@ void Thread::PostTaskImpl(absl::AnyInvocable task, } void Thread::PostDelayedTaskImpl(absl::AnyInvocable task, - webrtc::TimeDelta delay, - const PostDelayedTaskTraits& traits, - const webrtc::Location& location) { + TimeDelta delay, + const PostDelayedTaskTraits& /* traits */, + const Location& /* location */) { if (IsQuitting()) { return; } @@ -509,7 +523,7 @@ int Thread::GetDelay() { return 0; if (!delayed_messages_.empty()) { - int delay = TimeUntil(delayed_messages_.top().run_time_ms); + int delay = webrtc::TimeUntil(delayed_messages_.top().run_time_ms); if (delay < 0) delay = 0; return delay; @@ -521,10 +535,10 @@ int Thread::GetDelay() { void Thread::Dispatch(absl::AnyInvocable task) { TRACE_EVENT0("webrtc", "Thread::Dispatch"); RTC_DCHECK_RUN_ON(this); - int64_t start_time = TimeMillis(); + int64_t start_time = webrtc::TimeMillis(); std::move(task)(); - int64_t end_time = TimeMillis(); - int64_t diff = TimeDiff(end_time, start_time); + int64_t end_time = webrtc::TimeMillis(); + int64_t diff = webrtc::TimeDiff(end_time, start_time); if (diff >= dispatch_warning_ms_) { RTC_LOG(LS_INFO) << "Message to " << name() << " took " << diff << "ms to dispatch."; @@ -695,7 +709,7 @@ void* Thread::PreRun(void* pv) { #endif Thread* thread = static_cast(pv); ThreadManager::Instance()->SetCurrentThread(thread); - rtc::SetCurrentThreadName(thread->name_.c_str()); + SetCurrentThreadName(thread->name_.c_str()); #if defined(WEBRTC_MAC) ScopedAutoReleasePool pool; #endif @@ -707,7 +721,7 @@ void* Thread::PreRun(void* pv) { #else return nullptr; #endif -} // namespace rtc +} void Thread::Run() { ProcessMessages(kForever); @@ -723,8 +737,8 @@ void Thread::Stop() { Join(); } -void Thread::BlockingCallImpl(rtc::FunctionView functor, - const webrtc::Location& location) { +void Thread::BlockingCallImpl(FunctionView functor, + const Location& /* location */) { TRACE_EVENT0("webrtc", "Thread::BlockingCall"); RTC_DCHECK(!IsQuitting()); @@ -806,7 +820,7 @@ uint32_t Thread::GetCouldBeBlockingCallCount() const { // Returns true if no policies added or if there is at least one policy // that permits invocation to `target` thread. -bool Thread::IsInvokeToThreadAllowed(rtc::Thread* target) { +bool Thread::IsInvokeToThreadAllowed(Thread* target) { #if (!defined(NDEBUG) || RTC_DCHECK_IS_ON) RTC_DCHECK_RUN_ON(this); if (!invoke_policy_enabled_) { @@ -836,30 +850,31 @@ bool Thread::ProcessMessages(int cmsLoop) { // Using ProcessMessages with a custom clock for testing and a time greater // than 0 doesn't work, since it's not guaranteed to advance the custom // clock's time, and may get stuck in an infinite loop. - RTC_DCHECK(GetClockForTesting() == nullptr || cmsLoop == 0 || + RTC_DCHECK(webrtc::GetClockForTesting() == nullptr || cmsLoop == 0 || cmsLoop == kForever); - int64_t msEnd = (kForever == cmsLoop) ? 0 : TimeAfter(cmsLoop); + int64_t msEnd = (kForever == cmsLoop) ? 
0 : webrtc::TimeAfter(cmsLoop); int cmsNext = cmsLoop; while (true) { #if defined(WEBRTC_MAC) ScopedAutoReleasePool pool; #endif - absl::AnyInvocable task = Get(cmsNext); + absl::AnyInvocable task = Get(cmsNext); if (!task) return !IsQuitting(); Dispatch(std::move(task)); if (cmsLoop != kForever) { - cmsNext = static_cast(TimeUntil(msEnd)); + cmsNext = static_cast(webrtc::TimeUntil(msEnd)); if (cmsNext < 0) return true; } } } -bool Thread::WrapCurrentWithThreadManager(ThreadManager* thread_manager, - bool need_synchronize_access) { +bool Thread::WrapCurrentWithThreadManager( + ThreadManager* thread_manager, + [[maybe_unused]] bool need_synchronize_access) { RTC_DCHECK(!IsRunning()); #if defined(WEBRTC_WIN) @@ -893,8 +908,8 @@ AutoThread::AutoThread() : Thread(CreateDefaultSocketServer(), /*do_init=*/false) { if (!ThreadManager::Instance()->CurrentThread()) { // DoInit registers with ThreadManager. Do that only if we intend to - // be rtc::Thread::Current(), otherwise ProcessAllMessageQueuesInternal will - // post a message to a queue that no running thread is serving. + // be webrtc::Thread::Current(), otherwise ProcessAllMessageQueuesInternal + // will post a message to a queue that no running thread is serving. DoInit(); ThreadManager::Instance()->SetCurrentThread(this); } @@ -914,8 +929,8 @@ AutoSocketServerThread::AutoSocketServerThread(SocketServer* ss) old_thread_ = ThreadManager::Instance()->CurrentThread(); // Temporarily set the current thread to nullptr so that we can keep checks // around that catch unintentional pointer overwrites. - rtc::ThreadManager::Instance()->SetCurrentThread(nullptr); - rtc::ThreadManager::Instance()->SetCurrentThread(this); + ThreadManager::Instance()->SetCurrentThread(nullptr); + ThreadManager::Instance()->SetCurrentThread(this); if (old_thread_) { ThreadManager::Remove(old_thread_); } @@ -929,11 +944,11 @@ AutoSocketServerThread::~AutoSocketServerThread() { // its contents rely on this thread still being set as the current thread. Stop(); DoDestroy(); - rtc::ThreadManager::Instance()->SetCurrentThread(nullptr); - rtc::ThreadManager::Instance()->SetCurrentThread(old_thread_); + ThreadManager::Instance()->SetCurrentThread(nullptr); + ThreadManager::Instance()->SetCurrentThread(old_thread_); if (old_thread_) { ThreadManager::Add(old_thread_); } } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/thread.h b/rtc_base/thread.h index 1f48b6d923..69c74febaf 100644 --- a/rtc_base/thread.h +++ b/rtc_base/thread.h @@ -49,7 +49,7 @@ // Counts how many `Thread::BlockingCall` are made from within a scope and logs // the number of blocking calls at the end of the scope. #define RTC_LOG_THREAD_BLOCK_COUNT() \ - rtc::Thread::ScopedCountBlockingCalls blocked_call_count_printer( \ + webrtc::Thread::ScopedCountBlockingCalls blocked_call_count_printer( \ [func = __func__](uint32_t actual_block, uint32_t could_block) { \ auto total = actual_block + could_block; \ if (total) { \ @@ -74,7 +74,7 @@ #define RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(x) #endif -namespace rtc { +namespace webrtc { class Thread; @@ -140,7 +140,7 @@ class RTC_EXPORT ThreadManager { // This list contains all live Threads. std::vector message_queues_ RTC_GUARDED_BY(crit_); - webrtc::Mutex crit_; + Mutex crit_; #if RTC_DCHECK_IS_ON // Represents all thread seand actions by storing all send targets per thread. @@ -160,7 +160,7 @@ class RTC_EXPORT ThreadManager { // WARNING! SUBCLASSES MUST CALL Stop() IN THEIR DESTRUCTORS! See ~Thread(). 
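To make the warning above concrete, a subclass would typically look like the hypothetical sketch below (not part of the patch; it mirrors the CustomThread test helper later in this diff, which also calls Stop() from its destructor). The point is that the base class destructor runs after the derived part is gone, so the message loop must be stopped before that.

  #include <memory>

  #include "rtc_base/null_socket_server.h"
  #include "rtc_base/thread.h"

  class MyWorkerThread : public webrtc::Thread {
   public:
    MyWorkerThread()
        : webrtc::Thread(std::make_unique<webrtc::NullSocketServer>()) {}
    // Stop the underlying message loop here: by the time webrtc::Thread's
    // destructor runs, the derived members that Run() may touch have already
    // been destroyed.
    ~MyWorkerThread() override { Stop(); }
  };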
-class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { +class RTC_LOCKABLE RTC_EXPORT Thread : public TaskQueueBase { public: static const int kForever = -1; @@ -267,7 +267,7 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { bool empty() const { return size() == 0u; } size_t size() const { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return messages_.size() + delayed_messages_.size(); } @@ -309,18 +309,16 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // See ScopedDisallowBlockingCalls for details. // NOTE: Blocking calls are DISCOURAGED, consider if what you're doing can // be achieved with PostTask() and callbacks instead. - void BlockingCall( - FunctionView functor, - const webrtc::Location& location = webrtc::Location::Current()) { + void BlockingCall(FunctionView functor, + const Location& location = Location::Current()) { BlockingCallImpl(std::move(functor), location); } template , typename = typename std::enable_if_t>> - ReturnT BlockingCall( - Functor&& functor, - const webrtc::Location& location = webrtc::Location::Current()) { + ReturnT BlockingCall(Functor&& functor, + const Location& location = Location::Current()) { ReturnT result; BlockingCall([&] { result = std::forward(functor)(); }, location); return result; @@ -339,7 +337,7 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // returns false. // If NDEBUG is defined and RTC_DCHECK_IS_ON is undefined always returns // true. - bool IsInvokeToThreadAllowed(rtc::Thread* target); + bool IsInvokeToThreadAllowed(Thread* target); // From TaskQueueBase void Delete() override; @@ -368,7 +366,7 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // These functions are public to avoid injecting test hooks. Don't call them // outside of tests. // This method should be called when thread is created using non standard - // method, like derived implementation of rtc::Thread and it can not be + // method, like derived implementation of webrtc::Thread and it can not be // started by calling Start(). This will set started flag to true and // owned to false. This must be called from the current thread. bool WrapCurrent(); @@ -383,15 +381,15 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { public: explicit CurrentThreadSetter(Thread* thread) : CurrentTaskQueueSetter(thread), - manager_(rtc::ThreadManager::Instance()), + manager_(ThreadManager::Instance()), previous_(manager_->CurrentThread()) { manager_->ChangeCurrentThreadForTest(thread); } ~CurrentThreadSetter() { manager_->ChangeCurrentThreadForTest(previous_); } private: - rtc::ThreadManager* const manager_; - rtc::Thread* const previous_; + ThreadManager* const manager_; + Thread* const previous_; }; // DelayedMessage goes into a priority queue, sorted by trigger time. Messages @@ -417,14 +415,14 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // TaskQueueBase implementation. 
void PostTaskImpl(absl::AnyInvocable task, const PostTaskTraits& traits, - const webrtc::Location& location) override; + const Location& location) override; void PostDelayedTaskImpl(absl::AnyInvocable task, - webrtc::TimeDelta delay, + TimeDelta delay, const PostDelayedTaskTraits& traits, - const webrtc::Location& location) override; + const Location& location) override; virtual void BlockingCallImpl(FunctionView functor, - const webrtc::Location& location); + const Location& location); // Perform initialization, subclasses must call this from their constructor // if false was passed as init_queue to the Thread constructor. @@ -494,7 +492,7 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { std::vector allowed_threads_ RTC_GUARDED_BY(this); bool invoke_policy_enabled_ RTC_GUARDED_BY(this) = false; #endif - mutable webrtc::Mutex mutex_; + mutable Mutex mutex_; bool fInitialized_; bool fDestroyed_; @@ -564,8 +562,19 @@ class AutoSocketServerThread : public Thread { AutoSocketServerThread& operator=(const AutoSocketServerThread&) = delete; private: - rtc::Thread* old_thread_; + Thread* old_thread_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::AutoSocketServerThread; +using ::webrtc::AutoThread; +using ::webrtc::Thread; +using ::webrtc::ThreadManager; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_THREAD_H_ diff --git a/rtc_base/thread_unittest.cc b/rtc_base/thread_unittest.cc index cd733db2cd..9c417fe75a 100644 --- a/rtc_base/thread_unittest.cc +++ b/rtc_base/thread_unittest.cc @@ -10,33 +10,46 @@ #include "rtc_base/thread.h" +#include +#include +#include #include +#include +#include +#include "absl/strings/string_view.h" #include "api/field_trials_view.h" +#include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" #include "api/task_queue/task_queue_test.h" +#include "api/test/rtc_error_matchers.h" #include "api/units/time_delta.h" +#include "rtc_base/async_packet_socket.h" #include "rtc_base/async_udp_socket.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/fake_clock.h" -#include "rtc_base/gunit.h" #include "rtc_base/internal/default_socket_server.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/null_socket_server.h" -#include "rtc_base/physical_socket_server.h" -#include "rtc_base/ref_counted_object.h" +#include "rtc_base/socket.h" #include "rtc_base/socket_address.h" +#include "rtc_base/socket_server.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread_annotations.h" +#include "rtc_base/time_utils.h" #include "test/gmock.h" +#include "test/gtest.h" #include "test/testsupport/rtc_expect_death.h" +#include "test/wait_until.h" #if defined(WEBRTC_WIN) #include // NOLINT #endif -namespace rtc { +namespace webrtc { namespace { using ::testing::ElementsAre; @@ -84,20 +97,20 @@ class SocketClient : public TestGenerator, public sigslot::has_slots<> { : socket_(AsyncUDPSocket::Create(socket, addr)), post_thread_(post_thread), post_handler_(phandler) { - socket_->SignalReadPacket.connect(this, &SocketClient::OnPacket); + socket_->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + OnPacket(socket, packet); + }); } ~SocketClient() 
override { delete socket_; } SocketAddress address() const { return socket_->GetLocalAddress(); } - void OnPacket(AsyncPacketSocket* socket, - const char* buf, - size_t size, - const SocketAddress& remote_addr, - const int64_t& packet_time_us) { - EXPECT_EQ(size, sizeof(uint32_t)); - uint32_t prev = reinterpret_cast(buf)[0]; + void OnPacket(AsyncPacketSocket* socket, const ReceivedIpPacket& packet) { + EXPECT_EQ(packet.payload().size(), sizeof(uint32_t)); + uint32_t prev = + reinterpret_cast(packet.payload().data())[0]; uint32_t result = Next(prev); post_thread_->PostDelayedTask([post_handler_ = post_handler_, @@ -111,10 +124,10 @@ class SocketClient : public TestGenerator, public sigslot::has_slots<> { MessageClient* post_handler_; }; -class CustomThread : public rtc::Thread { +class CustomThread : public Thread { public: CustomThread() - : Thread(std::unique_ptr(new rtc::NullSocketServer())) {} + : Thread(std::unique_ptr(new NullSocketServer())) {} ~CustomThread() override { Stop(); } bool Start() { return false; } @@ -145,7 +158,7 @@ class SignalWhenDestroyedThread : public Thread { // See: https://code.google.com/p/webrtc/issues/detail?id=2409 TEST(ThreadTest, DISABLED_Main) { - rtc::AutoThread main_thread; + AutoThread main_thread; const SocketAddress addr("127.0.0.1", 0); // Create the messaging client on its own thread. @@ -187,13 +200,13 @@ TEST(ThreadTest, DISABLED_Main) { } TEST(ThreadTest, CountBlockingCalls) { - rtc::AutoThread current; + AutoThread current; // When the test runs, this will print out: // (thread_unittest.cc:262): Blocking TestBody: total=2 (actual=1, could=1) RTC_LOG_THREAD_BLOCK_COUNT(); #if RTC_DCHECK_IS_ON - rtc::Thread::ScopedCountBlockingCalls blocked_calls( + webrtc::Thread::ScopedCountBlockingCalls blocked_calls( [&](uint32_t actual_block, uint32_t could_block) { EXPECT_EQ(1u, actual_block); EXPECT_EQ(1u, could_block); @@ -229,10 +242,10 @@ TEST(ThreadTest, CountBlockingCalls) { #if RTC_DCHECK_IS_ON TEST(ThreadTest, CountBlockingCallsOneCallback) { - rtc::AutoThread current; + webrtc::AutoThread current; bool was_called_back = false; { - rtc::Thread::ScopedCountBlockingCalls blocked_calls( + webrtc::Thread::ScopedCountBlockingCalls blocked_calls( [&](uint32_t actual_block, uint32_t could_block) { was_called_back = true; }); @@ -242,10 +255,10 @@ TEST(ThreadTest, CountBlockingCallsOneCallback) { } TEST(ThreadTest, CountBlockingCallsSkipCallback) { - rtc::AutoThread current; + webrtc::AutoThread current; bool was_called_back = false; { - rtc::Thread::ScopedCountBlockingCalls blocked_calls( + webrtc::Thread::ScopedCountBlockingCalls blocked_calls( [&](uint32_t actual_block, uint32_t could_block) { was_called_back = true; }); @@ -296,7 +309,7 @@ TEST(ThreadTest, Wrap) { #if (!defined(NDEBUG) || RTC_DCHECK_IS_ON) TEST(ThreadTest, InvokeToThreadAllowedReturnsTrueWithoutPolicies) { - rtc::AutoThread main_thread; + webrtc::AutoThread main_thread; // Create and start the thread. auto thread1 = Thread::CreateWithSocketServer(); auto thread2 = Thread::CreateWithSocketServer(); @@ -307,7 +320,7 @@ TEST(ThreadTest, InvokeToThreadAllowedReturnsTrueWithoutPolicies) { } TEST(ThreadTest, InvokeAllowedWhenThreadsAdded) { - rtc::AutoThread main_thread; + webrtc::AutoThread main_thread; // Create and start the thread. 
auto thread1 = Thread::CreateWithSocketServer(); auto thread2 = Thread::CreateWithSocketServer(); @@ -326,7 +339,7 @@ TEST(ThreadTest, InvokeAllowedWhenThreadsAdded) { } TEST(ThreadTest, InvokesDisallowedWhenDisallowAllInvokes) { - rtc::AutoThread main_thread; + webrtc::AutoThread main_thread; // Create and start the thread. auto thread1 = Thread::CreateWithSocketServer(); auto thread2 = Thread::CreateWithSocketServer(); @@ -340,7 +353,7 @@ TEST(ThreadTest, InvokesDisallowedWhenDisallowAllInvokes) { #endif // (!defined(NDEBUG) || RTC_DCHECK_IS_ON) TEST(ThreadTest, InvokesAllowedByDefault) { - rtc::AutoThread main_thread; + AutoThread main_thread; // Create and start the thread. auto thread1 = Thread::CreateWithSocketServer(); auto thread2 = Thread::CreateWithSocketServer(); @@ -417,17 +430,17 @@ TEST(ThreadTest, ThreeThreadsBlockingCall) { explicit LockedBool(bool value) : value_(value) {} void Set(bool value) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); value_ = value; } bool Get() { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return value_; } private: - webrtc::Mutex mutex_; + Mutex mutex_; bool value_ RTC_GUARDED_BY(mutex_); }; @@ -456,7 +469,9 @@ TEST(ThreadTest, ThreeThreadsBlockingCall) { SetAndInvokeSet(&async_invoked, thread2, out); }); - EXPECT_TRUE_WAIT(async_invoked.Get(), 2000); + EXPECT_THAT(webrtc::WaitUntil([&] { return async_invoked.Get(); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); } }; @@ -471,7 +486,9 @@ TEST(ThreadTest, ThreeThreadsBlockingCall) { }); EXPECT_FALSE(thread_a_called.Get()); - EXPECT_TRUE_WAIT(thread_a_called.Get(), 2000); + EXPECT_THAT(webrtc::WaitUntil([&] { return thread_a_called.Get(); }, + ::testing::IsTrue()), + webrtc::IsRtcOk()); } static void DelayedPostsWithIdenticalTimesAreProcessedInFifoOrder( @@ -513,7 +530,7 @@ TEST(ThreadTest, DelayedPostsWithIdenticalTimesAreProcessedInFifoOrder) { // all messages (both delayed and non delayed) up until the current time, on // all registered message queues. 
TEST(ThreadManager, ProcessAllMessageQueues) { - rtc::AutoThread main_thread; + AutoThread main_thread; Event entered_process_all_message_queues(true, false); auto a = Thread::CreateWithSocketServer(); auto b = Thread::CreateWithSocketServer(); @@ -618,7 +635,7 @@ class DestructionFunctor { }; TEST(ThreadPostTaskTest, InvokesWithLambda) { - std::unique_ptr background_thread(rtc::Thread::Create()); + std::unique_ptr background_thread(Thread::Create()); background_thread->Start(); Event event; @@ -627,7 +644,7 @@ TEST(ThreadPostTaskTest, InvokesWithLambda) { } TEST(ThreadPostTaskTest, InvokesWithCopiedFunctor) { - std::unique_ptr background_thread(rtc::Thread::Create()); + std::unique_ptr background_thread(Thread::Create()); background_thread->Start(); LifeCycleFunctor::Stats stats; @@ -641,7 +658,7 @@ TEST(ThreadPostTaskTest, InvokesWithCopiedFunctor) { } TEST(ThreadPostTaskTest, InvokesWithMovedFunctor) { - std::unique_ptr background_thread(rtc::Thread::Create()); + std::unique_ptr background_thread(Thread::Create()); background_thread->Start(); LifeCycleFunctor::Stats stats; @@ -655,7 +672,7 @@ TEST(ThreadPostTaskTest, InvokesWithMovedFunctor) { } TEST(ThreadPostTaskTest, InvokesWithReferencedFunctorShouldCopy) { - std::unique_ptr background_thread(rtc::Thread::Create()); + std::unique_ptr background_thread(Thread::Create()); background_thread->Start(); LifeCycleFunctor::Stats stats; @@ -670,7 +687,7 @@ TEST(ThreadPostTaskTest, InvokesWithReferencedFunctorShouldCopy) { } TEST(ThreadPostTaskTest, InvokesWithCopiedFunctorDestroyedOnTargetThread) { - std::unique_ptr background_thread(rtc::Thread::Create()); + std::unique_ptr background_thread(Thread::Create()); background_thread->Start(); Event event; @@ -684,7 +701,7 @@ TEST(ThreadPostTaskTest, InvokesWithCopiedFunctorDestroyedOnTargetThread) { } TEST(ThreadPostTaskTest, InvokesWithMovedFunctorDestroyedOnTargetThread) { - std::unique_ptr background_thread(rtc::Thread::Create()); + std::unique_ptr background_thread(Thread::Create()); background_thread->Start(); Event event; @@ -699,7 +716,7 @@ TEST(ThreadPostTaskTest, InvokesWithMovedFunctorDestroyedOnTargetThread) { TEST(ThreadPostTaskTest, InvokesWithReferencedFunctorShouldCopyAndDestroyedOnTargetThread) { - std::unique_ptr background_thread(rtc::Thread::Create()); + std::unique_ptr background_thread(Thread::Create()); background_thread->Start(); Event event; @@ -714,7 +731,7 @@ TEST(ThreadPostTaskTest, } TEST(ThreadPostTaskTest, InvokesOnBackgroundThread) { - std::unique_ptr background_thread(rtc::Thread::Create()); + std::unique_ptr background_thread(Thread::Create()); background_thread->Start(); Event event; @@ -731,7 +748,7 @@ TEST(ThreadPostTaskTest, InvokesOnBackgroundThread) { } TEST(ThreadPostTaskTest, InvokesAsynchronously) { - std::unique_ptr background_thread(rtc::Thread::Create()); + std::unique_ptr background_thread(Thread::Create()); background_thread->Start(); // The first event ensures that SendSingleMessage() is not blocking this @@ -747,7 +764,7 @@ TEST(ThreadPostTaskTest, InvokesAsynchronously) { } TEST(ThreadPostTaskTest, InvokesInPostedOrder) { - std::unique_ptr background_thread(rtc::Thread::Create()); + std::unique_ptr background_thread(Thread::Create()); background_thread->Start(); Event first; @@ -769,7 +786,7 @@ TEST(ThreadPostTaskTest, InvokesInPostedOrder) { } TEST(ThreadPostDelayedTaskTest, InvokesAsynchronously) { - std::unique_ptr background_thread(rtc::Thread::Create()); + std::unique_ptr background_thread(Thread::Create()); background_thread->Start(); // 
The first event ensures that SendSingleMessage() is not blocking this @@ -788,7 +805,7 @@ TEST(ThreadPostDelayedTaskTest, InvokesAsynchronously) { TEST(ThreadPostDelayedTaskTest, InvokesInDelayOrder) { ScopedFakeClock clock; - std::unique_ptr background_thread(rtc::Thread::Create()); + std::unique_ptr background_thread(Thread::Create()); background_thread->Start(); Event first; @@ -814,31 +831,30 @@ TEST(ThreadPostDelayedTaskTest, InvokesInDelayOrder) { } TEST(ThreadPostDelayedTaskTest, IsCurrentTaskQueue) { - auto current_tq = webrtc::TaskQueueBase::Current(); + auto current_tq = TaskQueueBase::Current(); { - std::unique_ptr thread(rtc::Thread::Create()); + std::unique_ptr thread(Thread::Create()); thread->WrapCurrent(); - EXPECT_EQ(webrtc::TaskQueueBase::Current(), - static_cast(thread.get())); + EXPECT_EQ(TaskQueueBase::Current(), + static_cast(thread.get())); thread->UnwrapCurrent(); } - EXPECT_EQ(webrtc::TaskQueueBase::Current(), current_tq); + EXPECT_EQ(TaskQueueBase::Current(), current_tq); } -class ThreadFactory : public webrtc::TaskQueueFactory { +class ThreadFactory : public TaskQueueFactory { public: - std::unique_ptr - CreateTaskQueue(absl::string_view /* name */, - Priority /*priority*/) const override { + std::unique_ptr CreateTaskQueue( + absl::string_view /* name */, + Priority /*priority*/) const override { std::unique_ptr thread = Thread::Create(); thread->Start(); - return std::unique_ptr( - thread.release()); + return std::unique_ptr(thread.release()); } }; -std::unique_ptr CreateDefaultThreadFactory( - const webrtc::FieldTrialsView*) { +std::unique_ptr CreateDefaultThreadFactory( + const FieldTrialsView*) { return std::make_unique(); } @@ -849,4 +865,4 @@ INSTANTIATE_TEST_SUITE_P(RtcThread, ::testing::Values(CreateDefaultThreadFactory)); } // namespace -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/time_utils.cc b/rtc_base/time_utils.cc index 9f112e49c1..d8ce42a539 100644 --- a/rtc_base/time_utils.cc +++ b/rtc_base/time_utils.cc @@ -25,7 +25,7 @@ #include #endif -namespace rtc { +namespace webrtc { #if defined(WEBRTC_WIN) || defined(WINUWP) // FileTime (January 1st 1601) to Unix time (January 1st 1970) @@ -65,7 +65,7 @@ class TimeHelper final { TIME_ZONE_INFORMATION time_zone; GetTimeZoneInformation(&time_zone); int64_t time_zone_bias_ns = - rtc::dchecked_cast(time_zone.Bias) * 60 * 1000 * 1000 * 1000; + dchecked_cast(time_zone.Bias) * 60 * 1000 * 1000 * 1000; singleton.app_start_time_ns_ = (ntp_server_time_ms - kNTPTimeToUnixTimeEpochOffset) * 1000000 - time_zone_bias_ns; @@ -78,9 +78,9 @@ class TimeHelper final { int64_t result = 0; LARGE_INTEGER qpcnt; QueryPerformanceCounter(&qpcnt); - result = rtc::dchecked_cast( - (rtc::dchecked_cast(qpcnt.QuadPart) * 100000 / - rtc::dchecked_cast(singleton.os_ticks_per_second_)) * + result = dchecked_cast( + (dchecked_cast(qpcnt.QuadPart) * 100000 / + dchecked_cast(singleton.os_ticks_per_second_)) * 10000); result = singleton.app_start_time_ns_ + result - singleton.time_since_os_start_ns_; @@ -92,7 +92,7 @@ class TimeHelper final { TIME_ZONE_INFORMATION time_zone; GetTimeZoneInformation(&time_zone); int64_t time_zone_bias_ns = - rtc::dchecked_cast(time_zone.Bias) * 60 * 1000 * 1000 * 1000; + dchecked_cast(time_zone.Bias) * 60 * 1000 * 1000 * 1000; FILETIME ft; // This will give us system file in UTC format. 
GetSystemTimeAsFileTime(&ft); @@ -114,13 +114,13 @@ class TimeHelper final { void UpdateReferenceTime() { LARGE_INTEGER qpfreq; QueryPerformanceFrequency(&qpfreq); - os_ticks_per_second_ = rtc::dchecked_cast(qpfreq.QuadPart); + os_ticks_per_second_ = dchecked_cast(qpfreq.QuadPart); LARGE_INTEGER qpcnt; QueryPerformanceCounter(&qpcnt); - time_since_os_start_ns_ = rtc::dchecked_cast( - (rtc::dchecked_cast(qpcnt.QuadPart) * 100000 / - rtc::dchecked_cast(os_ticks_per_second_)) * + time_since_os_start_ns_ = dchecked_cast( + (dchecked_cast(qpcnt.QuadPart) * 100000 / + dchecked_cast(os_ticks_per_second_)) * 10000); } @@ -237,7 +237,7 @@ int64_t TimeUTCMicros() { struct timeval time; gettimeofday(&time, nullptr); // Convert from second (1.0) and microsecond (1e-6). - return (static_cast(time.tv_sec) * rtc::kNumMicrosecsPerSec + + return (static_cast(time.tv_sec) * kNumMicrosecsPerSec + time.tv_usec); #elif defined(WEBRTC_WIN) FILETIME ft; @@ -255,4 +255,4 @@ int64_t TimeUTCMillis() { return TimeUTCMicros() / kNumMicrosecsPerMillisec; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/time_utils.h b/rtc_base/time_utils.h index 271c1d6e93..30cb8b8656 100644 --- a/rtc_base/time_utils.h +++ b/rtc_base/time_utils.h @@ -18,7 +18,7 @@ #include "rtc_base/system/rtc_export.h" #include "rtc_base/system_time.h" -namespace rtc { +namespace webrtc { static const int64_t kNumMillisecsPerSec = INT64_C(1000); static const int64_t kNumMicrosecsPerSec = INT64_C(1000000); @@ -127,14 +127,45 @@ int64_t TmToSeconds(const tm& tm); // Note that this function obeys the system's idea about what the time // is. It is not guaranteed to be monotonic; it will jump in case the // system time is changed, e.g., by some other process calling -// settimeofday. Always use rtc::TimeMicros(), not this function, for +// settimeofday. Always use webrtc::TimeMicros(), not this function, for // measuring time intervals and timeouts. -int64_t TimeUTCMicros(); +RTC_EXPORT int64_t TimeUTCMicros(); // Return the number of milliseconds since January 1, 1970, UTC. // See above. -int64_t TimeUTCMillis(); +RTC_EXPORT int64_t TimeUTCMillis(); + +} // namespace webrtc +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::ClockInterface; +using ::webrtc::GetClockForTesting; +using ::webrtc::kNtpJan1970Millisecs; +using ::webrtc::kNumMicrosecsPerMillisec; +using ::webrtc::kNumMicrosecsPerSec; +using ::webrtc::kNumMillisecsPerSec; +using ::webrtc::kNumNanosecsPerMicrosec; +using ::webrtc::kNumNanosecsPerMillisec; +using ::webrtc::kNumNanosecsPerSec; +using ::webrtc::SetClockForTesting; +using ::webrtc::SystemTimeMillis; +using ::webrtc::Time; +using ::webrtc::Time32; +using ::webrtc::TimeAfter; +using ::webrtc::TimeDiff; +using ::webrtc::TimeDiff32; +using ::webrtc::TimeMicros; +using ::webrtc::TimeMillis; +using ::webrtc::TimeNanos; +using ::webrtc::TimeSince; +using ::webrtc::TimeUntil; +using ::webrtc::TimeUTCMicros; +using ::webrtc::TimeUTCMillis; +using ::webrtc::TmToSeconds; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_TIME_UTILS_H_ diff --git a/rtc_base/time_utils_unittest.cc b/rtc_base/time_utils_unittest.cc index 09fb816636..06d98f9dca 100644 --- a/rtc_base/time_utils_unittest.cc +++ b/rtc_base/time_utils_unittest.cc @@ -13,14 +13,13 @@ #include #include "api/units/time_delta.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/event.h" #include "rtc_base/fake_clock.h" -#include "rtc_base/helpers.h" #include "rtc_base/thread.h" #include "test/gtest.h" -namespace rtc { -using ::webrtc::TimeDelta; +namespace webrtc { TEST(TimeTest, TimeInMs) { int64_t ts_earlier = TimeMillis(); @@ -57,28 +56,28 @@ TEST(TimeTest, Intervals) { TEST(TimeTest, TestTimeDiff64) { int64_t ts_diff = 100; - int64_t ts_earlier = rtc::TimeMillis(); + int64_t ts_earlier = TimeMillis(); int64_t ts_later = ts_earlier + ts_diff; - EXPECT_EQ(ts_diff, rtc::TimeDiff(ts_later, ts_earlier)); - EXPECT_EQ(-ts_diff, rtc::TimeDiff(ts_earlier, ts_later)); + EXPECT_EQ(ts_diff, TimeDiff(ts_later, ts_earlier)); + EXPECT_EQ(-ts_diff, TimeDiff(ts_earlier, ts_later)); } -class TmToSeconds : public ::testing::Test { +class TmToSecondsTest : public ::testing::Test { public: - TmToSeconds() { + TmToSecondsTest() { // Set use of the test RNG to get deterministic expiration timestamp. - rtc::SetRandomTestMode(true); + SetRandomTestMode(true); } - ~TmToSeconds() override { + ~TmToSecondsTest() override { // Put it back for the next test. - rtc::SetRandomTestMode(false); + SetRandomTestMode(false); } void TestTmToSeconds(int times) { static char mdays[12] = {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}; for (int i = 0; i < times; i++) { // First generate something correct and check that TmToSeconds is happy. - int year = rtc::CreateRandomId() % 400 + 1970; + int year = CreateRandomId() % 400 + 1970; bool leap_year = false; if (year % 4 == 0) @@ -90,16 +89,16 @@ class TmToSeconds : public ::testing::Test { std::tm tm; tm.tm_year = year - 1900; // std::tm is year 1900 based. - tm.tm_mon = rtc::CreateRandomId() % 12; - tm.tm_mday = rtc::CreateRandomId() % mdays[tm.tm_mon] + 1; - tm.tm_hour = rtc::CreateRandomId() % 24; - tm.tm_min = rtc::CreateRandomId() % 60; - tm.tm_sec = rtc::CreateRandomId() % 60; - int64_t t = rtc::TmToSeconds(tm); + tm.tm_mon = CreateRandomId() % 12; + tm.tm_mday = CreateRandomId() % mdays[tm.tm_mon] + 1; + tm.tm_hour = CreateRandomId() % 24; + tm.tm_min = CreateRandomId() % 60; + tm.tm_sec = CreateRandomId() % 60; + int64_t t = TmToSeconds(tm); EXPECT_TRUE(t >= 0); // Now damage a random field and check that TmToSeconds is unhappy. 
- switch (rtc::CreateRandomId() % 11) { + switch (CreateRandomId() % 11) { case 0: tm.tm_year = 1969 - 1900; break; @@ -134,26 +133,26 @@ class TmToSeconds : public ::testing::Test { tm.tm_sec = 60; break; } - EXPECT_EQ(rtc::TmToSeconds(tm), -1); + EXPECT_EQ(TmToSeconds(tm), -1); } // Check consistency with the system gmtime_r. With time_t, we can only // portably test dates until 2038, which is achieved by the % 0x80000000. for (int i = 0; i < times; i++) { - time_t t = rtc::CreateRandomId() % 0x80000000; + time_t t = CreateRandomId() % 0x80000000; #if defined(WEBRTC_WIN) std::tm* tm = std::gmtime(&t); EXPECT_TRUE(tm); - EXPECT_TRUE(rtc::TmToSeconds(*tm) == t); + EXPECT_TRUE(TmToSeconds(*tm) == t); #else std::tm tm; EXPECT_TRUE(gmtime_r(&t, &tm)); - EXPECT_TRUE(rtc::TmToSeconds(tm) == t); + EXPECT_TRUE(TmToSeconds(tm) == t); #endif } } }; -TEST_F(TmToSeconds, TestTmToSeconds) { +TEST_F(TmToSecondsTest, TestTmToSeconds) { TestTmToSeconds(100000); } @@ -163,7 +162,7 @@ TEST(FakeClock, TimeFunctionsUseFakeClock) { FakeClock clock; SetClockForTesting(&clock); - clock.SetTime(webrtc::Timestamp::Micros(987654)); + clock.SetTime(Timestamp::Micros(987654)); EXPECT_EQ(987u, Time32()); EXPECT_EQ(987, TimeMillis()); EXPECT_EQ(987654, TimeMicros()); @@ -182,21 +181,21 @@ TEST(FakeClock, InitialTime) { TEST(FakeClock, SetTime) { FakeClock clock; - clock.SetTime(webrtc::Timestamp::Micros(123)); + clock.SetTime(Timestamp::Micros(123)); EXPECT_EQ(123000, clock.TimeNanos()); - clock.SetTime(webrtc::Timestamp::Micros(456)); + clock.SetTime(Timestamp::Micros(456)); EXPECT_EQ(456000, clock.TimeNanos()); } TEST(FakeClock, AdvanceTime) { FakeClock clock; - clock.AdvanceTime(webrtc::TimeDelta::Micros(1u)); + clock.AdvanceTime(TimeDelta::Micros(1u)); EXPECT_EQ(1000, clock.TimeNanos()); - clock.AdvanceTime(webrtc::TimeDelta::Micros(2222u)); + clock.AdvanceTime(TimeDelta::Micros(2222u)); EXPECT_EQ(2223000, clock.TimeNanos()); - clock.AdvanceTime(webrtc::TimeDelta::Millis(3333u)); + clock.AdvanceTime(TimeDelta::Millis(3333u)); EXPECT_EQ(3335223000, clock.TimeNanos()); - clock.AdvanceTime(webrtc::TimeDelta::Seconds(4444u)); + clock.AdvanceTime(TimeDelta::Seconds(4444u)); EXPECT_EQ(4447335223000, clock.TimeNanos()); } @@ -226,8 +225,8 @@ TEST(FakeClock, SettingTimeWakesThreads) { // Advance the fake clock, expecting the worker thread to wake up // and dispatch the message instantly. 
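The FakeClock tests around here all rely on the same pattern: install a fake clock behind the global time functions, then drive it explicitly. A minimal sketch of that pattern under the new namespace (test name and values are illustrative):

  #include "api/units/time_delta.h"
  #include "api/units/timestamp.h"
  #include "rtc_base/fake_clock.h"
  #include "rtc_base/time_utils.h"
  #include "test/gtest.h"

  TEST(ExampleFakeClockTest, TimeIsDeterministic) {
    webrtc::FakeClock clock;
    webrtc::SetClockForTesting(&clock);
    clock.SetTime(webrtc::Timestamp::Micros(987654));
    EXPECT_EQ(987, webrtc::TimeMillis());  // Reads the fake clock.
    clock.AdvanceTime(webrtc::TimeDelta::Millis(13));
    EXPECT_EQ(1000, webrtc::TimeMillis());
    webrtc::SetClockForTesting(nullptr);   // Restore the real clock.
  }
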
- clock.AdvanceTime(webrtc::TimeDelta::Seconds(60u)); - EXPECT_TRUE(message_handler_dispatched.Wait(webrtc::TimeDelta::Zero())); + clock.AdvanceTime(TimeDelta::Seconds(60u)); + EXPECT_TRUE(message_handler_dispatched.Wait(TimeDelta::Zero())); worker->Stop(); SetClockForTesting(nullptr); @@ -238,4 +237,4 @@ TEST(FakeClock, SettingTimeWakesThreads) { EXPECT_LT(real_end_time_ms - real_start_time_ms, 10000); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/timestamp_aligner.cc b/rtc_base/timestamp_aligner.cc index 6c7ac1f839..d3ce44a323 100644 --- a/rtc_base/timestamp_aligner.cc +++ b/rtc_base/timestamp_aligner.cc @@ -17,7 +17,7 @@ #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" -namespace rtc { +namespace webrtc { TimestampAligner::TimestampAligner() : frames_seen_(0), @@ -140,4 +140,4 @@ int64_t TimestampAligner::ClipTimestamp(int64_t filtered_time_us, return time_us; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/timestamp_aligner.h b/rtc_base/timestamp_aligner.h index 636afcd659..d060a354e9 100644 --- a/rtc_base/timestamp_aligner.h +++ b/rtc_base/timestamp_aligner.h @@ -16,17 +16,17 @@ #include "rtc_base/system/rtc_export.h" #include "rtc_base/time_utils.h" -namespace rtc { +namespace webrtc { // The TimestampAligner class helps translating timestamps of a capture system -// into the same timescale as is used by rtc::TimeMicros(). Some capture systems -// provide timestamps, which comes from the capturing hardware (camera or sound -// card) or stamped close to the capturing hardware. Such timestamps are more -// accurate (less jittery) than reading the system clock, but may have a -// different epoch and unknown clock drift. Frame timestamps in webrtc should -// use rtc::TimeMicros (system monotonic time), and this class provides a filter -// which lets us use the rtc::TimeMicros timescale, and at the same time take -// advantage of higher accuracy of the capturer's clock. +// into the same timescale as is used by webrtc::TimeMicros(). Some capture +// systems provide timestamps, which comes from the capturing hardware (camera +// or sound card) or stamped close to the capturing hardware. Such timestamps +// are more accurate (less jittery) than reading the system clock, but may have +// a different epoch and unknown clock drift. Frame timestamps in webrtc should +// use webrtc::TimeMicros (system monotonic time), and this class provides a +// filter which lets us use the webrtc::TimeMicros timescale, and at the same +// time take advantage of higher accuracy of the capturer's clock. // This class is not thread safe, so all calls to it must be synchronized // externally. @@ -43,12 +43,12 @@ class RTC_EXPORT TimestampAligner { // "TranslateTimestamp(int64_t capturer_time_us, int64_t system_time_us)" // This avoids the caller from getting two timestamps with the same // millisecond. - static constexpr int64_t kMinFrameIntervalUs = rtc::kNumMicrosecsPerMillisec; + static constexpr int64_t kMinFrameIntervalUs = kNumMicrosecsPerMillisec; // Translates timestamps of a capture system to the same timescale as is used - // by rtc::TimeMicros(). `capturer_time_us` is assumed to be accurate, but + // by webrtc::TimeMicros(). `capturer_time_us` is assumed to be accurate, but // with an unknown epoch and clock drift. `system_time_us` is - // time according to rtc::TimeMicros(), preferably read as soon as + // time according to webrtc::TimeMicros(), preferably read as soon as // possible when the frame is captured. 
It may have poor accuracy // due to poor resolution or scheduling delays. Returns the // translated timestamp. @@ -88,6 +88,14 @@ class RTC_EXPORT TimestampAligner { int64_t prev_time_offset_us_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::TimestampAligner; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_TIMESTAMP_ALIGNER_H_ diff --git a/rtc_base/timestamp_aligner_unittest.cc b/rtc_base/timestamp_aligner_unittest.cc index ca91b62625..b280e36911 100644 --- a/rtc_base/timestamp_aligner_unittest.cc +++ b/rtc_base/timestamp_aligner_unittest.cc @@ -19,7 +19,7 @@ #include "rtc_base/time_utils.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { // Computes the difference x_k - mean(x), when x_k is the linear sequence x_k = @@ -59,8 +59,8 @@ void TestTimestampFilter(double rel_freq_error) { const int kNumFrames = 3 * kWindowSize; int64_t interval_error_us = kIntervalUs * rel_freq_error; - int64_t system_start_us = rtc::TimeMicros(); - webrtc::Random random(17); + int64_t system_start_us = TimeMicros(); + Random random(17); int64_t prev_translated_time_us = system_start_us; @@ -84,7 +84,7 @@ void TestTimestampFilter(double rel_freq_error) { EXPECT_LE(translated_time_us, system_measured_us); EXPECT_GE(translated_time_us, - prev_translated_time_us + rtc::kNumMicrosecsPerMillisec); + prev_translated_time_us + kNumMicrosecsPerMillisec); // The relative frequency error contributes to the expected error // by a factor which is the difference between the current time @@ -176,7 +176,7 @@ TEST(TimestampAlignerTest, ClipToMonotonous) { if (translated_timestamp_us <= prev_timestamp_us) { did_clip = true; EXPECT_EQ(clip_timestamp_us, - prev_timestamp_us + rtc::kNumMicrosecsPerMillisec); + prev_timestamp_us + kNumMicrosecsPerMillisec); } else { // No change from clipping. EXPECT_EQ(clip_timestamp_us, translated_timestamp_us); @@ -204,4 +204,4 @@ TEST(TimestampAlignerTest, TranslateTimestampWithoutStateUpdate) { } } -} // namespace rtc +} // namespace webrtc diff --git a/p2p/base/udp_port.h b/rtc_base/trace_categories.cc similarity index 57% rename from p2p/base/udp_port.h rename to rtc_base/trace_categories.cc index 2fd68680cf..d906f30b26 100644 --- a/p2p/base/udp_port.h +++ b/rtc_base/trace_categories.cc @@ -1,5 +1,5 @@ /* - * Copyright 2004 The WebRTC Project Authors. All rights reserved. + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,10 +8,10 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef P2P_BASE_UDP_PORT_H_ -#define P2P_BASE_UDP_PORT_H_ +#include "rtc_base/trace_categories.h" -// StunPort will be handling UDPPort functionality. -#include "p2p/base/stun_port.h" +#if defined(RTC_USE_PERFETTO) -#endif // P2P_BASE_UDP_PORT_H_ +PERFETTO_TRACK_EVENT_STATIC_STORAGE_IN_NAMESPACE_WITH_ATTRS(webrtc, RTC_EXPORT); + +#endif // RTC_USE_PERFETTO diff --git a/rtc_base/trace_categories.h b/rtc_base/trace_categories.h new file mode 100644 index 0000000000..ef4070ca07 --- /dev/null +++ b/rtc_base/trace_categories.h @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef RTC_BASE_TRACE_CATEGORIES_H_ +#define RTC_BASE_TRACE_CATEGORIES_H_ + +#if defined(RTC_USE_PERFETTO) + +#define PERFETTO_ENABLE_LEGACY_TRACE_EVENTS 1 + +#include "rtc_base/system/rtc_export.h" +#include "third_party/perfetto/include/perfetto/tracing/track_event.h" // IWYU pragma: export +#include "third_party/perfetto/include/perfetto/tracing/track_event_category_registry.h" +#include "third_party/perfetto/include/perfetto/tracing/track_event_legacy.h" // IWYU pragma: export + +PERFETTO_DEFINE_TEST_CATEGORY_PREFIXES("webrtc-test"); + +PERFETTO_DEFINE_CATEGORIES_IN_NAMESPACE_WITH_ATTRS( + webrtc, + RTC_EXPORT, + perfetto::Category("webrtc"), + perfetto::Category("webrtc_stats"), + perfetto::Category(TRACE_DISABLED_BY_DEFAULT("webrtc")), + perfetto::Category(TRACE_DISABLED_BY_DEFAULT("webrtc_stats"))); + +PERFETTO_USE_CATEGORIES_FROM_NAMESPACE(webrtc); + +#endif // RTC_USE_PERFETTO + +#endif // RTC_BASE_TRACE_CATEGORIES_H_ diff --git a/rtc_base/trace_event.h b/rtc_base/trace_event.h index 6689bc0c37..3329abd66c 100644 --- a/rtc_base/trace_event.h +++ b/rtc_base/trace_event.h @@ -1,25 +1,48 @@ -// Copyright (c) 2012 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file under third_party_mods/chromium or at: -// http://src.chromium.org/svn/trunk/src/LICENSE +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ #ifndef RTC_BASE_TRACE_EVENT_H_ #define RTC_BASE_TRACE_EVENT_H_ -#include - -#include "rtc_base/event_tracer.h" - -#if defined(TRACE_EVENT0) -#error "Another copy of trace_event.h has already been included." -#endif - #if defined(RTC_DISABLE_TRACE_EVENTS) #define RTC_TRACE_EVENTS_ENABLED 0 #else #define RTC_TRACE_EVENTS_ENABLED 1 #endif +// IWYU pragma: begin_exports +#if defined(RTC_USE_PERFETTO) +#include "rtc_base/trace_categories.h" +#endif +#include "third_party/perfetto/include/perfetto/tracing/event_context.h" +#include "third_party/perfetto/include/perfetto/tracing/track.h" +#include "third_party/perfetto/include/perfetto/tracing/track_event_args.h" +// IWYU pragma: end_exports + +#if !defined(RTC_USE_PERFETTO) +#include + +#include "rtc_base/event_tracer.h" + +#define RTC_NOOP() \ + do { \ + } while (0) + +// TODO(b/42226290): Add implementation for these events with Perfetto. +#define TRACE_EVENT_BEGIN(category, name, ...) RTC_NOOP(); +#define TRACE_EVENT_END(category, ...) RTC_NOOP(); +#define TRACE_EVENT(category, name, ...) RTC_NOOP(); +#define TRACE_EVENT_INSTANT(category, name, ...) RTC_NOOP(); +#define TRACE_EVENT_CATEGORY_ENABLED(category) RTC_NOOP(); +#define TRACE_COUNTER(category, track, ...) RTC_NOOP(); + // Type values for identifying types in the TraceValue union. 
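With the categories registered in trace_categories.h, Perfetto-style call sites name a category from that list plus an event name. A rough sketch, assuming an illustrative function to trace; per the block above, these macros are currently no-ops unless RTC_USE_PERFETTO is defined:

  #include "rtc_base/trace_event.h"

  void DecodeFrame() {
    // Scoped slice in the "webrtc" category; it ends when the function returns.
    TRACE_EVENT("webrtc", "DecodeFrame");
    // Single point-in-time event in the "webrtc_stats" category.
    TRACE_EVENT_INSTANT("webrtc_stats", "FrameDecoded");
  }
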
#define TRACE_VALUE_TYPE_BOOL (static_cast(1)) #define TRACE_VALUE_TYPE_UINT (static_cast(2)) @@ -29,6 +52,10 @@ #define TRACE_VALUE_TYPE_STRING (static_cast(6)) #define TRACE_VALUE_TYPE_COPY_STRING (static_cast(7)) +#if defined(TRACE_EVENT0) +#error "Another copy of trace_event.h has already been included." +#endif + #if RTC_TRACE_EVENTS_ENABLED // Extracted from Chromium's src/base/debug/trace_event.h. @@ -127,21 +154,6 @@ // application. In Chrome's case, navigating to about:tracing will turn on // tracing and display data collected across all active processes. // -// -// Memory scoping note: -// Tracing copies the pointers, not the string content, of the strings passed -// in for category, name, and arg_names. Thus, the following code will -// cause problems: -// char* str = strdup("impprtantName"); -// TRACE_EVENT_INSTANT0("SUBSYSTEM", str); // BAD! -// free(str); // Trace system now has dangling pointer -// -// To avoid this issue with the `name` and `arg_name` parameters, use the -// TRACE_EVENT_COPY_XXX overloads of the macros at additional runtime overhead. -// Notes: The category must always be in a long-lived char* (i.e. static const). -// The `arg_values`, when used, are always deep copied with the _COPY -// macros. -// // When are string argument values copied: // const char* arg_values are only referenced by default: // TRACE_EVENT1("category", "name", @@ -185,32 +197,27 @@ INTERNAL_TRACE_EVENT_ADD_SCOPED(category, name, arg1_name, arg1_val, \ arg2_name, arg2_val) +// Enum reflecting the scope of an INSTANT event. Must fit within +// TRACE_EVENT_FLAG_SCOPE_MASK. +static constexpr uint8_t TRACE_EVENT_SCOPE_GLOBAL = 0u << 2; +static constexpr uint8_t TRACE_EVENT_SCOPE_PROCESS = 1u << 2; +static constexpr uint8_t TRACE_EVENT_SCOPE_THREAD = 2u << 2; + // Records a single event called "name" immediately, with 0, 1 or 2 // associated arguments. If the category is not enabled, then this // does nothing. // - category and name strings must have application lifetime (statics or // literals). They may not include " chars. 
-#define TRACE_EVENT_INSTANT0(category, name) \ +#define TRACE_EVENT_INSTANT0(category, name, scope) \ INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ TRACE_EVENT_FLAG_NONE) -#define TRACE_EVENT_INSTANT1(category, name, arg1_name, arg1_val) \ - INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ +#define TRACE_EVENT_INSTANT1(category, name, scope, arg1_name, arg1_val) \ + INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val) -#define TRACE_EVENT_INSTANT2(category, name, arg1_name, arg1_val, arg2_name, \ - arg2_val) \ - INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ - TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val, \ - arg2_name, arg2_val) -#define TRACE_EVENT_COPY_INSTANT0(category, name) \ - INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ - TRACE_EVENT_FLAG_COPY) -#define TRACE_EVENT_COPY_INSTANT1(category, name, arg1_name, arg1_val) \ - INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ - TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val) -#define TRACE_EVENT_COPY_INSTANT2(category, name, arg1_name, arg1_val, \ - arg2_name, arg2_val) \ - INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ - TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val, \ +#define TRACE_EVENT_INSTANT2(category, name, scope, arg1_name, arg1_val, \ + arg2_name, arg2_val) \ + INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_INSTANT, category, name, \ + TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val, \ arg2_name, arg2_val) // Records a single BEGIN event called "name" immediately, with 0, 1 or 2 @@ -229,17 +236,6 @@ INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_BEGIN, category, name, \ TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val, \ arg2_name, arg2_val) -#define TRACE_EVENT_COPY_BEGIN0(category, name) \ - INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_BEGIN, category, name, \ - TRACE_EVENT_FLAG_COPY) -#define TRACE_EVENT_COPY_BEGIN1(category, name, arg1_name, arg1_val) \ - INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_BEGIN, category, name, \ - TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val) -#define TRACE_EVENT_COPY_BEGIN2(category, name, arg1_name, arg1_val, \ - arg2_name, arg2_val) \ - INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_BEGIN, category, name, \ - TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val, \ - arg2_name, arg2_val) // Records a single END event for "name" immediately. If the category // is not enabled, then this does nothing. @@ -256,17 +252,6 @@ INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_END, category, name, \ TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val, \ arg2_name, arg2_val) -#define TRACE_EVENT_COPY_END0(category, name) \ - INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_END, category, name, \ - TRACE_EVENT_FLAG_COPY) -#define TRACE_EVENT_COPY_END1(category, name, arg1_name, arg1_val) \ - INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_END, category, name, \ - TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val) -#define TRACE_EVENT_COPY_END2(category, name, arg1_name, arg1_val, arg2_name, \ - arg2_val) \ - INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_END, category, name, \ - TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val, \ - arg2_name, arg2_val) // Records the value of a counter called "name" immediately. Value // must be representable as a 32 bit integer. 
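After this change the legacy instant macros take an explicit scope argument, matching their Chromium counterparts; note that in the non-Perfetto expansion shown above the scope is accepted for source compatibility but not forwarded (TRACE_EVENT_FLAG_NONE is still passed). A sketch of a migrated call site, with an illustrative event and argument:

  #include <cstdint>

  #include "rtc_base/trace_event.h"

  void ReportLoss(uint32_t ssrc) {
    // Before: TRACE_EVENT_INSTANT1("webrtc", "PacketLost", "ssrc", ssrc);
    TRACE_EVENT_INSTANT1("webrtc", "PacketLost", TRACE_EVENT_SCOPE_THREAD,
                         "ssrc", ssrc);
  }
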
@@ -276,10 +261,6 @@ INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_COUNTER, category, name, \ TRACE_EVENT_FLAG_NONE, "value", \ static_cast(value)) -#define TRACE_COPY_COUNTER1(category, name, value) \ - INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_COUNTER, category, name, \ - TRACE_EVENT_FLAG_COPY, "value", \ - static_cast(value)) // Records the values of a multi-parted counter called "name" immediately. // The UI will treat value1 and value2 as parts of a whole, displaying their @@ -292,12 +273,6 @@ TRACE_EVENT_FLAG_NONE, value1_name, \ static_cast(value1_val), value2_name, \ static_cast(value2_val)) -#define TRACE_COPY_COUNTER2(category, name, value1_name, value1_val, \ - value2_name, value2_val) \ - INTERNAL_TRACE_EVENT_ADD(TRACE_EVENT_PHASE_COUNTER, category, name, \ - TRACE_EVENT_FLAG_COPY, value1_name, \ - static_cast(value1_val), value2_name, \ - static_cast(value2_val)) // Records the value of a counter called "name" immediately. Value // must be representable as a 32 bit integer. @@ -311,10 +286,6 @@ INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_COUNTER, category, name, \ id, TRACE_EVENT_FLAG_NONE, "value", \ static_cast(value)) -#define TRACE_COPY_COUNTER_ID1(category, name, id, value) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_COUNTER, category, name, \ - id, TRACE_EVENT_FLAG_COPY, "value", \ - static_cast(value)) // Records the values of a multi-parted counter called "name" immediately. // The UI will treat value1 and value2 as parts of a whole, displaying their @@ -331,12 +302,6 @@ id, TRACE_EVENT_FLAG_NONE, value1_name, \ static_cast(value1_val), value2_name, \ static_cast(value2_val)) -#define TRACE_COPY_COUNTER_ID2(category, name, id, value1_name, value1_val, \ - value2_name, value2_val) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_COUNTER, category, name, \ - id, TRACE_EVENT_FLAG_COPY, value1_name, \ - static_cast(value1_val), value2_name, \ - static_cast(value2_val)) // Records a single ASYNC_BEGIN event called "name" immediately, with 0, 1 or 2 // associated arguments. If the category is not enabled, then this @@ -367,40 +332,20 @@ INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_BEGIN, category, \ name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \ arg1_val, arg2_name, arg2_val) -#define TRACE_EVENT_COPY_ASYNC_BEGIN0(category, name, id) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_BEGIN, category, \ - name, id, TRACE_EVENT_FLAG_COPY) -#define TRACE_EVENT_COPY_ASYNC_BEGIN1(category, name, id, arg1_name, arg1_val) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_BEGIN, category, \ - name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ - arg1_val) -#define TRACE_EVENT_COPY_ASYNC_BEGIN2(category, name, id, arg1_name, arg1_val, \ - arg2_name, arg2_val) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_BEGIN, category, \ - name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ - arg1_val, arg2_name, arg2_val) // Records a single ASYNC_STEP event for `step` immediately. If the category // is not enabled, then this does nothing. The `name` and `id` must match the // ASYNC_BEGIN event above. The `step` param identifies this step within the // async event. This should be called at the beginning of the next phase of an // asynchronous operation. 
-#define TRACE_EVENT_ASYNC_STEP0(category, name, id, step) \ +#define TRACE_EVENT_ASYNC_STEP_INTO0(category, name, id, step) \ INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_STEP, category, \ name, id, TRACE_EVENT_FLAG_NONE, "step", \ step) -#define TRACE_EVENT_ASYNC_STEP1(category, name, id, step, arg1_name, arg1_val) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_STEP, category, \ - name, id, TRACE_EVENT_FLAG_NONE, "step", \ - step, arg1_name, arg1_val) -#define TRACE_EVENT_COPY_ASYNC_STEP0(category, name, id, step) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_STEP, category, \ - name, id, TRACE_EVENT_FLAG_COPY, "step", \ - step) -#define TRACE_EVENT_COPY_ASYNC_STEP1(category, name, id, step, arg1_name, \ +#define TRACE_EVENT_ASYNC_STEP_INTO1(category, name, id, step, arg1_name, \ arg1_val) \ INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_STEP, category, \ - name, id, TRACE_EVENT_FLAG_COPY, "step", \ + name, id, TRACE_EVENT_FLAG_NONE, "step", \ step, arg1_name, arg1_val) // Records a single ASYNC_END event for "name" immediately. If the category @@ -417,112 +362,6 @@ INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_END, category, \ name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \ arg1_val, arg2_name, arg2_val) -#define TRACE_EVENT_COPY_ASYNC_END0(category, name, id) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_END, category, \ - name, id, TRACE_EVENT_FLAG_COPY) -#define TRACE_EVENT_COPY_ASYNC_END1(category, name, id, arg1_name, arg1_val) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_END, category, \ - name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ - arg1_val) -#define TRACE_EVENT_COPY_ASYNC_END2(category, name, id, arg1_name, arg1_val, \ - arg2_name, arg2_val) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_ASYNC_END, category, \ - name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ - arg1_val, arg2_name, arg2_val) - -// Records a single FLOW_BEGIN event called "name" immediately, with 0, 1 or 2 -// associated arguments. If the category is not enabled, then this -// does nothing. -// - category and name strings must have application lifetime (statics or -// literals). They may not include " chars. -// - `id` is used to match the FLOW_BEGIN event with the FLOW_END event. FLOW -// events are considered to match if their category, name and id values all -// match. `id` must either be a pointer or an integer value up to 64 bits. If -// it's a pointer, the bits will be xored with a hash of the process ID so -// that the same pointer on two different processes will not collide. -// FLOW events are different from ASYNC events in how they are drawn by the -// tracing UI. A FLOW defines asynchronous data flow, such as posting a task -// (FLOW_BEGIN) and later executing that task (FLOW_END). Expect FLOWs to be -// drawn as lines or arrows from FLOW_BEGIN scopes to FLOW_END scopes. Similar -// to ASYNC, a FLOW can consist of multiple phases. The first phase is defined -// by the FLOW_BEGIN calls. Additional phases can be defined using the FLOW_STEP -// macros. When the operation completes, call FLOW_END. An async operation can -// span threads and processes, but all events in that operation must use the -// same `name` and `id`. Each event can have its own args. 
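The ASYNC_STEP macros are renamed to the _STEP_INTO spelling used by Chromium. A sketch of an asynchronous operation traced across its phases (the operation, step names and id are illustrative); as the comments above require, the category, name and id must match across BEGIN, STEP_INTO and END:

  #include "rtc_base/trace_event.h"

  void TraceConnectionSetup(void* id) {
    TRACE_EVENT_ASYNC_BEGIN0("webrtc", "ConnectionSetup", id);
    TRACE_EVENT_ASYNC_STEP_INTO0("webrtc", "ConnectionSetup", id, "Gathering");
    TRACE_EVENT_ASYNC_STEP_INTO1("webrtc", "ConnectionSetup", id, "Handshake",
                                 "attempt", 1);
    TRACE_EVENT_ASYNC_END0("webrtc", "ConnectionSetup", id);
  }
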
-#define TRACE_EVENT_FLOW_BEGIN0(category, name, id) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ - name, id, TRACE_EVENT_FLAG_NONE) -#define TRACE_EVENT_FLOW_BEGIN1(category, name, id, arg1_name, arg1_val) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ - name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \ - arg1_val) -#define TRACE_EVENT_FLOW_BEGIN2(category, name, id, arg1_name, arg1_val, \ - arg2_name, arg2_val) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ - name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \ - arg1_val, arg2_name, arg2_val) -#define TRACE_EVENT_COPY_FLOW_BEGIN0(category, name, id) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ - name, id, TRACE_EVENT_FLAG_COPY) -#define TRACE_EVENT_COPY_FLOW_BEGIN1(category, name, id, arg1_name, arg1_val) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ - name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ - arg1_val) -#define TRACE_EVENT_COPY_FLOW_BEGIN2(category, name, id, arg1_name, arg1_val, \ - arg2_name, arg2_val) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_BEGIN, category, \ - name, id, TRACE_EVENT_FLAG_COPY, arg1_name, \ - arg1_val, arg2_name, arg2_val) - -// Records a single FLOW_STEP event for `step` immediately. If the category -// is not enabled, then this does nothing. The `name` and `id` must match the -// FLOW_BEGIN event above. The `step` param identifies this step within the -// async event. This should be called at the beginning of the next phase of an -// asynchronous operation. -#define TRACE_EVENT_FLOW_STEP0(category, name, id, step) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_STEP, category, \ - name, id, TRACE_EVENT_FLAG_NONE, "step", \ - step) -#define TRACE_EVENT_FLOW_STEP1(category, name, id, step, arg1_name, arg1_val) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_STEP, category, \ - name, id, TRACE_EVENT_FLAG_NONE, "step", \ - step, arg1_name, arg1_val) -#define TRACE_EVENT_COPY_FLOW_STEP0(category, name, id, step) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_STEP, category, \ - name, id, TRACE_EVENT_FLAG_COPY, "step", \ - step) -#define TRACE_EVENT_COPY_FLOW_STEP1(category, name, id, step, arg1_name, \ - arg1_val) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_STEP, category, \ - name, id, TRACE_EVENT_FLAG_COPY, "step", \ - step, arg1_name, arg1_val) - -// Records a single FLOW_END event for "name" immediately. If the category -// is not enabled, then this does nothing. 
-#define TRACE_EVENT_FLOW_END0(category, name, id) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ - id, TRACE_EVENT_FLAG_NONE) -#define TRACE_EVENT_FLOW_END1(category, name, id, arg1_name, arg1_val) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ - id, TRACE_EVENT_FLAG_NONE, arg1_name, \ - arg1_val) -#define TRACE_EVENT_FLOW_END2(category, name, id, arg1_name, arg1_val, \ - arg2_name, arg2_val) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ - id, TRACE_EVENT_FLAG_NONE, arg1_name, \ - arg1_val, arg2_name, arg2_val) -#define TRACE_EVENT_COPY_FLOW_END0(category, name, id) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ - id, TRACE_EVENT_FLAG_COPY) -#define TRACE_EVENT_COPY_FLOW_END1(category, name, id, arg1_name, arg1_val) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ - id, TRACE_EVENT_FLAG_COPY, arg1_name, \ - arg1_val) -#define TRACE_EVENT_COPY_FLOW_END2(category, name, id, arg1_name, arg1_val, \ - arg2_name, arg2_val) \ - INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_FLOW_END, category, name, \ - id, TRACE_EVENT_FLAG_COPY, arg1_name, \ - arg1_val, arg2_name, arg2_val) //////////////////////////////////////////////////////////////////////////////// // Implementation specific tracing API definitions. @@ -630,15 +469,11 @@ #define TRACE_EVENT_PHASE_ASYNC_BEGIN ('S') #define TRACE_EVENT_PHASE_ASYNC_STEP ('T') #define TRACE_EVENT_PHASE_ASYNC_END ('F') -#define TRACE_EVENT_PHASE_FLOW_BEGIN ('s') -#define TRACE_EVENT_PHASE_FLOW_STEP ('t') -#define TRACE_EVENT_PHASE_FLOW_END ('f') #define TRACE_EVENT_PHASE_METADATA ('M') #define TRACE_EVENT_PHASE_COUNTER ('C') // Flags for changing the behavior of TRACE_EVENT_API_ADD_TRACE_EVENT. #define TRACE_EVENT_FLAG_NONE (static_cast(0)) -#define TRACE_EVENT_FLAG_COPY (static_cast(1 << 0)) #define TRACE_EVENT_FLAG_HAS_ID (static_cast(1 << 1)) #define TRACE_EVENT_FLAG_MANGLE_ID (static_cast(1 << 2)) @@ -898,18 +733,13 @@ class TraceEndOnScopeClose { } // namespace trace_event_internal } // namespace webrtc + #else //////////////////////////////////////////////////////////////////////////////// // This section defines no-op alternatives to the tracing macros when // RTC_DISABLE_TRACE_EVENTS is defined. 
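A quick illustration of why RTC_NOOP() is spelled do { } while (0): the disabled macro stays a single statement that demands a trailing semicolon, so braceless if/else call sites parse the same way in every build. The surrounding code below is illustrative:

  #include "rtc_base/logging.h"
  #include "rtc_base/trace_event.h"

  void MaybeReport(bool report_stats, int sent_packets) {
    if (report_stats)
      TRACE_COUNTER1("webrtc", "sent_packets", sent_packets);
    else
      RTC_LOG(LS_VERBOSE) << "stats reporting disabled";
    // If the disabled expansion were `{}` instead, the semicolon after the
    // macro would close the `if` and leave the `else` dangling.
  }
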
-#define RTC_NOOP() \ - do { \ - } while (0) - -#define TRACE_STR_COPY(str) RTC_NOOP() - #define TRACE_DISABLED_BY_DEFAULT(name) "disabled-by-default-" name #define TRACE_ID_MANGLE(id) 0 @@ -919,18 +749,12 @@ class TraceEndOnScopeClose { #define TRACE_EVENT2(category, name, arg1_name, arg1_val, arg2_name, arg2_val) \ RTC_NOOP() -#define TRACE_EVENT_INSTANT0(category, name) RTC_NOOP() -#define TRACE_EVENT_INSTANT1(category, name, arg1_name, arg1_val) RTC_NOOP() - -#define TRACE_EVENT_INSTANT2(category, name, arg1_name, arg1_val, arg2_name, \ - arg2_val) \ +#define TRACE_EVENT_INSTANT0(category, name, scope) RTC_NOOP() +#define TRACE_EVENT_INSTANT1(category, name, scope, arg1_name, arg1_val) \ RTC_NOOP() -#define TRACE_EVENT_COPY_INSTANT0(category, name) RTC_NOOP() -#define TRACE_EVENT_COPY_INSTANT1(category, name, arg1_name, arg1_val) \ - RTC_NOOP() -#define TRACE_EVENT_COPY_INSTANT2(category, name, arg1_name, arg1_val, \ - arg2_name, arg2_val) \ +#define TRACE_EVENT_INSTANT2(category, name, scope, arg1_name, arg1_val, \ + arg2_name, arg2_val) \ RTC_NOOP() #define TRACE_EVENT_BEGIN0(category, name) RTC_NOOP() @@ -938,42 +762,24 @@ class TraceEndOnScopeClose { #define TRACE_EVENT_BEGIN2(category, name, arg1_name, arg1_val, arg2_name, \ arg2_val) \ RTC_NOOP() -#define TRACE_EVENT_COPY_BEGIN0(category, name) RTC_NOOP() -#define TRACE_EVENT_COPY_BEGIN1(category, name, arg1_name, arg1_val) RTC_NOOP() -#define TRACE_EVENT_COPY_BEGIN2(category, name, arg1_name, arg1_val, \ - arg2_name, arg2_val) \ - RTC_NOOP() #define TRACE_EVENT_END0(category, name) RTC_NOOP() #define TRACE_EVENT_END1(category, name, arg1_name, arg1_val) RTC_NOOP() #define TRACE_EVENT_END2(category, name, arg1_name, arg1_val, arg2_name, \ arg2_val) \ RTC_NOOP() -#define TRACE_EVENT_COPY_END0(category, name) RTC_NOOP() -#define TRACE_EVENT_COPY_END1(category, name, arg1_name, arg1_val) RTC_NOOP() -#define TRACE_EVENT_COPY_END2(category, name, arg1_name, arg1_val, arg2_name, \ - arg2_val) \ - RTC_NOOP() #define TRACE_COUNTER1(category, name, value) RTC_NOOP() -#define TRACE_COPY_COUNTER1(category, name, value) RTC_NOOP() #define TRACE_COUNTER2(category, name, value1_name, value1_val, value2_name, \ value2_val) \ RTC_NOOP() -#define TRACE_COPY_COUNTER2(category, name, value1_name, value1_val, \ - value2_name, value2_val) \ - RTC_NOOP() #define TRACE_COUNTER_ID1(category, name, id, value) RTC_NOOP() -#define TRACE_COPY_COUNTER_ID1(category, name, id, value) RTC_NOOP() #define TRACE_COUNTER_ID2(category, name, id, value1_name, value1_val, \ value2_name, value2_val) \ RTC_NOOP() -#define TRACE_COPY_COUNTER_ID2(category, name, id, value1_name, value1_val, \ - value2_name, value2_val) \ - RTC_NOOP() #define TRACE_EVENT_ASYNC_BEGIN0(category, name, id) RTC_NOOP() #define TRACE_EVENT_ASYNC_BEGIN1(category, name, id, arg1_name, arg1_val) \ @@ -981,18 +787,9 @@ class TraceEndOnScopeClose { #define TRACE_EVENT_ASYNC_BEGIN2(category, name, id, arg1_name, arg1_val, \ arg2_name, arg2_val) \ RTC_NOOP() -#define TRACE_EVENT_COPY_ASYNC_BEGIN0(category, name, id) RTC_NOOP() -#define TRACE_EVENT_COPY_ASYNC_BEGIN1(category, name, id, arg1_name, arg1_val) \ - RTC_NOOP() -#define TRACE_EVENT_COPY_ASYNC_BEGIN2(category, name, id, arg1_name, arg1_val, \ - arg2_name, arg2_val) \ - RTC_NOOP() -#define TRACE_EVENT_ASYNC_STEP0(category, name, id, step) RTC_NOOP() -#define TRACE_EVENT_ASYNC_STEP1(category, name, id, step, arg1_name, arg1_val) \ - RTC_NOOP() -#define TRACE_EVENT_COPY_ASYNC_STEP0(category, name, id, step) RTC_NOOP() -#define 
TRACE_EVENT_COPY_ASYNC_STEP1(category, name, id, step, arg1_name, \ +#define TRACE_EVENT_ASYNC_STEP_INTO0(category, name, id, step) RTC_NOOP() +#define TRACE_EVENT_ASYNC_STEP_INTO1(category, name, id, step, arg1_name, \ arg1_val) \ RTC_NOOP() @@ -1002,51 +799,12 @@ class TraceEndOnScopeClose { #define TRACE_EVENT_ASYNC_END2(category, name, id, arg1_name, arg1_val, \ arg2_name, arg2_val) \ RTC_NOOP() -#define TRACE_EVENT_COPY_ASYNC_END0(category, name, id) RTC_NOOP() -#define TRACE_EVENT_COPY_ASYNC_END1(category, name, id, arg1_name, arg1_val) \ - RTC_NOOP() -#define TRACE_EVENT_COPY_ASYNC_END2(category, name, id, arg1_name, arg1_val, \ - arg2_name, arg2_val) \ - RTC_NOOP() - -#define TRACE_EVENT_FLOW_BEGIN0(category, name, id) RTC_NOOP() -#define TRACE_EVENT_FLOW_BEGIN1(category, name, id, arg1_name, arg1_val) \ - RTC_NOOP() -#define TRACE_EVENT_FLOW_BEGIN2(category, name, id, arg1_name, arg1_val, \ - arg2_name, arg2_val) \ - RTC_NOOP() -#define TRACE_EVENT_COPY_FLOW_BEGIN0(category, name, id) RTC_NOOP() -#define TRACE_EVENT_COPY_FLOW_BEGIN1(category, name, id, arg1_name, arg1_val) \ - RTC_NOOP() -#define TRACE_EVENT_COPY_FLOW_BEGIN2(category, name, id, arg1_name, arg1_val, \ - arg2_name, arg2_val) \ - RTC_NOOP() - -#define TRACE_EVENT_FLOW_STEP0(category, name, id, step) RTC_NOOP() -#define TRACE_EVENT_FLOW_STEP1(category, name, id, step, arg1_name, arg1_val) \ - RTC_NOOP() -#define TRACE_EVENT_COPY_FLOW_STEP0(category, name, id, step) RTC_NOOP() -#define TRACE_EVENT_COPY_FLOW_STEP1(category, name, id, step, arg1_name, \ - arg1_val) \ - RTC_NOOP() - -#define TRACE_EVENT_FLOW_END0(category, name, id) RTC_NOOP() -#define TRACE_EVENT_FLOW_END1(category, name, id, arg1_name, arg1_val) \ - RTC_NOOP() -#define TRACE_EVENT_FLOW_END2(category, name, id, arg1_name, arg1_val, \ - arg2_name, arg2_val) \ - RTC_NOOP() -#define TRACE_EVENT_COPY_FLOW_END0(category, name, id) RTC_NOOP() -#define TRACE_EVENT_COPY_FLOW_END1(category, name, id, arg1_name, arg1_val) \ - RTC_NOOP() -#define TRACE_EVENT_COPY_FLOW_END2(category, name, id, arg1_name, arg1_val, \ - arg2_name, arg2_val) \ - RTC_NOOP() #define TRACE_EVENT_API_GET_CATEGORY_ENABLED "" #define TRACE_EVENT_API_ADD_TRACE_EVENT RTC_NOOP() #endif // RTC_TRACE_EVENTS_ENABLED +#endif // RTC_USE_PERFETTO #endif // RTC_BASE_TRACE_EVENT_H_ diff --git a/rtc_base/type_traits.h b/rtc_base/type_traits.h index 0cb899c47f..9f17acb522 100644 --- a/rtc_base/type_traits.h +++ b/rtc_base/type_traits.h @@ -12,9 +12,10 @@ #define RTC_BASE_TYPE_TRAITS_H_ #include +#include #include -namespace rtc { +namespace webrtc { // Determines if the given class has zero-argument .data() and .size() methods // whose return values are convertible to T* and size_t, respectively. @@ -135,6 +136,17 @@ static_assert(!IsIntlike::value, ""); } // namespace test_enum_intlike +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
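A rough sketch of what the re-exported traits above check. The template parameter lists are inferred from the comments in this header (container type plus element type for HasDataAndSize), so treat the exact spelling as an assumption rather than a verified signature:

  #include <vector>

  #include "rtc_base/type_traits.h"

  // std::vector<int> has data() convertible to int* and a size() method.
  static_assert(webrtc::HasDataAndSize<std::vector<int>, int>::value, "");
  // Plain integral types count as int-like; floating point types do not.
  static_assert(webrtc::IsIntlike<int>::value, "");
  static_assert(!webrtc::IsIntlike<double>::value, "");
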
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +template +using HasDataAndSize = ::webrtc::HasDataAndSize; +template +using IsIntlike = ::webrtc::IsIntlike; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_TYPE_TRAITS_H_ diff --git a/rtc_base/unique_id_generator.cc b/rtc_base/unique_id_generator.cc index e68c643dbe..f817decb99 100644 --- a/rtc_base/unique_id_generator.cc +++ b/rtc_base/unique_id_generator.cc @@ -14,11 +14,11 @@ #include #include "absl/strings/string_view.h" -#include "rtc_base/helpers.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/string_encode.h" #include "rtc_base/string_to_number.h" -namespace rtc { +namespace webrtc { UniqueRandomIdGenerator::UniqueRandomIdGenerator() : known_ids_() {} UniqueRandomIdGenerator::UniqueRandomIdGenerator(ArrayView known_ids) @@ -27,7 +27,7 @@ UniqueRandomIdGenerator::UniqueRandomIdGenerator(ArrayView known_ids) UniqueRandomIdGenerator::~UniqueRandomIdGenerator() = default; uint32_t UniqueRandomIdGenerator::GenerateId() { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); RTC_CHECK_LT(known_ids_.size(), std::numeric_limits::max() - 1); while (true) { @@ -39,7 +39,7 @@ uint32_t UniqueRandomIdGenerator::GenerateId() { } bool UniqueRandomIdGenerator::AddKnownId(uint32_t value) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return known_ids_.insert(value).second; } @@ -53,14 +53,14 @@ UniqueStringGenerator::UniqueStringGenerator(ArrayView known_ids) { UniqueStringGenerator::~UniqueStringGenerator() = default; std::string UniqueStringGenerator::GenerateString() { - return ToString(unique_number_generator_.GenerateNumber()); + return absl::StrCat(unique_number_generator_.GenerateNumber()); } bool UniqueStringGenerator::AddKnownId(absl::string_view value) { // TODO(webrtc:13579): remove string copy here once absl::string_view version // of StringToNumber is available. - absl::optional int_value = - StringToNumber(std::string(value)); + std::optional int_value = + webrtc::StringToNumber(std::string(value)); // The underlying generator works for uint32_t values, so if the provided // value is not a uint32_t it will never be generated anyway. if (int_value.has_value()) { @@ -69,4 +69,4 @@ bool UniqueStringGenerator::AddKnownId(absl::string_view value) { return false; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/unique_id_generator.h b/rtc_base/unique_id_generator.h index 10dd4d3151..5fe3366edd 100644 --- a/rtc_base/unique_id_generator.h +++ b/rtc_base/unique_id_generator.h @@ -21,7 +21,7 @@ #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" -namespace rtc { +namespace webrtc { // This class will generate numbers. A common use case is for identifiers. // The generated numbers will be unique, in the local scope of the generator. @@ -53,7 +53,7 @@ class UniqueNumberGenerator { bool AddKnownId(TIntegral value); private: - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker sequence_checker_{ + RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_{ webrtc::SequenceChecker::kDetached}; static_assert(std::is_integral::value, "Must be integral type."); TIntegral counter_ RTC_GUARDED_BY(sequence_checker_); @@ -88,7 +88,7 @@ class UniqueRandomIdGenerator { private: // TODO(bugs.webrtc.org/12666): This lock is needed due to an instance in // SdpOfferAnswerHandler being shared between threads. 
- webrtc::Mutex mutex_; + Mutex mutex_; std::set known_ids_ RTC_GUARDED_BY(&mutex_); }; @@ -145,6 +145,16 @@ bool UniqueNumberGenerator::AddKnownId(TIntegral value) { RTC_DCHECK_RUN_ON(&sequence_checker_); return known_ids_.insert(value).second; } +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::UniqueNumberGenerator; +using ::webrtc::UniqueRandomIdGenerator; +using ::webrtc::UniqueStringGenerator; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_UNIQUE_ID_GENERATOR_H_ diff --git a/rtc_base/unique_id_generator_unittest.cc b/rtc_base/unique_id_generator_unittest.cc index a6ae8ec9f5..85ed5d4715 100644 --- a/rtc_base/unique_id_generator_unittest.cc +++ b/rtc_base/unique_id_generator_unittest.cc @@ -18,28 +18,28 @@ #include "api/array_view.h" #include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/gunit.h" -#include "rtc_base/helpers.h" #include "test/gmock.h" using ::testing::IsEmpty; using ::testing::Test; -namespace rtc { +namespace webrtc { namespace { // Utility class that registers itself as the currently active task queue. -class FakeTaskQueue : public webrtc::TaskQueueBase { +class FakeTaskQueue : public TaskQueueBase { public: FakeTaskQueue() : task_queue_setter_(this) {} void Delete() override {} void PostTaskImpl(absl::AnyInvocable task, const PostTaskTraits& traits, - const webrtc::Location& location) override {} + const Location& location) override {} void PostDelayedTaskImpl(absl::AnyInvocable task, - webrtc::TimeDelta delay, + TimeDelta delay, const PostDelayedTaskTraits& traits, - const webrtc::Location& location) override {} + const Location& location) override {} private: CurrentTaskQueueSetter task_queue_setter_; @@ -76,7 +76,7 @@ TYPED_TEST(UniqueIdGeneratorTest, ElementsDoNotRepeat) { TYPED_TEST(UniqueIdGeneratorTest, KnownElementsAreNotGenerated) { typedef TypeParam Generator; const size_t num_elements = 100; - rtc::InitRandom(0); + InitRandom(0); Generator generator1; std::vector known_values; for (size_t i = 0; i < num_elements; i++) { @@ -84,7 +84,7 @@ TYPED_TEST(UniqueIdGeneratorTest, KnownElementsAreNotGenerated) { } EXPECT_EQ(num_elements, known_values.size()); - rtc::InitRandom(0); + InitRandom(0); Generator generator2(known_values); std::vector values; @@ -103,7 +103,7 @@ TYPED_TEST(UniqueIdGeneratorTest, KnownElementsAreNotGenerated) { TYPED_TEST(UniqueIdGeneratorTest, AddedElementsAreNotGenerated) { typedef TypeParam Generator; const size_t num_elements = 100; - rtc::InitRandom(0); + InitRandom(0); Generator generator1; std::vector known_values; for (size_t i = 0; i < num_elements; i++) { @@ -111,7 +111,7 @@ TYPED_TEST(UniqueIdGeneratorTest, AddedElementsAreNotGenerated) { } EXPECT_EQ(num_elements, known_values.size()); - rtc::InitRandom(0); + InitRandom(0); Generator generator2; for (const typename Generator::value_type& value : known_values) { @@ -134,11 +134,11 @@ TYPED_TEST(UniqueIdGeneratorTest, AddedElementsAreNotGenerated) { TYPED_TEST(UniqueIdGeneratorTest, AddKnownIdOnNewIdReturnsTrue) { typedef TypeParam Generator; - rtc::InitRandom(0); + InitRandom(0); Generator generator1; const typename Generator::value_type id = generator1.Generate(); - rtc::InitRandom(0); + InitRandom(0); Generator generator2; EXPECT_TRUE(generator2.AddKnownId(id)); } 
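A minimal sketch of the generator API exercised by the tests above, using only the methods that appear in this patch (the SSRC use case is illustrative):

  #include <cstdint>
  #include <utility>

  #include "rtc_base/unique_id_generator.h"

  std::pair<uint32_t, uint32_t> AllocateSsrcs() {
    webrtc::UniqueRandomIdGenerator ssrc_generator;
    ssrc_generator.AddKnownId(0xDEADBEEF);  // Reserve an id already in use.
    // Generated ids never repeat and never collide with known ids.
    uint32_t ssrc1 = ssrc_generator.GenerateId();
    uint32_t ssrc2 = ssrc_generator.GenerateId();
    return {ssrc1, ssrc2};  // ssrc1 != ssrc2, and neither is 0xDEADBEEF.
  }
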
@@ -146,11 +146,11 @@ TYPED_TEST(UniqueIdGeneratorTest, AddKnownIdOnNewIdReturnsTrue) { TYPED_TEST(UniqueIdGeneratorTest, AddKnownIdCalledAgainForSameIdReturnsFalse) { typedef TypeParam Generator; - rtc::InitRandom(0); + InitRandom(0); Generator generator1; const typename Generator::value_type id = generator1.Generate(); - rtc::InitRandom(0); + InitRandom(0); Generator generator2; ASSERT_TRUE(generator2.AddKnownId(id)); EXPECT_FALSE(generator2.AddKnownId(id)); @@ -160,12 +160,12 @@ TYPED_TEST(UniqueIdGeneratorTest, AddKnownIdOnIdProvidedAsKnownToCtorReturnsFalse) { typedef TypeParam Generator; - rtc::InitRandom(0); + InitRandom(0); Generator generator1; const typename Generator::value_type id = generator1.Generate(); std::vector known_values = {id}; - rtc::InitRandom(0); + InitRandom(0); Generator generator2(known_values); EXPECT_FALSE(generator2.AddKnownId(id)); } @@ -208,4 +208,4 @@ TEST(UniqueNumberGeneratorDeathTest, FailsWhenUsedInWrongContext) { } #endif -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/units/unit_base.h b/rtc_base/units/unit_base.h index a6bdbf547d..2820c13b2d 100644 --- a/rtc_base/units/unit_base.h +++ b/rtc_base/units/unit_base.h @@ -99,7 +99,7 @@ class UnitBase { RTC_DCHECK_GE(value, 0); RTC_DCHECK_GT(value, MinusInfinityVal()); RTC_DCHECK_LT(value, PlusInfinityVal()); - return Unit_T(rtc::dchecked_cast(value)); + return Unit_T(dchecked_cast(value)); } template ::value>::type* = @@ -110,8 +110,7 @@ class UnitBase { } else if (value == -std::numeric_limits::infinity()) { return MinusInfinity(); } else { - RTC_DCHECK(!std::isnan(value)); - return FromValue(rtc::dchecked_cast(value)); + return FromValue(dchecked_cast(value)); } } @@ -123,7 +122,7 @@ class UnitBase { RTC_DCHECK_GE(value, 0); RTC_DCHECK_GT(value, MinusInfinityVal() / denominator); RTC_DCHECK_LT(value, PlusInfinityVal() / denominator); - return Unit_T(rtc::dchecked_cast(value * denominator)); + return Unit_T(dchecked_cast(value * denominator)); } template ::value>::type* = @@ -136,7 +135,7 @@ class UnitBase { constexpr typename std::enable_if::value, T>::type ToValue() const { RTC_DCHECK(IsFinite()); - return rtc::dchecked_cast(value_); + return dchecked_cast(value_); } template constexpr typename std::enable_if::value, T>::type @@ -154,7 +153,7 @@ class UnitBase { constexpr typename std::enable_if::value, T>::type ToFraction() const { RTC_DCHECK(IsFinite()); - return rtc::dchecked_cast(DivideRoundToNearest(value_, Denominator)); + return dchecked_cast(DivideRoundToNearest(value_, Denominator)); } template constexpr typename std::enable_if::value, T>::type @@ -173,7 +172,7 @@ class UnitBase { ToMultiple() const { RTC_DCHECK_GE(ToValue(), std::numeric_limits::min() / Factor); RTC_DCHECK_LE(ToValue(), std::numeric_limits::max() / Factor); - return rtc::dchecked_cast(ToValue() * Factor); + return dchecked_cast(ToValue() * Factor); } template constexpr typename std::enable_if::value, T>::type @@ -276,6 +275,7 @@ class RelativeUnit : public UnitBase { protected: using UnitBase::UnitBase; + constexpr RelativeUnit() : UnitBase(0) {} }; template diff --git a/rtc_base/units/unit_base_unittest.cc b/rtc_base/units/unit_base_unittest.cc index 258d7d1268..6003f4b754 100644 --- a/rtc_base/units/unit_base_unittest.cc +++ b/rtc_base/units/unit_base_unittest.cc @@ -54,11 +54,19 @@ TEST(UnitBaseTest, ConstExpr) { constexpr TestUnit kTestUnitZero = TestUnit::Zero(); constexpr TestUnit kTestUnitPlusInf = TestUnit::PlusInfinity(); constexpr TestUnit kTestUnitMinusInf = TestUnit::MinusInfinity(); + 
static_assert(kTestUnitZero.IsZero(), ""); static_assert(kTestUnitPlusInf.IsPlusInfinity(), ""); static_assert(kTestUnitMinusInf.IsMinusInfinity(), ""); static_assert(kTestUnitPlusInf.ToKiloOr(-1) == -1, ""); + // Check FromValue is constexpr for floats. + static_assert(TestUnit::FromValue(0.0).IsZero()); + static_assert(TestUnit::FromValue(INFINITY).IsPlusInfinity()); + static_assert(TestUnit::FromValue(-INFINITY).IsMinusInfinity()); + static_assert(TestUnit::FromValue(250.0) == TestUnit::FromValue(250)); + static_assert(TestUnit::FromValue(-250.0) == TestUnit::FromValue(-250)); + static_assert(kTestUnitPlusInf > kTestUnitZero, ""); constexpr TestUnit kTestUnitKilo = TestUnit::FromKilo(kValue); @@ -69,6 +77,7 @@ TEST(UnitBaseTest, ConstExpr) { static_assert(TestUnitAddKilo(kTestUnitValue, 2).ToValue() == kValue + 2000, ""); static_assert(TestUnit::FromValue(500) / 2 == TestUnit::FromValue(250)); + static_assert(TestUnit::FromValue(500.0) / 2 == TestUnit::FromValue(250.0)); } TEST(UnitBaseTest, GetBackSameValues) { @@ -223,6 +232,14 @@ TEST(UnitBaseTest, MathOperations) { EXPECT_EQ(TestUnit::FromValue(-789) / 10, TestUnit::FromValue(-78)); } +#if GTEST_HAS_DEATH_TEST && RTC_DCHECK_IS_ON && !defined(WEBRTC_ANDROID) +TEST(UnitBaseTest, CrashesWhenCreatedFromNan) { + EXPECT_DEATH(TestUnit::FromValue(NAN), ""); + EXPECT_DEATH(TestUnit::FromValue(0.0 / 0.0), ""); + EXPECT_DEATH(TestUnit::FromValue(INFINITY - INFINITY), ""); +} +#endif + TEST(UnitBaseTest, InfinityOperations) { const int64_t kValue = 267; const TestUnit finite = TestUnit::FromKilo(kValue); diff --git a/rtc_base/untyped_function.h b/rtc_base/untyped_function.h index c1f59458b9..b7e1de1551 100644 --- a/rtc_base/untyped_function.h +++ b/rtc_base/untyped_function.h @@ -30,8 +30,9 @@ enum : size_t { kInlineStorageWords = 4 }; union VoidUnion { void* void_ptr; FunVoid* fun_ptr; - typename std::aligned_storage::type - inline_storage; + // std::max_align_t satisfies alignment requirements for every type. + alignas(std::max_align_t) char inline_storage[kInlineStorageWords * + sizeof(uintptr_t)]; }; // Returns the number of elements of the `inline_storage` array required to diff --git a/rtc_base/virtual_socket_server.cc b/rtc_base/virtual_socket_server.cc index efc206b219..9fad9dfe27 100644 --- a/rtc_base/virtual_socket_server.cc +++ b/rtc_base/virtual_socket_server.cc @@ -18,6 +18,7 @@ #include #include "absl/algorithm/container.h" +#include "api/sequence_checker.h" #include "api/units/time_delta.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" @@ -28,7 +29,7 @@ #include "rtc_base/thread.h" #include "rtc_base/time_utils.h" -namespace rtc { +namespace webrtc { using ::webrtc::MutexLock; using ::webrtc::TaskQueueBase; @@ -60,16 +61,16 @@ const int NUM_SAMPLES = 1000; // Packets are passed between sockets as messages. We copy the data just like // the kernel does. 
-class Packet { +class VirtualSocketPacket { public: - Packet(const char* data, size_t size, const SocketAddress& from) + VirtualSocketPacket(const char* data, size_t size, const SocketAddress& from) : size_(size), consumed_(0), from_(from) { RTC_DCHECK(nullptr != data); data_ = new char[size_]; memcpy(data_, data, size_); } - ~Packet() { delete[] data_; } + ~VirtualSocketPacket() { delete[] data_; } const char* data() const { return data_ + consumed_; } size_t size() const { return size_ - consumed_; } @@ -159,7 +160,7 @@ void VirtualSocket::SafetyBlock::SetNotAlive() { for (const SocketAddress& remote_addr : *listen_queue_) { server->Disconnect(remote_addr); } - listen_queue_ = absl::nullopt; + listen_queue_ = std::nullopt; } // Cancel potential connects @@ -191,7 +192,7 @@ void VirtualSocket::SafetyBlock::PostSignalReadEvent() { } pending_read_signal_event_ = true; - rtc::scoped_refptr safety(this); + scoped_refptr safety(this); socket_.server_->msg_queue_->PostTask( [safety = std::move(safety)] { safety->MaybeSignalReadEvent(); }); } @@ -293,7 +294,7 @@ int VirtualSocket::SafetyBlock::RecvFrom(void* buffer, } // Return the packet at the front of the queue. - Packet& packet = *recv_buffer_.front(); + VirtualSocketPacket& packet = *recv_buffer_.front(); size_t data_read = std::min(size, packet.size()); memcpy(buffer, packet.data(), data_read); addr = packet.from(); @@ -400,8 +401,8 @@ int VirtualSocket::SetOption(Option opt, int value) { } void VirtualSocket::PostPacket(TimeDelta delay, - std::unique_ptr packet) { - rtc::scoped_refptr safety = safety_; + std::unique_ptr packet) { + scoped_refptr safety = safety_; VirtualSocket* socket = this; server_->msg_queue_->PostDelayedTask( [safety = std::move(safety), socket, @@ -413,7 +414,8 @@ void VirtualSocket::PostPacket(TimeDelta delay, delay); } -bool VirtualSocket::SafetyBlock::AddPacket(std::unique_ptr packet) { +bool VirtualSocket::SafetyBlock::AddPacket( + std::unique_ptr packet) { MutexLock lock(&mutex_); if (alive_) { recv_buffer_.push_back(std::move(packet)); @@ -428,7 +430,7 @@ void VirtualSocket::PostConnect(TimeDelta delay, void VirtualSocket::SafetyBlock::PostConnect(TimeDelta delay, const SocketAddress& remote_addr) { - rtc::scoped_refptr safety(this); + scoped_refptr safety(this); MutexLock lock(&mutex_); RTC_DCHECK(alive_); @@ -487,7 +489,7 @@ void VirtualSocket::PostDisconnect(TimeDelta delay) { // Posted task may outlive this. Use different name for `this` inside the task // to avoid accidental unsafe `this->safety_` instead of safe `safety` VirtualSocket* socket = this; - rtc::scoped_refptr safety = safety_; + scoped_refptr safety = safety_; auto task = [safety = std::move(safety), socket] { if (!safety->IsAlive()) { return; @@ -548,7 +550,7 @@ int VirtualSocket::SendUdp(const void* pv, // If we have not been assigned a local port, then get one. 
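PostPacket, PostConnect and PostDisconnect above all follow the same lifetime pattern: the posted task captures a ref-counted "safety" object and bails out if the socket has died before the task runs. A stripped-down sketch of the shape of that pattern, using standard library types rather than the SafetyBlock and scoped_refptr machinery from this file (the real code additionally serializes everything on the server's message-queue thread and a mutex, which is omitted here):

  #include <atomic>
  #include <functional>
  #include <memory>

  struct AliveFlag {
    std::atomic<bool> alive{true};
  };

  class Owner {
   public:
    ~Owner() { alive_->alive = false; }

    // `post` stands in for posting a task to the server's thread.
    void PostGuardedWork(std::function<void(std::function<void()>)> post) {
      post([alive = alive_] {
        if (!alive->alive)
          return;  // The owner was destroyed; skip the work.
        // ... work that needs the owner ...
      });
    }

   private:
    std::shared_ptr<AliveFlag> alive_ = std::make_shared<AliveFlag>();
  };
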
if (local_addr_.IsNil()) { local_addr_ = server_->AssignBindAddress( - EmptySocketAddressWithFamily(addr.ipaddr().family())); + webrtc::EmptySocketAddressWithFamily(addr.ipaddr().family())); int result = server_->Bind(this, local_addr_); if (result != 0) { local_addr_.Clear(); @@ -679,7 +681,8 @@ VirtualSocketServer::~VirtualSocketServer() { IPAddress VirtualSocketServer::GetNextIP(int family) { if (family == AF_INET) { IPAddress next_ip(next_ipv4_); - next_ipv4_.s_addr = HostToNetwork32(NetworkToHost32(next_ipv4_.s_addr) + 1); + next_ipv4_.s_addr = + webrtc::HostToNetwork32(webrtc::NetworkToHost32(next_ipv4_.s_addr) + 1); return next_ip; } else if (family == AF_INET6) { IPAddress next_ip(next_ipv6_); @@ -702,7 +705,7 @@ uint16_t VirtualSocketServer::GetNextPort() { void VirtualSocketServer::SetSendingBlocked(bool blocked) { { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); if (blocked == sending_blocked_) { // Unchanged; nothing to do. return; @@ -724,8 +727,7 @@ void VirtualSocketServer::SetMessageQueue(Thread* msg_queue) { msg_queue_ = msg_queue; } -bool VirtualSocketServer::Wait(webrtc::TimeDelta max_wait_duration, - bool process_io) { +bool VirtualSocketServer::Wait(TimeDelta max_wait_duration, bool process_io) { RTC_DCHECK_RUN_ON(msg_queue_); if (stop_on_idle_ && Thread::Current()->empty()) { return false; @@ -743,8 +745,8 @@ void VirtualSocketServer::WakeUp() { } void VirtualSocketServer::SetAlternativeLocalAddress( - const rtc::IPAddress& address, - const rtc::IPAddress& alternative) { + const IPAddress& address, + const IPAddress& alternative) { alternative_address_mapping_[address] = alternative; } @@ -755,7 +757,7 @@ bool VirtualSocketServer::ProcessMessagesUntilIdle() { if (fake_clock_) { // If using a fake clock, advance it in millisecond increments until the // queue is empty. - fake_clock_->AdvanceTime(webrtc::TimeDelta::Millis(1)); + fake_clock_->AdvanceTime(TimeDelta::Millis(1)); } else { // Otherwise, run a normal message loop. msg_queue_->ProcessMessages(Thread::kForever); @@ -789,7 +791,7 @@ int VirtualSocketServer::Bind(VirtualSocket* socket, const SocketAddress& addr) { RTC_DCHECK(nullptr != socket); // Address must be completely specified at this point - RTC_DCHECK(!IPIsUnspec(addr.ipaddr())); + RTC_DCHECK(!webrtc::IPIsUnspec(addr.ipaddr())); RTC_DCHECK(addr.port() != 0); // Normalize the address (turns v6-mapped addresses into v4-addresses). @@ -801,7 +803,7 @@ int VirtualSocketServer::Bind(VirtualSocket* socket, SocketAddress VirtualSocketServer::AssignBindAddress( const SocketAddress& app_addr) { - RTC_DCHECK(!IPIsUnspec(app_addr.ipaddr())); + RTC_DCHECK(!webrtc::IPIsUnspec(app_addr.ipaddr())); // Normalize the IP. SocketAddress addr; @@ -838,12 +840,12 @@ VirtualSocket* VirtualSocketServer::LookupBinding(const SocketAddress& addr) { } IPAddress default_ip = GetDefaultSourceAddress(addr.ipaddr().family()); - if (!IPIsUnspec(default_ip) && addr.ipaddr() == default_ip) { + if (!webrtc::IPIsUnspec(default_ip) && addr.ipaddr() == default_ip) { // If we can't find a binding for the packet which is sent to the interface // corresponding to the default route, it should match a binding with the // correct port to the any address. 
SocketAddress sock_addr = - EmptySocketAddressWithFamily(addr.ipaddr().family()); + webrtc::EmptySocketAddressWithFamily(addr.ipaddr().family()); sock_addr.SetPort(addr.port()); return LookupBinding(sock_addr); } @@ -953,7 +955,7 @@ int VirtualSocketServer::SendUdp(VirtualSocket* socket, size_t data_size, const SocketAddress& remote_addr) { { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); ++sent_packets_; if (sending_blocked_) { socket->SetToBlocked(); @@ -1010,7 +1012,7 @@ int VirtualSocketServer::SendUdp(VirtualSocket* socket, // "Derivative Random Drop"); however, this algorithm is a more accurate // simulation of what a normal network would do. { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); size_t packet_size = data_size + UDP_HEADER_SIZE; if (network_size + packet_size > network_capacity_) { RTC_LOG(LS_VERBOSE) << "Dropping packet: network capacity exceeded"; @@ -1027,7 +1029,7 @@ int VirtualSocketServer::SendUdp(VirtualSocket* socket, void VirtualSocketServer::SendTcp(VirtualSocket* socket) { { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); ++sent_packets_; if (sending_blocked_) { // Eventually the socket's buffer will fill and VirtualSocket::SendTcp @@ -1094,7 +1096,7 @@ void VirtualSocketServer::AddPacketToNetwork(VirtualSocket* sender, // route. SocketAddress sender_addr = sender->GetLocalAddress(); IPAddress default_ip = GetDefaultSourceAddress(sender_addr.ipaddr().family()); - if (sender_addr.IsAnyIP() && !IPIsUnspec(default_ip)) { + if (sender_addr.IsAnyIP() && !webrtc::IPIsUnspec(default_ip)) { sender_addr.SetIP(default_ip); } @@ -1102,12 +1104,13 @@ void VirtualSocketServer::AddPacketToNetwork(VirtualSocket* sender, if (ordered) { ts = sender->UpdateOrderedDelivery(ts); } - recipient->PostPacket(TimeDelta::Millis(ts - cur_time), - std::make_unique(data, data_size, sender_addr)); + recipient->PostPacket( + TimeDelta::Millis(ts - cur_time), + std::make_unique(data, data_size, sender_addr)); } uint32_t VirtualSocketServer::SendDelay(uint32_t size) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); if (bandwidth_ == 0) return 0; else @@ -1136,7 +1139,7 @@ void PrintFunction(std::vector >* f) { #endif // void VirtualSocketServer::UpdateDelayDistribution() { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); delay_dist_ = CreateDistribution(delay_mean_, delay_stddev_, delay_samples_); } @@ -1282,13 +1285,15 @@ bool VirtualSocketServer::CanInteractWith(VirtualSocket* local, // If ip1 is IPv4 and ip2 is :: and ip2 is not IPV6_V6ONLY. int remote_v6_only = 0; remote->GetOption(Socket::OPT_IPV6_V6ONLY, &remote_v6_only); - if (local_ip.family() == AF_INET && !remote_v6_only && IPIsAny(remote_ip)) { + if (local_ip.family() == AF_INET && !remote_v6_only && + webrtc::IPIsAny(remote_ip)) { return true; } // Same check, backwards. 
int local_v6_only = 0; local->GetOption(Socket::OPT_IPV6_V6ONLY, &local_v6_only); - if (remote_ip.family() == AF_INET && !local_v6_only && IPIsAny(local_ip)) { + if (remote_ip.family() == AF_INET && !local_v6_only && + webrtc::IPIsAny(local_ip)) { return true; } @@ -1314,7 +1319,7 @@ IPAddress VirtualSocketServer::GetDefaultSourceAddress(int family) { return IPAddress(); } void VirtualSocketServer::SetDefaultSourceAddress(const IPAddress& from_addr) { - RTC_DCHECK(!IPIsAny(from_addr)); + RTC_DCHECK(!webrtc::IPIsAny(from_addr)); if (from_addr.family() == AF_INET) { default_source_address_v4_ = from_addr; } else if (from_addr.family() == AF_INET6) { @@ -1323,42 +1328,42 @@ void VirtualSocketServer::SetDefaultSourceAddress(const IPAddress& from_addr) { } void VirtualSocketServer::set_bandwidth(uint32_t bandwidth) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); bandwidth_ = bandwidth; } void VirtualSocketServer::set_network_capacity(uint32_t capacity) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); network_capacity_ = capacity; } uint32_t VirtualSocketServer::send_buffer_capacity() const { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return send_buffer_capacity_; } void VirtualSocketServer::set_send_buffer_capacity(uint32_t capacity) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); send_buffer_capacity_ = capacity; } uint32_t VirtualSocketServer::recv_buffer_capacity() const { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return recv_buffer_capacity_; } void VirtualSocketServer::set_recv_buffer_capacity(uint32_t capacity) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); recv_buffer_capacity_ = capacity; } void VirtualSocketServer::set_delay_mean(uint32_t delay_mean) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); delay_mean_ = delay_mean; } void VirtualSocketServer::set_delay_stddev(uint32_t delay_stddev) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); delay_stddev_ = delay_stddev; } void VirtualSocketServer::set_delay_samples(uint32_t delay_samples) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); delay_samples_ = delay_samples; } @@ -1366,18 +1371,18 @@ void VirtualSocketServer::set_drop_probability(double drop_prob) { RTC_DCHECK_GE(drop_prob, 0.0); RTC_DCHECK_LE(drop_prob, 1.0); - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); drop_prob_ = drop_prob; } void VirtualSocketServer::set_max_udp_payload(size_t payload_size) { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); max_udp_payload_ = payload_size; } uint32_t VirtualSocketServer::sent_packets() const { - webrtc::MutexLock lock(&mutex_); + MutexLock lock(&mutex_); return sent_packets_; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/virtual_socket_server.h b/rtc_base/virtual_socket_server.h index 93ef288826..32e1765e2a 100644 --- a/rtc_base/virtual_socket_server.h +++ b/rtc_base/virtual_socket_server.h @@ -13,9 +13,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/make_ref_counted.h" #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" @@ -23,14 +23,14 @@ #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/fake_clock.h" +#include "rtc_base/socket_address_pair.h" #include "rtc_base/socket_server.h" #include "rtc_base/synchronization/mutex.h" -namespace rtc { +namespace webrtc { -class Packet; +class VirtualSocketPacket; class VirtualSocketServer; -class SocketAddressPair; // 
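The setters in the hunk above (set_bandwidth, set_network_capacity, the delay parameters, set_drop_probability) are the knobs tests use to shape the simulated link. A hedged sketch of a typical configuration follows; the values are illustrative, and the units follow the usages visible in the tests later in this patch (bandwidth in bytes per second, delays in milliseconds).

#include "rtc_base/virtual_socket_server.h"

// Shape the virtual network before creating sockets on it.
void ConfigureLossyLink(webrtc::VirtualSocketServer& vss) {
  vss.set_bandwidth(64 * 1024);          // ~64 KB/s link.
  vss.set_network_capacity(128 * 1024);  // Bytes in flight before drops start.
  vss.set_delay_mean(100);               // Mean one-way delay.
  vss.set_delay_stddev(20);              // Jitter around the mean.
  vss.set_delay_samples(1000);           // Samples in the delay distribution.
  vss.UpdateDelayDistribution();         // Rebuild the distribution.
  vss.set_drop_probability(0.05);        // 5% random loss.
}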
Implements the socket interface using the virtual network. Packets are // passed in tasks using the thread of the socket server. @@ -86,9 +86,9 @@ class VirtualSocket : public Socket, public sigslot::has_slots<> { // Removes stale packets from the network. Returns current size. size_t PurgeNetworkPackets(int64_t cur_time); - void PostPacket(webrtc::TimeDelta delay, std::unique_ptr packet); - void PostConnect(webrtc::TimeDelta delay, const SocketAddress& remote_addr); - void PostDisconnect(webrtc::TimeDelta delay); + void PostPacket(TimeDelta delay, std::unique_ptr packet); + void PostConnect(TimeDelta delay, const SocketAddress& remote_addr); + void PostDisconnect(TimeDelta delay); private: // Struct shared with pending tasks that may outlive VirtualSocket. @@ -118,8 +118,8 @@ class VirtualSocket : public Socket, public sigslot::has_slots<> { }; AcceptResult Accept(); - bool AddPacket(std::unique_ptr packet); - void PostConnect(webrtc::TimeDelta delay, const SocketAddress& remote_addr); + bool AddPacket(std::unique_ptr packet); + void PostConnect(TimeDelta delay, const SocketAddress& remote_addr); private: enum class Signal { kNone, kReadEvent, kConnectEvent }; @@ -131,7 +131,7 @@ class VirtualSocket : public Socket, public sigslot::has_slots<> { void MaybeSignalReadEvent(); Signal Connect(PostedConnects::iterator remote_addr_it); - webrtc::Mutex mutex_; + Mutex mutex_; VirtualSocket& socket_; bool alive_ RTC_GUARDED_BY(mutex_) = true; // Flag indicating if async Task to signal SignalReadEvent is posted. @@ -148,10 +148,11 @@ class VirtualSocket : public Socket, public sigslot::has_slots<> { PostedConnects posted_connects_ RTC_GUARDED_BY(mutex_); // Data which has been received from the network - std::list> recv_buffer_ RTC_GUARDED_BY(mutex_); + std::list> recv_buffer_ + RTC_GUARDED_BY(mutex_); // Pending sockets which can be Accepted - absl::optional> listen_queue_ + std::optional> listen_queue_ RTC_GUARDED_BY(mutex_); }; @@ -284,10 +285,10 @@ class VirtualSocketServer : public SocketServer { // SocketServer: void SetMessageQueue(Thread* queue) override; - bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) override; + bool Wait(TimeDelta max_wait_duration, bool process_io) override; void WakeUp() override; - void SetDelayOnAddress(const rtc::SocketAddress& address, int delay_ms) { + void SetDelayOnAddress(const SocketAddress& address, int delay_ms) { delay_by_ip_[address.ipaddr()] = delay_ms; } @@ -298,8 +299,8 @@ class VirtualSocketServer : public SocketServer { // If SetAlternativeLocalAddress(A, B) is called, then when something // attempts to bind a socket to address A, it will get a socket bound to // address B instead. - void SetAlternativeLocalAddress(const rtc::IPAddress& address, - const rtc::IPAddress& alternative); + void SetAlternativeLocalAddress(const IPAddress& address, + const IPAddress& alternative); typedef std::pair Point; typedef std::vector Function; @@ -451,7 +452,7 @@ class VirtualSocketServer : public SocketServer { IPAddress default_source_address_v4_; IPAddress default_source_address_v6_; - mutable webrtc::Mutex mutex_; + mutable Mutex mutex_; uint32_t bandwidth_ RTC_GUARDED_BY(mutex_); uint32_t network_capacity_ RTC_GUARDED_BY(mutex_); @@ -464,8 +465,8 @@ class VirtualSocketServer : public SocketServer { // Used for testing. 
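SetDelayOnAddress() and SetAlternativeLocalAddress() in the hunk above give tests per-address control: extra delay toward a particular IP, and rewriting of the address a socket actually binds to. A minimal sketch, assuming webrtc::IPFromString() as used elsewhere in this patch; the addresses and the 200 ms figure are illustrative.

#include "rtc_base/ip_address.h"
#include "rtc_base/socket_address.h"
#include "rtc_base/virtual_socket_server.h"

void ConfigurePerAddressBehavior(webrtc::VirtualSocketServer& vss) {
  // Packets addressed to 10.0.0.1 get an extra 200 ms of delay.
  vss.SetDelayOnAddress(webrtc::SocketAddress("10.0.0.1", 0), /*delay_ms=*/200);

  // Binding to 192.168.1.2 silently yields a socket bound to 192.168.1.3.
  webrtc::IPAddress requested;
  webrtc::IPAddress actual;
  webrtc::IPFromString("192.168.1.2", &requested);
  webrtc::IPFromString("192.168.1.3", &actual);
  vss.SetAlternativeLocalAddress(requested, actual);
}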
uint32_t sent_packets_ RTC_GUARDED_BY(mutex_) = 0; - std::map delay_by_ip_; - std::map alternative_address_mapping_; + std::map delay_by_ip_; + std::map alternative_address_mapping_; std::unique_ptr delay_dist_; double drop_prob_ RTC_GUARDED_BY(mutex_); @@ -477,6 +478,14 @@ class VirtualSocketServer : public SocketServer { bool sending_blocked_ RTC_GUARDED_BY(mutex_) = false; }; -} // namespace rtc +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::VirtualSocketServer; +} +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_VIRTUAL_SOCKET_SERVER_H_ diff --git a/rtc_base/virtual_socket_unittest.cc b/rtc_base/virtual_socket_unittest.cc index 67585b1fcd..c05f8d87c7 100644 --- a/rtc_base/virtual_socket_unittest.cc +++ b/rtc_base/virtual_socket_unittest.cc @@ -13,13 +13,9 @@ #include #include #include -#if defined(WEBRTC_POSIX) -#include -#endif #include #include -#include #include "absl/memory/memory.h" #include "api/units/time_delta.h" @@ -30,6 +26,8 @@ #include "rtc_base/gunit.h" #include "rtc_base/ip_address.h" #include "rtc_base/logging.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/network/received_packet.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" #include "rtc_base/task_utils/repeating_task.h" @@ -41,11 +39,9 @@ #include "rtc_base/virtual_socket_server.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { -using ::webrtc::RepeatingTaskHandle; -using ::webrtc::TimeDelta; using ::webrtc::testing::SSE_CLOSE; using ::webrtc::testing::SSE_ERROR; using ::webrtc::testing::SSE_OPEN; @@ -55,15 +51,15 @@ using ::webrtc::testing::StreamSink; // Sends at a constant rate but with random packet sizes. 
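The block appended to virtual_socket_server.h above re-exports the class into the old namespace behind WEBRTC_ALLOW_DEPRECATED_NAMESPACES. A sketch of what that enables for not-yet-migrated callers; the macro is normally supplied by the build, and LegacyCaller is a hypothetical function, not part of the patch.

// Compiled with WEBRTC_ALLOW_DEPRECATED_NAMESPACES defined.
#include "rtc_base/virtual_socket_server.h"

void LegacyCaller(rtc::VirtualSocketServer* server) {
  // The using-declaration makes rtc::VirtualSocketServer and
  // webrtc::VirtualSocketServer name the same type, so old call sites keep
  // compiling while the migration is in progress.
  webrtc::VirtualSocketServer* same_type = server;
  (void)same_type;
}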
struct Sender { - Sender(Thread* th, Socket* s, uint32_t rt) + Sender(webrtc::Thread* th, webrtc::Socket* s, uint32_t rt) : thread(th), - socket(std::make_unique(s)), + socket(std::make_unique(s)), rate(rt), count(0) { - last_send = rtc::TimeMillis(); + last_send = webrtc::TimeMillis(); periodic = RepeatingTaskHandle::DelayedStart(thread, NextDelay(), [this] { - int64_t cur_time = rtc::TimeMillis(); + int64_t cur_time = webrtc::TimeMillis(); int64_t delay = cur_time - last_send; uint32_t size = std::clamp(rate * delay / 1000, sizeof(uint32_t), 4096); @@ -81,9 +77,9 @@ struct Sender { return TimeDelta::Seconds(1) * size / rate; } - Thread* thread; - std::unique_ptr socket; - rtc::PacketOptions options; + webrtc::Thread* thread; + std::unique_ptr socket; + AsyncSocketPacketOptions options; RepeatingTaskHandle periodic; uint32_t rate; // bytes per second uint32_t count; @@ -92,16 +88,19 @@ struct Sender { }; struct Receiver : public sigslot::has_slots<> { - Receiver(Thread* th, Socket* s, uint32_t bw) + Receiver(webrtc::Thread* th, webrtc::Socket* s, uint32_t bw) : thread(th), - socket(std::make_unique(s)), + socket(std::make_unique(s)), bandwidth(bw), count(0), sec_count(0), sum(0), sum_sq(0), samples(0) { - socket->SignalReadPacket.connect(this, &Receiver::OnReadPacket); + socket->RegisterReceivedPacketCallback( + [&](AsyncPacketSocket* s, const ReceivedIpPacket& packet) { + OnReadPacket(s, packet); + }); periodic = RepeatingTaskHandle::DelayedStart( thread, TimeDelta::Seconds(1), [this] { // It is always possible for us to receive more than expected because @@ -116,27 +115,25 @@ struct Receiver : public sigslot::has_slots<> { ~Receiver() override { periodic.Stop(); } - void OnReadPacket(AsyncPacketSocket* s, - const char* data, - size_t size, - const SocketAddress& remote_addr, - const int64_t& /* packet_time_us */) { + void OnReadPacket(webrtc::AsyncPacketSocket* s, + const ReceivedIpPacket& packet) { ASSERT_EQ(socket.get(), s); - ASSERT_GE(size, 4U); + ASSERT_GE(packet.payload().size(), 4U); - count += size; - sec_count += size; + count += packet.payload().size(); + sec_count += packet.payload().size(); - uint32_t send_time = *reinterpret_cast(data); - uint32_t recv_time = rtc::TimeMillis(); + uint32_t send_time = + *reinterpret_cast(packet.payload().data()); + uint32_t recv_time = webrtc::TimeMillis(); uint32_t delay = recv_time - send_time; sum += delay; sum_sq += delay * delay; samples += 1; } - Thread* thread; - std::unique_ptr socket; + webrtc::Thread* thread; + std::unique_ptr socket; uint32_t bandwidth; RepeatingTaskHandle periodic; size_t count; @@ -152,14 +149,14 @@ class VirtualSocketServerTest : public ::testing::Test { VirtualSocketServerTest() : ss_(&fake_clock_), thread_(&ss_), - kIPv4AnyAddress(IPAddress(INADDR_ANY), 0), - kIPv6AnyAddress(IPAddress(in6addr_any), 0) {} + kIPv4AnyAddress(webrtc::IPAddress(INADDR_ANY), 0), + kIPv6AnyAddress(webrtc::IPAddress(in6addr_any), 0) {} - void CheckPortIncrementalization(const SocketAddress& post, - const SocketAddress& pre) { + void CheckPortIncrementalization(const webrtc::SocketAddress& post, + const webrtc::SocketAddress& pre) { EXPECT_EQ(post.port(), pre.port() + 1); - IPAddress post_ip = post.ipaddr(); - IPAddress pre_ip = pre.ipaddr(); + webrtc::IPAddress post_ip = post.ipaddr(); + webrtc::IPAddress pre_ip = pre.ipaddr(); EXPECT_EQ(pre_ip.family(), post_ip.family()); if (post_ip.family() == AF_INET) { in_addr pre_ipv4 = pre_ip.ipv4_address(); @@ -177,72 +174,79 @@ class VirtualSocketServerTest : public ::testing::Test { // Test 
a client can bind to the any address, and all sent packets will have // the default source address. Also, it can receive packets sent to the // default address. - void TestDefaultSourceAddress(const IPAddress& default_address) { + void TestDefaultSourceAddress(const webrtc::IPAddress& default_address) { ss_.SetDefaultSourceAddress(default_address); // Create client1 bound to the any address. - Socket* socket = ss_.CreateSocket(default_address.family(), SOCK_DGRAM); - socket->Bind(EmptySocketAddressWithFamily(default_address.family())); - SocketAddress client1_any_addr = socket->GetLocalAddress(); + webrtc::Socket* socket = + ss_.CreateSocket(default_address.family(), SOCK_DGRAM); + socket->Bind( + webrtc::EmptySocketAddressWithFamily(default_address.family())); + webrtc::SocketAddress client1_any_addr = socket->GetLocalAddress(); EXPECT_TRUE(client1_any_addr.IsAnyIP()); - auto client1 = std::make_unique( - std::make_unique(socket), &fake_clock_); + auto client1 = std::make_unique( + std::make_unique(socket), &fake_clock_); // Create client2 bound to the address route. - Socket* socket2 = ss_.CreateSocket(default_address.family(), SOCK_DGRAM); - socket2->Bind(SocketAddress(default_address, 0)); - SocketAddress client2_addr = socket2->GetLocalAddress(); + webrtc::Socket* socket2 = + ss_.CreateSocket(default_address.family(), SOCK_DGRAM); + socket2->Bind(webrtc::SocketAddress(default_address, 0)); + webrtc::SocketAddress client2_addr = socket2->GetLocalAddress(); EXPECT_FALSE(client2_addr.IsAnyIP()); - auto client2 = std::make_unique( - std::make_unique(socket2), &fake_clock_); + auto client2 = std::make_unique( + std::make_unique(socket2), &fake_clock_); // Client1 sends to client2, client2 should see the default address as // client1's address. - SocketAddress client1_addr; + webrtc::SocketAddress client1_addr; EXPECT_EQ(6, client1->SendTo("bizbaz", 6, client2_addr)); EXPECT_TRUE(client2->CheckNextPacket("bizbaz", 6, &client1_addr)); EXPECT_EQ(client1_addr, - SocketAddress(default_address, client1_any_addr.port())); + webrtc::SocketAddress(default_address, client1_any_addr.port())); // Client2 can send back to client1's default address. EXPECT_EQ(3, client2->SendTo("foo", 3, client1_addr)); EXPECT_TRUE(client1->CheckNextPacket("foo", 3, &client2_addr)); } - void BasicTest(const SocketAddress& initial_addr) { - Socket* socket = ss_.CreateSocket(initial_addr.family(), SOCK_DGRAM); + void BasicTest(const webrtc::SocketAddress& initial_addr) { + webrtc::Socket* socket = + ss_.CreateSocket(initial_addr.family(), SOCK_DGRAM); socket->Bind(initial_addr); - SocketAddress server_addr = socket->GetLocalAddress(); + webrtc::SocketAddress server_addr = socket->GetLocalAddress(); // Make sure VSS didn't switch families on us. 
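The Receiver struct in the hunk above now registers a lambda through RegisterReceivedPacketCallback() instead of connecting to SignalReadPacket. A minimal sketch of that registration, using the callback signature and the received_packet.h include exactly as they appear in the hunk; the byte-counting helper and the async_packet_socket.h path are assumptions, not part of the patch.

#include <cstddef>

#include "rtc_base/async_packet_socket.h"  // Assumed header for the socket.
#include "rtc_base/network/received_packet.h"

// Hypothetical helper: count received payload bytes on a packet socket.
void AttachByteCounter(webrtc::AsyncPacketSocket& socket, size_t& total_bytes) {
  socket.RegisterReceivedPacketCallback(
      [&total_bytes](webrtc::AsyncPacketSocket* /*source*/,
                     const webrtc::ReceivedIpPacket& packet) {
        total_bytes += packet.payload().size();
      });
}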
EXPECT_EQ(server_addr.family(), initial_addr.family()); - auto client1 = std::make_unique( - std::make_unique(socket), &fake_clock_); - Socket* socket2 = ss_.CreateSocket(initial_addr.family(), SOCK_DGRAM); - auto client2 = std::make_unique( - std::make_unique(socket2), &fake_clock_); + auto client1 = std::make_unique( + std::make_unique(socket), &fake_clock_); + webrtc::Socket* socket2 = + ss_.CreateSocket(initial_addr.family(), SOCK_DGRAM); + auto client2 = std::make_unique( + std::make_unique(socket2), &fake_clock_); - SocketAddress client2_addr; + webrtc::SocketAddress client2_addr; EXPECT_EQ(3, client2->SendTo("foo", 3, server_addr)); EXPECT_TRUE(client1->CheckNextPacket("foo", 3, &client2_addr)); - SocketAddress client1_addr; + webrtc::SocketAddress client1_addr; EXPECT_EQ(6, client1->SendTo("bizbaz", 6, client2_addr)); EXPECT_TRUE(client2->CheckNextPacket("bizbaz", 6, &client1_addr)); EXPECT_EQ(client1_addr, server_addr); - SocketAddress empty = EmptySocketAddressWithFamily(initial_addr.family()); + webrtc::SocketAddress empty = + webrtc::EmptySocketAddressWithFamily(initial_addr.family()); for (int i = 0; i < 10; i++) { - client2 = std::make_unique( - absl::WrapUnique(AsyncUDPSocket::Create(&ss_, empty)), &fake_clock_); + client2 = std::make_unique( + absl::WrapUnique(webrtc::AsyncUDPSocket::Create(&ss_, empty)), + &fake_clock_); - SocketAddress next_client2_addr; + webrtc::SocketAddress next_client2_addr; EXPECT_EQ(3, client2->SendTo("foo", 3, server_addr)); EXPECT_TRUE(client1->CheckNextPacket("foo", 3, &next_client2_addr)); CheckPortIncrementalization(next_client2_addr, client2_addr); // EXPECT_EQ(next_client2_addr.port(), client2_addr.port() + 1); - SocketAddress server_addr2; + webrtc::SocketAddress server_addr2; EXPECT_EQ(6, client1->SendTo("bizbaz", 6, next_client2_addr)); EXPECT_TRUE(client2->CheckNextPacket("bizbaz", 6, &server_addr2)); EXPECT_EQ(server_addr2, server_addr); @@ -252,28 +256,28 @@ class VirtualSocketServerTest : public ::testing::Test { } // initial_addr should be made from either INADDR_ANY or in6addr_any. 
- void ConnectTest(const SocketAddress& initial_addr) { + void ConnectTest(const webrtc::SocketAddress& initial_addr) { StreamSink sink; - SocketAddress accept_addr; - const SocketAddress kEmptyAddr = - EmptySocketAddressWithFamily(initial_addr.family()); + webrtc::SocketAddress accept_addr; + const webrtc::SocketAddress kEmptyAddr = + webrtc::EmptySocketAddressWithFamily(initial_addr.family()); // Create client - std::unique_ptr client = + std::unique_ptr client = absl::WrapUnique(ss_.CreateSocket(initial_addr.family(), SOCK_STREAM)); sink.Monitor(client.get()); - EXPECT_EQ(client->GetState(), Socket::CS_CLOSED); + EXPECT_EQ(client->GetState(), webrtc::Socket::CS_CLOSED); EXPECT_TRUE(client->GetLocalAddress().IsNil()); // Create server - std::unique_ptr server = + std::unique_ptr server = absl::WrapUnique(ss_.CreateSocket(initial_addr.family(), SOCK_STREAM)); sink.Monitor(server.get()); EXPECT_NE(0, server->Listen(5)); // Bind required EXPECT_EQ(0, server->Bind(initial_addr)); EXPECT_EQ(server->GetLocalAddress().family(), initial_addr.family()); EXPECT_EQ(0, server->Listen(5)); - EXPECT_EQ(server->GetState(), Socket::CS_CONNECTING); + EXPECT_EQ(server->GetState(), webrtc::Socket::CS_CONNECTING); // No pending server connections EXPECT_FALSE(sink.Check(server.get(), SSE_READ)); @@ -287,53 +291,53 @@ class VirtualSocketServerTest : public ::testing::Test { EXPECT_NE(client->GetLocalAddress(), server->GetLocalAddress()); // Client is connecting - EXPECT_EQ(client->GetState(), Socket::CS_CONNECTING); + EXPECT_EQ(client->GetState(), webrtc::Socket::CS_CONNECTING); EXPECT_FALSE(sink.Check(client.get(), SSE_OPEN)); EXPECT_FALSE(sink.Check(client.get(), SSE_CLOSE)); ss_.ProcessMessagesUntilIdle(); // Client still connecting - EXPECT_EQ(client->GetState(), Socket::CS_CONNECTING); + EXPECT_EQ(client->GetState(), webrtc::Socket::CS_CONNECTING); EXPECT_FALSE(sink.Check(client.get(), SSE_OPEN)); EXPECT_FALSE(sink.Check(client.get(), SSE_CLOSE)); // Server has pending connection EXPECT_TRUE(sink.Check(server.get(), SSE_READ)); - std::unique_ptr accepted = + std::unique_ptr accepted = absl::WrapUnique(server->Accept(&accept_addr)); EXPECT_TRUE(nullptr != accepted); EXPECT_NE(accept_addr, kEmptyAddr); EXPECT_EQ(accepted->GetRemoteAddress(), accept_addr); - EXPECT_EQ(accepted->GetState(), Socket::CS_CONNECTED); + EXPECT_EQ(accepted->GetState(), webrtc::Socket::CS_CONNECTED); EXPECT_EQ(accepted->GetLocalAddress(), server->GetLocalAddress()); EXPECT_EQ(accepted->GetRemoteAddress(), client->GetLocalAddress()); ss_.ProcessMessagesUntilIdle(); // Client has connected - EXPECT_EQ(client->GetState(), Socket::CS_CONNECTED); + EXPECT_EQ(client->GetState(), webrtc::Socket::CS_CONNECTED); EXPECT_TRUE(sink.Check(client.get(), SSE_OPEN)); EXPECT_FALSE(sink.Check(client.get(), SSE_CLOSE)); EXPECT_EQ(client->GetRemoteAddress(), server->GetLocalAddress()); EXPECT_EQ(client->GetRemoteAddress(), accepted->GetLocalAddress()); } - void ConnectToNonListenerTest(const SocketAddress& initial_addr) { + void ConnectToNonListenerTest(const webrtc::SocketAddress& initial_addr) { StreamSink sink; - SocketAddress accept_addr; - const SocketAddress nil_addr; - const SocketAddress empty_addr = - EmptySocketAddressWithFamily(initial_addr.family()); + webrtc::SocketAddress accept_addr; + const webrtc::SocketAddress nil_addr; + const webrtc::SocketAddress empty_addr = + webrtc::EmptySocketAddressWithFamily(initial_addr.family()); // Create client - std::unique_ptr client = + std::unique_ptr client = 
absl::WrapUnique(ss_.CreateSocket(initial_addr.family(), SOCK_STREAM)); sink.Monitor(client.get()); // Create server - std::unique_ptr server = + std::unique_ptr server = absl::WrapUnique(ss_.CreateSocket(initial_addr.family(), SOCK_STREAM)); sink.Monitor(server.get()); EXPECT_EQ(0, server->Bind(initial_addr)); @@ -349,23 +353,23 @@ class VirtualSocketServerTest : public ::testing::Test { EXPECT_EQ(accept_addr, nil_addr); // Connection failed - EXPECT_EQ(client->GetState(), Socket::CS_CLOSED); + EXPECT_EQ(client->GetState(), webrtc::Socket::CS_CLOSED); EXPECT_FALSE(sink.Check(client.get(), SSE_OPEN)); EXPECT_TRUE(sink.Check(client.get(), SSE_ERROR)); EXPECT_EQ(client->GetRemoteAddress(), nil_addr); } - void CloseDuringConnectTest(const SocketAddress& initial_addr) { + void CloseDuringConnectTest(const webrtc::SocketAddress& initial_addr) { StreamSink sink; - SocketAddress accept_addr; - const SocketAddress empty_addr = - EmptySocketAddressWithFamily(initial_addr.family()); + webrtc::SocketAddress accept_addr; + const webrtc::SocketAddress empty_addr = + webrtc::EmptySocketAddressWithFamily(initial_addr.family()); // Create client and server - std::unique_ptr client( + std::unique_ptr client( ss_.CreateSocket(initial_addr.family(), SOCK_STREAM)); sink.Monitor(client.get()); - std::unique_ptr server( + std::unique_ptr server( ss_.CreateSocket(initial_addr.family(), SOCK_STREAM)); sink.Monitor(server.get()); @@ -383,7 +387,7 @@ class VirtualSocketServerTest : public ::testing::Test { ss_.ProcessMessagesUntilIdle(); // Result: connection failed - EXPECT_EQ(client->GetState(), Socket::CS_CLOSED); + EXPECT_EQ(client->GetState(), webrtc::Socket::CS_CLOSED); EXPECT_TRUE(sink.Check(client.get(), SSE_ERROR)); server.reset(ss_.CreateSocket(initial_addr.family(), SOCK_STREAM)); @@ -405,7 +409,7 @@ class VirtualSocketServerTest : public ::testing::Test { ss_.ProcessMessagesUntilIdle(); // Result: connection failed - EXPECT_EQ(client->GetState(), Socket::CS_CLOSED); + EXPECT_EQ(client->GetState(), webrtc::Socket::CS_CLOSED); EXPECT_TRUE(sink.Check(client.get(), SSE_ERROR)); // New server @@ -423,37 +427,37 @@ class VirtualSocketServerTest : public ::testing::Test { // Server accepts connection EXPECT_TRUE(sink.Check(server.get(), SSE_READ)); - std::unique_ptr accepted(server->Accept(&accept_addr)); + std::unique_ptr accepted(server->Accept(&accept_addr)); ASSERT_TRUE(nullptr != accepted.get()); sink.Monitor(accepted.get()); // Client closes before connection complets - EXPECT_EQ(accepted->GetState(), Socket::CS_CONNECTED); + EXPECT_EQ(accepted->GetState(), webrtc::Socket::CS_CONNECTED); // Connected message has not been processed yet. 
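ConnectTest and CloseDuringConnectTest above walk the Socket state machine (CS_CLOSED, then CS_CONNECTING, then CS_CONNECTED). A condensed sketch of that handshake against the virtual server, using only calls that appear in these tests and assuming the server is already attached to a Thread, as AutoSocketServerThread does in the fixture.

#include <memory>

#include "rtc_base/ip_address.h"
#include "rtc_base/socket.h"
#include "rtc_base/socket_address.h"
#include "rtc_base/virtual_socket_server.h"

void ConnectAcceptSketch(webrtc::VirtualSocketServer& vss) {
  std::unique_ptr<webrtc::Socket> server(
      vss.CreateSocket(AF_INET, SOCK_STREAM));
  server->Bind(webrtc::SocketAddress(webrtc::IPAddress(INADDR_ANY), 0));
  server->Listen(5);  // Server is now listening (CS_CONNECTING).

  std::unique_ptr<webrtc::Socket> client(
      vss.CreateSocket(AF_INET, SOCK_STREAM));
  client->Connect(server->GetLocalAddress());  // Client enters CS_CONNECTING.
  vss.ProcessMessagesUntilIdle();              // Deliver the virtual connect.

  webrtc::SocketAddress peer;
  std::unique_ptr<webrtc::Socket> accepted(server->Accept(&peer));
  vss.ProcessMessagesUntilIdle();  // Client now observes CS_CONNECTED.
}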
- EXPECT_EQ(client->GetState(), Socket::CS_CONNECTING); + EXPECT_EQ(client->GetState(), webrtc::Socket::CS_CONNECTING); client->Close(); ss_.ProcessMessagesUntilIdle(); // Result: accepted socket closes - EXPECT_EQ(accepted->GetState(), Socket::CS_CLOSED); + EXPECT_EQ(accepted->GetState(), webrtc::Socket::CS_CLOSED); EXPECT_TRUE(sink.Check(accepted.get(), SSE_CLOSE)); EXPECT_FALSE(sink.Check(client.get(), SSE_CLOSE)); } - void CloseTest(const SocketAddress& initial_addr) { + void CloseTest(const webrtc::SocketAddress& initial_addr) { StreamSink sink; - const SocketAddress kEmptyAddr; + const webrtc::SocketAddress kEmptyAddr; // Create clients - std::unique_ptr a = + std::unique_ptr a = absl::WrapUnique(ss_.CreateSocket(initial_addr.family(), SOCK_STREAM)); sink.Monitor(a.get()); a->Bind(initial_addr); EXPECT_EQ(a->GetLocalAddress().family(), initial_addr.family()); - std::unique_ptr b = + std::unique_ptr b = absl::WrapUnique(ss_.CreateSocket(initial_addr.family(), SOCK_STREAM)); sink.Monitor(b.get()); b->Bind(initial_addr); @@ -465,11 +469,11 @@ class VirtualSocketServerTest : public ::testing::Test { ss_.ProcessMessagesUntilIdle(); EXPECT_TRUE(sink.Check(a.get(), SSE_OPEN)); - EXPECT_EQ(a->GetState(), Socket::CS_CONNECTED); + EXPECT_EQ(a->GetState(), webrtc::Socket::CS_CONNECTED); EXPECT_EQ(a->GetRemoteAddress(), b->GetLocalAddress()); EXPECT_TRUE(sink.Check(b.get(), SSE_OPEN)); - EXPECT_EQ(b->GetState(), Socket::CS_CONNECTED); + EXPECT_EQ(b->GetState(), webrtc::Socket::CS_CONNECTED); EXPECT_EQ(b->GetRemoteAddress(), a->GetLocalAddress()); EXPECT_EQ(1, a->Send("a", 1)); @@ -483,27 +487,27 @@ class VirtualSocketServerTest : public ::testing::Test { EXPECT_EQ(-1, b->Recv(buffer, 10, nullptr)); EXPECT_TRUE(sink.Check(a.get(), SSE_CLOSE)); - EXPECT_EQ(a->GetState(), Socket::CS_CLOSED); + EXPECT_EQ(a->GetState(), webrtc::Socket::CS_CLOSED); EXPECT_EQ(a->GetRemoteAddress(), kEmptyAddr); // No signal for Closer EXPECT_FALSE(sink.Check(b.get(), SSE_CLOSE)); - EXPECT_EQ(b->GetState(), Socket::CS_CLOSED); + EXPECT_EQ(b->GetState(), webrtc::Socket::CS_CLOSED); EXPECT_EQ(b->GetRemoteAddress(), kEmptyAddr); } - void TcpSendTest(const SocketAddress& initial_addr) { + void TcpSendTest(const webrtc::SocketAddress& initial_addr) { StreamSink sink; - const SocketAddress kEmptyAddr; + const webrtc::SocketAddress kEmptyAddr; // Connect two sockets - std::unique_ptr a = + std::unique_ptr a = absl::WrapUnique(ss_.CreateSocket(initial_addr.family(), SOCK_STREAM)); sink.Monitor(a.get()); a->Bind(initial_addr); EXPECT_EQ(a->GetLocalAddress().family(), initial_addr.family()); - std::unique_ptr b = + std::unique_ptr b = absl::WrapUnique(ss_.CreateSocket(initial_addr.family(), SOCK_STREAM)); sink.Monitor(b.get()); b->Bind(initial_addr); @@ -615,13 +619,13 @@ class VirtualSocketServerTest : public ::testing::Test { EXPECT_EQ(0, memcmp(recv_buffer, send_buffer, kDataSize)); } - void TcpSendsPacketsInOrderTest(const SocketAddress& initial_addr) { - const SocketAddress kEmptyAddr; + void TcpSendsPacketsInOrderTest(const webrtc::SocketAddress& initial_addr) { + const webrtc::SocketAddress kEmptyAddr; // Connect two sockets - std::unique_ptr a = + std::unique_ptr a = absl::WrapUnique(ss_.CreateSocket(initial_addr.family(), SOCK_STREAM)); - std::unique_ptr b = + std::unique_ptr b = absl::WrapUnique(ss_.CreateSocket(initial_addr.family(), SOCK_STREAM)); a->Bind(initial_addr); EXPECT_EQ(a->GetLocalAddress().family(), initial_addr.family()); @@ -672,9 +676,11 @@ class VirtualSocketServerTest : public ::testing::Test { // It 
is important that initial_addr's port has to be 0 such that the // incremental port behavior could ensure the 2 Binds result in different // address. - void BandwidthTest(const SocketAddress& initial_addr) { - Socket* send_socket = ss_.CreateSocket(initial_addr.family(), SOCK_DGRAM); - Socket* recv_socket = ss_.CreateSocket(initial_addr.family(), SOCK_DGRAM); + void BandwidthTest(const webrtc::SocketAddress& initial_addr) { + webrtc::Socket* send_socket = + ss_.CreateSocket(initial_addr.family(), SOCK_DGRAM); + webrtc::Socket* recv_socket = + ss_.CreateSocket(initial_addr.family(), SOCK_DGRAM); ASSERT_EQ(0, send_socket->Bind(initial_addr)); ASSERT_EQ(0, recv_socket->Bind(initial_addr)); EXPECT_EQ(send_socket->GetLocalAddress().family(), initial_addr.family()); @@ -684,7 +690,7 @@ class VirtualSocketServerTest : public ::testing::Test { uint32_t bandwidth = 64 * 1024; ss_.set_bandwidth(bandwidth); - Thread* pthMain = Thread::Current(); + webrtc::Thread* pthMain = webrtc::Thread::Current(); Sender sender(pthMain, send_socket, 80 * 1024); Receiver receiver(pthMain, recv_socket, bandwidth); @@ -704,7 +710,7 @@ class VirtualSocketServerTest : public ::testing::Test { // It is important that initial_addr's port has to be 0 such that the // incremental port behavior could ensure the 2 Binds result in different // address. - void DelayTest(const SocketAddress& initial_addr) { + void DelayTest(const webrtc::SocketAddress& initial_addr) { time_t seed = ::time(nullptr); RTC_LOG(LS_VERBOSE) << "seed = " << seed; srand(static_cast(seed)); @@ -716,15 +722,17 @@ class VirtualSocketServerTest : public ::testing::Test { ss_.set_delay_stddev(stddev); ss_.UpdateDelayDistribution(); - Socket* send_socket = ss_.CreateSocket(initial_addr.family(), SOCK_DGRAM); - Socket* recv_socket = ss_.CreateSocket(initial_addr.family(), SOCK_DGRAM); + webrtc::Socket* send_socket = + ss_.CreateSocket(initial_addr.family(), SOCK_DGRAM); + webrtc::Socket* recv_socket = + ss_.CreateSocket(initial_addr.family(), SOCK_DGRAM); ASSERT_EQ(0, send_socket->Bind(initial_addr)); ASSERT_EQ(0, recv_socket->Bind(initial_addr)); EXPECT_EQ(send_socket->GetLocalAddress().family(), initial_addr.family()); EXPECT_EQ(recv_socket->GetLocalAddress().family(), initial_addr.family()); ASSERT_EQ(0, send_socket->Connect(recv_socket->GetLocalAddress())); - Thread* pthMain = Thread::Current(); + webrtc::Thread* pthMain = webrtc::Thread::Current(); // Avg packet size is 2K, so at 200KB/s for 10s, we should see about // 1000 packets, which is necessary to get a good distribution. Sender sender(pthMain, send_socket, 100 * 2 * 1024); @@ -759,24 +767,24 @@ class VirtualSocketServerTest : public ::testing::Test { // Test cross-family communication between a client bound to client_addr and a // server bound to server_addr. shouldSucceed indicates if communication is // expected to work or not. 
- void CrossFamilyConnectionTest(const SocketAddress& client_addr, - const SocketAddress& server_addr, + void CrossFamilyConnectionTest(const webrtc::SocketAddress& client_addr, + const webrtc::SocketAddress& server_addr, bool shouldSucceed) { StreamSink sink; - SocketAddress accept_address; - const SocketAddress kEmptyAddr; + webrtc::SocketAddress accept_address; + const webrtc::SocketAddress kEmptyAddr; // Client gets a IPv4 address - std::unique_ptr client = + std::unique_ptr client = absl::WrapUnique(ss_.CreateSocket(client_addr.family(), SOCK_STREAM)); sink.Monitor(client.get()); - EXPECT_EQ(client->GetState(), Socket::CS_CLOSED); + EXPECT_EQ(client->GetState(), webrtc::Socket::CS_CLOSED); EXPECT_EQ(client->GetLocalAddress(), kEmptyAddr); client->Bind(client_addr); // Server gets a non-mapped non-any IPv6 address. // IPv4 sockets should not be able to connect to this. - std::unique_ptr server = + std::unique_ptr server = absl::WrapUnique(ss_.CreateSocket(server_addr.family(), SOCK_STREAM)); sink.Monitor(server.get()); server->Bind(server_addr); @@ -786,7 +794,7 @@ class VirtualSocketServerTest : public ::testing::Test { EXPECT_EQ(0, client->Connect(server->GetLocalAddress())); ss_.ProcessMessagesUntilIdle(); EXPECT_TRUE(sink.Check(server.get(), SSE_READ)); - std::unique_ptr accepted = + std::unique_ptr accepted = absl::WrapUnique(server->Accept(&accept_address)); EXPECT_TRUE(nullptr != accepted); EXPECT_NE(kEmptyAddr, accept_address); @@ -801,7 +809,7 @@ class VirtualSocketServerTest : public ::testing::Test { EXPECT_FALSE(sink.Check(server.get(), SSE_READ)); EXPECT_TRUE(nullptr == server->Accept(&accept_address)); EXPECT_EQ(accept_address, kEmptyAddr); - EXPECT_EQ(client->GetState(), Socket::CS_CLOSED); + EXPECT_EQ(client->GetState(), webrtc::Socket::CS_CLOSED); EXPECT_FALSE(sink.Check(client.get(), SSE_OPEN)); EXPECT_EQ(client->GetRemoteAddress(), kEmptyAddr); } @@ -810,25 +818,25 @@ class VirtualSocketServerTest : public ::testing::Test { // Test cross-family datagram sending between a client bound to client_addr // and a server bound to server_addr. shouldSucceed indicates if sending is // expected to succeed or not. 
- void CrossFamilyDatagramTest(const SocketAddress& client_addr, - const SocketAddress& server_addr, + void CrossFamilyDatagramTest(const webrtc::SocketAddress& client_addr, + const webrtc::SocketAddress& server_addr, bool shouldSucceed) { - Socket* socket = ss_.CreateSocket(AF_INET, SOCK_DGRAM); + webrtc::Socket* socket = ss_.CreateSocket(AF_INET, SOCK_DGRAM); socket->Bind(server_addr); - SocketAddress bound_server_addr = socket->GetLocalAddress(); - auto client1 = std::make_unique( - std::make_unique(socket), &fake_clock_); + webrtc::SocketAddress bound_server_addr = socket->GetLocalAddress(); + auto client1 = std::make_unique( + std::make_unique(socket), &fake_clock_); - Socket* socket2 = ss_.CreateSocket(AF_INET, SOCK_DGRAM); + webrtc::Socket* socket2 = ss_.CreateSocket(AF_INET, SOCK_DGRAM); socket2->Bind(client_addr); - auto client2 = std::make_unique( - std::make_unique(socket2), &fake_clock_); - SocketAddress client2_addr; + auto client2 = std::make_unique( + std::make_unique(socket2), &fake_clock_); + webrtc::SocketAddress client2_addr; if (shouldSucceed) { EXPECT_EQ(3, client2->SendTo("foo", 3, bound_server_addr)); EXPECT_TRUE(client1->CheckNextPacket("foo", 3, &client2_addr)); - SocketAddress client1_addr; + webrtc::SocketAddress client1_addr; EXPECT_EQ(6, client1->SendTo("bizbaz", 6, client2_addr)); EXPECT_TRUE(client2->CheckNextPacket("bizbaz", 6, &client1_addr)); EXPECT_EQ(client1_addr, bound_server_addr); @@ -839,32 +847,32 @@ class VirtualSocketServerTest : public ::testing::Test { } protected: - rtc::ScopedFakeClock fake_clock_; - VirtualSocketServer ss_; - AutoSocketServerThread thread_; - const SocketAddress kIPv4AnyAddress; - const SocketAddress kIPv6AnyAddress; + webrtc::ScopedFakeClock fake_clock_; + webrtc::VirtualSocketServer ss_; + webrtc::AutoSocketServerThread thread_; + const webrtc::SocketAddress kIPv4AnyAddress; + const webrtc::SocketAddress kIPv6AnyAddress; }; TEST_F(VirtualSocketServerTest, basic_v4) { - SocketAddress ipv4_test_addr(IPAddress(INADDR_ANY), 5000); + webrtc::SocketAddress ipv4_test_addr(webrtc::IPAddress(INADDR_ANY), 5000); BasicTest(ipv4_test_addr); } TEST_F(VirtualSocketServerTest, basic_v6) { - SocketAddress ipv6_test_addr(IPAddress(in6addr_any), 5000); + webrtc::SocketAddress ipv6_test_addr(webrtc::IPAddress(in6addr_any), 5000); BasicTest(ipv6_test_addr); } TEST_F(VirtualSocketServerTest, TestDefaultRoute_v4) { - IPAddress ipv4_default_addr(0x01020304); + webrtc::IPAddress ipv4_default_addr(0x01020304); TestDefaultSourceAddress(ipv4_default_addr); } TEST_F(VirtualSocketServerTest, TestDefaultRoute_v6) { - IPAddress ipv6_default_addr; - EXPECT_TRUE( - IPFromString("2401:fa00:4:1000:be30:5bff:fee5:c3", &ipv6_default_addr)); + webrtc::IPAddress ipv6_default_addr; + EXPECT_TRUE(webrtc::IPFromString("2401:fa00:4:1000:be30:5bff:fee5:c3", + &ipv6_default_addr)); TestDefaultSourceAddress(ipv6_default_addr); } @@ -934,100 +942,105 @@ TEST_F(VirtualSocketServerTest, delay_v6) { // Works, receiving socket sees 127.0.0.2. TEST_F(VirtualSocketServerTest, CanConnectFromMappedIPv6ToIPv4Any) { - CrossFamilyConnectionTest(SocketAddress("::ffff:127.0.0.2", 0), - SocketAddress("0.0.0.0", 5000), true); + CrossFamilyConnectionTest(webrtc::SocketAddress("::ffff:127.0.0.2", 0), + webrtc::SocketAddress("0.0.0.0", 5000), true); } // Fails. 
TEST_F(VirtualSocketServerTest, CantConnectFromUnMappedIPv6ToIPv4Any) { - CrossFamilyConnectionTest(SocketAddress("::2", 0), - SocketAddress("0.0.0.0", 5000), false); + CrossFamilyConnectionTest(webrtc::SocketAddress("::2", 0), + webrtc::SocketAddress("0.0.0.0", 5000), false); } // Fails. TEST_F(VirtualSocketServerTest, CantConnectFromUnMappedIPv6ToMappedIPv6) { - CrossFamilyConnectionTest(SocketAddress("::2", 0), - SocketAddress("::ffff:127.0.0.1", 5000), false); + CrossFamilyConnectionTest(webrtc::SocketAddress("::2", 0), + webrtc::SocketAddress("::ffff:127.0.0.1", 5000), + false); } // Works. receiving socket sees ::ffff:127.0.0.2. TEST_F(VirtualSocketServerTest, CanConnectFromIPv4ToIPv6Any) { - CrossFamilyConnectionTest(SocketAddress("127.0.0.2", 0), - SocketAddress("::", 5000), true); + CrossFamilyConnectionTest(webrtc::SocketAddress("127.0.0.2", 0), + webrtc::SocketAddress("::", 5000), true); } // Fails. TEST_F(VirtualSocketServerTest, CantConnectFromIPv4ToUnMappedIPv6) { - CrossFamilyConnectionTest(SocketAddress("127.0.0.2", 0), - SocketAddress("::1", 5000), false); + CrossFamilyConnectionTest(webrtc::SocketAddress("127.0.0.2", 0), + webrtc::SocketAddress("::1", 5000), false); } // Works. Receiving socket sees ::ffff:127.0.0.1. TEST_F(VirtualSocketServerTest, CanConnectFromIPv4ToMappedIPv6) { - CrossFamilyConnectionTest(SocketAddress("127.0.0.1", 0), - SocketAddress("::ffff:127.0.0.2", 5000), true); + CrossFamilyConnectionTest(webrtc::SocketAddress("127.0.0.1", 0), + webrtc::SocketAddress("::ffff:127.0.0.2", 5000), + true); } // Works, receiving socket sees a result from GetNextIP. TEST_F(VirtualSocketServerTest, CanConnectFromUnboundIPv6ToIPv4Any) { - CrossFamilyConnectionTest(SocketAddress("::", 0), - SocketAddress("0.0.0.0", 5000), true); + CrossFamilyConnectionTest(webrtc::SocketAddress("::", 0), + webrtc::SocketAddress("0.0.0.0", 5000), true); } // Works, receiving socket sees whatever GetNextIP gave the client. 
TEST_F(VirtualSocketServerTest, CanConnectFromUnboundIPv4ToIPv6Any) { - CrossFamilyConnectionTest(SocketAddress("0.0.0.0", 0), - SocketAddress("::", 5000), true); + CrossFamilyConnectionTest(webrtc::SocketAddress("0.0.0.0", 0), + webrtc::SocketAddress("::", 5000), true); } TEST_F(VirtualSocketServerTest, CanSendDatagramFromUnboundIPv4ToIPv6Any) { - CrossFamilyDatagramTest(SocketAddress("0.0.0.0", 0), - SocketAddress("::", 5000), true); + CrossFamilyDatagramTest(webrtc::SocketAddress("0.0.0.0", 0), + webrtc::SocketAddress("::", 5000), true); } TEST_F(VirtualSocketServerTest, CanSendDatagramFromMappedIPv6ToIPv4Any) { - CrossFamilyDatagramTest(SocketAddress("::ffff:127.0.0.1", 0), - SocketAddress("0.0.0.0", 5000), true); + CrossFamilyDatagramTest(webrtc::SocketAddress("::ffff:127.0.0.1", 0), + webrtc::SocketAddress("0.0.0.0", 5000), true); } TEST_F(VirtualSocketServerTest, CantSendDatagramFromUnMappedIPv6ToIPv4Any) { - CrossFamilyDatagramTest(SocketAddress("::2", 0), - SocketAddress("0.0.0.0", 5000), false); + CrossFamilyDatagramTest(webrtc::SocketAddress("::2", 0), + webrtc::SocketAddress("0.0.0.0", 5000), false); } TEST_F(VirtualSocketServerTest, CantSendDatagramFromUnMappedIPv6ToMappedIPv6) { - CrossFamilyDatagramTest(SocketAddress("::2", 0), - SocketAddress("::ffff:127.0.0.1", 5000), false); + CrossFamilyDatagramTest(webrtc::SocketAddress("::2", 0), + webrtc::SocketAddress("::ffff:127.0.0.1", 5000), + false); } TEST_F(VirtualSocketServerTest, CanSendDatagramFromIPv4ToIPv6Any) { - CrossFamilyDatagramTest(SocketAddress("127.0.0.2", 0), - SocketAddress("::", 5000), true); + CrossFamilyDatagramTest(webrtc::SocketAddress("127.0.0.2", 0), + webrtc::SocketAddress("::", 5000), true); } TEST_F(VirtualSocketServerTest, CantSendDatagramFromIPv4ToUnMappedIPv6) { - CrossFamilyDatagramTest(SocketAddress("127.0.0.2", 0), - SocketAddress("::1", 5000), false); + CrossFamilyDatagramTest(webrtc::SocketAddress("127.0.0.2", 0), + webrtc::SocketAddress("::1", 5000), false); } TEST_F(VirtualSocketServerTest, CanSendDatagramFromIPv4ToMappedIPv6) { - CrossFamilyDatagramTest(SocketAddress("127.0.0.1", 0), - SocketAddress("::ffff:127.0.0.2", 5000), true); + CrossFamilyDatagramTest(webrtc::SocketAddress("127.0.0.1", 0), + webrtc::SocketAddress("::ffff:127.0.0.2", 5000), + true); } TEST_F(VirtualSocketServerTest, CanSendDatagramFromUnboundIPv6ToIPv4Any) { - CrossFamilyDatagramTest(SocketAddress("::", 0), - SocketAddress("0.0.0.0", 5000), true); + CrossFamilyDatagramTest(webrtc::SocketAddress("::", 0), + webrtc::SocketAddress("0.0.0.0", 5000), true); } TEST_F(VirtualSocketServerTest, SetSendingBlockedWithUdpSocket) { - Socket* socket1 = ss_.CreateSocket(kIPv4AnyAddress.family(), SOCK_DGRAM); - std::unique_ptr socket2 = + webrtc::Socket* socket1 = + ss_.CreateSocket(kIPv4AnyAddress.family(), SOCK_DGRAM); + std::unique_ptr socket2 = absl::WrapUnique(ss_.CreateSocket(kIPv4AnyAddress.family(), SOCK_DGRAM)); socket1->Bind(kIPv4AnyAddress); socket2->Bind(kIPv4AnyAddress); - auto client1 = std::make_unique( - std::make_unique(socket1), &fake_clock_); + auto client1 = std::make_unique( + std::make_unique(socket1), &fake_clock_); ss_.SetSendingBlocked(true); EXPECT_EQ(-1, client1->SendTo("foo", 3, socket2->GetLocalAddress())); @@ -1045,9 +1058,9 @@ TEST_F(VirtualSocketServerTest, SetSendingBlockedWithTcpSocket) { ss_.set_recv_buffer_capacity(kBufferSize); StreamSink sink; - std::unique_ptr socket1 = + std::unique_ptr socket1 = absl::WrapUnique(ss_.CreateSocket(kIPv4AnyAddress.family(), SOCK_STREAM)); - std::unique_ptr socket2 
= + std::unique_ptr socket2 = absl::WrapUnique(ss_.CreateSocket(kIPv4AnyAddress.family(), SOCK_STREAM)); sink.Monitor(socket1.get()); sink.Monitor(socket2.get()); @@ -1094,9 +1107,9 @@ TEST_F(VirtualSocketServerTest, CreatesStandardDistribution) { ASSERT_LT(0u, kTestSamples[sidx]); const uint32_t kStdDev = static_cast(kTestDev[didx] * kTestMean[midx]); - std::unique_ptr f = - VirtualSocketServer::CreateDistribution(kTestMean[midx], kStdDev, - kTestSamples[sidx]); + std::unique_ptr f = + webrtc::VirtualSocketServer::CreateDistribution( + kTestMean[midx], kStdDev, kTestSamples[sidx]); ASSERT_TRUE(nullptr != f.get()); ASSERT_EQ(kTestSamples[sidx], f->size()); double sum = 0; @@ -1122,4 +1135,4 @@ TEST_F(VirtualSocketServerTest, CreatesStandardDistribution) { } } // namespace -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/weak_ptr.cc b/rtc_base/weak_ptr.cc index 3bfa71b0b4..d67472d264 100644 --- a/rtc_base/weak_ptr.cc +++ b/rtc_base/weak_ptr.cc @@ -13,28 +13,22 @@ // The implementation is borrowed from chromium except that it does not // implement SupportsWeakPtr. -namespace rtc { +namespace webrtc { namespace internal { -WeakReference::Flag::Flag() : is_valid_(true) {} - void WeakReference::Flag::Invalidate() { - RTC_DCHECK(checker_.IsCurrent()) - << "WeakPtrs must be invalidated on the same sequence."; + RTC_DCHECK_RUN_ON(&checker_); is_valid_ = false; } bool WeakReference::Flag::IsValid() const { - RTC_DCHECK(checker_.IsCurrent()) - << "WeakPtrs must be checked on the same sequence."; + RTC_DCHECK_RUN_ON(&checker_); return is_valid_; } -WeakReference::Flag::~Flag() {} - WeakReference::WeakReference() {} -WeakReference::WeakReference(const Flag* flag) : flag_(flag) {} +WeakReference::WeakReference(const RefCountedFlag* flag) : flag_(flag) {} WeakReference::~WeakReference() {} @@ -55,7 +49,7 @@ WeakReferenceOwner::~WeakReferenceOwner() { WeakReference WeakReferenceOwner::GetRef() const { // If we hold the last reference to the Flag then create a new one. if (!HasRefs()) - flag_ = new RefCountedObject(); + flag_ = new WeakReference::RefCountedFlag(); return WeakReference(flag_.get()); } @@ -74,4 +68,4 @@ WeakPtrBase::~WeakPtrBase() {} WeakPtrBase::WeakPtrBase(const WeakReference& ref) : ref_(ref) {} } // namespace internal -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/weak_ptr.h b/rtc_base/weak_ptr.h index 7e75b5b9be..00b77fd63f 100644 --- a/rtc_base/weak_ptr.h +++ b/rtc_base/weak_ptr.h @@ -16,9 +16,12 @@ #include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "rtc_base/checks.h" #include "rtc_base/ref_count.h" #include "rtc_base/ref_counted_object.h" #include "rtc_base/system/no_unique_address.h" +#include "rtc_base/thread_annotations.h" +#include "rtc_base/weak_ptr.h" // The implementation is borrowed from chromium except that it does not // implement SupportsWeakPtr. @@ -84,7 +87,7 @@ // the correct thread to enforce that other WeakPtr objects will enforce they // are used on the desired thread. -namespace rtc { +namespace webrtc { namespace internal { @@ -92,25 +95,28 @@ class WeakReference { public: // Although Flag is bound to a specific sequence, it may be // deleted from another via base::WeakPtr::~WeakPtr(). 
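The weak_ptr.cc and weak_ptr.h hunks above rework the internal WeakReference::Flag (that hunk continues right after this sketch) without changing the public WeakPtr/WeakPtrFactory surface. A hedged sketch of typical usage, based on the factory.GetWeakPtr() pattern visible in weak_ptr_unittest.cc later in this patch; the Widget class is hypothetical.

#include <memory>

#include "rtc_base/weak_ptr.h"

class Widget {
 public:
  webrtc::WeakPtr<Widget> GetWeakPtr() { return factory_.GetWeakPtr(); }

 private:
  // Declared last so it is destroyed first and invalidates outstanding
  // WeakPtrs before the rest of the object goes away.
  webrtc::WeakPtrFactory<Widget> factory_{this};
};

void Sketch() {
  auto widget = std::make_unique<Widget>();
  webrtc::WeakPtr<Widget> weak = widget->GetWeakPtr();
  // ... hand `weak` to a task that runs later on the same sequence ...
  widget.reset();  // Destroying the factory invalidates `weak`.
  if (weak.get() == nullptr) {
    // The task can detect that the Widget is gone instead of touching it.
  }
}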
- class Flag : public RefCountInterface { + class Flag { public: - Flag(); + Flag() = default; void Invalidate(); bool IsValid() const; private: - friend class RefCountedObject; + friend class FinalRefCountedObject; - ~Flag() override; + ~Flag() = default; - RTC_NO_UNIQUE_ADDRESS ::webrtc::SequenceChecker checker_{ + RTC_NO_UNIQUE_ADDRESS SequenceChecker checker_{ webrtc::SequenceChecker::kDetached}; - bool is_valid_; + bool is_valid_ RTC_GUARDED_BY(checker_) = true; }; + // `RefCountedFlag` is the reference counted (shared), non-virtual, flag type. + using RefCountedFlag = FinalRefCountedObject; + WeakReference(); - explicit WeakReference(const Flag* flag); + explicit WeakReference(const RefCountedFlag* flag); ~WeakReference(); WeakReference(WeakReference&& other); @@ -121,7 +127,7 @@ class WeakReference { bool is_valid() const; private: - scoped_refptr flag_; + scoped_refptr flag_; }; class WeakReferenceOwner { @@ -136,7 +142,7 @@ class WeakReferenceOwner { void Invalidate(); private: - mutable scoped_refptr> flag_; + mutable scoped_refptr flag_; }; // This class simplifies the implementation of WeakPtr's type conversion @@ -271,6 +277,15 @@ class WeakPtrFactory { T* ptr_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::WeakPtr; +using ::webrtc::WeakPtrFactory; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_WEAK_PTR_H_ diff --git a/rtc_base/weak_ptr_unittest.cc b/rtc_base/weak_ptr_unittest.cc index 9e22312c28..3bff8add88 100644 --- a/rtc_base/weak_ptr_unittest.cc +++ b/rtc_base/weak_ptr_unittest.cc @@ -17,7 +17,7 @@ #include "rtc_base/task_queue_for_test.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace { @@ -203,7 +203,7 @@ TEST(WeakPtrTest, HasWeakPtrs) { template std::unique_ptr NewObjectCreatedOnTaskQueue() { std::unique_ptr obj; - webrtc::TaskQueueForTest queue("NewObjectCreatedOnTaskQueue"); + TaskQueueForTest queue("NewObjectCreatedOnTaskQueue"); queue.SendTask([&] { obj = std::make_unique(); }); return obj; } @@ -225,7 +225,7 @@ TEST(WeakPtrTest, WeakPtrInitiateAndUseOnDifferentThreads) { auto target = std::make_unique(); // Create weak ptr on main thread WeakPtr weak_ptr = target->factory.GetWeakPtr(); - webrtc::TaskQueueForTest queue("queue"); + TaskQueueForTest queue("queue"); queue.SendTask([&] { // Dereference and invalide weak_ptr on another thread. 
EXPECT_EQ(weak_ptr.get(), target.get()); @@ -233,4 +233,4 @@ TEST(WeakPtrTest, WeakPtrInitiateAndUseOnDifferentThreads) { }); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/win/BUILD.gn b/rtc_base/win/BUILD.gn index cf8bc21950..f1959f4952 100644 --- a/rtc_base/win/BUILD.gn +++ b/rtc_base/win/BUILD.gn @@ -62,7 +62,7 @@ rtc_library("windows_version_unittest") { sources = [ "windows_version_unittest.cc" ] deps = [ ":windows_version", - "..:gunit_helpers", "..:logging", + "../../test:test_support", ] } diff --git a/rtc_base/win/windows_version.cc b/rtc_base/win/windows_version.cc index 93af1377be..871be7fc37 100644 --- a/rtc_base/win/windows_version.cc +++ b/rtc_base/win/windows_version.cc @@ -168,7 +168,7 @@ class RegKey { #endif // !defined(WINUWP) -namespace rtc { +namespace webrtc { namespace rtc_win { namespace { @@ -301,7 +301,7 @@ OSInfo::OSInfo() version_number_.major, version_number_.minor, version_number_.build); service_pack_.major = version_info.wServicePackMajor; service_pack_.minor = version_info.wServicePackMinor; - service_pack_str_ = rtc::ToUtf8(version_info.szCSDVersion); + service_pack_str_ = webrtc::ToUtf8(version_info.szCSDVersion); SYSTEM_INFO system_info = {}; ::GetNativeSystemInfo(&system_info); @@ -413,7 +413,7 @@ std::string OSInfo::processor_model_name() { RegKey key(HKEY_LOCAL_MACHINE, kProcessorNameString, KEY_READ); std::wstring value; key.ReadValue(L"ProcessorNameString", &value); - processor_model_name_ = rtc::ToUtf8(value); + processor_model_name_ = webrtc::ToUtf8(value); } return processor_model_name_; #endif // defined(WINUWP) @@ -442,4 +442,4 @@ Version GetVersion() { } } // namespace rtc_win -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/win/windows_version.h b/rtc_base/win/windows_version.h index 8542626afb..cbe7cfd88b 100644 --- a/rtc_base/win/windows_version.h +++ b/rtc_base/win/windows_version.h @@ -17,7 +17,7 @@ typedef void* HANDLE; -namespace rtc { +namespace webrtc { namespace rtc_win { // The running version of Windows. This is declared outside OSInfo for @@ -151,6 +151,6 @@ class OSInfo { Version GetVersion(); } // namespace rtc_win -} // namespace rtc +} // namespace webrtc #endif // RTC_BASE_WIN_WINDOWS_VERSION_H_ diff --git a/rtc_base/win/windows_version_unittest.cc b/rtc_base/win/windows_version_unittest.cc index e1cd920157..1e9047f007 100644 --- a/rtc_base/win/windows_version_unittest.cc +++ b/rtc_base/win/windows_version_unittest.cc @@ -10,10 +10,12 @@ #include "rtc_base/win/windows_version.h" -#include "rtc_base/gunit.h" +#include + #include "rtc_base/logging.h" +#include "test/gtest.h" -namespace rtc { +namespace webrtc { namespace rtc_win { namespace { @@ -44,4 +46,4 @@ TEST(WindowsVersion, ProcessorModelName) { } // namespace } // namespace rtc_win -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/win32.cc b/rtc_base/win32.cc index 9ce0523413..fce3e07650 100644 --- a/rtc_base/win32.cc +++ b/rtc_base/win32.cc @@ -20,7 +20,7 @@ #include "rtc_base/checks.h" #include "rtc_base/string_utils.h" -namespace rtc { +namespace webrtc { // Helper function declarations for inet_ntop/inet_pton. 
static const char* inet_ntop_v4(const void* src, char* dst, socklen_t size); @@ -137,7 +137,7 @@ const char* inet_ntop_v6(const void* src, char* dst, socklen_t size) { for (int i = 0; i < run_array_size; ++i) { if (runpos[i] == -1) { cursor += snprintf(cursor, INET6_ADDRSTRLEN - (cursor - dst), "%x", - NetworkToHost16(as_shorts[i])); + webrtc::NetworkToHost16(as_shorts[i])); if (i != 7 && runpos[i + 1] != 1) { *cursor++ = ':'; } @@ -292,7 +292,7 @@ int inet_pton_v6(const char* src, void* dst) { if (sscanf(readcursor, "%4hx%n", &word, &bytesread) != 1) { return 0; } else { - *addr_cursor = HostToNetwork16(word); + *addr_cursor = webrtc::HostToNetwork16(word); ++addr_cursor; readcursor += bytesread; if (*readcursor != ':' && *readcursor != '\0') { @@ -310,4 +310,4 @@ int inet_pton_v6(const char* src, void* dst) { return 1; } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/win32.h b/rtc_base/win32.h index 6e8d2873aa..19bc067a5d 100644 --- a/rtc_base/win32.h +++ b/rtc_base/win32.h @@ -38,11 +38,20 @@ typedef struct _TOKEN_MANDATORY_LABEL { #undef SetPort -namespace rtc { +namespace webrtc { const char* win32_inet_ntop(int af, const void* src, char* dst, socklen_t size); int win32_inet_pton(int af, const char* src, void* dst); +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::win32_inet_ntop; +using ::webrtc::win32_inet_pton; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_WIN32_H_ diff --git a/rtc_base/win32_socket_init.h b/rtc_base/win32_socket_init.h index 4a90e142b6..6499cd82b1 100644 --- a/rtc_base/win32_socket_init.h +++ b/rtc_base/win32_socket_init.h @@ -17,7 +17,7 @@ #include "rtc_base/win32.h" -namespace rtc { +namespace webrtc { class WinsockInitializer { public: @@ -36,6 +36,6 @@ class WinsockInitializer { int err_; }; -} // namespace rtc +} // namespace webrtc #endif // RTC_BASE_WIN32_SOCKET_INIT_H_ diff --git a/rtc_base/win32_unittest.cc b/rtc_base/win32_unittest.cc index b2955e500a..36e20c2a35 100644 --- a/rtc_base/win32_unittest.cc +++ b/rtc_base/win32_unittest.cc @@ -19,7 +19,7 @@ #error Only for Windows #endif -namespace rtc { +namespace webrtc { class Win32Test : public ::testing::Test { public: @@ -86,4 +86,4 @@ TEST_F(Win32Test, InvalidIPv6AddressParsing) { EXPECT_FALSE(IPFromString("1:2:3:4:5:6:7", &ipv6)); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/zero_memory.cc b/rtc_base/zero_memory.cc index b9c5b380ac..2727ed9e7b 100644 --- a/rtc_base/zero_memory.cc +++ b/rtc_base/zero_memory.cc @@ -17,7 +17,7 @@ #include "rtc_base/checks.h" #include "rtc_base/zero_memory.h" -namespace rtc { +namespace webrtc { // Code and comment taken from "OPENSSL_cleanse" of BoringSSL. void ExplicitZeroMemory(void* ptr, size_t len) { @@ -35,4 +35,4 @@ void ExplicitZeroMemory(void* ptr, size_t len) { #endif // !WEBRTC_WIN } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_base/zero_memory.h b/rtc_base/zero_memory.h index b92f52f9b6..1babf3708e 100644 --- a/rtc_base/zero_memory.h +++ b/rtc_base/zero_memory.h @@ -17,7 +17,7 @@ #include "api/array_view.h" -namespace rtc { +namespace webrtc { // Fill memory with zeros in a way that the compiler doesn't optimize it away // even if the pointer is not used afterwards. 
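A short usage sketch for the helper whose declaration follows in the hunk below, mirroring the zero_memory unit tests: zeroing sensitive data in a way the compiler cannot elide. The buffer and its contents are illustrative.

#include <cstring>

#include "api/array_view.h"
#include "rtc_base/zero_memory.h"

void WipeSecret() {
  char key[32];
  std::memset(key, 0xAB, sizeof(key));  // Pretend this held key material.
  // Unlike a plain memset on a buffer that is about to die, this call is
  // guaranteed not to be optimized away.
  webrtc::ExplicitZeroMemory(key, sizeof(key));
  // The ArrayView overload shown in the hunk below is equivalent:
  webrtc::ExplicitZeroMemory(webrtc::ArrayView<char>(key, sizeof(key)));
}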
@@ -26,10 +26,18 @@ void ExplicitZeroMemory(void* ptr, size_t len); template ::value && std::is_trivial::value>::type* = nullptr> -void ExplicitZeroMemory(rtc::ArrayView a) { +void ExplicitZeroMemory(ArrayView a) { ExplicitZeroMemory(a.data(), a.size()); } +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace rtc { +using ::webrtc::ExplicitZeroMemory; } // namespace rtc +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // RTC_BASE_ZERO_MEMORY_H_ diff --git a/rtc_base/zero_memory_unittest.cc b/rtc_base/zero_memory_unittest.cc index 74d95f49e9..b75e3795ad 100644 --- a/rtc_base/zero_memory_unittest.cc +++ b/rtc_base/zero_memory_unittest.cc @@ -15,7 +15,7 @@ #include "api/array_view.h" #include "test/gtest.h" -namespace rtc { +namespace webrtc { TEST(ZeroMemoryTest, TestZeroMemory) { static const size_t kBufferSize = 32; @@ -35,7 +35,7 @@ TEST(ZeroMemoryTest, TestZeroArrayView) { for (size_t i = 0; i < kBufferSize; i++) { buffer[i] = static_cast(i + 1); } - ExplicitZeroMemory(rtc::ArrayView(buffer, sizeof(buffer))); + ExplicitZeroMemory(ArrayView(buffer, sizeof(buffer))); for (size_t i = 0; i < kBufferSize; i++) { EXPECT_EQ(buffer[i], 0); } @@ -50,4 +50,4 @@ TEST(ZeroMemoryTest, TestZeroMemoryUnused) { ExplicitZeroMemory(buffer, sizeof(buffer)); } -} // namespace rtc +} // namespace webrtc diff --git a/rtc_tools/BUILD.gn b/rtc_tools/BUILD.gn index 5ead8a0506..bac6167a60 100644 --- a/rtc_tools/BUILD.gn +++ b/rtc_tools/BUILD.gn @@ -54,6 +54,7 @@ rtc_library("video_file_reader") { ] deps = [ "../api:make_ref_counted", + "../api:ref_count", "../api:scoped_refptr", "../api/video:video_frame", "../api/video:video_rtp_headers", @@ -61,10 +62,7 @@ rtc_library("video_file_reader") { "../rtc_base:logging", "../rtc_base:refcount", "../rtc_base:stringutils", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -79,10 +77,7 @@ rtc_library("video_file_writer") { "../api/video:video_frame", "../api/video:video_rtp_headers", "../rtc_base:logging", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -115,7 +110,6 @@ rtc_library("video_quality_analysis") { "../rtc_base:logging", "//third_party/libyuv", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } # TODO(bugs.webrtc.org/11474): Enable this on win if needed. For now it @@ -136,12 +130,6 @@ if (!is_component_build) { # is_component_build=true because it depends on WebRTC testonly code # which is not part of //third_party/webrtc_overrides:webrtc_component. - # Abseil dependencies are not moved to the absl_deps field deliberately. - # If build_with_chromium is true, the absl_deps replaces the dependencies with - # the "//third_party/abseil-cpp:absl" target. Which doesn't include absl/flags - # (and some others) because they cannot be used in Chromiums. Special exception - # for the "frame_analyzer" target in "third_party/abseil-cpp/absl.gni" allows - # it to be build in chromium. rtc_executable("frame_analyzer") { visibility = [ "*" ] testonly = true @@ -174,12 +162,6 @@ if (!is_component_build) { # is_component_build=true because it depends on WebRTC testonly code # which is not part of //third_party/webrtc_overrides:webrtc_component. - # Abseil dependencies are not moved to the absl_deps field deliberately. 
- # If build_with_chromium is true, the absl_deps replaces the dependencies with - # the "//third_party/abseil-cpp:absl" target. Which doesn't include absl/flags - # (and some others) because they cannot be used in Chromiums. Special exception - # for the "rtp_generator" target in "third_party/abseil-cpp/absl.gni" allows - # it to be build in chromium. rtc_executable("rtp_generator") { visibility = [ "*" ] testonly = true @@ -200,12 +182,12 @@ if (!is_component_build) { "..//api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter", "..//api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter", "..//api/video_codecs:video_encoder_factory_template_open_h264_adapter", + "..//test/network:simulated_network", "../api:create_frame_generator", "../api:rtp_parameters", "../api:transport_api", - "../api/rtc_event_log", - "../api/task_queue:default_task_queue_factory", - "../api/task_queue:task_queue", + "../api/environment", + "../api/environment:environment_factory", "../api/video:builtin_video_bitrate_allocator_factory", "../api/video_codecs:video_codecs_api", "../call", @@ -213,12 +195,10 @@ if (!is_component_build) { "../call:fake_network", "../call:rtp_interfaces", "../call:rtp_sender", - "../call:simulated_network", "../call:simulated_packet_receiver", - "../call:video_stream_api", + "../call:video_send_stream_api", "../media:media_constants", "../media:rtc_audio_video", - "../media:rtc_media_base", "../rtc_base:rtc_json", "../rtc_base:threading", "../rtc_base/system:file_wrapper", @@ -243,36 +223,40 @@ if (!is_component_build) { # is_component_build=true because it depends on WebRTC testonly code # which is not part of //third_party/webrtc_overrides:webrtc_component. - # Abseil dependencies are not moved to the absl_deps field deliberately. - # If build_with_chromium is true, the absl_deps replaces the dependencies with - # the "//third_party/abseil-cpp:absl" target. Which doesn't include absl/flags - # (and some others) because they cannot be used in Chromiums. Special exception - # for the "video_replay" target in "third_party/abseil-cpp/absl.gni" allows - # it to be build in chromium. 
rtc_executable("video_replay") { visibility = [ "*" ] testonly = true sources = [ "video_replay.cc" ] deps = [ "../api:field_trials", + "../api:field_trials_view", "../api:rtp_parameters", - "../api/rtc_event_log", - "../api/task_queue:default_task_queue_factory", + "../api/environment", + "../api/environment:environment_factory", + "../api/task_queue", "../api/test/video:function_video_factory", "../api/transport:field_trial_based_config", + "../api/units:time_delta", "../api/units:timestamp", + "../api/video:encoded_image", "../api/video:video_frame", "../api/video_codecs:video_codecs_api", "../call", "../call:call_interfaces", + "../call:video_receive_stream_api", "../common_video", "../media:rtc_internal_video_codecs", "../modules/rtp_rtcp:rtp_rtcp_format", + "../modules/video_coding:video_codec_interface", "../modules/video_coding:video_coding_utility", "../rtc_base:checks", + "../rtc_base:copy_on_write_buffer", + "../rtc_base:logging", + "../rtc_base:rtc_event", "../rtc_base:rtc_json", "../rtc_base:stringutils", - "../rtc_base:timeutils", + "../rtc_base:threading", + "../rtc_base/system:file_wrapper", "../system_wrappers", "../test:call_config_utils", "../test:encoder_settings", @@ -285,12 +269,14 @@ if (!is_component_build) { "../test:test_renderer", "../test:test_support", "../test:test_video_capturer", + "../test:video_frame_writer", "../test:video_test_common", "../test:video_test_constants", - "../test:video_test_support", "../test/time_controller:time_controller", "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", + "//third_party/abseil-cpp/absl/strings:str_format", + "//third_party/abseil-cpp/absl/strings:strings", ] if (build_with_chromium) { # When building from Chromium, WebRTC's metrics and field trial @@ -359,6 +345,7 @@ if (!build_with_chromium) { rtc_library("event_log_visualizer_utils") { visibility = [ "*" ] + allow_poison = [ "environment_construction" ] sources = [ "rtc_event_log_visualizer/alerts.cc", "rtc_event_log_visualizer/alerts.h", @@ -372,35 +359,44 @@ if (!build_with_chromium) { "rtc_event_log_visualizer/log_simulation.h", "rtc_event_log_visualizer/plot_base.cc", "rtc_event_log_visualizer/plot_base.h", - "rtc_event_log_visualizer/plot_protobuf.cc", - "rtc_event_log_visualizer/plot_protobuf.h", - "rtc_event_log_visualizer/plot_python.cc", - "rtc_event_log_visualizer/plot_python.h", ] deps = [ ":chart_proto", + "../api:candidate", + "../api:dtls_transport_interface", "../api:function_view", - "../api:network_state_predictor_api", - "../modules/audio_coding:neteq_input_audio_tools", - "../modules/audio_coding:neteq_tools_minimal", - "../rtc_base:ignore_wundef", - "../rtc_base:logging", - "../rtc_base:macromagic", - "../rtc_base:rate_statistics", - "../rtc_base:refcount", - - # TODO(kwiberg): Remove this dependency. - "../api/audio_codecs:audio_codecs_api", + "../api:make_ref_counted", + "../api:rtp_headers", + "../api:rtp_parameters", + "../api:scoped_refptr", + "../api/audio_codecs:audio_codecs_api", # TODO(kwiberg): Remove this + # dependency. 
+ "../api/environment", + "../api/environment:environment_factory", + "../api/neteq:neteq_api", + "../api/rtc_event_log:rtc_event_log", + "../api/transport:bandwidth_usage", + "../api/transport:ecn_marking", "../api/transport:field_trial_based_config", "../api/transport:goog_cc", "../api/transport:network_control", + "../api/units:data_rate", + "../api/units:time_delta", + "../api/units:timestamp", "../call:call_interfaces", - "../call:video_stream_api", + "../logging:ice_log", + "../logging:rtc_event_audio", + "../logging:rtc_event_audio", + "../logging:rtc_event_bwe", + "../logging:rtc_event_generic_packet_events", "../logging:rtc_event_log_parser", + "../logging:rtc_event_rtp_rtcp", "../logging:rtc_stream_config", "../modules/audio_coding:ana_debug_dump_proto", "../modules/audio_coding:audio_network_adaptor", + "../modules/audio_coding:neteq_input_audio_tools", "../modules/audio_coding:neteq_tools", + "../modules/audio_coding:neteq_tools_minimal", "../modules/congestion_controller", "../modules/congestion_controller/goog_cc:delay_based_bwe", "../modules/congestion_controller/goog_cc:estimators", @@ -408,19 +404,55 @@ if (!build_with_chromium) { "../modules/pacing", "../modules/remote_bitrate_estimator", "../modules/rtp_rtcp", + "../modules/rtp_rtcp:ntp_time_util", "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:checks", + "../rtc_base:logging", + "../rtc_base:macromagic", + "../rtc_base:rate_statistics", + "../rtc_base:refcount", "../rtc_base:rtc_numerics", "../rtc_base:stringutils", + "../rtc_base/network:sent_packet", "../system_wrappers", + "../system_wrappers:field_trial", "../test:explicit_key_value_config", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/functional:bind_front", + "//third_party/abseil-cpp/absl/strings:string_view", + ] + } + + rtc_library("event_log_visualizer_bindings") { + visibility = [ "*" ] + allow_poison = [ "environment_construction" ] + sources = [ + "rtc_event_log_visualizer/analyzer_bindings.cc", + "rtc_event_log_visualizer/analyzer_bindings.h", + ] + deps = [ + ":chart_proto", + ":event_log_visualizer_utils", + "//api/units:time_delta", + "//logging:rtc_event_log_parser", + "//rtc_base:protobuf_utils", + "//rtc_base:safe_conversions", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", + ] + } + + rtc_library("event_log_visualizer_bindings_unittest") { + testonly = true + sources = [ "rtc_event_log_visualizer/analyzer_bindings_unittest.cc" ] + deps = [ + ":chart_proto", + ":event_log_visualizer_bindings", + "//rtc_base:protobuf_utils", + "//rtc_base/system:file_wrapper", + "//test:fileutils", + "//test:test_support", ] } } @@ -434,11 +466,24 @@ if (!build_with_chromium) { "video_encoder/video_encoder.cc", ] deps = [ + "../api/video:encoded_image", + "../api/video:video_bitrate_allocation", + "../api/video:video_bitrate_allocator", + "../api/video:video_frame", + "../api/video:video_frame_type", + "../api/video_codecs:scalability_mode", + "../rtc_base:checks", + "../rtc_base:stringutils", "//api:create_frame_generator", "//api:frame_generator_api", + "//api/environment", + "//api/environment:environment_factory", "//api/video:builtin_video_bitrate_allocator_factory", + "//api/video_codecs:builtin_video_decoder_factory", "//api/video_codecs:builtin_video_encoder_factory", "//api/video_codecs:video_codecs_api", + "//common_video:common_video", + 
"//media:media_constants", "//modules/video_coding:video_codec_interface", "//modules/video_coding:video_coding_utility", "//modules/video_coding/codecs/av1:av1_svc_config", @@ -448,6 +493,7 @@ if (!build_with_chromium) { "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", "//third_party/abseil-cpp/absl/flags:usage", + "//third_party/abseil-cpp/absl/strings:strings", ] } } @@ -464,9 +510,11 @@ if (rtc_include_tests) { "rtc_event_log_visualizer/main.cc", ] deps = [ + ":chart_proto", ":event_log_visualizer_utils", "../api/neteq:neteq_api", "../api/rtc_event_log", + "../api/units:time_delta", "../logging:rtc_event_log_parser", "../modules/audio_coding:neteq", "../modules/rtp_rtcp:rtp_rtcp_format", @@ -491,7 +539,11 @@ if (rtc_include_tests) { "rtc_event_log_to_text/main.cc", ] deps = [ + "../api:candidate", + "../api:rtp_parameters", "../api/rtc_event_log", + "../api/transport:bandwidth_usage", + "../api/video:video_frame", "../logging:ice_log", "../logging:rtc_event_audio", "../logging:rtc_event_begin_end", @@ -521,6 +573,7 @@ if (rtc_include_tests) { "../resources/foreman_128x96.yuv", "../resources/foreman_cif.yuv", "../resources/reference_less_video_test_file.y4m", + "../resources/rtc_event_log/rtc_event_log_500kbps.binarypb", ] if (is_ios) { @@ -572,12 +625,14 @@ if (rtc_include_tests) { } if (rtc_enable_protobuf) { - deps += [ "network_tester:network_tester_unittests" ] + deps += [ + ":event_log_visualizer_bindings_unittest", + "network_tester:network_tester_unittests", + ] } data = tools_unittests_resources if (is_android) { - deps += [ "//testing/android/native_test:native_test_support" ] shard_timeout = 900 } if (is_ios) { @@ -591,8 +646,8 @@ if (rtc_include_tests) { sources = [ "audioproc_f/audioproc_float_main.cc" ] deps = [ "../api:audioproc_f_api", + "../api/audio:audio_processing", "../modules/audio_processing", - "../modules/audio_processing:api", ] } @@ -608,7 +663,6 @@ if (rtc_include_tests) { "../modules/audio_processing:audioproc_debug_proto", "../modules/audio_processing:audioproc_protobuf_utils", "../rtc_base:checks", - "../rtc_base:ignore_wundef", "../rtc_base:macromagic", "../rtc_base:protobuf_utils", "../rtc_base:stringutils", diff --git a/rtc_tools/DEPS b/rtc_tools/DEPS index f62653d3ae..49c1d01f1c 100644 --- a/rtc_tools/DEPS +++ b/rtc_tools/DEPS @@ -31,11 +31,15 @@ specific_include_rules = { "+modules/video_coding/codecs/h264/include/h264.h", ], ".*video_replay\.cc": [ + "+absl/strings/str_format.h", + "+absl/strings/str_split.h", + "+modules/video_coding/include/video_error_codes.h", "+modules/video_coding/utility/ivf_file_writer.h", ], ".*video_encoder\.cc": [ "+modules/video_coding/codecs/av1/av1_svc_config.h", "+modules/video_coding/include/video_codec_interface.h", + "+modules/video_coding/include/video_error_codes.h", "+modules/video_coding/svc/scalability_mode_util.h", ], ".*encoded_image_file_writer\.(cc|h)": [ diff --git a/rtc_tools/OWNERS b/rtc_tools/OWNERS index c2f49200e7..a98cd6c523 100644 --- a/rtc_tools/OWNERS +++ b/rtc_tools/OWNERS @@ -2,3 +2,6 @@ mbonadei@webrtc.org # For video analysis tools magjed@webrtc.org + +# For RTC event log tools +terelius@webrtc.org diff --git a/rtc_tools/audioproc_f/audioproc_float_main.cc b/rtc_tools/audioproc_f/audioproc_float_main.cc index 3cc6d4323b..aa3e0ef485 100644 --- a/rtc_tools/audioproc_f/audioproc_float_main.cc +++ b/rtc_tools/audioproc_f/audioproc_float_main.cc @@ -8,12 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include - #include "api/test/audioproc_float.h" -#include "modules/audio_processing/include/audio_processing.h" int main(int argc, char* argv[]) { - return webrtc::test::AudioprocFloat( - std::make_unique(), argc, argv); + return webrtc::test::AudioprocFloat(argc, argv); } diff --git a/rtc_tools/data_channel_benchmark/BUILD.gn b/rtc_tools/data_channel_benchmark/BUILD.gn index abe1b4a0e3..a8804a8dda 100644 --- a/rtc_tools/data_channel_benchmark/BUILD.gn +++ b/rtc_tools/data_channel_benchmark/BUILD.gn @@ -46,8 +46,13 @@ rtc_executable("data_channel_benchmark") { "../../api:libjingle_peerconnection_api", "../../api:rtc_error", "../../api:scoped_refptr", + "../../api/audio:audio_device", + "../../api/audio:audio_mixer_api", + "../../api/audio:audio_processing", "../../api/audio_codecs:builtin_audio_decoder_factory", "../../api/audio_codecs:builtin_audio_encoder_factory", + "../../api/video_codecs:video_codecs_api", + "../../api/video_codecs:video_codecs_api", "../../api/video_codecs:video_decoder_factory_template", "../../api/video_codecs:video_decoder_factory_template_dav1d_adapter", "../../api/video_codecs:video_decoder_factory_template_libvpx_vp8_adapter", @@ -58,14 +63,20 @@ rtc_executable("data_channel_benchmark") { "../../api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter", "../../api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter", "../../api/video_codecs:video_encoder_factory_template_open_h264_adapter", + "../../rtc_base:checks", + "../../rtc_base:copy_on_write_buffer", "../../rtc_base:logging", "../../rtc_base:refcount", "../../rtc_base:rtc_event", - "../../rtc_base:ssl", + "../../rtc_base:ssl_adapter", + "../../rtc_base:stringutils", "../../rtc_base:threading", + "../../system_wrappers", "../../system_wrappers:field_trial", "//third_party/abseil-cpp/absl/cleanup:cleanup", "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", + "//third_party/abseil-cpp/absl/strings:string_view", + "//third_party/abseil-cpp/absl/time", ] } diff --git a/rtc_tools/data_channel_benchmark/data_channel_benchmark.cc b/rtc_tools/data_channel_benchmark/data_channel_benchmark.cc index fa0b6ca9c4..7c5d05e336 100644 --- a/rtc_tools/data_channel_benchmark/data_channel_benchmark.cc +++ b/rtc_tools/data_channel_benchmark/data_channel_benchmark.cc @@ -18,18 +18,36 @@ * transport. No TURN server is configured, so both peers need to be reachable * using STUN only. 
*/ -#include +#include #include +#include +#include +#include +#include +#include +#include #include "absl/cleanup/cleanup.h" #include "absl/flags/flag.h" #include "absl/flags/parse.h" +#include "absl/strings/string_view.h" +#include "api/data_channel_interface.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "api/scoped_refptr.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/event.h" +#include "rtc_base/logging.h" #include "rtc_base/ssl_adapter.h" +#include "rtc_base/string_encode.h" +#include "rtc_base/strings/string_builder.h" #include "rtc_base/thread.h" #include "rtc_tools/data_channel_benchmark/grpc_signaling.h" #include "rtc_tools/data_channel_benchmark/peer_connection_client.h" +#include "rtc_tools/data_channel_benchmark/signaling_interface.h" +#include "system_wrappers/include/clock.h" #include "system_wrappers/include/field_trial.h" ABSL_FLAG(int, verbose, 0, "verbosity level (0-5)"); @@ -52,7 +70,7 @@ struct SetupMessage { std::string ToString() { char buffer[64]; - rtc::SimpleStringBuilder sb(buffer); + webrtc::SimpleStringBuilder sb(buffer); sb << packet_size << "," << transfer_size; return sb.str(); @@ -60,7 +78,7 @@ struct SetupMessage { static SetupMessage FromString(absl::string_view sv) { SetupMessage result; - auto parameters = rtc::split(sv, ','); + auto parameters = webrtc::split(sv, ','); std::from_chars(parameters[0].data(), parameters[0].data() + parameters[0].size(), result.packet_size, 10); @@ -74,7 +92,7 @@ struct SetupMessage { class DataChannelServerObserverImpl : public webrtc::DataChannelObserver { public: explicit DataChannelServerObserverImpl(webrtc::DataChannelInterface* dc, - rtc::Thread* signaling_thread) + webrtc::Thread* signaling_thread) : dc_(dc), signaling_thread_(signaling_thread) {} void OnStateChange() override { @@ -113,17 +131,19 @@ class DataChannelServerObserverImpl : public webrtc::DataChannelObserver { bool IsOkToCallOnTheNetworkThread() override { return true; } - bool WaitForClosedState() { return closed_event_.Wait(rtc::Event::kForever); } + bool WaitForClosedState() { + return closed_event_.Wait(webrtc::Event::kForever); + } bool WaitForSetupMessage() { - return setup_message_event_.Wait(rtc::Event::kForever); + return setup_message_event_.Wait(webrtc::Event::kForever); } void StartSending() { RTC_CHECK(remaining_data_) << "Error: no data to send"; std::string data(std::min(setup_.packet_size, remaining_data_), '0'); webrtc::DataBuffer* data_buffer = - new webrtc::DataBuffer(rtc::CopyOnWriteBuffer(data), true); + new webrtc::DataBuffer(webrtc::CopyOnWriteBuffer(data), true); total_queued_up_ = data_buffer->size(); dc_->SendAsync(*data_buffer, [this, data_buffer = data_buffer](webrtc::RTCError err) { @@ -168,9 +188,9 @@ class DataChannelServerObserverImpl : public webrtc::DataChannelObserver { } webrtc::DataChannelInterface* const dc_; - rtc::Thread* const signaling_thread_; - rtc::Event closed_event_; - rtc::Event setup_message_event_; + webrtc::Thread* const signaling_thread_; + webrtc::Event closed_event_; + webrtc::Event setup_message_event_; size_t remaining_data_ = 0u; size_t total_queued_up_ = 0u; struct SetupMessage setup_; @@ -206,17 +226,17 @@ class DataChannelClientObserverImpl : public webrtc::DataChannelObserver { void OnBufferedAmountChange(uint64_t sent_data_size) override {} bool IsOkToCallOnTheNetworkThread() override { return true; } - bool WaitForOpenState() { return open_event_.Wait(rtc::Event::kForever); } + bool WaitForOpenState() { 
return open_event_.Wait(webrtc::Event::kForever); } // Wait until the received byte count reaches the desired value. bool WaitForBytesReceivedThreshold() { - return bytes_received_event_.Wait(rtc::Event::kForever); + return bytes_received_event_.Wait(webrtc::Event::kForever); } private: webrtc::DataChannelInterface* const dc_; - rtc::Event open_event_; - rtc::Event bytes_received_event_; + webrtc::Event open_event_; + webrtc::Event bytes_received_event_; const uint64_t bytes_received_threshold_; uint64_t bytes_received_ = 0u; }; @@ -225,15 +245,16 @@ int RunServer() { bool oneshot = absl::GetFlag(FLAGS_oneshot); uint16_t port = absl::GetFlag(FLAGS_port); - auto signaling_thread = rtc::Thread::Create(); + auto signaling_thread = webrtc::Thread::Create(); signaling_thread->Start(); { auto factory = webrtc::PeerConnectionClient::CreateDefaultFactory( signaling_thread.get()); auto grpc_server = webrtc::GrpcSignalingServerInterface::Create( - [factory = rtc::scoped_refptr( - factory), + [factory = + webrtc::scoped_refptr( + factory), signaling_thread = signaling_thread.get()](webrtc::SignalingInterface* signaling) { webrtc::PeerConnectionClient client(factory.get(), signaling); @@ -295,7 +316,7 @@ int RunClient() { size_t transfer_size = absl::GetFlag(FLAGS_transfer_size) * 1024 * 1024; size_t packet_size = absl::GetFlag(FLAGS_packet_size); - auto signaling_thread = rtc::Thread::Create(); + auto signaling_thread = webrtc::Thread::Create(); signaling_thread->Start(); { auto factory = webrtc::PeerConnectionClient::CreateDefaultFactory( @@ -308,10 +329,10 @@ int RunClient() { std::unique_ptr observer; // Set up the callback to receive the data channel from the sender. - rtc::scoped_refptr data_channel; - rtc::Event got_data_channel; + webrtc::scoped_refptr data_channel; + webrtc::Event got_data_channel; client.SetOnDataChannel( - [&](rtc::scoped_refptr channel) { + [&](webrtc::scoped_refptr channel) { data_channel = std::move(channel); // DataChannel needs an observer to drain the read queue. observer = std::make_unique( @@ -327,7 +348,7 @@ int RunClient() { } // Wait for the data channel to be received - got_data_channel.Wait(rtc::Event::kForever); + got_data_channel.Wait(webrtc::Event::kForever); absl::Cleanup unregister_observer( [data_channel] { data_channel->UnregisterObserver(); }); @@ -358,15 +379,15 @@ int RunClient() { } int main(int argc, char** argv) { - rtc::InitializeSSL(); + webrtc::InitializeSSL(); absl::ParseCommandLine(argc, argv); // Make sure that higher severity number means more logs by reversing the - // rtc::LoggingSeverity values. + // webrtc::LoggingSeverity values. auto logging_severity = - std::max(0, rtc::LS_NONE - absl::GetFlag(FLAGS_verbose)); - rtc::LogMessage::LogToDebug( - static_cast(logging_severity)); + std::max(0, webrtc::LS_NONE - absl::GetFlag(FLAGS_verbose)); + webrtc::LogMessage::LogToDebug( + static_cast(logging_severity)); bool is_server = absl::GetFlag(FLAGS_server); std::string field_trials = absl::GetFlag(FLAGS_force_fieldtrials); diff --git a/rtc_tools/data_channel_benchmark/grpc_signaling.cc b/rtc_tools/data_channel_benchmark/grpc_signaling.cc index 8db717fc71..33fb60028c 100644 --- a/rtc_tools/data_channel_benchmark/grpc_signaling.cc +++ b/rtc_tools/data_channel_benchmark/grpc_signaling.cc @@ -175,14 +175,14 @@ class GrpcNegotiationServer : public GrpcSignalingServerInterface, if (oneshot_) { // Request the termination of the server early so we don't serve another // client in parallel. 
- server_stop_thread_ = rtc::Thread::Create(); + server_stop_thread_ = Thread::Create(); server_stop_thread_->Start(); server_stop_thread_->PostTask([this] { Stop(); }); } ServerSessionData session(stream); - auto reading_thread = rtc::Thread::Create(); + auto reading_thread = Thread::Create(); reading_thread->Start(); reading_thread->PostTask([&session, &stream] { ProcessMessages(stream, &session); @@ -202,7 +202,7 @@ class GrpcNegotiationServer : public GrpcSignalingServerInterface, bool oneshot_; std::unique_ptr server_; - std::unique_ptr server_stop_thread_; + std::unique_ptr server_stop_thread_; }; class GrpcNegotiationClient : public GrpcSignalingClientInterface { @@ -227,7 +227,7 @@ class GrpcNegotiationClient : public GrpcSignalingClientInterface { stream_ = stub_->Connect(&context_); session_.SetStream(stream_.get()); - reading_thread_ = rtc::Thread::Create(); + reading_thread_ = Thread::Create(); reading_thread_->Start(); reading_thread_->PostTask([this] { ProcessMessages(stream_.get(), &session_); @@ -241,7 +241,7 @@ class GrpcNegotiationClient : public GrpcSignalingClientInterface { private: std::shared_ptr channel_; std::unique_ptr stub_; - std::unique_ptr reading_thread_; + std::unique_ptr reading_thread_; grpc::ClientContext context_; std::unique_ptr< ::grpc::ClientReaderWriter> diff --git a/rtc_tools/data_channel_benchmark/peer_connection_client.cc b/rtc_tools/data_channel_benchmark/peer_connection_client.cc index a6f8bf558c..74175bd7da 100644 --- a/rtc_tools/data_channel_benchmark/peer_connection_client.cc +++ b/rtc_tools/data_channel_benchmark/peer_connection_client.cc @@ -13,6 +13,9 @@ #include #include +#include "api/audio/audio_device.h" +#include "api/audio/audio_mixer.h" +#include "api/audio/audio_processing.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/create_peerconnection_factory.h" @@ -20,18 +23,23 @@ #include "api/peer_connection_interface.h" #include "api/rtc_error.h" #include "api/scoped_refptr.h" +#include "api/set_local_description_observer_interface.h" #include "api/set_remote_description_observer_interface.h" +#include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_decoder_factory_template.h" #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h" +#include "api/video_codecs/video_encoder_factory.h" #include "api/video_codecs/video_encoder_factory_template.h" #include "api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h" #include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h" #include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/ref_counted_object.h" #include "rtc_base/thread.h" namespace { @@ -42,10 +50,10 @@ class SetLocalDescriptionObserverAdapter : public webrtc::SetLocalDescriptionObserverInterface { public: using Callback = std::function; - static rtc::scoped_refptr Create( + static webrtc::scoped_refptr Create( Callback callback) { - return rtc::scoped_refptr( - new rtc::RefCountedObject( + return webrtc::scoped_refptr( + new webrtc::RefCountedObject( 
std::move(callback))); } @@ -65,10 +73,10 @@ class SetRemoteDescriptionObserverAdapter : public webrtc::SetRemoteDescriptionObserverInterface { public: using Callback = std::function; - static rtc::scoped_refptr Create( + static webrtc::scoped_refptr Create( Callback callback) { - return rtc::scoped_refptr( - new rtc::RefCountedObject( + return webrtc::scoped_refptr( + new webrtc::RefCountedObject( std::move(callback))); } @@ -90,11 +98,11 @@ class CreateSessionDescriptionObserverAdapter using Success = std::function; using Failure = std::function; - static rtc::scoped_refptr Create( + static webrtc::scoped_refptr Create( Success success, Failure failure) { - return rtc::scoped_refptr( - new rtc::RefCountedObject( + return webrtc::scoped_refptr( + new webrtc::RefCountedObject( std::move(success), std::move(failure))); } @@ -138,8 +146,8 @@ PeerConnectionClient::~PeerConnectionClient() { Disconnect(); } -rtc::scoped_refptr -PeerConnectionClient::CreateDefaultFactory(rtc::Thread* signaling_thread) { +scoped_refptr +PeerConnectionClient::CreateDefaultFactory(Thread* signaling_thread) { auto factory = webrtc::CreatePeerConnectionFactory( /*network_thread=*/nullptr, /*worker_thread=*/nullptr, /*signaling_thread*/ signaling_thread, @@ -250,7 +258,7 @@ void PeerConnectionClient::OnIceCandidate( } void PeerConnectionClient::OnDataChannel( - rtc::scoped_refptr channel) { + scoped_refptr channel) { RTC_LOG(LS_INFO) << __FUNCTION__ << " remote datachannel created"; if (on_data_channel_callback_) on_data_channel_callback_(channel); @@ -258,7 +266,7 @@ void PeerConnectionClient::OnDataChannel( } void PeerConnectionClient::SetOnDataChannel( - std::function)> + std::function)> callback) { on_data_channel_callback_ = callback; } diff --git a/rtc_tools/data_channel_benchmark/peer_connection_client.h b/rtc_tools/data_channel_benchmark/peer_connection_client.h index 62b205c2ed..f8f42e6037 100644 --- a/rtc_tools/data_channel_benchmark/peer_connection_client.h +++ b/rtc_tools/data_channel_benchmark/peer_connection_client.h @@ -12,10 +12,12 @@ #include +#include #include #include #include +#include "api/data_channel_interface.h" #include "api/jsep.h" #include "api/peer_connection_interface.h" #include "api/rtp_receiver_interface.h" @@ -49,23 +51,22 @@ class PeerConnectionClient : public webrtc::PeerConnectionObserver { // Disconnect from the call. void Disconnect(); - rtc::scoped_refptr peerConnection() { + scoped_refptr peerConnection() { return peer_connection_; } // Set a callback to run when a DataChannel is created by the remote peer. void SetOnDataChannel( - std::function)> + std::function)> callback); - std::vector>& - dataChannels() { + std::vector>& dataChannels() { return data_channels_; } // Creates a default PeerConnectionFactory object. - static rtc::scoped_refptr - CreateDefaultFactory(rtc::Thread* signaling_thread); + static scoped_refptr + CreateDefaultFactory(Thread* signaling_thread); private: void AddIceCandidate( @@ -84,7 +85,7 @@ class PeerConnectionClient : public webrtc::PeerConnectionObserver { RTC_LOG(LS_INFO) << __FUNCTION__ << " new state: " << new_state; } void OnDataChannel( - rtc::scoped_refptr channel) override; + scoped_refptr channel) override; void OnNegotiationNeededEvent(uint32_t event_id) override; void OnIceConnectionChange( webrtc::PeerConnectionInterface::IceConnectionState new_state) override; @@ -95,10 +96,10 @@ class PeerConnectionClient : public webrtc::PeerConnectionObserver { RTC_LOG(LS_INFO) << __FUNCTION__ << " receiving? 
" << receiving; } - rtc::scoped_refptr peer_connection_; - std::function)> + scoped_refptr peer_connection_; + std::function)> on_data_channel_callback_; - std::vector> data_channels_; + std::vector> data_channels_; webrtc::SignalingInterface* signaling_; }; diff --git a/rtc_tools/frame_analyzer/frame_analyzer.cc b/rtc_tools/frame_analyzer/frame_analyzer.cc index 501a6142a8..ca59f6c66c 100644 --- a/rtc_tools/frame_analyzer/frame_analyzer.cc +++ b/rtc_tools/frame_analyzer/frame_analyzer.cc @@ -110,9 +110,9 @@ int main(int argc, char* argv[]) { webrtc::test::ResultsContainer results; - rtc::scoped_refptr reference_video = + webrtc::scoped_refptr reference_video = webrtc::test::OpenYuvOrY4mFile(reference_file_name, width, height); - rtc::scoped_refptr test_video = + webrtc::scoped_refptr test_video = webrtc::test::OpenYuvOrY4mFile(test_file_name, width, height); if (!reference_video || !test_video) { @@ -126,7 +126,7 @@ int main(int argc, char* argv[]) { // Align the reference video both temporally and geometrically. I.e. align the // frames to match up in order to the test video, and align a crop region of // the reference video to match up to the test video. - const rtc::scoped_refptr aligned_reference_video = + const webrtc::scoped_refptr aligned_reference_video = AdjustCropping(ReorderVideo(reference_video, matching_indices), test_video); @@ -136,7 +136,7 @@ int main(int argc, char* argv[]) { CalculateColorTransformationMatrix(aligned_reference_video, test_video); char buf[256]; - rtc::SimpleStringBuilder string_builder(buf); + webrtc::SimpleStringBuilder string_builder(buf); for (int i = 0; i < 3; ++i) { string_builder << "\n"; for (int j = 0; j < 4; ++j) @@ -147,7 +147,7 @@ int main(int argc, char* argv[]) { // Adjust all frames in the test video with the calculated color // transformation. - const rtc::scoped_refptr color_adjusted_test_video = + const webrtc::scoped_refptr color_adjusted_test_video = AdjustColors(color_transformation, test_video); results.frames = webrtc::test::RunAnalysis( diff --git a/rtc_tools/frame_analyzer/linear_least_squares.h b/rtc_tools/frame_analyzer/linear_least_squares.h index 7006db1d65..5b5a7837d4 100644 --- a/rtc_tools/frame_analyzer/linear_least_squares.h +++ b/rtc_tools/frame_analyzer/linear_least_squares.h @@ -13,11 +13,10 @@ #include +#include #include #include -#include "absl/types/optional.h" - namespace webrtc { namespace test { @@ -45,9 +44,9 @@ class IncrementalLinearLeastSquares { private: // Running sum of x^T * x. - absl::optional>> sum_xx; + std::optional>> sum_xx; // Running sum of x^T * y. 
- absl::optional>> sum_xy; + std::optional>> sum_xy; }; } // namespace test diff --git a/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.cc b/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.cc index ebfc6650b2..a0bc848f5d 100644 --- a/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.cc +++ b/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.cc @@ -105,13 +105,13 @@ void print_freezing_metrics(const std::vector& psnr_per_frame, printf("\n"); } -void compute_metrics(const rtc::scoped_refptr& video, +void compute_metrics(const webrtc::scoped_refptr& video, std::vector* psnr_per_frame, std::vector* ssim_per_frame) { for (size_t i = 0; i < video->number_of_frames() - 1; ++i) { - const rtc::scoped_refptr current_frame = + const webrtc::scoped_refptr current_frame = video->GetFrame(i); - const rtc::scoped_refptr next_frame = + const webrtc::scoped_refptr next_frame = video->GetFrame(i + 1); double result_psnr = webrtc::test::Psnr(current_frame, next_frame); double result_ssim = webrtc::test::Ssim(current_frame, next_frame); @@ -124,7 +124,7 @@ void compute_metrics(const rtc::scoped_refptr& video, int run_analysis(const std::string& video_file) { std::vector psnr_per_frame; std::vector ssim_per_frame; - rtc::scoped_refptr video = + webrtc::scoped_refptr video = webrtc::test::OpenY4mFile(video_file); if (video) { compute_metrics(video, &psnr_per_frame, &ssim_per_frame); diff --git a/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.h b/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.h index 3c93119905..b9f828101b 100644 --- a/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.h +++ b/rtc_tools/frame_analyzer/reference_less_video_analysis_lib.h @@ -37,7 +37,7 @@ void print_freezing_metrics(const std::vector& psnr_per_frame, // Compute the metrics like freezing score based on PSNR and SSIM values for a // given video file. -void compute_metrics(const rtc::scoped_refptr& video, +void compute_metrics(const webrtc::scoped_refptr& video, std::vector* psnr_per_frame, std::vector* ssim_per_frame); diff --git a/rtc_tools/frame_analyzer/reference_less_video_analysis_unittest.cc b/rtc_tools/frame_analyzer/reference_less_video_analysis_unittest.cc index b98a014989..37cfaaf335 100644 --- a/rtc_tools/frame_analyzer/reference_less_video_analysis_unittest.cc +++ b/rtc_tools/frame_analyzer/reference_less_video_analysis_unittest.cc @@ -23,7 +23,7 @@ class ReferenceLessVideoAnalysisTest : public ::testing::Test { ASSERT_TRUE(video); } - rtc::scoped_refptr video; + webrtc::scoped_refptr video; std::vector psnr_per_frame; std::vector ssim_per_frame; }; diff --git a/rtc_tools/frame_analyzer/video_color_aligner.cc b/rtc_tools/frame_analyzer/video_color_aligner.cc index 5983e47f69..54f7fd4de9 100644 --- a/rtc_tools/frame_analyzer/video_color_aligner.cc +++ b/rtc_tools/frame_analyzer/video_color_aligner.cc @@ -33,11 +33,11 @@ namespace { // Helper function for AdjustColors(). This functions calculates a single output // row for y with the given color coefficients. The u/v channels are assumed to // be subsampled by a factor of 2, which is the case of I420. -void CalculateYChannel(rtc::ArrayView y_data, - rtc::ArrayView u_data, - rtc::ArrayView v_data, +void CalculateYChannel(ArrayView y_data, + ArrayView u_data, + ArrayView v_data, const std::array& coeff, - rtc::ArrayView output) { + ArrayView output) { RTC_CHECK_EQ(y_data.size(), output.size()); // Each u/v element represents two y elements. Make sure we have enough to // cover the Y values. 
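The comments above describe how CalculateYChannel applies a single [y, u, v, 1] coefficient row while compensating for I420's 2x chroma subsampling. As a rough illustration of the arithmetic only, ignoring the subsampling and assuming ArrayView now lives in namespace webrtc as the renames in this diff indicate (element types are assumed, since the template arguments are not visible here), a same-size-planes version might look like:

#include <algorithm>
#include <array>
#include <cstddef>
#include <cstdint>

#include "api/array_view.h"

// Simplified sketch, not the real CalculateYChannel/CalculateUVChannel:
// applies one row of the color transform when the y, u and v rows all have
// the same length, i.e. without I420 chroma subsampling.
void TransformRow(webrtc::ArrayView<const uint8_t> y,
                  webrtc::ArrayView<const uint8_t> u,
                  webrtc::ArrayView<const uint8_t> v,
                  const std::array<float, 4>& coeff,
                  webrtc::ArrayView<uint8_t> output) {
  for (size_t i = 0; i < output.size(); ++i) {
    const float value =
        coeff[0] * y[i] + coeff[1] * u[i] + coeff[2] * v[i] + coeff[3];
    output[i] = static_cast<uint8_t>(std::clamp(value, 0.0f, 255.0f));
  }
}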
@@ -72,11 +72,11 @@ void CalculateYChannel(rtc::ArrayView y_data, // Helper function for AdjustColors(). This functions calculates a single output // row for either u or v, with the given color coefficients. Y, U, and V are // assumed to be the same size, i.e. no subsampling. -void CalculateUVChannel(rtc::ArrayView y_data, - rtc::ArrayView u_data, - rtc::ArrayView v_data, +void CalculateUVChannel(ArrayView y_data, + ArrayView u_data, + ArrayView v_data, const std::array& coeff, - rtc::ArrayView output) { + ArrayView output) { RTC_CHECK_EQ(y_data.size(), u_data.size()); RTC_CHECK_EQ(y_data.size(), v_data.size()); RTC_CHECK_EQ(y_data.size(), output.size()); @@ -92,7 +92,7 @@ void CalculateUVChannel(rtc::ArrayView y_data, // Convert a frame to four vectors consisting of [y, u, v, 1]. std::vector> FlattenYuvData( - const rtc::scoped_refptr& frame) { + const scoped_refptr& frame) { std::vector> result( 4, std::vector(frame->ChromaWidth() * frame->ChromaHeight())); @@ -128,8 +128,8 @@ ColorTransformationMatrix VectorToColorMatrix( } // namespace ColorTransformationMatrix CalculateColorTransformationMatrix( - const rtc::scoped_refptr& reference_frame, - const rtc::scoped_refptr& test_frame) { + const scoped_refptr& reference_frame, + const scoped_refptr& test_frame) { IncrementalLinearLeastSquares incremental_lls; incremental_lls.AddObservations(FlattenYuvData(test_frame), FlattenYuvData(reference_frame)); @@ -137,8 +137,8 @@ ColorTransformationMatrix CalculateColorTransformationMatrix( } ColorTransformationMatrix CalculateColorTransformationMatrix( - const rtc::scoped_refptr

Prefer "ignored" as the variable name to appease Android Studio's "Unused symbol" inspection. + * + *

Example: + * + *

+ *     try (StrictModeContext ignored = StrictModeContext.allowDiskWrites()) {
+ *         return Example.doThingThatRequiresDiskWrites();
+ *     }
+ * 
+ */ +public class StrictModeContext implements Closeable { + private static class Impl extends StrictModeContext { + private final StrictMode.ThreadPolicy mThreadPolicy; + private final StrictMode.VmPolicy mVmPolicy; + + private Impl(StrictMode.ThreadPolicy threadPolicy, StrictMode.VmPolicy vmPolicy) { + mThreadPolicy = threadPolicy; + mVmPolicy = vmPolicy; + } + + private Impl(StrictMode.ThreadPolicy threadPolicy) { + this(threadPolicy, null); + } + + private Impl(StrictMode.VmPolicy vmPolicy) { + this(null, vmPolicy); + } + + @Override + public void close() { + if (mThreadPolicy != null) { + StrictMode.setThreadPolicy(mThreadPolicy); + } + if (mVmPolicy != null) { + StrictMode.setVmPolicy(mVmPolicy); + } + } + } + + /** + * Convenience method for disabling all VM-level StrictMode checks with try-with-resources. + * Includes everything listed here: + * https://developer.android.com/reference/android/os/StrictMode.VmPolicy.Builder.html + */ + public static StrictModeContext allowAllVmPolicies() { + StrictMode.VmPolicy oldPolicy = StrictMode.getVmPolicy(); + StrictMode.setVmPolicy(StrictMode.VmPolicy.LAX); + return new Impl(oldPolicy); + } + + /** + * Convenience method for disabling all thread-level StrictMode checks with try-with-resources. + * Includes everything listed here: + * https://developer.android.com/reference/android/os/StrictMode.ThreadPolicy.Builder.html + */ + public static StrictModeContext allowAllThreadPolicies() { + StrictMode.ThreadPolicy oldPolicy = StrictMode.getThreadPolicy(); + StrictMode.setThreadPolicy(StrictMode.ThreadPolicy.LAX); + return new Impl(oldPolicy); + } + + /** Convenience method for disabling StrictMode for disk-writes with try-with-resources. */ + public static StrictModeContext allowDiskWrites() { + StrictMode.ThreadPolicy oldPolicy = StrictMode.allowThreadDiskWrites(); + return new Impl(oldPolicy); + } + + /** Convenience method for disabling StrictMode for disk-reads with try-with-resources. */ + public static StrictModeContext allowDiskReads() { + StrictMode.ThreadPolicy oldPolicy = StrictMode.allowThreadDiskReads(); + return new Impl(oldPolicy); + } + + /** Convenience method for disabling StrictMode for slow calls with try-with-resources. */ + public static StrictModeContext allowSlowCalls() { + StrictMode.ThreadPolicy oldPolicy = StrictMode.getThreadPolicy(); + StrictMode.setThreadPolicy( + new StrictMode.ThreadPolicy.Builder(oldPolicy).permitCustomSlowCalls().build()); + return new Impl(oldPolicy); + } + + /** + * Convenience method for disabling StrictMode for unbuffered input/output operations with + * try-with-resources. For API level 25- this method will do nothing; because + * StrictMode.ThreadPolicy.Builder#permitUnbufferedIo is added in API level 26. 
+ */ + public static StrictModeContext allowUnbufferedIo() { + StrictMode.ThreadPolicy oldPolicy = StrictMode.getThreadPolicy(); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { + StrictMode.setThreadPolicy( + new StrictMode.ThreadPolicy.Builder(oldPolicy) + .permitUnbufferedIo() + .build()); + } + return new Impl(oldPolicy); + } + + @Override + public void close() {} +} \ No newline at end of file diff --git a/test/audio_decoder_proxy_factory.h b/test/audio_decoder_proxy_factory.h index 95606d6ff7..6128bf85a1 100644 --- a/test/audio_decoder_proxy_factory.h +++ b/test/audio_decoder_proxy_factory.h @@ -11,12 +11,20 @@ #ifndef TEST_AUDIO_DECODER_PROXY_FACTORY_H_ #define TEST_AUDIO_DECODER_PROXY_FACTORY_H_ +#include +#include #include +#include #include #include +#include "api/audio_codecs/audio_codec_pair_id.h" #include "api/audio_codecs/audio_decoder.h" #include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_format.h" +#include "api/environment/environment.h" +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" namespace webrtc { namespace test { @@ -39,9 +47,10 @@ class AudioDecoderProxyFactory : public AudioDecoderFactory { return true; } - std::unique_ptr MakeAudioDecoder( + std::unique_ptr Create( + const Environment& /* env */, const SdpAudioFormat& /* format */, - absl::optional /* codec_pair_id */) override { + std::optional /* codec_pair_id */) override { return std::make_unique(decoder_); } @@ -53,7 +62,7 @@ class AudioDecoderProxyFactory : public AudioDecoderFactory { explicit DecoderProxy(AudioDecoder* decoder) : decoder_(decoder) {} private: - std::vector ParsePayload(rtc::Buffer&& payload, + std::vector ParsePayload(Buffer&& payload, uint32_t timestamp) override { return decoder_->ParsePayload(std::move(payload), timestamp); } @@ -83,7 +92,7 @@ class AudioDecoderProxyFactory : public AudioDecoderFactory { } void GeneratePlc(size_t requested_samples_per_channel, - rtc::BufferT* concealment_audio) override { + BufferT* concealment_audio) override { decoder_->GeneratePlc(requested_samples_per_channel, concealment_audio); } diff --git a/test/call_test.cc b/test/call_test.cc index b8a1cd76b8..37e2ef9f09 100644 --- a/test/call_test.cc +++ b/test/call_test.cc @@ -13,22 +13,24 @@ #include #include +#include "api/audio/audio_device.h" +#include "api/audio/builtin_audio_processing_builder.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" -#include "api/task_queue/default_task_queue_factory.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/task_queue/task_queue_base.h" #include "api/test/create_frame_generator.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" #include "call/fake_network_pipe.h" #include "call/packet_receiver.h" -#include "call/simulated_network.h" -#include "modules/audio_device/include/audio_device.h" #include "modules/audio_device/include/test_audio_device.h" #include "modules/audio_mixer/audio_mixer_impl.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/task_queue_for_test.h" #include "test/fake_encoder.h" +#include "test/network/simulated_network.h" #include "test/rtp_rtcp_observer.h" #include "test/testsupport/file_utils.h" #include "test/video_test_constants.h" @@ -38,23 +40,23 @@ namespace webrtc { namespace test { CallTest::CallTest() - : clock_(Clock::GetRealTimeClock()), - task_queue_factory_(CreateDefaultTaskQueueFactory()), - 
send_event_log_(std::make_unique()), - recv_event_log_(std::make_unique()), + : env_(CreateEnvironment(&field_trials_)), + send_env_(env_), + recv_env_(env_), audio_send_config_(/*send_transport=*/nullptr), audio_send_stream_(nullptr), frame_generator_capturer_(nullptr), - fake_encoder_factory_([this]() { - std::unique_ptr fake_encoder; - if (video_encoder_configs_[0].codec_type == kVideoCodecVP8) { - fake_encoder = std::make_unique(clock_); - } else { - fake_encoder = std::make_unique(clock_); - } - fake_encoder->SetMaxBitrate(fake_encoder_max_bitrate_); - return fake_encoder; - }), + fake_encoder_factory_( + [this](const Environment& env, const SdpVideoFormat& format) { + std::unique_ptr fake_encoder; + if (video_encoder_configs_[0].codec_type == kVideoCodecVP8) { + fake_encoder = std::make_unique(env); + } else { + fake_encoder = std::make_unique(env); + } + fake_encoder->SetMaxBitrate(fake_encoder_max_bitrate_); + return fake_encoder; + }), fake_decoder_factory_([]() { return std::make_unique(); }), bitrate_allocator_factory_(CreateBuiltinVideoBitrateAllocatorFactory()), num_video_streams_(1), @@ -62,12 +64,24 @@ CallTest::CallTest() num_flexfec_streams_(0), audio_decoder_factory_(CreateBuiltinAudioDecoderFactory()), audio_encoder_factory_(CreateBuiltinAudioEncoderFactory()), - task_queue_(task_queue_factory_->CreateTaskQueue( + task_queue_(env_.task_queue_factory().CreateTaskQueue( "CallTestTaskQueue", TaskQueueFactory::Priority::NORMAL)) {} CallTest::~CallTest() = default; +void CallTest::SetSendEventLog(std::unique_ptr event_log) { + EnvironmentFactory f(env_); + f.Set(std::move(event_log)); + send_env_ = f.Create(); +} + +void CallTest::SetRecvEventLog(std::unique_ptr event_log) { + EnvironmentFactory f(env_); + f.Set(std::move(event_log)); + recv_env_ = f.Create(); +} + void CallTest::RegisterRtpExtension(const RtpExtension& extension) { for (const RtpExtension& registered_extension : rtp_extensions_) { if (registered_extension.id == extension.id) { @@ -97,14 +111,15 @@ void CallTest::RunBaseTest(BaseTest* test) { num_audio_streams_ = test->GetNumAudioStreams(); num_flexfec_streams_ = test->GetNumFlexfecStreams(); RTC_DCHECK(num_video_streams_ > 0 || num_audio_streams_ > 0); - Call::Config send_config(send_event_log_.get()); + CallConfig send_config = SendCallConfig(); + CallConfig recv_config = RecvCallConfig(); test->ModifySenderBitrateConfig(&send_config.bitrate_config); if (num_audio_streams_ > 0) { CreateFakeAudioDevices(test->CreateCapturer(), test->CreateRenderer()); test->OnFakeAudioDevicesCreated(fake_send_audio_device_.get(), fake_recv_audio_device_.get()); - apm_send_ = AudioProcessingBuilder().Create(); - apm_recv_ = AudioProcessingBuilder().Create(); + apm_send_ = BuiltinAudioProcessingBuilder().Build(send_config.env); + apm_recv_ = BuiltinAudioProcessingBuilder().Build(recv_config.env); EXPECT_EQ(0, fake_send_audio_device_->Init()); EXPECT_EQ(0, fake_recv_audio_device_->Init()); AudioState::Config audio_state_config; @@ -115,9 +130,8 @@ void CallTest::RunBaseTest(BaseTest* test) { fake_send_audio_device_->RegisterAudioCallback( send_config.audio_state->audio_transport()); } - CreateSenderCall(send_config); + CreateSenderCall(std::move(send_config)); if (test->ShouldCreateReceivers()) { - Call::Config recv_config(recv_event_log_.get()); test->ModifyReceiverBitrateConfig(&recv_config.bitrate_config); if (num_audio_streams_ > 0) { AudioState::Config audio_state_config; @@ -128,7 +142,7 @@ void CallTest::RunBaseTest(BaseTest* test) { 
fake_recv_audio_device_->RegisterAudioCallback( recv_config.audio_state->audio_transport()); } - CreateReceiverCall(recv_config); + CreateReceiverCall(std::move(recv_config)); } test->OnCallsCreated(sender_call_.get(), receiver_call_.get()); CreateReceiveTransport(test->GetReceiveTransportConfig(), test); @@ -206,36 +220,38 @@ void CallTest::RunBaseTest(BaseTest* test) { }); } +CallConfig CallTest::SendCallConfig() const { + CallConfig sender_config(send_env_); + sender_config.network_state_predictor_factory = + network_state_predictor_factory_.get(); + sender_config.network_controller_factory = network_controller_factory_.get(); + return sender_config; +} + +CallConfig CallTest::RecvCallConfig() const { + return CallConfig(recv_env_); +} + void CallTest::CreateCalls() { - CreateCalls(Call::Config(send_event_log_.get()), - Call::Config(recv_event_log_.get())); + CreateCalls(SendCallConfig(), RecvCallConfig()); } -void CallTest::CreateCalls(const Call::Config& sender_config, - const Call::Config& receiver_config) { - CreateSenderCall(sender_config); - CreateReceiverCall(receiver_config); +void CallTest::CreateCalls(CallConfig sender_config, + CallConfig receiver_config) { + CreateSenderCall(std::move(sender_config)); + CreateReceiverCall(std::move(receiver_config)); } void CallTest::CreateSenderCall() { - CreateSenderCall(Call::Config(send_event_log_.get())); + CreateSenderCall(SendCallConfig()); } -void CallTest::CreateSenderCall(const Call::Config& config) { - auto sender_config = config; - sender_config.task_queue_factory = task_queue_factory_.get(); - sender_config.network_state_predictor_factory = - network_state_predictor_factory_.get(); - sender_config.network_controller_factory = network_controller_factory_.get(); - sender_config.trials = &field_trials_; - sender_call_.reset(Call::Create(sender_config)); +void CallTest::CreateSenderCall(CallConfig config) { + sender_call_ = Call::Create(std::move(config)); } -void CallTest::CreateReceiverCall(const Call::Config& config) { - auto receiver_config = config; - receiver_config.task_queue_factory = task_queue_factory_.get(); - receiver_config.trials = &field_trials_; - receiver_call_.reset(Call::Create(receiver_config)); +void CallTest::CreateReceiverCall(CallConfig config) { + receiver_call_ = Call::Create(std::move(config)); } void CallTest::DestroyCalls() { @@ -359,15 +375,15 @@ void CallTest::CreateMatchingVideoReceiveConfigs( const VideoSendStream::Config& video_send_config, Transport* rtcp_send_transport) { CreateMatchingVideoReceiveConfigs(video_send_config, rtcp_send_transport, - &fake_decoder_factory_, absl::nullopt, - false, 0); + &fake_decoder_factory_, std::nullopt, false, + 0); } void CallTest::CreateMatchingVideoReceiveConfigs( const VideoSendStream::Config& video_send_config, Transport* rtcp_send_transport, VideoDecoderFactory* decoder_factory, - absl::optional decode_sub_stream, + std::optional decode_sub_stream, bool receiver_reference_time_report, int rtp_history_ms) { AddMatchingVideoReceiveConfigs( @@ -381,7 +397,7 @@ void CallTest::AddMatchingVideoReceiveConfigs( const VideoSendStream::Config& video_send_config, Transport* rtcp_send_transport, VideoDecoderFactory* decoder_factory, - absl::optional decode_sub_stream, + std::optional decode_sub_stream, bool receiver_reference_time_report, int rtp_history_ms) { RTC_DCHECK(!video_send_config.rtp.ssrcs.empty()); @@ -441,7 +457,7 @@ void CallTest::CreateMatchingAudioConfigs(Transport* transport, AudioReceiveStreamInterface::Config CallTest::CreateMatchingAudioConfig( const 
AudioSendStream::Config& send_config, - rtc::scoped_refptr audio_decoder_factory, + scoped_refptr audio_decoder_factory, Transport* transport, std::string sync_group) { AudioReceiveStreamInterface::Config audio_config; @@ -487,9 +503,9 @@ void CallTest::CreateFrameGeneratorCapturerWithDrift(Clock* clock, auto frame_generator_capturer = std::make_unique( clock, - test::CreateSquareFrameGenerator(width, height, absl::nullopt, - absl::nullopt), - framerate * speed, *task_queue_factory_); + test::CreateSquareFrameGenerator(width, height, std::nullopt, + std::nullopt), + framerate * speed, env_.task_queue_factory()); frame_generator_capturer_ = frame_generator_capturer.get(); frame_generator_capturer->Init(); video_sources_.push_back(std::move(frame_generator_capturer)); @@ -502,10 +518,10 @@ void CallTest::CreateFrameGeneratorCapturer(int framerate, video_sources_.clear(); auto frame_generator_capturer = std::make_unique( - clock_, - test::CreateSquareFrameGenerator(width, height, absl::nullopt, - absl::nullopt), - framerate, *task_queue_factory_); + &env_.clock(), + test::CreateSquareFrameGenerator(width, height, std::nullopt, + std::nullopt), + framerate, env_.task_queue_factory()); frame_generator_capturer_ = frame_generator_capturer.get(); frame_generator_capturer->Init(); video_sources_.push_back(std::move(frame_generator_capturer)); @@ -516,9 +532,9 @@ void CallTest::CreateFakeAudioDevices( std::unique_ptr capturer, std::unique_ptr renderer) { fake_send_audio_device_ = TestAudioDeviceModule::Create( - task_queue_factory_.get(), std::move(capturer), nullptr, 1.f); + &env_.task_queue_factory(), std::move(capturer), nullptr, 1.f); fake_recv_audio_device_ = TestAudioDeviceModule::Create( - task_queue_factory_.get(), nullptr, std::move(renderer), 1.f); + &env_.task_queue_factory(), nullptr, std::move(renderer), 1.f); } void CallTest::CreateVideoStreams() { @@ -558,7 +574,7 @@ void CallTest::CreateVideoSendStreams() { if (fec_controller_factory_.get()) { video_send_streams_[i] = sender_call_->CreateVideoSendStream( video_send_configs_[i].Copy(), video_encoder_configs_[i].Copy(), - fec_controller_factory_->CreateFecController()); + fec_controller_factory_->CreateFecController(send_env_)); } else { video_send_streams_[i] = sender_call_->CreateVideoSendStream( video_send_configs_[i].Copy(), video_encoder_configs_[i].Copy()); @@ -643,9 +659,7 @@ void CallTest::StartVideoSources() { void CallTest::StartVideoStreams() { StartVideoSources(); for (size_t i = 0; i < video_send_streams_.size(); ++i) { - std::vector active_rtp_streams( - video_send_configs_[i].rtp.ssrcs.size(), true); - video_send_streams_[i]->StartPerRtpStream(active_rtp_streams); + video_send_streams_[i]->Start(); } for (VideoReceiveStreamInterface* video_recv_stream : video_receive_streams_) video_recv_stream->Start(); @@ -731,20 +745,20 @@ void CallTest::OnRtpPacket(const RtpPacketReceived& packet) { flexfec_recv_stream->OnRtpPacket(packet); } -absl::optional CallTest::GetRtpExtensionByUri( +std::optional CallTest::GetRtpExtensionByUri( const std::string& uri) const { for (const auto& extension : rtp_extensions_) { if (extension.uri == uri) { return extension; } } - return absl::nullopt; + return std::nullopt; } void CallTest::AddRtpExtensionByUri( const std::string& uri, std::vector* extensions) const { - const absl::optional extension = GetRtpExtensionByUri(uri); + const std::optional extension = GetRtpExtensionByUri(uri); if (extension) { extensions->push_back(*extension); } diff --git a/test/call_test.h b/test/call_test.h index 
08d0e49a68..37c27c8429 100644 --- a/test/call_test.h +++ b/test/call_test.h @@ -12,11 +12,13 @@ #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" +#include "api/audio/audio_device.h" +#include "api/environment/environment.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" @@ -26,7 +28,6 @@ #include "api/units/time_delta.h" #include "api/video/video_bitrate_allocator_factory.h" #include "call/call.h" -#include "modules/audio_device/include/audio_device.h" #include "modules/audio_device/include/test_audio_device.h" #include "test/encoder_settings.h" #include "test/fake_decoder.h" @@ -52,9 +53,14 @@ class CallTest : public ::testing::Test, public RtpPacketSinkInterface { static const std::map payload_type_map_; protected: + const Environment& env() const { return env_; } + + void SetSendEventLog(std::unique_ptr event_log); + void SetRecvEventLog(std::unique_ptr event_log); + void RegisterRtpExtension(const RtpExtension& extension); // Returns header extensions that can be parsed by the transport. - rtc::ArrayView GetRegisteredExtensions() { + ArrayView GetRegisteredExtensions() { return rtp_extensions_; } @@ -62,12 +68,14 @@ class CallTest : public ::testing::Test, public RtpPacketSinkInterface { // to simplify test code. void RunBaseTest(BaseTest* test); + CallConfig SendCallConfig() const; + CallConfig RecvCallConfig() const; + void CreateCalls(); - void CreateCalls(const Call::Config& sender_config, - const Call::Config& receiver_config); + void CreateCalls(CallConfig sender_config, CallConfig receiver_config); void CreateSenderCall(); - void CreateSenderCall(const Call::Config& config); - void CreateReceiverCall(const Call::Config& config); + void CreateSenderCall(CallConfig config); + void CreateReceiverCall(CallConfig config); void DestroyCalls(); void CreateVideoSendConfig(VideoSendStream::Config* video_config, @@ -107,7 +115,7 @@ class CallTest : public ::testing::Test, public RtpPacketSinkInterface { const VideoSendStream::Config& video_send_config, Transport* rtcp_send_transport, VideoDecoderFactory* decoder_factory, - absl::optional decode_sub_stream, + std::optional decode_sub_stream, bool receiver_reference_time_report, int rtp_history_ms); void AddMatchingVideoReceiveConfigs( @@ -115,7 +123,7 @@ class CallTest : public ::testing::Test, public RtpPacketSinkInterface { const VideoSendStream::Config& video_send_config, Transport* rtcp_send_transport, VideoDecoderFactory* decoder_factory, - absl::optional decode_sub_stream, + std::optional decode_sub_stream, bool receiver_reference_time_report, int rtp_history_ms); @@ -123,7 +131,7 @@ class CallTest : public ::testing::Test, public RtpPacketSinkInterface { void CreateMatchingAudioConfigs(Transport* transport, std::string sync_group); static AudioReceiveStreamInterface::Config CreateMatchingAudioConfig( const AudioSendStream::Config& send_config, - rtc::scoped_refptr audio_decoder_factory, + scoped_refptr audio_decoder_factory, Transport* transport, std::string sync_group); void CreateMatchingFecConfig( @@ -185,13 +193,11 @@ class CallTest : public ::testing::Test, public RtpPacketSinkInterface { void OnRtpPacket(const RtpPacketReceived& packet) override; test::RunLoop loop_; - - Clock* const clock_; test::ScopedKeyValueConfig field_trials_; + Environment env_; + Environment send_env_; + Environment recv_env_; - std::unique_ptr task_queue_factory_; - std::unique_ptr send_event_log_; - 
std::unique_ptr recv_event_log_; std::unique_ptr sender_call_; std::unique_ptr send_transport_; SimulatedNetworkInterface* send_simulated_network_ = nullptr; @@ -230,12 +236,12 @@ class CallTest : public ::testing::Test, public RtpPacketSinkInterface { size_t num_video_streams_; size_t num_audio_streams_; size_t num_flexfec_streams_; - rtc::scoped_refptr audio_decoder_factory_; - rtc::scoped_refptr audio_encoder_factory_; + scoped_refptr audio_decoder_factory_; + scoped_refptr audio_encoder_factory_; test::FakeVideoRenderer fake_renderer_; private: - absl::optional GetRtpExtensionByUri( + std::optional GetRtpExtensionByUri( const std::string& uri) const; void AddRtpExtensionByUri(const std::string& uri, @@ -243,10 +249,10 @@ class CallTest : public ::testing::Test, public RtpPacketSinkInterface { std::unique_ptr task_queue_; std::vector rtp_extensions_; - rtc::scoped_refptr apm_send_; - rtc::scoped_refptr apm_recv_; - rtc::scoped_refptr fake_send_audio_device_; - rtc::scoped_refptr fake_recv_audio_device_; + scoped_refptr apm_send_; + scoped_refptr apm_recv_; + scoped_refptr fake_send_audio_device_; + scoped_refptr fake_recv_audio_device_; }; class BaseTest : public RtpRtcpObserver { diff --git a/test/configurable_frame_size_encoder.cc b/test/configurable_frame_size_encoder.cc index 44a00bcacc..e18a87cd7e 100644 --- a/test/configurable_frame_size_encoder.cc +++ b/test/configurable_frame_size_encoder.cc @@ -53,7 +53,7 @@ int32_t ConfigurableFrameSizeEncoder::Encode( encodedImage._encodedHeight = inputImage.height(); encodedImage._encodedWidth = inputImage.width(); encodedImage._frameType = VideoFrameType::kVideoFrameKey; - encodedImage.SetTimestamp(inputImage.timestamp()); + encodedImage.SetRtpTimestamp(inputImage.rtp_timestamp()); encodedImage.capture_time_ms_ = inputImage.render_time_ms(); CodecSpecificInfo specific{}; specific.codecType = codec_type_; diff --git a/test/configurable_frame_size_encoder.h b/test/configurable_frame_size_encoder.h index 747fc09d4b..1fa032a61b 100644 --- a/test/configurable_frame_size_encoder.h +++ b/test/configurable_frame_size_encoder.h @@ -16,9 +16,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_frame.h" #include "api/video_codecs/video_codec.h" @@ -60,7 +60,7 @@ class ConfigurableFrameSizeEncoder : public VideoEncoder { private: EncodedImageCallback* callback_; - absl::optional> post_encode_callback_; + std::optional> post_encode_callback_; size_t current_frame_size_; VideoCodecType codec_type_; diff --git a/test/create_frame_generator_capturer.cc b/test/create_frame_generator_capturer.cc index a4088d90e5..0eb2e69265 100644 --- a/test/create_frame_generator_capturer.cc +++ b/test/create_frame_generator_capturer.cc @@ -46,42 +46,46 @@ std::string TransformFilePath(std::string path) { std::unique_ptr CreateFrameGeneratorCapturer( Clock* clock, TaskQueueFactory& task_queue_factory, - FrameGeneratorCapturerConfig::SquaresVideo config) { + FrameGeneratorCapturerConfig::SquaresVideo config, + bool allow_zero_hertz) { return std::make_unique( clock, CreateSquareFrameGenerator(config.width, config.height, config.pixel_format, config.num_squares), - config.framerate, task_queue_factory); + config.framerate, task_queue_factory, allow_zero_hertz); } std::unique_ptr CreateFrameGeneratorCapturer( Clock* clock, TaskQueueFactory& task_queue_factory, - FrameGeneratorCapturerConfig::SquareSlides config) { + FrameGeneratorCapturerConfig::SquareSlides config, + bool 
allow_zero_hertz) { return std::make_unique( clock, CreateSlideFrameGenerator( config.width, config.height, /*frame_repeat_count*/ config.change_interval.seconds() * config.framerate), - config.framerate, task_queue_factory); + config.framerate, task_queue_factory, allow_zero_hertz); } std::unique_ptr CreateFrameGeneratorCapturer( Clock* clock, TaskQueueFactory& task_queue_factory, - FrameGeneratorCapturerConfig::VideoFile config) { + FrameGeneratorCapturerConfig::VideoFile config, + bool allow_zero_hertz) { RTC_CHECK(config.width && config.height); return std::make_unique( clock, CreateFromYuvFileFrameGenerator({TransformFilePath(config.name)}, config.width, config.height, /*frame_repeat_count*/ 1), - config.framerate, task_queue_factory); + config.framerate, task_queue_factory, allow_zero_hertz); } std::unique_ptr CreateFrameGeneratorCapturer( Clock* clock, TaskQueueFactory& task_queue_factory, - FrameGeneratorCapturerConfig::ImageSlides config) { + FrameGeneratorCapturerConfig::ImageSlides config, + bool allow_zero_hertz) { std::unique_ptr slides_generator; std::vector paths = config.paths; for (std::string& path : paths) @@ -105,7 +109,8 @@ std::unique_ptr CreateFrameGeneratorCapturer( config.framerate); } return std::make_unique( - clock, std::move(slides_generator), config.framerate, task_queue_factory); + clock, std::move(slides_generator), config.framerate, task_queue_factory, + allow_zero_hertz); } std::unique_ptr CreateFrameGeneratorCapturer( @@ -113,19 +118,22 @@ std::unique_ptr CreateFrameGeneratorCapturer( TaskQueueFactory& task_queue_factory, const FrameGeneratorCapturerConfig& config) { if (config.video_file) { - return CreateFrameGeneratorCapturer(clock, task_queue_factory, - *config.video_file); + return CreateFrameGeneratorCapturer( + clock, task_queue_factory, *config.video_file, config.allow_zero_hertz); } else if (config.image_slides) { return CreateFrameGeneratorCapturer(clock, task_queue_factory, - *config.image_slides); + *config.image_slides, + config.allow_zero_hertz); } else if (config.squares_slides) { return CreateFrameGeneratorCapturer(clock, task_queue_factory, - *config.squares_slides); + *config.squares_slides, + config.allow_zero_hertz); } else { return CreateFrameGeneratorCapturer( clock, task_queue_factory, config.squares_video.value_or( - FrameGeneratorCapturerConfig::SquaresVideo())); + FrameGeneratorCapturerConfig::SquaresVideo()), + config.allow_zero_hertz); } } diff --git a/test/create_frame_generator_capturer.h b/test/create_frame_generator_capturer.h index 0d8ec71df3..5726013859 100644 --- a/test/create_frame_generator_capturer.h +++ b/test/create_frame_generator_capturer.h @@ -11,10 +11,10 @@ #define TEST_CREATE_FRAME_GENERATOR_CAPTURER_H_ #include +#include #include #include -#include "absl/types/optional.h" #include "api/task_queue/task_queue_factory.h" #include "api/test/frame_generator_interface.h" #include "api/units/time_delta.h" @@ -26,13 +26,13 @@ namespace test { namespace frame_gen_cap_impl { template -class AutoOpt : public absl::optional { +class AutoOpt : public std::optional { public: - using absl::optional::optional; + using std::optional::optional; T* operator->() { - if (!absl::optional::has_value()) + if (!std::optional::has_value()) this->emplace(T()); - return absl::optional::operator->(); + return std::optional::operator->(); } }; } // namespace frame_gen_cap_impl @@ -67,8 +67,8 @@ struct FrameGeneratorCapturerConfig { TimeDelta change_interval = TimeDelta::Seconds(10); struct Crop { TimeDelta scroll_duration = 
TimeDelta::Seconds(0); - absl::optional width; - absl::optional height; + std::optional width; + std::optional height; } crop; int width = 1850; int height = 1110; @@ -84,28 +84,9 @@ struct FrameGeneratorCapturerConfig { frame_gen_cap_impl::AutoOpt squares_slides; frame_gen_cap_impl::AutoOpt video_file; frame_gen_cap_impl::AutoOpt image_slides; + bool allow_zero_hertz = false; }; -std::unique_ptr CreateFrameGeneratorCapturer( - Clock* clock, - TaskQueueFactory& task_queue_factory, - FrameGeneratorCapturerConfig::SquaresVideo config); - -std::unique_ptr CreateFrameGeneratorCapturer( - Clock* clock, - TaskQueueFactory& task_queue_factory, - FrameGeneratorCapturerConfig::SquareSlides config); - -std::unique_ptr CreateFrameGeneratorCapturer( - Clock* clock, - TaskQueueFactory& task_queue_factory, - FrameGeneratorCapturerConfig::VideoFile config); - -std::unique_ptr CreateFrameGeneratorCapturer( - Clock* clock, - TaskQueueFactory& task_queue_factory, - FrameGeneratorCapturerConfig::ImageSlides config); - std::unique_ptr CreateFrameGeneratorCapturer( Clock* clock, TaskQueueFactory& task_queue_factory, diff --git a/test/direct_transport.cc b/test/direct_transport.cc index 3aa85082c4..fe68dd7048 100644 --- a/test/direct_transport.cc +++ b/test/direct_transport.cc @@ -9,14 +9,32 @@ */ #include "test/direct_transport.h" +#include +#include +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/call/transport.h" #include "api/media_types.h" +#include "api/rtp_headers.h" +#include "api/rtp_parameters.h" #include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "call/call.h" #include "call/fake_network_pipe.h" +#include "call/simulated_packet_receiver.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_util.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/time_utils.h" @@ -28,7 +46,7 @@ Demuxer::Demuxer(const std::map& payload_type_map) MediaType Demuxer::GetMediaType(const uint8_t* packet_data, const size_t packet_length) const { - if (IsRtpPacket(rtc::MakeArrayView(packet_data, packet_length))) { + if (IsRtpPacket(MakeArrayView(packet_data, packet_length))) { RTC_CHECK_GE(packet_length, 2); const uint8_t payload_type = packet_data[1] & 0x7f; std::map::const_iterator it = @@ -45,8 +63,8 @@ DirectTransport::DirectTransport( std::unique_ptr pipe, Call* send_call, const std::map& payload_type_map, - rtc::ArrayView audio_extensions, - rtc::ArrayView video_extensions) + ArrayView audio_extensions, + ArrayView video_extensions) : send_call_(send_call), task_queue_(task_queue), demuxer_(payload_type_map), @@ -64,14 +82,14 @@ void DirectTransport::SetReceiver(PacketReceiver* receiver) { fake_network_->SetReceiver(receiver); } -bool DirectTransport::SendRtp(rtc::ArrayView data, +bool DirectTransport::SendRtp(ArrayView data, const PacketOptions& options) { if (send_call_) { - rtc::SentPacket sent_packet(options.packet_id, rtc::TimeMillis()); + SentPacketInfo sent_packet(options.packet_id, TimeMillis()); sent_packet.info.included_in_feedback = options.included_in_feedback; sent_packet.info.included_in_allocation = options.included_in_allocation; 
sent_packet.info.packet_size_bytes = data.size(); - sent_packet.info.packet_type = rtc::PacketType::kData; + sent_packet.info.packet_type = PacketType::kData; send_call_->OnSentPacket(sent_packet); } @@ -87,11 +105,11 @@ bool DirectTransport::SendRtp(rtc::ArrayView data, default: RTC_CHECK_NOTREACHED(); } - RtpPacketReceived packet(extensions, Timestamp::Micros(rtc::TimeMicros())); + RtpPacketReceived packet(extensions, Timestamp::Micros(TimeMicros())); if (media_type == MediaType::VIDEO) { packet.set_payload_type_frequency(kVideoPayloadTypeFrequency); } - RTC_CHECK(packet.Parse(rtc::CopyOnWriteBuffer(data))); + RTC_CHECK(packet.Parse(CopyOnWriteBuffer(data))); fake_network_->DeliverRtpPacket( media_type, std::move(packet), [](const RtpPacketReceived& packet) { return false; }); @@ -102,8 +120,8 @@ bool DirectTransport::SendRtp(rtc::ArrayView data, return true; } -bool DirectTransport::SendRtcp(rtc::ArrayView data) { - fake_network_->DeliverRtcpPacket(rtc::CopyOnWriteBuffer(data)); +bool DirectTransport::SendRtcp(ArrayView data) { + fake_network_->DeliverRtcpPacket(CopyOnWriteBuffer(data)); MutexLock lock(&process_lock_); if (!next_process_task_.Running()) ProcessPackets(); @@ -123,9 +141,9 @@ void DirectTransport::Start() { } void DirectTransport::ProcessPackets() { - absl::optional initial_delay_ms = + std::optional initial_delay_ms = fake_network_->TimeUntilNextProcess(); - if (initial_delay_ms == absl::nullopt) + if (initial_delay_ms == std::nullopt) return; next_process_task_ = RepeatingTaskHandle::DelayedStart( diff --git a/test/direct_transport.h b/test/direct_transport.h index 1e14c1c1f4..4cf3f262b6 100644 --- a/test/direct_transport.h +++ b/test/direct_transport.h @@ -48,8 +48,8 @@ class DirectTransport : public Transport { std::unique_ptr pipe, Call* send_call, const std::map& payload_type_map, - rtc::ArrayView audio_extensions, - rtc::ArrayView video_extensions); + ArrayView audio_extensions, + ArrayView video_extensions); ~DirectTransport() override; @@ -61,9 +61,9 @@ class DirectTransport : public Transport { using Transport::SendRtcp; using Transport::SendRtp; - bool SendRtp(rtc::ArrayView data, + bool SendRtp(ArrayView data, const PacketOptions& options) override; - bool SendRtcp(rtc::ArrayView data) override; + bool SendRtcp(ArrayView data) override; int GetAverageDelayMs(); diff --git a/test/encoder_settings.cc b/test/encoder_settings.cc index f5b298b107..d5009ca9ef 100644 --- a/test/encoder_settings.cc +++ b/test/encoder_settings.cc @@ -106,6 +106,7 @@ std::vector CreateVideoStreams( DefaultVideoStreamFactory::DefaultVideoStreamFactory() {} std::vector DefaultVideoStreamFactory::CreateEncoderStreams( + const FieldTrialsView& /*field_trials*/, int frame_width, int frame_height, const webrtc::VideoEncoderConfig& encoder_config) { @@ -120,7 +121,7 @@ void FillEncoderConfiguration(VideoCodecType codec_type, configuration->codec_type = codec_type; configuration->number_of_streams = num_streams; configuration->video_stream_factory = - rtc::make_ref_counted(); + make_ref_counted(); configuration->max_bitrate_bps = 0; configuration->frame_drop_enabled = true; configuration->simulcast_layers = std::vector(num_streams); diff --git a/test/encoder_settings.h b/test/encoder_settings.h index 6dbad0fee2..aa3954a381 100644 --- a/test/encoder_settings.h +++ b/test/encoder_settings.h @@ -35,6 +35,7 @@ class DefaultVideoStreamFactory private: std::vector CreateEncoderStreams( + const FieldTrialsView& field_trials, int frame_width, int frame_height, const webrtc::VideoEncoderConfig& 
encoder_config) override; diff --git a/test/fake_decoder.cc b/test/fake_decoder.cc index b5fd15bf39..36c6dd5c9a 100644 --- a/test/fake_decoder.cc +++ b/test/fake_decoder.cc @@ -15,13 +15,13 @@ #include #include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_factory.h" #include "api/video/i420_buffer.h" #include "api/video/video_frame.h" #include "api/video/video_frame_buffer.h" #include "api/video/video_rotation.h" #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/checks.h" -#include "rtc_base/task_queue.h" #include "rtc_base/time_utils.h" namespace webrtc { @@ -40,21 +40,20 @@ bool FakeDecoder::Configure(const Settings& settings) { return true; } -int32_t FakeDecoder::Decode(const EncodedImage& input, - int64_t render_time_ms) { +int32_t FakeDecoder::Decode(const EncodedImage& input, int64_t render_time_ms) { if (input._encodedWidth > 0 && input._encodedHeight > 0) { width_ = input._encodedWidth; height_ = input._encodedHeight; } - rtc::scoped_refptr buffer = I420Buffer::Create(width_, height_); + scoped_refptr buffer = I420Buffer::Create(width_, height_); I420Buffer::SetBlack(buffer.get()); VideoFrame frame = VideoFrame::Builder() .set_video_frame_buffer(buffer) .set_rotation(webrtc::kVideoRotation_0) .set_timestamp_ms(render_time_ms) .build(); - frame.set_timestamp(input.Timestamp()); + frame.set_rtp_timestamp(input.RtpTimestamp()); frame.set_ntp_time_ms(input.ntp_time_ms_); if (decode_delay_ms_ == 0 || !task_queue_) { diff --git a/test/fake_decoder.h b/test/fake_decoder.h index e14eae0228..8487e36086 100644 --- a/test/fake_decoder.h +++ b/test/fake_decoder.h @@ -34,8 +34,7 @@ class FakeDecoder : public VideoDecoder { bool Configure(const Settings& settings) override; - int32_t Decode(const EncodedImage& input, - int64_t render_time_ms) override; + int32_t Decode(const EncodedImage& input, int64_t render_time_ms) override; int32_t RegisterDecodeCompleteCallback( DecodedImageCallback* callback) override; @@ -62,8 +61,7 @@ class FakeH264Decoder : public FakeDecoder { public: virtual ~FakeH264Decoder() {} - int32_t Decode(const EncodedImage& input, - int64_t render_time_ms) override; + int32_t Decode(const EncodedImage& input, int64_t render_time_ms) override; }; } // namespace test diff --git a/test/fake_encoded_frame.cc b/test/fake_encoded_frame.cc index 32fa5d8ccf..7a74bf98e9 100644 --- a/test/fake_encoded_frame.cc +++ b/test/fake_encoded_frame.cc @@ -15,17 +15,6 @@ #include "api/video/video_frame_type.h" namespace webrtc { - -void PrintTo(const EncodedFrame& frame, - std::ostream* os) /* no-presubmit-check TODO(webrtc:8982) */ { - *os << "EncodedFrame with id=" << frame.Id() << " rtp=" << frame.Timestamp() - << " size=" << frame.size() << " refs=["; - for (size_t ref = 0; ref < frame.num_references; ++ref) { - *os << frame.references[ref] << ","; - } - *os << "]"; -} - namespace test { int64_t FakeEncodedFrame::ReceivedTime() const { @@ -94,7 +83,7 @@ std::unique_ptr FakeFrameBuilder::Build() { frame->SetEncodedData(EncodedImageBuffer::Create(size_)); if (rtp_timestamp_) - frame->SetTimestamp(*rtp_timestamp_); + frame->SetRtpTimestamp(*rtp_timestamp_); if (frame_id_) frame->SetId(*frame_id_); if (playout_delay_) diff --git a/test/fake_encoded_frame.h b/test/fake_encoded_frame.h index a5b2aca4a1..cfac3d9966 100644 --- a/test/fake_encoded_frame.h +++ b/test/fake_encoded_frame.h @@ -12,7 +12,6 @@ #define TEST_FAKE_ENCODED_FRAME_H_ #include -#include // no-presubmit-check TODO(webrtc:8982) #include #include "api/rtp_packet_infos.h" @@ -21,11 +20,6 
@@ #include "test/gmock.h" namespace webrtc { - -// For test printing. -void PrintTo(const EncodedFrame& frame, - std::ostream* os); // no-presubmit-check TODO(webrtc:8982) - namespace test { class FakeEncodedFrame : public EncodedFrame { @@ -51,7 +45,7 @@ MATCHER_P(FrameWithSize, id, "") { } MATCHER_P(RtpTimestamp, ts, "") { - return ts == arg.Timestamp(); + return ts == arg.RtpTimestamp(); } class FakeFrameBuilder { @@ -71,15 +65,15 @@ class FakeFrameBuilder { std::unique_ptr Build(); private: - absl::optional rtp_timestamp_; - absl::optional frame_id_; - absl::optional playout_delay_; - absl::optional spatial_layer_; - absl::optional received_time_; - absl::optional payload_type_; - absl::optional ntp_time_; - absl::optional rotation_; - absl::optional packet_infos_; + std::optional rtp_timestamp_; + std::optional frame_id_; + std::optional playout_delay_; + std::optional spatial_layer_; + std::optional received_time_; + std::optional payload_type_; + std::optional ntp_time_; + std::optional rotation_; + std::optional packet_infos_; std::vector references_; bool last_spatial_layer_ = false; size_t size_ = 10; diff --git a/test/fake_encoder.cc b/test/fake_encoder.cc index 1afe1f8a0b..15e2702c79 100644 --- a/test/fake_encoder.cc +++ b/test/fake_encoder.cc @@ -17,6 +17,8 @@ #include #include +#include "api/environment/environment.h" +#include "api/task_queue/task_queue_factory.h" #include "api/video/video_content_type.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" #include "modules/video_coding/include/video_codec_interface.h" @@ -47,8 +49,8 @@ void WriteCounter(unsigned char* payload, uint32_t counter) { } // namespace -FakeEncoder::FakeEncoder(Clock* clock) - : clock_(clock), +FakeEncoder::FakeEncoder(const Environment& env) + : env_(env), num_initializations_(0), callback_(nullptr), max_target_bitrate_kbps_(-1), @@ -103,7 +105,7 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image, RateControlParameters rates; bool keyframe; uint32_t counter; - absl::optional qp; + std::optional qp; { MutexLock lock(&mutex_); max_framerate = config_.maxFramerate; @@ -141,7 +143,7 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image, EncodedImage encoded; encoded.SetEncodedData(buffer); - encoded.SetTimestamp(input_image.timestamp()); + encoded.SetRtpTimestamp(input_image.rtp_timestamp()); encoded._frameType = frame_info.keyframe ? 
VideoFrameType::kVideoFrameKey : VideoFrameType::kVideoFrameDelta; encoded._encodedWidth = simulcast_streams[i].width; @@ -149,6 +151,7 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image, if (qp) encoded.qp_ = *qp; encoded.SetSimulcastIndex(i); + encoded.SetTemporalIndex(frame_info.layers[i].temporal_id); CodecSpecificInfo codec_specific = EncodeHook(encoded, buffer); if (callback->OnEncodedImage(encoded, &codec_specific).error != @@ -161,7 +164,7 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image, CodecSpecificInfo FakeEncoder::EncodeHook( EncodedImage& encoded_image, - rtc::scoped_refptr buffer) { + scoped_refptr buffer) { CodecSpecificInfo codec_specific; codec_specific.codecType = kVideoCodecGeneric; return codec_specific; @@ -266,7 +269,7 @@ void FakeEncoder::SetRatesLocked(const RateControlParameters& parameters) { uint32_t bitrate = current_rate_settings_.bitrate.GetBitrate( spatial_idx, temporal_idx); bitrate = static_cast( - (bitrate* int64_t{max_target_bitrate_kbps_}) / + (bitrate * int64_t{max_target_bitrate_kbps_}) / allocated_bitrate_kbps); current_rate_settings_.bitrate.SetBitrate(spatial_idx, temporal_idx, bitrate); @@ -309,12 +312,12 @@ const VideoCodec& FakeEncoder::config() const { return config_; } -FakeH264Encoder::FakeH264Encoder(Clock* clock) - : FakeEncoder(clock), idr_counter_(0) {} +FakeH264Encoder::FakeH264Encoder(const Environment& env) + : FakeEncoder(env), idr_counter_(0) {} CodecSpecificInfo FakeH264Encoder::EncodeHook( EncodedImage& encoded_image, - rtc::scoped_refptr buffer) { + scoped_refptr buffer) { static constexpr std::array kStartCode = {0, 0, 1}; const size_t kSpsSize = 8; const size_t kPpsSize = 11; @@ -361,8 +364,8 @@ CodecSpecificInfo FakeH264Encoder::EncodeHook( return codec_specific; } -DelayedEncoder::DelayedEncoder(Clock* clock, int delay_ms) - : test::FakeEncoder(clock), delay_ms_(delay_ms) { +DelayedEncoder::DelayedEncoder(const Environment& env, int delay_ms) + : test::FakeEncoder(env), delay_ms_(delay_ms) { // The encoder could be created on a different thread than // it is being used on. 
sequence_checker_.Detach(); @@ -383,10 +386,8 @@ int32_t DelayedEncoder::Encode(const VideoFrame& input_image, } MultithreadedFakeH264Encoder::MultithreadedFakeH264Encoder( - Clock* clock, - TaskQueueFactory* task_queue_factory) - : test::FakeH264Encoder(clock), - task_queue_factory_(task_queue_factory), + const Environment& env) + : test::FakeH264Encoder(env), current_queue_(0), queue1_(nullptr), queue2_(nullptr) { @@ -399,9 +400,9 @@ int32_t MultithreadedFakeH264Encoder::InitEncode(const VideoCodec* config, const Settings& settings) { RTC_DCHECK_RUN_ON(&sequence_checker_); - queue1_ = task_queue_factory_->CreateTaskQueue( + queue1_ = env_.task_queue_factory().CreateTaskQueue( "Queue 1", TaskQueueFactory::Priority::NORMAL); - queue2_ = task_queue_factory_->CreateTaskQueue( + queue2_ = env_.task_queue_factory().CreateTaskQueue( "Queue 2", TaskQueueFactory::Priority::NORMAL); return FakeH264Encoder::InitEncode(config, settings); diff --git a/test/fake_encoder.h b/test/fake_encoder.h index b804f2ce35..e77f8aad6c 100644 --- a/test/fake_encoder.h +++ b/test/fake_encoder.h @@ -15,12 +15,13 @@ #include #include +#include #include #include "absl/strings/string_view.h" +#include "api/environment/environment.h" #include "api/fec_controller_override.h" #include "api/sequence_checker.h" -#include "api/task_queue/task_queue_factory.h" #include "api/video/encoded_image.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_frame.h" @@ -36,7 +37,7 @@ namespace test { class FakeEncoder : public VideoEncoder { public: - explicit FakeEncoder(Clock* clock); + explicit FakeEncoder(const Environment& env_); virtual ~FakeEncoder() = default; // Sets max bitrate. Not thread-safe, call before registering the encoder. @@ -94,13 +95,13 @@ class FakeEncoder : public VideoEncoder { // `buffer`. virtual CodecSpecificInfo EncodeHook( EncodedImage& encoded_image, - rtc::scoped_refptr buffer); + scoped_refptr buffer); void SetRatesLocked(const RateControlParameters& parameters) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + const Environment env_; FrameInfo last_frame_info_ RTC_GUARDED_BY(mutex_); - Clock* const clock_; VideoCodec config_ RTC_GUARDED_BY(mutex_); int num_initializations_ RTC_GUARDED_BY(mutex_); @@ -111,8 +112,8 @@ class FakeEncoder : public VideoEncoder { uint32_t counter_ RTC_GUARDED_BY(mutex_); mutable Mutex mutex_; bool used_layers_[kMaxSimulcastStreams]; - absl::optional qp_ RTC_GUARDED_BY(mutex_); - absl::optional implementation_name_ RTC_GUARDED_BY(mutex_); + std::optional qp_ RTC_GUARDED_BY(mutex_); + std::optional implementation_name_ RTC_GUARDED_BY(mutex_); // Current byte debt to be payed over a number of frames. // The debt is acquired by keyframes overshooting the bitrate target. 
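Note for context (not part of the patch): the FakeEncoder family in this file now takes an Environment instead of a Clock*, and MultithreadedFakeH264Encoder reads its TaskQueueFactory from env_.task_queue_factory(). A minimal construction sketch, assuming the usual CreateEnvironment() helper from api/environment/environment_factory.h:

#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"
#include "test/fake_encoder.h"

void ConstructFakeEncodersSketch() {
  // The Environment bundles the clock, task queue factory and field trials,
  // so the old Clock* / TaskQueueFactory* constructor arguments go away.
  webrtc::Environment env = webrtc::CreateEnvironment();
  webrtc::test::FakeEncoder fake(env);
  webrtc::test::MultithreadedFakeH264Encoder multithreaded(env);
}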
@@ -121,13 +122,13 @@ class FakeEncoder : public VideoEncoder { class FakeH264Encoder : public FakeEncoder { public: - explicit FakeH264Encoder(Clock* clock); + explicit FakeH264Encoder(const Environment& env); virtual ~FakeH264Encoder() = default; private: CodecSpecificInfo EncodeHook( EncodedImage& encoded_image, - rtc::scoped_refptr buffer) override; + scoped_refptr buffer) override; int idr_counter_ RTC_GUARDED_BY(local_mutex_); Mutex local_mutex_; @@ -135,7 +136,7 @@ class FakeH264Encoder : public FakeEncoder { class DelayedEncoder : public test::FakeEncoder { public: - DelayedEncoder(Clock* clock, int delay_ms); + DelayedEncoder(const Environment& env, int delay_ms); virtual ~DelayedEncoder() = default; void SetDelay(int delay_ms); @@ -153,8 +154,7 @@ class DelayedEncoder : public test::FakeEncoder { // as it is called from the task queue in VideoStreamEncoder. class MultithreadedFakeH264Encoder : public test::FakeH264Encoder { public: - MultithreadedFakeH264Encoder(Clock* clock, - TaskQueueFactory* task_queue_factory); + explicit MultithreadedFakeH264Encoder(const Environment& env); virtual ~MultithreadedFakeH264Encoder() = default; int32_t InitEncode(const VideoCodec* config, @@ -169,7 +169,6 @@ class MultithreadedFakeH264Encoder : public test::FakeH264Encoder { int32_t Release() override; protected: - TaskQueueFactory* const task_queue_factory_; int current_queue_ RTC_GUARDED_BY(sequence_checker_); std::unique_ptr queue1_ RTC_GUARDED_BY(sequence_checker_); diff --git a/test/fake_texture_frame.cc b/test/fake_texture_frame.cc index 9c17e4c1c0..95e84c7d6a 100644 --- a/test/fake_texture_frame.cc +++ b/test/fake_texture_frame.cc @@ -21,9 +21,8 @@ VideoFrame FakeNativeBuffer::CreateFrame(int width, int64_t render_time_ms, VideoRotation rotation) { return VideoFrame::Builder() - .set_video_frame_buffer( - rtc::make_ref_counted(width, height)) - .set_timestamp_rtp(timestamp) + .set_video_frame_buffer(make_ref_counted(width, height)) + .set_rtp_timestamp(timestamp) .set_timestamp_ms(render_time_ms) .set_rotation(rotation) .build(); @@ -41,8 +40,8 @@ int FakeNativeBuffer::height() const { return height_; } -rtc::scoped_refptr FakeNativeBuffer::ToI420() { - rtc::scoped_refptr buffer = I420Buffer::Create(width_, height_); +scoped_refptr FakeNativeBuffer::ToI420() { + scoped_refptr buffer = I420Buffer::Create(width_, height_); I420Buffer::SetBlack(buffer.get()); return buffer; } diff --git a/test/fake_texture_frame.h b/test/fake_texture_frame.h index 1b25112e01..b0329651b6 100644 --- a/test/fake_texture_frame.h +++ b/test/fake_texture_frame.h @@ -33,7 +33,7 @@ class FakeNativeBuffer : public VideoFrameBuffer { int height() const override; private: - rtc::scoped_refptr ToI420() override; + scoped_refptr ToI420() override; const int width_; const int height_; diff --git a/test/fake_videorenderer.h b/test/fake_videorenderer.h index a2c953ff77..af89a7648f 100644 --- a/test/fake_videorenderer.h +++ b/test/fake_videorenderer.h @@ -17,7 +17,7 @@ namespace webrtc { namespace test { -class FakeVideoRenderer : public rtc::VideoSinkInterface { +class FakeVideoRenderer : public VideoSinkInterface { public: void OnFrame(const webrtc::VideoFrame& frame) override {} }; diff --git a/test/fake_vp8_decoder.cc b/test/fake_vp8_decoder.cc index db18b9b77a..3a4793986b 100644 --- a/test/fake_vp8_decoder.cc +++ b/test/fake_vp8_decoder.cc @@ -12,7 +12,8 @@ #include -#include "absl/types/optional.h" +#include + #include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" #include "api/video/video_frame.h" @@ 
-57,11 +58,11 @@ int32_t FakeVp8Decoder::Decode(const EncodedImage& input, .set_rotation(webrtc::kVideoRotation_0) .set_timestamp_ms(render_time_ms) .build(); - frame.set_timestamp(input.Timestamp()); + frame.set_rtp_timestamp(input.RtpTimestamp()); frame.set_ntp_time_ms(input.ntp_time_ms_); - callback_->Decoded(frame, /*decode_time_ms=*/absl::nullopt, - /*qp=*/absl::nullopt); + callback_->Decoded(frame, /*decode_time_ms=*/std::nullopt, + /*qp=*/std::nullopt); return WEBRTC_VIDEO_CODEC_OK; } diff --git a/test/fake_vp8_decoder.h b/test/fake_vp8_decoder.h index 95cc4b60f7..5bfe3a4974 100644 --- a/test/fake_vp8_decoder.h +++ b/test/fake_vp8_decoder.h @@ -27,8 +27,7 @@ class FakeVp8Decoder : public VideoDecoder { bool Configure(const Settings& settings) override; - int32_t Decode(const EncodedImage& input, - int64_t render_time_ms) override; + int32_t Decode(const EncodedImage& input, int64_t render_time_ms) override; int32_t RegisterDecodeCompleteCallback( DecodedImageCallback* callback) override; diff --git a/test/fake_vp8_encoder.cc b/test/fake_vp8_encoder.cc index dcafd420a6..597d01e5a9 100644 --- a/test/fake_vp8_encoder.cc +++ b/test/fake_vp8_encoder.cc @@ -11,8 +11,8 @@ #include "test/fake_vp8_encoder.h" #include +#include -#include "absl/types/optional.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/vp8_temporal_layers.h" #include "api/video_codecs/vp8_temporal_layers_factory.h" @@ -45,7 +45,7 @@ namespace webrtc { namespace test { -FakeVp8Encoder::FakeVp8Encoder(Clock* clock) : FakeEncoder(clock) { +FakeVp8Encoder::FakeVp8Encoder(const Environment& env) : FakeEncoder(env) { sequence_checker_.Detach(); } @@ -92,14 +92,14 @@ CodecSpecificInfo FakeVp8Encoder::PopulateCodecSpecific( CodecSpecificInfo FakeVp8Encoder::EncodeHook( EncodedImage& encoded_image, - rtc::scoped_refptr buffer) { + scoped_refptr buffer) { RTC_DCHECK_RUN_ON(&sequence_checker_); uint8_t simulcast_index = encoded_image.SimulcastIndex().value_or(0); frame_buffer_controller_->NextFrameConfig(simulcast_index, - encoded_image.Timestamp()); + encoded_image.RtpTimestamp()); CodecSpecificInfo codec_specific = PopulateCodecSpecific(encoded_image.size(), encoded_image._frameType, - simulcast_index, encoded_image.Timestamp()); + simulcast_index, encoded_image.RtpTimestamp()); // Write width and height to the payload the same way as the real encoder // does. 
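The SetTimestamp()/Timestamp() to SetRtpTimestamp()/RtpTimestamp() and timestamp() to rtp_timestamp() edits repeated across these files are a mechanical rename that makes it explicit the value is the RTP timestamp. A minimal before/after sketch (illustrative only, not part of the patch):

#include <cstdint>

#include "api/video/encoded_image.h"
#include "api/video/video_frame.h"

void CopyRtpTimestampSketch(const webrtc::VideoFrame& input,
                            webrtc::EncodedImage& encoded) {
  // Previously: encoded.SetTimestamp(input.timestamp());
  encoded.SetRtpTimestamp(input.rtp_timestamp());
  // Previously: uint32_t ts = encoded.Timestamp();
  uint32_t rtp_ts = encoded.RtpTimestamp();
  (void)rtp_ts;
}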
diff --git a/test/fake_vp8_encoder.h b/test/fake_vp8_encoder.h index 6aaf547379..53288a19f4 100644 --- a/test/fake_vp8_encoder.h +++ b/test/fake_vp8_encoder.h @@ -16,6 +16,7 @@ #include +#include "api/environment/environment.h" #include "api/fec_controller_override.h" #include "api/sequence_checker.h" #include "api/video/encoded_image.h" @@ -33,7 +34,8 @@ namespace test { class FakeVp8Encoder : public FakeEncoder { public: - explicit FakeVp8Encoder(Clock* clock); + explicit FakeVp8Encoder(const Environment& env); + [[deprecated]] explicit FakeVp8Encoder(Clock* clock); virtual ~FakeVp8Encoder() = default; int32_t InitEncode(const VideoCodec* config, @@ -51,7 +53,7 @@ class FakeVp8Encoder : public FakeEncoder { CodecSpecificInfo EncodeHook( EncodedImage& encoded_image, - rtc::scoped_refptr buffer) override; + scoped_refptr buffer) override; SequenceChecker sequence_checker_; diff --git a/test/fake_vp8_encoder_unittest.cc b/test/fake_vp8_encoder_unittest.cc index e79e8e421b..08982438af 100644 --- a/test/fake_vp8_encoder_unittest.cc +++ b/test/fake_vp8_encoder_unittest.cc @@ -27,15 +27,16 @@ namespace { std::unique_ptr CreateSpecificSimulcastTestFixture() { std::unique_ptr encoder_factory = - std::make_unique([]() { - return std::make_unique(Clock::GetRealTimeClock()); - }); + std::make_unique( + [](const Environment& env, const SdpVideoFormat& format) { + return std::make_unique(env); + }); std::unique_ptr decoder_factory = std::make_unique( []() { return std::make_unique(); }); return CreateSimulcastTestFixture(std::move(encoder_factory), std::move(decoder_factory), - SdpVideoFormat("VP8")); + SdpVideoFormat::VP8()); } } // namespace diff --git a/test/frame_forwarder.cc b/test/frame_forwarder.cc index e89f753bd3..944fbf4f9d 100644 --- a/test/frame_forwarder.cc +++ b/test/frame_forwarder.cc @@ -23,32 +23,31 @@ void FrameForwarder::IncomingCapturedFrame(const VideoFrame& video_frame) { sink_->OnFrame(video_frame); } -void FrameForwarder::AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { +void FrameForwarder::AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) { MutexLock lock(&mutex_); AddOrUpdateSinkLocked(sink, wants); } -void FrameForwarder::AddOrUpdateSinkLocked( - rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { +void FrameForwarder::AddOrUpdateSinkLocked(VideoSinkInterface* sink, + const VideoSinkWants& wants) { RTC_DCHECK(!sink_ || sink_ == sink); sink_ = sink; sink_wants_ = wants; } -void FrameForwarder::RemoveSink(rtc::VideoSinkInterface* sink) { +void FrameForwarder::RemoveSink(VideoSinkInterface* sink) { MutexLock lock(&mutex_); RTC_DCHECK_EQ(sink, sink_); sink_ = nullptr; } -rtc::VideoSinkWants FrameForwarder::sink_wants() const { +VideoSinkWants FrameForwarder::sink_wants() const { MutexLock lock(&mutex_); return sink_wants_; } -rtc::VideoSinkWants FrameForwarder::sink_wants_locked() const { +VideoSinkWants FrameForwarder::sink_wants_locked() const { return sink_wants_; } diff --git a/test/frame_forwarder.h b/test/frame_forwarder.h index 6dfba9521d..59b36a4140 100644 --- a/test/frame_forwarder.h +++ b/test/frame_forwarder.h @@ -18,34 +18,33 @@ namespace webrtc { namespace test { // FrameForwarder can be used as an implementation -// of rtc::VideoSourceInterface where the caller controls when +// of webrtc::VideoSourceInterface where the caller controls when // a frame should be forwarded to its sink. // Currently this implementation only support one sink. 
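The dropped rtc:: prefixes throughout this patch (rtc::VideoSinkInterface, rtc::VideoSinkWants, rtc::scoped_refptr, rtc::ArrayView, rtc::make_ref_counted) rely on those names being visible directly in namespace webrtc at this revision; the snippet below is only a spelling sketch under that assumption:

#include <cstdint>

#include "api/array_view.h"
#include "api/scoped_refptr.h"
#include "api/video/video_frame.h"
#include "api/video/video_frame_buffer.h"
#include "api/video/video_sink_interface.h"

namespace webrtc {
// Unqualified spellings resolve inside namespace webrtc; no rtc:: prefix.
inline void SpellingSketch(VideoSinkInterface<VideoFrame>* /*sink*/,
                           scoped_refptr<VideoFrameBuffer> /*buffer*/,
                           ArrayView<const uint8_t> /*payload*/) {}
}  // namespace webrtc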
-class FrameForwarder : public rtc::VideoSourceInterface { +class FrameForwarder : public VideoSourceInterface { public: FrameForwarder(); ~FrameForwarder() override; // Forwards `video_frame` to the registered `sink_`. virtual void IncomingCapturedFrame(const VideoFrame& video_frame) RTC_LOCKS_EXCLUDED(mutex_); - rtc::VideoSinkWants sink_wants() const RTC_LOCKS_EXCLUDED(mutex_); + VideoSinkWants sink_wants() const RTC_LOCKS_EXCLUDED(mutex_); bool has_sinks() const RTC_LOCKS_EXCLUDED(mutex_); protected: - rtc::VideoSinkWants sink_wants_locked() const - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - void AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) + VideoSinkWants sink_wants_locked() const RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + void AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) RTC_LOCKS_EXCLUDED(mutex_) override; - void AddOrUpdateSinkLocked(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) + void AddOrUpdateSinkLocked(VideoSinkInterface* sink, + const VideoSinkWants& wants) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - void RemoveSink(rtc::VideoSinkInterface* sink) + void RemoveSink(VideoSinkInterface* sink) RTC_LOCKS_EXCLUDED(mutex_) override; mutable Mutex mutex_; - rtc::VideoSinkInterface* sink_ RTC_GUARDED_BY(mutex_); - rtc::VideoSinkWants sink_wants_ RTC_GUARDED_BY(mutex_); + VideoSinkInterface* sink_ RTC_GUARDED_BY(mutex_); + VideoSinkWants sink_wants_ RTC_GUARDED_BY(mutex_); }; } // namespace test diff --git a/test/frame_generator.cc b/test/frame_generator.cc index d2fa95352d..42f4617b26 100644 --- a/test/frame_generator.cc +++ b/test/frame_generator.cc @@ -51,9 +51,9 @@ FrameGeneratorInterface::Resolution SquareGenerator::GetResolution() const { .height = static_cast(height_)}; } -rtc::scoped_refptr SquareGenerator::CreateI420Buffer(int width, - int height) { - rtc::scoped_refptr buffer(I420Buffer::Create(width, height)); +scoped_refptr SquareGenerator::CreateI420Buffer(int width, + int height) { + scoped_refptr buffer(I420Buffer::Create(width, height)); memset(buffer->MutableDataY(), 127, height * buffer->StrideY()); memset(buffer->MutableDataU(), 127, buffer->ChromaHeight() * buffer->StrideU()); @@ -65,7 +65,7 @@ rtc::scoped_refptr SquareGenerator::CreateI420Buffer(int width, FrameGeneratorInterface::VideoFrameData SquareGenerator::NextFrame() { MutexLock lock(&mutex_); - rtc::scoped_refptr buffer = nullptr; + scoped_refptr buffer = nullptr; switch (type_) { case OutputType::kI420: case OutputType::kI010: @@ -74,10 +74,8 @@ FrameGeneratorInterface::VideoFrameData SquareGenerator::NextFrame() { break; } case OutputType::kI420A: { - rtc::scoped_refptr yuv_buffer = - CreateI420Buffer(width_, height_); - rtc::scoped_refptr axx_buffer = - CreateI420Buffer(width_, height_); + scoped_refptr yuv_buffer = CreateI420Buffer(width_, height_); + scoped_refptr axx_buffer = CreateI420Buffer(width_, height_); buffer = WrapI420ABuffer(yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(), yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(), @@ -100,7 +98,7 @@ FrameGeneratorInterface::VideoFrameData SquareGenerator::NextFrame() { buffer = NV12Buffer::Copy(*buffer->ToI420()); } - return VideoFrameData(buffer, absl::nullopt); + return VideoFrameData(buffer, std::nullopt); } SquareGenerator::Square::Square(int width, int height, int seed) @@ -114,10 +112,10 @@ SquareGenerator::Square::Square(int width, int height, int seed) yuv_a_(random_generator_.Rand(0, 255)) {} void SquareGenerator::Square::Draw( - 
const rtc::scoped_refptr& frame_buffer) { + const scoped_refptr& frame_buffer) { RTC_DCHECK(frame_buffer->type() == VideoFrameBuffer::Type::kI420 || frame_buffer->type() == VideoFrameBuffer::Type::kI420A); - rtc::scoped_refptr buffer = frame_buffer->ToI420(); + scoped_refptr buffer = frame_buffer->ToI420(); int length_cap = std::min(buffer->height(), buffer->width()) / 4; int length = std::min(length_, length_cap); x_ = (x_ + random_generator_.Rand(0, 4)) % (buffer->width() - length); @@ -298,7 +296,7 @@ FrameGeneratorInterface::VideoFrameData SlideGenerator::NextFrame() { if (++current_display_count_ >= frame_display_count_) current_display_count_ = 0; - return VideoFrameData(buffer_, absl::nullopt); + return VideoFrameData(buffer_, std::nullopt); } FrameGeneratorInterface::Resolution SlideGenerator::GetResolution() const { @@ -363,8 +361,8 @@ ScrollingImageFrameGenerator::ScrollingImageFrameGenerator( target_height_(static_cast(target_height)), current_frame_num_(num_frames_ - 1), prev_frame_not_scrolled_(false), - current_source_frame_(nullptr, absl::nullopt), - current_frame_(nullptr, absl::nullopt), + current_source_frame_(nullptr, std::nullopt), + current_frame_(nullptr, std::nullopt), file_generator_(files, source_width, source_height, 1) { RTC_DCHECK(clock_ != nullptr); RTC_DCHECK_GT(num_frames_, 0); @@ -436,7 +434,7 @@ void ScrollingImageFrameGenerator::CropSourceToScrolledImage( int pixels_scrolled_y = static_cast(scroll_margin_y * scroll_factor + 0.5); - rtc::scoped_refptr i420_buffer = + scoped_refptr i420_buffer = current_source_frame_.buffer->ToI420(); int offset_y = (i420_buffer->StrideY() * pixels_scrolled_y) + pixels_scrolled_x; diff --git a/test/frame_generator.h b/test/frame_generator.h index 76f195d326..089caaa76c 100644 --- a/test/frame_generator.h +++ b/test/frame_generator.h @@ -40,16 +40,16 @@ class SquareGenerator : public FrameGeneratorInterface { VideoFrameData NextFrame() override; Resolution GetResolution() const override; - absl::optional fps() const override { return absl::nullopt; } + std::optional fps() const override { return std::nullopt; } private: - rtc::scoped_refptr CreateI420Buffer(int width, int height); + scoped_refptr CreateI420Buffer(int width, int height); class Square { public: Square(int width, int height, int seed); - void Draw(const rtc::scoped_refptr& frame_buffer); + void Draw(const scoped_refptr& frame_buffer); private: Random random_generator_; @@ -84,7 +84,7 @@ class YuvFileGenerator : public FrameGeneratorInterface { } Resolution GetResolution() const override; - absl::optional fps() const override { return absl::nullopt; } + std::optional fps() const override { return std::nullopt; } private: // Returns true if the new frame was loaded. @@ -100,7 +100,7 @@ class YuvFileGenerator : public FrameGeneratorInterface { const std::unique_ptr frame_buffer_; const int frame_display_count_; int current_display_count_; - rtc::scoped_refptr last_read_buffer_; + scoped_refptr last_read_buffer_; }; class NV12FileGenerator : public FrameGeneratorInterface { @@ -119,7 +119,7 @@ class NV12FileGenerator : public FrameGeneratorInterface { } Resolution GetResolution() const override; - absl::optional fps() const override { return absl::nullopt; } + std::optional fps() const override { return std::nullopt; } private: // Returns true if the new frame was loaded. 
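The allow_zero_hertz flag added to FrameGeneratorCapturerConfig above is consumed by FrameGeneratorCapturer below: consecutive identical frames are skipped (at most target_fps - 1 in a row, so at least one frame per second still goes out), and RequestRefreshFrame() re-sends the last captured frame. A minimal usage sketch mirroring the AllowZeroHertz unit tests added later in this patch (illustrative only, not part of the patch):

#include <memory>

#include "api/task_queue/task_queue_factory.h"
#include "system_wrappers/include/clock.h"
#include "test/create_frame_generator_capturer.h"
#include "test/frame_generator_capturer.h"

std::unique_ptr<webrtc::test::FrameGeneratorCapturer> MakeZeroHertzCapturer(
    webrtc::Clock* clock,
    webrtc::TaskQueueFactory& task_queue_factory) {
  webrtc::test::FrameGeneratorCapturerConfig config;
  // AutoOpt lazily default-constructs the generator settings on first access.
  config.squares_video->framerate = 30;
  // New flag (default false): drop frames identical to the previous one,
  // while still delivering at least one frame per second.
  config.allow_zero_hertz = true;
  return webrtc::test::CreateFrameGeneratorCapturer(clock, task_queue_factory,
                                                    config);
}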
@@ -135,7 +135,7 @@ class NV12FileGenerator : public FrameGeneratorInterface { const std::unique_ptr frame_buffer_; const int frame_display_count_; int current_display_count_; - rtc::scoped_refptr last_read_buffer_; + scoped_refptr last_read_buffer_; }; // SlideGenerator works similarly to YuvFileGenerator but it fills the frames @@ -151,7 +151,7 @@ class SlideGenerator : public FrameGeneratorInterface { } Resolution GetResolution() const override; - absl::optional fps() const override { return absl::nullopt; } + std::optional fps() const override { return std::nullopt; } private: // Generates some randomly sized and colored squares scattered @@ -163,7 +163,7 @@ class SlideGenerator : public FrameGeneratorInterface { const int frame_display_count_; int current_display_count_; Random random_generator_; - rtc::scoped_refptr buffer_; + scoped_refptr buffer_; }; class ScrollingImageFrameGenerator : public FrameGeneratorInterface { @@ -185,7 +185,7 @@ class ScrollingImageFrameGenerator : public FrameGeneratorInterface { } Resolution GetResolution() const override; - absl::optional fps() const override { return absl::nullopt; } + std::optional fps() const override { return std::nullopt; } private: void UpdateSourceFrame(size_t frame_num); diff --git a/test/frame_generator_capturer.cc b/test/frame_generator_capturer.cc index 6ba0807a74..0e2f2ee952 100644 --- a/test/frame_generator_capturer.cc +++ b/test/frame_generator_capturer.cc @@ -14,9 +14,9 @@ #include #include #include +#include #include -#include "absl/types/optional.h" #include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" #include "api/test/frame_generator_interface.h" @@ -29,9 +29,9 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_queue.h" #include "rtc_base/task_utils/repeating_task.h" #include "system_wrappers/include/clock.h" +#include "test/frame_utils.h" #include "test/test_video_capturer.h" namespace webrtc { @@ -41,14 +41,15 @@ FrameGeneratorCapturer::FrameGeneratorCapturer( Clock* clock, std::unique_ptr frame_generator, int target_fps, - TaskQueueFactory& task_queue_factory) + TaskQueueFactory& task_queue_factory, + bool allow_zero_hertz) : clock_(clock), sending_(true), sink_wants_observer_(nullptr), frame_generator_(std::move(frame_generator)), source_fps_(target_fps), target_capture_fps_(target_fps), - first_frame_capture_time_(-1), + allow_zero_hertz_(allow_zero_hertz), task_queue_(task_queue_factory.CreateTaskQueue( "FrameGenCapQ", TaskQueueFactory::Priority::HIGH)) { @@ -58,6 +59,9 @@ FrameGeneratorCapturer::FrameGeneratorCapturer( FrameGeneratorCapturer::~FrameGeneratorCapturer() { Stop(); + // Deconstruct first as tasks in the TaskQueue access other fields of the + // instance of this class. + task_queue_ = nullptr; } void FrameGeneratorCapturer::SetFakeRotation(VideoRotation rotation) { @@ -66,7 +70,7 @@ void FrameGeneratorCapturer::SetFakeRotation(VideoRotation rotation) { } void FrameGeneratorCapturer::SetFakeColorSpace( - absl::optional color_space) { + std::optional color_space) { MutexLock lock(&lock_); fake_color_space_ = color_space; } @@ -74,11 +78,11 @@ void FrameGeneratorCapturer::SetFakeColorSpace( bool FrameGeneratorCapturer::Init() { // This check is added because frame_generator_ might be file based and should // not crash because a file moved. 
- if (frame_generator_.get() == nullptr) + if (frame_generator_ == nullptr) return false; frame_task_ = RepeatingTaskHandle::DelayedStart( - task_queue_.Get(), + task_queue_.get(), TimeDelta::Seconds(1) / GetCurrentConfiguredFramerate(), [this] { InsertFrame(); @@ -91,32 +95,38 @@ bool FrameGeneratorCapturer::Init() { void FrameGeneratorCapturer::InsertFrame() { MutexLock lock(&lock_); if (sending_) { - FrameGeneratorInterface::VideoFrameData frame_data = - frame_generator_->NextFrame(); - // TODO(srte): Use more advanced frame rate control to allow arbritrary - // fractions. int decimation = std::round(static_cast(source_fps_) / target_capture_fps_); - for (int i = 1; i < decimation; ++i) - frame_data = frame_generator_->NextFrame(); - - VideoFrame frame = VideoFrame::Builder() - .set_video_frame_buffer(frame_data.buffer) - .set_rotation(fake_rotation_) - .set_timestamp_us(clock_->TimeInMicroseconds()) - .set_ntp_time_ms(clock_->CurrentNtpInMilliseconds()) - .set_update_rect(frame_data.update_rect) - .set_color_space(fake_color_space_) - .build(); - if (first_frame_capture_time_ == -1) { - first_frame_capture_time_ = frame.ntp_time_ms(); + for (int i = 1; i < decimation; ++i) { + frame_generator_->SkipNextFrame(); } - TestVideoCapturer::OnFrame(frame); + FrameGeneratorInterface::VideoFrameData frame_data = + frame_generator_->NextFrame(); + if (allow_zero_hertz_) { + // Skip frames that are identical to the previous one but still send at + // least one frame every second. + if (number_of_frames_skipped_ < target_capture_fps_ - 1 && + webrtc::test::FrameBufsEqual(last_frame_captured_, + frame_data.buffer)) { + ++number_of_frames_skipped_; + return; + } + number_of_frames_skipped_ = 0; + } + last_frame_captured_ = frame_data.buffer; + TestVideoCapturer::OnFrame( + VideoFrame::Builder() + .set_video_frame_buffer(frame_data.buffer) + .set_rotation(fake_rotation_) + .set_timestamp_us(clock_->TimeInMicroseconds()) + .set_update_rect(frame_data.update_rect) + .set_color_space(fake_color_space_) + .build()); } } -absl::optional +std::optional FrameGeneratorCapturer::GetResolution() const { FrameGeneratorInterface::Resolution resolution = frame_generator_->GetResolution(); @@ -131,7 +141,7 @@ void FrameGeneratorCapturer::Start() { } if (!frame_task_.Running()) { frame_task_ = RepeatingTaskHandle::Start( - task_queue_.Get(), + task_queue_.get(), [this] { InsertFrame(); return TimeDelta::Seconds(1) / GetCurrentConfiguredFramerate(); @@ -179,7 +189,7 @@ int FrameGeneratorCapturer::GetFrameHeight() const { void FrameGeneratorCapturer::OnOutputFormatRequest( int width, int height, - const absl::optional& max_fps) { + const std::optional& max_fps) { TestVideoCapturer::OnOutputFormatRequest(width, height, max_fps); } @@ -190,42 +200,44 @@ void FrameGeneratorCapturer::SetSinkWantsObserver(SinkWantsObserver* observer) { } void FrameGeneratorCapturer::AddOrUpdateSink( - rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { + VideoSinkInterface* sink, + const VideoSinkWants& wants) { TestVideoCapturer::AddOrUpdateSink(sink, wants); - MutexLock lock(&lock_); - if (sink_wants_observer_) { - // Tests need to observe unmodified sink wants. - sink_wants_observer_->OnSinkWantsChanged(sink, wants); + { + MutexLock lock(&lock_); + if (sink_wants_observer_) { + // Tests need to observe unmodified sink wants. 
+ sink_wants_observer_->OnSinkWantsChanged(sink, wants); + } } - UpdateFps(GetSinkWants().max_framerate_fps); + ChangeFramerate(GetSinkWants().max_framerate_fps); } -void FrameGeneratorCapturer::RemoveSink( - rtc::VideoSinkInterface* sink) { +void FrameGeneratorCapturer::RemoveSink(VideoSinkInterface* sink) { TestVideoCapturer::RemoveSink(sink); - - MutexLock lock(&lock_); - UpdateFps(GetSinkWants().max_framerate_fps); + ChangeFramerate(GetSinkWants().max_framerate_fps); } -void FrameGeneratorCapturer::UpdateFps(int max_fps) { - if (max_fps < target_capture_fps_) { - wanted_fps_.emplace(max_fps); - } else { - wanted_fps_.reset(); +void FrameGeneratorCapturer::RequestRefreshFrame() { + MutexLock lock(&lock_); + if (sending_ && last_frame_captured_ != nullptr) { + TestVideoCapturer::OnFrame( + VideoFrame::Builder() + .set_video_frame_buffer(last_frame_captured_) + .set_rotation(fake_rotation_) + .set_timestamp_us(clock_->TimeInMicroseconds()) + .set_color_space(fake_color_space_) + .build()); } } void FrameGeneratorCapturer::ForceFrame() { // One-time non-repeating task, - task_queue_.PostTask([this] { InsertFrame(); }); + task_queue_->PostTask([this] { InsertFrame(); }); } int FrameGeneratorCapturer::GetCurrentConfiguredFramerate() { MutexLock lock(&lock_); - if (wanted_fps_ && *wanted_fps_ < target_capture_fps_) - return *wanted_fps_; return target_capture_fps_; } diff --git a/test/frame_generator_capturer.h b/test/frame_generator_capturer.h index 6824ba681e..6854caece9 100644 --- a/test/frame_generator_capturer.h +++ b/test/frame_generator_capturer.h @@ -11,19 +11,20 @@ #define TEST_FRAME_GENERATOR_CAPTURER_H_ #include -#include #include +#include -#include "absl/types/optional.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" #include "api/test/frame_generator_interface.h" #include "api/video/color_space.h" #include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" #include "api/video/video_rotation.h" #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/task_queue.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" @@ -38,8 +39,8 @@ class FrameGeneratorCapturer : public TestVideoCapturer { public: // OnSinkWantsChanged is called when FrameGeneratorCapturer::AddOrUpdateSink // is called. 
- virtual void OnSinkWantsChanged(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) = 0; + virtual void OnSinkWantsChanged(VideoSinkInterface* sink, + const VideoSinkWants& wants) = 0; protected: virtual ~SinkWantsObserver() {} @@ -49,7 +50,8 @@ class FrameGeneratorCapturer : public TestVideoCapturer { Clock* clock, std::unique_ptr frame_generator, int target_fps, - TaskQueueFactory& task_queue_factory); + TaskQueueFactory& task_queue_factory, + bool allow_zero_hertz = false); virtual ~FrameGeneratorCapturer(); void Start() override; @@ -64,23 +66,22 @@ class FrameGeneratorCapturer : public TestVideoCapturer { int width; int height; }; - absl::optional GetResolution() const; + std::optional GetResolution() const; void OnOutputFormatRequest(int width, int height, - const absl::optional& max_fps); + const std::optional& max_fps); void SetSinkWantsObserver(SinkWantsObserver* observer); - void AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override; - void RemoveSink(rtc::VideoSinkInterface* sink) override; + void AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) override; + void RemoveSink(VideoSinkInterface* sink) override; + void RequestRefreshFrame() override; void ForceFrame(); void SetFakeRotation(VideoRotation rotation); - void SetFakeColorSpace(absl::optional color_space); - - int64_t first_frame_capture_time() const { return first_frame_capture_time_; } + void SetFakeColorSpace(std::optional color_space); bool Init(); @@ -88,27 +89,24 @@ class FrameGeneratorCapturer : public TestVideoCapturer { void InsertFrame(); static bool Run(void* obj); int GetCurrentConfiguredFramerate(); - void UpdateFps(int max_fps) RTC_EXCLUSIVE_LOCKS_REQUIRED(&lock_); Clock* const clock_; RepeatingTaskHandle frame_task_; - bool sending_; + bool sending_ RTC_GUARDED_BY(&lock_); SinkWantsObserver* sink_wants_observer_ RTC_GUARDED_BY(&lock_); Mutex lock_; std::unique_ptr frame_generator_; + scoped_refptr last_frame_captured_; int source_fps_ RTC_GUARDED_BY(&lock_); int target_capture_fps_ RTC_GUARDED_BY(&lock_); - absl::optional wanted_fps_ RTC_GUARDED_BY(&lock_); VideoRotation fake_rotation_ = kVideoRotation_0; - absl::optional fake_color_space_ RTC_GUARDED_BY(&lock_); - - int64_t first_frame_capture_time_; + std::optional fake_color_space_ RTC_GUARDED_BY(&lock_); + bool allow_zero_hertz_ = false; + int number_of_frames_skipped_ = 0; - // Must be the last field, so it will be deconstructed first as tasks - // in the TaskQueue access other fields of the instance of this class. 
- rtc::TaskQueue task_queue_; + std::unique_ptr task_queue_; }; } // namespace test } // namespace webrtc diff --git a/test/frame_generator_capturer_unittest.cc b/test/frame_generator_capturer_unittest.cc index 8bf70cffd5..94b9d70c10 100644 --- a/test/frame_generator_capturer_unittest.cc +++ b/test/frame_generator_capturer_unittest.cc @@ -24,8 +24,7 @@ using ::testing::Property; constexpr int kWidth = 640; constexpr int kHeight = 360; -class MockVideoSinkInterfaceVideoFrame - : public rtc::VideoSinkInterface { +class MockVideoSinkInterfaceVideoFrame : public VideoSinkInterface { public: MOCK_METHOD(void, OnFrame, (const VideoFrame& frame), (override)); MOCK_METHOD(void, OnDiscardedFrame, (), (override)); @@ -41,7 +40,7 @@ TEST(FrameGeneratorCapturerTest, CreateFromConfig) { auto capturer = CreateFrameGeneratorCapturer( time.GetClock(), *time.GetTaskQueueFactory(), config); testing::StrictMock mock_sink; - capturer->AddOrUpdateSink(&mock_sink, rtc::VideoSinkWants()); + capturer->AddOrUpdateSink(&mock_sink, VideoSinkWants()); capturer->Start(); EXPECT_CALL(mock_sink, OnFrame(Property(&VideoFrame::width, Eq(300)))) .Times(21); @@ -57,7 +56,7 @@ TEST(FrameGeneratorCapturerTest, OnOutputFormatRequest) { auto capturer = CreateFrameGeneratorCapturer( time.GetClock(), *time.GetTaskQueueFactory(), config); testing::StrictMock mock_sink; - capturer->AddOrUpdateSink(&mock_sink, rtc::VideoSinkWants()); + capturer->AddOrUpdateSink(&mock_sink, VideoSinkWants()); capturer->OnOutputFormatRequest(kWidth / 2, kHeight / 2, /*max_fps=*/10); capturer->Start(); EXPECT_CALL(mock_sink, OnFrame(Property(&VideoFrame::width, Eq(kWidth / 2)))) @@ -89,5 +88,39 @@ TEST(FrameGeneratorCapturerTest, ChangeResolution) { EXPECT_EQ(kHeight / 2, capturer->GetResolution()->height); } +TEST(FrameGeneratorCapturerTest, AllowZeroHertz) { + GlobalSimulatedTimeController time(Timestamp::Seconds(1000)); + FrameGeneratorCapturerConfig config; + config.image_slides->framerate = 30; + config.image_slides->change_interval = TimeDelta::Millis(500); + config.allow_zero_hertz = true; + auto capturer = CreateFrameGeneratorCapturer( + time.GetClock(), *time.GetTaskQueueFactory(), config); + testing::StrictMock mock_sink; + capturer->AddOrUpdateSink(&mock_sink, VideoSinkWants()); + capturer->Start(); + // The video changes frame every 500ms so during 10s we expect to capture 20 + // frames. The framerate set to 30 is ignored. + EXPECT_CALL(mock_sink, OnFrame).Times(21); + time.AdvanceTime(TimeDelta::Seconds(10)); +} + +TEST(FrameGeneratorCapturerTest, AllowZeroHertzMinimumFps) { + GlobalSimulatedTimeController time(Timestamp::Seconds(1000)); + FrameGeneratorCapturerConfig config; + config.image_slides->framerate = 1; + config.image_slides->change_interval = TimeDelta::Seconds(11); + config.allow_zero_hertz = true; + auto capturer = CreateFrameGeneratorCapturer( + time.GetClock(), *time.GetTaskQueueFactory(), config); + testing::StrictMock mock_sink; + capturer->AddOrUpdateSink(&mock_sink, VideoSinkWants()); + capturer->Start(); + // The video frame never changes but the capturer still sends a minimum of one + // frame per second. 
+ EXPECT_CALL(mock_sink, OnFrame).Times(11); + time.AdvanceTime(TimeDelta::Seconds(10)); +} + } // namespace test } // namespace webrtc diff --git a/test/frame_generator_unittest.cc b/test/frame_generator_unittest.cc index ece37a547f..8de5241c93 100644 --- a/test/frame_generator_unittest.cc +++ b/test/frame_generator_unittest.cc @@ -97,8 +97,7 @@ class FrameGeneratorTest : public ::testing::Test { uint8_t u, uint8_t v) { // Check that frame is valid, has the correct color and timestamp are clean. - rtc::scoped_refptr i420_buffer = - frame.buffer->ToI420(); + scoped_refptr i420_buffer = frame.buffer->ToI420(); const uint8_t* buffer; buffer = i420_buffer->DataY(); for (int i = 0; i < y_size; ++i) @@ -114,8 +113,7 @@ class FrameGeneratorTest : public ::testing::Test { uint64_t Hash(const FrameGeneratorInterface::VideoFrameData& frame) { // Generate a 64-bit hash from the frame's buffer. uint64_t hash = 19; - rtc::scoped_refptr i420_buffer = - frame.buffer->ToI420(); + scoped_refptr i420_buffer = frame.buffer->ToI420(); const uint8_t* buffer = i420_buffer->DataY(); for (int i = 0; i < y_size; ++i) { hash = (37 * hash) + buffer[i]; diff --git a/test/frame_utils.cc b/test/frame_utils.cc index b280de1ad1..1e2019bb7f 100644 --- a/test/frame_utils.cc +++ b/test/frame_utils.cc @@ -36,7 +36,7 @@ bool EqualPlane(const uint8_t* data1, } bool FramesEqual(const webrtc::VideoFrame& f1, const webrtc::VideoFrame& f2) { - if (f1.timestamp() != f2.timestamp() || + if (f1.rtp_timestamp() != f2.rtp_timestamp() || f1.ntp_time_ms() != f2.ntp_time_ms() || f1.render_time_ms() != f2.render_time_ms()) { return false; @@ -44,8 +44,8 @@ bool FramesEqual(const webrtc::VideoFrame& f1, const webrtc::VideoFrame& f2) { return FrameBufsEqual(f1.video_frame_buffer(), f2.video_frame_buffer()); } -bool FrameBufsEqual(const rtc::scoped_refptr& f1, - const rtc::scoped_refptr& f2) { +bool FrameBufsEqual(const scoped_refptr& f1, + const scoped_refptr& f2) { if (f1 == f2) { return true; } @@ -59,8 +59,8 @@ bool FrameBufsEqual(const rtc::scoped_refptr& f1, return false; } - rtc::scoped_refptr f1_i420 = f1->ToI420(); - rtc::scoped_refptr f2_i420 = f2->ToI420(); + scoped_refptr f1_i420 = f1->ToI420(); + scoped_refptr f2_i420 = f2->ToI420(); return EqualPlane(f1_i420->DataY(), f2_i420->DataY(), f1_i420->StrideY(), f2_i420->StrideY(), f1_i420->width(), f1_i420->height()) && EqualPlane(f1_i420->DataU(), f2_i420->DataU(), f1_i420->StrideU(), @@ -71,9 +71,9 @@ bool FrameBufsEqual(const rtc::scoped_refptr& f1, f1_i420->ChromaHeight()); } -rtc::scoped_refptr ReadI420Buffer(int width, int height, FILE* f) { +scoped_refptr ReadI420Buffer(int width, int height, FILE* f) { int half_width = (width + 1) / 2; - rtc::scoped_refptr buffer( + scoped_refptr buffer( // Explicit stride, no padding between rows. 
I420Buffer::Create(width, height, width, half_width, half_width)); size_t size_y = static_cast(width) * height; @@ -88,8 +88,8 @@ rtc::scoped_refptr ReadI420Buffer(int width, int height, FILE* f) { return buffer; } -rtc::scoped_refptr ReadNV12Buffer(int width, int height, FILE* f) { - rtc::scoped_refptr buffer(NV12Buffer::Create(width, height)); +scoped_refptr ReadNV12Buffer(int width, int height, FILE* f) { + scoped_refptr buffer(NV12Buffer::Create(width, height)); size_t size_y = static_cast(width) * height; size_t size_uv = static_cast(width + width % 2) * ((height + 1) / 2); diff --git a/test/frame_utils.h b/test/frame_utils.h index 1f2b381afb..c537fc68ef 100644 --- a/test/frame_utils.h +++ b/test/frame_utils.h @@ -38,12 +38,12 @@ static inline bool EqualPlane(const uint8_t* data1, bool FramesEqual(const webrtc::VideoFrame& f1, const webrtc::VideoFrame& f2); -bool FrameBufsEqual(const rtc::scoped_refptr& f1, - const rtc::scoped_refptr& f2); +bool FrameBufsEqual(const scoped_refptr& f1, + const scoped_refptr& f2); -rtc::scoped_refptr ReadI420Buffer(int width, int height, FILE*); +scoped_refptr ReadI420Buffer(int width, int height, FILE*); -rtc::scoped_refptr ReadNV12Buffer(int width, int height, FILE*); +scoped_refptr ReadNV12Buffer(int width, int height, FILE*); } // namespace test } // namespace webrtc diff --git a/test/function_audio_decoder_factory.h b/test/function_audio_decoder_factory.h index 8464f3d9aa..c31424956f 100644 --- a/test/function_audio_decoder_factory.h +++ b/test/function_audio_decoder_factory.h @@ -19,6 +19,7 @@ #include "absl/memory/memory.h" #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/audio_format.h" +#include "api/environment/environment.h" #include "rtc_base/checks.h" namespace webrtc { @@ -29,14 +30,16 @@ class FunctionAudioDecoderFactory : public AudioDecoderFactory { public: explicit FunctionAudioDecoderFactory( std::function()> create) - : create_([create](const SdpAudioFormat&, - absl::optional codec_pair_id) { + : create_([create](const Environment&, + const SdpAudioFormat&, + std::optional codec_pair_id) { return create(); }) {} explicit FunctionAudioDecoderFactory( std::function( + const Environment&, const SdpAudioFormat&, - absl::optional codec_pair_id)> create) + std::optional codec_pair_id)> create) : create_(std::move(create)) {} // Unused by tests. 
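The FunctionAudioDecoderFactory callback now receives the Environment first, matching the AudioDecoderFactory::Create() override below. A minimal sketch of the new three-argument form (illustrative only; it assumes AudioCodecPairId as the codec_pair_id type and the make_ref_counted helper from api/make_ref_counted.h):

#include <memory>
#include <optional>

#include "api/audio_codecs/audio_codec_pair_id.h"
#include "api/audio_codecs/audio_decoder.h"
#include "api/audio_codecs/audio_decoder_factory.h"
#include "api/audio_codecs/audio_format.h"
#include "api/environment/environment.h"
#include "api/make_ref_counted.h"
#include "api/scoped_refptr.h"
#include "test/function_audio_decoder_factory.h"

namespace webrtc {

scoped_refptr<AudioDecoderFactory> MakeTestDecoderFactorySketch() {
  // The factory callback now takes the Environment as its first argument.
  return make_ref_counted<test::FunctionAudioDecoderFactory>(
      [](const Environment& env, const SdpAudioFormat& format,
         std::optional<AudioCodecPairId> codec_pair_id)
          -> std::unique_ptr<AudioDecoder> {
        // A real test would construct its decoder here, typically using env
        // for field trials or the clock.
        return nullptr;
      });
}

}  // namespace webrtc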
@@ -49,16 +52,18 @@ class FunctionAudioDecoderFactory : public AudioDecoderFactory { return true; } - std::unique_ptr MakeAudioDecoder( + std::unique_ptr Create( + const Environment& env, const SdpAudioFormat& format, - absl::optional codec_pair_id) override { - return create_(format, codec_pair_id); + std::optional codec_pair_id) override { + return create_(env, format, codec_pair_id); } private: const std::function( + const Environment&, const SdpAudioFormat&, - absl::optional codec_pair_id)> + std::optional codec_pair_id)> create_; }; diff --git a/test/fuzzers/BUILD.gn b/test/fuzzers/BUILD.gn index 114deb03c3..60a9d18b99 100644 --- a/test/fuzzers/BUILD.gn +++ b/test/fuzzers/BUILD.gn @@ -12,6 +12,7 @@ import("../../webrtc.gni") rtc_library("webrtc_fuzzer_main") { sources = [ "webrtc_fuzzer_main.cc" ] + testonly = true deps = [ "../../rtc_base:logging", "//testing/libfuzzer:fuzzing_engine_main", @@ -43,7 +44,6 @@ rtc_library("fuzz_data_helper") { set_defaults("webrtc_fuzzer_test") { configs = rtc_add_configs - absl_deps = [] } template("webrtc_fuzzer_test") { @@ -55,17 +55,20 @@ template("webrtc_fuzzer_test") { ] additional_configs = configs - # If absl_deps is [], no action is needed. If not [], then it needs to be - # converted to //third_party/abseil-cpp:absl when build_with_chromium=true - # otherwise it just needs to be added to deps. - if (absl_deps != []) { - if (!defined(deps)) { + assert( + !defined(absl_deps), + "`absl_deps` has been deprecated, add your Abseil dependencies to the `deps` variable.") + + # Abseil dependencies need to be converted to //third_party/abseil-cpp:absl when build_with_chromium=true + if (build_with_chromium) { + absl_dependencies = + filter_labels_include(deps, [ "//third_party/abseil-cpp/*" ]) + if (absl_dependencies != []) { + filtered_deps = + filter_labels_exclude(deps, [ "//third_party/abseil-cpp/*" ]) deps = [] - } - if (build_with_chromium) { + deps = filtered_deps deps += [ "//third_party/abseil-cpp:absl" ] - } else { - deps += absl_deps } } @@ -77,7 +80,10 @@ template("webrtc_fuzzer_test") { webrtc_fuzzer_test("h264_depacketizer_fuzzer") { sources = [ "h264_depacketizer_fuzzer.cc" ] - deps = [ "../../modules/rtp_rtcp" ] + deps = [ + "../../modules/rtp_rtcp", + "../../rtc_base:copy_on_write_buffer", + ] seed_corpus = "corpora/h264-depacketizer-fuzzer-corpus" } @@ -99,6 +105,17 @@ webrtc_fuzzer_test("vp9_depacketizer_fuzzer") { ] } +webrtc_fuzzer_test("dtls_utils_fuzzer") { + sources = [ "dtls_utils_fuzzer.cc" ] + deps = [ + "../../api:array_view", + "../../p2p:dtls_utils", + ] + + # Seed from boringssl DTLS corpus. 
+ seed_corpus = "//third_party/boringssl/src/fuzz/dtls_client_corpus" +} + webrtc_fuzzer_test("vp8_qp_parser_fuzzer") { sources = [ "vp8_qp_parser_fuzzer.cc" ] deps = [ @@ -110,6 +127,7 @@ webrtc_fuzzer_test("vp8_qp_parser_fuzzer") { webrtc_fuzzer_test("vp9_qp_parser_fuzzer") { sources = [ "vp9_qp_parser_fuzzer.cc" ] deps = [ + "../../api:array_view", "../../modules/video_coding:video_coding_utility", "../../modules/video_coding/", ] @@ -118,6 +136,7 @@ webrtc_fuzzer_test("vp9_qp_parser_fuzzer") { webrtc_fuzzer_test("h264_bitstream_parser_fuzzer") { sources = [ "h264_bitstream_parser_fuzzer.cc" ] deps = [ + "../../api:array_view", "../../common_video", "../../modules/video_coding/", ] @@ -127,15 +146,26 @@ if (rtc_use_h265) { webrtc_fuzzer_test("h265_bitstream_parser_fuzzer") { sources = [ "h265_bitstream_parser_fuzzer.cc" ] deps = [ + "../../api:array_view", "../../common_video", "../../modules/video_coding/", ] } + + webrtc_fuzzer_test("h265_depacketizer_fuzzer") { + sources = [ "h265_depacketizer_fuzzer.cc" ] + deps = [ + "../../api:array_view", + "../../modules/rtp_rtcp", + "../../rtc_base:copy_on_write_buffer", + ] + } } webrtc_fuzzer_test("forward_error_correction_fuzzer") { sources = [ "forward_error_correction_fuzzer.cc" ] deps = [ + "../../api:array_view", "../../api:scoped_refptr", "../../modules/rtp_rtcp", "../../modules/rtp_rtcp:rtp_rtcp_format", @@ -155,6 +185,10 @@ webrtc_fuzzer_test("flexfec_header_reader_fuzzer") { webrtc_fuzzer_test("flexfec_sender_fuzzer") { sources = [ "flexfec_sender_fuzzer.cc" ] deps = [ + "../../api:rtp_parameters", + "../../api/environment", + "../../api/environment:environment_factory", + "../../modules:module_fec_api", "../../modules/rtp_rtcp", "../../modules/rtp_rtcp:rtp_rtcp_format", "../../system_wrappers", @@ -174,7 +208,10 @@ webrtc_fuzzer_test("ulpfec_header_reader_fuzzer") { webrtc_fuzzer_test("ulpfec_generator_fuzzer") { sources = [ "ulpfec_generator_fuzzer.cc" ] deps = [ + "../../api/environment", + "../../api/environment:environment_factory", "../../modules:module_api_public", + "../../modules:module_fec_api", "../../modules/rtp_rtcp", "../../modules/rtp_rtcp:fec_test_helper", "../../modules/rtp_rtcp:rtp_rtcp_format", @@ -188,8 +225,10 @@ webrtc_fuzzer_test("ulpfec_receiver_fuzzer") { sources = [ "ulpfec_receiver_fuzzer.cc" ] deps = [ ":fuzz_data_helper", + "../../api:array_view", "../../modules/rtp_rtcp", "../../modules/rtp_rtcp:rtp_rtcp_format", + "../../system_wrappers", ] } @@ -212,24 +251,34 @@ webrtc_fuzzer_test("rtp_video_frame_assembler_fuzzer") { webrtc_fuzzer_test("rtcp_receiver_fuzzer") { sources = [ "rtcp_receiver_fuzzer.cc" ] deps = [ + "../../api:array_view", + "../../api/environment", + "../../api/environment:environment_factory", "../../modules/rtp_rtcp", "../../modules/rtp_rtcp:rtp_rtcp_format", "../../rtc_base:checks", "../../system_wrappers", + "../../test:explicit_key_value_config", ] seed_corpus = "corpora/rtcp-corpus" } webrtc_fuzzer_test("rtp_packet_fuzzer") { sources = [ "rtp_packet_fuzzer.cc" ] - deps = [ "../../modules/rtp_rtcp:rtp_rtcp_format" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + deps = [ + "../../api:rtp_headers", + "../../api/transport/rtp:corruption_detection_message", + "../../api/video:video_layers_allocation", + "../../api/video:video_rtp_headers", + "../../modules/rtp_rtcp:rtp_rtcp_format", + ] seed_corpus = "corpora/rtp-corpus" } webrtc_fuzzer_test("rtp_packetizer_av1_fuzzer") { sources = [ "rtp_packetizer_av1_fuzzer.cc" ] deps = [ + "../../api:array_view", 
"../../api/video:video_frame_type", "../../modules/rtp_rtcp:rtp_rtcp", "../../modules/rtp_rtcp:rtp_rtcp_format", @@ -237,16 +286,55 @@ webrtc_fuzzer_test("rtp_packetizer_av1_fuzzer") { ] } +webrtc_fuzzer_test("rtp_format_h264_fuzzer") { + sources = [ "rtp_format_h264_fuzzer.cc" ] + deps = [ + "../../api:array_view", + "../../api/video:video_frame_type", + "../../modules/rtp_rtcp:rtp_rtcp", + "../../modules/rtp_rtcp:rtp_rtcp_format", + "../../modules/video_coding:codec_globals_headers", + "../../rtc_base:checks", + ] +} + +webrtc_fuzzer_test("rtp_format_vp8_fuzzer") { + sources = [ "rtp_format_vp8_fuzzer.cc" ] + deps = [ + "../../api:array_view", + "../../api/video:video_frame_type", + "../../modules/rtp_rtcp:rtp_rtcp", + "../../modules/rtp_rtcp:rtp_rtcp_format", + "../../modules/video_coding:codec_globals_headers", + "../../rtc_base:checks", + ] +} + +webrtc_fuzzer_test("rtp_format_vp9_fuzzer") { + sources = [ "rtp_format_vp9_fuzzer.cc" ] + deps = [ + "../../api:array_view", + "../../api/video:video_frame_type", + "../../modules/rtp_rtcp:rtp_rtcp", + "../../modules/rtp_rtcp:rtp_rtcp_format", + "../../modules/video_coding:codec_globals_headers", + "../../rtc_base:checks", + ] +} + webrtc_fuzzer_test("receive_side_congestion_controller_fuzzer") { sources = [ "receive_side_congestion_controller_fuzzer.cc" ] deps = [ "../../api:array_view", + "../../api:rtp_parameters", + "../../api/environment:environment_factory", "../../api/units:time_delta", "../../api/units:timestamp", "../../modules/congestion_controller", "../../modules/rtp_rtcp:rtp_rtcp_format", "../../system_wrappers", ] + seed_corpus = "corpora/receive-side-cc" } rtc_library("audio_decoder_fuzzer") { @@ -260,7 +348,6 @@ rtc_library("audio_decoder_fuzzer") { "../../modules/rtp_rtcp:rtp_rtcp_format", "../../rtc_base:checks", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } webrtc_fuzzer_test("audio_decoder_g722_fuzzer") { @@ -271,18 +358,11 @@ webrtc_fuzzer_test("audio_decoder_g722_fuzzer") { ] } -webrtc_fuzzer_test("audio_decoder_ilbc_fuzzer") { - sources = [ "audio_decoder_ilbc_fuzzer.cc" ] - deps = [ - ":audio_decoder_fuzzer", - "../../modules/audio_coding:ilbc", - ] -} - webrtc_fuzzer_test("audio_decoder_opus_fuzzer") { sources = [ "audio_decoder_opus_fuzzer.cc" ] deps = [ ":audio_decoder_fuzzer", + "..:explicit_key_value_config", "../../modules/audio_coding:webrtc_opus", ] } @@ -291,6 +371,7 @@ webrtc_fuzzer_test("audio_decoder_opus_redundant_fuzzer") { sources = [ "audio_decoder_opus_redundant_fuzzer.cc" ] deps = [ ":audio_decoder_fuzzer", + "..:explicit_key_value_config", "../../modules/audio_coding:webrtc_opus", ] } @@ -301,6 +382,7 @@ webrtc_fuzzer_test("audio_decoder_multiopus_fuzzer") { ":audio_decoder_fuzzer", "../../api/audio_codecs/opus:audio_decoder_multiopus", "../../api/audio_codecs/opus:audio_decoder_opus_config", + "../../rtc_base:checks", ] } @@ -317,6 +399,7 @@ webrtc_fuzzer_test("audio_decoder_pcm16b_fuzzer") { deps = [ ":audio_decoder_fuzzer", "../../modules/audio_coding:pcm16b", + "../../rtc_base:checks", ] } @@ -340,6 +423,8 @@ webrtc_fuzzer_test("audio_encoder_opus_fuzzer") { deps = [ ":audio_encoder_fuzzer", "../../api/audio_codecs/opus:audio_encoder_opus", + "../../api/environment", + "../../api/environment:environment_factory", "../../rtc_base:checks", ] } @@ -348,7 +433,6 @@ webrtc_fuzzer_test("turn_unwrap_fuzzer") { sources = [ "turn_unwrap_fuzzer.cc" ] deps = [ "../../media", - "../../media:rtc_media_base", "../../media:turn_utils", ] } @@ -357,12 +441,16 @@ 
webrtc_fuzzer_test("neteq_rtp_fuzzer") { sources = [ "neteq_rtp_fuzzer.cc" ] deps = [ "../../api:array_view", + "../../api:rtp_headers", + "../../api/audio_codecs:audio_codecs_api", "../../api/audio_codecs:builtin_audio_decoder_factory", + "../../api/neteq:neteq_api", "../../modules/audio_coding:neteq", "../../modules/audio_coding:neteq_test_tools", "../../modules/audio_coding:neteq_tools_minimal", "../../modules/audio_coding:pcm16b", "../../modules/rtp_rtcp:rtp_rtcp_format", + "../../rtc_base:checks", ] } @@ -370,11 +458,15 @@ webrtc_fuzzer_test("neteq_signal_fuzzer") { sources = [ "neteq_signal_fuzzer.cc" ] deps = [ "../../api:array_view", + "../../api:rtp_headers", + "../../api/audio_codecs:audio_codecs_api", "../../api/audio_codecs:builtin_audio_decoder_factory", + "../../api/neteq:neteq_api", "../../modules/audio_coding:neteq", "../../modules/audio_coding:neteq_test_tools", "../../modules/audio_coding:neteq_tools_minimal", "../../modules/audio_coding:pcm16b", + "../../rtc_base:checks", "../../rtc_base:random", "../../rtc_base:safe_conversions", ] @@ -383,6 +475,8 @@ webrtc_fuzzer_test("neteq_signal_fuzzer") { webrtc_fuzzer_test("residual_echo_detector_fuzzer") { sources = [ "residual_echo_detector_fuzzer.cc" ] deps = [ + "../../api:scoped_refptr", + "../../api/audio:audio_processing", "../../api/audio:echo_detector_creator", "../../rtc_base:checks", "../../rtc_base:refcount", @@ -401,17 +495,22 @@ webrtc_fuzzer_test("sdp_parser_fuzzer") { if (!build_with_chromium) { # This target depends on test infrastructure that can't be built # with Chromium at the moment. - # TODO(bugs.chromium.org/12534): Make this fuzzer build in Chromium. + # TODO: bugs.webrtc.org/42222682 - Make this fuzzer build in Chromium. webrtc_fuzzer_test("sdp_integration_fuzzer") { sources = [ "sdp_integration_fuzzer.cc" ] deps = [ + "..:wait_until", "../../api:libjingle_peerconnection_api", + "../../api:make_ref_counted", + "../../api:rtc_error_matchers", "../../pc:integration_test_helpers", "../../pc:libjingle_peerconnection", + "../../pc:pc_test_utils", + "../../rtc_base:checks", "../../test:test_support", + "//third_party/abseil-cpp/absl/strings", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] seed_corpus = "corpora/sdp-corpus" } } @@ -419,8 +518,9 @@ if (!build_with_chromium) { webrtc_fuzzer_test("stun_parser_fuzzer") { sources = [ "stun_parser_fuzzer.cc" ] deps = [ + "../../api:array_view", "../../api/transport:stun_types", - "../../p2p:rtc_p2p", + "../../rtc_base:byte_buffer", ] seed_corpus = "corpora/stun-corpus" dict = "corpora/stun.tokens" @@ -428,10 +528,7 @@ webrtc_fuzzer_test("stun_parser_fuzzer") { webrtc_fuzzer_test("stun_validator_fuzzer") { sources = [ "stun_validator_fuzzer.cc" ] - deps = [ - "../../api/transport:stun_types", - "../../p2p:rtc_p2p", - ] + deps = [ "../../api/transport:stun_types" ] seed_corpus = "corpora/stun-corpus" dict = "corpora/stun.tokens" } @@ -439,7 +536,7 @@ webrtc_fuzzer_test("stun_validator_fuzzer") { webrtc_fuzzer_test("pseudotcp_parser_fuzzer") { sources = [ "pseudotcp_parser_fuzzer.cc" ] deps = [ - "../../p2p:rtc_p2p", + "../../p2p:pseudo_tcp", "../../rtc_base:threading", ] } @@ -452,46 +549,57 @@ rtc_library("audio_processing_fuzzer_helper") { ] deps = [ ":fuzz_data_helper", + "../../api:array_view", + "../../api:scoped_refptr", "../../api/audio:audio_frame_api", + "../../api/audio:audio_processing", "../../modules/audio_processing", - "../../modules/audio_processing:api", "../../modules/audio_processing:audio_frame_proxies", "../../rtc_base:checks", ] - 
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } webrtc_fuzzer_test("audio_processing_fuzzer") { sources = [ "audio_processing_configs_fuzzer.cc" ] deps = [ ":audio_processing_fuzzer_helper", + "../../api:array_view", "../../api:scoped_refptr", "../../api/audio:aec3_factory", + "../../api/audio:audio_processing", + "../../api/audio:builtin_audio_processing_builder", + "../../api/audio:echo_control", "../../api/audio:echo_detector_creator", + "../../api/environment", + "../../api/environment:environment_factory", + "../../api/task_queue", "../../api/task_queue:default_task_queue_factory", "../../modules/audio_processing", - "../../modules/audio_processing:api", "../../modules/audio_processing:audio_buffer", "../../modules/audio_processing:audioproc_test_utils", "../../modules/audio_processing/aec3", "../../modules/audio_processing/aec_dump", "../../modules/audio_processing/aec_dump:aec_dump_impl", + "../../rtc_base:checks", "../../rtc_base:macromagic", - "../../rtc_base:rtc_task_queue", "../../rtc_base:safe_minmax", "../../system_wrappers:field_trial", + "//third_party/abseil-cpp/absl/base:nullability", + "//third_party/abseil-cpp/absl/memory", ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] seed_corpus = "corpora/audio_processing-corpus" } webrtc_fuzzer_test("audio_processing_sample_rate_fuzzer") { sources = [ "audio_processing_sample_rate_fuzzer.cc" ] deps = [ + "../../api:array_view", "../../api:scoped_refptr", "../../api/audio:audio_frame_api", + "../../api/audio:audio_processing", + "../../api/audio:builtin_audio_processing_builder", + "../../api/environment:environment_factory", "../../modules/audio_processing", - "../../modules/audio_processing:api", "../../modules/audio_processing:audio_frame_proxies", "../../modules/audio_processing:audioproc_test_utils", "../../rtc_base:checks", @@ -504,9 +612,11 @@ webrtc_fuzzer_test("agc_fuzzer") { sources = [ "agc_fuzzer.cc" ] deps = [ ":fuzz_data_helper", + "../../api:array_view", + "../../api/audio:audio_processing", "../../modules/audio_processing", - "../../modules/audio_processing:api", "../../modules/audio_processing:audio_buffer", + "../../modules/audio_processing/agc:gain_control_interface", "../../rtc_base:macromagic", "../../rtc_base:safe_minmax", ] @@ -519,7 +629,7 @@ webrtc_fuzzer_test("aec3_config_json_fuzzer") { deps = [ ":fuzz_data_helper", "../../api/audio:aec3_config", - "../../api/audio:aec3_config_json", + "../../modules/audio_processing:aec3_config_json", ] dict = "//testing/libfuzzer/fuzzers/dicts/json.dict" seed_corpus = "corpora/aec3-config-json-corpus" @@ -535,11 +645,13 @@ webrtc_fuzzer_test("aec3_fuzzer") { sources = [ "aec3_fuzzer.cc" ] deps = [ ":fuzz_data_helper", - "../../modules/audio_processing:api", + "../../api:array_view", + "../../api/audio:aec3_config", + "../../api/audio:audio_processing", + "../../api/environment:environment_factory", "../../modules/audio_processing:audio_buffer", "../../modules/audio_processing/aec3", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } webrtc_fuzzer_test("comfort_noise_decoder_fuzzer") { @@ -566,6 +678,7 @@ webrtc_fuzzer_test("rtp_dependency_descriptor_fuzzer") { seed_corpus = "corpora/dependency_descriptor-corpus" deps = [ "../../api:array_view", + "../../api/transport/rtp:dependency_descriptor", "../../common_video/generic_frame_descriptor", "../../modules/rtp_rtcp:rtp_rtcp_format", "../../rtc_base:checks", @@ -592,7 +705,13 @@ webrtc_fuzzer_test("rtp_frame_reference_finder_fuzzer") { deps = [ "../../api:rtp_packet_info", 
"../../api:scoped_refptr", + "../../api/video:encoded_image", + "../../api/video:video_frame", + "../../api/video:video_frame_type", + "../../api/video:video_rtp_headers", "../../modules/rtp_rtcp", + "../../modules/rtp_rtcp:rtp_video_header", + "../../modules/video_coding:codec_globals_headers", "../../modules/video_coding:video_coding", "../../system_wrappers", ] @@ -635,6 +754,7 @@ webrtc_fuzzer_test("sctp_utils_fuzzer") { webrtc_fuzzer_test("dcsctp_socket_fuzzer") { sources = [ "dcsctp_socket_fuzzer.cc" ] deps = [ + "../../api:array_view", "../../net/dcsctp/fuzzers:dcsctp_fuzzers", "../../net/dcsctp/public:socket", "../../net/dcsctp/public:types", @@ -647,6 +767,8 @@ webrtc_fuzzer_test("ssl_certificate_fuzzer") { sources = [ "ssl_certificate_fuzzer.cc" ] deps = [ "../:rtp_test_utils", + "../../rtc_base:buffer", + "../../rtc_base:digest", "../../rtc_base:ssl", "../../rtc_base:stringutils", ] @@ -654,7 +776,13 @@ webrtc_fuzzer_test("ssl_certificate_fuzzer") { webrtc_fuzzer_test("vp8_replay_fuzzer") { sources = [ "vp8_replay_fuzzer.cc" ] - deps = [ "utils:rtp_replayer" ] + deps = [ + "../../api:array_view", + "../../api/video_codecs:video_codecs_api", + "../../call:video_receive_stream_api", + "../../modules/video_coding:codec_globals_headers", + "utils:rtp_replayer", + ] seed_corpus = "corpora/rtpdump-corpus/vp8" } @@ -665,19 +793,26 @@ if (rtc_build_libvpx) { "..:test_support", "../../api:array_view", "../../api:field_trials_view", + "../../api/environment:environment_factory", + "../../api/video:encoded_image", + "../../api/video:video_bitrate_allocation", "../../api/video:video_frame", + "../../api/video:video_frame_type", "../../api/video_codecs:video_codecs_api", + "../../common_video/generic_frame_descriptor", "../../media:media_constants", + "../../modules/video_coding:codec_globals_headers", "../../modules/video_coding:frame_dependencies_calculator", + "../../modules/video_coding:video_codec_interface", "../../modules/video_coding:webrtc_libvpx_interface", "../../modules/video_coding:webrtc_vp9", + "../../rtc_base:checks", "../../rtc_base:safe_compare", - rtc_libvpx_dir, - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/strings:string_view", + rtc_libvpx_dir, ] seed_corpus = "corpora/vp9-encoder-references-corpus" defines = [ "RTC_ENABLE_VP9" ] @@ -686,6 +821,104 @@ if (rtc_build_libvpx) { webrtc_fuzzer_test("vp9_replay_fuzzer") { sources = [ "vp9_replay_fuzzer.cc" ] - deps = [ "utils:rtp_replayer" ] + deps = [ + "../../api/video_codecs:video_codecs_api", + "../../call:video_receive_stream_api", + "utils:rtp_replayer", + ] seed_corpus = "corpora/rtpdump-corpus/vp9" } + +webrtc_fuzzer_test("webrtc_base64_decode_fuzzer") { + sources = [ "base64_decode_fuzzer.cc" ] + deps = [ "../../rtc_base:base64" ] +} + +webrtc_fuzzer_test("webrtc_base64_encode_fuzzer") { + sources = [ "base64_encode_fuzzer.cc" ] + deps = [ + "../../api:array_view", + "../../rtc_base:base64", + "../../rtc_base:checks", + ] +} + +group("fuzzers") { + testonly = true + deps = [ + ":aec3_config_json_fuzzer", + ":aec3_fuzzer", + ":agc_fuzzer", + ":audio_decoder_fuzzer", + ":audio_decoder_g722_fuzzer", + ":audio_decoder_multiopus_fuzzer", + ":audio_decoder_opus_fuzzer", + ":audio_decoder_opus_redundant_fuzzer", + ":audio_decoder_pcm16b_fuzzer", + ":audio_decoder_pcm_fuzzer", + ":audio_encoder_fuzzer", + ":audio_encoder_opus_fuzzer", + 
":audio_processing_fuzzer", + ":audio_processing_sample_rate_fuzzer", + ":comfort_noise_decoder_fuzzer", + ":dcsctp_socket_fuzzer", + ":field_trial_fuzzer", + ":flexfec_header_reader_fuzzer", + ":flexfec_receiver_fuzzer", + ":flexfec_sender_fuzzer", + ":forward_error_correction_fuzzer", + ":frame_buffer_fuzzer", + ":h264_bitstream_parser_fuzzer", + ":h264_depacketizer_fuzzer", + ":neteq_rtp_fuzzer", + ":neteq_signal_fuzzer", + ":pseudotcp_parser_fuzzer", + ":receive_side_congestion_controller_fuzzer", + ":residual_echo_detector_fuzzer", + ":rtcp_receiver_fuzzer", + ":rtp_depacketizer_av1_assemble_frame_fuzzer", + ":rtp_dependency_descriptor_fuzzer", + ":rtp_format_h264_fuzzer", + ":rtp_format_vp8_fuzzer", + ":rtp_format_vp9_fuzzer", + ":rtp_frame_reference_finder_fuzzer", + ":rtp_packet_fuzzer", + ":rtp_packetizer_av1_fuzzer", + ":rtp_video_frame_assembler_fuzzer", + ":rtp_video_layers_allocation_fuzzer", + ":sctp_utils_fuzzer", + ":sdp_parser_fuzzer", + ":ssl_certificate_fuzzer", + ":string_to_number_fuzzer", + ":stun_parser_fuzzer", + ":stun_validator_fuzzer", + ":turn_unwrap_fuzzer", + ":ulpfec_generator_fuzzer", + ":ulpfec_header_reader_fuzzer", + ":ulpfec_receiver_fuzzer", + ":vp8_depacketizer_fuzzer", + ":vp8_qp_parser_fuzzer", + ":vp8_replay_fuzzer", + ":vp9_depacketizer_fuzzer", + ":vp9_qp_parser_fuzzer", + ":vp9_replay_fuzzer", + ":webrtc_base64_decode_fuzzer", + ":webrtc_base64_encode_fuzzer", + ] + if (rtc_use_h265) { + deps += [ + ":h265_bitstream_parser_fuzzer", + ":h265_depacketizer_fuzzer", + ] + } + + if (rtc_build_libvpx) { + deps += [ ":vp9_encoder_references_fuzzer" ] + } + + if (!build_with_chromium) { + # TODO: bugs.webrtc.org/42222682 - Add unconditionally when this fuzzer + # builds in Chromium. + deps += [ ":sdp_integration_fuzzer" ] + } +} diff --git a/test/fuzzers/DEPS b/test/fuzzers/DEPS index 50b1c8adce..ce07bcbe6c 100644 --- a/test/fuzzers/DEPS +++ b/test/fuzzers/DEPS @@ -2,4 +2,5 @@ include_rules = [ "+audio", "+pc", "+net/dcsctp", + "+p2p", ] diff --git a/test/fuzzers/aec3_config_json_fuzzer.cc b/test/fuzzers/aec3_config_json_fuzzer.cc index 626350c52c..a42e3fa89d 100644 --- a/test/fuzzers/aec3_config_json_fuzzer.cc +++ b/test/fuzzers/aec3_config_json_fuzzer.cc @@ -8,11 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include #include #include "api/audio/echo_canceller3_config.h" -#include "api/audio/echo_canceller3_config_json.h" -#include "test/fuzzers/fuzz_data_helper.h" +#include "modules/audio_processing/test/echo_canceller3_config_json.h" namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { diff --git a/test/fuzzers/aec3_fuzzer.cc b/test/fuzzers/aec3_fuzzer.cc index a12ca30f63..eb31d82daf 100644 --- a/test/fuzzers/aec3_fuzzer.cc +++ b/test/fuzzers/aec3_fuzzer.cc @@ -8,10 +8,16 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "absl/types/optional.h" +#include +#include +#include + +#include "api/array_view.h" +#include "api/audio/audio_processing.h" +#include "api/audio/echo_canceller3_config.h" +#include "api/environment/environment_factory.h" #include "modules/audio_processing/aec3/echo_canceller3.h" #include "modules/audio_processing/audio_buffer.h" -#include "modules/audio_processing/include/audio_processing.h" #include "test/fuzzers/fuzz_data_helper.h" namespace webrtc { @@ -40,7 +46,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { return; } - test::FuzzDataHelper fuzz_data(rtc::ArrayView(data, size)); + test::FuzzDataHelper fuzz_data(webrtc::ArrayView(data, size)); constexpr int kSampleRates[] = {16000, 32000, 48000}; const int sample_rate_hz = @@ -52,8 +58,8 @@ void FuzzOneInput(const uint8_t* data, size_t size) { const size_t num_capture_channels = 1 + fuzz_data.ReadOrDefaultValue(0) % (kMaxNumChannels - 1); - EchoCanceller3 aec3(EchoCanceller3Config(), - /*multichannel_config=*/absl::nullopt, sample_rate_hz, + EchoCanceller3 aec3(CreateEnvironment(), EchoCanceller3Config(), + /*multichannel_config=*/std::nullopt, sample_rate_hz, num_render_channels, num_capture_channels); AudioBuffer capture_audio(sample_rate_hz, num_capture_channels, diff --git a/test/fuzzers/agc_fuzzer.cc b/test/fuzzers/agc_fuzzer.cc index 0586708cc6..cf436255ae 100644 --- a/test/fuzzers/agc_fuzzer.cc +++ b/test/fuzzers/agc_fuzzer.cc @@ -8,13 +8,17 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include #include +#include +#include "api/array_view.h" +#include "api/audio/audio_processing.h" +#include "modules/audio_processing/agc/gain_control.h" #include "modules/audio_processing/audio_buffer.h" #include "modules/audio_processing/gain_control_impl.h" -#include "modules/audio_processing/include/audio_processing.h" #include "rtc_base/numerics/safe_minmax.h" -#include "rtc_base/thread_annotations.h" #include "test/fuzzers/fuzz_data_helper.h" namespace webrtc { @@ -46,18 +50,18 @@ void FuzzGainControllerConfig(test::FuzzDataHelper* fuzz_data, GainControl::Mode mode = fuzz_data->SelectOneOf(modes); const bool enable_limiter = fuzz_data->ReadOrDefaultValue(true); // The values are capped to comply with the API of webrtc::GainControl. 
- const int analog_level_min = - rtc::SafeClamp(fuzz_data->ReadOrDefaultValue(0), 0, 65534); + const int analog_level_min = webrtc::SafeClamp( + fuzz_data->ReadOrDefaultValue(0), 0, 65534); const int analog_level_max = - rtc::SafeClamp(fuzz_data->ReadOrDefaultValue(65535), - analog_level_min + 1, 65535); + webrtc::SafeClamp(fuzz_data->ReadOrDefaultValue(65535), + analog_level_min + 1, 65535); const int stream_analog_level = - rtc::SafeClamp(fuzz_data->ReadOrDefaultValue(30000), - analog_level_min, analog_level_max); - const int gain = - rtc::SafeClamp(fuzz_data->ReadOrDefaultValue(30), -1, 100); + webrtc::SafeClamp(fuzz_data->ReadOrDefaultValue(30000), + analog_level_min, analog_level_max); + const int gain = webrtc::SafeClamp( + fuzz_data->ReadOrDefaultValue(30), -1, 100); const int target_level_dbfs = - rtc::SafeClamp(fuzz_data->ReadOrDefaultValue(15), -1, 35); + webrtc::SafeClamp(fuzz_data->ReadOrDefaultValue(15), -1, 35); gc->set_mode(mode); gc->enable_limiter(enable_limiter); @@ -117,7 +121,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { if (size > 200000) { return; } - test::FuzzDataHelper fuzz_data(rtc::ArrayView(data, size)); + test::FuzzDataHelper fuzz_data(webrtc::ArrayView(data, size)); auto gci = std::make_unique(); FuzzGainController(&fuzz_data, gci.get()); } diff --git a/test/fuzzers/audio_decoder_fuzzer.cc b/test/fuzzers/audio_decoder_fuzzer.cc index 1db332eeb5..3da93721d7 100644 --- a/test/fuzzers/audio_decoder_fuzzer.cc +++ b/test/fuzzers/audio_decoder_fuzzer.cc @@ -10,12 +10,12 @@ #include "test/fuzzers/audio_decoder_fuzzer.h" +#include +#include #include -#include "absl/types/optional.h" #include "api/audio_codecs/audio_decoder.h" #include "modules/rtp_rtcp/source/byte_io.h" -#include "rtc_base/checks.h" namespace webrtc { namespace { diff --git a/test/fuzzers/audio_decoder_g722_fuzzer.cc b/test/fuzzers/audio_decoder_g722_fuzzer.cc index 08599aa333..0e75baf095 100644 --- a/test/fuzzers/audio_decoder_g722_fuzzer.cc +++ b/test/fuzzers/audio_decoder_g722_fuzzer.cc @@ -8,6 +8,10 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include +#include + #include "modules/audio_coding/codecs/g722/audio_decoder_g722.h" #include "test/fuzzers/audio_decoder_fuzzer.h" diff --git a/test/fuzzers/audio_decoder_ilbc_fuzzer.cc b/test/fuzzers/audio_decoder_ilbc_fuzzer.cc deleted file mode 100644 index 8548645c63..0000000000 --- a/test/fuzzers/audio_decoder_ilbc_fuzzer.cc +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h" -#include "test/fuzzers/audio_decoder_fuzzer.h" - -namespace webrtc { -void FuzzOneInput(const uint8_t* data, size_t size) { - if (size > 10000) { - return; - } - AudioDecoderIlbcImpl dec; - static const int kSampleRateHz = 8000; - static const size_t kAllocatedOuputSizeSamples = kSampleRateHz / 10; - int16_t output[kAllocatedOuputSizeSamples]; - FuzzAudioDecoder(DecoderFunctionType::kNormalDecode, data, size, &dec, - kSampleRateHz, sizeof(output), output); -} -} // namespace webrtc diff --git a/test/fuzzers/audio_decoder_multistream_opus_fuzzer.cc b/test/fuzzers/audio_decoder_multistream_opus_fuzzer.cc index 474a1cdc43..9d61908c4b 100644 --- a/test/fuzzers/audio_decoder_multistream_opus_fuzzer.cc +++ b/test/fuzzers/audio_decoder_multistream_opus_fuzzer.cc @@ -8,8 +8,14 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include +#include +#include + #include "api/audio_codecs/opus/audio_decoder_multi_channel_opus.h" #include "api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h" +#include "rtc_base/checks.h" #include "test/fuzzers/audio_decoder_fuzzer.h" namespace webrtc { diff --git a/test/fuzzers/audio_decoder_opus_fuzzer.cc b/test/fuzzers/audio_decoder_opus_fuzzer.cc index a015f98b5b..be4d62dbd7 100644 --- a/test/fuzzers/audio_decoder_opus_fuzzer.cc +++ b/test/fuzzers/audio_decoder_opus_fuzzer.cc @@ -8,14 +8,19 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include + #include "modules/audio_coding/codecs/opus/audio_decoder_opus.h" +#include "test/explicit_key_value_config.h" #include "test/fuzzers/audio_decoder_fuzzer.h" namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { const size_t channels = (size % 2) + 1; // 1 or 2 channels. - AudioDecoderOpusImpl dec(channels); - const int kSampleRateHz = 48000; + const int kSampleRateHz = 48'000; + AudioDecoderOpusImpl dec(test::ExplicitKeyValueConfig(""), channels, + kSampleRateHz); const size_t kAllocatedOuputSizeSamples = kSampleRateHz / 10; // 100 ms. int16_t output[kAllocatedOuputSizeSamples]; FuzzAudioDecoder(DecoderFunctionType::kNormalDecode, data, size, &dec, diff --git a/test/fuzzers/audio_decoder_opus_redundant_fuzzer.cc b/test/fuzzers/audio_decoder_opus_redundant_fuzzer.cc index efcba0f35d..ad1787c267 100644 --- a/test/fuzzers/audio_decoder_opus_redundant_fuzzer.cc +++ b/test/fuzzers/audio_decoder_opus_redundant_fuzzer.cc @@ -8,14 +8,19 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include + #include "modules/audio_coding/codecs/opus/audio_decoder_opus.h" +#include "test/explicit_key_value_config.h" #include "test/fuzzers/audio_decoder_fuzzer.h" namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { const size_t channels = (size % 2) + 1; // 1 or 2 channels. - AudioDecoderOpusImpl dec(channels); - const int kSampleRateHz = 48000; + const int kSampleRateHz = 48'000; + AudioDecoderOpusImpl dec(test::ExplicitKeyValueConfig(""), channels, + kSampleRateHz); const size_t kAllocatedOuputSizeSamples = kSampleRateHz / 10; // 100 ms. 
int16_t output[kAllocatedOuputSizeSamples]; FuzzAudioDecoder(DecoderFunctionType::kRedundantDecode, data, size, &dec, diff --git a/test/fuzzers/audio_decoder_pcm16b_fuzzer.cc b/test/fuzzers/audio_decoder_pcm16b_fuzzer.cc index 6e5d6e2190..90001465e5 100644 --- a/test/fuzzers/audio_decoder_pcm16b_fuzzer.cc +++ b/test/fuzzers/audio_decoder_pcm16b_fuzzer.cc @@ -8,9 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include #include #include "modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h" +#include "rtc_base/checks.h" #include "test/fuzzers/audio_decoder_fuzzer.h" namespace webrtc { diff --git a/test/fuzzers/audio_decoder_pcm_fuzzer.cc b/test/fuzzers/audio_decoder_pcm_fuzzer.cc index dbfcaf9976..efd723493e 100644 --- a/test/fuzzers/audio_decoder_pcm_fuzzer.cc +++ b/test/fuzzers/audio_decoder_pcm_fuzzer.cc @@ -8,6 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include #include #include "modules/audio_coding/codecs/g711/audio_decoder_pcm.h" diff --git a/test/fuzzers/audio_encoder_fuzzer.cc b/test/fuzzers/audio_encoder_fuzzer.cc index 837c26df56..58c9a40fba 100644 --- a/test/fuzzers/audio_encoder_fuzzer.cc +++ b/test/fuzzers/audio_encoder_fuzzer.cc @@ -10,8 +10,12 @@ #include "test/fuzzers/audio_encoder_fuzzer.h" +#include #include +#include +#include "api/array_view.h" +#include "api/audio_codecs/audio_encoder.h" #include "rtc_base/buffer.h" #include "rtc_base/checks.h" #include "test/fuzzers/fuzz_data_helper.h" @@ -21,7 +25,7 @@ namespace webrtc { // This function reads bytes from `data_view`, interprets them as RTP timestamp // and input samples, and sends them for encoding. The process continues until // no more data is available. -void FuzzAudioEncoder(rtc::ArrayView data_view, +void FuzzAudioEncoder(ArrayView data_view, std::unique_ptr encoder) { test::FuzzDataHelper data(data_view); const size_t block_size_samples = @@ -33,8 +37,8 @@ void FuzzAudioEncoder(rtc::ArrayView data_view, return; } - rtc::BufferT input_aligned(block_size_samples); - rtc::Buffer encoded; + BufferT input_aligned(block_size_samples); + Buffer encoded; // Each round in the loop below will need one block of samples + a 32-bit // timestamp from the fuzzer input. diff --git a/test/fuzzers/audio_encoder_fuzzer.h b/test/fuzzers/audio_encoder_fuzzer.h index 0c879df4d3..69ef08f9fa 100644 --- a/test/fuzzers/audio_encoder_fuzzer.h +++ b/test/fuzzers/audio_encoder_fuzzer.h @@ -18,7 +18,7 @@ namespace webrtc { -void FuzzAudioEncoder(rtc::ArrayView data_view, +void FuzzAudioEncoder(ArrayView data_view, std::unique_ptr encoder); } // namespace webrtc diff --git a/test/fuzzers/audio_encoder_opus_fuzzer.cc b/test/fuzzers/audio_encoder_opus_fuzzer.cc index d67e6d6067..7c65582ec7 100644 --- a/test/fuzzers/audio_encoder_opus_fuzzer.cc +++ b/test/fuzzers/audio_encoder_opus_fuzzer.cc @@ -8,20 +8,30 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include + #include "api/audio_codecs/opus/audio_encoder_opus.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "rtc_base/checks.h" #include "test/fuzzers/audio_encoder_fuzzer.h" namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { + // Create Environment once because creating it for each input noticably + // reduces the speed of the fuzzer. 
+ static const Environment* const env = new Environment(CreateEnvironment()); + AudioEncoderOpus::Config config; config.frame_size_ms = 20; RTC_CHECK(config.IsOk()); - constexpr int kPayloadType = 100; + FuzzAudioEncoder( /*data_view=*/{data, size}, - /*encoder=*/AudioEncoderOpus::MakeAudioEncoder(config, kPayloadType)); + /*encoder=*/AudioEncoderOpus::MakeAudioEncoder(*env, config, + {.payload_type = 100})); } } // namespace webrtc diff --git a/test/fuzzers/audio_processing_configs_fuzzer.cc b/test/fuzzers/audio_processing_configs_fuzzer.cc index 331a373f4e..80a5f626dc 100644 --- a/test/fuzzers/audio_processing_configs_fuzzer.cc +++ b/test/fuzzers/audio_processing_configs_fuzzer.cc @@ -9,18 +9,27 @@ */ #include +#include +#include +#include #include +#include -#include "absl/memory/memory.h" +#include "absl/base/nullability.h" +#include "api/array_view.h" +#include "api/audio/audio_processing.h" +#include "api/audio/builtin_audio_processing_builder.h" #include "api/audio/echo_canceller3_factory.h" +#include "api/audio/echo_control.h" #include "api/audio/echo_detector_creator.h" -#include "api/task_queue/default_task_queue_factory.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" #include "modules/audio_processing/aec_dump/aec_dump_factory.h" -#include "modules/audio_processing/include/audio_processing.h" -#include "modules/audio_processing/test/audio_processing_builder_for_testing.h" #include "rtc_base/arraysize.h" -#include "rtc_base/numerics/safe_minmax.h" -#include "rtc_base/task_queue.h" +#include "rtc_base/checks.h" #include "system_wrappers/include/field_trial.h" #include "test/fuzzers/audio_processing_fuzzer_helper.h" #include "test/fuzzers/fuzz_data_helper.h" @@ -33,9 +42,15 @@ const std::string kFieldTrialNames[] = { "WebRTC-Aec3ShortHeadroomKillSwitch", }; -rtc::scoped_refptr CreateApm(test::FuzzDataHelper* fuzz_data, - std::string* field_trial_string, - rtc::TaskQueue* worker_queue) { +const Environment& GetEnvironment() { + static const Environment* const env = new Environment(CreateEnvironment()); + return *env; +} + +webrtc::scoped_refptr CreateApm( + test::FuzzDataHelper* fuzz_data, + std::string* field_trial_string, + TaskQueueBase* absl_nonnull worker_queue) { // Parse boolean values for optionally enabling different // configurable public components of APM. bool use_ts = fuzz_data->ReadOrDefaultValue(true); @@ -103,12 +118,12 @@ rtc::scoped_refptr CreateApm(test::FuzzDataHelper* fuzz_data, apm_config.noise_suppression.enabled = use_ns; apm_config.transient_suppression.enabled = use_ts; - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting() + scoped_refptr apm = + BuiltinAudioProcessingBuilder() .SetEchoControlFactory(std::move(echo_control_factory)) .SetEchoDetector(use_red ? 
CreateEchoDetector() : nullptr) .SetConfig(apm_config) - .Create(); + .Build(GetEnvironment()); #ifdef WEBRTC_LINUX apm->AttachAecDump(AecDumpFactory::Create("/dev/null", -1, worker_queue)); @@ -117,26 +132,21 @@ rtc::scoped_refptr CreateApm(test::FuzzDataHelper* fuzz_data, return apm; } -TaskQueueFactory* GetTaskQueueFactory() { - static TaskQueueFactory* const factory = - CreateDefaultTaskQueueFactory().release(); - return factory; -} - } // namespace void FuzzOneInput(const uint8_t* data, size_t size) { if (size > 400000) { return; } - test::FuzzDataHelper fuzz_data(rtc::ArrayView(data, size)); + test::FuzzDataHelper fuzz_data(webrtc::ArrayView(data, size)); // This string must be in scope during execution, according to documentation // for field_trial.h. Hence it's created here and not in CreateApm. std::string field_trial_string = ""; - rtc::TaskQueue worker_queue(GetTaskQueueFactory()->CreateTaskQueue( - "rtc-low-prio", rtc::TaskQueue::Priority::LOW)); - auto apm = CreateApm(&fuzz_data, &field_trial_string, &worker_queue); + std::unique_ptr worker_queue = + GetEnvironment().task_queue_factory().CreateTaskQueue( + "rtc-low-prio", TaskQueueFactory::Priority::LOW); + auto apm = CreateApm(&fuzz_data, &field_trial_string, worker_queue.get()); if (apm) { FuzzAudioProcessing(&fuzz_data, std::move(apm)); diff --git a/test/fuzzers/audio_processing_fuzzer_helper.cc b/test/fuzzers/audio_processing_fuzzer_helper.cc index 5252918d77..6c716463c0 100644 --- a/test/fuzzers/audio_processing_fuzzer_helper.cc +++ b/test/fuzzers/audio_processing_fuzzer_helper.cc @@ -13,12 +13,17 @@ #include #include #include -#include +#include +#include +#include +#include "api/array_view.h" #include "api/audio/audio_frame.h" +#include "api/audio/audio_processing.h" +#include "api/scoped_refptr.h" #include "modules/audio_processing/include/audio_frame_proxies.h" -#include "modules/audio_processing/include/audio_processing.h" #include "rtc_base/checks.h" +#include "test/fuzzers/fuzz_data_helper.h" namespace webrtc { namespace { @@ -37,7 +42,7 @@ void GenerateFloatFrame(test::FuzzDataHelper* fuzz_data, std::fill(float_frames[i], float_frames[i] + samples_per_input_channel, 0); const size_t read_bytes = sizeof(float) * samples_per_input_channel; if (fuzz_data->CanReadBytes(read_bytes)) { - rtc::ArrayView byte_array = + ArrayView byte_array = fuzz_data->ReadByteArray(read_bytes); memmove(float_frames[i], byte_array.begin(), read_bytes); } @@ -71,7 +76,7 @@ void GenerateFixedFrame(test::FuzzDataHelper* fuzz_data, } // namespace void FuzzAudioProcessing(test::FuzzDataHelper* fuzz_data, - rtc::scoped_refptr apm) { + scoped_refptr apm) { AudioFrame fixed_frame; // Normal usage is up to 8 channels. Allowing to fuzz one beyond this allows // us to catch implicit assumptions about normal usage. 
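The audio-processing fuzzer hunks above replace the test-only AudioProcessingBuilderForTesting().Create() path with BuiltinAudioProcessingBuilder built against an Environment. A minimal sketch of that construction pattern, assuming Build() returns a scoped_refptr<AudioProcessing> and using only the config fields the fuzzer itself touches (BuildExampleApm is a hypothetical helper name, not part of the diff):

#include "api/audio/audio_processing.h"
#include "api/audio/builtin_audio_processing_builder.h"
#include "api/environment/environment_factory.h"
#include "api/scoped_refptr.h"

namespace webrtc {

// Sketch only: mirrors how the updated fuzzers assemble an APM instance.
scoped_refptr<AudioProcessing> BuildExampleApm() {
  AudioProcessing::Config config;
  config.noise_suppression.enabled = true;      // corresponds to use_ns in the fuzzer
  config.transient_suppression.enabled = true;  // corresponds to use_ts in the fuzzer
  return BuiltinAudioProcessingBuilder()
      .SetConfig(config)
      .Build(CreateEnvironment());  // Environment argument replaces the old Create().
}

}  // namespace webrtc

The fuzzers inline this logic in CreateApm() and FuzzOneInput(), and they create the Environment once and reuse it, as the comments in the surrounding hunks note, to keep per-input overhead low.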
diff --git a/test/fuzzers/audio_processing_fuzzer_helper.h b/test/fuzzers/audio_processing_fuzzer_helper.h index a604db8cef..9437a4a7cb 100644 --- a/test/fuzzers/audio_processing_fuzzer_helper.h +++ b/test/fuzzers/audio_processing_fuzzer_helper.h @@ -13,12 +13,12 @@ #include -#include "modules/audio_processing/include/audio_processing.h" +#include "api/audio/audio_processing.h" #include "test/fuzzers/fuzz_data_helper.h" namespace webrtc { void FuzzAudioProcessing(test::FuzzDataHelper* fuzz_data, - rtc::scoped_refptr apm); + scoped_refptr apm); } // namespace webrtc diff --git a/test/fuzzers/audio_processing_sample_rate_fuzzer.cc b/test/fuzzers/audio_processing_sample_rate_fuzzer.cc index ca3946988c..5c0dedfa45 100644 --- a/test/fuzzers/audio_processing_sample_rate_fuzzer.cc +++ b/test/fuzzers/audio_processing_sample_rate_fuzzer.cc @@ -10,11 +10,17 @@ #include #include -#include -#include - -#include "modules/audio_processing/include/audio_processing.h" -#include "modules/audio_processing/test/audio_processing_builder_for_testing.h" +#include +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/audio/audio_processing.h" +#include "api/audio/builtin_audio_processing_builder.h" +#include "api/environment/environment_factory.h" +#include "api/scoped_refptr.h" #include "rtc_base/checks.h" #include "test/fuzzers/fuzz_data_helper.h" @@ -79,7 +85,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { if (size > 100) { return; } - test::FuzzDataHelper fuzz_data(rtc::ArrayView(data, size)); + test::FuzzDataHelper fuzz_data(webrtc::ArrayView(data, size)); std::unique_ptr capture_processor = fuzz_data.ReadOrDefaultValue(true) @@ -89,13 +95,13 @@ void FuzzOneInput(const uint8_t* data, size_t size) { fuzz_data.ReadOrDefaultValue(true) ? std::make_unique() : nullptr; - rtc::scoped_refptr apm = - AudioProcessingBuilderForTesting() + scoped_refptr apm = + BuiltinAudioProcessingBuilder() .SetConfig({.pipeline = {.multi_channel_render = true, .multi_channel_capture = true}}) .SetCapturePostProcessing(std::move(capture_processor)) .SetRenderPreProcessing(std::move(render_processor)) - .Create(); + .Build(CreateEnvironment()); RTC_DCHECK(apm); std::array fixed_frame; diff --git a/sdk/android/src/jni/pc/audio.cc b/test/fuzzers/base64_decode_fuzzer.cc similarity index 55% rename from sdk/android/src/jni/pc/audio.cc rename to test/fuzzers/base64_decode_fuzzer.cc index 74c8b5547a..a986b3d240 100644 --- a/sdk/android/src/jni/pc/audio.cc +++ b/test/fuzzers/base64_decode_fuzzer.cc @@ -1,5 +1,5 @@ /* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2025 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,16 +8,17 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "sdk/android/src/jni/pc/audio.h" +#include +#include +#include -#include "modules/audio_processing/include/audio_processing.h" +#include "rtc_base/base64.h" namespace webrtc { -namespace jni { -rtc::scoped_refptr CreateAudioProcessing() { - return AudioProcessingBuilder().Create(); +void FuzzOneInput(const uint8_t* data, size_t size) { + std::string str(reinterpret_cast(data), size); + Base64Decode(str); } -} // namespace jni } // namespace webrtc diff --git a/test/fuzzers/base64_encode_fuzzer.cc b/test/fuzzers/base64_encode_fuzzer.cc new file mode 100644 index 0000000000..a8cb6213c2 --- /dev/null +++ b/test/fuzzers/base64_encode_fuzzer.cc @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2025 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include +#include +#include + +#include "api/array_view.h" +#include "rtc_base/base64.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +void FuzzOneInput(const uint8_t* data, size_t size) { + std::optional decoded_encoded_data = + Base64Decode(Base64Encode(webrtc::MakeArrayView(data, size))); + RTC_CHECK(decoded_encoded_data.has_value()); + RTC_CHECK_EQ(std::memcmp(data, decoded_encoded_data->data(), size), 0); +} + +} // namespace webrtc diff --git a/test/fuzzers/comfort_noise_decoder_fuzzer.cc b/test/fuzzers/comfort_noise_decoder_fuzzer.cc index 7f44af99fb..0a1c379294 100644 --- a/test/fuzzers/comfort_noise_decoder_fuzzer.cc +++ b/test/fuzzers/comfort_noise_decoder_fuzzer.cc @@ -9,6 +9,8 @@ */ #include +#include +#include #include "api/array_view.h" #include "modules/audio_coding/codecs/cng/webrtc_cng.h" @@ -19,7 +21,7 @@ namespace webrtc { namespace test { namespace { -void FuzzOneInputTest(rtc::ArrayView data) { +void FuzzOneInputTest(webrtc::ArrayView data) { FuzzDataHelper fuzz_data(data); ComfortNoiseDecoder cng_decoder; @@ -39,7 +41,7 @@ void FuzzOneInputTest(rtc::ArrayView data) { const size_t output_size = fuzz_data.SelectOneOf(kOutputSizes); const size_t num_generate_calls = std::min(fuzz_data.Read(), static_cast(17)); - rtc::BufferT output(output_size); + webrtc::BufferT output(output_size); for (size_t i = 0; i < num_generate_calls; ++i) { cng_decoder.Generate(output, new_period); } @@ -53,7 +55,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { if (size > 5000) { return; } - test::FuzzOneInputTest(rtc::ArrayView(data, size)); + test::FuzzOneInputTest(webrtc::ArrayView(data, size)); } } // namespace webrtc diff --git a/test/fuzzers/corpora/receive-side-cc/testcase-5414098152390656 b/test/fuzzers/corpora/receive-side-cc/testcase-5414098152390656 new file mode 100644 index 0000000000..98c423cdc2 Binary files /dev/null and b/test/fuzzers/corpora/receive-side-cc/testcase-5414098152390656 differ diff --git a/test/fuzzers/corpora/rtpdump-corpus/vp9/b353565743.rtpdump b/test/fuzzers/corpora/rtpdump-corpus/vp9/b353565743.rtpdump new file mode 100644 index 0000000000..2edbd90dba Binary files /dev/null and b/test/fuzzers/corpora/rtpdump-corpus/vp9/b353565743.rtpdump differ diff --git a/test/fuzzers/dcsctp_packet_fuzzer.cc b/test/fuzzers/dcsctp_packet_fuzzer.cc index 2fc3fe10f1..e6d1d25139 100644 --- a/test/fuzzers/dcsctp_packet_fuzzer.cc +++ b/test/fuzzers/dcsctp_packet_fuzzer.cc @@ 
-7,6 +7,11 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ +#include +#include +#include + +#include "api/array_view.h" #include "net/dcsctp/packet/chunk/chunk.h" #include "net/dcsctp/packet/sctp_packet.h" @@ -14,9 +19,9 @@ namespace webrtc { using dcsctp::SctpPacket; void FuzzOneInput(const uint8_t* data, size_t size) { - absl::optional c = - SctpPacket::Parse(rtc::ArrayView(data, size), - /*disable_checksum_verification=*/true); + std::optional c = + SctpPacket::Parse(webrtc::ArrayView(data, size), + {.disable_checksum_verification = true}); if (!c.has_value()) { return; diff --git a/test/fuzzers/dcsctp_socket_fuzzer.cc b/test/fuzzers/dcsctp_socket_fuzzer.cc index 390cbb7f6c..fb2556faad 100644 --- a/test/fuzzers/dcsctp_socket_fuzzer.cc +++ b/test/fuzzers/dcsctp_socket_fuzzer.cc @@ -7,12 +7,13 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ +#include +#include + +#include "api/array_view.h" #include "net/dcsctp/fuzzers/dcsctp_fuzzers.h" -#include "net/dcsctp/public/dcsctp_message.h" #include "net/dcsctp/public/dcsctp_options.h" -#include "net/dcsctp/public/dcsctp_socket.h" #include "net/dcsctp/socket/dcsctp_socket.h" -#include "rtc_base/logging.h" namespace webrtc { @@ -22,7 +23,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { options.disable_checksum_verification = true; dcsctp::DcSctpSocket socket("A", cb, nullptr, options); - dcsctp::dcsctp_fuzzers::FuzzSocket(socket, cb, - rtc::ArrayView(data, size)); + dcsctp::dcsctp_fuzzers::FuzzSocket( + socket, cb, webrtc::ArrayView(data, size)); } } // namespace webrtc diff --git a/modules/audio_coding/neteq/post_decode_vad_unittest.cc b/test/fuzzers/dtls_utils_fuzzer.cc similarity index 58% rename from modules/audio_coding/neteq/post_decode_vad_unittest.cc rename to test/fuzzers/dtls_utils_fuzzer.cc index da3e4e864e..622955864e 100644 --- a/modules/audio_coding/neteq/post_decode_vad_unittest.cc +++ b/test/fuzzers/dtls_utils_fuzzer.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -7,19 +7,15 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ +#include +#include -// Unit tests for PostDecodeVad class. - -#include "modules/audio_coding/neteq/post_decode_vad.h" - -#include "test/gtest.h" +#include "api/array_view.h" +#include "p2p/dtls/dtls_utils.h" namespace webrtc { - -TEST(PostDecodeVad, CreateAndDestroy) { - PostDecodeVad vad; +void FuzzOneInput(const uint8_t* data, size_t size) { + webrtc::GetDtlsHandshakeAcks(webrtc::MakeArrayView(data, size)); } -// TODO(hlundin): Write more tests. 
- } // namespace webrtc diff --git a/test/fuzzers/flexfec_header_reader_fuzzer.cc b/test/fuzzers/flexfec_header_reader_fuzzer.cc index 854cc8b811..d268537b03 100644 --- a/test/fuzzers/flexfec_header_reader_fuzzer.cc +++ b/test/fuzzers/flexfec_header_reader_fuzzer.cc @@ -9,6 +9,9 @@ */ #include +#include +#include +#include #include "api/scoped_refptr.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -22,7 +25,7 @@ using ReceivedFecPacket = ForwardErrorCorrection::ReceivedFecPacket; void FuzzOneInput(const uint8_t* data, size_t size) { ReceivedFecPacket packet; - packet.pkt = rtc::scoped_refptr(new Packet()); + packet.pkt = webrtc::scoped_refptr(new Packet()); const size_t packet_size = std::min(size, static_cast(IP_PACKET_SIZE)); packet.pkt->data.SetSize(packet_size); diff --git a/test/fuzzers/flexfec_receiver_fuzzer.cc b/test/fuzzers/flexfec_receiver_fuzzer.cc index 67d603d3fc..a35e0bd80d 100644 --- a/test/fuzzers/flexfec_receiver_fuzzer.cc +++ b/test/fuzzers/flexfec_receiver_fuzzer.cc @@ -8,9 +8,13 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include +#include +#include +#include +#include #include "modules/rtp_rtcp/include/flexfec_receiver.h" +#include "modules/rtp_rtcp/include/recovered_packet_receiver.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" diff --git a/test/fuzzers/flexfec_sender_fuzzer.cc b/test/fuzzers/flexfec_sender_fuzzer.cc index 8ddd1c0fe0..e7bf7427cc 100644 --- a/test/fuzzers/flexfec_sender_fuzzer.cc +++ b/test/fuzzers/flexfec_sender_fuzzer.cc @@ -8,11 +8,20 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include +#include #include +#include +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/rtp_parameters.h" +#include "modules/include/module_fec_types.h" #include "modules/rtp_rtcp/include/flexfec_sender.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/rtp_header_extension_size.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "system_wrappers/include/clock.h" @@ -30,14 +39,21 @@ const std::vector kNoRtpHeaderExtensionSizes; } // namespace void FuzzOneInput(const uint8_t* data, size_t size) { + // Create Environment once because creating it for each input noticably + // reduces the speed of the fuzzer. + static SimulatedClock* const clock = new SimulatedClock(1); + static const Environment* const env = + new Environment(CreateEnvironment(clock)); + size_t i = 0; if (size < 5 || size > 200) { return; } - SimulatedClock clock(1 + data[i++]); - FlexfecSender sender(kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, kNoMid, - kNoRtpHeaderExtensions, kNoRtpHeaderExtensionSizes, - nullptr /* rtp_state */, &clock); + // Set time to (1 + data[i++]); + clock->AdvanceTimeMicroseconds(1 + data[i++] - clock->TimeInMicroseconds()); + FlexfecSender sender(*env, kFlexfecPayloadType, kFlexfecSsrc, kMediaSsrc, + kNoMid, kNoRtpHeaderExtensions, + kNoRtpHeaderExtensionSizes, nullptr /* rtp_state */); FecProtectionParams params = { data[i++], static_cast(data[i++] % 100), data[i++] <= 127 ? 
kFecMaskRandom : kFecMaskBursty}; diff --git a/test/fuzzers/forward_error_correction_fuzzer.cc b/test/fuzzers/forward_error_correction_fuzzer.cc index 04a459bc71..6997dc3523 100644 --- a/test/fuzzers/forward_error_correction_fuzzer.cc +++ b/test/fuzzers/forward_error_correction_fuzzer.cc @@ -8,9 +8,14 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include +#include #include +#include "api/array_view.h" #include "api/scoped_refptr.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/forward_error_correction.h" #include "rtc_base/byte_buffer.h" @@ -34,7 +39,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { ForwardErrorCorrection::CreateFlexfec(kFecSsrc, kMediaSsrc); // Entropy from fuzzer. - rtc::ByteBufferReader fuzz_buffer(reinterpret_cast(data), size); + webrtc::ByteBufferReader fuzz_buffer(webrtc::MakeArrayView(data, size)); // Initial stream state. uint16_t media_seqnum; @@ -54,8 +59,9 @@ void FuzzOneInput(const uint8_t* data, size_t size) { ++i) { ForwardErrorCorrection::RecoveredPacket* recovered_packet = new ForwardErrorCorrection::RecoveredPacket(); - recovered_packet->pkt = rtc::scoped_refptr( - new ForwardErrorCorrection::Packet()); + recovered_packet->pkt = + webrtc::scoped_refptr( + new ForwardErrorCorrection::Packet()); recovered_packet->pkt->data.SetSize(kPacketSize); memset(recovered_packet->pkt->data.MutableData(), 0, kPacketSize); recovered_packet->ssrc = kMediaSsrc; @@ -65,7 +71,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { // New packets received from the network. ForwardErrorCorrection::ReceivedPacket received_packet; - received_packet.pkt = rtc::scoped_refptr( + received_packet.pkt = webrtc::scoped_refptr( new ForwardErrorCorrection::Packet()); received_packet.pkt->data.SetSize(kPacketSize); received_packet.pkt->data.EnsureCapacity(IP_PACKET_SIZE); @@ -75,8 +81,8 @@ void FuzzOneInput(const uint8_t* data, size_t size) { uint8_t packet_type; uint8_t packet_loss; while (true) { - if (!fuzz_buffer.ReadBytes(reinterpret_cast(packet_buffer), - kPacketSize)) { + if (!fuzz_buffer.ReadBytes( + webrtc::ArrayView(packet_buffer, kPacketSize))) { return; } if (!fuzz_buffer.ReadUInt8(&reordering)) diff --git a/test/fuzzers/frame_buffer_fuzzer.cc b/test/fuzzers/frame_buffer_fuzzer.cc index e58d5e9f98..c86499f0fd 100644 --- a/test/fuzzers/frame_buffer_fuzzer.cc +++ b/test/fuzzers/frame_buffer_fuzzer.cc @@ -8,6 +8,11 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include +#include +#include +#include + #include "api/array_view.h" #include "api/video/encoded_frame.h" #include "api/video/frame_buffer.h" @@ -35,7 +40,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { test::ScopedKeyValueConfig field_trials; FrameBuffer buffer(/*max_frame_slots=*/100, /*max_decode_history=*/1000, field_trials); - test::FuzzDataHelper helper(rtc::MakeArrayView(data, size)); + test::FuzzDataHelper helper(webrtc::MakeArrayView(data, size)); SeqNumUnwrapper unwrapper; while (helper.BytesLeft() > 0) { @@ -64,7 +69,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { } case 5: { auto frame = std::make_unique(); - frame->SetTimestamp(helper.ReadOrDefaultValue(0)); + frame->SetRtpTimestamp(helper.ReadOrDefaultValue(0)); int64_t wire_id = helper.ReadOrDefaultValue(0) & (kFrameIdLength - 1); frame->SetId(unwrapper.Unwrap(wire_id)); diff --git a/test/fuzzers/fuzz_data_helper.cc b/test/fuzzers/fuzz_data_helper.cc index 866f7bc4b8..5e8019d46d 100644 --- a/test/fuzzers/fuzz_data_helper.cc +++ b/test/fuzzers/fuzz_data_helper.cc @@ -10,11 +10,14 @@ #include "test/fuzzers/fuzz_data_helper.h" +#include + +#include "api/array_view.h" + namespace webrtc { namespace test { -FuzzDataHelper::FuzzDataHelper(rtc::ArrayView data) - : data_(data) {} +FuzzDataHelper::FuzzDataHelper(ArrayView data) : data_(data) {} } // namespace test } // namespace webrtc diff --git a/test/fuzzers/fuzz_data_helper.h b/test/fuzzers/fuzz_data_helper.h index cedc31d0a4..42617f1426 100644 --- a/test/fuzzers/fuzz_data_helper.h +++ b/test/fuzzers/fuzz_data_helper.h @@ -23,7 +23,7 @@ namespace test { // of when the end of the data has been reached. class FuzzDataHelper { public: - explicit FuzzDataHelper(rtc::ArrayView data); + explicit FuzzDataHelper(ArrayView data); // Returns true if n bytes can be read. bool CanReadBytes(size_t n) const { return data_ix_ + n <= data_.size(); } @@ -70,9 +70,9 @@ class FuzzDataHelper { return select_from[index]; } - rtc::ArrayView ReadByteArray(size_t bytes) { + ArrayView ReadByteArray(size_t bytes) { if (!CanReadBytes(bytes)) { - return rtc::ArrayView(nullptr, 0); + return ArrayView(nullptr, 0); } const size_t index_to_return = data_ix_; data_ix_ += bytes; @@ -95,7 +95,7 @@ class FuzzDataHelper { size_t BytesLeft() const { return data_.size() - data_ix_; } private: - rtc::ArrayView data_; + ArrayView data_; size_t data_ix_ = 0; }; diff --git a/test/fuzzers/h264_bitstream_parser_fuzzer.cc b/test/fuzzers/h264_bitstream_parser_fuzzer.cc index cd1128c0b4..4122fea08b 100644 --- a/test/fuzzers/h264_bitstream_parser_fuzzer.cc +++ b/test/fuzzers/h264_bitstream_parser_fuzzer.cc @@ -9,13 +9,16 @@ */ #include +#include + +#include "api/array_view.h" #include "common_video/h264/h264_bitstream_parser.h" namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { H264BitstreamParser h264_bitstream_parser; h264_bitstream_parser.ParseBitstream( - rtc::ArrayView(data, size)); + webrtc::ArrayView(data, size)); h264_bitstream_parser.GetLastSliceQp(); } } // namespace webrtc diff --git a/test/fuzzers/h264_depacketizer_fuzzer.cc b/test/fuzzers/h264_depacketizer_fuzzer.cc index 97127228ed..59e232e889 100644 --- a/test/fuzzers/h264_depacketizer_fuzzer.cc +++ b/test/fuzzers/h264_depacketizer_fuzzer.cc @@ -7,13 +7,17 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
*/ +#include +#include + #include "modules/rtp_rtcp/source/video_rtp_depacketizer_h264.h" +#include "rtc_base/copy_on_write_buffer.h" namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { if (size > 200000) return; VideoRtpDepacketizerH264 depacketizer; - depacketizer.Parse(rtc::CopyOnWriteBuffer(data, size)); + depacketizer.Parse(webrtc::CopyOnWriteBuffer(data, size)); } } // namespace webrtc diff --git a/test/fuzzers/h265_bitstream_parser_fuzzer.cc b/test/fuzzers/h265_bitstream_parser_fuzzer.cc index dbcb23bc8b..83ec007bb5 100644 --- a/test/fuzzers/h265_bitstream_parser_fuzzer.cc +++ b/test/fuzzers/h265_bitstream_parser_fuzzer.cc @@ -9,13 +9,16 @@ */ #include +#include + +#include "api/array_view.h" #include "common_video/h265/h265_bitstream_parser.h" namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { H265BitstreamParser h265_bitstream_parser; h265_bitstream_parser.ParseBitstream( - rtc::ArrayView(data, size)); + webrtc::ArrayView(data, size)); h265_bitstream_parser.GetLastSliceQp(); } } // namespace webrtc diff --git a/test/fuzzers/h265_depacketizer_fuzzer.cc b/test/fuzzers/h265_depacketizer_fuzzer.cc new file mode 100644 index 0000000000..22835e0fa1 --- /dev/null +++ b/test/fuzzers/h265_depacketizer_fuzzer.cc @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include +#include + +#include "modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h" +#include "rtc_base/copy_on_write_buffer.h" + +namespace webrtc { +void FuzzOneInput(const uint8_t* data, size_t size) { + if (size > 200000) + return; + VideoRtpDepacketizerH265 depacketizer; + depacketizer.Parse(webrtc::CopyOnWriteBuffer(data, size)); +} +} // namespace webrtc diff --git a/test/fuzzers/neteq_rtp_fuzzer.cc b/test/fuzzers/neteq_rtp_fuzzer.cc index 3caa5fe5de..734e7347cd 100644 --- a/test/fuzzers/neteq_rtp_fuzzer.cc +++ b/test/fuzzers/neteq_rtp_fuzzer.cc @@ -10,17 +10,27 @@ #include #include +#include #include +#include #include +#include +#include #include #include "api/array_view.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/neteq/neteq.h" +#include "api/rtp_headers.h" #include "modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h" #include "modules/audio_coding/neteq/tools/audio_checksum.h" #include "modules/audio_coding/neteq/tools/encode_neteq_input.h" +#include "modules/audio_coding/neteq/tools/neteq_input.h" #include "modules/audio_coding/neteq/tools/neteq_test.h" #include "modules/rtp_rtcp/source/byte_io.h" +#include "rtc_base/checks.h" namespace webrtc { namespace test { @@ -32,12 +42,12 @@ class SineGenerator : public EncodeNetEqInput::Generator { explicit SineGenerator(int sample_rate_hz) : sample_rate_hz_(sample_rate_hz) {} - rtc::ArrayView Generate(size_t num_samples) override { + webrtc::ArrayView Generate(size_t num_samples) override { if (samples_.size() < num_samples) { samples_.resize(num_samples); } - rtc::ArrayView output(samples_.data(), num_samples); + webrtc::ArrayView output(samples_.data(), num_samples); for (auto& x : output) { x = static_cast(2000.0 * 
std::sin(phase_)); phase_ += 2 * kPi * kFreqHz / sample_rate_hz_; @@ -55,7 +65,7 @@ class SineGenerator : public EncodeNetEqInput::Generator { class FuzzRtpInput : public NetEqInput { public: - explicit FuzzRtpInput(rtc::ArrayView data) : data_(data) { + explicit FuzzRtpInput(webrtc::ArrayView data) : data_(data) { AudioEncoderPcm16B::Config config; config.payload_type = kPayloadType; config.sample_rate_hz = 32000; @@ -69,15 +79,15 @@ class FuzzRtpInput : public NetEqInput { MaybeFuzzPayload(); } - absl::optional NextPacketTime() const override { + std::optional NextPacketTime() const override { return packet_->time_ms; } - absl::optional NextOutputEventTime() const override { + std::optional NextOutputEventTime() const override { return input_->NextOutputEventTime(); } - absl::optional NextSetMinimumDelayInfo() const override { + std::optional NextSetMinimumDelayInfo() const override { return input_->NextSetMinimumDelayInfo(); } @@ -98,7 +108,7 @@ class FuzzRtpInput : public NetEqInput { bool ended() const override { return ended_; } - absl::optional NextHeader() const override { + std::optional NextHeader() const override { RTC_DCHECK(packet_); return packet_->header; } @@ -153,7 +163,7 @@ class FuzzRtpInput : public NetEqInput { } bool ended_ = false; - rtc::ArrayView data_; + webrtc::ArrayView data_; size_t data_ix_ = 0; std::unique_ptr input_; std::unique_ptr packet_; @@ -162,7 +172,7 @@ class FuzzRtpInput : public NetEqInput { void FuzzOneInputTest(const uint8_t* data, size_t size) { std::unique_ptr input( - new FuzzRtpInput(rtc::ArrayView(data, size))); + new FuzzRtpInput(webrtc::ArrayView(data, size))); std::unique_ptr output(new AudioChecksum); NetEqTest::Callbacks callbacks; NetEq::Config config; diff --git a/test/fuzzers/neteq_signal_fuzzer.cc b/test/fuzzers/neteq_signal_fuzzer.cc index 485c38085e..1ba6d8135d 100644 --- a/test/fuzzers/neteq_signal_fuzzer.cc +++ b/test/fuzzers/neteq_signal_fuzzer.cc @@ -8,17 +8,29 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include #include +#include +#include #include +#include #include +#include +#include #include #include "api/array_view.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/neteq/neteq.h" +#include "api/rtp_headers.h" #include "modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h" #include "modules/audio_coding/neteq/tools/audio_checksum.h" #include "modules/audio_coding/neteq/tools/encode_neteq_input.h" +#include "modules/audio_coding/neteq/tools/neteq_input.h" #include "modules/audio_coding/neteq/tools/neteq_test.h" +#include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/random.h" #include "test/fuzzers/fuzz_data_helper.h" @@ -37,20 +49,21 @@ class SineAndNoiseGenerator : public EncodeNetEqInput::Generator { noise_generator_(fuzz_data_.ReadOrDefaultValueNotZero(1)) {} // Generates num_samples of the sine-gaussian mixture. - rtc::ArrayView Generate(size_t num_samples) override { + webrtc::ArrayView Generate(size_t num_samples) override { if (samples_.size() < num_samples) { samples_.resize(num_samples); } - rtc::ArrayView output(samples_.data(), num_samples); + webrtc::ArrayView output(samples_.data(), num_samples); // Randomize an amplitude between 0 and 32768; use 65000/2 if we are out of // fuzzer data. const float amplitude = fuzz_data_.ReadOrDefaultValue(65000) / 2; // Randomize a noise standard deviation between 0 and 1999. 
const float noise_std = fuzz_data_.ReadOrDefaultValue(0) % 2000; for (auto& x : output) { - x = rtc::saturated_cast(amplitude * std::sin(phase_) + - noise_generator_.Gaussian(0, noise_std)); + x = webrtc::saturated_cast( + amplitude * std::sin(phase_) + + noise_generator_.Gaussian(0, noise_std)); phase_ += 2 * kPi * kFreqHz / sample_rate_hz_; } return output; @@ -89,15 +102,15 @@ class FuzzSignalInput : public NetEqInput { output_event_period_ms_ = fuzz_data_.SelectOneOf(output_event_periods); } - absl::optional NextPacketTime() const override { + std::optional NextPacketTime() const override { return packet_->time_ms; } - absl::optional NextOutputEventTime() const override { + std::optional NextOutputEventTime() const override { return next_output_event_ms_; } - absl::optional NextSetMinimumDelayInfo() const override { + std::optional NextSetMinimumDelayInfo() const override { return input_->NextSetMinimumDelayInfo(); } @@ -133,7 +146,7 @@ class FuzzSignalInput : public NetEqInput { bool ended() const override { return ended_; } - absl::optional NextHeader() const override { + std::optional NextHeader() const override { RTC_DCHECK(packet_); return packet_->header; } @@ -160,7 +173,7 @@ void FuzzOneInputTest(const uint8_t* data, size_t size) { return; } - FuzzDataHelper fuzz_data(rtc::ArrayView(data, size)); + FuzzDataHelper fuzz_data(webrtc::ArrayView(data, size)); // Allowed sample rates and payload types used in the test. std::pair rate_types[] = { @@ -179,7 +192,6 @@ void FuzzOneInputTest(const uint8_t* data, size_t size) { // Configure NetEq and the NetEqTest object. NetEqTest::Callbacks callbacks; NetEq::Config config; - config.enable_post_decode_vad = true; config.enable_fast_accelerate = true; auto codecs = NetEqTest::StandardDecoderMap(); // rate_types contains the payload types that will be used for encoding. diff --git a/test/fuzzers/pseudotcp_parser_fuzzer.cc b/test/fuzzers/pseudotcp_parser_fuzzer.cc index 78ddf0e455..fb2ae2502a 100644 --- a/test/fuzzers/pseudotcp_parser_fuzzer.cc +++ b/test/fuzzers/pseudotcp_parser_fuzzer.cc @@ -15,28 +15,28 @@ #include "rtc_base/thread.h" namespace webrtc { -class FakeIPseudoTcpNotify : public cricket::IPseudoTcpNotify { +class FakeIPseudoTcpNotify : public webrtc::IPseudoTcpNotify { public: - void OnTcpOpen(cricket::PseudoTcp* tcp) {} - void OnTcpReadable(cricket::PseudoTcp* tcp) {} - void OnTcpWriteable(cricket::PseudoTcp* tcp) {} - void OnTcpClosed(cricket::PseudoTcp* tcp, uint32_t error) {} - - cricket::IPseudoTcpNotify::WriteResult TcpWritePacket(cricket::PseudoTcp* tcp, - const char* buffer, - size_t len) { - return cricket::IPseudoTcpNotify::WriteResult::WR_SUCCESS; + void OnTcpOpen(webrtc::PseudoTcp* tcp) {} + void OnTcpReadable(webrtc::PseudoTcp* tcp) {} + void OnTcpWriteable(webrtc::PseudoTcp* tcp) {} + void OnTcpClosed(webrtc::PseudoTcp* tcp, uint32_t error) {} + + webrtc::IPseudoTcpNotify::WriteResult TcpWritePacket(webrtc::PseudoTcp* tcp, + const char* buffer, + size_t len) { + return webrtc::IPseudoTcpNotify::WriteResult::WR_SUCCESS; } }; struct Environment { - explicit Environment(cricket::IPseudoTcpNotify* notifier) + explicit Environment(webrtc::IPseudoTcpNotify* notifier) : ptcp(notifier, 0) {} // We need the thread to avoid some uninteresting crashes, since the // production code expects there to be a thread object available. 
- rtc::AutoThread thread; - cricket::PseudoTcp ptcp; + webrtc::AutoThread thread; + webrtc::PseudoTcp ptcp; }; Environment* env = new Environment(new FakeIPseudoTcpNotify()); diff --git a/test/fuzzers/receive_side_congestion_controller_fuzzer.cc b/test/fuzzers/receive_side_congestion_controller_fuzzer.cc index 8f548c2b90..94c5f1deab 100644 --- a/test/fuzzers/receive_side_congestion_controller_fuzzer.cc +++ b/test/fuzzers/receive_side_congestion_controller_fuzzer.cc @@ -13,6 +13,8 @@ #include #include "api/array_view.h" +#include "api/environment/environment_factory.h" +#include "api/media_types.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "modules/congestion_controller/include/receive_side_congestion_controller.h" @@ -28,10 +30,9 @@ void FuzzOneInput(const uint8_t* data, size_t size) { Timestamp arrival_time = Timestamp::Micros(123'456'789); SimulatedClock clock(arrival_time); ReceiveSideCongestionController cc( - &clock, + CreateEnvironment(&clock), /*feedback_sender=*/[](auto...) {}, - /*remb_sender=*/[](auto...) {}, - /*network_state_estimator=*/nullptr); + /*remb_sender=*/[](auto...) {}); RtpHeaderExtensionMap extensions; extensions.Register(1); extensions.Register(2); @@ -47,7 +48,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { arrival_time += TimeDelta::Millis(ByteReader::ReadBigEndian(data)); data += sizeof(uint8_t); packet_size = std::min(end_data - data, packet_size); - auto raw_packet = rtc::MakeArrayView(data, packet_size); + auto raw_packet = webrtc::MakeArrayView(data, packet_size); data += packet_size; if (!rtp_packet.Parse(raw_packet)) { diff --git a/test/fuzzers/residual_echo_detector_fuzzer.cc b/test/fuzzers/residual_echo_detector_fuzzer.cc index 0efe81f220..18b48e0f8f 100644 --- a/test/fuzzers/residual_echo_detector_fuzzer.cc +++ b/test/fuzzers/residual_echo_detector_fuzzer.cc @@ -11,11 +11,13 @@ #include #include -#include #include +#include #include +#include "api/audio/audio_processing.h" #include "api/audio/echo_detector_creator.h" +#include "api/scoped_refptr.h" #include "rtc_base/checks.h" namespace webrtc { @@ -42,7 +44,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { read_idx += 2; std::bitset<16> call_order(call_order_int); - rtc::scoped_refptr echo_detector = CreateEchoDetector(); + webrtc::scoped_refptr echo_detector = CreateEchoDetector(); std::vector input(1); // Call AnalyzeCaptureAudio once to prevent the flushing of the buffer. echo_detector->AnalyzeCaptureAudio(input); diff --git a/test/fuzzers/rtcp_receiver_fuzzer.cc b/test/fuzzers/rtcp_receiver_fuzzer.cc index e61f6c06ac..0212afe388 100644 --- a/test/fuzzers/rtcp_receiver_fuzzer.cc +++ b/test/fuzzers/rtcp_receiver_fuzzer.cc @@ -7,11 +7,19 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
*/ + +#include +#include +#include + +#include "api/array_view.h" +#include "api/environment/environment_factory.h" +#include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.h" #include "modules/rtp_rtcp/source/rtcp_receiver.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" -#include "rtc_base/checks.h" #include "system_wrappers/include/clock.h" +#include "test/explicit_key_value_config.h" namespace webrtc { namespace { @@ -28,7 +36,7 @@ class NullModuleRtpRtcp : public RTCPReceiver::ModuleRtpRtcp { void OnRequestSendReport() override {} void OnReceivedNack(const std::vector&) override {} void OnReceivedRtcpReportBlocks( - rtc::ArrayView report_blocks) override {} + webrtc::ArrayView report_blocks) override {} }; } // namespace @@ -37,17 +45,18 @@ void FuzzOneInput(const uint8_t* data, size_t size) { if (size > kMaxInputLenBytes) { return; } - + test::ExplicitKeyValueConfig field_trials( + "WebRTC-RFC8888CongestionControlFeedback/Enabled/"); NullModuleRtpRtcp rtp_rtcp_module; SimulatedClock clock(1234); RtpRtcpInterface::Configuration config; - config.clock = &clock; config.rtcp_report_interval_ms = kRtcpIntervalMs; config.local_media_ssrc = 1; - RTCPReceiver receiver(config, &rtp_rtcp_module); + RTCPReceiver receiver(CreateEnvironment(&clock, &field_trials), config, + &rtp_rtcp_module); - receiver.IncomingPacket(rtc::MakeArrayView(data, size)); + receiver.IncomingPacket(webrtc::MakeArrayView(data, size)); } } // namespace webrtc diff --git a/test/fuzzers/rtp_depacketizer_av1_assemble_frame_fuzzer.cc b/test/fuzzers/rtp_depacketizer_av1_assemble_frame_fuzzer.cc index 7e9e70263e..f59a6ce2dc 100644 --- a/test/fuzzers/rtp_depacketizer_av1_assemble_frame_fuzzer.cc +++ b/test/fuzzers/rtp_depacketizer_av1_assemble_frame_fuzzer.cc @@ -18,10 +18,10 @@ namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { - std::vector> rtp_payloads; + std::vector> rtp_payloads; // Convert plain array of bytes into array of array bytes. - test::FuzzDataHelper fuzz_input(rtc::MakeArrayView(data, size)); + test::FuzzDataHelper fuzz_input(webrtc::MakeArrayView(data, size)); while (fuzz_input.CanReadBytes(sizeof(uint16_t))) { // In practice one rtp payload can be up to ~1200 - 1500 bytes. Majority // of the payload is just copied. To make fuzzing more efficient limit the diff --git a/test/fuzzers/rtp_dependency_descriptor_fuzzer.cc b/test/fuzzers/rtp_dependency_descriptor_fuzzer.cc index 82404f7264..a96d80045d 100644 --- a/test/fuzzers/rtp_dependency_descriptor_fuzzer.cc +++ b/test/fuzzers/rtp_dependency_descriptor_fuzzer.cc @@ -15,7 +15,7 @@ #include #include "api/array_view.h" -#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "api/transport/rtp/dependency_descriptor.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "rtc_base/checks.h" #include "test/fuzzers/fuzz_data_helper.h" @@ -28,7 +28,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { const FrameDependencyStructure* structure1_ptr = nullptr; std::unique_ptr structure2; - test::FuzzDataHelper fuzz_data(rtc::MakeArrayView(data, size)); + test::FuzzDataHelper fuzz_data(webrtc::MakeArrayView(data, size)); while (fuzz_data.CanReadBytes(1)) { // Treat next byte as size of the next extension. That aligns how // two-byte rtp header extension sizes are written. 
@@ -58,7 +58,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { uint8_t some_memory[256]; // That should be true because value_size <= next_size < 256 RTC_CHECK_LT(value_size, 256); - rtc::ArrayView<uint8_t> write_buffer(some_memory, value_size); + webrtc::ArrayView<uint8_t> write_buffer(some_memory, value_size); RTC_CHECK(RtpDependencyDescriptorExtension::Write(write_buffer, structure1, descriptor1)); diff --git a/test/fuzzers/rtp_format_h264_fuzzer.cc b/test/fuzzers/rtp_format_h264_fuzzer.cc new file mode 100644 index 0000000000..b14e2ac60b --- /dev/null +++ b/test/fuzzers/rtp_format_h264_fuzzer.cc @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include +#include + +#include "api/array_view.h" +#include "modules/rtp_rtcp/source/rtp_format.h" +#include "modules/rtp_rtcp/source/rtp_format_h264.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "rtc_base/checks.h" +#include "test/fuzzers/fuzz_data_helper.h" + +namespace webrtc { +void FuzzOneInput(const uint8_t* data, size_t size) { + test::FuzzDataHelper fuzz_input(webrtc::MakeArrayView(data, size)); + + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 1200; + // Read uint8_t to be sure reduction_lens are much smaller than + // max_payload_len and thus limits structure is valid. + limits.first_packet_reduction_len = fuzz_input.ReadOrDefaultValue<uint8_t>(0); + limits.last_packet_reduction_len = fuzz_input.ReadOrDefaultValue<uint8_t>(0); + limits.single_packet_reduction_len = + fuzz_input.ReadOrDefaultValue<uint8_t>(0); + const H264PacketizationMode kPacketizationModes[] = { + H264PacketizationMode::NonInterleaved, + H264PacketizationMode::SingleNalUnit}; + + H264PacketizationMode packetization_mode = + fuzz_input.SelectOneOf(kPacketizationModes); + + // Main function under test: RtpPacketizerH264's constructor. + RtpPacketizerH264 packetizer(fuzz_input.ReadByteArray(fuzz_input.BytesLeft()), + limits, packetization_mode); + + size_t num_packets = packetizer.NumPackets(); + if (num_packets == 0) { + return; + } + // When packetization was successful, validate NextPacket function too. + // While at it, check that packets respect the payload size limits. + RtpPacketToSend rtp_packet(nullptr); + // Single packet. + if (num_packets == 1) { + RTC_CHECK(packetizer.NextPacket(&rtp_packet)); + RTC_CHECK_LE(rtp_packet.payload_size(), + limits.max_payload_len - limits.single_packet_reduction_len); + return; + } + // First packet. + RTC_CHECK(packetizer.NextPacket(&rtp_packet)); + RTC_CHECK_LE(rtp_packet.payload_size(), + limits.max_payload_len - limits.first_packet_reduction_len); + // Middle packets. + for (size_t i = 1; i < num_packets - 1; ++i) { + rtp_packet.Clear(); + RTC_CHECK(packetizer.NextPacket(&rtp_packet)) + << "Failed to get packet#" << i; + RTC_CHECK_LE(rtp_packet.payload_size(), limits.max_payload_len) + << "Packet #" << i << " exceeds its limit"; + } + // Last packet.
+ rtp_packet.Clear(); + RTC_CHECK(packetizer.NextPacket(&rtp_packet)); + RTC_CHECK_LE(rtp_packet.payload_size(), + limits.max_payload_len - limits.last_packet_reduction_len); +} +} // namespace webrtc diff --git a/test/fuzzers/rtp_format_vp8_fuzzer.cc b/test/fuzzers/rtp_format_vp8_fuzzer.cc new file mode 100644 index 0000000000..8cada1bd83 --- /dev/null +++ b/test/fuzzers/rtp_format_vp8_fuzzer.cc @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include +#include + +#include "api/array_view.h" +#include "modules/rtp_rtcp/source/rtp_format.h" +#include "modules/rtp_rtcp/source/rtp_format_vp8.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "rtc_base/checks.h" +#include "test/fuzzers/fuzz_data_helper.h" + +namespace webrtc { +void FuzzOneInput(const uint8_t* data, size_t size) { + test::FuzzDataHelper fuzz_input(webrtc::MakeArrayView(data, size)); + + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 1200; + // Read uint8_t to be sure reduction_lens are much smaller than + // max_payload_len and thus limits structure is valid. + limits.first_packet_reduction_len = fuzz_input.ReadOrDefaultValue<uint8_t>(0); + limits.last_packet_reduction_len = fuzz_input.ReadOrDefaultValue<uint8_t>(0); + limits.single_packet_reduction_len = + fuzz_input.ReadOrDefaultValue<uint8_t>(0); + + RTPVideoHeaderVP8 hdr_info; + hdr_info.InitRTPVideoHeaderVP8(); + uint16_t picture_id = fuzz_input.ReadOrDefaultValue<uint16_t>(0); + hdr_info.pictureId = + picture_id >= 0x8000 ? kNoPictureId : picture_id & 0x7fff; + + // Main function under test: RtpPacketizerVp8's constructor. + RtpPacketizerVp8 packetizer(fuzz_input.ReadByteArray(fuzz_input.BytesLeft()), + limits, hdr_info); + + size_t num_packets = packetizer.NumPackets(); + if (num_packets == 0) { + return; + } + // When packetization was successful, validate NextPacket function too. + // While at it, check that packets respect the payload size limits. + RtpPacketToSend rtp_packet(nullptr); + // Single packet. + if (num_packets == 1) { + RTC_CHECK(packetizer.NextPacket(&rtp_packet)); + RTC_CHECK_LE(rtp_packet.payload_size(), + limits.max_payload_len - limits.single_packet_reduction_len); + return; + } + // First packet. + RTC_CHECK(packetizer.NextPacket(&rtp_packet)); + RTC_CHECK_LE(rtp_packet.payload_size(), + limits.max_payload_len - limits.first_packet_reduction_len); + // Middle packets. + for (size_t i = 1; i < num_packets - 1; ++i) { + RTC_CHECK(packetizer.NextPacket(&rtp_packet)) + << "Failed to get packet#" << i; + RTC_CHECK_LE(rtp_packet.payload_size(), limits.max_payload_len) + << "Packet #" << i << " exceeds its limit"; + } + // Last packet.
+ RTC_CHECK(packetizer.NextPacket(&rtp_packet)); + RTC_CHECK_LE(rtp_packet.payload_size(), + limits.max_payload_len - limits.last_packet_reduction_len); +} +} // namespace webrtc diff --git a/test/fuzzers/rtp_format_vp9_fuzzer.cc b/test/fuzzers/rtp_format_vp9_fuzzer.cc new file mode 100644 index 0000000000..e53b1b5477 --- /dev/null +++ b/test/fuzzers/rtp_format_vp9_fuzzer.cc @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include +#include + +#include "api/array_view.h" +#include "modules/rtp_rtcp/source/rtp_format.h" +#include "modules/rtp_rtcp/source/rtp_format_vp9.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "rtc_base/checks.h" +#include "test/fuzzers/fuzz_data_helper.h" + +namespace webrtc { +void FuzzOneInput(const uint8_t* data, size_t size) { + test::FuzzDataHelper fuzz_input(webrtc::MakeArrayView(data, size)); + + RtpPacketizer::PayloadSizeLimits limits; + limits.max_payload_len = 1200; + // Read uint8_t to be sure reduction_lens are much smaller than + // max_payload_len and thus limits structure is valid. + limits.first_packet_reduction_len = fuzz_input.ReadOrDefaultValue<uint8_t>(0); + limits.last_packet_reduction_len = fuzz_input.ReadOrDefaultValue<uint8_t>(0); + limits.single_packet_reduction_len = + fuzz_input.ReadOrDefaultValue<uint8_t>(0); + + RTPVideoHeaderVP9 hdr_info; + hdr_info.InitRTPVideoHeaderVP9(); + uint16_t picture_id = fuzz_input.ReadOrDefaultValue<uint16_t>(0); + hdr_info.picture_id = + picture_id >= 0x8000 ? kNoPictureId : picture_id & 0x7fff; + + // Main function under test: RtpPacketizerVp9's constructor. + RtpPacketizerVp9 packetizer(fuzz_input.ReadByteArray(fuzz_input.BytesLeft()), + limits, hdr_info); + + size_t num_packets = packetizer.NumPackets(); + if (num_packets == 0) { + return; + } + // When packetization was successful, validate NextPacket function too. + // While at it, check that packets respect the payload size limits. + RtpPacketToSend rtp_packet(nullptr); + // Single packet. + if (num_packets == 1) { + RTC_CHECK(packetizer.NextPacket(&rtp_packet)); + RTC_CHECK_LE(rtp_packet.payload_size(), + limits.max_payload_len - limits.single_packet_reduction_len); + return; + } + // First packet. + RTC_CHECK(packetizer.NextPacket(&rtp_packet)); + RTC_CHECK_LE(rtp_packet.payload_size(), + limits.max_payload_len - limits.first_packet_reduction_len); + // Middle packets. + for (size_t i = 1; i < num_packets - 1; ++i) { + RTC_CHECK(packetizer.NextPacket(&rtp_packet)) + << "Failed to get packet#" << i; + RTC_CHECK_LE(rtp_packet.payload_size(), limits.max_payload_len) + << "Packet #" << i << " exceeds its limit"; + } + // Last packet.
+ RTC_CHECK(packetizer.NextPacket(&rtp_packet)); + RTC_CHECK_LE(rtp_packet.payload_size(), + limits.max_payload_len - limits.last_packet_reduction_len); +} +} // namespace webrtc diff --git a/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc b/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc index b1cabc31ac..c8c0f6fd7b 100644 --- a/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc +++ b/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc @@ -8,10 +8,26 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include +#include +#include #include +#include +#include #include "api/rtp_packet_infos.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" +#include "api/video/video_timing.h" #include "modules/rtp_rtcp/source/frame_object.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/rtp_frame_reference_finder.h" namespace webrtc { @@ -23,7 +39,7 @@ class DataReader { template void CopyTo(T* object) { - static_assert(std::is_pod(), ""); + static_assert(std::is_trivial_v && std::is_standard_layout_v, ""); uint8_t* destination = reinterpret_cast(object); size_t object_size = sizeof(T); size_t num_bytes = std::min(size_ - offset_, object_size); @@ -56,9 +72,21 @@ class DataReader { size_t offset_ = 0; }; -absl::optional +RTPVideoHeaderH264 GenerateRTPVideoHeaderH264(DataReader* reader) { + RTPVideoHeaderH264 result; + result.nalu_type = reader->GetNum(); + result.packetization_type = reader->GetNum(); + int nalus_length = reader->GetNum(); + for (int i = 0; i < nalus_length; ++i) { + reader->CopyTo(&result.nalus.emplace_back()); + } + result.packetization_mode = reader->GetNum(); + return result; +} + +std::optional GenerateGenericFrameDependencies(DataReader* reader) { - absl::optional result; + std::optional result; uint8_t flags = reader->GetNum(); if (flags & 0b1000'0000) { // i.e. with 50% chance there are no generic dependencies. 
@@ -117,8 +145,10 @@ void FuzzOneInput(const uint8_t* data, size_t size) { &video_header.video_type_header.emplace()); break; case kVideoCodecH264: - reader.CopyTo( - &video_header.video_type_header.emplace()); + video_header.video_type_header = GenerateRTPVideoHeaderH264(&reader); + break; + case kVideoCodecH265: + // TODO(bugs.webrtc.org/13485) break; default: break; @@ -142,7 +172,8 @@ void FuzzOneInput(const uint8_t* data, size_t size) { kVideoRotation_0, VideoContentType::UNSPECIFIED, video_header, - /*color_space=*/absl::nullopt, + /*color_space=*/std::nullopt, + /*frame_instrumentation_data=*/std::nullopt, RtpPacketInfos(), EncodedImageBuffer::Create(/*size=*/0)); // clang-format on diff --git a/test/fuzzers/rtp_packet_fuzzer.cc b/test/fuzzers/rtp_packet_fuzzer.cc index 60afb986de..d2d4376bed 100644 --- a/test/fuzzers/rtp_packet_fuzzer.cc +++ b/test/fuzzers/rtp_packet_fuzzer.cc @@ -9,10 +9,21 @@ */ #include +#include +#include +#include +#include #include -#include "absl/types/optional.h" -#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "api/rtp_headers.h" +#include "api/transport/rtp/corruption_detection_message.h" +#include "api/video/color_space.h" +#include "api/video/video_content_type.h" +#include "api/video/video_layers_allocation.h" +#include "api/video/video_timing.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/corruption_detection_extension.h" +#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" @@ -72,11 +83,11 @@ void FuzzOneInput(const uint8_t* data, size_t size) { int32_t offset; packet.GetExtension(&offset); break; - case kRtpExtensionAudioLevel: - bool voice_activity; - uint8_t audio_level; - packet.GetExtension(&voice_activity, &audio_level); + case kRtpExtensionAudioLevel: { + AudioLevel audio_level; + packet.GetExtension(&audio_level); break; + } case kRtpExtensionCsrcAudioLevel: { std::vector audio_levels; packet.GetExtension(&audio_levels); @@ -101,7 +112,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { break; case kRtpExtensionTransportSequenceNumber02: { uint16_t seqnum; - absl::optional feedback_request; + std::optional feedback_request; packet.GetExtension(&seqnum, &feedback_request); break; @@ -146,7 +157,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { break; } case kRtpExtensionInbandComfortNoise: { - absl::optional noise_level; + std::optional noise_level; packet.GetExtension(&noise_level); break; } @@ -164,6 +175,11 @@ void FuzzOneInput(const uint8_t* data, size_t size) { // This extension requires state to read and so complicated that // deserves own fuzzer. 
break; + case kRtpExtensionCorruptionDetection: { + CorruptionDetectionMessage message; + packet.GetExtension(&message); + break; + } } } diff --git a/test/fuzzers/rtp_packetizer_av1_fuzzer.cc b/test/fuzzers/rtp_packetizer_av1_fuzzer.cc index e5550c1279..a1e0ed6a6a 100644 --- a/test/fuzzers/rtp_packetizer_av1_fuzzer.cc +++ b/test/fuzzers/rtp_packetizer_av1_fuzzer.cc @@ -10,6 +10,7 @@ #include #include +#include "api/array_view.h" #include "api/video/video_frame_type.h" #include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" @@ -19,7 +20,7 @@ namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { - test::FuzzDataHelper fuzz_input(rtc::MakeArrayView(data, size)); + test::FuzzDataHelper fuzz_input(webrtc::MakeArrayView(data, size)); RtpPacketizer::PayloadSizeLimits limits; limits.max_payload_len = 1200; diff --git a/test/fuzzers/rtp_video_layers_allocation_fuzzer.cc b/test/fuzzers/rtp_video_layers_allocation_fuzzer.cc index ae8b8728fb..c6f557f145 100644 --- a/test/fuzzers/rtp_video_layers_allocation_fuzzer.cc +++ b/test/fuzzers/rtp_video_layers_allocation_fuzzer.cc @@ -28,7 +28,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { if (size > kMaxSize) { return; } - auto raw = rtc::MakeArrayView(data, size); + auto raw = webrtc::MakeArrayView(data, size); VideoLayersAllocation allocation1; if (!RtpVideoLayersAllocationExtension::Parse(raw, &allocation1)) { @@ -43,7 +43,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { RTC_CHECK_LE(value_size, raw.size()); uint8_t some_memory[kMaxSize]; RTC_CHECK_LE(value_size, kMaxSize); - rtc::ArrayView write_buffer(some_memory, value_size); + webrtc::ArrayView write_buffer(some_memory, value_size); RTC_CHECK( RtpVideoLayersAllocationExtension::Write(write_buffer, allocation1)); diff --git a/test/fuzzers/sctp_utils_fuzzer.cc b/test/fuzzers/sctp_utils_fuzzer.cc index 249707514e..90d107ad33 100644 --- a/test/fuzzers/sctp_utils_fuzzer.cc +++ b/test/fuzzers/sctp_utils_fuzzer.cc @@ -20,7 +20,7 @@ namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { - rtc::CopyOnWriteBuffer payload(data, size); + webrtc::CopyOnWriteBuffer payload(data, size); std::string label; DataChannelInit config; IsOpenMessage(payload); diff --git a/test/fuzzers/sdp_integration_fuzzer.cc b/test/fuzzers/sdp_integration_fuzzer.cc index ece4b50505..c3ff73e0af 100644 --- a/test/fuzzers/sdp_integration_fuzzer.cc +++ b/test/fuzzers/sdp_integration_fuzzer.cc @@ -11,8 +11,20 @@ #include #include +#include +#include +#include + #include "absl/strings/string_view.h" +#include "api/jsep.h" +#include "api/make_ref_counted.h" +#include "api/peer_connection_interface.h" +#include "api/test/rtc_error_matchers.h" #include "pc/test/integration_test_helpers.h" +#include "pc/test/mock_peer_connection_observers.h" +#include "rtc_base/checks.h" +#include "test/gmock.h" +#include "test/wait_until.h" namespace webrtc { @@ -27,22 +39,26 @@ class FuzzerTest : public PeerConnectionIntegrationBaseTest { // generated are discarded. auto srd_observer = - rtc::make_ref_counted(); + webrtc::make_ref_counted(); SdpParseError error; - std::unique_ptr sdp( - CreateSessionDescription("offer", std::string(message), &error)); + std::unique_ptr sdp = + CreateSessionDescription(SdpType::kOffer, std::string(message), &error); caller()->pc()->SetRemoteDescription(std::move(sdp), srd_observer); // Wait a short time for observer to be called. Timeout is short // because the fuzzer should be trying many branches. 
- EXPECT_TRUE_WAIT(srd_observer->called(), 100); + EXPECT_THAT( + WaitUntil([&] { return srd_observer->called(); }, ::testing::IsTrue()), + IsRtcOk()); // If set-remote-description was successful, try to answer. auto sld_observer = - rtc::make_ref_counted(); + webrtc::make_ref_counted(); if (srd_observer->error().ok()) { caller()->pc()->SetLocalDescription(sld_observer); - EXPECT_TRUE_WAIT(sld_observer->called(), 100); + EXPECT_THAT(WaitUntil([&] { return sld_observer->called(); }, + ::testing::IsTrue()), + IsRtcOk()); } // If there is an EXPECT failure, die here. RTC_CHECK(!HasFailure()); diff --git a/test/fuzzers/sdp_parser_fuzzer.cc b/test/fuzzers/sdp_parser_fuzzer.cc index c85eab4047..04ee4af7bb 100644 --- a/test/fuzzers/sdp_parser_fuzzer.cc +++ b/test/fuzzers/sdp_parser_fuzzer.cc @@ -11,7 +11,10 @@ #include #include -#include "api/jsep_session_description.h" +#include +#include + +#include "api/jsep.h" namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { @@ -21,8 +24,8 @@ void FuzzOneInput(const uint8_t* data, size_t size) { std::string message(reinterpret_cast(data), size); webrtc::SdpParseError error; - std::unique_ptr sdp( - CreateSessionDescription("offer", message, &error)); + std::unique_ptr sdp = + CreateSessionDescription(SdpType::kOffer, message, &error); } } // namespace webrtc diff --git a/test/fuzzers/ssl_certificate_fuzzer.cc b/test/fuzzers/ssl_certificate_fuzzer.cc index 4bab5c8f02..ea1f4ee8e1 100644 --- a/test/fuzzers/ssl_certificate_fuzzer.cc +++ b/test/fuzzers/ssl_certificate_fuzzer.cc @@ -11,19 +11,20 @@ #include #include +#include #include +#include "rtc_base/buffer.h" #include "rtc_base/message_digest.h" #include "rtc_base/ssl_certificate.h" -#include "rtc_base/string_encode.h" namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { std::string pem_certificate(reinterpret_cast(data), size); - std::unique_ptr cert = - rtc::SSLCertificate::FromPEMString(pem_certificate); + std::unique_ptr cert = + SSLCertificate::FromPEMString(pem_certificate); if (cert == nullptr) { return; @@ -37,12 +38,10 @@ void FuzzOneInput(const uint8_t* data, size_t size) { std::string algorithm; cert->GetSignatureDigestAlgorithm(&algorithm); - unsigned char digest[rtc::MessageDigest::kMaxSize]; - size_t digest_len; - cert->ComputeDigest(algorithm, digest, rtc::MessageDigest::kMaxSize, - &digest_len); + Buffer buffer(0, MessageDigest::kMaxSize); + cert->ComputeDigest(algorithm, buffer); - rtc::Buffer der_buffer; + Buffer der_buffer; cert->ToDER(&der_buffer); } diff --git a/test/fuzzers/string_to_number_fuzzer.cc b/test/fuzzers/string_to_number_fuzzer.cc index 28b36a73ce..4d619c5a5e 100644 --- a/test/fuzzers/string_to_number_fuzzer.cc +++ b/test/fuzzers/string_to_number_fuzzer.cc @@ -19,17 +19,17 @@ namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { std::string number_to_parse(reinterpret_cast(data), size); - rtc::StringToNumber(number_to_parse); - rtc::StringToNumber(number_to_parse); - rtc::StringToNumber(number_to_parse); - rtc::StringToNumber(number_to_parse); - rtc::StringToNumber(number_to_parse); - rtc::StringToNumber(number_to_parse); - rtc::StringToNumber(number_to_parse); - rtc::StringToNumber(number_to_parse); - rtc::StringToNumber(number_to_parse); - rtc::StringToNumber(number_to_parse); - rtc::StringToNumber(number_to_parse); + webrtc::StringToNumber(number_to_parse); + webrtc::StringToNumber(number_to_parse); + webrtc::StringToNumber(number_to_parse); + webrtc::StringToNumber(number_to_parse); + 
webrtc::StringToNumber(number_to_parse); + webrtc::StringToNumber(number_to_parse); + webrtc::StringToNumber(number_to_parse); + webrtc::StringToNumber(number_to_parse); + webrtc::StringToNumber(number_to_parse); + webrtc::StringToNumber(number_to_parse); + webrtc::StringToNumber(number_to_parse); } } // namespace webrtc diff --git a/test/fuzzers/stun_parser_fuzzer.cc b/test/fuzzers/stun_parser_fuzzer.cc index 6ca9eac8b2..5a3188d201 100644 --- a/test/fuzzers/stun_parser_fuzzer.cc +++ b/test/fuzzers/stun_parser_fuzzer.cc @@ -11,18 +11,20 @@ #include #include +#include + +#include "api/array_view.h" #include "api/transport/stun.h" +#include "rtc_base/byte_buffer.h" namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { - const char* message = reinterpret_cast(data); - // Normally we'd check the integrity first, but those checks are // fuzzed separately in stun_validator_fuzzer.cc. We still want to // fuzz this target since the integrity checks could be forged by a // malicious adversary who receives a call. - std::unique_ptr stun_msg(new cricket::IceMessage()); - rtc::ByteBufferReader buf(message, size); + std::unique_ptr stun_msg(new webrtc::IceMessage()); + webrtc::ByteBufferReader buf(webrtc::MakeArrayView(data, size)); stun_msg->Read(&buf); stun_msg->ValidateMessageIntegrity(""); } diff --git a/test/fuzzers/stun_validator_fuzzer.cc b/test/fuzzers/stun_validator_fuzzer.cc index 421638db1b..84c900a7e0 100644 --- a/test/fuzzers/stun_validator_fuzzer.cc +++ b/test/fuzzers/stun_validator_fuzzer.cc @@ -17,7 +17,7 @@ namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { const char* message = reinterpret_cast(data); - cricket::StunMessage::ValidateFingerprint(message, size); - cricket::StunMessage::ValidateMessageIntegrityForTesting(message, size, ""); + webrtc::StunMessage::ValidateFingerprint(message, size); + webrtc::StunMessage::ValidateMessageIntegrityForTesting(message, size, ""); } } // namespace webrtc diff --git a/test/fuzzers/turn_unwrap_fuzzer.cc b/test/fuzzers/turn_unwrap_fuzzer.cc index 47ee7fd205..9da8e7addc 100644 --- a/test/fuzzers/turn_unwrap_fuzzer.cc +++ b/test/fuzzers/turn_unwrap_fuzzer.cc @@ -17,6 +17,6 @@ namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { size_t content_position; size_t content_size; - cricket::UnwrapTurnPacket(data, size, &content_position, &content_size); + webrtc::UnwrapTurnPacket(data, size, &content_position, &content_size); } } // namespace webrtc diff --git a/test/fuzzers/ulpfec_generator_fuzzer.cc b/test/fuzzers/ulpfec_generator_fuzzer.cc index 43d9450918..e250c8ca0c 100644 --- a/test/fuzzers/ulpfec_generator_fuzzer.cc +++ b/test/fuzzers/ulpfec_generator_fuzzer.cc @@ -8,11 +8,18 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include +#include #include +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "modules/include/module_common_types_public.h" +#include "modules/include/module_fec_types.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/byte_io.h" -#include "modules/rtp_rtcp/source/fec_test_helper.h" +#include "modules/rtp_rtcp/source/forward_error_correction_internal.h" +#include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/ulpfec_generator.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" @@ -26,8 +33,12 @@ constexpr uint8_t kRedPayloadType = 97; } // namespace void FuzzOneInput(const uint8_t* data, size_t size) { - SimulatedClock clock(1); - UlpfecGenerator generator(kRedPayloadType, kFecPayloadType, &clock); + // Create Environment once because creating it for each input noticeably + // reduces the speed of the fuzzer. + static const Environment* const env = + new Environment(CreateEnvironment(std::make_unique<SimulatedClock>(1))); + + UlpfecGenerator generator(*env, kRedPayloadType, kFecPayloadType); size_t i = 0; if (size < 4) return; @@ -41,7 +52,8 @@ void FuzzOneInput(const uint8_t* data, size_t size) { size_t payload_size = data[i++] % 10; if (i + payload_size + rtp_header_length + 2 > size) break; - rtc::CopyOnWriteBuffer packet(&data[i], payload_size + rtp_header_length); + webrtc::CopyOnWriteBuffer packet(&data[i], + payload_size + rtp_header_length); packet.EnsureCapacity(IP_PACKET_SIZE); // Write a valid parsable header (version = 2, no padding, no extensions, // no CSRCs). diff --git a/test/fuzzers/ulpfec_header_reader_fuzzer.cc b/test/fuzzers/ulpfec_header_reader_fuzzer.cc index 243cb4ed70..16f123a850 100644 --- a/test/fuzzers/ulpfec_header_reader_fuzzer.cc +++ b/test/fuzzers/ulpfec_header_reader_fuzzer.cc @@ -9,6 +9,9 @@ */ #include +#include +#include +#include #include "api/scoped_refptr.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -22,7 +25,7 @@ using ReceivedFecPacket = ForwardErrorCorrection::ReceivedFecPacket; void FuzzOneInput(const uint8_t* data, size_t size) { ReceivedFecPacket packet; - packet.pkt = rtc::scoped_refptr<Packet>(new Packet()); + packet.pkt = webrtc::scoped_refptr<Packet>(new Packet()); const size_t packet_size = std::min(size, static_cast<size_t>(IP_PACKET_SIZE)); packet.pkt->data.SetSize(packet_size); diff --git a/test/fuzzers/ulpfec_receiver_fuzzer.cc b/test/fuzzers/ulpfec_receiver_fuzzer.cc index 0a29ba3259..b73651a69c 100644 --- a/test/fuzzers/ulpfec_receiver_fuzzer.cc +++ b/test/fuzzers/ulpfec_receiver_fuzzer.cc @@ -8,12 +8,16 @@ * be found in the AUTHORS file in the root of the source tree.
*/ -#include +#include +#include +#include "api/array_view.h" +#include "modules/rtp_rtcp/include/recovered_packet_receiver.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/ulpfec_receiver.h" +#include "system_wrappers/include/clock.h" #include "test/fuzzers/fuzz_data_helper.h" namespace webrtc { @@ -38,7 +42,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { DummyCallback callback; UlpfecReceiver receiver(ulpfec_ssrc, 0, &callback, Clock::GetRealTimeClock()); - test::FuzzDataHelper fuzz_data(rtc::MakeArrayView(data, size)); + test::FuzzDataHelper fuzz_data(webrtc::MakeArrayView(data, size)); while (fuzz_data.CanReadBytes(kMinDataNeeded)) { size_t packet_length = kRtpHeaderSize + fuzz_data.Read(); auto raw_packet = fuzz_data.ReadByteArray(packet_length); diff --git a/test/fuzzers/utils/BUILD.gn b/test/fuzzers/utils/BUILD.gn index dfb617857c..d0901831f1 100644 --- a/test/fuzzers/utils/BUILD.gn +++ b/test/fuzzers/utils/BUILD.gn @@ -15,10 +15,8 @@ rtc_library("rtp_replayer") { "rtp_replayer.h", ] deps = [ - "../../../api/rtc_event_log", - "../../../api/task_queue:default_task_queue_factory", + "../../../api/environment:environment_factory", "../../../api/test/video:function_video_factory", - "../../../api/transport:field_trial_based_config", "../../../api/units:timestamp", "../../../api/video_codecs:video_codecs_api", "../../../call", @@ -43,6 +41,6 @@ rtc_library("rtp_replayer") { "../../../test:test_support", "../../../test:test_video_capturer", "../../../test:video_test_common", + "//third_party/abseil-cpp/absl/memory", ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory:memory" ] } diff --git a/test/fuzzers/utils/rtp_replayer.cc b/test/fuzzers/utils/rtp_replayer.cc index 83f894dc28..6af6d330de 100644 --- a/test/fuzzers/utils/rtp_replayer.cc +++ b/test/fuzzers/utils/rtp_replayer.cc @@ -16,8 +16,7 @@ #include #include "absl/memory/memory.h" -#include "api/task_queue/default_task_queue_factory.h" -#include "api/transport/field_trial_based_config.h" +#include "api/environment/environment_factory.h" #include "api/units/timestamp.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" @@ -48,7 +47,7 @@ void RtpReplayer::Replay( const uint8_t* rtp_dump_data, size_t rtp_dump_size) { RunLoop loop; - rtc::ScopedBaseFakeClock fake_clock; + ScopedBaseFakeClock fake_clock; // Work around: webrtc calls webrtc::Random(clock.TimeInMicroseconds()) // everywhere and Random expects non-zero seed. Let's set the clock non-zero @@ -73,14 +72,8 @@ void RtpReplayer::Replay( } // Setup the video streams based on the configuration. - webrtc::RtcEventLogNull event_log; - std::unique_ptr task_queue_factory = - CreateDefaultTaskQueueFactory(); - Call::Config call_config(&event_log); - call_config.task_queue_factory = task_queue_factory.get(); - FieldTrialBasedConfig field_trials; - call_config.trials = &field_trials; - std::unique_ptr call(Call::Create(call_config)); + CallConfig call_config(CreateEnvironment()); + std::unique_ptr call(Call::Create(std::move(call_config))); SetupVideoStreams(&receive_stream_configs, stream_state.get(), call.get()); // Start replaying the provided stream now that it has been configured. 
@@ -156,14 +149,14 @@ std::unique_ptr RtpReplayer::CreateRtpReader( } void RtpReplayer::ReplayPackets( - rtc::FakeClock* clock, + FakeClock* clock, Call* call, test::RtpFileReader* rtp_reader, const RtpPacketReceived::ExtensionManager& extensions) { int64_t replay_start_ms = -1; while (true) { - int64_t now_ms = rtc::TimeMillis(); + int64_t now_ms = TimeMillis(); if (replay_start_ms == -1) { replay_start_ms = now_ms; } diff --git a/test/fuzzers/utils/rtp_replayer.h b/test/fuzzers/utils/rtp_replayer.h index ae94a640a5..c6fa74f0f6 100644 --- a/test/fuzzers/utils/rtp_replayer.h +++ b/test/fuzzers/utils/rtp_replayer.h @@ -18,7 +18,6 @@ #include #include -#include "api/rtc_event_log/rtc_event_log.h" #include "api/test/video/function_video_decoder_factory.h" #include "api/video_codecs/video_decoder.h" #include "call/call.h" @@ -43,7 +42,7 @@ class RtpReplayer final { // rtp receival code path. struct StreamState { test::NullTransport transport; - std::vector>> sinks; + std::vector>> sinks; std::vector receive_streams; std::unique_ptr decoder_factory; }; @@ -80,7 +79,7 @@ class RtpReplayer final { size_t rtp_dump_size); // Replays each packet to from the RtpDump. - static void ReplayPackets(rtc::FakeClock* clock, + static void ReplayPackets(FakeClock* clock, Call* call, test::RtpFileReader* rtp_reader, const RtpHeaderExtensionMap& extensions); diff --git a/test/fuzzers/vp8_depacketizer_fuzzer.cc b/test/fuzzers/vp8_depacketizer_fuzzer.cc index 1691b55cc0..345fe5cefe 100644 --- a/test/fuzzers/vp8_depacketizer_fuzzer.cc +++ b/test/fuzzers/vp8_depacketizer_fuzzer.cc @@ -7,6 +7,9 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ +#include +#include + #include "api/array_view.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h" @@ -14,7 +17,7 @@ namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { RTPVideoHeader video_header; - VideoRtpDepacketizerVp8::ParseRtpPayload(rtc::MakeArrayView(data, size), + VideoRtpDepacketizerVp8::ParseRtpPayload(webrtc::MakeArrayView(data, size), &video_header); } } // namespace webrtc diff --git a/test/fuzzers/vp8_qp_parser_fuzzer.cc b/test/fuzzers/vp8_qp_parser_fuzzer.cc index 2ecfd820c8..597e1f492f 100644 --- a/test/fuzzers/vp8_qp_parser_fuzzer.cc +++ b/test/fuzzers/vp8_qp_parser_fuzzer.cc @@ -7,6 +7,9 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. 
*/ +#include +#include + #include "modules/video_coding/utility/vp8_header_parser.h" namespace webrtc { diff --git a/test/fuzzers/vp8_replay_fuzzer.cc b/test/fuzzers/vp8_replay_fuzzer.cc index 819b9626f9..01a0ff9e8e 100644 --- a/test/fuzzers/vp8_replay_fuzzer.cc +++ b/test/fuzzers/vp8_replay_fuzzer.cc @@ -12,7 +12,11 @@ #include #include +#include +#include +#include "api/video_codecs/sdp_video_format.h" +#include "call/video_receive_stream.h" #include "test/fuzzers/utils/rtp_replayer.h" namespace webrtc { @@ -22,7 +26,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { VideoReceiveStreamInterface::Config vp8_config(&(stream_state->transport)); VideoReceiveStreamInterface::Decoder vp8_decoder; - vp8_decoder.video_format = SdpVideoFormat("VP8"); + vp8_decoder.video_format = SdpVideoFormat::VP8(); vp8_decoder.payload_type = 125; vp8_config.decoders.push_back(std::move(vp8_decoder)); diff --git a/test/fuzzers/vp9_depacketizer_fuzzer.cc b/test/fuzzers/vp9_depacketizer_fuzzer.cc index ae36a94931..68758e8b7e 100644 --- a/test/fuzzers/vp9_depacketizer_fuzzer.cc +++ b/test/fuzzers/vp9_depacketizer_fuzzer.cc @@ -7,6 +7,9 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ +#include +#include + #include "api/array_view.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h" @@ -14,7 +17,7 @@ namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { RTPVideoHeader video_header; - VideoRtpDepacketizerVp9::ParseRtpPayload(rtc::MakeArrayView(data, size), + VideoRtpDepacketizerVp9::ParseRtpPayload(webrtc::MakeArrayView(data, size), &video_header); } } // namespace webrtc diff --git a/test/fuzzers/vp9_encoder_references_fuzzer.cc b/test/fuzzers/vp9_encoder_references_fuzzer.cc index 2d34789fb5..8f6fcbd7b2 100644 --- a/test/fuzzers/vp9_encoder_references_fuzzer.cc +++ b/test/fuzzers/vp9_encoder_references_fuzzer.cc @@ -10,19 +10,36 @@ #include +#include +#include +#include +#include + #include "absl/algorithm/container.h" #include "absl/base/macros.h" #include "absl/container/inlined_vector.h" +#include "absl/strings/string_view.h" #include "api/array_view.h" +#include "api/environment/environment_factory.h" #include "api/field_trials_view.h" +#include "api/video/encoded_image.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_codec_type.h" #include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" +#include "api/video_codecs/spatial_layer.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" -#include "media/base/media_constants.h" +#include "common_video/generic_frame_descriptor/generic_frame_info.h" +#include "modules/video_coding/codecs/interface/common_constants.h" #include "modules/video_coding/codecs/interface/libvpx_interface.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h" #include "modules/video_coding/frame_dependencies_calculator.h" -#include "rtc_base/numerics/safe_compare.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "rtc_base/checks.h" #include "test/fuzzers/fuzz_data_helper.h" // Fuzzer simulates various svc configurations and libvpx encoder dropping @@ -114,8 +131,9 @@ class FrameValidator : public EncodedImageCallback { } } - void 
CheckGenericReferences(rtc::ArrayView frame_dependencies, - const GenericFrameInfo& generic_info) const { + void CheckGenericReferences( + webrtc::ArrayView frame_dependencies, + const GenericFrameInfo& generic_info) const { for (int64_t dependency_frame_id : frame_dependencies) { RTC_CHECK_GE(dependency_frame_id, 0); const LayerFrame& dependency = Frame(dependency_frame_id); @@ -125,7 +143,7 @@ class FrameValidator : public EncodedImageCallback { } void CheckGenericAndCodecSpecificReferencesAreConsistent( - rtc::ArrayView frame_dependencies, + webrtc::ArrayView frame_dependencies, const CodecSpecificInfo& info, const LayerFrame& layer_frame) const { const CodecSpecificInfoVP9& vp9_info = info.codecSpecific.VP9; @@ -134,7 +152,7 @@ class FrameValidator : public EncodedImageCallback { RTC_CHECK_EQ(generic_info.spatial_id, layer_frame.spatial_id); RTC_CHECK_EQ(generic_info.temporal_id, layer_frame.temporal_id); auto picture_id_diffs = - rtc::MakeArrayView(vp9_info.p_diff, vp9_info.num_ref_pics); + webrtc::MakeArrayView(vp9_info.p_diff, vp9_info.num_ref_pics); RTC_CHECK_EQ( frame_dependencies.size(), picture_id_diffs.size() + (vp9_info.inter_layer_predicted ? 1 : 0)); @@ -175,7 +193,6 @@ class FieldTrials : public FieldTrialsView { ~FieldTrials() override = default; std::string Lookup(absl::string_view key) const override { static constexpr absl::string_view kBinaryFieldTrials[] = { - "WebRTC-Vp9ExternalRefCtrl", "WebRTC-Vp9IssueKeyFrameOnLayerDeactivation", }; for (size_t i = 0; i < ABSL_ARRAYSIZE(kBinaryFieldTrials); ++i) { @@ -188,11 +205,20 @@ class FieldTrials : public FieldTrialsView { if (key == "WebRTC-CongestionWindow" || key == "WebRTC-UseBaseHeavyVP8TL3RateAllocation" || key == "WebRTC-VideoRateControl" || + key == "WebRTC-GetEncoderInfoOverride" || + key == "WebRTC-VP9-GetEncoderInfoOverride" || key == "WebRTC-VP9-PerformanceFlags" || - key == "WebRTC-VP9VariableFramerateScreenshare" || - key == "WebRTC-VP9QualityScaler") { + key == "WebRTC-VP9QualityScaler" || + key == "WebRTC-VP9-SvcForSimulcast" || + key == "WebRTC-StableTargetRate") { return ""; } + + // TODO: bugs.webrtc.org/15827 - Fuzz frame drop config. + if (key == "WebRTC-LibvpxVp9Encoder-SvcFrameDropConfig") { + return ""; + } + // Crash when using unexpected field trial to decide if it should be fuzzed // or have a constant value. RTC_CHECK(false) << "Unfuzzed field trial " << key << "\n"; @@ -522,7 +548,7 @@ static_assert(DropBelow(0b1101, /*sid=*/3, 4) == false, ""); } // namespace void FuzzOneInput(const uint8_t* data, size_t size) { - FuzzDataHelper helper(rtc::MakeArrayView(data, size)); + FuzzDataHelper helper(webrtc::MakeArrayView(data, size)); FrameValidator validator; FieldTrials field_trials(helper); @@ -530,8 +556,8 @@ void FuzzOneInput(const uint8_t* data, size_t size) { LibvpxState state; // Initialize encoder - LibvpxVp9Encoder encoder(cricket::CreateVideoCodec(cricket::kVp9CodecName), - std::make_unique(&state), field_trials); + LibvpxVp9Encoder encoder(CreateEnvironment(&field_trials), {}, + std::make_unique(&state)); VideoCodec codec = CodecSettings(helper); if (encoder.InitEncode(&codec, EncoderSettings()) != WEBRTC_VIDEO_CODEC_OK) { return; @@ -574,21 +600,24 @@ void FuzzOneInput(const uint8_t* data, size_t size) { // Don't encode disabled spatial layers. 
continue; } - bool drop = true; - switch (state.frame_drop.framedrop_mode) { - case FULL_SUPERFRAME_DROP: - drop = encode_spatial_layers == 0; - break; - case LAYER_DROP: - drop = (encode_spatial_layers & (1 << sid)) == 0; - break; - case CONSTRAINED_LAYER_DROP: - drop = DropBelow(encode_spatial_layers, sid, - state.config.ss_number_layers); - break; - case CONSTRAINED_FROM_ABOVE_DROP: - drop = DropAbove(encode_spatial_layers, sid); - break; + bool drop = false; + // Never drop keyframe. + if (frame_types[0] != VideoFrameType::kVideoFrameKey) { + switch (state.frame_drop.framedrop_mode) { + case FULL_SUPERFRAME_DROP: + drop = encode_spatial_layers == 0; + break; + case LAYER_DROP: + drop = (encode_spatial_layers & (1 << sid)) == 0; + break; + case CONSTRAINED_LAYER_DROP: + drop = DropBelow(encode_spatial_layers, sid, + state.config.ss_number_layers); + break; + case CONSTRAINED_FROM_ABOVE_DROP: + drop = DropAbove(encode_spatial_layers, sid); + break; + } } if (!drop) { state.layer_id.spatial_layer_id = sid; diff --git a/test/fuzzers/vp9_qp_parser_fuzzer.cc b/test/fuzzers/vp9_qp_parser_fuzzer.cc index 80dfe15b16..a65cee54cf 100644 --- a/test/fuzzers/vp9_qp_parser_fuzzer.cc +++ b/test/fuzzers/vp9_qp_parser_fuzzer.cc @@ -8,10 +8,14 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include +#include + +#include "api/array_view.h" #include "modules/video_coding/utility/vp9_uncompressed_header_parser.h" namespace webrtc { void FuzzOneInput(const uint8_t* data, size_t size) { - ParseUncompressedVp9Header(rtc::MakeArrayView(data, size)); + ParseUncompressedVp9Header(webrtc::MakeArrayView(data, size)); } } // namespace webrtc diff --git a/test/fuzzers/vp9_replay_fuzzer.cc b/test/fuzzers/vp9_replay_fuzzer.cc index fc10d9ffc7..e55aa952f8 100644 --- a/test/fuzzers/vp9_replay_fuzzer.cc +++ b/test/fuzzers/vp9_replay_fuzzer.cc @@ -12,7 +12,11 @@ #include #include +#include +#include +#include "api/video_codecs/sdp_video_format.h" +#include "call/video_receive_stream.h" #include "test/fuzzers/utils/rtp_replayer.h" namespace webrtc { @@ -22,7 +26,7 @@ void FuzzOneInput(const uint8_t* data, size_t size) { VideoReceiveStreamInterface::Config vp9_config(&(stream_state->transport)); VideoReceiveStreamInterface::Decoder vp9_decoder; - vp9_decoder.video_format = SdpVideoFormat("VP9"); + vp9_decoder.video_format = SdpVideoFormat::VP9Profile0(); vp9_decoder.payload_type = 124; vp9_config.decoders.push_back(std::move(vp9_decoder)); diff --git a/test/fuzzers/webrtc_fuzzer_main.cc b/test/fuzzers/webrtc_fuzzer_main.cc index a52dd231be..830a9bead2 100644 --- a/test/fuzzers/webrtc_fuzzer_main.cc +++ b/test/fuzzers/webrtc_fuzzer_main.cc @@ -12,6 +12,9 @@ // It's intended to set sane defaults, such as removing logging for further // fuzzing efficiency. +#include +#include + #include "rtc_base/logging.h" namespace { @@ -23,7 +26,7 @@ void InitializeWebRtcFuzzDefaults() { // Remove default logging to prevent huge slowdowns. 
// TODO(pbos): Disable in Chromium: http://crbug.com/561667 #if !defined(WEBRTC_CHROMIUM_BUILD) - rtc::LogMessage::LogToDebug(rtc::LS_NONE); + webrtc::LogMessage::LogToDebug(webrtc::LS_NONE); #endif // !defined(WEBRTC_CHROMIUM_BUILD) g_initialized = true; diff --git a/test/gmock.h b/test/gmock.h index f137d080a4..9621e3e1b2 100644 --- a/test/gmock.h +++ b/test/gmock.h @@ -14,7 +14,7 @@ #include "rtc_base/ignore_wundef.h" RTC_PUSH_IGNORING_WUNDEF() -#include "testing/gmock/include/gmock/gmock.h" +#include "testing/gmock/include/gmock/gmock.h" // IWYU pragma: export RTC_POP_IGNORING_WUNDEF() #endif // TEST_GMOCK_H_ diff --git a/test/gtest.h b/test/gtest.h index fa4396420e..68948e0df4 100644 --- a/test/gtest.h +++ b/test/gtest.h @@ -14,8 +14,10 @@ #include "rtc_base/ignore_wundef.h" RTC_PUSH_IGNORING_WUNDEF() +// IWYU pragma: begin_exports #include "testing/gtest/include/gtest/gtest-spi.h" #include "testing/gtest/include/gtest/gtest.h" +// IWYU pragma: end_exports RTC_POP_IGNORING_WUNDEF() // GTEST_HAS_DEATH_TEST is set to 1 when death tests are supported, but appears diff --git a/test/ios/coverage_util_ios.h b/test/ios/coverage_util_ios.h index a17b69dca8..04cd69a355 100644 --- a/test/ios/coverage_util_ios.h +++ b/test/ios/coverage_util_ios.h @@ -11,7 +11,7 @@ #ifndef TEST_IOS_COVERAGE_UTIL_IOS_H_ #define TEST_IOS_COVERAGE_UTIL_IOS_H_ -namespace rtc { +namespace webrtc { namespace test { // In debug builds, if IOS_ENABLE_COVERAGE is defined, sets the filename of the @@ -19,6 +19,6 @@ namespace test { void ConfigureCoverageReportPath(); } // namespace test -} // namespace rtc +} // namespace webrtc #endif // TEST_IOS_COVERAGE_UTIL_IOS_H_ diff --git a/test/ios/coverage_util_ios.mm b/test/ios/coverage_util_ios.mm index c21a16def2..da96cec838 100644 --- a/test/ios/coverage_util_ios.mm +++ b/test/ios/coverage_util_ios.mm @@ -14,7 +14,7 @@ extern "C" void __llvm_profile_set_filename(const char* name); #endif -namespace rtc { +namespace webrtc { namespace test { void ConfigureCoverageReportPath() { @@ -23,14 +23,16 @@ void ConfigureCoverageReportPath() { dispatch_once(&once_token, ^{ // Writes the profraw file to the Documents directory, where the app has // write rights. - NSArray* paths = - NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); + NSArray* paths = NSSearchPathForDirectoriesInDomains( + NSDocumentDirectory, NSUserDomainMask, YES); NSString* documents_directory = [paths firstObject]; - NSString* file_name = [documents_directory stringByAppendingPathComponent:@"coverage.profraw"]; + NSString* file_name = [documents_directory + stringByAppendingPathComponent:@"coverage.profraw"]; // For documentation, see: // http://clang.llvm.org/docs/SourceBasedCodeCoverage.html - __llvm_profile_set_filename([file_name cStringUsingEncoding:NSUTF8StringEncoding]); + __llvm_profile_set_filename( + [file_name cStringUsingEncoding:NSUTF8StringEncoding]); // Print the path for easier retrieval. 
NSLog(@"Coverage data at %@.", file_name); @@ -39,4 +41,4 @@ void ConfigureCoverageReportPath() { } } // namespace test -} // namespace rtc +} // namespace webrtc diff --git a/test/ios/google_test_runner.mm b/test/ios/google_test_runner.mm index 87b7f7dfd7..d173b9fef6 100644 --- a/test/ios/google_test_runner.mm +++ b/test/ios/google_test_runner.mm @@ -29,7 +29,8 @@ - (void)testRunGoogleTests { self.continueAfterFailure = false; id appDelegate = UIApplication.sharedApplication.delegate; - XCTAssertTrue([appDelegate conformsToProtocol:@protocol(GoogleTestRunnerDelegate)]); + XCTAssertTrue( + [appDelegate conformsToProtocol:@protocol(GoogleTestRunnerDelegate)]); id runnerDelegate = static_cast>(appDelegate); diff --git a/test/ios/test_support.h b/test/ios/test_support.h index 5ac731393f..77ad8e849c 100644 --- a/test/ios/test_support.h +++ b/test/ios/test_support.h @@ -11,12 +11,11 @@ #ifndef TEST_IOS_TEST_SUPPORT_H_ #define TEST_IOS_TEST_SUPPORT_H_ +#include #include #include -#include "absl/types/optional.h" - -namespace rtc { +namespace webrtc { namespace test { // Launches an iOS app that serves as a host for a test suite. // This is necessary as iOS doesn't like processes without a gui @@ -28,12 +27,12 @@ void InitTestSuite(int (*test_suite)(void), bool save_chartjson_result, bool export_perf_results_new_api, std::string webrtc_test_metrics_output_path, - absl::optional> metrics_to_plot); + std::optional> metrics_to_plot); // Returns true if unittests should be run by the XCTest runnner. bool ShouldRunIOSUnittestsWithXCTest(); } // namespace test -} // namespace rtc +} // namespace webrtc #endif // TEST_IOS_TEST_SUPPORT_H_ diff --git a/test/ios/test_support.mm b/test/ios/test_support.mm index d3c9ee0c74..56a2754b9b 100644 --- a/test/ios/test_support.mm +++ b/test/ios/test_support.mm @@ -38,7 +38,8 @@ // run in a row, this provides an indication of which one is currently running. // If enabled, runs unittests using the XCTest test runner. -const char kEnableRunIOSUnittestsWithXCTest[] = "enable-run-ios-unittests-with-xctest"; +const char kEnableRunIOSUnittestsWithXCTest[] = + "enable-run-ios-unittests-with-xctest"; static int (*g_test_suite)(void) = NULL; static int g_argc; @@ -46,8 +47,8 @@ static bool g_write_perf_output; static bool g_export_perf_results_new_api; static std::string g_webrtc_test_metrics_output_path; -static absl::optional g_is_xctest; -static absl::optional> g_metrics_to_plot; +static std::optional g_is_xctest; +static std::optional> g_metrics_to_plot; @interface UIApplication (Testing) - (void)_terminateWithStatus:(int)status; @@ -79,7 +80,7 @@ - (BOOL)application:(UIApplication *)application // root view controller. Set an empty one here. [_window setRootViewController:[[UIViewController alloc] init]]; - if (!rtc::test::ShouldRunIOSUnittestsWithXCTest()) { + if (!webrtc::test::ShouldRunIOSUnittestsWithXCTest()) { // When running in XCTest mode, XCTest will invoke `runGoogleTest` directly. // Otherwise, schedule a call to `runTests`. 
[self performSelector:@selector(runTests) withObject:nil afterDelay:0.1]; @@ -89,53 +90,65 @@ - (BOOL)application:(UIApplication *)application } - (BOOL)supportsRunningGoogleTests { - return rtc::test::ShouldRunIOSUnittestsWithXCTest(); + return webrtc::test::ShouldRunIOSUnittestsWithXCTest(); } - (int)runGoogleTests { - rtc::test::ConfigureCoverageReportPath(); + webrtc::test::ConfigureCoverageReportPath(); int exitStatus = g_test_suite(); - NSArray *outputDirectories = - NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); + NSArray *outputDirectories = NSSearchPathForDirectoriesInDomains( + NSDocumentDirectory, NSUserDomainMask, YES); std::vector> exporters; if (g_export_perf_results_new_api) { - exporters.push_back(std::make_unique()); + exporters.push_back( + std::make_unique()); if (g_write_perf_output) { // Stores data into a proto file under the app's document directory. NSString *fileName = @"perftest-output.pb"; if ([outputDirectories count] != 0) { - NSString *outputPath = [outputDirectories[0] stringByAppendingPathComponent:fileName]; + NSString *outputPath = + [outputDirectories[0] stringByAppendingPathComponent:fileName]; - exporters.push_back(std::make_unique( - [NSString stdStringForString:outputPath])); + exporters.push_back( + std::make_unique( + [NSString stdStringForString:outputPath])); } } if (!g_webrtc_test_metrics_output_path.empty()) { - RTC_CHECK_EQ(g_webrtc_test_metrics_output_path.find('/'), std::string::npos) - << "On iOS, --webrtc_test_metrics_output_path must only be a file name."; + RTC_CHECK_EQ(g_webrtc_test_metrics_output_path.find('/'), + std::string::npos) + << "On iOS, --webrtc_test_metrics_output_path must only be a file " + "name."; if ([outputDirectories count] != 0) { - NSString *fileName = [NSString stringWithCString:g_webrtc_test_metrics_output_path.c_str() - encoding:[NSString defaultCStringEncoding]]; - NSString *outputPath = [outputDirectories[0] stringByAppendingPathComponent:fileName]; - exporters.push_back(std::make_unique( - webrtc::test::MetricsSetProtoFileExporter::Options( - [NSString stdStringForString:outputPath]))); + NSString *fileName = [NSString + stringWithCString:g_webrtc_test_metrics_output_path.c_str() + encoding:[NSString defaultCStringEncoding]]; + NSString *outputPath = + [outputDirectories[0] stringByAppendingPathComponent:fileName]; + exporters.push_back( + std::make_unique( + webrtc::test::MetricsSetProtoFileExporter::Options( + [NSString stdStringForString:outputPath]))); } } } else { - exporters.push_back(std::make_unique()); + exporters.push_back( + std::make_unique()); } - webrtc::test::ExportPerfMetric(*webrtc::test::GetGlobalMetricsLogger(), std::move(exporters)); + webrtc::test::ExportPerfMetric(*webrtc::test::GetGlobalMetricsLogger(), + std::move(exporters)); if (!g_export_perf_results_new_api) { if (g_write_perf_output) { // Stores data into a proto file under the app's document directory. 
NSString *fileName = @"perftest-output.pb"; if ([outputDirectories count] != 0) { - NSString *outputPath = [outputDirectories[0] stringByAppendingPathComponent:fileName]; + NSString *outputPath = + [outputDirectories[0] stringByAppendingPathComponent:fileName]; - if (!webrtc::test::WritePerfResults([NSString stdStringForString:outputPath])) { + if (!webrtc::test::WritePerfResults( + [NSString stdStringForString:outputPath])) { return 1; } } @@ -149,8 +162,8 @@ - (int)runGoogleTests { } - (void)runTests { - RTC_DCHECK(!rtc::test::ShouldRunIOSUnittestsWithXCTest()); - rtc::test::ConfigureCoverageReportPath(); + RTC_DCHECK(!webrtc::test::ShouldRunIOSUnittestsWithXCTest()); + webrtc::test::ConfigureCoverageReportPath(); int exitStatus = [self runGoogleTests]; @@ -169,7 +182,7 @@ - (void)runTests { } @end -namespace rtc { +namespace webrtc { namespace test { // Note: This is not thread safe, and must be called from the same thread as @@ -180,7 +193,7 @@ void InitTestSuite(int (*test_suite)(void), bool write_perf_output, bool export_perf_results_new_api, std::string webrtc_test_metrics_output_path, - absl::optional> metrics_to_plot) { + std::optional> metrics_to_plot) { g_test_suite = test_suite; g_argc = argc; g_argv = argv; @@ -204,14 +217,14 @@ bool ShouldRunIOSUnittestsWithXCTest() { char **argv = g_argv; while (*argv != nullptr) { if (strstr(*argv, kEnableRunIOSUnittestsWithXCTest) != nullptr) { - g_is_xctest = absl::optional(true); + g_is_xctest = std::optional(true); return true; } argv++; } - g_is_xctest = absl::optional(false); + g_is_xctest = std::optional(false); return false; } } // namespace test -} // namespace rtc +} // namespace webrtc diff --git a/test/jitter/BUILD.gn b/test/jitter/BUILD.gn index ad9c58ac42..c14c94c55a 100644 --- a/test/jitter/BUILD.gn +++ b/test/jitter/BUILD.gn @@ -25,7 +25,6 @@ rtc_library("delay_variation_calculator") { "../../rtc_base:logging", "../../rtc_base:rtc_numerics", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("logging_delay_variation_calculator") { @@ -40,8 +39,8 @@ rtc_library("logging_delay_variation_calculator") { "../../api/units:data_size", "../../api/video:video_frame_type", "../../rtc_base:logging", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } if (rtc_include_tests) { diff --git a/test/jitter/delay_variation_calculator.cc b/test/jitter/delay_variation_calculator.cc index 092bd7ca82..2d409f88d8 100644 --- a/test/jitter/delay_variation_calculator.cc +++ b/test/jitter/delay_variation_calculator.cc @@ -10,9 +10,9 @@ #include "test/jitter/delay_variation_calculator.h" +#include #include -#include "absl/types/optional.h" #include "api/units/frequency.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -29,9 +29,9 @@ void DelayVariationCalculator::Insert( uint32_t rtp_timestamp, Timestamp arrival_time, DataSize size, - absl::optional spatial_layer, - absl::optional temporal_layer, - absl::optional frame_type) { + std::optional spatial_layer, + std::optional temporal_layer, + std::optional frame_type) { Frame frame{.rtp_timestamp = rtp_timestamp, .unwrapped_rtp_timestamp = unwrapper_.Unwrap(rtp_timestamp), .arrival_time = arrival_time, diff --git a/test/jitter/delay_variation_calculator.h b/test/jitter/delay_variation_calculator.h index 6400f82ad7..6b06afe86d 100644 --- a/test/jitter/delay_variation_calculator.h +++ b/test/jitter/delay_variation_calculator.h @@ -14,9 +14,9 @@ #include #include +#include #include -#include 
"absl/types/optional.h" #include "api/numerics/samples_stats_counter.h" #include "api/test/metrics/metrics_logger.h" #include "api/units/data_size.h" @@ -56,9 +56,9 @@ class DelayVariationCalculator { void Insert(uint32_t rtp_timestamp, Timestamp arrival_time, DataSize size, - absl::optional spatial_layer = absl::nullopt, - absl::optional temporal_layer = absl::nullopt, - absl::optional frame_type = absl::nullopt); + std::optional spatial_layer = std::nullopt, + std::optional temporal_layer = std::nullopt, + std::optional frame_type = std::nullopt); const TimeSeries& time_series() const { return time_series_; } @@ -68,9 +68,9 @@ class DelayVariationCalculator { int64_t unwrapped_rtp_timestamp; Timestamp arrival_time; DataSize size; - absl::optional spatial_layer; - absl::optional temporal_layer; - absl::optional frame_type; + std::optional spatial_layer; + std::optional temporal_layer; + std::optional frame_type; }; using MetadataT = std::map; @@ -84,7 +84,7 @@ class DelayVariationCalculator { MetadataT BuildMetadata(const Frame& frame); RtpTimestampUnwrapper unwrapper_; - absl::optional prev_frame_ = absl::nullopt; + std::optional prev_frame_ = std::nullopt; TimeSeries time_series_; }; diff --git a/test/jitter/logging_delay_variation_calculator.cc b/test/jitter/logging_delay_variation_calculator.cc index aa58b0fa01..dc08a173c8 100644 --- a/test/jitter/logging_delay_variation_calculator.cc +++ b/test/jitter/logging_delay_variation_calculator.cc @@ -20,9 +20,9 @@ void LoggingDelayVariationCalculator::Insert( uint32_t rtp_timestamp, Timestamp arrival_time, DataSize size, - absl::optional spatial_layer, - absl::optional temporal_layer, - absl::optional frame_type) { + std::optional spatial_layer, + std::optional temporal_layer, + std::optional frame_type) { calc_.Insert(rtp_timestamp, arrival_time, size, spatial_layer, temporal_layer, frame_type); } diff --git a/test/jitter/logging_delay_variation_calculator.h b/test/jitter/logging_delay_variation_calculator.h index f15d2aec77..a3a67af59c 100644 --- a/test/jitter/logging_delay_variation_calculator.h +++ b/test/jitter/logging_delay_variation_calculator.h @@ -35,9 +35,9 @@ class LoggingDelayVariationCalculator { void Insert(uint32_t rtp_timestamp, Timestamp arrival_time, DataSize size, - absl::optional spatial_layer = absl::nullopt, - absl::optional temporal_layer = absl::nullopt, - absl::optional frame_type = absl::nullopt); + std::optional spatial_layer = std::nullopt, + std::optional temporal_layer = std::nullopt, + std::optional frame_type = std::nullopt); private: void LogMetrics() const; diff --git a/test/layer_filtering_transport.cc b/test/layer_filtering_transport.cc index 931a89b9da..bc68d3af1c 100644 --- a/test/layer_filtering_transport.cc +++ b/test/layer_filtering_transport.cc @@ -40,8 +40,8 @@ LayerFilteringTransport::LayerFilteringTransport( const std::map& payload_type_map, uint32_t ssrc_to_filter_min, uint32_t ssrc_to_filter_max, - rtc::ArrayView audio_extensions, - rtc::ArrayView video_extensions) + ArrayView audio_extensions, + ArrayView video_extensions) : DirectTransport(task_queue, std::move(pipe), send_call, @@ -67,8 +67,8 @@ LayerFilteringTransport::LayerFilteringTransport( int selected_tl, int selected_sl, const std::map& payload_type_map, - rtc::ArrayView audio_extensions, - rtc::ArrayView video_extensions) + ArrayView audio_extensions, + ArrayView video_extensions) : LayerFilteringTransport(task_queue, std::move(pipe), send_call, @@ -86,7 +86,7 @@ bool LayerFilteringTransport::DiscardedLastPacket() const { return 
discarded_last_packet_; } -bool LayerFilteringTransport::SendRtp(rtc::ArrayView packet, +bool LayerFilteringTransport::SendRtp(ArrayView packet, const PacketOptions& options) { if (selected_tl_ == -1 && selected_sl_ == -1) { // Nothing to change, forward the packet immediately. @@ -114,7 +114,7 @@ bool LayerFilteringTransport::SendRtp(rtc::ArrayView packet, bool end_of_frame; if (is_vp8) { - temporal_idx = absl::get( + temporal_idx = std::get( parsed_payload->video_header.video_type_header) .temporalIdx; spatial_idx = kNoSpatialIdx; @@ -122,7 +122,7 @@ bool LayerFilteringTransport::SendRtp(rtc::ArrayView packet, non_ref_for_inter_layer_pred = false; end_of_frame = true; } else { - const auto& vp9_header = absl::get( + const auto& vp9_header = std::get( parsed_payload->video_header.video_type_header); temporal_idx = vp9_header.temporal_idx; spatial_idx = vp9_header.spatial_idx; diff --git a/test/layer_filtering_transport.h b/test/layer_filtering_transport.h index f4aa550d74..e0a23a200e 100644 --- a/test/layer_filtering_transport.h +++ b/test/layer_filtering_transport.h @@ -40,8 +40,8 @@ class LayerFilteringTransport : public test::DirectTransport { const std::map& payload_type_map, uint32_t ssrc_to_filter_min, uint32_t ssrc_to_filter_max, - rtc::ArrayView audio_extensions, - rtc::ArrayView video_extensions); + ArrayView audio_extensions, + ArrayView video_extensions); LayerFilteringTransport( TaskQueueBase* task_queue, std::unique_ptr pipe, @@ -51,10 +51,10 @@ class LayerFilteringTransport : public test::DirectTransport { int selected_tl, int selected_sl, const std::map& payload_type_map, - rtc::ArrayView audio_extensions, - rtc::ArrayView video_extensions); + ArrayView audio_extensions, + ArrayView video_extensions); bool DiscardedLastPacket() const; - bool SendRtp(rtc::ArrayView data, + bool SendRtp(ArrayView data, const PacketOptions& options) override; private: diff --git a/test/logging/BUILD.gn b/test/logging/BUILD.gn index 301c0e59c0..7e8f827d9a 100644 --- a/test/logging/BUILD.gn +++ b/test/logging/BUILD.gn @@ -27,9 +27,6 @@ rtc_library("log_writer") { "../../rtc_base:rtc_base_tests_utils", "../../rtc_base:stringutils", "../../test:fileutils", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } diff --git a/test/mac/run_test.mm b/test/mac/run_test.mm index 38c6c8f8c1..b6d60ecd10 100644 --- a/test/mac/run_test.mm +++ b/test/mac/run_test.mm @@ -13,9 +13,11 @@ #include "test/run_test.h" // Converting a C++ function pointer to an Objective-C block. -typedef void(^TestBlock)(); -TestBlock functionToBlock(void(*function)()) { - return [^(void) { function(); } copy]; +typedef void (^TestBlock)(); +TestBlock functionToBlock(void (*function)()) { + return [^(void) { + function(); + } copy]; } // Class calling the test function on the platform specific thread. 
@@ -50,7 +52,7 @@ - (BOOL)running { namespace webrtc { namespace test { -void RunTest(void(*test)()) { +void RunTest(void (*test)()) { @autoreleasepool { [NSApplication sharedApplication]; @@ -63,8 +65,9 @@ void RunTest(void(*test)()) { withObject:testBlock]; NSRunLoop *runLoop = [NSRunLoop currentRunLoop]; - while ([testRunner running] && [runLoop runMode:NSDefaultRunLoopMode - beforeDate:[NSDate dateWithTimeIntervalSinceNow:0.1]]) + while ([testRunner running] && + [runLoop runMode:NSDefaultRunLoopMode + beforeDate:[NSDate dateWithTimeIntervalSinceNow:0.1]]) ; } } diff --git a/test/mac/video_renderer_mac.mm b/test/mac/video_renderer_mac.mm index 7103375383..04e4b76444 100644 --- a/test/mac/video_renderer_mac.mm +++ b/test/mac/video_renderer_mac.mm @@ -31,11 +31,12 @@ - (void)makeCurrentContext; @end @implementation CocoaWindow - static NSInteger nextXOrigin_; - static NSInteger nextYOrigin_; +static NSInteger nextXOrigin_; +static NSInteger nextYOrigin_; - (id)initWithTitle:(NSString *)title width:(int)width height:(int)height { - if (self = [super init]) { + self = [super init]; + if (self) { title_ = title; width_ = width; height_ = height; @@ -66,7 +67,8 @@ - (void)createWindow:(NSObject *)ignored { defer:NO]; NSRect viewFrame = NSMakeRect(0, 0, width_, height_); - NSOpenGLView *view = [[NSOpenGLView alloc] initWithFrame:viewFrame pixelFormat:nil]; + NSOpenGLView *view = [[NSOpenGLView alloc] initWithFrame:viewFrame + pixelFormat:nil]; context_ = [view openGLContext]; [[window_ contentView] addSubview:view]; @@ -83,10 +85,10 @@ - (void)makeCurrentContext { namespace webrtc { namespace test { -VideoRenderer* VideoRenderer::CreatePlatformRenderer(const char* window_title, +VideoRenderer *VideoRenderer::CreatePlatformRenderer(const char *window_title, size_t width, size_t height) { - MacRenderer* renderer = new MacRenderer(); + MacRenderer *renderer = new MacRenderer(); if (!renderer->Init(window_title, width, height)) { delete renderer; return NULL; @@ -94,20 +96,18 @@ - (void)makeCurrentContext { return renderer; } -MacRenderer::MacRenderer() - : window_(NULL) {} +MacRenderer::MacRenderer() : window_(NULL) {} MacRenderer::~MacRenderer() { GlRenderer::Destroy(); } -bool MacRenderer::Init(const char* window_title, int width, int height) { +bool MacRenderer::Init(const char *window_title, int width, int height) { window_ = [[CocoaWindow alloc] initWithTitle:[NSString stringWithUTF8String:window_title] - width:width - height:height]; - if (!window_) - return false; + width:width + height:height]; + if (!window_) return false; [window_ performSelectorOnMainThread:@selector(createWindow:) withObject:nil waitUntilDone:YES]; @@ -118,10 +118,10 @@ - (void)makeCurrentContext { return true; } -void MacRenderer::OnFrame(const VideoFrame& frame) { +void MacRenderer::OnFrame(const VideoFrame &frame) { [window_ makeCurrentContext]; GlRenderer::OnFrame(frame); } -} // test -} // webrtc +} // namespace test +} // namespace webrtc diff --git a/test/mac_capturer.h b/test/mac_capturer.h index 58ccfc0675..8e53cd5a74 100644 --- a/test/mac_capturer.h +++ b/test/mac_capturer.h @@ -25,7 +25,7 @@ namespace webrtc { namespace test { class MacCapturer : public TestVideoCapturer, - public rtc::VideoSinkInterface { + public VideoSinkInterface { public: static MacCapturer* Create(size_t width, size_t height, diff --git a/test/mac_capturer.mm b/test/mac_capturer.mm index 9b14f28c2a..d3c86084b4 100644 --- a/test/mac_capturer.mm +++ b/test/mac_capturer.mm @@ -15,7 +15,8 @@ #import "sdk/objc/native/api/video_capturer.h" 
#import "sdk/objc/native/src/objc_frame_buffer.h" -@interface RTCTestVideoSourceAdapter : NSObject +@interface RTCTestVideoSourceAdapter + : NSObject @property(nonatomic) webrtc::test::MacCapturer *capturer; @end @@ -24,9 +25,10 @@ @implementation RTCTestVideoSourceAdapter - (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { - const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec; - rtc::scoped_refptr buffer = - rtc::make_ref_counted(frame.buffer); + const int64_t timestamp_us = + frame.timeStampNs / webrtc::kNumNanosecsPerMicrosec; + webrtc::scoped_refptr buffer = + webrtc::make_ref_counted(frame.buffer); _capturer->OnFrame(webrtc::VideoFrame::Builder() .set_video_frame_buffer(buffer) .set_rotation(webrtc::kVideoRotation_0) @@ -38,15 +40,18 @@ - (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer namespace { -AVCaptureDeviceFormat *SelectClosestFormat(AVCaptureDevice *device, size_t width, size_t height) { +AVCaptureDeviceFormat *SelectClosestFormat(AVCaptureDevice *device, + size_t width, + size_t height) { NSArray *formats = [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device]; AVCaptureDeviceFormat *selectedFormat = nil; int currentDiff = INT_MAX; for (AVCaptureDeviceFormat *format in formats) { - CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - int diff = - std::abs((int64_t)width - dimension.width) + std::abs((int64_t)height - dimension.height); + CMVideoDimensions dimension = + CMVideoFormatDescriptionGetDimensions(format.formatDescription); + int diff = std::abs((int64_t)width - dimension.width) + + std::abs((int64_t)height - dimension.height); if (diff < currentDiff) { selectedFormat = format; currentDiff = diff; @@ -74,8 +79,8 @@ - (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:adapter]; capturer_ = (__bridge_retained void *)capturer; - AVCaptureDevice *device = - [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices] objectAtIndex:capture_device_index]; + AVCaptureDevice *device = [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) + captureDevices] objectAtIndex:capture_device_index]; AVCaptureDeviceFormat *format = SelectClosestFormat(device, width, height); [capturer startCaptureWithDevice:device format:format fps:target_fps]; } @@ -90,7 +95,8 @@ - (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer void MacCapturer::Destroy() { #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wunused-variable" - RTCTestVideoSourceAdapter *adapter = (__bridge_transfer RTCTestVideoSourceAdapter *)adapter_; + RTCTestVideoSourceAdapter *adapter = + (__bridge_transfer RTCTestVideoSourceAdapter *)adapter_; RTC_OBJC_TYPE(RTCCameraVideoCapturer) *capturer = (__bridge_transfer RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer_; [capturer stopCapture]; diff --git a/test/mappable_native_buffer.cc b/test/mappable_native_buffer.cc index 1b171e604b..8a2380959d 100644 --- a/test/mappable_native_buffer.cc +++ b/test/mappable_native_buffer.cc @@ -30,7 +30,7 @@ class NV12BufferWithDidConvertToI420 : public NV12Buffer { bool did_convert_to_i420() const { return did_convert_to_i420_; } - rtc::scoped_refptr ToI420() override { + scoped_refptr ToI420() override { did_convert_to_i420_ = true; return NV12Buffer::ToI420(); } @@ -47,9 +47,9 @@ VideoFrame CreateMappableNativeFrame(int64_t ntp_time_ms, int height) { VideoFrame frame = VideoFrame::Builder() - 
.set_video_frame_buffer(rtc::make_ref_counted( + .set_video_frame_buffer(make_ref_counted( mappable_type, width, height)) - .set_timestamp_rtp(99) + .set_rtp_timestamp(99) .set_timestamp_ms(99) .set_rotation(kVideoRotation_0) .build(); @@ -57,39 +57,38 @@ VideoFrame CreateMappableNativeFrame(int64_t ntp_time_ms, return frame; } -rtc::scoped_refptr GetMappableNativeBufferFromVideoFrame( +scoped_refptr GetMappableNativeBufferFromVideoFrame( const VideoFrame& frame) { - return rtc::scoped_refptr( + return scoped_refptr( static_cast(frame.video_frame_buffer().get())); } MappableNativeBuffer::ScaledBuffer::ScaledBuffer( - rtc::scoped_refptr parent, + scoped_refptr parent, int width, int height) : parent_(std::move(parent)), width_(width), height_(height) {} MappableNativeBuffer::ScaledBuffer::~ScaledBuffer() {} -rtc::scoped_refptr +scoped_refptr MappableNativeBuffer::ScaledBuffer::CropAndScale(int offset_x, int offset_y, int crop_width, int crop_height, int scaled_width, int scaled_height) { - return rtc::make_ref_counted(parent_, scaled_width, - scaled_height); + return make_ref_counted(parent_, scaled_width, scaled_height); } -rtc::scoped_refptr +scoped_refptr MappableNativeBuffer::ScaledBuffer::ToI420() { return parent_->GetOrCreateMappedBuffer(width_, height_)->ToI420(); } -rtc::scoped_refptr +scoped_refptr MappableNativeBuffer::ScaledBuffer::GetMappedFrameBuffer( - rtc::ArrayView types) { + ArrayView types) { if (absl::c_find(types, parent_->mappable_type_) == types.end()) return nullptr; return parent_->GetOrCreateMappedBuffer(width_, height_); @@ -105,7 +104,7 @@ MappableNativeBuffer::MappableNativeBuffer(VideoFrameBuffer::Type mappable_type, MappableNativeBuffer::~MappableNativeBuffer() {} -rtc::scoped_refptr MappableNativeBuffer::CropAndScale( +scoped_refptr MappableNativeBuffer::CropAndScale( int offset_x, int offset_y, int crop_width, @@ -116,16 +115,16 @@ rtc::scoped_refptr MappableNativeBuffer::CropAndScale( offset_x, offset_y, crop_width, crop_height, scaled_width, scaled_height); } -rtc::scoped_refptr MappableNativeBuffer::ToI420() { +scoped_refptr MappableNativeBuffer::ToI420() { return FullSizeBuffer()->ToI420(); } -rtc::scoped_refptr MappableNativeBuffer::GetMappedFrameBuffer( - rtc::ArrayView types) { +scoped_refptr MappableNativeBuffer::GetMappedFrameBuffer( + ArrayView types) { return FullSizeBuffer()->GetMappedFrameBuffer(types); } -std::vector> +std::vector> MappableNativeBuffer::GetMappedFramedBuffers() const { MutexLock lock(&lock_); return mapped_buffers_; @@ -144,32 +143,32 @@ bool MappableNativeBuffer::DidConvertToI420() const { return false; } -rtc::scoped_refptr +scoped_refptr MappableNativeBuffer::FullSizeBuffer() { - return rtc::make_ref_counted( - rtc::scoped_refptr(this), width_, height_); + return make_ref_counted( + scoped_refptr(this), width_, height_); } -rtc::scoped_refptr -MappableNativeBuffer::GetOrCreateMappedBuffer(int width, int height) { +scoped_refptr MappableNativeBuffer::GetOrCreateMappedBuffer( + int width, + int height) { MutexLock lock(&lock_); for (auto& mapped_buffer : mapped_buffers_) { if (mapped_buffer->width() == width && mapped_buffer->height() == height) { return mapped_buffer; } } - rtc::scoped_refptr mapped_buffer; + scoped_refptr mapped_buffer; switch (mappable_type_) { case VideoFrameBuffer::Type::kI420: { - rtc::scoped_refptr i420_buffer = - I420Buffer::Create(width, height); + scoped_refptr i420_buffer = I420Buffer::Create(width, height); I420Buffer::SetBlack(i420_buffer.get()); mapped_buffer = i420_buffer; break; } case 
VideoFrameBuffer::Type::kNV12: { auto nv12_buffer = - rtc::make_ref_counted(width, height); + make_ref_counted(width, height); nv12_buffer->InitializeData(); mapped_buffer = std::move(nv12_buffer); break; diff --git a/test/mappable_native_buffer.h b/test/mappable_native_buffer.h index 08f155e07f..c2b4cec627 100644 --- a/test/mappable_native_buffer.h +++ b/test/mappable_native_buffer.h @@ -30,7 +30,7 @@ VideoFrame CreateMappableNativeFrame(int64_t ntp_time_ms, int width, int height); -rtc::scoped_refptr GetMappableNativeBufferFromVideoFrame( +scoped_refptr GetMappableNativeBufferFromVideoFrame( const VideoFrame& frame); // A for-testing native buffer that is scalable and mappable. The contents of @@ -53,29 +53,28 @@ class MappableNativeBuffer : public VideoFrameBuffer { int width() const override { return width_; } int height() const override { return height_; } - rtc::scoped_refptr CropAndScale(int offset_x, - int offset_y, - int crop_width, - int crop_height, - int scaled_width, - int scaled_height) override; + scoped_refptr CropAndScale(int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) override; - rtc::scoped_refptr ToI420() override; - rtc::scoped_refptr GetMappedFrameBuffer( - rtc::ArrayView types) override; + scoped_refptr ToI420() override; + scoped_refptr GetMappedFrameBuffer( + ArrayView types) override; // Gets all the buffers that have been mapped so far, including mappings of // cropped and scaled buffers. - std::vector> GetMappedFramedBuffers() - const; + std::vector> GetMappedFramedBuffers() const; bool DidConvertToI420() const; private: - friend class rtc::RefCountedObject; + friend class RefCountedObject; class ScaledBuffer : public VideoFrameBuffer { public: - ScaledBuffer(rtc::scoped_refptr parent, + ScaledBuffer(scoped_refptr parent, int width, int height); ~ScaledBuffer() override; @@ -84,35 +83,34 @@ class MappableNativeBuffer : public VideoFrameBuffer { int width() const override { return width_; } int height() const override { return height_; } - rtc::scoped_refptr CropAndScale( - int offset_x, - int offset_y, - int crop_width, - int crop_height, - int scaled_width, - int scaled_height) override; + scoped_refptr CropAndScale(int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) override; - rtc::scoped_refptr ToI420() override; - rtc::scoped_refptr GetMappedFrameBuffer( - rtc::ArrayView types) override; + scoped_refptr ToI420() override; + scoped_refptr GetMappedFrameBuffer( + ArrayView types) override; private: - friend class rtc::RefCountedObject; + friend class RefCountedObject; - const rtc::scoped_refptr parent_; + const scoped_refptr parent_; const int width_; const int height_; }; - rtc::scoped_refptr FullSizeBuffer(); - rtc::scoped_refptr GetOrCreateMappedBuffer(int width, - int height); + scoped_refptr FullSizeBuffer(); + scoped_refptr GetOrCreateMappedBuffer(int width, + int height); const VideoFrameBuffer::Type mappable_type_; const int width_; const int height_; mutable Mutex lock_; - std::vector> mapped_buffers_ + std::vector> mapped_buffers_ RTC_GUARDED_BY(&lock_); }; diff --git a/test/mock_audio_decoder_factory.h b/test/mock_audio_decoder_factory.h index 425ea38f9c..d14a7dcc6d 100644 --- a/test/mock_audio_decoder_factory.h +++ b/test/mock_audio_decoder_factory.h @@ -16,6 +16,7 @@ #include "api/audio_codecs/audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/environment/environment.h" #include 
"api/make_ref_counted.h" #include "api/scoped_refptr.h" #include "test/gmock.h" @@ -24,67 +25,34 @@ namespace webrtc { class MockAudioDecoderFactory : public AudioDecoderFactory { public: - MOCK_METHOD(std::vector, - GetSupportedDecoders, - (), - (override)); - MOCK_METHOD(bool, IsSupportedDecoder, (const SdpAudioFormat&), (override)); - std::unique_ptr MakeAudioDecoder( - const SdpAudioFormat& format, - absl::optional codec_pair_id) override { - std::unique_ptr return_value; - MakeAudioDecoderMock(format, codec_pair_id, &return_value); - return return_value; - } - MOCK_METHOD(void, - MakeAudioDecoderMock, - (const SdpAudioFormat& format, - absl::optional codec_pair_id, - std::unique_ptr*)); - // Creates a MockAudioDecoderFactory with no formats and that may not be // invoked to create a codec - useful for initializing a voice engine, for // example. - static rtc::scoped_refptr - CreateUnusedFactory() { - using ::testing::_; - using ::testing::AnyNumber; - using ::testing::Return; - - rtc::scoped_refptr factory = - rtc::make_ref_counted(); - ON_CALL(*factory.get(), GetSupportedDecoders()) - .WillByDefault(Return(std::vector())); - EXPECT_CALL(*factory.get(), GetSupportedDecoders()).Times(AnyNumber()); - ON_CALL(*factory, IsSupportedDecoder(_)).WillByDefault(Return(false)); - EXPECT_CALL(*factory, IsSupportedDecoder(_)).Times(AnyNumber()); - EXPECT_CALL(*factory.get(), MakeAudioDecoderMock(_, _, _)).Times(0); + static scoped_refptr CreateUnusedFactory() { + auto factory = + make_ref_counted>(); + EXPECT_CALL(*factory, Create).Times(0); return factory; } // Creates a MockAudioDecoderFactory with no formats that may be invoked to // create a codec any number of times. It will, though, return nullptr on each // call, since it supports no codecs. - static rtc::scoped_refptr - CreateEmptyFactory() { - using ::testing::_; - using ::testing::AnyNumber; - using ::testing::Return; - using ::testing::SetArgPointee; - - rtc::scoped_refptr factory = - rtc::make_ref_counted(); - ON_CALL(*factory.get(), GetSupportedDecoders()) - .WillByDefault(Return(std::vector())); - EXPECT_CALL(*factory.get(), GetSupportedDecoders()).Times(AnyNumber()); - ON_CALL(*factory, IsSupportedDecoder(_)).WillByDefault(Return(false)); - EXPECT_CALL(*factory, IsSupportedDecoder(_)).Times(AnyNumber()); - ON_CALL(*factory.get(), MakeAudioDecoderMock(_, _, _)) - .WillByDefault(SetArgPointee<2>(nullptr)); - EXPECT_CALL(*factory.get(), MakeAudioDecoderMock(_, _, _)) - .Times(AnyNumber()); - return factory; + static scoped_refptr CreateEmptyFactory() { + return make_ref_counted>(); } + + MOCK_METHOD(std::vector, + GetSupportedDecoders, + (), + (override)); + MOCK_METHOD(bool, IsSupportedDecoder, (const SdpAudioFormat&), (override)); + MOCK_METHOD(std::unique_ptr, + Create, + (const Environment&, + const SdpAudioFormat&, + std::optional), + (override)); }; } // namespace webrtc diff --git a/test/mock_audio_encoder.cc b/test/mock_audio_encoder.cc index 36615111a5..74f6a29f5b 100644 --- a/test/mock_audio_encoder.cc +++ b/test/mock_audio_encoder.cc @@ -25,29 +25,27 @@ MockAudioEncoder::FakeEncoding::FakeEncoding(size_t encoded_bytes) { AudioEncoder::EncodedInfo MockAudioEncoder::FakeEncoding::operator()( uint32_t timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) { + ArrayView audio, + Buffer* encoded) { encoded->SetSize(encoded->size() + info_.encoded_bytes); return info_; } MockAudioEncoder::CopyEncoding::~CopyEncoding() = default; -MockAudioEncoder::CopyEncoding::CopyEncoding( - AudioEncoder::EncodedInfo info, - rtc::ArrayView 
payload) +MockAudioEncoder::CopyEncoding::CopyEncoding(AudioEncoder::EncodedInfo info, + ArrayView payload) : info_(info), payload_(payload) {} -MockAudioEncoder::CopyEncoding::CopyEncoding( - rtc::ArrayView payload) +MockAudioEncoder::CopyEncoding::CopyEncoding(ArrayView payload) : payload_(payload) { info_.encoded_bytes = payload_.size(); } AudioEncoder::EncodedInfo MockAudioEncoder::CopyEncoding::operator()( uint32_t timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded) { + ArrayView audio, + Buffer* encoded) { RTC_CHECK(encoded); RTC_CHECK_LE(info_.encoded_bytes, payload_.size()); encoded->AppendData(payload_.data(), info_.encoded_bytes); diff --git a/test/mock_audio_encoder.h b/test/mock_audio_encoder.h index 1f4510e885..df0089653b 100644 --- a/test/mock_audio_encoder.h +++ b/test/mock_audio_encoder.h @@ -29,10 +29,14 @@ class MockAudioEncoder : public AudioEncoder { MOCK_METHOD(size_t, Num10MsFramesInNextPacket, (), (const, override)); MOCK_METHOD(size_t, Max10MsFramesInAPacket, (), (const, override)); MOCK_METHOD(int, GetTargetBitrate, (), (const, override)); - MOCK_METHOD((absl::optional>), + MOCK_METHOD((std::optional>), GetFrameLengthRange, (), (const, override)); + MOCK_METHOD((std::optional>), + GetBitrateRange, + (), + (const, override)); MOCK_METHOD(void, Reset, (), (override)); MOCK_METHOD(bool, SetFec, (bool enable), (override)); @@ -42,7 +46,7 @@ class MockAudioEncoder : public AudioEncoder { MOCK_METHOD(void, OnReceivedUplinkBandwidth, (int target_audio_bitrate_bps, - absl::optional probing_interval_ms), + std::optional probing_interval_ms), (override)); MOCK_METHOD(void, OnReceivedUplinkPacketLossFraction, @@ -62,13 +66,13 @@ class MockAudioEncoder : public AudioEncoder { MOCK_METHOD(EncodedInfo, EncodeImpl, (uint32_t timestamp, - rtc::ArrayView audio, - rtc::Buffer*), + webrtc::ArrayView audio, + webrtc::Buffer*), (override)); class FakeEncoding { public: - // Creates a functor that will return `info` and adjust the rtc::Buffer + // Creates a functor that will return `info` and adjust the webrtc::Buffer // given as input to it, so it is info.encoded_bytes larger. explicit FakeEncoding(const AudioEncoder::EncodedInfo& info); @@ -77,8 +81,8 @@ class MockAudioEncoder : public AudioEncoder { explicit FakeEncoding(size_t encoded_bytes); AudioEncoder::EncodedInfo operator()(uint32_t timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded); + ArrayView audio, + Buffer* encoded); private: AudioEncoder::EncodedInfo info_; @@ -94,20 +98,20 @@ class MockAudioEncoder : public AudioEncoder { // ArrayView, it _does not_ copy the payload. Make sure it doesn't fall out // of scope! CopyEncoding(AudioEncoder::EncodedInfo info, - rtc::ArrayView payload); + ArrayView payload); // Shorthand version of the constructor above, for when you wish to append // the whole payload and do not care about any EncodedInfo attribute other // than encoded_bytes. 
- explicit CopyEncoding(rtc::ArrayView payload); + explicit CopyEncoding(ArrayView payload); AudioEncoder::EncodedInfo operator()(uint32_t timestamp, - rtc::ArrayView audio, - rtc::Buffer* encoded); + ArrayView audio, + Buffer* encoded); private: AudioEncoder::EncodedInfo info_; - rtc::ArrayView payload_; + ArrayView payload_; }; }; diff --git a/test/mock_audio_encoder_factory.h b/test/mock_audio_encoder_factory.h index eaa5b8f17d..0d3c96a3f8 100644 --- a/test/mock_audio_encoder_factory.h +++ b/test/mock_audio_encoder_factory.h @@ -15,6 +15,7 @@ #include #include "api/audio_codecs/audio_encoder_factory.h" +#include "api/environment/environment.h" #include "api/make_ref_counted.h" #include "api/scoped_refptr.h" #include "test/gmock.h" @@ -28,70 +29,29 @@ class MockAudioEncoderFactory GetSupportedEncoders, (), (override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, QueryAudioEncoder, (const SdpAudioFormat& format), (override)); - - std::unique_ptr MakeAudioEncoder( - int payload_type, - const SdpAudioFormat& format, - absl::optional codec_pair_id) override { - std::unique_ptr return_value; - MakeAudioEncoderMock(payload_type, format, codec_pair_id, &return_value); - return return_value; - } - MOCK_METHOD(void, - MakeAudioEncoderMock, - (int payload_type, - const SdpAudioFormat& format, - absl::optional codec_pair_id, - std::unique_ptr*)); + MOCK_METHOD(std::unique_ptr, + Create, + (const Environment&, const SdpAudioFormat&, Options), + (override)); // Creates a MockAudioEncoderFactory with no formats and that may not be // invoked to create a codec - useful for initializing a voice engine, for // example. - static rtc::scoped_refptr - CreateUnusedFactory() { - using ::testing::_; - using ::testing::AnyNumber; - using ::testing::Return; - - auto factory = rtc::make_ref_counted(); - ON_CALL(*factory.get(), GetSupportedEncoders()) - .WillByDefault(Return(std::vector())); - ON_CALL(*factory.get(), QueryAudioEncoder(_)) - .WillByDefault(Return(absl::nullopt)); - - EXPECT_CALL(*factory.get(), GetSupportedEncoders()).Times(AnyNumber()); - EXPECT_CALL(*factory.get(), QueryAudioEncoder(_)).Times(AnyNumber()); - EXPECT_CALL(*factory.get(), MakeAudioEncoderMock(_, _, _, _)).Times(0); + static scoped_refptr CreateUnusedFactory() { + auto factory = make_ref_counted(); + EXPECT_CALL(*factory, Create).Times(0); return factory; } // Creates a MockAudioEncoderFactory with no formats that may be invoked to // create a codec any number of times. It will, though, return nullptr on each // call, since it supports no codecs. 
- static rtc::scoped_refptr - CreateEmptyFactory() { - using ::testing::_; - using ::testing::AnyNumber; - using ::testing::Return; - using ::testing::SetArgPointee; - - auto factory = rtc::make_ref_counted(); - ON_CALL(*factory.get(), GetSupportedEncoders()) - .WillByDefault(Return(std::vector())); - ON_CALL(*factory.get(), QueryAudioEncoder(_)) - .WillByDefault(Return(absl::nullopt)); - ON_CALL(*factory.get(), MakeAudioEncoderMock(_, _, _, _)) - .WillByDefault(SetArgPointee<3>(nullptr)); - - EXPECT_CALL(*factory.get(), GetSupportedEncoders()).Times(AnyNumber()); - EXPECT_CALL(*factory.get(), QueryAudioEncoder(_)).Times(AnyNumber()); - EXPECT_CALL(*factory.get(), MakeAudioEncoderMock(_, _, _, _)) - .Times(AnyNumber()); - return factory; + static scoped_refptr CreateEmptyFactory() { + return make_ref_counted(); } }; diff --git a/test/mock_transformable_frame.h b/test/mock_transformable_frame.h deleted file mode 100644 index 35b8d92d22..0000000000 --- a/test/mock_transformable_frame.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef TEST_MOCK_TRANSFORMABLE_FRAME_H_ -#define TEST_MOCK_TRANSFORMABLE_FRAME_H_ - -#include "api/frame_transformer_interface.h" -#include "test/gmock.h" - -namespace webrtc { - -class MockTransformableAudioFrame : public TransformableAudioFrameInterface { - public: - MOCK_METHOD(rtc::ArrayView, GetData, (), (const, override)); - MOCK_METHOD(rtc::ArrayView, - GetContributingSources, - (), - (const, override)); - MOCK_METHOD(void, SetData, (rtc::ArrayView), (override)); - MOCK_METHOD(uint8_t, GetPayloadType, (), (const, override)); - MOCK_METHOD(uint32_t, GetSsrc, (), (const, override)); - MOCK_METHOD(uint32_t, GetTimestamp, (), (const, override)); - MOCK_METHOD(void, SetRTPTimestamp, (uint32_t), (override)); - MOCK_METHOD(Direction, GetDirection, (), (const, override)); -}; - -} // namespace webrtc - -#endif // TEST_MOCK_TRANSFORMABLE_FRAME_H_ diff --git a/test/mock_transport.h b/test/mock_transport.h index a997ce8541..87b60b3068 100644 --- a/test/mock_transport.h +++ b/test/mock_transport.h @@ -23,9 +23,9 @@ class MockTransport : public Transport { MOCK_METHOD(bool, SendRtp, - (rtc::ArrayView, const PacketOptions&), + (webrtc::ArrayView, const PacketOptions&), (override)); - MOCK_METHOD(bool, SendRtcp, (rtc::ArrayView), (override)); + MOCK_METHOD(bool, SendRtcp, (webrtc::ArrayView), (override)); }; } // namespace webrtc diff --git a/test/network/BUILD.gn b/test/network/BUILD.gn index 5a6cb31f4b..6f2b030081 100644 --- a/test/network/BUILD.gn +++ b/test/network/BUILD.gn @@ -16,6 +16,7 @@ rtc_library("emulated_network") { ] if (rtc_include_tests) { visibility += [ + "../../modules/congestion_controller/goog_cc:goog_cc_unittests", "../peer_scenario:*", "../scenario:*", ] @@ -38,7 +39,9 @@ rtc_library("emulated_network") { "traffic_route.h", ] deps = [ + ":simulated_network", "../../api:array_view", + "../../api:async_dns_resolver", "../../api:field_trials_view", "../../api:network_emulation_manager_api", "../../api:packet_socket_factory", @@ -47,65 +50,48 @@ rtc_library("emulated_network") { "../../api:simulated_network_api", "../../api:time_controller", 
"../../api/numerics", + "../../api/task_queue", "../../api/task_queue:pending_task_safety_flag", "../../api/test/network_emulation", + "../../api/transport:ecn_marking", "../../api/transport:stun_types", "../../api/units:data_rate", "../../api/units:data_size", "../../api/units:time_delta", "../../api/units:timestamp", - "../../call:simulated_network", "../../p2p:p2p_server_utils", - "../../p2p:rtc_p2p", + "../../p2p:port_interface", "../../rtc_base:async_packet_socket", + "../../rtc_base:checks", "../../rtc_base:copy_on_write_buffer", "../../rtc_base:ip_address", "../../rtc_base:logging", "../../rtc_base:macromagic", + "../../rtc_base:net_helpers", "../../rtc_base:network", "../../rtc_base:network_constants", "../../rtc_base:random", - "../../rtc_base:rtc_base_tests_utils", "../../rtc_base:rtc_event", - "../../rtc_base:rtc_task_queue", "../../rtc_base:safe_minmax", "../../rtc_base:socket", "../../rtc_base:socket_address", + "../../rtc_base:socket_factory", "../../rtc_base:socket_server", "../../rtc_base:stringutils", "../../rtc_base:task_queue_for_test", "../../rtc_base:threading", - "../../rtc_base/memory:always_valid_pointer", + "../../rtc_base/network:received_packet", "../../rtc_base/synchronization:mutex", "../../rtc_base/system:no_unique_address", "../../rtc_base/task_utils:repeating_task", "../../system_wrappers", - "../../test:scoped_key_value_config", "../scenario:column_printer", "../time_controller", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/base:nullability", + "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - -rtc_library("network_emulation_unittest") { - testonly = true - sources = [ "network_emulation_unittest.cc" ] - deps = [ - ":emulated_network", - "../:test_support", - "../../api:simulated_network_api", - "../../api/units:time_delta", - "../../call:simulated_network", - "../../rtc_base:gunit_helpers", - "../../rtc_base:logging", - "../../rtc_base:rtc_event", - "../../rtc_base:task_queue_for_test", - "../../rtc_base/synchronization:mutex", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -115,24 +101,26 @@ if (rtc_include_tests && !build_with_chromium) { sources = [ "network_emulation_pc_unittest.cc" ] deps = [ ":emulated_network", - "../:test_support", - "../../api:callfactory_api", + ":simulated_network", + "..:test_support", + "..:wait_until", + "../../api:audio_options_api", + "../../api:enable_media_with_defaults", "../../api:libjingle_peerconnection_api", + "../../api:media_stream_interface", + "../../api:network_emulation_manager_api", + "../../api:rtc_error_matchers", "../../api:scoped_refptr", "../../api:simulated_network_api", "../../api/rtc_event_log:rtc_event_log_factory", "../../api/task_queue:default_task_queue_factory", + "../../api/test/network_emulation", "../../api/transport:field_trial_based_config", - "../../call:simulated_network", - "../../media:rtc_audio_video", - "../../media:rtc_media_engine_defaults", "../../modules/audio_device:test_audio_device_module", - "../../p2p:rtc_p2p", + "../../p2p:port_allocator", "../../pc:pc_test_utils", "../../pc:peerconnection_wrapper", - "../../rtc_base:gunit_helpers", - "../../rtc_base:logging", - "../../rtc_base:rtc_event", + "../../rtc_base:network", "../../rtc_base:task_queue_for_test", ] } @@ -143,22 +131,57 @@ rtc_library("cross_traffic_unittest") { sources = [ 
"cross_traffic_unittest.cc" ] deps = [ ":emulated_network", - "../:test_support", + "..:test_support", "../../api:network_emulation_manager_api", "../../api:simulated_network_api", - "../../call:simulated_network", + "../../api/test/network_emulation", + "../../api/units:data_rate", + "../../api/units:data_size", + "../../api/units:time_delta", + "../../api/units:timestamp", + "../../rtc_base:ip_address", "../../rtc_base:logging", - "../../rtc_base:network_constants", - "../../rtc_base:rtc_event", - "../time_controller", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/types:optional", + "../../rtc_base:task_queue_for_test", + "../../system_wrappers", ] } if (rtc_include_tests) { + rtc_library("network_emulation_unittest") { + testonly = true + sources = [ "network_emulation_unittest.cc" ] + deps = [ + ":emulated_network", + ":simulated_network", + "..:test_support", + "..:wait_until", + "../../api:network_emulation_manager_api", + "../../api:rtc_error_matchers", + "../../api:simulated_network_api", + "../../api/task_queue", + "../../api/test/network_emulation", + "../../api/transport:ecn_marking", + "../../api/transport:stun_types", + "../../api/units:data_size", + "../../api/units:time_delta", + "../../api/units:timestamp", + "../../rtc_base:buffer", + "../../rtc_base:byte_buffer", + "../../rtc_base:checks", + "../../rtc_base:copy_on_write_buffer", + "../../rtc_base:ip_address", + "../../rtc_base:macromagic", + "../../rtc_base:net_helpers", + "../../rtc_base:socket", + "../../rtc_base:socket_address", + "../../rtc_base:task_queue_for_test", + "../../rtc_base:threading", + "../../rtc_base/synchronization:mutex", + "../../rtc_base/third_party/sigslot", + "//third_party/abseil-cpp/absl/functional:any_invocable", + ] + } + rtc_library("feedback_generator") { testonly = true sources = [ @@ -167,21 +190,29 @@ if (rtc_include_tests) { ] deps = [ ":emulated_network", + ":simulated_network", + "../../api:network_emulation_manager_api", + "../../api:simulated_network_api", "../../api/transport:network_control", "../../api/transport:test_feedback_generator_interface", - "../../call:simulated_network", + "../../api/units:data_rate", + "../../api/units:data_size", + "../../api/units:time_delta", + "../../api/units:timestamp", "../../rtc_base:checks", - "../time_controller", + "//third_party/abseil-cpp/absl/memory", ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] } rtc_library("feedback_generator_unittest") { testonly = true sources = [ "feedback_generator_unittest.cc" ] deps = [ - "../:test_support", + "..:test_support", + "../../api:simulated_network_api", "../../api/transport:test_feedback_generator", + "../../api/transport:test_feedback_generator_interface", + "../../api/units:time_delta", ] } @@ -193,6 +224,89 @@ if (rtc_include_tests) { ":feedback_generator_unittest", ":network_emulation_pc_unittest", ":network_emulation_unittest", + ":simulated_network_unittest", + ] + if (rtc_enable_protobuf) { + deps += [ ":schedulable_network_behavior_test" ] + } + } + } +} + +rtc_library("simulated_network") { + sources = [ + "simulated_network.cc", + "simulated_network.h", + ] + deps = [ + "../../api:sequence_checker", + "../../api:simulated_network_api", + "../../api/units:data_rate", + "../../api/units:data_size", + "../../api/units:time_delta", + "../../api/units:timestamp", + "../../rtc_base:checks", + "../../rtc_base:macromagic", + "../../rtc_base:race_checker", + "../../rtc_base:random", + "../../rtc_base/synchronization:mutex", + 
"../../rtc_base/system:rtc_export", + "//third_party/abseil-cpp/absl/functional:any_invocable", + ] +} + +if (rtc_include_tests) { + rtc_library("simulated_network_unittest") { + testonly = true + sources = [ "simulated_network_unittest.cc" ] + deps = [ + ":simulated_network", + "..:test_support", + "../../api:simulated_network_api", + "../../api/units:data_rate", + "../../api/units:time_delta", + "../../api/units:timestamp", + ] + } +} + +if (rtc_enable_protobuf) { + rtc_library("schedulable_network_behavior") { + sources = [ + "schedulable_network_behavior.cc", + "schedulable_network_behavior.h", + ] + deps = [ + ":simulated_network", + "../../api:sequence_checker", + "../../api:simulated_network_api", + "../../api/task_queue", + "../../api/test/network_emulation:network_config_schedule_proto", + "../../api/units:data_rate", + "../../api/units:time_delta", + "../../api/units:timestamp", + "../../rtc_base:checks", + "../../rtc_base:macromagic", + "../../rtc_base/task_utils:repeating_task", + "../../system_wrappers", + "//third_party/abseil-cpp/absl/functional:any_invocable", + ] + } + + if (rtc_include_tests) { + rtc_library("schedulable_network_behavior_test") { + testonly = true + sources = [ "schedulable_network_behavior_test.cc" ] + deps = [ + ":schedulable_network_behavior", + "..:test_support", + "../../api:create_network_emulation_manager", + "../../api:network_emulation_manager_api", + "../../api:simulated_network_api", + "../../api/test/network_emulation:network_config_schedule_proto", + "../../api/units:time_delta", + "../../api/units:timestamp", + "../../system_wrappers", ] } } diff --git a/test/network/OWNERS b/test/network/OWNERS index b177c4eec5..5fb23613f2 100644 --- a/test/network/OWNERS +++ b/test/network/OWNERS @@ -1 +1,2 @@ titovartem@webrtc.org +terelius@webrtc.org diff --git a/test/network/cross_traffic.cc b/test/network/cross_traffic.cc index dd34bd02f1..d35a83014d 100644 --- a/test/network/cross_traffic.cc +++ b/test/network/cross_traffic.cc @@ -12,13 +12,26 @@ #include +#include +#include +#include +#include #include -#include "absl/memory/memory.h" -#include "absl/types/optional.h" +#include "absl/functional/any_invocable.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/test/network_emulation/cross_traffic.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "cross_traffic.h" -#include "rtc_base/logging.h" #include "rtc_base/numerics/safe_minmax.h" +#include "rtc_base/strings/string_builder.h" +#include "system_wrappers/include/clock.h" +#include "test/network/network_emulation.h" +#include "test/scenario/column_printer.h" namespace webrtc { namespace test { @@ -43,7 +56,7 @@ void RandomWalkCrossTraffic::Process(Timestamp at_time) { if (at_time - last_update_time_ >= config_.update_interval) { intensity_ += random_.Gaussian(config_.bias, config_.variance) * sqrt((at_time - last_update_time_).seconds()); - intensity_ = rtc::SafeClamp(intensity_, 0.0, 1.0); + intensity_ = SafeClamp(intensity_, 0.0, 1.0); last_update_time_ = at_time; } pending_size_ += TrafficRate() * delta; @@ -68,7 +81,7 @@ DataRate RandomWalkCrossTraffic::TrafficRate() const { ColumnPrinter RandomWalkCrossTraffic::StatsPrinter() { return ColumnPrinter::Lambda( "random_walk_cross_traffic_rate", - [this](rtc::SimpleStringBuilder& sb) { + [this](SimpleStringBuilder& sb) { sb.AppendFormat("%.0lf", TrafficRate().bps() / 8.0); }, 32); @@ -119,7 +132,7 @@ 
DataRate PulsedPeaksCrossTraffic::TrafficRate() const { ColumnPrinter PulsedPeaksCrossTraffic::StatsPrinter() { return ColumnPrinter::Lambda( "pulsed_peaks_cross_traffic_rate", - [this](rtc::SimpleStringBuilder& sb) { + [this](SimpleStringBuilder& sb) { sb.AppendFormat("%.0lf", TrafficRate().bps() / 8.0); }, 32); @@ -141,33 +154,34 @@ TcpMessageRouteImpl::TcpMessageRouteImpl(Clock* clock, }) {} void TcpMessageRouteImpl::SendMessage(size_t size, - std::function on_received) { - task_queue_->PostTask([this, size, handler = std::move(on_received)] { - // If we are currently sending a message we won't reset the connection, - // we'll act as if the messages are sent in the same TCP stream. This is - // intended to simulate recreation of a TCP session for each message - // in the typical case while avoiding the complexity overhead of - // maintaining multiple virtual TCP sessions in parallel. - if (pending_.empty() && in_flight_.empty()) { - cwnd_ = 10; - ssthresh_ = INFINITY; - } - int64_t data_left = static_cast(size); - int64_t kMaxPacketSize = 1200; - int64_t kMinPacketSize = 4; - Message message{std::move(handler)}; - while (data_left > 0) { - int64_t packet_size = std::min(data_left, kMaxPacketSize); - int fragment_id = next_fragment_id_++; - pending_.push_back(MessageFragment{ - fragment_id, - static_cast(std::max(kMinPacketSize, packet_size))}); - message.pending_fragment_ids.insert(fragment_id); - data_left -= packet_size; - } - messages_.emplace_back(message); - SendPackets(clock_->CurrentTime()); - }); + absl::AnyInvocable on_received) { + task_queue_->PostTask( + [this, size, handler = std::move(on_received)]() mutable { + // If we are currently sending a message we won't reset the connection, + // we'll act as if the messages are sent in the same TCP stream. This is + // intended to simulate recreation of a TCP session for each message + // in the typical case while avoiding the complexity overhead of + // maintaining multiple virtual TCP sessions in parallel. 
+ if (pending_.empty() && in_flight_.empty()) { + cwnd_ = 10; + ssthresh_ = INFINITY; + } + int64_t data_left = static_cast(size); + int64_t kMaxPacketSize = 1200; + int64_t kMinPacketSize = 4; + Message message{std::move(handler)}; + while (data_left > 0) { + int64_t packet_size = std::min(data_left, kMaxPacketSize); + int fragment_id = next_fragment_id_++; + pending_.push_back(MessageFragment{ + fragment_id, + static_cast(std::max(kMinPacketSize, packet_size))}); + message.pending_fragment_ids.insert(fragment_id); + data_left -= packet_size; + } + messages_.emplace_back(std::move(message)); + SendPackets(clock_->CurrentTime()); + }); } void TcpMessageRouteImpl::OnRequest(TcpPacket packet_info) { diff --git a/test/network/cross_traffic.h b/test/network/cross_traffic.h index d21e942475..0ce7df5057 100644 --- a/test/network/cross_traffic.h +++ b/test/network/cross_traffic.h @@ -11,17 +11,26 @@ #ifndef TEST_NETWORK_CROSS_TRAFFIC_H_ #define TEST_NETWORK_CROSS_TRAFFIC_H_ -#include +#include +#include +#include +#include #include -#include +#include +#include "absl/functional/any_invocable.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/test/network_emulation/cross_traffic.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" #include "api/test/network_emulation_manager.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "rtc_base/random.h" +#include "rtc_base/thread_annotations.h" +#include "system_wrappers/include/clock.h" #include "test/network/network_emulation.h" #include "test/scenario/column_printer.h" @@ -88,14 +97,15 @@ class TcpMessageRouteImpl final : public TcpMessageRoute { // Sends a TCP message of the given `size` over the route, `on_received` is // called when the message has been delivered. Note that the connection // parameters are reset iff there's no currently pending message on the route. - void SendMessage(size_t size, std::function on_received) override; + void SendMessage(size_t size, + absl::AnyInvocable on_received) override; private: // Represents a message sent over the route. When all fragments has been // delivered, the message is considered delivered and the handler is // triggered. This only happen once. struct Message { - std::function handler; + absl::AnyInvocable handler; std::set pending_fragment_ids; }; // Represents a piece of a message that fit into a TCP packet. 
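The switch to `absl::AnyInvocable` above means delivery callbacks may now hold move-only state. A usage sketch against the public network-emulation API, assuming `CreateNetworkEmulationManager()` accepts the brace-initialized config used elsewhere in this patch and that `CreateTcpRoute()` keeps the `(send_route, return_route)` signature; the function and variable names are illustrative only:

```cpp
#include <memory>
#include <utility>

#include "api/test/create_network_emulation_manager.h"
#include "api/test/network_emulation/cross_traffic.h"
#include "api/test/network_emulation_manager.h"
#include "api/test/time_controller.h"
#include "api/units/data_rate.h"
#include "api/units/time_delta.h"

namespace webrtc {
namespace test {

// Sends a single 100 kB "TCP" message over a lossy emulated route and runs a
// move-only callback once every fragment has been delivered.
void SendOneTcpMessage() {
  std::unique_ptr<NetworkEmulationManager> net =
      CreateNetworkEmulationManager({.time_mode = TimeMode::kSimulated});

  BuiltInNetworkBehaviorConfig config;
  config.link_capacity = DataRate::KilobitsPerSec(800);
  config.loss_percent = 10;
  config.queue_delay_ms = 100;

  EmulatedEndpoint* alice = net->CreateEndpoint(EmulatedEndpointConfig());
  EmulatedEndpoint* bob = net->CreateEndpoint(EmulatedEndpointConfig());
  EmulatedRoute* forward =
      net->CreateRoute(alice, {net->CreateEmulatedNode(config)}, bob);
  EmulatedRoute* back =
      net->CreateRoute(bob, {net->CreateEmulatedNode(config)}, alice);
  TcpMessageRoute* route = net->CreateTcpRoute(forward, back);

  // absl::AnyInvocable<void()> accepts move-only captures, which the previous
  // std::function-based signature could not hold.
  auto token = std::make_unique<int>(42);
  route->SendMessage(/*size=*/100'000, [token = std::move(token)]() {
    // Message fully delivered; `token` is owned by the handler.
  });

  // With simulated time, drive the clock so the transfer actually completes.
  net->time_controller()->AdvanceTime(TimeDelta::Seconds(10));
}

}  // namespace test
}  // namespace webrtc
```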
diff --git a/test/network/cross_traffic_unittest.cc b/test/network/cross_traffic_unittest.cc index 36aff67bb2..1632d29c0e 100644 --- a/test/network/cross_traffic_unittest.cc +++ b/test/network/cross_traffic_unittest.cc @@ -11,23 +11,26 @@ #include "test/network/cross_traffic.h" #include -#include -#include +#include +#include #include -#include "absl/memory/memory.h" -#include "absl/types/optional.h" +#include "api/test/network_emulation/cross_traffic.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" #include "api/test/network_emulation_manager.h" #include "api/test/simulated_network.h" -#include "call/simulated_network.h" -#include "rtc_base/event.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" -#include "rtc_base/network_constants.h" -#include "test/gmock.h" +#include "rtc_base/task_queue_for_test.h" +#include "system_wrappers/include/clock.h" #include "test/gtest.h" +#include "test/network/network_emulation.h" #include "test/network/network_emulation_manager.h" #include "test/network/traffic_route.h" -#include "test/time_controller/simulated_time_controller.h" namespace webrtc { namespace test { @@ -51,11 +54,11 @@ struct TrafficCounterFixture { TaskQueueForTest task_queue_; EmulatedEndpointImpl endpoint{EmulatedEndpointImpl::Options{ /*id=*/1, - rtc::IPAddress(kTestIpAddress), + IPAddress(kTestIpAddress), EmulatedEndpointConfig(), EmulatedNetworkStatsGatheringMode::kDefault, }, - /*is_enabled=*/true, &task_queue_, &clock}; + /*is_enabled=*/true, task_queue_.Get(), &clock}; }; } // namespace @@ -125,12 +128,11 @@ TEST(CrossTrafficTest, RandomWalkCrossTraffic) { } TEST(TcpMessageRouteTest, DeliveredOnLossyNetwork) { - NetworkEmulationManagerImpl net(TimeMode::kSimulated, - EmulatedNetworkStatsGatheringMode::kDefault); + NetworkEmulationManagerImpl net({.time_mode = TimeMode::kSimulated}); BuiltInNetworkBehaviorConfig send; // 800 kbps means that the 100 kB message would be delivered in ca 1 second // under ideal conditions and no overhead. - send.link_capacity_kbps = 100 * 8; + send.link_capacity = DataRate::KilobitsPerSec(100 * 8); send.loss_percent = 50; send.queue_delay_ms = 100; send.delay_standard_deviation_ms = 20; diff --git a/test/network/emulated_network_manager.cc b/test/network/emulated_network_manager.cc index fa4037e5db..bc3c1ca1da 100644 --- a/test/network/emulated_network_manager.cc +++ b/test/network/emulated_network_manager.cc @@ -10,58 +10,86 @@ #include "test/network/emulated_network_manager.h" +#include #include #include +#include +#include "absl/base/nullability.h" #include "absl/memory/memory.h" -#include "p2p/base/basic_packet_socket_factory.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/test/time_controller.h" +#include "rtc_base/checks.h" +#include "rtc_base/network.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" #include "test/network/fake_network_socket_server.h" +#include "test/network/network_emulation.h" namespace webrtc { namespace test { +// Framework assumes that webrtc::NetworkManager is called from network thread. 
+class EmulatedNetworkManager::NetworkManagerImpl : public NetworkManagerBase { + public: + explicit NetworkManagerImpl(Thread* absl_nonnull network_thread, + EndpointsContainer* absl_nonnull + endpoints_container) + : network_thread_(network_thread), + endpoints_container_(endpoints_container) {} + + void StartUpdating() override; + void StopUpdating() override; + + void UpdateNetworksOnce(); + void MaybeSignalNetworksChanged(); + + // We don't support any address interfaces in the network emulation framework. + std::vector GetAnyAddressNetworks() override { return {}; } + + private: + Thread* absl_nonnull const network_thread_; + const EndpointsContainer* absl_nonnull const endpoints_container_; + bool sent_first_update_ RTC_GUARDED_BY(network_thread_) = false; + int start_count_ RTC_GUARDED_BY(network_thread_) = 0; +}; + EmulatedNetworkManager::EmulatedNetworkManager( TimeController* time_controller, - TaskQueueForTest* task_queue, + TaskQueueBase* task_queue, EndpointsContainer* endpoints_container) : task_queue_(task_queue), endpoints_container_(endpoints_container), - sent_first_update_(false), - start_count_(0) { - auto socket_server = - std::make_unique(endpoints_container); - packet_socket_factory_ = - std::make_unique(socket_server.get()); - // Since we pass ownership of the socket server to `network_thread_`, we must - // arrange that it outlives `packet_socket_factory_` which refers to it. - network_thread_ = - time_controller->CreateThread("net_thread", std::move(socket_server)); + socket_server_(new FakeNetworkSocketServer(endpoints_container)), + network_thread_( + time_controller->CreateThread("net_thread", + absl::WrapUnique(socket_server_))), + network_manager_( + std::make_unique(network_thread_.get(), + endpoints_container)), + network_manager_ptr_(network_manager_.get()) {} + +EmulatedNetworkManager::~EmulatedNetworkManager() = default; + +absl_nonnull std::unique_ptr +EmulatedNetworkManager::ReleaseNetworkManager() { + RTC_CHECK(network_manager_ != nullptr) + << "ReleaseNetworkManager can be called at most once."; + return std::move(network_manager_); } -void EmulatedNetworkManager::EnableEndpoint(EmulatedEndpointImpl* endpoint) { - RTC_CHECK(endpoints_container_->HasEndpoint(endpoint)) - << "No such interface: " << endpoint->GetPeerLocalAddress().ToString(); - network_thread_->PostTask([this, endpoint]() { - endpoint->Enable(); - UpdateNetworksOnce(); - }); +void EmulatedNetworkManager::UpdateNetworks() { + NetworkManagerImpl* absl_nonnull network_manager = network_manager_ptr_; + network_thread_->PostTask( + [network_manager] { network_manager->UpdateNetworksOnce(); }); } -void EmulatedNetworkManager::DisableEndpoint(EmulatedEndpointImpl* endpoint) { - RTC_CHECK(endpoints_container_->HasEndpoint(endpoint)) - << "No such interface: " << endpoint->GetPeerLocalAddress().ToString(); - network_thread_->PostTask([this, endpoint]() { - endpoint->Disable(); - UpdateNetworksOnce(); - }); -} +void EmulatedNetworkManager::NetworkManagerImpl::StartUpdating() { + RTC_DCHECK_RUN_ON(network_thread_); -// Network manager interface. All these methods are supposed to be called from -// the same thread. -void EmulatedNetworkManager::StartUpdating() { - RTC_DCHECK_RUN_ON(network_thread_.get()); - - if (start_count_) { + if (start_count_ > 0) { // If network interfaces are already discovered and signal is sent, // we should trigger network signal immediately for the new clients // to start allocating ports. 
@@ -73,13 +101,13 @@ void EmulatedNetworkManager::StartUpdating() { ++start_count_; } -void EmulatedNetworkManager::StopUpdating() { - RTC_DCHECK_RUN_ON(network_thread_.get()); - if (!start_count_) +void EmulatedNetworkManager::NetworkManagerImpl::StopUpdating() { + RTC_DCHECK_RUN_ON(network_thread_); + if (start_count_ == 0) return; --start_count_; - if (!start_count_) { + if (start_count_ == 0) { sent_first_update_ = false; } } @@ -91,11 +119,11 @@ void EmulatedNetworkManager::GetStats( }); } -void EmulatedNetworkManager::UpdateNetworksOnce() { - RTC_DCHECK_RUN_ON(network_thread_.get()); +void EmulatedNetworkManager::NetworkManagerImpl::UpdateNetworksOnce() { + RTC_DCHECK_RUN_ON(network_thread_); - std::vector> networks; - for (std::unique_ptr& net : + std::vector> networks; + for (std::unique_ptr& net : endpoints_container_->GetEnabledNetworks()) { net->set_default_local_address_provider(this); networks.push_back(std::move(net)); @@ -109,8 +137,8 @@ void EmulatedNetworkManager::UpdateNetworksOnce() { } } -void EmulatedNetworkManager::MaybeSignalNetworksChanged() { - RTC_DCHECK_RUN_ON(network_thread_.get()); +void EmulatedNetworkManager::NetworkManagerImpl::MaybeSignalNetworksChanged() { + RTC_DCHECK_RUN_ON(network_thread_); // If manager is stopped we don't need to signal anything. if (start_count_ == 0) { return; diff --git a/test/network/emulated_network_manager.h b/test/network/emulated_network_manager.h index fb4ee1ee85..347ad463cb 100644 --- a/test/network/emulated_network_manager.h +++ b/test/network/emulated_network_manager.h @@ -15,11 +15,13 @@ #include #include -#include "api/sequence_checker.h" +#include "absl/base/nullability.h" +#include "api/task_queue/task_queue_base.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" #include "api/test/network_emulation_manager.h" #include "api/test/time_controller.h" -#include "rtc_base/ip_address.h" #include "rtc_base/network.h" +#include "rtc_base/socket_factory.h" #include "rtc_base/socket_server.h" #include "rtc_base/thread.h" #include "test/network/network_emulation.h" @@ -27,34 +29,23 @@ namespace webrtc { namespace test { -// Framework assumes that rtc::NetworkManager is called from network thread. -class EmulatedNetworkManager : public rtc::NetworkManagerBase, - public sigslot::has_slots<>, - public EmulatedNetworkManagerInterface { +class EmulatedNetworkManager : public EmulatedNetworkManagerInterface { public: - EmulatedNetworkManager(TimeController* time_controller, - TaskQueueForTest* task_queue, - EndpointsContainer* endpoints_container); + EmulatedNetworkManager(TimeController* absl_nonnull time_controller, + TaskQueueBase* absl_nonnull task_queue, + EndpointsContainer* absl_nonnull endpoints_container); + ~EmulatedNetworkManager() override; - void EnableEndpoint(EmulatedEndpointImpl* endpoint); - void DisableEndpoint(EmulatedEndpointImpl* endpoint); + void UpdateNetworks(); - // NetworkManager interface. All these methods are supposed to be called from - // the same thread. - void StartUpdating() override; - void StopUpdating() override; - - // We don't support any address interfaces in the network emulation framework. 
- std::vector GetAnyAddressNetworks() override { - return {}; + Thread* absl_nonnull network_thread() override { + return network_thread_.get(); } - - // EmulatedNetworkManagerInterface API - rtc::Thread* network_thread() override { return network_thread_.get(); } - rtc::NetworkManager* network_manager() override { return this; } - rtc::PacketSocketFactory* packet_socket_factory() override { - return packet_socket_factory_.get(); + SocketFactory* absl_nonnull socket_factory() override { + return socket_server_; } + absl_nonnull std::unique_ptr ReleaseNetworkManager() override; + std::vector endpoints() const override { return endpoints_container_->GetEndpoints(); } @@ -62,19 +53,21 @@ class EmulatedNetworkManager : public rtc::NetworkManagerBase, std::function stats_callback) const override; private: - void UpdateNetworksOnce(); - void MaybeSignalNetworksChanged(); + class NetworkManagerImpl; + + TaskQueueBase* absl_nonnull const task_queue_; + const EndpointsContainer* absl_nonnull const endpoints_container_; + + // Socket server is owned by the `network_thread_' + SocketServer* absl_nonnull const socket_server_; + + const absl_nonnull std::unique_ptr network_thread_; + absl_nullable std::unique_ptr network_manager_; - TaskQueueForTest* const task_queue_; - const EndpointsContainer* const endpoints_container_; - // The `network_thread_` must outlive `packet_socket_factory_`, because they - // both refer to a socket server that is owned by `network_thread_`. Both - // pointers are assigned only in the constructor, but the way they are - // initialized unfortunately doesn't work with const std::unique_ptr<...>. - std::unique_ptr network_thread_; - std::unique_ptr packet_socket_factory_; - bool sent_first_update_ RTC_GUARDED_BY(network_thread_); - int start_count_ RTC_GUARDED_BY(network_thread_); + // Keep pointer to the network manager when it is extracted to be injected + // into PeerConnectionFactory. That is brittle and may crash if a test would + // try to use emulated network after related PeerConnectionFactory is deleted. + NetworkManagerImpl* absl_nonnull const network_manager_ptr_; }; } // namespace test diff --git a/test/network/emulated_turn_server.cc b/test/network/emulated_turn_server.cc index 0bc7ec6e2a..6e5cc089ae 100644 --- a/test/network/emulated_turn_server.cc +++ b/test/network/emulated_turn_server.cc @@ -10,69 +10,36 @@ #include "test/network/emulated_turn_server.h" +#include +#include +#include #include #include +#include "api/async_dns_resolver.h" #include "api/packet_socket_factory.h" +#include "api/sequence_checker.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/test/network_emulation_manager.h" +#include "p2p/base/port_interface.h" +#include "p2p/test/turn_server.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/network/received_packet.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/task_queue_for_test.h" +#include "rtc_base/thread.h" namespace { static const char kTestRealm[] = "example.org"; static const char kTestSoftware[] = "TestTurnServer"; -// A wrapper class for copying data between an AsyncPacketSocket and a -// EmulatedEndpoint. This is used by the cricket::TurnServer when -// sending data back into the emulated network. 
-class AsyncPacketSocketWrapper : public rtc::AsyncPacketSocket { - public: - AsyncPacketSocketWrapper(webrtc::test::EmulatedTURNServer* turn_server, - webrtc::EmulatedEndpoint* endpoint, - uint16_t port) - : turn_server_(turn_server), - endpoint_(endpoint), - local_address_( - rtc::SocketAddress(endpoint_->GetPeerLocalAddress(), port)) {} - ~AsyncPacketSocketWrapper() { turn_server_->Unbind(local_address_); } - - rtc::SocketAddress GetLocalAddress() const override { return local_address_; } - rtc::SocketAddress GetRemoteAddress() const override { - return rtc::SocketAddress(); - } - int Send(const void* pv, - size_t cb, - const rtc::PacketOptions& options) override { - RTC_CHECK(false) << "TCP not implemented"; - return -1; - } - int SendTo(const void* pv, - size_t cb, - const rtc::SocketAddress& addr, - const rtc::PacketOptions& options) override { - // Copy from rtc::AsyncPacketSocket to EmulatedEndpoint. - rtc::CopyOnWriteBuffer buf(reinterpret_cast(pv), cb); - endpoint_->SendPacket(local_address_, addr, buf); - return cb; - } - int Close() override { return 0; } - - rtc::AsyncPacketSocket::State GetState() const override { - return rtc::AsyncPacketSocket::STATE_BOUND; - } - int GetOption(rtc::Socket::Option opt, int* value) override { return 0; } - int SetOption(rtc::Socket::Option opt, int value) override { return 0; } - int GetError() const override { return 0; } - void SetError(int error) override {} - - private: - webrtc::test::EmulatedTURNServer* const turn_server_; - webrtc::EmulatedEndpoint* const endpoint_; - const rtc::SocketAddress local_address_; -}; - -// A wrapper class for cricket::TurnServer to allocate sockets. -class PacketSocketFactoryWrapper : public rtc::PacketSocketFactory { +// A wrapper class for webrtc::TurnServer to allocate sockets. +class PacketSocketFactoryWrapper : public webrtc::PacketSocketFactory { public: explicit PacketSocketFactoryWrapper( webrtc::test::EmulatedTURNServer* turn_server) @@ -81,25 +48,24 @@ class PacketSocketFactoryWrapper : public rtc::PacketSocketFactory { // This method is called from TurnServer when making a TURN ALLOCATION. // It will create a socket on the `peer_` endpoint. 
- rtc::AsyncPacketSocket* CreateUdpSocket(const rtc::SocketAddress& address, - uint16_t min_port, - uint16_t max_port) override { + webrtc::AsyncPacketSocket* CreateUdpSocket( + const webrtc::SocketAddress& address, + uint16_t min_port, + uint16_t max_port) override { return turn_server_->CreatePeerSocket(); } - rtc::AsyncListenSocket* CreateServerTcpSocket( - const rtc::SocketAddress& local_address, + webrtc::AsyncListenSocket* CreateServerTcpSocket( + const webrtc::SocketAddress& local_address, uint16_t min_port, uint16_t max_port, int opts) override { return nullptr; } - rtc::AsyncPacketSocket* CreateClientTcpSocket( - const rtc::SocketAddress& local_address, - const rtc::SocketAddress& remote_address, - const rtc::ProxyInfo& proxy_info, - const std::string& user_agent, - const rtc::PacketSocketTcpOptions& tcp_options) override { + webrtc::AsyncPacketSocket* CreateClientTcpSocket( + const webrtc::SocketAddress& local_address, + const webrtc::SocketAddress& remote_address, + const webrtc::PacketSocketTcpOptions& tcp_options) override { return nullptr; } std::unique_ptr CreateAsyncDnsResolver() @@ -116,26 +82,78 @@ class PacketSocketFactoryWrapper : public rtc::PacketSocketFactory { namespace webrtc { namespace test { -EmulatedTURNServer::EmulatedTURNServer(std::unique_ptr thread, +// A wrapper class for copying data between an AsyncPacketSocket and a +// EmulatedEndpoint. This is used by the webrtc::TurnServer when +// sending data back into the emulated network. +class EmulatedTURNServer::AsyncPacketSocketWrapper : public AsyncPacketSocket { + public: + AsyncPacketSocketWrapper(webrtc::test::EmulatedTURNServer* turn_server, + webrtc::EmulatedEndpoint* endpoint, + uint16_t port) + : turn_server_(turn_server), + endpoint_(endpoint), + local_address_(SocketAddress(endpoint_->GetPeerLocalAddress(), port)) {} + ~AsyncPacketSocketWrapper() { turn_server_->Unbind(local_address_); } + + SocketAddress GetLocalAddress() const override { return local_address_; } + SocketAddress GetRemoteAddress() const override { return SocketAddress(); } + int Send(const void* pv, + size_t cb, + const AsyncSocketPacketOptions& options) override { + RTC_CHECK(false) << "TCP not implemented"; + return -1; + } + int SendTo(const void* pv, + size_t cb, + const SocketAddress& addr, + const AsyncSocketPacketOptions& options) override { + // Copy from webrtc::AsyncPacketSocket to EmulatedEndpoint. 
+ CopyOnWriteBuffer buf(reinterpret_cast(pv), cb); + endpoint_->SendPacket(local_address_, addr, buf); + return cb; + } + int Close() override { return 0; } + void NotifyPacketReceived(const ReceivedIpPacket& packet) { + AsyncPacketSocket::NotifyPacketReceived(packet); + } + + AsyncPacketSocket::State GetState() const override { + return AsyncPacketSocket::STATE_BOUND; + } + int GetOption(Socket::Option opt, int* value) override { return 0; } + int SetOption(Socket::Option opt, int value) override { return 0; } + int GetError() const override { return 0; } + void SetError(int error) override {} + + private: + webrtc::test::EmulatedTURNServer* const turn_server_; + webrtc::EmulatedEndpoint* const endpoint_; + const SocketAddress local_address_; +}; + +EmulatedTURNServer::EmulatedTURNServer(const EmulatedTURNServerConfig& config, + std::unique_ptr thread, EmulatedEndpoint* client, EmulatedEndpoint* peer) : thread_(std::move(thread)), client_(client), peer_(peer) { ice_config_.username = "keso"; ice_config_.password = "keso"; - SendTask(thread_.get(), [=]() { + SendTask(thread_.get(), [this, enable_permission_checks = + config.enable_permission_checks]() { RTC_DCHECK_RUN_ON(thread_.get()); - turn_server_ = std::make_unique(thread_.get()); + turn_server_ = std::make_unique(thread_.get()); turn_server_->set_realm(kTestRealm); turn_server_->set_realm(kTestSoftware); turn_server_->set_auth_hook(this); + turn_server_->set_enable_permission_checks(enable_permission_checks); auto client_socket = Wrap(client_); - turn_server_->AddInternalSocket(client_socket, cricket::PROTO_UDP); + turn_server_->AddInternalSocket(client_socket, PROTO_UDP); turn_server_->SetExternalSocketFactory(new PacketSocketFactoryWrapper(this), - rtc::SocketAddress()); + SocketAddress()); client_address_ = client_socket->GetLocalAddress(); char buf[256]; - rtc::SimpleStringBuilder str(buf); + SimpleStringBuilder str(buf); str.AppendFormat("turn:%s?transport=udp", client_address_.ToString().c_str()); ice_config_.url = str.str(); @@ -143,41 +161,40 @@ EmulatedTURNServer::EmulatedTURNServer(std::unique_ptr thread, } void EmulatedTURNServer::Stop() { - SendTask(thread_.get(), [=]() { + SendTask(thread_.get(), [this]() { RTC_DCHECK_RUN_ON(thread_.get()); sockets_.clear(); }); } EmulatedTURNServer::~EmulatedTURNServer() { - SendTask(thread_.get(), [=]() { + SendTask(thread_.get(), [this]() { RTC_DCHECK_RUN_ON(thread_.get()); turn_server_.reset(nullptr); }); } -rtc::AsyncPacketSocket* EmulatedTURNServer::Wrap(EmulatedEndpoint* endpoint) { +AsyncPacketSocket* EmulatedTURNServer::Wrap(EmulatedEndpoint* endpoint) { RTC_DCHECK_RUN_ON(thread_.get()); auto port = endpoint->BindReceiver(0, this).value(); auto socket = new AsyncPacketSocketWrapper(this, endpoint, port); - sockets_[rtc::SocketAddress(endpoint->GetPeerLocalAddress(), port)] = socket; + sockets_[SocketAddress(endpoint->GetPeerLocalAddress(), port)] = socket; return socket; } void EmulatedTURNServer::OnPacketReceived(webrtc::EmulatedIpPacket packet) { - // Copy from EmulatedEndpoint to rtc::AsyncPacketSocket. + // Copy from EmulatedEndpoint to webrtc::AsyncPacketSocket. 
thread_->PostTask([this, packet(std::move(packet))]() { RTC_DCHECK_RUN_ON(thread_.get()); auto it = sockets_.find(packet.to); if (it != sockets_.end()) { - it->second->SignalReadPacket( - it->second, reinterpret_cast(packet.cdata()), - packet.size(), packet.from, packet.arrival_time.ms()); + it->second->NotifyPacketReceived( + ReceivedIpPacket(packet.data, packet.from, packet.arrival_time)); } }); } -void EmulatedTURNServer::Unbind(rtc::SocketAddress address) { +void EmulatedTURNServer::Unbind(SocketAddress address) { RTC_DCHECK_RUN_ON(thread_.get()); if (GetClientEndpoint()->GetPeerLocalAddress() == address.ipaddr()) { GetClientEndpoint()->UnbindReceiver(address.port()); diff --git a/test/network/emulated_turn_server.h b/test/network/emulated_turn_server.h index 9cb0ceabf6..0cd5bf6603 100644 --- a/test/network/emulated_turn_server.h +++ b/test/network/emulated_turn_server.h @@ -16,33 +16,38 @@ #include #include "absl/strings/string_view.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" #include "api/test/network_emulation_manager.h" #include "api/transport/stun.h" -#include "p2p/base/turn_server.h" +#include "p2p/test/turn_server.h" #include "rtc_base/async_packet_socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { namespace test { -// EmulatedTURNServer wraps cricket::TurnServer to be used inside +// EmulatedTURNServer wraps webrtc::TurnServer to be used inside // a emulated network. // // Packets from EmulatedEndpoint (client or peer) are received in // EmulatedTURNServer::OnPacketReceived which performs a map lookup -// and delivers them into cricket::TurnServer using +// and delivers them into webrtc::TurnServer using // AsyncPacketSocket::SignalReadPacket // -// Packets from cricket::TurnServer to EmulatedEndpoint are sent into +// Packets from webrtc::TurnServer to EmulatedEndpoint are sent into // using a wrapper around AsyncPacketSocket (no lookup required as the // wrapper around AsyncPacketSocket keep a pointer to the EmulatedEndpoint). class EmulatedTURNServer : public EmulatedTURNServerInterface, - public cricket::TurnAuthInterface, + public TurnAuthInterface, public webrtc::EmulatedNetworkReceiverInterface { public: // Create an EmulatedTURNServer. - // `thread` is a thread that will be used to run cricket::TurnServer + // `thread` is a thread that will be used to run webrtc::TurnServer // that expects all calls to be made from a single thread. 
- EmulatedTURNServer(std::unique_ptr thread, + EmulatedTURNServer(const EmulatedTURNServerConfig& config, + std::unique_ptr thread, EmulatedEndpoint* client, EmulatedEndpoint* peer); ~EmulatedTURNServer() override; @@ -51,45 +56,46 @@ class EmulatedTURNServer : public EmulatedTURNServerInterface, EmulatedEndpoint* GetClientEndpoint() const override { return client_; } - rtc::SocketAddress GetClientEndpointAddress() const override { + SocketAddress GetClientEndpointAddress() const override { return client_address_; } EmulatedEndpoint* GetPeerEndpoint() const override { return peer_; } - // cricket::TurnAuthInterface + // webrtc::TurnAuthInterface bool GetKey(absl::string_view username, absl::string_view realm, std::string* key) override { - return cricket::ComputeStunCredentialHash( - std::string(username), std::string(realm), std::string(username), key); + return ComputeStunCredentialHash(std::string(username), std::string(realm), + std::string(username), key); } - rtc::AsyncPacketSocket* CreatePeerSocket() { return Wrap(peer_); } + AsyncPacketSocket* CreatePeerSocket() { return Wrap(peer_); } // This method is called by network emulation when a packet // comes from an emulated link. void OnPacketReceived(webrtc::EmulatedIpPacket packet) override; // This is called when the TURN server deletes a socket. - void Unbind(rtc::SocketAddress address); + void Unbind(SocketAddress address); // Unbind all sockets. void Stop(); private: - std::unique_ptr thread_; - rtc::SocketAddress client_address_; + std::unique_ptr thread_; + SocketAddress client_address_; IceServerConfig ice_config_; EmulatedEndpoint* const client_; EmulatedEndpoint* const peer_; - std::unique_ptr turn_server_ RTC_GUARDED_BY(&thread_); - std::map sockets_ + std::unique_ptr turn_server_ RTC_GUARDED_BY(&thread_); + class AsyncPacketSocketWrapper; + std::map sockets_ RTC_GUARDED_BY(&thread_); // Wraps a EmulatedEndpoint in a AsyncPacketSocket to bridge interaction - // with TurnServer. cricket::TurnServer gets ownership of the socket. - rtc::AsyncPacketSocket* Wrap(EmulatedEndpoint* endpoint); + // with TurnServer. webrtc::TurnServer gets ownership of the socket. 
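For tests that want ICE traffic to traverse this emulated TURN server, the `IceServerConfig` assembled in the constructor above can be fed straight into a `PeerConnectionInterface::IceServer`. A sketch, assuming `NetworkEmulationManager::CreateTURNServer()` and `GetIceServerConfig()` keep the shapes used by this patch and that `EmulatedTURNServerConfig` exposes the `enable_permission_checks` field read in the constructor; the helper name is invented:

```cpp
#include "api/peer_connection_interface.h"
#include "api/test/network_emulation_manager.h"

namespace webrtc {
namespace test {

// Turns the emulated TURN server's credentials into an IceServer entry that a
// test PeerConnection can use alongside emulated endpoints.
PeerConnectionInterface::IceServer MakeEmulatedTurnIceServer(
    NetworkEmulationManager& net) {
  EmulatedTURNServerConfig turn_config;
  // Field introduced together with the constructor change above (assumption).
  turn_config.enable_permission_checks = true;
  EmulatedTURNServerInterface* turn = net.CreateTURNServer(turn_config);

  // The server publishes a "turn:<ip>:<port>?transport=udp" URL plus the
  // static test credentials ("keso"/"keso" in this implementation).
  EmulatedTURNServerInterface::IceServerConfig ice = turn->GetIceServerConfig();
  PeerConnectionInterface::IceServer server;
  server.urls.push_back(ice.url);
  server.username = ice.username;
  server.password = ice.password;
  return server;
}

}  // namespace test
}  // namespace webrtc
```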
+ AsyncPacketSocket* Wrap(EmulatedEndpoint* endpoint); }; } // namespace test diff --git a/test/network/fake_network_socket_server.cc b/test/network/fake_network_socket_server.cc index 63828e5d70..c9a1fb50a5 100644 --- a/test/network/fake_network_socket_server.cc +++ b/test/network/fake_network_socket_server.cc @@ -10,55 +10,69 @@ #include "test/network/fake_network_socket_server.h" -#include +#include +#include +#include +#include +#include #include #include #include #include "absl/algorithm/container.h" #include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/transport/ecn_marking.h" +#include "api/units/time_delta.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/event.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" +#include "test/network/network_emulation.h" namespace webrtc { namespace test { namespace { -std::string ToString(const rtc::SocketAddress& addr) { +std::string ToString(const SocketAddress& addr) { return addr.HostAsURIString() + ":" + std::to_string(addr.port()); } } // namespace // Represents a socket, which will operate with emulated network. -class FakeNetworkSocket : public rtc::Socket, +class FakeNetworkSocket : public Socket, public EmulatedNetworkReceiverInterface { public: explicit FakeNetworkSocket(FakeNetworkSocketServer* scoket_manager, - rtc::Thread* thread); + Thread* thread); ~FakeNetworkSocket() override; // Will be invoked by EmulatedEndpoint to deliver packets into this socket. 
void OnPacketReceived(EmulatedIpPacket packet) override; - // rtc::Socket methods: - rtc::SocketAddress GetLocalAddress() const override; - rtc::SocketAddress GetRemoteAddress() const override; - int Bind(const rtc::SocketAddress& addr) override; - int Connect(const rtc::SocketAddress& addr) override; + // webrtc::Socket methods: + SocketAddress GetLocalAddress() const override; + SocketAddress GetRemoteAddress() const override; + int Bind(const SocketAddress& addr) override; + int Connect(const SocketAddress& addr) override; int Close() override; int Send(const void* pv, size_t cb) override; - int SendTo(const void* pv, - size_t cb, - const rtc::SocketAddress& addr) override; - int Recv(void* pv, size_t cb, int64_t* timestamp) override; - int RecvFrom(void* pv, - size_t cb, - rtc::SocketAddress* paddr, - int64_t* timestamp) override; + int SendTo(const void* pv, size_t cb, const SocketAddress& addr) override; + int Recv(void* pv, size_t cb, int64_t* timestamp) override { + RTC_DCHECK_NOTREACHED() << " Use RecvFrom instead."; + return 0; + } + int RecvFrom(ReceiveBuffer& buffer) override; int Listen(int backlog) override; - rtc::Socket* Accept(rtc::SocketAddress* paddr) override; + Socket* Accept(SocketAddress* paddr) override; int GetError() const override; void SetError(int error) override; ConnState GetState() const override; @@ -67,20 +81,20 @@ class FakeNetworkSocket : public rtc::Socket, private: FakeNetworkSocketServer* const socket_server_; - rtc::Thread* const thread_; + Thread* const thread_; EmulatedEndpointImpl* endpoint_ RTC_GUARDED_BY(&thread_); - rtc::SocketAddress local_addr_ RTC_GUARDED_BY(&thread_); - rtc::SocketAddress remote_addr_ RTC_GUARDED_BY(&thread_); + SocketAddress local_addr_ RTC_GUARDED_BY(&thread_); + SocketAddress remote_addr_ RTC_GUARDED_BY(&thread_); ConnState state_ RTC_GUARDED_BY(&thread_); int error_ RTC_GUARDED_BY(&thread_); std::map options_map_ RTC_GUARDED_BY(&thread_); - absl::optional pending_ RTC_GUARDED_BY(thread_); - rtc::scoped_refptr alive_; + std::optional pending_ RTC_GUARDED_BY(thread_); + scoped_refptr alive_; }; FakeNetworkSocket::FakeNetworkSocket(FakeNetworkSocketServer* socket_server, - rtc::Thread* thread) + Thread* thread) : socket_server_(socket_server), thread_(thread), state_(CS_CLOSED), @@ -113,17 +127,17 @@ void FakeNetworkSocket::OnPacketReceived(EmulatedIpPacket packet) { socket_server_->WakeUp(); } -rtc::SocketAddress FakeNetworkSocket::GetLocalAddress() const { +SocketAddress FakeNetworkSocket::GetLocalAddress() const { RTC_DCHECK_RUN_ON(thread_); return local_addr_; } -rtc::SocketAddress FakeNetworkSocket::GetRemoteAddress() const { +SocketAddress FakeNetworkSocket::GetRemoteAddress() const { RTC_DCHECK_RUN_ON(thread_); return remote_addr_; } -int FakeNetworkSocket::Bind(const rtc::SocketAddress& addr) { +int FakeNetworkSocket::Bind(const SocketAddress& addr) { RTC_DCHECK_RUN_ON(thread_); RTC_CHECK(local_addr_.IsNil()) << "Socket already bound to address: " << ToString(local_addr_); @@ -135,7 +149,7 @@ int FakeNetworkSocket::Bind(const rtc::SocketAddress& addr) { error_ = EADDRNOTAVAIL; return 2; } - absl::optional port = + std::optional port = endpoint_->BindReceiver(local_addr_.port(), this); if (!port) { local_addr_.Clear(); @@ -147,7 +161,7 @@ int FakeNetworkSocket::Bind(const rtc::SocketAddress& addr) { return 0; } -int FakeNetworkSocket::Connect(const rtc::SocketAddress& addr) { +int FakeNetworkSocket::Connect(const SocketAddress& addr) { RTC_DCHECK_RUN_ON(thread_); RTC_CHECK(remote_addr_.IsNil()) << "Socket already 
connected to address: " << ToString(remote_addr_); @@ -166,7 +180,7 @@ int FakeNetworkSocket::Send(const void* pv, size_t cb) { int FakeNetworkSocket::SendTo(const void* pv, size_t cb, - const rtc::SocketAddress& addr) { + const SocketAddress& addr) { RTC_DCHECK_RUN_ON(thread_); RTC_CHECK(!local_addr_.IsNil()) << "Socket have to be bind to some local address"; @@ -174,55 +188,34 @@ int FakeNetworkSocket::SendTo(const void* pv, error_ = ENETDOWN; return -1; } - rtc::CopyOnWriteBuffer packet(static_cast(pv), cb); - endpoint_->SendPacket(local_addr_, addr, packet); - return cb; -} + CopyOnWriteBuffer packet(static_cast(pv), cb); + EcnMarking ecn = EcnMarking::kNotEct; + auto it = options_map_.find(OPT_SEND_ECN); + if (it != options_map_.end() && it->second == 1) { + ecn = EcnMarking::kEct1; + } -int FakeNetworkSocket::Recv(void* pv, size_t cb, int64_t* timestamp) { - rtc::SocketAddress paddr; - return RecvFrom(pv, cb, &paddr, timestamp); + endpoint_->SendPacket(local_addr_, addr, packet, /*application_overhead=*/0, + ecn); + return cb; } -// Reads 1 packet from internal queue. Reads up to `cb` bytes into `pv` -// and returns the length of received packet. -int FakeNetworkSocket::RecvFrom(void* pv, - size_t cb, - rtc::SocketAddress* paddr, - int64_t* timestamp) { +int FakeNetworkSocket::RecvFrom(ReceiveBuffer& buffer) { RTC_DCHECK_RUN_ON(thread_); - - if (timestamp) { - *timestamp = -1; - } RTC_CHECK(pending_); - - *paddr = pending_->from; - size_t data_read = std::min(cb, pending_->size()); - memcpy(pv, pending_->cdata(), data_read); - *timestamp = pending_->arrival_time.us(); - - // According to RECV(2) Linux Man page - // real socket will discard data, that won't fit into provided buffer, - // but we won't to skip such error, so we will assert here. - RTC_CHECK(data_read == pending_->size()) - << "Too small buffer is provided for socket read. " - "Received data size: " - << pending_->size() << "; Provided buffer size: " << cb; - + buffer.source_address = pending_->from; + buffer.arrival_time = pending_->arrival_time; + buffer.payload.SetData(pending_->cdata(), pending_->size()); + buffer.ecn = pending_->ecn; pending_.reset(); - - // According to RECV(2) Linux Man page - // real socket will return message length, not data read. In our case it is - // actually the same value. 
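The `SendTo()` change above derives the packet's ECN marking from the `OPT_SEND_ECN` socket option before handing it to the endpoint. A minimal, self-contained restatement of that mapping; `Socket::OPT_SEND_ECN` and `EcnMarking` are taken from the patch, while the helper name is invented for illustration:

```cpp
#include <map>

#include "api/transport/ecn_marking.h"
#include "rtc_base/socket.h"

namespace webrtc {
namespace test {

// Mirrors FakeNetworkSocket::SendTo(): an OPT_SEND_ECN value of 1 marks
// outgoing packets as ECT(1); anything else leaves them Not-ECT.
EcnMarking EcnFromSocketOptions(
    const std::map<Socket::Option, int>& options_map) {
  auto it = options_map.find(Socket::OPT_SEND_ECN);
  if (it != options_map.end() && it->second == 1) {
    return EcnMarking::kEct1;
  }
  return EcnMarking::kNotEct;
}

}  // namespace test
}  // namespace webrtc
```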
- return static_cast(data_read); + return buffer.payload.size(); } int FakeNetworkSocket::Listen(int backlog) { RTC_CHECK(false) << "Listen() isn't valid for SOCK_DGRAM"; } -rtc::Socket* FakeNetworkSocket::Accept(rtc::SocketAddress* /*paddr*/) { +Socket* FakeNetworkSocket::Accept(SocketAddress* /*paddr*/) { RTC_CHECK(false) << "Accept() isn't valid for SOCK_DGRAM"; } @@ -248,7 +241,7 @@ void FakeNetworkSocket::SetError(int error) { error_ = error; } -rtc::Socket::ConnState FakeNetworkSocket::GetState() const { +Socket::ConnState FakeNetworkSocket::GetState() const { RTC_DCHECK_RUN_ON(thread_); return state_; } @@ -276,7 +269,7 @@ FakeNetworkSocketServer::FakeNetworkSocketServer( FakeNetworkSocketServer::~FakeNetworkSocketServer() = default; EmulatedEndpointImpl* FakeNetworkSocketServer::GetEndpointNode( - const rtc::IPAddress& ip) { + const IPAddress& ip) { return endpoints_container_->LookupByLocalAddress(ip); } @@ -285,7 +278,7 @@ void FakeNetworkSocketServer::Unregister(FakeNetworkSocket* socket) { sockets_.erase(absl::c_find(sockets_, socket)); } -rtc::Socket* FakeNetworkSocketServer::CreateSocket(int family, int type) { +Socket* FakeNetworkSocketServer::CreateSocket(int family, int type) { RTC_DCHECK(family == AF_INET || family == AF_INET6); // We support only UDP sockets for now. RTC_DCHECK(type == SOCK_DGRAM) << "Only UDP sockets are supported"; @@ -298,14 +291,14 @@ rtc::Socket* FakeNetworkSocketServer::CreateSocket(int family, int type) { return out; } -void FakeNetworkSocketServer::SetMessageQueue(rtc::Thread* thread) { +void FakeNetworkSocketServer::SetMessageQueue(Thread* thread) { thread_ = thread; } // Always returns true (if return false, it won't be invoked again...) bool FakeNetworkSocketServer::Wait(webrtc::TimeDelta max_wait_duration, bool process_io) { - RTC_DCHECK(thread_ == rtc::Thread::Current()); + RTC_DCHECK(thread_ == Thread::Current()); if (!max_wait_duration.IsZero()) wakeup_.Wait(max_wait_duration); diff --git a/test/network/fake_network_socket_server.h b/test/network/fake_network_socket_server.h index 68624a7791..a08e362fba 100644 --- a/test/network/fake_network_socket_server.h +++ b/test/network/fake_network_socket_server.h @@ -11,15 +11,15 @@ #ifndef TEST_NETWORK_FAKE_NETWORK_SOCKET_SERVER_H_ #define TEST_NETWORK_FAKE_NETWORK_SOCKET_SERVER_H_ -#include #include -#include "api/units/timestamp.h" +#include "api/units/time_delta.h" #include "rtc_base/event.h" +#include "rtc_base/ip_address.h" #include "rtc_base/socket.h" #include "rtc_base/socket_server.h" #include "rtc_base/synchronization/mutex.h" -#include "system_wrappers/include/clock.h" +#include "rtc_base/thread_annotations.h" #include "test/network/network_emulation.h" namespace webrtc { @@ -27,30 +27,30 @@ namespace test { class FakeNetworkSocket; // FakeNetworkSocketServer must outlive any sockets it creates. -class FakeNetworkSocketServer : public rtc::SocketServer { +class FakeNetworkSocketServer : public SocketServer { public: explicit FakeNetworkSocketServer(EndpointsContainer* endpoints_controller); ~FakeNetworkSocketServer() override; - // rtc::SocketFactory methods: - rtc::Socket* CreateSocket(int family, int type) override; + // webrtc::SocketFactory methods: + Socket* CreateSocket(int family, int type) override; - // rtc::SocketServer methods: + // webrtc::SocketServer methods: // Called by the network thread when this server is installed, kicking off the // message handler loop. 
- void SetMessageQueue(rtc::Thread* thread) override; + void SetMessageQueue(Thread* thread) override; bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) override; void WakeUp() override; protected: friend class FakeNetworkSocket; - EmulatedEndpointImpl* GetEndpointNode(const rtc::IPAddress& ip); + EmulatedEndpointImpl* GetEndpointNode(const IPAddress& ip); void Unregister(FakeNetworkSocket* socket); private: const EndpointsContainer* endpoints_container_; - rtc::Event wakeup_; - rtc::Thread* thread_ = nullptr; + Event wakeup_; + Thread* thread_ = nullptr; Mutex lock_; std::vector sockets_ RTC_GUARDED_BY(lock_); diff --git a/test/network/feedback_generator.cc b/test/network/feedback_generator.cc index e339fd87b0..7462c99e54 100644 --- a/test/network/feedback_generator.cc +++ b/test/network/feedback_generator.cc @@ -9,16 +9,28 @@ */ #include "test/network/feedback_generator.h" +#include +#include +#include + #include "absl/memory/memory.h" +#include "api/test/network_emulation_manager.h" +#include "api/test/simulated_network.h" #include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "rtc_base/checks.h" +#include "test/network/network_emulation_manager.h" +#include "test/network/simulated_network.h" namespace webrtc { FeedbackGeneratorImpl::FeedbackGeneratorImpl( FeedbackGeneratorImpl::Config config) : conf_(config), - net_(TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault), + net_({.time_mode = TimeMode::kSimulated}), send_link_{new SimulatedNetwork(conf_.send_link)}, ret_link_{new SimulatedNetwork(conf_.return_link)}, route_(this, @@ -62,7 +74,7 @@ void FeedbackGeneratorImpl::SetReturnConfig( } void FeedbackGeneratorImpl::SetSendLinkCapacity(DataRate capacity) { - conf_.send_link.link_capacity_kbps = capacity.kbps(); + conf_.send_link.link_capacity = capacity; send_link_->SetConfig(conf_.send_link); } diff --git a/test/network/feedback_generator.h b/test/network/feedback_generator.h index ecd4597d3f..c7975f69fa 100644 --- a/test/network/feedback_generator.h +++ b/test/network/feedback_generator.h @@ -10,17 +10,20 @@ #ifndef TEST_NETWORK_FEEDBACK_GENERATOR_H_ #define TEST_NETWORK_FEEDBACK_GENERATOR_H_ +#include #include #include -#include #include +#include "api/test/simulated_network.h" #include "api/transport/network_types.h" #include "api/transport/test/feedback_generator_interface.h" -#include "call/simulated_network.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "test/network/network_emulation.h" #include "test/network/network_emulation_manager.h" -#include "test/time_controller/simulated_time_controller.h" +#include "test/network/simulated_network.h" namespace webrtc { diff --git a/test/network/feedback_generator_unittest.cc b/test/network/feedback_generator_unittest.cc index 9a577bea00..6ad41e5696 100644 --- a/test/network/feedback_generator_unittest.cc +++ b/test/network/feedback_generator_unittest.cc @@ -8,7 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. 
 */
 
+#include 
+
+#include "api/test/simulated_network.h"
 #include "api/transport/test/create_feedback_generator.h"
+#include "api/transport/test/feedback_generator_interface.h"
+#include "api/units/time_delta.h"
 #include "test/gtest.h"
 
 namespace webrtc {
diff --git a/test/network/g3doc/index.md b/test/network/g3doc/index.md
index c82b56445e..5d993eb0cb 100644
--- a/test/network/g3doc/index.md
+++ b/test/network/g3doc/index.md
@@ -1,5 +1,5 @@
-
+
 
 # Network Emulation Framework
 
@@ -102,11 +102,11 @@ The framework has also the following private abstractions:
 For integrating with `webrtc::PeerConnection` there are helper abstractions:
 
 *   `webrtc::EmulatedNetworkManagerInterface` which is implemented by
-    `webrtc::test::EmulatedNetworkManager` and provides `rtc::Thread` and
-    `rtc::NetworkManager` for WebRTC to use as network thread for
-    `PeerConnection` and for `cricket::BasicPortAllocator`.
+    `webrtc::test::EmulatedNetworkManager` and provides `webrtc::Thread` and
+    `webrtc::NetworkManager` for WebRTC to use as the network thread for
+    `PeerConnection` and for `webrtc::BasicPortAllocator`.
 
-    Implementation represent framework endpoints as `rtc::Network` to WebRTC.
+    The implementation represents framework endpoints as `webrtc::Network` to WebRTC.
 
 ## Architecture
 
@@ -124,7 +124,7 @@ Here is a visual overview of the emulated network architecture:
 
 ![Architecture](network_emulation_framework.png "Architecture")
 
 When network is hooked into `PeerConnection` it is done through network thread
-and `NetworkManager`. In the network thread the custom `rtc::SocketServer` is
+and `NetworkManager`. In the network thread the custom `webrtc::SocketServer` is
 provided: `webrtc::test::FakeNetworkSocketServer`. This custom socket server
 will construct custom sockets (`webrtc::test::FakeNetworkSocket`), which
 internally bind themselves to the required endpoint.
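A sketch of the integration described above, assuming `PeerConnectionFactoryDependencies` exposes `network_thread` and `network_manager` members as the `ReleaseNetworkManager()` comment in this patch suggests; the helper name and the single-endpoint setup are illustrative only:

```cpp
#include <memory>
#include <utility>

#include "api/peer_connection_interface.h"
#include "api/scoped_refptr.h"
#include "api/test/network_emulation_manager.h"

namespace webrtc {
namespace test {

// Builds a PeerConnectionFactory whose networking runs inside the emulated
// network: the emulated network thread and the released NetworkManager are
// injected instead of the real OS networking stack.
scoped_refptr<PeerConnectionFactoryInterface> CreateFactoryOnEmulatedNetwork(
    NetworkEmulationManager& net) {
  EmulatedEndpoint* endpoint = net.CreateEndpoint(EmulatedEndpointConfig());
  EmulatedNetworkManagerInterface* emulated_net =
      net.CreateEmulatedNetworkManagerInterface({endpoint});

  PeerConnectionFactoryDependencies deps;
  deps.network_thread = emulated_net->network_thread();
  // ReleaseNetworkManager() may be called at most once; afterwards the
  // factory owns the NetworkManager while the emulated thread keeps running.
  deps.network_manager = emulated_net->ReleaseNetworkManager();
  // If the dependencies struct in your tree also accepts a SocketFactory,
  // emulated_net->socket_factory() provides the matching one (assumption).
  // Media engine, codec factories, other threads, etc. omitted for brevity.
  return CreateModularPeerConnectionFactory(std::move(deps));
}

}  // namespace test
}  // namespace webrtc
```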
All packets processing diff --git a/test/network/network_emulation.cc b/test/network/network_emulation.cc index f1c9ca80dd..3dfe3d0035 100644 --- a/test/network/network_emulation.cc +++ b/test/network/network_emulation.cc @@ -10,26 +10,46 @@ #include "test/network/network_emulation.h" +#include + #include +#include +#include #include +#include #include +#include +#include #include +#include -#include "absl/types/optional.h" +#include "absl/base/nullability.h" #include "api/numerics/samples_stats_counter.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "api/test/network_emulation/network_emulation_interfaces.h" #include "api/test/network_emulation_manager.h" +#include "api/test/simulated_network.h" +#include "api/transport/ecn_marking.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/ip_address.h" #include "rtc_base/logging.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/network.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/task_utils/repeating_task.h" +#include "system_wrappers/include/clock.h" namespace webrtc { namespace { EmulatedNetworkOutgoingStats GetOverallOutgoingStats( - const std::map& - outgoing_stats, + const std::map& outgoing_stats, EmulatedNetworkStatsGatheringMode mode) { EmulatedNetworkOutgoingStatsBuilder builder(mode); for (const auto& entry : outgoing_stats) { @@ -39,8 +59,7 @@ EmulatedNetworkOutgoingStats GetOverallOutgoingStats( } EmulatedNetworkIncomingStats GetOverallIncomingStats( - const std::map& - incoming_stats, + const std::map& incoming_stats, EmulatedNetworkStatsGatheringMode mode) { EmulatedNetworkIncomingStatsBuilder builder(mode); for (const auto& entry : incoming_stats) { @@ -49,6 +68,17 @@ EmulatedNetworkIncomingStats GetOverallIncomingStats( return builder.Build(); } +bool IsDtlsHandshakePacket(const uint8_t* payload, size_t payload_size) { + if (payload_size < 14) { + return false; + } + // https://tools.ietf.org/html/rfc6347#section-4.1 + // https://tools.ietf.org/html/rfc6347#section-4.2.2 + // https://tools.ietf.org/html/rfc5246#section-7.4 + return payload[0] == 22 && + (payload[13] == 1 || payload[13] == 2 || payload[13] == 11); +} + } // namespace EmulatedNetworkOutgoingStatsBuilder::EmulatedNetworkOutgoingStatsBuilder( @@ -57,19 +87,21 @@ EmulatedNetworkOutgoingStatsBuilder::EmulatedNetworkOutgoingStatsBuilder( sequence_checker_.Detach(); } -void EmulatedNetworkOutgoingStatsBuilder::OnPacketSent(Timestamp sent_time, - DataSize packet_size) { +void EmulatedNetworkOutgoingStatsBuilder::OnPacketSent( + Timestamp sent_time, + const EmulatedIpPacket& packet) { RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_CHECK_GE(packet_size, DataSize::Zero()); + RTC_CHECK_GE(packet.size(), 0); if (stats_.first_packet_sent_time.IsInfinite()) { stats_.first_packet_sent_time = sent_time; - stats_.first_sent_packet_size = packet_size; + stats_.first_sent_packet_size = DataSize::Bytes(packet.ip_packet_size()); } stats_.last_packet_sent_time = sent_time; stats_.packets_sent++; - stats_.bytes_sent += packet_size; + stats_.bytes_sent += DataSize::Bytes(packet.ip_packet_size()); + stats_.ecn_count.Add(packet.ecn); if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) { - stats_.sent_packets_size.AddSample(packet_size.bytes()); + stats_.sent_packets_size.AddSample(packet.ip_packet_size()); } } @@ -86,6 
+118,7 @@ void EmulatedNetworkOutgoingStatsBuilder::AddOutgoingStats( if (stats_.last_packet_sent_time < stats.last_packet_sent_time) { stats_.last_packet_sent_time = stats.last_packet_sent_time; } + stats_.ecn_count += stats.ecn_count; } EmulatedNetworkOutgoingStats EmulatedNetworkOutgoingStatsBuilder::Build() @@ -112,18 +145,20 @@ void EmulatedNetworkIncomingStatsBuilder::OnPacketDropped( void EmulatedNetworkIncomingStatsBuilder::OnPacketReceived( Timestamp received_time, - DataSize packet_size) { + const EmulatedIpPacket& packet) { RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_CHECK_GE(packet_size, DataSize::Zero()); + RTC_CHECK_GE(packet.size(), 0); if (stats_.first_packet_received_time.IsInfinite()) { stats_.first_packet_received_time = received_time; - stats_.first_received_packet_size = packet_size; + stats_.first_received_packet_size = + DataSize::Bytes(packet.ip_packet_size()); } stats_.last_packet_received_time = received_time; stats_.packets_received++; - stats_.bytes_received += packet_size; + stats_.ecn_count.Add(packet.ecn); + stats_.bytes_received += DataSize::Bytes(packet.ip_packet_size()); if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) { - stats_.received_packets_size.AddSample(packet_size.bytes()); + stats_.received_packets_size.AddSample(packet.ip_packet_size()); } } @@ -144,6 +179,7 @@ void EmulatedNetworkIncomingStatsBuilder::AddIncomingStats( if (stats_.last_packet_received_time < stats.last_packet_received_time) { stats_.last_packet_received_time = stats.last_packet_received_time; } + stats_.ecn_count += stats.ecn_count; } EmulatedNetworkIncomingStats EmulatedNetworkIncomingStatsBuilder::Build() @@ -159,34 +195,33 @@ EmulatedNetworkStatsBuilder::EmulatedNetworkStatsBuilder( } EmulatedNetworkStatsBuilder::EmulatedNetworkStatsBuilder( - rtc::IPAddress local_ip, + IPAddress local_ip, EmulatedNetworkStatsGatheringMode stats_gathering_mode) : stats_gathering_mode_(stats_gathering_mode) { local_addresses_.push_back(local_ip); sequence_checker_.Detach(); } -void EmulatedNetworkStatsBuilder::OnPacketSent(Timestamp queued_time, - Timestamp sent_time, - rtc::IPAddress destination_ip, - DataSize packet_size) { +void EmulatedNetworkStatsBuilder::OnPacketSent(Timestamp sent_time, + const EmulatedIpPacket& packet) { RTC_DCHECK_RUN_ON(&sequence_checker_); if (stats_gathering_mode_ == EmulatedNetworkStatsGatheringMode::kDebug) { - sent_packets_queue_wait_time_us_.AddSample((sent_time - queued_time).us()); + sent_packets_queue_wait_time_us_.AddSample( + (sent_time - packet.arrival_time).us()); } - auto it = outgoing_stats_per_destination_.find(destination_ip); + auto it = outgoing_stats_per_destination_.find(packet.to.ipaddr()); if (it == outgoing_stats_per_destination_.end()) { outgoing_stats_per_destination_ - .emplace(destination_ip, + .emplace(packet.to.ipaddr(), std::make_unique( stats_gathering_mode_)) - .first->second->OnPacketSent(sent_time, packet_size); + .first->second->OnPacketSent(sent_time, packet); } else { - it->second->OnPacketSent(sent_time, packet_size); + it->second->OnPacketSent(sent_time, packet); } } -void EmulatedNetworkStatsBuilder::OnPacketDropped(rtc::IPAddress source_ip, +void EmulatedNetworkStatsBuilder::OnPacketDropped(IPAddress source_ip, DataSize packet_size) { RTC_DCHECK_RUN_ON(&sequence_checker_); auto it = incoming_stats_per_source_.find(source_ip); @@ -201,19 +236,19 @@ void EmulatedNetworkStatsBuilder::OnPacketDropped(rtc::IPAddress source_ip, } } -void EmulatedNetworkStatsBuilder::OnPacketReceived(Timestamp received_time, 
- rtc::IPAddress source_ip, - DataSize packet_size) { +void EmulatedNetworkStatsBuilder::OnPacketReceived( + Timestamp received_time, + const EmulatedIpPacket& packet) { RTC_DCHECK_RUN_ON(&sequence_checker_); - auto it = incoming_stats_per_source_.find(source_ip); + auto it = incoming_stats_per_source_.find(packet.from.ipaddr()); if (it == incoming_stats_per_source_.end()) { incoming_stats_per_source_ - .emplace(source_ip, + .emplace(packet.from.ipaddr(), std::make_unique( stats_gathering_mode_)) - .first->second->OnPacketReceived(received_time, packet_size); + .first->second->OnPacketReceived(received_time, packet); } else { - it->second->OnPacketReceived(received_time, packet_size); + it->second->OnPacketReceived(received_time, packet); } } @@ -222,7 +257,7 @@ void EmulatedNetworkStatsBuilder::AddEmulatedNetworkStats( RTC_DCHECK_RUN_ON(&sequence_checker_); // Append IPs from other endpoints stats to the builder. - for (const rtc::IPAddress& addr : stats.local_addresses) { + for (const IPAddress& addr : stats.local_addresses) { local_addresses_.push_back(addr); } @@ -260,11 +295,11 @@ void EmulatedNetworkStatsBuilder::AddEmulatedNetworkStats( EmulatedNetworkStats EmulatedNetworkStatsBuilder::Build() const { RTC_DCHECK_RUN_ON(&sequence_checker_); - std::map outgoing_stats; + std::map outgoing_stats; for (const auto& entry : outgoing_stats_per_destination_) { outgoing_stats.emplace(entry.first, entry.second->Build()); } - std::map incoming_stats; + std::map incoming_stats; for (const auto& entry : incoming_stats_per_source_) { incoming_stats.emplace(entry.first, entry.second->Build()); } @@ -309,13 +344,49 @@ EmulatedNetworkNodeStats EmulatedNetworkNodeStatsBuilder::Build() const { return stats_; } +size_t LinkEmulation::GetPacketSizeForEmulation( + const EmulatedIpPacket& packet) const { + if (fake_dtls_handshake_sizes_ && + IsDtlsHandshakePacket(packet.data.cdata(), packet.data.size())) { + // DTLS handshake packets can not have deterministic size unless + // the OpenSSL/BoringSSL is configured to have deterministic random, + // which is hard. The workaround is - conditionally ignore the actual + // size and hardcode the value order of typical handshake packet size. 
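`GetPacketSizeForEmulation()` above substitutes a fixed size for DTLS handshake records so that emulated packet sizes stay deterministic across runs. The classification it relies on, `IsDtlsHandshakePacket()` earlier in this file, inspects two bytes of the DTLS record; here is the same check restated with the protocol constants named (values from RFC 6347 and RFC 5246, not a WebRTC API):

```cpp
#include <cstddef>
#include <cstdint>

namespace {

// DTLS record layout: byte 0 is the content type, bytes 1-2 the version,
// bytes 3-4 the epoch, bytes 5-10 the sequence number, bytes 11-12 the
// length; byte 13 is therefore the first byte of the handshake message,
// i.e. its msg_type.
constexpr uint8_t kDtlsContentTypeHandshake = 22;
constexpr uint8_t kHandshakeTypeClientHello = 1;
constexpr uint8_t kHandshakeTypeServerHello = 2;
constexpr uint8_t kHandshakeTypeCertificate = 11;

bool LooksLikeDtlsHandshake(const uint8_t* payload, size_t payload_size) {
  if (payload_size < 14) {
    return false;
  }
  return payload[0] == kDtlsContentTypeHandshake &&
         (payload[13] == kHandshakeTypeClientHello ||
          payload[13] == kHandshakeTypeServerHello ||
          payload[13] == kHandshakeTypeCertificate);
}

}  // namespace
```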
+ return 1000; + } + return packet.ip_packet_size(); +} + +LinkEmulation::LinkEmulation( + Clock* clock, + TaskQueueBase* absl_nonnull task_queue, + std::unique_ptr network_behavior, + EmulatedNetworkReceiverInterface* receiver, + EmulatedNetworkStatsGatheringMode stats_gathering_mode, + bool fake_dtls_handshake_sizes) + : clock_(clock), + task_queue_(task_queue), + network_behavior_(std::move(network_behavior)), + receiver_(receiver), + fake_dtls_handshake_sizes_(fake_dtls_handshake_sizes), + stats_builder_(stats_gathering_mode) { + task_queue_->PostTask([&]() { + RTC_DCHECK_RUN_ON(task_queue_); + network_behavior_->RegisterDeliveryTimeChangedCallback([&]() { + RTC_DCHECK_RUN_ON(task_queue_); + UpdateProcessSchedule(); + }); + }); +} + void LinkEmulation::OnPacketReceived(EmulatedIpPacket packet) { task_queue_->PostTask([this, packet = std::move(packet)]() mutable { RTC_DCHECK_RUN_ON(task_queue_); uint64_t packet_id = next_packet_id_++; - bool sent = network_behavior_->EnqueuePacket(PacketInFlightInfo( - packet.ip_packet_size(), packet.arrival_time.us(), packet_id)); + bool sent = network_behavior_->EnqueuePacket( + PacketInFlightInfo(GetPacketSizeForEmulation(packet), + packet.arrival_time.us(), packet_id, packet.ecn)); if (sent) { packets_.emplace_back(StoredPacket{.id = packet_id, .sent_time = clock_->CurrentTime(), @@ -324,28 +395,8 @@ void LinkEmulation::OnPacketReceived(EmulatedIpPacket packet) { } if (process_task_.Running()) return; - absl::optional next_time_us = - network_behavior_->NextDeliveryTimeUs(); - if (!next_time_us) - return; - Timestamp current_time = clock_->CurrentTime(); - process_task_ = RepeatingTaskHandle::DelayedStart( - task_queue_->Get(), - std::max(TimeDelta::Zero(), - Timestamp::Micros(*next_time_us) - current_time), - [this]() { - RTC_DCHECK_RUN_ON(task_queue_); - Timestamp current_time = clock_->CurrentTime(); - Process(current_time); - absl::optional next_time_us = - network_behavior_->NextDeliveryTimeUs(); - if (!next_time_us) { - process_task_.Stop(); - return TimeDelta::Zero(); // This is ignored. - } - RTC_DCHECK_GE(*next_time_us, current_time.us()); - return Timestamp::Micros(*next_time_us) - current_time; - }); + + UpdateProcessSchedule(); }); } @@ -370,11 +421,13 @@ void LinkEmulation::Process(Timestamp at_time) { packet->removed = true; stats_builder_.AddPacketTransportTime( clock_->CurrentTime() - packet->sent_time, - packet->packet.ip_packet_size()); + GetPacketSizeForEmulation(packet->packet)); if (delivery_info.receive_time_us != PacketDeliveryInfo::kNotReceived) { packet->packet.arrival_time = Timestamp::Micros(delivery_info.receive_time_us); + // Link may have changed ECN. 
+ packet->packet.ecn = delivery_info.ecn; receiver_->OnPacketReceived(std::move(packet->packet)); } while (!packets_.empty() && packets_.front().removed) { @@ -383,7 +436,35 @@ void LinkEmulation::Process(Timestamp at_time) { } } -NetworkRouterNode::NetworkRouterNode(rtc::TaskQueue* task_queue) +void LinkEmulation::UpdateProcessSchedule() { + RTC_DCHECK_RUN_ON(task_queue_); + if (process_task_.Running()) { + process_task_.Stop(); + }; + std::optional next_time_us = network_behavior_->NextDeliveryTimeUs(); + if (!next_time_us) + return; + Timestamp current_time = clock_->CurrentTime(); + process_task_ = RepeatingTaskHandle::DelayedStart( + task_queue_, + std::max(TimeDelta::Zero(), + Timestamp::Micros(*next_time_us) - current_time), + [this]() { + RTC_DCHECK_RUN_ON(task_queue_); + Timestamp current_time = clock_->CurrentTime(); + Process(current_time); + std::optional next_time_us = + network_behavior_->NextDeliveryTimeUs(); + if (!next_time_us) { + process_task_.Stop(); + return TimeDelta::Zero(); // This is ignored. + } + RTC_DCHECK_GE(*next_time_us, current_time.us()); + return Timestamp::Micros(*next_time_us) - current_time; + }); +} + +NetworkRouterNode::NetworkRouterNode(TaskQueueBase* absl_nonnull task_queue) : task_queue_(task_queue) {} void NetworkRouterNode::OnPacketReceived(EmulatedIpPacket packet) { @@ -408,9 +489,9 @@ void NetworkRouterNode::OnPacketReceived(EmulatedIpPacket packet) { } void NetworkRouterNode::SetReceiver( - const rtc::IPAddress& dest_ip, + const IPAddress& dest_ip, EmulatedNetworkReceiverInterface* receiver) { - task_queue_->PostTask([=] { + task_queue_->PostTask([this, dest_ip, receiver] { RTC_DCHECK_RUN_ON(task_queue_); EmulatedNetworkReceiverInterface* cur_receiver = routing_[dest_ip]; RTC_CHECK(cur_receiver == nullptr || cur_receiver == receiver) @@ -419,14 +500,14 @@ void NetworkRouterNode::SetReceiver( }); } -void NetworkRouterNode::RemoveReceiver(const rtc::IPAddress& dest_ip) { +void NetworkRouterNode::RemoveReceiver(const IPAddress& dest_ip) { RTC_DCHECK_RUN_ON(task_queue_); routing_.erase(dest_ip); } void NetworkRouterNode::SetDefaultReceiver( EmulatedNetworkReceiverInterface* receiver) { - task_queue_->PostTask([=] { + task_queue_->PostTask([this, receiver] { RTC_DCHECK_RUN_ON(task_queue_); if (default_receiver_.has_value()) { RTC_CHECK_EQ(*default_receiver_, receiver) @@ -438,12 +519,12 @@ void NetworkRouterNode::SetDefaultReceiver( void NetworkRouterNode::RemoveDefaultReceiver() { RTC_DCHECK_RUN_ON(task_queue_); - default_receiver_ = absl::nullopt; + default_receiver_ = std::nullopt; } void NetworkRouterNode::SetWatcher( std::function watcher) { - task_queue_->PostTask([=] { + task_queue_->PostTask([this, watcher] { RTC_DCHECK_RUN_ON(task_queue_); watcher_ = watcher; }); @@ -451,7 +532,7 @@ void NetworkRouterNode::SetWatcher( void NetworkRouterNode::SetFilter( std::function filter) { - task_queue_->PostTask([=] { + task_queue_->PostTask([this, filter] { RTC_DCHECK_RUN_ON(task_queue_); filter_ = filter; }); @@ -459,15 +540,17 @@ void NetworkRouterNode::SetFilter( EmulatedNetworkNode::EmulatedNetworkNode( Clock* clock, - rtc::TaskQueue* task_queue, + TaskQueueBase* absl_nonnull task_queue, std::unique_ptr network_behavior, - EmulatedNetworkStatsGatheringMode stats_gathering_mode) + EmulatedNetworkStatsGatheringMode stats_gathering_mode, + bool fake_dtls_handshake_sizes) : router_(task_queue), link_(clock, task_queue, std::move(network_behavior), &router_, - stats_gathering_mode) {} + stats_gathering_mode, + fake_dtls_handshake_sizes) {} void 
EmulatedNetworkNode::OnPacketReceived(EmulatedIpPacket packet) { link_.OnPacketReceived(std::move(packet)); @@ -478,7 +561,7 @@ EmulatedNetworkNodeStats EmulatedNetworkNode::stats() const { } void EmulatedNetworkNode::CreateRoute( - const rtc::IPAddress& receiver_ip, + const IPAddress& receiver_ip, std::vector nodes, EmulatedNetworkReceiverInterface* receiver) { RTC_CHECK(!nodes.empty()); @@ -487,7 +570,7 @@ void EmulatedNetworkNode::CreateRoute( nodes.back()->router()->SetReceiver(receiver_ip, receiver); } -void EmulatedNetworkNode::ClearRoute(const rtc::IPAddress& receiver_ip, +void EmulatedNetworkNode::ClearRoute(const IPAddress& receiver_ip, std::vector nodes) { for (EmulatedNetworkNode* node : nodes) node->router()->RemoveReceiver(receiver_ip); @@ -497,7 +580,7 @@ EmulatedNetworkNode::~EmulatedNetworkNode() = default; EmulatedEndpointImpl::Options::Options( uint64_t id, - const rtc::IPAddress& ip, + const IPAddress& ip, const EmulatedEndpointConfig& config, EmulatedNetworkStatsGatheringMode stats_gathering_mode) : id(id), @@ -512,7 +595,8 @@ EmulatedEndpointImpl::Options::Options( EmulatedEndpointImpl::EmulatedEndpointImpl(const Options& options, bool is_enabled, - rtc::TaskQueue* task_queue, + TaskQueueBase* absl_nonnull + task_queue, Clock* clock) : options_(options), is_enabled_(is_enabled), @@ -530,13 +614,12 @@ EmulatedEndpointImpl::EmulatedEndpointImpl(const Options& options, } else if (options_.ip.family() == AF_INET6) { prefix_length = kIPv6NetworkPrefixLength; } - rtc::IPAddress prefix = TruncateIP(options_.ip, prefix_length); - network_ = std::make_unique( + IPAddress prefix = TruncateIP(options_.ip, prefix_length); + network_ = std::make_unique( options_.ip.ToString(), "Endpoint id=" + std::to_string(options_.id), prefix, prefix_length, options_.type); network_->AddIP(options_.ip); - enabled_state_checker_.Detach(); RTC_LOG(LS_INFO) << "Created emulated endpoint " << options_.log_name << "; id=" << options_.id; } @@ -546,21 +629,19 @@ uint64_t EmulatedEndpointImpl::GetId() const { return options_.id; } -void EmulatedEndpointImpl::SendPacket(const rtc::SocketAddress& from, - const rtc::SocketAddress& to, - rtc::CopyOnWriteBuffer packet_data, - uint16_t application_overhead) { +void EmulatedEndpointImpl::SendPacket(const SocketAddress& from, + const SocketAddress& to, + CopyOnWriteBuffer packet_data, + uint16_t application_overhead, + EcnMarking ecn) { if (!options_.allow_send_packet_with_different_source_ip) { RTC_CHECK(from.ipaddr() == options_.ip); } EmulatedIpPacket packet(from, to, std::move(packet_data), - clock_->CurrentTime(), application_overhead); + clock_->CurrentTime(), application_overhead, ecn); task_queue_->PostTask([this, packet = std::move(packet)]() mutable { RTC_DCHECK_RUN_ON(task_queue_); - stats_builder_.OnPacketSent(packet.arrival_time, clock_->CurrentTime(), - packet.to.ipaddr(), - DataSize::Bytes(packet.ip_packet_size())); - + stats_builder_.OnPacketSent(clock_->CurrentTime(), packet); if (packet.to.ipaddr() == options_.ip) { OnPacketReceived(std::move(packet)); } else { @@ -569,19 +650,19 @@ void EmulatedEndpointImpl::SendPacket(const rtc::SocketAddress& from, }); } -absl::optional EmulatedEndpointImpl::BindReceiver( +std::optional EmulatedEndpointImpl::BindReceiver( uint16_t desired_port, EmulatedNetworkReceiverInterface* receiver) { return BindReceiverInternal(desired_port, receiver, /*is_one_shot=*/false); } -absl::optional EmulatedEndpointImpl::BindOneShotReceiver( +std::optional EmulatedEndpointImpl::BindOneShotReceiver( uint16_t desired_port, 
EmulatedNetworkReceiverInterface* receiver) { return BindReceiverInternal(desired_port, receiver, /*is_one_shot=*/true); } -absl::optional EmulatedEndpointImpl::BindReceiverInternal( +std::optional EmulatedEndpointImpl::BindReceiverInternal( uint16_t desired_port, EmulatedNetworkReceiverInterface* receiver, bool is_one_shot) { @@ -608,7 +689,7 @@ absl::optional EmulatedEndpointImpl::BindReceiverInternal( RTC_LOG(LS_INFO) << "Can't bind receiver to used port " << desired_port << " in endpoint " << options_.log_name << "; id=" << options_.id; - return absl::nullopt; + return std::nullopt; } RTC_LOG(LS_INFO) << "New receiver is binded to endpoint " << options_.log_name << "; id=" << options_.id << " on port " << port; @@ -648,10 +729,10 @@ void EmulatedEndpointImpl::UnbindDefaultReceiver() { MutexLock lock(&receiver_lock_); RTC_LOG(LS_INFO) << "Default receiver is removed from endpoint " << options_.log_name << "; id=" << options_.id; - default_receiver_ = absl::nullopt; + default_receiver_ = std::nullopt; } -rtc::IPAddress EmulatedEndpointImpl::GetPeerLocalAddress() const { +IPAddress EmulatedEndpointImpl::GetPeerLocalAddress() const { return options_.ip; } @@ -664,8 +745,7 @@ void EmulatedEndpointImpl::OnPacketReceived(EmulatedIpPacket packet) { << "; Receiver options_.ip=" << options_.ip.ToString(); } MutexLock lock(&receiver_lock_); - stats_builder_.OnPacketReceived(clock_->CurrentTime(), packet.from.ipaddr(), - DataSize::Bytes(packet.ip_packet_size())); + stats_builder_.OnPacketReceived(clock_->CurrentTime(), packet); auto it = port_to_receiver_.find(packet.to.port()); if (it == port_to_receiver_.end()) { if (default_receiver_.has_value()) { @@ -694,19 +774,19 @@ void EmulatedEndpointImpl::OnPacketReceived(EmulatedIpPacket packet) { } void EmulatedEndpointImpl::Enable() { - RTC_DCHECK_RUN_ON(&enabled_state_checker_); + MutexLock lock(&enable_state_mutex_); RTC_CHECK(!is_enabled_); is_enabled_ = true; } void EmulatedEndpointImpl::Disable() { - RTC_DCHECK_RUN_ON(&enabled_state_checker_); + MutexLock lock(&enable_state_mutex_); RTC_CHECK(is_enabled_); is_enabled_ = false; } bool EmulatedEndpointImpl::Enabled() const { - RTC_DCHECK_RUN_ON(&enabled_state_checker_); + MutexLock lock(&enable_state_mutex_); return is_enabled_; } @@ -716,9 +796,9 @@ EmulatedNetworkStats EmulatedEndpointImpl::stats() const { } EmulatedEndpointImpl* EndpointsContainer::LookupByLocalAddress( - const rtc::IPAddress& local_ip) const { + const IPAddress& local_ip) const { for (auto* endpoint : endpoints_) { - rtc::IPAddress peer_local_address = endpoint->GetPeerLocalAddress(); + IPAddress peer_local_address = endpoint->GetPeerLocalAddress(); if (peer_local_address == local_ip) { return endpoint; } @@ -740,13 +820,12 @@ bool EndpointsContainer::HasEndpoint(EmulatedEndpointImpl* endpoint) const { return false; } -std::vector> -EndpointsContainer::GetEnabledNetworks() const { - std::vector> networks; +std::vector> EndpointsContainer::GetEnabledNetworks() + const { + std::vector> networks; for (auto* endpoint : endpoints_) { if (endpoint->Enabled()) { - networks.emplace_back( - std::make_unique(endpoint->network())); + networks.emplace_back(std::make_unique(endpoint->network())); } } return networks; diff --git a/test/network/network_emulation.h b/test/network/network_emulation.h index dffabafa7c..891e04a39f 100644 --- a/test/network/network_emulation.h +++ b/test/network/network_emulation.h @@ -12,29 +12,35 @@ #define TEST_NETWORK_NETWORK_EMULATION_H_ #include +#include #include +#include #include #include +#include 
#include #include #include -#include "absl/types/optional.h" -#include "api/array_view.h" +#include "absl/base/nullability.h" #include "api/numerics/samples_stats_counter.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "api/test/network_emulation/network_emulation_interfaces.h" #include "api/test/network_emulation_manager.h" #include "api/test/simulated_network.h" +#include "api/transport/ecn_marking.h" +#include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/ip_address.h" #include "rtc_base/network.h" #include "rtc_base/network_constants.h" #include "rtc_base/socket_address.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_queue_for_test.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" @@ -48,7 +54,7 @@ class EmulatedNetworkOutgoingStatsBuilder { explicit EmulatedNetworkOutgoingStatsBuilder( EmulatedNetworkStatsGatheringMode stats_gathering_mode); - void OnPacketSent(Timestamp sent_time, DataSize packet_size); + void OnPacketSent(Timestamp sent_time, const EmulatedIpPacket& packet); void AddOutgoingStats(const EmulatedNetworkOutgoingStats& stats); @@ -70,7 +76,8 @@ class EmulatedNetworkIncomingStatsBuilder { void OnPacketDropped(DataSize packet_size); - void OnPacketReceived(Timestamp received_time, DataSize packet_size); + void OnPacketReceived(Timestamp received_time, + const EmulatedIpPacket& packet); // Adds stats collected from another endpoints to the builder. void AddIncomingStats(const EmulatedNetworkIncomingStats& stats); @@ -91,19 +98,15 @@ class EmulatedNetworkStatsBuilder { explicit EmulatedNetworkStatsBuilder( EmulatedNetworkStatsGatheringMode stats_gathering_mode); explicit EmulatedNetworkStatsBuilder( - rtc::IPAddress local_ip, + IPAddress local_ip, EmulatedNetworkStatsGatheringMode stats_gathering_mode); - void OnPacketSent(Timestamp queued_time, - Timestamp sent_time, - rtc::IPAddress destination_ip, - DataSize packet_size); + void OnPacketSent(Timestamp send_time, const EmulatedIpPacket& packet); - void OnPacketDropped(rtc::IPAddress source_ip, DataSize packet_size); + void OnPacketDropped(IPAddress source_ip, DataSize packet_size); void OnPacketReceived(Timestamp received_time, - rtc::IPAddress source_ip, - DataSize packet_size); + const EmulatedIpPacket& packet); void AddEmulatedNetworkStats(const EmulatedNetworkStats& stats); @@ -113,12 +116,11 @@ class EmulatedNetworkStatsBuilder { const EmulatedNetworkStatsGatheringMode stats_gathering_mode_; RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; - std::vector local_addresses_ - RTC_GUARDED_BY(sequence_checker_); + std::vector local_addresses_ RTC_GUARDED_BY(sequence_checker_); SamplesStatsCounter sent_packets_queue_wait_time_us_; - std::map> + std::map> outgoing_stats_per_destination_ RTC_GUARDED_BY(sequence_checker_); - std::map> + std::map> incoming_stats_per_source_ RTC_GUARDED_BY(sequence_checker_); }; @@ -145,15 +147,11 @@ class EmulatedNetworkNodeStatsBuilder { class LinkEmulation : public EmulatedNetworkReceiverInterface { public: LinkEmulation(Clock* clock, - rtc::TaskQueue* task_queue, + TaskQueueBase* absl_nonnull task_queue, std::unique_ptr network_behavior, EmulatedNetworkReceiverInterface* receiver, - EmulatedNetworkStatsGatheringMode stats_gathering_mode) - : 
clock_(clock), - task_queue_(task_queue), - network_behavior_(std::move(network_behavior)), - receiver_(receiver), - stats_builder_(stats_gathering_mode) {} + EmulatedNetworkStatsGatheringMode stats_gathering_mode, + bool fake_dtls_handshake_sizes); void OnPacketReceived(EmulatedIpPacket packet) override; EmulatedNetworkNodeStats stats() const; @@ -165,13 +163,16 @@ class LinkEmulation : public EmulatedNetworkReceiverInterface { EmulatedIpPacket packet; bool removed; }; + void UpdateProcessSchedule() RTC_RUN_ON(task_queue_); void Process(Timestamp at_time) RTC_RUN_ON(task_queue_); + size_t GetPacketSizeForEmulation(const EmulatedIpPacket& packet) const; Clock* const clock_; - rtc::TaskQueue* const task_queue_; + TaskQueueBase* absl_nonnull const task_queue_; const std::unique_ptr network_behavior_ RTC_GUARDED_BY(task_queue_); EmulatedNetworkReceiverInterface* const receiver_; + const bool fake_dtls_handshake_sizes_; RepeatingTaskHandle process_task_ RTC_GUARDED_BY(task_queue_); std::deque packets_ RTC_GUARDED_BY(task_queue_); @@ -186,12 +187,12 @@ class LinkEmulation : public EmulatedNetworkReceiverInterface { // the packet will be silently dropped. class NetworkRouterNode : public EmulatedNetworkReceiverInterface { public: - explicit NetworkRouterNode(rtc::TaskQueue* task_queue); + explicit NetworkRouterNode(TaskQueueBase* absl_nonnull task_queue); void OnPacketReceived(EmulatedIpPacket packet) override; - void SetReceiver(const rtc::IPAddress& dest_ip, + void SetReceiver(const IPAddress& dest_ip, EmulatedNetworkReceiverInterface* receiver); - void RemoveReceiver(const rtc::IPAddress& dest_ip); + void RemoveReceiver(const IPAddress& dest_ip); // Sets a default receive that will be used for all incoming packets for which // there is no specific receiver binded to their destination port. void SetDefaultReceiver(EmulatedNetworkReceiverInterface* receiver); @@ -200,10 +201,10 @@ class NetworkRouterNode : public EmulatedNetworkReceiverInterface { void SetFilter(std::function filter); private: - rtc::TaskQueue* const task_queue_; - absl::optional default_receiver_ + TaskQueueBase* absl_nonnull const task_queue_; + std::optional default_receiver_ RTC_GUARDED_BY(task_queue_); - std::map routing_ + std::map routing_ RTC_GUARDED_BY(task_queue_); std::function watcher_ RTC_GUARDED_BY(task_queue_); @@ -224,9 +225,10 @@ class EmulatedNetworkNode : public EmulatedNetworkReceiverInterface { // they are ready. EmulatedNetworkNode( Clock* clock, - rtc::TaskQueue* task_queue, + TaskQueueBase* absl_nonnull task_queue, std::unique_ptr network_behavior, - EmulatedNetworkStatsGatheringMode stats_gathering_mode); + EmulatedNetworkStatsGatheringMode stats_gathering_mode, + bool fake_dtls_handshake_sizes); ~EmulatedNetworkNode() override; EmulatedNetworkNode(const EmulatedNetworkNode&) = delete; @@ -240,10 +242,10 @@ class EmulatedNetworkNode : public EmulatedNetworkReceiverInterface { // Creates a route for the given receiver_ip over all the given nodes to the // given receiver. 
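  // Concretely, the helper wires the router of each node in `nodes` to forward
  // traffic addressed to `receiver_ip` to the next hop, with the last hop
  // delivering to `receiver`. A usage sketch (variable names are placeholders,
  // not taken from this patch; the types are the ones declared just below):
  //
  //   EmulatedNetworkNode::CreateRoute(bob_ip, {wifi_node, backbone_node},
  //                                    bob_receiver);
  //   // ... run the scenario; packets for bob_ip now traverse both hops ...
  //   EmulatedNetworkNode::ClearRoute(bob_ip, {wifi_node, backbone_node});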
- static void CreateRoute(const rtc::IPAddress& receiver_ip, + static void CreateRoute(const IPAddress& receiver_ip, std::vector nodes, EmulatedNetworkReceiverInterface* receiver); - static void ClearRoute(const rtc::IPAddress& receiver_ip, + static void ClearRoute(const IPAddress& receiver_ip, std::vector nodes); private: @@ -259,16 +261,16 @@ class EmulatedEndpointImpl : public EmulatedEndpoint { public: struct Options { Options(uint64_t id, - const rtc::IPAddress& ip, + const IPAddress& ip, const EmulatedEndpointConfig& config, EmulatedNetworkStatsGatheringMode stats_gathering_mode); // TODO(titovartem) check if we can remove id. uint64_t id; // Endpoint local IP address. - rtc::IPAddress ip; + IPAddress ip; EmulatedNetworkStatsGatheringMode stats_gathering_mode; - rtc::AdapterType type; + AdapterType type; // Allow endpoint to send packets specifying source IP address different to // the current endpoint IP address. If false endpoint will crash if attempt // to send such packet will be done. @@ -283,7 +285,7 @@ class EmulatedEndpointImpl : public EmulatedEndpoint { EmulatedEndpointImpl(const Options& options, bool is_enabled, - rtc::TaskQueue* task_queue, + TaskQueueBase* absl_nonnull task_queue, Clock* clock); ~EmulatedEndpointImpl() override; @@ -291,24 +293,25 @@ class EmulatedEndpointImpl : public EmulatedEndpoint { NetworkRouterNode* router() { return &router_; } - void SendPacket(const rtc::SocketAddress& from, - const rtc::SocketAddress& to, - rtc::CopyOnWriteBuffer packet_data, - uint16_t application_overhead = 0) override; + void SendPacket(const SocketAddress& from, + const SocketAddress& to, + CopyOnWriteBuffer packet_data, + uint16_t application_overhead = 0, + EcnMarking ecn = EcnMarking::kNotEct) override; - absl::optional BindReceiver( + std::optional BindReceiver( uint16_t desired_port, EmulatedNetworkReceiverInterface* receiver) override; // Binds a receiver, and automatically removes the binding after first call to // OnPacketReceived. - absl::optional BindOneShotReceiver( + std::optional BindOneShotReceiver( uint16_t desired_port, EmulatedNetworkReceiverInterface* receiver); void UnbindReceiver(uint16_t port) override; void BindDefaultReceiver(EmulatedNetworkReceiverInterface* receiver) override; void UnbindDefaultReceiver() override; - rtc::IPAddress GetPeerLocalAddress() const override; + IPAddress GetPeerLocalAddress() const override; // Will be called to deliver packet into endpoint from network node. 
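  // Such deliveries originate from a SendPacket() call on some endpoint; with
  // the new EcnMarking parameter above, a test can stamp ECT(1) on the way in.
  // A small usage sketch (`endpoint` and `peer_address` are placeholders; the
  // buffer-from-string-literal form mirrors the tests later in this patch):
  //
  //   endpoint->SendPacket(SocketAddress(endpoint->GetPeerLocalAddress(), 5000),
  //                        peer_address, CopyOnWriteBuffer("ping"),
  //                        /*application_overhead=*/0, EcnMarking::kEct1);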
void OnPacketReceived(EmulatedIpPacket packet) override; @@ -317,7 +320,7 @@ class EmulatedEndpointImpl : public EmulatedEndpoint { void Disable(); bool Enabled() const; - const rtc::Network& network() const { return *network_.get(); } + const Network& network() const { return *network_.get(); } EmulatedNetworkStats stats() const; @@ -327,7 +330,7 @@ class EmulatedEndpointImpl : public EmulatedEndpoint { bool is_one_shot; }; - absl::optional BindReceiverInternal( + std::optional BindReceiverInternal( uint16_t desired_port, EmulatedNetworkReceiverInterface* receiver, bool is_one_shot); @@ -336,17 +339,17 @@ class EmulatedEndpointImpl : public EmulatedEndpoint { uint16_t NextPort() RTC_EXCLUSIVE_LOCKS_REQUIRED(receiver_lock_); Mutex receiver_lock_; - RTC_NO_UNIQUE_ADDRESS SequenceChecker enabled_state_checker_; + mutable Mutex enable_state_mutex_; const Options options_; - bool is_enabled_ RTC_GUARDED_BY(enabled_state_checker_); + bool is_enabled_ RTC_GUARDED_BY(enable_state_mutex_); Clock* const clock_; - rtc::TaskQueue* const task_queue_; - std::unique_ptr network_; + TaskQueueBase* absl_nonnull const task_queue_; + std::unique_ptr network_; NetworkRouterNode router_; uint16_t next_port_ RTC_GUARDED_BY(receiver_lock_); - absl::optional default_receiver_ + std::optional default_receiver_ RTC_GUARDED_BY(receiver_lock_); std::map port_to_receiver_ RTC_GUARDED_BY(receiver_lock_); @@ -379,12 +382,11 @@ class EndpointsContainer { EndpointsContainer(const std::vector& endpoints, EmulatedNetworkStatsGatheringMode stats_gathering_mode); - EmulatedEndpointImpl* LookupByLocalAddress( - const rtc::IPAddress& local_ip) const; + EmulatedEndpointImpl* LookupByLocalAddress(const IPAddress& local_ip) const; bool HasEndpoint(EmulatedEndpointImpl* endpoint) const; // Returns list of networks for enabled endpoints. Caller takes ownership of - // returned rtc::Network objects. - std::vector> GetEnabledNetworks() const; + // returned webrtc::Network objects. 
+ std::vector> GetEnabledNetworks() const; std::vector GetEndpoints() const; EmulatedNetworkStats GetStats() const; @@ -409,7 +411,8 @@ class FakePacketRoute : public EmulatedNetworkReceiverInterface { void SendPacket(size_t size, FakePacketType packet) { RTC_CHECK_GE(size, sizeof(int)); sent_.emplace(next_packet_id_, packet); - rtc::CopyOnWriteBuffer buf(size); + CopyOnWriteBuffer buf(size); + memset(buf.MutableData(), 0, size); reinterpret_cast(buf.MutableData())[0] = next_packet_id_++; route_->from->SendPacket(send_addr_, recv_addr_, buf); } @@ -423,8 +426,8 @@ class FakePacketRoute : public EmulatedNetworkReceiverInterface { private: EmulatedRoute* const route_; const std::function action_; - const rtc::SocketAddress send_addr_; - const rtc::SocketAddress recv_addr_; + const SocketAddress send_addr_; + const SocketAddress recv_addr_; int next_packet_id_ = 0; std::map sent_; }; diff --git a/test/network/network_emulation_manager.cc b/test/network/network_emulation_manager.cc index 97c0bc1ba8..f08a1de00a 100644 --- a/test/network/network_emulation_manager.cc +++ b/test/network/network_emulation_manager.cc @@ -11,12 +11,34 @@ #include "test/network/network_emulation_manager.h" #include +#include +#include +#include #include - -#include "api/units/time_delta.h" +#include +#include +#include + +#include "absl/base/nullability.h" +#include "api/array_view.h" +#include "api/field_trials_view.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/test/network_emulation/cross_traffic.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/test/network_emulation_manager.h" +#include "api/test/simulated_network.h" +#include "api/test/time_controller.h" #include "api/units/timestamp.h" -#include "call/simulated_network.h" +#include "rtc_base/checks.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/task_queue_for_test.h" +#include "rtc_base/task_utils/repeating_task.h" +#include "test/network/cross_traffic.h" +#include "test/network/emulated_network_manager.h" #include "test/network/emulated_turn_server.h" +#include "test/network/network_emulation.h" +#include "test/network/simulated_network.h" #include "test/network/traffic_route.h" #include "test/time_controller/real_time_controller.h" #include "test/time_controller/simulated_time_controller.h" @@ -30,10 +52,12 @@ constexpr uint32_t kMinIPv4Address = 0xC0A80000; // uint32_t representation of 192.168.255.255 address constexpr uint32_t kMaxIPv4Address = 0xC0A8FFFF; -std::unique_ptr CreateTimeController(TimeMode mode) { +std::unique_ptr CreateTimeController( + TimeMode mode, + const FieldTrialsView* field_trials) { switch (mode) { case TimeMode::kRealTime: - return std::make_unique(); + return std::make_unique(field_trials); case TimeMode::kSimulated: // Using an offset of 100000 to get nice fixed width and readable // timestamps in typical test scenarios. 
@@ -45,12 +69,13 @@ std::unique_ptr CreateTimeController(TimeMode mode) { } // namespace NetworkEmulationManagerImpl::NetworkEmulationManagerImpl( - TimeMode mode, - EmulatedNetworkStatsGatheringMode stats_gathering_mode) - : time_mode_(mode), - stats_gathering_mode_(stats_gathering_mode), - time_controller_(CreateTimeController(mode)), + NetworkEmulationManagerConfig config) + : time_mode_(config.time_mode), + stats_gathering_mode_(config.stats_gathering_mode), + time_controller_( + CreateTimeController(config.time_mode, config.field_trials)), clock_(time_controller_->GetClock()), + fake_dtls_handshake_sizes_(config.fake_dtls_handshake_sizes), next_node_id_(1), next_ip4_address_(kMinIPv4Address), task_queue_(time_controller_->GetTaskQueueFactory()->CreateTaskQueue( @@ -76,7 +101,8 @@ EmulatedNetworkNode* NetworkEmulationManagerImpl::CreateEmulatedNode( EmulatedNetworkNode* NetworkEmulationManagerImpl::CreateEmulatedNode( std::unique_ptr network_behavior) { auto node = std::make_unique( - clock_, &task_queue_, std::move(network_behavior), stats_gathering_mode_); + clock_, task_queue_.Get(), std::move(network_behavior), + stats_gathering_mode_, fake_dtls_handshake_sizes_); EmulatedNetworkNode* out = node.get(); task_queue_.PostTask([this, node = std::move(node)]() mutable { network_nodes_.push_back(std::move(node)); @@ -91,7 +117,7 @@ NetworkEmulationManagerImpl::NodeBuilder() { EmulatedEndpointImpl* NetworkEmulationManagerImpl::CreateEndpoint( EmulatedEndpointConfig config) { - absl::optional ip = config.ip; + std::optional ip = config.ip; if (!ip) { switch (config.generated_ip_family) { case EmulatedEndpointConfig::IpAddressFamily::kIpv4: @@ -111,7 +137,7 @@ EmulatedEndpointImpl* NetworkEmulationManagerImpl::CreateEndpoint( auto node = std::make_unique( EmulatedEndpointImpl::Options(next_node_id_++, *ip, config, stats_gathering_mode_), - config.start_as_enabled, &task_queue_, clock_); + config.start_as_enabled, task_queue_.Get(), clock_); EmulatedEndpointImpl* out = node.get(); endpoints_.push_back(std::move(node)); return out; @@ -121,15 +147,16 @@ void NetworkEmulationManagerImpl::EnableEndpoint(EmulatedEndpoint* endpoint) { EmulatedNetworkManager* network_manager = endpoint_to_network_manager_[endpoint]; RTC_CHECK(network_manager); - network_manager->EnableEndpoint(static_cast(endpoint)); + static_cast(endpoint)->Enable(); + network_manager->UpdateNetworks(); } void NetworkEmulationManagerImpl::DisableEndpoint(EmulatedEndpoint* endpoint) { EmulatedNetworkManager* network_manager = endpoint_to_network_manager_[endpoint]; RTC_CHECK(network_manager); - network_manager->DisableEndpoint( - static_cast(endpoint)); + static_cast(endpoint)->Disable(); + network_manager->UpdateNetworks(); } EmulatedRoute* NetworkEmulationManagerImpl::CreateRoute( @@ -264,7 +291,7 @@ CrossTrafficGenerator* NetworkEmulationManagerImpl::StartCrossTraffic( void NetworkEmulationManagerImpl::StopCrossTraffic( CrossTrafficGenerator* generator) { - task_queue_.PostTask([=]() { + task_queue_.PostTask([this, generator]() { auto it = std::find_if(cross_traffics_.begin(), cross_traffics_.end(), [=](const CrossTrafficSource& el) { return el.first.get() == generator; @@ -274,7 +301,7 @@ void NetworkEmulationManagerImpl::StopCrossTraffic( }); } -EmulatedNetworkManagerInterface* +EmulatedNetworkManagerInterface* absl_nonnull NetworkEmulationManagerImpl::CreateEmulatedNetworkManagerInterface( const std::vector& endpoints) { std::vector endpoint_impls; @@ -285,15 +312,10 @@ 
NetworkEmulationManagerImpl::CreateEmulatedNetworkManagerInterface( auto endpoints_container = std::make_unique( endpoint_impls, stats_gathering_mode_); auto network_manager = std::make_unique( - time_controller_.get(), &task_queue_, endpoints_container.get()); + time_controller_.get(), task_queue_.Get(), endpoints_container.get()); for (auto* endpoint : endpoints) { // Associate endpoint with network manager. - bool insertion_result = - endpoint_to_network_manager_.insert({endpoint, network_manager.get()}) - .second; - RTC_CHECK(insertion_result) - << "Endpoint ip=" << endpoint->GetPeerLocalAddress().ToString() - << " is already used for another network"; + endpoint_to_network_manager_[endpoint] = network_manager.get(); } EmulatedNetworkManagerInterface* out = network_manager.get(); @@ -304,7 +326,7 @@ NetworkEmulationManagerImpl::CreateEmulatedNetworkManagerInterface( } void NetworkEmulationManagerImpl::GetStats( - rtc::ArrayView endpoints, + ArrayView endpoints, std::function stats_callback) { task_queue_.PostTask([endpoints, stats_callback, stats_gathering_mode = stats_gathering_mode_]() { @@ -321,7 +343,7 @@ void NetworkEmulationManagerImpl::GetStats( } void NetworkEmulationManagerImpl::GetStats( - rtc::ArrayView nodes, + ArrayView nodes, std::function stats_callback) { task_queue_.PostTask( [nodes, stats_callback, stats_gathering_mode = stats_gathering_mode_]() { @@ -333,11 +355,10 @@ void NetworkEmulationManagerImpl::GetStats( }); } -absl::optional -NetworkEmulationManagerImpl::GetNextIPv4Address() { +std::optional NetworkEmulationManagerImpl::GetNextIPv4Address() { uint32_t addresses_count = kMaxIPv4Address - kMinIPv4Address; for (uint32_t i = 0; i < addresses_count; i++) { - rtc::IPAddress ip(next_ip4_address_); + IPAddress ip(next_ip4_address_); if (next_ip4_address_ == kMaxIPv4Address) { next_ip4_address_ = kMinIPv4Address; } else { @@ -347,7 +368,7 @@ NetworkEmulationManagerImpl::GetNextIPv4Address() { return ip; } } - return absl::nullopt; + return std::nullopt; } Timestamp NetworkEmulationManagerImpl::Now() const { @@ -359,11 +380,11 @@ EmulatedTURNServerInterface* NetworkEmulationManagerImpl::CreateTURNServer( auto* client = CreateEndpoint(config.client_config); auto* peer = CreateEndpoint(config.client_config); char buf[128]; - rtc::SimpleStringBuilder str(buf); + SimpleStringBuilder str(buf); str.AppendFormat("turn_server_%u", static_cast(turn_servers_.size())); auto turn = std::make_unique( - time_controller_->CreateThread(str.str()), client, peer); + config, time_controller_->CreateThread(str.str()), client, peer); auto out = turn.get(); turn_servers_.push_back(std::move(turn)); return out; diff --git a/test/network/network_emulation_manager.h b/test/network/network_emulation_manager.h index 29debca693..11b1d11d3a 100644 --- a/test/network/network_emulation_manager.h +++ b/test/network/network_emulation_manager.h @@ -11,18 +11,25 @@ #ifndef TEST_NETWORK_NETWORK_EMULATION_MANAGER_H_ #define TEST_NETWORK_NETWORK_EMULATION_MANAGER_H_ +#include +#include +#include #include #include +#include #include #include #include +#include "absl/base/nullability.h" #include "api/array_view.h" +#include "api/test/network_emulation/cross_traffic.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" #include "api/test/network_emulation_manager.h" #include "api/test/simulated_network.h" #include "api/test/time_controller.h" -#include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "rtc_base/ip_address.h" #include "rtc_base/task_queue_for_test.h" 
#include "rtc_base/task_utils/repeating_task.h" #include "system_wrappers/include/clock.h" @@ -36,9 +43,7 @@ namespace test { class NetworkEmulationManagerImpl : public NetworkEmulationManager { public: - NetworkEmulationManagerImpl( - TimeMode mode, - EmulatedNetworkStatsGatheringMode stats_gathering_mode); + explicit NetworkEmulationManagerImpl(NetworkEmulationManagerConfig config); ~NetworkEmulationManagerImpl(); EmulatedNetworkNode* CreateEmulatedNode(BuiltInNetworkBehaviorConfig config, @@ -76,15 +81,16 @@ class NetworkEmulationManagerImpl : public NetworkEmulationManager { std::unique_ptr generator) override; void StopCrossTraffic(CrossTrafficGenerator* generator) override; - EmulatedNetworkManagerInterface* CreateEmulatedNetworkManagerInterface( + EmulatedNetworkManagerInterface* absl_nonnull + CreateEmulatedNetworkManagerInterface( const std::vector& endpoints) override; void GetStats( - rtc::ArrayView endpoints, + ArrayView endpoints, std::function stats_callback) override; void GetStats( - rtc::ArrayView nodes, + ArrayView nodes, std::function stats_callback) override; TimeController* time_controller() override { return time_controller_.get(); } @@ -100,18 +106,19 @@ class NetworkEmulationManagerImpl : public NetworkEmulationManager { using CrossTrafficSource = std::pair, RepeatingTaskHandle>; - absl::optional GetNextIPv4Address(); + std::optional GetNextIPv4Address(); const TimeMode time_mode_; const EmulatedNetworkStatsGatheringMode stats_gathering_mode_; const std::unique_ptr time_controller_; Clock* const clock_; + const bool fake_dtls_handshake_sizes_; int next_node_id_; RepeatingTaskHandle process_task_handle_; uint32_t next_ip4_address_; - std::set used_ip_addresses_; + std::set used_ip_addresses_; // All objects can be added to the manager only when it is idle. std::vector> endpoints_; diff --git a/test/network/network_emulation_pc_unittest.cc b/test/network/network_emulation_pc_unittest.cc index 51a45a8234..2eca595ebe 100644 --- a/test/network/network_emulation_pc_unittest.cc +++ b/test/network/network_emulation_pc_unittest.cc @@ -8,35 +8,43 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include #include +#include +#include -#include "api/call/call_factory_interface.h" +#include "api/audio_options.h" +#include "api/enable_media_with_defaults.h" +#include "api/jsep.h" +#include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" #include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/scoped_refptr.h" #include "api/task_queue/default_task_queue_factory.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/test/network_emulation_manager.h" +#include "api/test/rtc_error_matchers.h" +#include "api/test/simulated_network.h" #include "api/transport/field_trial_based_config.h" -#include "call/simulated_network.h" -#include "media/engine/webrtc_media_engine.h" -#include "media/engine/webrtc_media_engine_defaults.h" #include "modules/audio_device/include/test_audio_device.h" -#include "p2p/base/basic_packet_socket_factory.h" -#include "p2p/client/basic_port_allocator.h" +#include "p2p/base/port_allocator.h" #include "pc/peer_connection_wrapper.h" #include "pc/test/mock_peer_connection_observers.h" -#include "rtc_base/gunit.h" +#include "rtc_base/network.h" #include "rtc_base/task_queue_for_test.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/network/network_emulation.h" #include "test/network/network_emulation_manager.h" +#include "test/network/simulated_network.h" +#include "test/wait_until.h" namespace webrtc { namespace test { namespace { -constexpr int kDefaultTimeoutMs = 1000; +using ::testing::Eq; +using ::testing::IsTrue; + constexpr int kMaxAptitude = 32000; constexpr int kSamplingFrequency = 48000; constexpr char kSignalThreadName[] = "signaling_thread"; @@ -52,48 +60,37 @@ bool AddIceCandidates(PeerConnectionWrapper* peer, return success; } -rtc::scoped_refptr CreatePeerConnectionFactory( - rtc::Thread* signaling_thread, - rtc::Thread* network_thread) { +scoped_refptr CreatePeerConnectionFactory( + Thread* signaling_thread, + EmulatedNetworkManagerInterface* network) { PeerConnectionFactoryDependencies pcf_deps; pcf_deps.task_queue_factory = CreateDefaultTaskQueueFactory(); - pcf_deps.call_factory = CreateCallFactory(); - pcf_deps.event_log_factory = - std::make_unique(pcf_deps.task_queue_factory.get()); - pcf_deps.network_thread = network_thread; + pcf_deps.event_log_factory = std::make_unique(); + pcf_deps.network_thread = network->network_thread(); pcf_deps.signaling_thread = signaling_thread; pcf_deps.trials = std::make_unique(); - cricket::MediaEngineDependencies media_deps; - media_deps.task_queue_factory = pcf_deps.task_queue_factory.get(); - media_deps.adm = TestAudioDeviceModule::Create( - media_deps.task_queue_factory, + pcf_deps.socket_factory = network->socket_factory(); + pcf_deps.network_manager = network->ReleaseNetworkManager(); + + pcf_deps.adm = TestAudioDeviceModule::Create( + pcf_deps.task_queue_factory.get(), TestAudioDeviceModule::CreatePulsedNoiseCapturer(kMaxAptitude, kSamplingFrequency), TestAudioDeviceModule::CreateDiscardRenderer(kSamplingFrequency), /*speed=*/1.f); - media_deps.trials = pcf_deps.trials.get(); - SetMediaEngineDefaults(&media_deps); - pcf_deps.media_engine = cricket::CreateMediaEngine(std::move(media_deps)); + EnableMediaWithDefaults(pcf_deps); return CreateModularPeerConnectionFactory(std::move(pcf_deps)); } -rtc::scoped_refptr CreatePeerConnection( - const rtc::scoped_refptr& pcf, +scoped_refptr CreatePeerConnection( + const scoped_refptr& pcf, PeerConnectionObserver* observer, - rtc::PacketSocketFactory* packet_socket_factory, - 
rtc::NetworkManager* network_manager, EmulatedTURNServerInterface* turn_server = nullptr) { PeerConnectionDependencies pc_deps(observer); - auto port_allocator = std::make_unique( - network_manager, packet_socket_factory); - - // This test does not support TCP - int flags = cricket::PORTALLOCATOR_DISABLE_TCP; - port_allocator->set_flags(port_allocator->flags() | flags); - - pc_deps.allocator = std::move(port_allocator); PeerConnectionInterface::RTCConfiguration rtc_configuration; rtc_configuration.sdp_semantics = SdpSemantics::kUnifiedPlan; + // This test does not support TCP + rtc_configuration.port_allocator_config.flags = PORTALLOCATOR_DISABLE_TCP; if (turn_server != nullptr) { webrtc::PeerConnectionInterface::IceServer server; server.username = turn_server->GetIceServerConfig().username; @@ -113,13 +110,12 @@ rtc::scoped_refptr CreatePeerConnection( } // namespace TEST(NetworkEmulationManagerPCTest, Run) { - std::unique_ptr signaling_thread = rtc::Thread::Create(); + std::unique_ptr signaling_thread = Thread::Create(); signaling_thread->SetName(kSignalThreadName, nullptr); signaling_thread->Start(); // Setup emulated network - NetworkEmulationManagerImpl emulation( - TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); + NetworkEmulationManagerImpl emulation({.time_mode = TimeMode::kRealTime}); EmulatedNetworkNode* alice_node = emulation.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); @@ -138,28 +134,23 @@ TEST(NetworkEmulationManagerPCTest, Run) { emulation.CreateEmulatedNetworkManagerInterface({bob_endpoint}); // Setup peer connections. - rtc::scoped_refptr alice_pcf; - rtc::scoped_refptr alice_pc; + scoped_refptr alice_pcf; + scoped_refptr alice_pc; std::unique_ptr alice_observer = std::make_unique(); - rtc::scoped_refptr bob_pcf; - rtc::scoped_refptr bob_pc; + scoped_refptr bob_pcf; + scoped_refptr bob_pc; std::unique_ptr bob_observer = std::make_unique(); SendTask(signaling_thread.get(), [&]() { - alice_pcf = CreatePeerConnectionFactory(signaling_thread.get(), - alice_network->network_thread()); - alice_pc = CreatePeerConnection(alice_pcf, alice_observer.get(), - alice_network->packet_socket_factory(), - alice_network->network_manager()); - - bob_pcf = CreatePeerConnectionFactory(signaling_thread.get(), - bob_network->network_thread()); - bob_pc = CreatePeerConnection(bob_pcf, bob_observer.get(), - bob_network->packet_socket_factory(), - bob_network->network_manager()); + alice_pcf = + CreatePeerConnectionFactory(signaling_thread.get(), alice_network); + alice_pc = CreatePeerConnection(alice_pcf, alice_observer.get()); + + bob_pcf = CreatePeerConnectionFactory(signaling_thread.get(), bob_network); + bob_pc = CreatePeerConnection(bob_pcf, bob_observer.get()); }); std::unique_ptr alice = @@ -170,20 +161,23 @@ TEST(NetworkEmulationManagerPCTest, Run) { std::move(bob_observer)); SendTask(signaling_thread.get(), [&]() { - rtc::scoped_refptr source = - alice_pcf->CreateAudioSource(cricket::AudioOptions()); - rtc::scoped_refptr track = + scoped_refptr source = + alice_pcf->CreateAudioSource(AudioOptions()); + scoped_refptr track = alice_pcf->CreateAudioTrack("audio", source.get()); alice->AddTransceiver(track); // Connect peers. ASSERT_TRUE(alice->ExchangeOfferAnswerWith(bob.get())); // Do the SDP negotiation, and also exchange ice candidates. 
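+    // The waits that follow use the matcher-based WaitUntil() helper from
+    // test/wait_until.h together with IsRtcOk(), replacing the old
+    // ASSERT_TRUE_WAIT macro. The general shape, with `pc` standing in for
+    // either peer wrapper, is:
+    //
+    //   ASSERT_THAT(WaitUntil([&] { return pc->IsIceGatheringDone(); }, IsTrue()),
+    //               IsRtcOk());
+    //
+    // A timeout and clock can be passed explicitly when the defaults do not
+    // fit, as the stats waits elsewhere in this patch do (`done`,
+    // `kStatsWaitTimeout` and `emulation` are placeholders here):
+    //
+    //   ASSERT_THAT(WaitUntil([&] { return done.load(); }, Eq(2),
+    //                         {.timeout = kStatsWaitTimeout,
+    //                          .clock = emulation.time_controller()}),
+    //               IsRtcOk());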
- ASSERT_TRUE_WAIT( - alice->signaling_state() == PeerConnectionInterface::kStable, - kDefaultTimeoutMs); - ASSERT_TRUE_WAIT(alice->IsIceGatheringDone(), kDefaultTimeoutMs); - ASSERT_TRUE_WAIT(bob->IsIceGatheringDone(), kDefaultTimeoutMs); + ASSERT_THAT(WaitUntil([&] { return alice->signaling_state(); }, + Eq(PeerConnectionInterface::kStable)), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return alice->IsIceGatheringDone(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return bob->IsIceGatheringDone(); }, IsTrue()), + IsRtcOk()); // Connect an ICE candidate pairs. ASSERT_TRUE( @@ -191,8 +185,10 @@ TEST(NetworkEmulationManagerPCTest, Run) { ASSERT_TRUE( AddIceCandidates(alice.get(), bob->observer()->GetAllCandidates())); // This means that ICE and DTLS are connected. - ASSERT_TRUE_WAIT(bob->IsIceConnected(), kDefaultTimeoutMs); - ASSERT_TRUE_WAIT(alice->IsIceConnected(), kDefaultTimeoutMs); + ASSERT_THAT(WaitUntil([&] { return bob->IsIceConnected(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return alice->IsIceConnected(); }, IsTrue()), + IsRtcOk()); // Close peer connections alice->pc()->Close(); @@ -205,13 +201,12 @@ TEST(NetworkEmulationManagerPCTest, Run) { } TEST(NetworkEmulationManagerPCTest, RunTURN) { - std::unique_ptr signaling_thread = rtc::Thread::Create(); + std::unique_ptr signaling_thread = Thread::Create(); signaling_thread->SetName(kSignalThreadName, nullptr); signaling_thread->Start(); // Setup emulated network - NetworkEmulationManagerImpl emulation( - TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); + NetworkEmulationManagerImpl emulation({.time_mode = TimeMode::kRealTime}); EmulatedNetworkNode* alice_node = emulation.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); @@ -249,28 +244,24 @@ TEST(NetworkEmulationManagerPCTest, RunTURN) { emulation.CreateEmulatedNetworkManagerInterface({bob_endpoint}); // Setup peer connections. 
- rtc::scoped_refptr alice_pcf; - rtc::scoped_refptr alice_pc; + scoped_refptr alice_pcf; + scoped_refptr alice_pc; std::unique_ptr alice_observer = std::make_unique(); - rtc::scoped_refptr bob_pcf; - rtc::scoped_refptr bob_pc; + scoped_refptr bob_pcf; + scoped_refptr bob_pc; std::unique_ptr bob_observer = std::make_unique(); SendTask(signaling_thread.get(), [&]() { - alice_pcf = CreatePeerConnectionFactory(signaling_thread.get(), - alice_network->network_thread()); - alice_pc = CreatePeerConnection( - alice_pcf, alice_observer.get(), alice_network->packet_socket_factory(), - alice_network->network_manager(), alice_turn); - - bob_pcf = CreatePeerConnectionFactory(signaling_thread.get(), - bob_network->network_thread()); - bob_pc = CreatePeerConnection(bob_pcf, bob_observer.get(), - bob_network->packet_socket_factory(), - bob_network->network_manager(), bob_turn); + alice_pcf = + CreatePeerConnectionFactory(signaling_thread.get(), alice_network); + alice_pc = + CreatePeerConnection(alice_pcf, alice_observer.get(), alice_turn); + + bob_pcf = CreatePeerConnectionFactory(signaling_thread.get(), bob_network); + bob_pc = CreatePeerConnection(bob_pcf, bob_observer.get(), bob_turn); }); std::unique_ptr alice = @@ -281,20 +272,23 @@ TEST(NetworkEmulationManagerPCTest, RunTURN) { std::move(bob_observer)); SendTask(signaling_thread.get(), [&]() { - rtc::scoped_refptr source = - alice_pcf->CreateAudioSource(cricket::AudioOptions()); - rtc::scoped_refptr track = + scoped_refptr source = + alice_pcf->CreateAudioSource(AudioOptions()); + scoped_refptr track = alice_pcf->CreateAudioTrack("audio", source.get()); alice->AddTransceiver(track); // Connect peers. ASSERT_TRUE(alice->ExchangeOfferAnswerWith(bob.get())); // Do the SDP negotiation, and also exchange ice candidates. - ASSERT_TRUE_WAIT( - alice->signaling_state() == PeerConnectionInterface::kStable, - kDefaultTimeoutMs); - ASSERT_TRUE_WAIT(alice->IsIceGatheringDone(), kDefaultTimeoutMs); - ASSERT_TRUE_WAIT(bob->IsIceGatheringDone(), kDefaultTimeoutMs); + ASSERT_THAT(WaitUntil([&] { return alice->signaling_state(); }, + Eq(PeerConnectionInterface::kStable)), + IsRtcOk()); + ASSERT_THAT( + WaitUntil([&] { return alice->IsIceGatheringDone(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return bob->IsIceGatheringDone(); }, IsTrue()), + IsRtcOk()); // Connect an ICE candidate pairs. ASSERT_TRUE( @@ -302,8 +296,10 @@ TEST(NetworkEmulationManagerPCTest, RunTURN) { ASSERT_TRUE( AddIceCandidates(alice.get(), bob->observer()->GetAllCandidates())); // This means that ICE and DTLS are connected. 
- ASSERT_TRUE_WAIT(bob->IsIceConnected(), kDefaultTimeoutMs); - ASSERT_TRUE_WAIT(alice->IsIceConnected(), kDefaultTimeoutMs); + ASSERT_THAT(WaitUntil([&] { return bob->IsIceConnected(); }, IsTrue()), + IsRtcOk()); + ASSERT_THAT(WaitUntil([&] { return alice->IsIceConnected(); }, IsTrue()), + IsRtcOk()); // Close peer connections alice->pc()->Close(); diff --git a/test/network/network_emulation_unittest.cc b/test/network/network_emulation_unittest.cc index 2e67a5a00a..eb158c0753 100644 --- a/test/network/network_emulation_unittest.cc +++ b/test/network/network_emulation_unittest.cc @@ -11,25 +11,52 @@ #include "test/network/network_emulation.h" #include +#include +#include +#include #include +#include #include - +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "api/task_queue/task_queue_base.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/test/network_emulation_manager.h" +#include "api/test/rtc_error_matchers.h" #include "api/test/simulated_network.h" +#include "api/transport/ecn_marking.h" +#include "api/transport/stun.h" +#include "api/units/data_size.h" #include "api/units/time_delta.h" -#include "call/simulated_network.h" -#include "rtc_base/event.h" -#include "rtc_base/gunit.h" +#include "api/units/timestamp.h" +#include "rtc_base/buffer.h" +#include "rtc_base/byte_buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/socket.h" +#include "rtc_base/socket_address.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/network/network_emulation_manager.h" +#include "test/network/simulated_network.h" +#include "test/wait_until.h" namespace webrtc { namespace test { namespace { using ::testing::ElementsAreArray; +using ::testing::Eq; constexpr TimeDelta kNetworkPacketWaitTimeout = TimeDelta::Millis(100); constexpr TimeDelta kStatsWaitTimeout = TimeDelta::Seconds(1); @@ -37,37 +64,40 @@ constexpr int kOverheadIpv4Udp = 20 + 8; class SocketReader : public sigslot::has_slots<> { public: - explicit SocketReader(rtc::Socket* socket, rtc::Thread* network_thread) + explicit SocketReader(Socket* socket, Thread* network_thread) : socket_(socket), network_thread_(network_thread) { socket_->SignalReadEvent.connect(this, &SocketReader::OnReadEvent); - size_ = 128 * 1024; - buf_ = new char[size_]; } - ~SocketReader() override { delete[] buf_; } - void OnReadEvent(rtc::Socket* socket) { + void OnReadEvent(Socket* socket) { RTC_DCHECK(socket_ == socket); RTC_DCHECK(network_thread_->IsCurrent()); - int64_t timestamp; - len_ = socket_->Recv(buf_, size_, ×tamp); + + Socket::ReceiveBuffer receive_buffer(payload_); + socket_->RecvFrom(receive_buffer); + last_ecn_mark_ = receive_buffer.ecn; MutexLock lock(&lock_); received_count_++; } - int ReceivedCount() { + int ReceivedCount() const { MutexLock lock(&lock_); return received_count_; } + webrtc::EcnMarking LastEcnMarking() const { + MutexLock lock(&lock_); + return last_ecn_mark_; + } + private: - rtc::Socket* const socket_; - rtc::Thread* const network_thread_; - char* buf_; - size_t size_; - int len_; + Socket* const socket_; + Thread* const network_thread_; + Buffer payload_; + webrtc::EcnMarking last_ecn_mark_; - Mutex lock_; + mutable Mutex lock_; 
int received_count_ RTC_GUARDED_BY(lock_) = 0; }; @@ -76,6 +106,23 @@ class MockReceiver : public EmulatedNetworkReceiverInterface { MOCK_METHOD(void, OnPacketReceived, (EmulatedIpPacket packet), (override)); }; +class MockNetworkBehaviourInterface : public NetworkBehaviorInterface { + public: + MOCK_METHOD(bool, EnqueuePacket, (PacketInFlightInfo), (override)); + MOCK_METHOD(std::vector, + DequeueDeliverablePackets, + (int64_t), + (override)); + MOCK_METHOD(std::optional, + NextDeliveryTimeUs, + (), + (const, override)); + MOCK_METHOD(void, + RegisterDeliveryTimeChangedCallback, + (absl::AnyInvocable), + (override)); +}; + class NetworkEmulationManagerThreeNodesRoutingTest : public ::testing::Test { public: NetworkEmulationManagerThreeNodesRoutingTest() { @@ -107,28 +154,24 @@ class NetworkEmulationManagerThreeNodesRoutingTest : public ::testing::Test { // Next code is using API of EmulatedEndpoint, that is visible only for // internals of network emulation layer. Don't use this API in other tests. // Send packet from e1 to e2. - e1_->SendPacket( - rtc::SocketAddress(e1_->GetPeerLocalAddress(), common_send_port), - rtc::SocketAddress(e2_->GetPeerLocalAddress(), r_e1_e2_port), - rtc::CopyOnWriteBuffer(10)); + e1_->SendPacket(SocketAddress(e1_->GetPeerLocalAddress(), common_send_port), + SocketAddress(e2_->GetPeerLocalAddress(), r_e1_e2_port), + CopyOnWriteBuffer(10)); // Send packet from e2 to e1. - e2_->SendPacket( - rtc::SocketAddress(e2_->GetPeerLocalAddress(), common_send_port), - rtc::SocketAddress(e1_->GetPeerLocalAddress(), r_e2_e1_port), - rtc::CopyOnWriteBuffer(10)); + e2_->SendPacket(SocketAddress(e2_->GetPeerLocalAddress(), common_send_port), + SocketAddress(e1_->GetPeerLocalAddress(), r_e2_e1_port), + CopyOnWriteBuffer(10)); // Send packet from e1 to e3. - e1_->SendPacket( - rtc::SocketAddress(e1_->GetPeerLocalAddress(), common_send_port), - rtc::SocketAddress(e3_->GetPeerLocalAddress(), r_e1_e3_port), - rtc::CopyOnWriteBuffer(10)); + e1_->SendPacket(SocketAddress(e1_->GetPeerLocalAddress(), common_send_port), + SocketAddress(e3_->GetPeerLocalAddress(), r_e1_e3_port), + CopyOnWriteBuffer(10)); // Send packet from e3 to e1. - e3_->SendPacket( - rtc::SocketAddress(e3_->GetPeerLocalAddress(), common_send_port), - rtc::SocketAddress(e1_->GetPeerLocalAddress(), r_e3_e1_port), - rtc::CopyOnWriteBuffer(10)); + e3_->SendPacket(SocketAddress(e3_->GetPeerLocalAddress(), common_send_port), + SocketAddress(e1_->GetPeerLocalAddress(), r_e3_e1_port), + CopyOnWriteBuffer(10)); // Sleep at the end to wait for async packets delivery. 
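    // With this fixture's TimeMode::kRealTime manager, the AdvanceTime() call
    // below effectively sleeps on the wall clock for kNetworkPacketWaitTimeout.
    // If that wait ever becomes a problem, the same fixture could instead be
    // built on virtual time, where AdvanceTime() fast-forwards rather than
    // sleeps; a sketch using the config shape introduced by this patch:
    //
    //   NetworkEmulationManagerImpl emulation_{
    //       NetworkEmulationManagerConfig{.time_mode = TimeMode::kSimulated}};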
emulation_.time_controller()->AdvanceTime(kNetworkPacketWaitTimeout); @@ -143,7 +186,7 @@ class NetworkEmulationManagerThreeNodesRoutingTest : public ::testing::Test { MockReceiver r_e3_e1_; NetworkEmulationManagerImpl emulation_{ - TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault}; + NetworkEmulationManagerConfig{.time_mode = TimeMode::kRealTime}}; EmulatedEndpoint* e1_; EmulatedEndpoint* e2_; EmulatedEndpoint* e3_; @@ -157,12 +200,10 @@ EmulatedNetworkNode* CreateEmulatedNodeWithDefaultBuiltInConfig( } // namespace -using ::testing::_; - TEST(NetworkEmulationManagerTest, GeneratedIpv4AddressDoesNotCollide) { NetworkEmulationManagerImpl network_manager( - TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); - std::set ips; + {.time_mode = TimeMode::kRealTime}); + std::set ips; EmulatedEndpointConfig config; config.generated_ip_family = EmulatedEndpointConfig::IpAddressFamily::kIpv4; for (int i = 0; i < 1000; i++) { @@ -175,8 +216,8 @@ TEST(NetworkEmulationManagerTest, GeneratedIpv4AddressDoesNotCollide) { TEST(NetworkEmulationManagerTest, GeneratedIpv6AddressDoesNotCollide) { NetworkEmulationManagerImpl network_manager( - TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); - std::set ips; + {.time_mode = TimeMode::kRealTime}); + std::set ips; EmulatedEndpointConfig config; config.generated_ip_family = EmulatedEndpointConfig::IpAddressFamily::kIpv6; for (int i = 0; i < 1000; i++) { @@ -189,7 +230,7 @@ TEST(NetworkEmulationManagerTest, GeneratedIpv6AddressDoesNotCollide) { TEST(NetworkEmulationManagerTest, Run) { NetworkEmulationManagerImpl network_manager( - TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); + {.time_mode = TimeMode::kRealTime}); EmulatedNetworkNode* alice_node = network_manager.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); @@ -207,13 +248,13 @@ TEST(NetworkEmulationManagerTest, Run) { EmulatedNetworkManagerInterface* nt2 = network_manager.CreateEmulatedNetworkManagerInterface({bob_endpoint}); - rtc::Thread* t1 = nt1->network_thread(); - rtc::Thread* t2 = nt2->network_thread(); + Thread* t1 = nt1->network_thread(); + Thread* t2 = nt2->network_thread(); - rtc::CopyOnWriteBuffer data("Hello"); + CopyOnWriteBuffer data("Hello"); for (uint64_t j = 0; j < 2; j++) { - rtc::Socket* s1 = nullptr; - rtc::Socket* s2 = nullptr; + Socket* s1 = nullptr; + Socket* s2 = nullptr; SendTask(t1, [&] { s1 = t1->socketserver()->CreateSocket(AF_INET, SOCK_DGRAM); }); @@ -224,8 +265,8 @@ TEST(NetworkEmulationManagerTest, Run) { SocketReader r1(s1, t1); SocketReader r2(s2, t2); - rtc::SocketAddress a1(alice_endpoint->GetPeerLocalAddress(), 0); - rtc::SocketAddress a2(bob_endpoint->GetPeerLocalAddress(), 0); + SocketAddress a1(alice_endpoint->GetPeerLocalAddress(), 0); + SocketAddress a2(bob_endpoint->GetPeerLocalAddress(), 0); SendTask(t1, [&] { s1->Bind(a1); @@ -265,8 +306,8 @@ TEST(NetworkEmulationManagerTest, Run) { EXPECT_EQ(st.PacketsDiscardedNoReceiver(), 0l); EXPECT_EQ(st.BytesDiscardedNoReceiver().bytes(), 0l); - rtc::IPAddress bob_ip = bob_endpoint->GetPeerLocalAddress(); - std::map source_st = + IPAddress bob_ip = bob_endpoint->GetPeerLocalAddress(); + std::map source_st = st.incoming_stats_per_source; ASSERT_EQ(source_st.size(), 1lu); EXPECT_EQ(source_st.at(bob_ip).packets_received, 2000l); @@ -275,7 +316,7 @@ TEST(NetworkEmulationManagerTest, Run) { EXPECT_EQ(source_st.at(bob_ip).packets_discarded_no_receiver, 0l); EXPECT_EQ(source_st.at(bob_ip).bytes_discarded_no_receiver.bytes(), 0l); - std::map 
dest_st = + std::map dest_st = st.outgoing_stats_per_destination; ASSERT_EQ(dest_st.size(), 1lu); EXPECT_EQ(dest_st.at(bob_ip).packets_sent, 2000l); @@ -307,8 +348,8 @@ TEST(NetworkEmulationManagerTest, Run) { EXPECT_TRUE(st.FirstPacketReceivedTime().IsFinite()); EXPECT_TRUE(st.LastPacketReceivedTime().IsFinite()); - rtc::IPAddress alice_ip = alice_endpoint->GetPeerLocalAddress(); - std::map source_st = + IPAddress alice_ip = alice_endpoint->GetPeerLocalAddress(); + std::map source_st = st.incoming_stats_per_source; ASSERT_EQ(source_st.size(), 1lu); EXPECT_EQ(source_st.at(alice_ip).packets_received, 2000l); @@ -317,7 +358,7 @@ TEST(NetworkEmulationManagerTest, Run) { EXPECT_EQ(source_st.at(alice_ip).packets_discarded_no_receiver, 0l); EXPECT_EQ(source_st.at(alice_ip).bytes_discarded_no_receiver.bytes(), 0l); - std::map dest_st = + std::map dest_st = st.outgoing_stats_per_destination; ASSERT_EQ(dest_st.size(), 1lu); EXPECT_EQ(dest_st.at(alice_ip).packets_sent, 2000l); @@ -336,14 +377,101 @@ TEST(NetworkEmulationManagerTest, Run) { received_stats_count++; }); - ASSERT_EQ_SIMULATED_WAIT(received_stats_count.load(), 2, - kStatsWaitTimeout.ms(), - *network_manager.time_controller()); + ASSERT_THAT(WaitUntil([&] { return received_stats_count.load(); }, Eq(2), + {.timeout = kStatsWaitTimeout, + .clock = network_manager.time_controller()}), + IsRtcOk()); +} + +TEST(NetworkEmulationManagerTest, EcnMarkingIsPropagated) { + NetworkEmulationManagerImpl network_manager( + {.time_mode = TimeMode::kRealTime}); + + EmulatedNetworkNode* alice_node = network_manager.CreateEmulatedNode( + std::make_unique(BuiltInNetworkBehaviorConfig())); + EmulatedNetworkNode* bob_node = network_manager.CreateEmulatedNode( + std::make_unique(BuiltInNetworkBehaviorConfig())); + EmulatedEndpoint* alice_endpoint = + network_manager.CreateEndpoint(EmulatedEndpointConfig()); + EmulatedEndpoint* bob_endpoint = + network_manager.CreateEndpoint(EmulatedEndpointConfig()); + network_manager.CreateRoute(alice_endpoint, {alice_node}, bob_endpoint); + network_manager.CreateRoute(bob_endpoint, {bob_node}, alice_endpoint); + + EmulatedNetworkManagerInterface* nt1 = + network_manager.CreateEmulatedNetworkManagerInterface({alice_endpoint}); + EmulatedNetworkManagerInterface* nt2 = + network_manager.CreateEmulatedNetworkManagerInterface({bob_endpoint}); + + Thread* t1 = nt1->network_thread(); + Thread* t2 = nt2->network_thread(); + + Socket* s1 = nullptr; + Socket* s2 = nullptr; + SendTask(t1, + [&] { s1 = t1->socketserver()->CreateSocket(AF_INET, SOCK_DGRAM); }); + SendTask(t2, + [&] { s2 = t2->socketserver()->CreateSocket(AF_INET, SOCK_DGRAM); }); + + SocketReader r1(s1, t1); + SocketReader r2(s2, t2); + + SocketAddress a1(alice_endpoint->GetPeerLocalAddress(), 0); + SocketAddress a2(bob_endpoint->GetPeerLocalAddress(), 0); + + SendTask(t1, [&] { + s1->Bind(a1); + a1 = s1->GetLocalAddress(); + }); + SendTask(t2, [&] { + s2->Bind(a2); + a2 = s2->GetLocalAddress(); + }); + + SendTask(t1, [&] { s1->Connect(a2); }); + SendTask(t2, [&] { s2->Connect(a1); }); + + t1->PostTask([&]() { + s1->SetOption(Socket::Option::OPT_SEND_ECN, 1); + CopyOnWriteBuffer data("Hello"); + s1->Send(data.data(), data.size()); + }); + + network_manager.time_controller()->AdvanceTime(TimeDelta::Seconds(1)); + + EXPECT_EQ(r2.ReceivedCount(), 1); + EXPECT_EQ(r2.LastEcnMarking(), webrtc::EcnMarking::kEct1); + + std::atomic received_stats_count{0}; + nt1->GetStats([&](EmulatedNetworkStats st) { + EXPECT_EQ(st.overall_incoming_stats.packets_received, 0); + 
EXPECT_EQ(st.overall_outgoing_stats.packets_sent, 1); + EXPECT_EQ(st.overall_outgoing_stats.ecn_count.ect_1(), 1); + EXPECT_EQ(st.overall_outgoing_stats.ecn_count.ce(), 0); + EXPECT_EQ(st.overall_outgoing_stats.ecn_count.not_ect(), 0); + ++received_stats_count; + }); + nt2->GetStats([&](EmulatedNetworkStats st) { + EXPECT_EQ(st.overall_incoming_stats.packets_received, 1); + EXPECT_EQ(st.overall_outgoing_stats.packets_sent, 0); + EXPECT_EQ(st.overall_incoming_stats.ecn_count.ect_1(), 1); + EXPECT_EQ(st.overall_incoming_stats.ecn_count.ce(), 0); + EXPECT_EQ(st.overall_incoming_stats.ecn_count.not_ect(), 0); + ++received_stats_count; + }); + ASSERT_THAT(WaitUntil([&] { return received_stats_count.load(); }, Eq(2), + {.timeout = kStatsWaitTimeout, + .clock = network_manager.time_controller()}), + IsRtcOk()); + + SendTask(t1, [&] { delete s1; }); + SendTask(t2, [&] { delete s2; }); } TEST(NetworkEmulationManagerTest, DebugStatsCollectedInDebugMode) { NetworkEmulationManagerImpl network_manager( - TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDebug); + {.time_mode = TimeMode::kSimulated, + .stats_gathering_mode = EmulatedNetworkStatsGatheringMode::kDebug}); EmulatedNetworkNode* alice_node = network_manager.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); @@ -361,13 +489,13 @@ TEST(NetworkEmulationManagerTest, DebugStatsCollectedInDebugMode) { EmulatedNetworkManagerInterface* nt2 = network_manager.CreateEmulatedNetworkManagerInterface({bob_endpoint}); - rtc::Thread* t1 = nt1->network_thread(); - rtc::Thread* t2 = nt2->network_thread(); + Thread* t1 = nt1->network_thread(); + Thread* t2 = nt2->network_thread(); - rtc::CopyOnWriteBuffer data("Hello"); + CopyOnWriteBuffer data("Hello"); for (uint64_t j = 0; j < 2; j++) { - rtc::Socket* s1 = nullptr; - rtc::Socket* s2 = nullptr; + Socket* s1 = nullptr; + Socket* s2 = nullptr; SendTask(t1, [&] { s1 = t1->socketserver()->CreateSocket(AF_INET, SOCK_DGRAM); }); @@ -378,8 +506,8 @@ TEST(NetworkEmulationManagerTest, DebugStatsCollectedInDebugMode) { SocketReader r1(s1, t1); SocketReader r2(s2, t2); - rtc::SocketAddress a1(alice_endpoint->GetPeerLocalAddress(), 0); - rtc::SocketAddress a2(bob_endpoint->GetPeerLocalAddress(), 0); + SocketAddress a1(alice_endpoint->GetPeerLocalAddress(), 0); + SocketAddress a2(bob_endpoint->GetPeerLocalAddress(), 0); SendTask(t1, [&] { s1->Bind(a1); @@ -410,12 +538,12 @@ TEST(NetworkEmulationManagerTest, DebugStatsCollectedInDebugMode) { const int64_t single_packet_size = data.size() + kOverheadIpv4Udp; std::atomic received_stats_count{0}; nt1->GetStats([&](EmulatedNetworkStats st) { - rtc::IPAddress bob_ip = bob_endpoint->GetPeerLocalAddress(); - std::map source_st = + IPAddress bob_ip = bob_endpoint->GetPeerLocalAddress(); + std::map source_st = st.incoming_stats_per_source; ASSERT_EQ(source_st.size(), 1lu); - std::map dest_st = + std::map dest_st = st.outgoing_stats_per_destination; ASSERT_EQ(dest_st.size(), 1lu); @@ -436,14 +564,15 @@ TEST(NetworkEmulationManagerTest, DebugStatsCollectedInDebugMode) { received_stats_count++; }); - ASSERT_EQ_SIMULATED_WAIT(received_stats_count.load(), 1, - kStatsWaitTimeout.ms(), - *network_manager.time_controller()); + ASSERT_THAT(WaitUntil([&] { return received_stats_count.load(); }, Eq(1), + {.timeout = kStatsWaitTimeout, + .clock = network_manager.time_controller()}), + IsRtcOk()); } TEST(NetworkEmulationManagerTest, ThroughputStats) { NetworkEmulationManagerImpl network_manager( - TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); + 
{.time_mode = TimeMode::kRealTime}); EmulatedNetworkNode* alice_node = network_manager.CreateEmulatedNode( std::make_unique(BuiltInNetworkBehaviorConfig())); @@ -461,15 +590,15 @@ TEST(NetworkEmulationManagerTest, ThroughputStats) { EmulatedNetworkManagerInterface* nt2 = network_manager.CreateEmulatedNetworkManagerInterface({bob_endpoint}); - rtc::Thread* t1 = nt1->network_thread(); - rtc::Thread* t2 = nt2->network_thread(); + Thread* t1 = nt1->network_thread(); + Thread* t2 = nt2->network_thread(); constexpr int64_t kUdpPayloadSize = 100; constexpr int64_t kSinglePacketSize = kUdpPayloadSize + kOverheadIpv4Udp; - rtc::CopyOnWriteBuffer data(kUdpPayloadSize); + CopyOnWriteBuffer data(kUdpPayloadSize); - rtc::Socket* s1 = nullptr; - rtc::Socket* s2 = nullptr; + Socket* s1 = nullptr; + Socket* s2 = nullptr; SendTask(t1, [&] { s1 = t1->socketserver()->CreateSocket(AF_INET, SOCK_DGRAM); }); SendTask(t2, @@ -478,8 +607,8 @@ TEST(NetworkEmulationManagerTest, ThroughputStats) { SocketReader r1(s1, t1); SocketReader r2(s2, t2); - rtc::SocketAddress a1(alice_endpoint->GetPeerLocalAddress(), 0); - rtc::SocketAddress a2(bob_endpoint->GetPeerLocalAddress(), 0); + SocketAddress a1(alice_endpoint->GetPeerLocalAddress(), 0); + SocketAddress a2(bob_endpoint->GetPeerLocalAddress(), 0); SendTask(t1, [&] { s1->Bind(a1); @@ -514,9 +643,10 @@ TEST(NetworkEmulationManagerTest, ThroughputStats) { received_stats_count++; }); - ASSERT_EQ_SIMULATED_WAIT(received_stats_count.load(), 1, - kStatsWaitTimeout.ms(), - *network_manager.time_controller()); + ASSERT_THAT(WaitUntil([&] { return received_stats_count.load(); }, Eq(1), + {.timeout = kStatsWaitTimeout, + .clock = network_manager.time_controller()}), + IsRtcOk()); EXPECT_EQ(r1.ReceivedCount(), 11); EXPECT_EQ(r2.ReceivedCount(), 11); @@ -574,15 +704,15 @@ TEST_F(NetworkEmulationManagerThreeNodesRoutingTest, TEST(NetworkEmulationManagerTest, EndpointLoopback) { NetworkEmulationManagerImpl network_manager( - TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault); + {.time_mode = TimeMode::kSimulated}); auto endpoint = network_manager.CreateEndpoint(EmulatedEndpointConfig()); MockReceiver receiver; EXPECT_CALL(receiver, OnPacketReceived(::testing::_)).Times(1); ASSERT_EQ(endpoint->BindReceiver(80, &receiver), 80); - endpoint->SendPacket(rtc::SocketAddress(endpoint->GetPeerLocalAddress(), 80), - rtc::SocketAddress(endpoint->GetPeerLocalAddress(), 80), + endpoint->SendPacket(SocketAddress(endpoint->GetPeerLocalAddress(), 80), + SocketAddress(endpoint->GetPeerLocalAddress(), 80), "Hello"); network_manager.time_controller()->AdvanceTime(TimeDelta::Seconds(1)); } @@ -591,9 +721,9 @@ TEST(NetworkEmulationManagerTest, EndpointCanSendWithDifferentSourceIp) { constexpr uint32_t kEndpointIp = 0xC0A80011; // 192.168.0.17 constexpr uint32_t kSourceIp = 0xC0A80012; // 192.168.0.18 NetworkEmulationManagerImpl network_manager( - TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault); + {.time_mode = TimeMode::kSimulated}); EmulatedEndpointConfig endpoint_config; - endpoint_config.ip = rtc::IPAddress(kEndpointIp); + endpoint_config.ip = IPAddress(kEndpointIp); endpoint_config.allow_send_packet_with_different_source_ip = true; auto endpoint = network_manager.CreateEndpoint(endpoint_config); @@ -601,8 +731,8 @@ TEST(NetworkEmulationManagerTest, EndpointCanSendWithDifferentSourceIp) { EXPECT_CALL(receiver, OnPacketReceived(::testing::_)).Times(1); ASSERT_EQ(endpoint->BindReceiver(80, &receiver), 80); - endpoint->SendPacket(rtc::SocketAddress(kSourceIp, 80), - 
rtc::SocketAddress(endpoint->GetPeerLocalAddress(), 80), + endpoint->SendPacket(SocketAddress(kSourceIp, 80), + SocketAddress(endpoint->GetPeerLocalAddress(), 80), "Hello"); network_manager.time_controller()->AdvanceTime(TimeDelta::Seconds(1)); } @@ -612,11 +742,11 @@ TEST(NetworkEmulationManagerTest, constexpr uint32_t kDestEndpointIp = 0xC0A80011; // 192.168.0.17 constexpr uint32_t kDestIp = 0xC0A80012; // 192.168.0.18 NetworkEmulationManagerImpl network_manager( - TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault); + {.time_mode = TimeMode::kSimulated}); auto sender_endpoint = network_manager.CreateEndpoint(EmulatedEndpointConfig()); EmulatedEndpointConfig endpoint_config; - endpoint_config.ip = rtc::IPAddress(kDestEndpointIp); + endpoint_config.ip = IPAddress(kDestEndpointIp); endpoint_config.allow_receive_packets_with_different_dest_ip = true; auto receiver_endpoint = network_manager.CreateEndpoint(endpoint_config); @@ -629,14 +759,14 @@ TEST(NetworkEmulationManagerTest, receiver_endpoint); sender_endpoint->SendPacket( - rtc::SocketAddress(sender_endpoint->GetPeerLocalAddress(), 80), - rtc::SocketAddress(kDestIp, 80), "Hello"); + SocketAddress(sender_endpoint->GetPeerLocalAddress(), 80), + SocketAddress(kDestIp, 80), "Hello"); network_manager.time_controller()->AdvanceTime(TimeDelta::Seconds(1)); } TEST(NetworkEmulationManagerTURNTest, GetIceServerConfig) { NetworkEmulationManagerImpl network_manager( - TimeMode::kRealTime, EmulatedNetworkStatsGatheringMode::kDefault); + {.time_mode = TimeMode::kRealTime}); auto turn = network_manager.CreateTURNServer(EmulatedTURNServerConfig()); EXPECT_GT(turn->GetIceServerConfig().username.size(), 0u); @@ -647,8 +777,7 @@ TEST(NetworkEmulationManagerTURNTest, GetIceServerConfig) { } TEST(NetworkEmulationManagerTURNTest, ClientTraffic) { - NetworkEmulationManagerImpl emulation( - TimeMode::kSimulated, EmulatedNetworkStatsGatheringMode::kDefault); + NetworkEmulationManagerImpl emulation({.time_mode = TimeMode::kSimulated}); auto* ep = emulation.CreateEndpoint(EmulatedEndpointConfig()); auto* turn = emulation.CreateTURNServer(EmulatedTURNServerConfig()); auto* node = CreateEmulatedNodeWithDefaultBuiltInConfig(&emulation); @@ -659,18 +788,58 @@ TEST(NetworkEmulationManagerTURNTest, ClientTraffic) { int port = ep->BindReceiver(0, &recv).value(); // Construct a STUN BINDING. - cricket::StunMessage ping(cricket::STUN_BINDING_REQUEST); - rtc::ByteBufferWriter buf; + StunMessage ping(STUN_BINDING_REQUEST); + ByteBufferWriter buf; ping.Write(&buf); - rtc::CopyOnWriteBuffer packet(buf.Data(), buf.Length()); + CopyOnWriteBuffer packet(buf.Data(), buf.Length()); // We expect to get a ping reply. 
EXPECT_CALL(recv, OnPacketReceived(::testing::_)).Times(1); - ep->SendPacket(rtc::SocketAddress(ep->GetPeerLocalAddress(), port), + ep->SendPacket(SocketAddress(ep->GetPeerLocalAddress(), port), turn->GetClientEndpointAddress(), packet); emulation.time_controller()->AdvanceTime(TimeDelta::Seconds(1)); } +TEST(LinkEmulationTest, HandlesDeliveryTimeChangedCallback) { + constexpr uint32_t kEndpointIp = 0xC0A80011; // 192.168.0.17 + NetworkEmulationManagerImpl network_manager( + {.time_mode = TimeMode::kSimulated}); + auto mock_behaviour = + std::make_unique<::testing::NiceMock>(); + MockNetworkBehaviourInterface* mock_behaviour_ptr = mock_behaviour.get(); + absl::AnyInvocable delivery_time_changed_callback = nullptr; + TaskQueueBase* emulation_task_queue = nullptr; + EXPECT_CALL(*mock_behaviour_ptr, RegisterDeliveryTimeChangedCallback) + .WillOnce([&](absl::AnyInvocable callback) { + delivery_time_changed_callback = std::move(callback); + emulation_task_queue = TaskQueueBase::Current(); + }); + LinkEmulation* link = + network_manager.CreateEmulatedNode(std::move(mock_behaviour))->link(); + network_manager.time_controller()->AdvanceTime(TimeDelta::Zero()); + ASSERT_TRUE(delivery_time_changed_callback); + + EXPECT_CALL(*mock_behaviour_ptr, EnqueuePacket); + EXPECT_CALL(*mock_behaviour_ptr, NextDeliveryTimeUs) + .WillOnce(::testing::Return( + network_manager.time_controller()->GetClock()->TimeInMicroseconds() + + 10)); + link->OnPacketReceived(EmulatedIpPacket( + SocketAddress(kEndpointIp, 50), SocketAddress(kEndpointIp, 79), + CopyOnWriteBuffer(10), Timestamp::Millis(1))); + network_manager.time_controller()->AdvanceTime(TimeDelta::Zero()); + + // Test that NetworkBehaviour can reschedule time for delivery. When + // delivery_time_changed_callback is triggered, LinkEmulation re-query the + // next delivery time. + EXPECT_CALL(*mock_behaviour_ptr, NextDeliveryTimeUs) + .WillOnce(::testing::Return( + network_manager.time_controller()->GetClock()->TimeInMicroseconds() + + 20)); + emulation_task_queue->PostTask([&]() { delivery_time_changed_callback(); }); + network_manager.time_controller()->AdvanceTime(TimeDelta::Zero()); +} + } // namespace test } // namespace webrtc diff --git a/test/network/schedulable_network_behavior.cc b/test/network/schedulable_network_behavior.cc new file mode 100644 index 0000000000..114754dc64 --- /dev/null +++ b/test/network/schedulable_network_behavior.cc @@ -0,0 +1,147 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "test/network/schedulable_network_behavior.h" + +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/test/network_emulation/network_config_schedule.pb.h" +#include "api/test/simulated_network.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/checks.h" +#include "rtc_base/task_utils/repeating_task.h" +#include "system_wrappers/include/clock.h" +#include "test/network/simulated_network.h" + +namespace webrtc { + +namespace { + +using ::webrtc::BuiltInNetworkBehaviorConfig; + +void UpdateConfigFromSchedule( + const network_behaviour::NetworkConfigScheduleItem& schedule_item, + BuiltInNetworkBehaviorConfig& config) { + if (schedule_item.has_queue_length_packets()) { + config.queue_length_packets = schedule_item.queue_length_packets(); + } + if (schedule_item.has_queue_delay_ms()) { + config.queue_delay_ms = schedule_item.queue_delay_ms(); + } + if (schedule_item.has_link_capacity_kbps()) { + config.link_capacity = + DataRate::KilobitsPerSec(schedule_item.link_capacity_kbps()); + } + if (schedule_item.has_loss_percent()) { + config.loss_percent = schedule_item.loss_percent(); + } + if (schedule_item.has_delay_standard_deviation_ms()) { + config.delay_standard_deviation_ms = + schedule_item.delay_standard_deviation_ms(); + } + if (schedule_item.has_allow_reordering()) { + config.allow_reordering = schedule_item.allow_reordering(); + } + if (schedule_item.has_avg_burst_loss_length()) { + config.avg_burst_loss_length = schedule_item.avg_burst_loss_length(); + } + if (schedule_item.has_packet_overhead()) { + config.packet_overhead = schedule_item.packet_overhead(); + } +} + +BuiltInNetworkBehaviorConfig GetInitialConfig( + const network_behaviour::NetworkConfigSchedule& schedule) { + BuiltInNetworkBehaviorConfig config; + if (!schedule.item().empty()) { + UpdateConfigFromSchedule(schedule.item(0), config); + } + return config; +} + +} // namespace + +SchedulableNetworkBehavior::SchedulableNetworkBehavior( + network_behaviour::NetworkConfigSchedule schedule, + uint64_t random_seed, + webrtc::Clock& clock, + absl::AnyInvocable start_callback) + : SimulatedNetwork(GetInitialConfig(schedule), random_seed), + schedule_(std::move(schedule)), + start_condition_(std::move(start_callback)), + clock_(clock), + config_(GetInitialConfig(schedule_)) { + if (schedule_.item().size() > 1) { + next_schedule_index_ = 1; + } + sequence_checker_.Detach(); +} + +bool SchedulableNetworkBehavior::EnqueuePacket( + webrtc::PacketInFlightInfo packet_info) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (first_send_time_.IsInfinite() && + start_condition_(webrtc::Timestamp::Micros(packet_info.send_time_us))) { + first_send_time_ = webrtc::Timestamp::Micros(packet_info.send_time_us); + if (schedule_.item().size() > 1) { + RTC_CHECK_LT(next_schedule_index_, schedule_.item().size()); + webrtc::TimeDelta delay = + webrtc::TimeDelta::Millis(schedule_.item()[next_schedule_index_] + .time_since_first_sent_packet_ms()); + schedule_task_ = RepeatingTaskHandle::DelayedStart( + webrtc::TaskQueueBase::Current(), delay, + [this] { return UpdateConfigAndReschedule(); }); + } + } + return SimulatedNetwork::EnqueuePacket(packet_info); +} + +TimeDelta SchedulableNetworkBehavior::UpdateConfigAndReschedule() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + Timestamp reschedule_time = clock_.CurrentTime(); + RTC_CHECK_LT(next_schedule_index_, 
schedule_.item().size()); + + auto next_config = schedule_.item()[next_schedule_index_]; + UpdateConfigFromSchedule(next_config, config_); + SimulatedNetwork::SetConfig(config_, reschedule_time); + next_schedule_index_ = ++next_schedule_index_ % schedule_.item().size(); + webrtc::TimeDelta delay = webrtc::TimeDelta::Zero(); + webrtc::TimeDelta time_since_first_sent_packet = + reschedule_time - first_send_time_; + if (next_schedule_index_ != 0) { + delay = std::max(TimeDelta::Millis(schedule_.item()[next_schedule_index_] + .time_since_first_sent_packet_ms()) - + (time_since_first_sent_packet - wrap_time_delta_), + TimeDelta::Zero()); + } else if (!schedule_.has_repeat_schedule_after_last_ms()) { + // No more schedule items. + schedule_task_.Stop(); + return TimeDelta::Zero(); // This is ignored. + } else { + // Wrap around to the first schedule item. + wrap_time_delta_ += + TimeDelta::Millis(schedule_.repeat_schedule_after_last_ms()) + + TimeDelta::Millis(schedule_.item()[schedule_.item().size() - 1] + .time_since_first_sent_packet_ms()); + delay = + webrtc::TimeDelta::Millis(schedule_.repeat_schedule_after_last_ms()); + RTC_DCHECK_GE(delay, TimeDelta::Zero()); + } + + return delay; +} + +} // namespace webrtc diff --git a/test/network/schedulable_network_behavior.h b/test/network/schedulable_network_behavior.h new file mode 100644 index 0000000000..53cf05de94 --- /dev/null +++ b/test/network/schedulable_network_behavior.h @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef TEST_NETWORK_SCHEDULABLE_NETWORK_BEHAVIOR_H_ +#define TEST_NETWORK_SCHEDULABLE_NETWORK_BEHAVIOR_H_ + +#include + +#include "absl/functional/any_invocable.h" +#include "api/sequence_checker.h" +#include "api/test/network_emulation/network_config_schedule.pb.h" +#include "api/test/simulated_network.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/task_utils/repeating_task.h" +#include "rtc_base/thread_annotations.h" +#include "system_wrappers/include/clock.h" +#include "test/network/simulated_network.h" + +namespace webrtc { + +// Network behaviour implementation where parameters change over time as +// specified with a schedule proto. +class SchedulableNetworkBehavior : public SimulatedNetwork { + public: + SchedulableNetworkBehavior( + network_behaviour::NetworkConfigSchedule schedule, + uint64_t random_seed, + Clock& clock, + absl::AnyInvocable start_condition = + [](webrtc::Timestamp) { return true; }); + + bool EnqueuePacket(PacketInFlightInfo packet_info) override; + + private: + TimeDelta UpdateConfigAndReschedule(); + + SequenceChecker sequence_checker_; + const network_behaviour::NetworkConfigSchedule schedule_; + absl::AnyInvocable start_condition_ + RTC_GUARDED_BY(&sequence_checker_); + // Send time of the first packet enqueued after `start_condition_` return + // true. + Timestamp first_send_time_ RTC_GUARDED_BY(&sequence_checker_) = + Timestamp::MinusInfinity(); + + Clock& clock_ RTC_GUARDED_BY(&sequence_checker_); + BuiltInNetworkBehaviorConfig config_ RTC_GUARDED_BY(&sequence_checker_); + // Index of the next schedule item to apply. 
+ int next_schedule_index_ RTC_GUARDED_BY(&sequence_checker_) = 0; + // Total time from the first sent packet, until the last time the schedule + // repeat. + TimeDelta wrap_time_delta_ RTC_GUARDED_BY(&sequence_checker_) = + TimeDelta::Zero(); + RepeatingTaskHandle schedule_task_ RTC_GUARDED_BY(&sequence_checker_); +}; + +} // namespace webrtc + +#endif // TEST_NETWORK_SCHEDULABLE_NETWORK_BEHAVIOR_H_ diff --git a/test/network/schedulable_network_behavior_test.cc b/test/network/schedulable_network_behavior_test.cc new file mode 100644 index 0000000000..2108e0caa9 --- /dev/null +++ b/test/network/schedulable_network_behavior_test.cc @@ -0,0 +1,295 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "test/network/schedulable_network_behavior.h" + +#include +#include +#include + +#include "api/test/create_network_emulation_manager.h" +#include "api/test/network_emulation/network_config_schedule.pb.h" +#include "api/test/network_emulation_manager.h" +#include "api/test/simulated_network.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "system_wrappers/include/clock.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::Mock; +using ::testing::MockFunction; +using ::testing::Return; +using ::testing::Sequence; +using ::testing::SizeIs; + +constexpr uint64_t kRandomSeed = 1; + +class SchedulableNetworkBehaviorTestFixture { + public: + SchedulableNetworkBehaviorTestFixture() + : manager_(webrtc::CreateNetworkEmulationManager( + {.time_mode = TimeMode::kSimulated})) {} + + webrtc::Clock& clock() const { + return *manager_->time_controller()->GetClock(); + } + void AdvanceTime(webrtc::TimeDelta delta) { + manager_->time_controller()->AdvanceTime(delta); + } + void AdvanceTimeTo(int64_t timestamp_us) { + TimeDelta delta = Timestamp::Micros(timestamp_us) - TimeNow(); + ASSERT_GE(delta, TimeDelta::Zero()); + manager_->time_controller()->AdvanceTime(delta); + } + + webrtc::Timestamp TimeNow() const { + return manager_->time_controller()->GetClock()->CurrentTime(); + } + + private: + const std::unique_ptr manager_; +}; + +TEST(SchedulableNetworkBehaviorTest, NoSchedule) { + SchedulableNetworkBehaviorTestFixture fixture; + + network_behaviour::NetworkConfigSchedule schedule; + SchedulableNetworkBehavior network_behaviour(schedule, kRandomSeed, + fixture.clock()); + webrtc::Timestamp send_time = fixture.TimeNow(); + EXPECT_TRUE(network_behaviour.EnqueuePacket({/*size=*/1000 / 8, + /*send_time_us=*/send_time.us(), + /*packet_id=*/1})); + ASSERT_TRUE(network_behaviour.NextDeliveryTimeUs().has_value()); + fixture.AdvanceTimeTo(*network_behaviour.NextDeliveryTimeUs()); + EXPECT_THAT( + network_behaviour.DequeueDeliverablePackets(fixture.TimeNow().us()), + SizeIs(1)); +} + +TEST(SchedulableNetworkBehaviorTest, ScheduleWithoutUpdates) { + SchedulableNetworkBehaviorTestFixture fixture; + + network_behaviour::NetworkConfigSchedule schedule; + auto initial_config = schedule.add_item(); + initial_config->set_link_capacity_kbps(10); + initial_config->set_queue_delay_ms(70); + + SchedulableNetworkBehavior network_behaviour(schedule, kRandomSeed, + fixture.clock()); 
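+ // A SchedulableNetworkBehavior is normally installed as the behavior of an
+ // emulated node rather than driven directly; a sketch using the manager APIs
+ // exercised earlier in this patch (values are illustrative):
+ //   NetworkEmulationManagerImpl manager({.time_mode = TimeMode::kSimulated});
+ //   EmulatedNetworkNode* node = manager.CreateEmulatedNode(
+ //       std::make_unique<SchedulableNetworkBehavior>(
+ //           schedule, kRandomSeed, *manager.time_controller()->GetClock()));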
+ webrtc::Timestamp send_time = fixture.TimeNow(); + EXPECT_TRUE(network_behaviour.EnqueuePacket({/*size=*/1000 / 8, + /*send_time_us=*/send_time.us(), + /*packet_id=*/1})); + + // 1000 bits, on a 10kbps link should take 100ms + 70 extra. + // The network_behaviour at the time of writing this test needs two calls + // to NextDeliveryTimeUs to before the packet is delivered (one for the link + // capacity queue and one for the queue delay). + std::vector packet_delivery_infos; + while (packet_delivery_infos.empty()) { + ASSERT_TRUE(network_behaviour.NextDeliveryTimeUs().has_value()); + fixture.AdvanceTimeTo(*network_behaviour.NextDeliveryTimeUs()); + packet_delivery_infos = + network_behaviour.DequeueDeliverablePackets(fixture.TimeNow().us()); + } + EXPECT_EQ(fixture.TimeNow(), send_time + TimeDelta::Millis(170)); + ASSERT_THAT(packet_delivery_infos, SizeIs(1)); + EXPECT_EQ(packet_delivery_infos[0].packet_id, 1u); + EXPECT_EQ(packet_delivery_infos[0].receive_time_us, send_time.us() + 170'000); +} + +TEST(SchedulableNetworkBehaviorTest, + TriggersDeliveryTimeChangedCallbackOnScheduleIfPacketInLinkCapacityQueue) { + SchedulableNetworkBehaviorTestFixture fixture; + network_behaviour::NetworkConfigSchedule schedule; + auto initial_config = schedule.add_item(); + // A packet of size 1000 bits should take 100ms to send. + initial_config->set_link_capacity_kbps(10); + initial_config->set_queue_delay_ms(10); + auto updated_capacity = schedule.add_item(); + updated_capacity->set_time_since_first_sent_packet_ms(50); + // A packet of size 1000 bits should take 10ms to send. But since "half" the + // first packet has passed the narrow section, it should take 50ms + 500/100 = + // 55ms. + updated_capacity->set_link_capacity_kbps(100); + + SchedulableNetworkBehavior network_behaviour(schedule, kRandomSeed, + fixture.clock()); + MockFunction delivery_time_changed_callback; + network_behaviour.RegisterDeliveryTimeChangedCallback( + delivery_time_changed_callback.AsStdFunction()); + + webrtc::Timestamp first_packet_send_time = fixture.TimeNow(); + EXPECT_CALL(delivery_time_changed_callback, Call).WillOnce([&]() { + EXPECT_EQ(fixture.TimeNow(), + first_packet_send_time + TimeDelta::Millis(50)); + ASSERT_TRUE(network_behaviour.NextDeliveryTimeUs().has_value()); + }); + EXPECT_TRUE(network_behaviour.EnqueuePacket( + {/*size=*/1000 / 8, + /*send_time_us=*/first_packet_send_time.us(), + /*packet_id=*/1})); + fixture.AdvanceTime( + TimeDelta::Millis(updated_capacity->time_since_first_sent_packet_ms())); + Mock::VerifyAndClearExpectations(&delivery_time_changed_callback); + ASSERT_TRUE(network_behaviour.NextDeliveryTimeUs().has_value()); + fixture.AdvanceTime( + TimeDelta::Micros(*network_behaviour.NextDeliveryTimeUs())); + std::vector dequeued_packets = + network_behaviour.DequeueDeliverablePackets(fixture.TimeNow().us()); + ASSERT_FALSE(dequeued_packets.empty()); + EXPECT_EQ(dequeued_packets[0].receive_time_us, + (first_packet_send_time + TimeDelta::Millis(55) + + /*queue_delay=*/TimeDelta::Millis(10)) + .us()); +} + +TEST(SchedulableNetworkBehaviorTest, ScheduleStartedWhenStartConditionTrue) { + SchedulableNetworkBehaviorTestFixture fixture; + network_behaviour::NetworkConfigSchedule schedule; + auto initial_config = schedule.add_item(); + initial_config->set_link_capacity_kbps(0); + auto item = schedule.add_item(); + item->set_time_since_first_sent_packet_ms(1); + item->set_link_capacity_kbps(1000000); + + MockFunction start_condition; + webrtc::Timestamp first_packet_send_time = fixture.TimeNow(); + 
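+ // The start condition is invoked with the send time of each enqueued packet
+ // until it returns true; the schedule clock starts at that first accepted
+ // packet. A plain lambda works as well as a MockFunction here, e.g.
+ //   [](Timestamp send_time) { return send_time >= Timestamp::Seconds(5); }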
webrtc::Timestamp second_packet_send_time = + fixture.TimeNow() + TimeDelta::Millis(100); + Sequence s; + EXPECT_CALL(start_condition, Call(first_packet_send_time)) + .InSequence(s) + .WillOnce(Return(false)); + // Expect schedule to start when the second packet is sent. + EXPECT_CALL(start_condition, Call(second_packet_send_time)) + .InSequence(s) + .WillOnce(Return(true)); + SchedulableNetworkBehavior network_behaviour( + schedule, kRandomSeed, fixture.clock(), start_condition.AsStdFunction()); + + EXPECT_TRUE(network_behaviour.EnqueuePacket( + {/*size=*/1000 / 8, + /*send_time_us=*/first_packet_send_time.us(), + /*packet_id=*/1})); + EXPECT_FALSE(network_behaviour.NextDeliveryTimeUs().has_value()); + // Move past the normal schedule change time. Still don't expect a delivery + // time. + fixture.AdvanceTime(TimeDelta::Millis(100)); + EXPECT_FALSE(network_behaviour.NextDeliveryTimeUs().has_value()); + + EXPECT_TRUE(network_behaviour.EnqueuePacket( + {/*size=*/1000 / 8, + /*send_time_us=*/second_packet_send_time.us(), + /*packet_id=*/2})); + + EXPECT_FALSE(network_behaviour.NextDeliveryTimeUs().has_value()); + fixture.AdvanceTime(TimeDelta::Millis(1)); + EXPECT_TRUE(network_behaviour.NextDeliveryTimeUs().has_value()); +} + +TEST(SchedulableNetworkBehaviorTest, ScheduleWithRepeat) { + SchedulableNetworkBehaviorTestFixture fixture; + network_behaviour::NetworkConfigSchedule schedule; + auto initial_config = schedule.add_item(); + // A packet of size 1000 bits should take 100ms to send. + initial_config->set_link_capacity_kbps(10); + auto updated_capacity = schedule.add_item(); + updated_capacity->set_time_since_first_sent_packet_ms(150); + // A packet of size 1000 bits should take 10ms to send. + updated_capacity->set_link_capacity_kbps(100); + // A packet of size 1000 bits, scheduled 200ms after the last update to the + // config should again take 100ms to send. + schedule.set_repeat_schedule_after_last_ms(200); + + SchedulableNetworkBehavior network_behaviour(schedule, kRandomSeed, + fixture.clock()); + + webrtc::Timestamp first_packet_send_time = fixture.TimeNow(); + EXPECT_TRUE(network_behaviour.EnqueuePacket( + {/*size=*/1000 / 8, + /*send_time_us=*/first_packet_send_time.us(), + /*packet_id=*/1})); + ASSERT_TRUE(network_behaviour.NextDeliveryTimeUs().has_value()); + EXPECT_EQ(*network_behaviour.NextDeliveryTimeUs(), + fixture.TimeNow().us() + TimeDelta::Millis(100).us()); + fixture.AdvanceTimeTo(*network_behaviour.NextDeliveryTimeUs()); + EXPECT_THAT( + network_behaviour.DequeueDeliverablePackets(fixture.TimeNow().us()), + SizeIs(1)); + fixture.AdvanceTime( + TimeDelta::Millis(updated_capacity->time_since_first_sent_packet_ms() + + schedule.repeat_schedule_after_last_ms() - + /*time already advanced*/ 100)); + // Schedule should be repeated. + // A packet of size 1000 bits should take 100ms to send. + EXPECT_TRUE( + network_behaviour.EnqueuePacket({/*size=*/1000 / 8, + /*send_time_us=*/fixture.TimeNow().us(), + /*packet_id=*/2})); + ASSERT_TRUE(network_behaviour.NextDeliveryTimeUs().has_value()); + EXPECT_EQ(*network_behaviour.NextDeliveryTimeUs(), + fixture.TimeNow().us() + TimeDelta::Millis(100).us()); +} + +TEST(SchedulableNetworkBehaviorTest, ScheduleWithoutRepeat) { + SchedulableNetworkBehaviorTestFixture fixture; + network_behaviour::NetworkConfigSchedule schedule; + auto initial_config = schedule.add_item(); + // A packet of size 1000 bits should take 100ms to send.
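+ // (Serialization time through the narrow section: 1000 bits at 10 kbps is
+ //  1000 / 10'000 s = 100 ms, as computed by CalculateArrivalTime in
+ //  simulated_network.cc.)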
+ initial_config->set_link_capacity_kbps(10); + auto updated_capacity = schedule.add_item(); + updated_capacity->set_time_since_first_sent_packet_ms(150); + // A packet of size 1000 bits should take 10ms to send. + updated_capacity->set_link_capacity_kbps(100); + + SchedulableNetworkBehavior network_behaviour(schedule, kRandomSeed, + fixture.clock()); + + webrtc::Timestamp first_packet_send_time = fixture.TimeNow(); + EXPECT_TRUE(network_behaviour.EnqueuePacket( + {/*size=*/1000 / 8, + /*send_time_us=*/first_packet_send_time.us(), + /*packet_id=*/1})); + ASSERT_TRUE(network_behaviour.NextDeliveryTimeUs().has_value()); + EXPECT_EQ(*network_behaviour.NextDeliveryTimeUs(), + fixture.TimeNow().us() + TimeDelta::Millis(100).us()); + fixture.AdvanceTimeTo(*network_behaviour.NextDeliveryTimeUs()); + EXPECT_THAT( + network_behaviour.DequeueDeliverablePackets(fixture.TimeNow().us()), + SizeIs(1)); + // Advance time to when the updated capacity should be in effect and add one + // minute. The updated capacity should still be in effect. + fixture.AdvanceTime( + TimeDelta::Millis(updated_capacity->time_since_first_sent_packet_ms() - + /*time already advanced*/ 100) + + TimeDelta::Minutes(1)); + + // Schedule should not be repeated. + // A packet of size 1000 bits should take 10ms to send. + EXPECT_TRUE( + network_behaviour.EnqueuePacket({/*size=*/1000 / 8, + /*send_time_us=*/fixture.TimeNow().us(), + /*packet_id=*/2})); + ASSERT_TRUE(network_behaviour.NextDeliveryTimeUs().has_value()); + EXPECT_EQ(*network_behaviour.NextDeliveryTimeUs(), + fixture.TimeNow().us() + TimeDelta::Millis(10).us()); + fixture.AdvanceTimeTo(*network_behaviour.NextDeliveryTimeUs()); + EXPECT_THAT( + network_behaviour.DequeueDeliverablePackets(fixture.TimeNow().us()), + SizeIs(1)); +} + +} // namespace +} // namespace webrtc diff --git a/call/simulated_network.cc b/test/network/simulated_network.cc similarity index 50% rename from call/simulated_network.cc rename to test/network/simulated_network.cc index 8f9d76dfe3..da0751f048 100644 --- a/call/simulated_network.cc +++ b/test/network/simulated_network.cc @@ -8,47 +8,56 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "call/simulated_network.h" +#include "test/network/simulated_network.h" #include #include #include +#include +#include #include +#include +#include "absl/functional/any_invocable.h" +#include "api/test/simulated_network.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "rtc_base/checks.h" +#include "rtc_base/race_checker.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { namespace { -// Calculate the time (in microseconds) that takes to send N `bits` on a +// Calculate the time that it takes to send N `bits` on a // network with link capacity equal to `capacity_kbps` starting at time -// `start_time_us`. -int64_t CalculateArrivalTimeUs(int64_t start_time_us, +// `start_time`. +Timestamp CalculateArrivalTime(Timestamp start_time, int64_t bits, - int capacity_kbps) { - // If capacity is 0, the link capacity is assumed to be infinite. - if (capacity_kbps == 0) { - return start_time_us; + DataRate capacity) { + if (capacity.IsInfinite()) { + return start_time; + } + if (capacity.IsZero()) { + return Timestamp::PlusInfinity(); + } + + // Adding `capacity - 1` to the numerator rounds the extra delay caused by // capacity constraints up to an integral microsecond.
Sending 0 bits takes 0 // extra time, while sending 1 bit gets rounded up to 1 (the multiplication by // 1000 is because capacity is in kbps). // The factor 1000 comes from 10^6 / 10^3, where 10^6 is due to the time unit // being us and 10^3 is due to the rate unit being kbps. - return start_time_us + ((1000 * bits + capacity_kbps - 1) / capacity_kbps); + return start_time + TimeDelta::Micros((1000 * bits + capacity.kbps() - 1) / + capacity.kbps()); } } // namespace SimulatedNetwork::SimulatedNetwork(Config config, uint64_t random_seed) - : random_(random_seed), - bursting_(false), - last_enqueue_time_us_(0), - last_capacity_link_exit_time_(0) { + : random_(random_seed), bursting_(false), last_enqueue_time_us_(0) { SetConfig(config); } @@ -57,6 +66,7 @@ SimulatedNetwork::~SimulatedNetwork() = default; void SimulatedNetwork::SetConfig(const Config& config) { MutexLock lock(&config_lock_); config_state_.config = config; // Shallow copy of the struct. + double prob_loss = config.loss_percent / 100.0; if (config_state_.config.avg_burst_loss_length == -1) { // Uniform loss @@ -79,6 +89,30 @@ void SimulatedNetwork::SetConfig(const Config& config) { } } +void SimulatedNetwork::SetConfig(const BuiltInNetworkBehaviorConfig& new_config, + Timestamp config_update_time) { + RTC_DCHECK_RUNS_SERIALIZED(&process_checker_); + + if (!capacity_link_.empty()) { + // Calculate and update how large a portion of the packet first in the + // capacity link is left to send at time `config_update_time`. + const BuiltInNetworkBehaviorConfig& current_config = + GetConfigState().config; + TimeDelta duration_with_current_config = + config_update_time - capacity_link_.front().last_update_time; + RTC_DCHECK_GE(duration_with_current_config, TimeDelta::Zero()); + capacity_link_.front().bits_left_to_send -= std::min( + duration_with_current_config.ms() * current_config.link_capacity.kbps(), + capacity_link_.front().bits_left_to_send); + capacity_link_.front().last_update_time = config_update_time; + } + SetConfig(new_config); + UpdateCapacityQueue(GetConfigState(), config_update_time); + if (UpdateNextProcessTime() && next_process_time_changed_callback_) { + next_process_time_changed_callback_(); + } +} + void SimulatedNetwork::UpdateConfig( std::function config_modifier) { MutexLock lock(&config_lock_); @@ -117,100 +151,114 @@ bool SimulatedNetwork::EnqueuePacket(PacketInFlightInfo packet) { return false; } - // If the packet has been sent before the previous packet in the network left - // the capacity queue, let's ensure the new packet will start its trip in the - // network after the last bit of the previous packet has left it. - int64_t packet_send_time_us = packet.send_time_us; - if (!capacity_link_.empty()) { - packet_send_time_us = - std::max(packet_send_time_us, capacity_link_.back().arrival_time_us); - } - capacity_link_.push({.packet = packet, - .arrival_time_us = CalculateArrivalTimeUs( - packet_send_time_us, packet.size * 8, - state.config.link_capacity_kbps)}); - - // Only update `next_process_time_us_` if not already set (if set, there is no - // way that a new packet will make the `next_process_time_us_` change). - if (!next_process_time_us_) { + // Note that arrival time will be updated when previous packets are dequeued + // from the capacity link. + // A packet cannot enter the narrow section before the last packet has exited. + Timestamp enqueue_time = Timestamp::Micros(packet.send_time_us); + Timestamp arrival_time = + capacity_link_.empty() + ? 
CalculateArrivalTime( + std::max(enqueue_time, last_capacity_link_exit_time_), + packet.size * 8, state.config.link_capacity) + : Timestamp::PlusInfinity(); + capacity_link_.push( + {.packet = packet, + .last_update_time = enqueue_time, + .bits_left_to_send = 8 * static_cast(packet.size), + .arrival_time = arrival_time}); + + // Only update `next_process_time_` if not already set. Otherwise, + // next_process_time_ is calculated when a packet is dequeued. Note that this + // means that the newly enqueued packet risks having an arrival time before + // `next_process_time_` if packet reordering is allowed and + // config.delay_standard_deviation_ms is set. + // TODO(bugs.webrtc.org/14525): Consider preventing this. + if (next_process_time_.IsInfinite() && arrival_time.IsFinite()) { RTC_DCHECK_EQ(capacity_link_.size(), 1); - next_process_time_us_ = capacity_link_.front().arrival_time_us; + next_process_time_ = arrival_time; } last_enqueue_time_us_ = packet.send_time_us; return true; } -absl::optional SimulatedNetwork::NextDeliveryTimeUs() const { +std::optional SimulatedNetwork::NextDeliveryTimeUs() const { RTC_DCHECK_RUNS_SERIALIZED(&process_checker_); - return next_process_time_us_; + if (next_process_time_.IsFinite()) { + return next_process_time_.us(); + } + return std::nullopt; } void SimulatedNetwork::UpdateCapacityQueue(ConfigState state, - int64_t time_now_us) { - // If there is at least one packet in the `capacity_link_`, let's update its - // arrival time to take into account changes in the network configuration - // since the last call to UpdateCapacityQueue. + Timestamp time_now) { + // Only the first packet in capacity_link_ has a calculated arrival time + // (when the packet leaves the narrow section), and a time when it entered the narrow + // section. Also, the configuration may have changed. Thus we need to + // calculate the arrival time again before maybe moving the packet to the + // delay link. if (!capacity_link_.empty()) { - capacity_link_.front().arrival_time_us = CalculateArrivalTimeUs( - std::max(capacity_link_.front().packet.send_time_us, - last_capacity_link_exit_time_), - capacity_link_.front().packet.size * 8, - state.config.link_capacity_kbps); + capacity_link_.front().last_update_time = std::max( + capacity_link_.front().last_update_time, last_capacity_link_exit_time_); + capacity_link_.front().arrival_time = CalculateArrivalTime( + capacity_link_.front().last_update_time, + capacity_link_.front().bits_left_to_send, state.config.link_capacity); } // The capacity link is empty or the first packet is not expected to exit yet. if (capacity_link_.empty() || - time_now_us < capacity_link_.front().arrival_time_us) { + time_now < capacity_link_.front().arrival_time) { return; } bool reorder_packets = false; do { - // Time to get this packet (the original or just updated arrival_time_us is + // Time to get this packet (the original or just updated arrival_time is // smaller or equal to time_now_us). PacketInfo packet = capacity_link_.front(); + RTC_DCHECK(packet.arrival_time.IsFinite()); capacity_link_.pop(); // If the network is paused, the pause will be implemented as an extra delay // to be spent in the `delay_link_` queue.
- if (state.pause_transmission_until_us > packet.arrival_time_us) { - packet.arrival_time_us = state.pause_transmission_until_us; + if (state.pause_transmission_until_us > packet.arrival_time.us()) { + packet.arrival_time = + Timestamp::Micros(state.pause_transmission_until_us); } // Store the original arrival time, before applying packet loss or extra // delay. This is needed to know when it is the first available time the // next packet in the `capacity_link_` queue can start transmitting. - last_capacity_link_exit_time_ = packet.arrival_time_us; + last_capacity_link_exit_time_ = packet.arrival_time; // Drop packets at an average rate of `state.config.loss_percent` with // and average loss burst length of `state.config.avg_burst_loss_length`. if ((bursting_ && random_.Rand() < state.prob_loss_bursting) || (!bursting_ && random_.Rand() < state.prob_start_bursting)) { bursting_ = true; - packet.arrival_time_us = PacketDeliveryInfo::kNotReceived; + packet.arrival_time = Timestamp::MinusInfinity(); } else { // If packets are not dropped, apply extra delay as configured. bursting_ = false; - int64_t arrival_time_jitter_us = std::max( + TimeDelta arrival_time_jitter = TimeDelta::Micros(std::max( random_.Gaussian(state.config.queue_delay_ms * 1000, state.config.delay_standard_deviation_ms * 1000), - 0.0); + 0.0)); // If reordering is not allowed then adjust arrival_time_jitter // to make sure all packets are sent in order. - int64_t last_arrival_time_us = - delay_link_.empty() ? -1 : delay_link_.back().arrival_time_us; + Timestamp last_arrival_time = delay_link_.empty() + ? Timestamp::MinusInfinity() + : delay_link_.back().arrival_time; if (!state.config.allow_reordering && !delay_link_.empty() && - packet.arrival_time_us + arrival_time_jitter_us < - last_arrival_time_us) { - arrival_time_jitter_us = last_arrival_time_us - packet.arrival_time_us; + packet.arrival_time + arrival_time_jitter < last_arrival_time) { + arrival_time_jitter = last_arrival_time - packet.arrival_time; } - packet.arrival_time_us += arrival_time_jitter_us; + packet.arrival_time += arrival_time_jitter; // Optimization: Schedule a reorder only when a packet will exit before // the one in front. - if (last_arrival_time_us > packet.arrival_time_us) { + if (last_arrival_time > packet.arrival_time) { reorder_packets = true; } } @@ -221,23 +269,23 @@ void SimulatedNetwork::UpdateCapacityQueue(ConfigState state, break; } // If instead there is another packet in the `capacity_link_` queue, let's - // calculate its arrival_time_us based on the latest config (which might + // calculate its arrival_time based on the latest config (which might // have been changed since it was enqueued). - int64_t next_start = std::max(last_capacity_link_exit_time_, - capacity_link_.front().packet.send_time_us); - capacity_link_.front().arrival_time_us = CalculateArrivalTimeUs( - next_start, capacity_link_.front().packet.size * 8, - state.config.link_capacity_kbps); + Timestamp next_start = std::max(last_capacity_link_exit_time_, + capacity_link_.front().last_update_time); + capacity_link_.front().arrival_time = + CalculateArrivalTime(next_start, capacity_link_.front().packet.size * 8, + state.config.link_capacity); // And if the next packet in the queue needs to exit, let's dequeue it. 
- } while (capacity_link_.front().arrival_time_us <= time_now_us); + } while (capacity_link_.front().arrival_time <= time_now); if (state.config.allow_reordering && reorder_packets) { // Packets arrived out of order and since the network config allows - // reordering, let's sort them per arrival_time_us to make so they will also + // reordering, let's sort them per arrival_time to make so they will also // be delivered out of order. std::stable_sort(delay_link_.begin(), delay_link_.end(), [](const PacketInfo& p1, const PacketInfo& p2) { - return p1.arrival_time_us < p2.arrival_time_us; + return p1.arrival_time < p2.arrival_time; }); } } @@ -250,27 +298,49 @@ SimulatedNetwork::ConfigState SimulatedNetwork::GetConfigState() const { std::vector SimulatedNetwork::DequeueDeliverablePackets( int64_t receive_time_us) { RTC_DCHECK_RUNS_SERIALIZED(&process_checker_); + Timestamp receive_time = Timestamp::Micros(receive_time_us); - UpdateCapacityQueue(GetConfigState(), receive_time_us); + UpdateCapacityQueue(GetConfigState(), receive_time); std::vector packets_to_deliver; // Check the extra delay queue. while (!delay_link_.empty() && - receive_time_us >= delay_link_.front().arrival_time_us) { + receive_time >= delay_link_.front().arrival_time) { PacketInfo packet_info = delay_link_.front(); - packets_to_deliver.emplace_back( - PacketDeliveryInfo(packet_info.packet, packet_info.arrival_time_us)); + packets_to_deliver.emplace_back(PacketDeliveryInfo( + packet_info.packet, packet_info.arrival_time.IsFinite() + ? packet_info.arrival_time.us() + : PacketDeliveryInfo::kNotReceived)); delay_link_.pop_front(); } + // There is no need to invoke `next_process_time_changed_callback_` here since + // it is expected that the user of NetworkBehaviorInterface calls + // NextDeliveryTimeUs after DequeueDeliverablePackets. See + // NetworkBehaviorInterface. + UpdateNextProcessTime(); + return packets_to_deliver; +} - if (!delay_link_.empty()) { - next_process_time_us_ = delay_link_.front().arrival_time_us; - } else if (!capacity_link_.empty()) { - next_process_time_us_ = capacity_link_.front().arrival_time_us; - } else { - next_process_time_us_.reset(); +bool SimulatedNetwork::UpdateNextProcessTime() { + Timestamp next_process_time = next_process_time_; + + next_process_time_ = Timestamp::PlusInfinity(); + for (const PacketInfo& packet : delay_link_) { + if (packet.arrival_time.IsFinite()) { + next_process_time_ = packet.arrival_time; + break; + } } - return packets_to_deliver; + if (next_process_time_.IsInfinite() && !capacity_link_.empty()) { + next_process_time_ = capacity_link_.front().arrival_time; + } + return next_process_time != next_process_time_; +} + +void SimulatedNetwork::RegisterDeliveryTimeChangedCallback( + absl::AnyInvocable callback) { + RTC_DCHECK_RUNS_SERIALIZED(&process_checker_); + next_process_time_changed_callback_ = std::move(callback); } } // namespace webrtc diff --git a/test/network/simulated_network.h b/test/network/simulated_network.h new file mode 100644 index 0000000000..7abf7edca8 --- /dev/null +++ b/test/network/simulated_network.h @@ -0,0 +1,162 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef TEST_NETWORK_SIMULATED_NETWORK_H_ +#define TEST_NETWORK_SIMULATED_NETWORK_H_ + +#include + +#include +#include +#include +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "api/sequence_checker.h" +#include "api/test/simulated_network.h" +#include "api/units/timestamp.h" +#include "rtc_base/race_checker.h" +#include "rtc_base/random.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/rtc_export.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +// Class simulating a network link. +// +// This is a basic implementation of NetworkBehaviorInterface that supports: +// - Packet loss +// - Capacity delay: Delay caused by a narrow section that only allows one +// packet through at a time with a limited capacity. +// - Extra delay with or without packet reordering +// - Packet overhead +// - Queue max capacity +class RTC_EXPORT SimulatedNetwork : public SimulatedNetworkInterface { + public: + using Config = BuiltInNetworkBehaviorConfig; + explicit SimulatedNetwork(Config config, uint64_t random_seed = 1); + ~SimulatedNetwork() override; + + // Sets a new configuration. This will affect packets that will be sent with + // EnqueuePacket but also packets in the network that have not left the + // network emulation. Packets that are ready to be retrieved by + // DequeueDeliverablePackets are not affected by the new configuration. + // This method can be invoked directly by tests on any thread/sequence, but is + // less accurate than the version with timestamp since changes to the + // configuration do not take effect until the time returned by + // NextDeliveryTimeUs has passed. + void SetConfig(const Config& config) override; + // Updates the configuration at a specific time. + // Note that packets that have already passed the narrow section constrained + // by link capacity will not be affected by the change. If packet re-ordering + // is not allowed, packets with new shorter queue delays will arrive + // immediately after packets with the old, longer queue delays. Must be + // invoked on the same sequence as other methods in NetworkBehaviorInterface. + void SetConfig(const BuiltInNetworkBehaviorConfig& config, + Timestamp config_update_time); + + void UpdateConfig(std::function + config_modifier) override; + void PauseTransmissionUntil(int64_t until_us) override; + + // NetworkBehaviorInterface + bool EnqueuePacket(PacketInFlightInfo packet) override; + std::vector DequeueDeliverablePackets( + int64_t receive_time_us) override; + + std::optional NextDeliveryTimeUs() const override; + void RegisterDeliveryTimeChangedCallback( + absl::AnyInvocable callback) override; + + private: + struct PacketInfo { + PacketInFlightInfo packet; + // Time the packet was last updated by the capacity link. + Timestamp last_update_time; + // Size of the packet left to send through the capacity link. May differ + // from the packet size if the link capacity changes while the packet is in + // the capacity link. + int64_t bits_left_to_send; + // Time when the packet has left (or will leave) the network. + Timestamp arrival_time; + }; + // Contains current configuration state. + struct ConfigState { + // Static link configuration. + Config config; + // The probability to drop the packet if we are currently dropping a + // burst of packets + double prob_loss_bursting; + // The probability to drop a burst of packets. + double prob_start_bursting; + // Used for temporary delay spikes.
+ int64_t pause_transmission_until_us = 0; + }; + + // Calculates next_process_time_. Returns true if changed. + bool UpdateNextProcessTime() RTC_RUN_ON(&process_checker_); + // Moves packets from capacity- to delay link. + // If `previous_config` is set, it is the config that was used until + // `time_now_us` + void UpdateCapacityQueue(ConfigState state, Timestamp time_now) + RTC_RUN_ON(&process_checker_); + ConfigState GetConfigState() const; + + mutable Mutex config_lock_; + + // Guards the data structures involved in delay and loss processing, such as + // the packet queues. + RaceChecker process_checker_; + // Models the capacity of the network by rejecting packets if the queue is + // full and keeping them in the queue until they are ready to exit (according + // to the link capacity, which cannot be violated, e.g. a 1 kbps link will + // only be able to deliver 1000 bits per second). + // + // Invariant: + // The head of the `capacity_link_` has arrival_time correctly set to the + // time when the packet is supposed to be delivered (without accounting for + // potential packet loss or potential extra delay and without accounting for a + // new configuration of the network, which requires a re-computation of the + // arrival_time). + std::queue capacity_link_ RTC_GUARDED_BY(process_checker_); + // Models the extra delay of the network (see `queue_delay_ms` + // and `delay_standard_deviation_ms` in BuiltInNetworkBehaviorConfig), packets + // in the `delay_link_` have technically already left the network and don't + // use its capacity but they are not delivered yet. + std::deque delay_link_ RTC_GUARDED_BY(process_checker_); + // Represents the next moment in time when the network is supposed to deliver + // packets to the client (either by pulling them from `delay_link_` or + // `capacity_link_` or both). + Timestamp next_process_time_ RTC_GUARDED_BY(process_checker_) = + Timestamp::PlusInfinity(); + absl::AnyInvocable next_process_time_changed_callback_ + RTC_GUARDED_BY(process_checker_) = nullptr; + + ConfigState config_state_ RTC_GUARDED_BY(config_lock_); + + Random random_ RTC_GUARDED_BY(process_checker_); + // Are we currently dropping a burst of packets? + bool bursting_; + + // The send time of the last enqueued packet; this is only used to check that + // the send time of enqueued packets is monotonically increasing. + int64_t last_enqueue_time_us_; + + // The last time a packet left the capacity_link_ (used to enforce + // the capacity of the link and avoid packets starting to get sent before + // the link is free). + Timestamp last_capacity_link_exit_time_ = Timestamp::MinusInfinity(); +}; + +} // namespace webrtc + +#endif // TEST_NETWORK_SIMULATED_NETWORK_H_ diff --git a/call/simulated_network_unittest.cc b/test/network/simulated_network_unittest.cc similarity index 66% rename from call/simulated_network_unittest.cc rename to test/network/simulated_network_unittest.cc index 825dd6d065..9c59ded933 100644 --- a/call/simulated_network_unittest.cc +++ b/test/network/simulated_network_unittest.cc @@ -7,18 +7,17 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree.
*/ -#include "call/simulated_network.h" +#include "test/network/simulated_network.h" -#include -#include +#include +#include #include -#include #include -#include "absl/algorithm/container.h" #include "api/test/simulated_network.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "test/gmock.h" #include "test/gtest.h" @@ -26,6 +25,8 @@ namespace webrtc { namespace { using ::testing::ElementsAre; +using ::testing::MockFunction; +using ::testing::SizeIs; PacketInFlightInfo PacketWithSize(size_t size) { return PacketInFlightInfo(/*size=*/size, /*send_time_us=*/0, /*packet_id=*/1); @@ -33,7 +34,7 @@ PacketInFlightInfo PacketWithSize(size_t size) { TEST(SimulatedNetworkTest, NextDeliveryTimeIsUnknownOnEmptyNetwork) { SimulatedNetwork network = SimulatedNetwork({}); - EXPECT_EQ(network.NextDeliveryTimeUs(), absl::nullopt); + EXPECT_EQ(network.NextDeliveryTimeUs(), std::nullopt); } TEST(SimulatedNetworkTest, EnqueueFirstPacketOnNetworkWithInfiniteCapacity) { @@ -48,7 +49,8 @@ TEST(SimulatedNetworkTest, EnqueueFirstPacketOnNetworkWithInfiniteCapacity) { TEST(SimulatedNetworkTest, EnqueueFirstPacketOnNetworkWithLimitedCapacity) { // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity // should be ready to exit the network in 1 second. - SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + SimulatedNetwork network = + SimulatedNetwork({.link_capacity = DataRate::KilobitsPerSec(1)}); ASSERT_TRUE(network.EnqueuePacket(PacketWithSize(125))); EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(1).us()); @@ -58,7 +60,8 @@ TEST(SimulatedNetworkTest, EnqueuePacketsButNextDeliveryIsBasedOnFirstEnqueuedPacket) { // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity // should be ready to exit the network in 1 second. - SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + SimulatedNetwork network = + SimulatedNetwork({.link_capacity = DataRate::KilobitsPerSec(1)}); ASSERT_TRUE(network.EnqueuePacket( PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1))); EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(1).us()); @@ -79,7 +82,8 @@ TEST(SimulatedNetworkTest, TEST(SimulatedNetworkTest, EnqueueFailsWhenQueueLengthIsReached) { SimulatedNetwork network = - SimulatedNetwork({.queue_length_packets = 1, .link_capacity_kbps = 1}); + SimulatedNetwork({.queue_length_packets = 1, + .link_capacity = DataRate::KilobitsPerSec(1)}); ASSERT_TRUE(network.EnqueuePacket( PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1))); @@ -106,8 +110,8 @@ TEST(SimulatedNetworkTest, PacketOverhead) { // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity // should be ready to exit the network in 1 second, but since there is an // overhead per packet of 125 bytes, it will exit the network after 2 seconds. - SimulatedNetwork network = - SimulatedNetwork({.link_capacity_kbps = 1, .packet_overhead = 125}); + SimulatedNetwork network = SimulatedNetwork( + {.link_capacity = DataRate::KilobitsPerSec(1), .packet_overhead = 125}); ASSERT_TRUE(network.EnqueuePacket(PacketWithSize(125))); EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(2).us()); @@ -117,7 +121,8 @@ TEST(SimulatedNetworkTest, DequeueDeliverablePacketsLeavesPacketsInCapacityLink) { // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity // should be ready to exit the network in 1 second. 
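+ // (125 bytes = 1000 bits; 1000 bits at 1 kbps is 1 s of serialization time,
+ //  as computed by CalculateArrivalTime.)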
- SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + SimulatedNetwork network = + SimulatedNetwork({.link_capacity = DataRate::KilobitsPerSec(1)}); ASSERT_TRUE(network.EnqueuePacket( PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1))); // Enqueue another packet of 125 bytes (this one should exit after 2 seconds). @@ -147,7 +152,8 @@ TEST(SimulatedNetworkTest, DequeueDeliverablePacketsAppliesConfigChangesToCapacityLink) { // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity // should be ready to exit the network in 1 second. - SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + SimulatedNetwork network = + SimulatedNetwork({.link_capacity = DataRate::KilobitsPerSec(1)}); const PacketInFlightInfo packet_1 = PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1); ASSERT_TRUE(network.EnqueuePacket(packet_1)); @@ -166,7 +172,7 @@ TEST(SimulatedNetworkTest, // Since the link capacity changes from 1 kbps to 10 kbps, packets will take // 100 ms each to leave the network. - network.SetConfig({.link_capacity_kbps = 10}); + network.SetConfig({.link_capacity = DataRate::KilobitsPerSec(10)}); // The next delivery time doesn't change (it will be updated, if needed at // DequeueDeliverablePackets time). @@ -194,10 +200,123 @@ TEST(SimulatedNetworkTest, /*receive_time_us=*/TimeDelta::Millis(1100).us()))); } -TEST(SimulatedNetworkTest, NetworkEmptyAfterLastPacketDequeued) { +TEST(SimulatedNetworkTest, + SetConfigUpdateNextDeliveryTimeIfLinkCapacityChange) { // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity // should be ready to exit the network in 1 second. - SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + SimulatedNetwork network = + SimulatedNetwork({.link_capacity = DataRate::KilobitsPerSec(1)}); + MockFunction delivery_time_changed_callback; + network.RegisterDeliveryTimeChangedCallback( + delivery_time_changed_callback.AsStdFunction()); + const PacketInFlightInfo packet_1 = + PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1); + ASSERT_TRUE(network.EnqueuePacket(packet_1)); + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(1).us()); + + // Since the link capacity changes from 1 kbps to 10 kbps, packets will take + // 100 ms each to leave the network. After 500ms, half the packet should have + // gone through. + EXPECT_CALL(delivery_time_changed_callback, Call).WillOnce([&]() { + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Millis(500 + 50).us()); + }); + network.SetConfig({.link_capacity = DataRate::KilobitsPerSec(10)}, + /*config_update_time*/ Timestamp::Millis(500)); +} + +TEST(SimulatedNetworkTest, + SetConfigUpdateNextDeliveryTimeIfLinkCapacityChangeFromZero) { + SimulatedNetwork network = + SimulatedNetwork({.link_capacity = DataRate::Zero()}); + MockFunction delivery_time_changed_callback; + network.RegisterDeliveryTimeChangedCallback( + delivery_time_changed_callback.AsStdFunction()); + const PacketInFlightInfo packet_1 = + PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1); + const PacketInFlightInfo packet_2 = + PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/2); + ASSERT_TRUE(network.EnqueuePacket(packet_1)); + ASSERT_TRUE(network.EnqueuePacket(packet_2)); + EXPECT_FALSE(network.NextDeliveryTimeUs().has_value()); + + // The link capacity changes from 0 kbps to 10 kbps during 10ms 1/10th of the + // packet will be transmitted. 
(The packet would take 100ms to go through the + // network at 10kbps.) + ::testing::Sequence s; + EXPECT_CALL(delivery_time_changed_callback, Call) + .InSequence(s) + .WillOnce([&]() { + EXPECT_EQ(network.NextDeliveryTimeUs(), + TimeDelta::Millis(500 + 100).us()); + }); + EXPECT_CALL(delivery_time_changed_callback, Call) + .InSequence(s) + .WillOnce( + [&]() { EXPECT_FALSE(network.NextDeliveryTimeUs().has_value()); }); + EXPECT_CALL(delivery_time_changed_callback, Call) + .InSequence(s) + .WillOnce([&]() { + EXPECT_EQ(network.NextDeliveryTimeUs(), + TimeDelta::Millis(610 + 90).us()); + }); + network.SetConfig({.link_capacity = DataRate::KilobitsPerSec(10)}, + /*config_update_time*/ Timestamp::Millis(500)); + network.SetConfig({.link_capacity = DataRate::Zero()}, + /*config_update_time*/ Timestamp::Millis(510)); + network.SetConfig({.link_capacity = DataRate::KilobitsPerSec(10)}, + /*config_update_time*/ Timestamp::Millis(610)); +} + +TEST(SimulatedNetworkTest, SetConfigUpdateQueueDelayAfterDelivery) { + // A packet of 125 bytes that gets enqueued on a network with 1000 kbps + // capacity should be ready to exit the narrow section in 1 ms. + SimulatedNetwork network = + SimulatedNetwork({.queue_delay_ms = 1000, + .link_capacity = DataRate::KilobitsPerSec(1000)}); + MockFunction delivery_time_changed_callback; + network.RegisterDeliveryTimeChangedCallback( + delivery_time_changed_callback.AsStdFunction()); + EXPECT_CALL(delivery_time_changed_callback, Call).Times(0); + const PacketInFlightInfo packet_1 = + PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1); + ASSERT_TRUE(network.EnqueuePacket(packet_1)); + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Millis(1).us()); + // But no packet is actually delivered; it is only moved to the delay link. + EXPECT_TRUE(network + .DequeueDeliverablePackets( + /*receive_time_us=*/TimeDelta::Millis(1).us()) + .empty()); + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Millis(1000 + 1).us()); + + // Changing the queue time does not change the next delivery time. + network.SetConfig( + {.queue_delay_ms = 1, .link_capacity = DataRate::KilobitsPerSec(100)}, + /*config_update_time*/ Timestamp::Millis(500)); + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Millis(1000 + 1).us()); + + // A new packet requires NextDeliveryTimeUs to change since the capacity + // changed, but it does not affect the delivery time of packet_1. + const PacketInFlightInfo packet_2 = PacketInFlightInfo( + /*size=*/125, /*send_time_us=*/TimeDelta::Millis(500).us(), + /*packet_id=*/2); + ASSERT_TRUE(network.EnqueuePacket(packet_2)); + EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Millis(1000 + 1).us()); + // At 100kbps, it will take packet 2 10ms to pass through the narrow section. + // Since the delay is lower for packet_2 but reordering is not allowed, both + // packets are delivered at the same time. + std::vector delivered_packets = + network.DequeueDeliverablePackets( + /*receive_time_us=*/TimeDelta::Millis(1000 + 1).us()); + ASSERT_THAT(delivered_packets, SizeIs(2)); + EXPECT_EQ(delivered_packets[0].receive_time_us, + delivered_packets[1].receive_time_us); +} + +TEST(SimulatedNetworkTest, NetworkEmptyAfterLastPacketDequeued) { + // A packet of 125 bytes that gets enqueued on a network with 1 kbps + // capacity should be ready to exit the network in 1 second. + SimulatedNetwork network = + SimulatedNetwork({.link_capacity = DataRate::KilobitsPerSec(1)}); ASSERT_TRUE(network.EnqueuePacket(PacketWithSize(125))); // Collecting all the delivered packets ...
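For reference, the delivery-time expectations in the tests above follow from plain size-over-capacity arithmetic: 125 bytes is 1000 bits, which drains through a 1 kbps link in 1 second, and once the capacity is raised to 10 kbps at t = 500 ms the remaining 500 bits drain in 50 ms, giving the 500 + 50 expectation. A minimal sketch of that arithmetic, assuming only the DataSize/DataRate/TimeDelta helpers from api/units (the TransitTime helper below is illustrative, not part of the patch):

#include "api/units/data_rate.h"
#include "api/units/data_size.h"
#include "api/units/time_delta.h"

namespace {

// Illustrative only: time needed for `size` to drain through a link of rate
// `capacity`, using the DataSize / DataRate division provided by api/units.
webrtc::TimeDelta TransitTime(webrtc::DataSize size,
                              webrtc::DataRate capacity) {
  return size / capacity;
}

}  // namespace

// TransitTime(DataSize::Bytes(125), DataRate::KilobitsPerSec(1)) is
// TimeDelta::Seconds(1); after 500 ms at 1 kbps, 500 bits remain, and at
// 10 kbps they drain in 50 ms, matching TimeDelta::Millis(500 + 50) above.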
@@ -207,25 +326,26 @@ TEST(SimulatedNetworkTest, NetworkEmptyAfterLastPacketDequeued) { EXPECT_EQ(delivered_packets.size(), 1ul); // ... leaves the network empty. - EXPECT_EQ(network.NextDeliveryTimeUs(), absl::nullopt); + EXPECT_EQ(network.NextDeliveryTimeUs(), std::nullopt); } TEST(SimulatedNetworkTest, DequeueDeliverablePacketsOnLateCall) { - // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity - // should be ready to exit the network in 1 second. - SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + // A packet of 125 bytes that gets enqueued on a network with 1 kbps + // capacity should be ready to exit the network in 1 second. + SimulatedNetwork network = + SimulatedNetwork({.link_capacity = DataRate::KilobitsPerSec(1)}); ASSERT_TRUE(network.EnqueuePacket( PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1))); - // Enqueue another packet of 125 bytes with send time 1 second so this should - // exit after 2 seconds. + // Enqueue another packet of 125 bytes with send time 1 second so this + // should exit after 2 seconds. ASSERT_TRUE(network.EnqueuePacket( PacketInFlightInfo(/*size=*/125, /*send_time_us=*/TimeDelta::Seconds(1).us(), /*packet_id=*/2))); - // Collecting delivered packets after 3 seconds will result in the delivery of - // both the enqueued packets. + // Collecting delivered packets after 3 seconds will result in the delivery + // of both the enqueued packets. std::vector delivered_packets = network.DequeueDeliverablePackets( /*receive_time_us=*/TimeDelta::Seconds(3).us()); @@ -234,13 +354,14 @@ TEST(SimulatedNetworkTest, DequeueDeliverablePacketsOnLateCall) { TEST(SimulatedNetworkTest, DequeueDeliverablePacketsOnEarlyCallReturnsNoPackets) { - // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity - // should be ready to exit the network in 1 second. - SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + // A packet of 125 bytes that gets enqueued on a network with 1 kbps + // capacity should be ready to exit the network in 1 second. + SimulatedNetwork network = + SimulatedNetwork({.link_capacity = DataRate::KilobitsPerSec(1)}); ASSERT_TRUE(network.EnqueuePacket(PacketWithSize(125))); - // Collecting delivered packets after 0.5 seconds will result in the delivery - // of 0 packets. + // Collecting delivered packets after 0.5 seconds will result in the + // delivery of 0 packets. std::vector delivered_packets = network.DequeueDeliverablePackets( /*receive_time_us=*/TimeDelta::Seconds(0.5).us()); @@ -251,10 +372,10 @@ TEST(SimulatedNetworkTest, } TEST(SimulatedNetworkTest, QueueDelayMsWithoutStandardDeviation) { - // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity - // should be ready to exit the network in 1 second. - SimulatedNetwork network = - SimulatedNetwork({.queue_delay_ms = 100, .link_capacity_kbps = 1}); + // A packet of 125 bytes that gets enqueued on a network with 1 kbps + // capacity should be ready to exit the network in 1 second. + SimulatedNetwork network = SimulatedNetwork( + {.queue_delay_ms = 100, .link_capacity = DataRate::KilobitsPerSec(1)}); ASSERT_TRUE(network.EnqueuePacket(PacketWithSize(125))); // The next delivery time is still 1 second even if there are 100 ms of // extra delay but this will be applied at DequeueDeliverablePackets time. 
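The contract exercised by these tests is that NextDeliveryTimeUs() tracks only the capacity (narrow-section) link, while queue_delay_ms is applied when DequeueDeliverablePackets() moves packets to the delay link. A sketch of the polling loop the tests use to drive the network follows; since template arguments are elided in the extracted text, the exact types (std::optional<int64_t> and std::vector<PacketDeliveryInfo>) are assumptions here, and the helper is illustrative rather than part of the patch:

#include <cstdint>
#include <optional>
#include <vector>

#include "test/network/simulated_network.h"

namespace {

// Illustrative only: advance to each scheduled delivery time and collect
// everything the network releases, until nothing more is scheduled.
std::vector<webrtc::PacketDeliveryInfo> DrainNetwork(
    webrtc::SimulatedNetwork& network) {
  std::vector<webrtc::PacketDeliveryInfo> delivered;
  while (std::optional<int64_t> next_us = network.NextDeliveryTimeUs()) {
    std::vector<webrtc::PacketDeliveryInfo> packets =
        network.DequeueDeliverablePackets(/*receive_time_us=*/*next_us);
    delivered.insert(delivered.end(), packets.begin(), packets.end());
  }
  return delivered;
}

}  // namespace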
@@ -281,10 +402,10 @@ TEST(SimulatedNetworkTest, SimulatedNetwork network = SimulatedNetwork({.queue_delay_ms = 100, .delay_standard_deviation_ms = 90, - .link_capacity_kbps = 1, + .link_capacity = DataRate::KilobitsPerSec(1), .allow_reordering = false}); - // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity - // should be ready to exit the network in 1 second. + // A packet of 125 bytes that gets enqueued on a network with 1 kbps + // capacity should be ready to exit the network in 1 second. ASSERT_TRUE(network.EnqueuePacket( PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1))); @@ -319,11 +440,11 @@ TEST(SimulatedNetworkTest, QueueDelayMsWithStandardDeviationAndReorderAllowed) { SimulatedNetwork network = SimulatedNetwork({.queue_delay_ms = 100, .delay_standard_deviation_ms = 90, - .link_capacity_kbps = 1, + .link_capacity = DataRate::KilobitsPerSec(1), .allow_reordering = true}, /*random_seed=*/1); - // A packet of 125 bytes that gets enqueued on a network with 1 kbps capacity - // should be ready to exit the network in 1 second. + // A packet of 125 bytes that gets enqueued on a network with 1 kbps + // capacity should be ready to exit the network in 1 second. ASSERT_TRUE(network.EnqueuePacket( PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1))); @@ -355,8 +476,9 @@ TEST(SimulatedNetworkTest, QueueDelayMsWithStandardDeviationAndReorderAllowed) { } TEST(SimulatedNetworkTest, PacketLoss) { - // On a network with 50% probablility of packet loss ... - SimulatedNetwork network = SimulatedNetwork({.loss_percent = 50}); + // On a network with 50% probability of packet loss ... + SimulatedNetwork network = + SimulatedNetwork({.loss_percent = 50}, /*random_seed =*/1); // Enqueueing 8 packets ... for (int i = 0; i < 8; i++) { @@ -379,9 +501,51 @@ TEST(SimulatedNetworkTest, PacketLoss) { EXPECT_EQ(lost_packets, 4); } +TEST(SimulatedNetworkTest, NextDeliveryTimeSetAfterLostPackets) { + // On a network with 50% probability of packet loss ... + SimulatedNetwork network = + SimulatedNetwork({.queue_delay_ms = 10, + .link_capacity = DataRate::KilobitsPerSec(1000), + .loss_percent = 50}, + /*random_seed =*/1); + // Enqueueing 8 packets at the same time. It should take 1ms to pass through + // the capacity limited section per packet, in total adding 8ms delay to the + // last packet. Since queue delay is 10ms, multiple packets will be in the + // delay queue at the same time. + for (int i = 0; i < 8; i++) { + ASSERT_TRUE(network.EnqueuePacket(PacketInFlightInfo( + /*size=*/125, /*send_time_us=*/0, /*packet_id=*/i + 1))); + } + int64_t time_us = 0; + std::vector delivered_packets; + // This assumes the first packet is lost and the last packet is delivered.... + while (delivered_packets.size() != 8) { + ASSERT_TRUE(network.NextDeliveryTimeUs().has_value()); + time_us = *network.NextDeliveryTimeUs(); + std::vector packets = + network.DequeueDeliverablePackets(time_us); + delivered_packets.insert(delivered_packets.end(), packets.begin(), + packets.end()); + } + // Results in the loss of 4 of them.
+ int lost_packets = 0; + int received_packets = 0; + for (const auto& packet : delivered_packets) { + if (packet.receive_time_us == PacketDeliveryInfo::kNotReceived) { + lost_packets++; + } else { + received_packets++; + } + } + EXPECT_EQ(delivered_packets.back().receive_time_us, + Timestamp::Millis(10 + 8).us()); + EXPECT_EQ(lost_packets, 4); + EXPECT_EQ(received_packets, 4); +} + TEST(SimulatedNetworkTest, PacketLossBurst) { - // On a network with 50% probablility of packet loss and an average burst loss - // length of 100 ... + // On a network with 50% probability of packet loss and an average burst + // loss length of 100 ... SimulatedNetwork network = SimulatedNetwork( {.loss_percent = 50, .avg_burst_loss_length = 100}, /*random_seed=*/1); @@ -412,9 +576,11 @@ TEST(SimulatedNetworkTest, PacketLossBurst) { } TEST(SimulatedNetworkTest, PauseTransmissionUntil) { - // 3 packets of 125 bytes that gets enqueued on a network with 1 kbps capacity - // should be ready to exit the network after 1, 2 and 3 seconds respectively. - SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + // 3 packets of 125 bytes that get enqueued on a network with 1 kbps + // capacity should be ready to exit the network after 1, 2 and 3 seconds + // respectively. + SimulatedNetwork network = + SimulatedNetwork({.link_capacity = DataRate::KilobitsPerSec(1)}); ASSERT_TRUE(network.EnqueuePacket( PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/1))); ASSERT_TRUE(network.EnqueuePacket( @@ -443,18 +609,19 @@ TEST(SimulatedNetworkTest, PauseTransmissionUntil) { // delivery time of the next packet which accounts for the network pause. EXPECT_EQ(network.NextDeliveryTimeUs(), TimeDelta::Seconds(6).us()); - // And 2 seconds after the exit of the first enqueued packet, the following 2 - // packets are also delivered. + // And 2 seconds after the exit of the first enqueued packet, the following + // 2 packets are also delivered. delivered_packets = network.DequeueDeliverablePackets( /*receive_time_us=*/TimeDelta::Seconds(7).us()); EXPECT_EQ(delivered_packets.size(), 2ul); } TEST(SimulatedNetworkTest, CongestedNetworkRespectsLinkCapacity) { - SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + SimulatedNetwork network = + SimulatedNetwork({.link_capacity = DataRate::KilobitsPerSec(1)}); for (size_t i = 0; i < 1'000; ++i) { - ASSERT_TRUE(network.EnqueuePacket( - PacketInFlightInfo(/*size=*/125, /*send_time_us=*/0, /*packet_id=*/i))); + ASSERT_TRUE(network.EnqueuePacket(PacketInFlightInfo( + /*size=*/125, /*send_time_us=*/0, /*packet_id=*/i))); } PacketDeliveryInfo last_delivered_packet{ PacketInFlightInfo(/*size=*/0, /*send_time_us=*/0, /*packet_id=*/0), 0}; @@ -474,11 +641,12 @@ TEST(SimulatedNetworkTest, CongestedNetworkRespectsLinkCapacity) { } TEST(SimulatedNetworkTest, EnqueuePacketWithSubSecondNonMonotonicBehaviour) { - // On multi-core systems, different threads can experience sub-millisecond non - // monothonic behaviour when running on different cores. This test checks that - // when a non monotonic packet enqueue, the network continues to work and the - // out of order packet is sent anyway. - SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); + // On multi-core systems, different threads can experience sub-millisecond + // non-monotonic behaviour when running on different cores. This test + // checks that when a non-monotonic packet enqueue happens, the network continues to + // work and the out-of-order packet is sent anyway.
+ SimulatedNetwork network = + SimulatedNetwork({.link_capacity = DataRate::KilobitsPerSec(1)}); ASSERT_TRUE(network.EnqueuePacket(PacketInFlightInfo( /*size=*/125, /*send_time_us=*/TimeDelta::Seconds(1).us(), /*packet_id=*/0))); @@ -503,7 +671,8 @@ TEST(SimulatedNetworkTest, EnqueuePacketWithSubSecondNonMonotonicBehaviour) { // TODO(bugs.webrtc.org/14525): Re-enable when the DCHECK will be uncommented // and the non-monotonic events on real time clock tests is solved/understood. // TEST(SimulatedNetworkDeathTest, EnqueuePacketExpectMonotonicSendTime) { -// SimulatedNetwork network = SimulatedNetwork({.link_capacity_kbps = 1}); +// SimulatedNetwork network = SimulatedNetwork({.link_capacity = +// DataRate::KilobitsPerSec(1)}); // ASSERT_TRUE(network.EnqueuePacket(PacketInFlightInfo( // /*size=*/125, /*send_time_us=*/2'000'000, /*packet_id=*/0))); // EXPECT_DEATH_IF_SUPPORTED(network.EnqueuePacket(PacketInFlightInfo( diff --git a/test/network/traffic_route.cc b/test/network/traffic_route.cc index 94db5aede1..6c57583221 100644 --- a/test/network/traffic_route.cc +++ b/test/network/traffic_route.cc @@ -11,12 +11,19 @@ #include "test/network/traffic_route.h" #include +#include +#include +#include #include +#include #include -#include "absl/types/optional.h" -#include "rtc_base/logging.h" -#include "rtc_base/numerics/safe_minmax.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/socket_address.h" +#include "system_wrappers/include/clock.h" +#include "test/network/network_emulation.h" namespace webrtc { namespace test { @@ -46,7 +53,7 @@ CrossTrafficRouteImpl::CrossTrafficRouteImpl( EmulatedEndpointImpl* endpoint) : clock_(clock), receiver_(receiver), endpoint_(endpoint) { null_receiver_ = std::make_unique(); - absl::optional port = + std::optional port = endpoint_->BindReceiver(0, null_receiver_.get()); RTC_DCHECK(port); null_receiver_port_ = port.value(); @@ -65,7 +72,7 @@ void CrossTrafficRouteImpl::NetworkDelayedAction(size_t packet_size, auto action_receiver = std::make_unique(action); // BindOneShotReceiver arranges to free the port in the endpoint after the // action is done. 
- absl::optional port = + std::optional port = endpoint_->BindOneShotReceiver(0, action_receiver.get()); RTC_DCHECK(port); actions_.push_back(std::move(action_receiver)); @@ -77,11 +84,11 @@ void CrossTrafficRouteImpl::SendPacket(size_t packet_size) { } void CrossTrafficRouteImpl::SendPacket(size_t packet_size, uint16_t dest_port) { - rtc::CopyOnWriteBuffer data(packet_size); + CopyOnWriteBuffer data(packet_size); std::fill_n(data.MutableData(), data.size(), 0); receiver_->OnPacketReceived(EmulatedIpPacket( - /*from=*/rtc::SocketAddress(), - rtc::SocketAddress(endpoint_->GetPeerLocalAddress(), dest_port), data, + /*from=*/SocketAddress(), + SocketAddress(endpoint_->GetPeerLocalAddress(), dest_port), data, clock_->CurrentTime())); } diff --git a/test/network/traffic_route.h b/test/network/traffic_route.h index dbc41a694f..c9974ab517 100644 --- a/test/network/traffic_route.h +++ b/test/network/traffic_route.h @@ -11,11 +11,14 @@ #ifndef TEST_NETWORK_TRAFFIC_ROUTE_H_ #define TEST_NETWORK_TRAFFIC_ROUTE_H_ +#include +#include +#include #include #include -#include "api/test/network_emulation_manager.h" -#include "rtc_base/copy_on_write_buffer.h" +#include "api/test/network_emulation/cross_traffic.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" #include "system_wrappers/include/clock.h" #include "test/network/network_emulation.h" diff --git a/test/null_transport.cc b/test/null_transport.cc index db61fdda5a..884f294f29 100644 --- a/test/null_transport.cc +++ b/test/null_transport.cc @@ -12,12 +12,12 @@ namespace webrtc { namespace test { -bool NullTransport::SendRtp(rtc::ArrayView packet, +bool NullTransport::SendRtp(ArrayView packet, const PacketOptions& options) { return true; } -bool NullTransport::SendRtcp(rtc::ArrayView packet) { +bool NullTransport::SendRtcp(ArrayView packet) { return true; } diff --git a/test/null_transport.h b/test/null_transport.h index 5e13ab1fce..16496225af 100644 --- a/test/null_transport.h +++ b/test/null_transport.h @@ -19,9 +19,9 @@ class PacketReceiver; namespace test { class NullTransport : public Transport { public: - bool SendRtp(rtc::ArrayView packet, + bool SendRtp(ArrayView packet, const PacketOptions& options) override; - bool SendRtcp(rtc::ArrayView packet) override; + bool SendRtcp(ArrayView packet) override; }; } // namespace test } // namespace webrtc diff --git a/test/pc/e2e/BUILD.gn b/test/pc/e2e/BUILD.gn index 75db40eef5..ac87522574 100644 --- a/test/pc/e2e/BUILD.gn +++ b/test/pc/e2e/BUILD.gn @@ -48,8 +48,10 @@ if (!build_with_chromium) { "echo/echo_emulation.h", ] deps = [ + "../../../api:sequence_checker", "../../../api/test/pclf:media_configuration", "../../../modules/audio_device:test_audio_device_module", + "../../../rtc_base:logging", "../../../rtc_base:swap_queue", ] } @@ -65,23 +67,28 @@ if (!build_with_chromium) { "../../../api:frame_generator_api", "../../../api:function_view", "../../../api:libjingle_peerconnection_api", + "../../../api:make_ref_counted", + "../../../api:media_stream_interface", + "../../../api:rtc_error", + "../../../api:rtc_stats_api", + "../../../api:rtp_parameters", "../../../api:scoped_refptr", "../../../api:sequence_checker", + "../../../api/audio:audio_processing", "../../../api/task_queue:pending_task_safety_flag", "../../../api/test/pclf:media_configuration", "../../../api/test/pclf:media_quality_test_params", "../../../api/test/pclf:peer_configurer", - "../../../modules/audio_processing:api", + "../../../pc:pc_test_utils", "../../../pc:peerconnection_wrapper", + "../../../rtc_base:checks", 
"../../../rtc_base:logging", + "../../../rtc_base:macromagic", "../../../rtc_base:refcount", + "../../../rtc_base:threading", "../../../rtc_base/synchronization:mutex", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - "//third_party/abseil-cpp/absl/types:variant", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -96,28 +103,28 @@ if (!build_with_chromium) { ":test_peer", "../..:copy_to_file_audio_capturer", "../../../api:create_time_controller", + "../../../api:libjingle_peerconnection_api", + "../../../api:scoped_refptr", "../../../api:time_controller", + "../../../api/audio:audio_device", "../../../api/rtc_event_log:rtc_event_log_factory", - "../../../api/task_queue:default_task_queue_factory", + "../../../api/task_queue", "../../../api/test/pclf:media_configuration", "../../../api/test/pclf:media_quality_test_params", "../../../api/test/pclf:peer_configurer", "../../../api/transport:field_trial_based_config", "../../../api/video_codecs:builtin_video_decoder_factory", "../../../api/video_codecs:builtin_video_encoder_factory", - "../../../media:rtc_audio_video", - "../../../media:rtc_media_engine_defaults", + "../../../api/video_codecs:video_codecs_api", "../../../modules/audio_device:test_audio_device_module", - "../../../modules/audio_processing/aec_dump", - "../../../p2p:rtc_p2p", - "../../../rtc_base:rtc_task_queue", + "../../../pc:pc_test_utils", + "../../../rtc_base:checks", "../../../rtc_base:threading", + "../../../rtc_base/system:file_wrapper", "analyzer/video:quality_analyzing_video_encoder", "analyzer/video:video_quality_analyzer_injection_helper", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -130,7 +137,6 @@ if (!build_with_chromium) { "../../../api/test/video:test_video_track_source", "../../../api/video:video_frame", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("media_helper") { @@ -157,7 +163,6 @@ if (!build_with_chromium) { "../../../pc:video_track_source", "analyzer/video:video_quality_analyzer_injection_helper", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:variant" ] } rtc_library("peer_params_preprocessor") { @@ -169,15 +174,20 @@ if (!build_with_chromium) { ] deps = [ "../..:fileutils", - "../../../api:peer_network_dependencies", + "../../../api:array_view", + "../../../api:rtp_parameters", "../../../api/test/pclf:media_configuration", "../../../api/test/pclf:media_quality_test_params", "../../../api/test/pclf:peer_configurer", + "../../../api/video_codecs:scalability_mode", + "../../../media:media_constants", "../../../modules/video_coding/svc:scalability_mode_util", "../../../modules/video_coding/svc:scalability_structures", + "../../../modules/video_coding/svc:scalable_video_controller", + "../../../rtc_base:checks", "../../../rtc_base:macromagic", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("test_activities_executor") { @@ -197,10 +207,7 @@ if (!build_with_chromium) { "../../../rtc_base/synchronization:mutex", "../../../rtc_base/task_utils:repeating_task", "../../../system_wrappers", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -220,18 +227,26 @@ if (!build_with_chromium) { ":peer_params_preprocessor", ":sdp_changer", 
":stats_poller", + ":stats_provider", ":test_activities_executor", ":test_peer", ":test_peer_factory", + ":test_video_capturer_video_track_source", "../..:field_trial", "../..:fileutils", "../..:perf_test", + "../..:test_flags", + "../..:test_support", "../../../api:audio_quality_analyzer_api", "../../../api:libjingle_peerconnection_api", "../../../api:media_stream_interface", "../../../api:peer_connection_quality_test_fixture_api", + "../../../api:rtc_error", "../../../api:rtc_event_log_output_file", + "../../../api:rtp_parameters", + "../../../api:rtp_transceiver_direction", "../../../api:scoped_refptr", + "../../../api:stats_observer_interface", "../../../api:time_controller", "../../../api:video_quality_analyzer_api", "../../../api/rtc_event_log", @@ -243,15 +258,20 @@ if (!build_with_chromium) { "../../../api/test/pclf:peer_configurer", "../../../api/units:time_delta", "../../../api/units:timestamp", + "../../../api/video:video_frame", + "../../../media:media_constants", "../../../pc:pc_test_utils", "../../../pc:sdp_utils", + "../../../rtc_base:checks", "../../../rtc_base:gunit_helpers", + "../../../rtc_base:logging", "../../../rtc_base:macromagic", "../../../rtc_base:safe_conversions", "../../../rtc_base:stringutils", "../../../rtc_base:task_queue_for_test", "../../../rtc_base:threading", "../../../rtc_base/synchronization:mutex", + "../../../rtc_base/task_utils:repeating_task", "../../../system_wrappers", "../../../system_wrappers:field_trial", "analyzer/video:default_video_quality_analyzer", @@ -259,8 +279,9 @@ if (!build_with_chromium) { "analyzer/video:video_frame_tracking_id_injector", "analyzer/video:video_quality_analyzer_injection_helper", "analyzer/video:video_quality_metrics_reporter", + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } peer_connection_e2e_smoke_test_resources = [ @@ -283,7 +304,7 @@ if (!build_with_chromium) { ":default_audio_quality_analyzer", ":network_quality_metrics_reporter", ":stats_based_network_quality_metrics_reporter", - "../../../api:callfactory_api", + "../../..//test/network:simulated_network", "../../../api:create_network_emulation_manager", "../../../api:create_peer_connection_quality_test_frame_generator", "../../../api:create_peerconnection_quality_test_fixture", @@ -301,10 +322,9 @@ if (!build_with_chromium) { "../../../api/test/pclf:peer_configurer", "../../../api/video_codecs:builtin_video_decoder_factory", "../../../api/video_codecs:builtin_video_encoder_factory", - "../../../call:simulated_network", + "../../../media:media_constants", "../../../media:rtc_audio_video", "../../../modules/audio_device:audio_device_impl", - "../../../p2p:rtc_p2p", "../../../pc:pc_test_utils", "../../../pc:peerconnection_wrapper", "../../../rtc_base:gunit_helpers", @@ -334,17 +354,24 @@ if (!build_with_chromium) { ":network_quality_metrics_reporter", ":peerconnection_quality_test", ":stats_based_network_quality_metrics_reporter", + "../..:test_flags", "../..:test_support", "../../../api:create_network_emulation_manager", "../../../api:create_peer_connection_quality_test_frame_generator", "../../../api:network_emulation_manager_api", "../../../api:peer_connection_quality_test_fixture_api", + "../../../api/test/metrics:metric", + "../../../api/test/metrics:metrics_exporter", "../../../api/test/metrics:metrics_logger", "../../../api/test/metrics:stdout_metrics_exporter", + "../../../api/test/network_emulation", 
"../../../api/test/pclf:media_configuration", "../../../api/test/pclf:media_quality_test_params", "../../../api/test/pclf:peer_configurer", "../../../api/units:time_delta", + "../../../media:media_constants", + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -367,10 +394,7 @@ if (!build_with_chromium) { "../../../api/test/pclf:media_quality_test_params", "../../../api/test/pclf:peer_configurer", "../../../api/units:time_delta", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -438,10 +462,7 @@ if (!build_with_chromium) { "../../../api:sequence_checker", "../../../api:track_id_stream_info_map", "../../../rtc_base:macromagic", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -455,8 +476,10 @@ if (!build_with_chromium) { deps = [ ":metric_metadata_keys", "../..:perf_test", + "../..:test_flags", "../../../api:audio_quality_analyzer_api", "../../../api:rtc_stats_api", + "../../../api:scoped_refptr", "../../../api:stats_observer_interface", "../../../api:track_id_stream_info_map", "../../../api/numerics", @@ -469,8 +492,9 @@ if (!build_with_chromium) { "../../../rtc_base:logging", "../../../rtc_base:rtc_numerics", "../../../rtc_base/synchronization:mutex", + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("network_quality_metrics_reporter") { @@ -493,8 +517,8 @@ if (!build_with_chromium) { "../../../rtc_base:rtc_event", "../../../rtc_base/synchronization:mutex", "../../../system_wrappers:field_trial", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("stats_based_network_quality_metrics_reporter") { @@ -506,28 +530,34 @@ if (!build_with_chromium) { deps = [ ":metric_metadata_keys", "../..:perf_test", + "../..:test_flags", "../../../api:array_view", "../../../api:network_emulation_manager_api", "../../../api:peer_connection_quality_test_fixture_api", "../../../api:rtc_stats_api", "../../../api:scoped_refptr", "../../../api:sequence_checker", + "../../../api:track_id_stream_info_map", "../../../api/numerics", "../../../api/test/metrics:metric", "../../../api/test/metrics:metrics_logger", "../../../api/test/network_emulation", "../../../api/units:data_rate", "../../../api/units:data_size", + "../../../api/units:time_delta", "../../../api/units:timestamp", "../../../rtc_base:checks", "../../../rtc_base:ip_address", + "../../../rtc_base:logging", + "../../../rtc_base:macromagic", "../../../rtc_base:rtc_event", "../../../rtc_base:stringutils", "../../../rtc_base/synchronization:mutex", "../../../rtc_base/system:no_unique_address", "../../../system_wrappers:field_trial", + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("cross_media_metrics_reporter") { @@ -539,9 +569,11 @@ if (!build_with_chromium) { deps = [ ":metric_metadata_keys", "../..:perf_test", + "../..:test_flags", "../../../api:network_emulation_manager_api", "../../../api:peer_connection_quality_test_fixture_api", "../../../api:rtc_stats_api", + "../../../api:scoped_refptr", 
"../../../api:track_id_stream_info_map", "../../../api/numerics", "../../../api/test/metrics:metric", @@ -553,10 +585,8 @@ if (!build_with_chromium) { "../../../rtc_base:rtc_numerics", "../../../rtc_base/synchronization:mutex", "../../../system_wrappers:field_trial", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -570,20 +600,22 @@ if (!build_with_chromium) { "../../../api:array_view", "../../../api:libjingle_peerconnection_api", "../../../api:rtp_parameters", + "../../../api:rtp_transceiver_direction", "../../../api/test/pclf:media_configuration", "../../../media:media_constants", "../../../media:rid_description", - "../../../media:rtc_media_base", - "../../../p2p:rtc_p2p", + "../../../media:stream_params", + "../../../p2p:p2p_constants", + "../../../p2p:transport_description", + "../../../p2p:transport_info", "../../../pc:sdp_utils", "../../../pc:session_description", "../../../pc:simulcast_description", + "../../../rtc_base:checks", "../../../rtc_base:stringutils", - ] - absl_deps = [ + "../../../rtc_base:unique_id_generator", "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } } diff --git a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc index bca52d9bfc..7c92cd228f 100644 --- a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc +++ b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.cc @@ -10,13 +10,25 @@ #include "test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h" -#include "api/stats/rtc_stats.h" +#include +#include +#include +#include +#include + +#include "absl/flags/flag.h" +#include "absl/strings/string_view.h" +#include "api/scoped_refptr.h" +#include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" #include "api/test/metrics/metric.h" +#include "api/test/metrics/metrics_logger.h" #include "api/test/track_id_stream_info_map.h" +#include "api/units/time_delta.h" #include "rtc_base/checks.h" -#include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" #include "test/pc/e2e/metric_metadata_keys.h" +#include "test/test_flags.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -38,33 +50,31 @@ void DefaultAudioQualityAnalyzer::Start(std::string test_case_name, void DefaultAudioQualityAnalyzer::OnStatsReports( absl::string_view pc_label, - const rtc::scoped_refptr& report) { + const scoped_refptr& report) { auto stats = report->GetStatsOfType(); for (auto& stat : stats) { - if (!stat->kind.is_defined() || !(*stat->kind == "audio")) { + if (!stat->kind.has_value() || !(*stat->kind == "audio")) { continue; } StatsSample sample; - sample.total_samples_received = - stat->total_samples_received.ValueOrDefault(0ul); - sample.concealed_samples = stat->concealed_samples.ValueOrDefault(0ul); + sample.total_samples_received = stat->total_samples_received.value_or(0ul); + sample.concealed_samples = stat->concealed_samples.value_or(0ul); sample.removed_samples_for_acceleration = - stat->removed_samples_for_acceleration.ValueOrDefault(0ul); + stat->removed_samples_for_acceleration.value_or(0ul); sample.inserted_samples_for_deceleration = - stat->inserted_samples_for_deceleration.ValueOrDefault(0ul); + 
stat->inserted_samples_for_deceleration.value_or(0ul); sample.silent_concealed_samples = - stat->silent_concealed_samples.ValueOrDefault(0ul); + stat->silent_concealed_samples.value_or(0ul); sample.jitter_buffer_delay = - TimeDelta::Seconds(stat->jitter_buffer_delay.ValueOrDefault(0.)); + TimeDelta::Seconds(stat->jitter_buffer_delay.value_or(0.)); sample.jitter_buffer_target_delay = - TimeDelta::Seconds(stat->jitter_buffer_target_delay.ValueOrDefault(0.)); + TimeDelta::Seconds(stat->jitter_buffer_target_delay.value_or(0.)); sample.jitter_buffer_emitted_count = - stat->jitter_buffer_emitted_count.ValueOrDefault(0ul); - sample.total_samples_duration = - stat->total_samples_duration.ValueOrDefault(0.); - sample.total_audio_energy = stat->total_audio_energy.ValueOrDefault(0.); + stat->jitter_buffer_emitted_count.value_or(0ul); + sample.total_samples_duration = stat->total_samples_duration.value_or(0.); + sample.total_audio_energy = stat->total_audio_energy.value_or(0.); TrackIdStreamInfoMap::StreamInfo stream_info = analyzer_helper_->GetStreamInfoFromTrackId(*stat->track_identifier); @@ -128,49 +138,49 @@ void DefaultAudioQualityAnalyzer::OnStatsReports( std::string DefaultAudioQualityAnalyzer::GetTestCaseName( const std::string& stream_label) const { - return test_case_name_ + "/" + stream_label; + if (!absl::GetFlag(FLAGS_isolated_script_test_perf_output).empty()) { + return test_case_name_ + "/" + stream_label; + } + return test_case_name_; } void DefaultAudioQualityAnalyzer::Stop() { MutexLock lock(&lock_); for (auto& item : streams_stats_) { + std::string test_case_name = GetTestCaseName(item.first); const TrackIdStreamInfoMap::StreamInfo& stream_info = stream_info_[item.first]; - // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. 
std::map metric_metadata{ {MetricMetadataKey::kAudioStreamMetadataKey, item.first}, {MetricMetadataKey::kPeerMetadataKey, stream_info.receiver_peer}, - {MetricMetadataKey::kReceiverMetadataKey, stream_info.receiver_peer}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; + {MetricMetadataKey::kReceiverMetadataKey, stream_info.receiver_peer}}; - metrics_logger_->LogMetric("expand_rate", GetTestCaseName(item.first), - item.second.expand_rate, Unit::kUnitless, - ImprovementDirection::kSmallerIsBetter, - metric_metadata); - metrics_logger_->LogMetric("accelerate_rate", GetTestCaseName(item.first), + metrics_logger_->LogMetric( + "expand_rate", test_case_name, item.second.expand_rate, Unit::kUnitless, + ImprovementDirection::kSmallerIsBetter, metric_metadata); + metrics_logger_->LogMetric("accelerate_rate", test_case_name, item.second.accelerate_rate, Unit::kUnitless, ImprovementDirection::kSmallerIsBetter, metric_metadata); - metrics_logger_->LogMetric("preemptive_rate", GetTestCaseName(item.first), + metrics_logger_->LogMetric("preemptive_rate", test_case_name, item.second.preemptive_rate, Unit::kUnitless, ImprovementDirection::kSmallerIsBetter, metric_metadata); + metrics_logger_->LogMetric("speech_expand_rate", test_case_name, + item.second.speech_expand_rate, Unit::kUnitless, + ImprovementDirection::kSmallerIsBetter, + metric_metadata); metrics_logger_->LogMetric( - "speech_expand_rate", GetTestCaseName(item.first), - item.second.speech_expand_rate, Unit::kUnitless, - ImprovementDirection::kSmallerIsBetter, metric_metadata); - metrics_logger_->LogMetric( - "average_jitter_buffer_delay_ms", GetTestCaseName(item.first), + "average_jitter_buffer_delay_ms", test_case_name, item.second.average_jitter_buffer_delay_ms, Unit::kMilliseconds, ImprovementDirection::kNeitherIsBetter, metric_metadata); metrics_logger_->LogMetric( - "preferred_buffer_size_ms", GetTestCaseName(item.first), + "preferred_buffer_size_ms", test_case_name, item.second.preferred_buffer_size_ms, Unit::kMilliseconds, ImprovementDirection::kNeitherIsBetter, metric_metadata); - metrics_logger_->LogMetric("energy", GetTestCaseName(item.first), - item.second.energy, Unit::kUnitless, - ImprovementDirection::kNeitherIsBetter, - metric_metadata); + metrics_logger_->LogMetric( + "energy", test_case_name, item.second.energy, Unit::kUnitless, + ImprovementDirection::kNeitherIsBetter, metric_metadata); } } diff --git a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h index c59f727422..6e7487d14e 100644 --- a/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h +++ b/test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h @@ -44,7 +44,7 @@ class DefaultAudioQualityAnalyzer : public AudioQualityAnalyzerInterface { TrackIdStreamInfoMap* analyzer_helper) override; void OnStatsReports( absl::string_view pc_label, - const rtc::scoped_refptr& report) override; + const scoped_refptr& report) override; void Stop() override; // Returns audio quality stats per stream label. 
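The analyzer changes above follow the same optional migration as the rest of the patch: stats members are read with has_value()/value_or() instead of is_defined()/ValueOrDefault(). A small sketch of that access pattern; the concrete inbound-RTP stats type is elided in the extracted text, so it is left as a template parameter here and the helper is illustrative only:

#include <cstdint>

namespace {

// Illustrative only: mirrors how the analyzer reads optional stats members.
// `InboundAudioStats` stands in for the inbound RTP stats type whose name is
// elided in the extracted diff text.
template <typename InboundAudioStats>
uint64_t ConcealedSamplesOrZero(const InboundAudioStats& stat) {
  if (!stat.kind.has_value() || *stat.kind != "audio") {
    return 0;  // Not an audio stream; nothing to report.
  }
  return stat.concealed_samples.value_or(0ul);
}

}  // namespace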
diff --git a/test/pc/e2e/analyzer/video/BUILD.gn b/test/pc/e2e/analyzer/video/BUILD.gn index 17876e54be..72b2796976 100644 --- a/test/pc/e2e/analyzer/video/BUILD.gn +++ b/test/pc/e2e/analyzer/video/BUILD.gn @@ -63,13 +63,13 @@ rtc_library("video_dumping") { "video_dumping.h", ] deps = [ - "../../../..:video_test_support", + "../../../..:video_frame_writer", "../../../../../api/test/video:video_frame_writer", "../../../../../api/video:video_frame", "../../../../../rtc_base:logging", "../../../../../system_wrappers", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("encoded_image_data_injector_api") { @@ -77,7 +77,6 @@ rtc_library("encoded_image_data_injector_api") { sources = [ "encoded_image_data_injector.h" ] deps = [ "../../../../../api/video:encoded_image" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("single_process_encoded_image_data_injector") { @@ -92,8 +91,8 @@ rtc_library("single_process_encoded_image_data_injector") { "../../../../../api/video:encoded_image", "../../../../../rtc_base:checks", "../../../../../rtc_base/synchronization:mutex", + "//third_party/abseil-cpp/absl/memory", ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] } rtc_library("video_frame_tracking_id_injector") { @@ -107,8 +106,8 @@ rtc_library("video_frame_tracking_id_injector") { ":encoded_image_data_injector_api", "../../../../../api/video:encoded_image", "../../../../../rtc_base:checks", + "//third_party/abseil-cpp/absl/memory", ] - absl_deps = [ "//third_party/abseil-cpp/absl/memory" ] } rtc_library("simulcast_dummy_buffer_helper") { @@ -130,16 +129,14 @@ rtc_library("quality_analyzing_video_decoder") { ":encoded_image_data_injector_api", ":simulcast_dummy_buffer_helper", "../../../../../api:video_quality_analyzer_api", + "../../../../../api/environment", "../../../../../api/video:encoded_image", "../../../../../api/video:video_frame", "../../../../../api/video_codecs:video_codecs_api", "../../../../../modules/video_coding:video_codec_interface", "../../../../../rtc_base:logging", "../../../../../rtc_base/synchronization:mutex", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -152,6 +149,7 @@ rtc_library("quality_analyzing_video_encoder") { deps = [ ":encoded_image_data_injector_api", "../../../../../api:video_quality_analyzer_api", + "../../../../../api/environment", "../../../../../api/test/pclf:media_configuration", "../../../../../api/video:video_frame", "../../../../../api/video_codecs:video_codecs_api", @@ -159,8 +157,8 @@ rtc_library("quality_analyzing_video_encoder") { "../../../../../modules/video_coding/svc:scalability_mode_util", "../../../../../rtc_base:logging", "../../../../../rtc_base/synchronization:mutex", + "//third_party/abseil-cpp/absl/strings:string_view", ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("analyzing_video_sinks_helper") { @@ -174,10 +172,7 @@ rtc_library("analyzing_video_sinks_helper") { "../../../../../api/test/video:video_frame_writer", "../../../../../rtc_base:macromagic", "../../../../../rtc_base/synchronization:mutex", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -248,10 +243,7 @@ rtc_library("default_video_quality_analyzer_internal") { 
"../../../../../rtc_tools:video_quality_analysis", "../../../../../system_wrappers", "dvqa:pausable_state", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -259,63 +251,144 @@ rtc_library("multi_reader_queue") { testonly = true sources = [ "multi_reader_queue.h" ] deps = [ "../../../../../rtc_base:checks" ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } -rtc_library("video_quality_metrics_reporter") { - testonly = true - sources = [ - "video_quality_metrics_reporter.cc", - "video_quality_metrics_reporter.h", - ] - deps = [ - "../..:metric_metadata_keys", - "../../../../../api:peer_connection_quality_test_fixture_api", - "../../../../../api:rtc_stats_api", - "../../../../../api:track_id_stream_info_map", - "../../../../../api/numerics", - "../../../../../api/test/metrics:metric", - "../../../../../api/test/metrics:metrics_logger", - "../../../../../api/units:data_rate", - "../../../../../api/units:data_size", - "../../../../../api/units:time_delta", - "../../../../../api/units:timestamp", - "../../../../../rtc_base:checks", - "../../../../../rtc_base/synchronization:mutex", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] -} +if (!build_with_chromium) { + rtc_library("video_quality_metrics_reporter") { + testonly = true + sources = [ + "video_quality_metrics_reporter.cc", + "video_quality_metrics_reporter.h", + ] + deps = [ + "../..:metric_metadata_keys", + "../../../..:test_flags", + "../../../../../api:peer_connection_quality_test_fixture_api", + "../../../../../api:rtc_stats_api", + "../../../../../api:scoped_refptr", + "../../../../../api:track_id_stream_info_map", + "../../../../../api/numerics", + "../../../../../api/test/metrics:metric", + "../../../../../api/test/metrics:metrics_logger", + "../../../../../api/units:data_rate", + "../../../../../api/units:data_size", + "../../../../../api/units:time_delta", + "../../../../../api/units:timestamp", + "../../../../../rtc_base:checks", + "../../../../../rtc_base:logging", + "../../../../../rtc_base:macromagic", + "../../../../../rtc_base/synchronization:mutex", + "../../../../../rtc_base/synchronization:mutex", + "../../../../../system_wrappers", + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/strings:string_view", + ] + } -rtc_library("default_video_quality_analyzer") { - testonly = true - sources = [ - "default_video_quality_analyzer.cc", - "default_video_quality_analyzer.h", - ] + rtc_library("default_video_quality_analyzer") { + testonly = true + sources = [ + "default_video_quality_analyzer.cc", + "default_video_quality_analyzer.h", + ] - deps = [ - ":default_video_quality_analyzer_internal", - ":default_video_quality_analyzer_shared", - "../..:metric_metadata_keys", - "../../../../../api:array_view", - "../../../../../api:video_quality_analyzer_api", - "../../../../../api/numerics", - "../../../../../api/test/metrics:metric", - "../../../../../api/test/metrics:metrics_logger", - "../../../../../api/units:data_size", - "../../../../../api/units:time_delta", - "../../../../../api/units:timestamp", - "../../../../../api/video:encoded_image", - "../../../../../api/video:video_frame", - "../../../../../rtc_base:checks", - "../../../../../rtc_base:logging", - "../../../../../rtc_base:macromagic", - "../../../../../rtc_base:stringutils", - "../../../../../rtc_base/synchronization:mutex", - "../../../../../system_wrappers", - 
"dvqa:frames_storage", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + deps = [ + ":default_video_quality_analyzer_internal", + ":default_video_quality_analyzer_shared", + "../..:metric_metadata_keys", + "../../../..:test_flags", + "../../../../../api:array_view", + "../../../../../api:rtp_packet_info", + "../../../../../api:video_quality_analyzer_api", + "../../../../../api/numerics", + "../../../../../api/test/metrics:metric", + "../../../../../api/test/metrics:metrics_logger", + "../../../../../api/units:data_size", + "../../../../../api/units:time_delta", + "../../../../../api/units:timestamp", + "../../../../../api/video:encoded_image", + "../../../../../api/video:video_frame", + "../../../../../api/video_codecs:video_codecs_api", + "../../../../../rtc_base:checks", + "../../../../../rtc_base:logging", + "../../../../../rtc_base:macromagic", + "../../../../../rtc_base:stringutils", + "../../../../../rtc_base/synchronization:mutex", + "../../../../../system_wrappers", + "dvqa:frames_storage", + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/strings:string_view", + ] + } + + rtc_library("analyzing_video_sink") { + testonly = true + sources = [ + "analyzing_video_sink.cc", + "analyzing_video_sink.h", + ] + deps = [ + ":analyzing_video_sinks_helper", + ":simulcast_dummy_buffer_helper", + ":video_dumping", + "../..:metric_metadata_keys", + "../../../..:fixed_fps_video_frame_writer_adapter", + "../../../..:test_flags", + "../../../..:test_renderer", + "../../../../../api:scoped_refptr", + "../../../../../api:video_quality_analyzer_api", + "../../../../../api/numerics", + "../../../../../api/test/metrics:metric", + "../../../../../api/test/metrics:metrics_logger", + "../../../../../api/test/pclf:media_configuration", + "../../../../../api/test/video:video_frame_writer", + "../../../../../api/units:timestamp", + "../../../../../api/video:video_frame", + "../../../../../rtc_base:checks", + "../../../../../rtc_base:logging", + "../../../../../rtc_base:macromagic", + "../../../../../rtc_base/synchronization:mutex", + "../../../../../system_wrappers", + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:string_view", + ] + } + + rtc_library("video_quality_analyzer_injection_helper") { + testonly = true + sources = [ + "video_quality_analyzer_injection_helper.cc", + "video_quality_analyzer_injection_helper.h", + ] + deps = [ + ":analyzing_video_sink", + ":analyzing_video_sinks_helper", + ":encoded_image_data_injector_api", + ":quality_analyzing_video_decoder", + ":quality_analyzing_video_encoder", + ":simulcast_dummy_buffer_helper", + ":video_dumping", + "../../../..:fixed_fps_video_frame_writer_adapter", + "../../../..:test_renderer", + "../../../..:test_video_capturer", + "../../../..:video_frame_writer", + "../../../..:video_test_common", + "../../../../../api:array_view", + "../../../../../api:stats_observer_interface", + "../../../../../api:video_quality_analyzer_api", + "../../../../../api/test/pclf:media_configuration", + "../../../../../api/video:video_frame", + "../../../../../api/video_codecs:video_codecs_api", + "../../../../../rtc_base:checks", + "../../../../../rtc_base:logging", + "../../../../../rtc_base:stringutils", + "../../../../../rtc_base/synchronization:mutex", + "../../../../../system_wrappers", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings:string_view", + ] + } } 
rtc_library("default_video_quality_analyzer_shared") { @@ -332,78 +405,6 @@ rtc_library("default_video_quality_analyzer_shared") { "../../../../../rtc_base:checks", "../../../../../rtc_base:stringutils", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] -} - -rtc_library("analyzing_video_sink") { - testonly = true - sources = [ - "analyzing_video_sink.cc", - "analyzing_video_sink.h", - ] - deps = [ - ":analyzing_video_sinks_helper", - ":simulcast_dummy_buffer_helper", - ":video_dumping", - "../..:metric_metadata_keys", - "../../../..:fixed_fps_video_frame_writer_adapter", - "../../../..:test_renderer", - "../../../../../api:video_quality_analyzer_api", - "../../../../../api/numerics", - "../../../../../api/test/metrics:metric", - "../../../../../api/test/metrics:metrics_logger", - "../../../../../api/test/pclf:media_configuration", - "../../../../../api/test/video:video_frame_writer", - "../../../../../api/units:timestamp", - "../../../../../api/video:video_frame", - "../../../../../rtc_base:checks", - "../../../../../rtc_base:logging", - "../../../../../rtc_base:macromagic", - "../../../../../rtc_base/synchronization:mutex", - "../../../../../system_wrappers", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/memory:memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - -rtc_library("video_quality_analyzer_injection_helper") { - testonly = true - sources = [ - "video_quality_analyzer_injection_helper.cc", - "video_quality_analyzer_injection_helper.h", - ] - deps = [ - ":analyzing_video_sink", - ":analyzing_video_sinks_helper", - ":encoded_image_data_injector_api", - ":quality_analyzing_video_decoder", - ":quality_analyzing_video_encoder", - ":simulcast_dummy_buffer_helper", - ":video_dumping", - "../../../..:fixed_fps_video_frame_writer_adapter", - "../../../..:test_renderer", - "../../../..:test_video_capturer", - "../../../..:video_test_common", - "../../../..:video_test_support", - "../../../../../api:array_view", - "../../../../../api:stats_observer_interface", - "../../../../../api:video_quality_analyzer_api", - "../../../../../api/test/pclf:media_configuration", - "../../../../../api/video:video_frame", - "../../../../../api/video_codecs:video_codecs_api", - "../../../../../rtc_base:checks", - "../../../../../rtc_base:logging", - "../../../../../rtc_base:stringutils", - "../../../../../rtc_base/synchronization:mutex", - "../../../../../system_wrappers", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - ] } if (rtc_include_tests) { @@ -438,10 +439,7 @@ if (rtc_include_tests) { "../../../../../rtc_base:timeutils", "../../../../../system_wrappers", "../../../../time_controller", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -453,7 +451,6 @@ if (rtc_include_tests) { "../../../..:test_support", "../../../../../api/test/pclf:media_configuration", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("default_video_quality_analyzer_frames_comparator_test") { @@ -477,10 +474,6 @@ if (rtc_include_tests) { ":default_video_quality_analyzer_internal", "../../../..:test_support", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/strings:strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } rtc_library("multi_reader_queue_test") { @@ -490,7 +483,6 @@ if (rtc_include_tests) { ":multi_reader_queue", 
"../../../..:test_support", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("default_video_quality_analyzer_stream_state_test") { @@ -502,49 +494,58 @@ if (rtc_include_tests) { "../../../../../api/units:timestamp", "../../../../../system_wrappers", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } - rtc_library("default_video_quality_analyzer_test") { - testonly = true - sources = [ "default_video_quality_analyzer_test.cc" ] - deps = [ - ":default_video_quality_analyzer", - ":default_video_quality_analyzer_shared", - "../../../..:test_support", - "../../../../../api:create_frame_generator", - "../../../../../api:rtp_packet_info", - "../../../../../api:time_controller", - "../../../../../api/test/metrics:global_metrics_logger_and_exporter", - "../../../../../api/units:time_delta", - "../../../../../api/units:timestamp", - "../../../../../api/video:encoded_image", - "../../../../../api/video:video_frame", - "../../../../../common_video", - "../../../../../rtc_base:stringutils", - "../../../../../rtc_tools:video_quality_analysis", - "../../../../../system_wrappers", - "../../../../time_controller", - ] - } + if (!build_with_chromium) { + rtc_library("default_video_quality_analyzer_test") { + testonly = true + sources = [ "default_video_quality_analyzer_test.cc" ] + deps = [ + ":default_video_quality_analyzer", + ":default_video_quality_analyzer_shared", + "../../../..:test_support", + "../../../../../api:create_frame_generator", + "../../../../../api:rtp_packet_info", + "../../../../../api:time_controller", + "../../../../../api/test/metrics:global_metrics_logger_and_exporter", + "../../../../../api/units:time_delta", + "../../../../../api/units:timestamp", + "../../../../../api/video:encoded_image", + "../../../../../api/video:video_frame", + "../../../../../common_video", + "../../../../../rtc_base:stringutils", + "../../../../../rtc_tools:video_quality_analysis", + "../../../../../system_wrappers", + "../../../../time_controller", + ] + } - rtc_library("default_video_quality_analyzer_metric_names_test") { - testonly = true - sources = [ "default_video_quality_analyzer_metric_names_test.cc" ] - deps = [ - ":default_video_quality_analyzer", - "../../../..:test_support", - "../../../../../api:create_frame_generator", - "../../../../../api:rtp_packet_info", - "../../../../../api/test/metrics:metric", - "../../../../../api/test/metrics:metrics_logger", - "../../../../../api/test/metrics:stdout_metrics_exporter", - "../../../../../api/video:encoded_image", - "../../../../../api/video:video_frame", - "../../../../../common_video", - "../../../../../rtc_tools:video_quality_analysis", - "../../../../../system_wrappers", - ] + rtc_library("default_video_quality_analyzer_metric_names_test") { + testonly = true + sources = [ "default_video_quality_analyzer_metric_names_test.cc" ] + deps = [ + ":default_video_quality_analyzer", + ":default_video_quality_analyzer_shared", + "../../../..:test_flags", + "../../../..:test_support", + "../../../../../api:create_frame_generator", + "../../../../../api:frame_generator_api", + "../../../../../api:rtp_packet_info", + "../../../../../api:video_quality_analyzer_api", + "../../../../../api/test/metrics:metric", + "../../../../../api/test/metrics:metrics_exporter", + "../../../../../api/test/metrics:metrics_logger", + "../../../../../api/test/metrics:stdout_metrics_exporter", + "../../../../../api/units:timestamp", + "../../../../../api/video:encoded_image", + "../../../../../api/video:video_frame", + 
"../../../../../common_video", + "../../../../../rtc_tools:video_quality_analysis", + "../../../../../system_wrappers", + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/strings:string_view", + ] + } } rtc_library("video_dumping_test") { @@ -554,12 +555,12 @@ if (rtc_include_tests) { ":video_dumping", "../../../..:fileutils", "../../../..:test_support", + "../../../..:video_frame_writer", "../../../..:video_test_support", "../../../../../api:scoped_refptr", "../../../../../api/video:video_frame", "../../../../../rtc_base:random", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("single_process_encoded_image_data_injector_unittest") { diff --git a/test/pc/e2e/analyzer/video/analyzing_video_sink.cc b/test/pc/e2e/analyzer/video/analyzing_video_sink.cc index 2392483b1a..9cba75cfd2 100644 --- a/test/pc/e2e/analyzer/video/analyzing_video_sink.cc +++ b/test/pc/e2e/analyzer/video/analyzing_video_sink.cc @@ -9,23 +9,31 @@ */ #include "test/pc/e2e/analyzer/video/analyzing_video_sink.h" +#include +#include +#include #include +#include #include +#include #include #include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/scoped_refptr.h" #include "api/test/metrics/metric.h" #include "api/test/metrics/metrics_logger.h" #include "api/test/pclf/media_configuration.h" #include "api/test/video/video_frame_writer.h" +#include "api/test/video_quality_analyzer_interface.h" #include "api/units/timestamp.h" #include "api/video/i420_buffer.h" #include "api/video/video_frame.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/synchronization/mutex.h" +#include "system_wrappers/include/clock.h" +#include "test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h" #include "test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h" #include "test/pc/e2e/analyzer/video/video_dumping.h" #include "test/pc/e2e/metric_metadata_keys.h" @@ -58,15 +66,16 @@ void AnalyzingVideoSink::UpdateSubscription( MutexLock lock(&mutex_); subscription_ = subscription; for (auto it = stream_sinks_.cbegin(); it != stream_sinks_.cend();) { - absl::optional new_requested_resolution = + std::optional new_requested_resolution = subscription_.GetResolutionForPeer(it->second.sender_peer_name); - if (!new_requested_resolution.has_value() || - (*new_requested_resolution != it->second.resolution)) { + if (new_requested_resolution != it->second.resolution) { RTC_LOG(LS_INFO) << peer_name_ << ": Subscribed resolution for stream " << it->first << " from " << it->second.sender_peer_name << " was updated from " << it->second.resolution.ToString() << " to " - << new_requested_resolution->ToString() + << (new_requested_resolution.has_value() + ? new_requested_resolution->ToString() + : "none") << ". Repopulating all video sinks and recreating " << "requested video writers"; writers_to_close.insert(it->second.video_frame_writer); @@ -115,16 +124,13 @@ void AnalyzingVideoSink::LogMetrics(webrtc::test::MetricsLogger& metrics_logger, absl::string_view test_case_name) const { if (report_infra_stats_) { MutexLock lock(&mutex_); - const std::string test_case(test_case_name); - // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. 
std::map metadata = { - {MetricMetadataKey::kPeerMetadataKey, peer_name_}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case}}; + {MetricMetadataKey::kPeerMetadataKey, peer_name_}}; metrics_logger.LogMetric( - "analyzing_sink_processing_time_ms", test_case + "/" + peer_name_, + "analyzing_sink_processing_time_ms", test_case_name, stats_.analyzing_sink_processing_time_ms, test::Unit::kMilliseconds, test::ImprovementDirection::kSmallerIsBetter, metadata); - metrics_logger.LogMetric("scaling_tims_ms", test_case + "/" + peer_name_, + metrics_logger.LogMetric("scaling_tims_ms", test_case_name, stats_.scaling_tims_ms, test::Unit::kMilliseconds, test::ImprovementDirection::kSmallerIsBetter, metadata); @@ -140,8 +146,10 @@ VideoFrame AnalyzingVideoSink::ScaleVideoFrame( const VideoFrame& frame, const VideoResolution& required_resolution) { Timestamp processing_started = clock_->CurrentTime(); - if (required_resolution.width() == static_cast(frame.width()) && - required_resolution.height() == static_cast(frame.height())) { + if ((required_resolution.width() == static_cast(frame.width()) && + required_resolution.height() == static_cast(frame.height())) || + !required_resolution.IsRegular() || + (required_resolution.width() == 0 || required_resolution.height() == 0)) { if (report_infra_stats_) { stats_.scaling_tims_ms.AddSample( (clock_->CurrentTime() - processing_started).ms()); @@ -162,7 +170,7 @@ VideoFrame AnalyzingVideoSink::ScaleVideoFrame( << required_resolution.ToString() << "; actual resolution=" << frame.width() << "x" << frame.height(); - rtc::scoped_refptr scaled_buffer(I420Buffer::Create( + scoped_refptr scaled_buffer(I420Buffer::Create( required_resolution.width(), required_resolution.height())); scaled_buffer->ScaleFrom(*frame.video_frame_buffer()->ToI420()); @@ -191,14 +199,14 @@ AnalyzingVideoSink::SinksDescriptor* AnalyzingVideoSink::PopulateSinks( } // Slow pass: we need to create and save sinks - absl::optional> peer_and_config = + std::optional> peer_and_config = sinks_helper_->GetPeerAndConfig(stream_label); RTC_CHECK(peer_and_config.has_value()) << "No video config for stream " << stream_label; const std::string& sender_peer_name = peer_and_config->first; const VideoConfig& config = peer_and_config->second; - absl::optional resolution = + std::optional resolution = subscription_.GetResolutionForPeer(sender_peer_name); if (!resolution.has_value()) { RTC_LOG(LS_ERROR) << peer_name_ << " received stream " << stream_label diff --git a/test/pc/e2e/analyzer/video/analyzing_video_sink.h b/test/pc/e2e/analyzer/video/analyzing_video_sink.h index 4c7fbebe18..94752a88b4 100644 --- a/test/pc/e2e/analyzer/video/analyzing_video_sink.h +++ b/test/pc/e2e/analyzer/video/analyzing_video_sink.h @@ -33,7 +33,7 @@ namespace webrtc { namespace webrtc_pc_e2e { // A sink to inject video quality analyzer as a sink into WebRTC. -class AnalyzingVideoSink : public rtc::VideoSinkInterface { +class AnalyzingVideoSink : public VideoSinkInterface { public: struct Stats { // Time required to scale video frame to the requested rendered resolution. @@ -76,7 +76,7 @@ class AnalyzingVideoSink : public rtc::VideoSinkInterface { // Is set if dumping of output video was requested; test::VideoFrameWriter* video_frame_writer = nullptr; - std::vector>> sinks; + std::vector>> sinks; }; // Scales video frame to `required_resolution` if necessary. 
Crashes if video diff --git a/test/pc/e2e/analyzer/video/analyzing_video_sink_test.cc b/test/pc/e2e/analyzer/video/analyzing_video_sink_test.cc index 6cd89551ea..078fb18fd0 100644 --- a/test/pc/e2e/analyzer/video/analyzing_video_sink_test.cc +++ b/test/pc/e2e/analyzer/video/analyzing_video_sink_test.cc @@ -11,11 +11,11 @@ #include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/scoped_refptr.h" #include "api/test/create_frame_generator.h" #include "api/test/frame_generator_interface.h" @@ -45,7 +45,7 @@ using ::testing::Test; // Remove files and directories in a directory non-recursively. void CleanDir(absl::string_view dir, size_t expected_output_files_count) { - absl::optional> dir_content = + std::optional> dir_content = test::ReadDirectory(dir); if (expected_output_files_count == 0) { ASSERT_TRUE(!dir_content.has_value() || dir_content->empty()) @@ -81,8 +81,8 @@ std::unique_ptr CreateFrameGenerator( size_t width, size_t height) { return test::CreateSquareFrameGenerator(width, height, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); } void AssertFrameIdsAre(const std::string& filename, @@ -160,8 +160,8 @@ TEST_F(AnalyzingVideoSinkTest, VideoFramesAreDumpedCorrectly) { auto frame_reader = test::CreateY4mFrameReader( test::JoinFilename(test_directory_, "alice_video_bob_640x360_30.y4m")); EXPECT_THAT(frame_reader->num_frames(), Eq(1)); - rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); - rtc::scoped_refptr expected_frame = + scoped_refptr actual_frame = frame_reader->PullFrame(); + scoped_refptr expected_frame = frame.video_frame_buffer()->ToI420(); double psnr = I420PSNR(*expected_frame, *actual_frame); double ssim = I420SSIM(*expected_frame, *actual_frame); @@ -200,8 +200,8 @@ TEST_F(AnalyzingVideoSinkTest, auto frame_reader = test::CreateY4mFrameReader( test::JoinFilename(test_directory_, "alice_video_bob_320x240_30.y4m")); EXPECT_THAT(frame_reader->num_frames(), Eq(1)); - rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); - rtc::scoped_refptr expected_frame = + scoped_refptr actual_frame = frame_reader->PullFrame(); + scoped_refptr expected_frame = frame.video_frame_buffer()->ToI420(); double psnr = I420PSNR(*expected_frame, *actual_frame); double ssim = I420SSIM(*expected_frame, *actual_frame); @@ -242,8 +242,8 @@ TEST_F(AnalyzingVideoSinkTest, auto frame_reader = test::CreateY4mFrameReader( test::JoinFilename(test_directory_, "alice_video_bob_320x240_30.y4m")); EXPECT_THAT(frame_reader->num_frames(), Eq(1)); - rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); - rtc::scoped_refptr expected_frame = + scoped_refptr actual_frame = frame_reader->PullFrame(); + scoped_refptr expected_frame = frame.video_frame_buffer()->ToI420(); double psnr = I420PSNR(*expected_frame, *actual_frame); double ssim = I420SSIM(*expected_frame, *actual_frame); @@ -295,8 +295,8 @@ TEST_F(AnalyzingVideoSinkTest, auto frame_reader = test::CreateY4mFrameReader( test::JoinFilename(test_directory_, "alice_video_bob_1280x720_30.y4m")); EXPECT_THAT(frame_reader->num_frames(), Eq(1)); - rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); - rtc::scoped_refptr expected_frame = + scoped_refptr actual_frame = frame_reader->PullFrame(); + scoped_refptr expected_frame = frame_before.video_frame_buffer()->ToI420(); double psnr = I420PSNR(*expected_frame, *actual_frame); double ssim = I420SSIM(*expected_frame, *actual_frame); @@ -308,8 +308,8 @@ 
TEST_F(AnalyzingVideoSinkTest, auto frame_reader = test::CreateY4mFrameReader( test::JoinFilename(test_directory_, "alice_video_bob_640x360_30.y4m")); EXPECT_THAT(frame_reader->num_frames(), Eq(1)); - rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); - rtc::scoped_refptr expected_frame = + scoped_refptr actual_frame = frame_reader->PullFrame(); + scoped_refptr expected_frame = frame_after.video_frame_buffer()->ToI420(); double psnr = I420PSNR(*expected_frame, *actual_frame); double ssim = I420SSIM(*expected_frame, *actual_frame); @@ -321,6 +321,74 @@ TEST_F(AnalyzingVideoSinkTest, ExpectOutputFilesCount(2); } +TEST_F(AnalyzingVideoSinkTest, KeepsCountingFrameWhenUnsucsribed) { + VideoSubscription subscription_before; + subscription_before.SubscribeToPeer( + "alice", VideoResolution(/*width=*/1280, /*height=*/720, /*fps=*/30)); + + VideoConfig video_config("alice_video", /*width=*/1280, /*height=*/720, + /*fps=*/30); + + ExampleVideoQualityAnalyzer analyzer; + std::unique_ptr frame_generator = + CreateFrameGenerator(/*width=*/1280, /*height=*/720); + VideoFrame frame_before = CreateFrame(*frame_generator); + frame_before.set_id( + analyzer.OnFrameCaptured("alice", "alice_video", frame_before)); + VideoFrame frame_after = CreateFrame(*frame_generator); + frame_after.set_id( + analyzer.OnFrameCaptured("alice", "alice_video", frame_after)); + + { + AnalyzingVideoSinksHelper helper; + helper.AddConfig("alice", video_config); + AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper, + subscription_before, /*report_infra_stats=*/false); + sink.OnFrame(frame_before); + + sink.UpdateSubscription(VideoSubscription()); + sink.OnFrame(frame_after); + } + + EXPECT_THAT(analyzer.frames_rendered(), Eq(2)); +} + +TEST_F(AnalyzingVideoSinkTest, + KeepsCountingFrameWhenUnsucsribedUsingEmptyResolution) { + VideoSubscription subscription_before; + subscription_before.SubscribeToPeer( + "alice", VideoResolution(/*width=*/1280, /*height=*/720, /*fps=*/30)); + VideoSubscription subscription_after; + subscription_after.SubscribeToPeer( + "alice", VideoResolution(/*width=*/0, /*height=*/0, /*fps=*/0)); + + VideoConfig video_config("alice_video", /*width=*/1280, /*height=*/720, + /*fps=*/30); + + ExampleVideoQualityAnalyzer analyzer; + std::unique_ptr frame_generator = + CreateFrameGenerator(/*width=*/1280, /*height=*/720); + VideoFrame frame_before = CreateFrame(*frame_generator); + frame_before.set_id( + analyzer.OnFrameCaptured("alice", "alice_video", frame_before)); + VideoFrame frame_after = CreateFrame(*frame_generator); + frame_after.set_id( + analyzer.OnFrameCaptured("alice", "alice_video", frame_after)); + + { + AnalyzingVideoSinksHelper helper; + helper.AddConfig("alice", video_config); + AnalyzingVideoSink sink("bob", Clock::GetRealTimeClock(), analyzer, helper, + subscription_before, /*report_infra_stats=*/false); + sink.OnFrame(frame_before); + + sink.UpdateSubscription(subscription_after); + sink.OnFrame(frame_after); + } + + EXPECT_THAT(analyzer.frames_rendered(), Eq(2)); +} + TEST_F(AnalyzingVideoSinkTest, VideoFramesAreDumpedCorrectlyWhenSubscriptionChangedOnTheSameOne) { VideoSubscription subscription_before; @@ -363,8 +431,8 @@ TEST_F(AnalyzingVideoSinkTest, test::JoinFilename(test_directory_, "alice_video_bob_640x360_30.y4m")); EXPECT_THAT(frame_reader->num_frames(), Eq(2)); // Read the first frame. 
- rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); - rtc::scoped_refptr expected_frame = + scoped_refptr actual_frame = frame_reader->PullFrame(); + scoped_refptr expected_frame = frame_before.video_frame_buffer()->ToI420(); // Frames should be equal. EXPECT_DOUBLE_EQ(I420SSIM(*expected_frame, *actual_frame), 1.00); @@ -412,8 +480,8 @@ TEST_F(AnalyzingVideoSinkTest, SmallDiviationsInAspectRationAreAllowed) { test::JoinFilename(test_directory_, "alice_video_bob_480x270_30.y4m")); EXPECT_THAT(frame_reader->num_frames(), Eq(1)); // Read the first frame. - rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); - rtc::scoped_refptr expected_frame = + scoped_refptr actual_frame = frame_reader->PullFrame(); + scoped_refptr expected_frame = frame.video_frame_buffer()->ToI420(); // Actual frame is upscaled version of the expected. But because rendered // resolution is equal to the actual frame size we need to upscale expected @@ -507,8 +575,8 @@ TEST_F(AnalyzingVideoSinkTest, test::JoinFilename(test_directory_, "alice_video_bob_320x240_10.y4m")); EXPECT_THAT(frame_reader->num_frames(), Eq(11)); for (int i = 0; i < 10; ++i) { - rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); - rtc::scoped_refptr expected_frame = + scoped_refptr actual_frame = frame_reader->PullFrame(); + scoped_refptr expected_frame = frame1.video_frame_buffer()->ToI420(); double psnr = I420PSNR(*expected_frame, *actual_frame); double ssim = I420SSIM(*expected_frame, *actual_frame); @@ -516,8 +584,8 @@ TEST_F(AnalyzingVideoSinkTest, EXPECT_DOUBLE_EQ(ssim, 1.00); EXPECT_DOUBLE_EQ(psnr, 48); } - rtc::scoped_refptr actual_frame = frame_reader->PullFrame(); - rtc::scoped_refptr expected_frame = + scoped_refptr actual_frame = frame_reader->PullFrame(); + scoped_refptr expected_frame = frame2.video_frame_buffer()->ToI420(); double psnr = I420PSNR(*expected_frame, *actual_frame); double ssim = I420SSIM(*expected_frame, *actual_frame); diff --git a/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.cc b/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.cc index 70dc4b00b5..cf4b1bb583 100644 --- a/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.cc +++ b/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.cc @@ -36,12 +36,12 @@ void AnalyzingVideoSinksHelper::AddConfig(absl::string_view sender_peer_name, } } -absl::optional> +std::optional> AnalyzingVideoSinksHelper::GetPeerAndConfig(absl::string_view stream_label) { MutexLock lock(&mutex_); auto it = video_configs_.find(std::string(stream_label)); if (it == video_configs_.end()) { - return absl::nullopt; + return std::nullopt; } return it->second; } diff --git a/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h b/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h index 5f38c5a40e..a4a92af542 100644 --- a/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h +++ b/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h @@ -14,12 +14,12 @@ #include #include #include +#include #include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/test/pclf/media_configuration.h" #include "api/test/video/video_frame_writer.h" #include "rtc_base/synchronization/mutex.h" @@ -35,7 +35,7 @@ class AnalyzingVideoSinksHelper { // Adds config in the registry. If config with such stream label was // registered before, the new value will override the old one. 
void AddConfig(absl::string_view sender_peer_name, VideoConfig config); - absl::optional> GetPeerAndConfig( + std::optional> GetPeerAndConfig( absl::string_view stream_label); // Removes video config for specified stream label. If there are no know video // config for such stream label - does nothing. diff --git a/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper_test.cc b/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper_test.cc index 1a820a5229..792f44f1d4 100644 --- a/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper_test.cc +++ b/test/pc/e2e/analyzer/video/analyzing_video_sinks_helper_test.cc @@ -10,10 +10,10 @@ #include "test/pc/e2e/analyzer/video/analyzing_video_sinks_helper.h" #include +#include #include #include -#include "absl/types/optional.h" #include "api/test/pclf/media_configuration.h" #include "test/gmock.h" #include "test/gtest.h" @@ -40,7 +40,7 @@ TEST(AnalyzingVideoSinksHelperTest, ConfigsCanBeAdded) { AnalyzingVideoSinksHelper helper; helper.AddConfig("alice", config); - absl::optional> registred_config = + std::optional> registred_config = helper.GetPeerAndConfig("alice_video"); ASSERT_TRUE(registred_config.has_value()); EXPECT_THAT(registred_config->first, Eq("alice")); @@ -56,7 +56,7 @@ TEST(AnalyzingVideoSinksHelperTest, AddingForExistingLabelWillOverwriteValue) { AnalyzingVideoSinksHelper helper; helper.AddConfig("alice", config_before); - absl::optional> registred_config = + std::optional> registred_config = helper.GetPeerAndConfig("alice_video"); ASSERT_TRUE(registred_config.has_value()); EXPECT_THAT(registred_config->first, Eq("alice")); diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc index 473461c3ba..42f45abc7a 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc @@ -11,29 +11,42 @@ #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h" #include +#include +#include #include #include +#include #include #include #include #include +#include "absl/flags/flag.h" +#include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/numerics/samples_stats_counter.h" +#include "api/rtp_packet_info.h" #include "api/test/metrics/metric.h" +#include "api/test/metrics/metrics_logger.h" +#include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "api/video/encoded_image.h" #include "api/video/video_frame.h" +#include "api/video_codecs/video_encoder.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" #include "system_wrappers/include/clock.h" #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h" #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h" #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h" #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h" #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h" +#include "test/pc/e2e/analyzer/video/names_collection.h" #include "test/pc/e2e/metric_metadata_keys.h" +#include "test/test_flags.h" namespace webrtc { namespace { @@ -111,10 +124,10 @@ void LogStreamInternalStats(const std::string& name, } template -absl::optional MaybeGetValue(const std::map& map, size_t key) { +std::optional 
MaybeGetValue(const std::map& map, size_t key) { auto it = map.find(key); if (it == map.end()) { - return absl::nullopt; + return std::nullopt; } return it->second; } @@ -142,10 +155,9 @@ DefaultVideoQualityAnalyzer::~DefaultVideoQualityAnalyzer() { Stop(); } -void DefaultVideoQualityAnalyzer::Start( - std::string test_case_name, - rtc::ArrayView peer_names, - int max_threads_count) { +void DefaultVideoQualityAnalyzer::Start(std::string test_case_name, + ArrayView peer_names, + int max_threads_count) { test_label_ = std::move(test_case_name); frames_comparator_.Start(max_threads_count); { @@ -205,21 +217,21 @@ uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured( auto state_it = stream_states_.find(stream_index); if (state_it == stream_states_.end()) { - stream_states_.emplace(stream_index, - StreamState(peer_index, frame_receivers_indexes, - captured_time, clock_)); + stream_states_.emplace( + stream_index, AnalyzerStreamState(peer_index, frame_receivers_indexes, + captured_time, clock_)); } - StreamState* state = &stream_states_.at(stream_index); + AnalyzerStreamState* state = &stream_states_.at(stream_index); state->PushBack(frame_id); - absl::optional time_between_captured_frames = absl::nullopt; + std::optional time_between_captured_frames = std::nullopt; if (state->last_captured_frame_time().has_value()) { time_between_captured_frames = captured_time - *state->last_captured_frame_time(); } state->SetLastCapturedFrameTime(captured_time); // Update frames in flight info. - auto it = captured_frames_in_flight_.find(frame_id); - if (it != captured_frames_in_flight_.end()) { + auto captured_frame_it = captured_frames_in_flight_.find(frame_id); + if (captured_frame_it != captured_frames_in_flight_.end()) { // If we overflow uint16_t and hit previous frame id and this frame is // still in flight, it means that this stream wasn't rendered for long // time and we need to process existing frame as dropped. @@ -243,13 +255,13 @@ uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured( StatsSample(captured_frames_in_flight_.size(), Now())); frames_comparator_.AddComparison( InternalStatsKey(stream_index, peer_index, i), - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, FrameComparisonType::kDroppedFrame, - it->second.GetStatsForPeer(i)); + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kDroppedFrame, + captured_frame_it->second.GetStatsForPeer(i)); } - frames_storage_.Remove(it->second.id()); - captured_frames_in_flight_.erase(it); + frames_storage_.Remove(captured_frame_it->second.id()); + captured_frames_in_flight_.erase(captured_frame_it); } captured_frames_in_flight_.emplace( frame_id, FrameInFlight(stream_index, frame_id, captured_time, @@ -285,8 +297,12 @@ void DefaultVideoQualityAnalyzer::OnFramePreEncode( << "DefaultVideoQualityAnalyzer has to be started before use"; auto it = captured_frames_in_flight_.find(frame.id()); - RTC_CHECK(it != captured_frames_in_flight_.end()) - << "Frame id=" << frame.id() << " not found"; + if (it == captured_frames_in_flight_.end()) { + // If the frame is not found, it is possible that it has been encoded twice + // and that it was received by all the participants the first time. 
+ RTC_LOG(LS_WARNING) << "Frame id=" << frame.id() << " not found."; + return; + } FrameInFlight& frame_in_flight = it->second; frame_counters_.pre_encoded++; size_t peer_index = peers_->index(peer_name); @@ -344,8 +360,8 @@ void DefaultVideoQualityAnalyzer::OnFrameEncoded( } } Timestamp now = Now(); - StreamState& state = stream_states_.at(frame_in_flight.stream()); - absl::optional time_between_encoded_frames = absl::nullopt; + AnalyzerStreamState& state = stream_states_.at(frame_in_flight.stream()); + std::optional time_between_encoded_frames = std::nullopt; if (state.last_encoded_frame_time().has_value()) { time_between_encoded_frames = now - *state.last_encoded_frame_time(); } @@ -472,7 +488,7 @@ void DefaultVideoQualityAnalyzer::OnFrameDecoded( used_decoder.switched_on_at = now; used_decoder.switched_from_at = now; it->second.OnFrameDecoded(peer_index, now, frame.width(), frame.height(), - used_decoder); + used_decoder, stats.qp); if (options_.report_infra_metrics) { analyzer_stats_.on_frame_decoded_processing_time_ms.AddSample( @@ -499,7 +515,8 @@ void DefaultVideoQualityAnalyzer::OnFrameRendered( auto frame_it = captured_frames_in_flight_.find(frame.id()); if (frame_it == captured_frames_in_flight_.end() || frame_it->second.HasRenderedTime(peer_index) || - frame_it->second.IsDropped(peer_index)) { + frame_it->second.IsDropped(peer_index) || + frame_it->second.IsSuperfluous(peer_index)) { // It means this frame was rendered or dropped before, so we can skip it. // It may happen when we have multiple simulcast streams in one track and // received the same frame from two different streams because SFU can't @@ -510,7 +527,8 @@ void DefaultVideoQualityAnalyzer::OnFrameRendered( if (frame_it != captured_frames_in_flight_.end()) { if (frame_it->second.HasRenderedTime(peer_index)) { reason = kSkipRenderedFrameReasonRendered; - } else if (frame_it->second.IsDropped(peer_index)) { + } else if (frame_it->second.IsDropped(peer_index) || + frame_it->second.IsSuperfluous(peer_index)) { reason = kSkipRenderedFrameReasonDropped; } } @@ -523,10 +541,10 @@ void DefaultVideoQualityAnalyzer::OnFrameRendered( // Find corresponding captured frame. FrameInFlight* frame_in_flight = &frame_it->second; - absl::optional captured_frame = frames_storage_.Get(frame.id()); + std::optional captured_frame = frames_storage_.Get(frame.id()); const size_t stream_index = frame_in_flight->stream(); - StreamState* state = &stream_states_.at(stream_index); + AnalyzerStreamState* state = &stream_states_.at(stream_index); const InternalStatsKey stats_key(stream_index, state->sender(), peer_index); // Update frames counters. @@ -695,7 +713,7 @@ void DefaultVideoQualityAnalyzer::UnregisterParticipantInCall( absl::string_view peer_name) { MutexLock lock(&mutex_); RTC_CHECK(peers_->HasName(peer_name)); - absl::optional peer_index = peers_->RemoveIfPresent(peer_name); + std::optional peer_index = peers_->RemoveIfPresent(peer_name); RTC_CHECK(peer_index.has_value()); for (auto& [stream_index, stream_state] : stream_states_) { @@ -785,7 +803,7 @@ void DefaultVideoQualityAnalyzer::Stop() { for (auto& state_entry : stream_states_) { const size_t stream_index = state_entry.first; - StreamState& stream_state = state_entry.second; + AnalyzerStreamState& stream_state = state_entry.second; // Populate `last_rendered_frame_times` map for all peers that were met in // call, not only for the currently presented ones. 
@@ -945,9 +963,10 @@ uint16_t DefaultVideoQualityAnalyzer::GetNextFrameId() { } void DefaultVideoQualityAnalyzer:: - AddExistingFramesInFlightForStreamToComparator(size_t stream_index, - StreamState& stream_state, - size_t peer_index) { + AddExistingFramesInFlightForStreamToComparator( + size_t stream_index, + AnalyzerStreamState& stream_state, + size_t peer_index) { InternalStatsKey stats_key(stream_index, stream_state.sender(), peer_index); // Add frames in flight for this stream into frames comparator. @@ -960,8 +979,8 @@ void DefaultVideoQualityAnalyzer:: RTC_DCHECK(it != captured_frames_in_flight_.end()); FrameInFlight& frame = it->second; - frames_comparator_.AddComparison(stats_key, /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + frames_comparator_.AddComparison(stats_key, /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kFrameInFlight, frame.GetStatsForPeer(peer_index)); } @@ -971,7 +990,7 @@ int DefaultVideoQualityAnalyzer::ProcessNotSeenFramesBeforeRendered( size_t peer_index, uint16_t rendered_frame_id, const InternalStatsKey& stats_key, - StreamState& state) { + AnalyzerStreamState& state) { int dropped_count = 0; while (!state.IsEmpty(peer_index) && state.Front(peer_index) != rendered_frame_id) { @@ -1049,8 +1068,8 @@ int DefaultVideoQualityAnalyzer::ProcessNotSeenFramesBeforeRendered( analyzer_stats_.frames_in_flight_left_count.AddSample( StatsSample(captured_frames_in_flight_.size(), Now())); - frames_comparator_.AddComparison(stats_key, /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + frames_comparator_.AddComparison(stats_key, /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kDroppedFrame, next_frame.GetStatsForPeer(peer_index)); } else { @@ -1071,11 +1090,9 @@ void DefaultVideoQualityAnalyzer::ReportResults() { ReportResults(item.first, item.second, stream_frame_counters_.at(item.first)); } - // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. - metrics_logger_->LogSingleValueMetric( - "cpu_usage_%", test_label_, GetCpuUsagePercent(), Unit::kUnitless, - ImprovementDirection::kSmallerIsBetter, - {{MetricMetadataKey::kExperimentalTestNameMetadataKey, test_label_}}); + metrics_logger_->LogSingleValueMetric("cpu_usage_%", test_label_, + GetCpuUsagePercent(), Unit::kUnitless, + ImprovementDirection::kSmallerIsBetter); LogFrameCounters("Global", frame_counters_); if (!unknown_sender_frame_counters_.empty()) { RTC_LOG(LS_INFO) << "Received frame counters with unknown frame id:"; @@ -1167,13 +1184,11 @@ void DefaultVideoQualityAnalyzer::ReportResults( const FrameCounters& frame_counters) { TimeDelta test_duration = Now() - start_time_; std::string test_case_name = GetTestCaseName(ToMetricName(key)); - // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. 
std::map metric_metadata{ {MetricMetadataKey::kPeerMetadataKey, peers_->name(key.sender)}, {MetricMetadataKey::kVideoStreamMetadataKey, streams_.name(key.stream)}, {MetricMetadataKey::kSenderMetadataKey, peers_->name(key.sender)}, - {MetricMetadataKey::kReceiverMetadataKey, peers_->name(key.receiver)}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_label_}}; + {MetricMetadataKey::kReceiverMetadataKey, peers_->name(key.receiver)}}; metrics_logger_->LogMetric( "psnr_dB", test_case_name, stats.psnr, Unit::kUnitless, @@ -1254,6 +1269,9 @@ void DefaultVideoQualityAnalyzer::ReportResults( ImprovementDirection::kSmallerIsBetter, std::move(qp_metadata)); } + metrics_logger_->LogMetric( + "rendered_frame_qp", test_case_name, stats.rendered_frame_qp, + Unit::kUnitless, ImprovementDirection::kSmallerIsBetter, metric_metadata); metrics_logger_->LogSingleValueMetric( "actual_encode_bitrate", test_case_name, static_cast(stats.total_encoded_images_payload) / @@ -1294,7 +1312,10 @@ void DefaultVideoQualityAnalyzer::ReportResults( std::string DefaultVideoQualityAnalyzer::GetTestCaseName( const std::string& stream_label) const { - return test_label_ + "/" + stream_label; + if (!absl::GetFlag(FLAGS_isolated_script_test_perf_output).empty()) { + return test_label_ + "/" + stream_label; + } + return test_label_; } Timestamp DefaultVideoQualityAnalyzer::Now() { @@ -1313,7 +1334,7 @@ std::string DefaultVideoQualityAnalyzer::ToMetricName( // TODO(titovartem): remove this special case. return stream_label; } - rtc::StringBuilder out; + StringBuilder out; out << stream_label << "_" << peers_->name(key.sender) << "_" << peers_->name(key.receiver); return out.str(); diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h index 0fff92a40a..943cf8cbcf 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer.h @@ -50,7 +50,7 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface { ~DefaultVideoQualityAnalyzer() override; void Start(std::string test_case_name, - rtc::ArrayView peer_names, + ArrayView peer_names, int max_threads_count) override; uint16_t OnFrameCaptured(absl::string_view peer_name, const std::string& stream_label, @@ -92,7 +92,7 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface { std::string GetSenderPeerName(uint16_t frame_id) const override; void OnStatsReports( absl::string_view pc_label, - const rtc::scoped_refptr& report) override {} + const scoped_refptr& report) override {} // Returns set of stream labels, that were met during test call. std::set GetKnownVideoStreams() const; @@ -120,10 +120,10 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface { // because this value is reserved by `VideoFrame` as "ID not set". uint16_t GetNextFrameId() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - void AddExistingFramesInFlightForStreamToComparator(size_t stream_index, - StreamState& stream_state, - size_t peer_index) - RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + void AddExistingFramesInFlightForStreamToComparator( + size_t stream_index, + AnalyzerStreamState& stream_state, + size_t peer_index) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Processes frames for the peer identified by `peer_index` up to // `rendered_frame_id` (excluded). 
Sends each dropped frame for comparison and @@ -133,7 +133,7 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface { int ProcessNotSeenFramesBeforeRendered(size_t peer_index, uint16_t rendered_frame_id, const InternalStatsKey& stats_key, - StreamState& state) + AnalyzerStreamState& state) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Report results for all metrics for all streams. @@ -194,7 +194,8 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface { std::map stream_frame_counters_ RTC_GUARDED_BY(mutex_); // Map from stream index in `streams_` to its StreamState. - std::unordered_map stream_states_ RTC_GUARDED_BY(mutex_); + std::unordered_map stream_states_ + RTC_GUARDED_BY(mutex_); // Map from stream index in `streams_` to sender peer index in `peers_`. std::unordered_map stream_to_sender_ RTC_GUARDED_BY(mutex_); diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.cc index 0cee4b4b3b..216406c210 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_cpu_measurer.cc @@ -17,24 +17,24 @@ namespace webrtc { void DefaultVideoQualityAnalyzerCpuMeasurer::StartMeasuringCpuProcessTime() { MutexLock lock(&mutex_); - cpu_time_ -= rtc::GetProcessCpuTimeNanos(); - wallclock_time_ -= rtc::SystemTimeNanos(); + cpu_time_ -= GetProcessCpuTimeNanos(); + wallclock_time_ -= SystemTimeNanos(); } void DefaultVideoQualityAnalyzerCpuMeasurer::StopMeasuringCpuProcessTime() { MutexLock lock(&mutex_); - cpu_time_ += rtc::GetProcessCpuTimeNanos(); - wallclock_time_ += rtc::SystemTimeNanos(); + cpu_time_ += GetProcessCpuTimeNanos(); + wallclock_time_ += SystemTimeNanos(); } void DefaultVideoQualityAnalyzerCpuMeasurer::StartExcludingCpuThreadTime() { MutexLock lock(&mutex_); - cpu_time_ += rtc::GetThreadCpuTimeNanos(); + cpu_time_ += GetThreadCpuTimeNanos(); } void DefaultVideoQualityAnalyzerCpuMeasurer::StopExcludingCpuThreadTime() { MutexLock lock(&mutex_); - cpu_time_ -= rtc::GetThreadCpuTimeNanos(); + cpu_time_ -= GetThreadCpuTimeNanos(); } double DefaultVideoQualityAnalyzerCpuMeasurer::GetCpuUsagePercent() const { diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc index 056bd73f88..fb96ee6ee5 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.cc @@ -10,11 +10,11 @@ #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h" +#include #include #include #include -#include "absl/types/optional.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -26,11 +26,11 @@ namespace webrtc { namespace { template -absl::optional MaybeGetValue(const std::unordered_map& map, - size_t key) { +std::optional MaybeGetValue(const std::unordered_map& map, + size_t key) { auto it = map.find(key); if (it == map.end()) { - return absl::nullopt; + return std::nullopt; } return it->second; } @@ -41,7 +41,7 @@ FrameInFlight::FrameInFlight( size_t stream, uint16_t frame_id, Timestamp captured_time, - absl::optional time_between_captured_frames, + std::optional time_between_captured_frames, std::set expected_receivers) : stream_(stream), expected_receivers_(std::move(expected_receivers)), @@ 
-79,7 +79,7 @@ bool FrameInFlight::HaveAllPeersReceived() const { void FrameInFlight::OnFrameEncoded( webrtc::Timestamp time, - absl::optional time_between_encoded_frames, + std::optional time_between_encoded_frames, VideoFrameType frame_type, DataSize encoded_image_size, uint32_t target_encode_bitrate, @@ -93,7 +93,7 @@ void FrameInFlight::OnFrameEncoded( *time_between_encoded_frames; } frame_type_ = frame_type; - encoded_image_size_ = encoded_image_size; + encoded_image_size_ += encoded_image_size; target_encode_bitrate_ += target_encode_bitrate; stream_layers_qp_[stream_index].AddSample(SamplesStatsCounter::StatsSample{ .value = static_cast(qp), .time = time}); @@ -135,11 +135,13 @@ void FrameInFlight::OnFrameDecoded(size_t peer, webrtc::Timestamp time, int width, int height, - const StreamCodecInfo& used_decoder) { + const StreamCodecInfo& used_decoder, + const std::optional qp) { receiver_stats_[peer].decode_end_time = time; receiver_stats_[peer].used_decoder = used_decoder; receiver_stats_[peer].decoded_frame_width = width; receiver_stats_[peer].decoded_frame_height = height; + receiver_stats_[peer].decoded_frame_qp = qp; } void FrameInFlight::OnDecoderError(size_t peer, @@ -192,7 +194,7 @@ FrameStats FrameInFlight::GetStatsForPeer(size_t peer) const { stats.used_encoder = used_encoder_; stats.spatial_layers_qp = stream_layers_qp_; - absl::optional receiver_stats = + std::optional receiver_stats = MaybeGetValue(receiver_stats_, peer); if (receiver_stats.has_value()) { stats.received_time = receiver_stats->received_time; @@ -204,6 +206,7 @@ FrameStats FrameInFlight::GetStatsForPeer(size_t peer) const { receiver_stats->time_between_rendered_frames; stats.decoded_frame_width = receiver_stats->decoded_frame_width; stats.decoded_frame_height = receiver_stats->decoded_frame_height; + stats.decoded_frame_qp = receiver_stats->decoded_frame_qp; stats.used_decoder = receiver_stats->used_decoder; stats.pre_decoded_frame_type = receiver_stats->frame_type; stats.pre_decoded_image_size = receiver_stats->encoded_image_size; diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h index d73de29afb..c116cf69b2 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h @@ -12,12 +12,12 @@ #define TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_FRAME_IN_FLIGHT_H_ #include +#include #include #include #include #include -#include "absl/types/optional.h" #include "api/numerics/samples_stats_counter.h" #include "api/units/data_size.h" #include "api/units/time_delta.h" @@ -36,18 +36,20 @@ struct ReceiverFrameStats { Timestamp rendered_time = Timestamp::MinusInfinity(); // Will be set if there is frame rendered before this one. - absl::optional prev_frame_rendered_time = absl::nullopt; - absl::optional time_between_rendered_frames = absl::nullopt; + std::optional prev_frame_rendered_time = std::nullopt; + std::optional time_between_rendered_frames = std::nullopt; // Type and encoded size of received frame. 
VideoFrameType frame_type = VideoFrameType::kEmptyFrame; DataSize encoded_image_size = DataSize::Bytes(0); - absl::optional decoded_frame_width = absl::nullopt; - absl::optional decoded_frame_height = absl::nullopt; + std::optional decoded_frame_width = std::nullopt; + std::optional decoded_frame_height = std::nullopt; + + std::optional decoded_frame_qp = std::nullopt; // Can be not set if frame was dropped in the network. - absl::optional used_decoder = absl::nullopt; + std::optional used_decoder = std::nullopt; bool dropped = false; bool decoder_failed = false; @@ -69,7 +71,7 @@ class FrameInFlight { FrameInFlight(size_t stream, uint16_t frame_id, Timestamp captured_time, - absl::optional time_between_captured_frames, + std::optional time_between_captured_frames, std::set expected_receivers); size_t stream() const { return stream_; } @@ -91,7 +93,7 @@ class FrameInFlight { void SetPreEncodeTime(Timestamp time) { pre_encode_time_ = time; } void OnFrameEncoded(Timestamp time, - absl::optional time_between_encoded_frames, + std::optional time_between_encoded_frames, VideoFrameType frame_type, DataSize encoded_image_size, uint32_t target_encode_bitrate, @@ -113,7 +115,9 @@ class FrameInFlight { Timestamp time, int width, int height, - const StreamCodecInfo& used_decoder); + const StreamCodecInfo& used_decoder, + const std::optional qp); + void OnDecoderError(size_t peer, const StreamCodecInfo& used_decoder); bool HasDecodeEndTime(size_t peer) const; @@ -134,6 +138,7 @@ class FrameInFlight { void MarkSuperfluous(size_t peer) { receiver_stats_[peer].superfluous = true; } + bool IsSuperfluous(size_t peer) const; void SetPrevFrameRenderedTime(size_t peer, webrtc::Timestamp time) { receiver_stats_[peer].prev_frame_rendered_time = time; @@ -146,8 +151,6 @@ class FrameInFlight { FrameStats GetStatsForPeer(size_t peer) const; private: - bool IsSuperfluous(size_t peer) const; - const size_t stream_; // Set of peer's indexes who are expected to receive this frame. This is not // the set of peer's indexes that received the frame. For example, if peer A @@ -166,8 +169,8 @@ class FrameInFlight { Timestamp pre_encode_time_ = Timestamp::MinusInfinity(); Timestamp encoded_time_ = Timestamp::MinusInfinity(); - absl::optional time_between_captured_frames_ = absl::nullopt; - absl::optional time_between_encoded_frames_ = absl::nullopt; + std::optional time_between_captured_frames_ = std::nullopt; + std::optional time_between_encoded_frames_ = std::nullopt; // Type and encoded size of sent frame. VideoFrameType frame_type_ = VideoFrameType::kEmptyFrame; @@ -177,7 +180,7 @@ class FrameInFlight { // spatial or simulcast index is set in `EncodedImage`, 0 is used. std::map stream_layers_qp_; // Can be not set if frame was dropped by encoder. - absl::optional used_encoder_ = absl::nullopt; + std::optional used_encoder_ = std::nullopt; // Map from the receiver peer's index to frame stats for that peer. 
std::unordered_map receiver_stats_; }; diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc index eb10aef34c..4a9906a401 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.cc @@ -12,11 +12,11 @@ #include #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" @@ -187,7 +187,7 @@ FrameComparison ValidateFrameComparison(FrameComparison comparison) { void DefaultVideoQualityAnalyzerFramesComparator::Start(int max_threads_count) { for (int i = 0; i < max_threads_count; i++) { - thread_pool_.push_back(rtc::PlatformThread::SpawnJoinable( + thread_pool_.push_back(PlatformThread::SpawnJoinable( [this] { ProcessComparisons(); }, "DefaultVideoQualityAnalyzerFramesComparator-" + std::to_string(i))); } @@ -302,7 +302,7 @@ void DefaultVideoQualityAnalyzerFramesComparator::EnsureStatsForStream( } void DefaultVideoQualityAnalyzerFramesComparator::RegisterParticipantInCall( - rtc::ArrayView> stream_started_time, + ArrayView> stream_started_time, Timestamp start_time) { MutexLock lock(&mutex_); RTC_CHECK_EQ(state_, State::kActive) @@ -317,8 +317,8 @@ void DefaultVideoQualityAnalyzerFramesComparator::RegisterParticipantInCall( void DefaultVideoQualityAnalyzerFramesComparator::AddComparison( InternalStatsKey stats_key, - absl::optional captured, - absl::optional rendered, + std::optional captured, + std::optional rendered, FrameComparisonType type, FrameStats frame_stats) { MutexLock lock(&mutex_); @@ -331,8 +331,8 @@ void DefaultVideoQualityAnalyzerFramesComparator::AddComparison( void DefaultVideoQualityAnalyzerFramesComparator::AddComparison( InternalStatsKey stats_key, int skipped_between_rendered, - absl::optional captured, - absl::optional rendered, + std::optional captured, + std::optional rendered, FrameComparisonType type, FrameStats frame_stats) { MutexLock lock(&mutex_); @@ -349,8 +349,8 @@ void DefaultVideoQualityAnalyzerFramesComparator::AddComparison( void DefaultVideoQualityAnalyzerFramesComparator::AddComparisonInternal( InternalStatsKey stats_key, - absl::optional captured, - absl::optional rendered, + std::optional captured, + std::optional rendered, FrameComparisonType type, FrameStats frame_stats) { cpu_measurer_.StartExcludingCpuThreadTime(); @@ -360,9 +360,9 @@ void DefaultVideoQualityAnalyzerFramesComparator::AddComparisonInternal( // frames itself to make future computations lighter. if (comparisons_.size() >= kMaxActiveComparisons) { comparisons_.emplace_back(ValidateFrameComparison( - FrameComparison(std::move(stats_key), /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, type, - std::move(frame_stats), OverloadReason::kCpu))); + FrameComparison(std::move(stats_key), /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, type, std::move(frame_stats), + OverloadReason::kCpu))); } else { OverloadReason overload_reason = OverloadReason::kNone; if (!captured && type == FrameComparisonType::kRegular) { @@ -379,7 +379,7 @@ void DefaultVideoQualityAnalyzerFramesComparator::AddComparisonInternal( void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparisons() { while (true) { // Try to pick next comparison to perform from the queue. 
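ProcessComparisons() above is the consumer side of a small worker pool: Start() spawns one PlatformThread per requested thread, and each worker repeatedly pops an optional FrameComparison from a mutex-guarded deque until no more comparisons are expected. A compact sketch of that pop-an-optional pattern is given below, using standard-library primitives (std::mutex, std::condition_variable) in place of WebRTC's Mutex, Event and PlatformThread; the ComparisonQueue name and its members are illustrative only.

#include <condition_variable>
#include <deque>
#include <mutex>
#include <optional>

// Minimal producer/consumer queue in the spirit of ProcessComparisons():
// workers repeatedly try to pop a task; an empty optional after Stop()
// tells them to exit.
template <typename T>
class ComparisonQueue {
 public:
  void Push(T item) {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      items_.push_back(std::move(item));
    }
    cv_.notify_one();
  }

  void Stop() {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      stopped_ = true;
    }
    cv_.notify_all();
  }

  // Blocks until an item is available or the queue has been stopped.
  std::optional<T> Pop() {
    std::unique_lock<std::mutex> lock(mutex_);
    cv_.wait(lock, [this] { return stopped_ || !items_.empty(); });
    if (items_.empty()) return std::nullopt;
    T item = std::move(items_.front());
    items_.pop_front();
    return item;
  }

 private:
  std::mutex mutex_;
  std::condition_variable cv_;
  std::deque<T> items_;
  bool stopped_ = false;
};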
- absl::optional comparison = absl::nullopt; + std::optional comparison = std::nullopt; bool more_new_comparisons_expected; { MutexLock lock(&mutex_); @@ -418,9 +418,9 @@ void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparison( double ssim = -1.0; if ((options_.compute_psnr || options_.compute_ssim) && comparison.captured.has_value() && comparison.rendered.has_value()) { - rtc::scoped_refptr reference_buffer = + scoped_refptr reference_buffer = comparison.captured->video_frame_buffer()->ToI420(); - rtc::scoped_refptr test_buffer = + scoped_refptr test_buffer = comparison.rendered->video_frame_buffer()->ToI420(); if (options_.adjust_cropping_before_comparing_frames) { test_buffer = ScaleVideoFrameBuffer( @@ -552,6 +552,14 @@ void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparison( StatsSample(*comparison.frame_stats.decoded_frame_width * *comparison.frame_stats.decoded_frame_height, frame_stats.decode_end_time, metadata)); + // TODO(webrtc:357636606): Add a check that the rendered QP is among the + // encoded spatial layer's QP. Can only do that if there are 1 and only 1 + // QP value per spatial layer. + if (frame_stats.decoded_frame_qp.has_value()) { + stats->rendered_frame_qp.AddSample( + StatsSample(static_cast(*frame_stats.decoded_frame_qp), + frame_stats.decode_end_time, metadata)); + } } if (frame_stats.prev_frame_rendered_time.has_value() && diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h index 006c3eb9bf..e64642716b 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h @@ -88,29 +88,28 @@ class DefaultVideoQualityAnalyzerFramesComparator { // has to be created. // `start_time` - call start time. void RegisterParticipantInCall( - rtc::ArrayView> - stream_started_time, + ArrayView> stream_started_time, Timestamp start_time); // `captured` - video frame captured by sender to use for PSNR/SSIM // computation. If `type` is `FrameComparisonType::kRegular` and - // `captured` is `absl::nullopt` comparison is assumed to be overloaded + // `captured` is `std::nullopt` comparison is assumed to be overloaded // due to memory constraints. // `rendered` - video frame rendered by receiver to use for PSNR/SSIM // computation. Required only if `type` is // `FrameComparisonType::kRegular`, but can still be omitted if - // `captured` is `absl::nullopt`. + // `captured` is `std::nullopt`. void AddComparison(InternalStatsKey stats_key, - absl::optional captured, - absl::optional rendered, + std::optional captured, + std::optional rendered, FrameComparisonType type, FrameStats frame_stats); // `skipped_between_rendered` - amount of frames dropped on this stream before // last received frame and current frame. 
void AddComparison(InternalStatsKey stats_key, int skipped_between_rendered, - absl::optional captured, - absl::optional rendered, + std::optional captured, + std::optional rendered, FrameComparisonType type, FrameStats frame_stats); @@ -127,8 +126,8 @@ class DefaultVideoQualityAnalyzerFramesComparator { enum State { kNew, kActive, kStopped }; void AddComparisonInternal(InternalStatsKey stats_key, - absl::optional captured, - absl::optional rendered, + std::optional captured, + std::optional rendered, FrameComparisonType type, FrameStats frame_stats) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); @@ -148,8 +147,8 @@ class DefaultVideoQualityAnalyzerFramesComparator { std::deque comparisons_ RTC_GUARDED_BY(mutex_); FramesComparatorStats frames_comparator_stats_ RTC_GUARDED_BY(mutex_); - std::vector thread_pool_; - rtc::Event comparison_available_event_; + std::vector thread_pool_; + Event comparison_available_event_; }; } // namespace webrtc diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc index 2656bf5d44..571c3c602f 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator_test.cc @@ -50,8 +50,8 @@ VideoFrame CreateFrame(uint16_t frame_id, Timestamp timestamp) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(width, height, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); test::FrameGeneratorInterface::VideoFrameData frame_data = frame_generator->NextFrame(); return VideoFrame::Builder() @@ -132,7 +132,7 @@ void AssertFirstMetadataHasField(const SamplesStatsCounter& counter, } std::string ToString(const SamplesStatsCounter& counter) { - rtc::StringBuilder out; + StringBuilder out; for (const StatsSample& s : counter.GetTimedSamples()) { out << "{ time_ms=" << s.time.ms() << "; value=" << s.value << "}, "; } @@ -177,8 +177,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, comparator.EnsureStatsForStream(stream, sender, peers_count, stream_start_time, stream_start_time); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kRegular, frame_stats); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -223,12 +223,12 @@ TEST( comparator.EnsureStatsForStream(stream, sender, peers_count, stream_start_time, stream_start_time); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kRegular, frame_stats1); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kRegular, frame_stats2); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -311,13 +311,13 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, stream_start_time, stream_start_time); for (size_t i = 0; i < stats.size() - 1; ++i) { comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kFrameInFlight, stats[i]); } comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + 
/*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kRegular, stats[stats.size() - 1]); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -378,8 +378,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, captured_time, captured_time); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kFrameInFlight, frame_stats); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -401,6 +401,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, ExpectEmpty(stats.resolution_of_decoded_frame); ExpectEmpty(stats.target_encode_bitrate); EXPECT_THAT(stats.spatial_layers_qp, IsEmpty()); + ExpectEmpty(stats.rendered_frame_qp); ExpectEmpty(stats.recv_key_frame_size_bytes); ExpectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 0); @@ -438,8 +439,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, captured_time, captured_time); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kFrameInFlight, frame_stats); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -461,6 +462,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, ExpectEmpty(stats.resolution_of_decoded_frame); ExpectEmpty(stats.target_encode_bitrate); EXPECT_THAT(stats.spatial_layers_qp, IsEmpty()); + ExpectEmpty(stats.rendered_frame_qp); ExpectEmpty(stats.recv_key_frame_size_bytes); ExpectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 0); @@ -510,8 +512,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, captured_time, captured_time); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kFrameInFlight, frame_stats); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -536,6 +538,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1)); ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2, /*value=*/5.0); + ExpectEmpty(stats.rendered_frame_qp); ExpectEmpty(stats.recv_key_frame_size_bytes); ExpectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -586,8 +589,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, captured_time, captured_time); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kFrameInFlight, frame_stats); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -612,6 +615,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1)); ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2, /*value=*/5.0); + ExpectEmpty(stats.rendered_frame_qp); ExpectEmpty(stats.recv_key_frame_size_bytes); ExpectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -667,8 +671,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, 
comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, captured_time, captured_time); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kFrameInFlight, frame_stats); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -694,6 +698,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1)); ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2, /*value=*/5.0); + ExpectEmpty(stats.rendered_frame_qp); ExpectSizeAndAllElementsAre(stats.recv_key_frame_size_bytes, /*size=*/1, /*value=*/500.0); ExpectEmpty(stats.recv_delta_frame_size_bytes); @@ -749,6 +754,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.decode_end_time = captured_time + TimeDelta::Millis(50); frame_stats.decoded_frame_width = 200; frame_stats.decoded_frame_height = 100; + frame_stats.decoded_frame_qp = 10; frame_stats.used_decoder = Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time); @@ -757,8 +763,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, captured_time, captured_time); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kFrameInFlight, frame_stats); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -784,6 +790,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1)); ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2, /*value=*/5.0); + ExpectSizeAndAllElementsAre(stats.rendered_frame_qp, /*size=*/1, + /*value=*/10.0); ExpectSizeAndAllElementsAre(stats.recv_key_frame_size_bytes, /*size=*/1, /*value=*/500.0); ExpectEmpty(stats.recv_delta_frame_size_bytes); @@ -840,13 +848,14 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.decoder_failed = true; frame_stats.used_decoder = Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time); + frame_stats.decoded_frame_qp = 10; comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, captured_time, captured_time); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kFrameInFlight, frame_stats); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -872,6 +881,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1)); ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2, /*value=*/5.0); + ExpectEmpty(stats.rendered_frame_qp); ExpectSizeAndAllElementsAre(stats.recv_key_frame_size_bytes, /*size=*/1, /*value=*/500.0); ExpectEmpty(stats.recv_delta_frame_size_bytes); @@ -913,8 +923,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, captured_time, captured_time); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kDroppedFrame, frame_stats); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -936,6 +946,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, ExpectEmpty(stats.resolution_of_decoded_frame); ExpectEmpty(stats.target_encode_bitrate); 
EXPECT_THAT(stats.spatial_layers_qp, IsEmpty()); + ExpectEmpty(stats.rendered_frame_qp); ExpectEmpty(stats.recv_key_frame_size_bytes); ExpectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 0); @@ -973,8 +984,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, captured_time, captured_time); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kDroppedFrame, frame_stats); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -996,6 +1007,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, ExpectEmpty(stats.resolution_of_decoded_frame); ExpectEmpty(stats.target_encode_bitrate); EXPECT_THAT(stats.spatial_layers_qp, IsEmpty()); + ExpectEmpty(stats.rendered_frame_qp); ExpectEmpty(stats.recv_key_frame_size_bytes); ExpectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 0); @@ -1045,8 +1057,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, captured_time, captured_time); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kDroppedFrame, frame_stats); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -1071,6 +1083,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1)); ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2, /*value=*/5.0); + ExpectEmpty(stats.rendered_frame_qp); ExpectEmpty(stats.recv_key_frame_size_bytes); ExpectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -1121,8 +1134,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, captured_time, captured_time); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kDroppedFrame, frame_stats); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -1147,6 +1160,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1)); ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2, /*value=*/5.0); + ExpectEmpty(stats.rendered_frame_qp); ExpectEmpty(stats.recv_key_frame_size_bytes); ExpectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -1198,8 +1212,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, captured_time, captured_time); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kDroppedFrame, frame_stats); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -1235,6 +1249,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, EXPECT_EQ(stats.encoders, std::vector{*frame_stats.used_encoder}); EXPECT_THAT(stats.decoders, IsEmpty()); + ExpectEmpty(stats.rendered_frame_qp); } TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, @@ -1280,13 +1295,14 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time); 
frame_stats.decoded_frame_width = 200; frame_stats.decoded_frame_height = 100; + frame_stats.decoded_frame_qp = 10; comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, captured_time, captured_time); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kDroppedFrame, frame_stats); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -1311,6 +1327,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1)); ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2, /*value=*/5.0); + ExpectEmpty(stats.rendered_frame_qp); ExpectEmpty(stats.recv_key_frame_size_bytes); ExpectEmpty(stats.recv_delta_frame_size_bytes); EXPECT_EQ(stats.total_encoded_images_payload, 1000); @@ -1366,13 +1383,14 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, frame_stats.decoder_failed = true; frame_stats.used_decoder = Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time); + frame_stats.decoded_frame_qp = 10; comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, captured_time, captured_time); comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kDroppedFrame, frame_stats); comparator.Stop(/*last_rendered_frame_times=*/{}); @@ -1398,6 +1416,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1)); ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2, /*value=*/5.0); + ExpectEmpty(stats.rendered_frame_qp); ExpectSizeAndAllElementsAre(stats.recv_key_frame_size_bytes, /*size=*/1, /*value=*/500.0); ExpectEmpty(stats.recv_delta_frame_size_bytes); @@ -1460,6 +1479,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, Vp8CodecForOneFrame(frame_id, frame_stats.decode_end_time); frame_stats.decoded_frame_width = 200; frame_stats.decoded_frame_height = 100; + frame_stats.decoded_frame_qp = 10; // Frame rendered frame_stats.rendered_time = captured_time + TimeDelta::Millis(60); @@ -1494,6 +1514,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, EXPECT_THAT(stats.spatial_layers_qp, SizeIs(1)); ExpectSizeAndAllElementsAre(stats.spatial_layers_qp[0], /*size=*/2, /*value=*/5.0); + ExpectSizeAndAllElementsAre(stats.rendered_frame_qp, /*size=*/1, + /*value=*/10.0); ExpectSizeAndAllElementsAre(stats.recv_key_frame_size_bytes, /*size=*/1, /*value=*/500.0); ExpectEmpty(stats.recv_delta_frame_size_bytes); @@ -1555,6 +1577,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, AllStatsHaveMetadataSet) { frame_stats.rendered_time = captured_time + TimeDelta::Millis(60); frame_stats.decoded_frame_width = 200; frame_stats.decoded_frame_height = 100; + frame_stats.decoded_frame_qp = 10; comparator.Start(/*max_threads_count=*/1); comparator.EnsureStatsForStream(stream, sender, /*peers_count=*/2, @@ -1580,6 +1603,7 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, AllStatsHaveMetadataSet) { AssertFirstMetadataHasField(stats.target_encode_bitrate, "frame_id", "1"); AssertFirstMetadataHasField(stats.spatial_layers_qp[0], "frame_id", "1"); AssertFirstMetadataHasField(stats.recv_key_frame_size_bytes, "frame_id", "1"); + AssertFirstMetadataHasField(stats.rendered_frame_qp, "frame_id", "1"); ExpectEmpty(stats.recv_delta_frame_size_bytes); } @@ 
-1604,21 +1628,21 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, // Add 5 frames which were rendered with 30 fps (~30ms between frames) // Frame ids are in [1..5] and last frame is with 120ms offset from first. - absl::optional prev_frame_rendered_time = absl::nullopt; + std::optional prev_frame_rendered_time = std::nullopt; for (int i = 0; i < 5; ++i) { FrameStats frame_stats = FrameStatsWith10msDeltaBetweenPhasesAnd10x10Frame( /*frame_id=*/i + 1, stream_start_time + TimeDelta::Millis(30 * i)); frame_stats.prev_frame_rendered_time = prev_frame_rendered_time; frame_stats.time_between_rendered_frames = prev_frame_rendered_time.has_value() - ? absl::optional(frame_stats.rendered_time - - *prev_frame_rendered_time) - : absl::nullopt; + ? std::optional(frame_stats.rendered_time - + *prev_frame_rendered_time) + : std::nullopt; prev_frame_rendered_time = frame_stats.rendered_time; comparator.AddComparison(stats_key, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kRegular, frame_stats); } @@ -1633,8 +1657,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest, comparator.AddComparison(stats_key, /*skipped_between_rendered=*/4, - /*captured=*/absl::nullopt, - /*rendered=*/absl::nullopt, + /*captured=*/std::nullopt, + /*rendered=*/std::nullopt, FrameComparisonType::kRegular, freeze_frame_stats); comparator.Stop(/*last_rendered_frame_times=*/{}); diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.cc index 16f49ef154..4689729f3e 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.cc @@ -15,7 +15,7 @@ namespace webrtc { std::string InternalStatsKey::ToString() const { - rtc::StringBuilder out; + StringBuilder out; out << "stream=" << stream << "_sender=" << sender << "_receiver=" << receiver; return out.str(); @@ -37,8 +37,8 @@ bool operator==(const InternalStatsKey& a, const InternalStatsKey& b) { } FrameComparison::FrameComparison(InternalStatsKey stats_key, - absl::optional captured, - absl::optional rendered, + std::optional captured, + std::optional rendered, FrameComparisonType type, FrameStats frame_stats, OverloadReason overload_reason) diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h index 88c0335b5a..c6887488c7 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h @@ -12,11 +12,11 @@ #define TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_INTERNAL_SHARED_OBJECTS_H_ #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/numerics/samples_stats_counter.h" #include "api/units/data_size.h" #include "api/units/timestamp.h" @@ -59,10 +59,10 @@ struct FrameStats { Timestamp rendered_time = Timestamp::MinusInfinity(); // Next timings are set if and only if previous frame exist. 
- absl::optional prev_frame_rendered_time = absl::nullopt; - absl::optional time_between_captured_frames = absl::nullopt; - absl::optional time_between_encoded_frames = absl::nullopt; - absl::optional time_between_rendered_frames = absl::nullopt; + std::optional prev_frame_rendered_time = std::nullopt; + std::optional time_between_captured_frames = std::nullopt; + std::optional time_between_encoded_frames = std::nullopt; + std::optional time_between_rendered_frames = std::nullopt; VideoFrameType encoded_frame_type = VideoFrameType::kEmptyFrame; DataSize encoded_image_size = DataSize::Bytes(0); @@ -72,14 +72,19 @@ struct FrameStats { // Sender side qp values per spatial layer. In case when spatial layer is not // set for `webrtc::EncodedImage`, 0 is used as default. std::map spatial_layers_qp; + // Receive side qp value. Receiver only renders one spatial layer for a given + // time index. The QP value here corresponds to one of the encoded spatial + // layer's QP given in `spatial_layers_qp`, i.e. to the one that corresponds + // to the rendered frame. + std::optional decoded_frame_qp = std::nullopt; - absl::optional decoded_frame_width = absl::nullopt; - absl::optional decoded_frame_height = absl::nullopt; + std::optional decoded_frame_width = std::nullopt; + std::optional decoded_frame_height = std::nullopt; // Can be not set if frame was dropped by encoder. - absl::optional used_encoder = absl::nullopt; + std::optional used_encoder = std::nullopt; // Can be not set if frame was dropped in the network. - absl::optional used_decoder = absl::nullopt; + std::optional used_decoder = std::nullopt; bool decoder_failed = false; }; @@ -116,8 +121,8 @@ enum class FrameComparisonType { // true or false showing was frame dropped or not. struct FrameComparison { FrameComparison(InternalStatsKey stats_key, - absl::optional captured, - absl::optional rendered, + std::optional captured, + std::optional rendered, FrameComparisonType type, FrameStats frame_stats, OverloadReason overload_reason); @@ -125,8 +130,8 @@ struct FrameComparison { InternalStatsKey stats_key; // Frames can be omitted if there too many computations waiting in the // queue. - absl::optional captured; - absl::optional rendered; + std::optional captured; + std::optional rendered; FrameComparisonType type; FrameStats frame_stats; OverloadReason overload_reason; diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc index 072c2ef202..f3f9caac0a 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_metric_names_test.cc @@ -8,25 +8,33 @@ * be found in the AUTHORS file in the root of the source tree. 
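The FrameStats and FrameComparison hunks above all apply the same mechanical migration: the "absl/types/optional.h" include is dropped in favor of <optional>, and absl::optional/absl::nullopt become std::optional/std::nullopt while the call sites stay untouched, since std::optional exposes the same has_value()/value()/operator* surface. A minimal sketch of the resulting declarations; the element types (Timestamp, TimeDelta, uint8_t) and the shortened struct name are the editor's assumptions for illustration, not the exact analyzer code:

#include <cstdint>
#include <optional>

#include "api/units/time_delta.h"
#include "api/units/timestamp.h"

namespace webrtc {

struct FrameStatsSketch {
  // Set if and only if a previous frame exists for the stream.
  std::optional<Timestamp> prev_frame_rendered_time = std::nullopt;
  std::optional<TimeDelta> time_between_rendered_frames = std::nullopt;
  // Receive-side QP of the decoded frame; unset when the frame was dropped
  // before it could be decoded.
  std::optional<uint8_t> decoded_frame_qp = std::nullopt;
};

// Existing call sites keep compiling unchanged after the swap.
inline bool HasDecodedQp(const FrameStatsSketch& stats) {
  return stats.decoded_frame_qp.has_value();
}

}  // namespace webrtc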
*/ +#include #include +#include +#include #include #include +#include "absl/flags/flag.h" +#include "absl/strings/string_view.h" #include "api/rtp_packet_info.h" #include "api/rtp_packet_infos.h" #include "api/test/create_frame_generator.h" +#include "api/test/frame_generator_interface.h" #include "api/test/metrics/metric.h" #include "api/test/metrics/metrics_logger.h" -#include "api/test/metrics/stdout_metrics_exporter.h" +#include "api/test/video_quality_analyzer_interface.h" +#include "api/units/timestamp.h" #include "api/video/encoded_image.h" #include "api/video/i420_buffer.h" #include "api/video/video_frame.h" -#include "common_video/libyuv/include/webrtc_libyuv.h" -#include "rtc_tools/frame_analyzer/video_geometry_aligner.h" +#include "system_wrappers/include/clock.h" #include "system_wrappers/include/sleep.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h" +#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h" +#include "test/test_flags.h" namespace webrtc { namespace { @@ -38,14 +46,19 @@ using ::testing::UnorderedElementsAre; using ::webrtc::test::DefaultMetricsLogger; using ::webrtc::test::ImprovementDirection; using ::webrtc::test::Metric; -using ::webrtc::test::MetricsExporter; -using ::webrtc::test::StdoutMetricsExporter; using ::webrtc::test::Unit; constexpr int kAnalyzerMaxThreadsCount = 1; constexpr int kFrameWidth = 320; constexpr int kFrameHeight = 240; +std::string GetExpectedTestCaseName(const std::string& stream_label) { + if (!absl::GetFlag(FLAGS_isolated_script_test_perf_output).empty()) { + return "test_case/" + stream_label; + } + return "test_case"; +} + DefaultVideoQualityAnalyzerOptions AnalyzerOptionsForTest() { DefaultVideoQualityAnalyzerOptions options; options.compute_psnr = true; @@ -72,7 +85,7 @@ EncodedImage FakeEncode(const VideoFrame& frame) { packet_infos.push_back(RtpPacketInfo( /*ssrc=*/1, /*csrcs=*/{}, - /*rtp_timestamp=*/frame.timestamp(), + /*rtp_timestamp=*/frame.rtp_timestamp(), /*receive_time=*/Timestamp::Micros(frame.timestamp_us() + 10000))); image.SetPacketInfos(RtpPacketInfos(packet_infos)); return image; @@ -160,8 +173,8 @@ std::vector ToTestCases(const std::vector& metrics) { TEST(DefaultVideoQualityAnalyzerMetricNamesTest, MetricNamesForP2PAreCorrect) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); DefaultMetricsLogger metrics_logger(Clock::GetRealTimeClock()); @@ -181,142 +194,147 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest, MetricNamesForP2PAreCorrect) { metrics, UnorderedElementsAre( MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "psnr_dB", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "ssim", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "transport_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = 
"test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "total_delay_incl_transport", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "time_between_rendered_frames", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "harmonic_framerate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "encode_frame_rate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "encode_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "time_between_freezes", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "freeze_time_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "pixels_per_frame", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "min_psnr_dB", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "decode_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "receive_to_render_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "dropped_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "frames_in_flight", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "rendered_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "max_skipped", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = 
"test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "target_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "qp_sl0", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), + .name = "rendered_frame_qp", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kSmallerIsBetter}, + MetricValidationInfo{ + .test_case = GetExpectedTestCaseName("alice_video"), .name = "actual_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "capture_frame_rate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "num_encoded_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "num_decoded_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "num_send_key_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "num_recv_key_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "recv_key_frame_size_bytes", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "recv_delta_frame_size_bytes", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, @@ -331,8 +349,8 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest, MetricNamesFor3PeersAreCorrect) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); DefaultMetricsLogger metrics_logger(Clock::GetRealTimeClock()); @@ -355,284 +373,294 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest, UnorderedElementsAre( // Bob MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "psnr_dB", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "ssim", .unit = Unit::kUnitless, .improvement_direction = 
ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "transport_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "total_delay_incl_transport", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "time_between_rendered_frames", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "harmonic_framerate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "encode_frame_rate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "encode_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "time_between_freezes", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "freeze_time_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "pixels_per_frame", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "min_psnr_dB", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "decode_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "receive_to_render_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "dropped_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = 
"frames_in_flight", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "rendered_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "max_skipped", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "target_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "qp_sl0", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), + .name = "rendered_frame_qp", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kSmallerIsBetter}, + MetricValidationInfo{ + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "actual_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "capture_frame_rate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "num_encoded_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "num_decoded_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "num_send_key_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "num_recv_key_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "recv_key_frame_size_bytes", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_bob", + .test_case = GetExpectedTestCaseName("alice_video_alice_bob"), .name = "recv_delta_frame_size_bytes", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, // Charlie MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = 
GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "psnr_dB", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "ssim", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "transport_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "total_delay_incl_transport", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "time_between_rendered_frames", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "harmonic_framerate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "encode_frame_rate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "encode_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "time_between_freezes", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "freeze_time_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "pixels_per_frame", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "min_psnr_dB", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "decode_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "receive_to_render_time", .unit = Unit::kMilliseconds, 
.improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "dropped_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "frames_in_flight", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "rendered_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "max_skipped", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "target_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "qp_sl0", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), + .name = "rendered_frame_qp", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kSmallerIsBetter}, + MetricValidationInfo{ + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "actual_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "capture_frame_rate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "num_encoded_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "num_decoded_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "num_send_key_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "num_recv_key_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = 
GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "recv_key_frame_size_bytes", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, MetricValidationInfo{ - .test_case = "test_case/alice_video_alice_charlie", + .test_case = GetExpectedTestCaseName("alice_video_alice_charlie"), .name = "recv_delta_frame_size_bytes", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter}, @@ -647,8 +675,8 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest, TestCaseFor3PeerIsTheSameAfterAllPeersLeft) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); DefaultMetricsLogger metrics_logger(Clock::GetRealTimeClock()); @@ -669,11 +697,7 @@ TEST(DefaultVideoQualityAnalyzerMetricNamesTest, std::vector metrics = ToTestCases(metrics_logger.GetCollectedMetrics()); - EXPECT_THAT(metrics, SizeIs(57)); - EXPECT_THAT(metrics, Contains("test_case/alice_video_alice_bob").Times(28)); - EXPECT_THAT(metrics, - Contains("test_case/alice_video_alice_charlie").Times(28)); - EXPECT_THAT(metrics, Contains("test_case").Times(1)); + EXPECT_THAT(metrics, SizeIs(59)); } } // namespace diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.cc index 79b9286e2d..70345dd6bc 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.cc @@ -26,7 +26,7 @@ constexpr int kMicrosPerSecond = 1000000; } // namespace std::string StreamCodecInfo::ToString() const { - rtc::StringBuilder out; + StringBuilder out; out << "{codec_name=" << codec_name << "; first_frame_id=" << first_frame_id << "; last_frame_id=" << last_frame_id << "; switched_on_at=" << webrtc::ToString(switched_on_at) @@ -38,8 +38,7 @@ std::ostream& operator<<(std::ostream& os, const StreamCodecInfo& state) { return os << state.ToString(); } -rtc::StringBuilder& operator<<(rtc::StringBuilder& sb, - const StreamCodecInfo& state) { +StringBuilder& operator<<(StringBuilder& sb, const StreamCodecInfo& state) { return sb << state.ToString(); } @@ -70,7 +69,7 @@ std::string ToString(FrameDropPhase phase) { std::ostream& operator<<(std::ostream& os, FrameDropPhase phase) { return os << ToString(phase); } -rtc::StringBuilder& operator<<(rtc::StringBuilder& sb, FrameDropPhase phase) { +StringBuilder& operator<<(StringBuilder& sb, FrameDropPhase phase) { return sb << ToString(phase); } @@ -100,7 +99,7 @@ StreamStats::StreamStats(Timestamp stream_started_time) } std::string StatsKey::ToString() const { - rtc::StringBuilder out; + StringBuilder out; out << stream_label << "_" << receiver; return out.str(); } @@ -151,11 +150,11 @@ std::set VideoStreamsInfo::GetStreams( return it->second; } -absl::optional VideoStreamsInfo::GetSender( +std::optional VideoStreamsInfo::GetSender( absl::string_view stream_label) const { auto it = stream_to_sender_.find(std::string(stream_label)); if (it == stream_to_sender_.end()) { - return absl::nullopt; + return std::nullopt; } return it->second; } diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h index 73cbcc03df..5f529dd4a3 100644 --- 
a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h @@ -14,13 +14,13 @@ #include #include #include +#include #include #include #include #include #include -#include "absl/types/optional.h" #include "api/numerics/samples_stats_counter.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -88,8 +88,7 @@ struct StreamCodecInfo { }; std::ostream& operator<<(std::ostream& os, const StreamCodecInfo& state); -rtc::StringBuilder& operator<<(rtc::StringBuilder& sb, - const StreamCodecInfo& state); +StringBuilder& operator<<(StringBuilder& sb, const StreamCodecInfo& state); bool operator==(const StreamCodecInfo& a, const StreamCodecInfo& b); // Represents phases where video frame can be dropped and such drop will be @@ -106,7 +105,7 @@ enum class FrameDropPhase : int { std::string ToString(FrameDropPhase phase); std::ostream& operator<<(std::ostream& os, FrameDropPhase phase); -rtc::StringBuilder& operator<<(rtc::StringBuilder& sb, FrameDropPhase phase); +StringBuilder& operator<<(StringBuilder& sb, FrameDropPhase phase); struct StreamStats { explicit StreamStats(Timestamp stream_started_time); @@ -151,6 +150,11 @@ struct StreamStats { // Sender side qp values per spatial layer. In case when spatial layer is not // set for `webrtc::EncodedImage`, 0 is used as default. std::map spatial_layers_qp; + // QP values of the rendered frames. In SVC or simulcast coding scenarios, the + // receiver will only render one of the spatial layers at a time. Hence, this + // value corresponds to the rendered frames' QP values, which should ideally + // correspond to one of the QP values in `spatial_layers_qp`. + SamplesStatsCounter rendered_frame_qp; int64_t total_encoded_images_payload = 0; // Counters on which phase how many frames were dropped. @@ -233,9 +237,9 @@ class VideoStreamsInfo { // empty set will be returned. std::set GetStreams(absl::string_view sender_name) const; - // Returns sender name for specified `stream_label`. Returns `absl::nullopt` + // Returns sender name for specified `stream_label`. Returns `std::nullopt` // if provided `stream_label` isn't known to the video analyzer. - absl::optional GetSender(absl::string_view stream_label) const; + std::optional GetSender(absl::string_view stream_label) const; // Returns set of the receivers for specified `stream_label`. 
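StreamStats gains a rendered_frame_qp counter to complement the sender-side spatial_layers_qp map: only the QP of frames that were actually rendered is accumulated, which is why the dropped-frame comparator tests earlier in this change expect the counter to stay empty even when decoded_frame_qp was set. A minimal sketch of how such a counter could be fed; the function name and the uint8_t-to-double conversion are illustrative assumptions, not the analyzer's actual code:

#include <cstdint>
#include <optional>

#include "api/numerics/samples_stats_counter.h"

namespace webrtc {

// Illustrative only: accumulate the receive-side QP for rendered frames.
void AccumulateRenderedFrameQp(std::optional<uint8_t> decoded_frame_qp,
                               bool frame_was_rendered,
                               SamplesStatsCounter& rendered_frame_qp) {
  // A dropped frame may still carry a decoded QP (decoded but never
  // rendered); it must not contribute to the rendered-frame statistic.
  if (frame_was_rendered && decoded_frame_qp.has_value()) {
    rendered_frame_qp.AddSample(static_cast<double>(*decoded_frame_qp));
  }
}

}  // namespace webrtc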
If stream wasn't // received by any peer or `stream_label` isn't known to the video analyzer diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.cc index bcdf16d1a4..610230d02f 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.cc @@ -10,10 +10,10 @@ #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h" +#include #include #include -#include "absl/types/optional.h" #include "api/units/timestamp.h" #include "rtc_base/checks.h" #include "system_wrappers/include/clock.h" @@ -23,21 +23,21 @@ namespace webrtc { namespace { template -absl::optional MaybeGetValue(const std::unordered_map& map, - size_t key) { +std::optional MaybeGetValue(const std::unordered_map& map, + size_t key) { auto it = map.find(key); if (it == map.end()) { - return absl::nullopt; + return std::nullopt; } return it->second; } } // namespace -StreamState::StreamState(size_t sender, - std::set receivers, - Timestamp stream_started_time, - Clock* clock) +AnalyzerStreamState::AnalyzerStreamState(size_t sender, + std::set receivers, + Timestamp stream_started_time, + Clock* clock) : sender_(sender), stream_started_time_(stream_started_time), clock_(clock), @@ -51,9 +51,9 @@ StreamState::StreamState(size_t sender, } } -uint16_t StreamState::PopFront(size_t peer) { +uint16_t AnalyzerStreamState::PopFront(size_t peer) { RTC_CHECK_NE(peer, kAliveFramesQueueIndex); - absl::optional frame_id = frame_ids_.PopFront(peer); + std::optional frame_id = frame_ids_.PopFront(peer); RTC_DCHECK(frame_id.has_value()); // If alive's frame queue is longer than all others, than also pop frame from @@ -62,7 +62,7 @@ uint16_t StreamState::PopFront(size_t peer) { size_t other_size = GetLongestReceiverQueue(); // Pops frame from alive queue if alive's queue is the longest one. 
if (alive_size > other_size) { - absl::optional alive_frame_id = + std::optional alive_frame_id = frame_ids_.PopFront(kAliveFramesQueueIndex); RTC_DCHECK(alive_frame_id.has_value()); RTC_DCHECK_EQ(frame_id.value(), alive_frame_id.value()); @@ -71,14 +71,14 @@ uint16_t StreamState::PopFront(size_t peer) { return frame_id.value(); } -void StreamState::AddPeer(size_t peer) { +void AnalyzerStreamState::AddPeer(size_t peer) { RTC_CHECK_NE(peer, kAliveFramesQueueIndex); frame_ids_.AddReader(peer, kAliveFramesQueueIndex); receivers_.insert(peer); pausable_state_.emplace(peer, PausableState(clock_)); } -void StreamState::RemovePeer(size_t peer) { +void AnalyzerStreamState::RemovePeer(size_t peer) { RTC_CHECK_NE(peer, kAliveFramesQueueIndex); frame_ids_.RemoveReader(peer); receivers_.erase(peer); @@ -94,14 +94,15 @@ void StreamState::RemovePeer(size_t peer) { } } -PausableState* StreamState::GetPausableState(size_t peer) { +PausableState* AnalyzerStreamState::GetPausableState(size_t peer) { auto it = pausable_state_.find(peer); RTC_CHECK(it != pausable_state_.end()) << "No pausable state for receiver " << peer; return &it->second; } -void StreamState::SetLastRenderedFrameTime(size_t peer, Timestamp time) { +void AnalyzerStreamState::SetLastRenderedFrameTime(size_t peer, + Timestamp time) { auto it = last_rendered_frame_time_.find(peer); if (it == last_rendered_frame_time_.end()) { last_rendered_frame_time_.insert({peer, time}); @@ -110,12 +111,12 @@ void StreamState::SetLastRenderedFrameTime(size_t peer, Timestamp time) { } } -absl::optional StreamState::last_rendered_frame_time( +std::optional AnalyzerStreamState::last_rendered_frame_time( size_t peer) const { return MaybeGetValue(last_rendered_frame_time_, peer); } -size_t StreamState::GetLongestReceiverQueue() const { +size_t AnalyzerStreamState::GetLongestReceiverQueue() const { size_t max = 0; for (size_t receiver : receivers_) { size_t cur_size = frame_ids_.size(receiver); diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h index 22fbfd4a40..6004e8aab3 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h @@ -12,10 +12,10 @@ #define TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_STREAM_STATE_H_ #include +#include #include #include -#include "absl/types/optional.h" #include "api/units/timestamp.h" #include "system_wrappers/include/clock.h" #include "test/pc/e2e/analyzer/video/dvqa/pausable_state.h" @@ -35,12 +35,12 @@ namespace webrtc { // dead. // // Supports peer indexes from 0 to max(size_t) - 1. 
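The class below is renamed from StreamState to AnalyzerStreamState, and the stream-state tests further down are updated to match. A minimal construction sketch mirroring how those tests use it; the std::set element type (size_t peer indices) is filled in here as an assumption:

#include <cstddef>
#include <set>

#include "api/units/timestamp.h"
#include "system_wrappers/include/clock.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h"

namespace webrtc {

void SketchAnalyzerStreamStateUsage() {
  // One sender (peer 0) and two receivers (peers 1 and 2), as in the tests.
  AnalyzerStreamState state(/*sender=*/0,
                            /*receivers=*/std::set<size_t>{1, 2},
                            /*stream_started_time=*/Timestamp::Seconds(1),
                            Clock::GetRealTimeClock());
  state.PushBack(/*frame_id=*/1);
  state.PushBack(/*frame_id=*/2);
  // Each receiver pops frames independently from its own queue.
  state.PopFront(/*peer=*/1);
}

}  // namespace webrtc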
-class StreamState { +class AnalyzerStreamState { public: - StreamState(size_t sender, - std::set receivers, - Timestamp stream_started_time, - Clock* clock); + AnalyzerStreamState(size_t sender, + std::set receivers, + Timestamp stream_started_time, + Clock* clock); size_t sender() const { return sender_; } Timestamp stream_started_time() const { return stream_started_time_; } @@ -73,19 +73,19 @@ class StreamState { void SetLastCapturedFrameTime(Timestamp time) { last_captured_frame_time_ = time; } - absl::optional last_captured_frame_time() const { + std::optional last_captured_frame_time() const { return last_captured_frame_time_; } void SetLastEncodedFrameTime(Timestamp time) { last_encoded_frame_time_ = time; } - absl::optional last_encoded_frame_time() const { + std::optional last_encoded_frame_time() const { return last_encoded_frame_time_; } void SetLastRenderedFrameTime(size_t peer, Timestamp time); - absl::optional last_rendered_frame_time(size_t peer) const; + std::optional last_rendered_frame_time(size_t peer) const; private: // Index of the `frame_ids_` queue which is used to track alive frames for @@ -113,8 +113,8 @@ class StreamState { // frame_id2 and consider those frames as dropped and then compare received // frame with the one from `FrameInFlight` with id frame_id3. MultiReaderQueue frame_ids_; - absl::optional last_captured_frame_time_ = absl::nullopt; - absl::optional last_encoded_frame_time_ = absl::nullopt; + std::optional last_captured_frame_time_ = std::nullopt; + std::optional last_encoded_frame_time_ = std::nullopt; std::unordered_map last_rendered_frame_time_; // Mapping from peer's index to pausable state for this receiver. std::unordered_map pausable_state_; diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state_test.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state_test.cc index 1b2c59b26d..7a79452121 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state_test.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state_test.cc @@ -20,9 +20,9 @@ namespace webrtc { namespace { TEST(StreamStateTest, PopFrontAndFrontIndependentForEachPeer) { - StreamState state(/*sender=*/0, - /*receivers=*/std::set{1, 2}, Timestamp::Seconds(1), - Clock::GetRealTimeClock()); + AnalyzerStreamState state(/*sender=*/0, + /*receivers=*/std::set{1, 2}, + Timestamp::Seconds(1), Clock::GetRealTimeClock()); state.PushBack(/*frame_id=*/1); state.PushBack(/*frame_id=*/2); @@ -37,9 +37,9 @@ TEST(StreamStateTest, PopFrontAndFrontIndependentForEachPeer) { } TEST(StreamStateTest, IsEmpty) { - StreamState state(/*sender=*/0, - /*receivers=*/std::set{1, 2}, Timestamp::Seconds(1), - Clock::GetRealTimeClock()); + AnalyzerStreamState state(/*sender=*/0, + /*receivers=*/std::set{1, 2}, + Timestamp::Seconds(1), Clock::GetRealTimeClock()); state.PushBack(/*frame_id=*/1); EXPECT_FALSE(state.IsEmpty(/*peer=*/1)); @@ -50,9 +50,9 @@ TEST(StreamStateTest, IsEmpty) { } TEST(StreamStateTest, PopFrontForOnlyOnePeerDontChangeAliveFramesCount) { - StreamState state(/*sender=*/0, - /*receivers=*/std::set{1, 2}, Timestamp::Seconds(1), - Clock::GetRealTimeClock()); + AnalyzerStreamState state(/*sender=*/0, + /*receivers=*/std::set{1, 2}, + Timestamp::Seconds(1), Clock::GetRealTimeClock()); state.PushBack(/*frame_id=*/1); state.PushBack(/*frame_id=*/2); @@ -65,9 +65,9 @@ TEST(StreamStateTest, PopFrontForOnlyOnePeerDontChangeAliveFramesCount) { } TEST(StreamStateTest, PopFrontForAllPeersReducesAliveFramesCount) { 
- StreamState state(/*sender=*/0, - /*receivers=*/std::set{1, 2}, Timestamp::Seconds(1), - Clock::GetRealTimeClock()); + AnalyzerStreamState state(/*sender=*/0, + /*receivers=*/std::set{1, 2}, + Timestamp::Seconds(1), Clock::GetRealTimeClock()); state.PushBack(/*frame_id=*/1); state.PushBack(/*frame_id=*/2); @@ -80,9 +80,9 @@ TEST(StreamStateTest, PopFrontForAllPeersReducesAliveFramesCount) { } TEST(StreamStateTest, RemovePeerForLastExpectedReceiverUpdatesAliveFrames) { - StreamState state(/*sender=*/0, - /*receivers=*/std::set{1, 2}, Timestamp::Seconds(1), - Clock::GetRealTimeClock()); + AnalyzerStreamState state(/*sender=*/0, + /*receivers=*/std::set{1, 2}, + Timestamp::Seconds(1), Clock::GetRealTimeClock()); state.PushBack(/*frame_id=*/1); state.PushBack(/*frame_id=*/2); diff --git a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc index 7b52bad0a8..4aa80bf224 100644 --- a/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc +++ b/test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc @@ -81,7 +81,7 @@ EncodedImage FakeEncode(const VideoFrame& frame) { packet_infos.push_back(RtpPacketInfo( /*ssrc=*/1, /*csrcs=*/{}, - /*rtp_timestamp=*/frame.timestamp(), + /*rtp_timestamp=*/frame.rtp_timestamp(), /*receive_time=*/Timestamp::Micros(frame.timestamp_us() + 10000))); image.SetPacketInfos(RtpPacketInfos(packet_infos)); return image; @@ -95,7 +95,7 @@ VideoFrame DeepCopy(const VideoFrame& frame) { } std::vector GetSortedSamples(const SamplesStatsCounter& counter) { - rtc::ArrayView view = counter.GetTimedSamples(); + ArrayView view = counter.GetTimedSamples(); std::vector out(view.begin(), view.end()); std::sort(out.begin(), out.end(), [](const StatsSample& a, const StatsSample& b) { @@ -105,7 +105,7 @@ std::vector GetSortedSamples(const SamplesStatsCounter& counter) { } std::vector GetTimeSortedValues(const SamplesStatsCounter& counter) { - rtc::ArrayView view = counter.GetTimedSamples(); + ArrayView view = counter.GetTimedSamples(); std::vector sorted(view.begin(), view.end()); std::sort(sorted.begin(), sorted.end(), [](const StatsSample& a, const StatsSample& b) { @@ -125,7 +125,7 @@ void ExpectRateIs(const SamplesRateCounter& rate_couter, double expected_rate) { } std::string ToString(const std::vector& values) { - rtc::StringBuilder out; + StringBuilder out; for (const auto& v : values) { out << "{ time_ms=" << v.time.ms() << "; value=" << v.value << "}, "; } @@ -211,8 +211,8 @@ void PassFramesThroughAnalyzer(DefaultVideoQualityAnalyzer& analyzer, TEST(DefaultVideoQualityAnalyzerTest, NormalScenario) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), test::GetGlobalMetricsLogger(), @@ -271,8 +271,8 @@ TEST(DefaultVideoQualityAnalyzerTest, NormalScenario) { TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwice) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), test::GetGlobalMetricsLogger(), @@ -324,8 +324,8 @@ TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwice) { TEST(DefaultVideoQualityAnalyzerTest, 
NormalScenario2Receivers) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); constexpr char kAlice[] = "alice"; constexpr char kBob[] = "bob"; @@ -456,8 +456,8 @@ TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwiceBySamePeerWith2Receivers) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); constexpr char kAlice[] = "alice"; constexpr char kBob[] = "bob"; @@ -514,8 +514,8 @@ TEST(DefaultVideoQualityAnalyzerTest, TEST(DefaultVideoQualityAnalyzerTest, HeavyQualityMetricsFromEqualFrames) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions analyzer_options; analyzer_options.compute_psnr = true; @@ -573,8 +573,8 @@ TEST(DefaultVideoQualityAnalyzerTest, HeavyQualityMetricsFromShiftedFramesWithAdjustment) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions analyzer_options; analyzer_options.compute_psnr = true; @@ -599,7 +599,7 @@ TEST(DefaultVideoQualityAnalyzerTest, VideoFrame received_frame = frame; // Shift frame by a few pixels. test::CropRegion crop_region{0, 1, 3, 0}; - rtc::scoped_refptr cropped_buffer = + scoped_refptr cropped_buffer = CropAndZoom(crop_region, received_frame.video_frame_buffer()->ToI420()); received_frame.set_video_frame_buffer(cropped_buffer); @@ -637,8 +637,8 @@ TEST(DefaultVideoQualityAnalyzerTest, TEST(DefaultVideoQualityAnalyzerTest, CpuUsage) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), test::GetGlobalMetricsLogger(), @@ -693,8 +693,8 @@ TEST(DefaultVideoQualityAnalyzerTest, CpuUsage) { TEST(DefaultVideoQualityAnalyzerTest, RuntimeParticipantsAdding) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); constexpr char kAlice[] = "alice"; constexpr char kBob[] = "bob"; @@ -849,8 +849,8 @@ TEST(DefaultVideoQualityAnalyzerTest, SimulcastFrameWasFullyReceivedByAllPeersBeforeEncodeFinish) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), test::GetGlobalMetricsLogger(), @@ -911,8 +911,8 @@ TEST(DefaultVideoQualityAnalyzerTest, FrameCanBeReceivedBySenderAfterItWasReceivedByReceiver) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = 
AnalyzerOptionsForTest(); options.enable_receive_own_stream = true; @@ -1008,8 +1008,8 @@ TEST(DefaultVideoQualityAnalyzerTest, FrameCanBeReceivedByReceiverAfterItWasReceivedBySender) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); options.enable_receive_own_stream = true; @@ -1104,8 +1104,8 @@ TEST(DefaultVideoQualityAnalyzerTest, TEST(DefaultVideoQualityAnalyzerTest, CodecTrackedCorrectly) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), test::GetGlobalMetricsLogger(), @@ -1175,8 +1175,8 @@ TEST(DefaultVideoQualityAnalyzerTest, FramesInFlightAreCorrectlySentToTheComparatorAfterStop) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), @@ -1267,8 +1267,8 @@ TEST( FramesInFlightAreCorrectlySentToTheComparatorAfterStopForSenderAndReceiver) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); options.enable_receive_own_stream = true; @@ -1373,8 +1373,8 @@ TEST( TEST(DefaultVideoQualityAnalyzerTest, GetStreamFrames) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), @@ -1419,8 +1419,8 @@ TEST(DefaultVideoQualityAnalyzerTest, GetStreamFrames) { TEST(DefaultVideoQualityAnalyzerTest, ReceiverReceivedFramesWhenSenderRemoved) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), @@ -1462,8 +1462,8 @@ TEST(DefaultVideoQualityAnalyzerTest, ReceiverReceivedFramesWhenSenderRemovedWithSelfview) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); options.enable_receive_own_stream = true; @@ -1506,8 +1506,8 @@ TEST(DefaultVideoQualityAnalyzerTest, SenderReceivedFramesWhenReceiverRemovedWithSelfview) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); 
DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); options.enable_receive_own_stream = true; @@ -1550,8 +1550,8 @@ TEST(DefaultVideoQualityAnalyzerTest, SenderAndReceiverReceivedFramesWhenReceiverRemovedWithSelfview) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); options.enable_receive_own_stream = true; @@ -1607,8 +1607,8 @@ TEST(DefaultVideoQualityAnalyzerTest, TEST(DefaultVideoQualityAnalyzerTest, ReceiverRemovedBeforeCapturing2ndFrame) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), @@ -1648,8 +1648,8 @@ TEST(DefaultVideoQualityAnalyzerTest, ReceiverRemovedBeforeCapturing2ndFrame) { TEST(DefaultVideoQualityAnalyzerTest, ReceiverRemovedBeforePreEncoded) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), @@ -1691,8 +1691,8 @@ TEST(DefaultVideoQualityAnalyzerTest, ReceiverRemovedBeforePreEncoded) { TEST(DefaultVideoQualityAnalyzerTest, ReceiverRemovedBeforeEncoded) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), @@ -1735,8 +1735,8 @@ TEST(DefaultVideoQualityAnalyzerTest, ReceiverRemovedBetweenSimulcastLayersEncoded) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), @@ -1782,8 +1782,8 @@ TEST(DefaultVideoQualityAnalyzerTest, TEST(DefaultVideoQualityAnalyzerTest, UnregisterOneAndRegisterAnother) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), @@ -1844,8 +1844,8 @@ TEST(DefaultVideoQualityAnalyzerTest, UnregisterOneAndRegisterAnotherRegisterBack) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), @@ -1900,8 +1900,8 @@ TEST(DefaultVideoQualityAnalyzerTest, 
FramesInFlightAreAccountedForUnregisterPeers) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), @@ -1934,8 +1934,8 @@ TEST(DefaultVideoQualityAnalyzerTest, TEST(DefaultVideoQualityAnalyzerTest, InfraMetricsAreReportedWhenRequested) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); options.report_infra_metrics = true; @@ -1966,8 +1966,8 @@ TEST(DefaultVideoQualityAnalyzerTest, InfraMetricsAreReportedWhenRequested) { TEST(DefaultVideoQualityAnalyzerTest, InfraMetricsNotCollectedByDefault) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); options.report_infra_metrics = false; @@ -1999,8 +1999,8 @@ TEST(DefaultVideoQualityAnalyzerTest, FrameDroppedByDecoderIsAccountedCorrectly) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); options.report_infra_metrics = false; @@ -2041,8 +2041,8 @@ TEST_P(DefaultVideoQualityAnalyzerTimeBetweenFreezesTest, TimeBetweenFreezesIsEqualToStreamDurationWhenThereAreNoFeeezes) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), @@ -2094,8 +2094,8 @@ TEST_F(DefaultVideoQualityAnalyzerSimulatedTimeTest, PausedAndResumedStreamIsAccountedInStatsCorrectly) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest(); options.report_infra_metrics = false; @@ -2172,8 +2172,8 @@ TEST_F(DefaultVideoQualityAnalyzerSimulatedTimeTest, PausedAndResumedTwoStreamsAreAccountedInStatsCorrectly) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzer analyzer( GetClock(), test::GetGlobalMetricsLogger(), AnalyzerOptionsForTest()); @@ -2250,8 +2250,8 @@ TEST_F(DefaultVideoQualityAnalyzerSimulatedTimeTest, PausedStreamIsAccountedInStatsCorrectly) { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight, - /*type=*/absl::nullopt, - /*num_squares=*/absl::nullopt); + /*type=*/std::nullopt, + /*num_squares=*/std::nullopt); DefaultVideoQualityAnalyzerOptions options = 
AnalyzerOptionsForTest();
   options.report_infra_metrics = false;
@@ -2320,8 +2320,8 @@ TEST_F(DefaultVideoQualityAnalyzerSimulatedTimeTest,
        MemoryOverloadedAndThenAllFramesReceived) {
   std::unique_ptr frame_generator =
       test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
-                                       /*type=*/absl::nullopt,
-                                       /*num_squares=*/absl::nullopt);
+                                       /*type=*/std::nullopt,
+                                       /*num_squares=*/std::nullopt);
   DefaultVideoQualityAnalyzer analyzer(
       GetClock(), test::GetGlobalMetricsLogger(), AnalyzerOptionsForTest());
@@ -2382,8 +2382,8 @@ TEST(DefaultVideoQualityAnalyzerTest, CheckFrameSenderPeerName) {
   constexpr char kBobStreamLabel[] = "bob-video";
   std::unique_ptr frame_generator =
       test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
-                                       /*type=*/absl::nullopt,
-                                       /*num_squares=*/absl::nullopt);
+                                       /*type=*/std::nullopt,
+                                       /*num_squares=*/std::nullopt);
   DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(),
                                        test::GetGlobalMetricsLogger(),
                                        AnalyzerOptionsForTest());
diff --git a/test/pc/e2e/analyzer/video/dvqa/BUILD.gn b/test/pc/e2e/analyzer/video/dvqa/BUILD.gn
index 5f525ae062..fd250f4f73 100644
--- a/test/pc/e2e/analyzer/video/dvqa/BUILD.gn
+++ b/test/pc/e2e/analyzer/video/dvqa/BUILD.gn
@@ -73,7 +73,6 @@ rtc_library("frames_storage") {
     "../../../../../../api/video:video_frame",
     "../../../../../../system_wrappers",
   ]
-  absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
 }
 if (rtc_include_tests) {
diff --git a/test/pc/e2e/analyzer/video/dvqa/frames_storage.cc b/test/pc/e2e/analyzer/video/dvqa/frames_storage.cc
index 2a70468e69..369295f3fa 100644
--- a/test/pc/e2e/analyzer/video/dvqa/frames_storage.cc
+++ b/test/pc/e2e/analyzer/video/dvqa/frames_storage.cc
@@ -11,9 +11,9 @@
 #include "test/pc/e2e/analyzer/video/dvqa/frames_storage.h"
 #include
+#include
 #include
-#include "absl/types/optional.h"
 #include "api/units/timestamp.h"
 #include "api/video/video_frame.h"
@@ -26,10 +26,10 @@ void FramesStorage::Add(const VideoFrame& frame, Timestamp captured_time) {
   RemoveTooOldFrames();
 }
-absl::optional FramesStorage::Get(uint16_t frame_id) {
+std::optional FramesStorage::Get(uint16_t frame_id) {
   auto it = frame_id_index_.find(frame_id);
   if (it == frame_id_index_.end()) {
-    return absl::nullopt;
+    return std::nullopt;
   }
   return heap_[it->second].frame;
diff --git a/test/pc/e2e/analyzer/video/dvqa/frames_storage.h b/test/pc/e2e/analyzer/video/dvqa/frames_storage.h
index d3c6bd48db..05012e6402 100644
--- a/test/pc/e2e/analyzer/video/dvqa/frames_storage.h
+++ b/test/pc/e2e/analyzer/video/dvqa/frames_storage.h
@@ -12,10 +12,10 @@
 #define TEST_PC_E2E_ANALYZER_VIDEO_DVQA_FRAMES_STORAGE_H_
 #include
+#include
 #include
 #include
-#include "absl/types/optional.h"
 #include "api/units/time_delta.h"
 #include "api/units/timestamp.h"
 #include "api/video/video_frame.h"
@@ -41,7 +41,7 @@ class FramesStorage {
   void Add(const VideoFrame& frame, Timestamp captured_time);
   // Complexity: O(1)
-  absl::optional Get(uint16_t frame_id);
+  std::optional Get(uint16_t frame_id);
   // Removes the frame identified by `frame_id` from the storage. No error
   // happens in case there isn't a frame identified by `frame_id`.
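Note: the FramesStorage hunks above replace absl::optional with std::optional in the frame lookup. The following is a minimal, self-contained sketch of that lookup pattern with hypothetical names; it is illustrative only and not part of the change itself.

#include <cstdint>
#include <map>
#include <optional>
#include <string>
#include <utility>

// Hypothetical, simplified stand-in for FramesStorage: values are looked up
// by frame id, and a miss is reported as std::nullopt rather than
// absl::nullopt.
class FrameLabelStorage {
 public:
  void Add(uint16_t frame_id, std::string label) {
    labels_[frame_id] = std::move(label);
  }

  // Returns the stored label, or std::nullopt if `frame_id` is unknown.
  std::optional<std::string> Get(uint16_t frame_id) const {
    auto it = labels_.find(frame_id);
    if (it == labels_.end()) {
      return std::nullopt;
    }
    return it->second;
  }

 private:
  std::map<uint16_t, std::string> labels_;
};
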
diff --git a/test/pc/e2e/analyzer/video/dvqa/frames_storage_test.cc b/test/pc/e2e/analyzer/video/dvqa/frames_storage_test.cc index 88a172eeda..23d4525140 100644 --- a/test/pc/e2e/analyzer/video/dvqa/frames_storage_test.cc +++ b/test/pc/e2e/analyzer/video/dvqa/frames_storage_test.cc @@ -27,7 +27,7 @@ namespace webrtc { namespace { VideoFrame Create2x2Frame(uint16_t frame_id) { - rtc::scoped_refptr buffer = + scoped_refptr buffer = I420Buffer::Create(/*width=*/2, /*height=*/2); memset(buffer->MutableDataY(), static_cast(frame_id), 4); memset(buffer->MutableDataU(), static_cast(frame_id + 1), 1); @@ -40,7 +40,7 @@ VideoFrame Create2x2Frame(uint16_t frame_id) { } void AssertHasFrame(FramesStorage& storage, uint16_t frame_id) { - absl::optional frame = storage.Get(frame_id); + std::optional frame = storage.Get(frame_id); ASSERT_TRUE(frame.has_value()) << "Frame " << frame_id << " wasn't found"; EXPECT_EQ(frame->id(), frame_id); } diff --git a/test/pc/e2e/analyzer/video/encoded_image_data_injector.h b/test/pc/e2e/analyzer/video/encoded_image_data_injector.h index 384e901462..307470168b 100644 --- a/test/pc/e2e/analyzer/video/encoded_image_data_injector.h +++ b/test/pc/e2e/analyzer/video/encoded_image_data_injector.h @@ -12,9 +12,9 @@ #define TEST_PC_E2E_ANALYZER_VIDEO_ENCODED_IMAGE_DATA_INJECTOR_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/video/encoded_image.h" namespace webrtc { @@ -35,7 +35,7 @@ class EncodedImageDataInjector { }; struct EncodedImageExtractionResult { - absl::optional id; + std::optional id; EncodedImage image; // Is true if encoded image should be discarded. It is used to filter out // unnecessary spatial layers and simulcast streams. diff --git a/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc b/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc index 1d8ec30874..f55e886c46 100644 --- a/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc +++ b/test/pc/e2e/analyzer/video/example_video_quality_analyzer.cc @@ -18,10 +18,9 @@ namespace webrtc { ExampleVideoQualityAnalyzer::ExampleVideoQualityAnalyzer() = default; ExampleVideoQualityAnalyzer::~ExampleVideoQualityAnalyzer() = default; -void ExampleVideoQualityAnalyzer::Start( - std::string test_case_name, - rtc::ArrayView peer_names, - int max_threads_count) {} +void ExampleVideoQualityAnalyzer::Start(std::string test_case_name, + ArrayView peer_names, + int max_threads_count) {} uint16_t ExampleVideoQualityAnalyzer::OnFrameCaptured( absl::string_view peer_name, diff --git a/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h b/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h index 56cefa5ab6..78e44d8c0f 100644 --- a/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h +++ b/test/pc/e2e/analyzer/video/example_video_quality_analyzer.h @@ -34,7 +34,7 @@ class ExampleVideoQualityAnalyzer : public VideoQualityAnalyzerInterface { ~ExampleVideoQualityAnalyzer() override; void Start(std::string test_case_name, - rtc::ArrayView peer_names, + ArrayView peer_names, int max_threads_count) override; uint16_t OnFrameCaptured(absl::string_view peer_name, const std::string& stream_label, diff --git a/test/pc/e2e/analyzer/video/multi_reader_queue.h b/test/pc/e2e/analyzer/video/multi_reader_queue.h index 39d26b42bc..99c53d06f6 100644 --- a/test/pc/e2e/analyzer/video/multi_reader_queue.h +++ b/test/pc/e2e/analyzer/video/multi_reader_queue.h @@ -13,10 +13,10 @@ #include #include +#include #include #include -#include "absl/types/optional.h" #include 
"rtc_base/checks.h" namespace webrtc { @@ -90,10 +90,10 @@ class MultiReaderQueue { } // Extract element from specified head. Complexity O(1). - absl::optional PopFront(size_t reader) { + std::optional PopFront(size_t reader) { size_t pos = GetHeadPositionOrDie(reader); if (pos >= queue_.size()) { - return absl::nullopt; + return std::nullopt; } T out = queue_[pos]; @@ -109,10 +109,10 @@ class MultiReaderQueue { } // Returns element at specified head. Complexity O(1). - absl::optional Front(size_t reader) const { + std::optional Front(size_t reader) const { size_t pos = GetHeadPositionOrDie(reader); if (pos >= queue_.size()) { - return absl::nullopt; + return std::nullopt; } return queue_[pos]; } diff --git a/test/pc/e2e/analyzer/video/multi_reader_queue_test.cc b/test/pc/e2e/analyzer/video/multi_reader_queue_test.cc index ea6aa0a416..a035f44d7a 100644 --- a/test/pc/e2e/analyzer/video/multi_reader_queue_test.cc +++ b/test/pc/e2e/analyzer/video/multi_reader_queue_test.cc @@ -10,7 +10,8 @@ #include "test/pc/e2e/analyzer/video/multi_reader_queue.h" -#include "absl/types/optional.h" +#include + #include "test/gtest.h" namespace webrtc { @@ -48,7 +49,7 @@ TEST(MultiReaderQueueTest, SizeIsCorrectAfterRemoveFromOnlyOneHead) { // Removing elements from queue #0 for (int i = 0; i < 5; ++i) { EXPECT_EQ(queue.size(/*reader=*/0), static_cast(5 - i)); - EXPECT_EQ(queue.PopFront(/*reader=*/0), absl::optional(i)); + EXPECT_EQ(queue.PopFront(/*reader=*/0), std::optional(i)); for (int j = 1; j < 10; ++j) { EXPECT_EQ(queue.size(/*reader=*/j), 5lu); } @@ -63,7 +64,7 @@ TEST(MultiReaderQueueTest, SingleHeadOneAddOneRemove) { EXPECT_EQ(queue.size(), 1lu); EXPECT_TRUE(queue.Front(/*reader=*/0).has_value()); EXPECT_EQ(queue.Front(/*reader=*/0).value(), 1); - absl::optional value = queue.PopFront(/*reader=*/0); + std::optional value = queue.PopFront(/*reader=*/0); EXPECT_TRUE(value.has_value()); EXPECT_EQ(value.value(), 1); EXPECT_EQ(queue.size(), 0lu); @@ -78,8 +79,8 @@ TEST(MultiReaderQueueTest, SingleHead) { EXPECT_EQ(queue.size(), i + 1); } for (size_t i = 0; i < 10; ++i) { - EXPECT_EQ(queue.Front(/*reader=*/0), absl::optional(i)); - EXPECT_EQ(queue.PopFront(/*reader=*/0), absl::optional(i)); + EXPECT_EQ(queue.Front(/*reader=*/0), std::optional(i)); + EXPECT_EQ(queue.PopFront(/*reader=*/0), std::optional(i)); EXPECT_EQ(queue.size(), 10 - i - 1); } } @@ -92,19 +93,19 @@ TEST(MultiReaderQueueTest, ThreeHeadsAddAllRemoveAllPerHead) { EXPECT_EQ(queue.size(), i + 1); } for (size_t i = 0; i < 10; ++i) { - absl::optional value = queue.PopFront(/*reader=*/0); + std::optional value = queue.PopFront(/*reader=*/0); EXPECT_EQ(queue.size(), 10lu); ASSERT_TRUE(value.has_value()); EXPECT_EQ(value.value(), i); } for (size_t i = 0; i < 10; ++i) { - absl::optional value = queue.PopFront(/*reader=*/1); + std::optional value = queue.PopFront(/*reader=*/1); EXPECT_EQ(queue.size(), 10lu); ASSERT_TRUE(value.has_value()); EXPECT_EQ(value.value(), i); } for (size_t i = 0; i < 10; ++i) { - absl::optional value = queue.PopFront(/*reader=*/2); + std::optional value = queue.PopFront(/*reader=*/2); EXPECT_EQ(queue.size(), 10 - i - 1); ASSERT_TRUE(value.has_value()); EXPECT_EQ(value.value(), i); @@ -119,9 +120,9 @@ TEST(MultiReaderQueueTest, ThreeHeadsAddAllRemoveAll) { EXPECT_EQ(queue.size(), i + 1); } for (size_t i = 0; i < 10; ++i) { - absl::optional value1 = queue.PopFront(/*reader=*/0); - absl::optional value2 = queue.PopFront(/*reader=*/1); - absl::optional value3 = queue.PopFront(/*reader=*/2); + std::optional value1 = 
queue.PopFront(/*reader=*/0); + std::optional value2 = queue.PopFront(/*reader=*/1); + std::optional value3 = queue.PopFront(/*reader=*/2); EXPECT_EQ(queue.size(), 10 - i - 1); ASSERT_TRUE(value1.has_value()); ASSERT_TRUE(value2.has_value()); @@ -146,7 +147,7 @@ TEST(MultiReaderQueueTest, AddReaderSeeElementsOnlyFromReaderToCopy) { EXPECT_EQ(queue.readers_count(), 3lu); for (size_t i = 5; i < 10; ++i) { - absl::optional value = queue.PopFront(/*reader=*/2); + std::optional value = queue.PopFront(/*reader=*/2); EXPECT_EQ(queue.size(/*reader=*/2), 10 - i - 1); ASSERT_TRUE(value.has_value()); EXPECT_EQ(value.value(), i); @@ -167,7 +168,7 @@ TEST(MultiReaderQueueTest, AddReaderWithoutReaderToCopySeeFullQueue) { EXPECT_EQ(queue.readers_count(), 3lu); for (size_t i = 0; i < 10; ++i) { - absl::optional value = queue.PopFront(/*reader=*/2); + std::optional value = queue.PopFront(/*reader=*/2); EXPECT_EQ(queue.size(/*reader=*/2), 10 - i - 1); ASSERT_TRUE(value.has_value()); EXPECT_EQ(value.value(), i); diff --git a/test/pc/e2e/analyzer/video/names_collection.cc b/test/pc/e2e/analyzer/video/names_collection.cc index 3ccab620f8..d7129d0ef9 100644 --- a/test/pc/e2e/analyzer/video/names_collection.cc +++ b/test/pc/e2e/analyzer/video/names_collection.cc @@ -10,14 +10,14 @@ #include "test/pc/e2e/analyzer/video/names_collection.h" +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" namespace webrtc { -NamesCollection::NamesCollection(rtc::ArrayView names) { +NamesCollection::NamesCollection(ArrayView names) { names_ = std::vector(names.begin(), names.end()); for (size_t i = 0; i < names_.size(); ++i) { index_.emplace(names_[i], i); @@ -65,15 +65,14 @@ size_t NamesCollection::AddIfAbsent(absl::string_view name) { return out; } -absl::optional NamesCollection::RemoveIfPresent( - absl::string_view name) { +std::optional NamesCollection::RemoveIfPresent(absl::string_view name) { auto it = index_.find(name); if (it == index_.end()) { - return absl::nullopt; + return std::nullopt; } size_t index = it->second; if (removed_[index]) { - return absl::nullopt; + return std::nullopt; } removed_[index] = true; size_--; diff --git a/test/pc/e2e/analyzer/video/names_collection.h b/test/pc/e2e/analyzer/video/names_collection.h index f9a13a2a11..7a2d59d1e8 100644 --- a/test/pc/e2e/analyzer/video/names_collection.h +++ b/test/pc/e2e/analyzer/video/names_collection.h @@ -12,12 +12,12 @@ #define TEST_PC_E2E_ANALYZER_VIDEO_NAMES_COLLECTION_H_ #include +#include #include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" namespace webrtc { @@ -39,7 +39,7 @@ class NamesCollection { public: NamesCollection() = default; - explicit NamesCollection(rtc::ArrayView names); + explicit NamesCollection(ArrayView names); // Returns amount of currently presented names in the collection. size_t size() const { return size_; } @@ -70,9 +70,9 @@ class NamesCollection { // will continue to return previously known index for `index(name)` and return // `name` for `name(index(name))`. // - // Returns the index of the removed value or absl::nullopt if no such `name` + // Returns the index of the removed value or std::nullopt if no such `name` // registered in the collection. - absl::optional RemoveIfPresent(absl::string_view name); + std::optional RemoveIfPresent(absl::string_view name); // Returns a set of indexes for all currently present names in the // collection. 
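A rough usage sketch of the NamesCollection semantics documented above (removal keeps the name's index reserved, and a missing name is reported as std::nullopt). It assumes the names_collection.h header from this change; the function and peer names are placeholders.

#include <cstddef>
#include <optional>
#include <string>
#include <vector>

#include "test/pc/e2e/analyzer/video/names_collection.h"

// Hypothetical helper showing the post-migration return types.
void SketchNamesCollectionUsage() {
  webrtc::NamesCollection collection(std::vector<std::string>{"alice", "bob"});

  // Removing a present name reports its index; the index stays reserved, so
  // re-adding "bob" later yields the same value from AddIfAbsent().
  std::optional<size_t> bob_index = collection.RemoveIfPresent("bob");

  // Removing an unknown name reports std::nullopt instead of absl::nullopt.
  std::optional<size_t> unknown = collection.RemoveIfPresent("charlie");
  (void)bob_index;
  (void)unknown;
}
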
diff --git a/test/pc/e2e/analyzer/video/names_collection_test.cc b/test/pc/e2e/analyzer/video/names_collection_test.cc index 6c52f96975..e67c23f3fd 100644 --- a/test/pc/e2e/analyzer/video/names_collection_test.cc +++ b/test/pc/e2e/analyzer/video/names_collection_test.cc @@ -10,10 +10,10 @@ #include "test/pc/e2e/analyzer/video/names_collection.h" +#include #include #include -#include "absl/types/optional.h" #include "test/gmock.h" #include "test/gtest.h" @@ -76,7 +76,7 @@ TEST(NamesCollectionTest, RemoveRemovesFromCollectionButNotIndex) { EXPECT_THAT(collection.size(), Eq(static_cast(2))); EXPECT_THAT(collection.RemoveIfPresent("bob"), - Eq(absl::optional(bob_index))); + Eq(std::optional(bob_index))); EXPECT_THAT(collection.size(), Eq(static_cast(1))); EXPECT_FALSE(collection.HasName("bob")); @@ -94,7 +94,7 @@ TEST(NamesCollectionTest, RemoveOfAliceDoesNotChangeBobIndex) { EXPECT_THAT(collection.size(), Eq(static_cast(2))); EXPECT_THAT(collection.RemoveIfPresent("alice"), - Eq(absl::optional(alice_index))); + Eq(std::optional(alice_index))); EXPECT_THAT(collection.size(), Eq(static_cast(1))); EXPECT_THAT(collection.index("bob"), Eq(bob_index)); @@ -108,17 +108,17 @@ TEST(NamesCollectionTest, RemoveSecondTimeHasNoEffect) { EXPECT_THAT(collection.size(), Eq(static_cast(1))); EXPECT_THAT(collection.RemoveIfPresent("bob"), - Eq(absl::optional(bob_index))); + Eq(std::optional(bob_index))); EXPECT_THAT(collection.size(), Eq(static_cast(0))); - EXPECT_THAT(collection.RemoveIfPresent("bob"), Eq(absl::nullopt)); + EXPECT_THAT(collection.RemoveIfPresent("bob"), Eq(std::nullopt)); } TEST(NamesCollectionTest, RemoveOfNotExistingHasNoEffect) { NamesCollection collection(std::vector{"bob"}); EXPECT_THAT(collection.size(), Eq(static_cast(1))); - EXPECT_THAT(collection.RemoveIfPresent("alice"), Eq(absl::nullopt)); + EXPECT_THAT(collection.RemoveIfPresent("alice"), Eq(std::nullopt)); EXPECT_THAT(collection.size(), Eq(static_cast(1))); } @@ -129,7 +129,7 @@ TEST(NamesCollectionTest, AddRemoveAddPreserveTheIndex) { EXPECT_THAT(collection.size(), Eq(static_cast(1))); EXPECT_THAT(collection.RemoveIfPresent("alice"), - Eq(absl::optional(alice_index))); + Eq(std::optional(alice_index))); EXPECT_THAT(collection.size(), Eq(static_cast(0))); EXPECT_THAT(collection.AddIfAbsent("alice"), Eq(alice_index)); @@ -144,7 +144,7 @@ TEST(NamesCollectionTest, GetKnownSizeReturnsForRemovedNames) { EXPECT_THAT(collection.GetKnownSize(), Eq(static_cast(1))); EXPECT_THAT(collection.RemoveIfPresent("alice"), - Eq(absl::optional(alice_index))); + Eq(std::optional(alice_index))); EXPECT_THAT(collection.GetKnownSize(), Eq(static_cast(1))); } diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc index 4d19c912f7..1b51402c48 100644 --- a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc +++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.cc @@ -13,10 +13,10 @@ #include #include #include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/video/i420_buffer.h" #include "api/video/video_frame.h" #include "modules/video_coding/include/video_error_codes.h" @@ -75,19 +75,20 @@ int32_t QualityAnalyzingVideoDecoder::Decode(const EncodedImage& input_image, // // For more details see QualityAnalyzingVideoEncoder. 
return analyzing_callback_->IrrelevantSimulcastStreamDecoded( - out.id.value_or(VideoFrame::kNotSetId), input_image.Timestamp()); + out.id.value_or(VideoFrame::kNotSetId), input_image.RtpTimestamp()); } EncodedImage* origin_image; { MutexLock lock(&mutex_); // Store id to be able to retrieve it in analyzing callback. - timestamp_to_frame_id_.insert({input_image.Timestamp(), out.id}); + timestamp_to_frame_id_.insert({input_image.RtpTimestamp(), out.id}); // Store encoded image to prevent its destruction while it is used in // decoder. - origin_image = &( - decoding_images_.insert({input_image.Timestamp(), std::move(out.image)}) - .first->second); + origin_image = + &(decoding_images_ + .insert({input_image.RtpTimestamp(), std::move(out.image)}) + .first->second); } // We can safely dereference `origin_image`, because it can be removed from // the map only after `delegate_` Decode method will be invoked. Image will @@ -101,8 +102,8 @@ int32_t QualityAnalyzingVideoDecoder::Decode(const EncodedImage& input_image, VideoQualityAnalyzerInterface::DecoderStats stats; { MutexLock lock(&mutex_); - timestamp_to_frame_id_.erase(input_image.Timestamp()); - decoding_images_.erase(input_image.Timestamp()); + timestamp_to_frame_id_.erase(input_image.RtpTimestamp()); + decoding_images_.erase(input_image.RtpTimestamp()); stats.decoder_name = codec_name_; } analyzer_->OnDecoderError( @@ -155,8 +156,8 @@ void QualityAnalyzingVideoDecoder::DecoderCallback::SetDelegateCallback( // method on `delegate_callback_`, as was called on `this` callback. int32_t QualityAnalyzingVideoDecoder::DecoderCallback::Decoded( VideoFrame& decodedImage) { - decoder_->OnFrameDecoded(&decodedImage, /*decode_time_ms=*/absl::nullopt, - /*qp=*/absl::nullopt); + decoder_->OnFrameDecoded(&decodedImage, /*decode_time_ms=*/std::nullopt, + /*qp=*/std::nullopt); MutexLock lock(&callback_mutex_); RTC_DCHECK(delegate_callback_); @@ -166,7 +167,7 @@ int32_t QualityAnalyzingVideoDecoder::DecoderCallback::Decoded( int32_t QualityAnalyzingVideoDecoder::DecoderCallback::Decoded( VideoFrame& decodedImage, int64_t decode_time_ms) { - decoder_->OnFrameDecoded(&decodedImage, decode_time_ms, /*qp=*/absl::nullopt); + decoder_->OnFrameDecoded(&decodedImage, decode_time_ms, /*qp=*/std::nullopt); MutexLock lock(&callback_mutex_); RTC_DCHECK(delegate_callback_); @@ -175,8 +176,8 @@ int32_t QualityAnalyzingVideoDecoder::DecoderCallback::Decoded( void QualityAnalyzingVideoDecoder::DecoderCallback::Decoded( VideoFrame& decodedImage, - absl::optional decode_time_ms, - absl::optional qp) { + std::optional decode_time_ms, + std::optional qp) { decoder_->OnFrameDecoded(&decodedImage, decode_time_ms, qp); MutexLock lock(&callback_mutex_); @@ -191,16 +192,16 @@ QualityAnalyzingVideoDecoder::DecoderCallback::IrrelevantSimulcastStreamDecoded( webrtc::VideoFrame dummy_frame = webrtc::VideoFrame::Builder() .set_video_frame_buffer(GetDummyFrameBuffer()) - .set_timestamp_rtp(timestamp_ms) + .set_rtp_timestamp(timestamp_ms) .set_id(frame_id) .build(); MutexLock lock(&callback_mutex_); RTC_DCHECK(delegate_callback_); - delegate_callback_->Decoded(dummy_frame, absl::nullopt, absl::nullopt); + delegate_callback_->Decoded(dummy_frame, std::nullopt, std::nullopt); return WEBRTC_VIDEO_CODEC_OK; } -rtc::scoped_refptr +scoped_refptr QualityAnalyzingVideoDecoder::DecoderCallback::GetDummyFrameBuffer() { if (!dummy_frame_buffer_) { dummy_frame_buffer_ = CreateDummyFrameBuffer(); @@ -211,25 +212,25 @@ QualityAnalyzingVideoDecoder::DecoderCallback::GetDummyFrameBuffer() { void 
QualityAnalyzingVideoDecoder::OnFrameDecoded( VideoFrame* frame, - absl::optional decode_time_ms, - absl::optional qp) { - absl::optional frame_id; + std::optional decode_time_ms, + std::optional qp) { + std::optional frame_id; std::string codec_name; { MutexLock lock(&mutex_); - auto it = timestamp_to_frame_id_.find(frame->timestamp()); + auto it = timestamp_to_frame_id_.find(frame->rtp_timestamp()); if (it == timestamp_to_frame_id_.end()) { // Ensure, that we have info about this frame. It can happen that for some // reasons decoder response, that it failed to decode, when we were // posting frame to it, but then call the callback for this frame. RTC_LOG(LS_ERROR) << "QualityAnalyzingVideoDecoder::OnFrameDecoded: No " "frame id for frame for frame->timestamp()=" - << frame->timestamp(); + << frame->rtp_timestamp(); return; } frame_id = it->second; timestamp_to_frame_id_.erase(it); - decoding_images_.erase(frame->timestamp()); + decoding_images_.erase(frame->rtp_timestamp()); codec_name = codec_name_; } // Set frame id to the value, that was extracted from corresponding encoded @@ -238,6 +239,7 @@ void QualityAnalyzingVideoDecoder::OnFrameDecoded( VideoQualityAnalyzerInterface::DecoderStats stats; stats.decoder_name = codec_name; stats.decode_time_ms = decode_time_ms; + stats.qp = qp; analyzer_->OnFrameDecoded(peer_name_, *frame, stats); } @@ -258,10 +260,10 @@ QualityAnalyzingVideoDecoderFactory::GetSupportedFormats() const { return delegate_->GetSupportedFormats(); } -std::unique_ptr -QualityAnalyzingVideoDecoderFactory::CreateVideoDecoder( +std::unique_ptr QualityAnalyzingVideoDecoderFactory::Create( + const Environment& env, const SdpVideoFormat& format) { - std::unique_ptr decoder = delegate_->CreateVideoDecoder(format); + std::unique_ptr decoder = delegate_->Create(env, format); return std::make_unique( peer_name_, std::move(decoder), extractor_, analyzer_); } diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h index 2f0c2b9d5d..2e567e1c35 100644 --- a/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h +++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_decoder.h @@ -13,11 +13,12 @@ #include #include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" +#include "api/environment/environment.h" #include "api/test/video_quality_analyzer_interface.h" #include "api/video/encoded_image.h" #include "api/video/video_frame.h" @@ -78,26 +79,26 @@ class QualityAnalyzingVideoDecoder : public VideoDecoder { int32_t Decoded(VideoFrame& decodedImage) override; int32_t Decoded(VideoFrame& decodedImage, int64_t decode_time_ms) override; void Decoded(VideoFrame& decodedImage, - absl::optional decode_time_ms, - absl::optional qp) override; + std::optional decode_time_ms, + std::optional qp) override; int32_t IrrelevantSimulcastStreamDecoded(uint16_t frame_id, uint32_t timestamp_ms); private: - rtc::scoped_refptr GetDummyFrameBuffer(); + scoped_refptr GetDummyFrameBuffer(); QualityAnalyzingVideoDecoder* const decoder_; - rtc::scoped_refptr dummy_frame_buffer_; + scoped_refptr dummy_frame_buffer_; Mutex callback_mutex_; DecodedImageCallback* delegate_callback_ RTC_GUARDED_BY(callback_mutex_); }; void OnFrameDecoded(VideoFrame* frame, - absl::optional decode_time_ms, - absl::optional qp); + std::optional decode_time_ms, + std::optional qp); const std::string peer_name_; const std::string implementation_name_; @@ -113,7 +114,7 @@ class 
QualityAnalyzingVideoDecoder : public VideoDecoder { // Name of the video codec type used. Ex: VP8, VP9, H264 etc. std::string codec_name_ RTC_GUARDED_BY(mutex_); - std::map> timestamp_to_frame_id_ + std::map> timestamp_to_frame_id_ RTC_GUARDED_BY(mutex_); // Stores currently being decoded images by timestamp. Because // EncodedImageDataExtractor can create new copy on EncodedImage we need to @@ -136,8 +137,8 @@ class QualityAnalyzingVideoDecoderFactory : public VideoDecoderFactory { // Methods of VideoDecoderFactory interface. std::vector GetSupportedFormats() const override; - std::unique_ptr CreateVideoDecoder( - const SdpVideoFormat& format) override; + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override; private: const std::string peer_name_; diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc index 7a2b3165d6..65e2f12594 100644 --- a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc +++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.cc @@ -15,6 +15,7 @@ #include #include "absl/strings/string_view.h" +#include "api/environment/environment.h" #include "api/video/video_codec_type.h" #include "api/video_codecs/video_encoder.h" #include "modules/video_coding/include/video_error_codes.h" @@ -84,7 +85,7 @@ int32_t QualityAnalyzingVideoEncoder::InitEncode( MutexLock lock(&mutex_); codec_settings_ = *codec_settings; mode_ = SimulcastMode::kNormal; - absl::optional inter_layer_pred_mode; + std::optional inter_layer_pred_mode; if (codec_settings->GetScalabilityMode().has_value()) { inter_layer_pred_mode = ScalabilityModeToInterLayerPredMode( *codec_settings->GetScalabilityMode()); @@ -141,7 +142,7 @@ int32_t QualityAnalyzingVideoEncoder::Encode( { MutexLock lock(&mutex_); // Store id to be able to retrieve it in analyzing callback. - timestamp_to_frame_id_list_.push_back({frame.timestamp(), frame.id()}); + timestamp_to_frame_id_list_.push_back({frame.rtp_timestamp(), frame.id()}); // If this list is growing, it means that we are not receiving new encoded // images from encoder. So it should be a bug in setup on in the encoder. 
RTC_DCHECK_LT(timestamp_to_frame_id_list_.size(), kMaxFrameInPipelineCount); @@ -158,7 +159,7 @@ int32_t QualityAnalyzingVideoEncoder::Encode( auto it = timestamp_to_frame_id_list_.end(); while (it != timestamp_to_frame_id_list_.begin()) { --it; - if (it->first == frame.timestamp()) { + if (it->first == frame.rtp_timestamp()) { timestamp_to_frame_id_list_.erase(it); break; } @@ -199,7 +200,7 @@ void QualityAnalyzingVideoEncoder::SetRates( std::tie(min_bitrate_bps, max_bitrate_bps) = GetMinMaxBitratesBps(codec_settings_, si); double bitrate_multiplier = bitrate_multiplier_; - const uint32_t corrected_bitrate = rtc::checked_cast( + const uint32_t corrected_bitrate = checked_cast( bitrate_multiplier * spatial_layer_bitrate_bps); if (corrected_bitrate < min_bitrate_bps) { bitrate_multiplier = min_bitrate_bps / spatial_layer_bitrate_bps; @@ -211,8 +212,8 @@ void QualityAnalyzingVideoEncoder::SetRates( if (parameters.bitrate.HasBitrate(si, ti)) { multiplied_allocation.SetBitrate( si, ti, - rtc::checked_cast( - bitrate_multiplier * parameters.bitrate.GetBitrate(si, ti))); + checked_cast(bitrate_multiplier * + parameters.bitrate.GetBitrate(si, ti))); } } } @@ -252,7 +253,7 @@ EncodedImageCallback::Result QualityAnalyzingVideoEncoder::OnEncodedImage( std::pair timestamp_frame_id; while (!timestamp_to_frame_id_list_.empty()) { timestamp_frame_id = timestamp_to_frame_id_list_.front(); - if (timestamp_frame_id.first == encoded_image.Timestamp()) { + if (timestamp_frame_id.first == encoded_image.RtpTimestamp()) { break; } timestamp_to_frame_id_list_.pop_front(); @@ -271,7 +272,7 @@ EncodedImageCallback::Result QualityAnalyzingVideoEncoder::OnEncodedImage( // posting frame to it, but then call the callback for this frame. RTC_LOG(LS_ERROR) << "QualityAnalyzingVideoEncoder::OnEncodedImage: No " "frame id for encoded_image.Timestamp()=" - << encoded_image.Timestamp(); + << encoded_image.RtpTimestamp(); return EncodedImageCallback::Result( EncodedImageCallback::Result::Error::OK); } @@ -398,15 +399,15 @@ QualityAnalyzingVideoEncoderFactory::GetSupportedFormats() const { VideoEncoderFactory::CodecSupport QualityAnalyzingVideoEncoderFactory::QueryCodecSupport( const SdpVideoFormat& format, - absl::optional scalability_mode) const { + std::optional scalability_mode) const { return delegate_->QueryCodecSupport(format, scalability_mode); } -std::unique_ptr -QualityAnalyzingVideoEncoderFactory::CreateVideoEncoder( +std::unique_ptr QualityAnalyzingVideoEncoderFactory::Create( + const Environment& env, const SdpVideoFormat& format) { return std::make_unique( - peer_name_, delegate_->CreateVideoEncoder(format), bitrate_multiplier_, + peer_name_, delegate_->Create(env, format), bitrate_multiplier_, stream_to_sfu_config_, injector_, analyzer_); } diff --git a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h index 4adeacc0cd..02159d74c8 100644 --- a/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h +++ b/test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h @@ -17,6 +17,7 @@ #include #include "absl/strings/string_view.h" +#include "api/environment/environment.h" #include "api/test/pclf/media_configuration.h" #include "api/test/video_quality_analyzer_interface.h" #include "api/video/video_frame.h" @@ -51,7 +52,7 @@ class QualityAnalyzingVideoEncoder : public VideoEncoder, public EncodedImageCallback { public: using EmulatedSFUConfigMap = - std::map>; + std::map>; QualityAnalyzingVideoEncoder(absl::string_view peer_name, 
std::unique_ptr delegate, @@ -137,7 +138,7 @@ class QualityAnalyzingVideoEncoder : public VideoEncoder, const double bitrate_multiplier_; // Contains mapping from stream label to optional spatial index. // If we have stream label "Foo" and mapping contains - // 1. `absl::nullopt` means all streams are required + // 1. `std::nullopt` means all streams are required // 2. Concrete value means that particular simulcast/SVC stream have to be // analyzed. EmulatedSFUConfigMap stream_to_sfu_config_; @@ -175,9 +176,9 @@ class QualityAnalyzingVideoEncoderFactory : public VideoEncoderFactory { std::vector GetSupportedFormats() const override; VideoEncoderFactory::CodecSupport QueryCodecSupport( const SdpVideoFormat& format, - absl::optional scalability_mode) const override; - std::unique_ptr CreateVideoEncoder( - const SdpVideoFormat& format) override; + std::optional scalability_mode) const override; + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override; private: const std::string peer_name_; diff --git a/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.cc b/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.cc index 7a73b9f4f1..955406ab83 100644 --- a/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.cc +++ b/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.cc @@ -22,10 +22,9 @@ constexpr char kIrrelatedSimulcastStreamFrameData[] = "Dummy!"; } // namespace -rtc::scoped_refptr CreateDummyFrameBuffer() { +scoped_refptr CreateDummyFrameBuffer() { // Use i420 buffer here as default one and supported by all codecs. - rtc::scoped_refptr buffer = - webrtc::I420Buffer::Create(2, 2); + scoped_refptr buffer = webrtc::I420Buffer::Create(2, 2); memcpy(buffer->MutableDataY(), kIrrelatedSimulcastStreamFrameData, 2); memcpy(buffer->MutableDataY() + buffer->StrideY(), kIrrelatedSimulcastStreamFrameData + 2, 2); @@ -38,7 +37,7 @@ bool IsDummyFrame(const webrtc::VideoFrame& video_frame) { if (video_frame.width() != 2 || video_frame.height() != 2) { return false; } - rtc::scoped_refptr buffer = + scoped_refptr buffer = video_frame.video_frame_buffer()->ToI420(); if (memcmp(buffer->DataY(), kIrrelatedSimulcastStreamFrameData, 2) != 0) { return false; diff --git a/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h b/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h index 8ecfae7385..911d9d22e0 100644 --- a/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h +++ b/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h @@ -22,7 +22,7 @@ namespace webrtc_pc_e2e { // original was discarded and some frame is required to be passed upstream // to make WebRTC pipeline happy and not request key frame on the received // stream due to lack of incoming frames. -rtc::scoped_refptr CreateDummyFrameBuffer(); +scoped_refptr CreateDummyFrameBuffer(); // Tests if provided frame contains a buffer created by // `CreateDummyFrameBuffer`. 
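The dummy-buffer helpers above stamp a tiny 2x2 I420 buffer with known marker bytes so that discarded simulcast layers can still be recognized downstream. A hedged sketch of that round trip follows; the header path and namespace spellings mirror the files in this diff and should be treated as assumptions.

#include "api/video/video_frame.h"
#include "test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper.h"

// Sketch only: build a frame on the 2x2 marker buffer and check that it is
// recognized as the dummy frame.
bool SketchDummyFrameRoundTrip() {
  webrtc::VideoFrame frame =
      webrtc::VideoFrame::Builder()
          .set_video_frame_buffer(
              webrtc::webrtc_pc_e2e::CreateDummyFrameBuffer())
          .set_rtp_timestamp(/*rtp_timestamp=*/90000)
          .build();
  // IsDummyFrame() compares the 2x2 pixel data against the marker bytes, so
  // any real frame of a different size or content returns false.
  return webrtc::webrtc_pc_e2e::IsDummyFrame(frame);
}
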
diff --git a/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper_test.cc b/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper_test.cc index db1030232d..cdf778c96c 100644 --- a/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper_test.cc +++ b/test/pc/e2e/analyzer/video/simulcast_dummy_buffer_helper_test.cc @@ -25,7 +25,7 @@ uint8_t RandByte(Random& random) { } VideoFrame CreateRandom2x2VideoFrame(uint16_t id, Random& random) { - rtc::scoped_refptr buffer = I420Buffer::Create(2, 2); + scoped_refptr buffer = I420Buffer::Create(2, 2); uint8_t data[6] = {RandByte(random), RandByte(random), RandByte(random), RandByte(random), RandByte(random), RandByte(random)}; diff --git a/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc b/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc index ccd2f03537..3bc40a5dad 100644 --- a/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc +++ b/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.cc @@ -100,7 +100,7 @@ EncodedImageExtractionResult SingleProcessEncodedImageDataInjector::ExtractData( } size_t prev_frames_size = 0; - absl::optional id = absl::nullopt; + std::optional id = std::nullopt; bool discard = true; std::vector extraction_infos; for (size_t frame_size : frame_sizes) { diff --git a/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc b/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc index f6fa40455a..4cc09f097f 100644 --- a/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc +++ b/test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector_unittest.cc @@ -20,7 +20,7 @@ namespace webrtc { namespace webrtc_pc_e2e { namespace { -rtc::scoped_refptr +scoped_refptr CreateEncodedImageBufferOfSizeNFilledWithValuesFromX(size_t n, uint8_t x) { auto buffer = EncodedImageBuffer::Create(n); for (size_t i = 0; i < n; ++i) { @@ -49,7 +49,7 @@ TEST(SingleProcessEncodedImageDataInjectorTest, InjectExtractDiscardFalse) { EncodedImage source = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1); - source.SetTimestamp(123456789); + source.SetRtpTimestamp(123456789); EncodedImageExtractionResult out = injector.ExtractData(injector.InjectData(512, false, source)); @@ -68,7 +68,7 @@ TEST(SingleProcessEncodedImageDataInjectorTest, InjectExtractDiscardTrue) { EncodedImage source = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1); - source.SetTimestamp(123456789); + source.SetRtpTimestamp(123456789); EncodedImageExtractionResult out = injector.ExtractData(injector.InjectData(512, true, source)); @@ -85,7 +85,7 @@ TEST(SingleProcessEncodedImageDataInjectorTest, EncodedImage source = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1); - source.SetTimestamp(123456789); + source.SetRtpTimestamp(123456789); EncodedImage intermediate = injector.InjectData(512, false, source); intermediate.SetSpatialIndex(2); @@ -110,7 +110,7 @@ TEST(SingleProcessEncodedImageDataInjectorTest, EncodedImage source = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1); - source.SetTimestamp(123456789); + source.SetRtpTimestamp(123456789); EncodedImage intermediate = injector.InjectData(512, false, source); intermediate.SetSpatialIndex(2); @@ -138,15 +138,15 @@ TEST(SingleProcessEncodedImageDataInjectorTest, Inject3Extract3) { // 1st frame EncodedImage source1 = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1); - 
source1.SetTimestamp(123456710); + source1.SetRtpTimestamp(123456710); // 2nd frame 1st spatial layer EncodedImage source2 = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/11); - source2.SetTimestamp(123456720); + source2.SetRtpTimestamp(123456720); // 2nd frame 2nd spatial layer EncodedImage source3 = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/21); - source3.SetTimestamp(123456720); + source3.SetRtpTimestamp(123456720); EncodedImage intermediate1 = injector.InjectData(510, false, source1); EncodedImage intermediate2 = injector.InjectData(520, true, source2); @@ -183,13 +183,13 @@ TEST(SingleProcessEncodedImageDataInjectorTest, InjectExtractFromConcatenated) { EncodedImage source1 = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1); - source1.SetTimestamp(123456710); + source1.SetRtpTimestamp(123456710); EncodedImage source2 = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/11); - source2.SetTimestamp(123456710); + source2.SetRtpTimestamp(123456710); EncodedImage source3 = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/21); - source3.SetTimestamp(123456710); + source3.SetRtpTimestamp(123456710); // Inject id into 3 images with same frame id. EncodedImage intermediate1 = injector.InjectData(512, false, source1); @@ -200,7 +200,7 @@ TEST(SingleProcessEncodedImageDataInjectorTest, InjectExtractFromConcatenated) { // buffer. size_t concatenated_length = intermediate1.size() + intermediate2.size() + intermediate3.size(); - rtc::Buffer concatenated_buffer; + Buffer concatenated_buffer; concatenated_buffer.AppendData(intermediate1.data(), intermediate1.size()); concatenated_buffer.AppendData(intermediate2.data(), intermediate2.size()); concatenated_buffer.AppendData(intermediate3.data(), intermediate3.size()); @@ -235,13 +235,13 @@ TEST(SingleProcessEncodedImageDataInjector, EncodedImage source1 = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1); - source1.SetTimestamp(123456710); + source1.SetRtpTimestamp(123456710); EncodedImage source2 = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/11); - source2.SetTimestamp(123456710); + source2.SetRtpTimestamp(123456710); EncodedImage source3 = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/21); - source3.SetTimestamp(123456710); + source3.SetRtpTimestamp(123456710); // Inject id into 3 images with same frame id. EncodedImage intermediate1 = injector.InjectData(512, true, source1); @@ -252,7 +252,7 @@ TEST(SingleProcessEncodedImageDataInjector, // buffer. 
size_t concatenated_length = intermediate1.size() + intermediate2.size() + intermediate3.size(); - rtc::Buffer concatenated_buffer; + Buffer concatenated_buffer; concatenated_buffer.AppendData(intermediate1.data(), intermediate1.size()); concatenated_buffer.AppendData(intermediate2.data(), intermediate2.size()); concatenated_buffer.AppendData(intermediate3.data(), intermediate3.size()); @@ -282,7 +282,7 @@ TEST(SingleProcessEncodedImageDataInjectorTest, InjectOnceExtractTwice) { EncodedImage source = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1); - source.SetTimestamp(123456789); + source.SetRtpTimestamp(123456789); EncodedImageExtractionResult out = injector.ExtractData( injector.InjectData(/*id=*/512, /*discard=*/false, source)); @@ -310,7 +310,7 @@ TEST(SingleProcessEncodedImageDataInjectorTest, Add1stReceiverAfterStart) { EncodedImage source = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1); - source.SetTimestamp(123456789); + source.SetRtpTimestamp(123456789); EncodedImage modified_image = injector.InjectData( /*id=*/512, /*discard=*/false, source); @@ -332,7 +332,7 @@ TEST(SingleProcessEncodedImageDataInjectorTest, Add3rdReceiverAfterStart) { EncodedImage source = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1); - source.SetTimestamp(123456789); + source.SetRtpTimestamp(123456789); EncodedImage modified_image = injector.InjectData( /*id=*/512, /*discard=*/false, source); injector.ExtractData(modified_image); @@ -357,10 +357,10 @@ TEST(SingleProcessEncodedImageDataInjectorTest, EncodedImage source1 = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1); - source1.SetTimestamp(10); + source1.SetRtpTimestamp(10); EncodedImage source2 = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1); - source2.SetTimestamp(20); + source2.SetRtpTimestamp(20); EncodedImage modified_image1 = injector.InjectData( /*id=*/512, /*discard=*/false, source1); @@ -399,7 +399,7 @@ TEST(SingleProcessEncodedImageDataInjectorTestDeathTest, EncodedImage source = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1); - source.SetTimestamp(123456789); + source.SetRtpTimestamp(123456789); EncodedImage modified = injector.InjectData(/*id=*/512, /*discard=*/false, source); @@ -417,10 +417,10 @@ TEST(SingleProcessEncodedImageDataInjectorTestDeathTest, EncodedImage source1 = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1); - source1.SetTimestamp(10); + source1.SetRtpTimestamp(10); EncodedImage source2 = CreateEncodedImageOfSizeNFilledWithValuesFromX(/*n=*/10, /*x=*/1); - source2.SetTimestamp(20); + source2.SetRtpTimestamp(20); EncodedImage modified_image1 = injector.InjectData( /*id=*/512, /*discard=*/false, source1); diff --git a/test/pc/e2e/analyzer/video/video_dumping.h b/test/pc/e2e/analyzer/video/video_dumping.h index cad4e1bdbf..304a991266 100644 --- a/test/pc/e2e/analyzer/video/video_dumping.h +++ b/test/pc/e2e/analyzer/video/video_dumping.h @@ -25,7 +25,7 @@ namespace webrtc_pc_e2e { // `VideoSinkInterface` to dump incoming video frames into specified video // writer. -class VideoWriter final : public rtc::VideoSinkInterface { +class VideoWriter final : public VideoSinkInterface { public: // Creates video writer. Caller keeps ownership of `video_writer` and is // responsible for closing it after VideoWriter will be destroyed. 
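VideoWriter above is a VideoSinkInterface<VideoFrame> that forwards each incoming frame to a frame writer. Below is a minimal sink of the same shape, counting frames purely for illustration; the unprefixed webrtc::VideoSinkInterface spelling and header path are assumptions based on this change.

#include <atomic>

#include "api/video/video_frame.h"
#include "api/video/video_sink_interface.h"

// Minimal sketch of a sink in the style of VideoWriter.
class CountingVideoSink final
    : public webrtc::VideoSinkInterface<webrtc::VideoFrame> {
 public:
  // Invoked by the video pipeline for every delivered frame.
  void OnFrame(const webrtc::VideoFrame& /*frame*/) override {
    ++frames_seen_;
  }

  int frames_seen() const { return frames_seen_.load(); }

 private:
  std::atomic<int> frames_seen_{0};
};
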
diff --git a/test/pc/e2e/analyzer/video/video_dumping_test.cc b/test/pc/e2e/analyzer/video/video_dumping_test.cc index 5dd4021516..2b96fda024 100644 --- a/test/pc/e2e/analyzer/video/video_dumping_test.cc +++ b/test/pc/e2e/analyzer/video/video_dumping_test.cc @@ -12,10 +12,10 @@ #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" #include "api/video/video_frame.h" @@ -40,7 +40,7 @@ uint8_t RandByte(Random& random) { } VideoFrame CreateRandom2x2VideoFrame(uint16_t id, Random& random) { - rtc::scoped_refptr buffer = I420Buffer::Create(2, 2); + scoped_refptr buffer = I420Buffer::Create(2, 2); uint8_t data[6] = {RandByte(random), RandByte(random), RandByte(random), RandByte(random), RandByte(random), RandByte(random)}; @@ -63,12 +63,11 @@ std::vector AsVector(const uint8_t* data, size_t size) { return out; } -void AssertFramesEqual(rtc::scoped_refptr actual, - rtc::scoped_refptr expected) { +void AssertFramesEqual(scoped_refptr actual, + scoped_refptr expected) { ASSERT_THAT(actual->width(), Eq(expected->width())); ASSERT_THAT(actual->height(), Eq(expected->height())); - rtc::scoped_refptr expected_i420 = - expected->ToI420(); + scoped_refptr expected_i420 = expected->ToI420(); int height = actual->height(); diff --git a/test/pc/e2e/analyzer/video/video_frame_tracking_id_injector_unittest.cc b/test/pc/e2e/analyzer/video/video_frame_tracking_id_injector_unittest.cc index c7d453c4bb..6ef6597649 100644 --- a/test/pc/e2e/analyzer/video/video_frame_tracking_id_injector_unittest.cc +++ b/test/pc/e2e/analyzer/video/video_frame_tracking_id_injector_unittest.cc @@ -20,7 +20,7 @@ namespace { EncodedImage CreateEncodedImageOfSizeN(size_t n) { EncodedImage image; - rtc::scoped_refptr buffer = EncodedImageBuffer::Create(n); + scoped_refptr buffer = EncodedImageBuffer::Create(n); for (size_t i = 0; i < n; ++i) { buffer->data()[i] = static_cast(i); } diff --git a/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc b/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc index c61686f011..643dc946fd 100644 --- a/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc +++ b/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.cc @@ -48,7 +48,7 @@ class AnalyzingFramePreprocessor absl::string_view peer_name, absl::string_view stream_label, VideoQualityAnalyzerInterface* analyzer, - std::vector>> sinks) + std::vector>> sinks) : peer_name_(peer_name), stream_label_(stream_label), analyzer_(analyzer), @@ -72,8 +72,7 @@ class AnalyzingFramePreprocessor const std::string peer_name_; const std::string stream_label_; VideoQualityAnalyzerInterface* const analyzer_; - const std::vector>> - sinks_; + const std::vector>> sinks_; }; } // namespace @@ -117,7 +116,7 @@ std::unique_ptr VideoQualityAnalyzerInjectionHelper::CreateFramePreprocessor( absl::string_view peer_name, const VideoConfig& config) { - std::vector>> sinks; + std::vector>> sinks; if (config.input_dump_options.has_value()) { std::unique_ptr writer = config.input_dump_options->CreateInputDumpVideoFrameWriter( @@ -149,7 +148,7 @@ VideoQualityAnalyzerInjectionHelper::CreateVideoSink( void VideoQualityAnalyzerInjectionHelper::Start( std::string test_case_name, - rtc::ArrayView peer_names, + ArrayView peer_names, int max_threads_count) { analyzer_->Start(std::move(test_case_name), peer_names, max_threads_count); extractor_->Start(peer_names.size()); @@ -169,7 +168,7 @@ void 
VideoQualityAnalyzerInjectionHelper::UnregisterParticipantInCall( void VideoQualityAnalyzerInjectionHelper::OnStatsReports( absl::string_view pc_label, - const rtc::scoped_refptr& report) { + const scoped_refptr& report) { analyzer_->OnStatsReports(pc_label, report); } diff --git a/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h b/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h index 0039da8e86..6bba004a00 100644 --- a/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h +++ b/test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h @@ -81,7 +81,7 @@ class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface { bool report_infra_metrics); void Start(std::string test_case_name, - rtc::ArrayView peer_names, + ArrayView peer_names, int max_threads_count = 1); // Registers new call participant to the underlying video quality analyzer. @@ -96,7 +96,7 @@ class VideoQualityAnalyzerInjectionHelper : public StatsObserverInterface { // `analyzer_`. void OnStatsReports( absl::string_view pc_label, - const rtc::scoped_refptr& report) override; + const scoped_refptr& report) override; // Stops VideoQualityAnalyzerInterface to populate final data and metrics. // Should be invoked after analyzed video tracks are disposed. diff --git a/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc b/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc index 817b3caad0..d717cbb8ce 100644 --- a/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc +++ b/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.cc @@ -10,17 +10,30 @@ #include "test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h" +#include #include #include +#include +#include "absl/flags/flag.h" +#include "absl/strings/string_view.h" +#include "api/numerics/samples_stats_counter.h" +#include "api/scoped_refptr.h" #include "api/stats/rtc_stats.h" +#include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" #include "api/test/metrics/metric.h" +#include "api/test/metrics/metrics_logger.h" +#include "api/test/track_id_stream_info_map.h" #include "api/units/data_rate.h" +#include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "rtc_base/checks.h" +#include "rtc_base/synchronization/mutex.h" +#include "system_wrappers/include/clock.h" #include "test/pc/e2e/metric_metadata_keys.h" +#include "test/test_flags.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -28,7 +41,6 @@ namespace { using ::webrtc::test::ImprovementDirection; using ::webrtc::test::Unit; -using ::webrtc::webrtc_pc_e2e::MetricMetadataKey; SamplesStatsCounter BytesPerSecondToKbps(const SamplesStatsCounter& counter) { return counter * 0.008; @@ -47,49 +59,47 @@ void VideoQualityMetricsReporter::Start( absl::string_view test_case_name, const TrackIdStreamInfoMap* /*reporter_helper*/) { test_case_name_ = std::string(test_case_name); - start_time_ = Now(); + start_time_ = clock_->CurrentTime(); } void VideoQualityMetricsReporter::OnStatsReports( absl::string_view pc_label, - const rtc::scoped_refptr& report) { + const scoped_refptr& report) { RTC_CHECK(start_time_) << "Please invoke Start(...) 
method before calling OnStatsReports(...)"; - auto transport_stats = report->GetStatsOfType(); + std::vector transport_stats = + report->GetStatsOfType(); if (transport_stats.size() == 0u || - !transport_stats[0]->selected_candidate_pair_id.is_defined()) { + !transport_stats[0]->selected_candidate_pair_id.has_value()) { return; } RTC_DCHECK_EQ(transport_stats.size(), 1); std::string selected_ice_id = - transport_stats[0]->selected_candidate_pair_id.ValueToString(); + transport_stats[0] + ->GetAttribute(transport_stats[0]->selected_candidate_pair_id) + .ToString(); // Use the selected ICE candidate pair ID to get the appropriate ICE stats. const RTCIceCandidatePairStats ice_candidate_pair_stats = report->Get(selected_ice_id)->cast_to(); - auto outbound_rtp_stats = report->GetStatsOfType(); - StatsSample sample; - for (auto& s : outbound_rtp_stats) { - if (!s->kind.is_defined()) { + StatsSample sample = {.timestamp = *start_time_}; + for (const RTCOutboundRtpStreamStats* s : + report->GetStatsOfType()) { + if (!s->kind.has_value() || *s->kind != "video") { continue; } - if (!(*s->kind == "video")) { - continue; - } - if (s->timestamp() > sample.sample_time) { - sample.sample_time = s->timestamp(); - } + sample.timestamp = std::max(*sample.timestamp, s->timestamp()); sample.retransmitted_bytes_sent += - DataSize::Bytes(s->retransmitted_bytes_sent.ValueOrDefault(0ul)); - sample.bytes_sent += DataSize::Bytes(s->bytes_sent.ValueOrDefault(0ul)); + DataSize::Bytes(s->retransmitted_bytes_sent.value_or(0ul)); + sample.bytes_sent += DataSize::Bytes(s->bytes_sent.value_or(0ul)); sample.header_bytes_sent += - DataSize::Bytes(s->header_bytes_sent.ValueOrDefault(0ul)); + DataSize::Bytes(s->header_bytes_sent.value_or(0ul)); } - MutexLock lock(&video_bwe_stats_lock_); + MutexLock lock(&stats_lock_); VideoBweStats& video_bwe_stats = video_bwe_stats_[std::string(pc_label)]; - if (ice_candidate_pair_stats.available_outgoing_bitrate.is_defined()) { + if (ice_candidate_pair_stats.available_outgoing_bitrate.has_value()) { video_bwe_stats.available_send_bandwidth.AddSample( DataRate::BitsPerSec( *ice_candidate_pair_stats.available_outgoing_bitrate) @@ -97,12 +107,10 @@ void VideoQualityMetricsReporter::OnStatsReports( } StatsSample prev_sample = last_stats_sample_[std::string(pc_label)]; - if (prev_sample.sample_time.IsZero()) { - prev_sample.sample_time = start_time_.value(); - } last_stats_sample_[std::string(pc_label)] = sample; - TimeDelta time_between_samples = sample.sample_time - prev_sample.sample_time; + TimeDelta time_between_samples = + *sample.timestamp - prev_sample.timestamp.value_or(*start_time_); if (time_between_samples.IsZero()) { return; } @@ -121,25 +129,21 @@ void VideoQualityMetricsReporter::OnStatsReports( } void VideoQualityMetricsReporter::StopAndReportResults() { - MutexLock video_bwemutex_(&video_bwe_stats_lock_); + MutexLock lock(&stats_lock_); for (const auto& item : video_bwe_stats_) { ReportVideoBweResults(item.first, item.second); } } -std::string VideoQualityMetricsReporter::GetTestCaseName( - const std::string& peer_name) const { - return test_case_name_ + "/" + peer_name; -} - void VideoQualityMetricsReporter::ReportVideoBweResults( const std::string& peer_name, const VideoBweStats& video_bwe_stats) { - std::string test_case_name = GetTestCaseName(peer_name); - // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. + std::string test_case_name = + !absl::GetFlag(FLAGS_isolated_script_test_perf_output).empty() + ? 
test_case_name_ + "/" + peer_name + : test_case_name_; std::map metric_metadata{ - {MetricMetadataKey::kPeerMetadataKey, peer_name}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; + {MetricMetadataKey::kPeerMetadataKey, peer_name}}; metrics_logger_->LogMetric( "available_send_bandwidth", test_case_name, diff --git a/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h b/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h index d3d976343b..2abc6b9c4f 100644 --- a/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h +++ b/test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h @@ -12,26 +12,26 @@ #define TEST_PC_E2E_ANALYZER_VIDEO_VIDEO_QUALITY_METRICS_REPORTER_H_ #include +#include #include +#include #include "absl/strings/string_view.h" #include "api/numerics/samples_stats_counter.h" +#include "api/scoped_refptr.h" +#include "api/stats/rtc_stats_report.h" #include "api/test/metrics/metrics_logger.h" #include "api/test/peerconnection_quality_test_fixture.h" #include "api/test/track_id_stream_info_map.h" #include "api/units/data_size.h" #include "api/units/timestamp.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" +#include "system_wrappers/include/clock.h" namespace webrtc { namespace webrtc_pc_e2e { -struct VideoBweStats { - SamplesStatsCounter available_send_bandwidth; - SamplesStatsCounter transmission_bitrate; - SamplesStatsCounter retransmission_bitrate; -}; - class VideoQualityMetricsReporter : public PeerConnectionE2EQualityTestFixture::QualityMetricsReporter { public: @@ -43,36 +43,38 @@ class VideoQualityMetricsReporter const TrackIdStreamInfoMap* reporter_helper) override; void OnStatsReports( absl::string_view pc_label, - const rtc::scoped_refptr& report) override; + const scoped_refptr& report) override; void StopAndReportResults() override; private: + struct VideoBweStats { + SamplesStatsCounter available_send_bandwidth; + SamplesStatsCounter transmission_bitrate; + SamplesStatsCounter retransmission_bitrate; + }; struct StatsSample { + std::optional timestamp; DataSize bytes_sent = DataSize::Zero(); DataSize header_bytes_sent = DataSize::Zero(); DataSize retransmitted_bytes_sent = DataSize::Zero(); - - Timestamp sample_time = Timestamp::Zero(); }; - std::string GetTestCaseName(const std::string& peer_name) const; void ReportVideoBweResults(const std::string& peer_name, const VideoBweStats& video_bwe_stats); - Timestamp Now() const { return clock_->CurrentTime(); } Clock* const clock_; test::MetricsLogger* const metrics_logger_; std::string test_case_name_; - absl::optional start_time_; + std::optional start_time_; - Mutex video_bwe_stats_lock_; + Mutex stats_lock_; // Map between a peer connection label (provided by the framework) and // its video BWE stats. 
std::map video_bwe_stats_ - RTC_GUARDED_BY(video_bwe_stats_lock_); + RTC_GUARDED_BY(stats_lock_); std::map last_stats_sample_ - RTC_GUARDED_BY(video_bwe_stats_lock_); + RTC_GUARDED_BY(stats_lock_); }; } // namespace webrtc_pc_e2e diff --git a/test/pc/e2e/analyzer_helper.cc b/test/pc/e2e/analyzer_helper.cc index 76cd9a7c78..0987f8c42a 100644 --- a/test/pc/e2e/analyzer_helper.cc +++ b/test/pc/e2e/analyzer_helper.cc @@ -24,7 +24,7 @@ void AnalyzerHelper::AddTrackToStreamMapping( absl::string_view track_id, absl::string_view receiver_peer, absl::string_view stream_label, - absl::optional sync_group) { + std::optional sync_group) { RTC_DCHECK_RUN_ON(&signaling_sequence_checker_); track_to_stream_map_.insert( {std::string(track_id), diff --git a/test/pc/e2e/analyzer_helper.h b/test/pc/e2e/analyzer_helper.h index d0b47c4fb9..008cb3b5e8 100644 --- a/test/pc/e2e/analyzer_helper.h +++ b/test/pc/e2e/analyzer_helper.h @@ -12,10 +12,10 @@ #define TEST_PC_E2E_ANALYZER_HELPER_H_ #include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/sequence_checker.h" #include "api/test/track_id_stream_info_map.h" #include "rtc_base/thread_annotations.h" @@ -40,7 +40,7 @@ class AnalyzerHelper : public TrackIdStreamInfoMap { void AddTrackToStreamMapping(absl::string_view track_id, absl::string_view receiver_peer, absl::string_view stream_label, - absl::optional sync_group); + std::optional sync_group); void AddTrackToStreamMapping(std::string track_id, std::string stream_label); void AddTrackToStreamMapping(std::string track_id, std::string stream_label, diff --git a/test/pc/e2e/cross_media_metrics_reporter.cc b/test/pc/e2e/cross_media_metrics_reporter.cc index aad5946c9f..3315ec6b84 100644 --- a/test/pc/e2e/cross_media_metrics_reporter.cc +++ b/test/pc/e2e/cross_media_metrics_reporter.cc @@ -9,17 +9,24 @@ */ #include "test/pc/e2e/cross_media_metrics_reporter.h" +#include +#include +#include #include #include -#include "api/stats/rtc_stats.h" +#include "absl/flags/flag.h" +#include "absl/strings/string_view.h" +#include "api/scoped_refptr.h" +#include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" #include "api/test/metrics/metric.h" -#include "api/units/timestamp.h" +#include "api/test/metrics/metrics_logger.h" +#include "api/test/track_id_stream_info_map.h" #include "rtc_base/checks.h" -#include "rtc_base/event.h" -#include "system_wrappers/include/field_trial.h" +#include "rtc_base/synchronization/mutex.h" #include "test/pc/e2e/metric_metadata_keys.h" +#include "test/test_flags.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -42,13 +49,13 @@ void CrossMediaMetricsReporter::Start( void CrossMediaMetricsReporter::OnStatsReports( absl::string_view pc_label, - const rtc::scoped_refptr& report) { + const scoped_refptr& report) { auto inbound_stats = report->GetStatsOfType(); std::map> sync_group_stats; for (const auto& stat : inbound_stats) { - if (stat->estimated_playout_timestamp.ValueOrDefault(0.) > 0 && - stat->track_identifier.is_defined()) { + if (stat->estimated_playout_timestamp.value_or(0.) 
> 0 && + stat->track_identifier.has_value()) { sync_group_stats[reporter_helper_ ->GetStreamInfoFromTrackId(*stat->track_identifier) .sync_group] @@ -66,8 +73,8 @@ void CrossMediaMetricsReporter::OnStatsReports( const RTCInboundRtpStreamStats* audio_stat = pair.second[0]; const RTCInboundRtpStreamStats* video_stat = pair.second[1]; - RTC_CHECK(pair.second.size() == 2 && audio_stat->kind.is_defined() && - video_stat->kind.is_defined() && + RTC_CHECK(pair.second.size() == 2 && audio_stat->kind.has_value() && + video_stat->kind.has_value() && *audio_stat->kind != *video_stat->kind) << "Sync group should consist of one audio and one video stream."; @@ -77,8 +84,8 @@ void CrossMediaMetricsReporter::OnStatsReports( // Stream labels of a sync group are same for all polls, so we need it add // it only once. if (stats_info_.find(sync_group) == stats_info_.end()) { - RTC_CHECK(audio_stat->track_identifier.is_defined()); - RTC_CHECK(video_stat->track_identifier.is_defined()); + RTC_CHECK(audio_stat->track_identifier.has_value()); + RTC_CHECK(video_stat->track_identifier.has_value()); stats_info_[sync_group].audio_stream_info = reporter_helper_->GetStreamInfoFromTrackId( *audio_stat->track_identifier); @@ -105,7 +112,6 @@ void CrossMediaMetricsReporter::StopAndReportResults() { MutexLock lock(&mutex_); for (const auto& pair : stats_info_) { const std::string& sync_group = pair.first; - // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. std::map audio_metric_metadata{ {MetricMetadataKey::kPeerSyncGroupMetadataKey, sync_group}, {MetricMetadataKey::kAudioStreamMetadataKey, @@ -113,8 +119,7 @@ void CrossMediaMetricsReporter::StopAndReportResults() { {MetricMetadataKey::kPeerMetadataKey, pair.second.audio_stream_info.receiver_peer}, {MetricMetadataKey::kReceiverMetadataKey, - pair.second.audio_stream_info.receiver_peer}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; + pair.second.audio_stream_info.receiver_peer}}; metrics_logger_->LogMetric( "audio_ahead_ms", GetTestCaseName(pair.second.audio_stream_info.stream_label, sync_group), @@ -122,7 +127,6 @@ void CrossMediaMetricsReporter::StopAndReportResults() { webrtc::test::ImprovementDirection::kSmallerIsBetter, std::move(audio_metric_metadata)); - // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. 
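For reference, the accessor migration applied throughout these reporter hunks (absl::optional to std::optional, is_defined() to has_value(), ValueOrDefault(x) to value_or(x)) boils down to the following minimal, standalone sketch; StatSample and TotalPayloadBytes are hypothetical stand-ins, not WebRTC types:

#include <cstdint>
#include <optional>

// Hypothetical stand-in for an RTCStats-like object whose members are now
// plain std::optional values after this migration.
struct StatSample {
  std::optional<uint64_t> bytes_sent;
  std::optional<uint64_t> header_bytes_sent;
};

uint64_t TotalPayloadBytes(const StatSample& s) {
  // Previously spelled s.bytes_sent.is_defined() and
  // s.bytes_sent.ValueOrDefault(0ul); std::optional uses has_value() and
  // value_or() instead.
  if (!s.bytes_sent.has_value() && !s.header_bytes_sent.has_value()) {
    return 0;
  }
  return s.bytes_sent.value_or(0u) + s.header_bytes_sent.value_or(0u);
}
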
std::map video_metric_metadata{ {MetricMetadataKey::kPeerSyncGroupMetadataKey, sync_group}, {MetricMetadataKey::kAudioStreamMetadataKey, @@ -130,8 +134,7 @@ void CrossMediaMetricsReporter::StopAndReportResults() { {MetricMetadataKey::kPeerMetadataKey, pair.second.video_stream_info.receiver_peer}, {MetricMetadataKey::kReceiverMetadataKey, - pair.second.video_stream_info.receiver_peer}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; + pair.second.video_stream_info.receiver_peer}}; metrics_logger_->LogMetric( "video_ahead_ms", GetTestCaseName(pair.second.video_stream_info.stream_label, sync_group), @@ -144,7 +147,10 @@ void CrossMediaMetricsReporter::StopAndReportResults() { std::string CrossMediaMetricsReporter::GetTestCaseName( const std::string& stream_label, const std::string& sync_group) const { - return test_case_name_ + "/" + sync_group + "_" + stream_label; + if (!absl::GetFlag(FLAGS_isolated_script_test_perf_output).empty()) { + return test_case_name_ + "/" + sync_group + "_" + stream_label; + } + return test_case_name_; } } // namespace webrtc_pc_e2e diff --git a/test/pc/e2e/cross_media_metrics_reporter.h b/test/pc/e2e/cross_media_metrics_reporter.h index 2d51ebb20f..a385d4a219 100644 --- a/test/pc/e2e/cross_media_metrics_reporter.h +++ b/test/pc/e2e/cross_media_metrics_reporter.h @@ -12,10 +12,10 @@ #define TEST_PC_E2E_CROSS_MEDIA_METRICS_REPORTER_H_ #include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/numerics/samples_stats_counter.h" #include "api/test/metrics/metrics_logger.h" #include "api/test/peerconnection_quality_test_fixture.h" @@ -36,7 +36,7 @@ class CrossMediaMetricsReporter const TrackIdStreamInfoMap* reporter_helper) override; void OnStatsReports( absl::string_view pc_label, - const rtc::scoped_refptr& report) override; + const scoped_refptr& report) override; void StopAndReportResults() override; private: diff --git a/test/pc/e2e/echo/echo_emulation.cc b/test/pc/e2e/echo/echo_emulation.cc index 8fdabeb16f..89357094a0 100644 --- a/test/pc/e2e/echo/echo_emulation.cc +++ b/test/pc/e2e/echo/echo_emulation.cc @@ -13,6 +13,7 @@ #include #include "api/test/pclf/media_configuration.h" +#include "rtc_base/logging.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -38,8 +39,7 @@ EchoEmulatingCapturer::EchoEmulatingCapturer( capturer_thread_.Detach(); } -void EchoEmulatingCapturer::OnAudioRendered( - rtc::ArrayView data) { +void EchoEmulatingCapturer::OnAudioRendered(ArrayView data) { RTC_DCHECK_RUN_ON(&renderer_thread_); if (!recording_started_) { // Because rendering can start before capturing in the beginning we can have @@ -63,7 +63,7 @@ void EchoEmulatingCapturer::OnAudioRendered( } } -bool EchoEmulatingCapturer::Capture(rtc::BufferT* buffer) { +bool EchoEmulatingCapturer::Capture(BufferT* buffer) { RTC_DCHECK_RUN_ON(&capturer_thread_); bool result = delegate_->Capture(buffer); // Now we have to reduce input signal to avoid saturation when mixing in the @@ -106,7 +106,7 @@ EchoEmulatingRenderer::EchoEmulatingRenderer( RTC_DCHECK(echo_emulating_capturer_); } -bool EchoEmulatingRenderer::Render(rtc::ArrayView data) { +bool EchoEmulatingRenderer::Render(ArrayView data) { if (data.size() > 0) { echo_emulating_capturer_->OnAudioRendered(data); } diff --git a/test/pc/e2e/echo/echo_emulation.h b/test/pc/e2e/echo/echo_emulation.h index 359a481e46..f26d8118bc 100644 --- a/test/pc/e2e/echo/echo_emulation.h +++ b/test/pc/e2e/echo/echo_emulation.h @@ -16,6 +16,7 @@ #include #include +#include 
"api/sequence_checker.h" #include "api/test/pclf/media_configuration.h" #include "modules/audio_device/include/test_audio_device.h" #include "rtc_base/swap_queue.h" @@ -31,13 +32,13 @@ class EchoEmulatingCapturer : public TestAudioDeviceModule::Capturer { std::unique_ptr capturer, EchoEmulationConfig config); - void OnAudioRendered(rtc::ArrayView data); + void OnAudioRendered(ArrayView data); int SamplingFrequency() const override { return delegate_->SamplingFrequency(); } int NumChannels() const override { return delegate_->NumChannels(); } - bool Capture(rtc::BufferT* buffer) override; + bool Capture(BufferT* buffer) override; private: std::unique_ptr delegate_; @@ -66,7 +67,7 @@ class EchoEmulatingRenderer : public TestAudioDeviceModule::Renderer { return delegate_->SamplingFrequency(); } int NumChannels() const override { return delegate_->NumChannels(); } - bool Render(rtc::ArrayView data) override; + bool Render(ArrayView data) override; private: std::unique_ptr delegate_; diff --git a/test/pc/e2e/g3doc/architecture.md b/test/pc/e2e/g3doc/architecture.md index 1b68c6db2c..0dce4a4710 100644 --- a/test/pc/e2e/g3doc/architecture.md +++ b/test/pc/e2e/g3doc/architecture.md @@ -1,5 +1,9 @@ - + # PeerConnection level framework fixture architecture @@ -45,7 +49,7 @@ thread for all `PeerConnection`'s is owned by `PeerConnectionE2EQualityTestFixture` and shared between all participants in the call. The network thread is owned by the network layer (it maybe either emulated network provided by [Network Emulation Framework][24] or network thread and -`rtc::NetworkManager` provided by user) and provided when peer is added to the +`webrtc::NetworkManager` provided by user) and provided when peer is added to the fixture via [`AddPeer(...)`][15] API. ## GetStats API based metrics reporters diff --git a/test/pc/e2e/g3doc/default_video_quality_analyzer.md b/test/pc/e2e/g3doc/default_video_quality_analyzer.md index 576d8e28a3..27df66738a 100644 --- a/test/pc/e2e/g3doc/default_video_quality_analyzer.md +++ b/test/pc/e2e/g3doc/default_video_quality_analyzer.md @@ -1,5 +1,9 @@ - + # DefaultVideoQualityAnalyzer @@ -44,7 +48,7 @@ factory methods for components, that will be used to inject * [`webrtc::test::TestVideoCapturer::FramePreprocessor`][8] which is used to pass generated frames into analyzer on capturing and then set the returned frame ID. It also configures dumping of captured frames if requried. -* [`rtc::VideoSinkInterface`][9] which is used to pass frames to +* [`webrtc::VideoSinkInterface`][9] which is used to pass frames to the analyzer before they will be rendered to compute per frame metrics. It also configures dumping of rendered video if requried. diff --git a/test/pc/e2e/g3doc/index.md b/test/pc/e2e/g3doc/index.md index 678262bb2b..c0ec23c997 100644 --- a/test/pc/e2e/g3doc/index.md +++ b/test/pc/e2e/g3doc/index.md @@ -1,5 +1,9 @@ - + # PeerConnection Level Framework @@ -49,7 +53,7 @@ The framework API is located in the namespace *`webrtc::webrtc_pc_e2e`*. * Generated audio * Audio from specified file * Dumping of captured/rendered audio into file - * Parameterizing of `cricket::AudioOptions` + * Parameterizing of `webrtc::AudioOptions` * Echo emulation * Injection of various WebRTC components into underlying *`webrtc::PeerConnection`* or *`webrtc::PeerConnectionFactory`*. 
You can see diff --git a/test/pc/e2e/media/media_helper.cc b/test/pc/e2e/media/media_helper.cc index 35eae68334..0b4de88e9f 100644 --- a/test/pc/e2e/media/media_helper.cc +++ b/test/pc/e2e/media/media_helper.cc @@ -11,8 +11,8 @@ #include #include +#include -#include "absl/types/variant.h" #include "api/media_stream_interface.h" #include "api/test/create_frame_generator.h" #include "api/test/pclf/media_configuration.h" @@ -24,14 +24,24 @@ namespace webrtc { namespace webrtc_pc_e2e { +namespace { + +bool IsScreencast(const VideoConfig& video_config) { + return video_config.content_hint == VideoTrackInterface::ContentHint::kText || + video_config.content_hint == + VideoTrackInterface::ContentHint::kDetailed; +} + +} // namespace + void MediaHelper::MaybeAddAudio(TestPeer* peer) { if (!peer->params().audio_config) { return; } const AudioConfig& audio_config = peer->params().audio_config.value(); - rtc::scoped_refptr source = + scoped_refptr source = peer->pc_factory()->CreateAudioSource(audio_config.audio_options); - rtc::scoped_refptr track = + scoped_refptr track = peer->pc_factory()->CreateAudioTrack(*audio_config.stream_label, source.get()); std::string sync_group = audio_config.sync_group @@ -40,12 +50,12 @@ void MediaHelper::MaybeAddAudio(TestPeer* peer) { peer->AddTrack(track, {sync_group, *audio_config.stream_label}); } -std::vector> +std::vector> MediaHelper::MaybeAddVideo(TestPeer* peer) { // Params here valid because of pre-run validation. const Params& params = peer->params(); const ConfigurableParams& configurable_params = peer->configurable_params(); - std::vector> out; + std::vector> out; for (size_t i = 0; i < configurable_params.video_configs.size(); ++i) { const VideoConfig& video_config = configurable_params.video_configs[i]; // Setup input video source into peer connection. @@ -53,17 +63,14 @@ MediaHelper::MaybeAddVideo(TestPeer* peer) { video_config, peer->ReleaseVideoSource(i), video_quality_analyzer_injection_helper_->CreateFramePreprocessor( params.name.value(), video_config)); - bool is_screencast = - video_config.content_hint == VideoTrackInterface::ContentHint::kText || - video_config.content_hint == - VideoTrackInterface::ContentHint::kDetailed; - rtc::scoped_refptr source = - rtc::make_ref_counted( - std::move(capturer), is_screencast, video_config.stream_label); + scoped_refptr source = + make_ref_counted( + std::move(capturer), IsScreencast(video_config), + video_config.stream_label); out.push_back(source); RTC_LOG(LS_INFO) << "Adding video with video_config.stream_label=" << video_config.stream_label.value(); - rtc::scoped_refptr track = + scoped_refptr track = peer->pc_factory()->CreateVideoTrack(source, video_config.stream_label.value()); if (video_config.content_hint.has_value()) { @@ -72,7 +79,7 @@ MediaHelper::MaybeAddVideo(TestPeer* peer) { std::string sync_group = video_config.sync_group ? 
video_config.sync_group.value() : video_config.stream_label.value() + "-sync"; - RTCErrorOr> sender = + RTCErrorOr> sender = peer->AddTrack(track, {sync_group, *video_config.stream_label}); RTC_CHECK(sender.ok()); if (video_config.temporal_layers_count || @@ -101,7 +108,7 @@ std::unique_ptr MediaHelper::CreateVideoCapturer( std::unique_ptr frame_preprocessor) { CapturingDeviceIndex* capturing_device_index = - absl::get_if(&source); + std::get_if(&source); if (capturing_device_index != nullptr) { std::unique_ptr capturer = test::CreateVideoCapturer(video_config.width, video_config.height, @@ -116,9 +123,9 @@ std::unique_ptr MediaHelper::CreateVideoCapturer( auto capturer = std::make_unique( clock_, - absl::get>( + std::get>( std::move(source)), - video_config.fps, *task_queue_factory_); + video_config.fps, *task_queue_factory_, IsScreencast(video_config)); capturer->SetFramePreprocessor(std::move(frame_preprocessor)); capturer->Init(); return capturer; diff --git a/test/pc/e2e/media/media_helper.h b/test/pc/e2e/media/media_helper.h index 2d163d009e..9dbabe5f39 100644 --- a/test/pc/e2e/media/media_helper.h +++ b/test/pc/e2e/media/media_helper.h @@ -37,8 +37,8 @@ class MediaHelper { void MaybeAddAudio(TestPeer* peer); - std::vector> - MaybeAddVideo(TestPeer* peer); + std::vector> MaybeAddVideo( + TestPeer* peer); private: std::unique_ptr CreateVideoCapturer( diff --git a/test/pc/e2e/media/test_video_capturer_video_track_source.h b/test/pc/e2e/media/test_video_capturer_video_track_source.h index abcdc6c716..6e691a5737 100644 --- a/test/pc/e2e/media/test_video_capturer_video_track_source.h +++ b/test/pc/e2e/media/test_video_capturer_video_track_source.h @@ -12,9 +12,9 @@ #define TEST_PC_E2E_MEDIA_TEST_VIDEO_CAPTURER_VIDEO_TRACK_SOURCE_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/sequence_checker.h" #include "api/test/video/test_video_track_source.h" #include "api/video/video_frame.h" @@ -29,7 +29,7 @@ class TestVideoCapturerVideoTrackSource : public test::TestVideoTrackSource { TestVideoCapturerVideoTrackSource( std::unique_ptr video_capturer, bool is_screencast, - absl::optional stream_label = absl::nullopt) + std::optional stream_label = std::nullopt) : TestVideoTrackSource(/*remote=*/false, std::move(stream_label)), video_capturer_(std::move(video_capturer)), is_screencast_(is_screencast) { @@ -76,7 +76,7 @@ class TestVideoCapturerVideoTrackSource : public test::TestVideoTrackSource { void OnOutputFormatRequest(int width, int height, - const absl::optional& max_fps) override { + const std::optional& max_fps) override { video_capturer_->OnOutputFormatRequest(width, height, max_fps); } @@ -86,7 +86,7 @@ class TestVideoCapturerVideoTrackSource : public test::TestVideoTrackSource { } protected: - rtc::VideoSourceInterface* source() override { + VideoSourceInterface* source() override { return video_capturer_.get(); } diff --git a/test/pc/e2e/metric_metadata_keys.h b/test/pc/e2e/metric_metadata_keys.h index fbcd3b90fe..fd27d90747 100644 --- a/test/pc/e2e/metric_metadata_keys.h +++ b/test/pc/e2e/metric_metadata_keys.h @@ -10,7 +10,6 @@ #ifndef TEST_PC_E2E_METRIC_METADATA_KEYS_H_ #define TEST_PC_E2E_METRIC_METADATA_KEYS_H_ -#include namespace webrtc { namespace webrtc_pc_e2e { @@ -30,13 +29,6 @@ class MetricMetadataKey { static constexpr char kVideoStreamMetadataKey[] = "video_stream"; // Represents name of the sync group to which stream belongs. 
static constexpr char kPeerSyncGroupMetadataKey[] = "peer_sync_group"; - // Represents the test name (without any peer and stream data appended to it - // as it currently happens with the webrtc.test_metrics.Metric.test_case - // field). This metadata is temporary and it will be removed once this - // information is moved to webrtc.test_metrics.Metric.test_case. - // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. - static constexpr char kExperimentalTestNameMetadataKey[] = - "experimental_test_name"; // Represents index of a video spatial layer to which metric belongs. static constexpr char kSpatialLayerMetadataKey[] = "spatial_layer"; diff --git a/test/pc/e2e/network_quality_metrics_reporter.cc b/test/pc/e2e/network_quality_metrics_reporter.cc index 2d6aa597ce..bccc15ee57 100644 --- a/test/pc/e2e/network_quality_metrics_reporter.cc +++ b/test/pc/e2e/network_quality_metrics_reporter.cc @@ -27,11 +27,6 @@ using ::webrtc::test::Unit; constexpr TimeDelta kStatsWaitTimeout = TimeDelta::Seconds(1); -// Field trial which controls whether to report standard-compliant bytes -// sent/received per stream. If enabled, padding and headers are not included -// in bytes sent or received. -constexpr char kUseStandardBytesStats[] = "WebRTC-UseStandardBytesStats"; - } // namespace NetworkQualityMetricsReporter::NetworkQualityMetricsReporter( @@ -72,22 +67,21 @@ void NetworkQualityMetricsReporter::Start( void NetworkQualityMetricsReporter::OnStatsReports( absl::string_view pc_label, - const rtc::scoped_refptr& report) { + const scoped_refptr& report) { DataSize payload_received = DataSize::Zero(); DataSize payload_sent = DataSize::Zero(); auto inbound_stats = report->GetStatsOfType(); for (const auto& stat : inbound_stats) { payload_received += - DataSize::Bytes(stat->bytes_received.ValueOrDefault(0ul) + - stat->header_bytes_received.ValueOrDefault(0ul)); + DataSize::Bytes(stat->bytes_received.value_or(0ul) + + stat->header_bytes_received.value_or(0ul)); } auto outbound_stats = report->GetStatsOfType(); for (const auto& stat : outbound_stats) { - payload_sent += - DataSize::Bytes(stat->bytes_sent.ValueOrDefault(0ul) + - stat->header_bytes_sent.ValueOrDefault(0ul)); + payload_sent += DataSize::Bytes(stat->bytes_sent.value_or(0ul) + + stat->header_bytes_sent.value_or(0ul)); } MutexLock lock(&lock_); @@ -108,11 +102,6 @@ void NetworkQualityMetricsReporter::StopAndReportResults() { ReportStats(alice_network_label_, alice_stats, alice_packets_loss); ReportStats(bob_network_label_, bob_stats, bob_packets_loss); - if (!webrtc::field_trial::IsEnabled(kUseStandardBytesStats)) { - RTC_LOG(LS_ERROR) - << "Non-standard GetStats; \"payload\" counts include RTP headers"; - } - MutexLock lock(&lock_); for (const auto& pair : pc_stats_) { ReportPCStats(pair.first, pair.second); @@ -121,7 +110,7 @@ void NetworkQualityMetricsReporter::StopAndReportResults() { EmulatedNetworkStats NetworkQualityMetricsReporter::PopulateStats( EmulatedNetworkManagerInterface* network) { - rtc::Event wait; + Event wait; EmulatedNetworkStats stats; network->GetStats([&](EmulatedNetworkStats s) { stats = std::move(s); diff --git a/test/pc/e2e/network_quality_metrics_reporter.h b/test/pc/e2e/network_quality_metrics_reporter.h index 1348a58943..73bbd30c40 100644 --- a/test/pc/e2e/network_quality_metrics_reporter.h +++ b/test/pc/e2e/network_quality_metrics_reporter.h @@ -43,13 +43,11 @@ class NetworkQualityMetricsReporter const TrackIdStreamInfoMap* reporter_helper) override; void OnStatsReports( absl::string_view pc_label, - 
const rtc::scoped_refptr& report) override; + const scoped_refptr& report) override; void StopAndReportResults() override; private: struct PCStats { - // TODO(nisse): Separate audio and video counters. Depends on standard stat - // counters, enabled by field trial "WebRTC-UseStandardBytesStats". DataSize payload_received = DataSize::Zero(); DataSize payload_sent = DataSize::Zero(); }; diff --git a/test/pc/e2e/peer_connection_e2e_smoke_test.cc b/test/pc/e2e/peer_connection_e2e_smoke_test.cc index 8c19172391..57be393461 100644 --- a/test/pc/e2e/peer_connection_e2e_smoke_test.cc +++ b/test/pc/e2e/peer_connection_e2e_smoke_test.cc @@ -22,10 +22,11 @@ #include "api/test/pclf/media_quality_test_params.h" #include "api/test/pclf/peer_configurer.h" #include "api/test/peerconnection_quality_test_fixture.h" -#include "call/simulated_network.h" +#include "media/base/media_constants.h" #include "system_wrappers/include/field_trial.h" #include "test/field_trial.h" #include "test/gtest.h" +#include "test/network/simulated_network.h" #include "test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h" #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h" #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h" @@ -80,9 +81,8 @@ class PeerConnectionE2EQualityTestSmokeTest : public ::testing::Test { } void AddPeer(EmulatedNetworkManagerInterface* network, - rtc::FunctionView update_configurer) { - auto configurer = - std::make_unique(network->network_dependencies()); + FunctionView update_configurer) { + auto configurer = std::make_unique(*network); update_configurer(configurer.get()); fixture_->AddPeer(std::move(configurer)); } @@ -144,7 +144,7 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Smoke) { audio.sync_group = "alice-media"; alice->SetAudioConfig(std::move(audio)); alice->SetVideoCodecs( - {VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})}); + {VideoCodecConfig(kVp9CodecName, {{"profile-id", "0"}})}); alice->SetUseFlexFEC(true); alice->SetUseUlpFEC(true); @@ -163,7 +163,7 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Smoke) { test::ResourcePath("pc_quality_smoke_test_bob_source", "wav"); charlie->SetAudioConfig(std::move(audio)); charlie->SetVideoCodecs( - {VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})}); + {VideoCodecConfig(kVp9CodecName, {{"profile-id", "0"}})}); charlie->SetUseFlexFEC(true); charlie->SetUseUlpFEC(true); @@ -180,71 +180,6 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Smoke) { RunAndCheckEachVideoStreamReceivedFrames(run_params); } -// IOS debug builds can be quite slow, disabling to avoid issues with timeouts. -#if defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64) && !defined(NDEBUG) -#define MAYBE_Smoke DISABLED_Smoke -#else -#define MAYBE_SendAndReceivePacketsOnOneThread \ - SmokeSendAndReceivePacketsOnOneThread -#endif -// Only use the network thread for sending and receiving packets. -// The one and only network thread is used as a worker thread in all -// PeerConnections. Pacing when sending packets is done on the worker thread. -// See bugs.webrtc.org/14502. -TEST_F(PeerConnectionE2EQualityTestSmokeTest, - MAYBE_SendAndReceivePacketsOnOneThread) { - test::ScopedFieldTrials trials( - std::string(field_trial::GetFieldTrialString()) + - "WebRTC-SendPacketsOnWorkerThread/Enabled/"); - - std::pair - network_links = CreateNetwork(); - AddPeer(network_links.first, [](PeerConfigurer* alice) { - // Peerconnection use the network thread as the worker thread. 
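The smoke-test hunks around this point drop the cricket:: qualification in favour of the plain webrtc:: namespace (kVp9CodecName, kH264CodecName) and now build PeerConfigurer directly from the emulated network interface. A rough sketch of the resulting peer setup follows; it assumes the headers named here export these symbols as this patch uses them, and the PeerConfigurer template argument is inferred from the surrounding code rather than copied from it:

#include <memory>
#include <utility>

#include "api/test/network_emulation_manager.h"
#include "api/test/pclf/media_configuration.h"
#include "api/test/pclf/peer_configurer.h"
#include "api/test/peerconnection_quality_test_fixture.h"
#include "media/base/media_constants.h"

namespace webrtc {
namespace webrtc_pc_e2e {

// Adds a VP9-only peer to `fixture`, mirroring the smoke-test pattern after
// this change: no cricket:: prefix, and PeerConfigurer constructed from the
// EmulatedNetworkManagerInterface itself.
void AddVp9Peer(EmulatedNetworkManagerInterface& network,
                PeerConnectionE2EQualityTestFixture& fixture) {
  auto peer = std::make_unique<PeerConfigurer>(network);
  VideoConfig video(/*width=*/160, /*height=*/120, /*fps=*/15);
  video.stream_label = "alice-video";
  peer->AddVideoConfig(std::move(video));
  peer->SetVideoCodecs(
      {VideoCodecConfig(kVp9CodecName, {{"profile-id", "0"}})});
  fixture.AddPeer(std::move(peer));
}

}  // namespace webrtc_pc_e2e
}  // namespace webrtc
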
- alice->SetUseNetworkThreadAsWorkerThread(); - VideoConfig video(160, 120, 15); - video.stream_label = "alice-video"; - video.sync_group = "alice-media"; - alice->AddVideoConfig(std::move(video)); - - AudioConfig audio; - audio.stream_label = "alice-audio"; - audio.input_file_name = - test::ResourcePath("pc_quality_smoke_test_alice_source", "wav"); - audio.sampling_frequency_in_hz = 48000; - audio.sync_group = "alice-media"; - alice->SetAudioConfig(std::move(audio)); - alice->SetVideoCodecs( - {VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})}); - }); - AddPeer(network_links.second, [](PeerConfigurer* charlie) { - // Peerconnection use the network thread as the worker thread. - charlie->SetUseNetworkThreadAsWorkerThread(); - charlie->SetName("charlie"); - VideoConfig video(160, 120, 15); - video.stream_label = "charlie-video"; - video.temporal_layers_count = 2; - charlie->AddVideoConfig(std::move(video)); - - AudioConfig audio; - audio.stream_label = "charlie-audio"; - audio.input_file_name = - test::ResourcePath("pc_quality_smoke_test_bob_source", "wav"); - charlie->SetAudioConfig(std::move(audio)); - charlie->SetVideoCodecs( - {VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})}); - charlie->SetVideoEncoderBitrateMultiplier(1.1); - }); - fixture()->AddQualityMetricsReporter( - std::make_unique( - std::map>( - {{"alice", network_links.first->endpoints()}, - {"charlie", network_links.second->endpoints()}}), - network_emulation(), test::GetGlobalMetricsLogger())); - RunParams run_params(TimeDelta::Seconds(2)); - RunAndCheckEachVideoStreamReceivedFrames(run_params); -} - #if defined(WEBRTC_MAC) || defined(WEBRTC_IOS) TEST_F(PeerConnectionE2EQualityTestSmokeTest, SmokeH264) { std::pair @@ -263,7 +198,7 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, SmokeH264) { audio.sampling_frequency_in_hz = 48000; audio.sync_group = "alice-media"; alice->SetAudioConfig(std::move(audio)); - alice->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + alice->SetVideoCodecs({VideoCodecConfig(webrtc::kH264CodecName)}); alice->SetVideoEncoderFactory(webrtc::test::CreateObjCEncoderFactory()); alice->SetVideoDecoderFactory(webrtc::test::CreateObjCDecoderFactory()); }); @@ -279,7 +214,7 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, SmokeH264) { audio.input_file_name = test::ResourcePath("pc_quality_smoke_test_bob_source", "wav"); charlie->SetAudioConfig(std::move(audio)); - charlie->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + charlie->SetVideoCodecs({VideoCodecConfig(webrtc::kH264CodecName)}); charlie->SetVideoEncoderFactory(webrtc::test::CreateObjCEncoderFactory()); charlie->SetVideoDecoderFactory(webrtc::test::CreateObjCDecoderFactory()); }); @@ -337,7 +272,7 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_ChangeNetworkConditions) { video.sync_group = "alice-media"; alice->AddVideoConfig(std::move(video)); alice->SetVideoCodecs( - {VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})}); + {VideoCodecConfig(kVp9CodecName, {{"profile-id", "0"}})}); alice->SetUseFlexFEC(true); alice->SetUseUlpFEC(true); @@ -345,7 +280,7 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_ChangeNetworkConditions) { }); AddPeer(bob_network, [](PeerConfigurer* bob) { bob->SetVideoCodecs( - {VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})}); + {VideoCodecConfig(kVp9CodecName, {{"profile-id", "0"}})}); bob->SetUseFlexFEC(true); bob->SetUseUlpFEC(true); @@ -468,14 +403,15 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_Svc) { 
simulcast.emulated_sfu_config = EmulatedSFUConfig(1); alice->AddVideoConfig(std::move(simulcast)); - AudioConfig audio("alice-audio"); - audio.input_file_name = - test::ResourcePath("pc_quality_smoke_test_alice_source", "wav"); - alice->SetAudioConfig(std::move(audio)); - alice->SetVideoCodecs({VideoCodecConfig(cricket::kVp9CodecName)}); + alice->SetAudioConfig({ + .stream_label = "alice-audio", + .input_file_name = + test::ResourcePath("pc_quality_smoke_test_alice_source", "wav"), + }); + alice->SetVideoCodecs({VideoCodecConfig(kVp9CodecName)}); }); AddPeer(network_links.second, [](PeerConfigurer* bob) { - bob->SetVideoCodecs({VideoCodecConfig(cricket::kVp9CodecName)}); + bob->SetVideoCodecs({VideoCodecConfig(kVp9CodecName)}); }); RunParams run_params(TimeDelta::Seconds(2)); RunAndCheckEachVideoStreamReceivedFrames(run_params); @@ -510,11 +446,11 @@ TEST_F(PeerConnectionE2EQualityTestSmokeTest, MAYBE_HighBitrate) { audio.sampling_frequency_in_hz = 48000; alice->SetAudioConfig(std::move(audio)); alice->SetVideoCodecs( - {VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})}); + {VideoCodecConfig(kVp9CodecName, {{"profile-id", "0"}})}); }); AddPeer(network_links.second, [](PeerConfigurer* bob) { bob->SetVideoCodecs( - {VideoCodecConfig(cricket::kVp9CodecName, {{"profile-id", "0"}})}); + {VideoCodecConfig(kVp9CodecName, {{"profile-id", "0"}})}); }); RunParams run_params(TimeDelta::Seconds(2)); RunAndCheckEachVideoStreamReceivedFrames(run_params); diff --git a/test/pc/e2e/peer_connection_quality_test.cc b/test/pc/e2e/peer_connection_quality_test.cc index 83613118f9..ce177d4e4d 100644 --- a/test/pc/e2e/peer_connection_quality_test.cc +++ b/test/pc/e2e/peer_connection_quality_test.cc @@ -10,40 +10,75 @@ #include "test/pc/e2e/peer_connection_quality_test.h" #include +#include +#include +#include +#include #include -#include +#include +#include #include +#include +#include "absl/flags/flag.h" #include "absl/strings/string_view.h" #include "api/jsep.h" #include "api/media_stream_interface.h" +#include "api/media_types.h" #include "api/peer_connection_interface.h" +#include "api/rtc_error.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/rtc_event_log_output_file.h" +#include "api/rtp_parameters.h" +#include "api/rtp_transceiver_direction.h" +#include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/test/audio_quality_analyzer_interface.h" #include "api/test/metrics/metric.h" +#include "api/test/metrics/metrics_logger.h" #include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" #include "api/test/pclf/peer_configurer.h" +#include "api/test/stats_observer_interface.h" #include "api/test/time_controller.h" #include "api/test/video_quality_analyzer_interface.h" -#include "pc/sdp_utils.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/video_frame.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" +#include "media/base/media_constants.h" #include "pc/test/mock_peer_connection_observers.h" -#include "rtc_base/gunit.h" -#include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" +#include "rtc_base/task_utils/repeating_task.h" +#include "rtc_base/thread.h" #include 
"system_wrappers/include/cpu_info.h" #include "system_wrappers/include/field_trial.h" #include "test/field_trial.h" +#include "test/gtest.h" #include "test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h" #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h" +#include "test/pc/e2e/analyzer/video/single_process_encoded_image_data_injector.h" #include "test/pc/e2e/analyzer/video/video_frame_tracking_id_injector.h" +#include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h" #include "test/pc/e2e/analyzer/video/video_quality_metrics_reporter.h" #include "test/pc/e2e/cross_media_metrics_reporter.h" +#include "test/pc/e2e/media/media_helper.h" +#include "test/pc/e2e/media/test_video_capturer_video_track_source.h" #include "test/pc/e2e/metric_metadata_keys.h" #include "test/pc/e2e/peer_params_preprocessor.h" +#include "test/pc/e2e/sdp/sdp_changer.h" #include "test/pc/e2e/stats_poller.h" +#include "test/pc/e2e/stats_provider.h" +#include "test/pc/e2e/test_activities_executor.h" +#include "test/pc/e2e/test_peer.h" #include "test/pc/e2e/test_peer_factory.h" +#include "test/test_flags.h" #include "test/testsupport/file_utils.h" namespace webrtc { @@ -71,8 +106,6 @@ constexpr TimeDelta kQuickTestModeRunDuration = TimeDelta::Millis(100); // Field trials to enable Flex FEC advertising and receiving. constexpr char kFlexFecEnabledFieldTrials[] = "WebRTC-FlexFEC-03-Advertised/Enabled/WebRTC-FlexFEC-03/Enabled/"; -constexpr char kUseStandardsBytesStats[] = - "WebRTC-UseStandardBytesStats/Enabled/"; class FixturePeerConnectionObserver : public MockPeerConnectionObserver { public: @@ -83,14 +116,13 @@ class FixturePeerConnectionObserver : public MockPeerConnectionObserver { // of reconnect this callback can be called again, so it should be tolerant // to such behavior. 
FixturePeerConnectionObserver( - std::function)> + std::function)> on_track_callback, std::function on_connected_callback) : on_track_callback_(std::move(on_track_callback)), on_connected_callback_(std::move(on_connected_callback)) {} - void OnTrack( - rtc::scoped_refptr transceiver) override { + void OnTrack(scoped_refptr transceiver) override { MockPeerConnectionObserver::OnTrack(transceiver); on_track_callback_(transceiver); } @@ -104,7 +136,7 @@ class FixturePeerConnectionObserver : public MockPeerConnectionObserver { } private: - std::function)> + std::function)> on_track_callback_; std::function on_connected_callback_; }; @@ -180,7 +212,7 @@ PeerConnectionE2EQualityTest::PeerConnectionE2EQualityTest( void PeerConnectionE2EQualityTest::ExecuteAt( TimeDelta target_time_since_start, std::function func) { - executor_->ScheduleActivity(target_time_since_start, absl::nullopt, func); + executor_->ScheduleActivity(target_time_since_start, std::nullopt, func); } void PeerConnectionE2EQualityTest::ExecuteEvery( @@ -241,7 +273,7 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) { << !bob_configurer->configurable_params()->video_configs.empty() << "; audio=" << bob_configurer->params()->audio_config.has_value(); - const std::unique_ptr signaling_thread = + const std::unique_ptr signaling_thread = time_controller_.CreateThread(kSignalThreadName); media_helper_ = std::make_unique( video_quality_analyzer_injection_helper_.get(), task_queue_factory_.get(), @@ -256,9 +288,9 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) { // Audio streams are intercepted in AudioDeviceModule, so if it is required to // catch output of Alice's stream, Alice's output_dump_file_name should be // passed to Bob's TestPeer setup as audio output file name. - absl::optional alice_remote_audio_config = + std::optional alice_remote_audio_config = RemotePeerAudioConfig::Create(bob_configurer->params()->audio_config); - absl::optional bob_remote_audio_config = + std::optional bob_remote_audio_config = RemotePeerAudioConfig::Create(alice_configurer->params()->audio_config); // Copy Alice and Bob video configs, subscriptions and names to correctly pass // them into lambdas. 
@@ -275,12 +307,12 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) { TestPeerFactory test_peer_factory( signaling_thread.get(), time_controller_, - video_quality_analyzer_injection_helper_.get(), task_queue_.get()); + video_quality_analyzer_injection_helper_.get()); alice_ = test_peer_factory.CreateTestPeer( std::move(alice_configurer), std::make_unique( [this, alice_name, alice_subscription, bob_video_configs]( - rtc::scoped_refptr transceiver) { + scoped_refptr transceiver) { OnTrackCallback(alice_name, alice_subscription, transceiver, bob_video_configs); }, @@ -290,7 +322,7 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) { std::move(bob_configurer), std::make_unique( [this, bob_name, bob_subscription, alice_video_configs]( - rtc::scoped_refptr transceiver) { + scoped_refptr transceiver) { OnTrackCallback(bob_name, bob_subscription, transceiver, alice_video_configs); }, @@ -384,7 +416,7 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) { executor_->Start(task_queue_.get()); Timestamp start_time = Now(); - bool is_quick_test_enabled = field_trial::IsEnabled("WebRTC-QuickPerfTest"); + bool is_quick_test_enabled = absl::GetFlag(FLAGS_webrtc_quick_perf_test); if (is_quick_test_enabled) { time_controller_.AdvanceTime(kQuickTestModeRunDuration); } else { @@ -437,12 +469,11 @@ void PeerConnectionE2EQualityTest::Run(RunParams run_params) { std::string PeerConnectionE2EQualityTest::GetFieldTrials( const RunParams& run_params) { - std::vector default_field_trials = { - kUseStandardsBytesStats}; + std::vector default_field_trials = {}; if (run_params.enable_flex_fec_support) { default_field_trials.push_back(kFlexFecEnabledFieldTrials); } - rtc::StringBuilder sb; + StringBuilder sb; sb << field_trial::GetFieldTrialString(); for (const absl::string_view& field_trial : default_field_trials) { sb << field_trial; @@ -453,9 +484,9 @@ std::string PeerConnectionE2EQualityTest::GetFieldTrials( void PeerConnectionE2EQualityTest::OnTrackCallback( absl::string_view peer_name, VideoSubscription peer_subscription, - rtc::scoped_refptr transceiver, + scoped_refptr transceiver, std::vector remote_video_configs) { - const rtc::scoped_refptr& track = + const scoped_refptr& track = transceiver->receiver()->track(); RTC_CHECK_EQ(transceiver->receiver()->stream_ids().size(), 2) << "Expected 2 stream ids: 1st - sync group, 2nd - unique stream label"; @@ -470,10 +501,10 @@ void PeerConnectionE2EQualityTest::OnTrackCallback( // It is safe to cast here, because it is checked above that // track->kind() is kVideoKind. auto* video_track = static_cast(track.get()); - std::unique_ptr> video_sink = + std::unique_ptr> video_sink = video_quality_analyzer_injection_helper_->CreateVideoSink( peer_name, peer_subscription, /*report_infra_stats=*/false); - video_track->AddOrUpdateSink(video_sink.get(), rtc::VideoSinkWants()); + video_track->AddOrUpdateSink(video_sink.get(), VideoSinkWants()); output_video_sinks_.push_back(std::move(video_sink)); } @@ -489,8 +520,8 @@ void PeerConnectionE2EQualityTest::SetupCallOnSignalingThread( // Setup receive audio transceiver if Bob has audio to send. If we'll need // multiple audio streams, then we need transceiver for each Bob's audio // stream. 
- RTCErrorOr> result = - alice_->AddTransceiver(cricket::MediaType::MEDIA_TYPE_AUDIO, + RTCErrorOr> result = + alice_->AddTransceiver(webrtc::MediaType::AUDIO, receive_only_transceiver_init); RTC_CHECK(result.ok()); alice_transceivers_counter++; @@ -503,7 +534,7 @@ void PeerConnectionE2EQualityTest::SetupCallOnSignalingThread( transceiver_params.direction = RtpTransceiverDirection::kSendOnly; // Because simulcast enabled `alice_->params().video_codecs` has only 1 // element. - if (alice_->params().video_codecs[0].name == cricket::kVp8CodecName) { + if (alice_->params().video_codecs[0].name == kVp8CodecName) { // For Vp8 simulcast we need to add as many RtpEncodingParameters to the // track as many simulcast streams requested. If they specified in // `video_config.simulcast_config` it should be copied from there. @@ -529,9 +560,8 @@ void PeerConnectionE2EQualityTest::SetupCallOnSignalingThread( alice_video_transceivers_non_simulcast_counter++; } - RTCErrorOr> result = - alice_->AddTransceiver(cricket::MediaType::MEDIA_TYPE_VIDEO, - transceiver_params); + RTCErrorOr> result = + alice_->AddTransceiver(webrtc::MediaType::VIDEO, transceiver_params); RTC_CHECK(result.ok()); alice_transceivers_counter++; @@ -541,8 +571,8 @@ void PeerConnectionE2EQualityTest::SetupCallOnSignalingThread( // Alice. for (size_t i = alice_video_transceivers_non_simulcast_counter; i < bob_->configurable_params().video_configs.size(); ++i) { - RTCErrorOr> result = - alice_->AddTransceiver(cricket::MediaType::MEDIA_TYPE_VIDEO, + RTCErrorOr> result = + alice_->AddTransceiver(webrtc::MediaType::VIDEO, receive_only_transceiver_init); RTC_CHECK(result.ok()); alice_transceivers_counter++; @@ -568,19 +598,19 @@ void PeerConnectionE2EQualityTest::SetPeerCodecPreferences(TestPeer* peer) { peer->params().video_codecs, true, peer->params().use_ulp_fec, peer->params().use_flex_fec, peer->pc_factory() - ->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_VIDEO) + ->GetRtpReceiverCapabilities(webrtc::MediaType::VIDEO) .codecs); std::vector without_rtx_video_capabilities = FilterVideoCodecCapabilities( peer->params().video_codecs, false, peer->params().use_ulp_fec, peer->params().use_flex_fec, peer->pc_factory() - ->GetRtpSenderCapabilities(cricket::MediaType::MEDIA_TYPE_VIDEO) + ->GetRtpReceiverCapabilities(webrtc::MediaType::VIDEO) .codecs); // Set codecs for transceivers for (auto transceiver : peer->pc()->GetTransceivers()) { - if (transceiver->media_type() == cricket::MediaType::MEDIA_TYPE_VIDEO) { + if (transceiver->media_type() == webrtc::MediaType::VIDEO) { if (transceiver->sender()->init_send_encodings().size() > 1) { // If transceiver's sender has more then 1 send encodings, it means it // has multiple simulcast streams, so we need disable RTX on it. @@ -615,7 +645,7 @@ PeerConnectionE2EQualityTest::CreateSignalingInterceptor( } void PeerConnectionE2EQualityTest::WaitUntilIceCandidatesGathered( - rtc::Thread* signaling_thread) { + Thread* signaling_thread) { ASSERT_TRUE(time_controller_.Wait( [&]() { bool result; @@ -628,7 +658,7 @@ void PeerConnectionE2EQualityTest::WaitUntilIceCandidatesGathered( } void PeerConnectionE2EQualityTest::WaitUntilPeersAreConnected( - rtc::Thread* signaling_thread) { + Thread* signaling_thread) { // This means that ICE and DTLS are connected. 
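The transceiver hunks above swap cricket::MediaType::MEDIA_TYPE_* for the shorter webrtc::MediaType enumerators and query receiver rather than sender capabilities. A condensed sketch of the resulting call pattern, with return types elided via auto and under the assumption that the underlying PeerConnectionInterface and factory overloads accept these arguments the same way the TestPeer wrappers above do:

#include "api/media_types.h"
#include "api/peer_connection_interface.h"
#include "api/rtp_parameters.h"
#include "api/rtp_transceiver_direction.h"
#include "api/rtp_transceiver_interface.h"
#include "rtc_base/checks.h"

namespace webrtc {

// `pc` and `factory` correspond to TestPeer::pc() / pc_factory() in the code
// above; the wrapper types themselves are elided here.
void AddReceiveOnlyVideoTransceiver(PeerConnectionInterface& pc,
                                    PeerConnectionFactoryInterface& factory) {
  RtpTransceiverInit receive_only_init;
  receive_only_init.direction = RtpTransceiverDirection::kRecvOnly;

  // MediaType::VIDEO replaces cricket::MediaType::MEDIA_TYPE_VIDEO.
  auto result = pc.AddTransceiver(MediaType::VIDEO, receive_only_init);
  RTC_CHECK(result.ok());

  // Codec capabilities are now looked up on the receiver side.
  RtpCapabilities capabilities =
      factory.GetRtpReceiverCapabilities(MediaType::VIDEO);
  (void)capabilities.codecs;
}

}  // namespace webrtc
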
alice_connected_ = time_controller_.Wait( [&]() { @@ -714,7 +744,7 @@ void PeerConnectionE2EQualityTest::ExchangeIceCandidates( } void PeerConnectionE2EQualityTest::StartVideo( - const std::vector>& + const std::vector>& sources) { for (auto& source : sources) { if (source->state() != MediaSourceInterface::SourceState::kLive) { @@ -741,18 +771,14 @@ void PeerConnectionE2EQualityTest::TearDownCall() { } void PeerConnectionE2EQualityTest::ReportGeneralTestResults() { - // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. metrics_logger_->LogSingleValueMetric( *alice_->params().name + "_connected", test_case_name_, alice_connected_, Unit::kUnitless, ImprovementDirection::kBiggerIsBetter, - {{MetricMetadataKey::kPeerMetadataKey, *alice_->params().name}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}); - // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. + {{MetricMetadataKey::kPeerMetadataKey, *alice_->params().name}}); metrics_logger_->LogSingleValueMetric( *bob_->params().name + "_connected", test_case_name_, bob_connected_, Unit::kUnitless, ImprovementDirection::kBiggerIsBetter, - {{MetricMetadataKey::kPeerMetadataKey, *bob_->params().name}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}); + {{MetricMetadataKey::kPeerMetadataKey, *bob_->params().name}}); } Timestamp PeerConnectionE2EQualityTest::Now() const { diff --git a/test/pc/e2e/peer_connection_quality_test.h b/test/pc/e2e/peer_connection_quality_test.h index 6cbf232874..efcc02c5ce 100644 --- a/test/pc/e2e/peer_connection_quality_test.h +++ b/test/pc/e2e/peer_connection_quality_test.h @@ -91,7 +91,7 @@ class PeerConnectionE2EQualityTest std::string GetFieldTrials(const RunParams& run_params); void OnTrackCallback(absl::string_view peer_name, VideoSubscription peer_subscription, - rtc::scoped_refptr transceiver, + scoped_refptr transceiver, std::vector remote_video_configs); // Have to be run on the signaling thread. 
void SetupCallOnSignalingThread(const RunParams& run_params); @@ -99,12 +99,12 @@ class PeerConnectionE2EQualityTest void SetPeerCodecPreferences(TestPeer* peer); std::unique_ptr CreateSignalingInterceptor( const RunParams& run_params); - void WaitUntilIceCandidatesGathered(rtc::Thread* signaling_thread); - void WaitUntilPeersAreConnected(rtc::Thread* signaling_thread); + void WaitUntilIceCandidatesGathered(Thread* signaling_thread); + void WaitUntilPeersAreConnected(Thread* signaling_thread); void ExchangeOfferAnswer(SignalingInterceptor* signaling_interceptor); void ExchangeIceCandidates(SignalingInterceptor* signaling_interceptor); void StartVideo( - const std::vector>& + const std::vector>& sources); void TearDownCall(); void ReportGeneralTestResults(); @@ -129,11 +129,11 @@ class PeerConnectionE2EQualityTest std::vector> quality_metrics_reporters_; - std::vector> + std::vector> alice_video_sources_; - std::vector> + std::vector> bob_video_sources_; - std::vector>> + std::vector>> output_video_sinks_; AnalyzerHelper analyzer_helper_; diff --git a/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc b/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc index 46f7e93243..a3eea5d825 100644 --- a/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc +++ b/test/pc/e2e/peer_connection_quality_test_metric_names_test.cc @@ -10,24 +10,31 @@ #include #include +#include #include +#include +#include +#include "absl/flags/flag.h" +#include "absl/strings/string_view.h" #include "api/test/create_network_emulation_manager.h" -#include "api/test/create_peer_connection_quality_test_frame_generator.h" +#include "api/test/metrics/metric.h" #include "api/test/metrics/metrics_logger.h" -#include "api/test/metrics/stdout_metrics_exporter.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" #include "api/test/network_emulation_manager.h" #include "api/test/pclf/media_configuration.h" #include "api/test/pclf/media_quality_test_params.h" #include "api/test/pclf/peer_configurer.h" #include "api/test/peerconnection_quality_test_fixture.h" #include "api/units/time_delta.h" +#include "media/base/media_constants.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/pc/e2e/metric_metadata_keys.h" #include "test/pc/e2e/network_quality_metrics_reporter.h" #include "test/pc/e2e/peer_connection_quality_test.h" #include "test/pc/e2e/stats_based_network_quality_metrics_reporter.h" +#include "test/test_flags.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -39,28 +46,32 @@ using ::testing::UnorderedElementsAre; using ::webrtc::test::DefaultMetricsLogger; using ::webrtc::test::ImprovementDirection; using ::webrtc::test::Metric; -using ::webrtc::test::MetricsExporter; -using ::webrtc::test::StdoutMetricsExporter; using ::webrtc::test::Unit; using ::webrtc::webrtc_pc_e2e::PeerConfigurer; +std::string GetExpectedTestCaseName(const std::string& stream_label) { + if (!absl::GetFlag(FLAGS_isolated_script_test_perf_output).empty()) { + return "test_case/" + stream_label; + } + return "test_case"; +} + // Adds a peer with some audio and video (the client should not care about // details about audio and video configs). 
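The GetTestCaseName changes in the reporters above and the GetExpectedTestCaseName helper in this test implement the same rule: the per-stream suffix is appended only when --isolated_script_test_perf_output is set. A minimal sketch of that rule, assuming the flag is declared in test/test_flags.h as the includes in this patch indicate:

#include <string>

#include "absl/flags/flag.h"
#include "test/test_flags.h"

namespace webrtc {
namespace webrtc_pc_e2e {

// Returns "<base>/<suffix>" when perf output was requested on the command
// line, and the plain base test-case name otherwise.
std::string TestCaseNameFor(const std::string& base_test_case,
                            const std::string& suffix) {
  if (!absl::GetFlag(FLAGS_isolated_script_test_perf_output).empty()) {
    return base_test_case + "/" + suffix;
  }
  return base_test_case;
}

}  // namespace webrtc_pc_e2e
}  // namespace webrtc
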
-void AddDefaultAudioVideoPeer( - absl::string_view peer_name, - absl::string_view audio_stream_label, - absl::string_view video_stream_label, - const PeerNetworkDependencies& network_dependencies, - PeerConnectionE2EQualityTestFixture& fixture) { - AudioConfig audio{std::string(audio_stream_label)}; - audio.sync_group = std::string(peer_name); +void AddDefaultAudioVideoPeer(absl::string_view peer_name, + absl::string_view audio_stream_label, + absl::string_view video_stream_label, + EmulatedNetworkManagerInterface& network, + PeerConnectionE2EQualityTestFixture& fixture) { + AudioConfig audio{.stream_label = std::string(audio_stream_label), + .sync_group = std::string(peer_name)}; VideoConfig video(std::string(video_stream_label), 320, 180, 15); video.sync_group = std::string(peer_name); - auto peer = std::make_unique(network_dependencies); + auto peer = std::make_unique(network); peer->SetName(peer_name); peer->SetAudioConfig(std::move(audio)); peer->AddVideoConfig(std::move(video)); - peer->SetVideoCodecs({VideoCodecConfig(cricket::kVp8CodecName)}); + peer->SetVideoCodecs({VideoCodecConfig(kVp8CodecName)}); fixture.AddPeer(std::move(peer)); } @@ -108,7 +119,7 @@ std::vector ToValidationInfo( TEST(PeerConnectionE2EQualityTestMetricNamesTest, ExportedMetricsHasCorrectNamesAndAnnotation) { std::unique_ptr network_emulation = - CreateNetworkEmulationManager(TimeMode::kSimulated); + CreateNetworkEmulationManager({.time_mode = TimeMode::kSimulated}); DefaultMetricsLogger metrics_logger( network_emulation->time_controller()->GetClock()); PeerConnectionE2EQualityTest fixture( @@ -135,9 +146,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, network_emulation->CreateEmulatedNetworkManagerInterface({bob_endpoint}); AddDefaultAudioVideoPeer("alice", "alice_audio", "alice_video", - alice_network->network_dependencies(), fixture); - AddDefaultAudioVideoPeer("bob", "bob_audio", "bob_video", - bob_network->network_dependencies(), fixture); + *alice_network, fixture); + AddDefaultAudioVideoPeer("bob", "bob_audio", "bob_video", *bob_network, + fixture); fixture.AddQualityMetricsReporter( std::make_unique( std::map>( @@ -159,155 +170,127 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, .name = "alice_connected", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, MetricValidationInfo{ .test_case = "test_case", .name = "bob_connected", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, // Metrics from DefaultAudioQualityAnalyzer MetricValidationInfo{ - .test_case = "test_case/alice_audio", + .test_case = GetExpectedTestCaseName("alice_audio"), .name = "expand_rate", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_audio", + .test_case = 
GetExpectedTestCaseName("alice_audio"), .name = "accelerate_rate", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_audio", + .test_case = GetExpectedTestCaseName("alice_audio"), .name = "preemptive_rate", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_audio", + .test_case = GetExpectedTestCaseName("alice_audio"), .name = "speech_expand_rate", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_audio", + .test_case = GetExpectedTestCaseName("alice_audio"), .name = "average_jitter_buffer_delay_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kNeitherIsBetter, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_audio", + .test_case = GetExpectedTestCaseName("alice_audio"), .name = "preferred_buffer_size_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kNeitherIsBetter, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_audio", + .test_case = GetExpectedTestCaseName("bob_audio"), .name = "expand_rate", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_audio", + .test_case = GetExpectedTestCaseName("bob_audio"), .name = "accelerate_rate", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, 
"alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_audio", + .test_case = GetExpectedTestCaseName("bob_audio"), .name = "preemptive_rate", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_audio", + .test_case = GetExpectedTestCaseName("bob_audio"), .name = "speech_expand_rate", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_audio", + .test_case = GetExpectedTestCaseName("bob_audio"), .name = "average_jitter_buffer_delay_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kNeitherIsBetter, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_audio", + .test_case = GetExpectedTestCaseName("bob_audio"), .name = "preferred_buffer_size_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kNeitherIsBetter, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, // Metrics from DefaultVideoQualityAnalyzer MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "psnr_dB", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, @@ -315,11 +298,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "ssim", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, @@ -327,11 +308,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - 
.test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "transport_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -339,11 +318,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "total_delay_incl_transport", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -351,11 +328,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "time_between_rendered_frames", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -363,11 +338,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "harmonic_framerate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter, @@ -375,11 +348,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "encode_frame_rate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter, @@ -387,11 +358,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "encode_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -399,11 +368,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + 
{MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "time_between_freezes", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kBiggerIsBetter, @@ -411,11 +378,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "freeze_time_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -423,11 +388,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "pixels_per_frame", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter, @@ -435,11 +398,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "min_psnr_dB", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, @@ -447,11 +408,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "decode_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -459,11 +418,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "receive_to_render_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -471,11 +428,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - 
{MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "dropped_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -483,11 +438,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "frames_in_flight", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -495,11 +448,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "rendered_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter, @@ -507,11 +458,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "max_skipped", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -519,11 +468,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "target_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, @@ -531,11 +478,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), .name = "qp_sl0", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -544,11 +489,19 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - 
{MetricMetadataKey::kSpatialLayerMetadataKey, "0"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kSpatialLayerMetadataKey, "0"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_video", + .test_case = GetExpectedTestCaseName("alice_video"), + .name = "rendered_frame_qp", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kSmallerIsBetter, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "alice_video"}, + {MetricMetadataKey::kSenderMetadataKey, "alice"}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, + MetricValidationInfo{ + .test_case = GetExpectedTestCaseName("alice_video"), .name = "actual_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, @@ -556,11 +509,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kSenderMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "psnr_dB", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, @@ -568,11 +519,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "ssim", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, @@ -580,11 +529,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "transport_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -592,11 +539,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "total_delay_incl_transport", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -604,11 +549,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + 
{MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "time_between_rendered_frames", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -616,11 +559,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "harmonic_framerate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter, @@ -628,11 +569,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "encode_frame_rate", .unit = Unit::kHertz, .improvement_direction = ImprovementDirection::kBiggerIsBetter, @@ -640,11 +579,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "encode_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -652,11 +589,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "time_between_freezes", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kBiggerIsBetter, @@ -664,11 +599,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "freeze_time_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -676,11 +609,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - 
{MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "pixels_per_frame", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter, @@ -688,11 +619,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "min_psnr_dB", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kBiggerIsBetter, @@ -700,11 +629,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "decode_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -712,11 +639,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "receive_to_render_time", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -724,11 +649,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "dropped_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -736,11 +659,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "frames_in_flight", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -748,11 +669,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - 
{MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "rendered_frames", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kBiggerIsBetter, @@ -760,11 +679,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "max_skipped", .unit = Unit::kCount, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -772,11 +689,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "target_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, @@ -784,11 +699,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "actual_encode_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, @@ -796,11 +709,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {MetricMetadataKey::kVideoStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_video", + .test_case = GetExpectedTestCaseName("bob_video"), .name = "qp_sl0", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -809,248 +720,198 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, "bob_video"}, {MetricMetadataKey::kSenderMetadataKey, "bob"}, {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kSpatialLayerMetadataKey, "0"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kSpatialLayerMetadataKey, "0"}}}, + MetricValidationInfo{ + .test_case = GetExpectedTestCaseName("bob_video"), + .name = "rendered_frame_qp", + .unit = Unit::kUnitless, + .improvement_direction = ImprovementDirection::kSmallerIsBetter, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, + {MetricMetadataKey::kVideoStreamMetadataKey, + "bob_video"}, + {MetricMetadataKey::kSenderMetadataKey, "bob"}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, 
MetricValidationInfo{ .test_case = "test_case", .name = "cpu_usage_%", .unit = Unit::kUnitless, - .improvement_direction = ImprovementDirection::kSmallerIsBetter, - .metadata = {{MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .improvement_direction = ImprovementDirection::kSmallerIsBetter}, // Metrics from StatsBasedNetworkQualityMetricsReporter MetricValidationInfo{ - .test_case = "test_case/alice", + .test_case = GetExpectedTestCaseName("alice"), .name = "bytes_discarded_no_receiver", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/alice", + .test_case = GetExpectedTestCaseName("alice"), .name = "packets_discarded_no_receiver", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/alice", + .test_case = GetExpectedTestCaseName("alice"), .name = "payload_bytes_received", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/alice", + .test_case = GetExpectedTestCaseName("alice"), .name = "payload_bytes_sent", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/alice", + .test_case = GetExpectedTestCaseName("alice"), .name = "bytes_sent", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/alice", + .test_case = GetExpectedTestCaseName("alice"), .name = "packets_sent", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/alice", + .test_case = GetExpectedTestCaseName("alice"), .name = "average_send_rate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/alice", + .test_case = GetExpectedTestCaseName("alice"), .name = "bytes_received", .unit = Unit::kBytes, .improvement_direction = 
ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/alice", + .test_case = GetExpectedTestCaseName("alice"), .name = "packets_received", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/alice", + .test_case = GetExpectedTestCaseName("alice"), .name = "average_receive_rate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/alice", + .test_case = GetExpectedTestCaseName("alice"), .name = "sent_packets_loss", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob", + .test_case = GetExpectedTestCaseName("bob"), .name = "bytes_discarded_no_receiver", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/bob", + .test_case = GetExpectedTestCaseName("bob"), .name = "packets_discarded_no_receiver", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/bob", + .test_case = GetExpectedTestCaseName("bob"), .name = "payload_bytes_received", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/bob", + .test_case = GetExpectedTestCaseName("bob"), .name = "payload_bytes_sent", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/bob", + .test_case = GetExpectedTestCaseName("bob"), .name = "bytes_sent", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - 
"test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/bob", + .test_case = GetExpectedTestCaseName("bob"), .name = "packets_sent", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/bob", + .test_case = GetExpectedTestCaseName("bob"), .name = "average_send_rate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/bob", + .test_case = GetExpectedTestCaseName("bob"), .name = "bytes_received", .unit = Unit::kBytes, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/bob", + .test_case = GetExpectedTestCaseName("bob"), .name = "packets_received", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/bob", + .test_case = GetExpectedTestCaseName("bob"), .name = "average_receive_rate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/bob", + .test_case = GetExpectedTestCaseName("bob"), .name = "sent_packets_loss", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, // Metrics from VideoQualityMetricsReporter MetricValidationInfo{ - .test_case = "test_case/alice", + .test_case = GetExpectedTestCaseName("alice"), .name = "available_send_bandwidth", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/alice", + .test_case = GetExpectedTestCaseName("alice"), .name = "transmission_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case 
= "test_case/alice", + .test_case = GetExpectedTestCaseName("alice"), .name = "retransmission_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob", + .test_case = GetExpectedTestCaseName("bob"), .name = "available_send_bandwidth", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/bob", + .test_case = GetExpectedTestCaseName("bob"), .name = "transmission_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/bob", + .test_case = GetExpectedTestCaseName("bob"), .name = "retransmission_bitrate", .unit = Unit::kKilobitsPerSecond, .improvement_direction = ImprovementDirection::kNeitherIsBetter, - .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + .metadata = {{MetricMetadataKey::kPeerMetadataKey, "bob"}}}, // Metrics from CrossMediaMetricsReporter MetricValidationInfo{ - .test_case = "test_case/alice_alice_audio", + .test_case = GetExpectedTestCaseName("alice_alice_audio"), .name = "audio_ahead_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -1058,11 +919,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, {MetricMetadataKey::kPeerSyncGroupMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_alice_video", + .test_case = GetExpectedTestCaseName("alice_alice_video"), .name = "video_ahead_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -1070,11 +929,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_video"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, {MetricMetadataKey::kPeerSyncGroupMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_bob_audio", + .test_case = GetExpectedTestCaseName("bob_bob_audio"), .name = "audio_ahead_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -1082,11 +939,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, {MetricMetadataKey::kPeerSyncGroupMetadataKey, "bob"}, - 
{MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_bob_video", + .test_case = GetExpectedTestCaseName("bob_bob_video"), .name = "video_ahead_ms", .unit = Unit::kMilliseconds, .improvement_direction = ImprovementDirection::kSmallerIsBetter, @@ -1094,37 +949,31 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, {{MetricMetadataKey::kAudioStreamMetadataKey, "bob_video"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, {MetricMetadataKey::kPeerSyncGroupMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}}, MetricValidationInfo{ - .test_case = "test_case/alice_audio", + .test_case = GetExpectedTestCaseName("alice_audio"), .name = "energy", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, .metadata = {{MetricMetadataKey::kAudioStreamMetadataKey, "alice_audio"}, {MetricMetadataKey::kPeerMetadataKey, "bob"}, - {MetricMetadataKey::kReceiverMetadataKey, "bob"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}}, + {MetricMetadataKey::kReceiverMetadataKey, "bob"}}}, MetricValidationInfo{ - .test_case = "test_case/bob_audio", + .test_case = GetExpectedTestCaseName("bob_audio"), .name = "energy", .unit = Unit::kUnitless, .improvement_direction = ImprovementDirection::kNeitherIsBetter, .metadata = { {MetricMetadataKey::kAudioStreamMetadataKey, "bob_audio"}, {MetricMetadataKey::kPeerMetadataKey, "alice"}, - {MetricMetadataKey::kReceiverMetadataKey, "alice"}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, - "test_case"}}})); + {MetricMetadataKey::kReceiverMetadataKey, "alice"}}})); } TEST(PeerConnectionE2EQualityTestMetricNamesTest, ExportedNetworkMetricsHaveCustomNetworkLabelIfSet) { std::unique_ptr network_emulation = - CreateNetworkEmulationManager(TimeMode::kSimulated); + CreateNetworkEmulationManager({.time_mode = TimeMode::kSimulated}); DefaultMetricsLogger metrics_logger( network_emulation->time_controller()->GetClock()); PeerConnectionE2EQualityTest fixture( @@ -1151,9 +1000,9 @@ TEST(PeerConnectionE2EQualityTestMetricNamesTest, network_emulation->CreateEmulatedNetworkManagerInterface({bob_endpoint}); AddDefaultAudioVideoPeer("alice", "alice_audio", "alice_video", - alice_network->network_dependencies(), fixture); - AddDefaultAudioVideoPeer("bob", "bob_audio", "bob_video", - bob_network->network_dependencies(), fixture); + *alice_network, fixture); + AddDefaultAudioVideoPeer("bob", "bob_audio", "bob_video", *bob_network, + fixture); std::string kAliceNetworkLabel = "alice_label"; std::string kBobNetworkLabel = "bob_label"; fixture.AddQualityMetricsReporter( diff --git a/test/pc/e2e/peer_connection_quality_test_test.cc b/test/pc/e2e/peer_connection_quality_test_test.cc index 066fe7d8ee..35e299bf37 100644 --- a/test/pc/e2e/peer_connection_quality_test_test.cc +++ b/test/pc/e2e/peer_connection_quality_test_test.cc @@ -40,7 +40,7 @@ using ::webrtc::webrtc_pc_e2e::PeerConfigurer; // Remove files and directories in a directory non-recursively. 
 void CleanDir(absl::string_view dir, size_t expected_output_files_count) {
-  absl::optional<std::vector<std::string>> dir_content =
+  std::optional<std::vector<std::string>> dir_content =
       test::ReadDirectory(dir);
   if (expected_output_files_count == 0) {
     ASSERT_FALSE(dir_content.has_value()) << "Empty directory is expected";
@@ -90,7 +90,7 @@ class PeerConnectionE2EQualityTestTest : public Test {
 TEST_F(PeerConnectionE2EQualityTestTest, OutputVideoIsDumpedWhenRequested) {
   std::unique_ptr<NetworkEmulationManager> network_emulation =
-      CreateNetworkEmulationManager(TimeMode::kSimulated);
+      CreateNetworkEmulationManager({.time_mode = TimeMode::kSimulated});
   PeerConnectionE2EQualityTest fixture(
       "test_case", *network_emulation->time_controller(),
       /*audio_quality_analyzer=*/nullptr, /*video_quality_analyzer=*/nullptr,
@@ -116,12 +116,12 @@ TEST_F(PeerConnectionE2EQualityTestTest, OutputVideoIsDumpedWhenRequested) {
   VideoConfig alice_video("alice_video", 320, 180, 15);
   alice_video.output_dump_options = VideoDumpOptions(test_directory_);
-  PeerConfigurer alice(alice_network->network_dependencies());
+  PeerConfigurer alice(*alice_network);
   alice.SetName("alice");
   alice.AddVideoConfig(std::move(alice_video));
   fixture.AddPeer(std::make_unique<PeerConfigurer>(std::move(alice)));
-  PeerConfigurer bob(bob_network->network_dependencies());
+  PeerConfigurer bob(*bob_network);
   bob.SetName("bob");
   fixture.AddPeer(std::make_unique<PeerConfigurer>(std::move(bob)));
diff --git a/test/pc/e2e/peer_params_preprocessor.cc b/test/pc/e2e/peer_params_preprocessor.cc
index fa3351adb3..50938a8a1a 100644
--- a/test/pc/e2e/peer_params_preprocessor.cc
+++ b/test/pc/e2e/peer_params_preprocessor.cc
@@ -10,17 +10,26 @@
 #include "test/pc/e2e/peer_params_preprocessor.h"
+#include
+
+#include
 #include
 #include
+#include
+#include
 #include "absl/strings/string_view.h"
+#include "api/array_view.h"
+#include "api/rtp_parameters.h"
 #include "api/test/pclf/media_configuration.h"
 #include "api/test/pclf/media_quality_test_params.h"
 #include "api/test/pclf/peer_configurer.h"
-#include "api/test/peer_network_dependencies.h"
+#include "api/video_codecs/scalability_mode.h"
+#include "media/base/media_constants.h"
 #include "modules/video_coding/svc/create_scalability_structure.h"
 #include "modules/video_coding/svc/scalability_mode_util.h"
-#include "rtc_base/arraysize.h"
+#include "modules/video_coding/svc/scalable_video_controller.h"
+#include "rtc_base/checks.h"
 #include "test/testsupport/file_utils.h"
 namespace webrtc {
@@ -40,10 +49,10 @@ class PeerParamsPreprocessor::DefaultNamesProvider {
   // instance.
   explicit DefaultNamesProvider(
       absl::string_view prefix,
-      rtc::ArrayView default_names = {})
+      ArrayView default_names = {})
       : prefix_(prefix), default_names_(default_names) {}
-  void MaybeSetName(absl::optional& name) {
+  void MaybeSetName(std::optional& name) {
     if (name.has_value()) {
       known_names_.insert(name.value());
     } else {
@@ -68,7 +77,7 @@ class PeerParamsPreprocessor::DefaultNamesProvider {
   }
   const std::string prefix_;
-  const rtc::ArrayView default_names_;
+  const ArrayView default_names_;
   std::set known_names_;
   size_t counter_ = 0;
@@ -97,7 +106,7 @@ void PeerParamsPreprocessor::SetDefaultValuesForMissingParams(
   }
   if (params->video_codecs.empty()) {
-    params->video_codecs.push_back(VideoCodecConfig(cricket::kVp8CodecName));
+    params->video_codecs.push_back(VideoCodecConfig(kVp8CodecName));
   }
 }
@@ -159,11 +168,11 @@ void PeerParamsPreprocessor::ValidateParams(const PeerConfigurer& peer) {
       if (!encoding_param.scalability_mode)
         continue;
-      absl::optional scalability_mode =
+      std::optional scalability_mode =
           ScalabilityModeFromString(*encoding_param.scalability_mode);
       RTC_CHECK(scalability_mode) << "Unknown scalability_mode requested";
-      absl::optional
+      std::optional
          stream_layers_config = ScalabilityStructureConfig(*scalability_mode);
       is_svc |= stream_layers_config->num_spatial_layers > 1;
diff --git a/test/pc/e2e/sdp/sdp_changer.cc b/test/pc/e2e/sdp/sdp_changer.cc
index af55f29175..6c74740cac 100644
--- a/test/pc/e2e/sdp/sdp_changer.cc
+++ b/test/pc/e2e/sdp/sdp_changer.cc
@@ -10,15 +10,34 @@
 #include "test/pc/e2e/sdp/sdp_changer.h"
+#include
+#include
+#include
+#include
+#include
+#include
 #include
+#include
-#include "absl/memory/memory.h"
+#include "api/array_view.h"
+#include "api/jsep.h"
 #include "api/jsep_session_description.h"
+#include "api/media_types.h"
+#include "api/rtp_parameters.h"
+#include "api/rtp_transceiver_direction.h"
 #include "api/test/pclf/media_configuration.h"
 #include "media/base/media_constants.h"
+#include "media/base/rid_description.h"
+#include "media/base/stream_params.h"
 #include "p2p/base/p2p_constants.h"
+#include "p2p/base/transport_description.h"
+#include "p2p/base/transport_info.h"
 #include "pc/sdp_utils.h"
+#include "pc/session_description.h"
+#include "pc/simulcast_description.h"
+#include "rtc_base/checks.h"
 #include "rtc_base/strings/string_builder.h"
+#include "rtc_base/unique_id_generator.h"
 namespace webrtc {
 namespace webrtc_pc_e2e {
@@ -26,7 +45,7 @@ namespace {
 std::string CodecRequiredParamsToString(
     const std::map& codec_required_params) {
-  rtc::StringBuilder out;
+  StringBuilder out;
   for (const auto& entry : codec_required_params) {
     out << entry.first << "=" << entry.second << ";";
   }
@@ -34,8 +53,8 @@ std::string CodecRequiredParamsToString(
 }
 std::string SupportedCodecsToString(
-    rtc::ArrayView supported_codecs) {
-  rtc::StringBuilder out;
+    ArrayView supported_codecs) {
+  StringBuilder out;
   for (const auto& codec : supported_codecs) {
     out << codec.name;
     if (!codec.parameters.empty()) {
@@ -53,11 +72,11 @@ std::string SupportedCodecsToString(
 }  // namespace
 std::vector FilterVideoCodecCapabilities(
-    rtc::ArrayView video_codecs,
+    ArrayView video_codecs,
     bool use_rtx,
     bool use_ulpfec,
     bool use_flexfec,
-    rtc::ArrayView supported_codecs) {
+    ArrayView supported_codecs) {
   std::vector output_codecs;
   // Find requested codecs among supported and add them to output in the order
   // they were requested.
@@ -92,12 +111,12 @@ std::vector FilterVideoCodecCapabilities(
   // Add required FEC and RTX codecs to output.
for (auto& codec : supported_codecs) { - if (codec.name == cricket::kRtxCodecName && use_rtx) { + if (codec.name == kRtxCodecName && use_rtx) { output_codecs.push_back(codec); - } else if (codec.name == cricket::kFlexfecCodecName && use_flexfec) { + } else if (codec.name == kFlexfecCodecName && use_flexfec) { output_codecs.push_back(codec); - } else if ((codec.name == cricket::kRedCodecName || - codec.name == cricket::kUlpfecCodecName) && + } else if ((codec.name == kRedCodecName || + codec.name == kUlpfecCodecName) && use_ulpfec) { // Red and ulpfec should be enabled or disabled together. output_codecs.push_back(codec); @@ -113,8 +132,8 @@ std::vector FilterVideoCodecCapabilities( void SignalingInterceptor::FillSimulcastContext( SessionDescriptionInterface* offer) { for (auto& content : offer->description()->contents()) { - cricket::MediaContentDescription* media_desc = content.media_description(); - if (media_desc->type() != cricket::MediaType::MEDIA_TYPE_VIDEO) { + MediaContentDescription* media_desc = content.media_description(); + if (media_desc->type() != webrtc::MediaType::VIDEO) { continue; } if (media_desc->HasSimulcast()) { @@ -127,12 +146,12 @@ void SignalingInterceptor::FillSimulcastContext( media_desc->mutable_streams()[0].rids()); // Set new rids basing on created SimulcastSectionInfo. - std::vector rids; - cricket::SimulcastDescription simulcast_description; + std::vector rids; + SimulcastDescription simulcast_description; for (std::string& rid : info.rids) { - rids.emplace_back(rid, cricket::RidDirection::kSend); + rids.emplace_back(rid, RidDirection::kSend); simulcast_description.send_layers().AddLayer( - cricket::SimulcastLayer(rid, false)); + SimulcastLayer(rid, false)); } media_desc->mutable_streams()[0].set_rids(rids); media_desc->set_simulcast_description(simulcast_description); @@ -169,8 +188,8 @@ LocalAndRemoteSdp SignalingInterceptor::PatchOffer( const VideoCodecConfig& first_codec) { for (auto& content : offer->description()->contents()) { context_.mids_order.push_back(content.mid()); - cricket::MediaContentDescription* media_desc = content.media_description(); - if (media_desc->type() != cricket::MediaType::MEDIA_TYPE_VIDEO) { + MediaContentDescription* media_desc = content.media_description(); + if (media_desc->type() != webrtc::MediaType::VIDEO) { continue; } if (content.media_description()->streams().empty()) { @@ -185,11 +204,11 @@ LocalAndRemoteSdp SignalingInterceptor::PatchOffer( if (!params_.stream_label_to_simulcast_streams_count.empty()) { // Because simulcast enabled `params_.video_codecs` has only 1 element. - if (first_codec.name == cricket::kVp8CodecName) { + if (first_codec.name == kVp8CodecName) { return PatchVp8Offer(std::move(offer)); } - if (first_codec.name == cricket::kVp9CodecName) { + if (first_codec.name == kVp9CodecName) { return PatchVp9Offer(std::move(offer)); } } @@ -208,20 +227,19 @@ LocalAndRemoteSdp SignalingInterceptor::PatchVp8Offer( // Clone original offer description. We mustn't access original offer after // this point. - std::unique_ptr desc = - offer->description()->Clone(); + std::unique_ptr desc = offer->description()->Clone(); for (auto& info : context_.simulcast_infos) { // For each simulcast section we have to perform: // 1. Swap MID and RID header extensions // 2. Remove RIDs from streams and remove SimulcastDescription // 3. 
For each RID duplicate media section - cricket::ContentInfo* simulcast_content = desc->GetContentByName(info.mid); + ContentInfo* simulcast_content = desc->GetContentByName(info.mid); // Now we need to prepare common prototype for "m=video" sections, in which // single simulcast section will be converted. Do it before removing content // because otherwise description will be deleted. - std::unique_ptr prototype_media_desc = + std::unique_ptr prototype_media_desc = simulcast_content->media_description()->Clone(); // Remove simulcast video section from offer. @@ -260,8 +278,7 @@ LocalAndRemoteSdp SignalingInterceptor::PatchVp8Offer( // Remove rids and simulcast description from media description. prototype_media_desc->mutable_streams()[0].set_rids({}); - prototype_media_desc->set_simulcast_description( - cricket::SimulcastDescription()); + prototype_media_desc->set_simulcast_description(SimulcastDescription()); // For each rid add separate video section. for (std::string& rid : info.rids) { @@ -271,20 +288,20 @@ LocalAndRemoteSdp SignalingInterceptor::PatchVp8Offer( } // Now we need to add bundle line to have all media bundled together. - cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group(GROUP_TYPE_BUNDLE); for (auto& content : desc->contents()) { bundle_group.AddContentName(content.mid()); } - if (desc->HasGroup(cricket::GROUP_TYPE_BUNDLE)) { - desc->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); + if (desc->HasGroup(GROUP_TYPE_BUNDLE)) { + desc->RemoveGroupByName(GROUP_TYPE_BUNDLE); } desc->AddGroup(bundle_group); // Update transport_infos to add TransportInfo for each new media section. - std::vector transport_infos = desc->transport_infos(); + std::vector transport_infos = desc->transport_infos(); transport_infos.erase(std::remove_if( transport_infos.begin(), transport_infos.end(), - [this](const cricket::TransportInfo& ti) { + [this](const TransportInfo& ti) { // Remove transport infos that correspond to simulcast video sections. 
return context_.simulcast_infos_by_mid.find(ti.content_name) != context_.simulcast_infos_by_mid.end(); @@ -306,7 +323,7 @@ LocalAndRemoteSdp SignalingInterceptor::PatchVp8Offer( LocalAndRemoteSdp SignalingInterceptor::PatchVp9Offer( std::unique_ptr offer) { - rtc::UniqueRandomIdGenerator ssrcs_generator; + UniqueRandomIdGenerator ssrcs_generator; for (auto& content : offer->description()->contents()) { for (auto& stream : content.media_description()->streams()) { for (auto& ssrc : stream.ssrcs) { @@ -316,8 +333,7 @@ LocalAndRemoteSdp SignalingInterceptor::PatchVp9Offer( } for (auto& content : offer->description()->contents()) { - if (content.media_description()->type() != - cricket::MediaType::MEDIA_TYPE_VIDEO) { + if (content.media_description()->type() != webrtc::MediaType::VIDEO) { // We are interested in only video tracks continue; } @@ -328,8 +344,7 @@ LocalAndRemoteSdp SignalingInterceptor::PatchVp9Offer( continue; } RTC_CHECK_EQ(content.media_description()->streams().size(), 1); - cricket::StreamParams& stream = - content.media_description()->mutable_streams()[0]; + StreamParams& stream = content.media_description()->mutable_streams()[0]; RTC_CHECK_EQ(stream.stream_ids().size(), 2) << "Expected 2 stream ids in video stream: 1st - sync_group, 2nd - " "unique label"; @@ -355,7 +370,7 @@ LocalAndRemoteSdp SignalingInterceptor::PatchVp9Offer( stream.AddFidSsrc(ssrc, ssrcs_generator.GenerateId()); } stream.ssrc_groups.push_back( - cricket::SsrcGroup(cricket::kSimSsrcGroupSemantics, primary_ssrcs)); + SsrcGroup(kSimSsrcGroupSemantics, primary_ssrcs)); } auto offer_for_remote = CloneSessionDescription(offer.get()); return LocalAndRemoteSdp(std::move(offer), std::move(offer_for_remote)); @@ -365,8 +380,8 @@ LocalAndRemoteSdp SignalingInterceptor::PatchAnswer( std::unique_ptr answer, const VideoCodecConfig& first_codec) { for (auto& content : answer->description()->contents()) { - cricket::MediaContentDescription* media_desc = content.media_description(); - if (media_desc->type() != cricket::MediaType::MEDIA_TYPE_VIDEO) { + MediaContentDescription* media_desc = content.media_description(); + if (media_desc->type() != webrtc::MediaType::VIDEO) { continue; } if (content.media_description()->direction() != @@ -378,11 +393,11 @@ LocalAndRemoteSdp SignalingInterceptor::PatchAnswer( if (!params_.stream_label_to_simulcast_streams_count.empty()) { // Because simulcast enabled `params_.video_codecs` has only 1 element. - if (first_codec.name == cricket::kVp8CodecName) { + if (first_codec.name == kVp8CodecName) { return PatchVp8Answer(std::move(answer)); } - if (first_codec.name == cricket::kVp9CodecName) { + if (first_codec.name == kVp9CodecName) { return PatchVp9Answer(std::move(answer)); } } @@ -398,16 +413,14 @@ LocalAndRemoteSdp SignalingInterceptor::PatchVp8Answer( return LocalAndRemoteSdp(std::move(answer), std::move(answer_for_remote)); } - std::unique_ptr desc = - answer->description()->Clone(); + std::unique_ptr desc = answer->description()->Clone(); for (auto& info : context_.simulcast_infos) { - cricket::ContentInfo* simulcast_content = - desc->GetContentByName(info.rids[0]); + ContentInfo* simulcast_content = desc->GetContentByName(info.rids[0]); RTC_CHECK(simulcast_content); // Get media description, which will be converted to simulcast answer. - std::unique_ptr media_desc = + std::unique_ptr media_desc = simulcast_content->media_description()->Clone(); // Set `simulcast_content` to nullptr, because then it will be removed, so // it will point to deleted object. 
@@ -440,18 +453,18 @@ LocalAndRemoteSdp SignalingInterceptor::PatchVp8Answer( // Add StreamParams with rids for receive. RTC_CHECK_EQ(media_desc->mutable_streams().size(), 0); - std::vector rids; + std::vector rids; for (auto& rid : info.rids) { - rids.emplace_back(rid, cricket::RidDirection::kReceive); + rids.emplace_back(rid, RidDirection::kReceive); } - cricket::StreamParams stream_params; + StreamParams stream_params; stream_params.set_rids(rids); media_desc->mutable_streams().push_back(stream_params); // Restore SimulcastDescription. It should correspond to one from offer, // but it have to have receive layers instead of send. So we need to put // send layers from offer to receive layers in answer. - cricket::SimulcastDescription simulcast_description; + SimulcastDescription simulcast_description; for (const auto& layer : info.simulcast_description.send_layers()) { simulcast_description.receive_layers().AddLayerWithAlternatives(layer); } @@ -464,19 +477,18 @@ LocalAndRemoteSdp SignalingInterceptor::PatchVp8Answer( desc = RestoreMediaSectionsOrder(std::move(desc)); // Now we need to add bundle line to have all media bundled together. - cricket::ContentGroup bundle_group(cricket::GROUP_TYPE_BUNDLE); + ContentGroup bundle_group(GROUP_TYPE_BUNDLE); for (auto& content : desc->contents()) { bundle_group.AddContentName(content.mid()); } - if (desc->HasGroup(cricket::GROUP_TYPE_BUNDLE)) { - desc->RemoveGroupByName(cricket::GROUP_TYPE_BUNDLE); + if (desc->HasGroup(GROUP_TYPE_BUNDLE)) { + desc->RemoveGroupByName(GROUP_TYPE_BUNDLE); } desc->AddGroup(bundle_group); // Fix transport_infos: it have to have single info for simulcast section. - std::vector transport_infos = desc->transport_infos(); - std::map - mid_to_transport_description; + std::vector transport_infos = desc->transport_infos(); + std::map mid_to_transport_description; for (auto info_it = transport_infos.begin(); info_it != transport_infos.end();) { auto it = context_.simulcast_infos_by_rid.find(info_it->content_name); @@ -502,16 +514,16 @@ LocalAndRemoteSdp SignalingInterceptor::PatchVp8Answer( return LocalAndRemoteSdp(std::move(answer), std::move(patched_answer)); } -std::unique_ptr +std::unique_ptr SignalingInterceptor::RestoreMediaSectionsOrder( - std::unique_ptr source) { - std::unique_ptr out = source->Clone(); + std::unique_ptr source) { + std::unique_ptr out = source->Clone(); for (auto& mid : context_.mids_order) { RTC_CHECK(out->RemoveContentByName(mid)); } RTC_CHECK_EQ(out->contents().size(), 0); for (auto& mid : context_.mids_order) { - cricket::ContentInfo* content = source->GetContentByName(mid); + ContentInfo* content = source->GetContentByName(mid); RTC_CHECK(content); out->AddContent(mid, content->type, content->media_description()->Clone()); } @@ -526,7 +538,7 @@ LocalAndRemoteSdp SignalingInterceptor::PatchVp9Answer( std::vector> SignalingInterceptor::PatchOffererIceCandidates( - rtc::ArrayView candidates) { + ArrayView candidates) { std::vector> out; for (auto* candidate : candidates) { auto simulcast_info_it = @@ -550,7 +562,7 @@ SignalingInterceptor::PatchOffererIceCandidates( std::vector> SignalingInterceptor::PatchAnswererIceCandidates( - rtc::ArrayView candidates) { + ArrayView candidates) { std::vector> out; for (auto* candidate : candidates) { auto simulcast_info_it = @@ -576,8 +588,8 @@ SignalingInterceptor::PatchAnswererIceCandidates( SignalingInterceptor::SimulcastSectionInfo::SimulcastSectionInfo( const std::string& mid, - cricket::MediaProtocolType media_protocol_type, - const std::vector& rids_desc) 
+ MediaProtocolType media_protocol_type, + const std::vector& rids_desc) : mid(mid), media_protocol_type(media_protocol_type) { for (auto& rid : rids_desc) { rids.push_back(rid.rid); diff --git a/test/pc/e2e/sdp/sdp_changer.h b/test/pc/e2e/sdp/sdp_changer.h index 6f68d03f52..62a13e45b4 100644 --- a/test/pc/e2e/sdp/sdp_changer.h +++ b/test/pc/e2e/sdp/sdp_changer.h @@ -12,11 +12,11 @@ #define TEST_PC_E2E_SDP_SDP_CHANGER_H_ #include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/jsep.h" #include "api/rtp_parameters.h" @@ -40,11 +40,11 @@ namespace webrtc_pc_e2e { // vector and they will be added in the same order, as they were in // `supported_codecs`. std::vector FilterVideoCodecCapabilities( - rtc::ArrayView video_codecs, + ArrayView video_codecs, bool use_rtx, bool use_ulpfec, bool use_flexfec, - rtc::ArrayView supported_codecs); + ArrayView supported_codecs); struct LocalAndRemoteSdp { LocalAndRemoteSdp(std::unique_ptr local_sdp, @@ -82,27 +82,27 @@ class SignalingInterceptor { const VideoCodecConfig& first_codec); std::vector> PatchOffererIceCandidates( - rtc::ArrayView candidates); + ArrayView candidates); std::vector> PatchAnswererIceCandidates( - rtc::ArrayView candidates); + ArrayView candidates); private: // Contains information about simulcast section, that is required to perform // modified offer/answer and ice candidates exchange. struct SimulcastSectionInfo { SimulcastSectionInfo(const std::string& mid, - cricket::MediaProtocolType media_protocol_type, - const std::vector& rids_desc); + MediaProtocolType media_protocol_type, + const std::vector& rids_desc); const std::string mid; - const cricket::MediaProtocolType media_protocol_type; + const MediaProtocolType media_protocol_type; std::vector rids; - cricket::SimulcastDescription simulcast_description; + SimulcastDescription simulcast_description; webrtc::RtpExtension mid_extension; webrtc::RtpExtension rid_extension; webrtc::RtpExtension rrid_extension; - cricket::TransportDescription transport_description; + TransportDescription transport_description; }; struct SignalingContext { @@ -133,8 +133,8 @@ class SignalingInterceptor { std::unique_ptr answer); void FillSimulcastContext(SessionDescriptionInterface* offer); - std::unique_ptr RestoreMediaSectionsOrder( - std::unique_ptr source); + std::unique_ptr RestoreMediaSectionsOrder( + std::unique_ptr source); PatchingParams params_; SignalingContext context_; diff --git a/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc b/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc index eb5f29287e..333d6818bf 100644 --- a/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc +++ b/test/pc/e2e/stats_based_network_quality_metrics_reporter.cc @@ -10,34 +10,40 @@ #include "test/pc/e2e/stats_based_network_quality_metrics_reporter.h" +#include #include #include -#include #include #include -#include #include #include +#include "absl/flags/flag.h" #include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" -#include "api/stats/rtc_stats.h" +#include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" #include "api/test/metrics/metric.h" +#include "api/test/metrics/metrics_logger.h" #include "api/test/network_emulation/network_emulation_interfaces.h" #include "api/test/network_emulation_manager.h" +#include "api/test/track_id_stream_info_map.h" #include "api/units/data_rate.h" +#include 
"api/units/data_size.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/ip_address.h" +#include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" -#include "system_wrappers/include/field_trial.h" +#include "rtc_base/thread_annotations.h" #include "test/pc/e2e/metric_metadata_keys.h" +#include "test/test_flags.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -51,14 +57,9 @@ using NetworkLayerStats = constexpr TimeDelta kStatsWaitTimeout = TimeDelta::Seconds(1); -// Field trial which controls whether to report standard-compliant bytes -// sent/received per stream. If enabled, padding and headers are not included -// in bytes sent or received. -constexpr char kUseStandardBytesStats[] = "WebRTC-UseStandardBytesStats"; - EmulatedNetworkStats PopulateStats(std::vector endpoints, NetworkEmulationManager* network_emulation) { - rtc::Event stats_loaded; + Event stats_loaded; EmulatedNetworkStats stats; network_emulation->GetStats(endpoints, [&](EmulatedNetworkStats s) { stats = std::move(s); @@ -69,10 +70,10 @@ EmulatedNetworkStats PopulateStats(std::vector endpoints, return stats; } -std::map PopulateIpToPeer( +std::map PopulateIpToPeer( const std::map>& peer_endpoints) { - std::map out; + std::map out; for (const auto& entry : peer_endpoints) { for (const EmulatedEndpoint* const endpoint : entry.second) { RTC_CHECK(out.find(endpoint->GetPeerLocalAddress()) == out.end()) @@ -154,7 +155,7 @@ class EmulatedNetworkStatsAccumulator { std::map n_stats_ RTC_GUARDED_BY(sequence_checker_); - rtc::Event all_stats_collected_; + Event all_stats_collected_; Mutex mutex_; std::map stats_ RTC_GUARDED_BY(mutex_); bool stats_released_ = false; @@ -249,7 +250,7 @@ StatsBasedNetworkQualityMetricsReporter::NetworkLayerStatsCollector:: const NetworkLayerStats& stats = peer_to_stats[peer_name]; for (const auto& income_stats_entry : stats.endpoints_stats.incoming_stats_per_source) { - const rtc::IPAddress& source_ip = income_stats_entry.first; + const IPAddress& source_ip = income_stats_entry.first; auto it = ip_to_peer_.find(source_ip); if (it == ip_to_peer_.end()) { // Source IP is unknown for this collector, so will be skipped. 
@@ -293,31 +294,29 @@ void StatsBasedNetworkQualityMetricsReporter::Start( void StatsBasedNetworkQualityMetricsReporter::OnStatsReports( absl::string_view pc_label, - const rtc::scoped_refptr& report) { + const scoped_refptr& report) { PCStats cur_stats; auto inbound_stats = report->GetStatsOfType(); for (const auto& stat : inbound_stats) { cur_stats.payload_received += - DataSize::Bytes(stat->bytes_received.ValueOrDefault(0ul) + - stat->header_bytes_received.ValueOrDefault(0ul)); + DataSize::Bytes(stat->bytes_received.value_or(0ul) + + stat->header_bytes_received.value_or(0ul)); } auto outbound_stats = report->GetStatsOfType(); for (const auto& stat : outbound_stats) { - cur_stats.payload_sent += - DataSize::Bytes(stat->bytes_sent.ValueOrDefault(0ul) + - stat->header_bytes_sent.ValueOrDefault(0ul)); + cur_stats.payload_sent += DataSize::Bytes( + stat->bytes_sent.value_or(0ul) + stat->header_bytes_sent.value_or(0ul)); } auto candidate_pairs_stats = report->GetStatsOfType(); for (const auto& stat : candidate_pairs_stats) { cur_stats.total_received += - DataSize::Bytes(stat->bytes_received.ValueOrDefault(0ul)); - cur_stats.total_sent += - DataSize::Bytes(stat->bytes_sent.ValueOrDefault(0ul)); - cur_stats.packets_received += stat->packets_received.ValueOrDefault(0ul); - cur_stats.packets_sent += stat->packets_sent.ValueOrDefault(0ul); + DataSize::Bytes(stat->bytes_received.value_or(0ul)); + cur_stats.total_sent += DataSize::Bytes(stat->bytes_sent.value_or(0ul)); + cur_stats.packets_received += stat->packets_received.value_or(0ul); + cur_stats.packets_sent += stat->packets_sent.value_or(0ul); } MutexLock lock(&mutex_); @@ -327,11 +326,6 @@ void StatsBasedNetworkQualityMetricsReporter::OnStatsReports( void StatsBasedNetworkQualityMetricsReporter::StopAndReportResults() { Timestamp end_time = clock_->CurrentTime(); - if (!webrtc::field_trial::IsEnabled(kUseStandardBytesStats)) { - RTC_LOG(LS_ERROR) - << "Non-standard GetStats; \"payload\" counts include RTP headers"; - } - std::map stats = collector_.GetStats(); for (const auto& entry : stats) { LogNetworkLayerStats(entry.first, entry.second); @@ -370,10 +364,8 @@ void StatsBasedNetworkQualityMetricsReporter::ReportStats( const NetworkLayerStats& network_layer_stats, int64_t packet_loss, const Timestamp& end_time) { - // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. std::map metric_metadata{ - {MetricMetadataKey::kPeerMetadataKey, pc_label}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; + {MetricMetadataKey::kPeerMetadataKey, pc_label}}; metrics_logger_->LogSingleValueMetric( "bytes_discarded_no_receiver", GetTestCaseName(pc_label), network_layer_stats.endpoints_stats.overall_incoming_stats @@ -424,9 +416,10 @@ void StatsBasedNetworkQualityMetricsReporter::ReportStats( std::string StatsBasedNetworkQualityMetricsReporter::GetTestCaseName( absl::string_view network_label) const { - rtc::StringBuilder builder; - builder << test_case_name_ << "/" << network_label.data(); - return builder.str(); + if (!absl::GetFlag(FLAGS_isolated_script_test_perf_output).empty()) { + return test_case_name_ + "/" + std::string(network_label); + } + return test_case_name_; } void StatsBasedNetworkQualityMetricsReporter::LogNetworkLayerStats( @@ -440,11 +433,9 @@ void StatsBasedNetworkQualityMetricsReporter::LogNetworkLayerStats( stats.endpoints_stats.overall_incoming_stats.packets_received >= 2 ? 
stats.endpoints_stats.overall_incoming_stats.AverageReceiveRate() : DataRate::Zero(); - // TODO(bugs.webrtc.org/14757): Remove kExperimentalTestNameMetadataKey. std::map metric_metadata{ - {MetricMetadataKey::kPeerMetadataKey, peer_name}, - {MetricMetadataKey::kExperimentalTestNameMetadataKey, test_case_name_}}; - rtc::StringBuilder log; + {MetricMetadataKey::kPeerMetadataKey, peer_name}}; + StringBuilder log; log << "Raw network layer statistic for [" << peer_name << "]:\n" << "Local IPs:\n"; for (size_t i = 0; i < stats.endpoints_stats.local_addresses.size(); ++i) { diff --git a/test/pc/e2e/stats_based_network_quality_metrics_reporter.h b/test/pc/e2e/stats_based_network_quality_metrics_reporter.h index 60daf40c8c..f401f10946 100644 --- a/test/pc/e2e/stats_based_network_quality_metrics_reporter.h +++ b/test/pc/e2e/stats_based_network_quality_metrics_reporter.h @@ -65,14 +65,11 @@ class StatsBasedNetworkQualityMetricsReporter const TrackIdStreamInfoMap* reporter_helper) override; void OnStatsReports( absl::string_view pc_label, - const rtc::scoped_refptr& report) override; + const scoped_refptr& report) override; void StopAndReportResults() override; private: struct PCStats { - // TODO(bugs.webrtc.org/10525): Separate audio and video counters. Depends - // on standard stat counters, enabled by field trial - // "WebRTC-UseStandardBytesStats". DataSize payload_received = DataSize::Zero(); DataSize payload_sent = DataSize::Zero(); @@ -106,7 +103,7 @@ class StatsBasedNetworkQualityMetricsReporter RTC_GUARDED_BY(mutex_); std::map> peer_downlinks_ RTC_GUARDED_BY(mutex_); - std::map ip_to_peer_ RTC_GUARDED_BY(mutex_); + std::map ip_to_peer_ RTC_GUARDED_BY(mutex_); NetworkEmulationManager* const network_emulation_; }; diff --git a/test/pc/e2e/stats_based_network_quality_metrics_reporter_test.cc b/test/pc/e2e/stats_based_network_quality_metrics_reporter_test.cc index be55149482..fe2947aa79 100644 --- a/test/pc/e2e/stats_based_network_quality_metrics_reporter_test.cc +++ b/test/pc/e2e/stats_based_network_quality_metrics_reporter_test.cc @@ -12,11 +12,11 @@ #include #include +#include #include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/array_view.h" #include "api/test/create_network_emulation_manager.h" #include "api/test/create_peer_connection_quality_test_frame_generator.h" @@ -47,38 +47,38 @@ using ::webrtc::webrtc_pc_e2e::PeerConfigurer; // Adds a peer with some audio and video (the client should not care about // details about audio and video configs). 
-void AddDefaultAudioVideoPeer( - absl::string_view peer_name, - absl::string_view audio_stream_label, - absl::string_view video_stream_label, - const PeerNetworkDependencies& network_dependencies, - PeerConnectionE2EQualityTestFixture& fixture) { - AudioConfig audio{std::string(audio_stream_label)}; - audio.sync_group = std::string(peer_name); +void AddDefaultAudioVideoPeer(absl::string_view peer_name, + absl::string_view audio_stream_label, + absl::string_view video_stream_label, + EmulatedNetworkManagerInterface& network, + PeerConnectionE2EQualityTestFixture& fixture) { + AudioConfig audio{.stream_label = std::string(audio_stream_label), + .sync_group = std::string(peer_name)}; VideoConfig video(std::string(video_stream_label), 320, 180, 15); video.sync_group = std::string(peer_name); - auto peer = std::make_unique(network_dependencies); + auto peer = std::make_unique(network); peer->SetName(peer_name); peer->SetAudioConfig(std::move(audio)); peer->AddVideoConfig(std::move(video)); - peer->SetVideoCodecs({VideoCodecConfig(cricket::kVp8CodecName)}); + peer->SetVideoCodecs({VideoCodecConfig(kVp8CodecName)}); fixture.AddPeer(std::move(peer)); } -absl::optional FindMeetricByName(absl::string_view name, - rtc::ArrayView metrics) { +std::optional FindMeetricByName(absl::string_view name, + ArrayView metrics) { for (const Metric& metric : metrics) { if (metric.name == name) { return metric; } } - return absl::nullopt; + return std::nullopt; } TEST(StatsBasedNetworkQualityMetricsReporterTest, DebugStatsAreCollected) { std::unique_ptr network_emulation = - CreateNetworkEmulationManager(TimeMode::kSimulated, - EmulatedNetworkStatsGatheringMode::kDebug); + CreateNetworkEmulationManager( + {.time_mode = TimeMode::kSimulated, + .stats_gathering_mode = EmulatedNetworkStatsGatheringMode::kDebug}); DefaultMetricsLogger metrics_logger( network_emulation->time_controller()->GetClock()); PeerConnectionE2EQualityTest fixture( @@ -91,11 +91,13 @@ TEST(StatsBasedNetworkQualityMetricsReporterTest, DebugStatsAreCollected) { EmulatedEndpoint* bob_endpoint = network_emulation->CreateEndpoint(EmulatedEndpointConfig()); - EmulatedNetworkNode* alice_link = network_emulation->CreateEmulatedNode( - BuiltInNetworkBehaviorConfig{.link_capacity_kbps = 500}); + EmulatedNetworkNode* alice_link = + network_emulation->CreateEmulatedNode(BuiltInNetworkBehaviorConfig{ + .link_capacity = DataRate::KilobitsPerSec(500)}); network_emulation->CreateRoute(alice_endpoint, {alice_link}, bob_endpoint); - EmulatedNetworkNode* bob_link = network_emulation->CreateEmulatedNode( - BuiltInNetworkBehaviorConfig{.link_capacity_kbps = 500}); + EmulatedNetworkNode* bob_link = + network_emulation->CreateEmulatedNode(BuiltInNetworkBehaviorConfig{ + .link_capacity = DataRate::KilobitsPerSec(500)}); network_emulation->CreateRoute(bob_endpoint, {bob_link}, alice_endpoint); EmulatedNetworkManagerInterface* alice_network = @@ -105,9 +107,9 @@ TEST(StatsBasedNetworkQualityMetricsReporterTest, DebugStatsAreCollected) { network_emulation->CreateEmulatedNetworkManagerInterface({bob_endpoint}); AddDefaultAudioVideoPeer("alice", "alice_audio", "alice_video", - alice_network->network_dependencies(), fixture); - AddDefaultAudioVideoPeer("bob", "bob_audio", "bob_video", - bob_network->network_dependencies(), fixture); + *alice_network, fixture); + AddDefaultAudioVideoPeer("bob", "bob_audio", "bob_video", *bob_network, + fixture); auto network_stats_reporter = std::make_unique( @@ -125,20 +127,20 @@ TEST(StatsBasedNetworkQualityMetricsReporterTest, 
DebugStatsAreCollected) { fixture.Run(RunParams(TimeDelta::Seconds(4))); std::vector metrics = metrics_logger.GetCollectedMetrics(); - absl::optional uplink_packet_transport_time = + std::optional uplink_packet_transport_time = FindMeetricByName("uplink_packet_transport_time", metrics); ASSERT_TRUE(uplink_packet_transport_time.has_value()); ASSERT_FALSE(uplink_packet_transport_time->time_series.samples.empty()); - absl::optional uplink_size_to_packet_transport_time = + std::optional uplink_size_to_packet_transport_time = FindMeetricByName("uplink_size_to_packet_transport_time", metrics); ASSERT_TRUE(uplink_size_to_packet_transport_time.has_value()); ASSERT_FALSE( uplink_size_to_packet_transport_time->time_series.samples.empty()); - absl::optional downlink_packet_transport_time = + std::optional downlink_packet_transport_time = FindMeetricByName("downlink_packet_transport_time", metrics); ASSERT_TRUE(downlink_packet_transport_time.has_value()); ASSERT_FALSE(downlink_packet_transport_time->time_series.samples.empty()); - absl::optional downlink_size_to_packet_transport_time = + std::optional downlink_size_to_packet_transport_time = FindMeetricByName("downlink_size_to_packet_transport_time", metrics); ASSERT_TRUE(downlink_size_to_packet_transport_time.has_value()); ASSERT_FALSE( diff --git a/test/pc/e2e/stats_poller.cc b/test/pc/e2e/stats_poller.cc index c04805fb20..49daf20c1b 100644 --- a/test/pc/e2e/stats_poller.cc +++ b/test/pc/e2e/stats_poller.cc @@ -23,7 +23,7 @@ void InternalStatsObserver::PollStats() { } void InternalStatsObserver::OnStatsDelivered( - const rtc::scoped_refptr& report) { + const scoped_refptr& report) { for (auto* observer : observers_) { observer->OnStatsReports(pc_label_, report); } @@ -34,7 +34,7 @@ StatsPoller::StatsPoller(std::vector observers, : observers_(std::move(observers)) { webrtc::MutexLock lock(&mutex_); for (auto& peer : peers) { - pollers_.push_back(rtc::make_ref_counted( + pollers_.push_back(make_ref_counted( peer.first, peer.second, observers_)); } } @@ -44,7 +44,7 @@ StatsPoller::StatsPoller(std::vector observers, : observers_(std::move(observers)) { webrtc::MutexLock lock(&mutex_); for (auto& peer : peers) { - pollers_.push_back(rtc::make_ref_counted( + pollers_.push_back(make_ref_counted( peer.first, peer.second, observers_)); } } @@ -59,8 +59,8 @@ void StatsPoller::PollStatsAndNotifyObservers() { void StatsPoller::RegisterParticipantInCall(absl::string_view peer_name, StatsProvider* peer) { webrtc::MutexLock lock(&mutex_); - pollers_.push_back(rtc::make_ref_counted( - peer_name, peer, observers_)); + pollers_.push_back( + make_ref_counted(peer_name, peer, observers_)); } bool StatsPoller::UnregisterParticipantInCall(absl::string_view peer_name) { diff --git a/test/pc/e2e/stats_poller.h b/test/pc/e2e/stats_poller.h index 3576f1bf05..d2487b0a11 100644 --- a/test/pc/e2e/stats_poller.h +++ b/test/pc/e2e/stats_poller.h @@ -41,7 +41,7 @@ class InternalStatsObserver : public RTCStatsCollectorCallback { void PollStats(); void OnStatsDelivered( - const rtc::scoped_refptr& report) override; + const scoped_refptr& report) override; private: std::string pc_label_; @@ -70,7 +70,7 @@ class StatsPoller { private: const std::vector observers_; webrtc::Mutex mutex_; - std::vector> pollers_ + std::vector> pollers_ RTC_GUARDED_BY(mutex_); }; diff --git a/test/pc/e2e/stats_poller_test.cc b/test/pc/e2e/stats_poller_test.cc index 02a323127b..754145f629 100644 --- a/test/pc/e2e/stats_poller_test.cc +++ b/test/pc/e2e/stats_poller_test.cc @@ -41,7 +41,7 @@ class 
MockStatsObserver : public StatsObserverInterface { MOCK_METHOD(void, OnStatsReports, (absl::string_view pc_label, - const rtc::scoped_refptr& report)); + const webrtc::scoped_refptr& report)); }; TEST(StatsPollerTest, UnregisterParticipantAddedInCtor) { diff --git a/test/pc/e2e/test_activities_executor.cc b/test/pc/e2e/test_activities_executor.cc index 7bcf7dd6c3..cad4f63249 100644 --- a/test/pc/e2e/test_activities_executor.cc +++ b/test/pc/e2e/test_activities_executor.cc @@ -48,7 +48,7 @@ void TestActivitiesExecutor::Stop() { void TestActivitiesExecutor::ScheduleActivity( TimeDelta initial_delay_since_start, - absl::optional interval, + std::optional interval, std::function func) { RTC_CHECK(initial_delay_since_start.IsFinite() && initial_delay_since_start >= TimeDelta::Zero()); @@ -112,7 +112,7 @@ Timestamp TestActivitiesExecutor::Now() const { TestActivitiesExecutor::ScheduledActivity::ScheduledActivity( TimeDelta initial_delay_since_start, - absl::optional interval, + std::optional interval, std::function func) : initial_delay_since_start(initial_delay_since_start), interval(interval), diff --git a/test/pc/e2e/test_activities_executor.h b/test/pc/e2e/test_activities_executor.h index 2469ac7f36..1d9d7ca1c5 100644 --- a/test/pc/e2e/test_activities_executor.h +++ b/test/pc/e2e/test_activities_executor.h @@ -11,10 +11,10 @@ #ifndef TEST_PC_E2E_TEST_ACTIVITIES_EXECUTOR_H_ #define TEST_PC_E2E_TEST_ACTIVITIES_EXECUTOR_H_ +#include #include #include -#include "absl/types/optional.h" #include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" @@ -43,17 +43,17 @@ class TestActivitiesExecutor { // If test is started, then it will be executed immediately according to its // schedule. void ScheduleActivity(TimeDelta initial_delay_since_start, - absl::optional interval, + std::optional interval, std::function func); private: struct ScheduledActivity { ScheduledActivity(TimeDelta initial_delay_since_start, - absl::optional interval, + std::optional interval, std::function func); TimeDelta initial_delay_since_start; - absl::optional interval; + std::optional interval; std::function func; }; diff --git a/test/pc/e2e/test_peer.cc b/test/pc/e2e/test_peer.cc index b3a9e1c164..a0c5dc48f3 100644 --- a/test/pc/e2e/test_peer.cc +++ b/test/pc/e2e/test_peer.cc @@ -9,15 +9,29 @@ */ #include "test/pc/e2e/test_peer.h" +#include #include #include +#include -#include "absl/memory/memory.h" #include "absl/strings/string_view.h" +#include "api/jsep.h" +#include "api/make_ref_counted.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" #include "api/scoped_refptr.h" +#include "api/set_remote_description_observer_interface.h" +#include "api/stats/rtc_stats_collector_callback.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" #include "api/test/pclf/peer_configurer.h" -#include "modules/audio_processing/include/audio_processing.h" +#include "pc/peer_connection_wrapper.h" +#include "pc/test/mock_peer_connection_observers.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -83,7 +97,7 @@ bool TestPeer::SetRemoteDescription( std::string* error_out) { RTC_CHECK(wrapper_) << "TestPeer is already closed"; - auto observer = rtc::make_ref_counted(); + auto observer = make_ref_counted(); // We're assuming (and 
asserting) that the PeerConnection implementation of // SetRemoteDescription is synchronous when called on the signaling thread. pc()->SetRemoteDescription(std::move(desc), observer); @@ -121,29 +135,25 @@ void TestPeer::Close() { signaling_thread_task_safety_->SetNotAlive(); wrapper_->pc()->Close(); remote_ice_candidates_.clear(); - audio_processing_ = nullptr; video_sources_.clear(); wrapper_ = nullptr; worker_thread_ = nullptr; } -TestPeer::TestPeer( - rtc::scoped_refptr pc_factory, - rtc::scoped_refptr pc, - std::unique_ptr observer, - Params params, - ConfigurableParams configurable_params, - std::vector video_sources, - rtc::scoped_refptr audio_processing, - std::unique_ptr worker_thread) +TestPeer::TestPeer(scoped_refptr pc_factory, + scoped_refptr pc, + std::unique_ptr observer, + Params params, + ConfigurableParams configurable_params, + std::vector video_sources, + std::unique_ptr worker_thread) : params_(std::move(params)), configurable_params_(std::move(configurable_params)), worker_thread_(std::move(worker_thread)), wrapper_(std::make_unique(std::move(pc_factory), std::move(pc), std::move(observer))), - video_sources_(std::move(video_sources)), - audio_processing_(audio_processing) { + video_sources_(std::move(video_sources)) { signaling_thread_task_safety_ = PendingTaskSafetyFlag::CreateDetached(); } diff --git a/test/pc/e2e/test_peer.h b/test/pc/e2e/test_peer.h index 1ce2acbdf0..db13e8986b 100644 --- a/test/pc/e2e/test_peer.h +++ b/test/pc/e2e/test_peer.h @@ -11,24 +11,34 @@ #ifndef TEST_PC_E2E_TEST_PEER_H_ #define TEST_PC_E2E_TEST_PEER_H_ +#include #include +#include +#include +#include #include -#include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/function_view.h" +#include "api/data_channel_interface.h" +#include "api/jsep.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/peer_connection_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" -#include "api/sequence_checker.h" -#include "api/set_remote_description_observer_interface.h" +#include "api/stats/rtc_stats_collector_callback.h" +#include "api/stats/rtc_stats_report.h" #include "api/task_queue/pending_task_safety_flag.h" -#include "api/test/frame_generator_interface.h" #include "api/test/pclf/media_configuration.h" #include "api/test/pclf/media_quality_test_params.h" #include "api/test/pclf/peer_configurer.h" #include "pc/peer_connection_wrapper.h" -#include "rtc_base/logging.h" +#include "pc/test/mock_peer_connection_observers.h" +#include "rtc_base/checks.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" #include "test/pc/e2e/stats_provider.h" namespace webrtc { @@ -70,8 +80,7 @@ class TestPeer final : public StatsProvider { // Tell underlying `PeerConnection` to create an Offer. // `observer` will be invoked on the signaling thread when offer is created. 
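// A minimal sketch, assuming only the public api/jsep.h interface (this class
// and the usage line are illustrative additions, not part of the patch): an
// observer that could be handed to the CreateOffer() helper below.
// OnSuccess() receives ownership of the created offer.
// Needs: <memory>, api/jsep.h, api/make_ref_counted.h, api/rtc_error.h,
// rtc_base/logging.h.
class CapturingCreateSdpObserver
    : public webrtc::CreateSessionDescriptionObserver {
 public:
  void OnSuccess(webrtc::SessionDescriptionInterface* desc) override {
    offer_.reset(desc);  // Take ownership, per the interface contract.
  }
  void OnFailure(webrtc::RTCError error) override {
    RTC_LOG(LS_ERROR) << "CreateOffer failed: " << error.message();
  }

 private:
  std::unique_ptr<webrtc::SessionDescriptionInterface> offer_;
};
// Usage sketch (test_peer is a TestPeer*):
//   test_peer->CreateOffer(
//       webrtc::make_ref_counted<CapturingCreateSdpObserver>());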
- void CreateOffer( - rtc::scoped_refptr observer) { + void CreateOffer(scoped_refptr observer) { RTC_CHECK(wrapper_) << "TestPeer is already closed"; pc()->CreateOffer(observer.get(), params_.rtc_offer_answer_options); } @@ -95,23 +104,23 @@ class TestPeer final : public StatsProvider { bool SetRemoteDescription(std::unique_ptr desc, std::string* error_out = nullptr); - rtc::scoped_refptr AddTransceiver( - cricket::MediaType media_type, + scoped_refptr AddTransceiver( + webrtc::MediaType media_type, const RtpTransceiverInit& init) { RTC_CHECK(wrapper_) << "TestPeer is already closed"; return wrapper_->AddTransceiver(media_type, init); } - rtc::scoped_refptr AddTrack( - rtc::scoped_refptr track, + scoped_refptr AddTrack( + scoped_refptr track, const std::vector& stream_ids = {}) { RTC_CHECK(wrapper_) << "TestPeer is already closed"; return wrapper_->AddTrack(track, stream_ids); } - rtc::scoped_refptr CreateDataChannel( + scoped_refptr CreateDataChannel( const std::string& label, - const absl::optional& config = absl::nullopt) { + const std::optional& config = std::nullopt) { RTC_CHECK(wrapper_) << "TestPeer is already closed"; return wrapper_->CreateDataChannel(label, config); } @@ -131,16 +140,14 @@ class TestPeer final : public StatsProvider { return wrapper_->IsIceConnected(); } - rtc::scoped_refptr GetStats() { + scoped_refptr GetStats() { RTC_CHECK(wrapper_) << "TestPeer is already closed"; return wrapper_->GetStats(); } void DetachAecDump() { RTC_CHECK(wrapper_) << "TestPeer is already closed"; - if (audio_processing_) { - audio_processing_->DetachAecDump(); - } + wrapper_->pc_factory()->StopAecDump(); } // Adds provided `candidates` to the owned peer connection. @@ -153,14 +160,13 @@ class TestPeer final : public StatsProvider { protected: friend class TestPeerFactory; - TestPeer(rtc::scoped_refptr pc_factory, - rtc::scoped_refptr pc, + TestPeer(scoped_refptr pc_factory, + scoped_refptr pc, std::unique_ptr observer, Params params, ConfigurableParams configurable_params, std::vector video_sources, - rtc::scoped_refptr audio_processing, - std::unique_ptr worker_thread); + std::unique_ptr worker_thread); private: const Params params_; @@ -170,16 +176,14 @@ class TestPeer final : public StatsProvider { // Safety flag to protect all tasks posted on the signaling thread to not be // executed after `wrapper_` object is destructed. - rtc::scoped_refptr signaling_thread_task_safety_ = - nullptr; + scoped_refptr signaling_thread_task_safety_ = nullptr; // Keeps ownership of worker thread. It has to be destroyed after `wrapper_`. // `worker_thread_`can be null if the Peer use only one thread as both the // worker thread and network thread. 
- std::unique_ptr worker_thread_; + std::unique_ptr worker_thread_; std::unique_ptr wrapper_; std::vector video_sources_; - rtc::scoped_refptr audio_processing_; std::vector> remote_ice_candidates_; }; diff --git a/test/pc/e2e/test_peer_factory.cc b/test/pc/e2e/test_peer_factory.cc index 9b2f2d6953..07b980a084 100644 --- a/test/pc/e2e/test_peer_factory.cc +++ b/test/pc/e2e/test_peer_factory.cc @@ -9,25 +9,38 @@ */ #include "test/pc/e2e/test_peer_factory.h" +#include +#include +#include #include +#include #include "absl/memory/memory.h" #include "absl/strings/string_view.h" -#include "api/task_queue/default_task_queue_factory.h" +#include "api/audio/audio_device.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_event_log/rtc_event_log_factory.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_factory.h" #include "api/test/create_time_controller.h" #include "api/test/pclf/media_configuration.h" +#include "api/test/pclf/media_quality_test_params.h" #include "api/test/pclf/peer_configurer.h" #include "api/test/time_controller.h" #include "api/transport/field_trial_based_config.h" #include "api/video_codecs/builtin_video_decoder_factory.h" #include "api/video_codecs/builtin_video_encoder_factory.h" -#include "media/engine/webrtc_media_engine.h" -#include "media/engine/webrtc_media_engine_defaults.h" -#include "modules/audio_processing/aec_dump/aec_dump_factory.h" -#include "p2p/client/basic_port_allocator.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "modules/audio_device/include/test_audio_device.h" +#include "pc/test/mock_peer_connection_observers.h" +#include "rtc_base/checks.h" +#include "rtc_base/system/file_wrapper.h" #include "rtc_base/thread.h" #include "test/pc/e2e/analyzer/video/quality_analyzing_video_encoder.h" +#include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h" #include "test/pc/e2e/echo/echo_emulation.h" +#include "test/pc/e2e/test_peer.h" #include "test/testsupport/copy_to_file_audio_capturer.h" namespace webrtc { @@ -44,24 +57,14 @@ constexpr int kDefaultSamplingFrequencyInHz = 48000; // and `pc_dependencies` if they are omitted. Also setup required // dependencies, that won't be specially provided by factory and will be just // transferred to peer connection creation code. -void SetMandatoryEntities(InjectableComponents* components, - TimeController& time_controller) { +void SetMandatoryEntities(InjectableComponents* components) { RTC_DCHECK(components->pcf_dependencies); RTC_DCHECK(components->pc_dependencies); // Setup required peer connection factory dependencies. - if (components->pcf_dependencies->task_queue_factory == nullptr) { - components->pcf_dependencies->task_queue_factory = - time_controller.CreateTaskQueueFactory(); - } - if (components->pcf_dependencies->call_factory == nullptr) { - components->pcf_dependencies->call_factory = - CreateTimeControllerBasedCallFactory(&time_controller); - } if (components->pcf_dependencies->event_log_factory == nullptr) { components->pcf_dependencies->event_log_factory = - std::make_unique( - components->pcf_dependencies->task_queue_factory.get()); + std::make_unique(); } if (!components->pcf_dependencies->trials) { components->pcf_dependencies->trials = @@ -71,7 +74,7 @@ void SetMandatoryEntities(InjectableComponents* components, // Returns mapping from stream label to optional spatial index. // If we have stream label "Foo" and mapping contains -// 1. 
`absl::nullopt` means all simulcast/SVC streams are required +// 1. `std::nullopt` means all simulcast/SVC streams are required // 2. Concrete value means that particular simulcast/SVC stream have to be // analyzed. EmulatedSFUConfigMap CalculateRequiredSpatialIndexPerStream( @@ -91,7 +94,7 @@ EmulatedSFUConfigMap CalculateRequiredSpatialIndexPerStream( } std::unique_ptr CreateAudioRenderer( - const absl::optional& config) { + const std::optional& config) { if (!config) { // Return default renderer because we always require some renderer. return TestAudioDeviceModule::CreateDiscardRenderer( @@ -106,7 +109,7 @@ std::unique_ptr CreateAudioRenderer( } std::unique_ptr CreateAudioCapturer( - const absl::optional& audio_config) { + const std::optional& audio_config) { if (!audio_config) { // If we have no audio config we still need to provide some audio device. // In such case use generated capturer. Despite of we provided audio here, @@ -123,10 +126,10 @@ std::unique_ptr CreateAudioCapturer( } } -rtc::scoped_refptr CreateAudioDeviceModule( - absl::optional audio_config, - absl::optional remote_audio_config, - absl::optional echo_emulation_config, +scoped_refptr CreateAudioDeviceModule( + std::optional audio_config, + std::optional remote_audio_config, + std::optional echo_emulation_config, TaskQueueFactory* task_queue_factory) { std::unique_ptr renderer = CreateAudioRenderer(remote_audio_config); @@ -154,27 +157,6 @@ rtc::scoped_refptr CreateAudioDeviceModule( std::move(renderer), /*speed=*/1.f); } -std::unique_ptr CreateMediaEngine( - PeerConnectionFactoryComponents* pcf_dependencies, - rtc::scoped_refptr audio_device_module) { - cricket::MediaEngineDependencies media_deps; - media_deps.task_queue_factory = pcf_dependencies->task_queue_factory.get(); - media_deps.adm = audio_device_module; - media_deps.audio_processing = pcf_dependencies->audio_processing; - media_deps.audio_mixer = pcf_dependencies->audio_mixer; - media_deps.video_encoder_factory = - std::move(pcf_dependencies->video_encoder_factory); - media_deps.video_decoder_factory = - std::move(pcf_dependencies->video_decoder_factory); - media_deps.audio_encoder_factory = pcf_dependencies->audio_encoder_factory; - media_deps.audio_decoder_factory = pcf_dependencies->audio_decoder_factory; - webrtc::SetMediaEngineDefaults(&media_deps); - RTC_DCHECK(pcf_dependencies->trials); - media_deps.trials = pcf_dependencies->trials.get(); - - return cricket::CreateMediaEngine(std::move(media_deps)); -} - void WrapVideoEncoderFactory( absl::string_view peer_name, double bitrate_multiplier, @@ -212,19 +194,20 @@ void WrapVideoDecoderFactory( // from InjectableComponents::PeerConnectionFactoryComponents. 
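// A minimal sketch, assuming the names used in the hunk below (the helper
// function and its parameter names are illustrative, not part of the patch):
// with the cricket media engine plumbing removed, the media factories are set
// directly on PeerConnectionFactoryDependencies and activated via
// EnableMediaWithDefaultsAndTimeController().
webrtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
BuildPeerConnectionFactorySketch(
    webrtc::TimeController& time_controller,
    webrtc::scoped_refptr<webrtc::AudioDeviceModule> adm,
    std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory,
    std::unique_ptr<webrtc::VideoDecoderFactory> video_decoder_factory) {
  webrtc::PeerConnectionFactoryDependencies deps;
  deps.adm = std::move(adm);
  deps.video_encoder_factory = std::move(video_encoder_factory);
  deps.video_decoder_factory = std::move(video_decoder_factory);
  // Fills in default audio codec factories and the time-controller-driven
  // media plumbing that replaces the removed cricket::MediaEngine.
  webrtc::EnableMediaWithDefaultsAndTimeController(time_controller, deps);
  return webrtc::CreateModularPeerConnectionFactory(std::move(deps));
}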
PeerConnectionFactoryDependencies CreatePCFDependencies( std::unique_ptr pcf_dependencies, - std::unique_ptr media_engine, - rtc::Thread* signaling_thread, - rtc::Thread* worker_thread, - rtc::Thread* network_thread) { + TimeController& time_controller, + scoped_refptr audio_device_module, + Thread* signaling_thread, + Thread* worker_thread, + Thread* network_thread) { PeerConnectionFactoryDependencies pcf_deps; pcf_deps.signaling_thread = signaling_thread; pcf_deps.worker_thread = worker_thread; pcf_deps.network_thread = network_thread; - pcf_deps.media_engine = std::move(media_engine); + pcf_deps.socket_factory = pcf_dependencies->socket_factory; + pcf_deps.network_manager = std::move(pcf_dependencies->network_manager); - pcf_deps.call_factory = std::move(pcf_dependencies->call_factory); pcf_deps.event_log_factory = std::move(pcf_dependencies->event_log_factory); - pcf_deps.task_queue_factory = std::move(pcf_dependencies->task_queue_factory); + pcf_deps.task_queue_factory = time_controller.CreateTaskQueueFactory(); if (pcf_dependencies->fec_controller_factory != nullptr) { pcf_deps.fec_controller_factory = @@ -241,6 +224,19 @@ PeerConnectionFactoryDependencies CreatePCFDependencies( pcf_deps.trials = std::move(pcf_dependencies->trials); } + // Media dependencies + pcf_deps.adm = std::move(audio_device_module); + pcf_deps.audio_processing_builder = + std::move(pcf_dependencies->audio_processing); + pcf_deps.audio_mixer = pcf_dependencies->audio_mixer; + pcf_deps.video_encoder_factory = + std::move(pcf_dependencies->video_encoder_factory); + pcf_deps.video_decoder_factory = + std::move(pcf_dependencies->video_decoder_factory); + pcf_deps.audio_encoder_factory = pcf_dependencies->audio_encoder_factory; + pcf_deps.audio_decoder_factory = pcf_dependencies->audio_decoder_factory; + EnableMediaWithDefaultsAndTimeController(time_controller, pcf_deps); + return pcf_deps; } @@ -248,18 +244,10 @@ PeerConnectionFactoryDependencies CreatePCFDependencies( // from InjectableComponents::PeerConnectionComponents. 
PeerConnectionDependencies CreatePCDependencies( MockPeerConnectionObserver* observer, - uint32_t port_allocator_extra_flags, std::unique_ptr pc_dependencies) { PeerConnectionDependencies pc_deps(observer); - auto port_allocator = std::make_unique( - pc_dependencies->network_manager, pc_dependencies->packet_socket_factory); - - // This test does not support TCP - int flags = port_allocator_extra_flags | cricket::PORTALLOCATOR_DISABLE_TCP; - port_allocator->set_flags(port_allocator->flags() | flags); - pc_deps.allocator = std::move(port_allocator); if (pc_dependencies->async_dns_resolver_factory != nullptr) { pc_deps.async_dns_resolver_factory = @@ -280,10 +268,10 @@ PeerConnectionDependencies CreatePCDependencies( } // namespace -absl::optional RemotePeerAudioConfig::Create( - absl::optional config) { +std::optional RemotePeerAudioConfig::Create( + std::optional config) { if (!config) { - return absl::nullopt; + return std::nullopt; } return RemotePeerAudioConfig(config.value()); } @@ -291,8 +279,8 @@ absl::optional RemotePeerAudioConfig::Create( std::unique_ptr TestPeerFactory::CreateTestPeer( std::unique_ptr configurer, std::unique_ptr observer, - absl::optional remote_audio_config, - absl::optional echo_emulation_config) { + std::optional remote_audio_config, + std::optional echo_emulation_config) { std::unique_ptr components = configurer->ReleaseComponents(); std::unique_ptr params = configurer->ReleaseParams(); @@ -305,22 +293,14 @@ std::unique_ptr TestPeerFactory::CreateTestPeer( RTC_DCHECK(configurable_params); RTC_DCHECK_EQ(configurable_params->video_configs.size(), video_sources.size()); - SetMandatoryEntities(components.get(), time_controller_); + SetMandatoryEntities(components.get()); params->rtc_configuration.sdp_semantics = SdpSemantics::kUnifiedPlan; // Create peer connection factory. - if (components->pcf_dependencies->audio_processing == nullptr) { - components->pcf_dependencies->audio_processing = - webrtc::AudioProcessingBuilder().Create(); - } - if (params->aec_dump_path) { - components->pcf_dependencies->audio_processing->CreateAndAttachAecDump( - *params->aec_dump_path, -1, task_queue_); - } - rtc::scoped_refptr audio_device_module = - CreateAudioDeviceModule( - params->audio_config, remote_audio_config, echo_emulation_config, - components->pcf_dependencies->task_queue_factory.get()); + scoped_refptr audio_device_module = + CreateAudioDeviceModule(params->audio_config, remote_audio_config, + echo_emulation_config, + time_controller_.GetTaskQueueFactory()); WrapVideoEncoderFactory( params->name.value(), params->video_encoder_bitrate_multiplier, CalculateRequiredSpatialIndexPerStream( @@ -329,11 +309,8 @@ std::unique_ptr TestPeerFactory::CreateTestPeer( WrapVideoDecoderFactory(params->name.value(), components->pcf_dependencies.get(), video_analyzer_helper_); - std::unique_ptr media_engine = - CreateMediaEngine(components->pcf_dependencies.get(), - audio_device_module); - std::unique_ptr owned_worker_thread = + std::unique_ptr owned_worker_thread = components->worker_thread != nullptr ? 
nullptr : time_controller_.CreateThread("worker_thread"); @@ -341,32 +318,35 @@ std::unique_ptr TestPeerFactory::CreateTestPeer( components->worker_thread = owned_worker_thread.get(); } - // Store `webrtc::AudioProcessing` into local variable before move of - // `components->pcf_dependencies` - rtc::scoped_refptr audio_processing = - components->pcf_dependencies->audio_processing; PeerConnectionFactoryDependencies pcf_deps = CreatePCFDependencies( - std::move(components->pcf_dependencies), std::move(media_engine), - signaling_thread_, components->worker_thread, components->network_thread); - rtc::scoped_refptr peer_connection_factory = + std::move(components->pcf_dependencies), time_controller_, + std::move(audio_device_module), signaling_thread_, + components->worker_thread, components->network_thread); + scoped_refptr peer_connection_factory = CreateModularPeerConnectionFactory(std::move(pcf_deps)); + peer_connection_factory->SetOptions(params->peer_connection_factory_options); + if (params->aec_dump_path) { + peer_connection_factory->StartAecDump( + FileWrapper::OpenWriteOnly(*params->aec_dump_path).Release(), -1); + } // Create peer connection. - PeerConnectionDependencies pc_deps = - CreatePCDependencies(observer.get(), params->port_allocator_extra_flags, - std::move(components->pc_dependencies)); - rtc::scoped_refptr peer_connection = + PeerConnectionDependencies pc_deps = CreatePCDependencies( + observer.get(), std::move(components->pc_dependencies)); + + params->rtc_configuration.port_allocator_config.flags = + params->port_allocator_flags; + scoped_refptr peer_connection = peer_connection_factory ->CreatePeerConnectionOrError(params->rtc_configuration, std::move(pc_deps)) .MoveValue(); peer_connection->SetBitrate(params->bitrate_settings); - return absl::WrapUnique( - new TestPeer(peer_connection_factory, peer_connection, - std::move(observer), std::move(*params), - std::move(*configurable_params), std::move(video_sources), - audio_processing, std::move(owned_worker_thread))); + return absl::WrapUnique(new TestPeer( + peer_connection_factory, peer_connection, std::move(observer), + std::move(*params), std::move(*configurable_params), + std::move(video_sources), std::move(owned_worker_thread))); } } // namespace webrtc_pc_e2e diff --git a/test/pc/e2e/test_peer_factory.h b/test/pc/e2e/test_peer_factory.h index f2698e2a15..e4044fb367 100644 --- a/test/pc/e2e/test_peer_factory.h +++ b/test/pc/e2e/test_peer_factory.h @@ -11,19 +11,15 @@ #ifndef TEST_PC_E2E_TEST_PEER_FACTORY_H_ #define TEST_PC_E2E_TEST_PEER_FACTORY_H_ -#include #include +#include #include -#include -#include "absl/strings/string_view.h" -#include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/test/pclf/media_configuration.h" -#include "api/test/pclf/media_quality_test_params.h" #include "api/test/pclf/peer_configurer.h" #include "api/test/time_controller.h" -#include "modules/audio_device/include/test_audio_device.h" -#include "rtc_base/task_queue.h" +#include "pc/test/mock_peer_connection_observers.h" +#include "rtc_base/thread.h" #include "test/pc/e2e/analyzer/video/video_quality_analyzer_injection_helper.h" #include "test/pc/e2e/test_peer.h" @@ -35,11 +31,11 @@ struct RemotePeerAudioConfig { : sampling_frequency_in_hz(config.sampling_frequency_in_hz), output_file_name(config.output_dump_file_name) {} - static absl::optional Create( - absl::optional config); + static std::optional Create( + std::optional config); int sampling_frequency_in_hz; - absl::optional output_file_name; + std::optional 
output_file_name; }; class TestPeerFactory { @@ -51,15 +47,12 @@ class TestPeerFactory { // factories and call factory. // `video_analyzer_helper` will be used to setup video quality analysis for // created peers. - // `task_queue` will be used for AEC dump if it is requested. - TestPeerFactory(rtc::Thread* signaling_thread, + TestPeerFactory(Thread* signaling_thread, TimeController& time_controller, - VideoQualityAnalyzerInjectionHelper* video_analyzer_helper, - rtc::TaskQueue* task_queue) + VideoQualityAnalyzerInjectionHelper* video_analyzer_helper) : signaling_thread_(signaling_thread), time_controller_(time_controller), - video_analyzer_helper_(video_analyzer_helper), - task_queue_(task_queue) {} + video_analyzer_helper_(video_analyzer_helper) {} // Setups all components, that should be provided to WebRTC // PeerConnectionFactory and PeerConnection creation methods, @@ -68,14 +61,13 @@ class TestPeerFactory { std::unique_ptr CreateTestPeer( std::unique_ptr configurer, std::unique_ptr observer, - absl::optional remote_audio_config, - absl::optional echo_emulation_config); + std::optional remote_audio_config, + std::optional echo_emulation_config); private: - rtc::Thread* signaling_thread_; + Thread* signaling_thread_; TimeController& time_controller_; VideoQualityAnalyzerInjectionHelper* video_analyzer_helper_; - rtc::TaskQueue* task_queue_; }; } // namespace webrtc_pc_e2e diff --git a/test/pc/sctp/BUILD.gn b/test/pc/sctp/BUILD.gn index f088a5b20c..e30fd825af 100644 --- a/test/pc/sctp/BUILD.gn +++ b/test/pc/sctp/BUILD.gn @@ -12,7 +12,14 @@ rtc_source_set("fake_sctp_transport") { visibility = [ "*" ] sources = [ "fake_sctp_transport.h" ] deps = [ + "../../../api:libjingle_peerconnection_api", + "../../../api:priority", + "../../../api:rtc_error", + "../../../api/environment", + "../../../api/transport:datagram_transport_interface", "../../../api/transport:sctp_transport_factory_interface", "../../../media:rtc_data_sctp_transport_internal", + "../../../rtc_base:checks", + "../../../rtc_base:copy_on_write_buffer", ] } diff --git a/test/pc/sctp/fake_sctp_transport.h b/test/pc/sctp/fake_sctp_transport.h index 1fd2f6128b..a2e5ace88f 100644 --- a/test/pc/sctp/fake_sctp_transport.h +++ b/test/pc/sctp/fake_sctp_transport.h @@ -11,38 +11,57 @@ #ifndef TEST_PC_SCTP_FAKE_SCTP_TRANSPORT_H_ #define TEST_PC_SCTP_FAKE_SCTP_TRANSPORT_H_ +#include +#include #include +#include +#include "api/environment/environment.h" +#include "api/priority.h" +#include "api/rtc_error.h" +#include "api/sctp_transport_interface.h" +#include "api/transport/data_channel_transport_interface.h" #include "api/transport/sctp_transport_factory_interface.h" #include "media/sctp/sctp_transport_internal.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" // Used for tests in this file to verify that PeerConnection responds to signals // from the SctpTransport correctly, and calls Start with the correct // local/remote ports. 
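// A minimal sketch, assuming webrtc::SctpOptions is visible through the
// headers already included in this file (the helper and the literal values
// are illustrative, not part of the patch): the local/remote ports and the
// message-size limit that Start() used to take as three ints now travel in a
// single options struct, which is what the fake below records.
void StartSctpTransportSketch(webrtc::SctpTransportInternal& transport) {
  webrtc::SctpOptions options;
  options.local_port = 5000;              // Illustrative port values.
  options.remote_port = 5000;
  options.max_message_size = 256 * 1024;  // Illustrative limit, in bytes.
  transport.Start(options);
}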
-class FakeSctpTransport : public cricket::SctpTransportInternal { +class FakeSctpTransport : public webrtc::SctpTransportInternal { public: void SetOnConnectedCallback(std::function callback) override {} void SetDataChannelSink(webrtc::DataChannelSink* sink) override {} - void SetDtlsTransport(rtc::PacketTransportInternal* transport) override {} - bool Start(int local_port, int remote_port, int max_message_size) override { - local_port_.emplace(local_port); - remote_port_.emplace(remote_port); - max_message_size_ = max_message_size; + void SetDtlsTransport(webrtc::DtlsTransportInternal* transport) override {} + bool Start(const webrtc::SctpOptions& options) override { + local_port_.emplace(options.local_port); + remote_port_.emplace(options.remote_port); + max_message_size_ = options.max_message_size; + return true; + } + bool OpenStream(int sid, webrtc::PriorityValue priority) override { return true; } - bool OpenStream(int sid) override { return true; } bool ResetStream(int sid) override { return true; } webrtc::RTCError SendData(int sid, const webrtc::SendDataParams& params, - const rtc::CopyOnWriteBuffer& payload) override { + const webrtc::CopyOnWriteBuffer& payload) override { return webrtc::RTCError::OK(); } bool ReadyToSendData() override { return true; } void set_debug_name_for_testing(const char* debug_name) override {} - int max_message_size() const { return max_message_size_; } - absl::optional max_outbound_streams() const { return absl::nullopt; } - absl::optional max_inbound_streams() const { return absl::nullopt; } + int max_message_size() const override { return max_message_size_; } + std::optional max_outbound_streams() const override { + return std::nullopt; + } + std::optional max_inbound_streams() const override { + return std::nullopt; + } + size_t buffered_amount(int sid) const override { return 0; } + size_t buffered_amount_low_threshold(int sid) const override { return 0; } + void SetBufferedAmountLowThreshold(int sid, size_t bytes) override {} int local_port() const { RTC_DCHECK(local_port_); return *local_port_; @@ -53,17 +72,18 @@ class FakeSctpTransport : public cricket::SctpTransportInternal { } private: - absl::optional local_port_; - absl::optional remote_port_; + std::optional local_port_; + std::optional remote_port_; int max_message_size_; }; class FakeSctpTransportFactory : public webrtc::SctpTransportFactoryInterface { public: - std::unique_ptr CreateSctpTransport( - rtc::PacketTransportInternal*) override { + std::unique_ptr CreateSctpTransport( + const webrtc::Environment& env, + webrtc::DtlsTransportInternal*) override { last_fake_sctp_transport_ = new FakeSctpTransport(); - return std::unique_ptr( + return std::unique_ptr( last_fake_sctp_transport_); } diff --git a/test/peer_scenario/BUILD.gn b/test/peer_scenario/BUILD.gn index 18f81a56e6..41bcd65f0b 100644 --- a/test/peer_scenario/BUILD.gn +++ b/test/peer_scenario/BUILD.gn @@ -26,18 +26,29 @@ if (rtc_include_tests) { "..:fake_video_codecs", "..:fileutils", "..:frame_generator_capturer", + "..:scoped_key_value_config", "..:test_support", + "../../api:array_view", + "../../api:audio_options_api", "../../api:candidate", "../../api:create_time_controller", "../../api:libjingle_peerconnection_api", + "../../api:make_ref_counted", + "../../api:media_stream_interface", "../../api:network_emulation_manager_api", - "../../api:rtc_stats_api", + "../../api:rtc_error", + "../../api:scoped_refptr", + "../../api:sequence_checker", "../../api:time_controller", - 
"../../api/audio_codecs:builtin_audio_decoder_factory", - "../../api/audio_codecs:builtin_audio_encoder_factory", + "../../api/environment", "../../api/rtc_event_log:rtc_event_log_factory", - "../../api/task_queue:default_task_queue_factory", + "../../api/test/network_emulation", + "../../api/transport:datagram_transport_interface", + "../../api/transport:enums", "../../api/transport:field_trial_based_config", + "../../api/video:video_frame", + "../../api/video_codecs:scalability_mode", + "../../api/video_codecs:video_codecs_api", "../../api/video_codecs:video_decoder_factory_template", "../../api/video_codecs:video_decoder_factory_template_dav1d_adapter", "../../api/video_codecs:video_decoder_factory_template_libvpx_vp8_adapter", @@ -48,28 +59,41 @@ if (rtc_include_tests) { "../../api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter", "../../api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter", "../../api/video_codecs:video_encoder_factory_template_open_h264_adapter", - "../../media:rtc_audio_video", - "../../media:rtc_media_base", + "../../call:payload_type_picker", + "../../call:rtp_interfaces", + "../../call:rtp_receiver", + "../../media:media_constants", "../../media:rtp_utils", "../../modules/audio_device:test_audio_device_module", "../../modules/rtp_rtcp:rtp_rtcp_format", - "../../p2p:rtc_p2p", - "../../pc:channel", + "../../p2p:basic_packet_socket_factory", + "../../p2p:basic_port_allocator", + "../../p2p:dtls_transport_internal", + "../../p2p:p2p_constants", + "../../p2p:port_allocator", + "../../p2p:transport_description", + "../../pc:dtls_transport", "../../pc:jsep_transport_controller", "../../pc:pc_test_utils", "../../pc:rtp_transport_internal", "../../pc:session_description", + "../../rtc_base:checks", + "../../rtc_base:copy_on_write_buffer", + "../../rtc_base:crypto_random", + "../../rtc_base:logging", + "../../rtc_base:macromagic", + "../../rtc_base:network", "../../rtc_base:null_socket_server", + "../../rtc_base:ssl", "../../rtc_base:stringutils", "../../rtc_base:task_queue_for_test", - "../../test:explicit_key_value_config", - "../../test:scoped_key_value_config", + "../../rtc_base:threading", + "../../rtc_base/third_party/sigslot", "../logging:log_writer", "../network:emulated_network", "../scenario", "../time_controller", - ] - absl_deps = [ + "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/memory", ] diff --git a/test/peer_scenario/OWNERS b/test/peer_scenario/OWNERS new file mode 100644 index 0000000000..5904b95df7 --- /dev/null +++ b/test/peer_scenario/OWNERS @@ -0,0 +1 @@ +perkj@webrtc.org diff --git a/test/peer_scenario/peer_scenario.cc b/test/peer_scenario/peer_scenario.cc index 485e33f67f..8dca6bae26 100644 --- a/test/peer_scenario/peer_scenario.cc +++ b/test/peer_scenario/peer_scenario.cc @@ -15,6 +15,7 @@ #include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" #include "test/logging/file_log_writer.h" +#include "test/network/network_emulation_manager.h" #include "test/testsupport/file_utils.h" #include "test/time_controller/real_time_controller.h" #include "test/time_controller/simulated_time_controller.h" @@ -55,13 +56,13 @@ PeerScenario::PeerScenario( std::unique_ptr log_writer_manager, TimeMode mode) : log_writer_manager_(std::move(log_writer_manager)), - net_(mode, EmulatedNetworkStatsGatheringMode::kDefault), + net_({.time_mode = mode}), signaling_thread_(net_.time_controller()->GetMainThread()) {} PeerScenarioClient* 
PeerScenario::CreateClient( PeerScenarioClient::Config config) { return CreateClient( - std::string("client_") + rtc::ToString(peer_clients_.size() + 1), config); + std::string("client_") + absl::StrCat(peer_clients_.size() + 1), config); } PeerScenarioClient* PeerScenario::CreateClient( @@ -101,7 +102,7 @@ void PeerScenario::AttachVideoQualityAnalyzer(VideoQualityAnalyzer* analyzer, PeerScenarioClient* receiver) { video_quality_pairs_.emplace_back(clock(), analyzer); auto pair = &video_quality_pairs_.back(); - send_track->AddOrUpdateSink(&pair->capture_tap_, rtc::VideoSinkWants()); + send_track->AddOrUpdateSink(&pair->capture_tap_, VideoSinkWants()); receiver->AddVideoReceiveSink(send_track->id(), &pair->decode_tap_); } diff --git a/test/peer_scenario/peer_scenario.h b/test/peer_scenario/peer_scenario.h index a177eeaac6..f9cf063cb3 100644 --- a/test/peer_scenario/peer_scenario.h +++ b/test/peer_scenario/peer_scenario.h @@ -112,7 +112,7 @@ class PeerScenario { const std::unique_ptr log_writer_manager_; NetworkEmulationManagerImpl net_; - rtc::Thread* const signaling_thread_; + Thread* const signaling_thread_; std::list video_quality_pairs_; std::list peer_clients_; }; diff --git a/test/peer_scenario/peer_scenario_client.cc b/test/peer_scenario/peer_scenario_client.cc index 697bf055a7..391967f14e 100644 --- a/test/peer_scenario/peer_scenario_client.cc +++ b/test/peer_scenario/peer_scenario_client.cc @@ -9,34 +9,68 @@ */ #include "test/peer_scenario/peer_scenario_client.h" +#include +#include #include +#include #include +#include #include +#include +#include "absl/container/inlined_vector.h" #include "absl/memory/memory.h" -#include "api/audio_codecs/builtin_audio_decoder_factory.h" -#include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/audio_options.h" +#include "api/candidate.h" +#include "api/data_channel_interface.h" +#include "api/environment/environment.h" +#include "api/jsep.h" +#include "api/make_ref_counted.h" +#include "api/media_stream_interface.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" #include "api/rtc_event_log/rtc_event_log_factory.h" -#include "api/task_queue/default_task_queue_factory.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_transceiver_interface.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/set_local_description_observer_interface.h" +#include "api/set_remote_description_observer_interface.h" #include "api/test/create_time_controller.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/test/network_emulation_manager.h" #include "api/transport/field_trial_based_config.h" +#include "api/video/video_frame.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder.h" +#include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_decoder_factory_template.h" #include "api/video_codecs/video_decoder_factory_template_dav1d_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h" #include "api/video_codecs/video_decoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_decoder_factory_template_open_h264_adapter.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_factory.h" #include "api/video_codecs/video_encoder_factory_template.h" #include 
"api/video_codecs/video_encoder_factory_template_libaom_av1_adapter.h" #include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h" #include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" -#include "media/engine/webrtc_media_engine.h" +#include "media/base/media_constants.h" #include "modules/audio_device/include/test_audio_device.h" -#include "p2p/client/basic_port_allocator.h" +#include "p2p/base/port_allocator.h" +#include "pc/test/frame_generator_capturer_video_track_source.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/thread.h" #include "test/create_frame_generator_capturer.h" #include "test/fake_decoder.h" #include "test/fake_vp8_encoder.h" #include "test/frame_generator_capturer.h" +#include "test/logging/log_writer.h" namespace webrtc { namespace test { @@ -65,7 +99,7 @@ class LambdaPeerConnectionObserver final : public PeerConnectionObserver { handler(new_state); } void OnDataChannel( - rtc::scoped_refptr data_channel) override { + scoped_refptr data_channel) override { for (const auto& handler : handlers_->on_data_channel) handler(data_channel); } @@ -101,23 +135,21 @@ class LambdaPeerConnectionObserver final : public PeerConnectionObserver { handler(address, port, url, error_code, error_text); } void OnIceCandidatesRemoved( - const std::vector& candidates) override { + const std::vector& candidates) override { for (const auto& handler : handlers_->on_ice_candidates_removed) handler(candidates); } - void OnAddTrack(rtc::scoped_refptr receiver, - const std::vector>& + void OnAddTrack(scoped_refptr receiver, + const std::vector>& streams) override { for (const auto& handler : handlers_->on_add_track) handler(receiver, streams); } - void OnTrack( - rtc::scoped_refptr transceiver) override { + void OnTrack(scoped_refptr transceiver) override { for (const auto& handler : handlers_->on_track) handler(transceiver); } - void OnRemoveTrack( - rtc::scoped_refptr receiver) override { + void OnRemoveTrack(scoped_refptr receiver) override { for (const auto& handler : handlers_->on_remove_track) handler(receiver); } @@ -177,26 +209,28 @@ class LambdaSetRemoteDescriptionObserver class FakeVideoEncoderFactory : public VideoEncoderFactory { public: - FakeVideoEncoderFactory(Clock* clock) : clock_(clock) {} std::vector GetSupportedFormats() const override { - return {SdpVideoFormat("VP8")}; + const absl::InlinedVector + kSupportedScalabilityModes = {webrtc::ScalabilityMode::kL1T1, + webrtc::ScalabilityMode::kL1T2, + webrtc::ScalabilityMode::kL1T3}; + return {SdpVideoFormat(kVp8CodecName, {}, kSupportedScalabilityModes)}; } - std::unique_ptr CreateVideoEncoder( - const SdpVideoFormat& format) override { + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override { RTC_CHECK_EQ(format.name, "VP8"); - return std::make_unique(clock_); + return std::make_unique(env); } - - private: - Clock* const clock_; }; + class FakeVideoDecoderFactory : public VideoDecoderFactory { public: std::vector GetSupportedFormats() const override { - return {SdpVideoFormat("VP8")}; + return {SdpVideoFormat::VP8()}; } - std::unique_ptr CreateVideoDecoder( - const SdpVideoFormat& format) override { + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override { return std::make_unique(); } }; @@ -204,7 +238,7 @@ class FakeVideoDecoderFactory : public VideoDecoderFactory { PeerScenarioClient::PeerScenarioClient( 
NetworkEmulationManager* net, - rtc::Thread* signaling_thread, + Thread* signaling_thread, std::unique_ptr log_writer_factory, PeerScenarioClient::Config config) : endpoints_(CreateEndpoints(net, config.endpoints)), @@ -215,13 +249,13 @@ PeerScenarioClient::PeerScenarioClient( handlers_(config.handlers), observer_(new LambdaPeerConnectionObserver(&handlers_)) { handlers_.on_track.push_back( - [this](rtc::scoped_refptr transceiver) { + [this](scoped_refptr transceiver) { auto track = transceiver->receiver()->track().get(); if (track->kind() == MediaStreamTrackInterface::kVideoKind) { auto* video = static_cast(track); RTC_DCHECK_RUN_ON(signaling_thread_); for (auto* sink : track_id_to_video_sinks_[track->id()]) { - video->AddOrUpdateSink(sink, rtc::VideoSinkWants()); + video->AddOrUpdateSink(sink, VideoSinkWants()); } } }); @@ -246,17 +280,14 @@ PeerScenarioClient::PeerScenarioClient( pcf_deps.network_thread = manager->network_thread(); pcf_deps.signaling_thread = signaling_thread_; pcf_deps.worker_thread = worker_thread_.get(); - pcf_deps.call_factory = - CreateTimeControllerBasedCallFactory(net->time_controller()); + pcf_deps.socket_factory = manager->socket_factory(); + pcf_deps.network_manager = manager->ReleaseNetworkManager(); pcf_deps.task_queue_factory = net->time_controller()->CreateTaskQueueFactory(); - pcf_deps.event_log_factory = - std::make_unique(task_queue_factory_); + pcf_deps.event_log_factory = std::make_unique(); pcf_deps.trials = std::make_unique(); - cricket::MediaEngineDependencies media_deps; - media_deps.task_queue_factory = task_queue_factory_; - media_deps.adm = TestAudioDeviceModule::Create( + pcf_deps.adm = TestAudioDeviceModule::Create( task_queue_factory_, TestAudioDeviceModule::CreatePulsedNoiseCapturer( config.audio.pulsed_noise->amplitude * @@ -264,28 +295,24 @@ PeerScenarioClient::PeerScenarioClient( config.audio.sample_rate, config.audio.channels), TestAudioDeviceModule::CreateDiscardRenderer(config.audio.sample_rate)); - media_deps.audio_processing = AudioProcessingBuilder().Create(); if (config.video.use_fake_codecs) { - media_deps.video_encoder_factory = - std::make_unique( - net->time_controller()->GetClock()); - media_deps.video_decoder_factory = + pcf_deps.video_encoder_factory = + std::make_unique(); + pcf_deps.video_decoder_factory = std::make_unique(); } else { - media_deps.video_encoder_factory = + pcf_deps.video_encoder_factory = std::make_unique>(); - media_deps.video_decoder_factory = + pcf_deps.video_decoder_factory = std::make_unique>(); } - media_deps.audio_encoder_factory = CreateBuiltinAudioEncoderFactory(); - media_deps.audio_decoder_factory = CreateBuiltinAudioDecoderFactory(); - media_deps.trials = pcf_deps.trials.get(); - pcf_deps.media_engine = cricket::CreateMediaEngine(std::move(media_deps)); + EnableMediaWithDefaultsAndTimeController(*net->time_controller(), pcf_deps); + pcf_deps.fec_controller_factory = nullptr; pcf_deps.network_controller_factory = nullptr; pcf_deps.network_state_predictor_factory = nullptr; @@ -296,10 +323,7 @@ PeerScenarioClient::PeerScenarioClient( pc_factory_->SetOptions(pc_options); PeerConnectionDependencies pc_deps(observer_.get()); - pc_deps.allocator = std::make_unique( - manager->network_manager(), manager->packet_socket_factory()); - pc_deps.allocator->set_flags(pc_deps.allocator->flags() | - cricket::PORTALLOCATOR_DISABLE_TCP); + config.rtc_config.port_allocator_config.flags |= PORTALLOCATOR_DISABLE_TCP; peer_connection_ = pc_factory_ ->CreatePeerConnectionOrError(config.rtc_config, 
std::move(pc_deps)) @@ -317,7 +341,7 @@ EmulatedEndpoint* PeerScenarioClient::endpoint(int index) { PeerScenarioClient::AudioSendTrack PeerScenarioClient::CreateAudio( std::string track_id, - cricket::AudioOptions options) { + AudioOptions options) { RTC_DCHECK_RUN_ON(signaling_thread_); AudioSendTrack res; auto source = pc_factory_->CreateAudioSource(options); @@ -336,7 +360,7 @@ PeerScenarioClient::VideoSendTrack PeerScenarioClient::CreateVideo( config.generator); res.capturer = capturer.get(); capturer->Init(); - res.source = rtc::make_ref_counted( + res.source = make_ref_counted( std::move(capturer), config.screencast); res.source->Start(); auto track = pc_factory_->CreateVideoTrack(res.source, track_id); @@ -348,7 +372,7 @@ PeerScenarioClient::VideoSendTrack PeerScenarioClient::CreateVideo( void PeerScenarioClient::AddVideoReceiveSink( std::string track_id, - rtc::VideoSinkInterface* video_sink) { + VideoSinkInterface* video_sink) { RTC_DCHECK_RUN_ON(signaling_thread_); track_id_to_video_sinks_[track_id].push_back(video_sink); } @@ -358,8 +382,9 @@ void PeerScenarioClient::CreateAndSetSdp( std::function offer_handler) { RTC_DCHECK_RUN_ON(signaling_thread_); peer_connection_->CreateOffer( - rtc::make_ref_counted( - [=](std::unique_ptr offer) { + make_ref_counted( + [this, munge_offer, + offer_handler](std::unique_ptr offer) { RTC_DCHECK_RUN_ON(signaling_thread_); if (munge_offer) { munge_offer(offer.get()); @@ -368,7 +393,7 @@ void PeerScenarioClient::CreateAndSetSdp( RTC_CHECK(offer->ToString(&sdp_offer)); peer_connection_->SetLocalDescription( std::move(offer), - rtc::make_ref_counted( + make_ref_counted( [sdp_offer, offer_handler](RTCError) { offer_handler(sdp_offer); })); @@ -379,48 +404,60 @@ void PeerScenarioClient::CreateAndSetSdp( void PeerScenarioClient::SetSdpOfferAndGetAnswer( std::string remote_offer, + std::function remote_description_set, std::function answer_handler) { if (!signaling_thread_->IsCurrent()) { signaling_thread_->PostTask( - [=] { SetSdpOfferAndGetAnswer(remote_offer, answer_handler); }); + [this, remote_offer, remote_description_set, answer_handler] { + SetSdpOfferAndGetAnswer(remote_offer, remote_description_set, + answer_handler); + }); return; } RTC_DCHECK_RUN_ON(signaling_thread_); peer_connection_->SetRemoteDescription( CreateSessionDescription(SdpType::kOffer, remote_offer), - rtc::make_ref_counted([=](RTCError) { - RTC_DCHECK_RUN_ON(signaling_thread_); - peer_connection_->CreateAnswer( - rtc::make_ref_counted( - [=](std::unique_ptr answer) { - RTC_DCHECK_RUN_ON(signaling_thread_); - std::string sdp_answer; - answer->ToString(&sdp_answer); - RTC_LOG(LS_INFO) << sdp_answer; - peer_connection_->SetLocalDescription( - std::move(answer), - rtc::make_ref_counted( - [answer_handler, sdp_answer](RTCError) { - answer_handler(sdp_answer); - })); - }) - .get(), - PeerConnectionInterface::RTCOfferAnswerOptions()); - })); + make_ref_counted( + [this, remote_description_set, answer_handler](RTCError) { + RTC_DCHECK_RUN_ON(signaling_thread_); + if (remote_description_set) { + // Allow the caller to modify transceivers + // before creating the answer. 
+ remote_description_set(); + } + peer_connection_->CreateAnswer( + make_ref_counted( + [this, answer_handler]( + std::unique_ptr answer) { + RTC_DCHECK_RUN_ON(signaling_thread_); + std::string sdp_answer; + answer->ToString(&sdp_answer); + RTC_LOG(LS_INFO) << sdp_answer; + peer_connection_->SetLocalDescription( + std::move(answer), + make_ref_counted( + [answer_handler, sdp_answer](RTCError) { + answer_handler(sdp_answer); + })); + }) + .get(), + PeerConnectionInterface::RTCOfferAnswerOptions()); + })); } void PeerScenarioClient::SetSdpAnswer( std::string remote_answer, std::function done_handler) { if (!signaling_thread_->IsCurrent()) { - signaling_thread_->PostTask( - [=] { SetSdpAnswer(remote_answer, done_handler); }); + signaling_thread_->PostTask([this, remote_answer, done_handler] { + SetSdpAnswer(remote_answer, done_handler); + }); return; } RTC_DCHECK_RUN_ON(signaling_thread_); peer_connection_->SetRemoteDescription( CreateSessionDescription(SdpType::kAnswer, remote_answer), - rtc::make_ref_counted( + make_ref_counted( [remote_answer, done_handler](RTCError) { auto answer = CreateSessionDescription(SdpType::kAnswer, remote_answer); diff --git a/test/peer_scenario/peer_scenario_client.h b/test/peer_scenario/peer_scenario_client.h index e863757759..dfe502ec95 100644 --- a/test/peer_scenario/peer_scenario_client.h +++ b/test/peer_scenario/peer_scenario_client.h @@ -37,7 +37,8 @@ class PeerScenarioClient { struct CallbackHandlers { std::vector> on_signaling_change; - std::vector)>> + std::vector< + std::function)>> on_data_channel; std::vector> on_renegotiation_needed; std::vector< @@ -56,16 +57,17 @@ class PeerScenarioClient { int, const std::string&)>> on_ice_candidate_error; - std::vector&)>> + std::vector&)>> on_ice_candidates_removed; std::vector, - const std::vector>&)>> + webrtc::scoped_refptr, + const std::vector>&)>> on_add_track; std::vector< - std::function)>> + std::function)>> on_track; - std::vector)>> + std::vector< + std::function)>> on_remove_track; }; struct Config { @@ -79,7 +81,7 @@ class PeerScenarioClient { struct PulsedNoise { double amplitude = 0.1; }; - absl::optional pulsed_noise = PulsedNoise(); + std::optional pulsed_noise = PulsedNoise(); } audio; struct Video { bool use_fake_codecs = false; @@ -100,21 +102,21 @@ class PeerScenarioClient { }; struct AudioSendTrack { - rtc::scoped_refptr track; - rtc::scoped_refptr sender; + scoped_refptr track; + scoped_refptr sender; }; struct VideoSendTrack { // Raw pointer to the capturer owned by `source`. FrameGeneratorCapturer* capturer; - rtc::scoped_refptr source; - rtc::scoped_refptr track; - rtc::scoped_refptr sender; + scoped_refptr source; + scoped_refptr track; + scoped_refptr sender; }; PeerScenarioClient( NetworkEmulationManager* net, - rtc::Thread* signaling_thread, + Thread* signaling_thread, std::unique_ptr log_writer_factory, Config config); @@ -123,19 +125,18 @@ class PeerScenarioClient { RTC_DCHECK_RUN_ON(signaling_thread_); return peer_connection_.get(); } - rtc::Thread* thread() { return signaling_thread_; } + Thread* thread() { return signaling_thread_; } Clock* clock() { return Clock::GetRealTimeClock(); } // Returns the endpoint created from the EmulatedEndpointConfig with the same // index in PeerScenarioClient::config. 
EmulatedEndpoint* endpoint(int index = 0); - AudioSendTrack CreateAudio(std::string track_id, - cricket::AudioOptions options); + AudioSendTrack CreateAudio(std::string track_id, AudioOptions options); VideoSendTrack CreateVideo(std::string track_id, VideoSendTrackConfig config); void AddVideoReceiveSink(std::string track_id, - rtc::VideoSinkInterface* video_sink); + VideoSinkInterface* video_sink); CallbackHandlers* handlers() { return &handlers_; } @@ -147,6 +148,7 @@ class PeerScenarioClient { std::function munge_offer, std::function offer_handler); void SetSdpOfferAndGetAnswer(std::string remote_offer, + std::function remote_description_set, std::function answer_handler); void SetSdpAnswer( std::string remote_answer, @@ -159,18 +161,18 @@ class PeerScenarioClient { private: const std::map endpoints_; TaskQueueFactory* const task_queue_factory_; - rtc::Thread* const signaling_thread_; + Thread* const signaling_thread_; const std::unique_ptr log_writer_factory_; - const std::unique_ptr worker_thread_; + const std::unique_ptr worker_thread_; CallbackHandlers handlers_ RTC_GUARDED_BY(signaling_thread_); const std::unique_ptr observer_; - std::map*>> + std::map*>> track_id_to_video_sinks_ RTC_GUARDED_BY(signaling_thread_); std::list> pending_ice_candidates_ RTC_GUARDED_BY(signaling_thread_); - rtc::scoped_refptr pc_factory_; - rtc::scoped_refptr peer_connection_ + scoped_refptr pc_factory_; + scoped_refptr peer_connection_ RTC_GUARDED_BY(signaling_thread_); }; diff --git a/test/peer_scenario/scenario_connection.cc b/test/peer_scenario/scenario_connection.cc index 66eca275d1..9e35696ad1 100644 --- a/test/peer_scenario/scenario_connection.cc +++ b/test/peer_scenario/scenario_connection.cc @@ -9,15 +9,50 @@ */ #include "test/peer_scenario/scenario_connection.h" -#include "absl/memory/memory.h" +#include +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/candidate.h" +#include "api/environment/environment.h" +#include "api/jsep.h" +#include "api/peer_connection_interface.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/test/network_emulation_manager.h" +#include "api/transport/data_channel_transport_interface.h" +#include "api/transport/enums.h" +#include "call/payload_type_picker.h" +#include "call/rtp_demuxer.h" +#include "call/rtp_packet_sink_interface.h" #include "media/base/rtp_utils.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "p2p/base/basic_packet_socket_factory.h" +#include "p2p/base/p2p_constants.h" +#include "p2p/base/port_allocator.h" +#include "p2p/base/transport_description.h" #include "p2p/client/basic_port_allocator.h" -#include "pc/channel.h" +#include "p2p/dtls/dtls_transport_internal.h" +#include "pc/dtls_transport.h" #include "pc/jsep_transport_controller.h" #include "pc/rtp_transport_internal.h" #include "pc/session_description.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/crypto_random.h" +#include "rtc_base/network.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/ssl_fingerprint.h" +#include "rtc_base/ssl_identity.h" #include "rtc_base/task_queue_for_test.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" +#include "test/network/network_emulation_manager.h" namespace webrtc { class ScenarioIceConnectionImpl : public ScenarioIceConnection, @@ -25,18 +60,19 @@ class 
ScenarioIceConnectionImpl : public ScenarioIceConnection, private JsepTransportController::Observer, private RtpPacketSinkInterface { public: - ScenarioIceConnectionImpl(test::NetworkEmulationManagerImpl* net, + ScenarioIceConnectionImpl(const Environment& env, + test::NetworkEmulationManagerImpl* net, IceConnectionObserver* observer); ~ScenarioIceConnectionImpl() override; - void SendRtpPacket(rtc::ArrayView packet_view) override; - void SendRtcpPacket(rtc::ArrayView packet_view) override; + void SendRtpPacket(ArrayView packet_view) override; + void SendRtcpPacket(ArrayView packet_view) override; void SetRemoteSdp(SdpType type, const std::string& remote_sdp) override; void SetLocalSdp(SdpType type, const std::string& local_sdp) override; EmulatedEndpoint* endpoint() override { return endpoint_; } - const cricket::TransportDescription& transport_description() const override { + const TransportDescription& transport_description() const override { return transport_description_; } @@ -45,24 +81,27 @@ class ScenarioIceConnectionImpl : public ScenarioIceConnection, bool OnTransportChanged( const std::string& mid, RtpTransportInternal* rtp_transport, - rtc::scoped_refptr dtls_transport, + scoped_refptr dtls_transport, DataChannelTransportInterface* data_channel_transport) override; void OnRtpPacket(const RtpPacketReceived& packet) override; void OnCandidates(const std::string& mid, - const std::vector& candidates); + const std::vector& candidates); IceConnectionObserver* const observer_; EmulatedEndpoint* const endpoint_; EmulatedNetworkManagerInterface* const manager_; - rtc::Thread* const signaling_thread_; - rtc::Thread* const network_thread_; - rtc::scoped_refptr const certificate_ + Thread* const signaling_thread_; + Thread* const network_thread_; + scoped_refptr const certificate_ RTC_GUARDED_BY(network_thread_); - cricket::TransportDescription const transport_description_ + TransportDescription const transport_description_ RTC_GUARDED_BY(signaling_thread_); - std::unique_ptr port_allocator_ + std::unique_ptr network_manager_; + BasicPacketSocketFactory packet_socket_factory_; + std::unique_ptr port_allocator_ RTC_GUARDED_BY(network_thread_); + PayloadTypePicker payload_type_picker_; std::unique_ptr jsep_controller_; RtpTransportInternal* rtp_transport_ RTC_GUARDED_BY(network_thread_) = nullptr; @@ -73,40 +112,45 @@ class ScenarioIceConnectionImpl : public ScenarioIceConnection, }; std::unique_ptr ScenarioIceConnection::Create( + const Environment& env, webrtc::test::NetworkEmulationManagerImpl* net, IceConnectionObserver* observer) { - return std::make_unique(net, observer); + return std::make_unique(env, net, observer); } ScenarioIceConnectionImpl::ScenarioIceConnectionImpl( + const Environment& env, test::NetworkEmulationManagerImpl* net, IceConnectionObserver* observer) : observer_(observer), endpoint_(net->CreateEndpoint(EmulatedEndpointConfig())), manager_(net->CreateEmulatedNetworkManagerInterface({endpoint_})), - signaling_thread_(rtc::Thread::Current()), + signaling_thread_(Thread::Current()), network_thread_(manager_->network_thread()), - certificate_(rtc::RTCCertificate::Create( - rtc::SSLIdentity::Create("", ::rtc::KT_DEFAULT))), + certificate_(RTCCertificate::Create(SSLIdentity::Create("", KT_DEFAULT))), transport_description_( /*transport_options*/ {}, - rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), - rtc::CreateRandomString(cricket::ICE_PWD_LENGTH), - cricket::IceMode::ICEMODE_FULL, - cricket::ConnectionRole::CONNECTIONROLE_PASSIVE, - 
rtc::SSLFingerprint::CreateFromCertificate(*certificate_.get()) - .get()), + CreateRandomString(ICE_UFRAG_LENGTH), + CreateRandomString(ICE_PWD_LENGTH), + IceMode::ICEMODE_FULL, + ConnectionRole::CONNECTIONROLE_PASSIVE, + SSLFingerprint::CreateFromCertificate(*certificate_.get()).get()), + network_manager_(manager_->ReleaseNetworkManager()), + packet_socket_factory_(manager_->socket_factory()), port_allocator_( - new cricket::BasicPortAllocator(manager_->network_manager(), - manager_->packet_socket_factory())), + std::make_unique(env, + network_manager_.get(), + &packet_socket_factory_)), jsep_controller_( - new JsepTransportController(network_thread_, + new JsepTransportController(env, + network_thread_, port_allocator_.get(), /*async_resolver_factory*/ nullptr, + payload_type_picker_, CreateJsepConfig())) { SendTask(network_thread_, [this] { RTC_DCHECK_RUN_ON(network_thread_); - uint32_t flags = cricket::PORTALLOCATOR_DISABLE_TCP; + uint32_t flags = PORTALLOCATOR_DISABLE_TCP; port_allocator_->set_flags(port_allocator_->flags() | flags); port_allocator_->Initialize(); RTC_CHECK(port_allocator_->SetConfiguration(/*stun_servers*/ {}, @@ -130,36 +174,35 @@ JsepTransportController::Config ScenarioIceConnectionImpl::CreateJsepConfig() { config.transport_observer = this; config.bundle_policy = PeerConnectionInterface::BundlePolicy::kBundlePolicyMaxBundle; - config.rtcp_handler = [this](const rtc::CopyOnWriteBuffer& packet, + config.rtcp_handler = [this](const CopyOnWriteBuffer& packet, int64_t packet_time_us) { RTC_DCHECK_RUN_ON(network_thread_); observer_->OnPacketReceived(packet); }; - config.field_trials = &field_trials; return config; } void ScenarioIceConnectionImpl::SendRtpPacket( - rtc::ArrayView packet_view) { - rtc::CopyOnWriteBuffer packet(packet_view.data(), packet_view.size(), - ::cricket::kMaxRtpPacketLen); + ArrayView packet_view) { + CopyOnWriteBuffer packet(packet_view.data(), packet_view.size(), + kMaxRtpPacketLen); network_thread_->PostTask([this, packet = std::move(packet)]() mutable { RTC_DCHECK_RUN_ON(network_thread_); if (rtp_transport_ != nullptr) - rtp_transport_->SendRtpPacket(&packet, rtc::PacketOptions(), - cricket::PF_SRTP_BYPASS); + rtp_transport_->SendRtpPacket(&packet, AsyncSocketPacketOptions(), + PF_SRTP_BYPASS); }); } void ScenarioIceConnectionImpl::SendRtcpPacket( - rtc::ArrayView packet_view) { - rtc::CopyOnWriteBuffer packet(packet_view.data(), packet_view.size(), - ::cricket::kMaxRtpPacketLen); + ArrayView packet_view) { + CopyOnWriteBuffer packet(packet_view.data(), packet_view.size(), + kMaxRtpPacketLen); network_thread_->PostTask([this, packet = std::move(packet)]() mutable { RTC_DCHECK_RUN_ON(network_thread_); if (rtp_transport_ != nullptr) - rtp_transport_->SendRtcpPacket(&packet, rtc::PacketOptions(), - cricket::PF_SRTP_BYPASS); + rtp_transport_->SendRtcpPacket(&packet, AsyncSocketPacketOptions(), + PF_SRTP_BYPASS); }); } void ScenarioIceConnectionImpl::SetRemoteSdp(SdpType type, @@ -168,26 +211,19 @@ void ScenarioIceConnectionImpl::SetRemoteSdp(SdpType type, remote_description_ = webrtc::CreateSessionDescription(type, remote_sdp); jsep_controller_->SubscribeIceCandidateGathered( [this](const std::string& transport, - const std::vector& candidate) { + const std::vector& candidate) { ScenarioIceConnectionImpl::OnCandidates(transport, candidate); }); auto res = jsep_controller_->SetRemoteDescription( - remote_description_->GetType(), remote_description_->description()); + remote_description_->GetType(), + local_description_ ? 
local_description_->description() : nullptr, + remote_description_->description()); RTC_CHECK(res.ok()) << res.message(); RtpDemuxerCriteria criteria; for (const auto& content : remote_description_->description()->contents()) { - if (content.media_description()->as_audio()) { - for (const auto& codec : - content.media_description()->as_audio()->codecs()) { - criteria.payload_types().insert(codec.id); - } - } - if (content.media_description()->as_video()) { - for (const auto& codec : - content.media_description()->as_video()->codecs()) { - criteria.payload_types().insert(codec.id); - } + for (const auto& codec : content.media_description()->codecs()) { + criteria.payload_types().insert(codec.id); } } @@ -203,7 +239,8 @@ void ScenarioIceConnectionImpl::SetLocalSdp(SdpType type, RTC_DCHECK_RUN_ON(signaling_thread_); local_description_ = webrtc::CreateSessionDescription(type, local_sdp); auto res = jsep_controller_->SetLocalDescription( - local_description_->GetType(), local_description_->description()); + local_description_->GetType(), local_description_->description(), + remote_description_ ? remote_description_->description() : nullptr); RTC_CHECK(res.ok()) << res.message(); jsep_controller_->MaybeStartGathering(); } @@ -211,7 +248,7 @@ void ScenarioIceConnectionImpl::SetLocalSdp(SdpType type, bool ScenarioIceConnectionImpl::OnTransportChanged( const std::string& mid, RtpTransportInternal* rtp_transport, - rtc::scoped_refptr dtls_transport, + scoped_refptr dtls_transport, DataChannelTransportInterface* data_channel_transport) { RTC_DCHECK_RUN_ON(network_thread_); if (rtp_transport == nullptr) { @@ -234,7 +271,7 @@ void ScenarioIceConnectionImpl::OnRtpPacket(const RtpPacketReceived& packet) { void ScenarioIceConnectionImpl::OnCandidates( const std::string& mid, - const std::vector& candidates) { + const std::vector& candidates) { RTC_DCHECK_RUN_ON(signaling_thread_); observer_->OnIceCandidates(mid, candidates); } diff --git a/test/peer_scenario/scenario_connection.h b/test/peer_scenario/scenario_connection.h index e8cef527c5..3b00542c27 100644 --- a/test/peer_scenario/scenario_connection.h +++ b/test/peer_scenario/scenario_connection.h @@ -16,6 +16,7 @@ #include #include "api/candidate.h" +#include "api/environment/environment.h" #include "api/jsep.h" #include "p2p/base/transport_description.h" #include "test/network/network_emulation_manager.h" @@ -31,32 +32,31 @@ class ScenarioIceConnection { class IceConnectionObserver { public: // Called on network thread. - virtual void OnPacketReceived(rtc::CopyOnWriteBuffer packet) = 0; + virtual void OnPacketReceived(CopyOnWriteBuffer packet) = 0; // Called on signaling thread. - virtual void OnIceCandidates( - const std::string& mid, - const std::vector& candidates) = 0; + virtual void OnIceCandidates(const std::string& mid, + const std::vector& candidates) = 0; protected: ~IceConnectionObserver() = default; }; static std::unique_ptr Create( + const Environment& env, test::NetworkEmulationManagerImpl* net, IceConnectionObserver* observer); virtual ~ScenarioIceConnection() = default; // Posts tasks to send packets to network thread. - virtual void SendRtpPacket(rtc::ArrayView packet_view) = 0; - virtual void SendRtcpPacket(rtc::ArrayView packet_view) = 0; + virtual void SendRtpPacket(ArrayView packet_view) = 0; + virtual void SendRtcpPacket(ArrayView packet_view) = 0; // Used for ICE configuration, called on signaling thread. 
virtual void SetRemoteSdp(SdpType type, const std::string& remote_sdp) = 0; virtual void SetLocalSdp(SdpType type, const std::string& local_sdp) = 0; virtual EmulatedEndpoint* endpoint() = 0; - virtual const cricket::TransportDescription& transport_description() - const = 0; + virtual const TransportDescription& transport_description() const = 0; webrtc::test::ScopedKeyValueConfig field_trials; }; diff --git a/test/peer_scenario/signaling_route.cc b/test/peer_scenario/signaling_route.cc index eeec7c8657..8688c1abd8 100644 --- a/test/peer_scenario/signaling_route.cc +++ b/test/peer_scenario/signaling_route.cc @@ -59,6 +59,7 @@ void StartSdpNegotiation( CrossTrafficRoute* ret_route, std::function munge_offer, std::function modify_offer, + std::function callee_remote_description_set, std::function exchange_finished) { caller->CreateAndSetSdp(munge_offer, [=](std::string sdp_offer) { if (modify_offer) { @@ -67,11 +68,14 @@ void StartSdpNegotiation( RTC_CHECK(offer->ToString(&sdp_offer)); } send_route->NetworkDelayedAction(kSdpPacketSize, [=] { - callee->SetSdpOfferAndGetAnswer(sdp_offer, [=](std::string answer) { - ret_route->NetworkDelayedAction(kSdpPacketSize, [=] { - caller->SetSdpAnswer(std::move(answer), std::move(exchange_finished)); - }); - }); + callee->SetSdpOfferAndGetAnswer( + sdp_offer, std::move(callee_remote_description_set), + [=](std::string answer) { + ret_route->NetworkDelayedAction(kSdpPacketSize, [=] { + caller->SetSdpAnswer(std::move(answer), + std::move(exchange_finished)); + }); + }); }); }); } @@ -91,23 +95,40 @@ void SignalingRoute::StartIceSignaling() { StartIceSignalingForRoute(callee_, caller_, ret_route_); } +void SignalingRoute::NegotiateSdp( + std::function munge_offer, + std::function modify_offer, + std::function callee_remote_description_set, + std::function + exchange_finished) { + StartSdpNegotiation(caller_, callee_, send_route_, ret_route_, munge_offer, + modify_offer, callee_remote_description_set, + exchange_finished); +} + void SignalingRoute::NegotiateSdp( std::function munge_offer, std::function modify_offer, std::function exchange_finished) { - StartSdpNegotiation(caller_, callee_, send_route_, ret_route_, munge_offer, - modify_offer, exchange_finished); + NegotiateSdp(munge_offer, modify_offer, {}, exchange_finished); } void SignalingRoute::NegotiateSdp( std::function modify_offer, std::function exchange_finished) { - NegotiateSdp({}, modify_offer, exchange_finished); + NegotiateSdp({}, modify_offer, {}, exchange_finished); +} + +void SignalingRoute::NegotiateSdp( + std::function remote_description_set, + std::function + exchange_finished) { + NegotiateSdp({}, {}, remote_description_set, exchange_finished); } void SignalingRoute::NegotiateSdp( std::function exchange_finished) { - NegotiateSdp({}, {}, exchange_finished); + NegotiateSdp({}, {}, {}, exchange_finished); } } // namespace test diff --git a/test/peer_scenario/signaling_route.h b/test/peer_scenario/signaling_route.h index a95ae5c9f7..9b317d2552 100644 --- a/test/peer_scenario/signaling_route.h +++ b/test/peer_scenario/signaling_route.h @@ -35,9 +35,18 @@ class SignalingRoute { // The `munge_offer` callback is used to modify an offer between its creation // and set local description. This behavior is forbidden according to the spec // but available here in order to allow test coverage on corner cases. - // The `exchange_finished` callback is called with the answer produced after - // SDP negotations has completed. 
+ // `callee_remote_description_set` is invoked when callee has applied the + // offer but not yet created an answer. The purpose is to allow tests to + // modify transceivers created from the offer. The `exchange_finished` + // callback is called with the answer produced after SDP negotations has + // completed. // TODO(srte): Handle lossy links. + void NegotiateSdp( + std::function munge_offer, + std::function modify_offer, + std::function callee_remote_description_set, + std::function + exchange_finished); void NegotiateSdp( std::function munge_offer, std::function modify_offer, @@ -47,6 +56,10 @@ class SignalingRoute { std::function modify_offer, std::function exchange_finished); + void NegotiateSdp( + std::function remote_description_set, + std::function + exchange_finished); void NegotiateSdp( std::function exchange_finished); diff --git a/test/peer_scenario/tests/BUILD.gn b/test/peer_scenario/tests/BUILD.gn index fb2948922a..2ad8c8572a 100644 --- a/test/peer_scenario/tests/BUILD.gn +++ b/test/peer_scenario/tests/BUILD.gn @@ -13,23 +13,40 @@ if (rtc_include_tests) { testonly = true sources = [ "bwe_ramp_up_test.cc", + "l4s_test.cc", "peer_scenario_quality_test.cc", "remote_estimate_test.cc", "unsignaled_stream_test.cc", ] deps = [ "..:peer_scenario", + "../../:create_frame_generator_capturer", "../../:field_trial", "../../:test_support", + "../../../api:libjingle_peerconnection_api", + "../../../api:make_ref_counted", "../../../api:rtc_stats_api", + "../../../api:rtp_parameters", + "../../../api:rtp_sender_interface", + "../../../api:rtp_transceiver_direction", + "../../../api:scoped_refptr", + "../../../api/test/network_emulation", + "../../../api/transport:ecn_marking", "../../../api/units:data_rate", "../../../api/units:time_delta", - "../../../media:rtc_media_base", "../../../media:stream_params", "../../../modules/rtp_rtcp:rtp_rtcp_format", "../../../pc:media_session", "../../../pc:pc_test_utils", "../../../pc:session_description", + "../../../rtc_base:checks", + "../../../rtc_base:logging", ] + if (rtc_enable_protobuf) { + deps += [ + "../../../api/test/network_emulation:network_config_schedule_proto", + "../../../api/test/network_emulation:schedulable_network_node_builder", + ] + } } } diff --git a/test/peer_scenario/tests/bwe_ramp_up_test.cc b/test/peer_scenario/tests/bwe_ramp_up_test.cc index a7a17bbfd1..ddd9d15bad 100644 --- a/test/peer_scenario/tests/bwe_ramp_up_test.cc +++ b/test/peer_scenario/tests/bwe_ramp_up_test.cc @@ -8,29 +8,52 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include +#include +#include + +#include "api/jsep.h" +#include "api/make_ref_counted.h" +#include "api/media_types.h" +#include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_direction.h" +#include "api/scoped_refptr.h" +#include "api/stats/rtc_stats_report.h" #include "api/stats/rtcstats_objects.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_util.h" #include "pc/media_session.h" #include "pc/test/mock_peer_connection_observers.h" +#include "rtc_base/checks.h" +#include "test/create_frame_generator_capturer.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/peer_scenario/peer_scenario.h" #include "test/peer_scenario/peer_scenario_client.h" +#if WEBRTC_ENABLE_PROTOBUF +#include "api/test/network_emulation/network_config_schedule.pb.h" +#include "api/test/network_emulation/schedulable_network_node_builder.h" +#endif + namespace webrtc { namespace test { using ::testing::SizeIs; +using ::testing::Test; +using ::testing::ValuesIn; +using ::testing::WithParamInterface; -rtc::scoped_refptr GetStatsAndProcess( +scoped_refptr GetStatsAndProcess( PeerScenario& s, PeerScenarioClient* client) { auto stats_collector = - rtc::make_ref_counted(); + make_ref_counted(); client->pc()->GetStats(stats_collector.get()); s.ProcessMessages(TimeDelta::Millis(0)); RTC_CHECK(stats_collector->called()); @@ -38,7 +61,7 @@ rtc::scoped_refptr GetStatsAndProcess( } DataRate GetAvailableSendBitrate( - const rtc::scoped_refptr& report) { + const scoped_refptr& report) { auto stats = report->GetStatsOfType(); if (stats.empty()) { return DataRate::Zero(); @@ -46,6 +69,55 @@ DataRate GetAvailableSendBitrate( return DataRate::BitsPerSec(*stats[0]->available_outgoing_bitrate); } +#if WEBRTC_ENABLE_PROTOBUF +TEST(BweRampupTest, BweRampUpWhenCapacityIncrease) { + PeerScenario s(*test_info_); + + PeerScenarioClient* caller = s.CreateClient({}); + PeerScenarioClient* callee = s.CreateClient({}); + + network_behaviour::NetworkConfigSchedule schedule; + auto initial_config = schedule.add_item(); + initial_config->set_link_capacity_kbps(500); + auto updated_capacity = schedule.add_item(); + updated_capacity->set_time_since_first_sent_packet_ms(3000); + updated_capacity->set_link_capacity_kbps(3000); + SchedulableNetworkNodeBuilder schedulable_builder(*s.net(), + std::move(schedule)); + + auto caller_node = schedulable_builder.Build(/*random_seed=*/1); + auto callee_node = s.net()->NodeBuilder().capacity_kbps(5000).Build().node; + s.net()->CreateRoute(caller->endpoint(), {caller_node}, callee->endpoint()); + s.net()->CreateRoute(callee->endpoint(), {callee_node}, caller->endpoint()); + + FrameGeneratorCapturerConfig::SquaresVideo video_resolution = { + .framerate = 30, .width = 1280, .height = 720}; + PeerScenarioClient::VideoSendTrack track = caller->CreateVideo( + "VIDEO", {.generator = {.squares_video = video_resolution}}); + + auto signaling = + s.ConnectSignaling(caller, callee, {caller_node}, {callee_node}); + + signaling.StartIceSignaling(); + + std::atomic offer_exchange_done(false); + signaling.NegotiateSdp([&](const SessionDescriptionInterface& answer) { + offer_exchange_done = true; + }); + // Wait for SDP negotiation. 
+ s.WaitAndProcess(&offer_exchange_done); + + s.ProcessMessages(TimeDelta::Seconds(5)); + DataRate bwe_before_capacity_increase = + GetAvailableSendBitrate(GetStatsAndProcess(s, caller)); + EXPECT_GT(bwe_before_capacity_increase.kbps(), 300); + EXPECT_LT(bwe_before_capacity_increase.kbps(), 650); + s.ProcessMessages(TimeDelta::Seconds(15)); + EXPECT_GT(GetAvailableSendBitrate(GetStatsAndProcess(s, caller)).kbps(), + 1000); +} +#endif // WEBRTC_ENABLE_PROTOBUF + // Test that caller BWE can rampup even if callee can not demux incoming RTP // packets. TEST(BweRampupTest, RampUpWithUndemuxableRtpPackets) { @@ -75,11 +147,11 @@ TEST(BweRampupTest, RampUpWithUndemuxableRtpPackets) { signaling.NegotiateSdp( [&](SessionDescriptionInterface* offer) { RtpHeaderExtensionMap extension_map( - cricket::GetFirstVideoContentDescription(offer->description()) + GetFirstVideoContentDescription(offer->description()) ->rtp_header_extensions()); ASSERT_TRUE(extension_map.IsRegistered(kRtpExtensionMid)); const std::string video_mid = - cricket::GetFirstVideoContent(offer->description())->mid(); + GetFirstVideoContent(offer->description())->mid(); send_node->router()->SetFilter([extension_map, video_mid, &send_node]( const EmulatedIpPacket& packet) { if (IsRtpPacket(packet.data)) { @@ -120,9 +192,165 @@ TEST(BweRampupTest, RampUpWithUndemuxableRtpPackets) { DataRate final_bwe = GetAvailableSendBitrate(GetStatsAndProcess(s, caller)); // Ensure BWE has increased from the initial BWE. BWE will not increase unless - // RTCP feedback is recevied. The increase is just an arbitrary value to + // RTCP feedback is received. The increase is just an arbitrary value to // ensure BWE has increased beyond noise levels. EXPECT_GT(final_bwe, initial_bwe + DataRate::KilobitsPerSec(345)); } + +struct InitialProbeTestParams { + DataRate network_capacity; + DataRate expected_bwe_min; +}; +class BweRampupWithInitialProbeTest + : public Test, + public WithParamInterface {}; + +INSTANTIATE_TEST_SUITE_P( + BweRampupWithInitialProbeTest, + BweRampupWithInitialProbeTest, + ValuesIn( + {{ + .network_capacity = DataRate::KilobitsPerSec(3000), + .expected_bwe_min = DataRate::KilobitsPerSec(2500), + }, + { + .network_capacity = webrtc::DataRate::KilobitsPerSec(500), + .expected_bwe_min = webrtc::DataRate::KilobitsPerSec(400), + }})); + +class MockRtpSenderObserver : public RtpSenderObserverInterface { + public: + MOCK_METHOD(void, OnFirstPacketSent, (webrtc::MediaType)); +}; + +// Test that caller and callee BWE rampup even if no media packets are sent. +// - BandWidthEstimationSettings.allow_probe_without_media must be set. +// - A Video RtpTransceiver with RTX support needs to be negotiated. 
+TEST_P(BweRampupWithInitialProbeTest, BweRampUpBothDirectionsWithoutMedia) { + PeerScenario s(*::testing::UnitTest::GetInstance()->current_test_info()); + InitialProbeTestParams test_params = GetParam(); + + PeerScenarioClient* caller = s.CreateClient({}); + PeerScenarioClient* callee = s.CreateClient({}); + + auto transceiver = caller->pc()->AddTransceiver(webrtc::MediaType::VIDEO); + ASSERT_TRUE(transceiver.error().ok()); + + MockRtpSenderObserver observer; + EXPECT_CALL(observer, OnFirstPacketSent).Times(0); + transceiver.value()->sender()->SetObserver(&observer); + + caller->pc()->ReconfigureBandwidthEstimation( + {.allow_probe_without_media = true}); + callee->pc()->ReconfigureBandwidthEstimation( + {.allow_probe_without_media = true}); + + auto node_builder = + s.net()->NodeBuilder().capacity_kbps(test_params.network_capacity.kbps()); + auto caller_node = node_builder.Build().node; + auto callee_node = node_builder.Build().node; + s.net()->CreateRoute(caller->endpoint(), {caller_node}, callee->endpoint()); + s.net()->CreateRoute(callee->endpoint(), {callee_node}, caller->endpoint()); + + auto signaling = + s.ConnectSignaling(caller, callee, {caller_node}, {callee_node}); + signaling.StartIceSignaling(); + + std::atomic offer_exchange_done(false); + signaling.NegotiateSdp( + [&]() { + // When remote description has been set, a transceiver is created. + // Set the direction to sendrecv so that it can be used for BWE probing + // from callee -> caller. + ASSERT_THAT(callee->pc()->GetTransceivers(), SizeIs(1)); + ASSERT_TRUE( + callee->pc() + ->GetTransceivers()[0] + ->SetDirectionWithError(RtpTransceiverDirection::kSendRecv) + .ok()); + }, + [&](const SessionDescriptionInterface& answer) { + offer_exchange_done = true; + }); + // Wait for SDP negotiation. + s.WaitAndProcess(&offer_exchange_done); + + // Test that 1s after the offer/answer exchange finishes, we have a BWE estimate, + // even though no video frames have been sent. + s.ProcessMessages(TimeDelta::Seconds(2)); + + auto callee_inbound_stats = + GetStatsAndProcess(s, callee)->GetStatsOfType(); + ASSERT_THAT(callee_inbound_stats, SizeIs(1)); + ASSERT_EQ(*callee_inbound_stats[0]->frames_received, 0u); + auto caller_inbound_stats = + GetStatsAndProcess(s, caller)->GetStatsOfType(); + ASSERT_THAT(caller_inbound_stats, SizeIs(1)); + ASSERT_EQ(*caller_inbound_stats[0]->frames_received, 0u); + + DataRate caller_bwe = GetAvailableSendBitrate(GetStatsAndProcess(s, caller)); + EXPECT_GT(caller_bwe.kbps(), test_params.expected_bwe_min.kbps()); + EXPECT_LE(caller_bwe.kbps(), test_params.network_capacity.kbps()); + DataRate callee_bwe = GetAvailableSendBitrate(GetStatsAndProcess(s, callee)); + EXPECT_GT(callee_bwe.kbps(), test_params.expected_bwe_min.kbps()); + EXPECT_LE(callee_bwe.kbps(), test_params.network_capacity.kbps()); +} + +// Test that we can reconfigure bandwidth estimation and send new BWE probes. +// In this test, the camera is stopped, and some time later, the app wants to get a +// new BWE estimate. 
+TEST(BweRampupTest, CanReconfigureBweAfterStopingVideo) { + PeerScenario s(*::testing::UnitTest::GetInstance()->current_test_info()); + PeerScenarioClient* caller = s.CreateClient({}); + PeerScenarioClient* callee = s.CreateClient({}); + + auto node_builder = s.net()->NodeBuilder().capacity_kbps(1000); + auto caller_node = node_builder.Build().node; + auto callee_node = node_builder.Build().node; + s.net()->CreateRoute(caller->endpoint(), {caller_node}, callee->endpoint()); + s.net()->CreateRoute(callee->endpoint(), {callee_node}, caller->endpoint()); + + PeerScenarioClient::VideoSendTrack track = caller->CreateVideo("VIDEO", {}); + + auto signaling = + s.ConnectSignaling(caller, callee, {caller_node}, {callee_node}); + + signaling.StartIceSignaling(); + + std::atomic offer_exchange_done(false); + signaling.NegotiateSdp([&](const SessionDescriptionInterface& answer) { + offer_exchange_done = true; + }); + // Wait for SDP negotiation. + s.WaitAndProcess(&offer_exchange_done); + + // Send a TCP messages to the receiver using the same downlink node. + // This is done just to force a lower BWE than the link capacity. + webrtc::TcpMessageRoute* tcp_route = s.net()->CreateTcpRoute( + s.net()->CreateRoute({caller_node}), s.net()->CreateRoute({callee_node})); + DataRate bwe_before_restart = DataRate::Zero(); + + std::atomic message_delivered(false); + tcp_route->SendMessage( + /*size=*/5'00'000, + /*on_received=*/[&]() { message_delivered = true; }); + s.WaitAndProcess(&message_delivered); + bwe_before_restart = GetAvailableSendBitrate(GetStatsAndProcess(s, caller)); + + // Camera is stopped. + track.capturer->Stop(); + s.ProcessMessages(TimeDelta::Seconds(2)); + + // Some time later, the app is interested in restarting BWE since we may want + // to resume video eventually. + caller->pc()->ReconfigureBandwidthEstimation( + {.allow_probe_without_media = true}); + s.ProcessMessages(TimeDelta::Seconds(1)); + DataRate bwe_after_restart = + GetAvailableSendBitrate(GetStatsAndProcess(s, caller)); + EXPECT_GT(bwe_after_restart.kbps(), bwe_before_restart.kbps() + 300); + EXPECT_LT(bwe_after_restart.kbps(), 1000); +} + } // namespace test } // namespace webrtc diff --git a/test/peer_scenario/tests/l4s_test.cc b/test/peer_scenario/tests/l4s_test.cc new file mode 100644 index 0000000000..3ac13e0005 --- /dev/null +++ b/test/peer_scenario/tests/l4s_test.cc @@ -0,0 +1,371 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include + +#include "api/stats/rtcstats_objects.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.h" +#include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" +#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" +#include "modules/rtp_rtcp/source/rtp_util.h" +#include "pc/test/mock_peer_connection_observers.h" +#include "test/create_frame_generator_capturer.h" +#include "test/field_trial.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/peer_scenario/peer_scenario.h" +#include "test/peer_scenario/peer_scenario_client.h" + +namespace webrtc { +namespace { + +using test::PeerScenario; +using test::PeerScenarioClient; +using ::testing::HasSubstr; + +// Helper class used for counting RTCP feedback messages. +class RtcpFeedbackCounter { + public: + void Count(const EmulatedIpPacket& packet) { + if (!IsRtcpPacket(packet.data)) { + return; + } + rtcp::CommonHeader header; + ASSERT_TRUE(header.Parse(packet.data.cdata(), packet.data.size())); + if (header.type() != rtcp::Rtpfb::kPacketType) { + return; + } + if (header.fmt() == rtcp::CongestionControlFeedback::kFeedbackMessageType) { + ++congestion_control_feedback_; + rtcp::CongestionControlFeedback fb; + ASSERT_TRUE(fb.Parse(header)); + for (const rtcp::CongestionControlFeedback::PacketInfo& info : + fb.packets()) { + switch (info.ecn) { + case EcnMarking::kNotEct: + ++not_ect_; + break; + case EcnMarking::kEct0: + // Not used. + RTC_CHECK_NOTREACHED(); + break; + case EcnMarking::kEct1: + // ECN-Capable Transport + ++ect1_; + break; + case EcnMarking::kCe: + ++ce_; + } + } + } + if (header.fmt() == rtcp::TransportFeedback::kFeedbackMessageType) { + ++transport_sequence_number_feedback_; + } + } + + int FeedbackAccordingToRfc8888() const { + return congestion_control_feedback_; + } + int FeedbackAccordingToTransportCc() const { + return transport_sequence_number_feedback_; + } + int not_ect() const { return not_ect_; } + int ect1() const { return ect1_; } + int ce() const { return ce_; } + + private: + int congestion_control_feedback_ = 0; + int transport_sequence_number_feedback_ = 0; + int not_ect_ = 0; + int ect1_ = 0; + int ce_ = 0; +}; + +scoped_refptr GetStatsAndProcess( + PeerScenario& s, + PeerScenarioClient* client) { + auto stats_collector = + make_ref_counted(); + client->pc()->GetStats(stats_collector.get()); + s.ProcessMessages(TimeDelta::Millis(0)); + RTC_CHECK(stats_collector->called()); + return stats_collector->report(); +} + +DataRate GetAvailableSendBitrate( + const scoped_refptr& report) { + auto stats = report->GetStatsOfType(); + if (stats.empty()) { + return DataRate::Zero(); + } + return DataRate::BitsPerSec(*stats[0]->available_outgoing_bitrate); +} + +TEST(L4STest, NegotiateAndUseCcfbIfEnabled) { + test::ScopedFieldTrials trials( + "WebRTC-RFC8888CongestionControlFeedback/Enabled/"); + PeerScenario s(*test_info_); + + PeerScenarioClient::Config config = PeerScenarioClient::Config(); + config.disable_encryption = true; + PeerScenarioClient* caller = s.CreateClient(config); + PeerScenarioClient* callee = s.CreateClient(config); + + // Create network path from caller to callee. 
+ auto send_node = s.net()->NodeBuilder().Build().node; + auto ret_node = s.net()->NodeBuilder().Build().node; + s.net()->CreateRoute(caller->endpoint(), {send_node}, callee->endpoint()); + s.net()->CreateRoute(callee->endpoint(), {ret_node}, caller->endpoint()); + + RtcpFeedbackCounter send_node_feedback_counter; + send_node->router()->SetWatcher([&](const EmulatedIpPacket& packet) { + send_node_feedback_counter.Count(packet); + }); + RtcpFeedbackCounter ret_node_feedback_counter; + ret_node->router()->SetWatcher([&](const EmulatedIpPacket& packet) { + ret_node_feedback_counter.Count(packet); + }); + + auto signaling = s.ConnectSignaling(caller, callee, {send_node}, {ret_node}); + PeerScenarioClient::VideoSendTrackConfig video_conf; + video_conf.generator.squares_video->framerate = 15; + + caller->CreateVideo("VIDEO_1", video_conf); + callee->CreateVideo("VIDEO_2", video_conf); + + signaling.StartIceSignaling(); + + std::atomic offer_exchange_done(false); + signaling.NegotiateSdp( + [&](SessionDescriptionInterface* offer) { + std::string offer_str = absl::StrCat(*offer); + // Check that the offer contains both congestion control feedback + // according to RFC 8888, and transport-cc and the header extension + // http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01 + EXPECT_THAT(offer_str, HasSubstr("a=rtcp-fb:* ack ccfb\r\n")); + EXPECT_THAT(offer_str, HasSubstr("transport-cc")); + EXPECT_THAT( + offer_str, + HasSubstr("http://www.ietf.org/id/" + "draft-holmer-rmcat-transport-wide-cc-extensions")); + }, + [&](const SessionDescriptionInterface& answer) { + std::string answer_str = absl::StrCat(answer); + EXPECT_THAT(answer_str, HasSubstr("a=rtcp-fb:* ack ccfb\r\n")); + // Check that the answer does not contain transport-cc nor the + // header extension + // http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01 + EXPECT_THAT(answer_str, Not(HasSubstr("transport-cc"))); + EXPECT_THAT( + answer_str, + Not(HasSubstr(" http://www.ietf.org/id/" + "draft-holmer-rmcat-transport-wide-cc-extensions-"))); + offer_exchange_done = true; + }); + // Wait for SDP negotiation and the packet filter to be set up. + s.WaitAndProcess(&offer_exchange_done); + + s.ProcessMessages(TimeDelta::Seconds(2)); + EXPECT_GT(send_node_feedback_counter.FeedbackAccordingToRfc8888(), 0); + // TODO: bugs.webrtc.org/42225697 - Fix bug. Caller sends both transport + // sequence number feedback and congestion control feedback. So the + // callee still sends packets with transport sequence number header extensions + // even though the extension has been removed from the answer. 
+ // EXPECT_EQ(send_node_feedback_counter.FeedbackAccordingToTransportCc(), 0); + + EXPECT_GT(ret_node_feedback_counter.FeedbackAccordingToRfc8888(), 0); + EXPECT_EQ(ret_node_feedback_counter.FeedbackAccordingToTransportCc(), 0); +} + +TEST(L4STest, CallerAdaptToLinkCapacityWithoutEcn) { + test::ScopedFieldTrials trials( + "WebRTC-RFC8888CongestionControlFeedback/Enabled/"); + PeerScenario s(*test_info_); + + PeerScenarioClient::Config config = PeerScenarioClient::Config(); + PeerScenarioClient* caller = s.CreateClient(config); + PeerScenarioClient* callee = s.CreateClient(config); + + auto caller_to_callee = s.net() + ->NodeBuilder() + .capacity(DataRate::KilobitsPerSec(600)) + .Build() + .node; + auto callee_to_caller = s.net()->NodeBuilder().Build().node; + s.net()->CreateRoute(caller->endpoint(), {caller_to_callee}, + callee->endpoint()); + s.net()->CreateRoute(callee->endpoint(), {callee_to_caller}, + caller->endpoint()); + + auto signaling = s.ConnectSignaling(caller, callee, {caller_to_callee}, + {callee_to_caller}); + PeerScenarioClient::VideoSendTrackConfig video_conf; + video_conf.generator.squares_video->framerate = 15; + caller->CreateVideo("VIDEO_1", video_conf); + + signaling.StartIceSignaling(); + std::atomic offer_exchange_done(false); + signaling.NegotiateSdp([&](const SessionDescriptionInterface& answer) { + offer_exchange_done = true; + }); + s.WaitAndProcess(&offer_exchange_done); + s.ProcessMessages(TimeDelta::Seconds(3)); + DataRate available_bwe = + GetAvailableSendBitrate(GetStatsAndProcess(s, caller)); + EXPECT_GT(available_bwe.kbps(), 500); + EXPECT_LT(available_bwe.kbps(), 610); +} + +TEST(L4STest, SendsEct1UntilFirstFeedback) { + test::ScopedFieldTrials trials( + "WebRTC-RFC8888CongestionControlFeedback/Enabled/"); + PeerScenario s(*test_info_); + + PeerScenarioClient::Config config = PeerScenarioClient::Config(); + config.disable_encryption = true; + PeerScenarioClient* caller = s.CreateClient(config); + PeerScenarioClient* callee = s.CreateClient(config); + + // Create network path from caller to callee. + auto caller_to_callee = s.net()->NodeBuilder().Build().node; + auto callee_to_caller = s.net()->NodeBuilder().Build().node; + s.net()->CreateRoute(caller->endpoint(), {caller_to_callee}, + callee->endpoint()); + s.net()->CreateRoute(callee->endpoint(), {callee_to_caller}, + caller->endpoint()); + + RtcpFeedbackCounter feedback_counter; + std::atomic seen_ect1_feedback = false; + std::atomic seen_not_ect_feedback = false; + callee_to_caller->router()->SetWatcher([&](const EmulatedIpPacket& packet) { + feedback_counter.Count(packet); + if (feedback_counter.ect1() > 0) { + seen_ect1_feedback = true; + RTC_LOG(LS_INFO) << " ect 1" << feedback_counter.ect1(); + } + if (feedback_counter.not_ect() > 0) { + seen_not_ect_feedback = true; + RTC_LOG(LS_INFO) << " not ect" << feedback_counter.not_ect(); + } + }); + + auto signaling = s.ConnectSignaling(caller, callee, {caller_to_callee}, + {callee_to_caller}); + PeerScenarioClient::VideoSendTrackConfig video_conf; + video_conf.generator.squares_video->framerate = 15; + + caller->CreateVideo("VIDEO_1", video_conf); + signaling.StartIceSignaling(); + + std::atomic offer_exchange_done(false); + signaling.NegotiateSdp([&](const SessionDescriptionInterface& answer) { + offer_exchange_done = true; + }); + s.WaitAndProcess(&offer_exchange_done); + + // Wait for first feedback where packets have been sent with ECT(1). 
Then + // expect feedback for packets sent as not ECT, since WebRTC currently does not + // implement adaptation to ECN. + EXPECT_TRUE(s.WaitAndProcess(&seen_ect1_feedback, TimeDelta::Seconds(1))); + EXPECT_FALSE(seen_not_ect_feedback); + EXPECT_TRUE(s.WaitAndProcess(&seen_not_ect_feedback, TimeDelta::Seconds(1))); +} + +TEST(L4STest, SendsEct1AfterRouteChange) { + test::ScopedFieldTrials trials( + "WebRTC-RFC8888CongestionControlFeedback/Enabled/"); + PeerScenario s(*test_info_); + + PeerScenarioClient::Config config; + config.disable_encryption = true; + config.endpoints = {{0, {.type = AdapterType::ADAPTER_TYPE_WIFI}}}; + PeerScenarioClient* caller = s.CreateClient(config); + // Callee has both wifi and cellular adapters. + config.endpoints = {{0, {.type = AdapterType::ADAPTER_TYPE_WIFI}}, + {1, {.type = AdapterType::ADAPTER_TYPE_CELLULAR}}}; + PeerScenarioClient* callee = s.CreateClient(config); + + // Create network path from caller to callee. + auto caller_to_callee = s.net()->NodeBuilder().Build().node; + auto callee_to_caller_wifi = s.net()->NodeBuilder().Build().node; + auto callee_to_caller_cellular = s.net()->NodeBuilder().Build().node; + s.net()->CreateRoute(caller->endpoint(0), {caller_to_callee}, + callee->endpoint(0)); + s.net()->CreateRoute(caller->endpoint(0), {caller_to_callee}, + callee->endpoint(1)); + s.net()->CreateRoute(callee->endpoint(0), {callee_to_caller_wifi}, + caller->endpoint(0)); + s.net()->CreateRoute(callee->endpoint(1), {callee_to_caller_cellular}, + caller->endpoint(0)); + + RtcpFeedbackCounter wifi_feedback_counter; + std::atomic seen_ect1_on_wifi_feedback = false; + std::atomic seen_not_ect_on_wifi_feedback = false; + callee_to_caller_wifi->router()->SetWatcher( + [&](const EmulatedIpPacket& packet) { + wifi_feedback_counter.Count(packet); + if (wifi_feedback_counter.ect1() > 0) { + seen_ect1_on_wifi_feedback = true; + RTC_LOG(LS_INFO) << " ect 1 feedback on wifi: " + << wifi_feedback_counter.ect1(); + } + if (wifi_feedback_counter.not_ect() > 0) { + seen_not_ect_on_wifi_feedback = true; + RTC_LOG(LS_INFO) << " not ect feedback on wifi: " + << wifi_feedback_counter.not_ect(); + } + }); + + auto signaling = s.ConnectSignaling(caller, callee, {caller_to_callee}, + {callee_to_caller_wifi}); + PeerScenarioClient::VideoSendTrackConfig video_conf; + video_conf.generator.squares_video->framerate = 15; + + caller->CreateVideo("VIDEO_1", video_conf); + signaling.StartIceSignaling(); + + std::atomic offer_exchange_done(false); + signaling.NegotiateSdp([&](const SessionDescriptionInterface& answer) { + offer_exchange_done = true; + }); + s.WaitAndProcess(&offer_exchange_done); + + // Wait for first feedback where packets have been sent with ECT(1). Then + // expect feedback for packets sent as not ECT, since WebRTC currently does not + // implement adaptation to ECN.
+ EXPECT_TRUE( + s.WaitAndProcess(&seen_ect1_on_wifi_feedback, TimeDelta::Seconds(1))); + EXPECT_FALSE(seen_not_ect_on_wifi_feedback); + EXPECT_TRUE( + s.WaitAndProcess(&seen_not_ect_on_wifi_feedback, TimeDelta::Seconds(1))); + + RtcpFeedbackCounter cellular_feedback_counter; + std::atomic seen_ect1_on_cellular_feedback = false; + callee_to_caller_cellular->router()->SetWatcher( + [&](const EmulatedIpPacket& packet) { + cellular_feedback_counter.Count(packet); + if (cellular_feedback_counter.ect1() > 0) { + seen_ect1_on_cellular_feedback = true; + RTC_LOG(LS_INFO) << " ect 1 feedback on cellular: " + << cellular_feedback_counter.ect1(); + } + }); + // Disable the callee's wifi and expect that the connection switches to cellular and + // sends packets with ECT(1) again. + s.net()->DisableEndpoint(callee->endpoint(0)); + EXPECT_TRUE( + s.WaitAndProcess(&seen_ect1_on_cellular_feedback, TimeDelta::Seconds(5))); +} + +} // namespace +} // namespace webrtc diff --git a/test/peer_scenario/tests/remote_estimate_test.cc b/test/peer_scenario/tests/remote_estimate_test.cc index fa343a7fdf..8dee3435a7 100644 --- a/test/peer_scenario/tests/remote_estimate_test.cc +++ b/test/peer_scenario/tests/remote_estimate_test.cc @@ -23,8 +23,7 @@ namespace test { namespace { RtpHeaderExtensionMap AudioExtensions( const SessionDescriptionInterface& session) { - auto* audio_desc = - cricket::GetFirstAudioContentDescription(session.description()); + auto* audio_desc = GetFirstAudioContentDescription(session.description()); return RtpHeaderExtensionMap(audio_desc->rtp_header_extensions()); } @@ -76,7 +75,7 @@ TEST(RemoteEstimateEndToEnd, AudioUsesAbsSendTimeExtension) { s.net()->CreateRoute(callee->endpoint(), {ret_node}, caller->endpoint()); auto signaling = s.ConnectSignaling(caller, callee, {send_node}, {ret_node}); - caller->CreateAudio("AUDIO", cricket::AudioOptions()); + caller->CreateAudio("AUDIO", AudioOptions()); signaling.StartIceSignaling(); RtpHeaderExtensionMap extension_map; std::atomic offer_exchange_done(false); diff --git a/test/peer_scenario/tests/unsignaled_stream_test.cc b/test/peer_scenario/tests/unsignaled_stream_test.cc index 4f478b4b2a..32bccbd939 100644 --- a/test/peer_scenario/tests/unsignaled_stream_test.cc +++ b/test/peer_scenario/tests/unsignaled_stream_test.cc @@ -10,11 +10,9 @@ #include "media/base/stream_params.h" #include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_util.h" -#include "pc/media_session.h" #include "pc/session_description.h" -#include "test/field_trial.h" -#include "test/gmock.h" #include "test/gtest.h" #include "test/peer_scenario/peer_scenario.h" @@ -47,7 +45,7 @@ std::string TestParametersMidTestConfigurationToString( } } -class FrameObserver : public rtc::VideoSinkInterface { +class FrameObserver : public VideoSinkInterface { public: FrameObserver() : frame_observed_(false) {} void OnFrame(const VideoFrame&) override { frame_observed_ = true; } @@ -66,10 +64,10 @@ uint32_t get_ssrc(SessionDescriptionInterface* offer, size_t track_index) { void set_ssrc(SessionDescriptionInterface* offer, size_t index, uint32_t ssrc) { EXPECT_LT(index, offer->description()->contents().size()); - cricket::StreamParams& new_stream_params = offer->description() - ->contents()[index] - .media_description() - ->mutable_streams()[0]; + StreamParams& new_stream_params = offer->description() + ->contents()[index] + .media_description() + ->mutable_streams()[0]; new_stream_params.ssrcs[0] = ssrc;
new_stream_params.ssrc_groups[0].ssrcs[0] = ssrc; } @@ -98,7 +96,6 @@ TEST_P(UnsignaledStreamTest, ReplacesUnsignaledStreamOnCompletedSignaling) { PeerScenarioClient::Config config = PeerScenarioClient::Config(); // Disable encryption so that we can inject a fake early media packet without // triggering srtp failures. - config.disable_encryption = true; auto* caller = s.CreateClient(config); auto* callee = s.CreateClient(config); @@ -146,7 +143,7 @@ TEST_P(UnsignaledStreamTest, ReplacesUnsignaledStreamOnCompletedSignaling) { uint32_t first_ssrc = 0; uint32_t second_ssrc = 0; - absl::optional mid_header_extension_id = absl::nullopt; + std::optional mid_header_extension_id = std::nullopt; signaling.NegotiateSdp( /* munge_sdp = */ @@ -154,8 +151,7 @@ TEST_P(UnsignaledStreamTest, ReplacesUnsignaledStreamOnCompletedSignaling) { // Obtain the MID header extension ID and if we want the // MidTestConfiguration::kMidNotNegotiated setup then we remove the MID // header extension through SDP munging (otherwise SDP is not modified). - for (cricket::ContentInfo& content_info : - offer->description()->contents()) { + for (ContentInfo& content_info : offer->description()->contents()) { std::vector header_extensions = content_info.media_description()->rtp_header_extensions(); for (auto it = header_extensions.begin(); @@ -219,7 +215,7 @@ TEST_P(UnsignaledStreamTest, ReplacesUnsignaledStreamOnCompletedSignaling) { break; } // Inject the modified packet. - rtc::CopyOnWriteBuffer updated_buffer = parsed_packet.Buffer(); + CopyOnWriteBuffer updated_buffer = parsed_packet.Buffer(); EmulatedIpPacket updated_packet( packet.from, packet.to, updated_buffer, packet.arrival_time); send_node->OnPacketReceived(std::move(updated_packet)); diff --git a/test/rtcp_packet_parser.cc b/test/rtcp_packet_parser.cc index 3686a31407..f0a7465290 100644 --- a/test/rtcp_packet_parser.cc +++ b/test/rtcp_packet_parser.cc @@ -21,7 +21,7 @@ namespace test { RtcpPacketParser::RtcpPacketParser() = default; RtcpPacketParser::~RtcpPacketParser() = default; -bool RtcpPacketParser::Parse(rtc::ArrayView data) { +bool RtcpPacketParser::Parse(ArrayView data) { ++processed_rtcp_packets_; const uint8_t* const buffer = data.data(); diff --git a/test/rtcp_packet_parser.h b/test/rtcp_packet_parser.h index 0aa1cbf499..84ef5366ec 100644 --- a/test/rtcp_packet_parser.h +++ b/test/rtcp_packet_parser.h @@ -47,7 +47,7 @@ bool ParseSinglePacket(const uint8_t* buffer, size_t size, Packet* packet) { } // Same function, but takes raw buffer as single argument instead of pair. 
template -bool ParseSinglePacket(rtc::ArrayView buffer, Packet* packet) { +bool ParseSinglePacket(ArrayView buffer, Packet* packet) { return ParseSinglePacket(buffer.data(), buffer.size(), packet); } @@ -79,7 +79,7 @@ class RtcpPacketParser { RtcpPacketParser(); ~RtcpPacketParser(); - bool Parse(rtc::ArrayView packet); + bool Parse(ArrayView packet); PacketCounter* app() { return &app_; } PacketCounter* bye() { return &bye_; } diff --git a/test/rtp_file_reader.cc b/test/rtp_file_reader.cc index b6f3cbbe5b..d1718f6345 100644 --- a/test/rtp_file_reader.cc +++ b/test/rtp_file_reader.cc @@ -10,8 +10,6 @@ #include "test/rtp_file_reader.h" -#include - #include #include #include @@ -21,12 +19,44 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/system/arch.h" +#include "rtc_base/time_utils.h" -namespace webrtc { -namespace test { +namespace { +constexpr size_t kRtpDumpFirstLineLength = 80; +constexpr uint16_t kRtpDumpPacketHeaderSize = 8; + +enum { + kResultFail = -1, + kResultSuccess = 0, + kResultSkip = 1, +}; + +enum { + kPcapVersionMajor = 2, + kPcapVersionMinor = 4, + kLinktypeNull = 0, + kLinktypeEthernet = 1, + kBsdNullLoopback1 = 0x00000002, + kBsdNullLoopback2 = 0x02000000, + kEthernetIIHeaderMacSkip = 12, + kEthertypeIp = 0x0800, + kIpVersion4 = 4, + kMinIpHeaderLength = 20, + kFragmentOffsetClear = 0x0000, + kFragmentOffsetDoNotFragment = 0x4000, + kProtocolTcp = 0x06, + kProtocolUdp = 0x11, + kUdpHeaderLength = 8, +}; + +constexpr size_t kMaxReadBufferSize = 4096; +constexpr uint32_t kPcapBOMSwapOrder = 0xd4c3b2a1UL; +constexpr uint32_t kPcapBOMNoSwapOrder = 0xa1b2c3d4UL; +constexpr uint32_t kPcapNgBOMLittleEndian = 0x4d3c2b1aUL; -static const size_t kFirstLineLength = 80; -static uint16_t kPacketHeaderSize = 8; +constexpr uint32_t kPcapNgSectionHeaderBlock = 0x0a0d0d0aUL; +constexpr uint32_t kPcapNgInterfaceDescriptionBlock = 0x00000001LU; +constexpr uint32_t kPcapNgPacketBlock = 0x00000006LU; #define TRY(expr) \ do { \ @@ -36,6 +66,17 @@ static uint16_t kPacketHeaderSize = 8; } \ } while (0) +#define TRY_PCAP(expr) \ + do { \ + int r = (expr); \ + if (r == kResultFail) { \ + RTC_LOG(LS_INFO) << "FAIL at " << __FILE__ << ":" << __LINE__; \ + return kResultFail; \ + } else if (r == kResultSkip) { \ + return kResultSkip; \ + } \ + } while (0) + bool ReadUint32(uint32_t* out, FILE* file) { *out = 0; for (size_t i = 0; i < 4; ++i) { @@ -60,6 +101,11 @@ bool ReadUint16(uint16_t* out, FILE* file) { return true; } +} // namespace + +namespace webrtc { +namespace test { + class RtpFileReaderImpl : public RtpFileReader { public: virtual bool Init(FILE* file, const std::set& ssrc_filter) = 0; @@ -123,8 +169,8 @@ class RtpDumpReader : public RtpFileReaderImpl { bool Init(FILE* file, const std::set& ssrc_filter) override { file_ = file; - char firstline[kFirstLineLength + 1] = {0}; - if (fgets(firstline, kFirstLineLength, file_) == nullptr) { + char firstline[kRtpDumpFirstLineLength + 1] = {0}; + if (fgets(firstline, kRtpDumpFirstLineLength, file_) == nullptr) { RTC_LOG(LS_INFO) << "Can't read from file"; return false; } @@ -139,7 +185,8 @@ class RtpDumpReader : public RtpFileReaderImpl { return false; } } else { - RTC_LOG(LS_INFO) << "Wrong file format of input file"; + RTC_LOG(LS_INFO) + << "Input file is neither in rtpplay nor RTPencode format"; return false; } @@ -169,7 +216,7 @@ class RtpDumpReader : public RtpFileReaderImpl { TRY(ReadUint32(&offset, file_)); // Use 'len' here because a 'plen' of 0 specifies rtcp. 
- len -= kPacketHeaderSize; + len -= kRtpDumpPacketHeaderSize; if (packet->length < len) { RTC_LOG(LS_ERROR) << "Packet is too large to fit: " << len << " bytes vs " << packet->length @@ -191,45 +238,10 @@ class RtpDumpReader : public RtpFileReaderImpl { FILE* file_; }; -enum { - kResultFail = -1, - kResultSuccess = 0, - kResultSkip = 1, - - kPcapVersionMajor = 2, - kPcapVersionMinor = 4, - kLinktypeNull = 0, - kLinktypeEthernet = 1, - kBsdNullLoopback1 = 0x00000002, - kBsdNullLoopback2 = 0x02000000, - kEthernetIIHeaderMacSkip = 12, - kEthertypeIp = 0x0800, - kIpVersion4 = 4, - kMinIpHeaderLength = 20, - kFragmentOffsetClear = 0x0000, - kFragmentOffsetDoNotFragment = 0x4000, - kProtocolTcp = 0x06, - kProtocolUdp = 0x11, - kUdpHeaderLength = 8, - kMaxReadBufferSize = 4096 -}; - -const uint32_t kPcapBOMSwapOrder = 0xd4c3b2a1UL; -const uint32_t kPcapBOMNoSwapOrder = 0xa1b2c3d4UL; - -#define TRY_PCAP(expr) \ - do { \ - int r = (expr); \ - if (r == kResultFail) { \ - RTC_LOG(LS_INFO) << "FAIL at " << __FILE__ << ":" << __LINE__; \ - return kResultFail; \ - } else if (r == kResultSkip) { \ - return kResultSkip; \ - } \ - } while (0) - // Read RTP packets from file in tcpdump/libpcap format, as documented at: // http://wiki.wireshark.org/Development/LibpcapFileFormat +// Transparently supports PCAPNG as described at +// https://pcapng.com/ class PcapReader : public RtpFileReaderImpl { public: PcapReader() @@ -240,6 +252,7 @@ class PcapReader : public RtpFileReaderImpl { #else swap_network_byte_order_(true), #endif + pcapng_(false), read_buffer_(), packets_by_ssrc_(), packets_(), @@ -263,42 +276,30 @@ class PcapReader : public RtpFileReaderImpl { int Initialize(FILE* file, const std::set& ssrc_filter) { file_ = file; + size_t total_packet_count = 0; if (ReadGlobalHeader() < 0) { return kResultFail; } - - int total_packet_count = 0; - uint32_t stream_start_ms = 0; - int32_t next_packet_pos = ftell(file_); - for (;;) { - TRY_PCAP(fseek(file_, next_packet_pos, SEEK_SET)); - int result = ReadPacket(&next_packet_pos, stream_start_ms, - ++total_packet_count, ssrc_filter); - if (result == kResultFail) { - break; - } else if (result == kResultSuccess && packets_.size() == 1) { - RTC_DCHECK_EQ(stream_start_ms, 0); - PacketIterator it = packets_.begin(); - stream_start_ms = it->time_offset_ms; - it->time_offset_ms = 0; - } + int result; + if (!pcapng_) { + result = ReadPcap(ssrc_filter, total_packet_count); + } else { + result = ReadPcapNg(ssrc_filter, total_packet_count); } - - if (feof(file_) == 0) { - printf("Failed reading file!\n"); + if (result == kResultFail) { return kResultFail; } - printf("Total packets in file: %d\n", total_packet_count); - printf("Total RTP/RTCP packets: %zu\n", packets_.size()); + RTC_LOG(LS_INFO) << "Total packets in file: " << total_packet_count; + RTC_LOG(LS_INFO) << "Total RTP/RTCP packets: " << packets_.size(); for (SsrcMapIterator mit = packets_by_ssrc_.begin(); mit != packets_by_ssrc_.end(); ++mit) { uint32_t ssrc = mit->first; const std::vector& packet_indices = mit->second; int pt = packets_[packet_indices[0]].payload_type; - printf("SSRC: %08x, %zu packets, pt=%d\n", ssrc, packet_indices.size(), - pt); + RTC_LOG(LS_INFO) << "SSRC: " << ssrc << ", " << packet_indices.size() + << " packets, pt=" << pt << "."; } // TODO(solenberg): Better validation of identified SSRC streams. 
@@ -321,6 +322,56 @@ class PcapReader : public RtpFileReaderImpl { return kResultSuccess; } + int ReadPcap(const std::set& ssrc_filter, + size_t& total_packet_count) { + uint32_t stream_start_ms = 0; + int32_t next_packet_pos = ftell(file_); + for (;;) { + TRY_PCAP(fseek(file_, next_packet_pos, SEEK_SET)); + int result = ReadPacket(&next_packet_pos, stream_start_ms, ssrc_filter); + if (result == kResultFail) { + break; + } else if (result == kResultSuccess && packets_.size() == 1) { + RTC_DCHECK_EQ(stream_start_ms, 0); + PacketIterator it = packets_.begin(); + stream_start_ms = it->time_offset_ms; + it->time_offset_ms = 0; + } + total_packet_count++; + } + + if (feof(file_) == 0) { + RTC_LOG(LS_ERROR) << "Failed reading file!"; + return kResultFail; + } + return kResultSuccess; + } + + int ReadPcapNg(const std::set& ssrc_filter, + size_t& total_packet_count) { + uint32_t stream_start_ms = 0; + int next_packet_pos = 0; + for (;;) { + TRY_PCAP(fseek(file_, next_packet_pos, SEEK_SET)); + int result = ReadPacketNg(&next_packet_pos, stream_start_ms, ssrc_filter); + + if (result == kResultFail) { + break; + } else if (result == kResultSuccess && packets_.size() == 1) { + RTC_DCHECK_EQ(stream_start_ms, 0); + PacketIterator it = packets_.begin(); + stream_start_ms = it->time_offset_ms; + it->time_offset_ms = 0; + } + total_packet_count++; + } + if (feof(file_) == 0) { + RTC_LOG(LS_ERROR) << "Failed reading file!"; + return kResultFail; + } + return kResultSuccess; + } + bool NextPacket(RtpPacket* packet) override { uint32_t length = RtpPacket::kMaxPacketBufferSize; if (NextPcap(packet->data, &length, &packet->time_ms) != kResultSuccess) @@ -353,7 +404,6 @@ class PcapReader : public RtpFileReaderImpl { private: // A marker of an RTP packet within the file. struct RtpPacketMarker { - uint32_t packet_number; // One-based index (like in WireShark) uint32_t time_offset_ms; uint32_t source_ip; uint32_t dest_ip; @@ -377,6 +427,10 @@ class PcapReader : public RtpFileReaderImpl { swap_pcap_byte_order_ = true; } else if (magic == kPcapBOMNoSwapOrder) { swap_pcap_byte_order_ = false; + } else if (magic == kPcapNgSectionHeaderBlock) { + pcapng_ = true; + RTC_LOG(LS_INFO) << "PCAPNG detected, support is experimental"; + return kResultSuccess; } else { return kResultFail; } @@ -408,9 +462,32 @@ class PcapReader : public RtpFileReaderImpl { return kResultSuccess; } + int ProcessPacket(RtpPacketMarker& marker, + const std::set& ssrc_filter, + ArrayView packet) { + if (IsRtcpPacket(packet)) { + marker.payload_type = packet[1]; + packets_.push_back(marker); + } else if (IsRtpPacket(packet)) { + uint32_t ssrc = ParseRtpSsrc(packet); + marker.payload_type = ParseRtpPayloadType(packet); + if (ssrc_filter.empty() || ssrc_filter.find(ssrc) != ssrc_filter.end()) { + packets_by_ssrc_[ssrc].push_back( + static_cast(packets_.size())); + packets_.push_back(marker); + } else { + return kResultSkip; + } + } else { + RTC_LOG(LS_INFO) << "Not recognized as RTP/RTCP"; + return kResultSkip; + } + + return kResultSuccess; + } + int ReadPacket(int32_t* next_packet_pos, uint32_t stream_start_ms, - uint32_t number, const std::set& ssrc_filter) { RTC_DCHECK(next_packet_pos); @@ -426,37 +503,79 @@ class PcapReader : public RtpFileReaderImpl { *next_packet_pos = ftell(file_) + incl_len; RtpPacketMarker marker = {0}; - marker.packet_number = number; marker.time_offset_ms = CalcTimeDelta(ts_sec, ts_usec, stream_start_ms); TRY_PCAP(ReadPacketHeader(&marker)); marker.pos_in_file = ftell(file_); if (marker.payload_length > 
sizeof(read_buffer_)) { - printf("Packet too large!\n"); + RTC_LOG(LS_ERROR) << "Packet too large!"; return kResultFail; } TRY_PCAP(Read(read_buffer_, marker.payload_length)); + return ProcessPacket(marker, ssrc_filter, + {read_buffer_, marker.payload_length}); + } - rtc::ArrayView packet(read_buffer_, marker.payload_length); - if (IsRtcpPacket(packet)) { - marker.payload_type = packet[1]; - packets_.push_back(marker); - } else if (IsRtpPacket(packet)) { - uint32_t ssrc = ParseRtpSsrc(packet); - marker.payload_type = ParseRtpPayloadType(packet); - if (ssrc_filter.empty() || ssrc_filter.find(ssrc) != ssrc_filter.end()) { - packets_by_ssrc_[ssrc].push_back( - static_cast(packets_.size())); - packets_.push_back(marker); - } else { - return kResultSkip; - } - } else { - RTC_LOG(LS_INFO) << "Not recognized as RTP/RTCP"; - return kResultSkip + int ReadPacketNg(int32_t* next_packet_pos, + uint32_t stream_start_ms, + const std::set& ssrc_filter) { + uint32_t block_type; + uint32_t block_length; + TRY_PCAP(Read(&block_type, false)); + TRY_PCAP(Read(&block_length, false)); + if (block_length == 0) { + RTC_LOG(LS_ERROR) << "Empty PCAPNG block"; + return kResultFail; } - return kResultSuccess; + *next_packet_pos += block_length; + switch (block_type) { + case kPcapNgSectionHeaderBlock: { + // TODO: https://issues.webrtc.org/issues/351327754 - interpret more of + // this block, in particular the if_tsresol option. + uint32_t byte_order_magic; + TRY_PCAP(Read(&byte_order_magic, false)); + swap_pcap_byte_order_ = (byte_order_magic == kPcapNgBOMLittleEndian); + } break; + case kPcapNgInterfaceDescriptionBlock: + break; + case kPcapNgPacketBlock: { + uint32_t interface; // Interface ID. Unused. + uint32_t ts_upper; // Upper 32 bits of timestamp. + uint32_t ts_lower; // Lower 32 bits of timestamp. + uint32_t incl_len; // Number of octets of packet saved in file. + uint32_t orig_len; // Actual length of packet. + TRY_PCAP(Read(&interface, false)); + TRY_PCAP(Read(&ts_upper, false)); + TRY_PCAP(Read(&ts_lower, false)); + TRY_PCAP(Read(&incl_len, false)); + TRY_PCAP(Read(&orig_len, false)); + + RtpPacketMarker marker = {0}; + // Note: Wireshark writes nanoseconds most of the time, see comments in + // its pcapio.c. We are only interested in the time difference so + // truncating to uint32_t is ok.
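+        // kNumMicrosecsPerSec is 1,000,000, which is also the number of + // nanoseconds per millisecond, so the division below converts the nanosecond + // timestamp to milliseconds before the stream start offset is subtracted.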
+ uint64_t timestamp_ms = + ((static_cast(ts_upper) << 32) | ts_lower) / + kNumMicrosecsPerSec; + marker.time_offset_ms = + static_cast(timestamp_ms) - stream_start_ms; + TRY_PCAP(ReadPacketHeader(&marker)); + marker.pos_in_file = ftell(file_); + if (marker.payload_length > sizeof(read_buffer_)) { + RTC_LOG(LS_ERROR) << "Packet too large!"; + return kResultFail; + } + TRY_PCAP(Read(read_buffer_, marker.payload_length)); + if (ProcessPacket(marker, ssrc_filter, + {read_buffer_, marker.payload_length}) != + kResultSuccess) { + return kResultFail; + } + return kResultSuccess; + } + } + return kResultSkip; } int ReadPacketHeader(RtpPacketMarker* marker) { @@ -470,7 +589,6 @@ class PcapReader : public RtpFileReaderImpl { TRY_PCAP(Read(&protocol, true)); if (protocol == kBsdNullLoopback1 || protocol == kBsdNullLoopback2) { int result = ReadXxpIpHeader(marker); - RTC_LOG(LS_INFO) << "Recognized loopback frame"; if (result != kResultSkip) { return result; } @@ -484,7 +602,6 @@ class PcapReader : public RtpFileReaderImpl { TRY_PCAP(Read(&type, true)); if (type == kEthertypeIp) { int result = ReadXxpIpHeader(marker); - RTC_LOG(LS_INFO) << "Recognized ethernet 2 frame"; if (result != kResultSkip) { return result; } @@ -544,13 +661,13 @@ class PcapReader : public RtpFileReaderImpl { RTC_LOG(LS_INFO) << "TCP packets are not handled"; return kResultSkip; } else if (protocol == kProtocolUdp) { - uint16_t length; - uint16_t checksum; + uint16_t payload_length; + uint16_t payload_checksum; TRY_PCAP(Read(&marker->source_port, true)); TRY_PCAP(Read(&marker->dest_port, true)); - TRY_PCAP(Read(&length, true)); - TRY_PCAP(Read(&checksum, true)); - marker->payload_length = length - kUdpHeaderLength; + TRY_PCAP(Read(&payload_length, true)); + TRY_PCAP(Read(&payload_checksum, true)); + marker->payload_length = payload_length - kUdpHeaderLength; } else { RTC_LOG(LS_INFO) << "Unknown transport (expected UDP or TCP)"; return kResultSkip; @@ -617,6 +734,7 @@ class PcapReader : public RtpFileReaderImpl { FILE* file_; bool swap_pcap_byte_order_; const bool swap_network_byte_order_; + bool pcapng_; uint8_t read_buffer_[kMaxReadBufferSize]; SsrcMap packets_by_ssrc_; @@ -648,7 +766,7 @@ RtpFileReader* RtpFileReader::Create(FileFormat format, FILE* file = tmpfile(); if (file == nullptr) { - printf("ERROR: Can't open file from memory buffer\n"); + RTC_LOG(LS_ERROR) << "ERROR: Can't open file from memory buffer."; return nullptr; } @@ -671,7 +789,7 @@ RtpFileReader* RtpFileReader::Create(FileFormat format, std::string filename_str = std::string(filename); FILE* file = fopen(filename_str.c_str(), "rb"); if (file == nullptr) { - printf("ERROR: Can't open file: %s\n", filename_str.c_str()); + RTC_LOG(LS_ERROR) << "ERROR: Can't open file: '" << filename_str << "'."; return nullptr; } diff --git a/test/rtp_file_reader.h b/test/rtp_file_reader.h index 4e26c71baa..4af704d202 100644 --- a/test/rtp_file_reader.h +++ b/test/rtp_file_reader.h @@ -10,6 +10,7 @@ #ifndef TEST_RTP_FILE_READER_H_ #define TEST_RTP_FILE_READER_H_ +#include #include #include diff --git a/test/rtp_file_reader_unittest.cc b/test/rtp_file_reader_unittest.cc index 995d9fbc9d..eea88c9d97 100644 --- a/test/rtp_file_reader_unittest.cc +++ b/test/rtp_file_reader_unittest.cc @@ -85,7 +85,7 @@ class TestPcapFileReader : public ::testing::Test { PacketsPerSsrc pps; test::RtpPacket packet; while (rtp_packet_source_->NextPacket(&packet)) { - rtc::ArrayView raw(packet.data, packet.length); + ArrayView raw(packet.data, packet.length); if (IsRtpPacket(raw)) { 
pps[ParseRtpSsrc(raw)]++; } diff --git a/test/rtp_file_writer.cc b/test/rtp_file_writer.cc index 22f664abc8..f992756aab 100644 --- a/test/rtp_file_writer.cc +++ b/test/rtp_file_writer.cc @@ -13,9 +13,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "rtc_base/checks.h" namespace webrtc { @@ -92,7 +92,7 @@ class RtpDumpWriter : public RtpFileWriter { } FILE* file_; - absl::optional first_packet_time_; + std::optional first_packet_time_; }; RtpFileWriter* RtpFileWriter::Create(FileFormat format, diff --git a/test/rtp_rtcp_observer.h b/test/rtp_rtcp_observer.h index cbbff1abfc..d4b8ec4352 100644 --- a/test/rtp_rtcp_observer.h +++ b/test/rtp_rtcp_observer.h @@ -15,6 +15,7 @@ #include #include +#include "absl/flags/flag.h" #include "api/array_view.h" #include "api/test/simulated_network.h" #include "api/units/time_delta.h" @@ -25,10 +26,7 @@ #include "system_wrappers/include/field_trial.h" #include "test/direct_transport.h" #include "test/gtest.h" - -namespace { -constexpr webrtc::TimeDelta kShortTimeout = webrtc::TimeDelta::Millis(500); -} +#include "test/test_flags.h" namespace webrtc { namespace test { @@ -45,26 +43,26 @@ class RtpRtcpObserver { virtual ~RtpRtcpObserver() {} virtual bool Wait() { - if (field_trial::IsEnabled("WebRTC-QuickPerfTest")) { - observation_complete_.Wait(kShortTimeout); + if (absl::GetFlag(FLAGS_webrtc_quick_perf_test)) { + observation_complete_.Wait(TimeDelta::Millis(500)); return true; } return observation_complete_.Wait(timeout_); } - virtual Action OnSendRtp(rtc::ArrayView packet) { + virtual Action OnSendRtp(ArrayView packet) { return SEND_PACKET; } - virtual Action OnSendRtcp(rtc::ArrayView packet) { + virtual Action OnSendRtcp(ArrayView packet) { return SEND_PACKET; } - virtual Action OnReceiveRtp(rtc::ArrayView packet) { + virtual Action OnReceiveRtp(ArrayView packet) { return SEND_PACKET; } - virtual Action OnReceiveRtcp(rtc::ArrayView packet) { + virtual Action OnReceiveRtcp(ArrayView packet) { return SEND_PACKET; } @@ -72,7 +70,7 @@ class RtpRtcpObserver { RtpRtcpObserver() : RtpRtcpObserver(TimeDelta::Zero()) {} explicit RtpRtcpObserver(TimeDelta event_timeout) : timeout_(event_timeout) {} - rtc::Event observation_complete_; + Event observation_complete_; private: const TimeDelta timeout_; @@ -88,8 +86,8 @@ class PacketTransport : public test::DirectTransport { TransportType transport_type, const std::map& payload_type_map, std::unique_ptr nw_pipe, - rtc::ArrayView audio_extensions, - rtc::ArrayView video_extensions) + ArrayView audio_extensions, + ArrayView video_extensions) : test::DirectTransport(task_queue, std::move(nw_pipe), send_call, @@ -100,7 +98,7 @@ class PacketTransport : public test::DirectTransport { transport_type_(transport_type) {} private: - bool SendRtp(rtc::ArrayView packet, + bool SendRtp(ArrayView packet, const PacketOptions& options) override { EXPECT_TRUE(IsRtpPacket(packet)); RtpRtcpObserver::Action action = RtpRtcpObserver::SEND_PACKET; @@ -121,7 +119,7 @@ class PacketTransport : public test::DirectTransport { return true; // Will never happen, makes compiler happy. 
} - bool SendRtcp(rtc::ArrayView packet) override { + bool SendRtcp(ArrayView packet) override { EXPECT_TRUE(IsRtcpPacket(packet)); RtpRtcpObserver::Action action = RtpRtcpObserver::SEND_PACKET; if (observer_) { diff --git a/test/run_loop.cc b/test/run_loop.cc index 7cc80ab481..d3495e00e8 100644 --- a/test/run_loop.cc +++ b/test/run_loop.cc @@ -40,7 +40,7 @@ void RunLoop::Flush() { // thread will loop forever since time never increases. Since the clock is // simulated, 0ms can be used as the loop delay, which will process all // messages ready for execution. - int cms = rtc::GetClockForTesting() ? 0 : 1000; + int cms = GetClockForTesting() ? 0 : 1000; worker_thread_.ProcessMessages(cms); } @@ -62,12 +62,12 @@ bool RunLoop::FakeSocketServer::Wait(webrtc::TimeDelta max_wait_duration, void RunLoop::FakeSocketServer::WakeUp() {} -rtc::Socket* RunLoop::FakeSocketServer::CreateSocket(int family, int type) { +Socket* RunLoop::FakeSocketServer::CreateSocket(int family, int type) { return nullptr; } -RunLoop::WorkerThread::WorkerThread(rtc::SocketServer* ss) - : rtc::Thread(ss), tq_setter_(this) {} +RunLoop::WorkerThread::WorkerThread(SocketServer* ss) + : Thread(ss), tq_setter_(this) {} } // namespace test } // namespace webrtc diff --git a/test/run_loop.h b/test/run_loop.h index 8a2bf54402..17638f624e 100644 --- a/test/run_loop.h +++ b/test/run_loop.h @@ -39,7 +39,7 @@ class RunLoop { } private: - class FakeSocketServer : public rtc::SocketServer { + class FakeSocketServer : public SocketServer { public: FakeSocketServer(); ~FakeSocketServer(); @@ -50,15 +50,15 @@ class RunLoop { bool Wait(webrtc::TimeDelta max_wait_duration, bool process_io) override; void WakeUp() override; - rtc::Socket* CreateSocket(int family, int type) override; + Socket* CreateSocket(int family, int type) override; private: bool fail_next_wait_ = false; }; - class WorkerThread : public rtc::Thread { + class WorkerThread : public Thread { public: - explicit WorkerThread(rtc::SocketServer* ss); + explicit WorkerThread(SocketServer* ss); private: CurrentTaskQueueSetter tq_setter_; diff --git a/test/run_loop_unittest.cc b/test/run_loop_unittest.cc index 80f0bcbdcc..e6c747ac4f 100644 --- a/test/run_loop_unittest.cc +++ b/test/run_loop_unittest.cc @@ -10,8 +10,8 @@ #include "test/run_loop.h" +#include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" -#include "rtc_base/task_queue.h" #include "test/gtest.h" namespace webrtc { diff --git a/test/scenario/BUILD.gn b/test/scenario/BUILD.gn index c3b8847fd1..2cae7625cd 100644 --- a/test/scenario/BUILD.gn +++ b/test/scenario/BUILD.gn @@ -77,22 +77,31 @@ if (rtc_include_tests && !build_with_chromium) { "../:test_common", "../:test_support", "../:video_test_common", + "../..//test/network:simulated_network", "../../api:array_view", "../../api:create_frame_generator", "../../api:fec_controller_api", "../../api:frame_generator_api", + "../../api:libjingle_logging_api", "../../api:libjingle_peerconnection_api", "../../api:rtc_event_log_output_file", + "../../api:rtp_headers", "../../api:rtp_parameters", + "../../api:scoped_refptr", "../../api:sequence_checker", "../../api:time_controller", - "../../api:time_controller", "../../api:transport_api", + "../../api/audio:audio_device", + "../../api/audio:builtin_audio_processing_builder", + "../../api/audio_codecs:audio_codecs_api", "../../api/audio_codecs:builtin_audio_decoder_factory", "../../api/audio_codecs:builtin_audio_encoder_factory", + "../../api/environment", + "../../api/environment:environment_factory", 
"../../api/rtc_event_log", "../../api/rtc_event_log:rtc_event_log_factory", "../../api/task_queue", + "../../api/test/network_emulation", "../../api/test/video:function_video_factory", "../../api/transport:network_control", "../../api/units:data_rate", @@ -108,13 +117,12 @@ if (rtc_include_tests && !build_with_chromium) { "../../call", "../../call:call_interfaces", "../../call:rtp_sender", - "../../call:simulated_network", - "../../call:video_stream_api", + "../../call:video_receive_stream_api", + "../../call:video_send_stream_api", "../../common_video", "../../media:media_constants", "../../media:rtc_audio_video", "../../media:rtc_internal_video_codecs", - "../../media:rtc_media_base", "../../modules/audio_device", "../../modules/audio_device:mock_audio_device", "../../modules/audio_device:test_audio_device_module", @@ -127,13 +135,16 @@ if (rtc_include_tests && !build_with_chromium) { "../../modules/video_coding:video_codec_interface", "../../modules/video_coding:video_coding_utility", "../../modules/video_coding:webrtc_h264", - "../../modules/video_coding:webrtc_multiplex", "../../modules/video_coding:webrtc_vp8", "../../modules/video_coding:webrtc_vp9", "../../modules/video_coding/svc:scalability_mode_util", "../../rtc_base:checks", "../../rtc_base:copy_on_write_buffer", + "../../rtc_base:macromagic", "../../rtc_base:net_helper", + "../../rtc_base:net_helpers", + "../../rtc_base:network_constants", + "../../rtc_base:network_route", "../../rtc_base:refcount", "../../rtc_base:rtc_base_tests_utils", "../../rtc_base:rtc_event", @@ -141,25 +152,24 @@ if (rtc_include_tests && !build_with_chromium) { "../../rtc_base:rtc_stats_counters", "../../rtc_base:safe_minmax", "../../rtc_base:socket_address", + "../../rtc_base:stringutils", "../../rtc_base:task_queue_for_test", "../../rtc_base:threading", + "../../rtc_base/network:sent_packet", "../../rtc_base/synchronization:mutex", "../../rtc_base/task_utils:repeating_task", "../../system_wrappers", - "../../system_wrappers:field_trial", "../../video/config:streams_config", "../logging:log_writer", "../network:emulated_network", "../time_controller", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/cleanup", "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", "//third_party/abseil-cpp/absl/functional:any_invocable", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (is_android) { deps += [ "../../modules/video_coding:android_codec_factory_helper" ] @@ -193,6 +203,7 @@ if (rtc_include_tests && !build_with_chromium) { "../../system_wrappers:field_trial", "../../test:field_trial", "../../test:test_support", + "../../test:video_test_constants", "../logging:log_writer", "//testing/gmock", ] diff --git a/test/scenario/audio_stream.cc b/test/scenario/audio_stream.cc index 7715555e23..52765f5935 100644 --- a/test/scenario/audio_stream.cc +++ b/test/scenario/audio_stream.cc @@ -9,18 +9,35 @@ */ #include "test/scenario/audio_stream.h" -#include "absl/memory/memory.h" -#include "test/call_test.h" +#include +#include +#include +#include + +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/call/transport.h" +#include "api/media_types.h" +#include "api/rtp_headers.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include 
"call/audio_receive_stream.h" +#include "call/audio_send_stream.h" +#include "rtc_base/checks.h" +#include "rtc_base/strings/string_builder.h" +#include "test/scenario/call_client.h" +#include "test/scenario/column_printer.h" +#include "test/scenario/scenario_config.h" #include "test/video_test_constants.h" #if WEBRTC_ENABLE_PROTOBUF -RTC_PUSH_IGNORING_WUNDEF() #ifdef WEBRTC_ANDROID_PLATFORM_BUILD #include "external/webrtc/webrtc/modules/audio_coding/audio_network_adaptor/config.pb.h" #else #include "modules/audio_coding/audio_network_adaptor/config.pb.h" #endif -RTC_POP_IGNORING_WUNDEF() #endif namespace webrtc { @@ -31,7 +48,7 @@ enum : int { // The first valid value is 1. kAbsSendTimeExtensionId }; -absl::optional CreateAdaptationString( +std::optional CreateAdaptationString( AudioStreamConfig::NetworkAdaptation config) { #if WEBRTC_ENABLE_PROTOBUF @@ -63,7 +80,7 @@ absl::optional CreateAdaptationString( RTC_LOG(LS_ERROR) << "audio_network_adaptation is enabled" " but WEBRTC_ENABLE_PROTOBUF is false.\n" "Ignoring settings."; - return absl::nullopt; + return std::nullopt; #endif // WEBRTC_ENABLE_PROTOBUF } } // namespace @@ -85,13 +102,13 @@ std::vector GetAudioRtpExtensions( SendAudioStream::SendAudioStream( CallClient* sender, AudioStreamConfig config, - rtc::scoped_refptr encoder_factory, + scoped_refptr encoder_factory, Transport* send_transport) : sender_(sender), config_(config) { AudioSendStream::Config send_config(send_transport); ssrc_ = sender->GetNextAudioSsrc(); send_config.rtp.ssrc = ssrc_; - SdpAudioFormat::Parameters sdp_params; + CodecParameterMap sdp_params; if (config.source.channels == 2) sdp_params["stereo"] = "1"; if (config.encoder.initial_frame_length != TimeDelta::Millis(20)) @@ -134,9 +151,6 @@ SendAudioStream::SendAudioStream( send_config.max_bitrate_bps = max_rate.bps(); } - if (config.stream.in_bandwidth_estimation) { - send_config.send_codec_spec->transport_cc_enabled = true; - } send_config.rtp.extensions = GetAudioRtpExtensions(config); sender_->SendTask([&] { @@ -169,7 +183,7 @@ void SendAudioStream::SetMuted(bool mute) { ColumnPrinter SendAudioStream::StatsPrinter() { return ColumnPrinter::Lambda( "audio_target_rate", - [this](rtc::SimpleStringBuilder& sb) { + [this](SimpleStringBuilder& sb) { sender_->SendTask([this, &sb] { AudioSendStream::Stats stats = send_stream_->GetStats(); sb.AppendFormat("%.0lf", stats.target_bitrate_bps / 8.0); @@ -182,7 +196,7 @@ ReceiveAudioStream::ReceiveAudioStream( CallClient* receiver, AudioStreamConfig config, SendAudioStream* send_stream, - rtc::scoped_refptr decoder_factory, + scoped_refptr decoder_factory, Transport* feedback_transport) : receiver_(receiver), config_(config) { AudioReceiveStreamInterface::Config recv_config; @@ -226,9 +240,9 @@ AudioStreamPair::~AudioStreamPair() = default; AudioStreamPair::AudioStreamPair( CallClient* sender, - rtc::scoped_refptr encoder_factory, + scoped_refptr encoder_factory, CallClient* receiver, - rtc::scoped_refptr decoder_factory, + scoped_refptr decoder_factory, AudioStreamConfig config) : config_(config), send_stream_(sender, config, encoder_factory, sender->transport_.get()), diff --git a/test/scenario/audio_stream.h b/test/scenario/audio_stream.h index cbaf9d29eb..de011170f4 100644 --- a/test/scenario/audio_stream.h +++ b/test/scenario/audio_stream.h @@ -41,7 +41,7 @@ class SendAudioStream { friend class ReceiveAudioStream; SendAudioStream(CallClient* sender, AudioStreamConfig config, - rtc::scoped_refptr encoder_factory, + scoped_refptr encoder_factory, Transport* 
send_transport); AudioSendStream* send_stream_ = nullptr; CallClient* const sender_; @@ -67,7 +67,7 @@ class ReceiveAudioStream { ReceiveAudioStream(CallClient* receiver, AudioStreamConfig config, SendAudioStream* send_stream, - rtc::scoped_refptr decoder_factory, + scoped_refptr decoder_factory, Transport* feedback_transport); AudioReceiveStreamInterface* receive_stream_ = nullptr; CallClient* const receiver_; @@ -90,9 +90,9 @@ class AudioStreamPair { private: friend class Scenario; AudioStreamPair(CallClient* sender, - rtc::scoped_refptr encoder_factory, + scoped_refptr encoder_factory, CallClient* receiver, - rtc::scoped_refptr decoder_factory, + scoped_refptr decoder_factory, AudioStreamConfig config); private: diff --git a/test/scenario/call_client.cc b/test/scenario/call_client.cc index d2019aebc7..b598c2878c 100644 --- a/test/scenario/call_client.cc +++ b/test/scenario/call_client.cc @@ -13,6 +13,9 @@ #include #include +#include "api/audio/builtin_audio_processing_builder.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/media_types.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/rtc_event_log/rtc_event_log_factory.h" @@ -40,14 +43,13 @@ const uint32_t kReceiverLocalAudioSsrc = 0x1234567; constexpr int kEventLogOutputIntervalMs = 5000; -CallClientFakeAudio InitAudio(TimeController* time_controller) { +CallClientFakeAudio InitAudio(const Environment& env) { CallClientFakeAudio setup; auto capturer = TestAudioDeviceModule::CreatePulsedNoiseCapturer(256, 48000); auto renderer = TestAudioDeviceModule::CreateDiscardRenderer(48000); setup.fake_audio_device = TestAudioDeviceModule::Create( - time_controller->GetTaskQueueFactory(), std::move(capturer), - std::move(renderer), 1.f); - setup.apm = AudioProcessingBuilder().Create(); + &env.task_queue_factory(), std::move(capturer), std::move(renderer), 1.f); + setup.apm = BuiltinAudioProcessingBuilder().Build(env); setup.fake_audio_device->Init(); AudioState::Config audio_state_config; audio_state_config.audio_mixer = AudioMixerImpl::Create(); @@ -59,38 +61,28 @@ CallClientFakeAudio InitAudio(TimeController* time_controller) { return setup; } -Call* CreateCall(TimeController* time_controller, - RtcEventLog* event_log, - CallClientConfig config, - LoggingNetworkControllerFactory* network_controller_factory, - rtc::scoped_refptr audio_state) { - CallConfig call_config(event_log); +std::unique_ptr CreateCall( + const Environment& env, + CallClientConfig config, + LoggingNetworkControllerFactory* network_controller_factory, + scoped_refptr audio_state) { + CallConfig call_config(env); call_config.bitrate_config.max_bitrate_bps = config.transport.rates.max_rate.bps_or(-1); call_config.bitrate_config.min_bitrate_bps = config.transport.rates.min_rate.bps(); call_config.bitrate_config.start_bitrate_bps = config.transport.rates.start_rate.bps(); - call_config.task_queue_factory = time_controller->GetTaskQueueFactory(); call_config.network_controller_factory = network_controller_factory; call_config.audio_state = audio_state; - call_config.pacer_burst_interval = config.pacer_burst_interval; - call_config.trials = config.field_trials; - Clock* clock = time_controller->GetClock(); - return Call::Create(call_config, clock, - RtpTransportControllerSendFactory().Create( - call_config.ExtractTransportConfig(), clock)); + return Call::Create(std::move(call_config)); } std::unique_ptr CreateEventLog( - TaskQueueFactory* task_queue_factory, - LogWriterFactoryInterface* log_writer_factory) { 
- if (!log_writer_factory) { - return std::make_unique(); - } - auto event_log = RtcEventLogFactory(task_queue_factory) - .CreateRtcEventLog(RtcEventLog::EncodingType::NewFormat); - bool success = event_log->StartLogging(log_writer_factory->Create(".rtc.dat"), + const Environment& env, + LogWriterFactoryInterface& log_writer_factory) { + auto event_log = RtcEventLogFactory().Create(env); + bool success = event_log->StartLogging(log_writer_factory.Create(".rtc.dat"), kEventLogOutputIntervalMs); RTC_CHECK(success); return event_log; @@ -218,22 +210,25 @@ CallClient::CallClient( std::unique_ptr log_writer_factory, CallClientConfig config) : time_controller_(time_controller), - clock_(time_controller->GetClock()), + env_(CreateEnvironment(time_controller_->CreateTaskQueueFactory(), + time_controller_->GetClock())), log_writer_factory_(std::move(log_writer_factory)), network_controller_factory_(log_writer_factory_.get(), config.transport), - task_queue_(time_controller->GetTaskQueueFactory()->CreateTaskQueue( + task_queue_(env_.task_queue_factory().CreateTaskQueue( "CallClient", TaskQueueFactory::Priority::NORMAL)) { - config.field_trials = &field_trials_; SendTask([this, config] { - event_log_ = CreateEventLog(time_controller_->GetTaskQueueFactory(), - log_writer_factory_.get()); - fake_audio_setup_ = InitAudio(time_controller_); - - call_.reset(CreateCall(time_controller_, event_log_.get(), config, - &network_controller_factory_, - fake_audio_setup_.audio_state)); - transport_ = std::make_unique(clock_, call_.get()); + if (log_writer_factory_ != nullptr) { + EnvironmentFactory env_factory(env_); + env_factory.Set(CreateEventLog(env_, *log_writer_factory_)); + env_ = env_factory.Create(); + } + fake_audio_setup_ = InitAudio(env_); + + call_ = CreateCall(env_, config, &network_controller_factory_, + fake_audio_setup_.audio_state); + transport_ = + std::make_unique(&env_.clock(), call_.get()); }); } @@ -241,17 +236,16 @@ CallClient::~CallClient() { SendTask([&] { call_.reset(); fake_audio_setup_ = {}; - rtc::Event done; - event_log_->StopLogging([&done] { done.Set(); }); - done.Wait(rtc::Event::kForever); - event_log_.reset(); + Event done; + env_.event_log().StopLogging([&done] { done.Set(); }); + done.Wait(Event::kForever); }); } ColumnPrinter CallClient::StatsPrinter() { return ColumnPrinter::Lambda( "pacer_delay call_send_bw", - [this](rtc::SimpleStringBuilder& sb) { + [this](SimpleStringBuilder& sb) { Call::Stats call_stats = call_->GetStats(); sb.AppendFormat("%.3lf %.0lf", call_stats.pacer_delay_ms / 1000.0, call_stats.send_bandwidth_bps / 8.0); @@ -282,7 +276,7 @@ DataRate CallClient::padding_rate() const { void CallClient::SetRemoteBitrate(DataRate bitrate) { RemoteBitrateReport msg; msg.bandwidth = bitrate; - msg.receive_time = clock_->CurrentTime(); + msg.receive_time = env_.clock().CurrentTime(); network_controller_factory_.SetRemoteBitrateEstimate(msg); } @@ -294,14 +288,14 @@ void CallClient::UpdateBitrateConstraints( } void CallClient::SetAudioReceiveRtpHeaderExtensions( - rtc::ArrayView extensions) { + ArrayView extensions) { SendTask([this, &extensions]() { audio_extensions_ = RtpHeaderExtensionMap(extensions); }); } void CallClient::SetVideoReceiveRtpHeaderExtensions( - rtc::ArrayView extensions) { + ArrayView extensions) { SendTask([this, &extensions]() { video_extensions_ = RtpHeaderExtensionMap(extensions); }); @@ -370,6 +364,10 @@ void CallClient::SendTask(std::function task) { task_queue_.SendTask(std::move(task)); } +void CallClient::UpdateNetworkAdapterId(int adapter_id) 
{ + transport_->UpdateAdapterId(adapter_id); +} + int16_t CallClient::Bind(EmulatedEndpoint* endpoint) { uint16_t port = endpoint->BindReceiver(0, this).value(); endpoints_.push_back({endpoint, port}); diff --git a/test/scenario/call_client.h b/test/scenario/call_client.h index 3717a7e796..6237584a6c 100644 --- a/test/scenario/call_client.h +++ b/test/scenario/call_client.h @@ -17,12 +17,13 @@ #include #include "api/array_view.h" +#include "api/audio/audio_device.h" +#include "api/environment/environment.h" #include "api/rtc_event_log/rtc_event_log.h" #include "api/rtp_parameters.h" #include "api/test/time_controller.h" #include "api/units/data_rate.h" #include "call/call.h" -#include "modules/audio_device/include/audio_device.h" #include "modules/congestion_controller/goog_cc/test/goog_cc_printer.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "rtc_base/task_queue_for_test.h" @@ -95,9 +96,9 @@ class LoggingNetworkControllerFactory }; struct CallClientFakeAudio { - rtc::scoped_refptr apm; - rtc::scoped_refptr fake_audio_device; - rtc::scoped_refptr audio_state; + scoped_refptr apm; + scoped_refptr fake_audio_device; + scoped_refptr audio_state; }; // CallClient represents a participant in a call scenario. It is created by the // Scenario class and is used as sender and receiver when setting up a media @@ -124,10 +125,11 @@ class CallClient : public EmulatedNetworkReceiverInterface { void UpdateBitrateConstraints(const BitrateConstraints& constraints); void SetRemoteBitrate(DataRate bitrate); - void SetAudioReceiveRtpHeaderExtensions( - rtc::ArrayView extensions); - void SetVideoReceiveRtpHeaderExtensions( - rtc::ArrayView extensions); + void SetAudioReceiveRtpHeaderExtensions(ArrayView extensions); + void SetVideoReceiveRtpHeaderExtensions(ArrayView extensions); + + // Sets the network adapter id used next time the network route changes. + void UpdateNetworkAdapterId(int adapter_id); void OnPacketReceived(EmulatedIpPacket packet) override; std::unique_ptr GetLogWriter(std::string name); @@ -156,9 +158,8 @@ class CallClient : public EmulatedNetworkReceiverInterface { void UnBind(); TimeController* const time_controller_; - Clock* clock_; + Environment env_; const std::unique_ptr log_writer_factory_; - std::unique_ptr event_log_; LoggingNetworkControllerFactory network_controller_factory_; CallClientFakeAudio fake_audio_setup_; std::unique_ptr call_; @@ -175,8 +176,6 @@ class CallClient : public EmulatedNetworkReceiverInterface { std::map ssrc_media_types_; // Defined last so it's destroyed first. 
TaskQueueForTest task_queue_; - - const FieldTrialBasedConfig field_trials_; }; class CallClientPair { diff --git a/test/scenario/column_printer.cc b/test/scenario/column_printer.cc index 661c83bd0d..046925ceaa 100644 --- a/test/scenario/column_printer.cc +++ b/test/scenario/column_printer.cc @@ -17,19 +17,19 @@ ColumnPrinter::~ColumnPrinter() = default; ColumnPrinter::ColumnPrinter( const char* headers, - std::function printer, + std::function printer, size_t max_length) : headers_(headers), printer_(printer), max_length_(max_length) {} ColumnPrinter ColumnPrinter::Fixed(const char* headers, std::string fields) { return ColumnPrinter( - headers, [fields](rtc::SimpleStringBuilder& sb) { sb << fields; }, + headers, [fields](SimpleStringBuilder& sb) { sb << fields; }, fields.size()); } ColumnPrinter ColumnPrinter::Lambda( const char* headers, - std::function printer, + std::function printer, size_t max_length) { return ColumnPrinter(headers, printer, max_length); } @@ -59,7 +59,7 @@ void StatesPrinter::PrintHeaders() { void StatesPrinter::PrintRow() { // Note that this is run for null output to preserve side effects, this allows // setting break points etc. - rtc::SimpleStringBuilder sb(buffer_); + SimpleStringBuilder sb(buffer_); printers_[0].printer_(sb); for (size_t i = 1; i < printers_.size(); ++i) { sb << ' '; diff --git a/test/scenario/column_printer.h b/test/scenario/column_printer.h index 529f4597ec..0f31b651c8 100644 --- a/test/scenario/column_printer.h +++ b/test/scenario/column_printer.h @@ -26,18 +26,18 @@ class ColumnPrinter { static ColumnPrinter Fixed(const char* headers, std::string fields); static ColumnPrinter Lambda( const char* headers, - std::function printer, + std::function printer, size_t max_length = 256); protected: friend class StatesPrinter; const char* headers_; - std::function printer_; + std::function printer_; size_t max_length_; private: ColumnPrinter(const char* headers, - std::function printer, + std::function printer, size_t max_length); }; diff --git a/test/scenario/network_node.cc b/test/scenario/network_node.cc index 6265454263..e64cf62cba 100644 --- a/test/scenario/network_node.cc +++ b/test/scenario/network_node.cc @@ -9,13 +9,36 @@ */ #include "test/scenario/network_node.h" -#include +#include +#include #include -#include +#include #include "absl/cleanup/cleanup.h" +#include "api/array_view.h" +#include "api/call/transport.h" +#include "api/sequence_checker.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "call/call.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/event.h" #include "rtc_base/net_helper.h" -#include "rtc_base/numerics/safe_minmax.h" +#include "rtc_base/net_helpers.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_constants.h" +#include "rtc_base/network_route.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" +#include "system_wrappers/include/clock.h" +#include "test/network/network_emulation.h" +#include "test/network/simulated_network.h" +#include "test/scenario/column_printer.h" +#include "test/scenario/scenario_config.h" namespace webrtc { namespace test { @@ -24,7 +47,7 @@ constexpr char kDummyTransportName[] = "dummy"; SimulatedNetwork::Config CreateSimulationConfig( NetworkSimulationConfig config) { SimulatedNetwork::Config sim_config; - 
sim_config.link_capacity_kbps = config.bandwidth.kbps_or(0); + sim_config.link_capacity = config.bandwidth; sim_config.loss_percent = config.loss_rate * 100; sim_config.queue_delay_ms = config.delay.ms(); sim_config.delay_standard_deviation_ms = config.delay_std_dev.ms(); @@ -33,6 +56,11 @@ SimulatedNetwork::Config CreateSimulationConfig( config.packet_queue_length_limit.value_or(0); return sim_config; } + +RouteEndpoint CreateRouteEndpoint(uint16_t network_id, uint16_t adapter_id) { + return RouteEndpoint(ADAPTER_TYPE_UNKNOWN, adapter_id, network_id, + /* uses_turn = */ false); +} } // namespace SimulationNode::SimulationNode(NetworkSimulationConfig config, @@ -59,8 +87,7 @@ void SimulationNode::PauseTransmissionUntil(Timestamp until) { ColumnPrinter SimulationNode::ConfigPrinter() const { return ColumnPrinter::Lambda( - "propagation_delay capacity loss_rate", - [this](rtc::SimpleStringBuilder& sb) { + "propagation_delay capacity loss_rate", [this](SimpleStringBuilder& sb) { sb.AppendFormat("%.3lf %.0lf %.2lf", config_.delay.seconds(), config_.bandwidth.bps() / 8.0, config_.loss_rate); }); @@ -68,33 +95,35 @@ ColumnPrinter SimulationNode::ConfigPrinter() const { NetworkNodeTransport::NetworkNodeTransport(Clock* sender_clock, Call* sender_call) - : sender_clock_(sender_clock), sender_call_(sender_call) {} + : sender_clock_(sender_clock), sender_call_(sender_call) { + sequence_checker_.Detach(); +} NetworkNodeTransport::~NetworkNodeTransport() = default; -bool NetworkNodeTransport::SendRtp(rtc::ArrayView packet, +bool NetworkNodeTransport::SendRtp(ArrayView packet, const PacketOptions& options) { int64_t send_time_ms = sender_clock_->TimeInMilliseconds(); - rtc::SentPacket sent_packet; + SentPacketInfo sent_packet; sent_packet.packet_id = options.packet_id; sent_packet.info.included_in_feedback = options.included_in_feedback; sent_packet.info.included_in_allocation = options.included_in_allocation; sent_packet.send_time_ms = send_time_ms; sent_packet.info.packet_size_bytes = packet.size(); - sent_packet.info.packet_type = rtc::PacketType::kData; + sent_packet.info.packet_type = PacketType::kData; sender_call_->OnSentPacket(sent_packet); MutexLock lock(&mutex_); if (!endpoint_) return false; - rtc::CopyOnWriteBuffer buffer(packet); + CopyOnWriteBuffer buffer(packet); endpoint_->SendPacket(local_address_, remote_address_, buffer, packet_overhead_.bytes()); return true; } -bool NetworkNodeTransport::SendRtcp(rtc::ArrayView packet) { - rtc::CopyOnWriteBuffer buffer(packet); +bool NetworkNodeTransport::SendRtcp(ArrayView packet) { + CopyOnWriteBuffer buffer(packet); MutexLock lock(&mutex_); if (!endpoint_) return false; @@ -103,32 +132,41 @@ bool NetworkNodeTransport::SendRtcp(rtc::ArrayView packet) { return true; } +void NetworkNodeTransport::UpdateAdapterId(int adapter_id) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + adapter_id_ = adapter_id; +} + void NetworkNodeTransport::Connect(EmulatedEndpoint* endpoint, - const rtc::SocketAddress& receiver_address, + const SocketAddress& receiver_address, DataSize packet_overhead) { - rtc::NetworkRoute route; + RTC_DCHECK_RUN_ON(&sequence_checker_); + NetworkRoute route; route.connected = true; // We assume that the address will be unique in the lower bytes. 
- route.local = rtc::RouteEndpoint::CreateWithNetworkId(static_cast( - receiver_address.ipaddr().v4AddressAsHostOrderInteger())); - route.remote = rtc::RouteEndpoint::CreateWithNetworkId(static_cast( - receiver_address.ipaddr().v4AddressAsHostOrderInteger())); + route.local = CreateRouteEndpoint( + static_cast( + receiver_address.ipaddr().v4AddressAsHostOrderInteger()), + adapter_id_); + route.remote = CreateRouteEndpoint( + static_cast( + receiver_address.ipaddr().v4AddressAsHostOrderInteger()), + adapter_id_); route.packet_overhead = packet_overhead.bytes() + - receiver_address.ipaddr().overhead() + - cricket::kUdpHeaderSize; + receiver_address.ipaddr().overhead() + kUdpHeaderSize; { // Only IPv4 address is supported. RTC_CHECK_EQ(receiver_address.family(), AF_INET); MutexLock lock(&mutex_); endpoint_ = endpoint; - local_address_ = rtc::SocketAddress(endpoint_->GetPeerLocalAddress(), 0); + local_address_ = SocketAddress(endpoint_->GetPeerLocalAddress(), 0); remote_address_ = receiver_address; packet_overhead_ = packet_overhead; current_network_route_ = route; } // Must be called from the worker thread. - rtc::Event event; + Event event; auto cleanup = absl::MakeCleanup([&event] { event.Set(); }); auto&& task = [this, &route, cleanup = std::move(cleanup)] { sender_call_->GetTransportControllerSend()->OnNetworkRouteChanged( diff --git a/test/scenario/network_node.h b/test/scenario/network_node.h index 614dc132e1..e2f6f59c39 100644 --- a/test/scenario/network_node.h +++ b/test/scenario/network_node.h @@ -10,19 +10,24 @@ #ifndef TEST_SCENARIO_NETWORK_NODE_H_ #define TEST_SCENARIO_NETWORK_NODE_H_ -#include -#include +#include +#include #include -#include -#include +#include "api/array_view.h" #include "api/call/transport.h" +#include "api/sequence_checker.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/units/data_size.h" #include "api/units/timestamp.h" #include "call/call.h" -#include "call/simulated_network.h" -#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/network_route.h" +#include "rtc_base/socket_address.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" +#include "system_wrappers/include/clock.h" #include "test/network/network_emulation.h" +#include "test/network/simulated_network.h" #include "test/scenario/column_printer.h" #include "test/scenario/scenario_config.h" @@ -53,12 +58,14 @@ class NetworkNodeTransport : public Transport { NetworkNodeTransport(Clock* sender_clock, Call* sender_call); ~NetworkNodeTransport() override; - bool SendRtp(rtc::ArrayView packet, + void UpdateAdapterId(int adapter_id); + + bool SendRtp(ArrayView packet, const PacketOptions& options) override; - bool SendRtcp(rtc::ArrayView packet) override; + bool SendRtcp(ArrayView packet) override; void Connect(EmulatedEndpoint* endpoint, - const rtc::SocketAddress& receiver_address, + const SocketAddress& receiver_address, DataSize packet_overhead); void Disconnect(); @@ -68,14 +75,17 @@ class NetworkNodeTransport : public Transport { } private: + SequenceChecker sequence_checker_; + int adapter_id_ RTC_GUARDED_BY(sequence_checker_) = 0; + Mutex mutex_; Clock* const sender_clock_; Call* const sender_call_; EmulatedEndpoint* endpoint_ RTC_GUARDED_BY(mutex_) = nullptr; - rtc::SocketAddress local_address_ RTC_GUARDED_BY(mutex_); - rtc::SocketAddress remote_address_ RTC_GUARDED_BY(mutex_); + SocketAddress local_address_ RTC_GUARDED_BY(mutex_); + SocketAddress remote_address_ RTC_GUARDED_BY(mutex_); DataSize packet_overhead_ 
RTC_GUARDED_BY(mutex_) = DataSize::Zero(); - rtc::NetworkRoute current_network_route_ RTC_GUARDED_BY(mutex_); + NetworkRoute current_network_route_ RTC_GUARDED_BY(mutex_); }; } // namespace test } // namespace webrtc diff --git a/test/scenario/performance_stats.h b/test/scenario/performance_stats.h index eca1ee79b8..7025ae6003 100644 --- a/test/scenario/performance_stats.h +++ b/test/scenario/performance_stats.h @@ -21,8 +21,8 @@ namespace webrtc { namespace test { struct VideoFramePair { - rtc::scoped_refptr captured; - rtc::scoped_refptr decoded; + scoped_refptr captured; + scoped_refptr decoded; Timestamp capture_time = Timestamp::MinusInfinity(); Timestamp decoded_time = Timestamp::PlusInfinity(); Timestamp render_time = Timestamp::PlusInfinity(); diff --git a/test/scenario/scenario.cc b/test/scenario/scenario.cc index 98f59e6c7d..0201d069ce 100644 --- a/test/scenario/scenario.cc +++ b/test/scenario/scenario.cc @@ -20,6 +20,7 @@ #include "rtc_base/socket_address.h" #include "test/logging/file_log_writer.h" #include "test/network/network_emulation.h" +#include "test/network/network_emulation_manager.h" #include "test/scenario/video_stream.h" #include "test/testsupport/file_utils.h" @@ -66,8 +67,8 @@ Scenario::Scenario( std::unique_ptr log_writer_factory, bool real_time) : log_writer_factory_(std::move(log_writer_factory)), - network_manager_(real_time ? TimeMode::kRealTime : TimeMode::kSimulated, - EmulatedNetworkStatsGatheringMode::kDefault), + network_manager_({.time_mode = real_time ? TimeMode::kRealTime + : TimeMode::kSimulated}), clock_(network_manager_.time_controller()->GetClock()), audio_decoder_factory_(CreateBuiltinAudioDecoderFactory()), audio_encoder_factory_(CreateBuiltinAudioEncoderFactory()), @@ -88,7 +89,7 @@ Scenario::~Scenario() { ColumnPrinter Scenario::TimePrinter() { return ColumnPrinter::Lambda( "time", - [this](rtc::SimpleStringBuilder& sb) { + [this](SimpleStringBuilder& sb) { sb.AppendFormat("%.3lf", Now().seconds()); }, 32); @@ -163,7 +164,7 @@ void Scenario::ChangeRoute(std::pair clients, DataSize overhead) { EmulatedRoute* route = network_manager_.CreateRoute(over_nodes); uint16_t port = clients.second->Bind(route->to); - auto addr = rtc::SocketAddress(route->to->GetPeerLocalAddress(), port); + auto addr = SocketAddress(route->to->GetPeerLocalAddress(), port); clients.first->transport_->Connect(route->from, addr, overhead); } diff --git a/test/scenario/scenario.h b/test/scenario/scenario.h index cad9210002..92ce202c1e 100644 --- a/test/scenario/scenario.h +++ b/test/scenario/scenario.h @@ -176,8 +176,8 @@ class Scenario { std::vector> simulation_nodes_; std::vector> printers_; - rtc::scoped_refptr audio_decoder_factory_; - rtc::scoped_refptr audio_encoder_factory_; + scoped_refptr audio_decoder_factory_; + scoped_refptr audio_encoder_factory_; Timestamp start_time_ = Timestamp::PlusInfinity(); // Defined last so it's destroyed first. diff --git a/test/scenario/scenario_config.h b/test/scenario/scenario_config.h index 9ce99401d7..b47a220cc7 100644 --- a/test/scenario/scenario_config.h +++ b/test/scenario/scenario_config.h @@ -12,9 +12,9 @@ #include +#include #include -#include "absl/types/optional.h" #include "api/fec_controller.h" #include "api/rtp_parameters.h" #include "api/test/frame_generator_interface.h" @@ -57,7 +57,6 @@ struct CallClientConfig { // The number of bites that can be sent in one burst is pacer_burst_interval * // current bwe. 40ms is the default Chrome setting. 
TimeDelta pacer_burst_interval = TimeDelta::Millis(40); - const FieldTrialsView* field_trials = nullptr; }; struct PacketStreamConfig { @@ -91,8 +90,8 @@ struct VideoStreamConfig { struct Images { struct Crop { TimeDelta scroll_duration = TimeDelta::Seconds(0); - absl::optional width; - absl::optional height; + std::optional width; + std::optional height; } crop; int width = 1850; int height = 1110; @@ -133,11 +132,11 @@ struct VideoStreamConfig { using Codec = VideoCodecType; Codec codec = Codec::kVideoCodecGeneric; - absl::optional max_data_rate; - absl::optional min_data_rate; - absl::optional max_framerate; + std::optional max_data_rate; + std::optional min_data_rate; + std::optional max_framerate; // Counted in frame count. - absl::optional key_frame_interval = 3000; + std::optional key_frame_interval = 3000; bool frame_dropping = true; struct SingleLayer { bool denoising = true; @@ -200,8 +199,8 @@ struct AudioStreamConfig { bool enable_dtx = false; DataRate fixed_rate = DataRate::KilobitsPerSec(32); // Overrides fixed rate. - absl::optional min_rate; - absl::optional max_rate; + std::optional min_rate; + std::optional max_rate; TimeDelta initial_frame_length = TimeDelta::Millis(20); } encoder; struct Stream { @@ -222,7 +221,7 @@ struct NetworkSimulationConfig { TimeDelta delay = TimeDelta::Zero(); TimeDelta delay_std_dev = TimeDelta::Zero(); double loss_rate = 0; - absl::optional packet_queue_length_limit; + std::optional packet_queue_length_limit; DataSize packet_overhead = DataSize::Zero(); }; } // namespace test diff --git a/test/scenario/stats_collection.cc b/test/scenario/stats_collection.cc index e32696de71..4737a953e6 100644 --- a/test/scenario/stats_collection.cc +++ b/test/scenario/stats_collection.cc @@ -10,9 +10,27 @@ #include "test/scenario/stats_collection.h" +#include +#include +#include +#include +#include +#include + +#include "api/rtc_event_log_output.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "call/audio_receive_stream.h" +#include "call/call.h" +#include "call/video_receive_stream.h" +#include "call/video_send_stream.h" #include "common_video/libyuv/include/webrtc_libyuv.h" +#include "rtc_base/checks.h" #include "rtc_base/memory_usage.h" #include "rtc_base/thread.h" +#include "test/logging/log_writer.h" +#include "test/scenario/performance_stats.h" namespace webrtc { namespace test { @@ -139,7 +157,7 @@ void CallStatsCollector::AddStats(Call::Stats sample) { stats_.pacer_delay.AddSample(TimeDelta::Millis(sample.pacer_delay_ms)); if (sample.rtt_ms > 0) stats_.round_trip_time.AddSample(TimeDelta::Millis(sample.rtt_ms)); - stats_.memory_usage.AddSample(rtc::GetProcessResidentSizeBytes()); + stats_.memory_usage.AddSample(GetProcessResidentSizeBytes()); } void AudioReceiveStatsCollector::AddStats( diff --git a/test/scenario/stats_collection.h b/test/scenario/stats_collection.h index 1f5d8daea7..1be0508060 100644 --- a/test/scenario/stats_collection.h +++ b/test/scenario/stats_collection.h @@ -12,8 +12,8 @@ #include #include +#include -#include "absl/types/optional.h" #include "call/call.h" #include "rtc_base/thread.h" #include "test/logging/log_writer.h" @@ -24,7 +24,7 @@ namespace test { struct VideoQualityAnalyzerConfig { double psnr_coverage = 1; - rtc::Thread* thread = nullptr; + Thread* thread = nullptr; }; class VideoLayerAnalyzer { @@ -59,7 +59,7 @@ class VideoQualityAnalyzer { const VideoQualityAnalyzerConfig config_; std::map layer_analyzers_; const std::unique_ptr writer_; - 
absl::optional cached_; + std::optional cached_; }; class CallStatsCollector { diff --git a/test/scenario/video_frame_matcher.cc b/test/scenario/video_frame_matcher.cc index dc8cd59756..27e580e3ec 100644 --- a/test/scenario/video_frame_matcher.cc +++ b/test/scenario/video_frame_matcher.cc @@ -62,7 +62,7 @@ void VideoFrameMatcher::OnDecodedFrame(const VideoFrame& frame, int layer_id, Timestamp render_time, Timestamp at_time) { - rtc::scoped_refptr decoded(new DecodedFrame{}); + scoped_refptr decoded(new DecodedFrame{}); decoded->decoded_time = at_time; decoded->render_time = render_time; decoded->frame = frame.video_frame_buffer(); @@ -143,7 +143,7 @@ void CapturedFrameTap::OnDiscardedFrame() { ForwardingCapturedFrameTap::ForwardingCapturedFrameTap( Clock* clock, VideoFrameMatcher* matcher, - rtc::VideoSourceInterface* source) + VideoSourceInterface* source) : clock_(clock), matcher_(matcher), source_(source) {} void ForwardingCapturedFrameTap::OnFrame(const VideoFrame& frame) { @@ -159,7 +159,7 @@ void ForwardingCapturedFrameTap::OnDiscardedFrame() { void ForwardingCapturedFrameTap::AddOrUpdateSink( VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { + const VideoSinkWants& wants) { if (!sink_) sink_ = sink; RTC_DCHECK_EQ(sink_, sink); diff --git a/test/scenario/video_frame_matcher.h b/test/scenario/video_frame_matcher.h index a3aa85447d..343909a654 100644 --- a/test/scenario/video_frame_matcher.h +++ b/test/scenario/video_frame_matcher.h @@ -48,24 +48,24 @@ class VideoFrameMatcher { int id; Timestamp decoded_time = Timestamp::PlusInfinity(); Timestamp render_time = Timestamp::PlusInfinity(); - rtc::scoped_refptr frame; - rtc::scoped_refptr thumb; + scoped_refptr frame; + scoped_refptr thumb; int repeat_count = 0; }; - using DecodedFrame = rtc::FinalRefCountedObject; + using DecodedFrame = FinalRefCountedObject; struct CapturedFrame { int id; Timestamp capture_time = Timestamp::PlusInfinity(); - rtc::scoped_refptr frame; - rtc::scoped_refptr thumb; + scoped_refptr frame; + scoped_refptr thumb; double best_score = INFINITY; - rtc::scoped_refptr best_decode; + scoped_refptr best_decode; bool matched = false; }; struct VideoLayer { int layer_id; std::deque captured_frames; - rtc::scoped_refptr last_decode; + scoped_refptr last_decode; int next_decoded_id = 1; }; void HandleMatch(CapturedFrame captured, int layer_id); @@ -76,7 +76,7 @@ class VideoFrameMatcher { TaskQueueForTest task_queue_; }; -class CapturedFrameTap : public rtc::VideoSinkInterface { +class CapturedFrameTap : public VideoSinkInterface { public: CapturedFrameTap(Clock* clock, VideoFrameMatcher* matcher); CapturedFrameTap(CapturedFrameTap&) = delete; @@ -91,13 +91,12 @@ class CapturedFrameTap : public rtc::VideoSinkInterface { int discarded_count_ = 0; }; -class ForwardingCapturedFrameTap - : public rtc::VideoSinkInterface, - public rtc::VideoSourceInterface { +class ForwardingCapturedFrameTap : public VideoSinkInterface, + public VideoSourceInterface { public: ForwardingCapturedFrameTap(Clock* clock, VideoFrameMatcher* matcher, - rtc::VideoSourceInterface* source); + VideoSourceInterface* source); ForwardingCapturedFrameTap(ForwardingCapturedFrameTap&) = delete; ForwardingCapturedFrameTap& operator=(ForwardingCapturedFrameTap&) = delete; @@ -107,18 +106,18 @@ class ForwardingCapturedFrameTap // VideoSourceInterface interface void AddOrUpdateSink(VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override; + const VideoSinkWants& wants) override; void RemoveSink(VideoSinkInterface* sink) override; 
private: Clock* const clock_; VideoFrameMatcher* const matcher_; - rtc::VideoSourceInterface* const source_; + VideoSourceInterface* const source_; VideoSinkInterface* sink_ = nullptr; int discarded_count_ = 0; }; -class DecodedFrameTap : public rtc::VideoSinkInterface { +class DecodedFrameTap : public VideoSinkInterface { public: DecodedFrameTap(Clock* clock, VideoFrameMatcher* matcher, int layer_id); // VideoSinkInterface interface diff --git a/test/scenario/video_stream.cc b/test/scenario/video_stream.cc index e082aa37c6..be211be9f0 100644 --- a/test/scenario/video_stream.cc +++ b/test/scenario/video_stream.cc @@ -40,7 +40,6 @@ enum : int { // The first valid value is 1. kVideoRotationRtpExtensionId, }; -constexpr int kDefaultMaxQp = cricket::WebRtcVideoSendChannel::kDefaultQpMax; uint8_t CodecTypeToPayloadType(VideoCodecType codec_type) { switch (codec_type) { case VideoCodecType::kVideoCodecGeneric: @@ -51,26 +50,14 @@ uint8_t CodecTypeToPayloadType(VideoCodecType codec_type) { return VideoTestConstants::kPayloadTypeVP9; case VideoCodecType::kVideoCodecH264: return VideoTestConstants::kPayloadTypeH264; + case VideoCodecType::kVideoCodecH265: + return VideoTestConstants::kPayloadTypeH265; default: RTC_DCHECK_NOTREACHED(); } return {}; } -std::string CodecTypeToCodecName(VideoCodecType codec_type) { - switch (codec_type) { - case VideoCodecType::kVideoCodecGeneric: - return ""; - case VideoCodecType::kVideoCodecVP8: - return cricket::kVp8CodecName; - case VideoCodecType::kVideoCodecVP9: - return cricket::kVp9CodecName; - case VideoCodecType::kVideoCodecH264: - return cricket::kH264CodecName; - default: - RTC_DCHECK_NOTREACHED(); - } - return {}; -} + VideoEncoderConfig::ContentType ConvertContentType( VideoStreamConfig::Encoder::ContentType content_type) { switch (content_type) { @@ -128,7 +115,7 @@ VideoSendStream::Config CreateVideoSendStreamConfig( } return send_config; } -rtc::scoped_refptr +scoped_refptr CreateVp9SpecificSettings(VideoStreamConfig video_config) { constexpr auto kScreen = VideoStreamConfig::Encoder::ContentType::kScreen; VideoStreamConfig::Encoder conf = video_config.encoder; @@ -156,11 +143,10 @@ CreateVp9SpecificSettings(VideoStreamConfig video_config) { vp9.automaticResizeOn = conf.single.automatic_scaling; vp9.denoisingOn = conf.single.denoising; } - return rtc::make_ref_counted( - vp9); + return make_ref_counted(vp9); } -rtc::scoped_refptr +scoped_refptr CreateVp8SpecificSettings(VideoStreamConfig config) { VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings(); vp8_settings.keyFrameInterval = config.encoder.key_frame_interval.value_or(0); @@ -177,11 +163,11 @@ CreateVp8SpecificSettings(VideoStreamConfig config) { vp8_settings.automaticResizeOn = config.encoder.single.automatic_scaling; vp8_settings.denoisingOn = config.encoder.single.denoising; } - return rtc::make_ref_counted( + return make_ref_counted( vp8_settings); } -rtc::scoped_refptr +scoped_refptr CreateH264SpecificSettings(VideoStreamConfig config) { RTC_DCHECK_EQ(config.encoder.simulcast_streams.size(), 1); RTC_DCHECK(config.encoder.simulcast_streams[0] == ScalabilityMode::kL1T1); @@ -191,7 +177,7 @@ CreateH264SpecificSettings(VideoStreamConfig config) { return nullptr; } -rtc::scoped_refptr +scoped_refptr CreateEncoderSpecificSettings(VideoStreamConfig config) { using Codec = VideoStreamConfig::Encoder::Codec; switch (config.encoder.codec) { @@ -203,9 +189,7 @@ CreateEncoderSpecificSettings(VideoStreamConfig config) { return CreateVp9SpecificSettings(config); case 
Codec::kVideoCodecGeneric: case Codec::kVideoCodecAV1: - return nullptr; - case Codec::kVideoCodecMultiplex: - RTC_DCHECK_NOTREACHED(); + case Codec::kVideoCodecH265: return nullptr; } } @@ -223,19 +207,6 @@ VideoEncoderConfig CreateVideoEncoderConfig(VideoStreamConfig config) { std::vector(encoder_config.number_of_streams); encoder_config.min_transmit_bitrate_bps = config.stream.pad_to_rate.bps(); - std::string cricket_codec = CodecTypeToCodecName(config.encoder.codec); - if (!cricket_codec.empty()) { - bool screenshare = config.encoder.content_type == - VideoStreamConfig::Encoder::ContentType::kScreen; - encoder_config.video_stream_factory = - rtc::make_ref_counted( - cricket_codec, kDefaultMaxQp, screenshare, screenshare, - encoder_info); - } else { - encoder_config.video_stream_factory = - rtc::make_ref_counted(); - } - // TODO(srte): Base this on encoder capabilities. encoder_config.max_bitrate_bps = config.encoder.max_data_rate.value_or(DataRate::KilobitsPerSec(10000)) @@ -291,7 +262,7 @@ std::unique_ptr CreateFrameGenerator( case Capture::kGenerator: return CreateSquareFrameGenerator( source.generator.width, source.generator.height, - source.generator.pixel_format, /*num_squares*/ absl::nullopt); + source.generator.pixel_format, /*num_squares*/ std::nullopt); case Capture::kVideoFile: RTC_CHECK(source.video_file.width && source.video_file.height); return CreateFromYuvFileFrameGenerator( @@ -311,7 +282,7 @@ VideoReceiveStreamInterface::Config CreateVideoReceiveStreamConfig( Transport* feedback_transport, VideoDecoderFactory* decoder_factory, VideoReceiveStreamInterface::Decoder decoder, - rtc::VideoSinkInterface* renderer, + VideoSinkInterface* renderer, uint32_t local_ssrc, uint32_t ssrc, uint32_t rtx_ssrc) { @@ -368,23 +339,23 @@ SendVideoStream::SendVideoStream(CallClient* sender, VideoFrameMatcher* matcher) : sender_(sender), config_(config) { video_capturer_ = std::make_unique( - sender_->clock_, CreateFrameGenerator(sender_->clock_, config.source), - config.source.framerate, - *sender->time_controller_->GetTaskQueueFactory()); + &sender_->env_.clock(), + CreateFrameGenerator(&sender_->env_.clock(), config.source), + config.source.framerate, sender_->env_.task_queue_factory()); video_capturer_->Init(); using Encoder = VideoStreamConfig::Encoder; using Codec = VideoStreamConfig::Encoder::Codec; switch (config.encoder.implementation) { case Encoder::Implementation::kFake: - encoder_factory_ = - std::make_unique([this]() { + encoder_factory_ = std::make_unique( + [this](const Environment& env, const SdpVideoFormat& format) { MutexLock lock(&mutex_); std::unique_ptr encoder; if (config_.encoder.codec == Codec::kVideoCodecVP8) { - encoder = std::make_unique(sender_->clock_); + encoder = std::make_unique(env); } else if (config_.encoder.codec == Codec::kVideoCodecGeneric) { - encoder = std::make_unique(sender_->clock_); + encoder = std::make_unique(env); } else { RTC_DCHECK_NOTREACHED(); } @@ -424,7 +395,8 @@ SendVideoStream::SendVideoStream(CallClient* sender, if (config.stream.fec_controller_factory) { send_stream_ = sender_->call_->CreateVideoSendStream( std::move(send_config), std::move(encoder_config), - config.stream.fec_controller_factory->CreateFecController()); + config.stream.fec_controller_factory->CreateFecController( + sender_->env_)); } else { send_stream_ = sender_->call_->CreateVideoSendStream( std::move(send_config), std::move(encoder_config)); @@ -432,7 +404,7 @@ SendVideoStream::SendVideoStream(CallClient* sender, if (matcher->Active()) { frame_tap_ = 
std::make_unique( - sender_->clock_, matcher, video_capturer_.get()); + &sender_->env_.clock(), matcher, video_capturer_.get()); send_stream_->SetSource(frame_tap_.get(), config.encoder.degradation_preference); } else { @@ -482,12 +454,8 @@ void SendVideoStream::UpdateConfig( } void SendVideoStream::UpdateActiveLayers(std::vector active_layers) { - sender_->task_queue_.PostTask([=] { + sender_->task_queue_.PostTask([this, active_layers] { MutexLock lock(&mutex_); - if (config_.encoder.codec == - VideoStreamConfig::Encoder::Codec::kVideoCodecVP8) { - send_stream_->StartPerRtpStream(active_layers); - } VideoEncoderConfig encoder_config = CreateVideoEncoderConfig(config_); RTC_CHECK_EQ(encoder_config.simulcast_layers.size(), active_layers.size()); for (size_t i = 0; i < encoder_config.simulcast_layers.size(); ++i) @@ -523,7 +491,7 @@ VideoSendStream::Stats SendVideoStream::GetStats() const { ColumnPrinter SendVideoStream::StatsPrinter() { return ColumnPrinter::Lambda( "video_target_rate video_sent_rate width height", - [this](rtc::SimpleStringBuilder& sb) { + [this](SimpleStringBuilder& sb) { VideoSendStream::Stats video_stats = send_stream_->GetStats(); int width = 0; int height = 0; @@ -559,10 +527,10 @@ ReceiveVideoStream::ReceiveVideoStream(CallClient* receiver, CodecTypeToPayloadString(config.encoder.codec)); size_t num_streams = config.encoder.simulcast_streams.size(); for (size_t i = 0; i < num_streams; ++i) { - rtc::VideoSinkInterface* renderer = &fake_renderer_; + VideoSinkInterface* renderer = &fake_renderer_; if (matcher->Active()) { - render_taps_.emplace_back( - std::make_unique(receiver_->clock_, matcher, i)); + render_taps_.emplace_back(std::make_unique( + &receiver_->env_.clock(), matcher, i)); renderer = render_taps_.back().get(); } auto recv_config = CreateVideoReceiveStreamConfig( diff --git a/test/scenario/video_stream.h b/test/scenario/video_stream.h index 43c51eab73..7e774182df 100644 --- a/test/scenario/video_stream.h +++ b/test/scenario/video_stream.h @@ -96,8 +96,7 @@ class ReceiveVideoStream { std::vector receive_streams_; FlexfecReceiveStream* flecfec_stream_ = nullptr; FakeVideoRenderer fake_renderer_; - std::vector>> - render_taps_; + std::vector>> render_taps_; CallClient* const receiver_; const VideoStreamConfig config_; std::unique_ptr decoder_factory_; diff --git a/test/scenario/video_stream_unittest.cc b/test/scenario/video_stream_unittest.cc index 3c4ced00d5..b0d1f4d161 100644 --- a/test/scenario/video_stream_unittest.cc +++ b/test/scenario/video_stream_unittest.cc @@ -14,6 +14,7 @@ #include "test/field_trial.h" #include "test/gtest.h" #include "test/scenario/scenario.h" +#include "test/video_test_constants.h" namespace webrtc { namespace test { @@ -176,7 +177,9 @@ TEST(VideoStreamTest, SendsFecWithFlexFec) { s.RunFor(TimeDelta::Seconds(5)); VideoSendStream::Stats video_stats; route->first()->SendTask([&]() { video_stats = video->send()->GetStats(); }); - EXPECT_GT(video_stats.substreams.begin()->second.rtp_stats.fec.packets, 0u); + EXPECT_GT(video_stats.substreams[VideoTestConstants::kFlexfecSendSsrc] + .rtp_stats.fec.packets, + 0u); } TEST(VideoStreamTest, ResolutionAdaptsToAvailableBandwidth) { @@ -248,7 +251,7 @@ TEST(VideoStreamTest, ResolutionAdaptsToAvailableBandwidth) { num_qvga_frames_ = 0; num_vga_frames_ = 0; - s.RunFor(TimeDelta::Seconds(40)); + s.RunFor(TimeDelta::Seconds(70)); EXPECT_GT(num_qvga_frames_, 0u); EXPECT_GT(num_vga_frames_, 0u); } diff --git a/test/test_flags.cc b/test/test_flags.cc index a0fff747fe..4df2583672 100644 --- 
a/test/test_flags.cc +++ b/test/test_flags.cc @@ -49,3 +49,8 @@ ABSL_FLAG(bool, export_perf_results_new_api, false, "Tells to initialize new API for exporting performance metrics"); + +ABSL_FLAG(bool, + webrtc_quick_perf_test, + false, + "Runs webrtc perfomance tests in quick mode."); diff --git a/test/test_flags.h b/test/test_flags.h index 30f918fc7d..84f1c29503 100644 --- a/test/test_flags.h +++ b/test/test_flags.h @@ -20,5 +20,6 @@ ABSL_DECLARE_FLAG(std::vector, plot); ABSL_DECLARE_FLAG(std::string, isolated_script_test_perf_output); ABSL_DECLARE_FLAG(std::string, webrtc_test_metrics_output_path); ABSL_DECLARE_FLAG(bool, export_perf_results_new_api); +ABSL_DECLARE_FLAG(bool, webrtc_quick_perf_test); #endif // TEST_TEST_FLAGS_H_ diff --git a/test/test_main_lib.cc b/test/test_main_lib.cc index 4c80315ac5..8931adf776 100644 --- a/test/test_main_lib.cc +++ b/test/test_main_lib.cc @@ -10,15 +10,19 @@ #include "test/test_main_lib.h" +#include + +#include +#include #include #include +#include #include +#include #include #include "absl/flags/flag.h" -#include "absl/memory/memory.h" -#include "absl/strings/match.h" -#include "absl/types/optional.h" +#include "absl/strings/string_view.h" #include "api/test/metrics/chrome_perf_dashboard_metrics_exporter.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/metrics/metrics_exporter.h" @@ -30,15 +34,20 @@ #include "rtc_base/logging.h" #include "rtc_base/ssl_adapter.h" #include "rtc_base/ssl_stream_adapter.h" -#include "rtc_base/thread.h" #include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" -#include "test/field_trial.h" #include "test/gtest.h" #include "test/test_flags.h" #include "test/testsupport/perf_test.h" #include "test/testsupport/resources_dir_flag.h" +#if defined(RTC_USE_PERFETTO) +#include "rtc_base/event_tracer.h" +#include "third_party/perfetto/include/perfetto/tracing/backend_type.h" +#include "third_party/perfetto/include/perfetto/tracing/tracing.h" +#include "third_party/perfetto/protos/perfetto/config/trace_config.gen.h" +#endif + #if defined(WEBRTC_WIN) #include "rtc_base/win32_socket_init.h" #endif @@ -92,8 +101,7 @@ ABSL_FLAG(bool, verbose, false, "verbose logs to stderr"); ABSL_FLAG(std::string, trace_event, "", - "Path to collect trace events (json file) for chrome://tracing. " - "If not set, events aren't captured."); + "Path to collect trace events. If not set, events aren't captured."); ABSL_FLAG(std::string, test_launcher_shard_index, @@ -124,41 +132,28 @@ class TestMainImpl : public TestMain { // Default to LS_INFO, even for release builds to provide better test // logging. - if (rtc::LogMessage::GetLogToDebug() > rtc::LS_INFO) - rtc::LogMessage::LogToDebug(rtc::LS_INFO); + if (LogMessage::GetLogToDebug() > LS_INFO) + LogMessage::LogToDebug(LS_INFO); if (absl::GetFlag(FLAGS_verbose)) - rtc::LogMessage::LogToDebug(rtc::LS_VERBOSE); - - rtc::LogMessage::SetLogToStderr(absl::GetFlag(FLAGS_logs) || - absl::GetFlag(FLAGS_verbose)); + LogMessage::LogToDebug(LS_VERBOSE); - // The sharding arguments take precedence over the sharding environment - // variables. 
- if (!absl::GetFlag(FLAGS_test_launcher_shard_index).empty() && - !absl::GetFlag(FLAGS_test_launcher_total_shards).empty()) { - std::string shard_index = - "GTEST_SHARD_INDEX=" + absl::GetFlag(FLAGS_test_launcher_shard_index); - std::string total_shards = - "GTEST_TOTAL_SHARDS=" + - absl::GetFlag(FLAGS_test_launcher_total_shards); - putenv(shard_index.data()); - putenv(total_shards.data()); - } + LogMessage::SetLogToStderr(absl::GetFlag(FLAGS_logs) || + absl::GetFlag(FLAGS_verbose)); // InitFieldTrialsFromString stores the char*, so the char array must // outlive the application. field_trials_ = absl::GetFlag(FLAGS_force_fieldtrials); - webrtc::field_trial::InitFieldTrialsFromString(field_trials_.c_str()); - webrtc::metrics::Enable(); + field_trial::InitFieldTrialsFromString(field_trials_.c_str()); + metrics::Enable(); #if defined(WEBRTC_WIN) - winsock_init_ = std::make_unique(); + winsock_init_ = std::make_unique(); #endif // Initialize SSL which are used by several tests. - rtc::InitializeSSL(); - rtc::SSLStreamAdapter::EnableTimeCallbackForTesting(); + InitializeSSL(); + SSLStreamAdapter::EnableTimeCallbackForTesting(); return 0; } @@ -167,29 +162,40 @@ class TestMainImpl : public TestMain { std::string trace_event_path = absl::GetFlag(FLAGS_trace_event); const bool capture_events = !trace_event_path.empty(); if (capture_events) { - rtc::tracing::SetupInternalTracer(); - rtc::tracing::StartInternalCapture(trace_event_path); + StartTracingCapture(trace_event_path); } - absl::optional> metrics_to_plot = + std::optional> metrics_to_plot = absl::GetFlag(FLAGS_plot); if (metrics_to_plot->empty()) { - metrics_to_plot = absl::nullopt; + metrics_to_plot = std::nullopt; } else { if (metrics_to_plot->size() == 1 && (*metrics_to_plot)[0] == kPlotAllMetrics) { metrics_to_plot->clear(); } } + // The sharding arguments take precedence over the sharding environment + // variables. 
+ if (!absl::GetFlag(FLAGS_test_launcher_shard_index).empty() && + !absl::GetFlag(FLAGS_test_launcher_total_shards).empty()) { + std::string shard_index = + "GTEST_SHARD_INDEX=" + absl::GetFlag(FLAGS_test_launcher_shard_index); + std::string total_shards = + "GTEST_TOTAL_SHARDS=" + + absl::GetFlag(FLAGS_test_launcher_total_shards); + putenv(total_shards.data()); + putenv(shard_index.data()); + } #if defined(WEBRTC_IOS) - rtc::test::InitTestSuite( - RUN_ALL_TESTS, argc, argv, - absl::GetFlag(FLAGS_write_perf_output_on_ios), - absl::GetFlag(FLAGS_export_perf_results_new_api), - absl::GetFlag(FLAGS_webrtc_test_metrics_output_path), metrics_to_plot); - rtc::test::RunTestsFromIOSApp(); + test::InitTestSuite(RUN_ALL_TESTS, argc, argv, + absl::GetFlag(FLAGS_write_perf_output_on_ios), + absl::GetFlag(FLAGS_export_perf_results_new_api), + absl::GetFlag(FLAGS_webrtc_test_metrics_output_path), + metrics_to_plot); + test::RunTestsFromIOSApp(); int exit_code = 0; #else int exit_code = RUN_ALL_TESTS(); @@ -236,7 +242,7 @@ class TestMainImpl : public TestMain { #endif if (capture_events) { - rtc::tracing::StopInternalCapture(); + StopTracingCapture(); } #if defined(ADDRESS_SANITIZER) || defined(LEAK_SANITIZER) || \ @@ -254,8 +260,61 @@ class TestMainImpl : public TestMain { private: #if defined(WEBRTC_WIN) - std::unique_ptr winsock_init_; + std::unique_ptr winsock_init_; +#endif +#if defined(RTC_USE_PERFETTO) + std::unique_ptr tracing_session_; + FILE* tracing_output_file_ = nullptr; #endif + + void StartTracingCapture(absl::string_view trace_output_file) { +#if defined(RTC_USE_PERFETTO) + tracing_output_file_ = std::fopen(trace_output_file.data(), "w"); + if (!tracing_output_file_) { + RTC_LOG(LS_ERROR) << "Failed to open trace file \"" << trace_output_file + << "\". Tracing will be disabled."; + } + perfetto::TracingInitArgs args; + args.backends |= perfetto::kInProcessBackend; + perfetto::Tracing::Initialize(args); + webrtc::RegisterPerfettoTrackEvents(); + + perfetto::TraceConfig cfg; + cfg.add_buffers()->set_size_kb(1024); // Record up to 1 MiB. 
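Note that the TraceConfig in StartTracingCapture() above only sizes the buffer; enabling the track_event data source is assumed to happen via RegisterPerfettoTrackEvents() and the defaults, which is outside this hunk. For reference, Perfetto's documented way of requesting that data source explicitly looks roughly like the following sketch (not part of this CL):

  perfetto::TraceConfig cfg;
  cfg.add_buffers()->set_size_kb(1024);  // In-process ring buffer size.
  // Explicitly enable the "track_event" data source used by TRACE_EVENT macros.
  auto* ds_cfg = cfg.add_data_sources()->mutable_config();
  ds_cfg->set_name("track_event");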
+ tracing_session_ = perfetto::Tracing::NewTrace(); + tracing_session_->Setup(cfg); + RTC_LOG(LS_INFO) + << "Starting tracing with Perfetto and outputting to file \"" + << trace_output_file << "\""; + tracing_session_->StartBlocking(); +#else + tracing::SetupInternalTracer(); + tracing::StartInternalCapture(trace_output_file); +#endif + } + + void StopTracingCapture() { +#if defined(RTC_USE_PERFETTO) + if (tracing_output_file_) { + RTC_CHECK(tracing_session_); + tracing_session_->StopBlocking(); + std::vector tracing_data = tracing_session_->ReadTraceBlocking(); + size_t count = std::fwrite(tracing_data.data(), sizeof tracing_data[0], + tracing_data.size(), tracing_output_file_); + if (count != tracing_data.size()) { + RTC_LOG(LS_ERROR) << "Expected to write " << tracing_data.size() + << " bytes but only " << count << " bytes written"; + } + std::fclose(tracing_output_file_); + tracing_output_file_ = nullptr; + } else { + RTC_LOG(LS_INFO) << "no file"; + } + +#else + tracing::StopInternalCapture(); +#endif + } }; } // namespace diff --git a/test/test_video_capturer.cc b/test/test_video_capturer.cc index 385af12b80..42d8c3db2b 100644 --- a/test/test_video_capturer.cc +++ b/test/test_video_capturer.cc @@ -25,10 +25,10 @@ TestVideoCapturer::~TestVideoCapturer() = default; void TestVideoCapturer::OnOutputFormatRequest( int width, int height, - const absl::optional& max_fps) { - absl::optional> target_aspect_ratio = + const std::optional& max_fps) { + std::optional> target_aspect_ratio = std::make_pair(width, height); - absl::optional max_pixel_count = width * height; + std::optional max_pixel_count = width * height; video_adapter_.OnOutputFormatRequest(target_aspect_ratio, max_pixel_count, max_fps); } @@ -62,7 +62,7 @@ void TestVideoCapturer::OnFrame(const VideoFrame& original_frame) { // Video adapter has requested a down-scale. Allocate a new buffer and // return scaled version. // For simplicity, only scale here without cropping. 
- rtc::scoped_refptr scaled_buffer = + scoped_refptr scaled_buffer = I420Buffer::Create(out_width, out_height); scaled_buffer->ScaleFrom(*frame.video_frame_buffer()->ToI420()); VideoFrame::Builder new_frame_builder = @@ -85,18 +85,17 @@ void TestVideoCapturer::OnFrame(const VideoFrame& original_frame) { } } -rtc::VideoSinkWants TestVideoCapturer::GetSinkWants() { +VideoSinkWants TestVideoCapturer::GetSinkWants() { return broadcaster_.wants(); } -void TestVideoCapturer::AddOrUpdateSink( - rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { +void TestVideoCapturer::AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) { broadcaster_.AddOrUpdateSink(sink, wants); UpdateVideoAdapter(); } -void TestVideoCapturer::RemoveSink(rtc::VideoSinkInterface* sink) { +void TestVideoCapturer::RemoveSink(VideoSinkInterface* sink) { broadcaster_.RemoveSink(sink); UpdateVideoAdapter(); } diff --git a/test/test_video_capturer.h b/test/test_video_capturer.h index 49660d8972..41ffdb79c5 100644 --- a/test/test_video_capturer.h +++ b/test/test_video_capturer.h @@ -23,7 +23,7 @@ namespace webrtc { namespace test { -class TestVideoCapturer : public rtc::VideoSourceInterface { +class TestVideoCapturer : public VideoSourceInterface { public: class FramePreprocessor { public: @@ -34,9 +34,9 @@ class TestVideoCapturer : public rtc::VideoSourceInterface { ~TestVideoCapturer() override; - void AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override; - void RemoveSink(rtc::VideoSinkInterface* sink) override; + void AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) override; + void RemoveSink(VideoSinkInterface* sink) override; void SetFramePreprocessor(std::unique_ptr preprocessor) { MutexLock lock(&lock_); preprocessor_ = std::move(preprocessor); @@ -47,7 +47,7 @@ class TestVideoCapturer : public rtc::VideoSourceInterface { } void OnOutputFormatRequest(int width, int height, - const absl::optional& max_fps); + const std::optional& max_fps); // Starts or resumes video capturing. Can be called multiple times during // lifetime of this object. 
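The TestVideoCapturer hunks above are a mechanical absl::optional to std::optional migration, so for clarity here is a minimal, hypothetical call site for the new OnOutputFormatRequest() signature; the concrete capturer subclass is assumed, and the optional's element type is taken to be int as in the pre-existing API:

  // Cap an assumed test capturer at 640x360 and at most 15 fps.
  void CapOutputFormat(webrtc::test::TestVideoCapturer& capturer) {
    capturer.OnOutputFormatRequest(/*width=*/640, /*height=*/360,
                                   /*max_fps=*/std::optional<int>(15));
  }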
@@ -61,7 +61,7 @@ class TestVideoCapturer : public rtc::VideoSourceInterface { protected: void OnFrame(const VideoFrame& frame); - rtc::VideoSinkWants GetSinkWants(); + VideoSinkWants GetSinkWants(); private: void UpdateVideoAdapter(); @@ -70,8 +70,8 @@ class TestVideoCapturer : public rtc::VideoSourceInterface { Mutex lock_; std::unique_ptr preprocessor_ RTC_GUARDED_BY(lock_); bool enable_adaptation_ RTC_GUARDED_BY(lock_) = true; - rtc::VideoBroadcaster broadcaster_; - cricket::VideoAdapter video_adapter_; + VideoBroadcaster broadcaster_; + VideoAdapter video_adapter_; }; } // namespace test } // namespace webrtc diff --git a/test/testsupport/copy_to_file_audio_capturer.cc b/test/testsupport/copy_to_file_audio_capturer.cc index 6de8e7fd99..df93f26f4b 100644 --- a/test/testsupport/copy_to_file_audio_capturer.cc +++ b/test/testsupport/copy_to_file_audio_capturer.cc @@ -33,7 +33,7 @@ int CopyToFileAudioCapturer::NumChannels() const { return delegate_->NumChannels(); } -bool CopyToFileAudioCapturer::Capture(rtc::BufferT* buffer) { +bool CopyToFileAudioCapturer::Capture(BufferT* buffer) { bool result = delegate_->Capture(buffer); if (result) { wav_writer_->WriteSamples(buffer->data(), buffer->size()); diff --git a/test/testsupport/copy_to_file_audio_capturer.h b/test/testsupport/copy_to_file_audio_capturer.h index a410beeea8..2f7859ff0b 100644 --- a/test/testsupport/copy_to_file_audio_capturer.h +++ b/test/testsupport/copy_to_file_audio_capturer.h @@ -12,9 +12,9 @@ #define TEST_TESTSUPPORT_COPY_TO_FILE_AUDIO_CAPTURER_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/array_view.h" #include "common_audio/wav_file.h" #include "modules/audio_device/include/test_audio_device.h" @@ -36,7 +36,7 @@ class CopyToFileAudioCapturer : public TestAudioDeviceModule::Capturer { int SamplingFrequency() const override; int NumChannels() const override; - bool Capture(rtc::BufferT* buffer) override; + bool Capture(BufferT* buffer) override; private: std::unique_ptr delegate_; diff --git a/test/testsupport/copy_to_file_audio_capturer_unittest.cc b/test/testsupport/copy_to_file_audio_capturer_unittest.cc index 3831c28580..e97b69860a 100644 --- a/test/testsupport/copy_to_file_audio_capturer_unittest.cc +++ b/test/testsupport/copy_to_file_audio_capturer_unittest.cc @@ -38,7 +38,7 @@ class CopyToFileAudioCapturerTest : public ::testing::Test { }; TEST_F(CopyToFileAudioCapturerTest, Capture) { - rtc::BufferT expected_buffer; + BufferT expected_buffer; ASSERT_TRUE(capturer_->Capture(&expected_buffer)); ASSERT_TRUE(!expected_buffer.empty()); // Destruct capturer to close wav file. @@ -48,7 +48,7 @@ TEST_F(CopyToFileAudioCapturerTest, Capture) { // what was captured. std::unique_ptr wav_file_capturer = TestAudioDeviceModule::CreateWavFileReader(temp_filename_, 48000); - rtc::BufferT actual_buffer; + BufferT actual_buffer; wav_file_capturer->Capture(&actual_buffer); ASSERT_EQ(actual_buffer, expected_buffer); } diff --git a/test/testsupport/file_utils.cc b/test/testsupport/file_utils.cc index 47fed9ac05..963514c88d 100644 --- a/test/testsupport/file_utils.cc +++ b/test/testsupport/file_utils.cc @@ -20,7 +20,6 @@ #include #include -#include #include #include "Shlwapi.h" @@ -36,15 +35,18 @@ #include // To check for directory existence. #ifndef S_ISDIR // Not defined in stat.h on Windows. 
-#define S_ISDIR(mode) (((mode)&S_IFMT) == S_IFDIR) +#define S_ISDIR(mode) (((mode) & S_IFMT) == S_IFDIR) #endif #include #include #include +#include +#include #include #include +#include #if defined(WEBRTC_IOS) #include "test/testsupport/ios_file_utils.h" @@ -54,6 +56,7 @@ #include "absl/strings/string_view.h" #include "rtc_base/checks.h" +#include "rtc_base/crypto_random.h" #include "rtc_base/string_utils.h" #include "rtc_base/strings/string_builder.h" #include "test/testsupport/file_utils_override.h" @@ -94,6 +97,13 @@ std::string OutputPath() { return webrtc::test::internal::OutputPath(); } +std::string OutputPathWithRandomDirectory() { + std::string path = webrtc::test::internal::OutputPath(); + std::string rand_dir = path + CreateRandomUuid(); + RTC_CHECK(CreateDir(rand_dir)) << "Failed to create dir: " << rand_dir; + return rand_dir + std::string(kPathDelimiter); +} + std::string WorkingDir() { return webrtc::test::internal::WorkingDir(); } @@ -103,13 +113,13 @@ std::string WorkingDir() { std::string TempFilename(absl::string_view dir, absl::string_view prefix) { #ifdef WIN32 wchar_t filename[MAX_PATH]; - if (::GetTempFileNameW(rtc::ToUtf16(dir).c_str(), - rtc::ToUtf16(prefix).c_str(), 0, filename) != 0) - return rtc::ToUtf8(filename); + if (::GetTempFileNameW(webrtc::ToUtf16(dir).c_str(), + webrtc::ToUtf16(prefix).c_str(), 0, filename) != 0) + return webrtc::ToUtf8(filename); RTC_DCHECK_NOTREACHED(); return ""; #else - rtc::StringBuilder os; + StringBuilder os; os << dir << "/" << prefix << "XXXXXX"; std::string tempname = os.Release(); @@ -131,9 +141,9 @@ std::string GenerateTempFilename(absl::string_view dir, return filename; } -absl::optional> ReadDirectory(absl::string_view path) { +std::optional> ReadDirectory(absl::string_view path) { if (path.length() == 0) - return absl::optional>(); + return std::optional>(); std::string path_str(path); @@ -144,14 +154,15 @@ absl::optional> ReadDirectory(absl::string_view path) { // Init. WIN32_FIND_DATAW data; - HANDLE handle = ::FindFirstFileW(rtc::ToUtf16(path_str + '*').c_str(), &data); + HANDLE handle = + ::FindFirstFileW(webrtc::ToUtf16(path_str + '*').c_str(), &data); if (handle == INVALID_HANDLE_VALUE) - return absl::optional>(); + return std::optional>(); // Populate output. std::vector found_entries; do { - const std::string name = rtc::ToUtf8(data.cFileName); + const std::string name = webrtc::ToUtf8(data.cFileName); if (name != "." && name != "..") found_entries.emplace_back(path_str + name); } while (::FindNextFileW(handle, &data) == TRUE); @@ -167,7 +178,7 @@ absl::optional> ReadDirectory(absl::string_view path) { // Init. DIR* dir = ::opendir(path_str.c_str()); if (dir == nullptr) - return absl::optional>(); + return std::optional>(); // Populate output. std::vector found_entries; @@ -181,7 +192,7 @@ absl::optional> ReadDirectory(absl::string_view path) { closedir(dir); #endif - return absl::optional>(std::move(found_entries)); + return std::optional>(std::move(found_entries)); } bool CreateDir(absl::string_view directory_name) { @@ -214,6 +225,26 @@ bool RemoveDir(absl::string_view directory_name) { #endif } +bool RemoveNonEmptyDir(absl::string_view directory_name) { + std::optional> dir_content = + ReadDirectory(directory_name); + if (dir_content.has_value()) { + for (const std::string& entry : *dir_content) { + if (DirExists(entry)) { + if (!RemoveNonEmptyDir(entry)) { + return false; + } + } else if (FileExists(entry)) { + if (!RemoveFile(entry)) { + return false; + } + } + } + } + // Directory should be emptied. 
+ return RemoveDir(directory_name); +} + bool RemoveFile(absl::string_view file_name) { #ifdef WIN32 return DeleteFileA(std::string(file_name).c_str()) != FALSE; @@ -228,8 +259,13 @@ std::string ResourcePath(absl::string_view name, absl::string_view extension) { std::string JoinFilename(absl::string_view dir, absl::string_view name) { RTC_CHECK(!dir.empty()) << "Special cases not implemented."; - rtc::StringBuilder os; - os << dir << kPathDelimiter << name; + StringBuilder os; + os << dir; + // If the directory path already ends with a path delimiter, don't append it. + if (dir.back() != kPathDelimiter.back()) { + os << kPathDelimiter; + } + os << name; return os.Release(); } diff --git a/test/testsupport/file_utils.h b/test/testsupport/file_utils.h index ab80ca4454..b0652eafd6 100644 --- a/test/testsupport/file_utils.h +++ b/test/testsupport/file_utils.h @@ -13,12 +13,12 @@ #ifndef TEST_TESTSUPPORT_FILE_UTILS_H_ #define TEST_TESTSUPPORT_FILE_UTILS_H_ +#include #include #include #include "absl/base/attributes.h" #include "absl/strings/string_view.h" -#include "absl/types/optional.h" namespace webrtc { namespace test { @@ -42,6 +42,11 @@ ABSL_CONST_INIT extern const absl::string_view kPathDelimiter; // found, the current working directory ("./") is returned as a fallback. std::string OutputPath(); +// Same as the above, but appends a randomly named directory to the end of the +// path. Primarily used in stress-testing environments to prevent collisions +// between files and folders. +std::string OutputPathWithRandomDirectory(); + // Generates an empty file with a unique name in the specified directory and // returns the file name and path. // TODO(titovartem) rename to TempFile and next method to TempFilename @@ -75,7 +80,7 @@ std::string WorkingDir(); // of strings with one element for each found file or directory. Each element is // a path created by prepending `dir` to the file/directory name. "." and ".." // are never added in the returned vector. -absl::optional> ReadDirectory(absl::string_view path); +std::optional> ReadDirectory(absl::string_view path); // Creates a directory if it not already exists. // Returns true if successful. Will print an error message to stderr and return @@ -85,6 +90,9 @@ bool CreateDir(absl::string_view directory_name); // Removes a directory, which must already be empty. bool RemoveDir(absl::string_view directory_name); +// Removes all files and subdirectories inside a non-empty directory, then the +// directory itself. +bool RemoveNonEmptyDir(absl::string_view directory_name); + // Removes a file. bool RemoveFile(absl::string_view file_name); diff --git a/test/testsupport/file_utils_override.cc b/test/testsupport/file_utils_override.cc index 7d0a3e3312..40c0bd60a7 100644 --- a/test/testsupport/file_utils_override.cc +++ b/test/testsupport/file_utils_override.cc @@ -19,7 +19,6 @@ #include #include -#include #include #include "Shlwapi.h" @@ -41,8 +40,9 @@ #include "test/testsupport/mac_file_utils.h" #endif +#include + #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" #include "rtc_base/string_utils.h" @@ -81,7 +81,7 @@ const absl::string_view kResourcesDirName = "resources"; // Finds the WebRTC src dir. // The returned path always ends with a path separator.
-absl::optional ProjectRootPath() { +std::optional ProjectRootPath() { #if defined(WEBRTC_ANDROID) return std::string(kAndroidChromiumTestsRoot); #elif defined WEBRTC_IOS @@ -103,7 +103,7 @@ absl::optional ProjectRootPath() { ssize_t count = ::readlink("/proc/self/exe", buf, arraysize(buf)); if (count <= 0) { RTC_DCHECK_NOTREACHED() << "Unable to resolve /proc/self/exe."; - return absl::nullopt; + return std::nullopt; } // On POSIX, tests execute in out/Whatever, so src is two levels up. std::string exe_dir = DirName(absl::string_view(buf, count)); @@ -113,9 +113,9 @@ absl::optional ProjectRootPath() { wchar_t buf[MAX_PATH]; buf[0] = 0; if (GetModuleFileNameW(NULL, buf, MAX_PATH) == 0) - return absl::nullopt; + return std::nullopt; - std::string exe_path = rtc::ToUtf8(std::wstring(buf)); + std::string exe_path = webrtc::ToUtf8(std::wstring(buf)); std::string exe_dir = DirName(exe_path); return DirName(DirName(exe_dir)) + std::string(kPathDelimiter); #endif @@ -129,7 +129,7 @@ std::string OutputPath() { #elif defined(WEBRTC_FUCHSIA) return std::string(kFuchsiaTempWritableDir); #else - absl::optional path_opt = ProjectRootPath(); + std::optional path_opt = ProjectRootPath(); RTC_DCHECK(path_opt); std::string path = *path_opt + "out"; if (!CreateDir(path)) { @@ -157,9 +157,9 @@ std::string ResourcePath(absl::string_view name, absl::string_view extension) { #if defined(WEBRTC_IOS) return IOSResourcePath(name, extension); #else - absl::optional path_opt = ProjectRootPath(); + std::optional path_opt = ProjectRootPath(); RTC_DCHECK(path_opt); - rtc::StringBuilder os(*path_opt); + StringBuilder os(*path_opt); os << kResourcesDirName << kPathDelimiter << name << "." << extension; return os.Release(); #endif diff --git a/test/testsupport/file_utils_unittest.cc b/test/testsupport/file_utils_unittest.cc index b9de01d09d..adb9a2a0ca 100644 --- a/test/testsupport/file_utils_unittest.cc +++ b/test/testsupport/file_utils_unittest.cc @@ -14,11 +14,12 @@ #include #include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "rtc_base/checks.h" +#include "rtc_base/crypto_random.h" #include "test/gmock.h" #include "test/gtest.h" @@ -44,7 +45,7 @@ std::string Path(absl::string_view path) { void CleanDir(absl::string_view dir, size_t* num_deleted_entries) { RTC_DCHECK(num_deleted_entries); *num_deleted_entries = 0; - absl::optional> dir_content = ReadDirectory(dir); + std::optional> dir_content = ReadDirectory(dir); EXPECT_TRUE(dir_content); for (const auto& entry : *dir_content) { if (DirExists(entry)) { @@ -119,6 +120,28 @@ TEST_F(FileUtilsTest, OutputPathFromRootWorkingDir) { ASSERT_THAT(result, EndsWith(expected_end)); } +TEST_F(FileUtilsTest, RandomOutputPathFromUnchangedWorkingDir) { + SetRandomTestMode(true); + std::string fixed_first_uuid = "def01482-f829-429a-bfd4-841706e92cdd"; + std::string expected_end = ExpectedRootDirByPlatform() + fixed_first_uuid + + std::string(kPathDelimiter); + std::string result = webrtc::test::OutputPathWithRandomDirectory(); + + ASSERT_THAT(result, EndsWith(expected_end)); +} + +TEST_F(FileUtilsTest, RandomOutputPathFromRootWorkingDir) { + ASSERT_EQ(0, chdir(kPathDelimiter.data())); + + SetRandomTestMode(true); + std::string fixed_first_uuid = "def01482-f829-429a-bfd4-841706e92cdd"; + std::string expected_end = ExpectedRootDirByPlatform() + fixed_first_uuid + + std::string(kPathDelimiter); + std::string result = webrtc::test::OutputPathWithRandomDirectory(); + + ASSERT_THAT(result, EndsWith(expected_end)); +} + 
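The two tests above pin the generated UUID with SetRandomTestMode(true); in normal runs each call to OutputPathWithRandomDirectory() yields a fresh directory. A hedged sketch of how a fixture might pair it with the new RemoveNonEmptyDir() for per-run isolation (the fixture name and members are illustrative, not part of this CL):

  class IsolatedOutputTest : public ::testing::Test {
   protected:
    void SetUp() override {
      output_dir_ = webrtc::test::OutputPathWithRandomDirectory();
    }
    void TearDown() override {
      // Removes whatever files and subdirectories the test body left behind.
      EXPECT_TRUE(webrtc::test::RemoveNonEmptyDir(output_dir_));
    }
    std::string output_dir_;
  };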
TEST_F(FileUtilsTest, TempFilename) { std::string temp_filename = webrtc::test::TempFilename( webrtc::test::OutputPath(), "TempFilenameTest"); @@ -147,7 +170,8 @@ TEST_F(FileUtilsTest, GenerateTempFilename) { #define MAYBE_CreateDir CreateDir #endif TEST_F(FileUtilsTest, MAYBE_CreateDir) { - std::string directory = "fileutils-unittest-empty-dir"; + std::string directory = + test::OutputPathWithRandomDirectory() + "fileutils-unittest-empty-dir"; // Make sure it's removed if a previous test has failed: remove(directory.c_str()); ASSERT_TRUE(webrtc::test::CreateDir(directory)); @@ -231,7 +255,7 @@ TEST_F(FileUtilsTest, WriteReadDeleteFilesAndDirs) { // Create an empty temporary directory for this test. const std::string temp_directory = - OutputPath() + Path("TempFileUtilsTestReadDirectory/"); + OutputPathWithRandomDirectory() + Path("TempFileUtilsTestReadDirectory/"); CreateDir(temp_directory); EXPECT_NO_FATAL_FAILURE(CleanDir(temp_directory, &num_deleted_entries)); EXPECT_TRUE(DirExists(temp_directory)); @@ -247,7 +271,7 @@ TEST_F(FileUtilsTest, WriteReadDeleteFilesAndDirs) { EXPECT_TRUE(DirExists(temp_subdir)); // Checks. - absl::optional> dir_content = + std::optional> dir_content = ReadDirectory(temp_directory); EXPECT_TRUE(dir_content); EXPECT_EQ(2u, dir_content->size()); @@ -257,6 +281,30 @@ TEST_F(FileUtilsTest, WriteReadDeleteFilesAndDirs) { EXPECT_FALSE(DirExists(temp_directory)); } +TEST_F(FileUtilsTest, DeleteNonEmptyDirectory) { + const std::string temp_directory = + OutputPathWithRandomDirectory() + Path("TempFileUtilsTestReadDirectory/"); + CreateDir(temp_directory); + EXPECT_TRUE(DirExists(temp_directory)); + + // Add a file. + const std::string temp_filename = temp_directory + "TempFilenameTest"; + WriteStringInFile("test\n", temp_filename); + EXPECT_TRUE(FileExists(temp_filename)); + + // Add a directory with one file. + const std::string temp_subdir = temp_directory + Path("subdir/"); + EXPECT_TRUE(CreateDir(temp_subdir)); + EXPECT_TRUE(DirExists(temp_subdir)); + const std::string temp_filename2 = temp_subdir + "TempFilenameTest2"; + WriteStringInFile("test2\n", temp_filename2); + EXPECT_TRUE(FileExists(temp_filename2)); + + // Checks. 
+ EXPECT_TRUE(RemoveNonEmptyDir(temp_directory)); + EXPECT_FALSE(DirExists(temp_directory)); +} + TEST_F(FileUtilsTest, DirNameStripsFilename) { EXPECT_EQ(Path("/some/path"), DirName(Path("/some/path/file.txt"))); } @@ -273,5 +321,15 @@ TEST_F(FileUtilsTest, DirNameStopsAtRoot) { EXPECT_EQ(Path("/"), DirName(Path("/"))); } +TEST_F(FileUtilsTest, JoinFilenameDoesNotAppendExtraPathDelimiterIfExists) { + EXPECT_EQ(JoinFilename(Path("/some/path/"), "file.txt"), + Path("/some/path/file.txt")); +} + +TEST_F(FileUtilsTest, JoinFilenameAppendsPathDelimiterIfMissing) { + EXPECT_EQ(JoinFilename(Path("/some/path"), "file.txt"), + Path("/some/path/file.txt")); +} + } // namespace test } // namespace webrtc diff --git a/test/testsupport/fixed_fps_video_frame_writer_adapter.cc b/test/testsupport/fixed_fps_video_frame_writer_adapter.cc index 531dade0e8..4b90d21c42 100644 --- a/test/testsupport/fixed_fps_video_frame_writer_adapter.cc +++ b/test/testsupport/fixed_fps_video_frame_writer_adapter.cc @@ -11,9 +11,9 @@ #include "test/testsupport/fixed_fps_video_frame_writer_adapter.h" #include +#include #include -#include "absl/types/optional.h" #include "api/units/time_delta.h" #include "api/video/video_sink_interface.h" #include "rtc_base/checks.h" diff --git a/test/testsupport/fixed_fps_video_frame_writer_adapter.h b/test/testsupport/fixed_fps_video_frame_writer_adapter.h index d4d95e9f82..6d966bf4cf 100644 --- a/test/testsupport/fixed_fps_video_frame_writer_adapter.h +++ b/test/testsupport/fixed_fps_video_frame_writer_adapter.h @@ -12,8 +12,8 @@ #define TEST_TESTSUPPORT_FIXED_FPS_VIDEO_FRAME_WRITER_ADAPTER_H_ #include +#include -#include "absl/types/optional.h" #include "api/test/video/video_frame_writer.h" #include "api/video/video_sink_interface.h" #include "system_wrappers/include/clock.h" @@ -78,7 +78,7 @@ class FixedFpsVideoFrameWriterAdapter : public VideoFrameWriter { // Expected time slot for the last frame. Timestamp last_frame_time_ = Timestamp::MinusInfinity(); - absl::optional last_frame_ = absl::nullopt; + std::optional last_frame_ = std::nullopt; }; } // namespace test diff --git a/test/testsupport/frame_reader.h b/test/testsupport/frame_reader.h index 7856476ca0..c18ea7f5c6 100644 --- a/test/testsupport/frame_reader.h +++ b/test/testsupport/frame_reader.h @@ -13,14 +13,15 @@ #include +#include +#include #include -#include "absl/types/optional.h" #include "api/scoped_refptr.h" +#include "api/video/i420_buffer.h" #include "api/video/resolution.h" namespace webrtc { -class I420Buffer; namespace test { // Handles reading of I420 frames from video files. @@ -37,30 +38,30 @@ class FrameReader { // Reads and returns next frame. Returns `nullptr` if reading failed or end of // stream is reached. - virtual rtc::scoped_refptr PullFrame() = 0; + virtual scoped_refptr PullFrame() = 0; // Reads and returns next frame. `frame_num` stores unwrapped frame number // which can be passed to `ReadFrame` to re-read this frame later. Returns // `nullptr` if reading failed or end of stream is reached. - virtual rtc::scoped_refptr PullFrame(int* frame_num) = 0; + virtual scoped_refptr PullFrame(int* frame_num) = 0; // Reads and returns frame specified by `frame_num`. Returns `nullptr` if // reading failed. - virtual rtc::scoped_refptr ReadFrame(int frame_num) = 0; + virtual scoped_refptr ReadFrame(int frame_num) = 0; // Reads next frame, resizes and returns it. `frame_num` stores unwrapped // frame number which can be passed to `ReadFrame` to re-read this frame // later. 
`resolution` specifies resolution of the returned frame. // `framerate_scale` specifies frame rate scale factor. Frame rate scaling is // done by skipping or repeating frames. - virtual rtc::scoped_refptr PullFrame(int* frame_num, - Resolution resolution, - Ratio framerate_scale) = 0; + virtual scoped_refptr PullFrame(int* frame_num, + Resolution resolution, + Ratio framerate_scale) = 0; // Reads frame specified by `frame_num`, resizes and returns it. Returns // `nullptr` if reading failed. - virtual rtc::scoped_refptr ReadFrame(int frame_num, - Resolution resolution) = 0; + virtual scoped_refptr ReadFrame(int frame_num, + Resolution resolution) = 0; // Total number of retrievable frames. virtual int num_frames() const = 0; @@ -83,18 +84,18 @@ class YuvFrameReaderImpl : public FrameReader { virtual void Init(); - rtc::scoped_refptr PullFrame() override; + scoped_refptr PullFrame() override; - rtc::scoped_refptr PullFrame(int* frame_num) override; + scoped_refptr PullFrame(int* frame_num) override; - rtc::scoped_refptr PullFrame(int* frame_num, - Resolution resolution, - Ratio framerate_scale) override; + scoped_refptr PullFrame(int* frame_num, + Resolution resolution, + Ratio framerate_scale) override; - rtc::scoped_refptr ReadFrame(int frame_num) override; + scoped_refptr ReadFrame(int frame_num) override; - rtc::scoped_refptr ReadFrame(int frame_num, - Resolution resolution) override; + scoped_refptr ReadFrame(int frame_num, + Resolution resolution) override; int num_frames() const override { return num_frames_; } @@ -104,7 +105,7 @@ class YuvFrameReaderImpl : public FrameReader { int Skip(Ratio framerate_scale); private: - absl::optional ticks_; + std::optional ticks_; }; const std::string filepath_; diff --git a/test/testsupport/ios_file_utils.mm b/test/testsupport/ios_file_utils.mm index ef36937e6a..5c5c7f092a 100644 --- a/test/testsupport/ios_file_utils.mm +++ b/test/testsupport/ios_file_utils.mm @@ -24,7 +24,8 @@ // For iOS, resource files are added to the application bundle in the root // and not in separate folders as is the case for other platforms. This method // therefore removes any prepended folders and uses only the actual file name. -std::string IOSResourcePath(absl::string_view name, absl::string_view extension) { +std::string IOSResourcePath(absl::string_view name, + absl::string_view extension) { @autoreleasepool { NSString* path = [NSString stringForAbslStringView:name]; NSString* fileName = path.lastPathComponent; @@ -46,11 +47,10 @@ // For iOS, we don't have access to the output directory. Return the path to the // temporary directory instead. This is mostly used by tests that need to write // output files to disk. 
-std::string IOSOutputPath() { +std::string IOSOutputPath() { @autoreleasepool { NSString* tempDir = NSTemporaryDirectory(); - if (tempDir == nil) - tempDir = @"/tmp"; + if (tempDir == nil) tempDir = @"/tmp"; return [NSString stdStringForString:tempDir]; } } diff --git a/test/testsupport/ivf_video_frame_generator.cc b/test/testsupport/ivf_video_frame_generator.cc index ec3c948fa4..a50779d25a 100644 --- a/test/testsupport/ivf_video_frame_generator.cc +++ b/test/testsupport/ivf_video_frame_generator.cc @@ -12,6 +12,7 @@ #include +#include "api/environment/environment.h" #include "api/video/encoded_image.h" #include "api/video/i420_buffer.h" #include "api/video_codecs/video_codec.h" @@ -30,14 +31,36 @@ namespace { constexpr TimeDelta kMaxNextFrameWaitTimeout = TimeDelta::Seconds(1); +std::unique_ptr CreateDecoder(const Environment& env, + VideoCodecType codec_type) { + switch (codec_type) { + case VideoCodecType::kVideoCodecVP8: + return CreateVp8Decoder(env); + case VideoCodecType::kVideoCodecVP9: + return VP9Decoder::Create(); + case VideoCodecType::kVideoCodecH264: + return H264Decoder::Create(); + case VideoCodecType::kVideoCodecAV1: + return CreateDav1dDecoder(env); + case VideoCodecType::kVideoCodecH265: + // No H.265 SW decoder implementation will be provided. + return nullptr; + case VideoCodecType::kVideoCodecGeneric: + return nullptr; + } +} + } // namespace -IvfVideoFrameGenerator::IvfVideoFrameGenerator(const std::string& file_name) +IvfVideoFrameGenerator::IvfVideoFrameGenerator(const Environment& env, + absl::string_view file_name, + std::optional fps_hint) : callback_(this), file_reader_(IvfFileReader::Create(FileWrapper::OpenReadOnly(file_name))), - video_decoder_(CreateVideoDecoder(file_reader_->GetVideoCodecType())), - width_(file_reader_->GetFrameWidth()), - height_(file_reader_->GetFrameHeight()) { + video_decoder_(CreateDecoder(env, file_reader_->GetVideoCodecType())), + original_resolution_({.width = file_reader_->GetFrameWidth(), + .height = file_reader_->GetFrameHeight()}), + fps_hint_(fps_hint) { RTC_CHECK(video_decoder_) << "No decoder found for file's video codec type"; VideoDecoder::Settings decoder_settings; decoder_settings.set_codec_type(file_reader_->GetVideoCodecType()); @@ -62,7 +85,7 @@ IvfVideoFrameGenerator::~IvfVideoFrameGenerator() { video_decoder_.reset(); { MutexLock frame_lock(&frame_decode_lock_); - next_frame_ = absl::nullopt; + next_frame_ = std::nullopt; // Set event in case another thread is waiting on it. next_frame_decoded_.Set(); } @@ -75,7 +98,7 @@ FrameGeneratorInterface::VideoFrameData IvfVideoFrameGenerator::NextFrame() { if (!file_reader_->HasMoreFrames()) { file_reader_->Reset(); } - absl::optional image = file_reader_->NextFrame(); + std::optional image = file_reader_->NextFrame(); RTC_CHECK(image); // Last parameter is undocumented and there is no usage of it found. RTC_CHECK_EQ(WEBRTC_VIDEO_CODEC_OK, @@ -85,29 +108,48 @@ FrameGeneratorInterface::VideoFrameData IvfVideoFrameGenerator::NextFrame() { << kMaxNextFrameWaitTimeout << ". Can't continue"; MutexLock frame_lock(&frame_decode_lock_); - rtc::scoped_refptr buffer = - next_frame_->video_frame_buffer(); - if (width_ != static_cast(buffer->width()) || - height_ != static_cast(buffer->height())) { + scoped_refptr buffer = next_frame_->video_frame_buffer(); + + // Set original resolution to resolution of decoded frame. 
+ original_resolution_ = {.width = static_cast(buffer->width()), + .height = static_cast(buffer->height())}; + + if (output_resolution_.has_value() && + (output_resolution_->width != original_resolution_.width || + output_resolution_->height != original_resolution_.height)) { // Video adapter has requested a down-scale. Allocate a new buffer and // return scaled version. - rtc::scoped_refptr scaled_buffer = - I420Buffer::Create(width_, height_); + scoped_refptr scaled_buffer = I420Buffer::Create( + output_resolution_->width, output_resolution_->height); scaled_buffer->ScaleFrom(*buffer->ToI420()); buffer = scaled_buffer; } return VideoFrameData(buffer, next_frame_->update_rect()); } +void IvfVideoFrameGenerator::SkipNextFrame() { + MutexLock lock(&lock_); + next_frame_decoded_.Reset(); + RTC_CHECK(file_reader_); + if (!file_reader_->HasMoreFrames()) { + file_reader_->Reset(); + } + std::optional image = file_reader_->NextFrame(); + RTC_CHECK(image); + // Last parameter is undocumented and there is no usage of it found. + // Frame has to be decoded in case it is a key frame. + RTC_CHECK_EQ(WEBRTC_VIDEO_CODEC_OK, + video_decoder_->Decode(*image, /*render_time_ms=*/0)); +} + void IvfVideoFrameGenerator::ChangeResolution(size_t width, size_t height) { MutexLock lock(&lock_); - width_ = width; - height_ = height; + output_resolution_ = {.width = width, .height = height}; } FrameGeneratorInterface::Resolution IvfVideoFrameGenerator::GetResolution() const { - return {.width = width_, .height = height_}; + return output_resolution_.value_or(original_resolution_); } int32_t IvfVideoFrameGenerator::DecodedCallback::Decoded( @@ -123,8 +165,8 @@ int32_t IvfVideoFrameGenerator::DecodedCallback::Decoded( } void IvfVideoFrameGenerator::DecodedCallback::Decoded( VideoFrame& decoded_image, - absl::optional decode_time_ms, - absl::optional qp) { + std::optional decode_time_ms, + std::optional qp) { reader_->OnFrameDecoded(decoded_image); } @@ -134,22 +176,5 @@ void IvfVideoFrameGenerator::OnFrameDecoded(const VideoFrame& decoded_frame) { next_frame_decoded_.Set(); } -std::unique_ptr IvfVideoFrameGenerator::CreateVideoDecoder( - VideoCodecType codec_type) { - if (codec_type == VideoCodecType::kVideoCodecVP8) { - return VP8Decoder::Create(); - } - if (codec_type == VideoCodecType::kVideoCodecVP9) { - return VP9Decoder::Create(); - } - if (codec_type == VideoCodecType::kVideoCodecH264) { - return H264Decoder::Create(); - } - if (codec_type == VideoCodecType::kVideoCodecAV1) { - return CreateDav1dDecoder(); - } - return nullptr; -} - } // namespace test } // namespace webrtc diff --git a/test/testsupport/ivf_video_frame_generator.h b/test/testsupport/ivf_video_frame_generator.h index 6c6fa4951d..cb0b97f9b2 100644 --- a/test/testsupport/ivf_video_frame_generator.h +++ b/test/testsupport/ivf_video_frame_generator.h @@ -12,9 +12,11 @@ #define TEST_TESTSUPPORT_IVF_VIDEO_FRAME_GENERATOR_H_ #include +#include #include -#include "absl/types/optional.h" +#include "absl/strings/string_view.h" +#include "api/environment/environment.h" #include "api/sequence_checker.h" #include "api/test/frame_generator_interface.h" #include "api/video/video_codec_type.h" @@ -30,14 +32,18 @@ namespace test { // All methods except constructor must be used from the same thread. class IvfVideoFrameGenerator : public FrameGeneratorInterface { public: - explicit IvfVideoFrameGenerator(const std::string& file_name); + // Allows specifying an `fps_hint` in case the fps of the video is known.
+ IvfVideoFrameGenerator(const Environment& env, + absl::string_view file_name, + std::optional fps_hint); ~IvfVideoFrameGenerator() override; VideoFrameData NextFrame() override; + void SkipNextFrame() override; void ChangeResolution(size_t width, size_t height) override; Resolution GetResolution() const override; - absl::optional fps() const override { return absl::nullopt; } + std::optional fps() const override { return fps_hint_; } private: class DecodedCallback : public DecodedImageCallback { @@ -48,23 +54,27 @@ class IvfVideoFrameGenerator : public FrameGeneratorInterface { int32_t Decoded(VideoFrame& decoded_image) override; int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override; void Decoded(VideoFrame& decoded_image, - absl::optional decode_time_ms, - absl::optional qp) override; + std::optional decode_time_ms, + std::optional qp) override; private: IvfVideoFrameGenerator* const reader_; }; void OnFrameDecoded(const VideoFrame& decoded_frame); - static std::unique_ptr CreateVideoDecoder( - VideoCodecType codec_type); DecodedCallback callback_; std::unique_ptr file_reader_; std::unique_ptr video_decoder_; - size_t width_; - size_t height_; + // Resolution of IVF. Initially read from IVF header and then set to + // resolution of decoded frame. + Resolution original_resolution_; + // Resolution of output frames. When set, the decoded frames are scaled to + // `output_resolution_`. Otherwise the decoded resolution, which may vary from + // frame to frame, is preserved. + std::optional output_resolution_; + std::optional fps_hint_; // This lock is used to ensure that all API method will be called // sequentially. It is required because we need to ensure that generator @@ -80,8 +90,8 @@ class IvfVideoFrameGenerator : public FrameGeneratorInterface { // frame was sent to decoder and decoder callback was invoked.
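Not part of the patch itself: a minimal usage sketch of the reworked generator, assuming a hypothetical "video.ivf" path and mirroring the construction pattern in the updated unit tests further down. `fps_hint` is only echoed back from fps(); SkipNextFrame() still decodes the skipped frame (it may be a key frame), and ChangeResolution() requests scaling of subsequently decoded frames.

#include <optional>

#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"
#include "test/testsupport/ivf_video_frame_generator.h"

void IllustrateIvfGeneratorUsage() {
  // CreateEnvironment() supplies the Environment the decoder helpers expect.
  webrtc::Environment env = webrtc::CreateEnvironment();
  // "video.ivf" is a placeholder path used only for this sketch.
  webrtc::test::IvfVideoFrameGenerator generator(env, "video.ivf",
                                                 /*fps_hint=*/30);
  generator.ChangeResolution(320, 180);  // Decoded frames get scaled to this.
  generator.SkipNextFrame();             // Advances (and decodes) one frame.
  auto frame_data = generator.NextFrame();  // Scaled buffer + update rect.
}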
Mutex frame_decode_lock_; - rtc::Event next_frame_decoded_; - absl::optional next_frame_ RTC_GUARDED_BY(frame_decode_lock_); + Event next_frame_decoded_; + std::optional next_frame_ RTC_GUARDED_BY(frame_decode_lock_); }; } // namespace test diff --git a/test/testsupport/ivf_video_frame_generator_unittest.cc b/test/testsupport/ivf_video_frame_generator_unittest.cc index d6227b9986..ff0dd08d87 100644 --- a/test/testsupport/ivf_video_frame_generator_unittest.cc +++ b/test/testsupport/ivf_video_frame_generator_unittest.cc @@ -11,9 +11,11 @@ #include "test/testsupport/ivf_video_frame_generator.h" #include +#include #include -#include "absl/types/optional.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/test/create_frame_generator.h" #include "api/units/time_delta.h" #include "api/video/encoded_image.h" @@ -93,7 +95,7 @@ class IvfFileWriterEncodedCallback : public EncodedImageCallback { Mutex lock_; int received_frames_count_ RTC_GUARDED_BY(lock_) = 0; - rtc::Event expected_frames_count_received_; + Event expected_frames_count_received_; }; class IvfVideoFrameGeneratorTest : public ::testing::Test { @@ -116,7 +118,7 @@ class IvfVideoFrameGeneratorTest : public ::testing::Test { std::unique_ptr frame_generator = test::CreateSquareFrameGenerator( kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kI420, - absl::nullopt); + std::nullopt); VideoCodec codec_settings; webrtc::test::CodecSettings(video_codec_type, &codec_settings); @@ -146,7 +148,7 @@ class IvfVideoFrameGeneratorTest : public ::testing::Test { const uint32_t timestamp = last_frame_timestamp + kVideoPayloadTypeFrequency / codec_settings.maxFramerate; - frame.set_timestamp(timestamp); + frame.set_rtp_timestamp(timestamp); last_frame_timestamp = timestamp; @@ -158,21 +160,28 @@ class IvfVideoFrameGeneratorTest : public ::testing::Test { kMaxFrameEncodeWaitTimeout)); } + Environment env_ = CreateEnvironment(); std::string file_name_; std::vector video_frames_; }; } // namespace -TEST_F(IvfVideoFrameGeneratorTest, DoesNotKnowFps) { - CreateTestVideoFile(VideoCodecType::kVideoCodecVP8, VP8Encoder::Create()); - IvfVideoFrameGenerator generator(file_name_); - EXPECT_EQ(generator.fps(), absl::nullopt); +TEST_F(IvfVideoFrameGeneratorTest, FpsWithoutHint) { + CreateTestVideoFile(VideoCodecType::kVideoCodecVP8, CreateVp8Encoder(env_)); + IvfVideoFrameGenerator generator(env_, file_name_, /*fps_hint=*/std::nullopt); + EXPECT_EQ(generator.fps(), std::nullopt); +} + +TEST_F(IvfVideoFrameGeneratorTest, FpsWithHint) { + CreateTestVideoFile(VideoCodecType::kVideoCodecVP8, CreateVp8Encoder(env_)); + IvfVideoFrameGenerator generator(env_, file_name_, /*fps_hint=*/123); + EXPECT_EQ(generator.fps(), 123); } TEST_F(IvfVideoFrameGeneratorTest, Vp8) { - CreateTestVideoFile(VideoCodecType::kVideoCodecVP8, VP8Encoder::Create()); - IvfVideoFrameGenerator generator(file_name_); + CreateTestVideoFile(VideoCodecType::kVideoCodecVP8, CreateVp8Encoder(env_)); + IvfVideoFrameGenerator generator(env_, file_name_, /*fps_hint=*/std::nullopt); for (size_t i = 0; i < video_frames_.size(); ++i) { auto& expected_frame = video_frames_[i]; VideoFrame actual_frame = BuildFrame(generator.NextFrame()); @@ -181,8 +190,8 @@ TEST_F(IvfVideoFrameGeneratorTest, Vp8) { } TEST_F(IvfVideoFrameGeneratorTest, Vp8DoubleRead) { - CreateTestVideoFile(VideoCodecType::kVideoCodecVP8, VP8Encoder::Create()); - IvfVideoFrameGenerator generator(file_name_); + CreateTestVideoFile(VideoCodecType::kVideoCodecVP8, 
CreateVp8Encoder(env_)); + IvfVideoFrameGenerator generator(env_, file_name_, /*fps_hint=*/std::nullopt); for (size_t i = 0; i < video_frames_.size() * 2; ++i) { auto& expected_frame = video_frames_[i % video_frames_.size()]; VideoFrame actual_frame = BuildFrame(generator.NextFrame()); @@ -191,8 +200,8 @@ TEST_F(IvfVideoFrameGeneratorTest, Vp8DoubleRead) { } TEST_F(IvfVideoFrameGeneratorTest, Vp9) { - CreateTestVideoFile(VideoCodecType::kVideoCodecVP9, VP9Encoder::Create()); - IvfVideoFrameGenerator generator(file_name_); + CreateTestVideoFile(VideoCodecType::kVideoCodecVP9, CreateVp9Encoder(env_)); + IvfVideoFrameGenerator generator(env_, file_name_, /*fps_hint=*/std::nullopt); for (size_t i = 0; i < video_frames_.size(); ++i) { auto& expected_frame = video_frames_[i]; VideoFrame actual_frame = BuildFrame(generator.NextFrame()); @@ -202,8 +211,8 @@ TEST_F(IvfVideoFrameGeneratorTest, Vp9) { #if defined(WEBRTC_USE_H264) TEST_F(IvfVideoFrameGeneratorTest, H264) { - CreateTestVideoFile(VideoCodecType::kVideoCodecH264, H264Encoder::Create()); - IvfVideoFrameGenerator generator(file_name_); + CreateTestVideoFile(VideoCodecType::kVideoCodecH264, CreateH264Encoder(env_)); + IvfVideoFrameGenerator generator(env_, file_name_, /*fps_hint=*/std::nullopt); for (size_t i = 0; i < video_frames_.size(); ++i) { auto& expected_frame = video_frames_[i]; VideoFrame actual_frame = BuildFrame(generator.NextFrame()); @@ -212,5 +221,16 @@ TEST_F(IvfVideoFrameGeneratorTest, H264) { } #endif +TEST_F(IvfVideoFrameGeneratorTest, ScalesResolution) { + CreateTestVideoFile(VideoCodecType::kVideoCodecVP8, CreateVp8Encoder(env_)); + IvfVideoFrameGenerator generator(env_, file_name_, /*fps_hint=*/123); + generator.ChangeResolution(kWidth * 2, kHeight / 2); + scoped_refptr frame_buffer = generator.NextFrame().buffer; + frame_buffer = generator.NextFrame().buffer; + ASSERT_TRUE(frame_buffer); + EXPECT_EQ(frame_buffer->width(), kWidth * 2); + EXPECT_EQ(frame_buffer->height(), kHeight / 2); +} + } // namespace test } // namespace webrtc diff --git a/test/testsupport/mock/mock_frame_reader.h b/test/testsupport/mock/mock_frame_reader.h index f68bbf8368..1be387b443 100644 --- a/test/testsupport/mock/mock_frame_reader.h +++ b/test/testsupport/mock/mock_frame_reader.h @@ -20,18 +20,18 @@ namespace test { class MockFrameReader : public FrameReader { public: - MOCK_METHOD(rtc::scoped_refptr, PullFrame, (), (override)); - MOCK_METHOD(rtc::scoped_refptr, PullFrame, (int*), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, PullFrame, (), (override)); + MOCK_METHOD(scoped_refptr, PullFrame, (int*), (override)); + MOCK_METHOD(scoped_refptr, PullFrame, (int*, Resolution, Ratio), (override)); - MOCK_METHOD(rtc::scoped_refptr, ReadFrame, (int), (override)); - MOCK_METHOD(rtc::scoped_refptr, + MOCK_METHOD(scoped_refptr, ReadFrame, (int), (override)); + MOCK_METHOD(scoped_refptr, ReadFrame, (int, Resolution), (override)); - MOCK_METHOD(int, num_frames, (), (const override)); + MOCK_METHOD(int, num_frames, (), (const, override)); }; } // namespace test diff --git a/test/testsupport/perf_test.cc b/test/testsupport/perf_test.cc index bbea5f841a..c274ee1a7b 100644 --- a/test/testsupport/perf_test.cc +++ b/test/testsupport/perf_test.cc @@ -46,7 +46,7 @@ std::string UnitWithDirection( std::vector GetSortedSamples( const SamplesStatsCounter& counter) { - rtc::ArrayView view = + ArrayView view = counter.GetTimedSamples(); std::vector out(view.begin(), view.end()); std::stable_sort(out.begin(), out.end(), @@ -178,7 +178,7 @@ 
class ResultsLinePrinter { void PrintResultList(absl::string_view graph_name, absl::string_view trace_name, - const rtc::ArrayView values, + const ArrayView values, absl::string_view units, const bool important, webrtc::test::ImproveDirection improve_direction) { @@ -198,7 +198,7 @@ class ResultsLinePrinter { absl::string_view units, bool important) { MutexLock lock(&mutex_); - rtc::StringBuilder message; + StringBuilder message; message << (important ? "*" : "") << "RESULT " << graph_name << ": " << trace_name << "= " << prefix << values << suffix << " " << units; // <*>RESULT : = @@ -268,7 +268,7 @@ void PrintResult(absl::string_view measurement, absl::string_view units, bool important, ImproveDirection improve_direction) { - rtc::StringBuilder graph_name; + StringBuilder graph_name; graph_name << measurement << modifier; RTC_CHECK(std::isfinite(value)) << "Expected finite value for graph " << graph_name.str() @@ -286,7 +286,7 @@ void PrintResult(absl::string_view measurement, absl::string_view units, const bool important, ImproveDirection improve_direction) { - rtc::StringBuilder graph_name; + StringBuilder graph_name; graph_name << measurement << modifier; GetPlottableCounterPrinter().AddCounter(graph_name.str(), trace, counter, units); @@ -323,7 +323,7 @@ void PrintResultMeanAndError(absl::string_view measurement, RTC_CHECK(std::isfinite(mean)); RTC_CHECK(std::isfinite(error)); - rtc::StringBuilder graph_name; + StringBuilder graph_name; graph_name << measurement << modifier; GetPerfWriter().LogResultMeanAndError(graph_name.str(), trace, mean, error, units, important, improve_direction); @@ -335,7 +335,7 @@ void PrintResultMeanAndError(absl::string_view measurement, void PrintResultList(absl::string_view measurement, absl::string_view modifier, absl::string_view trace, - const rtc::ArrayView values, + const ArrayView values, absl::string_view units, bool important, ImproveDirection improve_direction) { @@ -343,7 +343,7 @@ void PrintResultList(absl::string_view measurement, RTC_CHECK(std::isfinite(v)); } - rtc::StringBuilder graph_name; + StringBuilder graph_name; graph_name << measurement << modifier; GetPerfWriter().LogResultList(graph_name.str(), trace, values, units, important, improve_direction); diff --git a/test/testsupport/perf_test.h b/test/testsupport/perf_test.h index 732fff7d14..0f48b683ce 100644 --- a/test/testsupport/perf_test.h +++ b/test/testsupport/perf_test.h @@ -76,7 +76,7 @@ void PrintResultList( absl::string_view measurement, absl::string_view modifier, absl::string_view user_story, - rtc::ArrayView values, + ArrayView values, absl::string_view units, bool important, ImproveDirection improve_direction = ImproveDirection::kNone); diff --git a/test/testsupport/perf_test_histogram_writer.cc b/test/testsupport/perf_test_histogram_writer.cc index 93924ba16c..de771b9654 100644 --- a/test/testsupport/perf_test_histogram_writer.cc +++ b/test/testsupport/perf_test_histogram_writer.cc @@ -66,7 +66,7 @@ class PerfTestHistogramWriter : public PerfTestResultWriter { } void LogResultList(absl::string_view graph_name, absl::string_view trace_name, - const rtc::ArrayView values, + const ArrayView values, absl::string_view units, const bool important, ImproveDirection improve_direction) override { @@ -110,7 +110,7 @@ class PerfTestHistogramWriter : public PerfTestResultWriter { // Lookup on graph name + trace name (or measurement + story in catapult // parlance). There should be several histograms with the same measurement // if they're for different stories. 
- rtc::StringBuilder measurement_and_story; + StringBuilder measurement_and_story; measurement_and_story << graph_name << trace_name; MutexLock lock(&mutex_); if (histograms_.count(measurement_and_story.str()) == 0) { diff --git a/test/testsupport/perf_test_result_writer.h b/test/testsupport/perf_test_result_writer.h index 1b93bc9583..5ec31de4d2 100644 --- a/test/testsupport/perf_test_result_writer.h +++ b/test/testsupport/perf_test_result_writer.h @@ -44,7 +44,7 @@ class PerfTestResultWriter { virtual void LogResultList( absl::string_view graph_name, absl::string_view trace_name, - rtc::ArrayView values, + ArrayView values, absl::string_view units, bool important, webrtc::test::ImproveDirection improve_direction) = 0; diff --git a/test/testsupport/test_artifacts.cc b/test/testsupport/test_artifacts.cc index 6f062e5fe4..b0ab046e63 100644 --- a/test/testsupport/test_artifacts.cc +++ b/test/testsupport/test_artifacts.cc @@ -20,7 +20,7 @@ namespace { const std::string& DefaultArtifactPath() { - static const std::string path = webrtc::test::OutputPath(); + static const std::string path = webrtc::test::OutputPathWithRandomDirectory(); return path; } } // namespace @@ -55,8 +55,11 @@ bool WriteToTestArtifactsDir(const char* filename, return false; } - FileWrapper output = FileWrapper::OpenWriteOnly( - JoinFilename(absl::GetFlag(FLAGS_test_artifacts_dir), filename)); + std::string full_path = + JoinFilename(absl::GetFlag(FLAGS_test_artifacts_dir), filename); + FileWrapper output = FileWrapper::OpenWriteOnly(full_path); + + RTC_LOG(LS_INFO) << "Writing test artifacts in: " << full_path; return output.is_open() && output.Write(buffer, length); } diff --git a/test/testsupport/video_frame_writer.cc b/test/testsupport/video_frame_writer.cc index c36ebdeed7..09af7f12ea 100644 --- a/test/testsupport/video_frame_writer.cc +++ b/test/testsupport/video_frame_writer.cc @@ -25,20 +25,20 @@ namespace webrtc { namespace test { namespace { -rtc::Buffer ExtractI420BufferWithSize(const VideoFrame& frame, - int width, - int height) { +Buffer ExtractI420BufferWithSize(const VideoFrame& frame, + int width, + int height) { if (frame.width() != width || frame.height() != height) { RTC_CHECK_LE(std::abs(static_cast(width) / height - static_cast(frame.width()) / frame.height()), 2 * std::numeric_limits::epsilon()); // Same aspect ratio, no cropping needed. - rtc::scoped_refptr scaled(I420Buffer::Create(width, height)); + scoped_refptr scaled(I420Buffer::Create(width, height)); scaled->ScaleFrom(*frame.video_frame_buffer()->ToI420()); size_t length = CalcBufferSize(VideoType::kI420, scaled->width(), scaled->height()); - rtc::Buffer buffer(length); + Buffer buffer(length); RTC_CHECK_NE(ExtractBuffer(scaled, length, buffer.data()), -1); return buffer; } @@ -46,7 +46,7 @@ rtc::Buffer ExtractI420BufferWithSize(const VideoFrame& frame, // No resize. 
size_t length = CalcBufferSize(VideoType::kI420, frame.width(), frame.height()); - rtc::Buffer buffer(length); + Buffer buffer(length); RTC_CHECK_NE(ExtractBuffer(frame, length, buffer.data()), -1); return buffer; } @@ -72,7 +72,7 @@ Y4mVideoFrameWriterImpl::Y4mVideoFrameWriterImpl(std::string output_file_name, } bool Y4mVideoFrameWriterImpl::WriteFrame(const webrtc::VideoFrame& frame) { - rtc::Buffer frame_buffer = ExtractI420BufferWithSize(frame, width_, height_); + Buffer frame_buffer = ExtractI420BufferWithSize(frame, width_, height_); RTC_CHECK_EQ(frame_buffer.size(), frame_writer_->FrameLength()); return frame_writer_->WriteFrame(frame_buffer.data()); } @@ -98,7 +98,7 @@ YuvVideoFrameWriterImpl::YuvVideoFrameWriterImpl(std::string output_file_name, } bool YuvVideoFrameWriterImpl::WriteFrame(const webrtc::VideoFrame& frame) { - rtc::Buffer frame_buffer = ExtractI420BufferWithSize(frame, width_, height_); + Buffer frame_buffer = ExtractI420BufferWithSize(frame, width_, height_); RTC_CHECK_EQ(frame_buffer.size(), frame_writer_->FrameLength()); return frame_writer_->WriteFrame(frame_buffer.data()); } diff --git a/test/testsupport/video_frame_writer_unittest.cc b/test/testsupport/video_frame_writer_unittest.cc index 9d59627c0f..30ebc526d7 100644 --- a/test/testsupport/video_frame_writer_unittest.cc +++ b/test/testsupport/video_frame_writer_unittest.cc @@ -38,8 +38,8 @@ const size_t kFileHeaderSize = 29; // Size of header: "FRAME\n" const size_t kFrameHeaderSize = 6; -rtc::scoped_refptr CreateI420Buffer(int width, int height) { - rtc::scoped_refptr buffer(I420Buffer::Create(width, height)); +scoped_refptr CreateI420Buffer(int width, int height) { + scoped_refptr buffer(I420Buffer::Create(width, height)); for (int x = 0; x < width; x++) { for (int y = 0; y < height; y++) { buffer->MutableDataY()[x + y * width] = 128; @@ -56,9 +56,8 @@ rtc::scoped_refptr CreateI420Buffer(int width, int height) { return buffer; } -void AssertI420BuffersEq( - rtc::scoped_refptr actual, - rtc::scoped_refptr expected) { +void AssertI420BuffersEq(scoped_refptr actual, + scoped_refptr expected) { ASSERT_TRUE(actual); ASSERT_EQ(actual->width(), expected->width()); @@ -126,7 +125,7 @@ class YuvVideoFrameWriterTest : public VideoFrameWriterTest { TEST_F(Y4mVideoFrameWriterTest, InitSuccess) {} TEST_F(Y4mVideoFrameWriterTest, WriteFrame) { - rtc::scoped_refptr expected_buffer = + scoped_refptr expected_buffer = CreateI420Buffer(kFrameWidth, kFrameHeight); VideoFrame frame = @@ -149,7 +148,7 @@ TEST_F(Y4mVideoFrameWriterTest, WriteFrame) { TEST_F(YuvVideoFrameWriterTest, InitSuccess) {} TEST_F(YuvVideoFrameWriterTest, WriteFrame) { - rtc::scoped_refptr expected_buffer = + scoped_refptr expected_buffer = CreateI420Buffer(kFrameWidth, kFrameHeight); VideoFrame frame = diff --git a/test/testsupport/y4m_frame_generator.cc b/test/testsupport/y4m_frame_generator.cc index f1ecbf9b41..bd3c8f4127 100644 --- a/test/testsupport/y4m_frame_generator.cc +++ b/test/testsupport/y4m_frame_generator.cc @@ -57,7 +57,7 @@ Y4mFrameGenerator::Y4mFrameGenerator(absl::string_view filename, Y4mFrameGenerator::VideoFrameData Y4mFrameGenerator::NextFrame() { webrtc::VideoFrame::UpdateRect update_rect{0, 0, static_cast(width_), static_cast(height_)}; - rtc::scoped_refptr next_frame_buffer = + scoped_refptr next_frame_buffer = frame_reader_->PullFrame(); if (!next_frame_buffer || @@ -67,13 +67,17 @@ Y4mFrameGenerator::VideoFrameData Y4mFrameGenerator::NextFrame() { } // Allocate a new buffer and return scaled version. 
- rtc::scoped_refptr scaled_buffer( + scoped_refptr scaled_buffer( I420Buffer::Create(width_, height_)); webrtc::I420Buffer::SetBlack(scaled_buffer.get()); scaled_buffer->ScaleFrom(*next_frame_buffer->ToI420()); return VideoFrameData(scaled_buffer, update_rect); } +void Y4mFrameGenerator::SkipNextFrame() { + frame_reader_->PullFrame(); +} + void Y4mFrameGenerator::ChangeResolution(size_t width, size_t height) { width_ = width; height_ = height; diff --git a/test/testsupport/y4m_frame_generator.h b/test/testsupport/y4m_frame_generator.h index 4ff64be7dc..dfffb4a732 100644 --- a/test/testsupport/y4m_frame_generator.h +++ b/test/testsupport/y4m_frame_generator.h @@ -13,10 +13,10 @@ #include #include +#include #include #include "absl/strings/string_view.h" -#include "absl/types/optional.h" #include "api/test/frame_generator_interface.h" #include "rtc_base/checks.h" #include "test/testsupport/frame_reader.h" @@ -45,11 +45,13 @@ class Y4mFrameGenerator : public FrameGeneratorInterface { VideoFrameData NextFrame() override; + void SkipNextFrame() override; + void ChangeResolution(size_t width, size_t height) override; Resolution GetResolution() const override; - absl::optional fps() const override { return fps_; } + std::optional fps() const override { return fps_; } private: YuvFrameReaderImpl::RepeatMode ToYuvFrameReaderRepeatMode( diff --git a/test/testsupport/y4m_frame_reader.cc b/test/testsupport/y4m_frame_reader.cc index 72fb9b5188..4501ddba41 100644 --- a/test/testsupport/y4m_frame_reader.cc +++ b/test/testsupport/y4m_frame_reader.cc @@ -10,12 +10,14 @@ #include +#include #include #include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include "rtc_base/logging.h" +#include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" #include "test/testsupport/file_utils.h" #include "test/testsupport/frame_reader.h" @@ -38,10 +40,29 @@ void ParseY4mHeader(std::string filepath, << "File " << filepath << " is too small"; fclose(file); - RTC_CHECK(sscanf(h, "YUV4MPEG2 W%d H%d", &resolution->width, - &resolution->height) == 2) + std::vector header = split(h, ' '); + RTC_CHECK(!header.empty() && header[0] == "YUV4MPEG2") << filepath << " is not a valid Y4M file"; + for (size_t i = 1; i < header.size(); ++i) { + RTC_CHECK(!header[i].empty()); + switch (header[i][0]) { + case 'W': { + auto n = header[i].substr(1); + std::from_chars(n.data(), n.data() + n.size(), resolution->width); + continue; + } + case 'H': { + auto n = header[i].substr(1); + std::from_chars(n.data(), n.data() + n.size(), resolution->height); + continue; + } + default: { + continue; + } + } + } + RTC_CHECK_GT(resolution->width, 0) << "Width must be positive"; RTC_CHECK_GT(resolution->height, 0) << "Height must be positive"; diff --git a/test/testsupport/y4m_frame_reader_unittest.cc b/test/testsupport/y4m_frame_reader_unittest.cc index df81a8135b..205cbdeb39 100644 --- a/test/testsupport/y4m_frame_reader_unittest.cc +++ b/test/testsupport/y4m_frame_reader_unittest.cc @@ -65,13 +65,13 @@ TEST_F(Y4mFrameReaderTest, num_frames) { } TEST_F(Y4mFrameReaderTest, PullFrame_frameResolution) { - rtc::scoped_refptr buffer = reader_->PullFrame(); + scoped_refptr buffer = reader_->PullFrame(); EXPECT_EQ(kResolution.width, buffer->width()); EXPECT_EQ(kResolution.height, buffer->height()); } TEST_F(Y4mFrameReaderTest, PullFrame_frameContent) { - rtc::scoped_refptr buffer = reader_->PullFrame(); + scoped_refptr buffer = reader_->PullFrame(); 
EXPECT_EQ(kFrameContent[0][0], *buffer->DataY()); EXPECT_EQ(kFrameContent[0][1], *buffer->DataU()); EXPECT_EQ(kFrameContent[0][2], *buffer->DataV()); @@ -81,15 +81,14 @@ TEST_F(Y4mFrameReaderTest, ReadFrame_randomOrder) { std::vector expected_frames = {2, 0, 1}; std::vector actual_frames; for (int frame_num : expected_frames) { - rtc::scoped_refptr buffer = - reader_->ReadFrame(frame_num); + scoped_refptr buffer = reader_->ReadFrame(frame_num); actual_frames.push_back(*buffer->DataY()); } EXPECT_EQ(expected_frames, actual_frames); } TEST_F(Y4mFrameReaderTest, PullFrame_scale) { - rtc::scoped_refptr buffer = reader_->PullFrame( + scoped_refptr buffer = reader_->PullFrame( /*pulled_frame_num=*/nullptr, Resolution({.width = 2, .height = 2}), FrameReader::kNoScale); EXPECT_EQ(2, buffer->width()); @@ -108,7 +107,7 @@ TEST_P(Y4mFrameReaderRepeatModeTest, PullFrame) { reader_ = CreateY4mFrameReader(filepath_, mode); std::vector read_frames; for (size_t i = 0; i < expected_frames.size(); ++i) { - rtc::scoped_refptr buffer = reader_->PullFrame(); + scoped_refptr buffer = reader_->PullFrame(); read_frames.push_back(*buffer->DataY()); } EXPECT_EQ(expected_frames, read_frames); @@ -136,7 +135,7 @@ TEST_P(Y4mFrameReaderFramerateScaleTest, PullFrame) { std::vector actual_frames; for (size_t i = 0; i < expected_frames.size(); ++i) { int pulled_frame; - rtc::scoped_refptr buffer = + scoped_refptr buffer = reader_->PullFrame(&pulled_frame, kResolution, framerate_scale); actual_frames.push_back(pulled_frame); } diff --git a/test/testsupport/yuv_frame_reader.cc b/test/testsupport/yuv_frame_reader.cc index 02c1a68008..ea1a586e9f 100644 --- a/test/testsupport/yuv_frame_reader.cc +++ b/test/testsupport/yuv_frame_reader.cc @@ -36,7 +36,7 @@ int WrapFrameNum(int frame_num, int num_frames, RepeatMode mode) { } RTC_CHECK_EQ(RepeatMode::kPingPong, mode); - int cycle_len = 2 * (num_frames - 1); + int cycle_len = std::max(1, 2 * (num_frames - 1)); int wrapped_num = frame_num % cycle_len; if (wrapped_num >= num_frames) { return cycle_len - wrapped_num; @@ -44,13 +44,13 @@ int WrapFrameNum(int frame_num, int num_frames, RepeatMode mode) { return wrapped_num; } -rtc::scoped_refptr Scale(rtc::scoped_refptr buffer, - Resolution resolution) { +scoped_refptr Scale(scoped_refptr buffer, + Resolution resolution) { if (buffer->width() == resolution.width && buffer->height() == resolution.height) { return buffer; } - rtc::scoped_refptr scaled( + scoped_refptr scaled( I420Buffer::Create(resolution.width, resolution.height)); scaled->ScaleFrom(*buffer.get()); return scaled; @@ -103,18 +103,17 @@ void YuvFrameReaderImpl::Init() { RTC_CHECK_GT(num_frames_, 0u) << "File " << filepath_ << " is too small"; } -rtc::scoped_refptr YuvFrameReaderImpl::PullFrame() { +scoped_refptr YuvFrameReaderImpl::PullFrame() { return PullFrame(/*frame_num=*/nullptr); } -rtc::scoped_refptr YuvFrameReaderImpl::PullFrame(int* frame_num) { +scoped_refptr YuvFrameReaderImpl::PullFrame(int* frame_num) { return PullFrame(frame_num, resolution_, /*framerate_scale=*/kNoScale); } -rtc::scoped_refptr YuvFrameReaderImpl::PullFrame( - int* frame_num, - Resolution resolution, - Ratio framerate_scale) { +scoped_refptr YuvFrameReaderImpl::PullFrame(int* frame_num, + Resolution resolution, + Ratio framerate_scale) { frame_num_ += framerate_scaler_.Skip(framerate_scale); auto buffer = ReadFrame(frame_num_, resolution); if (frame_num != nullptr) { @@ -123,13 +122,12 @@ rtc::scoped_refptr YuvFrameReaderImpl::PullFrame( return buffer; } -rtc::scoped_refptr 
YuvFrameReaderImpl::ReadFrame(int frame_num) { +scoped_refptr YuvFrameReaderImpl::ReadFrame(int frame_num) { return ReadFrame(frame_num, resolution_); } -rtc::scoped_refptr YuvFrameReaderImpl::ReadFrame( - int frame_num, - Resolution resolution) { +scoped_refptr YuvFrameReaderImpl::ReadFrame(int frame_num, + Resolution resolution) { int wrapped_num = WrapFrameNum(frame_num, num_frames_, repeat_mode_); if (wrapped_num >= num_frames_) { RTC_CHECK_EQ(RepeatMode::kSingle, repeat_mode_); diff --git a/test/testsupport/yuv_frame_reader_unittest.cc b/test/testsupport/yuv_frame_reader_unittest.cc index b9ea2d0c46..82d242ff32 100644 --- a/test/testsupport/yuv_frame_reader_unittest.cc +++ b/test/testsupport/yuv_frame_reader_unittest.cc @@ -29,8 +29,7 @@ using Ratio = FrameReader::Ratio; using RepeatMode = YuvFrameReaderImpl::RepeatMode; constexpr Resolution kResolution({.width = 1, .height = 1}); -constexpr char kFrameContent[3][3] = {{0, 1, 2}, {1, 2, 3}, {2, 3, 4}}; -constexpr int kNumFrames = sizeof(kFrameContent) / sizeof(kFrameContent[0]); +constexpr int kDefaultNumFrames = 3; } // namespace class YuvFrameReaderTest : public ::testing::Test { @@ -41,43 +40,52 @@ class YuvFrameReaderTest : public ::testing::Test { void SetUp() override { filepath_ = webrtc::test::TempFilename(webrtc::test::OutputPath(), "yuv_frame_reader_unittest"); + CreateYuvFileAndReader(/*num_frames=*/3, RepeatMode::kSingle); + } + + void TearDown() override { remove(filepath_.c_str()); } + + void CreateYuvFileAndReader(int num_frames, RepeatMode repeat_mode) { FILE* file = fopen(filepath_.c_str(), "wb"); - fwrite(kFrameContent, 1, sizeof(kFrameContent), file); + for (int i = 0; i < num_frames; ++i) { + uint8_t y = static_cast(i & 255); + uint8_t u = static_cast((i + 1) & 255); + uint8_t v = static_cast((i + 2) & 255); + fwrite(&y, 1, 1, file); + fwrite(&u, 1, 1, file); + fwrite(&v, 1, 1, file); + } fclose(file); - reader_ = CreateYuvFrameReader(filepath_, kResolution); + reader_ = CreateYuvFrameReader(filepath_, kResolution, repeat_mode); } - void TearDown() override { remove(filepath_.c_str()); } - std::string filepath_; std::unique_ptr reader_; }; TEST_F(YuvFrameReaderTest, num_frames) { - EXPECT_EQ(kNumFrames, reader_->num_frames()); + EXPECT_EQ(kDefaultNumFrames, reader_->num_frames()); } TEST_F(YuvFrameReaderTest, PullFrame_frameContent) { - rtc::scoped_refptr buffer = reader_->PullFrame(); - EXPECT_EQ(kFrameContent[0][0], *buffer->DataY()); - EXPECT_EQ(kFrameContent[0][1], *buffer->DataU()); - EXPECT_EQ(kFrameContent[0][2], *buffer->DataV()); + scoped_refptr buffer = reader_->PullFrame(); + EXPECT_EQ(0u, *buffer->DataY()); + EXPECT_EQ(1u, *buffer->DataU()); + EXPECT_EQ(2u, *buffer->DataV()); } TEST_F(YuvFrameReaderTest, ReadFrame_randomOrder) { - std::vector expected_frames = {2, 0, 1}; - std::vector actual_frames; - for (int frame_num : expected_frames) { - rtc::scoped_refptr buffer = - reader_->ReadFrame(frame_num); - actual_frames.push_back(*buffer->DataY()); - } - EXPECT_EQ(expected_frames, actual_frames); + scoped_refptr buffer = reader_->ReadFrame(2); + EXPECT_EQ(2u, *buffer->DataY()); + buffer = reader_->ReadFrame(0); + EXPECT_EQ(0u, *buffer->DataY()); + buffer = reader_->ReadFrame(1); + EXPECT_EQ(1u, *buffer->DataY()); } TEST_F(YuvFrameReaderTest, PullFrame_scale) { - rtc::scoped_refptr buffer = reader_->PullFrame( + scoped_refptr buffer = reader_->PullFrame( /*pulled_frame_num=*/nullptr, Resolution({.width = 2, .height = 2}), FrameReader::kNoScale); EXPECT_EQ(2, buffer->width()); @@ -87,30 +95,31 @@ 
TEST_F(YuvFrameReaderTest, PullFrame_scale) { class YuvFrameReaderRepeatModeTest : public YuvFrameReaderTest, public ::testing::WithParamInterface< - std::tuple>> {}; + std::tuple>> {}; TEST_P(YuvFrameReaderRepeatModeTest, PullFrame) { - RepeatMode mode = std::get<0>(GetParam()); - std::vector expected_frames = std::get<1>(GetParam()); - - reader_ = CreateYuvFrameReader(filepath_, kResolution, mode); - std::vector read_frames; - for (size_t i = 0; i < expected_frames.size(); ++i) { - rtc::scoped_refptr buffer = reader_->PullFrame(); - read_frames.push_back(*buffer->DataY()); + auto [num_frames, repeat_mode, expected_frames] = GetParam(); + CreateYuvFileAndReader(num_frames, repeat_mode); + for (auto expected_frame : expected_frames) { + scoped_refptr buffer = reader_->PullFrame(); + EXPECT_EQ(expected_frame, *buffer->DataY()); } - EXPECT_EQ(expected_frames, read_frames); } INSTANTIATE_TEST_SUITE_P( YuvFrameReaderTest, YuvFrameReaderRepeatModeTest, ::testing::ValuesIn( - {std::make_tuple(RepeatMode::kSingle, std::vector{0, 1, 2}), - std::make_tuple(RepeatMode::kRepeat, - std::vector{0, 1, 2, 0, 1, 2}), - std::make_tuple(RepeatMode::kPingPong, - std::vector{0, 1, 2, 1, 0, 1, 2})})); + {std::make_tuple(3, RepeatMode::kSingle, std::vector{0, 1, 2}), + std::make_tuple(3, + RepeatMode::kRepeat, + std::vector{0, 1, 2, 0, 1, 2}), + std::make_tuple(3, + RepeatMode::kPingPong, + std::vector{0, 1, 2, 1, 0, 1, 2}), + std::make_tuple(1, + RepeatMode::kPingPong, + std::vector{0, 0})})); class YuvFrameReaderFramerateScaleTest : public YuvFrameReaderTest, @@ -118,17 +127,13 @@ class YuvFrameReaderFramerateScaleTest std::tuple>> {}; TEST_P(YuvFrameReaderFramerateScaleTest, PullFrame) { - Ratio framerate_scale = std::get<0>(GetParam()); - std::vector expected_frames = std::get<1>(GetParam()); - - std::vector actual_frames; - for (size_t i = 0; i < expected_frames.size(); ++i) { + auto [framerate_scale, expected_frames] = GetParam(); + for (auto expected_frame : expected_frames) { int pulled_frame; - rtc::scoped_refptr buffer = + scoped_refptr buffer = reader_->PullFrame(&pulled_frame, kResolution, framerate_scale); - actual_frames.push_back(pulled_frame); + EXPECT_EQ(pulled_frame, expected_frame); } - EXPECT_EQ(expected_frames, actual_frames); } INSTANTIATE_TEST_SUITE_P(YuvFrameReaderTest, diff --git a/test/time_controller/BUILD.gn b/test/time_controller/BUILD.gn index b4b368a42a..3255594797 100644 --- a/test/time_controller/BUILD.gn +++ b/test/time_controller/BUILD.gn @@ -11,8 +11,6 @@ import("../../webrtc.gni") rtc_library("time_controller") { testonly = true sources = [ - "external_time_controller.cc", - "external_time_controller.h", "real_time_controller.cc", "real_time_controller.h", "simulated_task_queue.cc", @@ -24,6 +22,7 @@ rtc_library("time_controller") { ] deps = [ + "../../api:field_trials_view", "../../api:sequence_checker", "../../api:time_controller", "../../api/task_queue", @@ -35,13 +34,14 @@ rtc_library("time_controller") { "../../rtc_base:platform_thread_types", "../../rtc_base:rtc_base_tests_utils", "../../rtc_base:rtc_event", + "../../rtc_base:socket_server", + "../../rtc_base:threading", + "../../rtc_base:timeutils", "../../rtc_base/synchronization:mutex", "../../rtc_base/synchronization:yield_policy", "../../system_wrappers", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] } @@ -49,7 +49,6 @@ if (rtc_include_tests) { rtc_library("time_controller_unittests") 
{ testonly = true sources = [ - "external_time_controller_unittest.cc", "simulated_time_controller_unittest.cc", "time_controller_conformance_test.cc", ] @@ -57,10 +56,10 @@ if (rtc_include_tests) { ":time_controller", "../:test_support", "../../api:time_controller", + "../../api/task_queue", "../../api/units:time_delta", "../../rtc_base:macromagic", "../../rtc_base:rtc_event", - "../../rtc_base:rtc_task_queue", "../../rtc_base:task_queue_for_test", "../../rtc_base:threading", "../../rtc_base/synchronization:mutex", diff --git a/test/time_controller/external_time_controller.cc b/test/time_controller/external_time_controller.cc deleted file mode 100644 index 41f36eaaef..0000000000 --- a/test/time_controller/external_time_controller.cc +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "test/time_controller/external_time_controller.h" - -#include -#include -#include -#include - -#include "absl/functional/any_invocable.h" -#include "api/task_queue/task_queue_base.h" -#include "api/task_queue/task_queue_factory.h" -#include "api/units/time_delta.h" -#include "api/units/timestamp.h" -#include "rtc_base/checks.h" -#include "rtc_base/synchronization/yield_policy.h" -#include "test/time_controller/simulated_time_controller.h" - -namespace webrtc { - -// Wraps a TaskQueue so that it can reschedule the time controller whenever -// an external call schedules a new task. 
-class ExternalTimeController::TaskQueueWrapper : public TaskQueueBase { - public: - TaskQueueWrapper(ExternalTimeController* parent, - std::unique_ptr base) - : parent_(parent), base_(std::move(base)) {} - - void PostTaskImpl(absl::AnyInvocable task, - const PostTaskTraits& traits, - const Location& location) override { - parent_->UpdateTime(); - base_->PostTask(TaskWrapper(std::move(task))); - parent_->ScheduleNext(); - } - - void PostDelayedTaskImpl(absl::AnyInvocable task, - TimeDelta delay, - const PostDelayedTaskTraits& traits, - const Location& location) override { - parent_->UpdateTime(); - if (traits.high_precision) { - base_->PostDelayedHighPrecisionTask(TaskWrapper(std::move(task)), delay); - } else { - base_->PostDelayedTask(TaskWrapper(std::move(task)), delay); - } - parent_->ScheduleNext(); - } - - void Delete() override { delete this; } - - private: - absl::AnyInvocable TaskWrapper( - absl::AnyInvocable task) { - return [task = std::move(task), this]() mutable { - CurrentTaskQueueSetter current(this); - std::move(task)(); - }; - } - - ExternalTimeController* const parent_; - std::unique_ptr base_; -}; - -ExternalTimeController::ExternalTimeController(ControlledAlarmClock* alarm) - : alarm_(alarm), - impl_(alarm_->GetClock()->CurrentTime()), - yield_policy_(&impl_) { - global_clock_.SetTime(alarm_->GetClock()->CurrentTime()); - alarm_->SetCallback([this] { Run(); }); -} - -Clock* ExternalTimeController::GetClock() { - return alarm_->GetClock(); -} - -TaskQueueFactory* ExternalTimeController::GetTaskQueueFactory() { - return this; -} - -void ExternalTimeController::AdvanceTime(TimeDelta duration) { - alarm_->Sleep(duration); -} - -std::unique_ptr ExternalTimeController::CreateThread( - const std::string& name, - std::unique_ptr socket_server) { - RTC_DCHECK_NOTREACHED(); - return nullptr; -} - -rtc::Thread* ExternalTimeController::GetMainThread() { - RTC_DCHECK_NOTREACHED(); - return nullptr; -} - -std::unique_ptr -ExternalTimeController::CreateTaskQueue( - absl::string_view name, - TaskQueueFactory::Priority priority) const { - return std::unique_ptr( - new TaskQueueWrapper(const_cast(this), - impl_.CreateTaskQueue(name, priority))); -} - -void ExternalTimeController::Run() { - rtc::ScopedYieldPolicy yield_policy(&impl_); - UpdateTime(); - impl_.RunReadyRunners(); - ScheduleNext(); -} - -void ExternalTimeController::UpdateTime() { - Timestamp now = alarm_->GetClock()->CurrentTime(); - impl_.AdvanceTime(now); - global_clock_.SetTime(now); -} - -void ExternalTimeController::ScheduleNext() { - RTC_DCHECK_EQ(impl_.CurrentTime(), alarm_->GetClock()->CurrentTime()); - TimeDelta delay = - std::max(impl_.NextRunTime() - impl_.CurrentTime(), TimeDelta::Zero()); - if (delay.IsFinite()) { - alarm_->ScheduleAlarmAt(alarm_->GetClock()->CurrentTime() + delay); - } -} - -} // namespace webrtc diff --git a/test/time_controller/external_time_controller.h b/test/time_controller/external_time_controller.h deleted file mode 100644 index a67f2557b4..0000000000 --- a/test/time_controller/external_time_controller.h +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#ifndef TEST_TIME_CONTROLLER_EXTERNAL_TIME_CONTROLLER_H_ -#define TEST_TIME_CONTROLLER_EXTERNAL_TIME_CONTROLLER_H_ - -#include -#include - -#include "absl/strings/string_view.h" -#include "api/task_queue/task_queue_base.h" -#include "api/task_queue/task_queue_factory.h" -#include "api/test/time_controller.h" -#include "api/units/time_delta.h" -#include "api/units/timestamp.h" -#include "system_wrappers/include/clock.h" -#include "test/time_controller/simulated_time_controller.h" - -namespace webrtc { - -// TimeController implementation built on an external controlled alarm. -// This implementation is used to delegate scheduling and execution to an -// external run loop. -class ExternalTimeController : public TimeController, public TaskQueueFactory { - public: - explicit ExternalTimeController(ControlledAlarmClock* alarm); - - // Implementation of TimeController. - Clock* GetClock() override; - TaskQueueFactory* GetTaskQueueFactory() override; - void AdvanceTime(TimeDelta duration) override; - std::unique_ptr CreateThread( - const std::string& name, - std::unique_ptr socket_server) override; - rtc::Thread* GetMainThread() override; - - // Implementation of TaskQueueFactory. - std::unique_ptr CreateTaskQueue( - absl::string_view name, - TaskQueueFactory::Priority priority) const override; - - private: - class TaskQueueWrapper; - - // Executes any tasks scheduled at or before the current time. May call - // `ScheduleNext` to schedule the next call to `Run`. - void Run(); - - void UpdateTime(); - void ScheduleNext(); - - ControlledAlarmClock* alarm_; - sim_time_impl::SimulatedTimeControllerImpl impl_; - rtc::ScopedYieldPolicy yield_policy_; - - // Overrides the global rtc::Clock to ensure that it reports the same times as - // the time controller. - rtc::ScopedBaseFakeClock global_clock_; -}; - -} // namespace webrtc - -#endif // TEST_TIME_CONTROLLER_EXTERNAL_TIME_CONTROLLER_H_ diff --git a/test/time_controller/external_time_controller_unittest.cc b/test/time_controller/external_time_controller_unittest.cc deleted file mode 100644 index 13d63fe8ed..0000000000 --- a/test/time_controller/external_time_controller_unittest.cc +++ /dev/null @@ -1,179 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "test/time_controller/external_time_controller.h" - -#include -#include -#include - -#include "rtc_base/event.h" -#include "rtc_base/task_queue.h" -#include "rtc_base/task_utils/repeating_task.h" -#include "test/gmock.h" -#include "test/gtest.h" - -// NOTE: Since these tests rely on real time behavior, they will be flaky -// if run on heavily loaded systems. 
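Not part of the patch itself: the deleted ExternalTimeController tests below exercised repeating tasks driven by a fake alarm. A rough sketch of equivalent coverage using GlobalSimulatedTimeController, which this change keeps in the time_controller target (names here are illustrative only):

#include <memory>
#include <utility>

#include "api/task_queue/task_queue_base.h"
#include "api/task_queue/task_queue_factory.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "test/time_controller/simulated_time_controller.h"

void IllustrateSimulatedRepeatingTask() {
  webrtc::GlobalSimulatedTimeController time(webrtc::Timestamp::Seconds(1000));
  std::unique_ptr<webrtc::TaskQueueBase, webrtc::TaskQueueDeleter> queue =
      time.GetTaskQueueFactory()->CreateTaskQueue(
          "TestQueue", webrtc::TaskQueueFactory::Priority::NORMAL);
  int counter = 0;
  webrtc::RepeatingTaskHandle handle =
      webrtc::RepeatingTaskHandle::Start(queue.get(), [&] {
        ++counter;
        return webrtc::TimeDelta::Millis(5);
      });
  // Nothing runs until simulated time is advanced; no real waiting happens.
  time.AdvanceTime(webrtc::TimeDelta::Millis(20));
  // The task has run repeatedly by now (at 0, 5, 10 and 15 ms at least).
  queue->PostTask([h = std::move(handle)]() mutable { h.Stop(); });
  time.AdvanceTime(webrtc::TimeDelta::Millis(5));  // Executes the Stop() task.
}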
-namespace webrtc { -namespace { -using ::testing::AtLeast; -using ::testing::Invoke; -using ::testing::MockFunction; -using ::testing::NiceMock; -using ::testing::Return; -constexpr Timestamp kStartTime = Timestamp::Seconds(1000); - -class FakeAlarm : public ControlledAlarmClock { - public: - explicit FakeAlarm(Timestamp start_time); - - Clock* GetClock() override; - bool ScheduleAlarmAt(Timestamp deadline) override; - void SetCallback(std::function callback) override; - void Sleep(TimeDelta duration) override; - - private: - SimulatedClock clock_; - Timestamp deadline_; - std::function callback_; -}; - -FakeAlarm::FakeAlarm(Timestamp start_time) - : clock_(start_time), - deadline_(Timestamp::PlusInfinity()), - callback_([] {}) {} - -Clock* FakeAlarm::GetClock() { - return &clock_; -} - -bool FakeAlarm::ScheduleAlarmAt(Timestamp deadline) { - if (deadline < deadline_) { - deadline_ = deadline; - return true; - } - return false; -} - -void FakeAlarm::SetCallback(std::function callback) { - callback_ = callback; -} - -void FakeAlarm::Sleep(TimeDelta duration) { - Timestamp end_time = clock_.CurrentTime() + duration; - - while (deadline_ <= end_time) { - clock_.AdvanceTime(deadline_ - clock_.CurrentTime()); - deadline_ = Timestamp::PlusInfinity(); - callback_(); - } - - clock_.AdvanceTime(end_time - clock_.CurrentTime()); -} - -} // namespace - -TEST(ExternalTimeControllerTest, TaskIsStoppedOnStop) { - const TimeDelta kShortInterval = TimeDelta::Millis(5); - const TimeDelta kLongInterval = TimeDelta::Millis(20); - const int kShortIntervalCount = 4; - const int kMargin = 1; - FakeAlarm alarm(kStartTime); - ExternalTimeController time_simulation(&alarm); - rtc::TaskQueue task_queue( - time_simulation.GetTaskQueueFactory()->CreateTaskQueue( - "TestQueue", TaskQueueFactory::Priority::NORMAL)); - std::atomic_int counter(0); - auto handle = RepeatingTaskHandle::Start(task_queue.Get(), [&] { - if (++counter >= kShortIntervalCount) - return kLongInterval; - return kShortInterval; - }); - // Sleep long enough to go through the initial phase. - time_simulation.AdvanceTime(kShortInterval * (kShortIntervalCount + kMargin)); - EXPECT_EQ(counter.load(), kShortIntervalCount); - - task_queue.PostTask( - [handle = std::move(handle)]() mutable { handle.Stop(); }); - - // Sleep long enough that the task would run at least once more if not - // stopped. 
- time_simulation.AdvanceTime(kLongInterval * 2); - EXPECT_EQ(counter.load(), kShortIntervalCount); -} - -TEST(ExternalTimeControllerTest, TaskCanStopItself) { - std::atomic_int counter(0); - FakeAlarm alarm(kStartTime); - ExternalTimeController time_simulation(&alarm); - rtc::TaskQueue task_queue( - time_simulation.GetTaskQueueFactory()->CreateTaskQueue( - "TestQueue", TaskQueueFactory::Priority::NORMAL)); - - RepeatingTaskHandle handle; - task_queue.PostTask([&] { - handle = RepeatingTaskHandle::Start(task_queue.Get(), [&] { - ++counter; - handle.Stop(); - return TimeDelta::Millis(2); - }); - }); - time_simulation.AdvanceTime(TimeDelta::Millis(10)); - EXPECT_EQ(counter.load(), 1); -} - -TEST(ExternalTimeControllerTest, YieldForTask) { - FakeAlarm alarm(kStartTime); - ExternalTimeController time_simulation(&alarm); - - rtc::TaskQueue task_queue( - time_simulation.GetTaskQueueFactory()->CreateTaskQueue( - "TestQueue", TaskQueueFactory::Priority::NORMAL)); - - rtc::Event event; - task_queue.PostTask([&] { event.Set(); }); - EXPECT_TRUE(event.Wait(TimeDelta::Millis(200))); -} - -TEST(ExternalTimeControllerTest, TasksYieldToEachOther) { - FakeAlarm alarm(kStartTime); - ExternalTimeController time_simulation(&alarm); - - rtc::TaskQueue task_queue( - time_simulation.GetTaskQueueFactory()->CreateTaskQueue( - "TestQueue", TaskQueueFactory::Priority::NORMAL)); - rtc::TaskQueue other_queue( - time_simulation.GetTaskQueueFactory()->CreateTaskQueue( - "OtherQueue", TaskQueueFactory::Priority::NORMAL)); - - task_queue.PostTask([&] { - rtc::Event event; - other_queue.PostTask([&] { event.Set(); }); - EXPECT_TRUE(event.Wait(TimeDelta::Millis(200))); - }); - - time_simulation.AdvanceTime(TimeDelta::Millis(300)); -} - -TEST(ExternalTimeControllerTest, CurrentTaskQueue) { - FakeAlarm alarm(kStartTime); - ExternalTimeController time_simulation(&alarm); - - rtc::TaskQueue task_queue( - time_simulation.GetTaskQueueFactory()->CreateTaskQueue( - "TestQueue", TaskQueueFactory::Priority::NORMAL)); - - task_queue.PostTask([&] { EXPECT_TRUE(task_queue.IsCurrent()); }); - - time_simulation.AdvanceTime(TimeDelta::Millis(10)); -} - -} // namespace webrtc diff --git a/test/time_controller/real_time_controller.cc b/test/time_controller/real_time_controller.cc index 7cc750d6d4..5a27d25996 100644 --- a/test/time_controller/real_time_controller.cc +++ b/test/time_controller/real_time_controller.cc @@ -9,15 +9,16 @@ */ #include "test/time_controller/real_time_controller.h" +#include "api/field_trials_view.h" #include "api/task_queue/default_task_queue_factory.h" #include "rtc_base/null_socket_server.h" namespace webrtc { namespace { -class MainThread : public rtc::Thread { +class MainThread : public Thread { public: MainThread() - : Thread(std::make_unique(), false), + : Thread(std::make_unique(), false), current_setter_(this) { DoInit(); } @@ -30,8 +31,8 @@ class MainThread : public rtc::Thread { CurrentThreadSetter current_setter_; }; } // namespace -RealTimeController::RealTimeController() - : task_queue_factory_(CreateDefaultTaskQueueFactory()), +RealTimeController::RealTimeController(const FieldTrialsView* field_trials) + : task_queue_factory_(CreateDefaultTaskQueueFactory(field_trials)), main_thread_(std::make_unique()) { main_thread_->SetName("Main", this); } @@ -44,18 +45,18 @@ TaskQueueFactory* RealTimeController::GetTaskQueueFactory() { return task_queue_factory_.get(); } -std::unique_ptr RealTimeController::CreateThread( +std::unique_ptr RealTimeController::CreateThread( const std::string& name, - std::unique_ptr 
socket_server) { + std::unique_ptr socket_server) { if (!socket_server) - socket_server = std::make_unique(); - auto res = std::make_unique(std::move(socket_server)); + socket_server = std::make_unique(); + auto res = std::make_unique(std::move(socket_server)); res->SetName(name, nullptr); res->Start(); return res; } -rtc::Thread* RealTimeController::GetMainThread() { +Thread* RealTimeController::GetMainThread() { return main_thread_.get(); } diff --git a/test/time_controller/real_time_controller.h b/test/time_controller/real_time_controller.h index 5f02eaf85f..99eb72ece5 100644 --- a/test/time_controller/real_time_controller.h +++ b/test/time_controller/real_time_controller.h @@ -13,6 +13,7 @@ #include #include +#include "api/field_trials_view.h" #include "api/task_queue/task_queue_factory.h" #include "api/test/time_controller.h" #include "api/units/time_delta.h" @@ -21,19 +22,19 @@ namespace webrtc { class RealTimeController : public TimeController { public: - RealTimeController(); + RealTimeController(const FieldTrialsView* field_trials = nullptr); Clock* GetClock() override; TaskQueueFactory* GetTaskQueueFactory() override; - std::unique_ptr CreateThread( + std::unique_ptr CreateThread( const std::string& name, - std::unique_ptr socket_server) override; - rtc::Thread* GetMainThread() override; + std::unique_ptr socket_server) override; + Thread* GetMainThread() override; void AdvanceTime(TimeDelta duration) override; private: const std::unique_ptr task_queue_factory_; - const std::unique_ptr main_thread_; + const std::unique_ptr main_thread_; }; } // namespace webrtc diff --git a/test/time_controller/simulated_task_queue.cc b/test/time_controller/simulated_task_queue.cc index 66b3fd8087..3d786dfdcc 100644 --- a/test/time_controller/simulated_task_queue.cc +++ b/test/time_controller/simulated_task_queue.cc @@ -52,7 +52,7 @@ void SimulatedTaskQueue::RunReady(Timestamp at_time) { } CurrentTaskQueueSetter set_current(this); while (!ready_tasks_.empty()) { - absl::AnyInvocable ready = std::move(ready_tasks_.front()); + absl::AnyInvocable ready = std::move(ready_tasks_.front()); ready_tasks_.pop_front(); lock_.Unlock(); std::move(ready)(); diff --git a/test/time_controller/simulated_thread.cc b/test/time_controller/simulated_thread.cc index e8a5a22a71..d8ff5d9804 100644 --- a/test/time_controller/simulated_thread.cc +++ b/test/time_controller/simulated_thread.cc @@ -18,9 +18,9 @@ namespace { // A socket server that does nothing. It's different from NullSocketServer in // that it does allow sleep/wakeup. This avoids usage of an Event instance which // otherwise would cause issues with the simulated Yeild behavior. -class DummySocketServer : public rtc::SocketServer { +class DummySocketServer : public SocketServer { public: - rtc::Socket* CreateSocket(int family, int type) override { + Socket* CreateSocket(int family, int type) override { RTC_DCHECK_NOTREACHED(); return nullptr; } @@ -36,9 +36,9 @@ class DummySocketServer : public rtc::SocketServer { SimulatedThread::SimulatedThread( sim_time_impl::SimulatedTimeControllerImpl* handler, absl::string_view name, - std::unique_ptr socket_server) - : rtc::Thread(socket_server ? std::move(socket_server) - : std::make_unique()), + std::unique_ptr socket_server) + : Thread(socket_server ? 
std::move(socket_server) + : std::make_unique()), handler_(handler), name_(new char[name.size()]) { std::copy_n(name.begin(), name.size(), name_); @@ -61,7 +61,7 @@ void SimulatedThread::RunReady(Timestamp at_time) { } } -void SimulatedThread::BlockingCallImpl(rtc::FunctionView functor, +void SimulatedThread::BlockingCallImpl(FunctionView functor, const Location& /*location*/) { if (IsQuitting()) return; @@ -81,7 +81,7 @@ void SimulatedThread::BlockingCallImpl(rtc::FunctionView functor, void SimulatedThread::PostTaskImpl(absl::AnyInvocable task, const PostTaskTraits& traits, const Location& location) { - rtc::Thread::PostTaskImpl(std::move(task), traits, location); + Thread::PostTaskImpl(std::move(task), traits, location); MutexLock lock(&lock_); next_run_time_ = Timestamp::MinusInfinity(); } @@ -90,10 +90,10 @@ void SimulatedThread::PostDelayedTaskImpl(absl::AnyInvocable task, TimeDelta delay, const PostDelayedTaskTraits& traits, const Location& location) { - rtc::Thread::PostDelayedTaskImpl(std::move(task), delay, traits, location); + Thread::PostDelayedTaskImpl(std::move(task), delay, traits, location); MutexLock lock(&lock_); next_run_time_ = - std::min(next_run_time_, Timestamp::Millis(rtc::TimeMillis()) + delay); + std::min(next_run_time_, Timestamp::Millis(TimeMillis()) + delay); } void SimulatedThread::Stop() { diff --git a/test/time_controller/simulated_thread.h b/test/time_controller/simulated_thread.h index 8c6c728a48..2c370a031d 100644 --- a/test/time_controller/simulated_thread.h +++ b/test/time_controller/simulated_thread.h @@ -17,13 +17,13 @@ namespace webrtc { -class SimulatedThread : public rtc::Thread, +class SimulatedThread : public Thread, public sim_time_impl::SimulatedSequenceRunner { public: using CurrentThreadSetter = CurrentThreadSetter; SimulatedThread(sim_time_impl::SimulatedTimeControllerImpl* handler, absl::string_view name, - std::unique_ptr socket_server); + std::unique_ptr socket_server); ~SimulatedThread() override; void RunReady(Timestamp at_time) override; @@ -36,7 +36,7 @@ class SimulatedThread : public rtc::Thread, TaskQueueBase* GetAsTaskQueue() override { return this; } // Thread interface - void BlockingCallImpl(rtc::FunctionView functor, + void BlockingCallImpl(FunctionView functor, const Location& location) override; void PostTaskImpl(absl::AnyInvocable task, const PostTaskTraits& traits, diff --git a/test/time_controller/simulated_time_controller.cc b/test/time_controller/simulated_time_controller.cc index dbb36fdfcc..69851570d4 100644 --- a/test/time_controller/simulated_time_controller.cc +++ b/test/time_controller/simulated_time_controller.cc @@ -10,14 +10,25 @@ #include "test/time_controller/simulated_time_controller.h" #include -#include #include #include #include -#include +#include #include #include "absl/strings/string_view.h" +#include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/checks.h" +#include "rtc_base/platform_thread_types.h" +#include "rtc_base/socket_server.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/synchronization/yield_policy.h" +#include "rtc_base/thread.h" +#include "rtc_base/time_utils.h" +#include "system_wrappers/include/clock.h" #include "test/time_controller/simulated_task_queue.h" #include "test/time_controller/simulated_thread.h" @@ -37,7 +48,7 @@ bool RemoveByValue(C* vec, typename C::value_type val) { namespace sim_time_impl { 
SimulatedTimeControllerImpl::SimulatedTimeControllerImpl(Timestamp start_time) - : thread_id_(rtc::CurrentThreadId()), current_time_(start_time) {} + : thread_id_(CurrentThreadId()), current_time_(start_time) {} SimulatedTimeControllerImpl::~SimulatedTimeControllerImpl() = default; @@ -53,9 +64,9 @@ SimulatedTimeControllerImpl::CreateTaskQueue( return task_queue; } -std::unique_ptr SimulatedTimeControllerImpl::CreateThread( +std::unique_ptr SimulatedTimeControllerImpl::CreateThread( const std::string& name, - std::unique_ptr socket_server) { + std::unique_ptr socket_server) { auto thread = std::make_unique(this, name, std::move(socket_server)); Register(thread.get()); @@ -63,7 +74,7 @@ std::unique_ptr SimulatedTimeControllerImpl::CreateThread( } void SimulatedTimeControllerImpl::YieldExecution() { - if (rtc::CurrentThreadId() == thread_id_) { + if (CurrentThreadId() == thread_id_) { TaskQueueBase* yielding_from = TaskQueueBase::Current(); // Since we might continue execution on a process thread, we should reset // the thread local task queue reference. This ensures that thread checkers @@ -86,7 +97,7 @@ void SimulatedTimeControllerImpl::RunReadyRunners() { // by Thread::Current(). SimulatedThread::CurrentThreadSetter set_current(dummy_thread_.get()); MutexLock lock(&lock_); - RTC_DCHECK_EQ(rtc::CurrentThreadId(), thread_id_); + RTC_DCHECK_EQ(CurrentThreadId(), thread_id_); Timestamp current_time = CurrentTime(); // Clearing `ready_runners_` in case this is a recursive call: // RunReadyRunners -> Run -> Event::Wait -> Yield ->RunReadyRunners @@ -182,21 +193,21 @@ TaskQueueFactory* GlobalSimulatedTimeController::GetTaskQueueFactory() { return &impl_; } -std::unique_ptr GlobalSimulatedTimeController::CreateThread( +std::unique_ptr GlobalSimulatedTimeController::CreateThread( const std::string& name, - std::unique_ptr socket_server) { + std::unique_ptr socket_server) { return impl_.CreateThread(name, std::move(socket_server)); } -rtc::Thread* GlobalSimulatedTimeController::GetMainThread() { +Thread* GlobalSimulatedTimeController::GetMainThread() { return main_thread_.get(); } void GlobalSimulatedTimeController::AdvanceTime(TimeDelta duration) { - rtc::ScopedYieldPolicy yield_policy(&impl_); + ScopedYieldPolicy yield_policy(&impl_); Timestamp current_time = impl_.CurrentTime(); Timestamp target_time = current_time + duration; - RTC_DCHECK_EQ(current_time.us(), rtc::TimeMicros()); + RTC_DCHECK_EQ(current_time.us(), TimeMicros()); while (current_time < target_time) { impl_.RunReadyRunners(); Timestamp next_time = std::min(impl_.NextRunTime(), target_time); @@ -212,15 +223,12 @@ void GlobalSimulatedTimeController::AdvanceTime(TimeDelta duration) { } void GlobalSimulatedTimeController::SkipForwardBy(TimeDelta duration) { - rtc::ScopedYieldPolicy yield_policy(&impl_); + ScopedYieldPolicy yield_policy(&impl_); Timestamp current_time = impl_.CurrentTime(); Timestamp target_time = current_time + duration; impl_.AdvanceTime(target_time); sim_clock_.AdvanceTimeMicroseconds(duration.us()); global_clock_.AdvanceTime(duration); - - // Run tasks that were pending during the skip. 
- impl_.RunReadyRunners(); } void GlobalSimulatedTimeController::Register( diff --git a/test/time_controller/simulated_time_controller.h b/test/time_controller/simulated_time_controller.h index f3f0da9274..a458b97204 100644 --- a/test/time_controller/simulated_time_controller.h +++ b/test/time_controller/simulated_time_controller.h @@ -43,7 +43,7 @@ class SimulatedSequenceRunner { }; class SimulatedTimeControllerImpl : public TaskQueueFactory, - public rtc::YieldInterface { + public YieldInterface { public: explicit SimulatedTimeControllerImpl(Timestamp start_time); ~SimulatedTimeControllerImpl() override; @@ -58,9 +58,9 @@ class SimulatedTimeControllerImpl : public TaskQueueFactory, void YieldExecution() RTC_LOCKS_EXCLUDED(time_lock_, lock_) override; // Create thread using provided `socket_server`. - std::unique_ptr CreateThread( + std::unique_ptr CreateThread( const std::string& name, - std::unique_ptr socket_server) + std::unique_ptr socket_server) RTC_LOCKS_EXCLUDED(time_lock_, lock_); // Runs all runners in `runners_` that has tasks or modules ready for @@ -83,8 +83,8 @@ class SimulatedTimeControllerImpl : public TaskQueueFactory, void StopYield(TaskQueueBase* yielding_from); private: - const rtc::PlatformThreadId thread_id_; - const std::unique_ptr dummy_thread_ = rtc::Thread::Create(); + const PlatformThreadId thread_id_; + const std::unique_ptr dummy_thread_ = Thread::Create(); mutable Mutex time_lock_; Timestamp current_time_ RTC_GUARDED_BY(time_lock_); mutable Mutex lock_; @@ -122,8 +122,8 @@ class TokenTaskQueue : public TaskQueueBase { // TimeController implementation using completely simulated time. Task queues // and process threads created by this controller will run delayed activities // when AdvanceTime() is called. Overrides the global clock backing -// rtc::TimeMillis() and rtc::TimeMicros(). Note that this is not thread safe -// since it modifies global state. +// webrtc::TimeMillis() and webrtc::TimeMicros(). Note that this is not thread +// safe since it modifies global state. class GlobalSimulatedTimeController : public TimeController { public: explicit GlobalSimulatedTimeController(Timestamp start_time); @@ -131,15 +131,14 @@ class GlobalSimulatedTimeController : public TimeController { Clock* GetClock() override; TaskQueueFactory* GetTaskQueueFactory() override; - std::unique_ptr CreateThread( + std::unique_ptr CreateThread( const std::string& name, - std::unique_ptr socket_server) override; - rtc::Thread* GetMainThread() override; + std::unique_ptr socket_server) override; + Thread* GetMainThread() override; void AdvanceTime(TimeDelta duration) override; // Advances time by `duration`and do not run delayed tasks in the meantime. - // Runs any pending tasks at the end. // Useful for simulating contention on destination queues. 
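The removed RunReadyRunners() call above, together with the header-comment change below, means SkipForwardBy() no longer flushes tasks that became due during the skip; callers now follow it with AdvanceTime(TimeDelta::Zero()), which is exactly what the updated unit test in this patch does. A minimal sketch of that usage pattern (illustrative only, not part of the patch; the function name and start time are made up):

    #include "api/units/time_delta.h"
    #include "api/units/timestamp.h"
    #include "rtc_base/checks.h"
    #include "rtc_base/thread.h"
    #include "test/time_controller/simulated_time_controller.h"

    namespace webrtc {

    void SkipThenFlushExample() {
      GlobalSimulatedTimeController sim(Timestamp::Seconds(10000));
      Thread* main_thread = sim.GetMainThread();

      bool ran = false;
      main_thread->PostDelayedTask([&] { ran = true; }, TimeDelta::Millis(100));

      // Jump past the task's due time without servicing any queue.
      sim.SkipForwardBy(TimeDelta::Seconds(1));
      RTC_CHECK(!ran);  // The overdue task has not run yet.

      // Flush tasks that became pending during the skip.
      sim.AdvanceTime(TimeDelta::Zero());
      RTC_CHECK(ran);
    }

    }  // namespace webrtc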
void SkipForwardBy(TimeDelta duration); @@ -155,12 +154,12 @@ class GlobalSimulatedTimeController : public TimeController { void Unregister(sim_time_impl::SimulatedSequenceRunner* runner); private: - rtc::ScopedBaseFakeClock global_clock_; + ScopedBaseFakeClock global_clock_; // Provides simulated CurrentNtpInMilliseconds() SimulatedClock sim_clock_; sim_time_impl::SimulatedTimeControllerImpl impl_; - rtc::ScopedYieldPolicy yield_policy_; - std::unique_ptr main_thread_; + ScopedYieldPolicy yield_policy_; + std::unique_ptr main_thread_; }; } // namespace webrtc diff --git a/test/time_controller/simulated_time_controller_unittest.cc b/test/time_controller/simulated_time_controller_unittest.cc index f223ffe85d..10d741beb4 100644 --- a/test/time_controller/simulated_time_controller_unittest.cc +++ b/test/time_controller/simulated_time_controller_unittest.cc @@ -13,9 +13,9 @@ #include #include +#include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" #include "rtc_base/event.h" -#include "rtc_base/task_queue.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/task_utils/repeating_task.h" #include "test/gmock.h" @@ -39,11 +39,11 @@ TEST(SimulatedTimeControllerTest, TaskIsStoppedOnStop) { const int kShortIntervalCount = 4; const int kMargin = 1; GlobalSimulatedTimeController time_simulation(kStartTime); - rtc::TaskQueue task_queue( + std::unique_ptr task_queue = time_simulation.GetTaskQueueFactory()->CreateTaskQueue( - "TestQueue", TaskQueueFactory::Priority::NORMAL)); + "TestQueue", TaskQueueFactory::Priority::NORMAL); std::atomic_int counter(0); - auto handle = RepeatingTaskHandle::Start(task_queue.Get(), [&] { + auto handle = RepeatingTaskHandle::Start(task_queue.get(), [&] { if (++counter >= kShortIntervalCount) return kLongInterval; return kShortInterval; @@ -52,7 +52,7 @@ TEST(SimulatedTimeControllerTest, TaskIsStoppedOnStop) { time_simulation.AdvanceTime(kShortInterval * (kShortIntervalCount + kMargin)); EXPECT_EQ(counter.load(), kShortIntervalCount); - task_queue.PostTask( + task_queue->PostTask( [handle = std::move(handle)]() mutable { handle.Stop(); }); // Sleep long enough that the task would run at least once more if not @@ -64,13 +64,13 @@ TEST(SimulatedTimeControllerTest, TaskIsStoppedOnStop) { TEST(SimulatedTimeControllerTest, TaskCanStopItself) { std::atomic_int counter(0); GlobalSimulatedTimeController time_simulation(kStartTime); - rtc::TaskQueue task_queue( + std::unique_ptr task_queue = time_simulation.GetTaskQueueFactory()->CreateTaskQueue( - "TestQueue", TaskQueueFactory::Priority::NORMAL)); + "TestQueue", TaskQueueFactory::Priority::NORMAL); RepeatingTaskHandle handle; - task_queue.PostTask([&] { - handle = RepeatingTaskHandle::Start(task_queue.Get(), [&] { + task_queue->PostTask([&] { + handle = RepeatingTaskHandle::Start(task_queue.get(), [&] { ++counter; handle.Stop(); return TimeDelta::Millis(2); @@ -86,29 +86,29 @@ TEST(SimulatedTimeControllerTest, Example) { void DoPeriodicTask() {} TimeDelta TimeUntilNextRun() { return TimeDelta::Millis(100); } void StartPeriodicTask(RepeatingTaskHandle* handle, - rtc::TaskQueue* task_queue) { - *handle = RepeatingTaskHandle::Start(task_queue->Get(), [this] { + TaskQueueBase* task_queue) { + *handle = RepeatingTaskHandle::Start(task_queue, [this] { DoPeriodicTask(); return TimeUntilNextRun(); }); } }; GlobalSimulatedTimeController time_simulation(kStartTime); - rtc::TaskQueue task_queue( + std::unique_ptr task_queue = time_simulation.GetTaskQueueFactory()->CreateTaskQueue( - "TestQueue", 
TaskQueueFactory::Priority::NORMAL)); + "TestQueue", TaskQueueFactory::Priority::NORMAL); auto object = std::make_unique(); // Create and start the periodic task. RepeatingTaskHandle handle; - object->StartPeriodicTask(&handle, &task_queue); + object->StartPeriodicTask(&handle, task_queue.get()); // Restart the task - task_queue.PostTask( + task_queue->PostTask( [handle = std::move(handle)]() mutable { handle.Stop(); }); - object->StartPeriodicTask(&handle, &task_queue); - task_queue.PostTask( + object->StartPeriodicTask(&handle, task_queue.get()); + task_queue->PostTask( [handle = std::move(handle)]() mutable { handle.Stop(); }); - task_queue.PostTask([object = std::move(object)] {}); + task_queue->PostTask([object = std::move(object)] {}); } TEST(SimulatedTimeControllerTest, DelayTaskRunOnTime) { @@ -134,7 +134,7 @@ TEST(SimulatedTimeControllerTest, ThreadYeildsOnSynchronousCall) { // called. main_thread->PostTask([&] { task_has_run = true; }); SendTask(t2.get(), [] { - rtc::Event yield_event; + Event yield_event; // Wait() triggers YieldExecution() which will runs message processing on // all threads that are not in the yielded set. @@ -159,6 +159,8 @@ TEST(SimulatedTimeControllerTest, SkipsDelayedTaskForward) { })); main_thread->PostDelayedTask(fun.AsStdFunction(), shorter_duration); sim.SkipForwardBy(duration_during_which_nothing_runs); + // Run tasks that were pending during the skip. + sim.AdvanceTime(TimeDelta::Zero()); } } // namespace webrtc diff --git a/test/time_controller/time_controller_conformance_test.cc b/test/time_controller/time_controller_conformance_test.cc index 300dd9175c..5e0dcf85cc 100644 --- a/test/time_controller/time_controller_conformance_test.cc +++ b/test/time_controller/time_controller_conformance_test.cc @@ -81,7 +81,7 @@ class SimulatedRealTimeControllerConformanceTest TEST_P(SimulatedRealTimeControllerConformanceTest, ThreadPostOrderTest) { std::unique_ptr time_controller = CreateTimeController(GetParam()); - std::unique_ptr thread = time_controller->CreateThread("thread"); + std::unique_ptr thread = time_controller->CreateThread("thread"); // Tasks on thread have to be executed in order in which they were // posted. @@ -98,7 +98,7 @@ TEST_P(SimulatedRealTimeControllerConformanceTest, ThreadPostOrderTest) { TEST_P(SimulatedRealTimeControllerConformanceTest, ThreadPostDelayedOrderTest) { std::unique_ptr time_controller = CreateTimeController(GetParam()); - std::unique_ptr thread = time_controller->CreateThread("thread"); + std::unique_ptr thread = time_controller->CreateThread("thread"); ExecutionOrderKeeper execution_order; thread->PostDelayedTask([&]() { execution_order.Executed(2); }, @@ -114,7 +114,7 @@ TEST_P(SimulatedRealTimeControllerConformanceTest, ThreadPostDelayedOrderTest) { TEST_P(SimulatedRealTimeControllerConformanceTest, ThreadPostInvokeOrderTest) { std::unique_ptr time_controller = CreateTimeController(GetParam()); - std::unique_ptr thread = time_controller->CreateThread("thread"); + std::unique_ptr thread = time_controller->CreateThread("thread"); // Tasks on thread have to be executed in order in which they were // posted/invoked. @@ -132,7 +132,7 @@ TEST_P(SimulatedRealTimeControllerConformanceTest, ThreadPostInvokeFromThreadOrderTest) { std::unique_ptr time_controller = CreateTimeController(GetParam()); - std::unique_ptr thread = time_controller->CreateThread("thread"); + std::unique_ptr thread = time_controller->CreateThread("thread"); // If task is invoked from thread X on thread X it has to be executed // immediately. 
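The comment above states the invariant the conformance test exercises: a BlockingCall() issued on a thread from that same thread executes inline. A minimal sketch under that assumption (the helper name and the mutex/vector bookkeeping are illustrative, not part of the test file):

    #include <memory>
    #include <vector>

    #include "api/test/time_controller.h"
    #include "api/units/time_delta.h"
    #include "rtc_base/checks.h"
    #include "rtc_base/synchronization/mutex.h"
    #include "rtc_base/thread.h"

    namespace webrtc {

    // `controller` can be a RealTimeController or a GlobalSimulatedTimeController.
    void SameThreadBlockingCallRunsInline(TimeController& controller) {
      std::unique_ptr<Thread> thread = controller.CreateThread("thread");
      Mutex mutex;
      std::vector<int> order;

      thread->PostTask([&] {
        { MutexLock lock(&mutex); order.push_back(1); }
        // A BlockingCall() issued on `thread` from `thread` itself runs inline,
        // before this lambda returns and before any later posted task.
        thread->BlockingCall([&] {
          MutexLock lock(&mutex);
          order.push_back(2);
        });
      });
      thread->PostTask([&] {
        MutexLock lock(&mutex);
        order.push_back(3);
      });

      controller.AdvanceTime(TimeDelta::Millis(100));
      MutexLock lock(&mutex);
      RTC_CHECK(order == std::vector<int>({1, 2, 3}));
    }

    }  // namespace webrtc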
@@ -158,7 +158,7 @@ TEST_P(SimulatedRealTimeControllerConformanceTest, // Tasks on thread have to be executed in order in which they were // posted/invoked. ExecutionOrderKeeper execution_order; - rtc::Event event; + Event event; task_queue->PostTask([&]() { execution_order.Executed(1); }); task_queue->PostTask([&]() { execution_order.Executed(2); diff --git a/test/vcm_capturer.h b/test/vcm_capturer.h index 1deea21229..433cb48159 100644 --- a/test/vcm_capturer.h +++ b/test/vcm_capturer.h @@ -22,7 +22,7 @@ namespace webrtc { namespace test { class VcmCapturer : public TestVideoCapturer, - public rtc::VideoSinkInterface { + public VideoSinkInterface { public: static VcmCapturer* Create(size_t width, size_t height, @@ -54,7 +54,7 @@ class VcmCapturer : public TestVideoCapturer, size_t width_; size_t height_; - rtc::scoped_refptr vcm_; + scoped_refptr vcm_; VideoCaptureCapability capability_; }; diff --git a/test/video_codec_tester.cc b/test/video_codec_tester.cc new file mode 100644 index 0000000000..7c40fc2e5e --- /dev/null +++ b/test/video_codec_tester.cc @@ -0,0 +1,1740 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "test/video_codec_tester.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/functional/any_invocable.h" +#include "absl/strings/match.h" +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/field_trials_view.h" +#include "api/make_ref_counted.h" +#include "api/numerics/samples_stats_counter.h" +#include "api/scoped_refptr.h" +#include "api/test/create_frame_generator.h" +#include "api/test/frame_generator_interface.h" +#include "api/test/metrics/metric.h" +#include "api/test/metrics/metrics_logger.h" +#include "api/test/video/video_frame_writer.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/frequency.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/builtin_video_bitrate_allocator_factory.h" +#include "api/video/encoded_image.h" +#include "api/video/resolution.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" +#include "api/video_codecs/h264_profile_level_id.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/simulcast_stream.h" +#include "api/video_codecs/spatial_layer.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_decoder.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "media/base/media_constants.h" +#include "modules/video_coding/codecs/av1/av1_svc_config.h" +#include "modules/video_coding/codecs/h264/include/h264.h" +#include 
"modules/video_coding/codecs/vp9/svc_config.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/svc/scalability_mode_util.h" +#include "modules/video_coding/utility/ivf_file_writer.h" +#include "rtc_base/checks.h" +#include "rtc_base/event.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/file_wrapper.h" +#include "rtc_base/task_queue_for_test.h" +#include "rtc_base/thread_annotations.h" +#include "rtc_base/time_utils.h" +#include "system_wrappers/include/sleep.h" +#include "test/testsupport/file_utils.h" +#include "test/testsupport/frame_reader.h" +#include "test/testsupport/video_frame_writer.h" +#include "third_party/libyuv/include/libyuv/compare.h" +#include "video/config/encoder_stream_factory.h" +#include "video/config/video_encoder_config.h" + +namespace webrtc { +namespace test { + +namespace { +using CodedVideoSource = VideoCodecTester::CodedVideoSource; +using VideoSourceSettings = VideoCodecTester::VideoSourceSettings; +using EncodingSettings = VideoCodecTester::EncodingSettings; +using LayerSettings = EncodingSettings::LayerSettings; +using LayerId = VideoCodecTester::LayerId; +using EncoderSettings = VideoCodecTester::EncoderSettings; +using DecoderSettings = VideoCodecTester::DecoderSettings; +using PacingSettings = VideoCodecTester::PacingSettings; +using PacingMode = PacingSettings::PacingMode; +using VideoCodecStats = VideoCodecTester::VideoCodecStats; +using DecodeCallback = + absl::AnyInvocable; +using webrtc::test::ImprovementDirection; + +constexpr Frequency k90kHz = Frequency::Hertz(90000); + +const std::set kFullSvcScalabilityModes{ + ScalabilityMode::kL2T1, ScalabilityMode::kL2T1h, ScalabilityMode::kL2T2, + ScalabilityMode::kL2T2h, ScalabilityMode::kL2T3, ScalabilityMode::kL2T3h, + ScalabilityMode::kL3T1, ScalabilityMode::kL3T1h, ScalabilityMode::kL3T2, + ScalabilityMode::kL3T2h, ScalabilityMode::kL3T3, ScalabilityMode::kL3T3h}; + +const std::set kKeySvcScalabilityModes{ + ScalabilityMode::kL2T1_KEY, ScalabilityMode::kL2T2_KEY, + ScalabilityMode::kL2T2_KEY_SHIFT, ScalabilityMode::kL2T3_KEY, + ScalabilityMode::kL3T1_KEY, ScalabilityMode::kL3T2_KEY, + ScalabilityMode::kL3T3_KEY}; + +scoped_refptr ScaleFrame( + scoped_refptr buffer, + int scaled_width, + int scaled_height) { + if (buffer->width() == scaled_width && buffer->height() == scaled_height) { + return buffer; + } + return buffer->Scale(scaled_width, scaled_height); +} + +// A video source that reads frames from YUV, Y4M or IVF (compressed with VPx, +// AV1 or H264) files. 
+class VideoSource { + public: + explicit VideoSource(VideoSourceSettings source_settings) + : source_settings_(source_settings) { + if (absl::EndsWith(source_settings.file_path, "ivf")) { + ivf_reader_ = CreateFromIvfFileFrameGenerator(CreateEnvironment(), + source_settings.file_path); + } else if (absl::EndsWith(source_settings.file_path, "y4m")) { + yuv_reader_ = + CreateY4mFrameReader(source_settings_.file_path, + YuvFrameReaderImpl::RepeatMode::kPingPong); + } else { + yuv_reader_ = CreateYuvFrameReader( + source_settings_.file_path, source_settings_.resolution, + YuvFrameReaderImpl::RepeatMode::kPingPong); + } + RTC_CHECK(ivf_reader_ || yuv_reader_); + } + + VideoFrame PullFrame(uint32_t timestamp_rtp, + Resolution output_resolution, + Frequency output_framerate) { + // If the source and output frame rates differ, resampling is performed by + // skipping or repeating source frames. + time_delta_ = time_delta_.value_or(1 / source_settings_.framerate); + int seek = 0; + while (time_delta_->us() <= 0) { + *time_delta_ += 1 / source_settings_.framerate; + ++seek; + } + *time_delta_ -= 1 / output_framerate; + + if (seek > 0 || last_frame_ == nullptr) { + scoped_refptr buffer; + do { + if (yuv_reader_) { + buffer = yuv_reader_->PullFrame(); + } else { + buffer = ivf_reader_->NextFrame().buffer; + } + } while (--seek > 0); + RTC_CHECK(buffer) << "Could not read frame. timestamp_rtp " + << timestamp_rtp; + last_frame_ = buffer; + } + + scoped_refptr buffer = ScaleFrame( + last_frame_, output_resolution.width, output_resolution.height); + return VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_rtp_timestamp(timestamp_rtp) + .set_timestamp_us((timestamp_rtp / k90kHz).us()) + .build(); + } + + private: + VideoSourceSettings source_settings_; + std::unique_ptr yuv_reader_; + std::unique_ptr ivf_reader_; + scoped_refptr last_frame_; + // Time delta between the source and output video. Used for frame rate + // scaling. This value increases by the source frame duration each time a + // frame is read from the source, and decreases by the output frame duration + // each time an output frame is delivered. + std::optional time_delta_; +}; + +// Pacer calculates delay necessary to keep frame encode or decode call spaced +// from the previous calls by the pacing time. `Schedule` is expected to be +// called as close as possible to posting frame encode or decode task. This +// class is not thread safe. +class Pacer { + public: + explicit Pacer(PacingSettings settings) + : settings_(settings), delay_(TimeDelta::Zero()) {} + + Timestamp Schedule(Timestamp timestamp) { + Timestamp now = Timestamp::Micros(TimeMicros()); + if (settings_.mode == PacingMode::kNoPacing) { + return now; + } + + Timestamp scheduled = now; + if (prev_scheduled_) { + scheduled = *prev_scheduled_ + PacingTime(timestamp); + if (scheduled < now) { + scheduled = now; + } + } + + prev_timestamp_ = timestamp; + prev_scheduled_ = scheduled; + return scheduled; + } + + private: + TimeDelta PacingTime(Timestamp timestamp) { + if (settings_.mode == PacingMode::kRealTime) { + return timestamp - *prev_timestamp_; + } + RTC_CHECK_EQ(PacingMode::kConstantRate, settings_.mode); + return 1 / settings_.constant_rate; + } + + PacingSettings settings_; + std::optional prev_timestamp_; + std::optional prev_scheduled_; + TimeDelta delay_; +}; + +// A task queue that limits its maximum size and guarantees FIFO execution of +// the scheduled tasks. 
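The skip/repeat resampling in VideoSource::PullFrame() above reduces to a small accumulator. A self-contained sketch of just that arithmetic, assuming integer-microsecond durations similar to TimeDelta and a made-up 30 fps source delivered at 10 fps:

    #include <cstdint>
    #include <cstdio>

    int main() {
      const int64_t src_frame_us = 1000000 / 30;  // ~33333 us per source frame
      const int64_t out_frame_us = 1000000 / 10;  // 100000 us per output frame
      int64_t delta_us = src_frame_us;            // time_delta_'s initial value
      for (int out = 0; out < 4; ++out) {
        int seek = 0;
        while (delta_us <= 0) {  // catch up by consuming source frames
          delta_us += src_frame_us;
          ++seek;
        }
        delta_us -= out_frame_us;
        // PullFrame() reads one frame on the first call (nothing is cached yet),
        // then `seek` source frames per output frame, repeating the cached frame
        // whenever seek == 0.
        std::printf("output frame %d: seek = %d\n", out, seek);
      }
      // Prints seek = 0, 3, 3, 3: after start-up, two of every three source
      // frames are skipped, converting 30 fps to 10 fps.
      return 0;
    }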
+class LimitedTaskQueue { + public: + // Frame reading, encoding and decoding are handled in separate threads. If + // encoding or decoding is slow, the frame reader may run far ahead, loading + // many large frames into memory. To prevent this, we limit the maximum size + // of the task queue. When this limit is reached, posting new tasks is blocked + // until the queue size is reduced by executing previous tasks. + static constexpr int kMaxTaskQueueSize = 3; + + LimitedTaskQueue() : queue_size_(0) {} + + void PostScheduledTask(absl::AnyInvocable task, + Timestamp scheduled) { + { + // Block posting new tasks until the queue size is reduced. + MutexLock lock(&mutex_); + while (queue_size_ >= kMaxTaskQueueSize) { + task_executed_.Wait(TimeDelta::Seconds(10)); + task_executed_.Reset(); + } + } + + ++queue_size_; + task_queue_.PostTask([this, task = std::move(task), scheduled]() mutable { + Timestamp now = Timestamp::Millis(TimeMillis()); + int64_t wait_ms = (scheduled - now).ms(); + if (wait_ms > 0) { + RTC_CHECK_LT(wait_ms, 10000) << "Too high wait_ms " << wait_ms; + SleepMs(wait_ms); + } + std::move(task)(); + --queue_size_; + task_executed_.Set(); + }); + } + + void PostTask(absl::AnyInvocable task) { + Timestamp now = Timestamp::Millis(TimeMillis()); + PostScheduledTask(std::move(task), now); + } + + void PostTaskAndWait(absl::AnyInvocable task) { + PostTask(std::move(task)); + WaitForPreviouslyPostedTasks(); + } + + void WaitForPreviouslyPostedTasks() { + task_queue_.WaitForPreviouslyPostedTasks(); + } + + private: + TaskQueueForTest task_queue_; + std::atomic_int queue_size_; + Event task_executed_; + Mutex mutex_; +}; + +class TesterY4mWriter { + public: + explicit TesterY4mWriter(absl::string_view base_path) + : base_path_(base_path) {} + + ~TesterY4mWriter() { + task_queue_.SendTask([] {}); + } + + void Write(const VideoFrame& frame, int spatial_idx) { + task_queue_.PostTask([this, frame, spatial_idx] { + if (y4m_writers_.find(spatial_idx) == y4m_writers_.end()) { + std::string file_path = + base_path_ + "-s" + std::to_string(spatial_idx) + ".y4m"; + Y4mVideoFrameWriterImpl* y4m_writer = new Y4mVideoFrameWriterImpl( + file_path, frame.width(), frame.height(), /*fps=*/30); + RTC_CHECK(y4m_writer); + + y4m_writers_[spatial_idx] = + std::unique_ptr(y4m_writer); + } + + y4m_writers_.at(spatial_idx)->WriteFrame(frame); + }); + } + + private: + std::string base_path_; + std::map> y4m_writers_; + TaskQueueForTest task_queue_; +}; + +class TesterIvfWriter { + public: + explicit TesterIvfWriter(absl::string_view base_path) + : base_path_(base_path) {} + + ~TesterIvfWriter() { + task_queue_.SendTask([] {}); + } + + void Write(const EncodedImage& encoded_frame, VideoCodecType codec_type) { + task_queue_.PostTask([this, encoded_frame, codec_type] { + int spatial_idx = encoded_frame.SpatialIndex().value_or( + encoded_frame.SimulcastIndex().value_or(0)); + if (ivf_file_writers_.find(spatial_idx) == ivf_file_writers_.end()) { + std::string ivf_path = + base_path_ + "-s" + std::to_string(spatial_idx) + ".ivf"; + FileWrapper ivf_file = FileWrapper::OpenWriteOnly(ivf_path); + RTC_CHECK(ivf_file.is_open()); + + std::unique_ptr ivf_writer = + IvfFileWriter::Wrap(std::move(ivf_file), /*byte_limit=*/0); + RTC_CHECK(ivf_writer); + + ivf_file_writers_[spatial_idx] = std::move(ivf_writer); + } + + // IVF writer splits superframe into spatial layer frames. We want to dump + // whole superframe so that decoders can correctly decode the dump. Reset + // spatial index to get desired behavior. 
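The comment on LimitedTaskQueue above explains why posting is blocked once kMaxTaskQueueSize tasks are in flight. A hypothetical usage sketch inside this translation unit (the function name and timings are invented; SleepMs and RTC_LOG are the helpers already used in this file):

    void BoundedReadAheadExample() {
      LimitedTaskQueue queue;
      for (int i = 0; i < 6; ++i) {
        // With kMaxTaskQueueSize == 3, the 4th PostTask() blocks the caller until
        // one of the earlier tasks finishes; this is what keeps the frame reader
        // from loading an unbounded number of frames into memory.
        queue.PostTask([i] {
          SleepMs(100);  // stand-in for a slow encode or decode call
          RTC_LOG(LS_INFO) << "finished task " << i;  // tasks finish in FIFO order
        });
      }
      queue.WaitForPreviouslyPostedTasks();
    }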
+ EncodedImage frame_copy = encoded_frame; + frame_copy.SetSpatialIndex(std::nullopt); + frame_copy.SetSpatialLayerFrameSize(0, 0); + + // To play: ffplay -vcodec vp8|vp9|av1|hevc|h264 filename + ivf_file_writers_.at(spatial_idx)->WriteFrame(frame_copy, codec_type); + }); + } + + private: + std::string base_path_; + std::map> ivf_file_writers_; + TaskQueueForTest task_queue_; +}; + +class LeakyBucket { + public: + LeakyBucket() : level_bits_(0) {} + + // Updates bucket level and returns its current level in bits. Data is removed + // from bucket with rate equal to target bitrate of previous frame. Bucket + // level is tracked with floating point precision. Returned value of bucket + // level is rounded up. + int Update(const VideoCodecStats::Frame& frame) { + RTC_CHECK(frame.target_bitrate) << "Bitrate must be specified."; + if (prev_frame_) { + RTC_CHECK_GT(frame.timestamp_rtp, prev_frame_->timestamp_rtp) + << "Timestamp must increase."; + TimeDelta passed = + (frame.timestamp_rtp - prev_frame_->timestamp_rtp) / k90kHz; + level_bits_ -= + prev_frame_->target_bitrate->bps() * passed.seconds(); + level_bits_ = std::max(level_bits_, 0.0); + } + prev_frame_ = frame; + level_bits_ += frame.frame_size.bytes() * 8; + return static_cast(std::ceil(level_bits_)); + } + + private: + std::optional prev_frame_; + double level_bits_; +}; + +class VideoCodecAnalyzer : public VideoCodecTester::VideoCodecStats { + public: + void StartEncode(const VideoFrame& video_frame, + const EncodingSettings& encoding_settings) { + int64_t encode_start_us = TimeMicros(); + task_queue_.PostTask([this, timestamp_rtp = video_frame.rtp_timestamp(), + encoding_settings, encode_start_us]() { + RTC_CHECK(frames_.find(timestamp_rtp) == frames_.end()) + << "Duplicate frame. Frame with timestamp " << timestamp_rtp + << " was seen before"; + + Frame frame; + frame.timestamp_rtp = timestamp_rtp; + frame.encode_start = Timestamp::Micros(encode_start_us), + frames_.emplace(timestamp_rtp, + std::map{{/*spatial_idx=*/0, frame}}); + encoding_settings_.emplace(timestamp_rtp, encoding_settings); + }); + } + + void FinishEncode(const EncodedImage& encoded_frame) { + int64_t encode_finished_us = TimeMicros(); + task_queue_.PostTask( + [this, timestamp_rtp = encoded_frame.RtpTimestamp(), + spatial_idx = encoded_frame.SpatialIndex().value_or( + encoded_frame.SimulcastIndex().value_or(0)), + temporal_idx = encoded_frame.TemporalIndex().value_or(0), + width = encoded_frame._encodedWidth, + height = encoded_frame._encodedHeight, + frame_type = encoded_frame._frameType, + frame_size_bytes = encoded_frame.size(), qp = encoded_frame.qp_, + encode_finished_us]() { + if (spatial_idx > 0) { + RTC_CHECK(frames_.find(timestamp_rtp) != frames_.end()) + << "Spatial layer 0 frame with timestamp " << timestamp_rtp + << " was not seen before"; + const Frame& base_frame = + frames_.at(timestamp_rtp).at(/*spatial_idx=*/0); + frames_.at(timestamp_rtp).emplace(spatial_idx, base_frame); + } + + Frame& frame = frames_.at(timestamp_rtp).at(spatial_idx); + frame.layer_id = {.spatial_idx = spatial_idx, + .temporal_idx = temporal_idx}; + frame.width = width; + frame.height = height; + frame.frame_size = DataSize::Bytes(frame_size_bytes); + frame.qp = qp; + frame.keyframe = frame_type == VideoFrameType::kVideoFrameKey; + frame.encode_time = + Timestamp::Micros(encode_finished_us) - frame.encode_start; + frame.encoded = true; + }); + } + + void StartDecode(const EncodedImage& encoded_frame) { + int64_t decode_start_us = TimeMicros(); + task_queue_.PostTask( + [this, 
timestamp_rtp = encoded_frame.RtpTimestamp(), + spatial_idx = encoded_frame.SpatialIndex().value_or( + encoded_frame.SimulcastIndex().value_or(0)), + temporal_idx = encoded_frame.TemporalIndex().value_or(0), + width = encoded_frame._encodedWidth, + height = encoded_frame._encodedHeight, + frame_type = encoded_frame._frameType, qp = encoded_frame.qp_, + frame_size_bytes = encoded_frame.size(), decode_start_us]() { + bool decode_only = frames_.find(timestamp_rtp) == frames_.end(); + if (decode_only || frames_.at(timestamp_rtp).find(spatial_idx) == + frames_.at(timestamp_rtp).end()) { + Frame frame; + frame.timestamp_rtp = timestamp_rtp; + frame.layer_id = {.spatial_idx = spatial_idx, + .temporal_idx = temporal_idx}; + frame.width = width; + frame.height = height; + frame.keyframe = frame_type == VideoFrameType::kVideoFrameKey; + frame.qp = qp; + if (decode_only) { + frame.frame_size = DataSize::Bytes(frame_size_bytes); + frames_[timestamp_rtp] = {{spatial_idx, frame}}; + } else { + frames_[timestamp_rtp][spatial_idx] = frame; + } + } + + Frame& frame = frames_.at(timestamp_rtp).at(spatial_idx); + frame.decode_start = Timestamp::Micros(decode_start_us); + }); + } + + void FinishDecode(const VideoFrame& decoded_frame, + int spatial_idx, + std::optional ref_frame = std::nullopt) { + int64_t decode_finished_us = TimeMicros(); + task_queue_.PostTask([this, timestamp_rtp = decoded_frame.rtp_timestamp(), + spatial_idx, width = decoded_frame.width(), + height = decoded_frame.height(), + decode_finished_us]() { + Frame& frame = frames_.at(timestamp_rtp).at(spatial_idx); + frame.decode_time = + Timestamp::Micros(decode_finished_us) - frame.decode_start; + if (!frame.encoded) { + frame.width = width; + frame.height = height; + } + frame.decoded = true; + }); + + if (ref_frame.has_value()) { + // Copy hardware-backed frame into main memory to release output buffers + // which number may be limited in hardware decoders. 
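The LeakyBucket helper defined earlier feeds the transmission_time_ms samples added in Aggregate() further below. A standalone sketch of the same arithmetic with made-up numbers (500 kbps target, 30 fps, one oversized keyframe followed by two delta frames):

    #include <algorithm>
    #include <cstdio>

    int main() {
      const double target_bps = 500000.0;
      const double frame_interval_s = 1.0 / 30.0;
      const double frame_bytes[] = {25000.0, 2000.0, 2000.0};  // key + two deltas
      double level_bits = 0.0;
      bool first = true;
      for (double bytes : frame_bytes) {
        if (!first) {
          // Drain at the previous frame's target rate, never below zero.
          level_bits = std::max(level_bits - target_bps * frame_interval_s, 0.0);
        }
        first = false;
        level_bits += bytes * 8.0;
        // Mirrors the transmission_time_ms sample added in Aggregate().
        std::printf("transmission_time_ms = %.1f\n",
                    1000.0 * level_bits / target_bps);
      }
      // The oversized keyframe leaves a ~400 ms backlog that the slightly
      // undersized delta frames drain only slowly, which is what the
      // transmission_time_ms metric is meant to expose.
      return 0;
    }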
+ scoped_refptr decoded_buffer = + decoded_frame.video_frame_buffer()->ToI420(); + + task_queue_.PostTask([this, decoded_buffer, ref_frame, + timestamp_rtp = decoded_frame.rtp_timestamp(), + spatial_idx]() { + scoped_refptr ref_buffer = + ScaleFrame(ref_frame->video_frame_buffer(), decoded_buffer->width(), + decoded_buffer->height()) + ->ToI420(); + Frame& frame = frames_.at(timestamp_rtp).at(spatial_idx); + frame.psnr = CalcPsnr(*decoded_buffer, *ref_buffer); + }); + } + } + + std::vector Slice(Filter filter, bool merge) const { + std::vector slice; + for (const auto& [timestamp_rtp, temporal_unit_frames] : frames_) { + if (temporal_unit_frames.empty()) { + continue; + } + + bool is_svc = false; + if (!encoding_settings_.empty()) { + ScalabilityMode scalability_mode = + encoding_settings_.at(timestamp_rtp).scalability_mode; + if (kFullSvcScalabilityModes.count(scalability_mode) > 0 || + (kKeySvcScalabilityModes.count(scalability_mode) > 0 && + temporal_unit_frames.at(0).keyframe)) { + is_svc = true; + } + } + + std::vector subframes; + for (const auto& [spatial_idx, frame] : temporal_unit_frames) { + if (frame.timestamp_rtp < filter.min_timestamp_rtp || + frame.timestamp_rtp > filter.max_timestamp_rtp) { + continue; + } + if (filter.layer_id) { + if (is_svc && + frame.layer_id.spatial_idx > filter.layer_id->spatial_idx) { + continue; + } + if (!is_svc && + frame.layer_id.spatial_idx != filter.layer_id->spatial_idx) { + continue; + } + if (frame.layer_id.temporal_idx > filter.layer_id->temporal_idx) { + continue; + } + } + subframes.push_back(frame); + } + + if (subframes.empty()) { + continue; + } + + if (!merge) { + std::copy(subframes.begin(), subframes.end(), + std::back_inserter(slice)); + continue; + } + + Frame superframe = subframes.back(); + for (const Frame& frame : webrtc::ArrayView(subframes).subview( + 0, subframes.size() - 1)) { + superframe.decoded |= frame.decoded; + superframe.encoded |= frame.encoded; + superframe.frame_size += frame.frame_size; + superframe.keyframe |= frame.keyframe; + superframe.encode_time = + std::max(superframe.encode_time, frame.encode_time); + superframe.decode_time = + std::max(superframe.decode_time, frame.decode_time); + } + + if (!encoding_settings_.empty()) { + RTC_CHECK(encoding_settings_.find(superframe.timestamp_rtp) != + encoding_settings_.end()) + << "No encoding settings for frame " << superframe.timestamp_rtp; + const EncodingSettings& es = + encoding_settings_.at(superframe.timestamp_rtp); + superframe.target_bitrate = GetTargetBitrate(es, filter.layer_id); + superframe.target_framerate = GetTargetFramerate(es, filter.layer_id); + } + + slice.push_back(superframe); + } + return slice; + } + + Stream Aggregate(Filter filter) const { + std::vector frames = Slice(filter, /*merge=*/true); + Stream stream; + LeakyBucket leaky_bucket; + for (const Frame& frame : frames) { + Timestamp time = Timestamp::Micros((frame.timestamp_rtp / k90kHz).us()); + if (!frame.frame_size.IsZero()) { + stream.width.AddSample(StatsSample(frame.width, time)); + stream.height.AddSample(StatsSample(frame.height, time)); + stream.frame_size_bytes.AddSample( + StatsSample(frame.frame_size.bytes(), time)); + stream.keyframe.AddSample(StatsSample(frame.keyframe, time)); + if (frame.qp) { + stream.qp.AddSample(StatsSample(*frame.qp, time)); + } + } + if (frame.encoded) { + stream.encode_time_ms.AddSample( + StatsSample(frame.encode_time.ms(), time)); + } + if (frame.decoded) { + stream.decode_time_ms.AddSample( + StatsSample(frame.decode_time.ms(), time)); + } + if 
(frame.psnr) { + stream.psnr.y.AddSample(StatsSample(frame.psnr->y, time)); + stream.psnr.u.AddSample(StatsSample(frame.psnr->u, time)); + stream.psnr.v.AddSample(StatsSample(frame.psnr->v, time)); + } + if (frame.target_framerate) { + stream.target_framerate_fps.AddSample( + StatsSample(frame.target_framerate->hertz(), time)); + } + if (frame.target_bitrate) { + stream.target_bitrate_kbps.AddSample( + StatsSample(frame.target_bitrate->kbps(), time)); + int buffer_level_bits = leaky_bucket.Update(frame); + stream.transmission_time_ms.AddSample(StatsSample( + 1000 * buffer_level_bits / frame.target_bitrate->bps(), + time)); + } + } + + int num_encoded_frames = stream.frame_size_bytes.NumSamples(); + if (num_encoded_frames == 0) { + return stream; + } + + const Frame& first_frame = frames.front(); + + Filter filter_all_layers{.min_timestamp_rtp = filter.min_timestamp_rtp, + .max_timestamp_rtp = filter.max_timestamp_rtp}; + std::vector frames_all_layers = + Slice(filter_all_layers, /*merge=*/true); + const Frame& last_frame = frames_all_layers.back(); + TimeDelta duration = + (last_frame.timestamp_rtp - first_frame.timestamp_rtp) / k90kHz; + if (last_frame.target_framerate) { + duration += 1 / *last_frame.target_framerate; + } + + DataRate encoded_bitrate = + DataSize::Bytes(stream.frame_size_bytes.GetSum()) / duration; + Frequency encoded_framerate = num_encoded_frames / duration; + + double bitrate_mismatch_pct = 0.0; + if (const auto& target_bitrate = first_frame.target_bitrate; + target_bitrate) { + bitrate_mismatch_pct = 100 * (encoded_bitrate / *target_bitrate - 1); + } + double framerate_mismatch_pct = 0.0; + if (const auto& target_framerate = first_frame.target_framerate; + target_framerate) { + framerate_mismatch_pct = + 100 * (encoded_framerate / *target_framerate - 1); + } + + for (Frame& frame : frames) { + Timestamp time = Timestamp::Micros((frame.timestamp_rtp / k90kHz).us()); + stream.encoded_bitrate_kbps.AddSample( + StatsSample(encoded_bitrate.kbps(), time)); + stream.encoded_framerate_fps.AddSample( + StatsSample(encoded_framerate.hertz(), time)); + stream.bitrate_mismatch_pct.AddSample( + StatsSample(bitrate_mismatch_pct, time)); + stream.framerate_mismatch_pct.AddSample( + StatsSample(framerate_mismatch_pct, time)); + } + + return stream; + } + + void LogMetrics(absl::string_view csv_path, + std::vector frames, + std::map metadata) const { + RTC_LOG(LS_INFO) << "Write metrics to " << csv_path; + FILE* csv_file = fopen(csv_path.data(), "w"); + const std::string delimiter = ";"; + StringBuilder header; + header + << "timestamp_rtp;spatial_idx;temporal_idx;width;height;frame_size_" + "bytes;keyframe;qp;encode_time_us;decode_time_us;psnr_y_db;psnr_u_" + "db;psnr_v_db;target_bitrate_kbps;target_framerate_fps"; + for (const auto& data : metadata) { + header << ";" << data.first; + } + fwrite(header.str().c_str(), 1, header.size(), csv_file); + + for (const Frame& f : frames) { + StringBuilder row; + row << "\n" << f.timestamp_rtp; + row << ";" << f.layer_id.spatial_idx; + row << ";" << f.layer_id.temporal_idx; + row << ";" << f.width; + row << ";" << f.height; + row << ";" << f.frame_size.bytes(); + row << ";" << f.keyframe; + row << ";"; + if (f.qp) { + row << *f.qp; + } + row << ";" << f.encode_time.us(); + row << ";" << f.decode_time.us(); + if (f.psnr) { + row << ";" << f.psnr->y; + row << ";" << f.psnr->u; + row << ";" << f.psnr->v; + } else { + row << ";;;"; + } + + const auto& es = encoding_settings_.at(f.timestamp_rtp); + row << ";" + << 
f.target_bitrate.value_or(GetTargetBitrate(es, f.layer_id)).kbps(); + row << ";" + << f.target_framerate.value_or(GetTargetFramerate(es, f.layer_id)) + .hertz(); + + for (const auto& data : metadata) { + row << ";" << data.second; + } + fwrite(row.str().c_str(), 1, row.size(), csv_file); + } + + fclose(csv_file); + } + + void Flush() { task_queue_.WaitForPreviouslyPostedTasks(); } + + private: + struct FrameId { + uint32_t timestamp_rtp; + int spatial_idx; + + bool operator==(const FrameId& o) const { + return timestamp_rtp == o.timestamp_rtp && spatial_idx == o.spatial_idx; + } + bool operator<(const FrameId& o) const { + return timestamp_rtp < o.timestamp_rtp || + (timestamp_rtp == o.timestamp_rtp && spatial_idx < o.spatial_idx); + } + }; + + Frame::Psnr CalcPsnr(const I420BufferInterface& ref_buffer, + const I420BufferInterface& dec_buffer) { + RTC_CHECK_EQ(ref_buffer.width(), dec_buffer.width()); + RTC_CHECK_EQ(ref_buffer.height(), dec_buffer.height()); + + uint64_t sse_y = libyuv::ComputeSumSquareErrorPlane( + dec_buffer.DataY(), dec_buffer.StrideY(), ref_buffer.DataY(), + ref_buffer.StrideY(), dec_buffer.width(), dec_buffer.height()); + + uint64_t sse_u = libyuv::ComputeSumSquareErrorPlane( + dec_buffer.DataU(), dec_buffer.StrideU(), ref_buffer.DataU(), + ref_buffer.StrideU(), dec_buffer.width() / 2, dec_buffer.height() / 2); + + uint64_t sse_v = libyuv::ComputeSumSquareErrorPlane( + dec_buffer.DataV(), dec_buffer.StrideV(), ref_buffer.DataV(), + ref_buffer.StrideV(), dec_buffer.width() / 2, dec_buffer.height() / 2); + + int num_y_samples = dec_buffer.width() * dec_buffer.height(); + Frame::Psnr psnr; + psnr.y = libyuv::SumSquareErrorToPsnr(sse_y, num_y_samples); + psnr.u = libyuv::SumSquareErrorToPsnr(sse_u, num_y_samples / 4); + psnr.v = libyuv::SumSquareErrorToPsnr(sse_v, num_y_samples / 4); + return psnr; + } + + DataRate GetTargetBitrate(const EncodingSettings& encoding_settings, + std::optional layer_id) const { + int base_spatial_idx; + if (layer_id.has_value()) { + bool is_svc = + kFullSvcScalabilityModes.count(encoding_settings.scalability_mode); + base_spatial_idx = is_svc ? 
0 : layer_id->spatial_idx; + } else { + int num_spatial_layers = + ScalabilityModeToNumSpatialLayers(encoding_settings.scalability_mode); + int num_temporal_layers = ScalabilityModeToNumTemporalLayers( + encoding_settings.scalability_mode); + layer_id = LayerId({.spatial_idx = num_spatial_layers - 1, + .temporal_idx = num_temporal_layers - 1}); + base_spatial_idx = 0; + } + + DataRate bitrate = DataRate::Zero(); + for (int sidx = base_spatial_idx; sidx <= layer_id->spatial_idx; ++sidx) { + for (int tidx = 0; tidx <= layer_id->temporal_idx; ++tidx) { + auto layer_settings = encoding_settings.layers_settings.find( + {.spatial_idx = sidx, .temporal_idx = tidx}); + RTC_CHECK(layer_settings != encoding_settings.layers_settings.end()) + << "bitrate is not specified for layer sidx=" << sidx + << " tidx=" << tidx; + bitrate += layer_settings->second.bitrate; + } + } + return bitrate; + } + + Frequency GetTargetFramerate(const EncodingSettings& encoding_settings, + std::optional layer_id) const { + if (layer_id.has_value()) { + auto layer_settings = encoding_settings.layers_settings.find( + {.spatial_idx = layer_id->spatial_idx, + .temporal_idx = layer_id->temporal_idx}); + RTC_CHECK(layer_settings != encoding_settings.layers_settings.end()) + << "framerate is not specified for layer sidx=" + << layer_id->spatial_idx << " tidx=" << layer_id->temporal_idx; + return layer_settings->second.framerate; + } + return encoding_settings.layers_settings.rbegin()->second.framerate; + } + + SamplesStatsCounter::StatsSample StatsSample(double value, + Timestamp time) const { + return SamplesStatsCounter::StatsSample{value, time}; + } + + LimitedTaskQueue task_queue_; + // RTP timestamp -> spatial layer -> Frame + std::map> frames_; + std::map encoding_settings_; +}; + +class Decoder : public DecodedImageCallback { + public: + Decoder(const Environment& env, + VideoDecoderFactory* decoder_factory, + const DecoderSettings& decoder_settings, + VideoCodecAnalyzer* analyzer) + : env_(env), + decoder_factory_(decoder_factory), + analyzer_(analyzer), + pacer_(decoder_settings.pacing_settings) { + RTC_CHECK(analyzer_) << "Analyzer must be provided"; + + if (decoder_settings.decoder_input_base_path) { + ivf_writer_ = std::make_unique( + *decoder_settings.decoder_input_base_path); + } + + if (decoder_settings.decoder_output_base_path) { + y4m_writer_ = std::make_unique( + *decoder_settings.decoder_output_base_path); + } + } + + void Initialize(const SdpVideoFormat& sdp_video_format) { + decoder_ = decoder_factory_->Create(env_, sdp_video_format); + RTC_CHECK(decoder_) << "Could not create decoder for video format " + << sdp_video_format.ToString(); + + codec_type_ = PayloadStringToCodecType(sdp_video_format.name); + + task_queue_.PostTaskAndWait([this] { + decoder_->RegisterDecodeCompleteCallback(this); + + VideoDecoder::Settings ds; + ds.set_codec_type(*codec_type_); + ds.set_number_of_cores(1); + ds.set_max_render_resolution({1280, 720}); + bool result = decoder_->Configure(ds); + RTC_CHECK(result) << "Failed to configure decoder"; + }); + } + + void Decode(const EncodedImage& encoded_frame, + std::optional ref_frame = std::nullopt) { + int spatial_idx = encoded_frame.SpatialIndex().value_or( + encoded_frame.SimulcastIndex().value_or(0)); + { + MutexLock lock(&mutex_); + RTC_CHECK_EQ(spatial_idx_.value_or(spatial_idx), spatial_idx) + << "Spatial index changed from " << *spatial_idx_ << " to " + << spatial_idx; + spatial_idx_ = spatial_idx; + + if (ref_frame.has_value()) { + 
ref_frames_.insert({encoded_frame.RtpTimestamp(), *ref_frame}); + } + } + + Timestamp pts = + Timestamp::Micros((encoded_frame.RtpTimestamp() / k90kHz).us()); + + task_queue_.PostScheduledTask( + [this, encoded_frame] { + analyzer_->StartDecode(encoded_frame); + int error = decoder_->Decode(encoded_frame, /*render_time_ms*/ 0); + if (error != 0) { + RTC_LOG(LS_WARNING) + << "Decode failed with error code " << error + << " RTP timestamp " << encoded_frame.RtpTimestamp(); + } + }, + pacer_.Schedule(pts)); + + if (ivf_writer_) { + ivf_writer_->Write(encoded_frame, *codec_type_); + } + } + + void Flush() { + // TODO(webrtc:14852): Add Flush() to VideoDecoder API. + task_queue_.PostTaskAndWait([this] { decoder_->Release(); }); + } + + private: + int Decoded(VideoFrame& decoded_frame) override { + int spatial_idx; + std::optional ref_frame; + { + MutexLock lock(&mutex_); + spatial_idx = *spatial_idx_; + + if (ref_frames_.size() > 0) { + auto it = ref_frames_.find(decoded_frame.rtp_timestamp()); + RTC_CHECK(it != ref_frames_.end()); + ref_frame = it->second; + ref_frames_.erase(ref_frames_.begin(), std::next(it)); + } + } + + analyzer_->FinishDecode(decoded_frame, spatial_idx, ref_frame); + + if (y4m_writer_) { + y4m_writer_->Write(decoded_frame, spatial_idx); + } + + return WEBRTC_VIDEO_CODEC_OK; + } + + const Environment env_; + VideoDecoderFactory* decoder_factory_; + std::unique_ptr decoder_; + VideoCodecAnalyzer* const analyzer_; + Pacer pacer_; + LimitedTaskQueue task_queue_; + std::unique_ptr ivf_writer_; + std::unique_ptr y4m_writer_; + std::optional codec_type_; + std::optional spatial_idx_ RTC_GUARDED_BY(mutex_); + std::map ref_frames_ RTC_GUARDED_BY(mutex_); + Mutex mutex_; +}; + +class Encoder : public EncodedImageCallback { + public: + using EncodeCallback = + absl::AnyInvocable; + + Encoder(const Environment& env, + VideoEncoderFactory* encoder_factory, + const EncoderSettings& encoder_settings, + VideoCodecAnalyzer* analyzer) + : env_(env), + encoder_factory_(encoder_factory), + analyzer_(analyzer), + pacer_(encoder_settings.pacing_settings) { + RTC_CHECK(analyzer_) << "Analyzer must be provided"; + + if (encoder_settings.encoder_input_base_path) { + y4m_writer_ = std::make_unique( + *encoder_settings.encoder_input_base_path); + } + + if (encoder_settings.encoder_output_base_path) { + ivf_writer_ = std::make_unique( + *encoder_settings.encoder_output_base_path); + } + } + + void Initialize(const EncodingSettings& encoding_settings) { + encoder_ = + encoder_factory_->Create(env_, encoding_settings.sdp_video_format); + RTC_CHECK(encoder_) << "Could not create encoder for video format " + << encoding_settings.sdp_video_format.ToString(); + + codec_type_ = + PayloadStringToCodecType(encoding_settings.sdp_video_format.name); + + task_queue_.PostTaskAndWait([this, encoding_settings] { + encoder_->RegisterEncodeCompleteCallback(this); + Configure(encoding_settings); + SetRates(encoding_settings); + }); + } + + void Encode(const VideoFrame& input_frame, + const EncodingSettings& encoding_settings, + EncodeCallback callback) { + { + MutexLock lock(&mutex_); + callbacks_[input_frame.rtp_timestamp()] = std::move(callback); + } + + Timestamp pts = + Timestamp::Micros((input_frame.rtp_timestamp() / k90kHz).us()); + + task_queue_.PostScheduledTask( + [this, input_frame, encoding_settings] { + analyzer_->StartEncode(input_frame, encoding_settings); + + if (!last_encoding_settings_ || + !IsSameRate(encoding_settings, *last_encoding_settings_)) { + SetRates(encoding_settings); + } + 
last_encoding_settings_ = encoding_settings; + + std::vector frame_types = { + encoding_settings.keyframe ? VideoFrameType::kVideoFrameKey + : VideoFrameType::kVideoFrameDelta}; + int error = encoder_->Encode(input_frame, &frame_types); + if (error != 0) { + RTC_LOG(LS_WARNING) + << "Encode failed with error code " << error + << " RTP timestamp " << input_frame.rtp_timestamp(); + } + }, + pacer_.Schedule(pts)); + + if (y4m_writer_) { + y4m_writer_->Write(input_frame, /*spatial_idx=*/0); + } + } + + void Flush() { + task_queue_.PostTaskAndWait([this] { encoder_->Release(); }); + if (last_superframe_) { + int num_spatial_layers = + ScalabilityModeToNumSpatialLayers(last_superframe_->scalability_mode); + for (int sidx = *last_superframe_->encoded_frame.SpatialIndex() + 1; + sidx < num_spatial_layers; ++sidx) { + last_superframe_->encoded_frame.SetSpatialIndex(sidx); + DeliverEncodedFrame(last_superframe_->encoded_frame); + } + last_superframe_.reset(); + } + } + + private: + struct Superframe { + EncodedImage encoded_frame; + scoped_refptr encoded_data; + ScalabilityMode scalability_mode; + }; + + Result OnEncodedImage(const EncodedImage& encoded_frame, + const CodecSpecificInfo* codec_specific_info) override { + analyzer_->FinishEncode(encoded_frame); + + if (last_superframe_ && last_superframe_->encoded_frame.RtpTimestamp() != + encoded_frame.RtpTimestamp()) { + // New temporal unit. We have frame of previous temporal unit (TU) stored + // which means that the previous TU used spatial prediction. If encoder + // dropped a frame of layer X in the previous TU, mark the stored frame + // as a frame belonging to layer >X and deliver it such that decoders of + // layer >X receive encoded lower layers. + int num_spatial_layers = + ScalabilityModeToNumSpatialLayers(last_superframe_->scalability_mode); + for (int sidx = + last_superframe_->encoded_frame.SpatialIndex().value_or(0) + 1; + sidx < num_spatial_layers; ++sidx) { + last_superframe_->encoded_frame.SetSpatialIndex(sidx); + DeliverEncodedFrame(last_superframe_->encoded_frame); + } + last_superframe_.reset(); + } + + const EncodedImage& superframe = + MakeSuperFrame(encoded_frame, codec_specific_info); + DeliverEncodedFrame(superframe); + + return Result(Result::Error::OK); + } + + void DeliverEncodedFrame(const EncodedImage& encoded_frame) { + { + MutexLock lock(&mutex_); + auto it = callbacks_.find(encoded_frame.RtpTimestamp()); + RTC_CHECK(it != callbacks_.end()); + it->second(encoded_frame); + callbacks_.erase(callbacks_.begin(), it); + } + + if (ivf_writer_ != nullptr) { + ivf_writer_->Write(encoded_frame, codec_type_); + } + } + + void Configure(const EncodingSettings& es) { + const LayerSettings& top_layer_settings = + es.layers_settings.rbegin()->second; + const int num_spatial_layers = + ScalabilityModeToNumSpatialLayers(es.scalability_mode); + const int num_temporal_layers = + ScalabilityModeToNumTemporalLayers(es.scalability_mode); + DataRate total_bitrate = std::accumulate( + es.layers_settings.begin(), es.layers_settings.end(), DataRate::Zero(), + [](DataRate acc, const std::pair layer) { + return acc + layer.second.bitrate; + }); + + VideoCodec vc; + vc.width = top_layer_settings.resolution.width; + vc.height = top_layer_settings.resolution.height; + vc.startBitrate = total_bitrate.kbps(); + vc.maxBitrate = total_bitrate.kbps(); + vc.minBitrate = 0; + vc.maxFramerate = top_layer_settings.framerate.hertz(); + vc.active = true; + vc.numberOfSimulcastStreams = 0; + vc.mode = es.content_type; + vc.SetFrameDropEnabled(es.frame_drop); 
+ vc.SetScalabilityMode(es.scalability_mode); + vc.SetVideoEncoderComplexity(VideoCodecComplexity::kComplexityNormal); + + vc.codecType = PayloadStringToCodecType(es.sdp_video_format.name); + switch (vc.codecType) { + case kVideoCodecVP8: + *(vc.VP8()) = VideoEncoder::GetDefaultVp8Settings(); + vc.VP8()->SetNumberOfTemporalLayers(num_temporal_layers); + vc.SetScalabilityMode(std::vector{ + ScalabilityMode::kL1T1, ScalabilityMode::kL1T2, + ScalabilityMode::kL1T3}[num_temporal_layers - 1]); + vc.qpMax = kDefaultVideoMaxQpVpx; + break; + case kVideoCodecVP9: + *(vc.VP9()) = VideoEncoder::GetDefaultVp9Settings(); + vc.qpMax = kDefaultVideoMaxQpVpx; + break; + case kVideoCodecAV1: + vc.qpMax = kDefaultVideoMaxQpAv1; + break; + case kVideoCodecH264: + *(vc.H264()) = VideoEncoder::GetDefaultH264Settings(); + vc.H264()->SetNumberOfTemporalLayers(num_temporal_layers); + vc.qpMax = kDefaultVideoMaxQpH26x; + break; + case kVideoCodecH265: + vc.qpMax = kDefaultVideoMaxQpH26x; + break; + case kVideoCodecGeneric: + RTC_CHECK_NOTREACHED(); + break; + } + + bool is_simulcast = + num_spatial_layers > 1 && + (vc.codecType == kVideoCodecVP8 || vc.codecType == kVideoCodecH264 || + vc.codecType == kVideoCodecH265); + if (is_simulcast) { + vc.numberOfSimulcastStreams = num_spatial_layers; + for (int sidx = 0; sidx < num_spatial_layers; ++sidx) { + auto tl0_settings = es.layers_settings.find( + LayerId{.spatial_idx = sidx, .temporal_idx = 0}); + auto tlx_settings = es.layers_settings.find(LayerId{ + .spatial_idx = sidx, .temporal_idx = num_temporal_layers - 1}); + DataRate total_layer_bitrate = std::accumulate( + tl0_settings, tlx_settings, DataRate::Zero(), + [](DataRate acc, + const std::pair layer) { + return acc + layer.second.bitrate; + }); + SimulcastStream& ss = vc.simulcastStream[sidx]; + ss.width = tl0_settings->second.resolution.width; + ss.height = tl0_settings->second.resolution.height; + ss.numberOfTemporalLayers = num_temporal_layers; + ss.maxBitrate = total_layer_bitrate.kbps(); + ss.targetBitrate = total_layer_bitrate.kbps(); + ss.minBitrate = 0; + ss.maxFramerate = vc.maxFramerate; + ss.qpMax = vc.qpMax; + ss.active = true; + } + } + + VideoEncoder::Settings ves( + VideoEncoder::Capabilities(/*loss_notification=*/false), + /*number_of_cores=*/1, + /*max_payload_size=*/1440); + + int result = encoder_->InitEncode(&vc, ves); + RTC_CHECK(result == WEBRTC_VIDEO_CODEC_OK); + } + + void SetRates(const EncodingSettings& es) { + VideoEncoder::RateControlParameters rc; + int num_spatial_layers = + ScalabilityModeToNumSpatialLayers(es.scalability_mode); + int num_temporal_layers = + ScalabilityModeToNumTemporalLayers(es.scalability_mode); + for (int sidx = 0; sidx < num_spatial_layers; ++sidx) { + for (int tidx = 0; tidx < num_temporal_layers; ++tidx) { + auto layers_settings = es.layers_settings.find( + {.spatial_idx = sidx, .temporal_idx = tidx}); + RTC_CHECK(layers_settings != es.layers_settings.end()) + << "Bitrate for layer S=" << sidx << " T=" << tidx << " is not set"; + rc.bitrate.SetBitrate(sidx, tidx, + layers_settings->second.bitrate.bps()); + } + } + rc.framerate_fps = + es.layers_settings.rbegin()->second.framerate.hertz(); + encoder_->SetRates(rc); + } + + bool IsSameRate(const EncodingSettings& a, const EncodingSettings& b) const { + for (auto [layer_id, layer] : a.layers_settings) { + const auto& other_layer = b.layers_settings.at(layer_id); + if (layer.bitrate != other_layer.bitrate || + layer.framerate != other_layer.framerate) { + return false; + } + } + + return true; + } + + static 
bool IsSvc(const EncodedImage& encoded_frame, + const CodecSpecificInfo& codec_specific_info) { + if (!codec_specific_info.scalability_mode) { + return false; + } + ScalabilityMode scalability_mode = *codec_specific_info.scalability_mode; + return (kFullSvcScalabilityModes.count(scalability_mode) || + (kKeySvcScalabilityModes.count(scalability_mode) && + encoded_frame.FrameType() == VideoFrameType::kVideoFrameKey)); + } + + const EncodedImage& MakeSuperFrame( + const EncodedImage& encoded_frame, + const CodecSpecificInfo* codec_specific_info) { + if (last_superframe_) { + // Append to base spatial layer frame(s). + RTC_CHECK_EQ(*encoded_frame.SpatialIndex(), + *last_superframe_->encoded_frame.SpatialIndex() + 1) + << "Inter-layer frame drops are not supported."; + size_t current_size = last_superframe_->encoded_data->size(); + last_superframe_->encoded_data->Realloc(current_size + + encoded_frame.size()); + memcpy(last_superframe_->encoded_data->data() + current_size, + encoded_frame.data(), encoded_frame.size()); + last_superframe_->encoded_frame.SetEncodedData( + last_superframe_->encoded_data); + last_superframe_->encoded_frame.SetSpatialIndex( + encoded_frame.SpatialIndex()); + return last_superframe_->encoded_frame; + } + + RTC_CHECK(codec_specific_info != nullptr); + if (IsSvc(encoded_frame, *codec_specific_info)) { + last_superframe_ = Superframe{ + .encoded_frame = EncodedImage(encoded_frame), + .encoded_data = EncodedImageBuffer::Create(encoded_frame.data(), + encoded_frame.size()), + .scalability_mode = *codec_specific_info->scalability_mode}; + last_superframe_->encoded_frame.SetEncodedData( + last_superframe_->encoded_data); + return last_superframe_->encoded_frame; + } + + return encoded_frame; + } + + const Environment env_; + VideoEncoderFactory* const encoder_factory_; + std::unique_ptr encoder_; + VideoCodecAnalyzer* const analyzer_; + Pacer pacer_; + std::optional last_encoding_settings_; + std::unique_ptr bitrate_allocator_; + LimitedTaskQueue task_queue_; + std::unique_ptr y4m_writer_; + std::unique_ptr ivf_writer_; + std::map sidx_ RTC_GUARDED_BY(mutex_); + std::map callbacks_ RTC_GUARDED_BY(mutex_); + VideoCodecType codec_type_; + std::optional last_superframe_; + Mutex mutex_; +}; + +void ConfigureSimulcast(const FieldTrialsView& field_trials, VideoCodec* vc) { + int num_spatial_layers = + ScalabilityModeToNumSpatialLayers(*vc->GetScalabilityMode()); + int num_temporal_layers = + ScalabilityModeToNumTemporalLayers(*vc->GetScalabilityMode()); + + if (num_spatial_layers == 1) { + SimulcastStream* ss = &vc->simulcastStream[0]; + ss->width = vc->width; + ss->height = vc->height; + ss->numberOfTemporalLayers = num_temporal_layers; + ss->maxBitrate = vc->maxBitrate; + ss->targetBitrate = vc->maxBitrate; + ss->minBitrate = vc->minBitrate; + ss->qpMax = vc->qpMax; + ss->active = true; + return; + } + + VideoEncoderConfig encoder_config; + encoder_config.codec_type = vc->codecType; + encoder_config.number_of_streams = num_spatial_layers; + encoder_config.simulcast_layers.resize(num_spatial_layers); + VideoEncoder::EncoderInfo encoder_info; + auto stream_factory = make_ref_counted(encoder_info); + const std::vector streams = stream_factory->CreateEncoderStreams( + field_trials, vc->width, vc->height, encoder_config); + vc->numberOfSimulcastStreams = streams.size(); + RTC_CHECK_LE(vc->numberOfSimulcastStreams, num_spatial_layers); + if (vc->numberOfSimulcastStreams < num_spatial_layers) { + vc->SetScalabilityMode(LimitNumSpatialLayers(*vc->GetScalabilityMode(), + 
vc->numberOfSimulcastStreams)); + } + + for (int i = 0; i < vc->numberOfSimulcastStreams; ++i) { + SimulcastStream* ss = &vc->simulcastStream[i]; + ss->width = streams[i].width; + ss->height = streams[i].height; + ss->numberOfTemporalLayers = num_temporal_layers; + ss->maxBitrate = streams[i].max_bitrate_bps / 1000; + ss->targetBitrate = streams[i].target_bitrate_bps / 1000; + ss->minBitrate = streams[i].min_bitrate_bps / 1000; + ss->qpMax = vc->qpMax; + ss->active = true; + } +} + +void SetDefaultCodecSpecificSettings(VideoCodec* vc, int num_temporal_layers) { + switch (vc->codecType) { + case kVideoCodecVP8: + *(vc->VP8()) = VideoEncoder::GetDefaultVp8Settings(); + vc->VP8()->SetNumberOfTemporalLayers(num_temporal_layers); + break; + case kVideoCodecVP9: { + *(vc->VP9()) = VideoEncoder::GetDefaultVp9Settings(); + vc->VP9()->SetNumberOfTemporalLayers(num_temporal_layers); + } break; + case kVideoCodecH264: { + *(vc->H264()) = VideoEncoder::GetDefaultH264Settings(); + vc->H264()->SetNumberOfTemporalLayers(num_temporal_layers); + } break; + case kVideoCodecAV1: + case kVideoCodecH265: + break; + case kVideoCodecGeneric: + RTC_CHECK_NOTREACHED(); + } +} + +std::tuple, ScalabilityMode> +SplitBitrateAndUpdateScalabilityMode(const Environment& env, + std::string codec_type, + ScalabilityMode scalability_mode, + int width, + int height, + std::vector layer_bitrate, + Frequency framerate, + VideoCodecMode content_type) { + int num_spatial_layers = ScalabilityModeToNumSpatialLayers(scalability_mode); + int num_temporal_layers = + ScalabilityModeToNumTemporalLayers(scalability_mode); + + int num_bitrates = static_cast(layer_bitrate.size()); + RTC_CHECK(num_bitrates == 1 || num_bitrates == num_spatial_layers || + num_bitrates == num_spatial_layers * num_temporal_layers); + + if (num_bitrates == num_spatial_layers * num_temporal_layers) { + return std::make_tuple(layer_bitrate, scalability_mode); + } + + DataRate total_bitrate = std::accumulate( + layer_bitrate.begin(), layer_bitrate.end(), DataRate::Zero()); + + VideoCodec vc; + vc.codecType = PayloadStringToCodecType(codec_type); + vc.width = width; + vc.height = height; + vc.startBitrate = total_bitrate.kbps(); + vc.maxBitrate = total_bitrate.kbps(); + vc.minBitrate = 0; + vc.maxFramerate = framerate.hertz(); + vc.numberOfSimulcastStreams = 0; + vc.mode = content_type; + vc.SetScalabilityMode(scalability_mode); + SetDefaultCodecSpecificSettings(&vc, num_temporal_layers); + + if (num_bitrates == num_spatial_layers) { + switch (vc.codecType) { + case kVideoCodecVP8: + case kVideoCodecH264: + case kVideoCodecH265: + vc.numberOfSimulcastStreams = num_spatial_layers; + for (int sidx = 0; sidx < num_spatial_layers; ++sidx) { + SimulcastStream* ss = &vc.simulcastStream[sidx]; + ss->width = width >> (num_spatial_layers - sidx - 1); + ss->height = height >> (num_spatial_layers - sidx - 1); + ss->maxFramerate = vc.maxFramerate; + ss->numberOfTemporalLayers = num_temporal_layers; + ss->maxBitrate = layer_bitrate[sidx].kbps(); + ss->targetBitrate = layer_bitrate[sidx].kbps(); + ss->minBitrate = 0; + ss->qpMax = 0; + ss->active = true; + } + break; + case kVideoCodecVP9: + case kVideoCodecAV1: + for (int sidx = num_spatial_layers - 1; sidx >= 0; --sidx) { + SpatialLayer* ss = &vc.spatialLayers[sidx]; + ss->width = width >> (num_spatial_layers - sidx - 1); + ss->height = height >> (num_spatial_layers - sidx - 1); + ss->maxFramerate = vc.maxFramerate; + ss->numberOfTemporalLayers = num_temporal_layers; + ss->maxBitrate = layer_bitrate[sidx].kbps(); + 
ss->targetBitrate = layer_bitrate[sidx].kbps(); + ss->minBitrate = 0; + ss->qpMax = 0; + ss->active = true; + } + break; + case kVideoCodecGeneric: + RTC_CHECK_NOTREACHED(); + } + } else { + switch (vc.codecType) { + case kVideoCodecVP8: + case kVideoCodecH264: + case kVideoCodecH265: + ConfigureSimulcast(env.field_trials(), &vc); + break; + case kVideoCodecVP9: { + const std::vector spatialLayers = GetVp9SvcConfig(vc); + for (size_t i = 0; i < spatialLayers.size(); ++i) { + vc.spatialLayers[i] = spatialLayers[i]; + vc.spatialLayers[i].active = true; + } + } break; + case kVideoCodecAV1: { + bool result = + SetAv1SvcConfig(vc, num_spatial_layers, num_temporal_layers); + RTC_CHECK(result) << "SetAv1SvcConfig failed"; + } break; + case kVideoCodecGeneric: + RTC_CHECK_NOTREACHED(); + } + + if (*vc.GetScalabilityMode() != scalability_mode) { + RTC_LOG(LS_WARNING) << "Scalability mode changed from " + << ScalabilityModeToString(scalability_mode) << " to " + << ScalabilityModeToString(*vc.GetScalabilityMode()); + num_spatial_layers = + ScalabilityModeToNumSpatialLayers(*vc.GetScalabilityMode()); + num_temporal_layers = + ScalabilityModeToNumTemporalLayers(*vc.GetScalabilityMode()); + } + } + + std::unique_ptr bitrate_allocator = + CreateBuiltinVideoBitrateAllocatorFactory()->Create(env, vc); + VideoBitrateAllocation bitrate_allocation = + bitrate_allocator->Allocate(VideoBitrateAllocationParameters( + total_bitrate.bps(), framerate.hertz())); + + std::vector bitrates; + for (int sidx = 0; sidx < num_spatial_layers; ++sidx) { + for (int tidx = 0; tidx < num_temporal_layers; ++tidx) { + int bitrate_bps = bitrate_allocation.GetBitrate(sidx, tidx); + bitrates.push_back(DataRate::BitsPerSec(bitrate_bps)); + } + } + + return std::make_tuple(bitrates, *vc.GetScalabilityMode()); +} + +} // namespace + +void VideoCodecStats::Stream::LogMetrics( + MetricsLogger* logger, + std::string test_case_name, + std::string prefix, + std::map metadata) const { + logger->LogMetric(prefix + "width", test_case_name, width, Unit::kCount, + ImprovementDirection::kBiggerIsBetter, metadata); + logger->LogMetric(prefix + "height", test_case_name, height, Unit::kCount, + ImprovementDirection::kBiggerIsBetter, metadata); + logger->LogMetric(prefix + "frame_size_bytes", test_case_name, + frame_size_bytes, Unit::kBytes, + ImprovementDirection::kNeitherIsBetter, metadata); + logger->LogMetric(prefix + "keyframe", test_case_name, keyframe, Unit::kCount, + ImprovementDirection::kSmallerIsBetter, metadata); + logger->LogMetric(prefix + "qp", test_case_name, qp, Unit::kUnitless, + ImprovementDirection::kSmallerIsBetter, metadata); + // TODO(webrtc:14852): Change to us or even ns. + logger->LogMetric(prefix + "encode_time_ms", test_case_name, encode_time_ms, + Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter, + metadata); + logger->LogMetric(prefix + "decode_time_ms", test_case_name, decode_time_ms, + Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter, + metadata); + // TODO(webrtc:14852): Change to kUnitLess. kKilobitsPerSecond are converted + // to bytes per second in Chromeperf dash. + logger->LogMetric(prefix + "target_bitrate_kbps", test_case_name, + target_bitrate_kbps, Unit::kKilobitsPerSecond, + ImprovementDirection::kBiggerIsBetter, metadata); + logger->LogMetric(prefix + "target_framerate_fps", test_case_name, + target_framerate_fps, Unit::kHertz, + ImprovementDirection::kBiggerIsBetter, metadata); + // TODO(webrtc:14852): Change to kUnitLess. 
kKilobitsPerSecond are converted + // to bytes per second in Chromeperf dash. + logger->LogMetric(prefix + "encoded_bitrate_kbps", test_case_name, + encoded_bitrate_kbps, Unit::kKilobitsPerSecond, + ImprovementDirection::kBiggerIsBetter, metadata); + logger->LogMetric(prefix + "encoded_framerate_fps", test_case_name, + encoded_framerate_fps, Unit::kHertz, + ImprovementDirection::kBiggerIsBetter, metadata); + logger->LogMetric(prefix + "bitrate_mismatch_pct", test_case_name, + bitrate_mismatch_pct, Unit::kPercent, + ImprovementDirection::kNeitherIsBetter, metadata); + logger->LogMetric(prefix + "framerate_mismatch_pct", test_case_name, + framerate_mismatch_pct, Unit::kPercent, + ImprovementDirection::kNeitherIsBetter, metadata); + logger->LogMetric(prefix + "transmission_time_ms", test_case_name, + transmission_time_ms, Unit::kMilliseconds, + ImprovementDirection::kSmallerIsBetter, metadata); + logger->LogMetric(prefix + "psnr_y_db", test_case_name, psnr.y, + Unit::kUnitless, ImprovementDirection::kBiggerIsBetter, + metadata); + logger->LogMetric(prefix + "psnr_u_db", test_case_name, psnr.u, + Unit::kUnitless, ImprovementDirection::kBiggerIsBetter, + metadata); + logger->LogMetric(prefix + "psnr_v_db", test_case_name, psnr.v, + Unit::kUnitless, ImprovementDirection::kBiggerIsBetter, + metadata); +} + +EncodingSettings VideoCodecTester::CreateEncodingSettings( + const Environment& env, + std::string codec_type, + std::string scalability_name, + int width, + int height, + std::vector bitrate, + Frequency framerate, + bool screencast, + bool frame_drop) { + VideoCodecMode content_type = screencast ? VideoCodecMode::kScreensharing + : VideoCodecMode::kRealtimeVideo; + + auto [adjusted_bitrate, scalability_mode] = + SplitBitrateAndUpdateScalabilityMode( + env, codec_type, *ScalabilityModeFromString(scalability_name), width, + height, bitrate, framerate, content_type); + + int num_spatial_layers = ScalabilityModeToNumSpatialLayers(scalability_mode); + int num_temporal_layers = + ScalabilityModeToNumTemporalLayers(scalability_mode); + + std::map layers_settings; + for (int sidx = 0; sidx < num_spatial_layers; ++sidx) { + int layer_width = width >> (num_spatial_layers - sidx - 1); + int layer_height = height >> (num_spatial_layers - sidx - 1); + for (int tidx = 0; tidx < num_temporal_layers; ++tidx) { + layers_settings.emplace( + LayerId{.spatial_idx = sidx, .temporal_idx = tidx}, + LayerSettings{ + .resolution = {.width = layer_width, .height = layer_height}, + .framerate = framerate / (1 << (num_temporal_layers - tidx - 1)), + .bitrate = adjusted_bitrate[sidx * num_temporal_layers + tidx]}); + } + } + + SdpVideoFormat sdp_video_format = SdpVideoFormat(codec_type); + if (codec_type == "H264") { + const std::string packetization_mode = + "1"; // H264PacketizationMode::SingleNalUnit + sdp_video_format.parameters = + CreateH264Format(H264Profile::kProfileConstrainedBaseline, + H264Level::kLevel3_1, packetization_mode, + /*add_scalability_modes=*/false) + .parameters; + } + + return EncodingSettings{.sdp_video_format = sdp_video_format, + .scalability_mode = scalability_mode, + .content_type = content_type, + .frame_drop = frame_drop, + .layers_settings = layers_settings}; +} + +std::map VideoCodecTester::CreateFrameSettings( + const EncodingSettings& encoding_settings, + int num_frames, + uint32_t timestamp_rtp) { + std::map frame_settings; + Frequency framerate = + encoding_settings.layers_settings.rbegin()->second.framerate; + for (int frame_num = 0; frame_num < num_frames; ++frame_num) { + 
frame_settings.emplace(timestamp_rtp, encoding_settings); + timestamp_rtp += k90kHz / framerate; + } + return frame_settings; +} + +std::unique_ptr +VideoCodecTester::RunDecodeTest(const Environment& env, + CodedVideoSource* video_source, + VideoDecoderFactory* decoder_factory, + const DecoderSettings& decoder_settings, + const SdpVideoFormat& sdp_video_format) { + std::unique_ptr analyzer = + std::make_unique(); + Decoder decoder(env, decoder_factory, decoder_settings, analyzer.get()); + decoder.Initialize(sdp_video_format); + + while (auto frame = video_source->PullFrame()) { + decoder.Decode(*frame); + } + + decoder.Flush(); + analyzer->Flush(); + return std::move(analyzer); +} + +std::unique_ptr +VideoCodecTester::RunEncodeTest( + const Environment& env, + const VideoSourceSettings& source_settings, + VideoEncoderFactory* encoder_factory, + const EncoderSettings& encoder_settings, + const std::map& encoding_settings) { + VideoSource video_source(source_settings); + std::unique_ptr analyzer = + std::make_unique(); + Encoder encoder(env, encoder_factory, encoder_settings, analyzer.get()); + encoder.Initialize(encoding_settings.begin()->second); + + for (const auto& [timestamp_rtp, frame_settings] : encoding_settings) { + const EncodingSettings::LayerSettings& top_layer = + frame_settings.layers_settings.rbegin()->second; + VideoFrame source_frame = video_source.PullFrame( + timestamp_rtp, top_layer.resolution, top_layer.framerate); + encoder.Encode(source_frame, frame_settings, + [](const EncodedImage& encoded_frame) {}); + } + + encoder.Flush(); + analyzer->Flush(); + return std::move(analyzer); +} + +std::unique_ptr +VideoCodecTester::RunEncodeDecodeTest( + const Environment& env, + const VideoSourceSettings& source_settings, + VideoEncoderFactory* encoder_factory, + VideoDecoderFactory* decoder_factory, + const EncoderSettings& encoder_settings, + const DecoderSettings& decoder_settings, + const std::map& encoding_settings) { + VideoSource video_source(source_settings); + std::unique_ptr analyzer = + std::make_unique(); + const EncodingSettings& first_frame_settings = + encoding_settings.begin()->second; + Encoder encoder(env, encoder_factory, encoder_settings, analyzer.get()); + encoder.Initialize(first_frame_settings); + + int num_spatial_layers = + ScalabilityModeToNumSpatialLayers(first_frame_settings.scalability_mode); + std::vector> decoders; + for (int sidx = 0; sidx < num_spatial_layers; ++sidx) { + auto decoder = std::make_unique(env, decoder_factory, + decoder_settings, analyzer.get()); + decoder->Initialize(first_frame_settings.sdp_video_format); + decoders.push_back(std::move(decoder)); + } + + for (const auto& [timestamp_rtp, frame_settings] : encoding_settings) { + const EncodingSettings::LayerSettings& top_layer = + frame_settings.layers_settings.rbegin()->second; + VideoFrame source_frame = video_source.PullFrame( + timestamp_rtp, top_layer.resolution, top_layer.framerate); + encoder.Encode( + source_frame, frame_settings, + [&decoders, source_frame](const EncodedImage& encoded_frame) { + int sidx = encoded_frame.SpatialIndex().value_or( + encoded_frame.SimulcastIndex().value_or(0)); + decoders.at(sidx)->Decode(encoded_frame, source_frame); + }); + } + + encoder.Flush(); + for (auto& decoder : decoders) { + decoder->Flush(); + } + analyzer->Flush(); + return std::move(analyzer); +} + +} // namespace test +} // namespace webrtc diff --git a/test/video_codec_tester.h b/test/video_codec_tester.h new file mode 100644 index 0000000000..fcec47e702 --- /dev/null +++ 
b/test/video_codec_tester.h @@ -0,0 +1,244 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef TEST_VIDEO_CODEC_TESTER_H_ +#define TEST_VIDEO_CODEC_TESTER_H_ + +#include +#include +#include +#include +#include +#include + +#include "api/environment/environment.h" +#include "api/numerics/samples_stats_counter.h" +#include "api/test/metrics/metric.h" +#include "api/test/metrics/metrics_logger.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/frequency.h" +#include "api/video/encoded_image.h" +#include "api/video/resolution.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" + +namespace webrtc { +namespace test { + +class VideoCodecTester { + public: + struct LayerId { + int spatial_idx = 0; + int temporal_idx = 0; + + bool operator==(const LayerId& o) const { + return spatial_idx == o.spatial_idx && temporal_idx == o.temporal_idx; + } + bool operator<(const LayerId& o) const { + return spatial_idx < o.spatial_idx || + (spatial_idx == o.spatial_idx && temporal_idx < o.temporal_idx); + } + }; + + struct EncodingSettings { + SdpVideoFormat sdp_video_format = SdpVideoFormat::VP8(); + ScalabilityMode scalability_mode = ScalabilityMode::kL1T1; + VideoCodecMode content_type = VideoCodecMode::kRealtimeVideo; + bool frame_drop = true; + bool keyframe = false; + + struct LayerSettings { + Resolution resolution; + Frequency framerate; + DataRate bitrate; + }; + std::map layers_settings; + }; + + class VideoCodecStats { + public: + struct Filter { + uint32_t min_timestamp_rtp = std::numeric_limits::min(); + uint32_t max_timestamp_rtp = std::numeric_limits::max(); + std::optional layer_id; + }; + + struct Frame { + uint32_t timestamp_rtp = 0; + LayerId layer_id; + bool encoded = false; + bool decoded = false; + int width = 0; + int height = 0; + DataSize frame_size = DataSize::Zero(); + bool keyframe = false; + std::optional qp; + Timestamp encode_start = Timestamp::Zero(); + TimeDelta encode_time = TimeDelta::Zero(); + Timestamp decode_start = Timestamp::Zero(); + TimeDelta decode_time = TimeDelta::Zero(); + std::optional target_bitrate; + std::optional target_framerate; + + struct Psnr { + double y = 0.0; + double u = 0.0; + double v = 0.0; + }; + std::optional psnr; + }; + + struct Stream { + SamplesStatsCounter width; + SamplesStatsCounter height; + SamplesStatsCounter frame_size_bytes; + SamplesStatsCounter keyframe; + SamplesStatsCounter qp; + SamplesStatsCounter encode_time_ms; + SamplesStatsCounter decode_time_ms; + SamplesStatsCounter target_bitrate_kbps; + SamplesStatsCounter target_framerate_fps; + SamplesStatsCounter encoded_bitrate_kbps; + SamplesStatsCounter encoded_framerate_fps; + SamplesStatsCounter bitrate_mismatch_pct; + SamplesStatsCounter framerate_mismatch_pct; + SamplesStatsCounter transmission_time_ms; + + struct Psnr { + SamplesStatsCounter y; + SamplesStatsCounter u; + SamplesStatsCounter v; + } psnr; + + // Logs `Stream` metrics to provided `MetricsLogger`. 
+ void LogMetrics(MetricsLogger* logger, + std::string test_case_name, + std::string prefix, + std::map metadata = {}) const; + }; + + virtual ~VideoCodecStats() = default; + + // Returns frames for the slice specified by `filter`. If `merge` is true, + // also merges frames belonging to the same temporal unit into one + // superframe. + virtual std::vector Slice(Filter filter, bool merge) const = 0; + + // Returns video statistics aggregated for the slice specified by `filter`. + virtual Stream Aggregate(Filter filter) const = 0; + + // Write metrics to a CSV file. + virtual void LogMetrics( + absl::string_view csv_path, + std::vector frames, + std::map metadata) const = 0; + }; + + // Pacing settings for codec input. + struct PacingSettings { + enum PacingMode { + // Pacing is not used. Frames are sent to the codec back-to-back. + kNoPacing, + // Pace with the rate equal to the target video frame rate. Pacing time is + // derived from RTP timestamp. + kRealTime, + // Pace with the explicitly provided rate. + kConstantRate, + }; + PacingMode mode = PacingMode::kNoPacing; + // Pacing rate for `kConstantRate` mode. + Frequency constant_rate = Frequency::Zero(); + }; + + struct VideoSourceSettings { + std::string file_path; + Resolution resolution; + Frequency framerate; + }; + + struct DecoderSettings { + PacingSettings pacing_settings; + std::optional decoder_input_base_path; + std::optional decoder_output_base_path; + }; + + struct EncoderSettings { + PacingSettings pacing_settings; + std::optional encoder_input_base_path; + std::optional encoder_output_base_path; + }; + + virtual ~VideoCodecTester() = default; + + // Interface for a coded video frames source. + class CodedVideoSource { + public: + virtual ~CodedVideoSource() = default; + + // Returns the next frame. Returns `std::nullopt` if the end-of-stream is + // reached. Frames should have RTP timestamps representing the desired frame + // rate. + virtual std::optional PullFrame() = 0; + }; + + // A helper function that creates `EncodingSettings` from the given + // parameters. `bitrate` is either total, per-spatial layer, or per-spatial + // and per-temporal layer. If layer bitrates are not explicitly specified, + // then the codec-specific rate allocators are used to distribute the total + // bitrate across spatial and/or temporal layers. + static EncodingSettings CreateEncodingSettings(const Environment& env, + std::string codec_type, + std::string scalability_name, + int width, + int height, + std::vector bitrate, + Frequency framerate, + bool screencast = false, + bool frame_drop = true); + + // A helper function that creates a map of RTP timestamps to + // `EncodingSettings` for the given number of frames. + static std::map CreateFrameSettings( + const EncodingSettings& encoding_settings, + int num_frames, + uint32_t first_timestamp_rtp = 90000); + + // Decodes video, collects and returns decode metrics. + static std::unique_ptr RunDecodeTest( + const Environment& env, + CodedVideoSource* video_source, + VideoDecoderFactory* decoder_factory, + const DecoderSettings& decoder_settings, + const SdpVideoFormat& sdp_video_format); + + // Encodes video, collects and returns encode metrics. + static std::unique_ptr RunEncodeTest( + const Environment& env, + const VideoSourceSettings& source_settings, + VideoEncoderFactory* encoder_factory, + const EncoderSettings& encoder_settings, + const std::map& encoding_settings); + + // Encodes and decodes video, collects and returns encode and decode metrics. 
+ static std::unique_ptr RunEncodeDecodeTest( + const Environment& env, + const VideoSourceSettings& source_settings, + VideoEncoderFactory* encoder_factory, + VideoDecoderFactory* decoder_factory, + const EncoderSettings& encoder_settings, + const DecoderSettings& decoder_settings, + const std::map& encoding_settings); +}; + +} // namespace test +} // namespace webrtc + +#endif // TEST_VIDEO_CODEC_TESTER_H_ diff --git a/test/video_codec_tester_unittest.cc b/test/video_codec_tester_unittest.cc new file mode 100644 index 0000000000..d78c7c6416 --- /dev/null +++ b/test/video_codec_tester_unittest.cc @@ -0,0 +1,955 @@ +/* + * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "test/video_codec_tester.h" + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/scoped_refptr.h" +#include "api/test/mock_video_decoder.h" +#include "api/test/mock_video_decoder_factory.h" +#include "api/test/mock_video_encoder.h" +#include "api/test/mock_video_encoder_factory.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/frequency.h" +#include "api/units/time_delta.h" +#include "api/video/encoded_image.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" +#include "api/video_codecs/builtin_video_decoder_factory.h" +#include "api/video_codecs/builtin_video_encoder_factory.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/svc/scalability_mode_util.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" +#include "third_party/libyuv/include/libyuv/planar_functions.h" + +namespace webrtc { +namespace test { + +namespace { +using ::testing::_; +using ::testing::ElementsAre; +using ::testing::Field; +using ::testing::NiceMock; +using ::testing::Return; +using ::testing::SizeIs; +using ::testing::UnorderedElementsAreArray; +using ::testing::Values; +using ::testing::WithoutArgs; + +using VideoCodecStats = VideoCodecTester::VideoCodecStats; +using VideoSourceSettings = VideoCodecTester::VideoSourceSettings; +using CodedVideoSource = VideoCodecTester::CodedVideoSource; +using EncodingSettings = VideoCodecTester::EncodingSettings; +using LayerSettings = EncodingSettings::LayerSettings; +using LayerId = VideoCodecTester::LayerId; +using DecoderSettings = VideoCodecTester::DecoderSettings; +using EncoderSettings = VideoCodecTester::EncoderSettings; +using PacingSettings = VideoCodecTester::PacingSettings; +using PacingMode = PacingSettings::PacingMode; +using Filter = VideoCodecStats::Filter; +using Frame = VideoCodecTester::VideoCodecStats::Frame; 
+using Stream = VideoCodecTester::VideoCodecStats::Stream; + +constexpr int kWidth = 2; +constexpr int kHeight = 2; +const DataRate kBitrate = DataRate::BytesPerSec(100); +const Frequency kFramerate = Frequency::Hertz(30); +constexpr Frequency k90kHz = Frequency::Hertz(90000); + +scoped_refptr CreateYuvBuffer(uint8_t y = 0, + uint8_t u = 0, + uint8_t v = 0) { + scoped_refptr buffer(I420Buffer::Create(2, 2)); + + libyuv::I420Rect(buffer->MutableDataY(), buffer->StrideY(), + buffer->MutableDataU(), buffer->StrideU(), + buffer->MutableDataV(), buffer->StrideV(), 0, 0, + buffer->width(), buffer->height(), y, u, v); + return buffer; +} + +// TODO(ssilkin): Wrap this into a class that removes file in dtor. +std::string CreateYuvFile(int width, int height, int num_frames) { + std::string path = webrtc::test::TempFilename(webrtc::test::OutputPath(), + "video_codec_tester_unittest"); + FILE* file = fopen(path.c_str(), "wb"); + for (int frame_num = 0; frame_num < num_frames; ++frame_num) { + // For purposes of testing quality estimation, we need Y, U, V values in + // source and decoded video to be unique and deterministic. In source video + // we make them functions of frame number. The test decoder makes them + // functions of encoded frame size in decoded video. + uint8_t y = (frame_num * 3 + 0) & 255; + uint8_t u = (frame_num * 3 + 1) & 255; + uint8_t v = (frame_num * 3 + 2) & 255; + scoped_refptr buffer = CreateYuvBuffer(y, u, v); + fwrite(buffer->DataY(), 1, width * height, file); + int chroma_size_bytes = (width + 1) / 2 * (height + 1) / 2; + fwrite(buffer->DataU(), 1, chroma_size_bytes, file); + fwrite(buffer->DataV(), 1, chroma_size_bytes, file); + } + fclose(file); + return path; +} + +class TestVideoEncoder : public MockVideoEncoder { + public: + TestVideoEncoder(ScalabilityMode scalability_mode, + std::vector> encoded_frames) + : scalability_mode_(scalability_mode), encoded_frames_(encoded_frames) {} + int32_t Encode(const VideoFrame& input_frame, + const std::vector*) override { + for (const Frame& frame : encoded_frames_[num_encoded_frames_]) { + if (frame.frame_size.IsZero()) { + continue; // Frame drop. + } + EncodedImage encoded_frame; + encoded_frame._encodedWidth = frame.width; + encoded_frame._encodedHeight = frame.height; + encoded_frame.SetFrameType(frame.keyframe + ? 
VideoFrameType::kVideoFrameKey + : VideoFrameType::kVideoFrameDelta); + encoded_frame.SetRtpTimestamp(input_frame.rtp_timestamp()); + encoded_frame.SetSpatialIndex(frame.layer_id.spatial_idx); + encoded_frame.SetTemporalIndex(frame.layer_id.temporal_idx); + encoded_frame.SetEncodedData( + EncodedImageBuffer::Create(frame.frame_size.bytes())); + CodecSpecificInfo codec_specific_info; + codec_specific_info.scalability_mode = scalability_mode_; + callback_->OnEncodedImage(encoded_frame, &codec_specific_info); + } + ++num_encoded_frames_; + return WEBRTC_VIDEO_CODEC_OK; + } + + int32_t RegisterEncodeCompleteCallback( + EncodedImageCallback* callback) override { + callback_ = callback; + return WEBRTC_VIDEO_CODEC_OK; + } + + private: + ScalabilityMode scalability_mode_; + std::vector> encoded_frames_; + int num_encoded_frames_ = 0; + EncodedImageCallback* callback_; +}; + +class TestVideoDecoder : public MockVideoDecoder { + public: + int32_t Decode(const EncodedImage& encoded_frame, int64_t) { + uint8_t y = (encoded_frame.size() + 0) & 255; + uint8_t u = (encoded_frame.size() + 2) & 255; + uint8_t v = (encoded_frame.size() + 4) & 255; + scoped_refptr frame_buffer = CreateYuvBuffer(y, u, v); + VideoFrame decoded_frame = + VideoFrame::Builder() + .set_video_frame_buffer(frame_buffer) + .set_rtp_timestamp(encoded_frame.RtpTimestamp()) + .build(); + callback_->Decoded(decoded_frame); + frame_sizes_.push_back(DataSize::Bytes(encoded_frame.size())); + return WEBRTC_VIDEO_CODEC_OK; + } + + int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback) { + callback_ = callback; + return WEBRTC_VIDEO_CODEC_OK; + } + + const std::vector& frame_sizes() const { return frame_sizes_; } + + private: + DecodedImageCallback* callback_; + std::vector frame_sizes_; +}; + +class VideoCodecTesterTest : public ::testing::Test { + public: + std::unique_ptr RunEncodeDecodeTest( + std::string codec_type, + ScalabilityMode scalability_mode, + std::vector> encoded_frames, + std::optional num_source_frames = std::nullopt) { + int num_frames = encoded_frames.size(); + std::string yuv_path = + CreateYuvFile(kWidth, kHeight, num_source_frames.value_or(num_frames)); + VideoSourceSettings video_source_settings{ + .file_path = yuv_path, + .resolution = {.width = kWidth, .height = kHeight}, + .framerate = kFramerate}; + + NiceMock encoder_factory; + ON_CALL(encoder_factory, Create).WillByDefault(WithoutArgs([&] { + return std::make_unique>(scalability_mode, + encoded_frames); + })); + + NiceMock decoder_factory; + ON_CALL(decoder_factory, Create).WillByDefault(WithoutArgs([&] { + // The video codec tester destroys the decoder at the end of the test. + // The test decoder collects stats which we need to access after the + // test. To keep the decoder alive we wrap it into a wrapper and pass + // the wrapper to the tester. 
+ class DecoderWrapper : public TestVideoDecoder { + public: + explicit DecoderWrapper(TestVideoDecoder* decoder) + : decoder_(decoder) {} + int32_t Decode(const EncodedImage& encoded_frame, + int64_t render_time_ms) { + return decoder_->Decode(encoded_frame, render_time_ms); + } + int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback) { + return decoder_->RegisterDecodeCompleteCallback(callback); + } + TestVideoDecoder* decoder_; + }; + decoders_.push_back(std::make_unique>()); + return std::make_unique>(decoders_.back().get()); + })); + + int num_spatial_layers = + ScalabilityModeToNumSpatialLayers(scalability_mode); + int num_temporal_layers = + ScalabilityModeToNumTemporalLayers(scalability_mode); + std::map encoding_settings; + for (int frame_num = 0; frame_num < num_frames; ++frame_num) { + std::map layers_settings; + for (int sidx = 0; sidx < num_spatial_layers; ++sidx) { + for (int tidx = 0; tidx < num_temporal_layers; ++tidx) { + layers_settings.emplace( + LayerId{.spatial_idx = sidx, .temporal_idx = tidx}, + LayerSettings{ + .resolution = {.width = kWidth, .height = kHeight}, + .framerate = + kFramerate / (1 << (num_temporal_layers - 1 - tidx)), + .bitrate = kBitrate}); + } + } + encoding_settings.emplace( + encoded_frames[frame_num].front().timestamp_rtp, + EncodingSettings{.sdp_video_format = SdpVideoFormat(codec_type), + .scalability_mode = scalability_mode, + .layers_settings = layers_settings}); + } + + std::unique_ptr stats = + VideoCodecTester::RunEncodeDecodeTest( + env_, video_source_settings, &encoder_factory, &decoder_factory, + EncoderSettings{}, DecoderSettings{}, encoding_settings); + + remove(yuv_path.c_str()); + return stats; + } + + protected: + const Environment env_ = CreateEnvironment(); + std::vector> decoders_; +}; + +EncodedImage CreateEncodedImage(uint32_t timestamp_rtp) { + EncodedImage encoded_image; + encoded_image.SetRtpTimestamp(timestamp_rtp); + return encoded_image; +} + +class MockCodedVideoSource : public CodedVideoSource { + public: + MockCodedVideoSource(int num_frames, Frequency framerate) + : num_frames_(num_frames), frame_num_(0), framerate_(framerate) {} + + std::optional PullFrame() override { + if (frame_num_ >= num_frames_) { + return std::nullopt; + } + uint32_t timestamp_rtp = frame_num_ * k90kHz / framerate_; + ++frame_num_; + return CreateEncodedImage(timestamp_rtp); + } + + private: + int num_frames_; + int frame_num_; + Frequency framerate_; +}; + +} // namespace + +TEST_F(VideoCodecTesterTest, Slice) { + std::unique_ptr stats = + RunEncodeDecodeTest("VP9", ScalabilityMode::kL2T2, + {{{.timestamp_rtp = 0, + .layer_id = {.spatial_idx = 0, .temporal_idx = 0}, + .frame_size = DataSize::Bytes(1)}, + {.timestamp_rtp = 0, + .layer_id = {.spatial_idx = 1, .temporal_idx = 0}, + .frame_size = DataSize::Bytes(2)}}, + {{.timestamp_rtp = 1, + .layer_id = {.spatial_idx = 0, .temporal_idx = 1}, + .frame_size = DataSize::Bytes(3)}}}); + std::vector slice = stats->Slice(Filter{}, /*merge=*/false); + EXPECT_THAT(slice, + ElementsAre(Field(&Frame::frame_size, DataSize::Bytes(1)), + Field(&Frame::frame_size, DataSize::Bytes(2)), + Field(&Frame::frame_size, DataSize::Bytes(3)), + Field(&Frame::frame_size, DataSize::Bytes(0)))); + + slice = stats->Slice({.min_timestamp_rtp = 1}, /*merge=*/false); + EXPECT_THAT(slice, + ElementsAre(Field(&Frame::frame_size, DataSize::Bytes(3)), + Field(&Frame::frame_size, DataSize::Bytes(0)))); + + slice = stats->Slice({.max_timestamp_rtp = 0}, /*merge=*/false); + EXPECT_THAT(slice, + 
ElementsAre(Field(&Frame::frame_size, DataSize::Bytes(1)), + Field(&Frame::frame_size, DataSize::Bytes(2)))); + + slice = stats->Slice({.layer_id = {{.spatial_idx = 0, .temporal_idx = 0}}}, + /*merge=*/false); + EXPECT_THAT(slice, + ElementsAre(Field(&Frame::frame_size, DataSize::Bytes(1)))); + + slice = stats->Slice({.layer_id = {{.spatial_idx = 0, .temporal_idx = 1}}}, + /*merge=*/false); + EXPECT_THAT(slice, + ElementsAre(Field(&Frame::frame_size, DataSize::Bytes(1)), + Field(&Frame::frame_size, DataSize::Bytes(3)))); +} + +TEST_F(VideoCodecTesterTest, Merge) { + std::unique_ptr stats = + RunEncodeDecodeTest("VP8", ScalabilityMode::kL2T2_KEY, + {{{.timestamp_rtp = 0, + .layer_id = {.spatial_idx = 0, .temporal_idx = 0}, + .frame_size = DataSize::Bytes(1), + .keyframe = true}, + {.timestamp_rtp = 0, + .layer_id = {.spatial_idx = 1, .temporal_idx = 0}, + .frame_size = DataSize::Bytes(2)}}, + {{.timestamp_rtp = 1, + .layer_id = {.spatial_idx = 0, .temporal_idx = 1}, + .frame_size = DataSize::Bytes(4)}, + {.timestamp_rtp = 1, + .layer_id = {.spatial_idx = 1, .temporal_idx = 1}, + .frame_size = DataSize::Bytes(8)}}}); + + std::vector slice = stats->Slice(Filter{}, /*merge=*/true); + EXPECT_THAT( + slice, + ElementsAre( + AllOf(Field(&Frame::timestamp_rtp, 0), Field(&Frame::keyframe, true), + Field(&Frame::frame_size, DataSize::Bytes(3))), + AllOf(Field(&Frame::timestamp_rtp, 1), Field(&Frame::keyframe, false), + Field(&Frame::frame_size, DataSize::Bytes(12))))); +} + +struct AggregationTestParameters { + Filter filter; + double expected_keyframe_sum; + double expected_encoded_bitrate_kbps; + double expected_encoded_framerate_fps; + double expected_bitrate_mismatch_pct; + double expected_framerate_mismatch_pct; +}; + +class VideoCodecTesterTestAggregation + : public VideoCodecTesterTest, + public ::testing::WithParamInterface {}; + +TEST_P(VideoCodecTesterTestAggregation, Aggregate) { + AggregationTestParameters test_params = GetParam(); + std::unique_ptr stats = + RunEncodeDecodeTest("VP8", ScalabilityMode::kL2T2_KEY, + {{// L0T0 + {.timestamp_rtp = 0, + .layer_id = {.spatial_idx = 0, .temporal_idx = 0}, + .frame_size = DataSize::Bytes(1), + .keyframe = true}, + // L1T0 + {.timestamp_rtp = 0, + .layer_id = {.spatial_idx = 1, .temporal_idx = 0}, + .frame_size = DataSize::Bytes(2)}}, + // Emulate frame drop (frame_size = 0). + {{.timestamp_rtp = 3000, + .layer_id = {.spatial_idx = 0, .temporal_idx = 0}, + .frame_size = DataSize::Zero()}}, + {// L0T1 + {.timestamp_rtp = 87000, + .layer_id = {.spatial_idx = 0, .temporal_idx = 1}, + .frame_size = DataSize::Bytes(4)}, + // L1T1 + {.timestamp_rtp = 87000, + .layer_id = {.spatial_idx = 1, .temporal_idx = 1}, + .frame_size = DataSize::Bytes(8)}}}); + + Stream stream = stats->Aggregate(test_params.filter); + EXPECT_EQ(stream.keyframe.GetSum(), test_params.expected_keyframe_sum); + EXPECT_EQ(stream.encoded_bitrate_kbps.GetAverage(), + test_params.expected_encoded_bitrate_kbps); + EXPECT_EQ(stream.encoded_framerate_fps.GetAverage(), + test_params.expected_encoded_framerate_fps); + EXPECT_EQ(stream.bitrate_mismatch_pct.GetAverage(), + test_params.expected_bitrate_mismatch_pct); + EXPECT_EQ(stream.framerate_mismatch_pct.GetAverage(), + test_params.expected_framerate_mismatch_pct); +} + +INSTANTIATE_TEST_SUITE_P( + All, + VideoCodecTesterTestAggregation, + Values( + // No filtering. 
+ AggregationTestParameters{ + .filter = {}, + .expected_keyframe_sum = 1, + .expected_encoded_bitrate_kbps = + DataRate::BytesPerSec(15).kbps(), + .expected_encoded_framerate_fps = 2, + .expected_bitrate_mismatch_pct = + 100 * (15.0 / (kBitrate.bytes_per_sec() * 4) - 1), + .expected_framerate_mismatch_pct = 100 * + (2.0 / kFramerate.hertz() - 1)}, + // L0T0 + AggregationTestParameters{ + .filter = {.layer_id = {{.spatial_idx = 0, .temporal_idx = 0}}}, + .expected_keyframe_sum = 1, + .expected_encoded_bitrate_kbps = + DataRate::BytesPerSec(1).kbps(), + .expected_encoded_framerate_fps = 1, + .expected_bitrate_mismatch_pct = + 100 * (1.0 / kBitrate.bytes_per_sec() - 1), + .expected_framerate_mismatch_pct = + 100 * (1.0 / (kFramerate.hertz() / 2) - 1)}, + // L0T1 + AggregationTestParameters{ + .filter = {.layer_id = {{.spatial_idx = 0, .temporal_idx = 1}}}, + .expected_keyframe_sum = 1, + .expected_encoded_bitrate_kbps = + DataRate::BytesPerSec(5).kbps(), + .expected_encoded_framerate_fps = 2, + .expected_bitrate_mismatch_pct = + 100 * (5.0 / (kBitrate.bytes_per_sec() * 2) - 1), + .expected_framerate_mismatch_pct = 100 * + (2.0 / kFramerate.hertz() - 1)}, + // L1T0 + AggregationTestParameters{ + .filter = {.layer_id = {{.spatial_idx = 1, .temporal_idx = 0}}}, + .expected_keyframe_sum = 1, + .expected_encoded_bitrate_kbps = + DataRate::BytesPerSec(3).kbps(), + .expected_encoded_framerate_fps = 1, + .expected_bitrate_mismatch_pct = + 100 * (3.0 / kBitrate.bytes_per_sec() - 1), + .expected_framerate_mismatch_pct = + 100 * (1.0 / (kFramerate.hertz() / 2) - 1)}, + // L1T1 + AggregationTestParameters{ + .filter = {.layer_id = {{.spatial_idx = 1, .temporal_idx = 1}}}, + .expected_keyframe_sum = 1, + .expected_encoded_bitrate_kbps = + DataRate::BytesPerSec(11).kbps(), + .expected_encoded_framerate_fps = 2, + .expected_bitrate_mismatch_pct = + 100 * (11.0 / (kBitrate.bytes_per_sec() * 2) - 1), + .expected_framerate_mismatch_pct = 100 * (2.0 / kFramerate.hertz() - + 1)})); + +TEST_F(VideoCodecTesterTest, Psnr) { + std::unique_ptr stats = RunEncodeDecodeTest( + "VP8", ScalabilityMode::kL1T1, + {{{.timestamp_rtp = 0, .frame_size = DataSize::Bytes(2)}}, + {{.timestamp_rtp = 3000, .frame_size = DataSize::Bytes(6)}}}); + + std::vector slice = stats->Slice(Filter{}, /*merge=*/false); + ASSERT_THAT(slice, SizeIs(2)); + ASSERT_TRUE(slice[0].psnr.has_value()); + ASSERT_TRUE(slice[1].psnr.has_value()); + EXPECT_NEAR(slice[0].psnr->y, 42, 1); + EXPECT_NEAR(slice[0].psnr->u, 38, 1); + EXPECT_NEAR(slice[0].psnr->v, 36, 1); + EXPECT_NEAR(slice[1].psnr->y, 38, 1); + EXPECT_NEAR(slice[1].psnr->u, 36, 1); + EXPECT_NEAR(slice[1].psnr->v, 34, 1); +} + +TEST_F(VideoCodecTesterTest, ReversePlayback) { + std::unique_ptr stats = RunEncodeDecodeTest( + "VP8", ScalabilityMode::kL1T1, + {{{.timestamp_rtp = 0, .frame_size = DataSize::Bytes(1)}}, + {{.timestamp_rtp = 1, .frame_size = DataSize::Bytes(1)}}, + {{.timestamp_rtp = 2, .frame_size = DataSize::Bytes(1)}}, + {{.timestamp_rtp = 3, .frame_size = DataSize::Bytes(1)}}, + {{.timestamp_rtp = 4, .frame_size = DataSize::Bytes(1)}}, + {{.timestamp_rtp = 5, .frame_size = DataSize::Bytes(1)}}}, + /*num_source_frames=*/3); + + std::vector slice = stats->Slice(Filter{}, /*merge=*/false); + ASSERT_THAT(slice, SizeIs(6)); + ASSERT_TRUE(slice[0].psnr.has_value()); + ASSERT_TRUE(slice[1].psnr.has_value()); + ASSERT_TRUE(slice[2].psnr.has_value()); + ASSERT_TRUE(slice[3].psnr.has_value()); + ASSERT_TRUE(slice[4].psnr.has_value()); + ASSERT_TRUE(slice[5].psnr.has_value()); + 
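// Editorial note, not part of the original patch: the test encodes six frames
// from a three-frame source, so the source is evidently replayed in ping-pong
// order (frames 0, 1, 2, 1, 0, 1); the Y-PSNR expectations below are
// symmetric around the third frame for that reason.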
EXPECT_NEAR(slice[0].psnr->y, 48, 1); + EXPECT_NEAR(slice[1].psnr->y, 42, 1); + EXPECT_NEAR(slice[2].psnr->y, 34, 1); + EXPECT_NEAR(slice[3].psnr->y, 42, 1); + EXPECT_NEAR(slice[4].psnr->y, 48, 1); + EXPECT_NEAR(slice[5].psnr->y, 42, 1); +} + +struct ScalabilityTestParameters { + std::string codec_type; + ScalabilityMode scalability_mode; + // Temporal unit -> spatial layer -> frame size. + std::vector> encoded_frame_sizes; + std::vector expected_decode_frame_sizes; +}; + +class VideoCodecTesterTestScalability + : public VideoCodecTesterTest, + public ::testing::WithParamInterface {}; + +TEST_P(VideoCodecTesterTestScalability, EncodeDecode) { + ScalabilityTestParameters test_params = GetParam(); + std::vector> frames; + for (size_t frame_num = 0; frame_num < test_params.encoded_frame_sizes.size(); + ++frame_num) { + std::vector temporal_unit; + for (auto [sidx, frame_size] : test_params.encoded_frame_sizes[frame_num]) { + temporal_unit.push_back( + Frame{.timestamp_rtp = static_cast(3000 * frame_num), + .layer_id = {.spatial_idx = sidx, .temporal_idx = 0}, + .frame_size = frame_size, + .keyframe = (frame_num == 0 && sidx == 0)}); + } + frames.push_back(temporal_unit); + } + RunEncodeDecodeTest(test_params.codec_type, test_params.scalability_mode, + frames); + + size_t num_spatial_layers = + ScalabilityModeToNumSpatialLayers(test_params.scalability_mode); + EXPECT_EQ(num_spatial_layers, decoders_.size()); + + // Collect input frame sizes from all decoders. + std::vector decode_frame_sizes; + for (const auto& decoder : decoders_) { + const auto& frame_sizes = decoder->frame_sizes(); + decode_frame_sizes.insert(decode_frame_sizes.end(), frame_sizes.begin(), + frame_sizes.end()); + } + EXPECT_THAT(decode_frame_sizes, UnorderedElementsAreArray( + test_params.expected_decode_frame_sizes)); +} + +INSTANTIATE_TEST_SUITE_P( + All, + VideoCodecTesterTestScalability, + Values( + ScalabilityTestParameters{ + .codec_type = "VP8", + .scalability_mode = ScalabilityMode::kS2T1, + .encoded_frame_sizes = {{{0, DataSize::Bytes(1)}, + {1, DataSize::Bytes(2)}}, + {{0, DataSize::Bytes(4)}, + // Emulate frame drop. + {1, DataSize::Bytes(0)}}}, + .expected_decode_frame_sizes = {DataSize::Bytes(1), + DataSize::Bytes(2), + DataSize::Bytes(4)}, + }, + ScalabilityTestParameters{ + .codec_type = "VP9", + .scalability_mode = ScalabilityMode::kL2T1, + .encoded_frame_sizes = + {{{0, DataSize::Bytes(1)}, {1, DataSize::Bytes(2)}}, + {{0, DataSize::Bytes(4)}, {1, DataSize::Bytes(8)}}, + {{0, DataSize::Bytes(16)}, + // Emulate frame drop. + {1, DataSize::Bytes(0)}}}, + .expected_decode_frame_sizes = + {DataSize::Bytes(1), DataSize::Bytes(3), DataSize::Bytes(4), + DataSize::Bytes(12), DataSize::Bytes(16), DataSize::Bytes(16)}, + }, + ScalabilityTestParameters{ + .codec_type = "VP9", + .scalability_mode = ScalabilityMode::kL2T1_KEY, + .encoded_frame_sizes = + {{{0, DataSize::Bytes(1)}, {1, DataSize::Bytes(2)}}, + {{0, DataSize::Bytes(4)}, {1, DataSize::Bytes(8)}}, + {{0, DataSize::Bytes(16)}, + // Emulate frame drop. + {1, DataSize::Bytes(0)}}}, + .expected_decode_frame_sizes = + {DataSize::Bytes(1), DataSize::Bytes(3), DataSize::Bytes(4), + DataSize::Bytes(8), DataSize::Bytes(16)}, + }, + ScalabilityTestParameters{ + .codec_type = "VP9", + .scalability_mode = ScalabilityMode::kS2T1, + .encoded_frame_sizes = + {{{0, DataSize::Bytes(1)}, {1, DataSize::Bytes(2)}}, + {{0, DataSize::Bytes(4)}, {1, DataSize::Bytes(8)}}, + {{0, DataSize::Bytes(16)}, + // Emulate frame drop. 
+ {1, DataSize::Bytes(0)}}}, + .expected_decode_frame_sizes = + {DataSize::Bytes(1), DataSize::Bytes(2), DataSize::Bytes(4), + DataSize::Bytes(8), DataSize::Bytes(16)}, + })); + +class VideoCodecTesterTestPacing + : public ::testing::TestWithParam> { + public: + const int kSourceWidth = 2; + const int kSourceHeight = 2; + const int kNumFrames = 3; + const Frequency kFramerate = Frequency::Hertz(10); + + void SetUp() override { + source_yuv_file_path_ = CreateYuvFile(kSourceWidth, kSourceHeight, 1); + } + + void TearDown() override { remove(source_yuv_file_path_.c_str()); } + + protected: + const Environment env_ = CreateEnvironment(); + std::string source_yuv_file_path_; +}; + +TEST_P(VideoCodecTesterTestPacing, PaceEncode) { + auto [pacing_settings, expected_delta_ms] = GetParam(); + const Environment env = CreateEnvironment(); + VideoSourceSettings video_source{ + .file_path = source_yuv_file_path_, + .resolution = {.width = kSourceWidth, .height = kSourceHeight}, + .framerate = kFramerate}; + + NiceMock encoder_factory; + ON_CALL(encoder_factory, Create).WillByDefault(WithoutArgs([] { + return std::make_unique>(); + })); + + EncodingSettings encoding_settings = VideoCodecTester::CreateEncodingSettings( + env, "VP8", "L1T1", kSourceWidth, kSourceHeight, {kBitrate}, kFramerate); + std::map frame_settings = + VideoCodecTester::CreateFrameSettings(encoding_settings, kNumFrames); + + EncoderSettings encoder_settings; + encoder_settings.pacing_settings = pacing_settings; + std::vector frames = + VideoCodecTester::RunEncodeTest(env, video_source, &encoder_factory, + encoder_settings, frame_settings) + ->Slice(/*filter=*/{}, /*merge=*/false); + ASSERT_THAT(frames, SizeIs(kNumFrames)); + EXPECT_NEAR((frames[1].encode_start - frames[0].encode_start).ms(), + expected_delta_ms, 10); + EXPECT_NEAR((frames[2].encode_start - frames[1].encode_start).ms(), + expected_delta_ms, 10); +} + +TEST_P(VideoCodecTesterTestPacing, PaceDecode) { + auto [pacing_settings, expected_delta_ms] = GetParam(); + MockCodedVideoSource video_source(kNumFrames, kFramerate); + + NiceMock decoder_factory; + ON_CALL(decoder_factory, Create).WillByDefault(WithoutArgs([] { + return std::make_unique>(); + })); + + DecoderSettings decoder_settings; + decoder_settings.pacing_settings = pacing_settings; + std::vector frames = + VideoCodecTester::RunDecodeTest(env_, &video_source, &decoder_factory, + decoder_settings, SdpVideoFormat::VP8()) + ->Slice(/*filter=*/{}, /*merge=*/false); + ASSERT_THAT(frames, SizeIs(kNumFrames)); + EXPECT_NEAR((frames[1].decode_start - frames[0].decode_start).ms(), + expected_delta_ms, 10); + EXPECT_NEAR((frames[2].decode_start - frames[1].decode_start).ms(), + expected_delta_ms, 10); +} + +INSTANTIATE_TEST_SUITE_P( + DISABLED_All, + VideoCodecTesterTestPacing, + Values( + // No pacing. + std::make_tuple(PacingSettings{.mode = PacingMode::kNoPacing}, + /*expected_delta_ms=*/0), + // Real-time pacing. + std::make_tuple(PacingSettings{.mode = PacingMode::kRealTime}, + /*expected_delta_ms=*/100), + // Pace with specified constant rate. 
+ std::make_tuple(PacingSettings{.mode = PacingMode::kConstantRate, + .constant_rate = Frequency::Hertz(20)}, + /*expected_delta_ms=*/50))); + +struct EncodingSettingsTestParameters { + std::string codec_type; + std::string scalability_mode; + std::vector bitrate; + std::vector expected_bitrate; +}; + +class VideoCodecTesterTestEncodingSettings + : public ::testing::TestWithParam {}; + +TEST_P(VideoCodecTesterTestEncodingSettings, CreateEncodingSettings) { + EncodingSettingsTestParameters test_params = GetParam(); + EncodingSettings encoding_settings = VideoCodecTester::CreateEncodingSettings( + CreateEnvironment(), test_params.codec_type, test_params.scalability_mode, + /*width=*/1280, + /*height=*/720, test_params.bitrate, kFramerate); + const std::map& layers_settings = + encoding_settings.layers_settings; + std::vector configured_bitrate; + std::transform( + layers_settings.begin(), layers_settings.end(), + std::back_inserter(configured_bitrate), + [](const auto& layer_settings) { return layer_settings.second.bitrate; }); + EXPECT_EQ(configured_bitrate, test_params.expected_bitrate); +} + +INSTANTIATE_TEST_SUITE_P( + Vp8, + VideoCodecTesterTestEncodingSettings, + Values( + EncodingSettingsTestParameters{ + .codec_type = "VP8", + .scalability_mode = "L1T1", + .bitrate = {DataRate::KilobitsPerSec(1)}, + .expected_bitrate = {DataRate::KilobitsPerSec(1)}}, + EncodingSettingsTestParameters{ + .codec_type = "VP8", + .scalability_mode = "L1T1", + .bitrate = {DataRate::KilobitsPerSec(10000)}, + .expected_bitrate = {DataRate::KilobitsPerSec(10000)}}, + EncodingSettingsTestParameters{ + .codec_type = "VP8", + .scalability_mode = "L1T3", + .bitrate = {DataRate::KilobitsPerSec(1000)}, + .expected_bitrate = {DataRate::KilobitsPerSec(400), + DataRate::KilobitsPerSec(200), + DataRate::KilobitsPerSec(400)}}, + EncodingSettingsTestParameters{ + .codec_type = "VP8", + .scalability_mode = "S3T3", + .bitrate = {DataRate::KilobitsPerSec(100)}, + .expected_bitrate = + {DataRate::KilobitsPerSec(40), DataRate::KilobitsPerSec(20), + DataRate::KilobitsPerSec(40), DataRate::KilobitsPerSec(0), + DataRate::KilobitsPerSec(0), DataRate::KilobitsPerSec(0), + DataRate::KilobitsPerSec(0), DataRate::KilobitsPerSec(0), + DataRate::KilobitsPerSec(0)}}, + EncodingSettingsTestParameters{ + .codec_type = "VP8", + .scalability_mode = "S3T3", + .bitrate = {DataRate::KilobitsPerSec(10000)}, + .expected_bitrate = + {DataRate::KilobitsPerSec(60), DataRate::KilobitsPerSec(30), + DataRate::KilobitsPerSec(60), DataRate::KilobitsPerSec(200), + DataRate::KilobitsPerSec(100), DataRate::KilobitsPerSec(200), + DataRate::KilobitsPerSec(1000), DataRate::KilobitsPerSec(500), + DataRate::KilobitsPerSec(1000)}}, + EncodingSettingsTestParameters{ + .codec_type = "VP8", + .scalability_mode = "S3T3", + .bitrate = + {DataRate::KilobitsPerSec(100), DataRate::KilobitsPerSec(200), + DataRate::KilobitsPerSec(300), DataRate::KilobitsPerSec(400), + DataRate::KilobitsPerSec(500), DataRate::KilobitsPerSec(600), + DataRate::KilobitsPerSec(700), DataRate::KilobitsPerSec(800), + DataRate::KilobitsPerSec(900)}, + .expected_bitrate = { + DataRate::KilobitsPerSec(100), DataRate::KilobitsPerSec(200), + DataRate::KilobitsPerSec(300), DataRate::KilobitsPerSec(400), + DataRate::KilobitsPerSec(500), DataRate::KilobitsPerSec(600), + DataRate::KilobitsPerSec(700), DataRate::KilobitsPerSec(800), + DataRate::KilobitsPerSec(900)}})); + +INSTANTIATE_TEST_SUITE_P( + Vp9, + VideoCodecTesterTestEncodingSettings, + Values( + EncodingSettingsTestParameters{ + .codec_type = 
"VP9", + .scalability_mode = "L1T1", + .bitrate = {DataRate::KilobitsPerSec(1)}, + .expected_bitrate = {DataRate::KilobitsPerSec(1)}}, + EncodingSettingsTestParameters{ + .codec_type = "VP9", + .scalability_mode = "L1T1", + .bitrate = {DataRate::KilobitsPerSec(10000)}, + .expected_bitrate = {DataRate::KilobitsPerSec(10000)}}, + EncodingSettingsTestParameters{ + .codec_type = "VP9", + .scalability_mode = "L1T3", + .bitrate = {DataRate::KilobitsPerSec(1000)}, + .expected_bitrate = {DataRate::BitsPerSec(539811), + DataRate::BitsPerSec(163293), + DataRate::BitsPerSec(296896)}}, + EncodingSettingsTestParameters{ + .codec_type = "VP9", + .scalability_mode = "L3T3", + .bitrate = {DataRate::KilobitsPerSec(100)}, + .expected_bitrate = + {DataRate::BitsPerSec(53981), DataRate::BitsPerSec(16329), + DataRate::BitsPerSec(29690), DataRate::BitsPerSec(0), + DataRate::BitsPerSec(0), DataRate::BitsPerSec(0), + DataRate::BitsPerSec(0), DataRate::BitsPerSec(0), + DataRate::BitsPerSec(0)}}, + EncodingSettingsTestParameters{ + .codec_type = "VP9", + .scalability_mode = "L3T3", + .bitrate = {DataRate::KilobitsPerSec(10000)}, + .expected_bitrate = + {DataRate::BitsPerSec(76653), DataRate::BitsPerSec(23188), + DataRate::BitsPerSec(42159), DataRate::BitsPerSec(225641), + DataRate::BitsPerSec(68256), DataRate::BitsPerSec(124103), + DataRate::BitsPerSec(822672), DataRate::BitsPerSec(248858), + DataRate::BitsPerSec(452470)}}, + EncodingSettingsTestParameters{ + .codec_type = "VP9", + .scalability_mode = "L3T3", + .bitrate = + {DataRate::KilobitsPerSec(100), DataRate::KilobitsPerSec(200), + DataRate::KilobitsPerSec(300), DataRate::KilobitsPerSec(400), + DataRate::KilobitsPerSec(500), DataRate::KilobitsPerSec(600), + DataRate::KilobitsPerSec(700), DataRate::KilobitsPerSec(800), + DataRate::KilobitsPerSec(900)}, + .expected_bitrate = { + DataRate::KilobitsPerSec(100), DataRate::KilobitsPerSec(200), + DataRate::KilobitsPerSec(300), DataRate::KilobitsPerSec(400), + DataRate::KilobitsPerSec(500), DataRate::KilobitsPerSec(600), + DataRate::KilobitsPerSec(700), DataRate::KilobitsPerSec(800), + DataRate::KilobitsPerSec(900)}})); + +INSTANTIATE_TEST_SUITE_P( + Av1, + VideoCodecTesterTestEncodingSettings, + Values( + EncodingSettingsTestParameters{ + .codec_type = "AV1", + .scalability_mode = "L1T1", + .bitrate = {DataRate::KilobitsPerSec(1)}, + .expected_bitrate = {DataRate::KilobitsPerSec(1)}}, + EncodingSettingsTestParameters{ + .codec_type = "AV1", + .scalability_mode = "L1T1", + .bitrate = {DataRate::KilobitsPerSec(10000)}, + .expected_bitrate = {DataRate::KilobitsPerSec(10000)}}, + EncodingSettingsTestParameters{ + .codec_type = "AV1", + .scalability_mode = "L1T3", + .bitrate = {DataRate::KilobitsPerSec(1000)}, + .expected_bitrate = {DataRate::BitsPerSec(539811), + DataRate::BitsPerSec(163293), + DataRate::BitsPerSec(296896)}}, + EncodingSettingsTestParameters{ + .codec_type = "AV1", + .scalability_mode = "L3T3", + .bitrate = {DataRate::KilobitsPerSec(100)}, + .expected_bitrate = + {DataRate::BitsPerSec(53981), DataRate::BitsPerSec(16329), + DataRate::BitsPerSec(29690), DataRate::BitsPerSec(0), + DataRate::BitsPerSec(0), DataRate::BitsPerSec(0), + DataRate::BitsPerSec(0), DataRate::BitsPerSec(0), + DataRate::BitsPerSec(0)}}, + EncodingSettingsTestParameters{ + .codec_type = "AV1", + .scalability_mode = "L3T3", + .bitrate = {DataRate::KilobitsPerSec(10000)}, + .expected_bitrate = + {DataRate::BitsPerSec(76653), DataRate::BitsPerSec(23188), + DataRate::BitsPerSec(42159), DataRate::BitsPerSec(225641), + 
DataRate::BitsPerSec(68256), DataRate::BitsPerSec(124103), + DataRate::BitsPerSec(822672), DataRate::BitsPerSec(248858), + DataRate::BitsPerSec(452470)}}, + EncodingSettingsTestParameters{ + .codec_type = "AV1", + .scalability_mode = "L3T3", + .bitrate = + {DataRate::KilobitsPerSec(100), DataRate::KilobitsPerSec(200), + DataRate::KilobitsPerSec(300), DataRate::KilobitsPerSec(400), + DataRate::KilobitsPerSec(500), DataRate::KilobitsPerSec(600), + DataRate::KilobitsPerSec(700), DataRate::KilobitsPerSec(800), + DataRate::KilobitsPerSec(900)}, + .expected_bitrate = { + DataRate::KilobitsPerSec(100), DataRate::KilobitsPerSec(200), + DataRate::KilobitsPerSec(300), DataRate::KilobitsPerSec(400), + DataRate::KilobitsPerSec(500), DataRate::KilobitsPerSec(600), + DataRate::KilobitsPerSec(700), DataRate::KilobitsPerSec(800), + DataRate::KilobitsPerSec(900)}})); + +// TODO(webrtc:42225151): Add an IVF test stream and enable the test. +TEST(VideoCodecTester, DISABLED_CompressedVideoSource) { + const Environment env = CreateEnvironment(); + std::unique_ptr encoder_factory = + CreateBuiltinVideoEncoderFactory(); + std::unique_ptr decoder_factory = + CreateBuiltinVideoDecoderFactory(); + + VideoSourceSettings source_settings{ + .file_path = ".ivf", + .resolution = {.width = 320, .height = 180}, + .framerate = Frequency::Hertz(30)}; + + EncodingSettings encoding_settings = VideoCodecTester::CreateEncodingSettings( + env, "AV1", "L1T1", 320, 180, {DataRate::KilobitsPerSec(128)}, + Frequency::Hertz(30)); + + std::map frame_settings = + VideoCodecTester::CreateFrameSettings(encoding_settings, 3); + + std::unique_ptr stats = + VideoCodecTester::RunEncodeDecodeTest( + env, source_settings, encoder_factory.get(), decoder_factory.get(), + EncoderSettings{}, DecoderSettings{}, frame_settings); + + std::vector slice = stats->Slice(Filter{}, /*merge=*/false); + ASSERT_THAT(slice, SizeIs(3)); + ASSERT_TRUE(slice[0].psnr.has_value()); + ASSERT_TRUE(slice[1].psnr.has_value()); + ASSERT_TRUE(slice[2].psnr.has_value()); + EXPECT_NEAR(slice[0].psnr->y, 42, 1); + EXPECT_NEAR(slice[1].psnr->y, 38, 1); + EXPECT_NEAR(slice[1].psnr->v, 38, 1); +} + +} // namespace test +} // namespace webrtc diff --git a/test/video_decoder_proxy_factory.h b/test/video_decoder_proxy_factory.h index f2b318eadf..2a22f66f2d 100644 --- a/test/video_decoder_proxy_factory.h +++ b/test/video_decoder_proxy_factory.h @@ -34,8 +34,8 @@ class VideoDecoderProxyFactory final : public VideoDecoderFactory { return {}; } - std::unique_ptr CreateVideoDecoder( - const SdpVideoFormat& format) override { + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override { return std::make_unique(decoder_); } diff --git a/test/video_encoder_nullable_proxy_factory.h b/test/video_encoder_nullable_proxy_factory.h index da81fff343..d27afb4caf 100644 --- a/test/video_encoder_nullable_proxy_factory.h +++ b/test/video_encoder_nullable_proxy_factory.h @@ -14,6 +14,7 @@ #include #include +#include "api/environment/environment.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" #include "test/video_encoder_proxy_factory.h" @@ -30,12 +31,12 @@ class VideoEncoderNullableProxyFactory final : public VideoEncoderProxyFactory { ~VideoEncoderNullableProxyFactory() override = default; - std::unique_ptr CreateVideoEncoder( - const SdpVideoFormat& format) override { + std::unique_ptr Create(const Environment& env, + const SdpVideoFormat& format) override { if (!encoder_) { return nullptr; } - return 
VideoEncoderProxyFactory::CreateVideoEncoder(format); + return VideoEncoderProxyFactory::Create(env, format); } }; diff --git a/test/video_encoder_proxy_factory.h b/test/video_encoder_proxy_factory.h index 99796c0054..5560fb8918 100644 --- a/test/video_encoder_proxy_factory.h +++ b/test/video_encoder_proxy_factory.h @@ -14,6 +14,7 @@ #include #include +#include "api/environment/environment.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_factory.h" @@ -45,8 +46,8 @@ class VideoEncoderProxyFactory : public VideoEncoderFactory { return {}; } - std::unique_ptr<VideoEncoder> CreateVideoEncoder( - const SdpVideoFormat& format) override { + std::unique_ptr<VideoEncoder> Create(const Environment& env, + const SdpVideoFormat& format) override { ++num_simultaneous_encoder_instances_; max_num_simultaneous_encoder_instances_ = std::max(max_num_simultaneous_encoder_instances_, @@ -126,17 +127,17 @@ class VideoEncoderProxyFactory : public VideoEncoderFactory { encoder_selector_->OnCurrentEncoder(format); } - absl::optional<SdpVideoFormat> OnAvailableBitrate( + std::optional<SdpVideoFormat> OnAvailableBitrate( const DataRate& rate) override { return encoder_selector_->OnAvailableBitrate(rate); } - absl::optional<SdpVideoFormat> OnResolutionChange( + std::optional<SdpVideoFormat> OnResolutionChange( const RenderResolution& resolution) override { return encoder_selector_->OnResolutionChange(resolution); } - absl::optional<SdpVideoFormat> OnEncoderBroken() override { + std::optional<SdpVideoFormat> OnEncoderBroken() override { return encoder_selector_->OnEncoderBroken(); } diff --git a/test/video_renderer.h b/test/video_renderer.h index 9e580f6f59..378afed160 100644 --- a/test/video_renderer.h +++ b/test/video_renderer.h @@ -18,7 +18,7 @@ namespace webrtc { class VideoFrame; namespace test { -class VideoRenderer : public rtc::VideoSinkInterface<VideoFrame> { +class VideoRenderer : public VideoSinkInterface<VideoFrame> { public: // Creates a platform-specific renderer if possible, or a null implementation // if failing. diff --git a/test/video_test_constants.h b/test/video_test_constants.h index 732d4f0056..b9083987ed 100644 --- a/test/video_test_constants.h +++ b/test/video_test_constants.h @@ -31,6 +31,7 @@ class VideoTestConstants { kRtxRedPayloadType = 99, kVideoSendPayloadType = 100, kAudioSendPayloadType = 103, + kPayloadTypeH265 = 117, kRedPayloadType = 118, kUlpfecPayloadType = 119, kFlexfecPayloadType = 120, diff --git a/test/wait_until.cc b/test/wait_until.cc new file mode 100644 index 0000000000..b92f32cc5c --- /dev/null +++ b/test/wait_until.cc @@ -0,0 +1,53 @@ +/* + * Copyright 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "test/wait_until.h" + +#include <variant> + +#include "absl/functional/overload.h" +#include "api/test/time_controller.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/thread.h" +#include "rtc_base/time_utils.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { +namespace wait_until_internal { +
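+// Descriptive note (added here for readability; it restates what the visitor +// below does and nothing more): GetTimeFromClockVariant() reads "now" from +// whichever clock the caller put into WaitUntilSettings. std::monostate uses +// the real wall clock via TimeMicros(), SimulatedClock and TimeController +// report their own CurrentTime(), and the remaining fake clocks are converted +// from TimeNanos() to microseconds.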
+Timestamp GetTimeFromClockVariant(const ClockVariant& clock) { + return std::visit( + absl::Overload{ + [](const std::monostate&) { return Timestamp::Micros(TimeMicros()); }, + [](SimulatedClock* clock) { return clock->CurrentTime(); }, + [](TimeController* time_controller) { + return time_controller->GetClock()->CurrentTime(); + }, + [](auto* clock) { + return Timestamp::Micros(clock->TimeNanos() / 1000); + }, + }, + clock); +} + +void AdvanceTimeOnClockVariant(ClockVariant& clock, TimeDelta delta) { + std::visit(absl::Overload{ + [&](const std::monostate&) { + Thread::Current()->ProcessMessages(0); + Thread::Current()->SleepMs(delta.ms()); + }, + [&](auto* clock) { clock->AdvanceTime(delta); }, + }, + clock); +} + +} // namespace wait_until_internal +} // namespace webrtc diff --git a/test/wait_until.h b/test/wait_until.h new file mode 100644 index 0000000000..1d45fdcdc9 --- /dev/null +++ b/test/wait_until.h @@ -0,0 +1,100 @@ +/* + * Copyright 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef TEST_WAIT_UNTIL_H_ +#define TEST_WAIT_UNTIL_H_ + +#include <string> +#include <variant> + +#include "api/rtc_error.h" +#include "api/test/time_controller.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/checks.h" +#include "rtc_base/fake_clock.h" +#include "rtc_base/thread.h" +#include "system_wrappers/include/clock.h" +#include "test/gmock.h" +#include "test/wait_until_internal.h" // IWYU pragma: private + +namespace webrtc { + +using ClockVariant = std::variant; + +namespace wait_until_internal { +Timestamp GetTimeFromClockVariant(const ClockVariant& clock); +void AdvanceTimeOnClockVariant(ClockVariant& clock, TimeDelta delta); +} // namespace wait_until_internal + +struct WaitUntilSettings { + // The maximum time to wait for the condition to be met. + TimeDelta timeout = TimeDelta::Seconds(5); + // The interval between polling the condition. + TimeDelta polling_interval = TimeDelta::Millis(1); + // The clock to use for timing. + ClockVariant clock = std::monostate(); + // Name of the result to be used in the error message. + std::string result_name = "result"; +}; + +// Runs a function `fn`, which returns a result, until `matcher` matches the +// result. +// +// The function is called repeatedly until the result matches the matcher or the +// timeout is reached. If the matcher matches the result, the result is +// returned. Otherwise, an error is returned. +// +// Example: +// +// int counter = 0; +// RTCErrorOr<int> result = WaitUntil([&] { return ++counter; }, Eq(3)); +// EXPECT_THAT(result, IsRtcOkAndHolds(3));
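+// +// A WaitUntilSettings argument controls the timeout, the polling interval and +// the clock. A sketch based on the tests in wait_until_unittest.cc (the +// clock's start time is arbitrary): +// +//   SimulatedClock clock(Timestamp::Millis(1337)); +//   RTCErrorOr<int> result = +//       WaitUntil([&] { return ++counter; }, Eq(3), +//                 {.timeout = TimeDelta::Seconds(1), .clock = &clock}); +//   EXPECT_THAT(result, IsRtcOkAndHolds(3));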
An " + "webrtc::AutoThread can work for tests."; + } + + Timestamp start = + wait_until_internal::GetTimeFromClockVariant(settings.clock); + do { + auto result = fn(); + if (::testing::Value(result, matcher)) { + return result; + } + wait_until_internal::AdvanceTimeOnClockVariant(settings.clock, + settings.polling_interval); + } while (wait_until_internal::GetTimeFromClockVariant(settings.clock) < + start + settings.timeout); + + // One more try after the last sleep. This failure will contain the error + // message. + auto result = fn(); + ::testing::StringMatchResultListener listener; + if (wait_until_internal::ExplainMatchResult(matcher, result, &listener, + settings.result_name)) { + return result; + } + + return RTCError(RTCErrorType::INTERNAL_ERROR, listener.str()); +} + +} // namespace webrtc + +#endif // TEST_WAIT_UNTIL_H_ diff --git a/test/wait_until_internal.h b/test/wait_until_internal.h new file mode 100644 index 0000000000..ce1f7095b6 --- /dev/null +++ b/test/wait_until_internal.h @@ -0,0 +1,59 @@ +/* + * Copyright 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef TEST_WAIT_UNTIL_INTERNAL_H_ +#define TEST_WAIT_UNTIL_INTERNAL_H_ + +#include + +#include "absl/base/nullability.h" +#include "absl/strings/string_view.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace wait_until_internal { + +// Explains the match result of `matcher` against `value` to `listener`. +// `value_name` is the name of the value to be used in the error message. +// This is inspired by testing::ExplainMatchResult and +// testing::internal::MatchPrintAndExplain. +template +bool ExplainMatchResult(const M& matcher, + const T& value, + ::testing::StringMatchResultListener* absl_nonnull + listener, + absl::string_view value_name) { + // SafeMatcherCast is required for matchers whose type does not match the + // argument type. + ::testing::Matcher safe_matcher = + ::testing::SafeMatcherCast(matcher); + + auto* ss = listener->stream(); + *ss << "Value of: " << value_name << "\n"; + *ss << "Expected: "; + safe_matcher.DescribeTo(ss); + *ss << "\nActual: "; + ::testing::StringMatchResultListener inner_listener; + if (::testing::ExplainMatchResult(safe_matcher, value, &inner_listener)) { + return true; + } + *ss << ::testing::PrintToString(value); + if (const std::string& inner_message = inner_listener.str(); + !inner_message.empty()) { + *ss << ", " << inner_message; + } + return false; +} + +} // namespace wait_until_internal +} // namespace webrtc + +#endif // TEST_WAIT_UNTIL_INTERNAL_H_ diff --git a/test/wait_until_unittest.cc b/test/wait_until_unittest.cc new file mode 100644 index 0000000000..6eff548b33 --- /dev/null +++ b/test/wait_until_unittest.cc @@ -0,0 +1,129 @@ +/* + * Copyright 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+template <typename M, typename T> +bool ExplainMatchResult(const M& matcher, + const T& value, + ::testing::StringMatchResultListener* absl_nonnull + listener, + absl::string_view value_name) { + // SafeMatcherCast is required for matchers whose type does not match the + // argument type. + ::testing::Matcher<const T&> safe_matcher = + ::testing::SafeMatcherCast<const T&>(matcher); + + auto* ss = listener->stream(); + *ss << "Value of: " << value_name << "\n"; + *ss << "Expected: "; + safe_matcher.DescribeTo(ss); + *ss << "\nActual: "; + ::testing::StringMatchResultListener inner_listener; + if (::testing::ExplainMatchResult(safe_matcher, value, &inner_listener)) { + return true; + } + *ss << ::testing::PrintToString(value); + if (const std::string& inner_message = inner_listener.str(); + !inner_message.empty()) { + *ss << ", " << inner_message; + } + return false; +} + +} // namespace wait_until_internal +} // namespace webrtc + +#endif  // TEST_WAIT_UNTIL_INTERNAL_H_ diff --git a/test/wait_until_unittest.cc b/test/wait_until_unittest.cc new file mode 100644 index 0000000000..6eff548b33 --- /dev/null +++ b/test/wait_until_unittest.cc @@ -0,0 +1,129 @@ +/* + * Copyright 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "test/wait_until.h" + +#include <memory> + +#include "api/rtc_error.h" +#include "api/test/create_time_controller.h" +#include "api/test/rtc_error_matchers.h" +#include "api/test/time_controller.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/fake_clock.h" +#include "rtc_base/thread.h" +#include "system_wrappers/include/clock.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using testing::_; +using testing::AllOf; +using testing::Eq; +using testing::Ge; +using testing::Gt; +using testing::Lt; +using testing::MatchesRegex; + +TEST(WaitUntilTest, ReturnsWhenConditionIsMet) { + AutoThread thread; + + int counter = 0; + RTCErrorOr<int> result = WaitUntil([&] { return ++counter; }, Eq(3)); + EXPECT_THAT(result, IsRtcOkAndHolds(3)); +} + +TEST(WaitUntilTest, ReturnsErrorWhenTimeoutIsReached) { + AutoThread thread; + int counter = 0; + RTCErrorOr<int> result = + WaitUntil([&] { return --counter; }, Eq(1), + {.timeout = TimeDelta::Millis(10), .result_name = "counter"}); + // Only returns the last error. Note we are only checking that the error + // message ends with a negative number rather than a specific number to avoid + // flakiness. + EXPECT_THAT( + result, + IsRtcErrorOrWithMessage( + _, MatchesRegex( + "Value of: counter\nExpected: is equal to 1\nActual: -\\d+"))); +} + +TEST(WaitUntilTest, ErrorContainsMatcherExplanation) { + AutoThread thread; + int counter = 0; + auto matcher = AllOf(Gt(0), Lt(10)); + RTCErrorOr<int> result = + WaitUntil([&] { return --counter; }, matcher, + {.timeout = TimeDelta::Millis(10), .result_name = "counter"}); + // Only returns the last error. Note we are only checking that the error + // message ends with a negative number rather than a specific number to avoid + // flakiness. + EXPECT_THAT( + result, + IsRtcErrorOrWithMessage( + _, MatchesRegex("Value of: counter\nExpected: \\(is > 0\\) and " + "\\(is < 10\\)\nActual: -\\d+, which doesn't match " + "\\(is > 0\\)"))); +} + +TEST(WaitUntilTest, ReturnsWhenConditionIsMetWithSimulatedClock) { + SimulatedClock fake_clock(Timestamp::Millis(1337)); + + int counter = 0; + RTCErrorOr<int> result = + WaitUntil([&] { return ++counter; }, Eq(3), {.clock = &fake_clock}); + EXPECT_THAT(result, IsRtcOkAndHolds(3)); + // The fake clock should have advanced at least 2ms. + EXPECT_THAT(fake_clock.CurrentTime(), Ge(Timestamp::Millis(1339))); +} + +TEST(WaitUntilTest, ReturnsWhenConditionIsMetWithThreadProcessingFakeClock) { + ScopedFakeClock fake_clock; + + int counter = 0; + RTCErrorOr<int> result = + WaitUntil([&] { return ++counter; }, Eq(3), {.clock = &fake_clock}); + EXPECT_THAT(result, IsRtcOkAndHolds(3)); + // The fake clock should have advanced at least 2ms. + EXPECT_THAT(Timestamp::Micros(fake_clock.TimeNanos() * 1000), + Ge(Timestamp::Millis(1339))); +} + +TEST(WaitUntilTest, ReturnsWhenConditionIsMetWithFakeClock) { + FakeClock fake_clock; + + int counter = 0; + RTCErrorOr<int> result = + WaitUntil([&] { return ++counter; }, Eq(3), {.clock = &fake_clock}); + EXPECT_THAT(result, IsRtcOkAndHolds(3)); + // The fake clock should have advanced at least 2ms.
+ EXPECT_THAT(Timestamp::Micros(fake_clock.TimeNanos() * 1000), + Ge(Timestamp::Millis(1339))); +} + +TEST(WaitUntilTest, ReturnsWhenConditionIsMetWithSimulatedTimeController) { + std::unique_ptr time_controller = + CreateSimulatedTimeController(); + + int counter = 0; + RTCErrorOr result = WaitUntil([&] { return ++counter; }, Eq(3), + {.clock = time_controller.get()}); + EXPECT_THAT(result, IsRtcOkAndHolds(3)); + // The fake clock should have advanced at least 2ms. + EXPECT_THAT(time_controller->GetClock()->CurrentTime(), + Ge(Timestamp::Millis(1339))); +} + +} // namespace +} // namespace webrtc diff --git a/test/win/d3d_renderer.h b/test/win/d3d_renderer.h index 9e5e23c328..fd98bde7ec 100644 --- a/test/win/d3d_renderer.h +++ b/test/win/d3d_renderer.h @@ -43,11 +43,11 @@ class D3dRenderer : public VideoRenderer { size_t width_, height_; HWND hwnd_; - rtc::scoped_refptr d3d_; - rtc::scoped_refptr d3d_device_; + webrtc::scoped_refptr d3d_; + webrtc::scoped_refptr d3d_device_; - rtc::scoped_refptr texture_; - rtc::scoped_refptr vertex_buffer_; + webrtc::scoped_refptr texture_; + webrtc::scoped_refptr vertex_buffer_; }; } // namespace test } // namespace webrtc diff --git a/tools_webrtc/OWNERS b/tools_webrtc/OWNERS index f73dc520b8..90ac9c9cc2 100644 --- a/tools_webrtc/OWNERS +++ b/tools_webrtc/OWNERS @@ -1,5 +1,4 @@ mbonadei@webrtc.org jansson@webrtc.org terelius@webrtc.org -landrey@webrtc.org jleconte@webrtc.org diff --git a/tools_webrtc/android/build_aar.py b/tools_webrtc/android/build_aar.py index d910b39a7c..d3da72694c 100755 --- a/tools_webrtc/android/build_aar.py +++ b/tools_webrtc/android/build_aar.py @@ -51,219 +51,216 @@ def _ParseArgs(): - parser = argparse.ArgumentParser(description='libwebrtc.aar generator.') - parser.add_argument( - '--build-dir', - type=os.path.abspath, - help='Build dir. By default will create and use temporary dir.') - parser.add_argument('--output', - default='libwebrtc.aar', - type=os.path.abspath, - help='Output file of the script.') - parser.add_argument('--arch', - default=DEFAULT_ARCHS, - nargs='*', - help='Architectures to build. Defaults to %(default)s.') - parser.add_argument('--use-goma', - action='store_true', - default=False, - help='Use goma.') - parser.add_argument('--use-remoteexec', - action='store_true', - default=False, - help='Use RBE.') - parser.add_argument('--use-unstripped-libs', - action='store_true', - default=False, - help='Use unstripped .so files within libwebrtc.aar') - parser.add_argument('--verbose', - action='store_true', - default=False, - help='Debug logging.') - parser.add_argument( - '--extra-gn-args', - default=[], - nargs='*', - help="""Additional GN arguments to be used during Ninja generation. + parser = argparse.ArgumentParser(description='libwebrtc.aar generator.') + parser.add_argument( + '--build-dir', + type=os.path.abspath, + help='Build dir. By default will create and use temporary dir.') + parser.add_argument('--output', + default='libwebrtc.aar', + type=os.path.abspath, + help='Output file of the script.') + parser.add_argument( + '--arch', + default=DEFAULT_ARCHS, + nargs='*', + help='Architectures to build. 
Defaults to %(default)s.') + parser.add_argument('--use-remoteexec', + action='store_true', + default=False, + help='Use RBE.') + parser.add_argument('--use-unstripped-libs', + action='store_true', + default=False, + help='Use unstripped .so files within libwebrtc.aar') + parser.add_argument('--verbose', + action='store_true', + default=False, + help='Debug logging.') + parser.add_argument( + '--extra-gn-args', + default=[], + nargs='*', + help="""Additional GN arguments to be used during Ninja generation. These are passed to gn inside `--args` switch and applied after any other arguments and will override any values defined by the script. Example of building debug aar file: build_aar.py --extra-gn-args='is_debug=true'""") - parser.add_argument( - '--extra-ninja-switches', - default=[], - nargs='*', - help="""Additional Ninja switches to be used during compilation. + parser.add_argument( + '--extra-ninja-switches', + default=[], + nargs='*', + help="""Additional Ninja switches to be used during compilation. These are applied after any other Ninja switches. Example of enabling verbose Ninja output: build_aar.py --extra-ninja-switches='-v'""") - parser.add_argument( - '--extra-gn-switches', - default=[], - nargs='*', - help="""Additional GN switches to be used during compilation. + parser.add_argument( + '--extra-gn-switches', + default=[], + nargs='*', + help="""Additional GN switches to be used during compilation. These are applied after any other GN switches. Example of enabling verbose GN output: build_aar.py --extra-gn-switches='-v'""") - return parser.parse_args() + return parser.parse_args() def _RunGN(args): - cmd = [ - sys.executable, - os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py') - ] - cmd.extend(args) - logging.debug('Running: %r', cmd) - subprocess.check_call(cmd) + cmd = [ + sys.executable, + os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py') + ] + cmd.extend(args) + logging.debug('Running: %r', cmd) + subprocess.check_call(cmd) def _RunNinja(output_directory, args): - cmd = [ - os.path.join(SRC_DIR, 'third_party', 'ninja', 'ninja'), '-C', - output_directory - ] - cmd.extend(args) - logging.debug('Running: %r', cmd) - subprocess.check_call(cmd) + cmd = [ + os.path.join(SRC_DIR, 'third_party', 'ninja', 'ninja'), '-C', + output_directory + ] + cmd.extend(args) + logging.debug('Running: %r', cmd) + subprocess.check_call(cmd) def _EncodeForGN(value): - """Encodes value as a GN literal.""" - if isinstance(value, str): - return '"' + value + '"' - if isinstance(value, bool): - return repr(value).lower() - return repr(value) + """Encodes value as a GN literal.""" + if isinstance(value, str): + return '"' + value + '"' + if isinstance(value, bool): + return repr(value).lower() + return repr(value) def _GetOutputDirectory(build_dir, arch): - """Returns the GN output directory for the target architecture.""" - return os.path.join(build_dir, arch) + """Returns the GN output directory for the target architecture.""" + return os.path.join(build_dir, arch) def _GetTargetCpu(arch): - """Returns target_cpu for the GN build with the given architecture.""" - if arch in ['armeabi', 'armeabi-v7a']: - return 'arm' - if arch == 'arm64-v8a': - return 'arm64' - if arch == 'x86': - return 'x86' - if arch == 'x86_64': - return 'x64' - raise Exception('Unknown arch: ' + arch) + """Returns target_cpu for the GN build with the given architecture.""" + if arch in ['armeabi', 'armeabi-v7a']: + return 'arm' + if arch == 'arm64-v8a': + return 'arm64' + if arch == 'x86': + return 'x86' + if 
arch == 'x86_64': + return 'x64' + raise Exception('Unknown arch: ' + arch) def _GetArmVersion(arch): - """Returns arm_version for the GN build with the given architecture.""" - if arch == 'armeabi': - return 6 - if arch == 'armeabi-v7a': - return 7 - if arch in ['arm64-v8a', 'x86', 'x86_64']: - return None - raise Exception('Unknown arch: ' + arch) - - -def Build(build_dir, arch, use_goma, use_remoteexec, extra_gn_args, - extra_gn_switches, extra_ninja_switches): - """Generates target architecture using GN and builds it using ninja.""" - logging.info('Building: %s', arch) - output_directory = _GetOutputDirectory(build_dir, arch) - gn_args = { - 'target_os': 'android', - 'is_debug': False, - 'is_component_build': False, - 'rtc_include_tests': False, - 'target_cpu': _GetTargetCpu(arch), - 'use_goma': use_goma, - 'use_remoteexec': use_remoteexec, - } - arm_version = _GetArmVersion(arch) - if arm_version: - gn_args['arm_version'] = arm_version - gn_args_str = '--args=' + ' '.join( - [k + '=' + _EncodeForGN(v) for k, v in gn_args.items()] + extra_gn_args) - - gn_args_list = ['gen', output_directory, gn_args_str] - gn_args_list.extend(extra_gn_switches) - _RunGN(gn_args_list) - - ninja_args = TARGETS[:] - if use_goma or use_remoteexec: - ninja_args.extend(['-j', '200']) - ninja_args.extend(extra_ninja_switches) - _RunNinja(output_directory, ninja_args) + """Returns arm_version for the GN build with the given architecture.""" + if arch == 'armeabi': + return 6 + if arch == 'armeabi-v7a': + return 7 + if arch in ['arm64-v8a', 'x86', 'x86_64']: + return None + raise Exception('Unknown arch: ' + arch) + + +def Build(build_dir, arch, use_remoteexec, extra_gn_args, extra_gn_switches, + extra_ninja_switches): + """Generates target architecture using GN and builds it using ninja.""" + logging.info('Building: %s', arch) + output_directory = _GetOutputDirectory(build_dir, arch) + gn_args = { + 'target_os': 'android', + 'is_debug': False, + 'is_component_build': False, + 'rtc_include_tests': False, + 'target_cpu': _GetTargetCpu(arch), + 'use_remoteexec': use_remoteexec, + 'android_static_analysis': "off", + } + arm_version = _GetArmVersion(arch) + if arm_version: + gn_args['arm_version'] = arm_version + gn_args_str = '--args=' + ' '.join( + [k + '=' + _EncodeForGN(v) + for k, v in gn_args.items()] + extra_gn_args) + + gn_args_list = ['gen', output_directory, gn_args_str] + gn_args_list.extend(extra_gn_switches) + _RunGN(gn_args_list) + + ninja_args = TARGETS[:] + if use_remoteexec: + ninja_args.extend(['-j', '200']) + ninja_args.extend(extra_ninja_switches) + _RunNinja(output_directory, ninja_args) def CollectCommon(aar_file, build_dir, arch): - """Collects architecture independent files into the .aar-archive.""" - logging.info('Collecting common files.') - output_directory = _GetOutputDirectory(build_dir, arch) - aar_file.write(MANIFEST_FILE, 'AndroidManifest.xml') - aar_file.write(os.path.join(output_directory, JAR_FILE), 'classes.jar') + """Collects architecture independent files into the .aar-archive.""" + logging.info('Collecting common files.') + output_directory = _GetOutputDirectory(build_dir, arch) + aar_file.write(MANIFEST_FILE, 'AndroidManifest.xml') + aar_file.write(os.path.join(output_directory, JAR_FILE), 'classes.jar') def Collect(aar_file, build_dir, arch, unstripped): - """Collects architecture specific files into the .aar-archive.""" - logging.info('Collecting: %s', arch) - output_directory = _GetOutputDirectory(build_dir, arch) + """Collects architecture specific files into the 
.aar-archive.""" + logging.info('Collecting: %s', arch) + output_directory = _GetOutputDirectory(build_dir, arch) - abi_dir = os.path.join('jni', arch) - for so_file in NEEDED_SO_FILES: - source_so_file = os.path.join("lib.unstripped", - so_file) if unstripped else so_file - aar_file.write(os.path.join(output_directory, source_so_file), - os.path.join(abi_dir, so_file)) + abi_dir = os.path.join('jni', arch) + for so_file in NEEDED_SO_FILES: + source_so_file = os.path.join("lib.unstripped", + so_file) if unstripped else so_file + aar_file.write(os.path.join(output_directory, source_so_file), + os.path.join(abi_dir, so_file)) def GenerateLicenses(output_dir, build_dir, archs): - builder = LicenseBuilder( - [_GetOutputDirectory(build_dir, arch) for arch in archs], TARGETS) - builder.GenerateLicenseText(output_dir) + builder = LicenseBuilder( + [_GetOutputDirectory(build_dir, arch) for arch in archs], TARGETS) + builder.generate_license_text(output_dir) def BuildAar(archs, output_file, - use_goma=False, use_remoteexec=False, extra_gn_args=None, ext_build_dir=None, extra_gn_switches=None, extra_ninja_switches=None, unstripped=False): - extra_gn_args = extra_gn_args or [] - extra_gn_switches = extra_gn_switches or [] - extra_ninja_switches = extra_ninja_switches or [] - build_dir = ext_build_dir if ext_build_dir else tempfile.mkdtemp() - - for arch in archs: - Build(build_dir, arch, use_goma, use_remoteexec, extra_gn_args, - extra_gn_switches, extra_ninja_switches) - - with zipfile.ZipFile(output_file, 'w') as aar_file: - # Architecture doesn't matter here, arbitrarily using the first one. - CollectCommon(aar_file, build_dir, archs[0]) + extra_gn_args = extra_gn_args or [] + extra_gn_switches = extra_gn_switches or [] + extra_ninja_switches = extra_ninja_switches or [] + build_dir = ext_build_dir if ext_build_dir else tempfile.mkdtemp() + for arch in archs: - Collect(aar_file, build_dir, arch, unstripped) + Build(build_dir, arch, use_remoteexec, extra_gn_args, + extra_gn_switches, extra_ninja_switches) + + with zipfile.ZipFile(output_file, 'w') as aar_file: + # Architecture doesn't matter here, arbitrarily using the first one. 
+ CollectCommon(aar_file, build_dir, archs[0]) + for arch in archs: + Collect(aar_file, build_dir, arch, unstripped) - license_dir = os.path.dirname(os.path.realpath(output_file)) - GenerateLicenses(license_dir, build_dir, archs) + license_dir = os.path.dirname(os.path.realpath(output_file)) + GenerateLicenses(license_dir, build_dir, archs) - if not ext_build_dir: - shutil.rmtree(build_dir, True) + if not ext_build_dir: + shutil.rmtree(build_dir, True) def main(): - args = _ParseArgs() - logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) + args = _ParseArgs() + logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) - BuildAar(args.arch, args.output, args.use_goma, args.use_remoteexec, - args.extra_gn_args, args.build_dir, args.extra_gn_switches, - args.extra_ninja_switches, args.use_unstripped_libs) + BuildAar(args.arch, args.output, args.use_remoteexec, args.extra_gn_args, + args.build_dir, args.extra_gn_switches, args.extra_ninja_switches, + args.use_unstripped_libs) if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/android/test_aar.py b/tools_webrtc/android/test_aar.py index 7eb281aa9a..2d4ce1f0d5 100755 --- a/tools_webrtc/android/test_aar.py +++ b/tools_webrtc/android/test_aar.py @@ -34,110 +34,111 @@ AAR_PROJECT_DIR = os.path.join(CHECKOUT_ROOT, 'examples/aarproject') -def _ParseArgs(): - parser = argparse.ArgumentParser(description='Releases WebRTC on Bintray.') - parser.add_argument('--use-goma', - action='store_true', - default=False, - help='Use goma.') - parser.add_argument('--skip-tests', - action='store_true', - default=False, - help='Skips running the tests.') - parser.add_argument( - '--build-dir', - default=None, - help='Temporary directory to store the build files. If not specified, ' - 'a new directory will be created.') - parser.add_argument('--verbose', - action='store_true', - default=False, - help='Debug logging.') - return parser.parse_args() - - -def _GetCommitHash(): - commit_hash = subprocess.check_output( - ['git', 'rev-parse', 'HEAD'], cwd=CHECKOUT_ROOT).decode('UTF-8').strip() - return commit_hash - - -def _GetCommitPos(): - commit_message = subprocess.check_output( - ['git', 'rev-list', '--format=%B', '--max-count=1', 'HEAD'], - cwd=CHECKOUT_ROOT).decode('UTF-8') - commit_pos_match = re.search(COMMIT_POSITION_REGEX, commit_message, - re.MULTILINE) - if not commit_pos_match: - raise Exception('Commit position not found in the commit message: %s' % - commit_message) - return commit_pos_match.group(1) - - -def _TestAAR(build_dir): - """Runs AppRTCMobile tests using the AAR. Returns true if the tests pass.""" - logging.info('Testing library.') - - # Uninstall any existing version of AppRTCMobile. - logging.info('Uninstalling previous AppRTCMobile versions. It is okay for ' - 'these commands to fail if AppRTCMobile is not installed.') - subprocess.call([ADB_BIN, 'uninstall', 'org.appspot.apprtc']) - subprocess.call([ADB_BIN, 'uninstall', 'org.appspot.apprtc.test']) - - # Run tests. - try: - # First clean the project. - subprocess.check_call([GRADLEW_BIN, 'clean'], cwd=AAR_PROJECT_DIR) - # Then run the tests. - subprocess.check_call([ - GRADLEW_BIN, 'connectedDebugAndroidTest', - '-PaarDir=' + os.path.abspath(build_dir) - ], - cwd=AAR_PROJECT_DIR) - except subprocess.CalledProcessError: - logging.exception('Test failure.') - return False # Clean or tests failed - - return True # Tests pass - - -def BuildAndTestAar(use_goma, skip_tests, build_dir): - version = '1.0.' 
+ _GetCommitPos() - commit = _GetCommitHash() - logging.info('Building and Testing AAR version %s with hash %s', version, - commit) - - # If build directory is not specified, create a temporary directory. - use_tmp_dir = not build_dir - if use_tmp_dir: - build_dir = tempfile.mkdtemp() - - try: - base_name = ARTIFACT_ID + '-' + version - aar_file = os.path.join(build_dir, base_name + '.aar') - - logging.info('Building at %s', build_dir) - BuildAar(ARCHS, - aar_file, - use_goma=use_goma, - ext_build_dir=os.path.join(build_dir, 'aar-build')) - - tests_pass = skip_tests or _TestAAR(build_dir) - if not tests_pass: - raise Exception('Test failure.') - - logging.info('Test success.') - - finally: +def _parse_args(): + parser = argparse.ArgumentParser(description='Releases WebRTC on Bintray.') + parser.add_argument('--use-remoteexec', + action='store_true', + default=False, + help='Use RBE.') + parser.add_argument('--skip-tests', + action='store_true', + default=False, + help='Skips running the tests.') + parser.add_argument( + '--build-dir', + default=None, + help='Temporary directory to store the build files. If not specified, ' + 'a new directory will be created.') + parser.add_argument('--verbose', + action='store_true', + default=False, + help='Debug logging.') + return parser.parse_args() + + +def _get_commit_hash(): + commit_hash = subprocess.check_output( + ['git', 'rev-parse', 'HEAD'], + cwd=CHECKOUT_ROOT).decode('UTF-8').strip() + return commit_hash + + +def _get_commit_pos(): + commit_message = subprocess.check_output( + ['git', 'rev-list', '--format=%B', '--max-count=1', 'HEAD'], + cwd=CHECKOUT_ROOT).decode('UTF-8') + commit_pos_match = re.search(COMMIT_POSITION_REGEX, commit_message, + re.MULTILINE) + if not commit_pos_match: + raise Exception('Commit position not found in the commit message: %s' % + commit_message) + return commit_pos_match.group(1) + + +def _test_aar(build_dir): + """Runs AppRTCMobile tests using AAR. Returns true if the tests pass.""" + logging.info('Testing library.') + + # Uninstall any existing version of AppRTCMobile. + logging.info('Uninstalling previous AppRTCMobile versions. It is okay for ' + 'these commands to fail if AppRTCMobile is not installed.') + subprocess.call([ADB_BIN, 'uninstall', 'org.appspot.apprtc']) + subprocess.call([ADB_BIN, 'uninstall', 'org.appspot.apprtc.test']) + + # Run tests. + try: + # First clean the project. + subprocess.check_call([GRADLEW_BIN, 'clean'], cwd=AAR_PROJECT_DIR) + # Then run the tests. + subprocess.check_call([ + GRADLEW_BIN, 'connectedDebugAndroidTest', + '-PaarDir=' + os.path.abspath(build_dir) + ], + cwd=AAR_PROJECT_DIR) + except subprocess.CalledProcessError: + logging.exception('Test failure.') + return False # Clean or tests failed + + return True # Tests pass + + +def build_and_test_aar(use_remoteexec, skip_tests, build_dir): + version = '1.0.' + _get_commit_pos() + commit = _get_commit_hash() + logging.info('Building and Testing AAR version %s with hash %s', version, + commit) + + # If build directory is not specified, create a temporary directory. 
+ use_tmp_dir = not build_dir if use_tmp_dir: - shutil.rmtree(build_dir, True) + build_dir = tempfile.mkdtemp() + + try: + base_name = ARTIFACT_ID + '-' + version + aar_file = os.path.join(build_dir, base_name + '.aar') + + logging.info('Building at %s', build_dir) + BuildAar(ARCHS, + aar_file, + use_remoteexec=use_remoteexec, + ext_build_dir=os.path.join(build_dir, 'aar-build')) + + tests_pass = skip_tests or _test_aar(build_dir) + if not tests_pass: + raise Exception('Test failure.') + + logging.info('Test success.') + + finally: + if use_tmp_dir: + shutil.rmtree(build_dir, True) def main(): - args = _ParseArgs() - logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) - BuildAndTestAar(args.use_goma, args.skip_tests, args.build_dir) + args = _parse_args() + logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) + build_and_test_aar(args.use_remoteexec, args.skip_tests, args.build_dir) if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/apple/generate_privacy_manifest.py b/tools_webrtc/apple/generate_privacy_manifest.py new file mode 100644 index 0000000000..8d4dad9cdc --- /dev/null +++ b/tools_webrtc/apple/generate_privacy_manifest.py @@ -0,0 +1,81 @@ +#!/usr/bin/env vpython3 + +# Copyright (c) 2024 The WebRTC Project Authors. All rights reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +__doc__ = """Generate privacy manifest of WebRTC iOS framework.""" + +import argparse +import plistlib +import sys + + +def generate_privacy_manifest(out_file): + privacy_manifest = { + "NSPrivacyTracking": + False, + "NSPrivacyCollectedDataTypes": [], + "NSPrivacyTrackingDomains": [], + "NSPrivacyAccessedAPITypes": [ + # For mach_absolute_time usage in rtc_base/system_time.cc + { + "NSPrivacyAccessedAPIType": + "NSPrivacyAccessedAPICategorySystemBootTime", + "NSPrivacyAccessedAPITypeReasons": [ + # Declare this reason to access the system boot time + # in order to measure the amount of time that has elapsed + # between events that occurred within the app or to perform + # calculations to enable timers. + "35F9.1", + # Declare this reason to access the system boot time to + # calculate absolute timestamps for events that occurred + # within your app, such as events related to the UIKit or + # AVFAudio frameworks. + "8FFB.1", + ] + }, + # For stat usage in rtc_base/file_rotating_stream.cc + # TODO: bugs.webrtc.org/337909152 - Make this optional since this + # is only used for RTCFileLogger, which is not used by default and + # not considered as a core feature. + { + "NSPrivacyAccessedAPIType": + "NSPrivacyAccessedAPICategoryFileTimestamp", + "NSPrivacyAccessedAPITypeReasons": [ + # Declare this reason to access the timestamps, size, or + # other metadata of files inside the app container, app + # group container, or the app’s CloudKit container. 
+ "C617.1" + ] + } + ] + } + + with open(out_file, 'wb') as file: + plistlib.dump(privacy_manifest, file, fmt=plistlib.FMT_XML) + + +def main(): + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument("-o", "--output", type=str, help="Output file.") + # TODO: bugs.webrtc.org/337909152 - Add an option to not to emit privacy + # manifest entries for NSPrivacyAccessedAPICategoryFileTimestamp + + args = parser.parse_args() + + if not args.output: + print("Output file is required") + return 1 + + generate_privacy_manifest(args.output) + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/tools_webrtc/autoroller/roll_deps.py b/tools_webrtc/autoroller/roll_deps.py index c57ba9c040..0ffc7b6562 100755 --- a/tools_webrtc/autoroller/roll_deps.py +++ b/tools_webrtc/autoroller/roll_deps.py @@ -21,12 +21,21 @@ import urllib.request -def FindSrcDirPath(): - """Returns the abs path to the src/ dir of the project.""" - src_dir = os.path.dirname(os.path.abspath(__file__)) - while os.path.basename(src_dir) != 'src': - src_dir = os.path.normpath(os.path.join(src_dir, os.pardir)) - return src_dir +def FindRootPath(): + """Returns the absolute path to the highest level repo root. + + If this repo is checked out as a submodule of the chromium/src + superproject, this returns the superproect root. Otherwise, it returns the + webrtc/src repo root. + """ + root_dir = os.path.dirname(os.path.abspath(__file__)) + while os.path.basename(root_dir) not in ('src', 'chromium'): + par_dir = os.path.normpath(os.path.join(root_dir, os.pardir)) + if par_dir == root_dir: + raise RuntimeError('Could not find the repo root.') + root_dir = par_dir + return root_dir + # Skip these dependencies (list without solution name prefix). @@ -35,6 +44,7 @@ def FindSrcDirPath(): # Disable the roll of 'android_ndk' as it won't appear in chromium DEPS. 'src/third_party/android_ndk', 'src/third_party/mockito/src', + 'src/third_party/protobuf-javascript', ] # These dependencies are missing in chromium/src/DEPS, either unused or already @@ -49,7 +59,9 @@ def FindSrcDirPath(): 'src/testing', 'src/third_party', 'src/third_party/clang_format/script', + 'src/third_party/grpc/src', 'src/third_party/gtest-parallel', + 'src/third_party/kotlin_stdlib', 'src/third_party/pipewire/linux-amd64', 'src/tools', ] @@ -65,8 +77,8 @@ def FindSrcDirPath(): ROLL_BRANCH_NAME = 'roll_chromium_revision' SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) -CHECKOUT_SRC_DIR = FindSrcDirPath() -CHECKOUT_ROOT_DIR = os.path.realpath(os.path.join(CHECKOUT_SRC_DIR, os.pardir)) +CHECKOUT_ROOT_DIR = FindRootPath() +GCLIENT_ROOT_DIR = os.path.realpath(os.path.join(CHECKOUT_ROOT_DIR, os.pardir)) # Copied from tools/android/roll/android_deps/.../BuildConfigGenerator.groovy. 
ANDROID_DEPS_START = r'=== ANDROID_DEPS Generated Code Start ===' @@ -76,19 +88,20 @@ def FindSrcDirPath(): NOTIFY_EMAIL = 'webrtc-trooper@grotations.appspotmail.com' -sys.path.append(os.path.join(CHECKOUT_SRC_DIR, 'build')) +sys.path.append(os.path.join(CHECKOUT_ROOT_DIR, 'build')) import find_depot_tools find_depot_tools.add_depot_tools_to_path() CLANG_UPDATE_SCRIPT_URL_PATH = 'tools/clang/scripts/update.py' -CLANG_UPDATE_SCRIPT_LOCAL_PATH = os.path.join(CHECKOUT_SRC_DIR, 'tools', +CLANG_UPDATE_SCRIPT_LOCAL_PATH = os.path.join(CHECKOUT_ROOT_DIR, 'tools', 'clang', 'scripts', 'update.py') DepsEntry = collections.namedtuple('DepsEntry', 'path url revision') ChangedDep = collections.namedtuple('ChangedDep', 'path url current_rev new_rev') CipdDepsEntry = collections.namedtuple('CipdDepsEntry', 'path packages') +GcsDepsEntry = collections.namedtuple('GcsDepsEntry', 'path bucket objects') VersionEntry = collections.namedtuple('VersionEntry', 'version') ChangedCipdPackage = collections.namedtuple( 'ChangedCipdPackage', 'path package current_version new_version') @@ -101,42 +114,42 @@ def FindSrcDirPath(): class RollError(Exception): - pass + pass def StrExpansion(): - return lambda str_value: str_value + return lambda str_value: str_value def VarLookup(local_scope): - return lambda var_name: local_scope['vars'][var_name] + return lambda var_name: local_scope['vars'][var_name] def ParseDepsDict(deps_content): - local_scope = {} - global_scope = { - 'Str': StrExpansion(), - 'Var': VarLookup(local_scope), - 'deps_os': {}, - } - exec(deps_content, global_scope, local_scope) - return local_scope + local_scope = {} + global_scope = { + 'Str': StrExpansion(), + 'Var': VarLookup(local_scope), + 'deps_os': {}, + } + exec(deps_content, global_scope, local_scope) + return local_scope def ParseLocalDepsFile(filename): - with open(filename, 'rb') as f: - deps_content = f.read().decode('utf-8') - return ParseDepsDict(deps_content) + with open(filename, 'rb') as f: + deps_content = f.read().decode('utf-8') + return ParseDepsDict(deps_content) def ParseCommitPosition(commit_message): - for line in reversed(commit_message.splitlines()): - m = COMMIT_POSITION_RE.match(line.strip()) - if m: - return int(m.group(1)) - logging.error('Failed to parse commit position id from:\n%s\n', - commit_message) - sys.exit(-1) + for line in reversed(commit_message.splitlines()): + m = COMMIT_POSITION_RE.match(line.strip()) + if m: + return int(m.group(1)) + logging.error('Failed to parse commit position id from:\n%s\n', + commit_message) + sys.exit(-1) def _RunCommand(command, @@ -144,68 +157,75 @@ def _RunCommand(command, ignore_exit_code=False, extra_env=None, input_data=None): - """Runs a command and returns the output from that command. + """Runs a command and returns the output from that command. If the command fails (exit code != 0), the function will exit the process. Returns: A tuple containing the stdout and stderr outputs as strings. 
""" - working_dir = working_dir or CHECKOUT_SRC_DIR - logging.debug('CMD: %s CWD: %s', ' '.join(command), working_dir) - env = os.environ.copy() - if extra_env: - assert all(isinstance(value, str) for value in extra_env.values()) - logging.debug('extra env: %s', extra_env) - env.update(extra_env) - p = subprocess.Popen(command, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - env=env, - cwd=working_dir, - universal_newlines=True) - std_output, err_output = p.communicate(input_data) - p.stdout.close() - p.stderr.close() - if not ignore_exit_code and p.returncode != 0: - logging.error('Command failed: %s\n' - 'stdout:\n%s\n' - 'stderr:\n%s\n', ' '.join(command), std_output, err_output) - sys.exit(p.returncode) - return std_output, err_output + working_dir = working_dir or CHECKOUT_ROOT_DIR + logging.debug('CMD: %s CWD: %s', ' '.join(command), working_dir) + env = os.environ.copy() + if extra_env: + assert all(isinstance(value, str) for value in extra_env.values()) + logging.debug('extra env: %s', extra_env) + env.update(extra_env) + p = subprocess.Popen(command, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=env, + cwd=working_dir, + universal_newlines=True) + std_output, err_output = p.communicate(input_data) + p.stdout.close() + p.stderr.close() + if not ignore_exit_code and p.returncode != 0: + logging.error('Command failed: %s\n' + 'stdout:\n%s\n' + 'stderr:\n%s\n', ' '.join(command), std_output, + err_output) + sys.exit(p.returncode) + return std_output, err_output + + +def _IsExistingDir(path): + """Returns True if `path` exists and is a dir. + """ + return os.path.isdir(path) def _GetBranches(): - """Returns a tuple of active,branches. + """Returns a tuple of active,branches. The 'active' is the name of the currently active branch and 'branches' is a list of all branches. """ - lines = _RunCommand(['git', 'branch'])[0].split('\n') - branches = [] - active = '' - for line in lines: - if '*' in line: - # The assumption is that the first char will always be the '*'. - active = line[1:].strip() - branches.append(active) - else: - branch = line.strip() - if branch: - branches.append(branch) - return active, branches + lines = _RunCommand(['git', 'branch'])[0].split('\n') + branches = [] + active = '' + for line in lines: + if '*' in line: + # The assumption is that the first char will always be the '*'. + active = line[1:].strip() + branches.append(active) + else: + branch = line.strip() + if branch: + branches.append(branch) + return active, branches def _ReadGitilesContent(url): - # Download and decode BASE64 content until - # https://code.google.com/p/gitiles/issues/detail?id=7 is fixed. - base64_content = ReadUrlContent(url + '?format=TEXT') - return base64.b64decode(base64_content[0]).decode('utf-8') + # Download and decode BASE64 content until + # https://code.google.com/p/gitiles/issues/detail?id=7 is fixed. + base64_content = ReadUrlContent(url + '?format=TEXT') + return base64.b64decode(base64_content[0]).decode('utf-8') def ReadRemoteCrFile(path_below_src, revision): - """Reads a remote Chromium file of a specific revision. + """Reads a remote Chromium file of a specific revision. Args: path_below_src: A path to the target file relative to src dir. @@ -213,35 +233,35 @@ def ReadRemoteCrFile(path_below_src, revision): Returns: A string with file content. 
""" - return _ReadGitilesContent(CHROMIUM_FILE_TEMPLATE % - (revision, path_below_src)) + return _ReadGitilesContent(CHROMIUM_FILE_TEMPLATE % + (revision, path_below_src)) def ReadRemoteCrCommit(revision): - """Reads a remote Chromium commit message. Returns a string.""" - return _ReadGitilesContent(CHROMIUM_COMMIT_TEMPLATE % revision) + """Reads a remote Chromium commit message. Returns a string.""" + return _ReadGitilesContent(CHROMIUM_COMMIT_TEMPLATE % revision) def ReadUrlContent(url): - """Connect to a remote host and read the contents. + """Connect to a remote host and read the contents. Args: url: URL to connect to. Returns: A list of lines. """ - conn = urllib.request.urlopen(url) - try: - return conn.readlines() - except IOError as e: - logging.exception('Error connecting to %s. Error: %s', url, e) - raise - finally: - conn.close() + conn = urllib.request.urlopen(url) + try: + return conn.readlines() + except IOError as e: + logging.exception('Error connecting to %s. Error: %s', url, e) + raise + finally: + conn.close() def GetMatchingDepsEntries(depsentry_dict, dir_path): - """Gets all deps entries matching the provided path. + """Gets all deps entries matching the provided path. This list may contain more than one DepsEntry object. Example: dir_path='src/testing' would give results containing both @@ -253,94 +273,98 @@ def GetMatchingDepsEntries(depsentry_dict, dir_path): Returns: A list of DepsEntry objects. """ - result = [] - for path, depsentry in depsentry_dict.items(): - if path == dir_path: - result.append(depsentry) - else: - parts = path.split('/') - if all(part == parts[i] for i, part in enumerate(dir_path.split('/'))): - result.append(depsentry) - return result - - -def BuildDepsentryDict(deps_dict): - """Builds a dict of paths to DepsEntry objects from a raw deps dict.""" - result = {} - - def AddDepsEntries(deps_subdict): - for path, dep in deps_subdict.items(): - if path in result: - continue - if not isinstance(dep, dict): - dep = {'url': dep} - if dep.get('dep_type') == 'cipd': - result[path] = CipdDepsEntry(path, dep['packages']) - else: - if '@' not in dep['url']: - url, revision = dep['url'], 'HEAD' + result = [] + for path, depsentry in depsentry_dict.items(): + if path == dir_path: + result.append(depsentry) else: - url, revision = dep['url'].split('@') - result[path] = DepsEntry(path, url, revision) + parts = path.split('/') + if all(part == parts[i] + for i, part in enumerate(dir_path.split('/'))): + result.append(depsentry) + return result - def AddVersionEntry(vars_subdict): - for key, value in vars_subdict.items(): - if key in result: - continue - if not key.endswith('_version'): - continue - key = re.sub('_version$', '', key) - result[key] = VersionEntry(value) - AddDepsEntries(deps_dict['deps']) - for deps_os in ['win', 'mac', 'linux', 'android', 'ios', 'unix']: - AddDepsEntries(deps_dict.get('deps_os', {}).get(deps_os, {})) - AddVersionEntry(deps_dict.get('vars', {})) - return result +def BuildDepsentryDict(deps_dict): + """Builds a dict of paths to DepsEntry objects from a raw deps dict.""" + result = {} + + def AddDepsEntries(deps_subdict): + for path, dep in deps_subdict.items(): + if path in result: + continue + if not isinstance(dep, dict): + dep = {'url': dep} + if dep.get('dep_type') == 'cipd': + result[path] = CipdDepsEntry(path, dep['packages']) + elif dep.get('dep_type') == 'gcs': + result[path] = GcsDepsEntry(path, dep['bucket'], + dep['objects']) + else: + if '@' not in dep['url']: + url, revision = dep['url'], 'HEAD' + else: + url, 
revision = dep['url'].split('@') + result[path] = DepsEntry(path, url, revision) + + def AddVersionEntry(vars_subdict): + for key, value in vars_subdict.items(): + if key in result: + continue + if not key.endswith('_version'): + continue + key = re.sub('_version$', '', key) + result[key] = VersionEntry(value) + + AddDepsEntries(deps_dict['deps']) + for deps_os in ['win', 'mac', 'linux', 'android', 'ios', 'unix']: + AddDepsEntries(deps_dict.get('deps_os', {}).get(deps_os, {})) + AddVersionEntry(deps_dict.get('vars', {})) + return result def _FindChangedCipdPackages(path, old_pkgs, new_pkgs): - old_pkgs_names = {p['package'] for p in old_pkgs} - new_pkgs_names = {p['package'] for p in new_pkgs} - pkgs_equal = (old_pkgs_names == new_pkgs_names) - added_pkgs = [p for p in new_pkgs_names if p not in old_pkgs_names] - removed_pkgs = [p for p in old_pkgs_names if p not in new_pkgs_names] - - assert pkgs_equal, ('Old: %s\n New: %s.\nYou need to do a manual roll ' - 'and remove/add entries in DEPS so the old and new ' - 'list match.\nMost likely, you should add \"%s\" and ' - 'remove \"%s\"' % - (old_pkgs, new_pkgs, added_pkgs, removed_pkgs)) - - for old_pkg in old_pkgs: - for new_pkg in new_pkgs: - old_version = old_pkg['version'] - new_version = new_pkg['version'] - if (old_pkg['package'] == new_pkg['package'] - and old_version != new_version): - logging.debug('Roll dependency %s to %s', path, new_version) - yield ChangedCipdPackage(path, old_pkg['package'], old_version, - new_version) + old_pkgs_names = {p['package'] for p in old_pkgs} + new_pkgs_names = {p['package'] for p in new_pkgs} + pkgs_equal = (old_pkgs_names == new_pkgs_names) + added_pkgs = [p for p in new_pkgs_names if p not in old_pkgs_names] + removed_pkgs = [p for p in old_pkgs_names if p not in new_pkgs_names] + + assert pkgs_equal, ('Old: %s\n New: %s.\nYou need to do a manual roll ' + 'and remove/add entries in DEPS so the old and new ' + 'list match.\nMost likely, you should add \"%s\" and ' + 'remove \"%s\"' % + (old_pkgs, new_pkgs, added_pkgs, removed_pkgs)) + + for old_pkg in old_pkgs: + for new_pkg in new_pkgs: + old_version = old_pkg['version'] + new_version = new_pkg['version'] + if (old_pkg['package'] == new_pkg['package'] + and old_version != new_version): + logging.debug('Roll dependency %s to %s', path, new_version) + yield ChangedCipdPackage(path, old_pkg['package'], old_version, + new_version) def _FindChangedVars(name, old_version, new_version): - if old_version != new_version: - logging.debug('Roll dependency %s to %s', name, new_version) - yield ChangedVersionEntry(name, old_version, new_version) + if old_version != new_version: + logging.debug('Roll dependency %s to %s', name, new_version) + yield ChangedVersionEntry(name, old_version, new_version) def _FindNewDeps(old, new): - """ Gather dependencies only in `new` and return corresponding paths. """ - old_entries = set(BuildDepsentryDict(old)) - new_entries = set(BuildDepsentryDict(new)) - return [ - path for path in new_entries - old_entries - if path not in DONT_AUTOROLL_THESE - ] + """ Gather dependencies only in `new` and return corresponding paths. """ + old_entries = set(BuildDepsentryDict(old)) + new_entries = set(BuildDepsentryDict(new)) + return [ + path for path in new_entries - old_entries + if path not in DONT_AUTOROLL_THESE + ] def FindAddedDeps(webrtc_deps, new_cr_deps): - """ + """ Calculate new deps entries of interest. 
Ideally, that would mean: only appearing in chromium DEPS @@ -361,18 +385,18 @@ def FindAddedDeps(webrtc_deps, new_cr_deps): A list of paths added dependencies sitting in `ANDROID_DEPS_PATH`. A list of paths for other added dependencies. """ - all_added_deps = _FindNewDeps(webrtc_deps, new_cr_deps) - generated_android_deps = [ - path for path in all_added_deps if path.startswith(ANDROID_DEPS_PATH) - ] - other_deps = [ - path for path in all_added_deps if path not in generated_android_deps - ] - return generated_android_deps, other_deps + all_added_deps = _FindNewDeps(webrtc_deps, new_cr_deps) + generated_android_deps = [ + path for path in all_added_deps if path.startswith(ANDROID_DEPS_PATH) + ] + other_deps = [ + path for path in all_added_deps if path not in generated_android_deps + ] + return generated_android_deps, other_deps def FindRemovedDeps(webrtc_deps, new_cr_deps): - """ + """ Calculate obsolete deps entries. Ideally, that would mean: no more appearing in chromium DEPS @@ -395,19 +419,20 @@ def FindRemovedDeps(webrtc_deps, new_cr_deps): A list of paths of dependencies removed from `ANDROID_DEPS_PATH`. A list of paths of unexpected disappearing dependencies. """ - all_removed_deps = _FindNewDeps(new_cr_deps, webrtc_deps) - generated_android_deps = sorted( - [path for path in all_removed_deps if path.startswith(ANDROID_DEPS_PATH)]) - # Webrtc-only dependencies are handled in CalculateChangedDeps. - other_deps = sorted([ - path for path in all_removed_deps - if path not in generated_android_deps and path not in WEBRTC_ONLY_DEPS - ]) - return generated_android_deps, other_deps + all_removed_deps = _FindNewDeps(new_cr_deps, webrtc_deps) + generated_android_deps = sorted([ + path for path in all_removed_deps if path.startswith(ANDROID_DEPS_PATH) + ]) + # Webrtc-only dependencies are handled in CalculateChangedDeps. + other_deps = sorted([ + path for path in all_removed_deps + if path not in generated_android_deps and path not in WEBRTC_ONLY_DEPS + ]) + return generated_android_deps, other_deps def CalculateChangedDeps(webrtc_deps, new_cr_deps): - """ + """ Calculate changed deps entries based on entries defined in the WebRTC DEPS file: - If a shared dependency with the Chromium DEPS file: roll it to the same @@ -421,70 +446,80 @@ def CalculateChangedDeps(webrtc_deps, new_cr_deps): Returns: A list of ChangedDep objects representing the changed deps. """ - result = [] - webrtc_entries = BuildDepsentryDict(webrtc_deps) - new_cr_entries = BuildDepsentryDict(new_cr_deps) - for path, webrtc_deps_entry in webrtc_entries.items(): - if path in DONT_AUTOROLL_THESE: - continue - cr_deps_entry = new_cr_entries.get(path) - if cr_deps_entry: - assert type(cr_deps_entry) is type(webrtc_deps_entry) - - if isinstance(cr_deps_entry, CipdDepsEntry): - result.extend( - _FindChangedCipdPackages(path, webrtc_deps_entry.packages, - cr_deps_entry.packages)) - continue - - if isinstance(cr_deps_entry, VersionEntry): - result.extend( - _FindChangedVars(path, webrtc_deps_entry.version, - cr_deps_entry.version)) - continue - - # Use the revision from Chromium's DEPS file. - new_rev = cr_deps_entry.revision - assert webrtc_deps_entry.url == cr_deps_entry.url, ( - 'WebRTC DEPS entry %s has a different URL %s than Chromium %s.' % - (path, webrtc_deps_entry.url, cr_deps_entry.url)) - else: - if isinstance(webrtc_deps_entry, DepsEntry): - # Use the HEAD of the deps repo. 
- stdout, _ = _RunCommand( - ['git', 'ls-remote', webrtc_deps_entry.url, 'HEAD']) - new_rev = stdout.strip().split('\t')[0] - else: - # The dependency has been removed from chromium. - # This is handled by FindRemovedDeps. - continue - - # Check if an update is necessary. - if webrtc_deps_entry.revision != new_rev: - logging.debug('Roll dependency %s to %s', path, new_rev) - result.append( - ChangedDep(path, webrtc_deps_entry.url, webrtc_deps_entry.revision, - new_rev)) - return sorted(result) + result = [] + webrtc_entries = BuildDepsentryDict(webrtc_deps) + new_cr_entries = BuildDepsentryDict(new_cr_deps) + for path, webrtc_deps_entry in webrtc_entries.items(): + if path in DONT_AUTOROLL_THESE: + continue + cr_deps_entry = new_cr_entries.get(path) + if cr_deps_entry: + assert type(cr_deps_entry) is type(webrtc_deps_entry) + + if isinstance(cr_deps_entry, CipdDepsEntry): + result.extend( + _FindChangedCipdPackages(path, webrtc_deps_entry.packages, + cr_deps_entry.packages)) + continue + + if isinstance(cr_deps_entry, GcsDepsEntry): + result.extend( + _FindChangedVars( + path, ','.join(x['object_name'] + for x in webrtc_deps_entry.objects), + ','.join(x['object_name'] + for x in cr_deps_entry.objects))) + continue + + if isinstance(cr_deps_entry, VersionEntry): + result.extend( + _FindChangedVars(path, webrtc_deps_entry.version, + cr_deps_entry.version)) + continue + + # Use the revision from Chromium's DEPS file. + new_rev = cr_deps_entry.revision + assert webrtc_deps_entry.url == cr_deps_entry.url, ( + 'WebRTC DEPS entry %s has a different URL %s than Chromium %s.' + % (path, webrtc_deps_entry.url, cr_deps_entry.url)) + else: + if isinstance(webrtc_deps_entry, DepsEntry): + # Use the HEAD of the deps repo. + stdout, _ = _RunCommand( + ['git', 'ls-remote', webrtc_deps_entry.url, 'HEAD']) + new_rev = stdout.strip().split('\t')[0] + else: + # The dependency has been removed from chromium. + # This is handled by FindRemovedDeps. + continue + + # Check if an update is necessary. 
+ if webrtc_deps_entry.revision != new_rev: + logging.debug('Roll dependency %s to %s', path, new_rev) + result.append( + ChangedDep(path, webrtc_deps_entry.url, + webrtc_deps_entry.revision, new_rev)) + return sorted(result) def CalculateChangedClang(new_cr_rev): - def GetClangRev(lines): - for line in lines: - match = CLANG_REVISION_RE.match(line) - if match: - return match.group(1) - raise RollError('Could not parse Clang revision!') + def GetClangRev(lines): + for line in lines: + match = CLANG_REVISION_RE.match(line) + if match: + return match.group(1) + raise RollError('Could not parse Clang revision!') - with open(CLANG_UPDATE_SCRIPT_LOCAL_PATH, 'r') as f: - current_lines = f.readlines() - current_rev = GetClangRev(current_lines) + with open(CLANG_UPDATE_SCRIPT_LOCAL_PATH, 'r') as f: + current_lines = f.readlines() + current_rev = GetClangRev(current_lines) - new_clang_update_py = ReadRemoteCrFile(CLANG_UPDATE_SCRIPT_URL_PATH, - new_cr_rev).splitlines() - new_rev = GetClangRev(new_clang_update_py) - return ChangedDep(CLANG_UPDATE_SCRIPT_LOCAL_PATH, None, current_rev, new_rev) + new_clang_update_py = ReadRemoteCrFile(CLANG_UPDATE_SCRIPT_URL_PATH, + new_cr_rev).splitlines() + new_rev = GetClangRev(new_clang_update_py) + return ChangedDep(CLANG_UPDATE_SCRIPT_LOCAL_PATH, None, current_rev, + new_rev) def GenerateCommitMessage( @@ -496,181 +531,186 @@ def GenerateCommitMessage( removed_deps_paths=None, clang_change=None, ): - current_cr_rev = rev_update.current_chromium_rev[0:10] - new_cr_rev = rev_update.new_chromium_rev[0:10] - rev_interval = '%s..%s' % (current_cr_rev, new_cr_rev) - git_number_interval = '%s:%s' % (current_commit_pos, new_commit_pos) - - commit_msg = [ - 'Roll chromium_revision %s (%s)\n' % (rev_interval, git_number_interval), - 'Change log: %s' % (CHROMIUM_LOG_TEMPLATE % rev_interval), - 'Full diff: %s\n' % (CHROMIUM_COMMIT_TEMPLATE % rev_interval) - ] - - def Section(adjective, deps): - noun = 'dependency' if len(deps) == 1 else 'dependencies' - commit_msg.append('%s %s' % (adjective, noun)) - - if changed_deps_list: - Section('Changed', changed_deps_list) + current_cr_rev = rev_update.current_chromium_rev[0:10] + new_cr_rev = rev_update.new_chromium_rev[0:10] + rev_interval = '%s..%s' % (current_cr_rev, new_cr_rev) + git_number_interval = '%s:%s' % (current_commit_pos, new_commit_pos) + + commit_msg = [ + 'Roll chromium_revision %s (%s)\n' % + (rev_interval, git_number_interval), + 'Change log: %s' % (CHROMIUM_LOG_TEMPLATE % rev_interval), + 'Full diff: %s\n' % (CHROMIUM_COMMIT_TEMPLATE % rev_interval) + ] + + def Section(adjective, deps): + noun = 'dependency' if len(deps) == 1 else 'dependencies' + commit_msg.append('%s %s' % (adjective, noun)) + + if changed_deps_list: + Section('Changed', changed_deps_list) + + for c in changed_deps_list: + if isinstance(c, ChangedCipdPackage): + commit_msg.append('* %s: %s..%s' % + (c.path, c.current_version, c.new_version)) + elif isinstance(c, ChangedVersionEntry): + commit_msg.append('* %s_version: %s..%s' % + (c.path, c.current_version, c.new_version)) + else: + commit_msg.append( + '* %s: %s/+log/%s..%s' % + (c.path, c.url, c.current_rev[0:10], c.new_rev[0:10])) + + if added_deps_paths: + Section('Added', added_deps_paths) + commit_msg.extend('* %s' % p for p in added_deps_paths) + + if removed_deps_paths: + Section('Removed', removed_deps_paths) + commit_msg.extend('* %s' % p for p in removed_deps_paths) + + if any([changed_deps_list, added_deps_paths, removed_deps_paths]): + change_url = CHROMIUM_FILE_TEMPLATE % 
(rev_interval, 'DEPS') + commit_msg.append('DEPS diff: %s\n' % change_url) + else: + commit_msg.append('No dependencies changed.') + + if clang_change and clang_change.current_rev != clang_change.new_rev: + commit_msg.append('Clang version changed %s:%s' % + (clang_change.current_rev, clang_change.new_rev)) + change_url = CHROMIUM_FILE_TEMPLATE % (rev_interval, + CLANG_UPDATE_SCRIPT_URL_PATH) + commit_msg.append('Details: %s\n' % change_url) + else: + commit_msg.append('No update to Clang.\n') - for c in changed_deps_list: - if isinstance(c, ChangedCipdPackage): - commit_msg.append('* %s: %s..%s' % - (c.path, c.current_version, c.new_version)) - elif isinstance(c, ChangedVersionEntry): - commit_msg.append('* %s_version: %s..%s' % - (c.path, c.current_version, c.new_version)) - else: - commit_msg.append('* %s: %s/+log/%s..%s' % - (c.path, c.url, c.current_rev[0:10], c.new_rev[0:10])) - - if added_deps_paths: - Section('Added', added_deps_paths) - commit_msg.extend('* %s' % p for p in added_deps_paths) - - if removed_deps_paths: - Section('Removed', removed_deps_paths) - commit_msg.extend('* %s' % p for p in removed_deps_paths) - - if any([changed_deps_list, added_deps_paths, removed_deps_paths]): - change_url = CHROMIUM_FILE_TEMPLATE % (rev_interval, 'DEPS') - commit_msg.append('DEPS diff: %s\n' % change_url) - else: - commit_msg.append('No dependencies changed.') - - if clang_change and clang_change.current_rev != clang_change.new_rev: - commit_msg.append('Clang version changed %s:%s' % - (clang_change.current_rev, clang_change.new_rev)) - change_url = CHROMIUM_FILE_TEMPLATE % (rev_interval, - CLANG_UPDATE_SCRIPT_URL_PATH) - commit_msg.append('Details: %s\n' % change_url) - else: - commit_msg.append('No update to Clang.\n') - - commit_msg.append('BUG=None') - return '\n'.join(commit_msg) + commit_msg.append('BUG=None') + return '\n'.join(commit_msg) def UpdateDepsFile(deps_filename, rev_update, changed_deps, new_cr_content): - """Update the DEPS file with the new revision.""" - - with open(deps_filename, 'rb') as deps_file: - deps_content = deps_file.read().decode('utf-8') - - # Update the chromium_revision variable. - deps_content = deps_content.replace(rev_update.current_chromium_rev, - rev_update.new_chromium_rev) - - # Add and remove dependencies. For now: only generated android deps. - # Since gclient cannot add or remove deps, we on the fact that - # these android deps are located in one place we can copy/paste. - deps_re = re.compile(ANDROID_DEPS_START + '.*' + ANDROID_DEPS_END, re.DOTALL) - new_deps = deps_re.search(new_cr_content) - old_deps = deps_re.search(deps_content) - if not new_deps or not old_deps: - faulty = 'Chromium' if not new_deps else 'WebRTC' - raise RollError('Was expecting to find "%s" and "%s"\n' - 'in %s DEPS' % - (ANDROID_DEPS_START, ANDROID_DEPS_END, faulty)) - deps_content = deps_re.sub(new_deps.group(0), deps_content) - - for dep in changed_deps: - if isinstance(dep, ChangedVersionEntry): - deps_content = deps_content.replace(dep.current_version, dep.new_version) - - with open(deps_filename, 'wb') as deps_file: - deps_file.write(deps_content.encode('utf-8')) - - # Update each individual DEPS entry. - for dep in changed_deps: - # ChangedVersionEntry types are already been processed. - if isinstance(dep, ChangedVersionEntry): - continue - local_dep_dir = os.path.join(CHECKOUT_ROOT_DIR, dep.path) - if not os.path.isdir(local_dep_dir): - raise RollError( - 'Cannot find local directory %s. 
Either run\n'
-          'gclient sync --deps=all\n'
-          'or make sure the .gclient file for your solution contains all '
-          'platforms in the target_os list, i.e.\n'
-          'target_os = ["android", "unix", "mac", "ios", "win"];\n'
-          'Then run "gclient sync" again.' % local_dep_dir)
-    if isinstance(dep, ChangedCipdPackage):
-      package = dep.package.format()  # Eliminate double curly brackets
-      update = '%s:%s@%s' % (dep.path, package, dep.new_version)
-    else:
-      update = '%s@%s' % (dep.path, dep.new_rev)
-    _RunCommand(['gclient', 'setdep', '--revision', update],
-                working_dir=CHECKOUT_SRC_DIR)
+    """Update the DEPS file with the new revision."""
+
+    with open(deps_filename, 'rb') as deps_file:
+        deps_content = deps_file.read().decode('utf-8')
+
+    # Update the chromium_revision variable.
+    deps_content = deps_content.replace(rev_update.current_chromium_rev,
+                                        rev_update.new_chromium_rev)
+
+    # Add and remove dependencies. For now: only generated android deps.
+    # Since gclient cannot add or remove deps, we rely on the fact that
+    # these android deps are located in one place we can copy/paste.
+    deps_re = re.compile(ANDROID_DEPS_START + '.*' + ANDROID_DEPS_END,
+                         re.DOTALL)
+    new_deps = deps_re.search(new_cr_content)
+    old_deps = deps_re.search(deps_content)
+    if not new_deps or not old_deps:
+        faulty = 'Chromium' if not new_deps else 'WebRTC'
+        raise RollError('Was expecting to find "%s" and "%s"\n'
+                        'in %s DEPS' %
+                        (ANDROID_DEPS_START, ANDROID_DEPS_END, faulty))
+    deps_content = deps_re.sub(new_deps.group(0), deps_content)
+
+    for dep in changed_deps:
+        if isinstance(dep, ChangedVersionEntry):
+            deps_content = deps_content.replace(dep.current_version,
+                                                dep.new_version)
+
+    with open(deps_filename, 'wb') as deps_file:
+        deps_file.write(deps_content.encode('utf-8'))
+
+    # Update each individual DEPS entry.
+    for dep in changed_deps:
+        # ChangedVersionEntry types have already been processed.
+        if isinstance(dep, ChangedVersionEntry):
+            continue
+        local_dep_dir = os.path.join(GCLIENT_ROOT_DIR, dep.path)
+        if not _IsExistingDir(local_dep_dir):
+            raise RollError(
+                'Cannot find local directory %s. Either run\n'
+                'gclient sync --deps=all\n'
+                'or make sure the .gclient file for your solution contains all '
+                'platforms in the target_os list, i.e.\n'
+                'target_os = ["android", "unix", "mac", "ios", "win"];\n'
+                'Then run "gclient sync" again.'
% local_dep_dir) + if isinstance(dep, ChangedCipdPackage): + package = dep.package.format() # Eliminate double curly brackets + update = '%s:%s@%s' % (dep.path, package, dep.new_version) + else: + update = '%s@%s' % (dep.path, dep.new_rev) + _RunCommand(['gclient', 'setdep', '--revision', update], + working_dir=CHECKOUT_ROOT_DIR) def _IsTreeClean(): - stdout, _ = _RunCommand(['git', 'status', '--porcelain']) - if len(stdout) == 0: - return True + stdout, _ = _RunCommand(['git', 'status', '--porcelain']) + if len(stdout) == 0: + return True - logging.error('Dirty/unversioned files:\n%s', stdout) - return False + logging.error('Dirty/unversioned files:\n%s', stdout) + return False def _EnsureUpdatedMainBranch(dry_run): - current_branch = _RunCommand(['git', 'rev-parse', '--abbrev-ref', - 'HEAD'])[0].splitlines()[0] - if current_branch != 'main': - logging.error('Please checkout the main branch and re-run this script.') - if not dry_run: - sys.exit(-1) + current_branch = _RunCommand(['git', 'rev-parse', '--abbrev-ref', + 'HEAD'])[0].splitlines()[0] + if current_branch != 'main': + logging.error( + 'Please checkout the main branch and re-run this script.') + if not dry_run: + sys.exit(-1) - logging.info('Updating main branch...') - _RunCommand(['git', 'pull']) + logging.info('Updating main branch...') + _RunCommand(['git', 'pull']) def _CreateRollBranch(dry_run): - logging.info('Creating roll branch: %s', ROLL_BRANCH_NAME) - if not dry_run: - _RunCommand(['git', 'checkout', '-b', ROLL_BRANCH_NAME]) + logging.info('Creating roll branch: %s', ROLL_BRANCH_NAME) + if not dry_run: + _RunCommand(['git', 'checkout', '-b', ROLL_BRANCH_NAME]) def _RemovePreviousRollBranch(dry_run): - active_branch, branches = _GetBranches() - if active_branch == ROLL_BRANCH_NAME: - active_branch = 'main' - if ROLL_BRANCH_NAME in branches: - logging.info('Removing previous roll branch (%s)', ROLL_BRANCH_NAME) - if not dry_run: - _RunCommand(['git', 'checkout', active_branch]) - _RunCommand(['git', 'branch', '-D', ROLL_BRANCH_NAME]) + active_branch, branches = _GetBranches() + if active_branch == ROLL_BRANCH_NAME: + active_branch = 'main' + if ROLL_BRANCH_NAME in branches: + logging.info('Removing previous roll branch (%s)', ROLL_BRANCH_NAME) + if not dry_run: + _RunCommand(['git', 'checkout', active_branch]) + _RunCommand(['git', 'branch', '-D', ROLL_BRANCH_NAME]) def _LocalCommit(commit_msg, dry_run): - logging.info('Committing changes locally.') - if not dry_run: - _RunCommand(['git', 'add', '--update', '.']) - _RunCommand(['git', 'commit', '-m', commit_msg]) + logging.info('Committing changes locally.') + if not dry_run: + _RunCommand(['git', 'add', '--update', '.']) + _RunCommand(['git', 'commit', '-m', commit_msg]) def ChooseCQMode(skip_cq, cq_over, current_commit_pos, new_commit_pos): - if skip_cq: - return 0 - if (new_commit_pos - current_commit_pos) < cq_over: - return 1 - return 2 + if skip_cq: + return 0 + if (new_commit_pos - current_commit_pos) < cq_over: + return 1 + return 2 def _GetCcRecipients(changed_deps_list): - """Returns a list of emails to notify based on the changed deps list. + """Returns a list of emails to notify based on the changed deps list. 
""" - cc_recipients = [] - for c in changed_deps_list: - if 'libvpx' in c.path or 'libaom' in c.path: - cc_recipients.append('marpan@webrtc.org') - cc_recipients.append('jianj@chromium.org') - return cc_recipients + cc_recipients = [] + for c in changed_deps_list: + if 'libvpx' in c.path or 'libaom' in c.path: + cc_recipients.append('marpan@webrtc.org') + cc_recipients.append('jianj@chromium.org') + return cc_recipients def _UploadCL(commit_queue_mode, add_cc=None): - """Upload the committed changes as a changelist to Gerrit. + """Upload the committed changes as a changelist to Gerrit. commit_queue_mode: - 2: Submit to commit queue. @@ -679,139 +719,143 @@ def _UploadCL(commit_queue_mode, add_cc=None): add_cc: A list of email addresses to add as CC recipients. """ - cc_recipients = [NOTIFY_EMAIL] - if add_cc: - cc_recipients.extend(add_cc) - cmd = ['git', 'cl', 'upload', '--force', '--bypass-hooks'] - if commit_queue_mode >= 2: - logging.info('Sending the CL to the CQ...') - cmd.extend(['-o', 'label=Bot-Commit+1']) - cmd.extend(['-o', 'label=Commit-Queue+2']) - cmd.extend(['--send-mail', '--cc', ','.join(cc_recipients)]) - elif commit_queue_mode >= 1: - logging.info('Starting CQ dry run...') - cmd.extend(['-o', 'label=Commit-Queue+1']) - extra_env = { - 'EDITOR': 'true', - 'SKIP_GCE_AUTH_FOR_GIT': '1', - } - stdout, stderr = _RunCommand(cmd, extra_env=extra_env) - logging.debug('Output from "git cl upload":\nstdout:\n%s\n\nstderr:\n%s', - stdout, stderr) + cc_recipients = [NOTIFY_EMAIL] + if add_cc: + cc_recipients.extend(add_cc) + cmd = ['git', 'cl', 'upload', '--force', '--bypass-hooks'] + if commit_queue_mode >= 2: + logging.info('Sending the CL to the CQ...') + cmd.extend(['-o', 'label=Bot-Commit+1']) + cmd.extend(['-o', 'label=Commit-Queue+2']) + cmd.extend(['--send-mail', '--cc', ','.join(cc_recipients)]) + elif commit_queue_mode >= 1: + logging.info('Starting CQ dry run...') + cmd.extend(['-o', 'label=Commit-Queue+1']) + extra_env = { + 'EDITOR': 'true', + 'SKIP_GCE_AUTH_FOR_GIT': '1', + } + stdout, stderr = _RunCommand(cmd, extra_env=extra_env) + logging.debug('Output from "git cl upload":\nstdout:\n%s\n\nstderr:\n%s', + stdout, stderr) def GetRollRevisionRanges(opts, webrtc_deps): - current_cr_rev = webrtc_deps['vars']['chromium_revision'] - new_cr_rev = opts.revision - if not new_cr_rev: - stdout, _ = _RunCommand(['git', 'ls-remote', CHROMIUM_SRC_URL, 'HEAD']) - head_rev = stdout.strip().split('\t')[0] - logging.info('No revision specified. Using HEAD: %s', head_rev) - new_cr_rev = head_rev + current_cr_rev = webrtc_deps['vars']['chromium_revision'] + new_cr_rev = opts.revision + if not new_cr_rev: + stdout, _ = _RunCommand(['git', 'ls-remote', CHROMIUM_SRC_URL, 'HEAD']) + head_rev = stdout.strip().split('\t')[0] + logging.info('No revision specified. Using HEAD: %s', head_rev) + new_cr_rev = head_rev - return ChromiumRevisionUpdate(current_cr_rev, new_cr_rev) + return ChromiumRevisionUpdate(current_cr_rev, new_cr_rev) def main(): - p = argparse.ArgumentParser() - p.add_argument('--clean', - action='store_true', - default=False, - help='Removes any previous local roll branch.') - p.add_argument('-r', - '--revision', - help=('Chromium Git revision to roll to. 
Defaults to the ' - 'Chromium HEAD revision if omitted.')) - p.add_argument('--dry-run', - action='store_true', - default=False, - help=('Calculate changes and modify DEPS, but don\'t create ' - 'any local branch, commit, upload CL or send any ' - 'tryjobs.')) - p.add_argument('-i', - '--ignore-unclean-workdir', - action='store_true', - default=False, - help=('Ignore if the current branch is not main or if there ' - 'are uncommitted changes (default: %(default)s).')) - grp = p.add_mutually_exclusive_group() - grp.add_argument('--skip-cq', + p = argparse.ArgumentParser() + p.add_argument('--clean', action='store_true', default=False, - help='Skip sending the CL to the CQ (default: %(default)s)') - grp.add_argument('--cq-over', - type=int, - default=1, - help=('Commit queue dry run if the revision difference ' - 'is below this number (default: %(default)s)')) - p.add_argument('-v', - '--verbose', - action='store_true', - default=False, - help='Be extra verbose in printing of log messages.') - opts = p.parse_args() - - if opts.verbose: - logging.basicConfig(level=logging.DEBUG) - else: - logging.basicConfig(level=logging.INFO) - - if not opts.ignore_unclean_workdir and not _IsTreeClean(): - logging.error('Please clean your local checkout first.') - return 1 - - if opts.clean: - _RemovePreviousRollBranch(opts.dry_run) - - if not opts.ignore_unclean_workdir: - _EnsureUpdatedMainBranch(opts.dry_run) - - deps_filename = os.path.join(CHECKOUT_SRC_DIR, 'DEPS') - webrtc_deps = ParseLocalDepsFile(deps_filename) - - rev_update = GetRollRevisionRanges(opts, webrtc_deps) - - current_commit_pos = ParseCommitPosition( - ReadRemoteCrCommit(rev_update.current_chromium_rev)) - new_commit_pos = ParseCommitPosition( - ReadRemoteCrCommit(rev_update.new_chromium_rev)) - - new_cr_content = ReadRemoteCrFile('DEPS', rev_update.new_chromium_rev) - new_cr_deps = ParseDepsDict(new_cr_content) - changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) - # Discard other deps, assumed to be chromium-only dependencies. - new_generated_android_deps, _ = FindAddedDeps(webrtc_deps, new_cr_deps) - removed_generated_android_deps, other_deps = FindRemovedDeps( - webrtc_deps, new_cr_deps) - if other_deps: - raise RollError('WebRTC DEPS entries are missing from Chromium: %s.\n' - 'Remove them or add them to either ' - 'WEBRTC_ONLY_DEPS or DONT_AUTOROLL_THESE.' % other_deps) - clang_change = CalculateChangedClang(rev_update.new_chromium_rev) - commit_msg = GenerateCommitMessage( - rev_update, - current_commit_pos, - new_commit_pos, - changed_deps, - added_deps_paths=new_generated_android_deps, - removed_deps_paths=removed_generated_android_deps, - clang_change=clang_change) - logging.debug('Commit message:\n%s', commit_msg) - - _CreateRollBranch(opts.dry_run) - if not opts.dry_run: - UpdateDepsFile(deps_filename, rev_update, changed_deps, new_cr_content) - if _IsTreeClean(): - logging.info("No DEPS changes detected, skipping CL creation.") - else: - _LocalCommit(commit_msg, opts.dry_run) - commit_queue_mode = ChooseCQMode(opts.skip_cq, opts.cq_over, - current_commit_pos, new_commit_pos) - logging.info('Uploading CL...') + help='Removes any previous local roll branch.') + p.add_argument('-r', + '--revision', + help=('Chromium Git revision to roll to. 
Defaults to the ' + 'Chromium HEAD revision if omitted.')) + p.add_argument( + '--dry-run', + action='store_true', + default=False, + help=('Calculate changes and modify DEPS, but don\'t create ' + 'any local branch, commit, upload CL or send any ' + 'tryjobs.')) + p.add_argument( + '-i', + '--ignore-unclean-workdir', + action='store_true', + default=False, + help=('Ignore if the current branch is not main or if there ' + 'are uncommitted changes (default: %(default)s).')) + grp = p.add_mutually_exclusive_group() + grp.add_argument( + '--skip-cq', + action='store_true', + default=False, + help='Skip sending the CL to the CQ (default: %(default)s)') + grp.add_argument('--cq-over', + type=int, + default=1, + help=('Commit queue dry run if the revision difference ' + 'is below this number (default: %(default)s)')) + p.add_argument('-v', + '--verbose', + action='store_true', + default=False, + help='Be extra verbose in printing of log messages.') + opts = p.parse_args() + + if opts.verbose: + logging.basicConfig(level=logging.DEBUG) + else: + logging.basicConfig(level=logging.INFO) + + if not opts.ignore_unclean_workdir and not _IsTreeClean(): + logging.error('Please clean your local checkout first.') + return 1 + + if opts.clean: + _RemovePreviousRollBranch(opts.dry_run) + + if not opts.ignore_unclean_workdir: + _EnsureUpdatedMainBranch(opts.dry_run) + + deps_filename = os.path.join(CHECKOUT_ROOT_DIR, 'DEPS') + webrtc_deps = ParseLocalDepsFile(deps_filename) + + rev_update = GetRollRevisionRanges(opts, webrtc_deps) + + current_commit_pos = ParseCommitPosition( + ReadRemoteCrCommit(rev_update.current_chromium_rev)) + new_commit_pos = ParseCommitPosition( + ReadRemoteCrCommit(rev_update.new_chromium_rev)) + + new_cr_content = ReadRemoteCrFile('DEPS', rev_update.new_chromium_rev) + new_cr_deps = ParseDepsDict(new_cr_content) + changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) + # Discard other deps, assumed to be chromium-only dependencies. + new_generated_android_deps, _ = FindAddedDeps(webrtc_deps, new_cr_deps) + removed_generated_android_deps, other_deps = FindRemovedDeps( + webrtc_deps, new_cr_deps) + if other_deps: + raise RollError('WebRTC DEPS entries are missing from Chromium: %s.\n' + 'Remove them or add them to either ' + 'WEBRTC_ONLY_DEPS or DONT_AUTOROLL_THESE.' 
% + other_deps) + clang_change = CalculateChangedClang(rev_update.new_chromium_rev) + commit_msg = GenerateCommitMessage( + rev_update, + current_commit_pos, + new_commit_pos, + changed_deps, + added_deps_paths=new_generated_android_deps, + removed_deps_paths=removed_generated_android_deps, + clang_change=clang_change) + logging.debug('Commit message:\n%s', commit_msg) + + _CreateRollBranch(opts.dry_run) if not opts.dry_run: - _UploadCL(commit_queue_mode, _GetCcRecipients(changed_deps)) - return 0 + UpdateDepsFile(deps_filename, rev_update, changed_deps, new_cr_content) + if _IsTreeClean(): + logging.info("No DEPS changes detected, skipping CL creation.") + else: + _LocalCommit(commit_msg, opts.dry_run) + commit_queue_mode = ChooseCQMode(opts.skip_cq, opts.cq_over, + current_commit_pos, new_commit_pos) + logging.info('Uploading CL...') + if not opts.dry_run: + _UploadCL(commit_queue_mode, _GetCcRecipients(changed_deps)) + return 0 if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/autoroller/unittests/roll_deps_test.py b/tools_webrtc/autoroller/unittests/roll_deps_test.py index 1b201616ef..fed0238f60 100755 --- a/tools_webrtc/autoroller/unittests/roll_deps_test.py +++ b/tools_webrtc/autoroller/unittests/roll_deps_test.py @@ -8,6 +8,7 @@ # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. +# pylint: disable=invalid-name import glob import os @@ -49,323 +50,359 @@ class TestError(Exception): - pass + pass class FakeCmd: - def __init__(self): - self.expectations = [] - - def AddExpectation(self, *args, **kwargs): - returns = kwargs.pop('_returns', None) - ignores = kwargs.pop('_ignores', []) - self.expectations.append((args, kwargs, returns, ignores)) - - def __call__(self, *args, **kwargs): - if not self.expectations: - raise TestError('Got unexpected\n%s\n%s' % (args, kwargs)) - exp_args, exp_kwargs, exp_returns, ignores = self.expectations.pop(0) - for item in ignores: - kwargs.pop(item, None) - if args != exp_args or kwargs != exp_kwargs: - message = 'Expected:\n args: %s\n kwargs: %s\n' % (exp_args, exp_kwargs) - message += 'Got:\n args: %s\n kwargs: %s\n' % (args, kwargs) - raise TestError(message) - return exp_returns + + def __init__(self): + self.expectations = [] + + def AddExpectation(self, *args, **kwargs): + returns = kwargs.pop('_returns', None) + ignores = kwargs.pop('_ignores', []) + self.expectations.append((args, kwargs, returns, ignores)) + + def __call__(self, *args, **kwargs): + if not self.expectations: + raise TestError('Got unexpected\n%s\n%s' % (args, kwargs)) + exp_args, exp_kwargs, exp_returns, ignores = self.expectations.pop(0) + for item in ignores: + kwargs.pop(item, None) + if args != exp_args or kwargs != exp_kwargs: + message = ('Expected:\n args: %s\n kwargs: %s\n' % + (exp_args, exp_kwargs)) + message += 'Got:\n args: %s\n kwargs: %s\n' % (args, kwargs) + raise TestError(message) + return exp_returns class NullCmd: - """No-op mock when calls mustn't be checked. """ + """No-op mock when calls mustn't be checked. """ + + def __call__(self, *args, **kwargs): + # Empty stdout and stderr. + return None, None + - def __call__(self, *args, **kwargs): - # Empty stdout and stderr. 
- return None, None +class MockIsExistingDir: + """Pretends that all paths are valid directories.""" + + def __call__(self, *args, **kwargs): + return True class TestRollChromiumRevision(unittest.TestCase): - def setUp(self): - self._output_dir = tempfile.mkdtemp() - test_data_dir = os.path.join(SCRIPT_DIR, 'testdata', 'roll_deps') - for test_file in glob.glob(os.path.join(test_data_dir, '*')): - shutil.copy(test_file, self._output_dir) - join = lambda f: os.path.join(self._output_dir, f) - self._webrtc_depsfile = join('DEPS') - self._new_cr_depsfile = join('DEPS.chromium.new') - self._webrtc_depsfile_android = join('DEPS.with_android_deps') - self._new_cr_depsfile_android = join('DEPS.chromium.with_android_deps') - self.fake = FakeCmd() - - def tearDown(self): - shutil.rmtree(self._output_dir, ignore_errors=True) - self.assertEqual(self.fake.expectations, []) - - def testVarLookup(self): - local_scope = {'foo': 'wrong', 'vars': {'foo': 'bar'}} - lookup = roll_deps.VarLookup(local_scope) - self.assertEqual(lookup('foo'), 'bar') - - def testUpdateDepsFile(self): - new_rev = 'aaaaabbbbbcccccdddddeeeeefffff0000011111' - current_rev = TEST_DATA_VARS['chromium_revision'] - - with open(self._new_cr_depsfile_android, 'rb') as deps_file: - new_cr_contents = deps_file.read().decode('utf-8') - - UpdateDepsFile(self._webrtc_depsfile, - ChromiumRevisionUpdate(current_rev, new_rev), [], - new_cr_contents) - with open(self._webrtc_depsfile, 'rb') as deps_file: - deps_contents = deps_file.read().decode('utf-8') - self.assertTrue(new_rev in deps_contents, - 'Failed to find %s in\n%s' % (new_rev, deps_contents)) - - def _UpdateDepsSetup(self): - with open(self._webrtc_depsfile_android, 'rb') as deps_file: - webrtc_contents = deps_file.read().decode('utf-8') - with open(self._new_cr_depsfile_android, 'rb') as deps_file: - new_cr_contents = deps_file.read().decode('utf-8') - webrtc_deps = ParseDepsDict(webrtc_contents) - new_cr_deps = ParseDepsDict(new_cr_contents) - - changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) - with mock.patch('roll_deps._RunCommand', NullCmd()): - UpdateDepsFile(self._webrtc_depsfile_android, NO_CHROMIUM_REVISION_UPDATE, - changed_deps, new_cr_contents) - - with open(self._webrtc_depsfile_android, 'rb') as deps_file: - updated_contents = deps_file.read().decode('utf-8') - - return webrtc_contents, updated_contents - - def testUpdateAndroidGeneratedDeps(self): - _, updated_contents = self._UpdateDepsSetup() - - changed = 'third_party/android_deps/libs/android_arch_core_common' - changed_version = '1.0.0-cr0' - self.assertTrue(changed in updated_contents) - self.assertTrue(changed_version in updated_contents) - - def testAddAndroidGeneratedDeps(self): - webrtc_contents, updated_contents = self._UpdateDepsSetup() - - added = 'third_party/android_deps/libs/android_arch_lifecycle_common' - self.assertFalse(added in webrtc_contents) - self.assertTrue(added in updated_contents) - - def testRemoveAndroidGeneratedDeps(self): - webrtc_contents, updated_contents = self._UpdateDepsSetup() - - removed = 'third_party/android_deps/libs/android_arch_lifecycle_runtime' - self.assertTrue(removed in webrtc_contents) - self.assertFalse(removed in updated_contents) - - def testParseDepsDict(self): - with open(self._webrtc_depsfile, 'rb') as deps_file: - deps_contents = deps_file.read().decode('utf-8') - local_scope = ParseDepsDict(deps_contents) - vars_dict = local_scope['vars'] - - def AssertVar(variable_name): - self.assertEqual(vars_dict[variable_name], TEST_DATA_VARS[variable_name]) - - 
AssertVar('chromium_git') - AssertVar('chromium_revision') - self.assertEqual(len(local_scope['deps']), 3) - self.assertEqual(len(local_scope['deps_os']), 1) - - def testGetMatchingDepsEntriesReturnsPathInSimpleCase(self): - entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing/gtest') - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0], DEPS_ENTRIES['src/testing/gtest']) - - def testGetMatchingDepsEntriesHandlesSimilarStartingPaths(self): - entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing') - self.assertEqual(len(entries), 2) - - def testGetMatchingDepsEntriesHandlesTwoPathsWithIdenticalFirstParts(self): - entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/build') - self.assertEqual(len(entries), 1) - - def testCalculateChangedDeps(self): - webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile) - new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile) - with mock.patch('roll_deps._RunCommand', self.fake): - _SetupGitLsRemoteCall( - self.fake, 'https://chromium.googlesource.com/chromium/src/build', - BUILD_NEW_REV) - changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) - - self.assertEqual(len(changed_deps), 4) - self.assertEqual(changed_deps[0].path, 'fuchsia') - self.assertEqual(changed_deps[0].current_version, 'version:10.20221201.3.1') - self.assertEqual(changed_deps[0].new_version, 'version:11.20230207.1.1') - - self.assertEqual(changed_deps[1].path, 'src/build') - self.assertEqual(changed_deps[1].current_rev, BUILD_OLD_REV) - self.assertEqual(changed_deps[1].new_rev, BUILD_NEW_REV) - - self.assertEqual(changed_deps[2].path, 'src/buildtools/linux64') - self.assertEqual(changed_deps[2].package, 'gn/gn/linux-amd64') - self.assertEqual(changed_deps[2].current_version, - 'git_revision:69ec4fca1fa69ddadae13f9e6b7507efa0675263') - self.assertEqual(changed_deps[2].new_version, 'git_revision:new-revision') - - self.assertEqual(changed_deps[3].path, 'src/third_party/depot_tools') - self.assertEqual(changed_deps[3].current_rev, DEPOTTOOLS_OLD_REV) - self.assertEqual(changed_deps[3].new_rev, DEPOTTOOLS_NEW_REV) - - def testWithDistinctDeps(self): - """Check CalculateChangedDeps works when deps are added/removed.""" - webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) - new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) - changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) - self.assertEqual(len(changed_deps), 1) - self.assertEqual( - changed_deps[0].path, - 'src/third_party/android_deps/libs/android_arch_core_common') - self.assertEqual( - changed_deps[0].package, - 'chromium/third_party/android_deps/libs/android_arch_core_common') - self.assertEqual(changed_deps[0].current_version, 'version:0.9.0') - self.assertEqual(changed_deps[0].new_version, 'version:1.0.0-cr0') - - def testFindAddedDeps(self): - webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) - new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) - added_android_paths, other_paths = FindAddedDeps(webrtc_deps, new_cr_deps) - self.assertEqual( - added_android_paths, - ['src/third_party/android_deps/libs/android_arch_lifecycle_common']) - self.assertEqual(other_paths, []) - - def testFindRemovedDeps(self): - webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) - new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) - removed_android_paths, other_paths = FindRemovedDeps( - webrtc_deps, new_cr_deps) - self.assertEqual( - removed_android_paths, - ['src/third_party/android_deps/libs/android_arch_lifecycle_runtime']) - 
self.assertEqual(other_paths, []) - - def testMissingDepsIsDetected(self): - """Check error is reported when deps cannot be automatically removed.""" - # The situation at test is the following: - # * A WebRTC DEPS entry is missing from Chromium. - # * The dependency isn't an android_deps (those are supported). - webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile) - new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) - _, other_paths = FindRemovedDeps(webrtc_deps, new_cr_deps) - self.assertEqual( - other_paths, - ['fuchsia', 'src/buildtools/linux64', 'src/third_party/depot_tools']) - - def testExpectedDepsIsNotReportedMissing(self): - """Some deps musn't be seen as missing, even if absent from Chromium.""" - webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile) - new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) - removed_android_paths, other_paths = FindRemovedDeps( - webrtc_deps, new_cr_deps) - self.assertTrue('src/build' not in removed_android_paths) - self.assertTrue('src/build' not in other_paths) - - def _CommitMessageSetup(self): - webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) - new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) - - changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) - added_paths, _ = FindAddedDeps(webrtc_deps, new_cr_deps) - removed_paths, _ = FindRemovedDeps(webrtc_deps, new_cr_deps) - - current_commit_pos = 'cafe' - new_commit_pos = 'f00d' - - commit_msg = GenerateCommitMessage(NO_CHROMIUM_REVISION_UPDATE, - current_commit_pos, new_commit_pos, - changed_deps, added_paths, removed_paths) - - return [l.strip() for l in commit_msg.split('\n')] - - def testChangedDepsInCommitMessage(self): - commit_lines = self._CommitMessageSetup() - - changed = '* src/third_party/android_deps/libs/' \ - 'android_arch_core_common: version:0.9.0..version:1.0.0-cr0' - self.assertTrue(changed in commit_lines) - # Check it is in adequate section. - changed_line = commit_lines.index(changed) - self.assertTrue('Changed' in commit_lines[changed_line - 1]) - - def testAddedDepsInCommitMessage(self): - commit_lines = self._CommitMessageSetup() - - added = '* src/third_party/android_deps/libs/' \ - 'android_arch_lifecycle_common' - self.assertTrue(added in commit_lines) - # Check it is in adequate section. - added_line = commit_lines.index(added) - self.assertTrue('Added' in commit_lines[added_line - 1]) - - def testRemovedDepsInCommitMessage(self): - commit_lines = self._CommitMessageSetup() - - removed = '* src/third_party/android_deps/libs/' \ - 'android_arch_lifecycle_runtime' - self.assertTrue(removed in commit_lines) - # Check it is in adequate section. 
- removed_line = commit_lines.index(removed) - self.assertTrue('Removed' in commit_lines[removed_line - 1]) + + def setUp(self): + self._output_dir = tempfile.mkdtemp() + test_data_dir = os.path.join(SCRIPT_DIR, 'testdata', 'roll_deps') + for test_file in glob.glob(os.path.join(test_data_dir, '*')): + shutil.copy(test_file, self._output_dir) + join = lambda f: os.path.join(self._output_dir, f) + self._webrtc_depsfile = join('DEPS') + self._new_cr_depsfile = join('DEPS.chromium.new') + self._webrtc_depsfile_android = join('DEPS.with_android_deps') + self._new_cr_depsfile_android = join('DEPS.chromium.with_android_deps') + self.fake = FakeCmd() + + def tearDown(self): + shutil.rmtree(self._output_dir, ignore_errors=True) + self.assertEqual(self.fake.expectations, []) + + def testVarLookup(self): + local_scope = {'foo': 'wrong', 'vars': {'foo': 'bar'}} + lookup = roll_deps.VarLookup(local_scope) + self.assertEqual(lookup('foo'), 'bar') + + def testUpdateDepsFile(self): + new_rev = 'aaaaabbbbbcccccdddddeeeeefffff0000011111' + current_rev = TEST_DATA_VARS['chromium_revision'] + + with open(self._new_cr_depsfile_android, 'rb') as deps_file: + new_cr_contents = deps_file.read().decode('utf-8') + + UpdateDepsFile(self._webrtc_depsfile, + ChromiumRevisionUpdate(current_rev, new_rev), [], + new_cr_contents) + with open(self._webrtc_depsfile, 'rb') as deps_file: + deps_contents = deps_file.read().decode('utf-8') + self.assertTrue( + new_rev in deps_contents, + 'Failed to find %s in\n%s' % (new_rev, deps_contents)) + + def _UpdateDepsSetup(self): + with open(self._webrtc_depsfile_android, 'rb') as deps_file: + webrtc_contents = deps_file.read().decode('utf-8') + with open(self._new_cr_depsfile_android, 'rb') as deps_file: + new_cr_contents = deps_file.read().decode('utf-8') + webrtc_deps = ParseDepsDict(webrtc_contents) + new_cr_deps = ParseDepsDict(new_cr_contents) + + changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) + with mock.patch('roll_deps._RunCommand', NullCmd()): + with mock.patch('roll_deps._IsExistingDir', MockIsExistingDir()): + UpdateDepsFile(self._webrtc_depsfile_android, + NO_CHROMIUM_REVISION_UPDATE, changed_deps, + new_cr_contents) + + with open(self._webrtc_depsfile_android, 'rb') as deps_file: + updated_contents = deps_file.read().decode('utf-8') + + return webrtc_contents, updated_contents + + def testUpdateAndroidGeneratedDeps(self): + _, updated_contents = self._UpdateDepsSetup() + + changed = 'third_party/android_deps/libs/android_arch_core_common' + changed_version = '1.0.0-cr0' + self.assertTrue(changed in updated_contents) + self.assertTrue(changed_version in updated_contents) + + def testAddAndroidGeneratedDeps(self): + webrtc_contents, updated_contents = self._UpdateDepsSetup() + + added = 'third_party/android_deps/libs/android_arch_lifecycle_common' + self.assertFalse(added in webrtc_contents) + self.assertTrue(added in updated_contents) + + def testRemoveAndroidGeneratedDeps(self): + webrtc_contents, updated_contents = self._UpdateDepsSetup() + + # pylint: disable=line-too-long + removed = 'third_party/android_deps/libs/android_arch_lifecycle_runtime' + self.assertTrue(removed in webrtc_contents) + self.assertFalse(removed in updated_contents) + + def testParseDepsDict(self): + with open(self._webrtc_depsfile, 'rb') as deps_file: + deps_contents = deps_file.read().decode('utf-8') + local_scope = ParseDepsDict(deps_contents) + vars_dict = local_scope['vars'] + + def AssertVar(variable_name): + self.assertEqual(vars_dict[variable_name], + 
TEST_DATA_VARS[variable_name]) + + AssertVar('chromium_git') + AssertVar('chromium_revision') + self.assertEqual(len(local_scope['deps']), 4) + self.assertEqual(len(local_scope['deps_os']), 1) + + def testGetMatchingDepsEntriesReturnsPathInSimpleCase(self): + entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing/gtest') + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0], DEPS_ENTRIES['src/testing/gtest']) + + def testGetMatchingDepsEntriesHandlesSimilarStartingPaths(self): + entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing') + self.assertEqual(len(entries), 2) + + def testGetMatchingDepsEntriesHandlesTwoPathsWithIdenticalFirstParts(self): + entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/build') + self.assertEqual(len(entries), 1) + + def testCalculateChangedDeps(self): + webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile) + new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile) + with mock.patch('roll_deps._RunCommand', self.fake): + _SetupGitLsRemoteCall( + self.fake, + 'https://chromium.googlesource.com/chromium/src/build', + BUILD_NEW_REV) + changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) + + self.assertEqual(len(changed_deps), 5) + self.assertEqual(changed_deps[0].path, 'fuchsia') + self.assertEqual(changed_deps[0].current_version, + 'version:10.20221201.3.1') + self.assertEqual(changed_deps[0].new_version, + 'version:11.20230207.1.1') + + self.assertEqual(changed_deps[1].path, 'src/build') + self.assertEqual(changed_deps[1].current_rev, BUILD_OLD_REV) + self.assertEqual(changed_deps[1].new_rev, BUILD_NEW_REV) + + self.assertEqual(changed_deps[2].path, 'src/buildtools/linux64') + self.assertEqual(changed_deps[2].package, 'gn/gn/linux-amd64') + self.assertEqual( + changed_deps[2].current_version, + 'git_revision:69ec4fca1fa69ddadae13f9e6b7507efa0675263') + self.assertEqual(changed_deps[2].new_version, + 'git_revision:new-revision') + + self.assertEqual(changed_deps[3].path, 'src/third_party/depot_tools') + self.assertEqual(changed_deps[3].current_rev, DEPOTTOOLS_OLD_REV) + self.assertEqual(changed_deps[3].new_rev, DEPOTTOOLS_NEW_REV) + + self.assertEqual(changed_deps[4].path, + 'src/third_party/js_code_coverage') + self.assertEqual( + changed_deps[4].current_version, + 'js_code_coverage/d538975c93eefc7bafd599b50f867e90c1ef17f3') + self.assertEqual( + changed_deps[4].new_version, + 'js_code_coverage/d538975c93eefc7bafd599b50f867e90c1ef17f4') + + def testWithDistinctDeps(self): + """Check CalculateChangedDeps works when deps are added/removed.""" + webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) + new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) + changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) + self.assertEqual(len(changed_deps), 1) + self.assertEqual( + changed_deps[0].path, + 'src/third_party/android_deps/libs/android_arch_core_common') + self.assertEqual( + changed_deps[0].package, + 'chromium/third_party/android_deps/libs/android_arch_core_common') + self.assertEqual(changed_deps[0].current_version, 'version:0.9.0') + self.assertEqual(changed_deps[0].new_version, 'version:1.0.0-cr0') + + def testFindAddedDeps(self): + webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) + new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) + added_android_paths, other_paths = FindAddedDeps( + webrtc_deps, new_cr_deps) + self.assertEqual(added_android_paths, [ + 'src/third_party/android_deps/libs/android_arch_lifecycle_common' + ]) + self.assertEqual(other_paths, []) + + def 
testFindRemovedDeps(self): + webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) + new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) + removed_android_paths, other_paths = FindRemovedDeps( + webrtc_deps, new_cr_deps) + self.assertEqual(removed_android_paths, [ + 'src/third_party/android_deps/libs/android_arch_lifecycle_runtime' + ]) + self.assertEqual(other_paths, []) + + def testMissingDepsIsDetected(self): + """Check error is reported when deps cannot be automatically removed. + """ + # The situation at test is the following: + # * A WebRTC DEPS entry is missing from Chromium. + # * The dependency isn't an android_deps (those are supported). + webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile) + new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) + _, other_paths = FindRemovedDeps(webrtc_deps, new_cr_deps) + self.assertEqual(other_paths, [ + 'fuchsia', 'src/buildtools/linux64', 'src/third_party/depot_tools', + 'src/third_party/js_code_coverage' + ]) + + def testExpectedDepsIsNotReportedMissing(self): + """Some deps musn't be seen as missing, even if absent from Chromium. + """ + webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile) + new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) + removed_android_paths, other_paths = FindRemovedDeps( + webrtc_deps, new_cr_deps) + self.assertTrue('src/build' not in removed_android_paths) + self.assertTrue('src/build' not in other_paths) + + def _CommitMessageSetup(self): + webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) + new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) + + changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) + added_paths, _ = FindAddedDeps(webrtc_deps, new_cr_deps) + removed_paths, _ = FindRemovedDeps(webrtc_deps, new_cr_deps) + + current_commit_pos = 'cafe' + new_commit_pos = 'f00d' + + commit_msg = GenerateCommitMessage(NO_CHROMIUM_REVISION_UPDATE, + current_commit_pos, new_commit_pos, + changed_deps, added_paths, + removed_paths) + + return [l.strip() for l in commit_msg.split('\n')] + + def testChangedDepsInCommitMessage(self): + commit_lines = self._CommitMessageSetup() + + changed = '* src/third_party/android_deps/libs/' \ + 'android_arch_core_common: version:0.9.0..version:1.0.0-cr0' + self.assertTrue(changed in commit_lines) + # Check it is in adequate section. + changed_line = commit_lines.index(changed) + self.assertTrue('Changed' in commit_lines[changed_line - 1]) + + def testAddedDepsInCommitMessage(self): + commit_lines = self._CommitMessageSetup() + + added = '* src/third_party/android_deps/libs/' \ + 'android_arch_lifecycle_common' + self.assertTrue(added in commit_lines) + # Check it is in adequate section. + added_line = commit_lines.index(added) + self.assertTrue('Added' in commit_lines[added_line - 1]) + + def testRemovedDepsInCommitMessage(self): + commit_lines = self._CommitMessageSetup() + + removed = '* src/third_party/android_deps/libs/' \ + 'android_arch_lifecycle_runtime' + self.assertTrue(removed in commit_lines) + # Check it is in adequate section. 
+ removed_line = commit_lines.index(removed) + self.assertTrue('Removed' in commit_lines[removed_line - 1]) class TestChooseCQMode(unittest.TestCase): - def testSkip(self): - self.assertEqual(ChooseCQMode(True, 99, 500000, 500100), 0) - def testDryRun(self): - self.assertEqual(ChooseCQMode(False, 101, 500000, 500100), 1) + def testSkip(self): + self.assertEqual(ChooseCQMode(True, 99, 500000, 500100), 0) - def testSubmit(self): - self.assertEqual(ChooseCQMode(False, 100, 500000, 500100), 2) + def testDryRun(self): + self.assertEqual(ChooseCQMode(False, 101, 500000, 500100), 1) + + def testSubmit(self): + self.assertEqual(ChooseCQMode(False, 100, 500000, 500100), 2) class TestReadUrlContent(unittest.TestCase): - def setUp(self): - self.url = 'http://localhost+?format=TEXT' - def testReadUrlContent(self): - url_mock = mock.Mock() - roll_deps.urllib.request.urlopen = url_mock + def setUp(self): + self.url = 'http://localhost+?format=TEXT' + + def testReadUrlContent(self): + url_mock = mock.Mock() + roll_deps.urllib.request.urlopen = url_mock - roll_deps.ReadUrlContent(self.url) + roll_deps.ReadUrlContent(self.url) - calls = [ - mock.call('http://localhost+?format=TEXT'), - mock.call().readlines(), - mock.call().close() - ] - self.assertEqual(url_mock.mock_calls, calls) + calls = [ + mock.call('http://localhost+?format=TEXT'), + mock.call().readlines(), + mock.call().close() + ] + self.assertEqual(url_mock.mock_calls, calls) - def testReadUrlContentError(self): - roll_deps.logging = mock.Mock() + def testReadUrlContentError(self): + roll_deps.logging = mock.Mock() - readlines_mock = mock.Mock() - readlines_mock.readlines = mock.Mock( - side_effect=IOError('Connection error')) - readlines_mock.close = mock.Mock() + readlines_mock = mock.Mock() + readlines_mock.readlines = mock.Mock( + side_effect=IOError('Connection error')) + readlines_mock.close = mock.Mock() - url_mock = mock.Mock(return_value=readlines_mock) - roll_deps.urllib.request.urlopen = url_mock + url_mock = mock.Mock(return_value=readlines_mock) + roll_deps.urllib.request.urlopen = url_mock - try: - roll_deps.ReadUrlContent(self.url) - except OSError: - self.assertTrue(roll_deps.logging.exception.called) + try: + roll_deps.ReadUrlContent(self.url) + except OSError: + self.assertTrue(roll_deps.logging.exception.called) def _SetupGitLsRemoteCall(cmd_fake, url, revision): - cmd = ['git', 'ls-remote', url, revision] - cmd_fake.AddExpectation(cmd, _returns=(revision, None)) + cmd = ['git', 'ls-remote', url, revision] + cmd_fake.AddExpectation(cmd, _returns=(revision, None)) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tools_webrtc/autoroller/unittests/testdata/roll_deps/DEPS b/tools_webrtc/autoroller/unittests/testdata/roll_deps/DEPS index a6f577527d..f65b40802c 100644 --- a/tools_webrtc/autoroller/unittests/testdata/roll_deps/DEPS +++ b/tools_webrtc/autoroller/unittests/testdata/roll_deps/DEPS @@ -30,6 +30,20 @@ deps = { # Script expects to find these markers. 
# === ANDROID_DEPS Generated Code Start === # === ANDROID_DEPS Generated Code End === + + # Entry that uses GCS + 'src/third_party/js_code_coverage': { + 'dep_type': 'gcs', + 'bucket': 'chromium-nodejs', + 'objects': [ + { + 'object_name': 'js_code_coverage/d538975c93eefc7bafd599b50f867e90c1ef17f3', + 'sha256sum': '646bb00ced0a930b2eb1e4dbcfac18ebbb8f889bb80599e0254d9d6505427914', + 'size_bytes': 1469185, + 'generation': 1657780123604338, + }, + ], + }, } deps_os = { diff --git a/tools_webrtc/autoroller/unittests/testdata/roll_deps/DEPS.chromium.new b/tools_webrtc/autoroller/unittests/testdata/roll_deps/DEPS.chromium.new index 6cd6b04b64..a5be523acc 100644 --- a/tools_webrtc/autoroller/unittests/testdata/roll_deps/DEPS.chromium.new +++ b/tools_webrtc/autoroller/unittests/testdata/roll_deps/DEPS.chromium.new @@ -26,4 +26,18 @@ deps = { # Script expects to find these markers. # === ANDROID_DEPS Generated Code Start === # === ANDROID_DEPS Generated Code End === + + # Entry that uses GCS + 'src/third_party/js_code_coverage': { + 'dep_type': 'gcs', + 'bucket': 'chromium-nodejs', + 'objects': [ + { + 'object_name': 'js_code_coverage/d538975c93eefc7bafd599b50f867e90c1ef17f4', + 'sha256sum': '646bb00ced0a930b2eb1e4dbcfac18ebbb8f889bb80599e0254d9d6505427915', + 'size_bytes': 1469186, + 'generation': 1657780123604339, + }, + ], + }, } diff --git a/tools_webrtc/chromiumos/OWNERS b/tools_webrtc/chromiumos/OWNERS new file mode 100644 index 0000000000..a15b0e6155 --- /dev/null +++ b/tools_webrtc/chromiumos/OWNERS @@ -0,0 +1,3 @@ +aaronyu@google.com +pteerapong@google.com +mbonadei@webrtc.org diff --git a/tools_webrtc/ensure_webcam_is_running.py b/tools_webrtc/ensure_webcam_is_running.py deleted file mode 100755 index 4428d79bd8..0000000000 --- a/tools_webrtc/ensure_webcam_is_running.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env vpython3 - -# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. -"""Checks if a virtual webcam is running and starts it if not. - -Returns a non-zero return code if the webcam could not be started. - -Prerequisites: -* The Python interpreter must have the psutil package installed. -* Windows: a scheduled task named 'ManyCam' must exist and be configured to - launch ManyCam preconfigured to auto-play the test clip. -* Mac: ManyCam must be installed in the default location and be preconfigured - to auto-play the test clip. -* Linux: Not implemented - -NOTICE: When running this script as a buildbot step, make sure to set -usePTY=False for the build step when adding it, or the subprocess will die as -soon the step has executed. - -If any command line arguments are passed to the script, it is executed as a -command in a subprocess. -""" - -import subprocess -import sys -# psutil is not installed on non-Linux machines by default. 
-import psutil # pylint: disable=F0401 - -WEBCAM_WIN = ('schtasks', '/run', '/tn', 'ManyCam') -WEBCAM_MAC = ('open', '/Applications/ManyCam/ManyCam.app') - - -def IsWebCamRunning(): - if sys.platform == 'win32': - process_name = 'ManyCam.exe' - elif sys.platform.startswith('darwin'): - process_name = 'ManyCam' - elif sys.platform.startswith('linux'): - # TODO(bugs.webrtc.org/9636): Currently a no-op on Linux: sw webcams no - # longer in use. - print('Virtual webcam: no-op on Linux') - return True - else: - raise Exception('Unsupported platform: %s' % sys.platform) - for p in psutil.process_iter(): - try: - if process_name == p.name: - print('Found a running virtual webcam (%s with PID %s)' % - (p.name, p.pid)) - return True - except psutil.AccessDenied: - pass # This is normal if we query sys processes, etc. - return False - - -def StartWebCam(): - try: - if sys.platform == 'win32': - subprocess.check_call(WEBCAM_WIN) - print('Successfully launched virtual webcam.') - elif sys.platform.startswith('darwin'): - subprocess.check_call(WEBCAM_MAC) - print('Successfully launched virtual webcam.') - elif sys.platform.startswith('linux'): - # TODO(bugs.webrtc.org/9636): Currently a no-op on Linux: sw webcams no - # longer in use. - print('Not implemented on Linux') - - except Exception as e: - print('Failed to launch virtual webcam: %s' % e) - return False - - return True - - -def _ForcePythonInterpreter(cmd): - """Returns the fixed command line to call the right python executable.""" - out = cmd[:] - if out[0] == 'vpython3': - out[0] = sys.executable - elif out[0].endswith('.py'): - out.insert(0, sys.executable) - return out - - -def Main(argv): - if not IsWebCamRunning(): - if not StartWebCam(): - return 1 - - if argv: - return subprocess.call(_ForcePythonInterpreter(argv)) - return 0 - - -if __name__ == '__main__': - sys.exit(Main(sys.argv[1:])) diff --git a/tools_webrtc/get_landmines.py b/tools_webrtc/get_landmines.py index 18bc413e25..aa4edbefb0 100755 --- a/tools_webrtc/get_landmines.py +++ b/tools_webrtc/get_landmines.py @@ -8,8 +8,8 @@ # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. """ -This file emits the list of reasons why a particular build needs to be clobbered -(or a list of 'landmines'). +This file emits the list of reasons why a particular build needs to +be clobbered (or a list of 'landmines'). """ import os @@ -23,52 +23,60 @@ host_os = landmine_utils.host_os # pylint: disable=invalid-name +# pylint: disable=line-too-long def print_landmines(): # pylint: disable=invalid-name - """ + """ ALL LANDMINES ARE EMITTED FROM HERE. """ - # DO NOT add landmines as part of a regular CL. Landmines are a last-effort - # bandaid fix if a CL that got landed has a build dependency bug and all - # bots need to be cleaned up. If you're writing a new CL that causes build - # dependency problems, fix the dependency problems instead of adding a - # landmine. - # See the Chromium version in src/build/get_landmines.py for usage examples. 
- print('Clobber to remove out/{Debug,Release}/args.gn (webrtc:5070)') - if host_os() == 'win': - print('Clobber to resolve some issues with corrupt .pdb files on bots.') - print('Clobber due to corrupt .pdb files (after #14623)') - print('Clobber due to Win 64-bit Debug linking error (crbug.com/668961)') - print('Clobber due to Win Clang Debug linking errors in ' - 'https://codereview.webrtc.org/2786603002') - print('Clobber due to Win Debug linking errors in ' - 'https://codereview.webrtc.org/2832063003/') - print('Clobber win x86 bots (issues with isolated files).') - print('Clobber because of libc++ issue') - print('Clobber because of libc++ issue - take 2') - print('Clobber because of libc++ issue - take 3') - print('Clobber because of libc++ issue - take 4 (crbug.com/1337238)') - print('Clobber because of libc++ issue - take 5 (crbug.com/1337238)') - print('Clobber because of libc++ issue - take 6 (crbug.com/1337238)') - if host_os() == 'mac': - print('Clobber due to iOS compile errors (crbug.com/694721)') - print('Clobber to unblock https://codereview.webrtc.org/2709573003') - print('Clobber to fix https://codereview.webrtc.org/2709573003 after ' - 'landing') - print('Clobber to fix https://codereview.webrtc.org/2767383005 before' - 'landing (changing rtc_executable -> rtc_test on iOS)') - print('Clobber to fix https://codereview.webrtc.org/2767383005 before' - 'landing (changing rtc_executable -> rtc_test on iOS)') - print('Another landmine for low_bandwidth_audio_test (webrtc:7430)') - print('Clobber to change neteq_rtpplay type to executable') - print('Clobber to remove .xctest files.') - print('Clobber to remove .xctest files (take 2).') - print('Switching rtc_executable to rtc_test') + # DO NOT add landmines as part of a regular CL. Landmines are a last-effort + # bandaid fix if a CL that got landed has a build dependency bug and all + # bots need to be cleaned up. If you're writing a new CL that causes build + # dependency problems, fix the dependency problems instead of adding a + # landmine. + # See the Chromium version in src/build/get_landmines.py for usage examples. 
+ print('Clobber to remove out/{Debug,Release}/args.gn (webrtc:5070)') + if host_os() == 'win': + print( + 'Clobber to resolve some issues with corrupt .pdb files on bots.') + print('Clobber due to corrupt .pdb files (after #14623)') + print( + 'Clobber due to Win 64-bit Debug linking error (crbug.com/668961)') + print('Clobber due to Win Clang Debug linking errors in ' + 'https://codereview.webrtc.org/2786603002') + print('Clobber due to Win Debug linking errors in ' + 'https://codereview.webrtc.org/2832063003/') + print('Clobber win x86 bots (issues with isolated files).') + print('Clobber because of libc++ issue') + print('Clobber because of libc++ issue - take 2') + print('Clobber because of libc++ issue - take 3') + print('Clobber because of libc++ issue - take 4 (crbug.com/1337238)') + print('Clobber because of libc++ issue - take 5 (crbug.com/1337238)') + print('Clobber because of libc++ issue - take 6 (crbug.com/1337238)') + print('Clobber because b/367066321') + if host_os() == 'mac': + print('Clobber due to iOS compile errors (crbug.com/694721)') + print('Clobber to unblock https://codereview.webrtc.org/2709573003') + print('Clobber to fix https://codereview.webrtc.org/2709573003 after ' + 'landing') + print('Clobber to fix https://codereview.webrtc.org/2767383005 before' + 'landing (changing rtc_executable -> rtc_test on iOS)') + print('Clobber to fix https://codereview.webrtc.org/2767383005 before' + 'landing (changing rtc_executable -> rtc_test on iOS)') + print('Another landmine for low_bandwidth_audio_test (webrtc:7430)') + print('Clobber to change neteq_rtpplay type to executable') + print('Clobber to remove .xctest files.') + print('Clobber to remove .xctest files (take 2).') + print('Switching rtc_executable to rtc_test') + print('Lets clobber iOS due to signing issue b/396118151') + print('Lets clobber iOS due to signing issue b/396118151 (2nd try)') + if host_os() == 'android': + print('Clobber due to Android "compile confirm no-op" errors.') def main(): - print_landmines() - return 0 + print_landmines() + return 0 if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/gn_check_autofix.py b/tools_webrtc/gn_check_autofix.py old mode 100644 new mode 100755 index c68e370037..d750e46f54 --- a/tools_webrtc/gn_check_autofix.py +++ b/tools_webrtc/gn_check_autofix.py @@ -10,15 +10,19 @@ """ This tool tries to fix (some) errors reported by `gn gen --check` or `gn check`. -It will run `mb gen` in a temporary directory and it is really useful to -check for different configurations. +If a command line flag `-C out/` is supplied, it will run `gn gen --check` +in that directory. Otherwise it will run `mb gen` in a temporary directory +which is useful to check for different configurations. 
Usage: + $ vpython3 tools_webrtc/gn_check_autofix.py -C out/Default + or $ vpython3 tools_webrtc/gn_check_autofix.py -m some_mater -b some_bot or $ vpython3 tools_webrtc/gn_check_autofix.py -c some_mb_config """ +import argparse import os import re import shutil @@ -39,70 +43,71 @@ class TemporaryDirectory: - def __init__(self): - self._closed = False - self._name = None - self._name = tempfile.mkdtemp() - def __enter__(self): - return self._name + def __init__(self): + self._closed = False + self._name = None + self._name = tempfile.mkdtemp() + + def __enter__(self): + return self._name - def __exit__(self, exc, value, _tb): - if self._name and not self._closed: - shutil.rmtree(self._name) - self._closed = True + def __exit__(self, exc, value, _tb): + if self._name and not self._closed: + shutil.rmtree(self._name) + self._closed = True def Run(cmd): - print('Running:', ' '.join(cmd)) - sub = subprocess.Popen(cmd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=True) - return sub.communicate() - - -def FixErrors(filename, missing_deps, deleted_sources): - with open(filename) as f: - lines = f.readlines() - - fixed_file = '' - indentation_level = None - for line in lines: - match = TARGET_RE.match(line) - if match: - target = match.group('target_name') - if target in missing_deps: - indentation_level = match.group('indentation_level') - elif indentation_level is not None: - match = re.match(indentation_level + '}$', line) - if match: - line = ('deps = [\n' + ''.join(' "' + dep + '",\n' - for dep in missing_deps[target]) + - ']\n') + line - indentation_level = None - elif line.strip().startswith('deps = ['): - joined_deps = ''.join(' "' + dep + '",\n' - for dep in missing_deps[target]) - line = line.replace('deps = [', 'deps = [' + joined_deps) - indentation_level = None - - if line.strip() not in deleted_sources: - fixed_file += line - - with open(filename, 'w') as f: - f.write(fixed_file) - - Run(['gn', 'format', filename]) - - -def FirstNonEmpty(iterable): - """Return first item which evaluates to True, or fallback to None.""" - return next((x for x in iterable if x), None) - - -def Rebase(base_path, dependency_path, dependency): - """Adapt paths so they work both in stand-alone WebRTC and Chromium tree. 
+ print('Running:', ' '.join(cmd)) + sub = subprocess.Popen(cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True) + return sub.communicate() + + +def fix_errors(filename, missing_deps, deleted_sources): + with open(filename) as file: + lines = file.readlines() + + fixed_file = '' + indentation_level = None + for line in lines: + match = TARGET_RE.match(line) + if match: + target = match.group('target_name') + if target in missing_deps: + indentation_level = match.group('indentation_level') + elif indentation_level is not None: + match = re.match(indentation_level + '}$', line) + if match: + line = ('deps = [\n' + ''.join(' "' + dep + '",\n' + for dep in missing_deps[target]) + + ']\n') + line + indentation_level = None + elif line.strip().startswith('deps = ['): + joined_deps = ''.join(' "' + dep + '",\n' + for dep in missing_deps[target]) + line = line.replace('deps = [', 'deps = [' + joined_deps) + indentation_level = None + + if line.strip() not in deleted_sources: + fixed_file += line + + with open(filename, 'w') as file: + file.write(fixed_file) + + Run(['gn', 'format', filename]) + + +def first_non_empty(iterable): + """Return first item which evaluates to True, or fallback to None.""" + return next((x for x in iterable if x), None) + + +def rebase(base_path, dependency_path, dependency): + """Adapt paths so they work both in stand-alone WebRTC and Chromium tree. To cope with varying top-level directory (WebRTC VS Chromium), we use: * relative paths for WebRTC modules. @@ -119,83 +124,105 @@ def Rebase(base_path, dependency_path, dependency): Full target path (E.g. '../rtc_base/time:timestamp_extrapolator'). """ - root = FirstNonEmpty(dependency_path.split('/')) - if root in CHROMIUM_DIRS: - # Chromium paths must remain absolute. E.g. //third_party//abseil-cpp... - rebased = dependency_path - else: - base_path = base_path.split(os.path.sep) - dependency_path = dependency_path.split(os.path.sep) + root = first_non_empty(dependency_path.split('/')) + if root in CHROMIUM_DIRS: + # Chromium paths must remain absolute. E.g. + # //third_party//abseil-cpp... 
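# A quick illustration of rebase() (hypothetical inputs; this relies on
# 'third_party' being listed in CHROMIUM_DIRS, as the comment above implies):
#   rebase('//modules/audio_coding', '//third_party/abseil-cpp', 'absl')
#       returns '//third_party/abseil-cpp:absl'  (Chromium path kept absolute)
#   rebase('//modules/audio_coding', '//api/units', 'time_delta')
#       returns '../../api/units:time_delta'     (WebRTC path made relative)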
+        rebased = dependency_path
+    else:
+        base_path = base_path.split(os.path.sep)
+        dependency_path = dependency_path.split(os.path.sep)
 
-  first_difference = None
-  shortest_length = min(len(dependency_path), len(base_path))
-  for i in range(shortest_length):
-    if dependency_path[i] != base_path[i]:
-      first_difference = i
-      break
+        first_difference = None
+        shortest_length = min(len(dependency_path), len(base_path))
+        for i in range(shortest_length):
+            if dependency_path[i] != base_path[i]:
+                first_difference = i
+                break
 
-  first_difference = first_difference or shortest_length
-  base_path = base_path[first_difference:]
-  dependency_path = dependency_path[first_difference:]
-  rebased = os.path.sep.join((['..'] * len(base_path)) + dependency_path)
-  return rebased + ':' + dependency
+        first_difference = first_difference or shortest_length
+        base_path = base_path[first_difference:]
+        dependency_path = dependency_path[first_difference:]
+        rebased = os.path.sep.join((['..'] * len(base_path)) + dependency_path)
+        return rebased + ':' + dependency
 
 
 def main():
-  deleted_sources = set()
-  errors_by_file = defaultdict(lambda: defaultdict(set))
-
-  with TemporaryDirectory() as tmp_dir:
-    mb_script_path = os.path.join(SCRIPT_DIR, 'mb', 'mb.py')
-    mb_config_file_path = os.path.join(SCRIPT_DIR, 'mb', 'mb_config.pyl')
-    mb_gen_command = ([
-        mb_script_path,
-        'gen',
-        tmp_dir,
-        '--config-file',
-        mb_config_file_path,
-    ] + sys.argv[1:])
-
-    mb_output = Run(mb_gen_command)
-    errors = mb_output[0].split('ERROR')[1:]
-
-    if mb_output[1]:
-      print(mb_output[1])
-      return 1
-
-    for error in errors:
-      error = error.split('\n')
-      target_msg = 'The target:'
-      if target_msg not in error:
-        target_msg = 'It is not in any dependency of'
-      if target_msg not in error:
-        print('\n'.join(error))
-        continue
-      index = error.index(target_msg) + 1
-      path, target = error[index].strip().split(':')
-      if error[index + 1] in ('is including a file from the target:',
-                              'The include file is in the target(s):'):
-        dep = error[index + 2].strip()
-        dep_path, dep = dep.split(':')
-        dep = Rebase(path, dep_path, dep)
-        # Replacing /target:target with /target
-        dep = re.sub(r'/(\w+):(\1)$', r'/\1', dep)
-        # Replacing target:target with target
-        dep = re.sub(r'^(\w+):(\1)$', r'\1', dep)
-        path = os.path.join(path[2:], 'BUILD.gn')
-        errors_by_file[path][target].add(dep)
-      elif error[index + 1] == 'has a source file:':
-        deleted_file = '"' + os.path.basename(error[index + 2].strip()) + '",'
-        deleted_sources.add(deleted_file)
+    helptext = """
+This tool tries to fix (some) errors reported by `gn gen --check`.
+
+If a command line flag `-C out/` is supplied, it will run `gn gen --check`
+in that directory. Otherwise it will run `mb gen` in a temporary directory
+with all other command line arguments forwarded to `mb gen`. This mode is
+useful to check for different configurations."""
+
+    parser = argparse.ArgumentParser(
+        description=helptext,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
+    parser.add_argument('-C',
+                        dest='local_build_dir',
+                        help='Path to a local build dir, e.g.
out/Default') + (flags, argv_to_forward) = parser.parse_known_args(sys.argv[1:]) + + deleted_sources = set() + errors_by_file = defaultdict(lambda: defaultdict(set)) + + if flags.local_build_dir: + mb_output = Run(["gn", "gen", "--check", flags.local_build_dir]) else: - print('\n'.join(error)) - continue - - for path, missing_deps in list(errors_by_file.items()): - FixErrors(path, missing_deps, deleted_sources) - - return 0 + with TemporaryDirectory() as tmp_dir: + mb_script_path = os.path.join(SCRIPT_DIR, 'mb', 'mb.py') + mb_config_file_path = os.path.join(SCRIPT_DIR, 'mb', + 'mb_config.pyl') + mb_gen_command = ([ + mb_script_path, + 'gen', + tmp_dir, + '--config-file', + mb_config_file_path, + ] + argv_to_forward) + mb_output = Run(mb_gen_command) + + errors = mb_output[0].split('ERROR')[1:] + + if mb_output[1]: + print(mb_output[1]) + return 1 + + for error in errors: + error = error.split('\n') + target_msg = 'The target:' + if target_msg not in error: + target_msg = 'It is not in any dependency of' + if target_msg not in error: + print('\n'.join(error)) + continue + index = error.index(target_msg) + 1 + path, target = error[index].strip().split(':') + if error[index + 1] in ('is including a file from the target:', + 'The include file is in the target(s):'): + dep = error[index + 2].strip() + dep_path, dep = dep.split(':') + dep = rebase(path, dep_path, dep) + # Replacing /target:target with /target + dep = re.sub(r'/(\w+):(\1)$', r'/\1', dep) + # Replacing target:target with target + dep = re.sub(r'^(\w+):(\1)$', r'\1', dep) + path = os.path.join(path[2:], 'BUILD.gn') + errors_by_file[path][target].add(dep) + elif error[index + 1] == 'has a source file:': + deleted_file = '"' + os.path.basename( + error[index + 2].strip()) + '",' + deleted_sources.add(deleted_file) + else: + print('\n'.join(error)) + continue + + for path, missing_deps in list(errors_by_file.items()): + fix_errors(path, missing_deps, deleted_sources) + + return 0 if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/ios/build_ios_libs.py b/tools_webrtc/ios/build_ios_libs.py index bc6a6b09f6..3304ec3353 100755 --- a/tools_webrtc/ios/build_ios_libs.py +++ b/tools_webrtc/ios/build_ios_libs.py @@ -40,8 +40,8 @@ 'device:arm64', 'simulator:arm64', 'simulator:x64' ] IOS_MINIMUM_DEPLOYMENT_TARGET = { - 'device': '11.0', - 'simulator': '11.0', + 'device': '14.0', + 'simulator': '14.0', 'catalyst': '14.0' } LIBVPX_BUILD_VP9 = True @@ -51,308 +51,309 @@ def _ParseArgs(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument('--build_config', - default='release', - choices=['debug', 'release'], - help='The build config. Can be "debug" or "release". ' - 'Defaults to "release".') - parser.add_argument('--arch', - nargs='+', - default=DEFAULT_ARCHS, - choices=ENABLED_ARCHS, - help='Architectures to build. Defaults to %(default)s.') - parser.add_argument( - '-c', - '--clean', - action='store_true', - default=False, - help='Removes the previously generated build output, if any.') - parser.add_argument('-p', - '--purify', - action='store_true', - default=False, - help='Purifies the previously generated build output by ' - 'removing the temporary results used when (re)building.') - parser.add_argument( - '-o', - '--output-dir', - type=os.path.abspath, - default=SDK_OUTPUT_DIR, - help='Specifies a directory to output the build artifacts to. 
' - 'If specified together with -c, deletes the dir.') - parser.add_argument( - '-r', - '--revision', - type=int, - default=0, - help='Specifies a revision number to embed if building the framework.') - parser.add_argument('--verbose', - action='store_true', - default=False, - help='Debug logging.') - parser.add_argument('--use-goma', - action='store_true', - default=False, - help='Use goma to build.') - parser.add_argument('--use-remoteexec', - action='store_true', - default=False, - help='Use RBE to build.') - parser.add_argument('--deployment-target', - default=IOS_MINIMUM_DEPLOYMENT_TARGET['device'], - help='Raise the minimum deployment target to build for. ' - 'Cannot be lowered below 12.0 for iOS/iPadOS ' - 'and 14.0 for Catalyst.') - parser.add_argument( - '--extra-gn-args', - default=[], - nargs='*', - help='Additional GN args to be used during Ninja generation.') - - return parser.parse_args() + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument('--build_config', + default='release', + choices=['debug', 'release'], + help='The build config. Can be "debug" or "release". ' + 'Defaults to "release".') + parser.add_argument( + '--arch', + nargs='+', + default=DEFAULT_ARCHS, + choices=ENABLED_ARCHS, + help='Architectures to build. Defaults to %(default)s.') + parser.add_argument( + '-c', + '--clean', + action='store_true', + default=False, + help='Removes the previously generated build output, if any.') + parser.add_argument( + '-p', + '--purify', + action='store_true', + default=False, + help='Purifies the previously generated build output by ' + 'removing the temporary results used when (re)building.') + parser.add_argument( + '-o', + '--output-dir', + type=os.path.abspath, + default=SDK_OUTPUT_DIR, + help='Specifies a directory to output the build artifacts to. ' + 'If specified together with -c, deletes the dir.') + parser.add_argument( + '-r', + '--revision', + type=int, + default=0, + help='Specifies a revision number to embed if building the framework.') + parser.add_argument('--verbose', + action='store_true', + default=False, + help='Debug logging.') + parser.add_argument('--use-remoteexec', + action='store_true', + default=False, + help='Use RBE to build.') + parser.add_argument( + '--deployment-target', + default=IOS_MINIMUM_DEPLOYMENT_TARGET['device'], + help='Raise the minimum deployment target to build for. 
' + 'Cannot be lowered below 12.0 for iOS/iPadOS ' + 'and 14.0 for Catalyst.') + parser.add_argument( + '--extra-gn-args', + default=[], + nargs='*', + help='Additional GN args to be used during Ninja generation.') + + return parser.parse_args() def _RunCommand(cmd): - logging.debug('Running: %r', cmd) - subprocess.check_call(cmd, cwd=SRC_DIR) + logging.debug('Running: %r', cmd) + subprocess.check_call(cmd, cwd=SRC_DIR) def _CleanArtifacts(output_dir): - if os.path.isdir(output_dir): - logging.info('Deleting %s', output_dir) - shutil.rmtree(output_dir) + if os.path.isdir(output_dir): + logging.info('Deleting %s', output_dir) + shutil.rmtree(output_dir) def _CleanTemporary(output_dir, architectures): - if os.path.isdir(output_dir): - logging.info('Removing temporary build files.') - for arch in architectures: - arch_lib_path = os.path.join(output_dir, arch) - if os.path.isdir(arch_lib_path): - shutil.rmtree(arch_lib_path) + if os.path.isdir(output_dir): + logging.info('Removing temporary build files.') + for arch in architectures: + arch_lib_path = os.path.join(output_dir, arch) + if os.path.isdir(arch_lib_path): + shutil.rmtree(arch_lib_path) def _ParseArchitecture(architectures): - result = dict() - for arch in architectures: - if ":" in arch: - target_environment, target_cpu = arch.split(":") - else: - logging.warning('The environment for build is not specified.') - logging.warning('It is assumed based on cpu type.') - logging.warning('See crbug.com/1138425 for more details.') - if arch == "x64": - target_environment = "simulator" - else: - target_environment = "device" - target_cpu = arch - archs = result.get(target_environment) - if archs is None: - result[target_environment] = {target_cpu} - else: - archs.add(target_cpu) - - return result + result = dict() + for arch in architectures: + if ":" in arch: + target_environment, target_cpu = arch.split(":") + else: + logging.warning('The environment for build is not specified.') + logging.warning('It is assumed based on cpu type.') + logging.warning('See crbug.com/1138425 for more details.') + if arch == "x64": + target_environment = "simulator" + else: + target_environment = "device" + target_cpu = arch + archs = result.get(target_environment) + if archs is None: + result[target_environment] = {target_cpu} + else: + archs.add(target_cpu) + + return result def _VersionMax(*versions): - return max( - *versions, - key=lambda version: [int(component) for component in version.split('.')]) + return max(*versions, + key=lambda version: + [int(component) for component in version.split('.')]) def BuildWebRTC(output_dir, target_environment, target_arch, flavor, gn_target_name, ios_deployment_target, libvpx_build_vp9, - use_goma, use_remoteexec, extra_gn_args): - gn_args = [ - 'target_os="ios"', - 'ios_enable_code_signing=false', - 'is_component_build=false', - 'rtc_include_tests=false', - ] - - # Add flavor option. 
- if flavor == 'debug': - gn_args.append('is_debug=true') - elif flavor == 'release': - gn_args.append('is_debug=false') - else: - raise ValueError('Unexpected flavor type: %s' % flavor) - - gn_args.append('target_environment="%s"' % target_environment) - - gn_args.append('target_cpu="%s"' % target_arch) - - gn_args.append('ios_deployment_target="%s"' % ios_deployment_target) - - gn_args.append('rtc_libvpx_build_vp9=' + - ('true' if libvpx_build_vp9 else 'false')) - - gn_args.append('use_lld=true') - gn_args.append('use_goma=' + ('true' if use_goma else 'false')) - gn_args.append('use_remoteexec=' + ('true' if use_remoteexec else 'false')) - gn_args.append('rtc_enable_objc_symbol_export=true') - - args_string = ' '.join(gn_args + extra_gn_args) - logging.info('Building WebRTC with args: %s', args_string) - - cmd = [ - sys.executable, - os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'), - 'gen', - output_dir, - '--args=' + args_string, - ] - _RunCommand(cmd) - logging.info('Building target: %s', gn_target_name) - - cmd = [ - os.path.join(SRC_DIR, 'third_party', 'ninja', 'ninja'), - '-C', - output_dir, - gn_target_name, - ] - if use_goma or use_remoteexec: - cmd.extend(['-j', '200']) - _RunCommand(cmd) + use_remoteexec, extra_gn_args): + gn_args = [ + 'target_os="ios"', + 'ios_enable_code_signing=false', + 'is_component_build=false', + 'rtc_include_tests=false', + ] + # Add flavor option. + if flavor == 'debug': + gn_args.append('is_debug=true') + elif flavor == 'release': + gn_args.append('is_debug=false') + else: + raise ValueError('Unexpected flavor type: %s' % flavor) -def main(): - args = _ParseArgs() + gn_args.append('target_environment="%s"' % target_environment) - logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) + gn_args.append('target_cpu="%s"' % target_arch) - if args.clean: - _CleanArtifacts(args.output_dir) - return 0 + gn_args.append('ios_deployment_target="%s"' % ios_deployment_target) - # architectures is typed as Dict[str, Set[str]], - # where key is for the environment (device or simulator) - # and value is for the cpu type. - architectures = _ParseArchitecture(args.arch) - gn_args = args.extra_gn_args + gn_args.append('rtc_libvpx_build_vp9=' + + ('true' if libvpx_build_vp9 else 'false')) - if args.purify: - _CleanTemporary(args.output_dir, list(architectures.keys())) - return 0 + gn_args.append('use_lld=true') + gn_args.append('use_remoteexec=' + ('true' if use_remoteexec else 'false')) + gn_args.append('rtc_enable_objc_symbol_export=true') + + args_string = ' '.join(gn_args + extra_gn_args) + logging.info('Building WebRTC with args: %s', args_string) - gn_target_name = 'framework_objc' - gn_args.append('enable_dsyms=true') - gn_args.append('enable_stripping=true') - - # Build all architectures. - framework_paths = [] - all_lib_paths = [] - for (environment, archs) in list(architectures.items()): - ios_deployment_target = _VersionMax( - args.deployment_target, IOS_MINIMUM_DEPLOYMENT_TARGET[environment]) - framework_path = os.path.join(args.output_dir, environment) - framework_paths.append(framework_path) - lib_paths = [] - for arch in archs: - lib_path = os.path.join(framework_path, arch + '_libs') - lib_paths.append(lib_path) - BuildWebRTC(lib_path, environment, arch, args.build_config, - gn_target_name, ios_deployment_target, LIBVPX_BUILD_VP9, - args.use_goma, args.use_remoteexec, gn_args) - all_lib_paths.extend(lib_paths) - - # Combine the slices. 
- dylib_path = os.path.join(SDK_FRAMEWORK_NAME, 'WebRTC') - # Dylibs will be combined, all other files are the same across archs. - shutil.rmtree(os.path.join(framework_path, SDK_FRAMEWORK_NAME), - ignore_errors=True) - shutil.copytree(os.path.join(lib_paths[0], SDK_FRAMEWORK_NAME), - os.path.join(framework_path, SDK_FRAMEWORK_NAME), - symlinks=True) - logging.info('Merging framework slices for %s.', environment) - dylib_paths = [os.path.join(path, dylib_path) for path in lib_paths] - out_dylib_path = os.path.join(framework_path, dylib_path) - if os.path.islink(out_dylib_path): - out_dylib_path = os.path.join(os.path.dirname(out_dylib_path), - os.readlink(out_dylib_path)) - try: - os.remove(out_dylib_path) - except OSError: - pass - cmd = ['lipo'] + dylib_paths + ['-create', '-output', out_dylib_path] + cmd = [ + sys.executable, + os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'), + 'gen', + output_dir, + '--args=' + args_string, + ] _RunCommand(cmd) + logging.info('Building target: %s', gn_target_name) - # Merge the dSYM slices. - lib_dsym_dir_path = os.path.join(lib_paths[0], SDK_DSYM_NAME) - if os.path.isdir(lib_dsym_dir_path): - shutil.rmtree(os.path.join(framework_path, SDK_DSYM_NAME), - ignore_errors=True) - shutil.copytree(lib_dsym_dir_path, - os.path.join(framework_path, SDK_DSYM_NAME)) - logging.info('Merging dSYM slices.') - dsym_path = os.path.join(SDK_DSYM_NAME, 'Contents', 'Resources', 'DWARF', - 'WebRTC') - lib_dsym_paths = [os.path.join(path, dsym_path) for path in lib_paths] - out_dsym_path = os.path.join(framework_path, dsym_path) - try: - os.remove(out_dsym_path) - except OSError: - pass - cmd = ['lipo'] + lib_dsym_paths + ['-create', '-output', out_dsym_path] - _RunCommand(cmd) - - # Check for Mac-style WebRTC.framework/Resources/ (for Catalyst)... - resources_dir = os.path.join(framework_path, SDK_FRAMEWORK_NAME, - 'Resources') - if not os.path.exists(resources_dir): - # ...then fall back to iOS-style WebRTC.framework/ - resources_dir = os.path.dirname(resources_dir) - - # Modify the version number. - # Format should be ... - # e.g. 55.0.14986 means - # branch cut 55, no hotfixes, and revision 14986. - infoplist_path = os.path.join(resources_dir, 'Info.plist') - cmd = [ - 'PlistBuddy', '-c', 'Print :CFBundleShortVersionString', - infoplist_path - ] - major_minor = subprocess.check_output(cmd).decode('utf-8').strip() - version_number = '%s.%s' % (major_minor, args.revision) - logging.info('Substituting revision number: %s', version_number) - cmd = [ - 'PlistBuddy', '-c', 'Set :CFBundleVersion ' + version_number, - infoplist_path - ] - _RunCommand(cmd) - _RunCommand(['plutil', '-convert', 'binary1', infoplist_path]) - - xcframework_dir = os.path.join(args.output_dir, SDK_XCFRAMEWORK_NAME) - if os.path.isdir(xcframework_dir): - shutil.rmtree(xcframework_dir) - - logging.info('Creating xcframework.') - cmd = ['xcodebuild', '-create-xcframework', '-output', xcframework_dir] - - # Apparently, xcodebuild needs absolute paths for input arguments - for framework_path in framework_paths: - cmd += [ - '-framework', - os.path.abspath(os.path.join(framework_path, SDK_FRAMEWORK_NAME)), + cmd = [ + os.path.join(SRC_DIR, 'third_party', 'ninja', 'ninja'), + '-C', + output_dir, + gn_target_name, ] - dsym_full_path = os.path.join(framework_path, SDK_DSYM_NAME) - if os.path.exists(dsym_full_path): - cmd += ['-debug-symbols', os.path.abspath(dsym_full_path)] + if use_remoteexec: + cmd.extend(['-j', '200']) + _RunCommand(cmd) - _RunCommand(cmd) - # Generate the license file. 
- logging.info('Generate license file.') - gn_target_full_name = '//sdk:' + gn_target_name - builder = LicenseBuilder(all_lib_paths, [gn_target_full_name]) - builder.GenerateLicenseText( - os.path.join(args.output_dir, SDK_XCFRAMEWORK_NAME)) +def main(): + args = _ParseArgs() + + logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) + + if args.clean: + _CleanArtifacts(args.output_dir) + return 0 + + # architectures is typed as Dict[str, Set[str]], + # where key is for the environment (device or simulator) + # and value is for the cpu type. + architectures = _ParseArchitecture(args.arch) + gn_args = args.extra_gn_args + + if args.purify: + _CleanTemporary(args.output_dir, list(architectures.keys())) + return 0 + + gn_target_name = 'framework_objc' + gn_args.append('enable_dsyms=true') + gn_args.append('enable_stripping=true') + + # Build all architectures. + framework_paths = [] + all_lib_paths = [] + for (environment, archs) in list(architectures.items()): + ios_deployment_target = _VersionMax( + args.deployment_target, IOS_MINIMUM_DEPLOYMENT_TARGET[environment]) + framework_path = os.path.join(args.output_dir, environment) + framework_paths.append(framework_path) + lib_paths = [] + for arch in archs: + lib_path = os.path.join(framework_path, arch + '_libs') + lib_paths.append(lib_path) + BuildWebRTC(lib_path, environment, arch, args.build_config, + gn_target_name, ios_deployment_target, + LIBVPX_BUILD_VP9, args.use_remoteexec, gn_args) + all_lib_paths.extend(lib_paths) + + # Combine the slices. + dylib_path = os.path.join(SDK_FRAMEWORK_NAME, 'WebRTC') + # Dylibs will be combined, all other files are the same across archs. + shutil.rmtree(os.path.join(framework_path, SDK_FRAMEWORK_NAME), + ignore_errors=True) + shutil.copytree(os.path.join(lib_paths[0], SDK_FRAMEWORK_NAME), + os.path.join(framework_path, SDK_FRAMEWORK_NAME), + symlinks=True) + logging.info('Merging framework slices for %s.', environment) + dylib_paths = [os.path.join(path, dylib_path) for path in lib_paths] + out_dylib_path = os.path.join(framework_path, dylib_path) + if os.path.islink(out_dylib_path): + out_dylib_path = os.path.join(os.path.dirname(out_dylib_path), + os.readlink(out_dylib_path)) + try: + os.remove(out_dylib_path) + except OSError: + pass + cmd = ['lipo'] + dylib_paths + ['-create', '-output', out_dylib_path] + _RunCommand(cmd) + + # Merge the dSYM slices. + lib_dsym_dir_path = os.path.join(lib_paths[0], SDK_DSYM_NAME) + if os.path.isdir(lib_dsym_dir_path): + shutil.rmtree(os.path.join(framework_path, SDK_DSYM_NAME), + ignore_errors=True) + shutil.copytree(lib_dsym_dir_path, + os.path.join(framework_path, SDK_DSYM_NAME)) + logging.info('Merging dSYM slices.') + dsym_path = os.path.join(SDK_DSYM_NAME, 'Contents', 'Resources', + 'DWARF', 'WebRTC') + lib_dsym_paths = [ + os.path.join(path, dsym_path) for path in lib_paths + ] + out_dsym_path = os.path.join(framework_path, dsym_path) + try: + os.remove(out_dsym_path) + except OSError: + pass + cmd = ['lipo' + ] + lib_dsym_paths + ['-create', '-output', out_dsym_path] + _RunCommand(cmd) + + # Check for Mac-style WebRTC.framework/Resources/ (for Catalyst)... + resources_dir = os.path.join(framework_path, SDK_FRAMEWORK_NAME, + 'Resources') + if not os.path.exists(resources_dir): + # ...then fall back to iOS-style WebRTC.framework/ + resources_dir = os.path.dirname(resources_dir) + + # Modify the version number. + # Format should be ... + # e.g. 55.0.14986 means + # branch cut 55, no hotfixes, and revision 14986. 
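        # Worked example (values are illustrative): if Info.plist currently has
        # CFBundleShortVersionString '55.0' and the script was run with
        # --revision 14986, the PlistBuddy calls below rewrite CFBundleVersion
        # to '55.0.14986'.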
+ infoplist_path = os.path.join(resources_dir, 'Info.plist') + cmd = [ + 'PlistBuddy', '-c', 'Print :CFBundleShortVersionString', + infoplist_path + ] + major_minor = subprocess.check_output(cmd).decode('utf-8').strip() + version_number = '%s.%s' % (major_minor, args.revision) + logging.info('Substituting revision number: %s', version_number) + cmd = [ + 'PlistBuddy', '-c', 'Set :CFBundleVersion ' + version_number, + infoplist_path + ] + _RunCommand(cmd) + _RunCommand(['plutil', '-convert', 'binary1', infoplist_path]) + + xcframework_dir = os.path.join(args.output_dir, SDK_XCFRAMEWORK_NAME) + if os.path.isdir(xcframework_dir): + shutil.rmtree(xcframework_dir) + + logging.info('Creating xcframework.') + cmd = ['xcodebuild', '-create-xcframework', '-output', xcframework_dir] + + # Apparently, xcodebuild needs absolute paths for input arguments + for framework_path in framework_paths: + cmd += [ + '-framework', + os.path.abspath(os.path.join(framework_path, SDK_FRAMEWORK_NAME)), + ] + dsym_full_path = os.path.join(framework_path, SDK_DSYM_NAME) + if os.path.exists(dsym_full_path): + cmd += ['-debug-symbols', os.path.abspath(dsym_full_path)] - logging.info('Done.') - return 0 + _RunCommand(cmd) + + # Generate the license file. + logging.info('Generate license file.') + gn_target_full_name = '//sdk:' + gn_target_name + builder = LicenseBuilder(all_lib_paths, [gn_target_full_name]) + builder.generate_license_text( + os.path.join(args.output_dir, SDK_XCFRAMEWORK_NAME)) + + logging.info('Done.') + return 0 if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/iwyu/apply-include-cleaner b/tools_webrtc/iwyu/apply-include-cleaner new file mode 100755 index 0000000000..02de99a4ca --- /dev/null +++ b/tools_webrtc/iwyu/apply-include-cleaner @@ -0,0 +1,150 @@ +#!/usr/bin/env bash +# +# Run the include-cleaner tool (iwyu replacement) on a file in the webrtc source +# directory. +# +# +# In order to handle include paths correctly, you need to provide +# a compile DB (aka compile_commands.json). +# You can create it in one of the following ways: +# "gn gen --export-compile-commands path/to/out" +# "tools/clang/scripts/generate_compdb.py -p path/to/out > compile_commands.json" +# If "out/Default" exists, the script will attempt to generate it for you. +# +# clang-include-cleaner is built as part of the "clangd" package in our +# LLVM build. +# Example .gclient file: +# solutions = [ +# { +# "name": "src", +# "url": "https://webrtc.googlesource.com/src.git", +# "deps_file": "DEPS", +# "managed": False, +# "custom_deps": {}, +# "custom_vars" : { +# "checkout_clangd": True, +# "download_remoteexec_cfg" : True, +# } +# }, +# ] + + +CLEANER=third_party/llvm-build/Release+Asserts/bin/clang-include-cleaner +if [ ! -x $CLEANER ]; then + echo "clang-include-cleaner not found" + echo -n "Add '\"checkout_clangd\": True' to 'custom_vars' in your" + echo ".gclient file and run 'gclient sync'." + exit 1 +fi + +# Debug level, also controlled by the "-d" argument. +# Set this to 1 to get more debug information. +# Set this to 2 to also get a dump of the iwyu tool output. 
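# Typical invocation (illustrative only; the source file is just an example
# and out/Default is merely the default workdir):
#   tools_webrtc/iwyu/apply-include-cleaner -w out/Default api/units/time_delta.cc
# Pass -n to only print the proposed changes instead of editing the file.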
+DEBUG=0 + +set -e +if [ $DEBUG -gt 0 ]; then + set -x +fi + +error() { + echo "$*" >&2 + exit 1 +} + +WORKDIR=out/Default + +usage() { + echo "Usage: $0 [-r] file.cc [file2.cc ...]" + echo "Runs the include-cleaner tool on a list of files" + echo "Arguments:" + echo " -n : Just print changes, don't do them" + echo " -c : Just return non-zero exit code if there are changes, don't do them" + echo " -r : Remove non-required includes from .h file" + echo " -d : Set debug level to " + echo " -w : Specify the workdir (out/Default if not specified)" + echo " -h : Print this help message" +} + +COMMAND=" --edit" +INCLUDE_ARGS="" +GMOCK_INCLUDES="--extra-arg=-I../../third_party/googletest/src/googlemock/include/" +GTEST_INCLUDES="--extra-arg=-I../../third_party/googletest/src/googletest/include/" +CHECK_MODE=false + +while getopts 'd:rncw:h' opts; do + case "${opts}" in + n) COMMAND=" --print=changes" ;; + c) COMMAND=" --print=changes" ; CHECK_MODE=true ;; + r) INCLUDE_ARGS=" --remove" ;; + d) DEBUG=${OPTARG};if [ $DEBUG -gt 0 ]; then set -x; fi ;; + w) WORKDIR=${OPTARG} ;; + h) usage; exit 1 ;; + *) error "Unexpected option ${opts}" ;; + esac +done +shift $(expr $OPTIND - 1 ) + +if [[ -z "$COMPILE_COMMANDS" ]]; then + if [ -d "$WORKDIR" ]; then + if [ ! -f "$WORKDIR/compile_commands.json" ]; then + echo "Generating compile commands file" + tools/clang/scripts/generate_compdb.py -p $WORKDIR > $WORKDIR/compile_commands.json + fi + COMPILE_COMMANDS="$WORKDIR/compile_commands.json" + else + error "Could not generate $WORKDIR/compile_commands.json." + fi +fi + +# To get a list of files in a commit: git diff-tree --no-commit-id --name-only -r HEAD +for FILE in "$@" +do + if [ -z $FILE ] || [ ! -f $FILE ]; then + usage + error "File $FILE is not found" + fi +done + +HAS_OUTPUT=false +for FILE in "$@" +do + OUTPUT=$($CLEANER -p $WORKDIR $INCLUDE_ARGS $GMOCK_INCLUDES $GTEST_INCLUDES $COMMAND $FILE) + + # include-cleaner does not support custom mappings for certain deps + # this ensures that the gtest/gmock deps it inserts are replaced + # with the right paths for those includes. + # Since sed inplace argument acts differently between GNU/BSD based systems + # we handle this here. + case "$(uname -s)" in + Linux*) INPLACE_ARG=( -i );; + Darwin*) INPLACE_ARG=( -i '' );; + *) INPLACE_ARG=( -i ) + esac + IWYU_MAPPING=( "\"gmock\/gmock\.h\":\"test\/gmock\.h\"" + "\"gtest\/gtest\.h\":\"test\/gtest\.h\"" + "\:\"rtc_base\/net_helpers\.h\"" ) + + for mapping in "${IWYU_MAPPING[@]}" ; do + KEY="${mapping%%:*}" + VALUE="${mapping##*:}" + if grep -q "#include ${VALUE}" $FILE; then + OUTPUT=$(echo "$OUTPUT" | sed "/+ ${KEY}/d") + sed "${INPLACE_ARG[@]}" -e "/#include ${KEY}/d" $FILE + else + sed "${INPLACE_ARG[@]}" -e "s@^#include ${KEY}@#include ${VALUE}@g" $FILE + fi + done + + echo "${OUTPUT}" + HAS_OUTPUT=$HAS_OUTPUT || [[ ! -z $OUTPUT ]] +done + +echo "Finished. Check diff, compile, gn gen --check (tools_webrtc/gn_check_autofix.py can fix most of the issues)" +echo "and git cl format before uploading." + +# Return a non-zero exit code if running with "CHECK_MODE" +# and there are changes to apply. +if $CHECK_MODE && [[ ! -z $OUTPUT ]]; then + exit 1 +fi diff --git a/tools_webrtc/iwyu/apply-iwyu b/tools_webrtc/iwyu/apply-iwyu deleted file mode 100755 index 3a20ff3551..0000000000 --- a/tools_webrtc/iwyu/apply-iwyu +++ /dev/null @@ -1,131 +0,0 @@ -#!/usr/bin/env bash -# -# Run the include-what-you-use tool (iwyu) on a file in the webrtc source -# directory. 
-# -# The script uses a subsequent grep pass to remove #include files -# that are problematic to include. -# -# In order to handle include paths correctly, you need to provide -# a compile DB (aka compile_commands.json). -# You can create it in one of the following ways: -# "gn gen --export-compile-commands path/to/out" -# "tools/clang/scripts/generate_compdb.py -p path/to/out > compile_commands.json" -# If "out/Default" exists, the script will attempt to generate it for you. -# -# To get iwyu on Debian/glinux, do "sudo apt-get install iwyu". - -# Debug level, also controlled by the "-d" argument. -# Set this to 1 to get more debug information. -# Set this to 2 to also get a dump of the iwyu tool output. -DEBUG=0 - -set -e -if [ $DEBUG -gt 0 ]; then - set -x -fi - -error() { - echo "$*" >&2 - exit 1 -} - -find_alternates() { - for name in "$@" - do - name_path=$(which "${name}") - if [ ! -z "${name_path}" ]; then - echo ${name_path} - return 0 - fi - done - error "Could not find any of the tools '$@' in PATH." - return 1 -} - -IWYU_TOOL=$(find_alternates iwyu_tool iwyu_tool.py) -FIX_INCLUDE=$(find_alternates fix_include fix_includes.py) -FIX_INCLUDE_ARGS='' -IWYU_TOOL_DIR="${IWYU_TOOL_DIR:-tools_webrtc/iwyu}" -COMPILE_COMMANDS='' - -usage() { - echo "Usage: $0 [ -c compile-commands-file.json ] [-r] file.cc" - echo "Runs the IWYU and fix-include on a CC file and its associated .h file" - echo "Arguments:" - echo " -c compile-commands: Compiler command file" - echo " -r : Remove non-required includes from .h file" - echo " -d : Set debug level to " - echo " -h : Print this help message" - echo "(default command file: out/Default/compile_commands.json - this" - echo "will be generated if not present" -} - -while getopts 'c:d:rh' opts; do - case "${opts}" in - c) COMPILE_COMMANDS="${OPTARG}" ;; - r) FIX_INCLUDE_ARGS="${FIX_INCLUDE_ARGS} --nosafe_headers" ;; - d) DEBUG=${OPTARG};if [ $DEBUG -gt 0 ]; then set -x; fi ;; - h) usage; exit 1 ;; - *) error "Unexpected option ${opts}" ;; - esac -done -shift $(expr $OPTIND - 1 ) - -if [[ -z "$COMPILE_COMMANDS" ]]; then - if [ -d "out/Default" ]; then - if [ ! -f "out/Default/compile_commands.json" ]; then - gn gen --export-compile-commands out/Default - fi - COMPILE_COMMANDS="out/Default/compile_commands.json" - else - error "compile_commands.json must be passed." - fi -fi - -FILE="$1" - -if [ ! -f $FILE_CC ]; then - error "File $FILE is not found" -fi - -# Find the .h file that IWYU will modify, if any. -FILE_CC=$FILE -if [ -f $(dirname $FILE)/$(basename -s .cc $FILE).h ]; then - FILE_H=$(dirname $FILE)/$(basename -s .cc $FILE).h -else - FILE_H="" -fi - -tmpfile=$(realpath $(mktemp iwyu.XXXXXXX)) -trap 'rm -f -- "${tmpfile}"' EXIT - -# IWYU has a confusing set of exit codes. Discard it. -"$IWYU_TOOL" -p "$COMPILE_COMMANDS" "$FILE_CC" -- -Xiwyu --no_fwd_decls \ - -Xiwyu --mapping_file=../../$IWYU_TOOL_DIR/mappings.imp \ - >& ${tmpfile} || echo "IWYU done, code $?" - -if grep 'fatal error' ${tmpfile}; then - echo "iwyu run failed" - cat ${tmpfile} - exit 1 -else - if [ $DEBUG -gt 1 ]; then - cat ${tmpfile} - fi - # In compile_commands.json, the file name is recorded - # as a relative path to the build directory. 
- pushd "$(dirname "$COMPILE_COMMANDS")" || error "pushd failed" - "$FIX_INCLUDE" $FIX_INCLUDE_ARGS < ${tmpfile} || echo "Some files modified" - popd -fi - -grep -v -f tools_webrtc/iwyu/iwyu-filter-list $FILE_CC > $FILE_CC.new -mv $FILE_CC.new $FILE_CC - -if [ -n "$FILE_H" ]; then - grep -v -f tools_webrtc/iwyu/iwyu-filter-list $FILE_H > $FILE_H.new - mv $FILE_H.new $FILE_H -fi - -echo "Finished. Check diff, compile and git cl format before uploading." diff --git a/tools_webrtc/iwyu/apply_include_cleaner.py b/tools_webrtc/iwyu/apply_include_cleaner.py new file mode 100755 index 0000000000..7f89b85bd2 --- /dev/null +++ b/tools_webrtc/iwyu/apply_include_cleaner.py @@ -0,0 +1,251 @@ +#!/usr/bin/env vpython3 + +# Copyright (c) 2025 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. +# +# Run the include-cleaner tool (iwyu replacement) on a file in the webrtc +# source directory. +# +# +# In order to handle include paths correctly, you need to provide +# a compile DB (aka compile_commands.json). +# You can create it in one of the following ways: +# - "gn gen --export-compile-commands path/to/out" +# - "tools/clang/scripts/generate_compdb.py -p path/to/out +# > compile_commands.json" +# If "out/Default" exists, the script will attempt to generate it for you. +# +# clang-include-cleaner is built as part of the "clangd" package in our +# LLVM build. +# Example .gclient file: +# solutions = [ +# { +# "name": "src", +# "url": "https://webrtc.googlesource.com/src.git", +# "deps_file": "DEPS", +# "managed": False, +# "custom_deps": {}, +# "custom_vars" : { +# "checkout_clangd": True, +# "download_remoteexec_cfg" : True, +# } +# }, +# ] + +import argparse +import re +import pathlib +import subprocess +import sys +from typing import Tuple + +_CLEANER_BINARY_PATH = pathlib.Path( + "third_party/llvm-build/Release+Asserts/bin/clang-include-cleaner") +_DEFAULT_WORKDIR = pathlib.Path("out/Default") +_EXTRA_ARGS = [ + "-I../../third_party/googletest/src/googlemock/include/", + "-I../../third_party/googletest/src/googletest/include/", +] +_IWYU_MAPPING = { + '"gmock/gmock.h"': '"test/gmock.h"', + '"gtest/gtest.h"': '"test/gtest.h"', + "": '"rtc_base/net_helpers.h"', +} + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser( + description="Runs the include-cleaner tool on a list of files", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + parser.add_argument("files", + nargs="+", + type=_valid_file, + help="List of files to process") + parser.add_argument( + "-p", + "--print", + action=argparse.BooleanOptionalAction, + default=False, + help="Don't modify the files, just print the changes", + ) + parser.add_argument( + "-c", + "--check-for-changes", + action=argparse.BooleanOptionalAction, + default=False, + help="""Checks whether include-cleaner generated changes and exit with +1 in case it did. 
Used for bot validation that the current commit did not +introduce an include regression.""") + parser.add_argument( + "-w", + "--work-dir", + type=_valid_dir, + default=str(_DEFAULT_WORKDIR), + help="Specify the gn workdir", + ) + + return parser.parse_args() + + +def _valid_file(path: str) -> pathlib.Path: + """Checks if the given path is an existing file + relative to the current working directory. + + Args: + path: Relative file path to the current working directory + + Returns: + pathlib.Path object wrapping the file path + + Raises: + ValueError: If the file doesn't exist + """ + pathlib_handle = pathlib.Path(path) + if not pathlib_handle.is_file(): + raise ValueError(f"File path {pathlib_handle} does not exist!") + return pathlib_handle + + +def _valid_dir(path: str) -> pathlib.Path: + """Checks if the given path is an existing dir + relative to the current working directory. + + Args: + path: Relative dir path to the current working directory + + Returns: + pathlib.Path object wrapping the dir path + + Raises: + ValueError: If the dir doesn't exist + """ + pathlib_handle = pathlib.Path(path) + if not pathlib_handle.is_dir(): + raise ValueError(f"Dir path {pathlib_handle} does not exist!") + return pathlib_handle + + +def _generate_compile_commands(work_dir: pathlib.Path) -> None: + """Automatically generates the compile_commands.json file to be used + by the include cleaner binary. + + Args: + work_dir: gn out dir where the compile_commands json file exists + """ + compile_commands_path = work_dir / "compile_commands.json" + if not compile_commands_path.is_file(): + print("Generating compile commands file...") + subprocess.run( + ["tools/clang/scripts/generate_compdb.py", "-p", work_dir], + stdout=compile_commands_path.open(mode="w+"), + check=True, + ) + + +# Transitioning the cmd type to tuple to prevent modification of +# the original command from the callsite in main... +def _apply_include_cleaner_to_file(file_path: pathlib.Path, + should_modify: bool, + cmd: Tuple[str, ...]) -> bool: + """Applies the include cleaner binary to a given file. 
+ Other than that, make sure to do include substitutions following the + _IWYU_MAPPING variable and clear the tool output from redundant additions + (those that came from _IWYU_MAPPING and weren't necessary) + + Args: + file_path: The path to the file to execute include cleaner on + should_print: whether we'd like to apply the include cleaner changes to + the file + cmd: pre defined include cleaner command with all the relevant + arguments but the file path + + Returns: + True if include cleaner provided a substitution that was actually + required in code (wasn't removed by the _IWYU_MAPPING) + """ + cmd += (str(file_path), ) + result = subprocess.run(cmd, capture_output=True, text=True, check=False) + if result.returncode != 0: + print(f"Failed to run include cleaner on {file_path}, stderr:", + f"{result.stderr.strip()}") + output = result.stdout.strip() + + content = file_path.read_text() + modified_content = content + for key, value in _IWYU_MAPPING.items(): + if value in modified_content: + # If the required include is already in the file, clear it from the + # cleaner output and remove what the cleaner added to the file + output = output.replace(f'+ {key.replace("#include ", "")}', "") + if should_modify: + modified_content = re.sub(rf"^#include {re.escape(key)}.*\n?", + "", + modified_content, + flags=re.MULTILINE) + + elif should_modify: + # Otherwise, change what the cleaner added to the correct include + # from _IWYU_MAPPING + modified_content = re.sub(rf"^#include {re.escape(key)}", + f"#include {value}", + modified_content, + flags=re.MULTILINE) + if should_modify and content != modified_content: + file_path.write_text(modified_content) + + if output: + print(output) + else: + print(f"Successfuly ran include cleaner on {file_path}") + return bool(output) + + +def main() -> None: + if not _CLEANER_BINARY_PATH.exists(): + print(f"clang-include-cleaner not found in {_CLEANER_BINARY_PATH}") + print( + "Add '\"checkout_clangd\": True' to 'custom_vars' in your ", + ".gclient file and run 'gclient sync'.", + ) + + args = _parse_args() + + _generate_compile_commands(args.work_dir) + + # Build the execution command + cmd = [str(_CLEANER_BINARY_PATH), "-p", str(args.work_dir)] + for extra_arg in _EXTRA_ARGS: + cmd.append(f"--extra-arg={extra_arg}") + if args.print or args.check_for_changes: + cmd.append("--print=changes") + should_modify = False + else: + cmd.append("--edit") + should_modify = True + + changes_generated = False + # TODO(dorhen@meta): Ideally don't iterate on the files + # and execute cleaner on each, but instead execute the + # cleaner binary once - passing in all files. + # e.g instead of `cleaner foo.cc && cleaner bar.cc` + # do `cleaner foo.cc bar.cc` + for file in args.files: + changes_generated = (_apply_include_cleaner_to_file( + file, should_modify, tuple(cmd)) or changes_generated) + + print("Finished. 
Check diff, compile, gn gen --check", + "(tools_webrtc/gn_check_autofix.py can fix most of the issues)") + print("and git cl format before uploading.") + + if changes_generated and args.check_for_changes: + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/tools_webrtc/iwyu/iwyu-filter-list b/tools_webrtc/iwyu/iwyu-filter-list index b5b0fb0721..f2d01bafab 100644 --- a/tools_webrtc/iwyu/iwyu-filter-list +++ b/tools_webrtc/iwyu/iwyu-filter-list @@ -7,3 +7,5 @@ #include <__memory/unique_ptr.h> #include <__tree> #include +#include +#include diff --git a/tools_webrtc/iwyu/iwyu-verifier-filter_list.json b/tools_webrtc/iwyu/iwyu-verifier-filter_list.json new file mode 100644 index 0000000000..9cd95be5d8 --- /dev/null +++ b/tools_webrtc/iwyu/iwyu-verifier-filter_list.json @@ -0,0 +1,15 @@ +{ + "excluded_paths": [ + "examples/androidnativeapi/jni/", + "examples/androidvoip/jni/", + "examples/objc/", + "examples/objcnativeapi/objc/", + "modules/audio_device/win/", + "modules/desktop_capture/", + "modules/desktop_capture/win/", + "rtc_base/win", + "sdk/android/src/jni/", + "test/ios/", + "test/mac/" + ] +} diff --git a/tools_webrtc/iwyu/mappings.imp b/tools_webrtc/iwyu/mappings.imp deleted file mode 100644 index ab2712438d..0000000000 --- a/tools_webrtc/iwyu/mappings.imp +++ /dev/null @@ -1,38 +0,0 @@ -# -# Mappings file for IWYU in webrtc -# -# Documentation of syntax: -# https://github.com/include-what-you-use/include-what-you-use/blob/master/docs/IWYUMappings.md -# -# Remember that it needs include strings INCLUDING <> or "" inside the quotes. -# -[ -# Redirect to have gmock and gtest includes under our control -{ include: ['"gmock/gmock.h"', "private", '"test/gmock.h"', "public"] }, -{ include: ['"gtest/gtest.h"', "private", '"test/gtest.h"', "public"] }, - -# rtc_base/containers internal defs -{ include: ['"rtc_base/containers/flat_tree.h"', "private", '"rtc_base/containers/flat_set.h"', "public"] }, - -# Revectoring of JSON -{ include: ['"json/reader.h"', "private", '"rtc_base/strings/json.h"', "public"] }, -{ include: ['"json/value.h"', "private", '"rtc_base/strings/json.h"', "public"] }, - -# LIBSRTP overrides -{ include: ['"rdbx.h"', "private", '"third_party/libsrtp/include/srtp_priv.h"', "public"] }, -{ include: ['"auth.h"', "private", '"third_party/libsrtp/include/srtp_priv.h"', "public"] }, - -# pthread internals -{ include: ['', "private", '', "public"] }, - -# Needed to agree with presubmit tests for includes (and not include ) -{ symbol: ["std::string", "public", "", "public"] }, -{ symbol: ["std::move", "public", "", "public"] }, -{ symbol: ["std::make_unique", "public", "", "public"] }, -{ symbol: ["std::unique_ptr", "public", "", "public"] }, -# Needed to avoid -{ symbol: ["std::ostringstream", "public", "", "public"] }, - -{ ref: "../../third_party/libc++/src/include/libcxx.imp" }, -] - diff --git a/tools_webrtc/libs/generate_licenses.py b/tools_webrtc/libs/generate_licenses.py index fdc40f5c4c..73fd19ae86 100755 --- a/tools_webrtc/libs/generate_licenses.py +++ b/tools_webrtc/libs/generate_licenses.py @@ -42,6 +42,7 @@ ], 'boringssl': ['third_party/boringssl/src/LICENSE'], 'crc32c': ['third_party/crc32c/src/LICENSE'], + 'compiler-rt': ['third_party/compiler-rt/src/LICENSE.TXT'], 'cpu_features': ['third_party/cpu_features/src/LICENSE'], 'dav1d': ['third_party/dav1d/LICENSE'], 'errorprone': [ @@ -55,12 +56,12 @@ 'libaom': ['third_party/libaom/source/libaom/LICENSE'], 'libc++': ['third_party/libc++/src/LICENSE.TXT'], 'libc++abi': 
['third_party/libc++abi/src/LICENSE.TXT'], - 'libevent': ['third_party/libevent/LICENSE'], 'libjpeg_turbo': ['third_party/libjpeg_turbo/LICENSE.md'], 'libsrtp': ['third_party/libsrtp/LICENSE'], 'libunwind': ['third_party/libunwind/src/LICENSE.TXT'], 'libvpx': ['third_party/libvpx/source/libvpx/LICENSE'], 'libyuv': ['third_party/libyuv/LICENSE'], + 'llvm-libc': ['third_party/llvm-libc/src/LICENSE.TXT'], 'nasm': ['third_party/nasm/LICENSE'], 'opus': ['third_party/opus/src/COPYING'], 'pffft': ['third_party/pffft/LICENSE'], @@ -77,9 +78,11 @@ 'ooura': ['common_audio/third_party/ooura/LICENSE'], 'spl_sqrt_floor': ['common_audio/third_party/spl_sqrt_floor/LICENSE'], 'kotlin_stdlib': ['third_party/kotlin_stdlib/LICENSE'], - + 'jni_zero': ['third_party/jni_zero/LICENSE'], + 'protobuf-javascript': ['third_party/protobuf-javascript/LICENSE'], + 'perfetto': ['third_party/perfetto/LICENSE'], # TODO(bugs.webrtc.org/1110): Remove this hack. This is not a lib. - # For some reason it is listed as so in _GetThirdPartyLibraries. + # For some reason it is listed as so in _get_third_party_libraries. 'android_deps': [], # This is not a library but a collection of libraries. 'androidx': [], @@ -97,7 +100,11 @@ 'com_android_support_support_annotations/LICENSE' ], - # Internal dependencies, licenses are already included by other dependencies + 'android_build_tools.*': [ + 'third_party/android_build_tools/bundletool/LICENSE' + ], + + # Internal dependencies, licenses are already included by other deps. 'android_deps:com_android_support_support_annotations.*': [], } @@ -111,7 +118,7 @@ # tools we need are *actually* in their build folder, thus we need to move up # to the *true* source root, when we're embedded like this. if SRC_DIR.endswith(os.path.join('third_party', 'webrtc')): - SRC_DIR = os.path.abspath(os.path.join(SRC_DIR, os.pardir, os.pardir)) + SRC_DIR = os.path.abspath(os.path.join(SRC_DIR, os.pardir, os.pardir)) sys.path.append(os.path.join(SRC_DIR, 'build')) import find_depot_tools @@ -120,28 +127,29 @@ class LicenseBuilder: - def __init__(self, - buildfile_dirs, - targets, - lib_to_licenses_dict=None, - lib_regex_to_licenses_dict=None): - if lib_to_licenses_dict is None: - lib_to_licenses_dict = LIB_TO_LICENSES_DICT - if lib_regex_to_licenses_dict is None: - lib_regex_to_licenses_dict = LIB_REGEX_TO_LICENSES_DICT + def __init__(self, + buildfile_dirs, + targets, + lib_to_licenses_dict=None, + lib_regex_to_licenses_dict=None): + if lib_to_licenses_dict is None: + lib_to_licenses_dict = LIB_TO_LICENSES_DICT + + if lib_regex_to_licenses_dict is None: + lib_regex_to_licenses_dict = LIB_REGEX_TO_LICENSES_DICT - self.buildfile_dirs = buildfile_dirs - self.targets = targets - self.lib_to_licenses_dict = lib_to_licenses_dict - self.lib_regex_to_licenses_dict = lib_regex_to_licenses_dict + self.buildfile_dirs = buildfile_dirs + self.targets = targets + self.lib_to_licenses_dict = lib_to_licenses_dict + self.lib_regex_to_licenses_dict = lib_regex_to_licenses_dict - self.common_licenses_dict = self.lib_to_licenses_dict.copy() - self.common_licenses_dict.update(self.lib_regex_to_licenses_dict) + self.common_licenses_dict = self.lib_to_licenses_dict.copy() + self.common_licenses_dict.update(self.lib_regex_to_licenses_dict) - @staticmethod - def _ParseLibraryName(dep): - """Returns library name after third_party + @staticmethod + def _parse_library_name(dep): + """Returns library name after third_party Input one of: //a/b/third_party/libname:c @@ -150,11 +158,11 @@ def _ParseLibraryName(dep): Outputs libname or None 
if this is not a third_party dependency. """ - groups = re.match(THIRD_PARTY_LIB_SIMPLE_NAME_REGEX, dep) - return groups.group(1) if groups else None + groups = re.match(THIRD_PARTY_LIB_SIMPLE_NAME_REGEX, dep) + return groups.group(1) if groups else None - def _ParseLibrary(self, dep): - """Returns library simple or regex name that matches `dep` after third_party + def _parse_library(self, dep): + """Return library simple or regex name matching `dep` after third_party This method matches `dep` dependency against simple names in LIB_TO_LICENSES_DICT and regular expression names in @@ -162,104 +170,110 @@ def _ParseLibrary(self, dep): Outputs matched dict key or None if this is not a third_party dependency. """ - libname = LicenseBuilder._ParseLibraryName(dep) - - for lib_regex in self.lib_regex_to_licenses_dict: - if re.match(THIRD_PARTY_LIB_REGEX_TEMPLATE % lib_regex, dep): - return lib_regex - - return libname - - @staticmethod - def _RunGN(buildfile_dir, target): - cmd = [ - sys.executable, - os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'), - 'desc', - '--all', - '--format=json', - os.path.abspath(buildfile_dir), - target, - ] - logging.debug('Running: %r', cmd) - output_json = subprocess.check_output(cmd, cwd=WEBRTC_ROOT).decode('UTF-8') - logging.debug('Output: %s', output_json) - return output_json - - def _GetThirdPartyLibraries(self, buildfile_dir, target): - output = json.loads(LicenseBuilder._RunGN(buildfile_dir, target)) - libraries = set() - for described_target in list(output.values()): - third_party_libs = (self._ParseLibrary(dep) - for dep in described_target['deps']) - libraries |= set(lib for lib in third_party_libs if lib) - return libraries - - def GenerateLicenseText(self, output_dir): - # Get a list of third_party libs from gn. For fat libraries we must consider - # all architectures, hence the multiple buildfile directories. - third_party_libs = set() - for buildfile in self.buildfile_dirs: - for target in self.targets: - third_party_libs |= self._GetThirdPartyLibraries(buildfile, target) - assert len(third_party_libs) > 0 - - missing_licenses = third_party_libs - set(self.common_licenses_dict.keys()) - if missing_licenses: - error_msg = 'Missing licenses for following third_party targets: %s' % \ - ', '.join(sorted(missing_licenses)) - logging.error(error_msg) - raise Exception(error_msg) - - # Put webrtc at the front of the list. - license_libs = sorted(third_party_libs) - license_libs.insert(0, 'webrtc') - - logging.info('List of licenses: %s', ', '.join(license_libs)) - - # Generate markdown. 
- output_license_file = open(os.path.join(output_dir, 'LICENSE.md'), 'w+') - for license_lib in license_libs: - if len(self.common_licenses_dict[license_lib]) == 0: - logging.info('Skipping compile time or internal dependency: %s', - license_lib) - continue # Compile time dependency - - output_license_file.write('# %s\n' % license_lib) - output_license_file.write('```\n') - for path in self.common_licenses_dict[license_lib]: - license_path = os.path.join(WEBRTC_ROOT, path) - with open(license_path, 'r') as license_file: - license_text = escape(license_file.read(), quote=True) - output_license_file.write(license_text) - output_license_file.write('\n') - output_license_file.write('```\n\n') - - output_license_file.close() + libname = LicenseBuilder._parse_library_name(dep) + + for lib_regex in self.lib_regex_to_licenses_dict: + if re.match(THIRD_PARTY_LIB_REGEX_TEMPLATE % lib_regex, dep): + return lib_regex + + return libname + + @staticmethod + def _run_gn(buildfile_dir, target): + cmd = [ + sys.executable, + os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'), + 'desc', + '--all', + '--format=json', + os.path.abspath(buildfile_dir), + target, + ] + logging.debug('Running: %r', cmd) + output_json = subprocess.check_output(cmd, + cwd=WEBRTC_ROOT).decode('UTF-8') + logging.debug('Output: %s', output_json) + return output_json + + def _get_third_party_libraries(self, buildfile_dir, target): + output = json.loads(LicenseBuilder._run_gn(buildfile_dir, target)) + libraries = set() + for described_target in list(output.values()): + third_party_libs = (self._parse_library(dep) + for dep in described_target['deps']) + libraries |= set(lib for lib in third_party_libs if lib) + return libraries + + def generate_license_text(self, output_dir): + # Get a list of third_party libs from gn. For fat libraries we must + # consider all architectures, hence the multiple buildfile directories. + third_party_libs = set() + for buildfile in self.buildfile_dirs: + for target in self.targets: + third_party_libs |= self._get_third_party_libraries( + buildfile, target) + assert len(third_party_libs) > 0 + + missing_licenses = third_party_libs - set( + self.common_licenses_dict.keys()) + if missing_licenses: + error_msg = 'Missing licenses for third_party targets: %s' % \ + ', '.join(sorted(missing_licenses)) + logging.error(error_msg) + raise Exception(error_msg) + + # Put webrtc at the front of the list. + license_libs = sorted(third_party_libs) + license_libs.insert(0, 'webrtc') + + logging.info('List of licenses: %s', ', '.join(license_libs)) + + # Generate markdown. 
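        # Each entry written below ends up looking roughly like this in
        # LICENSE.md (library name and license body are illustrative):
        #
        #   # boringssl
        #   ```
        #   (escaped text of third_party/boringssl/src/LICENSE)
        #   ```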
+ output_license_file = open(os.path.join(output_dir, 'LICENSE.md'), + 'w+') + for license_lib in license_libs: + if len(self.common_licenses_dict[license_lib]) == 0: + logging.info( + 'Skipping compile time or internal dependency: %s', + license_lib) + continue # Compile time dependency + + output_license_file.write('# %s\n' % license_lib) + output_license_file.write('```\n') + for path in self.common_licenses_dict[license_lib]: + license_path = os.path.join(WEBRTC_ROOT, path) + with open(license_path, 'r') as license_file: + license_text = escape(license_file.read(), quote=True) + output_license_file.write(license_text) + output_license_file.write('\n') + output_license_file.write('```\n\n') + + output_license_file.close() def main(): - parser = argparse.ArgumentParser(description='Generate WebRTC LICENSE.md') - parser.add_argument('--verbose', - action='store_true', - default=False, - help='Debug logging.') - parser.add_argument('--target', - required=True, - action='append', - default=[], - help='Name of the GN target to generate a license for') - parser.add_argument('output_dir', help='Directory to output LICENSE.md to.') - parser.add_argument('buildfile_dirs', - nargs='+', - help='Directories containing gn generated ninja files') - args = parser.parse_args() - - logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) - - builder = LicenseBuilder(args.buildfile_dirs, args.target) - builder.GenerateLicenseText(args.output_dir) + parser = argparse.ArgumentParser(description='Generate WebRTC LICENSE.md') + parser.add_argument('--verbose', + action='store_true', + default=False, + help='Debug logging.') + parser.add_argument('--target', + required=True, + action='append', + default=[], + help='Name of the GN target to generate a license for') + parser.add_argument('output_dir', + help='Directory to output LICENSE.md to.') + parser.add_argument('buildfile_dirs', + nargs='+', + help='Directories containing gn generated ninja files') + args = parser.parse_args() + + logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) + + builder = LicenseBuilder(args.buildfile_dirs, args.target) + builder.generate_license_text(args.output_dir) if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/tools_webrtc/libs/generate_licenses_test.py b/tools_webrtc/libs/generate_licenses_test.py index 6dfd8f3e22..4f200e92c5 100755 --- a/tools_webrtc/libs/generate_licenses_test.py +++ b/tools_webrtc/libs/generate_licenses_test.py @@ -17,9 +17,10 @@ class TestLicenseBuilder(unittest.TestCase): - @staticmethod - def _FakeRunGN(buildfile_dir, target): - return """ + + @staticmethod + def _fake_run_gn(buildfile_dir, target): + return """ { "target1": { "deps": [ @@ -32,89 +33,94 @@ def _FakeRunGN(buildfile_dir, target): } """ - def testParseLibraryName(self): - self.assertEqual( - LicenseBuilder._ParseLibraryName('//a/b/third_party/libname1:c'), - 'libname1') - self.assertEqual( - LicenseBuilder._ParseLibraryName('//a/b/third_party/libname2:c(d)'), - 'libname2') - self.assertEqual( - LicenseBuilder._ParseLibraryName('//a/b/third_party/libname3/c:d(e)'), - 'libname3') - self.assertEqual( - LicenseBuilder._ParseLibraryName('//a/b/not_third_party/c'), None) - - def testParseLibrarySimpleMatch(self): - builder = LicenseBuilder([], [], {}, {}) - self.assertEqual(builder._ParseLibrary('//a/b/third_party/libname:c'), - 'libname') - - def testParseLibraryRegExNoMatchFallbacksToDefaultLibname(self): - lib_dict = { - 'libname:foo.*': ['path/to/LICENSE'], - } - builder 
= LicenseBuilder([], [], lib_dict, {}) - self.assertEqual( - builder._ParseLibrary('//a/b/third_party/libname:bar_java'), 'libname') - - def testParseLibraryRegExMatch(self): - lib_regex_dict = { - 'libname:foo.*': ['path/to/LICENSE'], - } - builder = LicenseBuilder([], [], {}, lib_regex_dict) - self.assertEqual( - builder._ParseLibrary('//a/b/third_party/libname:foo_bar_java'), - 'libname:foo.*') - - def testParseLibraryRegExMatchWithSubDirectory(self): - lib_regex_dict = { - 'libname/foo:bar.*': ['path/to/LICENSE'], - } - builder = LicenseBuilder([], [], {}, lib_regex_dict) - self.assertEqual( - builder._ParseLibrary('//a/b/third_party/libname/foo:bar_java'), - 'libname/foo:bar.*') - - def testParseLibraryRegExMatchWithStarInside(self): - lib_regex_dict = { - 'libname/foo.*bar.*': ['path/to/LICENSE'], - } - builder = LicenseBuilder([], [], {}, lib_regex_dict) - self.assertEqual( - builder._ParseLibrary('//a/b/third_party/libname/fooHAHA:bar_java'), - 'libname/foo.*bar.*') - - @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN) - def testGetThirdPartyLibrariesWithoutRegex(self): - builder = LicenseBuilder([], [], {}, {}) - self.assertEqual(builder._GetThirdPartyLibraries('out/arm', 'target1'), - set(['libname1', 'libname2', 'libname3'])) - - @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN) - def testGetThirdPartyLibrariesWithRegex(self): - lib_regex_dict = { - 'libname2:c.*': ['path/to/LICENSE'], - } - builder = LicenseBuilder([], [], {}, lib_regex_dict) - self.assertEqual(builder._GetThirdPartyLibraries('out/arm', 'target1'), - set(['libname1', 'libname2:c.*', 'libname3'])) - - @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN) - def testGenerateLicenseTextFailIfUnknownLibrary(self): - lib_dict = { - 'simple_library': ['path/to/LICENSE'], - } - builder = LicenseBuilder(['dummy_dir'], ['dummy_target'], lib_dict, {}) - - with self.assertRaises(Exception) as context: - builder.GenerateLicenseText('dummy/dir') - - self.assertEqual( - context.exception.args[0], - 'Missing licenses for following third_party targets: ' - 'libname1, libname2, libname3') + def test_parse_library_name(self): + self.assertEqual( + LicenseBuilder._parse_library_name('//a/b/third_party/libname1:c'), + 'libname1') + self.assertEqual( + LicenseBuilder._parse_library_name( + '//a/b/third_party/libname2:c(d)'), 'libname2') + self.assertEqual( + LicenseBuilder._parse_library_name( + '//a/b/third_party/libname3/c:d(e)'), 'libname3') + self.assertEqual( + LicenseBuilder._parse_library_name('//a/b/not_third_party/c'), + None) + + def test_parse_library_simple_match(self): + builder = LicenseBuilder([], [], {}, {}) + self.assertEqual(builder._parse_library('//a/b/third_party/libname:c'), + 'libname') + + def test_parse_library_regex_no_match_fallbacks_to_default_libname(self): + lib_dict = { + 'libname:foo.*': ['path/to/LICENSE'], + } + builder = LicenseBuilder([], [], lib_dict, {}) + self.assertEqual( + builder._parse_library('//a/b/third_party/libname:bar_java'), + 'libname') + + def test_parse_library_regex_match(self): + lib_regex_dict = { + 'libname:foo.*': ['path/to/LICENSE'], + } + builder = LicenseBuilder([], [], {}, lib_regex_dict) + self.assertEqual( + builder._parse_library('//a/b/third_party/libname:foo_bar_java'), + 'libname:foo.*') + + def test_parse_library_regex_match_with_sub_directory(self): + lib_regex_dict = { + 'libname/foo:bar.*': ['path/to/LICENSE'], + } + builder = LicenseBuilder([], [], {}, lib_regex_dict) + self.assertEqual( + 
builder._parse_library('//a/b/third_party/libname/foo:bar_java'), + 'libname/foo:bar.*') + + def test_parse_library_regex_match_with_star_inside(self): + lib_regex_dict = { + 'libname/foo.*bar.*': ['path/to/LICENSE'], + } + builder = LicenseBuilder([], [], {}, lib_regex_dict) + self.assertEqual( + builder._parse_library( + '//a/b/third_party/libname/fooHAHA:bar_java'), + 'libname/foo.*bar.*') + + @patch('generate_licenses.LicenseBuilder._run_gn', _fake_run_gn) + def test_get_third_party_libraries_without_regex(self): + builder = LicenseBuilder([], [], {}, {}) + self.assertEqual( + builder._get_third_party_libraries('out/arm', 'target1'), + set(['libname1', 'libname2', 'libname3'])) + + @patch('generate_licenses.LicenseBuilder._run_gn', _fake_run_gn) + def test_get_third_party_libraries_with_regex(self): + lib_regex_dict = { + 'libname2:c.*': ['path/to/LICENSE'], + } + builder = LicenseBuilder([], [], {}, lib_regex_dict) + self.assertEqual( + builder._get_third_party_libraries('out/arm', 'target1'), + set(['libname1', 'libname2:c.*', 'libname3'])) + + @patch('generate_licenses.LicenseBuilder._run_gn', _fake_run_gn) + def test_generate_license_text_fail_if_unknown_library(self): + lib_dict = { + 'simple_library': ['path/to/LICENSE'], + } + builder = LicenseBuilder(['dummy_dir'], ['dummy_target'], lib_dict, {}) + + with self.assertRaises(Exception) as context: + builder.generate_license_text('dummy/dir') + + self.assertEqual( + context.exception.args[0], + 'Missing licenses for third_party targets: ' + 'libname1, libname2, libname3') if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tools_webrtc/matlab/maxUnwrap.m b/tools_webrtc/matlab/maxUnwrap.m index 276c9523a4..cda48278b8 100644 --- a/tools_webrtc/matlab/maxUnwrap.m +++ b/tools_webrtc/matlab/maxUnwrap.m @@ -1,3 +1,4 @@ +% // clang-format off function sequence = maxUnwrap(sequence, max) % % sequence = maxUnwrap(sequence, max) diff --git a/tools_webrtc/matlab/parseLog.m b/tools_webrtc/matlab/parseLog.m index 5d4c3f7bc1..62c9388621 100644 --- a/tools_webrtc/matlab/parseLog.m +++ b/tools_webrtc/matlab/parseLog.m @@ -1,3 +1,4 @@ +% // clang-format off function parsed = parseLog(filename) % % parsed = parseLog(filename) diff --git a/tools_webrtc/matlab/rtpAnalyze.m b/tools_webrtc/matlab/rtpAnalyze.m index c51af9cca5..c64756579e 100644 --- a/tools_webrtc/matlab/rtpAnalyze.m +++ b/tools_webrtc/matlab/rtpAnalyze.m @@ -1,3 +1,4 @@ +% // clang-format off function rtpAnalyze( input_file ) %RTP_ANALYZE Analyze RTP stream(s) from a txt file % The function takes the output from the command line tool rtp_analyze diff --git a/tools_webrtc/mb/docs/user_guide.md b/tools_webrtc/mb/docs/user_guide.md index 8c66cd328c..79fad06093 100644 --- a/tools_webrtc/mb/docs/user_guide.md +++ b/tools_webrtc/mb/docs/user_guide.md @@ -132,10 +132,6 @@ You can pass the `-q/--quiet` flag to get mb to be silent unless there is an error, and pass the `-v/--verbose` flag to get mb to log all of the files that are read and written, and all the commands that are run. -If the build config will use the Goma distributed-build system, you can pass -the path to your Goma client in the `-g/--goma-dir` flag, and it will be -incorporated into the appropriate flags for GYP or GN as needed. - If gen ends up using GYP, the path must have a valid GYP configuration as the last component of the path (i.e., specify `//out/Release_x64`, not `//out`). 
The gyp script defaults to `//build/gyp_chromium`, but can be overridden with @@ -248,8 +244,8 @@ For example, if you had: } 'mixins': { 'bot': { - 'gyp_defines': 'use_goma=1 dcheck_always_on=0', - 'gn_args': 'use_goma=true dcheck_always_on=false', + 'gyp_defines': 'use_remoteexec=1 dcheck_always_on=0', + 'gn_args': 'use_remoteexec=true dcheck_always_on=false', }, 'debug': { 'gn_args': 'is_debug=true', @@ -276,7 +272,7 @@ For example, if you had: and you ran `mb gen -c linux_release_trybot //out/Release`, it would translate into a call to `gyp_chromium -G Release` with `GYP_DEFINES` set to -`"use_goma=true dcheck_always_on=false dcheck_always_on=true"`. +`"use_remoteexec=true dcheck_always_on=false dcheck_always_on=true"`. (From that you can see that mb is intentionally dumb and does not attempt to de-dup the flags, it lets gyp do that). diff --git a/tools_webrtc/mb/mb.py b/tools_webrtc/mb/mb.py index c23e69c09f..6c77b9a7d4 100755 --- a/tools_webrtc/mb/mb.py +++ b/tools_webrtc/mb/mb.py @@ -25,132 +25,123 @@ def _GetExecutable(target, platform): - executable_prefix = '.\\' if platform == 'win32' else './' - executable_suffix = '.exe' if platform == 'win32' else '' - return executable_prefix + target + executable_suffix + executable_prefix = '.\\' if platform == 'win32' else './' + executable_suffix = '.exe' if platform == 'win32' else '' + return executable_prefix + target + executable_suffix def main(args): - mbw = WebRTCMetaBuildWrapper() - return mbw.Main(args) + mbw = WebRTCMetaBuildWrapper() + return mbw.Main(args) class WebRTCMetaBuildWrapper(mb.MetaBuildWrapper): - def __init__(self): - super().__init__() - # Make sure default_config and default_isolate_map are attributes of the - # parent class before changing their values. - # pylint: disable=access-member-before-definition - assert self.default_config - assert self.default_isolate_map - self.default_config = os.path.join(_SCRIPT_DIR, 'mb_config.pyl') - self.default_isolate_map = os.path.join(_SRC_DIR, 'infra', 'specs', - 'gn_isolate_map.pyl') - - def GetSwarmingCommand(self, target, vals): - isolate_map = self.ReadIsolateMap() - test_type = isolate_map[target]['type'] - - is_android = 'target_os="android"' in vals['gn_args'] - is_fuchsia = 'target_os="fuchsia"' in vals['gn_args'] - is_ios = 'target_os="ios"' in vals['gn_args'] - is_linux = self.platform.startswith('linux') and not is_android - is_win = self.platform.startswith('win') - - if test_type == 'nontest': - self.WriteFailureAndRaise('We should not be isolating %s.' % target, - output_path=None) - if test_type not in ('console_test_launcher', 'windowed_test_launcher', - 'non_parallel_console_test_launcher', 'raw', - 'additional_compile_target', 'junit_test', 'script'): - self.WriteFailureAndRaise('No command line for ' - '%s found (test type %s).' 
% - (target, test_type), - output_path=None) - - cmdline = [] - extra_files = [ - '../../.vpython3', - '../../testing/test_env.py', - ] - vpython_exe = 'vpython3' - - if isolate_map[target].get('script'): - cmdline += [ - vpython_exe, - '../../' + self.ToSrcRelPath(isolate_map[target]['script']) - ] - elif is_android: - cmdline += [ - 'luci-auth', 'context', '--', vpython_exe, - '../../build/android/test_wrapper/logdog_wrapper.py', '--target', - target, '--logdog-bin-cmd', - '../../.task_template_packages/logdog_butler', '--logcat-output-file', - '${ISOLATED_OUTDIR}/logcats', '--store-tombstones' - ] - elif is_ios or is_fuchsia or test_type == 'raw': - if is_win: - cmdline += ['bin\\run_{}.bat'.format(target)] - else: - cmdline += ['bin/run_{}'.format(target)] - else: - if isolate_map[target].get('use_webcam', False): - cmdline += [ - vpython_exe, '../../tools_webrtc/ensure_webcam_is_running.py' + + def __init__(self): + super().__init__() + # Make sure default_config and default_isolate_map are attributes of the + # parent class before changing their values. + # pylint: disable=access-member-before-definition + assert self.default_config + assert self.default_isolate_map + self.default_config = os.path.join(_SCRIPT_DIR, 'mb_config.pyl') + self.default_isolate_map = os.path.join(_SRC_DIR, 'infra', 'specs', + 'gn_isolate_map.pyl') + + def GetSwarmingCommand(self, target, vals): + isolate_map = self.ReadIsolateMap() + test_type = isolate_map[target]['type'] + + is_android = 'target_os="android"' in vals['gn_args'] + is_fuchsia = 'target_os="fuchsia"' in vals['gn_args'] + is_ios = 'target_os="ios"' in vals['gn_args'] + is_linux = self.platform.startswith('linux') and not is_android + is_win = self.platform.startswith('win') + + if test_type not in ('console_test_launcher', 'windowed_test_launcher', + 'non_parallel_console_test_launcher', 'raw', + 'additional_compile_target'): + self.WriteFailureAndRaise('No command line for ' + '%s found (test type %s).' % + (target, test_type), + output_path=None) + + extra_files = [ + '../../.vpython3', + '../../testing/test_env.py', ] - extra_files.append('../../tools_webrtc/ensure_webcam_is_running.py') - if isolate_map[target].get('use_pipewire', False): - cmdline += [vpython_exe, '../../tools_webrtc/configure_pipewire.py'] - extra_files.append('../../tools_webrtc/configure_pipewire.py') - - # is_linux uses use_ozone and x11 by default. - use_x11 = is_linux - - xvfb = use_x11 and test_type == 'windowed_test_launcher' - if xvfb: - cmdline += [vpython_exe, '../../testing/xvfb.py'] - extra_files.append('../../testing/xvfb.py') - else: - cmdline += [vpython_exe, '../../testing/test_env.py'] - - extra_files += [ - '../../third_party/gtest-parallel/gtest-parallel', - '../../third_party/gtest-parallel/gtest_parallel.py', - '../../tools_webrtc/gtest-parallel-wrapper.py', - ] - output_dir = '${ISOLATED_OUTDIR}/test_logs' - cmdline += [ - '../../tools_webrtc/gtest-parallel-wrapper.py', - '--output_dir=%s' % output_dir, - '--gtest_color=no', - ] - if test_type == 'non_parallel_console_test_launcher': - # Still use the gtest-parallel-wrapper.py script since we - # need it to run tests on swarming, but don't execute tests - # in parallel. - cmdline.append('--workers=1') - - asan = 'is_asan=true' in vals['gn_args'] - lsan = 'is_lsan=true' in vals['gn_args'] - msan = 'is_msan=true' in vals['gn_args'] - tsan = 'is_tsan=true' in vals['gn_args'] - sanitizer = asan or lsan or msan or tsan - if not sanitizer: - # Retry would hide most sanitizers detections. 
- cmdline.append('--retry_failed=3') - - cmdline.append(_GetExecutable(target, self.platform)) - - cmdline.extend([ - '--asan=%d' % asan, - '--lsan=%d' % lsan, - '--msan=%d' % msan, - '--tsan=%d' % tsan, - ]) - - cmdline += isolate_map[target].get('args', []) - - return cmdline, extra_files + vpython_exe = 'vpython3' + + if is_ios or is_fuchsia or test_type == 'raw': + if is_win: + cmdline = ['bin\\run_{}.bat'.format(target)] + else: + cmdline = ['bin/run_{}'.format(target)] + elif is_android: + cmdline = [ + 'luci-auth', 'context', '--', vpython_exe, + '../../build/android/test_wrapper/logdog_wrapper.py', + '--target', target, '--logdog-bin-cmd', + '../../.task_template_packages/logdog_butler', + '--logcat-output-file', '${ISOLATED_OUTDIR}/logcats', + '--store-tombstones' + ] + else: + cmdline = [] + if isolate_map[target].get('use_pipewire', False): + cmdline = [ + vpython_exe, '../../tools_webrtc/configure_pipewire.py' + ] + extra_files.append('../../tools_webrtc/configure_pipewire.py') + + # is_linux uses use_ozone and x11 by default. + use_x11 = is_linux + + xvfb = use_x11 and test_type == 'windowed_test_launcher' + if xvfb: + cmdline += [vpython_exe, '../../testing/xvfb.py'] + extra_files.append('../../testing/xvfb.py') + else: + cmdline += [vpython_exe, '../../testing/test_env.py'] + + extra_files += [ + '../../third_party/gtest-parallel/gtest-parallel', + '../../third_party/gtest-parallel/gtest_parallel.py', + '../../tools_webrtc/gtest-parallel-wrapper.py', + ] + output_dir = '${ISOLATED_OUTDIR}/test_logs' + cmdline += [ + '../../tools_webrtc/gtest-parallel-wrapper.py', + '--output_dir=%s' % output_dir, + '--gtest_color=no', + ] + if test_type == 'non_parallel_console_test_launcher': + # Still use the gtest-parallel-wrapper.py script since we + # need it to run tests on swarming, but don't execute tests + # in parallel. + cmdline.append('--workers=1') + + asan = 'is_asan=true' in vals['gn_args'] + lsan = 'is_lsan=true' in vals['gn_args'] + msan = 'is_msan=true' in vals['gn_args'] + tsan = 'is_tsan=true' in vals['gn_args'] + sanitizer = asan or lsan or msan or tsan + if not sanitizer: + # Retry would hide most sanitizers detections. + cmdline.append('--retry_failed=3') + + cmdline.append(_GetExecutable(target, self.platform)) + + cmdline.extend([ + '--asan=%d' % asan, + '--lsan=%d' % lsan, + '--msan=%d' % msan, + '--tsan=%d' % tsan, + ]) + + cmdline += isolate_map[target].get('args', []) + + return cmdline, extra_files if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) + sys.exit(main(sys.argv[1:])) diff --git a/tools_webrtc/mb/mb_config.pyl b/tools_webrtc/mb/mb_config.pyl index cb52f9124a..3d1fd7ccb2 100644 --- a/tools_webrtc/mb/mb_config.pyl +++ b/tools_webrtc/mb/mb_config.pyl @@ -34,7 +34,6 @@ 'Android32': 'android_release_bot_arm', 'Android32 (dbg)': 'android_debug_static_bot_arm', 'Android32 (more configs)': { - 'bwe_test_logging': 'bwe_test_logging_android_arm', 'dummy_audio_file_devices_no_protobuf': 'dummy_audio_file_devices_no_protobuf_android_arm', 'rtti_no_sctp': 'rtti_no_sctp_android_arm', @@ -55,10 +54,11 @@ # "More configs" bots will build all the following configs in sequence. # This is using MB's "phases" feature. 
'Linux (more configs)': { - 'bwe_test_logging': 'bwe_test_logging_x64', 'dummy_audio_file_devices_no_protobuf': 'dummy_audio_file_devices_no_protobuf_x64', 'rtti_no_sctp': 'rtti_no_sctp_x64', + 'openssl': 'openssl_x64', + 'libstdcpp': 'libstdcpp_x64', }, 'Linux Asan': 'asan_lsan_clang_release_bot_x64', 'Linux MSan': 'msan_clang_release_bot_x64', @@ -86,7 +86,6 @@ # Windows 'Win (more configs)': { - 'bwe_test_logging': 'bwe_test_logging_x86', 'dummy_audio_file_devices_no_protobuf': 'dummy_audio_file_devices_no_protobuf_x86', 'rtti_no_sctp': 'rtti_no_sctp_no_unicode_win_x86', @@ -142,10 +141,11 @@ 'android_arm64_rel': 'android_release_bot_arm64', 'android_arm_dbg': 'android_debug_static_bot_arm', 'android_arm_more_configs': { - 'bwe_test_logging': 'bwe_test_logging_android_arm', 'dummy_audio_file_devices_no_protobuf': 'dummy_audio_file_devices_no_protobuf_android_arm', 'rtti_no_sctp': 'rtti_no_sctp_android_arm', + 'disable_trace_events': 'disable_trace_events_android_arm', + 'perfetto': 'perfetto_android_arm', }, 'android_arm_rel': 'android_release_bot_arm', 'android_compile_arm64_dbg': 'android_debug_static_bot_arm64', @@ -165,6 +165,9 @@ 'ios_compile_arm64_rel': 'ios_release_bot_arm64', 'ios_dbg_simulator': 'ios_debug_bot_x64', + # Include What You Use + 'iwyu_verifier': 'release_bot_x64', + # Linux 'linux_asan': 'asan_lsan_clang_release_bot_x64', 'linux_compile_arm64_dbg': 'debug_bot_arm64', @@ -179,10 +182,13 @@ 'linux_dbg': 'debug_bot_x64', 'linux_libfuzzer_rel': 'libfuzzer_asan_release_bot_x64', 'linux_more_configs': { - 'bwe_test_logging': 'bwe_test_logging_x64', 'dummy_audio_file_devices_no_protobuf': 'dummy_audio_file_devices_no_protobuf_x64', 'rtti_no_sctp': 'rtti_no_sctp_x64', + 'disable_trace_events': 'disable_trace_events_x64', + 'perfetto': 'perfetto_x64', + 'openssl': 'openssl_x64', + 'libstdcpp': 'libstdcpp_x64', }, 'linux_msan': 'msan_clang_release_bot_x64', 'linux_rel': 'release_bot_x64', @@ -202,6 +208,8 @@ 'mac_rel_m1': 'release_bot_arm64', # Windows + 'win11_debug': 'win_clang_debug_bot_x64', + 'win11_release': 'win_clang_release_bot_x64', 'win_asan': 'win_asan_clang_release_bot_x64', 'win_compile_x64_clang_dbg': 'win_clang_debug_bot_x64', 'win_compile_x64_clang_rel': 'win_clang_release_bot_x64', @@ -212,10 +220,11 @@ 'win_x86_clang_dbg': 'win_clang_debug_bot_x86', 'win_x86_clang_rel': 'win_clang_release_bot_x86', 'win_x86_more_configs': { - 'bwe_test_logging': 'bwe_test_logging_x86', 'dummy_audio_file_devices_no_protobuf': 'dummy_audio_file_devices_no_protobuf_x86', 'rtti_no_sctp': 'rtti_no_sctp_no_unicode_win_x86', + 'disable_trace_events': 'disable_trace_events_x86', + 'perfetto': 'perfetto_x86', }, } }, @@ -225,24 +234,80 @@ # A given config *may* be platform-specific but is not necessarily so (i.e., # we might have mac, win, and linux bots all using the 'release_bot' config). 
'configs': { - 'android_asan_shared_release_bot_arm': - ['android', 'asan', 'clang', 'pure_release_bot', 'arm'], - 'android_debug_static_bot_arm': ['android', 'debug_static_bot', 'arm'], - 'android_debug_static_bot_arm64': ['android', 'debug_static_bot', 'arm64'], - 'android_debug_static_bot_x64': ['android', 'debug_static_bot', 'x64'], - 'android_debug_static_bot_x86': ['android', 'debug_static_bot', 'x86'], - 'android_pure_release_bot_arm': ['android', 'pure_release_bot', 'arm'], - 'android_pure_release_bot_arm64': ['android', 'pure_release_bot', 'arm64'], - 'android_release_bot_arm': ['android', 'release_bot', 'arm'], - 'android_release_bot_arm64': ['android', 'release_bot', 'arm64'], - 'android_release_bot_x64': ['android', 'release_bot', 'x64'], - 'android_release_bot_x86': ['android', 'release_bot', 'x86'], - 'asan_lsan_clang_release_bot_x64': - ['asan', 'lsan', 'clang', 'openh264', 'pure_release_bot', 'x64', 'h265'], - 'bwe_test_logging_android_arm': - ['android', 'debug_static_bot', 'arm', 'bwe_test_logging'], - 'bwe_test_logging_x64': ['debug_bot', 'x64', 'bwe_test_logging'], - 'bwe_test_logging_x86': ['debug_bot', 'x86', 'bwe_test_logging'], + 'android_asan_shared_release_bot_arm': [ + 'android', + 'asan', + 'clang', + 'pure_release_bot', + 'arm', + ], + 'android_debug_static_bot_arm': [ + 'android', + 'debug_static_bot', + 'arm', + 'h265', + ], + 'android_debug_static_bot_arm64': [ + 'android', + 'debug_static_bot', + 'arm64', + 'h265', + ], + 'android_debug_static_bot_x64': [ + 'android', + 'debug_static_bot', + 'x64', + 'h265', + ], + 'android_debug_static_bot_x86': [ + 'android', + 'debug_static_bot', + 'x86', + 'h265', + ], + 'android_pure_release_bot_arm': [ + 'android', + 'pure_release_bot', + 'arm', + ], + 'android_pure_release_bot_arm64': [ + 'android', + 'pure_release_bot', + 'arm64', + ], + 'android_release_bot_arm': [ + 'android', + 'release_bot', + 'arm', + 'h265', + ], + 'android_release_bot_arm64': [ + 'android', + 'release_bot', + 'arm64', + 'h265', + ], + 'android_release_bot_x64': [ + 'android', + 'release_bot', + 'x64', + 'h265', + ], + 'android_release_bot_x86': [ + 'android', + 'release_bot', + 'x86', + 'h265', + ], + 'asan_lsan_clang_release_bot_x64': [ + 'asan', + 'lsan', + 'clang', + 'openh264', + 'pure_release_bot', + 'x64', + 'h265', + ], 'code_coverage_bot_x64': [ 'openh264', 'release_bot', 'x64', 'code_coverage', 'partial_code_coverage_instrumentation', 'h265' @@ -253,37 +318,84 @@ 'debug_bot_arm64': ['openh264', 'debug_bot', 'arm64', 'h265'], 'debug_bot_x64': ['openh264', 'debug_bot', 'x64', 'h265'], 'debug_bot_x86': ['openh264', 'debug_bot', 'x86', 'h265'], + 'disable_trace_events_android_arm': [ + 'android', + 'arm', + 'disable_trace_events', + 'release_bot', + ], + 'disable_trace_events_x64': ['x64', 'disable_trace_events', 'release_bot'], + 'disable_trace_events_x86': ['x86', 'disable_trace_events', 'release_bot'], 'dummy_audio_file_devices_no_protobuf_android_arm': [ - 'android', 'debug_static_bot', 'arm', 'dummy_audio_file_devices', - 'no_protobuf' + 'android', + 'arm', + 'debug_static_bot', + 'dummy_audio_file_devices', + 'no_protobuf', ], 'dummy_audio_file_devices_no_protobuf_x64': ['debug_bot', 'x64', 'dummy_audio_file_devices', 'no_protobuf'], 'dummy_audio_file_devices_no_protobuf_x86': ['debug_bot', 'x86', 'dummy_audio_file_devices', 'no_protobuf'], - 'ios_debug_bot_arm64': - ['ios', 'debug_bot', 'arm64', 'no_ios_code_signing', 'xctest'], - 'ios_debug_bot_x64': ['ios', 'debug_bot', 'x64', 'xctest'], + 'ios_debug_bot_arm64': [ + 'ios', + 
'ios_simulator', + 'debug_bot', + 'arm64', + 'no_ios_code_signing', + 'xctest', + ], + 'ios_debug_bot_x64': [ + 'ios', + 'ios_simulator', + 'debug_bot', + 'x64', + 'xctest', + ], 'ios_internal_debug_bot_arm64': [ - 'ios', 'debug_bot', 'arm64', 'ios_code_signing_identity_description', - 'xctest' + 'ios', + 'ios_device', + 'debug_bot', + 'arm64', + 'ios_code_signing_identity_description', + 'xctest', ], 'ios_internal_pure_release_bot_arm64': [ - 'ios', 'pure_release_bot', 'arm64', - 'ios_code_signing_identity_description', 'xctest', + 'ios', + 'ios_device', + 'pure_release_bot', + 'arm64', + 'ios_code_signing_identity_description', + 'xctest', 'rtc_objc_test_prefix', ], 'ios_internal_release_bot_arm64': [ - 'ios', 'release_bot', 'arm64', 'ios_code_signing_identity_description', - 'xctest' + 'ios', + 'ios_device', + 'release_bot', + 'arm64', + 'ios_code_signing_identity_description', + 'xctest', ], 'ios_release_bot_arm64': [ - 'ios', 'release_bot', 'arm64', 'no_ios_code_signing', 'xctest', + 'ios', + 'ios_simulator', + 'release_bot', + 'arm64', + 'no_ios_code_signing', + 'xctest', ], 'libfuzzer_asan_release_bot_x64': [ - 'libfuzzer', 'asan', 'optimize_for_fuzzing', 'openh264', - 'pure_release_bot', 'x64', 'h265' + 'asan', + 'chromium_base_dependency', + 'h265', + 'libfuzzer', + 'openh264', + 'optimize_for_fuzzing', + 'pure_release_bot', + 'x64', ], + 'libstdcpp_x64': ['debug_bot', 'x64', 'no_custom_libcxx'], 'mac_asan_clang_release_bot_x64': [ 'asan', 'clang', @@ -295,6 +407,15 @@ 'msan_clang_release_bot_x64': ['msan', 'clang', 'openh264', 'pure_release_bot', 'x64', 'h265'], 'no_h264_debug_bot_x86': ['debug_bot', 'x86'], + 'openssl_x64': ['debug_bot', 'x64', 'no_build_ssl'], + 'perfetto_android_arm': [ + 'android', + 'arm', + 'perfetto', + 'release_bot', + ], + 'perfetto_x64': ['x64', 'perfetto', 'release_bot'], + 'perfetto_x86': ['x86', 'perfetto', 'release_bot'], 'pure_release_bot_x64': ['openh264', 'pure_release_bot', 'x64', 'h265'], 'pure_release_bot_x86': ['openh264', 'pure_release_bot', 'x86', 'h265'], 'release_bot_arm': ['openh264', 'release_bot', 'arm', 'h265'], @@ -303,19 +424,22 @@ 'release_bot_x64_fuchsia': ['openh264', 'release_bot', 'x64', 'fuchsia', 'h265'], 'release_bot_x86': ['openh264', 'release_bot', 'x86', 'h265'], - 'rtti_no_sctp_android_arm': - ['android', 'debug_static_bot', 'arm', 'rtti', 'no_sctp'], + 'rtti_no_sctp_android_arm': [ + 'android', + 'arm', + 'debug_static_bot', + 'no_sctp', + 'rtti', + ], 'rtti_no_sctp_no_unicode_win_x86': ['debug_bot', 'x86', 'rtti', 'no_sctp', 'win_undef_unicode'], 'rtti_no_sctp_x64': ['debug_bot', 'x64', 'rtti', 'no_sctp'], 'tsan_clang_release_bot_x64': ['tsan', 'clang', 'openh264', 'pure_release_bot', 'x64', 'h265'], - 'ubsan_clang_release_bot_x64': [ - 'ubsan', 'clang', 'openh264', 'pure_release_bot', 'x64', 'h265' - ], - 'ubsan_vptr_clang_release_bot_x64': [ - 'ubsan_vptr', 'clang', 'openh264', 'pure_release_bot', 'x64', 'h265' - ], + 'ubsan_clang_release_bot_x64': + ['ubsan', 'clang', 'openh264', 'pure_release_bot', 'x64', 'h265'], + 'ubsan_vptr_clang_release_bot_x64': + ['ubsan_vptr', 'clang', 'openh264', 'pure_release_bot', 'x64', 'h265'], 'win_asan_clang_release_bot_x64': [ 'asan', 'clang', @@ -366,7 +490,7 @@ # mb should use. See //tools/mb/docs/user_guide.md for more information. 
'mixins': { 'android': { - 'gn_args': 'target_os="android"', + 'gn_args': 'target_os="android" android_static_analysis="off"', }, 'arm': { 'gn_args': 'target_cpu="arm"', @@ -377,8 +501,9 @@ 'asan': { 'gn_args': 'is_asan=true', }, - 'bwe_test_logging': { - 'gn_args': 'rtc_enable_bwe_test_logging=true', + # TODO(b/42223878): Remove this code when the dependency on base is gone. + 'chromium_base_dependency': { + 'mixins': ['enable_chromium_prelude', 'enable_rust'] }, # is_clang=true by default, this is only to guard from upstream changes. 'clang': { @@ -397,14 +522,38 @@ 'gn_args': 'is_debug=true', }, 'debug_bot': { - 'mixins': ['debug', 'reclient'], + 'mixins': [ + 'debug', + 'reclient', + 'strict_field_trials', + 'disallow_deprecated_namespaces' + ], }, 'debug_static_bot': { - 'mixins': ['debug', 'minimal_symbols', 'reclient'], + 'mixins': [ + 'debug', + 'minimal_symbols', + 'reclient', + 'strict_field_trials', + 'disallow_deprecated_namespaces', + ], + }, + 'disable_trace_events': { + 'gn_args': 'rtc_disable_trace_events=true', + }, + # TODO(b/42232595): Remove once the GN flag is removed. + 'disallow_deprecated_namespaces': { + 'gn_args': 'rtc_allow_deprecated_namespaces=false' }, 'dummy_audio_file_devices': { 'gn_args': 'rtc_use_dummy_audio_file_devices=true', }, + 'enable_chromium_prelude': { + 'gn_args': 'enable_chromium_prelude=true', + }, + 'enable_rust': { + 'gn_args': 'enable_rust=true enable_rust_cxx=true', + }, 'fuchsia': { 'gn_args': 'target_os="fuchsia"' }, @@ -420,8 +569,15 @@ 'ios_code_signing_identity_description': { 'gn_args': 'ios_code_signing_identity_description="Apple Development"', }, + 'ios_device': { + 'gn_args': 'target_environment="device"' + }, + 'ios_simulator': { + 'gn_args': 'target_environment="simulator"' + }, 'libfuzzer': { - 'gn_args': 'use_libfuzzer=true', + # TODO(b/42223878): use_fuzztest_wrapper adds a dependency to //base. + 'gn_args': 'use_libfuzzer=true use_fuzztest_wrapper=true', }, 'lsan': { 'gn_args': 'is_lsan=true', @@ -430,8 +586,13 @@ 'gn_args': 'symbol_level=1', }, 'msan': { - 'gn_args': 'is_msan=true msan_track_origins=2' - ' instrumented_libraries_release = "focal"', + 'gn_args': 'is_msan=true msan_track_origins=2', + }, + 'no_build_ssl': { + 'gn_args': 'rtc_build_ssl=false libsrtp_build_boringssl=false', + }, + 'no_custom_libcxx': { + 'gn_args': 'use_custom_libcxx=false', }, 'no_ios_code_signing': { 'gn_args': 'ios_enable_code_signing=false', @@ -452,6 +613,9 @@ 'gn_args': 'coverage_instrumentation_input_file="//.code-coverage/files_to_instrument.txt"' }, + 'perfetto': { + 'gn_args': 'rtc_use_perfetto=true', + }, # The 'pure_release_bot' configuration is for release bots that are doing a # 100% release build without DCHECKs while 'release_bot' is a partial # release configs since `dcheck_always_on` is set to true. 
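The mb_config.pyl hunks in this change keep the two-level structure that the user_guide.md excerpt earlier in this patch describes: a named config is just a list of mixin names, each mixin contributes a `gn_args` string and may pull in further mixins, and mb concatenates the results in order without de-duplicating. Below is a minimal sketch of that expansion rule, not part of the patch and not MB's implementation: `resolve_gn_args` and the trimmed `CONFIGS`/`MIXINS` dictionaries are hypothetical, and the `x64` and `dcheck_always_on` values are assumed by analogy rather than quoted from this file.

    # Sketch only: illustrates the documented expansion rule, not MB's code.
    CONFIGS = {
        'perfetto_x64': ['x64', 'perfetto', 'release_bot'],
    }
    MIXINS = {
        'x64': {'gn_args': 'target_cpu="x64"'},            # assumed by analogy with 'arm'
        'perfetto': {'gn_args': 'rtc_use_perfetto=true'},
        'release_bot': {'mixins': ['pure_release_bot', 'dcheck_always_on',
                                   'strict_field_trials',
                                   'disallow_deprecated_namespaces']},
        'pure_release_bot': {'gn_args': 'is_debug=false'},
        'dcheck_always_on': {'gn_args': 'dcheck_always_on=true'},  # assumed value
        'strict_field_trials': {'gn_args': 'rtc_strict_field_trials="dcheck"'},
        'disallow_deprecated_namespaces': {
            'gn_args': 'rtc_allow_deprecated_namespaces=false'
        },
    }

    def resolve_gn_args(config_name):
        """Flatten a config's mixins into one gn_args string, preserving order."""
        args = []

        def expand(mixin_name):
            mixin = MIXINS[mixin_name]
            for nested in mixin.get('mixins', []):  # nested mixins expand first here
                expand(nested)
            if 'gn_args' in mixin:
                args.append(mixin['gn_args'])

        for name in CONFIGS[config_name]:
            expand(name)
        return ' '.join(args)  # no de-duplication, matching the user guide's note

    print(resolve_gn_args('perfetto_x64'))

Running the sketch prints one flat gn_args string ending in `rtc_strict_field_trials="dcheck" rtc_allow_deprecated_namespaces=false`, which is how the new `strict_field_trials` and `disallow_deprecated_namespaces` mixins defined further down in this file reach every bot that includes `release_bot`.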
@@ -465,7 +629,12 @@ 'gn_args': 'is_debug=false', }, 'release_bot': { - 'mixins': ['pure_release_bot', 'dcheck_always_on'], + 'mixins': [ + 'pure_release_bot', + 'dcheck_always_on', + 'strict_field_trials', + 'disallow_deprecated_namespaces', + ], }, 'rtc_objc_test_prefix': { 'gn_args': 'rtc_objc_prefix="RTC_TESTING"', @@ -473,6 +642,9 @@ 'rtti': { 'gn_args': 'use_rtti=true', }, + 'strict_field_trials': { + 'gn_args': 'rtc_strict_field_trials="dcheck"', + }, 'tsan': { 'gn_args': 'is_tsan=true', }, diff --git a/tools_webrtc/mb/mb_unittest.py b/tools_webrtc/mb/mb_unittest.py index 583fefd87e..9e7f1ab07d 100755 --- a/tools_webrtc/mb/mb_unittest.py +++ b/tools_webrtc/mb/mb_unittest.py @@ -25,134 +25,137 @@ class FakeMBW(mb.WebRTCMetaBuildWrapper): - def __init__(self, win32=False): - super().__init__() - - # Override vars for test portability. - if win32: - self.chromium_src_dir = 'c:\\fake_src' - self.default_config = 'c:\\fake_src\\tools_webrtc\\mb\\mb_config.pyl' - self.default_isolate_map = ('c:\\fake_src\\testing\\buildbot\\' - 'gn_isolate_map.pyl') - self.platform = 'win32' - self.executable = 'c:\\python\\vpython3.exe' - self.sep = '\\' - self.cwd = 'c:\\fake_src\\out\\Default' - else: - self.chromium_src_dir = '/fake_src' - self.default_config = '/fake_src/tools_webrtc/mb/mb_config.pyl' - self.default_isolate_map = '/fake_src/testing/buildbot/gn_isolate_map.pyl' - self.executable = '/usr/bin/vpython3' - self.platform = 'linux2' - self.sep = '/' - self.cwd = '/fake_src/out/Default' - - self.files = {} - self.dirs = set() - self.calls = [] - self.cmds = [] - self.cross_compile = None - self.out = '' - self.err = '' - self.rmdirs = [] - - def ExpandUser(self, path): - # pylint: disable=no-self-use - return '$HOME/%s' % path - - def Exists(self, path): - abs_path = self._AbsPath(path) - return self.files.get(abs_path) is not None or abs_path in self.dirs - - def ListDir(self, path): - dir_contents = [] - for f in list(self.files.keys()) + list(self.dirs): - head, _ = os.path.split(f) - if head == path: - dir_contents.append(f) - return dir_contents - - def MaybeMakeDirectory(self, path): - abpath = self._AbsPath(path) - self.dirs.add(abpath) - - def PathJoin(self, *comps): - return self.sep.join(comps) - - def ReadFile(self, path): - try: - return self.files[self._AbsPath(path)] - except KeyError as e: - raise IOError('%s not found' % path) from e - - def WriteFile(self, path, contents, force_verbose=False): - if self.args.dryrun or self.args.verbose or force_verbose: - self.Print('\nWriting """\\\n%s""" to %s.\n' % (contents, path)) - abpath = self._AbsPath(path) - self.files[abpath] = contents - - def Call(self, cmd, env=None, capture_output=True, input=None): - # pylint: disable=redefined-builtin - del env - del capture_output - del input - self.calls.append(cmd) - if self.cmds: - return self.cmds.pop(0) - return 0, '', '' - - def Print(self, *args, **kwargs): - sep = kwargs.get('sep', ' ') - end = kwargs.get('end', '\n') - f = kwargs.get('file', sys.stdout) - if f == sys.stderr: - self.err += sep.join(args) + end - else: - self.out += sep.join(args) + end - - def TempDir(self): - tmp_dir = os.path.join(tempfile.gettempdir(), 'mb_test') - self.dirs.add(tmp_dir) - return tmp_dir - - def TempFile(self, mode='w'): - del mode - return FakeFile(self.files) - - def RemoveFile(self, path): - abpath = self._AbsPath(path) - self.files[abpath] = None - - def RemoveDirectory(self, abs_path): - # Normalize the passed-in path to handle different working directories - # used during unit testing. 
- abs_path = self._AbsPath(abs_path) - self.rmdirs.append(abs_path) - files_to_delete = [f for f in self.files if f.startswith(abs_path)] - for f in files_to_delete: - self.files[f] = None - - def _AbsPath(self, path): - if not ((self.platform == 'win32' and path.startswith('c:')) or - (self.platform != 'win32' and path.startswith('/'))): - path = self.PathJoin(self.cwd, path) - if self.sep == '\\': - return re.sub(r'\\+', r'\\', path) - return re.sub('/+', '/', path) + + def __init__(self, win32=False): + super().__init__() + + # Override vars for test portability. + if win32: + self.chromium_src_dir = 'c:\\fake_src' + self.default_config = ( + 'c:\\fake_src\\tools_webrtc\\mb\\mb_config.pyl') + self.default_isolate_map = ('c:\\fake_src\\testing\\buildbot\\' + 'gn_isolate_map.pyl') + self.platform = 'win32' + self.executable = 'c:\\python\\vpython3.exe' + self.sep = '\\' + self.cwd = 'c:\\fake_src\\out\\Default' + else: + self.chromium_src_dir = '/fake_src' + self.default_config = '/fake_src/tools_webrtc/mb/mb_config.pyl' + self.default_isolate_map = ( + '/fake_src/testing/buildbot/gn_isolate_map.pyl') + self.executable = '/usr/bin/vpython3' + self.platform = 'linux2' + self.sep = '/' + self.cwd = '/fake_src/out/Default' + + self.files = {} + self.dirs = set() + self.calls = [] + self.cmds = [] + self.cross_compile = None + self.out = '' + self.err = '' + self.rmdirs = [] + + def ExpandUser(self, path): + # pylint: disable=no-self-use + return '$HOME/%s' % path + + def Exists(self, path): + abs_path = self._AbsPath(path) + return self.files.get(abs_path) is not None or abs_path in self.dirs + + def ListDir(self, path): + dir_contents = [] + for f in list(self.files.keys()) + list(self.dirs): + head, _ = os.path.split(f) + if head == path: + dir_contents.append(f) + return dir_contents + + def MaybeMakeDirectory(self, path): + abpath = self._AbsPath(path) + self.dirs.add(abpath) + + def PathJoin(self, *comps): + return self.sep.join(comps) + + def ReadFile(self, path): + try: + return self.files[self._AbsPath(path)] + except KeyError as e: + raise IOError('%s not found' % path) from e + + def WriteFile(self, path, contents, force_verbose=False): + if self.args.dryrun or self.args.verbose or force_verbose: + self.Print('\nWriting """\\\n%s""" to %s.\n' % (contents, path)) + abpath = self._AbsPath(path) + self.files[abpath] = contents + + def Call(self, cmd, env=None, capture_output=True, input=None): + # pylint: disable=redefined-builtin + del env + del capture_output + del input + self.calls.append(cmd) + if self.cmds: + return self.cmds.pop(0) + return 0, '', '' + + def Print(self, *args, **kwargs): + sep = kwargs.get('sep', ' ') + end = kwargs.get('end', '\n') + f = kwargs.get('file', sys.stdout) + if f == sys.stderr: + self.err += sep.join(args) + end + else: + self.out += sep.join(args) + end + + def TempDir(self): + tmp_dir = os.path.join(tempfile.gettempdir(), 'mb_test') + self.dirs.add(tmp_dir) + return tmp_dir + + def TempFile(self, mode='w'): + del mode + return FakeFile(self.files) + + def RemoveFile(self, path): + abpath = self._AbsPath(path) + self.files[abpath] = None + + def RemoveDirectory(self, abs_path): + # Normalize the passed-in path to handle different working directories + # used during unit testing. 
+ abs_path = self._AbsPath(abs_path) + self.rmdirs.append(abs_path) + files_to_delete = [f for f in self.files if f.startswith(abs_path)] + for f in files_to_delete: + self.files[f] = None + + def _AbsPath(self, path): + if not ((self.platform == 'win32' and path.startswith('c:')) or + (self.platform != 'win32' and path.startswith('/'))): + path = self.PathJoin(self.cwd, path) + if self.sep == '\\': + return re.sub(r'\\+', r'\\', path) + return re.sub('/+', '/', path) class FakeFile: - # pylint: disable=invalid-name - def __init__(self, files): - self.name = '/tmp/file' - self.buf = '' - self.files = files + # pylint: disable=invalid-name + def __init__(self, files): + self.name = '/tmp/file' + self.buf = '' + self.files = files - def write(self, contents): - self.buf += contents + def write(self, contents): + self.buf += contents - def close(self): - self.files[self.name] = self.buf + def close(self): + self.files[self.name] = self.buf TEST_CONFIG = """\ @@ -213,516 +216,440 @@ def close(self): def CreateFakeMBW(files=None, win32=False): - mbw = FakeMBW(win32=win32) - mbw.files.setdefault(mbw.default_config, TEST_CONFIG) - mbw.files.setdefault( - mbw.ToAbsPath('//testing/buildbot/gn_isolate_map.pyl'), '''{ + mbw = FakeMBW(win32=win32) + mbw.files.setdefault(mbw.default_config, TEST_CONFIG) + mbw.files.setdefault( + mbw.ToAbsPath('//testing/buildbot/gn_isolate_map.pyl'), '''{ "foo_unittests": { "label": "//foo:foo_unittests", "type": "console_test_launcher", "args": [], }, }''') - mbw.files.setdefault( - mbw.ToAbsPath('//build/args/bots/fake_group/fake_args_bot.gn'), - 'is_debug = false\ndcheck_always_on=false\n') - mbw.files.setdefault(mbw.ToAbsPath('//tools/mb/rts_banned_suites.json'), '{}') - if files: - for path, contents in list(files.items()): - mbw.files[path] = contents - if path.endswith('.runtime_deps'): - - def FakeCall(cmd, env=None, capture_output=True, stdin=None): - # pylint: disable=cell-var-from-loop - del cmd - del env - del capture_output - del stdin - mbw.files[path] = contents - return 0, '', '' - - # pylint: disable=invalid-name - mbw.Call = FakeCall - return mbw + mbw.files.setdefault( + mbw.ToAbsPath('//build/args/bots/fake_group/fake_args_bot.gn'), + 'is_debug = false\ndcheck_always_on=false\n') + mbw.files.setdefault(mbw.ToAbsPath('//tools/mb/rts_banned_suites.json'), + '{}') + if files: + for path, contents in list(files.items()): + mbw.files[path] = contents + if path.endswith('.runtime_deps'): + + def FakeCall(cmd, env=None, capture_output=True, stdin=None): + # pylint: disable=cell-var-from-loop + del cmd + del env + del capture_output + del stdin + mbw.files[path] = contents + return 0, '', '' + + # pylint: disable=invalid-name + mbw.Call = FakeCall + return mbw class UnitTest(unittest.TestCase): - # pylint: disable=invalid-name - def check(self, - args, - mbw=None, - files=None, - out=None, - err=None, - ret=None, - env=None): - if not mbw: - mbw = CreateFakeMBW(files) - - try: - prev_env = os.environ.copy() - os.environ = env if env else prev_env - actual_ret = mbw.Main(args) - finally: - os.environ = prev_env - self.assertEqual( - actual_ret, ret, - "ret: %s, out: %s, err: %s" % (actual_ret, mbw.out, mbw.err)) - if out is not None: - self.assertEqual(mbw.out, out) - if err is not None: - self.assertEqual(mbw.err, err) - return mbw - - def test_gen_swarming(self): - files = { - '/tmp/swarming_targets': - 'foo_unittests\n', - '/fake_src/testing/buildbot/gn_isolate_map.pyl': - ("{'foo_unittests': {" - " 'label': '//foo:foo_unittests'," - " 'type': 'raw'," - " 
'args': []," - "}}\n"), - '/fake_src/out/Default/foo_unittests.runtime_deps': ("foo_unittests\n"), - } - mbw = CreateFakeMBW(files) - self.check([ - 'gen', '-c', 'debug_goma', '--swarming-targets-file', - '/tmp/swarming_targets', '//out/Default' - ], - mbw=mbw, - ret=0) - self.assertIn('/fake_src/out/Default/foo_unittests.isolate', mbw.files) - self.assertIn('/fake_src/out/Default/foo_unittests.isolated.gen.json', - mbw.files) - - def test_gen_swarming_android(self): - test_files = { - '/tmp/swarming_targets': - 'foo_unittests\n', - '/fake_src/testing/buildbot/gn_isolate_map.pyl': - ("{'foo_unittests': {" - " 'label': '//foo:foo_unittests'," - " 'type': 'console_test_launcher'," - "}}\n"), - '/fake_src/out/Default/foo_unittests.runtime_deps': ("foo_unittests\n"), - } - mbw = self.check([ - 'gen', '-c', 'android_bot', '//out/Default', '--swarming-targets-file', - '/tmp/swarming_targets', '--isolate-map-file', - '/fake_src/testing/buildbot/gn_isolate_map.pyl' - ], - files=test_files, - ret=0) - - isolate_file = mbw.files['/fake_src/out/Default/foo_unittests.isolate'] - isolate_file_contents = ast.literal_eval(isolate_file) - files = isolate_file_contents['variables']['files'] - command = isolate_file_contents['variables']['command'] - - self.assertEqual( - files, - ['../../.vpython3', '../../testing/test_env.py', 'foo_unittests']) - self.assertEqual(command, [ - 'luci-auth', - 'context', - '--', - 'vpython3', - '../../build/android/test_wrapper/logdog_wrapper.py', - '--target', - 'foo_unittests', - '--logdog-bin-cmd', - '../../.task_template_packages/logdog_butler', - '--logcat-output-file', - '${ISOLATED_OUTDIR}/logcats', - '--store-tombstones', - ]) - - def test_gen_swarming_android_junit_test(self): - test_files = { - '/tmp/swarming_targets': - 'foo_unittests\n', - '/fake_src/testing/buildbot/gn_isolate_map.pyl': - ("{'foo_unittests': {" - " 'label': '//foo:foo_unittests'," - " 'type': 'junit_test'," - "}}\n"), - '/fake_src/out/Default/foo_unittests.runtime_deps': ("foo_unittests\n"), - } - mbw = self.check([ - 'gen', '-c', 'android_bot', '//out/Default', '--swarming-targets-file', - '/tmp/swarming_targets', '--isolate-map-file', - '/fake_src/testing/buildbot/gn_isolate_map.pyl' - ], - files=test_files, - ret=0) - - isolate_file = mbw.files['/fake_src/out/Default/foo_unittests.isolate'] - isolate_file_contents = ast.literal_eval(isolate_file) - files = isolate_file_contents['variables']['files'] - command = isolate_file_contents['variables']['command'] - - self.assertEqual( - files, - ['../../.vpython3', '../../testing/test_env.py', 'foo_unittests']) - self.assertEqual(command, [ - 'luci-auth', - 'context', - '--', - 'vpython3', - '../../build/android/test_wrapper/logdog_wrapper.py', - '--target', - 'foo_unittests', - '--logdog-bin-cmd', - '../../.task_template_packages/logdog_butler', - '--logcat-output-file', - '${ISOLATED_OUTDIR}/logcats', - '--store-tombstones', - ]) - - def test_gen_script(self): - test_files = { - '/tmp/swarming_targets': - 'foo_unittests_script\n', - '/fake_src/testing/buildbot/gn_isolate_map.pyl': - ("{'foo_unittests_script': {" - " 'label': '//foo:foo_unittests'," - " 'type': 'script'," - " 'script': '//foo/foo_unittests_script.py'," - "}}\n"), - '/fake_src/out/Default/foo_unittests_script.runtime_deps': - ("foo_unittests\n" - "foo_unittests_script.py\n"), - } - mbw = self.check([ - 'gen', '-c', 'debug_goma', '//out/Default', '--swarming-targets-file', - '/tmp/swarming_targets', '--isolate-map-file', - '/fake_src/testing/buildbot/gn_isolate_map.pyl' - ], - 
files=test_files, - ret=0) - - isolate_file = ( - mbw.files['/fake_src/out/Default/foo_unittests_script.isolate']) - isolate_file_contents = ast.literal_eval(isolate_file) - files = isolate_file_contents['variables']['files'] - command = isolate_file_contents['variables']['command'] - - self.assertEqual(files, [ - '../../.vpython3', - '../../testing/test_env.py', - 'foo_unittests', - 'foo_unittests_script.py', - ]) - self.assertEqual(command, [ - 'vpython3', - '../../foo/foo_unittests_script.py', - ]) - - def test_gen_raw(self): - test_files = { - '/tmp/swarming_targets': - 'foo_unittests\n', - '/fake_src/testing/buildbot/gn_isolate_map.pyl': - ("{'foo_unittests': {" - " 'label': '//foo:foo_unittests'," - " 'type': 'raw'," - "}}\n"), - '/fake_src/out/Default/foo_unittests.runtime_deps': ("foo_unittests\n"), - } - mbw = self.check([ - 'gen', '-c', 'debug_goma', '//out/Default', '--swarming-targets-file', - '/tmp/swarming_targets', '--isolate-map-file', - '/fake_src/testing/buildbot/gn_isolate_map.pyl' - ], - files=test_files, - ret=0) - - isolate_file = mbw.files['/fake_src/out/Default/foo_unittests.isolate'] - isolate_file_contents = ast.literal_eval(isolate_file) - files = isolate_file_contents['variables']['files'] - command = isolate_file_contents['variables']['command'] - - self.assertEqual(files, [ - '../../.vpython3', - '../../testing/test_env.py', - 'foo_unittests', - ]) - self.assertEqual(command, ['bin/run_foo_unittests']) - - def test_gen_non_parallel_console_test_launcher(self): - test_files = { - '/tmp/swarming_targets': - 'foo_unittests\n', - '/fake_src/testing/buildbot/gn_isolate_map.pyl': - ("{'foo_unittests': {" - " 'label': '//foo:foo_unittests'," - " 'type': 'non_parallel_console_test_launcher'," - "}}\n"), - '/fake_src/out/Default/foo_unittests.runtime_deps': ("foo_unittests\n"), - } - mbw = self.check([ - 'gen', '-c', 'debug_goma', '//out/Default', '--swarming-targets-file', - '/tmp/swarming_targets', '--isolate-map-file', - '/fake_src/testing/buildbot/gn_isolate_map.pyl' - ], - files=test_files, - ret=0) - - isolate_file = mbw.files['/fake_src/out/Default/foo_unittests.isolate'] - isolate_file_contents = ast.literal_eval(isolate_file) - files = isolate_file_contents['variables']['files'] - command = isolate_file_contents['variables']['command'] - - self.assertEqual(files, [ - '../../.vpython3', - '../../testing/test_env.py', - '../../third_party/gtest-parallel/gtest-parallel', - '../../third_party/gtest-parallel/gtest_parallel.py', - '../../tools_webrtc/gtest-parallel-wrapper.py', - 'foo_unittests', - ]) - self.assertEqual(command, [ - 'vpython3', - '../../testing/test_env.py', - '../../tools_webrtc/gtest-parallel-wrapper.py', - '--output_dir=${ISOLATED_OUTDIR}/test_logs', - '--gtest_color=no', - '--workers=1', - '--retry_failed=3', - './foo_unittests', - '--asan=0', - '--lsan=0', - '--msan=0', - '--tsan=0', - ]) - - def test_isolate_windowed_test_launcher_linux(self): - test_files = { - '/tmp/swarming_targets': - 'foo_unittests\n', - '/fake_src/testing/buildbot/gn_isolate_map.pyl': - ("{'foo_unittests': {" - " 'label': '//foo:foo_unittests'," - " 'type': 'windowed_test_launcher'," - "}}\n"), - '/fake_src/out/Default/foo_unittests.runtime_deps': - ("foo_unittests\n" - "some_resource_file\n"), - } - mbw = self.check([ - 'gen', '-c', 'debug_goma', '//out/Default', '--swarming-targets-file', - '/tmp/swarming_targets', '--isolate-map-file', - '/fake_src/testing/buildbot/gn_isolate_map.pyl' - ], - files=test_files, - ret=0) - - isolate_file = 
mbw.files['/fake_src/out/Default/foo_unittests.isolate'] - isolate_file_contents = ast.literal_eval(isolate_file) - files = isolate_file_contents['variables']['files'] - command = isolate_file_contents['variables']['command'] - - self.assertEqual(files, [ - '../../.vpython3', - '../../testing/test_env.py', - '../../testing/xvfb.py', - '../../third_party/gtest-parallel/gtest-parallel', - '../../third_party/gtest-parallel/gtest_parallel.py', - '../../tools_webrtc/gtest-parallel-wrapper.py', - 'foo_unittests', - 'some_resource_file', - ]) - self.assertEqual(command, [ - 'vpython3', - '../../testing/xvfb.py', - '../../tools_webrtc/gtest-parallel-wrapper.py', - '--output_dir=${ISOLATED_OUTDIR}/test_logs', - '--gtest_color=no', - '--retry_failed=3', - './foo_unittests', - '--asan=0', - '--lsan=0', - '--msan=0', - '--tsan=0', - ]) - - def test_gen_windowed_test_launcher_win(self): - files = { - 'c:\\fake_src\\out\\Default\\tmp\\swarming_targets': - 'unittests\n', - 'c:\\fake_src\\testing\\buildbot\\gn_isolate_map.pyl': - ("{'unittests': {" - " 'label': '//somewhere:unittests'," - " 'type': 'windowed_test_launcher'," - "}}\n"), - r'c:\fake_src\out\Default\unittests.exe.runtime_deps': - ("unittests.exe\n" - "some_dependency\n"), - } - mbw = CreateFakeMBW(files=files, win32=True) - self.check([ - 'gen', '-c', 'debug_goma', '--swarming-targets-file', - 'c:\\fake_src\\out\\Default\\tmp\\swarming_targets', - '--isolate-map-file', - 'c:\\fake_src\\testing\\buildbot\\gn_isolate_map.pyl', '//out/Default' - ], - mbw=mbw, - ret=0) - - isolate_file = mbw.files['c:\\fake_src\\out\\Default\\unittests.isolate'] - isolate_file_contents = ast.literal_eval(isolate_file) - files = isolate_file_contents['variables']['files'] - command = isolate_file_contents['variables']['command'] - - self.assertEqual(files, [ - '../../.vpython3', - '../../testing/test_env.py', - '../../third_party/gtest-parallel/gtest-parallel', - '../../third_party/gtest-parallel/gtest_parallel.py', - '../../tools_webrtc/gtest-parallel-wrapper.py', - 'some_dependency', - 'unittests.exe', - ]) - self.assertEqual(command, [ - 'vpython3', - '../../testing/test_env.py', - '../../tools_webrtc/gtest-parallel-wrapper.py', - '--output_dir=${ISOLATED_OUTDIR}/test_logs', - '--gtest_color=no', - '--retry_failed=3', - r'.\unittests.exe', - '--asan=0', - '--lsan=0', - '--msan=0', - '--tsan=0', - ]) - - def test_gen_console_test_launcher(self): - test_files = { - '/tmp/swarming_targets': - 'foo_unittests\n', - '/fake_src/testing/buildbot/gn_isolate_map.pyl': - ("{'foo_unittests': {" - " 'label': '//foo:foo_unittests'," - " 'type': 'console_test_launcher'," - "}}\n"), - '/fake_src/out/Default/foo_unittests.runtime_deps': ("foo_unittests\n"), - } - mbw = self.check([ - 'gen', '-c', 'debug_goma', '//out/Default', '--swarming-targets-file', - '/tmp/swarming_targets', '--isolate-map-file', - '/fake_src/testing/buildbot/gn_isolate_map.pyl' - ], - files=test_files, - ret=0) - - isolate_file = mbw.files['/fake_src/out/Default/foo_unittests.isolate'] - isolate_file_contents = ast.literal_eval(isolate_file) - files = isolate_file_contents['variables']['files'] - command = isolate_file_contents['variables']['command'] - - self.assertEqual(files, [ - '../../.vpython3', - '../../testing/test_env.py', - '../../third_party/gtest-parallel/gtest-parallel', - '../../third_party/gtest-parallel/gtest_parallel.py', - '../../tools_webrtc/gtest-parallel-wrapper.py', - 'foo_unittests', - ]) - self.assertEqual(command, [ - 'vpython3', - '../../testing/test_env.py', - 
'../../tools_webrtc/gtest-parallel-wrapper.py', - '--output_dir=${ISOLATED_OUTDIR}/test_logs', - '--gtest_color=no', - '--retry_failed=3', - './foo_unittests', - '--asan=0', - '--lsan=0', - '--msan=0', - '--tsan=0', - ]) - - def test_isolate_test_launcher_with_webcam(self): - test_files = { - '/tmp/swarming_targets': - 'foo_unittests\n', - '/fake_src/testing/buildbot/gn_isolate_map.pyl': - ("{'foo_unittests': {" - " 'label': '//foo:foo_unittests'," - " 'type': 'console_test_launcher'," - " 'use_webcam': True," - "}}\n"), - '/fake_src/out/Default/foo_unittests.runtime_deps': - ("foo_unittests\n" - "some_resource_file\n"), - } - mbw = self.check([ - 'gen', '-c', 'debug_goma', '//out/Default', '--swarming-targets-file', - '/tmp/swarming_targets', '--isolate-map-file', - '/fake_src/testing/buildbot/gn_isolate_map.pyl' - ], - files=test_files, - ret=0) - - isolate_file = mbw.files['/fake_src/out/Default/foo_unittests.isolate'] - isolate_file_contents = ast.literal_eval(isolate_file) - files = isolate_file_contents['variables']['files'] - command = isolate_file_contents['variables']['command'] - - self.assertEqual(files, [ - '../../.vpython3', - '../../testing/test_env.py', - '../../third_party/gtest-parallel/gtest-parallel', - '../../third_party/gtest-parallel/gtest_parallel.py', - '../../tools_webrtc/ensure_webcam_is_running.py', - '../../tools_webrtc/gtest-parallel-wrapper.py', - 'foo_unittests', - 'some_resource_file', - ]) - self.assertEqual(command, [ - 'vpython3', - '../../tools_webrtc/ensure_webcam_is_running.py', - 'vpython3', - '../../testing/test_env.py', - '../../tools_webrtc/gtest-parallel-wrapper.py', - '--output_dir=${ISOLATED_OUTDIR}/test_logs', - '--gtest_color=no', - '--retry_failed=3', - './foo_unittests', - '--asan=0', - '--lsan=0', - '--msan=0', - '--tsan=0', - ]) - - def test_isolate(self): - files = { - '/fake_src/out/Default/toolchain.ninja': - "", - '/fake_src/testing/buildbot/gn_isolate_map.pyl': - ("{'foo_unittests': {" - " 'label': '//foo:foo_unittests'," - " 'type': 'non_parallel_console_test_launcher'," - "}}\n"), - '/fake_src/out/Default/foo_unittests.runtime_deps': ("foo_unittests\n"), - } - self.check( - ['isolate', '-c', 'debug_goma', '//out/Default', 'foo_unittests'], - files=files, - ret=0) - - # test running isolate on an existing build_dir - files['/fake_src/out/Default/args.gn'] = 'is_debug = true\n' - self.check(['isolate', '//out/Default', 'foo_unittests'], - files=files, - ret=0) - files['/fake_src/out/Default/mb_type'] = 'gn\n' - self.check(['isolate', '//out/Default', 'foo_unittests'], - files=files, - ret=0) + # pylint: disable=invalid-name + def check(self, + args, + mbw=None, + files=None, + out=None, + err=None, + ret=None, + env=None): + if not mbw: + mbw = CreateFakeMBW(files) + + try: + prev_env = os.environ.copy() + os.environ = env if env else prev_env + actual_ret = mbw.Main(args) + finally: + os.environ = prev_env + self.assertEqual( + actual_ret, ret, + "ret: %s, out: %s, err: %s" % (actual_ret, mbw.out, mbw.err)) + if out is not None: + self.assertEqual(mbw.out, out) + if err is not None: + self.assertEqual(mbw.err, err) + return mbw + + def test_gen_swarming(self): + files = { + '/tmp/swarming_targets': + 'foo_unittests\n', + '/fake_src/testing/buildbot/gn_isolate_map.pyl': + ("{'foo_unittests': {" + " 'label': '//foo:foo_unittests'," + " 'type': 'raw'," + " 'args': []," + "}}\n"), + '/fake_src/out/Default/foo_unittests.runtime_deps': + ("foo_unittests\n"), + } + mbw = CreateFakeMBW(files) + self.check([ + 'gen', '-c', 'debug_goma', 
'--swarming-targets-file', + '/tmp/swarming_targets', '//out/Default' + ], + mbw=mbw, + ret=0) + self.assertIn('/fake_src/out/Default/foo_unittests.isolate', mbw.files) + self.assertIn('/fake_src/out/Default/foo_unittests.isolated.gen.json', + mbw.files) + + def test_gen_swarming_android(self): + test_files = { + '/tmp/swarming_targets': + 'foo_unittests\n', + '/fake_src/testing/buildbot/gn_isolate_map.pyl': + ("{'foo_unittests': {" + " 'label': '//foo:foo_unittests'," + " 'type': 'console_test_launcher'," + "}}\n"), + '/fake_src/out/Default/foo_unittests.runtime_deps': + ("foo_unittests\n"), + } + mbw = self.check([ + 'gen', '-c', 'android_bot', '//out/Default', + '--swarming-targets-file', '/tmp/swarming_targets', + '--isolate-map-file', + '/fake_src/testing/buildbot/gn_isolate_map.pyl' + ], + files=test_files, + ret=0) + + isolate_file = mbw.files['/fake_src/out/Default/foo_unittests.isolate'] + isolate_file_contents = ast.literal_eval(isolate_file) + files = isolate_file_contents['variables']['files'] + command = isolate_file_contents['variables']['command'] + + self.assertEqual( + files, + ['../../.vpython3', '../../testing/test_env.py', 'foo_unittests']) + self.assertEqual(command, [ + 'luci-auth', + 'context', + '--', + 'vpython3', + '../../build/android/test_wrapper/logdog_wrapper.py', + '--target', + 'foo_unittests', + '--logdog-bin-cmd', + '../../.task_template_packages/logdog_butler', + '--logcat-output-file', + '${ISOLATED_OUTDIR}/logcats', + '--store-tombstones', + ]) + + def test_gen_swarming_android_test(self): + test_files = { + '/tmp/swarming_targets': + 'foo_unittests\n', + '/fake_src/testing/buildbot/gn_isolate_map.pyl': + ("{'foo_unittests': {" + " 'label': '//foo:foo_unittests'," + " 'type': 'console_test_launcher'," + "}}\n"), + '/fake_src/out/Default/foo_unittests.runtime_deps': + ("foo_unittests\n"), + } + mbw = self.check([ + 'gen', '-c', 'android_bot', '//out/Default', + '--swarming-targets-file', '/tmp/swarming_targets', + '--isolate-map-file', + '/fake_src/testing/buildbot/gn_isolate_map.pyl' + ], + files=test_files, + ret=0) + + isolate_file = mbw.files['/fake_src/out/Default/foo_unittests.isolate'] + isolate_file_contents = ast.literal_eval(isolate_file) + files = isolate_file_contents['variables']['files'] + command = isolate_file_contents['variables']['command'] + + self.assertEqual( + files, + ['../../.vpython3', '../../testing/test_env.py', 'foo_unittests']) + self.assertEqual(command, [ + 'luci-auth', + 'context', + '--', + 'vpython3', + '../../build/android/test_wrapper/logdog_wrapper.py', + '--target', + 'foo_unittests', + '--logdog-bin-cmd', + '../../.task_template_packages/logdog_butler', + '--logcat-output-file', + '${ISOLATED_OUTDIR}/logcats', + '--store-tombstones', + ]) + + def test_gen_raw(self): + test_files = { + '/tmp/swarming_targets': + 'foo_unittests\n', + '/fake_src/testing/buildbot/gn_isolate_map.pyl': + ("{'foo_unittests': {" + " 'label': '//foo:foo_unittests'," + " 'type': 'raw'," + "}}\n"), + '/fake_src/out/Default/foo_unittests.runtime_deps': + ("foo_unittests\n"), + } + mbw = self.check([ + 'gen', '-c', 'debug_goma', '//out/Default', + '--swarming-targets-file', '/tmp/swarming_targets', + '--isolate-map-file', + '/fake_src/testing/buildbot/gn_isolate_map.pyl' + ], + files=test_files, + ret=0) + + isolate_file = mbw.files['/fake_src/out/Default/foo_unittests.isolate'] + isolate_file_contents = ast.literal_eval(isolate_file) + files = isolate_file_contents['variables']['files'] + command = isolate_file_contents['variables']['command'] 
+ + self.assertEqual(files, [ + '../../.vpython3', + '../../testing/test_env.py', + 'foo_unittests', + ]) + self.assertEqual(command, ['bin/run_foo_unittests']) + + def test_gen_non_parallel_console_test_launcher(self): + test_files = { + '/tmp/swarming_targets': + 'foo_unittests\n', + '/fake_src/testing/buildbot/gn_isolate_map.pyl': + ("{'foo_unittests': {" + " 'label': '//foo:foo_unittests'," + " 'type': 'non_parallel_console_test_launcher'," + "}}\n"), + '/fake_src/out/Default/foo_unittests.runtime_deps': + ("foo_unittests\n"), + } + mbw = self.check([ + 'gen', '-c', 'debug_goma', '//out/Default', + '--swarming-targets-file', '/tmp/swarming_targets', + '--isolate-map-file', + '/fake_src/testing/buildbot/gn_isolate_map.pyl' + ], + files=test_files, + ret=0) + + isolate_file = mbw.files['/fake_src/out/Default/foo_unittests.isolate'] + isolate_file_contents = ast.literal_eval(isolate_file) + files = isolate_file_contents['variables']['files'] + command = isolate_file_contents['variables']['command'] + + self.assertEqual(files, [ + '../../.vpython3', + '../../testing/test_env.py', + '../../third_party/gtest-parallel/gtest-parallel', + '../../third_party/gtest-parallel/gtest_parallel.py', + '../../tools_webrtc/gtest-parallel-wrapper.py', + 'foo_unittests', + ]) + self.assertEqual(command, [ + 'vpython3', + '../../testing/test_env.py', + '../../tools_webrtc/gtest-parallel-wrapper.py', + '--output_dir=${ISOLATED_OUTDIR}/test_logs', + '--gtest_color=no', + '--workers=1', + '--retry_failed=3', + './foo_unittests', + '--asan=0', + '--lsan=0', + '--msan=0', + '--tsan=0', + ]) + + def test_isolate_windowed_test_launcher_linux(self): + test_files = { + '/tmp/swarming_targets': + 'foo_unittests\n', + '/fake_src/testing/buildbot/gn_isolate_map.pyl': + ("{'foo_unittests': {" + " 'label': '//foo:foo_unittests'," + " 'type': 'windowed_test_launcher'," + "}}\n"), + '/fake_src/out/Default/foo_unittests.runtime_deps': + ("foo_unittests\n" + "some_resource_file\n"), + } + mbw = self.check([ + 'gen', '-c', 'debug_goma', '//out/Default', + '--swarming-targets-file', '/tmp/swarming_targets', + '--isolate-map-file', + '/fake_src/testing/buildbot/gn_isolate_map.pyl' + ], + files=test_files, + ret=0) + + isolate_file = mbw.files['/fake_src/out/Default/foo_unittests.isolate'] + isolate_file_contents = ast.literal_eval(isolate_file) + files = isolate_file_contents['variables']['files'] + command = isolate_file_contents['variables']['command'] + + self.assertEqual(files, [ + '../../.vpython3', + '../../testing/test_env.py', + '../../testing/xvfb.py', + '../../third_party/gtest-parallel/gtest-parallel', + '../../third_party/gtest-parallel/gtest_parallel.py', + '../../tools_webrtc/gtest-parallel-wrapper.py', + 'foo_unittests', + 'some_resource_file', + ]) + self.assertEqual(command, [ + 'vpython3', + '../../testing/xvfb.py', + '../../tools_webrtc/gtest-parallel-wrapper.py', + '--output_dir=${ISOLATED_OUTDIR}/test_logs', + '--gtest_color=no', + '--retry_failed=3', + './foo_unittests', + '--asan=0', + '--lsan=0', + '--msan=0', + '--tsan=0', + ]) + + def test_gen_windowed_test_launcher_win(self): + files = { + 'c:\\fake_src\\out\\Default\\tmp\\swarming_targets': + 'unittests\n', + 'c:\\fake_src\\testing\\buildbot\\gn_isolate_map.pyl': + ("{'unittests': {" + " 'label': '//somewhere:unittests'," + " 'type': 'windowed_test_launcher'," + "}}\n"), + r'c:\fake_src\out\Default\unittests.exe.runtime_deps': + ("unittests.exe\n" + "some_dependency\n"), + } + mbw = CreateFakeMBW(files=files, win32=True) + self.check([ + 'gen', '-c', 
'debug_goma', '--swarming-targets-file', + 'c:\\fake_src\\out\\Default\\tmp\\swarming_targets', + '--isolate-map-file', + 'c:\\fake_src\\testing\\buildbot\\gn_isolate_map.pyl', + '//out/Default' + ], + mbw=mbw, + ret=0) + + isolate_file = mbw.files[ + 'c:\\fake_src\\out\\Default\\unittests.isolate'] + isolate_file_contents = ast.literal_eval(isolate_file) + files = isolate_file_contents['variables']['files'] + command = isolate_file_contents['variables']['command'] + + self.assertEqual(files, [ + '../../.vpython3', + '../../testing/test_env.py', + '../../third_party/gtest-parallel/gtest-parallel', + '../../third_party/gtest-parallel/gtest_parallel.py', + '../../tools_webrtc/gtest-parallel-wrapper.py', + 'some_dependency', + 'unittests.exe', + ]) + self.assertEqual(command, [ + 'vpython3', + '../../testing/test_env.py', + '../../tools_webrtc/gtest-parallel-wrapper.py', + '--output_dir=${ISOLATED_OUTDIR}/test_logs', + '--gtest_color=no', + '--retry_failed=3', + r'.\unittests.exe', + '--asan=0', + '--lsan=0', + '--msan=0', + '--tsan=0', + ]) + + def test_gen_console_test_launcher(self): + test_files = { + '/tmp/swarming_targets': + 'foo_unittests\n', + '/fake_src/testing/buildbot/gn_isolate_map.pyl': + ("{'foo_unittests': {" + " 'label': '//foo:foo_unittests'," + " 'type': 'console_test_launcher'," + "}}\n"), + '/fake_src/out/Default/foo_unittests.runtime_deps': + ("foo_unittests\n"), + } + mbw = self.check([ + 'gen', '-c', 'debug_goma', '//out/Default', + '--swarming-targets-file', '/tmp/swarming_targets', + '--isolate-map-file', + '/fake_src/testing/buildbot/gn_isolate_map.pyl' + ], + files=test_files, + ret=0) + + isolate_file = mbw.files['/fake_src/out/Default/foo_unittests.isolate'] + isolate_file_contents = ast.literal_eval(isolate_file) + files = isolate_file_contents['variables']['files'] + command = isolate_file_contents['variables']['command'] + + self.assertEqual(files, [ + '../../.vpython3', + '../../testing/test_env.py', + '../../third_party/gtest-parallel/gtest-parallel', + '../../third_party/gtest-parallel/gtest_parallel.py', + '../../tools_webrtc/gtest-parallel-wrapper.py', + 'foo_unittests', + ]) + self.assertEqual(command, [ + 'vpython3', + '../../testing/test_env.py', + '../../tools_webrtc/gtest-parallel-wrapper.py', + '--output_dir=${ISOLATED_OUTDIR}/test_logs', + '--gtest_color=no', + '--retry_failed=3', + './foo_unittests', + '--asan=0', + '--lsan=0', + '--msan=0', + '--tsan=0', + ]) + + def test_isolate(self): + files = { + '/fake_src/out/Default/toolchain.ninja': + "", + '/fake_src/testing/buildbot/gn_isolate_map.pyl': + ("{'foo_unittests': {" + " 'label': '//foo:foo_unittests'," + " 'type': 'non_parallel_console_test_launcher'," + "}}\n"), + '/fake_src/out/Default/foo_unittests.runtime_deps': + ("foo_unittests\n"), + } + self.check( + ['isolate', '-c', 'debug_goma', '//out/Default', 'foo_unittests'], + files=files, + ret=0) + + # test running isolate on an existing build_dir + files['/fake_src/out/Default/args.gn'] = 'is_debug = true\n' + self.check(['isolate', '//out/Default', 'foo_unittests'], + files=files, + ret=0) + files['/fake_src/out/Default/mb_type'] = 'gn\n' + self.check(['isolate', '//out/Default', 'foo_unittests'], + files=files, + ret=0) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tools_webrtc/presubmit_checks_lib/build_helpers.py b/tools_webrtc/presubmit_checks_lib/build_helpers.py index d64c2f457b..42c919b758 100644 --- a/tools_webrtc/presubmit_checks_lib/build_helpers.py +++ 
b/tools_webrtc/presubmit_checks_lib/build_helpers.py @@ -7,8 +7,8 @@ # tree. An additional intellectual property rights grant can be found # in the file PATENTS. All contributing project authors may # be found in the AUTHORS file in the root of the source tree. -"""This script helps to invoke gn and ninja -which lie in depot_tools repository.""" +"""This script helps to invoke gn and ninja which lie in depot_tools +repository.""" import json import os @@ -19,30 +19,38 @@ import tempfile -def FindSrcDirPath(): - """Returns the abs path to the src/ dir of the project.""" - src_dir = os.path.dirname(os.path.abspath(__file__)) - while os.path.basename(src_dir) != 'src': - src_dir = os.path.normpath(os.path.join(src_dir, os.pardir)) - return src_dir +def find_root_path(): + """Returns the absolute path to the highest level repo root. + If this repo is checked out as a submodule of the chromium/src + superproject, this returns the superproect root. Otherwise, it returns the + webrtc/src repo root. + """ + root_dir = os.path.dirname(os.path.abspath(__file__)) + while os.path.basename(root_dir) not in ('src', 'chromium'): + par_dir = os.path.normpath(os.path.join(root_dir, os.pardir)) + if par_dir == root_dir: + raise RuntimeError('Could not find the repo root.') + root_dir = par_dir + return root_dir -SRC_DIR = FindSrcDirPath() -sys.path.append(os.path.join(SRC_DIR, 'build')) + +ROOT_DIR = find_root_path() +sys.path.append(os.path.join(ROOT_DIR, 'build')) import find_depot_tools -def RunGnCommand(args, root_dir=None): - """Runs `gn` with provided args and return error if any.""" - try: - command = [ - sys.executable, - os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py') - ] + args - subprocess.check_output(command, cwd=root_dir) - except subprocess.CalledProcessError as err: - return err.output - return None +def run_gn_command(args, root_dir=None): + """Runs `gn` with provided args and return error if any.""" + try: + command = [ + sys.executable, + os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py') + ] + args + subprocess.check_output(command, cwd=root_dir) + except subprocess.CalledProcessError as err: + return err.output + return None # GN_ERROR_RE matches the summary of an error output by `gn check`. @@ -51,50 +59,50 @@ def RunGnCommand(args, root_dir=None): GN_ERROR_RE = re.compile(r'^ERROR .+(?:\n.*[^_\n].*$)+', re.MULTILINE) -def RunGnCheck(root_dir=None): - """Runs `gn gen --check` with default args to detect mismatches between +def run_gn_check(root_dir=None): + """Runs `gn gen --check` with default args to detect mismatches between #includes and dependencies in the BUILD.gn files, as well as general build errors. Returns a list of error summary strings. """ - out_dir = tempfile.mkdtemp('gn') - try: - error = RunGnCommand(['gen', '--check', out_dir], root_dir) - finally: - shutil.rmtree(out_dir, ignore_errors=True) - return GN_ERROR_RE.findall(error.decode('utf-8')) if error else [] + out_dir = tempfile.mkdtemp('gn') + try: + error = run_gn_command(['gen', '--check', out_dir], root_dir) + finally: + shutil.rmtree(out_dir, ignore_errors=True) + return GN_ERROR_RE.findall(error.decode('utf-8')) if error else [] -def RunNinjaCommand(args, root_dir=None): - """Runs ninja quietly. Any failure (e.g. clang not found) is +def run_ninja_command(args, root_dir=None): + """Runs ninja quietly. Any failure (e.g. 
clang not found) is silently discarded, since this is unlikely an error in submitted CL.""" - command = [os.path.join(SRC_DIR, 'third_party', 'ninja', 'ninja')] + args - p = subprocess.Popen(command, - cwd=root_dir, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - out, _ = p.communicate() - return out + command = [os.path.join(ROOT_DIR, 'third_party', 'ninja', 'ninja')] + args + proc = subprocess.Popen(command, + cwd=root_dir, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + out, _ = proc.communicate() + return out -def GetClangTidyPath(): - """POC/WIP! Use the one we have, even it doesn't match clang's version.""" - tidy = ('third_party/android_toolchain/toolchains/' - 'llvm/prebuilt/linux-x86_64/bin/clang-tidy') - return os.path.join(SRC_DIR, tidy) +def get_clang_tidy_path(): + """POC/WIP! Use the one we have, even it doesn't match clang's version.""" + tidy = ('third_party/android_toolchain/toolchains/' + 'llvm/prebuilt/linux-x86_64/bin/clang-tidy') + return os.path.join(ROOT_DIR, tidy) -def GetCompilationDb(root_dir=None): - """Run ninja compdb tool to get proper flags, defines and include paths.""" - # The compdb tool expect a rule. - commands = json.loads(RunNinjaCommand(['-t', 'compdb', 'cxx'], root_dir)) - # Turns 'file' field into a key. - return {v['file']: v for v in commands} +def get_compilation_db(root_dir=None): + """Run ninja compdb tool to get proper flags, defines and include paths.""" + # The compdb tool expect a rule. + commands = json.loads(run_ninja_command(['-t', 'compdb', 'cxx'], root_dir)) + # Turns 'file' field into a key. + return {v['file']: v for v in commands} -def GetCompilationCommand(filepath, gn_args, work_dir): - """Get the whole command used to compile one cc file. +def get_compilation_command(filepath, gn_args, work_dir): + """Get the whole command used to compile one cc file. Typically, clang++ with flags, defines and include paths. Args: @@ -105,30 +113,30 @@ def GetCompilationCommand(filepath, gn_args, work_dir): Returns: Command as a list, ready to be consumed by subprocess.Popen. """ - gn_errors = RunGnCommand(['gen'] + gn_args + [work_dir]) - if gn_errors: - raise RuntimeError('FYI, cannot complete check due to gn error:\n%s\n' - 'Please open a bug.' % gn_errors) - - # Needed for single file compilation. - commands = GetCompilationDb(work_dir) - - # Path as referenced by ninja. - rel_path = os.path.relpath(os.path.abspath(filepath), work_dir) - - # Gather defines, include path and flags (such as -std=c++11). - try: - compilation_entry = commands[rel_path] - except KeyError as not_found: - raise ValueError('%s: Not found in compilation database.\n' - 'Please check the path.' % filepath) from not_found - command = compilation_entry['command'].split() - - # Remove troublesome flags. May trigger an error otherwise. - if '-MMD' in command: - command.remove('-MMD') - if '-MF' in command: - index = command.index('-MF') - del command[index:index + 2] # Remove filename as well. - - return command + gn_errors = run_gn_command(['gen'] + gn_args + [work_dir]) + if gn_errors: + raise RuntimeError('FYI, cannot complete check due to gn error:\n%s\n' + 'Please open a bug.' % gn_errors) + + # Needed for single file compilation. + commands = get_compilation_db(work_dir) + + # Path as referenced by ninja. + rel_path = os.path.relpath(os.path.abspath(filepath), work_dir) + + # Gather defines, include path and flags (such as -std=c++11). 
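# For reference, a hedged sketch of what the compdb lookup at this step amounts
# to: `ninja -t compdb cxx` emits a JSON array of {"file", "command", ...}
# entries, get_compilation_db() keys them by "file", and the entry for the
# ninja-relative path of the source file carries the exact compile command.
# compile_command_for() below is a hypothetical standalone helper, not part of
# build_helpers.py; it assumes a `ninja` binary on PATH (the script itself uses
# the checked-in third_party/ninja) and an already-generated build directory.
import json
import os
import subprocess

def compile_command_for(build_dir, source_path):
    """Returns the compile command (as a list) for one .cc file."""
    compdb = subprocess.check_output(['ninja', '-t', 'compdb', 'cxx'],
                                     cwd=build_dir)
    by_file = {entry['file']: entry for entry in json.loads(compdb)}
    rel_path = os.path.relpath(os.path.abspath(source_path), build_dir)
    command = by_file[rel_path]['command'].split()
    # Drop dependency-file flags; they only matter to the build itself.
    if '-MMD' in command:
        command.remove('-MMD')
    if '-MF' in command:
        index = command.index('-MF')
        del command[index:index + 2]  # Remove the filename argument as well.
    return command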
+ try: + compilation_entry = commands[rel_path] + except KeyError as not_found: + raise ValueError('%s: Not found in compilation database.\n' + 'Please check the path.' % filepath) from not_found + command = compilation_entry['command'].split() + + # Remove troublesome flags. May trigger an error otherwise. + if '-MMD' in command: + command.remove('-MMD') + if '-MF' in command: + index = command.index('-MF') + del command[index:index + 2] # Remove filename as well. + + return command diff --git a/tools_webrtc/presubmit_checks_lib/build_helpers_test.py b/tools_webrtc/presubmit_checks_lib/build_helpers_test.py index 42b94d6c29..431f72cc0f 100755 --- a/tools_webrtc/presubmit_checks_lib/build_helpers_test.py +++ b/tools_webrtc/presubmit_checks_lib/build_helpers_test.py @@ -20,13 +20,13 @@ class GnCheckTest(unittest.TestCase): - def testCircularDependencyError(self): - test_dir = os.path.join(TESTDATA_DIR, 'circular_dependency') - expected_error = re.compile('ERROR Dependency cycle') - gn_output = build_helpers.RunGnCheck(test_dir) - self.assertEqual(1, len(gn_output)) - self.assertRegex(gn_output[0], expected_error) + def test_circular_dependency_error(self): + test_dir = os.path.join(TESTDATA_DIR, 'circular_dependency') + expected_error = re.compile('ERROR Dependency cycle') + gn_output = build_helpers.run_gn_check(test_dir) + self.assertEqual(1, len(gn_output)) + self.assertRegex(gn_output[0], expected_error) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tools_webrtc/remove_extra_namespace.py b/tools_webrtc/remove_extra_namespace.py new file mode 100755 index 0000000000..21ac2d1aa2 --- /dev/null +++ b/tools_webrtc/remove_extra_namespace.py @@ -0,0 +1,93 @@ +#!/usr/bin/env vpython3 + +# Copyright (c) 2023 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. +"""Remove extra namespace qualifications + +Looks for names that don't need to be qualified by namespace, and deletes +the qualifier. + +Depends on namespace names being properly formatted +""" +import os +import glob +import sys +import re +import argparse + + +def remove_extra_namespace_from_file(namespace, filename): + print('Processing namespace', namespace, 'file', filename) + with open(filename) as file: + newfile = open(filename + '.NEW', 'w') + namespaces = [] + changes = 0 + for line in file: + match = re.match(r'namespace (\S+) {', line) + if match is not None: + namespaces.insert(0, match.group(1)) + newfile.write(line) + continue + match = re.match(r'}\s+// namespace (\S+)$', line) + if match is not None: + if match.group(1) != namespaces[0]: + print('Namespace mismatch') + raise RuntimeError('Namespace mismatch') + del namespaces[0] + newfile.write(line) + continue + # Remove namespace usage. Only replacing when target + # namespace is the innermost namespace. + if len(namespaces) > 0 and namespaces[0] == namespace: + # Note that in namespace foo, we match neither ::foo::name + # nor morefoo::name + # Neither do we match foo:: when it is not followed by + # an identifier character. 
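# A worked illustration of the qualifier-stripping pattern constructed just
# below, using a made-up namespace name and sample lines; _namespace,
# _usage_re and the inputs are illustrative only (expected output in comments).
import re

_namespace = 'foo'
_usage_re = r'(?<=[^a-z:]){}::(?=[a-zA-Z])'.format(_namespace)

print(re.sub(_usage_re, '', '  foo::Thing t = foo::MakeThing();'))
# ->   Thing t = MakeThing();
print(re.sub(_usage_re, '', '  morefoo::Thing t;'))  # unchanged: 'morefoo' is not 'foo'
print(re.sub(_usage_re, '', '  ::foo::Thing t;'))    # unchanged: already fully qualified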
+ usage_re = r'(?<=[^a-z:]){}::(?=[a-zA-Z])'.format( + namespaces[0]) + if re.search(usage_re, line): + line = re.sub(usage_re, '', line) + changes += 1 + newfile.write(line) + if changes > 0: + print('Made', changes, 'changes to', filename) + os.remove(filename) + os.rename(filename + '.NEW', filename) + else: + os.remove(filename + '.NEW') + + +def remove_extra_namespace_from_files(namespace, files): + for file in files: + if os.path.isfile(file): + if re.search(r'\.(h|cc)$', file): + remove_extra_namespace_from_file(namespace, file) + elif os.path.isdir(file): + if file in ('third_party', 'out'): + continue + subfiles = glob.glob(file + '/*') + remove_extra_namespace_from_files(namespace, subfiles) + else: + print(file, 'is not a file or directory, ignoring') + + +def main(): + parser = argparse.ArgumentParser( + prog='remove_extra_namespace.py', + description=__doc__.strip().splitlines()[0], + epilog=''.join(__doc__.splitlines(True)[1:]), + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument('--namespace') + parser.add_argument('files', nargs=argparse.REMAINDER) + args = parser.parse_args() + return remove_extra_namespace_from_files(args.namespace, args.files) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/tools_webrtc/sanitizers/lsan_suppressions_webrtc.cc b/tools_webrtc/sanitizers/lsan_suppressions_webrtc.cc index 064b2804ab..f92adba241 100644 --- a/tools_webrtc/sanitizers/lsan_suppressions_webrtc.cc +++ b/tools_webrtc/sanitizers/lsan_suppressions_webrtc.cc @@ -51,16 +51,16 @@ char kLSanDefaultSuppressions[] = // rtc_unittest // https://code.google.com/p/webrtc/issues/detail?id=3827 for details. - "leak:rtc::unstarted_task_test_DoNotDeleteTask2_Test::TestBody\n" - "leak:rtc::HttpServer::HandleConnection\n" - "leak:rtc::HttpServer::Connection::onHttpHeaderComplete\n" - "leak:rtc::HttpResponseData::set_success\n" - "leak:rtc::HttpData::changeHeader\n" + "leak:webrtc::unstarted_task_test_DoNotDeleteTask2_Test::TestBody\n" + "leak:webrtc::HttpServer::HandleConnection\n" + "leak:webrtc::HttpServer::Connection::onHttpHeaderComplete\n" + "leak:webrtc::HttpResponseData::set_success\n" + "leak:webrtc::HttpData::changeHeader\n" // https://code.google.com/p/webrtc/issues/detail?id=4149 for details. 
"leak:StartDNSLookup\n" // rtc_media_unittests - "leak:cricket::FakeNetworkInterface::SetOption\n" + "leak:webrtc::FakeNetworkInterface::SetOption\n" "leak:CodecTest_TestCodecOperators_Test::TestBody\n" "leak:VideoEngineTest*::ConstrainNewCodecBody\n" "leak:VideoMediaChannelTest*::AddRemoveRecvStreams\n" @@ -73,9 +73,7 @@ char kLSanDefaultSuppressions[] = // peerconnection_unittests // https://code.google.com/p/webrtc/issues/detail?id=2528 - "leak:cricket::FakeVideoMediaChannel::~FakeVideoMediaChannel\n" - "leak:cricket::MediaSessionDescriptionFactory::CreateAnswer\n" - "leak:cricket::MediaSessionDescriptionFactory::CreateOffer\n" + "leak:webrtc::FakeVideoMediaChannel::~FakeVideoMediaChannel\n" "leak:DtmfSenderTest_InsertEmptyTonesToCancelPreviousTask_Test::TestBody\n" "leak:sigslot::_signal_base2*::~_signal_base2\n" "leak:testing::internal::CmpHelperEQ\n" @@ -83,8 +81,6 @@ char kLSanDefaultSuppressions[] = "leak:webrtc::AudioDeviceLinuxALSA::InitSpeaker\n" "leak:webrtc::CreateIceCandidate\n" "leak:webrtc::WebRtcIdentityRequestObserver::OnSuccess\n" - "leak:webrtc::WebRtcSessionDescriptionFactory::InternalCreateAnswer\n" - "leak:webrtc::WebRtcSessionDescriptionFactory::InternalCreateOffer\n" "leak:PeerConnectionInterfaceTest_SsrcInOfferAnswer_Test::TestBody\n" "leak:PeerConnectionInterfaceTest_CloseAndTestMethods_Test::TestBody\n" "leak:WebRtcSdpTest::TestDeserializeRtcpFb\n" diff --git a/tools_webrtc/version_updater/update_version.py b/tools_webrtc/version_updater/update_version.py index 2a693cd630..786da2103f 100644 --- a/tools_webrtc/version_updater/update_version.py +++ b/tools_webrtc/version_updater/update_version.py @@ -19,150 +19,147 @@ import sys -def FindSrcDirPath(): - """Returns the abs path to the src/ dir of the project.""" - src_dir = os.path.dirname(os.path.abspath(__file__)) - while os.path.basename(src_dir) != 'src': - src_dir = os.path.normpath(os.path.join(src_dir, os.pardir)) - return src_dir - - UPDATE_BRANCH_NAME = 'webrtc_version_update' -CHECKOUT_SRC_DIR = FindSrcDirPath() +SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) +CHECKOUT_SRC_DIR = os.path.realpath( + os.path.join(SCRIPT_DIR, os.pardir, os.pardir)) NOTIFY_EMAIL = 'webrtc-trooper@webrtc.org' -def _RemovePreviousUpdateBranch(): - active_branch, branches = _GetBranches() - if active_branch == UPDATE_BRANCH_NAME: - active_branch = 'main' - if UPDATE_BRANCH_NAME in branches: - logging.info('Removing previous update branch (%s)', UPDATE_BRANCH_NAME) - subprocess.check_call(['git', 'checkout', active_branch]) - subprocess.check_call(['git', 'branch', '-D', UPDATE_BRANCH_NAME]) - logging.info('No branch to remove') +def _remove_previous_update_branch(): + active_branch, branches = _get_branches() + if active_branch == UPDATE_BRANCH_NAME: + active_branch = 'main' + if UPDATE_BRANCH_NAME in branches: + logging.info('Removing previous update branch (%s)', + UPDATE_BRANCH_NAME) + subprocess.check_call(['git', 'checkout', active_branch]) + subprocess.check_call(['git', 'branch', '-D', UPDATE_BRANCH_NAME]) + logging.info('No branch to remove') -def _GetLastAuthor(): - """Returns a string with the author of the last commit.""" - author = subprocess.check_output( - ['git', 'log', '-1', '--pretty=format:"%an"'], - universal_newlines=True).splitlines() - return author +def _get_last_author(): + """Returns a string with the author of the last commit.""" + author = subprocess.check_output( + ['git', 'log', '-1', '--pretty=format:"%an"'], + universal_newlines=True).splitlines() + return author -def _GetBranches(): - 
"""Returns a tuple (active, branches). +def _get_branches(): + """Returns a tuple (active, branches). 'active' is a string with name of the currently active branch, while 'branches' is the list of all branches. """ - lines = subprocess.check_output(['git', 'branch'], - universal_newlines=True).splitlines() - branches = [] - active = '' - for line in lines: - if '*' in line: - # The assumption is that the first char will always be the '*'. - active = line[1:].strip() - branches.append(active) - else: - branch = line.strip() - if branch: - branches.append(branch) - return active, branches - - -def _CreateUpdateBranch(): - logging.info('Creating update branch: %s', UPDATE_BRANCH_NAME) - subprocess.check_call(['git', 'checkout', '-b', UPDATE_BRANCH_NAME]) - - -def _UpdateWebRTCVersion(filename): - with open(filename, 'rb') as f: - content = f.read().decode('utf-8') - d = datetime.datetime.utcnow() - # pylint: disable=line-too-long - new_content = re.sub( - r'WebRTC source stamp [0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}', - r'WebRTC source stamp %02d-%02d-%02dT%02d:%02d:%02d' % - (d.year, d.month, d.day, d.hour, d.minute, d.second), - content, - flags=re.MULTILINE) - # pylint: enable=line-too-long - with open(filename, 'wb') as f: - f.write(new_content.encode('utf-8')) - - -def _IsTreeClean(): - stdout = subprocess.check_output(['git', 'status', '--porcelain'], - universal_newlines=True) - if len(stdout) == 0: - return True - return False - - -def _LocalCommit(): - logging.info('Committing changes locally.') - d = datetime.datetime.utcnow() - - commit_msg = ('Update WebRTC code version (%02d-%02d-%02dT%02d:%02d:%02d).' - '\n\nBug: None') - commit_msg = commit_msg % (d.year, d.month, d.day, d.hour, d.minute, d.second) - subprocess.check_call(['git', 'add', '--update', '.']) - subprocess.check_call(['git', 'commit', '-m', commit_msg]) - - -def _UploadCL(commit_queue_mode): - """Upload the committed changes as a changelist to Gerrit. + lines = subprocess.check_output(['git', 'branch'], + universal_newlines=True).splitlines() + branches = [] + active = '' + for line in lines: + if '*' in line: + # The assumption is that the first char will always be the '*'. + active = line[1:].strip() + branches.append(active) + else: + branch = line.strip() + if branch: + branches.append(branch) + return active, branches + + +def _create_update_branch(): + logging.info('Creating update branch: %s', UPDATE_BRANCH_NAME) + subprocess.check_call(['git', 'checkout', '-b', UPDATE_BRANCH_NAME]) + + +def _update_webrtc_version(filename): + with open(filename, 'rb') as file: + content = file.read().decode('utf-8') + date = datetime.datetime.utcnow() + # pylint: disable=line-too-long + new_content = re.sub( + r'WebRTC source stamp [0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}', + r'WebRTC source stamp %02d-%02d-%02dT%02d:%02d:%02d' % + (date.year, date.month, date.day, date.hour, date.minute, date.second), + content, + flags=re.MULTILINE) + # pylint: enable=line-too-long + with open(filename, 'wb') as file: + file.write(new_content.encode('utf-8')) + + +def _is_tree_clean(): + stdout = subprocess.check_output(['git', 'status', '--porcelain'], + universal_newlines=True) + if len(stdout) == 0: + return True + return False + + +def _local_commit(): + logging.info('Committing changes locally.') + date = datetime.datetime.utcnow() + + msg = ('Update WebRTC code version (%02d-%02d-%02dT%02d:%02d:%02d).' 
+ '\n\nBug: None') + msg = msg % (date.year, date.month, date.day, date.hour, date.minute, + date.second) + subprocess.check_call(['git', 'add', '--update', '.']) + subprocess.check_call(['git', 'commit', '-m', msg]) + + +def _upload_cl(commit_queue_mode): + """Upload the committed changes as a changelist to Gerrit. commit_queue_mode: - 2: Submit to commit queue. - 1: Run trybots but do not submit to CQ. - 0: Skip CQ, upload only. """ - cmd = [ - 'git', 'cl', 'upload', '--force', '--bypass-hooks', '--bypass-watchlist' - ] - if commit_queue_mode >= 2: - logging.info('Sending the CL to the CQ...') - cmd.extend(['-o', 'label=Bot-Commit+1']) - cmd.extend(['-o', 'label=Commit-Queue+2']) - cmd.extend(['--send-mail', '--cc', NOTIFY_EMAIL]) - elif commit_queue_mode >= 1: - logging.info('Starting CQ dry run...') - cmd.extend(['-o', 'label=Commit-Queue+1']) - subprocess.check_call(cmd) + cmd = [ + 'git', 'cl', 'upload', '--force', '--bypass-hooks', + '--bypass-watchlist' + ] + if commit_queue_mode >= 2: + logging.info('Sending the CL to the CQ...') + cmd.extend(['-o', 'label=Bot-Commit+1']) + cmd.extend(['-o', 'label=Commit-Queue+2']) + cmd.extend(['--send-mail', '--cc', NOTIFY_EMAIL]) + elif commit_queue_mode >= 1: + logging.info('Starting CQ dry run...') + cmd.extend(['-o', 'label=Commit-Queue+1']) + subprocess.check_call(cmd) def main(): - logging.basicConfig(level=logging.INFO) - p = argparse.ArgumentParser() - p.add_argument('--clean', - action='store_true', - default=False, - help='Removes any previous local update branch.') - opts = p.parse_args() - - if opts.clean: - _RemovePreviousUpdateBranch() - - if _GetLastAuthor() == 'webrtc-version-updater': - logging.info('Last commit is a version change, skipping CL.') + logging.basicConfig(level=logging.INFO) + parser = argparse.ArgumentParser() + parser.add_argument('--clean', + action='store_true', + default=False, + help='Removes any previous local update branch.') + opts = parser.parse_args() + + if opts.clean: + _remove_previous_update_branch() + + if _get_last_author() == 'webrtc-version-updater': + logging.info('Last commit is a version change, skipping CL.') + return 0 + + version_filename = os.path.join(CHECKOUT_SRC_DIR, 'call', 'version.cc') + _create_update_branch() + _update_webrtc_version(version_filename) + if _is_tree_clean(): + logging.info('No WebRTC version change detected, skipping CL.') + else: + _local_commit() + logging.info('Uploading CL...') + _upload_cl(2) return 0 - version_filename = os.path.join(CHECKOUT_SRC_DIR, 'call', 'version.cc') - _CreateUpdateBranch() - _UpdateWebRTCVersion(version_filename) - if _IsTreeClean(): - logging.info('No WebRTC version change detected, skipping CL.') - else: - _LocalCommit() - logging.info('Uploading CL...') - _UploadCL(2) - return 0 - if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/video/BUILD.gn b/video/BUILD.gn index d696445db2..3e7af5a1e9 100644 --- a/video/BUILD.gn +++ b/video/BUILD.gn @@ -30,7 +30,6 @@ rtc_library("video_stream_encoder_interface") { "../api/video_codecs:video_codecs_api", "../video/config:encoder_config", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("video") { @@ -65,8 +64,6 @@ rtc_library("video") { "video_quality_observer2.h", "video_receive_stream2.cc", "video_receive_stream2.h", - "video_send_stream.cc", - "video_send_stream.h", "video_send_stream_impl.cc", "video_send_stream_impl.h", "video_stream_decoder2.cc", @@ -74,7 +71,9 @@ rtc_library("video") { ] deps = [ + ":decode_synchronizer", 
":frame_cadence_adapter", + ":frame_decode_scheduler", ":frame_dumping_decoder", ":task_queue_frame_decode_scheduler", ":unique_timestamp_counter", @@ -82,28 +81,43 @@ rtc_library("video") { ":video_stream_encoder_impl", ":video_stream_encoder_interface", "../api:array_view", + "../api:bitrate_allocation", "../api:fec_controller_api", "../api:field_trials_view", "../api:frame_transformer_interface", + "../api:make_ref_counted", + "../api:rtp_headers", + "../api:rtp_packet_info", "../api:rtp_parameters", + "../api:rtp_sender_interface", "../api:scoped_refptr", "../api:sequence_checker", "../api:transport_api", + "../api/adaptation:resource_adaptation_api", "../api/crypto:frame_decryptor_interface", "../api/crypto:options", + "../api/environment", + "../api/metronome", "../api/task_queue", "../api/task_queue:pending_task_safety_flag", - "../api/transport:field_trial_based_config", + "../api/transport/rtp:corruption_detection_message", + "../api/transport/rtp:dependency_descriptor", + "../api/transport/rtp:rtp_source", "../api/units:data_rate", "../api/units:frequency", "../api/units:time_delta", "../api/units:timestamp", + "../api/video:encoded_frame", "../api/video:encoded_image", "../api/video:recordable_encoded_frame", + "../api/video:render_resolution", + "../api/video:video_adaptation", "../api/video:video_bitrate_allocation", "../api/video:video_bitrate_allocator", "../api/video:video_codec_constants", "../api/video:video_frame", + "../api/video:video_frame_type", + "../api/video:video_layers_allocation", "../api/video:video_rtp_headers", "../api/video:video_stream_encoder", "../api/video_codecs:video_codecs_api", @@ -112,9 +126,15 @@ rtc_library("video") { "../call:rtp_interfaces", "../call:rtp_receiver", "../call:rtp_sender", - "../call:video_stream_api", + "../call:video_receive_stream_api", + "../call:video_send_stream_api", "../common_video", + "../common_video:corruption_detection_converters", + "../common_video:corruption_score_calculator", + "../common_video:frame_counts", + "../common_video:frame_instrumentation_data", "../media:media_constants", + "../media:rtc_sdp_video_format_utils", "../modules:module_api", "../modules:module_api_public", "../modules/pacing", @@ -123,6 +143,8 @@ rtc_library("video") { "../modules/rtp_rtcp:rtp_rtcp_format", "../modules/rtp_rtcp:rtp_video_header", "../modules/video_coding", + "../modules/video_coding:h264_sprop_parameter_sets", + "../modules/video_coding:h26x_packet_buffer", "../modules/video_coding:nack_requester", "../modules/video_coding:packet_buffer", "../modules/video_coding:video_codec_interface", @@ -130,6 +152,7 @@ rtc_library("video") { "../modules/video_coding:webrtc_vp9_helpers", "../modules/video_coding/timing:timing_module", "../rtc_base:checks", + "../rtc_base:copy_on_write_buffer", "../rtc_base:event_tracer", "../rtc_base:histogram_percentile_counter", "../rtc_base:logging", @@ -141,7 +164,6 @@ rtc_library("video") { "../rtc_base:rate_tracker", "../rtc_base:rtc_event", "../rtc_base:rtc_numerics", - "../rtc_base:rtc_task_queue", "../rtc_base:safe_conversions", "../rtc_base:sample_counter", "../rtc_base:stringutils", @@ -154,20 +176,19 @@ rtc_library("video") { "../rtc_base/experiments:normalize_simulcast_size_experiment", "../rtc_base/experiments:rate_control_settings", "../rtc_base/synchronization:mutex", + "../rtc_base/system:file_wrapper", "../rtc_base/system:no_unique_address", "../rtc_base/task_utils:repeating_task", "../system_wrappers", "../system_wrappers:field_trial", "../system_wrappers:metrics", 
"../video/config:encoder_config", + "../video/corruption_detection:frame_instrumentation_evaluation", "adaptation:video_adaptation", "render:incoming_video_stream", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] if (!build_with_mozilla) { @@ -215,9 +236,8 @@ rtc_library("frame_dumping_encoder") { "../rtc_base:stringutils", "../rtc_base:timeutils", "../rtc_base/system:file_wrapper", + "//third_party/abseil-cpp/absl/algorithm:container", ] - - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } rtc_library("frame_cadence_adapter") { @@ -230,6 +250,7 @@ rtc_library("frame_cadence_adapter") { deps = [ "../api:field_trials_view", "../api:sequence_checker", + "../api/metronome", "../api/task_queue", "../api/task_queue:pending_task_safety_flag", "../api/units:time_delta", @@ -244,14 +265,14 @@ rtc_library("frame_cadence_adapter") { "../rtc_base:timeutils", "../rtc_base/synchronization:mutex", "../rtc_base/system:no_unique_address", + "../rtc_base/system:unused", "../rtc_base/task_utils:repeating_task", "../system_wrappers", "../system_wrappers:field_trial", "../system_wrappers:metrics", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/cleanup", ] } @@ -285,14 +306,10 @@ rtc_library("video_stream_buffer_controller") { "../rtc_base:checks", "../rtc_base:logging", "../rtc_base:macromagic", - "../rtc_base/experiments:rtt_mult_experiment", "../system_wrappers", "../system_wrappers:field_trial", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/functional:bind_front", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -301,10 +318,7 @@ rtc_source_set("frame_decode_scheduler") { deps = [ ":frame_decode_timing", "../api/units:timestamp", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -323,7 +337,6 @@ rtc_library("task_queue_frame_decode_scheduler") { "../rtc_base:checks", "../system_wrappers", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("frame_decode_timing") { @@ -339,7 +352,6 @@ rtc_library("frame_decode_timing") { "../rtc_base:logging", "../system_wrappers", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("video_receive_stream_timeout_tracker") { @@ -375,7 +387,6 @@ rtc_library("decode_synchronizer") { "../rtc_base:logging", "../rtc_base:macromagic", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("video_stream_encoder_impl") { @@ -391,6 +402,12 @@ rtc_library("video_stream_encoder_impl") { "encoder_overshoot_detector.h", "frame_encode_metadata_writer.cc", "frame_encode_metadata_writer.h", + "quality_convergence_controller.cc", + "quality_convergence_controller.h", + "quality_convergence_monitor.cc", + "quality_convergence_monitor.h", + "rate_utilization_tracker.cc", + "rate_utilization_tracker.h", "video_source_sink_controller.cc", "video_source_sink_controller.h", "video_stream_encoder.cc", @@ -401,14 +418,22 @@ rtc_library("video_stream_encoder_impl") { ":frame_cadence_adapter", ":frame_dumping_encoder", ":video_stream_encoder_interface", + "../api:fec_controller_api", "../api:field_trials_view", + "../api:make_ref_counted", + "../api:rtc_error", "../api:rtp_parameters", 
"../api:rtp_sender_interface", + "../api:scoped_refptr", "../api:sequence_checker", "../api/adaptation:resource_adaptation_api", + "../api/environment", "../api/task_queue:pending_task_safety_flag", "../api/task_queue:task_queue", "../api/units:data_rate", + "../api/units:data_size", + "../api/units:time_delta", + "../api/units:timestamp", "../api/video:encoded_image", "../api/video:render_resolution", "../api/video:video_adaptation", @@ -417,16 +442,19 @@ rtc_library("video_stream_encoder_impl") { "../api/video:video_bitrate_allocator_factory", "../api/video:video_codec_constants", "../api/video:video_frame", + "../api/video:video_frame_type", "../api/video:video_layers_allocation", "../api/video:video_rtp_headers", "../api/video:video_stream_encoder", + "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", "../call/adaptation:resource_adaptation", "../common_video", + "../common_video:frame_instrumentation_data", "../media:media_channel", - "../media:rtc_media_base", "../modules:module_api_public", "../modules/video_coding", + "../modules/video_coding:codec_globals_headers", "../modules/video_coding:video_codec_interface", "../modules/video_coding:video_coding_utility", "../modules/video_coding:webrtc_vp9_helpers", @@ -443,14 +471,12 @@ rtc_library("video_stream_encoder_impl") { "../rtc_base:refcount", "../rtc_base:rtc_event", "../rtc_base:rtc_numerics", - "../rtc_base:rtc_task_queue", "../rtc_base:safe_conversions", "../rtc_base:stringutils", "../rtc_base:timeutils", "../rtc_base/experiments:balanced_degradation_settings", "../rtc_base/experiments:encoder_info_settings", "../rtc_base/experiments:field_trial_parser", - "../rtc_base/experiments:quality_rampup_experiment", "../rtc_base/experiments:quality_scaler_settings", "../rtc_base/experiments:quality_scaling_experiment", "../rtc_base/experiments:rate_control_settings", @@ -458,18 +484,14 @@ rtc_library("video_stream_encoder_impl") { "../rtc_base/system:no_unique_address", "../rtc_base/task_utils:repeating_task", "../system_wrappers", - "../system_wrappers:field_trial", "../system_wrappers:metrics", "adaptation:video_adaptation", "config:encoder_config", "config:streams_config", - ] - absl_deps = [ + "corruption_detection:frame_instrumentation_generator", "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/cleanup", "//third_party/abseil-cpp/absl/container:inlined_vector", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -507,73 +529,98 @@ if (rtc_include_tests) { ] deps = [ ":frame_dumping_decoder", + "..//test/network:simulated_network", + "../api:array_view", "../api:create_frame_generator", "../api:fec_controller_api", + "../api:field_trials_view", "../api:frame_generator_api", "../api:libjingle_peerconnection_api", + "../api:make_ref_counted", "../api:rtc_event_log_output_file", + "../api:rtp_parameters", + "../api:scoped_refptr", + "../api:simulated_network_api", "../api:test_dependency_factory", + "../api:transport_api", "../api:video_quality_test_fixture_api", + "../api/audio:audio_device", + "../api/audio:builtin_audio_processing_builder", + "../api/environment", "../api/numerics", + "../api/rtc_event_log", "../api/rtc_event_log:rtc_event_log_factory", "../api/task_queue", "../api/task_queue:default_task_queue_factory", "../api/test/metrics:global_metrics_logger_and_exporter", "../api/test/metrics:metric", + "../api/units:time_delta", "../api/video:builtin_video_bitrate_allocator_factory", + 
"../api/video:encoded_image", + "../api/video:video_bitrate_allocation", "../api/video:video_bitrate_allocator_factory", + "../api/video:video_codec_constants", "../api/video:video_frame", + "../api/video:video_frame_type", "../api/video:video_rtp_headers", "../api/video_codecs:video_codecs_api", + "../call:call_interfaces", "../call:fake_network", - "../call:simulated_network", + "../call:video_receive_stream_api", + "../call:video_send_stream_api", "../common_video", "../media:media_constants", "../media:rtc_audio_video", "../media:rtc_internal_video_codecs", - "../media:rtc_media_base", "../media:rtc_simulcast_encoder_adapter", - "../modules/audio_device:audio_device_api", "../modules/audio_device:audio_device_module_from_input_and_output", + "../modules/audio_device:test_audio_device_module", "../modules/audio_device:windows_core_audio_utility", "../modules/audio_mixer:audio_mixer_impl", "../modules/rtp_rtcp", "../modules/rtp_rtcp:rtp_rtcp_format", "../modules/video_coding", + "../modules/video_coding:codec_globals_headers", "../modules/video_coding:video_coding_utility", "../modules/video_coding:webrtc_h264", - "../modules/video_coding:webrtc_multiplex", "../modules/video_coding:webrtc_vp8", "../modules/video_coding:webrtc_vp9", + "../rtc_base:checks", + "../rtc_base:copy_on_write_buffer", + "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base:platform_thread", "../rtc_base:rtc_base_tests_utils", "../rtc_base:rtc_event", "../rtc_base:rtc_numerics", + "../rtc_base:safe_conversions", "../rtc_base:stringutils", "../rtc_base:task_queue_for_test", "../rtc_base:timeutils", "../rtc_base/synchronization:mutex", + "../rtc_base/system:file_wrapper", "../rtc_base/task_utils:repeating_task", "../system_wrappers", + "../test:direct_transport", "../test:fake_video_codecs", "../test:fileutils", + "../test:frame_generator_capturer", "../test:platform_video_capturer", "../test:rtp_test_utils", "../test:test_common", + "../test:test_flags", "../test:test_renderer", "../test:test_support", "../test:test_support_test_artifacts", + "../test:video_frame_writer", "../test:video_test_common", "../test:video_test_constants", - "../test:video_test_support", + "config:encoder_config", "config:streams_config", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", - "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (is_mac || is_ios) { @@ -593,6 +640,7 @@ if (rtc_include_tests) { "../api:simulated_network_api", "../api:test_dependency_factory", "../api:video_quality_test_fixture_api", + "../api/units:data_rate", "../api/video_codecs:video_codecs_api", "../modules/pacing", "../modules/video_coding:webrtc_vp9", @@ -604,11 +652,8 @@ if (rtc_include_tests) { "../test:test_support", "../video/config:encoder_config", "//testing/gtest", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -617,6 +662,7 @@ if (rtc_include_tests) { sources = [ "pc_full_stack_tests.cc" ] deps = [ + "..//test/network:simulated_network", "../api:create_network_emulation_manager", "../api:create_peer_connection_quality_test_frame_generator", "../api:create_peerconnection_quality_test_fixture", @@ -631,7 +677,7 @@ if (rtc_include_tests) { "../api/test/pclf:media_quality_test_params", "../api/test/pclf:peer_configurer", "../api/video_codecs:video_codecs_api", - 
"../call:simulated_network", + "../media:media_constants", "../modules/video_coding:webrtc_vp9", "../system_wrappers:field_trial", "../test:field_trial", @@ -653,6 +699,7 @@ if (rtc_include_tests) { "../api:simulated_network_api", "../api:video_quality_test_fixture_api", "../api/transport:bitrate_settings", + "../api/units:data_rate", "../api/video_codecs:video_codecs_api", "../rtc_base:checks", "../rtc_base:logging", @@ -661,14 +708,12 @@ if (rtc_include_tests) { "../test:run_test", "../test:run_test_interface", "../test:test_common", + "../test:test_flags", "../test:test_renderer", "../test:test_support", "//testing/gtest", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -697,6 +742,7 @@ if (rtc_include_tests) { "../api:simulated_network_api", "../api:video_quality_test_fixture_api", "../api/transport:bitrate_settings", + "../api/units:data_rate", "../api/video_codecs:video_codecs_api", "../rtc_base:checks", "../rtc_base:logging", @@ -706,11 +752,11 @@ if (rtc_include_tests) { "../test:run_test", "../test:run_test_interface", "../test:test_common", + "../test:test_flags", "../test:test_renderer", "../test:test_support", "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -723,6 +769,7 @@ if (rtc_include_tests) { "../api:simulated_network_api", "../api:video_quality_test_fixture_api", "../api/transport:bitrate_settings", + "../api/units:data_rate", "../api/video_codecs:video_codecs_api", "../rtc_base:checks", "../rtc_base:logging", @@ -732,12 +779,12 @@ if (rtc_include_tests) { "../test:run_test", "../test:run_test_interface", "../test:test_common", + "../test:test_flags", "../test:test_renderer", "../test:test_support", "//testing/gtest", "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", - "//third_party/abseil-cpp/absl/types:optional", ] } } @@ -760,6 +807,7 @@ if (rtc_include_tests) { "end_to_end_tests/call_operation_tests.cc", "end_to_end_tests/codec_tests.cc", "end_to_end_tests/config_tests.cc", + "end_to_end_tests/corruption_detection_tests.cc", "end_to_end_tests/extended_reports_tests.cc", "end_to_end_tests/fec_tests.cc", "end_to_end_tests/frame_encryption_tests.cc", @@ -779,8 +827,11 @@ if (rtc_include_tests) { "frame_decode_timing_unittest.cc", "frame_encode_metadata_writer_unittest.cc", "picture_id_tests.cc", + "quality_convergence_controller_unittest.cc", + "quality_convergence_monitor_unittest.cc", "quality_limitation_reason_tracker_unittest.cc", "quality_scaling_tests.cc", + "rate_utilization_tracker_unittest.cc", "receive_statistics_proxy_unittest.cc", "report_block_stats_unittest.cc", "rtp_video_stream_receiver2_unittest.cc", @@ -811,50 +862,74 @@ if (rtc_include_tests) { ":video_stream_buffer_controller", ":video_stream_encoder_impl", ":video_stream_encoder_interface", + "../api:array_view", + "../api:bitrate_allocation", "../api:create_frame_generator", "../api:fake_frame_decryptor", "../api:fake_frame_encryptor", + "../api:fec_controller_api", "../api:field_trials_view", "../api:frame_generator_api", - "../api:libjingle_peerconnection_api", + "../api:frame_transformer_interface", + "../api:location", + "../api:make_ref_counted", "../api:mock_fec_controller_override", "../api:mock_frame_decryptor", + "../api:mock_frame_transformer", "../api:mock_video_codec_factory", "../api:mock_video_decoder", "../api:mock_video_encoder", + 
"../api:rtc_error_matchers", "../api:rtp_headers", + "../api:rtp_packet_info", "../api:rtp_parameters", "../api:scoped_refptr", "../api:sequence_checker", "../api:simulated_network_api", "../api:time_controller", - "../api:transport_api", + "../api:video_track_source_constraints", "../api/adaptation:resource_adaptation_api", "../api/crypto:options", + "../api/environment", + "../api/environment:environment_factory", "../api/metronome/test:fake_metronome", "../api/rtc_event_log", "../api/task_queue", "../api/task_queue:default_task_queue_factory", + "../api/task_queue:pending_task_safety_flag", "../api/test/metrics:global_metrics_logger_and_exporter", "../api/test/metrics:metric", "../api/test/video:function_video_factory", - "../api/transport:field_trial_based_config", + "../api/transport:bitrate_settings", + "../api/transport/rtp:corruption_detection_message", + "../api/transport/rtp:dependency_descriptor", + "../api/transport/rtp:rtp_source", "../api/units:data_rate", + "../api/units:data_size", "../api/units:frequency", "../api/units:time_delta", "../api/units:timestamp", "../api/video:builtin_video_bitrate_allocator_factory", + "../api/video:encoded_frame", "../api/video:encoded_image", "../api/video:recordable_encoded_frame", + "../api/video:render_resolution", + "../api/video:resolution", "../api/video:video_adaptation", "../api/video:video_bitrate_allocation", + "../api/video:video_bitrate_allocator", + "../api/video:video_bitrate_allocator_factory", + "../api/video:video_codec_constants", "../api/video:video_frame", "../api/video:video_frame_type", + "../api/video:video_layers_allocation", "../api/video:video_rtp_headers", + "../api/video:video_stream_encoder", "../api/video/test:video_frame_matchers", "../api/video_codecs:scalability_mode", "../api/video_codecs:video_codecs_api", "../api/video_codecs:vp8_temporal_layers_factory", + "../call:bitrate_allocator", "../call:call_interfaces", "../call:fake_network", "../call:mock_bitrate_allocator", @@ -862,35 +937,35 @@ if (rtc_include_tests) { "../call:rtp_interfaces", "../call:rtp_receiver", "../call:rtp_sender", - "../call:simulated_network", "../call:simulated_packet_receiver", - "../call:video_stream_api", + "../call:video_receive_stream_api", + "../call:video_send_stream_api", "../call/adaptation:resource_adaptation", "../call/adaptation:resource_adaptation_test_utilities", "../common_video", + "../common_video:frame_instrumentation_data", "../common_video/test:utilities", "../media:codec", "../media:media_constants", "../media:rtc_audio_video", "../media:rtc_internal_video_codecs", - "../media:rtc_media", - "../media:rtc_media_base", "../media:rtc_media_tests_utils", "../media:rtc_simulcast_encoder_adapter", + "../modules:module_api", "../modules:module_api_public", "../modules/pacing", "../modules/rtp_rtcp", - "../modules/rtp_rtcp:mock_rtp_rtcp", "../modules/rtp_rtcp:rtp_rtcp_format", - "../modules/utility:utility", + "../modules/rtp_rtcp:rtp_rtcp_format", + "../modules/rtp_rtcp:rtp_video_header", "../modules/video_coding", "../modules/video_coding:codec_globals_headers", "../modules/video_coding:encoded_frame", + "../modules/video_coding:nack_requester", "../modules/video_coding:packet_buffer", "../modules/video_coding:video_codec_interface", "../modules/video_coding:video_coding_utility", "../modules/video_coding:webrtc_h264", - "../modules/video_coding:webrtc_multiplex", "../modules/video_coding:webrtc_vp8", "../modules/video_coding:webrtc_vp9", "../modules/video_coding:webrtc_vp9_helpers", @@ -901,17 +976,17 @@ if 
(rtc_include_tests) { "../modules/video_coding/timing:timing_module", "../rtc_base:byte_buffer", "../rtc_base:checks", + "../rtc_base:copy_on_write_buffer", "../rtc_base:gunit_helpers", "../rtc_base:logging", "../rtc_base:macromagic", - "../rtc_base:platform_thread", + "../rtc_base:network_route", "../rtc_base:rate_limiter", "../rtc_base:rate_statistics", "../rtc_base:refcount", "../rtc_base:rtc_base_tests_utils", "../rtc_base:rtc_event", "../rtc_base:rtc_numerics", - "../rtc_base:rtc_task_queue", "../rtc_base:safe_conversions", "../rtc_base:stringutils", "../rtc_base:task_queue_for_test", @@ -921,19 +996,18 @@ if (rtc_include_tests) { "../rtc_base/containers:flat_map", "../rtc_base/experiments:alr_experiment", "../rtc_base/experiments:encoder_info_settings", + "../rtc_base/experiments:rate_control_settings", "../rtc_base/synchronization:mutex", "../system_wrappers", - "../system_wrappers:field_trial", "../system_wrappers:metrics", "../test:direct_transport", "../test:encoder_settings", + "../test:explicit_key_value_config", "../test:fake_encoded_frame", "../test:fake_video_codecs", "../test:field_trial", - "../test:fileutils", "../test:frame_generator_capturer", "../test:frame_utils", - "../test:mock_frame_transformer", "../test:mock_transport", "../test:null_transport", "../test:rtp_test_utils", @@ -943,20 +1017,20 @@ if (rtc_include_tests) { "../test:test_support", "../test:video_test_common", "../test:video_test_constants", + "../test:wait_until", + "../test/network:simulated_network", "../test/time_controller", "adaptation:video_adaptation", "config:encoder_config", "config:streams_config", "config:video_config_tests", - ] - absl_deps = [ + "corruption_detection:corruption_detection_tests", + "corruption_detection/evaluation:corruption_detection_eval_tests", "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/functional:bind_front", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - "//third_party/abseil-cpp/absl/types:variant", ] if (!build_with_mozilla) { deps += [ "../media:rtc_media_base" ] diff --git a/video/OWNERS b/video/OWNERS index 2206a59a18..3a278f3a85 100644 --- a/video/OWNERS +++ b/video/OWNERS @@ -3,4 +3,5 @@ ilnik@webrtc.org mflodman@webrtc.org philipel@webrtc.org sprang@webrtc.org +ssilkin@webrtc.org stefan@webrtc.org diff --git a/video/adaptation/BUILD.gn b/video/adaptation/BUILD.gn index d206909853..829d1f1305 100644 --- a/video/adaptation/BUILD.gn +++ b/video/adaptation/BUILD.gn @@ -22,8 +22,6 @@ rtc_library("video_adaptation") { "overuse_frame_detector.h", "pixel_limit_resource.cc", "pixel_limit_resource.h", - "quality_rampup_experiment_helper.cc", - "quality_rampup_experiment_helper.h", "quality_scaler_resource.cc", "quality_scaler_resource.h", "video_stream_encoder_resource.cc", @@ -38,6 +36,7 @@ rtc_library("video_adaptation") { "../../api:scoped_refptr", "../../api:sequence_checker", "../../api/adaptation:resource_adaptation_api", + "../../api/environment", "../../api/task_queue:task_queue", "../../api/units:data_rate", "../../api/units:time_delta", @@ -60,20 +59,15 @@ rtc_library("video_adaptation") { "../../rtc_base:timeutils", "../../rtc_base/experiments:balanced_degradation_settings", "../../rtc_base/experiments:field_trial_parser", - "../../rtc_base/experiments:quality_rampup_experiment", 
"../../rtc_base/experiments:quality_scaler_settings", "../../rtc_base/synchronization:mutex", "../../rtc_base/system:no_unique_address", "../../rtc_base/task_utils:repeating_task", - "../../system_wrappers:field_trial", "../../system_wrappers:system_wrappers", "../../video:video_stream_encoder_interface", "../../video/config:encoder_config", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/base:core_headers", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -92,6 +86,8 @@ if (rtc_include_tests) { ":video_adaptation", "../../api:field_trials_view", "../../api:scoped_refptr", + "../../api/environment", + "../../api/environment:environment_factory", "../../api/task_queue:task_queue", "../../api/units:time_delta", "../../api/units:timestamp", @@ -111,15 +107,10 @@ if (rtc_include_tests) { "../../rtc_base:rtc_numerics", "../../rtc_base:task_queue_for_test", "../../rtc_base:threading", - "../../test:field_trial", "../../test:rtc_expect_death", - "../../test:scoped_key_value_config", "../../test:test_support", "../../test/time_controller:time_controller", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/types:optional", ] } } diff --git a/video/adaptation/balanced_constraint.cc b/video/adaptation/balanced_constraint.cc index f9ee08ac87..0e8975ca97 100644 --- a/video/adaptation/balanced_constraint.cc +++ b/video/adaptation/balanced_constraint.cc @@ -20,7 +20,7 @@ namespace webrtc { BalancedConstraint::BalancedConstraint( DegradationPreferenceProvider* degradation_preference_provider, const FieldTrialsView& field_trials) - : encoder_target_bitrate_bps_(absl::nullopt), + : encoder_target_bitrate_bps_(std::nullopt), balanced_settings_(field_trials), degradation_preference_provider_(degradation_preference_provider) { RTC_DCHECK(degradation_preference_provider_); @@ -28,7 +28,7 @@ BalancedConstraint::BalancedConstraint( } void BalancedConstraint::OnEncoderTargetBitrateUpdated( - absl::optional encoder_target_bitrate_bps) { + std::optional encoder_target_bitrate_bps) { RTC_DCHECK_RUN_ON(&sequence_checker_); encoder_target_bitrate_bps_ = std::move(encoder_target_bitrate_bps); } diff --git a/video/adaptation/balanced_constraint.h b/video/adaptation/balanced_constraint.h index 22c7d2923c..294fe6f70c 100644 --- a/video/adaptation/balanced_constraint.h +++ b/video/adaptation/balanced_constraint.h @@ -11,9 +11,9 @@ #ifndef VIDEO_ADAPTATION_BALANCED_CONSTRAINT_H_ #define VIDEO_ADAPTATION_BALANCED_CONSTRAINT_H_ +#include #include -#include "absl/types/optional.h" #include "api/field_trials_view.h" #include "api/sequence_checker.h" #include "call/adaptation/adaptation_constraint.h" @@ -31,7 +31,7 @@ class BalancedConstraint : public AdaptationConstraint { ~BalancedConstraint() override = default; void OnEncoderTargetBitrateUpdated( - absl::optional encoder_target_bitrate_bps); + std::optional encoder_target_bitrate_bps); // AdaptationConstraint implementation. 
std::string Name() const override { return "BalancedConstraint"; } @@ -42,7 +42,7 @@ class BalancedConstraint : public AdaptationConstraint { private: RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; - absl::optional encoder_target_bitrate_bps_ + std::optional encoder_target_bitrate_bps_ RTC_GUARDED_BY(&sequence_checker_); const BalancedDegradationSettings balanced_settings_; const DegradationPreferenceProvider* degradation_preference_provider_; diff --git a/video/adaptation/bandwidth_quality_scaler_resource.cc b/video/adaptation/bandwidth_quality_scaler_resource.cc index 485019f309..815e07ed76 100644 --- a/video/adaptation/bandwidth_quality_scaler_resource.cc +++ b/video/adaptation/bandwidth_quality_scaler_resource.cc @@ -20,9 +20,9 @@ namespace webrtc { // static -rtc::scoped_refptr +scoped_refptr BandwidthQualityScalerResource::Create() { - return rtc::make_ref_counted(); + return make_ref_counted(); } BandwidthQualityScalerResource::BandwidthQualityScalerResource() @@ -40,7 +40,8 @@ bool BandwidthQualityScalerResource::is_started() const { void BandwidthQualityScalerResource::StartCheckForOveruse( const std::vector& - resolution_bitrate_limits) { + resolution_bitrate_limits, + VideoCodecType codec_type) { RTC_DCHECK_RUN_ON(encoder_queue()); RTC_DCHECK(!is_started()); bandwidth_quality_scaler_ = std::make_unique(this); @@ -48,7 +49,7 @@ void BandwidthQualityScalerResource::StartCheckForOveruse( // If the configuration parameters more than one, we should define and // declare the function BandwidthQualityScaler::Initialize() and call it. bandwidth_quality_scaler_->SetResolutionBitrateLimits( - resolution_bitrate_limits); + resolution_bitrate_limits, codec_type); } void BandwidthQualityScalerResource::StopCheckForOveruse() { diff --git a/video/adaptation/bandwidth_quality_scaler_resource.h b/video/adaptation/bandwidth_quality_scaler_resource.h index a57c9907a4..9661e75e6f 100644 --- a/video/adaptation/bandwidth_quality_scaler_resource.h +++ b/video/adaptation/bandwidth_quality_scaler_resource.h @@ -12,11 +12,11 @@ #define VIDEO_ADAPTATION_BANDWIDTH_QUALITY_SCALER_RESOURCE_H_ #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/scoped_refptr.h" #include "api/video/video_adaptation_reason.h" #include "api/video_codecs/video_encoder.h" @@ -32,7 +32,7 @@ class BandwidthQualityScalerResource : public VideoStreamEncoderResource, public BandwidthQualityScalerUsageHandlerInterface { public: - static rtc::scoped_refptr Create(); + static scoped_refptr Create(); BandwidthQualityScalerResource(); ~BandwidthQualityScalerResource() override; @@ -45,7 +45,8 @@ class BandwidthQualityScalerResource void StartCheckForOveruse( const std::vector& - resolution_bitrate_limits); + resolution_bitrate_limits, + VideoCodecType codec_type); void StopCheckForOveruse(); // BandwidthScalerQpUsageHandlerInterface implementation. 
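Note on the pattern above: these hunks are part of a repo-wide, mechanical migration from absl::optional/absl::nullopt to std::optional/std::nullopt (plus dropping the rtc:: qualifier from scoped_refptr and make_ref_counted), so only spellings and includes change, not behavior. Below is a minimal, self-contained sketch of what the optional side of that migration looks like for a caller; the class and member names are illustrative stand-ins, not WebRTC code, and assume only standard C++17.

#include <cstdint>
#include <optional>
#include <utility>

// Hypothetical class illustrating the migration pattern: the member and the
// setter argument switch from absl::optional<uint32_t> to
// std::optional<uint32_t>; call sites that passed absl::nullopt now pass
// std::nullopt, and everything else stays the same.
class TargetBitrateHolder {
 public:
  void OnTargetBitrateUpdated(std::optional<uint32_t> bitrate_bps) {
    // std::move mirrors how the constraints above store the incoming value.
    target_bitrate_bps_ = std::move(bitrate_bps);
  }
  bool has_target() const { return target_bitrate_bps_.has_value(); }

 private:
  std::optional<uint32_t> target_bitrate_bps_ = std::nullopt;
};

int main() {
  TargetBitrateHolder holder;
  holder.OnTargetBitrateUpdated(std::nullopt);  // was absl::nullopt
  holder.OnTargetBitrateUpdated(1200000u);      // value call sites are unchanged
  return holder.has_target() ? 0 : 1;
}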
diff --git a/video/adaptation/bitrate_constraint.cc b/video/adaptation/bitrate_constraint.cc
index 2f92095b2b..6eb480bf7f 100644
--- a/video/adaptation/bitrate_constraint.cc
+++ b/video/adaptation/bitrate_constraint.cc
@@ -20,19 +20,19 @@ namespace webrtc {
 
 BitrateConstraint::BitrateConstraint()
-    : encoder_settings_(absl::nullopt),
-      encoder_target_bitrate_bps_(absl::nullopt) {
+    : encoder_settings_(std::nullopt),
+      encoder_target_bitrate_bps_(std::nullopt) {
   sequence_checker_.Detach();
 }
 
 void BitrateConstraint::OnEncoderSettingsUpdated(
-    absl::optional encoder_settings) {
+    std::optional encoder_settings) {
   RTC_DCHECK_RUN_ON(&sequence_checker_);
   encoder_settings_ = std::move(encoder_settings);
 }
 
 void BitrateConstraint::OnEncoderTargetBitrateUpdated(
-    absl::optional encoder_target_bitrate_bps) {
+    std::optional encoder_target_bitrate_bps) {
   RTC_DCHECK_RUN_ON(&sequence_checker_);
   encoder_target_bitrate_bps_ = std::move(encoder_target_bitrate_bps);
 }
 
@@ -64,13 +64,13 @@ bool BitrateConstraint::IsAdaptationUpAllowed(
     return true;
   }
 
-  absl::optional current_frame_size_px =
+  std::optional current_frame_size_px =
       input_state.single_active_stream_pixels();
   if (!current_frame_size_px.has_value()) {
     return true;
   }
 
-  absl::optional bitrate_limits =
+  std::optional bitrate_limits =
       encoder_settings_->encoder_info().GetEncoderBitrateLimitsForResolution(
           // Need some sort of expected resulting pixels to be used
           // instead of unrestricted.
diff --git a/video/adaptation/bitrate_constraint.h b/video/adaptation/bitrate_constraint.h
index a608e5db5d..8906a16d13 100644
--- a/video/adaptation/bitrate_constraint.h
+++ b/video/adaptation/bitrate_constraint.h
@@ -11,9 +11,9 @@
 #ifndef VIDEO_ADAPTATION_BITRATE_CONSTRAINT_H_
 #define VIDEO_ADAPTATION_BITRATE_CONSTRAINT_H_
 
+#include 
 #include 
 
-#include "absl/types/optional.h"
 #include "api/sequence_checker.h"
 #include "call/adaptation/adaptation_constraint.h"
 #include "call/adaptation/encoder_settings.h"
@@ -29,9 +29,9 @@ class BitrateConstraint : public AdaptationConstraint {
   ~BitrateConstraint() override = default;
 
   void OnEncoderSettingsUpdated(
-      absl::optional encoder_settings);
+      std::optional encoder_settings);
   void OnEncoderTargetBitrateUpdated(
-      absl::optional encoder_target_bitrate_bps);
+      std::optional encoder_target_bitrate_bps);
 
   // AdaptationConstraint implementation.
   std::string Name() const override { return "BitrateConstraint"; }
@@ -42,9 +42,9 @@ class BitrateConstraint : public AdaptationConstraint {
 
  private:
   RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
-  absl::optional encoder_settings_
+  std::optional encoder_settings_
       RTC_GUARDED_BY(&sequence_checker_);
-  absl::optional encoder_target_bitrate_bps_
+  std::optional encoder_target_bitrate_bps_
       RTC_GUARDED_BY(&sequence_checker_);
 };
 
diff --git a/video/adaptation/bitrate_constraint_unittest.cc b/video/adaptation/bitrate_constraint_unittest.cc
index 8a416db1fa..29ebb6a912 100644
--- a/video/adaptation/bitrate_constraint_unittest.cc
+++ b/video/adaptation/bitrate_constraint_unittest.cc
@@ -36,7 +36,7 @@ const VideoSourceRestrictions k720p{/*max_pixels_per_frame=*/1280 * 720,
 
 struct TestParams {
   bool active;
-  absl::optional scalability_mode;
+  std::optional scalability_mode;
 };
 
 void FillCodecConfig(VideoCodec* video_codec,
diff --git a/video/adaptation/encode_usage_resource.cc b/video/adaptation/encode_usage_resource.cc
index 4a97881b04..3ca08cbcf2 100644
--- a/video/adaptation/encode_usage_resource.cc
+++ b/video/adaptation/encode_usage_resource.cc
@@ -18,10 +18,9 @@ namespace webrtc {
 
 // static
-rtc::scoped_refptr EncodeUsageResource::Create(
+scoped_refptr EncodeUsageResource::Create(
     std::unique_ptr overuse_detector) {
-  return rtc::make_ref_counted(
-      std::move(overuse_detector));
+  return make_ref_counted(std::move(overuse_detector));
 }
 
 EncodeUsageResource::EncodeUsageResource(
@@ -29,7 +28,7 @@ EncodeUsageResource::EncodeUsageResource(
     : VideoStreamEncoderResource("EncoderUsageResource"),
       overuse_detector_(std::move(overuse_detector)),
       is_started_(false),
-      target_frame_rate_(absl::nullopt) {
+      target_frame_rate_(std::nullopt) {
   RTC_DCHECK(overuse_detector_);
 }
 
@@ -56,7 +55,7 @@ void EncodeUsageResource::StopCheckForOveruse() {
 }
 
 void EncodeUsageResource::SetTargetFrameRate(
-    absl::optional target_frame_rate) {
+    std::optional target_frame_rate) {
   RTC_DCHECK_RUN_ON(encoder_queue());
   if (target_frame_rate == target_frame_rate_)
     return;
@@ -77,7 +76,7 @@ void EncodeUsageResource::OnEncodeCompleted(
     uint32_t timestamp,
     int64_t time_sent_in_us,
     int64_t capture_time_us,
-    absl::optional encode_duration_us) {
+    std::optional encode_duration_us) {
   RTC_DCHECK_RUN_ON(encoder_queue());
   // TODO(hbos): Rename FrameSent() to something more appropriate (e.g.
   // "OnEncodeCompleted"?).
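EncodeUsageResource::SetTargetFrameRate above leans on std::optional's built-in equality to early-return when the incoming value (possibly std::nullopt) equals the stored one. Below is a standalone sketch of exactly that comparison behavior, assuming plain C++17 and using double as a stand-in element type; the helper function is illustrative, not WebRTC code.

#include <cassert>
#include <optional>

// Illustrates the semantics the early-return depends on:
// - nullopt == nullopt        -> equal (no change, skip the update)
// - nullopt vs engaged value  -> not equal (change, do the update)
// - two engaged values compare by their contained doubles.
bool TargetFrameRateChanged(std::optional<double> stored,
                            std::optional<double> incoming) {
  return incoming != stored;
}

int main() {
  assert(!TargetFrameRateChanged(std::nullopt, std::nullopt));
  assert(TargetFrameRateChanged(std::nullopt, 30.0));
  assert(TargetFrameRateChanged(30.0, 25.0));
  assert(!TargetFrameRateChanged(25.0, 25.0));
  return 0;
}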
diff --git a/video/adaptation/encode_usage_resource.h b/video/adaptation/encode_usage_resource.h index c391132e57..7820859d9d 100644 --- a/video/adaptation/encode_usage_resource.h +++ b/video/adaptation/encode_usage_resource.h @@ -12,9 +12,9 @@ #define VIDEO_ADAPTATION_ENCODE_USAGE_RESOURCE_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/scoped_refptr.h" #include "api/video/video_adaptation_reason.h" #include "video/adaptation/overuse_frame_detector.h" @@ -30,7 +30,7 @@ namespace webrtc { class EncodeUsageResource : public VideoStreamEncoderResource, public OveruseFrameDetectorObserverInterface { public: - static rtc::scoped_refptr Create( + static scoped_refptr Create( std::unique_ptr overuse_detector); explicit EncodeUsageResource( @@ -42,13 +42,13 @@ class EncodeUsageResource : public VideoStreamEncoderResource, void StartCheckForOveruse(CpuOveruseOptions options); void StopCheckForOveruse(); - void SetTargetFrameRate(absl::optional target_frame_rate); + void SetTargetFrameRate(std::optional target_frame_rate); void OnEncodeStarted(const VideoFrame& cropped_frame, int64_t time_when_first_seen_us); void OnEncodeCompleted(uint32_t timestamp, int64_t time_sent_in_us, int64_t capture_time_us, - absl::optional encode_duration_us); + std::optional encode_duration_us); // OveruseFrameDetectorObserverInterface implementation. void AdaptUp() override; @@ -60,7 +60,7 @@ class EncodeUsageResource : public VideoStreamEncoderResource, const std::unique_ptr overuse_detector_ RTC_GUARDED_BY(encoder_queue()); bool is_started_ RTC_GUARDED_BY(encoder_queue()); - absl::optional target_frame_rate_ RTC_GUARDED_BY(encoder_queue()); + std::optional target_frame_rate_ RTC_GUARDED_BY(encoder_queue()); }; } // namespace webrtc diff --git a/video/adaptation/overuse_frame_detector.cc b/video/adaptation/overuse_frame_detector.cc index e5c2c7d379..d095d81d85 100644 --- a/video/adaptation/overuse_frame_detector.cc +++ b/video/adaptation/overuse_frame_detector.cc @@ -20,12 +20,14 @@ #include #include +#include "api/environment/environment.h" +#include "api/field_trials_view.h" #include "api/video/video_frame.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/exp_filter.h" #include "rtc_base/time_utils.h" -#include "system_wrappers/include/field_trial.h" +#include "rtc_base/trace_event.h" #if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) #include @@ -77,8 +79,8 @@ class SendProcessingUsage1 : public OveruseFrameDetector::ProcessingUsage { count_(0), last_processed_capture_time_us_(-1), max_sample_diff_ms_(kDefaultSampleDiffMs * kMaxSampleDiffMarginFactor), - filtered_processing_ms_(new rtc::ExpFilter(kWeightFactorProcessing)), - filtered_frame_diff_ms_(new rtc::ExpFilter(kWeightFactorFrameDiff)) { + filtered_processing_ms_(new ExpFilter(kWeightFactorProcessing)), + filtered_frame_diff_ms_(new ExpFilter(kWeightFactorFrameDiff)) { Reset(); } ~SendProcessingUsage1() override {} @@ -104,16 +106,16 @@ class SendProcessingUsage1 : public OveruseFrameDetector::ProcessingUsage { if (last_capture_time_us != -1) AddCaptureSample(1e-3 * (time_when_first_seen_us - last_capture_time_us)); - frame_timing_.push_back(FrameTiming(frame.timestamp_us(), frame.timestamp(), - time_when_first_seen_us)); + frame_timing_.push_back(FrameTiming( + frame.timestamp_us(), frame.rtp_timestamp(), time_when_first_seen_us)); } - absl::optional FrameSent( + std::optional FrameSent( uint32_t timestamp, int64_t time_sent_in_us, int64_t /* capture_time_us */, - absl::optional /* 
encode_duration_us */) override { - absl::optional encode_duration_us; + std::optional /* encode_duration_us */) override { + std::optional encode_duration_us; // Delay before reporting actual encoding time, used to have the ability to // detect total encoding time when encoding more than one layer. Encoding is // here assumed to finish within a second (or that we get enough long-time @@ -135,7 +137,7 @@ class SendProcessingUsage1 : public OveruseFrameDetector::ProcessingUsage { while (!frame_timing_.empty()) { FrameTiming timing = frame_timing_.front(); if (time_sent_in_us - timing.capture_us < - kEncodingTimeMeasureWindowMs * rtc::kNumMicrosecsPerMillisec) { + kEncodingTimeMeasureWindowMs * kNumMicrosecsPerMillisec) { break; } if (timing.last_send_us != -1) { @@ -210,8 +212,8 @@ class SendProcessingUsage1 : public OveruseFrameDetector::ProcessingUsage { uint64_t count_; int64_t last_processed_capture_time_us_; float max_sample_diff_ms_; - std::unique_ptr filtered_processing_ms_; - std::unique_ptr filtered_frame_diff_ms_; + std::unique_ptr filtered_processing_ms_; + std::unique_ptr filtered_frame_diff_ms_; }; // New cpu load estimator. @@ -240,11 +242,10 @@ class SendProcessingUsage2 : public OveruseFrameDetector::ProcessingUsage { int64_t time_when_first_seen_us, int64_t last_capture_time_us) override {} - absl::optional FrameSent( - uint32_t /* timestamp */, - int64_t /* time_sent_in_us */, - int64_t capture_time_us, - absl::optional encode_duration_us) override { + std::optional FrameSent(uint32_t /* timestamp */, + int64_t /* time_sent_in_us */, + int64_t capture_time_us, + std::optional encode_duration_us) override { if (encode_duration_us) { int duration_per_frame_us = DurationPerInputFrame(capture_time_us, *encode_duration_us); @@ -290,7 +291,7 @@ class SendProcessingUsage2 : public OveruseFrameDetector::ProcessingUsage { int64_t DurationPerInputFrame(int64_t capture_time_us, int64_t encode_time_us) { // Discard data on old frames; limit 2 seconds. - static constexpr int64_t kMaxAge = 2 * rtc::kNumMicrosecsPerSec; + static constexpr int64_t kMaxAge = 2 * kNumMicrosecsPerSec; for (auto it = max_encode_time_per_input_frame_.begin(); it != max_encode_time_per_input_frame_.end() && it->first < capture_time_us - kMaxAge;) { @@ -362,19 +363,19 @@ class OverdoseInjector : public OveruseFrameDetector::ProcessingUsage { usage_->FrameCaptured(frame, time_when_first_seen_us, last_capture_time_us); } - absl::optional FrameSent( + std::optional FrameSent( // These two argument used by old estimator. uint32_t timestamp, int64_t time_sent_in_us, // And these two by the new estimator. 
int64_t capture_time_us, - absl::optional encode_duration_us) override { + std::optional encode_duration_us) override { return usage_->FrameSent(timestamp, time_sent_in_us, capture_time_us, encode_duration_us); } int Value() override { - int64_t now_ms = rtc::TimeMillis(); + int64_t now_ms = TimeMillis(); if (last_toggling_ms_ == -1) { last_toggling_ms_ = now_ms; } else { @@ -403,7 +404,7 @@ class OverdoseInjector : public OveruseFrameDetector::ProcessingUsage { } } - absl::optional overried_usage_value; + std::optional overried_usage_value; switch (state_) { case State::kNormal: break; @@ -430,7 +431,8 @@ class OverdoseInjector : public OveruseFrameDetector::ProcessingUsage { } // namespace std::unique_ptr -OveruseFrameDetector::CreateProcessingUsage(const CpuOveruseOptions& options) { +OveruseFrameDetector::CreateProcessingUsage(const FieldTrialsView& field_trials, + const CpuOveruseOptions& options) { std::unique_ptr instance; if (options.filter_time_ms > 0) { instance = std::make_unique(options); @@ -438,7 +440,7 @@ OveruseFrameDetector::CreateProcessingUsage(const CpuOveruseOptions& options) { instance = std::make_unique(options); } std::string toggling_interval = - field_trial::FindFullName("WebRTC-ForceSimulatedOveruseIntervalMs"); + field_trials.Lookup("WebRTC-ForceSimulatedOveruseIntervalMs"); if (!toggling_interval.empty()) { int normal_period_ms = 0; int overuse_period_ms = 0; @@ -465,10 +467,12 @@ OveruseFrameDetector::CreateProcessingUsage(const CpuOveruseOptions& options) { } OveruseFrameDetector::OveruseFrameDetector( + const Environment& env, CpuOveruseMetricsObserver* metrics_observer) - : metrics_observer_(metrics_observer), + : env_(env), + metrics_observer_(metrics_observer), num_process_times_(0), - // TODO(bugs.webrtc.org/9078): Use absl::optional + // TODO(bugs.webrtc.org/9078): Use std::optional last_capture_time_us_(-1), num_pixels_(0), max_framerate_(kDefaultFrameRate), @@ -480,7 +484,7 @@ OveruseFrameDetector::OveruseFrameDetector( current_rampup_delay_ms_(kStandardRampUpDelayMs) { task_checker_.Detach(); ParseFieldTrial({&filter_time_constant_}, - field_trial::FindFullName("WebRTC-CpuLoadEstimator")); + env_.field_trials().Lookup("WebRTC-CpuLoadEstimator")); } OveruseFrameDetector::~OveruseFrameDetector() {} @@ -527,7 +531,7 @@ bool OveruseFrameDetector::FrameTimeoutDetected(int64_t now_us) const { if (last_capture_time_us_ == -1) return false; return (now_us - last_capture_time_us_) > - options_.frame_timeout_interval_ms * rtc::kNumMicrosecsPerMillisec; + options_.frame_timeout_interval_ms * kNumMicrosecsPerMillisec; } void OveruseFrameDetector::ResetAll(int num_pixels) { @@ -538,7 +542,7 @@ void OveruseFrameDetector::ResetAll(int num_pixels) { usage_->Reset(); last_capture_time_us_ = -1; num_process_times_ = 0; - encode_usage_percent_ = absl::nullopt; + encode_usage_percent_ = std::nullopt; OnTargetFramerateUpdated(max_framerate_); } @@ -566,14 +570,13 @@ void OveruseFrameDetector::FrameCaptured(const VideoFrame& frame, void OveruseFrameDetector::FrameSent(uint32_t timestamp, int64_t time_sent_in_us, int64_t capture_time_us, - absl::optional encode_duration_us) { + std::optional encode_duration_us) { RTC_DCHECK_RUN_ON(&task_checker_); encode_duration_us = usage_->FrameSent(timestamp, time_sent_in_us, capture_time_us, encode_duration_us); if (encode_duration_us) { - EncodedFrameTimeMeasured(*encode_duration_us / - rtc::kNumMicrosecsPerMillisec); + EncodedFrameTimeMeasured(*encode_duration_us / kNumMicrosecsPerMillisec); } } @@ -586,7 +589,8 @@ void 
OveruseFrameDetector::CheckForOveruse( !encode_usage_percent_) return; - int64_t now_ms = rtc::TimeMillis(); + int64_t now_ms = TimeMillis(); + const char* action = "NoAction"; if (IsOverusing(*encode_usage_percent_)) { // If the last thing we did was going up, and now have to back down, we need @@ -612,21 +616,24 @@ void OveruseFrameDetector::CheckForOveruse( ++num_overuse_detections_; observer->AdaptDown(); + action = "AdaptDown"; } else if (IsUnderusing(*encode_usage_percent_, now_ms)) { last_rampup_time_ms_ = now_ms; in_quick_rampup_ = true; observer->AdaptUp(); + action = "AdaptUp"; } + TRACE_EVENT2("webrtc", "OveruseFrameDetector::CheckForOveruse", + "encode_usage_percent", *encode_usage_percent_, "action", + TRACE_STR_COPY(action)); int rampup_delay = in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_; - RTC_LOG(LS_VERBOSE) << " Frame stats: " - " encode usage " - << *encode_usage_percent_ << " overuse detections " - << num_overuse_detections_ << " rampup delay " - << rampup_delay; + RTC_LOG(LS_INFO) << "CheckForOveruse: encode usage " << *encode_usage_percent_ + << " overuse detections " << num_overuse_detections_ + << " rampup delay " << rampup_delay << " action " << action; } void OveruseFrameDetector::SetOptions(const CpuOveruseOptions& options) { @@ -639,7 +646,7 @@ void OveruseFrameDetector::SetOptions(const CpuOveruseOptions& options) { } // Force reset with next frame. num_pixels_ = 0; - usage_ = CreateProcessingUsage(options); + usage_ = CreateProcessingUsage(env_.field_trials(), options); } bool OveruseFrameDetector::IsOverusing(int usage_percent) { diff --git a/video/adaptation/overuse_frame_detector.h b/video/adaptation/overuse_frame_detector.h index f39cee043e..bfdc8ebd1d 100644 --- a/video/adaptation/overuse_frame_detector.h +++ b/video/adaptation/overuse_frame_detector.h @@ -13,8 +13,9 @@ #include #include +#include -#include "absl/types/optional.h" +#include "api/environment/environment.h" #include "api/field_trials_view.h" #include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" @@ -73,7 +74,8 @@ class OveruseFrameDetectorObserverInterface { // check for overuse. class OveruseFrameDetector { public: - explicit OveruseFrameDetector(CpuOveruseMetricsObserver* metrics_observer); + OveruseFrameDetector(const Environment& env, + CpuOveruseMetricsObserver* metrics_observer); virtual ~OveruseFrameDetector(); OveruseFrameDetector(const OveruseFrameDetector&) = delete; @@ -103,7 +105,7 @@ class OveruseFrameDetector { void FrameSent(uint32_t timestamp, int64_t time_sent_in_us, int64_t capture_time_us, - absl::optional encode_duration_us); + std::optional encode_duration_us); // Interface for cpu load estimation. Intended for internal use only. class ProcessingUsage { @@ -114,13 +116,13 @@ class OveruseFrameDetector { int64_t time_when_first_seen_us, int64_t last_capture_time_us) = 0; // Returns encode_time in us, if there's a new measurement. - virtual absl::optional FrameSent( + virtual std::optional FrameSent( // These two argument used by old estimator. uint32_t timestamp, int64_t time_sent_in_us, // And these two by the new estimator. 
int64_t capture_time_us, - absl::optional encode_duration_us) = 0; + std::optional encode_duration_us) = 0; virtual int Value() = 0; virtual ~ProcessingUsage() = default; @@ -144,15 +146,17 @@ class OveruseFrameDetector { void ResetAll(int num_pixels); static std::unique_ptr CreateProcessingUsage( + const FieldTrialsView& field_trials, const CpuOveruseOptions& options); + const Environment env_; RTC_NO_UNIQUE_ADDRESS SequenceChecker task_checker_; // Owned by the task queue from where StartCheckForOveruse is called. RepeatingTaskHandle check_overuse_task_ RTC_GUARDED_BY(task_checker_); // Stats metrics. CpuOveruseMetricsObserver* const metrics_observer_; - absl::optional encode_usage_percent_ RTC_GUARDED_BY(task_checker_); + std::optional encode_usage_percent_ RTC_GUARDED_BY(task_checker_); int64_t num_process_times_ RTC_GUARDED_BY(task_checker_); diff --git a/video/adaptation/overuse_frame_detector_unittest.cc b/video/adaptation/overuse_frame_detector_unittest.cc index 85a84fe23a..fa8530f00d 100644 --- a/video/adaptation/overuse_frame_detector_unittest.cc +++ b/video/adaptation/overuse_frame_detector_unittest.cc @@ -12,6 +12,8 @@ #include +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/video/encoded_image.h" #include "api/video/i420_buffer.h" #include "api/video/video_adaptation_reason.h" @@ -22,7 +24,6 @@ #include "rtc_base/task_queue_for_test.h" #include "test/gmock.h" #include "test/gtest.h" -#include "test/scoped_key_value_config.h" namespace webrtc { @@ -33,8 +34,8 @@ namespace { const int kWidth = 640; const int kHeight = 480; // Corresponds to load of 15% -const int kFrameIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec; -const int kProcessTimeUs = 5 * rtc::kNumMicrosecsPerMillisec; +const int kFrameIntervalUs = 33 * kNumMicrosecsPerMillisec; +const int kProcessTimeUs = 5 * kNumMicrosecsPerMillisec; } // namespace class MockCpuOveruseObserver : public OveruseFrameDetectorObserverInterface { @@ -61,8 +62,9 @@ class CpuOveruseObserverImpl : public OveruseFrameDetectorObserverInterface { class OveruseFrameDetectorUnderTest : public OveruseFrameDetector { public: explicit OveruseFrameDetectorUnderTest( + const Environment& env, CpuOveruseMetricsObserver* metrics_observer) - : OveruseFrameDetector(metrics_observer) {} + : OveruseFrameDetector(env, metrics_observer) {} ~OveruseFrameDetectorUnderTest() {} using OveruseFrameDetector::CheckForOveruse; @@ -75,7 +77,8 @@ class OveruseFrameDetectorTest : public ::testing::Test, void SetUp() override { observer_ = &mock_observer_; options_.min_process_count = 0; - overuse_detector_ = std::make_unique(this); + overuse_detector_ = std::make_unique( + CreateEnvironment(), this); // Unfortunately, we can't call SetOptions here, since that would break // single-threading requirements in the RunOnTqNormalUsage test. 
} @@ -105,12 +108,12 @@ class OveruseFrameDetectorTest : public ::testing::Test, .build(); uint32_t timestamp = 0; while (num_frames-- > 0) { - frame.set_timestamp(timestamp); - int64_t capture_time_us = rtc::TimeMicros(); + frame.set_rtp_timestamp(timestamp); + int64_t capture_time_us = TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); clock_.AdvanceTime(TimeDelta::Micros(delay_us)); - overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), - capture_time_us, delay_us); + overuse_detector_->FrameSent(timestamp, TimeMicros(), capture_time_us, + delay_us); clock_.AdvanceTime(TimeDelta::Micros(interval_us - delay_us)); timestamp += interval_us * 90 / 1000; } @@ -122,7 +125,7 @@ class OveruseFrameDetectorTest : public ::testing::Test, int width, int height, // One element per layer - rtc::ArrayView delays_us) { + ArrayView delays_us) { VideoFrame frame = VideoFrame::Builder() .set_video_frame_buffer(I420Buffer::Create(width, height)) @@ -131,8 +134,8 @@ class OveruseFrameDetectorTest : public ::testing::Test, .build(); uint32_t timestamp = 0; while (num_frames-- > 0) { - frame.set_timestamp(timestamp); - int64_t capture_time_us = rtc::TimeMicros(); + frame.set_rtp_timestamp(timestamp); + int64_t capture_time_us = TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); int max_delay_us = 0; for (int delay_us : delays_us) { @@ -141,8 +144,8 @@ class OveruseFrameDetectorTest : public ::testing::Test, max_delay_us = delay_us; } - overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), - capture_time_us, delay_us); + overuse_detector_->FrameSent(timestamp, TimeMicros(), capture_time_us, + delay_us); } overuse_detector_->CheckForOveruse(observer_); clock_.AdvanceTime(TimeDelta::Micros(interval_us - max_delay_us)); @@ -166,14 +169,13 @@ class OveruseFrameDetectorTest : public ::testing::Test, .build(); uint32_t timestamp = 0; while (num_frames-- > 0) { - frame.set_timestamp(timestamp); + frame.set_rtp_timestamp(timestamp); int interval_us = random.Rand(min_interval_us, max_interval_us); - int64_t capture_time_us = rtc::TimeMicros(); + int64_t capture_time_us = TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); clock_.AdvanceTime(TimeDelta::Micros(delay_us)); - overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), - capture_time_us, - absl::optional(delay_us)); + overuse_detector_->FrameSent(timestamp, TimeMicros(), capture_time_us, + std::optional(delay_us)); overuse_detector_->CheckForOveruse(observer_); // Avoid turning clock backwards. @@ -189,11 +191,11 @@ class OveruseFrameDetectorTest : public ::testing::Test, // the usage. From the tests where these are used, adding another sample // doesn't affect the expected outcome (this is mainly to check initial // values and whether the overuse detector has been reset or not). 
- InsertAndSendFramesWithInterval(2, rtc::kNumMicrosecsPerSec, width, height, + InsertAndSendFramesWithInterval(2, kNumMicrosecsPerSec, width, height, kFrameIntervalUs); } void TriggerOveruse(int num_times) { - const int kDelayUs = 32 * rtc::kNumMicrosecsPerMillisec; + const int kDelayUs = 32 * kNumMicrosecsPerMillisec; for (int i = 0; i < num_times; ++i) { InsertAndSendFramesWithInterval(1000, kFrameIntervalUs, kWidth, kHeight, kDelayUs); @@ -214,21 +216,21 @@ class OveruseFrameDetectorTest : public ::testing::Test, int UsagePercent() { return encode_usage_percent_; } int64_t OveruseProcessingTimeLimitForFramerate(int fps) const { - int64_t frame_interval = rtc::kNumMicrosecsPerSec / fps; + int64_t frame_interval = kNumMicrosecsPerSec / fps; int64_t max_processing_time_us = (frame_interval * options_.high_encode_usage_threshold_percent) / 100; return max_processing_time_us; } int64_t UnderuseProcessingTimeLimitForFramerate(int fps) const { - int64_t frame_interval = rtc::kNumMicrosecsPerSec / fps; + int64_t frame_interval = kNumMicrosecsPerSec / fps; int64_t max_processing_time_us = (frame_interval * options_.low_encode_usage_threshold_percent) / 100; return max_processing_time_us; } CpuOveruseOptions options_; - rtc::ScopedFakeClock clock_; + ScopedFakeClock clock_; MockCpuOveruseObserver mock_observer_; OveruseFrameDetectorObserverInterface* observer_; std::unique_ptr overuse_detector_; @@ -264,7 +266,7 @@ TEST_F(OveruseFrameDetectorTest, DoubleOveruseAndRecover) { } TEST_F(OveruseFrameDetectorTest, TriggerUnderuseWithMinProcessCount) { - const int kProcessIntervalUs = 5 * rtc::kNumMicrosecsPerSec; + const int kProcessIntervalUs = 5 * kNumMicrosecsPerSec; options_.min_process_count = 1; CpuOveruseObserverImpl overuse_observer; observer_ = nullptr; @@ -330,13 +332,12 @@ TEST_F(OveruseFrameDetectorTest, ResetAfterFrameTimeout) { kProcessTimeUs); EXPECT_NE(InitialUsage(), UsagePercent()); InsertAndSendFramesWithInterval( - 2, options_.frame_timeout_interval_ms * rtc::kNumMicrosecsPerMillisec, - kWidth, kHeight, kProcessTimeUs); + 2, options_.frame_timeout_interval_ms * kNumMicrosecsPerMillisec, kWidth, + kHeight, kProcessTimeUs); EXPECT_NE(InitialUsage(), UsagePercent()); // Verify reset. InsertAndSendFramesWithInterval( - 2, - (options_.frame_timeout_interval_ms + 1) * rtc::kNumMicrosecsPerMillisec, + 2, (options_.frame_timeout_interval_ms + 1) * kNumMicrosecsPerMillisec, kWidth, kHeight, kProcessTimeUs); ForceUpdate(kWidth, kHeight); EXPECT_EQ(InitialUsage(), UsagePercent()); @@ -371,7 +372,7 @@ TEST_F(OveruseFrameDetectorTest, InitialProcessingUsage) { TEST_F(OveruseFrameDetectorTest, MeasuresMultipleConcurrentSamples) { overuse_detector_->SetOptions(options_); EXPECT_CALL(mock_observer_, AdaptDown()).Times(::testing::AtLeast(1)); - static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec; + static const int kIntervalUs = 33 * kNumMicrosecsPerMillisec; static const size_t kNumFramesEncodingDelay = 3; VideoFrame frame = VideoFrame::Builder() @@ -381,13 +382,13 @@ TEST_F(OveruseFrameDetectorTest, MeasuresMultipleConcurrentSamples) { .build(); for (size_t i = 0; i < 1000; ++i) { // Unique timestamps. 
- frame.set_timestamp(static_cast(i)); - int64_t capture_time_us = rtc::TimeMicros(); + frame.set_rtp_timestamp(static_cast(i)); + int64_t capture_time_us = TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); clock_.AdvanceTime(TimeDelta::Micros(kIntervalUs)); if (i > kNumFramesEncodingDelay) { overuse_detector_->FrameSent( - static_cast(i - kNumFramesEncodingDelay), rtc::TimeMicros(), + static_cast(i - kNumFramesEncodingDelay), TimeMicros(), capture_time_us, kIntervalUs); } overuse_detector_->CheckForOveruse(observer_); @@ -398,8 +399,8 @@ TEST_F(OveruseFrameDetectorTest, UpdatesExistingSamples) { // >85% encoding time should trigger overuse. overuse_detector_->SetOptions(options_); EXPECT_CALL(mock_observer_, AdaptDown()).Times(::testing::AtLeast(1)); - static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec; - static const int kDelayUs = 30 * rtc::kNumMicrosecsPerMillisec; + static const int kIntervalUs = 33 * kNumMicrosecsPerMillisec; + static const int kDelayUs = 30 * kNumMicrosecsPerMillisec; VideoFrame frame = VideoFrame::Builder() .set_video_frame_buffer(I420Buffer::Create(kWidth, kHeight)) @@ -408,16 +409,16 @@ TEST_F(OveruseFrameDetectorTest, UpdatesExistingSamples) { .build(); uint32_t timestamp = 0; for (size_t i = 0; i < 1000; ++i) { - frame.set_timestamp(timestamp); - int64_t capture_time_us = rtc::TimeMicros(); + frame.set_rtp_timestamp(timestamp); + int64_t capture_time_us = TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); // Encode and send first parts almost instantly. clock_.AdvanceTime(TimeDelta::Millis(1)); - overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), capture_time_us, - rtc::kNumMicrosecsPerMillisec); + overuse_detector_->FrameSent(timestamp, TimeMicros(), capture_time_us, + kNumMicrosecsPerMillisec); // Encode heavier part, resulting in >85% usage total. clock_.AdvanceTime(TimeDelta::Micros(kDelayUs) - TimeDelta::Millis(1)); - overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), capture_time_us, + overuse_detector_->FrameSent(timestamp, TimeMicros(), capture_time_us, kDelayUs); clock_.AdvanceTime(TimeDelta::Micros(kIntervalUs - kDelayUs)); timestamp += kIntervalUs * 90 / 1000; @@ -432,7 +433,7 @@ TEST_F(OveruseFrameDetectorTest, RunOnTqNormalUsage) { overuse_detector_->StartCheckForOveruse(queue.Get(), options_, observer_); }); - rtc::Event event; + Event event; // Expect NormalUsage(). When called, stop the `overuse_detector_` and then // set `event` to end the test. EXPECT_CALL(mock_observer_, AdaptUp()) @@ -442,8 +443,8 @@ TEST_F(OveruseFrameDetectorTest, RunOnTqNormalUsage) { })); queue.PostTask([this] { - const int kDelayUs1 = 5 * rtc::kNumMicrosecsPerMillisec; - const int kDelayUs2 = 6 * rtc::kNumMicrosecsPerMillisec; + const int kDelayUs1 = 5 * kNumMicrosecsPerMillisec; + const int kDelayUs2 = 6 * kNumMicrosecsPerMillisec; InsertAndSendFramesWithInterval(1300, kFrameIntervalUs, kWidth, kHeight, kDelayUs1); InsertAndSendFramesWithInterval(1, kFrameIntervalUs, kWidth, kHeight, @@ -461,7 +462,7 @@ TEST_F(OveruseFrameDetectorTest, MaxIntervalScalesWithFramerate) { overuse_detector_->SetOptions(options_); // Trigger overuse. - int64_t frame_interval_us = rtc::kNumMicrosecsPerSec / kCapturerMaxFrameRate; + int64_t frame_interval_us = kNumMicrosecsPerSec / kCapturerMaxFrameRate; // Processing time just below over use limit given kEncodeMaxFrameRate. 
int64_t processing_time_us = (98 * OveruseProcessingTimeLimitForFramerate(kEncodeMaxFrameRate)) / 100; @@ -473,7 +474,7 @@ TEST_F(OveruseFrameDetectorTest, MaxIntervalScalesWithFramerate) { } // Simulate frame rate reduction and normal usage. - frame_interval_us = rtc::kNumMicrosecsPerSec / kEncodeMaxFrameRate; + frame_interval_us = kNumMicrosecsPerSec / kEncodeMaxFrameRate; overuse_detector_->OnTargetFramerateUpdated(kEncodeMaxFrameRate); EXPECT_CALL(mock_observer_, AdaptDown()).Times(0); for (int i = 0; i < options_.high_threshold_consecutive_count; ++i) { @@ -498,7 +499,7 @@ TEST_F(OveruseFrameDetectorTest, RespectsMinFramerate) { overuse_detector_->OnTargetFramerateUpdated(kMinFrameRate); // Normal usage just at the limit. - int64_t frame_interval_us = rtc::kNumMicrosecsPerSec / kMinFrameRate; + int64_t frame_interval_us = kNumMicrosecsPerSec / kMinFrameRate; // Processing time just below over use limit given kEncodeMaxFrameRate. int64_t processing_time_us = (98 * OveruseProcessingTimeLimitForFramerate(kMinFrameRate)) / 100; @@ -533,7 +534,7 @@ TEST_F(OveruseFrameDetectorTest, LimitsMaxFrameInterval) { const int kMaxFrameRate = 20; overuse_detector_->SetOptions(options_); overuse_detector_->OnTargetFramerateUpdated(kMaxFrameRate); - int64_t frame_interval_us = rtc::kNumMicrosecsPerSec / kMaxFrameRate; + int64_t frame_interval_us = kNumMicrosecsPerSec / kMaxFrameRate; // Maximum frame interval allowed is 35% above ideal. int64_t max_frame_interval_us = (135 * frame_interval_us) / 100; // Maximum processing time, without triggering overuse, allowed with the above @@ -583,10 +584,10 @@ TEST_F(OveruseFrameDetectorTest, NoOveruseForLargeRandomFrameInterval) { overuse_detector_->SetOptions(options_); const int kNumFrames = 500; - const int kEncodeTimeUs = 100 * rtc::kNumMicrosecsPerMillisec; + const int kEncodeTimeUs = 100 * kNumMicrosecsPerMillisec; - const int kMinIntervalUs = 30 * rtc::kNumMicrosecsPerMillisec; - const int kMaxIntervalUs = 1000 * rtc::kNumMicrosecsPerMillisec; + const int kMinIntervalUs = 30 * kNumMicrosecsPerMillisec; + const int kMaxIntervalUs = 1000 * kNumMicrosecsPerMillisec; const int kTargetFramerate = 5; @@ -611,10 +612,10 @@ TEST_F(OveruseFrameDetectorTest, NoOveruseForRandomFrameIntervalWithReset) { // .Times(::testing::AtLeast(1)); const int kNumFrames = 500; - const int kEncodeTimeUs = 100 * rtc::kNumMicrosecsPerMillisec; + const int kEncodeTimeUs = 100 * kNumMicrosecsPerMillisec; - const int kMinIntervalUs = 30 * rtc::kNumMicrosecsPerMillisec; - const int kMaxIntervalUs = 3000 * rtc::kNumMicrosecsPerMillisec; + const int kMinIntervalUs = 30 * kNumMicrosecsPerMillisec; + const int kMaxIntervalUs = 3000 * kNumMicrosecsPerMillisec; const int kTargetFramerate = 5; @@ -639,11 +640,11 @@ TEST_F(OveruseFrameDetectorTest, NoOveruseForSimulcast) { constexpr int kNumFrames = 500; constexpr int kEncodeTimesUs[] = { - 10 * rtc::kNumMicrosecsPerMillisec, - 8 * rtc::kNumMicrosecsPerMillisec, - 12 * rtc::kNumMicrosecsPerMillisec, + 10 * kNumMicrosecsPerMillisec, + 8 * kNumMicrosecsPerMillisec, + 12 * kNumMicrosecsPerMillisec, }; - constexpr int kIntervalUs = 30 * rtc::kNumMicrosecsPerMillisec; + constexpr int kIntervalUs = 30 * kNumMicrosecsPerMillisec; InsertAndSendSimulcastFramesWithInterval(kNumFrames, kIntervalUs, kWidth, kHeight, kEncodeTimesUs); @@ -657,7 +658,7 @@ TEST_F(OveruseFrameDetectorTest, NoOveruseForSimulcast) { class OveruseFrameDetectorTest2 : public OveruseFrameDetectorTest { protected: void SetUp() override { - options_.filter_time_ms = 5 * 
rtc::kNumMillisecsPerSec; + options_.filter_time_ms = 5 * kNumMillisecsPerSec; OveruseFrameDetectorTest::SetUp(); } @@ -673,7 +674,7 @@ class OveruseFrameDetectorTest2 : public OveruseFrameDetectorTest { .set_timestamp_us(0) .build(); while (num_frames-- > 0) { - int64_t capture_time_us = rtc::TimeMicros(); + int64_t capture_time_us = TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us /* ignored */); overuse_detector_->FrameSent(0 /* ignored timestamp */, 0 /* ignored send_time_us */, @@ -698,7 +699,7 @@ class OveruseFrameDetectorTest2 : public OveruseFrameDetectorTest { .build(); for (int i = 0; i < num_frames; i++) { int interval_us = random.Rand(min_interval_us, max_interval_us); - int64_t capture_time_us = rtc::TimeMicros(); + int64_t capture_time_us = TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); overuse_detector_->FrameSent(0 /* ignored timestamp */, 0 /* ignored send_time_us */, @@ -712,7 +713,7 @@ class OveruseFrameDetectorTest2 : public OveruseFrameDetectorTest { void ForceUpdate(int width, int height) override { // This is mainly to check initial values and whether the overuse // detector has been reset or not. - InsertAndSendFramesWithInterval(1, rtc::kNumMicrosecsPerSec, width, height, + InsertAndSendFramesWithInterval(1, kNumMicrosecsPerSec, width, height, kFrameIntervalUs); } }; @@ -746,7 +747,7 @@ TEST_F(OveruseFrameDetectorTest2, DoubleOveruseAndRecover) { } TEST_F(OveruseFrameDetectorTest2, TriggerUnderuseWithMinProcessCount) { - const int kProcessIntervalUs = 5 * rtc::kNumMicrosecsPerSec; + const int kProcessIntervalUs = 5 * kNumMicrosecsPerSec; options_.min_process_count = 1; CpuOveruseObserverImpl overuse_observer; observer_ = nullptr; @@ -810,13 +811,12 @@ TEST_F(OveruseFrameDetectorTest2, ResetAfterFrameTimeout) { kProcessTimeUs); EXPECT_NE(InitialUsage(), UsagePercent()); InsertAndSendFramesWithInterval( - 2, options_.frame_timeout_interval_ms * rtc::kNumMicrosecsPerMillisec, - kWidth, kHeight, kProcessTimeUs); + 2, options_.frame_timeout_interval_ms * kNumMicrosecsPerMillisec, kWidth, + kHeight, kProcessTimeUs); EXPECT_NE(InitialUsage(), UsagePercent()); // Verify reset. InsertAndSendFramesWithInterval( - 2, - (options_.frame_timeout_interval_ms + 1) * rtc::kNumMicrosecsPerMillisec, + 2, (options_.frame_timeout_interval_ms + 1) * kNumMicrosecsPerMillisec, kWidth, kHeight, kProcessTimeUs); ForceUpdate(kWidth, kHeight); EXPECT_EQ(InitialUsage(), UsagePercent()); @@ -853,7 +853,7 @@ TEST_F(OveruseFrameDetectorTest2, InitialProcessingUsage) { TEST_F(OveruseFrameDetectorTest2, MeasuresMultipleConcurrentSamples) { overuse_detector_->SetOptions(options_); EXPECT_CALL(mock_observer_, AdaptDown()).Times(::testing::AtLeast(1)); - static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec; + static const int kIntervalUs = 33 * kNumMicrosecsPerMillisec; static const size_t kNumFramesEncodingDelay = 3; VideoFrame frame = VideoFrame::Builder() @@ -863,13 +863,13 @@ TEST_F(OveruseFrameDetectorTest2, MeasuresMultipleConcurrentSamples) { .build(); for (size_t i = 0; i < 1000; ++i) { // Unique timestamps. 
- frame.set_timestamp(static_cast(i)); - int64_t capture_time_us = rtc::TimeMicros(); + frame.set_rtp_timestamp(static_cast(i)); + int64_t capture_time_us = TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); clock_.AdvanceTime(TimeDelta::Micros(kIntervalUs)); if (i > kNumFramesEncodingDelay) { overuse_detector_->FrameSent( - static_cast(i - kNumFramesEncodingDelay), rtc::TimeMicros(), + static_cast(i - kNumFramesEncodingDelay), TimeMicros(), capture_time_us, kIntervalUs); } overuse_detector_->CheckForOveruse(observer_); @@ -880,8 +880,8 @@ TEST_F(OveruseFrameDetectorTest2, UpdatesExistingSamples) { // >85% encoding time should trigger overuse. overuse_detector_->SetOptions(options_); EXPECT_CALL(mock_observer_, AdaptDown()).Times(::testing::AtLeast(1)); - static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec; - static const int kDelayUs = 30 * rtc::kNumMicrosecsPerMillisec; + static const int kIntervalUs = 33 * kNumMicrosecsPerMillisec; + static const int kDelayUs = 30 * kNumMicrosecsPerMillisec; VideoFrame frame = VideoFrame::Builder() .set_video_frame_buffer(I420Buffer::Create(kWidth, kHeight)) @@ -890,16 +890,16 @@ TEST_F(OveruseFrameDetectorTest2, UpdatesExistingSamples) { .build(); uint32_t timestamp = 0; for (size_t i = 0; i < 1000; ++i) { - frame.set_timestamp(timestamp); - int64_t capture_time_us = rtc::TimeMicros(); + frame.set_rtp_timestamp(timestamp); + int64_t capture_time_us = TimeMicros(); overuse_detector_->FrameCaptured(frame, capture_time_us); // Encode and send first parts almost instantly. clock_.AdvanceTime(TimeDelta::Millis(1)); - overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), capture_time_us, - rtc::kNumMicrosecsPerMillisec); + overuse_detector_->FrameSent(timestamp, TimeMicros(), capture_time_us, + kNumMicrosecsPerMillisec); // Encode heavier part, resulting in >85% usage total. clock_.AdvanceTime(TimeDelta::Micros(kDelayUs) - TimeDelta::Millis(1)); - overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), capture_time_us, + overuse_detector_->FrameSent(timestamp, TimeMicros(), capture_time_us, kDelayUs); clock_.AdvanceTime(TimeDelta::Micros(kIntervalUs - kDelayUs)); timestamp += kIntervalUs * 90 / 1000; @@ -914,7 +914,7 @@ TEST_F(OveruseFrameDetectorTest2, RunOnTqNormalUsage) { overuse_detector_->StartCheckForOveruse(queue.Get(), options_, observer_); }); - rtc::Event event; + Event event; // Expect NormalUsage(). When called, stop the `overuse_detector_` and then // set `event` to end the test. 
EXPECT_CALL(mock_observer_, AdaptUp()) @@ -924,8 +924,8 @@ TEST_F(OveruseFrameDetectorTest2, RunOnTqNormalUsage) { })); queue.PostTask([this] { - const int kDelayUs1 = 5 * rtc::kNumMicrosecsPerMillisec; - const int kDelayUs2 = 6 * rtc::kNumMicrosecsPerMillisec; + const int kDelayUs1 = 5 * kNumMicrosecsPerMillisec; + const int kDelayUs2 = 6 * kNumMicrosecsPerMillisec; InsertAndSendFramesWithInterval(1300, kFrameIntervalUs, kWidth, kHeight, kDelayUs1); InsertAndSendFramesWithInterval(1, kFrameIntervalUs, kWidth, kHeight, @@ -943,10 +943,10 @@ TEST_F(OveruseFrameDetectorTest2, NoOveruseForLargeRandomFrameInterval) { EXPECT_CALL(mock_observer_, AdaptUp()).Times(::testing::AtLeast(1)); const int kNumFrames = 500; - const int kEncodeTimeUs = 100 * rtc::kNumMicrosecsPerMillisec; + const int kEncodeTimeUs = 100 * kNumMicrosecsPerMillisec; - const int kMinIntervalUs = 30 * rtc::kNumMicrosecsPerMillisec; - const int kMaxIntervalUs = 1000 * rtc::kNumMicrosecsPerMillisec; + const int kMinIntervalUs = 30 * kNumMicrosecsPerMillisec; + const int kMaxIntervalUs = 1000 * kNumMicrosecsPerMillisec; InsertAndSendFramesWithRandomInterval(kNumFrames, kMinIntervalUs, kMaxIntervalUs, kWidth, kHeight, @@ -963,10 +963,10 @@ TEST_F(OveruseFrameDetectorTest2, NoOveruseForRandomFrameIntervalWithReset) { EXPECT_CALL(mock_observer_, AdaptUp()).Times(::testing::AtLeast(1)); const int kNumFrames = 500; - const int kEncodeTimeUs = 100 * rtc::kNumMicrosecsPerMillisec; + const int kEncodeTimeUs = 100 * kNumMicrosecsPerMillisec; - const int kMinIntervalUs = 30 * rtc::kNumMicrosecsPerMillisec; - const int kMaxIntervalUs = 3000 * rtc::kNumMicrosecsPerMillisec; + const int kMinIntervalUs = 30 * kNumMicrosecsPerMillisec; + const int kMaxIntervalUs = 3000 * kNumMicrosecsPerMillisec; InsertAndSendFramesWithRandomInterval(kNumFrames, kMinIntervalUs, kMaxIntervalUs, kWidth, kHeight, @@ -984,12 +984,12 @@ TEST_F(OveruseFrameDetectorTest2, ToleratesOutOfOrderFrames) { // Represents a cpu utilization close to 100%. First input frame results in // three encoded frames, and the last of those isn't finished until after the // first encoded frame corresponding to the next input frame. 
- const int kEncodeTimeUs = 30 * rtc::kNumMicrosecsPerMillisec; + const int kEncodeTimeUs = 30 * kNumMicrosecsPerMillisec; const int kCaptureTimesMs[] = {33, 33, 66, 33}; for (int capture_time_ms : kCaptureTimesMs) { overuse_detector_->FrameSent( - 0, 0, capture_time_ms * rtc::kNumMicrosecsPerMillisec, kEncodeTimeUs); + 0, 0, capture_time_ms * kNumMicrosecsPerMillisec, kEncodeTimeUs); } EXPECT_GE(UsagePercent(), InitialUsage()); } @@ -1002,11 +1002,11 @@ TEST_F(OveruseFrameDetectorTest2, NoOveruseForSimulcast) { constexpr int kNumFrames = 500; constexpr int kEncodeTimesUs[] = { - 10 * rtc::kNumMicrosecsPerMillisec, - 8 * rtc::kNumMicrosecsPerMillisec, - 12 * rtc::kNumMicrosecsPerMillisec, + 10 * kNumMicrosecsPerMillisec, + 8 * kNumMicrosecsPerMillisec, + 12 * kNumMicrosecsPerMillisec, }; - constexpr int kIntervalUs = 30 * rtc::kNumMicrosecsPerMillisec; + constexpr int kIntervalUs = 30 * kNumMicrosecsPerMillisec; InsertAndSendSimulcastFramesWithInterval(kNumFrames, kIntervalUs, kWidth, kHeight, kEncodeTimesUs); diff --git a/video/adaptation/pixel_limit_resource.cc b/video/adaptation/pixel_limit_resource.cc index 872e169879..446824fa78 100644 --- a/video/adaptation/pixel_limit_resource.cc +++ b/video/adaptation/pixel_limit_resource.cc @@ -24,11 +24,10 @@ constexpr TimeDelta kResourceUsageCheckIntervalMs = TimeDelta::Seconds(5); } // namespace // static -rtc::scoped_refptr PixelLimitResource::Create( +scoped_refptr PixelLimitResource::Create( TaskQueueBase* task_queue, VideoStreamInputStateProvider* input_state_provider) { - return rtc::make_ref_counted(task_queue, - input_state_provider); + return make_ref_counted(task_queue, input_state_provider); } PixelLimitResource::PixelLimitResource( @@ -36,7 +35,7 @@ PixelLimitResource::PixelLimitResource( VideoStreamInputStateProvider* input_state_provider) : task_queue_(task_queue), input_state_provider_(input_state_provider), - max_pixels_(absl::nullopt) { + max_pixels_(std::nullopt) { RTC_DCHECK(task_queue_); RTC_DCHECK(input_state_provider_); } @@ -67,7 +66,7 @@ void PixelLimitResource::SetResourceListener(ResourceListener* listener) { // No pixel limit configured yet, try again later. 
return kResourceUsageCheckIntervalMs; } - absl::optional frame_size_pixels = + std::optional frame_size_pixels = input_state_provider_->InputState().frame_size_pixels(); if (!frame_size_pixels.has_value()) { // We haven't observed a frame yet so we don't know if it's going to be @@ -83,11 +82,11 @@ void PixelLimitResource::SetResourceListener(ResourceListener* listener) { int target_pixels_lower_bounds = GetLowerResolutionThan(target_pixel_upper_bounds); if (current_pixels > target_pixel_upper_bounds) { - listener_->OnResourceUsageStateMeasured( - rtc::scoped_refptr(this), ResourceUsageState::kOveruse); + listener_->OnResourceUsageStateMeasured(scoped_refptr(this), + ResourceUsageState::kOveruse); } else if (current_pixels < target_pixels_lower_bounds) { - listener_->OnResourceUsageStateMeasured( - rtc::scoped_refptr(this), ResourceUsageState::kUnderuse); + listener_->OnResourceUsageStateMeasured(scoped_refptr(this), + ResourceUsageState::kUnderuse); } return kResourceUsageCheckIntervalMs; }); diff --git a/video/adaptation/pixel_limit_resource.h b/video/adaptation/pixel_limit_resource.h index b42f92434f..591de753f0 100644 --- a/video/adaptation/pixel_limit_resource.h +++ b/video/adaptation/pixel_limit_resource.h @@ -11,9 +11,9 @@ #ifndef VIDEO_ADAPTATION_PIXEL_LIMIT_RESOURCE_H_ #define VIDEO_ADAPTATION_PIXEL_LIMIT_RESOURCE_H_ +#include #include -#include "absl/types/optional.h" #include "api/adaptation/resource.h" #include "api/scoped_refptr.h" #include "call/adaptation/video_stream_input_state_provider.h" @@ -33,7 +33,7 @@ namespace webrtc { // purposes. class PixelLimitResource : public Resource { public: - static rtc::scoped_refptr Create( + static scoped_refptr Create( TaskQueueBase* task_queue, VideoStreamInputStateProvider* input_state_provider); @@ -50,7 +50,7 @@ class PixelLimitResource : public Resource { private: TaskQueueBase* const task_queue_; VideoStreamInputStateProvider* const input_state_provider_; - absl::optional max_pixels_ RTC_GUARDED_BY(task_queue_); + std::optional max_pixels_ RTC_GUARDED_BY(task_queue_); webrtc::ResourceListener* listener_ RTC_GUARDED_BY(task_queue_); RepeatingTaskHandle repeating_task_ RTC_GUARDED_BY(task_queue_); }; diff --git a/video/adaptation/pixel_limit_resource_unittest.cc b/video/adaptation/pixel_limit_resource_unittest.cc index 28eb19b1aa..4ed3daa494 100644 --- a/video/adaptation/pixel_limit_resource_unittest.cc +++ b/video/adaptation/pixel_limit_resource_unittest.cc @@ -64,7 +64,7 @@ TEST_F(PixelLimitResourceTest, ResourceIsSilentByDefault) { // OnResourceUsageStateMeasured() is invoked. testing::StrictMock resource_listener; RunTaskOnTaskQueue([&]() { - rtc::scoped_refptr pixel_limit_resource = + scoped_refptr pixel_limit_resource = PixelLimitResource::Create(task_queue_.get(), &input_state_provider_); pixel_limit_resource->SetResourceListener(&resource_listener); // Set a current pixel count. 
@@ -80,7 +80,7 @@ TEST_F(PixelLimitResourceTest, constexpr int kMaxPixels = 640 * 480; testing::StrictMock resource_listener; RunTaskOnTaskQueue([&]() { - rtc::scoped_refptr pixel_limit_resource = + scoped_refptr pixel_limit_resource = PixelLimitResource::Create(task_queue_.get(), &input_state_provider_); pixel_limit_resource->SetResourceListener(&resource_listener); time_controller_.AdvanceTime(TimeDelta::Zero()); @@ -115,7 +115,7 @@ TEST_F(PixelLimitResourceTest, const int kMinPixels = GetLowerResolutionThan(kMaxPixels); testing::StrictMock resource_listener; RunTaskOnTaskQueue([&]() { - rtc::scoped_refptr pixel_limit_resource = + scoped_refptr pixel_limit_resource = PixelLimitResource::Create(task_queue_.get(), &input_state_provider_); pixel_limit_resource->SetResourceListener(&resource_listener); time_controller_.AdvanceTime(TimeDelta::Zero()); diff --git a/video/adaptation/quality_rampup_experiment_helper.cc b/video/adaptation/quality_rampup_experiment_helper.cc deleted file mode 100644 index adcad40c03..0000000000 --- a/video/adaptation/quality_rampup_experiment_helper.cc +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright 2020 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "video/adaptation/quality_rampup_experiment_helper.h" - -#include -#include - -#include "rtc_base/logging.h" - -namespace webrtc { - -QualityRampUpExperimentHelper::QualityRampUpExperimentHelper( - QualityRampUpExperimentListener* experiment_listener, - Clock* clock, - QualityRampupExperiment experiment) - : experiment_listener_(experiment_listener), - clock_(clock), - quality_rampup_experiment_(std::move(experiment)), - cpu_adapted_(false), - qp_resolution_adaptations_(0) { - RTC_DCHECK(experiment_listener_); - RTC_DCHECK(clock_); -} - -std::unique_ptr -QualityRampUpExperimentHelper::CreateIfEnabled( - QualityRampUpExperimentListener* experiment_listener, - Clock* clock) { - QualityRampupExperiment experiment = QualityRampupExperiment::ParseSettings(); - if (experiment.Enabled()) { - return std::unique_ptr( - new QualityRampUpExperimentHelper(experiment_listener, clock, - experiment)); - } - return nullptr; -} - -void QualityRampUpExperimentHelper::ConfigureQualityRampupExperiment( - bool reset, - absl::optional pixels, - absl::optional max_bitrate) { - if (reset) - quality_rampup_experiment_.Reset(); - if (pixels && max_bitrate) - quality_rampup_experiment_.SetMaxBitrate(*pixels, max_bitrate->kbps()); -} - -void QualityRampUpExperimentHelper::PerformQualityRampupExperiment( - rtc::scoped_refptr quality_scaler_resource, - DataRate bandwidth, - DataRate encoder_target_bitrate, - absl::optional max_bitrate) { - if (!quality_scaler_resource->is_started() || !max_bitrate) - return; - - int64_t now_ms = clock_->TimeInMilliseconds(); - - bool try_quality_rampup = false; - if (quality_rampup_experiment_.BwHigh(now_ms, bandwidth.kbps())) { - // Verify that encoder is at max bitrate and the QP is low. 
- if (encoder_target_bitrate == *max_bitrate && - quality_scaler_resource->QpFastFilterLow()) { - try_quality_rampup = true; - } - } - if (try_quality_rampup && qp_resolution_adaptations_ > 0 && !cpu_adapted_) { - experiment_listener_->OnQualityRampUp(); - } -} - -void QualityRampUpExperimentHelper::cpu_adapted(bool cpu_adapted) { - cpu_adapted_ = cpu_adapted; -} - -void QualityRampUpExperimentHelper::qp_resolution_adaptations( - int qp_resolution_adaptations) { - qp_resolution_adaptations_ = qp_resolution_adaptations; -} - -} // namespace webrtc diff --git a/video/adaptation/quality_rampup_experiment_helper.h b/video/adaptation/quality_rampup_experiment_helper.h deleted file mode 100644 index 4fe1f24876..0000000000 --- a/video/adaptation/quality_rampup_experiment_helper.h +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2020 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef VIDEO_ADAPTATION_QUALITY_RAMPUP_EXPERIMENT_HELPER_H_ -#define VIDEO_ADAPTATION_QUALITY_RAMPUP_EXPERIMENT_HELPER_H_ - -#include - -#include "api/scoped_refptr.h" -#include "api/units/data_rate.h" -#include "rtc_base/experiments/quality_rampup_experiment.h" -#include "system_wrappers/include/clock.h" -#include "video/adaptation/quality_scaler_resource.h" - -namespace webrtc { - -class QualityRampUpExperimentListener { - public: - virtual ~QualityRampUpExperimentListener() = default; - virtual void OnQualityRampUp() = 0; -}; - -// Helper class for orchestrating the WebRTC-Video-QualityRampupSettings -// experiment. -class QualityRampUpExperimentHelper { - public: - // Returns a QualityRampUpExperimentHelper if the experiment is enabled, - // an nullptr otherwise. 
- static std::unique_ptr CreateIfEnabled( - QualityRampUpExperimentListener* experiment_listener, - Clock* clock); - - QualityRampUpExperimentHelper(const QualityRampUpExperimentHelper&) = delete; - QualityRampUpExperimentHelper& operator=( - const QualityRampUpExperimentHelper&) = delete; - - void cpu_adapted(bool cpu_adapted); - void qp_resolution_adaptations(int qp_adaptations); - - void ConfigureQualityRampupExperiment(bool reset, - absl::optional pixels, - absl::optional max_bitrate); - - void PerformQualityRampupExperiment( - rtc::scoped_refptr quality_scaler_resource, - DataRate bandwidth, - DataRate encoder_target_bitrate, - absl::optional max_bitrate); - - private: - QualityRampUpExperimentHelper( - QualityRampUpExperimentListener* experiment_listener, - Clock* clock, - QualityRampupExperiment experiment); - QualityRampUpExperimentListener* const experiment_listener_; - Clock* clock_; - QualityRampupExperiment quality_rampup_experiment_; - bool cpu_adapted_; - int qp_resolution_adaptations_; -}; - -} // namespace webrtc - -#endif // VIDEO_ADAPTATION_QUALITY_RAMPUP_EXPERIMENT_HELPER_H_ diff --git a/video/adaptation/quality_scaler_resource.cc b/video/adaptation/quality_scaler_resource.cc index 68d56fe29e..19ffbf8937 100644 --- a/video/adaptation/quality_scaler_resource.cc +++ b/video/adaptation/quality_scaler_resource.cc @@ -12,6 +12,7 @@ #include +#include "api/field_trials_view.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/balanced_degradation_settings.h" #include "rtc_base/time_utils.h" @@ -19,8 +20,8 @@ namespace webrtc { // static -rtc::scoped_refptr QualityScalerResource::Create() { - return rtc::make_ref_counted(); +scoped_refptr QualityScalerResource::Create() { + return make_ref_counted(); } QualityScalerResource::QualityScalerResource() @@ -37,11 +38,12 @@ bool QualityScalerResource::is_started() const { } void QualityScalerResource::StartCheckForOveruse( - VideoEncoder::QpThresholds qp_thresholds) { + VideoEncoder::QpThresholds qp_thresholds, + const FieldTrialsView& field_trials) { RTC_DCHECK_RUN_ON(encoder_queue()); RTC_DCHECK(!is_started()); - quality_scaler_ = - std::make_unique(this, std::move(qp_thresholds)); + quality_scaler_ = std::make_unique( + this, std::move(qp_thresholds), field_trials); } void QualityScalerResource::StopCheckForOveruse() { @@ -59,12 +61,6 @@ void QualityScalerResource::SetQpThresholds( quality_scaler_->SetQpThresholds(std::move(qp_thresholds)); } -bool QualityScalerResource::QpFastFilterLow() { - RTC_DCHECK_RUN_ON(encoder_queue()); - RTC_DCHECK(is_started()); - return quality_scaler_->QpFastFilterLow(); -} - void QualityScalerResource::OnEncodeCompleted(const EncodedImage& encoded_image, int64_t time_sent_in_us) { RTC_DCHECK_RUN_ON(encoder_queue()); diff --git a/video/adaptation/quality_scaler_resource.h b/video/adaptation/quality_scaler_resource.h index cbb6d3d06f..88aa40c936 100644 --- a/video/adaptation/quality_scaler_resource.h +++ b/video/adaptation/quality_scaler_resource.h @@ -12,10 +12,11 @@ #define VIDEO_ADAPTATION_QUALITY_SCALER_RESOURCE_H_ #include +#include #include #include -#include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/scoped_refptr.h" #include "api/video/video_adaptation_reason.h" #include "api/video_codecs/video_encoder.h" @@ -30,17 +31,17 @@ namespace webrtc { class QualityScalerResource : public VideoStreamEncoderResource, public QualityScalerQpUsageHandlerInterface { public: - static rtc::scoped_refptr Create(); + static scoped_refptr Create(); QualityScalerResource(); 
~QualityScalerResource() override; bool is_started() const; - void StartCheckForOveruse(VideoEncoder::QpThresholds qp_thresholds); + void StartCheckForOveruse(VideoEncoder::QpThresholds qp_thresholds, + const FieldTrialsView& field_trials); void StopCheckForOveruse(); void SetQpThresholds(VideoEncoder::QpThresholds qp_thresholds); - bool QpFastFilterLow(); void OnEncodeCompleted(const EncodedImage& encoded_image, int64_t time_sent_in_us); void OnFrameDropped(EncodedImageCallback::DropReason reason); diff --git a/video/adaptation/quality_scaler_resource_unittest.cc b/video/adaptation/quality_scaler_resource_unittest.cc index 70d297588f..b83db37545 100644 --- a/video/adaptation/quality_scaler_resource_unittest.cc +++ b/video/adaptation/quality_scaler_resource_unittest.cc @@ -11,8 +11,8 @@ #include "video/adaptation/quality_scaler_resource.h" #include +#include -#include "absl/types/optional.h" #include "api/task_queue/task_queue_base.h" #include "api/video_codecs/video_encoder.h" #include "call/adaptation/test/mock_resource_listener.h" @@ -53,10 +53,10 @@ class QualityScalerResourceTest : public ::testing::Test { } protected: - rtc::AutoThread main_thread_; + AutoThread main_thread_; StrictMock fake_resource_listener_; FakeDegradationPreferenceProvider degradation_preference_provider_; - rtc::scoped_refptr quality_scaler_resource_; + scoped_refptr quality_scaler_resource_; }; TEST_F(QualityScalerResourceTest, ReportQpHigh) { diff --git a/video/adaptation/video_stream_encoder_resource.cc b/video/adaptation/video_stream_encoder_resource.cc index ad89aef52a..7793315f93 100644 --- a/video/adaptation/video_stream_encoder_resource.cc +++ b/video/adaptation/video_stream_encoder_resource.cc @@ -51,7 +51,7 @@ void VideoStreamEncoderResource::OnResourceUsageStateMeasured( ResourceUsageState usage_state) { MutexLock crit(&lock_); if (listener_) { - listener_->OnResourceUsageStateMeasured(rtc::scoped_refptr(this), + listener_->OnResourceUsageStateMeasured(scoped_refptr(this), usage_state); } } diff --git a/video/adaptation/video_stream_encoder_resource.h b/video/adaptation/video_stream_encoder_resource.h index e10f595757..5e93b31ec3 100644 --- a/video/adaptation/video_stream_encoder_resource.h +++ b/video/adaptation/video_stream_encoder_resource.h @@ -11,10 +11,10 @@ #ifndef VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_H_ #define VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_H_ +#include #include #include -#include "absl/types/optional.h" #include "api/adaptation/resource.h" #include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" diff --git a/video/adaptation/video_stream_encoder_resource_manager.cc b/video/adaptation/video_stream_encoder_resource_manager.cc index 46db686703..ae575de690 100644 --- a/video/adaptation/video_stream_encoder_resource_manager.cc +++ b/video/adaptation/video_stream_encoder_resource_manager.cc @@ -21,6 +21,7 @@ #include "absl/algorithm/container.h" #include "absl/base/macros.h" #include "api/adaptation/resource.h" +#include "api/field_trials_view.h" #include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "api/video/video_adaptation_reason.h" @@ -87,38 +88,15 @@ bool EqualFlags(const std::vector& a, const std::vector& b) { return std::equal(a.begin(), a.end(), b.begin()); } -absl::optional GetSingleActiveLayerMaxBitrate( - const VideoCodec& codec) { - int num_active = 0; - absl::optional max_bitrate; - if (codec.codecType == VideoCodecType::kVideoCodecVP9) { - for (int i = 0; i < codec.VP9().numberOfSpatialLayers; ++i) { - 
if (codec.spatialLayers[i].active) { - ++num_active; - max_bitrate = - DataRate::KilobitsPerSec(codec.spatialLayers[i].maxBitrate); - } - } - } else { - for (int i = 0; i < codec.numberOfSimulcastStreams; ++i) { - if (codec.simulcastStream[i].active) { - ++num_active; - max_bitrate = - DataRate::KilobitsPerSec(codec.simulcastStream[i].maxBitrate); - } - } - } - return (num_active > 1) ? absl::nullopt : max_bitrate; -} - } // namespace class VideoStreamEncoderResourceManager::InitialFrameDropper { public: explicit InitialFrameDropper( - rtc::scoped_refptr quality_scaler_resource) + scoped_refptr quality_scaler_resource, + const FieldTrialsView& field_trials) : quality_scaler_resource_(quality_scaler_resource), - quality_scaler_settings_(QualityScalerSettings::ParseFromFieldTrials()), + quality_scaler_settings_(field_trials), has_seen_first_bwe_drop_(false), set_start_bitrate_(DataRate::Zero()), set_start_bitrate_time_ms_(0), @@ -136,15 +114,15 @@ class VideoStreamEncoderResourceManager::InitialFrameDropper { return initial_framedrop_ < kMaxInitialFramedrop; } - absl::optional single_active_stream_pixels() const { + std::optional single_active_stream_pixels() const { return single_active_stream_pixels_; } - absl::optional UseBandwidthAllocationBps() const { + std::optional UseBandwidthAllocationBps() const { return (use_bandwidth_allocation_ && bandwidth_allocation_ > DataRate::Zero()) - ? absl::optional(bandwidth_allocation_.bps()) - : absl::nullopt; + ? std::optional(bandwidth_allocation_.bps()) + : std::nullopt; } bool last_stream_configuration_changed() const { @@ -241,14 +219,14 @@ class VideoStreamEncoderResourceManager::InitialFrameDropper { // achieve desired bitrate. static const int kMaxInitialFramedrop = 4; - const rtc::scoped_refptr quality_scaler_resource_; + const scoped_refptr quality_scaler_resource_; const QualityScalerSettings quality_scaler_settings_; bool has_seen_first_bwe_drop_; DataRate set_start_bitrate_; int64_t set_start_bitrate_time_ms_; // Counts how many frames we've dropped in the initial framedrop phase. 
int initial_framedrop_; - absl::optional single_active_stream_pixels_; + std::optional single_active_stream_pixels_; bool use_bandwidth_allocation_; DataRate bandwidth_allocation_; @@ -289,14 +267,14 @@ VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager( clock_(clock), experiment_cpu_load_estimator_(experiment_cpu_load_estimator), initial_frame_dropper_( - std::make_unique(quality_scaler_resource_)), - quality_scaling_experiment_enabled_(QualityScalingExperiment::Enabled()), + std::make_unique(quality_scaler_resource_, + field_trials)), + quality_scaling_experiment_enabled_( + QualityScalingExperiment::Enabled(field_trials_)), pixel_limit_resource_experiment_enabled_( field_trials.IsEnabled(kPixelLimitResourceFieldTrialName)), - encoder_target_bitrate_bps_(absl::nullopt), - quality_rampup_experiment_( - QualityRampUpExperimentHelper::CreateIfEnabled(this, clock_)), - encoder_settings_(absl::nullopt) { + encoder_target_bitrate_bps_(std::nullopt), + encoder_settings_(std::nullopt) { TRACE_EVENT0( "webrtc", "VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager"); @@ -400,7 +378,7 @@ void VideoStreamEncoderResourceManager::StopManagedResources() { } void VideoStreamEncoderResourceManager::AddResource( - rtc::scoped_refptr resource, + scoped_refptr resource, VideoAdaptationReason reason) { RTC_DCHECK_RUN_ON(encoder_queue_); RTC_DCHECK(resource); @@ -412,7 +390,7 @@ void VideoStreamEncoderResourceManager::AddResource( } void VideoStreamEncoderResourceManager::RemoveResource( - rtc::scoped_refptr resource) { + scoped_refptr resource) { { RTC_DCHECK_RUN_ON(encoder_queue_); RTC_DCHECK(resource); @@ -438,12 +416,6 @@ void VideoStreamEncoderResourceManager::SetEncoderSettings( initial_frame_dropper_->OnEncoderSettingsUpdated( encoder_settings_->video_codec(), current_adaptation_counters_); MaybeUpdateTargetFrameRate(); - if (quality_rampup_experiment_) { - quality_rampup_experiment_->ConfigureQualityRampupExperiment( - initial_frame_dropper_->last_stream_configuration_changed(), - initial_frame_dropper_->single_active_stream_pixels(), - GetSingleActiveLayerMaxBitrate(encoder_settings_->video_codec())); - } } void VideoStreamEncoderResourceManager::SetStartBitrate( @@ -503,13 +475,13 @@ void VideoStreamEncoderResourceManager::OnEncodeStarted( void VideoStreamEncoderResourceManager::OnEncodeCompleted( const EncodedImage& encoded_image, int64_t time_sent_in_us, - absl::optional encode_duration_us, + std::optional encode_duration_us, DataSize frame_size) { RTC_DCHECK_RUN_ON(encoder_queue_); // Inform `encode_usage_resource_` of the encode completed event. 
- uint32_t timestamp = encoded_image.Timestamp(); + uint32_t timestamp = encoded_image.RtpTimestamp(); int64_t capture_time_us = - encoded_image.capture_time_ms_ * rtc::kNumMicrosecsPerMillisec; + encoded_image.capture_time_ms_ * kNumMicrosecsPerMillisec; encode_usage_resource_->OnEncodeCompleted( timestamp, time_sent_in_us, capture_time_us, encode_duration_us); quality_scaler_resource_->OnEncodeCompleted(encoded_image, time_sent_in_us); @@ -528,13 +500,13 @@ bool VideoStreamEncoderResourceManager::DropInitialFrames() const { return initial_frame_dropper_->DropInitialFrames(); } -absl::optional +std::optional VideoStreamEncoderResourceManager::SingleActiveStreamPixels() const { RTC_DCHECK_RUN_ON(encoder_queue_); return initial_frame_dropper_->single_active_stream_pixels(); } -absl::optional +std::optional VideoStreamEncoderResourceManager::UseBandwidthAllocationBps() const { RTC_DCHECK_RUN_ON(encoder_queue_); return initial_frame_dropper_->UseBandwidthAllocationBps(); @@ -543,25 +515,17 @@ VideoStreamEncoderResourceManager::UseBandwidthAllocationBps() const { void VideoStreamEncoderResourceManager::OnMaybeEncodeFrame() { RTC_DCHECK_RUN_ON(encoder_queue_); initial_frame_dropper_->Disable(); - if (quality_rampup_experiment_ && quality_scaler_resource_->is_started()) { - DataRate bandwidth = encoder_rates_.has_value() - ? encoder_rates_->bandwidth_allocation - : DataRate::Zero(); - quality_rampup_experiment_->PerformQualityRampupExperiment( - quality_scaler_resource_, bandwidth, - DataRate::BitsPerSec(encoder_target_bitrate_bps_.value_or(0)), - GetSingleActiveLayerMaxBitrate(encoder_settings_->video_codec())); - } } void VideoStreamEncoderResourceManager::UpdateQualityScalerSettings( - absl::optional qp_thresholds) { + std::optional qp_thresholds) { RTC_DCHECK_RUN_ON(encoder_queue_); if (qp_thresholds.has_value()) { if (quality_scaler_resource_->is_started()) { quality_scaler_resource_->SetQpThresholds(qp_thresholds.value()); } else { - quality_scaler_resource_->StartCheckForOveruse(qp_thresholds.value()); + quality_scaler_resource_->StartCheckForOveruse(qp_thresholds.value(), + field_trials_); AddResource(quality_scaler_resource_, VideoAdaptationReason::kQuality); } } else if (quality_scaler_resource_->is_started()) { @@ -574,7 +538,8 @@ void VideoStreamEncoderResourceManager::UpdateQualityScalerSettings( void VideoStreamEncoderResourceManager::UpdateBandwidthQualityScalerSettings( bool bandwidth_quality_scaling_allowed, const std::vector& - resolution_bitrate_limits) { + resolution_bitrate_limits, + VideoCodecType codec_type) { RTC_DCHECK_RUN_ON(encoder_queue_); if (!bandwidth_quality_scaling_allowed) { @@ -589,7 +554,7 @@ void VideoStreamEncoderResourceManager::UpdateBandwidthQualityScalerSettings( AddResource(bandwidth_quality_scaler_resource_, webrtc::VideoAdaptationReason::kQuality); bandwidth_quality_scaler_resource_->StartCheckForOveruse( - resolution_bitrate_limits); + resolution_bitrate_limits, codec_type); } } } @@ -612,23 +577,23 @@ void VideoStreamEncoderResourceManager::ConfigureQualityScaler( // Quality scaler has not already been configured. // Use experimental thresholds if available. - absl::optional experimental_thresholds; + std::optional experimental_thresholds; if (quality_scaling_experiment_enabled_) { experimental_thresholds = QualityScalingExperiment::GetQpThresholds( - GetVideoCodecTypeOrGeneric(encoder_settings_)); + GetVideoCodecTypeOrGeneric(encoder_settings_), field_trials_); } UpdateQualityScalerSettings(experimental_thresholds.has_value() ? 
experimental_thresholds : scaling_settings.thresholds); } } else { - UpdateQualityScalerSettings(absl::nullopt); + UpdateQualityScalerSettings(std::nullopt); } // Set the qp-thresholds to the balanced settings if balanced mode. if (degradation_preference_ == DegradationPreference::BALANCED && quality_scaler_resource_->is_started()) { - absl::optional thresholds = + std::optional thresholds = balanced_settings_.GetQpThresholds( GetVideoCodecTypeOrGeneric(encoder_settings_), LastFrameSizeOrDefault()); @@ -648,13 +613,14 @@ void VideoStreamEncoderResourceManager::ConfigureBandwidthQualityScaler( encoder_settings_->encoder_config().is_quality_scaling_allowed) && !encoder_info.is_qp_trusted.value_or(true); - UpdateBandwidthQualityScalerSettings(bandwidth_quality_scaling_allowed, - encoder_info.resolution_bitrate_limits); + UpdateBandwidthQualityScalerSettings( + bandwidth_quality_scaling_allowed, encoder_info.resolution_bitrate_limits, + GetVideoCodecTypeOrGeneric(encoder_settings_)); UpdateStatsAdaptationSettings(); } VideoAdaptationReason VideoStreamEncoderResourceManager::GetReasonFromResource( - rtc::scoped_refptr resource) const { + scoped_refptr resource) const { RTC_DCHECK_RUN_ON(encoder_queue_); const auto& registered_resource = resources_.find(resource); RTC_DCHECK(registered_resource != resources_.end()) @@ -680,7 +646,7 @@ CpuOveruseOptions VideoStreamEncoderResourceManager::GetCpuOveruseOptions() options.high_encode_usage_threshold_percent = 200; } if (experiment_cpu_load_estimator_) { - options.filter_time_ms = 5 * rtc::kNumMillisecsPerSec; + options.filter_time_ms = 5 * kNumMillisecsPerSec; } return options; } @@ -697,7 +663,7 @@ int VideoStreamEncoderResourceManager::LastFrameSizeOrDefault() const { void VideoStreamEncoderResourceManager::OnVideoSourceRestrictionsUpdated( VideoSourceRestrictions restrictions, const VideoAdaptationCounters& adaptation_counters, - rtc::scoped_refptr reason, + scoped_refptr reason, const VideoSourceRestrictions& unfiltered_restrictions) { RTC_DCHECK_RUN_ON(encoder_queue_); current_adaptation_counters_ = adaptation_counters; @@ -714,8 +680,8 @@ void VideoStreamEncoderResourceManager::OnVideoSourceRestrictionsUpdated( } void VideoStreamEncoderResourceManager::OnResourceLimitationChanged( - rtc::scoped_refptr resource, - const std::map, VideoAdaptationCounters>& + scoped_refptr resource, + const std::map, VideoAdaptationCounters>& resource_limitations) { RTC_DCHECK_RUN_ON(encoder_queue_); if (!resource) { @@ -739,30 +705,20 @@ void VideoStreamEncoderResourceManager::OnResourceLimitationChanged( adaptation_reason, limitations[VideoAdaptationReason::kCpu], limitations[VideoAdaptationReason::kQuality]); - if (quality_rampup_experiment_) { - bool cpu_limited = limitations.at(VideoAdaptationReason::kCpu).Total() > 0; - auto qp_resolution_adaptations = - limitations.at(VideoAdaptationReason::kQuality).resolution_adaptations; - quality_rampup_experiment_->cpu_adapted(cpu_limited); - quality_rampup_experiment_->qp_resolution_adaptations( - qp_resolution_adaptations); - } - RTC_LOG(LS_INFO) << ActiveCountsToString(limitations); } void VideoStreamEncoderResourceManager::MaybeUpdateTargetFrameRate() { RTC_DCHECK_RUN_ON(encoder_queue_); - absl::optional codec_max_frame_rate = + std::optional codec_max_frame_rate = encoder_settings_.has_value() - ? absl::optional( - encoder_settings_->video_codec().maxFramerate) - : absl::nullopt; + ? 
std::optional(encoder_settings_->video_codec().maxFramerate) + : std::nullopt; // The current target framerate is the maximum frame rate as specified by // the current codec configuration or any limit imposed by the adaptation // module. This is used to make sure overuse detection doesn't needlessly // trigger in low and/or variable framerate scenarios. - absl::optional target_frame_rate = + std::optional target_frame_rate = video_source_restrictions_.max_frame_rate(); if (!target_frame_rate.has_value() || (codec_max_frame_rate.has_value() && @@ -791,7 +747,7 @@ void VideoStreamEncoderResourceManager::UpdateStatsAdaptationSettings() const { std::string VideoStreamEncoderResourceManager::ActiveCountsToString( const std::map& active_counts) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Downgrade counts: fps: {"; for (auto& reason_count : active_counts) { @@ -808,12 +764,6 @@ std::string VideoStreamEncoderResourceManager::ActiveCountsToString( return ss.Release(); } -void VideoStreamEncoderResourceManager::OnQualityRampUp() { - RTC_DCHECK_RUN_ON(encoder_queue_); - stream_adapter_->ClearRestrictions(); - quality_rampup_experiment_.reset(); -} - bool VideoStreamEncoderResourceManager::IsSimulcastOrMultipleSpatialLayers( const VideoEncoderConfig& encoder_config, const VideoCodec& video_codec) { @@ -823,7 +773,7 @@ bool VideoStreamEncoderResourceManager::IsSimulcastOrMultipleSpatialLayers( return false; } - absl::optional num_spatial_layers; + std::optional num_spatial_layers; if (simulcast_layers[0].scalability_mode.has_value() && video_codec.numberOfSimulcastStreams == 1) { num_spatial_layers = ScalabilityModeToNumSpatialLayers( diff --git a/video/adaptation/video_stream_encoder_resource_manager.h b/video/adaptation/video_stream_encoder_resource_manager.h index 8925157bcf..1520bd5aef 100644 --- a/video/adaptation/video_stream_encoder_resource_manager.h +++ b/video/adaptation/video_stream_encoder_resource_manager.h @@ -14,12 +14,12 @@ #include #include #include +#include #include #include #include #include -#include "absl/types/optional.h" #include "api/adaptation/resource.h" #include "api/field_trials_view.h" #include "api/rtp_parameters.h" @@ -46,7 +46,6 @@ #include "video/adaptation/encode_usage_resource.h" #include "video/adaptation/overuse_frame_detector.h" #include "video/adaptation/pixel_limit_resource.h" -#include "video/adaptation/quality_rampup_experiment_helper.h" #include "video/adaptation/quality_scaler_resource.h" #include "video/adaptation/video_stream_encoder_resource.h" #include "video/config/video_encoder_config.h" @@ -70,8 +69,7 @@ extern const int kDefaultInputPixelsHeight; // ResourceAdaptationProcessor code such as the initial frame dropping. class VideoStreamEncoderResourceManager : public VideoSourceRestrictionsListener, - public ResourceLimitationsListener, - public QualityRampUpExperimentListener { + public ResourceLimitationsListener { public: VideoStreamEncoderResourceManager( VideoStreamInputStateProvider* input_state_provider, @@ -121,37 +119,34 @@ class VideoStreamEncoderResourceManager int64_t time_when_first_seen_us); void OnEncodeCompleted(const EncodedImage& encoded_image, int64_t time_sent_in_us, - absl::optional encode_duration_us, + std::optional encode_duration_us, DataSize frame_size); void OnFrameDropped(EncodedImageCallback::DropReason reason); // Resources need to be mapped to an AdaptReason (kCpu or kQuality) in order // to update legacy getStats(). 
- void AddResource(rtc::scoped_refptr resource, + void AddResource(scoped_refptr resource, VideoAdaptationReason reason); - void RemoveResource(rtc::scoped_refptr resource); + void RemoveResource(scoped_refptr resource); std::vector AdaptationConstraints() const; // If true, the VideoStreamEncoder should execute its logic to maybe drop // frames based on size and bitrate. bool DropInitialFrames() const; - absl::optional SingleActiveStreamPixels() const; - absl::optional UseBandwidthAllocationBps() const; + std::optional SingleActiveStreamPixels() const; + std::optional UseBandwidthAllocationBps() const; // VideoSourceRestrictionsListener implementation. // Updates `video_source_restrictions_`. void OnVideoSourceRestrictionsUpdated( VideoSourceRestrictions restrictions, const VideoAdaptationCounters& adaptation_counters, - rtc::scoped_refptr reason, + scoped_refptr reason, const VideoSourceRestrictions& unfiltered_restrictions) override; void OnResourceLimitationChanged( - rtc::scoped_refptr resource, - const std::map, VideoAdaptationCounters>& + scoped_refptr resource, + const std::map, VideoAdaptationCounters>& resource_limitations) override; - // QualityRampUpExperimentListener implementation. - void OnQualityRampUp() override; - static bool IsSimulcastOrMultipleSpatialLayers( const VideoEncoderConfig& encoder_config, const VideoCodec& video_codec); @@ -160,7 +155,7 @@ class VideoStreamEncoderResourceManager class InitialFrameDropper; VideoAdaptationReason GetReasonFromResource( - rtc::scoped_refptr resource) const; + scoped_refptr resource) const; CpuOveruseOptions GetCpuOveruseOptions() const; int LastFrameSizeOrDefault() const; @@ -171,12 +166,13 @@ class VideoStreamEncoderResourceManager // Use nullopt to disable quality scaling. void UpdateQualityScalerSettings( - absl::optional qp_thresholds); + std::optional qp_thresholds); void UpdateBandwidthQualityScalerSettings( bool bandwidth_quality_scaling_allowed, const std::vector& - resolution_bitrate_limits); + resolution_bitrate_limits, + VideoCodecType codec_type); void UpdateStatsAdaptationSettings() const; @@ -190,10 +186,10 @@ class VideoStreamEncoderResourceManager RTC_GUARDED_BY(encoder_queue_); const std::unique_ptr balanced_constraint_ RTC_GUARDED_BY(encoder_queue_); - const rtc::scoped_refptr encode_usage_resource_; - const rtc::scoped_refptr quality_scaler_resource_; - rtc::scoped_refptr pixel_limit_resource_; - const rtc::scoped_refptr + const scoped_refptr encode_usage_resource_; + const scoped_refptr quality_scaler_resource_; + scoped_refptr pixel_limit_resource_; + const scoped_refptr bandwidth_quality_scaler_resource_; TaskQueueBase* encoder_queue_; @@ -219,18 +215,16 @@ class VideoStreamEncoderResourceManager const bool quality_scaling_experiment_enabled_ RTC_GUARDED_BY(encoder_queue_); const bool pixel_limit_resource_experiment_enabled_ RTC_GUARDED_BY(encoder_queue_); - absl::optional encoder_target_bitrate_bps_ - RTC_GUARDED_BY(encoder_queue_); - absl::optional encoder_rates_ + std::optional encoder_target_bitrate_bps_ RTC_GUARDED_BY(encoder_queue_); - std::unique_ptr quality_rampup_experiment_ + std::optional encoder_rates_ RTC_GUARDED_BY(encoder_queue_); - absl::optional encoder_settings_ + std::optional encoder_settings_ RTC_GUARDED_BY(encoder_queue_); // Ties a resource to a reason for statistical reporting. This AdaptReason is // also used by this module to make decisions about how to adapt up/down. 
- std::map, VideoAdaptationReason> resources_ + std::map, VideoAdaptationReason> resources_ RTC_GUARDED_BY(encoder_queue_); }; diff --git a/video/alignment_adjuster.cc b/video/alignment_adjuster.cc index 1762bec4cf..de32992837 100644 --- a/video/alignment_adjuster.cc +++ b/video/alignment_adjuster.cc @@ -67,7 +67,7 @@ double RoundToMultiple(int alignment, int AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors( const VideoEncoder::EncoderInfo& encoder_info, VideoEncoderConfig* config, - absl::optional max_layers) { + std::optional max_layers) { const int requested_alignment = encoder_info.requested_resolution_alignment; if (!encoder_info.apply_alignment_to_all_simulcast_layers) { return requested_alignment; diff --git a/video/alignment_adjuster.h b/video/alignment_adjuster.h index 36ac062e91..086f633f96 100644 --- a/video/alignment_adjuster.h +++ b/video/alignment_adjuster.h @@ -34,7 +34,7 @@ class AlignmentAdjuster { static int GetAlignmentAndMaybeAdjustScaleFactors( const VideoEncoder::EncoderInfo& info, VideoEncoderConfig* config, - absl::optional max_layers); + std::optional max_layers); }; } // namespace webrtc diff --git a/video/alignment_adjuster_unittest.cc b/video/alignment_adjuster_unittest.cc index 28e4bc0550..95a5e1346e 100644 --- a/video/alignment_adjuster_unittest.cc +++ b/video/alignment_adjuster_unittest.cc @@ -125,7 +125,7 @@ TEST_P(AlignmentAdjusterTest, AlignmentAppliedToAllLayers) { VideoEncoder::EncoderInfo info = GetEncoderInfo(kRequestedAlignment, kApplyAlignmentToAllLayers); int alignment = AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors( - info, &config, absl::nullopt); + info, &config, std::nullopt); EXPECT_EQ(alignment, kAdjustedAlignment); // Verify adjusted scale factors. @@ -150,7 +150,7 @@ TEST_P(AlignmentAdjusterTest, AlignmentNotAppliedToAllLayers) { VideoEncoder::EncoderInfo info = GetEncoderInfo(kRequestedAlignment, kApplyAlignmentToAllLayers); int alignment = AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors( - info, &config, absl::nullopt); + info, &config, std::nullopt); EXPECT_EQ(alignment, kRequestedAlignment); // Verify that scale factors are not adjusted. @@ -175,7 +175,7 @@ TEST_P(AlignmentAdjusterTestTwoLayers, AlignmentAppliedToAllLayers) { VideoEncoder::EncoderInfo info = GetEncoderInfo(kRequestedAlignment, kApplyAlignmentToAllLayers); int alignment = AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors( - info, &config, absl::optional(kMaxLayers)); + info, &config, std::optional(kMaxLayers)); EXPECT_EQ(alignment, kAdjustedAlignment); // Verify adjusted scale factors. 
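Most of the hunks above repeat the same two mechanical migrations file by file: absl::optional/absl::nullopt become std::optional/std::nullopt from <optional>, and the rtc::-prefixed ref-counting helpers are spelled without the prefix. A minimal sketch of the pattern (not part of the patch; `pixels` and `T` are hypothetical):

  #include <cstdint>
  #include <optional>  // replaces "absl/types/optional.h"

  std::optional<uint32_t> pixels = std::nullopt;  // was absl::optional<uint32_t> / absl::nullopt

  // Ref-counted pointers keep their headers but drop the rtc:: prefix:
  //   rtc::scoped_refptr<T> p = rtc::make_ref_counted<T>(...);       // before
  //   webrtc::scoped_refptr<T> p = webrtc::make_ref_counted<T>(...); // after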
diff --git a/video/buffered_frame_decryptor.cc b/video/buffered_frame_decryptor.cc index 61e88122fd..29ec8aec22 100644 --- a/video/buffered_frame_decryptor.cc +++ b/video/buffered_frame_decryptor.cc @@ -10,13 +10,21 @@ #include "video/buffered_frame_decryptor.h" +#include +#include +#include #include #include +#include "api/array_view.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/field_trials_view.h" +#include "api/media_types.h" +#include "api/scoped_refptr.h" #include "modules/rtp_rtcp/source/frame_object.h" #include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" +#include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "system_wrappers/include/field_trial.h" namespace webrtc { @@ -32,7 +40,7 @@ BufferedFrameDecryptor::BufferedFrameDecryptor( BufferedFrameDecryptor::~BufferedFrameDecryptor() {} void BufferedFrameDecryptor::SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) { + scoped_refptr frame_decryptor) { frame_decryptor_ = std::move(frame_decryptor); } @@ -65,12 +73,12 @@ BufferedFrameDecryptor::FrameDecision BufferedFrameDecryptor::DecryptFrame( } // Retrieve the maximum possible size of the decrypted payload. const size_t max_plaintext_byte_size = - frame_decryptor_->GetMaxPlaintextByteSize(cricket::MEDIA_TYPE_VIDEO, + frame_decryptor_->GetMaxPlaintextByteSize(webrtc::MediaType::VIDEO, frame->size()); RTC_CHECK_LE(max_plaintext_byte_size, frame->size()); // Place the decrypted frame inline into the existing frame. - rtc::ArrayView inline_decrypted_bitstream(frame->mutable_data(), - max_plaintext_byte_size); + ArrayView inline_decrypted_bitstream(frame->mutable_data(), + max_plaintext_byte_size); // Enable authenticating the header if the field trial isn't disabled. std::vector additional_data; @@ -80,7 +88,7 @@ BufferedFrameDecryptor::FrameDecision BufferedFrameDecryptor::DecryptFrame( // Attempt to decrypt the video frame. const FrameDecryptorInterface::Result decrypt_result = - frame_decryptor_->Decrypt(cricket::MEDIA_TYPE_VIDEO, /*csrcs=*/{}, + frame_decryptor_->Decrypt(webrtc::MediaType::VIDEO, /*csrcs=*/{}, additional_data, *frame, inline_decrypted_bitstream); // Optionally call the callback if there was a change in status diff --git a/video/buffered_frame_decryptor.h b/video/buffered_frame_decryptor.h index 1865ea62c5..2c857f8c98 100644 --- a/video/buffered_frame_decryptor.h +++ b/video/buffered_frame_decryptor.h @@ -70,7 +70,7 @@ class BufferedFrameDecryptor final { // decryptor. This allows the decryptor to be switched out without resetting // the video stream. void SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor); + scoped_refptr frame_decryptor); // Determines whether the frame should be stashed, dropped or handed off to // the OnDecryptedFrameCallback. 
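The DecryptFrame() changes above keep the existing in-place decryption flow while swapping cricket::MEDIA_TYPE_VIDEO for webrtc::MediaType::VIDEO and dropping the rtc:: prefix on ArrayView: the decryptor first reports an upper bound on the plaintext size (checked to be no larger than the encrypted payload), and decryption then writes directly into a view over the frame's own buffer. A minimal sketch of that buffer handling, with a plain std::vector standing in for the encoded frame and hypothetical names throughout:

  #include <cstddef>
  #include <cstdint>
  #include <vector>

  #include "api/array_view.h"

  // `buffer` plays the role of the frame payload; `max_plaintext` corresponds
  // to GetMaxPlaintextByteSize() and is never larger than buffer.size(), so
  // the view stays inside the existing allocation (no copy, no realloc).
  void InPlaceDecryptSketch(std::vector<uint8_t>& buffer, size_t max_plaintext) {
    webrtc::ArrayView<uint8_t> plaintext(buffer.data(), max_plaintext);
    // ... Decrypt() would fill `plaintext` and report how many bytes it wrote ...
    size_t bytes_written = plaintext.size();  // placeholder for the reported count
    buffer.resize(bytes_written);             // analogous to shrinking the frame
  }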
@@ -94,7 +94,7 @@ class BufferedFrameDecryptor final { bool first_frame_decrypted_ = false; FrameDecryptorInterface::Status last_status_ = FrameDecryptorInterface::Status::kUnknown; - rtc::scoped_refptr frame_decryptor_; + scoped_refptr frame_decryptor_; OnDecryptedFrameCallback* const decrypted_frame_callback_; OnDecryptionStatusChangeCallback* const decryption_status_change_callback_; std::deque> stashed_frames_; diff --git a/video/buffered_frame_decryptor_unittest.cc b/video/buffered_frame_decryptor_unittest.cc index 074777bf3a..28e93fe022 100644 --- a/video/buffered_frame_decryptor_unittest.cc +++ b/video/buffered_frame_decryptor_unittest.cc @@ -12,6 +12,7 @@ #include #include +#include #include #include "api/test/mock_frame_decryptor.h" @@ -74,7 +75,8 @@ class BufferedFrameDecryptorTest : public ::testing::Test, kVideoRotation_0, VideoContentType::UNSPECIFIED, rtp_video_header, - /*color_space=*/absl::nullopt, + /*color_space=*/std::nullopt, + /*frame_instrumentation_data=*/std::nullopt, RtpPacketInfos(), EncodedImageBuffer::Create(/*size=*/0)); // clang-format on @@ -86,7 +88,7 @@ class BufferedFrameDecryptorTest : public ::testing::Test, decrypted_frame_call_count_ = 0; decryption_status_change_count_ = 0; seq_num_ = 0; - mock_frame_decryptor_ = rtc::make_ref_counted(); + mock_frame_decryptor_ = make_ref_counted(); buffered_frame_decryptor_ = std::make_unique(this, this, field_trials_); buffered_frame_decryptor_->SetFrameDecryptor(mock_frame_decryptor_); @@ -96,7 +98,7 @@ class BufferedFrameDecryptorTest : public ::testing::Test, test::ScopedKeyValueConfig field_trials_; std::vector fake_packet_data_; - rtc::scoped_refptr mock_frame_decryptor_; + scoped_refptr mock_frame_decryptor_; std::unique_ptr buffered_frame_decryptor_; size_t decrypted_frame_call_count_; size_t decryption_status_change_count_ = 0; diff --git a/video/call_stats2_unittest.cc b/video/call_stats2_unittest.cc index 76abbcfebd..af19f3b883 100644 --- a/video/call_stats2_unittest.cc +++ b/video/call_stats2_unittest.cc @@ -59,9 +59,9 @@ class CallStats2Test : public ::testing::Test { "CallStats", TaskQueueFactory::Priority::NORMAL); - // Note: Since rtc::Thread doesn't support injecting a Clock, we're going + // Note: Since webrtc::Thread doesn't support injecting a Clock, we're going // to be using a mix of the fake clock (used by CallStats) as well as the - // system clock (used by rtc::Thread). This isn't ideal and will result in + // system clock (used by webrtc::Thread). This isn't ideal and will result in // the tests taking longer to execute in some cases than they need to. 
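As the comment above notes, CallStats reads time from the injected SimulatedClock while webrtc::Thread still runs on the real system clock, so only the CallStats side of a test can be fast-forwarded. A minimal sketch of what the fake-clock half allows (hypothetical helper, not part of the test):

  #include "system_wrappers/include/clock.h"

  // Advances only the simulated time that CallStats observes; anything waiting
  // on webrtc::Thread timers still blocks on real wall-clock time.
  void FastForwardOneSecond(webrtc::SimulatedClock& fake_clock) {
    fake_clock.AdvanceTimeMilliseconds(1000);
  }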
SimulatedClock fake_clock_{12345}; CallStats call_stats_{&fake_clock_, loop_.task_queue()}; diff --git a/video/config/BUILD.gn b/video/config/BUILD.gn index 96e254e76b..5dcc6316f4 100644 --- a/video/config/BUILD.gn +++ b/video/config/BUILD.gn @@ -18,14 +18,16 @@ rtc_library("streams_config") { deps = [ ":encoder_config", + "../../api:array_view", "../../api:field_trials_view", - "../../api/transport:field_trial_based_config", "../../api/units:data_rate", + "../../api/video:resolution", "../../api/video:video_codec_constants", + "../../api/video:video_frame", "../../api/video_codecs:video_codecs_api", "../../call/adaptation:resource_adaptation", "../../media:media_constants", - "../../media:rtc_media_base", + "../../media:video_adapter", "../../modules/video_coding:video_coding_utility", "../../modules/video_coding:webrtc_vp9_helpers", "../../rtc_base:checks", @@ -34,12 +36,8 @@ rtc_library("streams_config") { "../../rtc_base/experiments:min_video_bitrate_experiment", "../../rtc_base/experiments:normalize_simulcast_size_experiment", "../../rtc_base/experiments:rate_control_settings", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", ] } @@ -50,6 +48,7 @@ rtc_library("encoder_config") { ] deps = [ + "../../api:field_trials_view", "../../api:scoped_refptr", "../../api/video:resolution", "../../api/video_codecs:scalability_mode", @@ -58,12 +57,6 @@ rtc_library("encoder_config") { "../../rtc_base:refcount", "../../rtc_base:stringutils", ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - ] } if (rtc_include_tests) { @@ -76,21 +69,22 @@ if (rtc_include_tests) { "simulcast_unittest.cc", ] deps = [ + ":encoder_config", ":streams_config", - "../../api/transport:field_trial_based_config", + "../../api:field_trials_view", + "../../api:make_ref_counted", + "../../api/units:data_rate", + "../../api/video:resolution", + "../../api/video:video_frame", + "../../api/video_codecs:scalability_mode", + "../../api/video_codecs:video_codecs_api", "../../call/adaptation:resource_adaptation", "../../media:media_constants", - "../../test:field_trial", + "../../rtc_base:safe_conversions", + "../../rtc_base/experiments:min_video_bitrate_experiment", + "../../test:explicit_key_value_config", "../../test:test_support", - ] - absl_deps = [ - "//third_party/abseil-cpp/absl/algorithm:container", - "//third_party/abseil-cpp/absl/functional:any_invocable", - "//third_party/abseil-cpp/absl/functional:bind_front", - "//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", - "//third_party/abseil-cpp/absl/types:variant", + "//third_party/abseil-cpp/absl/strings:string_view", ] if (!build_with_mozilla) { deps += [ "../../media:rtc_media_base" ] diff --git a/video/config/encoder_stream_factory.cc b/video/config/encoder_stream_factory.cc index a4e41ad62d..558427478a 100644 --- a/video/config/encoder_stream_factory.cc +++ b/video/config/encoder_stream_factory.cc @@ -17,18 +17,22 @@ #include "absl/algorithm/container.h" #include "absl/strings/match.h" +#include "api/field_trials_view.h" #include "api/video/video_codec_constants.h" #include "media/base/media_constants.h" #include "media/base/video_adapter.h" #include 
"modules/video_coding/codecs/vp9/svc_config.h" +#include "modules/video_coding/utility/simulcast_utility.h" #include "rtc_base/experiments/min_video_bitrate_experiment.h" #include "rtc_base/experiments/normalize_simulcast_size_experiment.h" #include "rtc_base/logging.h" #include "video/config/simulcast.h" -namespace cricket { +namespace webrtc { namespace { +using ::webrtc::FieldTrialsView; + const int kMinLayerSize = 16; int ScaleDownResolution(int resolution, @@ -47,7 +51,7 @@ bool PowerOfTwo(int value) { return (value > 0) && ((value & (value - 1)) == 0); } -bool IsScaleFactorsPowerOfTwo(const webrtc::VideoEncoderConfig& config) { +bool IsScaleFactorsPowerOfTwo(const VideoEncoderConfig& config) { for (const auto& layer : config.simulcast_layers) { double scale = std::max(layer.scale_resolution_down_by, 1.0); if (std::round(scale) != scale || !PowerOfTwo(scale)) { @@ -57,14 +61,14 @@ bool IsScaleFactorsPowerOfTwo(const webrtc::VideoEncoderConfig& config) { return true; } -bool IsTemporalLayersSupported(const std::string& codec_name) { - return absl::EqualsIgnoreCase(codec_name, kVp8CodecName) || - absl::EqualsIgnoreCase(codec_name, kVp9CodecName) || - absl::EqualsIgnoreCase(codec_name, kAv1CodecName); +bool IsTemporalLayersSupported(VideoCodecType codec_type) { + return codec_type == webrtc::VideoCodecType::kVideoCodecVP8 || + codec_type == webrtc::VideoCodecType::kVideoCodecVP9 || + codec_type == webrtc::VideoCodecType::kVideoCodecAV1 || + codec_type == webrtc::VideoCodecType::kVideoCodecH265; } -size_t FindRequiredActiveLayers( - const webrtc::VideoEncoderConfig& encoder_config) { +size_t FindRequiredActiveLayers(const VideoEncoderConfig& encoder_config) { // Need enough layers so that at least the first active one is present. for (size_t i = 0; i < encoder_config.number_of_streams; ++i) { if (encoder_config.simulcast_layers[i].active) { @@ -94,49 +98,173 @@ static int GetMaxDefaultVideoBitrateKbps(int width, return max_bitrate; } -} // namespace +int GetDefaultMaxQp(VideoCodecType codec_type) { + switch (codec_type) { + case webrtc::kVideoCodecH264: + case webrtc::kVideoCodecH265: + return kDefaultVideoMaxQpH26x; + case webrtc::kVideoCodecVP8: + case webrtc::kVideoCodecVP9: + case webrtc::kVideoCodecGeneric: + return kDefaultVideoMaxQpVpx; + case webrtc::kVideoCodecAV1: + return kDefaultVideoMaxQpAv1; + } +} + +// Round size to nearest simulcast-friendly size. +// Simulcast stream width and height must both be dividable by +// |2 ^ (simulcast_layers - 1)|. +int NormalizeSimulcastSize(const FieldTrialsView& field_trials, + int size, + size_t simulcast_layers) { + int base2_exponent = static_cast(simulcast_layers) - 1; + const std::optional experimental_base2_exponent = + webrtc::NormalizeSimulcastSizeExperiment::GetBase2Exponent(field_trials); + if (experimental_base2_exponent && + (size > (1 << *experimental_base2_exponent))) { + base2_exponent = *experimental_base2_exponent; + } + return ((size >> base2_exponent) << base2_exponent); +} + +// Override bitrate limits and other stream settings with values from +// `encoder_config.simulcast_layers` which come from `RtpEncodingParameters`. +void OverrideStreamSettings( + const VideoEncoderConfig& encoder_config, + const std::optional& experimental_min_bitrate, + std::vector& layers) { + RTC_DCHECK_LE(layers.size(), encoder_config.simulcast_layers.size()); + + // Allow an experiment to override the minimum bitrate for the lowest + // spatial layer. The experiment's configuration has the lowest priority. 
+ layers[0].min_bitrate_bps = experimental_min_bitrate + .value_or(webrtc::DataRate::BitsPerSec( + webrtc::kDefaultMinVideoBitrateBps)) + .bps(); + + const bool temporal_layers_supported = + IsTemporalLayersSupported(encoder_config.codec_type); + + for (size_t i = 0; i < layers.size(); ++i) { + const VideoStream& overrides = encoder_config.simulcast_layers[i]; + VideoStream& layer = layers[i]; + layer.active = overrides.active; + layer.scalability_mode = overrides.scalability_mode; + layer.scale_resolution_down_to = overrides.scale_resolution_down_to; + // Update with configured num temporal layers if supported by codec. + if (overrides.num_temporal_layers > 0 && temporal_layers_supported) { + layer.num_temporal_layers = *overrides.num_temporal_layers; + } + if (overrides.max_framerate > 0) { + layer.max_framerate = overrides.max_framerate; + } + // Update simulcast bitrates with configured min and max bitrate. + if (overrides.min_bitrate_bps > 0) { + layer.min_bitrate_bps = overrides.min_bitrate_bps; + } + if (overrides.max_bitrate_bps > 0) { + layer.max_bitrate_bps = overrides.max_bitrate_bps; + } + if (overrides.target_bitrate_bps > 0) { + layer.target_bitrate_bps = overrides.target_bitrate_bps; + } + if (overrides.min_bitrate_bps > 0 && overrides.max_bitrate_bps > 0) { + // Min and max bitrate are configured. + // Set target to 3/4 of the max bitrate (or to max if below min). + if (overrides.target_bitrate_bps <= 0) + layer.target_bitrate_bps = layer.max_bitrate_bps * 3 / 4; + if (layer.target_bitrate_bps < layer.min_bitrate_bps) + layer.target_bitrate_bps = layer.max_bitrate_bps; + } else if (overrides.min_bitrate_bps > 0) { + // Only min bitrate is configured, make sure target/max are above min. + layer.target_bitrate_bps = + std::max(layer.target_bitrate_bps, layer.min_bitrate_bps); + layer.max_bitrate_bps = + std::max(layer.max_bitrate_bps, layer.min_bitrate_bps); + } else if (overrides.max_bitrate_bps > 0) { + // Only max bitrate is configured, make sure min/target are below max. + // Keep target bitrate if it is set explicitly in encoding config. + // Otherwise set target bitrate to 3/4 of the max bitrate + // or the one calculated from GetSimulcastConfig() which is larger. + layer.min_bitrate_bps = + std::min(layer.min_bitrate_bps, layer.max_bitrate_bps); + if (overrides.target_bitrate_bps <= 0) { + layer.target_bitrate_bps = + std::max(layer.target_bitrate_bps, layer.max_bitrate_bps * 3 / 4); + } + layer.target_bitrate_bps = + std::clamp(layer.target_bitrate_bps, layer.min_bitrate_bps, + layer.max_bitrate_bps); + } + + if (overrides.max_qp > 0) { + layer.max_qp = overrides.max_qp; + } else if (encoder_config.max_qp > 0) { + layer.max_qp = encoder_config.max_qp; + } else { + layer.max_qp = GetDefaultMaxQp(encoder_config.codec_type); + } + } + + bool is_highest_layer_max_bitrate_configured = + encoder_config.simulcast_layers[layers.size() - 1].max_bitrate_bps > 0; + bool is_screencast = encoder_config.content_type == + webrtc::VideoEncoderConfig::ContentType::kScreen; + if (!is_screencast && !is_highest_layer_max_bitrate_configured && + encoder_config.max_bitrate_bps > 0) { + // No application-configured maximum for the largest layer. + // If there is bitrate leftover, give it to the largest layer. 
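The per-layer override rules above reduce to: a configured value always wins, and an unset target defaults to 3/4 of max, clamped into [min, max]. A worked example with hypothetical numbers (not from the patch), for a layer whose simulcast defaults are {min 150 kbps, target 500 kbps, max 800 kbps} and whose encoding parameters set only max_bitrate_bps = 1200 kbps:

  #include <algorithm>

  int SketchTargetBps() {
    int min_bps = 150'000, target_bps = 500'000, max_bps = 1'200'000;
    min_bps = std::min(min_bps, max_bps);                   // 150'000 (unchanged)
    target_bps = std::max(target_bps, max_bps * 3 / 4);     // 900'000
    target_bps = std::clamp(target_bps, min_bps, max_bps);  // 900'000
    return target_bps;                                      // 3/4 of the new max
  }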
+ webrtc::BoostMaxSimulcastLayer( + webrtc::DataRate::BitsPerSec(encoder_config.max_bitrate_bps), &layers); + } + + // Sort the layers by max_bitrate_bps, they might not always be from + // smallest to biggest + std::vector index(layers.size()); + std::iota(index.begin(), index.end(), 0); + absl::c_stable_sort(index, [&layers](size_t a, size_t b) { + return layers[a].max_bitrate_bps < layers[b].max_bitrate_bps; + }); + + if (!layers[index[0]].active) { + // Adjust min bitrate of the first active layer to allow it to go as low as + // the lowest (now inactive) layer could. + // Otherwise, if e.g. a single HD stream is active, it would have 600kbps + // min bitrate, which would always be allocated to the stream. + // This would lead to congested network, dropped frames and overall bad + // experience. -// TODO(bugs.webrtc.org/8785): Consider removing max_qp as member of -// EncoderStreamFactory and instead set this value individually for each stream -// in the VideoEncoderConfig.simulcast_layers. -EncoderStreamFactory::EncoderStreamFactory(std::string codec_name, - int max_qp, - bool is_screenshare, - bool conference_mode) - : codec_name_(codec_name), - max_qp_(max_qp), - is_screenshare_(is_screenshare), - conference_mode_(conference_mode), - trials_(fallback_trials_), - encoder_info_requested_resolution_alignment_(1) {} + const int min_configured_bitrate = layers[index[0]].min_bitrate_bps; + for (size_t i = 0; i < layers.size(); ++i) { + if (layers[index[i]].active) { + layers[index[i]].min_bitrate_bps = min_configured_bitrate; + break; + } + } + } +} + +} // namespace EncoderStreamFactory::EncoderStreamFactory( - std::string codec_name, - int max_qp, - bool is_screenshare, - bool conference_mode, const webrtc::VideoEncoder::EncoderInfo& encoder_info, - absl::optional restrictions, - const webrtc::FieldTrialsView* trials) - : codec_name_(codec_name), - max_qp_(max_qp), - is_screenshare_(is_screenshare), - conference_mode_(conference_mode), - trials_(trials ? 
*trials : fallback_trials_), - encoder_info_requested_resolution_alignment_( + std::optional restrictions) + : encoder_info_requested_resolution_alignment_( encoder_info.requested_resolution_alignment), restrictions_(restrictions) {} -std::vector EncoderStreamFactory::CreateEncoderStreams( +std::vector EncoderStreamFactory::CreateEncoderStreams( + const FieldTrialsView& trials, int frame_width, int frame_height, - const webrtc::VideoEncoderConfig& encoder_config) { + const VideoEncoderConfig& encoder_config) { RTC_DCHECK_GT(encoder_config.number_of_streams, 0); RTC_DCHECK_GE(encoder_config.simulcast_layers.size(), encoder_config.number_of_streams); - const absl::optional experimental_min_bitrate = - GetExperimentalMinVideoBitrate(encoder_config.codec_type); + const std::optional experimental_min_bitrate = + GetExperimentalMinVideoBitrate(trials, encoder_config.codec_type); bool is_simulcast = (encoder_config.number_of_streams > 1); // If scalability mode was specified, don't treat {active,inactive,inactive} @@ -155,24 +283,32 @@ std::vector EncoderStreamFactory::CreateEncoderStreams( } } - if (is_simulcast || ((absl::EqualsIgnoreCase(codec_name_, kVp8CodecName) || - absl::EqualsIgnoreCase(codec_name_, kH264CodecName)) && - is_screenshare_ && conference_mode_)) { - return CreateSimulcastOrConferenceModeScreenshareStreams( + std::vector streams; + if (is_simulcast || + webrtc::SimulcastUtility::IsConferenceModeScreenshare(encoder_config)) { + streams = CreateSimulcastOrConferenceModeScreenshareStreams( + trials, frame_width, frame_height, encoder_config, + experimental_min_bitrate); + } else { + streams = CreateDefaultVideoStreams( frame_width, frame_height, encoder_config, experimental_min_bitrate); } - return CreateDefaultVideoStreams(frame_width, frame_height, encoder_config, - experimental_min_bitrate); + // The bitrate priority currently implemented on a per-sender level, so we + // just set it for the first simulcast layer. + RTC_DCHECK_GT(streams.size(), 0); + streams[0].bitrate_priority = encoder_config.bitrate_priority; + + return streams; } -std::vector -EncoderStreamFactory::CreateDefaultVideoStreams( +std::vector EncoderStreamFactory::CreateDefaultVideoStreams( int width, int height, - const webrtc::VideoEncoderConfig& encoder_config, - const absl::optional& experimental_min_bitrate) const { - std::vector layers; + const VideoEncoderConfig& encoder_config, + const std::optional& experimental_min_bitrate) const { + bool is_screencast = encoder_config.content_type == + webrtc::VideoEncoderConfig::ContentType::kScreen; // The max bitrate specified by the API. // - `encoder_config.simulcast_layers[0].max_bitrate_bps` comes from the first @@ -180,7 +316,7 @@ EncoderStreamFactory::CreateDefaultVideoStreams( // - `encoder_config.max_bitrate_bps` comes from SDP; "b=AS" or conditionally // "x-google-max-bitrate". // If `api_max_bitrate_bps` has a value then it is positive. - absl::optional api_max_bitrate_bps; + std::optional api_max_bitrate_bps; if (encoder_config.simulcast_layers[0].max_bitrate_bps > 0) { api_max_bitrate_bps = encoder_config.simulcast_layers[0].max_bitrate_bps; } @@ -195,12 +331,11 @@ EncoderStreamFactory::CreateDefaultVideoStreams( int max_bitrate_bps = api_max_bitrate_bps.has_value() ? *api_max_bitrate_bps - : GetMaxDefaultVideoBitrateKbps(width, height, is_screenshare_) * - 1000; + : GetMaxDefaultVideoBitrateKbps(width, height, is_screencast) * 1000; int min_bitrate_bps = experimental_min_bitrate - ? rtc::saturated_cast(experimental_min_bitrate->bps()) + ? 
webrtc::saturated_cast(experimental_min_bitrate->bps()) : webrtc::kDefaultMinVideoBitrateBps; if (encoder_config.simulcast_layers[0].min_bitrate_bps > 0) { // Use set min bitrate. @@ -213,22 +348,22 @@ EncoderStreamFactory::CreateDefaultVideoStreams( ? encoder_config.simulcast_layers[0].max_framerate : kDefaultVideoMaxFramerate; - webrtc::VideoStream layer; + VideoStream layer; layer.width = width; layer.height = height; layer.max_framerate = max_framerate; - layer.requested_resolution = - encoder_config.simulcast_layers[0].requested_resolution; + layer.scale_resolution_down_to = + encoder_config.simulcast_layers[0].scale_resolution_down_to; // Note: VP9 seems to have be sending if any layer is active, // (see `UpdateSendState`) and still use parameters only from // encoder_config.simulcast_layers[0]. layer.active = absl::c_any_of(encoder_config.simulcast_layers, [](const auto& layer) { return layer.active; }); - if (encoder_config.simulcast_layers[0].requested_resolution) { - auto res = GetLayerResolutionFromRequestedResolution( + if (encoder_config.simulcast_layers[0].scale_resolution_down_to) { + auto res = GetLayerResolutionFromScaleResolutionDownTo( width, height, - *encoder_config.simulcast_layers[0].requested_resolution); + *encoder_config.simulcast_layers[0].scale_resolution_down_to); layer.width = res.width; layer.height = res.height; } else if (encoder_config.simulcast_layers[0].scale_resolution_down_by > 1.) { @@ -242,11 +377,11 @@ EncoderStreamFactory::CreateDefaultVideoStreams( kMinLayerSize); } - if (absl::EqualsIgnoreCase(codec_name_, kVp9CodecName)) { + if (encoder_config.codec_type == webrtc::VideoCodecType::kVideoCodecVP9) { RTC_DCHECK(encoder_config.encoder_specific_settings); // Use VP9 SVC layering from codec settings which might be initialized // though field trial in ConfigureVideoEncoderSettings. - webrtc::VideoCodecVP9 vp9_settings; + VideoCodecVP9 vp9_settings; encoder_config.encoder_specific_settings->FillVideoCodecVp9(&vp9_settings); layer.num_temporal_layers = vp9_settings.numberOfTemporalLayers; @@ -264,7 +399,7 @@ EncoderStreamFactory::CreateDefaultVideoStreams( std::vector svc_layers = webrtc::GetSvcConfig(width, height, max_framerate, /*first_active_layer=*/0, num_spatial_layers, - *layer.num_temporal_layers, is_screenshare_); + *layer.num_temporal_layers, is_screencast); int sum_max_bitrates_kbps = 0; for (const webrtc::SpatialLayer& spatial_layer : svc_layers) { sum_max_bitrates_kbps += spatial_layer.maxBitrate; @@ -272,7 +407,8 @@ EncoderStreamFactory::CreateDefaultVideoStreams( RTC_DCHECK_GE(sum_max_bitrates_kbps, 0); if (!api_max_bitrate_bps.has_value()) { max_bitrate_bps = sum_max_bitrates_kbps * 1000; - } else { + } else if (encoder_config.simulcast_layers[0].max_bitrate_bps <= 0) { + // Encoding max bitrate is kept if configured. max_bitrate_bps = std::min(max_bitrate_bps, sum_max_bitrates_kbps * 1000); } @@ -290,10 +426,15 @@ EncoderStreamFactory::CreateDefaultVideoStreams( encoder_config.simulcast_layers[0].target_bitrate_bps, max_bitrate_bps); } layer.max_bitrate_bps = max_bitrate_bps; - layer.max_qp = max_qp_; layer.bitrate_priority = encoder_config.bitrate_priority; - if (IsTemporalLayersSupported(codec_name_)) { + if (encoder_config.max_qp > 0) { + layer.max_qp = encoder_config.max_qp; + } else { + layer.max_qp = GetDefaultMaxQp(encoder_config.codec_type); + } + + if (IsTemporalLayersSupported(encoder_config.codec_type)) { // Use configured number of temporal layers if set. 
if (encoder_config.simulcast_layers[0].num_temporal_layers) { layer.num_temporal_layers = @@ -301,185 +442,66 @@ EncoderStreamFactory::CreateDefaultVideoStreams( } } layer.scalability_mode = encoder_config.simulcast_layers[0].scalability_mode; - layers.push_back(layer); - return layers; + return {layer}; } -std::vector +std::vector EncoderStreamFactory::CreateSimulcastOrConferenceModeScreenshareStreams( + const FieldTrialsView& trials, int width, int height, - const webrtc::VideoEncoderConfig& encoder_config, - const absl::optional& experimental_min_bitrate) const { - std::vector layers; + const VideoEncoderConfig& encoder_config, + const std::optional& experimental_min_bitrate) const { + std::vector resolutions = + GetStreamResolutions(trials, width, height, encoder_config); - const bool temporal_layers_supported = IsTemporalLayersSupported(codec_name_); // Use legacy simulcast screenshare if conference mode is explicitly enabled // or use the regular simulcast configuration path which is generic. - layers = GetSimulcastConfig(FindRequiredActiveLayers(encoder_config), - encoder_config.number_of_streams, width, height, - encoder_config.bitrate_priority, max_qp_, - is_screenshare_ && conference_mode_, - temporal_layers_supported, trials_); - // Allow an experiment to override the minimum bitrate for the lowest - // spatial layer. The experiment's configuration has the lowest priority. - if (experimental_min_bitrate) { - layers[0].min_bitrate_bps = - rtc::saturated_cast(experimental_min_bitrate->bps()); - } - // Update the active simulcast layers and configured bitrates. - bool is_highest_layer_max_bitrate_configured = false; - const bool has_scale_resolution_down_by = absl::c_any_of( - encoder_config.simulcast_layers, [](const webrtc::VideoStream& layer) { - return layer.scale_resolution_down_by != -1.; - }); - - bool default_scale_factors_used = true; - if (has_scale_resolution_down_by) { - default_scale_factors_used = IsScaleFactorsPowerOfTwo(encoder_config); - } - const bool norm_size_configured = - webrtc::NormalizeSimulcastSizeExperiment::GetBase2Exponent().has_value(); - const int normalized_width = - (default_scale_factors_used || norm_size_configured) && - (width >= kMinLayerSize) - ? NormalizeSimulcastSize(width, encoder_config.number_of_streams) - : width; - const int normalized_height = - (default_scale_factors_used || norm_size_configured) && - (height >= kMinLayerSize) - ? NormalizeSimulcastSize(height, encoder_config.number_of_streams) - : height; - for (size_t i = 0; i < layers.size(); ++i) { - layers[i].active = encoder_config.simulcast_layers[i].active; - layers[i].scalability_mode = - encoder_config.simulcast_layers[i].scalability_mode; - layers[i].requested_resolution = - encoder_config.simulcast_layers[i].requested_resolution; - // Update with configured num temporal layers if supported by codec. 
- if (encoder_config.simulcast_layers[i].num_temporal_layers && - IsTemporalLayersSupported(codec_name_)) { - layers[i].num_temporal_layers = - *encoder_config.simulcast_layers[i].num_temporal_layers; - } - if (encoder_config.simulcast_layers[i].max_framerate > 0) { - layers[i].max_framerate = - encoder_config.simulcast_layers[i].max_framerate; - } - if (encoder_config.simulcast_layers[i].requested_resolution.has_value()) { - auto res = GetLayerResolutionFromRequestedResolution( - normalized_width, normalized_height, - *encoder_config.simulcast_layers[i].requested_resolution); - layers[i].width = res.width; - layers[i].height = res.height; - } else if (has_scale_resolution_down_by) { - const double scale_resolution_down_by = std::max( - encoder_config.simulcast_layers[i].scale_resolution_down_by, 1.0); - layers[i].width = ScaleDownResolution( - normalized_width, scale_resolution_down_by, kMinLayerSize); - layers[i].height = ScaleDownResolution( - normalized_height, scale_resolution_down_by, kMinLayerSize); - } - // Update simulcast bitrates with configured min and max bitrate. - if (encoder_config.simulcast_layers[i].min_bitrate_bps > 0) { - layers[i].min_bitrate_bps = - encoder_config.simulcast_layers[i].min_bitrate_bps; - } - if (encoder_config.simulcast_layers[i].max_bitrate_bps > 0) { - layers[i].max_bitrate_bps = - encoder_config.simulcast_layers[i].max_bitrate_bps; - } - if (encoder_config.simulcast_layers[i].target_bitrate_bps > 0) { - layers[i].target_bitrate_bps = - encoder_config.simulcast_layers[i].target_bitrate_bps; - } - if (encoder_config.simulcast_layers[i].min_bitrate_bps > 0 && - encoder_config.simulcast_layers[i].max_bitrate_bps > 0) { - // Min and max bitrate are configured. - // Set target to 3/4 of the max bitrate (or to max if below min). - if (encoder_config.simulcast_layers[i].target_bitrate_bps <= 0) - layers[i].target_bitrate_bps = layers[i].max_bitrate_bps * 3 / 4; - if (layers[i].target_bitrate_bps < layers[i].min_bitrate_bps) - layers[i].target_bitrate_bps = layers[i].max_bitrate_bps; - } else if (encoder_config.simulcast_layers[i].min_bitrate_bps > 0) { - // Only min bitrate is configured, make sure target/max are above min. - layers[i].target_bitrate_bps = - std::max(layers[i].target_bitrate_bps, layers[i].min_bitrate_bps); - layers[i].max_bitrate_bps = - std::max(layers[i].max_bitrate_bps, layers[i].min_bitrate_bps); - } else if (encoder_config.simulcast_layers[i].max_bitrate_bps > 0) { - // Only max bitrate is configured, make sure min/target are below max. - // Keep target bitrate if it is set explicitly in encoding config. - // Otherwise set target bitrate to 3/4 of the max bitrate - // or the one calculated from GetSimulcastConfig() which is larger. - layers[i].min_bitrate_bps = - std::min(layers[i].min_bitrate_bps, layers[i].max_bitrate_bps); - if (encoder_config.simulcast_layers[i].target_bitrate_bps <= 0) { - layers[i].target_bitrate_bps = std::max( - layers[i].target_bitrate_bps, layers[i].max_bitrate_bps * 3 / 4); - } - layers[i].target_bitrate_bps = std::max( - std::min(layers[i].target_bitrate_bps, layers[i].max_bitrate_bps), - layers[i].min_bitrate_bps); - } - if (i == layers.size() - 1) { - is_highest_layer_max_bitrate_configured = - encoder_config.simulcast_layers[i].max_bitrate_bps > 0; - } - } - if (!is_screenshare_ && !is_highest_layer_max_bitrate_configured && - encoder_config.max_bitrate_bps > 0) { - // No application-configured maximum for the largest layer. - // If there is bitrate leftover, give it to the largest layer. 
- BoostMaxSimulcastLayer( - webrtc::DataRate::BitsPerSec(encoder_config.max_bitrate_bps), &layers); - } - - // Sort the layers by max_bitrate_bps, they might not always be from - // smallest to biggest - std::vector index(layers.size()); - std::iota(index.begin(), index.end(), 0); - std::stable_sort(index.begin(), index.end(), [&layers](size_t a, size_t b) { - return layers[a].max_bitrate_bps < layers[b].max_bitrate_bps; - }); + std::vector layers = webrtc::GetSimulcastConfig( + resolutions, + webrtc::SimulcastUtility::IsConferenceModeScreenshare(encoder_config), + IsTemporalLayersSupported(encoder_config.codec_type), trials, + encoder_config.codec_type); - if (!layers[index[0]].active) { - // Adjust min bitrate of the first active layer to allow it to go as low as - // the lowest (now inactive) layer could. - // Otherwise, if e.g. a single HD stream is active, it would have 600kbps - // min bitrate, which would always be allocated to the stream. - // This would lead to congested network, dropped frames and overall bad - // experience. - - const int min_configured_bitrate = layers[index[0]].min_bitrate_bps; - for (size_t i = 0; i < layers.size(); ++i) { - if (layers[index[i]].active) { - layers[index[i]].min_bitrate_bps = min_configured_bitrate; - break; - } - } - } + OverrideStreamSettings(encoder_config, experimental_min_bitrate, layers); return layers; } -webrtc::Resolution -EncoderStreamFactory::GetLayerResolutionFromRequestedResolution( +Resolution EncoderStreamFactory::GetLayerResolutionFromScaleResolutionDownTo( int frame_width, int frame_height, - webrtc::Resolution requested_resolution) const { + Resolution scale_resolution_down_to) const { + // Make frame and `scale_resolution_down_to` have matching orientation. + if ((frame_width < frame_height) != + (scale_resolution_down_to.width < scale_resolution_down_to.height)) { + scale_resolution_down_to = {.width = scale_resolution_down_to.height, + .height = scale_resolution_down_to.width}; + } + // Downscale by smallest scaling factor, if necessary. + if (frame_width > 0 && frame_height > 0 && + (scale_resolution_down_to.width < frame_width || + scale_resolution_down_to.height < frame_height)) { + double scale_factor = std::min( + scale_resolution_down_to.width / static_cast(frame_width), + scale_resolution_down_to.height / static_cast(frame_height)); + frame_width = std::round(frame_width * scale_factor); + frame_height = std::round(frame_height * scale_factor); + } + Resolution frame = {.width = frame_width, .height = frame_height}; + + // Maybe adapt further based on restrictions and encoder alignment. 
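The two steps above first make the requested resolution's orientation follow the frame and then downscale by the smaller of the two per-axis factors, so the input aspect ratio is preserved and neither axis exceeds the request. A sketch of the arithmetic with hypothetical numbers (not from the patch): a 1280x720 frame with scale_resolution_down_to = 480x640 becomes 640x360.

  #include <algorithm>
  #include <cmath>
  #include <utility>

  // Returns {640, 360} for SketchScaleDownTo(1280, 720, 480, 640).
  std::pair<int, int> SketchScaleDownTo(int frame_w, int frame_h, int to_w, int to_h) {
    if ((frame_w < frame_h) != (to_w < to_h)) {
      std::swap(to_w, to_h);  // match the frame's orientation: 480x640 -> 640x480
    }
    double scale = std::min(to_w / static_cast<double>(frame_w),   // 0.5
                            to_h / static_cast<double>(frame_h));  // vs ~0.667
    if (scale < 1.0) {  // only ever scale down
      frame_w = static_cast<int>(std::round(frame_w * scale));  // 640
      frame_h = static_cast<int>(std::round(frame_h * scale));  // 360
    }
    return {frame_w, frame_h};
  }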
VideoAdapter adapter(encoder_info_requested_resolution_alignment_); - adapter.OnOutputFormatRequest(requested_resolution.ToPair(), - requested_resolution.PixelCount(), - absl::nullopt); + adapter.OnOutputFormatRequest(frame.ToPair(), frame.PixelCount(), + std::nullopt); if (restrictions_) { - rtc::VideoSinkWants wants; + VideoSinkWants wants; wants.is_active = true; wants.target_pixel_count = restrictions_->target_pixels_per_frame(); - wants.max_pixel_count = - rtc::dchecked_cast(restrictions_->max_pixels_per_frame().value_or( + wants.max_pixel_count = webrtc::dchecked_cast( + restrictions_->max_pixels_per_frame().value_or( std::numeric_limits::max())); - wants.aggregates.emplace(rtc::VideoSinkWants::Aggregates()); + wants.aggregates.emplace(VideoSinkWants::Aggregates()); wants.resolution_alignment = encoder_info_requested_resolution_alignment_; adapter.OnSinkWants(wants); } @@ -493,4 +515,108 @@ EncoderStreamFactory::GetLayerResolutionFromRequestedResolution( return {.width = out_width, .height = out_height}; } -} // namespace cricket +std::vector EncoderStreamFactory::GetStreamResolutions( + const FieldTrialsView& trials, + int width, + int height, + const VideoEncoderConfig& encoder_config) const { + std::vector resolutions; + if (webrtc::SimulcastUtility::IsConferenceModeScreenshare(encoder_config)) { + for (size_t i = 0; i < encoder_config.number_of_streams; ++i) { + resolutions.push_back({.width = width, .height = height}); + } + } else { + size_t min_num_layers = FindRequiredActiveLayers(encoder_config); + size_t max_num_layers = + !encoder_config.HasScaleResolutionDownTo() + ? webrtc::LimitSimulcastLayerCount( + min_num_layers, encoder_config.number_of_streams, width, + height, trials, encoder_config.codec_type) + : encoder_config.number_of_streams; + RTC_DCHECK_LE(max_num_layers, encoder_config.number_of_streams); + + // When the `scale_resolution_down_to` API is used, disable upper layers + // that are bigger than what adaptation restrictions allow. For example if + // restrictions are 540p, simulcast 180p:360p:720p becomes 180p:360p:- as + // opposed to 180p:360p:540p. This makes CPU adaptation consistent with BW + // adaptation (bitrate allocator disabling layers rather than downscaling) + // and means we don't have to break power of two optimization paths (i.e. + // S-modes based simulcast). Note that the lowest layer is never disabled. + if (encoder_config.HasScaleResolutionDownTo() && + restrictions_.has_value() && + restrictions_->max_pixels_per_frame().has_value()) { + int max_pixels = webrtc::dchecked_cast( + restrictions_->max_pixels_per_frame().value()); + int prev_pixel_count = + encoder_config.simulcast_layers[0] + .scale_resolution_down_to.value_or(Resolution()) + .PixelCount(); + std::optional restricted_num_layers; + for (size_t i = 1; i < max_num_layers; ++i) { + int pixel_count = encoder_config.simulcast_layers[i] + .scale_resolution_down_to.value_or(Resolution()) + .PixelCount(); + if (!restricted_num_layers.has_value() && max_pixels < pixel_count) { + // Current layer is the highest layer allowed by restrictions. + restricted_num_layers = i; + } + if (pixel_count < prev_pixel_count) { + // Cannot limit layers because config is not lower-to-higher. 
+ restricted_num_layers = std::nullopt; + break; + } + prev_pixel_count = pixel_count; + } + max_num_layers = restricted_num_layers.value_or(max_num_layers); + } + + const bool has_scale_resolution_down_by = absl::c_any_of( + encoder_config.simulcast_layers, [](const webrtc::VideoStream& layer) { + return layer.scale_resolution_down_by != -1.; + }); + + bool default_scale_factors_used = true; + if (has_scale_resolution_down_by) { + default_scale_factors_used = IsScaleFactorsPowerOfTwo(encoder_config); + } + + const bool norm_size_configured = + webrtc::NormalizeSimulcastSizeExperiment::GetBase2Exponent(trials) + .has_value(); + const int normalized_width = + (default_scale_factors_used || norm_size_configured) && + (width >= kMinLayerSize) + ? NormalizeSimulcastSize(trials, width, max_num_layers) + : width; + const int normalized_height = + (default_scale_factors_used || norm_size_configured) && + (height >= kMinLayerSize) + ? NormalizeSimulcastSize(trials, height, max_num_layers) + : height; + + resolutions.resize(max_num_layers); + for (size_t i = 0; i < max_num_layers; i++) { + if (encoder_config.simulcast_layers[i] + .scale_resolution_down_to.has_value()) { + resolutions[i] = GetLayerResolutionFromScaleResolutionDownTo( + normalized_width, normalized_height, + *encoder_config.simulcast_layers[i].scale_resolution_down_to); + } else if (has_scale_resolution_down_by) { + const double scale_resolution_down_by = std::max( + encoder_config.simulcast_layers[i].scale_resolution_down_by, 1.0); + resolutions[i].width = ScaleDownResolution( + normalized_width, scale_resolution_down_by, kMinLayerSize); + resolutions[i].height = ScaleDownResolution( + normalized_height, scale_resolution_down_by, kMinLayerSize); + } else { + // Resolutions with default 1/2 scale factor, from low to high. + resolutions[i].width = normalized_width >> (max_num_layers - i - 1); + resolutions[i].height = normalized_height >> (max_num_layers - i - 1); + } + } + } + + return resolutions; +} + +} // namespace webrtc diff --git a/video/config/encoder_stream_factory.h b/video/config/encoder_stream_factory.h index 37abb93876..192aac6113 100644 --- a/video/config/encoder_stream_factory.h +++ b/video/config/encoder_stream_factory.h @@ -13,68 +13,64 @@ #include #include -#include "api/transport/field_trial_based_config.h" +#include "api/field_trials_view.h" #include "api/units/data_rate.h" #include "api/video_codecs/video_encoder.h" #include "call/adaptation/video_source_restrictions.h" #include "video/config/video_encoder_config.h" -namespace cricket { +namespace webrtc { class EncoderStreamFactory : public webrtc::VideoEncoderConfig::VideoStreamFactoryInterface { public: - // Note: this constructor is used by testcase in downstream. 
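Note on GetStreamResolutions in the .cc hunk above: when scale_resolution_down_to is in use and adaptation restricts max_pixels_per_frame, upper layers whose requested pixel count exceeds the restriction are dropped, but only when the per-layer requests are ordered low to high; otherwise the layer count is left untouched (see the Reverse... unit test later in this patch), and the lowest layer is never dropped. A sketch of just that counting step, with hypothetical names:

#include <cstddef>
#include <optional>
#include <vector>

// Hypothetical stand-in for the per-layer scale_resolution_down_to requests;
// in the patch these are webrtc::Resolution values.
struct Px {
  int width;
  int height;
  int Count() const { return width * height; }
};

// Returns how many of `requests` (assumed ordered low to high) fit under
// `max_pixels`; returns requests.size() when that ordering does not hold.
std::size_t LayersAllowedByRestriction(const std::vector<Px>& requests,
                                       int max_pixels) {
  if (requests.empty()) return 0;
  std::optional<std::size_t> restricted;
  int prev = requests[0].Count();
  for (std::size_t i = 1; i < requests.size(); ++i) {
    const int px = requests[i].Count();
    if (!restricted.has_value() && max_pixels < px)
      restricted = i;  // First layer above the restriction.
    if (px < prev)
      return requests.size();  // Not low-to-high ordered: do not limit.
    prev = px;
  }
  return restricted.value_or(requests.size());
}
// {320x180, 640x360, 1280x720} with max_pixels = 640 * 360 keeps 2 layers; the
// lowest layer is never dropped because the scan starts at index 1.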
- EncoderStreamFactory(std::string codec_name, - int max_qp, - bool is_screenshare, - bool conference_mode); - - EncoderStreamFactory(std::string codec_name, - int max_qp, - bool is_screenshare, - bool conference_mode, - const webrtc::VideoEncoder::EncoderInfo& encoder_info, - absl::optional - restrictions = absl::nullopt, - const webrtc::FieldTrialsView* trials = nullptr); + EncoderStreamFactory( + const webrtc::VideoEncoder::EncoderInfo& encoder_info, + std::optional restrictions = std::nullopt); - std::vector CreateEncoderStreams( + std::vector CreateEncoderStreams( + const FieldTrialsView& trials, int width, int height, - const webrtc::VideoEncoderConfig& encoder_config) override; + const VideoEncoderConfig& encoder_config) override; private: - std::vector CreateDefaultVideoStreams( + std::vector CreateDefaultVideoStreams( int width, int height, - const webrtc::VideoEncoderConfig& encoder_config, - const absl::optional& experimental_min_bitrate) const; + const VideoEncoderConfig& encoder_config, + const std::optional& experimental_min_bitrate) const; - std::vector - CreateSimulcastOrConferenceModeScreenshareStreams( + std::vector CreateSimulcastOrConferenceModeScreenshareStreams( + const FieldTrialsView& trials, int width, int height, - const webrtc::VideoEncoderConfig& encoder_config, - const absl::optional& experimental_min_bitrate) const; + const VideoEncoderConfig& encoder_config, + const std::optional& experimental_min_bitrate) const; - webrtc::Resolution GetLayerResolutionFromRequestedResolution( + Resolution GetLayerResolutionFromScaleResolutionDownTo( int in_frame_width, int in_frame_height, - webrtc::Resolution requested_resolution) const; + Resolution scale_resolution_down_to) const; + + std::vector GetStreamResolutions( + const FieldTrialsView& trials, + int width, + int height, + const VideoEncoderConfig& encoder_config) const; - const std::string codec_name_; - const int max_qp_; - const bool is_screenshare_; - // Allows a screenshare specific configuration, which enables temporal - // layering and various settings. - const bool conference_mode_; - const webrtc::FieldTrialBasedConfig fallback_trials_; - const webrtc::FieldTrialsView& trials_; const int encoder_info_requested_resolution_alignment_; - const absl::optional restrictions_; + const std::optional restrictions_; }; +} // namespace webrtc + +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. 
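With the header change above, the factory is constructed from the encoder info plus optional source restrictions only, and the field trials are passed per call to CreateEncoderStreams. A hedged usage sketch; it assumes the element types stripped from this extract (webrtc::VideoStream for the returned streams, webrtc::VideoSourceRestrictions for the optional) and that make_ref_counted resolves in namespace webrtc, as the unit test later in this patch relies on:

#include <optional>
#include <vector>

#include "api/field_trials_view.h"
#include "api/make_ref_counted.h"
#include "api/video_codecs/video_encoder.h"
#include "video/config/encoder_stream_factory.h"
#include "video/config/video_encoder_config.h"

std::vector<webrtc::VideoStream> MakeStreams(
    const webrtc::FieldTrialsView& trials,
    const webrtc::VideoEncoderConfig& encoder_config,
    const webrtc::VideoEncoder::EncoderInfo& encoder_info) {
  auto factory = webrtc::make_ref_counted<webrtc::EncoderStreamFactory>(
      encoder_info, /*restrictions=*/std::nullopt);
  return factory->CreateEncoderStreams(trials, /*width=*/1280, /*height=*/720,
                                       encoder_config);
}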
+#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::EncoderStreamFactory; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // VIDEO_CONFIG_ENCODER_STREAM_FACTORY_H_ diff --git a/video/config/encoder_stream_factory_unittest.cc b/video/config/encoder_stream_factory_unittest.cc index b37b300c96..a36efaab01 100644 --- a/video/config/encoder_stream_factory_unittest.cc +++ b/video/config/encoder_stream_factory_unittest.cc @@ -10,74 +10,551 @@ #include "video/config/encoder_stream_factory.h" +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/field_trials_view.h" +#include "api/make_ref_counted.h" +#include "api/video/resolution.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" #include "call/adaptation/video_source_restrictions.h" +#include "rtc_base/experiments/min_video_bitrate_experiment.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "test/explicit_key_value_config.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "video/config/video_encoder_config.h" namespace webrtc { +namespace { +using test::ExplicitKeyValueConfig; +using ::testing::Combine; +using ::testing::ElementsAre; +using ::testing::IsEmpty; +using ::testing::Not; +using ::testing::SizeIs; +using ::testing::TestWithParam; +using ::testing::Values; +using ::webrtc::EncoderStreamFactory; -using cricket::EncoderStreamFactory; -constexpr int kMaxQp = 48; +struct CreateVideoStreamParams { + int width = 0; + int height = 0; + int max_framerate_fps = -1; + int min_bitrate_bps = -1; + int target_bitrate_bps = -1; + int max_bitrate_bps = -1; + int scale_resolution_down_by = -1; + std::optional scalability_mode; +}; -namespace { +// A helper function that creates `VideoStream` with given settings. 
+VideoStream CreateVideoStream(const CreateVideoStreamParams& params) { + VideoStream stream; + stream.width = params.width; + stream.height = params.height; + stream.max_framerate = params.max_framerate_fps; + stream.min_bitrate_bps = params.min_bitrate_bps; + stream.target_bitrate_bps = params.target_bitrate_bps; + stream.max_bitrate_bps = params.max_bitrate_bps; + stream.scale_resolution_down_by = params.scale_resolution_down_by; + stream.scalability_mode = params.scalability_mode; + return stream; +} std::vector GetStreamResolutions( const std::vector& streams) { std::vector res; for (const auto& s : streams) { - if (s.active) { - res.push_back( - {rtc::checked_cast(s.width), rtc::checked_cast(s.height)}); - } + res.push_back({checked_cast(s.width), checked_cast(s.height)}); } return res; } -VideoStream LayerWithRequestedResolution(Resolution res) { - VideoStream s; - s.requested_resolution = res; - return s; +std::vector CreateEncoderStreams( + const FieldTrialsView& field_trials, + const Resolution& resolution, + const VideoEncoderConfig& encoder_config, + std::optional restrictions = std::nullopt) { + VideoEncoder::EncoderInfo encoder_info; + auto factory = + make_ref_counted(encoder_info, restrictions); + return factory->CreateEncoderStreams(field_trials, resolution.width, + resolution.height, encoder_config); } } // namespace -TEST(EncoderStreamFactory, SinglecastRequestedResolution) { - VideoEncoder::EncoderInfo encoder_info; - auto factory = rtc::make_ref_counted( - "VP8", kMaxQp, - /* is_screenshare= */ false, - /* conference_mode= */ false, encoder_info); +TEST(EncoderStreamFactory, SinglecastScaleResolutionDownTo) { + ExplicitKeyValueConfig field_trials(""); VideoEncoderConfig encoder_config; encoder_config.number_of_streams = 1; - encoder_config.simulcast_layers.push_back( - LayerWithRequestedResolution({.width = 640, .height = 360})); - auto streams = factory->CreateEncoderStreams(1280, 720, encoder_config); - EXPECT_EQ(streams[0].requested_resolution, + encoder_config.simulcast_layers.resize(1); + encoder_config.simulcast_layers[0].scale_resolution_down_to = {.width = 640, + .height = 360}; + auto streams = CreateEncoderStreams( + field_trials, {.width = 1280, .height = 720}, encoder_config); + EXPECT_EQ(streams[0].scale_resolution_down_to, (Resolution{.width = 640, .height = 360})); EXPECT_EQ(GetStreamResolutions(streams), (std::vector{ {.width = 640, .height = 360}, })); } -TEST(EncoderStreamFactory, SinglecastRequestedResolutionWithAdaptation) { +TEST(EncoderStreamFactory, SinglecastScaleResolutionDownToWithAdaptation) { + ExplicitKeyValueConfig field_trials(""); VideoSourceRestrictions restrictions( /* max_pixels_per_frame= */ (320 * 320), - /* target_pixels_per_frame= */ absl::nullopt, - /* max_frame_rate= */ absl::nullopt); - VideoEncoder::EncoderInfo encoder_info; - auto factory = rtc::make_ref_counted( - "VP8", kMaxQp, - /* is_screenshare= */ false, - /* conference_mode= */ false, encoder_info, restrictions); + /* target_pixels_per_frame= */ std::nullopt, + /* max_frame_rate= */ std::nullopt); VideoEncoderConfig encoder_config; encoder_config.number_of_streams = 1; - encoder_config.simulcast_layers.push_back( - LayerWithRequestedResolution({.width = 640, .height = 360})); - auto streams = factory->CreateEncoderStreams(1280, 720, encoder_config); - EXPECT_EQ(streams[0].requested_resolution, + encoder_config.simulcast_layers.resize(1); + encoder_config.simulcast_layers[0].scale_resolution_down_to = {.width = 640, + .height = 360}; + auto streams = + 
CreateEncoderStreams(field_trials, {.width = 1280, .height = 720}, + encoder_config, restrictions); + EXPECT_EQ(streams[0].scale_resolution_down_to, (Resolution{.width = 640, .height = 360})); EXPECT_EQ(GetStreamResolutions(streams), (std::vector{ {.width = 320, .height = 180}, })); } +TEST(EncoderStreamFactory, SimulcastScaleResolutionDownToUnrestricted) { + ExplicitKeyValueConfig field_trials(""); + VideoEncoderConfig encoder_config; + encoder_config.number_of_streams = 3; + encoder_config.simulcast_layers.resize(3); + encoder_config.simulcast_layers[0].scale_resolution_down_to = {.width = 320, + .height = 180}; + encoder_config.simulcast_layers[1].scale_resolution_down_to = {.width = 640, + .height = 360}; + encoder_config.simulcast_layers[2].scale_resolution_down_to = {.width = 1280, + .height = 720}; + auto streams = CreateEncoderStreams( + field_trials, {.width = 1280, .height = 720}, encoder_config); + std::vector stream_resolutions = GetStreamResolutions(streams); + ASSERT_THAT(stream_resolutions, SizeIs(3)); + EXPECT_EQ(stream_resolutions[0], (Resolution{.width = 320, .height = 180})); + EXPECT_EQ(stream_resolutions[1], (Resolution{.width = 640, .height = 360})); + EXPECT_EQ(stream_resolutions[2], (Resolution{.width = 1280, .height = 720})); +} + +TEST(EncoderStreamFactory, SimulcastScaleResolutionDownToWith360pRestriction) { + ExplicitKeyValueConfig field_trials(""); + VideoSourceRestrictions restrictions( + /* max_pixels_per_frame= */ (640 * 360), + /* target_pixels_per_frame= */ std::nullopt, + /* max_frame_rate= */ std::nullopt); + VideoEncoderConfig encoder_config; + encoder_config.number_of_streams = 3; + encoder_config.simulcast_layers.resize(3); + encoder_config.simulcast_layers[0].scale_resolution_down_to = {.width = 320, + .height = 180}; + encoder_config.simulcast_layers[1].scale_resolution_down_to = {.width = 640, + .height = 360}; + encoder_config.simulcast_layers[2].scale_resolution_down_to = {.width = 1280, + .height = 720}; + auto streams = + CreateEncoderStreams(field_trials, {.width = 1280, .height = 720}, + encoder_config, restrictions); + std::vector stream_resolutions = GetStreamResolutions(streams); + // 720p layer is dropped due to 360p restrictions. + ASSERT_THAT(stream_resolutions, SizeIs(2)); + EXPECT_EQ(stream_resolutions[0], (Resolution{.width = 320, .height = 180})); + EXPECT_EQ(stream_resolutions[1], (Resolution{.width = 640, .height = 360})); +} + +TEST(EncoderStreamFactory, SimulcastScaleResolutionDownToWith90pRestriction) { + ExplicitKeyValueConfig field_trials(""); + VideoSourceRestrictions restrictions( + /* max_pixels_per_frame= */ (160 * 90), + /* target_pixels_per_frame= */ std::nullopt, + /* max_frame_rate= */ std::nullopt); + VideoEncoderConfig encoder_config; + encoder_config.number_of_streams = 3; + encoder_config.simulcast_layers.resize(3); + encoder_config.simulcast_layers[0].scale_resolution_down_to = {.width = 320, + .height = 180}; + encoder_config.simulcast_layers[1].scale_resolution_down_to = {.width = 640, + .height = 360}; + encoder_config.simulcast_layers[2].scale_resolution_down_to = {.width = 1280, + .height = 720}; + auto streams = + CreateEncoderStreams(field_trials, {.width = 1280, .height = 720}, + encoder_config, restrictions); + std::vector stream_resolutions = GetStreamResolutions(streams); + ASSERT_THAT(stream_resolutions, SizeIs(1)); + // 90p restriction means all but the first layer (180p) is dropped. The one + // and only layer is downsized to 90p. 
+ EXPECT_EQ(stream_resolutions[0], (Resolution{.width = 160, .height = 90})); +} + +TEST(EncoderStreamFactory, + ReverseSimulcastScaleResolutionDownToWithRestriction) { + ExplicitKeyValueConfig field_trials(""); + VideoSourceRestrictions restrictions( + /* max_pixels_per_frame= */ (640 * 360), + /* target_pixels_per_frame= */ std::nullopt, + /* max_frame_rate= */ std::nullopt); + VideoEncoderConfig encoder_config; + encoder_config.number_of_streams = 3; + encoder_config.simulcast_layers.resize(3); + // 720p, 360p, 180p (instead of the usual 180p, 360p, 720p). + encoder_config.simulcast_layers[0].scale_resolution_down_to = {.width = 1280, + .height = 720}; + encoder_config.simulcast_layers[1].scale_resolution_down_to = {.width = 640, + .height = 360}; + encoder_config.simulcast_layers[2].scale_resolution_down_to = {.width = 320, + .height = 180}; + auto streams = + CreateEncoderStreams(field_trials, {.width = 1280, .height = 720}, + encoder_config, restrictions); + std::vector stream_resolutions = GetStreamResolutions(streams); + // The layer dropping that is performed for lower-to-higher ordered simulcast + // streams is not applicable when higher-to-lower order is used. In this case + // the 360p restriction is applied to all layers. + ASSERT_THAT(stream_resolutions, SizeIs(3)); + EXPECT_EQ(stream_resolutions[0], (Resolution{.width = 640, .height = 360})); + EXPECT_EQ(stream_resolutions[1], (Resolution{.width = 640, .height = 360})); + EXPECT_EQ(stream_resolutions[2], (Resolution{.width = 320, .height = 180})); +} + +TEST(EncoderStreamFactory, BitratePriority) { + constexpr double kBitratePriority = 0.123; + VideoEncoderConfig encoder_config; + encoder_config.number_of_streams = 2; + encoder_config.simulcast_layers.resize(encoder_config.number_of_streams); + encoder_config.bitrate_priority = kBitratePriority; + auto streams = CreateEncoderStreams( + /*field_trials=*/ExplicitKeyValueConfig(""), + {.width = 640, .height = 360}, encoder_config); + ASSERT_THAT(streams, SizeIs(2)); + EXPECT_EQ(streams[0].bitrate_priority, kBitratePriority); + EXPECT_FALSE(streams[1].bitrate_priority); +} + +TEST(EncoderStreamFactory, SetsMinBitrateToDefaultValue) { + VideoEncoder::EncoderInfo encoder_info; + auto factory = make_ref_counted(encoder_info); + VideoEncoderConfig encoder_config; + encoder_config.number_of_streams = 2; + encoder_config.simulcast_layers.resize(encoder_config.number_of_streams); + auto streams = factory->CreateEncoderStreams(ExplicitKeyValueConfig(""), 1920, + 1080, encoder_config); + ASSERT_THAT(streams, Not(IsEmpty())); + EXPECT_EQ(streams[0].min_bitrate_bps, kDefaultMinVideoBitrateBps); +} + +TEST(EncoderStreamFactory, SetsMinBitrateToExperimentalValue) { + VideoEncoder::EncoderInfo encoder_info; + auto factory = make_ref_counted(encoder_info); + VideoEncoderConfig encoder_config; + encoder_config.number_of_streams = 2; + encoder_config.simulcast_layers.resize(encoder_config.number_of_streams); + auto streams = factory->CreateEncoderStreams( + ExplicitKeyValueConfig("WebRTC-Video-MinVideoBitrate/Enabled,br:1kbps/"), + 1920, 1080, encoder_config); + ASSERT_THAT(streams, Not(IsEmpty())); + EXPECT_NE(streams[0].min_bitrate_bps, kDefaultMinVideoBitrateBps); + EXPECT_EQ(streams[0].min_bitrate_bps, 1000); +} + +struct StreamResolutionTestParams { + absl::string_view field_trials; + size_t number_of_streams = 1; + Resolution resolution = {.width = 640, .height = 480}; + bool is_legacy_screencast = false; + size_t first_active_layer_idx = 0; +}; + +std::vector CreateStreamResolutions( + 
const StreamResolutionTestParams& test_params) { + VideoEncoderConfig encoder_config; + encoder_config.codec_type = VideoCodecType::kVideoCodecVP8; + encoder_config.number_of_streams = test_params.number_of_streams; + encoder_config.simulcast_layers.resize(test_params.number_of_streams); + for (size_t i = 0; i < encoder_config.number_of_streams; ++i) { + encoder_config.simulcast_layers[i].active = + (i >= test_params.first_active_layer_idx); + } + if (test_params.is_legacy_screencast) { + encoder_config.content_type = VideoEncoderConfig::ContentType::kScreen; + encoder_config.legacy_conference_mode = true; + } + return GetStreamResolutions( + CreateEncoderStreams(ExplicitKeyValueConfig(test_params.field_trials), + test_params.resolution, encoder_config)); +} + +TEST(EncoderStreamFactory, KeepsResolutionUnchangedWhenAligned) { + EXPECT_THAT( + CreateStreamResolutions({.number_of_streams = 2, + .resolution = {.width = 516, .height = 526}}), + ElementsAre(Resolution{.width = 516 / 2, .height = 526 / 2}, + Resolution{.width = 516, .height = 526})); +} + +TEST(EncoderStreamFactory, AdjustsResolutionWhenUnaligned) { + // By default width and height of the smallest simulcast stream are required + // to be whole numbers. To achieve that, the resolution of the highest + // simulcast stream is adjusted to be multiple of (2 ^ (number_of_streams - + // 1)) by rounding down. + EXPECT_THAT( + CreateStreamResolutions({.number_of_streams = 2, + .resolution = {.width = 515, .height = 517}}), + ElementsAre(Resolution{.width = 514 / 2, .height = 516 / 2}, + Resolution{.width = 514, .height = 516})); +} + +TEST(EncoderStreamFactory, MakesResolutionDivisibleBy4) { + EXPECT_THAT( + CreateStreamResolutions( + {.field_trials = "WebRTC-NormalizeSimulcastResolution/Enabled-2/", + .number_of_streams = 2, + .resolution = {.width = 515, .height = 517}}), + ElementsAre(Resolution{.width = 512 / 2, .height = 516 / 2}, + Resolution{.width = 512, .height = 516})); +} + +TEST(EncoderStreamFactory, KeepsStreamCountUnchangedWhenResolutionIsHigh) { + EXPECT_THAT( + CreateStreamResolutions({.number_of_streams = 3, + .resolution = {.width = 1000, .height = 1000}}), + SizeIs(3)); +} + +TEST(EncoderStreamFactory, ReducesStreamCountWhenResolutionIsLow) { + EXPECT_THAT( + CreateStreamResolutions({.number_of_streams = 3, + .resolution = {.width = 100, .height = 100}}), + SizeIs(1)); +} + +TEST(EncoderStreamFactory, ReducesStreamCountDownToFirstActiveStream) { + EXPECT_THAT( + CreateStreamResolutions({.number_of_streams = 3, + .resolution = {.width = 100, .height = 100}, + .first_active_layer_idx = 1}), + SizeIs(2)); +} + +TEST(EncoderStreamFactory, + ReducesLegacyScreencastStreamCountWhenResolutionIsLow) { + // At least 2 streams are expected to be configured in legacy screencast mode. 
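The tests above pin down two behaviours of the resolution handling: the top resolution is rounded down so that every lower stream gets whole-number dimensions, i.e. width and height become multiples of 2^(number_of_streams - 1) (515x517 with two streams becomes 514x516, or 512x516 when WebRTC-NormalizeSimulcastResolution forces an exponent of 2), and the stream count shrinks at low input resolutions unless WebRTC-LegacySimulcastLayerLimit is disabled, never going below the first active layer. A sketch of the rounding rule only, with a hypothetical helper name:

// Round down so the value is divisible by 2^(num_streams - 1), which keeps the
// lowest simulcast stream's dimensions integral after repeated halving.
int RoundDownForSimulcast(int size, int num_streams) {
  const int shift = num_streams - 1;
  return (size >> shift) << shift;
}
// RoundDownForSimulcast(515, 2) == 514 and RoundDownForSimulcast(517, 2) == 516;
// with an experiment-forced exponent of 2, 515 rounds down to 512 instead.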
+ EXPECT_THAT( + CreateStreamResolutions({.number_of_streams = 3, + .resolution = {.width = 100, .height = 100}, + .is_legacy_screencast = true}), + SizeIs(2)); +} + +TEST(EncoderStreamFactory, KeepsStreamCountUnchangedWhenLegacyLimitIsDisabled) { + EXPECT_THAT(CreateStreamResolutions( + {.field_trials = "WebRTC-LegacySimulcastLayerLimit/Disabled/", + .number_of_streams = 3, + .resolution = {.width = 100, .height = 100}}), + SizeIs(3)); +} + +TEST(EncoderStreamFactory, KeepsHighResolutionWhenStreamCountIsReduced) { + EXPECT_THAT( + CreateStreamResolutions({.number_of_streams = 3, + .resolution = {.width = 640, .height = 360}}), + ElementsAre(Resolution{.width = 320, .height = 180}, + Resolution{.width = 640, .height = 360})); +} + +struct OverrideStreamSettingsTestParams { + std::string field_trials; + Resolution input_resolution; + VideoEncoderConfig::ContentType content_type; + std::vector requested_streams; + std::vector expected_streams; +}; + +using EncoderStreamFactoryOverrideStreamSettingsTest = + TestWithParam>; + +TEST_P(EncoderStreamFactoryOverrideStreamSettingsTest, OverrideStreamSettings) { + OverrideStreamSettingsTestParams test_params = std::get<0>(GetParam()); + VideoEncoderConfig encoder_config; + encoder_config.codec_type = std::get<1>(GetParam()); + encoder_config.number_of_streams = test_params.requested_streams.size(); + encoder_config.simulcast_layers = test_params.requested_streams; + encoder_config.content_type = test_params.content_type; + auto streams = + CreateEncoderStreams(ExplicitKeyValueConfig(test_params.field_trials), + test_params.input_resolution, encoder_config); + ASSERT_EQ(streams.size(), test_params.expected_streams.size()); + for (size_t i = 0; i < streams.size(); ++i) { + SCOPED_TRACE(i); + const VideoStream& expected = test_params.expected_streams[i]; + EXPECT_EQ(streams[i].width, expected.width); + EXPECT_EQ(streams[i].height, expected.height); + EXPECT_EQ(streams[i].max_framerate, expected.max_framerate); + EXPECT_EQ(streams[i].min_bitrate_bps, expected.min_bitrate_bps); + EXPECT_EQ(streams[i].target_bitrate_bps, expected.target_bitrate_bps); + EXPECT_EQ(streams[i].max_bitrate_bps, expected.max_bitrate_bps); + EXPECT_EQ(streams[i].scalability_mode, expected.scalability_mode); + } +} + +INSTANTIATE_TEST_SUITE_P( + Vp8H264Screencast, + EncoderStreamFactoryOverrideStreamSettingsTest, + Combine(Values(OverrideStreamSettingsTestParams{ + .input_resolution = {.width = 1920, .height = 1080}, + .content_type = VideoEncoderConfig::ContentType::kScreen, + .requested_streams = + {CreateVideoStream( + {.max_framerate_fps = 5, + .max_bitrate_bps = 420'000, + .scale_resolution_down_by = 1, + .scalability_mode = ScalabilityMode::kL1T2}), + CreateVideoStream( + {.max_framerate_fps = 30, + .max_bitrate_bps = 2'500'000, + .scale_resolution_down_by = 1, + .scalability_mode = ScalabilityMode::kL1T2})}, + .expected_streams = + {CreateVideoStream( + {.width = 1920, + .height = 1080, + .max_framerate_fps = 5, + .min_bitrate_bps = 30'000, + .target_bitrate_bps = 420'000, + .max_bitrate_bps = 420'000, + .scalability_mode = ScalabilityMode::kL1T2}), + CreateVideoStream( + {.width = 1920, + .height = 1080, + .max_framerate_fps = 30, + .min_bitrate_bps = 800'000, + .target_bitrate_bps = 2'500'000, + .max_bitrate_bps = 2'500'000, + .scalability_mode = ScalabilityMode::kL1T2})}}), + Values(VideoCodecType::kVideoCodecVP8, + VideoCodecType::kVideoCodecH264))); + +INSTANTIATE_TEST_SUITE_P( + Av1Vp9H265Screencast, + EncoderStreamFactoryOverrideStreamSettingsTest, + 
Combine(Values(OverrideStreamSettingsTestParams{ + .input_resolution = {.width = 1920, .height = 1080}, + .content_type = VideoEncoderConfig::ContentType::kScreen, + .requested_streams = + {CreateVideoStream( + {.max_framerate_fps = 5, + .max_bitrate_bps = 420'000, + .scale_resolution_down_by = 1, + .scalability_mode = ScalabilityMode::kL1T2}), + CreateVideoStream( + {.max_framerate_fps = 30, + .max_bitrate_bps = 2'500'000, + .scale_resolution_down_by = 1, + .scalability_mode = ScalabilityMode::kL1T2})}, + .expected_streams = + {CreateVideoStream( + {.width = 1920, + .height = 1080, + .max_framerate_fps = 5, + .min_bitrate_bps = 30'000, + .target_bitrate_bps = 420'000, + .max_bitrate_bps = 420'000, + .scalability_mode = ScalabilityMode::kL1T2}), + CreateVideoStream( + {.width = 1920, + .height = 1080, + .max_framerate_fps = 30, + .min_bitrate_bps = 769'000, + .target_bitrate_bps = 2'500'000, + .max_bitrate_bps = 2'500'000, + .scalability_mode = ScalabilityMode::kL1T2})}}), + Values( +#ifdef RTC_ENABLE_H265 + webrtc::kVideoCodecH265, +#endif + VideoCodecType::kVideoCodecAV1, + VideoCodecType::kVideoCodecVP9))); + +TEST(EncoderStreamFactory, VP9TemporalLayerCountTransferToStreamSettings) { + VideoEncoderConfig encoder_config; + VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); + encoder_config.encoder_specific_settings = + make_ref_counted( + vp9_settings); + encoder_config.codec_type = VideoCodecType::kVideoCodecVP9; + encoder_config.number_of_streams = 1; + encoder_config.simulcast_layers.resize(1); + encoder_config.simulcast_layers[0].num_temporal_layers = 3; + auto streams = CreateEncoderStreams(ExplicitKeyValueConfig(""), {1280, 720}, + encoder_config); + ASSERT_THAT(streams, SizeIs(1)); + EXPECT_EQ(streams[0].num_temporal_layers, 3); +} + +TEST(EncoderStreamFactory, AV1TemporalLayerCountTransferToStreamSettings) { + VideoEncoderConfig encoder_config; + encoder_config.codec_type = VideoCodecType::kVideoCodecAV1; + encoder_config.number_of_streams = 1; + encoder_config.simulcast_layers.resize(1); + encoder_config.simulcast_layers[0].num_temporal_layers = 3; + auto streams = CreateEncoderStreams(ExplicitKeyValueConfig(""), {1280, 720}, + encoder_config); + ASSERT_THAT(streams, SizeIs(1)); + EXPECT_EQ(streams[0].num_temporal_layers, 3); +} + +TEST(EncoderStreamFactory, H264TemporalLayerCountTransferToStreamSettings) { + VideoEncoderConfig encoder_config; + encoder_config.codec_type = VideoCodecType::kVideoCodecH264; + encoder_config.number_of_streams = 1; + encoder_config.simulcast_layers.resize(1); + encoder_config.simulcast_layers[0].num_temporal_layers = 3; + auto streams = CreateEncoderStreams(ExplicitKeyValueConfig(""), {1280, 720}, + encoder_config); + ASSERT_THAT(streams, SizeIs(1)); + EXPECT_EQ(streams[0].num_temporal_layers, std::nullopt); +} + +#ifdef RTC_ENABLE_H265 +TEST(EncoderStreamFactory, H265TemporalLayerCountTransferToStreamSettings) { + VideoEncoderConfig encoder_config; + encoder_config.codec_type = VideoCodecType::kVideoCodecH265; + encoder_config.number_of_streams = 1; + encoder_config.simulcast_layers.resize(1); + encoder_config.simulcast_layers[0].num_temporal_layers = 3; + auto streams = CreateEncoderStreams(ExplicitKeyValueConfig(""), {1280, 720}, + encoder_config); + ASSERT_THAT(streams, SizeIs(1)); + EXPECT_EQ(streams[0].num_temporal_layers, 3); +} +#endif + +TEST(EncoderStreamFactory, VP9SetsMaxBitrateToConfiguredEncodingValue) { + VideoEncoderConfig encoder_config; + VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); + 
encoder_config.encoder_specific_settings = + make_ref_counted( + vp9_settings); + encoder_config.codec_type = VideoCodecType::kVideoCodecVP9; + encoder_config.number_of_streams = 1; + encoder_config.simulcast_layers.resize(3); + encoder_config.simulcast_layers[0].max_bitrate_bps = 5000000; + auto streams = CreateEncoderStreams(ExplicitKeyValueConfig(""), {1280, 720}, + encoder_config); + ASSERT_THAT(streams, SizeIs(1)); + EXPECT_EQ(streams[0].max_bitrate_bps, 5000000); +} + } // namespace webrtc diff --git a/video/config/simulcast.cc b/video/config/simulcast.cc index 2bd4ac04c3..66e9005282 100644 --- a/video/config/simulcast.cc +++ b/video/config/simulcast.cc @@ -14,25 +14,31 @@ #include #include +#include #include #include #include "absl/strings/match.h" -#include "absl/types/optional.h" -#include "api/video/video_codec_constants.h" +#include "api/array_view.h" +#include "api/field_trials_view.h" +#include "api/units/data_rate.h" +#include "api/video/resolution.h" +#include "api/video/video_codec_type.h" #include "media/base/media_constants.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/min_video_bitrate_experiment.h" -#include "rtc_base/experiments/normalize_simulcast_size_experiment.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/logging.h" +#include "video/config/video_encoder_config.h" -namespace cricket { +namespace webrtc { namespace { +using ::webrtc::FieldTrialsView; + constexpr char kUseLegacySimulcastLayerLimitFieldTrial[] = "WebRTC-LegacySimulcastLayerLimit"; @@ -40,16 +46,16 @@ constexpr double kDefaultMaxRoundupRate = 0.1; // Limits for legacy conference screensharing mode. Currently used for the // lower of the two simulcast streams. -constexpr webrtc::DataRate kScreenshareDefaultTl0Bitrate = +constexpr DataRate kScreenshareDefaultTl0Bitrate = webrtc::DataRate::KilobitsPerSec(200); -constexpr webrtc::DataRate kScreenshareDefaultTl1Bitrate = +constexpr DataRate kScreenshareDefaultTl1Bitrate = webrtc::DataRate::KilobitsPerSec(1000); // Min/max bitrate for the higher one of the two simulcast stream used for // screen content. -constexpr webrtc::DataRate kScreenshareHighStreamMinBitrate = +constexpr DataRate kScreenshareHighStreamMinBitrate = webrtc::DataRate::KilobitsPerSec(600); -constexpr webrtc::DataRate kScreenshareHighStreamMaxBitrate = +constexpr DataRate kScreenshareHighStreamMaxBitrate = webrtc::DataRate::KilobitsPerSec(1250); constexpr int kDefaultNumTemporalLayers = 3; @@ -64,19 +70,19 @@ struct SimulcastFormat { size_t max_layers; // The maximum bitrate for encoding stream at `widthxheight`, when we are // not sending the next higher spatial stream. - webrtc::DataRate max_bitrate; + DataRate max_bitrate; // The target bitrate for encoding stream at `widthxheight`, when this layer // is not the highest layer (i.e., when we are sending another higher spatial // stream). - webrtc::DataRate target_bitrate; + DataRate target_bitrate; // The minimum bitrate needed for encoding stream at `widthxheight`. - webrtc::DataRate min_bitrate; + DataRate min_bitrate; }; // These tables describe from which resolution we can use how many // simulcast layers at what bitrates (maximum, target, and minimum). // Important!! Keep this table from high resolution to low resolution. 
-constexpr const SimulcastFormat kSimulcastFormats[] = { +constexpr const SimulcastFormat kSimulcastFormatsVP8[] = { {1920, 1080, 3, webrtc::DataRate::KilobitsPerSec(5000), webrtc::DataRate::KilobitsPerSec(4000), webrtc::DataRate::KilobitsPerSec(800)}, @@ -102,23 +108,86 @@ constexpr const SimulcastFormat kSimulcastFormats[] = { webrtc::DataRate::KilobitsPerSec(0), webrtc::DataRate::KilobitsPerSec(30)}}; -constexpr webrtc::DataRate Interpolate(const webrtc::DataRate& a, - const webrtc::DataRate& b, - float rate) { +// These tables describe from which resolution we can use how many +// simulcast layers at what bitrates (maximum, target, and minimum). +// Important!! Keep this table from high resolution to low resolution. +constexpr const SimulcastFormat kSimulcastFormatsVP9[] = { + {1920, 1080, 3, webrtc::DataRate::KilobitsPerSec(3367), + webrtc::DataRate::KilobitsPerSec(3367), + webrtc::DataRate::KilobitsPerSec(769)}, + {1280, 720, 3, webrtc::DataRate::KilobitsPerSec(1524), + webrtc::DataRate::KilobitsPerSec(1524), + webrtc::DataRate::KilobitsPerSec(481)}, + {960, 540, 3, webrtc::DataRate::KilobitsPerSec(879), + webrtc::DataRate::KilobitsPerSec(879), + webrtc::DataRate::KilobitsPerSec(337)}, + {640, 360, 2, webrtc::DataRate::KilobitsPerSec(420), + webrtc::DataRate::KilobitsPerSec(420), + webrtc::DataRate::KilobitsPerSec(193)}, + {480, 270, 2, webrtc::DataRate::KilobitsPerSec(257), + webrtc::DataRate::KilobitsPerSec(257), + webrtc::DataRate::KilobitsPerSec(121)}, + {320, 180, 1, webrtc::DataRate::KilobitsPerSec(142), + webrtc::DataRate::KilobitsPerSec(142), + webrtc::DataRate::KilobitsPerSec(30)}, + {240, 135, 1, webrtc::DataRate::KilobitsPerSec(101), + webrtc::DataRate::KilobitsPerSec(101), + webrtc::DataRate::KilobitsPerSec(30)}, + // As the resolution goes down, interpolate the target and max bitrates down + // towards zero. The min bitrate is still limited at 30 kbps and the target + // and the max will be capped from below accordingly. + {0, 0, 1, webrtc::DataRate::KilobitsPerSec(0), + webrtc::DataRate::KilobitsPerSec(0), + webrtc::DataRate::KilobitsPerSec(30)}}; + +constexpr DataRate Interpolate(const DataRate& a, + const DataRate& b, + float rate) { return a * (1.0 - rate) + b * rate; } // TODO(webrtc:12415): Flip this to a kill switch when this feature launches. -bool EnableLowresBitrateInterpolation(const webrtc::FieldTrialsView& trials) { +bool EnableLowresBitrateInterpolation(const FieldTrialsView& trials) { return absl::StartsWith( trials.Lookup("WebRTC-LowresSimulcastBitrateInterpolation"), "Enabled"); } +int GetDefaultSimulcastTemporalLayers(VideoCodecType codec) { + switch (codec) { + case webrtc::kVideoCodecVP8: + case webrtc::kVideoCodecVP9: + case webrtc::kVideoCodecAV1: + case webrtc::kVideoCodecH264: + case webrtc::kVideoCodecGeneric: + return kDefaultNumTemporalLayers; + // For codec type that has no software fallback, defaults to L1T1 for + // initial simulcast setup, as this is the only scalability mode secure to + // be supported. 
+ case webrtc::kVideoCodecH265: + return 1; + } + RTC_DCHECK_NOTREACHED() << "Unsupported codec."; + return 1; +} + std::vector GetSimulcastFormats( - bool enable_lowres_bitrate_interpolation) { + bool enable_lowres_bitrate_interpolation, + VideoCodecType codec) { std::vector formats; - formats.insert(formats.begin(), std::begin(kSimulcastFormats), - std::end(kSimulcastFormats)); + switch (codec) { + case webrtc::kVideoCodecGeneric: + case webrtc::kVideoCodecVP8: + case webrtc::kVideoCodecH264: + formats.insert(formats.begin(), std::begin(kSimulcastFormatsVP8), + std::end(kSimulcastFormatsVP8)); + break; + case webrtc::kVideoCodecVP9: + case webrtc::kVideoCodecAV1: + case webrtc::kVideoCodecH265: + formats.insert(formats.begin(), std::begin(kSimulcastFormatsVP9), + std::end(kSimulcastFormatsVP9)); + break; + } if (!enable_lowres_bitrate_interpolation) { RTC_CHECK_GE(formats.size(), 2u); SimulcastFormat& format0x0 = formats[formats.size() - 1]; @@ -130,33 +199,14 @@ std::vector GetSimulcastFormats( return formats; } -// Multiway: Number of temporal layers for each simulcast stream. -int DefaultNumberOfTemporalLayers(const webrtc::FieldTrialsView& trials) { - const std::string group_name = - trials.Lookup("WebRTC-VP8ConferenceTemporalLayers"); - if (group_name.empty()) - return kDefaultNumTemporalLayers; - - int num_temporal_layers = kDefaultNumTemporalLayers; - if (sscanf(group_name.c_str(), "%d", &num_temporal_layers) == 1 && - num_temporal_layers > 0 && - num_temporal_layers <= webrtc::kMaxTemporalStreams) { - return num_temporal_layers; - } - - RTC_LOG(LS_WARNING) << "Attempt to set number of temporal layers to " - "incorrect value: " - << group_name; - - return kDefaultNumTemporalLayers; -} - int FindSimulcastFormatIndex(int width, int height, - bool enable_lowres_bitrate_interpolation) { + bool enable_lowres_bitrate_interpolation, + VideoCodecType codec) { RTC_DCHECK_GE(width, 0); RTC_DCHECK_GE(height, 0); - const auto formats = GetSimulcastFormats(enable_lowres_bitrate_interpolation); + const auto formats = + GetSimulcastFormats(enable_lowres_bitrate_interpolation, codec); for (uint32_t i = 0; i < formats.size(); ++i) { if (width * height >= formats[i].width * formats[i].height) { return i; @@ -166,30 +216,16 @@ int FindSimulcastFormatIndex(int width, return -1; } -} // namespace - -// Round size to nearest simulcast-friendly size. -// Simulcast stream width and height must both be dividable by -// |2 ^ (simulcast_layers - 1)|. 
-int NormalizeSimulcastSize(int size, size_t simulcast_layers) { - int base2_exponent = static_cast(simulcast_layers) - 1; - const absl::optional experimental_base2_exponent = - webrtc::NormalizeSimulcastSizeExperiment::GetBase2Exponent(); - if (experimental_base2_exponent && - (size > (1 << *experimental_base2_exponent))) { - base2_exponent = *experimental_base2_exponent; - } - return ((size >> base2_exponent) << base2_exponent); -} - SimulcastFormat InterpolateSimulcastFormat( int width, int height, - absl::optional max_roundup_rate, - bool enable_lowres_bitrate_interpolation) { - const auto formats = GetSimulcastFormats(enable_lowres_bitrate_interpolation); + std::optional max_roundup_rate, + bool enable_lowres_bitrate_interpolation, + VideoCodecType codec) { + const auto formats = + GetSimulcastFormats(enable_lowres_bitrate_interpolation, codec); const int index = FindSimulcastFormatIndex( - width, height, enable_lowres_bitrate_interpolation); + width, height, enable_lowres_bitrate_interpolation, codec); if (index == 0) return formats[index]; const int total_pixels_up = @@ -203,179 +239,42 @@ SimulcastFormat InterpolateSimulcastFormat( size_t max_layers = (rate < max_roundup_rate.value_or(kDefaultMaxRoundupRate)) ? formats[index - 1].max_layers : formats[index].max_layers; - webrtc::DataRate max_bitrate = Interpolate(formats[index - 1].max_bitrate, - formats[index].max_bitrate, rate); - webrtc::DataRate target_bitrate = Interpolate( - formats[index - 1].target_bitrate, formats[index].target_bitrate, rate); - webrtc::DataRate min_bitrate = Interpolate(formats[index - 1].min_bitrate, - formats[index].min_bitrate, rate); + DataRate max_bitrate = Interpolate(formats[index - 1].max_bitrate, + formats[index].max_bitrate, rate); + DataRate target_bitrate = Interpolate(formats[index - 1].target_bitrate, + formats[index].target_bitrate, rate); + DataRate min_bitrate = Interpolate(formats[index - 1].min_bitrate, + formats[index].min_bitrate, rate); return {width, height, max_layers, max_bitrate, target_bitrate, min_bitrate}; } -SimulcastFormat InterpolateSimulcastFormat( - int width, - int height, - bool enable_lowres_bitrate_interpolation) { - return InterpolateSimulcastFormat(width, height, absl::nullopt, - enable_lowres_bitrate_interpolation); -} - -webrtc::DataRate FindSimulcastMaxBitrate( - int width, - int height, - bool enable_lowres_bitrate_interpolation) { - return InterpolateSimulcastFormat(width, height, - enable_lowres_bitrate_interpolation) - .max_bitrate; -} - -webrtc::DataRate FindSimulcastTargetBitrate( - int width, - int height, - bool enable_lowres_bitrate_interpolation) { - return InterpolateSimulcastFormat(width, height, - enable_lowres_bitrate_interpolation) - .target_bitrate; -} - -webrtc::DataRate FindSimulcastMinBitrate( - int width, - int height, - bool enable_lowres_bitrate_interpolation) { - return InterpolateSimulcastFormat(width, height, - enable_lowres_bitrate_interpolation) - .min_bitrate; -} - -void BoostMaxSimulcastLayer(webrtc::DataRate max_bitrate, - std::vector* layers) { - if (layers->empty()) - return; - - const webrtc::DataRate total_bitrate = GetTotalMaxBitrate(*layers); - - // We're still not using all available bits. - if (total_bitrate < max_bitrate) { - // Spend additional bits to boost the max layer. 
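Note on the table machinery above: FindSimulcastFormatIndex scans the high-to-low table and returns the first row whose pixel count does not exceed the frame's, and InterpolateSimulcastFormat blends that row with the next higher one via Interpolate(a, b, rate) = a * (1 - rate) + b * rate. The exact rate computation is elided in this hunk; the sketch below assumes the pixel-distance ratio described by the comment retained in LimitSimulcastLayerCount, (pixels_up - pixels) / (pixels_up - pixels_down), and takes its rows from kSimulcastFormatsVP9:

#include <cstdio>

int main() {
  // A 560x315 frame sits between the 640x360 row (max 420 kbps) and the
  // 480x270 row (max 257 kbps) of kSimulcastFormatsVP9.
  const double pixels = 560.0 * 315.0;       // 176'400
  const double pixels_up = 640.0 * 360.0;    // 230'400
  const double pixels_down = 480.0 * 270.0;  // 129'600
  const double rate = (pixels_up - pixels) / (pixels_up - pixels_down);
  const double max_kbps = 420.0 * (1.0 - rate) + 257.0 * rate;
  std::printf("rate=%.3f max~=%.0f kbps\n", rate, max_kbps);  // ~0.536, ~333.
  return 0;
}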
- const webrtc::DataRate bitrate_left = max_bitrate - total_bitrate; - layers->back().max_bitrate_bps += bitrate_left.bps(); - } -} - -webrtc::DataRate GetTotalMaxBitrate( - const std::vector& layers) { - if (layers.empty()) - return webrtc::DataRate::Zero(); - - int total_max_bitrate_bps = 0; - for (size_t s = 0; s < layers.size() - 1; ++s) { - total_max_bitrate_bps += layers[s].target_bitrate_bps; - } - total_max_bitrate_bps += layers.back().max_bitrate_bps; - return webrtc::DataRate::BitsPerSec(total_max_bitrate_bps); -} - -size_t LimitSimulcastLayerCount(int width, - int height, - size_t need_layers, - size_t layer_count, - const webrtc::FieldTrialsView& trials) { - if (!absl::StartsWith(trials.Lookup(kUseLegacySimulcastLayerLimitFieldTrial), - "Disabled")) { - // Max layers from one higher resolution in kSimulcastFormats will be used - // if the ratio (pixels_up - pixels) / (pixels_up - pixels_down) is less - // than configured `max_ratio`. pixels_down is the selected index in - // kSimulcastFormats based on pixels. - webrtc::FieldTrialOptional max_ratio("max_ratio"); - webrtc::ParseFieldTrial({&max_ratio}, - trials.Lookup("WebRTC-SimulcastLayerLimitRoundUp")); - - const bool enable_lowres_bitrate_interpolation = - EnableLowresBitrateInterpolation(trials); - size_t adaptive_layer_count = std::max( - need_layers, - InterpolateSimulcastFormat(width, height, max_ratio.GetOptional(), - enable_lowres_bitrate_interpolation) - .max_layers); - if (layer_count > adaptive_layer_count) { - RTC_LOG(LS_WARNING) << "Reducing simulcast layer count from " - << layer_count << " to " << adaptive_layer_count; - layer_count = adaptive_layer_count; - } - } - return layer_count; -} - -std::vector GetSimulcastConfig( - size_t min_layers, - size_t max_layers, - int width, - int height, - double bitrate_priority, - int max_qp, - bool is_screenshare_with_conference_mode, - bool temporal_layers_supported, - const webrtc::FieldTrialsView& trials) { - RTC_DCHECK_LE(min_layers, max_layers); - RTC_DCHECK(max_layers > 1 || is_screenshare_with_conference_mode); - - const bool base_heavy_tl3_rate_alloc = - webrtc::RateControlSettings::ParseFromKeyValueConfig(&trials) - .Vp8BaseHeavyTl3RateAllocation(); - if (is_screenshare_with_conference_mode) { - return GetScreenshareLayers(max_layers, width, height, bitrate_priority, - max_qp, temporal_layers_supported, - base_heavy_tl3_rate_alloc, trials); - } else { - // Some applications rely on the old behavior limiting the simulcast layer - // count based on the resolution automatically, which they can get through - // the WebRTC-LegacySimulcastLayerLimit field trial until they update. - max_layers = - LimitSimulcastLayerCount(width, height, min_layers, max_layers, trials); - - return GetNormalSimulcastLayers(max_layers, width, height, bitrate_priority, - max_qp, temporal_layers_supported, - base_heavy_tl3_rate_alloc, trials); - } -} - -std::vector GetNormalSimulcastLayers( - size_t layer_count, - int width, - int height, - double bitrate_priority, - int max_qp, +std::vector GetNormalSimulcastLayers( + ArrayView resolutions, bool temporal_layers_supported, bool base_heavy_tl3_rate_alloc, - const webrtc::FieldTrialsView& trials) { - std::vector layers(layer_count); - + const FieldTrialsView& trials, + VideoCodecType codec) { const bool enable_lowres_bitrate_interpolation = EnableLowresBitrateInterpolation(trials); - - // Format width and height has to be divisible by |2 ^ num_simulcast_layers - - // 1|. 
- width = NormalizeSimulcastSize(width, layer_count); - height = NormalizeSimulcastSize(height, layer_count); + const int num_temporal_layers = + temporal_layers_supported ? GetDefaultSimulcastTemporalLayers(codec) : 1; // Add simulcast streams, from highest resolution (`s` = num_simulcast_layers // -1) to lowest resolution at `s` = 0. - for (size_t s = layer_count - 1;; --s) { - layers[s].width = width; - layers[s].height = height; - // TODO(pbos): Fill actual temporal-layer bitrate thresholds. - layers[s].max_qp = max_qp; - layers[s].num_temporal_layers = - temporal_layers_supported ? DefaultNumberOfTemporalLayers(trials) : 1; - layers[s].max_bitrate_bps = - FindSimulcastMaxBitrate(width, height, - enable_lowres_bitrate_interpolation) - .bps(); - layers[s].target_bitrate_bps = - FindSimulcastTargetBitrate(width, height, - enable_lowres_bitrate_interpolation) - .bps(); - int num_temporal_layers = DefaultNumberOfTemporalLayers(trials); + std::vector layers(resolutions.size()); + for (size_t s = 0; s < resolutions.size(); ++s) { + layers[s].width = resolutions[s].width; + layers[s].height = resolutions[s].height; + layers[s].num_temporal_layers = num_temporal_layers; + + SimulcastFormat interpolated_format = InterpolateSimulcastFormat( + layers[s].width, layers[s].height, /*max_roundup_rate=*/std::nullopt, + enable_lowres_bitrate_interpolation, codec); + + layers[s].max_bitrate_bps = interpolated_format.max_bitrate.bps(); + layers[s].target_bitrate_bps = interpolated_format.target_bitrate.bps(); + layers[s].min_bitrate_bps = interpolated_format.min_bitrate.bps(); + if (s == 0) { // If alternative temporal rate allocation is selected, adjust the // bitrate of the lowest simulcast stream so that absolute bitrate for @@ -388,12 +287,6 @@ std::vector GetNormalSimulcastLayers( // Base heavy allocation increases TL0 bitrate from 40% to 60%. rate_factor = 0.4 / 0.6; } - } else { - rate_factor = - webrtc::SimulcastRateAllocator::GetTemporalRateAllocation( - 3, 0, /*base_heavy_tl3_rate_alloc=*/false) / - webrtc::SimulcastRateAllocator::GetTemporalRateAllocation( - num_temporal_layers, 0, /*base_heavy_tl3_rate_alloc=*/false); } layers[s].max_bitrate_bps = @@ -401,10 +294,6 @@ std::vector GetNormalSimulcastLayers( layers[s].target_bitrate_bps = static_cast(layers[s].target_bitrate_bps * rate_factor); } - layers[s].min_bitrate_bps = - FindSimulcastMinBitrate(width, height, - enable_lowres_bitrate_interpolation) - .bps(); // Ensure consistency. layers[s].max_bitrate_bps = @@ -413,42 +302,26 @@ std::vector GetNormalSimulcastLayers( std::max(layers[s].min_bitrate_bps, layers[s].target_bitrate_bps); layers[s].max_framerate = kDefaultVideoMaxFramerate; - - width /= 2; - height /= 2; - - if (s == 0) { - break; - } } - // Currently the relative bitrate priority of the sender is controlled by - // the value of the lowest VideoStream. - // TODO(bugs.webrtc.org/8630): The web specification describes being able to - // control relative bitrate for each individual simulcast layer, but this - // is currently just implemented per rtp sender. 
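The surviving branch above rescales the lowest stream when base-heavy TL3 rate allocation is active: with three temporal layers the base layer normally receives 40% of the stream bitrate, while base-heavy allocation gives it 60%, so multiplying the stream's max and target by 0.4 / 0.6 keeps the base layer's absolute bitrate unchanged. The arithmetic, on hypothetical numbers:

// Default allocation:    200 kbps stream * 0.40        = 80 kbps on TL0.
// Base-heavy allocation: (200 kbps * 0.4 / 0.6) * 0.60 = 80 kbps on TL0,
// i.e. scaling the stream by 2/3 compensates for TL0's larger share.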
- layers[0].bitrate_priority = bitrate_priority; + return layers; } -std::vector GetScreenshareLayers( - size_t max_layers, - int width, - int height, - double bitrate_priority, - int max_qp, - bool temporal_layers_supported, - bool base_heavy_tl3_rate_alloc, - const webrtc::FieldTrialsView& trials) { +std::vector GetScreenshareLayers(size_t max_layers, + int width, + int height, + bool temporal_layers_supported, + bool base_heavy_tl3_rate_alloc, + const FieldTrialsView& trials) { size_t num_simulcast_layers = std::min(max_layers, kScreenshareMaxSimulcastLayers); - std::vector layers(num_simulcast_layers); + std::vector layers(num_simulcast_layers); // For legacy screenshare in conference mode, tl0 and tl1 bitrates are // piggybacked on the VideoCodec struct as target and max bitrates, // respectively. See eg. webrtc::LibvpxVp8Encoder::SetRates(). layers[0].width = width; layers[0].height = height; - layers[0].max_qp = max_qp; layers[0].max_framerate = 5; layers[0].min_bitrate_bps = webrtc::kDefaultMinVideoBitrateBps; layers[0].target_bitrate_bps = kScreenshareDefaultTl0Bitrate.bps(); @@ -477,7 +350,6 @@ std::vector GetScreenshareLayers( layers[1].width = width; layers[1].height = height; - layers[1].max_qp = max_qp; layers[1].max_framerate = kDefaultVideoMaxFramerate; layers[1].num_temporal_layers = temporal_layers_supported ? kScreenshareTemporalLayers : 1; @@ -488,10 +360,87 @@ std::vector GetScreenshareLayers( layers[1].max_bitrate_bps = max_bitrate_bps; } - // The bitrate priority currently implemented on a per-sender level, so we - // just set it for the first simulcast layer. - layers[0].bitrate_priority = bitrate_priority; return layers; } -} // namespace cricket +} // namespace + +size_t LimitSimulcastLayerCount(size_t min_num_layers, + size_t max_num_layers, + int width, + int height, + const FieldTrialsView& trials, + VideoCodecType codec) { + if (!absl::StartsWith(trials.Lookup(kUseLegacySimulcastLayerLimitFieldTrial), + "Disabled")) { + // Max layers from one higher resolution in kSimulcastFormats will be used + // if the ratio (pixels_up - pixels) / (pixels_up - pixels_down) is less + // than configured `max_ratio`. pixels_down is the selected index in + // kSimulcastFormats based on pixels. + FieldTrialOptional max_ratio("max_ratio"); + webrtc::ParseFieldTrial({&max_ratio}, + trials.Lookup("WebRTC-SimulcastLayerLimitRoundUp")); + + size_t reduced_num_layers = + std::max(min_num_layers, + InterpolateSimulcastFormat( + width, height, max_ratio.GetOptional(), + /*enable_lowres_bitrate_interpolation=*/false, codec) + .max_layers); + if (max_num_layers > reduced_num_layers) { + RTC_LOG(LS_WARNING) << "Reducing simulcast layer count from " + << max_num_layers << " to " << reduced_num_layers; + return reduced_num_layers; + } + } + return max_num_layers; +} + +void BoostMaxSimulcastLayer(DataRate max_bitrate, + std::vector* layers) { + if (layers->empty()) + return; + + const DataRate total_bitrate = GetTotalMaxBitrate(*layers); + + // We're still not using all available bits. + if (total_bitrate < max_bitrate) { + // Spend additional bits to boost the max layer. 
+ const DataRate bitrate_left = max_bitrate - total_bitrate; + layers->back().max_bitrate_bps += bitrate_left.bps(); + } +} + +DataRate GetTotalMaxBitrate(const std::vector& layers) { + if (layers.empty()) + return webrtc::DataRate::Zero(); + + int total_max_bitrate_bps = 0; + for (size_t s = 0; s < layers.size() - 1; ++s) { + total_max_bitrate_bps += layers[s].target_bitrate_bps; + } + total_max_bitrate_bps += layers.back().max_bitrate_bps; + return webrtc::DataRate::BitsPerSec(total_max_bitrate_bps); +} + +std::vector GetSimulcastConfig( + ArrayView resolutions, + bool is_screenshare_with_conference_mode, + bool temporal_layers_supported, + const FieldTrialsView& trials, + VideoCodecType codec) { + RTC_DCHECK(!resolutions.empty()); + + const bool base_heavy_tl3_rate_alloc = + RateControlSettings(trials).Vp8BaseHeavyTl3RateAllocation(); + if (is_screenshare_with_conference_mode) { + return GetScreenshareLayers( + resolutions.size(), resolutions[0].width, resolutions[0].height, + temporal_layers_supported, base_heavy_tl3_rate_alloc, trials); + } else { + return GetNormalSimulcastLayers(resolutions, temporal_layers_supported, + base_heavy_tl3_rate_alloc, trials, codec); + } +} + +} // namespace webrtc diff --git a/video/config/simulcast.h b/video/config/simulcast.h index 32af168bcd..1a7f87c768 100644 --- a/video/config/simulcast.h +++ b/video/config/simulcast.h @@ -15,58 +15,51 @@ #include +#include "api/array_view.h" #include "api/field_trials_view.h" #include "api/units/data_rate.h" +#include "api/video/resolution.h" #include "video/config/video_encoder_config.h" -namespace cricket { +namespace webrtc { // Gets the total maximum bitrate for the `streams`. -webrtc::DataRate GetTotalMaxBitrate( - const std::vector& streams); +DataRate GetTotalMaxBitrate(const std::vector& streams); // Adds any bitrate of `max_bitrate` that is above the total maximum bitrate for // the `layers` to the highest quality layer. -void BoostMaxSimulcastLayer(webrtc::DataRate max_bitrate, - std::vector* layers); +void BoostMaxSimulcastLayer(DataRate max_bitrate, + std::vector* layers); -// Round size to nearest simulcast-friendly size -int NormalizeSimulcastSize(int size, size_t simulcast_layers); +// Returns number of simulcast streams. The value depends on the resolution and +// is restricted to the range from `min_num_layers` to `max_num_layers`, +// inclusive. +size_t LimitSimulcastLayerCount(size_t min_num_layers, + size_t max_num_layers, + int width, + int height, + const FieldTrialsView& trials, + VideoCodecType codec); // Gets simulcast settings. -std::vector GetSimulcastConfig( - size_t min_layers, - size_t max_layers, - int width, - int height, - double bitrate_priority, - int max_qp, +std::vector GetSimulcastConfig( + ArrayView resolutions, bool is_screenshare_with_conference_mode, bool temporal_layers_supported, - const webrtc::FieldTrialsView& trials); + const FieldTrialsView& trials, + VideoCodecType codec); -// Gets the simulcast config layers for a non-screensharing case. -std::vector GetNormalSimulcastLayers( - size_t max_layers, - int width, - int height, - double bitrate_priority, - int max_qp, - bool temporal_layers_supported, - bool base_heavy_tl3_rate_alloc, - const webrtc::FieldTrialsView& trials); - -// Gets simulcast config layers for screenshare settings. 
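The helpers moved above define the total max bitrate as the sum of target_bitrate_bps over every layer except the top one, plus the top layer's max_bitrate_bps, and BoostMaxSimulcastLayer hands any headroom above that total to the top layer only. A sketch mirroring the unit-test values further down in this patch:

#include <vector>

#include "api/units/data_rate.h"
#include "video/config/simulcast.h"
#include "video/config/video_encoder_config.h"

void BoostExample() {
  std::vector<webrtc::VideoStream> streams(3);
  streams[0].target_bitrate_bps = 100'000;
  streams[1].target_bitrate_bps = 200'000;
  streams[2].max_bitrate_bps = 400'000;
  // Sum of lower-layer targets plus the top layer's max: 700'000 bps.
  webrtc::DataRate total = webrtc::GetTotalMaxBitrate(streams);
  // One bit/s of headroom above the total goes entirely to the top layer.
  webrtc::BoostMaxSimulcastLayer(total + webrtc::DataRate::BitsPerSec(1),
                                 &streams);
  // streams[2].max_bitrate_bps is now 400'001, matching the unit test below.
}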
-std::vector GetScreenshareLayers( - size_t max_layers, - int width, - int height, - double bitrate_priority, - int max_qp, - bool temporal_layers_supported, - bool base_heavy_tl3_rate_alloc, - const webrtc::FieldTrialsView& trials); +} // namespace webrtc +// Re-export symbols from the webrtc namespace for backwards compatibility. +// TODO(bugs.webrtc.org/4222596): Remove once all references are updated. +#ifdef WEBRTC_ALLOW_DEPRECATED_NAMESPACES +namespace cricket { +using ::webrtc::BoostMaxSimulcastLayer; +using ::webrtc::GetSimulcastConfig; +using ::webrtc::GetTotalMaxBitrate; +using ::webrtc::LimitSimulcastLayerCount; } // namespace cricket +#endif // WEBRTC_ALLOW_DEPRECATED_NAMESPACES #endif // VIDEO_CONFIG_SIMULCAST_H_ diff --git a/video/config/simulcast_unittest.cc b/video/config/simulcast_unittest.cc index 152a0f9525..94b48fea56 100644 --- a/video/config/simulcast_unittest.cc +++ b/video/config/simulcast_unittest.cc @@ -10,17 +10,33 @@ #include "video/config/simulcast.h" -#include "api/transport/field_trial_based_config.h" +#include +#include +#include +#include + +#include "api/units/data_rate.h" +#include "api/video/resolution.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/video_codec.h" #include "media/base/media_constants.h" -#include "test/field_trial.h" +#include "test/explicit_key_value_config.h" +#include "test/gmock.h" #include "test/gtest.h" +#include "video/config/video_encoder_config.h" namespace webrtc { namespace { -constexpr int kQpMax = 55; -constexpr double kBitratePriority = 2.0; +using test::ExplicitKeyValueConfig; +using ::testing::Combine; +using ::testing::SizeIs; +using ::testing::TestParamInfo; +using ::testing::TestWithParam; +using ::testing::Values; + constexpr bool kScreenshare = true; -constexpr int kDefaultTemporalLayers = 3; // Value from simulcast.cc. +constexpr int kDefaultTemporalLayers = 3; // Value from simulcast.cc. +constexpr int kDefaultH265TemporalLayers = 1; // Value from simulcast.cc. // Values from kSimulcastConfigs in simulcast.cc. const std::vector GetSimulcastBitrates720p() { @@ -36,17 +52,45 @@ const std::vector GetSimulcastBitrates720p() { streams[2].max_bitrate_bps = 2500000; return streams; } + +// Values from kSimulcastFormatsH265 in simulcast.cc +const std::vector GetH265SimulcastBitrates720p() { + std::vector streams(3); + streams[0].min_bitrate_bps = 30'000; + streams[0].target_bitrate_bps = 142'000; + streams[0].max_bitrate_bps = 142'000; + streams[1].min_bitrate_bps = 193'000; + streams[1].target_bitrate_bps = 420'000; + streams[1].max_bitrate_bps = 420'000; + streams[2].min_bitrate_bps = 481'000; + streams[2].target_bitrate_bps = 1'524'000; + streams[2].max_bitrate_bps = 1'524'000; + return streams; +} + +// Creates a vector of resolutions scaled down with 1/2 factor ordered from low +// to high. 
+std::vector CreateResolutions(int max_width, + int max_height, + int num_streams) { + std::vector resolutions(num_streams); + for (int i = 0; i < num_streams; ++i) { + resolutions[i].width = max_width >> (num_streams - i - 1); + resolutions[i].height = max_height >> (num_streams - i - 1); + } + return resolutions; +} } // namespace TEST(SimulcastTest, TotalMaxBitrateIsZeroForNoStreams) { std::vector streams; - EXPECT_EQ(0, cricket::GetTotalMaxBitrate(streams).bps()); + EXPECT_EQ(0, GetTotalMaxBitrate(streams).bps()); } TEST(SimulcastTest, GetTotalMaxBitrateForSingleStream) { std::vector streams(1); streams[0].max_bitrate_bps = 100000; - EXPECT_EQ(100000, cricket::GetTotalMaxBitrate(streams).bps()); + EXPECT_EQ(100000, GetTotalMaxBitrate(streams).bps()); } TEST(SimulcastTest, GetTotalMaxBitrateForMultipleStreams) { @@ -54,7 +98,7 @@ TEST(SimulcastTest, GetTotalMaxBitrateForMultipleStreams) { streams[0].target_bitrate_bps = 100000; streams[1].target_bitrate_bps = 200000; streams[2].max_bitrate_bps = 400000; - EXPECT_EQ(700000, cricket::GetTotalMaxBitrate(streams).bps()); + EXPECT_EQ(700000, GetTotalMaxBitrate(streams).bps()); } TEST(SimulcastTest, BandwidthAboveTotalMaxBitrateGivenToHighestStream) { @@ -66,29 +110,28 @@ TEST(SimulcastTest, BandwidthAboveTotalMaxBitrateGivenToHighestStream) { const webrtc::DataRate one_bps = webrtc::DataRate::BitsPerSec(1); // No bitrate above the total max to give to the highest stream. - const webrtc::DataRate max_total_bitrate = - cricket::GetTotalMaxBitrate(streams); - cricket::BoostMaxSimulcastLayer(max_total_bitrate, &streams); + const webrtc::DataRate max_total_bitrate = GetTotalMaxBitrate(streams); + BoostMaxSimulcastLayer(max_total_bitrate, &streams); EXPECT_EQ(400000, streams[2].max_bitrate_bps); - EXPECT_EQ(max_total_bitrate, cricket::GetTotalMaxBitrate(streams)); + EXPECT_EQ(max_total_bitrate, GetTotalMaxBitrate(streams)); // The bitrate above the total max should be given to the highest stream. 
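// Worked numbers for this check: GetTotalMaxBitrate sums the target bitrates
// of all but the top layer plus the top layer's max bitrate, i.e.
// 100000 + 200000 + 400000 = 700000 bps here, so boosting with one extra bps
// is expected to land entirely on streams[2].max_bitrate_bps (400001 bps).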
- cricket::BoostMaxSimulcastLayer(max_total_bitrate + one_bps, &streams); + BoostMaxSimulcastLayer(max_total_bitrate + one_bps, &streams); EXPECT_EQ(400000 + 1, streams[2].max_bitrate_bps); - EXPECT_EQ(max_total_bitrate + one_bps, cricket::GetTotalMaxBitrate(streams)); + EXPECT_EQ(max_total_bitrate + one_bps, GetTotalMaxBitrate(streams)); } TEST(SimulcastTest, GetConfig) { + const ExplicitKeyValueConfig trials(""); + const std::vector kExpected = GetSimulcastBitrates720p(); - const FieldTrialBasedConfig trials; - const size_t kMinLayers = 1; const size_t kMaxLayers = 3; - std::vector streams = cricket::GetSimulcastConfig( - kMinLayers, kMaxLayers, 1280, 720, kBitratePriority, kQpMax, - !kScreenshare, true, trials); + std::vector streams = + GetSimulcastConfig(CreateResolutions(1280, 720, kMaxLayers), + !kScreenshare, true, trials, webrtc::kVideoCodecVP8); - EXPECT_EQ(kMaxLayers, streams.size()); + ASSERT_THAT(streams, SizeIs(kMaxLayers)); EXPECT_EQ(320u, streams[0].width); EXPECT_EQ(180u, streams[0].height); EXPECT_EQ(640u, streams[1].width); @@ -98,32 +141,56 @@ TEST(SimulcastTest, GetConfig) { for (size_t i = 0; i < streams.size(); ++i) { EXPECT_EQ(size_t{kDefaultTemporalLayers}, streams[i].num_temporal_layers); - EXPECT_EQ(cricket::kDefaultVideoMaxFramerate, streams[i].max_framerate); - EXPECT_EQ(kQpMax, streams[i].max_qp); + EXPECT_EQ(kDefaultVideoMaxFramerate, streams[i].max_framerate); + EXPECT_EQ(-1, streams[i].max_qp); + EXPECT_EQ(kExpected[i].min_bitrate_bps, streams[i].min_bitrate_bps); + EXPECT_EQ(kExpected[i].target_bitrate_bps, streams[i].target_bitrate_bps); + EXPECT_EQ(kExpected[i].max_bitrate_bps, streams[i].max_bitrate_bps); + EXPECT_TRUE(streams[i].active); + } +} + +TEST(SimulcastTest, GetConfigH265) { + const ExplicitKeyValueConfig trials(""); + const std::vector kExpected = GetH265SimulcastBitrates720p(); + + const size_t kMaxLayers = 3; + std::vector streams = + GetSimulcastConfig(CreateResolutions(1280, 720, kMaxLayers), + !kScreenshare, true, trials, webrtc::kVideoCodecH265); + + ASSERT_THAT(streams, SizeIs(kMaxLayers)); + EXPECT_EQ(320u, streams[0].width); + EXPECT_EQ(180u, streams[0].height); + EXPECT_EQ(640u, streams[1].width); + EXPECT_EQ(360u, streams[1].height); + EXPECT_EQ(1280u, streams[2].width); + EXPECT_EQ(720u, streams[2].height); + + for (size_t i = 0; i < streams.size(); ++i) { + EXPECT_EQ(size_t{kDefaultH265TemporalLayers}, + streams[i].num_temporal_layers); + EXPECT_EQ(kDefaultVideoMaxFramerate, streams[i].max_framerate); + EXPECT_EQ(-1, streams[i].max_qp); EXPECT_EQ(kExpected[i].min_bitrate_bps, streams[i].min_bitrate_bps); EXPECT_EQ(kExpected[i].target_bitrate_bps, streams[i].target_bitrate_bps); EXPECT_EQ(kExpected[i].max_bitrate_bps, streams[i].max_bitrate_bps); EXPECT_TRUE(streams[i].active); } - // Currently set on lowest stream. 
- EXPECT_EQ(kBitratePriority, streams[0].bitrate_priority); - EXPECT_FALSE(streams[1].bitrate_priority); - EXPECT_FALSE(streams[2].bitrate_priority); } TEST(SimulcastTest, GetConfigWithBaseHeavyVP8TL3RateAllocation) { - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig trials( "WebRTC-UseBaseHeavyVP8TL3RateAllocation/Enabled/"); - FieldTrialBasedConfig trials; const std::vector kExpected = GetSimulcastBitrates720p(); - const size_t kMinLayers = 1; const size_t kMaxLayers = 3; - std::vector streams = cricket::GetSimulcastConfig( - kMinLayers, kMaxLayers, 1280, 720, kBitratePriority, kQpMax, - !kScreenshare, true, trials); + std::vector streams = + GetSimulcastConfig(CreateResolutions(1280, 720, kMaxLayers), + !kScreenshare, true, trials, webrtc::kVideoCodecVP8); + ASSERT_THAT(streams, SizeIs(kMaxLayers)); EXPECT_EQ(kExpected[0].min_bitrate_bps, streams[0].min_bitrate_bps); EXPECT_EQ(static_cast(0.4 * kExpected[0].target_bitrate_bps / 0.6), streams[0].target_bitrate_bps); @@ -137,184 +204,33 @@ TEST(SimulcastTest, GetConfigWithBaseHeavyVP8TL3RateAllocation) { } TEST(SimulcastTest, GetConfigWithLimitedMaxLayers) { - const size_t kMinLayers = 1; + ExplicitKeyValueConfig trials(""); + const size_t kMaxLayers = 2; - FieldTrialBasedConfig trials; - std::vector streams = cricket::GetSimulcastConfig( - kMinLayers, kMaxLayers, 1280, 720, kBitratePriority, kQpMax, - !kScreenshare, true, trials); + std::vector streams = + GetSimulcastConfig(CreateResolutions(1280, 720, kMaxLayers), + !kScreenshare, true, trials, webrtc::kVideoCodecVP8); - EXPECT_EQ(kMaxLayers, streams.size()); + ASSERT_THAT(streams, SizeIs(kMaxLayers)); EXPECT_EQ(640u, streams[0].width); EXPECT_EQ(360u, streams[0].height); EXPECT_EQ(1280u, streams[1].width); EXPECT_EQ(720u, streams[1].height); } -TEST(SimulcastTest, GetConfigWithLimitedMaxLayersForResolution) { - test::ScopedFieldTrials field_trials( - "WebRTC-LegacySimulcastLayerLimit/Enabled/"); - FieldTrialBasedConfig trials; - const size_t kMinLayers = 1; - const size_t kMaxLayers = 3; - std::vector streams = cricket::GetSimulcastConfig( - kMinLayers, kMaxLayers, 800, 600, kBitratePriority, kQpMax, !kScreenshare, - true, trials); - - EXPECT_EQ(2u, streams.size()); - EXPECT_EQ(400u, streams[0].width); - EXPECT_EQ(300u, streams[0].height); - EXPECT_EQ(800u, streams[1].width); - EXPECT_EQ(600u, streams[1].height); -} - -TEST(SimulcastTest, GetConfigWithLowResolutionScreenshare) { - test::ScopedFieldTrials field_trials( - "WebRTC-LegacySimulcastLayerLimit/Enabled/"); - FieldTrialBasedConfig trials; - const size_t kMinLayers = 1; - const size_t kMaxLayers = 3; - std::vector streams = cricket::GetSimulcastConfig( - kMinLayers, kMaxLayers, 100, 100, kBitratePriority, kQpMax, kScreenshare, - true, trials); - - // Simulcast streams number is never decreased for screenshare, - // even for very low resolution. 
- EXPECT_GT(streams.size(), 1u); -} - -TEST(SimulcastTest, GetConfigWithNotLimitedMaxLayersForResolution) { - test::ScopedFieldTrials field_trials( - "WebRTC-LegacySimulcastLayerLimit/Disabled/"); - FieldTrialBasedConfig trials; - const size_t kMinLayers = 1; - const size_t kMaxLayers = 3; - std::vector streams = cricket::GetSimulcastConfig( - kMinLayers, kMaxLayers, 800, 600, kBitratePriority, kQpMax, !kScreenshare, - true, trials); - - EXPECT_EQ(kMaxLayers, streams.size()); - EXPECT_EQ(200u, streams[0].width); - EXPECT_EQ(150u, streams[0].height); - EXPECT_EQ(400u, streams[1].width); - EXPECT_EQ(300u, streams[1].height); - EXPECT_EQ(800u, streams[2].width); - EXPECT_EQ(600u, streams[2].height); -} - -TEST(SimulcastTest, GetConfigWithNormalizedResolution) { - FieldTrialBasedConfig trials; - const size_t kMinLayers = 1; - const size_t kMaxLayers = 2; - std::vector streams = cricket::GetSimulcastConfig( - kMinLayers, kMaxLayers, 640 + 1, 360 + 1, kBitratePriority, kQpMax, - !kScreenshare, true, trials); - - // Must be divisible by |2 ^ (num_layers - 1)|. - EXPECT_EQ(kMaxLayers, streams.size()); - EXPECT_EQ(320u, streams[0].width); - EXPECT_EQ(180u, streams[0].height); - EXPECT_EQ(640u, streams[1].width); - EXPECT_EQ(360u, streams[1].height); -} - -TEST(SimulcastTest, GetConfigWithNormalizedResolutionDivisibleBy4) { - test::ScopedFieldTrials field_trials( - "WebRTC-NormalizeSimulcastResolution/Enabled-2/"); - FieldTrialBasedConfig trials; - - const size_t kMinLayers = 1; - const size_t kMaxLayers = 2; - std::vector streams = cricket::GetSimulcastConfig( - kMinLayers, kMaxLayers, 709, 501, kBitratePriority, kQpMax, !kScreenshare, - true, trials); - - // Must be divisible by |2 ^ 2|. - EXPECT_EQ(kMaxLayers, streams.size()); - EXPECT_EQ(354u, streams[0].width); - EXPECT_EQ(250u, streams[0].height); - EXPECT_EQ(708u, streams[1].width); - EXPECT_EQ(500u, streams[1].height); -} - -TEST(SimulcastTest, GetConfigWithNormalizedResolutionDivisibleBy8) { - test::ScopedFieldTrials field_trials( - "WebRTC-NormalizeSimulcastResolution/Enabled-3/"); - FieldTrialBasedConfig trials; - - const size_t kMinLayers = 1; - const size_t kMaxLayers = 2; - std::vector streams = cricket::GetSimulcastConfig( - kMinLayers, kMaxLayers, 709, 501, kBitratePriority, kQpMax, !kScreenshare, - true, trials); - - // Must be divisible by |2 ^ 3|. - EXPECT_EQ(kMaxLayers, streams.size()); - EXPECT_EQ(352u, streams[0].width); - EXPECT_EQ(248u, streams[0].height); - EXPECT_EQ(704u, streams[1].width); - EXPECT_EQ(496u, streams[1].height); -} - -TEST(SimulcastTest, GetConfigForLegacyLayerLimit) { - test::ScopedFieldTrials field_trials( - "WebRTC-LegacySimulcastLayerLimit/Enabled/"); - FieldTrialBasedConfig trials; - - const size_t kMinLayers = 1; - const int kMaxLayers = 3; - std::vector streams = cricket::GetSimulcastConfig( - kMinLayers, kMaxLayers, 320, 180, kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(1u, streams.size()); - - streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 640, 360, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(2u, streams.size()); - - streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 1920, 1080, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(3u, streams.size()); -} - -TEST(SimulcastTest, GetConfigForLegacyLayerLimitWithRequiredHD) { - test::ScopedFieldTrials field_trials( - "WebRTC-LegacySimulcastLayerLimit/Enabled/"); - FieldTrialBasedConfig trials; - - const size_t kMinLayers = 3; // "HD" layer must be present! 
- const int kMaxLayers = 3; - std::vector streams = cricket::GetSimulcastConfig( - kMinLayers, kMaxLayers, 320, 180, kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(3u, streams.size()); - - streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 640, 360, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(3u, streams.size()); - - streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 1920, 1080, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(3u, streams.size()); -} - TEST(SimulcastTest, GetConfigForScreenshareSimulcast) { - FieldTrialBasedConfig trials; - const size_t kMinLayers = 1; - const size_t kMaxLayers = 3; - std::vector streams = cricket::GetSimulcastConfig( - kMinLayers, kMaxLayers, 1400, 800, kBitratePriority, kQpMax, kScreenshare, - true, trials); - - EXPECT_GT(streams.size(), 1u); + ExplicitKeyValueConfig trials(""); + std::vector streams = GetSimulcastConfig( + std::vector{{.width = 1400, .height = 800}, + {.width = 1400, .height = 800}, + {.width = 1400, .height = 800}}, + kScreenshare, true, trials, webrtc::kVideoCodecVP8); + + EXPECT_THAT(streams, SizeIs(2)); for (size_t i = 0; i < streams.size(); ++i) { EXPECT_EQ(1400u, streams[i].width) << "Screen content never scaled."; EXPECT_EQ(800u, streams[i].height) << "Screen content never scaled."; - EXPECT_EQ(kQpMax, streams[i].max_qp); + EXPECT_EQ(-1, streams[i].max_qp); EXPECT_TRUE(streams[i].active); EXPECT_GT(streams[i].num_temporal_layers, size_t{1}); EXPECT_GT(streams[i].max_framerate, 0); @@ -325,46 +241,41 @@ TEST(SimulcastTest, GetConfigForScreenshareSimulcast) { } TEST(SimulcastTest, GetConfigForScreenshareSimulcastWithLimitedMaxLayers) { - FieldTrialBasedConfig trials; - const size_t kMinLayers = 1; - const size_t kMaxLayers = 1; - std::vector streams = cricket::GetSimulcastConfig( - kMinLayers, kMaxLayers, 1400, 800, kBitratePriority, kQpMax, kScreenshare, - true, trials); - - EXPECT_EQ(kMaxLayers, streams.size()); + ExplicitKeyValueConfig trials(""); + std::vector streams = GetSimulcastConfig( + std::vector{{.width = 1400, .height = 800}}, kScreenshare, + true, trials, webrtc::kVideoCodecVP8); + EXPECT_THAT(streams, SizeIs(1)); } TEST(SimulcastTest, AveragesBitratesForNonStandardResolution) { - FieldTrialBasedConfig trials; - const size_t kMinLayers = 1; - const size_t kMaxLayers = 3; - std::vector streams = cricket::GetSimulcastConfig( - kMinLayers, kMaxLayers, 900, 800, kBitratePriority, kQpMax, !kScreenshare, - true, trials); - - EXPECT_EQ(kMaxLayers, streams.size()); - EXPECT_EQ(900u, streams[2].width); - EXPECT_EQ(800u, streams[2].height); - EXPECT_EQ(1850000, streams[2].max_bitrate_bps); - EXPECT_EQ(1850000, streams[2].target_bitrate_bps); - EXPECT_EQ(475000, streams[2].min_bitrate_bps); + ExplicitKeyValueConfig trials(""); + std::vector streams = + GetSimulcastConfig(std::vector{{.width = 900, .height = 800}}, + !kScreenshare, true, trials, webrtc::kVideoCodecVP8); + + ASSERT_THAT(streams, SizeIs(1)); + EXPECT_EQ(900u, streams[0].width); + EXPECT_EQ(800u, streams[0].height); + EXPECT_EQ(1850000, streams[0].max_bitrate_bps); + EXPECT_EQ(1850000, streams[0].target_bitrate_bps); + EXPECT_EQ(475000, streams[0].min_bitrate_bps); } TEST(SimulcastTest, BitratesForCloseToStandardResolution) { - const size_t kMinLayers = 1; + ExplicitKeyValueConfig trials(""); + const size_t kMaxLayers = 3; // Resolution very close to 720p in number of pixels const size_t kWidth = 1280; const size_t kHeight = 716; const std::vector kExpectedNear = 
GetSimulcastBitrates720p(); - FieldTrialBasedConfig trials; - std::vector streams = cricket::GetSimulcastConfig( - kMinLayers, kMaxLayers, kWidth, kHeight, kBitratePriority, kQpMax, - !kScreenshare, true, trials); + std::vector streams = + GetSimulcastConfig(CreateResolutions(kWidth, kHeight, kMaxLayers), + !kScreenshare, true, trials, webrtc::kVideoCodecVP8); - EXPECT_EQ(kMaxLayers, streams.size()); + ASSERT_THAT(streams, SizeIs(kMaxLayers)); EXPECT_EQ(kWidth, streams[2].width); EXPECT_EQ(kHeight, streams[2].height); for (size_t i = 0; i < streams.size(); ++i) { @@ -378,104 +289,86 @@ TEST(SimulcastTest, BitratesForCloseToStandardResolution) { } TEST(SimulcastTest, MaxLayersWithRoundUpDisabled) { - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig trials( "WebRTC-SimulcastLayerLimitRoundUp/max_ratio:0.0/"); - FieldTrialBasedConfig trials; + const size_t kMinLayers = 1; const int kMaxLayers = 3; - std::vector streams; - streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 960, 540, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(3u, streams.size()); + size_t num_layers = LimitSimulcastLayerCount(kMinLayers, kMaxLayers, 960, 540, + trials, webrtc::kVideoCodecVP8); + EXPECT_EQ(num_layers, 3u); // <960x540: 2 layers - streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 960, 539, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(2u, streams.size()); - streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 270, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(2u, streams.size()); + num_layers = LimitSimulcastLayerCount(kMinLayers, kMaxLayers, 960, 539, + trials, webrtc::kVideoCodecVP8); + EXPECT_EQ(num_layers, 2u); + num_layers = LimitSimulcastLayerCount(kMinLayers, kMaxLayers, 480, 270, + trials, webrtc::kVideoCodecVP8); + EXPECT_EQ(num_layers, 2u); // <480x270: 1 layer - streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 269, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(1u, streams.size()); + num_layers = LimitSimulcastLayerCount(kMinLayers, kMaxLayers, 480, 269, + trials, webrtc::kVideoCodecVP8); + EXPECT_EQ(num_layers, 1u); } TEST(SimulcastTest, MaxLayersWithDefaultRoundUpRatio) { // Default: "WebRTC-SimulcastLayerLimitRoundUp/max_ratio:0.1/" - FieldTrialBasedConfig trials; + ExplicitKeyValueConfig trials(""); const size_t kMinLayers = 1; const int kMaxLayers = 3; - std::vector streams; - streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 960, 540, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(3u, streams.size()); + size_t num_layers = LimitSimulcastLayerCount(kMinLayers, kMaxLayers, 960, 540, + trials, webrtc::kVideoCodecVP8); + EXPECT_EQ(num_layers, 3u); // Lowest cropped height where max layers from higher resolution is used. 
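// One way to combine the two new entry points (a sketch only; a real caller
// would build the per-layer resolutions itself, CreateResolutions is just the
// test helper from this file):
//   size_t n = LimitSimulcastLayerCount(/*min_num_layers=*/1,
//                                       /*max_num_layers=*/3, width, height,
//                                       trials, kVideoCodecVP8);
//   std::vector<VideoStream> streams = GetSimulcastConfig(
//       CreateResolutions(width, height, n),
//       /*is_screenshare_with_conference_mode=*/false,
//       /*temporal_layers_supported=*/true, trials, kVideoCodecVP8);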
- streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 960, 512, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(3u, streams.size()); - streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 960, 508, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(2u, streams.size()); - streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 270, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(2u, streams.size()); + num_layers = LimitSimulcastLayerCount(kMinLayers, kMaxLayers, 960, 512, + trials, webrtc::kVideoCodecVP8); + EXPECT_EQ(num_layers, 3u); + num_layers = LimitSimulcastLayerCount(kMinLayers, kMaxLayers, 960, 508, + trials, webrtc::kVideoCodecVP8); + EXPECT_EQ(num_layers, 2u); + num_layers = LimitSimulcastLayerCount(kMinLayers, kMaxLayers, 480, 270, + trials, webrtc::kVideoCodecVP8); + EXPECT_EQ(num_layers, 2u); // Lowest cropped height where max layers from higher resolution is used. - streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 256, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(2u, streams.size()); - streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 254, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(1u, streams.size()); + num_layers = LimitSimulcastLayerCount(kMinLayers, kMaxLayers, 480, 256, + trials, webrtc::kVideoCodecVP8); + EXPECT_EQ(num_layers, 2u); + num_layers = LimitSimulcastLayerCount(kMinLayers, kMaxLayers, 480, 254, + trials, webrtc::kVideoCodecVP8); + EXPECT_EQ(num_layers, 1u); } TEST(SimulcastTest, MaxLayersWithRoundUpRatio) { - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig trials( "WebRTC-SimulcastLayerLimitRoundUp/max_ratio:0.13/"); - FieldTrialBasedConfig trials; + const size_t kMinLayers = 1; const int kMaxLayers = 3; - std::vector streams; - streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 270, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(2u, streams.size()); + size_t num_layers = LimitSimulcastLayerCount(kMinLayers, kMaxLayers, 480, 270, + trials, webrtc::kVideoCodecVP8); + EXPECT_EQ(num_layers, 2u); // Lowest cropped height where max layers from higher resolution is used. - streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 252, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(2u, streams.size()); - streams = cricket::GetSimulcastConfig(kMinLayers, kMaxLayers, 480, 250, - kBitratePriority, kQpMax, !kScreenshare, - true, trials); - EXPECT_EQ(1u, streams.size()); + num_layers = LimitSimulcastLayerCount(kMinLayers, kMaxLayers, 480, 252, + trials, webrtc::kVideoCodecVP8); + EXPECT_EQ(num_layers, 2u); + num_layers = LimitSimulcastLayerCount(kMinLayers, kMaxLayers, 480, 250, + trials, webrtc::kVideoCodecVP8); + EXPECT_EQ(num_layers, 1u); } TEST(SimulcastTest, BitratesInterpolatedForResBelow180p) { // TODO(webrtc:12415): Remove when feature launches. 
- test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig trials( "WebRTC-LowresSimulcastBitrateInterpolation/Enabled/"); const size_t kMaxLayers = 3; - FieldTrialBasedConfig trials; - - std::vector streams = cricket::GetSimulcastConfig( - /* min_layers = */ 1, kMaxLayers, /* width = */ 960, /* height = */ 540, - kBitratePriority, kQpMax, !kScreenshare, true, trials); + std::vector streams = GetSimulcastConfig( + CreateResolutions(/*max_width=*/960, /*max_height=*/540, kMaxLayers), + !kScreenshare, true, trials, webrtc::kVideoCodecVP8); - ASSERT_EQ(streams.size(), kMaxLayers); + ASSERT_THAT(streams, SizeIs(kMaxLayers)); EXPECT_EQ(240u, streams[0].width); EXPECT_EQ(135u, streams[0].height); EXPECT_EQ(streams[0].max_bitrate_bps, 112500); @@ -485,16 +378,14 @@ TEST(SimulcastTest, BitratesInterpolatedForResBelow180p) { TEST(SimulcastTest, BitratesConsistentForVerySmallRes) { // TODO(webrtc:12415): Remove when feature launches. - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig trials( "WebRTC-LowresSimulcastBitrateInterpolation/Enabled/"); - FieldTrialBasedConfig trials; - - std::vector streams = cricket::GetSimulcastConfig( - /* min_layers = */ 1, /* max_layers = */ 3, /* width = */ 1, - /* height = */ 1, kBitratePriority, kQpMax, !kScreenshare, true, trials); + std::vector streams = + GetSimulcastConfig(std::vector{{.width = 1, .height = 1}}, + !kScreenshare, true, trials, webrtc::kVideoCodecVP8); - ASSERT_TRUE(!streams.empty()); + ASSERT_THAT(streams, SizeIs(1)); EXPECT_EQ(1u, streams[0].width); EXPECT_EQ(1u, streams[0].height); EXPECT_EQ(streams[0].max_bitrate_bps, 30000); @@ -504,17 +395,15 @@ TEST(SimulcastTest, BitratesConsistentForVerySmallRes) { TEST(SimulcastTest, BitratesNotInterpolatedForResBelow180pWhenDisabledTrialSet) { - test::ScopedFieldTrials field_trials( + ExplicitKeyValueConfig trials( "WebRTC-LowresSimulcastBitrateInterpolation/Disabled/"); const size_t kMaxLayers = 3; - FieldTrialBasedConfig trials; + std::vector streams = GetSimulcastConfig( + CreateResolutions(/*max_width=*/960, /*max_height=*/540, kMaxLayers), + !kScreenshare, true, trials, webrtc::kVideoCodecVP8); - std::vector streams = cricket::GetSimulcastConfig( - /* min_layers = */ 1, kMaxLayers, /* width = */ 960, /* height = */ 540, - kBitratePriority, kQpMax, !kScreenshare, true, trials); - - ASSERT_EQ(streams.size(), kMaxLayers); + ASSERT_THAT(streams, SizeIs(kMaxLayers)); EXPECT_EQ(240u, streams[0].width); EXPECT_EQ(135u, streams[0].height); EXPECT_EQ(streams[0].max_bitrate_bps, 200000); @@ -522,4 +411,108 @@ TEST(SimulcastTest, EXPECT_EQ(streams[0].min_bitrate_bps, 30000); } +struct BitrateLimitsTestParams { + int width; + int height; + std::vector expected_min_bitrate_kbps; + std::vector expected_max_bitrate_kbps; +}; + +using BitrateLimitsTest = + TestWithParam>; + +TEST_P(BitrateLimitsTest, VerifyBitrateLimits) { + const auto codec_type = std::get(GetParam()); + const auto test_params = std::get(GetParam()); + ExplicitKeyValueConfig trials(""); + std::vector streams = GetSimulcastConfig( + CreateResolutions(test_params.width, test_params.height, + /*num_streams=*/3), + !kScreenshare, + /*temporal_layers_supported=*/true, trials, codec_type); + ASSERT_THAT(streams, SizeIs(3)); + for (size_t i = 0; i < streams.size(); ++i) { + EXPECT_EQ(streams[i].min_bitrate_bps / 1000, + test_params.expected_min_bitrate_kbps[i]); + EXPECT_EQ(streams[i].max_bitrate_bps / 1000, + test_params.expected_max_bitrate_kbps[i]); + } +} + +INSTANTIATE_TEST_SUITE_P( + Vp8H264, + BitrateLimitsTest, + 
Combine(Values(webrtc::kVideoCodecVP8, webrtc::kVideoCodecH264), + Values(BitrateLimitsTestParams{.width = 1920, + .height = 1080, + .expected_min_bitrate_kbps{150, 350, + 800}, + .expected_max_bitrate_kbps{450, 1200, + 5000}}, + BitrateLimitsTestParams{ + .width = 1280, + .height = 720, + .expected_min_bitrate_kbps{30, 150, 600}, + .expected_max_bitrate_kbps{200, 700, 2500}}, + BitrateLimitsTestParams{ + .width = 960, + .height = 540, + .expected_min_bitrate_kbps{30, 150, 350}, + .expected_max_bitrate_kbps{200, 450, 1200}})), + [](const TestParamInfo& info) { + return CodecTypeToPayloadString( + std::get(info.param)) + + std::to_string( + std::get(info.param).height); + }); + +INSTANTIATE_TEST_SUITE_P( + Av1Vp9H265, + BitrateLimitsTest, + Combine( + Values( +#ifdef RTC_ENABLE_H265 + webrtc::kVideoCodecH265, +#endif + webrtc::kVideoCodecAV1, + webrtc::kVideoCodecVP9), + Values( + BitrateLimitsTestParams{.width = 1920, + .height = 1080, + .expected_min_bitrate_kbps{121, 337, 769}, + .expected_max_bitrate_kbps{257, 879, 3367}}, + BitrateLimitsTestParams{.width = 1280, + .height = 720, + .expected_min_bitrate_kbps{30, 193, 481}, + .expected_max_bitrate_kbps{142, 420, 1524}}, + BitrateLimitsTestParams{ + .width = 960, + .height = 540, + .expected_min_bitrate_kbps{30, 121, 337}, + .expected_max_bitrate_kbps{101, 257, 879}})), + [](const TestParamInfo& info) { + return CodecTypeToPayloadString( + std::get(info.param)) + + std::to_string( + std::get(info.param).height); + }); + +#ifdef RTC_ENABLE_H265 +// Test that for H.265, the simulcast layers are created with the correct +// default temporal layers, before that is overrided by application settings. +TEST(SimulcastTest, GetConfigForH265) { + const ExplicitKeyValueConfig trials(""); + + const size_t kMaxLayers = 3; + std::vector streams = + GetSimulcastConfig(CreateResolutions(1280, 720, kMaxLayers), + !kScreenshare, true, trials, webrtc::kVideoCodecH265); + + ASSERT_THAT(streams, SizeIs(kMaxLayers)); + for (size_t i = 0; i < streams.size(); ++i) { + EXPECT_EQ(1ul, streams[i].num_temporal_layers); + } +} +#endif + } // namespace webrtc diff --git a/video/config/video_encoder_config.cc b/video/config/video_encoder_config.cc index 6ea2052138..ca3b20e436 100644 --- a/video/config/video_encoder_config.cc +++ b/video/config/video_encoder_config.cc @@ -24,7 +24,7 @@ VideoStream::VideoStream() max_bitrate_bps(-1), scale_resolution_down_by(-1.), max_qp(-1), - num_temporal_layers(absl::nullopt), + num_temporal_layers(std::nullopt), active(true) {} VideoStream::VideoStream(const VideoStream& other) = default; @@ -32,7 +32,7 @@ VideoStream::~VideoStream() = default; std::string VideoStream::ToString() const { char buf[1024]; - rtc::SimpleStringBuilder ss(buf); + SimpleStringBuilder ss(buf); ss << "{width: " << width; ss << ", height: " << height; ss << ", max_framerate: " << max_framerate; @@ -44,7 +44,7 @@ std::string VideoStream::ToString() const { ss << ", bitrate_priority: " << bitrate_priority.value_or(0); ss << ", active: " << active; ss << ", scale_down_by: " << scale_resolution_down_by; - + ss << '}'; return ss.str(); } @@ -59,7 +59,8 @@ VideoEncoderConfig::VideoEncoderConfig() bitrate_priority(1.0), number_of_streams(0), legacy_conference_mode(false), - is_quality_scaling_allowed(false) {} + is_quality_scaling_allowed(false), + max_qp(-1) {} VideoEncoderConfig::VideoEncoderConfig(VideoEncoderConfig&&) = default; @@ -67,9 +68,8 @@ VideoEncoderConfig::~VideoEncoderConfig() = default; std::string VideoEncoderConfig::ToString() const { char 
buf[1024]; - rtc::SimpleStringBuilder ss(buf); - ss << "{codec_type: "; - ss << CodecTypeToPayloadString(codec_type); + SimpleStringBuilder ss(buf); + ss << "{codec_type: " << CodecTypeToPayloadString(codec_type); ss << ", content_type: "; switch (content_type) { case ContentType::kRealtimeVideo: @@ -82,12 +82,27 @@ std::string VideoEncoderConfig::ToString() const { ss << ", frame_drop_enabled: " << frame_drop_enabled; ss << ", encoder_specific_settings: "; ss << (encoder_specific_settings != nullptr ? "(ptr)" : "NULL"); - ss << ", min_transmit_bitrate_bps: " << min_transmit_bitrate_bps; + ss << ", number_of_streams: " << number_of_streams; + ss << ", legacy_conference_mode: " << legacy_conference_mode; + ss << ", is_quality_scaling_allowed: " << is_quality_scaling_allowed; + ss << ", max_qp: " << max_qp; + for (size_t n = 0; n < simulcast_layers.size(); ++n) { + ss << ", simulcast_layers[" << n << "]: " << simulcast_layers[n].ToString(); + } ss << '}'; return ss.str(); } +bool VideoEncoderConfig::HasScaleResolutionDownTo() const { + for (const VideoStream& simulcast_layer : simulcast_layers) { + if (simulcast_layer.scale_resolution_down_to.has_value()) { + return true; + } + } + return false; +} + VideoEncoderConfig::VideoEncoderConfig(const VideoEncoderConfig&) = default; void VideoEncoderConfig::EncoderSpecificSettings::FillEncoderSpecificSettings( @@ -96,6 +111,8 @@ void VideoEncoderConfig::EncoderSpecificSettings::FillEncoderSpecificSettings( FillVideoCodecVp8(codec->VP8()); } else if (codec->codecType == kVideoCodecVP9) { FillVideoCodecVp9(codec->VP9()); + } else if (codec->codecType == kVideoCodecAV1) { + FillVideoCodecAv1(codec->AV1()); } else { RTC_DCHECK_NOTREACHED() << "Encoder specifics set/used for unknown codec type."; @@ -112,6 +129,11 @@ void VideoEncoderConfig::EncoderSpecificSettings::FillVideoCodecVp9( RTC_DCHECK_NOTREACHED(); } +void VideoEncoderConfig::EncoderSpecificSettings::FillVideoCodecAv1( + VideoCodecAV1* av1_settings) const { + RTC_DCHECK_NOTREACHED(); +} + VideoEncoderConfig::Vp8EncoderSpecificSettings::Vp8EncoderSpecificSettings( const VideoCodecVP8& specifics) : specifics_(specifics) {} @@ -130,4 +152,13 @@ void VideoEncoderConfig::Vp9EncoderSpecificSettings::FillVideoCodecVp9( *vp9_settings = specifics_; } +VideoEncoderConfig::Av1EncoderSpecificSettings::Av1EncoderSpecificSettings( + const VideoCodecAV1& specifics) + : specifics_(specifics) {} + +void VideoEncoderConfig::Av1EncoderSpecificSettings::FillVideoCodecAv1( + VideoCodecAV1* av1_settings) const { + *av1_settings = specifics_; +} + } // namespace webrtc diff --git a/video/config/video_encoder_config.h b/video/config/video_encoder_config.h index 59c9a39f82..adb78c7706 100644 --- a/video/config/video_encoder_config.h +++ b/video/config/video_encoder_config.h @@ -13,10 +13,11 @@ #include +#include #include #include -#include "absl/types/optional.h" +#include "api/field_trials_view.h" #include "api/scoped_refptr.h" #include "api/video/resolution.h" #include "api/video_codecs/scalability_mode.h" @@ -35,7 +36,7 @@ struct VideoStream { // Width/Height in pixels. // This is the actual width and height used to configure encoder, - // which might be less than `requested_resolution` due to adaptation + // which might be less than `scale_resolution_down_to` due to adaptation // or due to the source providing smaller frames than requested. 
size_t width; size_t height; @@ -61,27 +62,27 @@ struct VideoStream { // (meaning that this field _must_ be set), and for signaling the app-level // encoder settings (meaning that the field _may_ be set). We should separate // this and remove this optional instead. - absl::optional num_temporal_layers; + std::optional num_temporal_layers; // The priority of this stream, to be used when allocating resources // between multiple streams. - absl::optional bitrate_priority; + std::optional bitrate_priority; - absl::optional scalability_mode; + std::optional scalability_mode; // If this stream is enabled by the user, or not. bool active; // An optional user supplied max_frame_resolution // than can be set independently of (adapted) VideoSource. - // This value is set from RtpEncodingParameters::requested_resolution + // This value is set from RtpEncodingParameters::scale_resolution_down_to // (i.e. used for signaling app-level settings). // // The actual encode resolution is in `width` and `height`, - // which can be lower than requested_resolution, + // which can be lower than scale_resolution_down_to, // e.g. if source only provides lower resolution or // if resource adaptation is active. - absl::optional requested_resolution; + std::optional scale_resolution_down_to; }; class VideoEncoderConfig { @@ -89,8 +90,8 @@ class VideoEncoderConfig { // These are reference counted to permit copying VideoEncoderConfig and be // kept alive until all encoder_specific_settings go out of scope. // TODO(kthelgason): Consider removing the need for copying VideoEncoderConfig - // and use absl::optional for encoder_specific_settings instead. - class EncoderSpecificSettings : public rtc::RefCountInterface { + // and use std::optional for encoder_specific_settings instead. + class EncoderSpecificSettings : public RefCountInterface { public: // TODO(pbos): Remove FillEncoderSpecificSettings as soon as VideoCodec is // not in use and encoder implementations ask for codec-specific structs @@ -99,6 +100,7 @@ class VideoEncoderConfig { virtual void FillVideoCodecVp8(VideoCodecVP8* vp8_settings) const; virtual void FillVideoCodecVp9(VideoCodecVP9* vp9_settings) const; + virtual void FillVideoCodecAv1(VideoCodecAV1* av1_settings) const; private: ~EncoderSpecificSettings() override {} @@ -123,18 +125,28 @@ class VideoEncoderConfig { VideoCodecVP9 specifics_; }; + class Av1EncoderSpecificSettings : public EncoderSpecificSettings { + public: + explicit Av1EncoderSpecificSettings(const VideoCodecAV1& specifics); + void FillVideoCodecAv1(VideoCodecAV1* av1_settings) const override; + + private: + VideoCodecAV1 specifics_; + }; + enum class ContentType { kRealtimeVideo, kScreen, }; - class VideoStreamFactoryInterface : public rtc::RefCountInterface { + class VideoStreamFactoryInterface : public RefCountInterface { public: // An implementation should return a std::vector with the // wanted VideoStream settings for the given video resolution. // The size of the vector may not be larger than // `encoder_config.number_of_streams`. virtual std::vector CreateEncoderStreams( + const FieldTrialsView& field_trials, int frame_width, int frame_height, const VideoEncoderConfig& encoder_config) = 0; @@ -154,6 +166,8 @@ class VideoEncoderConfig { ~VideoEncoderConfig(); std::string ToString() const; + bool HasScaleResolutionDownTo() const; + // TODO(bugs.webrtc.org/6883): Consolidate on one of these. 
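// Usage sketch for the new AV1 settings class (illustrative only, not taken
// from this change; `config` is a VideoEncoderConfig and make_ref_counted is
// assumed to be available from api/make_ref_counted.h):
//   VideoCodecAV1 av1_settings = {};
//   config.encoder_specific_settings =
//       make_ref_counted<VideoEncoderConfig::Av1EncoderSpecificSettings>(
//           av1_settings);
// The encoder side then receives the struct via FillVideoCodecAv1(), mirroring
// the existing VP8/VP9 specific-settings classes.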
VideoCodecType codec_type; SdpVideoFormat video_format; @@ -161,11 +175,11 @@ class VideoEncoderConfig { // Note: This factory can be unset, and VideoStreamEncoder will // then use the EncoderStreamFactory. The factory is only set by // tests. - rtc::scoped_refptr video_stream_factory; + scoped_refptr video_stream_factory; std::vector spatial_layers; ContentType content_type; bool frame_drop_enabled; - rtc::scoped_refptr encoder_specific_settings; + scoped_refptr encoder_specific_settings; // Padding will be used up to this bitrate regardless of the bitrate produced // by the encoder. Padding above what's actually produced by the encoder helps diff --git a/video/corruption_detection/BUILD.gn b/video/corruption_detection/BUILD.gn new file mode 100644 index 0000000000..f2beeadf21 --- /dev/null +++ b/video/corruption_detection/BUILD.gn @@ -0,0 +1,241 @@ +# Copyright 2024 The WebRTC project authors. All rights reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../webrtc.gni") + +rtc_library("corruption_classifier") { + sources = [ + "corruption_classifier.cc", + "corruption_classifier.h", + ] + deps = [ + ":halton_frame_sampler", + "../../api:array_view", + "../../rtc_base:checks", + "../../rtc_base:logging", + ] +} + +rtc_library("frame_instrumentation_evaluation") { + sources = [ + "frame_instrumentation_evaluation.cc", + "frame_instrumentation_evaluation.h", + ] + deps = [ + ":corruption_classifier", + ":halton_frame_sampler", + "../../api:array_view", + "../../api:scoped_refptr", + "../../api/video:video_frame", + "../../common_video:frame_instrumentation_data", + "../../rtc_base:checks", + "../../rtc_base:logging", + ] +} + +rtc_library("frame_instrumentation_generator") { + sources = [ + "frame_instrumentation_generator.cc", + "frame_instrumentation_generator.h", + ] + deps = [ + ":generic_mapping_functions", + ":halton_frame_sampler", + "../../api:scoped_refptr", + "../../api/video:corruption_detection_filter_settings", + "../../api/video:encoded_image", + "../../api/video:video_frame", + "../../api/video:video_frame_type", + "../../api/video_codecs:video_codecs_api", + "../../common_video:frame_instrumentation_data", + "../../modules:module_api_public", + "../../modules/video_coding:video_coding_utility", + "../../rtc_base:checks", + "../../rtc_base:logging", + "../../rtc_base:macromagic", + "../../rtc_base/synchronization:mutex", + "//third_party/abseil-cpp/absl/algorithm:container", + ] +} + +rtc_library("frame_pair_corruption_score") { + sources = [ + "frame_pair_corruption_score.cc", + "frame_pair_corruption_score.h", + ] + deps = [ + ":corruption_classifier", + ":generic_mapping_functions", + ":halton_frame_sampler", + ":utils", + "../../api:scoped_refptr", + "../../api/video:video_frame", + "../../rtc_base:checks", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("generic_mapping_functions") { + sources = [ + "generic_mapping_functions.cc", + "generic_mapping_functions.h", + ] + deps = [ + "../../api/video:corruption_detection_filter_settings", + "../../api/video:video_frame", + "../../api/video_codecs:video_codecs_api", + "../../rtc_base:checks", + ] +} + +rtc_library("halton_frame_sampler") { + sources = [ + "halton_frame_sampler.cc", + 
"halton_frame_sampler.h", + ] + deps = [ + ":halton_sequence", + "../../api:scoped_refptr", + "../../api/video:video_frame", + "../../rtc_base:checks", + "../../rtc_base:logging", + "../../rtc_base:safe_minmax", + ] +} + +rtc_library("halton_sequence") { + sources = [ + "halton_sequence.cc", + "halton_sequence.h", + ] + deps = [ "../../rtc_base:checks" ] +} + +rtc_library("utils") { + sources = [ + "utils.cc", + "utils.h", + ] + deps = [ + "../../api:scoped_refptr", + "../../api/video:video_frame", + "//third_party/abseil-cpp/absl/strings:string_view", + "//third_party/abseil-cpp/absl/strings:strings", + ] +} + +if (rtc_include_tests) { + rtc_library("corruption_classifier_unittest") { + testonly = true + sources = [ "corruption_classifier_unittest.cc" ] + deps = [ + ":corruption_classifier", + ":halton_frame_sampler", + "../../test:test_support", + ] + } + + rtc_library("frame_instrumentation_evaluation_unittest") { + testonly = true + sources = [ "frame_instrumentation_evaluation_unittest.cc" ] + deps = [ + ":frame_instrumentation_evaluation", + "../../api:scoped_refptr", + "../../api/video:video_frame", + "../../common_video:frame_instrumentation_data", + "../../test:test_support", + ] + } + + rtc_library("frame_instrumentation_generator_unittest") { + testonly = true + sources = [ "frame_instrumentation_generator_unittest.cc" ] + deps = [ + ":frame_instrumentation_generator", + "../../api:make_ref_counted", + "../../api:scoped_refptr", + "../../api/video:corruption_detection_filter_settings", + "../../api/video:encoded_image", + "../../api/video:video_frame", + "../../api/video:video_frame_type", + "../../common_video:frame_instrumentation_data", + "../../rtc_base:refcount", + "../../test:test_support", + ] + } + + rtc_library("frame_pair_corruption_score_unittest") { + testonly = true + sources = [ "frame_pair_corruption_score_unittest.cc" ] + deps = [ + ":frame_pair_corruption_score", + "../../api:scoped_refptr", + "../../api/video:video_frame", + "../../test:fileutils", + "../../test:test_support", + "../../test:video_test_support", + "//third_party/abseil-cpp/absl/strings:string_view", + ] + data = [ "../../resources/ConferenceMotion_1280_720_50.yuv" ] + } + + rtc_library("generic_mapping_functions_unittest") { + testonly = true + sources = [ "generic_mapping_functions_unittest.cc" ] + deps = [ + ":generic_mapping_functions", + "../../api/video:video_frame", + "../../test:test_support", + ] + } + + rtc_library("halton_frame_sampler_unittest") { + testonly = true + sources = [ "halton_frame_sampler_unittest.cc" ] + deps = [ + ":halton_frame_sampler", + "../../api:scoped_refptr", + "../../api/video:video_frame", + "../../test:test_support", + ] + } + + rtc_library("halton_sequence_unittest") { + testonly = true + sources = [ "halton_sequence_unittest.cc" ] + deps = [ + ":halton_sequence", + "../../test:test_support", + ] + } + + rtc_library("utils_unittest") { + testonly = true + sources = [ "utils_unittest.cc" ] + deps = [ + ":utils", + "../../api/video:video_frame", + "../../test:test_support", + ] + } + + rtc_library("corruption_detection_tests") { + testonly = true + sources = [] + deps = [ + ":corruption_classifier_unittest", + ":frame_instrumentation_evaluation_unittest", + ":frame_instrumentation_generator_unittest", + ":frame_pair_corruption_score_unittest", + ":generic_mapping_functions_unittest", + ":halton_frame_sampler_unittest", + ":halton_sequence_unittest", + ":utils_unittest", + ] + } +} diff --git a/video/corruption_detection/corruption_classifier.cc 
b/video/corruption_detection/corruption_classifier.cc new file mode 100644 index 0000000000..0c37dae5b9 --- /dev/null +++ b/video/corruption_detection/corruption_classifier.cc @@ -0,0 +1,105 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/corruption_detection/corruption_classifier.h" + +#include +#include +#include + +#include "api/array_view.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "video/corruption_detection/halton_frame_sampler.h" + +namespace webrtc { + +CorruptionClassifier::CorruptionClassifier(float scale_factor) + : config_(ScalarConfig{.scale_factor = scale_factor}) { + RTC_CHECK_GT(scale_factor, 0) << "The scale factor must be positive."; + RTC_LOG(LS_INFO) << "Calculating corruption probability using scale factor."; +} + +CorruptionClassifier::CorruptionClassifier(float growth_rate, float midpoint) + : config_(LogisticFunctionConfig{.growth_rate = growth_rate, + .midpoint = midpoint}) { + RTC_CHECK_GT(growth_rate, 0) + << "As the `score` is defined now (low score means probably not " + "corrupted and vice versa), the growth rate must be positive to have " + "a logistic function that is monotonically increasing."; + RTC_LOG(LS_INFO) + << "Calculating corruption probability using logistic function."; +} + +double CorruptionClassifier::CalculateCorruptionProbability( + ArrayView filtered_original_samples, + ArrayView filtered_compressed_samples, + int luma_threshold, + int chroma_threshold) const { + double loss = GetScore(filtered_original_samples, filtered_compressed_samples, + luma_threshold, chroma_threshold); + + if (const auto* scalar_config = std::get_if(&config_)) { + // Fitting the unbounded loss to the interval of [0, 1] using a simple scale + // factor and capping the loss to 1. + return std::min(loss / scalar_config->scale_factor, 1.0); + } + + const auto config = std::get_if(&config_); + RTC_DCHECK(config); + // Fitting the unbounded loss to the interval of [0, 1] using the logistic + // function. + return 1 / (1 + std::exp(-config->growth_rate * (loss - config->midpoint))); +} + +// The score is calculated according to the following formula : +// +// score = (sum_i max{(|original_i - compressed_i| - threshold, 0)^2}) / N +// +// where N is the number of samples, i in [0, N), and the threshold is +// either `luma_threshold` or `chroma_threshold` depending on whether the +// sample is luma or chroma. 
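// Equivalently: score = (1/N) * sum_i max(|original_i - compressed_i| - threshold_i, 0)^2,
// where threshold_i is the luma or chroma threshold for sample i.
// Worked example (values taken from the unit tests for this class): with
// original samples {1.0, 2.5, 0.5}, compressed samples {5.2, 6.7, 3.0},
// luma_threshold = 3 and chroma_threshold = 2, the absolute differences are
// {4.2, 4.2, 2.5}, so score = ((4.2 - 3)^2 + (4.2 - 3)^2 + (2.5 - 2)^2) / 3
// ~= 1.043. The scalar mapping with scale_factor = 14 then gives
// 1.043 / 14 ~= 0.0745, and the logistic mapping with growth_rate = 1 and
// midpoint = 7 gives 1 / (1 + exp(-(1.043 - 7))) ~= 0.0026.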
+double CorruptionClassifier::GetScore( + ArrayView filtered_original_samples, + ArrayView filtered_compressed_samples, + int luma_threshold, + int chroma_threshold) const { + RTC_DCHECK_GE(luma_threshold, 0); + RTC_DCHECK_GE(chroma_threshold, 0); + RTC_CHECK_EQ(filtered_original_samples.size(), + filtered_compressed_samples.size()) + << "The original and compressed frame have different amounts of " + "filtered samples."; + RTC_CHECK_GT(filtered_original_samples.size(), 0); + const int num_samples = filtered_original_samples.size(); + double sum = 0.0; + for (int i = 0; i < num_samples; ++i) { + RTC_CHECK_EQ(filtered_original_samples[i].plane, + filtered_compressed_samples[i].plane); + double abs_diff = std::abs(filtered_original_samples[i].value - + filtered_compressed_samples[i].value); + switch (filtered_original_samples[i].plane) { + case ImagePlane::kLuma: + if (abs_diff > luma_threshold) { + sum += std::pow(abs_diff - luma_threshold, 2); + } + break; + case ImagePlane::kChroma: + if (abs_diff > chroma_threshold) { + sum += std::pow(abs_diff - chroma_threshold, 2); + } + break; + } + } + + return sum / num_samples; +} + +} // namespace webrtc diff --git a/video/corruption_detection/corruption_classifier.h b/video/corruption_detection/corruption_classifier.h new file mode 100644 index 0000000000..552f315c86 --- /dev/null +++ b/video/corruption_detection/corruption_classifier.h @@ -0,0 +1,74 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_CORRUPTION_DETECTION_CORRUPTION_CLASSIFIER_H_ +#define VIDEO_CORRUPTION_DETECTION_CORRUPTION_CLASSIFIER_H_ + +#include + +#include "api/array_view.h" +#include "video/corruption_detection/halton_frame_sampler.h" + +namespace webrtc { + +// Based on the given filtered samples to `CalculateCorruptionProbability` this +// class calculates a probability to indicate whether the frame is corrupted. +// The classification is done either by scaling the loss to the interval of [0, +// 1] using a simple `scale_factor` or by applying a logistic function to the +// loss. The logistic function is constructed based on `growth_rate` and +// `midpoint`, to the score between the original and the compressed frames' +// samples. This score is calculated using `GetScore`. +// +// TODO: bugs.webrtc.org/358039777 - Remove one of the constructors based on +// which mapping function works best in practice. +class CorruptionClassifier { + public: + // Calculates the corruption probability using a simple scale factor. + explicit CorruptionClassifier(float scale_factor); + // Calculates the corruption probability using a logistic function. + CorruptionClassifier(float growth_rate, float midpoint); + ~CorruptionClassifier() = default; + + // This function calculates and returns the probability (in the interval [0, + // 1] that a frame is corrupted. The probability is determined either by + // scaling the loss to the interval of [0, 1] using a simple `scale_factor` + // or by applying a logistic function to the loss. The method is chosen + // depending on the used constructor. 
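// Concretely (see corruption_classifier.cc in this change):
//   scalar mapping:   probability = min(score / scale_factor, 1.0)
//   logistic mapping: probability = 1 / (1 + exp(-growth_rate * (score - midpoint)))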
+ double CalculateCorruptionProbability( + ArrayView filtered_original_samples, + ArrayView filtered_compressed_samples, + int luma_threshold, + int chroma_threshold) const; + + private: + struct ScalarConfig { + float scale_factor; + }; + + // Logistic function parameters. See + // https://en.wikipedia.org/wiki/Logistic_function. + struct LogisticFunctionConfig { + float growth_rate; + float midpoint; + }; + + // Returns the non-normalized score between the original and the compressed + // frames' samples. + double GetScore(ArrayView filtered_original_samples, + ArrayView filtered_compressed_samples, + int luma_threshold, + int chroma_threshold) const; + + const std::variant config_; +}; + +} // namespace webrtc + +#endif // VIDEO_CORRUPTION_DETECTION_CORRUPTION_CLASSIFIER_H_ diff --git a/video/corruption_detection/corruption_classifier_unittest.cc b/video/corruption_detection/corruption_classifier_unittest.cc new file mode 100644 index 0000000000..5f8252400e --- /dev/null +++ b/video/corruption_detection/corruption_classifier_unittest.cc @@ -0,0 +1,294 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/corruption_detection/corruption_classifier.h" + +#include + +#include "test/gmock.h" +#include "test/gtest.h" +#include "video/corruption_detection/halton_frame_sampler.h" + +namespace webrtc { +namespace { + +using ::testing::DoubleNear; +#if GTEST_HAS_DEATH_TEST +using ::testing::_; +using ::testing::HasSubstr; +#endif // GTEST_HAS_DEATH_TEST + +constexpr int kLumaThreshold = 3; +constexpr int kChromaThreshold = 2; + +constexpr double kMaxAbsoluteError = 1e-4; + +// Arbitrary values for testing. +constexpr double kBaseOriginalLumaSampleValue1 = 1.0; +constexpr double kBaseOriginalLumaSampleValue2 = 2.5; +constexpr double kBaseOriginalChromaSampleValue1 = 0.5; + +constexpr FilteredSample kFilteredOriginalSampleValues[] = { + {.value = kBaseOriginalLumaSampleValue1, .plane = ImagePlane::kLuma}, + {.value = kBaseOriginalLumaSampleValue2, .plane = ImagePlane::kLuma}, + {.value = kBaseOriginalChromaSampleValue1, .plane = ImagePlane::kChroma}}; + +// The value 14.0 corresponds to the corruption probability being on the same +// side of 0.5 in the `ScalarConfig` and `LogisticFunctionConfig`. +constexpr float kScaleFactor = 14.0; + +constexpr float kGrowthRate = 1.0; +constexpr float kMidpoint = 7.0; + +// Helper function to create fake compressed sample values. 
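// For example, GetCompressedSampleValues(1.0, 2.5) returns
// {2.0 (luma), 3.5 (luma), 3.0 (chroma)}, i.e. the base sample values above
// shifted by the given luma and chroma offsets.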
+std::vector GetCompressedSampleValues( + double increase_value_luma, + double increase_value_chroma) { + return std::vector{ + {.value = kBaseOriginalLumaSampleValue1 + increase_value_luma, + .plane = ImagePlane::kLuma}, + {.value = kBaseOriginalLumaSampleValue2 + increase_value_luma, + .plane = ImagePlane::kLuma}, + {.value = kBaseOriginalChromaSampleValue1 + increase_value_chroma, + .plane = ImagePlane::kChroma}}; +} + +#if GTEST_HAS_DEATH_TEST +TEST(CorruptionClassifierTest, EmptySamplesShouldResultInDeath) { + CorruptionClassifier corruption_classifier(kScaleFactor); + EXPECT_DEATH(corruption_classifier.CalculateCorruptionProbability( + {}, {}, kLumaThreshold, kChromaThreshold), + _); +} + +TEST(CorruptionClassifierTest, DifferentAmountOfSamplesShouldResultInDeath) { + CorruptionClassifier corruption_classifier(kScaleFactor); + const std::vector filtered_compressed_samples = { + {.value = 1.0, .plane = ImagePlane::kLuma}}; + + EXPECT_DEATH(corruption_classifier.CalculateCorruptionProbability( + kFilteredOriginalSampleValues, filtered_compressed_samples, + kLumaThreshold, kChromaThreshold), + HasSubstr("The original and compressed frame have different " + "amounts of filtered samples.")); +} +#endif // GTEST_HAS_DEATH_TEST + +TEST(CorruptionClassifierTest, + SameSampleValuesShouldResultInNoCorruptionScalarConfig) { + float kIncreaseValue = 0.0; + const std::vector kFilteredCompressedSampleValues = + GetCompressedSampleValues(kIncreaseValue, kIncreaseValue); + + CorruptionClassifier corruption_classifier(kScaleFactor); + + // Expected: score = 0. + // Note that the `score` above corresponds to the value returned by the + // `GetScore` function. Then this value should be passed through the Scalar or + // Logistic function giving the expected result inside DoubleNear. This + // applies for all the following tests. + EXPECT_THAT( + corruption_classifier.CalculateCorruptionProbability( + kFilteredOriginalSampleValues, kFilteredCompressedSampleValues, + kLumaThreshold, kChromaThreshold), + DoubleNear(0.0, kMaxAbsoluteError)); +} + +TEST(CorruptionClassifierTest, + SameSampleValuesShouldResultInNoCorruptionLogisticFunctionConfig) { + float kIncreaseValue = 0.0; + const std::vector kFilteredCompressedSampleValues = + GetCompressedSampleValues(kIncreaseValue, kIncreaseValue); + + CorruptionClassifier corruption_classifier(kGrowthRate, kMidpoint); + + // Expected: score = 0. See above for explanation why we have `0.0009` below. + EXPECT_THAT( + corruption_classifier.CalculateCorruptionProbability( + kFilteredOriginalSampleValues, kFilteredCompressedSampleValues, + kLumaThreshold, kChromaThreshold), + DoubleNear(0.0009, kMaxAbsoluteError)); +} + +TEST(CorruptionClassifierTest, + NoCorruptionWhenAllSampleDifferencesBelowThresholdScalarConfig) { + // Following value should be < `kLumaThreshold` and `kChromaThreshold`. + const double kIncreaseValue = 1; + const std::vector kFilteredCompressedSampleValues = + GetCompressedSampleValues(kIncreaseValue, kIncreaseValue); + + CorruptionClassifier corruption_classifier(kScaleFactor); + + // Expected: score = 0. + EXPECT_THAT( + corruption_classifier.CalculateCorruptionProbability( + kFilteredOriginalSampleValues, kFilteredCompressedSampleValues, + kLumaThreshold, kChromaThreshold), + DoubleNear(0.0, kMaxAbsoluteError)); +} + +TEST(CorruptionClassifierTest, + NoCorruptionWhenAllSampleDifferencesBelowThresholdLogisticFunctionConfig) { + // Following value should be < `kLumaThreshold` and `kChromaThreshold`. 
+ const double kIncreaseValue = 1; + const std::vector kFilteredCompressedSampleValues = + GetCompressedSampleValues(kIncreaseValue, kIncreaseValue); + + CorruptionClassifier corruption_classifier(kGrowthRate, kMidpoint); + + // Expected: score = 0. + EXPECT_THAT( + corruption_classifier.CalculateCorruptionProbability( + kFilteredOriginalSampleValues, kFilteredCompressedSampleValues, + kLumaThreshold, kChromaThreshold), + DoubleNear(0.0009, kMaxAbsoluteError)); +} + +TEST(CorruptionClassifierTest, + NoCorruptionWhenSmallPartOfSamplesAboveThresholdScalarConfig) { + const double kIncreaseValueLuma = 1; + const double kIncreaseValueChroma = 2.5; // Above `kChromaThreshold`. + const std::vector kFilteredCompressedSampleValues = + GetCompressedSampleValues(kIncreaseValueLuma, kIncreaseValueChroma); + + CorruptionClassifier corruption_classifier(kScaleFactor); + + // Expected: score = (0.5)^2 / 3. + EXPECT_THAT( + corruption_classifier.CalculateCorruptionProbability( + kFilteredOriginalSampleValues, kFilteredCompressedSampleValues, + kLumaThreshold, kChromaThreshold), + DoubleNear(0.0060, kMaxAbsoluteError)); +} + +TEST(CorruptionClassifierTest, + NoCorruptionWhenSmallPartOfSamplesAboveThresholdLogisticFunctionConfig) { + const double kIncreaseValueLuma = 1; + const double kIncreaseValueChroma = 2.5; // Above `kChromaThreshold`. + const std::vector kFilteredCompressedSampleValues = + GetCompressedSampleValues(kIncreaseValueLuma, kIncreaseValueChroma); + + CorruptionClassifier corruption_classifier(kGrowthRate, kMidpoint); + + // Expected: score = (0.5)^2 / 3. + EXPECT_THAT( + corruption_classifier.CalculateCorruptionProbability( + kFilteredOriginalSampleValues, kFilteredCompressedSampleValues, + kLumaThreshold, kChromaThreshold), + DoubleNear(0.001, kMaxAbsoluteError)); +} + +TEST(CorruptionClassifierTest, + NoCorruptionWhenAllSamplesSlightlyAboveThresholdScalarConfig) { + const double kIncreaseValueLuma = 4.2; // Above `kLumaThreshold`. + const double kIncreaseValueChroma = 2.5; // Above `kChromaThreshold`. + const std::vector kFilteredCompressedSampleValues = + GetCompressedSampleValues(kIncreaseValueLuma, kIncreaseValueChroma); + + CorruptionClassifier corruption_classifier(kScaleFactor); + + // Expected: score = ((0.5)^2 + 2*(1.2)^2) / 3. + EXPECT_THAT( + corruption_classifier.CalculateCorruptionProbability( + kFilteredOriginalSampleValues, kFilteredCompressedSampleValues, + kLumaThreshold, kChromaThreshold), + DoubleNear(0.07452, kMaxAbsoluteError)); +} + +TEST(CorruptionClassifierTest, + NoCorruptionWhenAllSamplesSlightlyAboveThresholdLogisticFunctionConfig) { + const double kIncreaseValueLuma = 4.2; // Above `kLumaThreshold`. + const double kIncreaseValueChroma = 2.5; // Above `kChromaThreshold`. + const std::vector kFilteredCompressedSampleValues = + GetCompressedSampleValues(kIncreaseValueLuma, kIncreaseValueChroma); + + CorruptionClassifier corruption_classifier(kGrowthRate, kMidpoint); + + // Expected: score = ((0.5)^2 + 2*(1.2)^2) / 3. + EXPECT_THAT( + corruption_classifier.CalculateCorruptionProbability( + kFilteredOriginalSampleValues, kFilteredCompressedSampleValues, + kLumaThreshold, kChromaThreshold), + DoubleNear(0.0026, kMaxAbsoluteError)); +} + +// Observe that the following 2 tests in practice could be classified as +// corrupted, if so wanted. However, with the `kGrowthRate`, `kMidpoint` and +// `kScaleFactor` values chosen in these tests, the score is not high enough to +// be classified as corrupted. 
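// For reference, the score in these two tests is (3^2 + 2 * 2^2) / 3 ~= 5.67,
// which maps to 5.67 / 14 ~= 0.40 with the scalar config and to
// 1 / (1 + exp(-(5.67 - 7))) ~= 0.21 with the logistic config, i.e. both stay
// below 0.5 for the constants chosen in this file.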
+TEST(CorruptionClassifierTest, + NoCorruptionWhenAllSamplesSomewhatAboveThresholdScalarConfig) { + const double kIncreaseValue = 5.0; + const std::vector kFilteredCompressedSampleValues = + GetCompressedSampleValues(kIncreaseValue, kIncreaseValue); + + CorruptionClassifier corruption_classifier(kScaleFactor); + + // Expected: score = ((3)^2 + 2*(2)^2) / 3. + EXPECT_THAT( + corruption_classifier.CalculateCorruptionProbability( + kFilteredOriginalSampleValues, kFilteredCompressedSampleValues, + kLumaThreshold, kChromaThreshold), + DoubleNear(0.4048, kMaxAbsoluteError)); +} + +TEST(CorruptionClassifierTest, + NoCorruptionWhenAllSamplesSomewhatAboveThresholdLogisticFunctionConfig) { + // Somewhat above `kLumaThreshold` and `kChromaThreshold`. + const double kIncreaseValue = 5.0; + const std::vector kFilteredCompressedSampleValues = + GetCompressedSampleValues(kIncreaseValue, kIncreaseValue); + + CorruptionClassifier corruption_classifier(kGrowthRate, kMidpoint); + + // Expected: score = ((3)^2 + 2*(2)^2) / 3. + EXPECT_THAT( + corruption_classifier.CalculateCorruptionProbability( + kFilteredOriginalSampleValues, kFilteredCompressedSampleValues, + kLumaThreshold, kChromaThreshold), + DoubleNear(0.2086, kMaxAbsoluteError)); +} + +TEST(CorruptionClassifierTest, + CorruptionWhenAllSamplesWellAboveThresholdScalarConfig) { + // Well above `kLumaThreshold` and `kChromaThreshold`. + const double kIncreaseValue = 7.0; + const std::vector kFilteredCompressedSampleValues = + GetCompressedSampleValues(kIncreaseValue, kIncreaseValue); + + CorruptionClassifier corruption_classifier(kScaleFactor); + + // Expected: score = ((5)^2 + 2*(4)^2) / 3. Expected 1 because of capping. + EXPECT_THAT( + corruption_classifier.CalculateCorruptionProbability( + kFilteredOriginalSampleValues, kFilteredCompressedSampleValues, + kLumaThreshold, kChromaThreshold), + DoubleNear(1, kMaxAbsoluteError)); +} + +TEST(CorruptionClassifierTest, + CorruptionWhenAllSamplesWellAboveThresholdLogisticFunctionConfig) { + // Well above `kLumaThreshold` and `kChromaThreshold`. + const double kIncreaseValue = 7.0; + const std::vector kFilteredCompressedSampleValues = + GetCompressedSampleValues(kIncreaseValue, kIncreaseValue); + + CorruptionClassifier corruption_classifier(kGrowthRate, kMidpoint); + + // Expected: score = ((5)^2 + 2*(4)^2) / 3. + EXPECT_THAT( + corruption_classifier.CalculateCorruptionProbability( + kFilteredOriginalSampleValues, kFilteredCompressedSampleValues, + kLumaThreshold, kChromaThreshold), + DoubleNear(1, kMaxAbsoluteError)); +} + +} // namespace +} // namespace webrtc diff --git a/video/corruption_detection/evaluation/BUILD.gn b/video/corruption_detection/evaluation/BUILD.gn new file mode 100644 index 0000000000..7b95a3b207 --- /dev/null +++ b/video/corruption_detection/evaluation/BUILD.gn @@ -0,0 +1,79 @@ +# Copyright 2025 The WebRTC project authors. All rights reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
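+# Helper targets for the corruption-detection evaluation tests: `test_clip`
+# wraps a YUV/Y4M input clip, `utils` provides Y4M file helpers, and when
+# `rtc_include_tests` is set the unit tests below are bundled into
+# `corruption_detection_eval_tests`.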
+ +import("../../../webrtc.gni") + +rtc_library("test_clip") { + testonly = true + sources = [ + "test_clip.cc", + "test_clip.h", + ] + deps = [ + ":utils", + "../../../api/video_codecs:video_codecs_api", + "../../../rtc_base:checks", + "../../../test:fileutils", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +rtc_library("utils") { + testonly = true + sources = [ + "utils.cc", + "utils.h", + ] + deps = [ + "../../../api:array_view", + "../../../rtc_base:checks", + "../../../rtc_base:stringutils", + "../../../rtc_base/system:file_wrapper", + "../../../test:fileutils", + "//third_party/abseil-cpp/absl/strings:string_view", + ] +} + +if (rtc_include_tests) { + rtc_library("test_clip_unittest") { + testonly = true + sources = [ "test_clip_unittest.cc" ] + deps = [ + ":test_clip", + ":utils", + "../../../api/video_codecs:video_codecs_api", + "../../../test:fileutils", + "../../../test:test_support", + "//third_party/abseil-cpp/absl/strings:string_view", + ] + data = [ "../../../resources/ConferenceMotion_1280_720_50.yuv" ] + } + + rtc_library("utils_unittest") { + testonly = true + sources = [ "utils_unittest.cc" ] + deps = [ + ":utils", + "../../../api:scoped_refptr", + "../../../api/video:video_frame", + "../../../rtc_base/system:file_wrapper", + "../../../test:fileutils", + "../../../test:test_support", + "../../../test:video_test_support", + ] + } + + rtc_library("corruption_detection_eval_tests") { + testonly = true + sources = [] + deps = [ + ":test_clip_unittest", + ":utils_unittest", + ] + } +} diff --git a/video/corruption_detection/evaluation/test_clip.cc b/video/corruption_detection/evaluation/test_clip.cc new file mode 100644 index 0000000000..66ef5ef609 --- /dev/null +++ b/video/corruption_detection/evaluation/test_clip.cc @@ -0,0 +1,54 @@ +/* + * Copyright 2025 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/corruption_detection/evaluation/test_clip.h" + +#include + +#include "absl/strings/string_view.h" +#include "api/video_codecs/video_codec.h" +#include "rtc_base/checks.h" +#include "test/testsupport/file_utils.h" +#include "video/corruption_detection/evaluation/utils.h" + +namespace webrtc { + +TestClip TestClip::CreateYuvClip(absl::string_view filename, + int width, + int height, + int framerate, + VideoCodecMode codec_mode) { + // First assume that the filename does not contain the extension. + std::string clip_path = test::ResourcePath(filename, "yuv"); + if (!test::FileExists(clip_path)) { + // Second assume that the filename contains a full path to the video. + RTC_CHECK(test::FileExists(filename)) << "Could not find clip " << filename; + clip_path = std::string(filename); + } + return TestClip(clip_path, width, height, framerate, codec_mode, + /*is_yuv=*/true); +} + +TestClip TestClip::CreateY4mClip(absl::string_view filename, + VideoCodecMode codec_mode) { + // First assume that the filename does not contain the extension. + std::string clip_path = test::ResourcePath(filename, "y4m"); + if (!test::FileExists(clip_path)) { + // Second assume that the filename contains a full path to the video. 
+ RTC_CHECK(test::FileExists(filename)) << "Could not find clip " << filename; + clip_path = std::string(filename); + } + const Y4mMetadata y4m_metadata = ReadMetadataFromY4mHeader(clip_path); + return TestClip(clip_path, y4m_metadata.width, y4m_metadata.height, + y4m_metadata.framerate, codec_mode, + /*is_yuv=*/false); +} + +} // namespace webrtc diff --git a/video/corruption_detection/evaluation/test_clip.h b/video/corruption_detection/evaluation/test_clip.h new file mode 100644 index 0000000000..863cc24727 --- /dev/null +++ b/video/corruption_detection/evaluation/test_clip.h @@ -0,0 +1,76 @@ +/* + * Copyright 2025 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_CORRUPTION_DETECTION_EVALUATION_TEST_CLIP_H_ +#define VIDEO_CORRUPTION_DETECTION_EVALUATION_TEST_CLIP_H_ + +#include + +#include "absl/strings/string_view.h" +#include "api/video_codecs/video_codec.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { + +// Identifies a test clip. +// If the clip is a YUV video, the user should use the `CreateYuvClip` method. +// Hence, the user should also give information about the resolution and +// framerate of the video. If the clip is a Y4M video, the resolution and +// framerate are derived from the file header, by using the `CreateY4mClip` +// method. +class TestClip { + public: + static TestClip CreateYuvClip(absl::string_view filename, + int width, + int height, + int framerate, + VideoCodecMode codec_mode); + + static TestClip CreateY4mClip(absl::string_view filename, + VideoCodecMode codec_mode); + + // Returns the path to the video with the '.yuv' or '.y4m' extension. + // Observe that this path can only be reached as long as the `TestClip` + // instance is alive. + absl::string_view clip_path() const { return clip_path_with_extension_; } + VideoCodecMode codec_mode() const { return codec_mode_; } + int width() const { return width_; } + int height() const { return height_; } + int framerate() const { return framerate_; } + bool is_yuv() const { return is_yuv_; } + + private: + TestClip(absl::string_view clip_path_with_extension, + int width, + int height, + int framerate, + VideoCodecMode codec_mode, + bool is_yuv) + : clip_path_with_extension_(std::string(clip_path_with_extension)), + codec_mode_(codec_mode), + width_(width), + height_(height), + framerate_(framerate), + is_yuv_(is_yuv) {} + + // The path to the video with the '.yuv' or '.y4m' extension. + const std::string clip_path_with_extension_; + // Specifies whether the video is a real time or a screensharing video. It + // is used to initialize the encoder properly. + const VideoCodecMode codec_mode_; + const int width_ = 0; + const int height_ = 0; + const int framerate_ = 0; + const bool is_yuv_ = false; +}; + +} // namespace webrtc + +#endif // VIDEO_CORRUPTION_DETECTION_EVALUATION_TEST_CLIP_H_ diff --git a/video/corruption_detection/evaluation/test_clip_unittest.cc b/video/corruption_detection/evaluation/test_clip_unittest.cc new file mode 100644 index 0000000000..6ba1282ca4 --- /dev/null +++ b/video/corruption_detection/evaluation/test_clip_unittest.cc @@ -0,0 +1,116 @@ +/* + * Copyright 2025 The WebRTC project authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/corruption_detection/evaluation/test_clip.h" + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/video_codecs/video_codec.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" +#include "video/corruption_detection/evaluation/utils.h" + +namespace webrtc { +namespace { + +using ::testing::HasSubstr; + +constexpr int kWidth = 1280; +constexpr int kHeight = 720; +constexpr int kFramerate = 50; +constexpr VideoCodecMode kCodecMode = VideoCodecMode::kRealtimeVideo; + +constexpr int kDummyVideoWidth = 2; +constexpr int kDummyVideoHeight = 2; +// One frame of dummy video. +constexpr uint8_t kDummyFileContent[kDummyVideoWidth * kDummyVideoHeight * 3 / + 2] = {0, 1, 2, 3, 4, 5}; + +#if GTEST_HAS_DEATH_TEST +TEST(TestClipTest, FileDoesNotExist) { + EXPECT_DEATH(TestClip::CreateY4mClip("does_not_exist", kCodecMode), + HasSubstr("Could not find clip does_not_exist")); + EXPECT_DEATH(TestClip::CreateY4mClip("does_not_exist.y4m", kCodecMode), + HasSubstr("Could not find clip does_not_exist")); + EXPECT_DEATH(TestClip::CreateYuvClip("does_not_exist", kWidth, kHeight, + kFramerate, kCodecMode), + HasSubstr("Could not find clip does_not_exist")); + EXPECT_DEATH(TestClip::CreateYuvClip("does_not_exist.yuv", kWidth, kHeight, + kFramerate, kCodecMode), + HasSubstr("Could not find clip does_not_exist")); +} +#endif // GTEST_HAS_DEATH_TEST + +TEST(TestClipTest, CreateYuvClipWithoutExtension) { + const std::string kFilenameYuvWithoutExtension = + "ConferenceMotion_1280_720_50"; + const std::string kFilenameYuvWithExtension = + "ConferenceMotion_1280_720_50.yuv"; + + TestClip kDefaultTestClip = TestClip::CreateYuvClip( + kFilenameYuvWithoutExtension, kWidth, kHeight, kFramerate, kCodecMode); + + EXPECT_THAT(kDefaultTestClip.codec_mode(), kCodecMode); + // Using `HasSubstr` because `ResourcePath` adds a prefix to the filename. + EXPECT_THAT(kDefaultTestClip.clip_path(), + HasSubstr(kFilenameYuvWithExtension)); + EXPECT_THAT(kDefaultTestClip.width(), kWidth); + EXPECT_THAT(kDefaultTestClip.height(), kHeight); + EXPECT_THAT(kDefaultTestClip.framerate(), kFramerate); + EXPECT_THAT(kDefaultTestClip.is_yuv(), true); +} + +TEST(TestClipTest, CreateYuvClipWithExtension) { + // Create a YUV file and add dummy data to it. This will simulate that + // `TestClip` can find a video when specified with full path. + const std::string yuv_filepath = + test::TempFilename(test::OutputPath(), "temp_video.yuv"); + FILE* video_file = fopen(yuv_filepath.c_str(), "wb"); + ASSERT_TRUE(video_file); + fwrite(kDummyFileContent, 1, kDummyVideoWidth * kDummyVideoHeight * 3 / 2, + video_file); + // `fclose` returns 0 on success. 
+ ASSERT_EQ(fclose(video_file), 0); + + TestClip kDefaultTestClip = + TestClip::CreateYuvClip(yuv_filepath, kDummyVideoWidth, kDummyVideoHeight, + kFramerate, kCodecMode); + + EXPECT_THAT(kDefaultTestClip.codec_mode(), kCodecMode); + EXPECT_THAT(kDefaultTestClip.clip_path(), HasSubstr(yuv_filepath)); + EXPECT_THAT(kDefaultTestClip.width(), kDummyVideoWidth); + EXPECT_THAT(kDefaultTestClip.height(), kDummyVideoHeight); + EXPECT_THAT(kDefaultTestClip.framerate(), kFramerate); + EXPECT_THAT(kDefaultTestClip.is_yuv(), true); +} + +TEST(TestClipTest, CreateY4mClipWithExtension) { + // Create a temporary Y4M file for testing. This will simulate that + // `TestClip` can find a video when specified with full path. + TempY4mFileCreator temp_y4m_file_creator(kDummyVideoWidth, kDummyVideoHeight, + kFramerate); + temp_y4m_file_creator.CreateTempY4mFile(kDummyFileContent); + const absl::string_view y4m_filepath = temp_y4m_file_creator.y4m_filepath(); + + TestClip kDefaultTestClip = TestClip::CreateY4mClip(y4m_filepath, kCodecMode); + EXPECT_THAT(kDefaultTestClip.codec_mode(), kCodecMode); + EXPECT_THAT(kDefaultTestClip.clip_path(), HasSubstr(y4m_filepath)); + EXPECT_THAT(kDefaultTestClip.width(), kDummyVideoWidth); + EXPECT_THAT(kDefaultTestClip.height(), kDummyVideoHeight); + EXPECT_THAT(kDefaultTestClip.framerate(), kFramerate); + EXPECT_THAT(kDefaultTestClip.is_yuv(), false); +} + +} // namespace +} // namespace webrtc diff --git a/video/corruption_detection/evaluation/utils.cc b/video/corruption_detection/evaluation/utils.cc new file mode 100644 index 0000000000..affdcc912a --- /dev/null +++ b/video/corruption_detection/evaluation/utils.cc @@ -0,0 +1,102 @@ +/* + * Copyright 2025 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/corruption_detection/evaluation/utils.h" + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "rtc_base/checks.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/system/file_wrapper.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { +namespace { + +constexpr char kFrameHeader[] = "FRAME\n"; + +// Reading 30 bytes from the Y4M header should be enough to get the resolution +// and framerate. The header starts with: `YUV4MPEG2 W H +// Fn:Fd`. +constexpr int kHeaderBytesToRead = 30; + +} // namespace + +TempY4mFileCreator::TempY4mFileCreator(int width, int height, int framerate) + : width_(width), + height_(height), + framerate_(framerate), + frame_size_(width * height * 3 / 2), + y4m_filepath_(test::TempFilename(test::OutputPath(), "temp_video")) { + // A file with the given name path should just have been created. + RTC_CHECK_EQ(test::GetFileSize(y4m_filepath_), 0); +} + +TempY4mFileCreator::~TempY4mFileCreator() { + RTC_CHECK(test::RemoveFile(y4m_filepath_.c_str())); +} + +void TempY4mFileCreator::CreateTempY4mFile( + ArrayView file_content) { + RTC_CHECK_EQ(file_content.size() % frame_size_, 0) + << "Content size is not a multiple of frame size. 
Probably some data is " + "missing."; + FileWrapper video_file = FileWrapper::OpenWriteOnly(y4m_filepath_); + RTC_CHECK(video_file.is_open()); + + WriteFileHeader(video_file); + + // Write frame content. + int frame_number = file_content.size() / frame_size_; + for (int frame_index = 0; frame_index < frame_number; ++frame_index) { + RTC_CHECK(video_file.Write(kFrameHeader, sizeof(kFrameHeader) - 1)); + RTC_CHECK_LT(frame_size_ * frame_index, file_content.size()); + RTC_CHECK(video_file.Write(file_content.data() + frame_size_ * frame_index, + frame_size_)); + } + + RTC_CHECK(video_file.Flush()); +} + +void TempY4mFileCreator::WriteFileHeader(FileWrapper& video_file) const { + StringBuilder frame_header; + frame_header << "YUV4MPEG2 W" << width_ << " H" << height_ << " F" + << framerate_ << ":1 C420\n"; + RTC_CHECK(video_file.Write(frame_header.str().c_str(), frame_header.size())); +} + +Y4mMetadata ReadMetadataFromY4mHeader(absl::string_view clip_path) { + FILE* file = fopen(std::string(clip_path).c_str(), "r"); + RTC_CHECK(file) << "Could not open " << clip_path; + + char header[kHeaderBytesToRead]; + RTC_CHECK(fgets(header, sizeof(header), file) != nullptr) + << "File " << clip_path << " is too small"; + fclose(file); + + int fps_numerator; + int fps_denominator; + int width; + int height; + RTC_CHECK_EQ(sscanf(header, "YUV4MPEG2 W%u H%u F%i:%i", &width, &height, + &fps_numerator, &fps_denominator), + 4); + RTC_CHECK_NE(fps_denominator, 0); + return {.width = width, + .height = height, + .framerate = fps_numerator / fps_denominator}; +} + +} // namespace webrtc diff --git a/video/corruption_detection/evaluation/utils.h b/video/corruption_detection/evaluation/utils.h new file mode 100644 index 0000000000..f4cac20cbf --- /dev/null +++ b/video/corruption_detection/evaluation/utils.h @@ -0,0 +1,65 @@ +/* + * Copyright 2025 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_CORRUPTION_DETECTION_EVALUATION_UTILS_H_ +#define VIDEO_CORRUPTION_DETECTION_EVALUATION_UTILS_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "rtc_base/system/file_wrapper.h" + +namespace webrtc { + +// Creates a temporary Y4M file with the given `width`, `height` and +// `framerate`. The temporary file is removed when the class is destroyed. +class TempY4mFileCreator { + public: + TempY4mFileCreator(int width, int height, int framerate); + + // Removes the temporary created file. + ~TempY4mFileCreator(); + + // Creates a temporary Y4M video file with the content given by + // `file_content`. `file_content` should have YUV420p format, where each frame + // is of size `width_ * height_ * 3 / 2` and stack one after another in YYYYUV + // format. + // + // The number of frames depends on the size of `file_content`. + void CreateTempY4mFile(ArrayView file_content); + + absl::string_view y4m_filepath() const { return y4m_filepath_; } + + private: + // Writes the file header. It populates file header with the width, height and + // framerate information given by the class constructor. 
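+  // For example, with width 4, height 4 and framerate 30 the header written
+  // by the implementation above is "YUV4MPEG2 W4 H4 F30:1 C420\n", and each
+  // frame payload is preceded by a "FRAME\n" marker.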
+ void WriteFileHeader(FileWrapper& video_file) const; + + const int width_; + const int height_; + const int framerate_; + const int frame_size_; + + const std::string y4m_filepath_; +}; + +struct Y4mMetadata { + int width = 0; + int height = 0; + int framerate = 0; +}; + +Y4mMetadata ReadMetadataFromY4mHeader(absl::string_view clip_path); + +} // namespace webrtc + +#endif // VIDEO_CORRUPTION_DETECTION_EVALUATION_UTILS_H_ diff --git a/video/corruption_detection/evaluation/utils_unittest.cc b/video/corruption_detection/evaluation/utils_unittest.cc new file mode 100644 index 0000000000..a5ebd8d1bf --- /dev/null +++ b/video/corruption_detection/evaluation/utils_unittest.cc @@ -0,0 +1,141 @@ +/* + * Copyright 2025 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/corruption_detection/evaluation/utils.h" + +#include +#include +#include +#include + +#include "api/scoped_refptr.h" +#include "api/video/i420_buffer.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" +#include "test/testsupport/frame_reader.h" + +namespace webrtc { +namespace { + +using ::testing::HasSubstr; +using ::testing::Not; + +// Dimension for the test frames. +constexpr int kWidth = 4; +constexpr int kHeight = 4; +constexpr int kChromaWidth = 2; +constexpr int kChromaHeight = 2; +constexpr int kFrameRate = 30; + +// An arbitrary 4x4 raw YUV420 frame. +constexpr uint8_t kFrameYContent[kWidth * kHeight] = { + 12, 5, 7, 11, // + 159, 15, 11, 0, // + 4, 240, 131, 59, // + 61, 87, 11, 0 // +}; +constexpr uint8_t kFrameUContent[kChromaWidth * kChromaHeight] = { + 248, 184, // + 139, 229 // +}; +constexpr uint8_t kFrameVContent[kChromaWidth * kChromaHeight] = { + 32, 69, // + 7, 193 // +}; + +// Concatenates and returns Y, U and V content into a single vector. +std::vector ConcatenateVideoChannels() { + std::vector file_content_flattened; + file_content_flattened.insert(file_content_flattened.end(), kFrameYContent, + kFrameYContent + kWidth * kHeight); + file_content_flattened.insert(file_content_flattened.end(), kFrameUContent, + kFrameUContent + kChromaWidth * kChromaHeight); + file_content_flattened.insert(file_content_flattened.end(), kFrameVContent, + kFrameVContent + kChromaWidth * kChromaHeight); + return file_content_flattened; +} + +TEST(TempY4mFileCreatorTest, CheckIfY4mFileIsCreated) { + std::vector file_content_flattened = ConcatenateVideoChannels(); + + TempY4mFileCreator temp_y4m_file_creator(kWidth, kHeight, kFrameRate); + temp_y4m_file_creator.CreateTempY4mFile(file_content_flattened); + + EXPECT_TRUE(test::FileExists(temp_y4m_file_creator.y4m_filepath())); +} + +TEST(TempY4mFileCreatorTest, CanCreateMultipleFiles) { + std::vector file_content_flattened = ConcatenateVideoChannels(); + + TempY4mFileCreator temp_y4m_file_creator_1(kWidth, kHeight, kFrameRate); + TempY4mFileCreator temp_y4m_file_creator_2(kWidth, kHeight, kFrameRate); + temp_y4m_file_creator_1.CreateTempY4mFile(file_content_flattened); + temp_y4m_file_creator_2.CreateTempY4mFile(file_content_flattened); + + // Check that the files are created. 
+ ASSERT_TRUE(test::FileExists(temp_y4m_file_creator_1.y4m_filepath())); + ASSERT_TRUE(test::FileExists(temp_y4m_file_creator_2.y4m_filepath())); + // Check that the created files have different paths. + EXPECT_THAT(temp_y4m_file_creator_1.y4m_filepath(), + Not(temp_y4m_file_creator_2.y4m_filepath())); +} + +TEST(TempY4mFileCreatorTest, CheckIfY4mFileHasCorrectContent) { + std::vector file_content_flattened = ConcatenateVideoChannels(); + + TempY4mFileCreator temp_y4m_file_creator(kWidth, kHeight, kFrameRate); + temp_y4m_file_creator.CreateTempY4mFile(file_content_flattened); + std::unique_ptr frame_generator = + CreateY4mFrameReader(std::string(temp_y4m_file_creator.y4m_filepath()), + test::YuvFrameReaderImpl::RepeatMode::kSingle); + + scoped_refptr i420_buffer = frame_generator->PullFrame(); + ASSERT_EQ(i420_buffer->width(), kWidth); + ASSERT_EQ(i420_buffer->height(), kHeight); + for (int i = 0; i < kWidth * kHeight; ++i) { + EXPECT_EQ(i420_buffer->DataY()[i], kFrameYContent[i]); + } + for (int i = 0; i < kChromaWidth * kChromaHeight; ++i) { + EXPECT_EQ(i420_buffer->DataU()[i], kFrameUContent[i]); + EXPECT_EQ(i420_buffer->DataV()[i], kFrameVContent[i]); + } + // No more frames. + EXPECT_THAT(frame_generator->PullFrame(), nullptr); +} + +#if GTEST_HAS_DEATH_TEST +TEST(TempY4mFileCreatorTest, ContentEndInMiddleError) { + std::vector file_content_flattened = ConcatenateVideoChannels(); + std::vector only_luma_content( + file_content_flattened.begin(), + file_content_flattened.begin() + kWidth * kHeight); + + TempY4mFileCreator temp_y4m_file_creator(kWidth, kHeight, kFrameRate); + EXPECT_DEATH(temp_y4m_file_creator.CreateTempY4mFile(only_luma_content), + HasSubstr("Content size is not a multiple of frame size.")); +} +#endif // GTEST_HAS_DEATH_TEST + +TEST(ReadMetadataFromY4mHeaderTest, ReadMetadataFromY4mHeader) { + std::vector file_content_flattened = ConcatenateVideoChannels(); + + TempY4mFileCreator temp_y4m_file_creator(kWidth, kHeight, kFrameRate); + temp_y4m_file_creator.CreateTempY4mFile(file_content_flattened); + Y4mMetadata y4m_metadata = + ReadMetadataFromY4mHeader(temp_y4m_file_creator.y4m_filepath()); + + EXPECT_EQ(y4m_metadata.width, kWidth); + EXPECT_EQ(y4m_metadata.height, kHeight); + EXPECT_EQ(y4m_metadata.framerate, kFrameRate); +} + +} // namespace +} // namespace webrtc diff --git a/video/corruption_detection/frame_instrumentation_evaluation.cc b/video/corruption_detection/frame_instrumentation_evaluation.cc new file mode 100644 index 0000000000..60d8a9e774 --- /dev/null +++ b/video/corruption_detection/frame_instrumentation_evaluation.cc @@ -0,0 +1,95 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/corruption_detection/frame_instrumentation_evaluation.h" + +#include +#include +#include + +#include "api/array_view.h" +#include "api/scoped_refptr.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "common_video/frame_instrumentation_data.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "video/corruption_detection/corruption_classifier.h" +#include "video/corruption_detection/halton_frame_sampler.h" + +namespace webrtc { + +namespace { + +std::vector ConvertSampleValuesToFilteredSamples( + ArrayView values, + ArrayView samples) { + RTC_CHECK_EQ(values.size(), samples.size()) + << "values and samples must have the same size"; + std::vector filtered_samples; + filtered_samples.reserve(values.size()); + for (size_t i = 0; i < values.size(); ++i) { + filtered_samples.push_back({.value = values[i], .plane = samples[i].plane}); + } + return filtered_samples; +} + +} // namespace + +std::optional GetCorruptionScore(const FrameInstrumentationData& data, + const VideoFrame& frame) { + if (data.sample_values.empty()) { + RTC_LOG(LS_WARNING) + << "Samples are needed to calculate a corruption score."; + return std::nullopt; + } + + scoped_refptr frame_buffer_as_i420 = + frame.video_frame_buffer()->ToI420(); + if (!frame_buffer_as_i420) { + RTC_LOG(LS_ERROR) << "Failed to convert " + << VideoFrameBufferTypeToString( + frame.video_frame_buffer()->type()) + << " image to I420"; + return std::nullopt; + } + + HaltonFrameSampler frame_sampler; + frame_sampler.SetCurrentIndex(data.sequence_index); + std::vector sample_coordinates = + frame_sampler.GetSampleCoordinatesForFrame(data.sample_values.size()); + if (sample_coordinates.empty()) { + RTC_LOG(LS_ERROR) << "Failed to get sample coordinates for frame."; + return std::nullopt; + } + + std::vector samples = + GetSampleValuesForFrame(frame_buffer_as_i420, sample_coordinates, + frame.width(), frame.height(), data.std_dev); + if (samples.empty()) { + RTC_LOG(LS_ERROR) << "Failed to get sample values for frame"; + return std::nullopt; + } + + std::vector data_samples = + ConvertSampleValuesToFilteredSamples(data.sample_values, samples); + if (data_samples.empty()) { + RTC_LOG(LS_ERROR) << "Failed to convert sample values to filtered samples"; + return std::nullopt; + } + + CorruptionClassifier classifier(3); + + return classifier.CalculateCorruptionProbability(data_samples, samples, + data.luma_error_threshold, + data.chroma_error_threshold); +} + +} // namespace webrtc diff --git a/video/corruption_detection/frame_instrumentation_evaluation.h b/video/corruption_detection/frame_instrumentation_evaluation.h new file mode 100644 index 0000000000..8bd3e1c436 --- /dev/null +++ b/video/corruption_detection/frame_instrumentation_evaluation.h @@ -0,0 +1,26 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef VIDEO_CORRUPTION_DETECTION_FRAME_INSTRUMENTATION_EVALUATION_H_ +#define VIDEO_CORRUPTION_DETECTION_FRAME_INSTRUMENTATION_EVALUATION_H_ + +#include + +#include "api/video/video_frame.h" +#include "common_video/frame_instrumentation_data.h" + +namespace webrtc { + +std::optional GetCorruptionScore(const FrameInstrumentationData& data, + const VideoFrame& frame); + +} // namespace webrtc + +#endif // VIDEO_CORRUPTION_DETECTION_FRAME_INSTRUMENTATION_EVALUATION_H_ diff --git a/video/corruption_detection/frame_instrumentation_evaluation_unittest.cc b/video/corruption_detection/frame_instrumentation_evaluation_unittest.cc new file mode 100644 index 0000000000..d82068deb2 --- /dev/null +++ b/video/corruption_detection/frame_instrumentation_evaluation_unittest.cc @@ -0,0 +1,147 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/corruption_detection/frame_instrumentation_evaluation.h" + +#include +#include +#include + +#include "api/scoped_refptr.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "common_video/frame_instrumentation_data.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +scoped_refptr MakeI420FrameBufferWithDifferentPixelValues() { + // Create an I420 frame of size 4x4. + const int kDefaultLumaWidth = 4; + const int kDefaultLumaHeight = 4; + const int kDefaultChromaWidth = 2; + std::vector kDefaultYContent = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + std::vector kDefaultUContent = {17, 18, 19, 20}; + std::vector kDefaultVContent = {21, 22, 23, 24}; + + return I420Buffer::Copy(kDefaultLumaWidth, kDefaultLumaHeight, + kDefaultYContent.data(), kDefaultLumaWidth, + kDefaultUContent.data(), kDefaultChromaWidth, + kDefaultVContent.data(), kDefaultChromaWidth); +} + +TEST(FrameInstrumentationEvaluationTest, + HaveNoCorruptionScoreWhenNoSampleValuesAreProvided) { + FrameInstrumentationData data = {.sequence_index = 0, + .communicate_upper_bits = false, + .std_dev = 1.0, + .luma_error_threshold = 0, + .chroma_error_threshold = 0, + .sample_values = {}}; + VideoFrame frame = + VideoFrame::Builder() + .set_video_frame_buffer(MakeI420FrameBufferWithDifferentPixelValues()) + .build(); + + std::optional corruption_score = GetCorruptionScore(data, frame); + + EXPECT_FALSE(corruption_score.has_value()); +} + +TEST(FrameInstrumentationEvaluationTest, + HaveACorruptionScoreWhenSampleValuesAreProvided) { + FrameInstrumentationData data = { + .sequence_index = 0, + .communicate_upper_bits = false, + .std_dev = 1.0, + .luma_error_threshold = 0, + .chroma_error_threshold = 0, + .sample_values = {12, 12, 12, 12, 12, 12, 12, 12}}; + VideoFrame frame = + VideoFrame::Builder() + .set_video_frame_buffer(MakeI420FrameBufferWithDifferentPixelValues()) + .build(); + + std::optional corruption_score = GetCorruptionScore(data, frame); + + ASSERT_TRUE(corruption_score.has_value()); + EXPECT_DOUBLE_EQ(*corruption_score, 1.0); +} + +TEST(FrameInstrumentationEvaluationTest, + ApplyThresholdsWhenNonNegativeThresholdsAreProvided) { + FrameInstrumentationData data = { + .sequence_index = 0, + .communicate_upper_bits = false, + .std_dev = 1.0, + .luma_error_threshold 
= 8, + .chroma_error_threshold = 8, + .sample_values = {12, 12, 12, 12, 12, 12, 12, 12}}; + VideoFrame frame = + VideoFrame::Builder() + .set_video_frame_buffer(MakeI420FrameBufferWithDifferentPixelValues()) + .build(); + + std::optional corruption_score = GetCorruptionScore(data, frame); + + ASSERT_TRUE(corruption_score.has_value()); + EXPECT_LE(*corruption_score, 1); + EXPECT_GE(*corruption_score, 0); +} + +TEST(FrameInstrumentationEvaluationTest, + ApplyStdDevWhenNonNegativeStdDevIsProvided) { + FrameInstrumentationData data = { + .sequence_index = 0, + .communicate_upper_bits = false, + .std_dev = 0.6, + .luma_error_threshold = 8, + .chroma_error_threshold = 8, + .sample_values = {12, 12, 12, 12, 12, 12, 12, 12}}; + + std::vector sample_values = {12, 12, 12, 12, 12, 12, 12, 12}; + VideoFrame frame = + VideoFrame::Builder() + .set_video_frame_buffer(MakeI420FrameBufferWithDifferentPixelValues()) + .build(); + + std::optional corruption_score = GetCorruptionScore(data, frame); + + ASSERT_TRUE(corruption_score.has_value()); + EXPECT_LE(*corruption_score, 1); + EXPECT_GE(*corruption_score, 0); +} + +TEST(FrameInstrumentationEvaluationTest, ApplySequenceIndexWhenProvided) { + FrameInstrumentationData data = { + .sequence_index = 1, + .communicate_upper_bits = false, + .std_dev = 0.6, + .luma_error_threshold = 8, + .chroma_error_threshold = 8, + .sample_values = {12, 12, 12, 12, 12, 12, 12, 12}}; + + std::vector sample_values = {12, 12, 12, 12, 12, 12, 12, 12}; + VideoFrame frame = + VideoFrame::Builder() + .set_video_frame_buffer(MakeI420FrameBufferWithDifferentPixelValues()) + .build(); + + std::optional corruption_score = GetCorruptionScore(data, frame); + + ASSERT_TRUE(corruption_score.has_value()); + EXPECT_LE(*corruption_score, 1); + EXPECT_GE(*corruption_score, 0); +} + +} // namespace +} // namespace webrtc diff --git a/video/corruption_detection/frame_instrumentation_generator.cc b/video/corruption_detection/frame_instrumentation_generator.cc new file mode 100644 index 0000000000..6dc9f1cd88 --- /dev/null +++ b/video/corruption_detection/frame_instrumentation_generator.cc @@ -0,0 +1,237 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/corruption_detection/frame_instrumentation_generator.h" + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "api/scoped_refptr.h" +#include "api/video/corruption_detection_filter_settings.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" +#include "api/video_codecs/video_codec.h" +#include "common_video/frame_instrumentation_data.h" +#include "modules/include/module_common_types_public.h" +#include "modules/video_coding/utility/qp_parser.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" +#include "video/corruption_detection/generic_mapping_functions.h" +#include "video/corruption_detection/halton_frame_sampler.h" + +namespace webrtc { +namespace { + +// Avoid holding on to frames that might have been dropped by encoder, as that +// can lead to frame buffer pools draining. +constexpr size_t kMaxPendingFrames = 3; + +std::optional GetCorruptionFilterSettings( + const EncodedImage& encoded_image, + VideoCodecType video_codec_type, + int layer_id) { + std::optional filter_settings = + encoded_image.corruption_detection_filter_settings(); + + if (!filter_settings.has_value()) { + // No implementation specific filter settings available, using a generic + // QP-based settings instead. + int qp = encoded_image.qp_; + if (qp == -1) { + std::optional parsed_qp = + QpParser().Parse(video_codec_type, layer_id, encoded_image.data(), + encoded_image.size()); + if (!parsed_qp.has_value()) { + RTC_LOG(LS_VERBOSE) + << "Missing QP for " << CodecTypeToPayloadString(video_codec_type) + << " layer " << layer_id << "."; + return std::nullopt; + } + qp = *parsed_qp; + } + + filter_settings = GetCorruptionFilterSettings(qp, video_codec_type); + } + return filter_settings; +} + +} // namespace + +FrameInstrumentationGenerator::FrameInstrumentationGenerator( + VideoCodecType video_codec_type) + : video_codec_type_(video_codec_type) {} + +void FrameInstrumentationGenerator::OnCapturedFrame(VideoFrame frame) { + MutexLock lock(&mutex_); + while (captured_frames_.size() >= kMaxPendingFrames) { + captured_frames_.pop(); + } + captured_frames_.push(frame); +} + +std::optional< + std::variant> +FrameInstrumentationGenerator::OnEncodedImage( + const EncodedImage& encoded_image) { + uint32_t rtp_timestamp_encoded_image = encoded_image.RtpTimestamp(); + std::optional captured_frame; + int layer_id; + int sequence_index; + bool communicate_upper_bits; + std::vector sample_coordinates; + { + MutexLock lock(&mutex_); + while (!captured_frames_.empty() && + IsNewerTimestamp(rtp_timestamp_encoded_image, + captured_frames_.front().rtp_timestamp())) { + captured_frames_.pop(); + } + if (captured_frames_.empty() || captured_frames_.front().rtp_timestamp() != + rtp_timestamp_encoded_image) { + RTC_LOG(LS_VERBOSE) << "No captured frames for RTC timestamp " + << rtp_timestamp_encoded_image << "."; + return std::nullopt; + } + captured_frame = captured_frames_.front(); + + layer_id = GetLayerId(encoded_image); + + bool is_key_frame = + encoded_image.FrameType() == VideoFrameType::kVideoFrameKey; + if (!is_key_frame) { + for (const auto& [unused, context] : contexts_) { + if (context.rtp_timestamp_of_last_key_frame == + rtp_timestamp_encoded_image) { + // Upper layer of an SVC key frame. 
+ is_key_frame = true; + break; + } + } + } + if (is_key_frame) { + contexts_[layer_id].rtp_timestamp_of_last_key_frame = + encoded_image.RtpTimestamp(); + } else if (contexts_.find(layer_id) == contexts_.end()) { + // TODO: bugs.webrtc.org/358039777 - Update this if statement such that + // LxTy scalability modes work properly. It is not a problem for LxTy_KEY + // scalability. + // + // For LxTy, it sometimes hinders calculating corruption score on the + // higher spatial layers. Because e.g. in L3T1 the first frame might not + // create 3 spatial layers but, only 2. Then, we end up not creating this + // in the map and will therefore not get any corruption score until a new + // key frame is sent. + RTC_LOG(LS_INFO) << "The first frame of a spatial or simulcast layer is " + "not a key frame."; + return std::nullopt; + } + + sequence_index = contexts_[layer_id].frame_sampler.GetCurrentIndex(); + communicate_upper_bits = false; + if (is_key_frame) { + communicate_upper_bits = true; + // Increase until all the last 7 bits are zeroes. + + // If this would overflow to 15 bits, reset to 0. + if (sequence_index > 0b0011'1111'1000'0000) { + sequence_index = 0; + } else if ((sequence_index & 0b0111'1111) != 0) { + // Last 7 bits are not all zeroes. + sequence_index >>= 7; + sequence_index += 1; + sequence_index <<= 7; + } + contexts_[layer_id].frame_sampler.SetCurrentIndex(sequence_index); + } + + // TODO: bugs.webrtc.org/358039777 - Maybe allow other sample sizes as well + sample_coordinates = + contexts_[layer_id] + .frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + is_key_frame, captured_frame->rtp_timestamp(), + /*num_samples=*/13); + if (sample_coordinates.empty()) { + if (!is_key_frame) { + return std::nullopt; + } + return FrameInstrumentationSyncData{.sequence_index = sequence_index, + .communicate_upper_bits = true}; + } + } + RTC_DCHECK(captured_frame.has_value()); + RTC_DCHECK(!sample_coordinates.empty()); + + std::optional filter_settings = + GetCorruptionFilterSettings(encoded_image, video_codec_type_, layer_id); + if (!filter_settings.has_value()) { + return std::nullopt; + } + + scoped_refptr captured_frame_buffer_as_i420 = + captured_frame->video_frame_buffer()->ToI420(); + if (!captured_frame_buffer_as_i420) { + RTC_LOG(LS_ERROR) << "Failed to convert " + << VideoFrameBufferTypeToString( + captured_frame->video_frame_buffer()->type()) + << " image to I420."; + return std::nullopt; + } + + FrameInstrumentationData data = { + .sequence_index = sequence_index, + .communicate_upper_bits = communicate_upper_bits, + .std_dev = filter_settings->std_dev, + .luma_error_threshold = filter_settings->luma_error_threshold, + .chroma_error_threshold = filter_settings->chroma_error_threshold}; + std::vector samples = GetSampleValuesForFrame( + captured_frame_buffer_as_i420, sample_coordinates, + encoded_image._encodedWidth, encoded_image._encodedHeight, + filter_settings->std_dev); + data.sample_values.reserve(samples.size()); + absl::c_transform(samples, std::back_inserter(data.sample_values), + [](const FilteredSample& sample) { return sample.value; }); + return data; +} + +std::optional FrameInstrumentationGenerator::GetHaltonSequenceIndex( + int layer_id) const { + MutexLock lock(&mutex_); + auto it = contexts_.find(layer_id); + if (it == contexts_.end()) { + return std::nullopt; + } + return it->second.frame_sampler.GetCurrentIndex(); +} + +void FrameInstrumentationGenerator::SetHaltonSequenceIndex(int index, + int layer_id) { + MutexLock lock(&mutex_); + if (index <= 
0x3FFF) { + contexts_[layer_id].frame_sampler.SetCurrentIndex(index); + } + RTC_DCHECK_LE(index, 0x3FFF) << "Index must not be larger than 0x3FFF"; +} + +int FrameInstrumentationGenerator::GetLayerId( + const EncodedImage& encoded_image) const { + return std::max(encoded_image.SpatialIndex().value_or(0), + encoded_image.SimulcastIndex().value_or(0)); +} +} // namespace webrtc diff --git a/video/corruption_detection/frame_instrumentation_generator.h b/video/corruption_detection/frame_instrumentation_generator.h new file mode 100644 index 0000000000..07cdcb2d42 --- /dev/null +++ b/video/corruption_detection/frame_instrumentation_generator.h @@ -0,0 +1,70 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_CORRUPTION_DETECTION_FRAME_INSTRUMENTATION_GENERATOR_H_ +#define VIDEO_CORRUPTION_DETECTION_FRAME_INSTRUMENTATION_GENERATOR_H_ + +#include +#include +#include +#include +#include + +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" +#include "common_video/frame_instrumentation_data.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" +#include "video/corruption_detection/halton_frame_sampler.h" + +namespace webrtc { + +class FrameInstrumentationGenerator { + public: + FrameInstrumentationGenerator() = delete; + explicit FrameInstrumentationGenerator(VideoCodecType video_codec_type); + + FrameInstrumentationGenerator(const FrameInstrumentationGenerator&) = delete; + FrameInstrumentationGenerator& operator=( + const FrameInstrumentationGenerator&) = delete; + + ~FrameInstrumentationGenerator() = default; + + void OnCapturedFrame(VideoFrame frame) RTC_LOCKS_EXCLUDED(mutex_); + std::optional< + std::variant> + OnEncodedImage(const EncodedImage& encoded_image) RTC_LOCKS_EXCLUDED(mutex_); + + // Returns `std::nullopt` if there is no context for the given layer. + std::optional GetHaltonSequenceIndex(int layer_id) const + RTC_LOCKS_EXCLUDED(mutex_); + void SetHaltonSequenceIndex(int index, int layer_id) + RTC_LOCKS_EXCLUDED(mutex_); + + int GetLayerId(const EncodedImage& encoded_image) const; + + private: + struct Context { + HaltonFrameSampler frame_sampler; + uint32_t rtp_timestamp_of_last_key_frame = 0; + }; + + // Incoming video frames in capture order. + std::queue captured_frames_ RTC_GUARDED_BY(mutex_); + // Map from spatial or simulcast index to sampling context. + std::map contexts_ RTC_GUARDED_BY(mutex_); + const VideoCodecType video_codec_type_; + mutable Mutex mutex_; +}; + +} // namespace webrtc + +#endif // VIDEO_CORRUPTION_DETECTION_FRAME_INSTRUMENTATION_GENERATOR_H_ diff --git a/video/corruption_detection/frame_instrumentation_generator_unittest.cc b/video/corruption_detection/frame_instrumentation_generator_unittest.cc new file mode 100644 index 0000000000..d4362c012d --- /dev/null +++ b/video/corruption_detection/frame_instrumentation_generator_unittest.cc @@ -0,0 +1,751 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/corruption_detection/frame_instrumentation_generator.h" + +#include +#include +#include +#include +#include + +#include "api/make_ref_counted.h" +#include "api/scoped_refptr.h" +#include "api/video/corruption_detection_filter_settings.h" +#include "api/video/encoded_image.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" +#include "common_video/frame_instrumentation_data.h" +#include "rtc_base/ref_counted_object.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { +using ::testing::ElementsAre; + +constexpr int kDefaultScaledWidth = 4; +constexpr int kDefaultScaledHeight = 4; + +scoped_refptr MakeDefaultI420FrameBuffer() { + // Create an I420 frame of size 4x4. + const int kDefaultLumaWidth = 4; + const int kDefaultLumaHeight = 4; + const int kDefaultChromaWidth = 2; + const int kDefaultPixelValue = 30; + std::vector kDefaultYContent(16, kDefaultPixelValue); + std::vector kDefaultUContent(4, kDefaultPixelValue); + std::vector kDefaultVContent(4, kDefaultPixelValue); + + return I420Buffer::Copy(kDefaultLumaWidth, kDefaultLumaHeight, + kDefaultYContent.data(), kDefaultLumaWidth, + kDefaultUContent.data(), kDefaultChromaWidth, + kDefaultVContent.data(), kDefaultChromaWidth); +} + +scoped_refptr MakeI420FrameBufferWithDifferentPixelValues() { + // Create an I420 frame of size 4x4. + const int kDefaultLumaWidth = 4; + const int kDefaultLumaHeight = 4; + const int kDefaultChromaWidth = 2; + std::vector kDefaultYContent = {1, 2, 3, 4, 5, 6, 7, 8, + 9, 10, 11, 12, 13, 14, 15, 16}; + std::vector kDefaultUContent = {17, 18, 19, 20}; + std::vector kDefaultVContent = {21, 22, 23, 24}; + + return I420Buffer::Copy(kDefaultLumaWidth, kDefaultLumaHeight, + kDefaultYContent.data(), kDefaultLumaWidth, + kDefaultUContent.data(), kDefaultChromaWidth, + kDefaultVContent.data(), kDefaultChromaWidth); +} + +TEST(FrameInstrumentationGeneratorTest, + ReturnsNothingWhenNoFramesHaveBeenProvided) { + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecGeneric); + + EXPECT_FALSE(generator.OnEncodedImage(EncodedImage()).has_value()); +} + +TEST(FrameInstrumentationGeneratorTest, + ReturnsNothingWhenNoFrameWithTheSameTimestampIsProvided) { + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecGeneric); + VideoFrame frame = VideoFrame::Builder() + .set_video_frame_buffer(MakeDefaultI420FrameBuffer()) + .set_rtp_timestamp(1) + .build(); + EncodedImage encoded_image; + encoded_image.SetRtpTimestamp(2); + + generator.OnCapturedFrame(frame); + + EXPECT_FALSE(generator.OnEncodedImage(encoded_image).has_value()); +} + +TEST(FrameInstrumentationGeneratorTest, + ReturnsNothingWhenTheFirstFrameOfASpatialOrSimulcastLayerIsNotAKeyFrame) { + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecGeneric); + VideoFrame frame = VideoFrame::Builder() + .set_video_frame_buffer(MakeDefaultI420FrameBuffer()) + .set_rtp_timestamp(1) + .build(); + + // Delta frame with no preceding key frame. 
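+  // (The generator only creates a sampling context for a layer when it sees
+  // a key frame, so a delta frame arriving first yields std::nullopt; see the
+  // "first frame of a spatial or simulcast layer is not a key frame" branch
+  // in FrameInstrumentationGenerator::OnEncodedImage().)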
+ EncodedImage encoded_image; + encoded_image.SetRtpTimestamp(1); + encoded_image.SetFrameType(VideoFrameType::kVideoFrameDelta); + encoded_image.SetSpatialIndex(0); + encoded_image.SetSimulcastIndex(0); + + generator.OnCapturedFrame(frame); + + // The first frame of a spatial or simulcast layer is not a key frame. + EXPECT_FALSE(generator.OnEncodedImage(encoded_image).has_value()); +} + +TEST(FrameInstrumentationGeneratorTest, + ReturnsNothingWhenQpIsUnsetAndNotParseable) { + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecGeneric); + VideoFrame frame = VideoFrame::Builder() + .set_video_frame_buffer(MakeDefaultI420FrameBuffer()) + .set_rtp_timestamp(1) + .build(); + + // Frame where QP is unset and QP is not parseable from the encoded data. + EncodedImage encoded_image; + encoded_image.SetRtpTimestamp(1); + encoded_image.SetFrameType(VideoFrameType::kVideoFrameKey); + + generator.OnCapturedFrame(frame); + + EXPECT_FALSE(generator.OnEncodedImage(encoded_image).has_value()); +} + +#if GTEST_HAS_DEATH_TEST +TEST(FrameInstrumentationGeneratorTest, FailsWhenCodecIsUnsupported) { + // No available mapping from codec to filter parameters. + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecGeneric); + VideoFrame frame = VideoFrame::Builder() + .set_video_frame_buffer(MakeDefaultI420FrameBuffer()) + .set_rtp_timestamp(1) + .build(); + EncodedImage encoded_image; + encoded_image.SetRtpTimestamp(1); + encoded_image.SetFrameType(VideoFrameType::kVideoFrameKey); + encoded_image.qp_ = 10; + + generator.OnCapturedFrame(frame); + + EXPECT_DEATH(generator.OnEncodedImage(encoded_image), + "Codec type Generic is not supported"); +} +#endif // GTEST_HAS_DEATH_TEST + +TEST(FrameInstrumentationGeneratorTest, + ReturnsInstrumentationDataForVP8KeyFrameWithQpSet) { + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecVP8); + VideoFrame frame = VideoFrame::Builder() + .set_video_frame_buffer(MakeDefaultI420FrameBuffer()) + .set_rtp_timestamp(1) + .build(); + // VP8 key frame with QP set. + EncodedImage encoded_image; + encoded_image.SetRtpTimestamp(1); + encoded_image.SetFrameType(VideoFrameType::kVideoFrameKey); + encoded_image.qp_ = 10; + encoded_image._encodedWidth = kDefaultScaledWidth; + encoded_image._encodedHeight = kDefaultScaledHeight; + + generator.OnCapturedFrame(frame); + std::optional< + std::variant> + data = generator.OnEncodedImage(encoded_image); + + ASSERT_TRUE(data.has_value()); + ASSERT_TRUE(std::holds_alternative(*data)); + FrameInstrumentationData frame_instrumentation_data = + std::get(*data); + EXPECT_EQ(frame_instrumentation_data.sequence_index, 0); + EXPECT_TRUE(frame_instrumentation_data.communicate_upper_bits); + EXPECT_NE(frame_instrumentation_data.std_dev, 0.0); + EXPECT_NE(frame_instrumentation_data.luma_error_threshold, 0); + EXPECT_NE(frame_instrumentation_data.chroma_error_threshold, 0); + EXPECT_FALSE(frame_instrumentation_data.sample_values.empty()); +} + +TEST(FrameInstrumentationGeneratorTest, + ReturnsInstrumentationDataWhenQpIsParseable) { + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecVP8); + VideoFrame frame = VideoFrame::Builder() + .set_video_frame_buffer(MakeDefaultI420FrameBuffer()) + .set_rtp_timestamp(1) + .build(); + + // VP8 key frame with parseable QP. 
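+  // (EncodedImage::qp_ is left unset here, so the generator falls back to
+  // QpParser, which reads the QP from the VP8 bitstream below; the array
+  // name suggests the encoded frame header carries QP 25.)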
+ constexpr uint8_t kCodedFrameVp8Qp25[] = { + 0x10, 0x02, 0x00, 0x9d, 0x01, 0x2a, 0x10, 0x00, 0x10, 0x00, + 0x02, 0x47, 0x08, 0x85, 0x85, 0x88, 0x85, 0x84, 0x88, 0x0c, + 0x82, 0x00, 0x0c, 0x0d, 0x60, 0x00, 0xfe, 0xfc, 0x5c, 0xd0}; + scoped_refptr encoded_image_buffer = + EncodedImageBuffer::Create(kCodedFrameVp8Qp25, + sizeof(kCodedFrameVp8Qp25)); + EncodedImage encoded_image; + encoded_image.SetRtpTimestamp(1); + encoded_image.SetFrameType(VideoFrameType::kVideoFrameKey); + encoded_image.SetEncodedData(encoded_image_buffer); + encoded_image._encodedWidth = kDefaultScaledWidth; + encoded_image._encodedHeight = kDefaultScaledHeight; + + generator.OnCapturedFrame(frame); + std::optional< + std::variant> + data = generator.OnEncodedImage(encoded_image); + + ASSERT_TRUE(data.has_value()); + ASSERT_TRUE(std::holds_alternative(*data)); + FrameInstrumentationData frame_instrumentation_data = + std::get(*data); + EXPECT_EQ(frame_instrumentation_data.sequence_index, 0); + EXPECT_TRUE(frame_instrumentation_data.communicate_upper_bits); + EXPECT_NE(frame_instrumentation_data.std_dev, 0.0); + EXPECT_NE(frame_instrumentation_data.luma_error_threshold, 0); + EXPECT_NE(frame_instrumentation_data.chroma_error_threshold, 0); + EXPECT_FALSE(frame_instrumentation_data.sample_values.empty()); +} + +TEST(FrameInstrumentationGeneratorTest, + ReturnsInstrumentationDataForUpperLayerOfAnSvcKeyFrame) { + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecVP9); + VideoFrame frame = VideoFrame::Builder() + .set_video_frame_buffer(MakeDefaultI420FrameBuffer()) + .set_rtp_timestamp(1) + .build(); + EncodedImage encoded_image1; + encoded_image1.SetRtpTimestamp(1); + encoded_image1.SetFrameType(VideoFrameType::kVideoFrameKey); + encoded_image1.SetSpatialIndex(0); + encoded_image1.qp_ = 10; + encoded_image1._encodedWidth = kDefaultScaledWidth; + encoded_image1._encodedHeight = kDefaultScaledHeight; + + // Delta frame that is an upper layer of an SVC key frame. 
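+  // (Because encoded_image2 shares the key frame's RTP timestamp, the
+  // generator's rtp_timestamp_of_last_key_frame check treats it as part of
+  // the same SVC key frame and still emits FrameInstrumentationData for the
+  // upper spatial layer.)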
+ EncodedImage encoded_image2; + encoded_image2.SetRtpTimestamp(1); + encoded_image2.SetFrameType(VideoFrameType::kVideoFrameDelta); + encoded_image2.SetSpatialIndex(1); + encoded_image2.qp_ = 10; + encoded_image2._encodedWidth = kDefaultScaledWidth; + encoded_image2._encodedHeight = kDefaultScaledHeight; + + generator.OnCapturedFrame(frame); + generator.OnEncodedImage(encoded_image1); + std::optional< + std::variant> + data = generator.OnEncodedImage(encoded_image2); + + ASSERT_TRUE(data.has_value()); + ASSERT_TRUE(std::holds_alternative(*data)); + FrameInstrumentationData frame_instrumentation_data = + std::get(*data); + EXPECT_EQ(frame_instrumentation_data.sequence_index, 0); + EXPECT_TRUE(frame_instrumentation_data.communicate_upper_bits); + EXPECT_NE(frame_instrumentation_data.std_dev, 0.0); + EXPECT_NE(frame_instrumentation_data.luma_error_threshold, 0); + EXPECT_NE(frame_instrumentation_data.chroma_error_threshold, 0); + EXPECT_FALSE(frame_instrumentation_data.sample_values.empty()); +} + +TEST(FrameInstrumentationGeneratorTest, + ReturnsNothingWhenNotEnoughTimeHasPassedSinceLastSampledFrame) { + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecVP8); + VideoFrame frame1 = VideoFrame::Builder() + .set_video_frame_buffer(MakeDefaultI420FrameBuffer()) + .set_rtp_timestamp(1) + .build(); + VideoFrame frame2 = VideoFrame::Builder() + .set_video_frame_buffer(MakeDefaultI420FrameBuffer()) + .set_rtp_timestamp(2) + .build(); + EncodedImage encoded_image1; + encoded_image1.SetRtpTimestamp(1); + encoded_image1.SetFrameType(VideoFrameType::kVideoFrameKey); + encoded_image1.SetSpatialIndex(0); + encoded_image1.qp_ = 10; + encoded_image1._encodedWidth = kDefaultScaledWidth; + encoded_image1._encodedHeight = kDefaultScaledHeight; + + // Delta frame that is too recent in comparison to the last sampled frame: + // passed time < 90'000. 
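+  // (Video RTP timestamps use a 90 kHz clock, so 90'000 ticks correspond to
+  // roughly one second between sampled frames; with timestamps 1 and 2
+  // essentially no time has passed, so the sampler declines to sample and
+  // OnEncodedImage() returns std::nullopt.)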
+ EncodedImage encoded_image2; + encoded_image2.SetRtpTimestamp(2); + encoded_image2.SetFrameType(VideoFrameType::kVideoFrameDelta); + encoded_image2.SetSpatialIndex(0); + encoded_image2.qp_ = 10; + encoded_image2._encodedWidth = kDefaultScaledWidth; + encoded_image2._encodedHeight = kDefaultScaledHeight; + + generator.OnCapturedFrame(frame1); + generator.OnCapturedFrame(frame2); + generator.OnEncodedImage(encoded_image1); + + ASSERT_FALSE(generator.OnEncodedImage(encoded_image2).has_value()); +} + +TEST(FrameInstrumentationGeneratorTest, + ReturnsInstrumentationDataForUpperLayerOfASecondSvcKeyFrame) { + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecVP9); + VideoFrame frame1 = VideoFrame::Builder() + .set_video_frame_buffer(MakeDefaultI420FrameBuffer()) + .set_rtp_timestamp(1) + .build(); + VideoFrame frame2 = VideoFrame::Builder() + .set_video_frame_buffer(MakeDefaultI420FrameBuffer()) + .set_rtp_timestamp(2) + .build(); + for (const VideoFrame& frame : {frame1, frame2}) { + EncodedImage encoded_image1; + encoded_image1.SetRtpTimestamp(frame.rtp_timestamp()); + encoded_image1.SetFrameType(VideoFrameType::kVideoFrameKey); + encoded_image1.SetSpatialIndex(0); + encoded_image1.qp_ = 10; + encoded_image1._encodedWidth = kDefaultScaledWidth; + encoded_image1._encodedHeight = kDefaultScaledHeight; + + EncodedImage encoded_image2; + encoded_image2.SetRtpTimestamp(frame.rtp_timestamp()); + encoded_image2.SetFrameType(VideoFrameType::kVideoFrameDelta); + encoded_image2.SetSpatialIndex(1); + encoded_image2.qp_ = 10; + encoded_image2._encodedWidth = kDefaultScaledWidth; + encoded_image2._encodedHeight = kDefaultScaledHeight; + + generator.OnCapturedFrame(frame); + + std::optional< + std::variant> + data1 = generator.OnEncodedImage(encoded_image1); + + std::optional< + std::variant> + data2 = generator.OnEncodedImage(encoded_image2); + + ASSERT_TRUE(data1.has_value()); + ASSERT_TRUE(data2.has_value()); + ASSERT_TRUE(std::holds_alternative(*data1)); + + ASSERT_TRUE(std::holds_alternative(*data2)); + + EXPECT_TRUE( + std::get(*data1).communicate_upper_bits); + EXPECT_TRUE( + std::get(*data2).communicate_upper_bits); + } +} + +TEST(FrameInstrumentationGeneratorTest, + SvcLayersSequenceIndicesIncreaseIndependentOnEachother) { + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecVP9); + VideoFrame frame1 = + VideoFrame::Builder() + .set_video_frame_buffer(MakeI420FrameBufferWithDifferentPixelValues()) + .set_rtp_timestamp(1) + .build(); + VideoFrame frame2 = + VideoFrame::Builder() + .set_video_frame_buffer(MakeI420FrameBufferWithDifferentPixelValues()) + .set_rtp_timestamp(2) + .build(); + for (const VideoFrame& frame : {frame1, frame2}) { + EncodedImage encoded_image1; + encoded_image1.SetRtpTimestamp(frame.rtp_timestamp()); + encoded_image1.SetFrameType(VideoFrameType::kVideoFrameKey); + encoded_image1.SetSpatialIndex(0); + encoded_image1.qp_ = 10; + encoded_image1._encodedWidth = kDefaultScaledWidth; + encoded_image1._encodedHeight = kDefaultScaledHeight; + + EncodedImage encoded_image2; + encoded_image2.SetRtpTimestamp(frame.rtp_timestamp()); + encoded_image2.SetFrameType(VideoFrameType::kVideoFrameDelta); + encoded_image2.SetSpatialIndex(1); + encoded_image2.qp_ = 10; + encoded_image2._encodedWidth = kDefaultScaledWidth; + encoded_image2._encodedHeight = kDefaultScaledHeight; + + generator.OnCapturedFrame(frame); + + std::optional< + std::variant> + data1 = generator.OnEncodedImage(encoded_image1); + + std::optional< + std::variant> + data2 = 
generator.OnEncodedImage(encoded_image2); + + ASSERT_TRUE(data1.has_value()); + ASSERT_TRUE(data2.has_value()); + ASSERT_TRUE(std::holds_alternative(*data1)); + + ASSERT_TRUE(std::holds_alternative(*data2)); + + FrameInstrumentationData frame_instrumentation_data1 = + std::get(*data1); + FrameInstrumentationData frame_instrumentation_data2 = + std::get(*data2); + + EXPECT_TRUE(frame_instrumentation_data1.communicate_upper_bits); + EXPECT_TRUE(frame_instrumentation_data2.communicate_upper_bits); + + EXPECT_EQ(frame_instrumentation_data1.sequence_index, + frame_instrumentation_data2.sequence_index); + + // In the test the frames have equal frame buffers so the sample values + // should be equal. + EXPECT_THAT(frame_instrumentation_data1.sample_values, + frame_instrumentation_data2.sample_values); + } +} + +TEST(FrameInstrumentationGeneratorTest, + OutputsDeltaFrameInstrumentationDataForSimulcast) { + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecVP9); + bool has_found_delta_frame = false; + // 34 frames is the minimum number of frames to be able to sample a delta + // frame. + for (int i = 0; i < 34; ++i) { + VideoFrame frame = VideoFrame::Builder() + .set_video_frame_buffer(MakeDefaultI420FrameBuffer()) + .set_rtp_timestamp(i) + .build(); + EncodedImage encoded_image1; + encoded_image1.SetRtpTimestamp(frame.rtp_timestamp()); + encoded_image1.SetFrameType(i == 0 ? VideoFrameType::kVideoFrameKey + : VideoFrameType::kVideoFrameDelta); + encoded_image1.SetSimulcastIndex(0); + encoded_image1.qp_ = 10; + encoded_image1._encodedWidth = kDefaultScaledWidth; + encoded_image1._encodedHeight = kDefaultScaledHeight; + + EncodedImage encoded_image2; + encoded_image2.SetRtpTimestamp(frame.rtp_timestamp()); + encoded_image2.SetFrameType(i == 0 ? 
VideoFrameType::kVideoFrameKey + : VideoFrameType::kVideoFrameDelta); + encoded_image2.SetSimulcastIndex(1); + encoded_image2.qp_ = 10; + encoded_image2._encodedWidth = kDefaultScaledWidth; + encoded_image2._encodedHeight = kDefaultScaledHeight; + + generator.OnCapturedFrame(frame); + + std::optional< + std::variant> + data1 = generator.OnEncodedImage(encoded_image1); + + std::optional< + std::variant> + data2 = generator.OnEncodedImage(encoded_image2); + + if (i == 0) { + ASSERT_TRUE(data1.has_value()); + ASSERT_TRUE(data2.has_value()); + ASSERT_TRUE(std::holds_alternative(*data1)); + + ASSERT_TRUE(std::holds_alternative(*data2)); + + EXPECT_TRUE( + std::get(*data1).communicate_upper_bits); + EXPECT_TRUE( + std::get(*data2).communicate_upper_bits); + } else if (data1.has_value() || data2.has_value()) { + if (data1.has_value()) { + ASSERT_TRUE(std::holds_alternative(*data1)); + EXPECT_FALSE( + std::get(*data1).communicate_upper_bits); + } + if (data2.has_value()) { + ASSERT_TRUE(std::holds_alternative(*data2)); + EXPECT_FALSE( + std::get(*data2).communicate_upper_bits); + } + has_found_delta_frame = true; + } + } + EXPECT_TRUE(has_found_delta_frame); +} + +TEST(FrameInstrumentationGeneratorTest, + SequenceIndexIncreasesCorrectlyAtNewKeyFrame) { + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecVP8); + VideoFrame frame1 = + VideoFrame::Builder() + .set_video_frame_buffer(MakeI420FrameBufferWithDifferentPixelValues()) + .set_rtp_timestamp(1) + .build(); + VideoFrame frame2 = + VideoFrame::Builder() + .set_video_frame_buffer(MakeI420FrameBufferWithDifferentPixelValues()) + .set_rtp_timestamp(2) + .build(); + EncodedImage encoded_image1; + encoded_image1.SetRtpTimestamp(1); + encoded_image1.SetFrameType(VideoFrameType::kVideoFrameKey); + encoded_image1.qp_ = 10; + encoded_image1._encodedWidth = kDefaultScaledWidth; + encoded_image1._encodedHeight = kDefaultScaledHeight; + + // Delta frame that is an upper layer of an SVC key frame. 
+ EncodedImage encoded_image2; + encoded_image2.SetRtpTimestamp(2); + encoded_image2.SetFrameType(VideoFrameType::kVideoFrameKey); + encoded_image2.qp_ = 10; + encoded_image2._encodedWidth = kDefaultScaledWidth; + encoded_image2._encodedHeight = kDefaultScaledHeight; + + generator.OnCapturedFrame(frame1); + generator.OnCapturedFrame(frame2); + + ASSERT_EQ(generator.GetLayerId(encoded_image1), + generator.GetLayerId(encoded_image2)); + generator.SetHaltonSequenceIndex(0b0010'1010, + generator.GetLayerId(encoded_image1)); + + std::optional< + std::variant> + data1 = generator.OnEncodedImage(encoded_image1); + std::optional< + std::variant> + data2 = generator.OnEncodedImage(encoded_image2); + + ASSERT_TRUE(data1.has_value()); + ASSERT_TRUE(data2.has_value()); + ASSERT_TRUE(std::holds_alternative(*data1)); + ASSERT_TRUE(std::holds_alternative(*data2)); + + FrameInstrumentationData frame_instrumentation_data1 = + std::get(*data1); + FrameInstrumentationData frame_instrumentation_data2 = + std::get(*data2); + + EXPECT_EQ(frame_instrumentation_data1.sequence_index, 0b0000'1000'0000); + EXPECT_EQ(frame_instrumentation_data2.sequence_index, 0b0001'0000'0000); +} + +TEST(FrameInstrumentationGeneratorTest, + SequenceIndexThatWouldOverflowTo15BitsIncreasesCorrectlyAtNewKeyFrame) { + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecVP8); + VideoFrame frame1 = + VideoFrame::Builder() + .set_video_frame_buffer(MakeI420FrameBufferWithDifferentPixelValues()) + .set_rtp_timestamp(1) + .build(); + VideoFrame frame2 = + VideoFrame::Builder() + .set_video_frame_buffer(MakeI420FrameBufferWithDifferentPixelValues()) + .set_rtp_timestamp(2) + .build(); + EncodedImage encoded_image1; + encoded_image1.SetRtpTimestamp(1); + encoded_image1.SetFrameType(VideoFrameType::kVideoFrameKey); + encoded_image1.qp_ = 10; + encoded_image1._encodedWidth = kDefaultScaledWidth; + encoded_image1._encodedHeight = kDefaultScaledHeight; + encoded_image1.SetSimulcastIndex(0); + + EncodedImage encoded_image2; + encoded_image2.SetRtpTimestamp(2); + encoded_image2.SetFrameType(VideoFrameType::kVideoFrameKey); + encoded_image2.qp_ = 10; + encoded_image2._encodedWidth = kDefaultScaledWidth; + encoded_image2._encodedHeight = kDefaultScaledHeight; + encoded_image2.SetSimulcastIndex(0); + + generator.OnCapturedFrame(frame1); + generator.OnCapturedFrame(frame2); + + ASSERT_EQ(generator.GetLayerId(encoded_image1), + generator.GetLayerId(encoded_image2)); + generator.SetHaltonSequenceIndex(0b11'1111'1111'1111, + generator.GetLayerId(encoded_image1)); + std::optional< + std::variant> + data1 = generator.OnEncodedImage(encoded_image1); + std::optional< + std::variant> + data2 = generator.OnEncodedImage(encoded_image2); + + ASSERT_TRUE(data1.has_value()); + ASSERT_TRUE(data2.has_value()); + ASSERT_TRUE(std::holds_alternative(*data1)); + ASSERT_TRUE(std::holds_alternative(*data2)); + + FrameInstrumentationData frame_instrumentation_data1 = + std::get(*data1); + FrameInstrumentationData frame_instrumentation_data2 = + std::get(*data2); + + EXPECT_EQ(frame_instrumentation_data1.sequence_index, 0); + EXPECT_EQ(frame_instrumentation_data2.sequence_index, 0b1000'0000); +} + +TEST(FrameInstrumentationGeneratorTest, + SequenceIndexIncreasesCorrectlyAtNewKeyFrameAlreadyZeroes) { + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecVP8); + VideoFrame frame1 = + VideoFrame::Builder() + .set_video_frame_buffer(MakeI420FrameBufferWithDifferentPixelValues()) + .set_rtp_timestamp(1) + .build(); + VideoFrame frame2 = + 
VideoFrame::Builder() + .set_video_frame_buffer(MakeI420FrameBufferWithDifferentPixelValues()) + .set_rtp_timestamp(2) + .build(); + EncodedImage encoded_image1; + encoded_image1.SetRtpTimestamp(1); + encoded_image1.SetFrameType(VideoFrameType::kVideoFrameKey); + encoded_image1.qp_ = 10; + encoded_image1._encodedWidth = kDefaultScaledWidth; + encoded_image1._encodedHeight = kDefaultScaledHeight; + + // Delta frame that is an upper layer of an SVC key frame. + EncodedImage encoded_image2; + encoded_image2.SetRtpTimestamp(2); + encoded_image2.SetFrameType(VideoFrameType::kVideoFrameKey); + encoded_image2.qp_ = 10; + encoded_image2._encodedWidth = kDefaultScaledWidth; + encoded_image2._encodedHeight = kDefaultScaledHeight; + + generator.OnCapturedFrame(frame1); + generator.OnCapturedFrame(frame2); + + ASSERT_EQ(generator.GetLayerId(encoded_image1), + generator.GetLayerId(encoded_image2)); + generator.SetHaltonSequenceIndex(0b1000'0000, + generator.GetLayerId(encoded_image1)); + + std::optional< + std::variant> + data1 = generator.OnEncodedImage(encoded_image1); + std::optional< + std::variant> + data2 = generator.OnEncodedImage(encoded_image2); + + ASSERT_TRUE(data1.has_value()); + ASSERT_TRUE(data2.has_value()); + ASSERT_TRUE(std::holds_alternative(*data1)); + ASSERT_TRUE(std::holds_alternative(*data2)); + + FrameInstrumentationData frame_instrumentation_data1 = + std::get(*data1); + FrameInstrumentationData frame_instrumentation_data2 = + std::get(*data2); + + EXPECT_EQ(frame_instrumentation_data1.sequence_index, 0b0000'1000'0000); + EXPECT_EQ(frame_instrumentation_data2.sequence_index, 0b0001'0000'0000); +} + +TEST(FrameInstrumentationGeneratorTest, GetterAndSetterOperatesAsExpected) { + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecVP8); + // `std::nullopt` when uninitialized. + EXPECT_FALSE(generator.GetHaltonSequenceIndex(1).has_value()); + + // Zero is a valid index. + generator.SetHaltonSequenceIndex(0, 1); + std::optional index = generator.GetHaltonSequenceIndex(1); + EXPECT_TRUE(index.has_value()); + EXPECT_EQ(*index, 0); + +#if GTEST_HAS_DEATH_TEST + // Negative values are not allowed to be set. + EXPECT_DEATH(generator.SetHaltonSequenceIndex(-2, 1), + "Index must be non-negative"); + index = generator.GetHaltonSequenceIndex(1); + EXPECT_TRUE(index.has_value()); + EXPECT_EQ(*index, 0); + + // Values requiring more than 15 bits are not allowed. + EXPECT_DEATH(generator.SetHaltonSequenceIndex(0x4000, 1), + "Index must not be larger than 0x3FFF"); + index = generator.GetHaltonSequenceIndex(1); + EXPECT_TRUE(index.has_value()); + EXPECT_EQ(*index, 0); +#endif // GTEST_HAS_DEATH_TEST +} + +TEST(FrameInstrumentationGeneratorTest, QueuesAtMostThreeInputFrames) { + auto generator = std::make_unique( + VideoCodecType::kVideoCodecVP8); + + bool frames_destroyed[4] = {}; + class TestBuffer : public webrtc::I420Buffer { + public: + TestBuffer(int width, int height, bool* frame_destroyed_indicator) + : I420Buffer(width, height), + frame_destroyed_indicator_(frame_destroyed_indicator) {} + + private: + friend class RefCountedObject; + ~TestBuffer() override { *frame_destroyed_indicator_ = true; } + + bool* frame_destroyed_indicator_; + }; + + // Insert four frames, the first one should expire and be released. 
+ for (int i = 0; i < 4; ++i) { + generator->OnCapturedFrame( + VideoFrame::Builder() + .set_video_frame_buffer(make_ref_counted( + kDefaultScaledWidth, kDefaultScaledHeight, + &frames_destroyed[i])) + .set_rtp_timestamp(1 + (33 * i)) + .build()); + } + + EXPECT_THAT(frames_destroyed, ElementsAre(true, false, false, false)); + + generator.reset(); + EXPECT_THAT(frames_destroyed, ElementsAre(true, true, true, true)); +} + +TEST(FrameInstrumentationGeneratorTest, + UsesFilterSettingsFromFrameWhenAvailable) { + FrameInstrumentationGenerator generator(VideoCodecType::kVideoCodecVP8); + VideoFrame frame = VideoFrame::Builder() + .set_video_frame_buffer(MakeDefaultI420FrameBuffer()) + .set_rtp_timestamp(1) + .build(); + // No QP needed when frame provides filter settings. + EncodedImage encoded_image; + encoded_image.SetRtpTimestamp(1); + encoded_image.SetFrameType(VideoFrameType::kVideoFrameKey); + encoded_image._encodedWidth = kDefaultScaledWidth; + encoded_image._encodedHeight = kDefaultScaledHeight; + encoded_image.set_corruption_detection_filter_settings( + CorruptionDetectionFilterSettings{.std_dev = 1.0, + .luma_error_threshold = 2, + .chroma_error_threshold = 3}); + + generator.OnCapturedFrame(frame); + std::optional< + std::variant> + data = generator.OnEncodedImage(encoded_image); + + ASSERT_TRUE(data.has_value()); + ASSERT_TRUE(std::holds_alternative(*data)); + FrameInstrumentationData frame_instrumentation_data = + std::get(*data); + EXPECT_EQ(frame_instrumentation_data.std_dev, 1.0); + EXPECT_EQ(frame_instrumentation_data.luma_error_threshold, 2); + EXPECT_EQ(frame_instrumentation_data.chroma_error_threshold, 3); +} + +} // namespace +} // namespace webrtc diff --git a/video/corruption_detection/frame_pair_corruption_score.cc b/video/corruption_detection/frame_pair_corruption_score.cc new file mode 100644 index 0000000000..91b789cb5e --- /dev/null +++ b/video/corruption_detection/frame_pair_corruption_score.cc @@ -0,0 +1,100 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "video/corruption_detection/frame_pair_corruption_score.h"
+
+#include <optional>
+#include <vector>
+
+#include "absl/strings/string_view.h"
+#include "api/scoped_refptr.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame_buffer.h"
+#include "rtc_base/checks.h"
+#include "video/corruption_detection/generic_mapping_functions.h"
+#include "video/corruption_detection/halton_frame_sampler.h"
+#include "video/corruption_detection/utils.h"
+
+namespace webrtc {
+namespace {
+
+constexpr float kDefaultSampleFraction = 0.5;
+
+}  // namespace
+
+FramePairCorruptionScorer::FramePairCorruptionScorer(
+    absl::string_view codec_name,
+    float scale_factor,
+    std::optional<float> sample_fraction)
+    : codec_type_(GetVideoCodecType(codec_name)),
+      sample_fraction_(sample_fraction.value_or(kDefaultSampleFraction)),
+      corruption_classifier_(scale_factor) {
+  RTC_CHECK_GE(sample_fraction_, 0) << "Sample fraction must be non-negative.";
+  RTC_CHECK_LE(sample_fraction_, 1) << "Sample fraction must be less than or "
+                                       "equal to 1.";
+}
+
+FramePairCorruptionScorer::FramePairCorruptionScorer(
+    absl::string_view codec_name,
+    float growth_rate,
+    float midpoint,
+    std::optional<float> sample_fraction)
+    : codec_type_(GetVideoCodecType(codec_name)),
+      sample_fraction_(sample_fraction.value_or(kDefaultSampleFraction)),
+      corruption_classifier_(growth_rate, midpoint) {
+  RTC_CHECK_GE(sample_fraction_, 0) << "Sample fraction must be non-negative.";
+  RTC_CHECK_LE(sample_fraction_, 1) << "Sample fraction must be less than or "
+                                       "equal to 1.";
+}
+
+double FramePairCorruptionScorer::CalculateScore(
+    int qp,
+    I420BufferInterface& reference_buffer,
+    I420BufferInterface& test_buffer) {
+  RTC_CHECK_GE(reference_buffer.width(), test_buffer.width());
+  RTC_CHECK_GE(reference_buffer.height(), test_buffer.height());
+  // The QP range [0, 255] matches VP9 and AV1; other codecs use a subset.
+  RTC_DCHECK_GE(qp, 0);
+  RTC_DCHECK_LE(qp, 255);
+
+  // We calculate the corruption score per "sample" rather than per "pixel",
+  // hence the factor "3/2".
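+  // (An I420 frame has width * height luma samples plus two chroma planes of
+  // half width and half height, i.e. 1.5 samples per pixel.)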
+  const int num_samples = static_cast<int>(
+      (test_buffer.width() * test_buffer.height() * 3 / 2) * sample_fraction_);
+  std::vector<HaltonFrameSampler::Coordinates> halton_samples =
+      halton_frame_sampler_.GetSampleCoordinatesForFrame(num_samples);
+  RTC_DCHECK_EQ(halton_samples.size(), num_samples);
+
+  scoped_refptr<I420Buffer> reference_i420_buffer =
+      GetAsI420Buffer(reference_buffer.ToI420());
+  scoped_refptr<I420Buffer> test_i420_buffer =
+      GetAsI420Buffer(test_buffer.ToI420());
+
+  CorruptionDetectionFilterSettings filter_settings =
+      GetCorruptionFilterSettings(qp, codec_type_);
+
+  const std::vector<FilteredSample> filtered_reference_sample_values =
+      GetSampleValuesForFrame(
+          reference_i420_buffer, halton_samples, test_i420_buffer->width(),
+          test_i420_buffer->height(), filter_settings.std_dev);
+  const std::vector<FilteredSample> filtered_test_sample_values =
+      GetSampleValuesForFrame(
+          test_i420_buffer, halton_samples, test_i420_buffer->width(),
+          test_i420_buffer->height(), filter_settings.std_dev);
+  RTC_CHECK_EQ(filtered_reference_sample_values.size(),
+               filtered_test_sample_values.size());
+
+  return corruption_classifier_.CalculateCorruptionProbability(
+      filtered_reference_sample_values, filtered_test_sample_values,
+      filter_settings.luma_error_threshold,
+      filter_settings.chroma_error_threshold);
+}
+
+}  // namespace webrtc
diff --git a/video/corruption_detection/frame_pair_corruption_score.h b/video/corruption_detection/frame_pair_corruption_score.h
new file mode 100644
index 0000000000..31a70a6a74
--- /dev/null
+++ b/video/corruption_detection/frame_pair_corruption_score.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2024 The WebRTC project authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef VIDEO_CORRUPTION_DETECTION_FRAME_PAIR_CORRUPTION_SCORE_H_
+#define VIDEO_CORRUPTION_DETECTION_FRAME_PAIR_CORRUPTION_SCORE_H_
+
+#include <optional>
+
+#include "absl/strings/string_view.h"
+#include "api/video/video_codec_type.h"
+#include "api/video/video_frame_buffer.h"
+#include "video/corruption_detection/corruption_classifier.h"
+#include "video/corruption_detection/halton_frame_sampler.h"
+
+namespace webrtc {
+
+// Given a `reference_buffer` and a `test_buffer`, calculates the corruption
+// score of a frame pair. The score is calculated by comparing sample values
+// (each sampled coordinate maps to a Y, U or V sample) of the reference
+// buffer and the test buffer at a set of sampled coordinates.
+//
+// TODO: bugs.webrtc.org/358039777 - Remove one of the constructors based on
+// which mapping function works best in practice.
+// There are two constructors for this class. The first one takes a
+// `scale_factor` as a parameter, which is used to construct the scaling
+// function. The second one takes a `growth_rate` and a `midpoint` as
+// parameters, which are used to construct the logistic function.
+// `sample_fraction` is the fraction of samples to compare, e.g. with
+// `sample_fraction` = 0.5 we compare 50% of the samples.
+//
+// The dimensions of the `reference_buffer` and the `test_buffer` do not need
+// to be the same, in order to support downscaling caused by e.g. simulcast
+// and scalable encoding. However, the dimensions of the `reference_buffer`
+// must be larger than or equal to the dimensions of the `test_buffer`.
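+//
+// Illustrative usage (cf. the unit tests; the variable names are examples):
+//   FramePairCorruptionScorer scorer("VP8", /*scale_factor=*/14,
+//                                    /*sample_fraction=*/std::nullopt);
+//   double corruption_probability =
+//       scorer.CalculateScore(qp, *reference_i420, *test_i420);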
+class FramePairCorruptionScorer { + public: + // `scale_factor` is the parameter constructing the scaling function, which is + // used to calculate the corruption score. `sample_fraction` is the fraction + // of pixels to sample. + FramePairCorruptionScorer(absl::string_view codec_name, + float scale_factor, + std::optional sample_fraction); + + // `growth_rate` and `midpoint` are parameters constructing a logistic + // function, which is used to calculate the corruption score. + // `sample_fraction` is the fraction of pixels to sample. + FramePairCorruptionScorer(absl::string_view codec_name, + float growth_rate, + float midpoint, + std::optional sample_fraction); + + ~FramePairCorruptionScorer() = default; + + // Returns the corruption score as a probability value between 0 and 1, where + // 0 means no corruption and 1 means that the compressed frame is corrupted. + // + // However, note that the corruption score may not accurately reflect + // corruption. E.g. even if the corruption score is 0, the compressed frame + // may still be corrupted and vice versa. + double CalculateScore(int qp, + I420BufferInterface& reference_buffer, + I420BufferInterface& test_buffer); + + private: + const VideoCodecType codec_type_; + const float sample_fraction_; + + HaltonFrameSampler halton_frame_sampler_; + CorruptionClassifier corruption_classifier_; +}; + +} // namespace webrtc + +#endif // VIDEO_CORRUPTION_DETECTION_FRAME_PAIR_CORRUPTION_SCORE_H_ diff --git a/video/corruption_detection/frame_pair_corruption_score_unittest.cc b/video/corruption_detection/frame_pair_corruption_score_unittest.cc new file mode 100644 index 0000000000..cf6de29bda --- /dev/null +++ b/video/corruption_detection/frame_pair_corruption_score_unittest.cc @@ -0,0 +1,193 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/corruption_detection/frame_pair_corruption_score.h" + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/scoped_refptr.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame_buffer.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" +#include "test/testsupport/frame_reader.h" + +namespace webrtc { +namespace { + +using test::FrameReader; + +// Input video. +constexpr absl::string_view kFilename = "ConferenceMotion_1280_720_50"; +constexpr int kWidth = 1280; +constexpr int kHeight = 720; + +constexpr absl::string_view kCodecName = "VP8"; + +// Scale function parameters. +constexpr float kScaleFactor = 14; + +// Logistic function parameters. 
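+// (Assumed shape: the classifier maps the aggregated sample error through a
+// logistic curve parameterized by `kGrowthRate` and `kMidpoint`; see
+// corruption_classifier.h for the exact definition.)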
+constexpr float kGrowthRate = 0.5; +constexpr float kMidpoint = 3; + +std::unique_ptr GetFrameGenerator() { + std::string clip_path = test::ResourcePath(kFilename, "yuv"); + EXPECT_TRUE(test::FileExists(clip_path)); + return CreateYuvFrameReader(clip_path, {.width = kWidth, .height = kHeight}, + test::YuvFrameReaderImpl::RepeatMode::kPingPong); +} + +scoped_refptr GetDowscaledFrame( + scoped_refptr frame, + float downscale_factor) { + scoped_refptr downscaled_frame = + I420Buffer::Create(kWidth * downscale_factor, kHeight * downscale_factor); + downscaled_frame->ScaleFrom(*frame); + return downscaled_frame; +} + +TEST(FramePairCorruptionScorerTest, SameFrameReturnsNoCorruptionScaleFunction) { + std::unique_ptr frame_reader = GetFrameGenerator(); + scoped_refptr frame = frame_reader->PullFrame(); + + FramePairCorruptionScorer frame_pair_corruption_score( + kCodecName, kScaleFactor, std::nullopt); + EXPECT_LT( + frame_pair_corruption_score.CalculateScore(/*qp=*/1, *frame, *frame), + 0.5); +} + +TEST(FramePairCorruptionScorerTest, + SameFrameReturnsNoCorruptionLogisticFunction) { + std::unique_ptr frame_reader = GetFrameGenerator(); + scoped_refptr frame = frame_reader->PullFrame(); + + FramePairCorruptionScorer frame_pair_corruption_score( + kCodecName, kGrowthRate, kMidpoint, std::nullopt); + EXPECT_LT( + frame_pair_corruption_score.CalculateScore(/*qp=*/1, *frame, *frame), + 0.5); +} + +TEST(FramePairCorruptionScorerTest, + HalfScaledFrameReturnsNoCorruptionScaleFunction) { + std::unique_ptr frame_reader = GetFrameGenerator(); + scoped_refptr frame = frame_reader->PullFrame(); + + FramePairCorruptionScorer frame_pair_corruption_score( + kCodecName, kScaleFactor, std::nullopt); + EXPECT_LT(frame_pair_corruption_score.CalculateScore( + /*qp=*/1, *frame, + *GetDowscaledFrame(frame, /*downscale_factor=*/0.5)), + 0.5); +} + +TEST(FramePairCorruptionScorerTest, + HalfScaledFrameReturnsNoCorruptionLogisticFunction) { + std::unique_ptr frame_reader = GetFrameGenerator(); + scoped_refptr frame = frame_reader->PullFrame(); + + FramePairCorruptionScorer frame_pair_corruption_score( + kCodecName, kGrowthRate, kMidpoint, std::nullopt); + EXPECT_LT(frame_pair_corruption_score.CalculateScore( + /*qp=*/1, *frame, + *GetDowscaledFrame(frame, /*downscale_factor=*/0.5)), + 0.5); +} + +TEST(FramePairCorruptionScorerTest, QuarterScaledFrameReturnsNoCorruption) { + std::unique_ptr frame_reader = GetFrameGenerator(); + scoped_refptr frame = frame_reader->PullFrame(); + + FramePairCorruptionScorer frame_pair_corruption_score( + kCodecName, kScaleFactor, std::nullopt); + EXPECT_LT(frame_pair_corruption_score.CalculateScore( + /*qp=*/1, *frame, + *GetDowscaledFrame(frame, /*downscale_factor=*/0.25)), + 0.5); +} + +TEST(FramePairCorruptionScorerTest, + DifferentFrameResultsInCorruptionScaleFunction) { + std::unique_ptr frame_reader = GetFrameGenerator(); + scoped_refptr frame = frame_reader->PullFrame(); + + // Get frame number 5, which should be different from the first frame, and + // hence, indicate a corruption. 
+ scoped_refptr different_frame = + frame_reader->ReadFrame(/*frame_num=*/5); + + FramePairCorruptionScorer frame_pair_corruption_score( + kCodecName, kScaleFactor, std::nullopt); + EXPECT_GT(frame_pair_corruption_score.CalculateScore(/*qp=*/1, *frame, + *different_frame), + 0.5); +} + +TEST(FramePairCorruptionScorerTest, + DifferentFrameResultsInCorruptionLogisticFunction) { + std::unique_ptr frame_reader = GetFrameGenerator(); + scoped_refptr frame = frame_reader->PullFrame(); + + // Get frame number 5, which should be different from the first frame, and + // hence, indicate a corruption. + scoped_refptr different_frame = + frame_reader->ReadFrame(/*frame_num=*/5); + + FramePairCorruptionScorer frame_pair_corruption_score( + kCodecName, kGrowthRate, kMidpoint, std::nullopt); + EXPECT_GT(frame_pair_corruption_score.CalculateScore(/*qp=*/1, *frame, + *different_frame), + 0.5); +} + +TEST(FramePairCorruptionScorerTest, + HalfScaledDifferentFrameResultsInCorruptionScaleFunction) { + std::unique_ptr frame_reader = GetFrameGenerator(); + scoped_refptr frame = frame_reader->PullFrame(); + + // Get frame number 5, which should be different from the first frame, and + // hence, indicate a corruption. + scoped_refptr different_frame = + frame_reader->ReadFrame(/*frame_num=*/5); + + FramePairCorruptionScorer frame_pair_corruption_score( + kCodecName, kScaleFactor, std::nullopt); + EXPECT_GT(frame_pair_corruption_score.CalculateScore( + /*qp=*/1, *frame, + *GetDowscaledFrame(different_frame, /*downscale_factor=*/0.25)), + 0.5); +} + +TEST(FramePairCorruptionScorerTest, + HalfScaledDifferentFrameResultsInCorruptionLogisticFunction) { + std::unique_ptr frame_reader = GetFrameGenerator(); + scoped_refptr frame = frame_reader->PullFrame(); + + // Get frame number 5, which should be different from the first frame, and + // hence, indicate a corruption. + scoped_refptr different_frame = + frame_reader->ReadFrame(/*frame_num=*/5); + + FramePairCorruptionScorer frame_pair_corruption_score( + kCodecName, kGrowthRate, kMidpoint, std::nullopt); + EXPECT_GT(frame_pair_corruption_score.CalculateScore( + /*qp=*/1, *frame, + *GetDowscaledFrame(different_frame, /*downscale_factor=*/0.25)), + 0.5); +} + +} // namespace +} // namespace webrtc diff --git a/video/corruption_detection/generic_mapping_functions.cc b/video/corruption_detection/generic_mapping_functions.cc new file mode 100644 index 0000000000..9c75a50281 --- /dev/null +++ b/video/corruption_detection/generic_mapping_functions.cc @@ -0,0 +1,92 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/corruption_detection/generic_mapping_functions.h" + +#include + +#include "api/video/video_codec_type.h" +#include "api/video_codecs/video_codec.h" +#include "rtc_base/checks.h" + +namespace webrtc { +namespace { + +constexpr int kLumaThreshold = 5; +constexpr int kChromaThresholdVp8 = 6; +constexpr int kChromaThresholdVp9 = 4; +constexpr int kChromaThresholdAv1 = 4; +constexpr int kChromaThresholdH264 = 2; +constexpr int kChromaThresholdH265 = 4; + +int LumaThreshold(VideoCodecType codec_type) { + return kLumaThreshold; +} + +int ChromaThreshold(VideoCodecType codec_type) { + switch (codec_type) { + case VideoCodecType::kVideoCodecVP8: + return kChromaThresholdVp8; + case VideoCodecType::kVideoCodecVP9: + return kChromaThresholdVp9; + case VideoCodecType::kVideoCodecAV1: + return kChromaThresholdAv1; + case VideoCodecType::kVideoCodecH264: + return kChromaThresholdH264; + case VideoCodecType::kVideoCodecH265: + return kChromaThresholdH265; + default: + RTC_FATAL() << "Codec type " << CodecTypeToPayloadString(codec_type) + << " is not supported."; + } +} + +double ExponentialFunction(double a, double b, double c, int qp) { + return a * std::exp(b * qp - c); +} + +double RationalFunction(double a, double b, double c, int qp) { + return (-a * qp) / (qp + b) + c; +} + +// Maps QP to the optimal standard deviation for the Gausian kernel. +// Observe that the values below can be changed unnoticed. +double MapQpToOptimalStdDev(int qp, VideoCodecType codec_type) { + switch (codec_type) { + case VideoCodecType::kVideoCodecVP8: + return ExponentialFunction(0.006, 0.01857465, -4.26470513, qp); + case VideoCodecType::kVideoCodecVP9: + return RationalFunction(1, -257, 0.3, qp); + case VideoCodecType::kVideoCodecAV1: + return RationalFunction(0.69, -256, 0.42, qp); + case VideoCodecType::kVideoCodecH264: + return ExponentialFunction(0.016, 0.13976962, -1.40179328, qp); + case VideoCodecType::kVideoCodecH265: + // Observe that these values are currently only tuned for software libx265 + // in "preset ultrafast -tune zerolatency" mode. + return RationalFunction(1.6, -52, 0.1, qp); + default: + RTC_FATAL() << "Codec type " << CodecTypeToPayloadString(codec_type) + << " is not supported."; + } +} + +} // namespace + +CorruptionDetectionFilterSettings GetCorruptionFilterSettings( + int qp, + VideoCodecType codec_type) { + return CorruptionDetectionFilterSettings{ + .std_dev = MapQpToOptimalStdDev(qp, codec_type), + .luma_error_threshold = LumaThreshold(codec_type), + .chroma_error_threshold = ChromaThreshold(codec_type)}; +} + +} // namespace webrtc diff --git a/video/corruption_detection/generic_mapping_functions.h b/video/corruption_detection/generic_mapping_functions.h new file mode 100644 index 0000000000..191c229e6e --- /dev/null +++ b/video/corruption_detection/generic_mapping_functions.h @@ -0,0 +1,28 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef VIDEO_CORRUPTION_DETECTION_GENERIC_MAPPING_FUNCTIONS_H_ +#define VIDEO_CORRUPTION_DETECTION_GENERIC_MAPPING_FUNCTIONS_H_ + +#include "api/video/corruption_detection_filter_settings.h" +#include "api/video/video_codec_type.h" + +namespace webrtc { + +// TODO: bugs.webrtc.org/358039777 - Remove when downstream usage is gone. +using FilterSettings = CorruptionDetectionFilterSettings; + +CorruptionDetectionFilterSettings GetCorruptionFilterSettings( + int qp, + VideoCodecType codec_type); + +} // namespace webrtc + +#endif // VIDEO_CORRUPTION_DETECTION_GENERIC_MAPPING_FUNCTIONS_H_ diff --git a/video/corruption_detection/generic_mapping_functions_unittest.cc b/video/corruption_detection/generic_mapping_functions_unittest.cc new file mode 100644 index 0000000000..2817570945 --- /dev/null +++ b/video/corruption_detection/generic_mapping_functions_unittest.cc @@ -0,0 +1,106 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/corruption_detection/generic_mapping_functions.h" + +#include "api/video/video_codec_type.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::DoubleNear; +using ::testing::FieldsAre; + +constexpr double kMaxAbsoluteError = 1e-4; + +constexpr int kLumaThreshold = 5; +constexpr int kChromaThresholdVp8 = 6; +constexpr int kChromaThresholdVp9 = 4; +constexpr int kChromaThresholdAv1 = 4; +constexpr int kChromaThresholdH264 = 2; +constexpr int kChromaThresholdH265 = 4; + +TEST(GenericMappingFunctionsTest, TestVp8) { + constexpr VideoCodecType kCodecType = VideoCodecType::kVideoCodecVP8; + EXPECT_THAT(GetCorruptionFilterSettings( + /*qp=*/10, kCodecType), + FieldsAre(DoubleNear(0.5139, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdVp8)); + EXPECT_THAT(GetCorruptionFilterSettings( + /*qp=*/100, kCodecType), + FieldsAre(DoubleNear(2.7351, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdVp8)); + EXPECT_THAT(GetCorruptionFilterSettings(/*qp=*/127, kCodecType), + FieldsAre(DoubleNear(4.5162, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdVp8)); +} + +TEST(GenericMappingFunctionsTest, TestVp9) { + constexpr VideoCodecType kCodecType = VideoCodecType::kVideoCodecVP9; + EXPECT_THAT(GetCorruptionFilterSettings(/*qp=*/10, kCodecType), + FieldsAre(DoubleNear(0.3405, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdVp9)); + EXPECT_THAT(GetCorruptionFilterSettings(/*qp=*/100, kCodecType), + FieldsAre(DoubleNear(0.9369, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdVp9)); + EXPECT_THAT(GetCorruptionFilterSettings(/*qp=*/200, kCodecType), + FieldsAre(DoubleNear(3.8088, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdVp9)); + EXPECT_THAT(GetCorruptionFilterSettings(/*qp=*/255, kCodecType), + FieldsAre(DoubleNear(127.8, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdVp9)); +} + +TEST(GenericMappingFunctionsTest, TestAv1) { + constexpr VideoCodecType kCodecType = VideoCodecType::kVideoCodecAV1; + EXPECT_THAT(GetCorruptionFilterSettings(/*qp=*/10, kCodecType), + FieldsAre(DoubleNear(0.4480, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdAv1)); + EXPECT_THAT(GetCorruptionFilterSettings(/*qp=*/100, 
kCodecType), + FieldsAre(DoubleNear(0.8623, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdAv1)); + EXPECT_THAT(GetCorruptionFilterSettings(/*qp=*/200, kCodecType), + FieldsAre(DoubleNear(2.8842, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdAv1)); + EXPECT_THAT(GetCorruptionFilterSettings(/*qp=*/255, kCodecType), + FieldsAre(DoubleNear(176.37, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdAv1)); +} + +TEST(GenericMappingFunctionsTest, TestH264) { + constexpr VideoCodecType kCodecType = VideoCodecType::kVideoCodecH264; + EXPECT_THAT(GetCorruptionFilterSettings(/*qp=*/10, kCodecType), + FieldsAre(DoubleNear(0.263, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdH264)); + EXPECT_THAT(GetCorruptionFilterSettings(/*qp=*/30, kCodecType), + FieldsAre(DoubleNear(4.3047, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdH264)); + EXPECT_THAT(GetCorruptionFilterSettings(/*qp=*/51, kCodecType), + FieldsAre(DoubleNear(81.0346, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdH264)); +} + +TEST(GenericMappingFunctionsTest, TestH265) { + constexpr VideoCodecType kCodecType = VideoCodecType::kVideoCodecH265; + EXPECT_THAT(GetCorruptionFilterSettings(/*qp=*/10, kCodecType), + FieldsAre(DoubleNear(0.481, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdH265)); + EXPECT_THAT(GetCorruptionFilterSettings(/*qp=*/30, kCodecType), + FieldsAre(DoubleNear(2.2818, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdH265)); + EXPECT_THAT(GetCorruptionFilterSettings(/*qp=*/51, kCodecType), + FieldsAre(DoubleNear(81.7, kMaxAbsoluteError), kLumaThreshold, + kChromaThresholdH265)); +} + +} // namespace +} // namespace webrtc diff --git a/video/corruption_detection/halton_frame_sampler.cc b/video/corruption_detection/halton_frame_sampler.cc new file mode 100644 index 0000000000..53545ed5cd --- /dev/null +++ b/video/corruption_detection/halton_frame_sampler.cc @@ -0,0 +1,253 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/corruption_detection/halton_frame_sampler.h" + +#include +#include +#include +#include + +#include "api/scoped_refptr.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame_buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_minmax.h" +#include "video/corruption_detection/halton_sequence.h" + +namespace webrtc { +namespace { + +const double kCutoff = 0.2; +const int kLowerBoundKernelSize = 3; +constexpr int kMaxFramesBetweenSamples = 33; + +// Corresponds to 1 second for RTP timestamps (which are 90kHz). +constexpr uint32_t kMaxDurationBetweenSamples = 90'000; + +// The second *time* is always later than the first. If the second *timestamp* +// is smaller than the first, we interpret that as if one wraparound has +// occurred. 
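+// For example, `from` = 0xFFFF'0000 and `to` = 0x0001'0000 gives
+// `to - from` = 0x0002'0000 = 131072 >= 90'000, so enough time has passed.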
+uint32_t EnoughTimeHasPassed(uint32_t from, uint32_t to) { + return (to - from) >= kMaxDurationBetweenSamples; +} + +} // namespace + +HaltonFrameSampler::HaltonFrameSampler() + : coordinate_sampler_prng_(HaltonSequence(2)) {} + +std::vector +HaltonFrameSampler::GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + bool is_key_frame, + uint32_t rtp_timestamp, + int num_samples) { + if (num_samples < 1) { + return {}; + } + if (rtp_timestamp_last_frame_sampled_.has_value()) { + RTC_CHECK_NE(*rtp_timestamp_last_frame_sampled_, rtp_timestamp); + } + if (is_key_frame || frames_until_next_sample_ <= 0 || + !rtp_timestamp_last_frame_sampled_.has_value() || + EnoughTimeHasPassed(*rtp_timestamp_last_frame_sampled_, rtp_timestamp)) { + frames_until_next_sample_ = + (kMaxFramesBetweenSamples - 1) - (frames_sampled_ % 8); + ++frames_sampled_; + rtp_timestamp_last_frame_sampled_ = rtp_timestamp; + return GetSampleCoordinatesForFrame(num_samples); + } + --frames_until_next_sample_; + return {}; +} + +std::vector +HaltonFrameSampler::GetSampleCoordinatesForFrame(int num_samples) { + RTC_CHECK_GE(num_samples, 1); + std::vector coordinates; + coordinates.reserve(num_samples); + for (int i = 0; i < num_samples; ++i) { + coordinates.push_back(GetNextSampleCoordinates()); + } + return coordinates; +} + +HaltonFrameSampler::Coordinates HaltonFrameSampler::GetNextSampleCoordinates() { + std::vector point = coordinate_sampler_prng_.GetNext(); + return {.row = point[0], .column = point[1]}; +} + +void HaltonFrameSampler::Restart() { + coordinate_sampler_prng_.Reset(); +} + +int HaltonFrameSampler::GetCurrentIndex() const { + return coordinate_sampler_prng_.GetCurrentIndex(); +} + +void HaltonFrameSampler::SetCurrentIndex(int index) { + coordinate_sampler_prng_.SetCurrentIndex(index); +} + +// Apply Gaussian filtering to the data. +double GetFilteredElement(int width, + int height, + int stride, + const uint8_t* data, + int row, + int column, + double std_dev) { + RTC_CHECK_GE(row, 0); + RTC_CHECK_LT(row, height); + RTC_CHECK_GE(column, 0); + RTC_CHECK_LT(column, width); + RTC_CHECK_GE(stride, width); + RTC_CHECK_GT(std_dev, 0.0) + << "Standard deviation = 0 yields improper Gaussian weights."; + + int max_distance = + std::ceil(std::sqrt(-2.0 * std::log(kCutoff) * std::pow(std_dev, 2.0))) - + 1; + // In order to counteract unexpected distortions (such as noise), a lower + // bound for blurring is introduced. This is done to reduce false positives + // caused by these distortions. + // False positives are decreased since for small `std_dev`s the quantization + // is strong and would cut of many of the small continuous weights used for + // robust comparision. + max_distance = std::max(kLowerBoundKernelSize, max_distance); + + double element_sum = 0.0; + double total_weight = 0.0; + for (int r = std::max(row - max_distance, 0); + r < std::min(row + max_distance + 1, height); ++r) { + for (int c = std::max(column - max_distance, 0); + c < std::min(column + max_distance + 1, width); ++c) { + double weight = + std::exp(-1.0 * (std::pow(row - r, 2) + std::pow(column - c, 2)) / + (2.0 * std::pow(std_dev, 2))); + element_sum += data[r * stride + c] * weight; + total_weight += weight; + } + } + + // Take the rounding errors into consideration. + return SafeClamp(element_sum / total_weight, 0.0, 255.0); +} + +std::vector GetSampleValuesForFrame( + const scoped_refptr i420_frame_buffer, + std::vector sample_coordinates, + int scaled_width, + int scaled_height, + double std_dev_gaussian_blur) { + // Validate input. 
+ if (i420_frame_buffer == nullptr) { + RTC_LOG(LS_WARNING) << "The framebuffer must not be nullptr"; + return {}; + } + if (sample_coordinates.empty()) { + RTC_LOG(LS_WARNING) << "There must be at least one coordinate provided"; + return {}; + } + for (HaltonFrameSampler::Coordinates coordinate : sample_coordinates) { + if (coordinate.column < 0.0 || coordinate.column >= 1.0 || + coordinate.row < 0.0 || coordinate.row >= 1.0) { + RTC_LOG(LS_WARNING) << "The coordinates must be in [0,1): column=" + << coordinate.column << ", row=" << coordinate.row + << ".\n"; + return {}; + } + } + if (scaled_width <= 0 || scaled_height <= 0) { + RTC_LOG(LS_WARNING) + << "The width and height to scale to must be positive: width=" + << scaled_width << ", height=" << scaled_height << ".\n"; + return {}; + } + if (std_dev_gaussian_blur < 0.0) { + RTC_LOG(LS_WARNING) + << "The standard deviation for the Gaussian blur must not be negative: " + << std_dev_gaussian_blur << ".\n"; + return {}; + } + if (scaled_width > i420_frame_buffer->width() || + scaled_height > i420_frame_buffer->height()) { + RTC_LOG(LS_WARNING) + << "Upscaling causes corruption. Therefore, only down-scaling is " + "permissible."; + return {}; + } + + // Scale the frame to the desired resolution: + // 1. Create a new buffer with the desired resolution. + // 2. Scale the old buffer to the size of the new buffer. + scoped_refptr scaled_i420_buffer = + I420Buffer::Create(scaled_width, scaled_height); + scaled_i420_buffer->ScaleFrom(*i420_frame_buffer); + + // Treat the planes as if they would have the following 2-dimensional layout: + // +------+---+ + // | | U | + // | Y +---+ + // | | V | + // +------+---+ + // where width:=(Y.width+U.width) and height:=Y.height. + // When interpreting the 2D sample coordinates, we simply treat them + // as if they were taken from the above layout. We then need to translate the + // coordinates back to the corresponding plane's corresponding 2D coordinates. + // Then we find the filtered value that corresponds to those coordinates. + int width_merged_planes = + scaled_i420_buffer->width() + scaled_i420_buffer->ChromaWidth(); + int height_merged_planes = scaled_i420_buffer->height(); + // Fetch the sample value for all of the requested coordinates. + std::vector filtered_samples; + filtered_samples.reserve(sample_coordinates.size()); + for (HaltonFrameSampler::Coordinates coordinate : sample_coordinates) { + // Scale the coordinates from [0,1) to [0,`width_merged_planes`) and + // [0,`height_merged_planes`). Truncation is intentional. + int column = coordinate.column * width_merged_planes; + int row = coordinate.row * height_merged_planes; + + // Map to plane coordinates and fetch the value. + double value_for_coordinate; + if (column < scaled_i420_buffer->width()) { + // Y plane. + value_for_coordinate = GetFilteredElement( + scaled_i420_buffer->width(), scaled_i420_buffer->height(), + scaled_i420_buffer->StrideY(), scaled_i420_buffer->DataY(), row, + column, std_dev_gaussian_blur); + filtered_samples.push_back( + {.value = value_for_coordinate, .plane = ImagePlane::kLuma}); + } else if (row < scaled_i420_buffer->ChromaHeight()) { + // U plane. 
+ column -= scaled_i420_buffer->width(); + value_for_coordinate = GetFilteredElement( + scaled_i420_buffer->ChromaWidth(), scaled_i420_buffer->ChromaHeight(), + scaled_i420_buffer->StrideU(), scaled_i420_buffer->DataU(), row, + column, std_dev_gaussian_blur); + filtered_samples.push_back( + {.value = value_for_coordinate, .plane = ImagePlane::kChroma}); + } else { + // V plane. + column -= scaled_i420_buffer->width(); + row -= scaled_i420_buffer->ChromaHeight(); + value_for_coordinate = GetFilteredElement( + scaled_i420_buffer->ChromaWidth(), scaled_i420_buffer->ChromaHeight(), + scaled_i420_buffer->StrideV(), scaled_i420_buffer->DataV(), row, + column, std_dev_gaussian_blur); + filtered_samples.push_back( + {.value = value_for_coordinate, .plane = ImagePlane::kChroma}); + } + } + return filtered_samples; +} + +} // namespace webrtc diff --git a/video/corruption_detection/halton_frame_sampler.h b/video/corruption_detection/halton_frame_sampler.h new file mode 100644 index 0000000000..ad2e1bd9f9 --- /dev/null +++ b/video/corruption_detection/halton_frame_sampler.h @@ -0,0 +1,87 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_CORRUPTION_DETECTION_HALTON_FRAME_SAMPLER_H_ +#define VIDEO_CORRUPTION_DETECTION_HALTON_FRAME_SAMPLER_H_ + +#include +#include +#include + +#include "api/scoped_refptr.h" +#include "api/video/video_frame_buffer.h" +#include "video/corruption_detection/halton_sequence.h" + +namespace webrtc { + +enum class ImagePlane { kLuma, kChroma }; + +struct FilteredSample { + double value; + ImagePlane plane; +}; + +// Determines if a frame should be sampled and, based on the 2 dimensional +// Halton sequence, finds the coordinates for those samples. +class HaltonFrameSampler { + public: + struct Coordinates { + double row = 0; + double column = 0; + }; + + HaltonFrameSampler(); + HaltonFrameSampler(const HaltonFrameSampler&) = default; + HaltonFrameSampler(HaltonFrameSampler&&) = default; + HaltonFrameSampler& operator=(const HaltonFrameSampler&) = default; + HaltonFrameSampler& operator=(HaltonFrameSampler&&) = default; + + std::vector GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + bool is_key_frame, + uint32_t rtp_timestamp, + int num_samples); + std::vector GetSampleCoordinatesForFrame(int num_samples); + void Restart(); + int GetCurrentIndex() const; + void SetCurrentIndex(int index); + + private: + Coordinates GetNextSampleCoordinates(); + + HaltonSequence coordinate_sampler_prng_; + std::optional rtp_timestamp_last_frame_sampled_; + int frames_sampled_ = 0; + int frames_until_next_sample_ = 0; +}; + +// 1. Scale the frame buffer to the resolution given by `scaled_width` and +// `scaled_height`. +// 2. Scale the `sample_coordinates` to the frame's resolution. +// 3. Apply the Gaussian filtering given by `std_dev_gaussian_blur`. +// 4. Fetch the values at the scaled coordinates in the filtered frame. +std::vector GetSampleValuesForFrame( + scoped_refptr i420_frame_buffer, + std::vector sample_coordinates, + int scaled_width, + int scaled_height, + double std_dev_gaussian_blur); + +// Returns the blurred value. The minimum half-kernel size is 3 pixels. 
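+// (The half-kernel size used is max(3, ceil(sqrt(-2 * ln(0.2)) * std_dev) - 1),
+// i.e. Gaussian weights below roughly 20% of the peak weight are cut off.)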
+double GetFilteredElement(int width, + int height, + int stride, + const uint8_t* data, + int row, + int column, + double std_dev); + +} // namespace webrtc + +#endif // VIDEO_CORRUPTION_DETECTION_HALTON_FRAME_SAMPLER_H_ diff --git a/video/corruption_detection/halton_frame_sampler_unittest.cc b/video/corruption_detection/halton_frame_sampler_unittest.cc new file mode 100644 index 0000000000..3fd899c912 --- /dev/null +++ b/video/corruption_detection/halton_frame_sampler_unittest.cc @@ -0,0 +1,583 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/corruption_detection/halton_frame_sampler.h" + +#include +#include + +#include "api/scoped_refptr.h" +#include "api/video/i420_buffer.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::_; +using ::testing::AllOf; +using ::testing::DoubleEq; +using ::testing::DoubleNear; +using ::testing::ElementsAre; +using ::testing::Field; +using ::testing::IsEmpty; +using ::testing::Not; + +using Coordinates = HaltonFrameSampler::Coordinates; + +// Defaults for sampling tests. +const int kDefaultScaledWidth = 4; +const int kDefaultScaledHeight = 4; +const double kDefaultStdDevGaussianBlur = 0.02; + +#if GTEST_HAS_DEATH_TEST +// Defaults for blurring tests. +const int kDefaultWidth = 4; +const int kDefaultHeight = 4; +const int kDefaultStride = 4; +const uint8_t kDefaultData[kDefaultWidth * kDefaultHeight] = { + 20, 196, 250, 115, 139, 39, 99, 197, 21, 166, 254, 28, 227, 54, 64, 46}; +const int kDefaultRow = 3; +const int kDefaultColumn = 2; +const double kDefaultStdDev = 1.12; +#endif // GTEST_HAS_DEATH_TEST + +scoped_refptr MakeDefaultI420FrameBuffer() { + // Create an I420 frame of size 4x4. + const int kDefaultLumaWidth = 4; + const int kDefaultLumaHeight = 4; + const int kDefaultChromaWidth = 2; + const uint8_t kDefaultYContent[16] = {20, 196, 250, 115, 139, 39, 99, 197, + 21, 166, 254, 28, 227, 54, 64, 46}; + const uint8_t kDefaultUContent[4] = {156, 203, 36, 128}; + const uint8_t kDefaultVContent[4] = {112, 2, 0, 24}; + + return I420Buffer::Copy(kDefaultLumaWidth, kDefaultLumaHeight, + kDefaultYContent, kDefaultLumaWidth, kDefaultUContent, + kDefaultChromaWidth, kDefaultVContent, + kDefaultChromaWidth); +} + +std::vector MakeDefaultSampleCoordinates() { + // Coordinates in all planes. + return {{.row = 0.2, .column = 0.7}, + {.row = 0.5, .column = 0.9}, + {.row = 0.3, .column = 0.7}, + {.row = 0.8, .column = 0.4}}; +} + +TEST(GaussianFilteringTest, ShouldReturnFilteredValueWhenInputIsValid) { + const int kWidth = 8; + const int kHeight = 8; + const int kStride = 8; + const uint8_t kData[kWidth * kHeight] = { + 219, 38, 75, 13, 77, 22, 108, 5, // + 199, 105, 237, 3, 194, 63, 200, 95, // + 116, 21, 224, 21, 79, 210, 138, 3, // + 130, 156, 139, 176, 1, 134, 191, 61, // + 123, 59, 34, 237, 223, 162, 113, 108, // + 146, 210, 214, 110, 50, 205, 135, 18, // + 51, 198, 63, 69, 70, 117, 180, 126, // + 244, 250, 194, 195, 85, 24, 25, 224}; + // Chosing the point in the middle so all pixels are used. + const int kRow = 3; + const int kColumn = 3; + // Resulting in a filter size of 3 pixels. 
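+  // (With std_dev = 1 the computed radius is ceil(sqrt(-2 * ln(0.2))) - 1 = 1,
+  // so the lower bound of 3 pixels applies.)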
+ const double kStdDev = 1; + + EXPECT_THAT(GetFilteredElement(kWidth, kHeight, kStride, kData, kRow, kColumn, + kStdDev), + DoubleEq(126.45897447350468)); +} + +#if GTEST_HAS_DEATH_TEST +TEST(GaussianFilteringTest, ShouldCrashWhenRowIsNegative) { + EXPECT_DEATH( + GetFilteredElement(kDefaultWidth, kDefaultHeight, kDefaultStride, + kDefaultData, -1, kDefaultColumn, kDefaultStdDev), + _); +} + +TEST(GaussianFilteringTest, ShouldCrashWhenRowIsOutOfRange) { + EXPECT_DEATH( + GetFilteredElement(kDefaultWidth, 4, kDefaultStride, kDefaultData, 4, + kDefaultColumn, kDefaultStdDev), + _); +} + +TEST(GaussianFilteringTest, ShouldCrashWhenColumnIsNegative) { + EXPECT_DEATH( + GetFilteredElement(kDefaultWidth, kDefaultHeight, kDefaultStride, + kDefaultData, kDefaultRow, -1, kDefaultStdDev), + _); +} + +TEST(GaussianFilteringTest, ShouldCrashWhenColumnIsOutOfRange) { + EXPECT_DEATH(GetFilteredElement(4, kDefaultHeight, kDefaultStride, + kDefaultData, kDefaultRow, 4, kDefaultStdDev), + _); +} + +TEST(GaussianFilteringTest, ShouldCrashWhenStrideIsSmallerThanWidth) { + EXPECT_DEATH(GetFilteredElement(4, kDefaultHeight, 3, kDefaultData, + kDefaultRow, kDefaultColumn, kDefaultStdDev), + _); +} + +TEST(GaussianFilteringTest, ShouldCrashWhenStdDevIsNegative) { + EXPECT_DEATH( + GetFilteredElement(kDefaultWidth, kDefaultHeight, kDefaultStride, + kDefaultData, kDefaultRow, kDefaultColumn, -1.0), + _); +} + +TEST(GaussianFilteringTest, RoundingErrorsShouldNotHappen) { + // These values should force a rounding error. + constexpr int kWidth = 128; + constexpr int kHeight = 128; + constexpr double kStdDev = 40; + const std::vector data(kWidth * kHeight, 255); + + EXPECT_THAT(GetFilteredElement(kWidth, kHeight, kHeight, data.data(), + kWidth / 2, kHeight / 2, kStdDev), + 255); +} + +TEST(HaltonFrameSamplerTest, FrameIsNotSampledWhenTimestampsAreEqual) { + HaltonFrameSampler halton_frame_sampler; + + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, /*rtp_timestamp=*/0, /*num_samples=*/1), + Not(IsEmpty())); + EXPECT_DEATH( + halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, /*rtp_timestamp=*/0, /*num_samples=*/1), + _); +} + +#endif // GTEST_HAS_DEATH_TEST + +TEST(HaltonFrameSamplerGaussianFilteringTest, + ShouldReturnEmptyListGivenInvalidInputNoFrameBuffer) { + const std::vector kDefaultSampleCoordinates = + MakeDefaultSampleCoordinates(); + + EXPECT_THAT(GetSampleValuesForFrame(nullptr, kDefaultSampleCoordinates, + kDefaultScaledWidth, kDefaultScaledHeight, + kDefaultStdDevGaussianBlur), + IsEmpty()); +} + +TEST(HaltonFrameSamplerGaussianFilteringTest, + ShouldReturnEmptyListGivenInvalidInputNoCoordinates) { + const scoped_refptr kDefaultI420Buffer = + MakeDefaultI420FrameBuffer(); + + EXPECT_THAT( + GetSampleValuesForFrame(kDefaultI420Buffer, {}, kDefaultScaledWidth, + kDefaultScaledHeight, kDefaultStdDevGaussianBlur), + IsEmpty()); +} + +TEST(HaltonFrameSamplerGaussianFilteringTest, + ShouldReturnEmptyListGivenInvalidInputOutOfRangeCoordinates) { + const scoped_refptr kDefaultI420Buffer = + MakeDefaultI420FrameBuffer(); + const std::vector kSampleCoordinates = { + {.row = 0.2, .column = 0.7}, + {.row = 0.5, .column = 1.0}, + {.row = 0.3, .column = 0.7}, + {.row = 0.8, .column = 0.4}}; + + EXPECT_THAT(GetSampleValuesForFrame(kDefaultI420Buffer, kSampleCoordinates, + kDefaultScaledWidth, kDefaultScaledHeight, + kDefaultStdDevGaussianBlur), + IsEmpty()); +} + 
+TEST(HaltonFrameSamplerGaussianFilteringTest, + ShouldReturnEmptyListGivenInvalidInputWidthZero) { + const scoped_refptr kDefaultI420Buffer = + MakeDefaultI420FrameBuffer(); + const std::vector kDefaultSampleCoordinates = + MakeDefaultSampleCoordinates(); + + EXPECT_THAT( + GetSampleValuesForFrame(kDefaultI420Buffer, kDefaultSampleCoordinates, 0, + kDefaultScaledHeight, kDefaultStdDevGaussianBlur), + IsEmpty()); +} + +TEST(HaltonFrameSamplerGaussianFilteringTest, + ShouldReturnEmptyListGivenInvalidInputHeightZero) { + const scoped_refptr kDefaultI420Buffer = + MakeDefaultI420FrameBuffer(); + const std::vector kDefaultSampleCoordinates = + MakeDefaultSampleCoordinates(); + + EXPECT_THAT(GetSampleValuesForFrame( + kDefaultI420Buffer, kDefaultSampleCoordinates, + kDefaultScaledWidth, 0, kDefaultStdDevGaussianBlur), + IsEmpty()); +} + +TEST(HaltonFrameSamplerGaussianFilteringTest, + ShouldReturnEmptyListGivenInvalidInputStdDevNegative) { + const scoped_refptr kDefaultI420Buffer = + MakeDefaultI420FrameBuffer(); + const std::vector kDefaultSampleCoordinates = + MakeDefaultSampleCoordinates(); + + EXPECT_THAT( + GetSampleValuesForFrame(kDefaultI420Buffer, kDefaultSampleCoordinates, + kDefaultScaledWidth, kDefaultScaledHeight, -1.0), + IsEmpty()); +} + +TEST(HaltonFrameSamplerGaussianFilteringTest, + ShouldReturnEmptyListWhenUpscaling) { + const scoped_refptr kDefaultI420Buffer = + MakeDefaultI420FrameBuffer(); + + EXPECT_THAT(GetSampleValuesForFrame(kDefaultI420Buffer, + MakeDefaultSampleCoordinates(), + /*scaled_width=*/8, /*scaled_height=*/8, + kDefaultStdDevGaussianBlur), + IsEmpty()); +} + +TEST(HaltonFrameSamplerGaussianFilteringTest, + ShouldReturnGivenValueWhenNoScalingOrFilteringIsDefined) { + // 4x4 i420 frame data. + const int kLumaWidth = 4; + const int kLumaHeight = 4; + const int kChromaWidth = 2; + const uint8_t kYContent[16] = {20, 196, 250, 115, 139, 39, 99, 197, + 21, 166, 254, 28, 227, 54, 64, 46}; + const uint8_t kUContent[4] = {156, 203, 36, 128}; + const uint8_t kVContent[4] = {112, 2, 0, 24}; + const scoped_refptr kI420Buffer = + I420Buffer::Copy(kLumaWidth, kLumaHeight, kYContent, kLumaWidth, + kUContent, kChromaWidth, kVContent, kChromaWidth); + + // Coordinates in all planes. + const std::vector kSampleCoordinates = { + {.row = 0.2, .column = 0.7}, + {.row = 0.5, .column = 0.9}, + {.row = 0.3, .column = 0.7}, + {.row = 0.8, .column = 0.4}}; + + // No scaling. + const int kScaledWidth = kLumaWidth; + const int kScaledHeight = kLumaHeight; + + // No filtering. + const double kStdDevGaussianBlur = 0.02; + + EXPECT_THAT( + GetSampleValuesForFrame(kI420Buffer, kSampleCoordinates, kScaledWidth, + kScaledHeight, kStdDevGaussianBlur), + ElementsAre(AllOf(Field(&FilteredSample::value, DoubleEq(156.0)), + Field(&FilteredSample::plane, ImagePlane::kChroma)), + AllOf(Field(&FilteredSample::value, DoubleEq(2.0)), + Field(&FilteredSample::plane, ImagePlane::kChroma)), + AllOf(Field(&FilteredSample::value, DoubleEq(36.0)), + Field(&FilteredSample::plane, ImagePlane::kChroma)), + AllOf(Field(&FilteredSample::value, DoubleEq(64.0)), + Field(&FilteredSample::plane, ImagePlane::kLuma)))); +} + +TEST(HaltonFrameSamplerGaussianFilteringTest, + ShouldScaleTheFrameWhenScalingIsRequested) { + // 4x4 i420 frame data. 
+ const int kLumaWidth = 4; + const int kLumaHeight = 4; + const int kChromaWidth = 2; + const uint8_t kYContent[16] = {20, 196, 250, 115, 139, 39, 99, 197, + 21, 166, 254, 28, 227, 54, 64, 46}; + const uint8_t kUContent[4] = {156, 203, 36, 128}; + const uint8_t kVContent[4] = {112, 2, 0, 24}; + const scoped_refptr kI420Buffer = + I420Buffer::Copy(kLumaWidth, kLumaHeight, kYContent, kLumaWidth, + kUContent, kChromaWidth, kVContent, kChromaWidth); + + // Coordinates in all planes. + const std::vector kSampleCoordinates = { + {.row = 0.2, .column = 0.7}, + {.row = 0.5, .column = 0.9}, + {.row = 0.3, .column = 0.7}, + {.row = 0.8, .column = 0.4}}; + + // With scaling. + const int kScaledWidth = 2; + const int kScaledHeight = 2; + + // No filtering. + const double kStdDevGaussianBlur = 0.02; + + EXPECT_THAT( + GetSampleValuesForFrame(kI420Buffer, kSampleCoordinates, kScaledWidth, + kScaledHeight, kStdDevGaussianBlur), + ElementsAre(AllOf(Field(&FilteredSample::value, DoubleEq(131.0)), + Field(&FilteredSample::plane, ImagePlane::kChroma)), + AllOf(Field(&FilteredSample::value, DoubleEq(35.0)), + Field(&FilteredSample::plane, ImagePlane::kChroma)), + AllOf(Field(&FilteredSample::value, DoubleEq(131.0)), + Field(&FilteredSample::plane, ImagePlane::kChroma)), + AllOf(Field(&FilteredSample::value, DoubleEq(98.0)), + Field(&FilteredSample::plane, ImagePlane::kLuma)))); +} + +TEST(HaltonFrameSamplerGaussianFilteringTest, + ShouldReturnFilteredValuesWhenFilteringIsRequested) { + // 8x8 i420 frame data. + const int kLumaWidth = 8; + const int kLumaHeight = 8; + const int kChromaWidth = 4; + const uint8_t kYContent[kLumaWidth * kLumaHeight] = { + 219, 38, 75, 13, 77, 22, 108, 5, // + 199, 105, 237, 3, 194, 63, 200, 95, // + 116, 21, 224, 21, 79, 210, 138, 3, // + 130, 156, 139, 176, 1, 134, 191, 61, // + 123, 59, 34, 237, 223, 162, 113, 108, // + 146, 210, 214, 110, 50, 205, 135, 18, // + 51, 198, 63, 69, 70, 117, 180, 126, // + 244, 250, 194, 195, 85, 24, 25, 224}; + const uint8_t kUContent[16] = { + 219, 38, 75, 13, 77, 22, 108, 5, // + 199, 105, 237, 3, 194, 63, 200, 95, + }; + const uint8_t kVContent[16] = { + 123, 59, 34, 237, 223, 162, 113, 108, // + 51, 198, 63, 69, 70, 117, 180, 126, + }; + const scoped_refptr kI420Buffer = + I420Buffer::Copy(kLumaWidth, kLumaHeight, kYContent, kLumaWidth, + kUContent, kChromaWidth, kVContent, kChromaWidth); + + // Coordinates in all (YUV) planes. + const std::vector kSampleCoordinates = { + {.row = 0.2, .column = 0.7}, + {.row = 0.5, .column = 0.9}, + {.row = 0.3, .column = 0.7}, + {.row = 0.8, .column = 0.4}}; + + // No scaling. + const int kScaledWidth = kLumaWidth; + const int kScaledHeight = kLumaHeight; + + // With filtering (kernel size 3x3 minimum required). 
+ const double kStdDevGaussianBlur = 1; + + EXPECT_THAT( + GetSampleValuesForFrame(kI420Buffer, kSampleCoordinates, kScaledWidth, + kScaledHeight, kStdDevGaussianBlur), + ElementsAre( + AllOf(Field(&FilteredSample::value, DoubleEq(114.6804322931639)), + Field(&FilteredSample::plane, ImagePlane::kChroma)), + AllOf(Field(&FilteredSample::value, DoubleEq(109.66816384377159)), + Field(&FilteredSample::plane, ImagePlane::kChroma)), + AllOf(Field(&FilteredSample::value, DoubleEq(133.7339472739954)), + Field(&FilteredSample::plane, ImagePlane::kChroma)), + AllOf(Field(&FilteredSample::value, DoubleEq(104.43135638243807)), + Field(&FilteredSample::plane, ImagePlane::kLuma)))); +} + +TEST(HaltonFrameSamplerTest, CoordinatesFollowsHaltonSequence) { + HaltonFrameSampler halton_frame_sampler; + const int kNumSamples = 1; + EXPECT_THAT(halton_frame_sampler.GetSampleCoordinatesForFrame(kNumSamples), + ElementsAre(AllOf(Field(&Coordinates::row, DoubleEq(0.0)), + Field(&Coordinates::column, DoubleEq(0.0))))); + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrame(kNumSamples), + ElementsAre(AllOf(Field(&Coordinates::row, DoubleEq(1.0 / 2)), + Field(&Coordinates::column, DoubleEq(1.0 / 3))))); + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrame(kNumSamples), + ElementsAre(AllOf(Field(&Coordinates::row, DoubleEq(1.0 / 4)), + Field(&Coordinates::column, DoubleEq(2.0 / 3))))); + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrame(kNumSamples), + ElementsAre(AllOf(Field(&Coordinates::row, DoubleEq(3.0 / 4)), + Field(&Coordinates::column, DoubleEq(1.0 / 9))))); + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrame(kNumSamples), + ElementsAre(AllOf(Field(&Coordinates::row, DoubleEq(1.0 / 8)), + Field(&Coordinates::column, DoubleEq(4.0 / 9))))); + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrame(kNumSamples), + ElementsAre(AllOf(Field(&Coordinates::row, DoubleEq(5.0 / 8)), + Field(&Coordinates::column, DoubleEq(7.0 / 9))))); + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrame(kNumSamples), + ElementsAre(AllOf(Field(&Coordinates::row, DoubleEq(3.0 / 8)), + Field(&Coordinates::column, DoubleEq(2.0 / 9))))); +} + +TEST(HaltonFrameSamplerTest, GeneratesMultipleSamplesWhenRequested) { + HaltonFrameSampler halton_frame_sampler; + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrame(3), + ElementsAre(AllOf(Field(&Coordinates::row, DoubleEq(0.0)), + Field(&Coordinates::column, DoubleEq(0.0))), + AllOf(Field(&Coordinates::row, DoubleEq(1.0 / 2)), + Field(&Coordinates::column, DoubleEq(1.0 / 3))), + AllOf(Field(&Coordinates::row, DoubleEq(1.0 / 4)), + Field(&Coordinates::column, DoubleEq(2.0 / 3))))); +} + +TEST(HaltonFrameSamplerTest, ShouldChangeIndexWhenRequestedTo) { + HaltonFrameSampler halton_frame_sampler; + halton_frame_sampler.SetCurrentIndex(1); + EXPECT_EQ(halton_frame_sampler.GetCurrentIndex(), 1); + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrame(1), + ElementsAre(AllOf(Field(&Coordinates::row, DoubleEq(1.0 / 2)), + Field(&Coordinates::column, DoubleEq(1.0 / 3))))); +} + +TEST(HaltonFrameSamplerTest, FirstFrameIsSampled) { + HaltonFrameSampler halton_frame_sampler; + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, /*rtp_timestamp=*/0, /*num_samples=*/1), + Not(IsEmpty())); +} + +TEST(HaltonFrameSamplerTest, + DeltaFrameFollowingSampledFrameWithTooShortTimeDeltaIsNotSampled) { + HaltonFrameSampler halton_frame_sampler; + 
halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, /*rtp_timestamp=*/0, /*num_samples=*/1); + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, /*rtp_timestamp=*/1, /*num_samples=*/1), + IsEmpty()); +} + +TEST(HaltonFrameSamplerTest, + DeltaFramesAreSampledBasedOnHowManyFramesHasPassedSinceLastSampledFrame) { + HaltonFrameSampler halton_frame_sampler; + uint32_t rtp_timestamp = 0; + const int kNumSamples = 1; + + // The number of frames between each sample is defined as + // 33 - mod(number_of_sampled_frames, 8) + // so the following gets get coverage for [26, 33] two times. + for (int iterations = 0; iterations < 2; ++iterations) { + for (int num_sampled_frames = 0; num_sampled_frames < 8; + ++num_sampled_frames) { + EXPECT_THAT(halton_frame_sampler + .GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, rtp_timestamp, kNumSamples), + Not(IsEmpty())); + ++rtp_timestamp; + for (int num_unsampled_frames = 1; + num_unsampled_frames < 33 - num_sampled_frames; + ++num_unsampled_frames) { + EXPECT_THAT(halton_frame_sampler + .GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, rtp_timestamp, kNumSamples), + IsEmpty()); + ++rtp_timestamp; + } + } + } +} + +TEST(HaltonFrameSamplerTest, KeyFrameIsSampled) { + HaltonFrameSampler halton_frame_sampler; + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, /*rtp_timestamp=*/0, /*num_samples=*/1), + Not(IsEmpty())); + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/true, /*rtp_timestamp=*/1, /*num_samples=*/1), + Not(IsEmpty())); +} + +TEST(HaltonFrameSamplerTest, + SampleFramesWhenEnoughTimeHasPassedSinceLastSampledFrame) { + HaltonFrameSampler halton_frame_sampler; + const uint32_t kRtpTimestamp = 0; + const int kNumSamples = 1; + const uint32_t kSufficientDuration = 90'000; + const uint32_t kTooShortDuration = 1; + halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, kRtpTimestamp, kNumSamples); + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, kRtpTimestamp + kSufficientDuration, + kNumSamples), + Not(IsEmpty())); + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, + kRtpTimestamp + kSufficientDuration + kTooShortDuration, kNumSamples), + IsEmpty()); + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, kRtpTimestamp + 2 * kSufficientDuration, + kNumSamples), + Not(IsEmpty())); +} + +TEST(HaltonFrameSamplerTest, + FrameIsNotSampledWhenTooShortTimeHasPassedSinceLastSampledFrame) { + HaltonFrameSampler halton_frame_sampler; + const uint32_t kRtpTimestamp = 0; + const uint32_t kTooShortDuration = 90'000 - 1; + halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, kRtpTimestamp, /*num_samples=*/1); + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, kRtpTimestamp + kTooShortDuration, + /*num_samples=*/1), + IsEmpty()); +} + +TEST(HaltonFrameSamplerTest, + SampleFramesWhenEnoughTimeWithWraparoundHasPassedSinceLastSampledFrame) { + HaltonFrameSampler halton_frame_sampler; + + // Time delta = 90'000. 
+ EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, /*rtp_timestamp=*/0xFFFE'A071, + /*num_samples=*/1), + Not(IsEmpty())); + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, /*rtp_timestamp=*/1, /*num_samples=*/1), + Not(IsEmpty())); +} + +TEST( + HaltonFrameSamplerTest, + FrameIsNotSampledWhenTooShortTimeDeltaWithWraparoundSinceLastSampledFrame) { + HaltonFrameSampler halton_frame_sampler; + + // Time delta = 89'999. + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, /*rtp_timestamp=*/0xFFFE'A072, + /*num_samples=*/1), + Not(IsEmpty())); + EXPECT_THAT( + halton_frame_sampler.GetSampleCoordinatesForFrameIfFrameShouldBeSampled( + /*is_key_frame=*/false, /*rtp_timestamp=*/1, /*num_samples=*/1), + IsEmpty()); +} + +} // namespace +} // namespace webrtc diff --git a/video/corruption_detection/halton_sequence.cc b/video/corruption_detection/halton_sequence.cc new file mode 100644 index 0000000000..c41647340a --- /dev/null +++ b/video/corruption_detection/halton_sequence.cc @@ -0,0 +1,73 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/corruption_detection/halton_sequence.h" + +#include +#include + +#include "rtc_base/checks.h" + +namespace webrtc { +namespace { + +static constexpr int kMaxDimensions = 5; +const int kBases[kMaxDimensions] = {2, 3, 5, 7, 11}; + +double GetVanDerCorputSequenceElement(int sequence_idx, int base) { + if (sequence_idx < 0 || base < 2) { + sequence_idx = 0; + base = 2; + } + double element = 0.0; + double positional_value = 1.0; + int left = sequence_idx; + while (left > 0) { + positional_value /= base; + element += positional_value * (left % base); + left /= base; + } + return element; +} + +} // namespace + +HaltonSequence::HaltonSequence(int num_dimensions) + : num_dimensions_(num_dimensions), current_idx_(0) { + RTC_CHECK_GE(num_dimensions_, 1) + << "num_dimensions must be >= 1. Will be set to 1."; + RTC_CHECK_LE(num_dimensions_, kMaxDimensions) + << "num_dimensions must be <= " << kMaxDimensions << ". Will be set to " + << kMaxDimensions << "."; + num_dimensions_ = std::clamp(num_dimensions_, 1, kMaxDimensions); +} + +std::vector HaltonSequence::GetNext() { + std::vector point = {}; + point.reserve(num_dimensions_); + for (int i = 0; i < num_dimensions_; ++i) { + point.push_back(GetVanDerCorputSequenceElement(current_idx_, kBases[i])); + } + ++current_idx_; + return point; +} + +void HaltonSequence::SetCurrentIndex(int idx) { + if (idx >= 0) { + current_idx_ = idx; + } + RTC_DCHECK_GE(idx, 0) << "Index must be non-negative"; +} + +void HaltonSequence::Reset() { + HaltonSequence::current_idx_ = 0; +} + +} // namespace webrtc diff --git a/video/corruption_detection/halton_sequence.h b/video/corruption_detection/halton_sequence.h new file mode 100644 index 0000000000..576f32bdfb --- /dev/null +++ b/video/corruption_detection/halton_sequence.h @@ -0,0 +1,48 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_CORRUPTION_DETECTION_HALTON_SEQUENCE_H_ +#define VIDEO_CORRUPTION_DETECTION_HALTON_SEQUENCE_H_ + +#include + +namespace webrtc { + +// Generates the Halton sequence: a low discrepancy sequence of doubles in the +// half-open interval [0,1). See https://en.wikipedia.org/wiki/Halton_sequence +// for information on how the sequence is constructed. +class HaltonSequence { + public: + // Creates a sequence in `num_dimensions` number of dimensions. Possible + // values are [1, 5]. + explicit HaltonSequence(int num_dimensions); + // Creates a default sequence in a single dimension. + HaltonSequence() = default; + HaltonSequence(const HaltonSequence&) = default; + HaltonSequence(HaltonSequence&&) = default; + HaltonSequence& operator=(const HaltonSequence&) = default; + HaltonSequence& operator=(HaltonSequence&&) = default; + ~HaltonSequence() = default; + + // Gets the next point in the sequence where each value is in the half-open + // interval [0,1). + std::vector GetNext(); + int GetCurrentIndex() const { return current_idx_; } + void SetCurrentIndex(int idx); + void Reset(); + + private: + int num_dimensions_ = 1; + int current_idx_ = 0; +}; + +} // namespace webrtc + +#endif // VIDEO_CORRUPTION_DETECTION_HALTON_SEQUENCE_H_ diff --git a/video/corruption_detection/halton_sequence_unittest.cc b/video/corruption_detection/halton_sequence_unittest.cc new file mode 100644 index 0000000000..9eedb800f4 --- /dev/null +++ b/video/corruption_detection/halton_sequence_unittest.cc @@ -0,0 +1,73 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/corruption_detection/halton_sequence.h" + +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::DoubleEq; +using ::testing::ElementsAre; + +TEST(HaltonSequenceTest, ShouldGenerateBase2SequenceByDefault) { + HaltonSequence halton_sequence; + EXPECT_THAT(halton_sequence.GetNext(), ElementsAre(DoubleEq(0.0))); + EXPECT_THAT(halton_sequence.GetNext(), ElementsAre(DoubleEq(1.0 / 2))); + EXPECT_THAT(halton_sequence.GetNext(), ElementsAre(DoubleEq(1.0 / 4))); + EXPECT_THAT(halton_sequence.GetNext(), ElementsAre(DoubleEq(3.0 / 4))); + EXPECT_THAT(halton_sequence.GetNext(), ElementsAre(DoubleEq(1.0 / 8))); + EXPECT_THAT(halton_sequence.GetNext(), ElementsAre(DoubleEq(5.0 / 8))); + EXPECT_THAT(halton_sequence.GetNext(), ElementsAre(DoubleEq(3.0 / 8))); +} + +TEST(HaltonSequenceTest, + ShouldGenerateBase2Base3SequencesWhenCreatedAs2Dimensional) { + HaltonSequence halton_sequence(2); + EXPECT_THAT(halton_sequence.GetNext(), + ElementsAre(DoubleEq(0.0), DoubleEq(0.0))); + EXPECT_THAT(halton_sequence.GetNext(), + ElementsAre(DoubleEq(1.0 / 2), DoubleEq(1.0 / 3))); + EXPECT_THAT(halton_sequence.GetNext(), + ElementsAre(DoubleEq(1.0 / 4), DoubleEq(2.0 / 3))); + EXPECT_THAT(halton_sequence.GetNext(), + ElementsAre(DoubleEq(3.0 / 4), DoubleEq(1.0 / 9))); + EXPECT_THAT(halton_sequence.GetNext(), + ElementsAre(DoubleEq(1.0 / 8), DoubleEq(4.0 / 9))); + EXPECT_THAT(halton_sequence.GetNext(), + ElementsAre(DoubleEq(5.0 / 8), DoubleEq(7.0 / 9))); + EXPECT_THAT(halton_sequence.GetNext(), + ElementsAre(DoubleEq(3.0 / 8), DoubleEq(2.0 / 9))); +} + +TEST(HaltonSequenceTest, ShouldRestartSequenceWhenResetIsCalled) { + HaltonSequence halton_sequence; + EXPECT_THAT(halton_sequence.GetCurrentIndex(), 0); + EXPECT_THAT(halton_sequence.GetNext(), ElementsAre(DoubleEq(0.0))); + EXPECT_THAT(halton_sequence.GetCurrentIndex(), 1); + EXPECT_THAT(halton_sequence.GetNext(), ElementsAre(DoubleEq(1.0 / 2))); + EXPECT_THAT(halton_sequence.GetCurrentIndex(), 2); + halton_sequence.Reset(); + EXPECT_THAT(halton_sequence.GetCurrentIndex(), 0); + EXPECT_THAT(halton_sequence.GetNext(), ElementsAre(DoubleEq(0.0))); +} + +TEST(HaltonSequenceTest, ShouldSetCurrentIndexWhenSetCurrentIndexIsCalled) { + HaltonSequence halton_sequence; + EXPECT_THAT(halton_sequence.GetCurrentIndex(), 0); + halton_sequence.SetCurrentIndex(3); + EXPECT_THAT(halton_sequence.GetCurrentIndex(), 3); + EXPECT_THAT(halton_sequence.GetNext(), ElementsAre(DoubleEq(3.0 / 4))); +} + +} // namespace +} // namespace webrtc diff --git a/video/corruption_detection/utils.cc b/video/corruption_detection/utils.cc new file mode 100644 index 0000000000..b213dfb509 --- /dev/null +++ b/video/corruption_detection/utils.cc @@ -0,0 +1,70 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "video/corruption_detection/utils.h"
+
+#include "absl/strings/match.h"
+#include "absl/strings/string_view.h"
+#include "api/scoped_refptr.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_codec_type.h"
+#include "api/video/video_frame_buffer.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+namespace {
+
+constexpr char kPayloadNameVp8[] = "VP8";
+constexpr char kPayloadNameVp9[] = "VP9";
+constexpr char kPayloadNameAv1[] = "AV1";
+constexpr char kPayloadNameH264[] = "H264";
+constexpr char kPayloadNameH265[] = "H265";
+constexpr char kPayloadNameGeneric[] = "Generic";
+
+}  // namespace
+
+// Returns the `VideoCodecType` corresponding to the given `codec_name`.
+// The `codec_name` does not need to exactly match one of the
+// `kPayloadNameXXX` constants: for example, "VP8", "vp8" and "libvpx_vp8" are
+// all valid and return `kVideoCodecVP8`. I.e. it does its best to map a codec
+// name to a `VideoCodecType`, and crashes (RTC_FATAL) if `codec_name` does
+// not contain any known codec name.
+VideoCodecType GetVideoCodecType(absl::string_view codec_name) {
+  if (absl::StrContainsIgnoreCase(codec_name, kPayloadNameVp8))
+    return kVideoCodecVP8;
+  if (absl::StrContainsIgnoreCase(codec_name, kPayloadNameVp9))
+    return kVideoCodecVP9;
+  if (absl::StrContainsIgnoreCase(codec_name, kPayloadNameAv1))
+    return kVideoCodecAV1;
+  if (absl::StrContainsIgnoreCase(codec_name, kPayloadNameH264))
+    return kVideoCodecH264;
+  if (absl::StrContainsIgnoreCase(codec_name, kPayloadNameH265))
+    return kVideoCodecH265;
+  if (absl::StrContainsIgnoreCase(codec_name, kPayloadNameGeneric))
+    return kVideoCodecGeneric;
+  RTC_FATAL() << "Codec name " << codec_name << " is not supported.";
+}
+
+// Creates a new buffer and copies the pixel data, converting the buffer type
+// from `I420BufferInterface` to `I420Buffer` in the process. Note that any
+// padding bytes are removed by the copy.
+scoped_refptr<I420Buffer> GetAsI420Buffer(
+    const scoped_refptr<I420BufferInterface> i420_buffer_interface) {
+  // Note: `I420Buffer::Copy` removes padding bytes, i.e. an input laid out as
+  // on the left becomes the output on the right.
+  // +------+--+      +------+
+  // |      |  |      |      |
+  // |  Y   |P | -->  |  Y   |
+  // |      |  |      |      |
+  // +------+--+      +------+
+  scoped_refptr<I420Buffer> frame_as_i420_buffer =
+      I420Buffer::Copy(*i420_buffer_interface);
+  RTC_DCHECK_EQ(frame_as_i420_buffer->StrideY(),
+                frame_as_i420_buffer->width());
+  return frame_as_i420_buffer;
+}
+
+}  // namespace webrtc
diff --git a/video/corruption_detection/utils.h b/video/corruption_detection/utils.h
new file mode 100644
index 0000000000..7c1f896166
--- /dev/null
+++ b/video/corruption_detection/utils.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2024 The WebRTC project authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */ + +#ifndef VIDEO_CORRUPTION_DETECTION_UTILS_H_ +#define VIDEO_CORRUPTION_DETECTION_UTILS_H_ + +#include "absl/strings/string_view.h" +#include "api/scoped_refptr.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_buffer.h" + +namespace webrtc { + +VideoCodecType GetVideoCodecType(absl::string_view codec_name); + +scoped_refptr GetAsI420Buffer( + scoped_refptr i420_buffer_interface); + +} // namespace webrtc + +#endif // VIDEO_CORRUPTION_DETECTION_UTILS_H_ diff --git a/video/corruption_detection/utils_unittest.cc b/video/corruption_detection/utils_unittest.cc new file mode 100644 index 0000000000..1325f14d6b --- /dev/null +++ b/video/corruption_detection/utils_unittest.cc @@ -0,0 +1,38 @@ +/* + * Copyright 2024 The WebRTC project authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/corruption_detection/utils.h" + +#include "api/video/video_codec_type.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +#if GTEST_HAS_DEATH_TEST +using ::testing::_; +#endif // GTEST_HAS_DEATH_TEST + +TEST(UtilsTest, FindCodecFromString) { + EXPECT_EQ(GetVideoCodecType(/*codec_name=*/"VP8"), kVideoCodecVP8); + EXPECT_EQ(GetVideoCodecType(/*codec_name=*/"libvpx-vp9"), kVideoCodecVP9); + EXPECT_EQ(GetVideoCodecType(/*codec_name=*/"ImprovedAV1"), kVideoCodecAV1); + EXPECT_EQ(GetVideoCodecType(/*codec_name=*/"lets_use_h264"), kVideoCodecH264); +} + +#if GTEST_HAS_DEATH_TEST +TEST(UtilsTest, IfCodecDoesNotExistRaiseError) { + EXPECT_DEATH(GetVideoCodecType(/*codec_name=*/"Not_a_codec"), _); +} +#endif // GTEST_HAS_DEATH_TEST + +} // namespace +} // namespace webrtc diff --git a/video/cpu_scaling_tests.cc b/video/cpu_scaling_tests.cc index 79473721b7..308a7d4ad8 100644 --- a/video/cpu_scaling_tests.cc +++ b/video/cpu_scaling_tests.cc @@ -71,8 +71,8 @@ void CpuOveruseTest::RunTestAndCheckForAdaptation( } // Called when FrameGeneratorCapturer::AddOrUpdateSink is called. - void OnSinkWantsChanged(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override { + void OnSinkWantsChanged(VideoSinkInterface* sink, + const VideoSinkWants& wants) override { if (wants.max_pixel_count == std::numeric_limits::max() && wants.max_framerate_fps == kFps) { // Max configured framerate is initially set. diff --git a/video/decode_synchronizer.cc b/video/decode_synchronizer.cc index 32702c2e81..94c4902c78 100644 --- a/video/decode_synchronizer.cc +++ b/video/decode_synchronizer.cc @@ -58,11 +58,11 @@ DecodeSynchronizer::SynchronizedFrameDecodeScheduler:: RTC_DCHECK(stopped_); } -absl::optional +std::optional DecodeSynchronizer::SynchronizedFrameDecodeScheduler::ScheduledRtpTimestamp() { return next_frame_.has_value() - ? absl::make_optional(next_frame_->rtp_timestamp()) - : absl::nullopt; + ? 
std::make_optional(next_frame_->rtp_timestamp()) + : std::nullopt; } DecodeSynchronizer::ScheduledFrame diff --git a/video/decode_synchronizer.h b/video/decode_synchronizer.h index 17181862a4..d441fa0543 100644 --- a/video/decode_synchronizer.h +++ b/video/decode_synchronizer.h @@ -15,10 +15,10 @@ #include #include +#include #include #include -#include "absl/types/optional.h" #include "api/metronome/metronome.h" #include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" @@ -103,7 +103,7 @@ class DecodeSynchronizer { Timestamp LatestDecodeTime(); // FrameDecodeScheduler implementation. - absl::optional ScheduledRtpTimestamp() override; + std::optional ScheduledRtpTimestamp() override; void ScheduleFrame(uint32_t rtp, FrameDecodeTiming::FrameSchedule schedule, FrameReleaseCallback cb) override; @@ -112,7 +112,7 @@ class DecodeSynchronizer { private: DecodeSynchronizer* sync_; - absl::optional next_frame_; + std::optional next_frame_; bool stopped_ = false; }; diff --git a/video/decode_synchronizer_unittest.cc b/video/decode_synchronizer_unittest.cc index fb48a7e3f6..78adc77c7f 100644 --- a/video/decode_synchronizer_unittest.cc +++ b/video/decode_synchronizer_unittest.cc @@ -39,7 +39,7 @@ class MockMetronome : public Metronome { RequestCallOnNextTick, (absl::AnyInvocable callback), (override)); - MOCK_METHOD(TimeDelta, TickPeriod, (), (const override)); + MOCK_METHOD(TimeDelta, TickPeriod, (), (const, override)); }; class DecodeSynchronizerTest : public ::testing::Test { @@ -239,7 +239,7 @@ TEST(DecodeSynchronizerStandaloneTest, time_controller.GetMainThread()); absl::AnyInvocable callback; EXPECT_CALL(metronome, RequestCallOnNextTick) - .WillOnce(Invoke([&callback](absl::AnyInvocable cb) { + .WillOnce(Invoke([&callback](absl::AnyInvocable cb) { callback = std::move(cb); })); auto scheduler = decode_synchronizer_.CreateSynchronizedFrameScheduler(); diff --git a/video/encoder_bitrate_adjuster.cc b/video/encoder_bitrate_adjuster.cc index 465d517d21..8176e65822 100644 --- a/video/encoder_bitrate_adjuster.cc +++ b/video/encoder_bitrate_adjuster.cc @@ -14,6 +14,8 @@ #include #include +#include "api/field_trials_view.h" +#include "modules/video_coding/svc/scalability_mode_util.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/logging.h" #include "rtc_base/time_utils.h" @@ -39,22 +41,40 @@ struct LayerRateInfo { } }; } // namespace -constexpr int64_t EncoderBitrateAdjuster::kWindowSizeMs; +constexpr TimeDelta EncoderBitrateAdjuster::kWindowSize; constexpr size_t EncoderBitrateAdjuster::kMinFramesSinceLayoutChange; constexpr double EncoderBitrateAdjuster::kDefaultUtilizationFactor; -EncoderBitrateAdjuster::EncoderBitrateAdjuster(const VideoCodec& codec_settings) - : utilize_bandwidth_headroom_(RateControlSettings::ParseFromFieldTrials() +EncoderBitrateAdjuster::EncoderBitrateAdjuster( + const VideoCodec& codec_settings, + const FieldTrialsView& field_trials, + Clock& clock) + : utilize_bandwidth_headroom_(RateControlSettings(field_trials) .BitrateAdjusterCanUseNetworkHeadroom()), + use_newfangled_headroom_adjustment_(!field_trials.IsDisabled( + "WebRTC-BitrateAdjusterUseNewfangledHeadroomAdjustment")), frames_since_layout_change_(0), min_bitrates_bps_{}, codec_(codec_settings.codecType), - codec_mode_(codec_settings.mode) { + codec_mode_(codec_settings.mode), + clock_(clock) { // TODO(https://crbug.com/webrtc/14891): If we want to support simulcast of // SVC streams, EncoderBitrateAdjuster needs to be updated to care about both // `simulcastStream` and 
`spatialLayers` at the same time. - if (codec_settings.codecType == VideoCodecType::kVideoCodecVP9 && - codec_settings.numberOfSimulcastStreams <= 1) { + if (codec_settings.codecType == VideoCodecType::kVideoCodecAV1 && + codec_settings.numberOfSimulcastStreams <= 1 && + codec_settings.GetScalabilityMode().has_value()) { + for (int si = 0; si < ScalabilityModeToNumSpatialLayers( + *(codec_settings.GetScalabilityMode())); + ++si) { + if (codec_settings.spatialLayers[si].active) { + min_bitrates_bps_[si] = + std::max(codec_settings.minBitrate * 1000, + codec_settings.spatialLayers[si].minBitrate * 1000); + } + } + } else if (codec_settings.codecType == VideoCodecType::kVideoCodecVP9 && + codec_settings.numberOfSimulcastStreams <= 1) { for (size_t si = 0; si < codec_settings.VP9().numberOfSpatialLayers; ++si) { if (codec_settings.spatialLayers[si].active) { min_bitrates_bps_[si] = @@ -78,6 +98,7 @@ EncoderBitrateAdjuster::~EncoderBitrateAdjuster() = default; VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation( const VideoEncoder::RateControlParameters& rates) { current_rate_control_parameters_ = rates; + const Timestamp now = clock_.CurrentTime(); // First check that overshoot detectors exist, and store per simulcast/spatial // layer how many active temporal layers we have. @@ -93,7 +114,7 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation( if (!overshoot_detectors_[si][ti]) { overshoot_detectors_[si][ti] = std::make_unique( - kWindowSizeMs, codec_, + kWindowSize.ms(), codec_, codec_mode_ == VideoCodecMode::kScreensharing); frames_since_layout_change_ = 0; } @@ -103,10 +124,26 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation( frames_since_layout_change_ = 0; } } + if (use_newfangled_headroom_adjustment_) { + // Instantiate average media rate trackers, one per active spatial layer. + + DataRate spatial_layer_rate = + DataRate::BitsPerSec(rates.bitrate.GetSpatialLayerSum(si)); + if (spatial_layer_rate.IsZero()) { + media_rate_trackers_[si].reset(); + } else { + if (media_rate_trackers_[si] == nullptr) { + constexpr int kMaxDataPointsInUtilizationTrackers = 100; + media_rate_trackers_[si] = std::make_unique( + kMaxDataPointsInUtilizationTrackers, kWindowSize); + } + // Media rate trackers use the unadjusted target rate. + media_rate_trackers_[si]->OnDataRateChanged(spatial_layer_rate, now); + } + } } // Next poll the overshoot detectors and populate the adjusted allocation. - const int64_t now_ms = rtc::TimeMillis(); VideoBitrateAllocation adjusted_allocation; std::vector layer_infos; DataRate wanted_overshoot_sum = DataRate::Zero(); @@ -137,12 +174,16 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation( RTC_DCHECK(overshoot_detectors_[si][0]); layer_info.link_utilization_factor = overshoot_detectors_[si][0] - ->GetNetworkRateUtilizationFactor(now_ms) + ->GetNetworkRateUtilizationFactor(now.ms()) .value_or(kDefaultUtilizationFactor); layer_info.media_utilization_factor = - overshoot_detectors_[si][0] - ->GetMediaRateUtilizationFactor(now_ms) - .value_or(kDefaultUtilizationFactor); + use_newfangled_headroom_adjustment_ + ? media_rate_trackers_[si] + ->GetRateUtilizationFactor(now) + .value_or(kDefaultUtilizationFactor) + : overshoot_detectors_[si][0] + ->GetMediaRateUtilizationFactor(now.ms()) + .value_or(kDefaultUtilizationFactor); } else if (layer_info.target_rate > DataRate::Zero()) { // Multiple temporal layers enabled for this simulcast/spatial layer. 
// Update rate for each of them and make a weighted average of utilization @@ -152,11 +193,13 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation( layer_info.media_utilization_factor = 0.0; for (size_t ti = 0; ti < active_tls[si]; ++ti) { RTC_DCHECK(overshoot_detectors_[si][ti]); - const absl::optional ti_link_utilization_factor = + const std::optional ti_link_utilization_factor = overshoot_detectors_[si][ti]->GetNetworkRateUtilizationFactor( - now_ms); - const absl::optional ti_media_utilization_factor = - overshoot_detectors_[si][ti]->GetMediaRateUtilizationFactor(now_ms); + now.ms()); + + const std::optional ti_media_utilization_factor = + overshoot_detectors_[si][ti]->GetMediaRateUtilizationFactor( + now.ms()); if (!ti_link_utilization_factor || !ti_media_utilization_factor) { layer_info.link_utilization_factor = kDefaultUtilizationFactor; layer_info.media_utilization_factor = kDefaultUtilizationFactor; @@ -170,14 +213,17 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation( layer_info.media_utilization_factor += weight * ti_media_utilization_factor.value(); } + + if (use_newfangled_headroom_adjustment_) { + layer_info.media_utilization_factor = + media_rate_trackers_[si]->GetRateUtilizationFactor(now).value_or( + kDefaultUtilizationFactor); + } } else { RTC_DCHECK_NOTREACHED(); } if (layer_info.link_utilization_factor < 1.0) { - // TODO(sprang): Consider checking underuse and allowing it to cancel some - // potential overuse by other streams. - // Don't boost target bitrate if encoder is under-using. layer_info.link_utilization_factor = 1.0; } else { @@ -297,7 +343,7 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation( overshoot_detectors_[si][ti]->SetTargetRate( DataRate::BitsPerSec(layer_bitrate_bps), - fps_fraction * rates.framerate_fps, now_ms); + fps_fraction * rates.framerate_fps, now.ms()); } } } @@ -327,7 +373,11 @@ void EncoderBitrateAdjuster::OnEncodedFrame(DataSize size, // Detectors may not exist, for instance if ScreenshareLayers is used. auto& detector = overshoot_detectors_[stream_index][temporal_index]; if (detector) { - detector->OnEncodedFrame(size.bytes(), rtc::TimeMillis()); + detector->OnEncodedFrame(size.bytes(), TimeMillis()); + } + if (media_rate_trackers_[stream_index]) { + media_rate_trackers_[stream_index]->OnDataProduced(size, + clock_.CurrentTime()); } } @@ -336,6 +386,7 @@ void EncoderBitrateAdjuster::Reset() { for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) { overshoot_detectors_[si][ti].reset(); } + media_rate_trackers_[si].reset(); } // Call AdjustRateAllocation() with the last know bitrate allocation, so that // the appropriate overuse detectors are immediately re-created. diff --git a/video/encoder_bitrate_adjuster.h b/video/encoder_bitrate_adjuster.h index 6b35186b98..9248b0d5eb 100644 --- a/video/encoder_bitrate_adjuster.h +++ b/video/encoder_bitrate_adjuster.h @@ -13,17 +13,21 @@ #include +#include "api/field_trials_view.h" +#include "api/units/time_delta.h" #include "api/video/encoded_image.h" #include "api/video/video_bitrate_allocation.h" #include "api/video_codecs/video_encoder.h" +#include "system_wrappers/include/clock.h" #include "video/encoder_overshoot_detector.h" +#include "video/rate_utilization_tracker.h" namespace webrtc { class EncoderBitrateAdjuster { public: // Size of sliding window used to track overshoot rate. 
- static constexpr int64_t kWindowSizeMs = 3000; + static constexpr TimeDelta kWindowSize = TimeDelta::Seconds(3); // Minimum number of frames since last layout change required to trust the // overshoot statistics. Otherwise falls back to default utilization. // By layout change, we mean any simulcast/spatial/temporal layer being either @@ -34,7 +38,9 @@ class EncoderBitrateAdjuster { // build too much queue at the very start. static constexpr double kDefaultUtilizationFactor = 1.2; - explicit EncoderBitrateAdjuster(const VideoCodec& codec_settings); + EncoderBitrateAdjuster(const VideoCodec& codec_settings, + const FieldTrialsView& field_trials, + Clock& clock); ~EncoderBitrateAdjuster(); // Adjusts the given rate allocation to make it paceable within the target @@ -57,6 +63,7 @@ class EncoderBitrateAdjuster { private: const bool utilize_bandwidth_headroom_; + const bool use_newfangled_headroom_adjustment_; VideoEncoder::RateControlParameters current_rate_control_parameters_; // FPS allocation of temporal layers, per simulcast/spatial layer. Represented @@ -71,6 +78,10 @@ class EncoderBitrateAdjuster { std::unique_ptr overshoot_detectors_[kMaxSpatialLayers][kMaxTemporalStreams]; + // Per spatial layer track of average media utilization. + std::unique_ptr + media_rate_trackers_[kMaxSpatialLayers]; + // Minimum bitrates allowed, per spatial layer. uint32_t min_bitrates_bps_[kMaxSpatialLayers]; @@ -79,6 +90,8 @@ class EncoderBitrateAdjuster { // Codec mode: { kRealtimeVideo, kScreensharing }. VideoCodecMode codec_mode_; + + Clock& clock_; }; } // namespace webrtc diff --git a/video/encoder_bitrate_adjuster_unittest.cc b/video/encoder_bitrate_adjuster_unittest.cc index 4ec223a208..8d3195b892 100644 --- a/video/encoder_bitrate_adjuster_unittest.cc +++ b/video/encoder_bitrate_adjuster_unittest.cc @@ -11,18 +11,26 @@ #include "video/encoder_bitrate_adjuster.h" #include +#include #include +#include "api/field_trials_view.h" #include "api/units/data_rate.h" -#include "rtc_base/fake_clock.h" +#include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" -#include "test/field_trial.h" #include "test/gtest.h" +#include "test/scoped_key_value_config.h" +#include "test/time_controller/simulated_time_controller.h" namespace webrtc { namespace test { -class EncoderBitrateAdjusterTest : public ::testing::Test { +using ::testing::Test; +using ::testing::Values; +using ::testing::WithParamInterface; + +class EncoderBitrateAdjusterTest : public Test, + public WithParamInterface { public: static constexpr int64_t kWindowSizeMs = 3000; static constexpr int kDefaultBitrateBps = 300000; @@ -34,42 +42,18 @@ class EncoderBitrateAdjusterTest : public ::testing::Test { static_assert(kSequenceLength % 2 == 0, "Sequence length must be even."); EncoderBitrateAdjusterTest() - : target_bitrate_(DataRate::BitsPerSec(kDefaultBitrateBps)), + : time_controller_(/*start_time=*/Timestamp::Millis(123)), + target_bitrate_(DataRate::BitsPerSec(kDefaultBitrateBps)), target_framerate_fps_(kDefaultFrameRateFps), tl_pattern_idx_{}, - sequence_idx_{} {} + sequence_idx_{}, + scoped_field_trial_(GetParam()) {} protected: - void SetUpAdjuster(size_t num_spatial_layers, - size_t num_temporal_layers, - bool vp9_svc) { - // Initialize some default VideoCodec instance with the given number of - // layers. 
- if (vp9_svc) { - codec_.codecType = VideoCodecType::kVideoCodecVP9; - codec_.numberOfSimulcastStreams = 1; - codec_.VP9()->numberOfSpatialLayers = num_spatial_layers; - codec_.VP9()->numberOfTemporalLayers = num_temporal_layers; - for (size_t si = 0; si < num_spatial_layers; ++si) { - codec_.spatialLayers[si].minBitrate = 100 * (1 << si); - codec_.spatialLayers[si].targetBitrate = 200 * (1 << si); - codec_.spatialLayers[si].maxBitrate = 300 * (1 << si); - codec_.spatialLayers[si].active = true; - codec_.spatialLayers[si].numberOfTemporalLayers = num_temporal_layers; - } - } else { - codec_.codecType = VideoCodecType::kVideoCodecVP8; - codec_.numberOfSimulcastStreams = num_spatial_layers; - codec_.VP8()->numberOfTemporalLayers = num_temporal_layers; - for (size_t si = 0; si < num_spatial_layers; ++si) { - codec_.simulcastStream[si].minBitrate = 100 * (1 << si); - codec_.simulcastStream[si].targetBitrate = 200 * (1 << si); - codec_.simulcastStream[si].maxBitrate = 300 * (1 << si); - codec_.simulcastStream[si].active = true; - codec_.simulcastStream[si].numberOfTemporalLayers = num_temporal_layers; - } - } - + void SetUpAdjusterWithCodec(size_t num_spatial_layers, + size_t num_temporal_layers, + const VideoCodec& codec) { + codec_ = codec; for (size_t si = 0; si < num_spatial_layers; ++si) { encoder_info_.fps_allocation[si].resize(num_temporal_layers); double fraction = 1.0; @@ -80,13 +64,47 @@ class EncoderBitrateAdjusterTest : public ::testing::Test { } } - adjuster_ = std::make_unique(codec_); + adjuster_ = std::make_unique( + codec_, scoped_field_trial_, *time_controller_.GetClock()); adjuster_->OnEncoderInfo(encoder_info_); current_adjusted_allocation_ = adjuster_->AdjustRateAllocation(VideoEncoder::RateControlParameters( current_input_allocation_, target_framerate_fps_)); } + void SetUpAdjuster(size_t num_spatial_layers, + size_t num_temporal_layers, + bool vp9_svc) { + // Initialize some default VideoCodec instance with the given number of + // layers. 
+ VideoCodec codec; + if (vp9_svc) { + codec.codecType = VideoCodecType::kVideoCodecVP9; + codec.numberOfSimulcastStreams = 1; + codec.VP9()->numberOfSpatialLayers = num_spatial_layers; + codec.VP9()->numberOfTemporalLayers = num_temporal_layers; + for (size_t si = 0; si < num_spatial_layers; ++si) { + codec.spatialLayers[si].minBitrate = 100 * (1 << si); + codec.spatialLayers[si].targetBitrate = 200 * (1 << si); + codec.spatialLayers[si].maxBitrate = 300 * (1 << si); + codec.spatialLayers[si].active = true; + codec.spatialLayers[si].numberOfTemporalLayers = num_temporal_layers; + } + } else { + codec.codecType = VideoCodecType::kVideoCodecVP8; + codec.numberOfSimulcastStreams = num_spatial_layers; + codec.VP8()->numberOfTemporalLayers = num_temporal_layers; + for (size_t si = 0; si < num_spatial_layers; ++si) { + codec.simulcastStream[si].minBitrate = 100 * (1 << si); + codec.simulcastStream[si].targetBitrate = 200 * (1 << si); + codec.simulcastStream[si].maxBitrate = 300 * (1 << si); + codec.simulcastStream[si].active = true; + codec.simulcastStream[si].numberOfTemporalLayers = num_temporal_layers; + } + } + SetUpAdjusterWithCodec(num_spatial_layers, num_temporal_layers, codec); + } + void InsertFrames(std::vector> media_utilization_factors, int64_t duration_ms) { InsertFrames(media_utilization_factors, media_utilization_factors, @@ -100,10 +118,10 @@ class EncoderBitrateAdjusterTest : public ::testing::Test { RTC_DCHECK_EQ(media_utilization_factors.size(), network_utilization_factors.size()); - const int64_t start_us = rtc::TimeMicros(); - while (rtc::TimeMicros() < - start_us + (duration_ms * rtc::kNumMicrosecsPerMillisec)) { - clock_.AdvanceTime(TimeDelta::Seconds(1) / target_framerate_fps_); + const int64_t start_us = TimeMicros(); + while (TimeMicros() < start_us + (duration_ms * kNumMicrosecsPerMillisec)) { + time_controller_.AdvanceTime(TimeDelta::Seconds(1) / + target_framerate_fps_); for (size_t si = 0; si < NumSpatialLayers(); ++si) { const std::vector& tl_pattern = kTlPatterns[NumTemporalLayers(si) - 1]; @@ -222,16 +240,19 @@ class EncoderBitrateAdjusterTest : public ::testing::Test { return multiplied_allocation; } + GlobalSimulatedTimeController time_controller_; + VideoCodec codec_; VideoEncoder::EncoderInfo encoder_info_; std::unique_ptr adjuster_; VideoBitrateAllocation current_input_allocation_; VideoBitrateAllocation current_adjusted_allocation_; - rtc::ScopedFakeClock clock_; + DataRate target_bitrate_; double target_framerate_fps_; int tl_pattern_idx_[kMaxSpatialLayers]; int sequence_idx_[kMaxSpatialLayers][kMaxTemporalStreams]; + test::ScopedKeyValueConfig scoped_field_trial_; const std::vector kTlPatterns[kMaxTemporalStreams] = { {0}, @@ -240,7 +261,7 @@ class EncoderBitrateAdjusterTest : public ::testing::Test { {0, 3, 2, 3, 1, 3, 2, 3}}; }; -TEST_F(EncoderBitrateAdjusterTest, SingleLayerOptimal) { +TEST_P(EncoderBitrateAdjusterTest, SingleLayerOptimal) { // Single layer, well behaved encoder. current_input_allocation_.SetBitrate(0, 0, 300000); target_framerate_fps_ = 30; @@ -254,7 +275,7 @@ TEST_F(EncoderBitrateAdjusterTest, SingleLayerOptimal) { ExpectNear(current_input_allocation_, current_adjusted_allocation_, 0.01); } -TEST_F(EncoderBitrateAdjusterTest, SingleLayerOveruse) { +TEST_P(EncoderBitrateAdjusterTest, SingleLayerOveruse) { // Single layer, well behaved encoder. 
current_input_allocation_.SetBitrate(0, 0, 300000); target_framerate_fps_ = 30; @@ -268,7 +289,7 @@ TEST_F(EncoderBitrateAdjusterTest, SingleLayerOveruse) { current_adjusted_allocation_, 0.01); } -TEST_F(EncoderBitrateAdjusterTest, SingleLayerUnderuse) { +TEST_P(EncoderBitrateAdjusterTest, SingleLayerUnderuse) { // Single layer, well behaved encoder. current_input_allocation_.SetBitrate(0, 0, 300000); target_framerate_fps_ = 30; @@ -281,7 +302,7 @@ TEST_F(EncoderBitrateAdjusterTest, SingleLayerUnderuse) { ExpectNear(current_input_allocation_, current_adjusted_allocation_, 0.00); } -TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersOptimalSize) { +TEST_P(EncoderBitrateAdjusterTest, ThreeTemporalLayersOptimalSize) { // Three temporal layers, 60%/20%/20% bps distro, well behaved encoder. current_input_allocation_.SetBitrate(0, 0, 180000); current_input_allocation_.SetBitrate(0, 1, 60000); @@ -295,7 +316,7 @@ TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersOptimalSize) { ExpectNear(current_input_allocation_, current_adjusted_allocation_, 0.01); } -TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersOvershoot) { +TEST_P(EncoderBitrateAdjusterTest, ThreeTemporalLayersOvershoot) { // Three temporal layers, 60%/20%/20% bps distro. // 10% overshoot on all layers. current_input_allocation_.SetBitrate(0, 0, 180000); @@ -312,7 +333,7 @@ TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersOvershoot) { current_adjusted_allocation_, 0.01); } -TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersUndershoot) { +TEST_P(EncoderBitrateAdjusterTest, ThreeTemporalLayersUndershoot) { // Three temporal layers, 60%/20%/20% bps distro, undershoot all layers. current_input_allocation_.SetBitrate(0, 0, 180000); current_input_allocation_.SetBitrate(0, 1, 60000); @@ -327,7 +348,7 @@ TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersUndershoot) { ExpectNear(current_input_allocation_, current_adjusted_allocation_, 0.0); } -TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersSkewedOvershoot) { +TEST_P(EncoderBitrateAdjusterTest, ThreeTemporalLayersSkewedOvershoot) { // Three temporal layers, 60%/20%/20% bps distro. // 10% overshoot on base layer, 20% on higher layers. current_input_allocation_.SetBitrate(0, 0, 180000); @@ -345,7 +366,7 @@ TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersSkewedOvershoot) { current_adjusted_allocation_, 0.01); } -TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersNonLayeredEncoder) { +TEST_P(EncoderBitrateAdjusterTest, ThreeTemporalLayersNonLayeredEncoder) { // Three temporal layers, 60%/20%/20% bps allocation, 10% overshoot, // encoder does not actually support temporal layers. current_input_allocation_.SetBitrate(0, 0, 180000); @@ -366,7 +387,7 @@ TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersNonLayeredEncoder) { ExpectNear(expected_allocation, current_adjusted_allocation_, 0.01); } -TEST_F(EncoderBitrateAdjusterTest, IgnoredStream) { +TEST_P(EncoderBitrateAdjusterTest, IgnoredStream) { // Encoder with three temporal layers, but in a mode that does not support // deterministic frame rate. Those are ignored, even if bitrate overshoots. current_input_allocation_.SetBitrate(0, 0, 180000); @@ -385,7 +406,7 @@ TEST_F(EncoderBitrateAdjusterTest, IgnoredStream) { ExpectNear(current_input_allocation_, current_adjusted_allocation_, 0.00); } -TEST_F(EncoderBitrateAdjusterTest, DifferentSpatialOvershoots) { +TEST_P(EncoderBitrateAdjusterTest, DifferentSpatialOvershoots) { // Two streams, both with three temporal layers. 
// S0 has 5% overshoot, S1 has 25% overshoot. current_input_allocation_.SetBitrate(0, 0, 180000); @@ -417,12 +438,14 @@ TEST_F(EncoderBitrateAdjusterTest, DifferentSpatialOvershoots) { } } -TEST_F(EncoderBitrateAdjusterTest, HeadroomAllowsOvershootToMediaRate) { +TEST_P(EncoderBitrateAdjusterTest, HeadroomAllowsOvershootToMediaRate) { + if (GetParam() == "WebRTC-VideoRateControl/adjuster_use_headroom:false/") { + // This test does not make sense without headroom adjustment. + GTEST_SKIP(); + } + // Two streams, both with three temporal layers. // Media rate is 1.0, but network rate is higher. - ScopedFieldTrials field_trial( - "WebRTC-VideoRateControl/adjuster_use_headroom:true/"); - const uint32_t kS0Bitrate = 300000; const uint32_t kS1Bitrate = 900000; current_input_allocation_.SetBitrate(0, 0, kS0Bitrate / 3); @@ -459,11 +482,14 @@ TEST_F(EncoderBitrateAdjusterTest, HeadroomAllowsOvershootToMediaRate) { } } -TEST_F(EncoderBitrateAdjusterTest, DontExceedMediaRateEvenWithHeadroom) { +TEST_P(EncoderBitrateAdjusterTest, DontExceedMediaRateEvenWithHeadroom) { + if (GetParam() == "WebRTC-VideoRateControl/adjuster_use_headroom:false/") { + // This test does not make sense without headroom adjustment. + GTEST_SKIP(); + } + // Two streams, both with three temporal layers. // Media rate is 1.1, but network rate is higher. - ScopedFieldTrials field_trial( - "WebRTC-VideoRateControl/adjuster_use_headroom:true/"); const uint32_t kS0Bitrate = 300000; const uint32_t kS1Bitrate = 900000; @@ -477,30 +503,78 @@ TEST_F(EncoderBitrateAdjusterTest, DontExceedMediaRateEvenWithHeadroom) { target_framerate_fps_ = 30; // Run twice, once configured as simulcast and once as VP9 SVC. - for (int i = 0; i < 2; ++i) { - SetUpAdjuster(2, 3, i == 0); - // Network rate has 30% overshoot, media rate has 10% overshoot. - InsertFrames({{1.1, 1.1, 1.1}, {1.1, 1.1, 1.1}}, - {{1.3, 1.3, 1.3}, {1.3, 1.3, 1.3}}, - kWindowSizeMs * kSequenceLength); + for (const bool is_svc : {false, true}) { + SetUpAdjuster(/*num_spatial_layers=*/2, + /*num_temporal_layers=*/3, is_svc); - // Push back by 30%. + // First insert frames with no overshoot. + InsertFrames({{1.0, 1.0, 1.0}}, kWindowSizeMs * kSequenceLength); + // Verify encoder is not pushed backed. current_adjusted_allocation_ = adjuster_->AdjustRateAllocation(VideoEncoder::RateControlParameters( current_input_allocation_, target_framerate_fps_)); // The up-down causes a bit more noise, allow slightly more error margin. - ExpectNear(MultiplyAllocation(current_input_allocation_, 1 / 1.3), + ExpectNear(MultiplyAllocation(current_input_allocation_, 1.0), current_adjusted_allocation_, 0.015); + // Change network rate to 30% overshoot, media rate has 10% overshoot. + InsertFrames({{1.1, 1.1, 1.1}, {1.1, 1.1, 1.1}}, + {{1.3, 1.3, 1.3}, {1.3, 1.3, 1.3}}, + kWindowSizeMs * kSequenceLength); + // Add 100% link headroom, overshoot from network to media rate is allowed. current_adjusted_allocation_ = adjuster_->AdjustRateAllocation(VideoEncoder::RateControlParameters( current_input_allocation_, target_framerate_fps_, DataRate::BitsPerSec(current_input_allocation_.get_sum_bps() * 2))); + ExpectNear(MultiplyAllocation(current_input_allocation_, 1 / 1.1), - current_adjusted_allocation_, 0.015); + current_adjusted_allocation_, 0.02); } } +TEST_P(EncoderBitrateAdjusterTest, HonorsMinBitrateWithAv1) { + // Single layer, well behaved encoder. 
+ const DataRate kHighBitrate = DataRate::KilobitsPerSec(20); + const DataRate kALowerMinBitrate = DataRate::KilobitsPerSec(15); + + current_input_allocation_.SetBitrate(0, 0, kHighBitrate.bps()); + + VideoBitrateAllocation expected_input_allocation; + expected_input_allocation.SetBitrate(0, 0, kALowerMinBitrate.bps()); + + target_framerate_fps_ = 30; + + VideoCodec codec; + codec.codecType = VideoCodecType::kVideoCodecAV1; + codec.numberOfSimulcastStreams = 1; + codec.SetScalabilityMode(ScalabilityMode::kL1T1); + codec.spatialLayers[0].minBitrate = kALowerMinBitrate.kbps(); + codec.spatialLayers[0].targetBitrate = 500; + codec.spatialLayers[0].maxBitrate = 1000; + codec.spatialLayers[0].active = true; + codec.spatialLayers[0].numberOfTemporalLayers = 1; + + SetUpAdjusterWithCodec(/*num_spatial_layers=*/1, /*num_temporal_layers=*/1, + codec); + + InsertFrames({{2.0}}, kWindowSizeMs); + + current_adjusted_allocation_ = + adjuster_->AdjustRateAllocation(VideoEncoder::RateControlParameters( + current_input_allocation_, target_framerate_fps_)); + // Adjusted allocation near input. Allow 1% error margin due to rounding + // errors etc. + ExpectNear(expected_input_allocation, current_adjusted_allocation_, 0.01); +} + +INSTANTIATE_TEST_SUITE_P( + AdjustWithHeadroomVariations, + EncoderBitrateAdjusterTest, + Values("WebRTC-VideoRateControl/adjuster_use_headroom:false/", + "WebRTC-VideoRateControl/adjuster_use_headroom:true/", + "WebRTC-VideoRateControl/adjuster_use_headroom:true/" + "WebRTC-BitrateAdjusterUseNewfangledHeadroomAdjustment/Enabled/")); + } // namespace test } // namespace webrtc diff --git a/video/encoder_overshoot_detector.cc b/video/encoder_overshoot_detector.cc index 2c4efdb5a6..b9697f57de 100644 --- a/video/encoder_overshoot_detector.cc +++ b/video/encoder_overshoot_detector.cc @@ -136,13 +136,13 @@ double EncoderOvershootDetector::HandleEncodedFrame( return utilization_factor; } -absl::optional -EncoderOvershootDetector::GetNetworkRateUtilizationFactor(int64_t time_ms) { +std::optional EncoderOvershootDetector::GetNetworkRateUtilizationFactor( + int64_t time_ms) { CullOldUpdates(time_ms); // No data points within window, return. if (utilization_factors_.empty()) { - return absl::nullopt; + return std::nullopt; } // TODO(sprang): Consider changing from arithmetic mean to some other @@ -150,13 +150,13 @@ EncoderOvershootDetector::GetNetworkRateUtilizationFactor(int64_t time_ms) { return sum_network_utilization_factors_ / utilization_factors_.size(); } -absl::optional EncoderOvershootDetector::GetMediaRateUtilizationFactor( +std::optional EncoderOvershootDetector::GetMediaRateUtilizationFactor( int64_t time_ms) { CullOldUpdates(time_ms); // No data points within window, return. 
if (utilization_factors_.empty()) { - return absl::nullopt; + return std::nullopt; } return sum_media_utilization_factors_ / utilization_factors_.size(); @@ -266,8 +266,13 @@ void EncoderOvershootDetector::UpdateHistograms() { RTC_HISTOGRAMS_COUNTS_10000(index, overshoot_histogram_prefix + "H264", average_overshoot_percent); break; + case VideoCodecType::kVideoCodecH265: + RTC_HISTOGRAMS_COUNTS_10000(index, rmse_histogram_prefix + "H265", + bitrate_rmse); + RTC_HISTOGRAMS_COUNTS_10000(index, overshoot_histogram_prefix + "H265", + average_overshoot_percent); + break; case VideoCodecType::kVideoCodecGeneric: - case VideoCodecType::kVideoCodecMultiplex: break; } } diff --git a/video/encoder_overshoot_detector.h b/video/encoder_overshoot_detector.h index 12c4bba5db..e78b601d1d 100644 --- a/video/encoder_overshoot_detector.h +++ b/video/encoder_overshoot_detector.h @@ -12,8 +12,8 @@ #define VIDEO_ENCODER_OVERSHOOT_DETECTOR_H_ #include +#include -#include "absl/types/optional.h" #include "api/units/data_rate.h" #include "api/video_codecs/video_codec.h" @@ -34,11 +34,11 @@ class EncoderOvershootDetector { // This utilization factor reaches 1.0 only if the encoder produces encoded // frame in such a way that they can be sent onto the network at // `target_bitrate` without building growing queues. - absl::optional GetNetworkRateUtilizationFactor(int64_t time_ms); + std::optional GetNetworkRateUtilizationFactor(int64_t time_ms); // This utilization factor is based just on actual encoded frame sizes in // relation to ideal sizes. An undershoot may be compensated by an // overshoot so that the average over time is close to `target_bitrate`. - absl::optional GetMediaRateUtilizationFactor(int64_t time_ms); + std::optional GetMediaRateUtilizationFactor(int64_t time_ms); void Reset(); private: diff --git a/video/encoder_overshoot_detector_unittest.cc b/video/encoder_overshoot_detector_unittest.cc index bdc2676281..0712194a25 100644 --- a/video/encoder_overshoot_detector_unittest.cc +++ b/video/encoder_overshoot_detector_unittest.cc @@ -35,10 +35,10 @@ static std::string CodecTypeToHistogramSuffix(VideoCodecType codec) { return "Av1"; case kVideoCodecH264: return "H264"; + case kVideoCodecH265: + return "H265"; case kVideoCodecGeneric: return "Generic"; - case kVideoCodecMultiplex: - return "Multiplex"; } } @@ -70,37 +70,37 @@ class EncoderOvershootDetectorTest : public TestWithParam { static_cast(actual_utilization_factor * (target_bitrate_.bps() / target_framerate_fps_) / 8); detector_.SetTargetRate(target_bitrate_, target_framerate_fps_, - rtc::TimeMillis()); + TimeMillis()); - if (rtc::TimeMillis() == 0) { + if (TimeMillis() == 0) { // Encode a first frame which by definition has no overuse factor. - detector_.OnEncodedFrame(frame_size_bytes, rtc::TimeMillis()); + detector_.OnEncodedFrame(frame_size_bytes, TimeMillis()); clock_.AdvanceTime(TimeDelta::Seconds(1) / target_framerate_fps_); } int64_t runtime_us = 0; while (runtime_us < test_duration_ms * 1000) { - detector_.OnEncodedFrame(frame_size_bytes, rtc::TimeMillis()); - runtime_us += rtc::kNumMicrosecsPerSec / target_framerate_fps_; + detector_.OnEncodedFrame(frame_size_bytes, TimeMillis()); + runtime_us += kNumMicrosecsPerSec / target_framerate_fps_; clock_.AdvanceTime(TimeDelta::Seconds(1) / target_framerate_fps_); } // At constant utilization, both network and media utilization should be // close to expected. 
- const absl::optional network_utilization_factor = - detector_.GetNetworkRateUtilizationFactor(rtc::TimeMillis()); + const std::optional network_utilization_factor = + detector_.GetNetworkRateUtilizationFactor(TimeMillis()); EXPECT_NEAR(network_utilization_factor.value_or(-1), expected_utilization_factor, allowed_error); - const absl::optional media_utilization_factor = - detector_.GetMediaRateUtilizationFactor(rtc::TimeMillis()); + const std::optional media_utilization_factor = + detector_.GetMediaRateUtilizationFactor(TimeMillis()); EXPECT_NEAR(media_utilization_factor.value_or(-1), expected_utilization_factor, allowed_error); } static constexpr int64_t kWindowSizeMs = 3000; EncoderOvershootDetector detector_; - rtc::ScopedFakeClock clock_; + ScopedFakeClock clock_; DataRate target_bitrate_; double target_framerate_fps_; }; @@ -108,17 +108,16 @@ class EncoderOvershootDetectorTest : public TestWithParam { TEST_P(EncoderOvershootDetectorTest, NoUtilizationIfNoRate) { const int frame_size_bytes = 1000; const int64_t time_interval_ms = 33; - detector_.SetTargetRate(target_bitrate_, target_framerate_fps_, - rtc::TimeMillis()); + detector_.SetTargetRate(target_bitrate_, target_framerate_fps_, TimeMillis()); // No data points, can't determine overshoot rate. EXPECT_FALSE( - detector_.GetNetworkRateUtilizationFactor(rtc::TimeMillis()).has_value()); + detector_.GetNetworkRateUtilizationFactor(TimeMillis()).has_value()); - detector_.OnEncodedFrame(frame_size_bytes, rtc::TimeMillis()); + detector_.OnEncodedFrame(frame_size_bytes, TimeMillis()); clock_.AdvanceTime(TimeDelta::Millis(time_interval_ms)); EXPECT_TRUE( - detector_.GetNetworkRateUtilizationFactor(rtc::TimeMillis()).has_value()); + detector_.GetNetworkRateUtilizationFactor(TimeMillis()).has_value()); } TEST_P(EncoderOvershootDetectorTest, OptimalSize) { @@ -165,8 +164,7 @@ TEST_P(EncoderOvershootDetectorTest, ConstantRateVaryingOvershoot) { TEST_P(EncoderOvershootDetectorTest, PartialOvershoot) { const int ideal_frame_size_bytes = (target_bitrate_.bps() / target_framerate_fps_) / 8; - detector_.SetTargetRate(target_bitrate_, target_framerate_fps_, - rtc::TimeMillis()); + detector_.SetTargetRate(target_bitrate_, target_framerate_fps_, TimeMillis()); // Test scenario with average bitrate matching the target bitrate, but // with some utilization factor penalty as the frames can't be paced out @@ -180,31 +178,30 @@ TEST_P(EncoderOvershootDetectorTest, PartialOvershoot) { int64_t runtime_us = 0; int i = 0; - while (runtime_us < kWindowSizeMs * rtc::kNumMicrosecsPerMillisec) { - runtime_us += rtc::kNumMicrosecsPerSec / target_framerate_fps_; + while (runtime_us < kWindowSizeMs * kNumMicrosecsPerMillisec) { + runtime_us += kNumMicrosecsPerSec / target_framerate_fps_; clock_.AdvanceTime(TimeDelta::Seconds(1) / target_framerate_fps_); int frame_size_bytes = (i++ % 4 < 2) ? (ideal_frame_size_bytes * 120) / 100 : (ideal_frame_size_bytes * 80) / 100; - detector_.OnEncodedFrame(frame_size_bytes, rtc::TimeMillis()); + detector_.OnEncodedFrame(frame_size_bytes, TimeMillis()); } // Expect 5% overshoot for network rate, see above. - const absl::optional network_utilization_factor = - detector_.GetNetworkRateUtilizationFactor(rtc::TimeMillis()); + const std::optional network_utilization_factor = + detector_.GetNetworkRateUtilizationFactor(TimeMillis()); EXPECT_NEAR(network_utilization_factor.value_or(-1), 1.05, 0.01); // Expect media rate to be on average correct. 
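The EncoderOvershootDetector hunks above distinguish a network-rate factor (can the encoded frames be paced onto the wire at the target rate without queues growing) from a media-rate factor (do the encoded sizes average out to the target). A minimal sketch of how that API is driven, mirroring the PartialOvershoot scenario above; the helper name, the explicit now_ms bookkeeping and the 120%/80% frame-size pattern are illustrative only and not part of the patch:

#include <cstdint>
#include <utility>

#include "api/units/data_rate.h"
#include "video/encoder_overshoot_detector.h"

namespace webrtc {

// Feeds frames that alternate between 120% and 80% of the ideal size, then
// returns {network, media} utilization factors. The average size matches the
// target, so the media factor stays close to 1.0, but oversized frames cannot
// be paced out at the target rate, so the network factor ends up above 1.0.
std::pair<double, double> RunAlternatingOvershoot(
    EncoderOvershootDetector& detector,
    DataRate target_bitrate,
    double fps,
    int64_t now_ms,
    int num_frames) {
  const int ideal_frame_size_bytes =
      static_cast<int>(target_bitrate.bps() / fps / 8);
  detector.SetTargetRate(target_bitrate, fps, now_ms);
  for (int i = 0; i < num_frames; ++i) {
    const int frame_size_bytes = (i % 4 < 2)
                                     ? (ideal_frame_size_bytes * 120) / 100
                                     : (ideal_frame_size_bytes * 80) / 100;
    detector.OnEncodedFrame(frame_size_bytes, now_ms);
    now_ms += static_cast<int64_t>(1000.0 / fps);
  }
  return {detector.GetNetworkRateUtilizationFactor(now_ms).value_or(-1.0),
          detector.GetMediaRateUtilizationFactor(now_ms).value_or(-1.0)};
}

}  // namespace webrtc

Both getters return std::nullopt until at least one frame falls inside the reporting window, which is why the tests read them with value_or(-1).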
- const absl::optional media_utilization_factor = - detector_.GetMediaRateUtilizationFactor(rtc::TimeMillis()); + const std::optional media_utilization_factor = + detector_.GetMediaRateUtilizationFactor(TimeMillis()); EXPECT_NEAR(media_utilization_factor.value_or(-1), 1.00, 0.01); } TEST_P(EncoderOvershootDetectorTest, RecordsZeroErrorMetricWithNoOvershoot) { DataSize ideal_frame_size = target_bitrate_ / Frequency::Hertz(target_framerate_fps_); - detector_.SetTargetRate(target_bitrate_, target_framerate_fps_, - rtc::TimeMillis()); - detector_.OnEncodedFrame(ideal_frame_size.bytes(), rtc::TimeMillis()); + detector_.SetTargetRate(target_bitrate_, target_framerate_fps_, TimeMillis()); + detector_.OnEncodedFrame(ideal_frame_size.bytes(), TimeMillis()); detector_.Reset(); const VideoCodecType codec = GetParam().codec_type; @@ -235,9 +232,8 @@ TEST_P(EncoderOvershootDetectorTest, target_bitrate_ / Frequency::Hertz(target_framerate_fps_); // Use target frame size with 50% overshoot. DataSize target_frame_size = ideal_frame_size * 3 / 2; - detector_.SetTargetRate(target_bitrate_, target_framerate_fps_, - rtc::TimeMillis()); - detector_.OnEncodedFrame(target_frame_size.bytes(), rtc::TimeMillis()); + detector_.SetTargetRate(target_bitrate_, target_framerate_fps_, TimeMillis()); + detector_.OnEncodedFrame(target_frame_size.bytes(), TimeMillis()); detector_.Reset(); const VideoCodecType codec = GetParam().codec_type; @@ -275,6 +271,8 @@ INSTANTIATE_TEST_SUITE_P( {VideoCodecType::kVideoCodecAV1, false}, {VideoCodecType::kVideoCodecAV1, true}, {VideoCodecType::kVideoCodecH264, false}, - {VideoCodecType::kVideoCodecH264, true}})); + {VideoCodecType::kVideoCodecH264, true}, + {VideoCodecType::kVideoCodecH265, false}, + {VideoCodecType::kVideoCodecH265, true}})); } // namespace webrtc diff --git a/video/encoder_rtcp_feedback.cc b/video/encoder_rtcp_feedback.cc index ebba41e807..42827e2df9 100644 --- a/video/encoder_rtcp_feedback.cc +++ b/video/encoder_rtcp_feedback.cc @@ -11,12 +11,14 @@ #include "video/encoder_rtcp_feedback.h" #include +#include #include -#include "absl/types/optional.h" +#include "api/environment/environment.h" #include "api/video_codecs/video_encoder.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/keyframe_interval_settings.h" +#include "system_wrappers/include/clock.h" namespace webrtc { @@ -25,19 +27,22 @@ constexpr int kMinKeyframeSendIntervalMs = 300; } // namespace EncoderRtcpFeedback::EncoderRtcpFeedback( - Clock* clock, + const Environment& env, + bool per_layer_keyframes, const std::vector& ssrcs, VideoStreamEncoderInterface* encoder, std::function( uint32_t ssrc, const std::vector& seq_nums)> get_packet_infos) - : clock_(clock), + : env_(env), ssrcs_(ssrcs), + per_layer_keyframes_(per_layer_keyframes), get_packet_infos_(std::move(get_packet_infos)), video_stream_encoder_(encoder), - time_last_packet_delivery_queue_(Timestamp::Zero()), + time_last_packet_delivery_queue_(per_layer_keyframes ? 
ssrcs.size() : 1, + Timestamp::Zero()), min_keyframe_send_interval_( - TimeDelta::Millis(KeyframeIntervalSettings::ParseFromFieldTrials() + TimeDelta::Millis(KeyframeIntervalSettings(env_.field_trials()) .MinKeyframeSendIntervalMs() .value_or(kMinKeyframeSendIntervalMs))) { RTC_DCHECK(!ssrcs.empty()); @@ -49,14 +54,32 @@ void EncoderRtcpFeedback::OnReceivedIntraFrameRequest(uint32_t ssrc) { RTC_DCHECK_RUN_ON(&packet_delivery_queue_); RTC_DCHECK(std::find(ssrcs_.begin(), ssrcs_.end(), ssrc) != ssrcs_.end()); - const Timestamp now = clock_->CurrentTime(); - if (time_last_packet_delivery_queue_ + min_keyframe_send_interval_ > now) + auto it = std::find(ssrcs_.begin(), ssrcs_.end(), ssrc); + if (it == ssrcs_.end()) { + RTC_LOG(LS_WARNING) << "SSRC " << ssrc << " not found."; + return; + } + size_t ssrc_index = + per_layer_keyframes_ ? std::distance(ssrcs_.begin(), it) : 0; + RTC_CHECK_LE(ssrc_index, time_last_packet_delivery_queue_.size()); + const Timestamp now = env_.clock().CurrentTime(); + if (time_last_packet_delivery_queue_[ssrc_index] + + min_keyframe_send_interval_ > + now) return; - time_last_packet_delivery_queue_ = now; - - // Always produce key frame for all streams. - video_stream_encoder_->SendKeyFrame(); + time_last_packet_delivery_queue_[ssrc_index] = now; + + std::vector layers(ssrcs_.size(), + VideoFrameType::kVideoFrameDelta); + if (!per_layer_keyframes_) { + // Always produce key frame for all streams. + video_stream_encoder_->SendKeyFrame(); + } else { + // Determine on which layer we ask for key frames. + layers[ssrc_index] = VideoFrameType::kVideoFrameKey; + video_stream_encoder_->SendKeyFrame(layers); + } } void EncoderRtcpFeedback::OnReceivedLossNotification( @@ -101,7 +124,7 @@ void EncoderRtcpFeedback::OnReceivedLossNotification( loss_notification.dependencies_of_last_received_decodable = decodability_flag; loss_notification.last_received_decodable = - !decodability_flag ? absl::make_optional(false) : absl::nullopt; + !decodability_flag ? std::make_optional(false) : std::nullopt; } else if (!last_received.is_first && last_received.is_last) { if (decodability_flag) { // The frame has been received in full, and found to be decodable. @@ -113,7 +136,7 @@ void EncoderRtcpFeedback::OnReceivedLossNotification( // It is impossible to tell whether some dependencies were undecodable, // or whether the frame was unassemblable, but in either case, the frame // itself was undecodable. - loss_notification.dependencies_of_last_received_decodable = absl::nullopt; + loss_notification.dependencies_of_last_received_decodable = std::nullopt; loss_notification.last_received_decodable = false; } } else { // !last_received.is_first && !last_received.is_last @@ -123,12 +146,12 @@ void EncoderRtcpFeedback::OnReceivedLossNotification( // (Messages of this type are not sent by WebRTC at the moment, but are // theoretically possible, for example for serving as acks.) loss_notification.dependencies_of_last_received_decodable = true; - loss_notification.last_received_decodable = absl::nullopt; + loss_notification.last_received_decodable = std::nullopt; } else { // It is impossible to tell whether some dependencies were undecodable, // or whether the frame was unassemblable, but in either case, the frame // itself was undecodable. 
- loss_notification.dependencies_of_last_received_decodable = absl::nullopt; + loss_notification.dependencies_of_last_received_decodable = std::nullopt; loss_notification.last_received_decodable = false; } } diff --git a/video/encoder_rtcp_feedback.h b/video/encoder_rtcp_feedback.h index c66a94503e..29832dbd6d 100644 --- a/video/encoder_rtcp_feedback.h +++ b/video/encoder_rtcp_feedback.h @@ -13,13 +13,13 @@ #include #include +#include "api/environment/environment.h" #include "api/sequence_checker.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "call/rtp_video_sender_interface.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/system/no_unique_address.h" -#include "system_wrappers/include/clock.h" #include "video/video_stream_encoder_interface.h" namespace webrtc { @@ -32,7 +32,8 @@ class EncoderRtcpFeedback : public RtcpIntraFrameObserver, public RtcpLossNotificationObserver { public: EncoderRtcpFeedback( - Clock* clock, + const Environment& env, + bool per_layer_keyframes, const std::vector& ssrcs, VideoStreamEncoderInterface* encoder, std::function( @@ -49,8 +50,9 @@ class EncoderRtcpFeedback : public RtcpIntraFrameObserver, bool decodability_flag) override; private: - Clock* const clock_; + const Environment env_; const std::vector ssrcs_; + const bool per_layer_keyframes_; const std::function( uint32_t ssrc, const std::vector& seq_nums)> @@ -58,7 +60,7 @@ class EncoderRtcpFeedback : public RtcpIntraFrameObserver, VideoStreamEncoderInterface* const video_stream_encoder_; RTC_NO_UNIQUE_ADDRESS SequenceChecker packet_delivery_queue_; - Timestamp time_last_packet_delivery_queue_ + std::vector time_last_packet_delivery_queue_ RTC_GUARDED_BY(packet_delivery_queue_); const TimeDelta min_keyframe_send_interval_; diff --git a/video/encoder_rtcp_feedback_unittest.cc b/video/encoder_rtcp_feedback_unittest.cc index f1ac65d48f..62140e81a8 100644 --- a/video/encoder_rtcp_feedback_unittest.cc +++ b/video/encoder_rtcp_feedback_unittest.cc @@ -12,39 +12,52 @@ #include +#include "api/environment/environment_factory.h" #include "test/gmock.h" #include "test/gtest.h" #include "video/test/mock_video_stream_encoder.h" using ::testing::_; +using ::testing::ElementsAre; namespace webrtc { -class VieKeyRequestTest : public ::testing::Test { +class VideoEncoderFeedbackKeyframeTestBase : public ::testing::Test { public: - VieKeyRequestTest() + VideoEncoderFeedbackKeyframeTestBase(bool per_layer_pli_handling, + std::vector ssrcs) : simulated_clock_(123456789), encoder_(), - encoder_rtcp_feedback_( - &simulated_clock_, - std::vector(1, VieKeyRequestTest::kSsrc), - &encoder_, - nullptr) {} + encoder_rtcp_feedback_(CreateEnvironment(&simulated_clock_), + per_layer_pli_handling, + ssrcs, + &encoder_, + nullptr) {} protected: - const uint32_t kSsrc = 1234; + static const uint32_t kSsrc = 1234; + static const uint32_t kOtherSsrc = 4321; SimulatedClock simulated_clock_; ::testing::StrictMock encoder_; EncoderRtcpFeedback encoder_rtcp_feedback_; }; -TEST_F(VieKeyRequestTest, CreateAndTriggerRequests) { +class VideoEncoderFeedbackKeyframeTest + : public VideoEncoderFeedbackKeyframeTestBase { + public: + VideoEncoderFeedbackKeyframeTest() + : VideoEncoderFeedbackKeyframeTestBase( + /*per_layer_pli_handling=*/false, + {VideoEncoderFeedbackKeyframeTestBase::kSsrc}) {} +}; + +TEST_F(VideoEncoderFeedbackKeyframeTest, CreateAndTriggerRequests) { EXPECT_CALL(encoder_, SendKeyFrame(_)).Times(1); encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); } 
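The per-layer keyframe path added to OnReceivedIntraFrameRequest() above boils down to an SSRC-to-index lookup plus a frame-type vector with a single key entry. A standalone sketch of that mapping, with an assumed helper name and an assumed include path for VideoFrameType; the patch keeps this logic inline and additionally rate-limits each layer with min_keyframe_send_interval_:

#include <algorithm>
#include <cstdint>
#include <iterator>
#include <vector>

#include "api/video/video_frame_type.h"

namespace webrtc {

// Builds the argument for VideoStreamEncoderInterface::SendKeyFrame() when
// per-layer keyframes are enabled: kVideoFrameKey for the layer whose SSRC
// requested the keyframe, kVideoFrameDelta for every other layer.
std::vector<VideoFrameType> KeyframeRequestForSsrc(
    const std::vector<uint32_t>& ssrcs,
    uint32_t requesting_ssrc) {
  std::vector<VideoFrameType> layers(ssrcs.size(),
                                     VideoFrameType::kVideoFrameDelta);
  auto it = std::find(ssrcs.begin(), ssrcs.end(), requesting_ssrc);
  if (it != ssrcs.end()) {
    layers[std::distance(ssrcs.begin(), it)] = VideoFrameType::kVideoFrameKey;
  }
  return layers;
}

}  // namespace webrtc

The resulting vector is exactly what the ElementsAre(kVideoFrameKey, kVideoFrameDelta)-style expectations in the per-layer tests below verify.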
-TEST_F(VieKeyRequestTest, TooManyOnReceivedIntraFrameRequest) { +TEST_F(VideoEncoderFeedbackKeyframeTest, TooManyOnReceivedIntraFrameRequest) { EXPECT_CALL(encoder_, SendKeyFrame(_)).Times(1); encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); @@ -58,4 +71,61 @@ TEST_F(VieKeyRequestTest, TooManyOnReceivedIntraFrameRequest) { encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); } +class VideoEncoderFeedbackKeyframePerLayerPliTest + : public VideoEncoderFeedbackKeyframeTestBase { + public: + VideoEncoderFeedbackKeyframePerLayerPliTest() + : VideoEncoderFeedbackKeyframeTestBase( + /*per_layer_pli_handling=*/true, + {VideoEncoderFeedbackKeyframeTestBase::kSsrc, + VideoEncoderFeedbackKeyframeTestBase::kOtherSsrc}) {} +}; + +TEST_F(VideoEncoderFeedbackKeyframePerLayerPliTest, CreateAndTriggerRequests) { + EXPECT_CALL(encoder_, + SendKeyFrame(ElementsAre(VideoFrameType::kVideoFrameKey, + VideoFrameType::kVideoFrameDelta))) + .Times(1); + EXPECT_CALL(encoder_, + SendKeyFrame(ElementsAre(VideoFrameType::kVideoFrameDelta, + VideoFrameType::kVideoFrameKey))) + .Times(1); + encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); + encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kOtherSsrc); +} + +TEST_F(VideoEncoderFeedbackKeyframePerLayerPliTest, + TooManyOnReceivedIntraFrameRequest) { + EXPECT_CALL(encoder_, + SendKeyFrame(ElementsAre(VideoFrameType::kVideoFrameKey, + VideoFrameType::kVideoFrameDelta))) + .Times(1); + EXPECT_CALL(encoder_, + SendKeyFrame(ElementsAre(VideoFrameType::kVideoFrameDelta, + VideoFrameType::kVideoFrameKey))) + .Times(1); + encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); + encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); + encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kOtherSsrc); + simulated_clock_.AdvanceTimeMilliseconds(10); + encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); + encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kOtherSsrc); + + EXPECT_CALL(encoder_, + SendKeyFrame(ElementsAre(VideoFrameType::kVideoFrameKey, + VideoFrameType::kVideoFrameDelta))) + .Times(1); + EXPECT_CALL(encoder_, + SendKeyFrame(ElementsAre(VideoFrameType::kVideoFrameDelta, + VideoFrameType::kVideoFrameKey))) + .Times(1); + simulated_clock_.AdvanceTimeMilliseconds(300); + encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); + encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); + encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kSsrc); + encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kOtherSsrc); + encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kOtherSsrc); + encoder_rtcp_feedback_.OnReceivedIntraFrameRequest(kOtherSsrc); +} + } // namespace webrtc diff --git a/video/end_to_end_tests/bandwidth_tests.cc b/video/end_to_end_tests/bandwidth_tests.cc index 272e32b42e..82643ef59d 100644 --- a/video/end_to_end_tests/bandwidth_tests.cc +++ b/video/end_to_end_tests/bandwidth_tests.cc @@ -10,13 +10,14 @@ #include +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/task_queue/task_queue_base.h" #include "api/test/simulated_network.h" #include "api/units/time_delta.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" #include "api/video/video_bitrate_allocation.h" #include "call/fake_network_pipe.h" -#include "call/simulated_network.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "rtc_base/rate_limiter.h" #include "rtc_base/synchronization/mutex.h" @@ -26,6 +27,7 @@ #include 
"test/fake_encoder.h" #include "test/field_trial.h" #include "test/gtest.h" +#include "test/network/simulated_network.h" #include "test/rtcp_packet_parser.h" #include "test/rtp_rtcp_observer.h" #include "test/video_encoder_proxy_factory.h" @@ -58,7 +60,7 @@ TEST_F(BandwidthEndToEndTest, ReceiveStreamSendsRemb) { RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId)); } - Action OnReceiveRtcp(rtc::ArrayView packet) override { + Action OnReceiveRtcp(ArrayView packet) override { test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(packet)); @@ -127,7 +129,7 @@ class BandwidthStatsTest : public test::EndToEndTest { } // Called on the pacer thread. - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { // Stats need to be fetched on the thread where the caller objects were // constructed. task_queue_->PostTask([this]() { @@ -196,11 +198,11 @@ TEST_F(BandwidthEndToEndTest, RembWithSendSideBwe) { explicit BweObserver(TaskQueueBase* task_queue) : EndToEndTest(test::VideoTestConstants::kDefaultTimeout), sender_call_(nullptr), - clock_(Clock::GetRealTimeClock()), + env_(CreateEnvironment()), sender_ssrc_(0), remb_bitrate_bps_(1000000), state_(kWaitForFirstRampUp), - retransmission_rate_limiter_(clock_, 1000), + retransmission_rate_limiter_(&env_.clock(), 1000), task_queue_(task_queue) {} void OnStreamsStopped() override { rtp_rtcp_ = nullptr; } @@ -238,12 +240,11 @@ TEST_F(BandwidthEndToEndTest, RembWithSendSideBwe) { SimulatedNetworkInterface* /*receiver_network*/) override { RtpRtcpInterface::Configuration config; config.receiver_only = true; - config.clock = clock_; config.outgoing_transport = to_sender; config.retransmission_rate_limiter = &retransmission_rate_limiter_; config.local_media_ssrc = remb_sender_local_ssrc_; - rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(config); + rtp_rtcp_ = std::make_unique(env_, config); rtp_rtcp_->SetRemoteSSRC(remb_sender_remote_ssrc_); rtp_rtcp_->SetRTCPStatus(RtcpMode::kReducedSize); } @@ -294,7 +295,7 @@ TEST_F(BandwidthEndToEndTest, RembWithSendSideBwe) { enum TestState { kWaitForFirstRampUp, kWaitForRemb, kWaitForSecondRampUp }; Call* sender_call_; - Clock* const clock_; + const Environment env_; uint32_t sender_ssrc_; uint32_t remb_sender_local_ssrc_ = 0; uint32_t remb_sender_remote_ssrc_ = 0; @@ -317,9 +318,10 @@ TEST_F(BandwidthEndToEndTest, ReportsSetEncoderRates) { class EncoderRateStatsTest : public test::EndToEndTest, public test::FakeEncoder { public: - explicit EncoderRateStatsTest(TaskQueueBase* task_queue) + explicit EncoderRateStatsTest(const Environment& env, + TaskQueueBase* task_queue) : EndToEndTest(test::VideoTestConstants::kDefaultTimeout), - FakeEncoder(Clock::GetRealTimeClock()), + FakeEncoder(env), task_queue_(task_queue), send_stream_(nullptr), encoder_factory_(this), @@ -398,7 +400,7 @@ TEST_F(BandwidthEndToEndTest, ReportsSetEncoderRates) { test::VideoEncoderProxyFactory encoder_factory_; std::unique_ptr bitrate_allocator_factory_; uint32_t bitrate_kbps_ RTC_GUARDED_BY(mutex_); - } test(task_queue()); + } test(env(), task_queue()); RunBaseTest(&test); } diff --git a/video/end_to_end_tests/call_operation_tests.cc b/video/end_to_end_tests/call_operation_tests.cc index 4a8490b325..5b6662bb82 100644 --- a/video/end_to_end_tests/call_operation_tests.cc +++ b/video/end_to_end_tests/call_operation_tests.cc @@ -14,13 +14,13 @@ #include "api/test/frame_generator_interface.h" #include "api/test/simulated_network.h" #include "call/fake_network_pipe.h" -#include 
"call/simulated_network.h" #include "rtc_base/task_queue_for_test.h" #include "system_wrappers/include/sleep.h" #include "test/call_test.h" #include "test/field_trial.h" #include "test/frame_forwarder.h" #include "test/gtest.h" +#include "test/network/simulated_network.h" #include "test/null_transport.h" #include "test/video_test_constants.h" @@ -82,7 +82,7 @@ TEST_F(CallOperationEndToEndTest, RendersSingleDelayedFrame) { // frames in the queue. static const int kRenderDelayMs = 1000; - class Renderer : public rtc::VideoSinkInterface { + class Renderer : public VideoSinkInterface { public: void OnFrame(const VideoFrame& video_frame) override { SleepMs(kRenderDelayMs); @@ -93,7 +93,7 @@ TEST_F(CallOperationEndToEndTest, RendersSingleDelayedFrame) { return event_.Wait(test::VideoTestConstants::kDefaultTimeout); } - rtc::Event event_; + Event event_; } renderer; test::FrameForwarder frame_forwarder; @@ -116,8 +116,8 @@ TEST_F(CallOperationEndToEndTest, RendersSingleDelayedFrame) { // Create frames that are smaller than the send width/height, this is // done to check that the callbacks are done after processing video. std::unique_ptr frame_generator( - test::CreateSquareFrameGenerator(kWidth, kHeight, absl::nullopt, - absl::nullopt)); + test::CreateSquareFrameGenerator(kWidth, kHeight, std::nullopt, + std::nullopt)); GetVideoSendStream()->SetSource(&frame_forwarder, DegradationPreference::MAINTAIN_FRAMERATE); @@ -141,7 +141,7 @@ TEST_F(CallOperationEndToEndTest, RendersSingleDelayedFrame) { } TEST_F(CallOperationEndToEndTest, TransmitsFirstFrame) { - class Renderer : public rtc::VideoSinkInterface { + class Renderer : public VideoSinkInterface { public: void OnFrame(const VideoFrame& video_frame) override { event_.Set(); } @@ -149,7 +149,7 @@ TEST_F(CallOperationEndToEndTest, TransmitsFirstFrame) { return event_.Wait(test::VideoTestConstants::kDefaultTimeout); } - rtc::Event event_; + Event event_; } renderer; std::unique_ptr frame_generator; @@ -175,7 +175,7 @@ TEST_F(CallOperationEndToEndTest, TransmitsFirstFrame) { frame_generator = test::CreateSquareFrameGenerator( test::VideoTestConstants::kDefaultWidth, - test::VideoTestConstants::kDefaultHeight, absl::nullopt, absl::nullopt); + test::VideoTestConstants::kDefaultHeight, std::nullopt, std::nullopt); GetVideoSendStream()->SetSource(&frame_forwarder, DegradationPreference::MAINTAIN_FRAMERATE); test::FrameGeneratorInterface::VideoFrameData frame_data = diff --git a/video/end_to_end_tests/codec_tests.cc b/video/end_to_end_tests/codec_tests.cc index 60a0bc8a15..446b2c784e 100644 --- a/video/end_to_end_tests/codec_tests.cc +++ b/video/end_to_end_tests/codec_tests.cc @@ -9,8 +9,9 @@ */ #include +#include -#include "absl/types/optional.h" +#include "api/environment/environment.h" #include "api/test/video/function_video_encoder_factory.h" #include "api/video/color_space.h" #include "api/video/video_rotation.h" @@ -20,8 +21,6 @@ #include "media/engine/internal_decoder_factory.h" #include "media/engine/internal_encoder_factory.h" #include "modules/video_coding/codecs/h264/include/h264.h" -#include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h" -#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "test/call_test.h" @@ -49,11 +48,11 @@ class CodecEndToEndTest : public test::CallTest { }; class CodecObserver : public test::EndToEndTest, - public rtc::VideoSinkInterface { 
+ public VideoSinkInterface { public: CodecObserver(int no_frames_to_wait_for, VideoRotation rotation_to_test, - absl::optional color_space_to_test, + std::optional color_space_to_test, const std::string& payload_name, VideoEncoderFactory* encoder_factory, VideoDecoderFactory* decoder_factory) @@ -99,8 +98,8 @@ class CodecObserver : public test::EndToEndTest, if (expected_color_space_) { EXPECT_EQ(expected_color_space_, video_frame.color_space() - ? absl::make_optional(*video_frame.color_space()) - : absl::nullopt); + ? std::make_optional(*video_frame.color_space()) + : std::nullopt); } if (++frame_counter_ == no_frames_to_wait_for_) observation_complete_.Set(); @@ -115,7 +114,7 @@ class CodecObserver : public test::EndToEndTest, private: int no_frames_to_wait_for_; VideoRotation expected_rotation_; - absl::optional expected_color_space_; + std::optional expected_color_space_; std::string payload_name_; VideoEncoderFactory* encoder_factory_; VideoDecoderFactory* decoder_factory_; @@ -124,20 +123,28 @@ class CodecObserver : public test::EndToEndTest, TEST_F(CodecEndToEndTest, SendsAndReceivesVP8) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }); test::FunctionVideoDecoderFactory decoder_factory( - []() { return VP8Decoder::Create(); }); - CodecObserver test(5, kVideoRotation_0, absl::nullopt, "VP8", - &encoder_factory, &decoder_factory); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Decoder(env); + }); + CodecObserver test(5, kVideoRotation_0, std::nullopt, "VP8", &encoder_factory, + &decoder_factory); RunBaseTest(&test); } TEST_F(CodecEndToEndTest, SendsAndReceivesVP8Rotation90) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }); test::FunctionVideoDecoderFactory decoder_factory( - []() { return VP8Decoder::Create(); }); - CodecObserver test(5, kVideoRotation_90, absl::nullopt, "VP8", + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Decoder(env); + }); + CodecObserver test(5, kVideoRotation_90, std::nullopt, "VP8", &encoder_factory, &decoder_factory); RunBaseTest(&test); } @@ -145,27 +152,33 @@ TEST_F(CodecEndToEndTest, SendsAndReceivesVP8Rotation90) { #if defined(RTC_ENABLE_VP9) TEST_F(CodecEndToEndTest, SendsAndReceivesVP9) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP9Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp9Encoder(env); + }); test::FunctionVideoDecoderFactory decoder_factory( []() { return VP9Decoder::Create(); }); - CodecObserver test(500, kVideoRotation_0, absl::nullopt, "VP9", + CodecObserver test(500, kVideoRotation_0, std::nullopt, "VP9", &encoder_factory, &decoder_factory); RunBaseTest(&test); } TEST_F(CodecEndToEndTest, SendsAndReceivesVP9VideoRotation90) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP9Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp9Encoder(env); + }); test::FunctionVideoDecoderFactory decoder_factory( []() { return VP9Decoder::Create(); }); - CodecObserver test(5, kVideoRotation_90, absl::nullopt, "VP9", + CodecObserver test(5, kVideoRotation_90, std::nullopt, "VP9", &encoder_factory, &decoder_factory); RunBaseTest(&test); } TEST_F(CodecEndToEndTest, 
SendsAndReceivesVP9ExplicitColorSpace) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP9Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp9Encoder(env); + }); test::FunctionVideoDecoderFactory decoder_factory( []() { return VP9Decoder::Create(); }); CodecObserver test(5, kVideoRotation_90, @@ -177,7 +190,9 @@ TEST_F(CodecEndToEndTest, SendsAndReceivesVP9ExplicitColorSpace) { TEST_F(CodecEndToEndTest, SendsAndReceivesVP9ExplicitColorSpaceWithHdrMetadata) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP9Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp9Encoder(env); + }); test::FunctionVideoDecoderFactory decoder_factory( []() { return VP9Decoder::Create(); }); CodecObserver test(5, kVideoRotation_90, @@ -186,44 +201,6 @@ TEST_F(CodecEndToEndTest, RunBaseTest(&test); } -// Mutiplex tests are using VP9 as the underlying implementation. -TEST_F(CodecEndToEndTest, SendsAndReceivesMultiplex) { - InternalEncoderFactory internal_encoder_factory; - InternalDecoderFactory internal_decoder_factory; - test::FunctionVideoEncoderFactory encoder_factory( - [&internal_encoder_factory]() { - return std::make_unique( - &internal_encoder_factory, SdpVideoFormat(cricket::kVp9CodecName)); - }); - test::FunctionVideoDecoderFactory decoder_factory( - [&internal_decoder_factory]() { - return std::make_unique( - &internal_decoder_factory, SdpVideoFormat(cricket::kVp9CodecName)); - }); - - CodecObserver test(5, kVideoRotation_0, absl::nullopt, "multiplex", - &encoder_factory, &decoder_factory); - RunBaseTest(&test); -} - -TEST_F(CodecEndToEndTest, SendsAndReceivesMultiplexVideoRotation90) { - InternalEncoderFactory internal_encoder_factory; - InternalDecoderFactory internal_decoder_factory; - test::FunctionVideoEncoderFactory encoder_factory( - [&internal_encoder_factory]() { - return std::make_unique( - &internal_encoder_factory, SdpVideoFormat(cricket::kVp9CodecName)); - }); - test::FunctionVideoDecoderFactory decoder_factory( - [&internal_decoder_factory]() { - return std::make_unique( - &internal_decoder_factory, SdpVideoFormat(cricket::kVp9CodecName)); - }); - CodecObserver test(5, kVideoRotation_90, absl::nullopt, "multiplex", - &encoder_factory, &decoder_factory); - RunBaseTest(&test); -} - #endif // defined(RTC_ENABLE_VP9) #if defined(WEBRTC_USE_H264) @@ -247,46 +224,52 @@ INSTANTIATE_TEST_SUITE_P( TEST_P(EndToEndTestH264, SendsAndReceivesH264) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return H264Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateH264Encoder(env); + }); test::FunctionVideoDecoderFactory decoder_factory( []() { return H264Decoder::Create(); }); - CodecObserver test(500, kVideoRotation_0, absl::nullopt, "H264", + CodecObserver test(500, kVideoRotation_0, std::nullopt, "H264", &encoder_factory, &decoder_factory); RunBaseTest(&test); } TEST_P(EndToEndTestH264, SendsAndReceivesH264VideoRotation90) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return H264Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateH264Encoder(env); + }); test::FunctionVideoDecoderFactory decoder_factory( []() { return H264Decoder::Create(); }); - CodecObserver test(5, kVideoRotation_90, absl::nullopt, "H264", + CodecObserver test(5, kVideoRotation_90, std::nullopt, "H264", &encoder_factory, &decoder_factory); RunBaseTest(&test); } 
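Every factory in this file now uses the two-argument callback that receives the Environment alongside the negotiated SdpVideoFormat and forwards the Environment into the codec's create function. For reference, a sketch of the format-dispatching variant of the same callback shape (the same pattern appears later in this patch in multi_codec_receive_tests.cc); it relies on the includes already present in these test files:

// Inside one of these test bodies:
test::FunctionVideoEncoderFactory encoder_factory(
    [](const Environment& env,
       const SdpVideoFormat& format) -> std::unique_ptr<VideoEncoder> {
      if (format.name == "VP8") {
        return CreateVp8Encoder(env);
      }
      if (format.name == "VP9") {
        return CreateVp9Encoder(env);
      }
      // The remaining format exercised by these tests is H264.
      return CreateH264Encoder(env);
    });

The Environment carries the clock, task queue factory and field trials that the codec wrappers previously had to obtain through globals or extra constructor arguments.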
TEST_P(EndToEndTestH264, SendsAndReceivesH264PacketizationMode0) { - cricket::VideoCodec codec = - cricket::CreateVideoCodec(cricket::kH264CodecName); - codec.SetParam(cricket::kH264FmtpPacketizationMode, "0"); + SdpVideoFormat codec(webrtc::kH264CodecName); + codec.parameters[webrtc::kH264FmtpPacketizationMode] = "0"; test::FunctionVideoEncoderFactory encoder_factory( - [codec]() { return H264Encoder::Create(codec); }); + [codec](const Environment& env, const SdpVideoFormat& format) { + return CreateH264Encoder(env, H264EncoderSettings::Parse(codec)); + }); test::FunctionVideoDecoderFactory decoder_factory( []() { return H264Decoder::Create(); }); - CodecObserver test(500, kVideoRotation_0, absl::nullopt, "H264", + CodecObserver test(500, kVideoRotation_0, std::nullopt, "H264", &encoder_factory, &decoder_factory); RunBaseTest(&test); } TEST_P(EndToEndTestH264, SendsAndReceivesH264PacketizationMode1) { - cricket::VideoCodec codec = - cricket::CreateVideoCodec(cricket::kH264CodecName); - codec.SetParam(cricket::kH264FmtpPacketizationMode, "1"); + SdpVideoFormat codec(webrtc::kH264CodecName); + codec.parameters[webrtc::kH264FmtpPacketizationMode] = "1"; test::FunctionVideoEncoderFactory encoder_factory( - [codec]() { return H264Encoder::Create(codec); }); + [codec](const Environment& env, const SdpVideoFormat& format) { + return CreateH264Encoder(env, H264EncoderSettings::Parse(codec)); + }); test::FunctionVideoDecoderFactory decoder_factory( []() { return H264Decoder::Create(); }); - CodecObserver test(500, kVideoRotation_0, absl::nullopt, "H264", + CodecObserver test(500, kVideoRotation_0, std::nullopt, "H264", &encoder_factory, &decoder_factory); RunBaseTest(&test); } diff --git a/video/end_to_end_tests/corruption_detection_tests.cc b/video/end_to_end_tests/corruption_detection_tests.cc new file mode 100644 index 0000000000..098d230622 --- /dev/null +++ b/video/end_to_end_tests/corruption_detection_tests.cc @@ -0,0 +1,123 @@ + +/* + * Copyright 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include + +#include "api/environment/environment.h" +#include "api/rtp_parameters.h" +#include "api/task_queue/task_queue_base.h" +#include "api/test/video/function_video_decoder_factory.h" +#include "api/test/video/function_video_encoder_factory.h" +#include "api/video/video_codec_type.h" +#include "api/video_codecs/sdp_video_format.h" +#include "call/video_receive_stream.h" +#include "call/video_send_stream.h" +#include "modules/video_coding/codecs/vp8/include/vp8.h" +#include "rtc_base/checks.h" +#include "rtc_base/task_queue_for_test.h" +#include "system_wrappers/include/sleep.h" +#include "test/call_test.h" +#include "test/gtest.h" +#include "test/video_test_constants.h" +#include "video/config/video_encoder_config.h" + +namespace webrtc { +namespace { +RtpExtension GetCorruptionExtension() { + return RtpExtension(RtpExtension::kCorruptionDetectionUri, + /*extension_id=*/1, + /*encrypted=*/true); +} +} // namespace + +class CorruptionDetectionTest : public test::CallTest { + public: + CorruptionDetectionTest() { RegisterRtpExtension(GetCorruptionExtension()); } +}; + +TEST_F( + CorruptionDetectionTest, + ReportsCorruptionStatsIfSendStreamIsConfiguredToEnableCorruptionDetection) { + class StatsObserver : public test::EndToEndTest { + public: + StatsObserver() + : EndToEndTest(test::VideoTestConstants::kLongTimeout), + encoder_factory_( + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }), + decoder_factory_( + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Decoder(env); + }) {} + + private: + void ModifyVideoConfigs( + VideoSendStream::Config* send_config, + std::vector* receive_configs, + VideoEncoderConfig* encoder_config) override { + encoder_config->codec_type = kVideoCodecVP8; + send_config->encoder_settings.enable_frame_instrumentation_generator = + true; + send_config->encoder_settings.encoder_factory = &encoder_factory_; + send_config->rtp.payload_name = "VP8"; + send_config->rtp.extensions.clear(); + send_config->rtp.extensions.push_back(GetCorruptionExtension()); + + for (auto& receive_config : *receive_configs) { + receive_config.decoder_factory = &decoder_factory_; + RTC_CHECK(!receive_config.decoders.empty()); + receive_config.decoders[0].video_format = + SdpVideoFormat(send_config->rtp.payload_name); + } + } + + void OnVideoStreamsCreated(VideoSendStream* send_stream, + const std::vector& + receive_streams) override { + receive_streams_ = receive_streams; + task_queue_ = TaskQueueBase::Current(); + } + + void PerformTest() override { + constexpr int kMaxIterations = 200; + bool corruption_score_reported = false; + for (int i = 0; i < kMaxIterations; ++i) { + SleepMs(10); + VideoReceiveStreamInterface::Stats stats; + SendTask(task_queue_, [&]() { + ASSERT_EQ(receive_streams_.size(), 1u); + stats = receive_streams_[0]->GetStats(); + }); + if (stats.corruption_score_count > 0) { + corruption_score_reported = true; + ASSERT_TRUE(stats.corruption_score_sum.has_value()); + EXPECT_TRUE(stats.corruption_score_squared_sum.has_value()); + double average_score = + *stats.corruption_score_sum / stats.corruption_score_count; + EXPECT_GE(average_score, 0); + EXPECT_LE(average_score, 1); + break; + } + } + EXPECT_TRUE(corruption_score_reported); + } + + std::vector receive_streams_; + TaskQueueBase* task_queue_ = nullptr; + test::FunctionVideoEncoderFactory encoder_factory_; + test::FunctionVideoDecoderFactory decoder_factory_; + } test; + + RunBaseTest(&test); +} + +} // namespace webrtc diff --git 
a/video/end_to_end_tests/extended_reports_tests.cc b/video/end_to_end_tests/extended_reports_tests.cc index e481282466..c595074fc6 100644 --- a/video/end_to_end_tests/extended_reports_tests.cc +++ b/video/end_to_end_tests/extended_reports_tests.cc @@ -12,19 +12,20 @@ #include #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/rtp_headers.h" #include "api/task_queue/task_queue_base.h" #include "api/test/simulated_network.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" #include "api/video_codecs/sdp_video_format.h" #include "call/call.h" #include "call/fake_network_pipe.h" #include "call/rtp_config.h" -#include "call/simulated_network.h" #include "call/simulated_packet_receiver.h" #include "call/video_receive_stream.h" #include "call/video_send_stream.h" @@ -37,6 +38,7 @@ #include "test/call_test.h" #include "test/field_trial.h" #include "test/gtest.h" +#include "test/network/simulated_network.h" #include "test/rtcp_packet_parser.h" #include "test/rtp_rtcp_observer.h" #include "test/video_test_constants.h" @@ -63,8 +65,9 @@ class RtcpXrObserver : public test::EndToEndTest { RtcpXrObserver(bool enable_rrtr, bool expect_target_bitrate, bool enable_zero_target_bitrate, - VideoEncoderConfig::ContentType content_type) - : EndToEndTest(test::VideoTestConstants::kDefaultTimeout), + VideoEncoderConfig::ContentType content_type, + TimeDelta timeout = test::VideoTestConstants::kDefaultTimeout) + : EndToEndTest(timeout), enable_rrtr_(enable_rrtr), expect_target_bitrate_(expect_target_bitrate), enable_zero_target_bitrate_(enable_zero_target_bitrate), @@ -76,14 +79,14 @@ class RtcpXrObserver : public test::EndToEndTest { sent_zero_rtcp_target_bitrate_(false), sent_rtcp_dlrr_(0), send_simulated_network_(nullptr) { - forward_transport_config_.link_capacity_kbps = 500; + forward_transport_config_.link_capacity = DataRate::KilobitsPerSec(500); forward_transport_config_.queue_delay_ms = 0; forward_transport_config_.loss_percent = 0; } private: // Receive stream should send RR packets (and RRTR packets if enabled). - Action OnReceiveRtcp(rtc::ArrayView packet) override { + Action OnReceiveRtcp(ArrayView packet) override { MutexLock lock(&mutex_); test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(packet)); @@ -100,7 +103,7 @@ class RtcpXrObserver : public test::EndToEndTest { return SEND_PACKET; } // Send stream should send SR packets (and DLRR packets if enabled). - Action OnSendRtcp(rtc::ArrayView packet) override { + Action OnSendRtcp(ArrayView packet) override { MutexLock lock(&mutex_); test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(packet)); @@ -109,7 +112,7 @@ class RtcpXrObserver : public test::EndToEndTest { enable_zero_target_bitrate_) { // Reduce bandwidth restriction to disable second stream after it was // enabled for some time. 
- forward_transport_config_.link_capacity_kbps = 200; + forward_transport_config_.link_capacity = DataRate::KilobitsPerSec(200); send_simulated_network_->SetConfig(forward_transport_config_); } @@ -259,7 +262,8 @@ TEST_F(ExtendedReportsEndToEndTest, TestExtendedReportsCanSignalZeroTargetBitrate) { RtcpXrObserver test(/*enable_rrtr=*/false, /*expect_target_bitrate=*/true, /*enable_zero_target_bitrate=*/true, - VideoEncoderConfig::ContentType::kScreen); + VideoEncoderConfig::ContentType::kScreen, + test::VideoTestConstants::kLongTimeout); RunBaseTest(&test); } } // namespace webrtc diff --git a/video/end_to_end_tests/fec_tests.cc b/video/end_to_end_tests/fec_tests.cc index 11d11dcc0d..98a9d2a85a 100644 --- a/video/end_to_end_tests/fec_tests.cc +++ b/video/end_to_end_tests/fec_tests.cc @@ -14,7 +14,6 @@ #include "api/test/simulated_network.h" #include "api/test/video/function_video_encoder_factory.h" #include "call/fake_network_pipe.h" -#include "call/simulated_network.h" #include "media/engine/internal_decoder_factory.h" #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/source/byte_io.h" @@ -25,6 +24,7 @@ #include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/network/simulated_network.h" #include "test/rtcp_packet_parser.h" #include "test/video_test_constants.h" @@ -51,16 +51,19 @@ class FecEndToEndTest : public test::CallTest { TEST_F(FecEndToEndTest, ReceivesUlpfec) { class UlpfecRenderObserver : public test::EndToEndTest, - public rtc::VideoSinkInterface { + public VideoSinkInterface { public: UlpfecRenderObserver() : EndToEndTest(test::VideoTestConstants::kDefaultTimeout), - encoder_factory_([]() { return VP8Encoder::Create(); }), + encoder_factory_( + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }), random_(0xcafef00d1), num_packets_sent_(0) {} private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { MutexLock lock(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -110,7 +113,7 @@ TEST_F(FecEndToEndTest, ReceivesUlpfec) { MutexLock lock(&mutex_); // Rendering frame with timestamp of packet that was dropped -> FEC // protection worked. - auto it = dropped_timestamps_.find(video_frame.timestamp()); + auto it = dropped_timestamps_.find(video_frame.rtp_timestamp()); if (it != dropped_timestamps_.end()) { observation_complete_.Set(); } @@ -166,7 +169,7 @@ TEST_F(FecEndToEndTest, ReceivesUlpfec) { } class FlexfecRenderObserver : public test::EndToEndTest, - public rtc::VideoSinkInterface { + public VideoSinkInterface { public: static constexpr uint32_t kVideoLocalSsrc = 123; static constexpr uint32_t kFlexfecLocalSsrc = 456; @@ -182,7 +185,7 @@ class FlexfecRenderObserver : public test::EndToEndTest, size_t GetNumFlexfecStreams() const override { return 1; } private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { MutexLock lock(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -255,7 +258,7 @@ class FlexfecRenderObserver : public test::EndToEndTest, return SEND_PACKET; } - Action OnReceiveRtcp(rtc::ArrayView data) override { + Action OnReceiveRtcp(ArrayView data) override { test::RtcpPacketParser parser; parser.Parse(data); @@ -289,7 +292,7 @@ class FlexfecRenderObserver : public test::EndToEndTest, MutexLock lock(&mutex_); // Rendering frame with timestamp of packet that was dropped -> FEC // protection worked. 
- auto it = dropped_timestamps_.find(video_frame.timestamp()); + auto it = dropped_timestamps_.find(video_frame.rtp_timestamp()); if (it != dropped_timestamps_.end()) { if (!expect_flexfec_rtcp_ || received_flexfec_rtcp_) { observation_complete_.Set(); @@ -372,10 +375,13 @@ TEST_F(FecEndToEndTest, ReceivedUlpfecPacketsNotNacked) { ulpfec_sequence_number_(0), has_last_sequence_number_(false), last_sequence_number_(0), - encoder_factory_([]() { return VP8Encoder::Create(); }) {} + encoder_factory_( + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }) {} private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { MutexLock lock_(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -443,7 +449,7 @@ TEST_F(FecEndToEndTest, ReceivedUlpfecPacketsNotNacked) { return SEND_PACKET; } - Action OnReceiveRtcp(rtc::ArrayView packet) override { + Action OnReceiveRtcp(ArrayView packet) override { MutexLock lock_(&mutex_); if (state_ == kVerifyUlpfecPacketNotInNackList) { test::RtcpPacketParser rtcp_parser; diff --git a/video/end_to_end_tests/frame_encryption_tests.cc b/video/end_to_end_tests/frame_encryption_tests.cc index 01f3db64aa..698d8c14c0 100644 --- a/video/end_to_end_tests/frame_encryption_tests.cc +++ b/video/end_to_end_tests/frame_encryption_tests.cc @@ -27,11 +27,14 @@ enum : int { // The first valid value is 1. }; class DecryptedFrameObserver : public test::EndToEndTest, - public rtc::VideoSinkInterface { + public VideoSinkInterface { public: DecryptedFrameObserver() : EndToEndTest(test::VideoTestConstants::kDefaultTimeout), - encoder_factory_([] { return VP8Encoder::Create(); }) {} + encoder_factory_( + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }) {} private: void ModifyVideoConfigs( @@ -53,7 +56,7 @@ class DecryptedFrameObserver : public test::EndToEndTest, recv_config.decoders.clear(); recv_config.decoders.push_back(decoder); recv_config.renderer = this; - recv_config.frame_decryptor = rtc::make_ref_counted(); + recv_config.frame_decryptor = make_ref_counted(); recv_config.crypto_options.sframe.require_frame_encryption = true; } } diff --git a/video/end_to_end_tests/histogram_tests.cc b/video/end_to_end_tests/histogram_tests.cc index 2dd9a2ecc4..5f281e267f 100644 --- a/video/end_to_end_tests/histogram_tests.cc +++ b/video/end_to_end_tests/histogram_tests.cc @@ -8,7 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "absl/types/optional.h" +#include + #include "api/test/video/function_video_encoder_factory.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "rtc_base/synchronization/mutex.h" @@ -42,7 +43,7 @@ void HistogramTest::VerifyHistogramStats(bool use_rtx, bool use_fec, bool screenshare) { class FrameObserver : public test::EndToEndTest, - public rtc::VideoSinkInterface { + public VideoSinkInterface { public: FrameObserver(bool use_rtx, bool use_fec, bool screenshare) : EndToEndTest(test::VideoTestConstants::kLongTimeout), @@ -50,7 +51,10 @@ void HistogramTest::VerifyHistogramStats(bool use_rtx, use_fec_(use_fec), screenshare_(screenshare), // This test uses NACK, so to send FEC we can't use a fake encoder. 
- encoder_factory_([]() { return VP8Encoder::Create(); }), + encoder_factory_( + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }), num_frames_received_(0) {} private: @@ -66,7 +70,7 @@ void HistogramTest::VerifyHistogramStats(bool use_rtx, } } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { if (MinMetricRunTimePassed() && MinNumberOfFramesReceived()) observation_complete_.Set(); @@ -107,7 +111,7 @@ void HistogramTest::VerifyHistogramStats(bool use_rtx, send_config->encoder_settings.encoder_factory = &encoder_factory_; send_config->rtp.payload_name = "VP8"; encoder_config->codec_type = kVideoCodecVP8; - (*receive_configs)[0].decoders[0].video_format = SdpVideoFormat("VP8"); + (*receive_configs)[0].decoders[0].video_format = SdpVideoFormat::VP8(); (*receive_configs)[0].rtp.red_payload_type = test::VideoTestConstants::kRedPayloadType; (*receive_configs)[0].rtp.ulpfec_payload_type = @@ -148,7 +152,7 @@ void HistogramTest::VerifyHistogramStats(bool use_rtx, const bool use_fec_; const bool screenshare_; test::FunctionVideoEncoderFactory encoder_factory_; - absl::optional start_runtime_ms_; + std::optional start_runtime_ms_; int num_frames_received_ RTC_GUARDED_BY(&mutex_); } test(use_rtx, use_fec, screenshare); diff --git a/video/end_to_end_tests/multi_codec_receive_tests.cc b/video/end_to_end_tests/multi_codec_receive_tests.cc index 307b5085c9..c4e4479d3a 100644 --- a/video/end_to_end_tests/multi_codec_receive_tests.cc +++ b/video/end_to_end_tests/multi_codec_receive_tests.cc @@ -13,7 +13,6 @@ #include "api/test/simulated_network.h" #include "api/test/video/function_video_encoder_factory.h" #include "call/fake_network_pipe.h" -#include "call/simulated_network.h" #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/video_coding/codecs/h264/include/h264.h" @@ -24,6 +23,7 @@ #include "test/call_test.h" #include "test/gmock.h" #include "test/gtest.h" +#include "test/network/simulated_network.h" #include "test/video_test_constants.h" using ::testing::Contains; @@ -62,7 +62,7 @@ int RemoveOlderOrEqual(uint32_t timestamp, std::vector* timestamps) { } class FrameObserver : public test::RtpRtcpObserver, - public rtc::VideoSinkInterface { + public VideoSinkInterface { public: FrameObserver() : test::RtpRtcpObserver(test::VideoTestConstants::kDefaultTimeout) {} @@ -76,7 +76,7 @@ class FrameObserver : public test::RtpRtcpObserver, private: // Sends kFramesToObserve. - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { MutexLock lock(&mutex_); RtpPacket rtp_packet; @@ -107,11 +107,11 @@ class FrameObserver : public test::RtpRtcpObserver, // Verifies that all sent frames are decoded and rendered. void OnFrame(const VideoFrame& rendered_frame) override { MutexLock lock(&mutex_); - EXPECT_THAT(sent_timestamps_, Contains(rendered_frame.timestamp())); + EXPECT_THAT(sent_timestamps_, Contains(rendered_frame.rtp_timestamp())); // Remove old timestamps too, only the newest decoded frame is rendered. 
num_rendered_frames_ += - RemoveOlderOrEqual(rendered_frame.timestamp(), &sent_timestamps_); + RemoveOlderOrEqual(rendered_frame.rtp_timestamp(), &sent_timestamps_); if (num_rendered_frames_ >= kFramesToObserve) { EXPECT_TRUE(sent_timestamps_.empty()) << "All sent frames not decoded."; @@ -120,8 +120,8 @@ class FrameObserver : public test::RtpRtcpObserver, } Mutex mutex_; - absl::optional last_timestamp_; // Only accessed from pacer thread. - absl::optional expected_payload_type_ RTC_GUARDED_BY(mutex_); + std::optional last_timestamp_; // Only accessed from pacer thread. + std::optional expected_payload_type_ RTC_GUARDED_BY(mutex_); int num_sent_frames_ RTC_GUARDED_BY(mutex_) = 0; int num_rendered_frames_ RTC_GUARDED_BY(mutex_) = 0; std::vector sent_timestamps_ RTC_GUARDED_BY(mutex_); @@ -199,23 +199,25 @@ void MultiCodecReceiveTest::RunTestWithCodecs( EXPECT_TRUE(!configs.empty()); test::FunctionVideoEncoderFactory encoder_factory( - [](const SdpVideoFormat& format) -> std::unique_ptr { + [](const Environment& env, + const SdpVideoFormat& format) -> std::unique_ptr { if (format.name == "VP8") { - return VP8Encoder::Create(); + return CreateVp8Encoder(env); } if (format.name == "VP9") { - return VP9Encoder::Create(); + return CreateVp9Encoder(env); } if (format.name == "H264") { - return H264Encoder::Create(); + return CreateH264Encoder(env); } RTC_DCHECK_NOTREACHED() << format.name; return nullptr; }); test::FunctionVideoDecoderFactory decoder_factory( - [](const SdpVideoFormat& format) -> std::unique_ptr { + [](const Environment& env, + const SdpVideoFormat& format) -> std::unique_ptr { if (format.name == "VP8") { - return VP8Decoder::Create(); + return CreateVp8Decoder(env); } if (format.name == "VP9") { return VP9Decoder::Create(); diff --git a/video/end_to_end_tests/multi_stream_tester.cc b/video/end_to_end_tests/multi_stream_tester.cc index 8d99329194..7d82085617 100644 --- a/video/end_to_end_tests/multi_stream_tester.cc +++ b/video/end_to_end_tests/multi_stream_tester.cc @@ -14,21 +14,20 @@ #include #include -#include "absl/memory/memory.h" -#include "api/rtc_event_log/rtc_event_log.h" -#include "api/task_queue/default_task_queue_factory.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/task_queue/task_queue_base.h" #include "api/test/create_frame_generator.h" #include "api/test/simulated_network.h" #include "api/test/video/function_video_encoder_factory.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" #include "call/fake_network_pipe.h" -#include "call/simulated_network.h" #include "media/engine/internal_decoder_factory.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "rtc_base/task_queue_for_test.h" #include "test/call_test.h" #include "test/encoder_settings.h" +#include "test/network/simulated_network.h" #include "test/video_test_constants.h" namespace webrtc { @@ -43,17 +42,14 @@ MultiStreamTester::MultiStreamTester() { MultiStreamTester::~MultiStreamTester() = default; void MultiStreamTester::RunTest() { - webrtc::RtcEventLogNull event_log; - auto task_queue_factory = CreateDefaultTaskQueueFactory(); + Environment env = CreateEnvironment(); // Use high prioirity since this task_queue used for fake network delivering // at correct time. Those test tasks should be prefered over code under test // to make test more stable. 
- auto task_queue = task_queue_factory->CreateTaskQueue( + auto task_queue = env.task_queue_factory().CreateTaskQueue( "TaskQueue", TaskQueueFactory::Priority::HIGH); - Call::Config config(&event_log); - test::ScopedKeyValueConfig field_trials; - config.trials = &field_trials; - config.task_queue_factory = task_queue_factory.get(); + CallConfig sender_config(env); + CallConfig receiver_config(env); std::unique_ptr sender_call; std::unique_ptr receiver_call; std::unique_ptr sender_transport; @@ -63,14 +59,16 @@ void MultiStreamTester::RunTest() { VideoReceiveStreamInterface* receive_streams[kNumStreams]; test::FrameGeneratorCapturer* frame_generators[kNumStreams]; test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }); std::unique_ptr bitrate_allocator_factory = CreateBuiltinVideoBitrateAllocatorFactory(); InternalDecoderFactory decoder_factory; SendTask(task_queue.get(), [&]() { - sender_call = absl::WrapUnique(Call::Create(config)); - receiver_call = absl::WrapUnique(Call::Create(config)); + sender_call = Call::Create(std::move(sender_config)); + receiver_call = Call::Create(std::move(receiver_config)); sender_transport = CreateSendTransport(task_queue.get(), sender_call.get()); receiver_transport = CreateReceiveTransport(task_queue.get(), receiver_call.get()); @@ -116,10 +114,10 @@ void MultiStreamTester::RunTest() { receive_streams[i]->Start(); auto* frame_generator = new test::FrameGeneratorCapturer( - Clock::GetRealTimeClock(), - test::CreateSquareFrameGenerator(width, height, absl::nullopt, - absl::nullopt), - 30, *task_queue_factory); + &env.clock(), + test::CreateSquareFrameGenerator(width, height, std::nullopt, + std::nullopt), + 30, env.task_queue_factory()); frame_generators[i] = frame_generator; send_streams[i]->SetSource(frame_generator, DegradationPreference::MAINTAIN_FRAMERATE); diff --git a/video/end_to_end_tests/multi_stream_tests.cc b/video/end_to_end_tests/multi_stream_tests.cc index b997538d96..35f017758b 100644 --- a/video/end_to_end_tests/multi_stream_tests.cc +++ b/video/end_to_end_tests/multi_stream_tests.cc @@ -29,7 +29,7 @@ namespace webrtc { // Each renderer verifies that it receives the expected resolution, and as soon // as every renderer has received a frame, the test finishes. 
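multi_stream_tester.cc's RunTest() above swaps the hand-assembled RtcEventLogNull, field trials and task queue factory for a single Environment from which CallConfig and Call are built. Gathered into one place, the post-patch construction sequence is roughly as follows (a sketch restating calls already present in the hunk; include paths are assumed where the hunk does not show them):

#include <memory>
#include <utility>

#include "api/environment/environment.h"
#include "api/environment/environment_factory.h"
#include "call/call.h"
#include "call/call_config.h"

namespace webrtc {

void SketchEnvironmentBasedCallSetup() {
  // A single Environment bundles the clock, task queue factory and field
  // trials that were previously created piecemeal.
  Environment env = CreateEnvironment();

  // Per-call configuration is constructed from the Environment...
  CallConfig sender_config(env);
  CallConfig receiver_config(env);

  // ...and Call::Create() consumes it by value.
  std::unique_ptr<Call> sender_call = Call::Create(std::move(sender_config));
  std::unique_ptr<Call> receiver_call = Call::Create(std::move(receiver_config));

  // Helpers draw their dependencies from the same Environment, e.g.
  // env.clock() for the frame generator and env.task_queue_factory() for the
  // high-priority delivery task queue in the hunk above.
}

}  // namespace webrtc

Teardown and the actual stream wiring are unchanged by the patch and omitted here.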
TEST(MultiStreamEndToEndTest, SendsAndReceivesMultipleStreams) { - class VideoOutputObserver : public rtc::VideoSinkInterface { + class VideoOutputObserver : public VideoSinkInterface { public: VideoOutputObserver(const MultiStreamTester::CodecSettings& settings, uint32_t ssrc, @@ -51,7 +51,7 @@ TEST(MultiStreamEndToEndTest, SendsAndReceivesMultipleStreams) { const MultiStreamTester::CodecSettings& settings_; const uint32_t ssrc_; test::FrameGeneratorCapturer** const frame_generator_; - rtc::Event done_; + Event done_; }; class Tester : public MultiStreamTester { diff --git a/video/end_to_end_tests/network_state_tests.cc b/video/end_to_end_tests/network_state_tests.cc index 4d43f7609c..4def7097af 100644 --- a/video/end_to_end_tests/network_state_tests.cc +++ b/video/end_to_end_tests/network_state_tests.cc @@ -17,7 +17,6 @@ #include "api/test/simulated_network.h" #include "api/video_codecs/video_encoder.h" #include "call/fake_network_pipe.h" -#include "call/simulated_network.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" @@ -25,6 +24,7 @@ #include "test/call_test.h" #include "test/fake_encoder.h" #include "test/gtest.h" +#include "test/network/simulated_network.h" #include "test/video_encoder_proxy_factory.h" namespace webrtc { @@ -36,13 +36,13 @@ class NetworkStateEndToEndTest : public test::CallTest { protected: class UnusedTransport : public Transport { private: - bool SendRtp(rtc::ArrayView packet, + bool SendRtp(ArrayView packet, const PacketOptions& options) override { ADD_FAILURE() << "Unexpected RTP sent."; return false; } - bool SendRtcp(rtc::ArrayView packet) override { + bool SendRtcp(ArrayView packet) override { ADD_FAILURE() << "Unexpected RTCP sent."; return false; } @@ -61,14 +61,14 @@ class NetworkStateEndToEndTest : public test::CallTest { } private: - bool SendRtp(rtc::ArrayView packet, + bool SendRtp(ArrayView packet, const PacketOptions& options) override { MutexLock lock(&mutex_); need_rtp_ = false; return true; } - bool SendRtcp(rtc::ArrayView packet) override { + bool SendRtcp(ArrayView packet) override { MutexLock lock(&mutex_); need_rtcp_ = false; return true; @@ -94,7 +94,7 @@ void NetworkStateEndToEndTest::VerifyNewVideoSendStreamsRespectNetworkState( SendTask(task_queue(), [this, network_to_bring_up, &encoder_factory, transport]() { - CreateSenderCall(Call::Config(send_event_log_.get())); + CreateSenderCall(); sender_call_->SignalChannelNetworkState(network_to_bring_up, kNetworkUp); CreateSendConfig(1, 0, 0, transport); @@ -155,11 +155,11 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { static const int kNumAcceptedDowntimeRtcp = 1; class NetworkStateTest : public test::EndToEndTest, public test::FakeEncoder { public: - explicit NetworkStateTest(TaskQueueBase* task_queue) + explicit NetworkStateTest(const Environment& env, TaskQueueBase* task_queue) : EndToEndTest(test::VideoTestConstants::kDefaultTimeout), - FakeEncoder(Clock::GetRealTimeClock()), + FakeEncoder(env), e2e_test_task_queue_(task_queue), - task_queue_(CreateDefaultTaskQueueFactory()->CreateTaskQueue( + task_queue_(env.task_queue_factory().CreateTaskQueue( "NetworkStateTest", TaskQueueFactory::Priority::NORMAL)), sender_call_(nullptr), @@ -172,7 +172,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { receiver_rtcp_(0), down_frames_(0) {} - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { MutexLock lock(&test_mutex_); RtpPacket rtp_packet; 
EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -183,19 +183,19 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { return SEND_PACKET; } - Action OnSendRtcp(rtc::ArrayView packet) override { + Action OnSendRtcp(ArrayView packet) override { MutexLock lock(&test_mutex_); ++sender_rtcp_; packet_event_.Set(); return SEND_PACKET; } - Action OnReceiveRtp(rtc::ArrayView packet) override { + Action OnReceiveRtp(ArrayView packet) override { ADD_FAILURE() << "Unexpected receiver RTP, should not be sending."; return SEND_PACKET; } - Action OnReceiveRtcp(rtc::ArrayView packet) override { + Action OnReceiveRtcp(ArrayView packet) override { MutexLock lock(&test_mutex_); ++receiver_rtcp_; packet_event_.Set(); @@ -299,7 +299,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { private: void WaitForPacketsOrSilence(bool sender_down, bool receiver_down) { - int64_t initial_time_ms = clock_->TimeInMilliseconds(); + int64_t initial_time_ms = env_.clock().TimeInMilliseconds(); int initial_sender_rtp; int initial_sender_rtcp; int initial_receiver_rtcp; @@ -313,7 +313,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { bool receiver_done = false; while (!sender_done || !receiver_done) { packet_event_.Wait(TimeDelta::Millis(kSilenceTimeoutMs)); - int64_t time_now_ms = clock_->TimeInMilliseconds(); + int64_t time_now_ms = env_.clock().TimeInMilliseconds(); MutexLock lock(&test_mutex_); if (sender_down) { ASSERT_LE(sender_rtp_ - initial_sender_rtp - sender_padding_, @@ -348,8 +348,8 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { TaskQueueBase* const e2e_test_task_queue_; std::unique_ptr task_queue_; Mutex test_mutex_; - rtc::Event encoded_frames_; - rtc::Event packet_event_; + Event encoded_frames_; + Event packet_event_; Call* sender_call_; Call* receiver_call_; test::VideoEncoderProxyFactory encoder_factory_; @@ -359,7 +359,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { int sender_rtcp_ RTC_GUARDED_BY(test_mutex_); int receiver_rtcp_ RTC_GUARDED_BY(test_mutex_); int down_frames_ RTC_GUARDED_BY(test_mutex_); - } test(task_queue()); + } test(env(), task_queue()); RunBaseTest(&test); } @@ -367,7 +367,7 @@ TEST_F(NetworkStateEndToEndTest, RespectsNetworkState) { TEST_F(NetworkStateEndToEndTest, NewVideoSendStreamsRespectVideoNetworkDown) { class UnusedEncoder : public test::FakeEncoder { public: - UnusedEncoder() : FakeEncoder(Clock::GetRealTimeClock()) {} + explicit UnusedEncoder(const Environment& env) : FakeEncoder(env) {} int32_t InitEncode(const VideoCodec* config, const Settings& settings) override { @@ -381,7 +381,7 @@ TEST_F(NetworkStateEndToEndTest, NewVideoSendStreamsRespectVideoNetworkDown) { } }; - UnusedEncoder unused_encoder; + UnusedEncoder unused_encoder(env()); UnusedTransport unused_transport; VerifyNewVideoSendStreamsRespectNetworkState( MediaType::AUDIO, &unused_encoder, &unused_transport); @@ -390,8 +390,8 @@ TEST_F(NetworkStateEndToEndTest, NewVideoSendStreamsRespectVideoNetworkDown) { TEST_F(NetworkStateEndToEndTest, NewVideoSendStreamsIgnoreAudioNetworkDown) { class RequiredEncoder : public test::FakeEncoder { public: - RequiredEncoder() - : FakeEncoder(Clock::GetRealTimeClock()), encoded_frame_(false) {} + explicit RequiredEncoder(const Environment& env) + : FakeEncoder(env), encoded_frame_(false) {} ~RequiredEncoder() { if (!encoded_frame_) { ADD_FAILURE() << "Didn't encode an expected frame"; @@ -408,7 +408,7 @@ TEST_F(NetworkStateEndToEndTest, NewVideoSendStreamsIgnoreAudioNetworkDown) { }; RequiredTransport required_transport(true /*rtp*/, 
false /*rtcp*/); - RequiredEncoder required_encoder; + RequiredEncoder required_encoder(env()); VerifyNewVideoSendStreamsRespectNetworkState( MediaType::VIDEO, &required_encoder, &required_transport); } diff --git a/video/end_to_end_tests/resolution_bitrate_limits_tests.cc b/video/end_to_end_tests/resolution_bitrate_limits_tests.cc index e110fb759c..128406249d 100644 --- a/video/end_to_end_tests/resolution_bitrate_limits_tests.cc +++ b/video/end_to_end_tests/resolution_bitrate_limits_tests.cc @@ -31,8 +31,7 @@ void SetEncoderSpecific(VideoEncoderConfig* encoder_config, VideoCodecVP9 vp9 = VideoEncoder::GetDefaultVp9Settings(); vp9.numberOfSpatialLayers = num_spatial_layers; encoder_config->encoder_specific_settings = - rtc::make_ref_counted( - vp9); + make_ref_counted(vp9); } } @@ -103,13 +102,13 @@ class InitEncodeTest : public test::EndToEndTest, public test::FakeEncoder { public: struct Bitrate { - const absl::optional min; - const absl::optional max; + const std::optional min; + const std::optional max; }; struct TestConfig { const bool active; const Bitrate bitrate; - const absl::optional scalability_mode; + const std::optional scalability_mode; }; struct Expectation { const uint32_t pixels = 0; @@ -117,15 +116,17 @@ class InitEncodeTest : public test::EndToEndTest, const Bitrate ne_bitrate; }; - InitEncodeTest(const std::string& payload_name, + InitEncodeTest(const Environment& env, + const std::string& payload_name, const std::vector& configs, const std::vector& expectations) : EndToEndTest(test::VideoTestConstants::kDefaultTimeout), - FakeEncoder(Clock::GetRealTimeClock()), + FakeEncoder(env), encoder_factory_(this), payload_name_(payload_name), configs_(configs), - expectations_(expectations) {} + expectations_(expectations), + encoder_info_override_(env.field_trials()) {} void OnFrameGeneratorCapturerCreated( test::FrameGeneratorCapturer* frame_generator_capturer) override { @@ -134,8 +135,8 @@ class InitEncodeTest : public test::EndToEndTest, frame_generator_capturer->ChangeResolution(1280, 720); } - void OnSinkWantsChanged(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override {} + void OnSinkWantsChanged(VideoSinkInterface* sink, + const VideoSinkWants& wants) override {} size_t GetNumVideoStreams() const override { return SupportsSpatialLayers(payload_name_) ? 
1 : configs_.size(); @@ -145,17 +146,13 @@ class InitEncodeTest : public test::EndToEndTest, VideoSendStream::Config* send_config, std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { - webrtc::VideoEncoder::EncoderInfo encoder_info; send_config->encoder_settings.encoder_factory = &encoder_factory_; send_config->rtp.payload_name = payload_name_; send_config->rtp.payload_type = test::VideoTestConstants::kVideoSendPayloadType; const VideoCodecType codec_type = PayloadStringToCodecType(payload_name_); encoder_config->codec_type = codec_type; - encoder_config->video_stream_factory = - rtc::make_ref_counted( - payload_name_, /*max qp*/ 0, /*screencast*/ false, - /*screenshare enabled*/ false, encoder_info); + encoder_config->video_stream_factory = nullptr; encoder_config->max_bitrate_bps = -1; if (configs_.size() == 1 && configs_[0].bitrate.max) encoder_config->max_bitrate_bps = configs_[0].bitrate.max->bps(); @@ -222,7 +219,7 @@ TEST_P(ResolutionBitrateLimitsTest, LimitsApplied) { "min_bitrate_bps:32000," "max_bitrate_bps:3333000/"); - InitEncodeTest test(payload_name_, {{.active = true}}, + InitEncodeTest test(env(), payload_name_, {{.active = true}}, // Expectations: {{.pixels = 1280 * 720, .eq_bitrate = {DataRate::KilobitsPerSec(32), @@ -232,7 +229,7 @@ TEST_P(ResolutionBitrateLimitsTest, LimitsApplied) { TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, OneStreamDefaultMaxBitrateAppliedForOneSpatialLayer) { - InitEncodeTest test("VP9", + InitEncodeTest test(env(), "VP9", {{.active = true, .bitrate = {DataRate::KilobitsPerSec(30), DataRate::KilobitsPerSec(3000)}, @@ -247,14 +244,14 @@ TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, OneStreamSvcMaxBitrateAppliedForTwoSpatialLayers) { InitEncodeTest test( - "VP9", + env(), "VP9", {{.active = true, .bitrate = {DataRate::KilobitsPerSec(30), DataRate::KilobitsPerSec(3000)}, .scalability_mode = ScalabilityMode::kL2T1}}, // Expectations: {{.pixels = 1280 * 720, - .ne_bitrate = {absl::nullopt, DataRate::KilobitsPerSec(3000)}}}); + .ne_bitrate = {std::nullopt, DataRate::KilobitsPerSec(3000)}}}); RunBaseTest(&test); } TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, @@ -267,7 +264,8 @@ TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, "max_bitrate_bps:3333000/"); InitEncodeTest test( - "VP9", {{.active = true, .scalability_mode = ScalabilityMode::kL1T1}}, + env(), "VP9", + {{.active = true, .scalability_mode = ScalabilityMode::kL1T1}}, // Expectations: {{.pixels = 1280 * 720, .eq_bitrate = {DataRate::KilobitsPerSec(32), @@ -285,7 +283,8 @@ TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, "max_bitrate_bps:2222000|3333000/"); InitEncodeTest test( - "VP9", {{.active = true, .scalability_mode = ScalabilityMode::kL2T1}}, + env(), "VP9", + {{.active = true, .scalability_mode = ScalabilityMode::kL2T1}}, // Expectations: {{.pixels = 640 * 360, .ne_bitrate = {DataRate::KilobitsPerSec(31), @@ -297,7 +296,7 @@ TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, } TEST_P(ResolutionBitrateLimitsTest, EncodingsApplied) { - InitEncodeTest test(payload_name_, + InitEncodeTest test(env(), payload_name_, {{.active = true, .bitrate = {DataRate::KilobitsPerSec(22), DataRate::KilobitsPerSec(3555)}}}, @@ -316,7 +315,7 @@ TEST_P(ResolutionBitrateLimitsTest, IntersectionApplied) { "min_bitrate_bps:32000," "max_bitrate_bps:3333000/"); - InitEncodeTest test(payload_name_, + InitEncodeTest test(env(), payload_name_, {{.active = true, .bitrate = 
{DataRate::KilobitsPerSec(22), DataRate::KilobitsPerSec(1555)}}}, @@ -335,7 +334,7 @@ TEST_P(ResolutionBitrateLimitsTest, LimitsAppliedMiddleActive) { "min_bitrate_bps:21000|32000," "max_bitrate_bps:2222000|3333000/"); - InitEncodeTest test(payload_name_, + InitEncodeTest test(env(), payload_name_, {{.active = false}, {.active = true}, {.active = false}}, // Expectations: {{.pixels = 640 * 360, @@ -344,35 +343,81 @@ TEST_P(ResolutionBitrateLimitsTest, LimitsAppliedMiddleActive) { RunBaseTest(&test); } -TEST_P(ResolutionBitrateLimitsTest, IntersectionAppliedMiddleActive) { +TEST_P(ResolutionBitrateLimitsTest, EncodingMinMaxBitrateAppliedMiddleActive) { webrtc::test::ScopedFieldTrials field_trials( "WebRTC-GetEncoderInfoOverride/" "frame_size_pixels:230400|921600," "min_start_bitrate_bps:0|0," "min_bitrate_bps:31000|32000," - "max_bitrate_bps:2222000|3333000/"); + "max_bitrate_bps:1111000|3333000/"); - InitEncodeTest test(payload_name_, - {{.active = false}, + InitEncodeTest test(env(), payload_name_, + {{.active = false, + .bitrate = {DataRate::KilobitsPerSec(28), + DataRate::KilobitsPerSec(1000)}}, {.active = true, - .bitrate = {DataRate::KilobitsPerSec(30), + .bitrate = {DataRate::KilobitsPerSec(28), DataRate::KilobitsPerSec(1555)}}, {.active = false}}, // Expectations: {{.pixels = 640 * 360, - .eq_bitrate = {DataRate::KilobitsPerSec(31), + .eq_bitrate = {DataRate::KilobitsPerSec(28), DataRate::KilobitsPerSec(1555)}}}); RunBaseTest(&test); } +TEST_P(ResolutionBitrateLimitsTest, MinBitrateNotAboveEncodingMax) { + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-GetEncoderInfoOverride/" + "frame_size_pixels:230400|921600," + "min_start_bitrate_bps:0|0," + "min_bitrate_bps:31000|32000," + "max_bitrate_bps:1111000|3333000/"); + + InitEncodeTest test( + env(), payload_name_, + {{.active = false}, + {.active = true, + .bitrate = {std::nullopt, DataRate::KilobitsPerSec(25)}}, + {.active = false}}, + // Expectations: + {{.pixels = 640 * 360, + .eq_bitrate = {DataRate::KilobitsPerSec(25), + DataRate::KilobitsPerSec(25)}}}); + RunBaseTest(&test); +} + +TEST_P(ResolutionBitrateLimitsTest, MaxBitrateNotBelowEncodingMin) { + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-GetEncoderInfoOverride/" + "frame_size_pixels:230400|921600," + "min_start_bitrate_bps:0|0," + "min_bitrate_bps:21000|22000," + "max_bitrate_bps:31000|32000/"); + + InitEncodeTest test( + env(), payload_name_, + {{.active = false, + .bitrate = {DataRate::KilobitsPerSec(50), std::nullopt}}, + {.active = true, + .bitrate = {DataRate::KilobitsPerSec(50), std::nullopt}}, + {.active = false}}, + // Expectations: + {{.pixels = 640 * 360, + .eq_bitrate = {DataRate::KilobitsPerSec(50), + DataRate::KilobitsPerSec(50)}}}); + RunBaseTest(&test); +} + TEST_P(ResolutionBitrateLimitsTest, DefaultLimitsAppliedMiddleActive) { - const absl::optional + const std::optional kDefaultSinglecastLimits360p = EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( PayloadStringToCodecType(payload_name_), 640 * 360); InitEncodeTest test( - payload_name_, {{.active = false}, {.active = true}, {.active = false}}, + env(), payload_name_, + {{.active = false}, {.active = true}, {.active = false}}, // Expectations: {{.pixels = 640 * 360, .eq_bitrate = { @@ -384,13 +429,13 @@ TEST_P(ResolutionBitrateLimitsTest, DefaultLimitsAppliedMiddleActive) { TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, DefaultLimitsAppliedForOneSpatialLayer) { - const absl::optional + const std::optional kDefaultSinglecastLimits720p = 
EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( PayloadStringToCodecType("VP9"), 1280 * 720); InitEncodeTest test( - "VP9", + env(), "VP9", {{.active = true, .scalability_mode = ScalabilityMode::kL1T3}, {.active = false}}, // Expectations: @@ -410,7 +455,7 @@ TEST_P(ResolutionBitrateLimitsTest, LimitsAppliedHighestActive) { "min_bitrate_bps:31000|32000," "max_bitrate_bps:2222000|3333000/"); - InitEncodeTest test(payload_name_, + InitEncodeTest test(env(), payload_name_, {{.active = false}, {.active = false}, {.active = true}}, // Expectations: {{.pixels = 1280 * 720, @@ -419,23 +464,27 @@ TEST_P(ResolutionBitrateLimitsTest, LimitsAppliedHighestActive) { RunBaseTest(&test); } -TEST_P(ResolutionBitrateLimitsTest, IntersectionAppliedHighestActive) { +TEST_P(ResolutionBitrateLimitsTest, EncodingMinMaxBitrateAppliedHighestActive) { webrtc::test::ScopedFieldTrials field_trials( "WebRTC-GetEncoderInfoOverride/" "frame_size_pixels:230400|921600," "min_start_bitrate_bps:0|0," "min_bitrate_bps:31000|32000," - "max_bitrate_bps:2222000|3333000/"); - - InitEncodeTest test(payload_name_, - {{.active = false}, - {.active = false}, + "max_bitrate_bps:555000|1111000/"); + + InitEncodeTest test(env(), payload_name_, + {{.active = false, + .bitrate = {DataRate::KilobitsPerSec(28), + DataRate::KilobitsPerSec(500)}}, + {.active = false, + .bitrate = {DataRate::KilobitsPerSec(28), + DataRate::KilobitsPerSec(1000)}}, {.active = true, - .bitrate = {DataRate::KilobitsPerSec(30), + .bitrate = {DataRate::KilobitsPerSec(28), DataRate::KilobitsPerSec(1555)}}}, // Expectations: {{.pixels = 1280 * 720, - .eq_bitrate = {DataRate::KilobitsPerSec(32), + .eq_bitrate = {DataRate::KilobitsPerSec(28), DataRate::KilobitsPerSec(1555)}}}); RunBaseTest(&test); } @@ -448,7 +497,8 @@ TEST_P(ResolutionBitrateLimitsTest, LimitsNotAppliedLowestActive) { "min_bitrate_bps:31000|32000," "max_bitrate_bps:2222000|3333000/"); - InitEncodeTest test(payload_name_, {{.active = true}, {.active = false}}, + InitEncodeTest test(env(), payload_name_, + {{.active = true}, {.active = false}}, // Expectations: {{.pixels = 640 * 360, .ne_bitrate = {DataRate::KilobitsPerSec(31), @@ -469,7 +519,7 @@ TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, "max_bitrate_bps:2222000|3333000/"); InitEncodeTest test( - "VP9", + env(), "VP9", {{.active = true, .scalability_mode = ScalabilityMode::kL1T1}, {.active = false}}, // Expectations: @@ -489,7 +539,7 @@ TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, "max_bitrate_bps:2222000|3333000/"); InitEncodeTest test( - "VP9", + env(), "VP9", {{.active = true, .scalability_mode = ScalabilityMode::kL2T1}, {.active = false}}, // Expectations: @@ -512,7 +562,8 @@ TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, "max_bitrate_bps:133000/"); InitEncodeTest test( - "AV1", {{.active = true, .scalability_mode = ScalabilityMode::kL1T1}}, + env(), "AV1", + {{.active = true, .scalability_mode = ScalabilityMode::kL1T1}}, // Expectations: {{.pixels = 1280 * 720, .eq_bitrate = {DataRate::KilobitsPerSec(32), @@ -521,7 +572,7 @@ TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, } TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, - LimitsAppliedForAv1Simulcast) { + LimitsAppliedForAv1SingleSpatialLayer) { webrtc::test::ScopedFieldTrials field_trials( "WebRTC-GetEncoderInfoOverride/" "frame_size_pixels:230400|921600," @@ -530,7 +581,7 @@ TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, "max_bitrate_bps:400000|1200000/"); InitEncodeTest test( - "AV1", + env(), "AV1", {{.active = true, 
.scalability_mode = ScalabilityMode::kL1T1}, {.active = false}}, // Expectations: @@ -540,6 +591,28 @@ TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, RunBaseTest(&test); } +TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, + EncodingMinMaxBitrateAppliedForAv1SingleSpatialLayer) { + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-GetEncoderInfoOverride/" + "frame_size_pixels:921600," + "min_start_bitrate_bps:0," + "min_bitrate_bps:32000," + "max_bitrate_bps:99000/"); + + InitEncodeTest test(env(), "AV1", + {{.active = true, + .bitrate = {DataRate::KilobitsPerSec(28), + DataRate::KilobitsPerSec(100)}, + .scalability_mode = ScalabilityMode::kL1T1}, + {.active = false}}, + // Expectations: + {{.pixels = 1280 * 720, + .eq_bitrate = {DataRate::KilobitsPerSec(28), + DataRate::KilobitsPerSec(100)}}}); + RunBaseTest(&test); +} + TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, LimitsNotAppliedForAv1MultipleSpatialLayers) { webrtc::test::ScopedFieldTrials field_trials( @@ -550,7 +623,7 @@ TEST_F(ResolutionBitrateLimitsWithScalabilityModeTest, "max_bitrate_bps:900000|1333000/"); InitEncodeTest test( - "AV1", + env(), "AV1", {{.active = true, .scalability_mode = ScalabilityMode::kL2T1}, {.active = false}}, // Expectations: @@ -571,7 +644,8 @@ TEST_P(ResolutionBitrateLimitsTest, LimitsNotAppliedSimulcast) { "min_bitrate_bps:31000|32000," "max_bitrate_bps:2222000|3333000/"); - InitEncodeTest test(payload_name_, {{.active = true}, {.active = true}}, + InitEncodeTest test(env(), payload_name_, + {{.active = true}, {.active = true}}, // Expectations: {{.pixels = 640 * 360, .ne_bitrate = {DataRate::KilobitsPerSec(31), diff --git a/video/end_to_end_tests/retransmission_tests.cc b/video/end_to_end_tests/retransmission_tests.cc index 10828fa005..7c50a7c034 100644 --- a/video/end_to_end_tests/retransmission_tests.cc +++ b/video/end_to_end_tests/retransmission_tests.cc @@ -16,7 +16,6 @@ #include "api/test/video/function_video_encoder_factory.h" #include "api/units/time_delta.h" #include "call/fake_network_pipe.h" -#include "call/simulated_network.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "rtc_base/event.h" @@ -25,6 +24,7 @@ #include "test/call_test.h" #include "test/field_trial.h" #include "test/gtest.h" +#include "test/network/simulated_network.h" #include "test/rtcp_packet_parser.h" #include "test/video_test_constants.h" @@ -60,7 +60,7 @@ TEST_F(RetransmissionEndToEndTest, ReceivesAndRetransmitsNack) { nacks_left_(kNumberOfNacksToObserve) {} private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { MutexLock lock(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -97,7 +97,7 @@ TEST_F(RetransmissionEndToEndTest, ReceivesAndRetransmitsNack) { return SEND_PACKET; } - Action OnReceiveRtcp(rtc::ArrayView packet) override { + Action OnReceiveRtcp(ArrayView packet) override { MutexLock lock(&mutex_); test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(packet)); @@ -145,7 +145,7 @@ TEST_F(RetransmissionEndToEndTest, ReceivesNackAndRetransmitsAudio) { size_t GetNumVideoStreams() const override { return 0; } size_t GetNumAudioStreams() const override { return 1; } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -164,7 +164,7 @@ TEST_F(RetransmissionEndToEndTest, ReceivesNackAndRetransmitsAudio) { 
nack.SetMediaSsrc(remote_ssrc_); uint16_t nack_list[] = {*sequence_number_to_retransmit_}; nack.SetPacketIds(nack_list, 1); - rtc::Buffer buffer = nack.Build(); + Buffer buffer = nack.Build(); EXPECT_TRUE(receive_transport_->SendRtcp(buffer)); } @@ -191,7 +191,7 @@ TEST_F(RetransmissionEndToEndTest, ReceivesNackAndRetransmitsAudio) { uint32_t local_ssrc_; uint32_t remote_ssrc_; Transport* receive_transport_; - absl::optional sequence_number_to_retransmit_; + std::optional sequence_number_to_retransmit_; } test; RunBaseTest(&test); @@ -212,7 +212,7 @@ TEST_F(RetransmissionEndToEndTest, receive_stream_ = receive_streams[0]; } - Action OnReceiveRtcp(rtc::ArrayView packet) override { + Action OnReceiveRtcp(ArrayView packet) override { test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(packet)); if (parser.pli()->num_packets() > 0) @@ -233,7 +233,7 @@ TEST_F(RetransmissionEndToEndTest, void PerformTest() override { start_time_ = clock_->TimeInMilliseconds(); task_queue_->PostTask([this] { Run(); }); - test_done_.Wait(rtc::Event::kForever); + test_done_.Wait(Event::kForever); } void Run() { @@ -261,7 +261,7 @@ TEST_F(RetransmissionEndToEndTest, VideoSendStream* send_stream_; VideoReceiveStreamInterface* receive_stream_; TaskQueueBase* const task_queue_; - rtc::Event test_done_; + Event test_done_; bool frame_decoded_ = false; int64_t start_time_ = 0; } test(task_queue()); @@ -273,7 +273,7 @@ void RetransmissionEndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) { static const int kPacketsToDrop = 1; class PliObserver : public test::EndToEndTest, - public rtc::VideoSinkInterface { + public VideoSinkInterface { public: explicit PliObserver(int rtp_history_ms) : EndToEndTest(test::VideoTestConstants::kLongTimeout), @@ -284,7 +284,7 @@ void RetransmissionEndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) { received_pli_(false) {} private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { MutexLock lock(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -302,7 +302,7 @@ void RetransmissionEndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) { return SEND_PACKET; } - Action OnReceiveRtcp(rtc::ArrayView packet) override { + Action OnReceiveRtcp(ArrayView packet) override { MutexLock lock(&mutex_); test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(packet)); @@ -316,7 +316,7 @@ void RetransmissionEndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) { void OnFrame(const VideoFrame& video_frame) override { MutexLock lock(&mutex_); if (received_pli_ && - video_frame.timestamp() > highest_dropped_timestamp_) { + video_frame.rtp_timestamp() > highest_dropped_timestamp_) { observation_complete_.Set(); } if (!received_pli_) @@ -363,7 +363,7 @@ void RetransmissionEndToEndTest::DecodesRetransmittedFrame(bool enable_rtx, bool enable_red) { static const int kDroppedFrameNumber = 10; class RetransmissionObserver : public test::EndToEndTest, - public rtc::VideoSinkInterface { + public VideoSinkInterface { public: RetransmissionObserver(bool enable_rtx, bool enable_red) : EndToEndTest(test::VideoTestConstants::kDefaultTimeout), @@ -372,12 +372,15 @@ void RetransmissionEndToEndTest::DecodesRetransmittedFrame(bool enable_rtx, enable_rtx ? 
test::VideoTestConstants::kSendRtxSsrcs[0] : test::VideoTestConstants::kVideoSendSsrcs[0]), retransmission_payload_type_(GetPayloadType(enable_rtx, enable_red)), - encoder_factory_([]() { return VP8Encoder::Create(); }), + encoder_factory_( + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }), marker_bits_observed_(0), retransmitted_timestamp_(0) {} private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { MutexLock lock(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -412,7 +415,7 @@ void RetransmissionEndToEndTest::DecodesRetransmittedFrame(bool enable_rtx, EXPECT_EQ(kVideoRotation_90, frame.rotation()); { MutexLock lock(&mutex_); - if (frame.timestamp() == retransmitted_timestamp_) + if (frame.rtp_timestamp() == retransmitted_timestamp_) observation_complete_.Set(); } orig_renderer_->OnFrame(frame); @@ -469,7 +472,7 @@ void RetransmissionEndToEndTest::DecodesRetransmittedFrame(bool enable_rtx, send_config->encoder_settings.encoder_factory = &encoder_factory_; send_config->rtp.payload_name = "VP8"; encoder_config->codec_type = kVideoCodecVP8; - (*receive_configs)[0].decoders[0].video_format = SdpVideoFormat("VP8"); + (*receive_configs)[0].decoders[0].video_format = SdpVideoFormat::VP8(); } void OnFrameGeneratorCapturerCreated( @@ -494,7 +497,7 @@ void RetransmissionEndToEndTest::DecodesRetransmittedFrame(bool enable_rtx, } Mutex mutex_; - rtc::VideoSinkInterface* orig_renderer_ = nullptr; + VideoSinkInterface* orig_renderer_ = nullptr; const int payload_type_; const uint32_t retransmission_ssrc_; const int retransmission_payload_type_; diff --git a/video/end_to_end_tests/rtp_rtcp_tests.cc b/video/end_to_end_tests/rtp_rtcp_tests.cc index 0864715c11..e372a9408c 100644 --- a/video/end_to_end_tests/rtp_rtcp_tests.cc +++ b/video/end_to_end_tests/rtp_rtcp_tests.cc @@ -12,7 +12,6 @@ #include "api/test/simulated_network.h" #include "call/fake_network_pipe.h" -#include "call/simulated_network.h" #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" @@ -21,6 +20,7 @@ #include "rtc_base/task_queue_for_test.h" #include "test/call_test.h" #include "test/gtest.h" +#include "test/network/simulated_network.h" #include "test/rtcp_packet_parser.h" #include "test/video_test_constants.h" @@ -48,7 +48,7 @@ void RtpRtcpEndToEndTest::RespectsRtcpMode(RtcpMode rtcp_mode) { sent_rtcp_(0) {} private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { MutexLock lock(&mutex_); if (++sent_rtp_ % 3 == 0) return DROP_PACKET; @@ -56,7 +56,7 @@ void RtpRtcpEndToEndTest::RespectsRtcpMode(RtcpMode rtcp_mode) { return SEND_PACKET; } - Action OnReceiveRtcp(rtc::ArrayView packet) override { + Action OnReceiveRtcp(ArrayView packet) override { MutexLock lock(&mutex_); ++sent_rtcp_; test::RtcpPacketParser parser; @@ -142,6 +142,7 @@ void RtpRtcpEndToEndTest::TestRtpStatePreservation( private: std::vector CreateEncoderStreams( + const FieldTrialsView& /*field_trials*/, int frame_width, int frame_height, const VideoEncoderConfig& encoder_config) override { @@ -211,7 +212,7 @@ void RtpRtcpEndToEndTest::TestRtpStatePreservation( } } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); const uint32_t ssrc = rtp_packet.Ssrc(); @@ -261,7 +262,7 @@ void 
RtpRtcpEndToEndTest::TestRtpStatePreservation( return SEND_PACKET; } - Action OnSendRtcp(rtc::ArrayView packet) override { + Action OnSendRtcp(ArrayView packet) override { test::RtcpPacketParser rtcp_parser; rtcp_parser.Parse(packet); if (rtcp_parser.sender_report()->num_packets() > 0) { @@ -303,7 +304,7 @@ void RtpRtcpEndToEndTest::TestRtpStatePreservation( } GetVideoEncoderConfig()->video_stream_factory = - rtc::make_ref_counted(); + make_ref_counted(); // Use the same total bitrates when sending a single stream to avoid // lowering the bitrate estimate and requiring a subsequent rampup. one_stream = GetVideoEncoderConfig()->Copy(); @@ -334,7 +335,7 @@ void RtpRtcpEndToEndTest::TestRtpStatePreservation( // Using this request speeds up this test because then there is no need // to wait for a second for periodic Sender Report. rtcp::RapidResyncRequest force_send_sr_back_request; - rtc::Buffer packet = force_send_sr_back_request.Build(); + Buffer packet = force_send_sr_back_request.Build(); static_cast(receive_transport_.get()) ->SendRtcp(packet); } @@ -405,7 +406,7 @@ TEST_F(RtpRtcpEndToEndTest, DISABLED_TestFlexfecRtpStatePreservation) { } private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { MutexLock lock(&mutex_); RtpPacket rtp_packet; @@ -455,9 +456,9 @@ TEST_F(RtpRtcpEndToEndTest, DISABLED_TestFlexfecRtpStatePreservation) { return SEND_PACKET; } - absl::optional last_observed_sequence_number_ + std::optional last_observed_sequence_number_ RTC_GUARDED_BY(mutex_); - absl::optional last_observed_timestamp_ RTC_GUARDED_BY(mutex_); + std::optional last_observed_timestamp_ RTC_GUARDED_BY(mutex_); size_t num_flexfec_packets_sent_ RTC_GUARDED_BY(mutex_); Mutex mutex_; } observer; @@ -467,7 +468,9 @@ TEST_F(RtpRtcpEndToEndTest, DISABLED_TestFlexfecRtpStatePreservation) { static constexpr int kFrameRate = 15; test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }); SendTask(task_queue(), [&]() { CreateCalls(); diff --git a/video/end_to_end_tests/ssrc_tests.cc b/video/end_to_end_tests/ssrc_tests.cc index b6ee7d2dbd..f7542ef002 100644 --- a/video/end_to_end_tests/ssrc_tests.cc +++ b/video/end_to_end_tests/ssrc_tests.cc @@ -13,13 +13,13 @@ #include "api/test/simulated_network.h" #include "call/fake_network_pipe.h" #include "call/packet_receiver.h" -#include "call/simulated_network.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_util.h" #include "rtc_base/task_queue_for_test.h" #include "test/call_test.h" #include "test/gtest.h" +#include "test/network/simulated_network.h" #include "test/rtcp_packet_parser.h" #include "test/video_test_constants.h" @@ -41,7 +41,7 @@ TEST_F(SsrcEndToEndTest, ReceiverUsesLocalSsrc) { SyncRtcpObserver() : EndToEndTest(test::VideoTestConstants::kDefaultTimeout) {} - Action OnReceiveRtcp(rtc::ArrayView packet) override { + Action OnReceiveRtcp(ArrayView packet) override { test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(packet)); EXPECT_EQ(test::VideoTestConstants::kReceiverLocalVideoSsrc, @@ -85,10 +85,10 @@ TEST_F(SsrcEndToEndTest, UnknownRtpPacketTriggersUndemuxablePacketHandler) { receiver_->DeliverRtpPacket(media_type, std::move(packet), std::move(handler)); } - void DeliverRtcpPacket(rtc::CopyOnWriteBuffer packet) override {} + void DeliverRtcpPacket(CopyOnWriteBuffer 
packet) override {} PacketReceiver* receiver_; - rtc::Event undemuxable_packet_handler_triggered_; + Event undemuxable_packet_handler_triggered_; }; std::unique_ptr send_transport; @@ -163,7 +163,7 @@ void SsrcEndToEndTest::TestSendsSetSsrcs(size_t num_ssrcs, } private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -271,7 +271,7 @@ TEST_F(SsrcEndToEndTest, DISABLED_RedundantPayloadsTransmittedOnAllSsrcs) { } private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); diff --git a/video/end_to_end_tests/stats_tests.cc b/video/end_to_end_tests/stats_tests.cc index 475a6cd2da..d06a98be71 100644 --- a/video/end_to_end_tests/stats_tests.cc +++ b/video/end_to_end_tests/stats_tests.cc @@ -9,14 +9,13 @@ */ #include +#include #include "absl/algorithm/container.h" -#include "absl/types/optional.h" #include "api/task_queue/task_queue_base.h" #include "api/test/simulated_network.h" #include "api/test/video/function_video_encoder_factory.h" #include "call/fake_network_pipe.h" -#include "call/simulated_network.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/video_coding/include/video_coding_defines.h" #include "rtc_base/strings/string_builder.h" @@ -27,6 +26,7 @@ #include "test/call_test.h" #include "test/fake_encoder.h" #include "test/gtest.h" +#include "test/network/simulated_network.h" #include "test/rtcp_packet_parser.h" #include "test/video_test_constants.h" @@ -53,13 +53,13 @@ TEST_F(StatsEndToEndTest, GetStats) { public: StatsObserver() : EndToEndTest(test::VideoTestConstants::kLongTimeout), - encoder_factory_([]() { - return std::make_unique( - Clock::GetRealTimeClock(), 10); - }) {} + encoder_factory_( + [](const Environment& env, const SdpVideoFormat& format) { + return std::make_unique(env, 10); + }) {} private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { // Drop every 25th packet => 4% loss. static const int kPacketLossFrac = 25; RtpPacket header; @@ -73,17 +73,17 @@ TEST_F(StatsEndToEndTest, GetStats) { return SEND_PACKET; } - Action OnSendRtcp(rtc::ArrayView packet) override { + Action OnSendRtcp(ArrayView packet) override { check_stats_event_.Set(); return SEND_PACKET; } - Action OnReceiveRtp(rtc::ArrayView packet) override { + Action OnReceiveRtp(ArrayView packet) override { check_stats_event_.Set(); return SEND_PACKET; } - Action OnReceiveRtcp(rtc::ArrayView packet) override { + Action OnReceiveRtcp(ArrayView packet) override { check_stats_event_.Set(); return SEND_PACKET; } @@ -229,7 +229,7 @@ TEST_F(StatsEndToEndTest, GetStats) { } std::string CompoundKey(const char* name, uint32_t ssrc) { - rtc::StringBuilder oss; + StringBuilder oss; oss << name << "_" << ssrc; return oss.Release(); } @@ -356,7 +356,7 @@ TEST_F(StatsEndToEndTest, GetStats) { std::vector expected_receive_ssrcs_; std::set expected_send_ssrcs_; - rtc::Event check_stats_event_; + Event check_stats_event_; TaskQueueBase* task_queue_ = nullptr; } test; @@ -430,7 +430,7 @@ TEST_F(StatsEndToEndTest, TestReceivedRtpPacketStats) { void OnStreamsStopped() override { task_safety_flag_->SetNotAlive(); } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { if (sent_rtp_ >= kNumRtpPacketsToSend) { // Need to check the stats on the correct thread. 
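// Editorial sketch of the pattern used in the next few lines (all calls appear
// in this hunk): stats must be read on `task_queue_`, so the RTP hook posts a
// SafeTask guarded by a PendingTaskSafetyFlag, and the flag is marked
// not-alive in OnStreamsStopped() so any late-arriving task becomes a no-op:
//
//   scoped_refptr<PendingTaskSafetyFlag> task_safety_flag_ =
//       PendingTaskSafetyFlag::CreateDetached();
//
//   task_queue_->PostTask(SafeTask(task_safety_flag_, [this]() {
//     // Safe to touch receive_stream_ and observation_complete_ here.
//   }));
//
//   void OnStreamsStopped() override { task_safety_flag_->SetNotAlive(); }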
task_queue_->PostTask(SafeTask(task_safety_flag_, [this]() { @@ -454,7 +454,7 @@ TEST_F(StatsEndToEndTest, TestReceivedRtpPacketStats) { VideoReceiveStreamInterface* receive_stream_ = nullptr; uint32_t sent_rtp_ = 0; TaskQueueBase* const task_queue_; - rtc::scoped_refptr task_safety_flag_ = + scoped_refptr task_safety_flag_ = PendingTaskSafetyFlag::CreateDetached(); } test(task_queue()); @@ -469,7 +469,7 @@ TEST_F(StatsEndToEndTest, TestReceivedRtpPacketStats) { #endif TEST_F(StatsEndToEndTest, MAYBE_ContentTypeSwitches) { class StatsObserver : public test::BaseTest, - public rtc::VideoSinkInterface { + public VideoSinkInterface { public: StatsObserver() : BaseTest(test::VideoTestConstants::kLongTimeout), @@ -489,7 +489,7 @@ TEST_F(StatsEndToEndTest, MAYBE_ContentTypeSwitches) { } } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { if (MinNumberOfFramesReceived()) observation_complete_.Set(); return SEND_PACKET; @@ -518,17 +518,17 @@ TEST_F(StatsEndToEndTest, MAYBE_ContentTypeSwitches) { metrics::Reset(); - Call::Config send_config(send_event_log_.get()); + CallConfig send_config = SendCallConfig(); test.ModifySenderBitrateConfig(&send_config.bitrate_config); - Call::Config recv_config(recv_event_log_.get()); + CallConfig recv_config = RecvCallConfig(); test.ModifyReceiverBitrateConfig(&recv_config.bitrate_config); VideoEncoderConfig encoder_config_with_screenshare; SendTask(task_queue(), [this, &test, &send_config, &recv_config, &encoder_config_with_screenshare]() { - CreateSenderCall(send_config); - CreateReceiverCall(recv_config); + CreateSenderCall(std::move(send_config)); + CreateReceiverCall(std::move(recv_config)); CreateReceiveTransport(test.GetReceiveTransportConfig(), &test); CreateSendTransport(test.GetReceiveTransportConfig(), &test); @@ -603,7 +603,7 @@ TEST_F(StatsEndToEndTest, VerifyNackStats) { task_queue_(task_queue) {} private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { { MutexLock lock(&mutex_); if (++sent_rtp_packets_ == kPacketNumberToDrop) { @@ -618,7 +618,7 @@ TEST_F(StatsEndToEndTest, VerifyNackStats) { return SEND_PACKET; } - Action OnReceiveRtcp(rtc::ArrayView packet) override { + Action OnReceiveRtcp(ArrayView packet) override { MutexLock lock(&mutex_); test::RtcpPacketParser rtcp_parser; rtcp_parser.Parse(packet); @@ -642,9 +642,10 @@ TEST_F(StatsEndToEndTest, VerifyNackStats) { stream_stats.rtcp_packet_type_counts.nack_packets; } for (const auto& receive_stream : receive_streams_) { - VideoReceiveStreamInterface::Stats stats = receive_stream->GetStats(); + VideoReceiveStreamInterface::Stats receive_stats = + receive_stream->GetStats(); receive_stream_nack_packets += - stats.rtcp_packet_type_counts.nack_packets; + receive_stats.rtcp_packet_type_counts.nack_packets; } if (send_stream_nack_packets >= 1 && receive_stream_nack_packets >= 1) { // NACK packet sent on receive stream and received on sent stream. 
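// Editorial note on the NACK-stats flow exercised above: one RTP packet
// (`kPacketNumberToDrop`) is dropped on the send path, the receiver answers
// with a NACK, and the test completes once both directions report it in their
// RTCP packet-type counters, roughly:
//
//   for (const auto& receive_stream : receive_streams_) {
//     receive_stream_nack_packets +=
//         receive_stream->GetStats().rtcp_packet_type_counts.nack_packets;
//   }
//   // ...and similarly for the send stream's per-substream stats; the test
//   // finishes when both counters are >= 1.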
@@ -691,9 +692,9 @@ TEST_F(StatsEndToEndTest, VerifyNackStats) { bool dropped_rtp_packet_requested_ RTC_GUARDED_BY(&mutex_) = false; std::vector receive_streams_; VideoSendStream* send_stream_ = nullptr; - absl::optional start_runtime_ms_; + std::optional start_runtime_ms_; TaskQueueBase* const task_queue_; - rtc::scoped_refptr task_safety_flag_ = + scoped_refptr task_safety_flag_ = PendingTaskSafetyFlag::CreateDetached(); } test(task_queue()); @@ -732,13 +733,13 @@ TEST_F(StatsEndToEndTest, CallReportsRttForSender) { Start(); }); - int64_t start_time_ms = clock_->TimeInMilliseconds(); + int64_t start_time_ms = env().clock().TimeInMilliseconds(); while (true) { Call::Stats stats; SendTask(task_queue(), [this, &stats]() { stats = sender_call_->GetStats(); }); ASSERT_GE(start_time_ms + test::VideoTestConstants::kDefaultTimeout.ms(), - clock_->TimeInMilliseconds()) + env().clock().TimeInMilliseconds()) << "No RTT stats before timeout!"; if (stats.rtt_ms != -1) { // To avoid failures caused by rounding or minor ntp clock adjustments, diff --git a/video/end_to_end_tests/transport_feedback_tests.cc b/video/end_to_end_tests/transport_feedback_tests.cc index 36be6d9015..4f285f2b29 100644 --- a/video/end_to_end_tests/transport_feedback_tests.cc +++ b/video/end_to_end_tests/transport_feedback_tests.cc @@ -16,7 +16,6 @@ #include "api/units/time_delta.h" #include "call/call.h" #include "call/fake_network_pipe.h" -#include "call/simulated_network.h" #include "modules/rtp_rtcp/source/byte_io.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet.h" @@ -25,6 +24,7 @@ #include "test/call_test.h" #include "test/field_trial.h" #include "test/gtest.h" +#include "test/network/simulated_network.h" #include "test/rtcp_packet_parser.h" #include "test/video_test_constants.h" #include "video/end_to_end_tests/multi_stream_tester.h" @@ -50,8 +50,8 @@ TEST(TransportFeedbackMultiStreamTest, AssignsTransportSequenceNumbers) { Call* sender_call, const std::map& ssrc_map, const std::map& payload_type_map, - rtc::ArrayView audio_extensions, - rtc::ArrayView video_extensions) + ArrayView audio_extensions, + ArrayView video_extensions) : DirectTransport(task_queue, std::make_unique( Clock::GetRealTimeClock(), @@ -70,7 +70,7 @@ TEST(TransportFeedbackMultiStreamTest, AssignsTransportSequenceNumbers) { } virtual ~RtpExtensionHeaderObserver() {} - bool SendRtp(rtc::ArrayView data, + bool SendRtp(ArrayView data, const PacketOptions& options) override { { MutexLock lock(&lock_); @@ -157,7 +157,7 @@ TEST(TransportFeedbackMultiStreamTest, AssignsTransportSequenceNumbers) { private: Mutex lock_; - rtc::Event done_; + Event done_; RtpHeaderExtensionMap extensions_; RtpSequenceNumberUnwrapper unwrapper_; std::set received_packed_ids_; @@ -261,18 +261,18 @@ class TransportFeedbackTester : public test::EndToEndTest { } protected: - Action OnSendRtcp(rtc::ArrayView data) override { + Action OnSendRtcp(ArrayView data) override { EXPECT_FALSE(HasTransportFeedback(data)); return SEND_PACKET; } - Action OnReceiveRtcp(rtc::ArrayView data) override { + Action OnReceiveRtcp(ArrayView data) override { if (HasTransportFeedback(data)) observation_complete_.Set(); return SEND_PACKET; } - bool HasTransportFeedback(rtc::ArrayView data) const { + bool HasTransportFeedback(ArrayView data) const { test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(data)); return parser.transport_feedback()->num_packets() > 0; @@ -339,7 +339,7 @@ TEST_F(TransportFeedbackEndToEndTest, } protected: - Action 
OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); const bool only_padding = rtp_packet.payload_size() == 0; @@ -362,7 +362,7 @@ TEST_F(TransportFeedbackEndToEndTest, return SEND_PACKET; } - Action OnReceiveRtcp(rtc::ArrayView data) override { + Action OnReceiveRtcp(ArrayView data) override { MutexLock lock(&mutex_); // To fill up the congestion window we drop feedback on packets after 20 // packets have been sent. This means that any packets that has not yet @@ -379,7 +379,7 @@ TEST_F(TransportFeedbackEndToEndTest, return SEND_PACKET; } - bool HasTransportFeedback(rtc::ArrayView data) const { + bool HasTransportFeedback(ArrayView data) const { test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(data)); return parser.transport_feedback()->num_packets() > 0; @@ -433,7 +433,7 @@ TEST_F(TransportFeedbackEndToEndTest, TransportSeqNumOnAudioAndVideo) { kTransportSequenceNumberExtensionId)); } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet(&extensions_); EXPECT_TRUE(rtp_packet.Parse(packet)); uint16_t transport_sequence_number = 0; diff --git a/video/frame_cadence_adapter.cc b/video/frame_cadence_adapter.cc index ef76038ef0..2ee785fd53 100644 --- a/video/frame_cadence_adapter.cc +++ b/video/frame_cadence_adapter.cc @@ -12,6 +12,7 @@ #include #include +#include #include #include #include @@ -19,6 +20,7 @@ #include "absl/algorithm/container.h" #include "absl/base/attributes.h" +#include "absl/cleanup/cleanup.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" @@ -31,6 +33,7 @@ #include "rtc_base/rate_statistics.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" +#include "rtc_base/system/unused.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" @@ -49,45 +52,47 @@ class AdapterMode { // Called on the worker thread for every frame that enters. virtual void OnFrame(Timestamp post_time, - int frames_scheduled_for_processing, + bool queue_overload, const VideoFrame& frame) = 0; // Returns the currently estimated input framerate. - virtual absl::optional GetInputFrameRateFps() = 0; + virtual std::optional GetInputFrameRateFps() = 0; // Updates the frame rate. - virtual void UpdateFrameRate() = 0; + virtual void UpdateFrameRate(Timestamp frame_timestamp) = 0; }; // Implements a pass-through adapter. Single-threaded. class PassthroughAdapterMode : public AdapterMode { public: - PassthroughAdapterMode(Clock* clock, - FrameCadenceAdapterInterface::Callback* callback) - : clock_(clock), callback_(callback) { + explicit PassthroughAdapterMode( + FrameCadenceAdapterInterface::Callback* callback) + : callback_(callback) { sequence_checker_.Detach(); } // Adapter overrides. 
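// Editorial sketch of the reworked AdapterMode contract after this change
// (the declarations are in the hunk above; the optional's value type is an
// assumption, since the angle brackets are lost in this capture of the diff):
//
//   class AdapterMode {
//    public:
//     virtual void OnFrame(Timestamp post_time,
//                          bool queue_overload,  // was an int frame count
//                          const VideoFrame& frame) = 0;
//     virtual std::optional<uint32_t> GetInputFrameRateFps() = 0;
//     virtual void UpdateFrameRate(Timestamp frame_timestamp) = 0;  // was ()
//   };
//
// PassthroughAdapterMode now derives the input frame rate from the frame
// timestamps it is fed, caching the rate before RateStatistics::Update since
// the rate read right after an Update would be too high, so it no longer
// needs a Clock* member.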
void OnFrame(Timestamp post_time, - int frames_scheduled_for_processing, + bool queue_overload, const VideoFrame& frame) override { RTC_DCHECK_RUN_ON(&sequence_checker_); - callback_->OnFrame(post_time, frames_scheduled_for_processing, frame); + callback_->OnFrame(post_time, queue_overload, frame); } - absl::optional GetInputFrameRateFps() override { + std::optional GetInputFrameRateFps() override { RTC_DCHECK_RUN_ON(&sequence_checker_); - return input_framerate_.Rate(clock_->TimeInMilliseconds()); + return last_frame_rate_; } - void UpdateFrameRate() override { + void UpdateFrameRate(Timestamp frame_timestamp) override { RTC_DCHECK_RUN_ON(&sequence_checker_); - input_framerate_.Update(1, clock_->TimeInMilliseconds()); + // RateStatistics will calculate a too high rate immediately after Update. + last_frame_rate_ = input_framerate_.Rate(frame_timestamp.ms()); + input_framerate_.Update(1, frame_timestamp.ms()); } private: - Clock* const clock_; + std::optional last_frame_rate_; FrameCadenceAdapterInterface::Callback* const callback_; RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; // Input frame rate statistics for use when not in zero-hertz mode. @@ -101,7 +106,9 @@ class ZeroHertzAdapterMode : public AdapterMode { ZeroHertzAdapterMode(TaskQueueBase* queue, Clock* clock, FrameCadenceAdapterInterface::Callback* callback, - double max_fps); + double max_fps, + std::atomic& frames_scheduled_for_processing, + bool zero_hertz_queue_overload); ~ZeroHertzAdapterMode() { refresh_frame_requester_.Stop(); } // Reconfigures according to parameters. @@ -118,10 +125,10 @@ class ZeroHertzAdapterMode : public AdapterMode { // Adapter overrides. void OnFrame(Timestamp post_time, - int frames_scheduled_for_processing, + bool queue_overload, const VideoFrame& frame) override; - absl::optional GetInputFrameRateFps() override; - void UpdateFrameRate() override {} + std::optional GetInputFrameRateFps() override; + void UpdateFrameRate(Timestamp frame_timestamp) override {} // Notified on dropped frames. void OnDiscardedFrame(); @@ -130,13 +137,17 @@ class ZeroHertzAdapterMode : public AdapterMode { // Callback::RequestRefreshFrame. void ProcessKeyFrameRequest(); + // Updates the restrictions of max frame rate for the video source. + // Always called during construction using latest `restricted_frame_delay_`. + void UpdateVideoSourceRestrictions(std::optional max_frame_rate); + private: // The tracking state of each spatial layer. Used for determining when to // stop repeating frames. struct SpatialLayerTracker { // If unset, the layer is disabled. Otherwise carries the quality // convergence status of the layer. - absl::optional quality_converged; + std::optional quality_converged; }; // The state of a scheduled repeat. struct ScheduledRepeat { @@ -170,24 +181,34 @@ class ZeroHertzAdapterMode : public AdapterMode { // after this call. void ResetQualityConvergenceInfo() RTC_RUN_ON(sequence_checker_); // Processes incoming frames on a delayed cadence. - void ProcessOnDelayedCadence() RTC_RUN_ON(sequence_checker_); - // Schedules a later repeat with delay depending on state of layer trackers. + void ProcessOnDelayedCadence(Timestamp post_time) + RTC_RUN_ON(sequence_checker_); + // Schedules a later repeat with delay depending on state of layer trackers + // and if UpdateVideoSourceRestrictions has been called or not. // If true is passed in `idle_repeat`, the repeat is going to be - // kZeroHertzIdleRepeatRatePeriod. Otherwise it'll be the value of - // `frame_delay`. + // kZeroHertzIdleRepeatRatePeriod. 
Otherwise it'll be the maximum value of + // `frame_delay` or `restricted_frame_delay_` if it has been set. void ScheduleRepeat(int frame_id, bool idle_repeat) RTC_RUN_ON(sequence_checker_); - // Repeats a frame in the abscence of incoming frames. Slows down when quality + // Repeats a frame in the absence of incoming frames. Slows down when quality // convergence is attained, and stops the cadence terminally when new frames // have arrived. void ProcessRepeatedFrameOnDelayedCadence(int frame_id) RTC_RUN_ON(sequence_checker_); - // Sends a frame, updating the timestamp to the current time. - void SendFrameNow(const VideoFrame& frame) const + // Sends a frame, updating the timestamp to the current time. Also updates + // `queue_overload_count_` based on the time it takes to encode a frame and + // the amount of received frames while encoding. The `queue_overload` + // parameter in the OnFrame callback will be true while + // `queue_overload_count_` is larger than zero to allow the client to drop + // frames and thereby mitigate delay buildups. + // Repeated frames are sent with `post_time` set to std::nullopt. + void SendFrameNow(std::optional post_time, const VideoFrame& frame) RTC_RUN_ON(sequence_checker_); // Returns the repeat duration depending on if it's an idle repeat or not. TimeDelta RepeatDuration(bool idle_repeat) const RTC_RUN_ON(sequence_checker_); + // Returns the frame duration taking potential restrictions into account. + TimeDelta FrameDuration() const RTC_RUN_ON(sequence_checker_); // Unless timer already running, starts repeatedly requesting refresh frames // after a grace_period. If a frame appears before the grace_period has // passed, the request is cancelled. @@ -200,6 +221,14 @@ class ZeroHertzAdapterMode : public AdapterMode { // The configured max_fps. // TODO(crbug.com/1255737): support max_fps updates. const double max_fps_; + + // Number of frames that are currently scheduled for processing on the + // `queue_`. + const std::atomic& frames_scheduled_for_processing_; + + // Can be used as kill-switch for the queue overload mechanism. + const bool zero_hertz_queue_overload_enabled_; + // How much the incoming frame sequence is delayed by. const TimeDelta frame_delay_ = TimeDelta::Seconds(1) / max_fps_; @@ -210,7 +239,7 @@ class ZeroHertzAdapterMode : public AdapterMode { // for cancelling deferred repeated frame processing happening. int current_frame_id_ RTC_GUARDED_BY(sequence_checker_) = 0; // Has content when we are repeating frames. - absl::optional scheduled_repeat_ + std::optional scheduled_repeat_ RTC_GUARDED_BY(sequence_checker_); // Convergent state of each of the configured simulcast layers. std::vector layer_trackers_ @@ -219,26 +248,118 @@ class ZeroHertzAdapterMode : public AdapterMode { // they can be dropped in various places in the capture pipeline. RepeatingTaskHandle refresh_frame_requester_ RTC_GUARDED_BY(sequence_checker_); + // Can be set by UpdateVideoSourceRestrictions when the video source restricts + // the max frame rate. + std::optional restricted_frame_delay_ + RTC_GUARDED_BY(sequence_checker_); + // Set in OnSendFrame to reflect how many future frames will be forwarded with + // the `queue_overload` flag set to true. + int queue_overload_count_ RTC_GUARDED_BY(sequence_checker_) = 0; ScopedTaskSafety safety_; }; +// Implements a frame cadence adapter supporting VSync aligned encoding. 
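// Editorial sketch of how this mode is wired, based on the members declared
// below (the control flow is an assumption drawn from their comments, not a
// verbatim copy of the implementation): frames arriving in OnFrame() are
// parked in `input_queue_` as InputFrameRef entries on the worker queue, and
// a Metronome tick drives EncodeAllEnqueuedFrames(), which forwards each
// queued frame to callback_->OnFrame() so encoding lines up with the display's
// VSync cadence. Roughly:
//
//   void VSyncEncodeAdapterMode::OnFrame(Timestamp post_time,
//                                        bool queue_overload,
//                                        const VideoFrame& frame) {
//     // (sketch) hop to `worker_queue_`, append an InputFrameRef, and ask the
//     // metronome for a callback on its next tick (expected_next_tick_).
//   }
//
//   void VSyncEncodeAdapterMode::EncodeAllEnqueuedFrames() {
//     // (sketch) for each InputFrameRef in input_queue_, call
//     //   callback_->OnFrame(ref.time_when_posted_us,
//     //                      /*queue_overload=*/false,  // assumption
//     //                      ref.video_frame);
//     // then clear input_queue_.
//   }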
+class VSyncEncodeAdapterMode : public AdapterMode { + public: + VSyncEncodeAdapterMode(Clock* clock, + TaskQueueBase* queue, + scoped_refptr queue_safety_flag, + Metronome* metronome, + TaskQueueBase* worker_queue, + FrameCadenceAdapterInterface::Callback* callback) + : clock_(clock), + queue_(queue), + queue_safety_flag_(queue_safety_flag), + callback_(callback), + metronome_(metronome), + worker_queue_(worker_queue) { + queue_sequence_checker_.Detach(); + worker_sequence_checker_.Detach(); + } + + void PrepareShutdown() { + MutexLock lock(&queue_lock_); + queue_ = nullptr; + } + + // Adapter overrides. + void OnFrame(Timestamp post_time, + bool queue_overload, + const VideoFrame& frame) override; + + std::optional GetInputFrameRateFps() override { + RTC_DCHECK_RUN_ON(&queue_sequence_checker_); + return last_frame_rate_; + } + + void UpdateFrameRate(Timestamp frame_timestamp) override { + RTC_DCHECK_RUN_ON(&queue_sequence_checker_); + // RateStatistics will calculate a too high rate immediately after Update. + last_frame_rate_ = input_framerate_.Rate(frame_timestamp.ms()); + input_framerate_.Update(1, frame_timestamp.ms()); + } + + void EncodeAllEnqueuedFrames(); + + private: + // Holds input frames coming from the client ready to be encoded. + struct InputFrameRef { + InputFrameRef(const VideoFrame& video_frame, Timestamp time_when_posted_us) + : time_when_posted_us(time_when_posted_us), + video_frame(std::move(video_frame)) {} + Timestamp time_when_posted_us; + const VideoFrame video_frame; + }; + + Clock* const clock_; + // Protects `queue_`. + // TODO: crbug.com/358040973 - We should eventually figure out a way to avoid + // lock protection. + Mutex queue_lock_; + TaskQueueBase* queue_ RTC_GUARDED_BY(queue_lock_) + RTC_PT_GUARDED_BY(queue_lock_); + RTC_NO_UNIQUE_ADDRESS SequenceChecker queue_sequence_checker_; + scoped_refptr queue_safety_flag_; + // Input frame rate statistics for use when not in zero-hertz mode. + std::optional last_frame_rate_ + RTC_GUARDED_BY(queue_sequence_checker_); + RateStatistics input_framerate_ RTC_GUARDED_BY(queue_sequence_checker_){ + FrameCadenceAdapterInterface::kFrameRateAveragingWindowSizeMs, 1000}; + FrameCadenceAdapterInterface::Callback* const callback_; + + Metronome* metronome_; + TaskQueueBase* const worker_queue_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_sequence_checker_; + // `worker_safety_` protects tasks on the worker queue related to + // `metronome_` since metronome usage must happen on worker thread. + ScopedTaskSafetyDetached worker_safety_; + Timestamp expected_next_tick_ RTC_GUARDED_BY(worker_sequence_checker_) = + Timestamp::PlusInfinity(); + // Vector of input frames to be encoded. + std::vector input_queue_ + RTC_GUARDED_BY(worker_sequence_checker_); +}; + class FrameCadenceAdapterImpl : public FrameCadenceAdapterInterface { public: FrameCadenceAdapterImpl(Clock* clock, TaskQueueBase* queue, + Metronome* metronome, + TaskQueueBase* worker_queue, const FieldTrialsView& field_trials); ~FrameCadenceAdapterImpl(); // FrameCadenceAdapterInterface overrides. 
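// Editorial sketch (from the constructor above and the members declared
// further down in this hunk): FrameCadenceAdapterImpl now chooses between
// three adapter modes rather than two, assuming roughly:
//
//   FrameCadenceAdapterImpl(clock, queue, metronome, worker_queue, trials);
//
//   - metronome != nullptr         -> VSyncEncodeAdapterMode, owned via
//                                     vsync_encode_adapter_ and torn down on
//                                     the worker queue,
//   - zero-hertz params configured -> ZeroHertzAdapterMode,
//   - otherwise                    -> PassthroughAdapterMode.
//
// How the non-passthrough modes interact when both are configured is decided
// in MaybeReconfigureAdapters, which is not part of this excerpt.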
void Initialize(Callback* callback) override; void SetZeroHertzModeEnabled( - absl::optional params) override; - absl::optional GetInputFrameRateFps() override; - void UpdateFrameRate() override; + std::optional params) override; + std::optional GetInputFrameRateFps() override; void UpdateLayerQualityConvergence(size_t spatial_index, bool quality_converged) override; void UpdateLayerStatus(size_t spatial_index, bool enabled) override; + void UpdateVideoSourceRestrictions( + std::optional max_frame_rate) override; void ProcessKeyFrameRequest() override; // VideoFrameSink overrides. @@ -248,9 +369,10 @@ class FrameCadenceAdapterImpl : public FrameCadenceAdapterInterface { const VideoTrackSourceConstraints& constraints) override; private: - // Called from OnFrame in zero-hertz mode. + void UpdateFrameRate(Timestamp frame_timestamp); + // Called from OnFrame in both pass-through and zero-hertz mode. void OnFrameOnMainQueue(Timestamp post_time, - int frames_scheduled_for_processing, + bool queue_overload, const VideoFrame& frame) RTC_RUN_ON(queue_); // Returns true under all of the following conditions: @@ -260,39 +382,60 @@ class FrameCadenceAdapterImpl : public FrameCadenceAdapterInterface { // - zero-hertz mode enabled bool IsZeroHertzScreenshareEnabled() const RTC_RUN_ON(queue_); + // Configures current adapter on non-ZeroHertz mode, called when Initialize or + // MaybeReconfigureAdapters. + void ConfigureCurrentAdapterWithoutZeroHertz(); + // Handles adapter creation on configuration changes. void MaybeReconfigureAdapters(bool was_zero_hertz_enabled) RTC_RUN_ON(queue_); Clock* const clock_; TaskQueueBase* const queue_; - // True if we support frame entry for screenshare with a minimum frequency of - // 0 Hz. - const bool zero_hertz_screenshare_enabled_; - - // The two possible modes we're under. - absl::optional passthrough_adapter_; - absl::optional zero_hertz_adapter_; + // Kill-switch for the queue overload mechanism in zero-hertz mode. + const bool frame_cadence_adapter_zero_hertz_queue_overload_enabled_; + + // Field trial for using timestamp from video frames, rather than clock when + // calculating input frame rate. + const bool use_video_frame_timestamp_; + // Used for verifying that timestamps are monotonically increasing. + std::optional last_incoming_frame_timestamp_; + bool incoming_frame_timestamp_monotonically_increasing_ = true; + + // The three possible modes we're under. + std::optional passthrough_adapter_; + std::optional zero_hertz_adapter_; + // The `vsync_encode_adapter_` must be destroyed on the worker queue since + // VSync metronome needs to happen on worker thread. + std::unique_ptr vsync_encode_adapter_; // If set, zero-hertz mode has been enabled. - absl::optional zero_hertz_params_; + std::optional zero_hertz_params_; // Cache for the current adapter mode. AdapterMode* current_adapter_mode_ = nullptr; + // VSync encoding is used when this valid. + Metronome* const metronome_; + TaskQueueBase* const worker_queue_; + // Timestamp for statistics reporting. - absl::optional zero_hertz_adapter_created_timestamp_ + std::optional zero_hertz_adapter_created_timestamp_ RTC_GUARDED_BY(queue_); // Set up during Initialize. Callback* callback_ = nullptr; // The source's constraints. - absl::optional source_constraints_ + std::optional source_constraints_ RTC_GUARDED_BY(queue_); + // Stores the latest restriction in max frame rate set by + // UpdateVideoSourceRestrictions. 
Ensures that a previously set restriction + // can be maintained during reconstructions of the adapter. + std::optional restricted_max_frame_rate_ RTC_GUARDED_BY(queue_); + // Race checker for incoming frames. This is the network thread in chromium, // but may vary from test contexts. - rtc::RaceChecker incoming_frame_race_checker_; - bool has_reported_screenshare_frame_rate_umas_ RTC_GUARDED_BY(queue_) = false; + RaceChecker incoming_frame_race_checker_; // Number of frames that are currently scheduled for processing on the // `queue_`. @@ -305,8 +448,15 @@ ZeroHertzAdapterMode::ZeroHertzAdapterMode( TaskQueueBase* queue, Clock* clock, FrameCadenceAdapterInterface::Callback* callback, - double max_fps) - : queue_(queue), clock_(clock), callback_(callback), max_fps_(max_fps) { + double max_fps, + std::atomic& frames_scheduled_for_processing, + bool zero_hertz_queue_overload_enabled) + : queue_(queue), + clock_(clock), + callback_(callback), + max_fps_(max_fps), + frames_scheduled_for_processing_(frames_scheduled_for_processing), + zero_hertz_queue_overload_enabled_(zero_hertz_queue_overload_enabled) { sequence_checker_.Detach(); MaybeStartRefreshFrameRequester(); } @@ -328,8 +478,8 @@ void ZeroHertzAdapterMode::UpdateLayerQualityConvergence( bool quality_converged) { RTC_DCHECK_RUN_ON(&sequence_checker_); TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc"), __func__, - "spatial_index", spatial_index, "converged", - quality_converged); + TRACE_EVENT_SCOPE_GLOBAL, "spatial_index", spatial_index, + "converged", quality_converged); if (spatial_index >= layer_trackers_.size()) return; if (layer_trackers_[spatial_index].quality_converged.has_value()) @@ -340,7 +490,8 @@ void ZeroHertzAdapterMode::UpdateLayerStatus(size_t spatial_index, bool enabled) { RTC_DCHECK_RUN_ON(&sequence_checker_); TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc"), __func__, - "spatial_index", spatial_index, "enabled", enabled); + TRACE_EVENT_SCOPE_GLOBAL, "spatial_index", spatial_index, + "enabled", enabled); if (spatial_index >= layer_trackers_.size()) return; if (enabled) { @@ -349,12 +500,12 @@ void ZeroHertzAdapterMode::UpdateLayerStatus(size_t spatial_index, layer_trackers_[spatial_index].quality_converged = false; } } else { - layer_trackers_[spatial_index].quality_converged = absl::nullopt; + layer_trackers_[spatial_index].quality_converged = std::nullopt; } } void ZeroHertzAdapterMode::OnFrame(Timestamp post_time, - int frames_scheduled_for_processing, + bool queue_overload, const VideoFrame& frame) { RTC_DCHECK_RUN_ON(&sequence_checker_); TRACE_EVENT0("webrtc", "ZeroHertzAdapterMode::OnFrame"); @@ -374,13 +525,13 @@ void ZeroHertzAdapterMode::OnFrame(Timestamp post_time, // Store the frame in the queue and schedule deferred processing. 
queued_frames_.push_back(frame); current_frame_id_++; - scheduled_repeat_ = absl::nullopt; + scheduled_repeat_ = std::nullopt; TimeDelta time_spent_since_post = clock_->CurrentTime() - post_time; queue_->PostDelayedHighPrecisionTask( SafeTask(safety_.flag(), - [this] { + [this, post_time] { RTC_DCHECK_RUN_ON(&sequence_checker_); - ProcessOnDelayedCadence(); + ProcessOnDelayedCadence(post_time); }), std::max(frame_delay_ - time_spent_since_post, TimeDelta::Zero())); } @@ -396,14 +547,29 @@ void ZeroHertzAdapterMode::OnDiscardedFrame() { MaybeStartRefreshFrameRequester(); } -absl::optional ZeroHertzAdapterMode::GetInputFrameRateFps() { +std::optional ZeroHertzAdapterMode::GetInputFrameRateFps() { RTC_DCHECK_RUN_ON(&sequence_checker_); return max_fps_; } +void ZeroHertzAdapterMode::UpdateVideoSourceRestrictions( + std::optional max_frame_rate) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("webrtc"), __func__, + TRACE_EVENT_SCOPE_GLOBAL, "max_frame_rate", + max_frame_rate.value_or(-1)); + if (max_frame_rate.value_or(0) > 0) { + // Set new, validated (> 0) and restricted frame rate. + restricted_frame_delay_ = TimeDelta::Seconds(1) / *max_frame_rate; + } else { + // Source reports that the frame rate is now unrestricted. + restricted_frame_delay_ = std::nullopt; + } +} + void ZeroHertzAdapterMode::ProcessKeyFrameRequest() { RTC_DCHECK_RUN_ON(&sequence_checker_); - TRACE_EVENT_INSTANT0("webrtc", __func__); + TRACE_EVENT_INSTANT0("webrtc", __func__, TRACE_EVENT_SCOPE_GLOBAL); // If we're new and don't have a frame, there's no need to request refresh // frames as this was being triggered for us when zero-hz mode was set up. // @@ -465,13 +631,13 @@ void ZeroHertzAdapterMode::ResetQualityConvergenceInfo() { } } -void ZeroHertzAdapterMode::ProcessOnDelayedCadence() { +void ZeroHertzAdapterMode::ProcessOnDelayedCadence(Timestamp post_time) { RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DCHECK(!queued_frames_.empty()); TRACE_EVENT0("webrtc", __func__); // Avoid sending the front frame for encoding (which could take a long time) - // until we schedule a repeate. + // until we schedule a repeat. VideoFrame front_frame = queued_frames_.front(); // If there were two or more frames stored, we do not have to schedule repeats @@ -484,7 +650,7 @@ void ZeroHertzAdapterMode::ProcessOnDelayedCadence() { // arrive. ScheduleRepeat(current_frame_id_, HasQualityConverged()); } - SendFrameNow(front_frame); + SendFrameNow(post_time, front_frame); } void ZeroHertzAdapterMode::ScheduleRepeat(int frame_id, bool idle_repeat) { @@ -541,23 +707,70 @@ void ZeroHertzAdapterMode::ProcessRepeatedFrameOnDelayedCadence(int frame_id) { // Schedule another repeat before sending the frame off which could take time. ScheduleRepeat(frame_id, HasQualityConverged()); - SendFrameNow(frame); + SendFrameNow(std::nullopt, frame); } -void ZeroHertzAdapterMode::SendFrameNow(const VideoFrame& frame) const { +void ZeroHertzAdapterMode::SendFrameNow(std::optional post_time, + const VideoFrame& frame) { RTC_DCHECK_RUN_ON(&sequence_checker_); TRACE_EVENT0("webrtc", __func__); - // TODO(crbug.com/1255737): figure out if frames_scheduled_for_processing - // makes sense to compute in this implementation. 
- callback_->OnFrame(/*post_time=*/clock_->CurrentTime(), - /*frames_scheduled_for_processing=*/1, frame); + + Timestamp encode_start_time = clock_->CurrentTime(); + if (post_time.has_value()) { + TimeDelta delay = (encode_start_time - *post_time); + RTC_HISTOGRAM_COUNTS_10000("WebRTC.Screenshare.ZeroHz.DelayMs", delay.ms()); + } + + // Forward the frame and set `queue_overload` if is has been detected that it + // is not possible to deliver frames at the expected rate due to slow + // encoding. + callback_->OnFrame(/*post_time=*/encode_start_time, queue_overload_count_ > 0, + frame); + + // WebRTC-ZeroHertzQueueOverload kill-switch. + if (!zero_hertz_queue_overload_enabled_) + return; + + // `queue_overload_count_` determines for how many future frames the + // `queue_overload` flag will be set and it is only increased if: + // o We are not already in an overload state. + // o New frames have been scheduled for processing on the queue while encoding + // took place in OnFrame. + // o The duration of OnFrame is longer than the current frame duration. + // If all these conditions are fulfilled, `queue_overload_count_` is set to + // `frames_scheduled_for_processing_` and any pending repeat is canceled since + // new frames are available and the repeat is not needed. + // If the adapter is already in an overload state, simply decrease + // `queue_overload_count_` by one. + if (queue_overload_count_ == 0) { + const int frames_scheduled_for_processing = + frames_scheduled_for_processing_.load(std::memory_order_relaxed); + if (frames_scheduled_for_processing > 0) { + TimeDelta encode_time = clock_->CurrentTime() - encode_start_time; + if (encode_time > FrameDuration()) { + queue_overload_count_ = frames_scheduled_for_processing; + // Invalidates any outstanding repeat to avoid sending pending repeat + // directly after too long encode. + current_frame_id_++; + } + } + } else { + queue_overload_count_--; + } + RTC_HISTOGRAM_BOOLEAN("WebRTC.Screenshare.ZeroHz.QueueOverload", + queue_overload_count_ > 0); +} + +TimeDelta ZeroHertzAdapterMode::FrameDuration() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return std::max(frame_delay_, restricted_frame_delay_.value_or(frame_delay_)); } TimeDelta ZeroHertzAdapterMode::RepeatDuration(bool idle_repeat) const { RTC_DCHECK_RUN_ON(&sequence_checker_); return idle_repeat ? FrameCadenceAdapterInterface::kZeroHertzIdleRepeatRatePeriod - : frame_delay_; + : FrameDuration(); } void ZeroHertzAdapterMode::MaybeStartRefreshFrameRequester() { @@ -576,46 +789,144 @@ void ZeroHertzAdapterMode::MaybeStartRefreshFrameRequester() { } } +void VSyncEncodeAdapterMode::OnFrame(Timestamp post_time, + bool queue_overload, + const VideoFrame& frame) { + // We expect `metronome_` and `EncodeAllEnqueuedFrames()` runs on + // `worker_queue_`. + if (!worker_queue_->IsCurrent()) { + worker_queue_->PostTask(SafeTask( + worker_safety_.flag(), [this, post_time, queue_overload, frame] { + OnFrame(post_time, queue_overload, frame); + })); + return; + } + + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + TRACE_EVENT0("webrtc", "VSyncEncodeAdapterMode::OnFrame"); + + input_queue_.emplace_back(std::move(frame), post_time); + + // The `metronome_` tick period maybe throttled in some case, so here we only + // align encode task to VSync event when `metronome_` tick period is less + // than 34ms (30Hz). 
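
The queue-overload bookkeeping added to SendFrameNow() above can be hard to follow in diff form, so here is a condensed, free-standing restatement of that logic. It is only a sketch mirroring the patch; OverloadState and its member names are illustrative and add no behavior of their own.

#include "api/units/time_delta.h"

// Sketch of the SendFrameNow() overload bookkeeping: the flag is raised for as
// many forwarded frames as were scheduled while a too-slow encode ran.
struct OverloadState {
  int overload_count = 0;

  // True if the frame currently being forwarded should carry queue_overload.
  bool Overloaded() const { return overload_count > 0; }

  // Called after each forwarded frame with the measured encode time, the
  // nominal frame duration, and the number of frames scheduled while encoding.
  void Update(webrtc::TimeDelta encode_time,
              webrtc::TimeDelta frame_duration,
              int frames_scheduled_while_encoding) {
    if (overload_count == 0) {
      if (frames_scheduled_while_encoding > 0 && encode_time > frame_duration) {
        // Flag the next N frames, where N is the backlog built up meanwhile.
        // (The real code also invalidates any pending repeat at this point.)
        overload_count = frames_scheduled_while_encoding;
      }
    } else {
      --overload_count;
    }
  }
};
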
+ static constexpr TimeDelta kMaxAllowedDelay = TimeDelta::Millis(34); + if (metronome_->TickPeriod() <= kMaxAllowedDelay) { + // The metronome is ticking frequently enough that it is worth the extra + // delay. + metronome_->RequestCallOnNextTick( + SafeTask(worker_safety_.flag(), [this] { EncodeAllEnqueuedFrames(); })); + } else { + // The metronome is ticking too infrequently, encode immediately. + EncodeAllEnqueuedFrames(); + } +} + +void VSyncEncodeAdapterMode::EncodeAllEnqueuedFrames() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + TRACE_EVENT0("webrtc", "VSyncEncodeAdapterMode::EncodeAllEnqueuedFrames"); + + // Local time in webrtc time base. + Timestamp post_time = clock_->CurrentTime(); + + for (auto& input : input_queue_) { + TRACE_EVENT1("webrtc", "FrameCadenceAdapterImpl::EncodeAllEnqueuedFrames", + "VSyncEncodeDelay", + (post_time - input.time_when_posted_us).ms()); + + const VideoFrame frame = std::move(input.video_frame); + MutexLock lock(&queue_lock_); + if (queue_) { + queue_->PostTask(SafeTask(queue_safety_flag_, [this, post_time, frame] { + { + MutexLock lock(&queue_lock_); + if (!queue_) { + return; + } + RTC_DCHECK_RUN_ON(queue_); + } + + // TODO(b/304158952): Support more refined queue overload control. + // Not running under mutex is safe since `callback_` existence is + // guaranteed to exist as long as running encode queue tasks exist. + callback_->OnFrame(post_time, /*queue_overload=*/false, frame); + })); + } + } + + input_queue_.clear(); +} + FrameCadenceAdapterImpl::FrameCadenceAdapterImpl( Clock* clock, TaskQueueBase* queue, + Metronome* metronome, + TaskQueueBase* worker_queue, const FieldTrialsView& field_trials) : clock_(clock), queue_(queue), - zero_hertz_screenshare_enabled_( - !field_trials.IsDisabled("WebRTC-ZeroHertzScreenshare")) {} + frame_cadence_adapter_zero_hertz_queue_overload_enabled_( + !field_trials.IsDisabled("WebRTC-ZeroHertzQueueOverload")), + use_video_frame_timestamp_(field_trials.IsEnabled( + "WebRTC-FrameCadenceAdapter-UseVideoFrameTimestamp")), + metronome_(metronome), + worker_queue_(worker_queue) {} FrameCadenceAdapterImpl::~FrameCadenceAdapterImpl() { RTC_DLOG(LS_VERBOSE) << __func__ << " this " << this; + + // VSync adapter needs to be destroyed on worker queue when metronome is + // valid. + if (metronome_) { + vsync_encode_adapter_->PrepareShutdown(); + absl::Cleanup cleanup = [adapter = std::move(vsync_encode_adapter_)] {}; + worker_queue_->PostTask([cleanup = std::move(cleanup)] {}); + } + + RTC_HISTOGRAM_BOOLEAN( + "WebRTC.Video.InputFrameTimestampMonotonicallyIncreasing", + incoming_frame_timestamp_monotonically_increasing_); } void FrameCadenceAdapterImpl::Initialize(Callback* callback) { callback_ = callback; - passthrough_adapter_.emplace(clock_, callback); - current_adapter_mode_ = &passthrough_adapter_.value(); + // Use VSync encode mode if metronome is valid, otherwise passthrough mode + // would be used. 
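
The VSync path above amounts to a buffer-and-flush-on-tick pattern: frames are queued and encoded together on the next metronome tick, unless the metronome ticks too infrequently. The sketch below illustrates that pattern using only the Metronome calls seen in this patch; TickAlignedBuffer and kMaxAlignableTickPeriod are illustrative names, and the task-queue hops, safety flags and locking of the real adapter are deliberately left out.

#include <utility>
#include <vector>

#include "absl/functional/any_invocable.h"
#include "api/metronome/metronome.h"
#include "api/units/time_delta.h"
#include "api/video/video_frame.h"

// Illustrative threshold; the patch uses 34 ms (roughly 30 Hz).
constexpr webrtc::TimeDelta kMaxAlignableTickPeriod = webrtc::TimeDelta::Millis(34);

// Buffers frames and encodes them on the next metronome tick when the
// metronome is frequent enough, otherwise encodes immediately.
class TickAlignedBuffer {
 public:
  TickAlignedBuffer(webrtc::Metronome* metronome,
                    absl::AnyInvocable<void(const webrtc::VideoFrame&)> encode)
      : metronome_(metronome), encode_(std::move(encode)) {}

  void OnFrame(const webrtc::VideoFrame& frame) {
    pending_.push_back(frame);
    if (metronome_->TickPeriod() <= kMaxAlignableTickPeriod) {
      metronome_->RequestCallOnNextTick([this] { Flush(); });
    } else {
      Flush();
    }
  }

 private:
  void Flush() {
    for (const webrtc::VideoFrame& frame : pending_) {
      encode_(frame);
    }
    pending_.clear();
  }

  webrtc::Metronome* const metronome_;
  absl::AnyInvocable<void(const webrtc::VideoFrame&)> encode_;
  std::vector<webrtc::VideoFrame> pending_;
};
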
+ if (metronome_) { + vsync_encode_adapter_ = std::make_unique( + clock_, queue_, safety_.flag(), metronome_, worker_queue_, callback_); + } else { + passthrough_adapter_.emplace(callback); + } + ConfigureCurrentAdapterWithoutZeroHertz(); } void FrameCadenceAdapterImpl::SetZeroHertzModeEnabled( - absl::optional params) { + std::optional params) { RTC_DCHECK_RUN_ON(queue_); bool was_zero_hertz_enabled = zero_hertz_params_.has_value(); - if (params.has_value() && !was_zero_hertz_enabled) - has_reported_screenshare_frame_rate_umas_ = false; zero_hertz_params_ = params; MaybeReconfigureAdapters(was_zero_hertz_enabled); } -absl::optional FrameCadenceAdapterImpl::GetInputFrameRateFps() { +std::optional FrameCadenceAdapterImpl::GetInputFrameRateFps() { RTC_DCHECK_RUN_ON(queue_); return current_adapter_mode_->GetInputFrameRateFps(); } -void FrameCadenceAdapterImpl::UpdateFrameRate() { +void FrameCadenceAdapterImpl::UpdateFrameRate(Timestamp frame_timestamp) { RTC_DCHECK_RUN_ON(queue_); // The frame rate need not be updated for the zero-hertz adapter. The - // passthrough adapter however uses it. Always pass frames into the - // passthrough to keep the estimation alive should there be an adapter switch. - passthrough_adapter_->UpdateFrameRate(); + // vsync encode and passthrough adapter however uses it. Always pass frames + // into the vsync encode or passthrough to keep the estimation alive should + // there be an adapter switch. + if (metronome_) { + RTC_CHECK(vsync_encode_adapter_); + vsync_encode_adapter_->UpdateFrameRate(frame_timestamp); + } else { + RTC_CHECK(passthrough_adapter_); + passthrough_adapter_->UpdateFrameRate(frame_timestamp); + } } void FrameCadenceAdapterImpl::UpdateLayerQualityConvergence( @@ -632,6 +943,17 @@ void FrameCadenceAdapterImpl::UpdateLayerStatus(size_t spatial_index, zero_hertz_adapter_->UpdateLayerStatus(spatial_index, enabled); } +void FrameCadenceAdapterImpl::UpdateVideoSourceRestrictions( + std::optional max_frame_rate) { + RTC_DCHECK_RUN_ON(queue_); + // Store the restriction to ensure that it can be reapplied in possible + // future adapter creations on configuration changes. 
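
As a sanity check on the restriction arithmetic stored here (the same math FrameDuration() and RepeatDuration() perform earlier in this file): with the zero-hertz stream capped at 10 fps, non-idle repeats normally run every 100 ms, and a 5 fps restriction stretches them to 200 ms while idle repeats keep the kZeroHertzIdleRepeatRatePeriod cadence. That is what the ZeroHertzLayerQualityConvergenceTest cases later in the patch expect. The helper below is only an illustration and assumes the nominal delay is 1/max_fps, as in the adapter.

#include <algorithm>
#include <optional>

#include "api/units/time_delta.h"

// Delay between non-idle (unconverged) repeats given the source max fps and an
// optional restriction from UpdateVideoSourceRestrictions().
webrtc::TimeDelta NonIdleRepeatDelay(double max_fps,
                                     std::optional<double> restricted_max_fps) {
  const webrtc::TimeDelta frame_delay = webrtc::TimeDelta::Seconds(1) / max_fps;
  const webrtc::TimeDelta restricted_delay =
      restricted_max_fps.has_value()
          ? webrtc::TimeDelta::Seconds(1) / *restricted_max_fps
          : frame_delay;
  return std::max(frame_delay, restricted_delay);
}
// NonIdleRepeatDelay(/*max_fps=*/10, /*restricted_max_fps=*/5)  -> 200 ms
// NonIdleRepeatDelay(/*max_fps=*/10, std::nullopt)              -> 100 ms
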
+ restricted_max_frame_rate_ = max_frame_rate; + if (zero_hertz_adapter_) { + zero_hertz_adapter_->UpdateVideoSourceRestrictions(max_frame_rate); + } +} + void FrameCadenceAdapterImpl::ProcessKeyFrameRequest() { RTC_DCHECK_RUN_ON(queue_); if (zero_hertz_adapter_) @@ -652,7 +974,7 @@ void FrameCadenceAdapterImpl::OnFrame(const VideoFrame& frame) { if (zero_hertz_adapter_created_timestamp_.has_value()) { TimeDelta time_until_first_frame = clock_->CurrentTime() - *zero_hertz_adapter_created_timestamp_; - zero_hertz_adapter_created_timestamp_ = absl::nullopt; + zero_hertz_adapter_created_timestamp_ = std::nullopt; RTC_HISTOGRAM_COUNTS_10000( "WebRTC.Screenshare.ZeroHz.TimeUntilFirstFrameMs", time_until_first_frame.ms()); @@ -661,7 +983,7 @@ void FrameCadenceAdapterImpl::OnFrame(const VideoFrame& frame) { const int frames_scheduled_for_processing = frames_scheduled_for_processing_.fetch_sub(1, std::memory_order_relaxed); - OnFrameOnMainQueue(post_time, frames_scheduled_for_processing, + OnFrameOnMainQueue(post_time, frames_scheduled_for_processing > 1, std::move(frame)); })); } @@ -689,40 +1011,71 @@ void FrameCadenceAdapterImpl::OnConstraintsChanged( })); } -void FrameCadenceAdapterImpl::OnFrameOnMainQueue( - Timestamp post_time, - int frames_scheduled_for_processing, - const VideoFrame& frame) { +void FrameCadenceAdapterImpl::OnFrameOnMainQueue(Timestamp post_time, + bool queue_overload, + const VideoFrame& frame) { RTC_DCHECK_RUN_ON(queue_); - current_adapter_mode_->OnFrame(post_time, frames_scheduled_for_processing, - frame); + current_adapter_mode_->OnFrame(post_time, queue_overload, frame); + if (last_incoming_frame_timestamp_ && + last_incoming_frame_timestamp_ >= + Timestamp::Micros(frame.timestamp_us())) { + RTC_LOG(LS_ERROR) + << "Incoming frame timestamp is not monotonically increasing" + << " current: " << frame.timestamp_us() + << " last: " << last_incoming_frame_timestamp_.value().us(); + incoming_frame_timestamp_monotonically_increasing_ = false; + } + last_incoming_frame_timestamp_ = Timestamp::Micros(frame.timestamp_us()); + Timestamp update_frame_rate_timestamp = + use_video_frame_timestamp_ ? *last_incoming_frame_timestamp_ : post_time; + UpdateFrameRate(update_frame_rate_timestamp); } bool FrameCadenceAdapterImpl::IsZeroHertzScreenshareEnabled() const { RTC_DCHECK_RUN_ON(queue_); - return zero_hertz_screenshare_enabled_ && source_constraints_.has_value() && + return source_constraints_.has_value() && source_constraints_->max_fps.value_or(-1) > 0 && source_constraints_->min_fps.value_or(-1) == 0 && zero_hertz_params_.has_value(); } +void FrameCadenceAdapterImpl::ConfigureCurrentAdapterWithoutZeroHertz() { + // Enable VSyncEncodeAdapterMode if metronome is valid. 
+ if (metronome_) { + RTC_CHECK(vsync_encode_adapter_); + current_adapter_mode_ = vsync_encode_adapter_.get(); + } else { + RTC_CHECK(passthrough_adapter_); + current_adapter_mode_ = &passthrough_adapter_.value(); + } +} + void FrameCadenceAdapterImpl::MaybeReconfigureAdapters( bool was_zero_hertz_enabled) { RTC_DCHECK_RUN_ON(queue_); bool is_zero_hertz_enabled = IsZeroHertzScreenshareEnabled(); if (is_zero_hertz_enabled) { - if (!was_zero_hertz_enabled) { - zero_hertz_adapter_.emplace(queue_, clock_, callback_, - source_constraints_->max_fps.value()); - RTC_LOG(LS_INFO) << "Zero hertz mode activated."; + bool max_fps_has_changed = GetInputFrameRateFps().value_or(-1) != + source_constraints_->max_fps.value_or(-1); + if (!was_zero_hertz_enabled || max_fps_has_changed) { + RTC_LOG(LS_INFO) << "Zero hertz mode enabled (max_fps=" + << source_constraints_->max_fps.value() << ")"; + zero_hertz_adapter_.emplace( + queue_, clock_, callback_, source_constraints_->max_fps.value(), + frames_scheduled_for_processing_, + frame_cadence_adapter_zero_hertz_queue_overload_enabled_); + zero_hertz_adapter_->UpdateVideoSourceRestrictions( + restricted_max_frame_rate_); zero_hertz_adapter_created_timestamp_ = clock_->CurrentTime(); } zero_hertz_adapter_->ReconfigureParameters(zero_hertz_params_.value()); current_adapter_mode_ = &zero_hertz_adapter_.value(); } else { - if (was_zero_hertz_enabled) - zero_hertz_adapter_ = absl::nullopt; - current_adapter_mode_ = &passthrough_adapter_.value(); + if (was_zero_hertz_enabled) { + zero_hertz_adapter_ = std::nullopt; + RTC_LOG(LS_INFO) << "Zero hertz mode disabled."; + } + ConfigureCurrentAdapterWithoutZeroHertz(); } } @@ -731,8 +1084,11 @@ void FrameCadenceAdapterImpl::MaybeReconfigureAdapters( std::unique_ptr FrameCadenceAdapterInterface::Create(Clock* clock, TaskQueueBase* queue, + Metronome* metronome, + TaskQueueBase* worker_queue, const FieldTrialsView& field_trials) { - return std::make_unique(clock, queue, field_trials); + return std::make_unique(clock, queue, metronome, + worker_queue, field_trials); } } // namespace webrtc diff --git a/video/frame_cadence_adapter.h b/video/frame_cadence_adapter.h index d0eab7e770..2f97b5b49f 100644 --- a/video/frame_cadence_adapter.h +++ b/video/frame_cadence_adapter.h @@ -15,6 +15,7 @@ #include "absl/base/attributes.h" #include "api/field_trials_view.h" +#include "api/metronome/metronome.h" #include "api/task_queue/task_queue_base.h" #include "api/units/time_delta.h" #include "api/video/video_frame.h" @@ -29,8 +30,7 @@ namespace webrtc { // With the exception of the constructor and the methods overridden in // VideoSinkInterface, the rest of the interface to this class (including dtor) // needs to happen on the queue passed in Create. -class FrameCadenceAdapterInterface - : public rtc::VideoSinkInterface { +class FrameCadenceAdapterInterface : public VideoSinkInterface { public: // Averaging window spanning 90 frames at default 30fps, matching old media // optimization module defaults. @@ -60,14 +60,11 @@ class FrameCadenceAdapterInterface // The |post_time| parameter indicates the current time sampled when // FrameCadenceAdapterInterface::OnFrame was called. // - // |frames_scheduled_for_processing| indicates how many frames that have - // been scheduled for processing. During sequential conditions where - // FrameCadenceAdapterInterface::OnFrame is invoked and subsequently ending - // up in this callback, this value will read 1. 
Otherwise if the - // |queue| gets stalled for some reason, the value will increase - // beyond 1. + // |queue_overload| is true if the frame cadence adapter notices it's + // not able to deliver the incoming |frame| to the |queue| in the expected + // time. virtual void OnFrame(Timestamp post_time, - int frames_scheduled_for_processing, + bool queue_overload, const VideoFrame& frame) = 0; // Called when the source has discarded a frame. @@ -84,6 +81,8 @@ class FrameCadenceAdapterInterface static std::unique_ptr Create( Clock* clock, TaskQueueBase* queue, + Metronome* metronome, + TaskQueueBase* worker_queue, const FieldTrialsView& field_trials); // Call before using the rest of the API. @@ -93,15 +92,11 @@ class FrameCadenceAdapterInterface // zero-hertz operation. If absl:::nullopt is passed, the cadence adapter will // switch to passthrough mode. virtual void SetZeroHertzModeEnabled( - absl::optional params) = 0; + std::optional params) = 0; // Returns the input framerate. This is measured by RateStatistics when // zero-hertz mode is off, and returns the max framerate in zero-hertz mode. - virtual absl::optional GetInputFrameRateFps() = 0; - - // Updates frame rate. This is done unconditionally irrespective of adapter - // mode. - virtual void UpdateFrameRate() = 0; + virtual std::optional GetInputFrameRateFps() = 0; // Updates quality convergence status for an enabled spatial layer. // Convergence means QP has dropped to a low-enough level to warrant ceasing @@ -112,6 +107,12 @@ class FrameCadenceAdapterInterface // Updates spatial layer enabled status. virtual void UpdateLayerStatus(size_t spatial_index, bool enabled) = 0; + // Updates the restrictions of max frame rate for the video source. + // The new `max_frame_rate` will only affect the cadence of Callback::OnFrame + // for non-idle (non converged) repeated frames. + virtual void UpdateVideoSourceRestrictions( + std::optional max_frame_rate) = 0; + // Conditionally requests a refresh frame via // Callback::RequestRefreshFrame. 
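
To make the revised interface concrete, here is a minimal sketch of how a client could construct the adapter and consume the new queue_overload flag, patterned on the unit tests later in this patch: a null metronome keeps the passthrough/zero-hertz behavior, while a valid metronome plus worker queue enables VSync-aligned encoding. EncoderGlue and MakeAdapter are illustrative names only, and threading and lifetime concerns are omitted.

#include <memory>

#include "api/field_trials_view.h"
#include "api/metronome/metronome.h"
#include "api/task_queue/task_queue_base.h"
#include "api/units/timestamp.h"
#include "api/video/video_frame.h"
#include "system_wrappers/include/clock.h"
#include "video/frame_cadence_adapter.h"

// Illustrative callback: reacts to the queue_overload flag that replaces the
// old frames_scheduled_for_processing count.
class EncoderGlue : public webrtc::FrameCadenceAdapterInterface::Callback {
 public:
  void OnFrame(webrtc::Timestamp post_time,
               bool queue_overload,
               const webrtc::VideoFrame& frame) override {
    if (queue_overload) {
      // The adapter could not keep the expected delivery cadence; e.g. skip
      // non-essential per-frame work here.
    }
    // ... hand `frame` to the encoder ...
  }
  void OnDiscardedFrame() override {}
  void RequestRefreshFrame() override {}
};

std::unique_ptr<webrtc::FrameCadenceAdapterInterface> MakeAdapter(
    webrtc::Clock* clock,
    webrtc::TaskQueueBase* encode_queue,
    webrtc::Metronome* metronome,         // may be null: no VSync alignment
    webrtc::TaskQueueBase* worker_queue,  // required when metronome is set
    const webrtc::FieldTrialsView& field_trials) {
  return webrtc::FrameCadenceAdapterInterface::Create(
      clock, encode_queue, metronome, worker_queue, field_trials);
}
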
virtual void ProcessKeyFrameRequest() = 0; diff --git a/video/frame_cadence_adapter_unittest.cc b/video/frame_cadence_adapter_unittest.cc index 052b0a6c61..fb1eefeb37 100644 --- a/video/frame_cadence_adapter_unittest.cc +++ b/video/frame_cadence_adapter_unittest.cc @@ -10,10 +10,13 @@ #include "video/frame_cadence_adapter.h" +#include +#include #include #include #include "absl/functional/any_invocable.h" +#include "api/metronome/test/fake_metronome.h" #include "api/task_queue/default_task_queue_factory.h" #include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" @@ -24,6 +27,7 @@ #include "rtc_base/event.h" #include "rtc_base/logging.h" #include "rtc_base/rate_statistics.h" +#include "rtc_base/task_queue_for_test.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/metrics.h" #include "system_wrappers/include/ntp_time.h" @@ -38,16 +42,18 @@ namespace { using ::testing::_; using ::testing::ElementsAre; +using ::testing::InSequence; using ::testing::Invoke; using ::testing::InvokeWithoutArgs; using ::testing::Mock; +using ::testing::NiceMock; using ::testing::Pair; using ::testing::Values; VideoFrame CreateFrame() { return VideoFrame::Builder() .set_video_frame_buffer( - rtc::make_ref_counted(/*width=*/16, /*height=*/16)) + make_ref_counted(/*width=*/16, /*height=*/16)) .build(); } @@ -55,7 +61,7 @@ VideoFrame CreateFrameWithTimestamps( GlobalSimulatedTimeController* time_controller) { return VideoFrame::Builder() .set_video_frame_buffer( - rtc::make_ref_counted(/*width=*/16, /*height=*/16)) + make_ref_counted(/*width=*/16, /*height=*/16)) .set_ntp_time_ms(time_controller->GetClock()->CurrentNtpInMilliseconds()) .set_timestamp_us(time_controller->GetClock()->CurrentTime().us()) .build(); @@ -64,64 +70,31 @@ VideoFrame CreateFrameWithTimestamps( std::unique_ptr CreateAdapter( const FieldTrialsView& field_trials, Clock* clock) { - return FrameCadenceAdapterInterface::Create(clock, TaskQueueBase::Current(), - field_trials); + return FrameCadenceAdapterInterface::Create( + clock, TaskQueueBase::Current(), /*metronome=*/nullptr, + /*worker_queue=*/nullptr, field_trials); } class MockCallback : public FrameCadenceAdapterInterface::Callback { public: - MOCK_METHOD(void, OnFrame, (Timestamp, int, const VideoFrame&), (override)); + MOCK_METHOD(void, OnFrame, (Timestamp, bool, const VideoFrame&), (override)); MOCK_METHOD(void, OnDiscardedFrame, (), (override)); MOCK_METHOD(void, RequestRefreshFrame, (), (override)); }; -class ZeroHertzFieldTrialDisabler : public test::ScopedKeyValueConfig { - public: - ZeroHertzFieldTrialDisabler() - : test::ScopedKeyValueConfig("WebRTC-ZeroHertzScreenshare/Disabled/") {} -}; - -class ZeroHertzFieldTrialEnabler : public test::ScopedKeyValueConfig { - public: - ZeroHertzFieldTrialEnabler() - : test::ScopedKeyValueConfig("WebRTC-ZeroHertzScreenshare/Enabled/") {} -}; - -TEST(FrameCadenceAdapterTest, - ForwardsFramesOnConstructionAndUnderDisabledFieldTrial) { - GlobalSimulatedTimeController time_controller(Timestamp::Millis(1)); - ZeroHertzFieldTrialDisabler disabled_field_trials; - test::ScopedKeyValueConfig no_field_trials; - for (int i = 0; i != 2; i++) { - MockCallback callback; - auto adapter = - CreateAdapter(i == 0 ? 
disabled_field_trials : no_field_trials, - time_controller.GetClock()); - adapter->Initialize(&callback); - VideoFrame frame = CreateFrame(); - EXPECT_CALL(callback, OnFrame).Times(1); - adapter->OnFrame(frame); - time_controller.AdvanceTime(TimeDelta::Zero()); - Mock::VerifyAndClearExpectations(&callback); - EXPECT_CALL(callback, OnDiscardedFrame).Times(1); - adapter->OnDiscardedFrame(); - Mock::VerifyAndClearExpectations(&callback); - } -} - TEST(FrameCadenceAdapterTest, CountsOutstandingFramesToProcess) { test::ScopedKeyValueConfig no_field_trials; GlobalSimulatedTimeController time_controller(Timestamp::Millis(1)); MockCallback callback; auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); adapter->Initialize(&callback); - EXPECT_CALL(callback, OnFrame(_, 2, _)).Times(1); - EXPECT_CALL(callback, OnFrame(_, 1, _)).Times(1); + EXPECT_CALL(callback, OnFrame(_, true, _)).Times(1); + EXPECT_CALL(callback, OnFrame(_, false, _)).Times(1); auto frame = CreateFrame(); adapter->OnFrame(frame); adapter->OnFrame(frame); time_controller.AdvanceTime(TimeDelta::Zero()); - EXPECT_CALL(callback, OnFrame(_, 1, _)).Times(1); + EXPECT_CALL(callback, OnFrame(_, false, _)).Times(1); adapter->OnFrame(frame); time_controller.AdvanceTime(TimeDelta::Zero()); } @@ -130,7 +103,8 @@ TEST(FrameCadenceAdapterTest, FrameRateFollowsRateStatisticsByDefault) { test::ScopedKeyValueConfig no_field_trials; GlobalSimulatedTimeController time_controller(Timestamp::Zero()); auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); - adapter->Initialize(nullptr); + MockCallback callback; + adapter->Initialize(&callback); // Create an "oracle" rate statistics which should be followed on a sequence // of frames. @@ -139,57 +113,65 @@ TEST(FrameCadenceAdapterTest, FrameRateFollowsRateStatisticsByDefault) { for (int frame = 0; frame != 10; ++frame) { time_controller.AdvanceTime(TimeDelta::Millis(10)); + std::optional expected_fps = + rate.Rate(time_controller.GetClock()->TimeInMilliseconds()); rate.Update(1, time_controller.GetClock()->TimeInMilliseconds()); - adapter->UpdateFrameRate(); - EXPECT_EQ(rate.Rate(time_controller.GetClock()->TimeInMilliseconds()), - adapter->GetInputFrameRateFps()) + // FrameCadanceAdapter::OnFrame post the frame to another sequence. + adapter->OnFrame(CreateFrameWithTimestamps(&time_controller)); + time_controller.AdvanceTime(TimeDelta::Millis(0)); + EXPECT_EQ(expected_fps, adapter->GetInputFrameRateFps()) << " failed for frame " << frame; } } -TEST(FrameCadenceAdapterTest, - FrameRateFollowsRateStatisticsWhenFeatureDisabled) { - ZeroHertzFieldTrialDisabler feature_disabler; +TEST(FrameCadenceAdapterTest, FrameRateFollowsMaxFpsWhenZeroHertzActivated) { GlobalSimulatedTimeController time_controller(Timestamp::Zero()); - auto adapter = CreateAdapter(feature_disabler, time_controller.GetClock()); - adapter->Initialize(nullptr); - - // Create an "oracle" rate statistics which should be followed on a sequence - // of frames. 
- RateStatistics rate( - FrameCadenceAdapterInterface::kFrameRateAveragingWindowSizeMs, 1000); - + test::ScopedKeyValueConfig no_field_trials; + auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); + MockCallback callback; + adapter->Initialize(&callback); + adapter->SetZeroHertzModeEnabled( + FrameCadenceAdapterInterface::ZeroHertzModeParams{}); + adapter->OnConstraintsChanged(VideoTrackSourceConstraints{0, 1}); for (int frame = 0; frame != 10; ++frame) { time_controller.AdvanceTime(TimeDelta::Millis(10)); - rate.Update(1, time_controller.GetClock()->TimeInMilliseconds()); - adapter->UpdateFrameRate(); - EXPECT_EQ(rate.Rate(time_controller.GetClock()->TimeInMilliseconds()), - adapter->GetInputFrameRateFps()) - << " failed for frame " << frame; + // FrameCadanceAdapter::OnFrame post the frame to another sequence. + adapter->OnFrame(CreateFrameWithTimestamps(&time_controller)); + time_controller.AdvanceTime(TimeDelta::Millis(0)); + EXPECT_EQ(adapter->GetInputFrameRateFps(), 1u); } } -TEST(FrameCadenceAdapterTest, FrameRateFollowsMaxFpsWhenZeroHertzActivated) { - ZeroHertzFieldTrialEnabler enabler; +TEST(FrameCadenceAdapterTest, ZeroHertzAdapterSupportsMaxFpsChange) { GlobalSimulatedTimeController time_controller(Timestamp::Zero()); - auto adapter = CreateAdapter(enabler, time_controller.GetClock()); - adapter->Initialize(nullptr); + test::ScopedKeyValueConfig no_field_trials; + auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); + MockCallback callback; + adapter->Initialize(&callback); adapter->SetZeroHertzModeEnabled( FrameCadenceAdapterInterface::ZeroHertzModeParams{}); adapter->OnConstraintsChanged(VideoTrackSourceConstraints{0, 1}); - for (int frame = 0; frame != 10; ++frame) { - time_controller.AdvanceTime(TimeDelta::Millis(10)); - adapter->UpdateFrameRate(); - EXPECT_EQ(adapter->GetInputFrameRateFps(), 1u); - } + time_controller.AdvanceTime(TimeDelta::Zero()); + EXPECT_EQ(adapter->GetInputFrameRateFps(), 1u); + adapter->OnFrame(CreateFrame()); + time_controller.AdvanceTime(TimeDelta::Seconds(1)); + adapter->OnConstraintsChanged(VideoTrackSourceConstraints{0, 2}); + time_controller.AdvanceTime(TimeDelta::Zero()); + EXPECT_EQ(adapter->GetInputFrameRateFps(), 2u); + adapter->OnFrame(CreateFrame()); + // Ensure that the max_fps has been changed from 1 to 2 fps even if it was + // changed while zero hertz was already active. 
+ EXPECT_CALL(callback, OnFrame); + time_controller.AdvanceTime(TimeDelta::Millis(500)); } TEST(FrameCadenceAdapterTest, FrameRateFollowsRateStatisticsAfterZeroHertzDeactivated) { - ZeroHertzFieldTrialEnabler enabler; GlobalSimulatedTimeController time_controller(Timestamp::Zero()); - auto adapter = CreateAdapter(enabler, time_controller.GetClock()); - adapter->Initialize(nullptr); + test::ScopedKeyValueConfig no_field_trials; + auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); + MockCallback callback; + adapter->Initialize(&callback); adapter->SetZeroHertzModeEnabled( FrameCadenceAdapterInterface::ZeroHertzModeParams{}); adapter->OnConstraintsChanged(VideoTrackSourceConstraints{0, 1}); @@ -199,25 +181,26 @@ TEST(FrameCadenceAdapterTest, for (int frame = 0; frame != MAX; ++frame) { time_controller.AdvanceTime(TimeDelta::Millis(10)); rate.Update(1, time_controller.GetClock()->TimeInMilliseconds()); - adapter->UpdateFrameRate(); + adapter->OnFrame(CreateFrameWithTimestamps(&time_controller)); + time_controller.AdvanceTime(TimeDelta::Millis(0)); } // Turn off zero hertz on the next-last frame; after the last frame we // should see a value that tracks the rate oracle. - adapter->SetZeroHertzModeEnabled(absl::nullopt); + adapter->SetZeroHertzModeEnabled(std::nullopt); // Last frame. time_controller.AdvanceTime(TimeDelta::Millis(10)); - rate.Update(1, time_controller.GetClock()->TimeInMilliseconds()); - adapter->UpdateFrameRate(); + adapter->OnFrame(CreateFrameWithTimestamps(&time_controller)); + time_controller.AdvanceTime(TimeDelta::Millis(0)); EXPECT_EQ(rate.Rate(time_controller.GetClock()->TimeInMilliseconds()), adapter->GetInputFrameRateFps()); } TEST(FrameCadenceAdapterTest, ForwardsFramesDelayed) { - ZeroHertzFieldTrialEnabler enabler; MockCallback callback; GlobalSimulatedTimeController time_controller(Timestamp::Zero()); - auto adapter = CreateAdapter(enabler, time_controller.GetClock()); + test::ScopedKeyValueConfig no_field_trials; + auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); adapter->Initialize(&callback); adapter->SetZeroHertzModeEnabled( FrameCadenceAdapterInterface::ZeroHertzModeParams{}); @@ -230,13 +213,13 @@ TEST(FrameCadenceAdapterTest, ForwardsFramesDelayed) { EXPECT_CALL(callback, OnFrame).Times(0); adapter->OnFrame(frame); EXPECT_CALL(callback, OnFrame) - .WillOnce(Invoke([&](Timestamp post_time, int, + .WillOnce(Invoke([&](Timestamp post_time, bool, const VideoFrame& frame) { EXPECT_EQ(post_time, time_controller.GetClock()->CurrentTime()); EXPECT_EQ(frame.timestamp_us(), - original_timestamp_us + index * rtc::kNumMicrosecsPerSec); - EXPECT_EQ(frame.ntp_time_ms(), original_ntp_time.ToMs() + - index * rtc::kNumMillisecsPerSec); + original_timestamp_us + index * kNumMicrosecsPerSec); + EXPECT_EQ(frame.ntp_time_ms(), + original_ntp_time.ToMs() + index * kNumMillisecsPerSec); })); time_controller.AdvanceTime(TimeDelta::Seconds(1)); frame = CreateFrameWithTimestamps(&time_controller); @@ -244,9 +227,9 @@ TEST(FrameCadenceAdapterTest, ForwardsFramesDelayed) { } TEST(FrameCadenceAdapterTest, DelayedProcessingUnderSlightContention) { - ZeroHertzFieldTrialEnabler enabler; GlobalSimulatedTimeController time_controller(Timestamp::Zero()); - auto adapter = CreateAdapter(enabler, time_controller.GetClock()); + test::ScopedKeyValueConfig no_field_trials; + auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); MockCallback callback; adapter->Initialize(&callback); adapter->SetZeroHertzModeEnabled( @@ -266,9 
+249,9 @@ TEST(FrameCadenceAdapterTest, DelayedProcessingUnderSlightContention) { } TEST(FrameCadenceAdapterTest, DelayedProcessingUnderHeavyContention) { - ZeroHertzFieldTrialEnabler enabler; GlobalSimulatedTimeController time_controller(Timestamp::Zero()); - auto adapter = CreateAdapter(enabler, time_controller.GetClock()); + test::ScopedKeyValueConfig no_field_trials; + auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); MockCallback callback; adapter->Initialize(&callback); adapter->SetZeroHertzModeEnabled( @@ -285,6 +268,7 @@ TEST(FrameCadenceAdapterTest, DelayedProcessingUnderHeavyContention) { })); adapter->OnFrame(CreateFrame()); time_controller.SkipForwardBy(time_skipped); + time_controller.AdvanceTime(TimeDelta::Zero()); } TEST(FrameCadenceAdapterTest, RepeatsFramesDelayed) { @@ -293,10 +277,10 @@ TEST(FrameCadenceAdapterTest, RepeatsFramesDelayed) { // clock is initialized running from 0. For this reason we choose the // `time_controller` initialization constant to something arbitrary which is // not 0. - ZeroHertzFieldTrialEnabler enabler; MockCallback callback; GlobalSimulatedTimeController time_controller(Timestamp::Millis(47892223)); - auto adapter = CreateAdapter(enabler, time_controller.GetClock()); + test::ScopedKeyValueConfig no_field_trials; + auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); adapter->Initialize(&callback); adapter->SetZeroHertzModeEnabled( FrameCadenceAdapterInterface::ZeroHertzModeParams{}); @@ -309,7 +293,7 @@ TEST(FrameCadenceAdapterTest, RepeatsFramesDelayed) { adapter->OnFrame(frame); EXPECT_CALL(callback, OnFrame) - .WillOnce(Invoke([&](Timestamp post_time, int, const VideoFrame& frame) { + .WillOnce(Invoke([&](Timestamp post_time, bool, const VideoFrame& frame) { EXPECT_EQ(post_time, time_controller.GetClock()->CurrentTime()); EXPECT_EQ(frame.timestamp_us(), original_timestamp_us); EXPECT_EQ(frame.ntp_time_ms(), original_ntp_time.ToMs()); @@ -318,23 +302,23 @@ TEST(FrameCadenceAdapterTest, RepeatsFramesDelayed) { Mock::VerifyAndClearExpectations(&callback); EXPECT_CALL(callback, OnFrame) - .WillOnce(Invoke([&](Timestamp post_time, int, const VideoFrame& frame) { + .WillOnce(Invoke([&](Timestamp post_time, bool, const VideoFrame& frame) { EXPECT_EQ(post_time, time_controller.GetClock()->CurrentTime()); EXPECT_EQ(frame.timestamp_us(), - original_timestamp_us + rtc::kNumMicrosecsPerSec); + original_timestamp_us + kNumMicrosecsPerSec); EXPECT_EQ(frame.ntp_time_ms(), - original_ntp_time.ToMs() + rtc::kNumMillisecsPerSec); + original_ntp_time.ToMs() + kNumMillisecsPerSec); })); time_controller.AdvanceTime(TimeDelta::Seconds(1)); Mock::VerifyAndClearExpectations(&callback); EXPECT_CALL(callback, OnFrame) - .WillOnce(Invoke([&](Timestamp post_time, int, const VideoFrame& frame) { + .WillOnce(Invoke([&](Timestamp post_time, bool, const VideoFrame& frame) { EXPECT_EQ(post_time, time_controller.GetClock()->CurrentTime()); EXPECT_EQ(frame.timestamp_us(), - original_timestamp_us + 2 * rtc::kNumMicrosecsPerSec); + original_timestamp_us + 2 * kNumMicrosecsPerSec); EXPECT_EQ(frame.ntp_time_ms(), - original_ntp_time.ToMs() + 2 * rtc::kNumMillisecsPerSec); + original_ntp_time.ToMs() + 2 * kNumMillisecsPerSec); })); time_controller.AdvanceTime(TimeDelta::Seconds(1)); } @@ -347,10 +331,10 @@ TEST(FrameCadenceAdapterTest, // it to zero, but select unset timestamps in the frames (via CreateFrame()) // and verify that the timestamp modifying logic doesn't depend on the current // time. 
- ZeroHertzFieldTrialEnabler enabler; MockCallback callback; GlobalSimulatedTimeController time_controller(Timestamp::Millis(4711)); - auto adapter = CreateAdapter(enabler, time_controller.GetClock()); + test::ScopedKeyValueConfig no_field_trials; + auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); adapter->Initialize(&callback); adapter->SetZeroHertzModeEnabled( FrameCadenceAdapterInterface::ZeroHertzModeParams{}); @@ -359,7 +343,7 @@ TEST(FrameCadenceAdapterTest, // Send one frame, expect a repeat. adapter->OnFrame(CreateFrame()); EXPECT_CALL(callback, OnFrame) - .WillOnce(Invoke([&](Timestamp post_time, int, const VideoFrame& frame) { + .WillOnce(Invoke([&](Timestamp post_time, bool, const VideoFrame& frame) { EXPECT_EQ(post_time, time_controller.GetClock()->CurrentTime()); EXPECT_EQ(frame.timestamp_us(), 0); EXPECT_EQ(frame.ntp_time_ms(), 0); @@ -367,7 +351,7 @@ TEST(FrameCadenceAdapterTest, time_controller.AdvanceTime(TimeDelta::Seconds(1)); Mock::VerifyAndClearExpectations(&callback); EXPECT_CALL(callback, OnFrame) - .WillOnce(Invoke([&](Timestamp post_time, int, const VideoFrame& frame) { + .WillOnce(Invoke([&](Timestamp post_time, bool, const VideoFrame& frame) { EXPECT_EQ(post_time, time_controller.GetClock()->CurrentTime()); EXPECT_EQ(frame.timestamp_us(), 0); EXPECT_EQ(frame.ntp_time_ms(), 0); @@ -380,10 +364,10 @@ TEST(FrameCadenceAdapterTest, StopsRepeatingFramesDelayed) { // At 2s, the repeated initial frame appears. // At 2.5s, we schedule another new frame. // At 3.5s, we receive this frame. - ZeroHertzFieldTrialEnabler enabler; MockCallback callback; GlobalSimulatedTimeController time_controller(Timestamp::Zero()); - auto adapter = CreateAdapter(enabler, time_controller.GetClock()); + test::ScopedKeyValueConfig no_field_trials; + auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); adapter->Initialize(&callback); adapter->SetZeroHertzModeEnabled( FrameCadenceAdapterInterface::ZeroHertzModeParams{}); @@ -399,19 +383,19 @@ TEST(FrameCadenceAdapterTest, StopsRepeatingFramesDelayed) { // Send the new frame at 2.5s, which should appear after 3.5s. 
adapter->OnFrame(CreateFrameWithTimestamps(&time_controller)); EXPECT_CALL(callback, OnFrame) - .WillOnce(Invoke([&](Timestamp, int, const VideoFrame& frame) { - EXPECT_EQ(frame.timestamp_us(), 5 * rtc::kNumMicrosecsPerSec / 2); + .WillOnce(Invoke([&](Timestamp, bool, const VideoFrame& frame) { + EXPECT_EQ(frame.timestamp_us(), 5 * kNumMicrosecsPerSec / 2); EXPECT_EQ(frame.ntp_time_ms(), - original_ntp_time.ToMs() + 5u * rtc::kNumMillisecsPerSec / 2); + original_ntp_time.ToMs() + 5u * kNumMillisecsPerSec / 2); })); time_controller.AdvanceTime(TimeDelta::Seconds(1)); } TEST(FrameCadenceAdapterTest, RequestsRefreshFrameOnKeyFrameRequestWhenNew) { - ZeroHertzFieldTrialEnabler enabler; MockCallback callback; GlobalSimulatedTimeController time_controller(Timestamp::Zero()); - auto adapter = CreateAdapter(enabler, time_controller.GetClock()); + test::ScopedKeyValueConfig no_field_trials; + auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); adapter->Initialize(&callback); adapter->SetZeroHertzModeEnabled( FrameCadenceAdapterInterface::ZeroHertzModeParams{}); @@ -426,10 +410,10 @@ TEST(FrameCadenceAdapterTest, RequestsRefreshFrameOnKeyFrameRequestWhenNew) { } TEST(FrameCadenceAdapterTest, IgnoresKeyFrameRequestShortlyAfterFrame) { - ZeroHertzFieldTrialEnabler enabler; MockCallback callback; GlobalSimulatedTimeController time_controller(Timestamp::Zero()); - auto adapter = CreateAdapter(enabler, time_controller.GetClock()); + test::ScopedKeyValueConfig no_field_trials; + auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); adapter->Initialize(&callback); adapter->SetZeroHertzModeEnabled( FrameCadenceAdapterInterface::ZeroHertzModeParams{}); @@ -441,10 +425,10 @@ TEST(FrameCadenceAdapterTest, IgnoresKeyFrameRequestShortlyAfterFrame) { } TEST(FrameCadenceAdapterTest, RequestsRefreshFramesUntilArrival) { - ZeroHertzFieldTrialEnabler enabler; MockCallback callback; GlobalSimulatedTimeController time_controller(Timestamp::Zero()); - auto adapter = CreateAdapter(enabler, time_controller.GetClock()); + test::ScopedKeyValueConfig no_field_trials; + auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); adapter->Initialize(&callback); adapter->SetZeroHertzModeEnabled( FrameCadenceAdapterInterface::ZeroHertzModeParams{}); @@ -467,10 +451,10 @@ TEST(FrameCadenceAdapterTest, RequestsRefreshFramesUntilArrival) { } TEST(FrameCadenceAdapterTest, RequestsRefreshAfterFrameDrop) { - ZeroHertzFieldTrialEnabler enabler; MockCallback callback; GlobalSimulatedTimeController time_controller(Timestamp::Zero()); - auto adapter = CreateAdapter(enabler, time_controller.GetClock()); + test::ScopedKeyValueConfig no_field_trials; + auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); adapter->Initialize(&callback); adapter->SetZeroHertzModeEnabled( FrameCadenceAdapterInterface::ZeroHertzModeParams{}); @@ -508,10 +492,10 @@ TEST(FrameCadenceAdapterTest, RequestsRefreshAfterFrameDrop) { } TEST(FrameCadenceAdapterTest, OmitsRefreshAfterFrameDropWithTimelyFrameEntry) { - ZeroHertzFieldTrialEnabler enabler; MockCallback callback; GlobalSimulatedTimeController time_controller(Timestamp::Zero()); - auto adapter = CreateAdapter(enabler, time_controller.GetClock()); + test::ScopedKeyValueConfig no_field_trials; + auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); adapter->Initialize(&callback); adapter->SetZeroHertzModeEnabled( FrameCadenceAdapterInterface::ZeroHertzModeParams{}); @@ -547,10 +531,10 @@ 
TEST(FrameCadenceAdapterTest, OmitsRefreshAfterFrameDropWithTimelyFrameEntry) { TEST(FrameCadenceAdapterTest, AcceptsUnconfiguredLayerFeedback) { // This is a regression test for bugs.webrtc.org/14417. - ZeroHertzFieldTrialEnabler enabler; MockCallback callback; GlobalSimulatedTimeController time_controller(Timestamp::Zero()); - auto adapter = CreateAdapter(enabler, time_controller.GetClock()); + test::ScopedKeyValueConfig no_field_trials; + auto adapter = CreateAdapter(no_field_trials, time_controller.GetClock()); adapter->Initialize(&callback); adapter->SetZeroHertzModeEnabled( FrameCadenceAdapterInterface::ZeroHertzModeParams{.num_simulcast_layers = @@ -564,13 +548,14 @@ TEST(FrameCadenceAdapterTest, AcceptsUnconfiguredLayerFeedback) { } TEST(FrameCadenceAdapterTest, IgnoresDropInducedCallbacksPostDestruction) { - ZeroHertzFieldTrialEnabler enabler; auto callback = std::make_unique(); GlobalSimulatedTimeController time_controller(Timestamp::Zero()); auto queue = time_controller.GetTaskQueueFactory()->CreateTaskQueue( "queue", TaskQueueFactory::Priority::NORMAL); + test::ScopedKeyValueConfig no_field_trials; auto adapter = FrameCadenceAdapterInterface::Create( - time_controller.GetClock(), queue.get(), enabler); + time_controller.GetClock(), queue.get(), /*metronome=*/nullptr, + /*worker_queue=*/nullptr, no_field_trials); queue->PostTask([&adapter, &callback] { adapter->Initialize(callback.get()); adapter->SetZeroHertzModeEnabled( @@ -586,6 +571,110 @@ TEST(FrameCadenceAdapterTest, IgnoresDropInducedCallbacksPostDestruction) { time_controller.AdvanceTime(3 * TimeDelta::Seconds(1) / kMaxFps); } +TEST(FrameCadenceAdapterTest, EncodeFramesAreAlignedWithMetronomeTick) { + GlobalSimulatedTimeController time_controller(Timestamp::Zero()); + // Here the metronome interval is 33ms, because the metronome is not + // infrequent then the encode tasks are aligned with the tick period. + static constexpr TimeDelta kTickPeriod = TimeDelta::Millis(33); + auto queue = time_controller.GetTaskQueueFactory()->CreateTaskQueue( + "queue", TaskQueueFactory::Priority::NORMAL); + auto worker_queue = time_controller.GetTaskQueueFactory()->CreateTaskQueue( + "work_queue", TaskQueueFactory::Priority::NORMAL); + test::FakeMetronome metronome(kTickPeriod); + test::ScopedKeyValueConfig no_field_trials; + auto adapter = FrameCadenceAdapterInterface::Create( + time_controller.GetClock(), queue.get(), &metronome, worker_queue.get(), + no_field_trials); + MockCallback callback; + adapter->Initialize(&callback); + auto frame = CreateFrame(); + + // `callback->OnFrame()` would not be called if only 32ms went by after + // `adapter->OnFrame()`. + EXPECT_CALL(callback, OnFrame(_, false, _)).Times(0); + adapter->OnFrame(frame); + time_controller.AdvanceTime(TimeDelta::Millis(32)); + Mock::VerifyAndClearExpectations(&callback); + + // `callback->OnFrame()` should be called if 33ms went by after + // `adapter->OnFrame()`. + EXPECT_CALL(callback, OnFrame(_, false, _)).Times(1); + time_controller.AdvanceTime(TimeDelta::Millis(1)); + Mock::VerifyAndClearExpectations(&callback); + + // `callback->OnFrame()` would not be called if only 32ms went by after + // `adapter->OnFrame()`. + EXPECT_CALL(callback, OnFrame(_, false, _)).Times(0); + // Send two frame before next tick. + adapter->OnFrame(frame); + adapter->OnFrame(frame); + time_controller.AdvanceTime(TimeDelta::Millis(32)); + Mock::VerifyAndClearExpectations(&callback); + + // `callback->OnFrame()` should be called if 33ms went by after + // `adapter->OnFrame()`. 
+ EXPECT_CALL(callback, OnFrame(_, false, _)).Times(2); + time_controller.AdvanceTime(TimeDelta::Millis(1)); + Mock::VerifyAndClearExpectations(&callback); + + // Change the metronome tick period to 67ms (15Hz). + metronome.SetTickPeriod(TimeDelta::Millis(67)); + // Expect the encode would happen immediately. + EXPECT_CALL(callback, OnFrame(_, false, _)).Times(1); + adapter->OnFrame(frame); + time_controller.AdvanceTime(TimeDelta::Zero()); + Mock::VerifyAndClearExpectations(&callback); + + // Change the metronome tick period to 16ms (60Hz). + metronome.SetTickPeriod(TimeDelta::Millis(16)); + // Expect the encode would not happen if only 15ms went by after + // `adapter->OnFrame()`. + EXPECT_CALL(callback, OnFrame(_, false, _)).Times(0); + adapter->OnFrame(frame); + time_controller.AdvanceTime(TimeDelta::Millis(15)); + Mock::VerifyAndClearExpectations(&callback); + // `callback->OnFrame()` should be called if 16ms went by after + // `adapter->OnFrame()`. + EXPECT_CALL(callback, OnFrame(_, false, _)).Times(1); + time_controller.AdvanceTime(TimeDelta::Millis(1)); + Mock::VerifyAndClearExpectations(&callback); + + Event finalized; + queue->PostTask([&] { + adapter = nullptr; + finalized.Set(); + }); + finalized.Wait(Event::kForever); +} + +TEST(FrameCadenceAdapterTest, ShutdownUnderMetronome) { + // Regression test for crbug.com/356423094. + // The current thread takes the role of worker queue. + GlobalSimulatedTimeController time_controller(Timestamp::Zero()); + static constexpr TimeDelta kTickPeriod = TimeDelta::Millis(100); + auto queue = time_controller.GetTaskQueueFactory()->CreateTaskQueue( + "queue", TaskQueueFactory::Priority::NORMAL); + test::FakeMetronome metronome(kTickPeriod); + test::ScopedKeyValueConfig no_field_trials; + auto adapter = FrameCadenceAdapterInterface::Create( + time_controller.GetClock(), queue.get(), &metronome, + TaskQueueBase::Current(), no_field_trials); + MockCallback callback; + EXPECT_CALL(callback, OnFrame).Times(0); + adapter->Initialize(&callback); + + // Pass a frame, this is expected to trigger an encode call in the future. + adapter->OnFrame(CreateFrame()); + + // Then post destruction of the adapter and destroy the encode queue from the + // worker (i.e. current). + SendTask(queue.get(), [&] { adapter = nullptr; }); + queue = nullptr; + + // Now that we advance time, there should be no encoding happening. 
+ time_controller.AdvanceTime(TimeDelta::Millis(100)); +} + class FrameCadenceAdapterSimulcastLayersParamTest : public ::testing::TestWithParam { public: @@ -609,11 +698,11 @@ class FrameCadenceAdapterSimulcastLayersParamTest int NumSpatialLayers() const { return GetParam(); } protected: - ZeroHertzFieldTrialEnabler enabler_; + test::ScopedKeyValueConfig no_field_trials_; MockCallback callback_; GlobalSimulatedTimeController time_controller_{Timestamp::Zero()}; const std::unique_ptr adapter_{ - CreateAdapter(enabler_, time_controller_.GetClock())}; + CreateAdapter(no_field_trials_, time_controller_.GetClock())}; }; TEST_P(FrameCadenceAdapterSimulcastLayersParamTest, @@ -708,6 +797,8 @@ class ZeroHertzLayerQualityConvergenceTest : public ::testing::Test { static constexpr TimeDelta kMinFrameDelay = TimeDelta::Millis(100); static constexpr TimeDelta kIdleFrameDelay = FrameCadenceAdapterInterface::kZeroHertzIdleRepeatRatePeriod; + // Restricts non-idle repeat rate to 5 fps (default is 10 fps); + static constexpr int kRestrictedMaxFps = 5; ZeroHertzLayerQualityConvergenceTest() { adapter_->Initialize(&callback_); @@ -725,7 +816,7 @@ class ZeroHertzLayerQualityConvergenceTest : public ::testing::Test { std::initializer_list list) { Timestamp origin = time_controller_.GetClock()->CurrentTime(); for (auto delay : list) { - EXPECT_CALL(callback_, OnFrame(origin + delay, _, _)); + EXPECT_CALL(callback_, OnFrame(origin + delay, false, _)); time_controller_.AdvanceTime(origin + delay - time_controller_.GetClock()->CurrentTime()); } @@ -736,11 +827,11 @@ class ZeroHertzLayerQualityConvergenceTest : public ::testing::Test { } protected: - ZeroHertzFieldTrialEnabler field_trial_enabler_; + test::ScopedKeyValueConfig no_field_trials_; MockCallback callback_; GlobalSimulatedTimeController time_controller_{Timestamp::Zero()}; std::unique_ptr adapter_{ - CreateAdapter(field_trial_enabler_, time_controller_.GetClock())}; + CreateAdapter(no_field_trials_, time_controller_.GetClock())}; }; TEST_F(ZeroHertzLayerQualityConvergenceTest, InitialStateUnconverged) { @@ -810,6 +901,100 @@ TEST_F(ZeroHertzLayerQualityConvergenceTest, }); } +TEST_F(ZeroHertzLayerQualityConvergenceTest, + UnconvergedRepeatRateAdaptsDownWhenRestricted) { + PassFrame(); + ScheduleDelayed(1.5 * kMinFrameDelay, [&] { + adapter_->UpdateVideoSourceRestrictions(kRestrictedMaxFps); + }); + ExpectFrameEntriesAtDelaysFromNow({ + 1 * kMinFrameDelay, // Original frame emitted at non-restricted rate. + + // 1.5 * kMinFrameDelay: restricts max fps to 5 fps which should result + // in a new non-idle repeat delay of 2 * kMinFrameDelay. + 2 * kMinFrameDelay, // Unconverged repeat at non-restricted rate. + 4 * kMinFrameDelay, // Unconverged repeats at restricted rate. This + // happens 2 * kMinFrameDelay after the last frame. + 6 * kMinFrameDelay, // ... + }); +} + +TEST_F(ZeroHertzLayerQualityConvergenceTest, + UnconvergedRepeatRateAdaptsUpWhenGoingFromRestrictedToUnrestricted) { + PassFrame(); + ScheduleDelayed(1.5 * kMinFrameDelay, [&] { + adapter_->UpdateVideoSourceRestrictions(kRestrictedMaxFps); + }); + ScheduleDelayed(5.5 * kMinFrameDelay, [&] { + adapter_->UpdateVideoSourceRestrictions(std::nullopt); + }); + ExpectFrameEntriesAtDelaysFromNow({ + 1 * kMinFrameDelay, // Original frame emitted at non-restricted rate. + + // 1.5 * kMinFrameDelay: restricts max fps to 5 fps which should result + // in a new non-idle repeat delay of 2 * kMinFrameDelay. + 2 * kMinFrameDelay, // Unconverged repeat at non-restricted rate. 
+ 4 * kMinFrameDelay, // Unconverged repeat at restricted rate. + + // 5.5 * kMinFrameDelay: removes frame-rate restriction and we should + // then go back to 10 fps as unconverged repeat rate. + 6 * kMinFrameDelay, // Last unconverged repeat at restricted rate. + 7 * kMinFrameDelay, // Back to unconverged repeat at non-restricted rate. + 8 * kMinFrameDelay, // We are now unrestricted. + 9 * kMinFrameDelay, // ... + }); +} + +TEST_F(ZeroHertzLayerQualityConvergenceTest, + UnconvergedRepeatRateMaintainsRestrictionOnReconfigureToHigherMaxFps) { + PassFrame(); + ScheduleDelayed(1.5 * kMinFrameDelay, [&] { + adapter_->UpdateVideoSourceRestrictions(kRestrictedMaxFps); + }); + ScheduleDelayed(2.5 * kMinFrameDelay, [&] { + adapter_->OnConstraintsChanged(VideoTrackSourceConstraints{ + /*min_fps=*/0, /*max_fps=*/2 * TimeDelta::Seconds(1) / kMinFrameDelay}); + }); + ScheduleDelayed(3 * kMinFrameDelay, [&] { PassFrame(); }); + ScheduleDelayed(8 * kMinFrameDelay, [&] { + adapter_->OnConstraintsChanged(VideoTrackSourceConstraints{ + /*min_fps=*/0, + /*max_fps=*/0.2 * TimeDelta::Seconds(1) / kMinFrameDelay}); + }); + ScheduleDelayed(9 * kMinFrameDelay, [&] { PassFrame(); }); + ExpectFrameEntriesAtDelaysFromNow({ + 1 * kMinFrameDelay, // Original frame emitted at non-restricted rate. + + // 1.5 * kMinFrameDelay: restricts max fps to 5 fps which should result + // in a new non-idle repeat delay of 2 * kMinFrameDelay. + 2 * kMinFrameDelay, // Unconverged repeat at non-restricted rate. + + // 2.5 * kMinFrameDelay: new constraint asks for max rate of 20 fps. + // The 0Hz adapter is reconstructed for 20 fps but inherits the current + // restriction for rate of non-converged frames of 5 fps. + + // A new frame is passed at 3 * kMinFrameDelay. The previous repeat + // cadence was stopped by the change in constraints. + 3.5 * kMinFrameDelay, // Original frame emitted at non-restricted 20 fps. + // The delay is 0.5 * kMinFrameDelay. + 5.5 * kMinFrameDelay, // Unconverged repeat at restricted rate. + // The delay is 2 * kMinFrameDelay when restricted. + 7.5 * kMinFrameDelay, // ... + + // 8 * kMinFrameDelay: new constraint asks for max rate of 2 fps. + // The 0Hz adapter is reconstructed for 2 fps and will therefore not obey + // the current restriction for rate of non-converged frames of 5 fps + // since the new max rate is lower. + + // A new frame is passed at 9 * kMinFrameDelay. The previous repeat + // cadence was stopped by the change in constraints. + 14 * kMinFrameDelay, // Original frame emitted at non-restricted 2 fps. + // The delay is 5 * kMinFrameDelay. + 19 * kMinFrameDelay, // Unconverged repeat at non-restricted rate. + 24 * kMinFrameDelay, // ... 
+ }); +} + class FrameCadenceAdapterMetricsTest : public ::testing::Test { public: FrameCadenceAdapterMetricsTest() : time_controller_(Timestamp::Millis(1)) { @@ -837,6 +1022,24 @@ TEST_F(FrameCadenceAdapterMetricsTest, RecordsTimeUntilFirstFrame) { ElementsAre(Pair(666, 1))); } +TEST_F(FrameCadenceAdapterMetricsTest, + RecordsFrameTimestampMonotonicallyIncreasing) { + MockCallback callback; + test::ScopedKeyValueConfig no_field_trials; + std::unique_ptr adapter = + CreateAdapter(no_field_trials, time_controller_.GetClock()); + adapter->Initialize(&callback); + time_controller_.AdvanceTime(TimeDelta::Millis(666)); + adapter->OnFrame(CreateFrameWithTimestamps(&time_controller_)); + adapter->OnFrame(CreateFrameWithTimestamps(&time_controller_)); + time_controller_.AdvanceTime(TimeDelta::Zero()); + adapter = nullptr; + DepleteTaskQueues(); + EXPECT_THAT(metrics::Samples( + "WebRTC.Video.InputFrameTimestampMonotonicallyIncreasing"), + ElementsAre(Pair(false, 1))); +} + TEST(FrameCadenceAdapterRealTimeTest, TimestampsDoNotDrift) { // This regression test must be performed in realtime because of limitations // in GlobalSimulatedTimeController. @@ -847,16 +1050,16 @@ TEST(FrameCadenceAdapterRealTimeTest, TimestampsDoNotDrift) { auto factory = CreateDefaultTaskQueueFactory(); auto queue = factory->CreateTaskQueue("test", TaskQueueFactory::Priority::NORMAL); - ZeroHertzFieldTrialEnabler enabler; MockCallback callback; Clock* clock = Clock::GetRealTimeClock(); std::unique_ptr adapter; int frame_counter = 0; int64_t original_ntp_time_ms; int64_t original_timestamp_us; - rtc::Event event; + Event event; + test::ScopedKeyValueConfig no_field_trials; queue->PostTask([&] { - adapter = CreateAdapter(enabler, clock); + adapter = CreateAdapter(no_field_trials, clock); adapter->Initialize(&callback); adapter->SetZeroHertzModeEnabled( FrameCadenceAdapterInterface::ZeroHertzModeParams{}); @@ -866,10 +1069,10 @@ TEST(FrameCadenceAdapterRealTimeTest, TimestampsDoNotDrift) { frame.set_ntp_time_ms(original_ntp_time_ms); original_timestamp_us = clock->CurrentTime().us(); frame.set_timestamp_us(original_timestamp_us); - constexpr int kSleepMs = rtc::kNumMillisecsPerSec / 2; + constexpr int kSleepMs = kNumMillisecsPerSec / 2; EXPECT_CALL(callback, OnFrame) .WillRepeatedly( - Invoke([&](Timestamp, int, const VideoFrame& incoming_frame) { + Invoke([&](Timestamp, bool, const VideoFrame& incoming_frame) { ++frame_counter; // Avoid the first OnFrame and sleep on the second. if (frame_counter == 2) { @@ -884,27 +1087,16 @@ TEST(FrameCadenceAdapterRealTimeTest, TimestampsDoNotDrift) { })); adapter->OnFrame(frame); }); - event.Wait(rtc::Event::kForever); - rtc::Event finalized; + event.Wait(Event::kForever); + Event finalized; queue->PostTask([&] { adapter = nullptr; finalized.Set(); }); - finalized.Wait(rtc::Event::kForever); + finalized.Wait(Event::kForever); } -// TODO(bugs.webrtc.org/15462) Disable ScheduledRepeatAllowsForSlowEncode for -// TaskQueueLibevent. -#if defined(WEBRTC_ENABLE_LIBEVENT) -#define MAYBE_ScheduledRepeatAllowsForSlowEncode \ - DISABLED_ScheduledRepeatAllowsForSlowEncode -#else -#define MAYBE_ScheduledRepeatAllowsForSlowEncode \ - ScheduledRepeatAllowsForSlowEncode -#endif - -TEST(FrameCadenceAdapterRealTimeTest, - MAYBE_ScheduledRepeatAllowsForSlowEncode) { +TEST(FrameCadenceAdapterRealTimeTest, ScheduledRepeatAllowsForSlowEncode) { // This regression test must be performed in realtime because of limitations // in GlobalSimulatedTimeController. 
// @@ -914,15 +1106,15 @@ TEST(FrameCadenceAdapterRealTimeTest, auto factory = CreateDefaultTaskQueueFactory(); auto queue = factory->CreateTaskQueue("test", TaskQueueFactory::Priority::NORMAL); - ZeroHertzFieldTrialEnabler enabler; MockCallback callback; Clock* clock = Clock::GetRealTimeClock(); std::unique_ptr adapter; int frame_counter = 0; - rtc::Event event; - absl::optional start_time; + Event event; + std::optional start_time; + test::ScopedKeyValueConfig no_field_trials; queue->PostTask([&] { - adapter = CreateAdapter(enabler, clock); + adapter = CreateAdapter(no_field_trials, clock); adapter->Initialize(&callback); adapter->SetZeroHertzModeEnabled( FrameCadenceAdapterInterface::ZeroHertzModeParams{}); @@ -948,13 +1140,174 @@ TEST(FrameCadenceAdapterRealTimeTest, })); adapter->OnFrame(frame); }); - event.Wait(rtc::Event::kForever); - rtc::Event finalized; + event.Wait(Event::kForever); + Event finalized; queue->PostTask([&] { adapter = nullptr; finalized.Set(); }); - finalized.Wait(rtc::Event::kForever); + finalized.Wait(Event::kForever); +} + +class ZeroHertzQueueOverloadTest : public ::testing::Test { + public: + static constexpr int kMaxFps = 10; + + ZeroHertzQueueOverloadTest() { + Initialize(); + metrics::Reset(); + } + + void Initialize() { + adapter_->Initialize(&callback_); + adapter_->SetZeroHertzModeEnabled( + FrameCadenceAdapterInterface::ZeroHertzModeParams{ + /*num_simulcast_layers=*/1}); + adapter_->OnConstraintsChanged( + VideoTrackSourceConstraints{/*min_fps=*/0, kMaxFps}); + time_controller_.AdvanceTime(TimeDelta::Zero()); + } + + void ScheduleDelayed(TimeDelta delay, absl::AnyInvocable task) { + TaskQueueBase::Current()->PostDelayedTask(std::move(task), delay); + } + + void PassFrame() { adapter_->OnFrame(CreateFrame()); } + + void AdvanceTime(TimeDelta duration) { + time_controller_.AdvanceTime(duration); + } + + void SkipForwardBy(TimeDelta duration) { + time_controller_.SkipForwardBy(duration); + } + + Timestamp CurrentTime() { return time_controller_.GetClock()->CurrentTime(); } + + protected: + test::ScopedKeyValueConfig field_trials_; + NiceMock callback_; + GlobalSimulatedTimeController time_controller_{Timestamp::Zero()}; + std::unique_ptr adapter_{ + CreateAdapter(field_trials_, time_controller_.GetClock())}; +}; + +TEST_F(ZeroHertzQueueOverloadTest, + ForwardedFramesDuringTooLongEncodeTimeAreFlaggedWithQueueOverload) { + InSequence s; + PassFrame(); + EXPECT_CALL(callback_, OnFrame(_, false, _)).WillOnce(InvokeWithoutArgs([&] { + PassFrame(); + PassFrame(); + PassFrame(); + SkipForwardBy(TimeDelta::Millis(301)); + })); + EXPECT_CALL(callback_, OnFrame(_, true, _)).Times(3); + AdvanceTime(TimeDelta::Millis(100)); + EXPECT_THAT(metrics::Samples("WebRTC.Screenshare.ZeroHz.QueueOverload"), + ElementsAre(Pair(false, 1), Pair(true, 3))); +} + +TEST_F(ZeroHertzQueueOverloadTest, + ForwardedFramesAfterOverloadBurstAreNotFlaggedWithQueueOverload) { + InSequence s; + PassFrame(); + EXPECT_CALL(callback_, OnFrame(_, false, _)).WillOnce(InvokeWithoutArgs([&] { + PassFrame(); + PassFrame(); + PassFrame(); + SkipForwardBy(TimeDelta::Millis(301)); + })); + EXPECT_CALL(callback_, OnFrame(_, true, _)).Times(3); + AdvanceTime(TimeDelta::Millis(100)); + EXPECT_CALL(callback_, OnFrame(_, false, _)).Times(2); + PassFrame(); + PassFrame(); + AdvanceTime(TimeDelta::Millis(100)); + EXPECT_THAT(metrics::Samples("WebRTC.Screenshare.ZeroHz.QueueOverload"), + ElementsAre(Pair(false, 3), Pair(true, 3))); +} + +TEST_F(ZeroHertzQueueOverloadTest, + 
ForwardedFramesDuringNormalEncodeTimeAreNotFlaggedWithQueueOverload) {
+ InSequence s;
+ PassFrame();
+ EXPECT_CALL(callback_, OnFrame(_, false, _)).WillOnce(InvokeWithoutArgs([&] {
+ PassFrame();
+ PassFrame();
+ PassFrame();
+ // Long but not too long encode time.
+ SkipForwardBy(TimeDelta::Millis(99));
+ }));
+ EXPECT_CALL(callback_, OnFrame(_, false, _)).Times(3);
+ AdvanceTime(TimeDelta::Millis(199));
+ EXPECT_THAT(metrics::Samples("WebRTC.Screenshare.ZeroHz.QueueOverload"),
+ ElementsAre(Pair(false, 4)));
+}
+
+TEST_F(
+ ZeroHertzQueueOverloadTest,
+ AvoidSettingQueueOverloadAndSendRepeatWhenNoNewPacketsWhileTooLongEncode) {
+ // Receive one frame only and let OnFrame take such a long time that an
+ // overload normally is warranted. But the fact that no new frames arrive
+ // while being blocked should trigger a non-idle repeat to ensure that the
+ // video stream does not freeze and queue overload should be false.
+ PassFrame();
+ EXPECT_CALL(callback_, OnFrame(_, false, _))
+ .WillOnce(
+ InvokeWithoutArgs([&] { SkipForwardBy(TimeDelta::Millis(101)); }))
+ .WillOnce(InvokeWithoutArgs([&] {
+ // Non-idle repeat.
+ EXPECT_EQ(CurrentTime(), Timestamp::Zero() + TimeDelta::Millis(201));
+ }));
+ AdvanceTime(TimeDelta::Millis(100));
+ EXPECT_THAT(metrics::Samples("WebRTC.Screenshare.ZeroHz.QueueOverload"),
+ ElementsAre(Pair(false, 2)));
+}
+
+TEST_F(ZeroHertzQueueOverloadTest,
+ EnterFastRepeatAfterQueueOverloadWhenReceivedOnlyOneFrameDuringEncode) {
+ InSequence s;
+ // - Forward one frame during high load which triggers queue overload.
+ // - Receive only one new frame while being blocked and verify that the
+ // cancelled repeat was for the first frame and not the second.
+ // - Fast repeat mode should happen after second frame.
+ PassFrame();
+ EXPECT_CALL(callback_, OnFrame(_, false, _)).WillOnce(InvokeWithoutArgs([&] {
+ PassFrame();
+ SkipForwardBy(TimeDelta::Millis(101));
+ }));
+ EXPECT_CALL(callback_, OnFrame(_, true, _));
+ AdvanceTime(TimeDelta::Millis(100));
+
+ // Fast repeats should take place from here on.
+ EXPECT_CALL(callback_, OnFrame(_, false, _)).Times(5);
+ AdvanceTime(TimeDelta::Millis(500));
+ EXPECT_THAT(metrics::Samples("WebRTC.Screenshare.ZeroHz.QueueOverload"),
+ ElementsAre(Pair(false, 6), Pair(true, 1)));
+}
+
+TEST_F(ZeroHertzQueueOverloadTest,
+ QueueOverloadIsDisabledForZeroHerzWhenKillSwitchIsEnabled) {
+ webrtc::test::ScopedKeyValueConfig field_trials(
+ field_trials_, "WebRTC-ZeroHertzQueueOverload/Disabled/");
+ adapter_.reset();
+ adapter_ = CreateAdapter(field_trials, time_controller_.GetClock());
+ Initialize();
+
+ // Same as ForwardedFramesDuringTooLongEncodeTimeAreFlaggedWithQueueOverload
+ // but this time the queue overload mechanism is disabled.
+ InSequence s; + PassFrame(); + EXPECT_CALL(callback_, OnFrame(_, false, _)).WillOnce(InvokeWithoutArgs([&] { + PassFrame(); + PassFrame(); + PassFrame(); + SkipForwardBy(TimeDelta::Millis(301)); + })); + EXPECT_CALL(callback_, OnFrame(_, false, _)).Times(3); + AdvanceTime(TimeDelta::Millis(100)); + EXPECT_EQ(metrics::NumSamples("WebRTC.Screenshare.ZeroHz.QueueOverload"), 0); } } // namespace diff --git a/video/frame_decode_scheduler.h b/video/frame_decode_scheduler.h index 29e27c22c8..762268abd0 100644 --- a/video/frame_decode_scheduler.h +++ b/video/frame_decode_scheduler.h @@ -13,8 +13,9 @@ #include +#include + #include "absl/functional/any_invocable.h" -#include "absl/types/optional.h" #include "api/units/timestamp.h" #include "video/frame_decode_timing.h" @@ -31,7 +32,7 @@ class FrameDecodeScheduler { // Returns the rtp timestamp of the next frame scheduled for release, or // `nullopt` if no frame is currently scheduled. - virtual absl::optional ScheduledRtpTimestamp() = 0; + virtual std::optional ScheduledRtpTimestamp() = 0; // Schedules a frame for release based on `schedule`. When released, // `callback` will be invoked with the `rtp` timestamp of the frame and the diff --git a/video/frame_decode_timing.cc b/video/frame_decode_timing.cc index 58ecd41c9e..e7401a0ca4 100644 --- a/video/frame_decode_timing.cc +++ b/video/frame_decode_timing.cc @@ -11,8 +11,8 @@ #include "video/frame_decode_timing.h" #include +#include -#include "absl/types/optional.h" #include "api/units/time_delta.h" #include "rtc_base/logging.h" @@ -25,7 +25,7 @@ FrameDecodeTiming::FrameDecodeTiming(Clock* clock, RTC_DCHECK(timing_); } -absl::optional +std::optional FrameDecodeTiming::OnFrameBufferUpdated(uint32_t next_temporal_unit_rtp, uint32_t last_temporal_unit_rtp, TimeDelta max_wait_for_frame, @@ -44,7 +44,7 @@ FrameDecodeTiming::OnFrameBufferUpdated(uint32_t next_temporal_unit_rtp, RTC_DLOG(LS_VERBOSE) << "Fast-forwarded frame " << next_temporal_unit_rtp << " render time " << render_time << " with delay " << max_wait; - return absl::nullopt; + return std::nullopt; } max_wait.Clamp(TimeDelta::Zero(), max_wait_for_frame); diff --git a/video/frame_decode_timing.h b/video/frame_decode_timing.h index 6bde4702ad..c5b024f7f2 100644 --- a/video/frame_decode_timing.h +++ b/video/frame_decode_timing.h @@ -38,7 +38,7 @@ class FrameDecodeTiming { Timestamp render_time; }; - absl::optional OnFrameBufferUpdated( + std::optional OnFrameBufferUpdated( uint32_t next_temporal_unit_rtp, uint32_t last_temporal_unit_rtp, TimeDelta max_wait_for_frame, diff --git a/video/frame_decode_timing_unittest.cc b/video/frame_decode_timing_unittest.cc index 83ea91692c..cb45d709f1 100644 --- a/video/frame_decode_timing_unittest.cc +++ b/video/frame_decode_timing_unittest.cc @@ -12,7 +12,8 @@ #include -#include "absl/types/optional.h" +#include + #include "api/units/time_delta.h" #include "modules/video_coding/timing/timing.h" #include "rtc_base/containers/flat_map.h" @@ -98,7 +99,7 @@ TEST_F(FrameDecodeTimingTest, FastForwardsFrameTooFarInThePast) { EXPECT_THAT(frame_decode_scheduler_.OnFrameBufferUpdated( 90000, 180000, kMaxWaitForFrame, false), - Eq(absl::nullopt)); + Eq(std::nullopt)); } TEST_F(FrameDecodeTimingTest, NoFastForwardIfOnlyFrameToDecode) { diff --git a/video/frame_dumping_encoder.cc b/video/frame_dumping_encoder.cc index cb9c576f4e..3785293ee4 100644 --- a/video/frame_dumping_encoder.cc +++ b/video/frame_dumping_encoder.cc @@ -95,9 +95,9 @@ class FrameDumpingEncoder : public VideoEncoder, public EncodedImageCallback { 
std::string FilenameFromSimulcastIndex(int index) RTC_EXCLUSIVE_LOCKS_REQUIRED(mu_) { char filename_buffer[1024]; - rtc::SimpleStringBuilder builder(filename_buffer); - builder << output_directory_ << "/webrtc_encoded_frames" - << "." << origin_time_micros_ << "." << index << ".ivf"; + SimpleStringBuilder builder(filename_buffer); + builder << output_directory_ << "/webrtc_encoded_frames" << "." + << origin_time_micros_ << "." << index << ".ivf"; return builder.str(); } @@ -137,8 +137,8 @@ std::unique_ptr MaybeCreateFrameDumpingEncoderWrapper( return encoder; } absl::c_replace(output_directory, ';', '/'); - return std::make_unique( - std::move(encoder), rtc::TimeMicros(), output_directory); + return std::make_unique(std::move(encoder), TimeMicros(), + output_directory); } } // namespace webrtc diff --git a/video/frame_encode_metadata_writer.cc b/video/frame_encode_metadata_writer.cc index d6095a090b..482ba201de 100644 --- a/video/frame_encode_metadata_writer.cc +++ b/video/frame_encode_metadata_writer.cc @@ -28,7 +28,7 @@ const int kThrottleRatio = 100000; class EncodedImageBufferWrapper : public EncodedImageBufferInterface { public: - explicit EncodedImageBufferWrapper(rtc::Buffer&& buffer) + explicit EncodedImageBufferWrapper(Buffer&& buffer) : buffer_(std::move(buffer)) {} const uint8_t* data() const override { return buffer_.data(); } @@ -36,7 +36,7 @@ class EncodedImageBufferWrapper : public EncodedImageBufferInterface { size_t size() const override { return buffer_.size(); } private: - rtc::Buffer buffer_; + Buffer buffer_; }; } // namespace @@ -99,19 +99,19 @@ void FrameEncodeMetadataWriter::OnEncodeStarted(const VideoFrame& frame) { timing_frames_info_.resize(num_spatial_layers_); FrameMetadata metadata; - metadata.rtp_timestamp = frame.timestamp(); - metadata.encode_start_time_ms = rtc::TimeMillis(); + metadata.rtp_timestamp = frame.rtp_timestamp(); + metadata.encode_start_time_ms = TimeMillis(); metadata.ntp_time_ms = frame.ntp_time_ms(); metadata.timestamp_us = frame.timestamp_us(); metadata.rotation = frame.rotation(); metadata.color_space = frame.color_space(); + metadata.is_steady_state_refresh_frame = frame.update_rect().IsEmpty(); metadata.packet_infos = frame.packet_infos(); for (size_t si = 0; si < num_spatial_layers_; ++si) { RTC_DCHECK(timing_frames_info_[si].frames.empty() || - rtc::TimeDiff( - frame.render_time_ms(), - timing_frames_info_[si].frames.back().timestamp_us / 1000) >= - 0); + TimeDiff(frame.render_time_ms(), + timing_frames_info_[si].frames.back().timestamp_us / + 1000) >= 0); // If stream is disabled due to low bandwidth OnEncodeStarted still will be // called and have to be ignored. 
if (timing_frames_info_[si].target_bitrate_bytes_per_sec == 0) @@ -136,14 +136,15 @@ void FrameEncodeMetadataWriter::OnEncodeStarted(const VideoFrame& frame) { } } -void FrameEncodeMetadataWriter::FillTimingInfo(size_t simulcast_svc_idx, - EncodedImage* encoded_image) { +void FrameEncodeMetadataWriter::FillMetadataAndTimingInfo( + size_t simulcast_svc_idx, + EncodedImage* encoded_image) { MutexLock lock(&lock_); - absl::optional outlier_frame_size; - absl::optional encode_start_ms; + std::optional outlier_frame_size; + std::optional encode_start_ms; uint8_t timing_flags = VideoSendTiming::kNotTriggered; - int64_t encode_done_ms = rtc::TimeMillis(); + int64_t encode_done_ms = TimeMillis(); encode_start_ms = ExtractEncodeStartTimeAndFillMetadata(simulcast_svc_idx, encoded_image); @@ -182,7 +183,7 @@ void FrameEncodeMetadataWriter::FillTimingInfo(size_t simulcast_svc_idx, // If encode start is not available that means that encoder uses internal // source. In that case capture timestamp may be from a different clock with a - // drift relative to rtc::TimeMillis(). We can't use it for Timing frames, + // drift relative to webrtc::TimeMillis(). We can't use it for Timing frames, // because to being sent in the network capture time required to be less than // all the other timestamps. if (encode_start_ms) { @@ -204,13 +205,11 @@ void FrameEncodeMetadataWriter::UpdateBitstream( // Make sure that the data is not copied if owned by EncodedImage. const EncodedImage& buffer = *encoded_image; - rtc::Buffer modified_buffer = - SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite( - buffer, encoded_image->ColorSpace()); + Buffer modified_buffer = SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite( + buffer, encoded_image->ColorSpace()); encoded_image->SetEncodedData( - rtc::make_ref_counted( - std::move(modified_buffer))); + make_ref_counted(std::move(modified_buffer))); } void FrameEncodeMetadataWriter::Reset() { @@ -223,11 +222,11 @@ void FrameEncodeMetadataWriter::Reset() { stalled_encoder_logged_messages_ = 0; } -absl::optional +std::optional FrameEncodeMetadataWriter::ExtractEncodeStartTimeAndFillMetadata( size_t simulcast_svc_idx, EncodedImage* encoded_image) { - absl::optional result; + std::optional result; size_t num_simulcast_svc_streams = timing_frames_info_.size(); if (simulcast_svc_idx < num_simulcast_svc_streams) { auto metadata_list = &timing_frames_info_[simulcast_svc_idx].frames; @@ -236,7 +235,7 @@ FrameEncodeMetadataWriter::ExtractEncodeStartTimeAndFillMetadata( // Because some hardware encoders don't preserve capture timestamp we // use RTP timestamps here. 
while (!metadata_list->empty() && - IsNewerTimestamp(encoded_image->Timestamp(), + IsNewerTimestamp(encoded_image->RtpTimestamp(), metadata_list->front().rtp_timestamp)) { frame_drop_callback_->OnDroppedFrame( EncodedImageCallback::DropReason::kDroppedByEncoder); @@ -249,13 +248,15 @@ FrameEncodeMetadataWriter::ExtractEncodeStartTimeAndFillMetadata( : VideoContentType::UNSPECIFIED; if (!metadata_list->empty() && - metadata_list->front().rtp_timestamp == encoded_image->Timestamp()) { + metadata_list->front().rtp_timestamp == encoded_image->RtpTimestamp()) { result.emplace(metadata_list->front().encode_start_time_ms); encoded_image->capture_time_ms_ = metadata_list->front().timestamp_us / 1000; encoded_image->ntp_time_ms_ = metadata_list->front().ntp_time_ms; encoded_image->rotation_ = metadata_list->front().rotation; encoded_image->SetColorSpace(metadata_list->front().color_space); + encoded_image->SetIsSteadyStateRefreshFrame( + metadata_list->front().is_steady_state_refresh_frame); encoded_image->SetPacketInfos(metadata_list->front().packet_infos); metadata_list->pop_front(); } else { diff --git a/video/frame_encode_metadata_writer.h b/video/frame_encode_metadata_writer.h index afebca816c..637d03a40e 100644 --- a/video/frame_encode_metadata_writer.h +++ b/video/frame_encode_metadata_writer.h @@ -12,9 +12,9 @@ #define VIDEO_FRAME_ENCODE_METADATA_WRITER_H_ #include +#include #include -#include "absl/types/optional.h" #include "api/video/encoded_image.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" @@ -34,7 +34,8 @@ class FrameEncodeMetadataWriter { void OnEncodeStarted(const VideoFrame& frame); - void FillTimingInfo(size_t simulcast_svc_idx, EncodedImage* encoded_image); + void FillMetadataAndTimingInfo(size_t simulcast_svc_idx, + EncodedImage* encoded_image); void UpdateBitstream(const CodecSpecificInfo* codec_specific_info, EncodedImage* encoded_image); @@ -44,7 +45,7 @@ class FrameEncodeMetadataWriter { private: // For non-internal-source encoders, returns encode started time and fixes // capture timestamp for the frame, if corrupted by the encoder. 
- absl::optional ExtractEncodeStartTimeAndFillMetadata( + std::optional ExtractEncodeStartTimeAndFillMetadata( size_t simulcast_svc_idx, EncodedImage* encoded_image) RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_); @@ -54,7 +55,8 @@ class FrameEncodeMetadataWriter { int64_t ntp_time_ms = 0; int64_t timestamp_us = 0; VideoRotation rotation = kVideoRotation_0; - absl::optional color_space; + std::optional color_space; + bool is_steady_state_refresh_frame = false; RtpPacketInfos packet_infos; }; struct TimingFramesLayerInfo { diff --git a/video/frame_encode_metadata_writer_unittest.cc b/video/frame_encode_metadata_writer_unittest.cc index e151282b77..753c3092e0 100644 --- a/video/frame_encode_metadata_writer_unittest.cc +++ b/video/frame_encode_metadata_writer_unittest.cc @@ -27,7 +27,7 @@ namespace webrtc { namespace test { namespace { -const rtc::scoped_refptr kFrameBuffer = I420Buffer::Create(4, 4); +const scoped_refptr kFrameBuffer = I420Buffer::Create(4, 4); inline size_t FrameSize(const size_t& min_frame_size, const size_t& max_frame_size, @@ -92,7 +92,7 @@ std::vector> GetTimingFrames( for (int i = 0; i < num_frames; ++i) { current_timestamp += 1; VideoFrame frame = VideoFrame::Builder() - .set_timestamp_rtp(current_timestamp * 90) + .set_rtp_timestamp(current_timestamp * 90) .set_timestamp_ms(current_timestamp) .set_video_frame_buffer(kFrameBuffer) .build(); @@ -105,7 +105,7 @@ std::vector> GetTimingFrames( image.SetEncodedData(EncodedImageBuffer::Create(max_frame_size)); image.set_size(FrameSize(min_frame_size, max_frame_size, si, i)); image.capture_time_ms_ = current_timestamp; - image.SetTimestamp(static_cast(current_timestamp * 90)); + image.SetRtpTimestamp(static_cast(current_timestamp * 90)); image.SetSpatialIndex(si); if (dropped) { @@ -113,7 +113,7 @@ std::vector> GetTimingFrames( continue; } - encode_timer.FillTimingInfo(si, &image); + encode_timer.FillMetadataAndTimingInfo(si, &image); if (IsTimingFrame(image)) { result[si].push_back(FrameType::kTiming); @@ -198,7 +198,7 @@ TEST(FrameEncodeMetadataWriterTest, NoTimingFrameIfNoEncodeStartTime) { EncodedImage image; image.SetEncodedData(EncodedImageBuffer::Create(kFrameSize)); image.capture_time_ms_ = timestamp; - image.SetTimestamp(static_cast(timestamp * 90)); + image.SetRtpTimestamp(static_cast(timestamp * 90)); FakeEncodedImageCallback sink; FrameEncodeMetadataWriter encode_timer(&sink); @@ -213,18 +213,18 @@ TEST(FrameEncodeMetadataWriterTest, NoTimingFrameIfNoEncodeStartTime) { // Verify a single frame works with encode start time set. VideoFrame frame = VideoFrame::Builder() .set_timestamp_ms(timestamp) - .set_timestamp_rtp(timestamp * 90) + .set_rtp_timestamp(timestamp * 90) .set_video_frame_buffer(kFrameBuffer) .build(); encode_timer.OnEncodeStarted(frame); - encode_timer.FillTimingInfo(0, &image); + encode_timer.FillMetadataAndTimingInfo(0, &image); EXPECT_TRUE(IsTimingFrame(image)); // New frame, now skip OnEncodeStarted. Should not result in timing frame. 
image.capture_time_ms_ = ++timestamp; - image.SetTimestamp(static_cast(timestamp * 90)); + image.SetRtpTimestamp(static_cast(timestamp * 90)); image.timing_ = EncodedImage::Timing(); - encode_timer.FillTimingInfo(0, &image); + encode_timer.FillMetadataAndTimingInfo(0, &image); EXPECT_FALSE(IsTimingFrame(image)); } @@ -244,24 +244,24 @@ TEST(FrameEncodeMetadataWriterTest, NotifiesAboutDroppedFrames) { EncodedImage image; VideoFrame frame = VideoFrame::Builder() - .set_timestamp_rtp(kTimestampMs1 * 90) + .set_rtp_timestamp(kTimestampMs1 * 90) .set_timestamp_ms(kTimestampMs1) .set_video_frame_buffer(kFrameBuffer) .build(); image.capture_time_ms_ = kTimestampMs1; - image.SetTimestamp(static_cast(image.capture_time_ms_ * 90)); - frame.set_timestamp(image.capture_time_ms_ * 90); + image.SetRtpTimestamp(static_cast(image.capture_time_ms_ * 90)); + frame.set_rtp_timestamp(image.capture_time_ms_ * 90); frame.set_timestamp_us(image.capture_time_ms_ * 1000); encode_timer.OnEncodeStarted(frame); EXPECT_EQ(0u, sink.GetNumFramesDropped()); - encode_timer.FillTimingInfo(0, &image); + encode_timer.FillMetadataAndTimingInfo(0, &image); image.capture_time_ms_ = kTimestampMs2; - image.SetTimestamp(static_cast(image.capture_time_ms_ * 90)); + image.SetRtpTimestamp(static_cast(image.capture_time_ms_ * 90)); image.timing_ = EncodedImage::Timing(); - frame.set_timestamp(image.capture_time_ms_ * 90); + frame.set_rtp_timestamp(image.capture_time_ms_ * 90); frame.set_timestamp_us(image.capture_time_ms_ * 1000); encode_timer.OnEncodeStarted(frame); // No OnEncodedImageCall for timestamp2. Yet, at this moment it's not known @@ -269,21 +269,21 @@ TEST(FrameEncodeMetadataWriterTest, NotifiesAboutDroppedFrames) { EXPECT_EQ(0u, sink.GetNumFramesDropped()); image.capture_time_ms_ = kTimestampMs3; - image.SetTimestamp(static_cast(image.capture_time_ms_ * 90)); + image.SetRtpTimestamp(static_cast(image.capture_time_ms_ * 90)); image.timing_ = EncodedImage::Timing(); - frame.set_timestamp(image.capture_time_ms_ * 90); + frame.set_rtp_timestamp(image.capture_time_ms_ * 90); frame.set_timestamp_us(image.capture_time_ms_ * 1000); encode_timer.OnEncodeStarted(frame); - encode_timer.FillTimingInfo(0, &image); + encode_timer.FillMetadataAndTimingInfo(0, &image); EXPECT_EQ(1u, sink.GetNumFramesDropped()); image.capture_time_ms_ = kTimestampMs4; - image.SetTimestamp(static_cast(image.capture_time_ms_ * 90)); + image.SetRtpTimestamp(static_cast(image.capture_time_ms_ * 90)); image.timing_ = EncodedImage::Timing(); - frame.set_timestamp(image.capture_time_ms_ * 90); + frame.set_rtp_timestamp(image.capture_time_ms_ * 90); frame.set_timestamp_us(image.capture_time_ms_ * 1000); encode_timer.OnEncodeStarted(frame); - encode_timer.FillTimingInfo(0, &image); + encode_timer.FillMetadataAndTimingInfo(0, &image); EXPECT_EQ(1u, sink.GetNumFramesDropped()); } @@ -300,15 +300,15 @@ TEST(FrameEncodeMetadataWriterTest, RestoresCaptureTimestamps) { encode_timer.OnSetRates(bitrate_allocation, 30); image.capture_time_ms_ = kTimestampMs; // Correct timestamp. - image.SetTimestamp(static_cast(image.capture_time_ms_ * 90)); + image.SetRtpTimestamp(static_cast(image.capture_time_ms_ * 90)); VideoFrame frame = VideoFrame::Builder() .set_timestamp_ms(image.capture_time_ms_) - .set_timestamp_rtp(image.capture_time_ms_ * 90) + .set_rtp_timestamp(image.capture_time_ms_ * 90) .set_video_frame_buffer(kFrameBuffer) .build(); encode_timer.OnEncodeStarted(frame); image.capture_time_ms_ = 0; // Incorrect timestamp. 
- encode_timer.FillTimingInfo(0, &image); + encode_timer.FillMetadataAndTimingInfo(0, &image); EXPECT_EQ(kTimestampMs, image.capture_time_ms_); } @@ -324,15 +324,15 @@ TEST(FrameEncodeMetadataWriterTest, CopiesRotation) { bitrate_allocation.SetBitrate(0, 0, 500000); encode_timer.OnSetRates(bitrate_allocation, 30); - image.SetTimestamp(static_cast(kTimestampMs * 90)); + image.SetRtpTimestamp(static_cast(kTimestampMs * 90)); VideoFrame frame = VideoFrame::Builder() .set_timestamp_ms(kTimestampMs) - .set_timestamp_rtp(kTimestampMs * 90) + .set_rtp_timestamp(kTimestampMs * 90) .set_rotation(kVideoRotation_180) .set_video_frame_buffer(kFrameBuffer) .build(); encode_timer.OnEncodeStarted(frame); - encode_timer.FillTimingInfo(0, &image); + encode_timer.FillMetadataAndTimingInfo(0, &image); EXPECT_EQ(kVideoRotation_180, image.rotation_); } @@ -350,15 +350,15 @@ TEST(FrameEncodeMetadataWriterTest, SetsContentType) { bitrate_allocation.SetBitrate(0, 0, 500000); encode_timer.OnSetRates(bitrate_allocation, 30); - image.SetTimestamp(static_cast(kTimestampMs * 90)); + image.SetRtpTimestamp(static_cast(kTimestampMs * 90)); VideoFrame frame = VideoFrame::Builder() .set_timestamp_ms(kTimestampMs) - .set_timestamp_rtp(kTimestampMs * 90) + .set_rtp_timestamp(kTimestampMs * 90) .set_rotation(kVideoRotation_180) .set_video_frame_buffer(kFrameBuffer) .build(); encode_timer.OnEncodeStarted(frame); - encode_timer.FillTimingInfo(0, &image); + encode_timer.FillMetadataAndTimingInfo(0, &image); EXPECT_EQ(VideoContentType::SCREENSHARE, image.content_type_); } @@ -376,19 +376,54 @@ TEST(FrameEncodeMetadataWriterTest, CopiesColorSpace) { webrtc::ColorSpace color_space = CreateTestColorSpace(/*with_hdr_metadata=*/true); - image.SetTimestamp(static_cast(kTimestampMs * 90)); + image.SetRtpTimestamp(static_cast(kTimestampMs * 90)); VideoFrame frame = VideoFrame::Builder() .set_timestamp_ms(kTimestampMs) - .set_timestamp_rtp(kTimestampMs * 90) + .set_rtp_timestamp(kTimestampMs * 90) .set_color_space(color_space) .set_video_frame_buffer(kFrameBuffer) .build(); encode_timer.OnEncodeStarted(frame); - encode_timer.FillTimingInfo(0, &image); + encode_timer.FillMetadataAndTimingInfo(0, &image); ASSERT_NE(image.ColorSpace(), nullptr); EXPECT_EQ(color_space, *image.ColorSpace()); } +TEST(FrameEncodeMetadataWriterTest, SetsIsSteadyStateRefreshFrame) { + EncodedImage image; + const int64_t kTimestampMs = 123456; + FakeEncodedImageCallback sink; + + FrameEncodeMetadataWriter encode_timer(&sink); + encode_timer.OnEncoderInit(VideoCodec()); + // Any non-zero bitrate needed to be set before the first frame. 
+ VideoBitrateAllocation bitrate_allocation; + bitrate_allocation.SetBitrate(0, 0, 500000); + encode_timer.OnSetRates(bitrate_allocation, 30); + + image.SetRtpTimestamp(static_cast(kTimestampMs * 90)); + VideoFrame not_refresh_frame = VideoFrame::Builder() + .set_timestamp_ms(kTimestampMs) + .set_rtp_timestamp(kTimestampMs * 90) + .set_video_frame_buffer(kFrameBuffer) + .build(); + encode_timer.OnEncodeStarted(not_refresh_frame); + encode_timer.FillMetadataAndTimingInfo(0, &image); + EXPECT_FALSE(image.IsSteadyStateRefreshFrame()); + + VideoFrame::UpdateRect empty_update_rect; + empty_update_rect.MakeEmptyUpdate(); + VideoFrame refresh_frame = VideoFrame::Builder() + .set_timestamp_ms(kTimestampMs) + .set_rtp_timestamp(kTimestampMs * 90) + .set_update_rect(empty_update_rect) + .set_video_frame_buffer(kFrameBuffer) + .build(); + encode_timer.OnEncodeStarted(refresh_frame); + encode_timer.FillMetadataAndTimingInfo(0, &image); + EXPECT_TRUE(image.IsSteadyStateRefreshFrame()); +} + TEST(FrameEncodeMetadataWriterTest, CopiesPacketInfos) { EncodedImage image; const int64_t kTimestampMs = 123456; @@ -402,15 +437,15 @@ TEST(FrameEncodeMetadataWriterTest, CopiesPacketInfos) { encode_timer.OnSetRates(bitrate_allocation, 30); RtpPacketInfos packet_infos = CreatePacketInfos(3); - image.SetTimestamp(static_cast(kTimestampMs * 90)); + image.SetRtpTimestamp(static_cast(kTimestampMs * 90)); VideoFrame frame = VideoFrame::Builder() .set_timestamp_ms(kTimestampMs) - .set_timestamp_rtp(kTimestampMs * 90) + .set_rtp_timestamp(kTimestampMs * 90) .set_packet_infos(packet_infos) .set_video_frame_buffer(kFrameBuffer) .build(); encode_timer.OnEncodeStarted(frame); - encode_timer.FillTimingInfo(0, &image); + encode_timer.FillMetadataAndTimingInfo(0, &image); EXPECT_EQ(image.PacketInfos().size(), 3U); } diff --git a/video/full_stack_tests.cc b/video/full_stack_tests.cc index 7791afc854..123ca10f0e 100644 --- a/video/full_stack_tests.cc +++ b/video/full_stack_tests.cc @@ -8,16 +8,17 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ #include +#include #include #include #include #include "absl/flags/flag.h" #include "absl/flags/parse.h" -#include "absl/types/optional.h" #include "api/test/simulated_network.h" #include "api/test/test_dependency_factory.h" #include "api/test/video_quality_test_fixture.h" +#include "api/units/data_rate.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/vp9_profile.h" @@ -135,7 +136,7 @@ TEST(FullStackTest, Generator_Net_Delay_0_0_Plr_0_VP9Profile2) { return; auto fixture = CreateVideoQualityTestFixture(); - SdpVideoFormat::Parameters vp92 = { + CodecParameterMap vp92 = { {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}}; ParamsWithLogging generator; generator.call.send_side_bwe = true; @@ -147,33 +148,6 @@ TEST(FullStackTest, Generator_Net_Delay_0_0_Plr_0_VP9Profile2) { fixture->RunWithAnalyzer(generator); } -TEST(FullStackTest, Foreman_Cif_Net_Delay_0_0_Plr_0_Multiplex) { - auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging foreman_cif; - foreman_cif.call.send_side_bwe = true; - foreman_cif.video[0] = { - true, 352, 288, 30, - 700000, 700000, 700000, false, - "multiplex", 1, 0, 0, - false, false, false, ClipNameToClipPath("foreman_cif")}; - foreman_cif.analyzer = {"foreman_cif_net_delay_0_0_plr_0_Multiplex", 0.0, 0.0, - kFullStackTestDurationSecs}; - fixture->RunWithAnalyzer(foreman_cif); -} - -TEST(FullStackTest, Generator_Net_Delay_0_0_Plr_0_Multiplex) { - auto fixture = CreateVideoQualityTestFixture(); - - ParamsWithLogging generator; - generator.call.send_side_bwe = true; - generator.video[0] = { - true, 352, 288, 30, 700000, 700000, 700000, false, - "multiplex", 1, 0, 0, false, false, false, "GeneratorI420A"}; - generator.analyzer = {"generator_net_delay_0_0_plr_0_Multiplex", 0.0, 0.0, - kFullStackTestDurationSecs}; - fixture->RunWithAnalyzer(generator); -} - #endif // defined(RTC_ENABLE_VP9) #if defined(WEBRTC_LINUX) @@ -242,7 +216,7 @@ TEST(FullStackTest, Foreman_Cif_Link_150kbps_Net_Delay_0_0_Plr_0) { false, false, true, ClipNameToClipPath("foreman_cif")}; foreman_cif.analyzer = {"foreman_cif_link_150kbps_net_delay_0_0_plr_0", 0.0, 0.0, kFullStackTestDurationSecs}; - foreman_cif.config->link_capacity_kbps = 150; + foreman_cif.config->link_capacity = DataRate::KilobitsPerSec(150); fixture->RunWithAnalyzer(foreman_cif); } @@ -261,7 +235,7 @@ TEST(FullStackTest, foreman_cif.analyzer = { "foreman_cif_link_150kbps_delay100ms_30pkts_queue_overshoot30", 0.0, 0.0, kFullStackTestDurationSecs}; - foreman_cif.config->link_capacity_kbps = 150; + foreman_cif.config->link_capacity = DataRate::KilobitsPerSec(150); foreman_cif.config->queue_length_packets = 30; foreman_cif.config->queue_delay_ms = 100; fixture->RunWithAnalyzer(foreman_cif); @@ -283,7 +257,7 @@ TEST(FullStackTest, Foreman_Cif_Link_250kbps_Delay100ms_10pkts_Loss1) { 0, {}, 1.30}; foreman_cif.analyzer = {"foreman_cif_link_250kbps_delay100ms_10pkts_loss1", 0.0, 0.0, kFullStackTestDurationSecs}; - foreman_cif.config->link_capacity_kbps = 250; + foreman_cif.config->link_capacity = DataRate::KilobitsPerSec(250); foreman_cif.config->queue_length_packets = 10; foreman_cif.config->queue_delay_ms = 100; foreman_cif.config->loss_percent = 1; @@ -354,7 +328,7 @@ TEST(FullStackTest, Foreman_Cif_500kbps_Delay_50_0_Plr_3_Flexfec) { foreman_cif.analyzer = {"foreman_cif_500kbps_delay_50_0_plr_3_flexfec", 0.0, 0.0, kFullStackTestDurationSecs}; foreman_cif.config->loss_percent = 3; - foreman_cif.config->link_capacity_kbps = 500; + 
foreman_cif.config->link_capacity = DataRate::KilobitsPerSec(500); foreman_cif.config->queue_delay_ms = 50; fixture->RunWithAnalyzer(foreman_cif); } @@ -371,7 +345,7 @@ TEST(FullStackTest, Foreman_Cif_500kbps_Delay_50_0_Plr_3_Ulpfec) { foreman_cif.analyzer = {"foreman_cif_500kbps_delay_50_0_plr_3_ulpfec", 0.0, 0.0, kFullStackTestDurationSecs}; foreman_cif.config->loss_percent = 3; - foreman_cif.config->link_capacity_kbps = 500; + foreman_cif.config->link_capacity = DataRate::KilobitsPerSec(500); foreman_cif.config->queue_delay_ms = 50; fixture->RunWithAnalyzer(foreman_cif); } @@ -493,7 +467,7 @@ TEST(FullStackTest, Foreman_Cif_500kbps) { kFullStackTestDurationSecs}; foreman_cif.config->queue_length_packets = 0; foreman_cif.config->queue_delay_ms = 0; - foreman_cif.config->link_capacity_kbps = 500; + foreman_cif.config->link_capacity = DataRate::KilobitsPerSec(500); fixture->RunWithAnalyzer(foreman_cif); } @@ -510,7 +484,7 @@ TEST(FullStackTest, Foreman_Cif_500kbps_32pkts_Queue) { kFullStackTestDurationSecs}; foreman_cif.config->queue_length_packets = 32; foreman_cif.config->queue_delay_ms = 0; - foreman_cif.config->link_capacity_kbps = 500; + foreman_cif.config->link_capacity = DataRate::KilobitsPerSec(500); fixture->RunWithAnalyzer(foreman_cif); } @@ -527,7 +501,7 @@ TEST(FullStackTest, Foreman_Cif_500kbps_100ms) { kFullStackTestDurationSecs}; foreman_cif.config->queue_length_packets = 0; foreman_cif.config->queue_delay_ms = 100; - foreman_cif.config->link_capacity_kbps = 500; + foreman_cif.config->link_capacity = DataRate::KilobitsPerSec(500); fixture->RunWithAnalyzer(foreman_cif); } @@ -546,7 +520,7 @@ TEST(GenericDescriptorTest, kFullStackTestDurationSecs}; foreman_cif.config->queue_length_packets = 32; foreman_cif.config->queue_delay_ms = 100; - foreman_cif.config->link_capacity_kbps = 500; + foreman_cif.config->link_capacity = DataRate::KilobitsPerSec(500); foreman_cif.call.generic_descriptor = true; fixture->RunWithAnalyzer(foreman_cif); } @@ -564,7 +538,7 @@ TEST(FullStackTest, Foreman_Cif_500kbps_100ms_32pkts_Queue_Recv_Bwe) { 0.0, 0.0, kFullStackTestDurationSecs}; foreman_cif.config->queue_length_packets = 32; foreman_cif.config->queue_delay_ms = 100; - foreman_cif.config->link_capacity_kbps = 500; + foreman_cif.config->link_capacity = DataRate::KilobitsPerSec(500); fixture->RunWithAnalyzer(foreman_cif); } @@ -581,7 +555,7 @@ TEST(FullStackTest, Foreman_Cif_1000kbps_100ms_32pkts_Queue) { kFullStackTestDurationSecs}; foreman_cif.config->queue_length_packets = 32; foreman_cif.config->queue_delay_ms = 100; - foreman_cif.config->link_capacity_kbps = 1000; + foreman_cif.config->link_capacity = DataRate::KilobitsPerSec(1000); fixture->RunWithAnalyzer(foreman_cif); } @@ -603,7 +577,7 @@ TEST(FullStackTest, Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue) { 0.0, 0.0, kFullStackTestDurationSecs}; conf_motion_hd.config->queue_length_packets = 32; conf_motion_hd.config->queue_delay_ms = 100; - conf_motion_hd.config->link_capacity_kbps = 2000; + conf_motion_hd.config->link_capacity = DataRate::KilobitsPerSec(2000); fixture->RunWithAnalyzer(conf_motion_hd); } @@ -627,7 +601,7 @@ TEST(GenericDescriptorTest, conf_motion_hd.config->queue_length_packets = 50; conf_motion_hd.config->loss_percent = 3; conf_motion_hd.config->queue_delay_ms = 100; - conf_motion_hd.config->link_capacity_kbps = 2000; + conf_motion_hd.config->link_capacity = DataRate::KilobitsPerSec(2000); conf_motion_hd.call.generic_descriptor = true; fixture->RunWithAnalyzer(conf_motion_hd); } @@ -650,7 +624,7 @@ TEST(FullStackTest, 
Conference_Motion_Hd_3tl_Moderate_Limits) { conf_motion_hd.config->queue_length_packets = 50; conf_motion_hd.config->loss_percent = 3; conf_motion_hd.config->queue_delay_ms = 100; - conf_motion_hd.config->link_capacity_kbps = 2000; + conf_motion_hd.config->link_capacity = DataRate::KilobitsPerSec(2000); fixture->RunWithAnalyzer(conf_motion_hd); } @@ -672,57 +646,7 @@ TEST(FullStackTest, Conference_Motion_Hd_4tl_Moderate_Limits) { conf_motion_hd.config->queue_length_packets = 50; conf_motion_hd.config->loss_percent = 3; conf_motion_hd.config->queue_delay_ms = 100; - conf_motion_hd.config->link_capacity_kbps = 2000; - fixture->RunWithAnalyzer(conf_motion_hd); -} - -TEST(FullStackTest, Conference_Motion_Hd_3tl_Alt_Moderate_Limits) { - test::ScopedFieldTrials field_trial( - AppendFieldTrials("WebRTC-UseShortVP8TL3Pattern/Enabled/")); - auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging conf_motion_hd; - conf_motion_hd.call.send_side_bwe = true; - conf_motion_hd.video[0] = { - true, 1280, - 720, 50, - 30000, 3000000, - 3000000, false, - "VP8", 3, - -1, 0, - false, false, - false, ClipNameToClipPath("ConferenceMotion_1280_720_50")}; - conf_motion_hd.analyzer = {"conference_motion_hd_3tl_alt_moderate_limits", - 0.0, 0.0, kFullStackTestDurationSecs}; - conf_motion_hd.config->queue_length_packets = 50; - conf_motion_hd.config->loss_percent = 3; - conf_motion_hd.config->queue_delay_ms = 100; - conf_motion_hd.config->link_capacity_kbps = 2000; - fixture->RunWithAnalyzer(conf_motion_hd); -} - -TEST(FullStackTest, Conference_Motion_Hd_3tl_Alt_Heavy_Moderate_Limits) { - auto fixture = CreateVideoQualityTestFixture(); - test::ScopedFieldTrials field_trial( - AppendFieldTrials("WebRTC-UseShortVP8TL3Pattern/Enabled/" - "WebRTC-UseBaseHeavyVP8TL3RateAllocation/Enabled/")); - ParamsWithLogging conf_motion_hd; - conf_motion_hd.call.send_side_bwe = true; - conf_motion_hd.video[0] = { - true, 1280, - 720, 50, - 30000, 3000000, - 3000000, false, - "VP8", 3, - -1, 0, - false, false, - false, ClipNameToClipPath("ConferenceMotion_1280_720_50")}; - conf_motion_hd.analyzer = { - "conference_motion_hd_3tl_alt_heavy_moderate_limits", 0.0, 0.0, - kFullStackTestDurationSecs}; - conf_motion_hd.config->queue_length_packets = 50; - conf_motion_hd.config->loss_percent = 3; - conf_motion_hd.config->queue_delay_ms = 100; - conf_motion_hd.config->link_capacity_kbps = 2000; + conf_motion_hd.config->link_capacity = DataRate::KilobitsPerSec(2000); fixture->RunWithAnalyzer(conf_motion_hd); } @@ -743,7 +667,7 @@ TEST(FullStackTest, Foreman_Cif_30kbps_AV1) { .clip_path = ClipNameToClipPath("foreman_cif")}; foreman_cif.analyzer = {.test_label = "foreman_cif_30kbps_AV1", .test_durations_secs = kFullStackTestDurationSecs}; - foreman_cif.config->link_capacity_kbps = 30; + foreman_cif.config->link_capacity = DataRate::KilobitsPerSec(30); foreman_cif.call.generic_descriptor = true; fixture->RunWithAnalyzer(foreman_cif); } @@ -769,11 +693,69 @@ TEST(FullStackTest, Conference_Motion_Hd_3tl_AV1) { conf_motion_hd.config->queue_length_packets = 50; conf_motion_hd.config->loss_percent = 3; conf_motion_hd.config->queue_delay_ms = 100; - conf_motion_hd.config->link_capacity_kbps = 1000; + conf_motion_hd.config->link_capacity = DataRate::KilobitsPerSec(1000); conf_motion_hd.call.generic_descriptor = true; fixture->RunWithAnalyzer(conf_motion_hd); } +#if defined(WEBRTC_MAC) +// TODO(webrtc:351644561): Flaky on Mac x86/ARM. 
+#define MAYBE_Screenshare_Slides_Simulcast_AV1 \ + DISABLED_Screenshare_Slides_Simulcast_AV1 +#else +#define MAYBE_Screenshare_Slides_Simulcast_AV1 Screenshare_Slides_Simulcast_AV1 +#endif +TEST(FullStackTest, MAYBE_Screenshare_Slides_Simulcast_AV1) { + auto fixture = CreateVideoQualityTestFixture(); + ParamsWithLogging screenshare; + screenshare.analyzer = {.test_label = "screenshare_slides_simulcast_AV1", + .test_durations_secs = kFullStackTestDurationSecs}; + screenshare.call.send_side_bwe = true; + screenshare.screenshare[0] = {.enabled = true}; + screenshare.video[0] = {.enabled = true, + .width = 1850, + .height = 1110, + .fps = 30, + .min_bitrate_bps = 0, + .target_bitrate_bps = 0, + .max_bitrate_bps = 2500000, + .codec = "AV1", + .num_temporal_layers = 2}; + + // Set `min_bitrate_bps` and `target_bitrate_bps` to zero to use WebRTC + // defaults. + VideoQualityTest::Params screenshare_params_low; + screenshare_params_low.video[0] = {.enabled = true, + .width = 1850, + .height = 1110, + .fps = 5, + .min_bitrate_bps = 0, + .target_bitrate_bps = 0, + .max_bitrate_bps = 420'000, + .codec = "AV1", + .num_temporal_layers = 2}; + + VideoQualityTest::Params screenshare_params_high; + screenshare_params_high.video[0] = {.enabled = true, + .width = 1850, + .height = 1110, + .fps = 30, + .min_bitrate_bps = 0, + .target_bitrate_bps = 0, + .max_bitrate_bps = 2'500'000, + .codec = "AV1", + .num_temporal_layers = 2}; + + std::vector streams = { + VideoQualityTest::DefaultVideoStream(screenshare_params_low, 0), + VideoQualityTest::DefaultVideoStream(screenshare_params_high, 0)}; + screenshare.ss[0] = { + .streams = streams, + .selected_stream = 1, + }; + fixture->RunWithAnalyzer(screenshare); +} + #if defined(RTC_ENABLE_VP9) TEST(FullStackTest, Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue_Vp9) { auto fixture = CreateVideoQualityTestFixture(); @@ -793,7 +775,7 @@ TEST(FullStackTest, Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue_Vp9) { kFullStackTestDurationSecs}; conf_motion_hd.config->queue_length_packets = 32; conf_motion_hd.config->queue_delay_ms = 100; - conf_motion_hd.config->link_capacity_kbps = 2000; + conf_motion_hd.config->link_capacity = DataRate::KilobitsPerSec(2000); fixture->RunWithAnalyzer(conf_motion_hd); } #endif @@ -868,7 +850,7 @@ TEST(GenericDescriptorTest, Screenshare_Slides_Lossy_Net_Generic_Descriptor) { 0.0, 0.0, kFullStackTestDurationSecs}; screenshare.config->loss_percent = 5; screenshare.config->queue_delay_ms = 200; - screenshare.config->link_capacity_kbps = 500; + screenshare.config->link_capacity = DataRate::KilobitsPerSec(500); screenshare.call.generic_descriptor = true; fixture->RunWithAnalyzer(screenshare); } @@ -885,7 +867,7 @@ TEST(FullStackTest, Screenshare_Slides_Very_Lossy) { kFullStackTestDurationSecs}; screenshare.config->loss_percent = 10; screenshare.config->queue_delay_ms = 200; - screenshare.config->link_capacity_kbps = 500; + screenshare.config->link_capacity = DataRate::KilobitsPerSec(500); fixture->RunWithAnalyzer(screenshare); } @@ -900,7 +882,7 @@ TEST(FullStackTest, Screenshare_Slides_Lossy_Limited) { screenshare.analyzer = {"screenshare_slides_lossy_limited", 0.0, 0.0, kFullStackTestDurationSecs}; screenshare.config->loss_percent = 5; - screenshare.config->link_capacity_kbps = 200; + screenshare.config->link_capacity = DataRate::KilobitsPerSec(200); screenshare.config->queue_length_packets = 30; fixture->RunWithAnalyzer(screenshare); @@ -917,7 +899,7 @@ TEST(FullStackTest, Screenshare_Slides_Moderately_Restricted) { screenshare.analyzer = 
{"screenshare_slides_moderately_restricted", 0.0, 0.0, kFullStackTestDurationSecs}; screenshare.config->loss_percent = 1; - screenshare.config->link_capacity_kbps = 1200; + screenshare.config->link_capacity = DataRate::KilobitsPerSec(1200); screenshare.config->queue_length_packets = 30; fixture->RunWithAnalyzer(screenshare); @@ -1026,11 +1008,10 @@ TEST(FullStackTest, Vp9ksvc_3sl_Low) { TEST(FullStackTest, Vp9ksvc_3sl_Low_Bw_Limited) { webrtc::test::ScopedFieldTrials override_trials( - AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/" - "WebRTC-Vp9ExternalRefCtrl/Enabled/")); + AppendFieldTrials("WebRTC-Vp9IssueKeyFrameOnLayerDeactivation/Enabled/")); auto fixture = CreateVideoQualityTestFixture(); ParamsWithLogging simulcast; - simulcast.config->link_capacity_kbps = 500; + simulcast.config->link_capacity = DataRate::KilobitsPerSec(500); simulcast.call.send_side_bwe = true; simulcast.video[0] = SvcVp9Video(); simulcast.analyzer = {"vp9ksvc_3sl_low_bw_limited", 0.0, 0.0, @@ -1053,7 +1034,7 @@ TEST(FullStackTest, Vp9ksvc_3sl_Medium_Network_Restricted) { simulcast.ss[0] = { std::vector(), 0, 3, -1, InterLayerPredMode::kOnKeyPic, std::vector(), false}; - simulcast.config->link_capacity_kbps = 1000; + simulcast.config->link_capacity = DataRate::KilobitsPerSec(1000); simulcast.config->queue_delay_ms = 100; fixture->RunWithAnalyzer(simulcast); } @@ -1071,7 +1052,7 @@ TEST(FullStackTest, Vp9ksvc_3sl_Medium_Network_Restricted_Trusted_Rate) { simulcast.ss[0] = { std::vector(), 0, 3, -1, InterLayerPredMode::kOnKeyPic, std::vector(), false}; - simulcast.config->link_capacity_kbps = 1000; + simulcast.config->link_capacity = DataRate::KilobitsPerSec(1000); simulcast.config->queue_delay_ms = 100; fixture->RunWithAnalyzer(simulcast); } diff --git a/video/g3doc/adaptation.md b/video/g3doc/adaptation.md index cb06e886b2..d34394fdc8 100644 --- a/video/g3doc/adaptation.md +++ b/video/g3doc/adaptation.md @@ -1,5 +1,5 @@ - + # Video Adaptation @@ -23,8 +23,8 @@ resource detects that it is overused, it calls `SetUsageState(kOveruse)`. When the resource is no longer overused, it can signal this using `SetUsageState(kUnderuse)`. -There are two resources that are used by default on all video tracks: Quality -scaler resource and encode overuse resource. +There are two resources that are used by default on all video tracks: the +quality scaler and encode overuse resources. ### QP Scaler Resource @@ -72,8 +72,8 @@ status and the `Adaptation` proposal. There are 3 degradation preferences, described in the [RtpParameters][RtpParameters] header. These are -* `MAINTIAIN_FRAMERATE`: Adapt video resolution -* `MAINTIAIN_RESOLUTION`: Adapt video frame-rate. +* `MAINTAIN_FRAMERATE`: Adapt video resolution +* `MAINTAIN_RESOLUTION`: Adapt video frame-rate. * `BALANCED`: Adapt video frame-rate or resolution. The degradation preference is set for a video track using the diff --git a/video/g3doc/stats.md b/video/g3doc/stats.md index 0bc2953b1f..1b20dff304 100644 --- a/video/g3doc/stats.md +++ b/video/g3doc/stats.md @@ -1,5 +1,5 @@ - + # Video stats @@ -32,15 +32,19 @@ and holds a `VideoSendStream::Stats` object. * `referenced_media_ssrc` - only present for type kRtx/kFlexfec. The SSRC for the kMedia stream that retransmissions or FEC is performed for. Updated when a frame has been encoded, `VideoStreamEncoder::OnEncodedImage`. -* `frames_encoded `- total number of encoded frames. + +* `frames_encoded `- total number of encoded frames [[rtcoutboundrtpstreamstats-framesencoded]]. 
* `encode_frame_rate` - number of encoded frames during the last second. * `width` - width of last encoded frame [[rtcoutboundrtpstreamstats-framewidth]]. * `height` - height of last encoded frame [[rtcoutboundrtpstreamstats-frameheight]]. -* `total_encode_time_ms` - total encode time for encoded frames. +* `total_encode_time_ms` - total encode time for encoded frames [[rtcoutboundrtpstreamstats-totalencodetime]]. * `qp_sum` - sum of quantizer values of encoded frames [[rtcoutboundrtpstreamstats-qpsum]]. * `frame_counts` - total number of encoded key/delta frames [[rtcoutboundrtpstreamstats-keyframesencoded]]. +* `huge_frames_sent` - total number of huge frames sent [[rtcoutboundrtpstreamstats-hugeframessent]]. +* `scalability_mode` - configured scalability mode for the stream [[rtcoutboundrtpstreamstats-scalabilitymode]]. Updated when a RTP packet is transmitted to the network, `RtpSenderEgress::SendPacket`. + * `rtp_stats` - total number of sent bytes/packets. * `total_bitrate_bps` - total bitrate sent in bits per second (over a one second window). * `retransmit_bitrate_bps` - total retransmit bitrate sent in bits per second (over a one second window). @@ -49,9 +53,11 @@ Updated when a RTP packet is transmitted to the network, `RtpSenderEgress::SendP * `total_packet_send_delay_ms` - total capture-to-send delay for sent packets [[rtcoutboundrtpstreamstats-totalpacketsenddelay]]. Updated when an incoming RTCP packet is parsed, `RTCPReceiver::ParseCompoundPacket`. + * `rtcp_packet_type_counts` - total number of received NACK/FIR/PLI packets [rtcoutboundrtpstreamstats-[nackcount], [fircount], [plicount]]. Updated when a RTCP report block packet is received, `RTCPReceiver::TriggerCallbacksFromRtcpPacket`. + * `rtcp_stats` - RTCP report block data. * `report_block_data` - RTCP report block data. @@ -59,24 +65,32 @@ Updated when a RTCP report block packet is received, `RTCPReceiver::TriggerCallb * `std::map substreams` - StreamStats mapped per SSRC. Updated when a frame is received from the source, `VideoStreamEncoder::OnFrame`. + * `frames` - total number of frames fed to VideoStreamEncoder. * `input_frame_rate` - number of frames fed to VideoStreamEncoder during the last second. +* `frames_dropped_by_bad_timestamp` - total number of dropped frames due to bad timestamp. * `frames_dropped_by_congestion_window` - total number of dropped frames due to congestion window pushback. * `frames_dropped_by_encoder_queue` - total number of dropped frames due to that the encoder is blocked. Updated if a frame from the source is dropped, `VideoStreamEncoder::OnDiscardedFrame`. + * `frames_dropped_by_capturer` - total number dropped frames by the source. Updated if a frame is dropped by `FrameDropper`, `VideoStreamEncoder::MaybeEncodeVideoFrame`. + * `frames_dropped_by_rate_limiter` - total number of dropped frames to avoid bitrate overuse. Updated (if changed) before a frame is passed to the encoder, `VideoStreamEncoder::EncodeVideoFrame`. + * `encoder_implementation_name` - name of encoder implementation [[rtcoutboundrtpstreamstats-encoderimplementation]]. +* `power_efficient_encoder` - whether the encoder is considered power efficient [[rtcoutboundrtpstreamstats-powerefficientencoder]]. Updated after a frame has been encoded, `VideoStreamEncoder::OnEncodedImage`. -* `frames_encoded `- total number of encoded frames [[rtcoutboundrtpstreamstats-framesencoded]]. + +* `frames_encoded `- total number of frames encoded [[rtcoutboundrtpstreamstats-framesencoded]]. 
* `encode_frame_rate` - number of encoded frames during the last second [[rtcoutboundrtpstreamstats-framespersecond]]. * `total_encoded_bytes_target` - total target frame size in bytes [[rtcoutboundrtpstreamstats-totalencodedbytestarget]]. +* `frames_sent` - total number of frames sent [[rtcoutboundrtpstreamstats-framessent]]. * `huge_frames_sent` - total number of huge frames sent [[rtcoutboundrtpstreamstats-hugeframessent]]. * `media_bitrate_bps` - the actual bitrate the encoder is producing. * `avg_encode_time_ms` - average encode time for encoded frames. @@ -84,6 +98,7 @@ Updated after a frame has been encoded, `VideoStreamEncoder::OnEncodedImage`. * `frames_dropped_by_encoder`- total number of dropped frames by the encoder. Adaptation stats. + * `bw_limited_resolution` - shows if resolution is limited due to restricted bandwidth. * `cpu_limited_resolution` - shows if resolution is limited due to cpu. * `bw_limited_framerate` - shows if framerate is limited due to restricted bandwidth. @@ -95,9 +110,11 @@ Adaptation stats. * `number_of_quality_adapt_changes` - total number of times resolution/framerate has changed due to quality limitation. Updated when the encoder is configured, `VideoStreamEncoder::ReconfigureEncoder`. + * `content_type` - configured content type (UNSPECIFIED/SCREENSHARE). Updated when the available bitrate changes, `VideoSendStreamImpl::OnBitrateUpdated`. + * `target_media_bitrate_bps` - the bitrate the encoder is configured to use. * `suspended` - shows if video is suspended due to zero target bitrate. @@ -119,37 +136,49 @@ and holds a `VideoReceiveStream::Stats` object. * `ssrc` - configured SSRC for the received stream. Updated when a complete frame is received, `FrameBuffer::InsertFrame`. + * `frame_counts` - total number of key/delta frames received [[rtcinboundrtpstreamstats-keyframesdecoded]]. * `network_frame_rate` - number of frames received during the last second. Updated when a frame is ready for decoding, `FrameBuffer::GetNextFrame`. From `VCMTiming`: + * `jitter_buffer_ms` - jitter delay in ms: this is the delay added to handle network jitter * `max_decode_ms` - the 95th percentile observed decode time within a time window (10 sec). * `render_delay_ms` - render delay in ms. * `min_playout_delay_ms` - minimum playout delay in ms. * `target_delay_ms` - target playout delay in ms. Max(`min_playout_delay_ms`, `jitter_delay_ms` + `max_decode_ms` + `render_delay_ms`). * `current_delay_ms` - actual playout delay in ms. -* `jitter_buffer_delay_seconds` - total jitter buffer delay in seconds: this is the time spent waiting in the jitter buffer [[rtcinboundrtpstreamstats-jitterbufferdelay]]. +* `jitter_buffer_delay` - sum of time for each frame from earliest packet is entered to corresponding frame is emitted from the jitter buffer [[rtcinboundrtpstreamstats-jitterbufferdelay]]. * `jitter_buffer_emitted_count` - total number of frames that have come out from the jitter buffer [[rtcinboundrtpstreamstats-jitterbufferemittedcount]]. +* `jitter_buffer_target_delay` - increased by the target jitter buffer delay every time a frame is emitted from the jitter buffer [[rtcinboundrtpstreamstats-jitterbuffertargetdelay]]. +* `jitter_buffer_minimum_delay` - minimum obtainable jitter buffer delay without external influence [[rtcinboundrtpstreamstats-jitterbufferminimumdelay]]. Updated (if changed) after a frame is passed to the decoder, `VCMGenericDecoder::Decode`. 
+ * `decoder_implementation_name` - name of decoder implementation [[rtcinboundrtpstreamstats-decoderimplementation]]. +* `power_efficient_decoder` - whether the decoder is considered power efficient [[rtcinboundrtpstreamstats-powerefficientdecoder]]. Updated when a frame is ready for decoding, `FrameBuffer::GetNextFrame`. + * `timing_frame_info` - timestamps for a full lifetime of a frame. * `first_frame_received_to_decoded_ms` - initial decoding latency between the first arrived frame and the first decoded frame. -* `frames_dropped` - total number of dropped frames prior to decoding or if the system is too slow [[rtcreceivedrtpstreamstats-framesdropped]]. +* `frames_dropped` - total number of dropped frames prior to decoding or if the system is too slow [[rtcinboundrtpstreamstats-framesdropped]]. Updated after a frame has been decoded, `VCMDecodedFrameCallback::Decoded`. + * `frames_decoded` - total number of decoded frames [[rtcinboundrtpstreamstats-framesdecoded]]. * `decode_frame_rate` - number of decoded frames during the last second [[rtcinboundrtpstreamstats-framespersecond]]. * `decode_ms` - time to decode last frame in ms. -* `total_decode_time_ms` - total decode time for decoded frames [[rtcinboundrtpstreamstats-totaldecodetime]]. +* `total_decode_time` - total decode time for decoded frames [[rtcinboundrtpstreamstats-totaldecodetime]]. +* `total_processing_delay` - sum of time for each frame from first RTP packet is received to corresponding frame is decoded [[rtcinboundrtpstreamstats-totalprocessingdelay]]. +* `total_assembly_time` - sum of time for each frame from first RTP packet is received to the last RTP packet of a frame is received (for frames consisting of more than one RTP packet) [[rtcinboundrtpstreamstats-totalassemblytime]]. +* `frames_assembled_from_multiple_packets` - total number of correctly decoded frames that consist of more than one RTP packet [[rtcinboundrtpstreamstats-framesassembledfrommultiplepackets]]. * `qp_sum` - sum of quantizer values of decoded frames [[rtcinboundrtpstreamstats-qpsum]]. * `content_type` - content type (UNSPECIFIED/SCREENSHARE). * `interframe_delay_max_ms` - max inter-frame delay within a time window between decoded frames. Updated before a frame is sent to the renderer, `VideoReceiveStream2::OnFrame`. + * `frames_rendered` - total number of rendered frames. * `render_frame_rate` - number of rendered frames during the last second. * `width` - width of last frame fed to renderer [[rtcinboundrtpstreamstats-framewidth]]. @@ -164,12 +193,13 @@ Updated before a frame is sent to the renderer, `VideoReceiveStream2::OnFrame`. * `total_squared_inter_frame_delay` - sum of squared inter-frame delays in seconds between rendered frames [[rtcinboundrtpstreamstats-totalsquaredinterframedelay]]. `ReceiveStatisticsImpl::OnRtpPacket` is updated for received RTP packets. From `ReceiveStatistics`: + * `total_bitrate_bps` - incoming bitrate in bps. * `rtp_stats` - RTP statistics for the received stream. Updated when a RTCP packet is sent, `RTCPSender::ComputeCompoundRTCPPacket`. -* `rtcp_packet_type_counts` - total number of sent NACK/FIR/PLI packets [rtcinboundrtpstreamstats-[nackcount], [fircount], [plicount]]. +* `rtcp_packet_type_counts` - total number of sent NACK/FIR/PLI packets [rtcinboundrtpstreamstats-[nackcount], [fircount], [plicount]]. 
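Several of the cumulative counters above are intended to be turned into per-frame averages by the consumer, for example `total_decode_time` divided by `frames_decoded`, or `jitter_buffer_delay` divided by `jitter_buffer_emitted_count`. A minimal sketch of that arithmetic follows; the struct and function names are hypothetical and only mirror the stats fields listed above, they are not actual WebRTC types.

    #include <cstdint>

    // Hypothetical holder mirroring the cumulative receive-side counters
    // described above; illustrative only, not an actual WebRTC type.
    struct ReceiveStreamCounters {
      double total_decode_time_s = 0.0;          // total_decode_time
      double jitter_buffer_delay_s = 0.0;        // jitter_buffer_delay
      uint64_t frames_decoded = 0;               // frames_decoded
      uint64_t jitter_buffer_emitted_count = 0;  // jitter_buffer_emitted_count
    };

    // Average decode time per decoded frame, in seconds.
    inline double AverageDecodeTimeSeconds(const ReceiveStreamCounters& c) {
      return c.frames_decoded == 0
                 ? 0.0
                 : c.total_decode_time_s / c.frames_decoded;
    }

    // Average time a frame spent waiting in the jitter buffer, in seconds.
    inline double AverageJitterBufferDelaySeconds(
        const ReceiveStreamCounters& c) {
      return c.jitter_buffer_emitted_count == 0
                 ? 0.0
                 : c.jitter_buffer_delay_s / c.jitter_buffer_emitted_count;
    }

The other cumulative sums above pair with their corresponding counts in the same way, e.g. `total_assembly_time` with `frames_assembled_from_multiple_packets`.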
[VideoSendStream]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/call/video_send_stream.h [VideoSendStream::Stats]: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/call/video_send_stream.h?q=VideoSendStream::Stats @@ -179,14 +209,17 @@ Updated when a RTCP packet is sent, `RTCPSender::ComputeCompoundRTCPPacket`. [rtcoutboundrtpstreamstats-frameheight]: https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-frameheight [rtcoutboundrtpstreamstats-qpsum]: https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-qpsum [rtcoutboundrtpstreamstats-keyframesencoded]: https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-keyframesencoded +[rtcoutboundrtpstreamstats-scalabilitymode]: https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-scalabilitymode [rtcoutboundrtpstreamstats-totalpacketsenddelay]: https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalpacketsenddelay [nackcount]: https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-nackcount [fircount]: https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-fircount [plicount]: https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-plicount [rtcoutboundrtpstreamstats-encoderimplementation]: https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-encoderimplementation +[rtcoutboundrtpstreamstats-powerefficientencoder]:https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-powerefficientencoder [rtcoutboundrtpstreamstats-framesencoded]: https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-framesencoded [rtcoutboundrtpstreamstats-framespersecond]: https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-framespersecond [rtcoutboundrtpstreamstats-totalencodedbytestarget]: https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalencodedbytestarget +[rtcoutboundrtpstreamstats-framessent]: https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-framessent [rtcoutboundrtpstreamstats-hugeframessent]: https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-hugeframessent [rtcoutboundrtpstreamstats-totalencodetime]: https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-totalencodetime [rtcoutboundrtpstreamstats-qualitylimitationreason]: https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-qualitylimitationreason @@ -199,11 +232,17 @@ Updated when a RTCP packet is sent, `RTCPSender::ComputeCompoundRTCPPacket`. 
[rtcinboundrtpstreamstats-keyframesdecoded]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-keyframesdecoded [rtcinboundrtpstreamstats-jitterbufferdelay]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbufferdelay [rtcinboundrtpstreamstats-jitterbufferemittedcount]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbufferemittedcount +[rtcinboundrtpstreamstats-jitterbuffertargetdelay]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbuffertargetdelay +[rtcinboundrtpstreamstats-jitterbufferminimumdelay]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbufferminimumdelay [rtcinboundrtpstreamstats-decoderimplementation]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-decoderimplementation -[rtcreceivedrtpstreamstats-framesdropped]: https://www.w3.org/TR/webrtc-stats/#dom-rtcreceivedrtpstreamstats-framesdropped +[rtcinboundrtpstreamstats-powerefficientdecoder]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-powerefficientdecoder +[rtcinboundrtpstreamstats-framesdropped]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-framesdropped [rtcinboundrtpstreamstats-framesdecoded]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-framesdecoded [rtcinboundrtpstreamstats-framespersecond]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-framespersecond [rtcinboundrtpstreamstats-totaldecodetime]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totaldecodetime +[rtcinboundrtpstreamstats-totalprocessingdelay]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalprocessingdelay +[rtcinboundrtpstreamstats-totalassemblytime]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalassemblytime +[rtcinboundrtpstreamstats-framesassembledfrommultiplepackets]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-framesassembledfrommultiplepackets [rtcinboundrtpstreamstats-qpsum]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-qpsum [rtcinboundrtpstreamstats-totalinterframedelay]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalinterframedelay [rtcinboundrtpstreamstats-totalsquaredinterframedelay]: https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalsquaredinterframedelay diff --git a/video/pc_full_stack_tests.cc b/video/pc_full_stack_tests.cc index 83b06830e0..bde1a751f8 100644 --- a/video/pc_full_stack_tests.cc +++ b/video/pc_full_stack_tests.cc @@ -26,11 +26,12 @@ #include "api/test/simulated_network.h" #include "api/test/time_controller.h" #include "api/video_codecs/vp9_profile.h" -#include "call/simulated_network.h" +#include "media/base/media_constants.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "system_wrappers/include/field_trial.h" #include "test/field_trial.h" #include "test/gtest.h" +#include "test/network/simulated_network.h" #include "test/pc/e2e/network_quality_metrics_reporter.h" #include "test/testsupport/file_utils.h" @@ -54,15 +55,13 @@ CreateTestFixture(const std::string& test_case_name, TimeController& time_controller, std::pair network_links, - rtc::FunctionView alice_configurer, - rtc::FunctionView bob_configurer) { + FunctionView alice_configurer, + FunctionView bob_configurer) { auto fixture = webrtc_pc_e2e::CreatePeerConnectionE2EQualityTestFixture( test_case_name, time_controller, /*audio_quality_analyzer=*/nullptr, /*video_quality_analyzer=*/nullptr); - 
auto alice = std::make_unique( - network_links.first->network_dependencies()); - auto bob = std::make_unique( - network_links.second->network_dependencies()); + auto alice = std::make_unique(*network_links.first); + auto bob = std::make_unique(*network_links.second); alice_configurer(alice.get()); bob_configurer(bob.get()); fixture->AddPeer(std::move(alice)); @@ -98,7 +97,7 @@ std::vector ParameterizedTestParams() { // Use the worker thread for sending packets. // https://bugs.chromium.org/p/webrtc/issues/detail?id=14502 {.use_network_thread_as_worker_thread = true, - .field_trials = "WebRTC-SendPacketsOnWorkerThread/Enabled/", + .field_trials = "", .test_case_name_postfix = "_ReducedThreads"}}; } @@ -137,13 +136,13 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Net_Delay_0_0_Plr_0_VP9) { video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); alice->SetVideoCodecs({VideoCodecConfig( - /*name=*/cricket::kVp9CodecName, /*required_params=*/{ + /*name=*/kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }, [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig( - /*name=*/cricket::kVp9CodecName, /*required_params=*/{ + /*name=*/kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }); @@ -168,13 +167,13 @@ TEST(PCGenericDescriptorTest, video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); alice->SetVideoCodecs({VideoCodecConfig( - /*name=*/cricket::kVp9CodecName, /*required_params=*/{ + /*name=*/kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }, [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig( - /*name=*/cricket::kVp9CodecName, /*required_params=*/{ + /*name=*/kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }); @@ -182,9 +181,7 @@ TEST(PCGenericDescriptorTest, } // VP9 2nd profile isn't supported on android arm and arm 64. -#if (defined(WEBRTC_ANDROID) && \ - (defined(WEBRTC_ARCH_ARM64) || defined(WEBRTC_ARCH_ARM))) || \ - (defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_ARM64)) +#if defined(WEBRTC_ARCH_ARM64) || defined(WEBRTC_ARCH_ARM) #define MAYBE_Pc_Generator_Net_Delay_0_0_Plr_0_VP9Profile2 \ DISABLED_Pc_Generator_Net_Delay_0_0_Plr_0_VP9Profile2 #else @@ -206,49 +203,19 @@ TEST(PCFullStackTest, MAYBE_Pc_Generator_Net_Delay_0_0_Plr_0_VP9Profile2) { video, test::FrameGeneratorInterface::OutputType::kI010); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); alice->SetVideoCodecs({VideoCodecConfig( - /*name=*/cricket::kVp9CodecName, /*required_params=*/{ + /*name=*/kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}})}); }, [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig( - /*name=*/cricket::kVp9CodecName, /*required_params=*/{ + /*name=*/kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}})}); }); fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } -/* -// TODO(bugs.webrtc.org/10639) migrate commented out test, when required -// functionality will be supported in PeerConnection level framework. 
-TEST(PCFullStackTest, ForemanCifWithoutPacketLossMultiplexI420Frame) { - auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging foreman_cif; - foreman_cif.call.send_side_bwe = true; - foreman_cif.video[0] = { - true, 352, 288, 30, - 700000, 700000, 700000, false, - "multiplex", 1, 0, 0, - false, false, false, ClipNameToClipPath("foreman_cif")}; - foreman_cif.analyzer = {"foreman_cif_net_delay_0_0_plr_0_Multiplex", 0.0, 0.0, - kTestDurationSec}; - fixture->RunWithAnalyzer(foreman_cif); -} - -TEST(PCFullStackTest, GeneratorWithoutPacketLossMultiplexI420AFrame) { - auto fixture = CreateVideoQualityTestFixture(); - - ParamsWithLogging generator; - generator.call.send_side_bwe = true; - generator.video[0] = { - true, 352, 288, 30, 700000, 700000, 700000, false, - "multiplex", 1, 0, 0, false, false, false, "GeneratorI420A"}; - generator.analyzer = {"generator_net_delay_0_0_plr_0_Multiplex", 0.0, 0.0, - kTestDurationSec}; - fixture->RunWithAnalyzer(generator); -} -*/ #endif // defined(RTC_ENABLE_VP9) TEST(PCFullStackTest, Pc_Net_Delay_0_0_Plr_0) { @@ -321,7 +288,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Link_150kbps_Net_Delay_0_0_Plr_0) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; - config.link_capacity_kbps = 150; + config.link_capacity = DataRate::KilobitsPerSec(150); auto fixture = CreateTestFixture( "pc_foreman_cif_link_150kbps_net_delay_0_0_plr_0", *network_emulation_manager->time_controller(), @@ -341,7 +308,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Link_130kbps_Delay100ms_Loss1_Ulpfec) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; - config.link_capacity_kbps = 130; + config.link_capacity = DataRate::KilobitsPerSec(130); config.queue_delay_ms = 100; config.loss_percent = 1; auto fixture = CreateTestFixture( @@ -364,7 +331,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Link_50kbps_Delay100ms_Loss1_Ulpfec) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; - config.link_capacity_kbps = 50; + config.link_capacity = DataRate::KilobitsPerSec(50); config.queue_delay_ms = 100; config.loss_percent = 1; auto fixture = CreateTestFixture( @@ -389,7 +356,7 @@ TEST(PCFullStackTest, std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; - config.link_capacity_kbps = 150; + config.link_capacity = DataRate::KilobitsPerSec(150); config.queue_length_packets = 30; config.queue_delay_ms = 100; auto fixture = CreateTestFixture( @@ -416,7 +383,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Link_250kbps_Delay100ms_10pkts_Loss1) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; - config.link_capacity_kbps = 250; + config.link_capacity = DataRate::KilobitsPerSec(250); config.queue_length_packets = 10; config.queue_delay_ms = 100; config.loss_percent = 1; @@ -510,7 +477,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_Delay_50_0_Plr_3_Flexfec) { CreateNetworkEmulationManager(); BuiltInNetworkBehaviorConfig config; config.loss_percent = 3; - config.link_capacity_kbps = 500; + config.link_capacity = DataRate::KilobitsPerSec(500); config.queue_delay_ms = 50; auto fixture = CreateTestFixture( "pc_foreman_cif_500kbps_delay_50_0_plr_3_flexfec", @@ -535,7 +502,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_Delay_50_0_Plr_3_Ulpfec) { CreateNetworkEmulationManager(); 
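The recurring `link_capacity_kbps` → `link_capacity` edits in the tests above and below are one mechanical migration: the raw integer field is replaced by a typed `DataRate` value. A minimal sketch of the updated emulated-network configuration, using only the `BuiltInNetworkBehaviorConfig` fields that already appear in this file (the helper function name is illustrative):

```c++
#include "api/test/simulated_network.h"
#include "api/units/data_rate.h"

// Sketch: a 500 kbps link with a 32-packet queue, 100 ms queueing delay and
// 1% packet loss, expressed with the typed DataRate field that replaces
// link_capacity_kbps in this change.
webrtc::BuiltInNetworkBehaviorConfig MakeConstrainedLinkConfig() {
  webrtc::BuiltInNetworkBehaviorConfig config;
  config.link_capacity = webrtc::DataRate::KilobitsPerSec(500);
  config.queue_length_packets = 32;
  config.queue_delay_ms = 100;
  config.loss_percent = 1;
  return config;
}
```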
BuiltInNetworkBehaviorConfig config; config.loss_percent = 3; - config.link_capacity_kbps = 500; + config.link_capacity = DataRate::KilobitsPerSec(500); config.queue_delay_ms = 50; auto fixture = CreateTestFixture( "pc_foreman_cif_500kbps_delay_50_0_plr_3_ulpfec", @@ -568,10 +535,10 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Net_Delay_0_0_Plr_0_H264) { auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); - alice->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + alice->SetVideoCodecs({VideoCodecConfig(webrtc::kH264CodecName)}); }, [](PeerConfigurer* bob) { - bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + bob->SetVideoCodecs({VideoCodecConfig(webrtc::kH264CodecName)}); }); fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } @@ -596,10 +563,10 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_30kbps_Net_Delay_0_0_Plr_0_H264) { bitrate_settings.start_bitrate_bps = 30000; bitrate_settings.max_bitrate_bps = 30000; alice->SetBitrateSettings(bitrate_settings); - alice->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + alice->SetVideoCodecs({VideoCodecConfig(webrtc::kH264CodecName)}); }, [](PeerConfigurer* bob) { - bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + bob->SetVideoCodecs({VideoCodecConfig(webrtc::kH264CodecName)}); }); fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } @@ -621,10 +588,10 @@ TEST(PCGenericDescriptorTest, auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); - alice->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + alice->SetVideoCodecs({VideoCodecConfig(webrtc::kH264CodecName)}); }, [](PeerConfigurer* bob) { - bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + bob->SetVideoCodecs({VideoCodecConfig(webrtc::kH264CodecName)}); }); fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } @@ -648,10 +615,10 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Sps_Pps_Idr) { auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); - alice->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + alice->SetVideoCodecs({VideoCodecConfig(webrtc::kH264CodecName)}); }, [](PeerConfigurer* bob) { - bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + bob->SetVideoCodecs({VideoCodecConfig(webrtc::kH264CodecName)}); }); fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); } @@ -672,11 +639,11 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Flexfec) { auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); - alice->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + alice->SetVideoCodecs({VideoCodecConfig(webrtc::kH264CodecName)}); alice->SetUseFlexFEC(true); }, [](PeerConfigurer* bob) { - bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + bob->SetVideoCodecs({VideoCodecConfig(webrtc::kH264CodecName)}); bob->SetUseFlexFEC(true); }); RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); @@ -702,11 +669,11 @@ TEST(PCFullStackTest, DISABLED_Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Ulpfec) { auto frame_generator = CreateFromYuvFileFrameGenerator( 
video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); - alice->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + alice->SetVideoCodecs({VideoCodecConfig(webrtc::kH264CodecName)}); alice->SetUseUlpFEC(true); }, [](PeerConfigurer* bob) { - bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); + bob->SetVideoCodecs({VideoCodecConfig(webrtc::kH264CodecName)}); bob->SetUseUlpFEC(true); }); fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); @@ -719,7 +686,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps) { BuiltInNetworkBehaviorConfig config; config.queue_length_packets = 0; config.queue_delay_ms = 0; - config.link_capacity_kbps = 500; + config.link_capacity = DataRate::KilobitsPerSec(500); auto fixture = CreateTestFixture( "pc_foreman_cif_500kbps", *network_emulation_manager->time_controller(), network_emulation_manager->CreateEndpointPairWithTwoWayRoutes(config), @@ -740,7 +707,7 @@ TEST_P(ParameterizedPCFullStackTest, Pc_Foreman_Cif_500kbps_32pkts_Queue) { BuiltInNetworkBehaviorConfig config; config.queue_length_packets = 32; config.queue_delay_ms = 0; - config.link_capacity_kbps = 500; + config.link_capacity = DataRate::KilobitsPerSec(500); auto fixture = CreateTestFixture( "pc_foreman_cif_500kbps_32pkts_queue" + GetParam().test_case_name_postfix, *network_emulation_manager->time_controller(), @@ -769,7 +736,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_100ms) { BuiltInNetworkBehaviorConfig config; config.queue_length_packets = 0; config.queue_delay_ms = 100; - config.link_capacity_kbps = 500; + config.link_capacity = DataRate::KilobitsPerSec(500); auto fixture = CreateTestFixture( "pc_foreman_cif_500kbps_100ms", *network_emulation_manager->time_controller(), @@ -792,7 +759,7 @@ TEST(PCGenericDescriptorTest, BuiltInNetworkBehaviorConfig config; config.queue_length_packets = 32; config.queue_delay_ms = 100; - config.link_capacity_kbps = 500; + config.link_capacity = DataRate::KilobitsPerSec(500); auto fixture = CreateTestFixture( "pc_foreman_cif_500kbps_100ms_32pkts_queue_generic_descriptor", *network_emulation_manager->time_controller(), @@ -824,7 +791,7 @@ TEST(PCFullStackTest, ForemanCif500kbps100msLimitedQueueRecvBwe) { 0.0, 0.0, kTestDurationSec}; foreman_cif.config->queue_length_packets = 32; foreman_cif.config->queue_delay_ms = 100; - foreman_cif.config->link_capacity_kbps = 500; + foreman_cif.config->link_capacity = DataRate::KilobitsPerSec(500); fixture->RunWithAnalyzer(foreman_cif); } */ @@ -835,7 +802,7 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_1000kbps_100ms_32pkts_Queue) { BuiltInNetworkBehaviorConfig config; config.queue_length_packets = 32; config.queue_delay_ms = 100; - config.link_capacity_kbps = 1000; + config.link_capacity = DataRate::KilobitsPerSec(1000); auto fixture = CreateTestFixture( "pc_foreman_cif_1000kbps_100ms_32pkts_queue", *network_emulation_manager->time_controller(), @@ -858,7 +825,7 @@ TEST(PCFullStackTest, Pc_Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue) { BuiltInNetworkBehaviorConfig config; config.queue_length_packets = 32; config.queue_delay_ms = 100; - config.link_capacity_kbps = 2000; + config.link_capacity = DataRate::KilobitsPerSec(2000); auto fixture = CreateTestFixture( "pc_conference_motion_hd_2000kbps_100ms_32pkts_queue", *network_emulation_manager->time_controller(), @@ -895,7 +862,7 @@ TEST(PCGenericDescriptorTest, ConferenceMotionHd2TLModerateLimits) { conf_motion_hd.config->queue_length_packets = 50; conf_motion_hd.config->loss_percent = 3; 
conf_motion_hd.config->queue_delay_ms = 100; - conf_motion_hd.config->link_capacity_kbps = 2000; + conf_motion_hd.config->link_capacity = DataRate::KilobitsPerSec(2000); conf_motion_hd.call.generic_descriptor = GenericDescriptorEnabled(); fixture->RunWithAnalyzer(conf_motion_hd); } @@ -919,7 +886,7 @@ TEST(PCFullStackTest, ConferenceMotionHd3TLModerateLimits) { conf_motion_hd.config->queue_length_packets = 50; conf_motion_hd.config->loss_percent = 3; conf_motion_hd.config->queue_delay_ms = 100; - conf_motion_hd.config->link_capacity_kbps = 2000; + conf_motion_hd.config->link_capacity = DataRate::KilobitsPerSec(2000); fixture->RunWithAnalyzer(conf_motion_hd); } @@ -942,62 +909,10 @@ TEST(PCFullStackTest, ConferenceMotionHd4TLModerateLimits) { conf_motion_hd.config->queue_length_packets = 50; conf_motion_hd.config->loss_percent = 3; conf_motion_hd.config->queue_delay_ms = 100; - conf_motion_hd.config->link_capacity_kbps = 2000; - fixture->RunWithAnalyzer(conf_motion_hd); -} - -// TODO(bugs.webrtc.org/10639) requires simulcast/SVC support in PC framework -TEST(PCFullStackTest, ConferenceMotionHd3TLModerateLimitsAltTLPattern) { - test::ScopedFieldTrials field_trial( - AppendFieldTrials("WebRTC-UseShortVP8TL3Pattern/Enabled/")); - auto fixture = CreateVideoQualityTestFixture(); - ParamsWithLogging conf_motion_hd; - conf_motion_hd.call.send_side_bwe = true; - conf_motion_hd.video[0] = { - true, 1280, - 720, 50, - 30000, 3000000, - 3000000, false, - "VP8", 3, - -1, 0, - false, false, - false, ClipNameToClipPath("ConferenceMotion_1280_720_50")}; - conf_motion_hd.analyzer = {"conference_motion_hd_3tl_alt_moderate_limits", - 0.0, 0.0, kTestDurationSec}; - conf_motion_hd.config->queue_length_packets = 50; - conf_motion_hd.config->loss_percent = 3; - conf_motion_hd.config->queue_delay_ms = 100; - conf_motion_hd.config->link_capacity_kbps = 2000; + conf_motion_hd.config->link_capacity = DataRate::KilobitsPerSec(2000); fixture->RunWithAnalyzer(conf_motion_hd); } -// TODO(bugs.webrtc.org/10639) requires simulcast/SVC support in PC framework -TEST(PCFullStackTest, - ConferenceMotionHd3TLModerateLimitsAltTLPatternAndBaseHeavyTLAllocation) { - auto fixture = CreateVideoQualityTestFixture(); - test::ScopedFieldTrials field_trial( - AppendFieldTrials("WebRTC-UseShortVP8TL3Pattern/Enabled/" - "WebRTC-UseBaseHeavyVP8TL3RateAllocation/Enabled/")); - ParamsWithLogging conf_motion_hd; - conf_motion_hd.call.send_side_bwe = true; - conf_motion_hd.video[0] = { - true, 1280, - 720, 50, - 30000, 3000000, - 3000000, false, - "VP8", 3, - -1, 0, - false, false, - false, ClipNameToClipPath("ConferenceMotion_1280_720_50")}; - conf_motion_hd.analyzer = { - "conference_motion_hd_3tl_alt_heavy_moderate_limits", 0.0, 0.0, - kTestDurationSec}; - conf_motion_hd.config->queue_length_packets = 50; - conf_motion_hd.config->loss_percent = 3; - conf_motion_hd.config->queue_delay_ms = 100; - conf_motion_hd.config->link_capacity_kbps = 2000; - fixture->RunWithAnalyzer(conf_motion_hd); -} */ #if defined(RTC_ENABLE_VP9) @@ -1008,7 +923,7 @@ TEST_P(ParameterizedPCFullStackTest, BuiltInNetworkBehaviorConfig config; config.queue_length_packets = 32; config.queue_delay_ms = 100; - config.link_capacity_kbps = 2000; + config.link_capacity = DataRate::KilobitsPerSec(2000); auto fixture = CreateTestFixture( "pc_conference_motion_hd_2000kbps_100ms_32pkts_queue_vp9" + GetParam().test_case_name_postfix, @@ -1021,7 +936,7 @@ TEST_P(ParameterizedPCFullStackTest, video, ClipNameToClipPath("ConferenceMotion_1280_720_50")); 
alice->AddVideoConfig(std::move(video), std::move(frame_generator)); alice->SetVideoCodecs({VideoCodecConfig( - /*name=*/cricket::kVp9CodecName, /*required_params=*/{ + /*name=*/kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); if (GetParam().use_network_thread_as_worker_thread) { @@ -1030,7 +945,7 @@ TEST_P(ParameterizedPCFullStackTest, }, [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig( - /*name=*/cricket::kVp9CodecName, /*required_params=*/{ + /*name=*/kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); if (GetParam().use_network_thread_as_worker_thread) { @@ -1203,7 +1118,7 @@ TEST(PCGenericDescriptorTest, Screenshare_Slides_Lossy_Net_Generic_Descriptor) { 0.0, 0.0, kTestDurationSec}; screenshare.config->loss_percent = 5; screenshare.config->queue_delay_ms = 200; - screenshare.config->link_capacity_kbps = 500; + screenshare.config->link_capacity = DataRate::KilobitsPerSec(500); screenshare.call.generic_descriptor = true; fixture->RunWithAnalyzer(screenshare); } @@ -1221,7 +1136,7 @@ TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_VeryLossyNet) { kTestDurationSec}; screenshare.config->loss_percent = 10; screenshare.config->queue_delay_ms = 200; - screenshare.config->link_capacity_kbps = 500; + screenshare.config->link_capacity = DataRate::KilobitsPerSec(500); fixture->RunWithAnalyzer(screenshare); } @@ -1237,7 +1152,7 @@ TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_LossyNetRestrictedQueue) { screenshare.analyzer = {"screenshare_slides_lossy_limited", 0.0, 0.0, kTestDurationSec}; screenshare.config->loss_percent = 5; - screenshare.config->link_capacity_kbps = 200; + screenshare.config->link_capacity = DataRate::KilobitsPerSec(200); screenshare.config->queue_length_packets = 30; fixture->RunWithAnalyzer(screenshare); @@ -1255,7 +1170,7 @@ TEST(PCFullStackTest, ScreenshareSlidesVP8_2TL_ModeratelyRestricted) { screenshare.analyzer = {"screenshare_slides_moderately_restricted", 0.0, 0.0, kTestDurationSec}; screenshare.config->loss_percent = 1; - screenshare.config->link_capacity_kbps = 1200; + screenshare.config->link_capacity = DataRate::KilobitsPerSec(1200); screenshare.config->queue_length_packets = 30; fixture->RunWithAnalyzer(screenshare); @@ -1338,13 +1253,13 @@ TEST(PCFullStackTest, Pc_Screenshare_Slides_Vp9_3sl_High_Fps) { video, ScreenShareConfig(TimeDelta::Seconds(10))); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); alice->SetVideoCodecs({VideoCodecConfig( - /*name=*/cricket::kVp9CodecName, /*required_params=*/{ + /*name=*/kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }, [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig( - /*name=*/cricket::kVp9CodecName, /*required_params=*/{ + /*name=*/kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }); @@ -1371,13 +1286,13 @@ TEST(PCFullStackTest, Pc_Vp9svc_3sl_High) { video, ClipNameToClipPath("ConferenceMotion_1280_720_50")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); alice->SetVideoCodecs({VideoCodecConfig( - /*name=*/cricket::kVp9CodecName, /*required_params=*/{ + /*name=*/kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }, [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig( - /*name=*/cricket::kVp9CodecName, /*required_params=*/{ + /*name=*/kVp9CodecName, 
/*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }); @@ -1404,13 +1319,13 @@ TEST(PCFullStackTest, Pc_Vp9svc_3sl_Low) { video, ClipNameToClipPath("ConferenceMotion_1280_720_50")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); alice->SetVideoCodecs({VideoCodecConfig( - /*name=*/cricket::kVp9CodecName, /*required_params=*/{ + /*name=*/kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }, [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig( - /*name=*/cricket::kVp9CodecName, /*required_params=*/{ + /*name=*/kVp9CodecName, /*required_params=*/{ {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }); @@ -1481,7 +1396,7 @@ TEST(PCFullStackTest, VP9KSVC_3SL_Medium_Network_Restricted) { simulcast.ss[0] = { std::vector(), 0, 3, -1, InterLayerPredMode::kOnKeyPic, std::vector(), false}; - simulcast.config->link_capacity_kbps = 1000; + simulcast.config->link_capacity = DataRate::KilobitsPerSec(1000); simulcast.config->queue_delay_ms = 100; fixture->RunWithAnalyzer(simulcast); } @@ -1500,7 +1415,7 @@ TEST(PCFullStackTest, VP9KSVC_3SL_Medium_Network_Restricted_Trusted_Rate) { simulcast.ss[0] = { std::vector(), 0, 3, -1, InterLayerPredMode::kOnKeyPic, std::vector(), false}; - simulcast.config->link_capacity_kbps = 1000; + simulcast.config->link_capacity = DataRate::KilobitsPerSec(1000); simulcast.config->queue_delay_ms = 100; fixture->RunWithAnalyzer(simulcast); } @@ -1779,7 +1694,7 @@ TEST_P(PCDualStreamsTest, std::to_string(first_stream); dual_streams.analyzer = {test_label, 0.0, 0.0, kTestDurationSec}; dual_streams.config->loss_percent = 1; - dual_streams.config->link_capacity_kbps = 7500; + dual_streams.config->link_capacity = DataRate::KilobitsPerSec(7500); dual_streams.config->queue_length_packets = 30; dual_streams.config->queue_delay_ms = 100; @@ -1817,7 +1732,7 @@ TEST_P(PCDualStreamsTest, Conference_Restricted) { std::to_string(first_stream); dual_streams.analyzer = {test_label, 0.0, 0.0, kTestDurationSec}; dual_streams.config->loss_percent = 1; - dual_streams.config->link_capacity_kbps = 5000; + dual_streams.config->link_capacity = DataRate::KilobitsPerSec(5000); dual_streams.config->queue_length_packets = 30; dual_streams.config->queue_delay_ms = 100; diff --git a/video/picture_id_tests.cc b/video/picture_id_tests.cc index 09b9118cc7..46d6499fcf 100644 --- a/video/picture_id_tests.cc +++ b/video/picture_id_tests.cc @@ -13,7 +13,6 @@ #include "api/test/simulated_network.h" #include "api/test/video/function_video_encoder_factory.h" #include "call/fake_network_pipe.h" -#include "call/simulated_network.h" #include "media/engine/internal_encoder_factory.h" #include "media/engine/simulcast_encoder_adapter.h" #include "modules/rtp_rtcp/source/create_video_rtp_depacketizer.h" @@ -25,6 +24,7 @@ #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" #include "test/call_test.h" +#include "test/network/simulated_network.h" #include "test/video_test_constants.h" namespace webrtc { @@ -98,16 +98,16 @@ class PictureIdObserver : public test::RtpRtcpObserver { parsed->timestamp = rtp_packet.Timestamp(); parsed->ssrc = rtp_packet.Ssrc(); - absl::optional parsed_payload = + std::optional parsed_payload = depacketizer_->Parse(rtp_packet.PayloadBuffer()); EXPECT_TRUE(parsed_payload); - if (const auto* vp8_header = absl::get_if( + if (const auto* vp8_header = std::get_if( &parsed_payload->video_header.video_type_header)) { 
parsed->picture_id = vp8_header->pictureId; parsed->tl0_pic_idx = vp8_header->tl0PicIdx; parsed->temporal_idx = vp8_header->temporalIdx; - } else if (const auto* vp9_header = absl::get_if( + } else if (const auto* vp9_header = std::get_if( &parsed_payload->video_header.video_type_header)) { parsed->picture_id = vp9_header->picture_id; parsed->tl0_pic_idx = vp9_header->tl0_pic_idx; @@ -175,7 +175,7 @@ class PictureIdObserver : public test::RtpRtcpObserver { } } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { MutexLock lock(&mutex_); ParsedPacket parsed; @@ -363,7 +363,9 @@ void PictureIdTest::TestPictureIdIncreaseAfterRecreateStreams( TEST_P(PictureIdTest, ContinuousAfterReconfigureVp8) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }); SetupEncoder(&encoder_factory, "VP8"); TestPictureIdContinuousAfterReconfigure({1, 3, 3, 1, 1}); } @@ -371,14 +373,18 @@ TEST_P(PictureIdTest, ContinuousAfterReconfigureVp8) { // TODO(bugs.webrtc.org/14985): Investigate and reenable. TEST_P(PictureIdTest, DISABLED_IncreasingAfterRecreateStreamVp8) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }); SetupEncoder(&encoder_factory, "VP8"); TestPictureIdIncreaseAfterRecreateStreams({1, 3, 3, 1, 1}); } TEST_P(PictureIdTest, ContinuousAfterStreamCountChangeVp8) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }); // Make sure that the picture id is not reset if the stream count goes // down and then up. 
SetupEncoder(&encoder_factory, "VP8"); @@ -388,9 +394,10 @@ TEST_P(PictureIdTest, ContinuousAfterStreamCountChangeVp8) { TEST_P(PictureIdTest, ContinuousAfterReconfigureSimulcastEncoderAdapter) { InternalEncoderFactory internal_encoder_factory; test::FunctionVideoEncoderFactory encoder_factory( - [&internal_encoder_factory]() { + [&internal_encoder_factory](const Environment& env, + const SdpVideoFormat& format) { return std::make_unique( - &internal_encoder_factory, SdpVideoFormat("VP8")); + env, &internal_encoder_factory, nullptr, SdpVideoFormat::VP8()); }); SetupEncoder(&encoder_factory, "VP8"); TestPictureIdContinuousAfterReconfigure({1, 3, 3, 1, 1}); @@ -401,9 +408,10 @@ TEST_P(PictureIdTest, DISABLED_IncreasingAfterRecreateStreamSimulcastEncoderAdapter) { InternalEncoderFactory internal_encoder_factory; test::FunctionVideoEncoderFactory encoder_factory( - [&internal_encoder_factory]() { + [&internal_encoder_factory](const Environment& env, + const SdpVideoFormat& format) { return std::make_unique( - &internal_encoder_factory, SdpVideoFormat("VP8")); + env, &internal_encoder_factory, nullptr, SdpVideoFormat::VP8()); }); SetupEncoder(&encoder_factory, "VP8"); TestPictureIdIncreaseAfterRecreateStreams({1, 3, 3, 1, 1}); @@ -412,9 +420,10 @@ TEST_P(PictureIdTest, TEST_P(PictureIdTest, ContinuousAfterStreamCountChangeSimulcastEncoderAdapter) { InternalEncoderFactory internal_encoder_factory; test::FunctionVideoEncoderFactory encoder_factory( - [&internal_encoder_factory]() { + [&internal_encoder_factory](const Environment& env, + const SdpVideoFormat& format) { return std::make_unique( - &internal_encoder_factory, SdpVideoFormat("VP8")); + env, &internal_encoder_factory, nullptr, SdpVideoFormat::VP8()); }); // Make sure that the picture id is not reset if the stream count goes // down and then up. @@ -425,7 +434,9 @@ TEST_P(PictureIdTest, ContinuousAfterStreamCountChangeSimulcastEncoderAdapter) { // TODO(bugs.webrtc.org/14985): Investigate and reenable. TEST_P(PictureIdTest, DISABLED_IncreasingAfterRecreateStreamVp9) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP9Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp9Encoder(env); + }); SetupEncoder(&encoder_factory, "VP9"); TestPictureIdIncreaseAfterRecreateStreams({1, 1}); } diff --git a/video/quality_convergence_controller.cc b/video/quality_convergence_controller.cc new file mode 100644 index 0000000000..c063f62b34 --- /dev/null +++ b/video/quality_convergence_controller.cc @@ -0,0 +1,119 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/quality_convergence_controller.h" + +#include + +#include "rtc_base/checks.h" +#include "rtc_base/experiments/struct_parameters_parser.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace { +// TODO(https://crbug.com/328598314): Remove default values once HW encoders +// correctly report the minimum QP value. These thresholds correspond to the +// default configurations used for the software encoders. 
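Before the threshold constants and the implementation that follow, a brief note on how the new controller is meant to be driven: it is initialized once for the current encoder configuration and then fed one QP sample per encoded frame and layer. A minimal usage sketch, assuming only the API declared in `video/quality_convergence_controller.h` below (the wrapper function and the literal values are illustrative; the real caller is the video stream encoder pipeline):

```c++
#include <optional>

#include "api/field_trials_view.h"
#include "api/video/video_codec_type.h"
#include "video/quality_convergence_controller.h"

// Sketch: two-layer VP8 configuration; `trials` is any FieldTrialsView.
void ExampleUsage(const webrtc::FieldTrialsView& trials) {
  webrtc::QualityConvergenceController controller;
  controller.Initialize(/*number_of_layers=*/2,
                        /*encoder_min_qp=*/std::nullopt,
                        webrtc::kVideoCodecVP8, trials);

  // Per encoded frame, for the layer the frame belongs to:
  bool at_target = controller.AddSampleAndCheckTargetQuality(
      /*layer_index=*/0, /*qp=*/25, /*is_refresh_frame=*/false);
  // `at_target` can be used, e.g., to stop scheduling further quality
  // refresh frames for that layer.
  (void)at_target;
}
```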
+constexpr int kVp8DefaultStaticQpThreshold = 15; +constexpr int kVp9DefaultStaticQpThreshold = 32; +constexpr int kAv1DefaultStaticQpThreshold = 60; + +struct StaticDetectionConfig { + // Overrides the static QP threshold if set to a higher value than what is + // reported by the encoder. + std::optional<int> static_qp_threshold_override; + std::unique_ptr<StructParametersParser> Parser(); +}; + +std::unique_ptr<StructParametersParser> StaticDetectionConfig::Parser() { + // The empty comments ensure that each pair is on a separate line. + return StructParametersParser::Create("static_qp_threshold", + &static_qp_threshold_override); +} + +int GetDefaultStaticQpThreshold(VideoCodecType codec, + const FieldTrialsView& trials) { + StaticDetectionConfig static_config; + int default_static_qp_threshold = 0; + switch (codec) { + case kVideoCodecVP8: + default_static_qp_threshold = kVp8DefaultStaticQpThreshold; + static_config.Parser()->Parse(trials.Lookup("WebRTC-QCM-Static-VP8")); + break; + case kVideoCodecVP9: + default_static_qp_threshold = kVp9DefaultStaticQpThreshold; + static_config.Parser()->Parse(trials.Lookup("WebRTC-QCM-Static-VP9")); + break; + case kVideoCodecAV1: + default_static_qp_threshold = kAv1DefaultStaticQpThreshold; + static_config.Parser()->Parse(trials.Lookup("WebRTC-QCM-Static-AV1")); + break; + case kVideoCodecGeneric: + case kVideoCodecH264: + case kVideoCodecH265: + // -1 will effectively disable the static QP threshold since QP values are + // always >= 0. + return -1; + } + + if (static_config.static_qp_threshold_override.has_value()) { + RTC_LOG(LS_INFO) << "static_qp_threshold_override: " + << *static_config.static_qp_threshold_override; + return *static_config.static_qp_threshold_override; + } + + return default_static_qp_threshold; +} +} // namespace + +void QualityConvergenceController::Initialize(int number_of_layers, + std::optional<int> encoder_min_qp, + VideoCodecType codec, + const FieldTrialsView& trials) { + RTC_DCHECK(sequence_checker_.IsCurrent()); + RTC_CHECK(number_of_layers > 0); + number_of_layers_ = number_of_layers; + convergence_monitors_.clear(); + + int qp_threshold = GetDefaultStaticQpThreshold(codec, trials); + if (encoder_min_qp.has_value()) { + qp_threshold = std::max(qp_threshold, *encoder_min_qp); + } + + for (int i = 0; i < number_of_layers_; ++i) { + convergence_monitors_.push_back( + QualityConvergenceMonitor::Create(qp_threshold, codec, trials)); + } + initialized_ = true; +} + +bool QualityConvergenceController::AddSampleAndCheckTargetQuality( + int layer_index, + int qp, + bool is_refresh_frame) { + RTC_DCHECK(sequence_checker_.IsCurrent()); + RTC_CHECK(initialized_); + if (layer_index < 0 || layer_index >= number_of_layers_) { + return false; + } + + // TODO(kron): Remove temporary check that verifies that the initialization is + // working as expected. See https://crbug.com/359410061. + RTC_DCHECK(number_of_layers_ == + static_cast<int>(convergence_monitors_.size())); + if (number_of_layers_ != static_cast<int>(convergence_monitors_.size())) { + return false; + } + + convergence_monitors_[layer_index]->AddSample(qp, is_refresh_frame); + return convergence_monitors_[layer_index]->AtTargetQuality(); +} + +} // namespace webrtc diff --git a/video/quality_convergence_controller.h b/video/quality_convergence_controller.h new file mode 100644 index 0000000000..16978fda65 --- /dev/null +++ b/video/quality_convergence_controller.h @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved.
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_QUALITY_CONVERGENCE_CONTROLLER_H_ +#define VIDEO_QUALITY_CONVERGENCE_CONTROLLER_H_ + +#include +#include +#include + +#include "api/field_trials_view.h" +#include "api/sequence_checker.h" +#include "api/video/video_codec_type.h" +#include "video/quality_convergence_monitor.h" + +namespace webrtc { + +class QualityConvergenceController { + public: + void Initialize(int number_of_layers, + std::optional static_qp_threshold, + VideoCodecType codec, + const FieldTrialsView& trials); + + // Add the supplied `qp` value to the detection window for specified layer. + // `is_refresh_frame` must only be `true` if the corresponding + // video frame is a refresh frame that is used to improve the visual quality. + // Returns `true` if the algorithm has determined that the supplied QP values + // have converged and reached the target quality for this layer. + bool AddSampleAndCheckTargetQuality(int layer_index, + int qp, + bool is_refresh_frame); + + private: + bool initialized_ = false; + int number_of_layers_ = 0; + std::vector> convergence_monitors_; + SequenceChecker sequence_checker_{SequenceChecker::kDetached}; +}; + +} // namespace webrtc + +#endif // VIDEO_QUALITY_CONVERGENCE_CONTROLLER_H_ diff --git a/video/quality_convergence_controller_unittest.cc b/video/quality_convergence_controller_unittest.cc new file mode 100644 index 0000000000..46965d4bff --- /dev/null +++ b/video/quality_convergence_controller_unittest.cc @@ -0,0 +1,134 @@ + +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "video/quality_convergence_controller.h" + +#include + +#include "test/gtest.h" +#include "test/scoped_key_value_config.h" + +namespace webrtc { +namespace { +constexpr int kVp8DefaultStaticQpThreshold = 15; + +TEST(QualityConvergenceController, Singlecast) { + test::ScopedKeyValueConfig field_trials; + QualityConvergenceController controller; + controller.Initialize(1, /*encoder_min_qp=*/std::nullopt, kVideoCodecVP8, + field_trials); + + EXPECT_FALSE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/0, kVp8DefaultStaticQpThreshold + 1, + /*is_refresh_frame=*/false)); + EXPECT_TRUE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/0, kVp8DefaultStaticQpThreshold, + /*is_refresh_frame=*/false)); +} + +TEST(QualityConvergenceController, Simulcast) { + test::ScopedKeyValueConfig field_trials; + QualityConvergenceController controller; + controller.Initialize(2, /*encoder_min_qp=*/std::nullopt, kVideoCodecVP8, + field_trials); + + EXPECT_FALSE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/0, kVp8DefaultStaticQpThreshold + 1, + /*is_refresh_frame=*/false)); + EXPECT_FALSE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/1, kVp8DefaultStaticQpThreshold + 1, + /*is_refresh_frame=*/false)); + + // Layer 0 reaches target quality. 
+ EXPECT_TRUE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/0, kVp8DefaultStaticQpThreshold, + /*is_refresh_frame=*/false)); + EXPECT_FALSE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/1, kVp8DefaultStaticQpThreshold + 1, + /*is_refresh_frame=*/false)); + + // Frames are repeated for both layers. Layer 0 still at target quality. + EXPECT_TRUE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/0, kVp8DefaultStaticQpThreshold, + /*is_refresh_frame=*/true)); + EXPECT_FALSE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/1, kVp8DefaultStaticQpThreshold + 1, + /*is_refresh_frame=*/true)); +} + +TEST(QualityConvergenceController, InvalidLayerIndex) { + test::ScopedKeyValueConfig field_trials; + QualityConvergenceController controller; + controller.Initialize(2, /*encoder_min_qp=*/std::nullopt, kVideoCodecVP8, + field_trials); + + EXPECT_FALSE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/-1, kVp8DefaultStaticQpThreshold, + /*is_refresh_frame=*/false)); + EXPECT_FALSE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/3, kVp8DefaultStaticQpThreshold, + /*is_refresh_frame=*/false)); +} + +TEST(QualityConvergenceController, UseMaxOfEncoderMinAndDefaultQpThresholds) { + test::ScopedKeyValueConfig field_trials; + QualityConvergenceController controller; + controller.Initialize(1, kVp8DefaultStaticQpThreshold + 1, kVideoCodecVP8, + field_trials); + + EXPECT_FALSE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/0, kVp8DefaultStaticQpThreshold + 2, + /*is_refresh_frame=*/false)); + EXPECT_TRUE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/0, kVp8DefaultStaticQpThreshold + 1, + /*is_refresh_frame=*/false)); +} + +TEST(QualityConvergenceController, OverrideVp8StaticThreshold) { + test::ScopedKeyValueConfig field_trials( + "WebRTC-QCM-Static-VP8/static_qp_threshold:22/"); + QualityConvergenceController controller; + controller.Initialize(1, /*encoder_min_qp=*/std::nullopt, kVideoCodecVP8, + field_trials); + + EXPECT_FALSE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/0, /*qp=*/23, /*is_refresh_frame=*/false)); + EXPECT_TRUE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/0, /*qp=*/22, /*is_refresh_frame=*/false)); +} + +TEST(QualityConvergenceMonitorSetup, OverrideVp9StaticThreshold) { + test::ScopedKeyValueConfig field_trials( + "WebRTC-QCM-Static-VP9/static_qp_threshold:44/"); + QualityConvergenceController controller; + controller.Initialize(1, /*encoder_min_qp=*/std::nullopt, kVideoCodecVP9, + field_trials); + + EXPECT_FALSE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/0, /*qp=*/45, /*is_refresh_frame=*/false)); + EXPECT_TRUE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/0, /*qp=*/44, /*is_refresh_frame=*/false)); +} + +TEST(QualityConvergenceMonitorSetup, OverrideAv1StaticThreshold) { + test::ScopedKeyValueConfig field_trials( + "WebRTC-QCM-Static-AV1/static_qp_threshold:46/"); + QualityConvergenceController controller; + controller.Initialize(1, /*encoder_min_qp=*/std::nullopt, kVideoCodecAV1, + field_trials); + + EXPECT_FALSE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/0, /*qp=*/47, /*is_refresh_frame=*/false)); + EXPECT_TRUE(controller.AddSampleAndCheckTargetQuality( + /*layer_index=*/0, /*qp=*/46, /*is_refresh_frame=*/false)); +} + +} // namespace +} // namespace webrtc diff --git a/video/quality_convergence_monitor.cc b/video/quality_convergence_monitor.cc new file mode 100644 index 0000000000..25b7bed7a5 --- 
/dev/null +++ b/video/quality_convergence_monitor.cc @@ -0,0 +1,207 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/quality_convergence_monitor.h" + +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/experiments/struct_parameters_parser.h" + +namespace webrtc { +namespace { +constexpr size_t kDefaultRecentWindowLength = 6; +constexpr size_t kDefaultPastWindowLength = 6; +constexpr float kDefaultAlpha = 0.06; + +struct DynamicDetectionConfig { + bool enabled = false; + // alpha is a percentage of the codec-specific max QP value that is used to + // determine the dynamic QP threshold: + // dynamic_qp_threshold = static_min_qp_threshold + alpha * max_QP + // Please note that although the static threshold is overridden, the dynamic + // threshold is calculated from static_min_qp_threshold reported by the + // encoder. + double alpha = kDefaultAlpha; + int recent_length = kDefaultRecentWindowLength; + int past_length = kDefaultPastWindowLength; + std::unique_ptr Parser(); +}; + +std::unique_ptr DynamicDetectionConfig::Parser() { + // The empty comments ensures that each pair is on a separate line. + return StructParametersParser::Create("enabled", &enabled, // + "alpha", &alpha, // + "recent_length", &recent_length, // + "past_length", &past_length); +} + +QualityConvergenceMonitor::Parameters GetParameters( + int static_qp_threshold, + VideoCodecType codec, + const FieldTrialsView& trials) { + QualityConvergenceMonitor::Parameters params; + params.static_qp_threshold = static_qp_threshold; + + DynamicDetectionConfig dynamic_config; + // Apply codec specific settings. + int max_qp = 0; + switch (codec) { + case kVideoCodecVP8: + dynamic_config.enabled = true; + dynamic_config.Parser()->Parse(trials.Lookup("WebRTC-QCM-Dynamic-VP8")); + max_qp = 127; + break; + case kVideoCodecVP9: + // Change to enabled by default for VP9. + dynamic_config.enabled = true; + dynamic_config.Parser()->Parse(trials.Lookup("WebRTC-QCM-Dynamic-VP9")); + max_qp = 255; + break; + case kVideoCodecAV1: + // Change to enabled by default for AV1. + dynamic_config.enabled = true; + dynamic_config.Parser()->Parse(trials.Lookup("WebRTC-QCM-Dynamic-AV1")); + max_qp = 255; + break; + case kVideoCodecGeneric: + case kVideoCodecH264: + case kVideoCodecH265: + break; + } + + if (dynamic_config.enabled) { + params.dynamic_detection_enabled = dynamic_config.enabled; + params.dynamic_qp_threshold = + static_qp_threshold + max_qp * dynamic_config.alpha; + params.recent_window_length = dynamic_config.recent_length; + params.past_window_length = dynamic_config.past_length; + } + return params; +} +} // namespace + +QualityConvergenceMonitor::QualityConvergenceMonitor(const Parameters& params) + : params_(params) { + RTC_CHECK( + !params_.dynamic_detection_enabled || + (params_.past_window_length > 0 && params_.recent_window_length > 0)); +} + +// Adds the sample to the algorithms detection window and runs the following +// convergence detection algorithm to determine if the time series of QP +// values indicates that the encoded video has reached "target quality". 
+// +// Definitions +// +// - Let x[n] be the pixel data of a video frame. +// - Let e[n] be the encoded representation of x[n]. +// - Let qp[n] be the corresponding QP value of the encoded video frame e[n]. +// - x[n] is a refresh frame if x[n] = x[n-1]. +// - qp_window is a list (or queue) of stored QP values, with size +// L <= past_window_length + recent_window_length. +// - qp_window can be partitioned into: +// qp_past = qp_window[ 0:end-recent_window_length ] and +// qp_recent = qp_window[ -recent_window_length:end ]. +// - Let dynamic_qp_threshold be a maximum QP value for which convergence +// is accepted. +// +// Algorithm +// +// For each encoded video frame e[n], take the corresponding qp[n] and do the +// following: +// 0. Check Static Threshold: if qp[n] <= static_qp_threshold, return true. +// 1. Check for Refresh Frame: If x[n] is not a refresh frame: +// - Clear qp_window. +// - Return false. +// 2. Check Previous Convergence: If x[n] is a refresh frame AND true was +// returned for x[n-1], return true. +// 3. Update QP History: Append qp[n] to qp_window. If qp_window's length +// exceeds past_window_length + recent_window_length, remove the first +// element. +// 4. Check for Sufficient Data: If L <= recent_window_length, return false. +// 5. Calculate Average QP: Calculate avg(qp_past) and avg(qp_recent). +// 6. Determine Convergence: If avg(qp_past) <= dynamic_qp_threshold AND +// avg(qp_past) <= avg(qp_recent), return true. Otherwise, return false. +// +void QualityConvergenceMonitor::AddSample(int qp, bool is_refresh_frame) { + // Invalid QP. + if (qp < 0) { + qp_window_.clear(); + at_target_quality_ = false; + return; + } + + // 0. Check static threshold. + if (qp <= params_.static_qp_threshold) { + at_target_quality_ = true; + return; + } + + // 1. Check for refresh frame and if dynamic detection is disabled. + if (!is_refresh_frame || !params_.dynamic_detection_enabled) { + qp_window_.clear(); + at_target_quality_ = false; + return; + } + + // 2. Check previous convergence. + RTC_CHECK(is_refresh_frame); + if (at_target_quality_) { + // No need to update state. + return; + } + + // 3. Update QP history. + qp_window_.push_back(qp); + if (qp_window_.size() > + params_.recent_window_length + params_.past_window_length) { + qp_window_.pop_front(); + } + + // 4. Check for sufficient data. + if (qp_window_.size() <= params_.recent_window_length) { + // No need to update state. + RTC_CHECK(at_target_quality_ == false); + return; + } + + // 5. Calculate average QP. + float qp_past_average = + std::accumulate(qp_window_.begin(), + qp_window_.end() - params_.recent_window_length, 0.0) / + (qp_window_.size() - params_.recent_window_length); + float qp_recent_average = + std::accumulate(qp_window_.end() - params_.recent_window_length, + qp_window_.end(), 0.0) / + params_.recent_window_length; + // 6. Determine convergence.
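// Worked illustration of steps 3-6 (hypothetical sample values, assuming
// past_window_length = 3, recent_window_length = 3 and dynamic_qp_threshold =
// 24, as in the unit tests): the refresh-frame samples 22, 21, 21, 21, 21, 22
// give qp_past = {22, 21, 21} and qp_recent = {21, 21, 22}, both averaging
// 21.33, so avg(qp_past) <= dynamic_qp_threshold and avg(qp_past) <=
// avg(qp_recent), and the monitor reports target quality. With the samples
// 23, 21, 21, 21, 21, 22 instead, avg(qp_past) = 21.67 > avg(qp_recent) =
// 21.33, so it does not. The default dynamic_qp_threshold itself comes from
// GetParameters() above: static threshold + alpha * max_QP, e.g.
// 13 + 0.06 * 127, truncated to 20 for VP8.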
+ if (qp_past_average <= params_.dynamic_qp_threshold && + qp_past_average <= qp_recent_average) { + at_target_quality_ = true; + } +} + +bool QualityConvergenceMonitor::AtTargetQuality() const { + return at_target_quality_; +} + +// Static +std::unique_ptr QualityConvergenceMonitor::Create( + int static_qp_threshold, + VideoCodecType codec, + const FieldTrialsView& trials) { + Parameters params = GetParameters(static_qp_threshold, codec, trials); + return std::unique_ptr( + new QualityConvergenceMonitor(params)); +} + +} // namespace webrtc diff --git a/video/quality_convergence_monitor.h b/video/quality_convergence_monitor.h new file mode 100644 index 0000000000..326372db43 --- /dev/null +++ b/video/quality_convergence_monitor.h @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef VIDEO_QUALITY_CONVERGENCE_MONITOR_H_ +#define VIDEO_QUALITY_CONVERGENCE_MONITOR_H_ + +#include +#include + +#include "api/field_trials_view.h" +#include "api/video/video_codec_type.h" + +namespace webrtc { + +class QualityConvergenceMonitor { + public: + struct Parameters { + // Static QP threshold. No history or even refresh-frame requirements to + // determine that target quality is reached if the QP value is at or below + // this threshold. + int static_qp_threshold = 0; + + // Determines if the dynamic threshold should be used for refresh frames. + bool dynamic_detection_enabled = false; + + // Window lengths of QP values to use when determining if refresh frames + // have reached the target quality. The combined window length is + // `past_window_length` + `recent_window_length`. The recent part of the + // window contains the most recent samples. Once the recent buffer reaches + // this length, new samples will pop the oldest samples in recent and move + // them to the past buffer. The average of `QP_past` must be equal to or + // less than the average of `QP_recent` to determine that target quality is + // reached. See the implementation in `AddSample()`. + size_t recent_window_length = 0; + size_t past_window_length = 0; + + // During dynamic detection, the average of `QP_past` must be less than or + // equal to this threshold to determine that target quality is reached. + int dynamic_qp_threshold = 0; + }; + + explicit QualityConvergenceMonitor(const Parameters& params); + + static std::unique_ptr Create( + int static_qp_threshold, + VideoCodecType codec, + const FieldTrialsView& trials); + + // Add the supplied `qp` value to the detection window. + // `is_refresh_frame` must only be `true` if the corresponding + // video frame is a refresh frame that is used to improve the visual quality. + void AddSample(int qp, bool is_refresh_frame); + + // Returns `true` if the algorithm has determined that the supplied QP values + // have converged and reached the target quality. + bool AtTargetQuality() const; + + // Used in tests to verify that default values and field trials are set + // correctly. + Parameters GetParametersForTesting() const { return params_; } + + private: + const Parameters params_; + bool at_target_quality_ = false; + + // Contains a window of QP values. 
New values are added at the back while old + // values are popped from the front to maintain the configured window length. + std::deque qp_window_; +}; + +} // namespace webrtc + +#endif // VIDEO_QUALITY_CONVERGENCE_MONITOR_H_ diff --git a/video/quality_convergence_monitor_unittest.cc b/video/quality_convergence_monitor_unittest.cc new file mode 100644 index 0000000000..e236f1dab9 --- /dev/null +++ b/video/quality_convergence_monitor_unittest.cc @@ -0,0 +1,306 @@ + +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "video/quality_convergence_monitor.h" + +#include + +#include "test/gtest.h" +#include "test/scoped_key_value_config.h" + +namespace webrtc { +namespace { +constexpr int kStaticQpThreshold = 13; + +constexpr QualityConvergenceMonitor::Parameters kParametersOnlyStaticThreshold = + {.static_qp_threshold = kStaticQpThreshold, + .dynamic_detection_enabled = false}; +constexpr QualityConvergenceMonitor::Parameters + kParametersWithDynamicDetection = { + .static_qp_threshold = kStaticQpThreshold, + .dynamic_detection_enabled = true, + .recent_window_length = 3, + .past_window_length = 9, + .dynamic_qp_threshold = 24}; + +// Test the basics of the algorithm. + +TEST(QualityConvergenceMonitorAlgorithm, StaticThreshold) { + QualityConvergenceMonitor::Parameters p = kParametersOnlyStaticThreshold; + auto monitor = std::make_unique(p); + ASSERT_TRUE(monitor); + + for (bool is_refresh_frame : {false, true}) { + // Ramp down from 100. Not at target quality until qp <= static threshold. + for (int qp = 100; qp > p.static_qp_threshold; --qp) { + monitor->AddSample(qp, is_refresh_frame); + EXPECT_FALSE(monitor->AtTargetQuality()); + } + + monitor->AddSample(p.static_qp_threshold, is_refresh_frame); + EXPECT_TRUE(monitor->AtTargetQuality()); + + // 100 samples just above the threshold is not at target quality. + for (int i = 0; i < 100; ++i) { + monitor->AddSample(p.static_qp_threshold + 1, is_refresh_frame); + EXPECT_FALSE(monitor->AtTargetQuality()); + } + } +} + +TEST(QualityConvergenceMonitorAlgorithm, + StaticThresholdWithDynamicDetectionEnabled) { + QualityConvergenceMonitor::Parameters p = kParametersWithDynamicDetection; + auto monitor = std::make_unique(p); + ASSERT_TRUE(monitor); + + for (bool is_refresh_frame : {false, true}) { + // Clear buffer. + monitor->AddSample(-1, /*is_refresh_frame=*/false); + EXPECT_FALSE(monitor->AtTargetQuality()); + + // Ramp down from 100. Not at target quality until qp <= static threshold. + for (int qp = 100; qp > p.static_qp_threshold; --qp) { + monitor->AddSample(qp, is_refresh_frame); + EXPECT_FALSE(monitor->AtTargetQuality()); + } + + // A single frame at the static QP threshold is considered to be at target + // quality regardless of if it's a refresh frame or not. + monitor->AddSample(p.static_qp_threshold, is_refresh_frame); + EXPECT_TRUE(monitor->AtTargetQuality()); + } + + // 100 samples just above the threshold is not at target quality if it's not a + // refresh frame. 
+ for (int i = 0; i < 100; ++i) { + monitor->AddSample(p.static_qp_threshold + 1, /*is_refresh_frame=*/false); + EXPECT_FALSE(monitor->AtTargetQuality()); + } +} + +TEST(QualityConvergenceMonitorAlgorithm, ConvergenceAtDynamicThreshold) { + QualityConvergenceMonitor::Parameters p = kParametersWithDynamicDetection; + auto monitor = std::make_unique(p); + ASSERT_TRUE(monitor); + + // `recent_window_length` + `past_window_length` refresh frames at the dynamic + // threshold must mean we're at target quality. + for (size_t i = 0; i < p.recent_window_length + p.past_window_length; ++i) { + monitor->AddSample(p.dynamic_qp_threshold, /*is_refresh_frame=*/true); + } + EXPECT_TRUE(monitor->AtTargetQuality()); +} + +TEST(QualityConvergenceMonitorAlgorithm, NoConvergenceAboveDynamicThreshold) { + QualityConvergenceMonitor::Parameters p = kParametersWithDynamicDetection; + auto monitor = std::make_unique(p); + ASSERT_TRUE(monitor); + + // 100 samples just above the threshold must imply that we're not at target + // quality. + for (int i = 0; i < 100; ++i) { + monitor->AddSample(p.dynamic_qp_threshold + 1, /*is_refresh_frame=*/true); + EXPECT_FALSE(monitor->AtTargetQuality()); + } +} + +TEST(QualityConvergenceMonitorAlgorithm, + MaintainAtTargetQualityForRefreshFrames) { + QualityConvergenceMonitor::Parameters p = kParametersWithDynamicDetection; + auto monitor = std::make_unique(p); + ASSERT_TRUE(monitor); + + // `recent_window_length` + `past_window_length` refresh frames at the dynamic + // threshold must mean we're at target quality. + for (size_t i = 0; i < p.recent_window_length + p.past_window_length; ++i) { + monitor->AddSample(p.dynamic_qp_threshold, /*is_refresh_frame=*/true); + } + EXPECT_TRUE(monitor->AtTargetQuality()); + + int qp = p.dynamic_qp_threshold; + for (int i = 0; i < 100; ++i) { + monitor->AddSample(qp++, /*is_refresh_frame=*/true); + EXPECT_TRUE(monitor->AtTargetQuality()); + } + + // Reset state for first frame that is not a refresh frame. + monitor->AddSample(qp, /*is_refresh_frame=*/false); + EXPECT_FALSE(monitor->AtTargetQuality()); +} + +// Test corner cases. + +TEST(QualityConvergenceMonitorAlgorithm, SufficientData) { + QualityConvergenceMonitor::Parameters p = kParametersWithDynamicDetection; + auto monitor = std::make_unique(p); + ASSERT_TRUE(monitor); + + // Less than `recent_window_length + 1` refresh frame QP values at the dynamic + // threshold is not sufficient. + for (size_t i = 0; i < p.recent_window_length; ++i) { + monitor->AddSample(p.dynamic_qp_threshold, /*is_refresh_frame=*/true); + // Not sufficient data + EXPECT_FALSE(monitor->AtTargetQuality()); + } + + // However, `recent_window_length + 1` QP values are sufficient. + monitor->AddSample(p.dynamic_qp_threshold, /*is_refresh_frame=*/true); + EXPECT_TRUE(monitor->AtTargetQuality()); +} + +TEST(QualityConvergenceMonitorAlgorithm, + AtTargetIfQpPastLessThanOrEqualToQpRecent) { + QualityConvergenceMonitor::Parameters p = kParametersWithDynamicDetection; + p.past_window_length = 3; + p.recent_window_length = 3; + auto monitor = std::make_unique(p); + + // Sequence for which QP_past > QP_recent. + for (int qp : {23, 21, 21, 21, 21, 22}) { + monitor->AddSample(qp, /*is_refresh_frame=*/true); + EXPECT_FALSE(monitor->AtTargetQuality()); + } + + // Reset QP window. + monitor->AddSample(-1, /*is_refresh_frame=*/false); + EXPECT_FALSE(monitor->AtTargetQuality()); + + // Sequence for which one additional sample of 22 will make QP_past == + // QP_recent. 
+ for (int qp : {22, 21, 21, 21, 21}) { + monitor->AddSample(qp, /*is_refresh_frame=*/true); + EXPECT_FALSE(monitor->AtTargetQuality()); + } + monitor->AddSample(22, /*is_refresh_frame=*/true); + EXPECT_TRUE(monitor->AtTargetQuality()); + + // Reset QP window. + monitor->AddSample(-1, /*is_refresh_frame=*/false); + EXPECT_FALSE(monitor->AtTargetQuality()); + + // Sequence for which one additional sample of 23 will make QP_past < + // QP_recent. + for (int qp : {22, 21, 21, 21, 21}) { + monitor->AddSample(qp, /*is_refresh_frame=*/true); + EXPECT_FALSE(monitor->AtTargetQuality()); + } + monitor->AddSample(23, /*is_refresh_frame=*/true); + EXPECT_TRUE(monitor->AtTargetQuality()); +} + +// Test default values and that they can be overridden with field trials. + +TEST(QualityConvergenceMonitorSetup, DefaultParameters) { + test::ScopedKeyValueConfig field_trials; + auto monitor = QualityConvergenceMonitor::Create( + kStaticQpThreshold, kVideoCodecVP8, field_trials); + ASSERT_TRUE(monitor); + QualityConvergenceMonitor::Parameters vp8_parameters = + monitor->GetParametersForTesting(); + EXPECT_EQ(vp8_parameters.static_qp_threshold, kStaticQpThreshold); + EXPECT_TRUE(vp8_parameters.dynamic_detection_enabled); + EXPECT_EQ(vp8_parameters.dynamic_qp_threshold, 20); // 13 + 7. + EXPECT_EQ(vp8_parameters.recent_window_length, 6u); + EXPECT_EQ(vp8_parameters.past_window_length, 6u); + + monitor = QualityConvergenceMonitor::Create(kStaticQpThreshold, + kVideoCodecVP9, field_trials); + ASSERT_TRUE(monitor); + QualityConvergenceMonitor::Parameters vp9_parameters = + monitor->GetParametersForTesting(); + EXPECT_EQ(vp9_parameters.static_qp_threshold, kStaticQpThreshold); + EXPECT_TRUE(vp9_parameters.dynamic_detection_enabled); + EXPECT_EQ(vp9_parameters.dynamic_qp_threshold, 28); // 13 + 15. + EXPECT_EQ(vp9_parameters.recent_window_length, 6u); + EXPECT_EQ(vp9_parameters.past_window_length, 6u); + + monitor = QualityConvergenceMonitor::Create(kStaticQpThreshold, + kVideoCodecAV1, field_trials); + ASSERT_TRUE(monitor); + QualityConvergenceMonitor::Parameters av1_parameters = + monitor->GetParametersForTesting(); + EXPECT_EQ(av1_parameters.static_qp_threshold, kStaticQpThreshold); + EXPECT_TRUE(av1_parameters.dynamic_detection_enabled); + EXPECT_EQ(av1_parameters.dynamic_qp_threshold, 28); // 13 + 15. + EXPECT_EQ(av1_parameters.recent_window_length, 6u); + EXPECT_EQ(av1_parameters.past_window_length, 6u); +} + +TEST(QualityConvergenceMonitorSetup, OverrideVp8Parameters) { + test::ScopedKeyValueConfig field_trials( + "WebRTC-QCM-Dynamic-VP8/" + "enabled:1,alpha:0.08,recent_length:6,past_length:4/"); + + auto monitor = QualityConvergenceMonitor::Create( + kStaticQpThreshold, kVideoCodecVP8, field_trials); + ASSERT_TRUE(monitor); + QualityConvergenceMonitor::Parameters p = monitor->GetParametersForTesting(); + EXPECT_EQ(p.static_qp_threshold, kStaticQpThreshold); + EXPECT_TRUE(p.dynamic_detection_enabled); + EXPECT_EQ(p.dynamic_qp_threshold, 23); // 13 + 10. 
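The expected thresholds in these setup tests are consistent with the dynamic threshold being derived as the static threshold plus `alpha` scaled by the codec's maximum QP index (127 for VP8, 255 for VP9 and AV1), truncated to an integer. `GetParameters()` itself is not part of this patch, so the following sketch is an inference, and `DynamicQpThreshold` is a hypothetical helper name, not the real implementation:

// Hypothetical reconstruction of the threshold derivation shown by the
// expectations in these tests; not part of this patch.
constexpr int DynamicQpThreshold(int static_qp_threshold,
                                 double alpha,
                                 int codec_max_qp) {
  return static_qp_threshold + static_cast<int>(alpha * codec_max_qp);
}

// Matches the field-trial overrides exercised in these setup tests.
static_assert(DynamicQpThreshold(13, 0.08, 127) == 23);  // VP8, "13 + 10".
static_assert(DynamicQpThreshold(13, 0.08, 255) == 33);  // VP9, "13 + 20".
static_assert(DynamicQpThreshold(13, 0.10, 255) == 38);  // AV1, "13 + 25".

Under the same reading, the default expectations above (13 + 7 for VP8, 13 + 15 for VP9/AV1) would be consistent with a default alpha of roughly 0.06, though that value is not stated anywhere in this patch.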
+ EXPECT_EQ(p.recent_window_length, 6u); + EXPECT_EQ(p.past_window_length, 4u); +} + +TEST(QualityConvergenceMonitorSetup, OverrideVp9Parameters) { + test::ScopedKeyValueConfig field_trials( + "WebRTC-QCM-Dynamic-VP9/" + "enabled:1,alpha:0.08,recent_length:6,past_length:4/"); + + auto monitor = QualityConvergenceMonitor::Create( + kStaticQpThreshold, kVideoCodecVP9, field_trials); + ASSERT_TRUE(monitor); + QualityConvergenceMonitor::Parameters p = monitor->GetParametersForTesting(); + EXPECT_EQ(p.static_qp_threshold, kStaticQpThreshold); + EXPECT_TRUE(p.dynamic_detection_enabled); + EXPECT_EQ(p.dynamic_qp_threshold, 33); // 13 + 20. + EXPECT_EQ(p.recent_window_length, 6u); + EXPECT_EQ(p.past_window_length, 4u); +} + +TEST(QualityConvergenceMonitorSetup, OverrideAv1Parameters) { + test::ScopedKeyValueConfig field_trials( + "WebRTC-QCM-Dynamic-AV1/" + "enabled:1,alpha:0.10,recent_length:8,past_length:8/"); + + auto monitor = QualityConvergenceMonitor::Create( + kStaticQpThreshold, kVideoCodecAV1, field_trials); + ASSERT_TRUE(monitor); + QualityConvergenceMonitor::Parameters p = monitor->GetParametersForTesting(); + EXPECT_EQ(p.static_qp_threshold, kStaticQpThreshold); + EXPECT_TRUE(p.dynamic_detection_enabled); + EXPECT_EQ(p.dynamic_qp_threshold, 38); // 13 + 25. + EXPECT_EQ(p.recent_window_length, 8u); + EXPECT_EQ(p.past_window_length, 8u); +} + +TEST(QualityConvergenceMonitorSetup, DisableVp9Dynamic) { + test::ScopedKeyValueConfig field_trials("WebRTC-QCM-Dynamic-VP9/enabled:0/"); + + auto monitor = QualityConvergenceMonitor::Create( + kStaticQpThreshold, kVideoCodecVP9, field_trials); + ASSERT_TRUE(monitor); + QualityConvergenceMonitor::Parameters p = monitor->GetParametersForTesting(); + EXPECT_FALSE(p.dynamic_detection_enabled); +} + +TEST(QualityConvergenceMonitorSetup, DisableAv1Dynamic) { + test::ScopedKeyValueConfig field_trials("WebRTC-QCM-Dynamic-AV1/enabled:0/"); + + auto monitor = QualityConvergenceMonitor::Create( + kStaticQpThreshold, kVideoCodecAV1, field_trials); + ASSERT_TRUE(monitor); + QualityConvergenceMonitor::Parameters p = monitor->GetParametersForTesting(); + EXPECT_FALSE(p.dynamic_detection_enabled); +} + +} // namespace +} // namespace webrtc diff --git a/video/quality_scaling_tests.cc b/video/quality_scaling_tests.cc index edd186d88d..81aeebb2da 100644 --- a/video/quality_scaling_tests.cc +++ b/video/quality_scaling_tests.cc @@ -40,15 +40,13 @@ void SetEncoderSpecific(VideoEncoderConfig* encoder_config, VideoCodecVP8 vp8 = VideoEncoder::GetDefaultVp8Settings(); vp8.automaticResizeOn = automatic_resize; encoder_config->encoder_specific_settings = - rtc::make_ref_counted( - vp8); + make_ref_counted(vp8); } else if (type == kVideoCodecVP9) { VideoCodecVP9 vp9 = VideoEncoder::GetDefaultVp9Settings(); vp9.automaticResizeOn = automatic_resize; vp9.numberOfSpatialLayers = num_spatial_layers; encoder_config->encoder_specific_settings = - rtc::make_ref_counted( - vp9); + make_ref_counted(vp9); } } } // namespace @@ -57,17 +55,17 @@ class QualityScalingTest : public test::CallTest { protected: const std::string kPrefix = "WebRTC-Video-QualityScaling/Enabled-"; const std::string kEnd = ",0,0,0.9995,0.9999,1/"; - const absl::optional + const std::optional kSinglecastLimits720pVp8 = EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( kVideoCodecVP8, 1280 * 720); - const absl::optional + const std::optional kSinglecastLimits360pVp9 = EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( kVideoCodecVP9, 640 * 360); - const absl::optional + const 
std::optional kSinglecastLimits720pVp9 = EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( kVideoCodecVP9, @@ -78,7 +76,7 @@ class ScalingObserver : public test::SendTest { protected: struct TestParams { bool active; - absl::optional scalability_mode; + std::optional scalability_mode; }; ScalingObserver(const std::string& payload_name, const std::vector& test_params, @@ -87,13 +85,14 @@ class ScalingObserver : public test::SendTest { bool expect_scaling) : SendTest(expect_scaling ? kTimeout * 4 : kTimeout), encoder_factory_( - [](const SdpVideoFormat& format) -> std::unique_ptr { + [](const Environment& env, + const SdpVideoFormat& format) -> std::unique_ptr { if (format.name == "VP8") - return VP8Encoder::Create(); + return CreateVp8Encoder(env); if (format.name == "VP9") - return VP9Encoder::Create(); + return CreateVp9Encoder(env); if (format.name == "H264") - return H264Encoder::Create(); + return CreateH264Encoder(env); RTC_DCHECK_NOTREACHED() << format.name; return nullptr; }), @@ -124,7 +123,6 @@ class ScalingObserver : public test::SendTest { VideoSendStream::Config* send_config, std::vector* receive_configs, VideoEncoderConfig* encoder_config) override { - VideoEncoder::EncoderInfo encoder_info; send_config->encoder_settings.encoder_factory = &encoder_factory_; send_config->rtp.payload_name = payload_name_; send_config->rtp.payload_type = @@ -132,10 +130,6 @@ class ScalingObserver : public test::SendTest { encoder_config->video_format.name = payload_name_; const VideoCodecType codec_type = PayloadStringToCodecType(payload_name_); encoder_config->codec_type = codec_type; - encoder_config->video_stream_factory = - rtc::make_ref_counted( - payload_name_, /*max_qp=*/0, /*is_screenshare=*/false, - /*conference_mode=*/false, encoder_info); encoder_config->max_bitrate_bps = std::max(start_bps_, encoder_config->max_bitrate_bps); if (payload_name_ == "VP9") { @@ -157,7 +151,7 @@ class ScalingObserver : public test::SendTest { test_params_.size()); } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { // The tests are expected to send at the configured start bitrate. Do not // send any packets to avoid receiving REMB and possibly go down in target // bitrate. 
A low bitrate estimate could result in downgrading due to other @@ -198,8 +192,8 @@ class DownscalingObserver frame_generator_capturer->ChangeResolution(kInitialWidth, kInitialHeight); } - void OnSinkWantsChanged(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override { + void OnSinkWantsChanged(VideoSinkInterface* sink, + const VideoSinkWants& wants) override { if (wants.max_pixel_count < kInitialWidth * kInitialHeight) observation_complete_.Set(); } @@ -231,8 +225,8 @@ class UpscalingObserver frame_generator_capturer->ChangeResolution(kInitialWidth, kInitialHeight); } - void OnSinkWantsChanged(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override { + void OnSinkWantsChanged(VideoSinkInterface* sink, + const VideoSinkWants& wants) override { if (wants.max_pixel_count > last_wants_.max_pixel_count) { if (wants.max_pixel_count == std::numeric_limits::max()) observation_complete_.Set(); @@ -240,7 +234,7 @@ class UpscalingObserver last_wants_ = wants; } - rtc::VideoSinkWants last_wants_; + VideoSinkWants last_wants_; }; TEST_F(QualityScalingTest, AdaptsDownForHighQp_Vp8) { diff --git a/video/rate_utilization_tracker.cc b/video/rate_utilization_tracker.cc new file mode 100644 index 0000000000..ad7125a5a1 --- /dev/null +++ b/video/rate_utilization_tracker.cc @@ -0,0 +1,142 @@ +/* + * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video/rate_utilization_tracker.h" + +#include + +namespace webrtc { + +RateUtilizationTracker::RateUtilizationTracker( + size_t max_num_encoded_data_points, + TimeDelta max_duration) + : max_data_points_(max_num_encoded_data_points), + max_duration_(max_duration), + current_rate_(DataRate::Zero()) { + RTC_CHECK_GE(max_num_encoded_data_points, 0); + RTC_CHECK_GT(max_duration, TimeDelta::Zero()); +} + +void RateUtilizationTracker::OnDataRateChanged(DataRate rate, Timestamp time) { + current_rate_ = rate; + if (data_points_.empty()) { + // First entry should be contain first produced data, so just return after + // setting `current_rate_`. 
+    return;
+  } else {
+    RateUsageUpdate& last_data_point = data_points_.back();
+    RTC_CHECK_GE(time, last_data_point.time);
+    if (last_data_point.time == time) {
+      last_data_point.target_rate = rate;
+    } else {
+      data_points_.push_back({.time = time,
+                              .target_rate = rate,
+                              .produced_data = DataSize::Zero()});
+    }
+  }
+
+  CullOldData(time);
+}
+
+void RateUtilizationTracker::OnDataProduced(DataSize size, Timestamp time) {
+  if (data_points_.empty()) {
+    data_points_.push_back(
+        {.time = time, .target_rate = current_rate_, .produced_data = size});
+  } else {
+    RateUsageUpdate& last_data_point = data_points_.back();
+    RTC_CHECK_GE(time, last_data_point.time);
+    if (last_data_point.time == time) {
+      last_data_point.produced_data += size;
+    } else {
+      data_points_.push_back(
+          {.time = time, .target_rate = current_rate_, .produced_data = size});
+    }
+  }
+
+  CullOldData(time);
+}
+
+std::optional<double> RateUtilizationTracker::GetRateUtilizationFactor(
+    Timestamp time) const {
+  if (data_points_.empty()) {
+    return std::nullopt;
+  }
+
+  RTC_CHECK_GE(time, data_points_.back().time);
+  DataSize allocated_send_data_size = DataSize::Zero();
+  DataSize total_produced_data = DataSize::Zero();
+
+  // Keep track of the last data point that contained produced data: how large
+  // it was and how much rate budget has been allocated since then.
+  DataSize data_allocated_for_last_data = DataSize::Zero();
+  DataSize size_of_last_data = DataSize::Zero();
+
+  RTC_DCHECK(!data_points_.front().produced_data.IsZero());
+  for (size_t i = 0; i < data_points_.size(); ++i) {
+    const RateUsageUpdate& update = data_points_[i];
+    total_produced_data += update.produced_data;
+
+    DataSize allocated_since_previous_data_point =
+        i == 0 ? DataSize::Zero()
+               : (update.time - data_points_[i - 1].time) *
+                     data_points_[i - 1].target_rate;
+    allocated_send_data_size += allocated_since_previous_data_point;
+
+    if (update.produced_data.IsZero()) {
+      // Just a rate update past the last seen produced data.
+      data_allocated_for_last_data =
+          std::min(size_of_last_data, data_allocated_for_last_data +
+                                          allocated_since_previous_data_point);
+    } else {
+      // A newer data point with produced data; reset the accumulator for rate
+      // allocated past the last data point.
+      size_of_last_data = update.produced_data;
+      data_allocated_for_last_data = DataSize::Zero();
+    }
+  }
+
+  if (allocated_send_data_size.IsZero() && current_rate_.IsZero()) {
+    // No allocated rate across all of the data points, ignore.
+    return std::nullopt;
+  }
+
+  // Calculate the rate allocated past the very last data point until the
+  // polling time.
+  const RateUsageUpdate& last_update = data_points_.back();
+  DataSize allocated_since_last_data_point =
+      (time - last_update.time) * last_update.target_rate;
+
+  // If the last produced data packet is larger than the accumulated rate
+  // allocation window since then, use that data point size instead (minus any
+  // data rate accumulated in rate updates after that data point was produced).
+  allocated_send_data_size +=
+      std::max(allocated_since_last_data_point,
+               size_of_last_data - data_allocated_for_last_data);
+
+  return total_produced_data.bytes<double>() /
+         allocated_send_data_size.bytes<double>();
+}
+
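As an illustration only, not part of the patch: a minimal sketch of how an encoder pipeline might drive the tracker API implemented above. The helper name `OnEncodedFrame` and the 1.1 reaction threshold are assumptions made for the example; the tracker calls themselves match the header added in this change.

// Illustrative caller sketch; not part of this patch.
#include <optional>

#include "api/units/data_rate.h"
#include "api/units/data_size.h"
#include "api/units/timestamp.h"
#include "video/rate_utilization_tracker.h"

namespace {

// Feed the tracker the rate that applied when a frame was produced and the
// frame itself, then poll the utilization factor.
void OnEncodedFrame(webrtc::RateUtilizationTracker& tracker,
                    webrtc::DataRate current_target,
                    webrtc::DataSize frame_size,
                    webrtc::Timestamp now) {
  // Timestamps passed to the tracker must be non-decreasing.
  tracker.OnDataRateChanged(current_target, now);
  tracker.OnDataProduced(frame_size, now);

  // > 1.0 means the encoder is overshooting its byte budget over the
  // configured window; < 1.0 means undershooting.
  std::optional<double> utilization = tracker.GetRateUtilizationFactor(now);
  if (utilization.has_value() && *utilization > 1.1) {
    // React, e.g. tighten encoder settings (hypothetical policy).
  }
}

}  // namespace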
+void RateUtilizationTracker::CullOldData(Timestamp time) {
+  // Remove data points that are either too old or exceed the limit on the
+  // number of data points - and make sure the first entry in the list contains
+  // actual produced data, since we calculate send usage from that time.
+
+  // We don't allow negative times so always start window at absolute time >= 0.
+  const Timestamp oldest_included_time =
+      time.ms() > max_duration_.ms() ? time - max_duration_ : Timestamp::Zero();
+
+  while (!data_points_.empty() &&
+         (data_points_.front().time < oldest_included_time ||
+          data_points_.size() > max_data_points_ ||
+          data_points_.front().produced_data.IsZero())) {
+    data_points_.pop_front();
+  }
+}
+
+}  // namespace webrtc
diff --git a/video/rate_utilization_tracker.h b/video/rate_utilization_tracker.h
new file mode 100644
index 0000000000..619d542c4a
--- /dev/null
+++ b/video/rate_utilization_tracker.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef VIDEO_RATE_UTILIZATION_TRACKER_H_
+#define VIDEO_RATE_UTILIZATION_TRACKER_H_
+
+#include <deque>
+#include <optional>
+
+#include "api/units/data_rate.h"
+#include "api/units/data_size.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+
+namespace webrtc {
+
+// Helper class that tracks how well a target data rate is utilized over a
+// sliding window. tl;dr: if an encoder has a target rate of 1000kbps but in
+// practice produces 500kbps it would have a utilization factor of 0.5.
+// The tracker looks only at discrete events, and keeps only a fixed number
+// of data points (e.g. encoded frames) or points newer than a given time
+// limit, whichever set is smaller.
+
+// More precisely, this class measures the allocated cumulative byte budget
+// (as specified by one or more rate updates) and the actual cumulative number
+// of bytes produced over a sliding window. A utilization factor (produced
+// bytes / budgeted bytes) is calculated from the first data point timestamp
+// until the last data point timestamp plus the amount of time needed to send
+// that last data point given no further updates to the rate. The implication
+// of this is a smoother value: e.g. setting a rate and adding a data point,
+// then immediately querying the utilization reports 1.0 instead of some
+// undefined state.
+
+class RateUtilizationTracker {
+ public:
+  RateUtilizationTracker(size_t max_num_encoded_data_points,
+                         TimeDelta max_duration);
+
+  // The timestamps used should never decrease relative to the last one.
+  void OnDataRateChanged(DataRate rate, Timestamp time);
+  void OnDataProduced(DataSize size, Timestamp time);
+  std::optional<double> GetRateUtilizationFactor(Timestamp time) const;
+
+ private:
+  struct RateUsageUpdate {
+    Timestamp time;
+    DataRate target_rate;
+    DataSize produced_data;
+  };
+
+  void CullOldData(Timestamp time);
+
+  const size_t max_data_points_;
+  const TimeDelta max_duration_;
+  DataRate current_rate_;
+  std::deque<RateUsageUpdate> data_points_;
+};
+
+}  // namespace webrtc
+
+#endif  // VIDEO_RATE_UTILIZATION_TRACKER_H_
diff --git a/video/rate_utilization_tracker_unittest.cc b/video/rate_utilization_tracker_unittest.cc
new file mode 100644
index 0000000000..2e75d66a7b
--- /dev/null
+++ b/video/rate_utilization_tracker_unittest.cc
@@ -0,0 +1,269 @@
+/*
+ * Copyright (c) 2024 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video/rate_utilization_tracker.h"
+
+#include "api/units/data_rate.h"
+#include "api/units/data_size.h"
+#include "api/units/time_delta.h"
+#include "api/units/timestamp.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+using ::testing::Not;
+
+constexpr int kDefaultMaxDataPoints = 10;
+constexpr TimeDelta kDefaultTimeWindow = TimeDelta::Seconds(1);
+constexpr Timestamp kStartTime = Timestamp::Millis(9876654);
+constexpr double kAllowedError = 0.002;  // 0.2% error allowed.
+
+MATCHER_P(PrettyCloseTo, expected, "") {
+  return arg && std::abs(*arg - expected) < kAllowedError;
+}
+
+TEST(RateUtilizationTrackerTest, NoDataInNoDataOut) {
+  RateUtilizationTracker tracker(kDefaultMaxDataPoints, kDefaultTimeWindow);
+  EXPECT_FALSE(tracker.GetRateUtilizationFactor(kStartTime).has_value());
+}
+
+TEST(RateUtilizationTrackerTest, NoUtilizationWithoutDataPoints) {
+  RateUtilizationTracker tracker(kDefaultMaxDataPoints, kDefaultTimeWindow);
+  tracker.OnDataRateChanged(DataRate::KilobitsPerSec(100), kStartTime);
+  EXPECT_FALSE(tracker.GetRateUtilizationFactor(kStartTime).has_value());
+}
+
+TEST(RateUtilizationTrackerTest, NoUtilizationWithoutRateUpdates) {
+  RateUtilizationTracker tracker(kDefaultMaxDataPoints, kDefaultTimeWindow);
+  tracker.OnDataProduced(DataSize::Bytes(100), kStartTime);
+  EXPECT_FALSE(tracker.GetRateUtilizationFactor(kStartTime).has_value());
+}
+
+TEST(RateUtilizationTrackerTest, SingleDataPoint) {
+  RateUtilizationTracker tracker(kDefaultMaxDataPoints, kDefaultTimeWindow);
+  constexpr TimeDelta kFrameInterval = TimeDelta::Seconds(1) / 33;
+  constexpr DataRate kTargetRate = DataRate::KilobitsPerSec(100);
+  constexpr DataSize kIdealFrameSize = kTargetRate * kFrameInterval;
+
+  tracker.OnDataRateChanged(kTargetRate, kStartTime);
+  tracker.OnDataProduced(kIdealFrameSize, kStartTime);
+
+  // From the start, the window is extended to cover the expected duration for
+  // the last frame - resulting in 100% utilization.
+  EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime), PrettyCloseTo(1.0));
+
+  // At the expected frame interval the utilization is still 100%.
+  EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime + kFrameInterval),
+              PrettyCloseTo(1.0));
+
+  // After two frame intervals the utilization is half the expected.
+  EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime + 2 * kFrameInterval),
+              PrettyCloseTo(0.5));
+}
+
+TEST(RateUtilizationTrackerTest, TwoDataPoints) {
+  RateUtilizationTracker tracker(kDefaultMaxDataPoints, kDefaultTimeWindow);
+  constexpr TimeDelta kFrameInterval = TimeDelta::Seconds(1) / 33;
+  constexpr DataRate kTargetRate = DataRate::KilobitsPerSec(100);
+  constexpr DataSize kIdealFrameSize = kTargetRate * kFrameInterval;
+
+  tracker.OnDataRateChanged(kTargetRate, kStartTime);
+  tracker.OnDataProduced(kIdealFrameSize, kStartTime);
+  tracker.OnDataProduced(kIdealFrameSize, kStartTime + kFrameInterval);
+
+  EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime + 2 * kFrameInterval),
+              PrettyCloseTo(1.0));
+
+  // After three frame intervals we have two utilized intervals and one
+  // unutilized => 2/3 utilization.
+  EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime + 3 * kFrameInterval),
+              PrettyCloseTo(2.0 / 3.0));
+}
+
+TEST(RateUtilizationTrackerTest, TwoDataPointsConsistentOveruse) {
+  RateUtilizationTracker tracker(kDefaultMaxDataPoints, kDefaultTimeWindow);
+  constexpr TimeDelta kFrameInterval = TimeDelta::Seconds(1) / 33;
+  constexpr DataRate kTargetRate = DataRate::KilobitsPerSec(100);
+  constexpr DataSize kIdealFrameSize = kTargetRate * kFrameInterval;
+
+  tracker.OnDataRateChanged(kTargetRate, kStartTime);
+  tracker.OnDataProduced(kIdealFrameSize * 2, kStartTime);
+  tracker.OnDataProduced(kIdealFrameSize * 2, kStartTime + kFrameInterval);
+
+  // Note that the last data point is presumed to be sent at the designated rate
+  // and no new data points produced until the buffers empty. Thus the
+  // overshoot is just 4/3 instead of 4/2.
+  EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime + 2 * kFrameInterval),
+              PrettyCloseTo(4.0 / 3.0));
+}
+
+TEST(RateUtilizationTrackerTest, OveruseWithFrameDrop) {
+  RateUtilizationTracker tracker(kDefaultMaxDataPoints, kDefaultTimeWindow);
+  constexpr TimeDelta kFrameInterval = TimeDelta::Seconds(1) / 33;
+  constexpr DataRate kTargetRate = DataRate::KilobitsPerSec(100);
+  constexpr DataSize kIdealFrameSize = kTargetRate * kFrameInterval;
+
+  // First frame is 2x larger than it should be.
+  tracker.OnDataRateChanged(kTargetRate, kStartTime);
+  tracker.OnDataProduced(kIdealFrameSize * 2, kStartTime);
+  // Compensate by dropping a frame before the next nominal-size one.
+  tracker.OnDataProduced(kIdealFrameSize, kStartTime + 2 * kFrameInterval);
+
+  EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime + 3 * kFrameInterval),
+              PrettyCloseTo(1.0));
+}
+
+TEST(RateUtilizationTrackerTest, VaryingRate) {
+  RateUtilizationTracker tracker(kDefaultMaxDataPoints, kDefaultTimeWindow);
+  constexpr TimeDelta kFrameInterval = TimeDelta::Seconds(1) / 33;
+  constexpr DataRate kTargetRate = DataRate::KilobitsPerSec(100);
+  constexpr DataSize kIdealFrameSize = kTargetRate * kFrameInterval;
+
+  // Rate goes up, rate comes down...
+  tracker.OnDataRateChanged(kTargetRate, kStartTime);
+  tracker.OnDataProduced(kIdealFrameSize, kStartTime);
+  tracker.OnDataRateChanged(kTargetRate * 2, kStartTime + kFrameInterval);
+  tracker.OnDataProduced(kIdealFrameSize * 2, kStartTime + kFrameInterval);
+  tracker.OnDataRateChanged(kTargetRate, kStartTime + 2 * kFrameInterval);
+  tracker.OnDataProduced(kIdealFrameSize, kStartTime + 2 * kFrameInterval);
+
+  EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime + 3 * kFrameInterval),
+              PrettyCloseTo(1.0));
+}
+
+TEST(RateUtilizationTrackerTest, VaryingRateMidFrameInterval) {
+  RateUtilizationTracker tracker(kDefaultMaxDataPoints, kDefaultTimeWindow);
+  constexpr TimeDelta kFrameInterval = TimeDelta::Seconds(1) / 33;
+  constexpr DataRate kTargetRate = DataRate::KilobitsPerSec(100);
+  constexpr DataSize kIdealFrameSize = kTargetRate * kFrameInterval;
+
+  // First frame is 1.5x the ideal size.
+  tracker.OnDataRateChanged(kTargetRate, kStartTime);
+  tracker.OnDataProduced(kIdealFrameSize * (3.0 / 2.0), kStartTime);
+
+  // Mid frame interval, double the target rate. Should lead to no overshoot.
+ tracker.OnDataRateChanged(kTargetRate * 2, kStartTime + kFrameInterval / 2); + EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime + kFrameInterval), + PrettyCloseTo(1.0)); +} + +TEST(RateUtilizationTrackerTest, VaryingRateAfterLastDataPoint) { + RateUtilizationTracker tracker(kDefaultMaxDataPoints, kDefaultTimeWindow); + constexpr TimeDelta kFrameInterval = TimeDelta::Seconds(1) / 33; + constexpr DataRate kTargetRate = DataRate::KilobitsPerSec(100); + constexpr DataSize kIdealFrameSize = kTargetRate * kFrameInterval; + + tracker.OnDataRateChanged(kTargetRate, kStartTime); + // Data point is just after the rate update. + tracker.OnDataProduced(kIdealFrameSize, kStartTime + TimeDelta::Micros(1)); + + // Half an interval past the last frame double the target rate. + tracker.OnDataRateChanged(kTargetRate * 2, kStartTime + kFrameInterval / 2); + + // The last data point should now extend only to 2/3 the way to the next frame + // interval. + EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime + + kFrameInterval * (2.0 / 3.0)), + PrettyCloseTo(1.0)); + EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime + + kFrameInterval * (2.3 / 3.0)), + Not(PrettyCloseTo(1.0))); +} + +TEST(RateUtilizationTrackerTest, DataPointLimit) { + // Set max data points to two. + RateUtilizationTracker tracker(/*max_data_points=*/2, kDefaultTimeWindow); + constexpr TimeDelta kFrameInterval = TimeDelta::Seconds(1) / 33; + constexpr DataRate kTargetRate = DataRate::KilobitsPerSec(100); + constexpr DataSize kIdealFrameSize = kTargetRate * kFrameInterval; + + // Insert two frames that are too large. + tracker.OnDataRateChanged(kTargetRate, kStartTime); + tracker.OnDataProduced(kIdealFrameSize * 2, kStartTime); + tracker.OnDataProduced(kIdealFrameSize * 2, kStartTime + 1 * kFrameInterval); + EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime + 1 * kFrameInterval), + Not(PrettyCloseTo(1.0))); + + // Insert two frames of the correct size. Past grievances have been forgotten. + tracker.OnDataProduced(kIdealFrameSize, kStartTime + 2 * kFrameInterval); + tracker.OnDataProduced(kIdealFrameSize, kStartTime + 3 * kFrameInterval); + EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime + 3 * kFrameInterval), + PrettyCloseTo(1.0)); +} + +TEST(RateUtilizationTrackerTest, WindowSizeLimit) { + constexpr TimeDelta kFrameInterval = TimeDelta::Seconds(1) / 33; + constexpr DataRate kTargetRate = DataRate::KilobitsPerSec(100); + constexpr DataSize kIdealFrameSize = kTargetRate * kFrameInterval; + // Number of data points enough, but time window too small. + RateUtilizationTracker tracker(/*max_data_points=*/4, /*time_window=*/ + 2 * kFrameInterval - TimeDelta::Millis(1)); + + // Insert two frames that are too large. + tracker.OnDataRateChanged(kTargetRate, kStartTime); + tracker.OnDataProduced(kIdealFrameSize * 2, kStartTime); + tracker.OnDataProduced(kIdealFrameSize * 2, kStartTime + 1 * kFrameInterval); + EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime + 1 * kFrameInterval), + Not(PrettyCloseTo(1.0))); + + // Insert two frames of the correct size. Past grievances have been forgotten. + tracker.OnDataProduced(kIdealFrameSize, kStartTime + 2 * kFrameInterval); + tracker.OnDataProduced(kIdealFrameSize, kStartTime + 3 * kFrameInterval); + EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime + 3 * kFrameInterval), + PrettyCloseTo(1.0)); +} + +TEST(RateUtilizationTrackerTest, EqualTimestampsTreatedAtSameDataPoint) { + // Set max data points to two. 
+ RateUtilizationTracker tracker(/*max_data_points=*/2, kDefaultTimeWindow); + constexpr TimeDelta kFrameInterval = TimeDelta::Seconds(1) / 33; + constexpr DataRate kTargetRate = DataRate::KilobitsPerSec(100); + constexpr DataSize kIdealFrameSize = kTargetRate * kFrameInterval; + + tracker.OnDataRateChanged(kTargetRate, kStartTime); + tracker.OnDataProduced(kIdealFrameSize, kStartTime); + EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime), PrettyCloseTo(1.0)); + + // This is viewed as an undershoot. + tracker.OnDataProduced(kIdealFrameSize, kStartTime + (kFrameInterval * 2)); + EXPECT_THAT( + tracker.GetRateUtilizationFactor(kStartTime + (kFrameInterval * 2)), + PrettyCloseTo(2.0 / 3.0)); + + // Add the same data point again. Treated as layered frame so will accumulate + // in the same data point. This is expected to have a send time twice as long + // now, reducing the undershoot. + tracker.OnDataProduced(kIdealFrameSize, kStartTime + (kFrameInterval * 2)); + EXPECT_THAT( + tracker.GetRateUtilizationFactor(kStartTime + (kFrameInterval * 2)), + PrettyCloseTo(3.0 / 4.0)); +} + +TEST(RateUtilizationTrackerTest, FullRateAfterLastDataPoint) { + RateUtilizationTracker tracker(kDefaultMaxDataPoints, kDefaultTimeWindow); + constexpr TimeDelta kFrameInterval = TimeDelta::Seconds(1) / 33; + constexpr DataRate kTargetRate = DataRate::KilobitsPerSec(100); + constexpr DataSize kIdealFrameSize = kTargetRate * kFrameInterval; + + tracker.OnDataRateChanged(kTargetRate, kStartTime); + tracker.OnDataProduced(kIdealFrameSize, kStartTime); + + // New rate update, but accumulated rate for last data point fully saturated + // by next to last rate update. + tracker.OnDataRateChanged(kTargetRate, kStartTime + kFrameInterval * 2); + + EXPECT_THAT(tracker.GetRateUtilizationFactor(kStartTime + kFrameInterval * 3), + PrettyCloseTo(1.0 / 3.0)); +} + +} // namespace +} // namespace webrtc diff --git a/video/receive_statistics_proxy.cc b/video/receive_statistics_proxy.cc index 75512a2465..aae6b60f2d 100644 --- a/video/receive_statistics_proxy.cc +++ b/video/receive_statistics_proxy.cc @@ -12,6 +12,7 @@ #include #include +#include #include #include "modules/video_coding/include/video_codec_interface.h" @@ -50,13 +51,13 @@ const char* UmaPrefixForContentType(VideoContentType content_type) { } // TODO(https://bugs.webrtc.org/11572): Workaround for an issue with some -// rtc::Thread instances and/or implementations that don't register as the +// webrtc::Thread instances and/or implementations that don't register as the // current task queue. 
bool IsCurrentTaskQueueOrThread(TaskQueueBase* task_queue) { if (task_queue->IsCurrent()) return true; - rtc::Thread* current_thread = rtc::ThreadManager::Instance()->CurrentThread(); + Thread* current_thread = ThreadManager::Instance()->CurrentThread(); if (!current_thread) return false; @@ -96,13 +97,13 @@ ReceiveStatisticsProxy::~ReceiveStatisticsProxy() { } void ReceiveStatisticsProxy::UpdateHistograms( - absl::optional fraction_lost, + std::optional fraction_lost, const StreamDataCounters& rtp_stats, const StreamDataCounters* rtx_stats) { RTC_DCHECK_RUN_ON(&main_thread_); char log_stream_buf[8 * 1024]; - rtc::SimpleStringBuilder log_stream(log_stream_buf); + SimpleStringBuilder log_stream(log_stream_buf); int stream_duration_sec = (clock_->TimeInMilliseconds() - start_ms_) / 1000; @@ -134,8 +135,7 @@ void ReceiveStatisticsProxy::UpdateHistograms( if (first_decoded_frame_time_ms_) { const int64_t elapsed_ms = (clock_->TimeInMilliseconds() - *first_decoded_frame_time_ms_); - if (elapsed_ms >= - metrics::kMinRunTimeInSeconds * rtc::kNumMillisecsPerSec) { + if (elapsed_ms >= metrics::kMinRunTimeInSeconds * kNumMillisecsPerSec) { int decoded_fps = static_cast( (stats_.frames_decoded * 1000.0f / elapsed_ms) + 0.5f); RTC_HISTOGRAM_COUNTS_100("WebRTC.Video.DecodedFramesPerSecond", @@ -170,7 +170,7 @@ void ReceiveStatisticsProxy::UpdateHistograms( round(render_pixel_tracker_.ComputeTotalRate())); } - absl::optional sync_offset_ms = + std::optional sync_offset_ms = sync_offset_counter_.Avg(kMinRequiredSamples); if (sync_offset_ms) { RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.AVSyncOffsetInMs", @@ -197,18 +197,18 @@ void ReceiveStatisticsProxy::UpdateHistograms( << key_frames_permille << '\n'; } - absl::optional qp = qp_counters_.vp8.Avg(kMinRequiredSamples); - if (qp) { - RTC_HISTOGRAM_COUNTS_200("WebRTC.Video.Decoded.Vp8.Qp", *qp); - log_stream << "WebRTC.Video.Decoded.Vp8.Qp " << *qp << '\n'; + std::optional vp8_qp = qp_counters_.vp8.Avg(kMinRequiredSamples); + if (vp8_qp) { + RTC_HISTOGRAM_COUNTS_200("WebRTC.Video.Decoded.Vp8.Qp", *vp8_qp); + log_stream << "WebRTC.Video.Decoded.Vp8.Qp " << *vp8_qp << '\n'; } - absl::optional decode_ms = decode_time_counter_.Avg(kMinRequiredSamples); + std::optional decode_ms = decode_time_counter_.Avg(kMinRequiredSamples); if (decode_ms) { RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DecodeTimeInMs", *decode_ms); log_stream << "WebRTC.Video.DecodeTimeInMs " << *decode_ms << '\n'; } - absl::optional jb_delay_ms = + std::optional jb_delay_ms = jitter_delay_counter_.Avg(kMinRequiredSamples); if (jb_delay_ms) { RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs", @@ -216,36 +216,25 @@ void ReceiveStatisticsProxy::UpdateHistograms( log_stream << "WebRTC.Video.JitterBufferDelayInMs " << *jb_delay_ms << '\n'; } - absl::optional target_delay_ms = + std::optional target_delay_ms = target_delay_counter_.Avg(kMinRequiredSamples); if (target_delay_ms) { RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.TargetDelayInMs", *target_delay_ms); log_stream << "WebRTC.Video.TargetDelayInMs " << *target_delay_ms << '\n'; } - absl::optional current_delay_ms = + std::optional current_delay_ms = current_delay_counter_.Avg(kMinRequiredSamples); if (current_delay_ms) { RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.CurrentDelayInMs", *current_delay_ms); log_stream << "WebRTC.Video.CurrentDelayInMs " << *current_delay_ms << '\n'; } - absl::optional delay_ms = oneway_delay_counter_.Avg(kMinRequiredSamples); + std::optional delay_ms = oneway_delay_counter_.Avg(kMinRequiredSamples); if (delay_ms) 
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.OnewayDelayInMs", *delay_ms); - // Aggregate content_specific_stats_ by removing experiment or simulcast - // information; - std::map aggregated_stats; for (const auto& it : content_specific_stats_) { - // Calculate simulcast specific metrics (".S0" ... ".S2" suffixes). - VideoContentType content_type = it.first; - // Calculate aggregated metrics (no suffixes. Aggregated on everything). - content_type = it.first; - aggregated_stats[content_type].Add(it.second); - } - - for (const auto& it : aggregated_stats) { // For the metric Foo we report the following slices: // WebRTC.Video.Foo, // WebRTC.Video.Screenshare.Foo, @@ -253,62 +242,73 @@ void ReceiveStatisticsProxy::UpdateHistograms( auto stats = it.second; std::string uma_prefix = UmaPrefixForContentType(content_type); - absl::optional e2e_delay_ms = + std::optional e2e_delay_ms = stats.e2e_delay_counter.Avg(kMinRequiredSamples); if (e2e_delay_ms) { RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".EndToEndDelayInMs", *e2e_delay_ms); - log_stream << uma_prefix << ".EndToEndDelayInMs" - << " " << *e2e_delay_ms << '\n'; + log_stream << uma_prefix << ".EndToEndDelayInMs" << " " << *e2e_delay_ms + << '\n'; } - absl::optional e2e_delay_max_ms = stats.e2e_delay_counter.Max(); + std::optional e2e_delay_max_ms = stats.e2e_delay_counter.Max(); if (e2e_delay_max_ms && e2e_delay_ms) { RTC_HISTOGRAM_COUNTS_SPARSE_100000(uma_prefix + ".EndToEndDelayMaxInMs", *e2e_delay_max_ms); - log_stream << uma_prefix << ".EndToEndDelayMaxInMs" - << " " << *e2e_delay_max_ms << '\n'; + log_stream << uma_prefix << ".EndToEndDelayMaxInMs" << " " + << *e2e_delay_max_ms << '\n'; } - absl::optional interframe_delay_ms = + std::optional interframe_delay_ms = stats.interframe_delay_counter.Avg(kMinRequiredSamples); if (interframe_delay_ms) { RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".InterframeDelayInMs", *interframe_delay_ms); - log_stream << uma_prefix << ".InterframeDelayInMs" - << " " << *interframe_delay_ms << '\n'; + log_stream << uma_prefix << ".InterframeDelayInMs" << " " + << *interframe_delay_ms << '\n'; } - absl::optional interframe_delay_max_ms = + std::optional interframe_delay_max_ms = stats.interframe_delay_counter.Max(); if (interframe_delay_max_ms && interframe_delay_ms) { RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".InterframeDelayMaxInMs", *interframe_delay_max_ms); - log_stream << uma_prefix << ".InterframeDelayMaxInMs" - << " " << *interframe_delay_max_ms << '\n'; + log_stream << uma_prefix << ".InterframeDelayMaxInMs" << " " + << *interframe_delay_max_ms << '\n'; } - absl::optional interframe_delay_95p_ms = + std::optional interframe_delay_95p_ms = stats.interframe_delay_percentiles.GetPercentile(0.95f); if (interframe_delay_95p_ms && interframe_delay_ms != -1) { RTC_HISTOGRAM_COUNTS_SPARSE_10000( uma_prefix + ".InterframeDelay95PercentileInMs", *interframe_delay_95p_ms); - log_stream << uma_prefix << ".InterframeDelay95PercentileInMs" - << " " << *interframe_delay_95p_ms << '\n'; + log_stream << uma_prefix << ".InterframeDelay95PercentileInMs" << " " + << *interframe_delay_95p_ms << '\n'; } - absl::optional width = stats.received_width.Avg(kMinRequiredSamples); + std::optional width = stats.received_width.Avg(kMinRequiredSamples); if (width) { RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".ReceivedWidthInPixels", *width); - log_stream << uma_prefix << ".ReceivedWidthInPixels" - << " " << *width << '\n'; + log_stream << uma_prefix << ".ReceivedWidthInPixels" << " " << *width + << '\n'; } - 
absl::optional height = stats.received_height.Avg(kMinRequiredSamples); + std::optional height = stats.received_height.Avg(kMinRequiredSamples); if (height) { RTC_HISTOGRAM_COUNTS_SPARSE_10000(uma_prefix + ".ReceivedHeightInPixels", *height); - log_stream << uma_prefix << ".ReceivedHeightInPixels" - << " " << *height << '\n'; + log_stream << uma_prefix << ".ReceivedHeightInPixels" << " " << *height + << '\n'; + } + + std::optional corruption_score = stats.corruption_score.GetMean(); + if (corruption_score) { + // Granularity level: 2e-3. + RTC_HISTOGRAM_COUNTS_SPARSE(uma_prefix + ".CorruptionLikelihoodPermille", + static_cast(*corruption_score * 1000), + /*min=*/0, /*max=*/1000, + /*bucket_count=*/500); + log_stream << uma_prefix << ".CorruptionLikelihoodPermille" << " " + << static_cast(*corruption_score * 1000) << '\n'; } if (content_type != VideoContentType::UNSPECIFIED) { @@ -320,11 +320,11 @@ void ReceiveStatisticsProxy::UpdateHistograms( flow_duration_sec / 1000); RTC_HISTOGRAM_COUNTS_SPARSE_10000( uma_prefix + ".MediaBitrateReceivedInKbps", media_bitrate_kbps); - log_stream << uma_prefix << ".MediaBitrateReceivedInKbps" - << " " << media_bitrate_kbps << '\n'; + log_stream << uma_prefix << ".MediaBitrateReceivedInKbps" << " " + << media_bitrate_kbps << '\n'; } - int num_total_frames = + num_total_frames = stats.frame_counts.key_frames + stats.frame_counts.delta_frames; if (num_total_frames >= kMinRequiredSamples) { int num_key_frames = stats.frame_counts.key_frames; @@ -332,15 +332,14 @@ void ReceiveStatisticsProxy::UpdateHistograms( (num_key_frames * 1000 + num_total_frames / 2) / num_total_frames; RTC_HISTOGRAM_COUNTS_SPARSE_1000( uma_prefix + ".KeyFramesReceivedInPermille", key_frames_permille); - log_stream << uma_prefix << ".KeyFramesReceivedInPermille" - << " " << key_frames_permille << '\n'; + log_stream << uma_prefix << ".KeyFramesReceivedInPermille" << " " + << key_frames_permille << '\n'; } - absl::optional qp = stats.qp_counter.Avg(kMinRequiredSamples); + std::optional qp = stats.qp_counter.Avg(kMinRequiredSamples); if (qp) { RTC_HISTOGRAM_COUNTS_SPARSE_200(uma_prefix + ".Decoded.Vp8.Qp", *qp); - log_stream << uma_prefix << ".Decoded.Vp8.Qp" - << " " << *qp << '\n'; + log_stream << uma_prefix << ".Decoded.Vp8.Qp" << " " << *qp << '\n'; } } } @@ -409,13 +408,13 @@ void ReceiveStatisticsProxy::UpdateFramerate(int64_t now_ms) const { stats_.network_frame_rate = static_cast(framerate); } -absl::optional +std::optional ReceiveStatisticsProxy::GetCurrentEstimatedPlayoutNtpTimestampMs( int64_t now_ms) const { RTC_DCHECK_RUN_ON(&main_thread_); if (!last_estimated_playout_ntp_timestamp_ms_ || !last_estimated_playout_time_ms_) { - return absl::nullopt; + return std::nullopt; } int64_t elapsed_ms = now_ms - *last_estimated_playout_time_ms_; return *last_estimated_playout_ntp_timestamp_ms_ + elapsed_ms; @@ -457,7 +456,7 @@ VideoReceiveStreamInterface::Stats ReceiveStatisticsProxy::GetStats() const { video_quality_observer_->TotalPausesDurationMs(); stats_.total_inter_frame_delay = static_cast(video_quality_observer_->TotalFramesDurationMs()) / - rtc::kNumMillisecsPerSec; + kNumMillisecsPerSec; stats_.total_squared_inter_frame_delay = video_quality_observer_->SumSquaredFrameDurationsSec(); @@ -590,7 +589,7 @@ void ReceiveStatisticsProxy::OnCname(uint32_t ssrc, absl::string_view cname) { } void ReceiveStatisticsProxy::OnDecodedFrame(const VideoFrame& frame, - absl::optional qp, + std::optional qp, TimeDelta decode_time, VideoContentType content_type, VideoFrameType frame_type) { @@ 
-626,7 +625,7 @@ void ReceiveStatisticsProxy::OnDecodedFrame(const VideoFrame& frame, void ReceiveStatisticsProxy::OnDecodedFrame( const VideoFrameMetaData& frame_meta, - absl::optional qp, + std::optional qp, TimeDelta decode_time, TimeDelta processing_delay, TimeDelta assembly_time, @@ -823,6 +822,28 @@ void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms) { avg_rtt_ms_ = avg_rtt_ms; } +void ReceiveStatisticsProxy::OnCorruptionScore(double corruption_score, + VideoContentType content_type) { + worker_thread_->PostTask(SafeTask(task_safety_.flag(), [corruption_score, + content_type, this] { + RTC_DCHECK_RUN_ON(&main_thread_); + + if (!stats_.corruption_score_sum.has_value()) { + RTC_DCHECK(!stats_.corruption_score_squared_sum.has_value()); + RTC_DCHECK_EQ(stats_.corruption_score_count, 0); + stats_.corruption_score_sum = 0; + stats_.corruption_score_squared_sum = 0; + } + *stats_.corruption_score_sum += corruption_score; + *stats_.corruption_score_squared_sum += corruption_score * corruption_score; + ++stats_.corruption_score_count; + + ContentSpecificStats* content_specific_stats = + &content_specific_stats_[content_type]; + content_specific_stats->corruption_score.AddSample(corruption_score); + })); +} + void ReceiveStatisticsProxy::DecoderThreadStarting() { RTC_DCHECK_RUN_ON(&main_thread_); } @@ -849,6 +870,7 @@ void ReceiveStatisticsProxy::ContentSpecificStats::Add( frame_counts.key_frames += other.frame_counts.key_frames; frame_counts.delta_frames += other.frame_counts.delta_frames; interframe_delay_percentiles.Add(other.interframe_delay_percentiles); + corruption_score.MergeStatistics(other.corruption_score); } } // namespace internal diff --git a/video/receive_statistics_proxy.h b/video/receive_statistics_proxy.h index 8e4941f961..023f4fa5f2 100644 --- a/video/receive_statistics_proxy.h +++ b/video/receive_statistics_proxy.h @@ -11,21 +11,30 @@ #ifndef VIDEO_RECEIVE_STATISTICS_PROXY_H_ #define VIDEO_RECEIVE_STATISTICS_PROXY_H_ +#include +#include #include #include -#include -#include +#include -#include "absl/types/optional.h" +#include "absl/strings/string_view.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" -#include "api/units/timestamp.h" +#include "api/units/time_delta.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_timing.h" #include "api/video_codecs/video_decoder.h" #include "call/video_receive_stream.h" -#include "modules/include/module_common_types.h" +#include "common_video/frame_counts.h" +#include "modules/rtp_rtcp/include/rtcp_statistics.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/numerics/histogram_percentile_counter.h" #include "rtc_base/numerics/moving_max_counter.h" +#include "rtc_base/numerics/running_statistics.h" #include "rtc_base/numerics/sample_counter.h" #include "rtc_base/rate_statistics.h" #include "rtc_base/rate_tracker.h" @@ -56,7 +65,7 @@ class ReceiveStatisticsProxy : public VideoStreamBufferControllerStatsObserver, VideoReceiveStreamInterface::Stats GetStats() const; void OnDecodedFrame(const VideoFrame& frame, - absl::optional qp, + std::optional qp, TimeDelta decode_time, VideoContentType content_type, VideoFrameType frame_type); @@ -65,7 +74,7 @@ class ReceiveStatisticsProxy : public VideoStreamBufferControllerStatsObserver, // above OnDecodedFrame method, which is called back on 
the thread where // the actual decoding happens. void OnDecodedFrame(const VideoFrameMetaData& frame_meta, - absl::optional qp, + std::optional qp, TimeDelta decode_time, TimeDelta processing_delay, TimeDelta assembly_time, @@ -105,6 +114,9 @@ class ReceiveStatisticsProxy : public VideoStreamBufferControllerStatsObserver, // Implements RtcpCnameCallback. void OnCname(uint32_t ssrc, absl::string_view cname) override; + void OnCorruptionScore(double corruption_score, + VideoContentType content_type); + // Implements RtcpPacketTypeCounterObserver. void RtcpPacketTypesCounterUpdated( uint32_t ssrc, @@ -119,13 +131,13 @@ class ReceiveStatisticsProxy : public VideoStreamBufferControllerStatsObserver, // Produce histograms. Must be called after DecoderThreadStopped(), typically // at the end of the call. - void UpdateHistograms(absl::optional fraction_lost, + void UpdateHistograms(std::optional fraction_lost, const StreamDataCounters& rtp_stats, const StreamDataCounters* rtx_stats); private: struct QpCounters { - rtc::SampleCounter vp8; + SampleCounter vp8; }; struct ContentSpecificStats { @@ -134,21 +146,22 @@ class ReceiveStatisticsProxy : public VideoStreamBufferControllerStatsObserver, void Add(const ContentSpecificStats& other); - rtc::SampleCounter e2e_delay_counter; - rtc::SampleCounter interframe_delay_counter; + SampleCounter e2e_delay_counter; + SampleCounter interframe_delay_counter; int64_t flow_duration_ms = 0; int64_t total_media_bytes = 0; - rtc::SampleCounter received_width; - rtc::SampleCounter received_height; - rtc::SampleCounter qp_counter; + SampleCounter received_width; + SampleCounter received_height; + SampleCounter qp_counter; FrameCounts frame_counts; - rtc::HistogramPercentileCounter interframe_delay_percentiles; + HistogramPercentileCounter interframe_delay_percentiles; + webrtc_impl::RunningStatistics corruption_score; }; // Removes info about old frames and then updates the framerate. 
void UpdateFramerate(int64_t now_ms) const; - absl::optional GetCurrentEstimatedPlayoutNtpTimestampMs( + std::optional GetCurrentEstimatedPlayoutNtpTimestampMs( int64_t now_ms) const; Clock* const clock_; @@ -161,17 +174,17 @@ class ReceiveStatisticsProxy : public VideoStreamBufferControllerStatsObserver, const uint32_t remote_ssrc_; RateStatistics decode_fps_estimator_ RTC_GUARDED_BY(main_thread_); RateStatistics renders_fps_estimator_ RTC_GUARDED_BY(main_thread_); - rtc::RateTracker render_fps_tracker_ RTC_GUARDED_BY(main_thread_); - rtc::RateTracker render_pixel_tracker_ RTC_GUARDED_BY(main_thread_); - rtc::SampleCounter sync_offset_counter_ RTC_GUARDED_BY(main_thread_); - rtc::SampleCounter decode_time_counter_ RTC_GUARDED_BY(main_thread_); - rtc::SampleCounter jitter_delay_counter_ RTC_GUARDED_BY(main_thread_); - rtc::SampleCounter target_delay_counter_ RTC_GUARDED_BY(main_thread_); - rtc::SampleCounter current_delay_counter_ RTC_GUARDED_BY(main_thread_); - rtc::SampleCounter oneway_delay_counter_ RTC_GUARDED_BY(main_thread_); + RateTracker render_fps_tracker_ RTC_GUARDED_BY(main_thread_); + RateTracker render_pixel_tracker_ RTC_GUARDED_BY(main_thread_); + SampleCounter sync_offset_counter_ RTC_GUARDED_BY(main_thread_); + SampleCounter decode_time_counter_ RTC_GUARDED_BY(main_thread_); + SampleCounter jitter_delay_counter_ RTC_GUARDED_BY(main_thread_); + SampleCounter target_delay_counter_ RTC_GUARDED_BY(main_thread_); + SampleCounter current_delay_counter_ RTC_GUARDED_BY(main_thread_); + SampleCounter oneway_delay_counter_ RTC_GUARDED_BY(main_thread_); std::unique_ptr video_quality_observer_ RTC_GUARDED_BY(main_thread_); - mutable rtc::MovingMaxCounter interframe_delay_max_moving_ + mutable MovingMaxCounter interframe_delay_max_moving_ RTC_GUARDED_BY(main_thread_); std::map content_specific_stats_ RTC_GUARDED_BY(main_thread_); @@ -181,22 +194,22 @@ class ReceiveStatisticsProxy : public VideoStreamBufferControllerStatsObserver, mutable std::map frame_window_ RTC_GUARDED_BY(main_thread_); VideoContentType last_content_type_ RTC_GUARDED_BY(&main_thread_); VideoCodecType last_codec_type_ RTC_GUARDED_BY(main_thread_); - absl::optional first_frame_received_time_ms_ + std::optional first_frame_received_time_ms_ RTC_GUARDED_BY(main_thread_); - absl::optional first_decoded_frame_time_ms_ + std::optional first_decoded_frame_time_ms_ RTC_GUARDED_BY(main_thread_); - absl::optional last_decoded_frame_time_ms_ + std::optional last_decoded_frame_time_ms_ RTC_GUARDED_BY(main_thread_); size_t num_delayed_frames_rendered_ RTC_GUARDED_BY(main_thread_); int64_t sum_missed_render_deadline_ms_ RTC_GUARDED_BY(main_thread_); // Mutable because calling Max() on MovingMaxCounter is not const. Yet it is // called from const GetStats(). 
- mutable rtc::MovingMaxCounter timing_frame_info_counter_ + mutable MovingMaxCounter timing_frame_info_counter_ RTC_GUARDED_BY(main_thread_); - absl::optional num_unique_frames_ RTC_GUARDED_BY(main_thread_); - absl::optional last_estimated_playout_ntp_timestamp_ms_ + std::optional num_unique_frames_ RTC_GUARDED_BY(main_thread_); + std::optional last_estimated_playout_ntp_timestamp_ms_ RTC_GUARDED_BY(main_thread_); - absl::optional last_estimated_playout_time_ms_ + std::optional last_estimated_playout_time_ms_ RTC_GUARDED_BY(main_thread_); // The thread on which this instance is constructed and some of its main diff --git a/video/receive_statistics_proxy_unittest.cc b/video/receive_statistics_proxy_unittest.cc index a30a7e4490..565d076fdb 100644 --- a/video/receive_statistics_proxy_unittest.cc +++ b/video/receive_statistics_proxy_unittest.cc @@ -12,11 +12,11 @@ #include #include +#include #include #include #include -#include "absl/types/optional.h" #include "api/scoped_refptr.h" #include "api/units/frequency.h" #include "api/units/time_delta.h" @@ -26,6 +26,7 @@ #include "api/video/video_rotation.h" #include "rtc_base/thread.h" #include "system_wrappers/include/metrics.h" +#include "test/gmock.h" #include "test/gtest.h" #include "test/scoped_key_value_config.h" #include "test/time_controller/simulated_time_controller.h" @@ -34,6 +35,8 @@ namespace webrtc { namespace internal { namespace { +using ::testing::DoubleEq; + const TimeDelta kFreqOffsetProcessInterval = TimeDelta::Seconds(40); const uint32_t kRemoteSsrc = 456; const int kMinRequiredSamples = 200; @@ -60,7 +63,7 @@ class ReceiveStatisticsProxyTest : public ::testing::Test { return statistics_proxy_->GetStats(); } - void FlushAndUpdateHistograms(absl::optional fraction_lost, + void FlushAndUpdateHistograms(std::optional fraction_lost, const StreamDataCounters& rtp_stats, const StreamDataCounters* rtx_stats) { time_controller_.AdvanceTime(TimeDelta::Zero()); @@ -83,7 +86,7 @@ class ReceiveStatisticsProxyTest : public ::testing::Test { VideoFrame frame = VideoFrame::Builder() .set_video_frame_buffer(I420Buffer::Create(width, height)) - .set_timestamp_rtp(0) + .set_rtp_timestamp(0) .set_timestamp_ms(render_time_ms) .set_rotation(kVideoRotation_0) .build(); @@ -114,7 +117,7 @@ TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesFramesDecoded) { EXPECT_EQ(0u, statistics_proxy_->GetStats().frames_decoded); webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); for (uint32_t i = 1; i <= 3; ++i) { - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); EXPECT_EQ(i, FlushAndGetStats().frames_decoded); @@ -127,12 +130,12 @@ TEST_F(ReceiveStatisticsProxyTest, DecodedFpsIsReported) { TimeDelta::Seconds(metrics::kMinRunTimeInSeconds) * kFps; webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); for (int i = 0; i < kRequiredSamples; ++i) { - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); time_controller_.AdvanceTime(1 / kFps); } - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.DecodedFramesPerSecond")); EXPECT_METRIC_EQ(1, 
metrics::NumEvents("WebRTC.Video.DecodedFramesPerSecond", @@ -145,12 +148,12 @@ TEST_F(ReceiveStatisticsProxyTest, DecodedFpsIsNotReportedForTooFewSamples) { TimeDelta::Seconds(metrics::kMinRunTimeInSeconds) * kFps; webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); for (int i = 0; i < kRequiredSamples - 1; ++i) { - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); time_controller_.AdvanceTime(1 / kFps); } - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.DecodedFramesPerSecond")); } @@ -162,9 +165,9 @@ TEST_F(ReceiveStatisticsProxyTest, TimeDelta expected_total_decode_time = TimeDelta::Zero(); unsigned int expected_frames_decoded = 0; for (uint32_t i = 1; i <= 3; ++i) { - statistics_proxy_->OnDecodedFrame( - frame, absl::nullopt, TimeDelta::Millis(1), - VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Millis(1), + VideoContentType::UNSPECIFIED, + VideoFrameType::kVideoFrameKey); expected_total_decode_time += TimeDelta::Millis(1); ++expected_frames_decoded; time_controller_.AdvanceTime(TimeDelta::Zero()); @@ -199,9 +202,9 @@ TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesProcessingDelay) { frame.set_packet_infos(RtpPacketInfos(packet_infos)); for (int i = 1; i <= 3; ++i) { time_controller_.AdvanceTime(kProcessingDelay); - statistics_proxy_->OnDecodedFrame( - frame, absl::nullopt, TimeDelta::Millis(1), - VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Millis(1), + VideoContentType::UNSPECIFIED, + VideoFrameType::kVideoFrameKey); expected_total_processing_delay += i * kProcessingDelay; ++expected_frames_decoded; time_controller_.AdvanceTime(TimeDelta::Zero()); @@ -236,7 +239,7 @@ TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesAssemblyTime) { RtpPacketInfos::vector_type single_packet_frame = {RtpPacketInfo( /*ssrc=*/{}, /*csrcs=*/{}, /*rtp_timestamp=*/{}, /*receive_time=*/Now())}; frame.set_packet_infos(RtpPacketInfos(single_packet_frame)); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Millis(1), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Millis(1), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); ++expected_frames_decoded; @@ -299,7 +302,7 @@ TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesAssemblyTime) { } TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesQpSum) { - EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); + EXPECT_EQ(std::nullopt, statistics_proxy_->GetStats().qp_sum); webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); statistics_proxy_->OnDecodedFrame(frame, 3u, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, @@ -312,7 +315,7 @@ TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesQpSum) { } TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameIncreasesTotalDecodeTime) { - EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); + EXPECT_EQ(std::nullopt, statistics_proxy_->GetStats().qp_sum); webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); statistics_proxy_->OnDecodedFrame(frame, 3u, TimeDelta::Millis(4), VideoContentType::UNSPECIFIED, @@ -348,25 +351,25 @@ 
TEST_F(ReceiveStatisticsProxyTest, ReportsMaxInterframeDelay) { const TimeDelta kInterframeDelay2 = TimeDelta::Millis(200); const TimeDelta kInterframeDelay3 = TimeDelta::Millis(100); EXPECT_EQ(-1, statistics_proxy_->GetStats().interframe_delay_max_ms); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); EXPECT_EQ(-1, FlushAndGetStats().interframe_delay_max_ms); time_controller_.AdvanceTime(kInterframeDelay1); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameDelta); EXPECT_EQ(kInterframeDelay1.ms(), FlushAndGetStats().interframe_delay_max_ms); time_controller_.AdvanceTime(kInterframeDelay2); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameDelta); EXPECT_EQ(kInterframeDelay2.ms(), FlushAndGetStats().interframe_delay_max_ms); time_controller_.AdvanceTime(kInterframeDelay3); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameDelta); // kInterframeDelay3 is smaller than kInterframeDelay2. @@ -379,26 +382,26 @@ TEST_F(ReceiveStatisticsProxyTest, ReportInterframeDelayInWindow) { const TimeDelta kInterframeDelay2 = TimeDelta::Millis(750); const TimeDelta kInterframeDelay3 = TimeDelta::Millis(700); EXPECT_EQ(-1, statistics_proxy_->GetStats().interframe_delay_max_ms); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); EXPECT_EQ(-1, FlushAndGetStats().interframe_delay_max_ms); time_controller_.AdvanceTime(kInterframeDelay1); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameDelta); EXPECT_EQ(kInterframeDelay1.ms(), FlushAndGetStats().interframe_delay_max_ms); time_controller_.AdvanceTime(kInterframeDelay2); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameDelta); // Still first delay is the maximum EXPECT_EQ(kInterframeDelay1.ms(), FlushAndGetStats().interframe_delay_max_ms); time_controller_.AdvanceTime(kInterframeDelay3); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameDelta); // Now the first sample is out of the window, so the second is the maximum. 
@@ -507,24 +510,24 @@ TEST_F(ReceiveStatisticsProxyTest, ReportsTotalSquaredInterFrameDelay) { TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithoutQpQpSumWontExist) { webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); - EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + EXPECT_EQ(std::nullopt, statistics_proxy_->GetStats().qp_sum); + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); - EXPECT_EQ(absl::nullopt, FlushAndGetStats().qp_sum); + EXPECT_EQ(std::nullopt, FlushAndGetStats().qp_sum); } TEST_F(ReceiveStatisticsProxyTest, OnDecodedFrameWithoutQpResetsQpSum) { webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); - EXPECT_EQ(absl::nullopt, statistics_proxy_->GetStats().qp_sum); + EXPECT_EQ(std::nullopt, statistics_proxy_->GetStats().qp_sum); statistics_proxy_->OnDecodedFrame(frame, 3u, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); EXPECT_EQ(3u, FlushAndGetStats().qp_sum); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameDelta); - EXPECT_EQ(absl::nullopt, FlushAndGetStats().qp_sum); + EXPECT_EQ(std::nullopt, FlushAndGetStats().qp_sum); } TEST_F(ReceiveStatisticsProxyTest, OnRenderedFrameIncreasesFramesRendered) { @@ -536,6 +539,32 @@ TEST_F(ReceiveStatisticsProxyTest, OnRenderedFrameIncreasesFramesRendered) { } } +TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsOnCorruptionScore) { + EXPECT_EQ(std::nullopt, statistics_proxy_->GetStats().corruption_score_sum); + EXPECT_EQ(std::nullopt, + statistics_proxy_->GetStats().corruption_score_squared_sum); + EXPECT_EQ(0u, statistics_proxy_->GetStats().corruption_score_count); + + const std::vector corruption_scores = {0.5, 0.25, 0.80}; + const double kExpectedCorruptionScoreSum = 0.5 + 0.25 + 0.80; + const double kExpectedCorruptionScoreSquaredSum = + 0.5 * 0.5 + 0.25 * 0.25 + 0.80 * 0.80; + for (size_t i = 0; i < corruption_scores.size(); ++i) { + statistics_proxy_->OnCorruptionScore( + /*corruption_score=*/corruption_scores[i], + VideoContentType::UNSPECIFIED); + } + + time_controller_.AdvanceTime(TimeDelta::Zero()); + + VideoReceiveStreamInterface::Stats stats = statistics_proxy_->GetStats(); + EXPECT_THAT(kExpectedCorruptionScoreSum, + DoubleEq(*stats.corruption_score_sum)); + EXPECT_THAT(kExpectedCorruptionScoreSquaredSum, + DoubleEq(*stats.corruption_score_squared_sum)); + EXPECT_EQ(3u, stats.corruption_score_count); +} + TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsSsrc) { EXPECT_EQ(kRemoteSsrc, statistics_proxy_->GetStats().ssrc); } @@ -549,8 +578,8 @@ TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsIncomingPayloadType) { TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsDecoderInfo) { auto init_stats = statistics_proxy_->GetStats(); - EXPECT_EQ(init_stats.decoder_implementation_name, absl::nullopt); - EXPECT_EQ(init_stats.power_efficient_decoder, absl::nullopt); + EXPECT_EQ(init_stats.decoder_implementation_name, std::nullopt); + EXPECT_EQ(init_stats.power_efficient_decoder, std::nullopt); const VideoDecoder::DecoderInfo decoder_info{ .implementation_name = "decoderName", .is_hardware_accelerated = true}; @@ -655,12 +684,12 @@ TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsFrameCounts) { const int kDeltaFrames = 22; webrtc::VideoFrame frame = 
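The new GetStatsReportsOnCorruptionScore test above checks that only the running sum, squared sum and count of per-frame corruption scores are exposed. A standalone sketch showing why those three accumulators are enough for a stats consumer to derive the mean and variance; the score values are the test's own:

  #include <cassert>
  #include <cmath>
  #include <vector>

  int main() {
    const std::vector<double> scores = {0.5, 0.25, 0.80};  // the test's values
    double sum = 0.0;
    double squared_sum = 0.0;
    unsigned count = 0;
    for (double score : scores) {
      sum += score;
      squared_sum += score * score;
      ++count;
    }
    // Mean and (population) variance fall out of the three accumulators.
    const double mean = sum / count;
    const double variance = squared_sum / count - mean * mean;
    assert(std::abs(mean - (0.5 + 0.25 + 0.80) / 3.0) < 1e-9);
    assert(variance >= 0.0);
    return 0;
  }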
CreateFrame(kWidth, kHeight); for (int i = 0; i < kKeyFrames; i++) { - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); } for (int i = 0; i < kDeltaFrames; i++) { - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameDelta); } @@ -688,7 +717,7 @@ TEST_F(ReceiveStatisticsProxyTest, ReportsLongestTimingFrameInfo) { const int64_t kLongEndToEndDelay = 100; const uint32_t kExpectedRtpTimestamp = 2; TimingFrameInfo info; - absl::optional result; + std::optional result; info.rtp_timestamp = kExpectedRtpTimestamp - 1; info.capture_time_ms = 0; info.decode_finish_ms = kShortEndToEndDelay; @@ -713,7 +742,7 @@ TEST_F(ReceiveStatisticsProxyTest, RespectsReportingIntervalForTimingFrames) { const uint32_t kExpectedRtpTimestamp = 2; const TimeDelta kShortDelay = TimeDelta::Seconds(1); const TimeDelta kLongDelay = TimeDelta::Seconds(10); - absl::optional result; + std::optional result; info.rtp_timestamp = kExpectedRtpTimestamp; info.capture_time_ms = 0; info.decode_finish_ms = kShortEndToEndDelay; @@ -735,11 +764,11 @@ TEST_F(ReceiveStatisticsProxyTest, LifetimeHistogramIsUpdated) { webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); statistics_proxy_->OnCompleteFrame(true, 1000, VideoContentType::UNSPECIFIED); statistics_proxy_->OnDecodedFrame( - frame, absl::nullopt, TimeDelta::Millis(1000), + frame, std::nullopt, TimeDelta::Millis(1000), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); FlushAndGetStats(); - statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ( 1, metrics::NumSamples("WebRTC.Video.ReceiveStreamLifetimeInSeconds")); @@ -753,7 +782,7 @@ TEST_F(ReceiveStatisticsProxyTest, const TimeDelta kLifetime = TimeDelta::Seconds(3); time_controller_.AdvanceTime(kLifetime); // No frames received. 
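The ReportsLongestTimingFrameInfo hunk above feeds several timing reports and expects the one with the longest end-to-end delay (decode finish minus capture time) to be the one returned. A standalone sketch of that selection with a stand-in struct, not the real webrtc::TimingFrameInfo:

  #include <cassert>
  #include <cstdint>
  #include <optional>

  struct FakeTimingInfo {
    uint32_t rtp_timestamp = 0;
    int64_t capture_time_ms = 0;
    int64_t decode_finish_ms = 0;
    int64_t EndToEndDelayMs() const { return decode_finish_ms - capture_time_ms; }
  };

  int main() {
    const FakeTimingInfo reports[] = {
        {/*rtp_timestamp=*/1, /*capture_time_ms=*/0, /*decode_finish_ms=*/10},
        {/*rtp_timestamp=*/2, /*capture_time_ms=*/0, /*decode_finish_ms=*/100},
        {/*rtp_timestamp=*/3, /*capture_time_ms=*/0, /*decode_finish_ms=*/10}};
    std::optional<FakeTimingInfo> longest;
    for (const FakeTimingInfo& info : reports) {
      if (!longest || info.EndToEndDelayMs() > longest->EndToEndDelayMs()) {
        longest = info;
      }
    }
    // The report with the longest end-to-end delay wins.
    assert(longest->rtp_timestamp == 2u);
    return 0;
  }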
- statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ( 0, metrics::NumSamples("WebRTC.Video.ReceiveStreamLifetimeInSeconds")); @@ -781,7 +810,7 @@ TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsPlayoutTimestamp) { const int64_t kVideoNtpMs = 21; const int64_t kSyncOffsetMs = 22; const double kFreqKhz = 90.0; - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, statistics_proxy_->GetStats().estimated_playout_ntp_timestamp_ms); statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz); EXPECT_EQ(kVideoNtpMs, FlushAndGetStats().estimated_playout_ntp_timestamp_ms); @@ -811,7 +840,7 @@ TEST_F(ReceiveStatisticsProxyTest, AvSyncOffsetHistogramIsUpdated) { statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz); } - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.AVSyncOffsetInMs")); EXPECT_METRIC_EQ( 1, metrics::NumEvents("WebRTC.Video.AVSyncOffsetInMs", kSyncOffsetMs)); @@ -835,7 +864,7 @@ TEST_F(ReceiveStatisticsProxyTest, RtpToNtpFrequencyOffsetHistogramIsUpdated) { time_controller_.AdvanceTime(kFreqOffsetProcessInterval); //) Process interval passed, max diff: 4. statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz); - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); // Average reported: (2 + 4) / 2 = 3. EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.RtpToNtpFreqOffsetInKhz")); @@ -849,7 +878,7 @@ TEST_F(ReceiveStatisticsProxyTest, Vp8QpHistogramIsUpdated) { for (int i = 0; i < kMinRequiredSamples; ++i) statistics_proxy_->OnPreDecode(kVideoCodecVP8, kQp); - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.Decoded.Vp8.Qp")); EXPECT_METRIC_EQ(1, metrics::NumEvents("WebRTC.Video.Decoded.Vp8.Qp", kQp)); } @@ -860,7 +889,7 @@ TEST_F(ReceiveStatisticsProxyTest, Vp8QpHistogramIsNotUpdatedForTooFewSamples) { for (int i = 0; i < kMinRequiredSamples - 1; ++i) statistics_proxy_->OnPreDecode(kVideoCodecVP8, kQp); - statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.Decoded.Vp8.Qp")); } @@ -869,7 +898,7 @@ TEST_F(ReceiveStatisticsProxyTest, Vp8QpHistogramIsNotUpdatedIfNoQpValue) { for (int i = 0; i < kMinRequiredSamples; ++i) statistics_proxy_->OnPreDecode(kVideoCodecVP8, -1); - statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.Decoded.Vp8.Qp")); } @@ -884,7 +913,7 @@ TEST_F(ReceiveStatisticsProxyTest, statistics_proxy_->OnCompleteFrame(kIsKeyFrame, kFrameSizeBytes, VideoContentType::UNSPECIFIED); statistics_proxy_->OnDecodedFrame( - frame, absl::nullopt, TimeDelta::Millis(1000), + frame, std::nullopt, TimeDelta::Millis(1000), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameDelta); } FlushAndGetStats(); @@ -893,7 +922,7 @@ TEST_F(ReceiveStatisticsProxyTest, EXPECT_EQ(kMinRequiredSamples - 1, 
statistics_proxy_->GetStats().frame_counts.delta_frames); - statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ( 0, metrics::NumSamples("WebRTC.Video.KeyFramesReceivedInPermille")); @@ -909,7 +938,7 @@ TEST_F(ReceiveStatisticsProxyTest, statistics_proxy_->OnCompleteFrame(kIsKeyFrame, kFrameSizeBytes, VideoContentType::UNSPECIFIED); statistics_proxy_->OnDecodedFrame( - frame, absl::nullopt, TimeDelta::Millis(1000), + frame, std::nullopt, TimeDelta::Millis(1000), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameDelta); } FlushAndGetStats(); @@ -918,7 +947,7 @@ TEST_F(ReceiveStatisticsProxyTest, EXPECT_EQ(kMinRequiredSamples, statistics_proxy_->GetStats().frame_counts.delta_frames); - statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ( 1, metrics::NumSamples("WebRTC.Video.KeyFramesReceivedInPermille")); @@ -934,14 +963,14 @@ TEST_F(ReceiveStatisticsProxyTest, KeyFrameHistogramIsUpdated) { statistics_proxy_->OnCompleteFrame(true, kFrameSizeBytes, VideoContentType::UNSPECIFIED); statistics_proxy_->OnDecodedFrame( - frame, absl::nullopt, TimeDelta::Millis(1000), + frame, std::nullopt, TimeDelta::Millis(1000), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); } for (int i = 0; i < kMinRequiredSamples; ++i) { statistics_proxy_->OnCompleteFrame(false, kFrameSizeBytes, VideoContentType::UNSPECIFIED); statistics_proxy_->OnDecodedFrame( - frame, absl::nullopt, TimeDelta::Millis(1000), + frame, std::nullopt, TimeDelta::Millis(1000), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameDelta); } FlushAndGetStats(); @@ -951,7 +980,7 @@ TEST_F(ReceiveStatisticsProxyTest, KeyFrameHistogramIsUpdated) { EXPECT_EQ(kMinRequiredSamples, statistics_proxy_->GetStats().frame_counts.delta_frames); - statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ( 1, metrics::NumSamples("WebRTC.Video.KeyFramesReceivedInPermille")); @@ -973,7 +1002,7 @@ TEST_F(ReceiveStatisticsProxyTest, TimingHistogramsNotUpdatedForTooFewSamples) { kMinPlayoutDelayMs, kRenderDelayMs); } - statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.DecodeTimeInMs")); EXPECT_METRIC_EQ(0, @@ -997,7 +1026,7 @@ TEST_F(ReceiveStatisticsProxyTest, TimingHistogramsAreUpdated) { kMinPlayoutDelayMs, kRenderDelayMs); } - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.JitterBufferDelayInMs")); EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.TargetDelayInMs")); @@ -1023,7 +1052,7 @@ TEST_F(ReceiveStatisticsProxyTest, DoesNotReportStaleFramerates) { // i.e. 
bad frame.set_ntp_time_ms( time_controller_.GetClock()->CurrentNtpInMilliseconds()); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); statistics_proxy_->OnRenderedFrame(MetaData(frame)); @@ -1061,7 +1090,7 @@ TEST_F(ReceiveStatisticsProxyTest, statistics_proxy_->OnRenderedFrame(MetaData(CreateFrame(kWidth, kHeight))); } - statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.ReceivedWidthInPixels")); @@ -1078,7 +1107,7 @@ TEST_F(ReceiveStatisticsProxyTest, ReceivedFrameHistogramsAreUpdated) { statistics_proxy_->OnRenderedFrame(MetaData(CreateFrame(kWidth, kHeight))); } - statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.ReceivedWidthInPixels")); @@ -1096,7 +1125,7 @@ TEST_F(ReceiveStatisticsProxyTest, ReceivedFrameHistogramsAreUpdated) { TEST_F(ReceiveStatisticsProxyTest, ZeroDelayReportedIfFrameNotDelayed) { webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); @@ -1107,7 +1136,7 @@ TEST_F(ReceiveStatisticsProxyTest, ZeroDelayReportedIfFrameNotDelayed) { // Min run time has passed. time_controller_.AdvanceTime( TimeDelta::Seconds((metrics::kMinRunTimeInSeconds))); - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.DelayedFramesToRenderer")); EXPECT_METRIC_EQ( @@ -1119,7 +1148,7 @@ TEST_F(ReceiveStatisticsProxyTest, ZeroDelayReportedIfFrameNotDelayed) { TEST_F(ReceiveStatisticsProxyTest, DelayedFrameHistogramsAreNotUpdatedIfMinRuntimeHasNotPassed) { webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); @@ -1130,7 +1159,7 @@ TEST_F(ReceiveStatisticsProxyTest, // Min run time has not passed. time_controller_.AdvanceTime( TimeDelta::Seconds(metrics::kMinRunTimeInSeconds) - TimeDelta::Millis(1)); - statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.DelayedFramesToRenderer")); @@ -1141,14 +1170,14 @@ TEST_F(ReceiveStatisticsProxyTest, TEST_F(ReceiveStatisticsProxyTest, DelayedFramesHistogramsAreNotUpdatedIfNoRenderedFrames) { webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); // Min run time has passed. No rendered frames. 
time_controller_.AdvanceTime( TimeDelta::Seconds((metrics::kMinRunTimeInSeconds))); - statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.DelayedFramesToRenderer")); @@ -1158,7 +1187,7 @@ TEST_F(ReceiveStatisticsProxyTest, TEST_F(ReceiveStatisticsProxyTest, DelayReportedIfFrameIsDelayed) { webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); @@ -1169,7 +1198,7 @@ TEST_F(ReceiveStatisticsProxyTest, DelayReportedIfFrameIsDelayed) { // Min run time has passed. time_controller_.AdvanceTime( TimeDelta::Seconds(metrics::kMinRunTimeInSeconds)); - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.DelayedFramesToRenderer")); EXPECT_METRIC_EQ( @@ -1183,7 +1212,7 @@ TEST_F(ReceiveStatisticsProxyTest, DelayReportedIfFrameIsDelayed) { TEST_F(ReceiveStatisticsProxyTest, AverageDelayOfDelayedFramesIsReported) { webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), VideoContentType::UNSPECIFIED, VideoFrameType::kVideoFrameKey); @@ -1202,7 +1231,7 @@ TEST_F(ReceiveStatisticsProxyTest, AverageDelayOfDelayedFramesIsReported) { // Min run time has passed. time_controller_.AdvanceTime( TimeDelta::Seconds(metrics::kMinRunTimeInSeconds)); - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.DelayedFramesToRenderer")); EXPECT_METRIC_EQ( @@ -1225,7 +1254,7 @@ TEST_F(ReceiveStatisticsProxyTest, RtcpPacketTypeCounter counter; statistics_proxy_->RtcpPacketTypesCounterUpdated(kRemoteSsrc, counter); - statistics_proxy_->UpdateHistograms(absl::nullopt, data_counters, nullptr); + statistics_proxy_->UpdateHistograms(std::nullopt, data_counters, nullptr); EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.FirPacketsSentPerMinute")); EXPECT_METRIC_EQ(0, @@ -1250,7 +1279,7 @@ TEST_F(ReceiveStatisticsProxyTest, RtcpHistogramsAreUpdated) { counter.nack_packets = kNackPackets; statistics_proxy_->RtcpPacketTypesCounterUpdated(kRemoteSsrc, counter); - statistics_proxy_->UpdateHistograms(absl::nullopt, data_counters, nullptr); + statistics_proxy_->UpdateHistograms(std::nullopt, data_counters, nullptr); EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.FirPacketsSentPerMinute")); EXPECT_METRIC_EQ(1, @@ -1337,18 +1366,18 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, InterFrameDelaysAreReported) { webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); for (int i = 0; i < kMinRequiredSamples; ++i) { - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameKey); time_controller_.AdvanceTime(kInterFrameDelay); } // One extra with double the interval. 
time_controller_.AdvanceTime(kInterFrameDelay); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameDelta); - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); const TimeDelta kExpectedInterFrame = (kInterFrameDelay * (kMinRequiredSamples - 1) + kInterFrameDelay * 2) / kMinRequiredSamples; @@ -1375,24 +1404,24 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, for (int i = 0; i <= kMinRequiredSamples - kLastFivePercentsSamples; ++i) { time_controller_.AdvanceTime(kInterFrameDelay); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameKey); } // Last 5% of intervals are double in size. for (int i = 0; i < kLastFivePercentsSamples; ++i) { time_controller_.AdvanceTime(2 * kInterFrameDelay); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameKey); } // Final sample is outlier and 10 times as big. time_controller_.AdvanceTime(10 * kInterFrameDelay); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameKey); - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); const TimeDelta kExpectedInterFrame = kInterFrameDelay * 2; if (videocontenttypehelpers::IsScreenshare(content_type_)) { EXPECT_METRIC_EQ( @@ -1412,7 +1441,7 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); for (int i = 0; i < kMinRequiredSamples; ++i) { - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameKey); time_controller_.AdvanceTime(kInterFrameDelay); @@ -1420,7 +1449,7 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, // `kMinRequiredSamples` samples, and thereby intervals, is required. That // means we're one frame short of having a valid data set. - statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.InterframeDelayInMs")); EXPECT_METRIC_EQ(0, @@ -1436,7 +1465,7 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, MaxInterFrameDelayOnlyWithPause) { webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); for (int i = 0; i <= kMinRequiredSamples; ++i) { - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameKey); time_controller_.AdvanceTime(kInterFrameDelay); @@ -1448,15 +1477,15 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, MaxInterFrameDelayOnlyWithPause) { time_controller_.AdvanceTime(TimeDelta::Seconds(5)); // Insert two more frames. The interval during the pause should be // disregarded in the stats. 
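The InterFrameDelaysAreReported hunk above expects the average of kMinRequiredSamples - 1 regular intervals plus one doubled interval. A worked version of that arithmetic; kMinRequiredSamples is 200 as in the fixture, while the 33 ms interval is an assumption for illustration:

  #include <cassert>

  int main() {
    const int kMinRequiredSamples = 200;  // matches the fixture constant
    const int kInterFrameDelayMs = 33;    // assumed regular interval
    const int kExpectedInterFrameMs =
        (kInterFrameDelayMs * (kMinRequiredSamples - 1) +
         kInterFrameDelayMs * 2) /
        kMinRequiredSamples;
    // 33 * 201 / 200 = 33 with integer division: one doubled interval barely
    // moves the reported average.
    assert(kExpectedInterFrameMs == 33);
    return 0;
  }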
- statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameKey); time_controller_.AdvanceTime(kInterFrameDelay); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameDelta); - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); if (videocontenttypehelpers::IsScreenshare(content_type_)) { EXPECT_METRIC_EQ( 1, metrics::NumSamples("WebRTC.Video.Screenshare.InterframeDelayInMs")); @@ -1480,6 +1509,42 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, MaxInterFrameDelayOnlyWithPause) { } } +TEST_P(ReceiveStatisticsProxyTestWithContent, CorruptionScore) { + const std::vector corruption_scores = {0.5, 0.25, 0.80}; + const int kCorruptionLikelihoodPermille = + static_cast((0.5 + 0.25 + 0.80) / 3 * 1000); + for (size_t i = 0; i < corruption_scores.size(); ++i) { + statistics_proxy_->OnCorruptionScore( + /*corruption_score=*/corruption_scores[i], content_type_); + } + + FlushAndGetStats(); + EXPECT_EQ(3u, statistics_proxy_->GetStats().corruption_score_count); + + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), + nullptr); + if (videocontenttypehelpers::IsScreenshare(content_type_)) { + EXPECT_METRIC_EQ( + 1, metrics::NumSamples( + "WebRTC.Video.Screenshare.CorruptionLikelihoodPermille")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents( + "WebRTC.Video.Screenshare.CorruptionLikelihoodPermille", + kCorruptionLikelihoodPermille)); + EXPECT_METRIC_EQ( + 0, metrics::NumSamples("WebRTC.Video.CorruptionLikelihoodPermille")); + } else { + EXPECT_METRIC_EQ( + 1, metrics::NumSamples("WebRTC.Video.CorruptionLikelihoodPermille")); + EXPECT_METRIC_EQ( + 1, metrics::NumEvents("WebRTC.Video.CorruptionLikelihoodPermille", + kCorruptionLikelihoodPermille)); + EXPECT_METRIC_EQ( + 0, metrics::NumSamples( + "WebRTC.Video.Screenshare.CorruptionLikelihoodPermille")); + } +} + TEST_P(ReceiveStatisticsProxyTestWithContent, FreezesAreReported) { const TimeDelta kInterFrameDelay = TimeDelta::Millis(33); const TimeDelta kFreezeDelay = TimeDelta::Millis(200); @@ -1490,7 +1555,7 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, FreezesAreReported) { for (int i = 0; i < kMinRequiredSamples; ++i) { VideoFrameMetaData meta = MetaData(frame); statistics_proxy_->OnDecodedFrame( - meta, absl::nullopt, TimeDelta::Zero(), TimeDelta::Zero(), + meta, std::nullopt, TimeDelta::Zero(), TimeDelta::Zero(), TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameKey); statistics_proxy_->OnRenderedFrame(meta); time_controller_.AdvanceTime(kInterFrameDelay); @@ -1499,11 +1564,11 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, FreezesAreReported) { time_controller_.AdvanceTime(kFreezeDelay); VideoFrameMetaData meta = MetaData(frame); statistics_proxy_->OnDecodedFrame( - meta, absl::nullopt, TimeDelta::Zero(), TimeDelta::Zero(), + meta, std::nullopt, TimeDelta::Zero(), TimeDelta::Zero(), TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameDelta); statistics_proxy_->OnRenderedFrame(meta); - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); const TimeDelta kExpectedTimeBetweenFreezes = kInterFrameDelay * (kMinRequiredSamples - 1); const int 
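The new CorruptionScore test above converts the average score to permille with a plain truncating cast. A worked version of that computation using the test's own values:

  #include <cassert>
  #include <vector>

  int main() {
    const std::vector<double> corruption_scores = {0.5, 0.25, 0.80};
    double sum = 0.0;
    for (double score : corruption_scores) sum += score;
    // Average scaled to permille and truncated, as in the test above.
    const int corruption_likelihood_permille =
        static_cast<int>(sum / corruption_scores.size() * 1000);
    assert(corruption_likelihood_permille == 516);
    return 0;
  }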
kExpectedNumberFreezesPerMinute = 60 / kCallDuration.seconds(); @@ -1538,7 +1603,7 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, HarmonicFrameRateIsReported) { for (int i = 0; i < kMinRequiredSamples; ++i) { time_controller_.AdvanceTime(kFrameDuration); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameKey); statistics_proxy_->OnRenderedFrame(MetaData(frame)); @@ -1547,7 +1612,7 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, HarmonicFrameRateIsReported) { // Freezes and pauses should be included into harmonic frame rate. // Add freeze. time_controller_.AdvanceTime(kFreezeDuration); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameDelta); statistics_proxy_->OnRenderedFrame(MetaData(frame)); @@ -1555,12 +1620,12 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, HarmonicFrameRateIsReported) { // Add pause. time_controller_.AdvanceTime(kPauseDuration); statistics_proxy_->OnStreamInactive(); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameDelta); statistics_proxy_->OnRenderedFrame(MetaData(frame)); - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); double kSumSquaredFrameDurationSecs = (kMinRequiredSamples - 1) * (kFrameDuration.seconds() * kFrameDuration.seconds()); @@ -1588,7 +1653,7 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, PausesAreIgnored) { for (int i = 0; i <= kMinRequiredSamples; ++i) { VideoFrameMetaData meta = MetaData(frame); statistics_proxy_->OnDecodedFrame( - meta, absl::nullopt, TimeDelta::Zero(), TimeDelta::Zero(), + meta, std::nullopt, TimeDelta::Zero(), TimeDelta::Zero(), TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameKey); statistics_proxy_->OnRenderedFrame(meta); time_controller_.AdvanceTime(kInterFrameDelay); @@ -1600,13 +1665,13 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, PausesAreIgnored) { for (int i = 0; i <= kMinRequiredSamples * 3; ++i) { VideoFrameMetaData meta = MetaData(frame); statistics_proxy_->OnDecodedFrame( - meta, absl::nullopt, TimeDelta::Zero(), TimeDelta::Zero(), + meta, std::nullopt, TimeDelta::Zero(), TimeDelta::Zero(), TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameDelta); statistics_proxy_->OnRenderedFrame(meta); time_controller_.AdvanceTime(kInterFrameDelay); } - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); // Average of two playback intervals. 
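The HarmonicFrameRateIsReported hunk above builds kSumSquaredFrameDurationSecs because the harmonic frame rate used here is the duration-weighted harmonic mean of the instantaneous rate, i.e. total duration divided by the sum of squared frame durations, which penalizes freezes and pauses far more than a plain average would. A standalone sketch of that formula under assumed frame timings:

  #include <cassert>
  #include <vector>

  double HarmonicFrameRate(const std::vector<double>& frame_durations_secs) {
    double total = 0.0;
    double sum_squared = 0.0;
    for (double d : frame_durations_secs) {
      total += d;
      sum_squared += d * d;
    }
    return sum_squared > 0.0 ? total / sum_squared : 0.0;
  }

  int main() {
    // A nominal 30 fps stream with one 1-second freeze (assumed timings).
    std::vector<double> durations(29, 1.0 / 30.0);
    durations.push_back(1.0);
    const double harmonic = HarmonicFrameRate(durations);
    const double average = durations.size() / (29.0 / 30.0 + 1.0);
    // The single freeze drags the harmonic rate well below the average FPS.
    assert(harmonic < average);
    return 0;
  }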
const TimeDelta kExpectedTimeBetweenFreezes = kInterFrameDelay * kMinRequiredSamples * 2; @@ -1631,19 +1696,19 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, ManyPausesAtTheBeginning) { webrtc::VideoFrame frame = CreateFrame(kWidth, kHeight); for (int i = 0; i <= kMinRequiredSamples; ++i) { - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameKey); time_controller_.AdvanceTime(kInterFrameDelay); statistics_proxy_->OnStreamInactive(); time_controller_.AdvanceTime(kPauseDuration); - statistics_proxy_->OnDecodedFrame(frame, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame, std::nullopt, TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameDelta); time_controller_.AdvanceTime(kInterFrameDelay); } - statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); // No freezes should be detected, as all long inter-frame delays were // pauses. @@ -1665,7 +1730,7 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, TimeInHdReported) { for (int i = 0; i < kMinRequiredSamples; ++i) { VideoFrameMetaData meta = MetaData(frame_hd); statistics_proxy_->OnDecodedFrame( - meta, absl::nullopt, TimeDelta::Zero(), TimeDelta::Zero(), + meta, std::nullopt, TimeDelta::Zero(), TimeDelta::Zero(), TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameKey); statistics_proxy_->OnRenderedFrame(meta); time_controller_.AdvanceTime(kInterFrameDelay); @@ -1674,7 +1739,7 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, TimeInHdReported) { for (int i = 0; i < 2 * kMinRequiredSamples; ++i) { VideoFrameMetaData meta = MetaData(frame_sd); statistics_proxy_->OnDecodedFrame( - meta, absl::nullopt, TimeDelta::Zero(), TimeDelta::Zero(), + meta, std::nullopt, TimeDelta::Zero(), TimeDelta::Zero(), TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameKey); statistics_proxy_->OnRenderedFrame(meta); time_controller_.AdvanceTime(kInterFrameDelay); @@ -1682,7 +1747,7 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, TimeInHdReported) { // Extra last frame. statistics_proxy_->OnRenderedFrame(MetaData(frame_sd)); - statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); const int kExpectedTimeInHdPercents = 33; if (videocontenttypehelpers::IsScreenshare(content_type_)) { @@ -1725,7 +1790,7 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, TimeInBlockyVideoReported) { VideoFrameType::kVideoFrameKey); statistics_proxy_->OnRenderedFrame(MetaData(frame)); - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); const int kExpectedTimeInHdPercents = 66; if (videocontenttypehelpers::IsScreenshare(content_type_)) { EXPECT_METRIC_EQ( @@ -1748,7 +1813,7 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, DownscalesReported) { webrtc::VideoFrame frame_ld = CreateFrame(320, 180); // Call once to pass content type. - statistics_proxy_->OnDecodedFrame(frame_hd, absl::nullopt, TimeDelta::Zero(), + statistics_proxy_->OnDecodedFrame(frame_hd, std::nullopt, TimeDelta::Zero(), content_type_, VideoFrameType::kVideoFrameKey); @@ -1761,7 +1826,7 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, DownscalesReported) { // Downscale. 
statistics_proxy_->OnRenderedFrame(MetaData(frame_ld)); time_controller_.AdvanceTime(kInterFrameDelay); - statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(), + statistics_proxy_->UpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); const int kExpectedDownscales = 30; // 2 per 4 seconds = 30 per minute. if (!videocontenttypehelpers::IsScreenshare(content_type_)) { @@ -1783,7 +1848,7 @@ TEST_P(ReceiveStatisticsProxyTestWithContent, DecodeTimeReported) { VideoFrameType::kVideoFrameKey); time_controller_.AdvanceTime(kInterFrameDelay); } - FlushAndUpdateHistograms(absl::nullopt, StreamDataCounters(), nullptr); + FlushAndUpdateHistograms(std::nullopt, StreamDataCounters(), nullptr); EXPECT_METRIC_EQ( 1, metrics::NumEvents("WebRTC.Video.DecodeTimeInMs", kDecodeTime.ms())); } diff --git a/video/render/BUILD.gn b/video/render/BUILD.gn index ff721dc61c..6b235838fd 100644 --- a/video/render/BUILD.gn +++ b/video/render/BUILD.gn @@ -26,10 +26,7 @@ rtc_library("incoming_video_stream") { "../../rtc_base:event_tracer", "../../rtc_base:macromagic", "../../rtc_base:race_checker", - "../../rtc_base:rtc_task_queue", ] - - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("video_render_frames") { @@ -47,5 +44,4 @@ rtc_library("video_render_frames") { "../../rtc_base:timeutils", "../../system_wrappers:metrics", ] - absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } diff --git a/video/render/incoming_video_stream.cc b/video/render/incoming_video_stream.cc index e740c47bd0..8d4e1ea5f2 100644 --- a/video/render/incoming_video_stream.cc +++ b/video/render/incoming_video_stream.cc @@ -11,9 +11,9 @@ #include "video/render/incoming_video_stream.h" #include +#include #include -#include "absl/types/optional.h" #include "api/units/time_delta.h" #include "rtc_base/checks.h" #include "rtc_base/trace_event.h" @@ -24,7 +24,7 @@ namespace webrtc { IncomingVideoStream::IncomingVideoStream( TaskQueueFactory* task_queue_factory, int32_t delay_ms, - rtc::VideoSinkInterface* callback) + VideoSinkInterface* callback) : render_buffers_(delay_ms), callback_(callback), incoming_render_queue_(task_queue_factory->CreateTaskQueue( @@ -33,17 +33,23 @@ IncomingVideoStream::IncomingVideoStream( IncomingVideoStream::~IncomingVideoStream() { RTC_DCHECK(main_thread_checker_.IsCurrent()); + // The queue must be destroyed before its pointer is invalidated to avoid race + // between destructor and posting task to the task queue from itself. + // std::unique_ptr destructor does the same two operations in reverse order as + // it doesn't expect member would be used after its destruction has started. + incoming_render_queue_.get_deleter()(incoming_render_queue_.get()); + incoming_render_queue_.release(); } void IncomingVideoStream::OnFrame(const VideoFrame& video_frame) { TRACE_EVENT0("webrtc", "IncomingVideoStream::OnFrame"); RTC_CHECK_RUNS_SERIALIZED(&decoder_race_checker_); - RTC_DCHECK(!incoming_render_queue_.IsCurrent()); + RTC_DCHECK(!incoming_render_queue_->IsCurrent()); // TODO(srte): Using video_frame = std::move(video_frame) would move the frame // into the lambda instead of copying it, but it doesn't work unless we change // OnFrame to take its frame argument by value instead of const reference. 
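The ~IncomingVideoStream change above runs the task queue's deleter while the unique_ptr still returns a valid get(), then release()s it, so that tasks drained during deletion can still observe the member. A minimal standalone sketch of that idiom with stand-in types, not the WebRTC TaskQueueBase API:

  #include <cstdio>
  #include <memory>

  struct FakeQueue {
    void Shutdown() { std::puts("draining pending tasks"); }
  };

  struct QueueDeleter {
    void operator()(FakeQueue* queue) const {
      queue->Shutdown();  // may run work that reads owner->queue_.get()
      delete queue;
    }
  };

  class Owner {
   public:
    Owner() : queue_(new FakeQueue) {}
    ~Owner() {
      // Delete the queue while queue_.get() is still valid for anything the
      // deleter runs, then detach so ~unique_ptr does not double-delete.
      queue_.get_deleter()(queue_.get());
      queue_.release();
    }

   private:
    std::unique_ptr<FakeQueue, QueueDeleter> queue_;
  };

  int main() {
    Owner owner;  // destructor demonstrates the ordering
    return 0;
  }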
- incoming_render_queue_.PostTask([this, video_frame = video_frame]() mutable { - RTC_DCHECK_RUN_ON(&incoming_render_queue_); + incoming_render_queue_->PostTask([this, video_frame = video_frame]() mutable { + RTC_DCHECK_RUN_ON(incoming_render_queue_.get()); if (render_buffers_.AddFrame(std::move(video_frame)) == 1) Dequeue(); }); @@ -51,14 +57,14 @@ void IncomingVideoStream::OnFrame(const VideoFrame& video_frame) { void IncomingVideoStream::Dequeue() { TRACE_EVENT0("webrtc", "IncomingVideoStream::Dequeue"); - RTC_DCHECK_RUN_ON(&incoming_render_queue_); - absl::optional frame_to_render = render_buffers_.FrameToRender(); + RTC_DCHECK_RUN_ON(incoming_render_queue_.get()); + std::optional frame_to_render = render_buffers_.FrameToRender(); if (frame_to_render) callback_->OnFrame(*frame_to_render); if (render_buffers_.HasPendingFrames()) { uint32_t wait_time = render_buffers_.TimeToNextFrameRelease(); - incoming_render_queue_.PostDelayedHighPrecisionTask( + incoming_render_queue_->PostDelayedHighPrecisionTask( [this]() { Dequeue(); }, TimeDelta::Millis(wait_time)); } } diff --git a/video/render/incoming_video_stream.h b/video/render/incoming_video_stream.h index 4873ae7dcb..c61e4dbe4f 100644 --- a/video/render/incoming_video_stream.h +++ b/video/render/incoming_video_stream.h @@ -13,22 +13,24 @@ #include +#include + #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "rtc_base/race_checker.h" -#include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" #include "video/render/video_render_frames.h" namespace webrtc { -class IncomingVideoStream : public rtc::VideoSinkInterface { +class IncomingVideoStream : public VideoSinkInterface { public: IncomingVideoStream(TaskQueueFactory* task_queue_factory, int32_t delay_ms, - rtc::VideoSinkInterface* callback); + VideoSinkInterface* callback); ~IncomingVideoStream() override; private: @@ -36,11 +38,11 @@ class IncomingVideoStream : public rtc::VideoSinkInterface { void Dequeue(); SequenceChecker main_thread_checker_; - rtc::RaceChecker decoder_race_checker_; + RaceChecker decoder_race_checker_; - VideoRenderFrames render_buffers_ RTC_GUARDED_BY(&incoming_render_queue_); - rtc::VideoSinkInterface* const callback_; - rtc::TaskQueue incoming_render_queue_; + VideoRenderFrames render_buffers_ RTC_GUARDED_BY(incoming_render_queue_); + VideoSinkInterface* const callback_; + std::unique_ptr incoming_render_queue_; }; } // namespace webrtc diff --git a/video/render/video_render_frames.cc b/video/render/video_render_frames.cc index ea1362abbb..d26a050150 100644 --- a/video/render/video_render_frames.cc +++ b/video/render/video_render_frames.cc @@ -49,20 +49,21 @@ VideoRenderFrames::~VideoRenderFrames() { } int32_t VideoRenderFrames::AddFrame(VideoFrame&& new_frame) { - const int64_t time_now = rtc::TimeMillis(); + const int64_t time_now = TimeMillis(); // Drop old frames only when there are other frames in the queue, otherwise, a // really slow system never renders any frames. 
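The OnFrame/Dequeue hunks above show the scheduling pattern: inserting the first buffered frame kicks Dequeue(), which releases the newest due frame and re-posts itself after TimeToNextFrameRelease(). A simplified single-threaded sketch of that loop; the task queue and clock are simulated and the types are stand-ins:

  #include <cstdint>
  #include <cstdio>
  #include <deque>

  struct Frame { int64_t render_time_ms; };

  class RenderLoop {
   public:
    void AddFrame(Frame frame, int64_t now_ms) {
      frames_.push_back(frame);
      // Only the transition from empty to non-empty kicks the loop; later
      // insertions rely on the already-scheduled Dequeue().
      if (frames_.size() == 1) Dequeue(now_ms);
    }

    void Dequeue(int64_t now_ms) {
      // Release the newest frame that is already due; older due frames are
      // skipped, mirroring VideoRenderFrames::FrameToRender().
      bool have_frame = false;
      Frame to_render{};
      while (!frames_.empty() && frames_.front().render_time_ms <= now_ms) {
        to_render = frames_.front();
        frames_.pop_front();
        have_frame = true;
      }
      if (have_frame) {
        std::printf("render frame due at %lld ms\n",
                    static_cast<long long>(to_render.render_time_ms));
      }
      if (!frames_.empty()) {
        // In the real code this is PostDelayedHighPrecisionTask().
        std::printf("re-post Dequeue() in %lld ms\n",
                    static_cast<long long>(frames_.front().render_time_ms - now_ms));
      }
    }

   private:
    std::deque<Frame> frames_;
  };

  int main() {
    RenderLoop loop;
    loop.AddFrame({/*render_time_ms=*/0}, /*now_ms=*/0);   // rendered at once
    loop.AddFrame({/*render_time_ms=*/33}, /*now_ms=*/1);  // not yet due; a delayed Dequeue() is scheduled
    return 0;
  }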
if (!incoming_frames_.empty() && new_frame.render_time_ms() + kOldRenderTimestampMS < time_now) { - RTC_LOG(LS_WARNING) << "Too old frame, timestamp=" << new_frame.timestamp(); + RTC_LOG(LS_WARNING) << "Too old frame, timestamp=" + << new_frame.rtp_timestamp(); ++frames_dropped_; return -1; } if (new_frame.render_time_ms() > time_now + kFutureRenderTimestampMS) { RTC_LOG(LS_WARNING) << "Frame too long into the future, timestamp=" - << new_frame.timestamp(); + << new_frame.rtp_timestamp(); ++frames_dropped_; return -1; } @@ -87,8 +88,8 @@ int32_t VideoRenderFrames::AddFrame(VideoFrame&& new_frame) { return static_cast(incoming_frames_.size()); } -absl::optional VideoRenderFrames::FrameToRender() { - absl::optional render_frame; +std::optional VideoRenderFrames::FrameToRender() { + std::optional render_frame; // Get the newest frame that can be released for rendering. while (!incoming_frames_.empty() && TimeToNextFrameRelease() <= 0) { if (render_frame) { @@ -105,7 +106,7 @@ uint32_t VideoRenderFrames::TimeToNextFrameRelease() { return kEventMaxWaitTimeMs; } const int64_t time_to_release = incoming_frames_.front().render_time_ms() - - render_delay_ms_ - rtc::TimeMillis(); + render_delay_ms_ - TimeMillis(); return time_to_release < 0 ? 0u : static_cast(time_to_release); } diff --git a/video/render/video_render_frames.h b/video/render/video_render_frames.h index 7f48eae496..c69f9dfa7d 100644 --- a/video/render/video_render_frames.h +++ b/video/render/video_render_frames.h @@ -15,8 +15,8 @@ #include #include +#include -#include "absl/types/optional.h" #include "api/video/video_frame.h" namespace webrtc { @@ -32,7 +32,7 @@ class VideoRenderFrames { int32_t AddFrame(VideoFrame&& new_frame); // Get a frame for rendering, or false if it's not time to render. 
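The video_render_frames.cc hunk above keeps two drop checks (frames far in the past when other frames are queued, and frames far in the future) and computes the wait until the next release from the front frame's render time minus the render delay. A standalone sketch of that logic; the threshold constants below are assumptions for illustration:

  #include <cassert>
  #include <cstdint>

  constexpr int64_t kOldRenderTimestampMs = 500;      // assumed "too old" bound
  constexpr int64_t kFutureRenderTimestampMs = 10000; // assumed "too far ahead" bound
  constexpr uint32_t kEventMaxWaitTimeMs = 200;       // assumed idle wait

  bool ShouldDrop(int64_t render_time_ms, int64_t now_ms, bool queue_empty) {
    // Old frames are only dropped when other frames are queued, so a really
    // slow system still renders something.
    if (!queue_empty && render_time_ms + kOldRenderTimestampMs < now_ms)
      return true;
    // Frames scheduled absurdly far into the future are always dropped.
    return render_time_ms > now_ms + kFutureRenderTimestampMs;
  }

  uint32_t TimeToNextFrameRelease(int64_t front_render_time_ms,
                                  int64_t render_delay_ms,
                                  int64_t now_ms,
                                  bool queue_empty) {
    if (queue_empty) return kEventMaxWaitTimeMs;
    const int64_t wait = front_render_time_ms - render_delay_ms - now_ms;
    return wait < 0 ? 0u : static_cast<uint32_t>(wait);
  }

  int main() {
    assert(ShouldDrop(/*render_time_ms=*/0, /*now_ms=*/1000, /*queue_empty=*/false));
    assert(!ShouldDrop(/*render_time_ms=*/0, /*now_ms=*/1000, /*queue_empty=*/true));
    assert(TimeToNextFrameRelease(1100, 10, 1000, /*queue_empty=*/false) == 90u);
    return 0;
  }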
- absl::optional FrameToRender(); + std::optional FrameToRender(); // Returns the number of ms to next frame to render uint32_t TimeToNextFrameRelease(); diff --git a/video/rtp_streams_synchronizer2.cc b/video/rtp_streams_synchronizer2.cc index 0fbb3916cb..5fc75e9f54 100644 --- a/video/rtp_streams_synchronizer2.cc +++ b/video/rtp_streams_synchronizer2.cc @@ -10,7 +10,8 @@ #include "video/rtp_streams_synchronizer2.h" -#include "absl/types/optional.h" +#include + #include "call/syncable.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -41,7 +42,7 @@ RtpStreamsSynchronizer::RtpStreamsSynchronizer(TaskQueueBase* main_queue, Syncable* syncable_video) : task_queue_(main_queue), syncable_video_(syncable_video), - last_stats_log_ms_(rtc::TimeMillis()) { + last_stats_log_ms_(TimeMillis()) { RTC_DCHECK(syncable_video); } @@ -86,7 +87,7 @@ void RtpStreamsSynchronizer::UpdateDelay() { RTC_DCHECK(sync_.get()); bool log_stats = false; - const int64_t now_ms = rtc::TimeMillis(); + const int64_t now_ms = TimeMillis(); if (now_ms - last_stats_log_ms_ > kStatsLogIntervalMs) { last_stats_log_ms_ = now_ms; log_stats = true; @@ -94,7 +95,7 @@ void RtpStreamsSynchronizer::UpdateDelay() { int64_t last_audio_receive_time_ms = audio_measurement_.latest_receive_time_ms; - absl::optional audio_info = syncable_audio_->GetInfo(); + std::optional audio_info = syncable_audio_->GetInfo(); if (!audio_info || !UpdateMeasurements(&audio_measurement_, *audio_info)) { return; } @@ -105,7 +106,7 @@ void RtpStreamsSynchronizer::UpdateDelay() { } int64_t last_video_receive_ms = video_measurement_.latest_receive_time_ms; - absl::optional video_info = syncable_video_->GetInfo(); + std::optional video_info = syncable_video_->GetInfo(); if (!video_info || !UpdateMeasurements(&video_measurement_, *video_info)) { return; } @@ -201,7 +202,7 @@ bool RtpStreamsSynchronizer::GetStreamSyncOffsetInMs( int64_t latest_video_ntp_ms = latest_video_ntp.ToMs(); // Current audio ntp. - int64_t now_ms = rtc::TimeMillis(); + int64_t now_ms = TimeMillis(); latest_audio_ntp_ms += (now_ms - time_ms); // Remove video playout delay. diff --git a/video/rtp_streams_synchronizer2.h b/video/rtp_streams_synchronizer2.h index 7042b1bd9a..0503abe726 100644 --- a/video/rtp_streams_synchronizer2.h +++ b/video/rtp_streams_synchronizer2.h @@ -52,7 +52,7 @@ class RtpStreamsSynchronizer { // Used to check if we're running on the main thread/task queue. // The reason we currently don't use RTC_DCHECK_RUN_ON(task_queue_) is because - // we might be running on an rtc::Thread implementation of TaskQueue, which + // we might be running on an webrtc::Thread implementation of TaskQueue, which // does not consistently set itself as the active TaskQueue. // Instead, we rely on a SequenceChecker for now. 
RTC_NO_UNIQUE_ADDRESS SequenceChecker main_checker_; diff --git a/video/rtp_video_stream_receiver2.cc b/video/rtp_video_stream_receiver2.cc index 70c11e5868..bdd2cc22c6 100644 --- a/video/rtp_video_stream_receiver2.cc +++ b/video/rtp_video_stream_receiver2.cc @@ -11,41 +11,88 @@ #include "video/rtp_video_stream_receiver2.h" #include +#include +#include +#include #include #include +#include +#include #include +#include #include #include "absl/algorithm/container.h" -#include "absl/memory/memory.h" -#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" +#include "api/frame_transformer_interface.h" +#include "api/make_ref_counted.h" +#include "api/rtp_headers.h" +#include "api/rtp_packet_info.h" +#include "api/rtp_packet_infos.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/transport/rtp/corruption_detection_message.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_constants.h" #include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" +#include "api/video/video_timing.h" +#include "call/rtp_config.h" +#include "call/rtp_packet_sink_interface.h" +#include "call/syncable.h" +#include "call/video_receive_stream.h" +#include "common_video/corruption_detection_converters.h" +#include "common_video/frame_instrumentation_data.h" #include "media/base/media_constants.h" +#include "modules/include/module_common_types.h" #include "modules/pacing/packet_router.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" #include "modules/rtp_rtcp/include/receive_statistics.h" -#include "modules/rtp_rtcp/include/rtp_cvo.h" +#include "modules/rtp_rtcp/include/recovered_packet_receiver.h" +#include "modules/rtp_rtcp/include/rtcp_statistics.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h" +#include "modules/rtp_rtcp/source/corruption_detection_extension.h" #include "modules/rtp_rtcp/source/create_video_rtp_depacketizer.h" #include "modules/rtp_rtcp/source/frame_object.h" #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" -#include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h" #include "modules/rtp_rtcp/source/ulpfec_receiver.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h" #include "modules/video_coding/h264_sprop_parameter_sets.h" #include "modules/video_coding/h264_sps_pps_tracker.h" +#include "modules/video_coding/h26x_packet_buffer.h" +#include "modules/video_coding/loss_notification_controller.h" #include 
"modules/video_coding/nack_requester.h" #include "modules/video_coding/packet_buffer.h" +#include "modules/video_coding/rtp_frame_reference_finder.h" #include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/sequence_number_util.h" #include "rtc_base/strings/string_builder.h" -#include "system_wrappers/include/metrics.h" +#include "rtc_base/thread.h" #include "system_wrappers/include/ntp_time.h" +#include "video/buffered_frame_decryptor.h" namespace webrtc { @@ -75,17 +122,15 @@ int PacketBufferMaxSize(const FieldTrialsView& field_trials) { } std::unique_ptr CreateRtpRtcpModule( - Clock* clock, + const Environment& env, ReceiveStatistics* receive_statistics, Transport* outgoing_transport, RtcpRttStats* rtt_stats, RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer, RtcpCnameCallback* rtcp_cname_callback, bool non_sender_rtt_measurement, - uint32_t local_ssrc, - RtcEventLog* rtc_event_log) { + uint32_t local_ssrc) { RtpRtcpInterface::Configuration configuration; - configuration.clock = clock; configuration.audio = false; configuration.receiver_only = true; configuration.receive_statistics = receive_statistics; @@ -96,30 +141,27 @@ std::unique_ptr CreateRtpRtcpModule( configuration.rtcp_cname_callback = rtcp_cname_callback; configuration.local_media_ssrc = local_ssrc; configuration.non_sender_rtt_measurement = non_sender_rtt_measurement; - configuration.event_log = rtc_event_log; - std::unique_ptr rtp_rtcp = - ModuleRtpRtcpImpl2::Create(configuration); + auto rtp_rtcp = std::make_unique(env, configuration); rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound); return rtp_rtcp; } std::unique_ptr MaybeConstructNackModule( + const Environment& env, TaskQueueBase* current_queue, NackPeriodicProcessor* nack_periodic_processor, const NackConfig& nack, - Clock* clock, NackSender* nack_sender, - KeyFrameRequestSender* keyframe_request_sender, - const FieldTrialsView& field_trials) { + KeyFrameRequestSender* keyframe_request_sender) { if (nack.rtp_history_ms == 0) return nullptr; // TODO(bugs.webrtc.org/12420): pass rtp_history_ms to the nack module. 
- return std::make_unique(current_queue, nack_periodic_processor, - clock, nack_sender, - keyframe_request_sender, field_trials); + return std::make_unique( + current_queue, nack_periodic_processor, &env.clock(), nack_sender, + keyframe_request_sender, env.field_trials()); } std::unique_ptr MaybeConstructUlpfecReceiver( @@ -191,7 +233,7 @@ void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendLossNotification( RTC_DCHECK(!lntf_state_) << "SendLossNotification() called twice in a row with no call to " "SendBufferedRtcpFeedback() in between."; - lntf_state_ = absl::make_optional( + lntf_state_ = std::make_optional( last_decoded_seq_num, last_received_seq_num, decodability_flag); } @@ -200,7 +242,7 @@ void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendBufferedRtcpFeedback() { bool request_key_frame = false; std::vector nack_sequence_numbers; - absl::optional lntf_state; + std::optional lntf_state; std::swap(request_key_frame, request_key_frame_); std::swap(nack_sequence_numbers, nack_sequence_numbers_); @@ -232,8 +274,8 @@ void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::ClearLossNotificationState() { } RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( + const Environment& env, TaskQueueBase* current_queue, - Clock* clock, Transport* transport, RtcpRttStats* rtt_stats, PacketRouter* packet_router, @@ -243,62 +285,60 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( RtcpCnameCallback* rtcp_cname_callback, NackPeriodicProcessor* nack_periodic_processor, OnCompleteFrameCallback* complete_frame_callback, - rtc::scoped_refptr frame_decryptor, - rtc::scoped_refptr frame_transformer, - const FieldTrialsView& field_trials, - RtcEventLog* event_log) - : field_trials_(field_trials), + scoped_refptr frame_decryptor, + scoped_refptr frame_transformer) + : env_(env), worker_queue_(current_queue), - clock_(clock), config_(*config), packet_router_(packet_router), - ntp_estimator_(clock), - forced_playout_delay_max_ms_("max_ms", absl::nullopt), - forced_playout_delay_min_ms_("min_ms", absl::nullopt), + ntp_estimator_(&env_.clock()), + forced_playout_delay_max_ms_("max_ms", std::nullopt), + forced_playout_delay_min_ms_("min_ms", std::nullopt), rtp_receive_statistics_(rtp_receive_statistics), ulpfec_receiver_( MaybeConstructUlpfecReceiver(config->rtp.remote_ssrc, config->rtp.red_payload_type, config->rtp.ulpfec_payload_type, this, - clock_)), + &env_.clock())), red_payload_type_(config_.rtp.red_payload_type), packet_sink_(config->rtp.packet_sink_), receiving_(false), last_packet_log_ms_(-1), rtp_rtcp_(CreateRtpRtcpModule( - clock, + env_, rtp_receive_statistics_, transport, rtt_stats, rtcp_packet_type_counter_observer, rtcp_cname_callback, config_.rtp.rtcp_xr.receiver_reference_time_report, - config_.rtp.local_ssrc, - event_log)), + config_.rtp.local_ssrc)), nack_periodic_processor_(nack_periodic_processor), complete_frame_callback_(complete_frame_callback), keyframe_request_method_(config_.rtp.keyframe_method), // TODO(bugs.webrtc.org/10336): Let `rtcp_feedback_buffer_` communicate // directly with `rtp_rtcp_`. 
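The RtcpFeedbackBuffer hunks above buffer key-frame requests, NACKs and a loss-notification state, then flush them by std::swap-ing the members into locals so the buffered state is cleared before anything is sent. A standalone sketch of that buffer-and-flush pattern with a stand-in class:

  #include <cstdint>
  #include <cstdio>
  #include <optional>
  #include <utility>
  #include <vector>

  struct LossNotificationState {
    uint16_t last_decoded_seq_num;
    uint16_t last_received_seq_num;
    bool decodability_flag;
  };

  class FeedbackBuffer {
   public:
    void RequestKeyFrame() { request_key_frame_ = true; }
    void SendNack(const std::vector<uint16_t>& seq_nums) {
      nack_sequence_numbers_.insert(nack_sequence_numbers_.end(),
                                    seq_nums.begin(), seq_nums.end());
    }
    void SendLossNotification(LossNotificationState state) { lntf_state_ = state; }

    void SendBufferedFeedback() {
      bool request_key_frame = false;
      std::vector<uint16_t> nacks;
      std::optional<LossNotificationState> lntf;
      // Take the buffered state atomically with respect to this sequence.
      std::swap(request_key_frame, request_key_frame_);
      std::swap(nacks, nack_sequence_numbers_);
      std::swap(lntf, lntf_state_);

      if (lntf) std::puts("send loss notification");
      if (request_key_frame) std::puts("send key frame request");
      if (!nacks.empty()) std::printf("send NACK for %zu packets\n", nacks.size());
    }

   private:
    bool request_key_frame_ = false;
    std::vector<uint16_t> nack_sequence_numbers_;
    std::optional<LossNotificationState> lntf_state_;
  };

  int main() {
    FeedbackBuffer buffer;
    buffer.SendNack({11, 12});
    buffer.RequestKeyFrame();
    buffer.SendBufferedFeedback();  // flushes everything in one pass
    buffer.SendBufferedFeedback();  // nothing left to send
    return 0;
  }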
rtcp_feedback_buffer_(this, this, this), - nack_module_(MaybeConstructNackModule(current_queue, + nack_module_(MaybeConstructNackModule(env_, + current_queue, nack_periodic_processor, config_.rtp.nack, - clock_, &rtcp_feedback_buffer_, - &rtcp_feedback_buffer_, - field_trials_)), + &rtcp_feedback_buffer_)), packet_buffer_(kPacketBufferStartSize, - PacketBufferMaxSize(field_trials_)), + PacketBufferMaxSize(env_.field_trials())), reference_finder_(std::make_unique()), has_received_frame_(false), frames_decryptable_(false), - absolute_capture_time_interpolator_(clock) { + absolute_capture_time_interpolator_(&env_.clock()) { packet_sequence_checker_.Detach(); - constexpr bool remb_candidate = true; - if (packet_router_) - packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), remb_candidate); + if (packet_router_) { + // Do not register as REMB candidate, this is only done when starting to + // receive. + packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), + /*remb_candidate=*/false); + } RTC_DCHECK(config_.rtp.rtcp_mode != RtcpMode::kOff) << "A stream should not be configured with RTCP disabled. This value is " @@ -316,7 +356,7 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( } ParseFieldTrial( {&forced_playout_delay_max_ms_, &forced_playout_delay_min_ms_}, - field_trials_.Lookup("WebRTC-ForcePlayoutDelay")); + env_.field_trials().Lookup("WebRTC-ForcePlayoutDelay")); if (config_.rtp.lntf.enabled) { loss_notification_controller_ = @@ -326,8 +366,8 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( // Only construct the encrypted receiver if frame encryption is enabled. if (config_.crypto_options.sframe.require_frame_encryption) { - buffered_frame_decryptor_ = - std::make_unique(this, this, field_trials_); + buffered_frame_decryptor_ = std::make_unique( + this, this, env_.field_trials()); if (frame_decryptor != nullptr) { buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor)); } @@ -335,9 +375,9 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( if (frame_transformer) { frame_transformer_delegate_ = - rtc::make_ref_counted( - this, clock_, std::move(frame_transformer), rtc::Thread::Current(), - config_.rtp.remote_ssrc); + make_ref_counted( + this, &env_.clock(), std::move(frame_transformer), + Thread::Current(), config_.rtp.remote_ssrc); frame_transformer_delegate_->Init(); } } @@ -353,44 +393,19 @@ RtpVideoStreamReceiver2::~RtpVideoStreamReceiver2() { void RtpVideoStreamReceiver2::AddReceiveCodec( uint8_t payload_type, VideoCodecType video_codec, - const std::map& codec_params, + const webrtc::CodecParameterMap& codec_params, bool raw_payload) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - if (codec_params.count(cricket::kH264FmtpSpsPpsIdrInKeyframe) > 0 || - field_trials_.IsEnabled("WebRTC-SpsPpsIdrIsH264Keyframe")) { + if (codec_params.count(kH264FmtpSpsPpsIdrInKeyframe) > 0 || + env_.field_trials().IsEnabled("WebRTC-SpsPpsIdrIsH264Keyframe")) { packet_buffer_.ForceSpsPpsIdrIsH264Keyframe(); + sps_pps_idr_is_h264_keyframe_ = true; } payload_type_map_.emplace( payload_type, raw_payload ? 
std::make_unique() : CreateVideoRtpDepacketizer(video_codec)); pt_codec_params_.emplace(payload_type, codec_params); -} - -void RtpVideoStreamReceiver2::RemoveReceiveCodec(uint8_t payload_type) { - RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - auto codec_params_it = pt_codec_params_.find(payload_type); - if (codec_params_it == pt_codec_params_.end()) - return; - - const bool sps_pps_idr_in_key_frame = - codec_params_it->second.count(cricket::kH264FmtpSpsPpsIdrInKeyframe) > 0; - - pt_codec_params_.erase(codec_params_it); - payload_type_map_.erase(payload_type); - - if (sps_pps_idr_in_key_frame) { - bool reset_setting = true; - for (auto& [unused, codec_params] : pt_codec_params_) { - if (codec_params.count(cricket::kH264FmtpSpsPpsIdrInKeyframe) > 0) { - reset_setting = false; - break; - } - } - - if (reset_setting) { - packet_buffer_.ResetSpsPpsIdrIsH264Keyframe(); - } - } + pt_codec_.emplace(payload_type, video_codec); } void RtpVideoStreamReceiver2::RemoveReceiveCodecs() { @@ -399,22 +414,24 @@ void RtpVideoStreamReceiver2::RemoveReceiveCodecs() { pt_codec_params_.clear(); payload_type_map_.clear(); packet_buffer_.ResetSpsPpsIdrIsH264Keyframe(); + h26x_packet_buffer_.reset(); + pt_codec_.clear(); } -absl::optional RtpVideoStreamReceiver2::GetSyncInfo() const { +std::optional RtpVideoStreamReceiver2::GetSyncInfo() const { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); Syncable::Info info; - absl::optional last_sr = + std::optional last_sr = rtp_rtcp_->GetSenderReportStats(); if (!last_sr.has_value()) { - return absl::nullopt; + return std::nullopt; } - info.capture_time_ntp_secs = last_sr->last_remote_timestamp.seconds(); - info.capture_time_ntp_frac = last_sr->last_remote_timestamp.fractions(); + info.capture_time_ntp_secs = last_sr->last_remote_ntp_timestamp.seconds(); + info.capture_time_ntp_frac = last_sr->last_remote_ntp_timestamp.fractions(); info.capture_time_source_clock = last_sr->last_remote_rtp_timestamp; if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_) { - return absl::nullopt; + return std::nullopt; } info.latest_received_capture_timestamp = *last_received_rtp_timestamp_; info.latest_receive_time_ms = last_received_rtp_system_time_->ms(); @@ -428,23 +445,21 @@ RtpVideoStreamReceiver2::ParseGenericDependenciesExtension( const RtpPacketReceived& rtp_packet, RTPVideoHeader* video_header) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - if (rtp_packet.HasExtension()) { - webrtc::DependencyDescriptor dependency_descriptor; + if (DependencyDescriptorMandatory dd_mandatory; + rtp_packet.GetExtension( + &dd_mandatory)) { + const int64_t frame_id = + frame_id_unwrapper_.Unwrap(dd_mandatory.frame_number()); + DependencyDescriptor dependency_descriptor; if (!rtp_packet.GetExtension( video_structure_.get(), &dependency_descriptor)) { - // Descriptor is there, but failed to parse. Either it is invalid, - // or too old packet (after relevant video_structure_ changed), - // or too new packet (before relevant video_structure_ arrived). - // Drop such packet to be on the safe side. - // TODO(bugs.webrtc.org/10342): Stash too new packet. 
- Timestamp now = clock_->CurrentTime(); - if (now - last_logged_failed_to_parse_dd_ > TimeDelta::Seconds(1)) { - last_logged_failed_to_parse_dd_ = now; - RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc() - << " Failed to parse dependency descriptor."; + if (!video_structure_frame_id_ || frame_id < video_structure_frame_id_) { + return kDropPacket; + } else { + return kStashPacket; } - return kDropPacket; } + if (dependency_descriptor.attached_structure != nullptr && !dependency_descriptor.first_packet_in_frame) { RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc() @@ -457,8 +472,6 @@ RtpVideoStreamReceiver2::ParseGenericDependenciesExtension( video_header->is_last_packet_in_frame = dependency_descriptor.last_packet_in_frame; - int64_t frame_id = - frame_id_unwrapper_.Unwrap(dependency_descriptor.frame_number); auto& generic_descriptor_info = video_header->generic.emplace(); generic_descriptor_info.frame_id = frame_id; generic_descriptor_info.spatial_index = @@ -533,24 +546,42 @@ RtpVideoStreamReceiver2::ParseGenericDependenciesExtension( return kHasGenericDescriptor; } -void RtpVideoStreamReceiver2::OnReceivedPayloadData( - rtc::CopyOnWriteBuffer codec_payload, +void RtpVideoStreamReceiver2::SetLastCorruptionDetectionIndex( + const std::variant& + frame_instrumentation_data, + int spatial_idx) { + if (const auto* sync_data = std::get_if( + &frame_instrumentation_data)) { + last_corruption_detection_state_by_layer_[spatial_idx].sequence_index = + sync_data->sequence_index; + } else if (const auto* data = std::get_if( + &frame_instrumentation_data)) { + last_corruption_detection_state_by_layer_[spatial_idx].sequence_index = + data->sequence_index + data->sample_values.size(); + } else { + RTC_DCHECK_NOTREACHED(); + } +} + +bool RtpVideoStreamReceiver2::OnReceivedPayloadData( + CopyOnWriteBuffer codec_payload, const RtpPacketReceived& rtp_packet, - const RTPVideoHeader& video) { + const RTPVideoHeader& video, + int times_nacked) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - auto packet = - std::make_unique(rtp_packet, video); - int64_t unwrapped_rtp_seq_num = rtp_seq_num_unwrapper_.Unwrap(rtp_packet.SequenceNumber()); + auto packet = std::make_unique( + rtp_packet, unwrapped_rtp_seq_num, video); + RtpPacketInfo& packet_info = packet_infos_ .emplace(unwrapped_rtp_seq_num, RtpPacketInfo(rtp_packet.Ssrc(), rtp_packet.Csrcs(), rtp_packet.Timestamp(), - /*receive_time_ms=*/clock_->CurrentTime())) + /*receive_time=*/env_.clock().CurrentTime())) .first->second; // Try to extrapolate absolute capture time if it is missing. @@ -562,7 +593,13 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( // Assume frequency is the same one for all video frames. 
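When the full DependencyDescriptor fails to parse, the branch above now distinguishes packets that predate the frame carrying the active template structure (dropped, as before) from packets that are newer than it (stashed and retried once the structure arrives). A minimal standalone restatement of that decision, with the enum and function names simplified for illustration:

#include <cstdint>
#include <optional>

enum class ParseFailureAction { kDropPacket, kStashPacket };

// Mirrors the new kDropPacket/kStashPacket choice: with no known structure,
// or for a frame id older than the one that carried the current structure,
// the packet is dropped; otherwise it is kept for a later parse attempt.
ParseFailureAction OnDependencyDescriptorParseFailure(
    int64_t frame_id,
    std::optional<int64_t> structure_frame_id) {
  if (!structure_frame_id.has_value() || frame_id < *structure_frame_id) {
    return ParseFailureAction::kDropPacket;
  }
  return ParseFailureAction::kStashPacket;
}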
kVideoPayloadTypeFrequency, rtp_packet.GetExtension())); - + if (packet_info.absolute_capture_time().has_value()) { + packet_info.set_local_capture_clock_offset( + capture_clock_offset_updater_.ConvertsToTimeDela( + capture_clock_offset_updater_.AdjustEstimatedCaptureClockOffset( + packet_info.absolute_capture_time() + ->estimated_capture_clock_offset))); + } RTPVideoHeader& video_header = packet->video_header; video_header.rotation = kVideoRotation_0; video_header.content_type = VideoContentType::UNSPECIFIED; @@ -577,22 +614,30 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( if (!video_header.playout_delay.emplace().Set( TimeDelta::Millis(*forced_playout_delay_min_ms_), TimeDelta::Millis(*forced_playout_delay_max_ms_))) { - video_header.playout_delay = absl::nullopt; + video_header.playout_delay = std::nullopt; } } else { video_header.playout_delay = rtp_packet.GetExtension(); } - ParseGenericDependenciesResult generic_descriptor_state = - ParseGenericDependenciesExtension(rtp_packet, &video_header); - if (!rtp_packet.recovered()) { UpdatePacketReceiveTimestamps( rtp_packet, video_header.frame_type == VideoFrameType::kVideoFrameKey); } - if (generic_descriptor_state == kDropPacket) { - Timestamp now = clock_->CurrentTime(); + ParseGenericDependenciesResult generic_descriptor_state = + ParseGenericDependenciesExtension(rtp_packet, &video_header); + + if (generic_descriptor_state == kStashPacket) { + return true; + } else if (generic_descriptor_state == kDropPacket) { + Timestamp now = env_.clock().CurrentTime(); + if (now - last_logged_failed_to_parse_dd_ > TimeDelta::Seconds(1)) { + last_logged_failed_to_parse_dd_ = now; + RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc() + << ", timestamp: " << rtp_packet.Timestamp() + << " Failed to parse dependency descriptor."; + } if (video_structure_ == nullptr && next_keyframe_request_for_missing_video_structure_ < now) { // No video structure received yet, most likely part of the initial @@ -601,12 +646,10 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( next_keyframe_request_for_missing_video_structure_ = now + TimeDelta::Seconds(1); } - return; + return false; } - // Color space should only be transmitted in the last packet of a frame, - // therefore, neglect it otherwise so that last_color_space_ is not reset by - // mistake. + // Extensions that should only be transmitted in the last packet of a frame. if (video_header.is_last_packet_in_frame) { video_header.color_space = rtp_packet.GetExtension(); if (video_header.color_space || @@ -618,6 +661,50 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( } else if (last_color_space_) { video_header.color_space = last_color_space_; } + + std::optional spatial_id; + if (video_header.generic.has_value()) { + spatial_id = video_header.generic->spatial_index; + if (spatial_id >= kMaxSpatialLayers) { + RTC_LOG(LS_WARNING) << "Invalid spatial id: " << *spatial_id + << ". Ignoring corruption detection mesaage."; + spatial_id.reset(); + } + } else { + spatial_id = 0; + } + + std::optional message = + rtp_packet.GetExtension(); + if (message.has_value() && spatial_id.has_value()) { + if (message->sample_values().empty()) { + video_header.frame_instrumentation_data = + ConvertCorruptionDetectionMessageToFrameInstrumentationSyncData( + *message, last_corruption_detection_state_by_layer_[*spatial_id] + .sequence_index); + } else { + // `OnReceivedPayloadData` might be called several times, however, we + // don't want to increase the sequence index each time. 
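The corruption-detection handling introduced here keeps a little state per spatial layer so the sequence index advances once per frame even when OnReceivedPayloadData() sees the same RTP timestamp again. Below is a simplified model of that rule; the struct and function names are invented, and sync-only messages (empty sample list), which the real code converts through a separate helper, are left out:

#include <cstddef>
#include <cstdint>
#include <optional>

struct CorruptionLayerState {
  int sequence_index = 0;
  std::optional<uint32_t> rtp_timestamp;
};

// Returns the sequence index to attach to this frame, or nullopt when the
// same RTP timestamp was already accounted for (e.g. another packet of a
// frame whose message has been handled).
std::optional<int> IndexForFullMessage(CorruptionLayerState& state,
                                       uint32_t rtp_timestamp,
                                       size_t num_sample_values) {
  if (state.rtp_timestamp.has_value() &&
      *state.rtp_timestamp == rtp_timestamp) {
    return std::nullopt;
  }
  const int index_for_frame = state.sequence_index;
  state.rtp_timestamp = rtp_timestamp;
  // A full message advances the index by the number of transmitted samples,
  // matching SetLastCorruptionDetectionIndex() in the hunk above.
  state.sequence_index = index_for_frame + static_cast<int>(num_sample_values);
  return index_for_frame;
}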
+ if (!last_corruption_detection_state_by_layer_[*spatial_id] + .timestamp.has_value() || + rtp_packet.Timestamp() != + last_corruption_detection_state_by_layer_[*spatial_id] + .timestamp) { + video_header.frame_instrumentation_data = + ConvertCorruptionDetectionMessageToFrameInstrumentationData( + *message, + last_corruption_detection_state_by_layer_[*spatial_id] + .sequence_index); + last_corruption_detection_state_by_layer_[*spatial_id].timestamp = + rtp_packet.Timestamp(); + } + } + + if (video_header.frame_instrumentation_data.has_value()) { + SetLastCorruptionDetectionIndex( + *video_header.frame_instrumentation_data, *spatial_id); + } + } } video_header.video_frame_tracking_id = rtp_packet.GetExtension(); @@ -647,21 +734,13 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( } } - if (nack_module_) { - const bool is_keyframe = - video_header.is_first_packet_in_frame && - video_header.frame_type == VideoFrameType::kVideoFrameKey; - - packet->times_nacked = nack_module_->OnReceivedPacket( - rtp_packet.SequenceNumber(), is_keyframe, rtp_packet.recovered()); - } else { - packet->times_nacked = -1; - } + packet->times_nacked = times_nacked; if (codec_payload.size() == 0) { - NotifyReceiverOfEmptyPacket(packet->seq_num); + NotifyReceiverOfEmptyPacket(packet->seq_num(), + GetCodecFromPayloadType(packet->payload_type)); rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); - return; + return false; } if (packet->codec() == kVideoCodecH264) { @@ -672,10 +751,13 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( last_payload_type_ = packet->payload_type; InsertSpsPpsIntoTracker(packet->payload_type); } + } + if (packet->codec() == kVideoCodecH264 && + !UseH26xPacketBuffer(packet->codec())) { video_coding::H264SpsPpsTracker::FixedBitstream fixed = tracker_.CopyAndFixBitstream( - rtc::MakeArrayView(codec_payload.cdata(), codec_payload.size()), + MakeArrayView(codec_payload.cdata(), codec_payload.size()), &packet->video_header); switch (fixed.action) { @@ -684,7 +766,7 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); [[fallthrough]]; case video_coding::H264SpsPpsTracker::kDrop: - return; + return false; case video_coding::H264SpsPpsTracker::kInsert: packet->video_payload = std::move(fixed.bitstream); break; @@ -696,7 +778,13 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); frame_counter_.Add(packet->timestamp); - OnInsertedPacket(packet_buffer_.InsertPacket(std::move(packet))); + + if (h26x_packet_buffer_ && UseH26xPacketBuffer(packet->codec())) { + OnInsertedPacket(h26x_packet_buffer_->InsertPacket(std::move(packet))); + } else { + OnInsertedPacket(packet_buffer_.InsertPacket(std::move(packet))); + } + return false; } void RtpVideoStreamReceiver2::OnRecoveredPacket( @@ -772,23 +860,46 @@ void RtpVideoStreamReceiver2::OnInsertedPacket( int max_nack_count; int64_t min_recv_time; int64_t max_recv_time; - std::vector> payloads; + std::optional absolute_capture_time_ms; + std::vector> payloads; RtpPacketInfos::vector_type packet_infos; - bool frame_boundary = true; + bool skip_frame = false; for (auto& packet : result.packets) { - // PacketBuffer promisses frame boundaries are correctly set on each - // packet. Document that assumption with the DCHECKs. 
- RTC_DCHECK_EQ(frame_boundary, packet->is_first_packet_in_frame()); - int64_t unwrapped_rtp_seq_num = - rtp_seq_num_unwrapper_.Unwrap(packet->seq_num); - RTC_DCHECK_GT(packet_infos_.count(unwrapped_rtp_seq_num), 0); - RtpPacketInfo& packet_info = packet_infos_[unwrapped_rtp_seq_num]; + if (skip_frame && !packet->is_first_packet_in_frame()) { + continue; + } + skip_frame = false; + + // Every time `FrameDecoded` is called outdated information is cleaned up, + // and because of that `packet_infos_` might not contain any information + // about some of the packets in the assembled frame. To avoid creating a + // frame with missing `packet_infos_`, simply drop this (old/duplicate) + // frame. + int64_t unwrapped_rtp_seq_num = packet->sequence_number; + auto packet_info_it = packet_infos_.find(unwrapped_rtp_seq_num); + if (packet_info_it == packet_infos_.end()) { + skip_frame = true; + continue; + } + + RtpPacketInfo& packet_info = packet_info_it->second; if (packet->is_first_packet_in_frame()) { + payloads.clear(); + packet_infos.clear(); first_packet = packet.get(); max_nack_count = packet->times_nacked; min_recv_time = packet_info.receive_time().ms(); max_recv_time = packet_info.receive_time().ms(); + if (env_.field_trials().IsEnabled("WebRTC-UseAbsCapTimeForG2gMetric") && + packet_info.absolute_capture_time().has_value() && + packet_info.local_capture_clock_offset().has_value()) { + absolute_capture_time_ms = + NtpTime( + packet_info.absolute_capture_time()->absolute_capture_timestamp) + .ToMs() + + packet_info.local_capture_clock_offset()->ms(); + } } else { max_nack_count = std::max(max_nack_count, packet->times_nacked); min_recv_time = std::min(min_recv_time, packet_info.receive_time().ms()); @@ -797,12 +908,14 @@ void RtpVideoStreamReceiver2::OnInsertedPacket( payloads.emplace_back(packet->video_payload); packet_infos.push_back(packet_info); - frame_boundary = packet->is_last_packet_in_frame(); + packet->video_header.absolute_capture_time = + packet_info.absolute_capture_time(); if (packet->is_last_packet_in_frame()) { auto depacketizer_it = payload_type_map_.find(first_packet->payload_type); RTC_CHECK(depacketizer_it != payload_type_map_.end()); + RTC_CHECK(depacketizer_it->second); - rtc::scoped_refptr bitstream = + scoped_refptr bitstream = depacketizer_it->second->AssembleFrame(payloads); if (!bitstream) { // Failed to assemble a frame. Discard and continue. @@ -811,28 +924,28 @@ void RtpVideoStreamReceiver2::OnInsertedPacket( const video_coding::PacketBuffer::Packet& last_packet = *packet; OnAssembledFrame(std::make_unique( - first_packet->seq_num, // - last_packet.seq_num, // - last_packet.marker_bit, // - max_nack_count, // - min_recv_time, // - max_recv_time, // - first_packet->timestamp, // - ntp_estimator_.Estimate(first_packet->timestamp), // - last_packet.video_header.video_timing, // - first_packet->payload_type, // - first_packet->codec(), // - last_packet.video_header.rotation, // - last_packet.video_header.content_type, // - first_packet->video_header, // - last_packet.video_header.color_space, // - RtpPacketInfos(std::move(packet_infos)), // + first_packet->seq_num(), // + last_packet.seq_num(), // + last_packet.marker_bit, // + max_nack_count, // + min_recv_time, // + max_recv_time, // + first_packet->timestamp, // + absolute_capture_time_ms.has_value() + ? 
*absolute_capture_time_ms + : ntp_estimator_.Estimate(first_packet->timestamp), // + last_packet.video_header.video_timing, // + first_packet->payload_type, // + first_packet->codec(), // + last_packet.video_header.rotation, // + last_packet.video_header.content_type, // + first_packet->video_header, // + last_packet.video_header.color_space, // + last_packet.video_header.frame_instrumentation_data, // + RtpPacketInfos(std::move(packet_infos)), // std::move(bitstream))); - payloads.clear(); - packet_infos.clear(); } } - RTC_DCHECK(frame_boundary); if (result.buffer_cleared) { last_received_rtp_system_time_.reset(); last_received_keyframe_rtp_system_time_.reset(); @@ -847,7 +960,7 @@ void RtpVideoStreamReceiver2::OnAssembledFrame( RTC_DCHECK_RUN_ON(&packet_sequence_checker_); RTC_DCHECK(frame); - const absl::optional& descriptor = + const std::optional& descriptor = frame->GetRtpVideoHeader().generic; if (loss_notification_controller_ && descriptor) { @@ -875,7 +988,7 @@ void RtpVideoStreamReceiver2::OnAssembledFrame( // Reset `reference_finder_` if `frame` is new and the codec have changed. if (current_codec_) { bool frame_is_newer = - AheadOf(frame->Timestamp(), last_assembled_frame_rtp_timestamp_); + AheadOf(frame->RtpTimestamp(), last_assembled_frame_rtp_timestamp_); if (frame->codec_type() != current_codec_) { if (frame_is_newer) { @@ -893,11 +1006,11 @@ void RtpVideoStreamReceiver2::OnAssembledFrame( } if (frame_is_newer) { - last_assembled_frame_rtp_timestamp_ = frame->Timestamp(); + last_assembled_frame_rtp_timestamp_ = frame->RtpTimestamp(); } } else { current_codec_ = frame->codec_type(); - last_assembled_frame_rtp_timestamp_ = frame->Timestamp(); + last_assembled_frame_rtp_timestamp_ = frame->RtpTimestamp(); } if (buffered_frame_decryptor_ != nullptr) { @@ -937,23 +1050,23 @@ void RtpVideoStreamReceiver2::OnDecryptionStatusChange( } void RtpVideoStreamReceiver2::SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) { + scoped_refptr frame_decryptor) { // TODO(bugs.webrtc.org/11993): Update callers or post the operation over to // the network thread. 
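With the WebRTC-UseAbsCapTimeForG2gMetric field trial enabled, the frame construction above prefers the sender-reported absolute capture time, adjusted by the estimated remote-to-local clock offset, and only falls back to the RTP-to-NTP estimator when either piece is missing. Stripped of the surrounding types, the selection reduces to the sketch below (parameter names are illustrative):

#include <cstdint>
#include <optional>

// abs_capture_ntp_ms: sender-reported absolute capture time in milliseconds;
// clock_offset_ms: estimated remote-to-local clock offset. Both have to be
// known for the direct value to be used.
int64_t FrameCaptureTimeNtpMs(std::optional<int64_t> abs_capture_ntp_ms,
                              std::optional<int64_t> clock_offset_ms,
                              int64_t ntp_estimate_from_rtp_ms) {
  if (abs_capture_ntp_ms.has_value() && clock_offset_ms.has_value()) {
    return *abs_capture_ntp_ms + *clock_offset_ms;
  }
  return ntp_estimate_from_rtp_ms;
}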
RTC_DCHECK_RUN_ON(&packet_sequence_checker_); if (buffered_frame_decryptor_ == nullptr) { - buffered_frame_decryptor_ = - std::make_unique(this, this, field_trials_); + buffered_frame_decryptor_ = std::make_unique( + this, this, env_.field_trials()); } buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor)); } void RtpVideoStreamReceiver2::SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) { + scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&worker_task_checker_); frame_transformer_delegate_ = - rtc::make_ref_counted( - this, clock_, std::move(frame_transformer), rtc::Thread::Current(), + make_ref_counted( + this, &env_.clock(), std::move(frame_transformer), Thread::Current(), config_.rtp.remote_ssrc); frame_transformer_delegate_->Init(); } @@ -1003,8 +1116,8 @@ void RtpVideoStreamReceiver2::SetNackHistory(TimeDelta history) { nack_module_.reset(); } else if (!nack_module_) { nack_module_ = std::make_unique( - worker_queue_, nack_periodic_processor_, clock_, &rtcp_feedback_buffer_, - &rtcp_feedback_buffer_, field_trials_); + worker_queue_, nack_periodic_processor_, &env_.clock(), + &rtcp_feedback_buffer_, &rtcp_feedback_buffer_, env_.field_trials()); } rtp_receive_statistics_->SetMaxReorderingThreshold( @@ -1031,31 +1144,59 @@ void RtpVideoStreamReceiver2::SetProtectionPayloadTypes( red_payload_type_ = red_payload_type; ulpfec_receiver_ = MaybeConstructUlpfecReceiver(config_.rtp.remote_ssrc, red_payload_type, - ulpfec_payload_type, this, clock_); + ulpfec_payload_type, this, &env_.clock()); } -absl::optional RtpVideoStreamReceiver2::LastReceivedPacketMs() const { +std::optional RtpVideoStreamReceiver2::LastReceivedPacketMs() const { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); if (last_received_rtp_system_time_) { - return absl::optional(last_received_rtp_system_time_->ms()); + return std::optional(last_received_rtp_system_time_->ms()); } - return absl::nullopt; + return std::nullopt; } -absl::optional -RtpVideoStreamReceiver2::LastReceivedFrameRtpTimestamp() const { +std::optional RtpVideoStreamReceiver2::LastReceivedFrameRtpTimestamp() + const { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); return last_received_rtp_timestamp_; } -absl::optional RtpVideoStreamReceiver2::LastReceivedKeyframePacketMs() +std::optional RtpVideoStreamReceiver2::LastReceivedKeyframePacketMs() const { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); if (last_received_keyframe_rtp_system_time_) { - return absl::optional( + return std::optional( last_received_keyframe_rtp_system_time_->ms()); } - return absl::nullopt; + return std::nullopt; +} + +std::optional +RtpVideoStreamReceiver2::GetSenderReportStats() const { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + return rtp_rtcp_->GetSenderReportStats(); +} + +std::optional RtpVideoStreamReceiver2::GetCodecFromPayloadType( + uint8_t payload_type) const { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + auto it = pt_codec_.find(payload_type); + if (it == pt_codec_.end()) { + return std::nullopt; + } + return it->second; +} + +bool RtpVideoStreamReceiver2::UseH26xPacketBuffer( + std::optional codec) const { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + if (codec == kVideoCodecH265) { + return true; + } + if (codec == kVideoCodecH264) { + return env_.field_trials().IsEnabled("WebRTC-Video-H26xPacketBuffer"); + } + return false; } void RtpVideoStreamReceiver2::ManageFrame( @@ -1071,7 +1212,8 @@ void RtpVideoStreamReceiver2::ReceivePacket(const RtpPacketReceived& packet) { // Padding or keep-alive packet. 
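UseH26xPacketBuffer() above picks the packet buffer per codec: H.265 always goes through the new H26xPacketBuffer, H.264 only when the WebRTC-Video-H26xPacketBuffer field trial is enabled, and everything else stays on the existing PacketBuffer. The same decision table as a small standalone function (the enum and names are illustrative):

enum class ReceiveCodec { kH264, kH265, kOther };

// Same table as UseH26xPacketBuffer(): the field-trial flag only matters for
// H.264.
bool UsesH26xPacketBuffer(ReceiveCodec codec, bool h26x_trial_enabled) {
  switch (codec) {
    case ReceiveCodec::kH265:
      return true;
    case ReceiveCodec::kH264:
      return h26x_trial_enabled;
    default:
      return false;
  }
}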
// TODO(nisse): Could drop empty packets earlier, but need to figure out how // they should be counted in stats. - NotifyReceiverOfEmptyPacket(packet.SequenceNumber()); + NotifyReceiverOfEmptyPacket(packet.SequenceNumber(), + GetCodecFromPayloadType(packet.PayloadType())); return; } if (packet.PayloadType() == red_payload_type_) { @@ -1083,15 +1225,51 @@ void RtpVideoStreamReceiver2::ReceivePacket(const RtpPacketReceived& packet) { if (type_it == payload_type_map_.end()) { return; } - absl::optional parsed_payload = - type_it->second->Parse(packet.PayloadBuffer()); - if (parsed_payload == absl::nullopt) { - RTC_LOG(LS_WARNING) << "Failed parsing payload."; - return; - } - OnReceivedPayloadData(std::move(parsed_payload->video_payload), packet, - parsed_payload->video_header); + auto parse_and_insert = [&](const RtpPacketReceived& packet) { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + std::optional parsed_payload = + type_it->second->Parse(packet.PayloadBuffer()); + if (parsed_payload == std::nullopt) { + RTC_LOG(LS_WARNING) << "Failed parsing payload."; + return false; + } + + int times_nacked = nack_module_ + ? nack_module_->OnReceivedPacket( + packet.SequenceNumber(), packet.recovered()) + : -1; + + return OnReceivedPayloadData(std::move(parsed_payload->video_payload), + packet, parsed_payload->video_header, + times_nacked); + }; + + // When the dependency descriptor is used and the descriptor fail to parse + // then `OnReceivedPayloadData` may return true to signal the the packet + // should be retried at a later stage, which is why they are stashed here. + // + // TODO(bugs.webrtc.org/15782): + // This is an ugly solution. The way things should work is for the + // `RtpFrameReferenceFinder` to stash assembled frames until the keyframe with + // the relevant template structure has been received, but unfortunately the + // `frame_transformer_delegate_` is called before the frames are inserted into + // the `RtpFrameReferenceFinder`, and it expects the dependency descriptor to + // be parsed at that stage. + if (parse_and_insert(packet)) { + if (stashed_packets_.size() == 100) { + stashed_packets_.clear(); + } + stashed_packets_.push_back(packet); + } else { + for (auto it = stashed_packets_.begin(); it != stashed_packets_.end();) { + if (parse_and_insert(*it)) { + ++it; // keep in the stash. + } else { + it = stashed_packets_.erase(it); + } + } + } } void RtpVideoStreamReceiver2::ParseAndHandleEncapsulatingHeader( @@ -1105,7 +1283,8 @@ void RtpVideoStreamReceiver2::ParseAndHandleEncapsulatingHeader( if (packet.payload()[0] == ulpfec_receiver_->ulpfec_payload_type()) { // Notify video_receiver about received FEC packets to avoid NACKing these // packets. - NotifyReceiverOfEmptyPacket(packet.SequenceNumber()); + NotifyReceiverOfEmptyPacket(packet.SequenceNumber(), + GetCodecFromPayloadType(packet.PayloadType())); } if (ulpfec_receiver_->AddReceivedRedPacket(packet)) { ulpfec_receiver_->ProcessReceivedFec(); @@ -1115,16 +1294,21 @@ void RtpVideoStreamReceiver2::ParseAndHandleEncapsulatingHeader( // In the case of a video stream without picture ids and no rtx the // RtpFrameReferenceFinder will need to know about padding to // correctly calculate frame references. 
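The ReceivePacket() rework above adds the stash-and-retry flow: a packet whose dependency descriptor cannot be resolved yet is stashed (and the stash is emptied once it reaches 100 entries), while every successfully inserted packet triggers a retry of the stashed ones, erasing those that no longer need stashing. A condensed standalone version of that control flow, with a plain callable standing in for the parse_and_insert lambda and a bare struct for the packet type:

#include <cstddef>
#include <functional>
#include <vector>

struct Packet {};  // Stand-in for RtpPacketReceived.

// `parse_and_insert` returns true when the packet must be retried later
// (descriptor not resolvable yet) and false when it was consumed or dropped.
void HandlePacket(const Packet& packet,
                  std::vector<Packet>& stash,
                  const std::function<bool(const Packet&)>& parse_and_insert) {
  constexpr size_t kMaxStashedPackets = 100;
  if (parse_and_insert(packet)) {
    if (stash.size() == kMaxStashedPackets) {
      stash.clear();
    }
    stash.push_back(packet);
    return;
  }
  // A packet went in; packets stashed earlier may now be resolvable as well.
  for (auto it = stash.begin(); it != stash.end();) {
    if (parse_and_insert(*it)) {
      ++it;  // Still not resolvable, keep it stashed.
    } else {
      it = stash.erase(it);
    }
  }
}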
-void RtpVideoStreamReceiver2::NotifyReceiverOfEmptyPacket(uint16_t seq_num) { +void RtpVideoStreamReceiver2::NotifyReceiverOfEmptyPacket( + uint16_t seq_num, + std::optional codec) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); RTC_DCHECK_RUN_ON(&worker_task_checker_); OnCompleteFrames(reference_finder_->PaddingReceived(seq_num)); - OnInsertedPacket(packet_buffer_.InsertPadding(seq_num)); + if (h26x_packet_buffer_ && UseH26xPacketBuffer(codec)) { + OnInsertedPacket(h26x_packet_buffer_->InsertPadding(seq_num)); + } else { + OnInsertedPacket(packet_buffer_.InsertPadding(seq_num)); + } if (nack_module_) { - nack_module_->OnReceivedPacket(seq_num, /* is_keyframe = */ false, - /* is _recovered = */ false); + nack_module_->OnReceivedPacket(seq_num, /*is_recovered=*/false); } if (loss_notification_controller_) { // TODO(bugs.webrtc.org/10336): Handle empty packets. @@ -1141,28 +1325,27 @@ bool RtpVideoStreamReceiver2::DeliverRtcp(const uint8_t* rtcp_packet, return false; } - rtp_rtcp_->IncomingRtcpPacket( - rtc::MakeArrayView(rtcp_packet, rtcp_packet_length)); + rtp_rtcp_->IncomingRtcpPacket(MakeArrayView(rtcp_packet, rtcp_packet_length)); - absl::optional rtt = rtp_rtcp_->LastRtt(); + std::optional rtt = rtp_rtcp_->LastRtt(); if (!rtt.has_value()) { // Waiting for valid rtt. return true; } - absl::optional last_sr = + std::optional last_sr = rtp_rtcp_->GetSenderReportStats(); if (!last_sr.has_value()) { // Waiting for RTCP. return true; } - int64_t time_since_received = clock_->CurrentNtpInMilliseconds() - - last_sr->last_arrival_timestamp.ToMs(); + int64_t time_since_received = env_.clock().CurrentNtpInMilliseconds() - + last_sr->last_arrival_ntp_timestamp.ToMs(); // Don't use old SRs to estimate time. if (time_since_received <= 1) { - ntp_estimator_.UpdateRtcpTimestamp(*rtt, last_sr->last_remote_timestamp, + ntp_estimator_.UpdateRtcpTimestamp(*rtt, last_sr->last_remote_ntp_timestamp, last_sr->last_remote_rtp_timestamp); - absl::optional remote_to_local_clock_offset = + std::optional remote_to_local_clock_offset = ntp_estimator_.EstimateRemoteToLocalClockOffset(); if (remote_to_local_clock_offset.has_value()) { capture_clock_offset_updater_.SetRemoteToLocalClockOffset( @@ -1213,11 +1396,29 @@ void RtpVideoStreamReceiver2::SignalNetworkState(NetworkState state) { void RtpVideoStreamReceiver2::StartReceive() { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + // |h26x_packet_buffer_| is created here instead of in the ctor because we + // need to know the value of |sps_pps_id_is_h264_keyframe_|. + if (!h26x_packet_buffer_) { + h26x_packet_buffer_ = + std::make_unique(!sps_pps_idr_is_h264_keyframe_); + } + if (!receiving_ && packet_router_) { + // Change REMB candidate egibility. + packet_router_->RemoveReceiveRtpModule(rtp_rtcp_.get()); + packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), + /*remb_candidate=*/true); + } receiving_ = true; } void RtpVideoStreamReceiver2::StopReceive() { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + if (receiving_ && packet_router_) { + // Change REMB candidate egibility. 
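StartReceive() above creates the H26x packet buffer lazily, so the SPS/PPS/IDR keyframe flag set via AddReceiveCodec() is already known, and it only registers the RTP module as a REMB candidate while the stream is receiving; StopReceive() in the following hunk flips the registration back. A compact sketch of that ordering with invented stand-in types (FakePacketRouter and FakeH26xPacketBuffer are not the real classes):

#include <memory>

struct FakePacketRouter {
  void AddReceiveRtpModule(void* /*module*/, bool /*remb_candidate*/) {}
  void RemoveReceiveRtpModule(void* /*module*/) {}
};
struct FakeH26xPacketBuffer {
  explicit FakeH26xPacketBuffer(bool /*idr_only_keyframes_allowed*/) {}
};

struct ReceiverSketch {
  FakePacketRouter* packet_router = nullptr;
  void* rtp_module = nullptr;
  bool receiving = false;
  bool sps_pps_idr_is_h264_keyframe = false;
  std::unique_ptr<FakeH26xPacketBuffer> h26x_packet_buffer;

  void StartReceive() {
    if (!h26x_packet_buffer) {
      // Created here rather than in the constructor because the keyframe
      // flag is only known after the receive codecs have been added.
      h26x_packet_buffer = std::make_unique<FakeH26xPacketBuffer>(
          !sps_pps_idr_is_h264_keyframe);
    }
    if (!receiving && packet_router) {
      packet_router->RemoveReceiveRtpModule(rtp_module);
      packet_router->AddReceiveRtpModule(rtp_module, /*remb_candidate=*/true);
    }
    receiving = true;
  }

  void StopReceive() {
    if (receiving && packet_router) {
      packet_router->RemoveReceiveRtpModule(rtp_module);
      packet_router->AddReceiveRtpModule(rtp_module, /*remb_candidate=*/false);
    }
    receiving = false;
  }
};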
+ packet_router_->RemoveReceiveRtpModule(rtp_rtcp_.get()); + packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), + /*remb_candidate=*/false); + } receiving_ = false; } @@ -1235,7 +1436,7 @@ void RtpVideoStreamReceiver2::InsertSpsPpsIntoTracker(uint8_t payload_type) { H264SpropParameterSets sprop_decoder; auto sprop_base64_it = - codec_params_it->second.find(cricket::kH264FmtpSpropParameterSets); + codec_params_it->second.find(kH264FmtpSpropParameterSets); if (sprop_base64_it == codec_params_it->second.end()) return; @@ -1245,12 +1446,17 @@ void RtpVideoStreamReceiver2::InsertSpsPpsIntoTracker(uint8_t payload_type) { tracker_.InsertSpsPpsNalus(sprop_decoder.sps_nalu(), sprop_decoder.pps_nalu()); + + if (h26x_packet_buffer_ && + UseH26xPacketBuffer(GetCodecFromPayloadType(payload_type))) { + h26x_packet_buffer_->SetSpropParameterSets(sprop_base64_it->second); + } } void RtpVideoStreamReceiver2::UpdatePacketReceiveTimestamps( const RtpPacketReceived& packet, bool is_keyframe) { - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); if (is_keyframe || last_received_keyframe_rtp_timestamp_ == packet.Timestamp()) { last_received_keyframe_rtp_timestamp_ = packet.Timestamp(); @@ -1261,7 +1467,7 @@ void RtpVideoStreamReceiver2::UpdatePacketReceiveTimestamps( // Periodically log the RTP header of incoming packets. if (now.ms() - last_packet_log_ms_ > kPacketLogIntervalMs) { - rtc::StringBuilder ss; + StringBuilder ss; ss << "Packet received on SSRC: " << packet.Ssrc() << " with payload type: " << static_cast(packet.PayloadType()) << ", timestamp: " << packet.Timestamp() diff --git a/video/rtp_video_stream_receiver2.h b/video/rtp_video_stream_receiver2.h index 0178355262..bb0e8a42eb 100644 --- a/video/rtp_video_stream_receiver2.h +++ b/video/rtp_video_stream_receiver2.h @@ -11,27 +11,44 @@ #ifndef VIDEO_RTP_VIDEO_STREAM_RECEIVER2_H_ #define VIDEO_RTP_VIDEO_STREAM_RECEIVER2_H_ +#include +#include +#include #include #include -#include +#include +#include #include -#include "absl/types/optional.h" #include "api/crypto/frame_decryptor_interface.h" +#include "api/environment/environment.h" +#include "api/frame_transformer_interface.h" +#include "api/rtp_headers.h" +#include "api/rtp_packet_info.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/task_queue/task_queue_base.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "api/video/color_space.h" +#include "api/video/encoded_frame.h" +#include "api/video/video_codec_constants.h" #include "api/video/video_codec_type.h" #include "call/rtp_packet_sink_interface.h" #include "call/syncable.h" #include "call/video_receive_stream.h" +#include "common_video/frame_instrumentation_data.h" +#include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/recovered_packet_receiver.h" #include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h" -#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/include/rtcp_statistics.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/absolute_capture_time_interpolator.h" #include "modules/rtp_rtcp/source/capture_clock_offset_updater.h" -#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" +#include "modules/rtp_rtcp/source/frame_object.h" #include 
"modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" @@ -39,10 +56,12 @@ #include "modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "modules/video_coding/h264_sps_pps_tracker.h" +#include "modules/video_coding/h26x_packet_buffer.h" #include "modules/video_coding/loss_notification_controller.h" #include "modules/video_coding/nack_requester.h" #include "modules/video_coding/packet_buffer.h" #include "modules/video_coding/rtp_frame_reference_finder.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/sequence_number_unwrapper.h" #include "rtc_base/system/no_unique_address.h" @@ -78,8 +97,8 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, }; RtpVideoStreamReceiver2( + const Environment& env, TaskQueueBase* current_queue, - Clock* clock, Transport* transport, RtcpRttStats* rtt_stats, // The packet router is optional; if provided, the RtpRtcp module for this @@ -94,17 +113,14 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, // The KeyFrameRequestSender is optional; if not provided, key frame // requests are sent via the internal RtpRtcp module. OnCompleteFrameCallback* complete_frame_callback, - rtc::scoped_refptr frame_decryptor, - rtc::scoped_refptr frame_transformer, - const FieldTrialsView& field_trials, - RtcEventLog* event_log); + scoped_refptr frame_decryptor, + scoped_refptr frame_transformer); ~RtpVideoStreamReceiver2() override; void AddReceiveCodec(uint8_t payload_type, VideoCodecType video_codec, - const std::map& codec_params, + const webrtc::CodecParameterMap& codec_params, bool raw_payload); - void RemoveReceiveCodec(uint8_t payload_type); // Clears state for all receive codecs added via `AddReceiveCodec`. void RemoveReceiveCodecs(); @@ -113,7 +129,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, void StopReceive(); // Produces the transport-related timestamps; current_delay_ms is left unset. - absl::optional GetSyncInfo() const; + std::optional GetSyncInfo() const; bool DeliverRtcp(const uint8_t* rtcp_packet, size_t rtcp_packet_length); @@ -133,9 +149,11 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, void OnRtpPacket(const RtpPacketReceived& packet) override; // Public only for tests. - void OnReceivedPayloadData(rtc::CopyOnWriteBuffer codec_payload, + // Returns true if the packet should be stashed and retried at a later stage. + bool OnReceivedPayloadData(CopyOnWriteBuffer codec_payload, const RtpPacketReceived& rtp_packet, - const RTPVideoHeader& video); + const RTPVideoHeader& video, + int times_nacked); // Implements RecoveredPacketReceiver. void OnRecoveredPacket(const RtpPacketReceived& packet) override; @@ -168,12 +186,12 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, // Optionally set a frame decryptor after a stream has started. This will not // reset the decoder state. void SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor); + scoped_refptr frame_decryptor); // Sets a frame transformer after a stream has started, if no transformer // has previously been set. Does not reset the decoder state. void SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer); + scoped_refptr frame_transformer); // Called by VideoReceiveStreamInterface when stats are updated. 
void UpdateRtt(int64_t max_rtt_ms); @@ -203,9 +221,12 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, int red_payload_type() const; void SetProtectionPayloadTypes(int red_payload_type, int ulpfec_payload_type); - absl::optional LastReceivedPacketMs() const; - absl::optional LastReceivedFrameRtpTimestamp() const; - absl::optional LastReceivedKeyframePacketMs() const; + std::optional LastReceivedPacketMs() const; + std::optional LastReceivedFrameRtpTimestamp() const; + std::optional LastReceivedKeyframePacketMs() const; + + std::optional GetSenderReportStats() + const; private: // Implements RtpVideoFrameReceiver. @@ -275,10 +296,11 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, std::vector nack_sequence_numbers_ RTC_GUARDED_BY(packet_sequence_checker_); - absl::optional lntf_state_ + std::optional lntf_state_ RTC_GUARDED_BY(packet_sequence_checker_); }; enum ParseGenericDependenciesResult { + kStashPacket, kDropPacket, kHasGenericDescriptor, kNoGenericDescriptor @@ -293,7 +315,8 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, // This function assumes that it's being called from only one thread. void ParseAndHandleEncapsulatingHeader(const RtpPacketReceived& packet) RTC_RUN_ON(packet_sequence_checker_); - void NotifyReceiverOfEmptyPacket(uint16_t seq_num) + void NotifyReceiverOfEmptyPacket(uint16_t seq_num, + std::optional codec) RTC_RUN_ON(packet_sequence_checker_); bool IsRedEnabled() const; void InsertSpsPpsIntoTracker(uint8_t payload_type) @@ -308,10 +331,19 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, void UpdatePacketReceiveTimestamps(const RtpPacketReceived& packet, bool is_keyframe) RTC_RUN_ON(packet_sequence_checker_); + void SetLastCorruptionDetectionIndex( + const std::variant& frame_instrumentation_data, + int spatial_idx); + + std::optional GetCodecFromPayloadType( + uint8_t payload_type) const RTC_RUN_ON(packet_sequence_checker_); + bool UseH26xPacketBuffer(std::optional codec) const + RTC_RUN_ON(packet_sequence_checker_); - const FieldTrialsView& field_trials_; + const Environment env_; TaskQueueBase* const worker_queue_; - Clock* const clock_; + // Ownership of this object lies with VideoReceiveStreamInterface, which owns // `this`. const VideoReceiveStreamInterface::Config& config_; @@ -348,7 +380,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, const KeyFrameReqMethod keyframe_request_method_; RtcpFeedbackBuffer rtcp_feedback_buffer_; - // TODO(tommi): Consider absl::optional instead of unique_ptr + // TODO(tommi): Consider std::optional instead of unique_ptr // since nack is usually configured. std::unique_ptr nack_module_ RTC_GUARDED_BY(packet_sequence_checker_); @@ -357,6 +389,11 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, video_coding::PacketBuffer packet_buffer_ RTC_GUARDED_BY(packet_sequence_checker_); + // h26x_packet_buffer_ is applicable to H.264 and H.265. For H.265 it is + // always used but for H.264 it is only used if WebRTC-Video-H26xPacketBuffer + // is enabled, see condition inside UseH26xPacketBuffer(). + std::unique_ptr h26x_packet_buffer_ + RTC_GUARDED_BY(packet_sequence_checker_); UniqueTimestampCounter frame_counter_ RTC_GUARDED_BY(packet_sequence_checker_); SeqNumUnwrapper frame_id_unwrapper_ @@ -368,15 +405,15 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, std::unique_ptr video_structure_ RTC_GUARDED_BY(packet_sequence_checker_); // Frame id of the last frame with the attached video structure. 
- // absl::nullopt when `video_structure_ == nullptr`; - absl::optional video_structure_frame_id_ + // std::nullopt when `video_structure_ == nullptr`; + std::optional video_structure_frame_id_ RTC_GUARDED_BY(packet_sequence_checker_); Timestamp last_logged_failed_to_parse_dd_ RTC_GUARDED_BY(packet_sequence_checker_) = Timestamp::MinusInfinity(); std::unique_ptr reference_finder_ RTC_GUARDED_BY(packet_sequence_checker_); - absl::optional current_codec_ + std::optional current_codec_ RTC_GUARDED_BY(packet_sequence_checker_); uint32_t last_assembled_frame_rtp_timestamp_ RTC_GUARDED_BY(packet_sequence_checker_); @@ -393,19 +430,24 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, // TODO(johan): Remove pt_codec_params_ once // https://bugs.chromium.org/p/webrtc/issues/detail?id=6883 is resolved. // Maps a payload type to a map of out-of-band supplied codec parameters. - std::map> pt_codec_params_ + std::map pt_codec_params_ + RTC_GUARDED_BY(packet_sequence_checker_); + + // Maps payload type to the VideoCodecType. + std::map pt_codec_ RTC_GUARDED_BY(packet_sequence_checker_); + int16_t last_payload_type_ RTC_GUARDED_BY(packet_sequence_checker_) = -1; bool has_received_frame_ RTC_GUARDED_BY(packet_sequence_checker_); - absl::optional last_received_rtp_timestamp_ + std::optional last_received_rtp_timestamp_ RTC_GUARDED_BY(packet_sequence_checker_); - absl::optional last_received_keyframe_rtp_timestamp_ + std::optional last_received_keyframe_rtp_timestamp_ RTC_GUARDED_BY(packet_sequence_checker_); - absl::optional last_received_rtp_system_time_ + std::optional last_received_rtp_system_time_ RTC_GUARDED_BY(packet_sequence_checker_); - absl::optional last_received_keyframe_rtp_system_time_ + std::optional last_received_keyframe_rtp_system_time_ RTC_GUARDED_BY(packet_sequence_checker_); // Handles incoming encrypted frames and forwards them to the @@ -413,7 +455,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, std::unique_ptr buffered_frame_decryptor_ RTC_PT_GUARDED_BY(packet_sequence_checker_); bool frames_decryptable_ RTC_GUARDED_BY(worker_task_checker_); - absl::optional last_color_space_; + std::optional last_color_space_; AbsoluteCaptureTimeInterpolator absolute_capture_time_interpolator_ RTC_GUARDED_BY(packet_sequence_checker_); @@ -423,16 +465,26 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, int64_t last_completed_picture_id_ = 0; - rtc::scoped_refptr + scoped_refptr frame_transformer_delegate_; SeqNumUnwrapper rtp_seq_num_unwrapper_ RTC_GUARDED_BY(packet_sequence_checker_); std::map packet_infos_ RTC_GUARDED_BY(packet_sequence_checker_); + std::vector stashed_packets_ + RTC_GUARDED_BY(packet_sequence_checker_); Timestamp next_keyframe_request_for_missing_video_structure_ = Timestamp::MinusInfinity(); + bool sps_pps_idr_is_h264_keyframe_ = false; + + struct CorruptionDetectionLayerState { + int sequence_index = 0; + std::optional timestamp; + }; + std::array + last_corruption_detection_state_by_layer_; }; } // namespace webrtc diff --git a/video/rtp_video_stream_receiver2_unittest.cc b/video/rtp_video_stream_receiver2_unittest.cc index d82f7bb9a5..068f476196 100644 --- a/video/rtp_video_stream_receiver2_unittest.cc +++ b/video/rtp_video_stream_receiver2_unittest.cc @@ -10,17 +10,45 @@ #include "video/rtp_video_stream_receiver2.h" +#include +#include +#include #include -#include - +#include +#include +#include +#include + +#include "api/array_view.h" +#include "api/environment/environment.h" +#include 
"api/environment/environment_factory.h" +#include "api/frame_transformer_interface.h" +#include "api/make_ref_counted.h" +#include "api/rtp_headers.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/test/mock_frame_transformer.h" +#include "api/transport/rtp/corruption_detection_message.h" +#include "api/transport/rtp/dependency_descriptor.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/color_space.h" +#include "api/video/encoded_frame.h" #include "api/video/video_codec_type.h" #include "api/video/video_frame_type.h" +#include "api/video/video_timing.h" +#include "call/rtp_packet_sink_interface.h" #include "call/test/mock_rtp_packet_sink_interface.h" +#include "call/video_receive_stream.h" +#include "common_video/frame_instrumentation_data.h" #include "common_video/h264/h264_common.h" #include "media/base/media_constants.h" -#include "modules/rtp_rtcp/source/frame_object.h" -#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h" +#include "modules/include/module_common_types.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/source/corruption_detection_extension.h" +#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" #include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_format_vp9.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" @@ -28,33 +56,44 @@ #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "modules/video_coding/include/video_coding_defines.h" -#include "modules/video_coding/rtp_frame_reference_finder.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "modules/video_coding/nack_requester.h" #include "rtc_base/byte_buffer.h" -#include "rtc_base/logging.h" -#include "system_wrappers/include/clock.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "system_wrappers/include/ntp_time.h" +#include "test/explicit_key_value_config.h" #include "test/gmock.h" #include "test/gtest.h" -#include "test/mock_frame_transformer.h" #include "test/mock_transport.h" #include "test/rtcp_packet_parser.h" -#include "test/scoped_key_value_config.h" -#include "test/time_controller/simulated_task_queue.h" #include "test/time_controller/simulated_time_controller.h" +namespace webrtc { + +namespace { + +using test::ExplicitKeyValueConfig; using ::testing::_; +using ::testing::DoubleNear; using ::testing::ElementsAre; using ::testing::Eq; using ::testing::Invoke; using ::testing::SizeIs; using ::testing::Values; -namespace webrtc { - -namespace { - const uint8_t kH264StartCode[] = {0x00, 0x00, 0x00, 0x01}; +// Corruption detection metrics for testing. +constexpr double kStd = 1.0; +constexpr int kLumaThreshold = 5; +constexpr int kChormaThreshold = 3; +constexpr int kVp9PayloadType = 99; +constexpr int kNumSamples = 13; +// 8 bits. 
+constexpr int kMaxSequenceIdx = 127; + std::vector GetAbsoluteCaptureTimestamps(const EncodedFrame* frame) { std::vector result; for (const auto& packet_info : frame->PacketInfos()) { @@ -117,10 +156,9 @@ class MockOnCompleteFrameCallback void ClearExpectedBitstream() { buffer_.Clear(); } void AppendExpectedBitstream(const uint8_t data[], size_t size_in_bytes) { - // TODO(Johan): Let rtc::ByteBuffer handle uint8_t* instead of char*. - buffer_.WriteBytes(reinterpret_cast(data), size_in_bytes); + buffer_.Write(ArrayView(data, size_in_bytes)); } - rtc::ByteBufferWriter buffer_; + ByteBufferWriter buffer_; }; constexpr uint32_t kSsrc = 111; @@ -149,19 +187,21 @@ class RtpVideoStreamReceiver2Test : public ::testing::Test, RtpVideoStreamReceiver2Test() : RtpVideoStreamReceiver2Test("") {} explicit RtpVideoStreamReceiver2Test(std::string field_trials) : time_controller_(Timestamp::Millis(100)), + env_(CreateEnvironment( + std::make_unique(field_trials), + time_controller_.GetClock(), + time_controller_.GetTaskQueueFactory())), task_queue_(time_controller_.GetTaskQueueFactory()->CreateTaskQueue( "RtpVideoStreamReceiver2Test", TaskQueueFactory::Priority::NORMAL)), task_queue_setter_(task_queue_.get()), - field_trials_(field_trials), config_(CreateConfig()) { - rtp_receive_statistics_ = - ReceiveStatistics::Create(Clock::GetRealTimeClock()); + rtp_receive_statistics_ = ReceiveStatistics::Create(&env_.clock()); rtp_video_stream_receiver_ = std::make_unique( - TaskQueueBase::Current(), Clock::GetRealTimeClock(), &mock_transport_, - nullptr, nullptr, &config_, rtp_receive_statistics_.get(), nullptr, - nullptr, &nack_periodic_processor_, &mock_on_complete_frame_callback_, - nullptr, nullptr, field_trials_, nullptr); + env_, TaskQueueBase::Current(), &mock_transport_, nullptr, nullptr, + &config_, rtp_receive_statistics_.get(), nullptr, nullptr, + &nack_periodic_processor_, &mock_on_complete_frame_callback_, nullptr, + nullptr); rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, kVideoCodecGeneric, {}, /*raw_payload=*/false); @@ -181,27 +221,27 @@ class RtpVideoStreamReceiver2Test : public ::testing::Test, // code. 
void AddSps(RTPVideoHeader* video_header, uint8_t sps_id, - rtc::CopyOnWriteBuffer* data) { + CopyOnWriteBuffer* data) { NaluInfo info; info.type = H264::NaluType::kSps; info.sps_id = sps_id; info.pps_id = -1; data->AppendData({H264::NaluType::kSps, sps_id}); - auto& h264 = absl::get(video_header->video_type_header); - h264.nalus[h264.nalus_length++] = info; + auto& h264 = std::get(video_header->video_type_header); + h264.nalus.push_back(info); } void AddPps(RTPVideoHeader* video_header, uint8_t sps_id, uint8_t pps_id, - rtc::CopyOnWriteBuffer* data) { + CopyOnWriteBuffer* data) { NaluInfo info; info.type = H264::NaluType::kPps; info.sps_id = sps_id; info.pps_id = pps_id; data->AppendData({H264::NaluType::kPps, pps_id}); - auto& h264 = absl::get(video_header->video_type_header); - h264.nalus[h264.nalus_length++] = info; + auto& h264 = std::get(video_header->video_type_header); + h264.nalus.push_back(info); } void AddIdr(RTPVideoHeader* video_header, int pps_id) { @@ -209,8 +249,8 @@ class RtpVideoStreamReceiver2Test : public ::testing::Test, info.type = H264::NaluType::kIdr; info.sps_id = -1; info.pps_id = pps_id; - auto& h264 = absl::get(video_header->video_type_header); - h264.nalus[h264.nalus_length++] = info; + auto& h264 = std::get(video_header->video_type_header); + h264.nalus.push_back(info); } void OnRtpPacket(const RtpPacketReceived& packet) override { @@ -229,10 +269,10 @@ class RtpVideoStreamReceiver2Test : public ::testing::Test, } GlobalSimulatedTimeController time_controller_; + Environment env_; std::unique_ptr task_queue_; TokenTaskQueue::CurrentTaskQueueSetter task_queue_setter_; - webrtc::test::ScopedKeyValueConfig field_trials_; VideoReceiveStreamInterface::Config config_; NackPeriodicProcessor nack_periodic_processor_; test::RtcpPacketParser rtcp_packet_parser_; @@ -246,7 +286,6 @@ class RtpVideoStreamReceiver2Test : public ::testing::Test, TEST_F(RtpVideoStreamReceiver2Test, CacheColorSpaceFromLastPacketOfKeyframe) { // Test that color space is cached from the last packet of a key frame and // that it's not reset by padding packets without color space. - constexpr int kVp9PayloadType = 99; const ColorSpace kColorSpace( ColorSpace::PrimaryID::kFILM, ColorSpace::TransferID::kBT2020_12, ColorSpace::MatrixID::kBT2020_NCL, ColorSpace::RangeID::kFull); @@ -307,7 +346,7 @@ TEST_F(RtpVideoStreamReceiver2Test, CacheColorSpaceFromLastPacketOfKeyframe) { received_packet_generator.SetColorSpace(kColorSpace); // Prepare the receiver for VP9. 
- std::map codec_params; + webrtc::CodecParameterMap codec_params; rtp_video_stream_receiver_->AddReceiveCodec(kVp9PayloadType, kVideoCodecVP9, codec_params, /*raw_payload=*/false); @@ -357,9 +396,240 @@ TEST_F(RtpVideoStreamReceiver2Test, CacheColorSpaceFromLastPacketOfKeyframe) { rtp_video_stream_receiver_->OnRtpPacket(delta_frame_packet); } +class ReceivedPacketGenerator { + public: + ReceivedPacketGenerator() = default; + + void SetPayload(const std::vector& payload, + VideoFrameType video_frame_type) { + video_frame_type_ = video_frame_type; + RtpPacketizer::PayloadSizeLimits pay_load_size_limits; + RTPVideoHeaderVP9 rtp_video_header_vp9; + rtp_video_header_vp9.InitRTPVideoHeaderVP9(); + rtp_video_header_vp9.inter_pic_predicted = + (video_frame_type == VideoFrameType::kVideoFrameDelta); + rtp_packetizer_ = std::make_unique( + payload, pay_load_size_limits, rtp_video_header_vp9); + } + + size_t NumPackets() { return rtp_packetizer_->NumPackets(); } + + void SetCorruptionDetectionHeader(const CorruptionDetectionMessage& msg) { + corruption_detection_msg_ = msg; + } + + RtpPacketReceived NextPacket(bool include_corruption_header) { + RtpHeaderExtensionMap extension_map; + extension_map.Register(/*id=*/1); + RtpPacketToSend packet_to_send(&extension_map); + packet_to_send.SetSequenceNumber(sequence_number_++); + packet_to_send.SetSsrc(kSsrc); + packet_to_send.SetPayloadType(kVp9PayloadType); + packet_to_send.SetTimestamp(timestamp_++); + if (include_corruption_header) { + EXPECT_TRUE(packet_to_send.SetExtension( + corruption_detection_msg_)); + } + rtp_packetizer_->NextPacket(&packet_to_send); + + RtpPacketReceived received_packet(&extension_map); + received_packet.Parse(packet_to_send.data(), packet_to_send.size()); + return received_packet; + } + + private: + uint16_t sequence_number_ = 0; + uint32_t timestamp_ = 0; + VideoFrameType video_frame_type_; + CorruptionDetectionMessage corruption_detection_msg_; + std::unique_ptr rtp_packetizer_; +}; + +std::optional GetCorruptionDetectionMessage( + int sequence_idx, + bool interpret_as_MSB) { + CorruptionDetectionMessage::Builder builder; + builder.WithSequenceIndex(sequence_idx); + builder.WithInterpretSequenceIndexAsMostSignificantBits(interpret_as_MSB); + builder.WithStdDev(kStd); + builder.WithLumaErrorThreshold(kLumaThreshold); + builder.WithChromaErrorThreshold(kChormaThreshold); + + double sample_value = 0.5; + std::vector sample_values; + for (int i = 0; i < kNumSamples; i++) { + sample_values.push_back(sample_value); + sample_value += 0.5; + } + builder.WithSampleValues(sample_values); + + std::optional kCorruptionDetectionMsg = + builder.Build(); + return kCorruptionDetectionMsg; +} + +TEST_F(RtpVideoStreamReceiver2Test, + FrameInstrumentationDataGetsPopulatedLSBIncreasedCorrectly) { + const std::vector kKeyFramePayload = {0, 1, 2, 3, 4}; + const std::vector kDeltaFramePayload = {5, 6, 7, 8, 9}; + + // Prepare the receiver for VP9. + webrtc::CodecParameterMap codec_params; + rtp_video_stream_receiver_->AddReceiveCodec(kVp9PayloadType, kVideoCodecVP9, + codec_params, + /*raw_payload=*/false); + + ReceivedPacketGenerator received_packet_generator; + std::optional corruption_detection_msg = + GetCorruptionDetectionMessage( + /*sequence_idx=*/0, /*interpret_as_MSB*/ true); + ASSERT_TRUE(corruption_detection_msg.has_value()); + received_packet_generator.SetCorruptionDetectionHeader( + *corruption_detection_msg); + + // Generate key frame packets. 
+ received_packet_generator.SetPayload(kKeyFramePayload, + VideoFrameType::kVideoFrameKey); + // Have corruption header on the key frame. + RtpPacketReceived key_frame_packet = + received_packet_generator.NextPacket(/*include_corruption_header=*/true); + // Generate delta frame packet. + received_packet_generator.SetPayload(kDeltaFramePayload, + VideoFrameType::kVideoFrameDelta); + // Don't have corruption header on the delta frame (is not a general rule). + RtpPacketReceived delta_frame_packet = + received_packet_generator.NextPacket(/*include_corruption_header=*/false); + + rtp_video_stream_receiver_->StartReceive(); + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kKeyFramePayload.data(), kKeyFramePayload.size()); + + EXPECT_TRUE(key_frame_packet.GetExtension()); + std::unique_ptr key_encoded_frame; + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)) + .WillOnce([&](EncodedFrame* encoded_frame) { + key_encoded_frame = std::make_unique(*encoded_frame); + }); + rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet); + ASSERT_TRUE(key_encoded_frame != nullptr); + std::optional< + std::variant> + data_key_frame = + key_encoded_frame->CodecSpecific()->frame_instrumentation_data; + ASSERT_TRUE(data_key_frame.has_value()); + ASSERT_TRUE( + std::holds_alternative(*data_key_frame)); + FrameInstrumentationData frame_inst_data_key_frame = + std::get(*data_key_frame); + EXPECT_EQ(frame_inst_data_key_frame.sequence_index, 0); + EXPECT_TRUE(frame_inst_data_key_frame.communicate_upper_bits); + EXPECT_THAT(frame_inst_data_key_frame.std_dev, DoubleNear(kStd, 0.1)); + EXPECT_EQ(frame_inst_data_key_frame.luma_error_threshold, kLumaThreshold); + EXPECT_EQ(frame_inst_data_key_frame.chroma_error_threshold, kChormaThreshold); + + mock_on_complete_frame_callback_.ClearExpectedBitstream(); + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kDeltaFramePayload.data(), kDeltaFramePayload.size()); + + EXPECT_FALSE(delta_frame_packet.GetExtension()); + std::unique_ptr delta_encoded_frame; + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)) + .WillOnce([&](EncodedFrame* encoded_frame) { + delta_encoded_frame = std::make_unique(*encoded_frame); + }); + rtp_video_stream_receiver_->OnRtpPacket(delta_frame_packet); + ASSERT_TRUE(delta_encoded_frame != nullptr); + // Not delta frame specific but as this test is designed, second frame + // shouldnt have corruption header. + EXPECT_FALSE(delta_encoded_frame->CodecSpecific() + ->frame_instrumentation_data.has_value()); +} + +TEST_F(RtpVideoStreamReceiver2Test, + FrameInstrumentationDataGetsPopulatedMSBIncreasedCorrectly) { + const std::vector kKeyFramePayload = {0, 1, 2, 3, 4}; + const std::vector kDeltaFramePayload = {5, 6, 7, 8, 9}; + + // Prepare the receiver for VP9. + webrtc::CodecParameterMap codec_params; + rtp_video_stream_receiver_->AddReceiveCodec(kVp9PayloadType, kVideoCodecVP9, + codec_params, + /*raw_payload=*/false); + + ReceivedPacketGenerator received_packet_generator; + std::optional corruption_detection_msg = + GetCorruptionDetectionMessage( + /*sequence_idx=*/0, /*interpret_as_MSB*/ true); + ASSERT_TRUE(corruption_detection_msg.has_value()); + received_packet_generator.SetCorruptionDetectionHeader( + *corruption_detection_msg); + + // Generate key frame packets. + received_packet_generator.SetPayload(kKeyFramePayload, + VideoFrameType::kVideoFrameKey); + // Have corruption header on the key frame. 
+ RtpPacketReceived key_frame_packet = + received_packet_generator.NextPacket(/*include_corruption_header=*/true); + rtp_video_stream_receiver_->StartReceive(); + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kKeyFramePayload.data(), kKeyFramePayload.size()); + rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet); + + RtpPacketReceived delta_frame_packet; + int sequence_idx = 0; + for (int i = 0; i < 10; i++) { + sequence_idx += kNumSamples; + if (sequence_idx > kMaxSequenceIdx) { + sequence_idx = sequence_idx - (kMaxSequenceIdx + 1); + } + corruption_detection_msg = GetCorruptionDetectionMessage( + /*sequence_idx=*/sequence_idx, /*interpret_as_MSB*/ false); + ASSERT_TRUE(corruption_detection_msg.has_value()); + received_packet_generator.SetCorruptionDetectionHeader( + *corruption_detection_msg); + + // Generate delta frame packet. + received_packet_generator.SetPayload(kDeltaFramePayload, + VideoFrameType::kVideoFrameDelta); + // Send corruption header with each frame. + delta_frame_packet = received_packet_generator.NextPacket( + /*include_corruption_header=*/true); + + mock_on_complete_frame_callback_.ClearExpectedBitstream(); + mock_on_complete_frame_callback_.AppendExpectedBitstream( + kDeltaFramePayload.data(), kDeltaFramePayload.size()); + + EXPECT_TRUE( + delta_frame_packet.GetExtension()); + std::unique_ptr delta_encoded_frame; + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)) + .WillOnce([&](EncodedFrame* encoded_frame) { + delta_encoded_frame = std::make_unique(*encoded_frame); + }); + rtp_video_stream_receiver_->OnRtpPacket(delta_frame_packet); + ASSERT_TRUE(delta_encoded_frame != nullptr); + std::optional< + std::variant> + data = delta_encoded_frame->CodecSpecific()->frame_instrumentation_data; + ASSERT_TRUE(data.has_value()); + ASSERT_TRUE(std::holds_alternative(*data)); + FrameInstrumentationData frame_inst_data = + std::get(*data); + if (frame_inst_data.sequence_index < (kMaxSequenceIdx + 1)) { + EXPECT_EQ(frame_inst_data.sequence_index, sequence_idx); + } else { + EXPECT_EQ(frame_inst_data.sequence_index, + sequence_idx + kMaxSequenceIdx + 1); + } + } +} + +// TODO: bugs.webrtc.org/358039777 - Add tests for corruption detection when we +// have scalability. 
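The test above pushes the 7-bit sequence index past its wrap point (kMaxSequenceIdx is 127) and checks that the reconstructed full index is either the raw LSB value or that value plus one wrap of 128. One way such an unwrap can be expressed, shown purely as a hedged sketch and not as the library's conversion code:

// Recovers a non-decreasing full sequence index from the 7 least significant
// bits carried by a message, assuming at most one wrap of 128 between
// consecutive messages.
int UnwrapSequenceIndexLsb(int last_full_index, int lsb_0_to_127) {
  int candidate = (last_full_index & ~0x7F) | lsb_0_to_127;
  if (candidate < last_full_index) {
    candidate += 0x80;  // The 7-bit counter wrapped since the last message.
  }
  return candidate;
}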
+ TEST_F(RtpVideoStreamReceiver2Test, GenericKeyFrame) { RtpPacketReceived rtp_packet; - rtc::CopyOnWriteBuffer data({'1', '2', '3', '4'}); + CopyOnWriteBuffer data({'1', '2', '3', '4'}); rtp_packet.SetPayloadType(kPayloadType); rtp_packet.SetSequenceNumber(1); RTPVideoHeader video_header = @@ -368,7 +638,7 @@ TEST_F(RtpVideoStreamReceiver2Test, GenericKeyFrame) { data.size()); EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); } TEST_F(RtpVideoStreamReceiver2Test, SetProtectionPayloadTypes) { @@ -389,13 +659,13 @@ TEST_F(RtpVideoStreamReceiver2Test, PacketInfoIsPropagatedIntoVideoFrames) { extension_map.Register(kId0); RtpPacketReceived rtp_packet(&extension_map); rtp_packet.SetPayloadType(kPayloadType); - rtc::CopyOnWriteBuffer data({'1', '2', '3', '4'}); + CopyOnWriteBuffer data({'1', '2', '3', '4'}); rtp_packet.SetSequenceNumber(1); rtp_packet.SetTimestamp(1); rtp_packet.SetSsrc(kSsrc); rtp_packet.SetExtension( AbsoluteCaptureTime{kAbsoluteCaptureTimestamp, - /*estimated_capture_clock_offset=*/absl::nullopt}); + /*estimated_capture_clock_offset=*/std::nullopt}); RTPVideoHeader video_header = GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); @@ -407,7 +677,7 @@ TEST_F(RtpVideoStreamReceiver2Test, PacketInfoIsPropagatedIntoVideoFrames) { ElementsAre(kAbsoluteCaptureTimestamp)); })); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); } TEST_F(RtpVideoStreamReceiver2Test, @@ -420,7 +690,7 @@ TEST_F(RtpVideoStreamReceiver2Test, RtpPacketReceived rtp_packet(&extension_map); rtp_packet.SetPayloadType(kPayloadType); - rtc::CopyOnWriteBuffer data({'1', '2', '3', '4'}); + CopyOnWriteBuffer data({'1', '2', '3', '4'}); uint16_t sequence_number = 1; uint32_t rtp_timestamp = 1; rtp_packet.SetSequenceNumber(sequence_number); @@ -428,7 +698,7 @@ TEST_F(RtpVideoStreamReceiver2Test, rtp_packet.SetSsrc(kSsrc); rtp_packet.SetExtension( AbsoluteCaptureTime{kAbsoluteCaptureTimestamp, - /*estimated_capture_clock_offset=*/absl::nullopt}); + /*estimated_capture_clock_offset=*/std::nullopt}); RTPVideoHeader video_header = GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); @@ -436,7 +706,7 @@ TEST_F(RtpVideoStreamReceiver2Test, data.size()); EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); // Rtp packet without absolute capture time. rtp_packet = RtpPacketReceived(&extension_map); @@ -453,7 +723,7 @@ TEST_F(RtpVideoStreamReceiver2Test, EXPECT_THAT(GetAbsoluteCaptureTimestamps(frame), SizeIs(1)); })); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); } TEST_F(RtpVideoStreamReceiver2Test, @@ -474,7 +744,6 @@ TEST_F(RtpVideoStreamReceiver2Test, TEST_F(RtpVideoStreamReceiver2Test, DropsPacketWithRedPayloadTypeAndEmptyPayload) { - const uint8_t kRedPayloadType = 125; config_.rtp.red_payload_type = kRedPayloadType; SetUp(); // re-create rtp_video_stream_receiver with red payload type. // clang-format off @@ -489,7 +758,7 @@ TEST_F(RtpVideoStreamReceiver2Test, RtpPacketReceived packet; // Manually convert to CopyOnWriteBuffer to be sure capacity == size // and asan bot can catch read buffer overflow. 
- EXPECT_TRUE(packet.Parse(rtc::CopyOnWriteBuffer(data))); + EXPECT_TRUE(packet.Parse(CopyOnWriteBuffer(data))); rtp_video_stream_receiver_->StartReceive(); rtp_video_stream_receiver_->OnRtpPacket(packet); // Expect asan doesn't find anything. @@ -498,7 +767,7 @@ TEST_F(RtpVideoStreamReceiver2Test, TEST_F(RtpVideoStreamReceiver2Test, GenericKeyFrameBitstreamError) { RtpPacketReceived rtp_packet; rtp_packet.SetPayloadType(kPayloadType); - rtc::CopyOnWriteBuffer data({'1', '2', '3', '4'}); + CopyOnWriteBuffer data({'1', '2', '3', '4'}); rtp_packet.SetSequenceNumber(1); RTPVideoHeader video_header = GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); @@ -508,7 +777,7 @@ TEST_F(RtpVideoStreamReceiver2Test, GenericKeyFrameBitstreamError) { EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrameFailBitstream(_)); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); } class RtpVideoStreamReceiver2TestH264 @@ -518,17 +787,28 @@ class RtpVideoStreamReceiver2TestH264 RtpVideoStreamReceiver2TestH264() : RtpVideoStreamReceiver2Test(GetParam()) {} }; -INSTANTIATE_TEST_SUITE_P(SpsPpsIdrIsKeyframe, +INSTANTIATE_TEST_SUITE_P(SpsPpsIdrIsKeyframeAndH26xPacketBuffer, RtpVideoStreamReceiver2TestH264, - Values("", "WebRTC-SpsPpsIdrIsH264Keyframe/Enabled/")); + Values("", + "WebRTC-SpsPpsIdrIsH264Keyframe/Enabled/", + "WebRTC-Video-H26xPacketBuffer/Enabled/", + "WebRTC-SpsPpsIdrIsH264Keyframe/Enabled/" + "WebRTC-Video-H26xPacketBuffer/Enabled/")); TEST_P(RtpVideoStreamReceiver2TestH264, InBandSpsPps) { - rtc::CopyOnWriteBuffer sps_data; + constexpr int kH264PayloadType = 98; + webrtc::CodecParameterMap codec_params; + rtp_video_stream_receiver_->AddReceiveCodec(kH264PayloadType, kVideoCodecH264, + codec_params, + /*raw_payload=*/false); + rtp_video_stream_receiver_->StartReceive(); + + CopyOnWriteBuffer sps_data; RtpPacketReceived rtp_packet; RTPVideoHeader sps_video_header = GetDefaultH264VideoHeader(); AddSps(&sps_video_header, 0, &sps_data); rtp_packet.SetSequenceNumber(0); - rtp_packet.SetPayloadType(kPayloadType); + rtp_packet.SetPayloadType(kH264PayloadType); sps_video_header.is_first_packet_in_frame = true; sps_video_header.frame_type = VideoFrameType::kEmptyFrame; mock_on_complete_frame_callback_.AppendExpectedBitstream( @@ -536,12 +816,13 @@ TEST_P(RtpVideoStreamReceiver2TestH264, InBandSpsPps) { mock_on_complete_frame_callback_.AppendExpectedBitstream(sps_data.data(), sps_data.size()); rtp_video_stream_receiver_->OnReceivedPayloadData(sps_data, rtp_packet, - sps_video_header); + sps_video_header, 0); - rtc::CopyOnWriteBuffer pps_data; + CopyOnWriteBuffer pps_data; RTPVideoHeader pps_video_header = GetDefaultH264VideoHeader(); AddPps(&pps_video_header, 0, 1, &pps_data); rtp_packet.SetSequenceNumber(1); + rtp_packet.SetPayloadType(kH264PayloadType); pps_video_header.is_first_packet_in_frame = true; pps_video_header.frame_type = VideoFrameType::kEmptyFrame; mock_on_complete_frame_callback_.AppendExpectedBitstream( @@ -549,12 +830,14 @@ TEST_P(RtpVideoStreamReceiver2TestH264, InBandSpsPps) { mock_on_complete_frame_callback_.AppendExpectedBitstream(pps_data.data(), pps_data.size()); rtp_video_stream_receiver_->OnReceivedPayloadData(pps_data, rtp_packet, - pps_video_header); + pps_video_header, 0); - rtc::CopyOnWriteBuffer idr_data; + CopyOnWriteBuffer idr_data; RTPVideoHeader idr_video_header = GetDefaultH264VideoHeader(); AddIdr(&idr_video_header, 1); rtp_packet.SetSequenceNumber(2); + rtp_packet.SetPayloadType(kH264PayloadType); + 
rtp_packet.SetMarker(true); idr_video_header.is_first_packet_in_frame = true; idr_video_header.is_last_packet_in_frame = true; idr_video_header.frame_type = VideoFrameType::kVideoFrameKey; @@ -566,19 +849,19 @@ TEST_P(RtpVideoStreamReceiver2TestH264, InBandSpsPps) { idr_data.size()); EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet, - idr_video_header); + idr_video_header, 0); } TEST_P(RtpVideoStreamReceiver2TestH264, OutOfBandFmtpSpsPps) { - constexpr int kPayloadType = 99; - std::map codec_params; + constexpr int kH264PayloadType = 99; + webrtc::CodecParameterMap codec_params; // Example parameter sets from https://tools.ietf.org/html/rfc3984#section-8.2 // . - codec_params.insert( - {cricket::kH264FmtpSpropParameterSets, "Z0IACpZTBYmI,aMljiA=="}); - rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, kVideoCodecH264, + codec_params.insert({kH264FmtpSpropParameterSets, "Z0IACpZTBYmI,aMljiA=="}); + rtp_video_stream_receiver_->AddReceiveCodec(kH264PayloadType, kVideoCodecH264, codec_params, /*raw_payload=*/false); + rtp_video_stream_receiver_->StartReceive(); const uint8_t binary_sps[] = {0x67, 0x42, 0x00, 0x0a, 0x96, 0x53, 0x05, 0x89, 0x88}; mock_on_complete_frame_callback_.AppendExpectedBitstream( @@ -594,33 +877,40 @@ TEST_P(RtpVideoStreamReceiver2TestH264, OutOfBandFmtpSpsPps) { RtpPacketReceived rtp_packet; RTPVideoHeader video_header = GetDefaultH264VideoHeader(); AddIdr(&video_header, 0); - rtp_packet.SetPayloadType(kPayloadType); + rtp_packet.SetPayloadType(kH264PayloadType); rtp_packet.SetSequenceNumber(2); + rtp_packet.SetMarker(true); video_header.is_first_packet_in_frame = true; video_header.is_last_packet_in_frame = true; video_header.codec = kVideoCodecH264; video_header.frame_type = VideoFrameType::kVideoFrameKey; - rtc::CopyOnWriteBuffer data({'1', '2', '3'}); + CopyOnWriteBuffer data({'1', '2', '3'}); mock_on_complete_frame_callback_.AppendExpectedBitstream( kH264StartCode, sizeof(kH264StartCode)); mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), data.size()); - EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); + // IDR frames without SPS/PPS are not returned by + // |H26xPacketBuffer.InsertPacket| until SPS and PPS are received when + // WebRTC-SpsPpsIdrIsH264Keyframe is enabled. + if (!env_.field_trials().IsEnabled("WebRTC-SpsPpsIdrIsH264Keyframe") || + !env_.field_trials().IsEnabled("WebRTC-Video-H26xPacketBuffer")) { + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); + } rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); } TEST_P(RtpVideoStreamReceiver2TestH264, ForceSpsPpsIdrIsKeyframe) { - constexpr int kPayloadType = 99; - std::map codec_params; - if (GetParam() == - "") { // Forcing can be done either with field trial or codec_params. - codec_params.insert({cricket::kH264FmtpSpsPpsIdrInKeyframe, ""}); + webrtc::CodecParameterMap codec_params; + // Forcing can be done either with field trial or codec_params. 
+ if (!env_.field_trials().IsEnabled("WebRTC-SpsPpsIdrIsH264Keyframe")) { + codec_params.insert({kH264FmtpSpsPpsIdrInKeyframe, ""}); } rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, kVideoCodecH264, codec_params, /*raw_payload=*/false); - rtc::CopyOnWriteBuffer sps_data; + rtp_video_stream_receiver_->StartReceive(); + CopyOnWriteBuffer sps_data; RtpPacketReceived rtp_packet; RTPVideoHeader sps_video_header = GetDefaultH264VideoHeader(); AddSps(&sps_video_header, 0, &sps_data); @@ -633,9 +923,9 @@ TEST_P(RtpVideoStreamReceiver2TestH264, ForceSpsPpsIdrIsKeyframe) { mock_on_complete_frame_callback_.AppendExpectedBitstream(sps_data.data(), sps_data.size()); rtp_video_stream_receiver_->OnReceivedPayloadData(sps_data, rtp_packet, - sps_video_header); + sps_video_header, 0); - rtc::CopyOnWriteBuffer pps_data; + CopyOnWriteBuffer pps_data; RTPVideoHeader pps_video_header = GetDefaultH264VideoHeader(); AddPps(&pps_video_header, 0, 1, &pps_data); rtp_packet.SetSequenceNumber(1); @@ -646,12 +936,13 @@ TEST_P(RtpVideoStreamReceiver2TestH264, ForceSpsPpsIdrIsKeyframe) { mock_on_complete_frame_callback_.AppendExpectedBitstream(pps_data.data(), pps_data.size()); rtp_video_stream_receiver_->OnReceivedPayloadData(pps_data, rtp_packet, - pps_video_header); + pps_video_header, 0); - rtc::CopyOnWriteBuffer idr_data; + CopyOnWriteBuffer idr_data; RTPVideoHeader idr_video_header = GetDefaultH264VideoHeader(); AddIdr(&idr_video_header, 1); rtp_packet.SetSequenceNumber(2); + rtp_packet.SetMarker(true); idr_video_header.is_first_packet_in_frame = true; idr_video_header.is_last_packet_in_frame = true; idr_video_header.frame_type = VideoFrameType::kVideoFrameKey; @@ -665,24 +956,43 @@ TEST_P(RtpVideoStreamReceiver2TestH264, ForceSpsPpsIdrIsKeyframe) { .WillOnce( [&](EncodedFrame* frame) { EXPECT_TRUE(frame->is_keyframe()); }); rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet, - idr_video_header); + idr_video_header, 0); mock_on_complete_frame_callback_.ClearExpectedBitstream(); mock_on_complete_frame_callback_.AppendExpectedBitstream( kH264StartCode, sizeof(kH264StartCode)); mock_on_complete_frame_callback_.AppendExpectedBitstream(idr_data.data(), idr_data.size()); rtp_packet.SetSequenceNumber(3); - EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) - .WillOnce( - [&](EncodedFrame* frame) { EXPECT_FALSE(frame->is_keyframe()); }); + // IDR frames without SPS/PPS are not returned by + // |H26xPacketBuffer.InsertPacket| until SPS and PPS are received, while + // |PacketBuffer| returns it as a delta frame. 
+ if (env_.field_trials().IsEnabled("WebRTC-Video-H26xPacketBuffer")) { + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame).Times(0); + } else { + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_FALSE(frame->is_keyframe()); }); + } rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet, - idr_video_header); + idr_video_header, 0); } -TEST_F(RtpVideoStreamReceiver2Test, PaddingInMediaStream) { +class RtpVideoStreamReceiver2TestPadding + : public RtpVideoStreamReceiver2Test, + public ::testing::WithParamInterface { + protected: + RtpVideoStreamReceiver2TestPadding() + : RtpVideoStreamReceiver2Test(GetParam()) {} +}; + +INSTANTIATE_TEST_SUITE_P(PaddingInMediaStreamAndH26xPacketBuffer, + RtpVideoStreamReceiver2TestPadding, + Values("", "WebRTC-Video-H26xPacketBuffer/Enabled/")); + +TEST_P(RtpVideoStreamReceiver2TestPadding, PaddingInMediaStream) { RtpPacketReceived rtp_packet; RTPVideoHeader video_header = GetDefaultH264VideoHeader(); - rtc::CopyOnWriteBuffer data({'1', '2', '3'}); + CopyOnWriteBuffer data({'1', '2', '3'}); rtp_packet.SetPayloadType(kPayloadType); rtp_packet.SetSequenceNumber(2); video_header.is_first_packet_in_frame = true; @@ -694,38 +1004,96 @@ TEST_F(RtpVideoStreamReceiver2Test, PaddingInMediaStream) { EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); rtp_packet.SetSequenceNumber(3); rtp_video_stream_receiver_->OnReceivedPayloadData({}, rtp_packet, - video_header); + video_header, 0); rtp_packet.SetSequenceNumber(4); EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); video_header.frame_type = VideoFrameType::kVideoFrameDelta; rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); rtp_packet.SetSequenceNumber(6); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); rtp_packet.SetSequenceNumber(5); rtp_video_stream_receiver_->OnReceivedPayloadData({}, rtp_packet, - video_header); + video_header, 0); +} + +TEST_P(RtpVideoStreamReceiver2TestPadding, EmptyPaddingInMediaStream) { + constexpr int kH264PayloadType = 98; + RtpPacketReceived rtp_packet_idr, rtp_packet_padding, rtp_packet_slice; + // Example Stap-A packet with SPS, PPS, and IDR. + std::vector raw_rtp_with_sps_pps_idr{ + 0x80, 0xe2, 0x13, 0xba, 0x87, 0xa0, 0x0a, 0x8a, 0x00, 0x00, 0x6f, + 0x00, 0x78, 0x00, 0x19, 0x67, 0x42, 0x40, 0x29, 0x95, 0xb8, 0x78, + 0x2f, 0xf9, 0x70, 0x11, 0x00, 0x00, 0x03, 0x00, 0x01, 0x00, 0x00, + 0x03, 0x00, 0x78, 0x8d, 0xa1, 0xc3, 0x2e, 0x00, 0x04, 0x68, 0xce, + 0x3c, 0x80, 0x00, 0x07, 0x05, 0x88, 0x80, 0x03, 0x53, 0xff, 0xff}; + // Example Empty padding packet next Idr. + std::vector raw_rtp_empty_padding{ + 0x80, 0x62, 0x13, 0xbb, 0x87, 0xa0, 0x21, 0x0a, 0x00, 0x00, 0x6f, 0x00}; + // Example Single NALU packet with slice. + std::vector raw_rtp_slice( + {0x80, 0xE2, 0x13, 0xbc, 0x87, 0xa0, 0x21, 0x0a, 0x00, 0x00, 0x6f, + 0x00, 0x01, 0x9a, 0x02, 0x3f, 0xc1, 0x48, 0x9a, 0xeb, 0xea, 0xff}); + + // Example EncodedFrame with SPS, PPS, and IDR. 
+ std::vector expect_frame_with_sps_pps_idr{ + 0x00, 0x00, 0x00, 0x01, 0x67, 0x42, 0x40, 0x29, 0x95, 0xb8, 0x78, 0x2f, + 0xf9, 0x70, 0x11, 0x00, 0x00, 0x03, 0x00, 0x01, 0x00, 0x00, 0x03, 0x00, + 0x78, 0x8d, 0xa1, 0xc3, 0x2e, 0x00, 0x00, 0x00, 0x01, 0x68, 0xce, 0x3c, + 0x80, 0x00, 0x00, 0x00, 0x01, 0x05, 0x88, 0x80, 0x03, 0x53, 0xff, 0xff}; + // Example EncodedFrame with slice. + std::vector expect_frame_with_slice{0x00, 0x00, 0x00, 0x01, 0x01, + 0x9a, 0x02, 0x3f, 0xc1, 0x48, + 0x9a, 0xeb, 0xea, 0xff}; + rtp_packet_idr.Parse(raw_rtp_with_sps_pps_idr.data(), + raw_rtp_with_sps_pps_idr.size()); + rtp_packet_padding.Parse(raw_rtp_empty_padding.data(), + raw_rtp_empty_padding.size()); + rtp_packet_slice.Parse(raw_rtp_slice.data(), raw_rtp_slice.size()); + + // Prepare the receiver for H264. + webrtc::CodecParameterMap codec_params; + rtp_video_stream_receiver_->AddReceiveCodec(kH264PayloadType, kVideoCodecH264, + codec_params, false); + rtp_video_stream_receiver_->StartReceive(); + + // Expect IDR frame. + mock_on_complete_frame_callback_.AppendExpectedBitstream( + expect_frame_with_sps_pps_idr.data(), + expect_frame_with_sps_pps_idr.size()); + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); + + rtp_video_stream_receiver_->OnRtpPacket(rtp_packet_idr); + + rtp_video_stream_receiver_->OnRtpPacket(rtp_packet_padding); + + // Expect single NALU frame. + mock_on_complete_frame_callback_.ClearExpectedBitstream(); + mock_on_complete_frame_callback_.AppendExpectedBitstream( + expect_frame_with_slice.data(), expect_frame_with_slice.size()); + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); + rtp_video_stream_receiver_->OnRtpPacket(rtp_packet_slice); } TEST_F(RtpVideoStreamReceiver2Test, RequestKeyframeIfFirstFrameIsDelta) { RtpPacketReceived rtp_packet; rtp_packet.SetPayloadType(kPayloadType); - rtc::CopyOnWriteBuffer data({'1', '2', '3', '4'}); + CopyOnWriteBuffer data({'1', '2', '3', '4'}); rtp_packet.SetSequenceNumber(1); RTPVideoHeader video_header = GetGenericVideoHeader(VideoFrameType::kVideoFrameDelta); rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); EXPECT_THAT(rtcp_packet_parser_.pli()->num_packets(), Eq(1)); } @@ -734,7 +1102,7 @@ TEST_F(RtpVideoStreamReceiver2Test, RequestKeyframeWhenPacketBufferGetsFull) { RtpPacketReceived rtp_packet; rtp_packet.SetPayloadType(kPayloadType); - rtc::CopyOnWriteBuffer data({'1', '2', '3', '4'}); + CopyOnWriteBuffer data({'1', '2', '3', '4'}); RTPVideoHeader video_header = GetGenericVideoHeader(VideoFrameType::kVideoFrameDelta); // Incomplete frames so that the packet buffer is filling up. 
@@ -744,12 +1112,12 @@ TEST_F(RtpVideoStreamReceiver2Test, RequestKeyframeWhenPacketBufferGetsFull) { while (rtp_packet.SequenceNumber() - start_sequence_number < kPacketBufferMaxSize) { rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); rtp_packet.SetSequenceNumber(rtp_packet.SequenceNumber() + 2); } rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet, - video_header); + video_header, 0); EXPECT_THAT(rtcp_packet_parser_.pli()->num_packets(), Eq(1)); } @@ -920,10 +1288,10 @@ TEST_F(RtpVideoStreamReceiver2Test, ParseGenericDescriptorRawPayload) { TEST_F(RtpVideoStreamReceiver2Test, UnwrapsFrameId) { const std::vector data = {0, 1, 2, 3, 4}; - const int kPayloadType = 123; + const int kPayloadTypeGeneric = 123; - rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, kVideoCodecGeneric, - {}, + rtp_video_stream_receiver_->AddReceiveCodec(kPayloadTypeGeneric, + kVideoCodecGeneric, {}, /*raw_payload=*/true); rtp_video_stream_receiver_->StartReceive(); RtpHeaderExtensionMap extension_map; @@ -947,7 +1315,7 @@ TEST_F(RtpVideoStreamReceiver2Test, UnwrapsFrameId) { mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), data.size()); rtp_packet.SetMarker(true); - rtp_packet.SetPayloadType(kPayloadType); + rtp_packet.SetPayloadType(kPayloadTypeGeneric); rtp_packet.SetSequenceNumber(++rtp_sequence_number); rtp_video_stream_receiver_->OnRtpPacket(rtp_packet); }; @@ -1031,7 +1399,7 @@ TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest, UnwrapsFrameId) { deltaframe1_descriptor.frame_number = 0xfffe; DependencyDescriptor deltaframe2_descriptor; - deltaframe1_descriptor.frame_dependencies = stream_structure.templates[1]; + deltaframe2_descriptor.frame_dependencies = stream_structure.templates[1]; deltaframe2_descriptor.frame_number = 0x0002; // Parser should unwrap frame ids correctly even if packets were reordered by @@ -1144,29 +1512,179 @@ TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest, EXPECT_THAT(rtcp_packet_parser_.pli()->num_packets(), Eq(2)); } +TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest, + RetryStashedPacketsAfterReceivingScalabilityStructure) { + FrameDependencyStructure stream_structure1 = CreateStreamStructure(); + FrameDependencyStructure stream_structure2 = CreateStreamStructure(); + // Make sure template ids for these two structures do not collide: + // adjust structure_id (that is also used as template id offset). 
+ stream_structure1.structure_id = 13; + stream_structure2.structure_id = + stream_structure1.structure_id + stream_structure1.templates.size(); + + DependencyDescriptor keyframe1_descriptor; + keyframe1_descriptor.attached_structure = + std::make_unique(stream_structure1); + keyframe1_descriptor.frame_dependencies = stream_structure1.templates[0]; + keyframe1_descriptor.frame_number = 1; + + DependencyDescriptor keyframe2_descriptor; + keyframe2_descriptor.attached_structure = + std::make_unique(stream_structure2); + keyframe2_descriptor.frame_dependencies = stream_structure2.templates[0]; + keyframe2_descriptor.frame_number = 2; + + DependencyDescriptor deltaframe_descriptor; + deltaframe_descriptor.frame_dependencies = stream_structure2.templates[1]; + deltaframe_descriptor.frame_number = 3; + + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 1); }) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 2); }) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 3); }); + + InjectPacketWith(stream_structure1, keyframe1_descriptor); + InjectPacketWith(stream_structure2, deltaframe_descriptor); + InjectPacketWith(stream_structure2, keyframe2_descriptor); +} + +TEST_F(RtpVideoStreamReceiver2DependencyDescriptorTest, + RetryStashedPacketsAfterReceivingEarlierScalabilityStructure) { + FrameDependencyStructure stream_structure1 = CreateStreamStructure(); + FrameDependencyStructure stream_structure2 = CreateStreamStructure(); + FrameDependencyStructure stream_structure3 = CreateStreamStructure(); + // Make sure template ids for these three structures do not collide: + // adjust structure_id (that is also used as template id offset).
+ stream_structure1.structure_id = 13; + stream_structure2.structure_id = + stream_structure1.structure_id + stream_structure1.templates.size(); + stream_structure3.structure_id = + stream_structure2.structure_id + stream_structure2.templates.size(); + + DependencyDescriptor keyframe1_descriptor; + keyframe1_descriptor.attached_structure = + std::make_unique(stream_structure1); + keyframe1_descriptor.frame_dependencies = stream_structure1.templates[0]; + keyframe1_descriptor.frame_number = 1; + + DependencyDescriptor keyframe2_descriptor; + keyframe2_descriptor.attached_structure = + std::make_unique(stream_structure2); + keyframe2_descriptor.frame_dependencies = stream_structure2.templates[0]; + keyframe2_descriptor.frame_number = 2; + + DependencyDescriptor deltaframe2_descriptor; + deltaframe2_descriptor.frame_dependencies = stream_structure2.templates[1]; + deltaframe2_descriptor.frame_number = 3; + + DependencyDescriptor keyframe3_descriptor; + keyframe3_descriptor.attached_structure = + std::make_unique(stream_structure3); + keyframe3_descriptor.frame_dependencies = stream_structure3.templates[0]; + keyframe3_descriptor.frame_number = 4; + + DependencyDescriptor deltaframe3_descriptor; + deltaframe3_descriptor.frame_dependencies = stream_structure3.templates[1]; + deltaframe3_descriptor.frame_number = 5; + + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 1); }) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 2); }) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 3); }) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 4); }) + .WillOnce( + [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 5); }); + + InjectPacketWith(stream_structure1, keyframe1_descriptor); + InjectPacketWith(stream_structure2, deltaframe2_descriptor); + InjectPacketWith(stream_structure3, deltaframe3_descriptor); + InjectPacketWith(stream_structure2, keyframe2_descriptor); + InjectPacketWith(stream_structure3, keyframe3_descriptor); +} + TEST_F(RtpVideoStreamReceiver2Test, TransformFrame) { - rtc::scoped_refptr mock_frame_transformer = - rtc::make_ref_counted>(); + scoped_refptr mock_frame_transformer = + make_ref_counted>(); EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback(_, config_.rtp.remote_ssrc)); auto receiver = std::make_unique( - TaskQueueBase::Current(), Clock::GetRealTimeClock(), &mock_transport_, - nullptr, nullptr, &config_, rtp_receive_statistics_.get(), nullptr, - nullptr, &nack_periodic_processor_, &mock_on_complete_frame_callback_, - nullptr, mock_frame_transformer, field_trials_, nullptr); + env_, TaskQueueBase::Current(), &mock_transport_, nullptr, nullptr, + &config_, rtp_receive_statistics_.get(), nullptr, nullptr, + &nack_periodic_processor_, &mock_on_complete_frame_callback_, nullptr, + mock_frame_transformer); receiver->AddReceiveCodec(kPayloadType, kVideoCodecGeneric, {}, /*raw_payload=*/false); RtpPacketReceived rtp_packet; rtp_packet.SetPayloadType(kPayloadType); - rtc::CopyOnWriteBuffer data({'1', '2', '3', '4'}); + CopyOnWriteBuffer data({'1', '2', '3', '4'}); + rtp_packet.SetSequenceNumber(1); + RTPVideoHeader video_header = + GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); + mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), + data.size()); + std::unique_ptr transformed_frame; + EXPECT_CALL(*mock_frame_transformer, Transform(_)) + 
.WillOnce(testing::SaveArgByMove<0>(&transformed_frame)); + receiver->OnReceivedPayloadData(data, rtp_packet, video_header, 0); + EXPECT_TRUE(transformed_frame->ReceiveTime().has_value()); + EXPECT_FALSE(transformed_frame->CaptureTime()); + EXPECT_FALSE(transformed_frame->SenderCaptureTimeOffset()); + + EXPECT_CALL(*mock_frame_transformer, + UnregisterTransformedFrameSinkCallback(config_.rtp.remote_ssrc)); + receiver = nullptr; +} + +TEST_F(RtpVideoStreamReceiver2Test, TransformFrameWithAbsoluteCaptureTime) { + scoped_refptr mock_frame_transformer = + make_ref_counted>(); + EXPECT_CALL(*mock_frame_transformer, + RegisterTransformedFrameSinkCallback(_, config_.rtp.remote_ssrc)); + auto receiver = std::make_unique( + env_, TaskQueueBase::Current(), &mock_transport_, nullptr, nullptr, + &config_, rtp_receive_statistics_.get(), nullptr, nullptr, + &nack_periodic_processor_, &mock_on_complete_frame_callback_, nullptr, + mock_frame_transformer); + receiver->AddReceiveCodec(kPayloadType, kVideoCodecGeneric, {}, + /*raw_payload=*/false); + + constexpr int kId0 = 1; + RtpHeaderExtensionMap extension_map; + extension_map.Register(kId0); + RtpPacketReceived rtp_packet(&extension_map); + rtp_packet.SetPayloadType(kPayloadType); + CopyOnWriteBuffer data({'1', '2', '3', '4'}); rtp_packet.SetSequenceNumber(1); + + Timestamp capture_time = Timestamp::Millis(1234); + TimeDelta sender_capture_time_offset = TimeDelta::Millis(56); + AbsoluteCaptureTime absolute_capture_time = { + .absolute_capture_timestamp = Int64MsToUQ32x32(capture_time.ms()), + .estimated_capture_clock_offset = + Int64MsToUQ32x32(sender_capture_time_offset.ms())}; + rtp_packet.SetExtension(absolute_capture_time); + RTPVideoHeader video_header = GetGenericVideoHeader(VideoFrameType::kVideoFrameKey); mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(), data.size()); - EXPECT_CALL(*mock_frame_transformer, Transform(_)); - receiver->OnReceivedPayloadData(data, rtp_packet, video_header); + + std::unique_ptr transformed_frame; + EXPECT_CALL(*mock_frame_transformer, Transform(_)) + .WillOnce(testing::SaveArgByMove<0>(&transformed_frame)); + receiver->OnReceivedPayloadData(data, rtp_packet, video_header, 0); + EXPECT_TRUE(transformed_frame->ReceiveTime().has_value()); + EXPECT_EQ(transformed_frame->CaptureTime(), capture_time); + EXPECT_EQ(transformed_frame->SenderCaptureTimeOffset(), + sender_capture_time_offset); EXPECT_CALL(*mock_frame_transformer, UnregisterTransformedFrameSinkCallback(config_.rtp.remote_ssrc)); @@ -1205,7 +1723,7 @@ INSTANTIATE_TEST_SUITE_P(PlayoutDelay, Values(DefaultBehavior(), OverridePlayoutDelay())); TEST_P(RtpVideoStreamReceiver2TestPlayoutDelay, PlayoutDelay) { - rtc::CopyOnWriteBuffer payload_data({'1', '2', '3', '4'}); + CopyOnWriteBuffer payload_data({'1', '2', '3', '4'}); RtpHeaderExtensionMap extension_map; extension_map.Register(1); RtpPacketToSend packet_to_send(&extension_map); @@ -1233,7 +1751,75 @@ TEST_P(RtpVideoStreamReceiver2TestPlayoutDelay, PlayoutDelay) { EXPECT_EQ(frame->EncodedImage().PlayoutDelay(), expected_playout_delay); })); rtp_video_stream_receiver_->OnReceivedPayloadData( - received_packet.PayloadBuffer(), received_packet, video_header); + received_packet.PayloadBuffer(), received_packet, video_header, 0); +} + +#ifdef RTC_ENABLE_H265 +RTPVideoHeader GetDefaultH265VideoHeader() { + RTPVideoHeader video_header; + video_header.codec = kVideoCodecH265; + return video_header; +} + +class RtpVideoStreamReceiver2TestH265 : public RtpVideoStreamReceiver2Test { + protected: + 
RtpVideoStreamReceiver2TestH265() : RtpVideoStreamReceiver2Test("") {} +}; + +TEST_F(RtpVideoStreamReceiver2TestH265, H265Bitstream) { + constexpr int kH265PayloadType = 98; + webrtc::CodecParameterMap codec_params; + rtp_video_stream_receiver_->AddReceiveCodec(kH265PayloadType, kVideoCodecH265, + codec_params, + /*raw_payload=*/false); + rtp_video_stream_receiver_->StartReceive(); + + // Data is generated by WebCodecs H265 encoder, with 720p fake media capturer. + // IDR is not complete. + constexpr uint8_t vps[] = {0x00, 0x00, 0x00, 0x01, 0x40, 0x01, 0x0c, 0x01, + 0xff, 0xff, 0x21, 0x40, 0x00, 0x00, 0x03, 0x00, + 0x90, 0x00, 0x00, 0x03, 0x00, 0x00, 0x03, 0x00, + 0x7b, 0x3c, 0x0c, 0x00, 0x00, 0x03, 0x00, 0x04, + 0x00, 0x00, 0x03, 0x00, 0x79, 0x40}; + constexpr uint8_t sps[] = { + 0x00, 0x00, 0x00, 0x01, 0x42, 0x01, 0x01, 0x21, 0x40, 0x00, 0x00, 0x03, + 0x00, 0x90, 0x00, 0x00, 0x03, 0x00, 0x00, 0x03, 0x00, 0x7b, 0xa0, 0x02, + 0x80, 0x80, 0x2d, 0x16, 0x8f, 0x92, 0x46, 0xd9, 0x3f, 0xf6, 0x02, 0x80, + 0x10, 0x00, 0x00, 0x03, 0x00, 0x10, 0x00, 0x00, 0x03, 0x01, 0xe6, 0x45, + 0xde, 0xf7, 0xe0, 0x04, 0x65, 0x00, 0x23, 0x28, 0x80}; + constexpr uint8_t pps[] = {0x00, 0x00, 0x00, 0x01, 0x44, 0x01, + 0xc0, 0x25, 0x64, 0xc0, 0xed, 0x90}; + constexpr uint8_t idr[] = {0x00, 0x00, 0x00, 0x01, 0x26, 0x01, 0xaf, + 0xb0, 0x87, 0x11, 0x7a, 0xc1, 0x45, 0x57, + 0x3f, 0xff, 0x57, 0x14, 0x5f, 0xf7, 0x7a, + 0x37, 0xfd, 0xe3, 0xd9}; + + RtpPacketReceived rtp_packet; + rtp_packet.SetPayloadType(kPayloadType); + rtp_packet.SetSequenceNumber(0); + rtp_packet.SetPayloadType(kH265PayloadType); + RTPVideoHeader video_header = GetDefaultH265VideoHeader(); + mock_on_complete_frame_callback_.AppendExpectedBitstream(vps, sizeof(vps)); + rtp_video_stream_receiver_->OnReceivedPayloadData( + CopyOnWriteBuffer(vps, sizeof(vps)), rtp_packet, video_header, 0); + + rtp_packet.SetSequenceNumber(1); + mock_on_complete_frame_callback_.AppendExpectedBitstream(sps, sizeof(sps)); + rtp_video_stream_receiver_->OnReceivedPayloadData( + CopyOnWriteBuffer(sps, sizeof(sps)), rtp_packet, video_header, 0); + + rtp_packet.SetSequenceNumber(2); + mock_on_complete_frame_callback_.AppendExpectedBitstream(pps, sizeof(pps)); + rtp_video_stream_receiver_->OnReceivedPayloadData( + CopyOnWriteBuffer(pps, sizeof(pps)), rtp_packet, video_header, 0); + + rtp_packet.SetSequenceNumber(3); + rtp_packet.SetMarker(true); + mock_on_complete_frame_callback_.AppendExpectedBitstream(idr, sizeof(idr)); + EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_)); + rtp_video_stream_receiver_->OnReceivedPayloadData( + CopyOnWriteBuffer(idr, sizeof(idr)), rtp_packet, video_header, 0); } +#endif // RTC_ENABLE_H265 } // namespace webrtc diff --git a/video/screenshare_loopback.cc b/video/screenshare_loopback.cc index 239e472f6e..da2b2a3276 100644 --- a/video/screenshare_loopback.cc +++ b/video/screenshare_loopback.cc @@ -11,15 +11,16 @@ #include #include +#include #include #include #include "absl/flags/flag.h" #include "absl/flags/parse.h" -#include "absl/types/optional.h" #include "api/test/simulated_network.h" #include "api/test/video_quality_test_fixture.h" #include "api/transport/bitrate_settings.h" +#include "api/units/data_rate.h" #include "api/video_codecs/video_codec.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -28,6 +29,7 @@ #include "test/field_trial.h" #include "test/gtest.h" #include "test/run_test.h" +#include "test/test_flags.h" #include "video/video_quality_test.h" using ::webrtc::BitrateConstraints; @@ -139,8 +141,11 @@ 
ABSL_FLAG(int, link_capacity, 0, "Capacity (kbps) of the fake link. 0 means infinite."); -int LinkCapacityKbps() { - return absl::GetFlag(FLAGS_link_capacity); +webrtc::DataRate LinkCapacity() { + int link_capacity_kbps = absl::GetFlag(FLAGS_link_capacity); + return link_capacity_kbps == 0 + ? webrtc::DataRate::Infinity() + : webrtc::DataRate::KilobitsPerSec(link_capacity_kbps); } ABSL_FLAG(int, queue_size, 0, "Size of the bottleneck link queue in packets."); @@ -256,14 +261,13 @@ ABSL_FLAG(bool, generic_descriptor, false, "Use the generic frame descriptor."); ABSL_FLAG(bool, allow_reordering, false, "Allow packet reordering to occur"); -ABSL_FLAG( - std::string, - force_fieldtrials, - "", - "Field trials control experimental feature code which can be forced. " - "E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/" - " will assign the group Enable to field trial WebRTC-FooFeature. Multiple " - "trials are separated by \"/\""); +ABSL_FLAG(std::string, + clip, + "", + "Name of the clip to show. If empty, use frame generator."); +std::string Clip() { + return absl::GetFlag(FLAGS_clip); +} // Screenshare-specific flags. ABSL_FLAG(int, @@ -306,14 +310,14 @@ ABSL_FLAG(std::string, std::vector Slides() { std::vector slides; std::string slides_list = absl::GetFlag(FLAGS_slides); - rtc::tokenize(slides_list, ',', &slides); + webrtc::tokenize(slides_list, ',', &slides); return slides; } void Loopback() { BuiltInNetworkBehaviorConfig pipe_config; pipe_config.loss_percent = LossPercent(); - pipe_config.link_capacity_kbps = LinkCapacityKbps(); + pipe_config.link_capacity = LinkCapacity(); pipe_config.queue_length_packets = QueueSize(); pipe_config.queue_delay_ms = AvgPropagationDelayMs(); pipe_config.delay_standard_deviation_ms = StdPropagationDelayMs(); @@ -339,11 +343,14 @@ void Loopback() { params.video[0].num_temporal_layers = NumTemporalLayers(); params.video[0].selected_tl = SelectedTL(); params.video[0].min_transmit_bps = MinTransmitBitrateKbps() * 1000; + params.video[0].clip_path = Clip(); params.screenshare[0].enabled = true; params.screenshare[0].generate_slides = GenerateSlides(); params.screenshare[0].slide_change_interval = SlideChangeInterval(); params.screenshare[0].scroll_duration = ScrollDuration(); params.screenshare[0].slides = Slides(); + params.analyzer.test_label = "screenshare"; + params.analyzer.test_durations_secs = DurationSecs(); params.config = pipe_config; params.logging.rtc_event_log_name = RtcEventLogName(); params.logging.rtp_dump_name = RtpDumpName(); @@ -359,15 +366,16 @@ void Loopback() { std::vector SL_descriptors; SL_descriptors.push_back(SL0()); SL_descriptors.push_back(SL1()); - VideoQualityTest::FillScalabilitySettings( + + VideoQualityTest fixture(nullptr); + fixture.FillScalabilitySettings( ¶ms, 0, stream_descriptors, NumStreams(), SelectedStream(), NumSpatialLayers(), SelectedSL(), InterLayerPred(), SL_descriptors); - auto fixture = std::make_unique(nullptr); if (DurationSecs()) { - fixture->RunWithAnalyzer(params); + fixture.RunWithAnalyzer(params); } else { - fixture->RunWithRenderers(params); + fixture.RunWithRenderers(params); } } @@ -375,7 +383,7 @@ int main(int argc, char* argv[]) { ::testing::InitGoogleTest(&argc, argv); absl::ParseCommandLine(argc, argv); - rtc::LogMessage::SetLogToStderr(absl::GetFlag(FLAGS_logs)); + webrtc::LogMessage::SetLogToStderr(absl::GetFlag(FLAGS_logs)); // InitFieldTrialsFromString stores the char*, so the char array must outlive // the application. 
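The link_capacity handling in this file's change is worth calling out: the fake link's capacity moves from an integer kbps field to a webrtc::DataRate, with a flag value of 0 mapped to an unlimited link. A minimal sketch of that mapping, assuming only the types, fields, and DataRate factory functions already visible in this diff (the helper name MakePipeConfig is illustrative, not part of the patch):

// Sketch only: builds a fake-link config the new way, where capacity is a
// DataRate and 0 kbps means "no limit". Type and field names are taken from
// the diff above; the helper function itself is hypothetical.
#include "api/test/simulated_network.h"
#include "api/units/data_rate.h"

webrtc::BuiltInNetworkBehaviorConfig MakePipeConfig(int link_capacity_kbps,
                                                    int queue_delay_ms) {
  webrtc::BuiltInNetworkBehaviorConfig config;
  config.link_capacity =
      link_capacity_kbps == 0
          ? webrtc::DataRate::Infinity()
          : webrtc::DataRate::KilobitsPerSec(link_capacity_kbps);
  config.queue_delay_ms = queue_delay_ms;
  return config;
}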
diff --git a/video/send_delay_stats.h b/video/send_delay_stats.h index f5781bba02..ac224279ba 100644 --- a/video/send_delay_stats.h +++ b/video/send_delay_stats.h @@ -32,10 +32,10 @@ namespace webrtc { // TODO(bugs.webrtc.org/11993): OnSendPacket and OnSentPacket will eventually // be called consistently on the same thread. Once we're there, we should be // able to avoid locking (at least for the fast path). -class SendDelayStats : public SendPacketObserver { +class SendDelayStats { public: explicit SendDelayStats(Clock* clock); - ~SendDelayStats() override; + ~SendDelayStats(); // Adds the configured ssrcs for the rtp streams. // Stats will be calculated for these streams. @@ -44,12 +44,8 @@ class SendDelayStats : public SendPacketObserver { // Called when a packet is sent (leaving socket). bool OnSentPacket(int packet_id, Timestamp time); - protected: - // From SendPacketObserver. // Called when a packet is sent to the transport. - void OnSendPacket(uint16_t packet_id, - Timestamp capture_time, - uint32_t ssrc) override; + void OnSendPacket(uint16_t packet_id, Timestamp capture_time, uint32_t ssrc); private: // Map holding sent packets (mapped by sequence number). diff --git a/video/send_delay_stats_unittest.cc b/video/send_delay_stats_unittest.cc index 288bc5fd8d..e23467e71c 100644 --- a/video/send_delay_stats_unittest.cc +++ b/video/send_delay_stats_unittest.cc @@ -11,6 +11,7 @@ #include "video/send_delay_stats.h" #include +#include #include #include "call/rtp_config.h" @@ -54,8 +55,7 @@ class SendDelayStatsTest : public ::testing::Test { } void OnSendPacket(uint16_t id, uint32_t ssrc, Timestamp capture) { - SendPacketObserver* observer = stats_.get(); - observer->OnSendPacket(id, capture, ssrc); + stats_->OnSendPacket(id, capture, ssrc); } bool OnSentPacket(uint16_t id) { diff --git a/video/send_statistics_proxy.cc b/video/send_statistics_proxy.cc index b857c0535b..57016db103 100644 --- a/video/send_statistics_proxy.cc +++ b/video/send_statistics_proxy.cc @@ -47,6 +47,7 @@ enum HistogramCodecType { kVideoVp9 = 2, kVideoH264 = 3, kVideoAv1 = 4, + kVideoH265 = 5, kVideoMax = 64, }; @@ -76,6 +77,8 @@ HistogramCodecType PayloadNameToHistogramCodecType( return kVideoH264; case kVideoCodecAV1: return kVideoAv1; + case kVideoCodecH265: + return kVideoH265; default: return kVideoUnknown; } @@ -94,43 +97,41 @@ bool IsForcedFallbackPossible(const CodecSpecificInfo* codec_info, codec_info->codecSpecific.VP8.temporalIdx == kNoTemporalIdx); } -absl::optional GetFallbackMaxPixels(const std::string& group) { +std::optional GetFallbackMaxPixels(const std::string& group) { if (group.empty()) - return absl::nullopt; + return std::nullopt; int min_pixels; int max_pixels; int min_bps; if (sscanf(group.c_str(), "-%d,%d,%d", &min_pixels, &max_pixels, &min_bps) != 3) { - return absl::optional(); + return std::optional(); } if (min_pixels <= 0 || max_pixels <= 0 || max_pixels < min_pixels) - return absl::optional(); + return std::optional(); - return absl::optional(max_pixels); + return std::optional(max_pixels); } -absl::optional GetFallbackMaxPixelsIfFieldTrialEnabled( +std::optional GetFallbackMaxPixelsIfFieldTrialEnabled( const webrtc::FieldTrialsView& field_trials) { std::string group = field_trials.Lookup(kVp8ForcedFallbackEncoderFieldTrial); return (absl::StartsWith(group, "Enabled")) ? 
GetFallbackMaxPixels(group.substr(7)) - : absl::optional(); + : std::optional(); } -absl::optional GetFallbackMaxPixelsIfFieldTrialDisabled( +std::optional GetFallbackMaxPixelsIfFieldTrialDisabled( const webrtc::FieldTrialsView& field_trials) { std::string group = field_trials.Lookup(kVp8ForcedFallbackEncoderFieldTrial); return (absl::StartsWith(group, "Disabled")) ? GetFallbackMaxPixels(group.substr(8)) - : absl::optional(); + : std::optional(); } } // namespace -const int SendStatisticsProxy::kStatsTimeoutMs = 5000; - SendStatisticsProxy::SendStatisticsProxy( Clock* clock, const VideoSendStream::Config& config, @@ -172,6 +173,9 @@ SendStatisticsProxy::~SendStatisticsProxy() { SendStatisticsProxy::FallbackEncoderInfo::FallbackEncoderInfo() = default; +SendStatisticsProxy::Trackers::Trackers() + : encoded_frame_rate(kBucketSizeMs, kBucketCount) {} + SendStatisticsProxy::UmaSamplesContainer::UmaSamplesContainer( const char* prefix, const VideoSendStream::Stats& stats, @@ -267,7 +271,7 @@ bool SendStatisticsProxy::UmaSamplesContainer::InsertEncodedFrame( // Check for jump in timestamp. if (!encoded_frames_.empty()) { uint32_t oldest_timestamp = encoded_frames_.begin()->first; - if (ForwardDiff(oldest_timestamp, encoded_frame.Timestamp()) > + if (ForwardDiff(oldest_timestamp, encoded_frame.RtpTimestamp()) > kMaxEncodedFrameTimestampDiff) { // Gap detected, clear frames to have a sequence where newest timestamp // is not too far away from oldest in order to distinguish old and new. @@ -275,11 +279,11 @@ bool SendStatisticsProxy::UmaSamplesContainer::InsertEncodedFrame( } } - auto it = encoded_frames_.find(encoded_frame.Timestamp()); + auto it = encoded_frames_.find(encoded_frame.RtpTimestamp()); if (it == encoded_frames_.end()) { // First frame with this timestamp. encoded_frames_.insert( - std::make_pair(encoded_frame.Timestamp(), + std::make_pair(encoded_frame.RtpTimestamp(), Frame(now_ms, encoded_frame._encodedWidth, encoded_frame._encodedHeight, simulcast_idx))); sent_fps_counter_.Add(1); @@ -302,7 +306,7 @@ void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms( const int kIndex = uma_prefix_ == kScreenPrefix ? 
1 : 0; const int kMinRequiredPeriodicSamples = 6; char log_stream_buf[8 * 1024]; - rtc::SimpleStringBuilder log_stream(log_stream_buf); + SimpleStringBuilder log_stream(log_stream_buf); int in_width = input_width_counter_.Avg(kMinRequiredMetricsSamples); int in_height = input_height_counter_.Avg(kMinRequiredMetricsSamples); if (in_width != -1) { @@ -654,6 +658,10 @@ void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms( << current_stats.frames_dropped_by_capturer << "\n"; RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.Capturer", current_stats.frames_dropped_by_capturer); + log_stream << uma_prefix_ << "DroppedFrames.BadTimestamp" + << current_stats.frames_dropped_by_bad_timestamp << "\n"; + RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.BadTimestamp", + current_stats.frames_dropped_by_bad_timestamp); log_stream << uma_prefix_ << "DroppedFrames.EncoderQueue " << current_stats.frames_dropped_by_encoder_queue << "\n"; RTC_HISTOGRAMS_COUNTS_1000(kIndex, uma_prefix_ + "DroppedFrames.EncoderQueue", @@ -753,25 +761,25 @@ VideoSendStream::Stats SendStatisticsProxy::GetStats() { stats_.quality_limitation_durations_ms = quality_limitation_reason_tracker_.DurationsMs(); - for (auto& substream : stats_.substreams) { - uint32_t ssrc = substream.first; - if (encoded_frame_rate_trackers_.count(ssrc) > 0) { - substream.second.encode_frame_rate = - encoded_frame_rate_trackers_[ssrc]->ComputeRate(); + for (auto& [ssrc, substream] : stats_.substreams) { + if (auto it = trackers_.find(ssrc); it != trackers_.end()) { + substream.encode_frame_rate = it->second.encoded_frame_rate.ComputeRate(); } } return stats_; } +void SendStatisticsProxy::SetStats(const VideoSendStream::Stats& stats) { + MutexLock lock(&mutex_); + stats_ = stats; +} + void SendStatisticsProxy::PurgeOldStats() { - int64_t old_stats_ms = clock_->TimeInMilliseconds() - kStatsTimeoutMs; - for (std::map::iterator it = - stats_.substreams.begin(); - it != stats_.substreams.end(); ++it) { - uint32_t ssrc = it->first; - if (update_times_[ssrc].resolution_update_ms <= old_stats_ms) { - it->second.width = 0; - it->second.height = 0; + Timestamp now = clock_->CurrentTime(); + for (auto& [ssrc, substream] : stats_.substreams) { + if (now - trackers_[ssrc].resolution_update >= kStatsTimeout) { + substream.width = 0; + substream.height = 0; } } } @@ -969,16 +977,13 @@ void SendStatisticsProxy::OnSendEncodedImage( if (!stats) return; - if (encoded_frame_rate_trackers_.count(ssrc) == 0) { - encoded_frame_rate_trackers_[ssrc] = - std::make_unique(kBucketSizeMs, kBucketCount); - } + Trackers& track = trackers_[ssrc]; stats->frames_encoded++; stats->total_encode_time_ms += encoded_image.timing_.encode_finish_ms - encoded_image.timing_.encode_start_ms; stats->scalability_mode = - codec_info ? codec_info->scalability_mode : absl::nullopt; + codec_info ? codec_info->scalability_mode : std::nullopt; // Report resolution of the top spatial layer. 
bool is_top_spatial_layer = codec_info == nullptr || codec_info->end_of_picture; @@ -986,7 +991,7 @@ void SendStatisticsProxy::OnSendEncodedImage( if (!stats->width || !stats->height || is_top_spatial_layer) { stats->width = encoded_image._encodedWidth; stats->height = encoded_image._encodedHeight; - update_times_[ssrc].resolution_update_ms = clock_->TimeInMilliseconds(); + track.resolution_update = clock_->CurrentTime(); } uma_container_->key_frame_counter_.Add(encoded_image._frameType == @@ -1036,10 +1041,11 @@ void SendStatisticsProxy::OnSendEncodedImage( } // is_top_spatial_layer pertains only to SVC, will always be true for // simulcast. - if (is_top_spatial_layer) - encoded_frame_rate_trackers_[ssrc]->AddSamples(1); + if (is_top_spatial_layer) { + track.encoded_frame_rate.AddSamples(1); + } - absl::optional downscales = + std::optional downscales = adaptation_limitations_.MaskedQualityCounts().resolution_adaptations; stats_.bw_limited_resolution |= (downscales.has_value() && downscales.value() > 0); @@ -1062,7 +1068,7 @@ void SendStatisticsProxy::OnEncoderImplementationChanged( // Clear cached scalability mode values, they may no longer be accurate. for (auto& pair : stats_.substreams) { VideoSendStream::StreamStats& stream_stats = pair.second; - stream_stats.scalability_mode = absl::nullopt; + stream_stats.scalability_mode = std::nullopt; } } @@ -1100,6 +1106,9 @@ void SendStatisticsProxy::OnFrameDropped(DropReason reason) { case DropReason::kSource: ++stats_.frames_dropped_by_capturer; break; + case DropReason::kBadTimestamp: + ++stats_.frames_dropped_by_bad_timestamp; + break; case DropReason::kEncoderQueue: ++stats_.frames_dropped_by_encoder_queue; break; @@ -1237,6 +1246,26 @@ void SendStatisticsProxy::OnBitrateAllocationUpdated( bw_limited_layers_ = allocation.is_bw_limited(); UpdateAdaptationStats(); + // Store target bitrates per substream stats. + for (auto& [ssrc, substream] : stats_.substreams) { + std::optional simulcast_index; + for (size_t i = 0; i < rtp_config_.ssrcs.size(); ++i) { + if (rtp_config_.ssrcs[i] == ssrc) { + simulcast_index = i; + break; + } + } + if (!simulcast_index.has_value()) { + substream.target_bitrate = std::nullopt; + continue; + } + substream.target_bitrate = + DataRate::BitsPerSec(allocation.GetSpatialLayerSum(*simulcast_index)); + if (substream.target_bitrate == DataRate::Zero()) { + substream.target_bitrate = std::nullopt; + } + } + if (spatial_layers != last_spatial_layer_use_) { // If the number of spatial layers has changed, the resolution change is // not due to quality limitations, it is because the configuration @@ -1267,8 +1296,8 @@ void SendStatisticsProxy::OnInitialQualityResolutionAdaptDown() { } void SendStatisticsProxy::TryUpdateInitialQualityResolutionAdaptUp( - absl::optional old_quality_downscales, - absl::optional updated_quality_downscales) { + std::optional old_quality_downscales, + std::optional updated_quality_downscales) { if (uma_container_->initial_quality_changes_.down == 0) return; @@ -1324,6 +1353,13 @@ void SendStatisticsProxy::OnReportBlockDataUpdated( stats->report_block_data = std::move(report_block); } +StreamDataCounters SendStatisticsProxy::GetDataCounters(uint32_t ssrc) const { + MutexLock lock(&mutex_); + auto it = stats_.substreams.find(ssrc); + return it != stats_.substreams.end() ? 
it->second.rtp_stats + : StreamDataCounters(); +} + void SendStatisticsProxy::DataCountersUpdated( const StreamDataCounters& counters, uint32_t ssrc) { @@ -1331,12 +1367,6 @@ void SendStatisticsProxy::DataCountersUpdated( VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc); RTC_DCHECK(stats) << "DataCountersUpdated reported for unknown ssrc " << ssrc; - if (stats->type == VideoSendStream::StreamStats::StreamType::kFlexfec) { - // The same counters are reported for both the media ssrc and flexfec ssrc. - // Bitrate stats are summed for all SSRCs. Use fec stats from media update. - return; - } - stats->rtp_stats = counters; if (uma_container_->first_rtp_stats_time_ms_ == -1) { int64_t now_ms = clock_->TimeInMilliseconds(); @@ -1388,13 +1418,52 @@ void SendStatisticsProxy::FrameCountUpdated(const FrameCounts& frame_counts, stats->frame_counts = frame_counts; } -void SendStatisticsProxy::SendSideDelayUpdated(int avg_delay_ms, - int max_delay_ms, - uint32_t ssrc) { +void SendStatisticsProxy::Trackers::AddSendDelay(Timestamp now, + TimeDelta send_delay) { + // Add the new measurement. + send_delays.push_back({.when = now, .send_delay = send_delay}); + send_delay_sum += send_delay; + if (send_delay_max == nullptr || *send_delay_max <= send_delay) { + send_delay_max = &send_delays.back().send_delay; + } + + // Remove old entries. No need to check for emptiness because the newly added + // entry would never be too old. + while (now - send_delays.front().when > TimeDelta::Seconds(1)) { + send_delay_sum -= send_delays.front().send_delay; + if (send_delay_max == &send_delays.front().send_delay) { + send_delay_max = nullptr; + } + send_delays.pop_front(); + } + + // Check if the max value was pushed out of the queue as too old. + if (send_delay_max == nullptr) { + send_delay_max = &send_delays.front().send_delay; + for (const SendDelayEntry& entry : send_delays) { + // Use '>=' rather than '>' to prefer the latest maximum as it would be pushed + // out later and thus trigger fewer recalculations.
+ if (entry.send_delay >= *send_delay_max) { + send_delay_max = &entry.send_delay; + } + } + } +} + +void SendStatisticsProxy::OnSendPacket(uint32_t ssrc, Timestamp capture_time) { + Timestamp now = clock_->CurrentTime(); + MutexLock lock(&mutex_); VideoSendStream::StreamStats* stats = GetStatsEntry(ssrc); if (!stats) return; + + Trackers& track = trackers_[ssrc]; + track.AddSendDelay(now, now - capture_time); + + int64_t avg_delay_ms = (track.send_delay_sum / track.send_delays.size()).ms(); + int64_t max_delay_ms = track.send_delay_max->ms(); + stats->avg_delay_ms = avg_delay_ms; stats->max_delay_ms = max_delay_ms; diff --git a/video/send_statistics_proxy.h b/video/send_statistics_proxy.h index 4203b1c873..8e7a4f6492 100644 --- a/video/send_statistics_proxy.h +++ b/video/send_statistics_proxy.h @@ -12,18 +12,31 @@ #define VIDEO_SEND_STATISTICS_PROXY_H_ #include +#include +#include +#include #include #include +#include #include #include #include "api/field_trials_view.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "api/video/video_adaptation_counters.h" +#include "api/video/video_adaptation_reason.h" +#include "api/video/video_bitrate_allocation.h" #include "api/video/video_codec_constants.h" +#include "api/video_codecs/video_codec.h" +#include "call/rtp_config.h" #include "call/video_send_stream.h" +#include "common_video/frame_counts.h" #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/include/report_block_data.h" +#include "modules/rtp_rtcp/include/rtcp_statistics.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/video_coding/include/video_codec_interface.h" -#include "modules/video_coding/include/video_coding_defines.h" #include "rtc_base/numerics/exp_filter.h" #include "rtc_base/rate_tracker.h" #include "rtc_base/synchronization/mutex.h" @@ -42,10 +55,9 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, public RtcpPacketTypeCounterObserver, public StreamDataCountersCallback, public BitrateStatisticsObserver, - public FrameCountObserver, - public SendSideDelayObserver { + public FrameCountObserver { public: - static const int kStatsTimeoutMs; + static constexpr TimeDelta kStatsTimeout = TimeDelta::Seconds(5); // Number of required samples to be collected before a metric is added // to a rtc histogram. static const int kMinRequiredMetricsSamples = 200; @@ -57,6 +69,7 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, ~SendStatisticsProxy() override; virtual VideoSendStream::Stats GetStats(); + void SetStats(const VideoSendStream::Stats& stats); void OnSendEncodedImage(const EncodedImage& encoded_image, const CodecSpecificInfo* codec_info) override; @@ -103,6 +116,8 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, void OnEncodedFrameTimeMeasured(int encode_time_ms, int encode_usage_percent) override; + void OnSendPacket(uint32_t ssrc, Timestamp capture_time); + int GetInputFrameRate() const override; int GetSendFrameRate() const; @@ -114,6 +129,7 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, uint32_t ssrc, const RtcpPacketTypeCounter& packet_counter) override; // From StreamDataCountersCallback. 
+ StreamDataCounters GetDataCounters(uint32_t ssrc) const override; void DataCountersUpdated(const StreamDataCounters& counters, uint32_t ssrc) override; @@ -126,11 +142,6 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, void FrameCountUpdated(const FrameCounts& frame_counts, uint32_t ssrc) override; - // From SendSideDelayObserver. - void SendSideDelayUpdated(int avg_delay_ms, - int max_delay_ms, - uint32_t ssrc) override; - private: class SampleCounter { public: @@ -157,11 +168,6 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, int64_t sum; int64_t num_samples; }; - struct StatsUpdateTimes { - StatsUpdateTimes() : resolution_update_ms(0), bitrate_update_ms(0) {} - int64_t resolution_update_ms; - int64_t bitrate_update_ms; - }; struct TargetRateUpdates { TargetRateUpdates() : pause_resume_events(0), last_paused_or_resumed(false), last_ms(-1) {} @@ -175,7 +181,7 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, bool is_active = false; int on_off_events = 0; int64_t elapsed_ms = 0; - absl::optional last_update_ms; + std::optional last_update_ms; const int max_frame_diff_ms = 2000; }; struct FallbackEncoderInfoDisabled { @@ -226,8 +232,8 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); struct MaskedAdaptationCounts { - absl::optional resolution_adaptations = absl::nullopt; - absl::optional num_framerate_reductions = absl::nullopt; + std::optional resolution_adaptations = std::nullopt; + std::optional num_framerate_reductions = std::nullopt; }; struct Adaptations { @@ -253,13 +259,38 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, MaskedAdaptationCounts Mask(const VideoAdaptationCounters& counters, const AdaptationSettings& settings) const; }; + // Collection of various stats that are tracked per ssrc. + struct Trackers { + struct SendDelayEntry { + Timestamp when; + TimeDelta send_delay; + }; + + Trackers(); + Trackers(const Trackers&) = delete; + Trackers& operator=(const Trackers&) = delete; + + void AddSendDelay(Timestamp now, TimeDelta send_delay); + + Timestamp resolution_update = Timestamp::MinusInfinity(); + RateTracker encoded_frame_rate; + + std::deque send_delays; + + // The sum of `send_delay` in `send_delays`. 
+ TimeDelta send_delay_sum = TimeDelta::Zero(); + + // Pointer to the maximum `send_delay` in `send_delays` or nullptr if + // `send_delays.empty()` + const TimeDelta* send_delay_max = nullptr; + }; void SetAdaptTimer(const MaskedAdaptationCounts& counts, StatsTimer* timer) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void UpdateAdaptationStats() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void TryUpdateInitialQualityResolutionAdaptUp( - absl::optional old_quality_downscales, - absl::optional updated_quality_downscales) + std::optional old_quality_downscales, + std::optional updated_quality_downscales) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void UpdateEncoderFallbackStats(const CodecSpecificInfo* codec_info, @@ -274,23 +305,21 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, Clock* const clock_; const std::string payload_name_; const RtpConfig rtp_config_; - const absl::optional fallback_max_pixels_; - const absl::optional fallback_max_pixels_disabled_; + const std::optional fallback_max_pixels_; + const std::optional fallback_max_pixels_disabled_; mutable Mutex mutex_; VideoEncoderConfig::ContentType content_type_ RTC_GUARDED_BY(mutex_); const int64_t start_ms_; VideoSendStream::Stats stats_ RTC_GUARDED_BY(mutex_); - std::map update_times_ RTC_GUARDED_BY(mutex_); - rtc::ExpFilter encode_time_ RTC_GUARDED_BY(mutex_); + ExpFilter encode_time_ RTC_GUARDED_BY(mutex_); QualityLimitationReasonTracker quality_limitation_reason_tracker_ RTC_GUARDED_BY(mutex_); - rtc::RateTracker media_byte_rate_tracker_ RTC_GUARDED_BY(mutex_); - rtc::RateTracker encoded_frame_rate_tracker_ RTC_GUARDED_BY(mutex_); - // Rate trackers mapped by ssrc. - std::map> - encoded_frame_rate_trackers_ RTC_GUARDED_BY(mutex_); + RateTracker media_byte_rate_tracker_ RTC_GUARDED_BY(mutex_); + RateTracker encoded_frame_rate_tracker_ RTC_GUARDED_BY(mutex_); + // Trackers mapped by ssrc. + std::map trackers_ RTC_GUARDED_BY(mutex_); - absl::optional last_outlier_timestamp_ RTC_GUARDED_BY(mutex_); + std::optional last_outlier_timestamp_ RTC_GUARDED_BY(mutex_); int last_num_spatial_layers_ RTC_GUARDED_BY(mutex_); int last_num_simulcast_streams_ RTC_GUARDED_BY(mutex_); @@ -309,7 +338,7 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, }; // Stores the last change in encoder implementation in an optional, so that // the event can be consumed. - absl::optional encoder_changed_; + std::optional encoder_changed_; // Contains stats used for UMA histograms. 
These stats will be reset if // content type changes between real-time video and screenshare, since these @@ -344,7 +373,7 @@ class SendStatisticsProxy : public VideoStreamEncoderObserver, SampleCounter bw_resolutions_disabled_counter_; SampleCounter delay_counter_; SampleCounter max_delay_counter_; - rtc::RateTracker input_frame_rate_tracker_; + RateTracker input_frame_rate_tracker_; RateCounter input_fps_counter_; RateCounter sent_fps_counter_; RateAccCounter total_byte_counter_; diff --git a/video/send_statistics_proxy_unittest.cc b/video/send_statistics_proxy_unittest.cc index 9db774145e..39ff0a6ab8 100644 --- a/video/send_statistics_proxy_unittest.cc +++ b/video/send_statistics_proxy_unittest.cc @@ -69,7 +69,7 @@ class SendStatisticsProxyTest : public ::testing::Test { SendStatisticsProxyTest() : SendStatisticsProxyTest("") {} explicit SendStatisticsProxyTest(const std::string& field_trials) : override_field_trials_(field_trials), - fake_clock_(1234), + fake_clock_(Timestamp::Seconds(1234)), config_(GetTestConfig()) {} virtual ~SendStatisticsProxyTest() {} @@ -199,7 +199,8 @@ TEST_F(SendStatisticsProxyTest, ReportBlockDataObserver) { report_block.SetFractionLost(offset + 2); report_block.SetJitter(offset + 3); ReportBlockData data; - data.SetReportBlock(/*sender_ssrc=*/0, report_block, Timestamp::Zero()); + data.SetReportBlock(/*sender_ssrc=*/0, report_block, Timestamp::Zero(), + Timestamp::Zero()); expected_.substreams[ssrc].report_block_data = data; callback->OnReportBlockDataUpdated(data); @@ -214,7 +215,8 @@ TEST_F(SendStatisticsProxyTest, ReportBlockDataObserver) { report_block.SetFractionLost(offset + 2); report_block.SetJitter(offset + 3); ReportBlockData data; - data.SetReportBlock(/*sender_ssrc=*/0, report_block, Timestamp::Zero()); + data.SetReportBlock(/*sender_ssrc=*/0, report_block, Timestamp::Zero(), + Timestamp::Zero()); expected_.substreams[ssrc].report_block_data = data; callback->OnReportBlockDataUpdated(data); @@ -320,24 +322,29 @@ TEST_F(SendStatisticsProxyTest, Bitrate) { } TEST_F(SendStatisticsProxyTest, SendSideDelay) { - SendSideDelayObserver* observer = statistics_proxy_.get(); - for (const auto& ssrc : config_.rtp.ssrcs) { + for (uint32_t ssrc : config_.rtp.ssrcs) { // Use ssrc as avg_delay_ms and max_delay_ms to get a unique value for each // stream. - int avg_delay_ms = ssrc; - int max_delay_ms = ssrc + 1; - observer->SendSideDelayUpdated(avg_delay_ms, max_delay_ms, ssrc); - expected_.substreams[ssrc].avg_delay_ms = avg_delay_ms; - expected_.substreams[ssrc].max_delay_ms = max_delay_ms; + expected_.substreams[ssrc].avg_delay_ms = ssrc; + expected_.substreams[ssrc].max_delay_ms = ssrc + 1; + statistics_proxy_->OnSendPacket(ssrc, + /*capture_time=*/fake_clock_.CurrentTime() - + TimeDelta::Millis(ssrc + 1)); + statistics_proxy_->OnSendPacket(ssrc, + /*capture_time=*/fake_clock_.CurrentTime() - + TimeDelta::Millis(ssrc - 1)); } - for (const auto& ssrc : config_.rtp.rtx.ssrcs) { + for (uint32_t ssrc : config_.rtp.rtx.ssrcs) { // Use ssrc as avg_delay_ms and max_delay_ms to get a unique value for each // stream. 
- int avg_delay_ms = ssrc; - int max_delay_ms = ssrc + 1; - observer->SendSideDelayUpdated(avg_delay_ms, max_delay_ms, ssrc); - expected_.substreams[ssrc].avg_delay_ms = avg_delay_ms; - expected_.substreams[ssrc].max_delay_ms = max_delay_ms; + expected_.substreams[ssrc].avg_delay_ms = ssrc; + expected_.substreams[ssrc].max_delay_ms = ssrc + 1; + statistics_proxy_->OnSendPacket(ssrc, + /*capture_time=*/fake_clock_.CurrentTime() - + TimeDelta::Millis(ssrc + 1)); + statistics_proxy_->OnSendPacket(ssrc, + /*capture_time=*/fake_clock_.CurrentTime() - + TimeDelta::Millis(ssrc - 1)); } VideoSendStream::Stats stats = statistics_proxy_->GetStats(); ExpectEqual(expected_, stats); @@ -377,7 +384,7 @@ TEST_F(SendStatisticsProxyTest, OnSendEncodedImageIncreasesQpSum) { EncodedImage encoded_image; CodecSpecificInfo codec_info; auto ssrc = config_.rtp.ssrcs[0]; - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, statistics_proxy_->GetStats().substreams[ssrc].qp_sum); encoded_image.qp_ = 3; statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); @@ -392,10 +399,10 @@ TEST_F(SendStatisticsProxyTest, OnSendEncodedImageWithoutQpQpSumWontExist) { CodecSpecificInfo codec_info; auto ssrc = config_.rtp.ssrcs[0]; encoded_image.qp_ = -1; - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, statistics_proxy_->GetStats().substreams[ssrc].qp_sum); statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, statistics_proxy_->GetStats().substreams[ssrc].qp_sum); } @@ -407,16 +414,16 @@ TEST_F(SendStatisticsProxyTest, ScalabilityMode layer1_mode = ScalabilityMode::kL1T3; auto ssrc0 = config_.rtp.ssrcs[0]; auto ssrc1 = config_.rtp.ssrcs[1]; - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, statistics_proxy_->GetStats().substreams[ssrc0].scalability_mode); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, statistics_proxy_->GetStats().substreams[ssrc1].scalability_mode); encoded_image.SetSimulcastIndex(0); codec_info.scalability_mode = layer0_mode; statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); EXPECT_THAT(statistics_proxy_->GetStats().substreams[ssrc0].scalability_mode, layer0_mode); - EXPECT_EQ(absl::nullopt, + EXPECT_EQ(std::nullopt, statistics_proxy_->GetStats().substreams[ssrc1].scalability_mode); encoded_image.SetSimulcastIndex(1); codec_info.scalability_mode = layer1_mode; @@ -448,10 +455,11 @@ TEST_F(SendStatisticsProxyTest, // SendStatisticsProxy uses a RateTracker internally. SendStatisticsProxy uses // `fake_clock_` for testing, but the RateTracker relies on a global clock. - // This test relies on rtc::ScopedFakeClock to synchronize these two clocks. + // This test relies on webrtc::ScopedFakeClock to synchronize these two + // clocks. // TODO(https://crbug.com/webrtc/10640): When the RateTracker uses a Clock - // this test can stop relying on rtc::ScopedFakeClock. - rtc::ScopedFakeClock fake_global_clock; + // this test can stop relying on webrtc::ScopedFakeClock. 
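
The SendSideDelay test above now drives the per-SSRC delay stats through OnSendPacket(ssrc, capture_time): each packet contributes a delay of now minus capture_time, so the two packets per SSRC yield delays of ssrc + 1 ms and ssrc - 1 ms, hence the expected avg_delay_ms == ssrc and max_delay_ms == ssrc + 1. Below is a minimal standalone sketch of the kind of windowed bookkeeping that the new Trackers::AddSendDelay() fields (send_delays, send_delay_sum, send_delay_max) suggest; standard C++ only, and the class name and window length are illustrative rather than the WebRTC implementation.

#include <algorithm>
#include <cstdint>
#include <deque>

class SendDelayWindow {
 public:
  explicit SendDelayWindow(int64_t window_ms) : window_ms_(window_ms) {}

  // now_ms is the send time; delay_ms is send time minus capture time.
  void AddSendDelay(int64_t now_ms, int64_t delay_ms) {
    samples_.push_back({now_ms, delay_ms});
    sum_ms_ += delay_ms;
    // Drop samples that have fallen out of the window, keeping the sum valid.
    while (!samples_.empty() &&
           samples_.front().when_ms + window_ms_ < now_ms) {
      sum_ms_ -= samples_.front().delay_ms;
      samples_.pop_front();
    }
  }

  int64_t AvgDelayMs() const {
    return samples_.empty()
               ? 0
               : sum_ms_ / static_cast<int64_t>(samples_.size());
  }

  int64_t MaxDelayMs() const {
    int64_t max_ms = 0;
    for (const Sample& s : samples_) max_ms = std::max(max_ms, s.delay_ms);
    return max_ms;
  }

 private:
  struct Sample {
    int64_t when_ms;
    int64_t delay_ms;
  };
  const int64_t window_ms_;
  std::deque<Sample> samples_;
  int64_t sum_ms_ = 0;
};

// With delays of (ssrc + 1) ms and (ssrc - 1) ms, as in the test above,
// AvgDelayMs() returns ssrc and MaxDelayMs() returns ssrc + 1.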
+ ScopedFakeClock fake_global_clock; fake_global_clock.SetTime( Timestamp::Millis(fake_clock_.TimeInMilliseconds())); @@ -466,8 +474,8 @@ TEST_F(SendStatisticsProxyTest, fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs); fake_global_clock.SetTime( Timestamp::Millis(fake_clock_.TimeInMilliseconds())); - encoded_image.SetTimestamp(encoded_image.Timestamp() + - 90 * kInterframeDelayMs); + encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() + + 90 * kInterframeDelayMs); statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr); auto stats = statistics_proxy_->GetStats(); @@ -482,7 +490,7 @@ TEST_F(SendStatisticsProxyTest, TEST_F(SendStatisticsProxyTest, EncodeFrameRateInSubStream) { const int kInterframeDelayMs = 100; const auto ssrc = config_.rtp.ssrcs[0]; - rtc::ScopedFakeClock fake_global_clock; + ScopedFakeClock fake_global_clock; fake_global_clock.SetTime( Timestamp::Millis(fake_clock_.TimeInMilliseconds())); @@ -493,8 +501,8 @@ TEST_F(SendStatisticsProxyTest, EncodeFrameRateInSubStream) { fake_global_clock.SetTime( Timestamp::Millis(fake_clock_.TimeInMilliseconds())); // Second frame - encoded_image.SetTimestamp(encoded_image.Timestamp() + - 90 * kInterframeDelayMs); + encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() + + 90 * kInterframeDelayMs); statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr); fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs); fake_global_clock.SetTime( @@ -506,7 +514,7 @@ TEST_F(SendStatisticsProxyTest, EncodeFrameRateInSubStream) { TEST_F(SendStatisticsProxyTest, EncodeFrameRateInSubStreamsVp8Simulcast) { const int kInterframeDelayMs = 100; - rtc::ScopedFakeClock fake_global_clock; + ScopedFakeClock fake_global_clock; fake_global_clock.SetTime( Timestamp::Millis(fake_clock_.TimeInMilliseconds())); EncodedImage encoded_image; @@ -514,8 +522,8 @@ TEST_F(SendStatisticsProxyTest, EncodeFrameRateInSubStreamsVp8Simulcast) { codec_info.codecType = kVideoCodecVP8; for (int i = 0; i < 10; ++i) { - encoded_image.SetTimestamp(encoded_image.Timestamp() + - 90 * kInterframeDelayMs); + encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() + + 90 * kInterframeDelayMs); encoded_image.SetSimulcastIndex(0); statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); encoded_image.SetSimulcastIndex(1); @@ -532,8 +540,8 @@ TEST_F(SendStatisticsProxyTest, EncodeFrameRateInSubStreamsVp8Simulcast) { // Stop encoding second stream, expect framerate to be zero. for (int i = 0; i < 10; ++i) { - encoded_image.SetTimestamp(encoded_image.Timestamp() + - 90 * kInterframeDelayMs); + encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() + + 90 * kInterframeDelayMs); encoded_image.SetSimulcastIndex(0); statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); fake_clock_.AdvanceTimeMilliseconds(kInterframeDelayMs); @@ -548,8 +556,8 @@ TEST_F(SendStatisticsProxyTest, EncodeFrameRateInSubStreamsVp8Simulcast) { // Start encoding second stream. 
for (int i = 0; i < 10; ++i) { - encoded_image.SetTimestamp(encoded_image.Timestamp() + - 90 * kInterframeDelayMs); + encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() + + 90 * kInterframeDelayMs); encoded_image.SetSimulcastIndex(0); statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); encoded_image.SetSimulcastIndex(1); @@ -567,7 +575,7 @@ TEST_F(SendStatisticsProxyTest, EncodeFrameRateInSubStreamsVp8Simulcast) { TEST_F(SendStatisticsProxyTest, EncodeFrameRateInSubStreamsVp9Svc) { const int kInterframeDelayMs = 100; - rtc::ScopedFakeClock fake_global_clock; + ScopedFakeClock fake_global_clock; fake_global_clock.SetTime( Timestamp::Millis(fake_clock_.TimeInMilliseconds())); EncodedImage encoded_image; @@ -575,8 +583,8 @@ TEST_F(SendStatisticsProxyTest, EncodeFrameRateInSubStreamsVp9Svc) { codec_info.codecType = kVideoCodecVP9; for (int i = 0; i < 10; ++i) { - encoded_image.SetTimestamp(encoded_image.Timestamp() + - 90 * kInterframeDelayMs); + encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() + + 90 * kInterframeDelayMs); encoded_image.SetSpatialIndex(0); codec_info.end_of_picture = false; statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); @@ -1501,6 +1509,36 @@ TEST_F(SendStatisticsProxyTest, 0u, statistics_proxy_->GetStats().quality_limitation_resolution_changes); } +TEST_F(SendStatisticsProxyTest, OnBitrateAllocationUpdatedSetsTargetBitrates) { + // We only update target bitrates for substreams that exist and these are + // created lazily in various places... calling OnInactiveSsrc() is one way to + // ensure the stats are reported. + statistics_proxy_->OnInactiveSsrc(kFirstSsrc); + statistics_proxy_->OnInactiveSsrc(kSecondSsrc); + + // Update target bitrates! + VideoBitrateAllocation allocation; + allocation.SetBitrate(0, 0, 123); + allocation.SetBitrate(1, 0, 321); + statistics_proxy_->OnBitrateAllocationUpdated(VideoCodec(), allocation); + EXPECT_EQ(statistics_proxy_->GetStats().substreams[kFirstSsrc].target_bitrate, + DataRate::BitsPerSec(123)); + EXPECT_EQ( + statistics_proxy_->GetStats().substreams[kSecondSsrc].target_bitrate, + DataRate::BitsPerSec(321)); + + // 0 bitrate = no target. + allocation.SetBitrate(0, 0, 0); + allocation.SetBitrate(1, 0, 0); + statistics_proxy_->OnBitrateAllocationUpdated(VideoCodec(), allocation); + EXPECT_FALSE(statistics_proxy_->GetStats() + .substreams[kFirstSsrc] + .target_bitrate.has_value()); + EXPECT_FALSE(statistics_proxy_->GetStats() + .substreams[kSecondSsrc] + .target_bitrate.has_value()); +} + TEST_F(SendStatisticsProxyTest, QualityLimitationResolutionDoesNotUpdateForSpatialLayerChanges) { VideoCodec codec; @@ -1648,7 +1686,8 @@ TEST_F(SendStatisticsProxyTest, SentResolutionHistogramsAreUpdated) { // Not enough samples, stats should not be updated. for (int i = 0; i < kMinSamples - 1; ++i) { fake_clock_.AdvanceTimeMilliseconds(1000 / kFps); - encoded_image.SetTimestamp(encoded_image.Timestamp() + 90 * 1000 / kFps); + encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() + + 90 * 1000 / kFps); statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr); } SetUp(); // Reset stats proxy also causes histograms to be reported. @@ -1656,10 +1695,11 @@ TEST_F(SendStatisticsProxyTest, SentResolutionHistogramsAreUpdated) { EXPECT_METRIC_EQ(0, metrics::NumSamples("WebRTC.Video.SentHeightInPixels")); // Enough samples, max resolution per frame should be reported. - encoded_image.SetTimestamp(0xffff0000); // Will wrap. + encoded_image.SetRtpTimestamp(0xffff0000); // Will wrap. 
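
The encode frame rate tests and SentResolutionHistogramsAreUpdated advance the RTP timestamp by 90 * kInterframeDelayMs because video RTP timestamps tick at 90 kHz, and the resolution test seeds the timestamp at 0xffff0000 precisely so that the 32-bit counter wraps during the loop. A small sketch of that arithmetic, plain C++ and illustrative only:

#include <cassert>
#include <cstdint>

// Video RTP timestamps use a 90 kHz clock, i.e. 90 ticks per millisecond,
// and the counter is 32 bits wide, so it wraps modulo 2^32.
constexpr uint32_t kRtpTicksPerMs = 90;

uint32_t AdvanceRtpTimestamp(uint32_t rtp_timestamp, int64_t delta_ms) {
  // Unsigned arithmetic gives the modulo-2^32 wraparound for free.
  return rtp_timestamp + static_cast<uint32_t>(delta_ms * kRtpTicksPerMs);
}

void Demo() {
  // 100 ms at 90 kHz is 9000 ticks, matching 90 * kInterframeDelayMs above.
  assert(AdvanceRtpTimestamp(0, 100) == 9000u);
  // Starting near the top of the 32-bit range wraps past zero:
  // 0xffff0000 + 90000 ticks ends up at 24464.
  assert(AdvanceRtpTimestamp(0xffff0000u, 1000) == 24464u);
}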
for (int i = 0; i < kMinSamples; ++i) { fake_clock_.AdvanceTimeMilliseconds(1000 / kFps); - encoded_image.SetTimestamp(encoded_image.Timestamp() + 90 * 1000 / kFps); + encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() + + 90 * 1000 / kFps); encoded_image._encodedWidth = kWidth; encoded_image._encodedHeight = kHeight; statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr); @@ -1698,7 +1738,7 @@ TEST_F(SendStatisticsProxyTest, SentFpsHistogramIsUpdated) { int frames = kMinPeriodicSamples * kFpsPeriodicIntervalMs * kFps / 1000 + 1; for (int i = 0; i < frames; ++i) { fake_clock_.AdvanceTimeMilliseconds(1000 / kFps); - encoded_image.SetTimestamp(encoded_image.Timestamp() + 1); + encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() + 1); statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr); // Frame with same timestamp should not be counted. statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr); @@ -1741,7 +1781,7 @@ TEST_F(SendStatisticsProxyTest, SentFpsHistogramExcludesSuspendedTime) { int frames = kMinPeriodicSamples * kFpsPeriodicIntervalMs * kFps / 1000; for (int i = 0; i < frames; ++i) { fake_clock_.AdvanceTimeMilliseconds(1000 / kFps); - encoded_image.SetTimestamp(i + 1); + encoded_image.SetRtpTimestamp(i + 1); statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr); } // Suspend. @@ -1750,7 +1790,7 @@ TEST_F(SendStatisticsProxyTest, SentFpsHistogramExcludesSuspendedTime) { for (int i = 0; i < frames; ++i) { fake_clock_.AdvanceTimeMilliseconds(1000 / kFps); - encoded_image.SetTimestamp(i + 1); + encoded_image.SetRtpTimestamp(i + 1); statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr); } // Suspended time interval should not affect the framerate. @@ -2062,8 +2102,8 @@ TEST_F(SendStatisticsProxyTest, encoded_image._encodedHeight = kHeight; for (int i = 0; i < kMinSamples; ++i) { fake_clock_.AdvanceTimeMilliseconds(1000 / kFps); - encoded_image.SetTimestamp(encoded_image.Timestamp() + - (kRtpClockRateHz / kFps)); + encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() + + (kRtpClockRateHz / kFps)); statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr); } @@ -2098,8 +2138,8 @@ TEST_F(SendStatisticsProxyTest, EncodedImage encoded_image; for (int i = 0; i < kMinSamples; ++i) { fake_clock_.AdvanceTimeMilliseconds(1000 / kFps); - encoded_image.SetTimestamp(encoded_image.Timestamp() + - (kRtpClockRateHz / kFps)); + encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() + + (kRtpClockRateHz / kFps)); encoded_image._encodedWidth = kWidth; encoded_image._encodedHeight = kHeight; statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr); @@ -2145,8 +2185,8 @@ TEST_F(SendStatisticsProxyTest, encoded_image._encodedHeight = kHeight / 2; for (int i = 0; i < kMinSamples; ++i) { fake_clock_.AdvanceTimeMilliseconds(1000 / kFps); - encoded_image.SetTimestamp(encoded_image.Timestamp() + - (kRtpClockRateHz / kFps)); + encoded_image.SetRtpTimestamp(encoded_image.RtpTimestamp() + + (kRtpClockRateHz / kFps)); statistics_proxy_->OnSendEncodedImage(encoded_image, nullptr); } @@ -2312,7 +2352,7 @@ TEST_F(SendStatisticsProxyTest, NoSubstreams) { rtcp::ReportBlock report_block; report_block.SetMediaSsrc(excluded_ssrc); ReportBlockData data; - data.SetReportBlock(0, report_block, Timestamp::Zero()); + data.SetReportBlock(0, report_block, Timestamp::Zero(), Timestamp::Zero()); rtcp_callback->OnReportBlockDataUpdated(data); // From BitrateStatisticsObserver. 
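
The OnBitrateAllocationUpdatedSetsTargetBitrates test above relies on two behaviors: targets are only recorded for substreams that already exist, and a zero allocation clears the target instead of reporting 0 bps. A rough sketch of that bookkeeping with std::optional follows; the names are illustrative, not the SendStatisticsProxy internals.

#include <cstdint>
#include <map>
#include <optional>

struct SubstreamStats {
  std::optional<int64_t> target_bitrate_bps;
};

void UpdateTargetBitrate(std::map<uint32_t, SubstreamStats>& substreams,
                         uint32_t ssrc,
                         int64_t allocated_bps) {
  auto it = substreams.find(ssrc);
  if (it == substreams.end()) return;  // Only update substreams that exist.
  if (allocated_bps > 0) {
    it->second.target_bitrate_bps = allocated_bps;
  } else {
    it->second.target_bitrate_bps = std::nullopt;  // 0 bitrate = no target.
  }
}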
@@ -2353,7 +2393,8 @@ TEST_F(SendStatisticsProxyTest, EncodedResolutionTimesOut) { EXPECT_EQ(kEncodedHeight, stats.substreams[config_.rtp.ssrcs[1]].height); // Forward almost to timeout, this should not have removed stats. - fake_clock_.AdvanceTimeMilliseconds(SendStatisticsProxy::kStatsTimeoutMs - 1); + fake_clock_.AdvanceTime(SendStatisticsProxy::kStatsTimeout - + TimeDelta::Millis(1)); stats = statistics_proxy_->GetStats(); EXPECT_EQ(kEncodedWidth, stats.substreams[config_.rtp.ssrcs[0]].width); EXPECT_EQ(kEncodedHeight, stats.substreams[config_.rtp.ssrcs[0]].height); @@ -2364,7 +2405,7 @@ TEST_F(SendStatisticsProxyTest, EncodedResolutionTimesOut) { rtcp::ReportBlock report_block; report_block.SetMediaSsrc(config_.rtp.ssrcs[0]); ReportBlockData data; - data.SetReportBlock(0, report_block, Timestamp::Zero()); + data.SetReportBlock(0, report_block, Timestamp::Zero(), Timestamp::Zero()); rtcp_callback->OnReportBlockDataUpdated(data); // Report stats for second SSRC to make sure it's not outdated along with the @@ -2525,7 +2566,7 @@ TEST_F(SendStatisticsProxyTest, GetStatsReportsIsRtx) { EXPECT_NE(GetStreamStats(kFirstSsrc).type, VideoSendStream::StreamStats::StreamType::kRtx); - EXPECT_EQ(GetStreamStats(kFirstSsrc).referenced_media_ssrc, absl::nullopt); + EXPECT_EQ(GetStreamStats(kFirstSsrc).referenced_media_ssrc, std::nullopt); EXPECT_EQ(GetStreamStats(kFirstRtxSsrc).type, VideoSendStream::StreamStats::StreamType::kRtx); EXPECT_EQ(GetStreamStats(kFirstRtxSsrc).referenced_media_ssrc, kFirstSsrc); @@ -2545,7 +2586,7 @@ TEST_F(SendStatisticsProxyTest, GetStatsReportsIsFlexFec) { EXPECT_NE(GetStreamStats(kFirstSsrc).type, VideoSendStream::StreamStats::StreamType::kFlexfec); - EXPECT_EQ(GetStreamStats(kFirstSsrc).referenced_media_ssrc, absl::nullopt); + EXPECT_EQ(GetStreamStats(kFirstSsrc).referenced_media_ssrc, std::nullopt); EXPECT_EQ(GetStreamStats(kFlexFecSsrc).type, VideoSendStream::StreamStats::StreamType::kFlexfec); EXPECT_EQ(GetStreamStats(kFlexFecSsrc).referenced_media_ssrc, kFirstSsrc); @@ -2561,6 +2602,7 @@ TEST_F(SendStatisticsProxyTest, SendBitratesAreReportedWithFlexFecEnabled) { static_cast(statistics_proxy_.get()); StreamDataCounters counters; StreamDataCounters rtx_counters; + StreamDataCounters flexfec_counters; const int kMinRequiredPeriodSamples = 8; const int kPeriodIntervalMs = 2000; @@ -2570,10 +2612,10 @@ TEST_F(SendStatisticsProxyTest, SendBitratesAreReportedWithFlexFecEnabled) { counters.transmitted.padding_bytes += 1000; counters.transmitted.payload_bytes += 2000; counters.retransmitted.packets += 2; - counters.retransmitted.header_bytes += 25; - counters.retransmitted.padding_bytes += 100; + counters.retransmitted.header_bytes += 50; + counters.retransmitted.padding_bytes += 200; counters.retransmitted.payload_bytes += 250; - counters.fec = counters.retransmitted; + flexfec_counters.fec = counters.retransmitted; rtx_counters.transmitted = counters.transmitted; // Advance one interval and update counters. 
fake_clock_.AdvanceTimeMilliseconds(kPeriodIntervalMs); @@ -2581,7 +2623,7 @@ TEST_F(SendStatisticsProxyTest, SendBitratesAreReportedWithFlexFecEnabled) { proxy->DataCountersUpdated(counters, kFirstSsrc); proxy->DataCountersUpdated(counters, kSecondSsrc); proxy->DataCountersUpdated(rtx_counters, kFirstRtxSsrc); proxy->DataCountersUpdated(rtx_counters, kSecondRtxSsrc); - proxy->DataCountersUpdated(counters, kFlexFecSsrc); + proxy->DataCountersUpdated(flexfec_counters, kFlexFecSsrc); } statistics_proxy_.reset(); @@ -2592,25 +2634,25 @@ TEST_F(SendStatisticsProxyTest, SendBitratesAreReportedWithFlexFecEnabled) { EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.RtxBitrateSentInKbps")); EXPECT_METRIC_EQ(1, metrics::NumEvents("WebRTC.Video.RtxBitrateSentInKbps", 28)); - // Interval: (2000 - 2 * 250) bytes / 2 sec = 1500 bytes / sec = 12 kbps + // Interval: (2000 - 250) bytes * 2 / 2 sec = 1750 bytes / sec = 14 kbps EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.MediaBitrateSentInKbps")); EXPECT_METRIC_EQ( - 1, metrics::NumEvents("WebRTC.Video.MediaBitrateSentInKbps", 12)); + 1, metrics::NumEvents("WebRTC.Video.MediaBitrateSentInKbps", 14)); // Interval: 1000 bytes * 4 / 2 sec = 2000 bytes / sec = 16 kbps EXPECT_METRIC_EQ( 1, metrics::NumSamples("WebRTC.Video.PaddingBitrateSentInKbps")); EXPECT_METRIC_EQ( 1, metrics::NumEvents("WebRTC.Video.PaddingBitrateSentInKbps", 16)); - // Interval: 375 bytes * 2 / 2 sec = 375 bytes / sec = 3 kbps + // Interval: 500 bytes / 2 sec = 250 bytes / sec = 2 kbps EXPECT_METRIC_EQ(1, metrics::NumSamples("WebRTC.Video.FecBitrateSentInKbps")); EXPECT_METRIC_EQ(1, - metrics::NumEvents("WebRTC.Video.FecBitrateSentInKbps", 3)); - // Interval: 375 bytes * 2 / 2 sec = 375 bytes / sec = 3 kbps + metrics::NumEvents("WebRTC.Video.FecBitrateSentInKbps", 2)); + // Interval: 500 bytes * 2 / 2 sec = 500 bytes / sec = 4 kbps EXPECT_METRIC_EQ( 1, metrics::NumSamples("WebRTC.Video.RetransmittedBitrateSentInKbps")); EXPECT_METRIC_EQ( - 1, metrics::NumEvents("WebRTC.Video.RetransmittedBitrateSentInKbps", 3)); + 1, metrics::NumEvents("WebRTC.Video.RetransmittedBitrateSentInKbps", 4)); } TEST_F(SendStatisticsProxyTest, ResetsRtpCountersOnContentChange) { diff --git a/video/stats_counter.cc b/video/stats_counter.cc index dc548ea3c3..09c2d4dd49 100644 --- a/video/stats_counter.cc +++ b/video/stats_counter.cc @@ -31,7 +31,7 @@ std::string AggregatedStats::ToString() const { } std::string AggregatedStats::ToStringWithMultiplier(int multiplier) const { - rtc::StringBuilder ss; + StringBuilder ss; ss << "periodic_samples:" << num_samples << ", {"; ss << "min:" << (min * multiplier) << ", "; ss << "avg:" << (average * multiplier) << ", "; diff --git a/video/stats_counter.h b/video/stats_counter.h index 9e2b8702d6..cc6dcc95d3 100644 --- a/video/stats_counter.h +++ b/video/stats_counter.h @@ -11,6 +11,8 @@ #ifndef VIDEO_STATS_COUNTER_H_ #define VIDEO_STATS_COUNTER_H_ +#include + #include #include diff --git a/video/sv_loopback.cc b/video/sv_loopback.cc index af475ae4eb..ee8075a7f1 100644 --- a/video/sv_loopback.cc +++ b/video/sv_loopback.cc @@ -11,15 +11,16 @@ #include #include +#include #include #include #include "absl/flags/flag.h" #include "absl/flags/parse.h" -#include "absl/types/optional.h" #include "api/test/simulated_network.h" #include "api/test/video_quality_test_fixture.h" #include "api/transport/bitrate_settings.h" +#include "api/units/data_rate.h" #include "api/video_codecs/video_codec.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -28,6 +29,7 @@ #include "test/field_trial.h" #include "test/gtest.h" 
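
The expected histogram values in the FlexFEC bitrate test above all come from the same conversion: bytes accumulated over the 2000 ms reporting interval, times 8, divided by the interval in milliseconds, which is numerically kbit/s. A quick check of those numbers in plain C++; the proxy itself aggregates through its rate counter members, this only reproduces the arithmetic:

#include <cassert>
#include <cstdint>

// Bytes accumulated over an interval, converted to kilobits per second.
// Bits-per-millisecond is numerically equal to kbit/s.
int64_t BitrateKbps(int64_t bytes, int64_t interval_ms) {
  return bytes * 8 / interval_ms;
}

void CheckFlexFecExpectations() {
  // One 2000 ms interval from the test above:
  // FEC: 500 bytes on the single FlexFEC SSRC -> 2 kbps.
  assert(BitrateKbps(500, 2000) == 2);
  // Retransmitted: 500 bytes on each of the two media SSRCs -> 4 kbps.
  assert(BitrateKbps(500 * 2, 2000) == 4);
  // Media: (2000 - 250) payload bytes on each media SSRC -> 14 kbps.
  assert(BitrateKbps((2000 - 250) * 2, 2000) == 14);
}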
#include "test/run_test.h" +#include "test/test_flags.h" #include "video/video_quality_test.h" // Flags for video. @@ -311,15 +313,6 @@ ABSL_FLAG(bool, ABSL_FLAG(bool, video, true, "Add video stream"); -ABSL_FLAG( - std::string, - force_fieldtrials, - "", - "Field trials control experimental feature code which can be forced. " - "E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/" - " will assign the group Enable to field trial WebRTC-FooFeature. Multiple " - "trials are separated by \"/\""); - // Video-specific flags. ABSL_FLAG(std::string, vclip, @@ -500,7 +493,7 @@ int ScrollDuration() { std::vector Slides() { std::vector slides; std::string slides_list = absl::GetFlag(FLAGS_slides); - rtc::tokenize(slides_list, ',', &slides); + tokenize(slides_list, ',', &slides); return slides; } @@ -508,7 +501,7 @@ int StartBitrateKbps() { return absl::GetFlag(FLAGS_start_bitrate); } -std::string Codec() { +std::string CodecName() { return absl::GetFlag(FLAGS_codec); } @@ -540,8 +533,10 @@ int AvgBurstLossLength() { return absl::GetFlag(FLAGS_avg_burst_loss_length); } -int LinkCapacityKbps() { - return absl::GetFlag(FLAGS_link_capacity); +DataRate LinkCapacity() { + int link_capacity_kbps = absl::GetFlag(FLAGS_link_capacity); + return link_capacity_kbps == 0 ? DataRate::Infinity() + : DataRate::KilobitsPerSec(link_capacity_kbps); } int QueueSize() { @@ -592,7 +587,7 @@ void Loopback() { BuiltInNetworkBehaviorConfig pipe_config; pipe_config.loss_percent = LossPercent(); pipe_config.avg_burst_loss_length = AvgBurstLossLength(); - pipe_config.link_capacity_kbps = LinkCapacityKbps(); + pipe_config.link_capacity = LinkCapacity(); pipe_config.queue_length_packets = QueueSize(); pipe_config.queue_delay_ms = AvgPropagationDelayMs(); pipe_config.delay_standard_deviation_ms = StdPropagationDelayMs(); @@ -620,7 +615,7 @@ void Loopback() { ScreenshareTargetBitrateKbps() * 1000; params.video[screenshare_idx].max_bitrate_bps = ScreenshareMaxBitrateKbps() * 1000; - params.video[screenshare_idx].codec = Codec(); + params.video[screenshare_idx].codec = CodecName(); params.video[screenshare_idx].num_temporal_layers = ScreenshareNumTemporalLayers(); params.video[screenshare_idx].selected_tl = ScreenshareSelectedTL(); @@ -635,7 +630,7 @@ void Loopback() { params.video[camera_idx].max_bitrate_bps = VideoMaxBitrateKbps() * 1000; params.video[camera_idx].suspend_below_min_bitrate = absl::GetFlag(FLAGS_suspend_below_min_bitrate); - params.video[camera_idx].codec = Codec(); + params.video[camera_idx].codec = CodecName(); params.video[camera_idx].num_temporal_layers = VideoNumTemporalLayers(); params.video[camera_idx].selected_tl = VideoSelectedTL(); params.video[camera_idx].ulpfec = absl::GetFlag(FLAGS_use_ulpfec); @@ -672,13 +667,15 @@ void Loopback() { params.ss[screenshare_idx].infer_streams = true; } + VideoQualityTest fixture(nullptr); + std::vector stream_descriptors; stream_descriptors.push_back(ScreenshareStream0()); stream_descriptors.push_back(ScreenshareStream1()); std::vector SL_descriptors; SL_descriptors.push_back(ScreenshareSL0()); SL_descriptors.push_back(ScreenshareSL1()); - VideoQualityTest::FillScalabilitySettings( + fixture.FillScalabilitySettings( ¶ms, screenshare_idx, stream_descriptors, ScreenshareNumStreams(), ScreenshareSelectedStream(), ScreenshareNumSpatialLayers(), ScreenshareSelectedSL(), ScreenshareInterLayerPred(), SL_descriptors); @@ -689,16 +686,15 @@ void Loopback() { SL_descriptors.clear(); SL_descriptors.push_back(VideoSL0()); SL_descriptors.push_back(VideoSL1()); - 
VideoQualityTest::FillScalabilitySettings( - ¶ms, camera_idx, stream_descriptors, VideoNumStreams(), - VideoSelectedStream(), VideoNumSpatialLayers(), VideoSelectedSL(), - VideoInterLayerPred(), SL_descriptors); + fixture.FillScalabilitySettings(¶ms, camera_idx, stream_descriptors, + VideoNumStreams(), VideoSelectedStream(), + VideoNumSpatialLayers(), VideoSelectedSL(), + VideoInterLayerPred(), SL_descriptors); - auto fixture = std::make_unique(nullptr); if (DurationSecs()) { - fixture->RunWithAnalyzer(params); + fixture.RunWithAnalyzer(params); } else { - fixture->RunWithRenderers(params); + fixture.RunWithRenderers(params); } } } // namespace webrtc @@ -707,7 +703,7 @@ int main(int argc, char* argv[]) { ::testing::InitGoogleTest(&argc, argv); absl::ParseCommandLine(argc, argv); - rtc::LogMessage::SetLogToStderr(absl::GetFlag(FLAGS_logs)); + webrtc::LogMessage::SetLogToStderr(absl::GetFlag(FLAGS_logs)); // InitFieldTrialsFromString stores the char*, so the char array must outlive // the application. diff --git a/video/task_queue_frame_decode_scheduler.cc b/video/task_queue_frame_decode_scheduler.cc index cd109c2932..624f8abf4a 100644 --- a/video/task_queue_frame_decode_scheduler.cc +++ b/video/task_queue_frame_decode_scheduler.cc @@ -53,18 +53,17 @@ void TaskQueueFrameDecodeScheduler::ScheduleFrame( // this scheduled release should be skipped. if (scheduled_rtp_ != rtp) return; - scheduled_rtp_ = absl::nullopt; + scheduled_rtp_ = std::nullopt; std::move(cb)(rtp, schedule.render_time); }), wait); } void TaskQueueFrameDecodeScheduler::CancelOutstanding() { - scheduled_rtp_ = absl::nullopt; + scheduled_rtp_ = std::nullopt; } -absl::optional -TaskQueueFrameDecodeScheduler::ScheduledRtpTimestamp() { +std::optional TaskQueueFrameDecodeScheduler::ScheduledRtpTimestamp() { return scheduled_rtp_; } diff --git a/video/task_queue_frame_decode_scheduler.h b/video/task_queue_frame_decode_scheduler.h index 69c6dae63d..1d95c3e344 100644 --- a/video/task_queue_frame_decode_scheduler.h +++ b/video/task_queue_frame_decode_scheduler.h @@ -27,7 +27,7 @@ class TaskQueueFrameDecodeScheduler : public FrameDecodeScheduler { const TaskQueueFrameDecodeScheduler&) = delete; // FrameDecodeScheduler implementation. 
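
ScheduleFrame() above records the frame's RTP timestamp in scheduled_rtp_ before posting the delayed task; when the task fires it only runs the callback if the stored timestamp still matches, so CancelOutstanding() or a newer ScheduleFrame() silently invalidates the older post. A standalone sketch of that guard pattern, where a stored std::function stands in for the delayed task queue (this is not the TaskQueueBase API):

#include <cstdint>
#include <functional>
#include <optional>
#include <utility>

class OneShotFrameScheduler {
 public:
  // Schedules `cb` for `rtp`, replacing any previously scheduled frame.
  void ScheduleFrame(uint32_t rtp, std::function<void(uint32_t)> cb) {
    scheduled_rtp_ = rtp;
    pending_ = [this, rtp, cb = std::move(cb)] {
      // If the scheduled timestamp changed (cancel or reschedule), this
      // delayed invocation is stale and must be skipped.
      if (scheduled_rtp_ != rtp) return;
      scheduled_rtp_ = std::nullopt;
      cb(rtp);
    };
  }

  void CancelOutstanding() { scheduled_rtp_ = std::nullopt; }

  std::optional<uint32_t> ScheduledRtpTimestamp() const {
    return scheduled_rtp_;
  }

  // Stands in for the delayed task firing on the bookkeeping queue.
  void RunPending() {
    if (pending_) std::exchange(pending_, nullptr)();
  }

 private:
  std::optional<uint32_t> scheduled_rtp_;
  std::function<void()> pending_;
};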
- absl::optional ScheduledRtpTimestamp() override; + std::optional ScheduledRtpTimestamp() override; void ScheduleFrame(uint32_t rtp, FrameDecodeTiming::FrameSchedule schedule, FrameReleaseCallback cb) override; @@ -38,7 +38,7 @@ class TaskQueueFrameDecodeScheduler : public FrameDecodeScheduler { Clock* const clock_; TaskQueueBase* const bookkeeping_queue_; - absl::optional scheduled_rtp_; + std::optional scheduled_rtp_; ScopedTaskSafetyDetached task_safety_; bool stopped_ = false; }; diff --git a/video/task_queue_frame_decode_scheduler_unittest.cc b/video/task_queue_frame_decode_scheduler_unittest.cc index 20258c6382..38b43e6e41 100644 --- a/video/task_queue_frame_decode_scheduler_unittest.cc +++ b/video/task_queue_frame_decode_scheduler_unittest.cc @@ -13,9 +13,9 @@ #include #include +#include #include -#include "absl/types/optional.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "test/gmock.h" @@ -93,7 +93,7 @@ TEST(TaskQueueFrameDecodeSchedulerTest, CancelOutstanding) { time_controller_.AdvanceTime(decode_delay / 2); EXPECT_THAT(scheduler.ScheduledRtpTimestamp(), Optional(rtp)); scheduler.CancelOutstanding(); - EXPECT_THAT(scheduler.ScheduledRtpTimestamp(), Eq(absl::nullopt)); + EXPECT_THAT(scheduler.ScheduledRtpTimestamp(), Eq(std::nullopt)); time_controller_.AdvanceTime(decode_delay / 2); scheduler.Stop(); diff --git a/video/test/mock_video_stream_encoder.h b/video/test/mock_video_stream_encoder.h index 946f45cc76..f1cfc7f4a8 100644 --- a/video/test/mock_video_stream_encoder.h +++ b/video/test/mock_video_stream_encoder.h @@ -21,15 +21,15 @@ class MockVideoStreamEncoder : public VideoStreamEncoderInterface { public: MOCK_METHOD(void, AddAdaptationResource, - (rtc::scoped_refptr), + (webrtc::scoped_refptr), (override)); - MOCK_METHOD(std::vector>, + MOCK_METHOD(std::vector>, GetAdaptationResources, (), (override)); MOCK_METHOD(void, SetSource, - (rtc::VideoSourceInterface*, + (webrtc::VideoSourceInterface*, const DegradationPreference&), (override)); MOCK_METHOD(void, SetSink, (EncoderSink*, bool), (override)); diff --git a/video/transport_adapter.cc b/video/transport_adapter.cc index 8222f7abb2..b3116ea528 100644 --- a/video/transport_adapter.cc +++ b/video/transport_adapter.cc @@ -22,7 +22,7 @@ TransportAdapter::TransportAdapter(Transport* transport) TransportAdapter::~TransportAdapter() = default; -bool TransportAdapter::SendRtp(rtc::ArrayView packet, +bool TransportAdapter::SendRtp(ArrayView packet, const PacketOptions& options) { if (!enabled_.load()) return false; @@ -30,7 +30,7 @@ bool TransportAdapter::SendRtp(rtc::ArrayView packet, return transport_->SendRtp(packet, options); } -bool TransportAdapter::SendRtcp(rtc::ArrayView packet) { +bool TransportAdapter::SendRtcp(ArrayView packet) { if (!enabled_.load()) return false; diff --git a/video/transport_adapter.h b/video/transport_adapter.h index a1b6995ee5..28c113e6ee 100644 --- a/video/transport_adapter.h +++ b/video/transport_adapter.h @@ -25,9 +25,9 @@ class TransportAdapter : public Transport { explicit TransportAdapter(Transport* transport); ~TransportAdapter() override; - bool SendRtp(rtc::ArrayView packet, + bool SendRtp(ArrayView packet, const PacketOptions& options) override; - bool SendRtcp(rtc::ArrayView packet) override; + bool SendRtcp(ArrayView packet) override; void Enable(); void Disable(); diff --git a/video/video_analyzer.cc b/video/video_analyzer.cc index 9f17e3e015..0cd9babf18 100644 --- a/video/video_analyzer.cc +++ b/video/video_analyzer.cc @@ -12,25 +12,58 @@ #include 
#include +#include +#include +#include +#include +#include +#include #include #include "absl/algorithm/container.h" #include "absl/flags/flag.h" -#include "absl/flags/parse.h" #include "absl/strings/string_view.h" +#include "api/array_view.h" +#include "api/call/transport.h" +#include "api/media_types.h" +#include "api/numerics/samples_stats_counter.h" +#include "api/task_queue/task_queue_base.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/metrics/metric.h" +#include "api/units/time_delta.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" +#include "call/audio_receive_stream.h" +#include "call/call.h" +#include "call/packet_receiver.h" +#include "call/video_receive_stream.h" +#include "call/video_send_stream.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/rtp_rtcp/source/create_video_rtp_depacketizer.h" #include "modules/rtp_rtcp/source/rtp_packet.h" -#include "modules/rtp_rtcp/source/rtp_util.h" +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/video_coding/codecs/interface/common_constants.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/cpu_time.h" +#include "rtc_base/logging.h" #include "rtc_base/memory_usage.h" +#include "rtc_base/platform_thread.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system_time.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/time_utils.h" +#include "system_wrappers/include/clock.h" #include "system_wrappers/include/cpu_info.h" -#include "test/call_test.h" +#include "test/gtest.h" +#include "test/layer_filtering_transport.h" +#include "test/rtp_file_reader.h" +#include "test/rtp_file_writer.h" #include "test/testsupport/file_utils.h" #include "test/testsupport/frame_writer.h" #include "test/testsupport/test_artifacts.h" @@ -149,7 +182,7 @@ VideoAnalyzer::VideoAnalyzer(test::LayerFilteringTransport* transport, } for (uint32_t i = 0; i < num_cores; ++i) { - comparison_thread_pool_.push_back(rtc::PlatformThread::SpawnJoinable( + comparison_thread_pool_.push_back(PlatformThread::SpawnJoinable( [this] { while (CompareFrames()) { } @@ -177,12 +210,11 @@ void VideoAnalyzer::SetReceiver(PacketReceiver* receiver) { receiver_ = receiver; } -void VideoAnalyzer::SetSource( - rtc::VideoSourceInterface* video_source, - bool respect_sink_wants) { +void VideoAnalyzer::SetSource(VideoSourceInterface* video_source, + bool respect_sink_wants) { if (respect_sink_wants) captured_frame_forwarder_.SetSource(video_source); - rtc::VideoSinkWants wants; + VideoSinkWants wants; video_source->AddOrUpdateSink(InputInterface(), wants); } @@ -211,15 +243,15 @@ void VideoAnalyzer::SetAudioReceiveStream( audio_receive_stream_ = recv_stream; } -rtc::VideoSinkInterface* VideoAnalyzer::InputInterface() { +VideoSinkInterface* VideoAnalyzer::InputInterface() { return &captured_frame_forwarder_; } -rtc::VideoSourceInterface* VideoAnalyzer::OutputInterface() { +VideoSourceInterface* VideoAnalyzer::OutputInterface() { return &captured_frame_forwarder_; } -void VideoAnalyzer::DeliverRtcpPacket(rtc::CopyOnWriteBuffer packet) { +void VideoAnalyzer::DeliverRtcpPacket(CopyOnWriteBuffer packet) { return 
receiver_->DeliverRtcpPacket(std::move(packet)); } @@ -256,12 +288,12 @@ void VideoAnalyzer::DeliverRtpPacket( void VideoAnalyzer::PreEncodeOnFrame(const VideoFrame& video_frame) { MutexLock lock(&lock_); if (!first_encoded_timestamp_) { - while (frames_.front().timestamp() != video_frame.timestamp()) { + while (frames_.front().rtp_timestamp() != video_frame.rtp_timestamp()) { ++dropped_frames_before_first_encode_; frames_.pop_front(); RTC_CHECK(!frames_.empty()); } - first_encoded_timestamp_ = video_frame.timestamp(); + first_encoded_timestamp_ = video_frame.rtp_timestamp(); } } @@ -272,7 +304,7 @@ void VideoAnalyzer::PostEncodeOnFrame(size_t stream_id, uint32_t timestamp) { } } -bool VideoAnalyzer::SendRtp(rtc::ArrayView packet, +bool VideoAnalyzer::SendRtp(ArrayView packet, const PacketOptions& options) { RtpPacket rtp_packet; rtp_packet.Parse(packet); @@ -305,7 +337,7 @@ bool VideoAnalyzer::SendRtp(rtc::ArrayView packet, return result; } -bool VideoAnalyzer::SendRtcp(rtc::ArrayView packet) { +bool VideoAnalyzer::SendRtcp(ArrayView packet) { return transport_->SendRtcp(packet); } @@ -317,9 +349,10 @@ void VideoAnalyzer::OnFrame(const VideoFrame& video_frame) { StartExcludingCpuThreadTime(); int64_t send_timestamp = - wrap_handler_.Unwrap(video_frame.timestamp() - rtp_timestamp_delta_); + wrap_handler_.Unwrap(video_frame.rtp_timestamp() - rtp_timestamp_delta_); - while (wrap_handler_.Unwrap(frames_.front().timestamp()) < send_timestamp) { + while (wrap_handler_.Unwrap(frames_.front().rtp_timestamp()) < + send_timestamp) { if (!last_rendered_frame_) { // No previous frame rendered, this one was dropped after sending but // before rendering. @@ -335,7 +368,7 @@ void VideoAnalyzer::OnFrame(const VideoFrame& video_frame) { VideoFrame reference_frame = frames_.front(); frames_.pop_front(); int64_t reference_timestamp = - wrap_handler_.Unwrap(reference_frame.timestamp()); + wrap_handler_.Unwrap(reference_frame.rtp_timestamp()); if (send_timestamp == reference_timestamp - 1) { // TODO(ivica): Make this work for > 2 streams. // Look at RTPSender::BuildRTPHeader. 
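
VideoAnalyzer::OnFrame() above compares frames through wrap_handler_.Unwrap(), which maps the wrapping 32-bit RTP timestamps onto a monotonically increasing 64-bit timeline so ordering comparisons survive a wrap. A simplified unwrapper sketch in plain C++; the real RtpTimestampUnwrapper is more careful about edge cases:

#include <cstdint>

class TimestampUnwrapper32 {
 public:
  int64_t Unwrap(uint32_t ts) {
    if (!has_last_) {
      has_last_ = true;
      last_ts_ = ts;
      unwrapped_ = ts;
      return unwrapped_;
    }
    // Interpreting the modular difference as a signed 32-bit step picks the
    // shortest distance, so a jump from 0xffffff00 to 0x00000100 counts as
    // +512 rather than a huge negative step.
    const int32_t delta = static_cast<int32_t>(ts - last_ts_);
    unwrapped_ += delta;
    last_ts_ = ts;
    return unwrapped_;
  }

 private:
  bool has_last_ = false;
  uint32_t last_ts_ = 0;
  int64_t unwrapped_ = 0;
};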
@@ -410,24 +443,24 @@ void VideoAnalyzer::Wait() { void VideoAnalyzer::StartMeasuringCpuProcessTime() { MutexLock lock(&cpu_measurement_lock_); - cpu_time_ -= rtc::GetProcessCpuTimeNanos(); - wallclock_time_ -= rtc::SystemTimeNanos(); + cpu_time_ -= GetProcessCpuTimeNanos(); + wallclock_time_ -= SystemTimeNanos(); } void VideoAnalyzer::StopMeasuringCpuProcessTime() { MutexLock lock(&cpu_measurement_lock_); - cpu_time_ += rtc::GetProcessCpuTimeNanos(); - wallclock_time_ += rtc::SystemTimeNanos(); + cpu_time_ += GetProcessCpuTimeNanos(); + wallclock_time_ += SystemTimeNanos(); } void VideoAnalyzer::StartExcludingCpuThreadTime() { MutexLock lock(&cpu_measurement_lock_); - cpu_time_ += rtc::GetThreadCpuTimeNanos(); + cpu_time_ += GetThreadCpuTimeNanos(); } void VideoAnalyzer::StopExcludingCpuThreadTime() { MutexLock lock(&cpu_measurement_lock_); - cpu_time_ -= rtc::GetThreadCpuTimeNanos(); + cpu_time_ -= GetThreadCpuTimeNanos(); } double VideoAnalyzer::GetCpuUsagePercent() { @@ -440,7 +473,7 @@ bool VideoAnalyzer::IsInSelectedSpatialAndTemporalLayer( if (rtp_packet.PayloadType() == test::VideoTestConstants::kPayloadTypeVP8) { auto parsed_payload = vp8_depacketizer_->Parse(rtp_packet.PayloadBuffer()); RTC_DCHECK(parsed_payload); - const auto& vp8_header = absl::get( + const auto& vp8_header = std::get( parsed_payload->video_header.video_type_header); int temporal_idx = vp8_header.temporalIdx; return selected_tl_ < 0 || temporal_idx == kNoTemporalIdx || @@ -450,7 +483,7 @@ bool VideoAnalyzer::IsInSelectedSpatialAndTemporalLayer( if (rtp_packet.PayloadType() == test::VideoTestConstants::kPayloadTypeVP9) { auto parsed_payload = vp9_depacketizer_->Parse(rtp_packet.PayloadBuffer()); RTC_DCHECK(parsed_payload); - const auto& vp9_header = absl::get( + const auto& vp9_header = std::get( parsed_payload->video_header.video_type_header); int temporal_idx = vp9_header.temporal_idx; int spatial_idx = vp9_header.spatial_idx; @@ -537,7 +570,7 @@ void VideoAnalyzer::PollStats() { audio_jitter_buffer_ms_.AddSample(receive_stats.jitter_buffer_ms); } - memory_usage_.AddSample(rtc::GetProcessResidentSizeBytes()); + memory_usage_.AddSample(GetProcessResidentSizeBytes()); } bool VideoAnalyzer::CompareFrames() { @@ -668,7 +701,7 @@ void VideoAnalyzer::PrintResults() { const double total_freezes_duration_ms_double = static_cast(total_freezes_duration_ms_); const double total_frames_duration_ms_double = - total_inter_frame_delay_ * rtc::kNumMillisecsPerSec; + total_inter_frame_delay_ * kNumMillisecsPerSec; if (total_frames_duration_ms_double > 0) { GetGlobalMetricsLogger()->LogSingleValueMetric( @@ -906,7 +939,7 @@ void VideoAnalyzer::AddFrameComparison(const VideoFrame& reference, const VideoFrame& render, bool dropped, int64_t render_time_ms) { - int64_t reference_timestamp = wrap_handler_.Unwrap(reference.timestamp()); + int64_t reference_timestamp = wrap_handler_.Unwrap(reference.rtp_timestamp()); int64_t send_time_ms = send_times_[reference_timestamp]; send_times_.erase(reference_timestamp); int64_t recv_time_ms = recv_times_[reference_timestamp]; @@ -1011,10 +1044,10 @@ void VideoAnalyzer::CapturedFrameForwarder::OnFrame( VideoFrame copy = video_frame; // Frames from the capturer does not have a rtp timestamp. // Create one so it can be used for comparison. 
- RTC_DCHECK_EQ(0, video_frame.timestamp()); + RTC_DCHECK_EQ(0, video_frame.rtp_timestamp()); if (video_frame.ntp_time_ms() == 0) copy.set_ntp_time_ms(clock_->CurrentNtpInMilliseconds()); - copy.set_timestamp(copy.ntp_time_ms() * 90); + copy.set_rtp_timestamp(copy.ntp_time_ms() * 90); analyzer_->AddCapturedFrameForComparison(copy); MutexLock lock(&lock_); ++captured_frames_; @@ -1025,8 +1058,8 @@ void VideoAnalyzer::CapturedFrameForwarder::OnFrame( } void VideoAnalyzer::CapturedFrameForwarder::AddOrUpdateSink( - rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { + VideoSinkInterface* sink, + const VideoSinkWants& wants) { { MutexLock lock(&lock_); RTC_DCHECK(!send_stream_input_ || send_stream_input_ == sink); @@ -1038,7 +1071,7 @@ void VideoAnalyzer::CapturedFrameForwarder::AddOrUpdateSink( } void VideoAnalyzer::CapturedFrameForwarder::RemoveSink( - rtc::VideoSinkInterface* sink) { + VideoSinkInterface* sink) { MutexLock lock(&lock_); RTC_DCHECK(sink == send_stream_input_); send_stream_input_ = nullptr; diff --git a/video/video_analyzer.h b/video/video_analyzer.h index 8c8100c67e..6d68a5daf8 100644 --- a/video/video_analyzer.h +++ b/video/video_analyzer.h @@ -35,7 +35,7 @@ namespace webrtc { class VideoAnalyzer : public PacketReceiver, public Transport, - public rtc::VideoSinkInterface { + public VideoSinkInterface { public: VideoAnalyzer(test::LayerFilteringTransport* transport, const std::string& test_label, @@ -57,17 +57,17 @@ class VideoAnalyzer : public PacketReceiver, ~VideoAnalyzer(); virtual void SetReceiver(PacketReceiver* receiver); - void SetSource(rtc::VideoSourceInterface* video_source, + void SetSource(VideoSourceInterface* video_source, bool respect_sink_wants); void SetCall(Call* call); void SetSendStream(VideoSendStream* stream); void SetReceiveStream(VideoReceiveStreamInterface* stream); void SetAudioReceiveStream(AudioReceiveStreamInterface* recv_stream); - rtc::VideoSinkInterface* InputInterface(); - rtc::VideoSourceInterface* OutputInterface(); + VideoSinkInterface* InputInterface(); + VideoSourceInterface* OutputInterface(); - void DeliverRtcpPacket(rtc::CopyOnWriteBuffer packet) override; + void DeliverRtcpPacket(CopyOnWriteBuffer packet) override; void DeliverRtpPacket(MediaType media_type, RtpPacketReceived packet, PacketReceiver::OnUndemuxablePacketHandler @@ -76,10 +76,10 @@ class VideoAnalyzer : public PacketReceiver, void PreEncodeOnFrame(const VideoFrame& video_frame); void PostEncodeOnFrame(size_t stream_id, uint32_t timestamp); - bool SendRtp(rtc::ArrayView packet, + bool SendRtp(ArrayView packet, const PacketOptions& options) override; - bool SendRtcp(rtc::ArrayView packet) override; + bool SendRtcp(ArrayView packet) override; void OnFrame(const VideoFrame& video_frame) override; void Wait(); @@ -110,8 +110,8 @@ class VideoAnalyzer : public PacketReceiver, int64_t render_time_ms, size_t encoded_frame_size); - absl::optional reference; - absl::optional render; + std::optional reference; + std::optional render; bool dropped; int64_t input_time_ms; int64_t send_time_ms; @@ -145,32 +145,31 @@ class VideoAnalyzer : public PacketReceiver, // as a source to VideoSendStream. // It forwards all input frames to the VideoAnalyzer for later comparison and // forwards the captured frames to the VideoSendStream. 
- class CapturedFrameForwarder : public rtc::VideoSinkInterface, - public rtc::VideoSourceInterface { + class CapturedFrameForwarder : public VideoSinkInterface, + public VideoSourceInterface { public: CapturedFrameForwarder(VideoAnalyzer* analyzer, Clock* clock, int frames_to_capture, TimeDelta test_duration); - void SetSource(rtc::VideoSourceInterface* video_source); + void SetSource(VideoSourceInterface* video_source); private: void OnFrame(const VideoFrame& video_frame) RTC_LOCKS_EXCLUDED(lock_) override; // Called when `send_stream_.SetSource()` is called. - void AddOrUpdateSink(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) + void AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) RTC_LOCKS_EXCLUDED(lock_) override; // Called by `send_stream_` when `send_stream_.SetSource()` is called. - void RemoveSink(rtc::VideoSinkInterface* sink) + void RemoveSink(VideoSinkInterface* sink) RTC_LOCKS_EXCLUDED(lock_) override; VideoAnalyzer* const analyzer_; Mutex lock_; - rtc::VideoSinkInterface* send_stream_input_ - RTC_GUARDED_BY(lock_); + VideoSinkInterface* send_stream_input_ RTC_GUARDED_BY(lock_); VideoSourceInterface* video_source_; Clock* clock_; int captured_frames_ RTC_GUARDED_BY(lock_); @@ -259,7 +258,7 @@ class VideoAnalyzer : public PacketReceiver, SamplesStatsCounter audio_jitter_buffer_ms_ RTC_GUARDED_BY(comparison_lock_); SamplesStatsCounter pixels_ RTC_GUARDED_BY(comparison_lock_); // Rendered frame with worst PSNR is saved for further analysis. - absl::optional worst_frame_ RTC_GUARDED_BY(comparison_lock_); + std::optional worst_frame_ RTC_GUARDED_BY(comparison_lock_); // Freeze metrics. SamplesStatsCounter time_between_freezes_ RTC_GUARDED_BY(comparison_lock_); uint32_t freeze_count_ RTC_GUARDED_BY(comparison_lock_); @@ -292,22 +291,22 @@ class VideoAnalyzer : public PacketReceiver, int64_t wallclock_time_ RTC_GUARDED_BY(cpu_measurement_lock_); std::deque frames_ RTC_GUARDED_BY(lock_); - absl::optional last_rendered_frame_ RTC_GUARDED_BY(lock_); + std::optional last_rendered_frame_ RTC_GUARDED_BY(lock_); RtpTimestampUnwrapper wrap_handler_ RTC_GUARDED_BY(lock_); std::map send_times_ RTC_GUARDED_BY(lock_); std::map recv_times_ RTC_GUARDED_BY(lock_); std::map encoded_frame_sizes_ RTC_GUARDED_BY(lock_); - absl::optional first_encoded_timestamp_ RTC_GUARDED_BY(lock_); - absl::optional first_sent_timestamp_ RTC_GUARDED_BY(lock_); + std::optional first_encoded_timestamp_ RTC_GUARDED_BY(lock_); + std::optional first_sent_timestamp_ RTC_GUARDED_BY(lock_); const double avg_psnr_threshold_; const double avg_ssim_threshold_; bool is_quick_test_enabled_; - std::vector comparison_thread_pool_; - rtc::Event comparison_available_event_; + std::vector comparison_thread_pool_; + Event comparison_available_event_; std::deque comparisons_ RTC_GUARDED_BY(comparison_lock_); bool quit_ RTC_GUARDED_BY(comparison_lock_); - rtc::Event done_; + Event done_; std::unique_ptr vp8_depacketizer_; std::unique_ptr vp9_depacketizer_; diff --git a/video/video_loopback.cc b/video/video_loopback.cc index ba0a0e5745..09254d7e73 100644 --- a/video/video_loopback.cc +++ b/video/video_loopback.cc @@ -12,15 +12,16 @@ #include #include +#include #include #include #include "absl/flags/flag.h" #include "absl/flags/parse.h" -#include "absl/types/optional.h" #include "api/test/simulated_network.h" #include "api/test/video_quality_test_fixture.h" #include "api/transport/bitrate_settings.h" +#include "api/units/data_rate.h" #include "api/video_codecs/video_codec.h" #include 
"rtc_base/checks.h" #include "rtc_base/logging.h" @@ -28,6 +29,7 @@ #include "test/field_trial.h" #include "test/gtest.h" #include "test/run_test.h" +#include "test/test_flags.h" #include "video/video_quality_test.h" // Flags common with screenshare loopback, with different default values. @@ -199,16 +201,6 @@ ABSL_FLAG(bool, ABSL_FLAG(bool, video, true, "Add video stream"); -ABSL_FLAG( - std::string, - force_fieldtrials, - "", - "Field trials control experimental feature code which can be forced. " - "E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enabled/" - " will assign the group Enable to field trial WebRTC-FooFeature. Multiple " - "trials are separated by \"/\""); - -// Video-specific flags. ABSL_FLAG(std::string, clip, "", @@ -264,7 +256,7 @@ InterLayerPredMode InterLayerPred() { } } -std::string Codec() { +std::string CodecName() { return absl::GetFlag(FLAGS_codec); } @@ -292,8 +284,10 @@ int AvgBurstLossLength() { return static_cast(absl::GetFlag(FLAGS_avg_burst_loss_length)); } -int LinkCapacityKbps() { - return static_cast(absl::GetFlag(FLAGS_link_capacity)); +DataRate LinkCapacity() { + int link_capacity_kbps = absl::GetFlag(FLAGS_link_capacity); + return link_capacity_kbps == 0 ? DataRate::Infinity() + : DataRate::KilobitsPerSec(link_capacity_kbps); } int QueueSize() { @@ -366,7 +360,7 @@ void Loopback() { BuiltInNetworkBehaviorConfig pipe_config; pipe_config.loss_percent = LossPercent(); pipe_config.avg_burst_loss_length = AvgBurstLossLength(); - pipe_config.link_capacity_kbps = LinkCapacityKbps(); + pipe_config.link_capacity = LinkCapacity(); pipe_config.queue_length_packets = QueueSize(); pipe_config.queue_delay_ms = AvgPropagationDelayMs(); pipe_config.delay_standard_deviation_ms = StdPropagationDelayMs(); @@ -393,7 +387,7 @@ void Loopback() { params.video[0].max_bitrate_bps = MaxBitrateKbps() * 1000; params.video[0].suspend_below_min_bitrate = absl::GetFlag(FLAGS_suspend_below_min_bitrate); - params.video[0].codec = Codec(); + params.video[0].codec = CodecName(); params.video[0].num_temporal_layers = NumTemporalLayers(); params.video[0].selected_tl = SelectedTL(); params.video[0].min_transmit_bps = 0; @@ -427,15 +421,16 @@ void Loopback() { SL_descriptors.push_back(SL0()); SL_descriptors.push_back(SL1()); SL_descriptors.push_back(SL2()); - VideoQualityTest::FillScalabilitySettings( + + VideoQualityTest fixture(nullptr); + fixture.FillScalabilitySettings( ¶ms, 0, stream_descriptors, NumStreams(), SelectedStream(), NumSpatialLayers(), SelectedSL(), InterLayerPred(), SL_descriptors); - auto fixture = std::make_unique(nullptr); if (DurationSecs()) { - fixture->RunWithAnalyzer(params); + fixture.RunWithAnalyzer(params); } else { - fixture->RunWithRenderers(params); + fixture.RunWithRenderers(params); } } @@ -443,7 +438,7 @@ int RunLoopbackTest(int argc, char* argv[]) { ::testing::InitGoogleTest(&argc, argv); absl::ParseCommandLine(argc, argv); - rtc::LogMessage::SetLogToStderr(absl::GetFlag(FLAGS_logs)); + LogMessage::SetLogToStderr(absl::GetFlag(FLAGS_logs)); // InitFieldTrialsFromString stores the char*, so the char array must outlive // the application. 
diff --git a/video/video_quality_observer2.cc b/video/video_quality_observer2.cc index 0afc2f5235..3c48fd2747 100644 --- a/video/video_quality_observer2.cc +++ b/video/video_quality_observer2.cc @@ -63,7 +63,7 @@ void VideoQualityObserver::UpdateHistograms(bool screenshare) { } char log_stream_buf[2 * 1024]; - rtc::SimpleStringBuilder log_stream(log_stream_buf); + SimpleStringBuilder log_stream(log_stream_buf); if (last_frame_rendered_ms_ > last_unfreeze_time_ms_) { smooth_playback_durations_.Add(last_frame_rendered_ms_ - @@ -165,7 +165,7 @@ void VideoQualityObserver::OnRenderedFrame( bool was_freeze = false; if (render_interframe_delays_.Size() >= kMinFrameSamplesToDetectFreeze) { - const absl::optional avg_interframe_delay = + const std::optional avg_interframe_delay = render_interframe_delays_.GetAverageRoundedDown(); RTC_DCHECK(avg_interframe_delay); was_freeze = interframe_delay_ms >= @@ -231,12 +231,12 @@ void VideoQualityObserver::OnRenderedFrame( } void VideoQualityObserver::OnDecodedFrame(uint32_t rtp_frame_timestamp, - absl::optional qp, + std::optional qp, VideoCodecType codec) { if (!qp) return; - absl::optional qp_blocky_threshold; + std::optional qp_blocky_threshold; // TODO(ilnik): add other codec types when we have QP for them. switch (codec) { case kVideoCodecVP8: @@ -246,7 +246,7 @@ void VideoQualityObserver::OnDecodedFrame(uint32_t rtp_frame_timestamp, qp_blocky_threshold = kBlockyQpThresholdVp9; break; default: - qp_blocky_threshold = absl::nullopt; + qp_blocky_threshold = std::nullopt; } RTC_DCHECK(blocky_frames_.find(rtp_frame_timestamp) == blocky_frames_.end()); diff --git a/video/video_quality_observer2.h b/video/video_quality_observer2.h index 35877858d4..e9b66c5dfd 100644 --- a/video/video_quality_observer2.h +++ b/video/video_quality_observer2.h @@ -13,10 +13,10 @@ #include +#include #include #include -#include "absl/types/optional.h" #include "api/video/video_codec_type.h" #include "api/video/video_content_type.h" #include "rtc_base/numerics/moving_average.h" @@ -37,7 +37,7 @@ class VideoQualityObserver { ~VideoQualityObserver() = default; void OnDecodedFrame(uint32_t rtp_frame_timestamp, - absl::optional qp, + std::optional qp, VideoCodecType codec); void OnRenderedFrame(const VideoFrameMetaData& frame_meta); @@ -72,14 +72,14 @@ class VideoQualityObserver { bool is_last_frame_blocky_; // Decoded timestamp of the last delayed frame. int64_t last_unfreeze_time_ms_; - rtc::MovingAverage render_interframe_delays_; + MovingAverage render_interframe_delays_; double sum_squared_interframe_delays_secs_; // An inter-frame delay is counted as a freeze if it's significantly longer // than average inter-frame delay. - rtc::SampleCounter freezes_durations_; - rtc::SampleCounter pauses_durations_; + SampleCounter freezes_durations_; + SampleCounter pauses_durations_; // Time between freezes. - rtc::SampleCounter smooth_playback_durations_; + SampleCounter smooth_playback_durations_; // Counters for time spent in different resolutions. Time between each two // Consecutive frames is counted to bin corresponding to the first frame // resolution. 
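
VideoQualityObserver::OnRenderedFrame() above only classifies an inter-frame gap as a freeze once enough delay samples exist to trust the running average, and then compares the gap against a threshold derived from that average. A hedged sketch of such a check in plain C++; the 3x-average / plus-150 ms threshold used here is the conventional heuristic and is an assumption, not copied from this diff:

#include <algorithm>
#include <cstdint>
#include <deque>
#include <numeric>

// Assumed thresholds: a gap counts as a freeze if it is at least
// max(3 * avg_delay, avg_delay + 150 ms), once enough samples exist.
constexpr int64_t kMinExtraDelayForFreezeMs = 150;
constexpr size_t kMinSamplesToDetectFreeze = 5;

class FreezeDetector {
 public:
  // Returns true if `interframe_delay_ms` should be counted as a freeze.
  bool OnRenderedFrame(int64_t interframe_delay_ms) {
    bool was_freeze = false;
    if (delays_.size() >= kMinSamplesToDetectFreeze) {
      const int64_t avg =
          std::accumulate(delays_.begin(), delays_.end(), int64_t{0}) /
          static_cast<int64_t>(delays_.size());
      was_freeze = interframe_delay_ms >=
                   std::max(3 * avg, avg + kMinExtraDelayForFreezeMs);
    }
    delays_.push_back(interframe_delay_ms);
    if (delays_.size() > kMaxSamples) delays_.pop_front();
    return was_freeze;
  }

 private:
  static constexpr size_t kMaxSamples = 30;
  std::deque<int64_t> delays_;
};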
diff --git a/video/video_quality_test.cc b/video/video_quality_test.cc index 010e2ed325..39494cbe85 100644 --- a/video/video_quality_test.cc +++ b/video/video_quality_test.cc @@ -11,17 +11,63 @@ #include +#include +#include +#include +#include + +#include "absl/flags/flag.h" +#include "api/call/transport.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" +#include "api/make_ref_counted.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "api/test/frame_generator_interface.h" +#include "api/test/simulated_network.h" +#include "api/units/time_delta.h" +#include "api/video/encoded_image.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_source_interface.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/spatial_layer.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_decoder.h" +#include "call/audio_receive_stream.h" +#include "call/audio_send_stream.h" +#include "call/audio_state.h" +#include "call/call_config.h" +#include "call/video_receive_stream.h" +#include "call/video_send_stream.h" +#include "media/engine/internal_decoder_factory.h" +#include "modules/audio_device/include/test_audio_device.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/system/file_wrapper.h" +#include "test/direct_transport.h" +#include "test/frame_generator_capturer.h" +#include "test/gtest.h" +#include "test/layer_filtering_transport.h" +#include "video/config/video_encoder_config.h" +#include "video/video_analyzer.h" + #if defined(WEBRTC_WIN) #include #endif #include -#include -#include #include #include #include +#include "api/audio/audio_device.h" +#include "api/audio/builtin_audio_processing_builder.h" #include "api/fec_controller_override.h" #include "api/rtc_event_log_output_file.h" #include "api/task_queue/default_task_queue_factory.h" @@ -30,24 +76,19 @@ #include "api/video/builtin_video_bitrate_allocator_factory.h" #include "api/video_codecs/video_encoder.h" #include "call/fake_network_pipe.h" -#include "call/simulated_network.h" #include "media/base/media_constants.h" #include "media/engine/adm_helpers.h" #include "media/engine/fake_video_codec_factory.h" #include "media/engine/internal_encoder_factory.h" #include "media/engine/simulcast_encoder_adapter.h" #include "media/engine/webrtc_video_engine.h" -#include "modules/audio_device/include/audio_device.h" #include "modules/audio_mixer/audio_mixer_impl.h" -#include "modules/video_coding/codecs/h264/include/h264.h" -#include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h" -#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h" -#include "modules/video_coding/codecs/vp8/include/vp8.h" -#include "modules/video_coding/codecs/vp9/include/vp9.h" #include "modules/video_coding/utility/ivf_file_writer.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/task_queue_for_test.h" +#include "test/network/simulated_network.h" #include "test/platform_video_capturer.h" +#include "test/test_flags.h" #include "test/testsupport/file_utils.h" #include "test/video_renderer.h" #include "video/frame_dumping_decoder.h" @@ -76,8 +117,6 @@ constexpr int 
kFramesSentInQuickTest = 1; constexpr uint32_t kThumbnailSendSsrcStart = 0xE0000; constexpr uint32_t kThumbnailRtxSsrcStart = 0xF0000; -constexpr int kDefaultMaxQp = cricket::WebRtcVideoSendChannel::kDefaultQpMax; - const VideoEncoder::Capabilities kCapabilities(false); std::pair GetMinMaxBitratesBps(const VideoCodec& codec, @@ -109,6 +148,7 @@ class VideoStreamFactory private: std::vector CreateEncoderStreams( + const FieldTrialsView& /*field_trials*/, int frame_width, int frame_height, const VideoEncoderConfig& encoder_config) override { @@ -194,8 +234,8 @@ class QualityTestVideoEncoder : public VideoEncoder, std::tie(min_bitrate_bps, max_bitrate_bps) = GetMinMaxBitratesBps(codec_settings_, si); double overshoot_factor = overshoot_factor_; - const uint32_t corrected_bitrate = rtc::checked_cast( - overshoot_factor * spatial_layer_bitrate_bps); + const uint32_t corrected_bitrate = + checked_cast(overshoot_factor * spatial_layer_bitrate_bps); if (corrected_bitrate < min_bitrate_bps) { overshoot_factor = min_bitrate_bps / spatial_layer_bitrate_bps; } else if (corrected_bitrate > max_bitrate_bps) { @@ -206,8 +246,8 @@ class QualityTestVideoEncoder : public VideoEncoder, if (parameters.bitrate.HasBitrate(si, ti)) { overshot_allocation.SetBitrate( si, ti, - rtc::checked_cast( - overshoot_factor * parameters.bitrate.GetBitrate(si, ti))); + checked_cast(overshoot_factor * + parameters.bitrate.GetBitrate(si, ti))); } } } @@ -246,7 +286,7 @@ class QualityTestVideoEncoder : public VideoEncoder, RTC_DCHECK_GE(simulcast_index, 0); if (analyzer_) { analyzer_->PostEncodeOnFrame(simulcast_index, - encoded_image.Timestamp()); + encoded_image.RtpTimestamp()); } if (static_cast(simulcast_index) < writers_.size()) { writers_[simulcast_index]->WriteFrame(encoded_image, @@ -295,18 +335,16 @@ void PressEnterToContinue(TaskQueueBase* /*task_queue*/) { } // namespace std::unique_ptr VideoQualityTest::CreateVideoDecoder( + const Environment& env, const SdpVideoFormat& format) { std::unique_ptr decoder; - if (format.name == "multiplex") { - decoder = std::make_unique( - decoder_factory_.get(), SdpVideoFormat(cricket::kVp9CodecName)); - } else if (format.name == "FakeCodec") { + if (format.name == "FakeCodec") { decoder = webrtc::FakeVideoDecoderFactory::CreateVideoDecoder(); } else { - decoder = decoder_factory_->CreateVideoDecoder(format); + decoder = decoder_factory_->Create(env, format); } if (!params_.logging.encoded_frame_base_path.empty()) { - rtc::StringBuilder str; + StringBuilder str; str << receive_logs_++; std::string path = params_.logging.encoded_frame_base_path + "." 
+ str.str() + ".recv.ivf"; @@ -317,25 +355,21 @@ std::unique_ptr VideoQualityTest::CreateVideoDecoder( } std::unique_ptr VideoQualityTest::CreateVideoEncoder( + const Environment& env, const SdpVideoFormat& format, VideoAnalyzer* analyzer) { std::unique_ptr encoder; - if (format.name == "VP8") { - encoder = std::make_unique(encoder_factory_.get(), - format); - } else if (format.name == "multiplex") { - encoder = std::make_unique( - encoder_factory_.get(), SdpVideoFormat(cricket::kVp9CodecName)); - } else if (format.name == "FakeCodec") { - encoder = webrtc::FakeVideoEncoderFactory::CreateVideoEncoder(); + if (format.name == "FakeCodec") { + encoder = FakeVideoEncoderFactory().Create(env, format); } else { - encoder = encoder_factory_->CreateVideoEncoder(format); + encoder = std::make_unique( + env, encoder_factory_.get(), nullptr, format); } std::vector encoded_frame_dump_files; if (!params_.logging.encoded_frame_base_path.empty()) { char ss_buf[100]; - rtc::SimpleStringBuilder sb(ss_buf); + SimpleStringBuilder sb(ss_buf); sb << send_logs_++; std::string prefix = params_.logging.encoded_frame_base_path + "." + sb.str() + ".send."; @@ -376,16 +410,17 @@ VideoQualityTest::VideoQualityTest( std::unique_ptr injection_components) : clock_(Clock::GetRealTimeClock()), task_queue_factory_(CreateDefaultTaskQueueFactory()), - rtc_event_log_factory_(task_queue_factory_.get()), - video_decoder_factory_([this](const SdpVideoFormat& format) { - return this->CreateVideoDecoder(format); - }), - video_encoder_factory_([this](const SdpVideoFormat& format) { - return this->CreateVideoEncoder(format, nullptr); - }), + video_decoder_factory_( + [this](const Environment& env, const SdpVideoFormat& format) { + return this->CreateVideoDecoder(env, format); + }), + video_encoder_factory_( + [this](const Environment& env, const SdpVideoFormat& format) { + return this->CreateVideoEncoder(env, format, nullptr); + }), video_encoder_factory_with_analyzer_( - [this](const SdpVideoFormat& format) { - return this->CreateVideoEncoder(format, analyzer_.get()); + [this](const Environment& env, const SdpVideoFormat& format) { + return this->CreateVideoEncoder(env, format, analyzer_.get()); }), video_bitrate_allocator_factory_( CreateBuiltinVideoBitrateAllocatorFactory()), @@ -437,7 +472,7 @@ VideoQualityTest::InjectionComponents::~InjectionComponents() = default; void VideoQualityTest::TestBody() {} std::string VideoQualityTest::GenerateGraphTitle() const { - rtc::StringBuilder ss; + StringBuilder ss; ss << params_.video[0].codec; ss << " (" << params_.video[0].target_bitrate_bps / 1000 << "kbps"; ss << ", " << params_.video[0].fps << " FPS"; @@ -578,7 +613,7 @@ VideoStream VideoQualityTest::DefaultVideoStream(const Params& params, stream.min_bitrate_bps = params.video[video_idx].min_bitrate_bps; stream.target_bitrate_bps = params.video[video_idx].target_bitrate_bps; stream.max_bitrate_bps = params.video[video_idx].max_bitrate_bps; - stream.max_qp = kDefaultMaxQp; + stream.max_qp = kDefaultVideoMaxQpVpx; stream.num_temporal_layers = params.video[video_idx].num_temporal_layers; stream.active = true; return stream; @@ -593,11 +628,10 @@ VideoStream VideoQualityTest::DefaultThumbnailStream() { stream.min_bitrate_bps = 7500; stream.target_bitrate_bps = 37500; stream.max_bitrate_bps = 50000; - stream.max_qp = kDefaultMaxQp; + stream.max_qp = kDefaultVideoMaxQpVpx; return stream; } -// Static. 
void VideoQualityTest::FillScalabilitySettings( Params* params, size_t video_idx, @@ -625,13 +659,11 @@ void VideoQualityTest::FillScalabilitySettings( encoder_config.spatial_layers = params->ss[video_idx].spatial_layers; encoder_config.simulcast_layers = std::vector(num_streams); encoder_config.video_stream_factory = - rtc::make_ref_counted( - params->video[video_idx].codec, kDefaultMaxQp, - params->screenshare[video_idx].enabled, true, encoder_info); + make_ref_counted(encoder_info); params->ss[video_idx].streams = encoder_config.video_stream_factory->CreateEncoderStreams( - params->video[video_idx].width, params->video[video_idx].height, - encoder_config); + env().field_trials(), params->video[video_idx].width, + params->video[video_idx].height, encoder_config); } else { // Read VideoStream and SpatialLayer elements from a list of comma separated // lists. To use a default value for an element, use -1 or leave empty. @@ -724,8 +756,6 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, payload_type = test::VideoTestConstants::kPayloadTypeVP8; } else if (params_.video[video_idx].codec == "VP9") { payload_type = test::VideoTestConstants::kPayloadTypeVP9; - } else if (params_.video[video_idx].codec == "multiplex") { - payload_type = test::VideoTestConstants::kPayloadTypeVP9; } else if (params_.video[video_idx].codec == "FakeCodec") { payload_type = test::VideoTestConstants::kFakeVideoSendPayloadType; } else { @@ -809,11 +839,6 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, video_encoder_configs_[video_idx].simulcast_layers = params_.ss[video_idx].streams; } - video_encoder_configs_[video_idx].video_stream_factory = - rtc::make_ref_counted( - params_.video[video_idx].codec, - params_.ss[video_idx].streams[0].max_qp, - params_.screenshare[video_idx].enabled, true, encoder_info); video_encoder_configs_[video_idx].spatial_layers = params_.ss[video_idx].spatial_layers; @@ -822,7 +847,7 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, decode_all_receive_streams = params_.ss[video_idx].selected_stream == params_.ss[video_idx].streams.size(); - absl::optional decode_sub_stream; + std::optional decode_sub_stream; if (!decode_all_receive_streams) decode_sub_stream = params_.ss[video_idx].selected_stream; CreateMatchingVideoReceiveConfigs( @@ -833,6 +858,7 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, // Fill out codec settings. 
video_encoder_configs_[video_idx].content_type = VideoEncoderConfig::ContentType::kScreen; + video_encoder_configs_[video_idx].legacy_conference_mode = true; degradation_preference_ = DegradationPreference::MAINTAIN_RESOLUTION; if (params_.video[video_idx].codec == "VP8") { VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings(); @@ -840,8 +866,8 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, vp8_settings.numberOfTemporalLayers = static_cast( params_.video[video_idx].num_temporal_layers); video_encoder_configs_[video_idx].encoder_specific_settings = - rtc::make_ref_counted< - VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); + make_ref_counted( + vp8_settings); } else if (params_.video[video_idx].codec == "VP9") { VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); vp9_settings.denoisingOn = false; @@ -856,8 +882,8 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, vp9_settings.flexibleMode = true; } video_encoder_configs_[video_idx].encoder_specific_settings = - rtc::make_ref_counted< - VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); + make_ref_counted( + vp9_settings); } } else if (params_.ss[video_idx].num_spatial_layers > 1) { // If SVC mode without screenshare, still need to set codec specifics. @@ -870,7 +896,7 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, vp9_settings.interLayerPred = params_.ss[video_idx].inter_layer_pred; vp9_settings.automaticResizeOn = false; video_encoder_configs_[video_idx].encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( vp9_settings); RTC_DCHECK_EQ(video_encoder_configs_[video_idx].simulcast_layers.size(), 1); @@ -881,19 +907,19 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings(); vp8_settings.automaticResizeOn = true; video_encoder_configs_[video_idx].encoder_specific_settings = - rtc::make_ref_counted< - VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); + make_ref_counted( + vp8_settings); } else if (params_.video[video_idx].codec == "VP9") { VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); // Only enable quality scaler for single spatial layer. vp9_settings.automaticResizeOn = params_.ss[video_idx].num_spatial_layers == 1; video_encoder_configs_[video_idx].encoder_specific_settings = - rtc::make_ref_counted< - VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); + make_ref_counted( + vp9_settings); } else if (params_.video[video_idx].codec == "H264") { // Quality scaling is always on for H.264. - } else if (params_.video[video_idx].codec == cricket::kAv1CodecName) { + } else if (params_.video[video_idx].codec == kAv1CodecName) { // TODO(bugs.webrtc.org/11404): Propagate the flag to // aom_codec_enc_cfg_t::rc_resize_mode in Av1 encoder wrapper. // Until then do nothing, specially do not crash. 
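The screenshare/SVC hunk above only swaps rtc::make_ref_counted for the unqualified make_ref_counted (and cricket::kAv1CodecName for kAv1CodecName); the way codec-specific settings are attached to a VideoEncoderConfig is unchanged. A condensed sketch of that pattern, using only identifiers that appear in the hunk and headers from the include block earlier in this file (an illustration under those assumptions, not code from the patch):

#include "api/make_ref_counted.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
#include "video/config/video_encoder_config.h"

// Attach default VP9 settings, with automatic resize disabled, to an
// encoder config, mirroring the multi-spatial-layer branch above.
void ApplyDefaultVp9Settings(webrtc::VideoEncoderConfig& config) {
  webrtc::VideoCodecVP9 vp9_settings = webrtc::VideoEncoder::GetDefaultVp9Settings();
  vp9_settings.automaticResizeOn = false;
  config.encoder_specific_settings = webrtc::make_ref_counted<
      webrtc::VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
}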
@@ -908,14 +934,14 @@ void VideoQualityTest::SetupVideo(Transport* send_transport, VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings(); vp8_settings.automaticResizeOn = false; video_encoder_configs_[video_idx].encoder_specific_settings = - rtc::make_ref_counted< - VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); + make_ref_counted( + vp8_settings); } else if (params_.video[video_idx].codec == "VP9") { VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); vp9_settings.automaticResizeOn = false; video_encoder_configs_[video_idx].encoder_specific_settings = - rtc::make_ref_counted< - VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); + make_ref_counted( + vp9_settings); } else if (params_.video[video_idx].codec == "H264") { video_encoder_configs_[video_idx].encoder_specific_settings = nullptr; } @@ -988,16 +1014,16 @@ void VideoQualityTest::SetupThumbnails(Transport* send_transport, thumbnail_encoder_config.max_bitrate_bps = 50000; std::vector streams{params_.ss[0].streams[0]}; thumbnail_encoder_config.video_stream_factory = - rtc::make_ref_counted(streams); + make_ref_counted(streams); thumbnail_encoder_config.spatial_layers = params_.ss[0].spatial_layers; thumbnail_encoder_configs_.push_back(thumbnail_encoder_config.Copy()); thumbnail_send_configs_.push_back(thumbnail_send_config.Copy()); - AddMatchingVideoReceiveConfigs( - &thumbnail_receive_configs_, thumbnail_send_config, send_transport, - &video_decoder_factory_, absl::nullopt, false, - test::VideoTestConstants::kNackRtpHistoryMs); + AddMatchingVideoReceiveConfigs(&thumbnail_receive_configs_, + thumbnail_send_config, send_transport, + &video_decoder_factory_, std::nullopt, false, + test::VideoTestConstants::kNackRtpHistoryMs); } for (size_t i = 0; i < thumbnail_send_configs_.size(); ++i) { thumbnail_send_streams_.push_back(receiver_call_->CreateVideoSendStream( @@ -1021,7 +1047,7 @@ void VideoQualityTest::DestroyThumbnailStreams() { } thumbnail_send_streams_.clear(); thumbnail_receive_streams_.clear(); - for (std::unique_ptr>& video_capturer : + for (std::unique_ptr>& video_capturer : thumbnail_capturers_) { video_capturer.reset(); } @@ -1035,7 +1061,7 @@ void VideoQualityTest::SetupThumbnailCapturers(size_t num_thumbnail_streams) { clock_, test::CreateSquareFrameGenerator(static_cast(thumbnail.width), static_cast(thumbnail.height), - absl::nullopt, absl::nullopt), + std::nullopt, std::nullopt), thumbnail.max_framerate, *task_queue_factory_); EXPECT_TRUE(frame_generator_capturer->Init()); thumbnail_capturers_.push_back(std::move(frame_generator_capturer)); @@ -1053,6 +1079,10 @@ VideoQualityTest::CreateFrameGenerator(size_t video_idx) { kWidth, kHeight, params_.screenshare[video_idx].slide_change_interval * params_.video[video_idx].fps); + } else if (!params_.video[video_idx].clip_path.empty()) { + frame_generator = test::CreateFromYuvFileFrameGenerator( + {params_.video[video_idx].clip_path}, params_.video[video_idx].width, + params_.video[video_idx].height, 1); } else { std::vector slides = params_.screenshare[video_idx].slides; if (slides.empty()) { @@ -1098,23 +1128,23 @@ void VideoQualityTest::CreateCapturers() { } else if (params_.video[video_idx].clip_path == "Generator") { frame_generator = test::CreateSquareFrameGenerator( static_cast(params_.video[video_idx].width), - static_cast(params_.video[video_idx].height), absl::nullopt, - absl::nullopt); + static_cast(params_.video[video_idx].height), std::nullopt, + std::nullopt); } else if (params_.video[video_idx].clip_path == 
"GeneratorI420A") { frame_generator = test::CreateSquareFrameGenerator( static_cast(params_.video[video_idx].width), static_cast(params_.video[video_idx].height), - test::FrameGeneratorInterface::OutputType::kI420A, absl::nullopt); + test::FrameGeneratorInterface::OutputType::kI420A, std::nullopt); } else if (params_.video[video_idx].clip_path == "GeneratorI010") { frame_generator = test::CreateSquareFrameGenerator( static_cast(params_.video[video_idx].width), static_cast(params_.video[video_idx].height), - test::FrameGeneratorInterface::OutputType::kI010, absl::nullopt); + test::FrameGeneratorInterface::OutputType::kI010, std::nullopt); } else if (params_.video[video_idx].clip_path == "GeneratorNV12") { frame_generator = test::CreateSquareFrameGenerator( static_cast(params_.video[video_idx].width), static_cast(params_.video[video_idx].height), - test::FrameGeneratorInterface::OutputType::kNV12, absl::nullopt); + test::FrameGeneratorInterface::OutputType::kNV12, std::nullopt); } else if (params_.video[video_idx].clip_path.empty()) { video_sources_[video_idx] = test::CreateVideoCapturer( params_.video[video_idx].width, params_.video[video_idx].height, @@ -1126,8 +1156,8 @@ void VideoQualityTest::CreateCapturers() { // Failed to get actual camera, use chroma generator as backup. frame_generator = test::CreateSquareFrameGenerator( static_cast(params_.video[video_idx].width), - static_cast(params_.video[video_idx].height), absl::nullopt, - absl::nullopt); + static_cast(params_.video[video_idx].height), std::nullopt, + std::nullopt); } } else { frame_generator = test::CreateFromYuvFileFrameGenerator( @@ -1222,10 +1252,10 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) { } if (!params.logging.rtc_event_log_name.empty()) { - send_event_log_ = rtc_event_log_factory_.CreateRtcEventLog( - RtcEventLog::EncodingType::NewFormat); - recv_event_log_ = rtc_event_log_factory_.CreateRtcEventLog( - RtcEventLog::EncodingType::NewFormat); + std::unique_ptr send_event_log = + rtc_event_log_factory_.Create(env()); + std::unique_ptr recv_event_log = + rtc_event_log_factory_.Create(env()); std::unique_ptr send_output( std::make_unique( params.logging.rtc_event_log_name + "_send", @@ -1235,26 +1265,25 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) { params.logging.rtc_event_log_name + "_recv", RtcEventLog::kUnlimitedOutput)); bool event_log_started = - send_event_log_->StartLogging(std::move(send_output), - RtcEventLog::kImmediateOutput) && - recv_event_log_->StartLogging(std::move(recv_output), - RtcEventLog::kImmediateOutput); + send_event_log->StartLogging(std::move(send_output), + RtcEventLog::kImmediateOutput) && + recv_event_log->StartLogging(std::move(recv_output), + RtcEventLog::kImmediateOutput); RTC_DCHECK(event_log_started); - } else { - send_event_log_ = std::make_unique(); - recv_event_log_ = std::make_unique(); + SetSendEventLog(std::move(send_event_log)); + SetRecvEventLog(std::move(recv_event_log)); } SendTask(task_queue(), [this, ¶ms, &send_transport, &recv_transport]() { - Call::Config send_call_config(send_event_log_.get()); - Call::Config recv_call_config(recv_event_log_.get()); + CallConfig send_call_config = SendCallConfig(); + CallConfig recv_call_config = RecvCallConfig(); send_call_config.bitrate_config = params.call.call_bitrate_config; recv_call_config.bitrate_config = params.call.call_bitrate_config; if (params_.audio.enabled) InitializeAudioDevice(&send_call_config, &recv_call_config, params_.audio.use_real_adm); - CreateCalls(send_call_config, 
recv_call_config); + CreateCalls(std::move(send_call_config), std::move(recv_call_config)); send_transport = CreateSendTransport(); recv_transport = CreateReceiveTransport(); }); @@ -1262,7 +1291,7 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) { std::string graph_title = params_.analyzer.graph_title; if (graph_title.empty()) graph_title = VideoQualityTest::GenerateGraphTitle(); - bool is_quick_test_enabled = field_trial::IsEnabled("WebRTC-QuickPerfTest"); + bool is_quick_test_enabled = absl::GetFlag(FLAGS_webrtc_quick_perf_test); analyzer_ = std::make_unique( send_transport.get(), params_.analyzer.test_label, params_.analyzer.avg_psnr_threshold, params_.analyzer.avg_ssim_threshold, @@ -1344,7 +1373,7 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) { analyzer_ = nullptr; } -rtc::scoped_refptr VideoQualityTest::CreateAudioDevice() { +scoped_refptr VideoQualityTest::CreateAudioDevice() { #ifdef WEBRTC_WIN RTC_LOG(LS_INFO) << "Using latest version of ADM on Windows"; // We must initialize the COM library on a thread before we calling any of @@ -1365,10 +1394,10 @@ rtc::scoped_refptr VideoQualityTest::CreateAudioDevice() { #endif } -void VideoQualityTest::InitializeAudioDevice(Call::Config* send_call_config, - Call::Config* recv_call_config, +void VideoQualityTest::InitializeAudioDevice(CallConfig* send_call_config, + CallConfig* recv_call_config, bool use_real_adm) { - rtc::scoped_refptr audio_device; + scoped_refptr audio_device; if (use_real_adm) { // Run test with real ADM (using default audio devices) if user has // explicitly set the --audio and --use_real_adm command-line flags. @@ -1384,7 +1413,8 @@ void VideoQualityTest::InitializeAudioDevice(Call::Config* send_call_config, AudioState::Config audio_state_config; audio_state_config.audio_mixer = AudioMixerImpl::Create(); - audio_state_config.audio_processing = AudioProcessingBuilder().Create(); + audio_state_config.audio_processing = + BuiltinAudioProcessingBuilder().Build(send_call_config->env); audio_state_config.audio_device_module = audio_device; send_call_config->audio_state = AudioState::Create(audio_state_config); recv_call_config->audio_state = AudioState::Create(audio_state_config); @@ -1421,7 +1451,6 @@ void VideoQualityTest::SetupAudio(Transport* transport) { kTransportSequenceNumberExtensionId)); audio_send_config.min_bitrate_bps = kOpusMinBitrateBps; audio_send_config.max_bitrate_bps = kOpusBitrateFbBps; - audio_send_config.send_codec_spec->transport_cc_enabled = true; // Only allow ANA when send-side BWE is enabled. 
audio_send_config.audio_network_adaptor_config = params_.audio.ana_config; } @@ -1445,10 +1474,10 @@ void VideoQualityTest::RunWithRenderers(const Params& params) { std::vector> loopback_renderers; if (!params.logging.rtc_event_log_name.empty()) { - send_event_log_ = rtc_event_log_factory_.CreateRtcEventLog( - RtcEventLog::EncodingType::NewFormat); - recv_event_log_ = rtc_event_log_factory_.CreateRtcEventLog( - RtcEventLog::EncodingType::NewFormat); + std::unique_ptr send_event_log = + rtc_event_log_factory_.Create(env()); + std::unique_ptr recv_event_log = + rtc_event_log_factory_.Create(env()); std::unique_ptr send_output( std::make_unique( params.logging.rtc_event_log_name + "_send", @@ -1458,31 +1487,30 @@ void VideoQualityTest::RunWithRenderers(const Params& params) { params.logging.rtc_event_log_name + "_recv", RtcEventLog::kUnlimitedOutput)); bool event_log_started = - send_event_log_->StartLogging(std::move(send_output), - /*output_period_ms=*/5000) && - recv_event_log_->StartLogging(std::move(recv_output), - /*output_period_ms=*/5000); + send_event_log->StartLogging(std::move(send_output), + /*output_period_ms=*/5000) && + recv_event_log->StartLogging(std::move(recv_output), + /*output_period_ms=*/5000); RTC_DCHECK(event_log_started); - } else { - send_event_log_ = std::make_unique(); - recv_event_log_ = std::make_unique(); + SetSendEventLog(std::move(send_event_log)); + SetRecvEventLog(std::move(recv_event_log)); } SendTask(task_queue(), [&]() { params_ = params; CheckParamsAndInjectionComponents(); - // TODO(ivica): Remove bitrate_config and use the default Call::Config(), to + // TODO(ivica): Remove bitrate_config and use the default CallConfig(), to // match the full stack tests. - Call::Config send_call_config(send_event_log_.get()); + CallConfig send_call_config = SendCallConfig(); send_call_config.bitrate_config = params_.call.call_bitrate_config; - Call::Config recv_call_config(recv_event_log_.get()); + CallConfig recv_call_config = RecvCallConfig(); if (params_.audio.enabled) InitializeAudioDevice(&send_call_config, &recv_call_config, params_.audio.use_real_adm); - CreateCalls(send_call_config, recv_call_config); + CreateCalls(std::move(send_call_config), std::move(recv_call_config)); // TODO(minyue): consider if this is a good transport even for audio only // calls. 
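As in RunWithAnalyzer() earlier in this file, the RunWithRenderers() hunk above drops the send_event_log_/recv_event_log_ members: the logs are now created locally through RtcEventLogFactory::Create(env()) and handed to the test base class via SetSendEventLog()/SetRecvEventLog(). A condensed sketch of that flow for the send side, meant to live inside the VideoQualityTest fixture shown above; the output path is a placeholder, while the factory, env() and setter names are the ones visible in the hunk:

// Create the event log through the factory, start file output with a
// 5 s flush period, then transfer ownership to the test base class.
std::unique_ptr<RtcEventLog> send_event_log = rtc_event_log_factory_.Create(env());
auto send_output = std::make_unique<RtcEventLogOutputFile>(
    "/tmp/example_event_log_send",  // placeholder path
    RtcEventLog::kUnlimitedOutput);
bool event_log_started =
    send_event_log->StartLogging(std::move(send_output), /*output_period_ms=*/5000);
RTC_DCHECK(event_log_started);
SetSendEventLog(std::move(send_event_log));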
@@ -1505,7 +1533,7 @@ void VideoQualityTest::RunWithRenderers(const Params& params) { const size_t num_streams = params_.ss[video_idx].streams.size(); if (selected_stream_id == num_streams) { for (size_t stream_id = 0; stream_id < num_streams; ++stream_id) { - rtc::StringBuilder oss; + StringBuilder oss; oss << "Loopback Video #" << video_idx << " - Stream #" << static_cast(stream_id); loopback_renderers.emplace_back(test::VideoRenderer::Create( @@ -1519,7 +1547,7 @@ void VideoQualityTest::RunWithRenderers(const Params& params) { .sync_group = kSyncGroup; } } else { - rtc::StringBuilder oss; + StringBuilder oss; oss << "Loopback Video #" << video_idx; loopback_renderers.emplace_back(test::VideoRenderer::Create( oss.str().c_str(), @@ -1543,7 +1571,7 @@ void VideoQualityTest::RunWithRenderers(const Params& params) { "Local Preview", params_.video[0].width, params_.video[0].height)); video_sources_[0]->AddOrUpdateSink(local_preview.get(), - rtc::VideoSinkWants()); + VideoSinkWants()); } ConnectVideoSourcesToStreams(); } diff --git a/video/video_quality_test.h b/video/video_quality_test.h index f66256e94c..4afe3977dc 100644 --- a/video/video_quality_test.h +++ b/video/video_quality_test.h @@ -15,6 +15,7 @@ #include #include +#include "api/environment/environment.h" #include "api/fec_controller.h" #include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/task_queue/task_queue_base.h" @@ -48,7 +49,7 @@ class VideoQualityTest : public test::CallTest, return payload_type_map_; } - static void FillScalabilitySettings( + void FillScalabilitySettings( Params* params, size_t video_idx, const std::vector& stream_descriptors, @@ -79,8 +80,10 @@ class VideoQualityTest : public test::CallTest, size_t video_idx); void SetupThumbnailCapturers(size_t num_thumbnail_streams); std::unique_ptr CreateVideoDecoder( + const Environment& env, const SdpVideoFormat& format); - std::unique_ptr CreateVideoEncoder(const SdpVideoFormat& format, + std::unique_ptr CreateVideoEncoder(const Environment& env, + const SdpVideoFormat& format, VideoAnalyzer* analyzer); void SetupVideo(Transport* send_transport, Transport* recv_transport); void SetupThumbnails(Transport* send_transport, Transport* recv_transport); @@ -89,9 +92,9 @@ class VideoQualityTest : public test::CallTest, void StopThumbnails(); void DestroyThumbnailStreams(); // Helper method for creating a real ADM (using hardware) for all platforms. 
- rtc::scoped_refptr CreateAudioDevice(); - void InitializeAudioDevice(Call::Config* send_call_config, - Call::Config* recv_call_config, + scoped_refptr CreateAudioDevice(); + void InitializeAudioDevice(CallConfig* send_call_config, + CallConfig* recv_call_config, bool use_real_adm); void SetupAudio(Transport* transport); @@ -100,7 +103,7 @@ class VideoQualityTest : public test::CallTest, virtual std::unique_ptr CreateSendTransport(); virtual std::unique_ptr CreateReceiveTransport(); - std::vector>> + std::vector>> thumbnail_capturers_; Clock* const clock_; const std::unique_ptr task_queue_factory_; diff --git a/video/video_receive_stream2.cc b/video/video_receive_stream2.cc index 6c1df7d874..3aced70817 100644 --- a/video/video_receive_stream2.cc +++ b/video/video_receive_stream2.cc @@ -14,47 +14,87 @@ #include #include +#include +#include +#include +#include #include +#include #include #include #include +#include #include "absl/algorithm/container.h" -#include "absl/types/optional.h" +#include "absl/strings/str_cat.h" #include "api/array_view.h" #include "api/crypto/frame_decryptor_interface.h" +#include "api/environment/environment.h" +#include "api/field_trials_view.h" +#include "api/frame_transformer_interface.h" +#include "api/rtp_headers.h" +#include "api/rtp_packet_infos.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/transport/rtp/rtp_source.h" #include "api/units/frequency.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "api/video/color_space.h" +#include "api/video/encoded_frame.h" #include "api/video/encoded_image.h" +#include "api/video/recordable_encoded_frame.h" +#include "api/video/render_resolution.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_timing.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_decoder.h" #include "api/video_codecs/video_decoder_factory.h" +#include "call/call.h" +#include "call/rtp_packet_sink_interface.h" #include "call/rtp_stream_receiver_controller_interface.h" +#include "call/rtp_transport_controller_send_interface.h" #include "call/rtx_receive_stream.h" +#include "call/syncable.h" +#include "call/video_receive_stream.h" +#include "common_video/frame_instrumentation_data.h" +#include "modules/rtp_rtcp/include/receive_statistics.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/nack_requester.h" #include "modules/video_coding/timing/timing.h" #include "modules/video_coding/utility/vp8_header_parser.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/synchronization/mutex.h" -#include "rtc_base/thread_annotations.h" +#include "rtc_base/system/file_wrapper.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" #include 
"system_wrappers/include/clock.h" #include "video/call_stats2.h" +#include "video/corruption_detection/frame_instrumentation_evaluation.h" +#include "video/decode_synchronizer.h" +#include "video/frame_decode_scheduler.h" #include "video/frame_dumping_decoder.h" #include "video/receive_statistics_proxy.h" #include "video/render/incoming_video_stream.h" #include "video/task_queue_frame_decode_scheduler.h" +#include "video/video_stream_buffer_controller.h" +#include "video/video_stream_decoder2.h" namespace webrtc { @@ -81,18 +121,25 @@ class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame { if (frame.ColorSpace()) { color_space_ = *frame.ColorSpace(); } + if (frame.rotation() != VideoRotation::kVideoRotation_0) { + video_rotation_ = frame.rotation(); + } } // VideoEncodedSinkInterface::FrameBuffer - rtc::scoped_refptr encoded_buffer() + scoped_refptr encoded_buffer() const override { return buffer_; } - absl::optional color_space() const override { + std::optional color_space() const override { return color_space_; } + std::optional video_rotation() const override { + return video_rotation_; + } + VideoCodecType codec() const override { return codec_; } bool is_key_frame() const override { return is_key_frame_; } @@ -104,12 +151,13 @@ class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame { } private: - rtc::scoped_refptr buffer_; + scoped_refptr buffer_; int64_t render_time_ms_; VideoCodecType codec_; bool is_key_frame_; EncodedResolution resolution_; - absl::optional color_space_; + std::optional color_space_; + std::optional video_rotation_; }; RenderResolution InitialDecoderResolution(const FieldTrialsView& field_trials) { @@ -157,8 +205,8 @@ bool IsKeyFrameAndUnspecifiedResolution(const EncodedFrame& frame) { frame.EncodedImage()._encodedHeight == 0; } -std::string OptionalDelayToLogString(const absl::optional opt) { - return opt.has_value() ? ToLogString(*opt) : ""; +std::string OptionalDelayToLogString(std::optional opt) { + return opt.has_value() ? 
absl::StrCat(*opt) : ""; } } // namespace @@ -177,31 +225,30 @@ TimeDelta DetermineMaxWaitForFrame(TimeDelta rtp_history, bool is_keyframe) { } VideoReceiveStream2::VideoReceiveStream2( - TaskQueueFactory* task_queue_factory, + const Environment& env, Call* call, int num_cpu_cores, PacketRouter* packet_router, VideoReceiveStreamInterface::Config config, CallStats* call_stats, - Clock* clock, std::unique_ptr timing, NackPeriodicProcessor* nack_periodic_processor, - DecodeSynchronizer* decode_sync, - RtcEventLog* event_log) - : task_queue_factory_(task_queue_factory), + DecodeSynchronizer* decode_sync) + : env_(env), + packet_sequence_checker_(SequenceChecker::kDetached), + decode_sequence_checker_(SequenceChecker::kDetached), transport_adapter_(config.rtcp_send_transport), config_(std::move(config)), num_cpu_cores_(num_cpu_cores), call_(call), - clock_(clock), call_stats_(call_stats), - source_tracker_(clock_), - stats_proxy_(remote_ssrc(), clock_, call->worker_thread()), - rtp_receive_statistics_(ReceiveStatistics::Create(clock_)), + source_tracker_(&env_.clock()), + stats_proxy_(remote_ssrc(), &env_.clock(), call->worker_thread()), + rtp_receive_statistics_(ReceiveStatistics::Create(&env_.clock())), timing_(std::move(timing)), - video_receiver_(clock_, timing_.get(), call->trials()), - rtp_video_stream_receiver_(call->worker_thread(), - clock_, + video_receiver_(&env_.clock(), timing_.get(), env_.field_trials(), this), + rtp_video_stream_receiver_(env_, + call->worker_thread(), &transport_adapter_, call_stats->AsRtcpRttStats(), packet_router, @@ -212,9 +259,7 @@ VideoReceiveStream2::VideoReceiveStream2( nack_periodic_processor, this, // OnCompleteFrameCallback std::move(config_.frame_decryptor), - std::move(config_.frame_transformer), - call->trials(), - event_log), + std::move(config_.frame_transformer)), rtp_stream_sync_(call->worker_thread(), this), max_wait_for_keyframe_(DetermineMaxWaitForFrame( TimeDelta::Millis(config_.rtp.nack.rtp_history_ms), @@ -222,7 +267,7 @@ VideoReceiveStream2::VideoReceiveStream2( max_wait_for_frame_(DetermineMaxWaitForFrame( TimeDelta::Millis(config_.rtp.nack.rtp_history_ms), false)), - decode_queue_(task_queue_factory_->CreateTaskQueue( + decode_queue_(env_.task_queue_factory().CreateTaskQueue( "DecodingQueue", TaskQueueFactory::Priority::HIGH)) { RTC_LOG(LS_INFO) << "VideoReceiveStream2: " << config_.ToString(); @@ -230,7 +275,6 @@ VideoReceiveStream2::VideoReceiveStream2( RTC_DCHECK(call_->worker_thread()); RTC_DCHECK(config_.renderer); RTC_DCHECK(call_stats_); - packet_sequence_checker_.Detach(); RTC_DCHECK(!config_.decoders.empty()); RTC_CHECK(config_.decoder_factory); @@ -248,11 +292,11 @@ VideoReceiveStream2::VideoReceiveStream2( std::unique_ptr scheduler = decode_sync ? decode_sync->CreateSynchronizedFrameScheduler() : std::make_unique( - clock, call_->worker_thread()); + &env_.clock(), call_->worker_thread()); buffer_ = std::make_unique( - clock_, call_->worker_thread(), timing_.get(), &stats_proxy_, this, + &env_.clock(), call_->worker_thread(), timing_.get(), &stats_proxy_, this, max_wait_for_keyframe_, max_wait_for_frame_, std::move(scheduler), - call_->trials()); + env_.field_trials()); if (!config_.rtp.rtx_associated_payload_types.empty()) { rtx_receive_stream_ = std::make_unique( @@ -282,8 +326,7 @@ void VideoReceiveStream2::RegisterWithTransport( // Register with RtpStreamReceiverController. 
media_receiver_ = receiver_controller->CreateReceiver( remote_ssrc(), &rtp_video_stream_receiver_); - if (rtx_ssrc()) { - RTC_DCHECK(rtx_receive_stream_); + if (rtx_ssrc() && rtx_receive_stream_ != nullptr) { rtx_receiver_ = receiver_controller->CreateReceiver( rtx_ssrc(), rtx_receive_stream_.get()); } @@ -342,10 +385,10 @@ void VideoReceiveStream2::Start() { } transport_adapter_.Enable(); - rtc::VideoSinkInterface* renderer = nullptr; + VideoSinkInterface* renderer = nullptr; if (config_.enable_prerenderer_smoothing) { incoming_video_stream_.reset(new IncomingVideoStream( - task_queue_factory_, config_.render_delay_ms, this)); + &env_.task_queue_factory(), config_.render_delay_ms, this)); renderer = incoming_video_stream_.get(); } else { renderer = this; @@ -356,7 +399,7 @@ void VideoReceiveStream2::Start() { settings.set_codec_type( PayloadStringToCodecType(decoder.video_format.name)); settings.set_max_render_resolution( - InitialDecoderResolution(call_->trials())); + InitialDecoderResolution(env_.field_trials())); settings.set_number_of_cores(num_cpu_cores_); const bool raw_payload = @@ -381,8 +424,8 @@ void VideoReceiveStream2::Start() { // Start decoding on task queue. stats_proxy_.DecoderThreadStarting(); - decode_queue_.PostTask([this] { - RTC_DCHECK_RUN_ON(&decode_queue_); + decode_queue_->PostTask([this] { + RTC_DCHECK_RUN_ON(&decode_sequence_checker_); decoder_stopped_ = false; }); buffer_->StartNextDecode(true); @@ -411,9 +454,9 @@ void VideoReceiveStream2::Stop() { call_stats_->DeregisterStatsObserver(this); if (decoder_running_) { - rtc::Event done; - decode_queue_.PostTask([this, &done] { - RTC_DCHECK_RUN_ON(&decode_queue_); + Event done; + decode_queue_->PostTask([this, &done] { + RTC_DCHECK_RUN_ON(&decode_sequence_checker_); // Set `decoder_stopped_` before deregistering all decoders. This means // that any pending encoded frame will return early without trying to // access the decoder database. @@ -423,7 +466,7 @@ void VideoReceiveStream2::Stop() { } done.Set(); }); - done.Wait(rtc::Event::kForever); + done.Wait(Event::kForever); decoder_running_ = false; stats_proxy_.DecoderThreadStopped(); @@ -521,7 +564,7 @@ void VideoReceiveStream2::CreateAndRegisterExternalDecoder( TRACE_EVENT0("webrtc", "VideoReceiveStream2::CreateAndRegisterExternalDecoder"); std::unique_ptr video_decoder = - config_.decoder_factory->CreateVideoDecoder(decoder.video_format); + config_.decoder_factory->Create(env_, decoder.video_format); // If we still have no valid decoder, we have to create a "Null" decoder // that ignores all calls. The reason we can get into this state is that the // old decoder factory interface doesn't have a way to query supported @@ -531,7 +574,7 @@ void VideoReceiveStream2::CreateAndRegisterExternalDecoder( } std::string decoded_output_file = - call_->trials().Lookup("WebRTC-DecoderDataDumpDirectory"); + env_.field_trials().Lookup("WebRTC-DecoderDataDumpDirectory"); // Because '/' can't be used inside a field trial parameter, we use ';' // instead. 
// This is only relevant to WebRTC-DecoderDataDumpDirectory @@ -541,9 +584,9 @@ void VideoReceiveStream2::CreateAndRegisterExternalDecoder( absl::c_replace(decoded_output_file, ';', '/'); if (!decoded_output_file.empty()) { char filename_buffer[256]; - rtc::SimpleStringBuilder ssb(filename_buffer); + SimpleStringBuilder ssb(filename_buffer); ssb << decoded_output_file << "/webrtc_receive_stream_" << remote_ssrc() - << "-" << rtc::TimeMicros() << ".ivf"; + << "-" << TimeMicros() << ".ivf"; video_decoder = CreateFrameDumpingDecoderWrapper( std::move(video_decoder), FileWrapper::OpenWriteOnly(ssb.str())); } @@ -567,18 +610,28 @@ VideoReceiveStreamInterface::Stats VideoReceiveStream2::GetStats() const { rtp_receive_statistics_->GetStatistician(rtx_ssrc()); if (rtx_statistician) { stats.total_bitrate_bps += rtx_statistician->BitrateReceived(); - // TODO(bugs.webrtc.org/15096): remove kill-switch after rollout. - if (!call_->trials().IsDisabled("WebRTC-Stats-RtxReceiveStats")) { - stats.rtx_rtp_stats = rtx_statistician->GetStats(); - } + stats.rtx_rtp_stats = rtx_statistician->GetStats(); } } + + std::optional rtcp_sr_stats = + rtp_video_stream_receiver_.GetSenderReportStats(); + if (rtcp_sr_stats) { + stats.last_sender_report_timestamp = rtcp_sr_stats->last_arrival_timestamp; + stats.last_sender_report_utc_timestamp = + Clock::NtpToUtc(rtcp_sr_stats->last_arrival_ntp_timestamp); + stats.last_sender_report_remote_utc_timestamp = + Clock::NtpToUtc(rtcp_sr_stats->last_remote_ntp_timestamp); + stats.sender_reports_packets_sent = rtcp_sr_stats->packets_sent; + stats.sender_reports_bytes_sent = rtcp_sr_stats->bytes_sent; + stats.sender_reports_reports_count = rtcp_sr_stats->reports_count; + } return stats; } void VideoReceiveStream2::UpdateHistograms() { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - absl::optional fraction_lost; + std::optional fraction_lost; StreamDataCounters rtp_stats; StreamStatistician* statistician = rtp_receive_statistics_->GetStatistician(remote_ssrc()); @@ -599,6 +652,12 @@ void VideoReceiveStream2::UpdateHistograms() { stats_proxy_.UpdateHistograms(fraction_lost, rtp_stats, nullptr); } +std::optional VideoReceiveStream2::CalculateCorruptionScore( + const VideoFrame& frame, + const FrameInstrumentationData& frame_instrumentation_data) { + return GetCorruptionScore(frame_instrumentation_data, frame); +} + bool VideoReceiveStream2::SetBaseMinimumPlayoutDelayMs(int delay_ms) { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); TimeDelta delay = TimeDelta::Millis(delay_ms); @@ -620,11 +679,11 @@ int VideoReceiveStream2::GetBaseMinimumPlayoutDelayMs() const { } void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) { - source_tracker_.OnFrameDelivered(video_frame.packet_infos()); config_.renderer->OnFrame(video_frame); - // TODO(bugs.webrtc.org/10739): we should set local capture clock offset for - // `video_frame.packet_infos`. But VideoFrame is const qualified here. + // TODO: bugs.webrtc.org/42220804 - we should set local capture clock offset + // for `packet_infos`. + RtpPacketInfos packet_infos = video_frame.packet_infos(); // For frame delay metrics, calculated in `OnRenderedFrame`, to better reflect // user experience measurements must be done as close as possible to frame @@ -633,9 +692,9 @@ void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) { // renderer. Frame may or may be not rendered by this time. This results in // inaccuracy but is still the best we can do in the absence of "frame // rendered" callback from the renderer. 
- VideoFrameMetaData frame_meta(video_frame, clock_->CurrentTime()); + VideoFrameMetaData frame_meta(video_frame, env_.clock().CurrentTime()); call_->worker_thread()->PostTask( - SafeTask(task_safety_.flag(), [frame_meta, this]() { + SafeTask(task_safety_.flag(), [frame_meta, packet_infos, this]() { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); int64_t video_playout_ntp_ms; int64_t sync_offset_ms; @@ -647,6 +706,8 @@ void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) { estimated_freq_khz); } stats_proxy_.OnRenderedFrame(frame_meta); + source_tracker_.OnFrameDelivered(packet_infos, + frame_meta.decode_timestamp); })); webrtc::MutexLock lock(&pending_resolution_mutex_); @@ -668,12 +729,12 @@ void VideoReceiveStream2::OnFrame(const VideoFrame& video_frame) { } void VideoReceiveStream2::SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) { + scoped_refptr frame_decryptor) { rtp_video_stream_receiver_.SetFrameDecryptor(std::move(frame_decryptor)); } void VideoReceiveStream2::SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) { + scoped_refptr frame_transformer) { rtp_video_stream_receiver_.SetDepacketizerToDecoderFrameTransformer( std::move(frame_transformer)); } @@ -689,7 +750,7 @@ void VideoReceiveStream2::RequestKeyFrame(Timestamp now) { void VideoReceiveStream2::OnCompleteFrame(std::unique_ptr frame) { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - if (absl::optional playout_delay = + if (std::optional playout_delay = frame->EncodedImage().PlayoutDelay()) { frame_minimum_playout_delay_ = playout_delay->min(); frame_maximum_playout_delay_ = playout_delay->max(); @@ -719,13 +780,12 @@ uint32_t VideoReceiveStream2::id() const { return remote_ssrc(); } -absl::optional VideoReceiveStream2::GetInfo() const { +std::optional VideoReceiveStream2::GetInfo() const { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - absl::optional info = - rtp_video_stream_receiver_.GetSyncInfo(); + std::optional info = rtp_video_stream_receiver_.GetSyncInfo(); if (!info) - return absl::nullopt; + return std::nullopt; info->current_delay_ms = timing_->TargetVideoDelay().ms(); return info; @@ -752,7 +812,7 @@ bool VideoReceiveStream2::SetMinimumPlayoutDelay(int delay_ms) { void VideoReceiveStream2::OnEncodedFrame(std::unique_ptr frame) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); const bool keyframe_request_is_due = !last_keyframe_request_ || now >= (*last_keyframe_request_ + max_wait_for_keyframe_); @@ -769,19 +829,20 @@ void VideoReceiveStream2::OnEncodedFrame(std::unique_ptr frame) { } stats_proxy_.OnPreDecode(frame->CodecSpecific()->codecType, qp); - decode_queue_.PostTask([this, now, keyframe_request_is_due, - received_frame_is_keyframe, frame = std::move(frame), - keyframe_required = keyframe_required_]() mutable { - RTC_DCHECK_RUN_ON(&decode_queue_); + decode_queue_->PostTask([this, now, keyframe_request_is_due, + received_frame_is_keyframe, frame = std::move(frame), + keyframe_required = keyframe_required_]() mutable { + RTC_DCHECK_RUN_ON(&decode_sequence_checker_); if (decoder_stopped_) return; + uint32_t rtp_timestamp = frame->RtpTimestamp(); DecodeFrameResult result = HandleEncodedFrameOnDecodeQueue( std::move(frame), keyframe_request_is_due, keyframe_required); // TODO(bugs.webrtc.org/11993): Make this PostTask to the network thread. 
call_->worker_thread()->PostTask( SafeTask(task_safety_.flag(), - [this, now, result = std::move(result), + [this, now, rtp_timestamp, result = std::move(result), received_frame_is_keyframe, keyframe_request_is_due]() { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); keyframe_required_ = result.keyframe_required; @@ -790,6 +851,7 @@ void VideoReceiveStream2::OnEncodedFrame(std::unique_ptr frame) { rtp_video_stream_receiver_.FrameDecoded( *result.decoded_frame_picture_id); } + last_decoded_rtp_timestamp_ = rtp_timestamp; HandleKeyFrameGeneration(received_frame_is_keyframe, now, result.force_request_key_frame, @@ -801,9 +863,9 @@ void VideoReceiveStream2::OnEncodedFrame(std::unique_ptr frame) { void VideoReceiveStream2::OnDecodableFrameTimeout(TimeDelta wait) { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - Timestamp now = clock_->CurrentTime(); + Timestamp now = env_.clock().CurrentTime(); - absl::optional last_packet_ms = + std::optional last_packet_ms = rtp_video_stream_receiver_.LastReceivedPacketMs(); // To avoid spamming keyframe requests for a stream that is not active we @@ -818,12 +880,16 @@ void VideoReceiveStream2::OnDecodableFrameTimeout(TimeDelta wait) { if (stream_is_active && !IsReceivingKeyFrame(now) && (!config_.crypto_options.sframe.require_frame_encryption || rtp_video_stream_receiver_.IsDecryptable())) { - absl::optional last_timestamp = + std::optional last_timestamp = rtp_video_stream_receiver_.LastReceivedFrameRtpTimestamp(); RTC_LOG(LS_WARNING) << "No decodable frame in " << wait << " requesting keyframe. Last RTP timestamp " - << (last_timestamp ? rtc::ToString(*last_timestamp) + << (last_timestamp ? absl::StrCat(*last_timestamp) : "") + << ", last decoded frame RTP timestamp " + << (last_decoded_rtp_timestamp_ + ? absl::StrCat(*last_decoded_rtp_timestamp_) + : "") << "."; RequestKeyFrame(now); } @@ -836,17 +902,24 @@ VideoReceiveStream2::HandleEncodedFrameOnDecodeQueue( std::unique_ptr frame, bool keyframe_request_is_due, bool keyframe_required) { - RTC_DCHECK_RUN_ON(&decode_queue_); + RTC_DCHECK_RUN_ON(&decode_sequence_checker_); bool force_request_key_frame = false; - absl::optional decoded_frame_picture_id; + std::optional decoded_frame_picture_id; if (!video_receiver_.IsExternalDecoderRegistered(frame->PayloadType())) { // Look for the decoder with this payload type. for (const Decoder& decoder : config_.decoders) { if (decoder.payload_type == frame->PayloadType()) { CreateAndRegisterExternalDecoder(decoder); - break; + } else { + // Unregister any external decoder not from this payload type. + // If not, any previous video decoder will be released when the next + // frame is decoded but the decoder wrapper will not. + // This will cause the decoder to be reused if we switch back to that + // payload in the future, failing to configure it and causing to + // fallback to the software decoder. 
+ video_receiver_.RegisterExternalDecoder(nullptr, decoder.payload_type); } } } @@ -878,7 +951,7 @@ VideoReceiveStream2::HandleEncodedFrameOnDecodeQueue( int VideoReceiveStream2::DecodeAndMaybeDispatchEncodedFrame( std::unique_ptr frame) { - RTC_DCHECK_RUN_ON(&decode_queue_); + RTC_DCHECK_RUN_ON(&decode_sequence_checker_); // If `buffered_encoded_frames_` grows out of control (=60 queued frames), // maybe due to a stuck decoder, we just halt the process here and log the @@ -907,9 +980,24 @@ int VideoReceiveStream2::DecodeAndMaybeDispatchEncodedFrame( } int decode_result = video_receiver_.Decode(frame_ptr); + if (decode_result < WEBRTC_VIDEO_CODEC_OK) { + // Asynchronous decoders may delay error reporting, potentially resulting in + // error reports reflecting issues that occurred several frames back. + RTC_LOG(LS_WARNING) + << "Failed to decode frame. Return code: " << decode_result + << ", SSRC: " << remote_ssrc() + << ", frame RTP timestamp: " << frame_ptr->RtpTimestamp() + << ", type: " << VideoFrameTypeToString(frame_ptr->FrameType()) + << ", size: " << frame_ptr->size() + << ", width: " << frame_ptr->_encodedWidth + << ", height: " << frame_ptr->_encodedHeight + << ", spatial idx: " << frame_ptr->SpatialIndex().value_or(-1) + << ", temporal idx: " << frame_ptr->TemporalIndex().value_or(-1) + << ", id: " << frame_ptr->Id(); + } + if (encoded_frame_output_enabled) { - absl::optional - pending_resolution; + std::optional pending_resolution; { // Fish out `pending_resolution_` to avoid taking the mutex on every lap // or dispatching under the mutex in the flush loop. @@ -919,16 +1007,16 @@ int VideoReceiveStream2::DecodeAndMaybeDispatchEncodedFrame( } if (!pending_resolution.has_value() || !pending_resolution->empty()) { // Flush the buffered frames. - for (const auto& frame : buffered_encoded_frames_) { + for (const auto& buffered_frame : buffered_encoded_frames_) { RecordableEncodedFrame::EncodedResolution resolution{ - frame->EncodedImage()._encodedWidth, - frame->EncodedImage()._encodedHeight}; - if (IsKeyFrameAndUnspecifiedResolution(*frame)) { + buffered_frame->EncodedImage()._encodedWidth, + buffered_frame->EncodedImage()._encodedHeight}; + if (IsKeyFrameAndUnspecifiedResolution(*buffered_frame)) { RTC_DCHECK(!pending_resolution->empty()); resolution = *pending_resolution; } encoded_frame_buffer_function_( - WebRtcRecordableEncodedFrame(*frame, resolution)); + WebRtcRecordableEncodedFrame(*buffered_frame, resolution)); } buffered_encoded_frames_.clear(); } @@ -967,7 +1055,7 @@ void VideoReceiveStream2::HandleKeyFrameGeneration( bool VideoReceiveStream2::IsReceivingKeyFrame(Timestamp now) const { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - absl::optional last_keyframe_packet_ms = + std::optional last_keyframe_packet_ms = rtp_video_stream_receiver_.LastReceivedKeyframePacketMs(); // If we recently have been receiving packets belonging to a keyframe then @@ -980,48 +1068,74 @@ bool VideoReceiveStream2::IsReceivingKeyFrame(Timestamp now) const { void VideoReceiveStream2::UpdatePlayoutDelays() const { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - const std::initializer_list> min_delays = { + const std::initializer_list> min_delays = { frame_minimum_playout_delay_, base_minimum_playout_delay_, syncable_minimum_playout_delay_}; // Since nullopt < anything, this will return the largest of the minumum // delays, or nullopt if all are nullopt. 
- absl::optional minimum_delay = std::max(min_delays); - if (minimum_delay) { - auto num_playout_delays_set = - absl::c_count_if(min_delays, [](auto opt) { return opt.has_value(); }); - if (num_playout_delays_set > 1 && - timing_->min_playout_delay() != minimum_delay) { + std::optional minimum_delay = std::max(min_delays); + if (!minimum_delay.has_value()) { + // `frame_maximum_playout_delay_` and `frame_minimum_delay_value_` are set + // together. Thus absence of the `minimum_delay` implies absene of the + // `frame_minimum_playout_delay_` and thus implies absence of the + // `frame_maximum_playout_delay_`. + RTC_DCHECK(!frame_maximum_playout_delay_.has_value()); + return; + } + + // When maximum delay is smaller than minimum delay, maximum delay takes + // priority. It arrived with the frame, and thus is an explicit request to + // limit the delay. + if (frame_maximum_playout_delay_.has_value() && + minimum_delay > *frame_maximum_playout_delay_) { + minimum_delay = *frame_maximum_playout_delay_; + if (timing_->min_playout_delay() != *minimum_delay) { RTC_LOG(LS_WARNING) - << "Multiple playout delays set. Actual delay value set to " - << *minimum_delay << " frame min delay=" + << "Maximum playout delay " << *frame_maximum_playout_delay_ + << " overrides minimum delay. frame min delay=" << OptionalDelayToLogString(frame_minimum_playout_delay_) << " base min delay=" << OptionalDelayToLogString(base_minimum_playout_delay_) << " sync min delay=" << OptionalDelayToLogString(syncable_minimum_playout_delay_); } - timing_->set_min_playout_delay(*minimum_delay); - if (frame_minimum_playout_delay_ == TimeDelta::Zero() && - frame_maximum_playout_delay_ > TimeDelta::Zero()) { - // TODO(kron): Estimate frame rate from video stream. - constexpr Frequency kFrameRate = Frequency::Hertz(60); - // Convert playout delay in ms to number of frames. - int max_composition_delay_in_frames = - std::lrint(*frame_maximum_playout_delay_ * kFrameRate); - // Subtract frames in buffer. - max_composition_delay_in_frames = - std::max(max_composition_delay_in_frames - buffer_->Size(), 0); - timing_->SetMaxCompositionDelayInFrames(max_composition_delay_in_frames); - } } - if (frame_maximum_playout_delay_) { - timing_->set_max_playout_delay(*frame_maximum_playout_delay_); + auto num_playout_delays_set = + absl::c_count_if(min_delays, [](auto opt) { return opt.has_value(); }); + if (num_playout_delays_set > 1 && + timing_->min_playout_delay() != *minimum_delay) { + RTC_LOG(LS_WARNING) + << "Multiple playout delays set. Actual delay value set to " + << *minimum_delay << " frame min delay=" + << OptionalDelayToLogString(frame_minimum_playout_delay_) + << " base min delay=" + << OptionalDelayToLogString(base_minimum_playout_delay_) + << " sync min delay=" + << OptionalDelayToLogString(syncable_minimum_playout_delay_); + } + if (frame_maximum_playout_delay_.has_value()) { + timing_->set_playout_delay({*minimum_delay, *frame_maximum_playout_delay_}); + } else { + timing_->set_min_playout_delay(*minimum_delay); + } + if (frame_minimum_playout_delay_ == TimeDelta::Zero() && + frame_maximum_playout_delay_ > TimeDelta::Zero()) { + // TODO(kron): Estimate frame rate from video stream. + constexpr Frequency kFrameRate = Frequency::Hertz(60); + // Convert playout delay to number of frames. + int max_composition_delay_in_frames = + std::lrint(*frame_maximum_playout_delay_ * kFrameRate); + // Subtract frames in buffer. 
+ max_composition_delay_in_frames = + std::max(max_composition_delay_in_frames - buffer_->Size(), 0); + timing_->SetMaxCompositionDelayInFrames(max_composition_delay_in_frames); } } std::vector VideoReceiveStream2::GetSources() const { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); return source_tracker_.GetSources(); } @@ -1029,26 +1143,26 @@ VideoReceiveStream2::RecordingState VideoReceiveStream2::SetAndGetRecordingState(RecordingState state, bool generate_key_frame) { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - rtc::Event event; + Event event; // Save old state, set the new state. RecordingState old_state; - absl::optional last_keyframe_request; + std::optional last_keyframe_request; { // TODO(bugs.webrtc.org/11993): Post this to the network thread. RTC_DCHECK_RUN_ON(&packet_sequence_checker_); last_keyframe_request = last_keyframe_request_; last_keyframe_request_ = generate_key_frame - ? clock_->CurrentTime() + ? env_.clock().CurrentTime() : Timestamp::Millis(state.last_keyframe_request_ms.value_or(0)); } - decode_queue_.PostTask( + decode_queue_->PostTask( [this, &event, &old_state, callback = std::move(state.callback), last_keyframe_request = std::move(last_keyframe_request)] { - RTC_DCHECK_RUN_ON(&decode_queue_); + RTC_DCHECK_RUN_ON(&decode_sequence_checker_); old_state.callback = std::move(encoded_frame_buffer_function_); encoded_frame_buffer_function_ = std::move(callback); @@ -1067,13 +1181,13 @@ VideoReceiveStream2::SetAndGetRecordingState(RecordingState state, } } - event.Wait(rtc::Event::kForever); + event.Wait(Event::kForever); return old_state; } void VideoReceiveStream2::GenerateKeyFrame() { RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - RequestKeyFrame(clock_->CurrentTime()); + RequestKeyFrame(env_.clock().CurrentTime()); keyframe_generation_requested_ = true; } diff --git a/video/video_receive_stream2.h b/video/video_receive_stream2.h index 31b9a7eb7c..aae2b3e239 100644 --- a/video/video_receive_stream2.h +++ b/video/video_receive_stream2.h @@ -11,29 +11,45 @@ #ifndef VIDEO_VIDEO_RECEIVE_STREAM2_H_ #define VIDEO_VIDEO_RECEIVE_STREAM2_H_ +#include +#include +#include #include #include +#include #include #include -#include "absl/types/optional.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/environment/environment.h" +#include "api/frame_transformer_interface.h" +#include "api/rtp_headers.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" -#include "api/task_queue/task_queue_factory.h" +#include "api/task_queue/task_queue_base.h" +#include "api/transport/rtp/rtp_source.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "api/video/encoded_frame.h" #include "api/video/recordable_encoded_frame.h" +#include "api/video/video_frame.h" +#include "api/video/video_sink_interface.h" #include "call/call.h" #include "call/rtp_packet_sink_interface.h" #include "call/syncable.h" #include "call/video_receive_stream.h" +#include "common_video/frame_instrumentation_data.h" +#include "common_video/include/corruption_score_calculator.h" +#include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/source/source_tracker.h" #include "modules/video_coding/nack_requester.h" #include "modules/video_coding/video_receiver2.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" -#include "system_wrappers/include/clock.h" +#include 
"rtc_base/time_utils.h" +#include "video/decode_synchronizer.h" #include "video/receive_statistics_proxy.h" #include "video/rtp_streams_synchronizer2.h" #include "video/rtp_video_stream_receiver2.h" @@ -62,7 +78,7 @@ class CallStats; // multiple calls to clock->Now(). struct VideoFrameMetaData { VideoFrameMetaData(const webrtc::VideoFrame& frame, Timestamp now) - : rtp_timestamp(frame.timestamp()), + : rtp_timestamp(frame.rtp_timestamp()), timestamp_us(frame.timestamp_us()), ntp_time_ms(frame.ntp_time_ms()), width(frame.width()), @@ -70,7 +86,7 @@ struct VideoFrameMetaData { decode_timestamp(now) {} int64_t render_time_ms() const { - return timestamp_us / rtc::kNumMicrosecsPerMillisec; + return timestamp_us / kNumMicrosecsPerMillisec; } const uint32_t rtp_timestamp; @@ -84,27 +100,26 @@ struct VideoFrameMetaData { class VideoReceiveStream2 : public webrtc::VideoReceiveStreamInterface, - public rtc::VideoSinkInterface, + public VideoSinkInterface, public RtpVideoStreamReceiver2::OnCompleteFrameCallback, public Syncable, public CallStatsObserver, - public FrameSchedulingReceiver { + public FrameSchedulingReceiver, + public CorruptionScoreCalculator { public: // The maximum number of buffered encoded frames when encoded output is // configured. static constexpr size_t kBufferedEncodedFramesMaxSize = 60; - VideoReceiveStream2(TaskQueueFactory* task_queue_factory, + VideoReceiveStream2(const Environment& env, Call* call, int num_cpu_cores, PacketRouter* packet_router, VideoReceiveStreamInterface::Config config, CallStats* call_stats, - Clock* clock, std::unique_ptr timing, NackPeriodicProcessor* nack_periodic_processor, - DecodeSynchronizer* decode_sync, - RtcEventLog* event_log); + DecodeSynchronizer* decode_sync); // Destruction happens on the worker thread. Prior to destruction the caller // must ensure that a registration with the transport has been cleared. See // `RegisterWithTransport` for details. @@ -166,11 +181,11 @@ class VideoReceiveStream2 int GetBaseMinimumPlayoutDelayMs() const override; void SetFrameDecryptor( - rtc::scoped_refptr frame_decryptor) override; + scoped_refptr frame_decryptor) override; void SetDepacketizerToDecoderFrameTransformer( - rtc::scoped_refptr frame_transformer) override; + scoped_refptr frame_transformer) override; - // Implements rtc::VideoSinkInterface. + // Implements webrtc::VideoSinkInterface. void OnFrame(const VideoFrame& video_frame) override; // Implements RtpVideoStreamReceiver2::OnCompleteFrameCallback. @@ -181,7 +196,7 @@ class VideoReceiveStream2 // Implements Syncable. uint32_t id() const override; - absl::optional GetInfo() const override; + std::optional GetInfo() const override; bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, int64_t* time_ms) const override; void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, @@ -216,7 +231,7 @@ class VideoReceiveStream2 // The picture id of the frame that was decoded, or nullopt if the frame was // not decoded. - absl::optional decoded_frame_picture_id; + std::optional decoded_frame_picture_id; // True if the next frame decoded must be a keyframe. 
This value will set // the value of `keyframe_required_`, which will force the frame buffer to @@ -227,7 +242,7 @@ class VideoReceiveStream2 DecodeFrameResult HandleEncodedFrameOnDecodeQueue( std::unique_ptr frame, bool keyframe_request_is_due, - bool keyframe_required) RTC_RUN_ON(decode_queue_); + bool keyframe_required) RTC_RUN_ON(decode_sequence_checker_); void UpdatePlayoutDelays() const RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_sequence_checker_); void RequestKeyFrame(Timestamp now) RTC_RUN_ON(packet_sequence_checker_); @@ -239,9 +254,14 @@ class VideoReceiveStream2 bool IsReceivingKeyFrame(Timestamp timestamp) const RTC_RUN_ON(packet_sequence_checker_); int DecodeAndMaybeDispatchEncodedFrame(std::unique_ptr frame) - RTC_RUN_ON(decode_queue_); + RTC_RUN_ON(decode_sequence_checker_); void UpdateHistograms(); + std::optional CalculateCorruptionScore( + const VideoFrame& frame, + const FrameInstrumentationData& frame_instrumentation_data) override; + + const Environment env_; RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_sequence_checker_; // TODO(bugs.webrtc.org/11993): This checker conceptually represents @@ -253,20 +273,19 @@ class VideoReceiveStream2 // on the network thread, this comment will be deleted. RTC_NO_UNIQUE_ADDRESS SequenceChecker packet_sequence_checker_; - TaskQueueFactory* const task_queue_factory_; + RTC_NO_UNIQUE_ADDRESS SequenceChecker decode_sequence_checker_; TransportAdapter transport_adapter_; const VideoReceiveStreamInterface::Config config_; const int num_cpu_cores_; Call* const call_; - Clock* const clock_; CallStats* const call_stats_; bool decoder_running_ RTC_GUARDED_BY(worker_sequence_checker_) = false; - bool decoder_stopped_ RTC_GUARDED_BY(decode_queue_) = true; + bool decoder_stopped_ RTC_GUARDED_BY(decode_sequence_checker_) = true; - SourceTracker source_tracker_; + SourceTracker source_tracker_ RTC_GUARDED_BY(worker_sequence_checker_); ReceiveStatisticsProxy stats_proxy_; // Shared by media and rtx stream receivers, since the latter has no RtpRtcp // module of its own. @@ -274,7 +293,7 @@ class VideoReceiveStream2 std::unique_ptr timing_; // Jitter buffer experiment. VideoReceiver2 video_receiver_; - std::unique_ptr> incoming_video_stream_; + std::unique_ptr> incoming_video_stream_; RtpVideoStreamReceiver2 rtp_video_stream_receiver_; std::unique_ptr video_stream_decoder_; RtpStreamsSynchronizer rtp_stream_sync_; @@ -290,7 +309,7 @@ class VideoReceiveStream2 RTC_GUARDED_BY(packet_sequence_checker_); std::unique_ptr rtx_receive_stream_ RTC_GUARDED_BY(packet_sequence_checker_); - absl::optional updated_rtx_ssrc_ + std::optional updated_rtx_ssrc_ RTC_GUARDED_BY(packet_sequence_checker_); std::unique_ptr rtx_receiver_ RTC_GUARDED_BY(packet_sequence_checker_); @@ -300,9 +319,9 @@ class VideoReceiveStream2 bool keyframe_required_ RTC_GUARDED_BY(packet_sequence_checker_) = true; // If we have successfully decoded any frame. - bool frame_decoded_ RTC_GUARDED_BY(decode_queue_) = false; + bool frame_decoded_ RTC_GUARDED_BY(decode_sequence_checker_) = false; - absl::optional last_keyframe_request_ + std::optional last_keyframe_request_ RTC_GUARDED_BY(packet_sequence_checker_); // Keyframe request intervals are configurable through field trials. @@ -314,41 +333,46 @@ class VideoReceiveStream2 // biggest delay is used. -1 means use default value from the `timing_`. // // Minimum delay as decided by the RTP playout delay extension. 
- absl::optional frame_minimum_playout_delay_ + std::optional frame_minimum_playout_delay_ RTC_GUARDED_BY(worker_sequence_checker_); // Minimum delay as decided by the setLatency function in "webrtc/api". - absl::optional base_minimum_playout_delay_ + std::optional base_minimum_playout_delay_ RTC_GUARDED_BY(worker_sequence_checker_); // Minimum delay as decided by the A/V synchronization feature. - absl::optional syncable_minimum_playout_delay_ + std::optional syncable_minimum_playout_delay_ RTC_GUARDED_BY(worker_sequence_checker_); // Maximum delay as decided by the RTP playout delay extension. - absl::optional frame_maximum_playout_delay_ + std::optional frame_maximum_playout_delay_ RTC_GUARDED_BY(worker_sequence_checker_); // Function that is triggered with encoded frames, if not empty. std::function - encoded_frame_buffer_function_ RTC_GUARDED_BY(decode_queue_); + encoded_frame_buffer_function_ RTC_GUARDED_BY(decode_sequence_checker_); // Set to true while we're requesting keyframes but not yet received one. bool keyframe_generation_requested_ RTC_GUARDED_BY(packet_sequence_checker_) = false; // Lock to avoid unnecessary per-frame idle wakeups in the code. webrtc::Mutex pending_resolution_mutex_; // Signal from decode queue to OnFrame callback to fill pending_resolution_. - // absl::nullopt - no resolution needed. 0x0 - next OnFrame to fill with + // std::nullopt - no resolution needed. 0x0 - next OnFrame to fill with // received resolution. Not 0x0 - OnFrame has filled a resolution. - absl::optional pending_resolution_ + std::optional pending_resolution_ RTC_GUARDED_BY(pending_resolution_mutex_); // Buffered encoded frames held while waiting for decoded resolution. std::vector> buffered_encoded_frames_ - RTC_GUARDED_BY(decode_queue_); - - // Defined last so they are destroyed before all other members. - rtc::TaskQueue decode_queue_; + RTC_GUARDED_BY(decode_sequence_checker_); // Used to signal destruction to potentially pending tasks. ScopedTaskSafety task_safety_; + + // Defined last so they are destroyed before all other members, in particular + // `decode_queue_` should be stopped before `decode_sequence_checker_` is + // destructed to avoid races when running tasks on the `decode_queue_` during + // VideoReceiveStream2 destruction. 
+ std::unique_ptr decode_queue_; + + std::optional last_decoded_rtp_timestamp_; }; } // namespace internal diff --git a/video/video_receive_stream2_unittest.cc b/video/video_receive_stream2_unittest.cc index 084b128af8..2feb51bb20 100644 --- a/video/video_receive_stream2_unittest.cc +++ b/video/video_receive_stream2_unittest.cc @@ -12,36 +12,43 @@ #include #include +#include +#include #include #include #include -#include -#include -#include +#include #include #include #include "absl/memory/memory.h" -#include "absl/types/optional.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/metronome/test/fake_metronome.h" +#include "api/rtp_packet_info.h" +#include "api/rtp_packet_infos.h" #include "api/test/mock_video_decoder.h" #include "api/test/mock_video_decoder_factory.h" #include "api/test/time_controller.h" +#include "api/transport/rtp/rtp_source.h" #include "api/units/frequency.h" #include "api/units/time_delta.h" -#include "api/video/encoded_image.h" +#include "api/units/timestamp.h" #include "api/video/recordable_encoded_frame.h" #include "api/video/test/video_frame_matchers.h" #include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_timing.h" #include "api/video_codecs/sdp_video_format.h" -#include "api/video_codecs/video_decoder.h" #include "call/rtp_stream_receiver_controller.h" #include "call/video_receive_stream.h" #include "common_video/test/utilities.h" #include "media/engine/fake_webrtc_call.h" #include "modules/pacing/packet_router.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "modules/video_coding/encoded_frame.h" +#include "modules/video_coding/nack_requester.h" #include "rtc_base/logging.h" #include "system_wrappers/include/clock.h" #include "test/fake_decoder.h" @@ -53,25 +60,7 @@ #include "test/time_controller/simulated_time_controller.h" #include "test/video_decoder_proxy_factory.h" #include "video/call_stats2.h" - -namespace webrtc { - -// Printing SdpVideoFormat for gmock argument matchers. 
-void PrintTo(const SdpVideoFormat& value, std::ostream* os) { - *os << value.ToString(); -} - -void PrintTo(const RecordableEncodedFrame::EncodedResolution& value, - std::ostream* os) { - *os << value.width << "x" << value.height; -} - -void PrintTo(const RecordableEncodedFrame& value, std::ostream* os) { - *os << "RecordableEncodedFrame(render_time=" << value.render_time() - << " resolution=" << ::testing::PrintToString(value.resolution()) << ")"; -} - -} // namespace webrtc +#include "video/decode_synchronizer.h" namespace webrtc { @@ -102,8 +91,8 @@ auto RenderedFrameWith(::testing::Matcher m) { auto RenderedFrame() { return RenderedFrameWith(_); } -testing::Matcher> DidNotReceiveFrame() { - return Eq(absl::nullopt); +testing::Matcher> DidNotReceiveFrame() { + return Eq(std::nullopt); } constexpr TimeDelta kDefaultTimeOut = TimeDelta::Millis(50); @@ -115,8 +104,13 @@ constexpr TimeDelta k30FpsDelay = 1 / k30Fps; constexpr Frequency kRtpTimestampHz = Frequency::KiloHertz(90); constexpr uint32_t k30FpsRtpTimestampDelta = kRtpTimestampHz / k30Fps; constexpr uint32_t kFirstRtpTimestamp = 90000; +constexpr uint8_t kH264PayloadType = 99; +constexpr uint8_t kH265PayloadType = 100; +constexpr uint8_t kAv1PayloadType = 101; +constexpr uint32_t kRemoteSsrc = 1111; +constexpr uint32_t kLocalSsrc = 2222; -class FakeVideoRenderer : public rtc::VideoSinkInterface { +class FakeVideoRenderer : public VideoSinkInterface { public: explicit FakeVideoRenderer(TimeController* time_controller) : time_controller_(time_controller) {} @@ -124,23 +118,23 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface { void OnFrame(const VideoFrame& frame) override { RTC_LOG(LS_VERBOSE) << "Received frame with timestamp=" - << frame.timestamp(); + << frame.rtp_timestamp(); if (!last_frame_.empty()) { RTC_LOG(LS_INFO) << "Already had frame queue with timestamp=" - << last_frame_.back().timestamp(); + << last_frame_.back().rtp_timestamp(); } last_frame_.push_back(frame); } // If `advance_time`, then the clock will always advance by `timeout`. 
- absl::optional WaitForFrame(TimeDelta timeout, - bool advance_time = false) { + std::optional WaitForFrame(TimeDelta timeout, + bool advance_time = false) { auto start = time_controller_->GetClock()->CurrentTime(); if (last_frame_.empty()) { time_controller_->AdvanceTime(TimeDelta::Zero()); time_controller_->Wait([this] { return !last_frame_.empty(); }, timeout); } - absl::optional ret; + std::optional ret; if (!last_frame_.empty()) { ret = last_frame_.front(); last_frame_.pop_front(); @@ -162,9 +156,9 @@ MATCHER_P2(MatchResolution, w, h, "") { } MATCHER_P(RtpTimestamp, timestamp, "") { - if (arg.timestamp() != timestamp) { + if (arg.rtp_timestamp() != timestamp) { *result_listener->stream() - << "rtp timestamp was " << arg.timestamp() << " != " << timestamp; + << "rtp timestamp was " << arg.rtp_timestamp() << " != " << timestamp; return false; } return true; @@ -192,20 +186,21 @@ class VideoReceiveStream2Test : public ::testing::TestWithParam { VideoReceiveStream2Test() : time_controller_(kStartTime), - clock_(time_controller_.GetClock()), - config_(&mock_transport_, &mock_h264_decoder_factory_), - call_stats_(clock_, time_controller_.GetMainThread()), + env_(CreateEnvironment(time_controller_.CreateTaskQueueFactory(), + time_controller_.GetClock())), + config_(&mock_transport_, &mock_decoder_factory_), + call_stats_(&env_.clock(), time_controller_.GetMainThread()), fake_renderer_(&time_controller_), + fake_call_(env_), fake_metronome_(TimeDelta::Millis(16)), - decode_sync_(clock_, + decode_sync_(&env_.clock(), &fake_metronome_, time_controller_.GetMainThread()), h264_decoder_factory_(&mock_decoder_) { // By default, mock decoder factory is backed by VideoDecoderProxyFactory. - ON_CALL(mock_h264_decoder_factory_, CreateVideoDecoder) - .WillByDefault( - Invoke(&h264_decoder_factory_, - &test::VideoDecoderProxyFactory::CreateVideoDecoder)); + ON_CALL(mock_decoder_factory_, Create) + .WillByDefault(Invoke(&h264_decoder_factory_, + &test::VideoDecoderProxyFactory::Create)); // By default, mock decode will wrap the fake decoder. 
ON_CALL(mock_decoder_, Configure) @@ -231,37 +226,40 @@ class VideoReceiveStream2Test : public ::testing::TestWithParam { } void SetUp() override { - config_.rtp.remote_ssrc = 1111; - config_.rtp.local_ssrc = 2222; + config_.rtp.remote_ssrc = kRemoteSsrc; + config_.rtp.local_ssrc = kLocalSsrc; config_.renderer = &fake_renderer_; VideoReceiveStreamInterface::Decoder h264_decoder; - h264_decoder.payload_type = 99; - h264_decoder.video_format = SdpVideoFormat("H264"); + h264_decoder.payload_type = kH264PayloadType; + h264_decoder.video_format = SdpVideoFormat::H264(); h264_decoder.video_format.parameters.insert( {"sprop-parameter-sets", "Z0IACpZTBYmI,aMljiA=="}); VideoReceiveStreamInterface::Decoder h265_decoder; - h265_decoder.payload_type = 100; + h265_decoder.payload_type = kH265PayloadType; h265_decoder.video_format = SdpVideoFormat("H265"); + VideoReceiveStreamInterface::Decoder av1_decoder; + av1_decoder.payload_type = kAv1PayloadType; + av1_decoder.video_format = SdpVideoFormat("AV1"); - config_.decoders = {h265_decoder, h264_decoder}; + config_.decoders = {av1_decoder, h265_decoder, h264_decoder}; RecreateReceiveStream(); } void RecreateReceiveStream( - absl::optional state = - absl::nullopt) { + std::optional state = + std::nullopt) { if (video_receive_stream_) { video_receive_stream_->UnregisterFromTransport(); video_receive_stream_ = nullptr; } - timing_ = new VCMTiming(clock_, fake_call_.trials()); + timing_ = new VCMTiming(&env_.clock(), env_.field_trials()); video_receive_stream_ = std::make_unique( - time_controller_.GetTaskQueueFactory(), &fake_call_, - kDefaultNumCpuCores, &packet_router_, config_.Copy(), &call_stats_, - clock_, absl::WrapUnique(timing_), &nack_periodic_processor_, - UseMetronome() ? &decode_sync_ : nullptr, nullptr); + env_, &fake_call_, kDefaultNumCpuCores, &packet_router_, + config_.Copy(), &call_stats_, absl::WrapUnique(timing_), + &nack_periodic_processor_, + UseMetronome() ? 
&decode_sync_ : nullptr); video_receive_stream_->RegisterWithTransport( &rtp_stream_receiver_controller_); if (state) @@ -270,14 +268,14 @@ class VideoReceiveStream2Test : public ::testing::TestWithParam { protected: GlobalSimulatedTimeController time_controller_; - Clock* const clock_; + Environment env_; NackPeriodicProcessor nack_periodic_processor_; - testing::NiceMock mock_h264_decoder_factory_; + testing::NiceMock mock_decoder_factory_; VideoReceiveStreamInterface::Config config_; internal::CallStats call_stats_; testing::NiceMock mock_decoder_; FakeVideoRenderer fake_renderer_; - cricket::FakeCall fake_call_; + FakeCall fake_call_; MockTransport mock_transport_; test::RtcpPacketParser rtcp_packet_parser_; PacketRouter packet_router_; @@ -298,8 +296,8 @@ TEST_P(VideoReceiveStream2Test, CreateFrameFromH264FmtpSpropAndIdr) { uint8_t* payload = rtppacket.AllocatePayload(sizeof(idr_nalu)); memcpy(payload, idr_nalu, sizeof(idr_nalu)); rtppacket.SetMarker(true); - rtppacket.SetSsrc(1111); - rtppacket.SetPayloadType(99); + rtppacket.SetSsrc(kRemoteSsrc); + rtppacket.SetPayloadType(kH264PayloadType); rtppacket.SetSequenceNumber(1); rtppacket.SetTimestamp(0); EXPECT_CALL(mock_decoder_, RegisterDecodeCompleteCallback(_)); @@ -315,7 +313,7 @@ TEST_P(VideoReceiveStream2Test, CreateFrameFromH264FmtpSpropAndIdr) { TEST_P(VideoReceiveStream2Test, PlayoutDelay) { const VideoPlayoutDelay kPlayoutDelay(TimeDelta::Millis(123), - TimeDelta::Millis(321)); + TimeDelta::Millis(521)); std::unique_ptr test_frame = test::FakeFrameBuilder() .Id(0) @@ -351,6 +349,21 @@ TEST_P(VideoReceiveStream2Test, PlayoutDelay) { EXPECT_EQ(123, timings.min_playout_delay.ms()); } +TEST_P(VideoReceiveStream2Test, MinPlayoutDelayIsLimitedByMaxPlayoutDelay) { + const VideoPlayoutDelay kPlayoutDelay(TimeDelta::Millis(123), + TimeDelta::Millis(321)); + video_receive_stream_->OnCompleteFrame(test::FakeFrameBuilder() + .Id(0) + .PlayoutDelay(kPlayoutDelay) + .AsLast() + .Build()); + EXPECT_EQ(timing_->GetTimings().min_playout_delay, kPlayoutDelay.min()); + + // Check that the biggest minimum delay is limited by the max playout delay. + video_receive_stream_->SetMinimumPlayoutDelay(400); + EXPECT_EQ(timing_->GetTimings().min_playout_delay, kPlayoutDelay.max()); +} + TEST_P(VideoReceiveStream2Test, RenderParametersSetToDefaultValues) { // Default render parameters. const VideoFrame::RenderParameters kDefaultRenderParameters; @@ -396,7 +409,7 @@ TEST_P(VideoReceiveStream2Test, MaxCompositionDelaySetFromMaxPlayoutDelay) { .Build(); video_receive_stream_->OnCompleteFrame(std::move(test_frame0)); EXPECT_THAT(timing_->RenderParameters().max_composition_delay_in_frames, - Eq(absl::nullopt)); + Eq(std::nullopt)); time_controller_.AdvanceTime(k30FpsDelay); // Max composition delay not set for playout delay 0,0. @@ -410,7 +423,7 @@ TEST_P(VideoReceiveStream2Test, MaxCompositionDelaySetFromMaxPlayoutDelay) { .Build(); video_receive_stream_->OnCompleteFrame(std::move(test_frame1)); EXPECT_THAT(timing_->RenderParameters().max_composition_delay_in_frames, - Eq(absl::nullopt)); + Eq(std::nullopt)); time_controller_.AdvanceTime(k30FpsDelay); // Max composition delay not set for playout delay X,Y, where X,Y>0. 
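A minimal sketch of the clamping rule that the new MinPlayoutDelayIsLimitedByMaxPlayoutDelay test above exercises, reusing the delay values from that test; the local variables here are illustrative only and not part of the patch:

// A frame carries min=123 ms / max=321 ms; a caller then requests a
// 400 ms minimum via SetMinimumPlayoutDelay(). The frame's maximum wins.
VideoPlayoutDelay frame_delay(TimeDelta::Millis(123), TimeDelta::Millis(321));
TimeDelta requested_min = TimeDelta::Millis(400);
TimeDelta effective_min = std::min(requested_min, frame_delay.max());  // == 321 ms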
@@ -424,7 +437,7 @@ TEST_P(VideoReceiveStream2Test, MaxCompositionDelaySetFromMaxPlayoutDelay) { .Build(); video_receive_stream_->OnCompleteFrame(std::move(test_frame2)); EXPECT_THAT(timing_->RenderParameters().max_composition_delay_in_frames, - Eq(absl::nullopt)); + Eq(std::nullopt)); time_controller_.AdvanceTime(k30FpsDelay); @@ -449,24 +462,22 @@ TEST_P(VideoReceiveStream2Test, LazyDecoderCreation) { uint8_t* payload = rtppacket.AllocatePayload(sizeof(idr_nalu)); memcpy(payload, idr_nalu, sizeof(idr_nalu)); rtppacket.SetMarker(true); - rtppacket.SetSsrc(1111); - // H265 payload type. - rtppacket.SetPayloadType(99); + rtppacket.SetSsrc(kRemoteSsrc); + rtppacket.SetPayloadType(kH264PayloadType); rtppacket.SetSequenceNumber(1); rtppacket.SetTimestamp(0); // No decoders are created by default. - EXPECT_CALL(mock_h264_decoder_factory_, CreateVideoDecoder(_)).Times(0); + EXPECT_CALL(mock_decoder_factory_, Create).Times(0); video_receive_stream_->Start(); time_controller_.AdvanceTime(TimeDelta::Zero()); EXPECT_TRUE( - testing::Mock::VerifyAndClearExpectations(&mock_h264_decoder_factory_)); + testing::Mock::VerifyAndClearExpectations(&mock_decoder_factory_)); // Verify that the decoder is created when we receive payload data and tries // to decode a frame. - EXPECT_CALL( - mock_h264_decoder_factory_, - CreateVideoDecoder(Field(&SdpVideoFormat::name, testing::Eq("H264")))); + EXPECT_CALL(mock_decoder_factory_, + Create(_, Field(&SdpVideoFormat::name, Eq("H264")))); EXPECT_CALL(mock_decoder_, Configure); EXPECT_CALL(mock_decoder_, RegisterDecodeCompleteCallback); EXPECT_CALL(mock_decoder_, Decode(_, _)); @@ -479,12 +490,97 @@ TEST_P(VideoReceiveStream2Test, LazyDecoderCreation) { time_controller_.AdvanceTime(TimeDelta::Zero()); } +TEST_P(VideoReceiveStream2Test, LazyDecoderCreationCodecSwitch) { + constexpr uint8_t idr_nalu[] = {0x05, 0xFF, 0xFF, 0xFF}; + RtpPacketToSend rtppacket(nullptr); + uint8_t* payload = rtppacket.AllocatePayload(sizeof(idr_nalu)); + memcpy(payload, idr_nalu, sizeof(idr_nalu)); + rtppacket.SetMarker(true); + rtppacket.SetSsrc(kRemoteSsrc); + rtppacket.SetPayloadType(kH264PayloadType); + rtppacket.SetSequenceNumber(1); + rtppacket.SetTimestamp(0); + + // No decoders are created by default. + EXPECT_CALL(mock_decoder_factory_, Create).Times(0); + video_receive_stream_->Start(); + time_controller_.AdvanceTime(TimeDelta::Zero()); + + EXPECT_TRUE( + testing::Mock::VerifyAndClearExpectations(&mock_decoder_factory_)); + // Verify that the decoder is created when we receive payload data and tries + // to decode a frame. + EXPECT_CALL(mock_decoder_factory_, + Create(_, Field(&SdpVideoFormat::name, Eq("H264")))); + EXPECT_CALL(mock_decoder_, Configure); + EXPECT_CALL(mock_decoder_, RegisterDecodeCompleteCallback); + EXPECT_CALL(mock_decoder_, Decode(_, _)); + RtpPacketReceived parsed_packet; + ASSERT_TRUE(parsed_packet.Parse(rtppacket.data(), rtppacket.size())); + rtp_stream_receiver_controller_.OnRtpPacket(parsed_packet); + // H264 decoder is released after receiving the AV1 packet. + EXPECT_CALL(mock_decoder_, Release).Times(0); + + // Make sure the decoder thread had a chance to run. + time_controller_.AdvanceTime(TimeDelta::Millis(100)); + + // Switch to AV1. 
+ const uint8_t av1_key_obu[] = {0x18, 0x48, 0x01, 0xAA}; // \ OBU + RtpPacketToSend av1_rtppacket(nullptr); + uint8_t* av1_payload = av1_rtppacket.AllocatePayload(sizeof(av1_key_obu)); + memcpy(av1_payload, av1_key_obu, sizeof(av1_key_obu)); + av1_rtppacket.SetMarker(true); + av1_rtppacket.SetSsrc(kRemoteSsrc); + av1_rtppacket.SetPayloadType(kAv1PayloadType); + av1_rtppacket.SetSequenceNumber(2); + av1_rtppacket.SetTimestamp(1); + + EXPECT_TRUE( + testing::Mock::VerifyAndClearExpectations(&mock_decoder_factory_)); + // Release the H264 previous decoder. + EXPECT_CALL(mock_decoder_, Release); + // Verify that the decoder is created when we receive payload data and tries + // to decode a frame. + EXPECT_CALL(mock_decoder_factory_, + Create(_, Field(&SdpVideoFormat::name, Eq("AV1")))); + EXPECT_CALL(mock_decoder_, Configure); + EXPECT_CALL(mock_decoder_, RegisterDecodeCompleteCallback); + EXPECT_CALL(mock_decoder_, Decode(_, _)); + ASSERT_TRUE(parsed_packet.Parse(av1_rtppacket.data(), av1_rtppacket.size())); + rtp_stream_receiver_controller_.OnRtpPacket(parsed_packet); + + // Make sure the decoder thread had a chance to run. + time_controller_.AdvanceTime(TimeDelta::Millis(100)); + + // Switch back to H264. + rtppacket.SetPayloadType(kH264PayloadType); + rtppacket.SetSequenceNumber(3); + rtppacket.SetTimestamp(2); + + EXPECT_TRUE( + testing::Mock::VerifyAndClearExpectations(&mock_decoder_factory_)); + // Release the AV1 previous decoder and the new H264 decoder on test end. + EXPECT_CALL(mock_decoder_, Release).Times(2); + // Verify that the decoder is created when we receive payload data and tries + // to decode a frame. + EXPECT_CALL(mock_decoder_factory_, + Create(_, Field(&SdpVideoFormat::name, Eq("H264")))); + EXPECT_CALL(mock_decoder_, Configure); + EXPECT_CALL(mock_decoder_, RegisterDecodeCompleteCallback); + EXPECT_CALL(mock_decoder_, Decode(_, _)); + ASSERT_TRUE(parsed_packet.Parse(rtppacket.data(), rtppacket.size())); + rtp_stream_receiver_controller_.OnRtpPacket(parsed_packet); + + // Make sure the decoder thread had a chance to run. 
+ time_controller_.AdvanceTime(TimeDelta::Millis(100)); +} + TEST_P(VideoReceiveStream2Test, PassesNtpTime) { const Timestamp kNtpTimestamp = Timestamp::Millis(12345); std::unique_ptr test_frame = test::FakeFrameBuilder() .Id(0) - .PayloadType(99) + .PayloadType(kH264PayloadType) .NtpTime(kNtpTimestamp) .AsLast() .Build(); @@ -497,12 +593,13 @@ TEST_P(VideoReceiveStream2Test, PassesNtpTime) { TEST_P(VideoReceiveStream2Test, PassesRotation) { const webrtc::VideoRotation kRotation = webrtc::kVideoRotation_180; - std::unique_ptr test_frame = test::FakeFrameBuilder() - .Id(0) - .PayloadType(99) - .Rotation(kRotation) - .AsLast() - .Build(); + std::unique_ptr test_frame = + test::FakeFrameBuilder() + .Id(0) + .PayloadType(kH264PayloadType) + .Rotation(kRotation) + .AsLast() + .Build(); video_receive_stream_->Start(); video_receive_stream_->OnCompleteFrame(std::move(test_frame)); @@ -514,7 +611,7 @@ TEST_P(VideoReceiveStream2Test, PassesPacketInfos) { RtpPacketInfos packet_infos = CreatePacketInfos(3); auto test_frame = test::FakeFrameBuilder() .Id(0) - .PayloadType(99) + .PayloadType(kH264PayloadType) .PacketInfos(packet_infos) .AsLast() .Build(); @@ -526,13 +623,16 @@ TEST_P(VideoReceiveStream2Test, PassesPacketInfos) { } TEST_P(VideoReceiveStream2Test, RenderedFrameUpdatesGetSources) { - constexpr uint32_t kSsrc = 1111; + constexpr uint32_t kSsrc = kRemoteSsrc; constexpr uint32_t kCsrc = 9001; constexpr uint32_t kRtpTimestamp = 12345; // Prepare one video frame with per-packet information. - auto test_frame = - test::FakeFrameBuilder().Id(0).PayloadType(99).AsLast().Build(); + auto test_frame = test::FakeFrameBuilder() + .Id(0) + .PayloadType(kH264PayloadType) + .AsLast() + .Build(); RtpPacketInfos packet_infos; { RtpPacketInfos::vector_type infos; @@ -542,16 +642,16 @@ TEST_P(VideoReceiveStream2Test, RenderedFrameUpdatesGetSources) { info.set_csrcs({kCsrc}); info.set_rtp_timestamp(kRtpTimestamp); - info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(5000)); + info.set_receive_time(env_.clock().CurrentTime() - TimeDelta::Millis(5000)); infos.push_back(info); - info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(3000)); + info.set_receive_time(env_.clock().CurrentTime() - TimeDelta::Millis(3000)); infos.push_back(info); - info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(2000)); + info.set_receive_time(env_.clock().CurrentTime() - TimeDelta::Millis(2000)); infos.push_back(info); - info.set_receive_time(clock_->CurrentTime() - TimeDelta::Millis(1000)); + info.set_receive_time(env_.clock().CurrentTime() - TimeDelta::Millis(1000)); infos.push_back(info); packet_infos = RtpPacketInfos(std::move(infos)); @@ -563,12 +663,12 @@ TEST_P(VideoReceiveStream2Test, RenderedFrameUpdatesGetSources) { EXPECT_THAT(video_receive_stream_->GetSources(), IsEmpty()); // Render one video frame. - Timestamp timestamp_min = clock_->CurrentTime(); + Timestamp timestamp_min = env_.clock().CurrentTime(); video_receive_stream_->OnCompleteFrame(std::move(test_frame)); // Verify that the per-packet information is passed to the renderer. EXPECT_THAT(fake_renderer_.WaitForFrame(kDefaultTimeOut), RenderedFrameWith(PacketInfos(ElementsAreArray(packet_infos)))); - Timestamp timestamp_max = clock_->CurrentTime(); + Timestamp timestamp_max = env_.clock().CurrentTime(); // Verify that the per-packet information also updates `GetSources()`. 
std::vector sources = video_receive_stream_->GetSources(); @@ -606,8 +706,11 @@ std::unique_ptr MakeFrameWithResolution( int picture_id, int width, int height) { - auto frame = - test::FakeFrameBuilder().Id(picture_id).PayloadType(99).AsLast().Build(); + auto frame = test::FakeFrameBuilder() + .Id(picture_id) + .PayloadType(kH264PayloadType) + .AsLast() + .Build(); frame->SetFrameType(frame_type); frame->_encodedWidth = width; frame->_encodedHeight = height; @@ -748,14 +851,14 @@ TEST_P(VideoReceiveStream2Test, DependantFramesAreScheduled) { auto key_frame = test::FakeFrameBuilder() .Id(0) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(kFirstRtpTimestamp) .ReceivedTime(kStartTime) .AsLast() .Build(); auto delta_frame = test::FakeFrameBuilder() .Id(1) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(RtpTimestampForFrame(1)) .ReceivedTime(ReceiveTimeForFrame(1)) .Refs({0}) @@ -784,20 +887,20 @@ TEST_P(VideoReceiveStream2Test, FramesScheduledInOrder) { auto key_frame = test::FakeFrameBuilder() .Id(0) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(kFirstRtpTimestamp) .AsLast() .Build(); auto delta_frame1 = test::FakeFrameBuilder() .Id(1) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(RtpTimestampForFrame(1)) .Refs({0}) .AsLast() .Build(); auto delta_frame2 = test::FakeFrameBuilder() .Id(2) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(RtpTimestampForFrame(2)) .Refs({1}) .AsLast() @@ -813,15 +916,15 @@ TEST_P(VideoReceiveStream2Test, FramesScheduledInOrder) { EXPECT_CALL(mock_decoder_, Decode(test::RtpTimestamp(RtpTimestampForFrame(2)), _)) .Times(1); - key_frame->SetReceivedTime(clock_->CurrentTime().ms()); + key_frame->SetReceivedTime(env_.clock().CurrentTime().ms()); video_receive_stream_->OnCompleteFrame(std::move(key_frame)); EXPECT_THAT(fake_renderer_.WaitForFrame(TimeDelta::Zero()), RenderedFrame()); - delta_frame2->SetReceivedTime(clock_->CurrentTime().ms()); + delta_frame2->SetReceivedTime(env_.clock().CurrentTime().ms()); video_receive_stream_->OnCompleteFrame(std::move(delta_frame2)); EXPECT_THAT(fake_renderer_.WaitForFrame(k30FpsDelay), DidNotReceiveFrame()); // `delta_frame1` arrives late. - delta_frame1->SetReceivedTime(clock_->CurrentTime().ms()); + delta_frame1->SetReceivedTime(env_.clock().CurrentTime().ms()); video_receive_stream_->OnCompleteFrame(std::move(delta_frame1)); EXPECT_THAT(fake_renderer_.WaitForFrame(k30FpsDelay), RenderedFrame()); EXPECT_THAT(fake_renderer_.WaitForFrame(k30FpsDelay * 2), RenderedFrame()); @@ -832,20 +935,20 @@ TEST_P(VideoReceiveStream2Test, WaitsforAllSpatialLayers) { video_receive_stream_->Start(); auto sl0 = test::FakeFrameBuilder() .Id(0) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(kFirstRtpTimestamp) .ReceivedTime(kStartTime) .Build(); auto sl1 = test::FakeFrameBuilder() .Id(1) - .PayloadType(99) + .PayloadType(kH264PayloadType) .ReceivedTime(kStartTime) .Time(kFirstRtpTimestamp) .Refs({0}) .Build(); auto sl2 = test::FakeFrameBuilder() .Id(2) - .PayloadType(99) + .PayloadType(kH264PayloadType) .ReceivedTime(kStartTime) .Time(kFirstRtpTimestamp) .Refs({0, 1}) @@ -854,7 +957,7 @@ TEST_P(VideoReceiveStream2Test, WaitsforAllSpatialLayers) { // No decodes should be called until `sl2` is received. 
EXPECT_CALL(mock_decoder_, Decode(_, _)).Times(0); - sl0->SetReceivedTime(clock_->CurrentTime().ms()); + sl0->SetReceivedTime(env_.clock().CurrentTime().ms()); video_receive_stream_->OnCompleteFrame(std::move(sl0)); EXPECT_THAT(fake_renderer_.WaitForFrame(TimeDelta::Zero()), DidNotReceiveFrame()); @@ -881,20 +984,20 @@ TEST_P(VideoReceiveStream2Test, FramesFastForwardOnSystemHalt) { // resumes, F1 will be old and so F2 should be decoded. auto key_frame = test::FakeFrameBuilder() .Id(0) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(kFirstRtpTimestamp) .AsLast() .Build(); auto ffwd_frame = test::FakeFrameBuilder() .Id(1) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(RtpTimestampForFrame(1)) .Refs({0}) .AsLast() .Build(); auto rendered_frame = test::FakeFrameBuilder() .Id(2) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(RtpTimestampForFrame(2)) .Refs({0}) .AsLast() @@ -928,14 +1031,14 @@ TEST_P(VideoReceiveStream2Test, BetterFrameInsertedWhileWaitingToDecodeFrame) { auto key_frame = test::FakeFrameBuilder() .Id(0) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(kFirstRtpTimestamp) .ReceivedTime(ReceiveTimeForFrame(0)) .AsLast() .Build(); auto f1 = test::FakeFrameBuilder() .Id(1) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(RtpTimestampForFrame(1)) .ReceivedTime(ReceiveTimeForFrame(1)) .Refs({0}) @@ -943,7 +1046,7 @@ TEST_P(VideoReceiveStream2Test, BetterFrameInsertedWhileWaitingToDecodeFrame) { .Build(); auto f2 = test::FakeFrameBuilder() .Id(2) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(RtpTimestampForFrame(2)) .ReceivedTime(ReceiveTimeForFrame(2)) .Refs({0}) @@ -982,9 +1085,9 @@ TEST_P(VideoReceiveStream2Test, RtpTimestampWrapAround) { video_receive_stream_->OnCompleteFrame( test::FakeFrameBuilder() .Id(0) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(kBaseRtp) - .ReceivedTime(clock_->CurrentTime()) + .ReceivedTime(env_.clock().CurrentTime()) .AsLast() .Build()); EXPECT_THAT(fake_renderer_.WaitForFrame(TimeDelta::Zero()), RenderedFrame()); @@ -992,9 +1095,9 @@ TEST_P(VideoReceiveStream2Test, RtpTimestampWrapAround) { video_receive_stream_->OnCompleteFrame( test::FakeFrameBuilder() .Id(1) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(kBaseRtp + k30FpsRtpTimestampDelta) - .ReceivedTime(clock_->CurrentTime()) + .ReceivedTime(env_.clock().CurrentTime()) .AsLast() .Build()); EXPECT_THAT(fake_renderer_.WaitForFrame(k30FpsDelay), RenderedFrame()); @@ -1012,14 +1115,15 @@ TEST_P(VideoReceiveStream2Test, RtpTimestampWrapAround) { video_receive_stream_->OnCompleteFrame( test::FakeFrameBuilder() .Id(2) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(kWrapAroundRtp) - .ReceivedTime(clock_->CurrentTime()) + .ReceivedTime(env_.clock().CurrentTime()) .AsLast() .Build()); EXPECT_CALL(mock_decoder_, Decode(test::RtpTimestamp(kWrapAroundRtp), _)) .Times(1); - EXPECT_THAT(fake_renderer_.WaitForFrame(TimeDelta::Zero()), RenderedFrame()); + EXPECT_THAT(fake_renderer_.WaitForFrame(TimeDelta::Seconds(1)), + RenderedFrame()); video_receive_stream_->Stop(); } @@ -1043,7 +1147,7 @@ TEST_P(VideoReceiveStream2Test, PoorConnectionWithFpsChangeDuringLostFrame) { video_receive_stream_->OnCompleteFrame( test::FakeFrameBuilder() .Id(0) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(RtpTimestampForFrame(0)) .ReceivedTime(ReceiveTimeForFrame(0)) .AsLast() @@ -1054,7 +1158,7 @@ TEST_P(VideoReceiveStream2Test, PoorConnectionWithFpsChangeDuringLostFrame) { video_receive_stream_->OnCompleteFrame( 
test::FakeFrameBuilder() .Id(1) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(RtpTimestampForFrame(1)) .ReceivedTime(ReceiveTimeForFrame(1)) .Refs({0}) @@ -1067,39 +1171,41 @@ TEST_P(VideoReceiveStream2Test, PoorConnectionWithFpsChangeDuringLostFrame) { // 2 second of frames at 15 fps, and then a keyframe. time_controller_.AdvanceTime(k30FpsDelay); - Timestamp send_30fps_end_time = clock_->CurrentTime() + TimeDelta::Seconds(2); + Timestamp send_30fps_end_time = + env_.clock().CurrentTime() + TimeDelta::Seconds(2); int id = 3; EXPECT_CALL(mock_transport_, SendRtcp).Times(AnyNumber()); - while (clock_->CurrentTime() < send_30fps_end_time) { + while (env_.clock().CurrentTime() < send_30fps_end_time) { ++id; video_receive_stream_->OnCompleteFrame( test::FakeFrameBuilder() .Id(id) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(RtpTimestampForFrame(id)) .ReceivedTime(ReceiveTimeForFrame(id)) .Refs({id - 1}) .AsLast() .Build()); EXPECT_THAT(fake_renderer_.WaitForFrame(k30FpsDelay, /*advance_time=*/true), - Eq(absl::nullopt)); + Eq(std::nullopt)); } uint32_t current_rtp = RtpTimestampForFrame(id); - Timestamp send_15fps_end_time = clock_->CurrentTime() + TimeDelta::Seconds(2); - while (clock_->CurrentTime() < send_15fps_end_time) { + Timestamp send_15fps_end_time = + env_.clock().CurrentTime() + TimeDelta::Seconds(2); + while (env_.clock().CurrentTime() < send_15fps_end_time) { ++id; current_rtp += k15FpsRtpTimestampDelta; video_receive_stream_->OnCompleteFrame( test::FakeFrameBuilder() .Id(id) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(current_rtp) - .ReceivedTime(clock_->CurrentTime()) + .ReceivedTime(env_.clock().CurrentTime()) .Refs({id - 1}) .AsLast() .Build()); EXPECT_THAT(fake_renderer_.WaitForFrame(k15FpsDelay, /*advance_time=*/true), - Eq(absl::nullopt)); + Eq(std::nullopt)); } ++id; @@ -1110,9 +1216,9 @@ TEST_P(VideoReceiveStream2Test, PoorConnectionWithFpsChangeDuringLostFrame) { video_receive_stream_->OnCompleteFrame( test::FakeFrameBuilder() .Id(id) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(current_rtp) - .ReceivedTime(clock_->CurrentTime() + kKeyframeDelay) + .ReceivedTime(env_.clock().CurrentTime() + kKeyframeDelay) .AsLast() .Build()); // If the framerate was not updated to be 15fps from the frames that arrived @@ -1134,7 +1240,7 @@ TEST_P(VideoReceiveStream2Test, StreamShouldNotTimeoutWhileWaitingForFrame) { video_receive_stream_->OnCompleteFrame( test::FakeFrameBuilder() .Id(0) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(RtpTimestampForFrame(0)) .ReceivedTime(ReceiveTimeForFrame(0)) .AsLast() @@ -1146,7 +1252,7 @@ TEST_P(VideoReceiveStream2Test, StreamShouldNotTimeoutWhileWaitingForFrame) { video_receive_stream_->OnCompleteFrame( test::FakeFrameBuilder() .Id(id) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(RtpTimestampForFrame(id)) .ReceivedTime(ReceiveTimeForFrame(id)) .Refs({0}) @@ -1164,9 +1270,9 @@ TEST_P(VideoReceiveStream2Test, StreamShouldNotTimeoutWhileWaitingForFrame) { video_receive_stream_->OnCompleteFrame( test::FakeFrameBuilder() .Id(121) - .PayloadType(99) + .PayloadType(kH264PayloadType) .Time(late_decode_rtp) - .ReceivedTime(clock_->CurrentTime()) + .ReceivedTime(env_.clock().CurrentTime()) .AsLast() .Build()); EXPECT_THAT(fake_renderer_.WaitForFrame(TimeDelta::Millis(100), diff --git a/video/video_send_stream.cc b/video/video_send_stream.cc deleted file mode 100644 index 9111c3e6ed..0000000000 --- a/video/video_send_stream.cc +++ /dev/null @@ -1,342 +0,0 @@ -/* - * Copyright (c) 2013 
The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "video/video_send_stream.h" - -#include - -#include "api/array_view.h" -#include "api/task_queue/task_queue_base.h" -#include "api/video/video_stream_encoder_settings.h" -#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" -#include "modules/rtp_rtcp/source/rtp_header_extension_size.h" -#include "modules/rtp_rtcp/source/rtp_sender.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/strings/string_builder.h" -#include "system_wrappers/include/clock.h" -#include "video/adaptation/overuse_frame_detector.h" -#include "video/frame_cadence_adapter.h" -#include "video/video_stream_encoder.h" - -namespace webrtc { - -namespace { - -size_t CalculateMaxHeaderSize(const RtpConfig& config) { - size_t header_size = kRtpHeaderSize; - size_t extensions_size = 0; - size_t fec_extensions_size = 0; - if (!config.extensions.empty()) { - RtpHeaderExtensionMap extensions_map(config.extensions); - extensions_size = RtpHeaderExtensionSize(RTPSender::VideoExtensionSizes(), - extensions_map); - fec_extensions_size = - RtpHeaderExtensionSize(RTPSender::FecExtensionSizes(), extensions_map); - } - header_size += extensions_size; - if (config.flexfec.payload_type >= 0) { - // All FEC extensions again plus maximum FlexFec overhead. - header_size += fec_extensions_size + 32; - } else { - if (config.ulpfec.ulpfec_payload_type >= 0) { - // Header with all the FEC extensions will be repeated plus maximum - // UlpFec overhead. - header_size += fec_extensions_size + 18; - } - if (config.ulpfec.red_payload_type >= 0) { - header_size += 1; // RED header. - } - } - // Additional room for Rtx. - if (config.rtx.payload_type >= 0) - header_size += kRtxHeaderSize; - return header_size; -} - -VideoStreamEncoder::BitrateAllocationCallbackType -GetBitrateAllocationCallbackType(const VideoSendStream::Config& config, - const FieldTrialsView& field_trials) { - if (webrtc::RtpExtension::FindHeaderExtensionByUri( - config.rtp.extensions, - webrtc::RtpExtension::kVideoLayersAllocationUri, - config.crypto_options.srtp.enable_encrypted_rtp_header_extensions - ? 
RtpExtension::Filter::kPreferEncryptedExtension - : RtpExtension::Filter::kDiscardEncryptedExtension)) { - return VideoStreamEncoder::BitrateAllocationCallbackType:: - kVideoLayersAllocation; - } - if (field_trials.IsEnabled("WebRTC-Target-Bitrate-Rtcp")) { - return VideoStreamEncoder::BitrateAllocationCallbackType:: - kVideoBitrateAllocation; - } - return VideoStreamEncoder::BitrateAllocationCallbackType:: - kVideoBitrateAllocationWhenScreenSharing; -} - -RtpSenderFrameEncryptionConfig CreateFrameEncryptionConfig( - const VideoSendStream::Config* config) { - RtpSenderFrameEncryptionConfig frame_encryption_config; - frame_encryption_config.frame_encryptor = config->frame_encryptor.get(); - frame_encryption_config.crypto_options = config->crypto_options; - return frame_encryption_config; -} - -RtpSenderObservers CreateObservers(RtcpRttStats* call_stats, - EncoderRtcpFeedback* encoder_feedback, - SendStatisticsProxy* stats_proxy, - SendDelayStats* send_delay_stats) { - RtpSenderObservers observers; - observers.rtcp_rtt_stats = call_stats; - observers.intra_frame_callback = encoder_feedback; - observers.rtcp_loss_notification_observer = encoder_feedback; - observers.report_block_data_observer = stats_proxy; - observers.rtp_stats = stats_proxy; - observers.bitrate_observer = stats_proxy; - observers.frame_count_observer = stats_proxy; - observers.rtcp_type_observer = stats_proxy; - observers.send_delay_observer = stats_proxy; - observers.send_packet_observer = send_delay_stats; - return observers; -} - -std::unique_ptr CreateVideoStreamEncoder( - Clock* clock, - int num_cpu_cores, - TaskQueueFactory* task_queue_factory, - SendStatisticsProxy* stats_proxy, - const VideoStreamEncoderSettings& encoder_settings, - VideoStreamEncoder::BitrateAllocationCallbackType - bitrate_allocation_callback_type, - const FieldTrialsView& field_trials, - webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { - std::unique_ptr encoder_queue = - task_queue_factory->CreateTaskQueue("EncoderQueue", - TaskQueueFactory::Priority::NORMAL); - TaskQueueBase* encoder_queue_ptr = encoder_queue.get(); - return std::make_unique( - clock, num_cpu_cores, stats_proxy, encoder_settings, - std::make_unique(stats_proxy), - FrameCadenceAdapterInterface::Create(clock, encoder_queue_ptr, - field_trials), - std::move(encoder_queue), bitrate_allocation_callback_type, field_trials, - encoder_selector); -} - -} // namespace - -namespace internal { - -VideoSendStream::VideoSendStream( - Clock* clock, - int num_cpu_cores, - TaskQueueFactory* task_queue_factory, - TaskQueueBase* network_queue, - RtcpRttStats* call_stats, - RtpTransportControllerSendInterface* transport, - BitrateAllocatorInterface* bitrate_allocator, - SendDelayStats* send_delay_stats, - RtcEventLog* event_log, - VideoSendStream::Config config, - VideoEncoderConfig encoder_config, - const std::map& suspended_ssrcs, - const std::map& suspended_payload_states, - std::unique_ptr fec_controller, - const FieldTrialsView& field_trials) - : transport_(transport), - stats_proxy_(clock, config, encoder_config.content_type, field_trials), - config_(std::move(config)), - content_type_(encoder_config.content_type), - video_stream_encoder_(CreateVideoStreamEncoder( - clock, - num_cpu_cores, - task_queue_factory, - &stats_proxy_, - config_.encoder_settings, - GetBitrateAllocationCallbackType(config_, field_trials), - field_trials, - config_.encoder_selector)), - encoder_feedback_( - clock, - config_.rtp.ssrcs, - video_stream_encoder_.get(), - [this](uint32_t ssrc, 
const std::vector& seq_nums) { - return rtp_video_sender_->GetSentRtpPacketInfos(ssrc, seq_nums); - }), - rtp_video_sender_( - transport->CreateRtpVideoSender(suspended_ssrcs, - suspended_payload_states, - config_.rtp, - config_.rtcp_report_interval_ms, - config_.send_transport, - CreateObservers(call_stats, - &encoder_feedback_, - &stats_proxy_, - send_delay_stats), - event_log, - std::move(fec_controller), - CreateFrameEncryptionConfig(&config_), - config_.frame_transformer)), - send_stream_(clock, - &stats_proxy_, - transport, - bitrate_allocator, - video_stream_encoder_.get(), - &config_, - encoder_config.max_bitrate_bps, - encoder_config.bitrate_priority, - encoder_config.content_type, - rtp_video_sender_, - field_trials) { - RTC_DCHECK(config_.encoder_settings.encoder_factory); - RTC_DCHECK(config_.encoder_settings.bitrate_allocator_factory); - - video_stream_encoder_->SetFecControllerOverride(rtp_video_sender_); - - ReconfigureVideoEncoder(std::move(encoder_config)); -} - -VideoSendStream::~VideoSendStream() { - RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_DCHECK(!running_); - transport_->DestroyRtpVideoSender(rtp_video_sender_); -} - -void VideoSendStream::Start() { - const std::vector active_layers(config_.rtp.ssrcs.size(), true); - StartPerRtpStream(active_layers); -} - -void VideoSendStream::StartPerRtpStream(const std::vector active_layers) { - RTC_DCHECK_RUN_ON(&thread_checker_); - - // Keep our `running_` flag expected state in sync with active layers since - // the `send_stream_` will be implicitly stopped/started depending on the - // state of the layers. - bool running = false; - - rtc::StringBuilder active_layers_string; - active_layers_string << "{"; - for (size_t i = 0; i < active_layers.size(); ++i) { - if (active_layers[i]) { - running = true; - active_layers_string << "1"; - } else { - active_layers_string << "0"; - } - if (i < active_layers.size() - 1) { - active_layers_string << ", "; - } - } - active_layers_string << "}"; - RTC_LOG(LS_INFO) << "StartPerRtpStream: " << active_layers_string.str(); - send_stream_.StartPerRtpStream(active_layers); - running_ = running; -} - -void VideoSendStream::Stop() { - RTC_DCHECK_RUN_ON(&thread_checker_); - if (!running_) - return; - RTC_DLOG(LS_INFO) << "VideoSendStream::Stop"; - running_ = false; - send_stream_.Stop(); -} - -bool VideoSendStream::started() { - RTC_DCHECK_RUN_ON(&thread_checker_); - return running_; -} - -void VideoSendStream::AddAdaptationResource( - rtc::scoped_refptr resource) { - RTC_DCHECK_RUN_ON(&thread_checker_); - video_stream_encoder_->AddAdaptationResource(resource); -} - -std::vector> -VideoSendStream::GetAdaptationResources() { - RTC_DCHECK_RUN_ON(&thread_checker_); - return video_stream_encoder_->GetAdaptationResources(); -} - -void VideoSendStream::SetSource( - rtc::VideoSourceInterface* source, - const DegradationPreference& degradation_preference) { - RTC_DCHECK_RUN_ON(&thread_checker_); - video_stream_encoder_->SetSource(source, degradation_preference); -} - -void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) { - ReconfigureVideoEncoder(std::move(config), nullptr); -} - -void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config, - SetParametersCallback callback) { - RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_DCHECK_EQ(content_type_, config.content_type); - video_stream_encoder_->ConfigureEncoder( - std::move(config), - config_.rtp.max_packet_size - CalculateMaxHeaderSize(config_.rtp), - std::move(callback)); -} - -VideoSendStream::Stats 
VideoSendStream::GetStats() { - RTC_DCHECK_RUN_ON(&thread_checker_); - return stats_proxy_.GetStats(); -} - -absl::optional VideoSendStream::GetPacingFactorOverride() const { - return send_stream_.configured_pacing_factor(); -} - -void VideoSendStream::StopPermanentlyAndGetRtpStates( - VideoSendStream::RtpStateMap* rtp_state_map, - VideoSendStream::RtpPayloadStateMap* payload_state_map) { - RTC_DCHECK_RUN_ON(&thread_checker_); - video_stream_encoder_->Stop(); - - running_ = false; - // Always run these cleanup steps regardless of whether running_ was set - // or not. This will unregister callbacks before destruction. - // See `VideoSendStreamImpl::StopVideoSendStream` for more. - send_stream_.Stop(); - *rtp_state_map = send_stream_.GetRtpStates(); - *payload_state_map = send_stream_.GetRtpPayloadStates(); -} - -void VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) { - RTC_DCHECK_RUN_ON(&thread_checker_); - send_stream_.DeliverRtcp(packet, length); -} - -void VideoSendStream::GenerateKeyFrame(const std::vector& rids) { - RTC_DCHECK_RUN_ON(&thread_checker_); - // Map rids to layers. If rids is empty, generate a keyframe for all layers. - std::vector next_frames(config_.rtp.ssrcs.size(), - VideoFrameType::kVideoFrameKey); - if (!config_.rtp.rids.empty() && !rids.empty()) { - std::fill(next_frames.begin(), next_frames.end(), - VideoFrameType::kVideoFrameDelta); - for (const auto& rid : rids) { - for (size_t i = 0; i < config_.rtp.rids.size(); i++) { - if (config_.rtp.rids[i] == rid) { - next_frames[i] = VideoFrameType::kVideoFrameKey; - break; - } - } - } - } - if (video_stream_encoder_) { - video_stream_encoder_->SendKeyFrame(next_frames); - } -} - -} // namespace internal -} // namespace webrtc diff --git a/video/video_send_stream.h b/video/video_send_stream.h deleted file mode 100644 index 1f4717fbec..0000000000 --- a/video/video_send_stream.h +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef VIDEO_VIDEO_SEND_STREAM_H_ -#define VIDEO_VIDEO_SEND_STREAM_H_ - -#include -#include -#include -#include - -#include "api/fec_controller.h" -#include "api/field_trials_view.h" -#include "api/sequence_checker.h" -#include "api/task_queue/pending_task_safety_flag.h" -#include "call/bitrate_allocator.h" -#include "call/video_receive_stream.h" -#include "call/video_send_stream.h" -#include "rtc_base/event.h" -#include "rtc_base/system/no_unique_address.h" -#include "video/encoder_rtcp_feedback.h" -#include "video/send_delay_stats.h" -#include "video/send_statistics_proxy.h" -#include "video/video_send_stream_impl.h" -#include "video/video_stream_encoder_interface.h" - -namespace webrtc { -namespace test { -class VideoSendStreamPeer; -} // namespace test - -class CallStats; -class IvfFileWriter; -class RateLimiter; -class RtpRtcp; -class RtpTransportControllerSendInterface; -class RtcEventLog; - -namespace internal { - -class VideoSendStreamImpl; - -// VideoSendStream implements webrtc::VideoSendStream. -// Internally, it delegates all public methods to VideoSendStreamImpl and / or -// VideoStreamEncoder. 
-class VideoSendStream : public webrtc::VideoSendStream { - public: - using RtpStateMap = std::map; - using RtpPayloadStateMap = std::map; - - VideoSendStream( - Clock* clock, - int num_cpu_cores, - TaskQueueFactory* task_queue_factory, - TaskQueueBase* network_queue, - RtcpRttStats* call_stats, - RtpTransportControllerSendInterface* transport, - BitrateAllocatorInterface* bitrate_allocator, - SendDelayStats* send_delay_stats, - RtcEventLog* event_log, - VideoSendStream::Config config, - VideoEncoderConfig encoder_config, - const std::map& suspended_ssrcs, - const std::map& suspended_payload_states, - std::unique_ptr fec_controller, - const FieldTrialsView& field_trials); - - ~VideoSendStream() override; - - void DeliverRtcp(const uint8_t* packet, size_t length); - - // webrtc::VideoSendStream implementation. - void Start() override; - void StartPerRtpStream(std::vector active_layers) override; - void Stop() override; - bool started() override; - - void AddAdaptationResource(rtc::scoped_refptr resource) override; - std::vector> GetAdaptationResources() override; - - void SetSource(rtc::VideoSourceInterface* source, - const DegradationPreference& degradation_preference) override; - - void ReconfigureVideoEncoder(VideoEncoderConfig config) override; - void ReconfigureVideoEncoder(VideoEncoderConfig config, - SetParametersCallback callback) override; - Stats GetStats() override; - - void StopPermanentlyAndGetRtpStates(RtpStateMap* rtp_state_map, - RtpPayloadStateMap* payload_state_map); - void GenerateKeyFrame(const std::vector& rids) override; - - private: - friend class test::VideoSendStreamPeer; - - absl::optional GetPacingFactorOverride() const; - - RTC_NO_UNIQUE_ADDRESS SequenceChecker thread_checker_; - RtpTransportControllerSendInterface* const transport_; - - SendStatisticsProxy stats_proxy_; - const VideoSendStream::Config config_; - const VideoEncoderConfig::ContentType content_type_; - std::unique_ptr video_stream_encoder_; - EncoderRtcpFeedback encoder_feedback_; - RtpVideoSenderInterface* const rtp_video_sender_; - VideoSendStreamImpl send_stream_; - bool running_ RTC_GUARDED_BY(thread_checker_) = false; -}; - -} // namespace internal -} // namespace webrtc - -#endif // VIDEO_VIDEO_SEND_STREAM_H_ diff --git a/video/video_send_stream_impl.cc b/video/video_send_stream_impl.cc index d0a96ce06c..dfcb9a30e9 100644 --- a/video/video_send_stream_impl.cc +++ b/video/video_send_stream_impl.cc @@ -13,20 +13,54 @@ #include #include +#include +#include +#include #include #include +#include #include "absl/algorithm/container.h" +#include "api/adaptation/resource.h" +#include "api/call/bitrate_allocation.h" #include "api/crypto/crypto_options.h" +#include "api/environment/environment.h" +#include "api/fec_controller.h" +#include "api/field_trials_view.h" +#include "api/metronome/metronome.h" #include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/video/encoded_image.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_layers_allocation.h" +#include "api/video/video_source_interface.h" +#include 
"api/video/video_stream_encoder_settings.h" #include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "call/bitrate_allocator.h" +#include "call/rtp_config.h" #include "call/rtp_transport_controller_send_interface.h" #include "call/video_send_stream.h" +#include "media/base/media_constants.h" +#include "media/base/sdp_video_format_utils.h" #include "modules/pacing/pacing_controller.h" +#include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_header_extension_size.h" +#include "modules/rtp_rtcp/source/rtp_sender.h" +#include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/alr_experiment.h" #include "rtc_base/experiments/field_trial_parser.h" @@ -34,9 +68,18 @@ #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" -#include "system_wrappers/include/field_trial.h" +#include "video/adaptation/overuse_frame_detector.h" +#include "video/config/video_encoder_config.h" +#include "video/encoder_rtcp_feedback.h" +#include "video/frame_cadence_adapter.h" +#include "video/send_delay_stats.h" +#include "video/send_statistics_proxy.h" +#include "video/video_stream_encoder.h" +#include "video/video_stream_encoder_interface.h" namespace webrtc { namespace internal { @@ -138,13 +181,16 @@ int CalculateMaxPadBitrateBps(const std::vector& streams, return pad_up_to_bitrate_bps; } -absl::optional GetAlrSettings( +std::optional GetAlrSettings( + const FieldTrialsView& field_trials, VideoEncoderConfig::ContentType content_type) { if (content_type == VideoEncoderConfig::ContentType::kScreen) { return AlrExperimentSettings::CreateFromFieldTrial( + field_trials, AlrExperimentSettings::kScreenshareProbingBweExperimentName); } return AlrExperimentSettings::CreateFromFieldTrial( + field_trials, AlrExperimentSettings::kStrictPacingAndProbingExperimentName); } @@ -162,27 +208,40 @@ bool SameStreamsEnabled(const VideoBitrateAllocation& lhs, // Returns an optional that has value iff TransportSeqNumExtensionConfigured // is `true` for the given video send stream config. 
-absl::optional GetConfiguredPacingFactor( +std::optional GetConfiguredPacingFactor( const VideoSendStream::Config& config, VideoEncoderConfig::ContentType content_type, - const PacingConfig& default_pacing_config) { + const PacingConfig& default_pacing_config, + const FieldTrialsView& field_trials) { if (!TransportSeqNumExtensionConfigured(config)) - return absl::nullopt; + return std::nullopt; - absl::optional alr_settings = - GetAlrSettings(content_type); + std::optional alr_settings = + GetAlrSettings(field_trials, content_type); if (alr_settings) return alr_settings->pacing_factor; - RateControlSettings rate_control_settings = - RateControlSettings::ParseFromFieldTrials(); - return rate_control_settings.GetPacingFactor().value_or( - default_pacing_config.pacing_factor); + return RateControlSettings(field_trials) + .GetPacingFactor() + .value_or(default_pacing_config.pacing_factor); +} + +int GetEncoderPriorityBitrate(std::string codec_name, + const FieldTrialsView& field_trials) { + int priority_bitrate = 0; + if (PayloadStringToCodecType(codec_name) == VideoCodecType::kVideoCodecAV1) { + webrtc::FieldTrialParameter av1_priority_bitrate("bitrate", 0); + webrtc::ParseFieldTrial( + {&av1_priority_bitrate}, + field_trials.Lookup("WebRTC-AV1-OverridePriorityBitrate")); + priority_bitrate = av1_priority_bitrate; + } + return priority_bitrate; } uint32_t GetInitialEncoderMaxBitrate(int initial_encoder_max_bitrate) { if (initial_encoder_max_bitrate > 0) - return rtc::dchecked_cast(initial_encoder_max_bitrate); + return dchecked_cast(initial_encoder_max_bitrate); // TODO(srte): Make sure max bitrate is not set to negative values. We don't // have any way to handle unset values in downstream code, such as the @@ -204,6 +263,114 @@ int GetDefaultMinVideoBitrateBps(VideoCodecType codec_type) { return kDefaultMinVideoBitrateBps; } +size_t CalculateMaxHeaderSize(const RtpConfig& config) { + size_t header_size = kRtpHeaderSize; + size_t extensions_size = 0; + size_t fec_extensions_size = 0; + if (!config.extensions.empty()) { + RtpHeaderExtensionMap extensions_map(config.extensions); + extensions_size = RtpHeaderExtensionSize(RTPSender::VideoExtensionSizes(), + extensions_map); + fec_extensions_size = + RtpHeaderExtensionSize(RTPSender::FecExtensionSizes(), extensions_map); + } + header_size += extensions_size; + if (config.flexfec.payload_type >= 0) { + // All FEC extensions again plus maximum FlexFec overhead. + header_size += fec_extensions_size + 32; + } else { + if (config.ulpfec.ulpfec_payload_type >= 0) { + // Header with all the FEC extensions will be repeated plus maximum + // UlpFec overhead. + header_size += fec_extensions_size + 18; + } + if (config.ulpfec.red_payload_type >= 0) { + header_size += 1; // RED header. + } + } + // Additional room for Rtx. + if (config.rtx.payload_type >= 0) + header_size += kRtxHeaderSize; + return header_size; +} + +VideoStreamEncoder::BitrateAllocationCallbackType +GetBitrateAllocationCallbackType(const VideoSendStream::Config& config, + const FieldTrialsView& field_trials) { + if (webrtc::RtpExtension::FindHeaderExtensionByUri( + config.rtp.extensions, + webrtc::RtpExtension::kVideoLayersAllocationUri, + config.crypto_options.srtp.enable_encrypted_rtp_header_extensions + ? 
RtpExtension::Filter::kPreferEncryptedExtension + : RtpExtension::Filter::kDiscardEncryptedExtension)) { + return VideoStreamEncoder::BitrateAllocationCallbackType:: + kVideoLayersAllocation; + } + if (field_trials.IsEnabled("WebRTC-Target-Bitrate-Rtcp")) { + return VideoStreamEncoder::BitrateAllocationCallbackType:: + kVideoBitrateAllocation; + } + return VideoStreamEncoder::BitrateAllocationCallbackType:: + kVideoBitrateAllocationWhenScreenSharing; +} + +RtpSenderFrameEncryptionConfig CreateFrameEncryptionConfig( + const VideoSendStream::Config* config) { + RtpSenderFrameEncryptionConfig frame_encryption_config; + frame_encryption_config.frame_encryptor = config->frame_encryptor.get(); + frame_encryption_config.crypto_options = config->crypto_options; + return frame_encryption_config; +} + +RtpSenderObservers CreateObservers(RtcpRttStats* call_stats, + EncoderRtcpFeedback* encoder_feedback, + SendStatisticsProxy* stats_proxy, + SendPacketObserver* send_packet_observer) { + RtpSenderObservers observers; + observers.rtcp_rtt_stats = call_stats; + observers.intra_frame_callback = encoder_feedback; + observers.rtcp_loss_notification_observer = encoder_feedback; + observers.report_block_data_observer = stats_proxy; + observers.rtp_stats = stats_proxy; + observers.bitrate_observer = stats_proxy; + observers.frame_count_observer = stats_proxy; + observers.rtcp_type_observer = stats_proxy; + observers.send_packet_observer = send_packet_observer; + return observers; +} + +std::unique_ptr CreateVideoStreamEncoder( + const Environment& env, + int num_cpu_cores, + SendStatisticsProxy* stats_proxy, + const VideoStreamEncoderSettings& encoder_settings, + VideoStreamEncoder::BitrateAllocationCallbackType + bitrate_allocation_callback_type, + Metronome* metronome, + webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) { + std::unique_ptr encoder_queue = + env.task_queue_factory().CreateTaskQueue( + "EncoderQueue", TaskQueueFactory::Priority::NORMAL); + TaskQueueBase* encoder_queue_ptr = encoder_queue.get(); + return std::make_unique( + env, num_cpu_cores, stats_proxy, encoder_settings, + std::make_unique(env, stats_proxy), + FrameCadenceAdapterInterface::Create( + &env.clock(), encoder_queue_ptr, metronome, + /*worker_queue=*/TaskQueueBase::Current(), env.field_trials()), + std::move(encoder_queue), bitrate_allocation_callback_type, + encoder_selector); +} + +bool HasActiveEncodings(const VideoEncoderConfig& config) { + for (const VideoStream& stream : config.simulcast_layers) { + if (stream.active) { + return true; + } + } + return false; +} + } // namespace PacingConfig::PacingConfig(const FieldTrialsView& field_trials) @@ -216,81 +383,117 @@ PacingConfig::PacingConfig(const PacingConfig&) = default; PacingConfig::~PacingConfig() = default; VideoSendStreamImpl::VideoSendStreamImpl( - Clock* clock, - SendStatisticsProxy* stats_proxy, + const Environment& env, + int num_cpu_cores, + RtcpRttStats* call_stats, RtpTransportControllerSendInterface* transport, + Metronome* metronome, BitrateAllocatorInterface* bitrate_allocator, - VideoStreamEncoderInterface* video_stream_encoder, - const VideoSendStream::Config* config, - int initial_encoder_max_bitrate, - double initial_encoder_bitrate_priority, - VideoEncoderConfig::ContentType content_type, - RtpVideoSenderInterface* rtp_video_sender, - const FieldTrialsView& field_trials) - : clock_(clock), - has_alr_probing_(config->periodic_alr_bandwidth_probing || - GetAlrSettings(content_type)), - pacing_config_(PacingConfig(field_trials)), - 
stats_proxy_(stats_proxy), - config_(config), + SendDelayStats* send_delay_stats, + VideoSendStream::Config config, + VideoEncoderConfig encoder_config, + const std::map& suspended_ssrcs, + const std::map& suspended_payload_states, + std::unique_ptr fec_controller, + std::unique_ptr video_stream_encoder_for_test) + : env_(env), + transport_(transport), + stats_proxy_(&env_.clock(), + config, + encoder_config.content_type, + env_.field_trials()), + send_packet_observer_(&stats_proxy_, send_delay_stats), + config_(std::move(config)), + content_type_(encoder_config.content_type), + video_stream_encoder_( + video_stream_encoder_for_test + ? std::move(video_stream_encoder_for_test) + : CreateVideoStreamEncoder( + env_, + num_cpu_cores, + &stats_proxy_, + config_.encoder_settings, + GetBitrateAllocationCallbackType(config_, + env_.field_trials()), + metronome, + config_.encoder_selector)), + encoder_feedback_( + env_, + SupportsPerLayerPictureLossIndication( + encoder_config.video_format.parameters), + config_.rtp.ssrcs, + video_stream_encoder_.get(), + [this](uint32_t ssrc, const std::vector& seq_nums) { + return rtp_video_sender_->GetSentRtpPacketInfos(ssrc, seq_nums); + }), + rtp_video_sender_(transport->CreateRtpVideoSender( + suspended_ssrcs, + suspended_payload_states, + config_.rtp, + config_.rtcp_report_interval_ms, + config_.send_transport, + CreateObservers(call_stats, + &encoder_feedback_, + &stats_proxy_, + &send_packet_observer_), + std::move(fec_controller), + CreateFrameEncryptionConfig(&config_), + config_.frame_transformer)), + has_alr_probing_( + config_.periodic_alr_bandwidth_probing || + GetAlrSettings(env_.field_trials(), encoder_config.content_type)), + pacing_config_(PacingConfig(env_.field_trials())), worker_queue_(TaskQueueBase::Current()), timed_out_(false), - transport_(transport), bitrate_allocator_(bitrate_allocator), + has_active_encodings_(HasActiveEncodings(encoder_config)), disable_padding_(true), max_padding_bitrate_(0), encoder_min_bitrate_bps_(0), encoder_max_bitrate_bps_( - GetInitialEncoderMaxBitrate(initial_encoder_max_bitrate)), + GetInitialEncoderMaxBitrate(encoder_config.max_bitrate_bps)), encoder_target_rate_bps_(0), - encoder_bitrate_priority_(initial_encoder_bitrate_priority), - video_stream_encoder_(video_stream_encoder), - rtp_video_sender_(rtp_video_sender), + encoder_bitrate_priority_(encoder_config.bitrate_priority), + encoder_av1_priority_bitrate_override_bps_( + GetEncoderPriorityBitrate(config_.rtp.payload_name, + env_.field_trials())), configured_pacing_factor_( - GetConfiguredPacingFactor(*config_, content_type, pacing_config_)) { - RTC_DCHECK_GE(config_->rtp.payload_type, 0); - RTC_DCHECK_LE(config_->rtp.payload_type, 127); - RTC_DCHECK(!config_->rtp.ssrcs.empty()); + GetConfiguredPacingFactor(config_, + content_type_, + pacing_config_, + env_.field_trials())) { + RTC_DCHECK_GE(config_.rtp.payload_type, 0); + RTC_DCHECK_LE(config_.rtp.payload_type, 127); + RTC_DCHECK(!config_.rtp.ssrcs.empty()); RTC_DCHECK(transport_); - RTC_DCHECK_NE(initial_encoder_max_bitrate, 0); - RTC_LOG(LS_INFO) << "VideoSendStreamImpl: " << config_->ToString(); + RTC_DCHECK_NE(encoder_max_bitrate_bps_, 0); + RTC_LOG(LS_INFO) << "VideoSendStreamImpl: " << config_.ToString(); - RTC_CHECK(AlrExperimentSettings::MaxOneFieldTrialEnabled()); + RTC_CHECK( + AlrExperimentSettings::MaxOneFieldTrialEnabled(env_.field_trials())); - // Only request rotation at the source when we positively know that the remote - // side doesn't support the rotation extension. 
This allows us to prepare the - // encoder in the expectation that rotation is supported - which is the common - // case. - bool rotation_applied = absl::c_none_of( - config_->rtp.extensions, [](const RtpExtension& extension) { - return extension.uri == RtpExtension::kVideoRotationUri; - }); - - video_stream_encoder_->SetSink(this, rotation_applied); - - absl::optional enable_alr_bw_probing; + std::optional enable_alr_bw_probing; // If send-side BWE is enabled, check if we should apply updated probing and // pacing settings. if (configured_pacing_factor_) { - absl::optional alr_settings = - GetAlrSettings(content_type); + std::optional alr_settings = + GetAlrSettings(env_.field_trials(), content_type_); int queue_time_limit_ms; if (alr_settings) { enable_alr_bw_probing = true; queue_time_limit_ms = alr_settings->max_paced_queue_time; } else { - RateControlSettings rate_control_settings = - RateControlSettings::ParseFromFieldTrials(); - enable_alr_bw_probing = rate_control_settings.UseAlrProbing(); + enable_alr_bw_probing = + RateControlSettings(env_.field_trials()).UseAlrProbing(); queue_time_limit_ms = pacing_config_.max_pacing_delay.Get().ms(); } - transport->SetQueueTimeLimit(queue_time_limit_ms); + transport_->SetQueueTimeLimit(queue_time_limit_ms); } - if (config_->periodic_alr_bandwidth_probing) { - enable_alr_bw_probing = config_->periodic_alr_bandwidth_probing; + if (config_.periodic_alr_bandwidth_probing) { + enable_alr_bw_probing = config_.periodic_alr_bandwidth_probing; } if (enable_alr_bw_probing) { @@ -300,13 +503,123 @@ VideoSendStreamImpl::VideoSendStreamImpl( if (configured_pacing_factor_) transport_->SetPacingFactor(*configured_pacing_factor_); + // Only request rotation at the source when we positively know that the remote + // side doesn't support the rotation extension. This allows us to prepare the + // encoder in the expectation that rotation is supported - which is the common + // case. 
+ bool rotation_applied = absl::c_none_of( + config_.rtp.extensions, [](const RtpExtension& extension) { + return extension.uri == RtpExtension::kVideoRotationUri; + }); + + video_stream_encoder_->SetSink(this, rotation_applied); video_stream_encoder_->SetStartBitrate( bitrate_allocator_->GetStartBitrate(this)); + video_stream_encoder_->SetFecControllerOverride(rtp_video_sender_); + ReconfigureVideoEncoder(std::move(encoder_config)); } VideoSendStreamImpl::~VideoSendStreamImpl() { RTC_DCHECK_RUN_ON(&thread_checker_); - RTC_LOG(LS_INFO) << "~VideoSendStreamImpl: " << config_->ToString(); + RTC_LOG(LS_INFO) << "~VideoSendStreamImpl: " << config_.ToString(); + RTC_DCHECK(!started()); + RTC_DCHECK(!IsRunning()); + transport_->DestroyRtpVideoSender(rtp_video_sender_); +} + +void VideoSendStreamImpl::AddAdaptationResource( + scoped_refptr resource) { + RTC_DCHECK_RUN_ON(&thread_checker_); + video_stream_encoder_->AddAdaptationResource(resource); +} + +std::vector> +VideoSendStreamImpl::GetAdaptationResources() { + RTC_DCHECK_RUN_ON(&thread_checker_); + return video_stream_encoder_->GetAdaptationResources(); +} + +void VideoSendStreamImpl::SetSource( + VideoSourceInterface* source, + const DegradationPreference& degradation_preference) { + RTC_DCHECK_RUN_ON(&thread_checker_); + video_stream_encoder_->SetSource(source, degradation_preference); +} + +void VideoSendStreamImpl::ReconfigureVideoEncoder(VideoEncoderConfig config) { + ReconfigureVideoEncoder(std::move(config), nullptr); +} + +void VideoSendStreamImpl::ReconfigureVideoEncoder( + VideoEncoderConfig config, + SetParametersCallback callback) { + RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK_EQ(content_type_, config.content_type); + RTC_LOG(LS_INFO) << "Encoder config: " << config.ToString() + << " VideoSendStream config: " << config_.ToString(); + + has_active_encodings_ = HasActiveEncodings(config); + if (has_active_encodings_ && rtp_video_sender_->IsActive() && !IsRunning()) { + StartupVideoSendStream(); + } else if (!has_active_encodings_ && IsRunning()) { + StopVideoSendStream(); + } + video_stream_encoder_->ConfigureEncoder( + std::move(config), + config_.rtp.max_packet_size - CalculateMaxHeaderSize(config_.rtp), + std::move(callback)); +} + +VideoSendStream::Stats VideoSendStreamImpl::GetStats() { + RTC_DCHECK_RUN_ON(&thread_checker_); + return stats_proxy_.GetStats(); +} + +void VideoSendStreamImpl::SetStats(const Stats& stats) { + RTC_DCHECK_RUN_ON(&thread_checker_); + stats_proxy_.SetStats(stats); +} + +std::optional VideoSendStreamImpl::GetPacingFactorOverride() const { + return configured_pacing_factor_; +} + +void VideoSendStreamImpl::StopPermanentlyAndGetRtpStates( + VideoSendStreamImpl::RtpStateMap* rtp_state_map, + VideoSendStreamImpl::RtpPayloadStateMap* payload_state_map) { + RTC_DCHECK_RUN_ON(&thread_checker_); + video_stream_encoder_->Stop(); + + running_ = false; + // Always run these cleanup steps regardless of whether running_ was set + // or not. This will unregister callbacks before destruction. + // See `VideoSendStreamImpl::StopVideoSendStream` for more. + Stop(); + *rtp_state_map = GetRtpStates(); + *payload_state_map = GetRtpPayloadStates(); +} + +void VideoSendStreamImpl::GenerateKeyFrame( + const std::vector& rids) { + RTC_DCHECK_RUN_ON(&thread_checker_); + // Map rids to layers. If rids is empty, generate a keyframe for all layers. 
+ std::vector next_frames(config_.rtp.ssrcs.size(), + VideoFrameType::kVideoFrameKey); + if (!config_.rtp.rids.empty() && !rids.empty()) { + std::fill(next_frames.begin(), next_frames.end(), + VideoFrameType::kVideoFrameDelta); + for (const auto& rid : rids) { + for (size_t i = 0; i < config_.rtp.rids.size(); i++) { + if (config_.rtp.rids[i] == rid) { + next_frames[i] = VideoFrameType::kVideoFrameKey; + break; + } + } + } + } + if (video_stream_encoder_) { + video_stream_encoder_->SendKeyFrame(next_frames); + } } void VideoSendStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) { @@ -314,20 +627,30 @@ void VideoSendStreamImpl::DeliverRtcp(const uint8_t* packet, size_t length) { rtp_video_sender_->DeliverRtcp(packet, length); } -void VideoSendStreamImpl::StartPerRtpStream( - const std::vector active_layers) { +bool VideoSendStreamImpl::started() { RTC_DCHECK_RUN_ON(&thread_checker_); - bool previously_active = rtp_video_sender_->IsActive(); - rtp_video_sender_->SetActiveModules(active_layers); - if (!rtp_video_sender_->IsActive() && previously_active) { - StopVideoSendStream(); - } else if (rtp_video_sender_->IsActive() && !previously_active) { + return rtp_video_sender_->IsActive(); +} + +void VideoSendStreamImpl::Start() { + RTC_DCHECK_RUN_ON(&thread_checker_); + // This sender is allowed to send RTP packets. Start monitoring and allocating + // a rate if there are also active encodings (has_active_encodings_). + rtp_video_sender_->SetSending(true); + if (!IsRunning() && has_active_encodings_) { StartupVideoSendStream(); } } +bool VideoSendStreamImpl::IsRunning() const { + RTC_DCHECK_RUN_ON(&thread_checker_); + return check_encoder_activity_task_.Running(); +} + void VideoSendStreamImpl::StartupVideoSendStream() { RTC_DCHECK_RUN_ON(&thread_checker_); + RTC_DCHECK(rtp_video_sender_->IsActive()); + RTC_DCHECK(has_active_encodings_); bitrate_allocator_->AddObserver(this, GetAllocationConfig()); // Start monitoring encoder activity.
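The GenerateKeyFrame() hunk above maps the requested rids onto simulcast layer indices and downgrades every other layer to a delta-frame request. A minimal standalone restatement of that mapping is sketched below so the logic can be read in isolation; NextFrameTypes is a hypothetical helper name and is not part of this patch.

#include <algorithm>
#include <cstddef>
#include <string>
#include <vector>

#include "api/video/video_frame_type.h"

// Illustrative sketch of the rid-to-layer mapping used by GenerateKeyFrame():
// every layer defaults to a key-frame request; when both the configured rids
// and the requested rids are non-empty, only the matching layers keep the
// key-frame request and the rest fall back to delta frames.
std::vector<webrtc::VideoFrameType> NextFrameTypes(
    const std::vector<std::string>& configured_rids,
    const std::vector<std::string>& requested_rids,
    size_t num_layers) {
  std::vector<webrtc::VideoFrameType> next(
      num_layers, webrtc::VideoFrameType::kVideoFrameKey);
  if (configured_rids.empty() || requested_rids.empty())
    return next;
  std::fill(next.begin(), next.end(),
            webrtc::VideoFrameType::kVideoFrameDelta);
  for (const std::string& rid : requested_rids) {
    for (size_t i = 0; i < configured_rids.size() && i < num_layers; ++i) {
      if (configured_rids[i] == rid) {
        next[i] = webrtc::VideoFrameType::kVideoFrameKey;
        break;
      }
    }
  }
  return next;
}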
@@ -363,9 +686,12 @@ void VideoSendStreamImpl::Stop() { if (!rtp_video_sender_->IsActive()) return; - TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Stop"); - rtp_video_sender_->Stop(); - StopVideoSendStream(); + TRACE_EVENT_INSTANT0("webrtc", "VideoSendStream::Stop", + TRACE_EVENT_SCOPE_GLOBAL); + rtp_video_sender_->SetSending(false); + if (IsRunning()) { + StopVideoSendStream(); + } } void VideoSendStreamImpl::StopVideoSendStream() { @@ -374,7 +700,7 @@ void VideoSendStreamImpl::StopVideoSendStream() { check_encoder_activity_task_.Stop(); video_stream_encoder_->OnBitrateUpdated(DataRate::Zero(), DataRate::Zero(), DataRate::Zero(), 0, 0, 0); - stats_proxy_->OnSetEncoderTargetRate(0); + stats_proxy_.OnSetEncoderTargetRate(0); } void VideoSendStreamImpl::SignalEncoderTimedOut() { @@ -397,7 +723,7 @@ void VideoSendStreamImpl::OnBitrateAllocationUpdated( if (encoder_target_rate_bps_ == 0) { return; } - int64_t now_ms = clock_->TimeInMilliseconds(); + int64_t now_ms = env_.clock().TimeInMilliseconds(); if (video_bitrate_allocation_context_) { // If new allocation is within kMaxVbaSizeDifferencePercent larger // than the previously sent allocation and the same streams are still @@ -446,7 +772,7 @@ void VideoSendStreamImpl::OnVideoLayersAllocationUpdated( void VideoSendStreamImpl::SignalEncoderActive() { RTC_DCHECK_RUN_ON(&thread_checker_); - if (rtp_video_sender_->IsActive()) { + if (IsRunning()) { RTC_LOG(LS_INFO) << "SignalEncoderActive, Encoder is active."; bitrate_allocator_->AddObserver(this, GetAllocationConfig()); } @@ -457,9 +783,12 @@ MediaStreamAllocationConfig VideoSendStreamImpl::GetAllocationConfig() const { static_cast(encoder_min_bitrate_bps_), encoder_max_bitrate_bps_, static_cast(disable_padding_ ? 0 : max_padding_bitrate_), - /* priority_bitrate */ 0, - !config_->suspend_below_min_bitrate, - encoder_bitrate_priority_}; + encoder_av1_priority_bitrate_override_bps_, + !config_.suspend_below_min_bitrate, + encoder_bitrate_priority_, + (content_type_ == VideoEncoderConfig::ContentType::kRealtimeVideo) + ? std::optional(TrackRateElasticity::kCanConsumeExtraRate) + : std::nullopt}; } void VideoSendStreamImpl::OnEncoderConfigurationChanged( @@ -471,26 +800,27 @@ void VideoSendStreamImpl::OnEncoderConfigurationChanged( RTC_DCHECK(!worker_queue_->IsCurrent()); auto closure = [this, streams = std::move(streams), is_svc, content_type, min_transmit_bitrate_bps]() mutable { - RTC_DCHECK_GE(config_->rtp.ssrcs.size(), streams.size()); + RTC_DCHECK_GE(config_.rtp.ssrcs.size(), streams.size()); TRACE_EVENT0("webrtc", "VideoSendStream::OnEncoderConfigurationChanged"); RTC_DCHECK_RUN_ON(&thread_checker_); const VideoCodecType codec_type = - PayloadStringToCodecType(config_->rtp.payload_name); + PayloadStringToCodecType(config_.rtp.payload_name); - const absl::optional experimental_min_bitrate = - GetExperimentalMinVideoBitrate(codec_type); + const std::optional experimental_min_bitrate = + GetExperimentalMinVideoBitrate(env_.field_trials(), codec_type); encoder_min_bitrate_bps_ = experimental_min_bitrate ? experimental_min_bitrate->bps() : std::max(streams[0].min_bitrate_bps, GetDefaultMinVideoBitrateBps(codec_type)); - - encoder_max_bitrate_bps_ = 0; double stream_bitrate_priority_sum = 0; + uint32_t encoder_max_bitrate_bps = 0; for (const auto& stream : streams) { // We don't want to allocate more bitrate than needed to inactive streams. - encoder_max_bitrate_bps_ += stream.active ? 
stream.max_bitrate_bps : 0; + if (stream.active) { + encoder_max_bitrate_bps += stream.max_bitrate_bps; + } if (stream.bitrate_priority) { RTC_DCHECK_GT(*stream.bitrate_priority, 0); stream_bitrate_priority_sum += *stream.bitrate_priority; @@ -498,18 +828,20 @@ void VideoSendStreamImpl::OnEncoderConfigurationChanged( } RTC_DCHECK_GT(stream_bitrate_priority_sum, 0); encoder_bitrate_priority_ = stream_bitrate_priority_sum; - encoder_max_bitrate_bps_ = - std::max(static_cast(encoder_min_bitrate_bps_), - encoder_max_bitrate_bps_); + if (encoder_max_bitrate_bps > 0) { + encoder_max_bitrate_bps_ = + std::max(static_cast(encoder_min_bitrate_bps_), + encoder_max_bitrate_bps); + } // TODO(bugs.webrtc.org/10266): Query the VideoBitrateAllocator instead. max_padding_bitrate_ = CalculateMaxPadBitrateBps( streams, is_svc, content_type, min_transmit_bitrate_bps, - config_->suspend_below_min_bitrate, has_alr_probing_); + config_.suspend_below_min_bitrate, has_alr_probing_); // Clear stats for disabled layers. - for (size_t i = streams.size(); i < config_->rtp.ssrcs.size(); ++i) { - stats_proxy_->OnInactiveSsrc(config_->rtp.ssrcs[i]); + for (size_t i = streams.size(); i < config_.rtp.ssrcs.size(); ++i) { + stats_proxy_.OnInactiveSsrc(config_.rtp.ssrcs[i]); } const size_t num_temporal_layers = @@ -518,7 +850,7 @@ void VideoSendStreamImpl::OnEncoderConfigurationChanged( rtp_video_sender_->SetEncodingData(streams[0].width, streams[0].height, num_temporal_layers); - if (rtp_video_sender_->IsActive()) { + if (IsRunning()) { // The send stream is started already. Update the allocator with new // bitrate limits. bitrate_allocator_->AddObserver(this, GetAllocationConfig()); @@ -585,7 +917,7 @@ uint32_t VideoSendStreamImpl::OnBitrateUpdated(BitrateAllocationUpdate update) { update.stable_target_bitrate = update.target_bitrate; } - rtp_video_sender_->OnBitrateUpdated(update, stats_proxy_->GetSendFrameRate()); + rtp_video_sender_->OnBitrateUpdated(update, stats_proxy_.GetSendFrameRate()); encoder_target_rate_bps_ = rtp_video_sender_->GetPayloadBitrateBps(); const uint32_t protection_bitrate_bps = rtp_video_sender_->GetProtectionBitrateBps(); @@ -614,11 +946,17 @@ uint32_t VideoSendStreamImpl::OnBitrateUpdated(BitrateAllocationUpdate update) { link_allocation = std::max(encoder_target_rate, link_allocation); video_stream_encoder_->OnBitrateUpdated( encoder_target_rate, encoder_stable_target_rate, link_allocation, - rtc::dchecked_cast(update.packet_loss_ratio * 256), + dchecked_cast(update.packet_loss_ratio * 256), update.round_trip_time.ms(), update.cwnd_reduce_ratio); - stats_proxy_->OnSetEncoderTargetRate(encoder_target_rate_bps_); + stats_proxy_.OnSetEncoderTargetRate(encoder_target_rate_bps_); return protection_bitrate_bps; } +std::optional VideoSendStreamImpl::GetUsedRate() const { + // This value is for real-time video. Screenshare may have unused bandwidth + // that can be shared, and this needs to be changed to support that. 
+ return std::nullopt; +} + } // namespace internal } // namespace webrtc diff --git a/video/video_send_stream_impl.h b/video/video_send_stream_impl.h index c5e0980f6d..fec8962c01 100644 --- a/video/video_send_stream_impl.h +++ b/video/video_send_stream_impl.h @@ -16,21 +16,23 @@ #include #include #include +#include +#include #include -#include "absl/types/optional.h" +#include "api/environment/environment.h" #include "api/field_trials_view.h" +#include "api/metronome/metronome.h" #include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "api/video/encoded_image.h" #include "api/video/video_bitrate_allocation.h" -#include "api/video/video_bitrate_allocator.h" #include "api/video_codecs/video_encoder.h" #include "call/bitrate_allocator.h" #include "call/rtp_config.h" #include "call/rtp_transport_controller_send_interface.h" #include "call/rtp_video_sender_interface.h" -#include "modules/include/module_common_types.h" +#include "call/video_send_stream.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/experiments/field_trial_parser.h" @@ -38,10 +40,17 @@ #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/thread_annotations.h" #include "video/config/video_encoder_config.h" +#include "video/encoder_rtcp_feedback.h" +#include "video/send_delay_stats.h" #include "video/send_statistics_proxy.h" #include "video/video_stream_encoder_interface.h" namespace webrtc { + +namespace test { +class VideoSendStreamPeer; +} // namespace test + namespace internal { // Pacing buffer config; overridden by ALR config if provided. @@ -54,45 +63,91 @@ struct PacingConfig { FieldTrialParameter max_pacing_delay; }; -// VideoSendStreamImpl implements internal::VideoSendStream. -// It is created and destroyed on `rtp_transport_queue`. The intent is to -// decrease the need for locking and to ensure methods are called in sequence. -// Public methods except `DeliverRtcp` must be called on `rtp_transport_queue`. -// DeliverRtcp is called on the libjingle worker thread or a network thread. +// VideoSendStreamImpl implements webrtc::VideoSendStream. +// It is created and destroyed on `worker queue`. The intent is to decrease +// the need for locking and to ensure methods are called in sequence. +// Public methods except `DeliverRtcp` must be called on `worker queue`. +// DeliverRtcp is called on the libjingle worker thread or a network thread. // An encoder may deliver frames through the EncodedImageCallback on an // arbitrary thread.
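With VideoSendStreamImpl now implementing webrtc::VideoSendStream directly, the threading contract in the comment above translates into the following call pattern. This is a hedged caller-side sketch, not code from this change; the StartSending helper and the `source` and `encoder_config` parameters are assumed names used only for illustration.

#include <utility>

#include "api/rtp_parameters.h"
#include "api/video/video_frame.h"
#include "api/video/video_source_interface.h"
#include "video/config/video_encoder_config.h"
#include "video/video_send_stream_impl.h"

// Hypothetical helper; must run on the worker queue that created `stream`.
// DeliverRtcp() may still arrive from the network thread, and encoder
// callbacks arrive on the encoder queue, as described in the comment above.
void StartSending(webrtc::internal::VideoSendStreamImpl& stream,
                  webrtc::VideoSourceInterface<webrtc::VideoFrame>* source,
                  webrtc::VideoEncoderConfig encoder_config) {
  stream.SetSource(source, webrtc::DegradationPreference::BALANCED);
  stream.ReconfigureVideoEncoder(std::move(encoder_config));
  // Start() registers with the BitrateAllocator only if at least one
  // simulcast layer in the encoder config is active.
  stream.Start();
}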
-class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, +class VideoSendStreamImpl : public webrtc::VideoSendStream, + public webrtc::BitrateAllocatorObserver, public VideoStreamEncoderInterface::EncoderSink { public: - VideoSendStreamImpl(Clock* clock, - SendStatisticsProxy* stats_proxy, + using RtpStateMap = std::map; + using RtpPayloadStateMap = std::map; + + VideoSendStreamImpl(const Environment& env, + int num_cpu_cores, + RtcpRttStats* call_stats, RtpTransportControllerSendInterface* transport, + Metronome* metronome, BitrateAllocatorInterface* bitrate_allocator, - VideoStreamEncoderInterface* video_stream_encoder, - const VideoSendStream::Config* config, - int initial_encoder_max_bitrate, - double initial_encoder_bitrate_priority, - VideoEncoderConfig::ContentType content_type, - RtpVideoSenderInterface* rtp_video_sender, - const FieldTrialsView& field_trials); + SendDelayStats* send_delay_stats, + VideoSendStream::Config config, + VideoEncoderConfig encoder_config, + const RtpStateMap& suspended_ssrcs, + const RtpPayloadStateMap& suspended_payload_states, + std::unique_ptr fec_controller, + std::unique_ptr + video_stream_encoder_for_test = nullptr); ~VideoSendStreamImpl() override; void DeliverRtcp(const uint8_t* packet, size_t length); - void StartPerRtpStream(std::vector active_layers); - void Stop(); + + // webrtc::VideoSendStream implementation. + void Start() override; + void Stop() override; + bool started() override; + + void AddAdaptationResource(scoped_refptr resource) override; + std::vector> GetAdaptationResources() override; + + void SetSource(VideoSourceInterface* source, + const DegradationPreference& degradation_preference) override; + + void ReconfigureVideoEncoder(VideoEncoderConfig config) override; + void ReconfigureVideoEncoder(VideoEncoderConfig config, + SetParametersCallback callback) override; + Stats GetStats() override; + void SetStats(const Stats& stats) override; + + void StopPermanentlyAndGetRtpStates(RtpStateMap* rtp_state_map, + RtpPayloadStateMap* payload_state_map); + void GenerateKeyFrame(const std::vector& rids) override; // TODO(holmer): Move these to RtpTransportControllerSend. std::map GetRtpStates() const; std::map GetRtpPayloadStates() const; - const absl::optional& configured_pacing_factor() const { + const std::optional& configured_pacing_factor() const { return configured_pacing_factor_; } private: + friend class test::VideoSendStreamPeer; + class OnSendPacketObserver : public SendPacketObserver { + public: + OnSendPacketObserver(SendStatisticsProxy* stats_proxy, + SendDelayStats* send_delay_stats) + : stats_proxy_(*stats_proxy), send_delay_stats_(*send_delay_stats) {} + + void OnSendPacket(std::optional packet_id, + Timestamp capture_time, + uint32_t ssrc) override { + stats_proxy_.OnSendPacket(ssrc, capture_time); + if (packet_id.has_value()) { + send_delay_stats_.OnSendPacket(*packet_id, capture_time, ssrc); + } + } + + private: + SendStatisticsProxy& stats_proxy_; + SendDelayStats& send_delay_stats_; + }; + + std::optional GetPacingFactorOverride() const; // Implements BitrateAllocatorObserver. 
uint32_t OnBitrateUpdated(BitrateAllocationUpdate update) override; + std::optional GetUsedRate() const override; // Implements VideoStreamEncoderInterface::EncoderSink void OnEncoderConfigurationChanged( @@ -126,17 +181,29 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, void ConfigureSsrcs(); void SignalEncoderTimedOut(); void SignalEncoderActive(); + // A video send stream is running if VideoSendStream::Start has been invoked + // and there is an active encoding. + bool IsRunning() const; MediaStreamAllocationConfig GetAllocationConfig() const RTC_RUN_ON(thread_checker_); + const Environment env_; RTC_NO_UNIQUE_ADDRESS SequenceChecker thread_checker_; - Clock* const clock_; + + RtpTransportControllerSendInterface* const transport_; + + SendStatisticsProxy stats_proxy_; + OnSendPacketObserver send_packet_observer_; + const VideoSendStream::Config config_; + const VideoEncoderConfig::ContentType content_type_; + std::unique_ptr video_stream_encoder_; + EncoderRtcpFeedback encoder_feedback_; + RtpVideoSenderInterface* const rtp_video_sender_; + bool running_ RTC_GUARDED_BY(thread_checker_) = false; + const bool has_alr_probing_; const PacingConfig pacing_config_; - SendStatisticsProxy* const stats_proxy_; - const VideoSendStream::Config* const config_; - TaskQueueBase* const worker_queue_; RepeatingTaskHandle check_encoder_activity_task_ @@ -145,18 +212,17 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, std::atomic_bool activity_; bool timed_out_ RTC_GUARDED_BY(thread_checker_); - RtpTransportControllerSendInterface* const transport_; BitrateAllocatorInterface* const bitrate_allocator_; + bool has_active_encodings_ RTC_GUARDED_BY(thread_checker_); bool disable_padding_ RTC_GUARDED_BY(thread_checker_); int max_padding_bitrate_ RTC_GUARDED_BY(thread_checker_); int encoder_min_bitrate_bps_ RTC_GUARDED_BY(thread_checker_); uint32_t encoder_max_bitrate_bps_ RTC_GUARDED_BY(thread_checker_); uint32_t encoder_target_rate_bps_ RTC_GUARDED_BY(thread_checker_); double encoder_bitrate_priority_ RTC_GUARDED_BY(thread_checker_); - - VideoStreamEncoderInterface* const video_stream_encoder_; - RtpVideoSenderInterface* const rtp_video_sender_; + const int encoder_av1_priority_bitrate_override_bps_ + RTC_GUARDED_BY(thread_checker_); ScopedTaskSafety worker_queue_safety_; @@ -164,12 +230,12 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, // throttle sending of similar bitrate allocations. 
struct VbaSendContext { VideoBitrateAllocation last_sent_allocation; - absl::optional throttled_allocation; + std::optional throttled_allocation; int64_t last_send_time_ms; }; - absl::optional video_bitrate_allocation_context_ + std::optional video_bitrate_allocation_context_ RTC_GUARDED_BY(thread_checker_); - const absl::optional configured_pacing_factor_; + const std::optional configured_pacing_factor_; }; } // namespace internal } // namespace webrtc diff --git a/video/video_send_stream_impl_unittest.cc b/video/video_send_stream_impl_unittest.cc index c88ad06cfb..a298a017ba 100644 --- a/video/video_send_stream_impl_unittest.cc +++ b/video/video_send_stream_impl_unittest.cc @@ -11,31 +11,53 @@ #include "video/video_send_stream_impl.h" #include +#include +#include +#include #include +#include #include +#include +#include -#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/call/bitrate_allocation.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/rtc_event_log/rtc_event_log.h" -#include "api/sequence_checker.h" +#include "api/rtp_parameters.h" #include "api/task_queue/task_queue_base.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" -#include "call/rtp_video_sender.h" +#include "api/video/encoded_image.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_layers_allocation.h" +#include "api/video_codecs/video_encoder.h" +#include "call/bitrate_allocator.h" +#include "call/rtp_config.h" +#include "call/rtp_video_sender_interface.h" #include "call/test/mock_bitrate_allocator.h" #include "call/test/mock_rtp_transport_controller_send.h" +#include "call/video_send_stream.h" +#include "modules/pacing/packet_router.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" -#include "modules/video_coding/fec_controller_default.h" -#include "rtc_base/event.h" +#include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/experiments/alr_experiment.h" -#include "rtc_base/fake_clock.h" -#include "rtc_base/logging.h" +#include "test/field_trial.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/mock_transport.h" #include "test/scoped_key_value_config.h" #include "test/time_controller/simulated_time_controller.h" +#include "video/config/video_encoder_config.h" +#include "video/send_delay_stats.h" +#include "video/send_statistics_proxy.h" #include "video/test/mock_video_stream_encoder.h" -#include "video/video_send_stream.h" +#include "video/video_stream_encoder.h" +#include "video/video_stream_encoder_interface.h" namespace webrtc { @@ -50,10 +72,16 @@ namespace internal { namespace { using ::testing::_; using ::testing::AllOf; +using ::testing::AnyNumber; +using ::testing::Eq; using ::testing::Field; using ::testing::Invoke; +using ::testing::Mock; using ::testing::NiceMock; using ::testing::Return; +using ::testing::SaveArg; +using ::testing::Sequence; +using ::testing::SizeIs; constexpr int64_t kDefaultInitialBitrateBps = 333000; const double kDefaultBitratePriority = 0.5; @@ -66,8 +94,7 @@ std::string GetAlrProbingExperimentString() { } class MockRtpVideoSender : public RtpVideoSenderInterface { public: - MOCK_METHOD(void, SetActiveModules, (const std::vector&), (override)); - MOCK_METHOD(void, Stop, (), (override)); + MOCK_METHOD(void, SetSending, (bool sending), (override)); MOCK_METHOD(bool, 
IsActive, (), (override)); MOCK_METHOD(void, OnNetworkAvailability, (bool), (override)); MOCK_METHOD((std::map), @@ -101,7 +128,8 @@ class MockRtpVideoSender : public RtpVideoSenderInterface { MOCK_METHOD(void, SetEncodingData, (size_t, size_t, size_t), (override)); MOCK_METHOD(std::vector, GetSentRtpPacketInfos, - (uint32_t ssrc, rtc::ArrayView sequence_numbers), + (uint32_t ssrc, + webrtc::ArrayView sequence_numbers), (const, override)); MOCK_METHOD(void, SetFecAllowed, (bool fec_allowed), (override)); @@ -114,6 +142,7 @@ BitrateAllocationUpdate CreateAllocation(int bitrate_bps) { update.round_trip_time = TimeDelta::Zero(); return update; } + } // namespace class VideoSendStreamImplTest : public ::testing::Test { @@ -136,36 +165,49 @@ class VideoSendStreamImplTest : public ::testing::Test { .WillRepeatedly(Return(&packet_router_)); EXPECT_CALL(transport_controller_, CreateRtpVideoSender) .WillRepeatedly(Return(&rtp_video_sender_)); - ON_CALL(rtp_video_sender_, Stop()).WillByDefault(::testing::Invoke([&] { - active_modules_.clear(); + ON_CALL(rtp_video_sender_, IsActive()).WillByDefault(Invoke([&]() { + return rtp_sending_; })); - ON_CALL(rtp_video_sender_, IsActive()) - .WillByDefault(::testing::Invoke([&]() { - for (bool enabled : active_modules_) { - if (enabled) - return true; - } - return false; - })); - ON_CALL(rtp_video_sender_, SetActiveModules) - .WillByDefault(::testing::SaveArg<0>(&active_modules_)); + ON_CALL(rtp_video_sender_, SetSending) + .WillByDefault(SaveArg<0>(&rtp_sending_)); } ~VideoSendStreamImplTest() {} + VideoEncoderConfig TestVideoEncoderConfig( + VideoEncoderConfig::ContentType content_type = + VideoEncoderConfig::ContentType::kRealtimeVideo, + int initial_encoder_max_bitrate = kDefaultInitialBitrateBps, + double initial_encoder_bitrate_priority = kDefaultBitratePriority) { + VideoEncoderConfig encoder_config; + encoder_config.max_bitrate_bps = initial_encoder_max_bitrate; + encoder_config.bitrate_priority = initial_encoder_bitrate_priority; + encoder_config.content_type = content_type; + encoder_config.simulcast_layers.push_back(VideoStream()); + encoder_config.simulcast_layers.back().active = true; + encoder_config.simulcast_layers.back().bitrate_priority = 1.0; + return encoder_config; + } + std::unique_ptr CreateVideoSendStreamImpl( - int initial_encoder_max_bitrate, - double initial_encoder_bitrate_priority, - VideoEncoderConfig::ContentType content_type) { - EXPECT_CALL(bitrate_allocator_, GetStartBitrate(_)) - .WillOnce(Return(123000)); + VideoEncoderConfig encoder_config) { + EXPECT_CALL(bitrate_allocator_, GetStartBitrate).WillOnce(Return(123000)); std::map suspended_ssrcs; std::map suspended_payload_states; + + std::unique_ptr> video_stream_encoder = + std::make_unique>(); + video_stream_encoder_ = video_stream_encoder.get(); + auto ret = std::make_unique( - time_controller_.GetClock(), &stats_proxy_, &transport_controller_, - &bitrate_allocator_, &video_stream_encoder_, &config_, - initial_encoder_max_bitrate, initial_encoder_bitrate_priority, - content_type, &rtp_video_sender_, field_trials_); + CreateEnvironment(&field_trials_, time_controller_.GetClock(), + time_controller_.GetTaskQueueFactory()), + /*num_cpu_cores=*/1, + /*call_stats=*/nullptr, &transport_controller_, + /*metronome=*/nullptr, &bitrate_allocator_, &send_delay_stats_, + config_.Copy(), std::move(encoder_config), suspended_ssrcs, + suspended_payload_states, + /*fec_controller=*/nullptr, std::move(video_stream_encoder)); // The call to GetStartBitrate() executes asynchronously on the 
tq. // Ensure all tasks get to run. @@ -181,9 +223,9 @@ class VideoSendStreamImplTest : public ::testing::Test { NiceMock transport_; NiceMock transport_controller_; NiceMock bitrate_allocator_; - NiceMock video_stream_encoder_; + NiceMock* video_stream_encoder_ = nullptr; NiceMock rtp_video_sender_; - std::vector active_modules_; + bool rtp_sending_ = false; RtcEventLogNull event_log_; VideoSendStream::Config config_; @@ -193,23 +235,150 @@ class VideoSendStreamImplTest : public ::testing::Test { PacketRouter packet_router_; }; -TEST_F(VideoSendStreamImplTest, RegistersAsBitrateObserverOnStart) { - auto vss_impl = CreateVideoSendStreamImpl( - kDefaultInitialBitrateBps, kDefaultBitratePriority, - VideoEncoderConfig::ContentType::kRealtimeVideo); - const bool kSuspend = false; - config_.suspend_below_min_bitrate = kSuspend; +TEST_F(VideoSendStreamImplTest, + NotRegistersAsBitrateObserverOnStartIfNoActiveEncodings) { + VideoEncoderConfig encoder_config = TestVideoEncoderConfig(); + encoder_config.simulcast_layers[0].active = false; + auto vss_impl = CreateVideoSendStreamImpl(std::move(encoder_config)); + EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _)).Times(0); + EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(0); + + vss_impl->Start(); + time_controller_.AdvanceTime(TimeDelta::Zero()); + vss_impl->Stop(); +} + +TEST_F(VideoSendStreamImplTest, + RegistersAsBitrateObserverOnStartIfHasActiveEncodings) { + auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig()); + + EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _)); + vss_impl->Start(); + time_controller_.AdvanceTime(TimeDelta::Zero()); + + EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(1); + vss_impl->Stop(); +} + +TEST_F(VideoSendStreamImplTest, + DeRegistersAsBitrateObserverIfNoActiveEncodings) { + auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig()); + EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _)); + vss_impl->Start(); + time_controller_.AdvanceTime(TimeDelta::Zero()); + + EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(1); + VideoEncoderConfig no_active_encodings = TestVideoEncoderConfig(); + no_active_encodings.simulcast_layers[0].active = false; + + vss_impl->ReconfigureVideoEncoder(std::move(no_active_encodings)); + + time_controller_.AdvanceTime(TimeDelta::Zero()); + ::testing::Mock::VerifyAndClearExpectations(&bitrate_allocator_); + + vss_impl->Stop(); +} + +TEST_F(VideoSendStreamImplTest, + MaxBitrateCorrectIfActiveEncodingUpdatedAfterCreation) { + VideoEncoderConfig one_active_encoding = TestVideoEncoderConfig(); + ASSERT_THAT(one_active_encoding.simulcast_layers, SizeIs(1)); + one_active_encoding.max_bitrate_bps = 10'000'000; + one_active_encoding.simulcast_layers[0].max_bitrate_bps = 2'000'000; + VideoEncoderConfig no_active_encodings = one_active_encoding.Copy(); + no_active_encodings.simulcast_layers[0].active = false; + auto vss_impl = CreateVideoSendStreamImpl(no_active_encodings.Copy()); + + encoder_queue_->PostTask([&] { + static_cast(vss_impl.get()) + ->OnEncoderConfigurationChanged( + no_active_encodings.simulcast_layers, false, + VideoEncoderConfig::ContentType::kRealtimeVideo, + /*min_transmit_bitrate_bps*/ 30000); + }); + time_controller_.AdvanceTime(TimeDelta::Zero()); + + Sequence s; + // Expect codec max bitrate as max needed bitrate before the encoder has + // notifed about the actual send streams. 
+ EXPECT_CALL(bitrate_allocator_, + AddObserver(vss_impl.get(), + Field(&MediaStreamAllocationConfig::max_bitrate_bps, + Eq(one_active_encoding.max_bitrate_bps)))) + .InSequence(s); + + // Expect the sum of active encodings as max needed bitrate after + // ->OnEncoderConfigurationChanged. + EXPECT_CALL( + bitrate_allocator_, + AddObserver( + vss_impl.get(), + Field(&MediaStreamAllocationConfig::max_bitrate_bps, + Eq(one_active_encoding.simulcast_layers[0].max_bitrate_bps)))) + .InSequence(s); + vss_impl->Start(); + // Enable encoding of a stream. + vss_impl->ReconfigureVideoEncoder(one_active_encoding.Copy()); + encoder_queue_->PostTask([&] { + static_cast(vss_impl.get()) + ->OnEncoderConfigurationChanged( + one_active_encoding.simulcast_layers, false, + VideoEncoderConfig::ContentType::kRealtimeVideo, + /*min_transmit_bitrate_bps*/ 30000); + }); + time_controller_.AdvanceTime(TimeDelta::Zero()); + + EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).InSequence(s); + vss_impl->Stop(); +} + +TEST_F(VideoSendStreamImplTest, + DoNotRegistersAsBitrateObserverOnStrayEncodedImage) { + auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig()); + + EncodedImage encoded_image; + CodecSpecificInfo codec_specific; + ON_CALL(rtp_video_sender_, OnEncodedImage) + .WillByDefault(Return( + EncodedImageCallback::Result(EncodedImageCallback::Result::OK))); + EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _)) - .WillOnce(Invoke( - [&](BitrateAllocatorObserver*, MediaStreamAllocationConfig config) { - EXPECT_EQ(config.min_bitrate_bps, 0u); - EXPECT_EQ(config.max_bitrate_bps, kDefaultInitialBitrateBps); - EXPECT_EQ(config.pad_up_bitrate_bps, 0u); - EXPECT_EQ(config.enforce_min_bitrate, !kSuspend); - EXPECT_EQ(config.bitrate_priority, kDefaultBitratePriority); - })); - vss_impl->StartPerRtpStream({true}); + .Times(AnyNumber()); + vss_impl->Start(); + time_controller_.AdvanceTime(TimeDelta::Zero()); + + // VideoSendStreamImpl gets an allocated bitrate. + const uint32_t kBitrateBps = 100000; + EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) + .Times(1) + .WillOnce(Return(kBitrateBps)); + static_cast(vss_impl.get()) + ->OnBitrateUpdated(CreateAllocation(kBitrateBps)); + // A frame is encoded. + encoder_queue_->PostTask([&] { + static_cast(vss_impl.get()) + ->OnEncodedImage(encoded_image, &codec_specific); + }); + + // Expect allocation to be removed if encoder stops producing frames. EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(1); + time_controller_.AdvanceTime(TimeDelta::Seconds(5)); + Mock::VerifyAndClearExpectations(&bitrate_allocator_); + + EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _)).Times(0); + + VideoEncoderConfig no_active_encodings = TestVideoEncoderConfig(); + no_active_encodings.simulcast_layers[0].active = false; + vss_impl->ReconfigureVideoEncoder(std::move(no_active_encodings)); + + // Expect that allocation is not resumed if a stray encoded image is received.
+ encoder_queue_->PostTask([&] { + static_cast(vss_impl.get()) + ->OnEncodedImage(encoded_image, &codec_specific); + }); + + time_controller_.AdvanceTime(TimeDelta::Zero()); + vss_impl->Stop(); } @@ -218,11 +387,12 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChange) { config_.suspend_below_min_bitrate = kSuspend; config_.rtp.extensions.emplace_back(RtpExtension::kTransportSequenceNumberUri, 1); - auto vss_impl = CreateVideoSendStreamImpl( - kDefaultInitialBitrateBps, kDefaultBitratePriority, - VideoEncoderConfig::ContentType::kRealtimeVideo); + config_.rtp.ssrcs.emplace_back(1); + config_.rtp.ssrcs.emplace_back(2); + + auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig()); - vss_impl->StartPerRtpStream({true}); + vss_impl->Start(); // QVGA + VGA configuration matching defaults in // media/engine/simulcast.cc. @@ -248,9 +418,6 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChange) { int min_transmit_bitrate_bps = 30000; - config_.rtp.ssrcs.emplace_back(1); - config_.rtp.ssrcs.emplace_back(2); - EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _)) .WillRepeatedly(Invoke( [&](BitrateAllocatorObserver*, MediaStreamAllocationConfig config) { @@ -284,10 +451,12 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChangeWithAlr) { config_.rtp.extensions.emplace_back(RtpExtension::kTransportSequenceNumberUri, 1); config_.periodic_alr_bandwidth_probing = true; + config_.rtp.ssrcs.emplace_back(1); + config_.rtp.ssrcs.emplace_back(2); + auto vss_impl = CreateVideoSendStreamImpl( - kDefaultInitialBitrateBps, kDefaultBitratePriority, - VideoEncoderConfig::ContentType::kScreen); - vss_impl->StartPerRtpStream({true}); + TestVideoEncoderConfig(VideoEncoderConfig::ContentType::kScreen)); + vss_impl->Start(); // Simulcast screenshare. VideoStream low_stream; @@ -316,9 +485,6 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChangeWithAlr) { // low_stream.target_bitrate_bps + high_stream.min_bitrate_bps. int min_transmit_bitrate_bps = 400000; - config_.rtp.ssrcs.emplace_back(1); - config_.rtp.ssrcs.emplace_back(2); - EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _)) .WillRepeatedly(Invoke( [&](BitrateAllocatorObserver*, MediaStreamAllocationConfig config) { @@ -347,12 +513,12 @@ TEST_F(VideoSendStreamImplTest, UpdatesObserverOnConfigurationChangeWithSimulcastVideoHysteresis) { test::ScopedKeyValueConfig hysteresis_experiment( field_trials_, "WebRTC-VideoRateControl/video_hysteresis:1.25/"); + config_.rtp.ssrcs.emplace_back(1); + config_.rtp.ssrcs.emplace_back(2); - auto vss_impl = CreateVideoSendStreamImpl( - kDefaultInitialBitrateBps, kDefaultBitratePriority, - VideoEncoderConfig::ContentType::kRealtimeVideo); + auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig()); - vss_impl->StartPerRtpStream({true}); + vss_impl->Start(); // 2-layer video simulcast. 
VideoStream low_stream; low_stream.width = 320; @@ -374,9 +540,6 @@ TEST_F(VideoSendStreamImplTest, high_stream.max_qp = 56; high_stream.bitrate_priority = 1; - config_.rtp.ssrcs.emplace_back(1); - config_.rtp.ssrcs.emplace_back(2); - EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _)) .WillRepeatedly(Invoke([&](BitrateAllocatorObserver*, MediaStreamAllocationConfig config) { @@ -397,7 +560,8 @@ TEST_F(VideoSendStreamImplTest, ->OnEncoderConfigurationChanged( std::vector{low_stream, high_stream}, false, VideoEncoderConfig::ContentType::kRealtimeVideo, - /*min_transmit_bitrate_bps=*/0); + /*min_transmit_bitrate_bps=*/ + 0); }); time_controller_.AdvanceTime(TimeDelta::Zero()); vss_impl->Stop(); @@ -413,31 +577,28 @@ TEST_F(VideoSendStreamImplTest, SetsScreensharePacingFactorWithFeedback) { SetPacingFactor(kAlrProbingExperimentPaceMultiplier)) .Times(1); auto vss_impl = CreateVideoSendStreamImpl( - kDefaultInitialBitrateBps, kDefaultBitratePriority, - VideoEncoderConfig::ContentType::kScreen); - vss_impl->StartPerRtpStream({true}); + TestVideoEncoderConfig(VideoEncoderConfig::ContentType::kScreen)); + vss_impl->Start(); vss_impl->Stop(); } TEST_F(VideoSendStreamImplTest, DoesNotSetPacingFactorWithoutFeedback) { test::ScopedFieldTrials alr_experiment(GetAlrProbingExperimentString()); auto vss_impl = CreateVideoSendStreamImpl( - kDefaultInitialBitrateBps, kDefaultBitratePriority, - VideoEncoderConfig::ContentType::kScreen); + TestVideoEncoderConfig(VideoEncoderConfig::ContentType::kScreen)); EXPECT_CALL(transport_controller_, SetPacingFactor(_)).Times(0); - vss_impl->StartPerRtpStream({true}); + vss_impl->Start(); vss_impl->Stop(); } TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationWhenEnabled) { auto vss_impl = CreateVideoSendStreamImpl( - kDefaultInitialBitrateBps, kDefaultBitratePriority, - VideoEncoderConfig::ContentType::kScreen); + TestVideoEncoderConfig(VideoEncoderConfig::ContentType::kScreen)); EXPECT_CALL(transport_controller_, SetPacingFactor(_)).Times(0); VideoStreamEncoderInterface::EncoderSink* const sink = static_cast(vss_impl.get()); - vss_impl->StartPerRtpStream({true}); + vss_impl->Start(); // Populate a test instance of video bitrate allocation. VideoBitrateAllocation alloc; alloc.SetBitrate(0, 0, 10000); @@ -477,9 +638,8 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationWhenEnabled) { TEST_F(VideoSendStreamImplTest, ThrottlesVideoBitrateAllocationWhenTooSimilar) { auto vss_impl = CreateVideoSendStreamImpl( - kDefaultInitialBitrateBps, kDefaultBitratePriority, - VideoEncoderConfig::ContentType::kScreen); - vss_impl->StartPerRtpStream({true}); + TestVideoEncoderConfig(VideoEncoderConfig::ContentType::kScreen)); + vss_impl->Start(); // Unpause encoder, to allows allocations to be passed through. const uint32_t kBitrateBps = 100000; EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) @@ -536,10 +696,9 @@ TEST_F(VideoSendStreamImplTest, ThrottlesVideoBitrateAllocationWhenTooSimilar) { TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationOnLayerChange) { auto vss_impl = CreateVideoSendStreamImpl( - kDefaultInitialBitrateBps, kDefaultBitratePriority, - VideoEncoderConfig::ContentType::kScreen); + TestVideoEncoderConfig(VideoEncoderConfig::ContentType::kScreen)); - vss_impl->StartPerRtpStream({true}); + vss_impl->Start(); // Unpause encoder, to allows allocations to be passed through. 
const uint32_t kBitrateBps = 100000; EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) @@ -578,9 +737,8 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationOnLayerChange) { TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) { auto vss_impl = CreateVideoSendStreamImpl( - kDefaultInitialBitrateBps, kDefaultBitratePriority, - VideoEncoderConfig::ContentType::kScreen); - vss_impl->StartPerRtpStream({true}); + TestVideoEncoderConfig(VideoEncoderConfig::ContentType::kScreen)); + vss_impl->Start(); const uint32_t kBitrateBps = 100000; // Unpause encoder, to allows allocations to be passed through. EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) @@ -681,15 +839,80 @@ TEST_F(VideoSendStreamImplTest, ForwardsVideoBitrateAllocationAfterTimeout) { vss_impl->Stop(); } +TEST_F(VideoSendStreamImplTest, PriorityBitrateConfigInactiveByDefault) { + auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig()); + EXPECT_CALL( + bitrate_allocator_, + AddObserver( + vss_impl.get(), + Field(&MediaStreamAllocationConfig::priority_bitrate_bps, 0))); + vss_impl->Start(); + EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(1); + vss_impl->Stop(); +} + +TEST_F(VideoSendStreamImplTest, PriorityBitrateConfigAffectsAV1) { + test::ScopedFieldTrials override_priority_bitrate( + "WebRTC-AV1-OverridePriorityBitrate/bitrate:20000/"); + config_.rtp.payload_name = "AV1"; + auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig()); + EXPECT_CALL( + bitrate_allocator_, + AddObserver( + vss_impl.get(), + Field(&MediaStreamAllocationConfig::priority_bitrate_bps, 20000))); + vss_impl->Start(); + EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(1); + vss_impl->Stop(); +} + +TEST_F(VideoSendStreamImplTest, + PriorityBitrateConfigSurvivesConfigurationChange) { + VideoStream qvga_stream; + qvga_stream.width = 320; + qvga_stream.height = 180; + qvga_stream.max_framerate = 30; + qvga_stream.min_bitrate_bps = 30000; + qvga_stream.target_bitrate_bps = 150000; + qvga_stream.max_bitrate_bps = 200000; + qvga_stream.max_qp = 56; + qvga_stream.bitrate_priority = 1; + + int min_transmit_bitrate_bps = 30000; + + test::ScopedFieldTrials override_priority_bitrate( + "WebRTC-AV1-OverridePriorityBitrate/bitrate:20000/"); + config_.rtp.payload_name = "AV1"; + auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig()); + EXPECT_CALL( + bitrate_allocator_, + AddObserver( + vss_impl.get(), + Field(&MediaStreamAllocationConfig::priority_bitrate_bps, 20000))) + .Times(2); + vss_impl->Start(); + + encoder_queue_->PostTask([&] { + static_cast(vss_impl.get()) + ->OnEncoderConfigurationChanged( + std::vector{qvga_stream}, false, + VideoEncoderConfig::ContentType::kRealtimeVideo, + min_transmit_bitrate_bps); + }); + time_controller_.AdvanceTime(TimeDelta::Zero()); + + EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(1); + vss_impl->Stop(); +} + TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) { const bool kSuspend = false; config_.suspend_below_min_bitrate = kSuspend; config_.rtp.extensions.emplace_back(RtpExtension::kTransportSequenceNumberUri, 1); - auto vss_impl = CreateVideoSendStreamImpl( - kDefaultInitialBitrateBps, kDefaultBitratePriority, - VideoEncoderConfig::ContentType::kRealtimeVideo); - vss_impl->StartPerRtpStream({true}); + auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig()); + + vss_impl->Start(); VideoStream qvga_stream; qvga_stream.width = 320; qvga_stream.height = 180; 
@@ -723,7 +946,7 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) { EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) .WillOnce(Return(network_constrained_rate.bps())); EXPECT_CALL( - video_stream_encoder_, + *video_stream_encoder_, OnBitrateUpdated(network_constrained_rate, network_constrained_rate, network_constrained_rate, 0, _, 0)); static_cast(vss_impl.get()) @@ -740,7 +963,7 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) { EXPECT_CALL(rtp_video_sender_, OnBitrateUpdated(update, _)); EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) .WillOnce(Return(rate_with_headroom.bps())); - EXPECT_CALL(video_stream_encoder_, + EXPECT_CALL(*video_stream_encoder_, OnBitrateUpdated(qvga_max_bitrate, qvga_max_bitrate, rate_with_headroom, 0, _, 0)); static_cast(vss_impl.get()) @@ -757,7 +980,7 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) { .WillOnce(Return(rate_with_headroom.bps())); const DataRate headroom_minus_protection = rate_with_headroom - DataRate::BitsPerSec(protection_bitrate_bps); - EXPECT_CALL(video_stream_encoder_, + EXPECT_CALL(*video_stream_encoder_, OnBitrateUpdated(qvga_max_bitrate, qvga_max_bitrate, headroom_minus_protection, 0, _, 0)); static_cast(vss_impl.get()) @@ -770,14 +993,14 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) { EXPECT_CALL(rtp_video_sender_, OnBitrateUpdated(update, _)); EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) .WillOnce(Return(rate_with_headroom.bps())); - EXPECT_CALL(video_stream_encoder_, + EXPECT_CALL(*video_stream_encoder_, OnBitrateUpdated(qvga_max_bitrate, qvga_max_bitrate, qvga_max_bitrate, 0, _, 0)); static_cast(vss_impl.get()) ->OnBitrateUpdated(update); // Set rates to zero on stop. - EXPECT_CALL(video_stream_encoder_, + EXPECT_CALL(*video_stream_encoder_, OnBitrateUpdated(DataRate::Zero(), DataRate::Zero(), DataRate::Zero(), 0, 0, 0)); vss_impl->Stop(); @@ -785,9 +1008,7 @@ TEST_F(VideoSendStreamImplTest, CallsVideoStreamEncoderOnBitrateUpdate) { TEST_F(VideoSendStreamImplTest, DisablesPaddingOnPausedEncoder) { int padding_bitrate = 0; - std::unique_ptr vss_impl = CreateVideoSendStreamImpl( - kDefaultInitialBitrateBps, kDefaultBitratePriority, - VideoEncoderConfig::ContentType::kRealtimeVideo); + auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig()); // Capture padding bitrate for testing. EXPECT_CALL(bitrate_allocator_, AddObserver(vss_impl.get(), _)) @@ -820,7 +1041,7 @@ TEST_F(VideoSendStreamImplTest, DisablesPaddingOnPausedEncoder) { int min_transmit_bitrate_bps = 30000; config_.rtp.ssrcs.emplace_back(1); - vss_impl->StartPerRtpStream({true}); + vss_impl->Start(); // Starts without padding. 
EXPECT_EQ(0, padding_bitrate); encoder_queue_->PostTask([&] { @@ -864,11 +1085,9 @@ TEST_F(VideoSendStreamImplTest, DisablesPaddingOnPausedEncoder) { } TEST_F(VideoSendStreamImplTest, KeepAliveOnDroppedFrame) { - std::unique_ptr vss_impl = CreateVideoSendStreamImpl( - kDefaultInitialBitrateBps, kDefaultBitratePriority, - VideoEncoderConfig::ContentType::kRealtimeVideo); + auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig()); EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())).Times(0); - vss_impl->StartPerRtpStream({true}); + vss_impl->Start(); const uint32_t kBitrateBps = 100000; EXPECT_CALL(rtp_video_sender_, GetPayloadBitrateBps()) .Times(1) @@ -907,13 +1126,12 @@ TEST_F(VideoSendStreamImplTest, ConfiguresBitratesForSvc) { config_.rtp.extensions.emplace_back( RtpExtension::kTransportSequenceNumberUri, 1); config_.periodic_alr_bandwidth_probing = test_config.alr; - auto vss_impl = CreateVideoSendStreamImpl( - kDefaultInitialBitrateBps, kDefaultBitratePriority, + auto vss_impl = CreateVideoSendStreamImpl(TestVideoEncoderConfig( test_config.screenshare ? VideoEncoderConfig::ContentType::kScreen - : VideoEncoderConfig::ContentType::kRealtimeVideo); + : VideoEncoderConfig::ContentType::kRealtimeVideo)); - vss_impl->StartPerRtpStream({true}); + vss_impl->Start(); // Svc VideoStream stream; @@ -992,10 +1210,35 @@ TEST_F(VideoSendStreamImplTest, ConfiguresBitratesForSvc) { ->OnEncodedImage(encoded_image, &codec_specific); }); time_controller_.AdvanceTime(TimeDelta::Zero()); - ::testing::Mock::VerifyAndClearExpectations(&bitrate_allocator_); + Mock::VerifyAndClearExpectations(&bitrate_allocator_); vss_impl->Stop(); } } + +TEST_F(VideoSendStreamImplTest, TestElasticityForRealtimeVideo) { + auto vss_impl = CreateVideoSendStreamImpl( + TestVideoEncoderConfig(VideoEncoderConfig::ContentType::kRealtimeVideo)); + EXPECT_CALL(bitrate_allocator_, + AddObserver(vss_impl.get(), + Field(&MediaStreamAllocationConfig::rate_elasticity, + TrackRateElasticity::kCanConsumeExtraRate))); + vss_impl->Start(); + EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())); + vss_impl->Stop(); +} + +TEST_F(VideoSendStreamImplTest, TestElasticityForScreenshare) { + auto vss_impl = CreateVideoSendStreamImpl( + TestVideoEncoderConfig(VideoEncoderConfig::ContentType::kScreen)); + EXPECT_CALL(bitrate_allocator_, + AddObserver(vss_impl.get(), + Field(&MediaStreamAllocationConfig::rate_elasticity, + std::nullopt))); + vss_impl->Start(); + EXPECT_CALL(bitrate_allocator_, RemoveObserver(vss_impl.get())); + vss_impl->Stop(); +} + } // namespace internal } // namespace webrtc diff --git a/video/video_send_stream_tests.cc b/video/video_send_stream_tests.cc index bdff1cf824..69e0c8edf3 100644 --- a/video/video_send_stream_tests.cc +++ b/video/video_send_stream_tests.cc @@ -8,41 +8,83 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ #include // max +#include +#include +#include +#include #include +#include +#include +#include +#include +#include +#include #include -#include "absl/algorithm/container.h" #include "absl/strings/match.h" +#include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" +#include "api/fec_controller_override.h" +#include "api/field_trials_view.h" +#include "api/make_ref_counted.h" +#include "api/rtp_headers.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" -#include "api/task_queue/default_task_queue_factory.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" #include "api/test/metrics/global_metrics_logger_and_exporter.h" #include "api/test/metrics/metric.h" #include "api/test/simulated_network.h" +#include "api/test/video/function_video_encoder_factory.h" +#include "api/transport/bitrate_settings.h" +#include "api/units/data_rate.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" #include "api/video/encoded_image.h" #include "api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator.h" +#include "api/video/video_bitrate_allocator_factory.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_content_type.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_rotation.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" +#include "api/video_codecs/scalability_mode.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" +#include "call/audio_receive_stream.h" +#include "call/audio_send_stream.h" #include "call/call.h" #include "call/fake_network_pipe.h" -#include "call/rtp_transport_controller_send.h" -#include "call/simulated_network.h" +#include "call/video_receive_stream.h" #include "call/video_send_stream.h" #include "media/engine/internal_encoder_factory.h" #include "media/engine/simulcast_encoder_adapter.h" #include "media/engine/webrtc_video_engine.h" +#include "modules/include/module_common_types_public.h" +#include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/create_video_rtp_depacketizer.h" +#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" #include "modules/rtp_rtcp/source/rtcp_sender.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/rtp_util.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h" #include "modules/video_coding/codecs/interface/common_constants.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" +#include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/svc/create_scalability_structure.h" #include "modules/video_coding/svc/scalability_mode_util.h" #include "modules/video_coding/svc/scalable_video_controller.h" @@ -50,18 +92,21 @@ #include 
"rtc_base/event.h" #include "rtc_base/experiments/alr_experiment.h" #include "rtc_base/logging.h" -#include "rtc_base/platform_thread.h" +#include "rtc_base/network_route.h" #include "rtc_base/rate_limiter.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" +#include "rtc_base/thread.h" +#include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" #include "rtc_base/unique_id_generator.h" #include "system_wrappers/include/sleep.h" #include "test/call_test.h" #include "test/configurable_frame_size_encoder.h" +#include "test/encoder_settings.h" #include "test/fake_encoder.h" -#include "test/fake_texture_frame.h" +#include "test/field_trial.h" #include "test/frame_forwarder.h" #include "test/frame_generator_capturer.h" #include "test/frame_utils.h" @@ -70,12 +115,12 @@ #include "test/null_transport.h" #include "test/rtcp_packet_parser.h" #include "test/rtp_rtcp_observer.h" +#include "test/scoped_key_value_config.h" #include "test/video_encoder_proxy_factory.h" #include "test/video_test_constants.h" -#include "video/config/encoder_stream_factory.h" -#include "video/send_statistics_proxy.h" +#include "video/config/video_encoder_config.h" #include "video/transport_adapter.h" -#include "video/video_send_stream.h" +#include "video/video_send_stream_impl.h" namespace webrtc { namespace test { @@ -83,13 +128,13 @@ class VideoSendStreamPeer { public: explicit VideoSendStreamPeer(webrtc::VideoSendStream* base_class_stream) : internal_stream_( - static_cast(base_class_stream)) {} - absl::optional GetPacingFactorOverride() const { + static_cast(base_class_stream)) {} + std::optional GetPacingFactorOverride() const { return internal_stream_->GetPacingFactorOverride(); } private: - internal::VideoSendStream const* const internal_stream_; + internal::VideoSendStreamImpl const* const internal_stream_; }; } // namespace test @@ -108,7 +153,11 @@ enum class WaitUntil : bool { kZero = false, kNonZero = true }; constexpr int64_t kRtcpIntervalMs = 1000; -enum VideoFormat { +// Some of the test cases are expected to time out. +// Use a shorter timeout window than the default one for those. +constexpr TimeDelta kReducedTimeout = TimeDelta::Seconds(10); + +enum TestVideoFormat { kGeneric, kVP8, }; @@ -124,7 +173,7 @@ using ParameterizationType = std::tuple; std::string ParamInfoToStr( const testing::TestParamInfo& info) { - rtc::StringBuilder sb; + StringBuilder sb; sb << std::get<0>(info.param).scalability_mode << "_" << (std::get<1>(info.param) ? 
"WithIdentifier" : "WithoutIdentifier"); return sb.str(); @@ -140,9 +189,10 @@ class VideoSendStreamTest : public test::CallTest { } protected: - void TestNackRetransmission(uint32_t retransmit_ssrc, + void TestNackRetransmission(uint32_t media_ssrc, + uint32_t retransmit_ssrc, uint8_t retransmit_payload_type); - void TestPacketFragmentationSize(VideoFormat format, bool with_fec); + void TestPacketFragmentationSize(TestVideoFormat format, bool with_fec); void TestVp9NonFlexMode(const Vp9TestParams& params, bool use_scalability_mode_identifier); @@ -190,7 +240,7 @@ TEST_F(VideoSendStreamTest, SupportsCName) { CNameObserver() : SendTest(test::VideoTestConstants::kDefaultTimeout) {} private: - Action OnSendRtcp(rtc::ArrayView packet) override { + Action OnSendRtcp(ArrayView packet) override { test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(packet)); if (parser.sdes()->num_packets() > 0) { @@ -226,7 +276,7 @@ TEST_F(VideoSendStreamTest, SupportsAbsoluteSendTime) { extensions_.Register(kAbsSendTimeExtensionId); } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet(&extensions_); EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -274,15 +324,15 @@ TEST_F(VideoSendStreamTest, SupportsTransmissionTimeOffset) { public: TransmissionTimeOffsetObserver() : SendTest(test::VideoTestConstants::kDefaultTimeout), - encoder_factory_([]() { - return std::make_unique( - Clock::GetRealTimeClock(), kEncodeDelayMs); + encoder_factory_([](const Environment& env, + const SdpVideoFormat& format) { + return std::make_unique(env, kEncodeDelayMs); }) { extensions_.Register(kTimestampOffsetExtensionId); } private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet(&extensions_); EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -322,15 +372,15 @@ TEST_F(VideoSendStreamTest, SupportsTransportWideSequenceNumbers) { public: TransportWideSequenceNumberObserver() : SendTest(test::VideoTestConstants::kDefaultTimeout), - encoder_factory_([]() { - return std::make_unique( - Clock::GetRealTimeClock()); - }) { + encoder_factory_( + [](const Environment& env, const SdpVideoFormat& format) { + return std::make_unique(env); + }) { extensions_.Register(kExtensionId); } private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet(&extensions_); EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -369,7 +419,7 @@ TEST_F(VideoSendStreamTest, SupportsVideoRotation) { extensions_.Register(kVideoRotationExtensionId); } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet(&extensions_); EXPECT_TRUE(rtp_packet.Parse(packet)); // Only the last packet of the frame is required to have the extension. @@ -415,7 +465,7 @@ TEST_F(VideoSendStreamTest, SupportsVideoContentType) { kVideoContentTypeExtensionId); } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet(&extensions_); EXPECT_TRUE(rtp_packet.Parse(packet)); // Only the last packet of the key-frame must have extension. 
@@ -461,7 +511,7 @@ TEST_F(VideoSendStreamTest, SupportsVideoTimingFrames) { extensions_.Register(kVideoTimingExtensionId); } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet(&extensions_); EXPECT_TRUE(rtp_packet.Parse(packet)); // Only the last packet of the frame must have extension. @@ -519,18 +569,15 @@ class FakeReceiveStatistics : public ReceiveStatisticsProvider { class UlpfecObserver : public test::EndToEndTest { public: - // Some of the test cases are expected to time out. - // Use a shorter timeout window than the default one for those. - static constexpr TimeDelta kReducedTimeout = TimeDelta::Seconds(10); - - UlpfecObserver(bool header_extensions_enabled, - bool use_nack, - bool expect_red, - bool expect_ulpfec, - const std::string& codec, - VideoEncoderFactory* encoder_factory) - : EndToEndTest(expect_ulpfec ? test::VideoTestConstants::kDefaultTimeout - : kReducedTimeout), + UlpfecObserver( + bool header_extensions_enabled, + bool use_nack, + bool expect_red, + bool expect_ulpfec, + const std::string& codec, + VideoEncoderFactory* encoder_factory, + const TimeDelta& timeout = test::VideoTestConstants::kDefaultTimeout) + : EndToEndTest(timeout), encoder_factory_(encoder_factory), payload_name_(codec), use_nack_(use_nack), @@ -545,7 +592,7 @@ class UlpfecObserver : public test::EndToEndTest { } private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet(&extensions_); EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -668,14 +715,18 @@ class UlpfecObserver : public test::EndToEndTest { TEST_F(VideoSendStreamTest, SupportsUlpfecWithExtensions) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }); UlpfecObserver test(true, false, true, true, "VP8", &encoder_factory); RunBaseTest(&test); } TEST_F(VideoSendStreamTest, SupportsUlpfecWithoutExtensions) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }); UlpfecObserver test(false, false, true, true, "VP8", &encoder_factory); RunBaseTest(&test); } @@ -691,8 +742,11 @@ class VideoSendStreamWithoutUlpfecTest : public test::CallTest { TEST_F(VideoSendStreamWithoutUlpfecTest, NoUlpfecIfDisabledThroughFieldTrial) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); - UlpfecObserver test(false, false, false, false, "VP8", &encoder_factory); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }); + UlpfecObserver test(false, false, false, false, "VP8", &encoder_factory, + kReducedTimeout); RunBaseTest(&test); } @@ -701,25 +755,30 @@ TEST_F(VideoSendStreamWithoutUlpfecTest, NoUlpfecIfDisabledThroughFieldTrial) { // bandwidth since the receiver has to wait for FEC retransmissions to determine // that the received state is actually decodable. 
TEST_F(VideoSendStreamTest, DoesNotUtilizeUlpfecForH264WithNackEnabled) {
- test::FunctionVideoEncoderFactory encoder_factory([]() {
- return std::make_unique<test::FakeH264Encoder>(Clock::GetRealTimeClock());
- });
- UlpfecObserver test(false, true, false, false, "H264", &encoder_factory);
+ test::FunctionVideoEncoderFactory encoder_factory(
+ [](const Environment& env, const SdpVideoFormat& format) {
+ return std::make_unique<test::FakeH264Encoder>(env);
+ });
+ UlpfecObserver test(false, true, false, false, "H264", &encoder_factory,
+ kReducedTimeout);
RunBaseTest(&test);
}
// Without retransmissions FEC for H264 is fine.
TEST_F(VideoSendStreamTest, DoesUtilizeUlpfecForH264WithoutNackEnabled) {
- test::FunctionVideoEncoderFactory encoder_factory([]() {
- return std::make_unique<test::FakeH264Encoder>(Clock::GetRealTimeClock());
- });
+ test::FunctionVideoEncoderFactory encoder_factory(
+ [](const Environment& env, const SdpVideoFormat& format) {
+ return std::make_unique<test::FakeH264Encoder>(env);
+ });
UlpfecObserver test(false, false, true, true, "H264", &encoder_factory);
RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, DoesUtilizeUlpfecForVp8WithNackEnabled) {
test::FunctionVideoEncoderFactory encoder_factory(
- []() { return VP8Encoder::Create(); });
+ [](const Environment& env, const SdpVideoFormat& format) {
+ return CreateVp8Encoder(env);
+ });
UlpfecObserver test(false, true, true, true, "VP8", &encoder_factory);
RunBaseTest(&test);
}
@@ -727,19 +786,21 @@ TEST_F(VideoSendStreamTest, DoesUtilizeUlpfecForVp8WithNackEnabled) {
#if defined(RTC_ENABLE_VP9)
TEST_F(VideoSendStreamTest, DoesUtilizeUlpfecForVp9WithNackEnabled) {
test::FunctionVideoEncoderFactory encoder_factory(
- []() { return VP9Encoder::Create(); });
- UlpfecObserver test(false, true, true, true, "VP9", &encoder_factory);
+ [](const Environment& env, const SdpVideoFormat& format) {
+ return CreateVp9Encoder(env);
+ });
+ // Use kLongTimeout because the test is flaky with kDefaultTimeout.
+ UlpfecObserver test(false, true, true, true, "VP9", &encoder_factory, + test::VideoTestConstants::kLongTimeout); RunBaseTest(&test); } #endif // defined(RTC_ENABLE_VP9) TEST_F(VideoSendStreamTest, SupportsUlpfecWithMultithreadedH264) { - std::unique_ptr task_queue_factory = - CreateDefaultTaskQueueFactory(); - test::FunctionVideoEncoderFactory encoder_factory([&]() { - return std::make_unique( - Clock::GetRealTimeClock(), task_queue_factory.get()); - }); + test::FunctionVideoEncoderFactory encoder_factory( + [&](const Environment& env, const SdpVideoFormat& format) { + return std::make_unique(env); + }); UlpfecObserver test(false, false, true, true, "H264", &encoder_factory); RunBaseTest(&test); } @@ -771,7 +832,7 @@ class FlexfecObserver : public test::EndToEndTest { size_t GetNumVideoStreams() const override { return num_video_streams_; } private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet(&extensions_); EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -860,28 +921,36 @@ class FlexfecObserver : public test::EndToEndTest { TEST_F(VideoSendStreamTest, SupportsFlexfecVp8) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }); FlexfecObserver test(false, false, "VP8", &encoder_factory, 1); RunBaseTest(&test); } TEST_F(VideoSendStreamTest, SupportsFlexfecSimulcastVp8) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }); FlexfecObserver test(false, false, "VP8", &encoder_factory, 2); RunBaseTest(&test); } TEST_F(VideoSendStreamTest, SupportsFlexfecWithNackVp8) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }); FlexfecObserver test(false, true, "VP8", &encoder_factory, 1); RunBaseTest(&test); } TEST_F(VideoSendStreamTest, SupportsFlexfecWithRtpExtensionsVp8) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }); FlexfecObserver test(true, false, "VP8", &encoder_factory, 1); RunBaseTest(&test); } @@ -889,137 +958,144 @@ TEST_F(VideoSendStreamTest, SupportsFlexfecWithRtpExtensionsVp8) { #if defined(RTC_ENABLE_VP9) TEST_F(VideoSendStreamTest, SupportsFlexfecVp9) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP9Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp9Encoder(env); + }); FlexfecObserver test(false, false, "VP9", &encoder_factory, 1); RunBaseTest(&test); } TEST_F(VideoSendStreamTest, SupportsFlexfecWithNackVp9) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP9Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp9Encoder(env); + }); FlexfecObserver test(false, true, "VP9", &encoder_factory, 1); RunBaseTest(&test); } #endif // defined(RTC_ENABLE_VP9) TEST_F(VideoSendStreamTest, SupportsFlexfecH264) { - test::FunctionVideoEncoderFactory encoder_factory([]() { - return std::make_unique(Clock::GetRealTimeClock()); - }); + test::FunctionVideoEncoderFactory encoder_factory( + [](const Environment& env, const SdpVideoFormat& 
format) {
+ return std::make_unique<test::FakeH264Encoder>(env);
+ });
FlexfecObserver test(false, false, "H264", &encoder_factory, 1);
RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, SupportsFlexfecWithNackH264) {
- test::FunctionVideoEncoderFactory encoder_factory([]() {
- return std::make_unique<test::FakeH264Encoder>(Clock::GetRealTimeClock());
- });
+ test::FunctionVideoEncoderFactory encoder_factory(
+ [](const Environment& env, const SdpVideoFormat& format) {
+ return std::make_unique<test::FakeH264Encoder>(env);
+ });
FlexfecObserver test(false, true, "H264", &encoder_factory, 1);
RunBaseTest(&test);
}
TEST_F(VideoSendStreamTest, SupportsFlexfecWithMultithreadedH264) {
- std::unique_ptr<TaskQueueFactory> task_queue_factory =
- CreateDefaultTaskQueueFactory();
- test::FunctionVideoEncoderFactory encoder_factory([&]() {
- return std::make_unique<test::MultithreadedFakeH264Encoder>(
- Clock::GetRealTimeClock(), task_queue_factory.get());
- });
+ test::FunctionVideoEncoderFactory encoder_factory(
+ [&](const Environment& env, const SdpVideoFormat& format) {
+ return std::make_unique<test::MultithreadedFakeH264Encoder>(env);
+ });
FlexfecObserver test(false, false, "H264", &encoder_factory, 1);
RunBaseTest(&test);
}
void VideoSendStreamTest::TestNackRetransmission(
+ uint32_t media_ssrc,
uint32_t retransmit_ssrc,
uint8_t retransmit_payload_type) {
class NackObserver : public test::SendTest {
public:
- explicit NackObserver(uint32_t retransmit_ssrc,
+ explicit NackObserver(uint32_t media_ssrc,
+ uint32_t retransmit_ssrc,
uint8_t retransmit_payload_type)
: SendTest(test::VideoTestConstants::kDefaultTimeout),
- send_count_(0),
retransmit_count_(0),
+ media_ssrc_(media_ssrc),
retransmit_ssrc_(retransmit_ssrc),
retransmit_payload_type_(retransmit_payload_type) {}
private:
- Action OnSendRtp(rtc::ArrayView<const uint8_t> packet) override {
+ Action OnSendRtp(ArrayView<const uint8_t> packet) override {
RtpPacket rtp_packet;
EXPECT_TRUE(rtp_packet.Parse(packet));
- // NACK packets two times at some arbitrary points.
- const int kNackedPacketsAtOnceCount = 3;
- const int kRetransmitTarget = kNackedPacketsAtOnceCount * 2;
+ uint16_t sequence_number = rtp_packet.SequenceNumber();
+ if (rtp_packet.payload_size() >= 2 &&
+ rtp_packet.Ssrc() == retransmit_ssrc_ &&
+ retransmit_ssrc_ != media_ssrc_) {
+ // Assume correct RTX packet. Extract original sequence number.
+ ArrayView<const uint8_t> payload = rtp_packet.payload();
+ sequence_number = (payload[0] << 8) + payload[1];
+ }
- // Skip padding packets because they will never be retransmitted.
- if (rtp_packet.payload_size() == 0) {
+ if (auto it = pending_retransmission_.find(sequence_number);
+ it != pending_retransmission_.end()) {
+ // Count each observed retransmission only once, so that any RTX-based
+ // padding doesn't double count.
+ pending_retransmission_.erase(it);
+ ++retransmit_count_;
+ if (retransmit_count_ >= 3) {
+ // Three unique retransmissions observed, should be enough for anyone.
+ observation_complete_.Set();
+ }
+ return SEND_PACKET;
+ }
+
+ // Skip padding packets and RTX packets; they will never be retransmitted.
+ if (rtp_packet.payload_size() == 0 ||
+ (rtp_packet.Ssrc() == retransmit_ssrc_ &&
+ retransmit_ssrc_ != media_ssrc_)) {
return SEND_PACKET;
}
- ++send_count_;
+ // Immediately add any new media packet to the pending set.
+ const Timestamp now = env_.clock().CurrentTime();
+ pending_retransmission_.emplace(sequence_number, now);
+
+ // Find all requests we have not yet gotten a retransmission for,
+ // filtering out only those entries which have not been sent or requested
+ // within the last 50ms. A grace period is needed since the sender will
+ // not respond to a NACK within the first RTT of sending a message.
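+ // Each pending entry's timestamp is refreshed when it is NACKed below, so a
+ // given sequence number is re-requested at most once per kNackInterval until
+ // its retransmission is observed and erased from the map above.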
+ const TimeDelta kNackInterval = TimeDelta::Millis(50);
+ const size_t kMaxNackSize = 100;
+
+ std::vector<uint16_t> sequence_numbers;
+ for (auto& kv : pending_retransmission_) {
+ if (now - kv.second >= kNackInterval) {
+ sequence_numbers.push_back(kv.first);
+ kv.second = now;
+
+ if (sequence_numbers.size() >= kMaxNackSize) {
+ break;
+ }
+ }
+ }
- // NACK packets at arbitrary points.
- if (send_count_ % 25 == 0) {
+ if (!sequence_numbers.empty()) {
+ // Inject a NACK message for the found sequence numbers.
RTCPSender::Configuration config;
- config.clock = Clock::GetRealTimeClock();
config.outgoing_transport = transport_adapter_.get();
config.rtcp_report_interval = TimeDelta::Millis(kRtcpIntervalMs);
config.local_media_ssrc = test::VideoTestConstants::kReceiverLocalVideoSsrc;
- RTCPSender rtcp_sender(config);
+ RTCPSender rtcp_sender(env_, config);
rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize);
rtcp_sender.SetRemoteSSRC(test::VideoTestConstants::kVideoSendSsrcs[0]);
RTCPSender::FeedbackState feedback_state;
- uint16_t nack_sequence_numbers[kNackedPacketsAtOnceCount];
- int nack_count = 0;
- for (uint16_t sequence_number :
- sequence_numbers_pending_retransmission_) {
- if (nack_count < kNackedPacketsAtOnceCount) {
- nack_sequence_numbers[nack_count++] = sequence_number;
- } else {
- break;
- }
- }
-
- EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpNack, nack_count,
- nack_sequence_numbers));
- }
-
- uint16_t sequence_number = rtp_packet.SequenceNumber();
- if (rtp_packet.Ssrc() == retransmit_ssrc_ &&
- retransmit_ssrc_ != test::VideoTestConstants::kVideoSendSsrcs[0]) {
- // Not kVideoSendSsrcs[0], assume correct RTX packet. Extract sequence
- // number.
- const uint8_t* rtx_header = rtp_packet.payload().data();
- sequence_number = (rtx_header[0] << 8) + rtx_header[1];
+ EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpNack,
+ sequence_numbers.size(),
+ sequence_numbers.data()));
}
- auto it = sequence_numbers_pending_retransmission_.find(sequence_number);
- if (it == sequence_numbers_pending_retransmission_.end()) {
- // Not currently pending retransmission. Add it to retransmission queue
- // if media and limit not reached.
- if (rtp_packet.Ssrc() == test::VideoTestConstants::kVideoSendSsrcs[0] &&
- rtp_packet.payload_size() > 0 &&
- retransmit_count_ +
- sequence_numbers_pending_retransmission_.size() <
- kRetransmitTarget) {
- sequence_numbers_pending_retransmission_.insert(sequence_number);
- return DROP_PACKET;
- }
- } else {
- // Packet is a retransmission, remove it from queue and check if done.
- sequence_numbers_pending_retransmission_.erase(it);
- if (++retransmit_count_ == kRetransmitTarget) {
- EXPECT_EQ(retransmit_ssrc_, rtp_packet.Ssrc());
- EXPECT_EQ(retransmit_payload_type_, rtp_packet.PayloadType());
- observation_complete_.Set();
- }
- }
-
- return SEND_PACKET;
+ // Drop the media packet; otherwise transport feedback may indirectly ack the
+ // packet and remove it from the packet history.
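+ // The dropped packet stays in `pending_retransmission_` until its
+ // NACK-triggered retransmission is observed and counted above.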
+ return DROP_PACKET; } void ModifyVideoConfigs( @@ -1040,13 +1116,15 @@ void VideoSendStreamTest::TestNackRetransmission( EXPECT_TRUE(Wait()) << "Timed out while waiting for NACK retransmission."; } + const Environment env_ = CreateEnvironment(); std::unique_ptr transport_adapter_; - int send_count_; int retransmit_count_; + const uint32_t media_ssrc_; const uint32_t retransmit_ssrc_; const uint8_t retransmit_payload_type_; - std::set sequence_numbers_pending_retransmission_; - } test(retransmit_ssrc, retransmit_payload_type); + // Map from sequence number to timestamp for transmission or last NACK. + std::map pending_retransmission_; + } test(media_ssrc, retransmit_ssrc, retransmit_payload_type); RunBaseTest(&test); } @@ -1054,16 +1132,18 @@ void VideoSendStreamTest::TestNackRetransmission( TEST_F(VideoSendStreamTest, RetransmitsNack) { // Normal NACKs should use the send SSRC. TestNackRetransmission(test::VideoTestConstants::kVideoSendSsrcs[0], + test::VideoTestConstants::kVideoSendSsrcs[0], test::VideoTestConstants::kFakeVideoSendPayloadType); } TEST_F(VideoSendStreamTest, RetransmitsNackOverRtx) { // NACKs over RTX should use a separate SSRC. - TestNackRetransmission(test::VideoTestConstants::kSendRtxSsrcs[0], + TestNackRetransmission(test::VideoTestConstants::kVideoSendSsrcs[0], + test::VideoTestConstants::kSendRtxSsrcs[0], test::VideoTestConstants::kSendRtxPayloadType); } -void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format, +void VideoSendStreamTest::TestPacketFragmentationSize(TestVideoFormat format, bool with_fec) { // Use a fake encoder to output a frame of every size in the range [90, 290], // for each size making sure that the exact number of payload bytes received @@ -1105,7 +1185,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format, } private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { size_t length = packet.size(); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -1201,12 +1281,11 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format, packets_lost_, // Cumulative lost. loss_ratio); // Loss percent. 
RTCPSender::Configuration config; - config.clock = Clock::GetRealTimeClock(); config.receive_statistics = &lossy_receive_stats; config.outgoing_transport = transport_adapter_.get(); config.rtcp_report_interval = TimeDelta::Millis(kRtcpIntervalMs); config.local_media_ssrc = test::VideoTestConstants::kVideoSendSsrcs[0]; - RTCPSender rtcp_sender(config); + RTCPSender rtcp_sender(env_, config); rtcp_sender.SetRTCPStatus(RtcpMode::kReducedSize); rtcp_sender.SetRemoteSSRC(test::VideoTestConstants::kVideoSendSsrcs[0]); @@ -1267,6 +1346,7 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format, EXPECT_TRUE(Wait()) << "Timed out while observing incoming RTP packets."; } + const Environment env_ = CreateEnvironment(); std::unique_ptr transport_adapter_; test::ConfigurableFrameSizeEncoder encoder_; test::VideoEncoderProxyFactory encoder_factory_; @@ -1325,7 +1405,7 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) { capturer_(nullptr) {} private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { MutexLock lock(&mutex_); last_packet_time_ms_ = clock_->TimeInMilliseconds(); @@ -1353,7 +1433,7 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) { return SEND_PACKET; } - Action OnSendRtcp(rtc::ArrayView packet) override { + Action OnSendRtcp(ArrayView packet) override { MutexLock lock(&mutex_); const int kNoPacketsThresholdMs = 2000; if (test_state_ == kWaitingForNoPackets && @@ -1397,7 +1477,7 @@ TEST_F(VideoSendStreamTest, NoPaddingWhenVideoIsMuted) { TestState test_state_ = kBeforeStopCapture; Clock* const clock_; Mutex mutex_; - absl::optional last_packet_time_ms_ RTC_GUARDED_BY(mutex_); + std::optional last_packet_time_ms_ RTC_GUARDED_BY(mutex_); test::FrameGeneratorCapturer* capturer_ RTC_GUARDED_BY(mutex_); } test; @@ -1420,7 +1500,7 @@ TEST_F(VideoSendStreamTest, PaddingIsPrimarilyRetransmissions) { call_ = sender_call; } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { MutexLock lock(&mutex_); RtpPacket rtp_packet; @@ -1434,7 +1514,7 @@ TEST_F(VideoSendStreamTest, PaddingIsPrimarilyRetransmissions) { const int kNetworkDelayMs = 50; BuiltInNetworkBehaviorConfig config; config.loss_percent = 10; - config.link_capacity_kbps = kCapacityKbps; + config.link_capacity = DataRate::KilobitsPerSec(kCapacityKbps); config.queue_delay_ms = kNetworkDelayMs; return config; } @@ -1495,7 +1575,7 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) { task_safety_flag_(PendingTaskSafetyFlag::CreateDetached()) {} private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { if (IsRtcpPacket(packet)) return DROP_PACKET; @@ -1534,10 +1614,9 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) { receive_streams) override { stream_ = send_stream; RtpRtcpInterface::Configuration config; - config.clock = Clock::GetRealTimeClock(); config.outgoing_transport = feedback_transport_.get(); config.retransmission_rate_limiter = &retranmission_rate_limiter_; - rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(config); + rtp_rtcp_ = std::make_unique(env_, config); rtp_rtcp_->SetRTCPStatus(RtcpMode::kReducedSize); } @@ -1562,12 +1641,13 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) { } TaskQueueBase* const task_queue_; + const Environment env_ = CreateEnvironment(); std::unique_ptr rtp_rtcp_; std::unique_ptr feedback_transport_; RateLimiter retranmission_rate_limiter_; VideoSendStream* stream_; bool bitrate_capped_; - 
rtc::scoped_refptr task_safety_flag_; + scoped_refptr task_safety_flag_; } test(task_queue()); RunBaseTest(&test); @@ -1619,7 +1699,7 @@ TEST_F(VideoSendStreamTest, ChangingNetworkRoute) { RtpExtension::kTransportSequenceNumberUri, kExtensionId)); } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RTC_DCHECK_RUN_ON(&module_process_thread_); task_queue_->PostTask([this]() { RTC_DCHECK_RUN_ON(&task_queue_thread_); @@ -1638,10 +1718,10 @@ TEST_F(VideoSendStreamTest, ChangingNetworkRoute) { } void PerformTest() override { - rtc::NetworkRoute new_route; + NetworkRoute new_route; new_route.connected = true; - new_route.local = rtc::RouteEndpoint::CreateWithNetworkId(10); - new_route.remote = rtc::RouteEndpoint::CreateWithNetworkId(20); + new_route.local = RouteEndpoint::CreateWithNetworkId(10); + new_route.remote = RouteEndpoint::CreateWithNetworkId(20); BitrateConstraints bitrate_config; SendTask(task_queue_, [this, &new_route, &bitrate_config]() { @@ -1665,7 +1745,7 @@ TEST_F(VideoSendStreamTest, ChangingNetworkRoute) { // TODO(holmer): We should set the last sent packet id here and // verify that we correctly ignore any packet loss reported prior to // that id. - new_route.local = rtc::RouteEndpoint::CreateWithNetworkId( + new_route.local = RouteEndpoint::CreateWithNetworkId( new_route.local.network_id() + 1); call_->GetTransportControllerSend()->OnNetworkRouteChanged("transport", new_route); @@ -1719,7 +1799,7 @@ TEST_F(VideoSendStreamTest, DISABLED_RelayToDirectRoute) { call_ = sender_call; } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RTC_DCHECK_RUN_ON(&module_process_thread_); task_queue_->PostTask([this]() { RTC_DCHECK_RUN_ON(&task_queue_thread_); @@ -1741,10 +1821,10 @@ TEST_F(VideoSendStreamTest, DISABLED_RelayToDirectRoute) { } void PerformTest() override { - rtc::NetworkRoute route; + NetworkRoute route; route.connected = true; - route.local = rtc::RouteEndpoint::CreateWithNetworkId(10); - route.remote = rtc::RouteEndpoint::CreateWithNetworkId(20); + route.local = RouteEndpoint::CreateWithNetworkId(10); + route.remote = RouteEndpoint::CreateWithNetworkId(20); SendTask(task_queue_, [this, &route]() { RTC_DCHECK_RUN_ON(&task_queue_thread_); @@ -1803,7 +1883,7 @@ TEST_F(VideoSendStreamTest, ChangingTransportOverhead) { call_ = sender_call; } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { EXPECT_LE(packet.size(), kMaxRtpPacketSize); MutexLock lock(&lock_); if (++packets_sent_ < 100) @@ -1911,7 +1991,7 @@ class MaxPaddingSetTest : public test::SendTest { } // Called on the pacer thread. 
- Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RTC_DCHECK_RUN_ON(&module_process_thread_); // Check the stats on the correct thread and signal the 'complete' flag @@ -2013,8 +2093,8 @@ TEST_F(VideoSendStreamTest, EncoderReconfigureOnResolutionChangeWhenNotSending) { class EncoderObserver : public test::FakeEncoder { public: - EncoderObserver() - : FakeEncoder(Clock::GetRealTimeClock()), + explicit EncoderObserver(const Environment& env) + : FakeEncoder(env), last_initialized_frame_width_(0), last_initialized_frame_height_(0) {} @@ -2053,13 +2133,13 @@ TEST_F(VideoSendStreamTest, } Mutex mutex_; - rtc::Event init_encode_called_; + Event init_encode_called_; int last_initialized_frame_width_ RTC_GUARDED_BY(&mutex_); int last_initialized_frame_height_ RTC_GUARDED_BY(&mutex_); }; test::NullTransport transport; - EncoderObserver encoder; + EncoderObserver encoder(env()); test::VideoEncoderProxyFactory encoder_factory(&encoder); SendTask(task_queue(), [this, &transport, &encoder_factory]() { @@ -2094,8 +2174,8 @@ TEST_F(VideoSendStreamTest, TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) { class StartBitrateObserver : public test::FakeEncoder { public: - StartBitrateObserver() - : FakeEncoder(Clock::GetRealTimeClock()), start_bitrate_kbps_(0) {} + explicit StartBitrateObserver(const Environment& env) + : FakeEncoder(env), start_bitrate_kbps_(0) {} int32_t InitEncode(const VideoCodec* config, const Settings& settings) override { MutexLock lock(&mutex_); @@ -2123,7 +2203,7 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) { private: mutable Mutex mutex_; - rtc::Event start_bitrate_changed_; + Event start_bitrate_changed_; int start_bitrate_kbps_ RTC_GUARDED_BY(mutex_); }; @@ -2138,7 +2218,7 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) { sender_call_->GetTransportControllerSend()->SetSdpBitrateParameters( bitrate_config); - StartBitrateObserver encoder; + StartBitrateObserver encoder(env()); test::VideoEncoderProxyFactory encoder_factory(&encoder); GetVideoSendConfig()->encoder_settings.encoder_factory = &encoder_factory; @@ -2153,7 +2233,7 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) { // the test code context is interpreted as the worker thread and we assume // progress on it. The test should probably be ported to use simulated time // instead (ported to a scenario test perhaps?). - rtc::Thread::Current()->ProcessMessages(5000); + Thread::Current()->ProcessMessages(5000); EXPECT_TRUE(encoder.WaitForStartBitrate()); EXPECT_EQ(GetVideoEncoderConfig()->max_bitrate_bps / 1000, @@ -2167,7 +2247,7 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) { // the test code context is interpreted as the worker thread and we assume // progress on it. The test should probably be ported to use simulated time // instead (ported to a scenario test perhaps?). - rtc::Thread::Current()->ProcessMessages(5000); + Thread::Current()->ProcessMessages(5000); // New bitrate should be reconfigured above the previous max. 
As there's no // network connection this shouldn't be flaky, as no bitrate should've been @@ -2179,52 +2259,6 @@ TEST_F(VideoSendStreamTest, CanReconfigureToUseStartBitrateAbovePreviousMax) { DestroyStreams(); } -class StartStopBitrateObserver : public test::FakeEncoder { - public: - StartStopBitrateObserver() : FakeEncoder(Clock::GetRealTimeClock()) {} - int32_t InitEncode(const VideoCodec* config, - const Settings& settings) override { - MutexLock lock(&mutex_); - encoder_init_.Set(); - return FakeEncoder::InitEncode(config, settings); - } - - void SetRates(const RateControlParameters& parameters) override { - MutexLock lock(&mutex_); - bitrate_kbps_ = parameters.bitrate.get_sum_kbps(); - bitrate_changed_.Set(); - FakeEncoder::SetRates(parameters); - } - - bool WaitForEncoderInit() { - return encoder_init_.Wait(test::VideoTestConstants::kDefaultTimeout); - } - - bool WaitBitrateChanged(WaitUntil until) { - do { - absl::optional bitrate_kbps; - { - MutexLock lock(&mutex_); - bitrate_kbps = bitrate_kbps_; - } - if (!bitrate_kbps) - continue; - - if ((until == WaitUntil::kNonZero && *bitrate_kbps > 0) || - (until == WaitUntil::kZero && *bitrate_kbps == 0)) { - return true; - } - } while (bitrate_changed_.Wait(test::VideoTestConstants::kDefaultTimeout)); - return false; - } - - private: - Mutex mutex_; - rtc::Event encoder_init_; - rtc::Event bitrate_changed_; - absl::optional bitrate_kbps_ RTC_GUARDED_BY(mutex_); -}; - TEST_F(VideoSendStreamTest, EncoderIsProperlyInitializedAndDestroyed) { class EncoderStateObserver : public test::SendTest, public VideoEncoder { public: @@ -2360,10 +2394,11 @@ template class VideoCodecConfigObserver : public test::SendTest, public test::FakeEncoder { public: - VideoCodecConfigObserver(VideoCodecType video_codec_type, + VideoCodecConfigObserver(const Environment& env, + VideoCodecType video_codec_type, TaskQueueBase* task_queue) : SendTest(test::VideoTestConstants::kDefaultTimeout), - FakeEncoder(Clock::GetRealTimeClock()), + FakeEncoder(env), video_codec_type_(video_codec_type), stream_(nullptr), encoder_factory_(this), @@ -2404,7 +2439,7 @@ class VideoCodecConfigObserver : public test::SendTest, void InitCodecSpecifics(); void VerifyCodecSpecifics(const VideoCodec& config) const; - rtc::scoped_refptr + scoped_refptr GetEncoderSpecificSettings() const; void PerformTest() override { @@ -2434,7 +2469,7 @@ class VideoCodecConfigObserver : public test::SendTest, T encoder_settings_; const VideoCodecType video_codec_type_; - rtc::Event init_encode_event_; + Event init_encode_event_; VideoSendStream* stream_; test::VideoEncoderProxyFactory encoder_factory_; VideoEncoderConfig encoder_config_; @@ -2466,7 +2501,7 @@ void VideoCodecConfigObserver::VerifyCodecSpecifics( } template <> -rtc::scoped_refptr +scoped_refptr VideoCodecConfigObserver::GetEncoderSpecificSettings() const { return nullptr; } @@ -2499,9 +2534,9 @@ void VideoCodecConfigObserver::VerifyCodecSpecifics( } template <> -rtc::scoped_refptr +scoped_refptr VideoCodecConfigObserver::GetEncoderSpecificSettings() const { - return rtc::make_ref_counted( + return make_ref_counted( encoder_settings_); } @@ -2533,19 +2568,21 @@ void VideoCodecConfigObserver::VerifyCodecSpecifics( } template <> -rtc::scoped_refptr +scoped_refptr VideoCodecConfigObserver::GetEncoderSpecificSettings() const { - return rtc::make_ref_counted( + return make_ref_counted( encoder_settings_); } TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp8Config) { - VideoCodecConfigObserver test(kVideoCodecVP8, task_queue()); + 
VideoCodecConfigObserver test(env(), kVideoCodecVP8, + task_queue()); RunBaseTest(&test); } TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp9Config) { - VideoCodecConfigObserver test(kVideoCodecVP9, task_queue()); + VideoCodecConfigObserver test(env(), kVideoCodecVP9, + task_queue()); RunBaseTest(&test); } @@ -2557,7 +2594,8 @@ TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp9Config) { #define MAYBE_EncoderSetupPropagatesH264Config EncoderSetupPropagatesH264Config #endif TEST_F(VideoSendStreamTest, MAYBE_EncoderSetupPropagatesH264Config) { - VideoCodecConfigObserver test(kVideoCodecH264, task_queue()); + VideoCodecConfigObserver test(env(), kVideoCodecH264, + task_queue()); RunBaseTest(&test); } @@ -2570,7 +2608,7 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) { media_bytes_sent_(0) {} private: - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { MutexLock lock(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -2579,7 +2617,7 @@ TEST_F(VideoSendStreamTest, RtcpSenderReportContainsMediaBytesSent) { return SEND_PACKET; } - Action OnSendRtcp(rtc::ArrayView packet) override { + Action OnSendRtcp(ArrayView packet) override { MutexLock lock(&mutex_); test::RtcpPacketParser parser; EXPECT_TRUE(parser.Parse(packet)); @@ -2622,6 +2660,7 @@ TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) { private: std::vector CreateEncoderStreams( + const FieldTrialsView& /*field_trials*/, int frame_width, int frame_height, const VideoEncoderConfig& encoder_config) override { @@ -2639,9 +2678,9 @@ TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) { class ScreencastTargetBitrateTest : public test::SendTest, public test::FakeEncoder { public: - ScreencastTargetBitrateTest() + explicit ScreencastTargetBitrateTest(const Environment& env) : SendTest(test::VideoTestConstants::kDefaultTimeout), - test::FakeEncoder(Clock::GetRealTimeClock()), + test::FakeEncoder(env), encoder_factory_(this) {} private: @@ -2662,7 +2701,7 @@ TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) { send_config->encoder_settings.encoder_factory = &encoder_factory_; EXPECT_EQ(1u, encoder_config->number_of_streams); encoder_config->video_stream_factory = - rtc::make_ref_counted(); + make_ref_counted(); EXPECT_EQ(1u, encoder_config->simulcast_layers.size()); encoder_config->simulcast_layers[0].num_temporal_layers = 2; encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen; @@ -2673,7 +2712,7 @@ TEST_F(VideoSendStreamTest, TranslatesTwoLayerScreencastToTargetBitrate) { << "Timed out while waiting for the encoder to be initialized."; } test::VideoEncoderProxyFactory encoder_factory_; - } test; + } test(env()); RunBaseTest(&test); } @@ -2697,9 +2736,10 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { public VideoBitrateAllocatorFactory, public test::FakeEncoder { public: - explicit EncoderBitrateThresholdObserver(TaskQueueBase* task_queue) + explicit EncoderBitrateThresholdObserver(const Environment& env, + TaskQueueBase* task_queue) : SendTest(test::VideoTestConstants::kDefaultTimeout), - FakeEncoder(Clock::GetRealTimeClock()), + FakeEncoder(env), task_queue_(task_queue), target_bitrate_(0), num_rate_allocator_creations_(0), @@ -2711,7 +2751,8 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { CreateBuiltinVideoBitrateAllocatorFactory()) {} private: - std::unique_ptr CreateVideoBitrateAllocator( + 
std::unique_ptr Create( + const Environment& env, const VideoCodec& codec) override { EXPECT_GE(codec.startBitrate, codec.minBitrate); EXPECT_LE(codec.startBitrate, codec.maxBitrate); @@ -2736,7 +2777,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { ++num_rate_allocator_creations_; create_rate_allocator_event_.Set(); - return bitrate_allocator_factory_->CreateVideoBitrateAllocator(codec); + return bitrate_allocator_factory_->Create(env, codec); } int32_t InitEncode(const VideoCodec* codecSettings, @@ -2775,7 +2816,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { // more than one update pending, in which case we keep waiting // until the correct value has been observed. // The target_bitrate_ is reduced by the calculated packet overhead. - const int64_t start_time = rtc::TimeMillis(); + const int64_t start_time = TimeMillis(); do { MutexLock lock(&mutex_); @@ -2787,7 +2828,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { } while (bitrate_changed_event_.Wait( std::max(TimeDelta::Millis(1), test::VideoTestConstants::kDefaultTimeout - - TimeDelta::Millis(rtc::TimeMillis() - start_time)))); + TimeDelta::Millis(TimeMillis() - start_time)))); MutexLock lock(&mutex_); EXPECT_NEAR(target_bitrate_, expected_bitrate, abs_error) << "Timed out while waiting encoder rate to be set."; @@ -2870,9 +2911,9 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { } TaskQueueBase* const task_queue_; - rtc::Event create_rate_allocator_event_; - rtc::Event init_encode_event_; - rtc::Event bitrate_changed_event_; + Event create_rate_allocator_event_; + Event init_encode_event_; + Event bitrate_changed_event_; Mutex mutex_; uint32_t target_bitrate_ RTC_GUARDED_BY(&mutex_); @@ -2883,7 +2924,7 @@ TEST_F(VideoSendStreamTest, ReconfigureBitratesSetsEncoderBitratesCorrectly) { test::VideoEncoderProxyFactory encoder_factory_; std::unique_ptr bitrate_allocator_factory_; webrtc::VideoEncoderConfig encoder_config_; - } test(task_queue()); + } test(env(), task_queue()); RunBaseTest(&test); } @@ -2898,9 +2939,10 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) { class ScreencastTargetBitrateTest : public test::SendTest, public test::FakeEncoder { public: - explicit ScreencastTargetBitrateTest(TaskQueueBase* task_queue) + explicit ScreencastTargetBitrateTest(const Environment& env, + TaskQueueBase* task_queue) : SendTest(test::VideoTestConstants::kDefaultTimeout), - test::FakeEncoder(Clock::GetRealTimeClock()), + test::FakeEncoder(env), send_stream_(nullptr), encoder_factory_(this), task_queue_(task_queue) {} @@ -2915,7 +2957,7 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) { auto buffer = EncodedImageBuffer::Create(16); memset(buffer->data(), 0, 16); encoded.SetEncodedData(buffer); - encoded.SetTimestamp(input_image.timestamp()); + encoded.SetRtpTimestamp(input_image.rtp_timestamp()); encoded.capture_time_ms_ = input_image.render_time_ms(); for (size_t i = 0; i < kNumStreams; ++i) { @@ -2977,7 +3019,7 @@ TEST_F(VideoSendStreamTest, ReportsSentResolution) { VideoSendStream* send_stream_; test::VideoEncoderProxyFactory encoder_factory_; TaskQueueBase* const task_queue_; - } test(task_queue()); + } test(env(), task_queue()); RunBaseTest(&test); } @@ -2987,7 +3029,10 @@ class Vp9HeaderObserver : public test::SendTest { public: explicit Vp9HeaderObserver(const Vp9TestParams& params) : SendTest(test::VideoTestConstants::kLongTimeout), - encoder_factory_([]() { return VP9Encoder::Create(); 
}), + encoder_factory_( + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp9Encoder(env); + }), params_(params), vp9_settings_(VideoEncoder::GetDefaultVp9Settings()) {} @@ -3010,7 +3055,7 @@ class Vp9HeaderObserver : public test::SendTest { send_config->rtp.payload_type = kVp9PayloadType; ModifyVideoConfigsHook(send_config, receive_configs, encoder_config); encoder_config->encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( vp9_settings_); EXPECT_EQ(1u, encoder_config->number_of_streams); EXPECT_EQ(1u, encoder_config->simulcast_layers.size()); @@ -3034,12 +3079,12 @@ class Vp9HeaderObserver : public test::SendTest { } } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); EXPECT_EQ(kVp9PayloadType, rtp_packet.PayloadType()); - rtc::ArrayView rtp_payload = rtp_packet.payload(); + ArrayView rtp_payload = rtp_packet.payload(); bool new_packet = !last_packet_sequence_number_.has_value() || IsNewerSequenceNumber(rtp_packet.SequenceNumber(), @@ -3052,7 +3097,7 @@ class Vp9HeaderObserver : public test::SendTest { EXPECT_EQ(VideoCodecType::kVideoCodecVP9, video_header.codec); // Verify common fields for all configurations. const auto& vp9_header = - absl::get(video_header.video_type_header); + std::get(video_header.video_type_header); VerifyCommonHeader(vp9_header); CompareConsecutiveFrames(rtp_packet, video_header); // Verify configuration specific settings. @@ -3291,7 +3336,7 @@ class Vp9HeaderObserver : public test::SendTest { void CompareConsecutiveFrames(const RtpPacket& rtp_packet, const RTPVideoHeader& video) const { const auto& vp9_header = - absl::get(video.video_type_header); + std::get(video.video_type_header); const bool new_temporal_unit = !last_packet_timestamp_.has_value() || @@ -3322,10 +3367,10 @@ class Vp9HeaderObserver : public test::SendTest { } ScalableVideoController::StreamLayersConfig GetScalabilityConfig() const { - absl::optional scalability_mode = + std::optional scalability_mode = ScalabilityModeFromString(params_.scalability_mode); EXPECT_TRUE(scalability_mode.has_value()); - absl::optional config = + std::optional config = ScalabilityStructureConfig(*scalability_mode); EXPECT_TRUE(config.has_value()); EXPECT_EQ(config->num_spatial_layers, params_.num_spatial_layers); @@ -3337,8 +3382,8 @@ class Vp9HeaderObserver : public test::SendTest { VideoCodecVP9 vp9_settings_; webrtc::VideoEncoderConfig encoder_config_; bool last_packet_marker_ = false; - absl::optional last_packet_sequence_number_; - absl::optional last_packet_timestamp_; + std::optional last_packet_sequence_number_; + std::optional last_packet_timestamp_; RTPVideoHeaderVP9 last_vp9_; std::map last_temporal_idx_by_spatial_idx_; Mutex mutex_; @@ -3457,7 +3502,7 @@ void VideoSendStreamTest::TestVp9NonFlexMode( vp9_settings_.numberOfSpatialLayers = params_.num_spatial_layers; vp9_settings_.interLayerPred = params_.inter_layer_pred; } else { - absl::optional mode = + std::optional mode = ScalabilityModeFromString(params_.scalability_mode); encoder_config->simulcast_layers[0].scalability_mode = mode; EXPECT_TRUE(mode.has_value()); @@ -3469,8 +3514,8 @@ void VideoSendStreamTest::TestVp9NonFlexMode( GetScalabilityConfig(); int required_divisibility = 1; for (int sl_idx = 0; sl_idx < config.num_spatial_layers; ++sl_idx) { - required_divisibility = cricket::LeastCommonMultiple( - required_divisibility, config.scaling_factor_den[sl_idx]); + required_divisibility = + 
std::lcm(required_divisibility, config.scaling_factor_den[sl_idx]); } return required_divisibility; } @@ -3678,8 +3723,8 @@ TEST_F(VideoSendStreamTest, EncoderConfigMaxFramerateReportedToSource) { frame_generator_capturer->SetSinkWantsObserver(this); } - void OnSinkWantsChanged(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override { + void OnSinkWantsChanged(VideoSinkInterface* sink, + const VideoSinkWants& wants) override { if (wants.max_framerate_fps == kMaxFps) observation_complete_.Set(); } @@ -3706,9 +3751,10 @@ TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) { class RemoveOverheadFromBandwidthTest : public test::EndToEndTest, public test::FakeEncoder { public: - explicit RemoveOverheadFromBandwidthTest(TaskQueueBase* task_queue) + explicit RemoveOverheadFromBandwidthTest(const Environment& env, + TaskQueueBase* task_queue) : EndToEndTest(test::VideoTestConstants::kDefaultTimeout), - FakeEncoder(Clock::GetRealTimeClock()), + FakeEncoder(env), task_queue_(task_queue), encoder_factory_(this), call_(nullptr), @@ -3739,7 +3785,7 @@ TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) { EXPECT_FALSE(send_config->rtp.extensions.empty()); } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { MutexLock lock(&mutex_); first_packet_sent_ = true; return SEND_PACKET; @@ -3777,15 +3823,15 @@ TEST_F(VideoSendStreamTest, RemoveOverheadFromBandwidth) { Mutex mutex_; uint32_t max_bitrate_bps_ RTC_GUARDED_BY(&mutex_); bool first_packet_sent_ RTC_GUARDED_BY(&mutex_); - rtc::Event bitrate_changed_event_; - } test(task_queue()); + Event bitrate_changed_event_; + } test(env(), task_queue()); RunBaseTest(&test); } class PacingFactorObserver : public test::SendTest { public: PacingFactorObserver(bool configure_send_side, - absl::optional expected_pacing_factor) + std::optional expected_pacing_factor) : test::SendTest(test::VideoTestConstants::kDefaultTimeout), configure_send_side_(configure_send_side), expected_pacing_factor_(expected_pacing_factor) {} @@ -3810,7 +3856,7 @@ class PacingFactorObserver : public test::SendTest { } if (configure_send_side_ && !has_send_side) { - rtc::UniqueNumberGenerator unique_id_generator; + UniqueNumberGenerator unique_id_generator; unique_id_generator.AddKnownId(0); // First valid RTP extension ID is 1. for (const RtpExtension& extension : send_config->rtp.extensions) { unique_id_generator.AddKnownId(extension.id); @@ -3841,7 +3887,7 @@ class PacingFactorObserver : public test::SendTest { private: const bool configure_send_side_; - const absl::optional expected_pacing_factor_; + const std::optional expected_pacing_factor_; }; std::string GetAlrProbingExperimentString() { @@ -3862,7 +3908,7 @@ TEST_F(VideoSendStreamTest, AlrConfiguredWhenSendSideOn) { TEST_F(VideoSendStreamTest, AlrNotConfiguredWhenSendSideOff) { test::ScopedFieldTrials alr_experiment(GetAlrProbingExperimentString()); // Send-side bwe off, use configuration should not be overridden. 
- PacingFactorObserver test_without_send_side(false, absl::nullopt); + PacingFactorObserver test_without_send_side(false, std::nullopt); RunBaseTest(&test_without_send_side); } @@ -3922,7 +3968,7 @@ class ContentSwitchTest : public test::SendTest { done_ = true; } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { task_queue_->PostTask([this]() { MutexLock lock(&mutex_); if (done_) @@ -3995,7 +4041,7 @@ class ContentSwitchTest : public test::SendTest { } Mutex mutex_; - rtc::Event content_switch_event_; + Event content_switch_event_; Call* call_; bool done_ RTC_GUARDED_BY(mutex_) = false; StreamState state_ RTC_GUARDED_BY(mutex_); @@ -4057,8 +4103,8 @@ void VideoSendStreamTest::TestTemporalLayers( frame_generator_capturer->ChangeResolution(640, 360); } - void OnSinkWantsChanged(rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) override {} + void OnSinkWantsChanged(VideoSinkInterface* sink, + const VideoSinkWants& wants) override {} void ModifySenderBitrateConfig( BitrateConstraints* bitrate_config) override { @@ -4084,15 +4130,11 @@ void VideoSendStreamTest::TestTemporalLayers( test::VideoTestConstants::kVideoSendPayloadType; encoder_config->video_format.name = payload_name_; encoder_config->codec_type = PayloadStringToCodecType(payload_name_); - encoder_config->video_stream_factory = - rtc::make_ref_counted( - payload_name_, /*max_qp=*/56, /*is_screenshare=*/false, - /*conference_mode=*/false, encoder_info); encoder_config->max_bitrate_bps = kMaxBitrateBps; if (absl::EqualsIgnoreCase(payload_name_, "VP9")) { - encoder_config->encoder_specific_settings = rtc::make_ref_counted< - VideoEncoderConfig::Vp9EncoderSpecificSettings>( - VideoEncoder::GetDefaultVp9Settings()); + encoder_config->encoder_specific_settings = + make_ref_counted( + VideoEncoder::GetDefaultVp9Settings()); } if (scalability_mode_.empty()) { for (size_t i = 0; i < num_temporal_layers_.size(); ++i) { @@ -4118,7 +4160,7 @@ void VideoSendStreamTest::TestTemporalLayers( int temporal_idx; }; - bool ParsePayload(rtc::ArrayView packet, + bool ParsePayload(ArrayView packet, ParsedPacket& parsed) const { RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet)); @@ -4129,14 +4171,14 @@ void VideoSendStreamTest::TestTemporalLayers( parsed.timestamp = rtp_packet.Timestamp(); parsed.ssrc = rtp_packet.Ssrc(); - absl::optional parsed_payload = + std::optional parsed_payload = depacketizer_->Parse(rtp_packet.PayloadBuffer()); EXPECT_TRUE(parsed_payload); - if (const auto* vp8_header = absl::get_if( + if (const auto* vp8_header = std::get_if( &parsed_payload->video_header.video_type_header)) { parsed.temporal_idx = vp8_header->temporalIdx; - } else if (const auto* vp9_header = absl::get_if( + } else if (const auto* vp9_header = std::get_if( &parsed_payload->video_header.video_type_header)) { parsed.temporal_idx = vp9_header->temporal_idx; } else { @@ -4145,7 +4187,7 @@ void VideoSendStreamTest::TestTemporalLayers( return true; } - Action OnSendRtp(rtc::ArrayView packet) override { + Action OnSendRtp(ArrayView packet) override { ParsedPacket parsed; if (!ParsePayload(packet, parsed)) return SEND_PACKET; @@ -4212,9 +4254,10 @@ void VideoSendStreamTest::TestTemporalLayers( TEST_F(VideoSendStreamTest, TestTemporalLayersVp8) { InternalEncoderFactory internal_encoder_factory; test::FunctionVideoEncoderFactory encoder_factory( - [&internal_encoder_factory]() { + [&internal_encoder_factory](const Environment& env, + const SdpVideoFormat& format) { return std::make_unique( - 
&internal_encoder_factory, SdpVideoFormat("VP8")); + env, &internal_encoder_factory, nullptr, SdpVideoFormat::VP8()); }); TestTemporalLayers(&encoder_factory, "VP8", @@ -4225,9 +4268,10 @@ TEST_F(VideoSendStreamTest, TestTemporalLayersVp8) { TEST_F(VideoSendStreamTest, TestTemporalLayersVp8Simulcast) { InternalEncoderFactory internal_encoder_factory; test::FunctionVideoEncoderFactory encoder_factory( - [&internal_encoder_factory]() { + [&internal_encoder_factory](const Environment& env, + const SdpVideoFormat& format) { return std::make_unique( - &internal_encoder_factory, SdpVideoFormat("VP8")); + env, &internal_encoder_factory, nullptr, SdpVideoFormat::VP8()); }); TestTemporalLayers(&encoder_factory, "VP8", @@ -4238,9 +4282,10 @@ TEST_F(VideoSendStreamTest, TestTemporalLayersVp8Simulcast) { TEST_F(VideoSendStreamTest, TestTemporalLayersVp8SimulcastWithDifferentNumTls) { InternalEncoderFactory internal_encoder_factory; test::FunctionVideoEncoderFactory encoder_factory( - [&internal_encoder_factory]() { + [&internal_encoder_factory](const Environment& env, + const SdpVideoFormat& format) { return std::make_unique( - &internal_encoder_factory, SdpVideoFormat("VP8")); + env, &internal_encoder_factory, nullptr, SdpVideoFormat::VP8()); }); TestTemporalLayers(&encoder_factory, "VP8", @@ -4250,7 +4295,9 @@ TEST_F(VideoSendStreamTest, TestTemporalLayersVp8SimulcastWithDifferentNumTls) { TEST_F(VideoSendStreamTest, TestTemporalLayersVp8SimulcastWithoutSimAdapter) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }); TestTemporalLayers(&encoder_factory, "VP8", /*num_temporal_layers=*/{2, 2}, @@ -4260,9 +4307,10 @@ TEST_F(VideoSendStreamTest, TestTemporalLayersVp8SimulcastWithoutSimAdapter) { TEST_F(VideoSendStreamTest, TestScalabilityModeVp8L1T2) { InternalEncoderFactory internal_encoder_factory; test::FunctionVideoEncoderFactory encoder_factory( - [&internal_encoder_factory]() { + [&internal_encoder_factory](const Environment& env, + const SdpVideoFormat& format) { return std::make_unique( - &internal_encoder_factory, SdpVideoFormat("VP8")); + env, &internal_encoder_factory, nullptr, SdpVideoFormat::VP8()); }); TestTemporalLayers(&encoder_factory, "VP8", @@ -4272,9 +4320,10 @@ TEST_F(VideoSendStreamTest, TestScalabilityModeVp8L1T2) { TEST_F(VideoSendStreamTest, TestScalabilityModeVp8Simulcast) { InternalEncoderFactory internal_encoder_factory; test::FunctionVideoEncoderFactory encoder_factory( - [&internal_encoder_factory]() { + [&internal_encoder_factory](const Environment& env, + const SdpVideoFormat& format) { return std::make_unique( - &internal_encoder_factory, SdpVideoFormat("VP8")); + env, &internal_encoder_factory, nullptr, SdpVideoFormat::VP8()); }); TestTemporalLayers(&encoder_factory, "VP8", @@ -4285,9 +4334,10 @@ TEST_F(VideoSendStreamTest, TestScalabilityModeVp8Simulcast) { TEST_F(VideoSendStreamTest, TestScalabilityModeVp8SimulcastWithDifferentMode) { InternalEncoderFactory internal_encoder_factory; test::FunctionVideoEncoderFactory encoder_factory( - [&internal_encoder_factory]() { + [&internal_encoder_factory](const Environment& env, + const SdpVideoFormat& format) { return std::make_unique( - &internal_encoder_factory, SdpVideoFormat("VP8")); + env, &internal_encoder_factory, nullptr, SdpVideoFormat::VP8()); }); TestTemporalLayers(&encoder_factory, "VP8", @@ -4297,7 +4347,9 @@ TEST_F(VideoSendStreamTest, 
TestScalabilityModeVp8SimulcastWithDifferentMode) { TEST_F(VideoSendStreamTest, TestScalabilityModeVp8SimulcastWithoutSimAdapter) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP8Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp8Encoder(env); + }); TestTemporalLayers(&encoder_factory, "VP8", /*num_temporal_layers=*/{}, @@ -4306,7 +4358,9 @@ TEST_F(VideoSendStreamTest, TestScalabilityModeVp8SimulcastWithoutSimAdapter) { TEST_F(VideoSendStreamTest, TestTemporalLayersVp9) { test::FunctionVideoEncoderFactory encoder_factory( - []() { return VP9Encoder::Create(); }); + [](const Environment& env, const SdpVideoFormat& format) { + return CreateVp9Encoder(env); + }); TestTemporalLayers(&encoder_factory, "VP9", /*num_temporal_layers=*/{2}, diff --git a/video/video_source_sink_controller.cc b/video/video_source_sink_controller.cc index 2f7b37585d..51316f8b0c 100644 --- a/video/video_source_sink_controller.cc +++ b/video/video_source_sink_controller.cc @@ -21,8 +21,8 @@ namespace webrtc { VideoSourceSinkController::VideoSourceSinkController( - rtc::VideoSinkInterface* sink, - rtc::VideoSourceInterface* source) + VideoSinkInterface* sink, + VideoSourceInterface* source) : sink_(sink), source_(source) { RTC_DCHECK(sink_); } @@ -32,10 +32,10 @@ VideoSourceSinkController::~VideoSourceSinkController() { } void VideoSourceSinkController::SetSource( - rtc::VideoSourceInterface* source) { + VideoSourceInterface* source) { RTC_DCHECK_RUN_ON(&sequence_checker_); - rtc::VideoSourceInterface* old_source = source_; + VideoSourceInterface* old_source = source_; source_ = source; if (old_source != source && old_source) @@ -62,7 +62,7 @@ void VideoSourceSinkController::PushSourceSinkSettings() { RTC_DCHECK_RUN_ON(&sequence_checker_); if (!source_) return; - rtc::VideoSinkWants wants = CurrentSettingsToSinkWants(); + VideoSinkWants wants = CurrentSettingsToSinkWants(); source_->AddOrUpdateSink(sink_, wants); } @@ -71,13 +71,13 @@ VideoSourceRestrictions VideoSourceSinkController::restrictions() const { return restrictions_; } -absl::optional VideoSourceSinkController::pixels_per_frame_upper_limit() +std::optional VideoSourceSinkController::pixels_per_frame_upper_limit() const { RTC_DCHECK_RUN_ON(&sequence_checker_); return pixels_per_frame_upper_limit_; } -absl::optional VideoSourceSinkController::frame_rate_upper_limit() +std::optional VideoSourceSinkController::frame_rate_upper_limit() const { RTC_DCHECK_RUN_ON(&sequence_checker_); return frame_rate_upper_limit_; @@ -93,7 +93,7 @@ int VideoSourceSinkController::resolution_alignment() const { return resolution_alignment_; } -const std::vector& +const std::vector& VideoSourceSinkController::resolutions() const { RTC_DCHECK_RUN_ON(&sequence_checker_); return resolutions_; @@ -104,10 +104,10 @@ bool VideoSourceSinkController::active() const { return active_; } -absl::optional -VideoSourceSinkController::requested_resolution() const { +std::optional +VideoSourceSinkController::scale_resolution_down_to() const { RTC_DCHECK_RUN_ON(&sequence_checker_); - return requested_resolution_; + return scale_resolution_down_to_; } void VideoSourceSinkController::SetRestrictions( @@ -117,13 +117,13 @@ void VideoSourceSinkController::SetRestrictions( } void VideoSourceSinkController::SetPixelsPerFrameUpperLimit( - absl::optional pixels_per_frame_upper_limit) { + std::optional pixels_per_frame_upper_limit) { RTC_DCHECK_RUN_ON(&sequence_checker_); pixels_per_frame_upper_limit_ = 
std::move(pixels_per_frame_upper_limit); } void VideoSourceSinkController::SetFrameRateUpperLimit( - absl::optional frame_rate_upper_limit) { + std::optional frame_rate_upper_limit) { RTC_DCHECK_RUN_ON(&sequence_checker_); frame_rate_upper_limit_ = std::move(frame_rate_upper_limit); } @@ -140,7 +140,7 @@ void VideoSourceSinkController::SetResolutionAlignment( } void VideoSourceSinkController::SetResolutions( - std::vector resolutions) { + std::vector resolutions) { RTC_DCHECK_RUN_ON(&sequence_checker_); resolutions_ = std::move(resolutions); } @@ -150,26 +150,25 @@ void VideoSourceSinkController::SetActive(bool active) { active_ = active; } -void VideoSourceSinkController::SetRequestedResolution( - absl::optional requested_resolution) { +void VideoSourceSinkController::SetScaleResolutionDownTo( + std::optional scale_resolution_down_to) { RTC_DCHECK_RUN_ON(&sequence_checker_); - requested_resolution_ = std::move(requested_resolution); + scale_resolution_down_to_ = std::move(scale_resolution_down_to); } // RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_) -rtc::VideoSinkWants VideoSourceSinkController::CurrentSettingsToSinkWants() - const { - rtc::VideoSinkWants wants; +VideoSinkWants VideoSourceSinkController::CurrentSettingsToSinkWants() const { + VideoSinkWants wants; wants.rotation_applied = rotation_applied_; // `wants.black_frames` is not used, it always has its default value false. wants.max_pixel_count = - rtc::dchecked_cast(restrictions_.max_pixels_per_frame().value_or( + dchecked_cast(restrictions_.max_pixels_per_frame().value_or( std::numeric_limits::max())); wants.target_pixel_count = restrictions_.target_pixels_per_frame().has_value() - ? absl::optional(rtc::dchecked_cast( + ? std::optional(dchecked_cast( restrictions_.target_pixels_per_frame().value())) - : absl::nullopt; + : std::nullopt; wants.max_framerate_fps = restrictions_.max_frame_rate().has_value() ? static_cast(restrictions_.max_frame_rate().value()) @@ -177,7 +176,7 @@ rtc::VideoSinkWants VideoSourceSinkController::CurrentSettingsToSinkWants() wants.resolution_alignment = resolution_alignment_; wants.max_pixel_count = std::min(wants.max_pixel_count, - rtc::dchecked_cast(pixels_per_frame_upper_limit_.value_or( + dchecked_cast(pixels_per_frame_upper_limit_.value_or( std::numeric_limits::max()))); wants.max_framerate_fps = std::min(wants.max_framerate_fps, @@ -186,7 +185,7 @@ rtc::VideoSinkWants VideoSourceSinkController::CurrentSettingsToSinkWants() : std::numeric_limits::max()); wants.resolutions = resolutions_; wants.is_active = active_; - wants.requested_resolution = requested_resolution_; + wants.requested_resolution = scale_resolution_down_to_; return wants; } diff --git a/video/video_source_sink_controller.h b/video/video_source_sink_controller.h index 1bb6ef61bf..451c59479e 100644 --- a/video/video_source_sink_controller.h +++ b/video/video_source_sink_controller.h @@ -11,10 +11,10 @@ #ifndef VIDEO_VIDEO_SOURCE_SINK_CONTROLLER_H_ #define VIDEO_VIDEO_SOURCE_SINK_CONTROLLER_H_ +#include #include #include -#include "absl/types/optional.h" #include "api/sequence_checker.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" @@ -25,17 +25,17 @@ namespace webrtc { // Responsible for configuring source/sink settings, i.e. performing -// rtc::VideoSourceInterface::AddOrUpdateSink(). It does this by -// storing settings internally which are converted to rtc::VideoSinkWants when -// PushSourceSinkSettings() is performed. +// webrtc::VideoSourceInterface::AddOrUpdateSink(). 
It does this by +// storing settings internally which are converted to webrtc::VideoSinkWants +// when PushSourceSinkSettings() is performed. class VideoSourceSinkController { public: - VideoSourceSinkController(rtc::VideoSinkInterface* sink, - rtc::VideoSourceInterface* source); + VideoSourceSinkController(VideoSinkInterface* sink, + VideoSourceInterface* source); ~VideoSourceSinkController(); - void SetSource(rtc::VideoSourceInterface* source); + void SetSource(VideoSourceInterface* source); bool HasSource() const; // Requests a refresh frame from the current source, if set. @@ -46,29 +46,29 @@ class VideoSourceSinkController { void PushSourceSinkSettings(); VideoSourceRestrictions restrictions() const; - absl::optional pixels_per_frame_upper_limit() const; - absl::optional frame_rate_upper_limit() const; + std::optional pixels_per_frame_upper_limit() const; + std::optional frame_rate_upper_limit() const; bool rotation_applied() const; int resolution_alignment() const; - const std::vector& resolutions() const; + const std::vector& resolutions() const; bool active() const; - absl::optional requested_resolution() const; + std::optional scale_resolution_down_to() const; // Updates the settings stored internally. In order for these settings to be // applied to the sink, PushSourceSinkSettings() must subsequently be called. void SetRestrictions(VideoSourceRestrictions restrictions); void SetPixelsPerFrameUpperLimit( - absl::optional pixels_per_frame_upper_limit); - void SetFrameRateUpperLimit(absl::optional frame_rate_upper_limit); + std::optional pixels_per_frame_upper_limit); + void SetFrameRateUpperLimit(std::optional frame_rate_upper_limit); void SetRotationApplied(bool rotation_applied); void SetResolutionAlignment(int resolution_alignment); - void SetResolutions(std::vector resolutions); + void SetResolutions(std::vector resolutions); void SetActive(bool active); - void SetRequestedResolution( - absl::optional requested_resolution); + void SetScaleResolutionDownTo( + std::optional scale_resolution_down_to); private: - rtc::VideoSinkWants CurrentSettingsToSinkWants() const + VideoSinkWants CurrentSettingsToSinkWants() const RTC_EXCLUSIVE_LOCKS_REQUIRED(sequence_checker_); // Used to ensure that this class is called on threads/sequences that it and @@ -76,24 +76,23 @@ class VideoSourceSinkController { // In practice, this represent's libjingle's worker thread. RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; - rtc::VideoSinkInterface* const sink_; - rtc::VideoSourceInterface* source_ - RTC_GUARDED_BY(&sequence_checker_); + VideoSinkInterface* const sink_; + VideoSourceInterface* source_ RTC_GUARDED_BY(&sequence_checker_); // Pixel and frame rate restrictions. VideoSourceRestrictions restrictions_ RTC_GUARDED_BY(&sequence_checker_); // Ensures that even if we are not restricted, the sink is never configured // above this limit. Example: We are not CPU limited (no `restrictions_`) but // our encoder is capped at 30 fps (= `frame_rate_upper_limit_`). 
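The comment above distinguishes adaptation restrictions from an encoder-imposed upper limit; CurrentSettingsToSinkWants() (earlier in this file's diff) resolves them by taking the tighter of the two. A simplified sketch of that min-merge, using a hypothetical free-function name:

    #include <algorithm>
    #include <limits>
    #include <optional>

    // Hypothetical helper mirroring the framerate merge in
    // CurrentSettingsToSinkWants(); the pixel-count limit is merged the same way.
    int EffectiveMaxFramerateFps(std::optional<double> restriction_fps,
                                 std::optional<double> upper_limit_fps) {
      int fps = restriction_fps ? static_cast<int>(*restriction_fps)
                                : std::numeric_limits<int>::max();
      if (upper_limit_fps.has_value()) {
        fps = std::min(fps, static_cast<int>(*upper_limit_fps));
      }
      return fps;
    }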
- absl::optional pixels_per_frame_upper_limit_ + std::optional pixels_per_frame_upper_limit_ RTC_GUARDED_BY(&sequence_checker_); - absl::optional frame_rate_upper_limit_ + std::optional frame_rate_upper_limit_ RTC_GUARDED_BY(&sequence_checker_); bool rotation_applied_ RTC_GUARDED_BY(&sequence_checker_) = false; int resolution_alignment_ RTC_GUARDED_BY(&sequence_checker_) = 1; - std::vector resolutions_ + std::vector resolutions_ RTC_GUARDED_BY(&sequence_checker_); bool active_ RTC_GUARDED_BY(&sequence_checker_) = true; - absl::optional requested_resolution_ + std::optional scale_resolution_down_to_ RTC_GUARDED_BY(&sequence_checker_); }; diff --git a/video/video_source_sink_controller_unittest.cc b/video/video_source_sink_controller_unittest.cc index 75cc52bdaf..a1df43a71e 100644 --- a/video/video_source_sink_controller_unittest.cc +++ b/video/video_source_sink_controller_unittest.cc @@ -24,10 +24,10 @@ namespace webrtc { namespace { -using FrameSize = rtc::VideoSinkWants::FrameSize; +using FrameSize = VideoSinkWants::FrameSize; constexpr int kIntUnconstrained = std::numeric_limits::max(); -class MockVideoSinkWithVideoFrame : public rtc::VideoSinkInterface { +class MockVideoSinkWithVideoFrame : public VideoSinkInterface { public: ~MockVideoSinkWithVideoFrame() override {} @@ -35,19 +35,18 @@ class MockVideoSinkWithVideoFrame : public rtc::VideoSinkInterface { MOCK_METHOD(void, OnDiscardedFrame, (), (override)); }; -class MockVideoSourceWithVideoFrame - : public rtc::VideoSourceInterface { +class MockVideoSourceWithVideoFrame : public VideoSourceInterface { public: ~MockVideoSourceWithVideoFrame() override {} MOCK_METHOD(void, AddOrUpdateSink, - (rtc::VideoSinkInterface*, - const rtc::VideoSinkWants&), + (webrtc::VideoSinkInterface*, + const webrtc::VideoSinkWants&), (override)); MOCK_METHOD(void, RemoveSink, - (rtc::VideoSinkInterface*), + (webrtc::VideoSinkInterface*), (override)); MOCK_METHOD(void, RequestRefreshFrame, (), (override)); }; @@ -62,15 +61,15 @@ TEST(VideoSourceSinkControllerTest, UnconstrainedByDefault) { EXPECT_FALSE(controller.pixels_per_frame_upper_limit().has_value()); EXPECT_FALSE(controller.frame_rate_upper_limit().has_value()); EXPECT_FALSE(controller.rotation_applied()); - EXPECT_FALSE(controller.requested_resolution().has_value()); + EXPECT_FALSE(controller.scale_resolution_down_to().has_value()); EXPECT_EQ(controller.resolution_alignment(), 1); EXPECT_CALL(source, AddOrUpdateSink(_, _)) - .WillOnce([](rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { + .WillOnce([](VideoSinkInterface* sink, + const VideoSinkWants& wants) { EXPECT_FALSE(wants.rotation_applied); EXPECT_EQ(wants.max_pixel_count, kIntUnconstrained); - EXPECT_EQ(wants.target_pixel_count, absl::nullopt); + EXPECT_EQ(wants.target_pixel_count, std::nullopt); EXPECT_EQ(wants.max_framerate_fps, kIntUnconstrained); EXPECT_EQ(wants.resolution_alignment, 1); EXPECT_FALSE(wants.requested_resolution.has_value()); @@ -92,8 +91,8 @@ TEST(VideoSourceSinkControllerTest, VideoRestrictionsToSinkWants) { restrictions.set_max_frame_rate(30.0); controller.SetRestrictions(restrictions); EXPECT_CALL(source, AddOrUpdateSink(_, _)) - .WillOnce([](rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { + .WillOnce([](VideoSinkInterface* sink, + const VideoSinkWants& wants) { EXPECT_EQ(wants.max_pixel_count, 42); EXPECT_EQ(wants.target_pixel_count, 200); EXPECT_EQ(wants.max_framerate_fps, 30); @@ -106,8 +105,8 @@ TEST(VideoSourceSinkControllerTest, VideoRestrictionsToSinkWants) { 
controller.SetFrameRateUpperLimit(10.0); EXPECT_CALL(source, AddOrUpdateSink(_, _)) - .WillOnce([](rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { + .WillOnce([](VideoSinkInterface* sink, + const VideoSinkWants& wants) { EXPECT_EQ(wants.max_pixel_count, 24); EXPECT_EQ(wants.max_framerate_fps, 10); }); @@ -122,8 +121,8 @@ TEST(VideoSourceSinkControllerTest, RotationApplied) { EXPECT_TRUE(controller.rotation_applied()); EXPECT_CALL(source, AddOrUpdateSink(_, _)) - .WillOnce([](rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { + .WillOnce([](VideoSinkInterface* sink, + const VideoSinkWants& wants) { EXPECT_TRUE(wants.rotation_applied); }); controller.PushSourceSinkSettings(); @@ -137,8 +136,8 @@ TEST(VideoSourceSinkControllerTest, ResolutionAlignment) { EXPECT_EQ(controller.resolution_alignment(), 13); EXPECT_CALL(source, AddOrUpdateSink(_, _)) - .WillOnce([](rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { + .WillOnce([](VideoSinkInterface* sink, + const VideoSinkWants& wants) { EXPECT_EQ(wants.resolution_alignment, 13); }); controller.PushSourceSinkSettings(); @@ -166,16 +165,16 @@ TEST(VideoSourceSinkControllerTest, controller.RequestRefreshFrame(); } -TEST(VideoSourceSinkControllerTest, RequestedResolutionPropagatesToWants) { +TEST(VideoSourceSinkControllerTest, ScaleResolutionDownToPropagatesToWants) { MockVideoSinkWithVideoFrame sink; MockVideoSourceWithVideoFrame source; VideoSourceSinkController controller(&sink, &source); - controller.SetRequestedResolution(FrameSize(640, 360)); - EXPECT_TRUE(controller.requested_resolution().has_value()); + controller.SetScaleResolutionDownTo(FrameSize(640, 360)); + EXPECT_TRUE(controller.scale_resolution_down_to().has_value()); EXPECT_CALL(source, AddOrUpdateSink(_, _)) - .WillOnce([](rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { + .WillOnce([](VideoSinkInterface* sink, + const VideoSinkWants& wants) { EXPECT_EQ(*wants.requested_resolution, FrameSize(640, 360)); }); controller.PushSourceSinkSettings(); @@ -189,10 +188,9 @@ TEST(VideoSourceSinkControllerTest, ActivePropagatesToWants) { EXPECT_TRUE(controller.active()); EXPECT_CALL(source, AddOrUpdateSink(_, _)) - .WillOnce([](rtc::VideoSinkInterface* sink, - const rtc::VideoSinkWants& wants) { - EXPECT_TRUE(wants.is_active); - }); + .WillOnce( + [](VideoSinkInterface* sink, + const VideoSinkWants& wants) { EXPECT_TRUE(wants.is_active); }); controller.PushSourceSinkSettings(); } diff --git a/video/video_stream_buffer_controller.cc b/video/video_stream_buffer_controller.cc index 455f064b01..83de75ac3f 100644 --- a/video/video_stream_buffer_controller.cc +++ b/video/video_stream_buffer_controller.cc @@ -12,11 +12,11 @@ #include #include +#include #include #include "absl/base/attributes.h" #include "absl/functional/bind_front.h" -#include "absl/types/optional.h" #include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "api/units/data_size.h" @@ -56,7 +56,7 @@ struct FrameMetadata { size(frame.size()), contentType(frame.contentType()), delayed_by_retransmission(frame.delayed_by_retransmission()), - rtp_timestamp(frame.Timestamp()), + rtp_timestamp(frame.RtpTimestamp()), receive_time(frame.ReceivedTimestamp()) {} const bool is_last_spatial_layer; @@ -65,7 +65,7 @@ struct FrameMetadata { const VideoContentType contentType; const bool delayed_by_retransmission; const uint32_t rtp_timestamp; - const absl::optional receive_time; + const std::optional receive_time; }; Timestamp MinReceiveTime(const 
EncodedFrame& frame) { @@ -79,7 +79,7 @@ Timestamp MinReceiveTime(const EncodedFrame& frame) { } Timestamp ReceiveTime(const EncodedFrame& frame) { - absl::optional ts = frame.ReceivedTimestamp(); + std::optional ts = frame.ReceivedTimestamp(); RTC_DCHECK(ts.has_value()) << "Received frame must have a timestamp set!"; return *ts; } @@ -148,7 +148,7 @@ void VideoStreamBufferController::Clear() { frame_decode_scheduler_->CancelOutstanding(); } -absl::optional VideoStreamBufferController::InsertFrame( +std::optional VideoStreamBufferController::InsertFrame( std::unique_ptr frame) { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); FrameMetadata metadata(*frame); @@ -224,10 +224,10 @@ void VideoStreamBufferController::OnFrameReady( TargetVideoDelayIsTooLarge(timing_->TargetVideoDelay())) { RTC_LOG(LS_WARNING) << "Resetting jitter estimator and timing module due " "to bad render timing for rtp_timestamp=" - << first_frame.Timestamp(); + << first_frame.RtpTimestamp(); jitter_estimator_.Reset(); timing_->Reset(); - render_time = timing_->RenderTime(first_frame.Timestamp(), now); + render_time = timing_->RenderTime(first_frame.RtpTimestamp(), now); } for (std::unique_ptr& frame : frames) { @@ -240,24 +240,20 @@ void VideoStreamBufferController::OnFrameReady( } if (!superframe_delayed_by_retransmission) { - absl::optional inter_frame_delay_variation = - ifdv_calculator_.Calculate(first_frame.Timestamp(), max_receive_time); + std::optional inter_frame_delay_variation = + ifdv_calculator_.Calculate(first_frame.RtpTimestamp(), + max_receive_time); if (inter_frame_delay_variation) { jitter_estimator_.UpdateEstimate(*inter_frame_delay_variation, superframe_size); } - float rtt_mult = protection_mode_ == kProtectionNackFEC ? 0.0 : 1.0; - absl::optional rtt_mult_add_cap_ms = absl::nullopt; - if (rtt_mult_settings_.has_value()) { - rtt_mult = rtt_mult_settings_->rtt_mult_setting; - rtt_mult_add_cap_ms = - TimeDelta::Millis(rtt_mult_settings_->rtt_mult_add_cap_ms); - } + static constexpr float kRttMult = 0.9f; + static constexpr TimeDelta kRttMultAddCap = TimeDelta::Millis(200); timing_->SetJitterDelay( - jitter_estimator_.GetJitterEstimate(rtt_mult, rtt_mult_add_cap_ms)); + jitter_estimator_.GetJitterEstimate(kRttMult, kRttMultAddCap)); timing_->UpdateCurrentDelay(render_time, now); - } else if (RttMultExperiment::RttMultEnabled()) { + } else { jitter_estimator_.FrameNacked(); } @@ -356,7 +352,7 @@ void VideoStreamBufferController::UpdateFrameBufferTimings( } void VideoStreamBufferController::UpdateTimingFrameInfo() { - absl::optional info = timing_->GetTimingFrameInfo(); + std::optional info = timing_->GetTimingFrameInfo(); if (info) stats_proxy_->OnTimingFrameInfoUpdated(*info); } @@ -380,7 +376,7 @@ void VideoStreamBufferController::ForceKeyFrameReleaseImmediately() } // Found keyframe - decode right away. if (next_frame.front()->is_keyframe()) { - auto render_time = timing_->RenderTime(next_frame.front()->Timestamp(), + auto render_time = timing_->RenderTime(next_frame.front()->RtpTimestamp(), clock_->CurrentTime()); OnFrameReady(std::move(next_frame), render_time); return; @@ -409,7 +405,7 @@ void VideoStreamBufferController::MaybeScheduleFrameForRelease() // Ensures the frame is scheduled for decode before the stream times out. // This is otherwise a race condition. 
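The clamp that follows implements the comment above: the decode is scheduled one millisecond ahead of the stream-timeout deadline, and the wait is kept non-negative. As a standalone sketch (hypothetical helper name):

    #include <algorithm>
    #include "api/units/time_delta.h"

    // Schedule slightly before the timeout deadline, never with a negative wait.
    webrtc::TimeDelta ClampWaitBeforeTimeout(webrtc::TimeDelta max_wait) {
      return std::max(max_wait - webrtc::TimeDelta::Millis(1),
                      webrtc::TimeDelta::Zero());
    }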
max_wait = std::max(max_wait - TimeDelta::Millis(1), TimeDelta::Zero()); - absl::optional schedule; + std::optional schedule; while (decodable_tu_info) { schedule = decode_timing_.OnFrameBufferUpdated( decodable_tu_info->next_rtp_timestamp, diff --git a/video/video_stream_buffer_controller.h b/video/video_stream_buffer_controller.h index f8793851dd..43d4544f71 100644 --- a/video/video_stream_buffer_controller.h +++ b/video/video_stream_buffer_controller.h @@ -21,7 +21,6 @@ #include "modules/video_coding/timing/inter_frame_delay_variation_calculator.h" #include "modules/video_coding/timing/jitter_estimator.h" #include "modules/video_coding/timing/timing.h" -#include "rtc_base/experiments/rtt_mult_experiment.h" #include "system_wrappers/include/clock.h" #include "video/decode_synchronizer.h" #include "video/video_receive_stream_timeout_tracker.h" @@ -81,7 +80,7 @@ class VideoStreamBufferController { void Stop(); void SetProtectionMode(VCMVideoProtection protection_mode); void Clear(); - absl::optional InsertFrame(std::unique_ptr frame); + std::optional InsertFrame(std::unique_ptr frame); void UpdateRtt(int64_t max_rtt_ms); void SetMaxWaits(TimeDelta max_wait_for_keyframe, TimeDelta max_wait_for_frame); @@ -103,8 +102,6 @@ class VideoStreamBufferController { RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_sequence_checker_; const FieldTrialsView& field_trials_; - const absl::optional rtt_mult_settings_ = - RttMultExperiment::GetRttMultValue(); Clock* const clock_; VideoStreamBufferControllerStatsObserver* const stats_proxy_; FrameSchedulingReceiver* const receiver_; diff --git a/video/video_stream_buffer_controller_unittest.cc b/video/video_stream_buffer_controller_unittest.cc index 3224b20d83..6987e4a503 100644 --- a/video/video_stream_buffer_controller_unittest.cc +++ b/video/video_stream_buffer_controller_unittest.cc @@ -14,13 +14,13 @@ #include #include +#include #include #include #include +#include #include -#include "absl/types/optional.h" -#include "absl/types/variant.h" #include "api/metronome/test/fake_metronome.h" #include "api/units/frequency.h" #include "api/units/time_delta.h" @@ -69,11 +69,11 @@ auto Frame(testing::Matcher m) { std::unique_ptr WithReceiveTimeFromRtpTimestamp( std::unique_ptr frame) { - if (frame->Timestamp() == 0) { + if (frame->RtpTimestamp() == 0) { frame->SetReceivedTime(kClockStart.ms()); } else { frame->SetReceivedTime( - TimeDelta::Seconds(frame->Timestamp() / 90000.0).ms() + + TimeDelta::Seconds(frame->RtpTimestamp() / 90000.0).ms() + kClockStart.ms()); } return frame; @@ -181,9 +181,9 @@ class VideoStreamBufferControllerFixture } using WaitResult = - absl::variant, TimeDelta /*wait_time*/>; + std::variant, TimeDelta /*wait_time*/>; - absl::optional WaitForFrameOrTimeout(TimeDelta wait) { + std::optional WaitForFrameOrTimeout(TimeDelta wait) { if (wait_result_) { return std::move(wait_result_); } @@ -193,7 +193,7 @@ class VideoStreamBufferControllerFixture } Timestamp now = clock_->CurrentTime(); - // TODO(bugs.webrtc.org/13756): Remove this when rtc::Thread uses uses + // TODO(bugs.webrtc.org/13756): Remove this when webrtc::Thread uses uses // Timestamp instead of an integer milliseconds. This extra wait is needed // for some tests that use the metronome. 
This is due to rounding // milliseconds, affecting the precision of simulated time controller uses @@ -238,14 +238,14 @@ class VideoStreamBufferControllerFixture private: void SetWaitResult(WaitResult result) { RTC_DCHECK(!wait_result_); - if (absl::holds_alternative>(result)) { - RTC_DCHECK(absl::get>(result)); + if (std::holds_alternative>(result)) { + RTC_DCHECK(std::get>(result)); } wait_result_.emplace(std::move(result)); } uint32_t dropped_frames_ = 0; - absl::optional wait_result_; + std::optional wait_result_; }; class VideoStreamBufferControllerTest @@ -260,7 +260,7 @@ TEST_P(VideoStreamBufferControllerTest, // No new timeout set since receiver has not started new decode. ResetLastResult(); - EXPECT_THAT(WaitForFrameOrTimeout(kMaxWaitForKeyframe), Eq(absl::nullopt)); + EXPECT_THAT(WaitForFrameOrTimeout(kMaxWaitForKeyframe), Eq(std::nullopt)); // Now that receiver has asked for new frame, a new timeout can occur. StartNextDecodeForceKeyframe(); @@ -362,7 +362,7 @@ TEST_P(VideoStreamBufferControllerTest, buffer_->Stop(); // Wait for 2x max wait time. Since we stopped, this should cause no timeouts // or frame-ready callbacks. - EXPECT_THAT(WaitForFrameOrTimeout(kMaxWaitForFrame * 2), Eq(absl::nullopt)); + EXPECT_THAT(WaitForFrameOrTimeout(kMaxWaitForFrame * 2), Eq(std::nullopt)); } TEST_P(VideoStreamBufferControllerTest, FramesWaitForDecoderToComplete) { @@ -383,7 +383,7 @@ TEST_P(VideoStreamBufferControllerTest, FramesWaitForDecoderToComplete) { // Advancing time should not result in a frame since the scheduler has not // been signalled that we are ready. - EXPECT_THAT(WaitForFrameOrTimeout(kFps30Delay), Eq(absl::nullopt)); + EXPECT_THAT(WaitForFrameOrTimeout(kFps30Delay), Eq(std::nullopt)); // Signal ready. StartNextDecode(); EXPECT_THAT(WaitForFrameOrTimeout(kFps30Delay), Frame(test::WithId(1))); @@ -566,7 +566,7 @@ TEST_P(VideoStreamBufferControllerTest, .AsLast() .Build())); StartNextDecode(); - EXPECT_THAT(WaitForFrameOrTimeout(TimeDelta::Zero()), Eq(absl::nullopt)); + EXPECT_THAT(WaitForFrameOrTimeout(TimeDelta::Zero()), Eq(std::nullopt)); // Scheduler is waiting to deliver Frame 1 now. Insert Frame 2. Frame 1 should // be delivered still. @@ -747,7 +747,7 @@ TEST_P(VideoStreamBufferControllerTest, NextFrameWithOldTimestamp) { // Avoid timeout being set while waiting for the frame and before the receiver // is ready. ResetLastResult(); - EXPECT_THAT(WaitForFrameOrTimeout(kMaxWaitForFrame), Eq(absl::nullopt)); + EXPECT_THAT(WaitForFrameOrTimeout(kMaxWaitForFrame), Eq(std::nullopt)); time_controller_.AdvanceTime(kRolloverDelay - kMaxWaitForFrame); StartNextDecode(); buffer_->InsertFrame(test::FakeFrameBuilder() @@ -821,8 +821,7 @@ TEST_P(LowLatencyVideoStreamBufferControllerTest, FramesDecodedInstantlyWithLowLatencyRendering) { // Initial keyframe. StartNextDecodeForceKeyframe(); - timing_.set_min_playout_delay(TimeDelta::Zero()); - timing_.set_max_playout_delay(TimeDelta::Millis(10)); + timing_.set_playout_delay({TimeDelta::Zero(), TimeDelta::Millis(10)}); // Playout delay of 0 implies low-latency rendering. auto frame = test::FakeFrameBuilder() .Id(0) @@ -844,7 +843,7 @@ TEST_P(LowLatencyVideoStreamBufferControllerTest, .Build(); buffer_->InsertFrame(std::move(frame)); // Pacing is set to 16ms in the field trial so we should not decode yet. 
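The low-latency tests in the surrounding hunks switch from the separate set_min_playout_delay()/set_max_playout_delay() setters to a single set_playout_delay() taking a {min, max} pair; a zero minimum is what marks the stream for low-latency rendering. The new call shape, as used in these hunks:

    // Old:
    //   timing_.set_min_playout_delay(TimeDelta::Zero());
    //   timing_.set_max_playout_delay(TimeDelta::Millis(10));
    // New (min == 0 implies low-latency rendering):
    timing_.set_playout_delay({TimeDelta::Zero(), TimeDelta::Millis(10)});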
- EXPECT_THAT(WaitForFrameOrTimeout(TimeDelta::Zero()), Eq(absl::nullopt)); + EXPECT_THAT(WaitForFrameOrTimeout(TimeDelta::Zero()), Eq(std::nullopt)); time_controller_.AdvanceTime(TimeDelta::Millis(16)); EXPECT_THAT(WaitForFrameOrTimeout(TimeDelta::Zero()), Frame(test::WithId(1))); } @@ -852,8 +851,7 @@ TEST_P(LowLatencyVideoStreamBufferControllerTest, TEST_P(LowLatencyVideoStreamBufferControllerTest, ZeroPlayoutDelayFullQueue) { // Initial keyframe. StartNextDecodeForceKeyframe(); - timing_.set_min_playout_delay(TimeDelta::Zero()); - timing_.set_max_playout_delay(TimeDelta::Millis(10)); + timing_.set_playout_delay({TimeDelta::Zero(), TimeDelta::Millis(10)}); auto frame = test::FakeFrameBuilder() .Id(0) .Time(0) @@ -885,8 +883,7 @@ TEST_P(LowLatencyVideoStreamBufferControllerTest, MinMaxDelayZeroLowLatencyMode) { // Initial keyframe. StartNextDecodeForceKeyframe(); - timing_.set_min_playout_delay(TimeDelta::Zero()); - timing_.set_max_playout_delay(TimeDelta::Zero()); + timing_.set_playout_delay({TimeDelta::Zero(), TimeDelta::Zero()}); // Playout delay of 0 implies low-latency rendering. auto frame = test::FakeFrameBuilder() .Id(0) diff --git a/video/video_stream_decoder2.cc b/video/video_stream_decoder2.cc index 5640835c16..51175b7717 100644 --- a/video/video_stream_decoder2.cc +++ b/video/video_stream_decoder2.cc @@ -10,6 +10,11 @@ #include "video/video_stream_decoder2.h" +#include +#include + +#include "api/units/time_delta.h" +#include "api/video/video_frame.h" #include "api/video_codecs/video_decoder.h" #include "modules/video_coding/video_receiver2.h" #include "rtc_base/checks.h" @@ -21,7 +26,7 @@ namespace internal { VideoStreamDecoder::VideoStreamDecoder( VideoReceiver2* video_receiver, ReceiveStatisticsProxy* receive_statistics_proxy, - rtc::VideoSinkInterface* incoming_video_stream) + VideoSinkInterface* incoming_video_stream) : video_receiver_(video_receiver), receive_stats_callback_(receive_statistics_proxy), incoming_video_stream_(incoming_video_stream) { @@ -43,14 +48,15 @@ VideoStreamDecoder::~VideoStreamDecoder() { // callback won't necessarily be called from the decoding thread. The decoding // thread may have held the lock when calling VideoDecoder::Decode, Reset, or // Release. Acquiring the same lock in the path of decode callback can deadlock. 
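The VideoStreamDecoder hunk that follows replaces the multi-parameter VCMReceiveCallback::FrameToRender() with OnFrameToRender(const FrameToRender&), which also carries an optional corruption score. A sketch of an override under the new signature; the class name and the stats_/sink_ members are hypothetical stand-ins, and the field names are the ones used in this hunk:

    int32_t MyReceiveCallback::OnFrameToRender(const FrameToRender& arguments) {
      stats_->OnDecodedFrame(arguments.video_frame, arguments.qp,
                             arguments.decode_time, arguments.content_type,
                             arguments.frame_type);
      if (arguments.corruption_score.has_value()) {
        stats_->OnCorruptionScore(*arguments.corruption_score,
                                  arguments.content_type);
      }
      sink_->OnFrame(arguments.video_frame);
      return 0;
    }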
-int32_t VideoStreamDecoder::FrameToRender(VideoFrame& video_frame, - absl::optional qp, - TimeDelta decode_time, - VideoContentType content_type, - VideoFrameType frame_type) { - receive_stats_callback_->OnDecodedFrame(video_frame, qp, decode_time, - content_type, frame_type); - incoming_video_stream_->OnFrame(video_frame); +int32_t VideoStreamDecoder::OnFrameToRender(const FrameToRender& arguments) { + receive_stats_callback_->OnDecodedFrame( + arguments.video_frame, arguments.qp, arguments.decode_time, + arguments.content_type, arguments.frame_type); + if (arguments.corruption_score.has_value()) { + receive_stats_callback_->OnCorruptionScore(*arguments.corruption_score, + arguments.content_type); + } + incoming_video_stream_->OnFrame(arguments.video_frame); return 0; } diff --git a/video/video_stream_decoder2.h b/video/video_stream_decoder2.h index 19db810b7c..616ec12ed1 100644 --- a/video/video_stream_decoder2.h +++ b/video/video_stream_decoder2.h @@ -11,12 +11,14 @@ #ifndef VIDEO_VIDEO_STREAM_DECODER2_H_ #define VIDEO_VIDEO_STREAM_DECODER2_H_ +#include #include #include #include #include #include "api/scoped_refptr.h" +#include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "api/video_codecs/video_decoder.h" #include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h" @@ -33,18 +35,13 @@ class ReceiveStatisticsProxy; class VideoStreamDecoder : public VCMReceiveCallback { public: - VideoStreamDecoder( - VideoReceiver2* video_receiver, - ReceiveStatisticsProxy* receive_statistics_proxy, - rtc::VideoSinkInterface* incoming_video_stream); + VideoStreamDecoder(VideoReceiver2* video_receiver, + ReceiveStatisticsProxy* receive_statistics_proxy, + VideoSinkInterface* incoming_video_stream); ~VideoStreamDecoder() override; // Implements VCMReceiveCallback. - int32_t FrameToRender(VideoFrame& video_frame, - absl::optional qp, - TimeDelta decode_time, - VideoContentType content_type, - VideoFrameType frame_type) override; + int32_t OnFrameToRender(const FrameToRender& arguments) override; void OnDroppedFrames(uint32_t frames_dropped) override; void OnIncomingPayloadType(int payload_type) override; void OnDecoderInfoChanged( @@ -53,7 +50,7 @@ class VideoStreamDecoder : public VCMReceiveCallback { private: VideoReceiver2* const video_receiver_; ReceiveStatisticsProxy* const receive_stats_callback_; - rtc::VideoSinkInterface* const incoming_video_stream_; + VideoSinkInterface* const incoming_video_stream_; }; } // namespace internal diff --git a/video/video_stream_decoder_impl_unittest.cc b/video/video_stream_decoder_impl_unittest.cc index d0cf9255c3..6942e06679 100644 --- a/video/video_stream_decoder_impl_unittest.cc +++ b/video/video_stream_decoder_impl_unittest.cc @@ -131,12 +131,13 @@ class VideoStreamDecoderImplTest : public ::testing::Test { public: VideoStreamDecoderImplTest() : time_controller_(Timestamp::Seconds(0)), - video_stream_decoder_(&callbacks_, - &decoder_factory_, - time_controller_.GetTaskQueueFactory(), - {{1, std::make_pair(SdpVideoFormat("VP8"), 1)}, - {2, std::make_pair(SdpVideoFormat("AV1"), 1)}}, - &field_trials_) { + video_stream_decoder_( + &callbacks_, + &decoder_factory_, + time_controller_.GetTaskQueueFactory(), + {{1, std::make_pair(SdpVideoFormat::VP8(), 1)}, + {2, std::make_pair(SdpVideoFormat::AV1Profile0(), 1)}}, + &field_trials_) { // Set the min playout delay to a value greater than zero to not activate // the low-latency renderer. 
video_stream_decoder_.SetMinPlayoutDelay(TimeDelta::Millis(10)); diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc index c367510b21..86239444f7 100644 --- a/video/video_stream_encoder.cc +++ b/video/video_stream_encoder.cc @@ -11,51 +11,88 @@ #include "video/video_stream_encoder.h" #include -#include -#include +#include +#include #include -#include +#include +#include +#include #include +#include +#include #include "absl/algorithm/container.h" #include "absl/cleanup/cleanup.h" -#include "absl/types/optional.h" +#include "api/adaptation/resource.h" +#include "api/environment/environment.h" +#include "api/fec_controller_override.h" #include "api/field_trials_view.h" +#include "api/make_ref_counted.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/rtp_sender_interface.h" +#include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "api/task_queue/pending_task_safety_flag.h" #include "api/task_queue/task_queue_base.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/encoded_image.h" -#include "api/video/i420_buffer.h" #include "api/video/render_resolution.h" +#include "api/video/video_adaptation_counters.h" #include "api/video/video_adaptation_reason.h" +#include "api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator.h" #include "api/video/video_bitrate_allocator_factory.h" #include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" #include "api/video/video_layers_allocation.h" +#include "api/video/video_source_interface.h" +#include "api/video/video_stream_encoder_settings.h" +#include "api/video/video_timing.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "call/adaptation/adaptation_constraint.h" +#include "call/adaptation/degradation_preference_provider.h" +#include "call/adaptation/encoder_settings.h" #include "call/adaptation/resource_adaptation_processor.h" #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_adapter.h" +#include "common_video/frame_instrumentation_data.h" #include "media/base/media_channel.h" +#include "modules/video_coding/codecs/interface/common_constants.h" #include "modules/video_coding/include/video_codec_initializer.h" +#include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/svc/scalability_mode_util.h" #include "modules/video_coding/svc/svc_rate_allocator.h" -#include "modules/video_coding/utility/vp8_constants.h" -#include "rtc_base/arraysize.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/experiments/encoder_info_settings.h" +#include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/trace_event.h" -#include "system_wrappers/include/metrics.h" +#include "video/adaptation/overuse_frame_detector.h" #include "video/adaptation/video_stream_encoder_resource_manager.h" 
#include "video/alignment_adjuster.h" #include "video/config/encoder_stream_factory.h" +#include "video/config/video_encoder_config.h" +#include "video/corruption_detection/frame_instrumentation_generator.h" +#include "video/encoder_bitrate_adjuster.h" #include "video/frame_cadence_adapter.h" #include "video/frame_dumping_encoder.h" +#include "video/video_stream_encoder_observer.h" namespace webrtc { @@ -74,13 +111,14 @@ constexpr char kFrameDropperFieldTrial[] = "WebRTC-FrameDropper"; constexpr char kSwitchEncoderOnInitializationFailuresFieldTrial[] = "WebRTC-SwitchEncoderOnInitializationFailures"; +// TODO(crbugs.com/378566918): Remove this kill switch after rollout. +constexpr char kSwitchEncoderFollowCodecPreferenceOrderFieldTrial[] = + "WebRTC-SwitchEncoderFollowCodecPreferenceOrder"; + const size_t kDefaultPayloadSize = 1440; const int64_t kParameterUpdateIntervalMs = 1000; -// Animation is capped to 720p. -constexpr int kMaxAnimationPixels = 1280 * 720; - constexpr int kDefaultMinScreenSharebps = 1200000; int GetNumSpatialLayers(const VideoCodec& codec) { @@ -89,11 +127,26 @@ int GetNumSpatialLayers(const VideoCodec& codec) { } else if (codec.codecType == kVideoCodecAV1 && codec.GetScalabilityMode().has_value()) { return ScalabilityModeToNumSpatialLayers(*(codec.GetScalabilityMode())); + } else if (codec.codecType == kVideoCodecH265) { + // No spatial scalability support for H.265. + return 1; } else { return 0; } } +std::optional MaybeConvertDropReason( + VideoStreamEncoderObserver::DropReason reason) { + switch (reason) { + case VideoStreamEncoderObserver::DropReason::kMediaOptimization: + return EncodedImageCallback::DropReason::kDroppedByMediaOptimizations; + case VideoStreamEncoderObserver::DropReason::kEncoder: + return EncodedImageCallback::DropReason::kDroppedByEncoder; + default: + return std::nullopt; + } +} + bool RequiresEncoderReset(const VideoCodec& prev_send_codec, const VideoCodec& new_send_codec, bool was_encode_called_since_last_initialization) { @@ -135,7 +188,9 @@ bool RequiresEncoderReset(const VideoCodec& prev_send_codec, return true; } break; - + case kVideoCodecH265: + // No H.265 specific handling needed. + [[fallthrough]]; default: break; } @@ -157,6 +212,15 @@ bool RequiresEncoderReset(const VideoCodec& prev_send_codec, prev_send_codec.simulcastStream[i].qpMax) { return true; } + + if (new_send_codec.simulcastStream[i].maxFramerate != + prev_send_codec.simulcastStream[i].maxFramerate && + new_send_codec.simulcastStream[i].maxFramerate != + new_send_codec.maxFramerate) { + // SetRates can only represent maxFramerate for one layer. Reset the + // encoder if there are multiple layers that differ in maxFramerate. + return true; + } } if (new_send_codec.codecType == kVideoCodecVP9) { @@ -270,7 +334,7 @@ VideoLayersAllocation CreateVideoLayersAllocation( // Encoder may drop frames internally if `maxFramerate` is set. spatial_layer.frame_rate_fps = std::min( encoder_config.simulcastStream[si].maxFramerate, - rtc::saturated_cast( + saturated_cast( (current_rate.framerate_fps * frame_rate_fraction) / VideoEncoder::EncoderInfo::kMaxFramerateFraction)); } @@ -340,7 +404,7 @@ VideoLayersAllocation CreateVideoLayersAllocation( // Encoder may drop frames internally if `maxFramerate` is set. 
spatial_layer.frame_rate_fps = std::min( encoder_config.spatialLayers[si].maxFramerate, - rtc::saturated_cast( + saturated_cast( (current_rate.framerate_fps * frame_rate_fraction) / VideoEncoder::EncoderInfo::kMaxFramerateFraction)); } @@ -353,10 +417,24 @@ VideoEncoder::EncoderInfo GetEncoderInfoWithBitrateLimitUpdate( const VideoEncoder::EncoderInfo& info, const VideoEncoderConfig& encoder_config, bool default_limits_allowed) { - if (!default_limits_allowed || !info.resolution_bitrate_limits.empty() || + bool are_all_bitrate_limits_zero = true; + // Hardware encoders commonly only report resolution limits, while reporting + // the bitrate limits as 0. In such case, we should not use them for setting + // bitrate limits. + if (!info.resolution_bitrate_limits.empty()) { + are_all_bitrate_limits_zero = std::all_of( + info.resolution_bitrate_limits.begin(), + info.resolution_bitrate_limits.end(), + [](const VideoEncoder::ResolutionBitrateLimits& limit) { + return limit.max_bitrate_bps == 0 && limit.min_bitrate_bps == 0; + }); + } + + if (!default_limits_allowed || !are_all_bitrate_limits_zero || encoder_config.simulcast_layers.size() <= 1) { return info; } + // Bitrate limits are not configured and more than one layer is used, use // the default limits (bitrate limits are not used for simulcast). VideoEncoder::EncoderInfo new_info = info; @@ -392,18 +470,18 @@ void ApplySpatialLayerBitrateLimits( } // Get bitrate limits for active stream. - absl::optional pixels = + std::optional pixels = VideoStreamAdapter::GetSingleActiveLayerPixels(*codec); if (!pixels.has_value()) { return; } - absl::optional bitrate_limits = + std::optional bitrate_limits = encoder_info.GetEncoderBitrateLimitsForResolution(*pixels); if (!bitrate_limits.has_value()) { return; } // Index for the active stream. - absl::optional index; + std::optional index; for (size_t i = 0; i < encoder_config.simulcast_layers.size(); ++i) { if (encoder_config.simulcast_layers[i].active) index = i; @@ -415,23 +493,21 @@ void ApplySpatialLayerBitrateLimits( if (encoder_config.simulcast_layers[*index].min_bitrate_bps <= 0) { min_bitrate_bps = bitrate_limits->min_bitrate_bps; } else { - min_bitrate_bps = - std::max(bitrate_limits->min_bitrate_bps, - encoder_config.simulcast_layers[*index].min_bitrate_bps); + min_bitrate_bps = encoder_config.simulcast_layers[*index].min_bitrate_bps; } int max_bitrate_bps; if (encoder_config.simulcast_layers[*index].max_bitrate_bps <= 0) { max_bitrate_bps = bitrate_limits->max_bitrate_bps; } else { - max_bitrate_bps = - std::min(bitrate_limits->max_bitrate_bps, - encoder_config.simulcast_layers[*index].max_bitrate_bps); - } - if (min_bitrate_bps >= max_bitrate_bps) { - RTC_LOG(LS_WARNING) << "Bitrate limits not used, min_bitrate_bps " - << min_bitrate_bps << " >= max_bitrate_bps " - << max_bitrate_bps; - return; + max_bitrate_bps = encoder_config.simulcast_layers[*index].max_bitrate_bps; + } + + if (encoder_config.simulcast_layers[*index].min_bitrate_bps > 0) { + // Ensure max is not below configured min. + max_bitrate_bps = std::max(min_bitrate_bps, max_bitrate_bps); + } else { + // Ensure min is not above max. 
+ min_bitrate_bps = std::min(min_bitrate_bps, max_bitrate_bps); } for (int i = 0; i < GetNumSpatialLayers(*codec); ++i) { @@ -439,8 +515,9 @@ void ApplySpatialLayerBitrateLimits( codec->spatialLayers[i].minBitrate = min_bitrate_bps / 1000; codec->spatialLayers[i].maxBitrate = max_bitrate_bps / 1000; codec->spatialLayers[i].targetBitrate = - std::min(codec->spatialLayers[i].targetBitrate, - codec->spatialLayers[i].maxBitrate); + std::clamp(codec->spatialLayers[i].targetBitrate, + codec->spatialLayers[i].minBitrate, + codec->spatialLayers[i].maxBitrate); break; } } @@ -469,81 +546,59 @@ void ApplyEncoderBitrateLimitsIfSingleActiveStream( } // Get bitrate limits for active stream. - absl::optional encoder_bitrate_limits = + std::optional encoder_bitrate_limits = encoder_info.GetEncoderBitrateLimitsForResolution( (*streams)[index].width * (*streams)[index].height); if (!encoder_bitrate_limits) { return; } - // If bitrate limits are set by RtpEncodingParameters, use intersection. int min_bitrate_bps; if (encoder_config_layers[index].min_bitrate_bps <= 0) { min_bitrate_bps = encoder_bitrate_limits->min_bitrate_bps; } else { - min_bitrate_bps = std::max(encoder_bitrate_limits->min_bitrate_bps, - (*streams)[index].min_bitrate_bps); + min_bitrate_bps = (*streams)[index].min_bitrate_bps; } int max_bitrate_bps; if (encoder_config_layers[index].max_bitrate_bps <= 0) { max_bitrate_bps = encoder_bitrate_limits->max_bitrate_bps; } else { - max_bitrate_bps = std::min(encoder_bitrate_limits->max_bitrate_bps, - (*streams)[index].max_bitrate_bps); - } - if (min_bitrate_bps >= max_bitrate_bps) { - RTC_LOG(LS_WARNING) << "Encoder bitrate limits" - << " (min=" << encoder_bitrate_limits->min_bitrate_bps - << ", max=" << encoder_bitrate_limits->max_bitrate_bps - << ") do not intersect with stream limits" - << " (min=" << (*streams)[index].min_bitrate_bps - << ", max=" << (*streams)[index].max_bitrate_bps - << "). Encoder bitrate limits not used."; - return; + max_bitrate_bps = (*streams)[index].max_bitrate_bps; + } + + if (encoder_config_layers[index].min_bitrate_bps > 0) { + // Ensure max is not below configured min. + max_bitrate_bps = std::max(min_bitrate_bps, max_bitrate_bps); + } else { + // Ensure min is not above max. 
+ min_bitrate_bps = std::min(min_bitrate_bps, max_bitrate_bps); } (*streams)[index].min_bitrate_bps = min_bitrate_bps; (*streams)[index].max_bitrate_bps = max_bitrate_bps; - (*streams)[index].target_bitrate_bps = - std::min((*streams)[index].target_bitrate_bps, - encoder_bitrate_limits->max_bitrate_bps); + (*streams)[index].target_bitrate_bps = std::clamp( + (*streams)[index].target_bitrate_bps, min_bitrate_bps, max_bitrate_bps); } -absl::optional ParseVp9LowTierCoreCountThreshold( +std::optional ParseVp9LowTierCoreCountThreshold( const FieldTrialsView& trials) { FieldTrialFlag disable_low_tier("Disabled"); FieldTrialParameter max_core_count("max_core_count", 2); ParseFieldTrial({&disable_low_tier, &max_core_count}, trials.Lookup("WebRTC-VP9-LowTierOptimizations")); if (disable_low_tier.Get()) { - return absl::nullopt; + return std::nullopt; } return max_core_count.Get(); } -absl::optional ParseEncoderThreadLimit(const FieldTrialsView& trials) { +std::optional ParseEncoderThreadLimit(const FieldTrialsView& trials) { FieldTrialOptional encoder_thread_limit("encoder_thread_limit"); ParseFieldTrial({&encoder_thread_limit}, trials.Lookup("WebRTC-VideoEncoderSettings")); return encoder_thread_limit.GetOptional(); } -absl::optional MergeRestrictions( - const std::vector>& list) { - absl::optional return_value; - for (const auto& res : list) { - if (!res) { - continue; - } - if (!return_value) { - return_value = *res; - continue; - } - return_value->UpdateMin(*res); - } - return return_value; -} - } // namespace VideoStreamEncoder::EncoderRateSettings::EncoderRateSettings() @@ -631,7 +686,7 @@ class VideoStreamEncoder::DegradationPreferenceManager }; VideoStreamEncoder::VideoStreamEncoder( - Clock* clock, + const Environment& env, uint32_t number_of_cores, VideoStreamEncoderObserver* encoder_stats_observer, const VideoStreamEncoderSettings& settings, @@ -640,15 +695,13 @@ VideoStreamEncoder::VideoStreamEncoder( std::unique_ptr encoder_queue, BitrateAllocationCallbackType allocation_cb_type, - const FieldTrialsView& field_trials, webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) - : field_trials_(field_trials), + : env_(env), worker_queue_(TaskQueueBase::Current()), number_of_cores_(number_of_cores), - sink_(nullptr), settings_(settings), allocation_cb_type_(allocation_cb_type), - rate_control_settings_(RateControlSettings::ParseFromFieldTrials()), + rate_control_settings_(env_.field_trials()), encoder_selector_from_constructor_(encoder_selector), encoder_selector_from_factory_( encoder_selector_from_constructor_ @@ -658,68 +711,38 @@ VideoStreamEncoder::VideoStreamEncoder( ? 
encoder_selector_from_constructor_ : encoder_selector_from_factory_.get()), encoder_stats_observer_(encoder_stats_observer), - cadence_callback_(*this), frame_cadence_adapter_(std::move(frame_cadence_adapter)), - encoder_initialized_(false), - max_framerate_(-1), - pending_encoder_reconfiguration_(false), - pending_encoder_creation_(false), - crop_width_(0), - crop_height_(0), - encoder_target_bitrate_bps_(absl::nullopt), - max_data_payload_length_(0), - encoder_paused_and_dropped_frame_(false), - was_encode_called_since_last_initialization_(false), - encoder_failed_(false), - clock_(clock), - last_captured_timestamp_(0), - delta_ntp_internal_ms_(clock_->CurrentNtpInMilliseconds() - - clock_->TimeInMilliseconds()), - last_frame_log_ms_(clock_->TimeInMilliseconds()), - captured_frame_count_(0), - dropped_frame_cwnd_pushback_count_(0), - dropped_frame_encoder_block_count_(0), - pending_frame_post_time_us_(0), - accumulated_update_rect_{0, 0, 0, 0}, - accumulated_update_rect_is_valid_(true), - animation_start_time_(Timestamp::PlusInfinity()), - cap_resolution_due_to_video_content_(false), - expect_resize_state_(ExpectResizeState::kNoResize), - fec_controller_override_(nullptr), - force_disable_frame_dropper_(false), - pending_frame_drops_(0), - cwnd_frame_counter_(0), + delta_ntp_internal_ms_(env_.clock().CurrentNtpInMilliseconds() - + env_.clock().TimeInMilliseconds()), + last_frame_log_ms_(env_.clock().TimeInMilliseconds()), next_frame_types_(1, VideoFrameType::kVideoFrameDelta), - frame_encode_metadata_writer_(this), - automatic_animation_detection_experiment_( - ParseAutomatincAnimationDetectionFieldTrial()), input_state_provider_(encoder_stats_observer), video_stream_adapter_( std::make_unique(&input_state_provider_, encoder_stats_observer, - field_trials)), + env_.field_trials())), degradation_preference_manager_( std::make_unique( video_stream_adapter_.get())), - adaptation_constraints_(), stream_resource_manager_(&input_state_provider_, encoder_stats_observer, - clock_, + &env_.clock(), settings_.experiment_cpu_load_estimator, std::move(overuse_detector), degradation_preference_manager_.get(), - field_trials), + env_.field_trials()), video_source_sink_controller_(/*sink=*/frame_cadence_adapter_.get(), /*source=*/nullptr), - default_limits_allowed_( - !field_trials.IsEnabled("WebRTC-DefaultBitrateLimitsKillSwitch")), + default_limits_allowed_(!env_.field_trials().IsEnabled( + "WebRTC-DefaultBitrateLimitsKillSwitch")), qp_parsing_allowed_( - !field_trials.IsEnabled("WebRTC-QpParsingKillSwitch")), - switch_encoder_on_init_failures_(!field_trials.IsDisabled( + !env_.field_trials().IsEnabled("WebRTC-QpParsingKillSwitch")), + switch_encoder_on_init_failures_(!env_.field_trials().IsDisabled( kSwitchEncoderOnInitializationFailuresFieldTrial)), vp9_low_tier_core_threshold_( - ParseVp9LowTierCoreCountThreshold(field_trials)), - experimental_encoder_thread_limit_(ParseEncoderThreadLimit(field_trials)), + ParseVp9LowTierCoreCountThreshold(env_.field_trials())), + experimental_encoder_thread_limit_( + ParseEncoderThreadLimit(env_.field_trials())), encoder_queue_(std::move(encoder_queue)) { TRACE_EVENT0("webrtc", "VideoStreamEncoder::VideoStreamEncoder"); RTC_DCHECK_RUN_ON(worker_queue_); @@ -727,10 +750,10 @@ VideoStreamEncoder::VideoStreamEncoder( RTC_DCHECK_GE(number_of_cores, 1); frame_cadence_adapter_->Initialize(&cadence_callback_); - stream_resource_manager_.Initialize(encoder_queue_.Get()); + stream_resource_manager_.Initialize(encoder_queue_.get()); - encoder_queue_.PostTask([this] { - 
RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); resource_adaptation_processor_ = std::make_unique( @@ -756,17 +779,33 @@ VideoStreamEncoder::~VideoStreamEncoder() { RTC_DCHECK_RUN_ON(worker_queue_); RTC_DCHECK(!video_source_sink_controller_.HasSource()) << "Must call ::Stop() before destruction."; + + // The queue must be destroyed before its pointer is invalidated to avoid race + // between destructor and running task that check if function is called on the + // encoder_queue_. + // std::unique_ptr destructor does the same two operations in reverse order as + // it doesn't expect member would be used after its destruction has started. + encoder_queue_.get_deleter()(encoder_queue_.get()); + encoder_queue_.release(); } void VideoStreamEncoder::Stop() { RTC_DCHECK_RUN_ON(worker_queue_); video_source_sink_controller_.SetSource(nullptr); - rtc::Event shutdown_event; + Event shutdown_event; absl::Cleanup shutdown = [&shutdown_event] { shutdown_event.Set(); }; - encoder_queue_.PostTask([this, shutdown = std::move(shutdown)] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, shutdown = std::move(shutdown)] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); if (resource_adaptation_processor_) { + // We're no longer interested in restriction updates, which may get + // triggered as part of removing resources. + video_stream_adapter_->RemoveRestrictionsListener(this); + video_stream_adapter_->RemoveRestrictionsListener( + &stream_resource_manager_); + resource_adaptation_processor_->RemoveResourceLimitationsListener( + &stream_resource_manager_); + // Stop and remove resources and delete adaptation processor. stream_resource_manager_.StopManagedResources(); for (auto* constraint : adaptation_constraints_) { video_stream_adapter_->RemoveAdaptationConstraint(constraint); @@ -775,11 +814,6 @@ void VideoStreamEncoder::Stop() { stream_resource_manager_.RemoveResource(resource); } additional_resources_.clear(); - video_stream_adapter_->RemoveRestrictionsListener(this); - video_stream_adapter_->RemoveRestrictionsListener( - &stream_resource_manager_); - resource_adaptation_processor_->RemoveResourceLimitationsListener( - &stream_resource_manager_); stream_resource_manager_.SetAdaptationProcessor(nullptr, nullptr); resource_adaptation_processor_.reset(); } @@ -787,14 +821,15 @@ void VideoStreamEncoder::Stop() { ReleaseEncoder(); encoder_ = nullptr; frame_cadence_adapter_ = nullptr; + frame_instrumentation_generator_ = nullptr; }); - shutdown_event.Wait(rtc::Event::kForever); + shutdown_event.Wait(Event::kForever); } void VideoStreamEncoder::SetFecControllerOverride( FecControllerOverride* fec_controller_override) { - encoder_queue_.PostTask([this, fec_controller_override] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, fec_controller_override] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); RTC_DCHECK(!fec_controller_override_); fec_controller_override_ = fec_controller_override; if (encoder_) { @@ -804,7 +839,7 @@ void VideoStreamEncoder::SetFecControllerOverride( } void VideoStreamEncoder::AddAdaptationResource( - rtc::scoped_refptr resource) { + scoped_refptr resource) { RTC_DCHECK_RUN_ON(worker_queue_); TRACE_EVENT0("webrtc", "VideoStreamEncoder::AddAdaptationResource"); // Map any externally added resources as kCpu for the sake of stats reporting. @@ -812,43 +847,43 @@ void VideoStreamEncoder::AddAdaptationResource( // of this MapResourceToReason() call. 
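In these VideoStreamEncoder hunks, encoder_queue_ becomes an owned pointer (hence the -> calls and RTC_DCHECK_RUN_ON(encoder_queue_.get())), and rtc::Event / rtc::scoped_refptr drop the rtc:: prefix. The blocking hand-off used by Stop() and GetAdaptationResources() follows this shape (sketch; DoWorkOnEncoderQueue is a placeholder):

    Event done;
    encoder_queue_->PostTask([&] {
      RTC_DCHECK_RUN_ON(encoder_queue_.get());
      DoWorkOnEncoderQueue();  // Placeholder for the queue-bound work.
      done.Set();
    });
    done.Wait(Event::kForever);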
TRACE_EVENT_ASYNC_BEGIN0( "webrtc", "VideoStreamEncoder::AddAdaptationResource(latency)", this); - encoder_queue_.PostTask([this, resource = std::move(resource)] { + encoder_queue_->PostTask([this, resource = std::move(resource)] { TRACE_EVENT_ASYNC_END0( "webrtc", "VideoStreamEncoder::AddAdaptationResource(latency)", this); - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); additional_resources_.push_back(resource); stream_resource_manager_.AddResource(resource, VideoAdaptationReason::kCpu); }); } -std::vector> +std::vector> VideoStreamEncoder::GetAdaptationResources() { RTC_DCHECK_RUN_ON(worker_queue_); // In practice, this method is only called by tests to verify operations that // run on the encoder queue. So rather than force PostTask() operations to // be accompanied by an event and a `Wait()`, we'll use PostTask + Wait() // here. - rtc::Event event; - std::vector> resources; - encoder_queue_.PostTask([&] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + Event event; + std::vector> resources; + encoder_queue_->PostTask([&] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); resources = resource_adaptation_processor_->GetResources(); event.Set(); }); - event.Wait(rtc::Event::kForever); + event.Wait(Event::kForever); return resources; } void VideoStreamEncoder::SetSource( - rtc::VideoSourceInterface* source, + VideoSourceInterface* source, const DegradationPreference& degradation_preference) { RTC_DCHECK_RUN_ON(worker_queue_); video_source_sink_controller_.SetSource(source); input_state_provider_.OnHasInputChanged(source); // This may trigger reconfiguring the QualityScaler on the encoder queue. - encoder_queue_.PostTask([this, degradation_preference] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, degradation_preference] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); degradation_preference_manager_->SetDegradationPreference( degradation_preference); stream_resource_manager_.SetDegradationPreferences(degradation_preference); @@ -866,19 +901,19 @@ void VideoStreamEncoder::SetSink(EncoderSink* sink, bool rotation_applied) { video_source_sink_controller_.SetRotationApplied(rotation_applied); video_source_sink_controller_.PushSourceSinkSettings(); - encoder_queue_.PostTask([this, sink] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, sink] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); sink_ = sink; }); } void VideoStreamEncoder::SetStartBitrate(int start_bitrate_bps) { - encoder_queue_.PostTask([this, start_bitrate_bps] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, start_bitrate_bps] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); RTC_LOG(LS_INFO) << "SetStartBitrate " << start_bitrate_bps; encoder_target_bitrate_bps_ = - start_bitrate_bps != 0 ? absl::optional(start_bitrate_bps) - : absl::nullopt; + start_bitrate_bps != 0 ? std::optional(start_bitrate_bps) + : std::nullopt; stream_resource_manager_.SetStartBitrate( DataRate::BitsPerSec(start_bitrate_bps)); }); @@ -893,10 +928,55 @@ void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config, size_t max_data_payload_length, SetParametersCallback callback) { RTC_DCHECK_RUN_ON(worker_queue_); - encoder_queue_.PostTask([this, config = std::move(config), - max_data_payload_length, - callback = std::move(callback)]() mutable { - RTC_DCHECK_RUN_ON(&encoder_queue_); + + // Inform source about max configured framerate, + // scale_resolution_down_to and which layers are active. + int max_framerate = -1; + // Is any layer active. 
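
GetAdaptationResources above keeps its blocking shape: post a task to the encoder queue, then wait on an Event until that task has copied the resource list. The same rendezvous can be written with standard C++ primitives; a sketch, assuming some PostToEncoderQueue() helper that runs closures on the queue (both names here are placeholders, not the WebRTC API):

  #include <functional>
  #include <future>
  #include <vector>

  // Placeholder for whatever schedules a closure on the encoder queue.
  void PostToEncoderQueue(std::function<void()> task);

  std::vector<int> GetResourcesBlocking() {
    std::promise<std::vector<int>> result;
    std::future<std::vector<int>> done = result.get_future();
    PostToEncoderQueue([&result] {
      // Runs on the encoder queue; publish the snapshot and unblock the caller.
      result.set_value({});
    });
    // Plays the same role as event.Wait(Event::kForever) in the diff.
    return done.get();
  }
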
+ bool active = false; + // The max scale_resolution_down_to. + std::optional scale_resolution_down_to; + for (const auto& stream : config.simulcast_layers) { + active |= stream.active; + if (stream.active) { + max_framerate = std::max(stream.max_framerate, max_framerate); + } + // Note: we propagate the highest scale_resolution_down_to regardless + // if layer is active or not. + if (stream.scale_resolution_down_to) { + if (!scale_resolution_down_to) { + scale_resolution_down_to.emplace( + stream.scale_resolution_down_to->width, + stream.scale_resolution_down_to->height); + } else { + scale_resolution_down_to.emplace( + std::max(stream.scale_resolution_down_to->width, + scale_resolution_down_to->width), + std::max(stream.scale_resolution_down_to->height, + scale_resolution_down_to->height)); + } + } + } + if (scale_resolution_down_to != + video_source_sink_controller_.scale_resolution_down_to() || + active != video_source_sink_controller_.active() || + max_framerate != + video_source_sink_controller_.frame_rate_upper_limit().value_or(-1)) { + video_source_sink_controller_.SetScaleResolutionDownTo( + scale_resolution_down_to); + if (max_framerate >= 0) { + video_source_sink_controller_.SetFrameRateUpperLimit(max_framerate); + } else { + video_source_sink_controller_.SetFrameRateUpperLimit(std::nullopt); + } + video_source_sink_controller_.SetActive(active); + video_source_sink_controller_.PushSourceSinkSettings(); + } + + encoder_queue_->PostTask([this, config = std::move(config), + max_data_payload_length, + callback = std::move(callback)]() mutable { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); RTC_DCHECK(sink_); RTC_LOG(LS_INFO) << "ConfigureEncoder requested."; @@ -911,7 +991,7 @@ void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config, frame_cadence_adapter_->SetZeroHertzModeEnabled( FrameCadenceAdapterInterface::ZeroHertzModeParams{}); } else { - frame_cadence_adapter_->SetZeroHertzModeEnabled(absl::nullopt); + frame_cadence_adapter_->SetZeroHertzModeEnabled(std::nullopt); } pending_encoder_creation_ = @@ -943,6 +1023,8 @@ void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config, void VideoStreamEncoder::ReconfigureEncoder() { // Running on the encoder queue. RTC_DCHECK(pending_encoder_reconfiguration_); + RTC_LOG(LS_INFO) << "[VSE] " << __func__ + << " [encoder_config=" << encoder_config_.ToString() << "]"; bool encoder_reset_required = false; if (pending_encoder_creation_) { @@ -952,9 +1034,8 @@ void VideoStreamEncoder::ReconfigureEncoder() { encoder_.reset(); encoder_ = MaybeCreateFrameDumpingEncoderWrapper( - settings_.encoder_factory->CreateVideoEncoder( - encoder_config_.video_format), - field_trials_); + settings_.encoder_factory->Create(env_, encoder_config_.video_format), + env_.field_trials()); if (!encoder_) { RTC_LOG(LS_ERROR) << "CreateVideoEncoder failed, failing encoder format: " << encoder_config_.video_format.ToString(); @@ -975,25 +1056,21 @@ void VideoStreamEncoder::ReconfigureEncoder() { // Possibly adjusts scale_resolution_down_by in `encoder_config_` to limit the // alignment value. AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors( - encoder_->GetEncoderInfo(), &encoder_config_, absl::nullopt); + encoder_->GetEncoderInfo(), &encoder_config_, std::nullopt); std::vector streams; if (encoder_config_.video_stream_factory) { // Note: only tests set their own EncoderStreamFactory... 
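
The block added to ConfigureEncoder above folds the simulcast layers into three values before touching the source/sink controller: whether any layer is active, the highest max_framerate among active layers, and the per-axis maximum of scale_resolution_down_to across all layers. A self-contained sketch of that aggregation (Resolution and Layer are stand-ins for the WebRTC structs):

  #include <algorithm>
  #include <optional>
  #include <vector>

  struct Resolution { int width = 0; int height = 0; };
  struct Layer {
    bool active = false;
    int max_framerate = -1;
    std::optional<Resolution> scale_resolution_down_to;
  };
  struct Aggregate {
    bool any_active = false;
    int max_framerate = -1;
    std::optional<Resolution> scale_resolution_down_to;
  };

  Aggregate Summarize(const std::vector<Layer>& layers) {
    Aggregate out;
    for (const Layer& layer : layers) {
      out.any_active |= layer.active;
      if (layer.active)
        out.max_framerate = std::max(layer.max_framerate, out.max_framerate);
      // As in the diff: the resolution cap considers every layer, active or not.
      if (!layer.scale_resolution_down_to)
        continue;
      if (!out.scale_resolution_down_to) {
        out.scale_resolution_down_to = *layer.scale_resolution_down_to;
      } else {
        out.scale_resolution_down_to->width =
            std::max(out.scale_resolution_down_to->width,
                     layer.scale_resolution_down_to->width);
        out.scale_resolution_down_to->height =
            std::max(out.scale_resolution_down_to->height,
                     layer.scale_resolution_down_to->height);
      }
    }
    return out;
  }
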
streams = encoder_config_.video_stream_factory->CreateEncoderStreams( - last_frame_info_->width, last_frame_info_->height, encoder_config_); + env_.field_trials(), last_frame_info_->width, last_frame_info_->height, + encoder_config_); } else { - rtc::scoped_refptr - factory = rtc::make_ref_counted( - encoder_config_.video_format.name, encoder_config_.max_qp, - encoder_config_.content_type == - webrtc::VideoEncoderConfig::ContentType::kScreen, - encoder_config_.legacy_conference_mode, encoder_->GetEncoderInfo(), - MergeRestrictions({latest_restrictions_, animate_restrictions_}), - &field_trials_); + auto factory = make_ref_counted( + encoder_->GetEncoderInfo(), latest_restrictions_); streams = factory->CreateEncoderStreams( - last_frame_info_->width, last_frame_info_->height, encoder_config_); + env_.field_trials(), last_frame_info_->width, last_frame_info_->height, + encoder_config_); } // TODO(webrtc:14451) : Move AlignmentAdjuster into EncoderStreamFactory @@ -1035,7 +1112,8 @@ void VideoStreamEncoder::ReconfigureEncoder() { const std::vector& bitrate_limits = encoder_->GetEncoderInfo().resolution_bitrate_limits.empty() ? EncoderInfoSettings:: - GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted() + GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted( + encoder_config_.codec_type) : encoder_->GetEncoderInfo().resolution_bitrate_limits; // For BandwidthQualityScaler, its implement based on a certain pixel_count @@ -1049,7 +1127,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { // to get a certain bitrate for certain pixel_count. It also doesn't work // for 960*540 and 640*520, we will nerver be stable at 640*520 due to their // |target_bitrate_bps| are both 2000Kbps. - absl::optional + std::optional qp_untrusted_bitrate_limit = EncoderInfoSettings:: GetSinglecastBitrateLimitForResolutionWhenQpIsUntrusted( last_frame_info_->width * last_frame_info_->height, @@ -1076,7 +1154,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { } } } else { - absl::optional + std::optional encoder_bitrate_limits = encoder_->GetEncoderInfo().GetEncoderBitrateLimitsForResolution( last_frame_info_->width * last_frame_info_->height); @@ -1097,7 +1175,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { int max_bitrate_bps; // The API max bitrate comes from both `encoder_config_.max_bitrate_bps` // and `encoder_config_.simulcast_layers[0].max_bitrate_bps`. - absl::optional api_max_bitrate_bps; + std::optional api_max_bitrate_bps; if (encoder_config_.simulcast_layers[0].max_bitrate_bps > 0) { api_max_bitrate_bps = encoder_config_.simulcast_layers[0].max_bitrate_bps; @@ -1140,15 +1218,19 @@ void VideoStreamEncoder::ReconfigureEncoder() { encoder_->GetEncoderInfo(), encoder_config_, default_limits_allowed_), encoder_config_.simulcast_layers, &streams); - VideoCodec codec; - if (!VideoCodecInitializer::SetupCodec(encoder_config_, streams, &codec)) { - RTC_LOG(LS_ERROR) << "Failed to create encoder configuration."; - } + VideoCodec codec = VideoCodecInitializer::SetupCodec( + env_.field_trials(), encoder_config_, streams); if (encoder_config_.codec_type == kVideoCodecVP9 || - encoder_config_.codec_type == kVideoCodecAV1) { + encoder_config_.codec_type == kVideoCodecAV1 +#ifdef RTC_ENABLE_H265 + || encoder_config_.codec_type == kVideoCodecH265 +#endif + ) { // Spatial layers configuration might impose some parity restrictions, // thus some cropping might be needed. 
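
In the untrusted-QP path above, the bitrate-limits table now also depends on the codec type when the encoder itself reports nothing. A condensed sketch of that selection, with stand-in types and an assumed DefaultLimitsFor() helper standing in for the EncoderInfoSettings lookup:

  #include <vector>

  struct ResolutionBitrateLimits {
    int frame_size_pixels = 0;
    int min_bitrate_bps = 0;
    int max_bitrate_bps = 0;
  };
  enum class CodecType { kVp8, kVp9, kAv1, kH264, kH265 };

  // Assumed helper returning per-codec default limits.
  std::vector<ResolutionBitrateLimits> DefaultLimitsFor(CodecType type);

  std::vector<ResolutionBitrateLimits> PickLimits(
      const std::vector<ResolutionBitrateLimits>& encoder_reported,
      CodecType type) {
    // Prefer what the encoder reports; fall back to per-codec defaults only
    // when the encoder provides no resolution bitrate limits at all.
    return encoder_reported.empty() ? DefaultLimitsFor(type) : encoder_reported;
  }
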
+ RTC_CHECK_GE(last_frame_info_->width, codec.width); + RTC_CHECK_GE(last_frame_info_->height, codec.height); crop_width_ = last_frame_info_->width - codec.width; crop_height_ = last_frame_info_->height - codec.height; ApplySpatialLayerBitrateLimits( @@ -1159,38 +1241,43 @@ void VideoStreamEncoder::ReconfigureEncoder() { } char log_stream_buf[4 * 1024]; - rtc::SimpleStringBuilder log_stream(log_stream_buf); - log_stream << "ReconfigureEncoder:\n"; - log_stream << "Simulcast streams:\n"; + SimpleStringBuilder log_stream(log_stream_buf); + log_stream << "ReconfigureEncoder: simulcast streams: "; for (size_t i = 0; i < codec.numberOfSimulcastStreams; ++i) { - log_stream << i << ": " << codec.simulcastStream[i].width << "x" - << codec.simulcastStream[i].height - << " min_kbps: " << codec.simulcastStream[i].minBitrate - << " target_kbps: " << codec.simulcastStream[i].targetBitrate - << " max_kbps: " << codec.simulcastStream[i].maxBitrate - << " max_fps: " << codec.simulcastStream[i].maxFramerate - << " max_qp: " << codec.simulcastStream[i].qpMax - << " num_tl: " << codec.simulcastStream[i].numberOfTemporalLayers - << " active: " - << (codec.simulcastStream[i].active ? "true" : "false") << "\n"; + log_stream << "{" << i << ": " << codec.simulcastStream[i].width << "x" + << codec.simulcastStream[i].height << " " + << ScalabilityModeToString( + codec.simulcastStream[i].GetScalabilityMode()) + << ", min_kbps: " << codec.simulcastStream[i].minBitrate + << ", target_kbps: " << codec.simulcastStream[i].targetBitrate + << ", max_kbps: " << codec.simulcastStream[i].maxBitrate + << ", max_fps: " << codec.simulcastStream[i].maxFramerate + << ", max_qp: " << codec.simulcastStream[i].qpMax << ", num_tl: " + << codec.simulcastStream[i].numberOfTemporalLayers + << ", active: " + << (codec.simulcastStream[i].active ? "true" : "false") << "}"; } if (encoder_config_.codec_type == kVideoCodecVP9 || - encoder_config_.codec_type == kVideoCodecAV1) { - log_stream << "Spatial layers:\n"; + encoder_config_.codec_type == kVideoCodecAV1 +#ifdef RTC_ENABLE_H265 + || encoder_config_.codec_type == kVideoCodecH265 +#endif + ) { + log_stream << ", spatial layers: "; for (int i = 0; i < GetNumSpatialLayers(codec); ++i) { - log_stream << i << ": " << codec.spatialLayers[i].width << "x" + log_stream << "{" << i << ": " << codec.spatialLayers[i].width << "x" << codec.spatialLayers[i].height - << " min_kbps: " << codec.spatialLayers[i].minBitrate - << " target_kbps: " << codec.spatialLayers[i].targetBitrate - << " max_kbps: " << codec.spatialLayers[i].maxBitrate - << " max_fps: " << codec.spatialLayers[i].maxFramerate - << " max_qp: " << codec.spatialLayers[i].qpMax - << " num_tl: " << codec.spatialLayers[i].numberOfTemporalLayers - << " active: " - << (codec.spatialLayers[i].active ? "true" : "false") << "\n"; + << ", min_kbps: " << codec.spatialLayers[i].minBitrate + << ", target_kbps: " << codec.spatialLayers[i].targetBitrate + << ", max_kbps: " << codec.spatialLayers[i].maxBitrate + << ", max_fps: " << codec.spatialLayers[i].maxFramerate + << ", max_qp: " << codec.spatialLayers[i].qpMax << ", num_tl: " + << codec.spatialLayers[i].numberOfTemporalLayers + << ", active: " + << (codec.spatialLayers[i].active ? 
"true" : "false") << "}"; } } - RTC_LOG(LS_INFO) << log_stream.str(); + RTC_LOG(LS_INFO) << "[VSE] " << log_stream.str(); codec.startBitrate = std::max(encoder_target_bitrate_bps_.value_or(0) / 1000, codec.minBitrate); @@ -1200,34 +1287,8 @@ void VideoStreamEncoder::ReconfigureEncoder() { RTC_DCHECK_LE(codec.startBitrate, 1000000); max_framerate_ = codec.maxFramerate; - // Inform source about max configured framerate, - // requested_resolution and which layers are active. - int max_framerate = 0; - // Is any layer active. - bool active = false; - // The max requested_resolution. - absl::optional requested_resolution; - for (const auto& stream : streams) { - max_framerate = std::max(stream.max_framerate, max_framerate); - active |= stream.active; - // Note: we propagate the highest requested_resolution regardless - // if layer is active or not. - if (stream.requested_resolution) { - if (!requested_resolution) { - requested_resolution.emplace(stream.requested_resolution->width, - stream.requested_resolution->height); - } else { - requested_resolution.emplace( - std::max(stream.requested_resolution->width, - requested_resolution->width), - std::max(stream.requested_resolution->height, - requested_resolution->height)); - } - } - } - // The resolutions that we're actually encoding with. - std::vector encoder_resolutions; + std::vector encoder_resolutions; // TODO(hbos): For the case of SVC, also make use of `codec.spatialLayers`. // For now, SVC layers are handled by the VP9 encoder. for (const auto& simulcastStream : codec.simulcastStream) { @@ -1239,31 +1300,20 @@ void VideoStreamEncoder::ReconfigureEncoder() { worker_queue_->PostTask(SafeTask( task_safety_.flag(), - [this, max_framerate, alignment, - encoder_resolutions = std::move(encoder_resolutions), - requested_resolution = std::move(requested_resolution), active]() { + [this, alignment, + encoder_resolutions = std::move(encoder_resolutions)]() { RTC_DCHECK_RUN_ON(worker_queue_); - if (max_framerate != - video_source_sink_controller_.frame_rate_upper_limit() || - alignment != video_source_sink_controller_.resolution_alignment() || + if (alignment != video_source_sink_controller_.resolution_alignment() || encoder_resolutions != - video_source_sink_controller_.resolutions() || - (video_source_sink_controller_.requested_resolution() != - requested_resolution) || - (video_source_sink_controller_.active() != active)) { - video_source_sink_controller_.SetFrameRateUpperLimit(max_framerate); + video_source_sink_controller_.resolutions()) { video_source_sink_controller_.SetResolutionAlignment(alignment); video_source_sink_controller_.SetResolutions( std::move(encoder_resolutions)); - video_source_sink_controller_.SetRequestedResolution( - requested_resolution); - video_source_sink_controller_.SetActive(active); video_source_sink_controller_.PushSourceSinkSettings(); } })); - rate_allocator_ = - settings_.bitrate_allocator_factory->CreateVideoBitrateAllocator(codec); + rate_allocator_ = settings_.bitrate_allocator_factory->Create(env_, codec); rate_allocator_->SetLegacyConferenceMode( encoder_config_.legacy_conference_mode); @@ -1279,6 +1329,10 @@ void VideoStreamEncoder::ReconfigureEncoder() { codec.SetVideoEncoderComplexity(VideoCodecComplexity::kComplexityLow); } + quality_convergence_controller_.Initialize( + codec.numberOfSimulcastStreams, encoder_->GetEncoderInfo().min_qp, + codec.codecType, env_.field_trials()); + send_codec_ = codec; // Keep the same encoder, as long as the video_format is unchanged. 
@@ -1309,10 +1363,15 @@ void VideoStreamEncoder::ReconfigureEncoder() { next_frame_types_.resize( std::max(static_cast(codec.numberOfSimulcastStreams), 1), VideoFrameType::kVideoFrameKey); + if (settings_.enable_frame_instrumentation_generator) { + frame_instrumentation_generator_ = + std::make_unique( + encoder_config_.codec_type); + } } frame_encode_metadata_writer_.Reset(); - last_encode_info_ms_ = absl::nullopt; + last_encode_info_ms_ = std::nullopt; was_encode_called_since_last_initialization_ = false; } @@ -1325,7 +1384,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { << " max frame rate " << codec.maxFramerate << " max payload size " << max_data_payload_length_; } else { - RTC_LOG(LS_ERROR) << "Failed to configure encoder."; + RTC_LOG(LS_ERROR) << "[VSE] Failed to configure encoder."; rate_allocator_ = nullptr; } @@ -1339,7 +1398,8 @@ void VideoStreamEncoder::ReconfigureEncoder() { num_layers = codec.VP8()->numberOfTemporalLayers; } else if (codec.codecType == kVideoCodecVP9) { num_layers = codec.VP9()->numberOfTemporalLayers; - } else if (codec.codecType == kVideoCodecAV1 && + } else if ((codec.codecType == kVideoCodecAV1 || + codec.codecType == kVideoCodecH265) && codec.GetScalabilityMode().has_value()) { num_layers = ScalabilityModeToNumTemporalLayers(*(codec.GetScalabilityMode())); @@ -1360,12 +1420,13 @@ void VideoStreamEncoder::ReconfigureEncoder() { // * We have screensharing with layers. // * "WebRTC-FrameDropper" field trial is "Disabled". force_disable_frame_dropper_ = - field_trials_.IsDisabled(kFrameDropperFieldTrial) || + env_.field_trials().IsDisabled(kFrameDropperFieldTrial) || (num_layers > 1 && codec.mode == VideoCodecMode::kScreensharing); const VideoEncoder::EncoderInfo info = encoder_->GetEncoderInfo(); if (rate_control_settings_.UseEncoderBitrateAdjuster()) { - bitrate_adjuster_ = std::make_unique(codec); + bitrate_adjuster_ = std::make_unique( + codec, env_.field_trials(), env_.clock()); bitrate_adjuster_->OnEncoderInfo(info); } @@ -1393,10 +1454,15 @@ void VideoStreamEncoder::ReconfigureEncoder() { break; } } - // Set min_bitrate_bps, max_bitrate_bps, and max padding bit rate for VP9 - // and AV1 and leave only one stream containing all necessary information. - if ((encoder_config_.codec_type == kVideoCodecVP9 || - encoder_config_.codec_type == kVideoCodecAV1) && + // Set min_bitrate_bps, max_bitrate_bps, and max padding bit rate for VP9, + // AV1 and H.265, and leave only one stream containing all necessary + // information. + if (( +#ifdef RTC_ENABLE_H265 + encoder_config_.codec_type == kVideoCodecH265 || +#endif + encoder_config_.codec_type == kVideoCodecVP9 || + encoder_config_.codec_type == kVideoCodecAV1) && single_stream_or_non_first_inactive) { // Lower max bitrate to the level codec actually can produce. streams[0].max_bitrate_bps = @@ -1424,8 +1490,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { if (!encoder_initialized_) { RTC_LOG(LS_WARNING) << "Failed to initialize " << CodecTypeToPayloadString(codec.codecType) - << " encoder." - << "switch_encoder_on_init_failures: " + << " encoder." << "switch_encoder_on_init_failures: " << switch_encoder_on_init_failures_; if (switch_encoder_on_init_failures_) { @@ -1461,15 +1526,21 @@ void VideoStreamEncoder::RequestEncoderSwitch() { } // If encoder selector is available, switch to the encoder it prefers. - // Otherwise try switching to VP8 (default WebRTC codec). 
- absl::optional preferred_fallback_encoder; + std::optional preferred_fallback_encoder; if (is_encoder_selector_available) { preferred_fallback_encoder = encoder_selector_->OnEncoderBroken(); } if (!preferred_fallback_encoder) { - preferred_fallback_encoder = - SdpVideoFormat(CodecTypeToPayloadString(kVideoCodecVP8)); + if (!env_.field_trials().IsDisabled( + kSwitchEncoderFollowCodecPreferenceOrderFieldTrial)) { + encoder_fallback_requested_ = true; + settings_.encoder_switch_request_callback->RequestEncoderFallback(); + return; + } else { + preferred_fallback_encoder = + SdpVideoFormat(CodecTypeToPayloadString(kVideoCodecVP8)); + } } settings_.encoder_switch_request_callback->RequestEncoderSwitch( @@ -1494,9 +1565,9 @@ void VideoStreamEncoder::OnEncoderSettingsChanged() { } void VideoStreamEncoder::OnFrame(Timestamp post_time, - int frames_scheduled_for_processing, + bool queue_overload, const VideoFrame& video_frame) { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); VideoFrame incoming_frame = video_frame; // In some cases, e.g., when the frame from decoder is fed to encoder, @@ -1519,13 +1590,13 @@ void VideoStreamEncoder::OnFrame(Timestamp post_time, // Convert NTP time, in ms, to RTP timestamp. const int kMsToRtpTimestamp = 90; - incoming_frame.set_timestamp( + incoming_frame.set_rtp_timestamp( kMsToRtpTimestamp * static_cast(incoming_frame.ntp_time_ms())); // Identifier should remain the same for newly produced incoming frame and the // received |video_frame|. - incoming_frame.set_capture_time_identifier( - video_frame.capture_time_identifier()); + incoming_frame.set_presentation_timestamp( + video_frame.presentation_timestamp()); if (incoming_frame.ntp_time_ms() <= last_captured_timestamp_) { // We don't allow the same capture time for two frames, drop this one. @@ -1533,11 +1604,8 @@ void VideoStreamEncoder::OnFrame(Timestamp post_time, << incoming_frame.ntp_time_ms() << " <= " << last_captured_timestamp_ << ") for incoming frame. Dropping."; - encoder_queue_.PostTask([this, incoming_frame]() { - RTC_DCHECK_RUN_ON(&encoder_queue_); - accumulated_update_rect_.Union(incoming_frame.update_rect()); - accumulated_update_rect_is_valid_ &= incoming_frame.has_update_rect(); - }); + ProcessDroppedFrame(incoming_frame, + VideoStreamEncoderObserver::DropReason::kBadTimestamp); return; } @@ -1552,29 +1620,27 @@ void VideoStreamEncoder::OnFrame(Timestamp post_time, encoder_stats_observer_->OnIncomingFrame(incoming_frame.width(), incoming_frame.height()); ++captured_frame_count_; - CheckForAnimatedContent(incoming_frame, post_time.us()); bool cwnd_frame_drop = cwnd_frame_drop_interval_ && (cwnd_frame_counter_++ % cwnd_frame_drop_interval_.value() == 0); - if (frames_scheduled_for_processing == 1 && !cwnd_frame_drop) { + if (!queue_overload && !cwnd_frame_drop) { MaybeEncodeVideoFrame(incoming_frame, post_time.us()); } else { if (cwnd_frame_drop) { // Frame drop by congestion window pushback. Do not encode this // frame. ++dropped_frame_cwnd_pushback_count_; - encoder_stats_observer_->OnFrameDropped( - VideoStreamEncoderObserver::DropReason::kCongestionWindow); } else { // There is a newer frame in flight. Do not encode this frame. 
RTC_LOG(LS_VERBOSE) << "Incoming frame dropped due to that the encoder is blocked."; ++dropped_frame_encoder_block_count_; - encoder_stats_observer_->OnFrameDropped( - VideoStreamEncoderObserver::DropReason::kEncoderQueue); } - accumulated_update_rect_.Union(incoming_frame.update_rect()); - accumulated_update_rect_is_valid_ &= incoming_frame.has_update_rect(); + ProcessDroppedFrame( + incoming_frame, + cwnd_frame_drop + ? VideoStreamEncoderObserver::DropReason::kCongestionWindow + : VideoStreamEncoderObserver::DropReason::kEncoderQueue); } if (log_stats) { RTC_LOG(LS_INFO) << "Number of frames: captured " << captured_frame_count_ @@ -1595,7 +1661,7 @@ void VideoStreamEncoder::OnDiscardedFrame() { } bool VideoStreamEncoder::EncoderPaused() const { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); // Pause video if paused by caller or as long as the network is down or the // pacer queue has grown too large in buffered mode. // If the pacer queue has grown too large or the network is down, @@ -1605,7 +1671,7 @@ bool VideoStreamEncoder::EncoderPaused() const { } void VideoStreamEncoder::TraceFrameDropStart() { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); // Start trace event only on the first frame after encoder is paused. if (!encoder_paused_and_dropped_frame_) { TRACE_EVENT_ASYNC_BEGIN0("webrtc", "EncoderPaused", this); @@ -1614,7 +1680,7 @@ void VideoStreamEncoder::TraceFrameDropStart() { } void VideoStreamEncoder::TraceFrameDropEnd() { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); // End trace event on first frame after encoder resumes, if frame was dropped. if (encoder_paused_and_dropped_frame_) { TRACE_EVENT_ASYNC_END0("webrtc", "EncoderPaused", this); @@ -1665,9 +1731,9 @@ uint32_t VideoStreamEncoder::GetInputFramerateFps() { // This method may be called after we cleared out the frame_cadence_adapter_ // reference in Stop(). In such a situation it's probably not important with a // decent estimate. - absl::optional input_fps = + std::optional input_fps = frame_cadence_adapter_ ? frame_cadence_adapter_->GetInputFrameRateFps() - : absl::nullopt; + : std::nullopt; if (!input_fps || *input_fps == 0) { return default_fps; } @@ -1747,7 +1813,7 @@ void VideoStreamEncoder::SetEncoderRates( void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, int64_t time_when_posted_us) { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); input_state_provider_.OnFrameSizeObserved(video_frame.size()); if (!last_frame_info_ || video_frame.width() != last_frame_info_->width || @@ -1780,15 +1846,9 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, // According to the testcase // InitialFrameDropOffWhenEncoderDisabledScaling, the return value // from GetScalingSettings should enable or disable the frame drop. - - // Update input frame rate before we start using it. If we update it after - // any potential frame drop we are going to artificially increase frame sizes. - // Poll the rate before updating, otherwise we risk the rate being estimated - // a little too high at the start of the call when then window is small. 
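
OnFrame above now keys frame dropping on the cadence adapter's queue_overload signal instead of counting frames scheduled for processing, and every drop is routed through a single helper (ProcessDroppedFrame, defined later in the patch) that remembers the frame's update region. A compact sketch of just the drop decision, with names chosen to mirror the diff:

  #include <cstdint>
  #include <optional>

  enum class DropReason { kCongestionWindow, kEncoderQueue };

  // Returns the drop reason, or nullopt if the frame should be encoded.
  // cwnd_drop_interval comes from congestion window pushback and drops every
  // Nth frame; queue_overload means the encoder is falling behind.
  std::optional<DropReason> ShouldDrop(
      bool queue_overload,
      std::optional<uint32_t> cwnd_drop_interval,
      uint64_t& cwnd_counter) {
    bool cwnd_drop =
        cwnd_drop_interval && (cwnd_counter++ % *cwnd_drop_interval == 0);
    if (!queue_overload && !cwnd_drop)
      return std::nullopt;  // Encode the frame.
    return cwnd_drop ? DropReason::kCongestionWindow
                     : DropReason::kEncoderQueue;
  }
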
uint32_t framerate_fps = GetInputFramerateFps(); - frame_cadence_adapter_->UpdateFrameRate(); - int64_t now_ms = clock_->TimeInMilliseconds(); + int64_t now_ms = env_.clock().TimeInMilliseconds(); if (pending_encoder_reconfiguration_) { ReconfigureEncoder(); last_parameters_update_ms_.emplace(now_ms); @@ -1811,10 +1871,8 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, // Because pending frame will be dropped in any case, we need to // remember its updated region. if (pending_frame_) { - encoder_stats_observer_->OnFrameDropped( - VideoStreamEncoderObserver::DropReason::kEncoderQueue); - accumulated_update_rect_.Union(pending_frame_->update_rect()); - accumulated_update_rect_is_valid_ &= pending_frame_->has_update_rect(); + ProcessDroppedFrame(*pending_frame_, + VideoStreamEncoderObserver::DropReason::kEncoderQueue); } if (DropDueToSize(video_frame.size())) { @@ -1828,10 +1886,8 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, } else { // Ensure that any previously stored frame is dropped. pending_frame_.reset(); - accumulated_update_rect_.Union(video_frame.update_rect()); - accumulated_update_rect_is_valid_ &= video_frame.has_update_rect(); - encoder_stats_observer_->OnFrameDropped( - VideoStreamEncoderObserver::DropReason::kEncoderQueue); + ProcessDroppedFrame( + video_frame, VideoStreamEncoderObserver::DropReason::kEncoderQueue); } return; } @@ -1849,10 +1905,8 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, // Ensure that any previously stored frame is dropped. pending_frame_.reset(); TraceFrameDropStart(); - accumulated_update_rect_.Union(video_frame.update_rect()); - accumulated_update_rect_is_valid_ &= video_frame.has_update_rect(); - encoder_stats_observer_->OnFrameDropped( - VideoStreamEncoderObserver::DropReason::kEncoderQueue); + ProcessDroppedFrame( + video_frame, VideoStreamEncoderObserver::DropReason::kEncoderQueue); } return; } @@ -1874,10 +1928,9 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, ? last_encoder_rate_settings_->encoder_target.bps() : 0) << ", input frame rate " << framerate_fps; - OnDroppedFrame( - EncodedImageCallback::DropReason::kDroppedByMediaOptimizations); - accumulated_update_rect_.Union(video_frame.update_rect()); - accumulated_update_rect_is_valid_ &= video_frame.has_update_rect(); + ProcessDroppedFrame( + video_frame, + VideoStreamEncoderObserver::DropReason::kMediaOptimization); return; } @@ -1886,15 +1939,18 @@ void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame, void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, int64_t time_when_posted_us) { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); RTC_LOG(LS_VERBOSE) << __func__ << " posted " << time_when_posted_us << " ntp time " << video_frame.ntp_time_ms(); - // If the encoder fail we can't continue to encode frames. When this happens - // the WebrtcVideoSender is notified and the whole VideoSendStream is - // recreated. - if (encoder_failed_ || !encoder_initialized_) + // If encoder fallback is requested, but we run out of codecs to be + // negotiated, we don't continue to encode frames. The send streams will still + // be kept. Otherwise if WebRtcVideoEngine responds to the fallback request, + // the send streams will be recreated and current VideoStreamEncoder will no + // longer be used. 
+ if (encoder_fallback_requested_ || !encoder_initialized_) { return; + } // It's possible that EncodeVideoFrame can be called after we've completed // a Stop() operation. Check if the encoder_ is set before continuing. @@ -1932,7 +1988,7 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, stream_resource_manager_.ConfigureQualityScaler(info); stream_resource_manager_.ConfigureBandwidthQualityScaler(info); - RTC_LOG(LS_INFO) << "Encoder info changed to " << info.ToString(); + RTC_LOG(LS_INFO) << "[VSE] Encoder info changed to " << info.ToString(); } if (bitrate_adjuster_) { @@ -1944,7 +2000,7 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, } } encoder_info_ = info; - last_encode_info_ms_ = clock_->TimeInMilliseconds(); + last_encode_info_ms_ = env_.clock().TimeInMilliseconds(); VideoFrame out_frame(video_frame); // Crop or scale the frame if needed. Dimension may be reduced to fit encoder @@ -1955,17 +2011,23 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, !info.supports_native_handle)) { int cropped_width = video_frame.width() - crop_width_; int cropped_height = video_frame.height() - crop_height_; - rtc::scoped_refptr cropped_buffer; + scoped_refptr cropped_buffer; // TODO(ilnik): Remove scaling if cropping is too big, as it should never // happen after SinkWants signaled correctly from ReconfigureEncoder. VideoFrame::UpdateRect update_rect = video_frame.update_rect(); if (crop_width_ < 4 && crop_height_ < 4) { // The difference is small, crop without scaling. + int offset_x = (crop_width_ + 1) / 2; + int offset_y = (crop_height_ + 1) / 2; + // Make sure offset is even so that u/v plane becomes aligned if u/v plane + // is subsampled. + offset_x -= offset_x % 2; + offset_y -= offset_y % 2; cropped_buffer = video_frame.video_frame_buffer()->CropAndScale( - crop_width_ / 2, crop_height_ / 2, cropped_width, cropped_height, - cropped_width, cropped_height); - update_rect.offset_x -= crop_width_ / 2; - update_rect.offset_y -= crop_height_ / 2; + offset_x, offset_y, cropped_width, cropped_height, cropped_width, + cropped_height); + update_rect.offset_x -= offset_x; + update_rect.offset_y -= offset_y; update_rect.Intersect( VideoFrame::UpdateRect{0, 0, cropped_width, cropped_height}); @@ -1988,8 +2050,7 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, out_frame.set_video_frame_buffer(cropped_buffer); out_frame.set_update_rect(update_rect); out_frame.set_ntp_time_ms(video_frame.ntp_time_ms()); - out_frame.set_capture_time_identifier( - video_frame.capture_time_identifier()); + out_frame.set_presentation_timestamp(video_frame.presentation_timestamp()); // Since accumulated_update_rect_ is constructed before cropping, // we can't trust it. If any changes were pending, we invalidate whole // frame here. 
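
The cropping fix above rounds the crop offsets down to even values so that, for subsampled formats such as I420, the U and V planes stay aligned with the cropped Y plane. A sketch of just the offset computation, matching the arithmetic in the diff:

  #include <utility>

  // Returns {offset_x, offset_y} for cropping away (crop_width, crop_height)
  // pixels roughly centered, rounded down to even values so 4:2:0 chroma
  // planes keep their 2x2 alignment after cropping.
  std::pair<int, int> EvenCropOffsets(int crop_width, int crop_height) {
    int offset_x = (crop_width + 1) / 2;
    int offset_y = (crop_height + 1) / 2;
    offset_x -= offset_x % 2;
    offset_y -= offset_y % 2;
    return {offset_x, offset_y};
  }

For example, a 3-pixel crop yields an offset of 2 on one side and 1 on the other, instead of the previous 1/2 split that could leave chroma sampling off by one.
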
@@ -2012,8 +2073,8 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, } accumulated_update_rect_is_valid_ = true; - TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(), - "Encode"); + TRACE_EVENT_ASYNC_STEP_INTO0("webrtc", "Video", video_frame.render_time_ms(), + "Encode"); stream_resource_manager_.OnEncodeStarted(out_frame, time_when_posted_us); @@ -2024,11 +2085,16 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, << send_codec_.height << " received a too small frame " << out_frame.width() << "x" << out_frame.height(); - TRACE_EVENT1("webrtc", "VCMGenericEncoder::Encode", "timestamp", - out_frame.timestamp()); + TRACE_EVENT2("webrtc", "webrtc::VideoEncoder::Encode", "rtp_timestamp", + out_frame.rtp_timestamp(), "storage_representation", + out_frame.video_frame_buffer()->storage_representation()); frame_encode_metadata_writer_.OnEncodeStarted(out_frame); + if (frame_instrumentation_generator_) { + frame_instrumentation_generator_->OnCapturedFrame(out_frame); + } + const int32_t encode_status = encoder_->Encode(out_frame, &next_frame_types_); was_encode_called_since_last_initialization_ = true; @@ -2053,11 +2119,11 @@ void VideoStreamEncoder::RequestRefreshFrame() { void VideoStreamEncoder::SendKeyFrame( const std::vector& layers) { - if (!encoder_queue_.IsCurrent()) { - encoder_queue_.PostTask([this, layers] { SendKeyFrame(layers); }); + if (!encoder_queue_->IsCurrent()) { + encoder_queue_->PostTask([this, layers] { SendKeyFrame(layers); }); return; } - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); TRACE_EVENT0("webrtc", "OnKeyFrameRequest"); RTC_DCHECK(!next_frame_types_.empty()); @@ -2082,13 +2148,13 @@ void VideoStreamEncoder::SendKeyFrame( void VideoStreamEncoder::OnLossNotification( const VideoEncoder::LossNotification& loss_notification) { - if (!encoder_queue_.IsCurrent()) { - encoder_queue_.PostTask( + if (!encoder_queue_->IsCurrent()) { + encoder_queue_->PostTask( [this, loss_notification] { OnLossNotification(loss_notification); }); return; } - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); if (encoder_) { encoder_->OnLossNotification(loss_notification); } @@ -2104,7 +2170,9 @@ EncodedImage VideoStreamEncoder::AugmentEncodedImage( // simulcast and spatial indices. 
int stream_idx = encoded_image.SpatialIndex().value_or( encoded_image.SimulcastIndex().value_or(0)); - frame_encode_metadata_writer_.FillTimingInfo(stream_idx, &image_copy); + + frame_encode_metadata_writer_.FillMetadataAndTimingInfo(stream_idx, + &image_copy); frame_encode_metadata_writer_.UpdateBitstream(codec_specific_info, &image_copy); VideoCodecType codec_type = codec_specific_info @@ -2117,12 +2185,12 @@ EncodedImage VideoStreamEncoder::AugmentEncodedImage( .Parse(codec_type, stream_idx, image_copy.data(), image_copy.size()) .value_or(-1); } + + TRACE_EVENT2("webrtc", "VideoStreamEncoder::AugmentEncodedImage", + "stream_idx", stream_idx, "qp", image_copy.qp_); RTC_LOG(LS_VERBOSE) << __func__ << " ntp time " << encoded_image.NtpTimeMs() << " stream_idx " << stream_idx << " qp " << image_copy.qp_; - image_copy.SetAtTargetQuality(codec_type == kVideoCodecVP8 && - image_copy.qp_ <= kVp8SteadyStateQpThreshold); - return image_copy; } @@ -2130,7 +2198,8 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( const EncodedImage& encoded_image, const CodecSpecificInfo* codec_specific_info) { TRACE_EVENT_INSTANT1("webrtc", "VCMEncodedFrameCallback::Encoded", - "timestamp", encoded_image.Timestamp()); + TRACE_EVENT_SCOPE_GLOBAL, "timestamp", + encoded_image.RtpTimestamp()); const size_t simulcast_index = encoded_image.SimulcastIndex().value_or(0); const VideoCodecType codec_type = codec_specific_info @@ -2143,45 +2212,67 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( // need to update on quality convergence. unsigned int image_width = image_copy._encodedWidth; unsigned int image_height = image_copy._encodedHeight; - encoder_queue_.PostTask([this, codec_type, image_width, image_height, - simulcast_index, - at_target_quality = image_copy.IsAtTargetQuality()] { - RTC_DCHECK_RUN_ON(&encoder_queue_); - - // Let the frame cadence adapter know about quality convergence. - if (frame_cadence_adapter_) - frame_cadence_adapter_->UpdateLayerQualityConvergence(simulcast_index, - at_target_quality); - - // Currently, the internal quality scaler is used for VP9 instead of the - // webrtc qp scaler (in the no-svc case or if only a single spatial layer is - // encoded). It has to be explicitly detected and reported to adaptation - // metrics. - if (codec_type == VideoCodecType::kVideoCodecVP9 && - send_codec_.VP9()->automaticResizeOn) { - unsigned int expected_width = send_codec_.width; - unsigned int expected_height = send_codec_.height; - int num_active_layers = 0; - for (int i = 0; i < send_codec_.VP9()->numberOfSpatialLayers; ++i) { - if (send_codec_.spatialLayers[i].active) { - ++num_active_layers; - expected_width = send_codec_.spatialLayers[i].width; - expected_height = send_codec_.spatialLayers[i].height; + encoder_queue_->PostTask( + [this, codec_type, image_width, image_height, simulcast_index, + qp = image_copy.qp_, + is_steady_state_refresh_frame = image_copy.IsSteadyStateRefreshFrame()] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); + + // Check if the encoded image has reached target quality. + bool at_target_quality = + quality_convergence_controller_.AddSampleAndCheckTargetQuality( + simulcast_index, qp, is_steady_state_refresh_frame); + + // Let the frame cadence adapter know about quality convergence. 
+ if (frame_cadence_adapter_) + frame_cadence_adapter_->UpdateLayerQualityConvergence( + simulcast_index, at_target_quality); + + // Currently, the internal quality scaler is used for VP9 instead of the + // webrtc qp scaler (in the no-svc case or if only a single spatial + // layer is encoded). It has to be explicitly detected and reported to + // adaptation metrics. + if (codec_type == VideoCodecType::kVideoCodecVP9 && + send_codec_.VP9()->automaticResizeOn) { + unsigned int expected_width = send_codec_.width; + unsigned int expected_height = send_codec_.height; + int num_active_layers = 0; + for (int i = 0; i < send_codec_.VP9()->numberOfSpatialLayers; ++i) { + if (send_codec_.spatialLayers[i].active) { + ++num_active_layers; + expected_width = send_codec_.spatialLayers[i].width; + expected_height = send_codec_.spatialLayers[i].height; + } + } + RTC_DCHECK_LE(num_active_layers, 1) + << "VP9 quality scaling is enabled for " + "SVC with several active layers."; + encoder_stats_observer_->OnEncoderInternalScalerUpdate( + image_width < expected_width || image_height < expected_height); } - } - RTC_DCHECK_LE(num_active_layers, 1) - << "VP9 quality scaling is enabled for " - "SVC with several active layers."; - encoder_stats_observer_->OnEncoderInternalScalerUpdate( - image_width < expected_width || image_height < expected_height); - } - }); + }); // Encoded is called on whatever thread the real encoder implementation run // on. In the case of hardware encoders, there might be several encoders // running in parallel on different threads. encoder_stats_observer_->OnSendEncodedImage(image_copy, codec_specific_info); + std::unique_ptr codec_specific_info_copy; + if (codec_specific_info && frame_instrumentation_generator_) { + std::optional< + std::variant> + frame_instrumentation_data = + frame_instrumentation_generator_->OnEncodedImage(image_copy); + RTC_CHECK(!codec_specific_info->frame_instrumentation_data.has_value()) + << "CodecSpecificInfo must not have frame_instrumentation_data set."; + if (frame_instrumentation_data.has_value()) { + codec_specific_info_copy = + std::make_unique(*codec_specific_info); + codec_specific_info_copy->frame_instrumentation_data = + frame_instrumentation_data; + codec_specific_info = codec_specific_info_copy.get(); + } + } EncodedImageCallback::Result result = sink_->OnEncodedImage(image_copy, codec_specific_info); @@ -2192,7 +2283,10 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( image_copy.ClearEncodedData(); int temporal_index = 0; - if (codec_specific_info) { + if (encoded_image.TemporalIndex()) { + // Give precedence to the metadata on EncodedImage, if available. 
+ temporal_index = *encoded_image.TemporalIndex(); + } else if (codec_specific_info) { if (codec_specific_info->codecType == kVideoCodecVP9) { temporal_index = codec_specific_info->codecSpecific.VP9.temporal_idx; } else if (codec_specific_info->codecType == kVideoCodecVP8) { @@ -2203,7 +2297,7 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( temporal_index = 0; } - RunPostEncode(image_copy, clock_->CurrentTime().us(), temporal_index, + RunPostEncode(image_copy, env_.clock().CurrentTime().us(), temporal_index, frame_size); if (result.error == Result::OK) { @@ -2223,26 +2317,16 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( } void VideoStreamEncoder::OnDroppedFrame(DropReason reason) { - switch (reason) { - case DropReason::kDroppedByMediaOptimizations: - encoder_stats_observer_->OnFrameDropped( - VideoStreamEncoderObserver::DropReason::kMediaOptimization); - break; - case DropReason::kDroppedByEncoder: - encoder_stats_observer_->OnFrameDropped( - VideoStreamEncoderObserver::DropReason::kEncoder); - break; - } sink_->OnDroppedFrame(reason); - encoder_queue_.PostTask([this, reason] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, reason] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); stream_resource_manager_.OnFrameDropped(reason); }); } DataRate VideoStreamEncoder::UpdateTargetBitrate(DataRate target_bitrate, double cwnd_reduce_ratio) { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); DataRate updated_target_bitrate = target_bitrate; // Drop frames when congestion window pushback ratio is larger than 1 @@ -2274,10 +2358,10 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, int64_t round_trip_time_ms, double cwnd_reduce_ratio) { RTC_DCHECK_GE(link_allocation, target_bitrate); - if (!encoder_queue_.IsCurrent()) { - encoder_queue_.PostTask([this, target_bitrate, stable_target_bitrate, - link_allocation, fraction_lost, round_trip_time_ms, - cwnd_reduce_ratio] { + if (!encoder_queue_->IsCurrent()) { + encoder_queue_->PostTask([this, target_bitrate, stable_target_bitrate, + link_allocation, fraction_lost, + round_trip_time_ms, cwnd_reduce_ratio] { DataRate updated_target_bitrate = UpdateTargetBitrate(target_bitrate, cwnd_reduce_ratio); OnBitrateUpdated(updated_target_bitrate, stable_target_bitrate, @@ -2286,7 +2370,7 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, }); return; } - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); const bool video_is_suspended = target_bitrate == DataRate::Zero(); const bool video_suspension_changed = video_is_suspended != EncoderPaused(); @@ -2334,7 +2418,7 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, !DropDueToSize(pending_frame_->size())) { // A pending stored frame can be processed. 
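
In the OnEncodedImage changes above, the hard-coded VP8 steady-state QP threshold is gone; each layer's QP is fed to a per-stream quality convergence check on the encoder queue, and the verdict is forwarded to the frame cadence adapter. A stand-in sketch of that flow (QualityChecker is illustrative and much simpler than the real QualityConvergenceController):

  // Illustrative: declares convergence once refresh frames reach a target QP.
  class QualityChecker {
   public:
    explicit QualityChecker(int target_qp) : target_qp_(target_qp) {}

    bool AddSampleAndCheckTargetQuality(int qp, bool is_refresh_frame) {
      if (!is_refresh_frame)
        converged_ = false;  // New content arrived; quality must be re-proven.
      else if (qp >= 0 && qp <= target_qp_)
        converged_ = true;   // Repeated refresh frames already look good.
      return converged_;
    }

   private:
    const int target_qp_;
    bool converged_ = false;
  };

  // Per encoded image, roughly what the posted task above does:
  //   bool at_target = checker.AddSampleAndCheckTargetQuality(
  //       qp, is_steady_state_refresh_frame);
  //   frame_cadence_adapter->UpdateLayerQualityConvergence(simulcast_index,
  //                                                        at_target);
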
int64_t pending_time_us = - clock_->CurrentTime().us() - pending_frame_post_time_us_; + env_.clock().CurrentTime().us() - pending_frame_post_time_us_; if (pending_time_us < kPendingFrameTimeoutMs * 1000) EncodeVideoFrame(*pending_frame_, pending_frame_post_time_us_); pending_frame_.reset(); @@ -2362,7 +2446,7 @@ bool VideoStreamEncoder::DropDueToSize(uint32_t source_pixel_count) const { stream_resource_manager_.UseBandwidthAllocationBps().value_or( encoder_target_bitrate_bps_.value()); - absl::optional encoder_bitrate_limits = + std::optional encoder_bitrate_limits = GetEncoderInfoWithBitrateLimitUpdate( encoder_->GetEncoderInfo(), encoder_config_, default_limits_allowed_) .GetEncoderBitrateLimitsForResolution(pixel_count); @@ -2384,17 +2468,37 @@ bool VideoStreamEncoder::DropDueToSize(uint32_t source_pixel_count) const { void VideoStreamEncoder::OnVideoSourceRestrictionsUpdated( VideoSourceRestrictions restrictions, const VideoAdaptationCounters& adaptation_counters, - rtc::scoped_refptr reason, + scoped_refptr reason, const VideoSourceRestrictions& unfiltered_restrictions) { - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); RTC_LOG(LS_INFO) << "Updating sink restrictions from " << (reason ? reason->Name() : std::string("")) << " to " << restrictions.ToString(); + if (frame_cadence_adapter_) { + frame_cadence_adapter_->UpdateVideoSourceRestrictions( + restrictions.max_frame_rate()); + } + + bool max_pixels_updated = + (latest_restrictions_.has_value() + ? latest_restrictions_->max_pixels_per_frame() + : std::nullopt) != restrictions.max_pixels_per_frame(); + // TODO(webrtc:14451) Split video_source_sink_controller_ // so that ownership on restrictions/wants is kept on &encoder_queue_ latest_restrictions_ = restrictions; + // When the `scale_resolution_down_to` API is used, we need to reconfigure any + // time the restricted resolution is updated. When that API isn't used, the + // encoder settings are relative to the frame size and reconfiguration happens + // automatically on new frame size and we don't need to reconfigure here. + if (encoder_ && max_pixels_updated && + encoder_config_.HasScaleResolutionDownTo()) { + // The encoder will be reconfigured on the next frame. + pending_encoder_reconfiguration_ = true; + } + worker_queue_->PostTask(SafeTask( task_safety_.flag(), [this, restrictions = std::move(restrictions)]() { RTC_DCHECK_RUN_ON(worker_queue_); @@ -2407,17 +2511,17 @@ void VideoStreamEncoder::RunPostEncode(const EncodedImage& encoded_image, int64_t time_sent_us, int temporal_index, DataSize frame_size) { - if (!encoder_queue_.IsCurrent()) { - encoder_queue_.PostTask([this, encoded_image, time_sent_us, temporal_index, - frame_size] { + if (!encoder_queue_->IsCurrent()) { + encoder_queue_->PostTask([this, encoded_image, time_sent_us, temporal_index, + frame_size] { RunPostEncode(encoded_image, time_sent_us, temporal_index, frame_size); }); return; } - RTC_DCHECK_RUN_ON(&encoder_queue_); + RTC_DCHECK_RUN_ON(encoder_queue_.get()); - absl::optional encode_duration_us; + std::optional encode_duration_us; if (encoded_image.timing_.flags != VideoSendTiming::kInvalid) { encode_duration_us = TimeDelta::Millis(encoded_image.timing_.encode_finish_ms - @@ -2441,8 +2545,8 @@ void VideoStreamEncoder::RunPostEncode(const EncodedImage& encoded_image, // TODO(https://crbug.com/webrtc/14891): If we want to support a mix of // simulcast and SVC we'll also need to consider the case where we have both // simulcast and spatial indices. 
- int stream_index = encoded_image.SpatialIndex().value_or( - encoded_image.SimulcastIndex().value_or(0)); + int stream_index = std::max(encoded_image.SimulcastIndex().value_or(0), + encoded_image.SpatialIndex().value_or(0)); bitrate_adjuster_->OnEncodedFrame(frame_size, stream_index, temporal_index); } } @@ -2453,122 +2557,15 @@ void VideoStreamEncoder::ReleaseEncoder() { } encoder_->Release(); encoder_initialized_ = false; + frame_instrumentation_generator_ = nullptr; TRACE_EVENT0("webrtc", "VCMGenericEncoder::Release"); } -VideoStreamEncoder::AutomaticAnimationDetectionExperiment -VideoStreamEncoder::ParseAutomatincAnimationDetectionFieldTrial() const { - AutomaticAnimationDetectionExperiment result; - - result.Parser()->Parse( - field_trials_.Lookup("WebRTC-AutomaticAnimationDetectionScreenshare")); - - if (!result.enabled) { - RTC_LOG(LS_INFO) << "Automatic animation detection experiment is disabled."; - return result; - } - - RTC_LOG(LS_INFO) << "Automatic animation detection experiment settings:" - " min_duration_ms=" - << result.min_duration_ms - << " min_area_ration=" << result.min_area_ratio - << " min_fps=" << result.min_fps; - - return result; -} - -void VideoStreamEncoder::CheckForAnimatedContent( - const VideoFrame& frame, - int64_t time_when_posted_in_us) { - if (!automatic_animation_detection_experiment_.enabled || - encoder_config_.content_type != - VideoEncoderConfig::ContentType::kScreen || - stream_resource_manager_.degradation_preference() != - DegradationPreference::BALANCED) { - return; - } - - if (expect_resize_state_ == ExpectResizeState::kResize && last_frame_info_ && - last_frame_info_->width != frame.width() && - last_frame_info_->height != frame.height()) { - // On applying resolution cap there will be one frame with no/different - // update, which should be skipped. - // It can be delayed by several frames. - expect_resize_state_ = ExpectResizeState::kFirstFrameAfterResize; - return; - } - - if (expect_resize_state_ == ExpectResizeState::kFirstFrameAfterResize) { - // The first frame after resize should have new, scaled update_rect. - if (frame.has_update_rect()) { - last_update_rect_ = frame.update_rect(); - } else { - last_update_rect_ = absl::nullopt; - } - expect_resize_state_ = ExpectResizeState::kNoResize; - } - - bool should_cap_resolution = false; - if (!frame.has_update_rect()) { - last_update_rect_ = absl::nullopt; - animation_start_time_ = Timestamp::PlusInfinity(); - } else if ((!last_update_rect_ || - frame.update_rect() != *last_update_rect_)) { - last_update_rect_ = frame.update_rect(); - animation_start_time_ = Timestamp::Micros(time_when_posted_in_us); - } else { - TimeDelta animation_duration = - Timestamp::Micros(time_when_posted_in_us) - animation_start_time_; - float area_ratio = static_cast(last_update_rect_->width * - last_update_rect_->height) / - (frame.width() * frame.height()); - if (animation_duration.ms() >= - automatic_animation_detection_experiment_.min_duration_ms && - area_ratio >= - automatic_animation_detection_experiment_.min_area_ratio && - encoder_stats_observer_->GetInputFrameRate() >= - automatic_animation_detection_experiment_.min_fps) { - should_cap_resolution = true; - } - } - if (cap_resolution_due_to_video_content_ != should_cap_resolution) { - expect_resize_state_ = should_cap_resolution ? 
ExpectResizeState::kResize - : ExpectResizeState::kNoResize; - cap_resolution_due_to_video_content_ = should_cap_resolution; - if (should_cap_resolution) { - RTC_LOG(LS_INFO) << "Applying resolution cap due to animation detection."; - } else { - RTC_LOG(LS_INFO) << "Removing resolution cap due to no consistent " - "animation detection."; - } - // TODO(webrtc:14451) Split video_source_sink_controller_ - // so that ownership on restrictions/wants is kept on &encoder_queue_ - if (should_cap_resolution) { - animate_restrictions_ = - VideoSourceRestrictions(kMaxAnimationPixels, - /* target_pixels_per_frame= */ absl::nullopt, - /* max_frame_rate= */ absl::nullopt); - } else { - animate_restrictions_.reset(); - } - - worker_queue_->PostTask( - SafeTask(task_safety_.flag(), [this, should_cap_resolution]() { - RTC_DCHECK_RUN_ON(worker_queue_); - video_source_sink_controller_.SetPixelsPerFrameUpperLimit( - should_cap_resolution - ? absl::optional(kMaxAnimationPixels) - : absl::nullopt); - video_source_sink_controller_.PushSourceSinkSettings(); - })); - } -} - void VideoStreamEncoder::InjectAdaptationResource( - rtc::scoped_refptr resource, + scoped_refptr resource, VideoAdaptationReason reason) { - encoder_queue_.PostTask([this, resource = std::move(resource), reason] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + encoder_queue_->PostTask([this, resource = std::move(resource), reason] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); additional_resources_.push_back(resource); stream_resource_manager_.AddResource(resource, reason); }); @@ -2576,9 +2573,9 @@ void VideoStreamEncoder::InjectAdaptationResource( void VideoStreamEncoder::InjectAdaptationConstraint( AdaptationConstraint* adaptation_constraint) { - rtc::Event event; - encoder_queue_.PostTask([this, adaptation_constraint, &event] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + Event event; + encoder_queue_->PostTask([this, adaptation_constraint, &event] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); if (!resource_adaptation_processor_) { // The VideoStreamEncoder was stopped and the processor destroyed before // this task had a chance to execute. No action needed. 
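
The OnVideoSourceRestrictionsUpdated hunk above schedules a lazy encoder reconfiguration when the max-pixels restriction changes and the stream is configured through the scale_resolution_down_to API, because in that mode the encoder settings are not derived from the incoming frame size. A reduced sketch of that trigger, with stand-in types:

  #include <optional>

  struct Restrictions {
    std::optional<int> max_pixels_per_frame;
  };

  struct ReconfigureTrigger {
    std::optional<Restrictions> latest_restrictions;
    bool uses_scale_resolution_down_to = false;  // From the encoder config.
    bool pending_encoder_reconfiguration = false;

    void OnRestrictionsUpdated(const Restrictions& restrictions,
                               bool encoder_exists) {
      bool max_pixels_updated =
          (latest_restrictions ? latest_restrictions->max_pixels_per_frame
                               : std::nullopt) !=
          restrictions.max_pixels_per_frame;
      latest_restrictions = restrictions;
      // Without scale_resolution_down_to, a changed frame size triggers the
      // reconfiguration by itself, so nothing more is needed here.
      if (encoder_exists && max_pixels_updated && uses_scale_resolution_down_to)
        pending_encoder_reconfiguration = true;  // Applied on the next frame.
    }
  };
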
@@ -2588,31 +2585,43 @@ void VideoStreamEncoder::InjectAdaptationConstraint( video_stream_adapter_->AddAdaptationConstraint(adaptation_constraint); event.Set(); }); - event.Wait(rtc::Event::kForever); + event.Wait(Event::kForever); } void VideoStreamEncoder::AddRestrictionsListenerForTesting( VideoSourceRestrictionsListener* restrictions_listener) { - rtc::Event event; - encoder_queue_.PostTask([this, restrictions_listener, &event] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + Event event; + encoder_queue_->PostTask([this, restrictions_listener, &event] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); RTC_DCHECK(resource_adaptation_processor_); video_stream_adapter_->AddRestrictionsListener(restrictions_listener); event.Set(); }); - event.Wait(rtc::Event::kForever); + event.Wait(Event::kForever); } void VideoStreamEncoder::RemoveRestrictionsListenerForTesting( VideoSourceRestrictionsListener* restrictions_listener) { - rtc::Event event; - encoder_queue_.PostTask([this, restrictions_listener, &event] { - RTC_DCHECK_RUN_ON(&encoder_queue_); + Event event; + encoder_queue_->PostTask([this, restrictions_listener, &event] { + RTC_DCHECK_RUN_ON(encoder_queue_.get()); RTC_DCHECK(resource_adaptation_processor_); video_stream_adapter_->RemoveRestrictionsListener(restrictions_listener); event.Set(); }); - event.Wait(rtc::Event::kForever); + event.Wait(Event::kForever); +} + +// RTC_RUN_ON(&encoder_queue_) +void VideoStreamEncoder::ProcessDroppedFrame( + const VideoFrame& frame, + VideoStreamEncoderObserver::DropReason reason) { + accumulated_update_rect_.Union(frame.update_rect()); + accumulated_update_rect_is_valid_ &= frame.has_update_rect(); + if (auto converted_reason = MaybeConvertDropReason(reason)) { + OnDroppedFrame(*converted_reason); + } + encoder_stats_observer_->OnFrameDropped(reason); } } // namespace webrtc diff --git a/video/video_stream_encoder.h b/video/video_stream_encoder.h index 68f008afc0..917d928149 100644 --- a/video/video_stream_encoder.h +++ b/video/video_stream_encoder.h @@ -19,7 +19,7 @@ #include "absl/container/inlined_vector.h" #include "api/adaptation/resource.h" -#include "api/field_trials_view.h" +#include "api/environment/environment.h" #include "api/rtp_sender_interface.h" #include "api/sequence_checker.h" #include "api/task_queue/pending_task_safety_flag.h" @@ -42,13 +42,13 @@ #include "rtc_base/numerics/exp_filter.h" #include "rtc_base/race_checker.h" #include "rtc_base/rate_statistics.h" -#include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" -#include "system_wrappers/include/clock.h" #include "video/adaptation/video_stream_encoder_resource_manager.h" +#include "video/corruption_detection/frame_instrumentation_generator.h" #include "video/encoder_bitrate_adjuster.h" #include "video/frame_cadence_adapter.h" #include "video/frame_encode_metadata_writer.h" +#include "video/quality_convergence_controller.h" #include "video/video_source_sink_controller.h" #include "video/video_stream_encoder_interface.h" #include "video/video_stream_encoder_observer.h" @@ -75,7 +75,7 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, kVideoLayersAllocation }; VideoStreamEncoder( - Clock* clock, + const Environment& env, uint32_t number_of_cores, VideoStreamEncoderObserver* encoder_stats_observer, const VideoStreamEncoderSettings& settings, @@ -84,7 +84,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, std::unique_ptr encoder_queue, BitrateAllocationCallbackType allocation_cb_type, - const FieldTrialsView& field_trials, 
webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector = nullptr); ~VideoStreamEncoder() override; @@ -92,10 +91,10 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, VideoStreamEncoder(const VideoStreamEncoder&) = delete; VideoStreamEncoder& operator=(const VideoStreamEncoder&) = delete; - void AddAdaptationResource(rtc::scoped_refptr resource) override; - std::vector> GetAdaptationResources() override; + void AddAdaptationResource(scoped_refptr resource) override; + std::vector> GetAdaptationResources() override; - void SetSource(rtc::VideoSourceInterface* source, + void SetSource(VideoSourceInterface* source, const DegradationPreference& degradation_preference) override; void SetSink(EncoderSink* sink, bool rotation_applied) override; @@ -132,19 +131,21 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, double cwnd_reduce_ratio); protected: + friend class VideoStreamEncoderFrameCadenceRestrictionTest; + // Used for testing. For example the `ScalingObserverInterface` methods must // be called on `encoder_queue_`. - TaskQueueBase* encoder_queue() { return encoder_queue_.Get(); } + TaskQueueBase* encoder_queue() { return encoder_queue_.get(); } void OnVideoSourceRestrictionsUpdated( VideoSourceRestrictions restrictions, const VideoAdaptationCounters& adaptation_counters, - rtc::scoped_refptr reason, + scoped_refptr reason, const VideoSourceRestrictions& unfiltered_restrictions) override; // Used for injected test resources. // TODO(eshr): Move all adaptation tests out of VideoStreamEncoder tests. - void InjectAdaptationResource(rtc::scoped_refptr resource, + void InjectAdaptationResource(scoped_refptr resource, VideoAdaptationReason reason); void InjectAdaptationConstraint(AdaptationConstraint* adaptation_constraint); @@ -160,10 +161,9 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, : video_stream_encoder_(video_stream_encoder) {} // FrameCadenceAdapterInterface::Callback overrides. void OnFrame(Timestamp post_time, - int frames_scheduled_for_processing, + bool queue_overload, const VideoFrame& frame) override { - video_stream_encoder_.OnFrame(post_time, frames_scheduled_for_processing, - frame); + video_stream_encoder_.OnFrame(post_time, queue_overload, frame); } void OnDiscardedFrame() override { video_stream_encoder_.OnDiscardedFrame(); @@ -209,10 +209,10 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, class DegradationPreferenceManager; - void ReconfigureEncoder() RTC_RUN_ON(&encoder_queue_); - void OnEncoderSettingsChanged() RTC_RUN_ON(&encoder_queue_); + void ReconfigureEncoder() RTC_RUN_ON(encoder_queue_); + void OnEncoderSettingsChanged() RTC_RUN_ON(encoder_queue_); void OnFrame(Timestamp post_time, - int frames_scheduled_for_processing, + bool queue_overload, const VideoFrame& video_frame); void OnDiscardedFrame(); void RequestRefreshFrame(); @@ -224,7 +224,7 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, int64_t time_when_posted_in_ms); // Indicates whether frame should be dropped because the pixel count is too // large for the current bitrate configuration. - bool DropDueToSize(uint32_t pixel_count) const RTC_RUN_ON(&encoder_queue_); + bool DropDueToSize(uint32_t pixel_count) const RTC_RUN_ON(encoder_queue_); // Implements EncodedImageCallback. 
EncodedImageCallback::Result OnEncodedImage( @@ -240,25 +240,21 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // Returns a copy of `rate_settings` with the `bitrate` field updated using // the current VideoBitrateAllocator. EncoderRateSettings UpdateBitrateAllocation( - const EncoderRateSettings& rate_settings) RTC_RUN_ON(&encoder_queue_); + const EncoderRateSettings& rate_settings) RTC_RUN_ON(encoder_queue_); - uint32_t GetInputFramerateFps() RTC_RUN_ON(&encoder_queue_); + uint32_t GetInputFramerateFps() RTC_RUN_ON(encoder_queue_); void SetEncoderRates(const EncoderRateSettings& rate_settings) - RTC_RUN_ON(&encoder_queue_); + RTC_RUN_ON(encoder_queue_); void RunPostEncode(const EncodedImage& encoded_image, int64_t time_sent_us, int temporal_index, DataSize frame_size); - void ReleaseEncoder() RTC_RUN_ON(&encoder_queue_); + void ReleaseEncoder() RTC_RUN_ON(encoder_queue_); // After calling this function `resource_adaptation_processor_` will be null. void ShutdownResourceAdaptationQueue(); - void CheckForAnimatedContent(const VideoFrame& frame, - int64_t time_when_posted_in_ms) - RTC_RUN_ON(&encoder_queue_); - - void RequestEncoderSwitch() RTC_RUN_ON(&encoder_queue_); + void RequestEncoderSwitch() RTC_RUN_ON(encoder_queue_); // Augments an EncodedImage received from an encoder with parsable // information. @@ -266,12 +262,16 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, const EncodedImage& encoded_image, const CodecSpecificInfo* codec_specific_info); - const FieldTrialsView& field_trials_; + void ProcessDroppedFrame(const VideoFrame& frame, + VideoStreamEncoderObserver::DropReason reason) + RTC_RUN_ON(encoder_queue_); + + const Environment env_; TaskQueueBase* const worker_queue_; const int number_of_cores_; - EncoderSink* sink_; + EncoderSink* sink_ = nullptr; const VideoStreamEncoderSettings settings_; const BitrateAllocationCallbackType allocation_cb_type_; const RateControlSettings rate_control_settings_; @@ -287,147 +287,112 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, VideoStreamEncoderObserver* const encoder_stats_observer_; // Adapter that avoids public inheritance of the cadence adapter's callback // interface. - CadenceCallback cadence_callback_; + CadenceCallback cadence_callback_{*this}; // Frame cadence encoder adapter. Frames enter this adapter first, and it then // forwards them to our OnFrame method. std::unique_ptr frame_cadence_adapter_ - RTC_GUARDED_BY(&encoder_queue_) RTC_PT_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_) RTC_PT_GUARDED_BY(encoder_queue_); - VideoEncoderConfig encoder_config_ RTC_GUARDED_BY(&encoder_queue_); - std::unique_ptr encoder_ RTC_GUARDED_BY(&encoder_queue_) - RTC_PT_GUARDED_BY(&encoder_queue_); - bool encoder_initialized_; + VideoEncoderConfig encoder_config_ RTC_GUARDED_BY(encoder_queue_); + std::unique_ptr encoder_ RTC_GUARDED_BY(encoder_queue_) + RTC_PT_GUARDED_BY(encoder_queue_); + bool encoder_initialized_ = false; std::unique_ptr rate_allocator_ - RTC_GUARDED_BY(&encoder_queue_) RTC_PT_GUARDED_BY(&encoder_queue_); - int max_framerate_ RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_) RTC_PT_GUARDED_BY(encoder_queue_); + int max_framerate_ RTC_GUARDED_BY(encoder_queue_) = -1; // Set when ConfigureEncoder has been called in order to lazy reconfigure the // encoder on the next frame. 
- bool pending_encoder_reconfiguration_ RTC_GUARDED_BY(&encoder_queue_); + bool pending_encoder_reconfiguration_ RTC_GUARDED_BY(encoder_queue_) = false; // Set when configuration must create a new encoder object, e.g., // because of a codec change. - bool pending_encoder_creation_ RTC_GUARDED_BY(&encoder_queue_); + bool pending_encoder_creation_ RTC_GUARDED_BY(encoder_queue_) = false; absl::InlinedVector encoder_configuration_callbacks_ - RTC_GUARDED_BY(&encoder_queue_); - - absl::optional last_frame_info_ - RTC_GUARDED_BY(&encoder_queue_); - int crop_width_ RTC_GUARDED_BY(&encoder_queue_); - int crop_height_ RTC_GUARDED_BY(&encoder_queue_); - absl::optional encoder_target_bitrate_bps_ - RTC_GUARDED_BY(&encoder_queue_); - size_t max_data_payload_length_ RTC_GUARDED_BY(&encoder_queue_); - absl::optional last_encoder_rate_settings_ - RTC_GUARDED_BY(&encoder_queue_); - bool encoder_paused_and_dropped_frame_ RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); + + std::optional last_frame_info_ RTC_GUARDED_BY(encoder_queue_); + int crop_width_ RTC_GUARDED_BY(encoder_queue_) = 0; + int crop_height_ RTC_GUARDED_BY(encoder_queue_) = 0; + std::optional encoder_target_bitrate_bps_ + RTC_GUARDED_BY(encoder_queue_); + size_t max_data_payload_length_ RTC_GUARDED_BY(encoder_queue_) = 0; + std::optional last_encoder_rate_settings_ + RTC_GUARDED_BY(encoder_queue_); + bool encoder_paused_and_dropped_frame_ RTC_GUARDED_BY(encoder_queue_) = false; // Set to true if at least one frame was sent to encoder since last encoder // initialization. bool was_encode_called_since_last_initialization_ - RTC_GUARDED_BY(&encoder_queue_); - - bool encoder_failed_ RTC_GUARDED_BY(&encoder_queue_); - Clock* const clock_; + RTC_GUARDED_BY(encoder_queue_) = false; // Used to make sure incoming time stamp is increasing for every frame. - int64_t last_captured_timestamp_ RTC_GUARDED_BY(&encoder_queue_); + int64_t last_captured_timestamp_ RTC_GUARDED_BY(encoder_queue_) = 0; // Delta used for translating between NTP and internal timestamps. - const int64_t delta_ntp_internal_ms_ RTC_GUARDED_BY(&encoder_queue_); + const int64_t delta_ntp_internal_ms_ RTC_GUARDED_BY(encoder_queue_); - int64_t last_frame_log_ms_ RTC_GUARDED_BY(&encoder_queue_); - int captured_frame_count_ RTC_GUARDED_BY(&encoder_queue_); - int dropped_frame_cwnd_pushback_count_ RTC_GUARDED_BY(&encoder_queue_); - int dropped_frame_encoder_block_count_ RTC_GUARDED_BY(&encoder_queue_); - absl::optional pending_frame_ RTC_GUARDED_BY(&encoder_queue_); - int64_t pending_frame_post_time_us_ RTC_GUARDED_BY(&encoder_queue_); + int64_t last_frame_log_ms_ RTC_GUARDED_BY(encoder_queue_); + int captured_frame_count_ RTC_GUARDED_BY(encoder_queue_) = 0; + int dropped_frame_cwnd_pushback_count_ RTC_GUARDED_BY(encoder_queue_) = 0; + int dropped_frame_encoder_block_count_ RTC_GUARDED_BY(encoder_queue_) = 0; + std::optional pending_frame_ RTC_GUARDED_BY(encoder_queue_); + int64_t pending_frame_post_time_us_ RTC_GUARDED_BY(encoder_queue_) = 0; VideoFrame::UpdateRect accumulated_update_rect_ - RTC_GUARDED_BY(&encoder_queue_); - bool accumulated_update_rect_is_valid_ RTC_GUARDED_BY(&encoder_queue_); - - // Used for automatic content type detection. - absl::optional last_update_rect_ - RTC_GUARDED_BY(&encoder_queue_); - Timestamp animation_start_time_ RTC_GUARDED_BY(&encoder_queue_); - bool cap_resolution_due_to_video_content_ RTC_GUARDED_BY(&encoder_queue_); - // Used to correctly ignore changes in update_rect introduced by - // resize triggered by animation detection. 
- enum class ExpectResizeState { - kNoResize, // Normal operation. - kResize, // Resize was triggered by the animation detection. - kFirstFrameAfterResize // Resize observed. - } expect_resize_state_ RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); + bool accumulated_update_rect_is_valid_ RTC_GUARDED_BY(encoder_queue_) = true; FecControllerOverride* fec_controller_override_ - RTC_GUARDED_BY(&encoder_queue_); - absl::optional last_parameters_update_ms_ - RTC_GUARDED_BY(&encoder_queue_); - absl::optional last_encode_info_ms_ RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_) = nullptr; + std::optional last_parameters_update_ms_ + RTC_GUARDED_BY(encoder_queue_); + std::optional last_encode_info_ms_ RTC_GUARDED_BY(encoder_queue_); - VideoEncoder::EncoderInfo encoder_info_ RTC_GUARDED_BY(&encoder_queue_); - VideoCodec send_codec_ RTC_GUARDED_BY(&encoder_queue_); + VideoEncoder::EncoderInfo encoder_info_ RTC_GUARDED_BY(encoder_queue_); + VideoCodec send_codec_ RTC_GUARDED_BY(encoder_queue_); - FrameDropper frame_dropper_ RTC_GUARDED_BY(&encoder_queue_); + FrameDropper frame_dropper_ RTC_GUARDED_BY(encoder_queue_); // If frame dropper is not force disabled, frame dropping might still be // disabled if VideoEncoder::GetEncoderInfo() indicates that the encoder has a // trusted rate controller. This is determined on a per-frame basis, as the // encoder behavior might dynamically change. - bool force_disable_frame_dropper_ RTC_GUARDED_BY(&encoder_queue_); + bool force_disable_frame_dropper_ RTC_GUARDED_BY(encoder_queue_) = false; // Incremented on worker thread whenever `frame_dropper_` determines that a // frame should be dropped. Decremented on whichever thread runs // OnEncodedImage(), which is only called by one thread but not necessarily // the worker thread. - std::atomic pending_frame_drops_; + std::atomic pending_frame_drops_{0}; // Congestion window frame drop ratio (drop 1 in every // cwnd_frame_drop_interval_ frames). - absl::optional cwnd_frame_drop_interval_ RTC_GUARDED_BY(&encoder_queue_); + std::optional cwnd_frame_drop_interval_ RTC_GUARDED_BY(encoder_queue_); // Frame counter for congestion window frame drop. - int cwnd_frame_counter_ RTC_GUARDED_BY(&encoder_queue_); + int cwnd_frame_counter_ RTC_GUARDED_BY(encoder_queue_) = 0; std::unique_ptr bitrate_adjuster_ - RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); // TODO(sprang): Change actually support keyframe per simulcast stream, or // turn this into a simple bool `pending_keyframe_request_`. - std::vector next_frame_types_ RTC_GUARDED_BY(&encoder_queue_); - - FrameEncodeMetadataWriter frame_encode_metadata_writer_; - - struct AutomaticAnimationDetectionExperiment { - bool enabled = false; - int min_duration_ms = 2000; - double min_area_ratio = 0.8; - int min_fps = 10; - std::unique_ptr Parser() { - return StructParametersParser::Create( - "enabled", &enabled, // - "min_duration_ms", &min_duration_ms, // - "min_area_ratio", &min_area_ratio, // - "min_fps", &min_fps); - } - }; - - AutomaticAnimationDetectionExperiment - ParseAutomatincAnimationDetectionFieldTrial() const; + std::vector next_frame_types_ RTC_GUARDED_BY(encoder_queue_); - AutomaticAnimationDetectionExperiment - automatic_animation_detection_experiment_ RTC_GUARDED_BY(&encoder_queue_); + FrameEncodeMetadataWriter frame_encode_metadata_writer_{this}; // Provides video stream input states: current resolution and frame rate. 
VideoStreamInputStateProvider input_state_provider_; + bool encoder_fallback_requested_ RTC_GUARDED_BY(encoder_queue_) = false; + const std::unique_ptr video_stream_adapter_ - RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); // Responsible for adapting input resolution or frame rate to ensure resources // (e.g. CPU or bandwidth) are not overused. Adding resources can occur on any // thread. std::unique_ptr - resource_adaptation_processor_ RTC_GUARDED_BY(&encoder_queue_); + resource_adaptation_processor_ RTC_GUARDED_BY(encoder_queue_); std::unique_ptr degradation_preference_manager_ - RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); std::vector adaptation_constraints_ - RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); // Handles input, output and stats reporting related to VideoStreamEncoder // specific resources, such as "encode usage percent" measurements and "QP // scaling". Also involved with various mitigations such as initial frame @@ -436,9 +401,9 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // tied to the VideoStreamEncoder (which is destroyed off the encoder queue) // and its resource list is accessible from any thread. VideoStreamEncoderResourceManager stream_resource_manager_ - RTC_GUARDED_BY(&encoder_queue_); - std::vector> additional_resources_ - RTC_GUARDED_BY(&encoder_queue_); + RTC_GUARDED_BY(encoder_queue_); + std::vector> additional_resources_ + RTC_GUARDED_BY(encoder_queue_); // Carries out the VideoSourceRestrictions provided by the // ResourceAdaptationProcessor, i.e. reconfigures the source of video frames // to provide us with different resolution or frame rate. @@ -454,25 +419,27 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, QpParser qp_parser_; const bool qp_parsing_allowed_; + // The quality convergence controller is used to determine if a codec has + // reached its target quality. This is used for screenshare to determine when + // there's no need to continue encoding the same repeated frame. + QualityConvergenceController quality_convergence_controller_ + RTC_GUARDED_BY(encoder_queue_); + // Enables encoder switching on initialization failures. bool switch_encoder_on_init_failures_; - const absl::optional vp9_low_tier_core_threshold_; - const absl::optional experimental_encoder_thread_limit_; + const std::optional vp9_low_tier_core_threshold_; + const std::optional experimental_encoder_thread_limit_; - // These are copies of restrictions (glorified max_pixel_count) set by - // a) OnVideoSourceRestrictionsUpdated - // b) CheckForAnimatedContent - // They are used to scale down encoding resolution if needed when using - // requested_resolution. + // This is a copy of restrictions (glorified max_pixel_count) set by + // OnVideoSourceRestrictionsUpdated. It is used to scale down encoding + // resolution if needed when using requested_resolution. // // TODO(webrtc:14451) Split video_source_sink_controller_ // so that ownership on restrictions/wants is kept on &encoder_queue_, that // these extra copies would not be needed. - absl::optional latest_restrictions_ - RTC_GUARDED_BY(&encoder_queue_); - absl::optional animate_restrictions_ - RTC_GUARDED_BY(&encoder_queue_); + std::optional latest_restrictions_ + RTC_GUARDED_BY(encoder_queue_); // Used to cancel any potentially pending tasks to the worker thread. 
// Refrenced by tasks running on `encoder_queue_` so need to be destroyed @@ -480,9 +447,11 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // `worker_queue_`. ScopedTaskSafety task_safety_; - // Public methods are proxied to the task queues. The queues must be destroyed - // first to make sure no tasks run that use other members. - rtc::TaskQueue encoder_queue_; + std::unique_ptr encoder_queue_; + + // Required for automatic corruption detection. + std::unique_ptr + frame_instrumentation_generator_; }; } // namespace webrtc diff --git a/video/video_stream_encoder_interface.h b/video/video_stream_encoder_interface.h index 25190aa474..05da612f26 100644 --- a/video/video_stream_encoder_interface.h +++ b/video/video_stream_encoder_interface.h @@ -67,9 +67,8 @@ class VideoStreamEncoderInterface { // TODO(https://crbug.com/webrtc/11565): When the ResourceAdaptationProcessor // is moved to Call this method could be deleted altogether in favor of // Call-level APIs only. - virtual void AddAdaptationResource(rtc::scoped_refptr resource) = 0; - virtual std::vector> - GetAdaptationResources() = 0; + virtual void AddAdaptationResource(scoped_refptr resource) = 0; + virtual std::vector> GetAdaptationResources() = 0; // Sets the source that will provide video frames to the VideoStreamEncoder's // OnFrame method. `degradation_preference` control whether or not resolution @@ -79,7 +78,7 @@ class VideoStreamEncoderInterface { // TODO(bugs.webrtc.org/14246): When adaptation logic is extracted from this // class, it no longer needs to know the source. virtual void SetSource( - rtc::VideoSourceInterface* source, + VideoSourceInterface* source, const DegradationPreference& degradation_preference) = 0; // Sets the `sink` that gets the encoded frames. `rotation_applied` means diff --git a/video/video_stream_encoder_observer.h b/video/video_stream_encoder_observer.h index c10412181d..95ca5fa887 100644 --- a/video/video_stream_encoder_observer.h +++ b/video/video_stream_encoder_observer.h @@ -58,6 +58,7 @@ class VideoStreamEncoderObserver : public CpuOveruseMetricsObserver { enum class DropReason { kSource, + kBadTimestamp, kEncoderQueue, kEncoder, kMediaOptimization, diff --git a/video/video_stream_encoder_unittest.cc b/video/video_stream_encoder_unittest.cc index 44fc53f90d..17f4ea1a2c 100644 --- a/video/video_stream_encoder_unittest.cc +++ b/video/video_stream_encoder_unittest.cc @@ -1,4 +1,3 @@ - /* * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. 
* @@ -11,54 +10,97 @@ #include "video/video_stream_encoder.h" #include +#include +#include +#include #include #include -#include +#include +#include #include - -#include "absl/memory/memory.h" +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/container/inlined_vector.h" +#include "absl/functional/any_invocable.h" +#include "api/adaptation/resource.h" +#include "api/array_view.h" +#include "api/environment/environment.h" +#include "api/environment/environment_factory.h" #include "api/field_trials_view.h" +#include "api/location.h" +#include "api/make_ref_counted.h" #include "api/rtp_parameters.h" -#include "api/task_queue/default_task_queue_factory.h" +#include "api/scoped_refptr.h" +#include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" #include "api/test/mock_fec_controller_override.h" #include "api/test/mock_video_encoder.h" #include "api/test/mock_video_encoder_factory.h" +#include "api/test/rtc_error_matchers.h" +#include "api/test/time_controller.h" #include "api/units/data_rate.h" #include "api/units/time_delta.h" +#include "api/units/timestamp.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" +#include "api/video/encoded_image.h" #include "api/video/i420_buffer.h" #include "api/video/nv12_buffer.h" +#include "api/video/render_resolution.h" +#include "api/video/resolution.h" +#include "api/video/video_adaptation_counters.h" #include "api/video/video_adaptation_reason.h" #include "api/video/video_bitrate_allocation.h" +#include "api/video/video_bitrate_allocator.h" +#include "api/video/video_bitrate_allocator_factory.h" +#include "api/video/video_codec_constants.h" +#include "api/video/video_codec_type.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_frame_type.h" +#include "api/video/video_layers_allocation.h" +#include "api/video/video_rotation.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" +#include "api/video/video_stream_encoder_settings.h" +#include "api/video_codecs/scalability_mode.h" #include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" -#include "api/video_codecs/vp8_temporal_layers.h" +#include "api/video_codecs/vp8_frame_buffer_controller.h" #include "api/video_codecs/vp8_temporal_layers_factory.h" +#include "api/video_track_source_constraints.h" #include "call/adaptation/test/fake_adaptation_constraint.h" #include "call/adaptation/test/fake_resource.h" +#include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_adapter.h" +#include "call/video_send_stream.h" +#include "common_video/frame_instrumentation_data.h" #include "common_video/h264/h264_common.h" -#include "common_video/include/video_frame_buffer.h" #include "media/base/video_adapter.h" #include "media/engine/webrtc_video_engine.h" #include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" #include "modules/video_coding/codecs/h264/include/h264.h" -#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" -#include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "modules/video_coding/codecs/vp9/svc_config.h" -#include "modules/video_coding/utility/quality_scaler.h" +#include 
"modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_error_codes.h" #include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "modules/video_coding/utility/vp8_constants.h" +#include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/experiments/encoder_info_settings.h" +#include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/gunit.h" #include "rtc_base/logging.h" #include "rtc_base/ref_counted_object.h" +#include "rtc_base/strings/string_builder.h" #include "rtc_base/synchronization/mutex.h" +#include "rtc_base/thread_annotations.h" +#include "rtc_base/time_utils.h" #include "system_wrappers/include/metrics.h" #include "test/encoder_settings.h" #include "test/fake_encoder.h" @@ -70,9 +112,14 @@ #include "test/time_controller/simulated_time_controller.h" #include "test/video_encoder_nullable_proxy_factory.h" #include "test/video_encoder_proxy_factory.h" -#include "video/config/encoder_stream_factory.h" +#include "test/wait_until.h" +#include "video/adaptation/overuse_frame_detector.h" +#include "video/config/video_encoder_config.h" +#include "video/encoder_bitrate_adjuster.h" #include "video/frame_cadence_adapter.h" #include "video/send_statistics_proxy.h" +#include "video/video_stream_encoder_interface.h" +#include "video/video_stream_encoder_observer.h" namespace webrtc { @@ -83,6 +130,7 @@ using ::testing::Field; using ::testing::Ge; using ::testing::Gt; using ::testing::Invoke; +using ::testing::IsTrue; using ::testing::Le; using ::testing::Lt; using ::testing::Matcher; @@ -107,7 +155,7 @@ const DataRate kStartBitrate = DataRate::KilobitsPerSec(600); const DataRate kSimulcastTargetBitrate = DataRate::KilobitsPerSec(3150); const int kMaxInitialFramedrop = 4; const int kDefaultFramerate = 30; -const int64_t kFrameIntervalMs = rtc::kNumMillisecsPerSec / kDefaultFramerate; +const int64_t kFrameIntervalMs = kNumMillisecsPerSec / kDefaultFramerate; const int64_t kProcessIntervalMs = 1000; const VideoEncoder::ResolutionBitrateLimits kEncoderBitrateLimits540p(960 * 540, 100 * 1000, 100 * 1000, 2000 * 1000); @@ -124,9 +172,14 @@ const uint8_t kCodedFrameVp8Qp25[] = { 0x02, 0x47, 0x08, 0x85, 0x85, 0x88, 0x85, 0x84, 0x88, 0x0c, 0x82, 0x00, 0x0c, 0x0d, 0x60, 0x00, 0xfe, 0xfc, 0x5c, 0xd0}; +#ifdef RTC_ENABLE_H265 +// Default value from encoder_info_settings.cc +const DataRate kDefaultH265Bitrate180p = DataRate::KilobitsPerSec(150); +#endif + VideoFrame CreateSimpleNV12Frame() { return VideoFrame::Builder() - .set_video_frame_buffer(rtc::make_ref_counted( + .set_video_frame_buffer(make_ref_counted( /*width=*/16, /*height=*/16)) .build(); } @@ -136,55 +189,54 @@ void PassAFrame( FrameCadenceAdapterInterface::Callback* video_stream_encoder_callback, int64_t ntp_time_ms) { encoder_queue->PostTask([video_stream_encoder_callback, ntp_time_ms] { - video_stream_encoder_callback->OnFrame(Timestamp::Millis(ntp_time_ms), 1, - CreateSimpleNV12Frame()); + video_stream_encoder_callback->OnFrame(Timestamp::Millis(ntp_time_ms), + false, CreateSimpleNV12Frame()); }); } class TestBuffer : public webrtc::I420Buffer { public: - TestBuffer(rtc::Event* event, int width, int height) + TestBuffer(Event* event, int width, int height) : I420Buffer(width, height), event_(event) {} private: - friend class rtc::RefCountedObject; + friend class RefCountedObject; ~TestBuffer() override { if (event_) event_->Set(); } - rtc::Event* const event_; + Event* const event_; }; // A fake native buffer that can't be converted to I420. 
Upon scaling, it // produces another FakeNativeBuffer. class FakeNativeBuffer : public webrtc::VideoFrameBuffer { public: - FakeNativeBuffer(rtc::Event* event, int width, int height) + FakeNativeBuffer(Event* event, int width, int height) : event_(event), width_(width), height_(height) {} webrtc::VideoFrameBuffer::Type type() const override { return Type::kNative; } int width() const override { return width_; } int height() const override { return height_; } - rtc::scoped_refptr ToI420() override { + scoped_refptr ToI420() override { return nullptr; } - rtc::scoped_refptr CropAndScale( - int offset_x, - int offset_y, - int crop_width, - int crop_height, - int scaled_width, - int scaled_height) override { - return rtc::make_ref_counted(nullptr, scaled_width, - scaled_height); + scoped_refptr CropAndScale(int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) override { + return make_ref_counted(nullptr, scaled_width, + scaled_height); } private: - friend class rtc::RefCountedObject; + friend class RefCountedObject; ~FakeNativeBuffer() override { if (event_) event_->Set(); } - rtc::Event* const event_; + Event* const event_; const int width_; const int height_; }; @@ -192,17 +244,17 @@ class FakeNativeBuffer : public webrtc::VideoFrameBuffer { // A fake native buffer that is backed by an NV12 buffer. class FakeNV12NativeBuffer : public webrtc::VideoFrameBuffer { public: - FakeNV12NativeBuffer(rtc::Event* event, int width, int height) + FakeNV12NativeBuffer(Event* event, int width, int height) : nv12_buffer_(NV12Buffer::Create(width, height)), event_(event) {} webrtc::VideoFrameBuffer::Type type() const override { return Type::kNative; } int width() const override { return nv12_buffer_->width(); } int height() const override { return nv12_buffer_->height(); } - rtc::scoped_refptr ToI420() override { + scoped_refptr ToI420() override { return nv12_buffer_->ToI420(); } - rtc::scoped_refptr GetMappedFrameBuffer( - rtc::ArrayView types) override { + scoped_refptr GetMappedFrameBuffer( + ArrayView types) override { if (absl::c_find(types, Type::kNV12) != types.end()) { return nv12_buffer_; } @@ -211,19 +263,20 @@ class FakeNV12NativeBuffer : public webrtc::VideoFrameBuffer { const NV12BufferInterface* GetNV12() const { return nv12_buffer_.get(); } private: - friend class rtc::RefCountedObject; + friend class RefCountedObject; ~FakeNV12NativeBuffer() override { if (event_) event_->Set(); } - rtc::scoped_refptr nv12_buffer_; - rtc::Event* const event_; + scoped_refptr nv12_buffer_; + Event* const event_; }; class CpuOveruseDetectorProxy : public OveruseFrameDetector { public: - explicit CpuOveruseDetectorProxy(CpuOveruseMetricsObserver* metrics_observer) - : OveruseFrameDetector(metrics_observer), + CpuOveruseDetectorProxy(const Environment& env, + CpuOveruseMetricsObserver* metrics_observer) + : OveruseFrameDetector(env, metrics_observer), last_target_framerate_fps_(-1), framerate_updated_event_(true /* manual_reset */, false /* initially_signaled */) {} @@ -243,12 +296,12 @@ class CpuOveruseDetectorProxy : public OveruseFrameDetector { CpuOveruseOptions GetOptions() { return options_; } - rtc::Event* framerate_updated_event() { return &framerate_updated_event_; } + Event* framerate_updated_event() { return &framerate_updated_event_; } private: Mutex lock_; int last_target_framerate_fps_ RTC_GUARDED_BY(lock_); - rtc::Event framerate_updated_event_; + Event framerate_updated_event_; }; class FakeVideoSourceRestrictionsListener @@ -260,15 +313,13 
@@ class FakeVideoSourceRestrictionsListener RTC_DCHECK(was_restrictions_updated_); } - rtc::Event* restrictions_updated_event() { - return &restrictions_updated_event_; - } + Event* restrictions_updated_event() { return &restrictions_updated_event_; } // VideoSourceRestrictionsListener implementation. void OnVideoSourceRestrictionsUpdated( VideoSourceRestrictions restrictions, const VideoAdaptationCounters& adaptation_counters, - rtc::scoped_refptr reason, + scoped_refptr reason, const VideoSourceRestrictions& unfiltered_restrictions) override { was_restrictions_updated_ = true; restrictions_updated_event_.Set(); @@ -276,24 +327,23 @@ class FakeVideoSourceRestrictionsListener private: bool was_restrictions_updated_; - rtc::Event restrictions_updated_event_; + Event restrictions_updated_event_; }; auto WantsFps(Matcher fps_matcher) { - return Field("max_framerate_fps", &rtc::VideoSinkWants::max_framerate_fps, + return Field("max_framerate_fps", &VideoSinkWants::max_framerate_fps, fps_matcher); } auto WantsMaxPixels(Matcher max_pixel_matcher) { - return Field("max_pixel_count", &rtc::VideoSinkWants::max_pixel_count, + return Field("max_pixel_count", &VideoSinkWants::max_pixel_count, AllOf(max_pixel_matcher, Gt(0))); } auto ResolutionMax() { - return AllOf( - WantsMaxPixels(Eq(std::numeric_limits::max())), - Field("target_pixel_count", &rtc::VideoSinkWants::target_pixel_count, - Eq(absl::nullopt))); + return AllOf(WantsMaxPixels(Eq(std::numeric_limits::max())), + Field("target_pixel_count", &VideoSinkWants::target_pixel_count, + Eq(std::nullopt))); } auto FpsMax() { @@ -312,6 +362,10 @@ auto FpsMaxResolutionMatches(Matcher pixel_matcher) { return AllOf(FpsMax(), WantsMaxPixels(pixel_matcher)); } +auto FpsUnlimitedResolutionMatches(Matcher pixel_matcher) { + return AllOf(FpsUnlimited(), WantsMaxPixels(pixel_matcher)); +} + auto FpsMaxResolutionMax() { return AllOf(FpsMax(), ResolutionMax()); } @@ -332,39 +386,47 @@ auto FpsInRangeForPixelsInBalanced(int last_frame_pixels) { } else { fps_range_matcher = Eq(kDefaultFramerate); } - return Field("max_framerate_fps", &rtc::VideoSinkWants::max_framerate_fps, + return Field("max_framerate_fps", &VideoSinkWants::max_framerate_fps, fps_range_matcher); } -auto FpsEqResolutionEqTo(const rtc::VideoSinkWants& other_wants) { +auto FpsEqResolutionEqTo(const VideoSinkWants& other_wants) { return AllOf(WantsFps(Eq(other_wants.max_framerate_fps)), WantsMaxPixels(Eq(other_wants.max_pixel_count))); } -auto FpsMaxResolutionLt(const rtc::VideoSinkWants& other_wants) { +auto FpsMaxResolutionLt(const VideoSinkWants& other_wants) { return AllOf(FpsMax(), WantsMaxPixels(Lt(other_wants.max_pixel_count))); } -auto FpsMaxResolutionGt(const rtc::VideoSinkWants& other_wants) { +auto FpsUnlimitedResolutionLt(const VideoSinkWants& other_wants) { + return AllOf(FpsUnlimited(), WantsMaxPixels(Lt(other_wants.max_pixel_count))); +} + +auto FpsMaxResolutionGt(const VideoSinkWants& other_wants) { return AllOf(FpsMax(), WantsMaxPixels(Gt(other_wants.max_pixel_count))); } -auto FpsLtResolutionEq(const rtc::VideoSinkWants& other_wants) { +auto FpsUnlimitedResolutionGt(const VideoSinkWants& other_wants) { + return AllOf(FpsUnlimited(), WantsMaxPixels(Gt(other_wants.max_pixel_count))); +} + +auto FpsLtResolutionEq(const VideoSinkWants& other_wants) { return AllOf(WantsFps(Lt(other_wants.max_framerate_fps)), WantsMaxPixels(Eq(other_wants.max_pixel_count))); } -auto FpsGtResolutionEq(const rtc::VideoSinkWants& other_wants) { +auto FpsGtResolutionEq(const VideoSinkWants& other_wants) { 
return AllOf(WantsFps(Gt(other_wants.max_framerate_fps)), WantsMaxPixels(Eq(other_wants.max_pixel_count))); } -auto FpsEqResolutionLt(const rtc::VideoSinkWants& other_wants) { +auto FpsEqResolutionLt(const VideoSinkWants& other_wants) { return AllOf(WantsFps(Eq(other_wants.max_framerate_fps)), WantsMaxPixels(Lt(other_wants.max_pixel_count))); } -auto FpsEqResolutionGt(const rtc::VideoSinkWants& other_wants) { +auto FpsEqResolutionGt(const VideoSinkWants& other_wants) { return AllOf(WantsFps(Eq(other_wants.max_framerate_fps)), WantsMaxPixels(Gt(other_wants.max_pixel_count))); } @@ -372,6 +434,7 @@ auto FpsEqResolutionGt(const rtc::VideoSinkWants& other_wants) { class VideoStreamEncoderUnderTest : public VideoStreamEncoder { public: VideoStreamEncoderUnderTest( + const Environment& env, TimeController* time_controller, std::unique_ptr cadence_adapter, std::unique_ptr @@ -380,19 +443,18 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder { const VideoStreamEncoderSettings& settings, VideoStreamEncoder::BitrateAllocationCallbackType allocation_callback_type, - const FieldTrialsView& field_trials, int num_cores) - : VideoStreamEncoder(time_controller->GetClock(), - num_cores, - stats_proxy, - settings, - std::unique_ptr( - overuse_detector_proxy_ = - new CpuOveruseDetectorProxy(stats_proxy)), - std::move(cadence_adapter), - std::move(encoder_queue), - allocation_callback_type, - field_trials), + : VideoStreamEncoder( + env, + num_cores, + stats_proxy, + settings, + std::unique_ptr( + overuse_detector_proxy_ = + new CpuOveruseDetectorProxy(env, stats_proxy)), + std::move(cadence_adapter), + std::move(encoder_queue), + allocation_callback_type), time_controller_(time_controller), fake_cpu_resource_(FakeResource::Create("FakeResource[CPU]")), fake_quality_resource_(FakeResource::Create("FakeResource[QP]")), @@ -404,7 +466,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder { } void SetSourceAndWaitForRestrictionsUpdated( - rtc::VideoSourceInterface* source, + VideoSourceInterface* source, const DegradationPreference& degradation_preference) { FakeVideoSourceRestrictionsListener listener; AddRestrictionsListenerForTesting(&listener); @@ -414,7 +476,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder { } void SetSourceAndWaitForFramerateUpdated( - rtc::VideoSourceInterface* source, + VideoSourceInterface* source, const DegradationPreference& degradation_preference) { overuse_detector_proxy_->framerate_updated_event()->Reset(); SetSource(source, degradation_preference); @@ -443,7 +505,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder { // Triggers resource usage measurements on the fake CPU resource. void TriggerCpuOveruse() { - rtc::Event event; + Event event; encoder_queue()->PostTask([this, &event] { fake_cpu_resource_->SetUsageState(ResourceUsageState::kOveruse); event.Set(); @@ -453,7 +515,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder { } void TriggerCpuUnderuse() { - rtc::Event event; + Event event; encoder_queue()->PostTask([this, &event] { fake_cpu_resource_->SetUsageState(ResourceUsageState::kUnderuse); event.Set(); @@ -464,7 +526,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder { // Triggers resource usage measurements on the fake quality resource. 
void TriggerQualityLow() { - rtc::Event event; + Event event; encoder_queue()->PostTask([this, &event] { fake_quality_resource_->SetUsageState(ResourceUsageState::kOveruse); event.Set(); @@ -473,7 +535,7 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder { time_controller_->AdvanceTime(TimeDelta::Zero()); } void TriggerQualityHigh() { - rtc::Event event; + Event event; encoder_queue()->PostTask([this, &event] { fake_quality_resource_->SetUsageState(ResourceUsageState::kUnderuse); event.Set(); @@ -484,8 +546,8 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder { TimeController* const time_controller_; CpuOveruseDetectorProxy* overuse_detector_proxy_; - rtc::scoped_refptr fake_cpu_resource_; - rtc::scoped_refptr fake_quality_resource_; + scoped_refptr fake_cpu_resource_; + scoped_refptr fake_quality_resource_; FakeAdaptationConstraint fake_adaptation_constraint_; }; @@ -498,6 +560,7 @@ class CroppingVideoStreamFactory private: std::vector CreateEncoderStreams( + const FieldTrialsView& /*field_trials*/, int frame_width, int frame_height, const VideoEncoderConfig& encoder_config) override { @@ -524,17 +587,17 @@ class AdaptingFrameForwarder : public test::FrameForwarder { return adaptation_enabled_; } - // The "last wants" is a snapshot of the previous rtc::VideoSinkWants where + // The "last wants" is a snapshot of the previous webrtc::VideoSinkWants where // the resolution or frame rate was different than it is currently. If // something else is modified, such as encoder resolutions, but the resolution // and frame rate stays the same, last wants is not updated. - rtc::VideoSinkWants last_wants() const { + VideoSinkWants last_wants() const { MutexLock lock(&mutex_); return last_wants_; } - absl::optional last_sent_width() const { return last_width_; } - absl::optional last_sent_height() const { return last_height_; } + std::optional last_sent_width() const { return last_width_; } + std::optional last_sent_height() const { return last_height_; } void IncomingCapturedFrame(const VideoFrame& video_frame) override { RTC_DCHECK(time_controller_->GetMainThread()->IsCurrent()); @@ -554,10 +617,10 @@ class AdaptingFrameForwarder : public test::FrameForwarder { &cropped_height, &out_width, &out_height)) { VideoFrame adapted_frame = VideoFrame::Builder() - .set_video_frame_buffer(rtc::make_ref_counted( + .set_video_frame_buffer(make_ref_counted( nullptr, out_width, out_height)) .set_ntp_time_ms(video_frame.ntp_time_ms()) - .set_timestamp_ms(99) + .set_timestamp_ms(video_frame.timestamp_us() * 1000) .set_rotation(kVideoRotation_0) .build(); if (video_frame.has_update_rect()) { @@ -571,8 +634,8 @@ class AdaptingFrameForwarder : public test::FrameForwarder { last_width_.emplace(adapted_frame.width()); last_height_.emplace(adapted_frame.height()); } else { - last_width_ = absl::nullopt; - last_height_ = absl::nullopt; + last_width_ = std::nullopt; + last_height_ = std::nullopt; } } else { RTC_DLOG(LS_INFO) << "IncomingCapturedFrame: adaptation not enabled"; @@ -583,18 +646,18 @@ class AdaptingFrameForwarder : public test::FrameForwarder { } void OnOutputFormatRequest(int width, int height) { - absl::optional> target_aspect_ratio = + std::optional> target_aspect_ratio = std::make_pair(width, height); - absl::optional max_pixel_count = width * height; - absl::optional max_fps; + std::optional max_pixel_count = width * height; + std::optional max_fps; adapter_.OnOutputFormatRequest(target_aspect_ratio, max_pixel_count, max_fps); } - void AddOrUpdateSink(rtc::VideoSinkInterface* 
sink, - const rtc::VideoSinkWants& wants) override { + void AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) override { MutexLock lock(&mutex_); - rtc::VideoSinkWants prev_wants = sink_wants_locked(); + VideoSinkWants prev_wants = sink_wants_locked(); bool did_adapt = prev_wants.max_pixel_count != wants.max_pixel_count || prev_wants.target_pixel_count != wants.target_pixel_count || @@ -609,11 +672,11 @@ class AdaptingFrameForwarder : public test::FrameForwarder { void RequestRefreshFrame() override { ++refresh_frames_requested_; } TimeController* const time_controller_; - cricket::VideoAdapter adapter_; + VideoAdapter adapter_; bool adaptation_enabled_ RTC_GUARDED_BY(mutex_); - rtc::VideoSinkWants last_wants_ RTC_GUARDED_BY(mutex_); - absl::optional last_width_; - absl::optional last_height_; + VideoSinkWants last_wants_ RTC_GUARDED_BY(mutex_); + std::optional last_width_; + std::optional last_height_; int refresh_frames_requested_{0}; }; @@ -661,7 +724,7 @@ class MockableSendStatisticsProxy : public SendStatisticsProxy { } mutable Mutex lock_; - absl::optional mock_stats_ RTC_GUARDED_BY(lock_); + std::optional mock_stats_ RTC_GUARDED_BY(lock_); std::function on_frame_dropped_; }; @@ -679,8 +742,12 @@ class SimpleVideoStreamEncoderFactory { MOCK_METHOD(CodecSpecificInfo, EncodeHook, (EncodedImage & encoded_image, - rtc::scoped_refptr buffer), + webrtc::scoped_refptr buffer), (override)); + MOCK_METHOD(VideoEncoder::EncoderInfo, + GetEncoderInfo, + (), + (const, override)); }; SimpleVideoStreamEncoderFactory() { @@ -693,16 +760,17 @@ class SimpleVideoStreamEncoderFactory { std::unique_ptr zero_hertz_adapter, std::unique_ptr encoder_queue, const FieldTrialsView* field_trials = nullptr) { + Environment env = CreateEnvironment(&field_trials_, field_trials, + time_controller_.GetClock()); auto result = std::make_unique( - time_controller_.GetClock(), + env, /*number_of_cores=*/1, /*stats_proxy=*/stats_proxy_.get(), encoder_settings_, - std::make_unique( - /*stats_proxy=*/nullptr), + std::make_unique(env, + /*stats_proxy=*/nullptr), std::move(zero_hertz_adapter), std::move(encoder_queue), VideoStreamEncoder::BitrateAllocationCallbackType:: - kVideoBitrateAllocation, - field_trials ? 
*field_trials : field_trials_); + kVideoBitrateAllocation); result->SetSink(&sink_, /*rotation_applied=*/false); return result; } @@ -748,8 +816,10 @@ class SimpleVideoStreamEncoderFactory { test::ScopedKeyValueConfig field_trials_; GlobalSimulatedTimeController time_controller_{Timestamp::Zero()}; - std::unique_ptr task_queue_factory_{ - time_controller_.CreateTaskQueueFactory()}; + Environment env_ = + CreateEnvironment(&field_trials_, + time_controller_.GetClock(), + time_controller_.CreateTaskQueueFactory()); std::unique_ptr stats_proxy_ = std::make_unique( time_controller_.GetClock(), @@ -760,7 +830,7 @@ class SimpleVideoStreamEncoderFactory { CreateBuiltinVideoBitrateAllocatorFactory(); VideoStreamEncoderSettings encoder_settings_{ VideoEncoder::Capabilities(/*loss_notification=*/false)}; - MockFakeEncoder mock_fake_encoder_{time_controller_.GetClock()}; + MockFakeEncoder mock_fake_encoder_{env_}; test::VideoEncoderProxyFactory encoder_factory_{&mock_fake_encoder_}; NullEncoderSink sink_; }; @@ -770,11 +840,10 @@ class MockFrameCadenceAdapter : public FrameCadenceAdapterInterface { MOCK_METHOD(void, Initialize, (Callback * callback), (override)); MOCK_METHOD(void, SetZeroHertzModeEnabled, - (absl::optional), + (std::optional), (override)); MOCK_METHOD(void, OnFrame, (const VideoFrame&), (override)); - MOCK_METHOD(absl::optional, GetInputFrameRateFps, (), (override)); - MOCK_METHOD(void, UpdateFrameRate, (), (override)); + MOCK_METHOD(std::optional, GetInputFrameRateFps, (), (override)); MOCK_METHOD(void, UpdateLayerQualityConvergence, (size_t spatial_index, bool converged), @@ -783,6 +852,10 @@ class MockFrameCadenceAdapter : public FrameCadenceAdapterInterface { UpdateLayerStatus, (size_t spatial_index, bool enabled), (override)); + MOCK_METHOD(void, + UpdateVideoSourceRestrictions, + (std::optional), + (override)); MOCK_METHOD(void, ProcessKeyFrameRequest, (), (override)); }; @@ -793,27 +866,27 @@ class MockEncoderSelector OnCurrentEncoder, (const SdpVideoFormat& format), (override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, OnAvailableBitrate, (const DataRate& rate), (override)); - MOCK_METHOD(absl::optional, + MOCK_METHOD(std::optional, OnResolutionChange, (const RenderResolution& resolution), (override)); - MOCK_METHOD(absl::optional, OnEncoderBroken, (), (override)); + MOCK_METHOD(std::optional, OnEncoderBroken, (), (override)); }; -class MockVideoSourceInterface : public rtc::VideoSourceInterface { +class MockVideoSourceInterface : public VideoSourceInterface { public: MOCK_METHOD(void, AddOrUpdateSink, - (rtc::VideoSinkInterface*, - const rtc::VideoSinkWants&), + (webrtc::VideoSinkInterface*, + const webrtc::VideoSinkWants&), (override)); MOCK_METHOD(void, RemoveSink, - (rtc::VideoSinkInterface*), + (webrtc::VideoSinkInterface*), (override)); MOCK_METHOD(void, RequestRefreshFrame, (), (override)); }; @@ -829,7 +902,7 @@ class VideoStreamEncoderTest : public ::testing::Test { codec_width_(320), codec_height_(240), max_framerate_(kDefaultFramerate), - fake_encoder_(&time_controller_), + fake_encoder_(env_), encoder_factory_(&fake_encoder_), stats_proxy_(new MockableSendStatisticsProxy( time_controller_.GetClock(), @@ -851,7 +924,6 @@ class VideoStreamEncoderTest : public ::testing::Test { test::FillEncoderConfiguration(kVideoCodecVP8, 1, &video_encoder_config); EXPECT_EQ(1u, video_encoder_config.simulcast_layers.size()); video_encoder_config.simulcast_layers[0].num_temporal_layers = 1; - video_encoder_config.simulcast_layers[0].max_framerate = max_framerate_; 
video_encoder_config_ = video_encoder_config.Copy(); ConfigureEncoder(std::move(video_encoder_config)); @@ -867,16 +939,18 @@ class VideoStreamEncoderTest : public ::testing::Test { if (video_stream_encoder_) video_stream_encoder_->Stop(); - auto encoder_queue = GetTaskQueueFactory()->CreateTaskQueue( + auto encoder_queue = env_.task_queue_factory().CreateTaskQueue( "EncoderQueue", TaskQueueFactory::Priority::NORMAL); TaskQueueBase* encoder_queue_ptr = encoder_queue.get(); std::unique_ptr cadence_adapter = - FrameCadenceAdapterInterface::Create(time_controller_.GetClock(), - encoder_queue_ptr, field_trials_); + FrameCadenceAdapterInterface::Create( + time_controller_.GetClock(), encoder_queue_ptr, + /*metronome=*/nullptr, /*worker_queue=*/nullptr, field_trials_); video_stream_encoder_ = std::make_unique( - &time_controller_, std::move(cadence_adapter), std::move(encoder_queue), - stats_proxy_.get(), video_send_config_.encoder_settings, - allocation_callback_type, field_trials_, num_cores); + env_, &time_controller_, std::move(cadence_adapter), + std::move(encoder_queue), stats_proxy_.get(), + video_send_config_.encoder_settings, allocation_callback_type, + num_cores); video_stream_encoder_->SetSink(&sink_, /*rotation_applied=*/false); video_stream_encoder_->SetSource( &video_source_, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); @@ -891,6 +965,7 @@ class VideoStreamEncoderTest : public ::testing::Test { size_t num_temporal_layers, unsigned char num_spatial_layers, bool screenshare, + std::optional max_frame_rate = kDefaultFramerate, VideoStreamEncoder::BitrateAllocationCallbackType allocation_callback_type = VideoStreamEncoder::BitrateAllocationCallbackType:: @@ -903,7 +978,9 @@ class VideoStreamEncoderTest : public ::testing::Test { num_streams, &video_encoder_config); for (auto& layer : video_encoder_config.simulcast_layers) { layer.num_temporal_layers = num_temporal_layers; - layer.max_framerate = kDefaultFramerate; + if (max_frame_rate) { + layer.max_framerate = *max_frame_rate; + } } video_encoder_config.max_bitrate_bps = num_streams == 1 ? 
kTargetBitrate.bps() : kSimulcastTargetBitrate.bps(); @@ -915,39 +992,38 @@ class VideoStreamEncoderTest : public ::testing::Test { vp9_settings.numberOfSpatialLayers = num_spatial_layers; vp9_settings.automaticResizeOn = num_spatial_layers <= 1; video_encoder_config.encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( vp9_settings); } ConfigureEncoder(std::move(video_encoder_config), allocation_callback_type, num_cores); } - VideoFrame CreateFrame(int64_t ntp_time_ms, - rtc::Event* destruction_event) const { + VideoFrame CreateFrame(int64_t ntp_time_ms, Event* destruction_event) const { return VideoFrame::Builder() - .set_video_frame_buffer(rtc::make_ref_counted( + .set_video_frame_buffer(make_ref_counted( destruction_event, codec_width_, codec_height_)) .set_ntp_time_ms(ntp_time_ms) - .set_timestamp_ms(99) + .set_timestamp_ms(ntp_time_ms) .set_rotation(kVideoRotation_0) .build(); } VideoFrame CreateFrameWithUpdatedPixel(int64_t ntp_time_ms, - rtc::Event* destruction_event, + Event* destruction_event, int offset_x) const { return VideoFrame::Builder() - .set_video_frame_buffer(rtc::make_ref_counted( + .set_video_frame_buffer(make_ref_counted( destruction_event, codec_width_, codec_height_)) .set_ntp_time_ms(ntp_time_ms) - .set_timestamp_ms(99) + .set_timestamp_ms(ntp_time_ms) .set_rotation(kVideoRotation_0) .set_update_rect(VideoFrame::UpdateRect{offset_x, 0, 1, 1}) .build(); } VideoFrame CreateFrame(int64_t ntp_time_ms, int width, int height) const { - auto buffer = rtc::make_ref_counted(nullptr, width, height); + auto buffer = make_ref_counted(nullptr, width, height); I420Buffer::SetBlack(buffer.get()); return VideoFrame::Builder() .set_video_frame_buffer(std::move(buffer)) @@ -967,33 +1043,33 @@ class VideoStreamEncoderTest : public ::testing::Test { } VideoFrame CreateFakeNativeFrame(int64_t ntp_time_ms, - rtc::Event* destruction_event, + Event* destruction_event, int width, int height) const { return VideoFrame::Builder() - .set_video_frame_buffer(rtc::make_ref_counted( + .set_video_frame_buffer(make_ref_counted( destruction_event, width, height)) .set_ntp_time_ms(ntp_time_ms) - .set_timestamp_ms(99) + .set_timestamp_ms(ntp_time_ms) .set_rotation(kVideoRotation_0) .build(); } VideoFrame CreateFakeNV12NativeFrame(int64_t ntp_time_ms, - rtc::Event* destruction_event, + Event* destruction_event, int width, int height) const { return VideoFrame::Builder() - .set_video_frame_buffer(rtc::make_ref_counted( + .set_video_frame_buffer(make_ref_counted( destruction_event, width, height)) .set_ntp_time_ms(ntp_time_ms) - .set_timestamp_ms(99) + .set_timestamp_ms(ntp_time_ms) .set_rotation(kVideoRotation_0) .build(); } VideoFrame CreateFakeNativeFrame(int64_t ntp_time_ms, - rtc::Event* destruction_event) const { + Event* destruction_event) const { return CreateFakeNativeFrame(ntp_time_ms, destruction_event, codec_width_, codec_height_); } @@ -1037,11 +1113,7 @@ class VideoStreamEncoderTest : public ::testing::Test { class TestEncoder : public test::FakeEncoder { public: - explicit TestEncoder(TimeController* time_controller) - : FakeEncoder(time_controller->GetClock()), - time_controller_(time_controller) { - RTC_DCHECK(time_controller_); - } + explicit TestEncoder(const Environment& env) : FakeEncoder(env) {} VideoEncoder::EncoderInfo GetEncoderInfo() const override { MutexLock lock(&local_mutex_); @@ -1165,7 +1237,7 @@ class VideoStreamEncoderTest : public ::testing::Test { } void SetEncodedImageData( - rtc::scoped_refptr encoded_image_data) { + scoped_refptr 
encoded_image_data) { MutexLock lock(&local_mutex_); encoded_image_data_ = encoded_image_data; } @@ -1175,7 +1247,7 @@ class VideoStreamEncoderTest : public ::testing::Test { expect_null_frame_ = true; } - absl::optional + std::optional GetAndResetLastRateControlSettings() { auto settings = last_rate_control_settings_; last_rate_control_settings_.reset(); @@ -1192,7 +1264,7 @@ class VideoStreamEncoderTest : public ::testing::Test { return last_input_height_; } - absl::optional GetLastInputPixelFormat() { + std::optional GetLastInputPixelFormat() { MutexLock lock(&local_mutex_); return last_input_pixel_format_; } @@ -1202,6 +1274,11 @@ class VideoStreamEncoderTest : public ::testing::Test { return num_set_rates_; } + int GetNumEncodes() const { + MutexLock lock(&local_mutex_); + return num_encodes_; + } + void SetPreferredPixelFormats( absl::InlinedVector pixel_formats) { @@ -1209,7 +1286,7 @@ class VideoStreamEncoderTest : public ::testing::Test { preferred_pixel_formats_ = std::move(pixel_formats); } - void SetIsQpTrusted(absl::optional trusted) { + void SetIsQpTrusted(std::optional trusted) { MutexLock lock(&local_mutex_); is_qp_trusted_ = trusted; } @@ -1224,18 +1301,20 @@ class VideoStreamEncoderTest : public ::testing::Test { const std::vector* frame_types) override { { MutexLock lock(&local_mutex_); + num_encodes_++; if (expect_null_frame_) { - EXPECT_EQ(input_image.timestamp(), 0u); + EXPECT_EQ(input_image.rtp_timestamp(), 0u); EXPECT_EQ(input_image.width(), 1); last_frame_types_ = *frame_types; expect_null_frame_ = false; } else { - EXPECT_GT(input_image.timestamp(), timestamp_); + EXPECT_GT(input_image.rtp_timestamp(), timestamp_); EXPECT_GT(input_image.ntp_time_ms(), ntp_time_ms_); - EXPECT_EQ(input_image.timestamp(), input_image.ntp_time_ms() * 90); + EXPECT_EQ(input_image.rtp_timestamp(), + input_image.ntp_time_ms() * 90); } - timestamp_ = input_image.timestamp(); + timestamp_ = input_image.rtp_timestamp(); ntp_time_ms_ = input_image.ntp_time_ms(); last_input_width_ = input_image.width(); last_input_height_ = input_image.height(); @@ -1249,7 +1328,7 @@ class VideoStreamEncoderTest : public ::testing::Test { CodecSpecificInfo EncodeHook( EncodedImage& encoded_image, - rtc::scoped_refptr buffer) override { + scoped_refptr buffer) override { CodecSpecificInfo codec_specific; { MutexLock lock(&mutex_); @@ -1316,14 +1395,13 @@ class VideoStreamEncoderTest : public ::testing::Test { FakeEncoder::SetRates(adjusted_paramters); } - TimeController* const time_controller_; mutable Mutex local_mutex_; enum class EncoderState { kUninitialized, kInitializationFailed, kInitialized } initialized_ RTC_GUARDED_BY(local_mutex_) = EncoderState::kUninitialized; - rtc::Event continue_encode_event_; + Event continue_encode_event_; uint32_t timestamp_ RTC_GUARDED_BY(local_mutex_) = 0; int64_t ntp_time_ms_ RTC_GUARDED_BY(local_mutex_) = 0; int last_input_width_ RTC_GUARDED_BY(local_mutex_) = 0; @@ -1333,17 +1411,17 @@ class VideoStreamEncoderTest : public ::testing::Test { bool apply_alignment_to_all_simulcast_layers_ RTC_GUARDED_BY(local_mutex_) = false; bool is_hardware_accelerated_ RTC_GUARDED_BY(local_mutex_) = false; - rtc::scoped_refptr encoded_image_data_ + scoped_refptr encoded_image_data_ RTC_GUARDED_BY(local_mutex_); std::unique_ptr frame_buffer_controller_ RTC_GUARDED_BY(local_mutex_); - absl::optional + std::optional temporal_layers_supported_[kMaxSpatialLayers] RTC_GUARDED_BY( local_mutex_); bool force_init_encode_failed_ RTC_GUARDED_BY(local_mutex_) = false; double rate_factor_ 
RTC_GUARDED_BY(local_mutex_) = 1.0; uint32_t last_framerate_ RTC_GUARDED_BY(local_mutex_) = 0; - absl::optional + std::optional last_rate_control_settings_; VideoFrame::UpdateRect last_update_rect_ RTC_GUARDED_BY(local_mutex_) = { 0, 0, 0, 0}; @@ -1355,11 +1433,12 @@ class VideoStreamEncoderTest : public ::testing::Test { std::vector resolution_bitrate_limits_ RTC_GUARDED_BY(local_mutex_); int num_set_rates_ RTC_GUARDED_BY(local_mutex_) = 0; - absl::optional last_input_pixel_format_ + int num_encodes_ RTC_GUARDED_BY(local_mutex_) = 0; + std::optional last_input_pixel_format_ RTC_GUARDED_BY(local_mutex_); absl::InlinedVector preferred_pixel_formats_ RTC_GUARDED_BY(local_mutex_); - absl::optional is_qp_trusted_ RTC_GUARDED_BY(local_mutex_); + std::optional is_qp_trusted_ RTC_GUARDED_BY(local_mutex_); VideoCodecComplexity last_encoder_complexity_ RTC_GUARDED_BY(local_mutex_){ VideoCodecComplexity::kComplexityNormal}; }; @@ -1483,6 +1562,18 @@ class VideoStreamEncoderTest : public ::testing::Test { return number_of_layers_allocations_; } + std::optional< + std::variant> + GetLastFrameInstrumentationData() const { + MutexLock lock(&mutex_); + return last_frame_instrumentation_data_; + } + + void ResetLastFrameInstrumentationData() { + MutexLock lock(&mutex_); + last_frame_instrumentation_data_.reset(); + } + private: Result OnEncodedImage( const EncodedImage& encoded_image, @@ -1492,7 +1583,7 @@ class VideoStreamEncoderTest : public ::testing::Test { last_encoded_image_ = EncodedImage(encoded_image); last_encoded_image_data_ = std::vector( encoded_image.data(), encoded_image.data() + encoded_image.size()); - uint32_t timestamp = encoded_image.Timestamp(); + uint32_t timestamp = encoded_image.RtpTimestamp(); if (last_timestamp_ != timestamp) { num_received_layers_ = 1; last_width_ = encoded_image._encodedWidth; @@ -1508,6 +1599,12 @@ class VideoStreamEncoderTest : public ::testing::Test { if (num_received_layers_ == num_expected_layers_) { encoded_frame_event_.Set(); } + if (codec_specific_info && + codec_specific_info->frame_instrumentation_data.has_value()) { + last_frame_instrumentation_data_ = + codec_specific_info->frame_instrumentation_data; + } + return Result(Result::OK, last_timestamp_); } @@ -1533,7 +1630,7 @@ class VideoStreamEncoderTest : public ::testing::Test { MutexLock lock(&mutex_); ++number_of_layers_allocations_; last_layers_allocation_ = allocation; - rtc::StringBuilder log; + StringBuilder log; for (const auto& layer : allocation.active_spatial_layers) { log << layer.width << "x" << layer.height << "@" << layer.frame_rate_fps << "["; @@ -1549,7 +1646,7 @@ class VideoStreamEncoderTest : public ::testing::Test { TimeController* const time_controller_; mutable Mutex mutex_; TestEncoder* test_encoder_; - rtc::Event encoded_frame_event_; + Event encoded_frame_event_; EncodedImage last_encoded_image_; std::vector last_encoded_image_data_; uint32_t last_timestamp_ = 0; @@ -1566,6 +1663,9 @@ class VideoStreamEncoderTest : public ::testing::Test { int number_of_bitrate_allocations_ RTC_GUARDED_BY(&mutex_) = 0; VideoLayersAllocation last_layers_allocation_ RTC_GUARDED_BY(&mutex_); int number_of_layers_allocations_ RTC_GUARDED_BY(&mutex_) = 0; + std::optional< + std::variant> + last_frame_instrumentation_data_ RTC_GUARDED_BY(&mutex_); }; class VideoBitrateAllocatorProxyFactory @@ -1575,11 +1675,12 @@ class VideoStreamEncoderTest : public ::testing::Test { : bitrate_allocator_factory_( CreateBuiltinVideoBitrateAllocatorFactory()) {} - std::unique_ptr CreateVideoBitrateAllocator( + 
std::unique_ptr Create( + const Environment& env, const VideoCodec& codec) override { MutexLock lock(&mutex_); codec_config_ = codec; - return bitrate_allocator_factory_->CreateVideoBitrateAllocator(codec); + return bitrate_allocator_factory_->Create(env, codec); } VideoCodec codec_config() const { @@ -1602,12 +1703,12 @@ class VideoStreamEncoderTest : public ::testing::Test { int64_t CurrentTimeMs() { return clock()->CurrentTime().ms(); } protected: - virtual TaskQueueFactory* GetTaskQueueFactory() { - return time_controller_.GetTaskQueueFactory(); - } - test::ScopedKeyValueConfig field_trials_; GlobalSimulatedTimeController time_controller_{Timestamp::Micros(1234)}; + const Environment env_ = + CreateEnvironment(&field_trials_, + time_controller_.GetClock(), + time_controller_.GetTaskQueueFactory()); VideoSendStream::Config video_send_config_; VideoEncoderConfig video_encoder_config_; int codec_width_; @@ -1625,41 +1726,178 @@ class VideoStreamEncoderTest : public ::testing::Test { TEST_F(VideoStreamEncoderTest, EncodeOneFrame) { video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); - rtc::Event frame_destroyed_event; + Event frame_destroyed_event; video_source_.IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event)); WaitForEncodedFrame(1); EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeout)); video_stream_encoder_->Stop(); } +TEST_F(VideoStreamEncoderTest, PopulatesFrameInstrumentationDataWhenSetTo) { + video_send_config_.encoder_settings.enable_frame_instrumentation_generator = + true; + ConfigureEncoder(video_encoder_config_.Copy()); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); + + // We need a QP for the encoded frame. + fake_encoder_.SetEncodedImageData(EncodedImageBuffer::Create( + kCodedFrameVp8Qp25, sizeof(kCodedFrameVp8Qp25))); + video_source_.IncomingCapturedFrame( + CreateFrame(1, codec_width_, codec_height_)); + WaitForEncodedFrame(1); + + EXPECT_TRUE(sink_.GetLastFrameInstrumentationData().has_value()); + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, + FrameInstrumentationGeneratorDoesNotStashDroppedFrames) { + // Set low rate but high resolution. Make sure input frame is dropped and + // instance is released, even with corruption detection enabled. + const DataRate kLowRate = DataRate::KilobitsPerSec(300); + codec_width_ = 1280; + codec_height_ = 720; + + video_send_config_.encoder_settings.enable_frame_instrumentation_generator = + true; + ConfigureEncoder(video_encoder_config_.Copy()); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + kLowRate, kLowRate, kLowRate, 0, 0, 0); + + Event frame_destroyed_event; + // Insert two frames, so that the first one isn't stored in the encoder queue. + video_source_.IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event)); + video_source_.IncomingCapturedFrame(CreateFrame(34, /*event=*/nullptr)); + EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeout)); + + EXPECT_FALSE(sink_.GetLastFrameInstrumentationData().has_value()); + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, + FrameInstrumentationGeneratorHandlesQueuedFrames) { + video_send_config_.encoder_settings.enable_frame_instrumentation_generator = + true; + ConfigureEncoder(video_encoder_config_.Copy()); + + // Mark stream as suspended. 
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + DataRate::Zero(), DataRate::Zero(), DataRate::Zero(), 0, 0, 0); + video_stream_encoder_->WaitUntilTaskQueueIsIdle(); + + // We need a QP for the encoded frame. + fake_encoder_.SetEncodedImageData(EncodedImageBuffer::Create( + kCodedFrameVp8Qp25, sizeof(kCodedFrameVp8Qp25))); + + // Insert a frame, that should be treated as dropped due to suspended state. + video_source_.IncomingCapturedFrame( + CreateFrame(1, codec_width_, codec_height_)); + + ExpectDroppedFrame(); + + // Resume and increase bitrate budget, process stashed frames. + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); + + WaitForEncodedFrame(1); + EXPECT_TRUE(sink_.GetLastFrameInstrumentationData().has_value()); + + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, + DoesNotPopulateFrameInstrumentationDataWhenSetNotTo) { + video_send_config_.encoder_settings.enable_frame_instrumentation_generator = + false; + ConfigureEncoder(video_encoder_config_.Copy()); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); + + // We need a QP for the encoded frame. + fake_encoder_.SetEncodedImageData(EncodedImageBuffer::Create( + kCodedFrameVp8Qp25, sizeof(kCodedFrameVp8Qp25))); + video_source_.IncomingCapturedFrame( + CreateFrame(1, codec_width_, codec_height_)); + WaitForEncodedFrame(1); + + EXPECT_FALSE(sink_.GetLastFrameInstrumentationData().has_value()); + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, + FrameInstrumentationGeneratorNotResetOnConfigurationUnlessEncoderIsToo) { + // Enable frame instrumentation generator and produce the first keyframe. + video_send_config_.encoder_settings.enable_frame_instrumentation_generator = + true; + ConfigureEncoder(video_encoder_config_.Copy()); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); + + // We need a QP for the encoded frame. + fake_encoder_.SetEncodedImageData(EncodedImageBuffer::Create( + kCodedFrameVp8Qp25, sizeof(kCodedFrameVp8Qp25))); + video_source_.IncomingCapturedFrame( + CreateFrame(1, codec_width_, codec_height_)); + WaitForEncodedFrame(1); + + EXPECT_TRUE(sink_.GetLastFrameInstrumentationData().has_value()); + sink_.ResetLastFrameInstrumentationData(); + + // Apply the same configuration again. Encoder should not be reinitilized. + video_stream_encoder_->ConfigureEncoder(video_encoder_config_.Copy(), + kMaxPayloadLength, nullptr); + + // Insert delta frames until a frame instrumentation should definitely have + // been sent. + for (int i = 1; i < 40; ++i) { + int timestamp = 1 + (33 * i); + fake_encoder_.SetEncodedImageData(EncodedImageBuffer::Create( + kCodedFrameVp8Qp25, sizeof(kCodedFrameVp8Qp25))); + video_source_.IncomingCapturedFrame( + CreateFrame(timestamp, codec_width_, codec_height_)); + WaitForEncodedFrame(timestamp); + } + + EXPECT_TRUE(sink_.GetLastFrameInstrumentationData().has_value()); + + video_stream_encoder_->Stop(); +} + TEST_F(VideoStreamEncoderTest, DropsFramesBeforeFirstOnBitrateUpdated) { // Dropped since no target bitrate has been set. - rtc::Event frame_destroyed_event; + Event frame_destroyed_event; // The encoder will cache up to one frame for a short duration. Adding two // frames means that the first frame will be dropped and the second frame will // be sent when the encoder is enabled. 
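// Illustration only, not part of the patch: one way a test body could inspect
// what the sink captured in the frame-instrumentation tests above. The
// variant's template arguments are elided in this hunk, so
// FrameInstrumentationSyncData and FrameInstrumentationData are assumed
// alternative names rather than something this diff establishes (needs
// <optional> and <variant>).
const auto data = sink_.GetLastFrameInstrumentationData();
ASSERT_TRUE(data.has_value());
EXPECT_TRUE(std::holds_alternative<FrameInstrumentationData>(*data) ||
            std::holds_alternative<FrameInstrumentationSyncData>(*data));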
- video_source_.IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event)); + const int64_t kFrame1TimestampMs = CurrentTimeMs(); + video_source_.IncomingCapturedFrame( + CreateFrame(kFrame1TimestampMs, &frame_destroyed_event)); AdvanceTime(TimeDelta::Millis(10)); - video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr)); + const int64_t kFrame2TimestampMs = CurrentTimeMs(); + video_source_.IncomingCapturedFrame(CreateFrame(kFrame2TimestampMs, nullptr)); AdvanceTime(TimeDelta::Zero()); EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeout)); video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); - // The pending frame should be received. - WaitForEncodedFrame(2); - video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr)); + // The pending frame should be encoded. + WaitForEncodedFrame(kFrame2TimestampMs); - WaitForEncodedFrame(3); + const int64_t kFrame3TimestampMs = CurrentTimeMs(); + video_source_.IncomingCapturedFrame(CreateFrame(kFrame3TimestampMs, nullptr)); + + WaitForEncodedFrame(kFrame3TimestampMs); video_stream_encoder_->Stop(); } TEST_F(VideoStreamEncoderTest, DropsFramesWhenRateSetToZero) { + int64_t time_ms = 123; video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); - video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); - WaitForEncodedFrame(1); + video_source_.IncomingCapturedFrame(CreateFrame(time_ms, nullptr)); + WaitForEncodedFrame(time_ms); video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( DataRate::Zero(), DataRate::Zero(), DataRate::Zero(), 0, 0, 0); @@ -1667,14 +1905,17 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWhenRateSetToZero) { // The encoder will cache up to one frame for a short duration. Adding two // frames means that the first frame will be dropped and the second frame will // be sent when the encoder is resumed. 
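// For reference (outside the patch): the fake encoder earlier in this file
// asserts EXPECT_EQ(input_image.rtp_timestamp(), input_image.ntp_time_ms() * 90)
// and strictly increasing timestamps, i.e. a 90 kHz RTP video clock at
// 90 ticks per millisecond. Deriving capture times from CurrentTimeMs(), as the
// hunks above now do, presumably keeps both properties tied to the simulated
// clock instead of literal 1, 2, 3 timestamps.
constexpr int kRtpTicksPerMs = 90;  // 90000 Hz / 1000 ms.
uint32_t RtpTimestampForNtpMs(int64_t ntp_time_ms) {
  return static_cast<uint32_t>(ntp_time_ms * kRtpTicksPerMs);
}
// e.g. ntp_time_ms = 123 (the starting time_ms above) -> rtp_timestamp = 11070.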
- video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr)); - video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr)); + time_ms += 30; + video_source_.IncomingCapturedFrame(CreateFrame(time_ms, nullptr)); + time_ms += 30; + video_source_.IncomingCapturedFrame(CreateFrame(time_ms, nullptr)); video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); - WaitForEncodedFrame(3); - video_source_.IncomingCapturedFrame(CreateFrame(4, nullptr)); - WaitForEncodedFrame(4); + WaitForEncodedFrame(time_ms); + time_ms += 20; + video_source_.IncomingCapturedFrame(CreateFrame(time_ms, nullptr)); + WaitForEncodedFrame(time_ms); video_stream_encoder_->Stop(); } @@ -1701,7 +1942,7 @@ TEST_F(VideoStreamEncoderTest, DropsFrameAfterStop) { video_stream_encoder_->Stop(); sink_.SetExpectNoFrames(); - rtc::Event frame_destroyed_event; + Event frame_destroyed_event; video_source_.IncomingCapturedFrame(CreateFrame(2, &frame_destroyed_event)); EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeout)); } @@ -1730,7 +1971,7 @@ TEST_F(VideoStreamEncoderTest, NativeFrameWithoutI420SupportGetsDelivered) { video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); - rtc::Event frame_destroyed_event; + Event frame_destroyed_event; video_source_.IncomingCapturedFrame( CreateFakeNativeFrame(1, &frame_destroyed_event)); WaitForEncodedFrame(1); @@ -1745,7 +1986,7 @@ TEST_F(VideoStreamEncoderTest, NativeFrameWithoutI420SupportGetsCroppedIfNecessary) { // Use the cropping factory. video_encoder_config_.video_stream_factory = - rtc::make_ref_counted(); + make_ref_counted(); video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config_), kMaxPayloadLength); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); @@ -1762,7 +2003,7 @@ TEST_F(VideoStreamEncoderTest, // Now send in a fake frame that needs to be cropped as the width/height // aren't divisible by 4 (see CreateEncoderStreams above). - rtc::Event frame_destroyed_event; + Event frame_destroyed_event; video_source_.IncomingCapturedFrame(CreateFakeNativeFrame( 2, &frame_destroyed_event, codec_width_ + 1, codec_height_ + 1)); WaitForEncodedFrame(2); @@ -1791,7 +2032,7 @@ TEST_F(VideoStreamEncoderTest, NativeFrameGetsDelivered_NoFrameTypePreference) { fake_encoder_.SetPreferredPixelFormats({}); - rtc::Event frame_destroyed_event; + Event frame_destroyed_event; video_source_.IncomingCapturedFrame(CreateFakeNV12NativeFrame( 1, &frame_destroyed_event, codec_width_, codec_height_)); WaitForEncodedFrame(1); @@ -1807,7 +2048,7 @@ TEST_F(VideoStreamEncoderTest, fake_encoder_.SetPreferredPixelFormats({VideoFrameBuffer::Type::kNV12}); - rtc::Event frame_destroyed_event; + Event frame_destroyed_event; video_source_.IncomingCapturedFrame(CreateFakeNV12NativeFrame( 1, &frame_destroyed_event, codec_width_, codec_height_)); WaitForEncodedFrame(1); @@ -1823,7 +2064,7 @@ TEST_F(VideoStreamEncoderTest, NativeFrameGetsDelivered_MappingIsNotFeasible) { // Fake NV12 native frame does not allow mapping to I444. 
fake_encoder_.SetPreferredPixelFormats({VideoFrameBuffer::Type::kI444}); - rtc::Event frame_destroyed_event; + Event frame_destroyed_event; video_source_.IncomingCapturedFrame(CreateFakeNV12NativeFrame( 1, &frame_destroyed_event, codec_width_, codec_height_)); WaitForEncodedFrame(1); @@ -1836,7 +2077,7 @@ TEST_F(VideoStreamEncoderTest, NativeFrameGetsDelivered_BackedByNV12) { video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); - rtc::Event frame_destroyed_event; + Event frame_destroyed_event; video_source_.IncomingCapturedFrame(CreateFakeNV12NativeFrame( 1, &frame_destroyed_event, codec_width_, codec_height_)); WaitForEncodedFrame(1); @@ -2032,6 +2273,43 @@ TEST_F(VideoStreamEncoderTest, video_stream_encoder_->Stop(); } +// ReconfigureEncoder checks RTC_ENABLE_H265 flag, and we want to test specific +// behavior of H265, when bitrate limits reported from hardware encoders are 0, +// expecting default target bitrate to be used in this case. +#ifdef RTC_ENABLE_H265 +TEST_F(VideoStreamEncoderTest, + ApplyDefaultBitrateLimitsWhenEncoderInfoResolutionBitrateLimitsAreZero) { + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); + const uint32_t kMinEncBitrateKbps = 100; + const uint32_t kMaxEncBitrateKbps = 1000; + const VideoEncoder::ResolutionBitrateLimits encoder_bitrate_limits_180p( + 320 * 180, + /*min_start_bitrate_bps=*/0, + /*min_bitrate_bps=*/0, + /*max_bitrate_bps=*/0); + fake_encoder_.SetResolutionBitrateLimits({encoder_bitrate_limits_180p}); + + VideoEncoderConfig video_encoder_config; + test::FillEncoderConfiguration(kVideoCodecH265, 1, &video_encoder_config); + video_encoder_config.max_bitrate_bps = kMaxEncBitrateKbps * 1000; + video_encoder_config.simulcast_layers[0].min_bitrate_bps = + kMinEncBitrateKbps * 1000; + video_encoder_config.simulcast_layers[0].width = 320; + video_encoder_config.simulcast_layers[0].height = 180; + video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(), + kMaxPayloadLength); + + video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); + WaitForEncodedFrame(1); + EXPECT_EQ(bitrate_allocator_factory_.codec_config() + .simulcastStream[0] + .targetBitrate, + kDefaultH265Bitrate180p.kbps()); + video_stream_encoder_->Stop(); +} +#endif + TEST_F(VideoStreamEncoderTest, EncoderAndAppLimitsDontIntersectEncoderLimitsIgnored) { video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( @@ -2189,16 +2467,12 @@ TEST_F(VideoStreamEncoderTest, // Two streams, highest stream active. VideoEncoderConfig config; - webrtc::VideoEncoder::EncoderInfo encoder_info; const int kNumStreams = 2; test::FillEncoderConfiguration(kVideoCodecVP8, kNumStreams, &config); config.max_bitrate_bps = 0; config.simulcast_layers[0].active = false; config.simulcast_layers[1].active = true; - config.video_stream_factory = - rtc::make_ref_counted( - "VP8", /*max qp*/ 56, /*screencast*/ false, - /*screenshare enabled*/ false, encoder_info); + config.video_stream_factory = nullptr; video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength); // The encoder bitrate limits for 270p should be used. @@ -2254,20 +2528,16 @@ TEST_F(VideoStreamEncoderTest, DefaultEncoderMaxAndMinBitratesUsedForTwoStreamsHighestActive) { // Two streams, highest stream active. 
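// Sketch, not part of the patch: the condition the H265 test above appears to
// exercise. When a hardware encoder reports ResolutionBitrateLimits whose
// bitrate fields are all zero, those limits are unusable and the codec default
// for that resolution (kDefaultH265Bitrate180p for 320x180, defined elsewhere
// in this file) is expected as the layer's target bitrate instead.
bool LimitsAreUnusable(const VideoEncoder::ResolutionBitrateLimits& limits) {
  return limits.min_start_bitrate_bps == 0 && limits.min_bitrate_bps == 0 &&
         limits.max_bitrate_bps == 0;
}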
VideoEncoderConfig config; - webrtc::VideoEncoder::EncoderInfo encoder_info; const int kNumStreams = 2; test::FillEncoderConfiguration(kVideoCodecVP8, kNumStreams, &config); config.max_bitrate_bps = 0; config.simulcast_layers[0].active = false; config.simulcast_layers[1].active = true; - config.video_stream_factory = - rtc::make_ref_counted( - "VP8", /*max qp*/ 56, /*screencast*/ false, - /*screenshare enabled*/ false, encoder_info); + config.video_stream_factory = nullptr; video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength); // Default bitrate limits for 270p should be used. - const absl::optional + const std::optional kDefaultLimits270p = EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( kVideoCodecVP8, 480 * 270); @@ -2280,7 +2550,7 @@ TEST_F(VideoStreamEncoderTest, fake_encoder_.config().simulcastStream[1].maxBitrate * 1000); // Default bitrate limits for 360p should be used. - const absl::optional + const std::optional kDefaultLimits360p = EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( kVideoCodecVP8, 640 * 360); @@ -2301,7 +2571,7 @@ TEST_F(VideoStreamEncoderTest, fake_encoder_.config().simulcastStream[1].maxBitrate * 1000); // Default bitrate limits for 540p should be used. - const absl::optional + const std::optional kDefaultLimits540p = EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( kVideoCodecVP8, 960 * 540); @@ -2328,16 +2598,12 @@ TEST_F(VideoStreamEncoderTest, // Three streams, middle stream active. VideoEncoderConfig config; - webrtc::VideoEncoder::EncoderInfo encoder_info; const int kNumStreams = 3; test::FillEncoderConfiguration(kVideoCodecVP8, kNumStreams, &config); config.simulcast_layers[0].active = false; config.simulcast_layers[1].active = true; config.simulcast_layers[2].active = false; - config.video_stream_factory = - rtc::make_ref_counted( - "VP8", /*max qp*/ 56, /*screencast*/ false, - /*screenshare enabled*/ false, encoder_info); + config.video_stream_factory = nullptr; video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength); // The encoder bitrate limits for 360p should be used. @@ -2373,16 +2639,12 @@ TEST_F(VideoStreamEncoderTest, // Three streams, lowest stream active. VideoEncoderConfig config; - webrtc::VideoEncoder::EncoderInfo encoder_info; const int kNumStreams = 3; test::FillEncoderConfiguration(kVideoCodecVP8, kNumStreams, &config); config.simulcast_layers[0].active = true; config.simulcast_layers[1].active = false; config.simulcast_layers[2].active = false; - config.video_stream_factory = - rtc::make_ref_counted( - "VP8", /*max qp*/ 56, /*screencast*/ false, - /*screenshare enabled*/ false, encoder_info); + config.video_stream_factory = nullptr; video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength); // Resolution on lowest stream lower than 270p. The encoder limits not applied @@ -2410,28 +2672,26 @@ TEST_F(VideoStreamEncoderTest, // Two streams, highest stream active. 
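// Illustration only, not part of the patch: the lookups the 270p/360p/540p
// expectations above are built on. The optional's template argument is elided
// in this hunk; VideoEncoder::ResolutionBitrateLimits is assumed here. The
// helper keys the defaults off the frame size in pixels and presumably returns
// an empty optional when no default is defined for that size, hence the guard.
const std::optional<VideoEncoder::ResolutionBitrateLimits> limits_360p =
    EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution(
        kVideoCodecVP8, 640 * 360);  // 230400 pixels.
ASSERT_TRUE(limits_360p.has_value());
EXPECT_GT(limits_360p->max_bitrate_bps, limits_360p->min_bitrate_bps);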
VideoEncoderConfig config; - webrtc::VideoEncoder::EncoderInfo encoder_info; const int kNumStreams = 2; test::FillEncoderConfiguration(kVideoCodecVP8, kNumStreams, &config); config.simulcast_layers[0].active = false; config.simulcast_layers[1].active = true; config.simulcast_layers[1].max_bitrate_bps = kMaxBitrateBps; - config.video_stream_factory = - rtc::make_ref_counted( - "VP8", /*max qp*/ 56, /*screencast*/ false, - /*screenshare enabled*/ false, encoder_info); + config.video_stream_factory = nullptr; video_stream_encoder_->ConfigureEncoder(config.Copy(), kMaxPayloadLength); - // The encoder bitrate limits for 270p should be used. + // The max configured bitrate should be used. + // The encoder bitrate limits for 270p should be used for min bitrate video_source_.IncomingCapturedFrame(CreateFrame(1, 480, 270)); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); EXPECT_EQ(fake_encoder_.config().numberOfSimulcastStreams, kNumStreams); EXPECT_EQ(static_cast(kEncoderLimits270p.min_bitrate_bps), fake_encoder_.config().simulcastStream[1].minBitrate * 1000); - EXPECT_EQ(static_cast(kEncoderLimits270p.max_bitrate_bps), + EXPECT_EQ(static_cast(kMaxBitrateBps), fake_encoder_.config().simulcastStream[1].maxBitrate * 1000); - // The max configured bitrate is less than the encoder limit for 360p. + // The max configured bitrate should be used. + // The encoder bitrate limits for 360p should be used for min bitrate video_source_.IncomingCapturedFrame(CreateFrame(2, 640, 360)); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); EXPECT_EQ(static_cast(kEncoderLimits360p.min_bitrate_bps), @@ -2460,6 +2720,183 @@ TEST_F(VideoStreamEncoderTest, SinkWantsRotationApplied) { video_stream_encoder_->Stop(); } +TEST_F(VideoStreamEncoderTest, SinkWantsDefaultUnlimitedBeforeFirstFrame) { + ASSERT_TRUE(video_source_.has_sinks()); + EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants()); + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, + FrameRateLimitPropagatedToSinkWantsBeforeFirstFrame) { + ASSERT_THAT(video_encoder_config_.simulcast_layers, SizeIs(1)); + // Set max wanted frame rate. + video_encoder_config_.simulcast_layers[0].max_framerate = 15; + video_stream_encoder_->ConfigureEncoder(video_encoder_config_.Copy(), + kMaxPayloadLength); + EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, 15); + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, FrameRateLimitIgnoredOnInactiveStreams) { + ASSERT_THAT(video_encoder_config_.simulcast_layers, SizeIs(1)); + VideoEncoderConfig encoder_config = video_encoder_config_.Copy(); + encoder_config.simulcast_layers.push_back(encoder_config.simulcast_layers[0]); + + encoder_config.simulcast_layers[0].max_framerate = 15; + encoder_config.simulcast_layers[1].max_framerate = 30; + encoder_config.simulcast_layers[1].active = false; + + video_stream_encoder_->ConfigureEncoder(std::move(encoder_config), + kMaxPayloadLength); + EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, 15); + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, FrameRateLimitCanBeReset) { + ASSERT_THAT(video_encoder_config_.simulcast_layers, SizeIs(1)); + // Set max wanted frame rate. 
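// Sketch, not part of the patch: the selection rule the expectations above
// encode for the active (highest) simulcast layer when both an application-
// configured maximum and encoder-reported resolution limits are present:
//   layer min bitrate <- encoder limit   (kEncoderLimits270p/360p .min_bitrate_bps)
//   layer max bitrate <- configured max  (kMaxBitrateBps)
const int expected_min_bps = kEncoderLimits270p.min_bitrate_bps;  // encoder floor
const int expected_max_bps = kMaxBitrateBps;                      // app ceiling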
+ video_encoder_config_.simulcast_layers[0].max_framerate = 15; + video_stream_encoder_->ConfigureEncoder(video_encoder_config_.Copy(), + kMaxPayloadLength); + EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, 15); + + video_encoder_config_.simulcast_layers[0].max_framerate = + VideoStream().max_framerate; + video_stream_encoder_->ConfigureEncoder(video_encoder_config_.Copy(), + kMaxPayloadLength); + EXPECT_THAT(video_source_.sink_wants(), FpsUnlimited()); + + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, RequestInSinkWantsBeforeFirstFrame) { + // Use a real video stream factory or else `scale_resolution_down_to` is not + // applied correctly. + video_encoder_config_.video_stream_factory = nullptr; + ConfigureEncoder(video_encoder_config_.Copy()); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); + + ASSERT_THAT(video_encoder_config_.simulcast_layers, SizeIs(1)); + video_encoder_config_.simulcast_layers[0].scale_resolution_down_to.emplace( + Resolution({.width = 320, .height = 160})); + + video_stream_encoder_->ConfigureEncoder(video_encoder_config_.Copy(), + kMaxPayloadLength); + + EXPECT_EQ(video_source_.sink_wants().requested_resolution, + VideoSinkWants::FrameSize(320, 160)); + + video_encoder_config_.simulcast_layers[0].scale_resolution_down_to->height = + 320; + video_encoder_config_.simulcast_layers[0].scale_resolution_down_to->width = + 640; + video_stream_encoder_->ConfigureEncoder(video_encoder_config_.Copy(), + kMaxPayloadLength); + + EXPECT_EQ(video_source_.sink_wants().requested_resolution, + VideoSinkWants::FrameSize(640, 320)); + + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, RequestInWrongAspectRatioWithAdapter) { + // Use a real video stream factory or else `scale_resolution_down_to` is not + // applied correctly. + video_encoder_config_.video_stream_factory = nullptr; + ConfigureEncoder(video_encoder_config_.Copy()); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); + + // Use a source that adapts resolution based on OnSinkWants. + AdaptingFrameForwarder source(&time_controller_); + source.set_adaptation_enabled(true); + video_stream_encoder_->SetSource( + &source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); + + ASSERT_THAT(video_encoder_config_.simulcast_layers, SizeIs(1)); + video_encoder_config_.simulcast_layers[0].scale_resolution_down_to = { + .width = 30, .height = 30}; + video_stream_encoder_->ConfigureEncoder(video_encoder_config_.Copy(), + kMaxPayloadLength); + + // Capture a 60x30 frame. + source.IncomingCapturedFrame(CreateFrame(1, 60, 30)); + // The 60x30 frame does not fit inside the 30x30 restrictions. + // Expect 30x15, maintaining aspect ratio. 
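// Worked arithmetic (not part of the patch) behind the 30x15 expectation just
// below: the adapter keeps the 60x30 source aspect ratio and applies the
// largest scale at which both requested dimensions (30x30) are respected
// (needs <algorithm> for std::min):
//   scale = min(30/60, 30/30) = 0.5  ->  60 * 0.5 x 30 * 0.5 = 30 x 15.
constexpr double kScale = std::min(30.0 / 60.0, 30.0 / 30.0);
static_assert(kScale == 0.5, "largest aspect-preserving scale that fits 30x30");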
+ WaitForEncodedFrame(30, 15); + + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, + EncodingActiveFlagPropagatedToSinkWantsBeforeFirstFrame) { + ASSERT_THAT(video_encoder_config_.simulcast_layers, SizeIs(1)); + video_encoder_config_.simulcast_layers[0].active = false; + + video_stream_encoder_->ConfigureEncoder(video_encoder_config_.Copy(), + kMaxPayloadLength); + EXPECT_FALSE(video_source_.sink_wants().is_active); + + video_encoder_config_.simulcast_layers[0].active = true; + video_stream_encoder_->ConfigureEncoder(video_encoder_config_.Copy(), + kMaxPayloadLength); + EXPECT_TRUE(video_source_.sink_wants().is_active); + + video_stream_encoder_->Stop(); +} + +TEST_F(VideoStreamEncoderTest, CorrectlyAdjustsAv1Bitrate) { + ResetEncoder("AV1", /*num_streams*/ 2, /*num_temporal_layers=*/2, + /*num_spatial_layers=*/1, /*screenshare*/ false, + kDefaultFramerate, + VideoStreamEncoder::BitrateAllocationCallbackType:: + kVideoLayersAllocation); + + // Let link allocation and stable bitrate be 2x the target bitrate. + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + kTargetBitrate, 2 * kTargetBitrate, 2 * kTargetBitrate, 0, 0, 0); + + video_source_.IncomingCapturedFrame( + CreateFrame(CurrentTimeMs(), codec_width_, codec_height_)); + WaitForEncodedFrame(CurrentTimeMs()); + + // Before enough data has been gathered, some default pushback is applied. + VideoEncoder::RateControlParameters rate_settings = + *fake_encoder_.GetAndResetLastRateControlSettings(); + // Allow 5% diff from target bitrate. + const double allowed_error_bps = + rate_settings.target_bitrate.get_sum_bps() * 0.05; + EXPECT_NEAR(rate_settings.bitrate.get_sum_bps(), + rate_settings.target_bitrate.get_sum_bps() / + EncoderBitrateAdjuster::kDefaultUtilizationFactor, + allowed_error_bps); + + // Insert frames until bitrate adjuster is saturated. + const TimeDelta runtime = EncoderBitrateAdjuster::kWindowSize; + const Timestamp start_time = clock()->CurrentTime(); + while (clock()->CurrentTime() - start_time < runtime) { + video_source_.IncomingCapturedFrame( + CreateFrame(CurrentTimeMs(), codec_width_, codec_height_)); + WaitForEncodedFrame(CurrentTimeMs()); + } + + // Make sure rate has been reallocated. + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + kTargetBitrate - DataRate::BitsPerSec(500), 2 * kTargetBitrate, + 2 * kTargetBitrate, 0, 0, 0); + video_source_.IncomingCapturedFrame( + CreateFrame(CurrentTimeMs(), codec_width_, codec_height_)); + WaitForEncodedFrame(CurrentTimeMs()); + + // Pushback should no longer happen. + rate_settings = *fake_encoder_.GetAndResetLastRateControlSettings(); + EXPECT_NEAR(rate_settings.bitrate.get_sum_bps(), + rate_settings.target_bitrate.get_sum_bps(), allowed_error_bps); + + video_stream_encoder_->Stop(); +} + class ResolutionAlignmentTest : public VideoStreamEncoderTest, public ::testing::WithParamInterface< @@ -2500,20 +2937,18 @@ TEST_P(ResolutionAlignmentTest, SinkWantsAlignmentApplied) { // Fill config with the scaling factor by which to reduce encoding size. 
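// Sketch, not part of the patch: the pushback arithmetic the AV1 test above
// checks. Until EncoderBitrateAdjuster has observed roughly kWindowSize worth
// of frames, the rate handed to the encoder is the target divided by
// kDefaultUtilizationFactor (its value is not shown in this diff). With a
// purely illustrative 1000 kbps target and a factor of, say, 1.2:
//   initial allocation ~= 1000 / 1.2 ~= 833 kbps, tolerance = 5% of 1000 = 50 kbps.
double ExpectedInitialAllocationBps(double target_bps, double utilization_factor) {
  return target_bps / utilization_factor;
}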
const int num_streams = scale_factors_.size(); VideoEncoderConfig config; - webrtc::VideoEncoder::EncoderInfo encoder_info; test::FillEncoderConfiguration(kVideoCodecVP8, num_streams, &config); for (int i = 0; i < num_streams; ++i) { config.simulcast_layers[i].scale_resolution_down_by = scale_factors_[i]; } - config.video_stream_factory = - rtc::make_ref_counted( - "VP8", /*max qp*/ 56, /*screencast*/ false, - /*screenshare enabled*/ false, encoder_info); + config.video_stream_factory = nullptr; video_stream_encoder_->ConfigureEncoder(std::move(config), kMaxPayloadLength); + // We can get up to 3 streams of 1280x720 resolution each in this test. Make + // available bitrate large enough to get all streams encoded. + const DataRate kAvailableBitrate = 3 * kSimulcastTargetBitrate; video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( - kSimulcastTargetBitrate, kSimulcastTargetBitrate, kSimulcastTargetBitrate, - 0, 0, 0); + kAvailableBitrate, kAvailableBitrate, kAvailableBitrate, 0, 0, 0); // Wait for all layers before triggering event. sink_.SetNumExpectedLayers(num_streams); @@ -2551,6 +2986,12 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) { const int kWidth = 1280; const int kHeight = 720; + ASSERT_EQ(video_encoder_config_.simulcast_layers.size(), 1u); + video_encoder_config_.simulcast_layers[0].width = kWidth; + video_encoder_config_.simulcast_layers[0].height = kHeight; + video_encoder_config_.simulcast_layers[0].max_framerate = kFramerateFps; + ConfigureEncoder(video_encoder_config_.Copy()); + // We rely on the automatic resolution adaptation, but we handle framerate // adaptation manually by mocking the stats proxy. video_source_.set_adaptation_enabled(true); @@ -2560,13 +3001,13 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) { kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); video_stream_encoder_->SetSource(&video_source_, webrtc::DegradationPreference::BALANCED); - EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants()); + EXPECT_THAT(video_source_.sink_wants(), WantsFps(Eq(kFramerateFps))); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); // Adapt down as far as possible. - rtc::VideoSinkWants last_wants; + VideoSinkWants last_wants; int64_t t = 1; int loop_count = 0; do { @@ -2575,7 +3016,7 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) { // Simulate the framerate we've been asked to adapt to. const int fps = std::min(kFramerateFps, last_wants.max_framerate_fps); - const int frame_interval_ms = rtc::kNumMillisecsPerSec / fps; + const int frame_interval_ms = kNumMillisecsPerSec / fps; VideoSendStream::Stats mock_stats = stats_proxy_->GetStats(); mock_stats.input_frame_rate = fps; stats_proxy_->SetMockStats(mock_stats); @@ -2611,7 +3052,7 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) { // Simulate the framerate we've been asked to adapt to. 
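// Illustration only, not part of the patch: how the balanced-mode loop below
// simulates a source that honors the requested rate. kNumMillisecsPerSec is
// 1000, so an adapted rate of, say, 15 fps spaces frames 1000 / 15 = 66 ms
// apart (integer division), and the same rate is fed back through the mocked
// stats so the next adaptation step sees the current input frame rate.
int FrameIntervalMsForFps(int fps) {
  return kNumMillisecsPerSec / fps;  // e.g. 66 at 15 fps, 33 at 30 fps.
}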
const int fps = std::min(kFramerateFps, last_wants.max_framerate_fps); - const int frame_interval_ms = rtc::kNumMillisecsPerSec / fps; + const int frame_interval_ms = kNumMillisecsPerSec / fps; VideoSendStream::Stats mock_stats = stats_proxy_->GetStats(); mock_stats.input_frame_rate = fps; stats_proxy_->SetMockStats(mock_stats); @@ -2705,7 +3146,7 @@ TEST_F(VideoStreamEncoderTest, EXPECT_FALSE(video_source_.sink_wants().target_pixel_count); EXPECT_LT(video_source_.sink_wants().max_pixel_count, kFrameWidth * kFrameHeight); - EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, kInputFps); + EXPECT_THAT(video_source_.sink_wants(), FpsUnlimited()); int pixel_count = video_source_.sink_wants().max_pixel_count; // Triggering a CPU underuse should not change the sink wants since it has @@ -2716,7 +3157,8 @@ TEST_F(VideoStreamEncoderTest, sink_.WaitForEncodedFrame(ntp_time); ntp_time += kFrameIntervalMs; EXPECT_EQ(video_source_.sink_wants().max_pixel_count, pixel_count); - EXPECT_EQ(video_source_.sink_wants().max_framerate_fps, kInputFps); + + EXPECT_THAT(video_source_.sink_wants(), FpsUnlimited()); // Change the degradation preference back. CPU underuse should not adapt since // QP is most limited. @@ -2747,7 +3189,7 @@ TEST_F(VideoStreamEncoderTest, CreateFrame(ntp_time, kFrameWidth, kFrameHeight)); sink_.WaitForEncodedFrame(ntp_time); ntp_time += kFrameIntervalMs; - EXPECT_THAT(video_source_.sink_wants(), FpsMax()); + EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants()); video_stream_encoder_->Stop(); } @@ -2779,7 +3221,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) { EXPECT_FALSE(video_source_.sink_wants().target_pixel_count); EXPECT_LT(video_source_.sink_wants().max_pixel_count, kFrameWidth * kFrameHeight); - EXPECT_EQ(kDefaultFramerate, video_source_.sink_wants().max_framerate_fps); + EXPECT_THAT(video_source_.sink_wants(), FpsUnlimited()); // Set new source, switch to maintain-resolution. test::FrameForwarder new_video_source; @@ -2792,7 +3234,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) { sink_.WaitForEncodedFrame(frame_timestamp); frame_timestamp += kFrameIntervalMs; // Initially no degradation registered. - EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(new_video_source.sink_wants(), UnlimitedSinkWants()); // Force an input frame rate to be available, or the adaptation call won't // know what framerate to adapt form. @@ -2822,7 +3264,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) { CreateFrame(frame_timestamp, kFrameWidth, kFrameWidth)); sink_.WaitForEncodedFrame(frame_timestamp); frame_timestamp += kFrameIntervalMs; - EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(new_video_source.sink_wants(), UnlimitedSinkWants()); video_stream_encoder_->TriggerCpuOveruse(); new_video_source.IncomingCapturedFrame( @@ -2831,7 +3273,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) { frame_timestamp += kFrameIntervalMs; // Still no degradation. - EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(new_video_source.sink_wants(), UnlimitedSinkWants()); // Calling SetSource with resolution scaling enabled apply the old SinkWants. 
video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated( @@ -2845,7 +3287,7 @@ TEST_F(VideoStreamEncoderTest, SinkWantsStoredByDegradationPreference) { EXPECT_LT(new_video_source.sink_wants().max_pixel_count, kFrameWidth * kFrameHeight); EXPECT_FALSE(new_video_source.sink_wants().target_pixel_count); - EXPECT_EQ(kDefaultFramerate, new_video_source.sink_wants().max_framerate_fps); + EXPECT_THAT(new_video_source.sink_wants(), FpsUnlimited()); // Calling SetSource with framerate scaling enabled apply the old SinkWants. video_stream_encoder_->SetSourceAndWaitForRestrictionsUpdated( @@ -3380,14 +3822,14 @@ TEST_F(VideoStreamEncoderTest, source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); WaitForEncodedFrame(1); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); // Trigger adapt down, expect scaled down resolution. video_stream_encoder_->TriggerCpuOveruse(); EXPECT_THAT(source.sink_wants(), - FpsMaxResolutionMatches(Lt(kWidth * kHeight))); + FpsUnlimitedResolutionMatches(Lt(kWidth * kHeight))); const int kLastMaxPixelCount = source.sink_wants().max_pixel_count; EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -3413,12 +3855,12 @@ TEST_F(VideoStreamEncoderTest, SkipsSameOrLargerAdaptDownRequest_BalancedMode) { webrtc::DegradationPreference::BALANCED); source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); sink_.WaitForEncodedFrame(1); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); // Trigger adapt down, expect scaled down resolution. video_stream_encoder_->TriggerQualityLow(); EXPECT_THAT(source.sink_wants(), - FpsMaxResolutionMatches(Lt(kWidth * kHeight))); + FpsUnlimitedResolutionMatches(Lt(kWidth * kHeight))); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); const int kLastMaxPixelCount = source.sink_wants().max_pixel_count; @@ -3444,9 +3886,9 @@ TEST_F(VideoStreamEncoderTest, SkipsSameOrLargerAdaptDownRequest_BalancedMode) { TEST_F(VideoStreamEncoderTest, FpsCountReturnsToZeroForFewerAdaptationsUpThanDown) { + const int64_t kFrameInterval150Ms = 150; const int kWidth = 640; const int kHeight = 360; - const int64_t kFrameIntervalMs = 150; video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); @@ -3456,17 +3898,17 @@ TEST_F(VideoStreamEncoderTest, video_stream_encoder_->SetSource(&source, webrtc::DegradationPreference::BALANCED); - int64_t timestamp_ms = kFrameIntervalMs; + int64_t timestamp_ms = kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(kWidth, kHeight); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect reduced fps (640x360@15fps). 
video_stream_encoder_->TriggerQualityLow(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), @@ -3477,14 +3919,14 @@ TEST_F(VideoStreamEncoderTest, // Source requests 270p, expect reduced resolution (480x270@15fps). source.OnOutputFormatRequest(480, 270); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(480, 270); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect reduced fps (480x270@10fps). video_stream_encoder_->TriggerQualityLow(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants())); @@ -3494,14 +3936,14 @@ TEST_F(VideoStreamEncoderTest, // Source requests QVGA, expect reduced resolution (320x180@10fps). source.OnOutputFormatRequest(320, 180); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(320, 180); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect reduced fps (320x180@7fps). video_stream_encoder_->TriggerQualityLow(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants())); @@ -3511,14 +3953,14 @@ TEST_F(VideoStreamEncoderTest, // Source requests VGA, expect increased resolution (640x360@7fps). source.OnOutputFormatRequest(640, 360); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt up, expect increased fps (640x360@(max-2)fps). video_stream_encoder_->TriggerQualityHigh(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants())); @@ -3528,7 +3970,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt up, expect increased fps (640x360@(max-1)fps). video_stream_encoder_->TriggerQualityHigh(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants())); @@ -3538,7 +3980,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt up, expect increased fps (640x360@maxfps). 
video_stream_encoder_->TriggerQualityHigh(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants())); @@ -3553,7 +3995,7 @@ TEST_F(VideoStreamEncoderTest, FpsCountReturnsToZeroForFewerAdaptationsUpThanDownWithTwoResources) { const int kWidth = 1280; const int kHeight = 720; - const int64_t kFrameIntervalMs = 150; + const int64_t kFrameInterval150Ms = 150; video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); @@ -3563,38 +4005,39 @@ TEST_F(VideoStreamEncoderTest, video_stream_encoder_->SetSource(&source, webrtc::DegradationPreference::BALANCED); - int64_t timestamp_ms = kFrameIntervalMs; + int64_t timestamp_ms = kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(kWidth, kHeight); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect scaled down resolution (960x540@maxfps). video_stream_encoder_->TriggerQualityLow(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), - FpsMaxResolutionMatches(Lt(kWidth * kHeight))); + FpsUnlimitedResolutionMatches(Lt(kWidth * kHeight))); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect scaled down resolution (640x360@maxfps). video_stream_encoder_->TriggerQualityLow(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(timestamp_ms); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants())); + EXPECT_THAT(source.sink_wants(), + FpsUnlimitedResolutionLt(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect reduced fps (640x360@15fps). video_stream_encoder_->TriggerQualityLow(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants())); @@ -3604,7 +4047,7 @@ TEST_F(VideoStreamEncoderTest, // Source requests QVGA, expect reduced resolution (320x180@15fps). source.OnOutputFormatRequest(320, 180); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(320, 180); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -3612,7 +4055,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt down, expect reduced fps (320x180@7fps). 
video_stream_encoder_->TriggerCpuOveruse(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants())); @@ -3625,7 +4068,7 @@ TEST_F(VideoStreamEncoderTest, // Source requests HD, expect increased resolution (640x360@7fps). source.OnOutputFormatRequest(1280, 720); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -3633,7 +4076,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt up, expect increased fps (640x360@(max-1)fps). video_stream_encoder_->TriggerCpuUnderuse(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants())); @@ -3647,7 +4090,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt up, expect increased fps (640x360@maxfps). video_stream_encoder_->TriggerQualityHigh(); video_stream_encoder_->TriggerCpuUnderuse(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants())); @@ -3661,7 +4104,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt up, expect increased resolution (960x570@maxfps). video_stream_encoder_->TriggerQualityHigh(); video_stream_encoder_->TriggerCpuUnderuse(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants())); @@ -3675,7 +4118,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt up, expect increased resolution (1280x720@maxfps). video_stream_encoder_->TriggerQualityHigh(); video_stream_encoder_->TriggerCpuUnderuse(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants())); @@ -3703,13 +4146,13 @@ TEST_F(VideoStreamEncoderTest, source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); // Trigger adapt up, expect no change. 
video_stream_encoder_->TriggerCpuUnderuse(); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -3730,13 +4173,13 @@ TEST_F(VideoStreamEncoderTest, source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); // Trigger adapt up, expect no change. video_stream_encoder_->TriggerCpuUnderuse(); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -3756,14 +4199,14 @@ TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_BalancedMode) { source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); sink_.WaitForEncodedFrame(kWidth, kHeight); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt up, expect no change. video_stream_encoder_->TriggerQualityHigh(); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -3784,14 +4227,14 @@ TEST_F(VideoStreamEncoderTest, NoChangeForInitialNormalUsage_DisabledMode) { source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); sink_.WaitForEncodedFrame(kWidth, kHeight); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt up, expect no change. 
video_stream_encoder_->TriggerQualityHigh(); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -3814,7 +4257,7 @@ TEST_F(VideoStreamEncoderTest, source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); WaitForEncodedFrame(1); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -3823,13 +4266,13 @@ TEST_F(VideoStreamEncoderTest, source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); WaitForEncodedFrame(2); EXPECT_THAT(source.sink_wants(), - FpsMaxResolutionMatches(Lt(kWidth * kHeight))); + FpsUnlimitedResolutionMatches(Lt(kWidth * kHeight))); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt up, expect no restriction. video_stream_encoder_->TriggerQualityHigh(); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -3852,14 +4295,14 @@ TEST_F(VideoStreamEncoderTest, // Expect no scaling to begin with (preference: MAINTAIN_FRAMERATE). video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); sink_.WaitForEncodedFrame(1); - EXPECT_THAT(video_source_.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants()); // Trigger adapt down, expect scaled down resolution. video_stream_encoder_->TriggerQualityLow(); video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); sink_.WaitForEncodedFrame(2); EXPECT_THAT(video_source_.sink_wants(), - FpsMaxResolutionMatches(Lt(kWidth * kHeight))); + FpsUnlimitedResolutionMatches(Lt(kWidth * kHeight))); // Enable MAINTAIN_RESOLUTION preference. test::FrameForwarder new_video_source; @@ -3869,7 +4312,7 @@ TEST_F(VideoStreamEncoderTest, // by waiting for an encoded frame. new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); sink_.WaitForEncodedFrame(3); - EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(new_video_source.sink_wants(), UnlimitedSinkWants()); // Trigger adapt down, expect reduced framerate. video_stream_encoder_->TriggerQualityLow(); @@ -3880,7 +4323,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt up, expect no restriction. video_stream_encoder_->TriggerQualityHigh(); - EXPECT_THAT(new_video_source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(new_video_source.sink_wants(), UnlimitedSinkWants()); video_stream_encoder_->Stop(); } @@ -3907,7 +4350,7 @@ TEST_F(VideoStreamEncoderTest, DoesNotScaleBelowSetResolutionLimit) { WaitForEncodedFrame(i * kFrameIntervalMs); // Trigger scale down. 
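// Schematic only, not part of the patch and not the encoder's actual code
// path: the invariant the DoesNotScaleBelowSetResolutionLimit loop maintains.
// However many downscale requests are triggered, the requested pixel budget
// never drops below the configured floor, which is what the EXPECT_GE against
// kMinPixelsPerFrame just below verifies.
int ClampPixelBudget(int proposed_max_pixels, int min_pixels_per_frame) {
  return std::max(proposed_max_pixels, min_pixels_per_frame);
}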
- rtc::VideoSinkWants last_wants = video_source_.sink_wants(); + VideoSinkWants last_wants = video_source_.sink_wants(); video_stream_encoder_->TriggerQualityLow(); EXPECT_GE(video_source_.sink_wants().max_pixel_count, kMinPixelsPerFrame); @@ -3938,7 +4381,7 @@ TEST_F(VideoStreamEncoderTest, int64_t timestamp_ms = kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -3948,7 +4391,7 @@ TEST_F(VideoStreamEncoderTest, source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), - FpsMaxResolutionMatches(Lt(kWidth * kHeight))); + FpsUnlimitedResolutionMatches(Lt(kWidth * kHeight))); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -3957,7 +4400,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -3967,7 +4410,7 @@ TEST_F(VideoStreamEncoderTest, source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), - FpsMaxResolutionMatches(Lt(kWidth * kHeight))); + FpsUnlimitedResolutionMatches(Lt(kWidth * kHeight))); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -3976,7 +4419,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(kWidth, kHeight); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -3999,7 +4442,7 @@ TEST_F(VideoStreamEncoderTest, int64_t timestamp_ms = kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(kWidth, kHeight); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4009,7 +4452,7 @@ TEST_F(VideoStreamEncoderTest, source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), - FpsMaxResolutionMatches(Lt(kWidth * kHeight))); + FpsUnlimitedResolutionMatches(Lt(kWidth * kHeight))); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4018,7 +4461,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(kWidth, 
kHeight); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4028,7 +4471,7 @@ TEST_F(VideoStreamEncoderTest, source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), - FpsMaxResolutionMatches(Lt(kWidth * kHeight))); + FpsUnlimitedResolutionMatches(Lt(kWidth * kHeight))); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4037,7 +4480,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); sink_.WaitForEncodedFrame(kWidth, kHeight); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(4, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -4081,7 +4524,8 @@ TEST_F(VideoStreamEncoderTest, AdaptUpIfBwEstimateIsHigherThanMinBitrate) { // Trigger adapt up. Higher resolution should not be requested duo to lack // of bitrate. video_stream_encoder_->TriggerQualityHigh(); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMatches(Lt(1280 * 720))); + EXPECT_THAT(source.sink_wants(), + FpsUnlimitedResolutionMatches(Lt(1280 * 720))); // Increase bitrate. video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( @@ -4092,7 +4536,7 @@ TEST_F(VideoStreamEncoderTest, AdaptUpIfBwEstimateIsHigherThanMinBitrate) { // Trigger adapt up. Higher resolution should be requested. video_stream_encoder_->TriggerQualityHigh(); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); video_stream_encoder_->Stop(); } @@ -4119,7 +4563,9 @@ TEST_F(VideoStreamEncoderTest, DropFirstFramesIfBwEstimateIsTooLow) { int64_t timestamp_ms = kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, 1280, 720)); ExpectDroppedFrame(); - EXPECT_TRUE_WAIT(source.sink_wants().max_pixel_count < 1280 * 720, 5000); + EXPECT_THAT(WaitUntil([&] { return source.sink_wants().max_pixel_count; }, + Lt(1280 * 720)), + IsRtcOk()); // Insert 720p frame. It should be downscaled and encoded. timestamp_ms += kFrameIntervalMs; @@ -4177,7 +4623,7 @@ TEST_F(BalancedDegradationTest, AdaptDownTwiceIfMinFpsDiffLtThreshold) { stats_proxy_->SetMockStats(stats); InsertFrameAndWaitForEncoded(); - EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source_.sink_wants(), UnlimitedSinkWants()); // Trigger adapt down, expect scaled down framerate and resolution, // since Fps diff (input-requested:0) < threshold. @@ -4202,7 +4648,7 @@ TEST_F(BalancedDegradationTest, AdaptDownOnceIfFpsDiffGeThreshold) { stats_proxy_->SetMockStats(stats); InsertFrameAndWaitForEncoded(); - EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source_.sink_wants(), UnlimitedSinkWants()); // Trigger adapt down, expect scaled down framerate only (640x360@24fps). // Fps diff (input-requested:1) == threshold. 
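// Usage sketch, not part of the patch: the polling idiom the
// DropFirstFramesIfBwEstimateIsTooLow hunk above switches to in place of
// EXPECT_TRUE_WAIT. WaitUntil appears to re-evaluate the lambda until the
// gmock matcher accepts the value or a default timeout elapses (the old code
// passed an explicit 5000 ms), and the outcome is then checked with IsRtcOk();
// the exact signature lives in test-support code outside this diff.
EXPECT_THAT(WaitUntil([&] { return source.sink_wants().max_pixel_count; },
                      Lt(1280 * 720)),
            IsRtcOk());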
@@ -4222,7 +4668,7 @@ TEST_F(BalancedDegradationTest, AdaptDownUsesCodecSpecificFps) { EXPECT_EQ(kVideoCodecVP8, video_encoder_config_.codec_type); InsertFrameAndWaitForEncoded(); - EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source_.sink_wants(), UnlimitedSinkWants()); // Trigger adapt down, expect scaled down framerate (640x360@22fps). video_stream_encoder_->TriggerQualityLow(); @@ -4243,7 +4689,7 @@ TEST_F(BalancedDegradationTest, NoAdaptUpIfBwEstimateIsLessThanMinBitrate) { OnBitrateUpdated(kTooLowMinBitrate); InsertFrameAndWaitForEncoded(); - EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source_.sink_wants(), UnlimitedSinkWants()); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect scaled down framerate (640x360@14fps). @@ -4325,7 +4771,7 @@ TEST_F(BalancedDegradationTest, OnBitrateUpdated(kTooLowMinResolutionBitrate); InsertFrameAndWaitForEncoded(); - EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source_.sink_wants(), UnlimitedSinkWants()); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect scaled down framerate (640x360@14fps). @@ -4382,7 +4828,7 @@ TEST_F(BalancedDegradationTest, OnBitrateUpdated(kTooLowMinBitrate); InsertFrameAndWaitForEncoded(); - EXPECT_THAT(source_.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source_.sink_wants(), UnlimitedSinkWants()); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, expect scaled down framerate (640x360@14fps). @@ -4447,7 +4893,7 @@ TEST_F(VideoStreamEncoderTest, int64_t timestamp_ms = kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -4459,7 +4905,7 @@ TEST_F(VideoStreamEncoderTest, source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), - FpsMaxResolutionMatches(Lt(kWidth * kHeight))); + FpsUnlimitedResolutionMatches(Lt(kWidth * kHeight))); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -4470,7 +4916,8 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants())); + EXPECT_THAT(source.sink_wants(), + FpsUnlimitedResolutionLt(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -4481,7 +4928,8 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants())); + EXPECT_THAT(source.sink_wants(), + FpsUnlimitedResolutionLt(source.last_wants())); 
EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -4492,8 +4940,9 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants())); - rtc::VideoSinkWants last_wants = source.sink_wants(); + EXPECT_THAT(source.sink_wants(), + FpsUnlimitedResolutionLt(source.last_wants())); + VideoSinkWants last_wants = source.sink_wants(); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -4504,7 +4953,7 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - EXPECT_THAT(source.sink_wants(), FpsMax()); + EXPECT_THAT(source.sink_wants(), FpsUnlimited()); EXPECT_EQ(source.sink_wants().max_pixel_count, last_wants.max_pixel_count); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); @@ -4516,7 +4965,8 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants())); + EXPECT_THAT(source.sink_wants(), + FpsUnlimitedResolutionGt(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -4529,7 +4979,8 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants())); + EXPECT_THAT(source.sink_wants(), + FpsUnlimitedResolutionGt(source.last_wants())); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(4, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -4542,7 +4993,8 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants())); + EXPECT_THAT(source.sink_wants(), + FpsUnlimitedResolutionGt(source.last_wants())); last_wants = source.sink_wants(); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); @@ -4566,8 +5018,9 @@ TEST_F(VideoStreamEncoderTest, timestamp_ms += kFrameIntervalMs; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants())); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), + FpsUnlimitedResolutionGt(source.last_wants())); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); 
EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(6, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -4579,20 +5032,24 @@ TEST_F(VideoStreamEncoderTest, TEST_F(VideoStreamEncoderTest, CpuLimitedHistogramIsReported) { const int kWidth = 640; const int kHeight = 360; + int64_t ntp_timestamp_ms = 123; video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) { - video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight)); - WaitForEncodedFrame(i); + video_source_.IncomingCapturedFrame( + CreateFrame(ntp_timestamp_ms, kWidth, kHeight)); + WaitForEncodedFrame(ntp_timestamp_ms); + ntp_timestamp_ms += 20; } video_stream_encoder_->TriggerCpuOveruse(); for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) { - video_source_.IncomingCapturedFrame(CreateFrame( - SendStatisticsProxy::kMinRequiredMetricsSamples + i, kWidth, kHeight)); - WaitForEncodedFrame(SendStatisticsProxy::kMinRequiredMetricsSamples + i); + video_source_.IncomingCapturedFrame( + CreateFrame(ntp_timestamp_ms, kWidth, kHeight)); + WaitForEncodedFrame(ntp_timestamp_ms); + ntp_timestamp_ms += 20; } video_stream_encoder_->Stop(); @@ -4611,13 +5068,16 @@ TEST_F(VideoStreamEncoderTest, kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); const int kWidth = 640; const int kHeight = 360; + int64_t ntp_timestamp_ms = 123; video_stream_encoder_->SetSource(&video_source_, webrtc::DegradationPreference::DISABLED); for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) { - video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight)); - WaitForEncodedFrame(i); + video_source_.IncomingCapturedFrame( + CreateFrame(ntp_timestamp_ms, kWidth, kHeight)); + WaitForEncodedFrame(ntp_timestamp_ms); + ntp_timestamp_ms += 20; } video_stream_encoder_->Stop(); @@ -4629,13 +5089,13 @@ TEST_F(VideoStreamEncoderTest, } TEST_F(VideoStreamEncoderTest, ReportsVideoBitrateAllocation) { - ResetEncoder("FAKE", 1, 1, 1, /*screenshare*/ false, + ResetEncoder("FAKE", 1, 1, 1, /*screenshare*/ false, kDefaultFramerate, VideoStreamEncoder::BitrateAllocationCallbackType:: kVideoBitrateAllocation); const int kDefaultFps = 30; const VideoBitrateAllocation expected_bitrate = - SimulcastRateAllocator(fake_encoder_.config()) + SimulcastRateAllocator(env_, fake_encoder_.config()) .Allocate(VideoBitrateAllocationParameters(kLowTargetBitrate.bps(), kDefaultFps)); @@ -4674,6 +5134,7 @@ TEST_F(VideoStreamEncoderTest, ReportsVideoBitrateAllocation) { TEST_F(VideoStreamEncoderTest, ReportsVideoLayersAllocationForVP8Simulcast) { ResetEncoder("VP8", /*num_streams*/ 2, 1, 1, /*screenshare*/ false, + kDefaultFramerate, VideoStreamEncoder::BitrateAllocationCallbackType:: kVideoLayersAllocation); @@ -4734,7 +5195,7 @@ TEST_F(VideoStreamEncoderTest, video_encoder_config.content_type = VideoEncoderConfig::ContentType::kRealtimeVideo; video_encoder_config.encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( VideoEncoder::GetDefaultVp8Settings()); for (auto& layer : video_encoder_config.simulcast_layers) { layer.num_temporal_layers = 2; @@ -4777,7 +5238,7 @@ TEST_F(VideoStreamEncoderTest, video_encoder_config.content_type = VideoEncoderConfig::ContentType::kRealtimeVideo; video_encoder_config.encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( VideoEncoder::GetDefaultVp8Settings()); for (auto& layer : 
video_encoder_config.simulcast_layers) { layer.num_temporal_layers = 2; @@ -4824,7 +5285,7 @@ TEST_F(VideoStreamEncoderTest, vp9_settings.interLayerPred = InterLayerPredMode::kOn; vp9_settings.automaticResizeOn = false; video_encoder_config.encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( vp9_settings); ConfigureEncoder(std::move(video_encoder_config), VideoStreamEncoder::BitrateAllocationCallbackType:: @@ -4877,7 +5338,7 @@ TEST_F(VideoStreamEncoderTest, vp9_settings.interLayerPred = InterLayerPredMode::kOn; vp9_settings.automaticResizeOn = false; video_encoder_config.encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( vp9_settings); ConfigureEncoder(std::move(video_encoder_config), VideoStreamEncoder::BitrateAllocationCallbackType:: @@ -4923,7 +5384,7 @@ TEST_F(VideoStreamEncoderTest, vp9_settings.interLayerPred = InterLayerPredMode::kOnKeyPic; vp9_settings.automaticResizeOn = false; video_encoder_config.encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( vp9_settings); ConfigureEncoder(std::move(video_encoder_config), VideoStreamEncoder::BitrateAllocationCallbackType:: @@ -4969,7 +5430,7 @@ TEST_F(VideoStreamEncoderTest, vp9_settings.interLayerPred = InterLayerPredMode::kOn; vp9_settings.automaticResizeOn = false; video_encoder_config.encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( vp9_settings); // Simulcast layers are used for enabling/disabling streams. video_encoder_config.simulcast_layers.resize(3); @@ -5026,7 +5487,7 @@ TEST_F(VideoStreamEncoderTest, vp9_settings.interLayerPred = InterLayerPredMode::kOn; vp9_settings.automaticResizeOn = false; video_encoder_config.encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( vp9_settings); // Simulcast layers are used for enabling/disabling streams. video_encoder_config.simulcast_layers.resize(3); @@ -5076,7 +5537,7 @@ TEST_F(VideoStreamEncoderTest, vp9_settings.interLayerPred = InterLayerPredMode::kOn; vp9_settings.automaticResizeOn = false; video_encoder_config.encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( vp9_settings); // Simulcast layers are used for enabling/disabling streams. video_encoder_config.simulcast_layers.resize(3); @@ -5109,7 +5570,7 @@ TEST_F(VideoStreamEncoderTest, } TEST_F(VideoStreamEncoderTest, ReportsVideoLayersAllocationForH264) { - ResetEncoder("H264", 1, 1, 1, false, + ResetEncoder("H264", 1, 1, 1, false, kDefaultFramerate, VideoStreamEncoder::BitrateAllocationCallbackType:: kVideoLayersAllocation); video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( @@ -5137,6 +5598,7 @@ TEST_F(VideoStreamEncoderTest, ReportsVideoLayersAllocationForH264) { TEST_F(VideoStreamEncoderTest, ReportsUpdatedVideoLayersAllocationWhenBweChanges) { ResetEncoder("VP8", /*num_streams*/ 2, 1, 1, /*screenshare*/ false, + kDefaultFramerate, VideoStreamEncoder::BitrateAllocationCallbackType:: kVideoLayersAllocation); @@ -5175,6 +5637,7 @@ TEST_F(VideoStreamEncoderTest, TEST_F(VideoStreamEncoderTest, ReportsUpdatedVideoLayersAllocationWhenResolutionChanges) { ResetEncoder("VP8", /*num_streams*/ 2, 1, 1, /*screenshare*/ false, + kDefaultFramerate, VideoStreamEncoder::BitrateAllocationCallbackType:: kVideoLayersAllocation); @@ -5213,6 +5676,7 @@ TEST_F(VideoStreamEncoderTest, TemporalLayersNotDisabledIfSupported) { // 2 TLs configured, temporal layers supported by encoder. 
const int kNumTemporalLayers = 2; ResetEncoder("VP8", 1, kNumTemporalLayers, 1, /*screenshare*/ false, + kDefaultFramerate, VideoStreamEncoder::BitrateAllocationCallbackType:: kVideoBitrateAllocation); fake_encoder_.SetTemporalLayersSupported(0, true); @@ -5237,6 +5701,7 @@ TEST_F(VideoStreamEncoderTest, TemporalLayersNotDisabledIfSupported) { TEST_F(VideoStreamEncoderTest, TemporalLayersDisabledIfNotSupported) { // 2 TLs configured, temporal layers not supported by encoder. ResetEncoder("VP8", 1, /*num_temporal_layers*/ 2, 1, /*screenshare*/ false, + kDefaultFramerate, VideoStreamEncoder::BitrateAllocationCallbackType:: kVideoBitrateAllocation); fake_encoder_.SetTemporalLayersSupported(0, false); @@ -5260,6 +5725,7 @@ TEST_F(VideoStreamEncoderTest, VerifyBitrateAllocationForTwoStreams) { // 2 TLs configured, temporal layers only supported for first stream. ResetEncoder("VP8", 2, /*num_temporal_layers*/ 2, 1, /*screenshare*/ false, + kDefaultFramerate, VideoStreamEncoder::BitrateAllocationCallbackType:: kVideoBitrateAllocation); fake_encoder_.SetTemporalLayersSupported(0, true); @@ -5382,7 +5848,7 @@ TEST_F(VideoStreamEncoderTest, // Reconfigure the encoder with a new (higher max framerate), max fps should // still respect the adaptation. video_encoder_config.simulcast_layers[0].max_framerate = kHighFramerate; - source.IncomingCapturedFrame(CreateFrame(1, kFrameWidth, kFrameHeight)); + source.IncomingCapturedFrame(CreateFrame(2, kFrameWidth, kFrameHeight)); video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config), kMaxPayloadLength); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); @@ -5466,8 +5932,10 @@ TEST_F(VideoStreamEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) { ExpectDroppedFrame(); // Expect the sink_wants to specify a scaled frame. - EXPECT_TRUE_WAIT( - video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000); + EXPECT_THAT( + WaitUntil([&] { return video_source_.sink_wants().max_pixel_count; }, + Lt(kWidth * kHeight)), + IsRtcOk()); int last_pixel_count = video_source_.sink_wants().max_pixel_count; @@ -5478,8 +5946,10 @@ TEST_F(VideoStreamEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) { // Expect to drop this frame, the wait should time out. ExpectDroppedFrame(); - EXPECT_TRUE_WAIT( - video_source_.sink_wants().max_pixel_count < last_pixel_count, 5000); + EXPECT_THAT( + WaitUntil([&] { return video_source_.sink_wants().max_pixel_count; }, + Lt(last_pixel_count)), + IsRtcOk()); video_stream_encoder_->Stop(); } @@ -5589,8 +6059,10 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenBweDrops) { ExpectDroppedFrame(); // Expect the sink_wants to specify a scaled frame. 
- EXPECT_TRUE_WAIT( - video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000); + EXPECT_THAT( + WaitUntil([&] { return video_source_.sink_wants().max_pixel_count; }, + Lt(kWidth * kHeight)), + IsRtcOk()); video_stream_encoder_->Stop(); } @@ -5638,13 +6110,9 @@ TEST_F(VideoStreamEncoderTest, TEST_F(VideoStreamEncoderTest, InitialFrameDropAccountsForResolutionScaling) { VideoEncoderConfig video_encoder_config; - webrtc::VideoEncoder::EncoderInfo encoder_info; test::FillEncoderConfiguration(PayloadStringToCodecType("VP8"), 1, &video_encoder_config); - video_encoder_config.video_stream_factory = - rtc::make_ref_counted( - "VP8", /*max qp*/ 56, /*screencast*/ false, - /*screenshare enabled*/ false, encoder_info); + video_encoder_config.video_stream_factory = nullptr; for (auto& layer : video_encoder_config.simulcast_layers) { layer.num_temporal_layers = 1; layer.max_framerate = kDefaultFramerate; @@ -5675,14 +6143,18 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropAccountsForResolutionScaling) { } TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenLayersChange) { - const DataRate kLowTargetBitrate = DataRate::KilobitsPerSec(400); // Set simulcast. ResetEncoder("VP8", 3, 1, 1, false); fake_encoder_.SetQualityScaling(true); const int kWidth = 1280; const int kHeight = 720; video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( - kLowTargetBitrate, kLowTargetBitrate, kLowTargetBitrate, 0, 0, 0); + /*target_bitrate=*/DataRate::KilobitsPerSec(400), + /*stable_target_bitrate=*/DataRate::KilobitsPerSec(400), + /*link_allocation=*/DataRate::KilobitsPerSec(400), + /*fraction_lost=*/0, + /*round_trip_time_ms=*/0, + /*cwnd_reduce_ratio=*/0); video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); // Frame should not be dropped. WaitForEncodedFrame(1); @@ -5690,13 +6162,9 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenLayersChange) { // Trigger QVGA "singlecast" // Update the config. VideoEncoderConfig video_encoder_config; - webrtc::VideoEncoder::EncoderInfo encoder_info; test::FillEncoderConfiguration(PayloadStringToCodecType("VP8"), 3, &video_encoder_config); - video_encoder_config.video_stream_factory = - rtc::make_ref_counted( - "VP8", /*max qp*/ 56, /*screencast*/ false, - /*screenshare enabled*/ false, encoder_info); + video_encoder_config.video_stream_factory = nullptr; for (auto& layer : video_encoder_config.simulcast_layers) { layer.num_temporal_layers = 1; layer.max_framerate = kDefaultFramerate; @@ -5731,20 +6199,26 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenLayersChange) { ExpectDroppedFrame(); // Expect the sink_wants to specify a scaled frame. - EXPECT_TRUE_WAIT( - video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000); + EXPECT_THAT( + WaitUntil([&] { return video_source_.sink_wants().max_pixel_count; }, + Lt(kWidth * kHeight)), + IsRtcOk()); video_stream_encoder_->Stop(); } TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenSVCLayersChange) { - const DataRate kLowTargetBitrate = DataRate::KilobitsPerSec(400); // Set simulcast. 
ResetEncoder("VP9", 1, 1, 3, false); fake_encoder_.SetQualityScaling(true); const int kWidth = 1280; const int kHeight = 720; video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( - kLowTargetBitrate, kLowTargetBitrate, kLowTargetBitrate, 0, 0, 0); + /*target_bitrate=*/DataRate::KilobitsPerSec(400), + /*stable_target_bitrate=*/DataRate::KilobitsPerSec(400), + /*link_allocation=*/DataRate::KilobitsPerSec(400), + /*fraction_lost=*/0, + /*round_trip_time_ms=*/0, + /*cwnd_reduce_ratio=*/0); video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); // Frame should not be dropped. WaitForEncodedFrame(1); @@ -5759,7 +6233,7 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenSVCLayersChange) { // Since only one layer is active - automatic resize should be enabled. vp9_settings.automaticResizeOn = true; video_encoder_config.encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( vp9_settings); video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrate.bps(); video_encoder_config.content_type = @@ -5794,8 +6268,10 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenSVCLayersChange) { ExpectDroppedFrame(); // Expect the sink_wants to specify a scaled frame. - EXPECT_TRUE_WAIT( - video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000); + EXPECT_THAT( + WaitUntil([&] { return video_source_.sink_wants().max_pixel_count; }, + Lt(kWidth * kHeight)), + IsRtcOk()); video_stream_encoder_->Stop(); } @@ -5818,7 +6294,7 @@ TEST_F(VideoStreamEncoderTest, // Since only one layer is active - automatic resize should be enabled. vp9_settings.automaticResizeOn = true; video_encoder_config.encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( vp9_settings); video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrate.bps(); video_encoder_config.content_type = @@ -5873,7 +6349,7 @@ TEST_F(VideoStreamEncoderTest, // Since only one layer is active - automatic resize should be enabled. vp9_settings.automaticResizeOn = true; video_encoder_config.encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( vp9_settings); video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrate.bps(); video_encoder_config.content_type = @@ -5888,7 +6364,7 @@ TEST_F(VideoStreamEncoderTest, kMaxPayloadLength); // The default bitrate limits for 360p should be used. - const absl::optional kLimits360p = + const std::optional kLimits360p = EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( kVideoCodecVP9, 640 * 360); video_source_.IncomingCapturedFrame(CreateFrame(1, 1280, 720)); @@ -5905,7 +6381,7 @@ TEST_F(VideoStreamEncoderTest, fake_encoder_.config().spatialLayers[0].maxBitrate * 1000); // The default bitrate limits for 270p should be used. - const absl::optional kLimits270p = + const std::optional kLimits270p = EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( kVideoCodecVP9, 480 * 270); video_source_.IncomingCapturedFrame(CreateFrame(2, 960, 540)); @@ -5935,7 +6411,7 @@ TEST_F(VideoStreamEncoderTest, DefaultMaxAndMinBitratesNotUsedIfDisabled) { // Since only one layer is active - automatic resize should be enabled. 
vp9_settings.automaticResizeOn = true; video_encoder_config.encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( vp9_settings); video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrate.bps(); video_encoder_config.content_type = @@ -5953,7 +6429,7 @@ TEST_F(VideoStreamEncoderTest, DefaultMaxAndMinBitratesNotUsedIfDisabled) { kMaxPayloadLength); // The default bitrate limits for 360p should not be used. - const absl::optional kLimits360p = + const std::optional kLimits360p = EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( kVideoCodecVP9, 640 * 360); video_source_.IncomingCapturedFrame(CreateFrame(1, 1280, 720)); @@ -5975,7 +6451,7 @@ TEST_F(VideoStreamEncoderTest, SinglecastBitrateLimitsNotUsedForOneStream) { /*num_spatial_layers=*/1, /*screenshare=*/false); // The default singlecast bitrate limits for 720p should not be used. - const absl::optional kLimits720p = + const std::optional kLimits720p = EncoderInfoSettings::GetDefaultSinglecastBitrateLimitsForResolution( kVideoCodecVP9, 1280 * 720); video_source_.IncomingCapturedFrame(CreateFrame(1, 1280, 720)); @@ -6009,7 +6485,7 @@ TEST_F(VideoStreamEncoderTest, // Since only one layer is active - automatic resize should be enabled. vp9_settings.automaticResizeOn = true; video_encoder_config.encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( vp9_settings); video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrate.bps(); video_encoder_config.content_type = @@ -6040,35 +6516,117 @@ TEST_F(VideoStreamEncoderTest, video_stream_encoder_->Stop(); } -TEST_F(VideoStreamEncoderTest, - InitialFrameDropActivatesWhenResolutionIncreases) { +enum class FrameResolutionChangeMethod { + MODIFY_SOURCE, + MODIFY_SCALE_RESOLUTION_DOWN_TO, + MODIFY_SCALE_RESOLUTION_DOWN_BY, +}; +class VideoStreamEncoderInitialFrameDropperTest + : public VideoStreamEncoderTest, + public ::testing::WithParamInterface { + public: + VideoStreamEncoderInitialFrameDropperTest() + : VideoStreamEncoderTest(), frame_resolution_change_method_(GetParam()) {} + + void SetUp() override { + VideoStreamEncoderTest::SetUp(); + switch (frame_resolution_change_method_) { + case FrameResolutionChangeMethod::MODIFY_SOURCE: + break; + case FrameResolutionChangeMethod::MODIFY_SCALE_RESOLUTION_DOWN_TO: + video_encoder_config_.video_stream_factory = nullptr; + captureWidth = kWidth; + captureHeight = kHeight; + break; + case FrameResolutionChangeMethod::MODIFY_SCALE_RESOLUTION_DOWN_BY: + captureWidth = kWidth; + captureHeight = kHeight; + break; + } + } + + void SetEncoderFrameSize(int width, int height) { + switch (frame_resolution_change_method_) { + case FrameResolutionChangeMethod::MODIFY_SOURCE: + captureWidth = width; + captureHeight = height; + break; + case FrameResolutionChangeMethod::MODIFY_SCALE_RESOLUTION_DOWN_TO: + ASSERT_THAT(video_encoder_config_.simulcast_layers, SizeIs(1)); + video_encoder_config_.simulcast_layers[0] + .scale_resolution_down_to.emplace( + Resolution({.width = width, .height = height})); + video_stream_encoder_->ConfigureEncoder(video_encoder_config_.Copy(), + kMaxPayloadLength); + break; + case FrameResolutionChangeMethod::MODIFY_SCALE_RESOLUTION_DOWN_BY: + ASSERT_THAT(video_encoder_config_.simulcast_layers, SizeIs(1)); + double scale_height = + static_cast(kHeight) / static_cast(height); + double scale_width = + static_cast(kWidth) / static_cast(width); + video_encoder_config_.simulcast_layers[0].scale_resolution_down_by = + std::max(scale_width, scale_height); + 
video_stream_encoder_->ConfigureEncoder(video_encoder_config_.Copy(), + kMaxPayloadLength); + break; + } + } + const int kWidth = 640; const int kHeight = 360; + int captureWidth = 0; + int captureHeight = 0; + + protected: + const FrameResolutionChangeMethod frame_resolution_change_method_; +}; + +INSTANTIATE_TEST_SUITE_P( + VideoStreamEncoderInitialFrameDropperTest, + VideoStreamEncoderInitialFrameDropperTest, + ::testing::Values( + FrameResolutionChangeMethod::MODIFY_SOURCE, + FrameResolutionChangeMethod::MODIFY_SCALE_RESOLUTION_DOWN_TO, + FrameResolutionChangeMethod::MODIFY_SCALE_RESOLUTION_DOWN_BY)); + +TEST_P(VideoStreamEncoderInitialFrameDropperTest, + InitialFrameDropActivatesWhenResolutionIncreases) { + SetEncoderFrameSize(kWidth / 2, kHeight / 2); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); - video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth / 2, kHeight / 2)); + video_source_.IncomingCapturedFrame( + CreateFrame(1, captureWidth, captureHeight)); // Frame should not be dropped. WaitForEncodedFrame(1); video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kLowTargetBitrate, kLowTargetBitrate, kLowTargetBitrate, 0, 0, 0); - video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth / 2, kHeight / 2)); + video_source_.IncomingCapturedFrame( + CreateFrame(2, captureWidth, captureHeight)); // Frame should not be dropped, bitrate not too low for frame. WaitForEncodedFrame(2); // Incoming resolution increases. - video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); + SetEncoderFrameSize(kWidth, kHeight); + + video_source_.IncomingCapturedFrame( + CreateFrame(3, captureWidth, captureHeight)); // Expect to drop this frame, bitrate too low for frame. ExpectDroppedFrame(); // Expect the sink_wants to specify a scaled frame. - EXPECT_TRUE_WAIT( - video_source_.sink_wants().max_pixel_count < kWidth * kHeight, 5000); + EXPECT_THAT( + WaitUntil([&] { return video_source_.sink_wants().max_pixel_count; }, + Lt(kWidth * kHeight)), + IsRtcOk()); video_stream_encoder_->Stop(); } -TEST_F(VideoStreamEncoderTest, InitialFrameDropIsNotReactivatedWhenAdaptingUp) { +TEST_P(VideoStreamEncoderInitialFrameDropperTest, + InitialFrameDropIsNotReactivatedWhenAdaptingUp) { const int kWidth = 640; const int kHeight = 360; // So that quality scaling doesn't happen by itself. @@ -6081,9 +6639,14 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropIsNotReactivatedWhenAdaptingUp) { int timestamp = 1; + // By using the `scale_resolution_down_to` API, ReconfigureEncoder() gets + // triggered from VideoStreamEncoder::OnVideoSourceRestrictionsUpdated(). + SetEncoderFrameSize(kWidth, kHeight); + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); - source.IncomingCapturedFrame(CreateFrame(timestamp, kWidth, kHeight)); + source.IncomingCapturedFrame( + CreateFrame(timestamp, captureWidth, captureHeight)); WaitForEncodedFrame(timestamp); timestamp += 9000; // Long pause to disable all first BWE drop logic. 
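The VideoStreamEncoderInitialFrameDropperTest fixture introduced above is a value-parameterized suite: each FrameResolutionChangeMethod enumerator selects a different way of changing the encoded size (resizing the captured frame, setting scale_resolution_down_to, or deriving scale_resolution_down_by as max(kWidth/width, kHeight/height)), and INSTANTIATE_TEST_SUITE_P runs every TEST_P body once per enumerator. A minimal, generic GoogleTest sketch of the same pattern follows; it is not WebRTC code, and the ScaleMethod enum and EffectiveScale() helper are invented for illustration.

#include <algorithm>
#include <gtest/gtest.h>

// Hypothetical stand-ins for the three resolution-change strategies.
enum class ScaleMethod { kModifySource, kScaleDownTo, kScaleDownBy };

class ScaleMethodTest : public ::testing::TestWithParam<ScaleMethod> {
 protected:
  // Mirrors the scale_resolution_down_by computation above: pick the factor
  // that makes a 640x360 capture fit the requested width x height.
  double EffectiveScale(int width, int height) const {
    return std::max(640.0 / width, 360.0 / height);
  }
};

TEST_P(ScaleMethodTest, HalvingBothDimensionsGivesFactorTwo) {
  // The body runs once per enumerator; GetParam() selects the strategy.
  if (GetParam() == ScaleMethod::kScaleDownBy) {
    EXPECT_DOUBLE_EQ(EffectiveScale(320, 180), 2.0);
  } else {
    SUCCEED();  // The other strategies change the frame size directly.
  }
}

INSTANTIATE_TEST_SUITE_P(AllScaleMethods,
                         ScaleMethodTest,
                         ::testing::Values(ScaleMethod::kModifySource,
                                           ScaleMethod::kScaleDownTo,
                                           ScaleMethod::kScaleDownBy));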
@@ -6091,7 +6654,8 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropIsNotReactivatedWhenAdaptingUp) { video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kLowTargetBitrate, kLowTargetBitrate, kLowTargetBitrate, 0, 0, 0); - source.IncomingCapturedFrame(CreateFrame(timestamp, kWidth, kHeight)); + source.IncomingCapturedFrame( + CreateFrame(timestamp, captureWidth, captureHeight)); // Not dropped frame, as initial frame drop is disabled by now. WaitForEncodedFrame(timestamp); timestamp += 9000; @@ -6101,11 +6665,13 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropIsNotReactivatedWhenAdaptingUp) { video_stream_encoder_->TriggerQualityLow(); // Adaptation has an effect. - EXPECT_TRUE_WAIT(source.sink_wants().max_pixel_count < kWidth * kHeight, - 5000); + EXPECT_THAT(WaitUntil([&] { return source.sink_wants().max_pixel_count; }, + Lt(kWidth * kHeight)), + IsRtcOk()); // Frame isn't dropped as initial frame dropper is disabled. - source.IncomingCapturedFrame(CreateFrame(timestamp, kWidth, kHeight)); + source.IncomingCapturedFrame( + CreateFrame(timestamp, captureWidth, captureHeight)); WaitForEncodedFrame(timestamp); timestamp += 9000; AdvanceTime(TimeDelta::Millis(100)); @@ -6114,17 +6680,19 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropIsNotReactivatedWhenAdaptingUp) { video_stream_encoder_->TriggerQualityHigh(); // Adaptation has an effect. - EXPECT_TRUE_WAIT(source.sink_wants().max_pixel_count > kWidth * kHeight, - 5000); + EXPECT_THAT(WaitUntil([&] { return source.sink_wants().max_pixel_count; }, + Gt(kWidth * kHeight)), + IsRtcOk()); - source.IncomingCapturedFrame(CreateFrame(timestamp, kWidth, kHeight)); + source.IncomingCapturedFrame( + CreateFrame(timestamp, captureWidth, captureHeight)); // Frame should not be dropped, as initial framedropper is off. WaitForEncodedFrame(timestamp); video_stream_encoder_->Stop(); } -TEST_F(VideoStreamEncoderTest, +TEST_P(VideoStreamEncoderInitialFrameDropperTest, FrameDroppedWhenResolutionIncreasesAndLinkAllocationIsLow) { const int kMinStartBps360p = 222000; fake_encoder_.SetResolutionBitrateLimits( @@ -6137,22 +6705,31 @@ TEST_F(VideoStreamEncoderTest, DataRate::BitsPerSec(kMinStartBps360p - 1), // stable_target_bitrate DataRate::BitsPerSec(kMinStartBps360p - 1), // link_allocation 0, 0, 0); + + SetEncoderFrameSize(kWidth / 2, kHeight / 2); + // Frame should not be dropped, bitrate not too low for frame. - video_source_.IncomingCapturedFrame(CreateFrame(1, 320, 180)); + video_source_.IncomingCapturedFrame( + CreateFrame(1, captureWidth, captureHeight)); WaitForEncodedFrame(1); // Incoming resolution increases, initial frame drop activates. // Frame should be dropped, link allocation too low for frame. - video_source_.IncomingCapturedFrame(CreateFrame(2, 640, 360)); + SetEncoderFrameSize(kWidth, kHeight); + + video_source_.IncomingCapturedFrame( + CreateFrame(2, captureWidth, captureHeight)); ExpectDroppedFrame(); // Expect sink_wants to specify a scaled frame. 
- EXPECT_TRUE_WAIT(video_source_.sink_wants().max_pixel_count < 640 * 360, - 5000); + EXPECT_THAT( + WaitUntil([&] { return video_source_.sink_wants().max_pixel_count; }, + Lt(640 * 360)), + IsRtcOk()); video_stream_encoder_->Stop(); } -TEST_F(VideoStreamEncoderTest, +TEST_P(VideoStreamEncoderInitialFrameDropperTest, FrameNotDroppedWhenResolutionIncreasesAndLinkAllocationIsHigh) { const int kMinStartBps360p = 222000; fake_encoder_.SetResolutionBitrateLimits( @@ -6165,94 +6742,28 @@ TEST_F(VideoStreamEncoderTest, DataRate::BitsPerSec(kMinStartBps360p - 1), // stable_target_bitrate DataRate::BitsPerSec(kMinStartBps360p), // link_allocation 0, 0, 0); + + const int kWidth = 640; + const int kHeight = 360; + + SetEncoderFrameSize(kWidth / 2, kHeight / 2); + // Frame should not be dropped, bitrate not too low for frame. - video_source_.IncomingCapturedFrame(CreateFrame(1, 320, 180)); + video_source_.IncomingCapturedFrame( + CreateFrame(1, captureWidth, captureHeight)); WaitForEncodedFrame(1); // Incoming resolution increases, initial frame drop activates. + SetEncoderFrameSize(kWidth, kHeight); + // Frame should be dropped, link allocation not too low for frame. - video_source_.IncomingCapturedFrame(CreateFrame(2, 640, 360)); + video_source_.IncomingCapturedFrame( + CreateFrame(2, captureWidth, captureHeight)); WaitForEncodedFrame(2); video_stream_encoder_->Stop(); } -TEST_F(VideoStreamEncoderTest, RampsUpInQualityWhenBwIsHigh) { - webrtc::test::ScopedKeyValueConfig field_trials( - field_trials_, - "WebRTC-Video-QualityRampupSettings/" - "min_pixels:921600,min_duration_ms:2000/"); - - const int kWidth = 1280; - const int kHeight = 720; - const int kFps = 10; - max_framerate_ = kFps; - - // Reset encoder for field trials to take effect. - VideoEncoderConfig config = video_encoder_config_.Copy(); - config.max_bitrate_bps = kTargetBitrate.bps(); - DataRate max_bitrate = DataRate::BitsPerSec(config.max_bitrate_bps); - ConfigureEncoder(std::move(config)); - fake_encoder_.SetQp(kQpLow); - - // Enable MAINTAIN_FRAMERATE preference. - AdaptingFrameForwarder source(&time_controller_); - source.set_adaptation_enabled(true); - video_stream_encoder_->SetSource(&source, - DegradationPreference::MAINTAIN_FRAMERATE); - - // Start at low bitrate. - const DataRate kLowBitrate = DataRate::KilobitsPerSec(200); - video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( - kLowBitrate, kLowBitrate, kLowBitrate, 0, 0, 0); - - // Expect first frame to be dropped and resolution to be limited. - const int64_t kFrameIntervalMs = 1000 / kFps; - int64_t timestamp_ms = kFrameIntervalMs; - source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); - ExpectDroppedFrame(); - EXPECT_TRUE_WAIT(source.sink_wants().max_pixel_count < kWidth * kHeight, - 5000); - - // Increase bitrate to encoder max. - video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( - max_bitrate, max_bitrate, max_bitrate, 0, 0, 0); - - // Insert frames and advance `min_duration_ms`. - const int64_t start_bw_high_ms = CurrentTimeMs(); - for (size_t i = 1; i <= 10; i++) { - timestamp_ms += kFrameIntervalMs; - source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); - WaitForEncodedFrame(timestamp_ms); - } - - // Advance to `min_duration_ms` - 1, frame should not trigger high BW. 
- int64_t elapsed_bw_high_ms = CurrentTimeMs() - start_bw_high_ms; - AdvanceTime(TimeDelta::Millis(2000 - elapsed_bw_high_ms - 1)); - timestamp_ms += kFrameIntervalMs; - source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); - WaitForEncodedFrame(timestamp_ms); - EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); - EXPECT_LT(source.sink_wants().max_pixel_count, kWidth * kHeight); - - // Frame should trigger high BW and release quality limitation. - timestamp_ms += kFrameIntervalMs; - source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); - WaitForEncodedFrame(timestamp_ms); - // The ramp-up code involves the adaptation queue, give it time to execute. - // TODO(hbos): Can we await an appropriate event instead? - video_stream_encoder_->WaitUntilTaskQueueIsIdle(); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); - - // Frame should not be adapted. - timestamp_ms += kFrameIntervalMs; - source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); - WaitForEncodedFrame(kWidth, kHeight); - EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); - - video_stream_encoder_->Stop(); -} - TEST_F(VideoStreamEncoderTest, QualityScalerAdaptationsRemovedWhenQualityScalingDisabled) { webrtc::test::ScopedKeyValueConfig field_trials( @@ -6266,10 +6777,10 @@ TEST_F(VideoStreamEncoderTest, fake_encoder_.SetQp(kQpHigh + 1); const int kWidth = 1280; const int kHeight = 720; - const int64_t kFrameIntervalMs = 100; + const int64_t kFrameInterval100Ms = 100; int64_t timestamp_ms = kFrameIntervalMs; for (size_t i = 1; i <= 100; i++) { - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval100Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); } @@ -6277,9 +6788,12 @@ TEST_F(VideoStreamEncoderTest, // for the first time. // TODO(eshr): We should avoid these waits by using threads with simulated // time. - EXPECT_TRUE_WAIT(stats_proxy_->GetStats().bw_limited_resolution, - 2000 * 2.5 * 2); - timestamp_ms += kFrameIntervalMs; + EXPECT_THAT( + WaitUntil([&] { return stats_proxy_->GetStats().bw_limited_resolution; }, + IsTrue(), + {.timeout = webrtc::TimeDelta::Millis(2000 * 2.5 * 2)}), + IsRtcOk()); + timestamp_ms += kFrameInterval100Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); @@ -6319,7 +6833,7 @@ TEST_F(VideoStreamEncoderTest, source.IncomingCapturedFrame(CreateFrame(1, kTooSmallWidth, kTooSmallHeight)); WaitForEncodedFrame(1); video_stream_encoder_->TriggerCpuOveruse(); - EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); + EXPECT_THAT(source.sink_wants(), UnlimitedSinkWants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -6507,7 +7021,7 @@ TEST_F(VideoStreamEncoderTest, TEST_F(VideoStreamEncoderTest, DoesntAdaptDownPastMinFramerate) { const int kFramerateFps = 5; - const int kFrameIntervalMs = rtc::kNumMillisecsPerSec / kFramerateFps; + const int kFrameInterval5FpsInMs = kNumMillisecsPerSec / kFramerateFps; const int kFrameWidth = 1280; const int kFrameHeight = 720; @@ -6524,7 +7038,7 @@ TEST_F(VideoStreamEncoderTest, DoesntAdaptDownPastMinFramerate) { int64_t timestamp_ms = CurrentTimeMs(); // Trigger overuse as much as we can. 
- rtc::VideoSinkWants last_wants; + VideoSinkWants last_wants; do { last_wants = video_source_.sink_wants(); @@ -6535,8 +7049,8 @@ TEST_F(VideoStreamEncoderTest, DoesntAdaptDownPastMinFramerate) { if (video_source_.last_sent_width()) { sink_.WaitForEncodedFrame(timestamp_ms); } - timestamp_ms += kFrameIntervalMs; - AdvanceTime(TimeDelta::Millis(kFrameIntervalMs)); + timestamp_ms += kFrameInterval5FpsInMs; + AdvanceTime(TimeDelta::Millis(kFrameInterval5FpsInMs)); } // ...and then try to adapt again. video_stream_encoder_->TriggerCpuOveruse(); @@ -6553,8 +7067,13 @@ TEST_F(VideoStreamEncoderTest, AdaptsResolutionAndFramerateForLowQuality_BalancedMode) { const int kWidth = 1280; const int kHeight = 720; - const int64_t kFrameIntervalMs = 150; - int64_t timestamp_ms = kFrameIntervalMs; + const int64_t kFrameInterval150Ms = 150; + int64_t timestamp_ms = kFrameInterval150Ms; + ASSERT_EQ(video_encoder_config_.simulcast_layers.size(), 1u); + video_encoder_config_.simulcast_layers[0].width = kWidth; + video_encoder_config_.simulcast_layers[0].height = kHeight; + video_encoder_config_.simulcast_layers[0].max_framerate = kDefaultFramerate; + ConfigureEncoder(video_encoder_config_.Copy()); video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); @@ -6563,7 +7082,7 @@ TEST_F(VideoStreamEncoderTest, source.set_adaptation_enabled(true); video_stream_encoder_->SetSource(&source, webrtc::DegradationPreference::BALANCED); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); @@ -6573,7 +7092,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt down, expect scaled down resolution (960x540@30fps). video_stream_encoder_->TriggerQualityLow(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), @@ -6584,7 +7103,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt down, expect scaled down resolution (640x360@30fps). video_stream_encoder_->TriggerQualityLow(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants())); @@ -6594,7 +7113,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt down, expect reduced fps (640x360@15fps). video_stream_encoder_->TriggerQualityLow(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants())); @@ -6604,7 +7123,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt down, expect scaled down resolution (480x270@15fps). video_stream_encoder_->TriggerQualityLow(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsEqResolutionLt(source.last_wants())); @@ -6614,7 +7133,7 @@ TEST_F(VideoStreamEncoderTest, // Restrict bitrate, trigger adapt down, expect reduced fps (480x270@10fps). 
video_stream_encoder_->TriggerQualityLow(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants())); @@ -6624,7 +7143,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt down, expect scaled down resolution (320x180@10fps). video_stream_encoder_->TriggerQualityLow(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsEqResolutionLt(source.last_wants())); @@ -6634,18 +7153,18 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt down, expect reduced fps (320x180@7fps). video_stream_encoder_->TriggerQualityLow(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants())); - rtc::VideoSinkWants last_wants = source.sink_wants(); + VideoSinkWants last_wants = source.sink_wants(); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_framerate); EXPECT_EQ(7, stats_proxy_->GetStats().number_of_quality_adapt_changes); // Trigger adapt down, min resolution reached, expect no change. video_stream_encoder_->TriggerQualityLow(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(last_wants)); @@ -6655,7 +7174,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt up, expect increased fps (320x180@10fps). video_stream_encoder_->TriggerQualityHigh(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants())); @@ -6665,7 +7184,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt up, expect upscaled resolution (480x270@10fps). video_stream_encoder_->TriggerQualityHigh(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants())); @@ -6675,7 +7194,7 @@ TEST_F(VideoStreamEncoderTest, // Increase bitrate, trigger adapt up, expect increased fps (480x270@15fps). video_stream_encoder_->TriggerQualityHigh(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants())); @@ -6685,7 +7204,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt up, expect upscaled resolution (640x360@15fps). 
video_stream_encoder_->TriggerQualityHigh(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants())); @@ -6695,7 +7214,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt up, expect increased fps (640x360@30fps). video_stream_encoder_->TriggerQualityHigh(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsMax()); @@ -6707,7 +7226,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt up, expect upscaled resolution (960x540@30fps). video_stream_encoder_->TriggerQualityHigh(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants())); @@ -6717,7 +7236,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger adapt up, expect no restriction (1280x720fps@30fps). video_stream_encoder_->TriggerQualityHigh(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants())); @@ -6737,8 +7256,13 @@ TEST_F(VideoStreamEncoderTest, TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) { const int kWidth = 1280; const int kHeight = 720; - const int64_t kFrameIntervalMs = 150; - int64_t timestamp_ms = kFrameIntervalMs; + const int64_t kFrameInterval150Ms = 150; + int64_t timestamp_ms = kFrameInterval150Ms; + ASSERT_EQ(video_encoder_config_.simulcast_layers.size(), 1u); + video_encoder_config_.simulcast_layers[0].width = kWidth; + video_encoder_config_.simulcast_layers[0].height = kHeight; + video_encoder_config_.simulcast_layers[0].max_framerate = kDefaultFramerate; + ConfigureEncoder(video_encoder_config_.Copy()); video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); @@ -6747,7 +7271,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) { source.set_adaptation_enabled(true); video_stream_encoder_->SetSource(&source, webrtc::DegradationPreference::BALANCED); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); @@ -6760,7 +7284,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) { // Trigger cpu adapt down, expect scaled down resolution (960x540@30fps). video_stream_encoder_->TriggerCpuOveruse(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), @@ -6774,7 +7298,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) { // Trigger cpu adapt down, expect scaled down resolution (640x360@30fps). 
video_stream_encoder_->TriggerCpuOveruse(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsMaxResolutionLt(source.last_wants())); @@ -6787,7 +7311,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) { // Trigger quality adapt down, expect reduced fps (640x360@15fps). video_stream_encoder_->TriggerQualityLow(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsLtResolutionEq(source.last_wants())); @@ -6804,7 +7328,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) { // change then last_wants() is not updated. auto previous_sink_wants = source.sink_wants(); video_stream_encoder_->TriggerCpuUnderuse(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(previous_sink_wants)); @@ -6814,7 +7338,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) { // Trigger quality adapt up, expect increased fps (640x360@30fps). video_stream_encoder_->TriggerQualityHigh(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsGtResolutionEq(source.last_wants())); @@ -6829,7 +7353,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) { // expect increased resolution (960x540@30fps). video_stream_encoder_->TriggerQualityHigh(); video_stream_encoder_->TriggerCpuUnderuse(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants())); @@ -6844,7 +7368,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) { // expect no restriction (1280x720fps@30fps). 
video_stream_encoder_->TriggerQualityHigh(); video_stream_encoder_->TriggerCpuUnderuse(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); EXPECT_THAT(source.sink_wants(), FpsMaxResolutionGt(source.last_wants())); @@ -6870,8 +7394,13 @@ TEST_F(VideoStreamEncoderTest, const int kWidth = 640; const int kHeight = 360; const int kFpsLimit = 15; - const int64_t kFrameIntervalMs = 150; - int64_t timestamp_ms = kFrameIntervalMs; + const int64_t kFrameInterval150Ms = 150; + ASSERT_EQ(video_encoder_config_.simulcast_layers.size(), 1u); + video_encoder_config_.simulcast_layers[0].width = kWidth; + video_encoder_config_.simulcast_layers[0].height = kHeight; + video_encoder_config_.simulcast_layers[0].max_framerate = kDefaultFramerate; + ConfigureEncoder(video_encoder_config_.Copy()); + int64_t timestamp_ms = kFrameInterval150Ms; video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); @@ -6880,7 +7409,7 @@ TEST_F(VideoStreamEncoderTest, source.set_adaptation_enabled(true); video_stream_encoder_->SetSource(&source, webrtc::DegradationPreference::BALANCED); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(kWidth, kHeight); EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); @@ -6893,7 +7422,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger cpu adapt down, expect scaled down framerate (640x360@15fps). video_stream_encoder_->TriggerCpuOveruse(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsMatchesResolutionMax(Eq(kFpsLimit))); @@ -6906,7 +7435,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger quality adapt down, expect scaled down resolution (480x270@15fps). video_stream_encoder_->TriggerQualityLow(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsEqResolutionLt(source.last_wants())); @@ -6923,7 +7452,7 @@ TEST_F(VideoStreamEncoderTest, // Store current sink wants since we expect no change and if there is no // change then last_wants() is not updated. video_stream_encoder_->TriggerCpuUnderuse(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsEqResolutionEqTo(previous_sink_wants)); @@ -6933,7 +7462,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger quality adapt up, expect upscaled resolution (640x360@15fps). video_stream_encoder_->TriggerQualityHigh(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsEqResolutionGt(source.last_wants())); @@ -6947,7 +7476,7 @@ TEST_F(VideoStreamEncoderTest, // Trigger quality and cpu adapt up, expect increased fps (640x360@30fps).
video_stream_encoder_->TriggerQualityHigh(); video_stream_encoder_->TriggerCpuUnderuse(); - timestamp_ms += kFrameIntervalMs; + timestamp_ms += kFrameInterval150Ms; source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight)); WaitForEncodedFrame(timestamp_ms); EXPECT_THAT(source.sink_wants(), FpsMaxResolutionMax()); @@ -6975,6 +7504,7 @@ TEST_F(VideoStreamEncoderTest, AcceptsFullHdAdaptedDownSimulcastFrames) { // 2/3 of 1080. const int kAdaptedFrameHeight = 720; const int kFramerate = 24; + uint64_t ntp_time_ms = 123; video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); @@ -6984,7 +7514,7 @@ TEST_F(VideoStreamEncoderTest, AcceptsFullHdAdaptedDownSimulcastFrames) { video_encoder_config.simulcast_layers[0].max_framerate = kFramerate; video_encoder_config.max_bitrate_bps = kTargetBitrate.bps(); video_encoder_config.video_stream_factory = - rtc::make_ref_counted(); + make_ref_counted(); video_stream_encoder_->ConfigureEncoder(std::move(video_encoder_config), kMaxPayloadLength); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); @@ -6992,13 +7522,14 @@ TEST_F(VideoStreamEncoderTest, AcceptsFullHdAdaptedDownSimulcastFrames) { video_source_.set_adaptation_enabled(true); video_source_.IncomingCapturedFrame( - CreateFrame(1, kFrameWidth, kFrameHeight)); + CreateFrame(ntp_time_ms, kFrameWidth, kFrameHeight)); WaitForEncodedFrame(kFrameWidth, kFrameHeight); // Trigger CPU overuse, downscale by 3/4. video_stream_encoder_->TriggerCpuOveruse(); + ntp_time_ms += 1000 / kFramerate; video_source_.IncomingCapturedFrame( - CreateFrame(2, kFrameWidth, kFrameHeight)); + CreateFrame(ntp_time_ms, kFrameWidth, kFrameHeight)); WaitForEncodedFrame(kAdaptedFrameWidth, kAdaptedFrameHeight); video_stream_encoder_->Stop(); @@ -7059,7 +7590,7 @@ TEST_F(VideoStreamEncoderTest, PeriodicallyUpdatesChannelParameters) { TEST_F(VideoStreamEncoderTest, DoesNotUpdateBitrateAllocationWhenSuspended) { const int kFrameWidth = 1280; const int kFrameHeight = 720; - ResetEncoder("FAKE", 1, 1, 1, false, + ResetEncoder("FAKE", 1, 1, 1, false, kDefaultFramerate, VideoStreamEncoder::BitrateAllocationCallbackType:: kVideoBitrateAllocation); @@ -7174,17 +7705,22 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWhenEncoderOvershoots) { const int kFrameWidth = 320; const int kFrameHeight = 240; const int kFps = 30; - const DataRate kTargetBitrate = DataRate::KilobitsPerSec(120); const int kNumFramesInRun = kFps * 5; // Runs of five seconds. video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( - kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); + /*target_bitrate=*/DataRate::KilobitsPerSec(120), + /*stable_target_bitrate=*/DataRate::KilobitsPerSec(120), + /*link_allocation=*/DataRate::KilobitsPerSec(120), + /*fraction_lost=*/0, + /*round_trip_time_ms=*/0, + /*cwnd_reduce_ratio=*/0); int64_t timestamp_ms = CurrentTimeMs(); max_framerate_ = kFps; // Insert 3 seconds of video, verify number of drops with normal bitrate. fake_encoder_.SimulateOvershoot(1.0); + video_stream_encoder_->WaitUntilTaskQueueIsIdle(); int num_dropped = 0; for (int i = 0; i < kNumFramesInRun; ++i) { video_source_.IncomingCapturedFrame( @@ -7207,8 +7743,7 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWhenEncoderOvershoots) { // of video, verify number of drops. Rate needs to be slightly changed in // order to force the rate to be reconfigured. 
double overshoot_factor = 2.0; - const RateControlSettings trials = - RateControlSettings::ParseFromFieldTrials(); + const RateControlSettings trials(env_.field_trials()); if (trials.UseEncoderBitrateAdjuster()) { // With bitrate adjuster, we need to overshoot even more to trigger // frame dropping since the adjuster will try to just lower the target @@ -7219,10 +7754,7 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWhenEncoderOvershoots) { overshoot_factor = 3.0; } fake_encoder_.SimulateOvershoot(overshoot_factor); - video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( - kTargetBitrate + DataRate::KilobitsPerSec(1), - kTargetBitrate + DataRate::KilobitsPerSec(1), - kTargetBitrate + DataRate::KilobitsPerSec(1), 0, 0, 0); + video_stream_encoder_->WaitUntilTaskQueueIsIdle(); num_dropped = 0; for (int i = 0; i < kNumFramesInRun; ++i) { video_source_.IncomingCapturedFrame( @@ -7235,9 +7767,6 @@ TEST_F(VideoStreamEncoderTest, DropsFramesWhenEncoderOvershoots) { timestamp_ms += 1000 / kFps; } - video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( - kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); - // Target framerate should still be near the expected target, despite // the frame drops. EXPECT_NEAR(fake_encoder_.GetLastFramerate(), kFps, 1); @@ -7252,14 +7781,18 @@ TEST_F(VideoStreamEncoderTest, ConfiguresCorrectFrameRate) { const int kFrameWidth = 320; const int kFrameHeight = 240; const int kActualInputFps = 24; - const DataRate kTargetBitrate = DataRate::KilobitsPerSec(120); ASSERT_GT(max_framerate_, kActualInputFps); int64_t timestamp_ms = CurrentTimeMs(); max_framerate_ = kActualInputFps; video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( - kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); + /*target_bitrate=*/DataRate::KilobitsPerSec(120), + /*stable_target_bitrate=*/DataRate::KilobitsPerSec(120), + /*link_allocation=*/DataRate::KilobitsPerSec(120), + /*fraction_lost=*/0, + /*round_trip_time_ms=*/0, + /*cwnd_reduce_ratio=*/0); // Insert 3 seconds of video, with an input fps lower than configured max. for (int i = 0; i < kActualInputFps * 3; ++i) { @@ -7435,11 +7968,14 @@ TEST_F(VideoStreamEncoderTest, RewritesH264BitstreamWithNonOptimalSps) { TEST_F(VideoStreamEncoderTest, CopiesVideoFrameMetadataAfterDownscale) { const int kFrameWidth = 1280; const int kFrameHeight = 720; - const DataRate kTargetBitrate = - DataRate::KilobitsPerSec(300); // Too low for HD resolution. video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( - kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); + /*target_bitrate=*/DataRate::KilobitsPerSec(300), + /*stable_target_bitrate=*/DataRate::KilobitsPerSec(300), + /*link_allocation=*/DataRate::KilobitsPerSec(300), + /*fraction_lost=*/0, + /*round_trip_time_ms=*/0, + /*cwnd_reduce_ratio=*/0); video_stream_encoder_->WaitUntilTaskQueueIsIdle(); // Insert a first video frame.
It should be dropped because of downscale in @@ -7634,7 +8170,7 @@ TEST_F(VideoStreamEncoderTest, EncoderSelectorResolutionSwitch) { ConfigureEncoder(video_encoder_config_.Copy()); EXPECT_CALL(encoder_selector, OnResolutionChange(RenderResolution(640, 480))) - .WillOnce(Return(absl::nullopt)); + .WillOnce(Return(std::nullopt)); EXPECT_CALL(encoder_selector, OnResolutionChange(RenderResolution(320, 240))) .WillOnce(Return(SdpVideoFormat("AV1"))); EXPECT_CALL(switch_callback, @@ -7691,7 +8227,7 @@ TEST_F(VideoStreamEncoderTest, EncoderSelectorBrokenEncoderSwitch) { ON_CALL(encoder_selector, OnEncoderBroken) .WillByDefault(Return(SdpVideoFormat("AV2"))); - rtc::Event encode_attempted; + Event encode_attempted; EXPECT_CALL(switch_callback, RequestEncoderSwitch(Field(&SdpVideoFormat::name, "AV2"), /*allow_default_fallback=*/true)) @@ -7735,7 +8271,7 @@ TEST_F(VideoStreamEncoderTest, SwitchEncoderOnInitFailureWithEncoderSelector) { ON_CALL(encoder_selector, OnEncoderBroken) .WillByDefault(Return(SdpVideoFormat("AV2"))); - rtc::Event encode_attempted; + Event encode_attempted; EXPECT_CALL(switch_callback, RequestEncoderSwitch(Field(&SdpVideoFormat::name, "AV2"), /*allow_default_fallback=*/true)) @@ -7777,10 +8313,8 @@ TEST_F(VideoStreamEncoderTest, ON_CALL(video_encoder, InitEncode(_, _)) .WillByDefault(Return(WEBRTC_VIDEO_CODEC_ENCODER_FAILURE)); - rtc::Event encode_attempted; - EXPECT_CALL(switch_callback, - RequestEncoderSwitch(Field(&SdpVideoFormat::name, "VP8"), - /*allow_default_fallback=*/true)) + Event encode_attempted; + EXPECT_CALL(switch_callback, RequestEncoderFallback()) .WillOnce([&encode_attempted]() { encode_attempted.Set(); }); video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); @@ -7828,7 +8362,7 @@ TEST_F(VideoStreamEncoderTest, NullEncoderReturnSwitch) { /*cwnd_reduce_ratio=*/0); ON_CALL(encoder_selector, OnEncoderBroken) .WillByDefault(Return(SdpVideoFormat("AV2"))); - rtc::Event encode_attempted; + Event encode_attempted; EXPECT_CALL(switch_callback, RequestEncoderSwitch(Field(&SdpVideoFormat::name, "AV2"), /*allow_default_fallback=*/_)) @@ -7848,6 +8382,115 @@ TEST_F(VideoStreamEncoderTest, NullEncoderReturnSwitch) { video_stream_encoder_.reset(); } +TEST_F(VideoStreamEncoderTest, NoPreferenceDefaultFallbackToVP8Disabled) { + constexpr int kSufficientBitrateToNotDrop = 1000; + constexpr int kDontCare = 100; + constexpr int kNumFrames = 8; + + NiceMock video_encoder; + StrictMock switch_callback; + video_send_config_.encoder_settings.encoder_switch_request_callback = + &switch_callback; + auto encoder_factory = std::make_unique( + &video_encoder, /*encoder_selector=*/nullptr); + video_send_config_.encoder_settings.encoder_factory = encoder_factory.get(); + + // Reset encoder for new configuration to take effect. + ConfigureEncoder(video_encoder_config_.Copy()); + + // The VideoStreamEncoder needs some bitrate before it can start encoding, + // setting some bitrate so that subsequent calls to WaitForEncodedFrame does + // not fail. 
+ video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + /*target_bitrate=*/DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop), + /*stable_target_bitrate=*/ + DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop), + /*link_allocation=*/DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop), + /*fraction_lost=*/0, + /*round_trip_time_ms=*/0, + /*cwnd_reduce_ratio=*/0); + + EXPECT_CALL(video_encoder, Encode) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_ENCODER_FAILURE)); + + EXPECT_CALL(switch_callback, RequestEncoderFallback()); + + VideoFrame frame = CreateFrame(1, kDontCare, kDontCare); + for (int i = 0; i < kNumFrames; ++i) { + int64_t timestamp_ms = CurrentTimeMs(); + frame.set_ntp_time_ms(timestamp_ms); + frame.set_timestamp_us(timestamp_ms * 1000); + video_source_.IncomingCapturedFrame(frame); + time_controller_.AdvanceTime(TimeDelta::Millis(33)); + } + + ASSERT_THAT(WaitUntil([&] { return fake_encoder_.GetNumEncodes() == 0; }, + ::testing::IsTrue()), + IsRtcOk()); + + // After requesting fallback failure, the encoder will be released. + EXPECT_CALL(video_encoder, Release()).Times(1); + + AdvanceTime(TimeDelta::Zero()); + video_stream_encoder_->Stop(); + // The encoders produced by the VideoEncoderProxyFactory have a pointer back + // to its factory, so in order for the encoder instance in the + // `video_stream_encoder_` to be destroyed before the `encoder_factory` we + // reset the `video_stream_encoder_` here. + video_stream_encoder_.reset(); +} + +TEST_F(VideoStreamEncoderTest, NoPreferenceDefaultFallbackToVP8Enabled) { + constexpr int kSufficientBitrateToNotDrop = 1000; + constexpr int kDontCare = 100; + + webrtc::test::ScopedKeyValueConfig field_trials( + field_trials_, + "WebRTC-SwitchEncoderFollowCodecPreferenceOrder/Disabled/"); + + NiceMock video_encoder; + StrictMock switch_callback; + video_send_config_.encoder_settings.encoder_switch_request_callback = + &switch_callback; + auto encoder_factory = std::make_unique( + &video_encoder, /*encoder_selector=*/nullptr); + video_send_config_.encoder_settings.encoder_factory = encoder_factory.get(); + video_encoder_config_.codec_type = kVideoCodecVP9; + + // Reset encoder for new configuration to take effect. + ConfigureEncoder(video_encoder_config_.Copy()); + + // The VideoStreamEncoder needs some bitrate before it can start encoding, + // setting some bitrate so that subsequent calls to WaitForEncodedFrame does + // not fail. + video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( + /*target_bitrate=*/DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop), + /*stable_target_bitrate=*/ + DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop), + /*link_allocation=*/DataRate::KilobitsPerSec(kSufficientBitrateToNotDrop), + /*fraction_lost=*/0, + /*round_trip_time_ms=*/0, + /*cwnd_reduce_ratio=*/0); + + EXPECT_CALL(video_encoder, Encode) + .WillOnce(Return(WEBRTC_VIDEO_CODEC_ENCODER_FAILURE)); + + // Fallback request will ask for a switch to VP8. + EXPECT_CALL(switch_callback, + RequestEncoderSwitch(Field(&SdpVideoFormat::name, "VP8"), + /*allow_default_fallback=*/true)); + + VideoFrame frame = CreateFrame(1, kDontCare, kDontCare); + video_source_.IncomingCapturedFrame(frame); + + video_stream_encoder_->Stop(); + // The encoders produced by the VideoEncoderProxyFactory have a pointer back + // to its factory, so in order for the encoder instance in the + // `video_stream_encoder_` to be destroyed before the `encoder_factory` we + // reset the `video_stream_encoder_` here.
+ video_stream_encoder_.reset(); +} + TEST_F(VideoStreamEncoderTest, AllocationPropagatedToEncoderWhenTargetRateChanged) { const int kFrameWidth = 320; @@ -7923,63 +8566,6 @@ TEST_F(VideoStreamEncoderTest, video_stream_encoder_->Stop(); } -TEST_F(VideoStreamEncoderTest, AutomaticAnimationDetection) { - test::ScopedKeyValueConfig field_trials( - field_trials_, - "WebRTC-AutomaticAnimationDetectionScreenshare/" - "enabled:true,min_fps:20,min_duration_ms:1000,min_area_ratio:0.8/"); - const int kFramerateFps = 30; - const int kWidth = 1920; - const int kHeight = 1080; - const int kNumFrames = 2 * kFramerateFps; // >1 seconds of frames. - // Works on screenshare mode. - ResetEncoder("VP8", 1, 1, 1, /*screenshare*/ true); - // We rely on the automatic resolution adaptation, but we handle framerate - // adaptation manually by mocking the stats proxy. - video_source_.set_adaptation_enabled(true); - - // BALANCED degradation preference is required for this feature. - video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( - kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0); - video_stream_encoder_->SetSource(&video_source_, - webrtc::DegradationPreference::BALANCED); - EXPECT_THAT(video_source_.sink_wants(), UnlimitedSinkWants()); - - VideoFrame frame = CreateFrame(1, kWidth, kHeight); - frame.set_update_rect(VideoFrame::UpdateRect{0, 0, kWidth, kHeight}); - - // Pass enough frames with the full update to trigger animation detection. - for (int i = 0; i < kNumFrames; ++i) { - int64_t timestamp_ms = CurrentTimeMs(); - frame.set_ntp_time_ms(timestamp_ms); - frame.set_timestamp_us(timestamp_ms * 1000); - video_source_.IncomingCapturedFrame(frame); - WaitForEncodedFrame(timestamp_ms); - } - - // Resolution should be limited. - rtc::VideoSinkWants expected; - expected.max_framerate_fps = kFramerateFps; - expected.max_pixel_count = 1280 * 720 + 1; - EXPECT_THAT(video_source_.sink_wants(), FpsEqResolutionLt(expected)); - - // Pass one frame with no known update. - // Resolution cap should be removed immediately. - int64_t timestamp_ms = CurrentTimeMs(); - frame.set_ntp_time_ms(timestamp_ms); - frame.set_timestamp_us(timestamp_ms * 1000); - frame.clear_update_rect(); - - video_source_.IncomingCapturedFrame(frame); - WaitForEncodedFrame(timestamp_ms); - - // Resolution should be unlimited now. - EXPECT_THAT(video_source_.sink_wants(), - FpsMatchesResolutionMax(Eq(kFramerateFps))); - - video_stream_encoder_->Stop(); -} - TEST_F(VideoStreamEncoderTest, ConfiguresVp9SvcAtOddResolutions) { const int kWidth = 720; // 540p adapted down. 
const int kHeight = 405; @@ -8012,16 +8598,12 @@ TEST_F(VideoStreamEncoderTest, EncoderResetAccordingToParameterChange) { const int number_layers = sizeof(downscale_factors) / sizeof(downscale_factors[0]); VideoEncoderConfig config; - webrtc::VideoEncoder::EncoderInfo encoder_info; test::FillEncoderConfiguration(kVideoCodecVP8, number_layers, &config); for (int i = 0; i < number_layers; ++i) { config.simulcast_layers[i].scale_resolution_down_by = downscale_factors[i]; config.simulcast_layers[i].active = true; } - config.video_stream_factory = - rtc::make_ref_counted( - "VP8", /*max qp*/ 56, /*screencast*/ false, - /*screenshare enabled*/ false, encoder_info); + config.video_stream_factory = nullptr; video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kSimulcastTargetBitrate, kSimulcastTargetBitrate, kSimulcastTargetBitrate, 0, 0, 0); @@ -8114,7 +8696,7 @@ TEST_F(VideoStreamEncoderTest, EncoderResolutionsExposedInSinglecast) { video_stream_encoder_->WaitUntilTaskQueueIsIdle(); EXPECT_THAT(video_source_.sink_wants().resolutions, ::testing::ElementsAreArray( - {rtc::VideoSinkWants::FrameSize(kFrameWidth, kFrameHeight)})); + {VideoSinkWants::FrameSize(kFrameWidth, kFrameHeight)})); video_stream_encoder_->Stop(); } @@ -8130,24 +8712,20 @@ TEST_F(VideoStreamEncoderTest, EncoderResolutionsExposedInSimulcast) { const float kDownscaleFactors[] = {8.0, 4.0, 2.0}; const int kFrameWidth = 1280; const int kFrameHeight = 720; - const rtc::VideoSinkWants::FrameSize kLayer0Size( + const VideoSinkWants::FrameSize kLayer0Size( kFrameWidth / kDownscaleFactors[0], kFrameHeight / kDownscaleFactors[0]); - const rtc::VideoSinkWants::FrameSize kLayer1Size( + const VideoSinkWants::FrameSize kLayer1Size( kFrameWidth / kDownscaleFactors[1], kFrameHeight / kDownscaleFactors[1]); - const rtc::VideoSinkWants::FrameSize kLayer2Size( + const VideoSinkWants::FrameSize kLayer2Size( kFrameWidth / kDownscaleFactors[2], kFrameHeight / kDownscaleFactors[2]); VideoEncoderConfig config; - webrtc::VideoEncoder::EncoderInfo encoder_info; test::FillEncoderConfiguration(kVideoCodecVP8, kNumSimulcastLayers, &config); for (size_t i = 0; i < kNumSimulcastLayers; ++i) { config.simulcast_layers[i].scale_resolution_down_by = kDownscaleFactors[i]; config.simulcast_layers[i].active = true; } - config.video_stream_factory = - rtc::make_ref_counted( - "VP8", /*max qp*/ 56, /*screencast*/ false, - /*screenshare enabled*/ false, encoder_info); + config.video_stream_factory = nullptr; video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kSimulcastTargetBitrate, kSimulcastTargetBitrate, kSimulcastTargetBitrate, 0, 0, 0); @@ -8267,6 +8845,8 @@ TEST_F(VideoStreamEncoderTest, QpAbsentParsingDisabled_QpAbsent) { TEST_F(VideoStreamEncoderTest, QualityScalingNotAllowed_QualityScalingDisabled) { VideoEncoderConfig video_encoder_config = video_encoder_config_.Copy(); + video_encoder_config.simulcast_layers[0].max_framerate = + -1; // No max frame rate. // Disable scaling settings in encoder info. fake_encoder_.SetQualityScaling(false); @@ -8419,12 +8999,13 @@ TEST_F(VideoStreamEncoderTest, EncoderDoesnotProvideLimitsWhenQPIsNotTrusted) { // Set QP not trusted in encoder info. 
fake_encoder_.SetIsQpTrusted(false); - absl::optional suitable_bitrate_limit = + std::optional suitable_bitrate_limit = EncoderInfoSettings:: GetSinglecastBitrateLimitForResolutionWhenQpIsUntrusted( codec_width_ * codec_height_, EncoderInfoSettings:: - GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted()); + GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted( + kVideoCodecH264)); EXPECT_TRUE(suitable_bitrate_limit.has_value()); const int max_encoder_bitrate = suitable_bitrate_limit->max_bitrate_bps; @@ -8460,12 +9041,13 @@ TEST_F(VideoStreamEncoderTest, // Set QP not trusted in encoder info. fake_encoder_.SetIsQpTrusted(false); - absl::optional suitable_bitrate_limit = + std::optional suitable_bitrate_limit = EncoderInfoSettings:: GetSinglecastBitrateLimitForResolutionWhenQpIsUntrusted( codec_width_ * codec_height_, EncoderInfoSettings:: - GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted()); + GetDefaultSinglecastBitrateLimitsWhenQpIsUntrusted( + kVideoCodecH264)); EXPECT_TRUE(suitable_bitrate_limit.has_value()); const int max_encoder_bitrate = suitable_bitrate_limit->max_bitrate_bps; @@ -8500,9 +9082,10 @@ TEST_F(VideoStreamEncoderTest, } TEST_F(VideoStreamEncoderTest, NormalComplexityWithMoreThanTwoCores) { - ResetEncoder("VP9", /*num_stream=*/1, /*num_temporal_layers=*/1, + ResetEncoder("VP9", /*num_streams=*/1, /*num_temporal_layers=*/1, /*num_spatial_layers=*/1, - /*screenshare=*/false, /*allocation_callback_type=*/ + /*screenshare=*/false, + kDefaultFramerate, /*allocation_callback_type=*/ VideoStreamEncoder::BitrateAllocationCallbackType:: kVideoBitrateAllocationWhenScreenSharing, /*num_cores=*/3); @@ -8522,9 +9105,10 @@ TEST_F(VideoStreamEncoderTest, webrtc::test::ScopedKeyValueConfig field_trials( field_trials_, "WebRTC-VP9-LowTierOptimizations/Disabled/"); - ResetEncoder("VP9", /*num_stream=*/1, /*num_temporal_layers=*/1, + ResetEncoder("VP9", /*num_streams=*/1, /*num_temporal_layers=*/1, /*num_spatial_layers=*/1, - /*screenshare=*/false, /*allocation_callback_type=*/ + /*screenshare=*/false, + kDefaultFramerate, /*allocation_callback_type=*/ VideoStreamEncoder::BitrateAllocationCallbackType:: kVideoBitrateAllocationWhenScreenSharing, /*num_cores=*/2); @@ -8540,9 +9124,10 @@ TEST_F(VideoStreamEncoderTest, } TEST_F(VideoStreamEncoderTest, LowComplexityWithTwoCores) { - ResetEncoder("VP9", /*num_stream=*/1, /*num_temporal_layers=*/1, + ResetEncoder("VP9", /*num_streams=*/1, /*num_temporal_layers=*/1, /*num_spatial_layers=*/1, - /*screenshare=*/false, /*allocation_callback_type=*/ + /*screenshare=*/false, + kDefaultFramerate, /*allocation_callback_type=*/ VideoStreamEncoder::BitrateAllocationCallbackType:: kVideoBitrateAllocationWhenScreenSharing, /*num_cores=*/2); @@ -8707,7 +9292,7 @@ TEST_F(VideoStreamEncoderTest, RequestsRefreshFrameAfterEarlyDroppedNativeFrame) { // Send a native frame before encoder rates have been set. The encoder is // seen as paused at this time. 
- rtc::Event frame_destroyed_event; + Event frame_destroyed_event; video_source_.IncomingCapturedFrame(CreateFakeNativeFrame( /*ntp_time_ms=*/1, &frame_destroyed_event, codec_width_, codec_height_)); @@ -8742,7 +9327,7 @@ TEST_F(VideoStreamEncoderTest, RecreatesEncoderWhenEnableVp9SpatialLayer) { vp9_settings.interLayerPred = InterLayerPredMode::kOn; vp9_settings.automaticResizeOn = false; video_encoder_config.encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( vp9_settings); video_encoder_config.spatial_layers = GetSvcConfig(1280, 720, /*fps=*/30.0, @@ -8798,29 +9383,23 @@ class VideoStreamEncoderWithRealEncoderTest void SetUp() override { VideoStreamEncoderTest::SetUp(); + Environment env = CreateEnvironment(&field_trials_); std::unique_ptr encoder; switch (codec_type_) { case kVideoCodecVP8: - encoder = VP8Encoder::Create(); + encoder = CreateVp8Encoder(env); break; case kVideoCodecVP9: - encoder = VP9Encoder::Create(); + encoder = CreateVp9Encoder(env); break; case kVideoCodecAV1: - encoder = CreateLibaomAv1Encoder(); + encoder = CreateLibaomAv1Encoder(env); break; case kVideoCodecH264: - encoder = H264Encoder::Create(); + encoder = CreateH264Encoder(env); break; - case kVideoCodecMultiplex: - mock_encoder_factory_for_multiplex_ = - std::make_unique(); - EXPECT_CALL(*mock_encoder_factory_for_multiplex_, Die); - EXPECT_CALL(*mock_encoder_factory_for_multiplex_, CreateVideoEncoder) - .WillRepeatedly([] { return VP8Encoder::Create(); }); - encoder = std::make_unique( - mock_encoder_factory_for_multiplex_.get(), SdpVideoFormat("VP8"), - false); + case kVideoCodecH265: + // TODO(bugs.webrtc.org/13485): Use a fake encoder break; default: RTC_DCHECK_NOTREACHED(); @@ -8871,7 +9450,7 @@ TEST_P(VideoStreamEncoderWithRealEncoderTest, EncoderMapsNativeI420) { auto mappable_native_buffer = test::GetMappableNativeBufferFromVideoFrame(native_i420_frame); - std::vector> mapped_frame_buffers = + std::vector> mapped_frame_buffers = mappable_native_buffer->GetMappedFramedBuffers(); ASSERT_EQ(mapped_frame_buffers.size(), 1u); EXPECT_EQ(mapped_frame_buffers[0]->width(), codec_width_); @@ -8887,7 +9466,7 @@ TEST_P(VideoStreamEncoderWithRealEncoderTest, EncoderMapsNativeNV12) { auto mappable_native_buffer = test::GetMappableNativeBufferFromVideoFrame(native_nv12_frame); - std::vector> mapped_frame_buffers = + std::vector> mapped_frame_buffers = mappable_native_buffer->GetMappedFramedBuffers(); ASSERT_EQ(mapped_frame_buffers.size(), 1u); EXPECT_EQ(mapped_frame_buffers[0]->width(), codec_width_); @@ -8900,24 +9479,18 @@ TEST_P(VideoStreamEncoderWithRealEncoderTest, EncoderMapsNativeNV12) { } TEST_P(VideoStreamEncoderWithRealEncoderTest, HandlesLayerToggling) { - if (codec_type_ == kVideoCodecMultiplex) { - // Multiplex codec here uses wrapped mock codecs, ignore for this test. 
- return; - } - const size_t kNumSpatialLayers = 3u; const float kDownscaleFactors[] = {4.0, 2.0, 1.0}; const int kFrameWidth = 1280; const int kFrameHeight = 720; - const rtc::VideoSinkWants::FrameSize kLayer0Size( + const VideoSinkWants::FrameSize kLayer0Size( kFrameWidth / kDownscaleFactors[0], kFrameHeight / kDownscaleFactors[0]); - const rtc::VideoSinkWants::FrameSize kLayer1Size( + const VideoSinkWants::FrameSize kLayer1Size( kFrameWidth / kDownscaleFactors[1], kFrameHeight / kDownscaleFactors[1]); - const rtc::VideoSinkWants::FrameSize kLayer2Size( + const VideoSinkWants::FrameSize kLayer2Size( kFrameWidth / kDownscaleFactors[2], kFrameHeight / kDownscaleFactors[2]); VideoEncoderConfig config; - webrtc::VideoEncoder::EncoderInfo encoder_info; if (codec_type_ == VideoCodecType::kVideoCodecVP9) { test::FillEncoderConfiguration(codec_type_, 1, &config); config.max_bitrate_bps = kSimulcastTargetBitrate.bps(); @@ -8926,7 +9499,7 @@ TEST_P(VideoStreamEncoderWithRealEncoderTest, HandlesLayerToggling) { vp9_settings.numberOfTemporalLayers = 3; vp9_settings.automaticResizeOn = false; config.encoder_specific_settings = - rtc::make_ref_counted( + make_ref_counted( vp9_settings); config.spatial_layers = GetSvcConfig(kFrameWidth, kFrameHeight, /*fps=*/30.0, @@ -8967,11 +9540,7 @@ TEST_P(VideoStreamEncoderWithRealEncoderTest, HandlesLayerToggling) { } }; - config.video_stream_factory = - rtc::make_ref_counted( - CodecTypeToPayloadString(codec_type_), /*max qp*/ 56, - /*screencast*/ false, - /*screenshare enabled*/ false, encoder_info); + config.video_stream_factory = nullptr; video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources( kSimulcastTargetBitrate, kSimulcastTargetBitrate, kSimulcastTargetBitrate, 0, 0, 0); @@ -9037,8 +9606,6 @@ constexpr std::pair kVP9DisallowConversion = std::make_pair(kVideoCodecVP9, /*allow_i420_conversion=*/false); constexpr std::pair kAV1AllowConversion = std::make_pair(kVideoCodecAV1, /*allow_i420_conversion=*/false); -constexpr std::pair kMultiplexDisallowConversion = - std::make_pair(kVideoCodecMultiplex, /*allow_i420_conversion=*/false); #if defined(WEBRTC_USE_H264) constexpr std::pair kH264AllowConversion = std::make_pair(kVideoCodecH264, /*allow_i420_conversion=*/true); @@ -9052,7 +9619,6 @@ INSTANTIATE_TEST_SUITE_P( ::testing::Values(kVP8DisallowConversion, kVP9DisallowConversion, kAV1AllowConversion, - kMultiplexDisallowConversion, kH264AllowConversion), TestParametersVideoCodecAndAllowI420ConversionToString); #else @@ -9061,15 +9627,14 @@ INSTANTIATE_TEST_SUITE_P( VideoStreamEncoderWithRealEncoderTest, ::testing::Values(kVP8DisallowConversion, kVP9DisallowConversion, - kAV1AllowConversion, - kMultiplexDisallowConversion), + kAV1AllowConversion), TestParametersVideoCodecAndAllowI420ConversionToString); #endif -class ReconfigureEncoderTest : public VideoStreamEncoderTest { +class ReconfigureSimulcastEncoderTest : public VideoStreamEncoderTest { protected: - void RunTest(const std::vector& configs, - const int expected_num_init_encode) { + void RunSimulcastTest(const std::vector>& configs, + const int expected_num_init_encode) { ConfigureEncoder(configs[0]); OnBitrateUpdated(kTargetBitrate); InsertFrameAndWaitForEncoded(); @@ -9090,17 +9655,18 @@ class ReconfigureEncoderTest : public VideoStreamEncoderTest { video_stream_encoder_->Stop(); } - void ConfigureEncoder(const VideoStream& stream) { + void ConfigureEncoder(const std::vector& streams) { VideoEncoderConfig config; - webrtc::VideoEncoder::EncoderInfo encoder_info; - - 
test::FillEncoderConfiguration(kVideoCodecVP8, /*num_streams=*/1, &config); - config.max_bitrate_bps = stream.max_bitrate_bps; - config.simulcast_layers[0] = stream; - config.video_stream_factory = - rtc::make_ref_counted( - /*codec_name=*/"VP8", /*max_qp=*/0, /*is_screenshare=*/false, - /*conference_mode=*/false, encoder_info); + test::FillEncoderConfiguration(kVideoCodecVP8, + /*num_streams=*/streams.size(), &config); + auto highest_bitrate_stream = + absl::c_max_element(streams, [](const auto& a, const auto& b) { + return a.max_bitrate_bps < b.max_bitrate_bps; + }); + config.max_bitrate_bps = highest_bitrate_stream->max_bitrate_bps; + config.simulcast_layers = streams; + config.number_of_streams = streams.size(); + config.video_stream_factory = nullptr; video_stream_encoder_->ConfigureEncoder(std::move(config), kMaxPayloadLength); } @@ -9118,20 +9684,23 @@ class ReconfigureEncoderTest : public VideoStreamEncoderTest { } void ExpectEqual(const VideoCodec& actual, - const VideoStream& expected) const { - EXPECT_EQ(actual.numberOfSimulcastStreams, 1); - EXPECT_EQ(actual.simulcastStream[0].maxFramerate, expected.max_framerate); - EXPECT_EQ(actual.simulcastStream[0].minBitrate * 1000, - static_cast(expected.min_bitrate_bps)); - EXPECT_EQ(actual.simulcastStream[0].maxBitrate * 1000, - static_cast(expected.max_bitrate_bps)); - EXPECT_EQ(actual.simulcastStream[0].width, - kWidth / expected.scale_resolution_down_by); - EXPECT_EQ(actual.simulcastStream[0].height, - kHeight / expected.scale_resolution_down_by); - EXPECT_EQ(actual.simulcastStream[0].numberOfTemporalLayers, - expected.num_temporal_layers); - EXPECT_EQ(actual.GetScalabilityMode(), expected.scalability_mode); + const std::vector& expected) const { + EXPECT_EQ(actual.numberOfSimulcastStreams, expected.size()); + for (size_t i = 0; i < actual.numberOfSimulcastStreams; ++i) { + EXPECT_EQ(actual.simulcastStream[i].maxFramerate, + expected[i].max_framerate); + EXPECT_EQ(actual.simulcastStream[i].minBitrate * 1000, + static_cast(expected[i].min_bitrate_bps)); + EXPECT_EQ(actual.simulcastStream[i].maxBitrate * 1000, + static_cast(expected[i].max_bitrate_bps)); + EXPECT_EQ(actual.simulcastStream[i].width, + kWidth / expected[i].scale_resolution_down_by); + EXPECT_EQ(actual.simulcastStream[i].height, + kHeight / expected[i].scale_resolution_down_by); + EXPECT_EQ(actual.simulcastStream[i].numberOfTemporalLayers, + expected[i].num_temporal_layers); + EXPECT_EQ(actual.GetScalabilityMode(), expected[i].scalability_mode); + } } VideoStream DefaultConfig() const { @@ -9142,7 +9711,7 @@ class ReconfigureEncoderTest : public VideoStreamEncoderTest { stream.scale_resolution_down_by = 1.0; stream.num_temporal_layers = 1; stream.bitrate_priority = 1.0; - stream.scalability_mode = absl::nullopt; + stream.scalability_mode = std::nullopt; return stream; } @@ -9151,6 +9720,33 @@ class ReconfigureEncoderTest : public VideoStreamEncoderTest { int64_t timestamp_ms_ = 0; }; +TEST_F(ReconfigureSimulcastEncoderTest, ReconfiguredIfMaxFramerateChanges) { + VideoStream config_before_high = DefaultConfig(); + config_before_high.scale_resolution_down_by = 1; + config_before_high.max_framerate = 30; + VideoStream config_before_low = config_before_high; + config_before_low.scale_resolution_down_by = 2; + config_before_low.max_framerate = 20; + + // Keep the highest layer's max_framerate so as to not cause a change in + // VideoCodec::maxFramerate that may influence this test. 
+ VideoStream config_after_high = config_before_high; + VideoStream config_after_low = config_before_low; + config_after_low.max_framerate = 10; + + RunSimulcastTest({{config_before_high, config_before_low}, + {config_after_high, config_after_low}}, + /*expected_num_init_encode=*/2); +} + +class ReconfigureEncoderTest : public ReconfigureSimulcastEncoderTest { + public: + void RunTest(const std::vector& configs, + const int expected_num_init_encode) { + RunSimulcastTest({{configs[0]}, {configs[1]}}, expected_num_init_encode); + } +}; + TEST_F(ReconfigureEncoderTest, NotReconfiguredIfMaxFramerateChanges) { VideoStream config1 = DefaultConfig(); VideoStream config2 = config1; @@ -9242,13 +9838,13 @@ TEST(VideoStreamEncoderSimpleTest, CreateDestroy) { }; // Lots of boiler plate. - test::ScopedKeyValueConfig field_trials; GlobalSimulatedTimeController time_controller(Timestamp::Zero()); - auto stats_proxy = std::make_unique( - time_controller.GetClock(), VideoSendStream::Config(nullptr), - webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo, field_trials); - SimpleVideoStreamEncoderFactory::MockFakeEncoder mock_fake_encoder( - time_controller.GetClock()); + Environment env = CreateEnvironment(time_controller.GetClock()); + MockableSendStatisticsProxy stats_proxy( + &env.clock(), VideoSendStream::Config(nullptr), + webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo, + env.field_trials()); + SimpleVideoStreamEncoderFactory::MockFakeEncoder mock_fake_encoder(env); test::VideoEncoderProxyFactory encoder_factory(&mock_fake_encoder); std::unique_ptr bitrate_allocator_factory = CreateBuiltinVideoBitrateAllocatorFactory(); @@ -9264,20 +9860,19 @@ TEST(VideoStreamEncoderSimpleTest, CreateDestroy) { encoder_queue(new SuperLazyTaskQueue()); // Construct a VideoStreamEncoder instance and let it go out of scope without - // doing anything else (including calling Stop()). This should be fine since - // the posted init task will simply be deleted. - auto encoder = std::make_unique( - time_controller.GetClock(), 1, stats_proxy.get(), encoder_settings, - std::make_unique(stats_proxy.get()), + // doing anything else. This should be fine since the posted init task will + // simply be deleted. + VideoStreamEncoder encoder( + env, 1, &stats_proxy, encoder_settings, + std::make_unique(env, &stats_proxy), std::move(adapter), std::move(encoder_queue), VideoStreamEncoder::BitrateAllocationCallbackType:: - kVideoBitrateAllocation, - field_trials); + kVideoBitrateAllocation); // Stop the encoder explicitly. This additional step tests if we could // hang when calling stop and the TQ has been stopped and/or isn't accepting // any more tasks. - encoder->Stop(); + encoder.Stop(); } TEST(VideoStreamEncoderFrameCadenceTest, ActivatesFrameCadenceOnContentType) { @@ -9318,7 +9913,7 @@ TEST(VideoStreamEncoderFrameCadenceTest, ActivatesFrameCadenceOnContentType) { Mock::VerifyAndClearExpectations(adapter_ptr); // Expect a disabled zero-hertz mode after passing realtime video. 
- EXPECT_CALL(*adapter_ptr, SetZeroHertzModeEnabled(Eq(absl::nullopt))); + EXPECT_CALL(*adapter_ptr, SetZeroHertzModeEnabled(Eq(std::nullopt))); VideoEncoderConfig config2; test::FillEncoderConfiguration(kVideoCodecVP8, 1, &config2); config2.content_type = VideoEncoderConfig::ContentType::kRealtimeVideo; @@ -9338,7 +9933,7 @@ TEST(VideoStreamEncoderFrameCadenceTest, &video_source, webrtc::DegradationPreference::MAINTAIN_FRAMERATE); EXPECT_CALL(*adapter_ptr, OnFrame); - auto buffer = rtc::make_ref_counted(/*width=*/16, /*height=*/16); + auto buffer = make_ref_counted(/*width=*/16, /*height=*/16); video_source.IncomingCapturedFrame( VideoFrame::Builder().set_video_frame_buffer(std::move(buffer)).build()); } @@ -9370,7 +9965,6 @@ TEST(VideoStreamEncoderFrameCadenceTest, UsesFrameCadenceAdapterForFrameRate) { /*max_data_payload_length=*/1000); EXPECT_CALL(*adapter_ptr, GetInputFrameRateFps); - EXPECT_CALL(*adapter_ptr, UpdateFrameRate); PassAFrame(encoder_queue, video_stream_encoder_callback, /*ntp_time_ms=*/1); factory.DepleteTaskQueues(); } @@ -9450,6 +10044,12 @@ TEST(VideoStreamEncoderFrameCadenceTest, UpdatesQualityConvergence) { auto video_stream_encoder = factory.Create(std::move(adapter), &encoder_queue); + // Set minimum QP. + VideoEncoder::EncoderInfo info; + info.min_qp = kVp8SteadyStateQpThreshold; + EXPECT_CALL(factory.GetMockFakeEncoder(), GetEncoderInfo) + .WillRepeatedly(Return(info)); + // Configure 2 simulcast layers and setup 1 MBit/s to unpause the encoder. VideoEncoderConfig video_encoder_config; test::FillEncoderConfiguration(kVideoCodecVP8, 2, &video_encoder_config); @@ -9463,7 +10063,7 @@ TEST(VideoStreamEncoderFrameCadenceTest, UpdatesQualityConvergence) { PassAFrame(encoder_queue, video_stream_encoder_callback, /*ntp_time_ms=*/1); EXPECT_CALL(factory.GetMockFakeEncoder(), EncodeHook) .WillRepeatedly(Invoke([](EncodedImage& encoded_image, - rtc::scoped_refptr buffer) { + scoped_refptr buffer) { encoded_image.qp_ = kVp8SteadyStateQpThreshold + 1; CodecSpecificInfo codec_specific; codec_specific.codecType = kVideoCodecVP8; @@ -9479,7 +10079,7 @@ TEST(VideoStreamEncoderFrameCadenceTest, UpdatesQualityConvergence) { PassAFrame(encoder_queue, video_stream_encoder_callback, /*ntp_time_ms=*/2); EXPECT_CALL(factory.GetMockFakeEncoder(), EncodeHook) .WillRepeatedly(Invoke([](EncodedImage& encoded_image, - rtc::scoped_refptr buffer) { + scoped_refptr buffer) { // This sets simulcast index 0 content to be at target quality, while // index 1 content is not. encoded_image.qp_ = kVp8SteadyStateQpThreshold + @@ -9544,12 +10144,10 @@ TEST(VideoStreamEncoderFrameCadenceTest, factory.GetTimeController()->GetTaskQueueFactory()->CreateTaskQueue( "EncoderQueue", TaskQueueFactory::Priority::NORMAL); - // Enables zero-hertz mode. 
- test::ScopedKeyValueConfig field_trials( - "WebRTC-ZeroHertzScreenshare/Enabled/"); + test::ScopedKeyValueConfig field_trials; auto adapter = FrameCadenceAdapterInterface::Create( factory.GetTimeController()->GetClock(), encoder_queue.get(), - field_trials); + /*metronome=*/nullptr, /*worker_queue=*/nullptr, field_trials); FrameCadenceAdapterInterface* adapter_ptr = adapter.get(); MockVideoSourceInterface mock_source; @@ -9577,4 +10175,60 @@ TEST(VideoStreamEncoderFrameCadenceTest, kMaxFps); } +class VideoStreamEncoderFrameCadenceRestrictionTest : public ::testing::Test { + public: + VideoStreamEncoderFrameCadenceRestrictionTest() + : adapter_ptr_(adapter_.get()), + fake_resource_(FakeResource::Create("FakeResource")), + video_stream_encoder_( + factory_.Create(std::move(adapter_), &encoder_queue_)) {} + + ~VideoStreamEncoderFrameCadenceRestrictionTest() { + factory_.DepleteTaskQueues(); + } + + void UpdateVideoSourceRestrictions(VideoSourceRestrictions restrictions) { + encoder_queue_->PostTask([this, restrictions] { + RTC_DCHECK_RUN_ON(encoder_queue_); + video_stream_encoder_->OnVideoSourceRestrictionsUpdated( + restrictions, VideoAdaptationCounters(), fake_resource_, + VideoSourceRestrictions()); + }); + } + + protected: + SimpleVideoStreamEncoderFactory factory_; + std::unique_ptr> adapter_{ + std::make_unique>()}; + NiceMock* adapter_ptr_; + TaskQueueBase* encoder_queue_{nullptr}; + scoped_refptr fake_resource_; + VideoSourceRestrictions restrictions_; + std::unique_ptr + video_stream_encoder_; +}; + +TEST_F(VideoStreamEncoderFrameCadenceRestrictionTest, + UpdatesVideoSourceRestrictionsUnRestricted) { + EXPECT_CALL(*adapter_ptr_, UpdateVideoSourceRestrictions(Eq(std::nullopt))); + UpdateVideoSourceRestrictions(VideoSourceRestrictions()); +} + +TEST_F(VideoStreamEncoderFrameCadenceRestrictionTest, + UpdatesVideoSourceRestrictionsWithMaxFrameRateRestriction) { + restrictions_.set_max_frame_rate(20); + EXPECT_CALL(*adapter_ptr_, UpdateVideoSourceRestrictions(Optional(20))); + UpdateVideoSourceRestrictions(restrictions_); +} + +TEST_F(VideoStreamEncoderFrameCadenceRestrictionTest, + UpdatesVideoSourceRestrictionsWithoutMaxFrameRateRestriction) { + // Restrictions in resolution count as restriction updated, even though the + // FPS is unlimited. + restrictions_.set_max_pixels_per_frame(99); + restrictions_.set_target_pixels_per_frame(101); + EXPECT_CALL(*adapter_ptr_, UpdateVideoSourceRestrictions(Eq(std::nullopt))); + UpdateVideoSourceRestrictions(restrictions_); +} + } // namespace webrtc diff --git a/webrtc.gni b/webrtc.gni index 5a1c43c888..2a407632d6 100644 --- a/webrtc.gni +++ b/webrtc.gni @@ -39,6 +39,10 @@ if (is_fuchsia) { import("//build/config/fuchsia/config.gni") } +if (build_with_chromium) { + import("//media/media_options.gni") +} + # This declare_args is separated from the next one because args declared # in this one, can be read from the next one (args defined in the same # declare_args cannot be referenced in that scope). @@ -49,10 +53,6 @@ declare_args() { } declare_args() { - # If set to true, C++ code will refer to the new JNI Generator symbols. - # If set to false the old ones will be used (to provide a nice update path). - rtc_jni_generator_legacy_symbols = false - # Setting this to true, will make RTC_DLOG() expand to log statements instead # of being removed by the preprocessor. # This is useful for example to be able to get RTC_DLOGs on a release build. @@ -99,9 +99,6 @@ declare_args() { # TLS-TURN server. 
In return disabling this saves around 100kb in the binary. rtc_builtin_ssl_root_certificates = true - # Include the iLBC audio codec? - rtc_include_ilbc = true - # Disable this to avoid building the Opus audio codec. rtc_include_opus = true @@ -131,9 +128,6 @@ declare_args() { # Selects whether the audio processing module should be excluded. rtc_exclude_audio_processing_module = false - # Set this to true to enable BWE test logging. - rtc_enable_bwe_test_logging = false - # Set this to false to skip building examples. rtc_build_examples = true @@ -176,21 +170,34 @@ declare_args() { rtc_build_with_neon = (current_cpu == "arm" && arm_use_neon) || current_cpu == "arm64" - # Enable this to build OpenH264 encoder/FFmpeg decoder. This is supported on - # all platforms except Android and iOS. Because FFmpeg can be built - # with/without H.264 support, `ffmpeg_branding` has to separately be set to a - # value that includes H.264, for example "Chrome". If FFmpeg is built without - # H.264, compilation succeeds but `H264DecoderImpl` fails to initialize. + # Enable this to build OpenH264 encoder/FFmpeg decoder. When building WebRTC + # as part of Chromium, this is delegated to `media_use_openh264`. When + # building WebRTC as a standalone library, this is supported on all platforms + # except Android and iOS. Because FFmpeg can be built with/without H.264 + # support, `ffmpeg_branding` has to separately be set to a value that + # includes H.264, for example "Chrome". If FFmpeg is built without H.264, + # compilation succeeds but `H264DecoderImpl` fails to initialize. # CHECK THE OPENH264, FFMPEG AND H.264 LICENSES/PATENTS BEFORE BUILDING. # http://www.openh264.org, https://www.ffmpeg.org/ # # Enabling H264 when building with MSVC is currently not supported, see # bugs.webrtc.org/9213#c13 for more info. - rtc_use_h264 = - proprietary_codecs && !is_android && !is_ios && !(is_win && !is_clang) + if (build_with_chromium) { + rtc_use_h264 = media_use_openh264 + } else { + rtc_use_h264 = + proprietary_codecs && !is_android && !is_ios && !(is_win && !is_clang) + } + + # Use system OpenH264 + rtc_system_openh264 = false # Enable to use H265 - rtc_use_h265 = proprietary_codecs + if (build_with_chromium) { + rtc_use_h265 = enable_hevc_parser_and_hw_decoder + } else { + rtc_use_h265 = proprietary_codecs + } # Enable this flag to make webrtc::Mutex be implemented by absl::Mutex. rtc_use_absl_mutex = false @@ -260,6 +267,14 @@ declare_args() { # WebRTC does not declare its public dependencies. See webrtc:8603. Instead # WebRTC is using a global common dependencies. rtc_common_public_deps = [] # no-presubmit-check TODO(webrtc:8603) + + # When true, include the Perfetto library. + rtc_use_perfetto = false + + # When true allows exports from deprecated namespaces. + # This will be set to false on Jun 23 2025. See bugs.webrtc.org/42232595 + # for details. + rtc_allow_deprecated_namespaces = true } if (!build_with_mozilla) { @@ -284,16 +299,6 @@ declare_args() { rtc_build_opus = !build_with_mozilla rtc_build_ssl = !build_with_mozilla - # Enable libevent task queues on platforms that support it. - if (is_win || is_mac || is_ios || is_nacl || is_fuchsia || - target_cpu == "wasm") { - rtc_enable_libevent = false - rtc_build_libevent = false - } else { - rtc_enable_libevent = true - rtc_build_libevent = !build_with_mozilla - } - # Excluded in Chromium since its prerequisites don't require Pulse Audio. 
rtc_include_pulse_audio = !build_with_chromium @@ -329,10 +334,6 @@ declare_args() { # Set this to true to disable webrtc metrics. rtc_disable_metrics = false - - # Set this to true to exclude the transient suppressor in the audio processing - # module from the build. - rtc_exclude_transient_suppressor = false } declare_args() { @@ -378,6 +379,10 @@ if (is_mac || is_ios) { } } +if (build_with_chromium) { + rtc_use_perfetto = true +} + # Global public configuration that should be applied to all WebRTC targets. You # normally shouldn't need to include this in your target as it's automatically # included when using the rtc_* templates. It set the defines, include paths and @@ -404,21 +409,18 @@ set_defaults("rtc_library") { configs = rtc_add_configs public_deps = rtc_common_public_deps # no-presubmit-check TODO(webrtc:8603) suppressed_configs = [] - absl_deps = [] } set_defaults("rtc_source_set") { configs = rtc_add_configs public_deps = rtc_common_public_deps # no-presubmit-check TODO(webrtc:8603) suppressed_configs = [] - absl_deps = [] } set_defaults("rtc_static_library") { configs = rtc_add_configs public_deps = rtc_common_public_deps # no-presubmit-check TODO(webrtc:8603) suppressed_configs = [] - absl_deps = [] } set_defaults("rtc_executable") { @@ -474,14 +476,13 @@ all_poison_types = [ # Encoders and decoders for specific audio codecs such as Opus and iSAC. "audio_codecs", - # Default task queue implementation. - "default_task_queue", - # Default echo detector implementation. "default_echo_detector", - # JSON parsing should not be needed in the "slim and modular" WebRTC. - "rtc_json", + # Implementations of the utilities exposed through `Environment`. + # Most webrtc classes must use propagated `Environment`. Only few top-level + # classes are allowed to create `Environment` from individual utilities. + "environment_construction", # Software video codecs (VP8 and VP9 through libvpx). "software_video_codecs", @@ -530,13 +531,18 @@ template("rtc_test") { public_configs += invoker.public_configs } if (!build_with_chromium && is_android) { + use_default_launcher = false android_manifest = webrtc_root + "test/android/AndroidManifest.xml" use_raw_android_executable = false min_sdk_version = 21 - target_sdk_version = 23 + target_sdk_version = 24 deps += [ "//build/android/gtest_apk:native_test_instrumentation_test_runner_java", + webrtc_root + "sdk/android:native_test_jni_onload", + webrtc_root + "sdk/android:base_java", webrtc_root + "test:native_test_java", + webrtc_root + "test:native_test_support", + "//third_party/jni_zero:jni_zero_java", ] } @@ -547,17 +553,20 @@ template("rtc_test") { } } - # If absl_deps is [], no action is needed. If not [], then it needs to be - # converted to //third_party/abseil-cpp:absl when build_with_chromium=true - # otherwise it just needs to be added to deps. 
- if (defined(absl_deps) && absl_deps != []) { - if (!defined(deps)) { + assert( + !defined(absl_deps), + "`absl_deps` has been deprecated, add your Abseil dependencies to the `deps` variable.") + + # Abseil dependencies need to be converted to //third_party/abseil-cpp:absl when build_with_chromium=true + if (build_with_chromium && defined(deps)) { + absl_dependencies = + filter_labels_include(deps, [ "//third_party/abseil-cpp/*" ]) + if (absl_dependencies != []) { + filtered_deps = + filter_labels_exclude(deps, [ "//third_party/abseil-cpp/*" ]) deps = [] - } - if (build_with_chromium) { + deps = filtered_deps deps += [ "//third_party/abseil-cpp:absl" ] - } else { - deps += absl_deps } } @@ -661,17 +670,20 @@ template("rtc_source_set") { public_configs += invoker.public_configs } - # If absl_deps is [], no action is needed. If not [], then it needs to be - # converted to //third_party/abseil-cpp:absl when build_with_chromium=true - # otherwise it just needs to be added to deps. - if (absl_deps != []) { - if (!defined(deps)) { + assert( + !defined(absl_deps), + "`absl_deps` has been deprecated, add your Abseil dependencies to the `deps` variable.") + + # Abseil dependencies need to be converted to //third_party/abseil-cpp:absl when build_with_chromium=true + if (build_with_chromium && defined(deps)) { + absl_dependencies = + filter_labels_include(deps, [ "//third_party/abseil-cpp/*" ]) + if (absl_dependencies != []) { + filtered_deps = + filter_labels_exclude(deps, [ "//third_party/abseil-cpp/*" ]) deps = [] - } - if (build_with_chromium) { + deps = filtered_deps deps += [ "//third_party/abseil-cpp:absl" ] - } else { - deps += absl_deps } } } @@ -750,17 +762,20 @@ template("rtc_static_library") { public_configs += invoker.public_configs } - # If absl_deps is [], no action is needed. If not [], then it needs to be - # converted to //third_party/abseil-cpp:absl when build_with_chromium=true - # otherwise it just needs to be added to deps. - if (absl_deps != []) { - if (!defined(deps)) { + assert( + !defined(absl_deps), + "`absl_deps` has been deprecated, add your Abseil dependencies to the `deps` variable.") + + # Abseil dependencies need to be converted to //third_party/abseil-cpp:absl when build_with_chromium=true + if (build_with_chromium && defined(deps)) { + absl_dependencies = + filter_labels_include(deps, [ "//third_party/abseil-cpp/*" ]) + if (absl_dependencies != []) { + filtered_deps = + filter_labels_exclude(deps, [ "//third_party/abseil-cpp/*" ]) deps = [] - } - if (build_with_chromium) { + deps = filtered_deps deps += [ "//third_party/abseil-cpp:absl" ] - } else { - deps += absl_deps } } } @@ -889,17 +904,20 @@ template("rtc_library") { public_configs += invoker.public_configs } - # If absl_deps is [], no action is needed. If not [], then it needs to be - # converted to //third_party/abseil-cpp:absl when build_with_chromium=true - # otherwise it just needs to be added to deps. 
- if (absl_deps != []) { - if (!defined(deps)) { + assert( + !defined(absl_deps), + "`absl_deps` has been deprecated, add your Abseil dependencies to the `deps` variable.") + + # Abseil dependencies need to be converted to //third_party/abseil-cpp:absl when build_with_chromium=true + if (build_with_chromium && defined(deps)) { + absl_dependencies = + filter_labels_include(deps, [ "//third_party/abseil-cpp/*" ]) + if (absl_dependencies != []) { + filtered_deps = + filter_labels_exclude(deps, [ "//third_party/abseil-cpp/*" ]) deps = [] - } - if (build_with_chromium) { + deps = filtered_deps deps += [ "//third_party/abseil-cpp:absl" ] - } else { - deps += absl_deps } } } @@ -942,6 +960,24 @@ template("rtc_executable") { "//build/win:default_exe_manifest", ] } + + # Convert abseil dependencies to //third_party/abseil-cpp:absl_full when + # build_with_chromium=true so that webrtc targets won't need exceptions to + # depend on individual absl targets. + # Note that //third_party/abseil-cpp:absl_full build target includes flags, + # but //third_party/abseil-cpp:absl target - doesn't. That allows webrtc + # executables, but not libraries to use absl flags. + if (build_with_chromium && defined(deps)) { + absl_dependencies = + filter_labels_include(deps, [ "//third_party/abseil-cpp/*" ]) + if (absl_dependencies != []) { + filtered_deps = + filter_labels_exclude(deps, [ "//third_party/abseil-cpp/*" ]) + deps = [] + deps = filtered_deps + deps += [ "//third_party/abseil-cpp:absl_full" ] + } + } } } @@ -1022,6 +1058,7 @@ if (is_mac || is_ios) { umbrella_header_path = "$target_gen_dir/$output_name.framework/WebRTC/$output_name.h" modulemap_path = "$target_gen_dir/Modules/module.modulemap" + privacy_manifest_path = "$target_gen_dir/$target_name/PrivacyInfo.xcprivacy" action_foreach("create_bracket_include_headers_$target_name") { script = "//tools_webrtc/apple/copy_framework_header.py" @@ -1062,6 +1099,7 @@ if (is_mac || is_ios) { deps += [ ":copy_framework_headers_$this_target_name", ":copy_modulemap_$this_target_name", + ":copy_privacy_manifest_$this_target_name", ":copy_umbrella_header_$this_target_name", ":create_bracket_include_headers_$this_target_name", ":modulemap_$this_target_name", @@ -1084,6 +1122,7 @@ if (is_mac || is_ios) { ":create_bracket_include_headers_$this_target_name") deps += [ + ":copy_privacy_manifest_$this_target_name", ":copy_umbrella_header_$this_target_name", ":create_bracket_include_headers_$this_target_name", ] @@ -1093,8 +1132,13 @@ if (is_mac || is_ios) { if (is_mac || target_environment == "catalyst") { # Catalyst frameworks use the same layout as regular Mac frameworks. headers_dir = "Versions/A/Headers" + + # The path to the privacy manifest file differs between Mac and iOS. 
+ # https://developer.apple.com/documentation/bundleresources/privacy_manifest_files/adding_a_privacy_manifest_to_your_app_or_third-party_sdk + privacy_manifest_out_path = "Versions/A/Resources/PrivacyInfo.xcprivacy" } else { headers_dir = "Headers" + privacy_manifest_out_path = "PrivacyInfo.xcprivacy" } bundle_data("copy_framework_headers_$this_target_name") { @@ -1144,6 +1188,25 @@ if (is_mac || is_ios) { deps = [ ":umbrella_header_$target_name" ] } + + action("create_privacy_manifest_$target_name") { + script = "//tools_webrtc/apple/generate_privacy_manifest.py" + + args = [ + "--output", + rebase_path(privacy_manifest_path), + ] + + outputs = [ privacy_manifest_path ] + } + + copy("copy_privacy_manifest_$target_name") { + sources = [ privacy_manifest_path ] + outputs = + [ "$root_out_dir/$output_name.framework/$privacy_manifest_out_path" ] + + deps = [ ":create_privacy_manifest_$target_name" ] + } } } @@ -1159,21 +1222,11 @@ if (is_android) { "visibility", ]) - errorprone_args = [] - - # Treat warnings as errors. - errorprone_args += [ "-Werror" ] - # Add any arguments defined by the invoker. if (defined(invoker.errorprone_args)) { - errorprone_args += invoker.errorprone_args + errorprone_args = invoker.errorprone_args } - if (!defined(deps)) { - deps = [] - } - - no_build_hooks = true not_needed([ "android_manifest" ]) } } @@ -1188,16 +1241,6 @@ if (is_android) { "suppressed_configs", "visibility", ]) - - # Treat warnings as errors. - errorprone_args = [] - errorprone_args += [ "-Werror" ] - - if (!defined(deps)) { - deps = [] - } - - no_build_hooks = true } } @@ -1211,16 +1254,7 @@ if (is_android) { "suppressed_configs", "visibility", ]) - - # Treat warnings as errors. - errorprone_args = [] - errorprone_args += [ "-Werror" ] - - if (!defined(deps)) { - deps = [] - } - - no_build_hooks = true + deps += [ "//third_party/jni_zero:jni_zero_java" ] } } } diff --git a/webrtc_lib_link_test.cc b/webrtc_lib_link_test.cc index 868c17287e..4eeb59a57a 100644 --- a/webrtc_lib_link_test.cc +++ b/webrtc_lib_link_test.cc @@ -8,12 +8,15 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include "api/audio/audio_device.h" +#include "api/audio/audio_processing.h" +#include "api/audio/builtin_audio_processing_builder.h" #include "api/audio_codecs/audio_decoder_factory_template.h" #include "api/audio_codecs/audio_encoder_factory_template.h" #include "api/audio_codecs/opus/audio_decoder_opus.h" #include "api/audio_codecs/opus/audio_encoder_opus.h" -#include "api/call/call_factory_interface.h" #include "api/create_peerconnection_factory.h" +#include "api/enable_media.h" #include "api/peer_connection_interface.h" #include "api/rtc_event_log/rtc_event_log_factory.h" #include "api/stats/rtcstats_objects.h" @@ -28,18 +31,12 @@ #include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h" #include "api/video_codecs/video_encoder_factory_template_libvpx_vp9_adapter.h" #include "api/video_codecs/video_encoder_factory_template_open_h264_adapter.h" -#include "media/engine/webrtc_media_engine.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_processing/include/audio_processing.h" namespace webrtc { -cricket::MediaEngineDependencies CreateSomeMediaDeps( - TaskQueueFactory* task_queue_factory) { - cricket::MediaEngineDependencies media_deps; - media_deps.task_queue_factory = task_queue_factory; +void CreateSomeMediaDeps(PeerConnectionFactoryDependencies& media_deps) { media_deps.adm = AudioDeviceModule::CreateForTest( - AudioDeviceModule::kDummyAudio, task_queue_factory); + AudioDeviceModule::kDummyAudio, media_deps.task_queue_factory.get()); media_deps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory(); media_deps.audio_decoder_factory = @@ -52,21 +49,19 @@ cricket::MediaEngineDependencies CreateSomeMediaDeps( std::make_unique>(); - media_deps.audio_processing = webrtc::AudioProcessingBuilder().Create(); - return media_deps; + media_deps.audio_processing_builder = + std::make_unique(); } webrtc::PeerConnectionFactoryDependencies CreateSomePcfDeps() { webrtc::PeerConnectionFactoryDependencies pcf_deps; pcf_deps.task_queue_factory = CreateDefaultTaskQueueFactory(); - pcf_deps.signaling_thread = rtc::Thread::Current(); - pcf_deps.network_thread = rtc::Thread::Current(); - pcf_deps.worker_thread = rtc::Thread::Current(); - pcf_deps.call_factory = webrtc::CreateCallFactory(); - pcf_deps.event_log_factory = std::make_unique( - pcf_deps.task_queue_factory.get()); - auto media_deps = CreateSomeMediaDeps(pcf_deps.task_queue_factory.get()); - pcf_deps.media_engine = cricket::CreateMediaEngine(std::move(media_deps)); + pcf_deps.signaling_thread = webrtc::Thread::Current(); + pcf_deps.network_thread = webrtc::Thread::Current(); + pcf_deps.worker_thread = webrtc::Thread::Current(); + pcf_deps.event_log_factory = std::make_unique(); + CreateSomeMediaDeps(pcf_deps); + EnableMedia(pcf_deps); return pcf_deps; } @@ -86,12 +81,14 @@ void TestCase1ModularFactory() { } void TestCase2RegularFactory() { - auto task_queue_factory = CreateDefaultTaskQueueFactory(); - auto media_deps = CreateSomeMediaDeps(task_queue_factory.get()); + PeerConnectionFactoryDependencies media_deps; + media_deps.task_queue_factory = CreateDefaultTaskQueueFactory(); + CreateSomeMediaDeps(media_deps); auto peer_connection_factory = webrtc::CreatePeerConnectionFactory( - rtc::Thread::Current(), rtc::Thread::Current(), rtc::Thread::Current(), - std::move(media_deps.adm), std::move(media_deps.audio_encoder_factory), + webrtc::Thread::Current(), webrtc::Thread::Current(), + webrtc::Thread::Current(), std::move(media_deps.adm), + std::move(media_deps.audio_encoder_factory), 
std::move(media_deps.audio_decoder_factory), std::move(media_deps.video_encoder_factory), std::move(media_deps.video_decoder_factory), nullptr, nullptr); diff --git a/whitespace.txt b/whitespace.txt index ea60e6175b..389c15fecc 100644 --- a/whitespace.txt +++ b/whitespace.txt @@ -2,6 +2,7 @@ You can modify this file to create no-op changelists. Try to write something funny. And please don't add trailing whitespace. -Once upon a time there was an elephant in Stockholm. +Once upon a time there was a white elephant in Stockholm. Why did the elephant get kicked out of the Swedish Parliament? Because it kept making trunk calls! +Migration is done, we are now testing!
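For reference, a minimal sketch (not part of the patch) of the dependency wiring that the updated webrtc_lib_link_test.cc links against: media support is now enabled by calling EnableMedia() on a PeerConnectionFactoryDependencies instead of assembling a cricket::MediaEngineDependencies and a call_factory. The helper name below is hypothetical, and the single VP8 adapter and dummy ADM are illustrative choices; the test itself registers the VP8, VP9 and OpenH264 adapters shown in its includes.

#include <memory>
#include <utility>

#include "api/audio/audio_device.h"
#include "api/audio/builtin_audio_processing_builder.h"
#include "api/audio_codecs/audio_decoder_factory_template.h"
#include "api/audio_codecs/audio_encoder_factory_template.h"
#include "api/audio_codecs/opus/audio_decoder_opus.h"
#include "api/audio_codecs/opus/audio_encoder_opus.h"
#include "api/enable_media.h"
#include "api/peer_connection_interface.h"
#include "api/rtc_event_log/rtc_event_log_factory.h"
#include "api/scoped_refptr.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "api/video_codecs/video_decoder_factory_template.h"
#include "api/video_codecs/video_decoder_factory_template_libvpx_vp8_adapter.h"
#include "api/video_codecs/video_encoder_factory_template.h"
#include "api/video_codecs/video_encoder_factory_template_libvpx_vp8_adapter.h"
#include "rtc_base/thread.h"

// Hypothetical helper: builds a peer connection factory the way the updated
// link test does, with EnableMedia() replacing the removed media_engine and
// call_factory members.
webrtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
CreateLinkTestPeerConnectionFactory() {
  webrtc::PeerConnectionFactoryDependencies deps;
  deps.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory();
  deps.signaling_thread = webrtc::Thread::Current();
  deps.network_thread = webrtc::Thread::Current();
  deps.worker_thread = webrtc::Thread::Current();
  deps.event_log_factory = std::make_unique<webrtc::RtcEventLogFactory>();

  // Audio: dummy device, Opus codec factories and the built-in APM builder.
  deps.adm = webrtc::AudioDeviceModule::CreateForTest(
      webrtc::AudioDeviceModule::kDummyAudio, deps.task_queue_factory.get());
  deps.audio_encoder_factory =
      webrtc::CreateAudioEncoderFactory<webrtc::AudioEncoderOpus>();
  deps.audio_decoder_factory =
      webrtc::CreateAudioDecoderFactory<webrtc::AudioDecoderOpus>();
  deps.audio_processing_builder =
      std::make_unique<webrtc::BuiltinAudioProcessingBuilder>();

  // Video: template-based factories; one VP8 adapter keeps the sketch short.
  deps.video_encoder_factory =
      std::make_unique<webrtc::VideoEncoderFactoryTemplate<
          webrtc::LibvpxVp8EncoderTemplateAdapter>>();
  deps.video_decoder_factory =
      std::make_unique<webrtc::VideoDecoderFactoryTemplate<
          webrtc::LibvpxVp8DecoderTemplateAdapter>>();

  // Wires the default media engine into the dependencies.
  webrtc::EnableMedia(deps);
  return webrtc::CreateModularPeerConnectionFactory(std::move(deps));
}

A target built this way no longer needs media/engine/webrtc_media_engine.h or the call_factory member that this change removes from the link test.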